Merge upstream-main. am: 480bc27115 am: 0dfc391aee

Original change: https://android-review.googlesource.com/c/platform/external/autotest/+/2112105

Change-Id: I5670fbbbbe6c637360c7f60bb2f0dfae74cf7cbe
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/.gitignore b/.gitignore
index 034906c..bc59e1a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,6 +43,7 @@
 results/*
 server/tmp
 tko/parsers/test/site_scenarios
+tko/tko_pb2.py
 ExternalSource
 /frontend/client/www/
 /shadow_config.ini
@@ -52,6 +53,8 @@
 server/cros/chaos_shadow_ap_list.conf
 server/site_tests/network_WiFiChaosPSK/control.local
 manifest-versions
+server/cros/bluetooth/LATEST_STABLE_AUTOTEST_COMMIT
+contrib/config/
 
 # These exist on the production servers.
 bug_filing_oauth_credentials.dat
@@ -83,3 +86,5 @@
 .idea
 .vscode
 .cipd_bin
+
+autotest.iml
\ No newline at end of file
diff --git a/BLUETOOTH_OWNERS b/BLUETOOTH_OWNERS
new file mode 100644
index 0000000..6537755
--- /dev/null
+++ b/BLUETOOTH_OWNERS
@@ -0,0 +1,16 @@
+abhishekpandit@google.com
+alainm@chromium.org
+apusaka@chromium.org
+chharry@google.com
+deanliao@chromium.org
+howardchung@chromium.org
+jiangzp@google.com
+josephsih@chromium.org
+laikatherine@google.com
+mcchou@chromium.org
+melhuishj@chromium.org
+michaelfsun@google.com
+mmandlik@chromium.org
+sonnysasaka@chromium.org
+yinghsu@google.com
+yshavit@chromium.org
diff --git a/CTS_OWNERS b/CTS_OWNERS
index ecc17b3..89525dc 100644
--- a/CTS_OWNERS
+++ b/CTS_OWNERS
@@ -1,6 +1,5 @@
 # ARC-CTS-eng team
 
 kinaba@chromium.org
+rkuroiwa@chromium.org
 shaochuan@chromium.org
-jiyounha@chromium.org
-boleynsu@chromium.org
diff --git a/ENGPROD_OWNERS b/ENGPROD_OWNERS
index f876510..6d19581 100644
--- a/ENGPROD_OWNERS
+++ b/ENGPROD_OWNERS
@@ -1,21 +1,19 @@
 # ChromeOS Engprod Team (formerly ChromeOS Test Team)
-bhansknecht@chromium.org
 dbeckett@chromium.org
 dchan@chromium.org
 dhaddock@chromium.org
-gredelston@chromium.org
 harpreet@chromium.org
 ianrlee@chromium.org
-jomag@chromium.org
 kalin@chromium.org
 kathrelkeld@chromium.org
 kmshelton@chromium.org
+kyleshima@chromium.org
 meiring@google.com
+rnanjappan@chromium.org
 rohitbm@chromium.org
 rzakarian@chromium.org
 shijinabraham@chromium.org
 shijinabraham@google.com
-timkovich@chromium.org
 vsuley@chromium.org
 
 # ChromeOS Bluetooth Platform Team
@@ -32,6 +30,5 @@
 jnchase@chromium.org
 jrt@chromium.org
 kdgwill@chromium.org
-pceballos@chromium.org
 turg@google.com
 ziegs@chromium.org
diff --git a/FINGERPRINT_OWNERS b/FINGERPRINT_OWNERS
index 3a5feab..d45a99b 100644
--- a/FINGERPRINT_OWNERS
+++ b/FINGERPRINT_OWNERS
@@ -1,8 +1,7 @@
-# Chrome OS Fingerprint OWNERS
+# ChromeOS Fingerprint OWNERS
 # contact: chromeos-fingerprint@google.com
 
 hesling@chromium.org
 jora@google.com
 josienordrum@google.com
 tomhughes@chromium.org
-yichengli@chromium.org
diff --git a/FIRMWARE_OWNERS b/FIRMWARE_OWNERS
index f8ebf60..32ed43c 100644
--- a/FIRMWARE_OWNERS
+++ b/FIRMWARE_OWNERS
@@ -2,15 +2,17 @@
 
 aaboagye@chromium.org
 alevkoy@chromium.org
-brentpeterson@chromium.org
 dlaurie@google.com
 dossym@chromium.org
+jbettis@chromium.org
 jwerner@chromium.org
 mruthven@chromium.org
 namyoon@chromium.org
 pgeorgi@chromium.org
 philipchen@chromium.org
+roccochen@chromium.org
 shchen@chromium.org
 vbendeb@chromium.org
 waihong@google.com
 yllin@chromium.org
+yupingso@chromium.org
diff --git a/HARNESS_OWNERS b/HARNESS_OWNERS
new file mode 100644
index 0000000..530c5af
--- /dev/null
+++ b/HARNESS_OWNERS
@@ -0,0 +1,7 @@
+# ChromeOS Automation & Frameworks
+dbeckett@chromium.org
+kathrelkeld@chromium.org
+seewaifu@google.com
+jessemcguire@google.com
+# jamesfeister@google.com to be added soon
+# yichiyan@google.com to be added soon
diff --git a/INFRA_OWNERS b/INFRA_OWNERS
index 21e4138..3595669 100644
--- a/INFRA_OWNERS
+++ b/INFRA_OWNERS
@@ -1,15 +1,21 @@
 # Chrome Fleet Software Team
 anhdle@chromium.org
+anhdle@google.com
 ayatane@chromium.org
+ayatane@google.com
 gregorynisbet@chromium.org
+gregorynisbet@google.com
 guocb@chromium.org
+guocb@google.com
 otabek@chromium.org
+otabek@google.com
 sanikak@chromium.org
+sanikak@google.com
 vivekshah@chromium.org
+vivekshah@google.com
 xianuowang@chromium.org
+xianuowang@google.com
 xixuan@chromium.org
+xixuan@google.com
 
-include chromiumos/chromite:/OWNERS.testplatform
-
-# Core Infra Team
-dbeckett@chromium.org
+include chromiumos/chromite:/OWNERS.ci
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..c33f706
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1,6 @@
+include /HARNESS_OWNERS
+
+per-file site_utils/attribute_allowlist.txt = *
+
+per-file WIFI_OWNERS = file:/WIFI_OWNERS
+per-file ENGPROD_OWNERS = file:/ENGPROD_OWNERS
diff --git a/POWER_OWNERS b/POWER_OWNERS
new file mode 100644
index 0000000..7ed6189
--- /dev/null
+++ b/POWER_OWNERS
@@ -0,0 +1,6 @@
+# ChromeOS Power Team
+
+coconutruben@chromium.org
+fshao@chromium.org
+mqg@chromium.org
+puthik@chromium.org
diff --git a/PRESUBMIT.cfg b/PRESUBMIT.cfg
index 5614d28..863a492 100644
--- a/PRESUBMIT.cfg
+++ b/PRESUBMIT.cfg
@@ -8,7 +8,7 @@
 long_line_check: true
 
 [Hook Scripts]
-git_cl_presubmit = vpython3 $(which presubmit_support.py) $(if [[ "${PRESUBMIT_COMMIT}" = pre-submit ]]; then echo --commit; fi)
+git_cl_presubmit = vpython $(which presubmit_support.py) $(if [[ "${PRESUBMIT_COMMIT}" = pre-submit ]]; then echo --commit; fi)
 # If running in the chroot, EPYTHON ensures the wrappers don't get the wrong
 # version.
 pylint = EPYTHON=python2 ./utils/run_pylint.py
diff --git a/README.md b/README.md
index 392f98b..68d11f1 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,8 @@
-# Autotest: Automated integration testing for Android and Chrome OS Devices
+# Autotest: Automated integration testing for Android and ChromeOS Devices
 
 Autotest is a framework for fully automated testing. It was originally designed
-to test the Linux kernel, and expanded by the Chrome OS team to validate
-complete system images of Chrome OS and Android.
+to test the Linux kernel, and expanded by the ChromeOS team to validate
+complete system images of ChromeOS and Android.
 
 Autotest is composed of a number of modules that will help you to do stand alone
 tests or setup a fully automated test grid, depending on what you are up to.
@@ -17,7 +17,7 @@
   infrastructure, and the device under test is controlled remotely via
   SSH/adb/some combination of the above.
 
-* Developer tools to execute one or more tests.  `test_that` for Chrome OS and
+* Developer tools to execute one or more tests.  `test_that` for ChromeOS and
   `test_droid` for Android allow developers to run tests against a device
   connected to their development machine on their desk.  These tools are written
   so that the same test logic that runs in the lab will run at their desk,
@@ -27,7 +27,7 @@
   capable of managing and running tests against thousands of devices in various
   lab environments. This includes code for both synchronous and asynchronous
   scheduling of tests.  Tests are run against this hardware daily to validate
-  every build of Chrome OS.
+  every build of ChromeOS.
 
 * Infrastructure to set up miniature replicas of a full lab.  A full lab does
   entail a certain amount of administrative work which isn't appropriate for
diff --git a/WIFI_OWNERS b/WIFI_OWNERS
new file mode 100644
index 0000000..baa72ff
--- /dev/null
+++ b/WIFI_OWNERS
@@ -0,0 +1,12 @@
+arowa@chromium.org
+billyzhao@chromium.org
+deanliao@chromium.org
+frankgor@google.com
+harpreet@chromium.org
+jemele@chromium.org
+jintaolin@chromium.org
+junyuu@chromium.org
+kglund@google.com
+kuabhs@chromium.org
+matthewmwang@chromium.org
+norvez@chromium.org
diff --git a/autotest_lib b/autotest_lib
new file mode 120000
index 0000000..945c9b4
--- /dev/null
+++ b/autotest_lib
@@ -0,0 +1 @@
+.
\ No newline at end of file
diff --git a/cli/OWNERS b/cli/OWNERS
new file mode 100644
index 0000000..5804625
--- /dev/null
+++ b/cli/OWNERS
@@ -0,0 +1 @@
+include chromiumos/config:/owners/testservice
diff --git a/cli/atest b/cli/atest
index 9919cea..f38b850 100755
--- a/cli/atest
+++ b/cli/atest
@@ -1,4 +1,4 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 
 import sys
 import common
diff --git a/cli/atest_venv_entry.py b/cli/atest_venv_entry.py
index ee0566b..74513ab 100755
--- a/cli/atest_venv_entry.py
+++ b/cli/atest_venv_entry.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 import os
 import sys
 
@@ -9,15 +9,16 @@
     os.path.join(_AUTOTEST_ROOT, '..', '..', '..', '..'))
 _SKYLAB_INVENTORY_DIR = os.path.join(_CHROMIUMOS_ROOT, 'infra',
                                      'skylab_inventory', 'venv')
-# In any sane chromiumos checkout
+# In any valid chromiumos checkout
 sys.path.append(_SKYLAB_INVENTORY_DIR)
 # TODO: Where is this checked out on infra servers?
 
 try:
-  import skylab_inventory  # pylint: disable=unused-import
+    import skylab_inventory  # pylint: disable=unused-import
 except ImportError as e:
-  raise Exception('Error when importing skylab_inventory (venv dir: %s): %s'
-                  % (_SKYLAB_INVENTORY_DIR, e))
+    raise Exception(
+            'Error when importing skylab_inventory (venv dir: %s): %s' %
+            (_SKYLAB_INVENTORY_DIR, e))
 
 # Import atest after 'import skylab_inventory' as it uses skylab_inventory
 from autotest_lib.cli import atest
diff --git a/cli/rpc_unittest.py b/cli/rpc_unittest.py
index 0f602c4..912672b 100755
--- a/cli/rpc_unittest.py
+++ b/cli/rpc_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2008 Google Inc. All Rights Reserved.
 
diff --git a/cli/server.py b/cli/server.py
index de3435f..9415592 100644
--- a/cli/server.py
+++ b/cli/server.py
@@ -26,14 +26,10 @@
 from autotest_lib.cli import skylab_utils
 from autotest_lib.cli import topic_common
 from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.common_lib import revision_control
 # The django setup is moved here as test_that uses sqlite setup. If this line
 # is in server_manager, test_that unittest will fail.
 from autotest_lib.frontend import setup_django_environment
-from autotest_lib.site_utils import server_manager
-from autotest_lib.site_utils import server_manager_utils
-from chromite.lib import gob_util
 
 try:
     from skylab_inventory import text_manager
@@ -43,8 +39,6 @@
     pass
 
 
-RESPECT_SKYLAB_SERVERDB = global_config.global_config.get_config_value(
-        'SKYLAB', 'respect_skylab_serverdb', type=bool, default=False)
 ATEST_DISABLE_MSG = ('Updating server_db via atest server command has been '
                      'disabled. Please use use go/cros-infra-inventory-tool '
                      'to update it in skylab inventory service.')
@@ -215,15 +209,7 @@
                 self.failure(e, what_failed='Failed to list servers from skylab'
                              ' inventory.', item=self.hostname, fatal=True)
         else:
-            try:
-                return server_manager_utils.get_servers(
-                        hostname=self.hostname,
-                        role=self.role,
-                        status=self.status)
-            except (server_manager_utils.ServerActionError,
-                    error.InvalidDataError) as e:
-                self.failure(e, what_failed='Failed to find servers',
-                             item=self.hostname, fatal=True)
+            return None
 
 
     def output(self, results):
@@ -237,11 +223,11 @@
                 if self.skylab:
                     formatter = skylab_server.format_servers_json
                 else:
-                    formatter = server_manager_utils.format_servers_json
+                    return None
             elif self.namesonly:
-                formatter = server_manager_utils.format_servers_nameonly
+                return None
             else:
-                formatter = server_manager_utils.format_servers
+                return None
             print(formatter(results))
         else:
             self.failure('No server is found.',
@@ -308,30 +294,10 @@
 
         @return: A Server object if it is created successfully.
         """
-        if RESPECT_SKYLAB_SERVERDB:
-            self.failure(ATEST_DISABLE_MSG,
-                         what_failed='Failed to create server',
-                         item=self.hostname, fatal=True)
-
-        if self.skylab:
-            try:
-                return self.execute_skylab()
-            except (skylab_server.SkylabServerActionError,
-                    revision_control.GitError,
-                    gob_util.GOBError,
-                    skylab_utils.InventoryRepoDirNotClean) as e:
-                self.failure(e, what_failed='Failed to create server in skylab '
-                             'inventory.', item=self.hostname, fatal=True)
-        else:
-            try:
-                return server_manager.create(
-                        hostname=self.hostname,
-                        role=self.role,
-                        note=self.note)
-            except (server_manager_utils.ServerActionError,
-                    error.InvalidDataError) as e:
-                self.failure(e, what_failed='Failed to create server',
-                             item=self.hostname, fatal=True)
+        self.failure(ATEST_DISABLE_MSG,
+                     what_failed='Failed to create server',
+                     item=self.hostname,
+                     fatal=True)
 
 
     def output(self, results):
@@ -375,30 +341,10 @@
 
         @return: True if server is deleted successfully.
         """
-        if RESPECT_SKYLAB_SERVERDB:
-            self.failure(ATEST_DISABLE_MSG,
-                         what_failed='Failed to delete server',
-                         item=self.hostname, fatal=True)
-
-        if self.skylab:
-            try:
-                self.execute_skylab()
-                return True
-            except (skylab_server.SkylabServerActionError,
-                    revision_control.GitError,
-                    gob_util.GOBError,
-                    skylab_utils.InventoryRepoDirNotClean) as e:
-                self.failure(e, what_failed='Failed to delete server from '
-                             'skylab inventory.', item=self.hostname,
-                             fatal=True)
-        else:
-            try:
-                server_manager.delete(hostname=self.hostname)
-                return True
-            except (server_manager_utils.ServerActionError,
-                    error.InvalidDataError) as e:
-                self.failure(e, what_failed='Failed to delete server',
-                             item=self.hostname, fatal=True)
+        self.failure(ATEST_DISABLE_MSG,
+                     what_failed='Failed to delete server',
+                     item=self.hostname,
+                     fatal=True)
 
 
     def output(self, results):
@@ -539,31 +485,10 @@
 
         @return: The updated server object if it is modified successfully.
         """
-        if RESPECT_SKYLAB_SERVERDB:
-            self.failure(ATEST_DISABLE_MSG,
-                         what_failed='Failed to modify server',
-                         item=self.hostname, fatal=True)
-
-        if self.skylab:
-            try:
-                return self.execute_skylab()
-            except (skylab_server.SkylabServerActionError,
-                    revision_control.GitError,
-                    gob_util.GOBError,
-                    skylab_utils.InventoryRepoDirNotClean) as e:
-                self.failure(e, what_failed='Failed to modify server in skylab'
-                             ' inventory.', item=self.hostname, fatal=True)
-        else:
-            try:
-                return server_manager.modify(
-                        hostname=self.hostname, role=self.role,
-                        status=self.status, delete=self.delete,
-                        note=self.note, attribute=self.attribute,
-                        value=self.value, action=self.action)
-            except (server_manager_utils.ServerActionError,
-                    error.InvalidDataError) as e:
-                self.failure(e, what_failed='Failed to modify server',
-                             item=self.hostname, fatal=True)
+        self.failure(ATEST_DISABLE_MSG,
+                     what_failed='Failed to modify server',
+                     item=self.hostname,
+                     fatal=True)
 
 
     def output(self, results):
diff --git a/cli/skylab_utils.py b/cli/skylab_utils.py
index 5fb7c6a..82ab1a4 100644
--- a/cli/skylab_utils.py
+++ b/cli/skylab_utils.py
@@ -10,7 +10,7 @@
 import common
 
 from autotest_lib.client.common_lib import revision_control
-from chromite.lib import gob_util
+from autotest_lib.utils.frozen_chromite.lib import gob_util
 
 try:
     from skylab_inventory import text_manager
@@ -124,7 +124,7 @@
 
             logging.info('Inventory repo was already initialized, start '
                          'pulling.')
-            self.git_repo.checkout('master')
+            self.git_repo.checkout('main')
             self.git_repo.pull()
         else:
             logging.info('No inventory repo was found, start cloning.')
@@ -151,7 +151,7 @@
 
         remote = self.git_repo.remote()
         output = self.git_repo.upload_cl(
-                remote, 'master', draft=draft, dryrun=dryrun)
+                remote, 'main', draft=draft, dryrun=dryrun)
 
         if not dryrun:
             change_number = extract_inventory_change(output)
diff --git a/client/OWNERS b/client/OWNERS
new file mode 100644
index 0000000..2e97f12
--- /dev/null
+++ b/client/OWNERS
@@ -0,0 +1 @@
+include /HARNESS_OWNERS
diff --git a/client/autotest_lib/OWNERS b/client/autotest_lib/OWNERS
new file mode 100644
index 0000000..982656f
--- /dev/null
+++ b/client/autotest_lib/OWNERS
@@ -0,0 +1 @@
+include chromiumos/config:/owners/testservice
\ No newline at end of file
diff --git a/client/cros/nfc/__init__.py b/client/autotest_lib/__init__.py
similarity index 100%
copy from client/cros/nfc/__init__.py
copy to client/autotest_lib/__init__.py
diff --git a/client/autotest_lib/client b/client/autotest_lib/client
new file mode 120000
index 0000000..b870225
--- /dev/null
+++ b/client/autotest_lib/client
@@ -0,0 +1 @@
+../
\ No newline at end of file
diff --git a/client/bin/OWNERS b/client/bin/OWNERS
new file mode 100644
index 0000000..2e97f12
--- /dev/null
+++ b/client/bin/OWNERS
@@ -0,0 +1 @@
+include /HARNESS_OWNERS
diff --git a/client/bin/amd_pci_ids.json b/client/bin/amd_pci_ids.json
index ddfab3b..5ea2f32 100644
--- a/client/bin/amd_pci_ids.json
+++ b/client/bin/amd_pci_ids.json
@@ -1,5 +1,8 @@
 {
+    "0x1506": "gc_10_3_7",
     "0x15d8": "picasso",
+    "0x15e7": "green_sardine",
+    "0x1638": "green_sardine",
     "0x9870": "carrizo",
     "0x9874": "carrizo",
     "0x9875": "carrizo",
diff --git a/client/bin/autologin.py b/client/bin/autologin.py
index 681b144..49cfe2c 100755
--- a/client/bin/autologin.py
+++ b/client/bin/autologin.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
 #
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -38,10 +38,15 @@
                         help='Prevent startup window from opening (no doodle).')
     parser.add_argument('--no-arc-syncs', action='store_true',
                         help='Prevent ARC sync behavior as much as possible.')
-    parser.add_argument('--toggle_ndk', action='store_true',
+    parser.add_argument('--toggle_ndk',
+                        action='append_const',
+                        dest='feature',
+                        const='ArcNativeBridgeExperiment',
                         help='Toggle the translation from houdini to ndk')
-    parser.add_argument('--nativebridge64', action='store_true',
-                        help='Enables the experiment for 64-bit native bridges')
+    parser.add_argument('-f',
+                        '--feature',
+                        action='append',
+                        help='Enables the specified Chrome feature flag')
     parser.add_argument('--url', help='Navigate to URL.')
     args = parser.parse_args(args)
 
@@ -53,11 +58,8 @@
     browser_args = []
     if args.no_startup_window:
         browser_args.append('--no-startup-window')
-    if args.toggle_ndk:
-        browser_args.append('--enable-features=ArcNativeBridgeExperiment')
-    if args.nativebridge64:
-        browser_args.append(
-            '--enable-features=ArcNativeBridge64BitSupportExperiment')
+    if args.feature:
+        browser_args.append('--enable-features=%s' % ','.join(args.feature))
 
     # Avoid calling close() on the Chrome object; this keeps the session active.
     cr = chrome.Chrome(
@@ -72,8 +74,8 @@
         disable_default_apps=(not args.enable_default_apps),
         dont_override_profile=args.dont_override_profile)
     if args.url:
-      tab = cr.browser.tabs[0]
-      tab.Navigate(args.url)
+        tab = cr.browser.tabs[0]
+        tab.Navigate(args.url)
 
 
 if __name__ == '__main__':
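The autologin.py hunk above folds --toggle_ndk and the removed --nativebridge64 switch into a single feature list: append_const pushes a fixed feature name onto the same destination that -f/--feature appends to, and the joined list becomes one --enable-features flag. A minimal standalone sketch of that argparse pattern (the parser below is illustrative, not the autotest code itself):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--toggle_ndk', action='append_const', dest='feature',
                        const='ArcNativeBridgeExperiment')
    parser.add_argument('-f', '--feature', action='append')

    args = parser.parse_args(['--toggle_ndk', '-f', 'SomeFeature'])
    # args.feature == ['ArcNativeBridgeExperiment', 'SomeFeature']
    print('--enable-features=%s' % ','.join(args.feature))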
diff --git a/client/bin/autotest b/client/bin/autotest
index fe75b6c..599b9fb 100755
--- a/client/bin/autotest
+++ b/client/bin/autotest
@@ -1,4 +1,4 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 #
 # autotest <control file> - run the autotest control file specified.
 #
diff --git a/client/bin/autotest_client b/client/bin/autotest_client
index 5424281..0d206dd 100755
--- a/client/bin/autotest_client
+++ b/client/bin/autotest_client
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 import common
 
 import sys, os, time, subprocess
diff --git a/client/bin/autotestd b/client/bin/autotestd
index 5af5d8c..6630b41 100755
--- a/client/bin/autotestd
+++ b/client/bin/autotestd
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import common
 import sys, os, subprocess, fcntl
@@ -48,7 +48,7 @@
 exit_code = subprocess.call("{} {}".format(sys.executable, cmd),
                             shell=True,
                             close_fds=False)
-exit_file.write('%+04d' % exit_code)
+exit_file.write(b'%+04d' % exit_code)
 exit_file.flush()
 fcntl.flock(exit_file, fcntl.LOCK_UN)
 exit_file.close()
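The autotestd hunk switches to a bytes literal, which relies on bytes %-formatting (PEP 461, available since Python 3.5). A quick standalone check of the format string used above:

    # '%+04d' pads to a width of 4 with an explicit sign; bytes formatting
    # behaves the same as str formatting here.
    assert b'%+04d' % 3 == b'+003'
    assert b'%+04d' % -12 == b'-012'
    assert b'%+04d' % 127 == b'+127'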
diff --git a/client/bin/autotestd_monitor b/client/bin/autotestd_monitor
index fc022d7..3e2ad16 100755
--- a/client/bin/autotestd_monitor
+++ b/client/bin/autotestd_monitor
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 from __future__ import absolute_import
 from __future__ import division
diff --git a/client/bin/base_sysinfo.py b/client/bin/base_sysinfo.py
index 0e168f3..30a1da7 100644
--- a/client/bin/base_sysinfo.py
+++ b/client/bin/base_sysinfo.py
@@ -15,32 +15,33 @@
 
 _DEFAULT_COMMANDS_TO_LOG_PER_TEST = []
 _DEFAULT_COMMANDS_TO_LOG_PER_BOOT = [
-    'lspci -vvn',
-    'gcc --version',
-    'ld --version',
-    'mount',
-    'hostname',
-    'uptime',
-    # for Downloadable Content (DLC)
-    'losetup',
-    'dlcservice_util --list',
+        'lspci -vvnn',
+        'gcc --version',
+        'ld --version',
+        'mount',
+        'hostname',
+        'uptime',
+        # for Downloadable Content (DLC)
+        'losetup',
+        'dlcservice_util --list',
 ]
 _DEFAULT_COMMANDS_TO_LOG_BEFORE_ITERATION = []
 _DEFAULT_COMMANDS_TO_LOG_AFTER_ITERATION = []
 
 _DEFAULT_FILES_TO_LOG_PER_TEST = []
 _DEFAULT_FILES_TO_LOG_PER_BOOT = [
-    '/proc/pci',
-    '/proc/meminfo',
-    '/proc/slabinfo',
-    '/proc/version',
-    '/proc/cpuinfo',
-    '/proc/modules',
-    '/proc/interrupts',
-    '/proc/partitions',
-    '/var/log/bios_info.txt',
-    '/var/log/messages',
-    '/var/log/storage_info.txt',
+        '/proc/pci',
+        '/proc/meminfo',
+        '/proc/slabinfo',
+        '/proc/version',
+        '/proc/cpuinfo',
+        '/proc/modules',
+        '/proc/interrupts',
+        '/proc/partitions',
+        '/sys/firmware/log',
+        '/var/log/bios_info.txt',
+        '/var/log/messages',
+        '/var/log/storage_info.txt',
 ] + list(constants.LOG_PSTORE_DIRS)
 _DEFAULT_FILES_TO_LOG_BEFORE_ITERATION = [
     '/proc/diskstats',
@@ -251,6 +252,21 @@
                                         logf='uname',
                                         log_in_keyval=True))
 
+        # Log cpufreq parameters
+        self.boot_loggables.add(
+                command('cat /sys/bus/cpu/devices/cpu*/cpufreq/scaling_driver | sort -u',
+                        logf='scaling-driver',
+                        log_in_keyval=True))
+        self.boot_loggables.add(
+                command('cat /sys/bus/cpu/devices/cpu*/cpufreq/scaling_governor | sort -u',
+                        logf='scaling-governor',
+                        log_in_keyval=True))
+        # Will only get logged when using the ondemand governor
+        self.boot_loggables.add(
+                logfile('/sys/devices/system/cpu/cpufreq/ondemand/powersave_bias',
+                        logf='scaling-governor-ondemand-powersave-bias',
+                        log_in_keyval=True))
+
         # log contents of DLC directories with meaningful filenames
         self.boot_loggables.add(command('tree /var/cache/dlc',
                                         logf='dlc_images'))
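The new cpufreq loggables shell out to cat ... | sort -u; a rough standalone Python equivalent, for reference (standard sysfs paths, availability depends on kernel and platform):

    import glob

    def unique_cpufreq_values(name):
        # Mirrors `cat /sys/bus/cpu/devices/cpu*/cpufreq/<name> | sort -u`.
        values = set()
        for path in glob.glob('/sys/bus/cpu/devices/cpu*/cpufreq/' + name):
            with open(path) as f:
                values.add(f.read().strip())
        return sorted(values)

    print(unique_cpufreq_values('scaling_driver'))
    print(unique_cpufreq_values('scaling_governor'))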
diff --git a/client/bin/base_sysinfo_unittest.py b/client/bin/base_sysinfo_unittest.py
index c45596b..66d41a0 100644
--- a/client/bin/base_sysinfo_unittest.py
+++ b/client/bin/base_sysinfo_unittest.py
@@ -1,7 +1,7 @@
 """Tests for base_sysinfo."""
 
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.client.common_lib import autotemp
diff --git a/client/bin/display_chart.py b/client/bin/display_chart.py
index fc7818f..1d68dd0 100755
--- a/client/bin/display_chart.py
+++ b/client/bin/display_chart.py
@@ -6,11 +6,13 @@
 
 import argparse
 import contextlib
+import json
 import logging
 import os
+import select
 import signal
 import sys
-import time
+import tempfile
 
 # Set chart process preferred logging format before overridden by importing
 # common package.
@@ -19,85 +21,182 @@
         format='%(asctime)s - %(levelname)s - %(message)s')
 
 # This sets up import paths for autotest.
+sys.path.append('/usr/local/autotest/bin')
 import common
 from autotest_lib.client.bin import utils
 from autotest_lib.client.cros import constants
-from autotest_lib.client.cros.multimedia import display_facade_native
+from autotest_lib.client.cros.multimedia import display_facade as display_facade_lib
 from autotest_lib.client.cros.multimedia import facade_resource
 from autotest_lib.client.common_lib.cros import chrome
 
+DEFAULT_DISPLAY_LEVEL = 96.0
+
+
+class Fifo:
+    """Fifo to communicate with chart service."""
+
+    FIFO_POLL_TIMEOUT_MS = 300
+
+    def __init__(self):
+        self._ready = False
+
+    def __enter__(self):
+        # Prepare fifo file.
+        self._tmpdir = tempfile.mkdtemp(prefix='chart_fifo_', dir='/tmp')
+        self._path = os.path.join(self._tmpdir, 'fifo')
+        os.mkfifo(self._path)
+
+        # Hook SIGINT signal to stop fifo.
+        self._original_sig_handler = signal.getsignal(signal.SIGINT)
+
+        def handler(signum, frame):
+            signal.signal(signal.SIGINT, self._original_sig_handler)
+            self._ready = False
+
+        signal.signal(signal.SIGINT, handler)
+
+        self._ready = True
+        return self
+
+    def __exit__(self, exc_type, exc_value, exc_traceback):
+        signal.signal(signal.SIGINT, self._original_sig_handler)
+        os.unlink(self._path)
+        os.rmdir(self._tmpdir)
+
+    def get_path(self):
+        return self._path
+
+    def read(self):
+        """Read json format command from fifo."""
+        while self._ready:
+            with os.fdopen(os.open(self._path, os.O_RDONLY | os.O_NONBLOCK),
+                           'r') as fd:
+                p = select.poll()
+                p.register(fd, select.POLLIN)
+                if p.poll(self.FIFO_POLL_TIMEOUT_MS):
+                    cmd = fd.read()
+                    return json.loads(cmd)
+        return None
+
 
 @contextlib.contextmanager
-def set_display_brightness(display_level):
-    SET_BRIGHTNESS_CMD = 'backlight_tool --set_brightness_percent=%s'
+def control_brightness():
+    """Help to programmatically control the brightness.
+
+    Returns:
+      A function which can set brightness between [0.0, 100.0].
+    """
+
+    def set_brightness(display_level):
+        utils.system('backlight_tool --set_brightness_percent=%s' %
+                     display_level)
+        logging.info('Set display brightness to %r', display_level)
 
     original_display_level = utils.system_output(
             'backlight_tool --get_brightness_percent')
-    logging.info('Save original display brightness %r '
-                 'and fix display brightness to %r', original_display_level,
-                 display_level)
-    utils.system(SET_BRIGHTNESS_CMD % display_level)
+    logging.info('Save original display brightness %r', original_display_level)
+
     utils.system('stop powerd', ignore_status=True)
-    yield
+    yield set_brightness
     logging.info('Restore display brightness %r', original_display_level)
     utils.system('start powerd', ignore_status=True)
-    utils.system(SET_BRIGHTNESS_CMD % original_display_level)
+    set_brightness(original_display_level)
 
 
-def display(filepath):
-    """Display chart with filepath on device by using telemetry."""
-    DISPLAY_LEVEL = 96.0
-    DISPLAY_ORIENTATION = 90
+@contextlib.contextmanager
+def control_display(cr):
+    """Fix the display orientation instead of using gyro orientation."""
+    board = utils.get_board()
+    logging.info("Board:%s", board)
+    if board == 'scarlet':
+        DISPLAY_ORIENTATION = 90
+    else:
+        DISPLAY_ORIENTATION = 0
 
-    assert os.path.isfile(filepath), 'filepath %r not found.' % filepath
-    filepath = os.path.abspath(filepath)
+    logging.info('Set fullscreen.')
+    facade = facade_resource.FacadeResource(cr)
+    display_facade = display_facade_lib.DisplayFacadeLocal(facade)
+    display_facade.set_fullscreen(True)
 
-    logging.info('Setup SIGINT listener for stop displaying.')
-    displaying = [True]
+    logging.info('Fix screen rotation %d.', DISPLAY_ORIENTATION)
+    internal_display_id = display_facade.get_internal_display_id()
+    original_display_orientation = display_facade.get_display_rotation(
+            internal_display_id)
+    display_facade.set_display_rotation(internal_display_id,
+                                        rotation=DISPLAY_ORIENTATION)
+    yield
+    display_facade.set_display_rotation(internal_display_id,
+                                        rotation=original_display_orientation)
 
-    def handler(signum, frame):
-        """Wait signal to clear running flag."""
-        if signum == signal.SIGINT:
-            displaying.pop()
 
-    signal.signal(signal.SIGINT, handler)
+def display(chart_path, display_level):
+    """Display chart on device by using telemetry."""
+    chart_path = os.path.abspath(chart_path)
+    if os.path.isfile(chart_path):
+        first_chart_name = os.path.basename(chart_path)
+        chart_dir_path = os.path.dirname(chart_path)
+    elif os.path.isdir(chart_path):
+        first_chart_name = None
+        chart_dir_path = chart_path
+    else:
+        assert False, 'chart_path %r not found.' % chart_path
 
-    with chrome.Chrome(
-            extension_paths=[constants.DISPLAY_TEST_EXTENSION],
-            autotest_ext=True,
-            init_network_controller=True) as cr, set_display_brightness(
-                    DISPLAY_LEVEL):
-        logging.info('Set fullscreen.')
-        facade = facade_resource.FacadeResource(cr)
-        display_facade = display_facade_native.DisplayFacadeNative(facade)
-        display_facade.set_fullscreen(True)
-
-        logging.info('Fix screen rotation %d.', DISPLAY_ORIENTATION)
-        internal_display_id = display_facade.get_internal_display_id()
-        display_facade.set_display_rotation(internal_display_id,
-                                            rotation=DISPLAY_ORIENTATION)
-
+    def show_chart(name):
+        """Show image on chart base on file name"""
+        filepath = os.path.join(chart_dir_path, name)
         logging.info('Display chart file of path %r.', filepath)
-        cr.browser.platform.SetHTTPServerDirectories(os.path.dirname(filepath))
         tab = cr.browser.tabs[0]
         tab.Navigate(cr.browser.platform.http_server.UrlOf(filepath))
         tab.WaitForDocumentReadyStateToBeComplete()
 
-        logging.info('Chart is ready.')
+    logging.info('Setup SIGINT listener for stop displaying.')
 
+    with chrome.Chrome(
+            extension_paths=[constants.DISPLAY_TEST_EXTENSION],
+            autotest_ext=True,
+            init_network_controller=True) as cr, \
+            control_brightness() as set_brightness, \
+            control_display(cr), \
+            Fifo() as fifo:
+        set_brightness(display_level)
+
+        cr.browser.platform.SetHTTPServerDirectories(chart_dir_path)
+        if first_chart_name is not None:
+            show_chart(first_chart_name)
+
+        logging.info('Chart is ready. Fifo: %s', fifo.get_path())
         # Flush the 'is ready' message for server test to sync with ready state.
         sys.stdout.flush()
         sys.stderr.flush()
 
-        while displaying:
-            time.sleep(1)
+        while True:
+            cmd = fifo.read()
+            if cmd is None:
+                break
+            new_chart_name = cmd.get('chart_name')
+            if new_chart_name is not None:
+                show_chart(new_chart_name)
+
+            new_display_level = cmd.get('display_level')
+            if new_display_level is not None:
+                set_brightness(new_display_level)
 
 
 if __name__ == '__main__':
     argparser = argparse.ArgumentParser(
             description='Display chart file on chrome by using telemetry.'
             ' Send SIGINT or keyboard interrupt to stop displaying.')
-    argparser.add_argument('filepath', help='Path of displayed chart file.')
+    argparser.add_argument(
+            'chart_path',
+            help='Path of the chart file to display, or a directory of'
+            ' chart files to be selected via fifo commands.'
+    )
+    argparser.add_argument(
+            '--display_level',
+            type=float,
+            default=DEFAULT_DISPLAY_LEVEL,
+            help=
+            'Set brightness as linearly-calculated percent in [0.0, 100.0].')
 
     args = argparser.parse_args()
-    display(args.filepath)
+    display(args.chart_path, args.display_level)
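With the Fifo in place, display_chart.py can be driven at runtime: a controller writes a small JSON object with optional 'chart_name' and 'display_level' keys to the fifo path printed in the 'Chart is ready. Fifo: ...' line, and SIGINT still ends the loop. A hedged sketch of such a controller (the fifo path and chart name below are placeholders):

    import json

    fifo_path = '/tmp/chart_fifo_XXXXXX/fifo'  # copied from the chart log line
    command = {'chart_name': 'scene.png', 'display_level': 80.0}
    # Opening the fifo for writing blocks until the chart process polls it.
    with open(fifo_path, 'w') as fifo:
        fifo.write(json.dumps(command))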
diff --git a/client/bin/fio_util.py b/client/bin/fio_util.py
index 4014151..a812fcf 100644
--- a/client/bin/fio_util.py
+++ b/client/bin/fio_util.py
@@ -347,7 +347,7 @@
         for log in logs.split():
             match = matcher.match(log)
             if not match:
-                logging.warn('Unknown log file %s', log)
+                logging.warning('Unknown log file %s', log)
                 continue
 
             jobname = match.group('jobname')
diff --git a/client/bin/fps_meter.py b/client/bin/fps_meter.py
index 3abc615..2ee5fc0 100755
--- a/client/bin/fps_meter.py
+++ b/client/bin/fps_meter.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/bin/fs_sync.py b/client/bin/fs_sync.py
index fbdd057..a914647 100755
--- a/client/bin/fs_sync.py
+++ b/client/bin/fs_sync.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -76,7 +76,7 @@
     @param fs: the mountpoint path of the filesystem to freeze
     """
     # ioctl: FIFREEZE
-    logging.warn("FREEZING THE FILESYSTEM: %s", fs)
+    logging.warning("FREEZING THE FILESYSTEM: %s", fs)
     run('fsfreeze --freeze %s' % fs)
 
 
@@ -151,24 +151,24 @@
         available_ns = list_result.stdout.strip()
 
         if list_result.rc != 0:
-            logging.warn("Listing namespaces failed (rc=%s); assuming default.",
+            logging.warning("Listing namespaces failed (rc=%s); assuming default.",
                          list_result.rc)
             available_ns = ''
 
         elif available_ns.startswith('Usage:'):
-            logging.warn("Listing namespaces failed (just printed --help);"
+            logging.warning("Listing namespaces failed (just printed --help);"
                          " assuming default.")
             available_ns = ''
 
         elif not available_ns:
-            logging.warn("Listing namespaces failed (empty output).")
+            logging.warning("Listing namespaces failed (empty output).")
 
         if not available_ns:
             # -n Defaults to 0xffffffff, indicating flush for all namespaces.
             flush_result = run('nvme flush %s' % device, strip=True)
 
             if flush_result.rc != 0:
-                logging.warn("Flushing %s failed (rc=%s).",
+                logging.warning("Flushing %s failed (rc=%s).",
                              device, flush_result.rc)
 
         for line in available_ns.splitlines():
@@ -178,7 +178,7 @@
             flush_result = run('nvme flush %s -n %s' % (device, ns), strip=True)
 
             if flush_result.rc != 0:
-                logging.warn("Flushing %s namespace %s failed (rc=%s).",
+                logging.warning("Flushing %s namespace %s failed (rc=%s).",
                              device, ns, flush_result.rc)
 
     elif 'sd' in device:
@@ -198,7 +198,7 @@
         # run('hdparm --verbose -F %s' % device, stderr=subprocess.PIPE)
 
     else:
-        logging.warn("Unhandled device type: %s", device)
+        logging.warning("Unhandled device type: %s", device)
         _flush_blockdev(device, '*')
 
 
diff --git a/client/bin/fsinfo_unittest.py b/client/bin/fsinfo_unittest.py
index b671d9b..0dcee63 100755
--- a/client/bin/fsinfo_unittest.py
+++ b/client/bin/fsinfo_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 from __future__ import absolute_import
 from __future__ import division
diff --git a/client/bin/harness_autoserv.py b/client/bin/harness_autoserv.py
index d187c3a..b5171fd 100644
--- a/client/bin/harness_autoserv.py
+++ b/client/bin/harness_autoserv.py
@@ -34,13 +34,11 @@
                           "global_config.ini missing. This probably means "
                           "a bug on the server code. Please verify.")
 
-
     def run_start(self):
         # set up the package fetcher for direct-from-autoserv fetches
         fetcher = AutoservFetcher(self.job.pkgmgr, self)
         self.job.pkgmgr.add_repository(fetcher)
 
-
     def _send_and_wait(self, title, *args):
         """Send a message to the autoserv and wait for it to signal
         completion.
@@ -67,20 +65,18 @@
         finally:
             fifo_dir.clean()
 
-
     def run_test_complete(self):
         """A test run by this job is complete, signal it to autoserv and
         wait for it to signal to continue"""
         self._send_and_wait('AUTOTEST_TEST_COMPLETE')
 
-
     def test_status(self, status, tag):
         """A test within this job is completing"""
         for line in status.split('\n'):
             # sent status messages with AUTOTEST_STATUS:tag:message
             msg = ('AUTOTEST_STATUS:%s:%s\n' % (tag, line))
             self.status.write(msg)
-
+            self.status.flush()
 
     def fetch_package(self, pkg_name, dest_path):
         """Request a package from the remote autoserv.
@@ -97,7 +93,6 @@
         self.url = "autoserv://"
         self.job_harness = job_harness
 
-
     def fetch_pkg_file(self, filename, dest_path):
         if os.path.exists(dest_path):
             os.remove(dest_path)
diff --git a/client/bin/harness_standalone.py b/client/bin/harness_standalone.py
index fbe50cb..893d2f3 100644
--- a/client/bin/harness_standalone.py
+++ b/client/bin/harness_standalone.py
@@ -5,7 +5,9 @@
 
 __author__ = """Copyright Andy Whitcroft 2007"""
 
-import os, harness
+import os
+from autotest_lib.client.bin import harness
+
 
 class harness_standalone(harness.harness):
     """The standalone server harness
diff --git a/client/bin/harness_unittest.py b/client/bin/harness_unittest.py
index a80a720..058f32e 100755
--- a/client/bin/harness_unittest.py
+++ b/client/bin/harness_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # pylint: disable=missing-docstring
 
 import unittest
@@ -11,11 +11,9 @@
     def setUp(self):
         self.god = mock.mock_god()
 
-
     def tearDown(self):
         self.god.unstub_all()
 
-
     def test_select_none(self):
         job = object()
         self.god.stub_class(harness_standalone, "harness_standalone")
@@ -25,7 +23,6 @@
         harness.select(None, job, harness_args)
         self.god.check_playback()
 
-
     def test_select_standalone(self):
         job = object()
         self.god.stub_class(harness_standalone, "harness_standalone")
@@ -36,5 +33,5 @@
         self.god.check_playback()
 
 
-if  __name__ == "__main__":
+if __name__ == "__main__":
     unittest.main()
diff --git a/client/bin/input/input_device.py b/client/bin/input/input_device.py
index dc7eccd..02543d3 100755
--- a/client/bin/input/input_device.py
+++ b/client/bin/input/input_device.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -25,10 +25,15 @@
 import time
 
 from collections import OrderedDict
-
-from linux_input import *
 from six.moves import range
 
+# Try to import from the autotest_lib package layout first and fall back to a
+# plain local import: inside autotest the "try" branch is used, while a
+# standalone run of this script takes the "except" branch.
+try:
+    from autotest_lib.client.bin.input.linux_input import *
+except ImportError:
+    from linux_input import *
 
 # The regular expression of possible keyboard types.
 KEYBOARD_TYPES = '(keyboard|chromeos-ec-i2c|cros-ec-spi|cros-ec-i2c|cros_ec)'
@@ -411,7 +416,7 @@
         elif code == ABS_PRESSURE:
             return ABS_MT_PRESSURE
         elif code == ABS_TOOL_WIDTH:
-            return ABS_TOUCH_MAJOR
+            return ABS_MT_TOUCH_MAJOR
         else:
             return code
 
diff --git a/client/bin/input/input_event_player.py b/client/bin/input/input_event_player.py
index 1b39dca..a688910 100755
--- a/client/bin/input/input_event_player.py
+++ b/client/bin/input/input_event_player.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -20,8 +20,14 @@
 import re
 import time
 
-from input_device import InputDevice, InputEvent
 from optparse import OptionParser
+# Try to import from the autotest_lib package layout first and fall back to a
+# plain local import: inside autotest the "try" branch is used, while a
+# standalone run of this script takes the "except" branch.
+try:
+    from autotest_lib.client.bin.input.input_device import InputDevice, InputEvent
+except ImportError:
+    from input_device import InputDevice, InputEvent
 
 
 class InputEventPlayer:
diff --git a/client/bin/input/input_event_recorder.py b/client/bin/input/input_event_recorder.py
index 5b4f922..2e3560a 100644
--- a/client/bin/input/input_event_recorder.py
+++ b/client/bin/input/input_event_recorder.py
@@ -15,7 +15,8 @@
 import threading
 import time
 
-from linux_input import EV_MSC, EV_SYN, MSC_SCAN, SYN_REPORT
+from autotest_lib.client.bin.input.linux_input import\
+    EV_MSC, EV_SYN, MSC_SCAN, SYN_REPORT
 
 
 # Define extra misc events below as they are not defined in linux_input.
@@ -236,14 +237,16 @@
         """Record input events."""
         logging.info('Recording input events of %s.', self.device_node)
         cmd = 'evtest %s' % self.device_node
-        recorder = subprocess.Popen(cmd, stdout=subprocess.PIPE,
-                                          shell=True)
+        recorder = subprocess.Popen(cmd,
+                                    bufsize=0,
+                                    stdout=subprocess.PIPE,
+                                    shell=True)
         with open(self.tmp_file, 'w') as output_f:
             while True:
                 read_list, _, _ = select.select(
                         [recorder.stdout], [], [], 1)
                 if read_list:
-                    line = recorder.stdout.readline()
+                    line = recorder.stdout.readline().decode()
                     output_f.write(line)
                     ev = Event.from_string(line)
                     if ev:
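The recorder hunk adds bufsize=0 and .decode() because a Popen pipe yields bytes under Python 3 while the output file is opened in text mode. A minimal illustration of the same pattern (echo stands in for evtest):

    import subprocess

    proc = subprocess.Popen('echo EV_SYN', bufsize=0,
                            stdout=subprocess.PIPE, shell=True)
    line = proc.stdout.readline().decode()  # bytes -> str
    assert line.strip() == 'EV_SYN'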
diff --git a/client/bin/input/linux_input.py b/client/bin/input/linux_input.py
index 2cf3f51..2dd876d 100644
--- a/client/bin/input/linux_input.py
+++ b/client/bin/input/linux_input.py
@@ -12,7 +12,14 @@
 from __future__ import print_function
 from six.moves import range
 
-from linux_ioctl import *
+# Try to import from the autotest_lib package layout first and fall back to a
+# plain local import: inside autotest the "try" branch is used, while a
+# standalone run of this script takes the "except" branch.
+try:
+    from autotest_lib.client.bin.input.linux_ioctl import *
+except ImportError:
+    from linux_ioctl import *
+
 # The event structure itself
 #   struct input_event {
 #       struct timeval time;
diff --git a/client/bin/intel_pci_ids.json b/client/bin/intel_pci_ids.json
index 17efeff..2590c2a 100644
--- a/client/bin/intel_pci_ids.json
+++ b/client/bin/intel_pci_ids.json
@@ -152,6 +152,36 @@
     "0x3ea7": "coffeelake",
     "0x3ea8": "coffeelake",
     "0x3ea9": "coffeelake",
+    "0x4626": "alderlake",
+    "0x4628": "alderlake",
+    "0x462a": "alderlake",
+    "0x4680": "alderlake",
+    "0x4681": "alderlake",
+    "0x4682": "alderlake",
+    "0x4683": "alderlake",
+    "0x4688": "alderlake",
+    "0x4689": "alderlake",
+    "0x4690": "alderlake",
+    "0x4691": "alderlake",
+    "0x4692": "alderlake",
+    "0x4693": "alderlake",
+    "0x4698": "alderlake",
+    "0x4699": "alderlake",
+    "0x46a0": "alderlake",
+    "0x46a1": "alderlake",
+    "0x46a2": "alderlake",
+    "0x46a3": "alderlake",
+    "0x46a6": "alderlake",
+    "0x46a8": "alderlake",
+    "0x46aa": "alderlake",
+    "0x46b0": "alderlake",
+    "0x46b1": "alderlake",
+    "0x46b2": "alderlake",
+    "0x46b3": "alderlake",
+    "0x46c0": "alderlake",
+    "0x46c1": "alderlake",
+    "0x46c2": "alderlake",
+    "0x46c3": "alderlake",
     "0x4e51": "jasperlake",
     "0x4e55": "jasperlake",
     "0x4e57": "jasperlake",
@@ -178,18 +208,6 @@
     "0x5926": "kabylake",
     "0x5927": "kabylake",
     "0x593b": "kabylake",
-    "0x5a41": "cannonlake",
-    "0x5a42": "cannonlake",
-    "0x5a44": "cannonlake",
-    "0x5a49": "cannonlake",
-    "0x5a4a": "cannonlake",
-    "0x5a50": "cannonlake",
-    "0x5a51": "cannonlake",
-    "0x5a52": "cannonlake",
-    "0x5a54": "cannonlake",
-    "0x5a59": "cannonlake",
-    "0x5a5a": "cannonlake",
-    "0x5a5c": "cannonlake",
     "0x5a84": "broxton",
     "0x5a85": "broxton",
     "0x87c0": "kabylake",
diff --git a/client/bin/job_unittest.py b/client/bin/job_unittest.py
index a51247a..52a694b 100755
--- a/client/bin/job_unittest.py
+++ b/client/bin/job_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # pylint: disable=missing-docstring
 
 import logging
@@ -60,9 +60,14 @@
 class abstract_test_init(base_job_unittest.test_init.generic_tests):
     """Generic client job mixin used when defining variations on the
     job.__init__ generic tests."""
+
+    PUBLIC_ATTRIBUTES = (
+            base_job_unittest.test_init.generic_tests.PUBLIC_ATTRIBUTES -
+            set(['force_full_log_collection']))
+
     OPTIONAL_ATTRIBUTES = (
-        base_job_unittest.test_init.generic_tests.OPTIONAL_ATTRIBUTES
-        - set(['control', 'harness']))
+            base_job_unittest.test_init.generic_tests.OPTIONAL_ATTRIBUTES -
+            set(['control', 'harness', 'force_full_log_collection']))
 
 
 class test_init_minimal_options(abstract_test_init, job_test_case):
@@ -101,6 +106,7 @@
             log = False
             args = ''
             output_dir = ''
+
         self.god.stub_function_to_return(job.utils, 'drop_caches', None)
 
         self.job._job_state = base_job_unittest.stub_job_state
@@ -320,7 +326,7 @@
         # record
         which = "which"
         harness_args = ''
-        harness.select.expect_call(which, self.job, 
+        harness.select.expect_call(which, self.job,
                                    harness_args).and_return(None)
 
         # run and test
@@ -491,10 +497,24 @@
         self._setup_check_post_reboot(mount_info, None)
 
         self.god.stub_function(self.job, "_record_reboot_failure")
-        self.job._record_reboot_failure.expect_call("sub",
-                "reboot.verify_config", "mounted partitions are different after"
-                " reboot (old entries: set([]), new entries: set([('/dev/hdb1',"
-                " '/mnt/hdb1')]))", running_id=None)
+
+        if six.PY2:
+            self.job._record_reboot_failure.expect_call(
+                    "sub",
+                    "reboot.verify_config",
+                    "mounted partitions are different after"
+                    " reboot (old entries: set([]), new entries: set([('/dev/hdb1',"
+                    " '/mnt/hdb1')]))",
+                    running_id=None)
+        else:
+            # Py3 string formatting of sets is a bit different...
+            self.job._record_reboot_failure.expect_call(
+                    "sub",
+                    "reboot.verify_config",
+                    "mounted partitions are different after"
+                    " reboot (old entries: set(), new entries: {('/dev/hdb1',"
+                    " '/mnt/hdb1')})",
+                    running_id=None)
 
         # playback
         self.assertRaises(error.JobError, self.job._check_post_reboot, "sub")
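The Python 2/3 branch above exists only because repr() of sets changed between interpreters and the expected failure message embeds that repr. For reference:

    # Python 2: repr(set())                        -> "set([])"
    #           repr({('/dev/hdb1', '/mnt/hdb1')}) -> "set([('/dev/hdb1', '/mnt/hdb1')])"
    # Python 3: repr(set())                        -> "set()"
    #           repr({('/dev/hdb1', '/mnt/hdb1')}) -> "{('/dev/hdb1', '/mnt/hdb1')}"
    print(repr(set()), repr({('/dev/hdb1', '/mnt/hdb1')}))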
diff --git a/client/bin/kernel_versions_unittest.py b/client/bin/kernel_versions_unittest.py
index 4617bd0..8fafbe3 100755
--- a/client/bin/kernel_versions_unittest.py
+++ b/client/bin/kernel_versions_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
diff --git a/client/bin/local_host_unittest.py b/client/bin/local_host_unittest.py
index db578c7..4f67370 100755
--- a/client/bin/local_host_unittest.py
+++ b/client/bin/local_host_unittest.py
@@ -1,8 +1,9 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
-import mock
 import os
+import six
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.client.common_lib import autotemp
@@ -150,10 +151,14 @@
         # create some files in tmpdir
         open(files[0], 'w').close()
         open(files[1], 'w').close()
-
-        self.assertItemsEqual(
-                files,
-                host.list_files_glob(os.path.join(self.tmpdir.name, '*')))
+        if six.PY2:
+            self.assertItemsEqual(
+                    files,
+                    host.list_files_glob(os.path.join(self.tmpdir.name, '*')))
+        else:
+            self.assertCountEqual(
+                    files,
+                    host.list_files_glob(os.path.join(self.tmpdir.name, '*')))
 
 
     def test_symlink_closure_does_not_add_existent_file(self):
@@ -167,9 +172,12 @@
 
         # test that when the symlinks point to already know files
         # nothing is added
-        self.assertItemsEqual(
-                [fname, sname],
-                host.symlink_closure([fname, sname]))
+        if six.PY2:
+            self.assertItemsEqual([fname, sname],
+                                  host.symlink_closure([fname, sname]))
+        else:
+            self.assertCountEqual([fname, sname],
+                                  host.symlink_closure([fname, sname]))
 
 
     def test_symlink_closure_adds_missing_files(self):
@@ -182,9 +190,12 @@
         os.symlink(fname, sname)
 
         # test that when the symlinks point to unknown files they are added
-        self.assertItemsEqual(
-                [fname, sname],
-                host.symlink_closure([sname]))
+        if six.PY2:
+            self.assertItemsEqual([fname, sname],
+                                  host.symlink_closure([sname]))
+        else:
+            self.assertCountEqual([fname, sname],
+                                  host.symlink_closure([sname]))
 
 
     def test_get_file(self):
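assertItemsEqual was removed in Python 3; assertCountEqual is the same order-insensitive comparison, which is why the tests above branch on six.PY2. A tiny standalone example:

    import unittest

    class OrderInsensitiveExample(unittest.TestCase):
        def test_same_elements_any_order(self):
            # Passes: same elements and multiplicities, different order.
            self.assertCountEqual(['b.txt', 'a.txt'], ['a.txt', 'b.txt'])

    if __name__ == '__main__':
        unittest.main()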
diff --git a/client/bin/package_unittest.py b/client/bin/package_unittest.py
index 6da4836..2c9db32 100755
--- a/client/bin/package_unittest.py
+++ b/client/bin/package_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 
 import unittest, os
diff --git a/client/bin/partition_unittest.py b/client/bin/partition_unittest.py
index 76c0875..e4a9459 100755
--- a/client/bin/partition_unittest.py
+++ b/client/bin/partition_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 """Tests for autotest_lib.client.bin.partition."""
 
@@ -10,6 +10,7 @@
 
 import os, sys, unittest
 from six import StringIO
+from six.moves import reload_module as reload
 import common
 from autotest_lib.client.common_lib.test_utils import mock
 from autotest_lib.client.bin import partition
diff --git a/client/bin/result_tools/dedupe_file_throttler.py b/client/bin/result_tools/dedupe_file_throttler.py
index 48b740e..d6d8134 100644
--- a/client/bin/result_tools/dedupe_file_throttler.py
+++ b/client/bin/result_tools/dedupe_file_throttler.py
@@ -12,8 +12,8 @@
 import re
 
 try:
+    from autotest_lib.client.bin.result_tools import delete_file_throttler
     from autotest_lib.client.bin.result_tools import result_info_lib
-
     from autotest_lib.client.bin.result_tools import throttler_lib
     from autotest_lib.client.bin.result_tools import utils_lib
 except ImportError:
@@ -88,9 +88,13 @@
     @param max_result_size_KB: Maximum test result size in KB.
     """
     _, grouped_files = throttler_lib.sort_result_files(summary)
+    # Respect the non-delete patterns in the delete throttler.
+    keep_patterns = (NO_DEDUPE_FILE_PATTERNS +
+                     delete_file_throttler.NON_DELETABLE_FILE_PATH_PATTERNS)
     for pattern in throttler_lib.RESULT_THROTTLE_PRIORITY:
-        throttable_files = list(throttler_lib.get_throttleable_files(
-                grouped_files[pattern], NO_DEDUPE_FILE_PATTERNS))
+        throttable_files = list(
+                throttler_lib.get_throttleable_files(grouped_files[pattern],
+                                                     keep_patterns))
 
         for info in throttable_files:
             info.parent_dir = os.path.dirname(info.path)
diff --git a/client/bin/result_tools/delete_file_throttler.py b/client/bin/result_tools/delete_file_throttler.py
index 69bc10d..fd17ab2 100644
--- a/client/bin/result_tools/delete_file_throttler.py
+++ b/client/bin/result_tools/delete_file_throttler.py
@@ -18,9 +18,18 @@
 DEFAULT_FILE_SIZE_THRESHOLD_BYTE = 1024 * 1024
 
 # Regex for file path that should not be deleted.
+# Note: it is OK to try to compress these.
 NON_DELETABLE_FILE_PATH_PATTERNS = [
-        '.*perf.data$',       # Performance test data.
-        ]
+        '.*perf.data$',  # Performance test data.
+        '.*DEBUG.*',  # Any autotest debug file needs to be preserved.
+        '.*full.*',  # Tast logs.
+        '.*net.log',  # net logs.
+        '.*android*',  # several android debugging logs are important.
+        '.*screenshot*',  # screenshots are needed for debugging.
+        '.*logcat*',
+        '.*/faillog/*',  # full tast fail logs.
+]
+
 
 def _delete_file(file_info):
     """Delete the given file and update the summary.
diff --git a/client/bin/result_tools/result_info.py b/client/bin/result_tools/result_info.py
index 312d3b6..de75afb 100644
--- a/client/bin/result_tools/result_info.py
+++ b/client/bin/result_tools/result_info.py
@@ -143,7 +143,6 @@
         @param parent_dir: Path to the parent directory.
         @param name: Name of the result file or directory.
         """
-        assert name != None
         self._name = name
 
         # Dictionary to store details of the given path is set to a keyval of
@@ -440,6 +439,8 @@
         @param original_info: A dictionary of the file's size and sub-directory
                 information.
         """
+        if utils_lib.DIRS not in self.details:
+            self.details[utils_lib.DIRS] = []
         self.details[utils_lib.DIRS].append(
                 ResultInfo(parent_dir=self._path,
                            name=name,
diff --git a/client/bin/result_tools/result_info_unittest.py b/client/bin/result_tools/result_info_unittest.py
index 5ebb610..411f959 100644
--- a/client/bin/result_tools/result_info_unittest.py
+++ b/client/bin/result_tools/result_info_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/bin/result_tools/runner.py b/client/bin/result_tools/runner.py
index d8802cb..714a3f3 100644
--- a/client/bin/result_tools/runner.py
+++ b/client/bin/result_tools/runner.py
@@ -19,7 +19,7 @@
 from autotest_lib.client.common_lib import utils as client_utils
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = client_utils.metrics_mock
 
@@ -52,18 +52,12 @@
             'chromeos/autotest/job/send_result_tools_duration',
             fields={'dut_host_name': host.hostname}) as fields:
         try:
-            result = host.run('test -f %s' %
-                      (_SUMMARY_CMD % DEFAULT_AUTOTEST_DIR),
-                   timeout=_FIND_DIR_SUMMARY_TIMEOUT,
-                   ignore_status=True)
-            if result.exit_status == 0:
-                logging.debug('result tools are already deployed to %s.',
-                        host.hostname)
-            else:
-                logging.debug('Deploy result utilities to %s', host.hostname)
-                result_tools_dir = os.path.dirname(__file__)
-                host.send_file(result_tools_dir, DEFAULT_AUTOTEST_DIR,
-                               excludes = _EXCLUDES)
+            logging.debug('Always deploying result utilities to %s',
+                          host.hostname)
+            result_tools_dir = os.path.dirname(__file__)
+            host.send_file(result_tools_dir,
+                           DEFAULT_AUTOTEST_DIR,
+                           excludes=_EXCLUDES)
             fields['success'] = True
         except error.AutotestHostRunError:
             logging.debug('Failed to deploy result tools using `excludes`. Try '
@@ -106,7 +100,7 @@
                                            host.job.max_result_size_KB)
                     except AttributeError:
                         # In case host job is not set, skip throttling.
-                        logging.warn('host object does not have job attribute, '
+                        logging.warning('host object does not have job attribute, '
                                      'skipping result throttling.')
                 cmd = (_BUILD_DIR_SUMMARY_CMD %
                        (DEFAULT_AUTOTEST_DIR, client_results_dir,
diff --git a/client/bin/result_tools/shrink_file_throttler.py b/client/bin/result_tools/shrink_file_throttler.py
deleted file mode 100644
index e610845..0000000
--- a/client/bin/result_tools/shrink_file_throttler.py
+++ /dev/null
@@ -1,176 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import re
-
-try:
-    from autotest_lib.client.bin.result_tools import throttler_lib
-    from autotest_lib.client.bin.result_tools import utils_lib
-except ImportError:
-    import throttler_lib
-    import utils_lib
-
-
-# File extensions that can be safely shrunk.
-# Extension matching is case-insensitive but the items in this set must be
-# lowercase to match.
-# Files without an extension and with no alphabetic characters in the extension
-# (e.g. file.20201110) are always shrinkable.
-SHRINKABLE_EXTENSIONS = frozenset([
-        '.log',
-        '.txt',
-        '.debug',
-        '.error',
-        '.info',
-        '.warning',
-])
-
-# Regex for paths that should not be shrunk.
-UNSHRINKABLE_PATH_PATTERNS = [
-        # Files in a log_diff/ directory should already be relatively small,
-        # and trimming them further would be detrimental to debugging. If
-        # they're too large, let other throttlers (e.g., zip_file_ or
-        # delete_file_) deal with them.
-        # Only blocklist a few known-useful log_diff's.
-        '/log_diff/messages$',
-        '/log_diff/net\.log$',
-        # Ramoops files are small but relatively important.
-        # The name of this file has changed starting with linux-3.19.
-        # Use a glob to match all existing records.
-        '/console-ramoops.*',
-        ]
-
-TRIMMED_FILE_HEADER = '!!! This file is trimmed !!!\n'
-ORIGINAL_SIZE_TEMPLATE = 'Original size: %d bytes\n\n'
-# Regex pattern to retrieve the original size of the file.
-ORIGINAL_SIZE_REGEX = 'Original size: (\d+) bytes'
-TRIMMED_FILE_INJECT_TEMPLATE = """
-
-========================================================================
-  < %d > characters are trimmed here.
-========================================================================
-
-"""
-
-# Percent of file content to keep at the beginning and end of the file, default
-# to 20%.
-HEAD_SIZE_PERCENT = 0.20
-
-# Default size in byte to trim the file down to.
-DEFAULT_FILE_SIZE_LIMIT_BYTE = 100 * 1024
-
-def _trim_file(file_info, file_size_limit_byte):
-    """Remove the file content in the middle to reduce the file size.
-
-    @param file_info: A ResultInfo object containing summary for the file to be
-            shrunk.
-    @param file_size_limit_byte: Maximum file size in bytes after trimming.
-    """
-    utils_lib.LOG('Trimming file %s to reduce size from %d bytes to %d bytes' %
-                  (file_info.path, file_info.original_size,
-                   file_size_limit_byte))
-    new_path = os.path.join(os.path.dirname(file_info.path),
-                            file_info.name + '_trimmed')
-    original_size_bytes = file_info.original_size
-    with open(new_path, 'w') as new_file, open(file_info.path) as old_file:
-        # Read the beginning part of the old file, if it's already started with
-        # TRIMMED_FILE_HEADER, no need to add the header again.
-        header =  old_file.read(len(TRIMMED_FILE_HEADER))
-        if header != TRIMMED_FILE_HEADER:
-            new_file.write(TRIMMED_FILE_HEADER)
-            new_file.write(ORIGINAL_SIZE_TEMPLATE % file_info.original_size)
-        else:
-            line = old_file.readline()
-            match = re.match(ORIGINAL_SIZE_REGEX, line)
-            if match:
-                original_size_bytes = int(match.group(1))
-        header_size_bytes = new_file.tell()
-        # Move old file reader to the beginning of the file.
-        old_file.seek(0, os.SEEK_SET)
-
-        new_file.write(old_file.read(
-                int((file_size_limit_byte - header_size_bytes) *
-                    HEAD_SIZE_PERCENT)))
-        # Position to seek from the end of the file.
-        seek_pos = -(file_size_limit_byte - new_file.tell() -
-                     len(TRIMMED_FILE_INJECT_TEMPLATE))
-        bytes_to_skip = original_size_bytes + seek_pos - old_file.tell()
-        # Adjust seek position based on string TRIMMED_FILE_INJECT_TEMPLATE
-        seek_pos += len(str(bytes_to_skip)) - 2
-        bytes_to_skip = original_size_bytes + seek_pos - old_file.tell()
-        new_file.write(TRIMMED_FILE_INJECT_TEMPLATE % bytes_to_skip)
-        old_file.seek(seek_pos, os.SEEK_END)
-        new_file.write(old_file.read())
-    stat = os.stat(file_info.path)
-    if not throttler_lib.try_delete_file_on_disk(file_info.path):
-        # Clean up the intermediate file.
-        throttler_lib.try_delete_file_on_disk(new_path)
-        utils_lib.LOG('Failed to shrink %s' % file_info.path)
-        return
-
-    os.rename(new_path, file_info.path)
-    # Modify the new file's timestamp to the old one.
-    os.utime(file_info.path, (stat.st_atime, stat.st_mtime))
-    # Update the trimmed_size.
-    file_info.trimmed_size = file_info.size
-
-
-def _get_shrinkable_files(file_infos, file_size_limit_byte):
-    """Filter the files that can be throttled.
-
-    @param file_infos: A list of ResultInfo objects.
-    @param file_size_limit_byte: Minimum file size in bytes to be throttled.
-    @yield: ResultInfo objects that can be shrunk.
-    """
-    for info in file_infos:
-        ext = os.path.splitext(info.name)[1].lower()
-        # if ext contains alphabetic characters and is not in the allowlist,
-        # skip the file.
-        # islower() returns false if the string does not contain any alphabetic
-        # characters, e.g. '.20201110'.islower() is False.
-        if ext.islower() and ext not in SHRINKABLE_EXTENSIONS:
-            continue
-
-        match_found = False
-        for pattern in UNSHRINKABLE_PATH_PATTERNS:
-            if re.search(pattern, info.path):
-                match_found = True
-                break
-        if match_found:
-            continue
-
-        if info.trimmed_size <= file_size_limit_byte:
-            continue
-
-        yield info
-
-
-def throttle(summary, max_result_size_KB,
-             file_size_limit_byte=DEFAULT_FILE_SIZE_LIMIT_BYTE,
-             skip_autotest_log=False):
-    """Throttle the files in summary by trimming file content.
-
-    Stop throttling until all files are processed or the result file size is
-    already reduced to be under the given max_result_size_KB.
-
-    @param summary: A ResultInfo object containing result summary.
-    @param max_result_size_KB: Maximum test result size in KB.
-    @param file_size_limit_byte: Limit each file's size in the summary to be
-            under the given threshold, until all files are processed or the
-            result size is under the given max_result_size_KB.
-    @param skip_autotest_log: True to skip shrink Autotest logs, default is
-            False.
-    """
-    file_infos, _ = throttler_lib.sort_result_files(summary)
-    extra_patterns = ([throttler_lib.AUTOTEST_LOG_PATTERN] if skip_autotest_log
-                      else [])
-    file_infos = throttler_lib.get_throttleable_files(
-            file_infos, extra_patterns)
-    file_infos = _get_shrinkable_files(file_infos, file_size_limit_byte)
-    for info in file_infos:
-        _trim_file(info, file_size_limit_byte)
-
-        if throttler_lib.check_throttle_limit(summary, max_result_size_KB):
-            return
diff --git a/client/bin/result_tools/shrink_file_throttler_unittest.py b/client/bin/result_tools/shrink_file_throttler_unittest.py
deleted file mode 100644
index fefd496..0000000
--- a/client/bin/result_tools/shrink_file_throttler_unittest.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import shutil
-import tempfile
-import unittest
-
-import common
-from autotest_lib.client.bin.result_tools import result_info
-from autotest_lib.client.bin.result_tools import shrink_file_throttler
-from autotest_lib.client.bin.result_tools import unittest_lib
-from autotest_lib.client.bin.result_tools import utils_lib
-
-
-ORIGINAL_SIZE_BYTE = 1000
-MAX_RESULT_SIZE_KB = 2
-FILE_SIZE_LIMIT_BYTE = 512
-LARGE_SIZE_BYTE = 100 * 1024
-
-SUMMARY_AFTER_TRIMMING = {
-    '': {utils_lib.DIRS: [
-            {'BUILD_INFO-HT7591A00171': {
-                    utils_lib.ORIGINAL_SIZE_BYTES: ORIGINAL_SIZE_BYTE}},
-            {'file1.xml': {utils_lib.ORIGINAL_SIZE_BYTES: ORIGINAL_SIZE_BYTE}},
-            {'file2.jpg': {utils_lib.ORIGINAL_SIZE_BYTES: ORIGINAL_SIZE_BYTE}},
-            {'file3.log': {utils_lib.ORIGINAL_SIZE_BYTES: ORIGINAL_SIZE_BYTE,
-                           utils_lib.TRIMMED_SIZE_BYTES: FILE_SIZE_LIMIT_BYTE}},
-            {'folder1': {
-                utils_lib.DIRS: [
-                    {'file4': {
-                        utils_lib.ORIGINAL_SIZE_BYTES: ORIGINAL_SIZE_BYTE,
-                        utils_lib.TRIMMED_SIZE_BYTES: FILE_SIZE_LIMIT_BYTE}}],
-                utils_lib.ORIGINAL_SIZE_BYTES: ORIGINAL_SIZE_BYTE,
-                utils_lib.TRIMMED_SIZE_BYTES: FILE_SIZE_LIMIT_BYTE}},
-            {'test_run_details.txt': {
-                    utils_lib.ORIGINAL_SIZE_BYTES: ORIGINAL_SIZE_BYTE}}],
-         utils_lib.ORIGINAL_SIZE_BYTES: 6 * ORIGINAL_SIZE_BYTE,
-         utils_lib.TRIMMED_SIZE_BYTES: (
-                 4 * ORIGINAL_SIZE_BYTE + 2 * FILE_SIZE_LIMIT_BYTE)}
-    }
-
-OLD_TIME = 1498800000
-
-class ShrinkFileThrottleTest(unittest.TestCase):
-    """Test class for shrink_file_throttler.throttle method."""
-
-    def setUp(self):
-        """Setup directory for test."""
-        self.test_dir = tempfile.mkdtemp()
-        self.files_not_shrink = []
-        self.files_to_shrink = []
-
-        build_info = os.path.join(self.test_dir, 'BUILD_INFO-HT7591A00171')
-        unittest_lib.create_file(build_info, ORIGINAL_SIZE_BYTE)
-        self.files_not_shrink.append(build_info)
-
-        file1 = os.path.join(self.test_dir, 'file1.xml')
-        unittest_lib.create_file(file1, ORIGINAL_SIZE_BYTE)
-        self.files_not_shrink.append(file1)
-
-        file2 = os.path.join(self.test_dir, 'file2.jpg')
-        unittest_lib.create_file(file2, ORIGINAL_SIZE_BYTE)
-        self.files_not_shrink.append(file2)
-
-        file3 = os.path.join(self.test_dir, 'file3.log')
-        unittest_lib.create_file(file3, ORIGINAL_SIZE_BYTE)
-        self.files_to_shrink.append(file3)
-        os.utime(file3, (OLD_TIME, OLD_TIME))
-
-        file4 = os.path.join(self.test_dir, 'test_run_details.txt')
-        unittest_lib.create_file(file4, ORIGINAL_SIZE_BYTE)
-        self.files_not_shrink.append(file4)
-
-        folder1 = os.path.join(self.test_dir, 'folder1')
-        os.mkdir(folder1)
-        file4 = os.path.join(folder1, 'file4')
-        unittest_lib.create_file(file4, ORIGINAL_SIZE_BYTE)
-        self.files_to_shrink.append(file4)
-        os.utime(file4, (OLD_TIME, OLD_TIME))
-
-    def tearDown(self):
-        """Cleanup the test directory."""
-        shutil.rmtree(self.test_dir, ignore_errors=True)
-
-    def testTrim(self):
-        """Test throttle method."""
-        summary = result_info.ResultInfo.build_from_path(self.test_dir)
-        shrink_file_throttler.throttle(
-                summary,
-                max_result_size_KB=MAX_RESULT_SIZE_KB,
-                file_size_limit_byte=FILE_SIZE_LIMIT_BYTE)
-
-        self.assertEqual(SUMMARY_AFTER_TRIMMING, summary)
-
-        # Verify files that should not be shrunk are not changed.
-        for f in self.files_not_shrink:
-            self.assertEqual(ORIGINAL_SIZE_BYTE, os.stat(f).st_size,
-                             'File %s should not be shrank!' % f)
-
-        # Verify files that should be shrunk are updated.
-        for f in self.files_to_shrink:
-            stat = os.stat(f)
-            self.assertTrue(FILE_SIZE_LIMIT_BYTE >= stat.st_size,
-                            'File %s is not shrank!' % f)
-            self.assertEqual(OLD_TIME, stat.st_mtime)
-
-
-class MultipleShrinkFileTest(unittest.TestCase):
-    """Test class for shrink_file_throttler.throttle method for files to be
-    shrunk multiple times.
-    """
-
-    def setUp(self):
-        """Setup directory for test."""
-        self.test_dir = tempfile.mkdtemp()
-
-        self.file_to_shrink = os.path.join(self.test_dir, 'file1.txt')
-        unittest_lib.create_file(self.file_to_shrink, LARGE_SIZE_BYTE)
-
-    def tearDown(self):
-        """Cleanup the test directory."""
-        shutil.rmtree(self.test_dir, ignore_errors=True)
-
-    def testTrim(self):
-        """Shrink the file twice and check its content."""
-        summary = result_info.ResultInfo.build_from_path(
-                parent_dir=self.test_dir, name=utils_lib.ROOT_DIR,
-                top_dir=self.test_dir, parent_result_info=None)
-        shrink_file_throttler.throttle(
-                summary, max_result_size_KB=60,
-                file_size_limit_byte=50 * 1024)
-        summary = result_info.ResultInfo.build_from_path(self.test_dir)
-        shrink_file_throttler.throttle(
-                summary, max_result_size_KB=30,
-                file_size_limit_byte=25 * 1024)
-
-        with open(self.file_to_shrink) as f:
-            content = f.read()
-
-        original_size_string = (shrink_file_throttler.ORIGINAL_SIZE_TEMPLATE %
-                                LARGE_SIZE_BYTE)
-        self.assertTrue(original_size_string in content)
-
-        trimmed_size_string = (
-                shrink_file_throttler.TRIMMED_FILE_INJECT_TEMPLATE % 76990)
-        self.assertTrue(trimmed_size_string in content)
-
-
-# this is so the test can be run in standalone mode
-if __name__ == '__main__':
-    """Main"""
-    unittest.main()
diff --git a/client/bin/result_tools/throttler_lib.py b/client/bin/result_tools/throttler_lib.py
index 55ac60b..abb9c22 100644
--- a/client/bin/result_tools/throttler_lib.py
+++ b/client/bin/result_tools/throttler_lib.py
@@ -25,6 +25,7 @@
         'host_keyvals',
         'job_report.html',
         'keyval',
+        'messages',
         'profiling',
         'result_summary.html',
         'sponge_invocation.xml',
@@ -36,7 +37,7 @@
         'test_run_error.txt',
         'test_run_info.txt',
         'test_run_summary.json',
-        ])
+])
 
 # A list of file name patterns that should not be throttled, that is, not
 # modified by deletion, deduping, trimming or compression.
@@ -159,4 +160,4 @@
         os.remove(path)
         return True
     except OSError as e:
-        utils_lib.LOG('Failed to delete file %s, Error: %s' % (path, e))
\ No newline at end of file
+        utils_lib.LOG('Failed to delete file %s, Error: %s' % (path, e))
diff --git a/client/bin/result_tools/utils.py b/client/bin/result_tools/utils.py
index c40af66..e2a1ad1 100755
--- a/client/bin/result_tools/utils.py
+++ b/client/bin/result_tools/utils.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -47,7 +47,6 @@
     from autotest_lib.client.bin.result_tools import dedupe_file_throttler
     from autotest_lib.client.bin.result_tools import delete_file_throttler
     from autotest_lib.client.bin.result_tools import result_info
-    from autotest_lib.client.bin.result_tools import shrink_file_throttler
     from autotest_lib.client.bin.result_tools import throttler_lib
     from autotest_lib.client.bin.result_tools import utils_lib
     from autotest_lib.client.bin.result_tools import zip_file_throttler
@@ -55,7 +54,6 @@
     import dedupe_file_throttler
     import delete_file_throttler
     import result_info
-    import shrink_file_throttler
     import throttler_lib
     import utils_lib
     import zip_file_throttler
@@ -277,9 +275,7 @@
     args_skip_autotest_log['skip_autotest_log'] = True
     # Apply the throttlers in following order.
     throttlers = [
-            (shrink_file_throttler, copy.copy(args_skip_autotest_log)),
             (zip_file_throttler, copy.copy(args_skip_autotest_log)),
-            (shrink_file_throttler, copy.copy(args)),
             (dedupe_file_throttler, copy.copy(args)),
             (zip_file_throttler, copy.copy(args)),
             ]
@@ -369,6 +365,8 @@
     @param max_size_KB: Maximum result size in KB.
     """
     utils_lib.LOG('Running result_tools/utils on path: %s' % path)
+    utils_lib.LOG('Running result_tools/utils with Python version %s' % sys.version)
+
     if max_size_KB > 0:
         utils_lib.LOG('Throttle result size to : %s' %
                       utils_lib.get_size_string(max_size_KB * 1024))
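
With shrink_file_throttler removed from the pipeline above, throttling proceeds as zip (skipping autotest logs), then dedupe, then zip again, each stage applied only while the results are still over the limit. A rough sketch of that ordering with stand-in throttlers; the names and the size bookkeeping here are invented for illustration and simplify what check_throttle_limit really does:

    # Each stand-in throttler mutates `summary` in place; size_kb is a toy
    # substitute for the real per-file size accounting.
    def zip_large_files(summary, max_kb, skip_autotest_log=False):
        summary['size_kb'] *= 0.5       # pretend compression halves the size

    def dedupe_files(summary, max_kb):
        summary['size_kb'] -= 10        # pretend dedupe removes 10 KB

    def run_throttlers(summary, max_result_size_kb, throttlers):
        """Apply throttlers in order, stopping once the result fits."""
        for throttle, kwargs in throttlers:
            if summary['size_kb'] <= max_result_size_kb:
                return
            throttle(summary, max_result_size_kb, **kwargs)

    summary = {'size_kb': 200}
    run_throttlers(summary, 40, [
            (zip_large_files, {'skip_autotest_log': True}),
            (dedupe_files, {}),
            (zip_large_files, {}),
    ])
    print(summary)    # {'size_kb': 45.0}
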
diff --git a/client/bin/result_tools/utils_lib_unittest.py b/client/bin/result_tools/utils_lib_unittest.py
index 9c1fc9f..c7a67ed 100644
--- a/client/bin/result_tools/utils_lib_unittest.py
+++ b/client/bin/result_tools/utils_lib_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/bin/result_tools/utils_unittest.py b/client/bin/result_tools/utils_unittest.py
index 9dc2ae0..5108f0e 100644
--- a/client/bin/result_tools/utils_unittest.py
+++ b/client/bin/result_tools/utils_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -19,7 +19,6 @@
 
 import common
 from autotest_lib.client.bin.result_tools import result_info
-from autotest_lib.client.bin.result_tools import shrink_file_throttler
 from autotest_lib.client.bin.result_tools import throttler_lib
 from autotest_lib.client.bin.result_tools import utils as result_utils
 from autotest_lib.client.bin.result_tools import utils_lib
@@ -294,7 +293,31 @@
         collected_bytes, merged_summary, files = result_utils.merge_summaries(
                 self.test_dir)
 
-        self.assertEqual(EXPECTED_MERGED_SUMMARY, merged_summary)
+        # In Python 3, the nested dict --> list conversion isn't guaranteed
+        # to preserve ordering, so this drills down to the lowest-level
+        # values and verifies each one.
+        def _checker(real, expected):
+            if not isinstance(real, list) and not isinstance(real, dict):
+                self.assertEqual(real, expected)
+                return
+
+            if isinstance(real, list):
+                self.assertEqual(type(expected), list)
+                for item in real:
+                    _search_for_item(item, expected)
+                return
+
+            for k, v in real.items():
+                assert(k in expected)
+                _checker(real[k], expected[k])
+
+        def _search_for_item(item, other):
+            for oth in other:
+                if item.keys() == oth.keys():
+                    self.assertEqual(item, oth)
+                    _checker(item, oth)
+
+        _checker(merged_summary, EXPECTED_MERGED_SUMMARY)
         self.assertEqual(collected_bytes, 12 * SIZE)
         self.assertEqual(len(files), 3)
 
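
The assertEqual replaced above gives way to a key-by-key comparison because the nested lists of single-entry dicts in the merged summary are not guaranteed to come back in the same order under Python 3. The same idea in isolation, as a standalone sketch (the function name is mine, not part of the unittest file):

    def assert_same_structure(real, expected):
        """Recursively compare nested summaries, ignoring list order."""
        if isinstance(real, dict):
            assert isinstance(expected, dict)
            for key, value in real.items():
                assert key in expected
                assert_same_structure(value, expected[key])
        elif isinstance(real, list):
            assert isinstance(expected, list)
            for item in real:
                # Match each single-key dict against the entry with the same keys.
                match = next(e for e in expected if e.keys() == item.keys())
                assert_same_structure(item, match)
        else:
            assert real == expected

    # Passes even though the two file lists are in different orders.
    assert_same_structure(
            {'dir': [{'b.log': {'size': 2}}, {'a.log': {'size': 1}}]},
            {'dir': [{'a.log': {'size': 1}}, {'b.log': {'size': 2}}]})
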
@@ -322,121 +345,162 @@
 
 # Not throttled.
 EXPECTED_THROTTLED_SUMMARY_NO_THROTTLE = {
-  '': {utils_lib.ORIGINAL_SIZE_BYTES: 3 * LARGE_SIZE + 5 * SMALL_SIZE,
-       utils_lib.DIRS: [
-           {'files_to_dedupe': {
-               utils_lib.ORIGINAL_SIZE_BYTES: 5 * SMALL_SIZE,
-               utils_lib.DIRS: [
-                   {'file_0.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                   {'file_1.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                   {'file_2.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                   {'file_3.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                   {'file_4.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
+        '': {
+                utils_lib.ORIGINAL_SIZE_BYTES:
+                2 * LARGE_SIZE + 5 * SMALL_SIZE,
+                utils_lib.DIRS: [
+                        {
+                                'files_to_dedupe': {
+                                        utils_lib.ORIGINAL_SIZE_BYTES:
+                                        5 * SMALL_SIZE,
+                                        utils_lib.DIRS: [
+                                                {
+                                                        'file_0.dmp': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                SMALL_SIZE
+                                                        }
+                                                },
+                                                {
+                                                        'file_1.dmp': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                SMALL_SIZE
+                                                        }
+                                                },
+                                                {
+                                                        'file_2.dmp': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                SMALL_SIZE
+                                                        }
+                                                },
+                                                {
+                                                        'file_3.dmp': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                SMALL_SIZE
+                                                        }
+                                                },
+                                                {
+                                                        'file_4.dmp': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                SMALL_SIZE
+                                                        }
+                                                },
+                                        ]
+                                }
+                        },
+                        {
+                                'files_to_delete': {
+                                        utils_lib.ORIGINAL_SIZE_BYTES:
+                                        LARGE_SIZE,
+                                        utils_lib.DIRS: [
+                                                {
+                                                        'file.png': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                LARGE_SIZE
+                                                        }
+                                                },
+                                        ]
+                                }
+                        },
+                        {
+                                'files_to_zip': {
+                                        utils_lib.ORIGINAL_SIZE_BYTES:
+                                        LARGE_SIZE,
+                                        utils_lib.DIRS: [
+                                                {
+                                                        'file.xml': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                LARGE_SIZE
+                                                        }
+                                                },
+                                        ]
+                                }
+                        },
                 ]
-            }},
-           {'files_to_delete': {
-               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-               utils_lib.DIRS: [
-                   {'file.png': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
-                ]
-            }},
-           {'files_to_shink': {
-               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-               utils_lib.DIRS: [
-                   {'file.txt': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
-                ]
-            }},
-           {'files_to_zip': {
-               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-               utils_lib.DIRS: [
-                   {'file.xml': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
-                ]
-            }},
-        ]
-       }
-    }
-
-SHRINK_SIZE = shrink_file_throttler.DEFAULT_FILE_SIZE_LIMIT_BYTE
-EXPECTED_THROTTLED_SUMMARY_WITH_SHRINK = {
-  '': {utils_lib.ORIGINAL_SIZE_BYTES: 3 * LARGE_SIZE + 5 * SMALL_SIZE,
-       utils_lib.TRIMMED_SIZE_BYTES:
-            2 * LARGE_SIZE + 5 * SMALL_SIZE + SHRINK_SIZE,
-       utils_lib.DIRS: [
-           {'files_to_dedupe': {
-               utils_lib.ORIGINAL_SIZE_BYTES: 5 * SMALL_SIZE,
-               utils_lib.DIRS: [
-                   {'file_0.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                   {'file_1.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                   {'file_2.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                   {'file_3.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                   {'file_4.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                ]
-            }},
-           {'files_to_delete': {
-               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-               utils_lib.DIRS: [
-                   {'file.png': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
-                ]
-            }},
-           {'files_to_shink': {
-               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-               utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE,
-               utils_lib.DIRS: [
-                   {'file.txt': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-                                 utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE}},
-                ]
-            }},
-           {'files_to_zip': {
-               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-               utils_lib.DIRS: [
-                   {'file.xml': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
-                ]
-            }},
-        ]
-       }
-    }
+        }
+}
 
 EXPECTED_THROTTLED_SUMMARY_WITH_DEDUPE = {
-  '': {utils_lib.ORIGINAL_SIZE_BYTES: 3 * LARGE_SIZE + 5 * SMALL_SIZE,
-       utils_lib.TRIMMED_SIZE_BYTES:
-            2 * LARGE_SIZE + 3 * SMALL_SIZE + SHRINK_SIZE,
-       utils_lib.DIRS: [
-           {'files_to_dedupe': {
-               utils_lib.ORIGINAL_SIZE_BYTES: 5 * SMALL_SIZE,
-               utils_lib.TRIMMED_SIZE_BYTES: 3 * SMALL_SIZE,
-               utils_lib.DIRS: [
-                   {'file_0.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                   {'file_1.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
-                   {'file_2.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE,
-                                   utils_lib.TRIMMED_SIZE_BYTES: 0}},
-                   {'file_3.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE,
-                                   utils_lib.TRIMMED_SIZE_BYTES: 0}},
-                   {'file_4.dmp': {utils_lib.ORIGINAL_SIZE_BYTES: SMALL_SIZE}},
+        '': {
+                utils_lib.ORIGINAL_SIZE_BYTES:
+                2 * LARGE_SIZE + 5 * SMALL_SIZE,
+                utils_lib.TRIMMED_SIZE_BYTES:
+                2 * LARGE_SIZE + 3 * SMALL_SIZE,
+                utils_lib.DIRS: [
+                        {
+                                'files_to_dedupe': {
+                                        utils_lib.ORIGINAL_SIZE_BYTES:
+                                        5 * SMALL_SIZE,
+                                        utils_lib.TRIMMED_SIZE_BYTES:
+                                        3 * SMALL_SIZE,
+                                        utils_lib.DIRS: [
+                                                {
+                                                        'file_0.dmp': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                SMALL_SIZE
+                                                        }
+                                                },
+                                                {
+                                                        'file_1.dmp': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                SMALL_SIZE
+                                                        }
+                                                },
+                                                {
+                                                        'file_2.dmp': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                SMALL_SIZE,
+                                                                utils_lib.TRIMMED_SIZE_BYTES:
+                                                                0
+                                                        }
+                                                },
+                                                {
+                                                        'file_3.dmp': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                SMALL_SIZE,
+                                                                utils_lib.TRIMMED_SIZE_BYTES:
+                                                                0
+                                                        }
+                                                },
+                                                {
+                                                        'file_4.dmp': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                SMALL_SIZE
+                                                        }
+                                                },
+                                        ]
+                                }
+                        },
+                        {
+                                'files_to_delete': {
+                                        utils_lib.ORIGINAL_SIZE_BYTES:
+                                        LARGE_SIZE,
+                                        utils_lib.DIRS: [
+                                                {
+                                                        'file.png': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                LARGE_SIZE
+                                                        }
+                                                },
+                                        ]
+                                }
+                        },
+                        {
+                                'files_to_zip': {
+                                        utils_lib.ORIGINAL_SIZE_BYTES:
+                                        LARGE_SIZE,
+                                        utils_lib.DIRS: [
+                                                {
+                                                        'file.xml': {
+                                                                utils_lib.ORIGINAL_SIZE_BYTES:
+                                                                LARGE_SIZE
+                                                        }
+                                                },
+                                        ]
+                                }
+                        },
                 ]
-            }},
-           {'files_to_delete': {
-               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-               utils_lib.DIRS: [
-                   {'file.png': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
-                ]
-            }},
-           {'files_to_shink': {
-               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-               utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE,
-               utils_lib.DIRS: [
-                   {'file.txt': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-                                 utils_lib.TRIMMED_SIZE_BYTES: SHRINK_SIZE}},
-                ]
-            }},
-           {'files_to_zip': {
-               utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE,
-               utils_lib.DIRS: [
-                   {'file.xml': {utils_lib.ORIGINAL_SIZE_BYTES: LARGE_SIZE}},
-                ]
-            }},
-        ]
-       }
-    }
+        }
+}
 
 
 class ThrottleTest(unittest.TestCase):
@@ -446,11 +510,6 @@
         """Setup directory to match the file structure in MERGED_SUMMARY."""
         self.test_dir = tempfile.mkdtemp()
 
-        folder = os.path.join(self.test_dir, 'files_to_shink')
-        os.mkdir(folder)
-        file1 = os.path.join(folder, 'file.txt')
-        unittest_lib.create_file(file1, LARGE_SIZE)
-
         folder = os.path.join(self.test_dir, 'files_to_zip')
         os.mkdir(folder)
         file1 = os.path.join(folder, 'file.xml')
@@ -478,9 +537,6 @@
         result_utils._throttle_results(summary, LARGE_SIZE * 10 // 1024)
         self.assertEqual(EXPECTED_THROTTLED_SUMMARY_NO_THROTTLE, summary)
 
-        result_utils._throttle_results(summary, LARGE_SIZE * 3 // 1024)
-        self.assertEqual(EXPECTED_THROTTLED_SUMMARY_WITH_SHRINK, summary)
-
     def testThrottleResults_Dedupe(self):
         """Test _throttle_results method with dedupe triggered."""
         # Change AUTOTEST_LOG_PATTERN to protect file.xml from being compressed
@@ -490,7 +546,7 @@
         try:
             summary = result_info.ResultInfo.build_from_path(self.test_dir)
             result_utils._throttle_results(
-                    summary, (2*LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE) // 1024)
+                    summary, (2 * LARGE_SIZE + 3 * SMALL_SIZE) // 1024)
             self.assertEqual(EXPECTED_THROTTLED_SUMMARY_WITH_DEDUPE, summary)
         finally:
             throttler_lib.AUTOTEST_LOG_PATTERN = old_pattern
@@ -499,35 +555,31 @@
         """Test _throttle_results method with dedupe triggered."""
         summary = result_info.ResultInfo.build_from_path(self.test_dir)
         result_utils._throttle_results(
-                summary, (LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE) // 1024 + 2)
-        self.assertEqual(
-                3 * LARGE_SIZE + 5 * SMALL_SIZE, summary.original_size)
+                summary, (LARGE_SIZE + 3 * SMALL_SIZE) // 1024 + 2)
+        self.assertEqual(2 * LARGE_SIZE + 5 * SMALL_SIZE,
+                         summary.original_size)
 
         entry = summary.get_file('files_to_zip').get_file('file.xml.tgz')
         self.assertEqual(LARGE_SIZE, entry.original_size)
         self.assertTrue(LARGE_SIZE > entry.trimmed_size)
 
         # The compressed file size should be less than 2 KB.
-        self.assertTrue(
-                summary.trimmed_size <
-                (LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE + 2 * 1024))
-        self.assertTrue(
-                summary.trimmed_size >
-                (LARGE_SIZE + 3*SMALL_SIZE + SHRINK_SIZE))
+        self.assertTrue(summary.trimmed_size < (LARGE_SIZE + 3 * SMALL_SIZE +
+                                                2 * 1024))
+        self.assertTrue(summary.trimmed_size > (LARGE_SIZE + 3 * SMALL_SIZE))
 
     def testThrottleResults_Delete(self):
         """Test _throttle_results method with delete triggered."""
         summary = result_info.ResultInfo.build_from_path(self.test_dir)
-        result_utils._throttle_results(
-                summary, (3*SMALL_SIZE + SHRINK_SIZE) // 1024 + 2)
+        result_utils._throttle_results(summary, (3 * SMALL_SIZE) // 1024 + 2)
 
         # Confirm the original size is preserved.
-        self.assertEqual(3 * LARGE_SIZE + 5 * SMALL_SIZE, summary.original_size)
+        self.assertEqual(2 * LARGE_SIZE + 5 * SMALL_SIZE,
+                         summary.original_size)
 
-        # Confirm the deduped, zipped and shrunk files are not deleted.
+        # Confirm the deduped and zipped files are not deleted.
         # The compressed file is at least 512 bytes.
-        self.assertTrue(
-                3 * SMALL_SIZE + SHRINK_SIZE + 512 < summary.original_size)
+        self.assertTrue(3 * SMALL_SIZE + 512 < summary.original_size)
 
         # Confirm the file to be zipped is compressed and not deleted.
         entry = summary.get_file('files_to_zip').get_file('file.xml.tgz')
diff --git a/client/bin/self-test/README b/client/bin/self-test/README
deleted file mode 100644
index eb5c4a7..0000000
--- a/client/bin/self-test/README
+++ /dev/null
@@ -1,9 +0,0 @@
-Function Test Suite
-===================
-To run the function test suite:
-
-	sh test test
-
-To clean up afterwards:
-
-	sh test clean
diff --git a/client/bin/self-test/extract_tarball_to_dir b/client/bin/self-test/extract_tarball_to_dir
deleted file mode 100755
index 918df2f..0000000
--- a/client/bin/self-test/extract_tarball_to_dir
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/python2
-import sys, os, os.path
-
-autodir = os.environ['AUTODIR']
-sys.path.insert(0, autodir + '/bin')
-from autotest_lib.client.bin import utils
-
-os.chdir('/tmp')
-utils.extract_tarball_to_dir(autodir +
-                                      '/tests/bonnie/bonnie++-1.03a.tgz',
-                                      'poo')
diff --git a/client/bin/self-test/test b/client/bin/self-test/test
deleted file mode 100755
index 8b07906..0000000
--- a/client/bin/self-test/test
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/sh
-#
-# test -- run a client self test.
-#
-P="test"
-
-fix=`/bin/pwd`
-fix=`dirname $fix`
-fix=`dirname $fix`
-
-me="../autotest"
-
-# XXX: the exit status that indicates a rerun ...
-rerun=5
-
-function runtests {
-	for i in \
-		"$@"
-	do
-		case "$i" in
-		*-filter|*-out|*-tmp|*.state)	continue ;;
-		esac
-
-		##echo "*** $i ...."
-		{
-			"$me" "$i"
-			rc="$?"
-			echo "--SELFTEST-- exit $rc"
-			while [ "$rc" = "$rerun" ]; do
-				"$me" --continue "$i"
-				rc="$?"
-				echo "--SELFTEST-- exit $rc"
-			done
-		} 2>&1 | `dirname "$i"`/NNN-filter "$i" | \
-			sed -e "s@$fix@SRC@" -e "s@, line [0-9]*@, line N@" \
-			>"$i-tmp" 2>&1
-
-		if [ ! -f "$i-out" ]; then
-			echo "$P: WARNING: $i: no results for test"
-			cat "$i-tmp"
-
-		elif ! cmp "$i-out" "$i-tmp"; then
-			echo "$P: ERROR: $i: test failed"
-			diff -u "$i-out" "$i-tmp"
-
-		else
-			echo "$P: PASS: $i: test passed"
-		fi
-	done
-}
-
-# Run all of the tests.
-case "$1" in
-clean)		rm -rf tests/*-tmp tests/*.state ;;
-test)		runtests tests/* ;;
-*)		runtests "$@" ;;
-esac
diff --git a/client/bin/self-test/test_logfile.stderr b/client/bin/self-test/test_logfile.stderr
deleted file mode 100644
index 5429f1c..0000000
--- a/client/bin/self-test/test_logfile.stderr
+++ /dev/null
@@ -1,2 +0,0 @@
-This should go into the stdout logfile 1
-make: *** No rule to make target `love,'.  Stop.
diff --git a/client/bin/self-test/test_logfile.stdout b/client/bin/self-test/test_logfile.stdout
deleted file mode 100644
index e14e582..0000000
--- a/client/bin/self-test/test_logfile.stdout
+++ /dev/null
@@ -1,4 +0,0 @@
-This should go into the stdout logfile 2
-Mon Jan 30 18:27:24 PST 2006
-t logfile 2
-Mon Jan 30 18:18:20 PST 2006
diff --git a/client/bin/self-test/test_redirect b/client/bin/self-test/test_redirect
deleted file mode 100755
index 12ea604..0000000
--- a/client/bin/self-test/test_redirect
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/python2
-import os
-
-stdout = fd_stack(1, sys.stdout)
-stderr = fd_stack(2, sys.stderr)
-
-print "About to redirect stdout 1"
-os.system ("echo About to redirect stdout 2")
-os.system ("date")
-stdout.redirect("test_logfile.stdout")
-print "This should go into the stdout logfile 1"
-os.system ("echo This should go into the stdout logfile 2")
-# stderr.redirect("test_logfile.stderr")
-sys.stderr.write ("This should go into the stdout logfile 1\n")
-os.system ("date")
-# stderr.restore()
-stdout.restore()
-print "stdout all done 1"
-os.system ("echo stdout all done 2")
-os.system ("date")
diff --git a/client/bin/self-test/tests/010-basic-output b/client/bin/self-test/tests/010-basic-output
deleted file mode 100644
index 38efa18..0000000
--- a/client/bin/self-test/tests/010-basic-output
+++ /dev/null
@@ -1 +0,0 @@
-print "--SELFTEST-- abc123 123cba"
diff --git a/client/bin/self-test/tests/010-basic-output-out b/client/bin/self-test/tests/010-basic-output-out
deleted file mode 100644
index 5832ad6..0000000
--- a/client/bin/self-test/tests/010-basic-output-out
+++ /dev/null
@@ -1,2 +0,0 @@
---SELFTEST-- abc123 123cba
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/015-exception-output b/client/bin/self-test/tests/015-exception-output
deleted file mode 100644
index e0c40b5..0000000
--- a/client/bin/self-test/tests/015-exception-output
+++ /dev/null
@@ -1 +0,0 @@
-raise JobError("--SELFTEST-- JobError")
diff --git a/client/bin/self-test/tests/015-exception-output-out b/client/bin/self-test/tests/015-exception-output-out
deleted file mode 100644
index 718570e..0000000
--- a/client/bin/self-test/tests/015-exception-output-out
+++ /dev/null
@@ -1,2 +0,0 @@
-JOB ERROR: --SELFTEST-- JobError
---SELFTEST-- exit 1
diff --git a/client/bin/self-test/tests/017-system-exception b/client/bin/self-test/tests/017-system-exception
deleted file mode 100644
index 810b0f8..0000000
--- a/client/bin/self-test/tests/017-system-exception
+++ /dev/null
@@ -1,4 +0,0 @@
-try:
-	system('false')
-except CmdError, instance:
-	print "--SELFTEST-- CmdError Thrown: " + instance.__str__()
diff --git a/client/bin/self-test/tests/017-system-exception-out b/client/bin/self-test/tests/017-system-exception-out
deleted file mode 100644
index 5ed312c..0000000
--- a/client/bin/self-test/tests/017-system-exception-out
+++ /dev/null
@@ -1,2 +0,0 @@
---SELFTEST-- CmdError Thrown: Command <false> failed, rc=256
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/020-parallel-basic b/client/bin/self-test/tests/020-parallel-basic
deleted file mode 100644
index 8364832..0000000
--- a/client/bin/self-test/tests/020-parallel-basic
+++ /dev/null
@@ -1,20 +0,0 @@
-import time
-
-print "--SELFTEST-- loading test"
-
-def payload1():
-	print "--STEPTEST-- payload1 start"
-	sys.stdout.flush()
-	time.sleep(2)
-	print "--STEPTEST-- payload1 complete"
-	sys.stdout.flush()
-
-def payload2():
-	time.sleep(1)
-	print "--STEPTEST-- payload2 start"
-	sys.stdout.flush()
-	time.sleep(2)
-	print "--STEPTEST-- payload2 complete"
-	sys.stdout.flush()
-
-job.parallel([payload1], [payload2])
diff --git a/client/bin/self-test/tests/020-parallel-basic-out b/client/bin/self-test/tests/020-parallel-basic-out
deleted file mode 100644
index 803ca80..0000000
--- a/client/bin/self-test/tests/020-parallel-basic-out
+++ /dev/null
@@ -1,6 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- payload1 start
---STEPTEST-- payload2 start
---STEPTEST-- payload1 complete
---STEPTEST-- payload2 complete
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/030-stepper-basic b/client/bin/self-test/tests/030-stepper-basic
deleted file mode 100644
index ad65274..0000000
--- a/client/bin/self-test/tests/030-stepper-basic
+++ /dev/null
@@ -1,8 +0,0 @@
-print "--SELFTEST-- loading test"
-
-def step_init():
-	job.next_step([step_two])
-	print "--STEPTEST-- step_init called"
-	
-def step_two():
-	print "--STEPTEST-- step_two called"
diff --git a/client/bin/self-test/tests/030-stepper-basic-out b/client/bin/self-test/tests/030-stepper-basic-out
deleted file mode 100644
index f80b374..0000000
--- a/client/bin/self-test/tests/030-stepper-basic-out
+++ /dev/null
@@ -1,4 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- step_init called
---STEPTEST-- step_two called
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/031-stepper-chain b/client/bin/self-test/tests/031-stepper-chain
deleted file mode 100644
index 11c14b4..0000000
--- a/client/bin/self-test/tests/031-stepper-chain
+++ /dev/null
@@ -1,12 +0,0 @@
-print "--SELFTEST-- loading test"
-
-def step_init():
-	job.next_step([step_two])
-	print "--STEPTEST-- step_init called"
-	
-def step_two():
-	job.next_step([step_three])
-	print "--STEPTEST-- step_two called"
-	
-def step_three():
-	print "--STEPTEST-- step_three called"
diff --git a/client/bin/self-test/tests/031-stepper-chain-out b/client/bin/self-test/tests/031-stepper-chain-out
deleted file mode 100644
index 1de9d34..0000000
--- a/client/bin/self-test/tests/031-stepper-chain-out
+++ /dev/null
@@ -1,5 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- step_init called
---STEPTEST-- step_two called
---STEPTEST-- step_three called
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/032-stepper-list-order b/client/bin/self-test/tests/032-stepper-list-order
deleted file mode 100644
index 8522b5c..0000000
--- a/client/bin/self-test/tests/032-stepper-list-order
+++ /dev/null
@@ -1,12 +0,0 @@
-print "--SELFTEST-- loading test"
-
-def step_init():
-	job.next_step([step_two])
-	job.next_step([step_three])
-	print "--STEPTEST-- step_init called"
-	
-def step_two():
-	print "--STEPTEST-- step_two called"
-	
-def step_three():
-	print "--STEPTEST-- step_three called"
diff --git a/client/bin/self-test/tests/032-stepper-list-order-out b/client/bin/self-test/tests/032-stepper-list-order-out
deleted file mode 100644
index 1de9d34..0000000
--- a/client/bin/self-test/tests/032-stepper-list-order-out
+++ /dev/null
@@ -1,5 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- step_init called
---STEPTEST-- step_two called
---STEPTEST-- step_three called
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/033-stepper-parameters b/client/bin/self-test/tests/033-stepper-parameters
deleted file mode 100644
index de872e1..0000000
--- a/client/bin/self-test/tests/033-stepper-parameters
+++ /dev/null
@@ -1,9 +0,0 @@
-print "--SELFTEST-- loading test"
-
-def step_init():
-	job.next_step([step_test, 1])
-	job.next_step([step_test, 2])
-	print "--STEPTEST-- step_init called"
-	
-def step_test(iteration):
-	print "--STEPTEST-- step_test called iteration=%d" % iteration
diff --git a/client/bin/self-test/tests/033-stepper-parameters-out b/client/bin/self-test/tests/033-stepper-parameters-out
deleted file mode 100644
index 0c6e0ba..0000000
--- a/client/bin/self-test/tests/033-stepper-parameters-out
+++ /dev/null
@@ -1,5 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- step_init called
---STEPTEST-- step_test called iteration=1
---STEPTEST-- step_test called iteration=2
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/034-stepper-iterate b/client/bin/self-test/tests/034-stepper-iterate
deleted file mode 100644
index 8483d5c..0000000
--- a/client/bin/self-test/tests/034-stepper-iterate
+++ /dev/null
@@ -1,11 +0,0 @@
-print "--SELFTEST-- loading test"
-
-def step_init():
-	print "--STEPTEST-- step_init called"
-	step_test(1)
-	
-def step_test(iteration):
-	if (iteration < 5):
-		job.next_step([step_test, iteration + 1])
-
-        print "--STEPTEST-- step_test called iteration=%d" % iteration
diff --git a/client/bin/self-test/tests/034-stepper-iterate-out b/client/bin/self-test/tests/034-stepper-iterate-out
deleted file mode 100644
index 4ea0723..0000000
--- a/client/bin/self-test/tests/034-stepper-iterate-out
+++ /dev/null
@@ -1,8 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- step_init called
---STEPTEST-- step_test called iteration=1
---STEPTEST-- step_test called iteration=2
---STEPTEST-- step_test called iteration=3
---STEPTEST-- step_test called iteration=4
---STEPTEST-- step_test called iteration=5
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/040-stepper-boot-chain b/client/bin/self-test/tests/040-stepper-boot-chain
deleted file mode 100644
index 68d5def..0000000
--- a/client/bin/self-test/tests/040-stepper-boot-chain
+++ /dev/null
@@ -1,14 +0,0 @@
-print "--SELFTEST-- loading test"
-
-def step_init():
-	job.next_step([step_two])
-	print "--STEPTEST-- step_init called"
-	job.quit()
-	
-def step_two():
-	job.next_step([step_three])
-	print "--STEPTEST-- step_two called"
-	job.quit()
-	
-def step_three():
-	print "--STEPTEST-- step_three called"
diff --git a/client/bin/self-test/tests/040-stepper-boot-chain-out b/client/bin/self-test/tests/040-stepper-boot-chain-out
deleted file mode 100644
index 006b807..0000000
--- a/client/bin/self-test/tests/040-stepper-boot-chain-out
+++ /dev/null
@@ -1,9 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- step_init called
---SELFTEST-- exit 5
---SELFTEST-- loading test
---STEPTEST-- step_two called
---SELFTEST-- exit 5
---SELFTEST-- loading test
---STEPTEST-- step_three called
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/042-stepper-boot-list-order b/client/bin/self-test/tests/042-stepper-boot-list-order
deleted file mode 100644
index 53dd899..0000000
--- a/client/bin/self-test/tests/042-stepper-boot-list-order
+++ /dev/null
@@ -1,14 +0,0 @@
-print "--SELFTEST-- loading test"
-
-def step_init():
-	job.next_step([step_two])
-	job.next_step([step_three])
-	print "--STEPTEST-- step_init called"
-	job.quit()
-	
-def step_two():
-	print "--STEPTEST-- step_two called"
-	job.quit()
-	
-def step_three():
-	print "--STEPTEST-- step_three called"
diff --git a/client/bin/self-test/tests/042-stepper-boot-list-order-out b/client/bin/self-test/tests/042-stepper-boot-list-order-out
deleted file mode 100644
index 006b807..0000000
--- a/client/bin/self-test/tests/042-stepper-boot-list-order-out
+++ /dev/null
@@ -1,9 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- step_init called
---SELFTEST-- exit 5
---SELFTEST-- loading test
---STEPTEST-- step_two called
---SELFTEST-- exit 5
---SELFTEST-- loading test
---STEPTEST-- step_three called
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/043-stepper-boot-parameters b/client/bin/self-test/tests/043-stepper-boot-parameters
deleted file mode 100644
index f038e63..0000000
--- a/client/bin/self-test/tests/043-stepper-boot-parameters
+++ /dev/null
@@ -1,10 +0,0 @@
-print "--SELFTEST-- loading test"
-
-def step_init():
-	job.next_step([step_test, 1])
-	job.next_step([step_test, 2])
-	print "--STEPTEST-- step_init called"
-	
-def step_test(iteration):
-	print "--STEPTEST-- step_test called iteration=%d" % iteration
-	job.quit()
diff --git a/client/bin/self-test/tests/043-stepper-boot-parameters-out b/client/bin/self-test/tests/043-stepper-boot-parameters-out
deleted file mode 100644
index 2342642..0000000
--- a/client/bin/self-test/tests/043-stepper-boot-parameters-out
+++ /dev/null
@@ -1,9 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- step_init called
---STEPTEST-- step_test called iteration=1
---SELFTEST-- exit 5
---SELFTEST-- loading test
---STEPTEST-- step_test called iteration=2
---SELFTEST-- exit 5
---SELFTEST-- loading test
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/044-stepper-boot-iterate b/client/bin/self-test/tests/044-stepper-boot-iterate
deleted file mode 100644
index 14fe032..0000000
--- a/client/bin/self-test/tests/044-stepper-boot-iterate
+++ /dev/null
@@ -1,13 +0,0 @@
-print "--SELFTEST-- loading test"
-
-def step_init():
-	print "--STEPTEST-- step_init called"
-	step_test(1)
-	
-def step_test(iteration):
-	if (iteration < 5):
-		job.next_step([step_test, iteration + 1])
-
-        print "--STEPTEST-- step_test called iteration=%d" % iteration
-
-	job.quit()
diff --git a/client/bin/self-test/tests/044-stepper-boot-iterate-out b/client/bin/self-test/tests/044-stepper-boot-iterate-out
deleted file mode 100644
index d735e38..0000000
--- a/client/bin/self-test/tests/044-stepper-boot-iterate-out
+++ /dev/null
@@ -1,18 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- step_init called
---STEPTEST-- step_test called iteration=1
---SELFTEST-- exit 5
---SELFTEST-- loading test
---STEPTEST-- step_test called iteration=2
---SELFTEST-- exit 5
---SELFTEST-- loading test
---STEPTEST-- step_test called iteration=3
---SELFTEST-- exit 5
---SELFTEST-- loading test
---STEPTEST-- step_test called iteration=4
---SELFTEST-- exit 5
---SELFTEST-- loading test
---STEPTEST-- step_test called iteration=5
---SELFTEST-- exit 5
---SELFTEST-- loading test
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/080-sequencer b/client/bin/self-test/tests/080-sequencer
deleted file mode 100644
index 74c9399..0000000
--- a/client/bin/self-test/tests/080-sequencer
+++ /dev/null
@@ -1,3 +0,0 @@
-job.run_test('selftest', 'mark', 1, tag='test1')
-job.run_test('selftest', 'mark', 3, tag='test3')
-job.run_test('selftest', 'mark', 2, tag='test2')
diff --git a/client/bin/self-test/tests/080-sequencer-out b/client/bin/self-test/tests/080-sequencer-out
deleted file mode 100644
index 75bb69f..0000000
--- a/client/bin/self-test/tests/080-sequencer-out
+++ /dev/null
@@ -1,8 +0,0 @@
-checkpoint 1 1
-GOOD selftest.test1 Completed Successfully
-
-checkpoint 2 3
-FAIL selftest.test3 selftest: sequence was 2 when 3 expected
-
-JOB ERROR: selftest: sequence was 2 when 3 expected
---SELFTEST-- exit 1
diff --git a/client/bin/self-test/tests/085-test-exception b/client/bin/self-test/tests/085-test-exception
deleted file mode 100644
index e4c322f..0000000
--- a/client/bin/self-test/tests/085-test-exception
+++ /dev/null
@@ -1 +0,0 @@
-job.run_test('selftest', 'throw', tag='test1')
diff --git a/client/bin/self-test/tests/085-test-exception-out b/client/bin/self-test/tests/085-test-exception-out
deleted file mode 100644
index 2481808..0000000
--- a/client/bin/self-test/tests/085-test-exception-out
+++ /dev/null
@@ -1,12 +0,0 @@
-FAIL selftest.test1 running test selftest
-Traceback (most recent call last):
-  File "SRC/bin/test.py", line N, in __exec
-    self.execute(*parameters)
-  File "SRC/tests/selftest/selftest.py", line N, in execute
-    self.__throw(*args)
-  File "SRC/tests/selftest/selftest.py", line N, in __throw
-    __does_not_exist = __does_not_exist_either
-NameError: global name '_selftest__does_not_exist_either' is not defined
-
-
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/087-test-output b/client/bin/self-test/tests/087-test-output
deleted file mode 100644
index 313a4f3..0000000
--- a/client/bin/self-test/tests/087-test-output
+++ /dev/null
@@ -1,2 +0,0 @@
-job.run_test('selftest', 'print', "stdout hello\n", tag='test1')
-job.run_test('selftest', 'warn', "stderr hello\n", tag='test2')
diff --git a/client/bin/self-test/tests/087-test-output-out b/client/bin/self-test/tests/087-test-output-out
deleted file mode 100644
index 8d3aa8f..0000000
--- a/client/bin/self-test/tests/087-test-output-out
+++ /dev/null
@@ -1,7 +0,0 @@
-stdout hello
-GOOD selftest.test1 Completed Successfully
-
-stderr hello
-GOOD selftest.test2 Completed Successfully
-
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/100-barrier-2x-master-first b/client/bin/self-test/tests/100-barrier-2x-master-first
deleted file mode 100644
index 7d1823f..0000000
--- a/client/bin/self-test/tests/100-barrier-2x-master-first
+++ /dev/null
@@ -1,25 +0,0 @@
-import time
-
-print "--SELFTEST-- loading test"
-
-us = [ '127.0.0.1#P1', '127.0.0.1#P2' ]
-
-def payload1():
-	print "--STEPTEST-- payload1 start"
-
-	b = job.barrier('127.0.0.1#P1', 'here', 45)
-	b.rendezvous(*us)
-
-	print "--STEPTEST-- payload1 complete"
-
-def payload2():
-	print "--STEPTEST-- payload2 start"
-
-	time.sleep(5)
-	b = job.barrier('127.0.0.1#P2', 'here', 40)
-	b.rendezvous(*us)
-
-	time.sleep(1)
-	print "--STEPTEST-- payload2 complete"
-
-job.parallel([payload1], [payload2])
diff --git a/client/bin/self-test/tests/100-barrier-2x-master-first-out b/client/bin/self-test/tests/100-barrier-2x-master-first-out
deleted file mode 100644
index 2ce9b30..0000000
--- a/client/bin/self-test/tests/100-barrier-2x-master-first-out
+++ /dev/null
@@ -1,23 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- payload1 start
-barrier: 127.0.0.1#P1 tag=here port=63000 timeout=45 start=NNN
-barrier: 127.0.0.1#P1 members: 127.0.0.1#P1,127.0.0.1#P2
-barrier: 127.0.0.1#P1 selected as master
---STEPTEST-- payload2 start
-barrier: 127.0.0.1#P2 tag=here port=63000 timeout=40 start=NNN
-barrier: 127.0.0.1#P2 members: 127.0.0.1#P1,127.0.0.1#P2
-barrier: 127.0.0.1#P2 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P2
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P2 (ADDR:PORT)
-barrier: 127.0.0.1#P1 master seen 2 of 2
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P2
-barrier: 127.0.0.1#P2 master said: wait
-barrier: 127.0.0.1#P2 master said: ping
-barrier: 127.0.0.1#P2 pong
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P2
---STEPTEST-- payload1 complete
-barrier: 127.0.0.1#P2 master said: rlse
-barrier: 127.0.0.1#P2 was released, waiting for close
---STEPTEST-- payload2 complete
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/101-barrier-2x-slave-first b/client/bin/self-test/tests/101-barrier-2x-slave-first
deleted file mode 100644
index 1da917e..0000000
--- a/client/bin/self-test/tests/101-barrier-2x-slave-first
+++ /dev/null
@@ -1,25 +0,0 @@
-import time
-
-print "--SELFTEST-- loading test"
-
-us = [ '127.0.0.1#P1', '127.0.0.1#P2' ]
-
-def payload1():
-	print "--STEPTEST-- payload1 start"
-
-	b = job.barrier('127.0.0.1#P2', 'here', 45)
-	b.rendezvous(*us)
-
-	print "--STEPTEST-- payload1 complete"
-
-def payload2():
-	print "--STEPTEST-- payload2 start"
-
-	time.sleep(5)
-	b = job.barrier('127.0.0.1#P1', 'here', 40)
-	b.rendezvous(*us)
-
-	time.sleep(1)
-	print "--STEPTEST-- payload2 complete"
-
-job.parallel([payload1], [payload2])
diff --git a/client/bin/self-test/tests/101-barrier-2x-slave-first-out b/client/bin/self-test/tests/101-barrier-2x-slave-first-out
deleted file mode 100644
index 426c9e6..0000000
--- a/client/bin/self-test/tests/101-barrier-2x-slave-first-out
+++ /dev/null
@@ -1,23 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- payload1 start
-barrier: 127.0.0.1#P2 tag=here port=63000 timeout=45 start=NNN
-barrier: 127.0.0.1#P2 members: 127.0.0.1#P1,127.0.0.1#P2
-barrier: 127.0.0.1#P2 selected as slave, master=127.0.0.1
---STEPTEST-- payload2 start
-barrier: 127.0.0.1#P1 tag=here port=63000 timeout=40 start=NNN
-barrier: 127.0.0.1#P1 members: 127.0.0.1#P1,127.0.0.1#P2
-barrier: 127.0.0.1#P1 selected as master
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P2
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P2 (ADDR:PORT)
-barrier: 127.0.0.1#P1 master seen 2 of 2
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P2
-barrier: 127.0.0.1#P2 master said: wait
-barrier: 127.0.0.1#P2 master said: ping
-barrier: 127.0.0.1#P2 pong
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P2 master said: rlse
-barrier: 127.0.0.1#P2 was released, waiting for close
---STEPTEST-- payload1 complete
---STEPTEST-- payload2 complete
---SELFTEST-- exit 0
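
Tests 100 and 101 rendezvous two parallel payloads on one barrier and differ only in which payload starts first; the expected logs show 127.0.0.1#P1 elected master either way, so election follows the member list rather than arrival order. A condensed sketch of the pattern (members, payload, my_id and delay are illustrative names):

import time

members = ['127.0.0.1#P1', '127.0.0.1#P2']

def payload(my_id, delay):
    time.sleep(delay)
    # job.barrier(hostid, tag, timeout_seconds); rendezvous() blocks until
    # every listed member has reached the barrier or the timeout expires.
    b = job.barrier(my_id, 'here', 45)
    b.rendezvous(*members)
    print "--STEPTEST-- %s released" % my_id

# Each argument list is [callable, args...]; the payloads run concurrently.
job.parallel([payload, '127.0.0.1#P1', 0],
             [payload, '127.0.0.1#P2', 5])
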
diff --git a/client/bin/self-test/tests/102-barrier-2x-master-only b/client/bin/self-test/tests/102-barrier-2x-master-only
deleted file mode 100644
index 9da109c..0000000
--- a/client/bin/self-test/tests/102-barrier-2x-master-only
+++ /dev/null
@@ -1,14 +0,0 @@
-import time
-
-print "--SELFTEST-- loading test"
-
-us = [ '127.0.0.1#P1', '127.0.0.1#P2' ]
-
-print "--STEPTEST-- payload1 start"
-
-b = job.barrier('127.0.0.1#P1', 'here', 15)
-b.rendezvous(*us)
-
-print "--STEPTEST-- payload1 complete"
-
-print "--STEPTEST-- payload2 complete"
diff --git a/client/bin/self-test/tests/102-barrier-2x-master-only-out b/client/bin/self-test/tests/102-barrier-2x-master-only-out
deleted file mode 100644
index 1dbef0e..0000000
--- a/client/bin/self-test/tests/102-barrier-2x-master-only-out
+++ /dev/null
@@ -1,9 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- payload1 start
-barrier: 127.0.0.1#P1 tag=here port=63000 timeout=15 start=NNN
-barrier: 127.0.0.1#P1 members: 127.0.0.1#P1,127.0.0.1#P2
-barrier: 127.0.0.1#P1 selected as master
-barrier: 127.0.0.1#P1 master seen 1 of 2
-barrier: 127.0.0.1#P1 master seen 1 of 2
-JOB ERROR: timeout waiting for barrier
---SELFTEST-- exit 1
diff --git a/client/bin/self-test/tests/103-barrier-2x-slave-only b/client/bin/self-test/tests/103-barrier-2x-slave-only
deleted file mode 100644
index 11ade5e..0000000
--- a/client/bin/self-test/tests/103-barrier-2x-slave-only
+++ /dev/null
@@ -1,14 +0,0 @@
-import time
-
-print "--SELFTEST-- loading test"
-
-us = [ '127.0.0.1#P1', '127.0.0.1#P2' ]
-
-print "--STEPTEST-- payload1 start"
-
-b = job.barrier('127.0.0.1#P2', 'here', 15)
-b.rendezvous(*us)
-
-print "--STEPTEST-- payload1 complete"
-
-print "--STEPTEST-- payload2 complete"
diff --git a/client/bin/self-test/tests/103-barrier-2x-slave-only-out b/client/bin/self-test/tests/103-barrier-2x-slave-only-out
deleted file mode 100644
index 72e987e..0000000
--- a/client/bin/self-test/tests/103-barrier-2x-slave-only-out
+++ /dev/null
@@ -1,7 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- payload1 start
-barrier: 127.0.0.1#P2 tag=here port=63000 timeout=15 start=NNN
-barrier: 127.0.0.1#P2 members: 127.0.0.1#P1,127.0.0.1#P2
-barrier: 127.0.0.1#P2 selected as slave, master=127.0.0.1
-JOB ERROR: timeout waiting for barrier
---SELFTEST-- exit 1
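
Tests 102 and 103 cover the failure path: only one of the two declared members ever reaches the barrier (the would-be master in 102, the would-be slave in 103), so rendezvous() times out, the job logs 'JOB ERROR: timeout waiting for barrier' and the self-test exits 1. Sketched with the same member list:

members = ['127.0.0.1#P1', '127.0.0.1#P2']

# The second member never calls rendezvous(), so this blocks for the full
# 15-second timeout and then raises, failing the job.
b = job.barrier('127.0.0.1#P1', 'here', 15)
b.rendezvous(*members)
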
diff --git a/client/bin/self-test/tests/105-barrier-5x-master-middle b/client/bin/self-test/tests/105-barrier-5x-master-middle
deleted file mode 100644
index 04a6be0..0000000
--- a/client/bin/self-test/tests/105-barrier-5x-master-middle
+++ /dev/null
@@ -1,39 +0,0 @@
-import time
-
-print "--SELFTEST-- loading test"
-
-us = [
-	'127.0.0.1#P1',
-	'127.0.0.1#P2',
-	'127.0.0.1#P3',
-	'127.0.0.1#P4',
-	'127.0.0.1#P5'
-]
-
-def payload1(wait, n):
-	time.sleep(wait)
-
-	print "--STEPTEST-- payload1 start"
-
-	b = job.barrier('127.0.0.1#P%d' % (n), 'here', 60)
-	b.rendezvous(*us)
-
-	print "--STEPTEST-- payload1 complete"
-
-def payload2(wait, n):
-	time.sleep(wait)
-	print "--STEPTEST-- payload2 start"
-
-	b = job.barrier('127.0.0.1#P%d' % (n), 'here', 60)
-	b.rendezvous(*us)
-
-	time.sleep(1)
-	print "--STEPTEST-- payload2 complete"
-
-job.parallel(
-	[payload2, 5,  1],
-	[payload2, 10, 2],
-	[payload1, 15, 3],
-	[payload2, 20, 4],
-	[payload2, 25, 5]
-)
diff --git a/client/bin/self-test/tests/105-barrier-5x-master-middle-out b/client/bin/self-test/tests/105-barrier-5x-master-middle-out
deleted file mode 100644
index 618a819..0000000
--- a/client/bin/self-test/tests/105-barrier-5x-master-middle-out
+++ /dev/null
@@ -1,71 +0,0 @@
---SELFTEST-- loading test
---STEPTEST-- payload2 start
-barrier: 127.0.0.1#P1 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P1 members: 127.0.0.1#P1,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5
-barrier: 127.0.0.1#P1 selected as master
---STEPTEST-- payload2 start
-barrier: 127.0.0.1#P2 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P2 members: 127.0.0.1#P1,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5
-barrier: 127.0.0.1#P2 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P2
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P2 (ADDR:PORT)
-barrier: 127.0.0.1#P1 master seen 2 of 5
-barrier: 127.0.0.1#P2 master said: wait
---STEPTEST-- payload1 start
-barrier: 127.0.0.1#P3 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P3 members: 127.0.0.1#P1,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5
-barrier: 127.0.0.1#P3 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P3
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P3 (ADDR:PORT)
-barrier: 127.0.0.1#P1 master seen 3 of 5
-barrier: 127.0.0.1#P3 master said: wait
---STEPTEST-- payload2 start
-barrier: 127.0.0.1#P4 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P4 members: 127.0.0.1#P1,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5
-barrier: 127.0.0.1#P4 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P4
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P4 (ADDR:PORT)
-barrier: 127.0.0.1#P1 master seen 4 of 5
-barrier: 127.0.0.1#P4 master said: wait
---STEPTEST-- payload2 start
-barrier: 127.0.0.1#P5 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P5 members: 127.0.0.1#P1,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5
-barrier: 127.0.0.1#P5 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P5
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P5 (ADDR:PORT)
-barrier: 127.0.0.1#P1 master seen 5 of 5
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P2
-barrier: 127.0.0.1#P5 master said: wait
-barrier: 127.0.0.1#P2 master said: ping
-barrier: 127.0.0.1#P2 pong
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P3
-barrier: 127.0.0.1#P3 master said: ping
-barrier: 127.0.0.1#P3 pong
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P4
-barrier: 127.0.0.1#P4 master said: ping
-barrier: 127.0.0.1#P4 pong
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P5
-barrier: 127.0.0.1#P5 master said: ping
-barrier: 127.0.0.1#P5 pong
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P5
-barrier: 127.0.0.1#P2 master said: rlse
-barrier: 127.0.0.1#P2 was released, waiting for close
-barrier: 127.0.0.1#P3 master said: rlse
-barrier: 127.0.0.1#P3 was released, waiting for close
---STEPTEST-- payload1 complete
-barrier: 127.0.0.1#P4 master said: rlse
-barrier: 127.0.0.1#P4 was released, waiting for close
-barrier: 127.0.0.1#P5 master said: rlse
-barrier: 127.0.0.1#P5 was released, waiting for close
---STEPTEST-- payload2 complete
---STEPTEST-- payload2 complete
---STEPTEST-- payload2 complete
---STEPTEST-- payload2 complete
---SELFTEST-- exit 0
diff --git a/client/bin/self-test/tests/110-barrier-unordered-10x b/client/bin/self-test/tests/110-barrier-unordered-10x
deleted file mode 100644
index 4451e0f..0000000
--- a/client/bin/self-test/tests/110-barrier-unordered-10x
+++ /dev/null
@@ -1,24 +0,0 @@
-import time
-import random
-
-print "--SELFTEST-- loading test"
-
-max = 10
-
-def payload1(id):
-	print "--STEPTEST-- payload1 start"
-
-	time.sleep(random.randint(1, 5))
-
-	b = job.barrier('127.0.0.1#P%d' % (id), 'here', 60)
-	b.rendezvous(*us)
-
-	print "--STEPTEST-- payload1 complete"
-
-us = []
-jobs = []
-for n in range(1, max+1):
-	us.append('127.0.0.1#P%d' % (n))
-	jobs.append([payload1, n])
-
-job.parallel(*jobs)
diff --git a/client/bin/self-test/tests/110-barrier-unordered-10x-out b/client/bin/self-test/tests/110-barrier-unordered-10x-out
deleted file mode 100644
index c0182ce..0000000
--- a/client/bin/self-test/tests/110-barrier-unordered-10x-out
+++ /dev/null
@@ -1,151 +0,0 @@
-barrier: 127.0.0.1#P10 master said: ping
-barrier: 127.0.0.1#P10 master said: rlse
-barrier: 127.0.0.1#P10 master said: wait
-barrier: 127.0.0.1#P10 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P10 pong
-barrier: 127.0.0.1#P10 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P10 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P10 was released, waiting for close
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P10 (127.0.0.1:33419)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P2 (127.0.0.1:33414)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P3 (127.0.0.1:33417)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P4 (127.0.0.1:33418)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P5 (127.0.0.1:33411)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P6 (127.0.0.1:33415)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P7 (127.0.0.1:33412)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P8 (127.0.0.1:33416)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P9 (127.0.0.1:33413)
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 master seen 10 of 10
-barrier: 127.0.0.1#P1 master seen 2 of 10
-barrier: 127.0.0.1#P1 master seen 3 of 10
-barrier: 127.0.0.1#P1 master seen 4 of 10
-barrier: 127.0.0.1#P1 master seen 5 of 10
-barrier: 127.0.0.1#P1 master seen 6 of 10
-barrier: 127.0.0.1#P1 master seen 7 of 10
-barrier: 127.0.0.1#P1 master seen 8 of 10
-barrier: 127.0.0.1#P1 master seen 9 of 10
-barrier: 127.0.0.1#P1 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P10
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P2
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P3
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P4
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P5
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P6
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P7
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P8
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P9
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 selected as master
-barrier: 127.0.0.1#P1 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P2 master said: ping
-barrier: 127.0.0.1#P2 master said: rlse
-barrier: 127.0.0.1#P2 master said: wait
-barrier: 127.0.0.1#P2 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P2 pong
-barrier: 127.0.0.1#P2 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P2 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P2 was released, waiting for close
-barrier: 127.0.0.1#P3 master said: ping
-barrier: 127.0.0.1#P3 master said: rlse
-barrier: 127.0.0.1#P3 master said: wait
-barrier: 127.0.0.1#P3 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P3 pong
-barrier: 127.0.0.1#P3 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P3 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P3 was released, waiting for close
-barrier: 127.0.0.1#P4 master said: ping
-barrier: 127.0.0.1#P4 master said: rlse
-barrier: 127.0.0.1#P4 master said: wait
-barrier: 127.0.0.1#P4 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P4 pong
-barrier: 127.0.0.1#P4 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P4 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P4 was released, waiting for close
-barrier: 127.0.0.1#P5 master said: ping
-barrier: 127.0.0.1#P5 master said: rlse
-barrier: 127.0.0.1#P5 master said: wait
-barrier: 127.0.0.1#P5 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P5 pong
-barrier: 127.0.0.1#P5 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P5 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P5 was released, waiting for close
-barrier: 127.0.0.1#P6 master said: ping
-barrier: 127.0.0.1#P6 master said: rlse
-barrier: 127.0.0.1#P6 master said: wait
-barrier: 127.0.0.1#P6 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P6 pong
-barrier: 127.0.0.1#P6 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P6 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P6 was released, waiting for close
-barrier: 127.0.0.1#P7 master said: ping
-barrier: 127.0.0.1#P7 master said: rlse
-barrier: 127.0.0.1#P7 master said: wait
-barrier: 127.0.0.1#P7 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P7 pong
-barrier: 127.0.0.1#P7 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P7 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P7 was released, waiting for close
-barrier: 127.0.0.1#P8 master said: ping
-barrier: 127.0.0.1#P8 master said: rlse
-barrier: 127.0.0.1#P8 master said: wait
-barrier: 127.0.0.1#P8 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P8 pong
-barrier: 127.0.0.1#P8 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P8 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P8 was released, waiting for close
-barrier: 127.0.0.1#P9 master said: ping
-barrier: 127.0.0.1#P9 master said: rlse
-barrier: 127.0.0.1#P9 master said: wait
-barrier: 127.0.0.1#P9 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P2,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P9 pong
-barrier: 127.0.0.1#P9 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P9 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P9 was released, waiting for close
---SELFTEST-- exit 0
---SELFTEST-- loading test
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
diff --git a/client/bin/self-test/tests/111-barrier-unordered-20x b/client/bin/self-test/tests/111-barrier-unordered-20x
deleted file mode 100644
index c467e87..0000000
--- a/client/bin/self-test/tests/111-barrier-unordered-20x
+++ /dev/null
@@ -1,24 +0,0 @@
-import time
-import random
-
-print "--SELFTEST-- loading test"
-
-max = 20
-
-def payload1(id):
-	print "--STEPTEST-- payload1 start"
-
-	time.sleep(random.randint(1, 5))
-
-	b = job.barrier('127.0.0.1#P%d' % (id), 'here', 60)
-	b.rendezvous(*us)
-
-	print "--STEPTEST-- payload1 complete"
-
-us = []
-jobs = []
-for n in range(1, max+1):
-	us.append('127.0.0.1#P%d' % (n))
-	jobs.append([payload1, n])
-
-job.parallel(*jobs)
diff --git a/client/bin/self-test/tests/111-barrier-unordered-20x-out b/client/bin/self-test/tests/111-barrier-unordered-20x-out
deleted file mode 100644
index ddf7ac8..0000000
--- a/client/bin/self-test/tests/111-barrier-unordered-20x-out
+++ /dev/null
@@ -1,311 +0,0 @@
-barrier: 127.0.0.1#P10 master said: ping
-barrier: 127.0.0.1#P10 master said: rlse
-barrier: 127.0.0.1#P10 master said: wait
-barrier: 127.0.0.1#P10 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P10 pong
-barrier: 127.0.0.1#P10 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P10 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P10 was released, waiting for close
-barrier: 127.0.0.1#P11 master said: ping
-barrier: 127.0.0.1#P11 master said: rlse
-barrier: 127.0.0.1#P11 master said: wait
-barrier: 127.0.0.1#P11 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P11 pong
-barrier: 127.0.0.1#P11 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P11 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P11 was released, waiting for close
-barrier: 127.0.0.1#P12 master said: ping
-barrier: 127.0.0.1#P12 master said: rlse
-barrier: 127.0.0.1#P12 master said: wait
-barrier: 127.0.0.1#P12 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P12 pong
-barrier: 127.0.0.1#P12 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P12 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P12 was released, waiting for close
-barrier: 127.0.0.1#P13 master said: ping
-barrier: 127.0.0.1#P13 master said: rlse
-barrier: 127.0.0.1#P13 master said: wait
-barrier: 127.0.0.1#P13 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P13 pong
-barrier: 127.0.0.1#P13 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P13 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P13 was released, waiting for close
-barrier: 127.0.0.1#P14 master said: ping
-barrier: 127.0.0.1#P14 master said: rlse
-barrier: 127.0.0.1#P14 master said: wait
-barrier: 127.0.0.1#P14 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P14 pong
-barrier: 127.0.0.1#P14 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P14 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P14 was released, waiting for close
-barrier: 127.0.0.1#P15 master said: ping
-barrier: 127.0.0.1#P15 master said: rlse
-barrier: 127.0.0.1#P15 master said: wait
-barrier: 127.0.0.1#P15 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P15 pong
-barrier: 127.0.0.1#P15 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P15 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P15 was released, waiting for close
-barrier: 127.0.0.1#P16 master said: ping
-barrier: 127.0.0.1#P16 master said: rlse
-barrier: 127.0.0.1#P16 master said: wait
-barrier: 127.0.0.1#P16 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P16 pong
-barrier: 127.0.0.1#P16 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P16 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P16 was released, waiting for close
-barrier: 127.0.0.1#P17 master said: ping
-barrier: 127.0.0.1#P17 master said: rlse
-barrier: 127.0.0.1#P17 master said: wait
-barrier: 127.0.0.1#P17 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P17 pong
-barrier: 127.0.0.1#P17 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P17 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P17 was released, waiting for close
-barrier: 127.0.0.1#P18 master said: ping
-barrier: 127.0.0.1#P18 master said: rlse
-barrier: 127.0.0.1#P18 master said: wait
-barrier: 127.0.0.1#P18 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P18 pong
-barrier: 127.0.0.1#P18 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P18 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P18 was released, waiting for close
-barrier: 127.0.0.1#P19 master said: ping
-barrier: 127.0.0.1#P19 master said: rlse
-barrier: 127.0.0.1#P19 master said: wait
-barrier: 127.0.0.1#P19 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P19 pong
-barrier: 127.0.0.1#P19 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P19 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P19 was released, waiting for close
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P11
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P12
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P13
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P14
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P15
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P16
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P17
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P18
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P19
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P20
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P10 (127.0.0.1:33427)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P11 (127.0.0.1:33437)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P12 (127.0.0.1:33422)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P13 (127.0.0.1:33430)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P14 (127.0.0.1:33431)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P15 (127.0.0.1:33425)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P16 (127.0.0.1:33423)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P17 (127.0.0.1:33421)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P18 (127.0.0.1:33428)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P19 (127.0.0.1:33438)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P20 (127.0.0.1:33429)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P2 (127.0.0.1:33432)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P3 (127.0.0.1:33433)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P4 (127.0.0.1:33426)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P5 (127.0.0.1:33424)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P6 (127.0.0.1:33420)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P7 (127.0.0.1:33434)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P8 (127.0.0.1:33435)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P9 (127.0.0.1:33436)
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P11
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P12
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P13
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P14
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P15
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P16
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P17
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P18
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P19
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P20
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 master seen 10 of 20
-barrier: 127.0.0.1#P1 master seen 11 of 20
-barrier: 127.0.0.1#P1 master seen 12 of 20
-barrier: 127.0.0.1#P1 master seen 13 of 20
-barrier: 127.0.0.1#P1 master seen 14 of 20
-barrier: 127.0.0.1#P1 master seen 15 of 20
-barrier: 127.0.0.1#P1 master seen 16 of 20
-barrier: 127.0.0.1#P1 master seen 17 of 20
-barrier: 127.0.0.1#P1 master seen 18 of 20
-barrier: 127.0.0.1#P1 master seen 19 of 20
-barrier: 127.0.0.1#P1 master seen 20 of 20
-barrier: 127.0.0.1#P1 master seen 2 of 20
-barrier: 127.0.0.1#P1 master seen 3 of 20
-barrier: 127.0.0.1#P1 master seen 4 of 20
-barrier: 127.0.0.1#P1 master seen 5 of 20
-barrier: 127.0.0.1#P1 master seen 6 of 20
-barrier: 127.0.0.1#P1 master seen 7 of 20
-barrier: 127.0.0.1#P1 master seen 8 of 20
-barrier: 127.0.0.1#P1 master seen 9 of 20
-barrier: 127.0.0.1#P1 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P10
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P11
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P12
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P13
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P14
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P15
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P16
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P17
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P18
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P19
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P2
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P20
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P3
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P4
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P5
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P6
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P7
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P8
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P9
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P11
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P12
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P13
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P14
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P15
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P16
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P17
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P18
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P19
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P20
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 selected as master
-barrier: 127.0.0.1#P1 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P20 master said: ping
-barrier: 127.0.0.1#P20 master said: rlse
-barrier: 127.0.0.1#P20 master said: wait
-barrier: 127.0.0.1#P20 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P20 pong
-barrier: 127.0.0.1#P20 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P20 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P20 was released, waiting for close
-barrier: 127.0.0.1#P2 master said: ping
-barrier: 127.0.0.1#P2 master said: rlse
-barrier: 127.0.0.1#P2 master said: wait
-barrier: 127.0.0.1#P2 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P2 pong
-barrier: 127.0.0.1#P2 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P2 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P2 was released, waiting for close
-barrier: 127.0.0.1#P3 master said: ping
-barrier: 127.0.0.1#P3 master said: rlse
-barrier: 127.0.0.1#P3 master said: wait
-barrier: 127.0.0.1#P3 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P3 pong
-barrier: 127.0.0.1#P3 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P3 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P3 was released, waiting for close
-barrier: 127.0.0.1#P4 master said: ping
-barrier: 127.0.0.1#P4 master said: rlse
-barrier: 127.0.0.1#P4 master said: wait
-barrier: 127.0.0.1#P4 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P4 pong
-barrier: 127.0.0.1#P4 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P4 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P4 was released, waiting for close
-barrier: 127.0.0.1#P5 master said: ping
-barrier: 127.0.0.1#P5 master said: rlse
-barrier: 127.0.0.1#P5 master said: wait
-barrier: 127.0.0.1#P5 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P5 pong
-barrier: 127.0.0.1#P5 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P5 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P5 was released, waiting for close
-barrier: 127.0.0.1#P6 master said: ping
-barrier: 127.0.0.1#P6 master said: rlse
-barrier: 127.0.0.1#P6 master said: wait
-barrier: 127.0.0.1#P6 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P6 pong
-barrier: 127.0.0.1#P6 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P6 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P6 was released, waiting for close
-barrier: 127.0.0.1#P7 master said: ping
-barrier: 127.0.0.1#P7 master said: rlse
-barrier: 127.0.0.1#P7 master said: wait
-barrier: 127.0.0.1#P7 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P7 pong
-barrier: 127.0.0.1#P7 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P7 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P7 was released, waiting for close
-barrier: 127.0.0.1#P8 master said: ping
-barrier: 127.0.0.1#P8 master said: rlse
-barrier: 127.0.0.1#P8 master said: wait
-barrier: 127.0.0.1#P8 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P8 pong
-barrier: 127.0.0.1#P8 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P8 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P8 was released, waiting for close
-barrier: 127.0.0.1#P9 master said: ping
-barrier: 127.0.0.1#P9 master said: rlse
-barrier: 127.0.0.1#P9 master said: wait
-barrier: 127.0.0.1#P9 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P3,127.0.0.1#P4,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P9 pong
-barrier: 127.0.0.1#P9 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P9 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P9 was released, waiting for close
---SELFTEST-- exit 0
---SELFTEST-- loading test
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
diff --git a/client/bin/self-test/tests/112-barrier-unordered-40x b/client/bin/self-test/tests/112-barrier-unordered-40x
deleted file mode 100644
index 5880706..0000000
--- a/client/bin/self-test/tests/112-barrier-unordered-40x
+++ /dev/null
@@ -1,24 +0,0 @@
-import time
-import random
-
-print "--SELFTEST-- loading test"
-
-max = 40
-
-def payload1(id):
-	print "--STEPTEST-- payload1 start"
-
-	time.sleep(random.randint(1, 5))
-
-	b = job.barrier('127.0.0.1#P%d' % (id), 'here', 60)
-	b.rendezvous(*us)
-
-	print "--STEPTEST-- payload1 complete"
-
-us = []
-jobs = []
-for n in range(1, max+1):
-	us.append('127.0.0.1#P%d' % (n))
-	jobs.append([payload1, n])
-
-job.parallel(*jobs)
diff --git a/client/bin/self-test/tests/112-barrier-unordered-40x-out b/client/bin/self-test/tests/112-barrier-unordered-40x-out
deleted file mode 100644
index d338807..0000000
--- a/client/bin/self-test/tests/112-barrier-unordered-40x-out
+++ /dev/null
@@ -1,631 +0,0 @@
-barrier: 127.0.0.1#P10 master said: ping
-barrier: 127.0.0.1#P10 master said: rlse
-barrier: 127.0.0.1#P10 master said: wait
-barrier: 127.0.0.1#P10 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P10 pong
-barrier: 127.0.0.1#P10 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P10 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P10 was released, waiting for close
-barrier: 127.0.0.1#P11 master said: ping
-barrier: 127.0.0.1#P11 master said: rlse
-barrier: 127.0.0.1#P11 master said: wait
-barrier: 127.0.0.1#P11 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P11 pong
-barrier: 127.0.0.1#P11 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P11 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P11 was released, waiting for close
-barrier: 127.0.0.1#P12 master said: ping
-barrier: 127.0.0.1#P12 master said: rlse
-barrier: 127.0.0.1#P12 master said: wait
-barrier: 127.0.0.1#P12 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P12 pong
-barrier: 127.0.0.1#P12 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P12 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P12 was released, waiting for close
-barrier: 127.0.0.1#P13 master said: ping
-barrier: 127.0.0.1#P13 master said: rlse
-barrier: 127.0.0.1#P13 master said: wait
-barrier: 127.0.0.1#P13 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P13 pong
-barrier: 127.0.0.1#P13 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P13 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P13 was released, waiting for close
-barrier: 127.0.0.1#P14 master said: ping
-barrier: 127.0.0.1#P14 master said: rlse
-barrier: 127.0.0.1#P14 master said: wait
-barrier: 127.0.0.1#P14 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P14 pong
-barrier: 127.0.0.1#P14 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P14 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P14 was released, waiting for close
-barrier: 127.0.0.1#P15 master said: ping
-barrier: 127.0.0.1#P15 master said: rlse
-barrier: 127.0.0.1#P15 master said: wait
-barrier: 127.0.0.1#P15 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P15 pong
-barrier: 127.0.0.1#P15 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P15 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P15 was released, waiting for close
-barrier: 127.0.0.1#P16 master said: ping
-barrier: 127.0.0.1#P16 master said: rlse
-barrier: 127.0.0.1#P16 master said: wait
-barrier: 127.0.0.1#P16 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P16 pong
-barrier: 127.0.0.1#P16 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P16 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P16 was released, waiting for close
-barrier: 127.0.0.1#P17 master said: ping
-barrier: 127.0.0.1#P17 master said: rlse
-barrier: 127.0.0.1#P17 master said: wait
-barrier: 127.0.0.1#P17 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P17 pong
-barrier: 127.0.0.1#P17 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P17 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P17 was released, waiting for close
-barrier: 127.0.0.1#P18 master said: ping
-barrier: 127.0.0.1#P18 master said: rlse
-barrier: 127.0.0.1#P18 master said: wait
-barrier: 127.0.0.1#P18 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P18 pong
-barrier: 127.0.0.1#P18 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P18 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P18 was released, waiting for close
-barrier: 127.0.0.1#P19 master said: ping
-barrier: 127.0.0.1#P19 master said: rlse
-barrier: 127.0.0.1#P19 master said: wait
-barrier: 127.0.0.1#P19 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P19 pong
-barrier: 127.0.0.1#P19 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P19 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P19 was released, waiting for close
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P11
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P12
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P13
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P14
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P15
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P16
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P17
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P18
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P19
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P20
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P21
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P22
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P23
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P24
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P25
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P26
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P27
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P28
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P29
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P30
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P31
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P32
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P33
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P34
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P35
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P36
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P37
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P38
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P39
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P40
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P10 (127.0.0.1:33454)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P11 (127.0.0.1:33471)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P12 (127.0.0.1:33465)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P13 (127.0.0.1:33456)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P14 (127.0.0.1:33457)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P15 (127.0.0.1:33485)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P16 (127.0.0.1:33459)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P17 (127.0.0.1:33468)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P18 (127.0.0.1:33481)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P19 (127.0.0.1:33482)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P20 (127.0.0.1:33469)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P21 (127.0.0.1:33467)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P2 (127.0.0.1:33478)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P22 (127.0.0.1:33458)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P23 (127.0.0.1:33479)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P24 (127.0.0.1:33452)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P25 (127.0.0.1:33460)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P26 (127.0.0.1:33483)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P27 (127.0.0.1:33462)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P28 (127.0.0.1:33463)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P29 (127.0.0.1:33449)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P30 (127.0.0.1:33450)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P31 (127.0.0.1:33475)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P3 (127.0.0.1:33472)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P32 (127.0.0.1:33451)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P33 (127.0.0.1:33476)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P34 (127.0.0.1:33461)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P35 (127.0.0.1:33480)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P36 (127.0.0.1:33477)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P37 (127.0.0.1:33453)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P38 (127.0.0.1:33470)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P39 (127.0.0.1:33484)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P40 (127.0.0.1:33464)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P4 (127.0.0.1:33455)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P5 (127.0.0.1:33447)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P6 (127.0.0.1:33448)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P7 (127.0.0.1:33473)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P8 (127.0.0.1:33466)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P9 (127.0.0.1:33474)
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P11
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P12
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P13
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P14
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P15
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P16
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P17
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P18
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P19
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P20
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P21
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P22
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P23
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P24
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P25
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P26
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P27
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P28
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P29
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P30
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P31
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P32
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P33
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P34
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P35
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P36
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P37
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P38
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P39
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P40
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 master seen 10 of 40
-barrier: 127.0.0.1#P1 master seen 11 of 40
-barrier: 127.0.0.1#P1 master seen 12 of 40
-barrier: 127.0.0.1#P1 master seen 13 of 40
-barrier: 127.0.0.1#P1 master seen 14 of 40
-barrier: 127.0.0.1#P1 master seen 15 of 40
-barrier: 127.0.0.1#P1 master seen 16 of 40
-barrier: 127.0.0.1#P1 master seen 17 of 40
-barrier: 127.0.0.1#P1 master seen 18 of 40
-barrier: 127.0.0.1#P1 master seen 19 of 40
-barrier: 127.0.0.1#P1 master seen 20 of 40
-barrier: 127.0.0.1#P1 master seen 21 of 40
-barrier: 127.0.0.1#P1 master seen 22 of 40
-barrier: 127.0.0.1#P1 master seen 23 of 40
-barrier: 127.0.0.1#P1 master seen 24 of 40
-barrier: 127.0.0.1#P1 master seen 25 of 40
-barrier: 127.0.0.1#P1 master seen 26 of 40
-barrier: 127.0.0.1#P1 master seen 27 of 40
-barrier: 127.0.0.1#P1 master seen 28 of 40
-barrier: 127.0.0.1#P1 master seen 29 of 40
-barrier: 127.0.0.1#P1 master seen 2 of 40
-barrier: 127.0.0.1#P1 master seen 30 of 40
-barrier: 127.0.0.1#P1 master seen 31 of 40
-barrier: 127.0.0.1#P1 master seen 32 of 40
-barrier: 127.0.0.1#P1 master seen 33 of 40
-barrier: 127.0.0.1#P1 master seen 34 of 40
-barrier: 127.0.0.1#P1 master seen 35 of 40
-barrier: 127.0.0.1#P1 master seen 36 of 40
-barrier: 127.0.0.1#P1 master seen 37 of 40
-barrier: 127.0.0.1#P1 master seen 38 of 40
-barrier: 127.0.0.1#P1 master seen 39 of 40
-barrier: 127.0.0.1#P1 master seen 3 of 40
-barrier: 127.0.0.1#P1 master seen 40 of 40
-barrier: 127.0.0.1#P1 master seen 4 of 40
-barrier: 127.0.0.1#P1 master seen 5 of 40
-barrier: 127.0.0.1#P1 master seen 6 of 40
-barrier: 127.0.0.1#P1 master seen 7 of 40
-barrier: 127.0.0.1#P1 master seen 8 of 40
-barrier: 127.0.0.1#P1 master seen 9 of 40
-barrier: 127.0.0.1#P1 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P10
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P11
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P12
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P13
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P14
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P15
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P16
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P17
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P18
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P19
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P2
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P20
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P21
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P22
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P23
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P24
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P25
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P26
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P27
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P28
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P29
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P3
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P30
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P31
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P32
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P33
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P34
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P35
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P36
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P37
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P38
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P39
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P4
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P40
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P5
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P6
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P7
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P8
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P9
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P11
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P12
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P13
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P14
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P15
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P16
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P17
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P18
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P19
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P20
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P21
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P22
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P23
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P24
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P25
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P26
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P27
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P28
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P29
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P30
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P31
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P32
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P33
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P34
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P35
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P36
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P37
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P38
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P39
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P40
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 selected as master
-barrier: 127.0.0.1#P1 --STEPTEST-- payload1 complete
-barrier: 127.0.0.1#P1 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P20 master said: ping
-barrier: 127.0.0.1#P20 master said: rlse
-barrier: 127.0.0.1#P20 master said: wait
-barrier: 127.0.0.1#P20 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P20 pong
-barrier: 127.0.0.1#P20 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P20 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P20 was released, waiting for close
-barrier: 127.0.0.1#P21 master said: ping
-barrier: 127.0.0.1#P21 master said: rlse
-barrier: 127.0.0.1#P21 master said: wait
-barrier: 127.0.0.1#P21 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P21 pong
-barrier: 127.0.0.1#P21 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P21 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P21 was released, waiting for close
-barrier: 127.0.0.1#P22 master said: ping
-barrier: 127.0.0.1#P22 master said: rlse
-barrier: 127.0.0.1#P22 master said: wait
-barrier: 127.0.0.1#P22 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P22 pong
-barrier: 127.0.0.1#P22 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P22 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P22 was released, waiting for close
-barrier: 127.0.0.1#P23 master said: ping
-barrier: 127.0.0.1#P23 master said: rlse
-barrier: 127.0.0.1#P23 master said: wait
-barrier: 127.0.0.1#P23 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P23 pong
-barrier: 127.0.0.1#P23 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P23 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P23 was released, waiting for close
-barrier: 127.0.0.1#P24 master said: ping
-barrier: 127.0.0.1#P24 master said: rlse
-barrier: 127.0.0.1#P24 master said: wait
-barrier: 127.0.0.1#P24 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P24 pong
-barrier: 127.0.0.1#P24 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P24 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P24 was released, waiting for close
-barrier: 127.0.0.1#P25 master said: ping
-barrier: 127.0.0.1#P25 master said: rlse
-barrier: 127.0.0.1#P25 master said: wait
-barrier: 127.0.0.1#P25 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P25 pong
-barrier: 127.0.0.1#P25 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P25 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P25 was released, waiting for close
-barrier: 127.0.0.1#P26 master said: ping
-barrier: 127.0.0.1#P26 master said: rlse
-barrier: 127.0.0.1#P26 master said: wait
-barrier: 127.0.0.1#P26 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P26 pong
-barrier: 127.0.0.1#P26 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P26 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P26 was released, waiting for close
-barrier: 127.0.0.1#P27 master said: ping
-barrier: 127.0.0.1#P27 master said: rlse
-barrier: 127.0.0.1#P27 master said: wait
-barrier: 127.0.0.1#P27 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P27 pong
-barrier: 127.0.0.1#P27 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P27 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P27 was released, waiting for close
-barrier: 127.0.0.1#P28 master said: ping
-barrier: 127.0.0.1#P28 master said: rlse
-barrier: 127.0.0.1#P28 master said: wait
-barrier: 127.0.0.1#P28 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P28 pong
-barrier: 127.0.0.1#P28 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P28 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P28 was released, waiting for close
-barrier: 127.0.0.1#P29 master said: ping
-barrier: 127.0.0.1#P29 master said: rlse
-barrier: 127.0.0.1#P29 master said: wait
-barrier: 127.0.0.1#P29 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P29 pong
-barrier: 127.0.0.1#P29 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P29 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P29 was released, waiting for close
-barrier: 127.0.0.1#P2 master said: ping
-barrier: 127.0.0.1#P2 master said: rlse
-barrier: 127.0.0.1#P2 master said: wait
-barrier: 127.0.0.1#P2 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P2 pong
-barrier: 127.0.0.1#P2 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P2 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P2 was released, waiting for close
-barrier: 127.0.0.1#P30 master said: ping
-barrier: 127.0.0.1#P30 master said: rlse
-barrier: 127.0.0.1#P30 master said: wait
-barrier: 127.0.0.1#P30 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P30 pong
-barrier: 127.0.0.1#P30 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P30 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P30 was released, waiting for close
-barrier: 127.0.0.1#P31 master said: ping
-barrier: 127.0.0.1#P31 master said: rlse
-barrier: 127.0.0.1#P31 master said: wait
-barrier: 127.0.0.1#P31 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P31 pong
-barrier: 127.0.0.1#P31 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P31 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P31 was released, waiting for close
-barrier: 127.0.0.1#P32 master said: ping
-barrier: 127.0.0.1#P32 master said: rlse
-barrier: 127.0.0.1#P32 master said: wait
-barrier: 127.0.0.1#P32 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P32 pong
-barrier: 127.0.0.1#P32 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P32 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P32 was released, waiting for close
-barrier: 127.0.0.1#P33 master said: ping
-barrier: 127.0.0.1#P33 master said: rlse
-barrier: 127.0.0.1#P33 master said: wait
-barrier: 127.0.0.1#P33 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P33 pong
-barrier: 127.0.0.1#P33 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P33 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P33 was released, waiting for close
-barrier: 127.0.0.1#P34 master said: ping
-barrier: 127.0.0.1#P34 master said: rlse
-barrier: 127.0.0.1#P34 master said: wait
-barrier: 127.0.0.1#P34 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P34 pong
-barrier: 127.0.0.1#P34 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P34 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P34 was released, waiting for close
-barrier: 127.0.0.1#P35 master said: ping
-barrier: 127.0.0.1#P35 master said: rlse
-barrier: 127.0.0.1#P35 master said: wait
-barrier: 127.0.0.1#P35 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P35 pong
-barrier: 127.0.0.1#P35 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P35 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P35 was released, waiting for close
-barrier: 127.0.0.1#P36 master said: ping
-barrier: 127.0.0.1#P36 master said: rlse
-barrier: 127.0.0.1#P36 master said: wait
-barrier: 127.0.0.1#P36 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P36 pong
-barrier: 127.0.0.1#P36 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P36 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P36 was released, waiting for close
-barrier: 127.0.0.1#P37 master said: ping
-barrier: 127.0.0.1#P37 master said: rlse
-barrier: 127.0.0.1#P37 master said: wait
-barrier: 127.0.0.1#P37 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P37 pong
-barrier: 127.0.0.1#P37 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P37 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P37 was released, waiting for close
-barrier: 127.0.0.1#P38 master said: ping
-barrier: 127.0.0.1#P38 master said: rlse
-barrier: 127.0.0.1#P38 master said: wait
-barrier: 127.0.0.1#P38 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P38 pong
-barrier: 127.0.0.1#P38 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P38 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P38 was released, waiting for close
-barrier: 127.0.0.1#P39 master said: ping
-barrier: 127.0.0.1#P39 master said: rlse
-barrier: 127.0.0.1#P39 master said: wait
-barrier: 127.0.0.1#P39 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P39 pong
-barrier: 127.0.0.1#P39 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P39 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P39 was released, waiting for close
-barrier: 127.0.0.1#P3 master said: ping
-barrier: 127.0.0.1#P3 master said: rlse
-barrier: 127.0.0.1#P3 master said: wait
-barrier: 127.0.0.1#P3 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P3 pong
-barrier: 127.0.0.1#P3 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P3 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P3 was released, waiting for close
-barrier: 127.0.0.1#P40 master said: ping
-barrier: 127.0.0.1#P40 master said: rlse
-barrier: 127.0.0.1#P40 master said: wait
-barrier: 127.0.0.1#P40 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P40 pong
-barrier: 127.0.0.1#P40 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P40 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P40 was released, waiting for close
-barrier: 127.0.0.1#P4 master said: ping
-barrier: 127.0.0.1#P4 master said: rlse
-barrier: 127.0.0.1#P4 master said: wait
-barrier: 127.0.0.1#P4 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P4 pong
-barrier: 127.0.0.1#P4 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P4 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P4 was released, waiting for close
-barrier: 127.0.0.1#P5 master said: ping
-barrier: 127.0.0.1#P5 master said: rlse
-barrier: 127.0.0.1#P5 master said: wait
-barrier: 127.0.0.1#P5 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P5 pong
-barrier: 127.0.0.1#P5 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P5 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P5 was released, waiting for close
-barrier: 127.0.0.1#P6 master said: ping
-barrier: 127.0.0.1#P6 master said: rlse
-barrier: 127.0.0.1#P6 master said: wait
-barrier: 127.0.0.1#P6 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P6 pong
-barrier: 127.0.0.1#P6 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P6 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P6 was released, waiting for close
-barrier: 127.0.0.1#P7 master said: ping
-barrier: 127.0.0.1#P7 master said: rlse
-barrier: 127.0.0.1#P7 master said: wait
-barrier: 127.0.0.1#P7 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P7 pong
-barrier: 127.0.0.1#P7 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P7 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P7 was released, waiting for close
-barrier: 127.0.0.1#P8 master said: ping
-barrier: 127.0.0.1#P8 master said: rlse
-barrier: 127.0.0.1#P8 master said: wait
-barrier: 127.0.0.1#P8 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P8 pong
-barrier: 127.0.0.1#P8 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P8 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P8 was released, waiting for close
-barrier: 127.0.0.1#P9 master said: ping
-barrier: 127.0.0.1#P9 master said: rlse
-barrier: 127.0.0.1#P9 master said: wait
-barrier: 127.0.0.1#P9 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P5,127.0.0.1#P6,127.0.0.1#P7,127.0.0.1#P8,127.0.0.1#P9
-barrier: 127.0.0.1#P9 pong
-barrier: 127.0.0.1#P9 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P9 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P9 was released, waiting for close
-closing client: 127.0.0.1#P4
---SELFTEST-- exit 0
---SELFTEST-- loading test
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
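The deleted transcript above (sorted alphabetically by the self-test harness) records the barrier handshake: each member registers with the elected master, the master pings it, the member pongs, the master counts "seen N of 40", tells it to wait, and finally sends rlse and closes the connection. The following is a toy, in-process rendition of that ping/pong -> wait -> rlse flow, written only to make the message sequence readable. It is not the real implementation: the actual autotest barrier speaks this protocol over TCP (port 63000 in the log) and elects the master by hostname ordering, neither of which this sketch reproduces.

# Toy sketch of the master/member handshake seen in the deleted
# 112-barrier-unordered-40x-out transcript.  In-process queues stand in for
# the TCP connections used by the real barrier code (an assumption for
# illustration only).

import queue
import threading

N_CLIENTS = 4   # the deleted test used 40 members; kept small here


def member(name, inbox, outbox):
    """One barrier member: answer the master's ping, then wait to be released."""
    while True:
        msg = inbox.get()
        print('barrier: %s master said: %s' % (name, msg))
        if msg == 'ping':
            print('barrier: %s pong' % name)
            outbox.put('pong')
        elif msg == 'rlse':
            print('barrier: %s was released, waiting for close' % name)
            return


def master(channels):
    """The elected master: confirm every member is present, then release them all."""
    seen = 0
    for name, (to_member, from_member) in channels.items():
        print('barrier: master checking client present: %s' % name)
        to_member.put('ping')
        assert from_member.get() == 'pong'
        to_member.put('wait')
        seen += 1
        print('barrier: master seen %d of %d' % (seen, len(channels)))
    for name, (to_member, _) in channels.items():
        print('barrier: master releasing client: %s' % name)
        to_member.put('rlse')


channels = {'127.0.0.1#P%d' % i: (queue.Queue(), queue.Queue())
            for i in range(1, N_CLIENTS + 1)}
threads = [threading.Thread(target=member, args=(name, inbox, outbox))
           for name, (inbox, outbox) in channels.items()]
for t in threads:
    t.start()
master(channels)
for t in threads:
    t.join()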
diff --git a/client/bin/self-test/tests/113-barrier-unordered-100x b/client/bin/self-test/tests/113-barrier-unordered-100x
deleted file mode 100644
index 858e9c3..0000000
--- a/client/bin/self-test/tests/113-barrier-unordered-100x
+++ /dev/null
@@ -1,24 +0,0 @@
-import time
-import random
-
-print "--SELFTEST-- loading test"
-
-max = 100
-
-def payload1(id):
-	print "--STEPTEST-- payload1 start"
-
-	time.sleep(random.randint(1, 5))
-
-	b = job.barrier('127.0.0.1#P%d' % (id), 'here', 60)
-	b.rendezvous(*us)
-
-	print "--STEPTEST-- payload1 complete"
-
-us = []
-jobs = []
-for n in range(1, max+1):
-	us.append('127.0.0.1#P%d' % (n))
-	jobs.append([payload1, n])
-
-job.parallel(*jobs)
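For readers unfamiliar with the harness API used in the hunk above: the deleted 113-barrier-unordered-100x test forked 100 payloads via job.parallel(), had each sleep a random 1-5 seconds, and then rendezvoused them all at a single job.barrier(). Below is a minimal, self-contained Python 3 sketch of the same unordered-rendezvous pattern; it swaps the autotest job.barrier()/job.parallel() machinery for the standard library's threading.Barrier, so it illustrates only the synchronisation pattern, not the harness API itself.

# Self-contained sketch of the rendezvous pattern exercised by the deleted
# test, using threading.Barrier as a stand-in for job.barrier() (assumption:
# the autotest harness is not available outside the self-test environment).

import random
import threading
import time

MAX = 100                                  # the deleted test used max = 100
barrier = threading.Barrier(MAX, timeout=60)


def payload1(member_id):
    print('--STEPTEST-- payload1 start (%s)' % member_id)
    time.sleep(random.randint(1, 5))       # members arrive in arbitrary order
    barrier.wait()                         # rendezvous point for all MAX members
    print('--STEPTEST-- payload1 complete (%s)' % member_id)


threads = [threading.Thread(target=payload1, args=('127.0.0.1#P%d' % n,))
           for n in range(1, MAX + 1)]
for t in threads:
    t.start()
for t in threads:
    t.join()

As in the deleted test, the completion messages interleave in whatever order the sleeps finish, which is exactly the "unordered" behaviour the expected-output file below was checking.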
diff --git a/client/bin/self-test/tests/113-barrier-unordered-100x-out b/client/bin/self-test/tests/113-barrier-unordered-100x-out
deleted file mode 100644
index 431fb07..0000000
--- a/client/bin/self-test/tests/113-barrier-unordered-100x-out
+++ /dev/null
@@ -1,1591 +0,0 @@
-barrier: 127.0.0.1#P100 master said: ping
-barrier: 127.0.0.1#P100 master said: rlse
-barrier: 127.0.0.1#P100 master said: wait
-barrier: 127.0.0.1#P100 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P100 pong
-barrier: 127.0.0.1#P100 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P100 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P100 was released, waiting for close
-barrier: 127.0.0.1#P10 master said: ping
-barrier: 127.0.0.1#P10 master said: rlse
-barrier: 127.0.0.1#P10 master said: wait
-barrier: 127.0.0.1#P10 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P10 pong
-barrier: 127.0.0.1#P10 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P10 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P10 was released, waiting for close
-barrier: 127.0.0.1#P11 master said: ping
-barrier: 127.0.0.1#P11 master said: rlse
-barrier: 127.0.0.1#P11 master said: wait
-barrier: 127.0.0.1#P11 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P11 pong
-barrier: 127.0.0.1#P11 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P11 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P11 was released, waiting for close
-barrier: 127.0.0.1#P12 master said: ping
-barrier: 127.0.0.1#P12 master said: rlse
-barrier: 127.0.0.1#P12 master said: wait
-barrier: 127.0.0.1#P12 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P12 pong
-barrier: 127.0.0.1#P12 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P12 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P12 was released, waiting for close
-barrier: 127.0.0.1#P13 master said: ping
-barrier: 127.0.0.1#P13 master said: rlse
-barrier: 127.0.0.1#P13 master said: wait
-barrier: 127.0.0.1#P13 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P13 pong
-barrier: 127.0.0.1#P13 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P13 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P13 was released, waiting for close
-barrier: 127.0.0.1#P14 master said: ping
-barrier: 127.0.0.1#P14 master said: rlse
-barrier: 127.0.0.1#P14 master said: wait
-barrier: 127.0.0.1#P14 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P14 pong
-barrier: 127.0.0.1#P14 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P14 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P14 was released, waiting for close
-barrier: 127.0.0.1#P15 master said: ping
-barrier: 127.0.0.1#P15 master said: rlse
-barrier: 127.0.0.1#P15 master said: wait
-barrier: 127.0.0.1#P15 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P15 pong
-barrier: 127.0.0.1#P15 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P15 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P15 was released, waiting for close
-barrier: 127.0.0.1#P16 master said: ping
-barrier: 127.0.0.1#P16 master said: rlse
-barrier: 127.0.0.1#P16 master said: wait
-barrier: 127.0.0.1#P16 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P16 pong
-barrier: 127.0.0.1#P16 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P16 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P16 was released, waiting for close
-barrier: 127.0.0.1#P17 master said: ping
-barrier: 127.0.0.1#P17 master said: rlse
-barrier: 127.0.0.1#P17 master said: wait
-barrier: 127.0.0.1#P17 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P17 pong
-barrier: 127.0.0.1#P17 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P17 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P17 was released, waiting for close
-barrier: 127.0.0.1#P18 master said: ping
-barrier: 127.0.0.1#P18 master said: rlse
-barrier: 127.0.0.1#P18 master said: wait
-barrier: 127.0.0.1#P18 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P18 pong
-barrier: 127.0.0.1#P18 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P18 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P18 was released, waiting for close
-barrier: 127.0.0.1#P19 master said: ping
-barrier: 127.0.0.1#P19 master said: rlse
-barrier: 127.0.0.1#P19 master said: wait
-barrier: 127.0.0.1#P19 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P19 pong
-barrier: 127.0.0.1#P19 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P19 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P19 was released, waiting for close
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P100
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P11
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P12
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P13
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P14
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P15
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P16
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P17
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P18
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P19
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P20
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P21
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P22
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P23
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P24
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P25
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P26
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P27
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P28
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P29
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P30
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P31
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P32
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P33
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P34
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P35
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P36
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P37
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P38
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P39
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P40
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P41
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P42
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P43
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P44
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P45
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P46
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P47
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P48
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P49
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P50
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P51
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P52
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P53
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P54
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P55
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P56
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P57
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P58
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P59
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P60
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P61
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P62
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P63
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P64
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P65
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P66
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P67
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P68
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P69
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P70
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P71
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P72
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P73
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P74
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P75
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P76
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P77
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P78
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P79
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P80
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P81
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P82
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P83
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P84
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P85
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P86
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P87
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P88
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P89
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P90
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P91
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P92
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P93
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P94
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P95
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P96
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P97
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P98
-barrier: 127.0.0.1#P1 checking client present: 127.0.0.1#P99
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P100 (127.0.0.1:33657)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P10 (127.0.0.1:33590)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P11 (127.0.0.1:33619)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P12 (127.0.0.1:33585)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P13 (127.0.0.1:33600)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P14 (127.0.0.1:33563)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P15 (127.0.0.1:33618)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P16 (127.0.0.1:33559)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P17 (127.0.0.1:33620)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P18 (127.0.0.1:33561)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P19 (127.0.0.1:33639)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P20 (127.0.0.1:33589)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P21 (127.0.0.1:33567)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P2 (127.0.0.1:33634)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P22 (127.0.0.1:33560)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P23 (127.0.0.1:33587)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P24 (127.0.0.1:33601)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P25 (127.0.0.1:33615)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P26 (127.0.0.1:33640)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P27 (127.0.0.1:33565)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P28 (127.0.0.1:33613)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P29 (127.0.0.1:33586)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P30 (127.0.0.1:33599)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P31 (127.0.0.1:33637)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P3 (127.0.0.1:33602)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P32 (127.0.0.1:33638)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P33 (127.0.0.1:33605)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P34 (127.0.0.1:33616)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P35 (127.0.0.1:33606)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P36 (127.0.0.1:33566)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P37 (127.0.0.1:33598)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P38 (127.0.0.1:33614)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P39 (127.0.0.1:33635)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P40 (127.0.0.1:33636)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P41 (127.0.0.1:33562)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P4 (127.0.0.1:33621)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P42 (127.0.0.1:33603)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P43 (127.0.0.1:33625)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P44 (127.0.0.1:33626)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P45 (127.0.0.1:33609)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P46 (127.0.0.1:33644)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P47 (127.0.0.1:33575)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P48 (127.0.0.1:33591)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P49 (127.0.0.1:33607)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P50 (127.0.0.1:33624)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P51 (127.0.0.1:33568)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P5 (127.0.0.1:33588)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P52 (127.0.0.1:33641)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P53 (127.0.0.1:33592)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P54 (127.0.0.1:33642)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P55 (127.0.0.1:33569)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P56 (127.0.0.1:33570)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P57 (127.0.0.1:33571)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P58 (127.0.0.1:33572)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P59 (127.0.0.1:33643)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P60 (127.0.0.1:33608)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P61 (127.0.0.1:33593)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P6 (127.0.0.1:33604)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P62 (127.0.0.1:33622)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P63 (127.0.0.1:33573)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P64 (127.0.0.1:33574)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P65 (127.0.0.1:33628)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P66 (127.0.0.1:33578)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P67 (127.0.0.1:33649)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P68 (127.0.0.1:33648)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P69 (127.0.0.1:33580)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P70 (127.0.0.1:33627)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P71 (127.0.0.1:33647)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P7 (127.0.0.1:33564)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P72 (127.0.0.1:33645)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P73 (127.0.0.1:33646)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P74 (127.0.0.1:33577)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P75 (127.0.0.1:33650)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P76 (127.0.0.1:33610)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P77 (127.0.0.1:33579)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P78 (127.0.0.1:33651)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P79 (127.0.0.1:33582)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P80 (127.0.0.1:33597)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P81 (127.0.0.1:33652)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P8 (127.0.0.1:33623)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P82 (127.0.0.1:33584)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P83 (127.0.0.1:33630)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P84 (127.0.0.1:33595)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P85 (127.0.0.1:33633)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P86 (127.0.0.1:33611)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P87 (127.0.0.1:33576)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P88 (127.0.0.1:33594)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P89 (127.0.0.1:33629)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P90 (127.0.0.1:33596)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P91 (127.0.0.1:33612)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P9 (127.0.0.1:33617)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P92 (127.0.0.1:33581)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P93 (127.0.0.1:33631)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P94 (127.0.0.1:33653)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P95 (127.0.0.1:33583)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P96 (127.0.0.1:33654)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P97 (127.0.0.1:33655)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P98 (127.0.0.1:33632)
-barrier: 127.0.0.1#P1 client now waiting: 127.0.0.1#P99 (127.0.0.1:33656)
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P100
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P11
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P12
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P13
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P14
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P15
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P16
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P17
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P18
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P19
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P20
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P21
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P22
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P23
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P24
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P25
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P26
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P27
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P28
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P29
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P30
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P31
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P32
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P33
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P34
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P35
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P36
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P37
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P38
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P39
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P40
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P41
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P42
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P43
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P44
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P45
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P46
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P47
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P48
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P49
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P50
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P51
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P52
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P53
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P54
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P55
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P56
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P57
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P58
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P59
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P60
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P61
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P62
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P63
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P64
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P65
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P66
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P67
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P68
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P69
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P70
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P71
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P72
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P73
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P74
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P75
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P76
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P77
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P78
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P79
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P80
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P81
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P82
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P83
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P84
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P85
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P86
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P87
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P88
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P89
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P90
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P91
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P92
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P93
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P94
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P95
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P96
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P97
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P98
-barrier: 127.0.0.1#P1 closing client: 127.0.0.1#P99
-barrier: 127.0.0.1#P1 master seen 100 of 100
-barrier: 127.0.0.1#P1 master seen 10 of 100
-barrier: 127.0.0.1#P1 master seen 11 of 100
-barrier: 127.0.0.1#P1 master seen 12 of 100
-barrier: 127.0.0.1#P1 master seen 13 of 100
-barrier: 127.0.0.1#P1 master seen 14 of 100
-barrier: 127.0.0.1#P1 master seen 15 of 100
-barrier: 127.0.0.1#P1 master seen 16 of 100
-barrier: 127.0.0.1#P1 master seen 17 of 100
-barrier: 127.0.0.1#P1 master seen 18 of 100
-barrier: 127.0.0.1#P1 master seen 19 of 100
-barrier: 127.0.0.1#P1 master seen 20 of 100
-barrier: 127.0.0.1#P1 master seen 21 of 100
-barrier: 127.0.0.1#P1 master seen 22 of 100
-barrier: 127.0.0.1#P1 master seen 23 of 100
-barrier: 127.0.0.1#P1 master seen 24 of 100
-barrier: 127.0.0.1#P1 master seen 25 of 100
-barrier: 127.0.0.1#P1 master seen 26 of 100
-barrier: 127.0.0.1#P1 master seen 27 of 100
-barrier: 127.0.0.1#P1 master seen 28 of 100
-barrier: 127.0.0.1#P1 master seen 29 of 100
-barrier: 127.0.0.1#P1 master seen 2 of 100
-barrier: 127.0.0.1#P1 master seen 30 of 100
-barrier: 127.0.0.1#P1 master seen 31 of 100
-barrier: 127.0.0.1#P1 master seen 32 of 100
-barrier: 127.0.0.1#P1 master seen 33 of 100
-barrier: 127.0.0.1#P1 master seen 34 of 100
-barrier: 127.0.0.1#P1 master seen 35 of 100
-barrier: 127.0.0.1#P1 master seen 36 of 100
-barrier: 127.0.0.1#P1 master seen 37 of 100
-barrier: 127.0.0.1#P1 master seen 38 of 100
-barrier: 127.0.0.1#P1 master seen 39 of 100
-barrier: 127.0.0.1#P1 master seen 3 of 100
-barrier: 127.0.0.1#P1 master seen 40 of 100
-barrier: 127.0.0.1#P1 master seen 41 of 100
-barrier: 127.0.0.1#P1 master seen 42 of 100
-barrier: 127.0.0.1#P1 master seen 43 of 100
-barrier: 127.0.0.1#P1 master seen 44 of 100
-barrier: 127.0.0.1#P1 master seen 45 of 100
-barrier: 127.0.0.1#P1 master seen 46 of 100
-barrier: 127.0.0.1#P1 master seen 47 of 100
-barrier: 127.0.0.1#P1 master seen 48 of 100
-barrier: 127.0.0.1#P1 master seen 49 of 100
-barrier: 127.0.0.1#P1 master seen 4 of 100
-barrier: 127.0.0.1#P1 master seen 50 of 100
-barrier: 127.0.0.1#P1 master seen 51 of 100
-barrier: 127.0.0.1#P1 master seen 52 of 100
-barrier: 127.0.0.1#P1 master seen 53 of 100
-barrier: 127.0.0.1#P1 master seen 54 of 100
-barrier: 127.0.0.1#P1 master seen 55 of 100
-barrier: 127.0.0.1#P1 master seen 56 of 100
-barrier: 127.0.0.1#P1 master seen 57 of 100
-barrier: 127.0.0.1#P1 master seen 58 of 100
-barrier: 127.0.0.1#P1 master seen 59 of 100
-barrier: 127.0.0.1#P1 master seen 5 of 100
-barrier: 127.0.0.1#P1 master seen 60 of 100
-barrier: 127.0.0.1#P1 master seen 61 of 100
-barrier: 127.0.0.1#P1 master seen 62 of 100
-barrier: 127.0.0.1#P1 master seen 63 of 100
-barrier: 127.0.0.1#P1 master seen 64 of 100
-barrier: 127.0.0.1#P1 master seen 65 of 100
-barrier: 127.0.0.1#P1 master seen 66 of 100
-barrier: 127.0.0.1#P1 master seen 67 of 100
-barrier: 127.0.0.1#P1 master seen 68 of 100
-barrier: 127.0.0.1#P1 master seen 69 of 100
-barrier: 127.0.0.1#P1 master seen 6 of 100
-barrier: 127.0.0.1#P1 master seen 70 of 100
-barrier: 127.0.0.1#P1 master seen 71 of 100
-barrier: 127.0.0.1#P1 master seen 72 of 100
-barrier: 127.0.0.1#P1 master seen 73 of 100
-barrier: 127.0.0.1#P1 master seen 74 of 100
-barrier: 127.0.0.1#P1 master seen 75 of 100
-barrier: 127.0.0.1#P1 master seen 76 of 100
-barrier: 127.0.0.1#P1 master seen 77 of 100
-barrier: 127.0.0.1#P1 master seen 78 of 100
-barrier: 127.0.0.1#P1 master seen 79 of 100
-barrier: 127.0.0.1#P1 master seen 7 of 100
-barrier: 127.0.0.1#P1 master seen 80 of 100
-barrier: 127.0.0.1#P1 master seen 81 of 100
-barrier: 127.0.0.1#P1 master seen 82 of 100
-barrier: 127.0.0.1#P1 master seen 83 of 100
-barrier: 127.0.0.1#P1 master seen 84 of 100
-barrier: 127.0.0.1#P1 master seen 85 of 100
-barrier: 127.0.0.1#P1 master seen 86 of 100
-barrier: 127.0.0.1#P1 master seen 87 of 100
-barrier: 127.0.0.1#P1 master seen 88 of 100
-barrier: 127.0.0.1#P1 master seen 89 of 100
-barrier: 127.0.0.1#P1 master seen 8 of 100
-barrier: 127.0.0.1#P1 master seen 90 of 100
-barrier: 127.0.0.1#P1 master seen 91 of 100
-barrier: 127.0.0.1#P1 master seen 92 of 100
-barrier: 127.0.0.1#P1 master seen 93 of 100
-barrier: 127.0.0.1#P1 master seen 94 of 100
-barrier: 127.0.0.1#P1 master seen 95 of 100
-barrier: 127.0.0.1#P1 master seen 96 of 100
-barrier: 127.0.0.1#P1 master seen 97 of 100
-barrier: 127.0.0.1#P1 master seen 98 of 100
-barrier: 127.0.0.1#P1 master seen 99 of 100
-barrier: 127.0.0.1#P1 master seen 9 of 100
-barrier: 127.0.0.1#P1 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P10
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P100
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P11
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P12
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P13
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P14
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P15
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P16
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P17
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P18
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P19
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P2
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P20
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P21
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P22
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P23
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P24
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P25
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P26
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P27
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P28
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P29
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P3
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P30
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P31
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P32
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P33
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P34
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P35
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P36
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P37
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P38
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P39
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P4
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P40
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P41
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P42
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P43
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P44
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P45
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P46
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P47
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P48
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P49
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P5
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P50
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P51
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P52
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P53
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P54
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P55
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P56
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P57
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P58
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P59
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P6
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P60
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P61
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P62
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P63
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P64
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P65
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P66
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P67
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P68
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P69
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P7
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P70
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P71
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P72
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P73
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P74
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P75
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P76
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P77
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P78
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P79
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P8
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P80
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P81
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P82
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P83
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P84
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P85
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P86
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P87
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P88
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P89
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P9
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P90
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P91
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P92
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P93
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P94
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P95
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P96
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P97
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P98
-barrier: 127.0.0.1#P1 new client tag=here, name=127.0.0.1#P99
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P10
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P100
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P11
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P12
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P13
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P14
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P15
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P16
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P17
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P18
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P19
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P2
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P20
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P21
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P22
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P23
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P24
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P25
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P26
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P27
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P28
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P29
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P3
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P30
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P31
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P32
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P33
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P34
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P35
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P36
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P37
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P38
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P39
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P4
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P40
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P41
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P42
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P43
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P44
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P45
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P46
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P47
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P48
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P49
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P5
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P50
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P51
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P52
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P53
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P54
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P55
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P56
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P57
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P58
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P59
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P6
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P60
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P61
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P62
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P63
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P64
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P65
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P66
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P67
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P68
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P69
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P7
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P70
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P71
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P72
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P73
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P74
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P75
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P76
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P77
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P78
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P79
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P8
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P80
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P81
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P82
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P83
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P84
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P85
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P86
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P87
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P88
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P89
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P9
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P90
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P91
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P92
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P93
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P94
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P95
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P96
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P97
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P98
-barrier: 127.0.0.1#P1 releasing client: 127.0.0.1#P99
-barrier: 127.0.0.1#P1 selected as master
-barrier: 127.0.0.1#P1 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P20 master said: ping
-barrier: 127.0.0.1#P20 master said: rlse
-barrier: 127.0.0.1#P20 master said: wait
-barrier: 127.0.0.1#P20 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P20 pong
-barrier: 127.0.0.1#P20 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P20 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P20 was released, waiting for close
-barrier: 127.0.0.1#P21 master said: ping
-barrier: 127.0.0.1#P21 master said: rlse
-barrier: 127.0.0.1#P21 master said: wait
-barrier: 127.0.0.1#P21 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P21 pong
-barrier: 127.0.0.1#P21 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P21 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P21 was released, waiting for close
-barrier: 127.0.0.1#P22 master said: ping
-barrier: 127.0.0.1#P22 master said: rlse
-barrier: 127.0.0.1#P22 master said: wait
-barrier: 127.0.0.1#P22 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P22 pong
-barrier: 127.0.0.1#P22 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P22 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P22 was released, waiting for close
-barrier: 127.0.0.1#P23 master said: ping
-barrier: 127.0.0.1#P23 master said: rlse
-barrier: 127.0.0.1#P23 master said: wait
-barrier: 127.0.0.1#P23 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P23 pong
-barrier: 127.0.0.1#P23 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P23 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P23 was released, waiting for close
-barrier: 127.0.0.1#P24 master said: ping
-barrier: 127.0.0.1#P24 master said: rlse
-barrier: 127.0.0.1#P24 master said: wait
-barrier: 127.0.0.1#P24 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P24 pong
-barrier: 127.0.0.1#P24 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P24 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P24 was released, waiting for close
-barrier: 127.0.0.1#P25 master said: ping
-barrier: 127.0.0.1#P25 master said: rlse
-barrier: 127.0.0.1#P25 master said: wait
-barrier: 127.0.0.1#P25 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P25 pong
-barrier: 127.0.0.1#P25 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P25 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P25 was released, waiting for close
-barrier: 127.0.0.1#P26 master said: ping
-barrier: 127.0.0.1#P26 master said: rlse
-barrier: 127.0.0.1#P26 master said: wait
-barrier: 127.0.0.1#P26 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P26 pong
-barrier: 127.0.0.1#P26 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P26 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P26 was released, waiting for close
-barrier: 127.0.0.1#P27 master said: ping
-barrier: 127.0.0.1#P27 master said: rlse
-barrier: 127.0.0.1#P27 master said: wait
-barrier: 127.0.0.1#P27 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P27 pong
-barrier: 127.0.0.1#P27 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P27 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P27 was released, waiting for close
-barrier: 127.0.0.1#P28 master said: ping
-barrier: 127.0.0.1#P28 master said: rlse
-barrier: 127.0.0.1#P28 master said: wait
-barrier: 127.0.0.1#P28 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P28 pong
-barrier: 127.0.0.1#P28 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P28 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P28 was released, waiting for close
-barrier: 127.0.0.1#P29 master said: ping
-barrier: 127.0.0.1#P29 master said: rlse
-barrier: 127.0.0.1#P29 master said: wait
-barrier: 127.0.0.1#P29 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P29 pong
-barrier: 127.0.0.1#P29 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P29 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P29 was released, waiting for close
-barrier: 127.0.0.1#P2 master said: ping
-barrier: 127.0.0.1#P2 master said: rlse
-barrier: 127.0.0.1#P2 master said: wait
-barrier: 127.0.0.1#P2 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P2 pong
-barrier: 127.0.0.1#P2 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P2 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P2 was released, waiting for close
-barrier: 127.0.0.1#P30 master said: ping
-barrier: 127.0.0.1#P30 master said: rlse
-barrier: 127.0.0.1#P30 master said: wait
-barrier: 127.0.0.1#P30 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P30 pong
-barrier: 127.0.0.1#P30 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P30 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P30 was released, waiting for close
-barrier: 127.0.0.1#P31 master said: ping
-barrier: 127.0.0.1#P31 master said: rlse
-barrier: 127.0.0.1#P31 master said: wait
-barrier: 127.0.0.1#P31 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P31 pong
-barrier: 127.0.0.1#P31 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P31 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P31 was released, waiting for close
-barrier: 127.0.0.1#P32 master said: ping
-barrier: 127.0.0.1#P32 master said: rlse
-barrier: 127.0.0.1#P32 master said: wait
-barrier: 127.0.0.1#P32 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P32 pong
-barrier: 127.0.0.1#P32 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P32 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P32 was released, waiting for close
-barrier: 127.0.0.1#P33 master said: ping
-barrier: 127.0.0.1#P33 master said: rlse
-barrier: 127.0.0.1#P33 master said: wait
-barrier: 127.0.0.1#P33 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P33 pong
-barrier: 127.0.0.1#P33 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P33 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P33 was released, waiting for close
-barrier: 127.0.0.1#P34 master said: ping
-barrier: 127.0.0.1#P34 master said: rlse
-barrier: 127.0.0.1#P34 master said: wait
-barrier: 127.0.0.1#P34 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P34 pong
-barrier: 127.0.0.1#P34 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P34 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P34 was released, waiting for close
-barrier: 127.0.0.1#P35 master said: ping
-barrier: 127.0.0.1#P35 master said: rlse
-barrier: 127.0.0.1#P35 master said: wait
-barrier: 127.0.0.1#P35 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P35 pong
-barrier: 127.0.0.1#P35 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P35 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P35 was released, waiting for close
-barrier: 127.0.0.1#P36 master said: ping
-barrier: 127.0.0.1#P36 master said: rlse
-barrier: 127.0.0.1#P36 master said: wait
-barrier: 127.0.0.1#P36 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P36 pong
-barrier: 127.0.0.1#P36 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P36 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P36 was released, waiting for close
-barrier: 127.0.0.1#P37 master said: ping
-barrier: 127.0.0.1#P37 master said: rlse
-barrier: 127.0.0.1#P37 master said: wait
-barrier: 127.0.0.1#P37 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P37 pong
-barrier: 127.0.0.1#P37 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P37 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P37 was released, waiting for close
-barrier: 127.0.0.1#P38 master said: ping
-barrier: 127.0.0.1#P38 master said: rlse
-barrier: 127.0.0.1#P38 master said: wait
-barrier: 127.0.0.1#P38 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P38 pong
-barrier: 127.0.0.1#P38 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P38 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P38 was released, waiting for close
-barrier: 127.0.0.1#P39 master said: ping
-barrier: 127.0.0.1#P39 master said: rlse
-barrier: 127.0.0.1#P39 master said: wait
-barrier: 127.0.0.1#P39 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P39 pong
-barrier: 127.0.0.1#P39 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P39 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P39 was released, waiting for close
-barrier: 127.0.0.1#P3 master said: ping
-barrier: 127.0.0.1#P3 master said: rlse
-barrier: 127.0.0.1#P3 master said: wait
-barrier: 127.0.0.1#P3 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P3 pong
-barrier: 127.0.0.1#P3 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P3 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P3 was released, waiting for close
-barrier: 127.0.0.1#P40 master said: ping
-barrier: 127.0.0.1#P40 master said: rlse
-barrier: 127.0.0.1#P40 master said: wait
-barrier: 127.0.0.1#P40 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P40 pong
-barrier: 127.0.0.1#P40 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P40 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P40 was released, waiting for close
-barrier: 127.0.0.1#P41 master said: ping
-barrier: 127.0.0.1#P41 master said: rlse
-barrier: 127.0.0.1#P41 master said: wait
-barrier: 127.0.0.1#P41 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P41 pong
-barrier: 127.0.0.1#P41 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P41 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P41 was released, waiting for close
-barrier: 127.0.0.1#P42 master said: ping
-barrier: 127.0.0.1#P42 master said: rlse
-barrier: 127.0.0.1#P42 master said: wait
-barrier: 127.0.0.1#P42 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P42 pong
-barrier: 127.0.0.1#P42 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P42 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P42 was released, waiting for close
-barrier: 127.0.0.1#P43 master said: ping
-barrier: 127.0.0.1#P43 master said: rlse
-barrier: 127.0.0.1#P43 master said: wait
-barrier: 127.0.0.1#P43 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P43 pong
-barrier: 127.0.0.1#P43 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P43 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P43 was released, waiting for close
-barrier: 127.0.0.1#P44 master said: ping
-barrier: 127.0.0.1#P44 master said: rlse
-barrier: 127.0.0.1#P44 master said: wait
-barrier: 127.0.0.1#P44 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P44 pong
-barrier: 127.0.0.1#P44 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P44 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P44 was released, waiting for close
-barrier: 127.0.0.1#P45 master said: ping
-barrier: 127.0.0.1#P45 master said: rlse
-barrier: 127.0.0.1#P45 master said: wait
-barrier: 127.0.0.1#P45 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P45 pong
-barrier: 127.0.0.1#P45 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P45 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P45 was released, waiting for close
-barrier: 127.0.0.1#P46 master said: ping
-barrier: 127.0.0.1#P46 master said: rlse
-barrier: 127.0.0.1#P46 master said: wait
-barrier: 127.0.0.1#P46 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P46 pong
-barrier: 127.0.0.1#P46 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P46 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P46 was released, waiting for close
-barrier: 127.0.0.1#P47 master said: ping
-barrier: 127.0.0.1#P47 master said: rlse
-barrier: 127.0.0.1#P47 master said: wait
-barrier: 127.0.0.1#P47 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P47 pong
-barrier: 127.0.0.1#P47 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P47 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P47 was released, waiting for close
-barrier: 127.0.0.1#P48 master said: ping
-barrier: 127.0.0.1#P48 master said: rlse
-barrier: 127.0.0.1#P48 master said: wait
-barrier: 127.0.0.1#P48 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P48 pong
-barrier: 127.0.0.1#P48 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P48 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P48 was released, waiting for close
-barrier: 127.0.0.1#P49 master said: ping
-barrier: 127.0.0.1#P49 master said: rlse
-barrier: 127.0.0.1#P49 master said: wait
-barrier: 127.0.0.1#P49 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P49 pong
-barrier: 127.0.0.1#P49 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P49 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P49 was released, waiting for close
-barrier: 127.0.0.1#P4 master said: ping
-barrier: 127.0.0.1#P4 master said: rlse
-barrier: 127.0.0.1#P4 master said: wait
-barrier: 127.0.0.1#P4 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P4 pong
-barrier: 127.0.0.1#P4 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P4 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P4 was released, waiting for close
-barrier: 127.0.0.1#P50 master said: ping
-barrier: 127.0.0.1#P50 master said: rlse
-barrier: 127.0.0.1#P50 master said: wait
-barrier: 127.0.0.1#P50 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P50 pong
-barrier: 127.0.0.1#P50 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P50 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P50 was released, waiting for close
-barrier: 127.0.0.1#P51 master said: ping
-barrier: 127.0.0.1#P51 master said: rlse
-barrier: 127.0.0.1#P51 master said: wait
-barrier: 127.0.0.1#P51 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P51 pong
-barrier: 127.0.0.1#P51 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P51 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P51 was released, waiting for close
-barrier: 127.0.0.1#P52 master said: ping
-barrier: 127.0.0.1#P52 master said: rlse
-barrier: 127.0.0.1#P52 master said: wait
-barrier: 127.0.0.1#P52 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P52 pong
-barrier: 127.0.0.1#P52 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P52 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P52 was released, waiting for close
-barrier: 127.0.0.1#P53 master said: ping
-barrier: 127.0.0.1#P53 master said: rlse
-barrier: 127.0.0.1#P53 master said: wait
-barrier: 127.0.0.1#P53 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P53 pong
-barrier: 127.0.0.1#P53 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P53 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P53 was released, waiting for close
-barrier: 127.0.0.1#P54 master said: ping
-barrier: 127.0.0.1#P54 master said: rlse
-barrier: 127.0.0.1#P54 master said: wait
-barrier: 127.0.0.1#P54 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P54 pong
-barrier: 127.0.0.1#P54 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P54 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P54 was released, waiting for close
-barrier: 127.0.0.1#P55 master said: ping
-barrier: 127.0.0.1#P55 master said: rlse
-barrier: 127.0.0.1#P55 master said: wait
-barrier: 127.0.0.1#P55 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P55 pong
-barrier: 127.0.0.1#P55 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P55 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P55 was released, waiting for close
-barrier: 127.0.0.1#P56 master said: ping
-barrier: 127.0.0.1#P56 master said: rlse
-barrier: 127.0.0.1#P56 master said: wait
-barrier: 127.0.0.1#P56 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P56 pong
-barrier: 127.0.0.1#P56 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P56 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P56 was released, waiting for close
-barrier: 127.0.0.1#P57 master said: ping
-barrier: 127.0.0.1#P57 master said: rlse
-barrier: 127.0.0.1#P57 master said: wait
-barrier: 127.0.0.1#P57 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P57 pong
-barrier: 127.0.0.1#P57 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P57 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P57 was released, waiting for close
-barrier: 127.0.0.1#P58 master said: ping
-barrier: 127.0.0.1#P58 master said: rlse
-barrier: 127.0.0.1#P58 master said: wait
-barrier: 127.0.0.1#P58 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P58 pong
-barrier: 127.0.0.1#P58 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P58 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P58 was released, waiting for close
-barrier: 127.0.0.1#P59 master said: ping
-barrier: 127.0.0.1#P59 master said: rlse
-barrier: 127.0.0.1#P59 master said: wait
-barrier: 127.0.0.1#P59 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P59 pong
-barrier: 127.0.0.1#P59 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P59 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P59 was released, waiting for close
-barrier: 127.0.0.1#P5 master said: ping
-barrier: 127.0.0.1#P5 master said: rlse
-barrier: 127.0.0.1#P5 master said: wait
-barrier: 127.0.0.1#P5 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P5 pong
-barrier: 127.0.0.1#P5 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P5 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P5 was released, waiting for close
-barrier: 127.0.0.1#P60 master said: ping
-barrier: 127.0.0.1#P60 master said: rlse
-barrier: 127.0.0.1#P60 master said: wait
-barrier: 127.0.0.1#P60 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P60 pong
-barrier: 127.0.0.1#P60 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P60 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P60 was released, waiting for close
-barrier: 127.0.0.1#P61 master said: ping
-barrier: 127.0.0.1#P61 master said: rlse
-barrier: 127.0.0.1#P61 master said: wait
-barrier: 127.0.0.1#P61 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P61 pong
-barrier: 127.0.0.1#P61 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P61 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P61 was released, waiting for close
-barrier: 127.0.0.1#P62 master said: ping
-barrier: 127.0.0.1#P62 master said: rlse
-barrier: 127.0.0.1#P62 master said: wait
-barrier: 127.0.0.1#P62 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P62 pong
-barrier: 127.0.0.1#P62 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P62 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P62 was released, waiting for close
-barrier: 127.0.0.1#P63 master said: ping
-barrier: 127.0.0.1#P63 master said: rlse
-barrier: 127.0.0.1#P63 master said: wait
-barrier: 127.0.0.1#P63 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P63 pong
-barrier: 127.0.0.1#P63 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P63 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P63 was released, waiting for close
-barrier: 127.0.0.1#P64 master said: ping
-barrier: 127.0.0.1#P64 master said: rlse
-barrier: 127.0.0.1#P64 master said: wait
-barrier: 127.0.0.1#P64 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P64 pong
-barrier: 127.0.0.1#P64 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P64 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P64 was released, waiting for close
-barrier: 127.0.0.1#P65 master said: ping
-barrier: 127.0.0.1#P65 master said: rlse
-barrier: 127.0.0.1#P65 master said: wait
-barrier: 127.0.0.1#P65 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P65 pong
-barrier: 127.0.0.1#P65 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P65 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P65 was released, waiting for close
-barrier: 127.0.0.1#P66 master said: ping
-barrier: 127.0.0.1#P66 master said: rlse
-barrier: 127.0.0.1#P66 master said: wait
-barrier: 127.0.0.1#P66 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P66 pong
-barrier: 127.0.0.1#P66 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P66 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P66 was released, waiting for close
-barrier: 127.0.0.1#P67 master said: ping
-barrier: 127.0.0.1#P67 master said: rlse
-barrier: 127.0.0.1#P67 master said: wait
-barrier: 127.0.0.1#P67 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P67 pong
-barrier: 127.0.0.1#P67 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P67 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P67 was released, waiting for close
-barrier: 127.0.0.1#P68 master said: ping
-barrier: 127.0.0.1#P68 master said: rlse
-barrier: 127.0.0.1#P68 master said: wait
-barrier: 127.0.0.1#P68 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P68 pong
-barrier: 127.0.0.1#P68 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P68 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P68 was released, waiting for close
-barrier: 127.0.0.1#P69 master said: ping
-barrier: 127.0.0.1#P69 master said: rlse
-barrier: 127.0.0.1#P69 master said: wait
-barrier: 127.0.0.1#P69 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P69 pong
-barrier: 127.0.0.1#P69 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P69 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P69 was released, waiting for close
-barrier: 127.0.0.1#P6 master said: ping
-barrier: 127.0.0.1#P6 master said: rlse
-barrier: 127.0.0.1#P6 master said: wait
-barrier: 127.0.0.1#P6 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P6 pong
-barrier: 127.0.0.1#P6 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P6 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P6 was released, waiting for close
-barrier: 127.0.0.1#P70 master said: ping
-barrier: 127.0.0.1#P70 master said: rlse
-barrier: 127.0.0.1#P70 master said: wait
-barrier: 127.0.0.1#P70 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P70 pong
-barrier: 127.0.0.1#P70 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P70 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P70 was released, waiting for close
-barrier: 127.0.0.1#P71 master said: ping
-barrier: 127.0.0.1#P71 master said: rlse
-barrier: 127.0.0.1#P71 master said: wait
-barrier: 127.0.0.1#P71 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P71 pong
-barrier: 127.0.0.1#P71 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P71 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P71 was released, waiting for close
-barrier: 127.0.0.1#P72 master said: ping
-barrier: 127.0.0.1#P72 master said: rlse
-barrier: 127.0.0.1#P72 master said: wait
-barrier: 127.0.0.1#P72 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P72 pong
-barrier: 127.0.0.1#P72 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P72 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P72 was released, waiting for close
-barrier: 127.0.0.1#P73 master said: ping
-barrier: 127.0.0.1#P73 master said: rlse
-barrier: 127.0.0.1#P73 master said: wait
-barrier: 127.0.0.1#P73 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P73 pong
-barrier: 127.0.0.1#P73 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P73 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P73 was released, waiting for close
-barrier: 127.0.0.1#P74 master said: ping
-barrier: 127.0.0.1#P74 master said: rlse
-barrier: 127.0.0.1#P74 master said: wait
-barrier: 127.0.0.1#P74 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P74 pong
-barrier: 127.0.0.1#P74 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P74 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P74 was released, waiting for close
-barrier: 127.0.0.1#P75 master said: ping
-barrier: 127.0.0.1#P75 master said: rlse
-barrier: 127.0.0.1#P75 master said: wait
-barrier: 127.0.0.1#P75 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P75 pong
-barrier: 127.0.0.1#P75 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P75 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P75 was released, waiting for close
-barrier: 127.0.0.1#P76 master said: ping
-barrier: 127.0.0.1#P76 master said: rlse
-barrier: 127.0.0.1#P76 master said: wait
-barrier: 127.0.0.1#P76 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P76 pong
-barrier: 127.0.0.1#P76 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P76 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P76 was released, waiting for close
-barrier: 127.0.0.1#P77 master said: ping
-barrier: 127.0.0.1#P77 master said: rlse
-barrier: 127.0.0.1#P77 master said: wait
-barrier: 127.0.0.1#P77 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P77 pong
-barrier: 127.0.0.1#P77 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P77 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P77 was released, waiting for close
-barrier: 127.0.0.1#P78 master said: ping
-barrier: 127.0.0.1#P78 master said: rlse
-barrier: 127.0.0.1#P78 master said: wait
-barrier: 127.0.0.1#P78 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P78 pong
-barrier: 127.0.0.1#P78 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P78 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P78 was released, waiting for close
-barrier: 127.0.0.1#P79 master said: ping
-barrier: 127.0.0.1#P79 master said: rlse
-barrier: 127.0.0.1#P79 master said: wait
-barrier: 127.0.0.1#P79 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P79 pong
-barrier: 127.0.0.1#P79 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P79 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P79 was released, waiting for close
-barrier: 127.0.0.1#P7 master said: ping
-barrier: 127.0.0.1#P7 master said: rlse
-barrier: 127.0.0.1#P7 master said: wait
-barrier: 127.0.0.1#P7 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P7 pong
-barrier: 127.0.0.1#P7 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P7 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P7 was released, waiting for close
-barrier: 127.0.0.1#P80 master said: ping
-barrier: 127.0.0.1#P80 master said: rlse
-barrier: 127.0.0.1#P80 master said: wait
-barrier: 127.0.0.1#P80 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P80 pong
-barrier: 127.0.0.1#P80 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P80 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P80 was released, waiting for close
-barrier: 127.0.0.1#P81 master said: ping
-barrier: 127.0.0.1#P81 master said: rlse
-barrier: 127.0.0.1#P81 master said: wait
-barrier: 127.0.0.1#P81 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P81 pong
-barrier: 127.0.0.1#P81 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P81 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P81 was released, waiting for close
-barrier: 127.0.0.1#P82 master said: ping
-barrier: 127.0.0.1#P82 master said: rlse
-barrier: 127.0.0.1#P82 master said: wait
-barrier: 127.0.0.1#P82 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P82 pong
-barrier: 127.0.0.1#P82 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P82 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P82 was released, waiting for close
-barrier: 127.0.0.1#P83 master said: ping
-barrier: 127.0.0.1#P83 master said: rlse
-barrier: 127.0.0.1#P83 master said: wait
-barrier: 127.0.0.1#P83 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P83 pong
-barrier: 127.0.0.1#P83 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P83 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P83 was released, waiting for close
-barrier: 127.0.0.1#P84 master said: ping
-barrier: 127.0.0.1#P84 master said: rlse
-barrier: 127.0.0.1#P84 master said: wait
-barrier: 127.0.0.1#P84 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P84 pong
-barrier: 127.0.0.1#P84 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P84 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P84 was released, waiting for close
-barrier: 127.0.0.1#P85 master said: ping
-barrier: 127.0.0.1#P85 master said: rlse
-barrier: 127.0.0.1#P85 master said: wait
-barrier: 127.0.0.1#P85 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P85 pong
-barrier: 127.0.0.1#P85 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P85 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P85 was released, waiting for close
-barrier: 127.0.0.1#P86 master said: ping
-barrier: 127.0.0.1#P86 master said: rlse
-barrier: 127.0.0.1#P86 master said: wait
-barrier: 127.0.0.1#P86 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P86 pong
-barrier: 127.0.0.1#P86 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P86 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P86 was released, waiting for close
-barrier: 127.0.0.1#P87 master said: ping
-barrier: 127.0.0.1#P87 master said: rlse
-barrier: 127.0.0.1#P87 master said: wait
-barrier: 127.0.0.1#P87 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P87 pong
-barrier: 127.0.0.1#P87 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P87 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P87 was released, waiting for close
-barrier: 127.0.0.1#P88 master said: ping
-barrier: 127.0.0.1#P88 master said: rlse
-barrier: 127.0.0.1#P88 master said: wait
-barrier: 127.0.0.1#P88 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P88 pong
-barrier: 127.0.0.1#P88 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P88 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P88 was released, waiting for close
-barrier: 127.0.0.1#P89 master said: ping
-barrier: 127.0.0.1#P89 master said: rlse
-barrier: 127.0.0.1#P89 master said: wait
-barrier: 127.0.0.1#P89 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P89 pong
-barrier: 127.0.0.1#P89 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P89 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P89 was released, waiting for close
-barrier: 127.0.0.1#P8 master said: ping
-barrier: 127.0.0.1#P8 master said: rlse
-barrier: 127.0.0.1#P8 master said: wait
-barrier: 127.0.0.1#P8 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P8 pong
-barrier: 127.0.0.1#P8 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P8 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P8 was released, waiting for close
-barrier: 127.0.0.1#P90 master said: ping
-barrier: 127.0.0.1#P90 master said: rlse
-barrier: 127.0.0.1#P90 master said: wait
-barrier: 127.0.0.1#P90 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P90 pong
-barrier: 127.0.0.1#P90 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P90 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P90 was released, waiting for close
-barrier: 127.0.0.1#P91 master said: ping
-barrier: 127.0.0.1#P91 master said: rlse
-barrier: 127.0.0.1#P91 master said: wait
-barrier: 127.0.0.1#P91 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P91 pong
-barrier: 127.0.0.1#P91 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P91 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P91 was released, waiting for close
-barrier: 127.0.0.1#P92 master said: ping
-barrier: 127.0.0.1#P92 master said: rlse
-barrier: 127.0.0.1#P92 master said: wait
-barrier: 127.0.0.1#P92 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P92 pong
-barrier: 127.0.0.1#P92 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P92 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P92 was released, waiting for close
-barrier: 127.0.0.1#P93 master said: ping
-barrier: 127.0.0.1#P93 master said: rlse
-barrier: 127.0.0.1#P93 master said: wait
-barrier: 127.0.0.1#P93 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P93 pong
-barrier: 127.0.0.1#P93 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P93 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P93 was released, waiting for close
-barrier: 127.0.0.1#P94 master said: ping
-barrier: 127.0.0.1#P94 master said: rlse
-barrier: 127.0.0.1#P94 master said: wait
-barrier: 127.0.0.1#P94 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P94 pong
-barrier: 127.0.0.1#P94 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P94 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P94 was released, waiting for close
-barrier: 127.0.0.1#P95 master said: ping
-barrier: 127.0.0.1#P95 master said: rlse
-barrier: 127.0.0.1#P95 master said: wait
-barrier: 127.0.0.1#P95 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P95 pong
-barrier: 127.0.0.1#P95 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P95 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P95 was released, waiting for close
-barrier: 127.0.0.1#P96 master said: ping
-barrier: 127.0.0.1#P96 master said: rlse
-barrier: 127.0.0.1#P96 master said: wait
-barrier: 127.0.0.1#P96 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P96 pong
-barrier: 127.0.0.1#P96 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P96 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P96 was released, waiting for close
-barrier: 127.0.0.1#P97 master said: ping
-barrier: 127.0.0.1#P97 master said: rlse
-barrier: 127.0.0.1#P97 master said: wait
-barrier: 127.0.0.1#P97 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P97 pong
-barrier: 127.0.0.1#P97 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P97 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P97 was released, waiting for close
-barrier: 127.0.0.1#P98 master said: ping
-barrier: 127.0.0.1#P98 master said: rlse
-barrier: 127.0.0.1#P98 master said: wait
-barrier: 127.0.0.1#P98 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P98 pong
-barrier: 127.0.0.1#P98 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P98 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P98 was released, waiting for close
-barrier: 127.0.0.1#P99 master said: ping
-barrier: 127.0.0.1#P99 master said: rlse
-barrier: 127.0.0.1#P99 master said: wait
-barrier: 127.0.0.1#P99 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P99 pong
-barrier: 127.0.0.1#P99 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P99 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P99 was released, waiting for close
-barrier: 127.0.0.1#P9 master said: ping
-barrier: 127.0.0.1#P9 master said: rlse
-barrier: 127.0.0.1#P9 master said: wait
-barrier: 127.0.0.1#P9 members: 127.0.0.1#P1,127.0.0.1#P10,127.0.0.1#P100,127.0.0.1#P11,127.0.0.1#P12,127.0.0.1#P13,127.0.0.1#P14,127.0.0.1#P15,127.0.0.1#P16,127.0.0.1#P17,127.0.0.1#P18,127.0.0.1#P19,127.0.0.1#P2,127.0.0.1#P20,127.0.0.1#P21,127.0.0.1#P22,127.0.0.1#P23,127.0.0.1#P24,127.0.0.1#P25,127.0.0.1#P26,127.0.0.1#P27,127.0.0.1#P28,127.0.0.1#P29,127.0.0.1#P3,127.0.0.1#P30,127.0.0.1#P31,127.0.0.1#P32,127.0.0.1#P33,127.0.0.1#P34,127.0.0.1#P35,127.0.0.1#P36,127.0.0.1#P37,127.0.0.1#P38,127.0.0.1#P39,127.0.0.1#P4,127.0.0.1#P40,127.0.0.1#P41,127.0.0.1#P42,127.0.0.1#P43,127.0.0.1#P44,127.0.0.1#P45,127.0.0.1#P46,127.0.0.1#P47,127.0.0.1#P48,127.0.0.1#P49,127.0.0.1#P5,127.0.0.1#P50,127.0.0.1#P51,127.0.0.1#P52,127.0.0.1#P53,127.0.0.1#P54,127.0.0.1#P55,127.0.0.1#P56,127.0.0.1#P57,127.0.0.1#P58,127.0.0.1#P59,127.0.0.1#P6,127.0.0.1#P60,127.0.0.1#P61,127.0.0.1#P62,127.0.0.1#P63,127.0.0.1#P64,127.0.0.1#P65,127.0.0.1#P66,127.0.0.1#P67,127.0.0.1#P68,127.0.0.1#P69,127.0.0.1#P7,127.0.0.1#P70,127.0.0.1#P71,127.0.0.1#P72,127.0.0.1#P73,127.0.0.1#P74,127.0.0.1#P75,127.0.0.1#P76,127.0.0.1#P77,127.0.0.1#P78,127.0.0.1#P79,127.0.0.1#P8,127.0.0.1#P80,127.0.0.1#P81,127.0.0.1#P82,127.0.0.1#P83,127.0.0.1#P84,127.0.0.1#P85,127.0.0.1#P86,127.0.0.1#P87,127.0.0.1#P88,127.0.0.1#P89,127.0.0.1#P9,127.0.0.1#P90,127.0.0.1#P91,127.0.0.1#P92,127.0.0.1#P93,127.0.0.1#P94,127.0.0.1#P95,127.0.0.1#P96,127.0.0.1#P97,127.0.0.1#P98,127.0.0.1#P99
-barrier: 127.0.0.1#P9 pong
-barrier: 127.0.0.1#P9 selected as slave, master=127.0.0.1
-barrier: 127.0.0.1#P9 tag=here port=63000 timeout=60 start=NNN
-barrier: 127.0.0.1#P9 was released, waiting for close
---SELFTEST-- exit 0
---SELFTEST-- loading test
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 complete
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
---STEPTEST-- payload1 start
diff --git a/client/bin/self-test/tests/NNN-filter b/client/bin/self-test/tests/NNN-filter
deleted file mode 100755
index e0fb15b..0000000
--- a/client/bin/self-test/tests/NNN-filter
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-
-# Work out which test this is.
-test=`basename "$1"`
-
-case "$test" in
-???-barrier-unordered-*)
-	# Any realistic testing of barriers will involve random ordering
-	# of arrivals at the barrier, so we need to order the output
-	# such that it is comparible.
-	$0 XXX-barrier-ordered | sort
-	;;
-???-barrier-*)
-	# Barriers are timeout based, so we need to drop all the noise.
-	awk '
-		/barrier: .* calling master/	{ next }
-		/barrier: .* remaining: /	{ next }
-						{ print }
-	' | sed -e 's/start=[0-9][0-9]*/start=NNN/g' \
-		-e 's/(127.0.0.1:[0-9][0-9]*)/(ADDR:PORT)/g'
-	;;
-*)
-	cat -
-	;;
-esac
diff --git a/client/bin/setup_job.py b/client/bin/setup_job.py
index 1d39c67..364624e 100644
--- a/client/bin/setup_job.py
+++ b/client/bin/setup_job.py
@@ -146,7 +146,7 @@
             # host. See client/common_lib/utils.py update_version()
             if os.path.exists(client_test.srcdir):
                 versionfile = os.path.join(client_test.srcdir, '.version')
-                pickle.dump(client_test.version, open(versionfile, 'w'))
+                pickle.dump(client_test.version, open(versionfile, 'wb'))
             good_setup = True
         except Exception as err:
             logging.error(err)
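
The 'w' to 'wb' switch above is needed because pickle emits bytes, and under Python 3 writing bytes to a text-mode handle raises TypeError. A minimal standalone sketch of the round-trip (not part of this patch; file name made up):

import pickle
import tempfile

version = '1.0'
versionfile = tempfile.NamedTemporaryFile(suffix='.version', delete=False).name

# pickle data is binary, so both ends of the round-trip use binary mode.
with open(versionfile, 'wb') as out:
    pickle.dump(version, out)
with open(versionfile, 'rb') as src:
    assert pickle.load(src) == version
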
diff --git a/client/bin/setup_job_unittest.py b/client/bin/setup_job_unittest.py
index 615bf4a..114b48e 100755
--- a/client/bin/setup_job_unittest.py
+++ b/client/bin/setup_job_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #pylint: disable-msg=C0111
 import logging
 import os
@@ -54,11 +54,24 @@
     """Generic client job mixin used when defining variations on the
     job.__init__ generic tests."""
     PUBLIC_ATTRIBUTES = (
-        base_job_unittest.test_init.generic_tests.PUBLIC_ATTRIBUTES
-        - set(['bootloader', 'control', 'drop_caches',
-               'drop_caches_between_iterations', 'harness', 'hosts', 'logging',
-               'machines', 'num_tests_failed', 'num_tests_run', 'profilers',
-               'sysinfo', 'user',  'warning_loggers', 'warning_manager']))
+            base_job_unittest.test_init.generic_tests.PUBLIC_ATTRIBUTES - set([
+                    'bootloader',
+                    'control',
+                    'drop_caches',
+                    'drop_caches_between_iterations',
+                    'force_full_log_collection',
+                    'harness',
+                    'hosts',
+                    'logging',
+                    'machines',
+                    'num_tests_failed',
+                    'num_tests_run',
+                    'profilers',
+                    'sysinfo',
+                    'user',
+                    'warning_loggers',
+                    'warning_manager',
+            ]))
 
 
 class test_init_minimal_options(abstract_test_init, setup_job_test_case):
@@ -89,7 +102,7 @@
         self.job.__init__(options)
 
 
-class dummy(object):
+class stub(object):
     """A simple placeholder for attributes"""
     pass
 
@@ -119,10 +132,10 @@
         sys.stdout = six.StringIO()
         logging_manager.configure_logging(logging_config.TestingConfig())
         logging.disable(logging.CRITICAL)
-        def dummy_configure_logging(*args, **kwargs):
+        def stub_configure_logging(*args, **kwargs):
             pass
         self.god.stub_with(logging_manager, 'configure_logging',
-                           dummy_configure_logging)
+                           stub_configure_logging)
         real_get_logging_manager = logging_manager.get_logging_manager
         def get_logging_manager_no_fds(manage_stdout_and_stderr=False,
                                        redirect_fds=False):
@@ -169,7 +182,7 @@
         resultdir = self._setup_pre_record_init()
 
         # finish constructor
-        options = dummy()
+        options = stub()
         options.tag = self.jobtag
         options.log = False
         options.verbose = False
diff --git a/client/bin/site_sysinfo.py b/client/bin/site_sysinfo.py
index 2cbee2b..e7ea41f 100755
--- a/client/bin/site_sysinfo.py
+++ b/client/bin/site_sysinfo.py
@@ -11,6 +11,8 @@
 from autotest_lib.client.common_lib import error, utils, global_config
 from autotest_lib.client.bin import base_sysinfo, utils
 from autotest_lib.client.cros import constants
+from autotest_lib.client.cros import tpm
+
 
 get_value = global_config.global_config.get_config_value
 collect_corefiles = get_value('CLIENT', 'collect_corefiles',
@@ -41,7 +43,7 @@
         unpickle it on the DUT (using the version of the class from the build).
         This means that when adding a new attribute to this class, for a while
         the server-side code does not populate that attribute. So, deal with
-        missing attributes in a sane way.
+        missing attributes in a valid way.
         """
         self.__dict__ = state
         if '_excludes' not in state:
@@ -107,7 +109,7 @@
         from an older build, we need to be able to unpickle an instance of
         logdir pickled from a newer version of the class.
 
-        Some old attributes are not sanely handled via __setstate__, so we can't
+        Some old attributes are not accurately handled via __setstate__, so we can't
         drop them without breaking compatibility.
         """
         additional_excludes = list(set(self._excludes) -
@@ -198,8 +200,18 @@
                 full_path = os.path.join(root, f)
                 # Only list regular files or symlinks to those (os.stat follows
                 # symlinks)
-                if stat.S_ISREG(os.stat(full_path).st_mode):
-                    yield full_path
+                try:
+                    if stat.S_ISREG(os.stat(full_path).st_mode):
+                        yield full_path
+                except OSError:
+                    # The source file of a symlink occasionally gets deleted,
+                    # which makes the os.stat call fail and breaks the hook.
+                    # Instead of quietly crashing, we simply skip collecting
+                    # the missing file.
+                    logging.debug(
+                            'File {} could not stat & will not be collected'.
+                            format(full_path))
+                    continue
 
 
     def _copy_new_data_in_file(self, file_path, src_dir, dest_dir):
@@ -222,7 +234,7 @@
                 # File is modified to a smaller size, copy whole file.
                 bytes_to_skip = 0
         try:
-            with open(file_path, 'r') as in_log:
+            with open(file_path, 'rb') as in_log:
                 if bytes_to_skip > 0:
                     in_log.seek(bytes_to_skip)
                 # Skip src_dir in path, e.g., src_dir/[sub_dir]/file_name.
@@ -231,7 +243,7 @@
                 target_dir = os.path.dirname(target_path)
                 if not os.path.exists(target_dir):
                     os.makedirs(target_dir)
-                with open(target_path, "w") as out_log:
+                with open(target_path, 'wb') as out_log:
                     out_log.write(in_log.read())
         except IOError as e:
             logging.error('Diff %s failed with error: %s', file_path, e)
@@ -256,7 +268,6 @@
         for src_file in self._get_all_files(src_dir):
             self._copy_new_data_in_file(src_file, src_dir, dest_dir)
 
-
     def run(self, log_dir, collect_init_status=True, collect_all=False):
         """Copies new content from self.dir to the destination log_dir.
 
@@ -292,9 +303,19 @@
             utils.system("rm -rf %s/*" % (self.dir))
 
 
+class purged_on_init_logdir(logdir):
+    """Represents a log directory that is purged *when initialized*."""
+
+    def __init__(self, directory, excludes=logdir.DEFAULT_EXCLUDES):
+        super(purged_on_init_logdir, self).__init__(directory, excludes)
+
+        if os.path.exists(self.dir):
+            utils.system("rm -rf %s/*" % (self.dir))
+
+
 class site_sysinfo(base_sysinfo.base_sysinfo):
     """Represents site system info."""
-    def __init__(self, job_resultsdir):
+    def __init__(self, job_resultsdir, version=None):
         super(site_sysinfo, self).__init__(job_resultsdir)
         crash_exclude_string = None
         if not collect_corefiles:
@@ -322,26 +343,34 @@
         self.test_loggables.add(
             purgeable_logdir(
                 os.path.join(constants.CRYPTOHOME_MOUNT_PT, "log")))
-        # We only want to gather and purge crash reports after the client test
-        # runs in case a client test is checking that a crash found at boot
-        # (such as a kernel crash) is handled.
+
+        # We do *not* want to purge crashes after iteration to allow post-test
+        # infrastructure to collect them as well. Instead, purge them before.
+        # TODO(mutexlox, ayatane): test_runner should handle the purging.
         self.after_iteration_loggables.add(
-            purgeable_logdir(
-                os.path.join(constants.CRYPTOHOME_MOUNT_PT, "crash"),
-                excludes=logdir.DEFAULT_EXCLUDES + (crash_exclude_string,)))
-        self.after_iteration_loggables.add(
-            purgeable_logdir(
-                constants.CRASH_DIR,
-                excludes=logdir.DEFAULT_EXCLUDES + (crash_exclude_string,)))
+                purged_on_init_logdir(os.path.join(
+                        constants.CRYPTOHOME_MOUNT_PT, "crash"),
+                                      excludes=logdir.DEFAULT_EXCLUDES +
+                                      (crash_exclude_string, )))
+
+        self.test_loggables.add(
+                purgeable_logdir(constants.CRASH_DIR,
+                                 excludes=logdir.DEFAULT_EXCLUDES +
+                                 (crash_exclude_string, )))
+
         self.test_loggables.add(
             logfile(os.path.join(constants.USER_DATA_DIR,
                                  ".Google/Google Talk Plugin/gtbplugin.log")))
-        self.test_loggables.add(purgeable_logdir(
-                constants.CRASH_DIR,
-                excludes=logdir.DEFAULT_EXCLUDES + (crash_exclude_string,)))
+
+        # purged_on_init_logdir not compatible with client R86 and prior.
+        if version and int(version) > 86:
+            self.test_loggables.add(
+                    purged_on_init_logdir(constants.CRASH_DIR,
+                                          excludes=logdir.DEFAULT_EXCLUDES +
+                                          (crash_exclude_string, )))
         # Collect files under /tmp/crash_reporter, which contain the procfs
         # copy of those crashed processes whose core file didn't get converted
-        # into minidump. We need these additional files for post-mortem analysis
+        # into minidump. We need these additional files for retrospective analysis
         # of the conversion failure.
         self.test_loggables.add(
             purgeable_logdir(constants.CRASH_REPORTER_RESIDUE_DIR))
@@ -355,9 +384,11 @@
         """
         super(site_sysinfo, self).log_before_each_test(test)
 
-        for log in self.diffable_loggables:
-            log.run(log_dir=None, collect_init_status=True)
-
+        try:
+            for log in self.diffable_loggables:
+                log.run(log_dir=None, collect_init_status=True)
+        except Exception as e:
+            logging.warning("Exception hit during log_before_each_test %s", e)
 
     @log.log_and_ignore_errors("post-test sysinfo error:")
     def log_after_each_test(self, test):
@@ -370,8 +401,9 @@
         test_sysinfodir = self._get_sysinfodir(test.outputdir)
 
         for log in self.diffable_loggables:
-            log.run(log_dir=test_sysinfodir, collect_init_status=False,
-                    collect_all=not test.success)
+            log.run(log_dir=test_sysinfodir,
+                    collect_init_status=False,
+                    collect_all=not test.success or test.collect_full_logs)
 
 
     def _get_chrome_version(self):
@@ -435,14 +467,9 @@
         keyval["CHROME_VERSION"], keyval["MILESTONE"] = (
                 self._get_chrome_version())
 
-        # TODO(kinaba): crbug.com/707448 Import at the head of this file.
-        # Currently a server-side script server/server_job.py is indirectly
-        # importing this file, so we cannot globaly import cryptohome that
-        # has dependency to a client-only library.
-        from autotest_lib.client.cros import cryptohome
         # Get the dictionary attack counter.
         keyval["TPM_DICTIONARY_ATTACK_COUNTER"] = (
-                cryptohome.get_tpm_da_info().get(
+                tpm.get_tpm_da_info().get(
                         'dictionary_attack_counter',
                         'Failed to query tpm_manager'))
 
diff --git a/client/bin/site_sysinfo_unittest.py b/client/bin/site_sysinfo_unittest.py
index 1aed7e8..e9ef8ae 100644
--- a/client/bin/site_sysinfo_unittest.py
+++ b/client/bin/site_sysinfo_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 """Tests for site_sysinfo."""
 
@@ -73,6 +73,18 @@
             f.write(text)
 
 
+    def get_dest_path(self, src_path):
+        """Get file path in dest dir from the one in src dir.
+
+        @param src_path: File path in src dir.
+
+        """
+        # Make sure src_path is a subpath of self.src_dir
+        self.assertEqual(os.path.commonprefix((src_path, self.src_dir)),
+                         self.src_dir)
+        rel_path = os.path.relpath(src_path, self.src_dir)
+        return os.path.join(self.dest_dir, rel_path)
+
     def assert_trees_equal(self, dir1, dir2, ignore=None):
         """Assert two directory trees contain the same files.
 
@@ -127,17 +139,16 @@
         # Validate files in dest_dir.
         for file_name, file_path in zip(self.existing_files+self.new_files,
                                 self.existing_files_path+self.new_files_path):
-            file_path = file_path.replace('src', 'dest')
+            file_path = self.get_dest_path(file_path)
             with open(file_path, 'r') as f:
                 self.assertEqual(file_name, f.read())
 
         # Assert that FIFOs are not in the diff.
-        self.assertFalse(
-                os.path.exists(self.existing_fifo_path.replace('src', 'dest')),
-                msg='Existing FIFO present in diff sysinfo')
-        self.assertFalse(
-                os.path.exists(new_fifo_path.replace('src', 'dest')),
-                msg='New FIFO present in diff sysinfo')
+        self.assertFalse(os.path.exists(
+                self.get_dest_path(self.existing_fifo_path)),
+                         msg='Existing FIFO present in diff sysinfo')
+        self.assertFalse(os.path.exists(self.get_dest_path(new_fifo_path)),
+                         msg='New FIFO present in diff sysinfo')
 
         # With collect_all=True, full sysinfo should also be present.
         full_sysinfo_path = self.dest_dir + self.src_dir
@@ -246,7 +257,7 @@
                         msg='Failed to copy to %s' % destination_path)
 
     def test_pickle_unpickle_equal(self):
-        """Sanity check pickle-unpickle round-trip."""
+        """Check pickle-unpickle round-trip."""
         logdir = site_sysinfo.logdir(
                 self.from_dir,
                 excludes=(site_sysinfo.logdir.DEFAULT_EXCLUDES + ('a',)))
@@ -289,7 +300,7 @@
         self.assertEqual(logdir.additional_exclude, None)
 
     def test_unpickle_handle_missing__excludes(self):
-        """Sanely handle missing _excludes attribute from pickles
+        """Accurately handle missing _excludes attribute from pickles
 
         This can happen when running brand new version of this class that
         introduced this attribute from older server side code in prod.
@@ -304,7 +315,7 @@
                          site_sysinfo.logdir.DEFAULT_EXCLUDES)
 
     def test_unpickle_handle_missing__excludes_default(self):
-        """Sanely handle missing _excludes attribute from pickles
+        """Accurately handle missing _excludes attribute from pickles
 
         This can happen when running brand new version of this class that
         introduced this attribute from older server side code in prod.
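
The new get_dest_path helper replaces the brittle file_path.replace('src', 'dest') substitution with real path arithmetic. A small illustration of the standard-library calls it leans on, with made-up paths:

import os.path

src_dir = '/tmp/sysinfo_src'
dest_dir = '/tmp/sysinfo_dest'
src_path = os.path.join(src_dir, 'var/log/messages')

# commonprefix confirms src_path lives under src_dir, relpath strips that
# prefix, and join re-roots the relative part under dest_dir.
assert os.path.commonprefix((src_path, src_dir)) == src_dir
rel_path = os.path.relpath(src_path, src_dir)          # 'var/log/messages'
assert os.path.join(dest_dir, rel_path) == '/tmp/sysinfo_dest/var/log/messages'
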
diff --git a/client/bin/telemetry_check.py b/client/bin/telemetry_check.py
new file mode 100755
index 0000000..688e108
--- /dev/null
+++ b/client/bin/telemetry_check.py
@@ -0,0 +1,166 @@
+#!/usr/bin/python3
+#
+# Copyright 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+'''Confidence tests for Chrome on ChromeOS.
+
+This script runs a number of confidence tests to ensure that Chrome browser on
+ChromeOS is functional.
+'''
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import argparse
+import datetime
+import logging
+import sys
+
+# This sets up import paths for autotest.
+import common
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib.cros import arc, arc_common, chrome
+from autotest_lib.client.common_lib.error import TestFail
+from autotest_lib.client.cros import cryptohome
+from six.moves import range
+
+
+class TelemetryCheck(object):
+    """Class for running confidence tests to verify telemetry."""
+
+    def __init__(self,
+                 count=1,
+                 run_cryptohome=True,
+                 run_incognito=True,
+                 run_screenlock=True):
+        self.count = count
+        self.run_cryptohome = run_cryptohome
+        self.run_incognito = run_incognito
+        self.run_screenlock = run_screenlock
+
+    def Run(self):
+        """Run tests."""
+        start = datetime.datetime.now()
+
+        for i in range(self.count):
+            if self.count > 1:
+                logging.info('Starting iteration %d.', i)
+            if self.run_cryptohome:
+                self.RunCryptohomeTest()
+            if self.run_incognito:
+                self.RunIncognitoTest()
+            if self.run_screenlock:
+                self.RunScreenlockTest()
+
+        elapsed = datetime.datetime.now() - start
+        logging.info('Tests succeeded in %s seconds.', elapsed.seconds)
+
+    def RunCryptohomeTest(self):
+        """Test Cryptohome."""
+        logging.info('RunCryptohomeTest: Starting chrome and logging in.')
+        # Only run ARC tests for P.
+        run_arc_tests = (utils.is_arc_available()
+                         and arc.get_android_sdk_version() <= 28)
+        arc_mode = arc_common.ARC_MODE_ENABLED if run_arc_tests else None
+        with chrome.Chrome(arc_mode=arc_mode, num_tries=1) as cr:
+            # Check that the cryptohome is mounted.
+            # is_vault_mounted throws an exception if it fails.
+            logging.info('Checking mounted cryptohome.')
+            cryptohome.is_vault_mounted(user=cr.username, allow_fail=False)
+            # Navigate to about:blank.
+            tab = cr.browser.tabs[0]
+            tab.Navigate('about:blank')
+
+            # Evaluate some javascript.
+            logging.info('Evaluating JavaScript.')
+            if tab.EvaluateJavaScript('2+2') != 4:
+                raise TestFail('EvaluateJavaScript failed')
+
+            # ARC test.
+            if run_arc_tests:
+                arc.wait_for_adb_ready()
+                logging.info('Android booted successfully.')
+                arc.wait_for_android_process('org.chromium.arc.intent_helper')
+                if not arc.is_package_installed('android'):
+                    raise TestFail(
+                            '"android" system package was not listed by '
+                            'Package Manager.')
+
+        if run_arc_tests:
+            utils.poll_for_condition(lambda: not arc.
+                                     is_android_container_alive(),
+                                     timeout=15,
+                                     desc='Android container still running '
+                                     'after Chrome shutdown.')
+
+    def RunIncognitoTest(self):
+        """Test Incognito mode."""
+        logging.info('RunIncognitoTest')
+        with chrome.Chrome(logged_in=False):
+            if not cryptohome.is_guest_vault_mounted():
+                raise TestFail('Expected to find a guest vault mounted.')
+        if cryptohome.is_guest_vault_mounted(allow_fail=True):
+            raise TestFail('Expected to NOT find a guest vault mounted.')
+
+    def RunScreenlockTest(self):
+        """Run a test that locks the screen."""
+        logging.info('RunScreenlockTest')
+        with chrome.Chrome(autotest_ext=True) as cr:
+            cr.autotest_ext.ExecuteJavaScript(
+                    'chrome.autotestPrivate.lockScreen();')
+            utils.poll_for_condition(lambda: cr.login_status['isScreenLocked'],
+                                     timeout=15,
+                                     exception=TestFail('Screen not locked'))
+
+    @staticmethod
+    def ParseArgs(argv):
+        """Parse command line.
+
+        Args:
+            argv: List of command line arguments.
+
+        Returns:
+            Namespace of parsed opts.
+        """
+        parser = argparse.ArgumentParser(description=__doc__)
+        parser.add_argument('--count',
+                            type=int,
+                            default=1,
+                            help='Number of iterations of the test to run.')
+        parser.add_argument('--run-all',
+                            default=False,
+                            action='store_true',
+                            help='Run all tests.')
+        parser.add_argument('--run-cryptohome',
+                            default=False,
+                            action='store_true',
+                            help='Run Cryptohome test.')
+        parser.add_argument('--run-incognito',
+                            default=False,
+                            action='store_true',
+                            help='Run Incognito test.')
+        parser.add_argument('--run-screenlock',
+                            default=False,
+                            action='store_true',
+                            help='Run Screenlock test.')
+        return parser.parse_args(argv)
+
+
+def main(argv):
+    '''The main function.'''
+    opts = TelemetryCheck.ParseArgs(argv)
+
+    # Run all tests if none are specified.
+    if opts.run_all or not (opts.run_cryptohome or opts.run_incognito
+                            or opts.run_screenlock):
+        opts.run_cryptohome = opts.run_screenlock = True
+        opts.run_incognito = False  # crbug.com/970065
+
+    TelemetryCheck(opts.count, opts.run_cryptohome, opts.run_incognito,
+                   opts.run_screenlock).Run()
+
+
+if __name__ == '__main__':
+    sys.exit(main(sys.argv[1:]))
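
Beyond the command-line entry point, the new TelemetryCheck class can be driven directly; a hypothetical sketch, assuming it runs on a DUT where the autotest client import paths (set up by the common module) are available:

# Hypothetical direct use; the import only works inside the autotest client
# environment on the DUT, as in telemetry_check.py itself.
from telemetry_check import TelemetryCheck

check = TelemetryCheck(count=2,
                       run_cryptohome=True,
                       run_incognito=False,  # crbug.com/970065
                       run_screenlock=True)
check.Run()
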
diff --git a/client/bin/telemetry_sanity.py b/client/bin/telemetry_sanity.py
deleted file mode 100755
index 259b8e0..0000000
--- a/client/bin/telemetry_sanity.py
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-'''Sanity tests for Chrome on Chrome OS.
-
-This script runs a number of sanity tests to ensure that Chrome browser on
-Chrome OS is functional.
-'''
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import argparse
-import datetime
-import logging
-import sys
-
-# This sets up import paths for autotest.
-import common
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib.cros import arc, arc_common, chrome
-from autotest_lib.client.common_lib.error import TestFail
-from autotest_lib.client.cros import cryptohome
-from six.moves import range
-
-
-class TelemetrySanity(object):
-  """Class for running sanity tests to verify telemetry."""
-
-
-  def __init__(self, count=1, run_cryptohome=True, run_incognito=True,
-               run_screenlock=True):
-    self.count = count
-    self.run_cryptohome = run_cryptohome
-    self.run_incognito = run_incognito
-    self.run_screenlock = run_screenlock
-
-
-  def Run(self):
-    """Run tests."""
-    start = datetime.datetime.now()
-
-    for i in range(self.count):
-      if self.count > 1:
-        logging.info('Starting iteration %d.', i)
-      if self.run_cryptohome:
-        self.RunCryptohomeTest()
-      if self.run_incognito:
-        self.RunIncognitoTest()
-      if self.run_screenlock:
-        self.RunScreenlockTest()
-
-    elapsed = datetime.datetime.now() - start
-    logging.info('Tests succeeded in %s seconds.', elapsed.seconds)
-
-
-  def RunCryptohomeTest(self):
-    """Test Cryptohome."""
-    logging.info('RunCryptohomeTest: Starting chrome and logging in.')
-    # Only run ARC tests for P.
-    run_arc_tests = (utils.is_arc_available() and
-                     arc.get_android_sdk_version() <= 28)
-    arc_mode = arc_common.ARC_MODE_ENABLED if run_arc_tests else None
-    with chrome.Chrome(arc_mode=arc_mode, num_tries=1) as cr:
-      # Check that the cryptohome is mounted.
-      # is_vault_mounted throws an exception if it fails.
-      logging.info('Checking mounted cryptohome.')
-      cryptohome.is_vault_mounted(user=cr.username, allow_fail=False)
-      # Navigate to about:blank.
-      tab = cr.browser.tabs[0]
-      tab.Navigate('about:blank')
-
-      # Evaluate some javascript.
-      logging.info('Evaluating JavaScript.')
-      if tab.EvaluateJavaScript('2+2') != 4:
-        raise TestFail('EvaluateJavaScript failed')
-
-      # ARC test.
-      if run_arc_tests:
-        arc.wait_for_adb_ready()
-        logging.info('Android booted successfully.')
-        arc.wait_for_android_process('org.chromium.arc.intent_helper')
-        if not arc.is_package_installed('android'):
-          raise TestFail('"android" system package was not listed by '
-                         'Package Manager.')
-
-    if run_arc_tests:
-      utils.poll_for_condition(lambda: not arc.is_android_container_alive(),
-                               timeout=15,
-                               desc='Android container still running '
-                               'after Chrome shutdown.')
-
-
-  def RunIncognitoTest(self):
-    """Test Incognito mode."""
-    logging.info('RunIncognitoTest')
-    with chrome.Chrome(logged_in=False):
-      if not cryptohome.is_guest_vault_mounted():
-        raise TestFail('Expected to find a guest vault mounted.')
-    if cryptohome.is_guest_vault_mounted(allow_fail=True):
-      raise TestFail('Expected to NOT find a guest vault mounted.')
-
-
-  def RunScreenlockTest(self):
-    """Run a test that locks the screen."""
-    logging.info('RunScreenlockTest')
-    with chrome.Chrome(autotest_ext=True) as cr:
-      cr.autotest_ext.ExecuteJavaScript('chrome.autotestPrivate.lockScreen();')
-      utils.poll_for_condition(
-          lambda: cr.login_status['isScreenLocked'],
-          timeout=15,
-          exception=TestFail('Screen not locked'))
-
-
-  @staticmethod
-  def ParseArgs(argv):
-    """Parse command line.
-
-    Args:
-      argv: List of command line arguments.
-
-    Returns:
-      List of parsed opts.
-    """
-    parser = argparse.ArgumentParser(description=__doc__)
-    parser.add_argument('--count', type=int, default=1,
-                        help='Number of iterations of the test to run.')
-    parser.add_argument('--run-all', default=False, action='store_true',
-                        help='Run all tests.')
-    parser.add_argument('--run-cryptohome', default=False, action='store_true',
-                        help='Run Cryptohome test.')
-    parser.add_argument('--run-incognito', default=False, action='store_true',
-                        help='Run Incognito test.')
-    parser.add_argument('--run-screenlock', default=False, action='store_true',
-                        help='Run Screenlock test.')
-    return parser.parse_args(argv)
-
-
-def main(argv):
-    '''The main function.'''
-    opts = TelemetrySanity.ParseArgs(argv)
-
-    # Run all tests if none are specified.
-    if opts.run_all or not (opts.run_cryptohome or opts.run_incognito or
-                            opts.run_screenlock):
-      opts.run_cryptohome = opts.run_screenlock = True
-      opts.run_incognito = False  # crbug.com/970065
-
-    TelemetrySanity(opts.count, opts.run_cryptohome, opts.run_incognito,
-             opts.run_screenlock).Run()
-
-
-if __name__ == '__main__':
-    sys.exit(main(sys.argv[1:]))
diff --git a/client/bin/temperature.py b/client/bin/temperature.py
index fad71b6..c1d23ee 100755
--- a/client/bin/temperature.py
+++ b/client/bin/temperature.py
@@ -1,4 +1,5 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
+# Lint as: python2, python3
 
 from __future__ import absolute_import
 from __future__ import division
@@ -36,7 +37,7 @@
     print(TEMPERATURE_TYPE.get(temperature_type)())
 
 if args.temperature_type == 'all':
-    for temperature_type in TEMPERATURE_TYPE.keys():
+    for temperature_type in list(TEMPERATURE_TYPE.keys()):
         print_temperature(temperature_type)
 else:
     print_temperature(args.temperature_type)
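
Wrapping the keys in list() above accounts for Python 3, where dict.keys() returns a live view object rather than a list; a minimal illustration of the difference:

# dict.keys() is a dynamic view in Python 3; list() takes a snapshot.
sensors = {'cpu': 1, 'gpu': 2}
keys_view = sensors.keys()
keys_list = list(sensors.keys())
sensors['soc'] = 3
assert 'soc' in keys_view      # the view reflects later insertions
assert 'soc' not in keys_list  # the snapshot does not
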
diff --git a/client/bin/update_intel_pci_ids b/client/bin/update_intel_pci_ids
index 7136fa5..3cafdf9 100755
--- a/client/bin/update_intel_pci_ids
+++ b/client/bin/update_intel_pci_ids
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -42,7 +42,8 @@
         'CML': 'cometlake',
         'WHL': 'whiskeylake',
         'TGL': 'tigerlake',
-        'JSL': 'jasperlake'
+        'JSL': 'jasperlake',
+        'ADL': 'alderlake'
     }
 
     for name in family_name_map:
diff --git a/client/bin/utils.py b/client/bin/utils.py
index 96b0114..5b68295 100644
--- a/client/bin/utils.py
+++ b/client/bin/utils.py
@@ -14,6 +14,7 @@
 from __future__ import print_function
 
 import base64
+import chardet
 import collections
 import errno
 import glob
@@ -28,6 +29,7 @@
 import signal
 import string
 import subprocess
+import sys
 import tempfile
 import time
 import uuid
@@ -309,7 +311,10 @@
     return family
 
 
+# When adding entries here, also add them at the right spot in the
+# INTEL_*_ORDER lists below.
 INTEL_UARCH_TABLE = {
+    '06_9A': 'Alder Lake',
     '06_4C': 'Airmont',
     '06_1C': 'Atom',
     '06_26': 'Atom',
@@ -366,6 +371,15 @@
     '06_2F': 'Westmere',
 }
 
+INTEL_ATOM_ORDER = ['Silvermont', 'Airmont', 'Goldmont', 'Tremont', 'Gracemont']
+
+INTEL_BIGCORE_ORDER = [
+        'Prescott', 'Presler', 'Dothan', 'Merom', 'Nehalem', 'Westmere',
+        'Sandy Bridge', 'Ivy Bridge', 'Ivy Bridge-E', 'Haswell', 'Haswell-E',
+        'Broadwell', 'Skylake', 'Kaby Lake', 'Coffee Lake', 'Whiskey Lake',
+        'Cannon Lake', 'Comet Lake', 'Ice Lake', 'Tiger Lake', 'Alder Lake'
+]
+
 
 def get_intel_cpu_uarch(numeric=False):
     """Return the Intel microarchitecture we're running on, or None.
@@ -386,7 +400,43 @@
     return INTEL_UARCH_TABLE.get(family_model, family_model)
 
 
-INTEL_SILVERMONT_BCLK_TABLE = [83333, 100000, 133333, 116667, 80000];
+def is_intel_uarch_older_than(reference):
+    """Returns True if the DUT's is older than reference, False otherwise.
+
+    Raises a test error exception if the uarch is unknown to make developers
+    add entries to the tables above.
+    """
+
+    uarch = get_intel_cpu_uarch()
+    if uarch is None:
+        raise error.TestError("Doing Intel test for non-Intel hardware.")
+
+    if "_" in uarch:
+        raise error.TestError("Intel uarch unknown. Add to tables.")
+
+    if reference not in INTEL_BIGCORE_ORDER and reference not in INTEL_ATOM_ORDER:
+        raise error.TestError("Testing for unknown reference Intel uarch.")
+
+    result = False
+
+    if reference in INTEL_BIGCORE_ORDER:
+        for v in INTEL_BIGCORE_ORDER:
+            if v == reference:
+                break
+            if v == uarch:
+                result = True
+
+    elif reference in INTEL_ATOM_ORDER:
+        for v in INTEL_ATOM_ORDER:
+            if v == reference:
+                break
+            if v == uarch:
+                result = True
+
+    return result
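A minimal usage sketch (illustrative, not part of this change) of how a test could gate on microarchitecture age with the helper above; 'Skylake' is an arbitrary example reference:

    # Hypothetical caller inside an autotest client test.
    if is_intel_uarch_older_than('Skylake'):
        raise error.TestNAError('This test requires Skylake or newer.')
    # On a Haswell DUT the check returns True; on a Tiger Lake DUT, False.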
+
+
+INTEL_SILVERMONT_BCLK_TABLE = [83333, 100000, 133333, 116667, 80000]
 
 
 def get_intel_bclk_khz():
@@ -402,6 +452,15 @@
     return 100000
 
 
+def get_energy_usage():
+    """On Intel chips that support it, return the energy usage."""
+    if get_intel_cpu_uarch() == None:
+        return 0
+
+    with open('/sys/class/powercap/intel-rapl/intel-rapl:0/energy_uj') as fd:
+        return fd.readline()
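A minimal usage sketch (illustrative, assuming the RAPL counter does not wrap during the interval): energy_uj is a cumulative microjoule counter and is returned here as a string, so average power is the delta over time:

    import time
    e1 = int(get_energy_usage())      # microjoules at t1
    time.sleep(10)
    e2 = int(get_energy_usage())      # microjoules at t2
    watts = (e2 - e1) / 10.0 / 1e6    # average power over the interval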
+
+
 def get_current_kernel_arch():
     """Get the machine architecture, now just a wrap of 'uname -m'."""
     return os.popen('uname -m').read().rstrip()
@@ -410,10 +469,10 @@
 def count_cpus():
     """number of CPUs in the local machine according to /proc/cpuinfo"""
     try:
-       return multiprocessing.cpu_count()
+        return multiprocessing.cpu_count()
     except Exception:
-       logging.exception('can not get cpu count from'
-                        ' multiprocessing.cpu_count()')
+        logging.exception('can not get cpu count from'
+                          ' multiprocessing.cpu_count()')
     cpuinfo = get_cpuinfo()
     # Returns at least one cpu. Check comment #1 in crosbug.com/p/9582.
     return len(cpuinfo) or 1
@@ -750,6 +809,8 @@
                 maj_min = dmsetup_output[4]
             elif dmsetup_output[2] == 'crypt':
                 maj_min = dmsetup_output[6]
+            elif dmsetup_output[2] in ['thin', 'thin-pool', 'linear']:
+                maj_min = dmsetup_output[3]
             cmd = 'realpath "/dev/block/%s"' % maj_min
         elif filename.startswith('/dev/loop'):
             cmd = 'losetup -O BACK-FILE "%s" | tail -1' % filename
@@ -1109,12 +1170,12 @@
     _, command = get_oldest_by_name('chrome')
     matches = re.search('--remote-debugging-port=([0-9]+)', command)
     if not matches:
-      return 0
+        return 0
     port = int(matches.group(1))
     if port:
-      return port
+        return port
     with open('/home/chronos/DevToolsActivePort') as f:
-      return int(f.readline().rstrip())
+        return int(f.readline().rstrip())
 
 
 def get_process_list(name, command_line=None):
@@ -1398,6 +1459,8 @@
         logging.info('After waiting %.1fs CPU utilization is %.3f.',
                      time_passed, fraction_active_time)
         if time_passed > timeout:
+            if fraction_active_time < utilization:
+                break
             logging.warning('CPU did not become idle.')
             log_process_activity()
             # crosbug.com/37389
@@ -1622,7 +1685,7 @@
     except Exception as e:
         logging.warning('Unable to read temperature sensors using ectool %s.',
                         e)
-    # Sanity check for real world values.
+    # Check for real world values.
     if not all(10.0 <= temperature <= 150.0 for temperature in temperatures):
         logging.warning('Unreasonable EC temperatures: %s.', temperatures)
     return temperatures
@@ -1639,7 +1702,7 @@
         temperature = max(all_temps)
     else:
         temperature = -1
-    # Sanity check for real world values.
+    # Check for real world values.
     assert ((temperature > 10.0) and
             (temperature < 150.0)), ('Unreasonable temperature %.1fC.' %
                                      temperature)
@@ -1662,7 +1725,7 @@
         except IOError:
             continue
         max_frequency = max(frequency, max_frequency)
-    # Sanity check.
+    # Confidence check.
     assert max_frequency > 1e8, ('Unreasonably low CPU frequency: %.1f' %
             max_frequency)
     return max_frequency
@@ -1709,6 +1772,22 @@
     return get_board_property('CHROMEOS_RELEASE_VERSION')
 
 
+def get_android_version():
+    """
+    Get the Android SDK version from /etc/lsb-release.
+
+    @return android sdk version.
+    """
+    return get_board_property('CHROMEOS_ARC_ANDROID_SDK_VERSION')
+
+
+def is_arcvm():
+    """Returns True if the device runs ARCVM (Android SDK version >= 30)."""
+    try:
+        return int(get_android_version()) >= 30
+    except (TypeError, ValueError):
+        return False
+
+
 def get_platform():
     """
     Get the ChromeOS platform name.
@@ -1756,6 +1835,14 @@
     return utils.run('crossystem fwid').stdout.strip()
 
 
+def get_hardware_id():
+    """Get hardware id as strings.
+
+    @returns a string representing this host's hardware id.
+    """
+    return utils.run('crossystem hwid').stdout.strip()
+
+
 def get_hardware_revision():
     """Get the hardware revision as strings.
 
@@ -1845,7 +1932,7 @@
     Returns the total memory available in the system in MBytes.
     """
     mem_total = _get_float_from_file(_MEMINFO, 'MemTotal:', 'MemTotal:', ' kB')
-    # Sanity check, all Chromebooks have at least 1GB of memory.
+    # Confidence check, all Chromebooks have at least 1GB of memory.
     assert mem_total > 256 * 1024, 'Unreasonable amount of memory.'
     return int(mem_total / 1024)
 
@@ -1964,7 +2051,7 @@
         return pciid_to_intel_architecture[device_id]
 
 # TODO(ihf): Consider using /etc/lsb-release DEVICETYPE != CHROMEBOOK/CHROMEBASE
-# for sanity check, but usage seems a bit inconsistent. See
+# for confidence check, but usage seems a bit inconsistent. See
 # src/third_party/chromiumos-overlay/eclass/appid.eclass
 _BOARDS_WITHOUT_MONITOR = [
     'anglar', 'mccloud', 'monroe', 'ninja', 'rikku', 'guado', 'jecht', 'tidus',
@@ -2048,7 +2135,7 @@
     @param root_part: current root partition
     """
     if not root_part:
-         root_part = get_root_partition()
+        root_part = get_root_partition()
     current_kernel_map = {'3': '2', '5': '4'}
     return root_part[:-1] + current_kernel_map[root_part[-1]]
 
@@ -2110,7 +2197,7 @@
         utils.run(_CHECK_PACKAGE_INSTALLED_COMMAND % package)
         return True
     except error.CmdError:
-        logging.warn('Package %s is not installed.', package)
+        logging.warning('Package %s is not installed.', package)
         return False
 
 
@@ -2123,7 +2210,7 @@
         __import__(package)
         return True
     except ImportError:
-        logging.warn('Python package %s is not installed.', package)
+        logging.warning('Python package %s is not installed.', package)
         return False
 
 
@@ -2213,27 +2300,38 @@
                     return result_obj
                 except ValueError:
                     pass
-
-        result_obj = func(obj)
-        return result_obj
+        try:
+            result_obj = func(obj)
+            return result_obj
+        except UnicodeEncodeError:
+            pass
     else:
         return obj
 
 
+def is_python2():
+    """True if it is interpreted by Python 2."""
+    return sys.version_info.major == 2
+
+
 def base64_recursive_encode(obj):
     """Apply base64 encode recursively into the obj structure.
 
-    Most of the string-like types could be traced to basestring and bytearray
-    as follows:
-        str: basestring
-        bytes: basestring
-        dbus.String: basestring
-        dbus.Signature: basestring
-        dbus.ByteArray: basestring
+    Python 2 case:
+        Most of the string-like types could be traced to basestring and bytearray
+        as follows:
+            str: basestring
+            bytes: basestring
+            dbus.String: basestring
+            dbus.Signature: basestring
+            dbus.ByteArray: basestring
 
-    Note that all the above types except dbus.String could be traced back to
-    str. In order to cover dbus.String, basestring is used as the ancestor
-    class for string-like types.
+        Note that all the above types except dbus.String could be traced back to
+        str. In order to cover dbus.String, basestring is used as the ancestor
+        class for string-like types.
+
+    Python 3 case:
+        Perform base64 encode on bytes element only.
 
     The other type that needs encoding with base64 in a structure includes
         bytearray: bytearray
@@ -2246,6 +2344,7 @@
         dbus.Dictionary: dict
 
     An example code and output look like
+    in Python 2:
         obj = {'a': 10, 'b': 'hello',
                'c': [100, 200, bytearray(b'\xf0\xf1\xf2\xf3\xf4')],
                'd': {784: bytearray(b'@\x14\x01P'),
@@ -2253,18 +2352,6 @@
         encode_obj = base64_recursive_encode(obj)
         decode_obj = base64_recursive_decode(encode_obj)
 
-        print 'obj: ', obj
-        print 'encode_obj: ', encode_obj
-        print 'decode_obj: ', decode_obj
-        print 'Equal?', obj == decode_obj
-
-        Output:
-        obj:  {'a': 10,
-               'c': [100, 200, bytearray(b'\xf0\xf1\xf2\xf3\xf4')],
-               'b': 'hello',
-               'd': {784: bytearray(b'@\x14\x01P'),
-                     78.0: bytearray(b'\x10\x05\x0b\x10\xb2\x1b\x00')}}
-
         encode_obj:  {'YQ==': 10,
                       'Yw==': [100, 200, '8PHy8/Q='],
                       'Yg==': 'aGVsbG8='
@@ -2274,13 +2361,34 @@
                       'b': 'hello',
                       'd': {784: '@\x14\x01P',
                             78.0: '\x10\x05\x0b\x10\xb2\x1b\x00'}}
-        Equal? True
+
+    in Python 3:
+        obj = {'a': 10, 'b': 'hello',
+               'c': [100, 200, bytearray(b'\xf0\xf1\xf2\xf3\xf4')],
+               'd': {784: bytearray(b'@\x14\x01P'),
+                     78.0: bytearray(b'\x10\x05\x0b\x10\xb2\x1b\x00')}}
+        encode_obj = base64_recursive_encode(obj)
+        decode_obj = base64_recursive_decode(encode_obj)
+
+        encode_obj:  {'a': 10,
+                      'c': [100, 200, '8PHy8/Q='],
+                      'b': 'hello',
+                      'ZA==': {784: 'QBQBUA==', 78.0: 'EAULELIbAA=='}}
+        decode_obj:  {'a': 10,
+                      'c': [100, 200, '\xf0\xf1\xf2\xf3\xf4'],
+                      'b': 'hello',
+                      'd': {784: '@\x14\x01P',
+                            78.0: '\x10\x05\x0b\x10\xb2\x1b\x00'}}
 
     @param obj: the object to apply base64 encoding recursively.
 
     @return: the base64 encoded object.
     """
-    encode_types = (six.string_types, bytearray)
+    if is_python2():
+        encode_types = (six.string_types, bytearray)
+    else:
+        encode_types = (bytes, bytearray)
+
     return recursive_func(obj, base64.standard_b64encode, encode_types)
 
 
@@ -2291,6 +2399,40 @@
 
     @return: the base64 decoded object.
     """
-    decode_types = (six.string_types,)
+    if is_python2():
+        decode_types = (six.string_types, )
+    else:
+        decode_types = (bytes, bytearray)
     return recursive_func(obj, base64.standard_b64decode, decode_types,
                           fix_num_key=True)
+
+
+def bytes_to_str_recursive(obj):
+    """Converts obj's bytes elements to str.
+
+    It focuses on elements in the input obj whose type is bytes or bytearray.
+    For each such element, it first guesses the encoding of the bytes (or
+    bytearray) and decodes them to str. If the encoding cannot be guessed,
+    UTF-8 is tried; if decoding still fails, the element is replaced with
+    "ERROR_DECODE_BYTES_TO_STR".
+
+    @param obj: an object.
+
+    @return: an object that converts the input object's bytes elements to
+        strings.
+    """
+    # Python 2's bytes is equivalent to string. Do nothing.
+    if is_python2():
+        return obj
+
+    def bytes_to_str(bytes_obj):
+        guessed_encoding = chardet.detect(bytes_obj).get('encoding')
+        if not guessed_encoding:
+            guessed_encoding = 'utf-8'
+        try:
+            return bytes_obj.decode(guessed_encoding, 'backslashreplace')
+        except Exception:
+            logging.info("Failed to decode bytes %r to str with encoding %r",
+                         bytes_obj, guessed_encoding)
+            return 'ERROR_DECODE_BYTES_TO_STR'
+
+    return recursive_func(obj, bytes_to_str, (bytes, bytearray))
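A minimal usage sketch (illustrative, mirroring the unit test added below): under Python 3 the helper walks containers and decodes bytes-like leaves:

    data = {'tag': b'foo', 'vals': [1, bytearray(b'bar')]}
    print(bytes_to_str_recursive(data))
    # -> {'tag': 'foo', 'vals': [1, 'bar']}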
diff --git a/client/bin/utils_unittest.py b/client/bin/utils_unittest.py
index ddafd84..03315b3 100755
--- a/client/bin/utils_unittest.py
+++ b/client/bin/utils_unittest.py
@@ -1,10 +1,11 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 __author__ = "kerl@google.com, gwendal@google.com (Gwendal Grignou)"
 
 import io
-import mock
+import six
 import unittest
+from unittest import mock
 
 from autotest_lib.client.bin import utils
 
@@ -27,7 +28,10 @@
 
     def fake_open(self, path):
         # Use BytesIO instead of StringIO to support with statements.
-        return io.BytesIO(bytes(self.fake_file_text))
+        if six.PY2:
+            return io.BytesIO(bytes(self.fake_file_text))
+        else:
+            return io.StringIO(self.fake_file_text)
 
     def test_concat_partition(self):
         self.assertEquals("nvme0n1p3", utils.concat_partition("nvme0n1", 3))
@@ -174,3 +178,111 @@
             'transfers_per_s': 4.45,
             'written_kb': 188458.0,
         }, statistics)
+
+    def test_base64_recursive_encode(self):
+        obj = {
+                'a': 10,
+                'b': 'hello',
+                'c': [100, 200, bytearray(b'\xf0\xf1\xf2\xf3\xf4')],
+                'd': {
+                        784: bytearray(b'@\x14\x01P'),
+                        78.0: bytearray(b'\x10\x05\x0b\x10\xb2\x1b\x00')
+                }
+        }
+        if utils.is_python2():
+            expected_encoded_obj = {
+                    'YQ==': 10,
+                    'Yg==': 'aGVsbG8=',
+                    'Yw==': [100, 200, '8PHy8/Q='],
+                    'ZA==': {
+                            784: 'QBQBUA==',
+                            78.0: 'EAULELIbAA=='
+                    }
+            }
+        else:
+            expected_encoded_obj = {
+                    'a': 10,
+                    'b': 'hello',
+                    'c': [100, 200, b'8PHy8/Q='],
+                    'd': {
+                            784: b'QBQBUA==',
+                            78.0: b'EAULELIbAA=='
+                    }
+            }
+
+        encoded_obj = utils.base64_recursive_encode(obj)
+        self.assertEqual(expected_encoded_obj, encoded_obj)
+
+    def test_base64_recursive_decode(self):
+        if utils.is_python2():
+            encoded_obj = {
+                    'YQ==': 10,
+                    'Yg==': 'aGVsbG8=',
+                    'Yw==': [100, 200, '8PHy8/Q='],
+                    'ZA==': {
+                            784: 'QBQBUA==',
+                            78.0: 'EAULELIbAA=='
+                    }
+            }
+        else:
+            encoded_obj = {
+                    'a': 10,
+                    'b': 'hello',
+                    'c': [100, 200, b'8PHy8/Q='],
+                    'd': {
+                            784: b'QBQBUA==',
+                            78.0: b'EAULELIbAA=='
+                    }
+            }
+
+        expected_decoded_obj = {
+                'a': 10,
+                'b': 'hello',
+                'c': [100, 200, b'\xf0\xf1\xf2\xf3\xf4'],
+                'd': {
+                        784: b'@\x14\x01P',
+                        78.0: b'\x10\x05\x0b\x10\xb2\x1b\x00'
+                }
+        }
+
+        decoded_obj = utils.base64_recursive_decode(encoded_obj)
+        self.assertEqual(expected_decoded_obj, decoded_obj)
+
+    def test_bytes_to_str_recursive(self):
+        obj = {
+                'a': 10,
+                'b': 'hello',
+                'c': b'b_hello',
+                'd': [100, 200, bytearray(b'\xf0\xf1\xf2\xf3\xf4')],
+                'e': {
+                        784: bytearray(b'@\x14\x01P'),
+                        78.0: bytearray(b'\x10\x05\x0b\x10\xb2\x1b\x00')
+                }
+        }
+
+        if utils.is_python2():
+            self.assertEqual(b'foo', utils.bytes_to_str_recursive(b'foo'))
+            self.assertEqual(b'\x80abc',
+                             utils.bytes_to_str_recursive(b'\x80abc'))
+            self.assertEqual('foo', utils.bytes_to_str_recursive('foo'))
+            self.assertEqual('\x80abc',
+                             utils.bytes_to_str_recursive('\x80abc'))
+            self.assertEqual(obj, utils.bytes_to_str_recursive(obj))
+        else:
+            self.assertEqual('foo', utils.bytes_to_str_recursive(b'foo'))
+            # self.assertEqual('\ufffdabc', utils.bytes_to_str_recursive(b'\x80abc'))
+            self.assertEqual('foo', utils.bytes_to_str_recursive('foo'))
+            self.assertEqual('\x80abc',
+                             utils.bytes_to_str_recursive('\x80abc'))
+            expected_obj = {
+                    'a': 10,
+                    'b': 'hello',
+                    'c': 'b_hello',
+                    # u prefix: Python 2 interpreter friendly.
+                    'd': [100, 200, u'\u0440\u0441\u0442\u0443\u0444'],
+                    'e': {
+                            784: '@\x14\x01P',
+                            78.0: u'\x10\x05\x0b\x10\u00b2\x1b\x00'
+                    }
+            }
+            self.assertEqual(expected_obj, utils.bytes_to_str_recursive(obj))
diff --git a/client/bin/vm_sanity.py b/client/bin/vm_sanity.py
index 6d206ab..caa548a 120000
--- a/client/bin/vm_sanity.py
+++ b/client/bin/vm_sanity.py
@@ -1 +1 @@
-telemetry_sanity.py
\ No newline at end of file
+telemetry_check.py
\ No newline at end of file
diff --git a/client/common_lib/OWNERS b/client/common_lib/OWNERS
new file mode 100644
index 0000000..e8a7df8
--- /dev/null
+++ b/client/common_lib/OWNERS
@@ -0,0 +1,3 @@
+include chromiumos/config:/owners/testservice
+include /ENGPROD_OWNERS
+include /INFRA_OWNERS
diff --git a/client/common_lib/autotemp.py b/client/common_lib/autotemp.py
index 31c7929..b829cf0 100644
--- a/client/common_lib/autotemp.py
+++ b/client/common_lib/autotemp.py
@@ -128,8 +128,8 @@
                 pass
 
 
-class dummy_dir(object):
-    """A dummy object representing a directory with a name.
+class stub_dir(object):
+    """A stub object representing a directory with a name.
 
     Only used for compat with the tmpdir, in cases where we wish to
     reuse a dir with the same interface but not to delete it after
@@ -137,7 +137,7 @@
     """
 
     def __init__(self, name):
-        """Initialize the dummy_dir object.
+        """Initialize the stub_dir object.
 
         @param name: Path to the directory.
         """
diff --git a/client/common_lib/autotemp_unittest.py b/client/common_lib/autotemp_unittest.py
index d044618..1dfed9c 100755
--- a/client/common_lib/autotemp_unittest.py
+++ b/client/common_lib/autotemp_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import unittest, os
 import common
diff --git a/client/common_lib/barrier.py b/client/common_lib/barrier.py
index cdd3eaf..e1ef894 100644
--- a/client/common_lib/barrier.py
+++ b/client/common_lib/barrier.py
@@ -240,7 +240,7 @@
 
         except socket.timeout:
             # This is nominally an error, but as we do not know
-            # who that was we cannot do anything sane other
+            # who that was we cannot do anything valid other
             # than report it and let the normal timeout kill
             # us when that's appropriate.
             logging.warning("client handshake timeout: (%s:%d)",
@@ -275,7 +275,7 @@
 
         except socket.timeout:
             # This is nominally an error, but as we do not know
-            # who that was we cannot do anything sane other
+            # who that was we cannot do anything valid other
             # than report it and let the normal timeout kill
             # us when that's appropriate.
             logging.error("main handshake timeout: (%s:%d)",
diff --git a/client/common_lib/barrier_unittest.py b/client/common_lib/barrier_unittest.py
index e225342..ac2820a 100755
--- a/client/common_lib/barrier_unittest.py
+++ b/client/common_lib/barrier_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 __author__ = """Ashwin Ganti (aganti@google.com)"""
 
diff --git a/client/common_lib/base_job.py b/client/common_lib/base_job.py
index f5cba1c..6877e36 100644
--- a/client/common_lib/base_job.py
+++ b/client/common_lib/base_job.py
@@ -240,7 +240,7 @@
             # This _is_ necessary in the instance that the pickled job is transferred between the
             # server_job and the job on the DUT. The two can be on different autotest versions
             # (e.g. for non-SSP / client tests the server-side is versioned with the drone vs
-            # client-side versioned with the Chrome OS being tested).
+            # client-side versioned with the ChromeOS being tested).
             try:
                 with open(file_path, 'r') as rf:
                     on_disk_state = pickle.load(rf)
@@ -798,7 +798,8 @@
         '_state', 'max_result_size_KB', 0)
     fast = _job_state.property_factory(
         '_state', 'fast', False)
-
+    extended_timeout = _job_state.property_factory(
+        '_state', 'extended_timeout', None)
     # the use_sequence_number property
     _sequence_number = _job_state.property_factory(
         '_state', '_sequence_number', None)
diff --git a/client/common_lib/base_job_unittest.py b/client/common_lib/base_job_unittest.py
index 7ef1d33..3d3e900 100755
--- a/client/common_lib/base_job_unittest.py
+++ b/client/common_lib/base_job_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # pylint: disable=missing-docstring
 
@@ -81,30 +81,59 @@
         """
 
         PUBLIC_ATTRIBUTES = set([
-            # standard directories
-            'autodir', 'clientdir', 'serverdir', 'resultdir', 'pkgdir',
-            'tmpdir', 'testdir', 'site_testdir', 'bindir',
-            'profdir', 'toolsdir',
+                # standard directories
+                'autodir',
+                'clientdir',
+                'serverdir',
+                'resultdir',
+                'pkgdir',
+                'tmpdir',
+                'testdir',
+                'site_testdir',
+                'bindir',
+                'profdir',
+                'toolsdir',
 
-            # other special attributes
-            'args', 'automatic_test_tag', 'control',
-            'default_profile_only', 'drop_caches',
-            'drop_caches_between_iterations', 'harness', 'hosts',
-            'logging', 'machines', 'num_tests_failed', 'num_tests_run',
-            'pkgmgr', 'profilers', 'resultdir', 'run_test_cleanup',
-            'sysinfo', 'tag', 'user', 'use_sequence_number',
-            'warning_loggers', 'warning_manager', 'label',
-            'parent_job_id', 'in_lab', 'machine_dict_list',
-            'max_result_size_KB', 'fast'
-            ])
+                # other special attributes
+                'args',
+                'automatic_test_tag',
+                'control',
+                'default_profile_only',
+                'drop_caches',
+                'drop_caches_between_iterations',
+                'harness',
+                'hosts',
+                'logging',
+                'machines',
+                'num_tests_failed',
+                'num_tests_run',
+                'pkgmgr',
+                'profilers',
+                'resultdir',
+                'run_test_cleanup',
+                'sysinfo',
+                'tag',
+                'user',
+                'use_sequence_number',
+                'warning_loggers',
+                'warning_manager',
+                'label',
+                'parent_job_id',
+                'in_lab',
+                'machine_dict_list',
+                'max_result_size_KB',
+                'fast',
+                'extended_timeout',
+                'force_full_log_collection',
+        ])
 
         OPTIONAL_ATTRIBUTES = set([
-            'serverdir',
-
-            'automatic_test_tag', 'control', 'harness', 'num_tests_run',
-            'num_tests_failed', 'tag', 'warning_manager', 'warning_loggers',
-            'label', 'parent_job_id', 'max_result_size_KB', 'fast'
-            ])
+                'serverdir', 'automatic_test_tag', 'control', 'harness',
+                'num_tests_run', 'num_tests_failed', 'tag', 'warning_manager',
+                'warning_loggers', 'label', 'parent_job_id',
+                'max_result_size_KB', 'fast', 'extended_timeout',
+                'force_full_log_collection'
+        ])
 
         OPTIONAL_ATTRIBUTES_DEVICE_ERROR = set(['failed_with_device_error'])
 
@@ -1098,9 +1127,12 @@
         self.logger = base_job.status_logger(self.job, self.indenter)
 
 
-    def make_dummy_entry(self, rendered_text, start=False, end=False,
-                         subdir=None):
-        """Helper to make a dummy status log entry with custom rendered text.
+    def make_placeholder_entry(self,
+                               rendered_text,
+                               start=False,
+                               end=False,
+                               subdir=None):
+        """Helper to make a placeholder status log entry with custom rendered text.
 
         Helpful when validating the logging since it lets the test control
         the rendered text and so it doesn't depend on the exact formatting
@@ -1113,24 +1145,26 @@
             of a nested group.
         @param subdir: An optional value to use for the entry subdir field.
 
-        @return: A dummy status log entry object with the given subdir field
-            and a render implementation that returns rendered_text.
+        @return: A placeholder status log entry object with the given subdir
+            field and a render implementation that returns rendered_text.
         """
         assert not start or not end  # real entries would never be both
-        class dummy_entry(object):
+
+        class placeholder_entry(object):
             def is_start(self):
                 return start
             def is_end(self):
                 return end
             def render(self):
                 return rendered_text
-        entry = dummy_entry()
+
+        entry = placeholder_entry()
         entry.subdir = subdir
         return entry
 
 
     def test_render_includes_indent(self):
-        entry = self.make_dummy_entry('LINE0')
+        entry = self.make_placeholder_entry('LINE0')
         self.assertEqual('LINE0', self.logger.render_entry(entry))
         self.indenter.increment()
         self.indenter.increment()
@@ -1138,13 +1172,13 @@
 
 
     def test_render_handles_start(self):
-        entry = self.make_dummy_entry('LINE10', start=True)
+        entry = self.make_placeholder_entry('LINE10', start=True)
         self.indenter.increment()
         self.assertEqual('\tLINE10', self.logger.render_entry(entry))
 
 
     def test_render_handles_end(self):
-        entry = self.make_dummy_entry('LINE20', end=True)
+        entry = self.make_placeholder_entry('LINE20', end=True)
         self.indenter.increment()
         self.indenter.increment()
         self.indenter.increment()
@@ -1152,7 +1186,7 @@
 
 
     def test_writes_toplevel_log(self):
-        entries = [self.make_dummy_entry('LINE%d' % x) for x in range(3)]
+        entries = [self.make_placeholder_entry('LINE%d' % x) for x in range(3)]
         for entry in entries:
             self.logger.record_entry(entry)
         self.assertEqual('LINE0\nLINE1\nLINE2\n', open('status').read())
@@ -1163,9 +1197,11 @@
         self.logger = base_job.status_logger(self.job, self.indenter,
                                              global_filename='global.log',
                                              subdir_filename='subdir.log')
-        self.logger.record_entry(self.make_dummy_entry('LINE1', subdir='sub'))
-        self.logger.record_entry(self.make_dummy_entry('LINE2', subdir='sub'))
-        self.logger.record_entry(self.make_dummy_entry('LINE3'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE1', subdir='sub'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE2', subdir='sub'))
+        self.logger.record_entry(self.make_placeholder_entry('LINE3'))
 
         self.assertEqual('LINE1\nLINE2\nLINE3\n', open('global.log').read())
         self.assertEqual('LINE1\nLINE2\n', open('sub/subdir.log').read())
@@ -1181,12 +1217,14 @@
         self.logger = base_job.status_logger(self.job, self.indenter,
                                              global_filename='global.log',
                                              subdir_filename='subdir.log')
-        self.logger.record_entry(self.make_dummy_entry('LINE1', subdir='sub2'))
-        self.logger.record_entry(self.make_dummy_entry('LINE2'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE1', subdir='sub2'))
+        self.logger.record_entry(self.make_placeholder_entry('LINE2'))
         self.logger.global_filename = 'global.log2'
         self.logger.subdir_filename = 'subdir.log2'
-        self.logger.record_entry(self.make_dummy_entry('LINE3', subdir='sub2'))
-        self.logger.record_entry(self.make_dummy_entry('LINE4'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE3', subdir='sub2'))
+        self.logger.record_entry(self.make_placeholder_entry('LINE4'))
 
         self.assertEqual('LINE1\nLINE2\n', open('global.log').read())
         self.assertEqual('LINE1\n', open('sub2/subdir.log').read())
@@ -1197,10 +1235,13 @@
     def test_writes_subdir_logs(self):
         os.mkdir('abc')
         os.mkdir('123')
-        self.logger.record_entry(self.make_dummy_entry('LINE1'))
-        self.logger.record_entry(self.make_dummy_entry('LINE2', subdir='abc'))
-        self.logger.record_entry(self.make_dummy_entry('LINE3', subdir='abc'))
-        self.logger.record_entry(self.make_dummy_entry('LINE4', subdir='123'))
+        self.logger.record_entry(self.make_placeholder_entry('LINE1'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE2', subdir='abc'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE3', subdir='abc'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE4', subdir='123'))
 
         self.assertEqual('LINE1\nLINE2\nLINE3\nLINE4\n', open('status').read())
         self.assertEqual('LINE2\nLINE3\n', open('abc/status').read())
@@ -1209,11 +1250,14 @@
 
     def test_writes_no_subdir_when_disabled(self):
         os.mkdir('sub')
-        self.logger.record_entry(self.make_dummy_entry('LINE1'))
-        self.logger.record_entry(self.make_dummy_entry('LINE2', subdir='sub'))
-        self.logger.record_entry(self.make_dummy_entry(
-            'LINE3', subdir='sub_nowrite'), log_in_subdir=False)
-        self.logger.record_entry(self.make_dummy_entry('LINE4', subdir='sub'))
+        self.logger.record_entry(self.make_placeholder_entry('LINE1'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE2', subdir='sub'))
+        self.logger.record_entry(self.make_placeholder_entry(
+                'LINE3', subdir='sub_nowrite'),
+                                 log_in_subdir=False)
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE4', subdir='sub'))
 
         self.assertEqual('LINE1\nLINE2\nLINE3\nLINE4\n', open('status').read())
         self.assertEqual('LINE2\nLINE4\n', open('sub/status').read())
@@ -1221,14 +1265,18 @@
 
 
     def test_indentation(self):
-        self.logger.record_entry(self.make_dummy_entry('LINE1', start=True))
-        self.logger.record_entry(self.make_dummy_entry('LINE2'))
-        self.logger.record_entry(self.make_dummy_entry('LINE3', start=True))
-        self.logger.record_entry(self.make_dummy_entry('LINE4'))
-        self.logger.record_entry(self.make_dummy_entry('LINE5'))
-        self.logger.record_entry(self.make_dummy_entry('LINE6', end=True))
-        self.logger.record_entry(self.make_dummy_entry('LINE7', end=True))
-        self.logger.record_entry(self.make_dummy_entry('LINE8'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE1', start=True))
+        self.logger.record_entry(self.make_placeholder_entry('LINE2'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE3', start=True))
+        self.logger.record_entry(self.make_placeholder_entry('LINE4'))
+        self.logger.record_entry(self.make_placeholder_entry('LINE5'))
+        self.logger.record_entry(self.make_placeholder_entry('LINE6',
+                                                             end=True))
+        self.logger.record_entry(self.make_placeholder_entry('LINE7',
+                                                             end=True))
+        self.logger.record_entry(self.make_placeholder_entry('LINE8'))
 
         expected_log = ('LINE1\n\tLINE2\n\tLINE3\n\t\tLINE4\n\t\tLINE5\n'
                         '\tLINE6\nLINE7\nLINE8\n')
@@ -1236,11 +1284,14 @@
 
 
     def test_multiline_indent(self):
-        self.logger.record_entry(self.make_dummy_entry('LINE1\n  blah\n'))
-        self.logger.record_entry(self.make_dummy_entry('LINE2', start=True))
         self.logger.record_entry(
-            self.make_dummy_entry('LINE3\n  blah\n  two\n'))
-        self.logger.record_entry(self.make_dummy_entry('LINE4', end=True))
+                self.make_placeholder_entry('LINE1\n  blah\n'))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE2', start=True))
+        self.logger.record_entry(
+                self.make_placeholder_entry('LINE3\n  blah\n  two\n'))
+        self.logger.record_entry(self.make_placeholder_entry('LINE4',
+                                                             end=True))
 
         expected_log = ('LINE1\n  blah\nLINE2\n'
                         '\tLINE3\n  blah\n  two\nLINE4\n')
@@ -1248,7 +1299,7 @@
 
 
     def test_hook_is_called(self):
-        entries = [self.make_dummy_entry('LINE%d' % x) for x in range(5)]
+        entries = [self.make_placeholder_entry('LINE%d' % x) for x in range(5)]
         recorded_entries = []
         def hook(entry):
             recorded_entries.append(entry)
@@ -1349,7 +1400,7 @@
         self.assertEqual(expected, actual)
 
 
-    def test_subtest_with_master_test_path_and_subdir(self):
+    def test_subtest_with_main_test_path_and_subdir(self):
         self.assertEqual(
             ('test9', 'subtestdir/test9.subdirtag', 'subdirtag'),
             self.job._build_tagged_test_name('test9',
diff --git a/client/common_lib/check_version.py b/client/common_lib/check_version.py
index 68ee16c..a02d661 100644
--- a/client/common_lib/check_version.py
+++ b/client/common_lib/check_version.py
@@ -1,18 +1,26 @@
 # This file must use Python 1.5 syntax.
 import glob
+import logging
 import os
+import re
 import sys
 
+PY_GLOBS = {
+        3: ['/usr/bin/python3*', '/usr/local/bin/python3*'],
+        2: ['/usr/bin/python2*', '/usr/local/bin/python2*']
+}
+
 
 class check_python_version:
 
-    def __init__(self):
+    def __init__(self, desired_version=3):
         # In order to ease the migration to Python3, disable the restart logic
         # when AUTOTEST_NO_RESTART is set. This makes it possible to run
         # autotest locally as Python3 before any other environment is switched
         # to Python3.
         if os.getenv("AUTOTEST_NO_RESTART"):
             return
+        self.desired_version = desired_version
 
         # The change to prefer 2.4 really messes up any systems which have both
         # the new and old version of Python, but where the newer is default.
@@ -23,25 +31,49 @@
         # runs) 'import common' it restarts my shell. Overall, the change was
         # fairly annoying for me (and I can't get around having 2.4 and 2.5
         # installed with 2.5 being default).
-        if sys.version_info.major >= 3:
+        if sys.version_info.major != self.desired_version:
             try:
                 # We can't restart when running under mod_python.
                 from mod_python import apache
             except ImportError:
                 self.restart()
 
-
-    PYTHON_BIN_GLOB_STRINGS = ['/usr/bin/python2*', '/usr/local/bin/python2*']
-
+    def extract_version(self, path):
+        """Return a matching python version to the provided path."""
+        match = re.search(r'/python(\d+)\.(\d+)$', path)
+        if match:
+            return (int(match.group(1)), int(match.group(2)))
+        else:
+            return None
 
     def find_desired_python(self):
         """Returns the path of the desired python interpreter."""
-        # CrOS only ever has Python 2.7 available, so pick whatever matches.
         pythons = []
-        for glob_str in self.PYTHON_BIN_GLOB_STRINGS:
+        for glob_str in PY_GLOBS[self.desired_version]:
             pythons.extend(glob.glob(glob_str))
-        return pythons[0]
 
+        possible_versions = []
+        for python in pythons:
+            version = self.extract_version(python)
+            if not version:
+                continue
+            # Autotest in Python2 is written to 2.4 and above.
+            if self.desired_version == 2:
+                if version < (2, 4):
+                    continue
+            if self.desired_version == 3:
+                # Autotest in Python3 is written to 3.6 and above.
+                if version < (3, 6):
+                    continue
+            possible_versions.append((version, python))
+
+        possible_versions.sort()
+
+        if not possible_versions:
+            raise ValueError('Python %s.x not found' % self.desired_version)
+
+        # Return the path of the lowest compatible Python version found.
+        return possible_versions[0][1]
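An illustrative sketch (not part of the change) of how the two helpers behave when run under a Python 3 interpreter; the interpreter paths below are examples:

    from autotest_lib.client.common_lib.check_version import check_python_version

    checker = check_python_version(desired_version=3)  # already on 3, no restart
    checker.extract_version('/usr/bin/python3.8')      # -> (3, 8)
    checker.extract_version('/usr/bin/python3')        # -> None (no minor part)
    # With /usr/bin/python3.6 and /usr/bin/python3.8 both present,
    # find_desired_python() returns '/usr/bin/python3.6', the oldest
    # interpreter that satisfies the 3.6 floor.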
 
     def restart(self):
         python = self.find_desired_python()
diff --git a/client/common_lib/config_vars.py b/client/common_lib/config_vars.py
new file mode 100644
index 0000000..623c958a
--- /dev/null
+++ b/client/common_lib/config_vars.py
@@ -0,0 +1,237 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Functions to load config variables from JSON with transformation.
+
+* The config is a key-value dictionary.
+* If the value is a list, then the list constitutes a list of conditions
+  to check.
+* A condition is a key-value dictionary where the key is an external variable
+  name and the value is a case-insensitive regexp to match. If multiple
+  variables used, they all must match for the condition to succeed.
+* A special key "value" is the value to assign if condition succeeds.
+* The first matching condition wins.
+* Condition with zero external vars always succeeds - it should be the last in
+  the list as a last resort case.
+* If none of conditions match, it's an error.
+* The value, in turn, can be a nested list of conditions.
+* If the value is a boolean, the condition checks for the presence or absence
+  of an external variable.
+
+Example:
+    Python source:
+        config = TransformJsonFile(
+                                    "config.json",
+                                    extvars={
+                                        "board": "board1",
+                                        "model": "model1",
+                                    })
+        # config -> {
+        #               "cuj_username": "user",
+        #               "private_key": "SECRET",
+        #               "some_var": "val for board1",
+        #               "some_var2": "default val2",
+        #           }
+
+        config = TransformJsonFile(
+                                    "config.json",
+                                    extvars={
+                                        "board": "board2",
+                                        "model": "model2",
+                                    })
+        # config -> {
+        #               "cuj_username": "user",
+        #               "private_key": "SECRET",
+        #               "some_var": "val for board2",
+        #               "some_var2": "val2 for board2 model2",
+        #           }
+
+    config.json:
+        {
+            "cuj_username": "user",
+            "private_key": "SECRET",
+            "some_var": [
+                {
+                    "board": "board1.*",
+                    "value": "val for board1",
+                },
+                {
+                    "board": "board2.*",
+                    "value": "val for board2",
+                },
+                {
+                    "value": "default val",
+                }
+            ],
+            "some_var2": [
+                {
+                    "board": "board2.*",
+                    "model": "model2.*",
+                    "value": "val2 for board2 model2",
+                },
+                {
+                    "value": "default val2",
+                }
+            ],
+        }
+
+See more examples in config_vars_unittest.py
+
+"""
+
+# Lint as: python2, python3
+# pylint: disable=missing-docstring,bad-indentation
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import json
+import logging
+import re
+
+try:
+    unicode
+except NameError:
+    unicode = str
+
+VERBOSE = False
+
+
+class ConfigTransformError(ValueError):
+    pass
+
+
+def TransformConfig(data, extvars):
+    """Transforms data loaded from JSON to config variables.
+
+    Args:
+        data (dict): input data dictionary from JSON parser
+        extvars (dict): external variables dictionary
+
+    Returns:
+        dict: config variables
+
+    Raises:
+        ConfigTransformError: transformation error
+        're' errors
+    """
+    if not isinstance(data, dict):
+        _Error('Top level configuration object must be a dictionary but got ' +
+               data.__class__.__name__)
+
+    return {key: _GetVal(key, val, extvars) for key, val in data.items()}
+
+
+def TransformJsonText(text, extvars):
+    """Transforms JSON text to config variables.
+
+    Args:
+        text (str): JSON input
+        extvars (dict): external variables dictionary
+
+    Returns:
+        dict: config variables
+
+    Raises:
+        ConfigTransformError: transformation error
+        're' errors
+        'json' errors
+    """
+    data = json.loads(text)
+    return TransformConfig(data, extvars)
+
+
+def TransformJsonFile(file_name, extvars):
+    """Transforms JSON file to config variables.
+
+    Args:
+        file_name (str): JSON file name
+        extvars (dict): external variables dictionary
+
+    Returns:
+        dict: config variables
+
+    Raises:
+        ConfigTransformError: transformation error
+        're' errors
+        'json' errors
+        IO errors
+    """
+    with open(file_name, 'r') as f:
+        data = json.load(f)
+    return TransformConfig(data, extvars)
+
+
+def _GetVal(key, val, extvars):
+    """Calculates and returns the config variable value.
+
+    Args:
+        key (str): key for error reporting
+        val (str | list): variable value or conditions list
+        extvars (dict): external variables dictionary
+
+    Returns:
+        str: resolved variable value
+
+    Raises:
+        ConfigTransformError: transformation error
+    """
+    if isinstance(val, (str, unicode, int, float)):
+        return val
+
+    if not isinstance(val, list):
+        _Error('Conditions must be an array but got ' + val.__class__.__name__,
+               json.dumps(val), key)
+
+    for cond in val:
+        if not isinstance(cond, dict):
+            _Error(
+                    'Condition must be a dictionary but got ' +
+                    cond.__class__.__name__, json.dumps(cond), key)
+        if 'value' not in cond:
+            _Error('Missing mandatory "value" key from condition',
+                   json.dumps(cond), key)
+
+        for cond_key, cond_val in cond.items():
+            if cond_key == 'value':
+                continue
+
+            if isinstance(cond_val, bool):
+                # Boolean value -> check if variable exists
+                if (cond_key in extvars) == cond_val:
+                    continue
+                else:
+                    break
+
+            if cond_key not in extvars:
+                logging.warning('Unknown external var: %s', cond_key)
+                break
+            if re.search(cond_val, extvars[cond_key], re.I) is None:
+                break
+        else:
+            return _GetVal(key, cond['value'], extvars)
+
+    _Error('No condition matched the external vars',
+           json.dumps(val, indent=4) + '\nvars: ' + extvars.__str__(), key)
+
+
+def _Error(text, extra='', key=''):
+    """Reports and raises an error.
+
+    Args:
+        text (str): Error text
+        extra (str, optional): potentially sensitive error text for verbose output
+        key (str): key for error reporting or empty string if none
+
+    Raises:
+        ConfigTransformError: error
+    """
+    if key:
+        text = key + ': ' + text
+    if VERBOSE and extra:
+        text += ':\n' + extra
+    logging.error('%s', text)
+    raise ConfigTransformError(text)
diff --git a/client/common_lib/config_vars_unittest.py b/client/common_lib/config_vars_unittest.py
new file mode 100755
index 0000000..5c6e6ef
--- /dev/null
+++ b/client/common_lib/config_vars_unittest.py
@@ -0,0 +1,354 @@
+#!/usr/bin/python3
+# Lint as: python2, python3
+# pylint: disable=missing-docstring,bad-indentation
+
+import common
+import unittest
+import logging
+
+from autotest_lib.client.common_lib.config_vars import TransformJsonText, ConfigTransformError
+
+
+class ConfigVarsTransformTestCase(unittest.TestCase):
+    def testSimple(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz"
+                        }""", {"qwe": "asd"}), {'a': 'zzz'})
+
+    def testSimpleCond(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "AAA": "asd",
+                                    "value": "vvvvv"
+                                }
+                            ]
+                        }""", {"AAA": "asd"}), {
+                                'a': 'zzz',
+                                'b': 'vvvvv'
+                        })
+
+    def testSimpleCond2(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "value": "vvvvv"
+                                }
+                            ]
+                        }""", {"AAA": "asd"}), {
+                                'a': 'zzz',
+                                'b': 'vvvvv'
+                        })
+
+    def testSimpleCondFallback(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "AAA": "xxx",
+                                    "value": "vvvvv1"
+                                },
+                                {
+                                    "AAA": "yyy",
+                                    "value": "vvvvv2"
+                                },
+                                {
+                                    "value": "vvvvv3"
+                                }
+                            ]
+                        }""", {"AAA": "asd"}), {
+                                'a': 'zzz',
+                                'b': 'vvvvv3'
+                        })
+
+    def testNoMatch(self):
+        logging.disable(logging.CRITICAL)
+        self.assertRaises(
+                ConfigTransformError, TransformJsonText, """{
+                    "a": "zzz",
+                    "b": [
+                        {
+                            "XXX": "asd",
+                            "value": "vvvvv"
+                        }
+                    ]
+                }""", {"AAA": "asd"})
+        logging.disable(logging.NOTSET)
+
+    def testUnmatch(self):
+        logging.disable(logging.CRITICAL)
+        self.assertRaises(
+                ConfigTransformError, TransformJsonText, """{
+                    "a": "zzz",
+                    "b": [
+                        {
+                            "AAA": "zzz",
+                            "value": "vvvvv"
+                        }
+                    ]
+                }""", {"AAA": "asd"})
+        logging.disable(logging.NOTSET)
+
+    def testMatchFirst(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "AAA": "asd",
+                                    "value": "vvvvv1"
+                                },
+                                {
+                                    "AAA": "asd",
+                                    "value": "vvvvv2"
+                                }
+                            ]
+                        }""", {"AAA": "asd"}), {
+                                'a': 'zzz',
+                                'b': 'vvvvv1'
+                        })
+
+    def testMatchMid(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "AAA": "zzz",
+                                    "value": "vvvvv1"
+                                },
+                                {
+                                    "AAA": "asd",
+                                    "BBB": "jjj",
+                                    "value": "vvvvv2"
+                                },
+                                {
+                                    "AAA": "asd",
+                                    "BBB": "zxc",
+                                    "value": "vvvvv3"
+                                },
+                                {
+                                    "AAA": "asd",
+                                    "BBB": "zxc",
+                                    "CCC": "qwe",
+                                    "value": "vvvvv4"
+                                }
+                            ]
+                        }""", {
+                                "AAA": "asd",
+                                "BBB": "zxc",
+                                "CCC": "qwe"
+                        }), {
+                                'a': 'zzz',
+                                'b': 'vvvvv3'
+                        })
+
+    def testMatchLast(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "AAA": "zzz",
+                                    "value": "vvvvv1"
+                                },
+                                {
+                                    "AAA": "asd",
+                                    "BBB": "jjj",
+                                    "value": "vvvvv2"
+                                },
+                                {
+                                    "AAA": "asd",
+                                    "BBB": "zxc",
+                                    "CCC": "jjj",
+                                    "value": "vvvvv3"
+                                },
+                                {
+                                    "AAA": "asd",
+                                    "BBB": "zxc",
+                                    "CCC": "qwe",
+                                    "value": "vvvvv4"
+                                }
+                            ]
+                        }""", {
+                                "AAA": "asd",
+                                "BBB": "zxc",
+                                "CCC": "qwe"
+                        }), {
+                                'a': 'zzz',
+                                'b': 'vvvvv4'
+                        })
+
+    def testNested(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "AAA": "asd",
+                                    "value": [
+                                        {
+                                            "BBB": "zxc",
+                                            "value": [
+                                                {
+                                                    "CCC": "qwe",
+                                                    "value": "vvvvv4"
+                                                }
+                                            ]
+                                        }
+                                    ]
+                                }
+                            ]
+                        }""", {
+                                "AAA": "asd",
+                                "BBB": "zxc",
+                                "CCC": "qwe"
+                        }), {
+                                'a': 'zzz',
+                                'b': 'vvvvv4'
+                        })
+
+    def testRegex(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "AAA": "^a.*",
+                                    "value": "vvvvv"
+                                }
+                            ]
+                        }""", {"AAA": "asd"}), {
+                                'a': 'zzz',
+                                'b': 'vvvvv'
+                        })
+
+    def testRegexCase(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "AAA": "^A.*D$",
+                                    "value": "vvvvv"
+                                }
+                            ]
+                        }""", {"AAA": "asd"}), {
+                                'a': 'zzz',
+                                'b': 'vvvvv'
+                        })
+
+    def testVarExists(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "AAA": true,
+                                    "value": "aaa"
+                                },
+                                {
+                                    "value": "bbb"
+                                }
+                            ]
+                        }""", {"AAA": ""}), {
+                                'a': 'zzz',
+                                'b': 'aaa'
+                        })
+
+    def testVarExistsNot(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "BBB": true,
+                                    "value": "aaa"
+                                },
+                                {
+                                    "value": "bbb"
+                                }
+                            ]
+                        }""", {"AAA": ""}), {
+                                'a': 'zzz',
+                                'b': 'bbb'
+                        })
+
+    def testVarNotExists(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "AAA": false,
+                                    "value": "aaa"
+                                },
+                                {
+                                    "value": "bbb"
+                                }
+                            ]
+                        }""", {"AAA": ""}), {
+                                'a': 'zzz',
+                                'b': 'bbb'
+                        })
+
+    def testVarNotExistsNot(self):
+        self.assertDictEqual(
+                TransformJsonText(
+                        """{
+                            "a": "zzz",
+                            "b": [
+                                {
+                                    "BBB": false,
+                                    "value": "aaa"
+                                },
+                                {
+                                    "value": "bbb"
+                                }
+                            ]
+                        }""", {"AAA": ""}), {
+                                'a': 'zzz',
+                                'b': 'aaa'
+                        })
+
+    def testEmptyInput(self):
+        self.assertRaises(ValueError, TransformJsonText, '', {"qwe": "asd"})
+
+    def testMalformedJson(self):
+        self.assertRaises(ValueError, TransformJsonText, '{qwe',
+                          {"qwe": "asd"})
+
+    def testNonObjectTopLevelJson(self):
+        logging.disable(logging.CRITICAL)
+        self.assertRaises(ConfigTransformError, TransformJsonText, '[1, 2, 3]',
+                          {"qwe": "asd"})
+        logging.disable(logging.NOTSET)
+
+    def testNonObjectTopLevelJson2(self):
+        logging.disable(logging.CRITICAL)
+        self.assertRaises(ConfigTransformError, TransformJsonText, '"wwwww"',
+                          {"qwe": "asd"})
+        logging.disable(logging.NOTSET)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/client/common_lib/control_data.py b/client/common_lib/control_data.py
index 54d4f4f..47cea20 100644
--- a/client/common_lib/control_data.py
+++ b/client/common_lib/control_data.py
@@ -107,7 +107,10 @@
         self.attributes = set()
         self.max_result_size_KB = DEFAULT_MAX_RESULT_SIZE_KB
         self.priority = priorities.Priority.DEFAULT
-        self.fast = False
+        self.extended_timeout = None
+        self.fast = True
+        # This will only be honored via `test_that`, and not in lab (for now).
+        self.py_version = None
 
         _validate_control_file_fields(self.path, vars, raise_warnings)
 
@@ -317,6 +320,14 @@
     def set_attributes(self, val):
         self._set_set('attributes', val)
 
+    def set_extended_timeout(self, val):
+        """In seconds."""
+        self._set_int('extended_timeout', val)
+
+    def set_py_version(self, val):
+        """In majors, ie: 2 or 3."""
+        self._set_int('py_version', val)
+
 
 def _extract_const(expr):
     assert (expr.__class__ == ast.Str)
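For context on the two new ControlData fields above, a hypothetical control-file sketch follows. It assumes the usual autotest convention that an upper-case control variable such as EXTENDED_TIMEOUT is routed to the matching set_extended_timeout() setter; the test name and values are illustrative only, not part of this change.

    # Hypothetical control file sketch (not from this change).
    AUTHOR = 'someone@chromium.org'
    NAME = 'dummy_Pass.example'
    TEST_TYPE = 'client'
    PY_VERSION = 3           # major version only; honored via `test_that`, not in lab
    EXTENDED_TIMEOUT = 7200  # seconds

    job.run_test('dummy_Pass')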
diff --git a/client/common_lib/control_data_unittest.py b/client/common_lib/control_data_unittest.py
index a96d1ad..d9c7452 100755
--- a/client/common_lib/control_data_unittest.py
+++ b/client/common_lib/control_data_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # pylint: disable-msg=C0111
 
 from __future__ import absolute_import
diff --git a/client/common_lib/cros/adb_keepalive.py b/client/common_lib/cros/adb_keepalive.py
index cb27fbc..582cbf0 100755
--- a/client/common_lib/cros/adb_keepalive.py
+++ b/client/common_lib/cros/adb_keepalive.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/common_lib/cros/arc.py b/client/common_lib/cros/arc.py
index dc95751..805a68e 100644
--- a/client/common_lib/cros/arc.py
+++ b/client/common_lib/cros/arc.py
@@ -1,7 +1,12 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import collections
 import glob
 import logging
@@ -16,6 +21,7 @@
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import chrome, arc_common
+from six.moves import range
 
 _ADB_KEYS_PATH = '/tmp/adb_keys'
 _ADB_VENDOR_KEYS = 'ADB_VENDOR_KEYS'
@@ -26,7 +32,7 @@
 _SCREENSHOT_BASENAME = 'arc-screenshot'
 _MAX_SCREENSHOT_NUM = 10
 # This address should match the one present in
-# https://chromium.googlesource.com/chromiumos/overlays/chromiumos-overlay/+/master/chromeos-base/arc-sslh-init/files/sslh.conf
+# https://chromium.googlesource.com/chromiumos/overlays/chromiumos-overlay/+/main/chromeos-base/arc-sslh-init/files/sslh.conf
 _ADBD_ADDRESS = ('100.115.92.2', 5555)
 _ADBD_PID_PATH = '/run/arc/adbd.pid'
 _SDCARD_PID_PATH = '/run/arc/sdcard.pid'
@@ -69,16 +75,16 @@
     _android_shell('setprop sys.usb.config ' + config)
 
     def property_check():
-      return _android_shell('getprop sys.usb.state') == config
+        return _android_shell('getprop sys.usb.state') == config
 
     try:
-      utils.poll_for_condition(
-          condition=property_check,
-          desc='Wait for sys.usb.state',
-          timeout=timeout,
-          sleep_interval=_PROPERTY_CHECK_INTERVAL_SECONDS)
+        utils.poll_for_condition(
+                condition=property_check,
+                desc='Wait for sys.usb.state',
+                timeout=timeout,
+                sleep_interval=_PROPERTY_CHECK_INTERVAL_SECONDS)
     except utils.TimeoutError:
-      raise error.TestFail('Timed out waiting for sys.usb.state change')
+        raise error.TestFail('Timed out waiting for sys.usb.state change')
 
     _android_shell('setprop ctl.restart adbd')
 
@@ -173,7 +179,7 @@
 
     for i in range(attempt_count):
         if _restart_adb_and_wait_for_ready(timeout):
-          return
+            return
     raise error.TestFail(
             'Failed to connect to adb in %d seconds.' % initial_timeout)
 
@@ -204,7 +210,7 @@
 
         # First, collect some information and log it.
         arc_alive = is_android_container_alive()
-        arc_booted = _android_shell('getprop sys.boot_completed',
+        arc_booted = _android_shell('getprop ro.arc.boot_completed',
                                     ignore_status=True)
         arc_system_events = _android_shell(
             'logcat -d -b events *:S arc_system_event', ignore_status=True)
@@ -338,7 +344,7 @@
 
 
 def get_android_data_root():
-    """Returns path to Chrome OS directory that bind-mounts Android's /data."""
+    """Returns path to ChromeOS directory that bind-mounts Android's /data."""
     return _ANDROID_DATA_ROOT_PATH
 
 
@@ -434,7 +440,7 @@
         ignore_status=True)
     stats = output.split(' ')
     if len(stats) != len(mapping):
-      raise error.TestError('Unexpected output from stat: %s' % output)
+        raise error.TestError('Unexpected output from stat: %s' % output)
     _Stats = collections.namedtuple('_Stats', mapping.values())
     return _Stats(*stats)
 
@@ -483,7 +489,7 @@
     """Check if android container is alive."""
     try:
         container_pid = get_container_pid()
-    except Exception, e:
+    except Exception as e:
         logging.error('is_android_container_alive failed: %r', e)
         return False
     return utils.pid_is_alive(int(container_pid))
@@ -561,7 +567,7 @@
                           adb_shell('dumpsys activity recents',
                                     ignore_status=True))
         if not os.path.exists(_SCREENSHOT_DIR_PATH):
-            os.mkdir(_SCREENSHOT_DIR_PATH, 0755)
+            os.mkdir(_SCREENSHOT_DIR_PATH, 0o755)
         obj.num_screenshots += 1
         if obj.num_screenshots <= _MAX_SCREENSHOT_NUM:
             logging.warning('Iteration %d failed, taking a screenshot.',
@@ -633,7 +639,7 @@
         # mode.
         if device_mode == 'clamshell' and \
                 use_fake_sensor_with_lifetime_secs == 0:
-                    return
+            return
         raise err
 
 
@@ -695,7 +701,7 @@
         self.register_before_iteration_hook(_before_iteration_hook)
         self.register_after_iteration_hook(_after_iteration_hook)
         # Keep track of the number of debug screenshots taken and keep the
-        # total number sane to avoid issues.
+        # total number valid to avoid issues.
         self.num_screenshots = 0
 
     def initialize(self, extension_path=None, username=None, password=None,
@@ -862,7 +868,7 @@
 
         # Install apks based on dep_packages/apks/full_pkg_names tuples
         if dep_packages:
-            for i in xrange(len(dep_packages)):
+            for i in range(len(dep_packages)):
                 self._install_apks(dep_packages[i], apks[i], full_pkg_names[i])
 
         if self.uiautomator:
diff --git a/client/common_lib/cros/arc_common.py b/client/common_lib/cros/arc_common.py
index b8507c9..fc87e2a 100644
--- a/client/common_lib/cros/arc_common.py
+++ b/client/common_lib/cros/arc_common.py
@@ -85,7 +85,8 @@
 
     def _is_android_booted():
         output = utils.system_output(
-            'android-sh -c "getprop sys.boot_completed"', ignore_status=True)
+                'android-sh -c "getprop ro.arc.boot_completed"',
+                ignore_status=True)
         return output.strip() == '1'
 
     logging.info('Waiting for Android to boot completely.')
diff --git a/client/common_lib/cros/arc_util.py b/client/common_lib/cros/arc_util.py
index 2b24fbc..f16fa03 100644
--- a/client/common_lib/cros/arc_util.py
+++ b/client/common_lib/cros/arc_util.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -258,7 +259,7 @@
     try:
         extension_pages = browser.extensions.GetByExtensionId(
             opt_in_extension_id)
-    except Exception, e:
+    except Exception as e:
         raise error.TestFail('Could not locate extension for arc opt-in. '
                              'Make sure disable_default_apps is False. '
                              '"%s".' % e)
@@ -279,7 +280,7 @@
         for condition in js_code_did_start_conditions:
             extension_main_page.WaitForJavaScriptCondition(condition,
                                                            timeout=60)
-    except Exception, e:
+    except Exception as e:
         raise error.TestError('Error waiting for "%s": "%s".' % (condition, e))
 
     return extension_main_page
@@ -299,7 +300,7 @@
     try:
         extension_main_page.WaitForJavaScriptCondition('!appWindow',
                                                        timeout=_SIGN_IN_TIMEOUT)
-    except Exception, e:
+    except Exception as e:
         js_read_error_message = """
             err = appWindow.contentWindow.document.getElementById(
                     "error-message");
diff --git a/client/common_lib/cros/authpolicy.py b/client/common_lib/cros/authpolicy.py
index 2d2e359..52d446d 100644
--- a/client/common_lib/cros/authpolicy.py
+++ b/client/common_lib/cros/authpolicy.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,10 +6,15 @@
 Wrapper for D-Bus calls to the AuthPolicy daemon.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import os
 import sys
 
+import common
 import dbus
 
 from autotest_lib.client.common_lib import error
@@ -287,11 +293,11 @@
         def __enter__(self):
             """Creates the password file descriptor."""
             self._read_fd, write_fd = os.pipe()
-            os.write(write_fd, self._password)
+            os.write(write_fd, self._password.encode('utf-8'))
             os.close(write_fd)
             return self._read_fd
 
-        def __exit__(self, mytype, value, traceback):
+        def __exit__(self, my_type, value, traceback):
             """Closes the password file descriptor again."""
             if self._read_fd:
                 os.close(self._read_fd)
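The encode() added above reflects that Python 3's os.write() accepts only bytes. A minimal standalone sketch of the same pipe pattern (the password value is made up):

    import os

    # Minimal sketch, not autotest code: write a str password through a pipe
    # by encoding it to bytes first, mirroring the change above.
    read_fd, write_fd = os.pipe()
    os.write(write_fd, 'hunter2'.encode('utf-8'))
    os.close(write_fd)
    print(os.read(read_fd, 64).decode('utf-8'))  # -> hunter2
    os.close(read_fd)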
diff --git a/client/common_lib/cros/avahi_utils.py b/client/common_lib/cros/avahi_utils.py
index 69e068f..af52e38 100644
--- a/client/common_lib/cros/avahi_utils.py
+++ b/client/common_lib/cros/avahi_utils.py
@@ -1,8 +1,9 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import ConfigParser
+import six.moves.configparser
 import io
 import collections
 import logging
@@ -40,7 +41,7 @@
     """
     run = utils.run if host is None else host.run
     existing_config = run('cat %s 2> /dev/null' % src_file).stdout
-    conf = ConfigParser.SafeConfigParser()
+    conf = six.moves.configparser.SafeConfigParser()
     conf.readfp(io.BytesIO(existing_config))
 
     for section, option, value in options:
diff --git a/client/common_lib/cros/bluetooth/OWNERS b/client/common_lib/cros/bluetooth/OWNERS
new file mode 100644
index 0000000..3c5c8a3
--- /dev/null
+++ b/client/common_lib/cros/bluetooth/OWNERS
@@ -0,0 +1 @@
+include /BLUETOOTH_OWNERS
diff --git a/client/common_lib/cros/bluetooth/bluetooth_gatt_server.py b/client/common_lib/cros/bluetooth/bluetooth_gatt_server.py
index 8203ab2..f584a60 100644
--- a/client/common_lib/cros/bluetooth/bluetooth_gatt_server.py
+++ b/client/common_lib/cros/bluetooth/bluetooth_gatt_server.py
@@ -1,3 +1,5 @@
+# Lint as: python2, python3
+
 import asyncore
 import btsocket
 import struct
diff --git a/client/common_lib/cros/bluetooth/bluetooth_quick_tests_base.py b/client/common_lib/cros/bluetooth/bluetooth_quick_tests_base.py
new file mode 100644
index 0000000..b852fca
--- /dev/null
+++ b/client/common_lib/cros/bluetooth/bluetooth_quick_tests_base.py
@@ -0,0 +1,449 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+This module provides base wrapper functions for Bluetooth quick tests.
+"""
+
+import functools
+import logging
+
+from autotest_lib.client.common_lib import error
+
+
+class BluetoothQuickTestsBase(object):
+    """Provides base helper functions for Bluetooth quick test batches/packages.
+
+    The Bluetooth quick test infrastructure provides a way to quickly run a set
+    of tests. As of today, autotest ramp-up time per test is about 90-120
+    seconds, whereas a typical Bluetooth test may take ~30-60 seconds to run.
+
+    The quick test infra, implemented in this class, saves this huge overhead
+    by running only the minimal reset and cleanup operations required between
+    each set of tests (takes a few seconds).
+
+    This class provides wrapper functions to start and end a test, a batch or a
+    package. A batch is defined as a set of tests, preferably with a common
+    subject. A package is a set of batches.
+    This class tracks test, batch, and package results, and prints out
+    result summaries. The class also resets and cleans up the required
+    state between tests, batches, and packages.
+
+    A batch can also run as a separate autotest. There is a placeholder to
+    add a way to run a specific test of a batch autonomously.
+
+    A batch can be implemented by inheriting from this class, and using its
+    wrapper functions. A package can be implemented by inheriting from a set of
+    batches.
+
+    Adding a test to one of the batches is as easy as adding a method to the
+    class of the batch.
+    """
+
+    # Some delay is needed between tests. TODO(yshavit): investigate and remove
+    TEST_SLEEP_SECS = 3
+
+    def _print_delimiter(self):
+        logging.info('=======================================================')
+
+    def quick_test_init(self, flag='Quick Health'):
+        """Inits the quick test."""
+
+        self.flag = flag
+        self.test_iter = None
+
+        self.bat_tests_results = []
+        self.bat_pass_count = 0
+        self.bat_fail_count = 0
+        self.bat_testna_count = 0
+        self.bat_warn_count = 0
+        self.bat_name = None
+        self.bat_iter = None
+
+        self.pkg_tests_results = []
+        self.pkg_pass_count = 0
+        self.pkg_fail_count = 0
+        self.pkg_testna_count = 0
+        self.pkg_warn_count = 0
+        self.pkg_name = None
+        self.pkg_iter = None
+        self.pkg_is_running = False
+
+    def quick_test_get_model_name(self):
+        """This method should be implemented by children classes.
+
+        The ways to get the model names are different between server and client
+        sides. The derived class should provide the method to get the info.
+        """
+        raise NotImplementedError
+
+    def quick_test_get_chipset_name(self):
+        """This method should be implemented by children classes.
+
+        The ways to get the chipset names are different between server and
+        client sides. The derived class should provide the method to get the
+        info.
+        """
+        raise NotImplementedError
+
+    @staticmethod
+    def quick_test_test_decorator(test_name,
+                                  flags=None,
+                                  check_runnable_func=None,
+                                  pretest_func=None,
+                                  posttest_func=None,
+                                  model_testNA=None,
+                                  model_testWarn=None,
+                                  skip_models=None,
+                                  skip_chipsets=None,
+                                  skip_common_errors=False):
+        """A decorator providing a wrapper to a quick test.
+
+        Using the decorator a test method can implement only the core
+        test and let the decorator handle the quick test wrapper methods
+        (reset/cleanup/logging).
+
+        @param test_name: The name of the test to log.
+        @param flags: List of string to describe who should run the test. The
+                      string could be one of the following:
+                          ['AVL', 'Quick Health', 'All'].
+        @param check_runnable_func: A function that accepts a bluetooth quick
+                       test instance as argument. If not None and it returns
+                       False, the test exits early without failure.
+        @param pretest_func: A function that accepts a bluetooth quick test
+                       instance as argument. If not None, the function is run
+                       right before the test method.
+        @param posttest_func: A function that accepts a bluetooth quick test
+                        instance as argument. If not None, the function is run
+                        after the test summary is logged. Note that an
+                        exception raised from this function is NOT caught by
+                        the decorator.
+        @param model_testNA: If the current platform is in this list, failures
+                             are emitted as TestNAError.
+        @param model_testWarn: If the current platform is in this list, failures
+                               are emitted as TestWarn.
+        @param skip_models: Raises TestNA on these models and doesn't attempt to
+                            run the tests.
+        @param skip_chipsets: Raises TestNA on these chipsets and doesn't attempt
+                              to run the tests.
+        @param skip_common_errors: If the test encounters a common error (such
+                                   as USB disconnect or daemon crash), mark the
+                                   test as TESTNA instead. USE THIS SPARINGLY,
+                                   it may mask bugs. This is available for tests
+                                   that require state to be properly retained
+                                   throughout the whole test (i.e. advertising)
+                                   and any outside failure will cause the test
+                                   to fail.
+        """
+
+        if flags is None:
+            flags = ['All']
+        if model_testNA is None:
+            model_testNA = []
+        if model_testWarn is None:
+            model_testWarn = []
+        if skip_models is None:
+            skip_models = []
+        if skip_chipsets is None:
+            skip_chipsets = []
+
+        def decorator(test_method):
+            """A decorator wrapper of the decorated test_method.
+
+            @param test_method: The test method being decorated.
+
+            @return: The wrapper of the test method.
+            """
+
+            @functools.wraps(test_method)
+            def wrapper(self):
+                """A wrapper of the decorated method."""
+
+                # Set test name before exiting so batches correctly identify
+                # failing tests
+                self.test_name = test_name
+
+                # Reset failure info before running any check, so
+                # quick_test_test_log_results() can judge the result correctly.
+                self.fails = []
+                self.had_known_common_failure = False
+
+                # Check that the test is runnable in current setting
+                if not (self.flag in flags or 'All' in flags):
+                    logging.info('SKIPPING TEST %s', test_name)
+                    logging.info('flag %s not in %s', self.flag, flags)
+                    self._print_delimiter()
+                    return
+
+                if check_runnable_func and not check_runnable_func(self):
+                    return
+
+                try:
+                    model = self.quick_test_get_model_name()
+                    if model in skip_models:
+                        logging.info('SKIPPING TEST %s', test_name)
+                        raise error.TestNAError(
+                                'Test not supported on this model')
+
+                    chipset = self.quick_test_get_chipset_name()
+                    logging.debug('Bluetooth module name is %s', chipset)
+                    if chipset in skip_chipsets:
+                        logging.info('SKIPPING TEST %s on chipset %s',
+                                     test_name, chipset)
+                        raise error.TestNAError(
+                                'Test not supported on this chipset')
+
+                    if pretest_func:
+                        pretest_func(self)
+
+                    self._print_delimiter()
+                    logging.info('Starting test: %s', test_name)
+
+                    test_method(self)
+                except error.TestError as e:
+                    fail_msg = '[--- error {} ({})]'.format(
+                            test_method.__name__, str(e))
+                    logging.error(fail_msg)
+                    self.fails.append(fail_msg)
+                except error.TestFail as e:
+                    fail_msg = '[--- failed {} ({})]'.format(
+                            test_method.__name__, str(e))
+                    logging.error(fail_msg)
+                    self.fails.append(fail_msg)
+                except error.TestNAError as e:
+                    fail_msg = '[--- SKIPPED {} ({})]'.format(
+                            test_method.__name__, str(e))
+                    logging.error(fail_msg)
+                    self.fails.append(fail_msg)
+                except Exception as e:
+                    fail_msg = '[--- unknown error {} ({})]'.format(
+                            test_method.__name__, str(e))
+                    logging.exception(fail_msg)
+                    self.fails.append(fail_msg)
+
+                self.quick_test_test_log_results(
+                        model_testNA=model_testNA,
+                        model_testWarn=model_testWarn,
+                        skip_common_errors=skip_common_errors)
+
+                if posttest_func:
+                    posttest_func(self)
+
+            return wrapper
+
+        return decorator
+
+    def quick_test_test_log_results(self,
+                                    model_testNA=None,
+                                    model_testWarn=None,
+                                    skip_common_errors=False):
+        """Logs and tracks the test results."""
+
+        if model_testNA is None:
+            model_testNA = []
+        if model_testWarn is None:
+            model_testWarn = []
+
+        result_msgs = []
+        model = self.quick_test_get_model_name()
+
+        if self.test_iter is not None:
+            result_msgs += ['Test Iter: ' + str(self.test_iter)]
+
+        if self.bat_iter is not None:
+            result_msgs += ['Batch Iter: ' + str(self.bat_iter)]
+
+        if self.pkg_is_running is True:
+            result_msgs += ['Package iter: ' + str(self.pkg_iter)]
+
+        if self.bat_name is not None:
+            result_msgs += ['Batch Name: ' + self.bat_name]
+
+        if self.test_name is not None:
+            result_msgs += ['Test Name: ' + self.test_name]
+
+        result_msg = ", ".join(result_msgs)
+
+        if not bool(self.fails):
+            result_msg = 'PASSED | ' + result_msg
+            self.bat_pass_count += 1
+            self.pkg_pass_count += 1
+        # The test should be marked as TESTNA if any of the test expressions
+        # were SKIPPED (they threw their own TESTNA error) or the model is in
+        # the list of NA models (so any failure is considered NA instead)
+        elif model in model_testNA or any(['SKIPPED' in x
+                                           for x in self.fails]):
+            result_msg = 'TESTNA | ' + result_msg
+            self.bat_testna_count += 1
+            self.pkg_testna_count += 1
+        elif model in model_testWarn:
+            result_msg = 'WARN   | ' + result_msg
+            self.bat_warn_count += 1
+            self.pkg_warn_count += 1
+        # Some tests may fail due to known common failure reasons (like usb
+        # disconnect during suspend, bluetoothd crashes, etc). Skip those tests
+        # with TESTNA when that happens.
+        #
+        # This should be used sparingly because it may hide legitimate errors.
+        elif bool(self.had_known_common_failure) and skip_common_errors:
+            result_msg = 'TESTNA | ' + result_msg
+            self.bat_testna_count += 1
+            self.pkg_testna_count += 1
+        else:
+            result_msg = 'FAIL   | ' + result_msg
+            self.bat_fail_count += 1
+            self.pkg_fail_count += 1
+
+        logging.info(result_msg)
+        self._print_delimiter()
+        self.bat_tests_results.append(result_msg)
+        self.pkg_tests_results.append(result_msg)
+
+    @staticmethod
+    def quick_test_batch_decorator(batch_name):
+        """A decorator providing a wrapper to a batch.
+
+        Using the decorator, a test batch method can implement only its core
+        test invocations and let the decorator handle the wrapper, which takes
+        care of whether to run a specific test or the batch as a whole, and of
+        running the batch in iterations.
+
+        @param batch_name: The name of the batch to log.
+        """
+
+        def decorator(batch_method):
+            """A decorator wrapper of the decorated test_method.
+
+            @param test_method: The test method being decorated.
+            @return: The wrapper of the test method.
+            """
+
+            @functools.wraps(batch_method)
+            def wrapper(self, num_iterations=1, test_name=None):
+                """A wrapper of the decorated method.
+
+                @param num_iterations: How many iterations to run.
+                @param test_name: Specific test to run otherwise None to run the
+                                  whole batch.
+                """
+
+                if test_name is not None:
+                    single_test_method = getattr(self, test_name)
+                    for iter in range(1, num_iterations + 1):
+                        self.test_iter = iter
+                        single_test_method()
+
+                    if self.fails:
+                        # If failure is marked as TESTNA, prioritize that over
+                        # a failure. Same with WARN.
+                        if self.bat_testna_count > 0:
+                            raise error.TestNAError(self.fails)
+                        elif self.bat_warn_count > 0:
+                            raise error.TestWarn(self.fails)
+                        else:
+                            raise error.TestFail(self.fails)
+                else:
+                    for iter in range(1, num_iterations + 1):
+                        self.quick_test_batch_start(batch_name, iter)
+                        batch_method(self, num_iterations, test_name)
+                        self.quick_test_batch_end()
+
+            return wrapper
+
+        return decorator
+
+    def quick_test_batch_start(self, bat_name, iteration=1):
+        """Clears and sets test batch variables."""
+
+        self.bat_tests_results = []
+        self.bat_pass_count = 0
+        self.bat_fail_count = 0
+        self.bat_testna_count = 0
+        self.bat_warn_count = 0
+        self.bat_name = bat_name
+        self.bat_iter = iteration
+
+    def quick_test_batch_end(self):
+        """Prints results summary of a test batch."""
+
+        logging.info(
+                '%s Test Batch Summary: total pass %d, total fail %d, '
+                'warn %d, NA %d', self.bat_name, self.bat_pass_count,
+                self.bat_fail_count, self.bat_warn_count,
+                self.bat_testna_count)
+        for result in self.bat_tests_results:
+            logging.info(result)
+        self._print_delimiter()
+        if self.bat_fail_count > 0:
+            logging.error('===> Test Batch Failed! One or more failures')
+            self._print_delimiter()
+            if self.pkg_is_running is False:
+                raise error.TestFail(self.bat_tests_results)
+        elif self.bat_testna_count > 0:
+            logging.error('===> Test Batch Passed! Some TestNA results')
+            self._print_delimiter()
+            if self.pkg_is_running is False:
+                raise error.TestNAError(self.bat_tests_results)
+        elif self.bat_warn_count > 0:
+            logging.error('===> Test Batch Passed! Some WARN results')
+            self._print_delimiter()
+            if self.pkg_is_running is False:
+                raise error.TestWarn(self.bat_tests_results)
+        else:
+            logging.info('===> Test Batch Passed! zero failures')
+            self._print_delimiter()
+
+    def quick_test_package_start(self, pkg_name):
+        """Clears and sets test package variables."""
+
+        self.pkg_tests_results = []
+        self.pkg_pass_count = 0
+        self.pkg_fail_count = 0
+        self.pkg_name = pkg_name
+        self.pkg_is_running = True
+
+    def quick_test_print_summary(self):
+        """Prints results summary of a batch."""
+
+        logging.info(
+                '%s Test Package Summary: total pass %d, total fail %d, '
+                'Warn %d, NA %d', self.pkg_name, self.pkg_pass_count,
+                self.pkg_fail_count, self.pkg_warn_count,
+                self.pkg_testna_count)
+        for result in self.pkg_tests_results:
+            logging.info(result)
+        self._print_delimiter()
+
+    def quick_test_package_update_iteration(self, iteration):
+        """Updates state and prints log per package iteration.
+
+        Must be called to have a proper package test result tracking.
+        """
+
+        self.pkg_iter = iteration
+        if self.pkg_name is None:
+            logging.error('Error: no quick package is running')
+            raise error.TestFail('Error: no quick package is running')
+        logging.info('Starting %s Test Package iteration %d', self.pkg_name,
+                     iteration)
+
+    def quick_test_package_end(self):
+        """Prints final result of a test package."""
+
+        if self.pkg_fail_count > 0:
+            logging.error('===> Test Package Failed! One or more failures')
+            self._print_delimiter()
+            raise error.TestFail(self.pkg_tests_results)
+        elif self.pkg_testna_count > 0:
+            logging.error('===> Test Package Passed! Some TestNA results')
+            self._print_delimiter()
+            raise error.TestNAError(self.pkg_tests_results)
+        elif self.pkg_warn_count > 0:
+            logging.error('===> Test Package Passed! Some WARN results')
+            self._print_delimiter()
+            raise error.TestWarn(self.pkg_tests_results)
+        else:
+            logging.info('===> Test Package Passed! zero failures')
+            self._print_delimiter()
+        self.pkg_is_running = False
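To illustrate how the new base class above is intended to be used, here is a minimal hypothetical batch. The class name, model/chipset strings, and test body are made up; only the decorators, the abstract methods, and the wrapper calls come from the file above.

    import logging

    from autotest_lib.client.common_lib.cros.bluetooth.bluetooth_quick_tests_base \
            import BluetoothQuickTestsBase


    class BluetoothExampleBatch(BluetoothQuickTestsBase):
        """A hypothetical batch built on the quick test base (sketch only)."""

        def quick_test_get_model_name(self):
            return 'fake-model'          # a real batch queries the DUT

        def quick_test_get_chipset_name(self):
            return 'Intel-AX201'         # a real batch queries the DUT

        @BluetoothQuickTestsBase.quick_test_test_decorator('example_pass_test')
        def example_pass_test(self):
            pass                         # the core test body goes here

        @BluetoothQuickTestsBase.quick_test_batch_decorator('Example Batch')
        def example_batch_run(self, num_iterations=1, test_name=None):
            self.example_pass_test()


    logging.basicConfig(level=logging.INFO)
    batch = BluetoothExampleBatch()
    batch.quick_test_init(flag='Quick Health')
    batch.example_batch_run(num_iterations=1)

A real batch would be driven by a control file rather than instantiated directly; the direct calls here are only to show the init/decorator flow.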
diff --git a/client/common_lib/cros/bluetooth/bluetooth_sdp_socket.py b/client/common_lib/cros/bluetooth/bluetooth_sdp_socket.py
index 0de577f..4791278 100644
--- a/client/common_lib/cros/bluetooth/bluetooth_sdp_socket.py
+++ b/client/common_lib/cros/bluetooth/bluetooth_sdp_socket.py
@@ -1,12 +1,18 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import socket
 import struct
 
 import btsocket
+from six.moves import range
 
 SDP_HDR_FORMAT        = '>BHH'
 SDP_HDR_SIZE          = struct.calcsize(SDP_HDR_FORMAT)
@@ -215,7 +221,7 @@
             header = struct.pack('>BI', SDP_SEQ32, size)
         else:
             raise BluetoothSDPSocketError('List is too long')
-        return header + data_element_list
+        return header + data_element_list.encode('utf-8')
 
 
     def _pack_uuids(self, uuids, preferred_size):
@@ -317,7 +323,7 @@
         handles = []
 
         while True:
-            request = pattern + cont_state
+            request = pattern + cont_state.encode('utf-8')
 
             # Request without any continuation state is an example of invalid
             # request syntax.
@@ -484,8 +490,8 @@
         complete_response = ''
 
         while True:
-            request = (invalid_request if invalid_request
-                       else pattern + cont_state)
+            request = (invalid_request if invalid_request else pattern +
+                       cont_state.encode('utf-8'))
 
             code, response = self.send_request_and_wait(
                     SDP_SVC_ATTR_REQ, request, forced_pdu_size)
@@ -552,7 +558,7 @@
         complete_response = ''
 
         while True:
-            request = pattern + cont_state
+            request = pattern + cont_state.encode('utf-8')
             if invalid_request:
                 request = invalid_request + request
 
diff --git a/client/common_lib/cros/bluetooth/bluetooth_socket.py b/client/common_lib/cros/bluetooth/bluetooth_socket.py
index 7e02d68..33f3d8a 100644
--- a/client/common_lib/cros/bluetooth/bluetooth_socket.py
+++ b/client/common_lib/cros/bluetooth/bluetooth_socket.py
@@ -1,13 +1,18 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import array
 import btsocket
 import fcntl
 import logging
 import socket
 import struct
+import six
 
 
 # Constants from lib/mgmt.h in BlueZ source
@@ -204,13 +209,14 @@
     while pos < len(eirdata):
         # Byte at the current position is the field length, which should be
         # zero at the end of the structure.
-        (field_len,) = struct.unpack('B', buffer(eirdata, pos, 1))
+        (field_len, ) = struct.unpack('B', memoryview(eirdata)[pos:pos + 1])
         if field_len == 0:
             break
         # Next byte is the field type, and the rest of the field is the data.
         # Note that the length field doesn't include itself so that's why the
         # offsets and lengths look a little odd.
-        (field_type,) = struct.unpack('B', buffer(eirdata, pos + 1, 1))
+        (field_type, ) = struct.unpack('B',
+                                       memoryview(eirdata)[pos + 1:pos + 2])
         data = eirdata[pos+2:pos+field_len+1]
         pos += field_len + 1
         # Parse the individual fields to make the data meaningful.
@@ -258,7 +264,7 @@
         self.events = []
 
 
-    def send_command(self, code, index, data=''):
+    def send_command(self, code, index, data=b''):
         """Send a command to the socket.
 
         To send a command, wait for the reply event, and parse it use
@@ -303,14 +309,17 @@
             raise BluetoothInvalidPacketError('Packet shorter than header')
 
         # Parse the header
-        (event, index, length) = struct.unpack_from('<HHH', buffer(hdr))
+        (event, index, length) = struct.unpack_from('<HHH', memoryview(hdr))
         if nbytes < MGMT_HDR_SIZE + length:
             raise BluetoothInvalidPacketError('Packet shorter than length')
 
         return (event, index, data[:length])
 
 
-    def send_command_and_wait(self, cmd_code, cmd_index, cmd_data='',
+    def send_command_and_wait(self,
+                              cmd_code,
+                              cmd_index,
+                              cmd_data=b'',
                               expected_length=None):
         """Send a command to the socket and wait for the reply.
 
@@ -343,7 +352,8 @@
                             ('Incorrect command complete event data length: ' +
                              '%d (expected at least 3)' % len(data)))
 
-                (code, status) = struct.unpack_from('<HB', buffer(data, 0, 3))
+                (code, status) = struct.unpack_from('<HB',
+                                                    memoryview(data)[0:3])
                 logging.debug('[0x%04x] command 0x%04x complete: 0x%02x',
                               index, code, status)
 
@@ -372,7 +382,8 @@
                             ('Incorrect command status event data length: ' +
                              '%d (expected 3)' % len(data)))
 
-                (code, status) = struct.unpack_from('<HB', buffer(data, 0, 3))
+                (code, status) = struct.unpack_from('<HB',
+                                                    memoryview(data)[0:3])
                 logging.debug('[0x%04x] command 0x%02x status: 0x%02x',
                               index, code, status)
 
@@ -389,7 +400,7 @@
                         ('Incorrect controller error event data length: ' +
                          '%d (expected 1)' % len(data)))
 
-                (error_code) = struct.unpack_from('<B', buffer(data, 0, 1))
+                (error_code) = struct.unpack_from('<B', memoryview(data)[0:1])
 
                 raise BluetoothControllerError('Controller error: %d' %
                                                error_code)
@@ -428,7 +439,8 @@
                             ('Incorrect command complete event data length: ' +
                              '%d (expected at least 3)' % len(data)))
 
-                (code, status) = struct.unpack_from('<HB', buffer(data, 0, 3))
+                (code, status) = struct.unpack_from('<HB',
+                                                    memoryview(data)[0:3])
                 logging.debug('[0x%04x] command 0x%04x complete: 0x%02x '
                               '(Ignored)', index, code, status)
 
@@ -438,7 +450,8 @@
                             ('Incorrect command status event data length: ' +
                              '%d (expected 3)' % len(data)))
 
-                (code, status) = struct.unpack_from('<HB', buffer(data, 0, 3))
+                (code, status) = struct.unpack_from('<HB',
+                                                    memoryview(data)[0:3])
                 logging.debug('[0x%04x] command 0x%02x status: 0x%02x '
                               '(Ignored)', index, code, status)
 
@@ -448,7 +461,7 @@
                         ('Incorrect controller error event data length: ' +
                          '%d (expected 1)' % len(data)))
 
-                (error_code) = struct.unpack_from('<B', buffer(data, 0, 1))
+                (error_code) = struct.unpack_from('<B', memoryview(data)[0:1])
                 logging.debug('[0x%04x] controller error: %d (Ignored)',
                               index, error_code)
 
@@ -491,7 +504,7 @@
         if status != MGMT_STATUS_SUCCESS:
             return None
 
-        (version, revision) = struct.unpack_from('<BH', buffer(data))
+        (version, revision) = struct.unpack_from('<BH', memoryview(data))
         return (version, revision)
 
 
@@ -511,7 +524,7 @@
                     ('Incorrect length of data for response: ' +
                      '%d (expected at least 4)' % len(data)))
 
-        (ncommands, nevents) = struct.unpack_from('<HH', buffer(data, 0, 4))
+        (ncommands, nevents) = struct.unpack_from('<HH', memoryview(data)[0:4])
         offset = 4
         expected_length = offset + (ncommands * 2) + (nevents * 2)
         if len(data) != expected_length:
@@ -521,12 +534,16 @@
 
         commands = []
         while len(commands) < ncommands:
-            commands.extend(struct.unpack_from('<H', buffer(data, offset, 2)))
+            commands.extend(
+                    struct.unpack_from('<H',
+                                       memoryview(data)[offset:offset + 2]))
             offset += 2
 
         events = []
         while len(events) < nevents:
-            events.extend(struct.unpack_from('<H', buffer(data, offset, 2)))
+            events.extend(
+                    struct.unpack_from('<H',
+                                       memoryview(data)[offset:offset + 2]))
             offset += 2
 
         return (commands, events)
@@ -548,7 +565,7 @@
                     ('Incorrect length of data for response: ' +
                      '%d (expected at least 2)' % len(data)))
 
-        (nindexes,) = struct.unpack_from('<H', buffer(data, 0, 2))
+        (nindexes, ) = struct.unpack_from('<H', memoryview(data)[0:2])
         offset = 2
         expected_length = offset + (nindexes * 2)
         if len(data) != expected_length:
@@ -558,7 +575,9 @@
 
         indexes = []
         while len(indexes) < nindexes:
-            indexes.extend(struct.unpack_from('<H', buffer(data, offset, 2)))
+            indexes.extend(
+                    struct.unpack_from('<H',
+                                       memoryview(data)[offset:offset + 2]))
             offset += 2
 
         return indexes
@@ -585,24 +604,19 @@
         if status != MGMT_STATUS_SUCCESS:
             return None
 
-        (address, bluetooth_version, manufacturer,
-         supported_settings, current_settings,
-         class_of_device_lo, class_of_device_mid, class_of_device_hi,
-         name, short_name) = struct.unpack_from(
-                '<6sBHLL3B249s11s',
-                buffer(data))
+        (address, bluetooth_version, manufacturer, supported_settings,
+         current_settings, class_of_device_lo, class_of_device_mid,
+         class_of_device_hi, name,
+         short_name) = struct.unpack_from('<6sBHLL3B249s11s', memoryview(data))
 
-        return (
-                ':'.join('%02X' % x
+        return (':'.join('%02X' % x
                          for x in reversed(struct.unpack('6B', address))),
-                bluetooth_version,
-                manufacturer,
-                supported_settings,
+                bluetooth_version, manufacturer, supported_settings,
                 current_settings,
-                (class_of_device_lo |(class_of_device_mid << 8) |
-                        (class_of_device_hi << 16)),
-                name.rstrip('\0'),
-                short_name.rstrip('\0'))
+                (class_of_device_lo |
+                 (class_of_device_mid << 8) | (class_of_device_hi << 16)),
+                six.ensure_text(name).rstrip('\0'),
+                six.ensure_text(short_name).rstrip('\0'))
 
 
     def set_powered(self, index, powered):
@@ -623,7 +637,7 @@
         if status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -651,7 +665,7 @@
         elif status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -676,7 +690,7 @@
         elif status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -716,7 +730,7 @@
                     ('Incorrect length of data for response: ' +
                      '%d (expected 4)' % len(data)))
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -739,7 +753,7 @@
         if status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -767,7 +781,7 @@
         elif status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -792,7 +806,7 @@
         elif status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -817,7 +831,7 @@
         elif status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -842,7 +856,7 @@
         elif status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -874,7 +888,7 @@
             return None
 
         (class_of_device_lo, class_of_device_mid,
-         class_of_device_hi) = struct.unpack_from('<3B', buffer(data))
+         class_of_device_hi) = struct.unpack_from('<3B', memoryview(data))
         return (class_of_device_lo |(class_of_device_mid << 8) |
                 (class_of_device_hi << 16))
 
@@ -891,7 +905,8 @@
         """
         # Truncate the provided parameters and then zero-pad using struct
         # so we pass a fixed-length null-terminated string to the kernel.
-        msg_data = struct.pack('<249s11s', name[:248], short_name[:10])
+        msg_data = struct.pack('<249s11s', six.ensure_binary(name[:248]),
+                               six.ensure_binary(short_name[:10]))
         (status, data) = self.send_command_and_wait(
                 MGMT_OP_SET_LOCAL_NAME,
                 index,
@@ -900,8 +915,9 @@
         if status != MGMT_STATUS_SUCCESS:
             return None
 
-        (name, short_name) = struct.unpack_from('<249s11s', buffer(data))
-        return (name.rstrip('\0'), short_name.rstrip('\0'))
+        (name, short_name) = struct.unpack_from('<249s11s', memoryview(data))
+        return (six.ensure_text(name).rstrip('\0'),
+                six.ensure_text(short_name).rstrip('\0'))
 
 
     def start_discovery(self, index, address_type):
@@ -925,7 +941,7 @@
         if status != MGMT_STATUS_SUCCESS:
             return None
 
-        (address_type,) = struct.unpack_from('<B', buffer(data))
+        (address_type, ) = struct.unpack_from('<B', memoryview(data))
         return address_type
 
 
@@ -952,7 +968,7 @@
         if status != MGMT_STATUS_SUCCESS:
             return None
 
-        (address_type,) = struct.unpack_from('<B', buffer(data))
+        (address_type, ) = struct.unpack_from('<B', memoryview(data))
         return address_type
 
 
@@ -987,7 +1003,7 @@
                              '%d (expected 2)' % len(data)))
 
                 (address_type,
-                 discovering) = struct.unpack_from('<BB', buffer(data))
+                 discovering) = struct.unpack_from('<BB', memoryview(data))
 
             elif event == MGMT_EV_DEVICE_FOUND:
                 if len(data) < 14:
@@ -995,9 +1011,9 @@
                             ('Incorrect device found event data length: ' +
                              '%d (expected at least 14)' % len(data)))
 
-                (address, address_type, rssi,
-                 flags, eir_len) = struct.unpack_from('<6sBbLH',
-                                                      buffer(data, 0, 14))
+                (address, address_type, rssi, flags,
+                 eir_len) = struct.unpack_from('<6sBbLH',
+                                               memoryview(data)[0:14])
 
                 if len(data) != 14 + eir_len:
                     raise BluetoothInvalidPacketError(
@@ -1057,7 +1073,7 @@
         elif status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -1082,7 +1098,7 @@
         elif status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -1107,7 +1123,7 @@
         elif status != MGMT_STATUS_SUCCESS:
             return None
 
-        (current_settings, ) = struct.unpack_from('<L', buffer(data))
+        (current_settings, ) = struct.unpack_from('<L', memoryview(data))
         return current_settings
 
 
@@ -1123,7 +1139,7 @@
                 None on failure.
 
         """
-        msg_data = struct.pack('<6sBB', address, address_type, action)
+        msg_data = struct.pack('<6sBB', address.encode(), address_type, action)
         (status, data) = self.send_command_and_wait(
                 MGMT_OP_ADD_DEVICE,
                 index,
@@ -1132,8 +1148,11 @@
         if status != MGMT_STATUS_SUCCESS:
             return None
 
-        (address, address_type,) = struct.unpack_from('<6sB', buffer(data))
-        return (address, address_type)
+        (
+                address,
+                address_type,
+        ) = struct.unpack_from('<6sB', memoryview(data))
+        return (address.decode(), address_type)
 
 
     def remove_device(self, index, address, address_type):
@@ -1147,7 +1166,7 @@
                 None on failure.
 
         """
-        msg_data = struct.pack('<6sB', address, address_type)
+        msg_data = struct.pack('<6sB', address.encode(), address_type)
         (status, data) = self.send_command_and_wait(
                 MGMT_OP_REMOVE_DEVICE,
                 index,
@@ -1156,8 +1175,11 @@
         if status != MGMT_STATUS_SUCCESS:
             return None
 
-        (address, address_type,) = struct.unpack_from('<6sB', buffer(data))
-        return (address, address_type)
+        (
+                address_b,
+                address_type,
+        ) = struct.unpack_from('<6sB', memoryview(data))
+        return (address_b.decode(), address_type)
 
 
 class BluetoothRawSocket(BluetoothSocket):
@@ -1190,35 +1212,15 @@
         buf = array.array('B', [0] * 96)
         fcntl.ioctl(self.fileno(), HCIGETDEVINFO, buf, 1)
 
-        ( dev_id, name, address, flags, dev_type, features, pkt_type,
-          link_policy, link_mode, acl_mtu, acl_pkts, sco_mtu, sco_pkts,
-          err_rx, err_tx, cmd_tx, evt_rx, acl_tx, acl_rx, sco_tx, sco_rx,
-          byte_rx, byte_tx ) = struct.unpack_from(
-                '@H8s6sIBQIIIHHHHIIIIIIIIII', buf)
+        (dev_id, name, address, flags, dev_type, features, pkt_type,
+         link_policy, link_mode, acl_mtu, acl_pkts, sco_mtu, sco_pkts, err_rx,
+         err_tx, cmd_tx, evt_rx, acl_tx, acl_rx, sco_tx, sco_rx, byte_rx,
+         byte_tx) = struct.unpack_from('@H8s6sIBQIIIHHHHIIIIIIIIII',
+                                       memoryview(buf))
 
-        return (
-                dev_id,
-                name.rstrip('\0'),
-                ':'.join('%02X' % x
-                         for x in reversed(struct.unpack('6B', address))),
-                flags,
-                (dev_type & 0x30) >> 4,
-                dev_type & 0x0f,
-                features,
-                pkt_type,
-                link_policy,
-                link_mode,
-                acl_mtu,
-                acl_pkts,
-                sco_mtu,
-                sco_pkts,
-                err_rx,
-                err_tx,
-                cmd_tx,
-                evt_rx,
-                acl_tx,
-                acl_rx,
-                sco_tx,
-                sco_rx,
-                byte_rx,
-                byte_tx)
+        return (dev_id, name.decode('utf-8').rstrip('\0'), ':'.join(
+                '%02X' % x for x in reversed(struct.unpack('6B', address))),
+                flags, (dev_type & 0x30) >> 4, dev_type & 0x0f, features,
+                pkt_type, link_policy, link_mode, acl_mtu, acl_pkts, sco_mtu,
+                sco_pkts, err_rx, err_tx, cmd_tx, evt_rx, acl_tx, acl_rx,
+                sco_tx, sco_rx, byte_rx, byte_tx)
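
The hunks above replace Python 2's buffer() with memoryview() and decode unpacked bytes before string handling. A minimal standalone sketch of the same unpack-and-format pattern used in get_dev_info(); the payload bytes below are invented for illustration:

import struct

# Hypothetical mgmt reply payload: a little-endian 32-bit settings word.
data = bytes([0x0a, 0x00, 0x00, 0x00])
(current_settings, ) = struct.unpack_from('<L', memoryview(data))
assert current_settings == 0x0000000a

# A 6-byte address arrives least-significant byte first; reversing the
# unpacked bytes yields the usual colon-separated form.
raw_address = bytes([0x56, 0x34, 0x12, 0xcd, 0xab, 0x00])
formatted = ':'.join('%02X' % x
                     for x in reversed(struct.unpack('6B', raw_address)))
assert formatted == '00:AB:CD:12:34:56'
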
diff --git a/client/common_lib/cros/bluetooth/chipinfo.py b/client/common_lib/cros/bluetooth/chipinfo.py
new file mode 100644
index 0000000..b9e1119
--- /dev/null
+++ b/client/common_lib/cros/bluetooth/chipinfo.py
@@ -0,0 +1,51 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from typing import NamedTuple
+
+
+class ChipInfo(NamedTuple):
+    """Checks vendor support for the specific chipsets."""
+    aosp_support: bool
+    msft_support: bool
+    msft_ocf: int
+
+
+_chip_info = {
+        'MVL-8897': ChipInfo(False, False, 0),
+        'MVL-8997': ChipInfo(False, False, 0),
+        'QCA-6174A-5-USB': ChipInfo(False, False, 0),
+        'QCA-6174A-3-UART': ChipInfo(False, False, 0),
+        'QCA-WCN6856': ChipInfo(True, True, 0x0170),
+        'WCN3991': ChipInfo(True, True, 0x0170),
+        'WCN6750': ChipInfo(True, True, 0x0170),
+        'Intel-AX200': ChipInfo(False, True, 0x001e),
+        'Intel-AX201': ChipInfo(False, True, 0x001e),
+        'Intel-AC9260': ChipInfo(False, True, 0x001e),
+        'Intel-AC9560': ChipInfo(False, True, 0x001e),
+        'Intel-AC7260': ChipInfo(False, False, 0),
+        'Intel-AC7265': ChipInfo(False, False, 0),
+        'Realtek-RTL8822C-USB': ChipInfo(True, False, 0),
+        'Realtek-RTL8822C-UART': ChipInfo(True, False, 0),
+        'Realtek-RTL8852A-USB': ChipInfo(True, False, 0),
+        'Mediatek-MTK7921-USB': ChipInfo(True, True, 0x0130),
+        'Mediatek-MTK7921-SDIO': ChipInfo(True, True, 0x0130)
+}
+
+
+def query(chip_name):
+    """Returns chip info for the specific chipset name.
+
+    @param chip_name: chipset name.
+
+    @return: named tuple ChipInfo(aosp_support, msft_support, msft_ocf).
+    """
+
+    chip_info = _chip_info.get(chip_name)
+    if chip_info is None:
+        raise error.TestError('Chipset name %r does not exist, please update '
+                              'the list of chipsets' % chip_name)
+    return chip_info
\ No newline at end of file
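
A short usage sketch of the query() helper added above, assuming an autotest environment where autotest_lib is importable; the chipset name must be one of the keys in _chip_info, and unknown names raise error.TestError:

from autotest_lib.client.common_lib.cros.bluetooth import chipinfo

info = chipinfo.query('Intel-AX200')
if info.msft_support:
    print('MSFT extension supported, OCF 0x%04x' % info.msft_ocf)
if not info.aosp_support:
    print('AOSP vendor commands not supported on this chip')
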
diff --git a/client/common_lib/cros/cfm/cras_node_collector_unittest.py b/client/common_lib/cros/cfm/cras_node_collector_unittest.py
index 91c88c2..69ad38c 100644
--- a/client/common_lib/cros/cfm/cras_node_collector_unittest.py
+++ b/client/common_lib/cros/cfm/cras_node_collector_unittest.py
@@ -1,5 +1,5 @@
 import unittest
-import mock
+from unittest import mock
 
 from autotest_lib.client.common_lib.cros.cfm import cras_node_collector
 
diff --git a/client/common_lib/cros/cfm/metrics/media_metrics_collector_test.py b/client/common_lib/cros/cfm/metrics/media_metrics_collector_test.py
index a11c1a3..fc2b673 100644
--- a/client/common_lib/cros/cfm/metrics/media_metrics_collector_test.py
+++ b/client/common_lib/cros/cfm/metrics/media_metrics_collector_test.py
@@ -1,8 +1,8 @@
 from autotest_lib.client.common_lib.cros.cfm.metrics import (
         media_metrics_collector)
 
-import mock
 import unittest
+from unittest import mock
 
 # pylint: disable=missing-docstring
 class MediaMetricsCollectorTest(unittest.TestCase):
@@ -30,9 +30,3 @@
       data_point_collector.collect_snapshot()
       data_point_collector.collect_snapshot()
       self.assertEqual(3, len(data_point_collector.get_data_points()))
-
-
-
-
-
-
diff --git a/client/common_lib/cros/cfm/usb/cfm_usb_devices.py b/client/common_lib/cros/cfm/usb/cfm_usb_devices.py
index 6d1d268..67059a9 100644
--- a/client/common_lib/cros/cfm/usb/cfm_usb_devices.py
+++ b/client/common_lib/cros/cfm/usb/cfm_usb_devices.py
@@ -109,11 +109,11 @@
     interfaces=['udl'],
 )
 
-# The MiMO's firmware is tied to the Chrome OS version. The firmware was updated
-# in Chrome OS 65.0.3319.0. This resulted in the PID being changed from 016b to
+# The MiMO's firmware is tied to the ChromeOS version. The firmware was updated
+# in ChromeOS 65.0.3319.0. This resulted in the PID being changed from 016b to
 # 416d. The following device is the device with the new PID. We need to support
 # both versions since we want to support tests at the ToT revision running
-# against older Chrome OS versions.
+# against older ChromeOS versions.
 MIMO_VUE_HD_DISPLAY_PLANKTON = usb_device_spec.UsbDeviceSpec(
     vid='17e9',
     pid='416d',
diff --git a/client/common_lib/cros/cfm/usb/usb_device_collector.py b/client/common_lib/cros/cfm/usb/usb_device_collector.py
index 1055983..78f3c7a 100644
--- a/client/common_lib/cros/cfm/usb/usb_device_collector.py
+++ b/client/common_lib/cros/cfm/usb/usb_device_collector.py
@@ -1,4 +1,4 @@
-import cStringIO
+import six
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib.cros import textfsm
@@ -43,7 +43,7 @@
         self._host = host
 
     def _extract_usb_data(self, rawdata):
-      """
+        """
       Populate usb data into a list of dictionaries.
       @param rawdata The output of "usb-devices" on CfM.
       @returns list of dictionary, example dictionary:
@@ -64,12 +64,12 @@
       'ProdID': '0110',
       'tprnt': '14'}
       """
-      usbdata = []
-      rawdata += '\n'
-      re_table = textfsm.TextFSM(cStringIO.StringIO(self.USB_DEVICES_TEMPLATE))
-      fsm_results = re_table.ParseText(rawdata)
-      usbdata = [dict(zip(re_table.header, row)) for row in fsm_results]
-      return usbdata
+        usbdata = []
+        rawdata += '\n'
+        re_table = textfsm.TextFSM(six.StringIO(self.USB_DEVICES_TEMPLATE))
+        fsm_results = re_table.ParseText(rawdata)
+        usbdata = [dict(zip(re_table.header, row)) for row in fsm_results]
+        return usbdata
 
     def _collect_usb_device_data(self):
         """Collecting usb device data."""
diff --git a/client/common_lib/cros/cfm/usb/usb_device_collector_unittest.py b/client/common_lib/cros/cfm/usb/usb_device_collector_unittest.py
index 22c5e50..5f47001 100644
--- a/client/common_lib/cros/cfm/usb/usb_device_collector_unittest.py
+++ b/client/common_lib/cros/cfm/usb/usb_device_collector_unittest.py
@@ -1,5 +1,5 @@
-import mock
 import unittest
+from unittest import mock
 
 from autotest_lib.client.common_lib.cros.cfm.usb import usb_device_collector
 from autotest_lib.client.common_lib.cros.cfm.usb import usb_device_spec
diff --git a/client/common_lib/cros/cfm/usb/usb_port_manager.py b/client/common_lib/cros/cfm/usb/usb_port_manager.py
index 2d48c34..4b443ae 100644
--- a/client/common_lib/cros/cfm/usb/usb_port_manager.py
+++ b/client/common_lib/cros/cfm/usb/usb_port_manager.py
@@ -24,6 +24,18 @@
         PortId(bus=1, port_number=6): 47,  # Back lower USB 2
         PortId(bus=2, port_number=4): 47,  # Back lower USB 3
     },
+    'guado-cfm': {
+        # Front ports
+        PortId(bus=1, port_number=2): 56,  # Front left USB 2
+        PortId(bus=2, port_number=1): 56,  # Front left USB 3
+        PortId(bus=1, port_number=3): 57,  # Front right USB 2
+        PortId(bus=2, port_number=2): 57,  # Front right USB 3
+        # Back ports (same GPIO is used for both ports)
+        PortId(bus=1, port_number=5): 47,  # Back upper USB 2
+        PortId(bus=2, port_number=3): 47,  # Back upper USB 3
+        PortId(bus=1, port_number=6): 47,  # Back lower USB 2
+        PortId(bus=2, port_number=4): 47,  # Back lower USB 3
+    },
     # On Fizz, there are in total 5 usb ports and per port usb power
     # is controlled by EC with user space command:
     # ectool gpioset USBx_ENABLE 0/1 (x from 1 to 5).
@@ -40,6 +52,20 @@
         PortId(bus=2, port_number=5): 1,    # Back left USB 3
         PortId(bus=2, port_number=6): 2,    # Back middle USB 3
         PortId(bus=2, port_number=2): 3,    # Back right USB 3
+    },
+    'fizz-cfm': {
+        # USB 2 bus.
+        PortId(bus=1, port_number=3): 4,    # Front right USB 2
+        PortId(bus=1, port_number=4): 5,    # Front left USB 2
+        PortId(bus=1, port_number=5): 1,    # Back left USB 2
+        PortId(bus=1, port_number=6): 2,    # Back middle USB 2
+        PortId(bus=1, port_number=2): 3,    # Back right USB 2
+        # USB 3 bus.
+        PortId(bus=2, port_number=3): 4,    # Front right USB 3
+        PortId(bus=2, port_number=4): 5,    # Front left USB 3
+        PortId(bus=2, port_number=5): 1,    # Back left USB 3
+        PortId(bus=2, port_number=6): 2,    # Back middle USB 3
+        PortId(bus=2, port_number=2): 3,    # Back right USB 3
     }
 }
 
@@ -157,9 +183,9 @@
         @param gpio_idx The index of the gpio to set the power for.
         @param power_on If True, powers on the GPIO. If False, powers it off.
         """
-        if board == 'guado':
+        if board == 'guado' or board == 'guado-cfm':
             self._set_gpio_power_guado(gpio_index, power_on)
-        elif board == 'fizz':
+        elif board == 'fizz' or board == 'fizz-cfm':
             self._set_gpio_power_fizz(gpio_index, power_on)
         else:
             raise ValueError('Unsupported board type {}.'.format(board))
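
The new 'guado-cfm' and 'fizz-cfm' entries mirror the existing 'guado' and 'fizz' GPIO maps, and _set_gpio_power now dispatches on either name. A sketch of how a port resolves to its GPIO index through such a table; PortId and the map shape follow the module, while the lookup helper itself is illustrative:

import collections

PortId = collections.namedtuple('PortId', ['bus', 'port_number'])

# Subset of the fizz-cfm map above: front-left USB 2 and USB 3 share GPIO 5.
FIZZ_CFM_GPIOS = {
        PortId(bus=1, port_number=4): 5,
        PortId(bus=2, port_number=4): 5,
}

def gpio_index_for(gpio_map, bus, port_number):
    """Illustrative helper: resolve a (bus, port) pair to its GPIO index."""
    return gpio_map[PortId(bus=bus, port_number=port_number)]

assert gpio_index_for(FIZZ_CFM_GPIOS, bus=1, port_number=4) == 5
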
diff --git a/client/common_lib/cros/cfm/usb/usb_port_manager_unittest.py b/client/common_lib/cros/cfm/usb/usb_port_manager_unittest.py
index c57d5d6..e2568a3 100644
--- a/client/common_lib/cros/cfm/usb/usb_port_manager_unittest.py
+++ b/client/common_lib/cros/cfm/usb/usb_port_manager_unittest.py
@@ -1,6 +1,6 @@
-import mock
+import os
 import unittest
-import os.path
+from unittest import mock
 
 from autotest_lib.client.common_lib.cros.cfm.usb import usb_port_manager
 
diff --git a/client/common_lib/cros/cfm_meetings_api.py b/client/common_lib/cros/cfm_meetings_api.py
index f546036..c00915a 100644
--- a/client/common_lib/cros/cfm_meetings_api.py
+++ b/client/common_lib/cros/cfm_meetings_api.py
@@ -1,10 +1,11 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
 
-from urlparse import urlparse
+from six.moves.urllib.parse import urlparse
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
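
The only functional change above is moving the urlparse import onto six.moves so it resolves on both Python 2 and 3; a quick sketch with an invented meeting URL:

from six.moves.urllib.parse import urlparse

parts = urlparse('https://meet.google.com/abc-defg-hij?authuser=0')
assert parts.netloc == 'meet.google.com'
assert parts.path == '/abc-defg-hij'
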
diff --git a/client/common_lib/cros/chrome.py b/client/common_lib/cros/chrome.py
index a9e48c5..1d6f718 100644
--- a/client/common_lib/cros/chrome.py
+++ b/client/common_lib/cros/chrome.py
@@ -1,7 +1,12 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import os
 import re
@@ -11,6 +16,7 @@
 from autotest_lib.client.common_lib.cros import assistant_util
 from autotest_lib.client.cros import constants
 from autotest_lib.client.bin import utils
+from six.moves import range
 from telemetry.core import cros_interface, exceptions
 from telemetry.internal.browser import browser_finder, browser_options
 from telemetry.internal.browser import extension_to_load
@@ -54,13 +60,24 @@
     BROWSER_TYPE_GUEST = 'system-guest'
     AUTOTEST_EXT_ID = 'behllobkkfkfnphdnhnkndlbkcpglgmj'
 
-    def __init__(self, logged_in=True, extension_paths=None, autotest_ext=False,
-                 num_tries=3, extra_browser_args=None,
-                 clear_enterprise_policy=True, expect_policy_fetch=False,
-                 dont_override_profile=False, disable_gaia_services=True,
-                 disable_default_apps=True, auto_login=True, gaia_login=False,
-                 username=None, password=None, gaia_id=None,
-                 arc_mode=None, arc_timeout=None,
+    def __init__(self,
+                 logged_in=True,
+                 extension_paths=None,
+                 autotest_ext=False,
+                 num_tries=3,
+                 extra_browser_args=None,
+                 clear_enterprise_policy=True,
+                 expect_policy_fetch=False,
+                 dont_override_profile=False,
+                 disable_gaia_services=True,
+                 disable_default_apps=True,
+                 auto_login=True,
+                 gaia_login=False,
+                 username=None,
+                 password=None,
+                 gaia_id=None,
+                 arc_mode=None,
+                 arc_timeout=None,
                  enable_web_app_auto_install=False,
                  disable_arc_opt_in=True,
                  disable_arc_opt_in_verification=True,
@@ -74,7 +91,8 @@
                  init_network_controller=False,
                  mute_audio=False,
                  proxy_server=None,
-                 login_delay=0):
+                 login_delay=0,
+                 enable_features=None):
         """
         Constructor of telemetry wrapper.
 
@@ -146,6 +164,7 @@
             default proxy-server is disabled
         @param login_delay: Time for idle in login screen to simulate the time
                             required for password typing.
+        @param enable_features: Comma separated list of features to enable.
         """
         self._autotest_ext_path = None
 
@@ -187,7 +206,7 @@
                                                    'autotest_private_ext')
             extension_paths.append(self._autotest_ext_path)
             finder_options.browser_options.AppendExtraBrowserArgs(
-                ['--whitelisted-extension-id=%s' % self.AUTOTEST_EXT_ID])
+                    ['--allowlisted-extension-id=%s' % self.AUTOTEST_EXT_ID])
 
         self._browser_type = (self.BROWSER_TYPE_LOGIN
                               if logged_in else self.BROWSER_TYPE_GUEST)
@@ -197,10 +216,18 @@
             finder_options.browser_options.AppendExtraBrowserArgs(
                     ['--disable-features=DefaultWebAppInstallation'])
 
+        if not auto_login:
+            finder_options.browser_options.AppendExtraBrowserArgs(
+                    ['--enable-oobe-test-api'])
+
         if extra_browser_args:
             finder_options.browser_options.AppendExtraBrowserArgs(
                 extra_browser_args)
 
+        if enable_features:
+            finder_options.browser_options.AppendExtraBrowserArgs(
+                    ['--enable-features=%s' % enable_features])
+
         # finder options must be set before parse_args(), browser options must
         # be set before Create().
         # TODO(crbug.com/360890) Below MUST be '2' so that it doesn't inhibit
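
The constructor changes above add an enable_features argument that is forwarded to the browser as --enable-features. A hedged usage sketch, following the common `with chrome.Chrome(...)` pattern in autotest tests; the feature names are placeholders, not features the diff defines:

from autotest_lib.client.common_lib.cros import chrome

# Launches Chrome with --enable-features=FeatureOne,FeatureTwo appended to the
# browser arguments, alongside any extra_browser_args.
with chrome.Chrome(autotest_ext=True,
                   enable_features='FeatureOne,FeatureTwo') as cr:
    tab = cr.browser.tabs[0]
    tab.Navigate('chrome://version')
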
diff --git a/client/common_lib/cros/chromedriver.py b/client/common_lib/cros/chromedriver.py
index 1d39fba..cb1bb4a 100644
--- a/client/common_lib/cros/chromedriver.py
+++ b/client/common_lib/cros/chromedriver.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,8 +6,8 @@
 import atexit
 import logging
 import os
-import urllib2
-import urlparse
+from six.moves import urllib
+import six.moves.urllib.parse
 
 try:
     from selenium import webdriver
@@ -27,11 +28,21 @@
 class chromedriver(object):
     """Wrapper class, a context manager type, for tests to use Chrome Driver."""
 
-    def __init__(self, extra_chrome_flags=[], subtract_extra_chrome_flags=[],
-                 extension_paths=[], username=None, password=None,
-                 server_port=None, skip_cleanup=False, url_base=None,
-                 extra_chromedriver_args=None, gaia_login=False,
-                 disable_default_apps=True, dont_override_profile=False, *args,
+    def __init__(self,
+                 extra_chrome_flags=[],
+                 subtract_extra_chrome_flags=[],
+                 extension_paths=[],
+                 username=None,
+                 password=None,
+                 server_port=None,
+                 skip_cleanup=False,
+                 url_base=None,
+                 extra_chromedriver_args=None,
+                 gaia_login=False,
+                 disable_default_apps=True,
+                 dont_override_profile=False,
+                 chromeOptions={},
+                 *args,
                  **kwargs):
         """Initialize.
 
@@ -56,6 +67,8 @@
                                       Telemetry will output a warning with this
                                       option.
         """
+        if not isinstance(chromeOptions, dict):
+            raise TypeError("chromeOptions must be of type dict.")
         self._cleanup = not skip_cleanup
         assert os.geteuid() == 0, 'Need superuser privileges'
 
@@ -91,12 +104,14 @@
         # Open a new tab using Chrome remote debugging. ChromeDriver expects
         # a tab opened for remote to work. Tabs opened using Telemetry will be
         # owned by Telemetry, and will be inaccessible to ChromeDriver.
-        urllib2.urlopen('http://localhost:%i/json/new' %
+        urllib.request.urlopen('http://localhost:%i/json/new' %
                         utils.get_chrome_remote_debugging_port())
 
-        chromeOptions = {'debuggerAddress':
-                         ('localhost:%d' %
-                          utils.get_chrome_remote_debugging_port())}
+        chromeBaseOptions = {
+                'debuggerAddress':
+                ('localhost:%d' % utils.get_chrome_remote_debugging_port())
+        }
+        chromeOptions.update(chromeBaseOptions)
         capabilities = {'chromeOptions':chromeOptions}
         # Handle to chromedriver, for chrome automation.
         try:
@@ -181,7 +196,7 @@
         self.url = 'http://localhost:%d' % port
         if url_base:
             chromedriver_args.append('--url-base=%s' % url_base)
-            self.url = urlparse.urljoin(self.url, url_base)
+            self.url = six.moves.urllib.parse.urljoin(self.url, url_base)
 
         if extra_args:
             chromedriver_args.extend(extra_args)
@@ -215,9 +230,9 @@
     def is_running(self):
         """Returns whether the server is up and running."""
         try:
-            urllib2.urlopen(self.url + '/status')
+            urllib.request.urlopen(self.url + '/status')
             return True
-        except urllib2.URLError as e:
+        except urllib.error.URLError as e:
             return False
 
 
@@ -237,7 +252,7 @@
             return
 
         try:
-            urllib2.urlopen(self.url + '/shutdown', timeout=10).close()
+            urllib.request.urlopen(self.url + '/shutdown', timeout=10).close()
         except:
             pass
 
diff --git a/client/common_lib/cros/cr50_utils.py b/client/common_lib/cros/cr50_utils.py
index bdea0de..4c34fff 100644
--- a/client/common_lib/cros/cr50_utils.py
+++ b/client/common_lib/cros/cr50_utils.py
@@ -1,10 +1,17 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import argparse
 import logging
 import re
+import six
+from six.moves import range
 
 from autotest_lib.client.common_lib import error
 
@@ -19,7 +26,7 @@
 GET_CR50_VERSION = 'cat %s' % CR50_VERSION
 GET_CR50_MESSAGES ='grep "cr50-.*\[" /var/log/messages'
 UPDATE_FAILURE = 'unexpected cr50-update exit code'
-DUMMY_VER = '-1.-1.-1'
+STUB_VER = '-1.-1.-1'
 # This dictionary is used to search the gsctool output for the version strings.
 # There are two gsctool commands that will return versions: 'fwver' and
 # 'binvers'.
@@ -93,10 +100,28 @@
 # never timeout because they do not force cr50 to reboot. They should all just
 # return information about cr50 and should only have a nonzero exit status if
 # something went wrong.
-gsctool.add_argument('-b', '--binvers', '-f', '--fwver', '-g', '--getbootmode',
-                     '-i', '--board_id', '-r', '--rma_auth', '-F', '--factory',
-                     '-m', '--tpm_mode', '-L', '--flog',
-                     dest='info_cmd', action='store_true')
+gsctool.add_argument('-b',
+                     '--binvers',
+                     '-f',
+                     '--fwver',
+                     '-g',
+                     '--getbootmode',
+                     '-i',
+                     '--board_id',
+                     '-r',
+                     '--rma_auth',
+                     '-F',
+                     '--factory',
+                     '-m',
+                     '--tpm_mode',
+                     '-L',
+                     '--flog',
+                     '-A',
+                     '--get_apro_hash',
+                     '-H',
+                     '--erase_ap_ro_hash',
+                     dest='info_cmd',
+                     action='store_true')
 # upstart and post_reset will post resets instead of rebooting immediately
 gsctool.add_argument('-u', '--upstart', '-p', '--post_reset', dest='post_reset',
                      action='store_true')
@@ -149,9 +174,9 @@
     """
     ver = None
     key = None
-    for k, v in versions.iteritems():
+    for k, v in six.iteritems(versions):
         if name in k:
-            if v == DUMMY_VER:
+            if v == STUB_VER:
                 logging.info('Detected invalid %s %s', name, v)
                 return v
             elif ver:
@@ -211,12 +236,14 @@
         client.run('stop trunksd')
 
 
-def GSCTool(client, args, ignore_status=False):
+def GSCTool(client, args, ignore_status=False, expect_reboot=False):
     """Run gsctool with the given args.
 
     Args:
         client: the object to run commands on
         args: a list of strings that contain the gsctool args
+        ignore_status: Ignore the exit status
+        expect_reboot: Expect a reboot
 
     Returns:
         the result of gsctool
@@ -230,8 +257,9 @@
     # status so we should ignore it.
     ignore_status = not options.info_cmd or ignore_status
     # immediate reboots are only honored if the command is sent using /dev/tpm0
-    expect_reboot = ((options.systemdev or options.universal) and
-            not options.post_reset and not options.info_cmd)
+    expect_reboot = expect_reboot or ((options.systemdev or options.universal)
+                                      and not options.post_reset
+                                      and not options.info_cmd)
 
     result = client.run('gsctool %s' % ' '.join(args),
                         ignore_status=ignore_status,
@@ -371,7 +399,7 @@
 
     new_ver = GetRunningVersion(client)
     if ver != '':
-        if DUMMY_VER != ver[0]:
+        if STUB_VER != ver[0]:
             AssertVersionsAreEqual('Old RO', ver[0], 'Updated RO', new_ver[0])
         AssertVersionsAreEqual('Old RW', ver[1], 'Updated RW', new_ver[1])
     return new_ver, last_message
@@ -455,7 +483,7 @@
         None if if the given board id info is empty or is not valid
     """
     # Convert board_id_info to a tuple if it's a string.
-    if isinstance(board_id_info, str):
+    if isinstance(board_id_info, six.string_types):
         board_id_info = GetBoardIdInfoTuple(board_id_info)
 
     if not board_id_info:
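
Beyond the DUMMY_VER-to-STUB_VER rename and the new gsctool flags (-A/--get_apro_hash, -H/--erase_ap_ro_hash), the hunks above move dict iteration and string checks onto six so they behave the same on Python 2 and 3. A small standalone sketch of those two idioms; the version and board id values are made up:

import six

STUB_VER = '-1.-1.-1'
versions = {'cr50_ro': STUB_VER, 'cr50_rw': '0.5.40'}

for name, ver in six.iteritems(versions):
    if ver == STUB_VER:
        print('Detected invalid %s version' % name)

board_id_info = 'aaaaaaaa:bbbbbbbb:cccccccc'  # hypothetical colon-separated id
if isinstance(board_id_info, six.string_types):
    board_id_info = tuple(board_id_info.split(':'))
print(board_id_info)  # ('aaaaaaaa', 'bbbbbbbb', 'cccccccc')
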
diff --git a/client/common_lib/cros/cros_config.py b/client/common_lib/cros/cros_config.py
index 5c11548..dffb966 100644
--- a/client/common_lib/cros/cros_config.py
+++ b/client/common_lib/cros/cros_config.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -12,7 +12,7 @@
     platform2/chromeos-config/libcros_config/cros_config_fallback.cc
 If the requested cros_config path/property are mapped to a fallback
 command, then that command is called, and its results is returned.
-That behavior is all native to cros_config.
+That behavior is all natural to cros_config.
 
 Let's say you define a new fallback command in cros_config_fallback.cc:
     `cros_config /foo bar` --> `mosys baz quux`
diff --git a/client/common_lib/cros/cros_config_unittest.py b/client/common_lib/cros/cros_config_unittest.py
index 2ba3a79..b7762dd 100644
--- a/client/common_lib/cros/cros_config_unittest.py
+++ b/client/common_lib/cros/cros_config_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/common_lib/cros/dbus_send.py b/client/common_lib/cros/dbus_send.py
index dd4ec7e..cf6a0c1 100644
--- a/client/common_lib/cros/dbus_send.py
+++ b/client/common_lib/cros/dbus_send.py
@@ -1,13 +1,19 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import collections
 import dbus
 import logging
 import pipes
 import re
 import shlex
+import six
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
@@ -37,7 +43,7 @@
 
 
 def _parse_value(token_stream):
-    """Turn a stream of tokens from dbus-send output into native python types.
+    """Turn a stream of tokens from dbus-send output into builtin python types.
 
     @param token_stream: output from _build_token_stream() above.
 
@@ -155,7 +161,7 @@
         else:
             return 'boolean:false'
 
-    for prim_type, prefix in int_map.iteritems():
+    for prim_type, prefix in six.iteritems(int_map):
         if isinstance(raw_arg, prim_type):
             return prefix + str(raw_arg)
 
diff --git a/client/common_lib/cros/dbus_send_unittest.py b/client/common_lib/cros/dbus_send_unittest.py
index ce2bc61..a64bd80 100755
--- a/client/common_lib/cros/dbus_send_unittest.py
+++ b/client/common_lib/cros/dbus_send_unittest.py
@@ -1,15 +1,22 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import unittest
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 
 import common
 import dbus
+import six
+import unittest
+
 from autotest_lib.client.common_lib.cros import dbus_send
 
+
 EXAMPLE_SHILL_GET_PROPERTIES_OUTPUT = (
 'method return sender=org.freedesktop.DBus -> destination=:1.37 serial=3 '
 'reply_serial=2\n'
@@ -80,10 +87,6 @@
          variant             string "gateway.2wire.net"
       )
       dict entry(
-         string "LinkMonitorTechnologies"
-         variant             string "wifi"
-      )
-      dict entry(
          string "NoAutoConnectTechnologies"
          variant             string ""
       )
@@ -160,7 +163,6 @@
     'EnabledTechnologies': ['ethernet'],
     'HostName': '',
     'IgnoredDNSSearchPaths': 'gateway.2wire.net',
-    'LinkMonitorTechnologies': 'wifi',
     'NoAutoConnectTechnologies': '',
     'OfflineMode': False,
     'PortalCheckInterval': 30,
@@ -206,7 +208,7 @@
         assert result.sender == 'org.freedesktop.DBus', (
             'Sender == %r' % result.sender)
         assert result.responder == ':1.37', 'Responder == %r' % result.responder
-        for k, v in PARSED_SHILL_GET_PROPERTIES_OUTPUT.iteritems():
+        for k, v in six.iteritems(PARSED_SHILL_GET_PROPERTIES_OUTPUT):
             assert k in result.response, '%r not in response' % k
             actual_v = result.response.pop(k)
             assert actual_v == v, 'Expected %r, got %r' % (v, actual_v)
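
The dbus_send.py hunk further above iterates int_map with six.iteritems when serializing builtin Python values into dbus-send argument tokens. A minimal sketch of that pattern with an assumed prefix map; the real int_map lives in dbus_send.py and is not reproduced here:

import six

int_map = {int: 'int32:', str: 'string:'}  # assumed subset

def raw_arg_to_token(raw_arg):
    """Illustrative: turn a builtin Python value into a dbus-send argument."""
    if isinstance(raw_arg, bool):
        return 'boolean:true' if raw_arg else 'boolean:false'
    for prim_type, prefix in six.iteritems(int_map):
        if isinstance(raw_arg, prim_type):
            return prefix + str(raw_arg)
    raise ValueError('Unsupported argument type: %r' % type(raw_arg))

assert raw_arg_to_token(False) == 'boolean:false'
assert raw_arg_to_token(5) == 'int32:5'
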
diff --git a/client/common_lib/cros/dev_server.py b/client/common_lib/cros/dev_server.py
index b2dbb46..ffbd145 100644
--- a/client/common_lib/cros/dev_server.py
+++ b/client/common_lib/cros/dev_server.py
@@ -13,6 +13,9 @@
 import multiprocessing
 import os
 import re
+import shutil
+import subprocess
+from threading import Timer
 import six
 from six.moves import urllib
 import six.moves.html_parser
@@ -28,10 +31,9 @@
 from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros import retry
 
-# TODO(cmasone): redo this class using requests module; http://crosbug.com/30107
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -155,6 +157,7 @@
     def __init__(self):
         self.reset()
         self.fed = []
+        self.convert_charrefs = True
 
 
     def handle_data(self, d):
@@ -183,7 +186,11 @@
 
 
 def _get_image_storage_server():
-    return CONFIG.get_config_value('CROS', 'image_storage_server', type=str)
+    image_path = CONFIG.get_config_value('CROS',
+                                         'image_storage_server',
+                                         type=str)
+    # see b/203531740; this forces a trailing / if not there yet.
+    return os.path.join(image_path, '')
 
 
 def _get_canary_channel_server():
@@ -193,7 +200,11 @@
 
     @return: The url to the canary channel server.
     """
-    return CONFIG.get_config_value('CROS', 'canary_channel_server', type=str)
+    image_path = CONFIG.get_config_value('CROS',
+                                         'canary_channel_server',
+                                         type=str)
+    # see b/203531740; this forces a trailing / if not there yet.
+    return os.path.join(image_path, '')
 
 
 def _get_storage_server_for_artifacts(artifacts=None):
@@ -602,6 +613,23 @@
 
 
     @classmethod
+    def run_request(cls, call, timeout=None):
+        """Invoke a given devserver call using urllib.open.
+
+        Open the URL with HTTP and return the response object. Exceptions
+        may be raised as for urllib.request.urlopen().
+
+        @param call: a url string that calls a method to a devserver.
+        @param timeout: The timeout seconds for this urlopen call.
+
+        @return An HTTPResponse object.
+        """
+        if timeout is None:
+            return urllib.request.urlopen(call)
+        else:
+            return utils.urlopen_socket_timeout(call, timeout=timeout)
+
+    @classmethod
     def run_call(cls, call, readline=False, timeout=None):
         """Invoke a given devserver call using urllib.open.
 
@@ -614,14 +642,11 @@
 
         @return the results of this call.
         """
-        if timeout is not None:
-            return utils.urlopen_socket_timeout(
-                    call, timeout=timeout).read()
-        elif readline:
-            response = urllib.request.urlopen(call)
+        response = cls.run_request(call, timeout=timeout)
+        if readline:
             return [line.rstrip() for line in response]
         else:
-            return urllib.request.urlopen(call).read()
+            return response.read()
 
 
     @staticmethod
@@ -709,12 +734,17 @@
 
 
     @classmethod
-    def get_available_devservers(cls, hostname=None,
+    def get_available_devservers(cls,
+                                 hostname=None,
                                  prefer_local_devserver=PREFER_LOCAL_DEVSERVER,
-                                 restricted_subnets=utils.RESTRICTED_SUBNETS):
+                                 restricted_subnets=utils.ALL_SUBNETS):
         """Get devservers in the same subnet of the given hostname.
 
         @param hostname: Hostname of a DUT to choose devserver for.
+        @param prefer_local_devserver: A boolean indicating whether to prefer
+                                       a devserver in the same subnet as the
+                                       DUT.
+        @param restricted_subnets: A list of restricted subnets or p2p subnet
+                                   groups.
 
         @return: A tuple of (devservers, can_retry), devservers is a list of
                  devservers that's available for the given hostname. can_retry
@@ -734,20 +764,33 @@
         if not host_ip:
             return cls.get_unrestricted_devservers(restricted_subnets), False
 
-        # Go through all restricted subnet settings and check if the DUT is
-        # inside a restricted subnet. If so, only return the devservers in the
-        # restricted subnet and doesn't allow retry.
-        if host_ip and restricted_subnets:
-            subnet_ip, mask_bits = _get_subnet_for_host_ip(
-                    host_ip, restricted_subnets=restricted_subnets)
-            if subnet_ip:
-                logging.debug('The host %s (%s) is in a restricted subnet. '
-                              'Try to locate a devserver inside subnet '
-                              '%s:%d.', hostname, host_ip, subnet_ip,
-                              mask_bits)
-                devservers = cls.get_devservers_in_same_subnet(
-                        subnet_ip, mask_bits)
-                return devservers, False
+        # For the sake of backward compatibility, we use the argument
+        # 'restricted_subnets' to store both the legacy subnets (a tuple of
+        # (ip, mask)) and p2p subnets group (a list of subnets, i.e. [(ip,
+        # mask), ...]) data. For consistency, we convert all legacy subnets to
+        # a "singleton p2p subnets" and store them in a new list.
+        all_subnets = []
+        for s in restricted_subnets:
+            if isinstance(s, tuple):
+                all_subnets.append([s])
+            else:
+                all_subnets.append(s)
+
+        # Find devservers in the subnets reachable from the DUT.
+        if host_ip and all_subnets:
+            subnet_group = _get_subnet_group_for_host_ip(
+                    host_ip, all_subnets=all_subnets)
+            if subnet_group:
+                devservers = set()
+                for ip, mask in subnet_group:
+                    logging.debug(
+                            'The host %s (%s) is in a restricted subnet '
+                            '(or its peers). '
+                            'Try to locate devservers inside subnet '
+                            '%s/%d.', hostname, host_ip, ip, mask)
+                    devservers |= set(
+                            cls.get_devservers_in_same_subnet(ip, mask))
+                return sorted(devservers), False
 
         # If prefer_local_devserver is set to True and the host is not in
         # restricted subnet, pick a devserver in the same subnet if possible.
@@ -987,7 +1030,7 @@
         """
         server_name = get_hostname(call)
         is_in_restricted_subnet = utils.get_restricted_subnet(
-                server_name, utils.RESTRICTED_SUBNETS)
+                server_name, utils.get_all_restricted_subnets())
         _EMPTY_SENTINEL_VALUE = object()
         def kickoff_call():
             """Invoke a given devserver call using urllib.open or ssh.
@@ -1004,6 +1047,11 @@
             else:
                 response = cls.run_ssh_call(
                         call, readline=readline, timeout=timeout)
+
+            # six.ensure_str would be nice, but it's not in all the envs, so
+            # this is what we are left with for now.
+            if isinstance(response, bytes):
+                response = response.decode()
             # Retry if devserver service is temporarily down, e.g. in a
             # devserver push.
             if ERR_MSG_FOR_DOWN_DEVSERVER in response:
@@ -1036,9 +1084,57 @@
         @param local_file: The path of the file saved to local.
         @param timeout: The timeout seconds for this call.
         """
-        response = cls.run_call(remote_file, timeout=timeout)
-        with open(local_file, 'w') as out_log:
-            out_log.write(response)
+        server_name = get_hostname(remote_file)
+        is_in_restricted_subnet = utils.get_restricted_subnet(
+                server_name, utils.get_all_restricted_subnets())
+
+        if (not ENABLE_SSH_CONNECTION_FOR_DEVSERVER
+                    or not is_in_restricted_subnet):
+            response = super(ImageServerBase, cls).run_request(remote_file,
+                                                               timeout=timeout)
+            with open(local_file, 'wb') as out_log:
+                shutil.copyfileobj(response, out_log)
+        else:
+            timeout_seconds = timeout if timeout else DEVSERVER_SSH_TIMEOUT_MINS * 60
+            # SSH to the dev server and attach the local file as stdout.
+            with open(local_file, 'wb') as out_log:
+                ssh_cmd = [
+                        'ssh', server_name,
+                        'curl -s -S -f "%s"' % utils.sh_escape(remote_file)
+                ]
+                logging.debug("Running command %s", ssh_cmd)
+                with open(os.devnull) as devnull:
+                    cmd = subprocess.Popen(
+                            ssh_cmd,
+                            stdout=out_log,
+                            stdin=devnull,
+                            stderr=subprocess.PIPE,
+                    )
+
+                    # Python 2.7 doesn't have Popen.wait(timeout), so start a
+                    # timer and kill the ssh process if it takes too long.
+                    def stop_process():
+                        """Kills the subprocess after the timeout."""
+                        cmd.kill()
+                        logging.error("ssh call timed out after %s secs",
+                                      timeout_seconds)
+
+                    t = Timer(timeout_seconds, stop_process)
+                    try:
+                        t.start()
+                        cmd.wait()
+                    finally:
+                        t.cancel()
+                    error_output = cmd.stderr.read()
+                    if error_output:
+                        logging.error("ssh call output: %s", error_output)
+                    if cmd.returncode != 0:
+                        c = metrics.Counter(
+                                'chromeos/autotest/devserver/ssh_failure')
+                        c.increment(fields={'dev_server': server_name})
+                        raise DevServerException(
+                                "ssh call failed with exit code %s",
+                                cmd.returncode)
 
 
     def _poll_is_staged(self, **kwargs):
@@ -1124,7 +1220,7 @@
                                      'the call: %s' % (self.url(), call))
 
         if expected_response and not response == expected_response:
-                raise DevServerException(error_message)
+            raise DevServerException(error_message)
 
         # `os_type` is needed in build a devserver call, but not needed for
         # wait_for_artifacts_staged, since that method is implemented by
@@ -1157,7 +1253,8 @@
         @raise DevServerException upon any return code that's not HTTP OK.
         """
         if not archive_url:
-            archive_url = _get_storage_server_for_artifacts(artifacts) + build
+            archive_url = os.path.join(
+                    _get_storage_server_for_artifacts(artifacts), build)
 
         artifacts_arg = ','.join(artifacts) if artifacts else ''
         files_arg = ','.join(files) if files else ''
@@ -1424,12 +1521,17 @@
             self.nton_payload = nton_payload
 
 
-    def wait_for_artifacts_staged(self, archive_url, artifacts='', files=''):
+    def wait_for_artifacts_staged(self,
+                                  archive_url,
+                                  artifacts='',
+                                  files='',
+                                  **kwargs):
         """Polling devserver.is_staged until all artifacts are staged.
 
         @param archive_url: Google Storage URL for the build.
         @param artifacts: Comma separated list of artifacts to download.
         @param files: Comma separated list of files to download.
+        @param kwargs: keyword arguments to make is_staged devserver call.
         @return: True if all artifacts are staged in devserver.
         """
         kwargs = {'archive_url': archive_url,
@@ -1439,8 +1541,14 @@
 
 
     @remote_devserver_call()
-    def call_and_wait(self, call_name, archive_url, artifacts, files,
-                      error_message, expected_response=SUCCESS):
+    def call_and_wait(self,
+                      call_name,
+                      archive_url,
+                      artifacts,
+                      files,
+                      error_message,
+                      expected_response=SUCCESS,
+                      clean=False):
         """Helper method to make a urlopen call, and wait for artifacts staged.
 
         @param call_name: name of devserver rpc call.
@@ -1453,21 +1561,26 @@
                                   to be good.
         @param error_message: Error message to be thrown if response does not
                               match expected_response.
+        @param clean: Force re-loading artifacts/files from cloud, ignoring
+                      cached version.
 
         @return: The response from rpc.
         @raise DevServerException upon any return code that's expected_response.
 
         """
-        kwargs = {'archive_url': archive_url,
-                  'artifacts': artifacts,
-                  'files': files}
+        kwargs = {
+                'archive_url': archive_url,
+                'artifacts': artifacts,
+                'files': files,
+                'clean': clean
+        }
         return self._call_and_wait(call_name, error_message,
                                    expected_response, **kwargs)
 
 
     @remote_devserver_call()
     def stage_artifacts(self, image=None, artifacts=None, files='',
-                        archive_url=None):
+                        archive_url=None, **kwargs):
         """Tell the devserver to download and stage |artifacts| from |image|.
 
          This is the main call point for staging any specific artifacts for a
@@ -1483,13 +1596,15 @@
         @param archive_url: Optional parameter that has the archive_url to stage
                 this artifact from. Default is specified in autotest config +
                 image.
+        @param kwargs: keyword arguments that specify the build information, to
+                make stage devserver call.
 
         @raise DevServerException upon any return code that's not HTTP OK.
         """
         if not artifacts and not files:
             raise DevServerException('Must specify something to stage.')
         image = self.translate(image)
-        self._stage_artifacts(image, artifacts, files, archive_url)
+        self._stage_artifacts(image, artifacts, files, archive_url, **kwargs)
 
 
     @remote_devserver_call(timeout_min=DEVSERVER_SSH_TIMEOUT_MINS)
@@ -2011,6 +2126,26 @@
     return None, None
 
 
+def _get_subnet_group_for_host_ip(host_ip, all_subnets=()):
+    """Get subnet group for a given host IP.
+
+    All subnets in the group are reachable from the input host ip.
+
+    @param host_ip: the IP of a DUT.
+    @param all_subnets: A two-level list of subnets: each entry is either a
+                        singleton list wrapping a legacy restricted subnet or
+                        a group of p2p subnets.
+
+    @return: a list of (subnet_ip, mask_bits) tuple. If no matched subnets for
+             the host_ip, return [].
+    """
+    for subnet_group in all_subnets:
+        subnet, _ = _get_subnet_for_host_ip(host_ip,
+                                            restricted_subnets=subnet_group)
+        if subnet:
+            return subnet_group
+    return []
+
+
 def get_least_loaded_devserver(devserver_type=ImageServer, hostname=None):
     """Get the devserver with the least load.
 
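
Two small idioms in the dev_server.py changes above are easy to miss: os.path.join(path, '') forces exactly one trailing slash on the storage server URLs (b/203531740), and legacy (ip, mask) subnets are wrapped into singleton lists so they can be handled uniformly with p2p subnet groups. A standalone sketch; the gs:// URL is a typical value and the subnet numbers are invented:

import os

# 1) Force a trailing slash without doubling one that is already there
#    (POSIX path semantics).
expected = 'gs://chromeos-image-archive/'
assert os.path.join('gs://chromeos-image-archive', '') == expected
assert os.path.join('gs://chromeos-image-archive/', '') == expected

# 2) Normalize a mixed restricted_subnets list into groups of (ip, mask) tuples.
restricted_subnets = [
        ('100.100.0.0', 17),                          # legacy single subnet
        [('100.101.0.0', 18), ('100.102.0.0', 18)],   # p2p subnet group
]
all_subnets = []
for s in restricted_subnets:
    if isinstance(s, tuple):
        all_subnets.append([s])
    else:
        all_subnets.append(s)
assert all_subnets[0] == [('100.100.0.0', 17)]
assert all_subnets[1] == [('100.101.0.0', 18), ('100.102.0.0', 18)]
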
diff --git a/client/common_lib/cros/dev_server_unittest.py b/client/common_lib/cros/dev_server_unittest.py
index a923cbd..128bf69 100755
--- a/client/common_lib/cros/dev_server_unittest.py
+++ b/client/common_lib/cros/dev_server_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -8,20 +8,21 @@
 
 import six.moves.http_client
 import json
-import mox
 import os
 import six
 from six.moves import urllib
 import time
 import unittest
-
-import mock
+from unittest import mock
+from unittest.mock import patch, call
 
 import common
 from autotest_lib.client.bin import utils as bin_utils
 from autotest_lib.client.common_lib import android_utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import global_config
+from autotest_lib.client.common_lib.test_utils import comparators
+
 from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros import dev_server
 from autotest_lib.client.common_lib.cros import retry
@@ -65,19 +66,19 @@
 
 
 E403 = urllib.error.HTTPError(url='',
-                         code=six.moves.http_client.FORBIDDEN,
-                         msg='Error 403',
-                         hdrs=None,
-                         fp=six.StringIO('Expected.'))
+                              code=six.moves.http_client.FORBIDDEN,
+                              msg='Error 403',
+                              hdrs=None,
+                              fp=six.StringIO('Expected.'))
 E500 = urllib.error.HTTPError(url='',
-                         code=six.moves.http_client.INTERNAL_SERVER_ERROR,
-                         msg='Error 500',
-                         hdrs=None,
-                         fp=six.StringIO('Expected.'))
+                              code=six.moves.http_client.INTERNAL_SERVER_ERROR,
+                              msg='Error 500',
+                              hdrs=None,
+                              fp=six.StringIO('Expected.'))
 CMD_ERROR = error.CmdError('error_cmd', MockSshError().result_obj)
 
 
-class RunCallTest(mox.MoxTestBase):
+class RunCallTest(unittest.TestCase):
     """Unit tests for ImageServerBase.run_call or DevServer.run_call."""
 
     def setUp(self):
@@ -88,8 +89,14 @@
         self.contents_readline = ['file/one', 'file/two']
         self.save_ssh_config = dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER
         super(RunCallTest, self).setUp()
-        self.mox.StubOutWithMock(urllib.request, 'urlopen')
-        self.mox.StubOutWithMock(utils, 'run')
+
+        run_patcher = patch.object(utils, 'run', spec=True)
+        self.utils_run_mock = run_patcher.start()
+        self.addCleanup(run_patcher.stop)
+
+        urlopen_patcher = patch.object(urllib.request, 'urlopen', spec=True)
+        self.urlopen_mock = urlopen_patcher.start()
+        self.addCleanup(urlopen_patcher.stop)
 
         sleep = mock.patch('time.sleep', autospec=True)
         sleep.start()
@@ -107,62 +114,64 @@
         (call)."""
         dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = False
 
-        urllib.request.urlopen(mox.StrContains(self.test_call)).AndReturn(
-                six.StringIO(dev_server.ERR_MSG_FOR_DOWN_DEVSERVER))
-        time.sleep(mox.IgnoreArg())
-        urllib.request.urlopen(mox.StrContains(self.test_call)).AndReturn(
-                six.StringIO(self.contents))
-        self.mox.ReplayAll()
+        urllib.request.urlopen.side_effect = [
+                six.StringIO(dev_server.ERR_MSG_FOR_DOWN_DEVSERVER),
+                six.StringIO(self.contents)
+        ]
+
         response = dev_server.ImageServerBase.run_call(self.test_call)
         self.assertEquals(self.contents, response)
-
+        self.urlopen_mock.assert_called_with(
+                comparators.Substring(self.test_call))
 
     def testRunCallSSHWithDownDevserver(self):
         """Test dev_server.ImageServerBase.run_call using http with arg:
         (call)."""
         dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = True
-        self.mox.StubOutWithMock(utils, 'get_restricted_subnet')
-        utils.get_restricted_subnet(
-                self.hostname, utils.RESTRICTED_SUBNETS).AndReturn(
-                self.hostname)
+        with patch.object(utils, 'get_restricted_subnet') as subnet_patch:
+            utils.get_restricted_subnet.return_value = self.hostname
 
-        to_return1 = MockSshResponse(dev_server.ERR_MSG_FOR_DOWN_DEVSERVER)
-        to_return2 = MockSshResponse(self.contents)
-        utils.run(mox.StrContains(self.test_call),
-                  timeout=mox.IgnoreArg()).AndReturn(to_return1)
-        time.sleep(mox.IgnoreArg())
-        utils.run(mox.StrContains(self.test_call),
-                  timeout=mox.IgnoreArg()).AndReturn(to_return2)
+            to_return1 = MockSshResponse(dev_server.ERR_MSG_FOR_DOWN_DEVSERVER)
+            to_return2 = MockSshResponse(self.contents)
+            utils.run.side_effect = [to_return1, to_return2]
 
-        self.mox.ReplayAll()
-        response = dev_server.ImageServerBase.run_call(self.test_call)
-        self.assertEquals(self.contents, response)
-        dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = False
+            response = dev_server.ImageServerBase.run_call(self.test_call)
+            self.assertEquals(self.contents, response)
+            dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = False
 
+            self.utils_run_mock.assert_has_calls([
+                    call(comparators.Substring(self.test_call),
+                         timeout=mock.ANY),
+                    call(comparators.Substring(self.test_call),
+                         timeout=mock.ANY)
+            ])
+
+            subnet_patch.assert_called_with(self.hostname,
+                                            utils.get_all_restricted_subnets())
 
     def testRunCallWithSingleCallHTTP(self):
         """Test dev_server.ImageServerBase.run_call using http with arg:
         (call)."""
         dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = False
 
-        urllib.request.urlopen(mox.StrContains(self.test_call)).AndReturn(
-                six.StringIO(self.contents))
-        self.mox.ReplayAll()
+        urllib.request.urlopen.return_value = six.StringIO(self.contents)
         response = dev_server.ImageServerBase.run_call(self.test_call)
         self.assertEquals(self.contents, response)
-
+        self.urlopen_mock.assert_called_with(
+                comparators.Substring(self.test_call))
 
     def testRunCallWithCallAndReadlineHTTP(self):
         """Test dev_server.ImageServerBase.run_call using http with arg:
         (call, readline=True)."""
         dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = False
 
-        urllib.request.urlopen(mox.StrContains(self.test_call)).AndReturn(
-                six.StringIO('\n'.join(self.contents_readline)))
-        self.mox.ReplayAll()
+        urllib.request.urlopen.return_value = (six.StringIO('\n'.join(
+                self.contents_readline)))
         response = dev_server.ImageServerBase.run_call(
                 self.test_call, readline=True)
         self.assertEquals(self.contents_readline, response)
+        self.urlopen_mock.assert_called_with(
+                comparators.Substring(self.test_call))
 
 
     def testRunCallWithCallAndTimeoutHTTP(self):
@@ -170,108 +179,118 @@
         (call, timeout=xxx)."""
         dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = False
 
-        urllib.request.urlopen(mox.StrContains(self.test_call), data=None).AndReturn(
-                six.StringIO(self.contents))
-        self.mox.ReplayAll()
+        urllib.request.urlopen.return_value = six.StringIO(self.contents)
         response = dev_server.ImageServerBase.run_call(
                 self.test_call, timeout=60)
         self.assertEquals(self.contents, response)
+        self.urlopen_mock.assert_called_with(comparators.Substring(
+                self.test_call),
+                                             data=None)
 
 
     def testRunCallWithSingleCallSSH(self):
         """Test dev_server.ImageServerBase.run_call using ssh with arg:
         (call)."""
         dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = True
-        self.mox.StubOutWithMock(utils, 'get_restricted_subnet')
-        utils.get_restricted_subnet(
-                self.hostname, utils.RESTRICTED_SUBNETS).AndReturn(
-                self.hostname)
+        with patch.object(utils, 'get_restricted_subnet') as subnet_patch:
+            utils.get_restricted_subnet.return_value = self.hostname
 
-        to_return = MockSshResponse(self.contents)
-        utils.run(mox.StrContains(self.test_call),
-                  timeout=mox.IgnoreArg()).AndReturn(to_return)
-        self.mox.ReplayAll()
-        response = dev_server.ImageServerBase.run_call(self.test_call)
-        self.assertEquals(self.contents, response)
-
+            to_return = MockSshResponse(self.contents)
+            utils.run.return_value = to_return
+            response = dev_server.ImageServerBase.run_call(self.test_call)
+            self.assertEquals(self.contents, response)
+            subnet_patch.assert_called_with(self.hostname,
+                                            utils.get_all_restricted_subnets())
+            expected_str = comparators.Substring(self.test_call)
+            self.utils_run_mock.assert_called_with(expected_str,
+                                                   timeout=mock.ANY)
 
     def testRunCallWithCallAndReadlineSSH(self):
         """Test dev_server.ImageServerBase.run_call using ssh with args:
         (call, readline=True)."""
         dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = True
-        self.mox.StubOutWithMock(utils, 'get_restricted_subnet')
-        utils.get_restricted_subnet(
-                self.hostname, utils.RESTRICTED_SUBNETS).AndReturn(
-                self.hostname)
+        with patch.object(utils, 'get_restricted_subnet') as subnet_patch:
+            utils.get_restricted_subnet.return_value = self.hostname
 
-        to_return = MockSshResponse('\n'.join(self.contents_readline))
-        utils.run(mox.StrContains(self.test_call),
-                  timeout=mox.IgnoreArg()).AndReturn(to_return)
-        self.mox.ReplayAll()
-        response = dev_server.ImageServerBase.run_call(
-                self.test_call, readline=True)
-        self.assertEquals(self.contents_readline, response)
+            to_return = MockSshResponse('\n'.join(self.contents_readline))
+            utils.run.return_value = to_return
+
+            response = dev_server.ImageServerBase.run_call(self.test_call,
+                                                           readline=True)
+
+            self.assertEquals(self.contents_readline, response)
+            subnet_patch.assert_called_with(self.hostname,
+                                            utils.get_all_restricted_subnets())
+
+            expected_str = comparators.Substring(self.test_call)
+            self.utils_run_mock.assert_called_with(expected_str,
+                                                   timeout=mock.ANY)
 
 
     def testRunCallWithCallAndTimeoutSSH(self):
         """Test dev_server.ImageServerBase.run_call using ssh with args:
         (call, timeout=xxx)."""
         dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = True
-        self.mox.StubOutWithMock(utils, 'get_restricted_subnet')
-        utils.get_restricted_subnet(
-                self.hostname, utils.RESTRICTED_SUBNETS).AndReturn(
-                self.hostname)
+        with patch.object(utils, 'get_restricted_subnet') as subnet_patch:
+            utils.get_restricted_subnet.return_value = self.hostname
 
-        to_return = MockSshResponse(self.contents)
-        utils.run(mox.StrContains(self.test_call),
-                  timeout=mox.IgnoreArg()).AndReturn(to_return)
-        self.mox.ReplayAll()
-        response = dev_server.ImageServerBase.run_call(
-                self.test_call, timeout=60)
-        self.assertEquals(self.contents, response)
+            to_return = MockSshResponse(self.contents)
+            utils.run.return_value = to_return
+            response = dev_server.ImageServerBase.run_call(self.test_call,
+                                                           timeout=60)
+
+            self.assertEquals(self.contents, response)
+            subnet_patch.assert_called_with(self.hostname,
+                                            utils.get_all_restricted_subnets())
+
+            expected_str = comparators.Substring(self.test_call)
+            self.utils_run_mock.assert_called_with(expected_str,
+                                                   timeout=mock.ANY)
 
 
     def testRunCallWithExceptionHTTP(self):
         """Test dev_server.ImageServerBase.run_call using http with raising
         exception."""
         dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = False
-        urllib.request.urlopen(mox.StrContains(self.test_call)).AndRaise(E500)
-        self.mox.ReplayAll()
+        urllib.request.urlopen.side_effect = E500
         self.assertRaises(urllib.error.HTTPError,
                           dev_server.ImageServerBase.run_call,
                           self.test_call)
+        self.urlopen_mock.assert_called_with(
+                comparators.Substring(self.test_call))
 
 
     def testRunCallWithExceptionSSH(self):
         """Test dev_server.ImageServerBase.run_call using ssh with raising
         exception."""
         dev_server.ENABLE_SSH_CONNECTION_FOR_DEVSERVER = True
-        self.mox.StubOutWithMock(utils, 'get_restricted_subnet')
-        utils.get_restricted_subnet(
-                self.hostname, utils.RESTRICTED_SUBNETS).AndReturn(
-                self.hostname)
+        with patch.object(utils, 'get_restricted_subnet') as subnet_patch:
+            utils.get_restricted_subnet.return_value = self.hostname
 
-        utils.run(mox.StrContains(self.test_call),
-                  timeout=mox.IgnoreArg()).AndRaise(MockSshError())
-        self.mox.ReplayAll()
-        self.assertRaises(error.CmdError,
-                          dev_server.ImageServerBase.run_call,
-                          self.test_call)
+            utils.run.side_effect = MockSshError()
 
+            self.assertRaises(error.CmdError,
+                              dev_server.ImageServerBase.run_call,
+                              self.test_call)
+            subnet_patch.assert_called_with(self.hostname,
+                                            utils.get_all_restricted_subnets())
+            self.utils_run_mock.assert_called_with(comparators.Substring(
+                    self.test_call),
+                                                   timeout=mock.ANY)
 
     def testRunCallByDevServerHTTP(self):
         """Test dev_server.DevServer.run_call, which uses http, and can be
         directly called by CrashServer."""
-        urllib.request.urlopen(
-                mox.StrContains(self.test_call), data=None).AndReturn(
-                        six.StringIO(self.contents))
-        self.mox.ReplayAll()
+        urllib.request.urlopen.return_value = six.StringIO(self.contents)
         response = dev_server.DevServer.run_call(
                self.test_call, timeout=60)
         self.assertEquals(self.contents, response)
+        self.urlopen_mock.assert_called_with(comparators.Substring(
+                self.test_call),
+                                             data=None)
 
 
-class DevServerTest(mox.MoxTestBase):
+class DevServerTest(unittest.TestCase):
     """Unit tests for dev_server.DevServer.
 
     @var _HOST: fake dev server host address.
@@ -289,55 +308,90 @@
         self.dev_server = dev_server.ImageServer(DevServerTest._HOST)
         self.android_dev_server = dev_server.AndroidBuildServer(
                 DevServerTest._HOST)
-        self.mox.StubOutWithMock(dev_server.ImageServerBase, 'run_call')
-        self.mox.StubOutWithMock(urllib.request, 'urlopen')
-        self.mox.StubOutWithMock(utils, 'run')
-        self.mox.StubOutWithMock(os.path, 'exists')
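+        # patch.object(...).start() plus addCleanup(stop) replaces the old
+        # mox StubOutWithMock pattern; the started mocks are kept on self so
+        # individual tests can set return values and assert on calls.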
+        patcher = patch.object(utils, 'run', spec=True)
+        self.utils_run_mock = patcher.start()
+        self.addCleanup(patcher.stop)
+
+        patcher2 = patch.object(urllib.request, 'urlopen', spec=True)
+        self.urlopen_mock = patcher2.start()
+        self.addCleanup(patcher2.stop)
+
+        patcher3 = patch.object(dev_server.ImageServerBase, 'run_call')
+        self.run_call_mock = patcher3.start()
+        self.addCleanup(patcher3.stop)
+
+        patcher4 = patch.object(os.path, 'exists', spec=True)
+        self.os_exists_mock = patcher4.start()
+        self.addCleanup(patcher4.stop)
+
         # Hide local restricted_subnets setting.
         dev_server.RESTRICTED_SUBNETS = []
-        self.mox.StubOutWithMock(dev_server.ImageServer,
-                                 '_read_json_response_from_devserver')
+
+        _read_json_response_from_devserver = patch.object(
+                dev_server.ImageServer, '_read_json_response_from_devserver')
+        self._read_json_mock = _read_json_response_from_devserver.start()
+        self.addCleanup(_read_json_response_from_devserver.stop)
 
         sleep = mock.patch('time.sleep', autospec=True)
         sleep.start()
         self.addCleanup(sleep.stop)
 
+        self.image_name = 'fake/image'
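+        # The comparators below stand in for mox.StrContains/mox.And: they
+        # compare equal to any string containing the given substrings, so
+        # they can be used inside mock.call() expectations.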
+        first_staged = comparators.Substrings(
+                [self._HOST, self.image_name, 'stage?'])
+        second_staged = comparators.Substrings(
+                [self._HOST, self.image_name, 'is_staged'])
+        self.staged_calls = [call(first_staged), call(second_staged)]
+
+    def _standard_assert_calls(self):
+        """Assert the standard calls are made."""
+        bad_host, good_host = 'http://bad_host:99', 'http://good_host:8080'
+
+        argument1 = comparators.Substring(bad_host)
+        argument2 = comparators.Substring(good_host)
+        calls = [
+                call(argument1, timeout=mock.ANY),
+                call(argument2, timeout=mock.ANY)
+        ]
+        self.run_call_mock.assert_has_calls(calls)
 
     def testSimpleResolve(self):
         """One devserver, verify we resolve to it."""
-        self.mox.StubOutWithMock(dev_server, '_get_dev_server_list')
-        self.mox.StubOutWithMock(dev_server.ImageServer, 'devserver_healthy')
-        dev_server._get_dev_server_list().MultipleTimes().AndReturn(
-                [DevServerTest._HOST])
-        dev_server.ImageServer.devserver_healthy(DevServerTest._HOST).AndReturn(
-                                                                        True)
-        self.mox.ReplayAll()
-        devserver = dev_server.ImageServer.resolve('my_build')
-        self.assertEquals(devserver.url(), DevServerTest._HOST)
+        with patch.object(dev_server,
+                          '_get_dev_server_list') as server_list_patch, \
+             patch.object(dev_server.ImageServer,
+                          'devserver_healthy') as devserver_healthy_patch:
+
+            dev_server._get_dev_server_list.return_value = ([
+                    DevServerTest._HOST
+            ])
+
+            dev_server.ImageServer.devserver_healthy.return_value = True
+            devserver = dev_server.ImageServer.resolve('my_build')
+            self.assertEquals(devserver.url(), DevServerTest._HOST)
+
+            server_list_patch.assert_called_with()
+            devserver_healthy_patch.assert_called_with(DevServerTest._HOST)
 
 
     def testResolveWithFailure(self):
         """Ensure we rehash on a failed ping on a bad_host."""
-        self.mox.StubOutWithMock(dev_server, '_get_dev_server_list')
-        bad_host, good_host = 'http://bad_host:99', 'http://good_host:8080'
-        dev_server._get_dev_server_list().MultipleTimes().AndReturn(
-                [bad_host, good_host])
-        argument1 = mox.StrContains(bad_host)
-        argument2 = mox.StrContains(good_host)
 
-        # Mock out bad ping failure to bad_host by raising devserver exception.
-        dev_server.ImageServerBase.run_call(
-                argument1, timeout=mox.IgnoreArg()).AndRaise(
-                        dev_server.DevServerException())
-        # Good host is good.
-        dev_server.ImageServerBase.run_call(
-                argument2, timeout=mox.IgnoreArg()).AndReturn(
-                        '{"free_disk": 1024}')
+        with patch.object(dev_server, '_get_dev_server_list'):
+            bad_host, good_host = 'http://bad_host:99', 'http://good_host:8080'
+            dev_server._get_dev_server_list.return_value = ([
+                    bad_host, good_host
+            ])
 
-        self.mox.ReplayAll()
-        host = dev_server.ImageServer.resolve(0) # Using 0 as it'll hash to 0.
-        self.assertEquals(host.url(), good_host)
-        self.mox.VerifyAll()
+            # Mock out bad ping failure by raising devserver exception.
+            dev_server.ImageServerBase.run_call.side_effect = [
+                    dev_server.DevServerException(), '{"free_disk": 1024}'
+            ]
+
+            host = dev_server.ImageServer.resolve(
+                    0)  # Using 0 as it'll hash to 0.
+            self.assertEquals(host.url(), good_host)
+            self._standard_assert_calls()
 
 
     def testResolveWithFailureURLError(self):
@@ -350,390 +404,348 @@
         real_retry = retry.retry
         retry.retry = retry_mock
 
-        self.mox.StubOutWithMock(dev_server, '_get_dev_server_list')
-        bad_host, good_host = 'http://bad_host:99', 'http://good_host:8080'
-        dev_server._get_dev_server_list().MultipleTimes().AndReturn(
-                [bad_host, good_host])
-        argument1 = mox.StrContains(bad_host)
-        argument2 = mox.StrContains(good_host)
+        with patch.object(dev_server, '_get_dev_server_list'):
 
-        # Mock out bad ping failure to bad_host by raising devserver exception.
-        dev_server.ImageServerBase.run_call(
-                argument1, timeout=mox.IgnoreArg()).MultipleTimes().AndRaise(
-                        urllib.error.URLError('urlopen connection timeout'))
+            bad_host, good_host = 'http://bad_host:99', 'http://good_host:8080'
+            dev_server._get_dev_server_list.return_value = ([
+                    bad_host, good_host
+            ])
 
-        # Good host is good.
-        dev_server.ImageServerBase.run_call(
-                argument2, timeout=mox.IgnoreArg()).AndReturn(
-                        '{"free_disk": 1024}')
+            # Mock out bad ping failure by raising devserver exception.
+            dev_server.ImageServerBase.run_call.side_effect = [
+                    urllib.error.URLError('urlopen connection timeout'),
+                    '{"free_disk": 1024}'
+            ]
 
-        self.mox.ReplayAll()
-        host = dev_server.ImageServer.resolve(0) # Using 0 as it'll hash to 0.
-        self.assertEquals(host.url(), good_host)
-        self.mox.VerifyAll()
+            host = dev_server.ImageServer.resolve(
+                    0)  # Using 0 as it'll hash to 0.
+            self.assertEquals(host.url(), good_host)
 
-        retry.retry = real_retry
+            retry.retry = real_retry
+            self._standard_assert_calls()
 
 
     def testResolveWithManyDevservers(self):
         """Should be able to return different urls with multiple devservers."""
-        self.mox.StubOutWithMock(dev_server.ImageServer, 'servers')
-        self.mox.StubOutWithMock(dev_server.DevServer, 'devserver_healthy')
 
-        host0_expected = 'http://host0:8080'
-        host1_expected = 'http://host1:8082'
+        with patch.object(dev_server.ImageServer, 'servers'), \
+             patch.object(dev_server.DevServer,
+                          'devserver_healthy') as devserver_healthy_patch:
 
-        dev_server.ImageServer.servers().MultipleTimes().AndReturn(
-                [host0_expected, host1_expected])
-        dev_server.ImageServer.devserver_healthy(host0_expected).AndReturn(True)
-        dev_server.ImageServer.devserver_healthy(host1_expected).AndReturn(True)
+            host0_expected = 'http://host0:8080'
+            host1_expected = 'http://host1:8082'
 
-        self.mox.ReplayAll()
-        host0 = dev_server.ImageServer.resolve(0)
-        host1 = dev_server.ImageServer.resolve(1)
-        self.mox.VerifyAll()
+            dev_server.ImageServer.servers.return_value = ([
+                    host0_expected, host1_expected
+            ])
+            dev_server.ImageServer.devserver_healthy.return_value = True
 
-        self.assertEqual(host0.url(), host0_expected)
-        self.assertEqual(host1.url(), host1_expected)
+            host0 = dev_server.ImageServer.resolve(0)
+            host1 = dev_server.ImageServer.resolve(1)
 
+            self.assertEqual(host0.url(), host0_expected)
+            self.assertEqual(host1.url(), host1_expected)
 
-    def _mockWriteFile(self):
-        """Mock write content to a file."""
-        mock_file = self.mox.CreateMockAnything()
-        open(mox.IgnoreArg(), 'w').AndReturn(mock_file)
-        mock_file.__enter__().AndReturn(mock_file)
-        mock_file.write(mox.IgnoreArg())
-        mock_file.__exit__(None, None, None)
+            calls = [call(host0_expected), call(host1_expected)]
+            devserver_healthy_patch.assert_has_calls(calls)
 
 
     def testSuccessfulTriggerDownloadSync(self):
         """Call the dev server's download method with synchronous=True."""
-        name = 'fake/image'
-        self.mox.StubOutWithMock(dev_server.ImageServer, '_finish_download')
-        argument1 = mox.And(mox.StrContains(self._HOST), mox.StrContains(name),
-                            mox.StrContains('stage?'))
-        argument2 = mox.And(mox.StrContains(self._HOST), mox.StrContains(name),
-                            mox.StrContains('is_staged'))
-        dev_server.ImageServerBase.run_call(argument1).AndReturn('Success')
-        dev_server.ImageServerBase.run_call(argument2).AndReturn('True')
-        self.dev_server._finish_download(name, mox.IgnoreArg(), mox.IgnoreArg())
+        with patch.object(dev_server.ImageServer,
+                          '_finish_download') as download_patch:
 
-        # Synchronous case requires a call to finish download.
-        self.mox.ReplayAll()
-        self.dev_server.trigger_download(name, synchronous=True)
-        self.mox.VerifyAll()
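+            # side_effect yields 'Success' for the stage request and 'True'
+            # for the subsequent is_staged poll.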
+            dev_server.ImageServerBase.run_call.side_effect = [
+                    'Success', 'True'
+            ]
+
+            # Synchronous case requires a call to finish download.
+            self.dev_server.trigger_download(self.image_name, synchronous=True)
+
+            download_patch.assert_called_with(self.image_name, mock.ANY,
+                                              mock.ANY)
+
+            self.run_call_mock.assert_has_calls(self.staged_calls)
 
 
     def testSuccessfulTriggerDownloadASync(self):
         """Call the dev server's download method with synchronous=False."""
-        name = 'fake/image'
-        argument1 = mox.And(mox.StrContains(self._HOST), mox.StrContains(name),
-                            mox.StrContains('stage?'))
-        argument2 = mox.And(mox.StrContains(self._HOST), mox.StrContains(name),
-                            mox.StrContains('is_staged'))
-        dev_server.ImageServerBase.run_call(argument1).AndReturn('Success')
-        dev_server.ImageServerBase.run_call(argument2).AndReturn('True')
+        dev_server.ImageServerBase.run_call.side_effect = ['Success', 'True']
+        self.dev_server.trigger_download(self.image_name, synchronous=False)
 
-        self.mox.ReplayAll()
-        self.dev_server.trigger_download(name, synchronous=False)
-        self.mox.VerifyAll()
-
+        self.run_call_mock.assert_has_calls(self.staged_calls)
 
     def testURLErrorRetryTriggerDownload(self):
         """Should retry on URLError, but pass through real exception."""
-        self.mox.StubOutWithMock(time, 'sleep')
+        with patch.object(time, 'sleep'):
 
-        refused = urllib.error.URLError('[Errno 111] Connection refused')
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(refused)
-        time.sleep(mox.IgnoreArg())
-        dev_server.ImageServerBase.run_call(mox.IgnoreArg()).AndRaise(E403)
-        self.mox.ReplayAll()
-        self.assertRaises(dev_server.DevServerException,
-                          self.dev_server.trigger_download,
-                          '')
+            refused = urllib.error.URLError('[Errno 111] Connection refused')
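+            # The URLError is raised first and retried; the HTTPError then
+            # surfaces as a DevServerException.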
+            dev_server.ImageServerBase.run_call.side_effect = [refused, E403]
+            self.assertRaises(dev_server.DevServerException,
+                              self.dev_server.trigger_download, '')
+            self.run_call_mock.assert_called()
 
 
     def testErrorTriggerDownload(self):
         """Should call the dev server's download method using http, fail
         gracefully."""
-        dev_server.ImageServerBase.run_call(mox.IgnoreArg()).AndRaise(E500)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = E500
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.trigger_download,
                           '')
+        self.run_call_mock.assert_called()
 
 
     def testForbiddenTriggerDownload(self):
         """Should call the dev server's download method using http,
         get exception."""
-        dev_server.ImageServerBase.run_call(mox.IgnoreArg()).AndRaise(E403)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = E403
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.trigger_download,
                           '')
+        self.run_call_mock.assert_called()
 
 
     def testCmdErrorTriggerDownload(self):
         """Should call the dev server's download method using ssh, retry
         trigger_download when getting error.CmdError, raise exception for
         urllib2.HTTPError."""
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(CMD_ERROR)
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(E500)
-        self.mox.ReplayAll()
+
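+        # side_effect raises each item in turn: the CmdError is retried and
+        # the HTTPError then surfaces as a DevServerException.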
+        dev_server.ImageServerBase.run_call.side_effect = [CMD_ERROR, E500]
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.trigger_download,
                           '')
+        self.run_call_mock.assert_has_calls([call(mock.ANY), call(mock.ANY)])
 
 
     def testSuccessfulFinishDownload(self):
         """Should successfully call the dev server's finish download method."""
-        name = 'fake/image'
-        argument1 = mox.And(mox.StrContains(self._HOST),
-                            mox.StrContains(name),
-                            mox.StrContains('stage?'))
-        argument2 = mox.And(mox.StrContains(self._HOST),
-                            mox.StrContains(name),
-                            mox.StrContains('is_staged'))
-        dev_server.ImageServerBase.run_call(argument1).AndReturn('Success')
-        dev_server.ImageServerBase.run_call(argument2).AndReturn('True')
+        dev_server.ImageServerBase.run_call.side_effect = ['Success', 'True']
 
         # Synchronous case requires a call to finish download.
-        self.mox.ReplayAll()
-        self.dev_server.finish_download(name)  # Raises on failure.
-        self.mox.VerifyAll()
+        self.dev_server.finish_download(self.image_name)  # Raises on failure.
 
+        self.run_call_mock.assert_has_calls(self.staged_calls)
 
     def testErrorFinishDownload(self):
         """Should call the dev server's finish download method using http, fail
         gracefully."""
-        dev_server.ImageServerBase.run_call(mox.IgnoreArg()).AndRaise(E500)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = E500
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.finish_download,
                           '')
-
+        self.run_call_mock.assert_called()
 
     def testCmdErrorFinishDownload(self):
         """Should call the dev server's finish download method using ssh,
         retry finish_download when getting error.CmdError, raise exception
         for urllib2.HTTPError."""
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(CMD_ERROR)
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(E500)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = [CMD_ERROR, E500]
+
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.finish_download,
                           '')
-
+        self.run_call_mock.assert_has_calls([call(mock.ANY), call(mock.ANY)])
 
     def testListControlFiles(self):
         """Should successfully list control files from the dev server."""
-        name = 'fake/build'
         control_files = ['file/one', 'file/two']
-        argument = mox.And(mox.StrContains(self._HOST),
-                           mox.StrContains(name))
-        dev_server.ImageServerBase.run_call(
-                argument, readline=True).AndReturn(control_files)
+        argument = comparators.Substrings([self._HOST, self.image_name])
+        dev_server.ImageServerBase.run_call.return_value = control_files
 
-        self.mox.ReplayAll()
-        paths = self.dev_server.list_control_files(name)
+        paths = self.dev_server.list_control_files(self.image_name)
         self.assertEquals(len(paths), 2)
         for f in control_files:
             self.assertTrue(f in paths)
 
+        self.run_call_mock.assert_called_with(argument, readline=True)
 
     def testFailedListControlFiles(self):
         """Should call the dev server's list-files method using http, get
         exception."""
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg(), readline=True).AndRaise(E500)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = E500
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.list_control_files,
                           '')
+        self.run_call_mock.assert_called_with(mock.ANY, readline=True)
 
 
     def testExplodingListControlFiles(self):
         """Should call the dev server's list-files method using http, get
         exception."""
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg(), readline=True).AndRaise(E403)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = E403
         self.assertRaises(dev_server.DevServerException,
-                          self.dev_server.list_control_files,
-                          '')
-
+                          self.dev_server.list_control_files, '')
+        self.run_call_mock.assert_called_with(mock.ANY, readline=True)
 
     def testCmdErrorListControlFiles(self):
         """Should call the dev server's list-files method using ssh, retry
         list_control_files when getting error.CmdError, raise exception for
         urllib2.HTTPError."""
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg(), readline=True).AndRaise(CMD_ERROR)
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg(), readline=True).AndRaise(E500)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = [CMD_ERROR, E500]
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.list_control_files,
                           '')
+        self.run_call_mock.assert_called_with(mock.ANY, readline=True)
 
     def testListSuiteControls(self):
         """Should successfully list all contents of control files from the dev
         server."""
-        name = 'fake/build'
         control_contents = ['control file one', 'control file two']
-        argument = mox.And(mox.StrContains(self._HOST),
-                           mox.StrContains(name))
-        dev_server.ImageServerBase.run_call(
-                argument).AndReturn(json.dumps(control_contents))
+        argument = comparators.Substrings([self._HOST, self.image_name])
 
-        self.mox.ReplayAll()
-        file_contents = self.dev_server.list_suite_controls(name)
+        dev_server.ImageServerBase.run_call.return_value = (
+                json.dumps(control_contents))
+
+        file_contents = self.dev_server.list_suite_controls(self.image_name)
         self.assertEquals(len(file_contents), 2)
         for f in control_contents:
             self.assertTrue(f in file_contents)
 
+        self.run_call_mock.assert_called_with(argument)
 
     def testFailedListSuiteControls(self):
         """Should call the dev server's list_suite_controls method using http,
         get exception."""
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(E500)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = E500
+
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.list_suite_controls,
                           '')
+        self.run_call_mock.assert_called()
 
 
     def testExplodingListSuiteControls(self):
         """Should call the dev server's list_suite_controls method using http,
         get exception."""
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(E403)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = E403
+
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.list_suite_controls,
                           '')
-
+        self.run_call_mock.assert_called()
 
     def testCmdErrorListSuiteControls(self):
         """Should call the dev server's list_suite_controls method using ssh,
         retry list_suite_controls when getting error.CmdError, raise exception
         for urllib2.HTTPError."""
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(CMD_ERROR)
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(E500)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = [CMD_ERROR, E500]
+
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.list_suite_controls,
                           '')
-
+        self.run_call_mock.assert_has_calls([call(mock.ANY), call(mock.ANY)])
 
     def testGetControlFile(self):
         """Should successfully get a control file from the dev server."""
-        name = 'fake/build'
         file = 'file/one'
         contents = 'Multi-line\nControl File Contents\n'
-        argument = mox.And(mox.StrContains(self._HOST),
-                            mox.StrContains(name),
-                            mox.StrContains(file))
-        dev_server.ImageServerBase.run_call(argument).AndReturn(contents)
+        argument = comparators.Substrings([self._HOST, self.image_name, file])
 
-        self.mox.ReplayAll()
-        self.assertEquals(self.dev_server.get_control_file(name, file),
-                          contents)
+        dev_server.ImageServerBase.run_call.return_value = contents
 
+        self.assertEquals(
+                self.dev_server.get_control_file(self.image_name, file),
+                contents)
+
+        self.run_call_mock.assert_called_with(argument)
 
     def testErrorGetControlFile(self):
         """Should try to get the contents of a control file using http, get
         exception."""
-        dev_server.ImageServerBase.run_call(mox.IgnoreArg()).AndRaise(E500)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = E500
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.get_control_file,
                           '', '')
-
+        self.run_call_mock.assert_called()
 
     def testForbiddenGetControlFile(self):
         """Should try to get the contents of a control file using http, get
         exception."""
-        dev_server.ImageServerBase.run_call(mox.IgnoreArg()).AndRaise(E403)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = E403
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.get_control_file,
                           '', '')
+        self.run_call_mock.assert_called()
 
 
     def testCmdErrorGetControlFile(self):
         """Should try to get the contents of a control file using ssh, retry
         get_control_file when getting error.CmdError, raise exception for
         urllib2.HTTPError."""
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(CMD_ERROR)
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(E500)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = [CMD_ERROR, E500]
+
         self.assertRaises(dev_server.DevServerException,
-                          self.dev_server.get_control_file,
-                          '', '')
+                          self.dev_server.get_control_file, '', '')
+        self.run_call_mock.assert_has_calls([call(mock.ANY), call(mock.ANY)])
 
 
     def testGetLatestBuild(self):
         """Should successfully return a build for a given target."""
-        self.mox.StubOutWithMock(dev_server.ImageServer, 'servers')
-        self.mox.StubOutWithMock(dev_server.DevServer, 'devserver_healthy')
+        with patch.object(dev_server.ImageServer, 'servers'), \
+            patch.object(dev_server.ImageServer,
+                         'devserver_healthy') as devserver_patch:
 
-        dev_server.ImageServer.servers().AndReturn([self._HOST])
-        dev_server.ImageServer.devserver_healthy(self._HOST).AndReturn(True)
+            dev_server.ImageServer.servers.return_value = [self._HOST]
+            dev_server.ImageServer.devserver_healthy.return_value = True
 
-        target = 'x86-generic-release'
-        build_string = 'R18-1586.0.0-a1-b1514'
-        argument = mox.And(mox.StrContains(self._HOST),
-                           mox.StrContains(target))
-        dev_server.ImageServerBase.run_call(argument).AndReturn(build_string)
+            target = 'x86-generic-release'
+            build_string = 'R18-1586.0.0-a1-b1514'
+            argument = comparators.Substrings([self._HOST, target])
 
-        self.mox.ReplayAll()
-        build = dev_server.ImageServer.get_latest_build(target)
-        self.assertEquals(build_string, build)
+            dev_server.ImageServerBase.run_call.return_value = build_string
+
+            build = dev_server.ImageServer.get_latest_build(target)
+            self.assertEquals(build_string, build)
+
+            devserver_patch.assert_called_with(self._HOST)
+            self.run_call_mock.assert_called_with(argument)
 
 
     def testGetLatestBuildWithManyDevservers(self):
         """Should successfully return newest build with multiple devservers."""
-        self.mox.StubOutWithMock(dev_server.ImageServer, 'servers')
-        self.mox.StubOutWithMock(dev_server.DevServer, 'devserver_healthy')
+        with patch.object(dev_server.ImageServer, 'servers'), \
+            patch.object(dev_server.ImageServer,
+                         'devserver_healthy') as devserver_patch:
 
-        host0_expected = 'http://host0:8080'
-        host1_expected = 'http://host1:8082'
+            host0_expected = 'http://host0:8080'
+            host1_expected = 'http://host1:8082'
 
-        dev_server.ImageServer.servers().MultipleTimes().AndReturn(
-                [host0_expected, host1_expected])
+            dev_server.ImageServer.servers.return_value = ([
+                    host0_expected, host1_expected
+            ])
 
-        dev_server.ImageServer.devserver_healthy(host0_expected).AndReturn(True)
-        dev_server.ImageServer.devserver_healthy(host1_expected).AndReturn(True)
+            dev_server.ImageServer.devserver_healthy.return_value = True
 
-        target = 'x86-generic-release'
-        build_string1 = 'R9-1586.0.0-a1-b1514'
-        build_string2 = 'R19-1586.0.0-a1-b3514'
-        argument1 = mox.And(mox.StrContains(host0_expected),
-                            mox.StrContains(target))
-        argument2 = mox.And(mox.StrContains(host1_expected),
-                            mox.StrContains(target))
-        dev_server.ImageServerBase.run_call(argument1).AndReturn(build_string1)
-        dev_server.ImageServerBase.run_call(argument2).AndReturn(build_string2)
 
-        self.mox.ReplayAll()
-        build = dev_server.ImageServer.get_latest_build(target)
-        self.assertEquals(build_string2, build)
+            target = 'x86-generic-release'
+            build_string1 = 'R9-1586.0.0-a1-b1514'
+            build_string2 = 'R19-1586.0.0-a1-b3514'
+            argument1 = comparators.Substrings([host0_expected, target])
+            argument2 = comparators.Substrings([host1_expected, target])
+
+            dev_server.ImageServerBase.run_call.side_effect = ([
+                    build_string1, build_string2
+            ])
+
+            build = dev_server.ImageServer.get_latest_build(target)
+            self.assertEquals(build_string2, build)
+            devserver_patch.assert_has_calls(
+                    [call(host0_expected),
+                     call(host1_expected)])
+
+            self.run_call_mock.assert_has_calls(
+                    [call(argument1), call(argument2)])
 
 
     def testCrashesAreSetToTheCrashServer(self):
         """Should send symbolicate dump rpc calls to crash_server."""
-        self.mox.ReplayAll()
         call = self.crash_server.build_call('symbolicate_dump')
         self.assertTrue(call.startswith(self._CRASH_HOST))
 
@@ -743,32 +755,31 @@
         expected_archive_url = archive_url
         if not archive_url:
             expected_archive_url = 'gs://my_default_url'
-            self.mox.StubOutWithMock(dev_server, '_get_image_storage_server')
-            dev_server._get_image_storage_server().AndReturn(
-                'gs://my_default_url')
+            image_patch = patch.object(dev_server, '_get_image_storage_server')
+            self.image_server_mock = image_patch.start()
+            self.addCleanup(image_patch.stop)
+            dev_server._get_image_storage_server.return_value = (
+                    'gs://my_default_url')
             name = 'fake/image'
         else:
             # This is embedded in the archive_url. Not needed.
             name = ''
 
-        argument1 = mox.And(mox.StrContains(expected_archive_url),
-                            mox.StrContains(name),
-                            mox.StrContains('artifacts=%s' %
-                                            ','.join(artifacts)),
-                            mox.StrContains('files=%s' % ','.join(files)),
-                            mox.StrContains('stage?'))
-        argument2 = mox.And(mox.StrContains(expected_archive_url),
-                            mox.StrContains(name),
-                            mox.StrContains('artifacts=%s' %
-                                            ','.join(artifacts)),
-                            mox.StrContains('files=%s' % ','.join(files)),
-                            mox.StrContains('is_staged'))
-        dev_server.ImageServerBase.run_call(argument1).AndReturn('Success')
-        dev_server.ImageServerBase.run_call(argument2).AndReturn('True')
+        argument1 = comparators.Substrings([
+                expected_archive_url, name,
+                'artifacts=%s' % ','.join(artifacts),
+                'files=%s' % ','.join(files), 'stage?'
+        ])
+        argument2 = comparators.Substrings([
+                expected_archive_url, name,
+                'artifacts=%s' % ','.join(artifacts),
+                'files=%s' % ','.join(files), 'is_staged?'
+        ])
 
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = ['Success', 'True']
+
         self.dev_server.stage_artifacts(name, artifacts, files, archive_url)
-        self.mox.VerifyAll()
+        self.run_call_mock.assert_has_calls([call(argument1), call(argument2)])
 
 
     def testStageArtifactsBasic(self):
@@ -797,7 +808,7 @@
 
 
     def testStagedFileUrl(self):
-        """Sanity tests that the staged file url looks right."""
+        """Tests that the staged file url looks right."""
         devserver_label = 'x86-mario-release/R30-1234.0.0'
         url = self.dev_server.get_staged_file_url('stateful.tgz',
                                                   devserver_label)
@@ -805,7 +816,7 @@
                                  'stateful.tgz'])
         self.assertEquals(url, expected_url)
 
-        devserver_label = 'something_crazy/that/you_MIGHT/hate'
+        devserver_label = 'something_complex/that/you_MIGHT/hate'
         url = self.dev_server.get_staged_file_url('chromiumos_image.bin',
                                                   devserver_label)
         expected_url = '/'.join([self._HOST, 'static', devserver_label,
@@ -815,43 +826,45 @@
 
     def _StageTimeoutHelper(self):
         """Helper class for testing staging timeout."""
-        self.mox.StubOutWithMock(dev_server.ImageServer, 'call_and_wait')
-        dev_server.ImageServer.call_and_wait(
-                call_name='stage',
-                artifacts=mox.IgnoreArg(),
-                files=mox.IgnoreArg(),
-                archive_url=mox.IgnoreArg(),
-                error_message=mox.IgnoreArg()).AndRaise(bin_utils.TimeoutError())
+        call_patch = patch.object(dev_server.ImageServer, 'call_and_wait')
+        self.call_mock = call_patch.start()
+        self.addCleanup(call_patch.stop)
+        dev_server.ImageServer.call_and_wait.side_effect = (
+                bin_utils.TimeoutError())
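+        # Assigning an exception instance to side_effect makes every
+        # call_and_wait invocation raise TimeoutError.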
+
+    def _VerifyTimeoutHelper(self):
+        self.call_mock.assert_called_with(call_name='stage',
+                                          artifacts=mock.ANY,
+                                          files=mock.ANY,
+                                          archive_url=mock.ANY,
+                                          error_message=mock.ANY)
 
 
     def test_StageArtifactsTimeout(self):
         """Test DevServerException is raised when stage_artifacts timed out."""
         self._StageTimeoutHelper()
-        self.mox.ReplayAll()
+
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.stage_artifacts,
                           image='fake/image', artifacts=['full_payload'])
-        self.mox.VerifyAll()
+        self._VerifyTimeoutHelper()
 
 
     def test_TriggerDownloadTimeout(self):
         """Test DevServerException is raised when trigger_download timed out."""
         self._StageTimeoutHelper()
-        self.mox.ReplayAll()
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.trigger_download,
                           image='fake/image')
-        self.mox.VerifyAll()
-
+        self._VerifyTimeoutHelper()
 
     def test_FinishDownloadTimeout(self):
         """Test DevServerException is raised when finish_download timed out."""
         self._StageTimeoutHelper()
-        self.mox.ReplayAll()
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.finish_download,
                           image='fake/image')
-        self.mox.VerifyAll()
+        self._VerifyTimeoutHelper()
 
 
     def test_compare_load(self):
@@ -889,36 +902,39 @@
         build_id = '123456'
         artifacts = android_utils.AndroidArtifacts.get_artifacts_for_reimage(
                 None)
-        self.mox.StubOutWithMock(dev_server.AndroidBuildServer,
-                                 '_finish_download')
-        argument1 = mox.And(mox.StrContains(self._HOST),
-                            mox.StrContains(target),
-                            mox.StrContains(branch),
-                            mox.StrContains(build_id),
-                            mox.StrContains('stage?'))
-        argument2 = mox.And(mox.StrContains(self._HOST),
-                            mox.StrContains(target),
-                            mox.StrContains(branch),
-                            mox.StrContains(build_id),
-                            mox.StrContains('is_staged'))
-        dev_server.ImageServerBase.run_call(argument1).AndReturn('Success')
-        dev_server.ImageServerBase.run_call(argument2).AndReturn('True')
+        with patch.object(dev_server.AndroidBuildServer,
+                          '_finish_download') as finish_download_patch:
 
-        if synchronous:
-            android_build_info = {'target': target,
-                                  'build_id': build_id,
-                                  'branch': branch}
-            build = dev_server.ANDROID_BUILD_NAME_PATTERN % android_build_info
-            self.android_dev_server._finish_download(
-                    build, artifacts, '', target=target, build_id=build_id,
-                    branch=branch)
+            argument1 = comparators.Substrings(
+                    [self._HOST, target, branch, build_id, 'stage?'])
+            argument2 = comparators.Substrings(
+                    [self._HOST, target, branch, build_id, 'is_staged?'])
 
-        # Synchronous case requires a call to finish download.
-        self.mox.ReplayAll()
-        self.android_dev_server.trigger_download(
-                synchronous=synchronous, target=target, build_id=build_id,
-                branch=branch)
-        self.mox.VerifyAll()
+            dev_server.ImageServerBase.run_call.side_effect = [
+                    'Success', 'True'
+            ]
+
+            if synchronous:
+                android_build_info = {
+                        'target': target,
+                        'build_id': build_id,
+                        'branch': branch
+                }
+                build = (dev_server.ANDROID_BUILD_NAME_PATTERN %
+                         android_build_info)
+
+            # Synchronous case requires a call to finish download.
+            self.android_dev_server.trigger_download(synchronous=synchronous,
+                                                     target=target,
+                                                     build_id=build_id,
+                                                     branch=branch)
+            if synchronous:
+                finish_download_patch.assert_called_with(build,
+                                                         artifacts,
+                                                         '',
+                                                         target=target,
+                                                         build_id=build_id,
+                                                         branch=branch)
+            self.run_call_mock.assert_has_calls(
+                    [call(argument1), call(argument2)])
 
 
     def testSuccessfulTriggerDownloadAndroidSync(self):
@@ -936,29 +952,35 @@
         """Test method get_unrestricted_devservers works as expected."""
         restricted_devserver = 'http://192.168.0.100:8080'
         unrestricted_devserver = 'http://172.1.1.3:8080'
-        self.mox.StubOutWithMock(dev_server.ImageServer, 'servers')
-        dev_server.ImageServer.servers().AndReturn([restricted_devserver,
-                                                    unrestricted_devserver])
-        self.mox.ReplayAll()
-        # crbug.com/1027277: get_unrestricted_devservers() now returns all
-        # servers.
-        self.assertEqual(dev_server.ImageServer.get_unrestricted_devservers(
-                                [('192.168.0.0', 24)]),
-                         [unrestricted_devserver])
+        with patch.object(dev_server.ImageServer, 'servers') as servers_patch:
+            dev_server.ImageServer.servers.return_value = ([
+                    restricted_devserver, unrestricted_devserver
+            ])
+            # crbug.com/1027277: get_unrestricted_devservers() now returns all
+            # servers.
+            self.assertEqual(
+                    dev_server.ImageServer.get_unrestricted_devservers([
+                            ('192.168.0.0', 24)
+                    ]), [unrestricted_devserver])
+
+            servers_patch.assert_called_once()
 
     def testGetUnrestrictedDevserversReturnsAll(self):
         """Test method get_unrestricted_devservers works as expected."""
         restricted_devserver = 'http://192.168.0.100:8080'
         unrestricted_devserver = 'http://172.1.1.3:8080'
-        self.mox.StubOutWithMock(dev_server.ImageServer, 'servers')
-        dev_server.ImageServer.servers().AndReturn([restricted_devserver,
-                                                    unrestricted_devserver])
-        self.mox.ReplayAll()
-        # crbug.com/1027277: get_unrestricted_devservers() now returns all
-        # servers.
-        self.assertEqual(dev_server.ImageServer.get_unrestricted_devservers(
-                                [('192.168.0.0', 24)]),
-                         [restricted_devserver, unrestricted_devserver])
+        with patch.object(dev_server.ImageServer, 'servers') as servers_patch:
+            dev_server.ImageServer.servers.return_value = ([
+                    restricted_devserver, unrestricted_devserver
+            ])
+            # crbug.com/1027277: get_unrestricted_devservers() now returns all
+            # servers.
+            self.assertEqual(
+                    dev_server.ImageServer.get_unrestricted_devservers([
+                            ('192.168.0.0', 24)
+                    ]), [restricted_devserver, unrestricted_devserver])
+
+            servers_patch.assert_called_once()
 
     def testDevserverHealthy(self):
         """Test which types of connections that method devserver_healthy uses
@@ -967,52 +989,58 @@
         CrashServer always adopts DevServer.run_call.
         ImageServer and AndroidBuildServer use ImageServerBase.run_call.
         """
-        argument = mox.StrContains(self._HOST)
+        argument = comparators.Substring(self._HOST)
 
         # for testing CrashServer
-        self.mox.StubOutWithMock(dev_server.DevServer, 'run_call')
-        dev_server.DevServer.run_call(
-                argument, timeout=mox.IgnoreArg()).AndReturn(
-                        '{"free_disk": 1024}')
-        # for testing ImageServer
-        dev_server.ImageServerBase.run_call(
-                argument, timeout=mox.IgnoreArg()).AndReturn(
-                        '{"free_disk": 1024}')
-        # for testing AndroidBuildServer
-        dev_server.ImageServerBase.run_call(
-                argument, timeout=mox.IgnoreArg()).AndReturn(
-                        '{"free_disk": 1024}')
 
-        self.mox.ReplayAll()
-        self.assertTrue(dev_server.CrashServer.devserver_healthy(self._HOST))
-        self.assertTrue(dev_server.ImageServer.devserver_healthy(self._HOST))
-        self.assertTrue(
-                dev_server.AndroidBuildServer.devserver_healthy(self._HOST))
+        with patch.object(dev_server.DevServer, 'run_call'):
+            # for testing CrashServer
+            dev_server.DevServer.run_call.return_value = '{"free_disk": 1024}'
+
+            # for testing ImageServer
+            dev_server.ImageServer.run_call.return_value = (
+                    '{"free_disk": 1024}')
+
+            # for testing AndroidBuildServer
+            dev_server.AndroidBuildServer.run_call.return_value = (
+                    '{"free_disk": 1024}')
+
+            self.assertTrue(
+                    dev_server.CrashServer.devserver_healthy(self._HOST))
+            self.assertTrue(
+                    dev_server.ImageServer.devserver_healthy(self._HOST))
+            self.assertTrue(
+                    dev_server.AndroidBuildServer.devserver_healthy(
+                            self._HOST))
+
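+            # ImageServer and AndroidBuildServer inherit run_call from
+            # ImageServerBase, so their assertions exercise the run_call mock
+            # patched in setUp; the DevServer assertion checks the mock
+            # created above.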
+            dev_server.DevServer.run_call.assert_called_with(argument,
+                                                             timeout=mock.ANY)
+            dev_server.ImageServer.run_call.assert_called_with(
+                    argument, timeout=mock.ANY)
+            dev_server.AndroidBuildServer.run_call.assert_called_with(
+                    argument, timeout=mock.ANY)
 
 
     def testLocateFile(self):
         """Test locating files for AndriodBuildServer."""
         file_name = 'fake_file'
-        artifacts=['full_payload', 'stateful']
+        artifacts = ['full_payload', 'stateful']
         build = 'fake_build'
-        argument = mox.And(mox.StrContains(file_name),
-                            mox.StrContains(build),
-                            mox.StrContains('locate_file'))
-        dev_server.ImageServerBase.run_call(argument).AndReturn('file_path')
 
-        self.mox.ReplayAll()
+        argument = comparators.Substrings([file_name, build, 'locate_file'])
+        dev_server.ImageServerBase.run_call.return_value = 'file_path'
+
         file_location = 'http://nothing/static/fake_build/file_path'
         self.assertEqual(self.android_dev_server.locate_file(
                 file_name, artifacts, build, None), file_location)
+        self.run_call_mock.assert_called_with(argument)
 
     def testCmdErrorLocateFile(self):
         """Test locating files for AndriodBuildServer for retry
         error.CmdError, and raise urllib2.URLError."""
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(CMD_ERROR)
-        dev_server.ImageServerBase.run_call(
-                mox.IgnoreArg()).AndRaise(E500)
-        self.mox.ReplayAll()
+        dev_server.ImageServerBase.run_call.side_effect = [CMD_ERROR, E500]
+
         self.assertRaises(dev_server.DevServerException,
                           self.dev_server.trigger_download,
                           '')
@@ -1022,11 +1050,11 @@
         """Test method get_available_devservers for CrashServer."""
         crash_servers = ['http://crash_servers1:8080']
         host = '127.0.0.1'
-        self.mox.StubOutWithMock(dev_server.CrashServer, 'servers')
-        dev_server.CrashServer.servers().AndReturn(crash_servers)
-        self.mox.ReplayAll()
-        self.assertEqual(dev_server.CrashServer.get_available_devservers(host),
-                        (crash_servers, False))
+        with patch.object(dev_server.CrashServer, 'servers'):
+            dev_server.CrashServer.servers.return_value = crash_servers
+            self.assertEqual(
+                    dev_server.CrashServer.get_available_devservers(host),
+                    (crash_servers, False))
 
 
     def testGetAvailableDevserversForImageServer(self):
@@ -1040,37 +1068,37 @@
         all_servers = unrestricted_servers + restricted_servers
         # Set restricted subnets
         restricted_subnets = [('127.0.0.0', 24)]
-        self.mox.StubOutWithMock(dev_server.ImageServerBase, 'servers')
-        dev_server.ImageServerBase.servers().MultipleTimes().AndReturn(
-                all_servers)
-        self.mox.ReplayAll()
-        # dut in unrestricted subnet shall be offered devserver in the same
-        # subnet first, and allow retry.
-        self.assertEqual(
-                dev_server.ImageServer.get_available_devservers(
-                        unrestricted_host, True, restricted_subnets),
-                (same_subnet_unrestricted_servers, True))
 
-        # crbug.com/1027277: If prefer_local_devserver is set to False, allow
-        # any devserver, and retry is not allowed.
-        self.assertEqual(
-                dev_server.ImageServer.get_available_devservers(
-                        unrestricted_host, False, restricted_subnets),
-                (all_servers, False))
+        with patch.object(dev_server.ImageServerBase, 'servers'):
+            dev_server.ImageServerBase.servers.return_value = (all_servers)
 
-        # crbug.com/1027277: When no hostname is specified, all devservers
-        # should be considered, and retry is not allowed.
-        self.assertEqual(
-                dev_server.ImageServer.get_available_devservers(
-                        None, True, restricted_subnets),
-                (all_servers, False))
+            # dut in unrestricted subnet shall be offered devserver in the same
+            # subnet first, and allow retry.
+            self.assertEqual(
+                    dev_server.ImageServer.get_available_devservers(
+                            unrestricted_host, True, restricted_subnets),
+                    (same_subnet_unrestricted_servers, True))
 
-        # dut in restricted subnet should only be offered devserver in the
-        # same restricted subnet, and retry is not allowed.
-        self.assertEqual(
-                dev_server.ImageServer.get_available_devservers(
-                        restricted_host, True, restricted_subnets),
-                (restricted_servers, False))
+            # crbug.com/1027277: If prefer_local_devserver is set to False,
+            # allow any devserver, and retry is not allowed.
+            self.assertEqual(
+                    dev_server.ImageServer.get_available_devservers(
+                            unrestricted_host, False, restricted_subnets),
+                    (all_servers, False))
+
+            # crbug.com/1027277: When no hostname is specified, all devservers
+            # should be considered, and retry is not allowed.
+            self.assertEqual(
+                    dev_server.ImageServer.get_available_devservers(
+                            None, True, restricted_subnets),
+                    (all_servers, False))
+
+            # dut in restricted subnet should only be offered devserver in the
+            # same restricted subnet, and retry is not allowed.
+            self.assertEqual(
+                    dev_server.ImageServer.get_available_devservers(
+                            restricted_host, True, restricted_subnets),
+                    (restricted_servers, False))
 
 
 if __name__ == "__main__":
diff --git a/client/common_lib/cros/fake_device_server/README b/client/common_lib/cros/fake_device_server/README
deleted file mode 100644
index dfc0061..0000000
--- a/client/common_lib/cros/fake_device_server/README
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-A simple web service used by Buffet to test interactions with a
-device server. Implements basic functionality for registration, device commands
-and state changes.
-
-To start the test server yourself, run server.py. Otherwise, server.py exposes
-start_server/stop_server methods.
-
-To test locally:
-./server.py
-
-# Register a device.
-curl -X POST -d "" http://localhost:8080/registrationTickets
-curl -X PATCH  -d '{"userEmail": "me"}' -H "Authorization: Bearer 1/TEST-ME" \
-    http://localhost:8080/registrationTickets/<id>
-curl -X POST -d "" \
-    http://localhost:8080/registrationTickets/<id>/finalize
-
-# List devices
-curl -X GET -d "" http://localhost:8080/devices
-
-# Send your device a command.
-curl -X POST -d '{"base": { "Reboot": {}}' http://localhost:8080/devices\
-  ?deviceId=<device_id>
-
-# Update the status of your command
-curl -X PATCH -d '{"state": "done"}' http://localhost:8080/devices/\
-    <command_id>
-
-# You can also use the client library avaiable @ client_lib.
-# Check out client_lib_test to see how you can use the API.
diff --git a/client/common_lib/cros/fake_device_server/client_lib/__init__.py b/client/common_lib/cros/fake_device_server/client_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/__init__.py
+++ /dev/null
diff --git a/client/common_lib/cros/fake_device_server/client_lib/client_lib_test.py b/client/common_lib/cros/fake_device_server/client_lib/client_lib_test.py
deleted file mode 100755
index 526efa5..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/client_lib_test.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/python3
-
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Small integration test for registration client
-
-This client can work either with the fake_device_server or a live server.
-To use locally (with the fake device server), start the server in the
-background (e.g. ../server.py) and run the program without arguments.
-
-Otherwise, if you want to run against a live server, you must provide an
-auth code. To get an auth code, run this script with the argument URL
-which will print out a link for you to visit and get your auth code.
-
-Then re-run the test with that auth code like so:
-
-./client_lib_test <YOUR_AUTH_CODE>.
-."""
-
-import argparse
-import logging
-import sys
-from six.moves import urllib
-
-import commands
-import devices
-import oauth_helpers
-import registration
-
-
-API_KEY = 'AIzaSyC55ef0RkaFTQvGvTXL_HIh6KI3pzVq4w0'
-CLIENT_ID = ('522003936346-odpbgftanpuruuqhf1puk9e0' +
-             'p2d5ldho.apps.googleusercontent.com')
-CLIENT_SECRET = '9Om2cR2_5cKIKhSY5OFFo8uX'
-SERVER_URL = 'https://www.googleapis.com/clouddevices/v1'
-
-
-def parse_args(args):
-    """Arg parser for this tiny program."""
-    parser = argparse.ArgumentParser(usage=__doc__)
-    parser.add_argument('auth_code', nargs='?',
-                        help=('Either your auth code or "URL" to return the'
-                              ' url to visit to get the code. If not'
-                              ' specified, runs test through local fake server.'
-                              ))
-    return parser.parse_args(args)
-
-
-def main(args):
-    """Main method for integration test."""
-    server_url, api_key = 'http://localhost:8080', None
-    access_token = None
-
-    parsed_args = parse_args(args)
-    if parsed_args.auth_code == 'URL':
-        print(oauth_helpers.get_oauth2_auth_url(CLIENT_ID))
-        return 0
-    elif parsed_args.auth_code:
-        server_url, api_key = SERVER_URL, API_KEY
-        access_token = oauth_helpers.get_oauth2_user_token(
-              CLIENT_ID, CLIENT_SECRET, parsed_args.auth_code)
-
-    r_client = registration.RegistrationClient(server_url=server_url,
-                                               api_key=api_key,
-                                               access_token=access_token)
-    # Device should support base.reboot command.
-    base_reboot_command = {'reboot': {}}
-    finalized_ticket = r_client.register_device(
-            'test_device', 'xmpp', oauth_client_id=CLIENT_ID,
-            base=base_reboot_command)
-    new_device_id = finalized_ticket['deviceDraft']['id']
-    print('Registered new device', finalized_ticket)
-
-    # TODO(sosa): Do better. Change this to use fake auth server when it exists.
-    if not parsed_args.auth_code:
-        robot_token = None
-    else:
-        robot_token = oauth_helpers.get_oauth2_robot_token(
-                CLIENT_ID, CLIENT_SECRET,
-                finalized_ticket['robotAccountAuthorizationCode'])
-
-    d_client = devices.DevicesClient(server_url=server_url,
-                                     api_key=api_key, access_token=robot_token)
-    if not d_client.get_device(new_device_id):
-        print('Device not found in database')
-        return 1
-
-    device_list = d_client.list_devices()['devices']
-    device_ids = [device['id'] for device in device_list]
-    if not new_device_id in device_ids:
-        print('Device found but not listed correctly')
-        return 1
-
-
-    # TODO(sosa): Figure out why I can't send commands.
-    c_client = commands.CommandsClient(server_url=server_url,
-                                       api_key=api_key,
-                                       access_token=robot_token)
-    command_dict = {'base': {'reboot': {}}}
-    new_command = c_client.create_command(new_device_id, command_dict)
-    if not c_client.get_command(new_command['id']):
-        print('Command not found')
-        return 1
-
-    command_list = c_client.list_commands(new_device_id)['commands']
-    command_ids = [c['id'] for c in command_list]
-    if not new_command['id'] in command_ids:
-        print('Command found but not listed correctly')
-        return 1
-
-    new_command = c_client.update_command(new_command['id'],
-                                          {'state':'finished'})
-    return 0
-
-
-if __name__ == '__main__':
-    logging_format = '%(asctime)s - %(filename)s - %(levelname)-8s: %(message)s'
-    date_format = '%H:%M:%S'
-    logging.basicConfig(level=logging.DEBUG, format=logging_format,
-                        datefmt=date_format)
-    try:
-        error_code = main(sys.argv[1:])
-        if error_code != 0:
-            print('Test Failed')
-
-        sys.exit(error_code)
-    except urllib.error.HTTPError as e:
-        print('Received an HTTPError exception!!!')
-        print(e)
-        print(e.read())
-        sys.exit(1)
diff --git a/client/common_lib/cros/fake_device_server/client_lib/commands.py b/client/common_lib/cros/fake_device_server/client_lib/commands.py
deleted file mode 100644
index 146c60a..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/commands.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains a simple client lib to the registration RPC."""
-
-import json
-import logging
-from six.moves import urllib
-
-import common
-from fake_device_server.client_lib import common_client
-from fake_device_server import commands as s_commands
-
-
-class CommandsClient(common_client.CommonClient):
-    """Client library for commands method."""
-
-    def __init__(self, *args, **kwargs):
-        common_client.CommonClient.__init__(
-                self, s_commands.COMMANDS_PATH, *args, **kwargs)
-
-
-    def get_command(self, command_id):
-        """Returns info about the given command using |command_id|.
-
-        @param command_id: valid id for a command.
-        """
-        request = urllib.request.Request(self.get_url([command_id]),
-                                         headers=self.add_auth_headers())
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
-
-
-    def list_commands(self, device_id):
-        """Returns the list of commands for the given |device_id|.
-
-        @param device_id: valid id for a device.
-        """
-        request = urllib.request.Request(
-            self.get_url(params={'deviceId':device_id}),
-            headers=self.add_auth_headers())
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
-
-
-    def update_command(self, command_id, data, replace=False):
-        """Updates the command with |data|.
-
-        @param command_id: id of the command to update.
-        @param data: data to update command with.
-        @param replace: If True, replace all data with the given data using the
-                PUT operation.
-        """
-        if not data:
-            return
-
-        headers = self.add_auth_headers({'Content-Type': 'application/json'})
-        request = urllib.request.Request(
-            self.get_url([command_id]), json.dumps(data),
-            headers=headers)
-        if replace:
-            request.get_method = lambda: 'PUT'
-        else:
-            request.get_method = lambda: 'PATCH'
-
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
-
-
-    def create_command(self, device_id, data):
-        """Creates a new command.
-
-        @param device_id: ID of device to send command to.
-        @param data: command.
-        """
-        headers = self.add_auth_headers({'Content-Type': 'application/json'})
-        data['deviceId'] = device_id
-        request = urllib.request.Request(self.get_url(),
-            json.dumps(data),
-            headers=headers)
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
diff --git a/client/common_lib/cros/fake_device_server/client_lib/common.py b/client/common_lib/cros/fake_device_server/client_lib/common.py
deleted file mode 100644
index 3593e0d..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/common.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Common to set up import path."""
-
-import os, sys
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-cros_dir = os.path.abspath(os.path.join(dirname, "..", ".."))
-sys.path.insert(0, cros_dir)
diff --git a/client/common_lib/cros/fake_device_server/client_lib/common_client.py b/client/common_lib/cros/fake_device_server/client_lib/common_client.py
deleted file mode 100644
index 458bae6..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/common_client.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains a simple client lib to the commands RPC."""
-
-import logging
-from six.moves import urllib
-
-
-class CommonClient(object):
-    """Common client class."""
-
-    _DEFAULT_SERVER_URL = 'http://localhost:9876'
-    _URL = '%(server_url)s/%(method)s'
-
-
-    def __init__(self, method, server_url=_DEFAULT_SERVER_URL, api_key=None,
-                 access_token=None,):
-        """
-        @param method: REST method to call e.g. my_method/call
-        @param server_url: Base url for the server e.g. http://localhost:8080
-        @param api_key: API key to use with remote server.
-        @param access_token: Access token to use to interact with server.
-        """
-        self._method = method
-        self.server_url = server_url
-        self.api_key = api_key
-        self.access_token = access_token
-
-
-    def add_auth_headers(self, additional_headers=None):
-        """Returns combined auth headers with any additional headers.
-
-        @param additional_headers: Additional headers to use.
-        """
-        if not self.access_token:
-            return additional_headers if additional_headers else {}
-        else:
-            headers = {'Authorization': self.access_token}
-            if additional_headers:
-                headers.update(additional_headers)
-
-            return headers
-
-
-    def get_url(self, paths=None, params=None):
-        """Returns url to use to talk to the server method.
-
-        @param paths: Parts of a path to append to base url.
-        @param params: Dictionary of url parameters.
-        """
-        if not self._method:
-            raise NotImplementedError('method not defined.')
-
-        # Create the method string.
-        paths_str = ''
-        if paths:
-            paths_str = '/' + '/'.join([str(p) for p in paths])
-
-        # Create the query string.
-        params_str = ''
-        if not params:
-            params = {}
-
-        if self.api_key:
-            params.setdefault('key', self.api_key)
-
-        params_list = []
-        for kw, arg in params.items():
-            params_list.append('='.join([urllib.parse.quote(kw),
-                                         urllib.parse.quote(arg)]))
-
-        if params_list:
-            params_str = '?' + '&'.join(params_list)
-
-        url = self._URL % dict(
-                server_url=self.server_url,
-                method=self._method) + paths_str + params_str
-
-        logging.info("Returning url: %s to use.", url)
-        return url
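CommonClient.get_url() only builds strings, so its behaviour is easy to see without a server; the sketch below assumes client/common_lib/cros is on sys.path and uses made-up method and key names.

from fake_device_server.client_lib import common_client

client = common_client.CommonClient('commands',
                                    server_url='http://localhost:9876',
                                    api_key='fake-key')

# Path segments are joined onto the method; the api_key is appended as the
# 'key' query parameter alongside any caller-supplied parameters, e.g.
# http://localhost:9876/commands/abc123?state=queued&key=fake-key
print(client.get_url(paths=['abc123'], params={'state': 'queued'}))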
diff --git a/client/common_lib/cros/fake_device_server/client_lib/devices.py b/client/common_lib/cros/fake_device_server/client_lib/devices.py
deleted file mode 100644
index 4e58657..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/devices.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains a simple client lib to the devices RPC."""
-
-import json
-from six.moves import urllib
-
-import common
-from fake_device_server.client_lib import common_client
-from fake_device_server import devices as s_devices
-
-
-class DevicesClient(common_client.CommonClient):
-    """Client library for devices method."""
-
-    def __init__(self, *args, **kwargs):
-        common_client.CommonClient.__init__(
-                self, s_devices.DEVICES_PATH, *args, **kwargs)
-
-
-    def get_device(self, device_id):
-        """Returns info about the given |device_id|.
-
-        @param device_id: valid device_id.
-        """
-        request = urllib.request.Request(self.get_url([device_id]),
-                                         headers=self.add_auth_headers())
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
-
-
-    def list_devices(self):
-        """Returns the list of the devices the server currently knows about."""
-        request = urllib.request.Request(self.get_url(),
-                                         headers=self.add_auth_headers())
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
-
-
-    def create_device(self, system_name, channel, **kwargs):
-        """Creates a device using the args.
-
-        @param system_name: name to give the system.
-        @param channel: supported communication channel.
-        @param kwargs: additional dictionary of args to put in config.
-        """
-        data = dict(name=system_name,
-                    channel=channel,
-                    **kwargs)
-        headers = self.add_auth_headers({'Content-Type': 'application/json'})
-        request = urllib.request.Request(self.get_url(), json.dumps(data),
-                                         headers=headers)
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
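A sketch of reading device state back through the DevicesClient above, assuming the fake server is running on the client's default port and that 'some-device-id' refers to a device already created through the registration flow.

from fake_device_server.client_lib import devices

client = devices.DevicesClient(server_url='http://localhost:9876')

# Enumerate everything the server knows about, then fetch one device.
listing = client.list_devices()
print([d['id'] for d in listing['devices']])
print(client.get_device('some-device-id'))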
diff --git a/client/common_lib/cros/fake_device_server/client_lib/fail_control.py b/client/common_lib/cros/fake_device_server/client_lib/fail_control.py
deleted file mode 100644
index 5d93881..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/fail_control.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains a simple client lib to control failures."""
-
-import json
-from six.moves import urllib
-
-import common
-from fake_device_server.client_lib import common_client
-from fake_device_server import fail_control
-
-
-class FailControlClient(common_client.CommonClient):
-    """Client library for control failing."""
-
-    def __init__(self, *args, **kwargs):
-        common_client.CommonClient.__init__(
-                self, fail_control.FAIL_CONTROL_PATH, *args, **kwargs)
-
-
-    def start_failing_requests(self):
-        """Starts failing request."""
-        headers = self.add_auth_headers({'Content-Type': 'application/json'})
-        request = urllib.request.Request(
-            self.get_url(['start_failing_requests']),
-            json.dumps(dict()), headers=headers)
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
-
-
-    def stop_failing_requests(self):
-        """Stops failing request."""
-        headers = self.add_auth_headers({'Content-Type': 'application/json'})
-        request = urllib.request.Request(
-            self.get_url(['stop_failing_requests']),
-            json.dumps(dict()), headers=headers)
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
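A sketch of how a test might use the FailControlClient above to make the fake server return HTTP 500 for a while, for example to exercise a caller's retry handling; the server address and the surrounding test code are assumptions.

from fake_device_server.client_lib import fail_control

client = fail_control.FailControlClient(server_url='http://localhost:9876')

client.start_failing_requests()   # other API calls now fail with HTTP 500
# ... exercise the code under test against the failing server here ...
client.stop_failing_requests()    # back to normal operation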
diff --git a/client/common_lib/cros/fake_device_server/client_lib/meta.py b/client/common_lib/cros/fake_device_server/client_lib/meta.py
deleted file mode 100644
index 6208656..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/meta.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Contains a simple client lib to interact with server meta interfaces."""
-
-from six.moves import urllib
-
-import common
-from fake_device_server.client_lib import common_client
-from fake_device_server import meta_handler
-
-
-class MetaClient(common_client.CommonClient):
-    """Client library for interacting meta interfaces to the server."""
-
-    def __init__(self, *args, **kwargs):
-        common_client.CommonClient.__init__(
-                self, meta_handler.META_HANDLER_PATH, *args, **kwargs)
-
-
-    def get_generation(self, timeout_seconds=2):
-        """Retrieve the unique generation of the server.
-
-        @param timeout_seconds: number of seconds to wait for a response.
-        @return generation string or None.
-
-        """
-        try:
-            request = urllib.request.urlopen(self.get_url(['generation']), None,
-                                             timeout_seconds)
-            return request.read().decode('utf-8')
-        except urllib.error.URLError:
-            return None
diff --git a/client/common_lib/cros/fake_device_server/client_lib/oauth.py b/client/common_lib/cros/fake_device_server/client_lib/oauth.py
deleted file mode 100644
index 9c7b7ba..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/oauth.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains a simple client lib to interact with OAuth."""
-
-import json
-from six.moves import urllib
-
-import common
-from fake_device_server.client_lib import common_client
-from fake_device_server import oauth
-
-
-class OAuthClient(common_client.CommonClient):
-    """Client library for interacting with OAuth."""
-
-    def __init__(self, *args, **kwargs):
-        common_client.CommonClient.__init__(
-                self, oauth.OAUTH_PATH, *args, **kwargs)
-
-
-    def invalidate_all_access_tokens(self):
-        """Invalidates all access tokens previously issued."""
-        headers = self.add_auth_headers({'Content-Type': 'application/json'})
-        request = urllib.request.Request(
-            self.get_url(['invalidate_all_access_tokens']),
-            json.dumps(dict()), headers=headers)
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
-
-
-    def invalidate_all_refresh_tokens(self):
-        """Invalidates all refresh tokens previously issued."""
-        headers = self.add_auth_headers({'Content-Type': 'application/json'})
-        request = urllib.request.Request(
-            self.get_url(['invalidate_all_refresh_tokens']),
-            json.dumps(dict()), headers=headers)
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
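Sketch of using the OAuthClient above to revoke every token the fake server has issued, which is one way to force re-authentication paths in a test; the server address is an assumption.

from fake_device_server.client_lib import oauth

client = oauth.OAuthClient(server_url='http://localhost:9876')
client.invalidate_all_access_tokens()
client.invalidate_all_refresh_tokens()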
diff --git a/client/common_lib/cros/fake_device_server/client_lib/oauth_helpers.py b/client/common_lib/cros/fake_device_server/client_lib/oauth_helpers.py
deleted file mode 100644
index 14469a4..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/oauth_helpers.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module containing helpers for interacting with oauth2."""
-
-
-import json
-from six.moves import urllib
-
-
-DEFAULT_SCOPE = 'https://www.googleapis.com/auth/clouddevices'
-OAUTH_URL = 'https://accounts.google.com/o/oauth2'
-# Constant used in oauth2 protocol for device requests.
-REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
-
-
-def get_oauth2_auth_url(client_id, scope=DEFAULT_SCOPE):
-    auth_url = '%s/%s' % (OAUTH_URL, 'auth')
-    params = dict(client_id=client_id,
-                  scope=scope,
-                  response_type='code',
-                  redirect_uri=REDIRECT_URI)
-    return '%s?%s' % (auth_url, urllib.parse.urlencode(params))
-
-
-def get_oauth2_user_token(client_id, client_secret, code):
-    """Returns the oauth2 token for a user given the auth code."""
-    token_url = '%s/%s' % (OAUTH_URL, 'token')
-    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
-    data = dict(code=code,
-                client_id=client_id,
-                client_secret=client_secret,
-                redirect_uri=REDIRECT_URI,
-                grant_type='authorization_code')
-
-    request = urllib.request.Request(token_url,
-                                     data=urllib.parse.urlencode(data),
-                                     headers=headers)
-    url_h = urllib.request.urlopen(request)
-    auth_result = json.loads(url_h.read())
-    return '%s %s' % (auth_result['token_type'],
-                      auth_result['access_token'])
-
-
-def get_oauth2_robot_token(client_id, client_secret, code):
-    """Returns the oauth2 token for a robot account to use."""
-    token_url = '%s/%s' % (OAUTH_URL, 'token')
-    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
-    data = dict(code=code,
-                client_id=client_id,
-                client_secret=client_secret,
-                redirect_uri='oob',
-                grant_type='authorization_code')
-
-    request = urllib.request.Request(token_url,
-                                     data=urllib.parse.urlencode(data),
-                                     headers=headers)
-    url_h = urllib.request.urlopen(request)
-    auth_result = json.loads(url_h.read())
-    return '%s %s' % (auth_result['token_type'],
-                      auth_result['access_token'])
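The helpers above are meant to be chained: print the consent URL, have a user paste back the auth code, then exchange it for a token. A sketch follows, run from within client_lib/ the way client_lib_test.py does; the client id and secret are placeholders.

import oauth_helpers

CLIENT_ID = 'my-client-id.apps.googleusercontent.com'   # placeholder
CLIENT_SECRET = 'my-client-secret'                      # placeholder

# Step 1: send the user to the consent page.
print('Visit and authorize:', oauth_helpers.get_oauth2_auth_url(CLIENT_ID))

# Step 2: exchange the pasted auth code for a '<type> <token>' header value.
auth_code = input('Paste the auth code here: ')
token = oauth_helpers.get_oauth2_user_token(CLIENT_ID, CLIENT_SECRET, auth_code)
print('Authorization header value:', token)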
diff --git a/client/common_lib/cros/fake_device_server/client_lib/registration.py b/client/common_lib/cros/fake_device_server/client_lib/registration.py
deleted file mode 100644
index 6cf9c5c..0000000
--- a/client/common_lib/cros/fake_device_server/client_lib/registration.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains a simple client lib to the registration RPC."""
-
-import json
-import logging
-from six.moves import urllib
-
-import common
-from fake_device_server.client_lib import common_client
-from fake_device_server import registration_tickets
-
-
-class RegistrationClient(common_client.CommonClient):
-    """Client library for registrationTickets method."""
-
-    def __init__(self, *args, **kwargs):
-        common_client.CommonClient.__init__(
-                self, registration_tickets.REGISTRATION_PATH, *args, **kwargs)
-
-
-    def get_registration_ticket(self, ticket_id):
-        """Returns info about the given |ticket_id|.
-
-        @param ticket_id: valid id for a ticket.
-        """
-        url_h = urllib.request.urlopen(self.get_url([ticket_id]))
-        return json.loads(url_h.read())
-
-
-    def update_registration_ticket(self, ticket_id, data,
-                                   additional_headers=None, replace=False):
-        """Updates the given registration ticket with the new data.
-
-        @param ticket_id: id of the ticket to update.
-        @param data: data to update.
-        @param additional_headers: additional HTTP headers to pass (expects a
-                list of tuples).
-        @param replace: If True, replace all data with the given data using the
-                PUT operation.
-        """
-        if not data:
-            return
-
-        headers = {'Content-Type': 'application/json'}
-        if additional_headers:
-            headers.update(additional_headers)
-
-        request = urllib.request.Request(self.get_url([ticket_id]),
-                                         json.dumps(data), headers=headers)
-        if replace:
-            request.get_method = lambda: 'PUT'
-        else:
-            request.get_method = lambda: 'PATCH'
-
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
-
-
-    def create_registration_ticket(self):
-        """Creates a new registration ticket."""
-        # We're going to fall back onto this test access token, if we don't
-        # have a real one.  Tests rely on this behavior.
-        token = registration_tickets.RegistrationTickets.TEST_ACCESS_TOKEN
-        headers = {'Content-Type': 'application/json',
-                   'Authorization': 'Bearer %s' % token,
-        }
-        auth_headers = self.add_auth_headers()
-        headers.update(auth_headers)
-        data = {'userEmail': 'me'}
-        request = urllib.request.Request(self.get_url(),
-                                         json.dumps(data).encode("utf-8"),
-                                         headers)
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
-
-
-    def finalize_registration_ticket(self, ticket_id):
-        """Finalizes a registration ticket by creating a new device.
-
-        @param ticket_id: id of ticket to finalize.
-        """
-        request = urllib.request.Request(self.get_url([ticket_id, 'finalize']),
-                                  data='')
-        url_h = urllib.request.urlopen(request)
-        return json.loads(url_h.read())
-
-
-    def register_device(self, system_name, channel,
-                        oauth_client_id, **kwargs):
-        """Goes through the entire registration process using the device args.
-
-        @param system_name: name to give the system.
-        @param channel: supported communication channel.
-        @param oauth_client_id: see oauth docs.
-        @param kwargs: additional dictionary of args to put in config.
-        """
-        ticket = self.create_registration_ticket()
-        logging.info('Initial Ticket: %s', ticket)
-        ticket_id = ticket['id']
-
-        device_draft = dict(name=system_name,
-                            channel=dict(supportedType=channel),
-                            **kwargs)
-
-        ticket = self.update_registration_ticket(
-                ticket_id,
-                {'deviceDraft': device_draft,
-                 'userEmail': 'me',
-                 'oauthClientId': oauth_client_id})
-
-        logging.info('Updated Ticket After Claiming: %s', ticket)
-        return self.finalize_registration_ticket(ticket_id)
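End to end, register_device() above is enough to get a device id out of the local fake server with no real OAuth involved. A sketch, assuming server.py is listening on localhost:8080 as in client_lib_test.py and using a made-up OAuth client id:

from fake_device_server.client_lib import registration

client = registration.RegistrationClient(server_url='http://localhost:8080')

# create -> patch -> finalize, all handled inside register_device().
ticket = client.register_device('sketch_device', 'xmpp',
                                oauth_client_id='fake-client-id',
                                base={'reboot': {}})
print('New device id:', ticket['deviceDraft']['id'])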
diff --git a/client/common_lib/cros/fake_device_server/commands.py b/client/common_lib/cros/fake_device_server/commands.py
deleted file mode 100755
index f804357..0000000
--- a/client/common_lib/cros/fake_device_server/commands.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains a simple implementation of the commands RPC."""
-
-from cherrypy import tools
-import logging
-import uuid
-
-import common
-from fake_device_server import common_util
-from fake_device_server import constants
-from fake_device_server import server_errors
-
-COMMANDS_PATH = 'commands'
-
-
-# TODO(sosa) Support upload method (and mediaPath parameter).
-class Commands(object):
-    """A simple implementation of the commands interface."""
-
-    # Needed for cherrypy to expose this to requests.
-    exposed = True
-
-    # Roots of command resource representation that might contain commands.
-    _COMMAND_ROOTS = set(['base', 'aggregator', 'printer', 'storage', 'test'])
-
-
-    def __init__(self, oauth_handler, fail_control_handler):
-        """Initializes a Commands handler."""
-        # A map of device_id's to maps of command ids to command resources
-        self.device_commands = dict()
-        self._num_commands_created = 0
-        self._oauth_handler = oauth_handler
-        self._fail_control_handler = fail_control_handler
-
-
-    def _generate_command_id(self):
-        """@return unique command ID."""
-        command_id = '%s_%03d' % (uuid.uuid4().hex[0:6],
-                                  self._num_commands_created)
-        self._num_commands_created += 1
-        return command_id
-
-    def new_device(self, device_id):
-        """Adds knowledge of a device with the given |device_id|.
-
-        This method should be called whenever a new device is created. It
-        populates an empty command dict for each device state.
-
-        @param device_id: Device id to add.
-
-        """
-        self.device_commands[device_id] = {}
-
-
-    def remove_device(self, device_id):
-        """Removes knowledge of the given device.
-
-        @param device_id: Device id to remove.
-
-        """
-        del self.device_commands[device_id]
-
-
-    def create_command(self, command_resource):
-        """Creates, queues and returns a new command.
-
-        @param command_resource: Json dict for the command; must include
-                'deviceId' and 'name' keys.
-        """
-        device_id = command_resource.get('deviceId', None)
-        if not device_id:
-            raise server_errors.HTTPError(
-                    400, 'Can only create a command if you provide a deviceId.')
-
-        if device_id not in self.device_commands:
-            raise server_errors.HTTPError(
-                    400, 'Unknown device with id %s' % device_id)
-
-        if 'name' not in command_resource:
-            raise server_errors.HTTPError(
-                    400, 'Missing command name.')
-
-        # Print out something useful (command base.Reboot)
-        logging.info('Received command %s', command_resource['name'])
-
-        # TODO(sosa): Check to see if command is in devices CDD.
-        # Queue command, create it and insert to device->command mapping.
-        command_id = self._generate_command_id()
-        command_resource['id'] = command_id
-        command_resource['state'] = constants.QUEUED_STATE
-        self.device_commands[device_id][command_id] = command_resource
-        return command_resource
-
-
-    @tools.json_out()
-    def GET(self, *args, **kwargs):
-        """Handle GETs against the command API.
-
-        GET .../(command_id) returns a command resource
-        GET .../queue?deviceId=... returns the command queue
-        GET .../?deviceId=... returns the command queue
-
-        Supports both the GET / LIST operations for commands. List returns the
-        commands a user has access to; however, this implementation simply
-        returns every command queued for the requested device.
-
-        Raises:
-            server_errors.HTTPError if the device doesn't exist.
-
-        """
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        args = list(args)
-        requested_command_id = args.pop(0) if args else None
-        device_id = kwargs.get('deviceId', None)
-        if args:
-            raise server_errors.HTTPError(400, 'Unsupported API')
-        if not device_id or device_id not in self.device_commands:
-            raise server_errors.HTTPError(
-                    400, 'Can only list commands by valid deviceId.')
-        if requested_command_id is None:
-            requested_command_id = 'queue'
-
-        if not self._oauth_handler.is_request_authorized():
-            raise server_errors.HTTPError(401, 'Access denied.')
-
-        if requested_command_id == 'queue':
-            # Returns listing (ignores optional parameters).
-            listing = {'kind': 'clouddevices#commandsListResponse'}
-            requested_state = kwargs.get('state', None)
-            listing['commands'] = []
-            for _, command in iter(self.device_commands[device_id].items()):
-                # Check state for match (if None, just append all of them).
-                if (requested_state is None or
-                        requested_state == command['state']):
-                    listing['commands'].append(command)
-            logging.info('Returning queue of commands: %r', listing)
-            return listing
-
-        for command_id, _ in iter(self.device_commands[device_id].items()):
-            if command_id == requested_command_id:
-                return self.device_commands[device_id][command_id]
-
-        raise server_errors.HTTPError(
-                400, 'No command with ID=%s found' % requested_command_id)
-
-
-    @tools.json_out()
-    def POST(self, *args, **kwargs):
-        """Creates a new command using the incoming json data."""
-        # TODO(wiley) We could check authorization here, which should be
-        #             a client/owner of the device.
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        data = common_util.parse_serialized_json()
-        if not data:
-            raise server_errors.HTTPError(400, 'Require JSON body')
-
-        return self.create_command(data)
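Because the handler methods above can be invoked directly (as commands_unittest.py below does), the command lifecycle is easy to exercise in-process without cherrypy; this sketch assumes the fake_device_server package is importable.

from fake_device_server import commands, fail_control, fake_oauth

handler = commands.Commands(fake_oauth.FakeOAuth(),
                            fail_control.FailControl())

# A device must be known before commands can be queued for it.
handler.new_device('device-1')
queued = handler.create_command({'deviceId': 'device-1',
                                 'name': 'base.reboot',
                                 'base': {'reboot': {}}})
print(queued['state'])                          # 'queued'
print(handler.GET('queue', deviceId='device-1'))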
diff --git a/client/common_lib/cros/fake_device_server/commands_unittest.py b/client/common_lib/cros/fake_device_server/commands_unittest.py
deleted file mode 100755
index 5dfa48f..0000000
--- a/client/common_lib/cros/fake_device_server/commands_unittest.py
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/usr/bin/python3
-
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for commands.py."""
-
-import copy
-import unittest
-
-import common
-from fake_device_server import commands
-from fake_device_server import fake_oauth
-from fake_device_server import fail_control
-from fake_device_server import server_errors
-
-
-class CommandsTest(unittest.TestCase):
-    """Tests for the Commands class.
-
-    Note unlike other unittests in this project, I set the api_key for all
-    tests. This makes the logic easier to read because of the additional
-    dictionary mapping of
-    # commands.devices_commands[(id, api_key)] = dict of commands by command id.
-    """
-
-    def setUp(self):
-        """Sets up a ticket / registration objects."""
-        # Use a fake OAuth module to work around the hack that this
-        # module bypass cherrypy by directly invoking commands.GET.
-        self.oauth = fake_oauth.FakeOAuth()
-        self.fail_control = fail_control.FailControl()
-        self.commands = commands.Commands(self.oauth, self.fail_control)
-
-
-    def testCreateCommand(self):
-        """Tests that we can create a new command."""
-        DEVICE_ID = '1234awesomeDevice'
-        GOOD_COMMAND = {
-            'deviceId': DEVICE_ID,
-            'name': 'base._vendorCommand',
-            'base': {
-                '_vendorCommand': {
-                    'name': 'specialCommand',
-                    'kind': 'buffetSpecialCommand',
-                }
-            }
-        }
-
-        self.commands.new_device(DEVICE_ID)
-        new_command = self.commands.create_command(GOOD_COMMAND)
-        self.assertTrue('id' in new_command)
-        command_id = new_command['id']
-        self.assertEqual(new_command['state'], 'queued')
-        self.assertEqual(
-                self.commands.device_commands[DEVICE_ID][command_id],
-                new_command)
-
-        # Test a command missing the required deviceId (and name) fields.
-        bad_command = {'base': {}}
-        self.assertRaises(server_errors.HTTPError,
-                          self.commands.create_command, bad_command)
-
-        # Test adding a good command to an unknown device.
-        BAD_COMMAND = copy.deepcopy(GOOD_COMMAND)
-        BAD_COMMAND['deviceId'] = 'not_a_real_device'
-        self.assertRaises(server_errors.HTTPError,
-                          self.commands.create_command, BAD_COMMAND)
-
-
-    def testGet(self):
-        """Tests that we can retrieve a command correctly."""
-        DEVICE_ID = 'device_id'
-        COMMAND_ID = 'command_id'
-        COMMAND_RESOURCE = {'faked': 'out'}
-        self.commands.new_device(DEVICE_ID)
-        self.commands.device_commands[DEVICE_ID][COMMAND_ID] = COMMAND_RESOURCE
-        returned_json = self.commands.GET(COMMAND_ID, deviceId=DEVICE_ID)
-        self.assertEquals(returned_json, COMMAND_RESOURCE)
-
-        BAD_COMMAND_ID = 'fubar'
-        # Non-existing command.
-        self.assertRaises(server_errors.HTTPError,
-                          self.commands.GET, BAD_COMMAND_ID)
-
-
-    def testListing(self):
-        """Tests that we can get a listing back correctly using the GET method.
-        """
-        DEVICE_ID = 'device_id'
-        COMMAND = {
-            'name': 'base.reboot',
-            'deviceId': DEVICE_ID,
-        }
-        self.commands.new_device(DEVICE_ID)
-        command1 = self.commands.create_command(copy.deepcopy(COMMAND))
-        command2 = self.commands.create_command(copy.deepcopy(COMMAND))
-        command1_id = command1['id']
-        command2_id = command2['id']
-        self.commands.device_commands[DEVICE_ID][command1_id]['state'] = \
-                'inProgress'
-
-        # Without state should return all commands.
-        def check_has_commands(expected_ids, state=None):
-            """Check that we get all the commands we expect given a state.
-
-            @param expected_ids: list of string command ids.
-            @param state: Optional state to filter on (a string like 'queued'
-
-            """
-            returned_json = self.commands.GET(deviceId=DEVICE_ID, state=state)
-            self.assertEqual('clouddevices#commandsListResponse',
-                             returned_json['kind'])
-            self.assertTrue('commands' in returned_json)
-            returned_command_ids = [command['id']
-                                    for command in returned_json['commands']]
-            self.assertEqual(sorted(returned_command_ids), sorted(expected_ids))
-
-        check_has_commands([command1_id, command2_id])
-        check_has_commands([command1_id], state='inProgress')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/client/common_lib/cros/fake_device_server/common.py b/client/common_lib/cros/fake_device_server/common.py
deleted file mode 100644
index 65b3059..0000000
--- a/client/common_lib/cros/fake_device_server/common.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Common to set up import path."""
-
-import os, sys
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-cros_dir = os.path.abspath(os.path.join(dirname, ".."))
-sys.path.insert(0, cros_dir)
diff --git a/client/common_lib/cros/fake_device_server/common_util.py b/client/common_lib/cros/fake_device_server/common_util.py
deleted file mode 100755
index d89fa86..0000000
--- a/client/common_lib/cros/fake_device_server/common_util.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Common Utility Methods"""
-
-import cherrypy
-import json
-import logging
-
-import common
-from fake_device_server import server_errors
-
-
-def parse_serialized_json():
-    """Parses incoming cherrypy request as a json."""
-    body_length = int(cherrypy.request.headers.get('Content-Length', 0))
-    data = cherrypy.request.rfile.read(body_length)
-    return json.loads(data) if data else None
-
-
-def grab_header_field(header_name):
-    """Returns the header |header_name| from an incoming request.
-
-    @param header_name: Header name to retrieve.
-    """
-    return cherrypy.request.headers.get(header_name, None)
-
-
-def get_access_token():
-    """Returns the access token from an incoming request.
-
-    @return string access token or None.
-
-    """
-    header = grab_header_field('Authorization')
-    if header is None:
-        logging.error('No authorization header found.')
-        return None
-    fields = header.split()
-    if len(fields) != 2 or fields[0] != "Bearer":
-        logging.error('No access token found.')
-        return None
-    logging.debug('Got authorization header "%s"', header)
-    return fields[1]
-
-
-def parse_common_args(args_tuple, kwargs, supported_operations=set()):
-    """Common method to parse args to a CherryPy RPC for this server.
-
-    |args_tuple| should contain all the sections of the URL after CherryPy
-    removes the pieces that dispatched the URL to this handler. For instance,
-    a GET method receiving '...'/<id>/<method_name> should call:
-    parse_common_args(args_tuple=[<id>, <method_name>]).
-    Some operations take no arguments. Other operations take
-    a single argument. Still other operations take
-    one of supported_operations as a second argument (in the args_tuple).
-
-    @param args_tuple: Tuple of positional args.
-    @param kwargs: Dictionary of named args passed in.
-    @param supported_operations: Set of operations to support if any.
-
-    Returns:
-        A 3-tuple containing the id parsed from the args_tuple, api_key,
-        and finally an optional operation if supported_operations is provided
-        and args_tuple contains one of the supported ops.
-
-    Raises:
-        server_error.HTTPError if combination or args/kwargs doesn't make
-        sense.
-    """
-    args = list(args_tuple)
-    api_key = kwargs.get('key')
-    id = args.pop(0) if args else None
-    operation = args.pop(0) if args else None
-    if operation:
-        if not supported_operations:
-            raise server_errors.HTTPError(
-                    400, 'Received operation when operation was not '
-                    'expected: %s!' % operation)
-        elif not operation in supported_operations:
-            raise server_errors.HTTPError(
-                    400, 'Unsupported operation: %s' % operation)
-
-    # All expected args should be popped off already.
-    if args:
-        raise server_errors.HTTPError(
-                400, 'Could not parse all args: %s' % args)
-
-    return id, api_key, operation
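A couple of worked calls for parse_common_args() above, mirroring how the handlers pass in the leftover URL pieces; run with the same import arrangement the unit test below uses.

import common
from fake_device_server import common_util

# GET /devices/1234?key=my-api-key  ->  ('1234', 'my-api-key', None)
print(common_util.parse_common_args(('1234',), {'key': 'my-api-key'}))

# POST /registrationTickets/1234/finalize, with 'finalize' allowed as an op
# ->  ('1234', None, 'finalize')
print(common_util.parse_common_args(('1234', 'finalize'), {},
                                    supported_operations=set(['finalize'])))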
diff --git a/client/common_lib/cros/fake_device_server/common_util_unittest.py b/client/common_lib/cros/fake_device_server/common_util_unittest.py
deleted file mode 100755
index 97e0650..0000000
--- a/client/common_lib/cros/fake_device_server/common_util_unittest.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/python3
-
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for common_util methods."""
-
-import cherrypy
-import json
-import tempfile
-import unittest
-
-import common
-from fake_device_server import common_util
-from fake_device_server import server_errors
-
-
-class FakeDeviceServerTests(unittest.TestCase):
-    """Contains tests for methods not included in classes."""
-
-    def testParseSerializeJson(self):
-        """Tests that we can seralize / deserialize json from cherrypy."""
-        json_data = json.dumps(dict(a='b', b='c'))
-
-        json_file = tempfile.TemporaryFile()
-        json_file.write(json.dumps(json_data).encode('utf-8'))
-        content_length = json_file.tell()
-        json_file.seek(0)
-        cherrypy.request.headers['Content-Length'] = content_length
-
-        cherrypy.request.rfile = json_file
-
-        self.assertEquals(common_util.parse_serialized_json(), json_data)
-        json_file.close()
-
-        # Also test the edge case without an input file.
-        json_file = tempfile.TemporaryFile()
-        cherrypy.request.rfile = json_file
-
-        self.assertEquals(common_util.parse_serialized_json(), None)
-        json_file.close()
-
-
-    def testParseCommonArgs(self):
-        """Tests various flavors of the parse common args method."""
-        id = 123456
-        key = 'boogity'
-
-        # Should parse all values.
-        id, api_key, op = common_util.parse_common_args(
-                (id, 'boogity',),
-                dict(key=key), supported_operations=set(['boogity']))
-        self.assertEquals(id, id)
-        self.assertEquals(key, api_key)
-        self.assertEquals('boogity', op)
-
-        # Missing op.
-        id, api_key, op = common_util.parse_common_args((id,), dict(key=key))
-        self.assertEquals(id, id)
-        self.assertEquals(key, api_key)
-        self.assertIsNone(op)
-
-        # Missing key.
-        id, api_key, op = common_util.parse_common_args((id,), dict())
-        self.assertEquals(id, id)
-        self.assertIsNone(api_key)
-        self.assertIsNone(op)
-
-        # Missing all.
-        id, api_key, op = common_util.parse_common_args(tuple(), dict())
-        self.assertIsNone(id)
-        self.assertIsNone(api_key)
-        self.assertIsNone(op)
-
-        # Too many args.
-        self.assertRaises(server_errors.HTTPError,
-                          common_util.parse_common_args,
-                          (id, 'lame', 'stuff',), dict())
-
-        # Operation when it's not expected.
-        self.assertRaises(server_errors.HTTPError,
-                          common_util.parse_common_args,
-                          (id, 'boogity'), dict())
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/client/common_lib/cros/fake_device_server/constants.py b/client/common_lib/cros/fake_device_server/constants.py
deleted file mode 100644
index 1829635..0000000
--- a/client/common_lib/cros/fake_device_server/constants.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Constants used by server methods."""
-
-ROBOT_ACCOUNT_EMAIL = 'robot@test.org'
-
-QUEUED_STATE = 'queued'
-
-DEVICE_STATES = ['aborted', 'done', 'error', 'inProgress', QUEUED_STATE]
\ No newline at end of file
diff --git a/client/common_lib/cros/fake_device_server/devices.py b/client/common_lib/cros/fake_device_server/devices.py
deleted file mode 100644
index a34c99c..0000000
--- a/client/common_lib/cros/fake_device_server/devices.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains a simple implementation of the devices RPC."""
-
-from cherrypy import tools
-import logging
-import time
-
-import common
-from fake_device_server import common_util
-from fake_device_server import resource_method
-from fake_device_server import server_errors
-
-
-# TODO(sosa): All access to this object should technically require auth. Create
-# setters/getters for the auth token for testing.
-
-DEVICES_PATH = 'devices'
-
-
-class Devices(resource_method.ResourceMethod):
-    """A simple implementation of the device interface.
-
-    A common workflow of using this API is:
-
-    POST .../ # Creates a new device with id <id>.
-    PATCH ..../<id> # Update device state.
-    GET .../<id> # Get device state.
-    DELETE .../<id> # Delete the device.
-    """
-
-    # Needed for cherrypy to expose this to requests.
-    exposed = True
-
-
-    def __init__(self, resource, commands_instance, oauth_instance,
-                 fail_control_handler):
-        """Initializes a registration ticket.
-
-        @param resource: A resource delegate for storing devices.
-        @param commands_instance: Instance of commands method class.
-        @param oauth_instance: Instance of oauth class.
-        @param fail_control_handler: Instance of FailControl.
-        """
-        super(Devices, self).__init__(resource)
-        self.commands_instance = commands_instance
-        self._oauth = oauth_instance
-        self._fail_control_handler = fail_control_handler
-
-
-    def _handle_state_patch(self, device_id, api_key, data):
-        """Patch a device's state with the given update data.
-
-        @param device_id: string device id to update.
-        @param api_key: string api_key to support this resource delegate.
-        @param data: json blob provided to patchState API.
-
-        """
-        # TODO(wiley) this.
-
-
-    def _validate_device_resource(self, resource):
-        # Verify required keys exist in the device draft.
-        if not resource:
-            raise server_errors.HTTPError(400, 'Empty device resource.')
-
-        for key in ['name', 'channel']:
-            if key not in resource:
-                raise server_errors.HTTPError(400, 'Must specify %s' % key)
-
-        # Add server fields.
-        resource['kind'] = 'clouddevices#device'
-        current_time_ms = str(int(round(time.time() * 1000)))
-        resource['creationTimeMs'] = current_time_ms
-        resource['lastUpdateTimeMs'] = current_time_ms
-        resource['lastSeenTimeMs'] = current_time_ms
-
-
-    def create_device(self, api_key, device_config):
-        """Creates a new device given the device_config.
-
-        @param api_key: Api key for the application.
-        @param device_config: Json dict for the device.
-        @raises server_errors.HTTPError: if the config is missing a required key
-        """
-        logging.info('Creating device with api_key=%s and device_config=%r',
-                     api_key, device_config)
-        self._validate_device_resource(device_config)
-        new_device = self.resource.update_data_val(None, api_key,
-                                                   data_in=device_config)
-        self.commands_instance.new_device(new_device['id'])
-        return new_device
-
-
-    @tools.json_out()
-    def GET(self, *args, **kwargs):
-        """GET .../(device_id) gets device info or lists all devices.
-
-        Supports both the GET / LIST operations for devices. List returns the
-        devices a user has access to; however, this implementation just
-        returns all devices.
-
-        Raises:
-            server_errors.HTTPError if the device doesn't exist.
-        """
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        id, api_key, _ = common_util.parse_common_args(args, kwargs)
-        if not api_key:
-            access_token = common_util.get_access_token()
-            api_key = self._oauth.get_api_key_from_access_token(access_token)
-        if id:
-            return self.resource.get_data_val(id, api_key)
-        else:
-            # Returns listing (ignores optional parameters).
-            listing = {'kind': 'clouddevices#devicesListResponse'}
-            listing['devices'] = self.resource.get_data_vals()
-            return listing
-
-
-    @tools.json_out()
-    def POST(self, *args, **kwargs):
-        """Handle POSTs for a device.
-
-        Supported APIs include:
-
-        POST /devices/<device-id>/patchState
-
-        """
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        args = list(args)
-        device_id = args.pop(0) if args else None
-        operation = args.pop(0) if args else None
-        if device_id is None or operation != 'patchState':
-            raise server_errors.HTTPError(400, 'Unsupported operation.')
-        data = common_util.parse_serialized_json()
-        access_token = common_util.get_access_token()
-        api_key = self._oauth.get_api_key_from_access_token(access_token)
-        self._handle_state_patch(device_id, api_key, data)
-        return {'state': self.resource.get_data_val(device_id,
-                                                    api_key)['state']}
-
-
-    @tools.json_out()
-    def PUT(self, *args, **kwargs):
-        """Update an existing device using the incoming json data.
-
-        On startup, devices make a request like:
-
-        PUT http://<server-host>/devices/<device-id>
-
-        {'channel': {'supportedType': 'xmpp'},
-         'commandDefs': {},
-         'description': 'test_description ',
-         'displayName': 'test_display_name ',
-         'id': '4471f7',
-         'location': 'test_location ',
-         'name': 'test_device_name',
-         'state': {'base': {'firmwareVersion': '6771.0.2015_02_09_1429',
-                            'isProximityTokenRequired': False,
-                            'localDiscoveryEnabled': False,
-                            'manufacturer': '',
-                            'model': '',
-                            'serialNumber': '',
-                            'supportUrl': '',
-                            'updateUrl': ''}}}
-
-        This PUT has no API key, but comes with an OAUTH access token.
-
-        """
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        device_id, _, _ = common_util.parse_common_args(args, kwargs)
-        access_token = common_util.get_access_token()
-        if not access_token:
-            raise server_errors.HTTPError(401, 'Access denied.')
-        api_key = self._oauth.get_api_key_from_access_token(access_token)
-        data = common_util.parse_serialized_json()
-        self._validate_device_resource(data)
-
-        logging.info('Updating device with id=%s and device_config=%r',
-                     device_id, data)
-        new_device = self.resource.update_data_val(device_id, api_key,
-                                                   data_in=data)
-        return data
-
-
-    def DELETE(self, *args, **kwargs):
-        """Deletes the given device.
-
-        Format of this call is:
-        DELETE .../device_id
-
-        Raises:
-            server_errors.HTTPError if the device doesn't exist.
-        """
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        id, api_key, _ = common_util.parse_common_args(args, kwargs)
-        self.resource.del_data_val(id, api_key)
-        self.commands_instance.remove_device(id)
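Mirroring devices_unittest.py below, the Devices handler can be driven in-process; note the server-side fields (kind and the *TimeMs stamps) that _validate_device_resource() adds. The sketch assumes the fake_device_server package is importable.

from fake_device_server import (commands, devices, fail_control, oauth,
                                resource_delegate)

fail = fail_control.FailControl()
auth = oauth.OAuth(fail)
cmds = commands.Commands(auth, fail)
handler = devices.Devices(resource_delegate.ResourceDelegate({}),
                          cmds, auth, fail)

device = handler.create_device(None, {'name': 'sketch_device',
                                      'channel': {'supportedType': 'xmpp'}})
print(device['id'], device['kind'], device['creationTimeMs'])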
diff --git a/client/common_lib/cros/fake_device_server/devices_unittest.py b/client/common_lib/cros/fake_device_server/devices_unittest.py
deleted file mode 100755
index 52f48a2..0000000
--- a/client/common_lib/cros/fake_device_server/devices_unittest.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/python3
-
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for devices.py."""
-
-import unittest
-
-import common
-from fake_device_server import commands
-from fake_device_server import devices
-from fake_device_server import fail_control
-from fake_device_server import oauth
-from fake_device_server import resource_delegate
-from fake_device_server import server_errors
-
-
-class DevicesTest(unittest.TestCase):
-    """Tests for the Devices class."""
-
-    def setUp(self):
-        """Sets a ticket / registration objects."""
-        self.devices_resource = {}
-        self.fail_control = fail_control.FailControl()
-        self.oauth = oauth.OAuth(self.fail_control)
-        self.commands = commands.Commands(self.oauth, self.fail_control)
-        self.devices = devices.Devices(
-                resource_delegate.ResourceDelegate(self.devices_resource),
-                self.commands,
-                self.oauth,
-                self.fail_control)
-
-
-    def testCreateDevice(self):
-        """Tests that we can create a new device."""
-        good_device_config = dict(userEmail='buffet@tasty.org',
-                                  name='buffet_device',
-                                  channel=dict(supportedType='xmpp'))
-
-        new_device = self.devices.create_device(None, good_device_config)
-        self.assertTrue('id' in new_device)
-        device_id = new_device['id']
-        # New device should be registered with commands handler.
-        self.assertTrue(device_id in self.commands.device_commands)
-
-        bad_device_config = dict(name='buffet_device')
-        self.assertRaises(server_errors.HTTPError,
-                          self.devices.create_device, None, bad_device_config)
-
-
-    def testGet(self):
-        """Tests that we can retrieve a device correctly."""
-        self.devices_resource[(1234, None)] = dict(id=1234)
-        returned_json = self.devices.GET(1234)
-        self.assertEquals(returned_json, self.devices_resource[(1234, None)])
-
-        # Non-existing device.
-        self.assertRaises(server_errors.HTTPError,
-                          self.devices.GET, 1235)
-
-
-    def testListing(self):
-        """Tests that we can get a listing back correctly using the GET method.
-        """
-        self.devices_resource[(1234, None)] = dict(id=1234)
-        self.devices_resource[(1235, None)] = dict(id=1235, boogity='taco')
-
-        returned_json = self.devices.GET()
-        self.assertEqual('clouddevices#devicesListResponse',
-                         returned_json['kind'])
-        self.assertTrue('devices' in returned_json)
-        for device in self.devices_resource.values():
-            self.assertIn(device, returned_json['devices'])
-
-
-    def testDeleteDevice(self):
-        """Tests that we correctly delete a device."""
-        # Register device with commands handler first.
-        self.commands.new_device(12345)
-        self.devices_resource[(12345, None)] = dict(id=12345, nobody='care')
-        self.devices.DELETE(12345)
-
-        self.assertTrue(12345 not in self.devices_resource)
-        # Make sure the device is deleted from the command handler.
-        self.assertRaises(KeyError, self.commands.remove_device, 12345)
-
-        # Should error out if we try to delete something that doesn't exist.
-        self.assertRaises(server_errors.HTTPError,
-                          self.devices.DELETE, 12500)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/client/common_lib/cros/fake_device_server/fail_control.py b/client/common_lib/cros/fake_device_server/fail_control.py
deleted file mode 100644
index 77165fe..0000000
--- a/client/common_lib/cros/fake_device_server/fail_control.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import cherrypy
-
-import common
-import logging
-from fake_device_server import server_errors
-
-FAIL_CONTROL_PATH = 'fail_control'
-
-class FailControl(object):
-    """Interface used to control failing of requests."""
-
-    # Needed for cherrypy to expose this to requests.
-    exposed = True
-
-    def __init__(self):
-        self._in_failure_mode = False
-
-    def ensure_not_in_failure_mode(self):
-        """Ensures we're not in failure mode.
-
-        If instructed to fail, this method raises an HTTPError
-        exception with code 500 (Internal Server Error). Otherwise
-        does nothing.
-
-        """
-        if not self._in_failure_mode:
-            return
-        raise server_errors.HTTPError(500, 'Instructed to fail this request')
-
-    @cherrypy.tools.json_out()
-    def POST(self, *args, **kwargs):
-        """Handle POST messages."""
-        path = list(args)
-        if path == ['start_failing_requests']:
-            self._in_failure_mode = True
-            logging.info('Requested to start failing all requests.')
-            return dict()
-        elif path == ['stop_failing_requests']:
-            self._in_failure_mode = False
-            logging.info('Requested to stop failing all requests.')
-            return dict()
-        else:
-            raise server_errors.HTTPError(
-                    400, 'Unsupported fail_control path %s' % path)
diff --git a/client/common_lib/cros/fake_device_server/fake_gcd_helper.py b/client/common_lib/cros/fake_device_server/fake_gcd_helper.py
deleted file mode 100644
index bb1e540..0000000
--- a/client/common_lib/cros/fake_device_server/fake_gcd_helper.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-import uuid
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import process_watcher
-from autotest_lib.client.common_lib.cros.fake_device_server.client_lib import \
-        meta
-
-
-class FakeGCDHelper(object):
-    """Helper object that knows how to bring up and kill fake GCD instances."""
-
-    def __init__(self, host=None):
-        """Construct an instance.
-
-        @param host: host object if the server should be started on a remote
-                host.
-
-        """
-        self._generation = str(uuid.uuid1())
-        self._process = process_watcher.ProcessWatcher(
-                '/usr/local/autotest/common_lib/cros/'
-                        'fake_device_server/server.py',
-                args=(self._generation,),
-                host=host)
-        self._meta = meta.MetaClient()
-
-
-    def start(self, timeout_seconds=30):
-        """Start this instance and confirm that it is up.
-
-        @param timeout_seconds: number of seconds to wait for server start.
-
-        """
-        self._process.start()
-        start_time = time.time()
-        while time.time() - start_time < timeout_seconds:
-            received_generation = self._meta.get_generation()
-            if self._generation == received_generation:
-                return
-            time.sleep(1)
-
-        raise error.TestError('Failed to start fake GCD server.')
-
-
-    def close(self):
-        """Close this instance."""
-        self._process.close()
diff --git a/client/common_lib/cros/fake_device_server/fake_oauth.py b/client/common_lib/cros/fake_device_server/fake_oauth.py
deleted file mode 100644
index f4e0a7a..0000000
--- a/client/common_lib/cros/fake_device_server/fake_oauth.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-class FakeOAuth(object):
-    """A Fake for oauth.OAuth to be used in unit-tests."""
-
-
-    def is_request_authorized(self):
-        """Checks if the access token in an incoming request is correct."""
-        return True
diff --git a/client/common_lib/cros/fake_device_server/meta_handler.py b/client/common_lib/cros/fake_device_server/meta_handler.py
deleted file mode 100644
index 60d1620..0000000
--- a/client/common_lib/cros/fake_device_server/meta_handler.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import cherrypy
-
-
-META_HANDLER_PATH = 'meta'
-
-
-class MetaHandler(object):
-    """Exposes meta methods related to the server."""
-
-    # Needed for cherrypy to expose this to requests.
-    exposed = True
-
-    def __init__(self, generation):
-        """Construct an instance.
-
-        @param generation: string unique token for this server (e.g. a UUID).
-
-        """
-        self._generation = generation
-
-    def GET(self, *args, **kwargs):
-        """Handle GET requests to this URL."""
-        if ['generation'] == list(args):
-            return self._generation
-        cherrypy.response.status = 400
-        return ''
diff --git a/client/common_lib/cros/fake_device_server/oauth.py b/client/common_lib/cros/fake_device_server/oauth.py
deleted file mode 100644
index ae4d948..0000000
--- a/client/common_lib/cros/fake_device_server/oauth.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import cherrypy
-
-import common
-import logging
-from fake_device_server import common_util
-from fake_device_server import server_errors
-
-OAUTH_PATH = 'oauth'
-
-TEST_API_KEY = 'this_is_an_api_key'
-TEST_DEVICE_ACCESS_TOKEN = 'a_device_access_token'
-TEST_DEVICE_REFRESH_TOKEN = 'a_device_refresh_token'
-TOKEN_EXPIRATION_SECONDS = 24 * 60 * 60  # 24 hours.
-
-
-class OAuth(object):
-    """The bare minimum to make Buffet think its talking to OAuth."""
-
-    # Needed for cherrypy to expose this to requests.
-    exposed = True
-
-    def __init__(self, fail_control_handler):
-        self._device_access_token = TEST_DEVICE_ACCESS_TOKEN
-        self._device_refresh_token = TEST_DEVICE_REFRESH_TOKEN
-        self._fail_control_handler = fail_control_handler
-
-
-    def get_api_key_from_access_token(self, access_token):
-        if access_token == self._device_access_token:
-            return TEST_API_KEY
-        return None
-
-
-    def is_request_authorized(self):
-        """Checks if the access token in an incoming request is correct."""
-        access_token = common_util.get_access_token()
-        if access_token == self._device_access_token:
-            return True
-        logging.info('Wrong access token - expected %s but device sent %s',
-                     self._device_access_token, access_token)
-        return False
-
-
-    @cherrypy.tools.json_out()
-    def POST(self, *args, **kwargs):
-        """Handle a post to get a refresh/access token.
-
-        We expect the device to provide (a subset of) the following parameters.
-
-            code
-            client_id
-            client_secret
-            redirect_uri
-            scope
-            grant_type
-            refresh_token
-
-        in the request body in query-string format (see the OAuth docs
-        for details). Since we're a bare-minimum implementation we're
-        going to ignore most of these.
-
-        """
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        path = list(args)
-        if path == ['token']:
-            body_length = int(cherrypy.request.headers.get('Content-Length', 0))
-            body = cherrypy.request.rfile.read(body_length)
-            params = cherrypy.lib.httputil.parse_query_string(body)
-            refresh_token = params.get('refresh_token')
-            if refresh_token and refresh_token != self._device_refresh_token:
-                logging.info('Wrong refresh token - expected %s but '
-                             'device sent %s',
-                             self._device_refresh_token, refresh_token)
-                cherrypy.response.status = 400
-                response = {'error': 'invalid_grant'}
-                return response
-            response = {
-                'access_token': self._device_access_token,
-                'refresh_token': self._device_refresh_token,
-                'expires_in': TOKEN_EXPIRATION_SECONDS,
-            }
-            return response
-        elif path == ['invalidate_all_access_tokens']:
-            # By concatenating '_X' to the end of existing access
-            # token, this will effectively invalidate the access token
-            # previously granted to a device and cause us to return
-            # the concatenated one for future requests.
-            self._device_access_token += '_X'
-            return dict()
-        elif path == ['invalidate_all_refresh_tokens']:
-            # Same here, only for the refresh token.
-            self._device_refresh_token += '_X'
-            return dict()
-        else:
-            raise server_errors.HTTPError(
-                    400, 'Unsupported oauth path %s' % path)
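For context on the removed OAuth handler above: it granted tokens via POST /oauth/token with the parameters sent in the request body in query-string format. A minimal sketch of such a request, assuming the fake server were running locally on its default port and using the requests library (both are illustrative assumptions, not part of this change):

import requests  # illustrative client; not used by the removed module itself

FAKE_SERVER = 'http://localhost:9876'  # PORT from the removed server.py

# The handler parsed the body as a query string (form-encoded), not JSON.
resp = requests.post(FAKE_SERVER + '/oauth/token',
                     data={'grant_type': 'refresh_token',
                           'refresh_token': 'a_device_refresh_token'})
# Expected shape: {'access_token': ..., 'refresh_token': ..., 'expires_in': 86400}
print(resp.json())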
diff --git a/client/common_lib/cros/fake_device_server/registration_tickets.py b/client/common_lib/cros/fake_device_server/registration_tickets.py
deleted file mode 100755
index 081b848..0000000
--- a/client/common_lib/cros/fake_device_server/registration_tickets.py
+++ /dev/null
@@ -1,204 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains a simple implementation of the registrationTickets RPC."""
-
-import logging
-from cherrypy import tools
-import time
-import uuid
-
-import common
-from fake_device_server import common_util
-from fake_device_server import server_errors
-
-REGISTRATION_PATH = 'registrationTickets'
-
-
-class RegistrationTickets(object):
-    """A simple implementation of the registrationTickets interface.
-
-    A common workflow of using this API is:
-
-    client: POST .../ # Creates a new ticket with id <id> claims the ticket.
-    device: PATCH .../<id> with json blob # Populate ticket with device info
-    device: POST .../<id>/finalize # Finalize the device registration.
-    """
-    # OAUTH2 Bearer Access Token
-    TEST_ACCESS_TOKEN = '1/TEST-ME'
-
-    # Needed for cherrypy to expose this to requests.
-    exposed = True
-
-
-    def __init__(self, resource, devices_instance, fail_control_handler):
-        """Initializes a registration ticket.
-
-        @param resource: A resource delegate.
-        @param devices_instance: Instance of Devices class.
-        @param fail_control_handler: Instance of FailControl.
-        """
-        self.resource = resource
-        self.devices_instance = devices_instance
-        self._fail_control_handler = fail_control_handler
-
-
-    def _default_registration_ticket(self):
-        """Creates and returns a new registration ticket."""
-        current_time_ms = time.time() * 1000
-        ticket = {'kind': 'clouddevices#registrationTicket',
-                  'creationTimeMs': current_time_ms,
-                  'expirationTimeMs': current_time_ms + (10 * 1000)}
-        return ticket
-
-
-    def _finalize(self, id, api_key, ticket):
-        """Finalizes the ticket causing the server to add robot account info."""
-        if 'userEmail' not in ticket:
-            raise server_errors.HTTPError(400, 'Unclaimed ticket')
-
-        robot_account_email = 'robot@test.org'
-        robot_auth = uuid.uuid4().hex
-        new_data = {'robotAccountEmail': robot_account_email,
-                    'robotAccountAuthorizationCode':robot_auth}
-        updated_data_val = self.resource.update_data_val(id, api_key, new_data)
-        updated_data_val['deviceDraft'] = self.devices_instance.create_device(
-            api_key, updated_data_val.get('deviceDraft'))
-        return updated_data_val
-
-
-    def _add_claim_data(self, data):
-        """Adds userEmail to |data| to claim ticket.
-
-        Raises:
-            server_errors.HTTPError if there is an authorization error.
-        """
-        access_token = common_util.grab_header_field('Authorization')
-        if not access_token:
-            raise server_errors.HTTPError(401, 'Missing Authorization.')
-
-        # Authorization should contain "<type> <token>"
-        access_token_list = access_token.split()
-        if len(access_token_list) != 2:
-            raise server_errors.HTTPError(400, 'Malformed Authorization field')
-
-        [type, code] = access_token_list
-        # TODO(sosa): Consider adding HTTP WWW-Authenticate response header
-        # field
-        if type != 'Bearer':
-            raise server_errors.HTTPError(403, 'Authorization requires '
-                                          'bearer token.')
-        elif code != RegistrationTickets.TEST_ACCESS_TOKEN:
-            raise server_errors.HTTPError(403, 'Wrong access token.')
-        else:
-            logging.info('Ticket is being claimed.')
-            data['userEmail'] = 'test_account@chromium.org'
-
-
-    @tools.json_out()
-    def GET(self, *args, **kwargs):
-        """GET .../ticket_number returns info about the ticket.
-
-        Raises:
-            server_errors.HTTPError if the ticket doesn't exist.
-        """
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        id, api_key, _ = common_util.parse_common_args(args, kwargs)
-        return self.resource.get_data_val(id, api_key)
-
-
-    @tools.json_out()
-    def POST(self, *args, **kwargs):
-        """Either creates a ticket OR claim/finalizes a ticket.
-
-        This method implements the majority of the registration workflow.
-        More specifically:
-        POST ... creates a new ticket
-        POST .../ticket_number/claim claims a given ticket with a fake email.
-        POST .../ticket_number/finalize finalizes a ticket with a robot account.
-
-        Raises:
-            server_errors.HTTPError if the ticket should exist but doesn't
-            (claim/finalize) or if we can't parse all the args.
-        """
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        id, api_key, operation = common_util.parse_common_args(
-                args, kwargs, supported_operations=set(['finalize']))
-        if operation:
-            ticket = self.resource.get_data_val(id, api_key)
-            if operation == 'finalize':
-                return self._finalize(id, api_key, ticket)
-            else:
-                raise server_errors.HTTPError(
-                        400, 'Unsupported method call %s' % operation)
-
-        else:
-            data = common_util.parse_serialized_json()
-            if data is None or data.get('userEmail', None) != 'me':
-                raise server_errors.HTTPError(
-                        400,
-                        'Require userEmail=me to create ticket %s' % operation)
-            if [key for key in iter(data) if key != 'userEmail']:
-                raise server_errors.HTTPError(
-                        400, 'Extra data for ticket creation: %r.' % data)
-            if id:
-                raise server_errors.HTTPError(
-                        400, 'Should not specify ticket ID.')
-
-            self._add_claim_data(data)
-            # We have an insert operation so make sure we have all required
-            # fields.
-            data.update(self._default_registration_ticket())
-
-            logging.info('Ticket is being created.')
-            return self.resource.update_data_val(id, api_key, data_in=data)
-
-
-    @tools.json_out()
-    def PATCH(self, *args, **kwargs):
-        """Updates the given ticket with the incoming json blob.
-
-        Format of this call is:
-        PATCH .../ticket_number
-
-        Caller must define a json blob to patch the ticket with.
-
-        Raises:
-            server_errors.HTTPError if the ticket doesn't exist.
-        """
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        id, api_key, _ = common_util.parse_common_args(args, kwargs)
-        if not id:
-            server_errors.HTTPError(400, 'Missing id for operation')
-
-        data = common_util.parse_serialized_json()
-
-        return self.resource.update_data_val(
-                id, api_key, data_in=data)
-
-
-    @tools.json_out()
-    def PUT(self, *args, **kwargs):
-        """Replaces the given ticket with the incoming json blob.
-
-        Format of this call is:
-        PUT .../ticket_number
-
-        Caller must define a json blob to patch the ticket with.
-
-        Raises:
-        """
-        self._fail_control_handler.ensure_not_in_failure_mode()
-        id, api_key, _ = common_util.parse_common_args(args, kwargs)
-        if not id:
-            server_errors.HTTPError(400, 'Missing id for operation')
-
-        data = common_util.parse_serialized_json()
-
-        # Handle claiming a ticket with an authorized request.
-        if data and data.get('userEmail') == 'me':
-            self._add_claim_data(data)
-
-        return self.resource.update_data_val(
-                id, api_key, data_in=data, update=False)
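The removed RegistrationTickets class documents the registration workflow (create, patch, finalize). A hedged sketch of how a test client might have exercised it over HTTP, again assuming a locally running fake server and the requests library (both illustrative assumptions):

import requests  # illustrative; endpoint layout taken from the removed module

BASE = 'http://localhost:9876/registrationTickets'
HEADERS = {'Authorization': 'Bearer 1/TEST-ME'}  # TEST_ACCESS_TOKEN

# 1. Client creates (and thereby claims) a ticket.
ticket = requests.post(BASE, json={'userEmail': 'me'}, headers=HEADERS).json()

# 2. Device populates the ticket with its draft info.
requests.patch('%s/%s' % (BASE, ticket['id']),
               json={'deviceDraft': {'name': 'buffet_device',
                                     'channel': {'supportedType': 'xmpp'}}})

# 3. Device finalizes; the server attaches robot-account credentials.
final = requests.post('%s/%s/finalize' % (BASE, ticket['id'])).json()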
diff --git a/client/common_lib/cros/fake_device_server/registration_tickets_unittest.py b/client/common_lib/cros/fake_device_server/registration_tickets_unittest.py
deleted file mode 100755
index d5376e2..0000000
--- a/client/common_lib/cros/fake_device_server/registration_tickets_unittest.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/python3
-
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for registration_tickets.py."""
-
-import unittest
-import mock
-
-import common
-from fake_device_server import common_util
-from fake_device_server import commands
-from fake_device_server import devices
-from fake_device_server import fail_control
-from fake_device_server import oauth
-from fake_device_server import registration_tickets
-from fake_device_server import resource_delegate
-from fake_device_server import server_errors
-
-
-class RegistrationTicketsTest(unittest.TestCase):
-    """Tests for the RegistrationTickets class."""
-
-    def setUp(self):
-        """Sets up a ticket / registration objects."""
-        self.tickets = {}
-        self.devices_resource = {}
-        self.fail_control = fail_control.FailControl()
-        self.oauth = oauth.OAuth(self.fail_control)
-        self.commands = commands.Commands(self.oauth, self.fail_control)
-        self.devices = devices.Devices(
-                resource_delegate.ResourceDelegate(self.devices_resource),
-                self.commands,
-                self.oauth,
-                self.fail_control)
-
-        self.registration = registration_tickets.RegistrationTickets(
-                resource_delegate.ResourceDelegate(self.tickets), self.devices,
-                self.fail_control)
-
-
-    def testFinalize(self):
-        """Tests that the finalize workflow does the right thing."""
-        # Unclaimed ticket
-        self.tickets[(1234, None)] = dict(id=1234)
-        self.assertRaises(server_errors.HTTPError,
-                          self.registration.POST, 1234, 'finalize')
-
-        # Claimed ticket
-        expected_ticket = dict(
-                id=1234, userEmail='buffet@tasty.org',
-                deviceDraft=dict(name='buffet_device',
-                                 channel=dict(supportedType='xmpp')))
-        self.tickets[(1234, None)] = expected_ticket
-        returned_json = self.registration.POST(1234, 'finalize')
-        self.assertEquals(returned_json['id'], expected_ticket['id'])
-        self.assertEquals(returned_json['userEmail'],
-                          expected_ticket['userEmail'])
-        self.assertIn('robotAccountEmail', returned_json)
-        self.assertIn('robotAccountAuthorizationCode', returned_json)
-
-
-    def testInsert(self):
-        """Tests that we can create a new ticket."""
-        common_util.parse_serialized_json = mock.MagicMock(
-            return_value={'userEmail': 'me'})
-        common_util.grab_header_field = mock.MagicMock(
-            return_value='Bearer %s' % self.registration.TEST_ACCESS_TOKEN)
-        returned_json = self.registration.POST()
-        self.assertIn('id', returned_json)
-        common_util.parse_serialized_json.assert_called_once()
-        common_util.grab_header_field.assert_called_once()
-
-
-    def testGet(self):
-        """Tests that we can retrieve a ticket correctly."""
-        self.tickets[(1234, None)] = dict(id=1234)
-        returned_json = self.registration.GET(1234)
-        self.assertEquals(returned_json, self.tickets[(1234, None)])
-
-        # Non-existing ticket.
-        self.assertRaises(server_errors.HTTPError,
-                          self.registration.GET, 1235)
-
-
-    def testPatchTicket(self):
-        """Tests that we correctly patch a ticket."""
-        expected_ticket = dict(id=1234, blah='hi')
-        update_ticket = dict(blah='hi')
-        self.tickets[(1234, None)] = dict(id=1234)
-
-        common_util.parse_serialized_json = mock.MagicMock(
-            return_value=update_ticket)
-
-        returned_json = self.registration.PATCH(1234)
-        self.assertEquals(expected_ticket, returned_json)
-        common_util.parse_serialized_json.assert_called_once()
-
-
-    def _testReplaceTicket(self):
-        """Tests that we correctly replace a ticket."""
-        update_ticket = dict(id=12345, blah='hi')
-        self.tickets[(12345, None)] = dict(id=12345)
-
-        common_util.parse_serialized_json = mock.MagicMock(
-            return_value=update_ticket)
-
-        returned_json = self.registration.PUT(12345)
-        self.assertEquals(update_ticket, returned_json)
-        common_util.parse_serialized_json.assert_called_once()
-
-        common_util.parse_serialized_json.reset_mock()
-
-        # Ticket id doesn't match.
-        update_ticket = dict(id=12346, blah='hi')
-        common_util.parse_serialized_json = mock.MagicMock(
-            return_value=update_ticket)
-
-        self.assertRaises(server_errors.HTTPError,
-                          self.registration.PUT, 12345)
-        common_util.parse_serialized_json.assert_called_once()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/client/common_lib/cros/fake_device_server/resource_delegate.py b/client/common_lib/cros/fake_device_server/resource_delegate.py
deleted file mode 100755
index 74c810b..0000000
--- a/client/common_lib/cros/fake_device_server/resource_delegate.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains code used in dealing with data resources."""
-
-import logging
-import uuid
-
-import common
-from fake_device_server import server_errors
-
-
-class ResourceDelegate(object):
-    """Delegate for resources held by the various server methods.
-
-    The fake_device_server methods are all fairly similar in that they
-    have similar dictionary representations. Server methods use this class to
-    delegate access to their data.
-
-    Data is stored based on a combination of <id> + <api_key>
-    tuples. The api_key can be passed in to any command with ?key=<api_key>.
-    This isn't necessary though as using a default of None is ok.
-    """
-
-    def __init__(self, data):
-        # Dictionary of data blobs with keys of <id, api_key> pairs that map
-        # to the data e.g. for devices, the values are the device dicts, for
-        # registration tickets, the values are the ticket dicts.
-        self._data = data
-
-
-    def get_data_val(self, id, api_key):
-        """Returns the data value for the given id, api_key pair.
-
-        @param id: ID for data val.
-        @param api_key: optional api_key for the data_val.
-
-        Raises:
-            server_errors.HTTPError if the data_val doesn't exist.
-        """
-        key = (id, api_key)
-        data_val = self._data.get(key)
-        if not data_val:
-            # Put the tuple we want inside another tuple, so that Python doesn't
-            # unroll |key| and complain that we haven't asked to printf two
-            # values.
-            raise server_errors.HTTPError(400, 'Invalid data key: %r' % (key,))
-        return data_val
-
-
-    def get_data_vals(self):
-        """Returns a list of all data values."""
-        return self._data.values()
-
-
-    def del_data_val(self, id, api_key):
-        """Deletes the data value for the given id, api_key pair.
-
-        @param id: ID for data val.
-        @param api_key: optional api_key for the data_val.
-
-        Raises:
-            server_errors.HTTPError if the data_val doesn't exist.
-        """
-        key = (id, api_key)
-        if key not in self._data:
-            # Put the tuple we want inside another tuple, so that Python doesn't
-            # unroll |key| and complain that we haven't asked to printf two
-            # values.
-            raise server_errors.HTTPError(400, 'Invalid data key: %r' % (key,))
-        del self._data[key]
-
-
-    def update_data_val(self, id, api_key, data_in=None, update=True):
-        """Helper method for all mutations to data vals.
-
-        If the id isn't given, creates a new template default with a new id.
-        Otherwise updates/replaces the given dict with the data based on update.
-
-        @param id: id (if None, creates a new data val).
-        @param api_key: optional api_key.
-        @param data_in: data dictionary to either update or replace current.
-        @param update: fully replace data_val given by id, api_key with data_in.
-
-        Raises:
-            server_errors.HTTPError if the id is non-None and not in self._data.
-        """
-        data_val = None
-        if not id:
-            # This is an insertion.
-            if not data_in:
-                raise ValueError('Either id OR data_in must be specified.')
-
-            # Create a new id and insert the data blob into our dictionary.
-            id = uuid.uuid4().hex[0:6]
-            data_in['id'] = id
-            self._data[(id, api_key)] = data_in
-            return data_in
-
-        data_val = self.get_data_val(id, api_key)
-        if not data_in:
-            logging.warning('Received empty data update. Doing nothing.')
-            return data_val
-
-        # Update or replace the existing data val.
-        if update:
-            data_val.update(data_in)
-        else:
-            if data_val.get('id') != data_in.get('id'):
-                raise server_errors.HTTPError(400, "Ticket id doesn't match")
-
-            data_val = data_in
-            self._data[(id, api_key)] = data_in
-
-        return data_val
diff --git a/client/common_lib/cros/fake_device_server/resource_method.py b/client/common_lib/cros/fake_device_server/resource_method.py
deleted file mode 100755
index 58257c3..0000000
--- a/client/common_lib/cros/fake_device_server/resource_method.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module contains a simple base class for patching or updating a resource."""
-
-from cherrypy import tools
-
-import common
-from fake_device_server import common_util
-from fake_device_server import server_errors
-
-
-class ResourceMethod(object):
-    """A base class for methods that expose a simple PATCH/PUT mechanism."""
-
-    def __init__(self, resource):
-        """
-        @param resource: A resource delegate for storing devices.
-        """
-        self.resource = resource
-
-
-    @tools.json_out()
-    def PATCH(self, *args, **kwargs):
-        """Updates the given resource with the incoming json blob.
-
-        Format of this call is:
-        PATCH .../resource_id
-
-        Caller must define a json blob to patch the resource with.
-
-        Raises:
-            server_errors.HTTPError if the resource doesn't exist.
-        """
-        id, api_key, _ = common_util.parse_common_args(args, kwargs)
-        if not id:
-            server_errors.HTTPError(400, 'Missing id for operation')
-
-        data = common_util.parse_serialized_json()
-        return self.resource.update_data_val(id, api_key, data_in=data)
-
-
-    @tools.json_out()
-    def PUT(self, *args, **kwargs):
-        """Replaces the given resource with the incoming json blob.
-
-        Format of this call is:
-        PUT .../resource_id
-
-        Caller must define a json blob to patch the resource with.
-
-        Raises:
-            server_errors.HTTPError if the resource doesn't exist.
-        """
-        id, api_key, _ = common_util.parse_common_args(args, kwargs)
-        if not id:
-            server_errors.HTTPError(400, 'Missing id for operation')
-
-        data = common_util.parse_serialized_json()
-        return self.resource.update_data_val(
-                id, api_key, data_in=data, update=False)
diff --git a/client/common_lib/cros/fake_device_server/resource_method_unittest.py b/client/common_lib/cros/fake_device_server/resource_method_unittest.py
deleted file mode 100755
index 516214e..0000000
--- a/client/common_lib/cros/fake_device_server/resource_method_unittest.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/python3
-
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for resource_method.py."""
-
-import unittest
-import mock
-
-import common
-from fake_device_server import common_util
-from fake_device_server import resource_method
-from fake_device_server import resource_delegate
-from fake_device_server import server_errors
-
-
-class ResourceMethodTest(unittest.TestCase):
-    """Tests for the ResourceMethod class."""
-
-    def setUp(self):
-        """Sets up resource_method object and dict of resources."""
-        self.resources = {}
-        self.resource_method = resource_method.ResourceMethod(
-                resource_delegate.ResourceDelegate(self.resources))
-
-
-    def testPatch(self):
-        """Tests that we correctly patch a resource."""
-        expected_resource = dict(id=1234, blah='hi')
-        update_resource = dict(blah='hi')
-        self.resources[(1234, None)] = dict(id=1234)
-
-        common_util.parse_serialized_json = mock.MagicMock(
-            return_value=update_resource)
-
-        returned_json = self.resource_method.PATCH(1234)
-        self.assertEquals(expected_resource, returned_json)
-        common_util.parse_serialized_json.assert_called_once()
-
-
-    def testPut(self):
-        """Tests that we correctly replace a resource."""
-        update_resource = dict(id=12345, blah='hi')
-        self.resources[(12345, None)] = dict(id=12345)
-
-        common_util.parse_serialized_json = mock.MagicMock(
-            return_value=update_resource)
-
-        returned_json = self.resource_method.PUT(12345)
-        self.assertEquals(update_resource, returned_json)
-        common_util.parse_serialized_json.assert_called_once()
-
-        common_util.parse_serialized_json.reset_mock()
-
-        # Ticket id doesn't match.
-        update_resource = dict(id=12346, blah='hi')
-        common_util.parse_serialized_json = mock.MagicMock(
-            return_value=update_resource)
-
-        self.assertRaises(server_errors.HTTPError,
-                          self.resource_method.PUT, 12345)
-        common_util.parse_serialized_json.assert_called_once()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/client/common_lib/cros/fake_device_server/server.py b/client/common_lib/cros/fake_device_server/server.py
deleted file mode 100755
index e824cca..0000000
--- a/client/common_lib/cros/fake_device_server/server.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#! /usr/bin/env python
-
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Fake implementation of a Device Server.
-
-This module can be used in testing both in autotests and locally. To use locally
-you can just run this python module directly.
-"""
-
-import argparse
-import logging
-import logging.handlers
-import cherrypy
-
-import common
-from fake_device_server import commands
-from fake_device_server import devices
-from fake_device_server import fail_control
-from fake_device_server import meta_handler
-from fake_device_server import oauth
-from fake_device_server import registration_tickets
-from fake_device_server import resource_delegate
-
-PORT = 9876
-
-
-def stop_server():
-    """Stops the cherrypy server and blocks."""
-    cherrypy.engine.stop()
-
-
-def start_server(generation):
-    """Starts the cherrypy server and blocks.
-   
-    @param generation: string unique to this instance of the fake device server.
-
-    """
-    fail_control_handler = fail_control.FailControl()
-    cherrypy.tree.mount(
-        fail_control_handler, '/' + fail_control.FAIL_CONTROL_PATH,
-        {'/':
-            {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
-        }
-    )
-    oauth_handler = oauth.OAuth(fail_control_handler)
-    commands_handler = commands.Commands(oauth_handler, fail_control_handler)
-    cherrypy.tree.mount(
-        commands_handler, '/' + commands.COMMANDS_PATH,
-        {'/':
-            {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
-        }
-    )
-    devices_resource = resource_delegate.ResourceDelegate({})
-    # TODO(wiley): We need to validate device commands.
-    devices_handler = devices.Devices(devices_resource,
-                                      commands_handler,
-                                      oauth_handler,
-                                      fail_control_handler)
-    cherrypy.tree.mount(
-        devices_handler, '/' + devices.DEVICES_PATH,
-        {'/':
-            {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
-        }
-    )
-    tickets = resource_delegate.ResourceDelegate({})
-    registration_tickets_handler = registration_tickets.RegistrationTickets(
-            tickets, devices_handler, fail_control_handler)
-    cherrypy.tree.mount(
-        registration_tickets_handler,
-        '/' + registration_tickets.REGISTRATION_PATH,
-        {'/':
-            {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
-        }
-    )
-    cherrypy.tree.mount(
-        oauth_handler,
-        '/' + oauth.OAUTH_PATH,
-        {'/':
-            {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
-        }
-    )
-    cherrypy.tree.mount(
-        meta_handler.MetaHandler(generation),
-        '/' + meta_handler.META_HANDLER_PATH,
-        {'/':
-            {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
-        }
-    )
-    # Don't parse POST for params.
-    cherrypy.config.update({'global': {'request.process_request_body': False}})
-    cherrypy.engine.start()
-
-
-def main():
-    """Main method for callers who start this module directly."""
-    parser = argparse.ArgumentParser(
-        description='Acts like a fake instance of GCD')
-    parser.add_argument('generation', metavar='generation', type=str,
-                        help='Unique generation id for confirming health')
-    args = parser.parse_args()
-    cherrypy.config.update({'server.socket_port': PORT})
-    start_server(args.generation)
-    cherrypy.engine.block()
-
-
-if __name__ == '__main__':
-    formatter = logging.Formatter(
-            'fake_gcd_server: [%(levelname)s] %(message)s')
-    handler = logging.handlers.SysLogHandler(address='/dev/log')
-    handler.setFormatter(formatter)
-    logging.basicConfig(level=logging.DEBUG)
-    logging.getLogger().addHandler(handler)
-    main()
diff --git a/client/common_lib/cros/fake_device_server/server_errors.py b/client/common_lib/cros/fake_device_server/server_errors.py
deleted file mode 100755
index 47e5f49..0000000
--- a/client/common_lib/cros/fake_device_server/server_errors.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Contains errors used by the fake_device_server."""
-
-import cherrypy
-
-
-class HTTPError(cherrypy.HTTPError):
-  """Exception class to log the HTTPResponse before routing it to cherrypy."""
-  def __init__(self, status, message):
-      """
-      @param status: HTTPResponse status.
-      @param message: Message associated with the response.
-      """
-      cherrypy.HTTPError.__init__(self, status, message)
-      cherrypy.log('ServerHTTPError status: %s message: %s' % (status, message))
diff --git a/client/common_lib/cros/g2f_utils.py b/client/common_lib/cros/g2f_utils.py
index 193be4d..73fa8dd 100644
--- a/client/common_lib/cros/g2f_utils.py
+++ b/client/common_lib/cros/g2f_utils.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -36,7 +37,7 @@
       attempts += 1
       try:
         return '/dev/' + client.run('ls ' + path).stdout.strip()
-      except error.AutoservRunError, e:
+      except error.AutoservRunError as e:
         logging.info('Could not find U2F device on attempt ' +
                      str(attempts))
       time.sleep(QUERY_U2F_RETRY_DELAY_SEC)
diff --git a/client/common_lib/cros/interactive_xmlrpc_server.py b/client/common_lib/cros/interactive_xmlrpc_server.py
index f5f2534..17e87b6 100755
--- a/client/common_lib/cros/interactive_xmlrpc_server.py
+++ b/client/common_lib/cros/interactive_xmlrpc_server.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/common_lib/cros/kernel_utils.py b/client/common_lib/cros/kernel_utils.py
index bdb2154..46093d0 100644
--- a/client/common_lib/cros/kernel_utils.py
+++ b/client/common_lib/cros/kernel_utils.py
@@ -11,6 +11,8 @@
 
 _KERNEL_A = {'name': 'KERN-A', 'kernel': 2, 'root': 3}
 _KERNEL_B = {'name': 'KERN-B', 'kernel': 4, 'root': 5}
+_MINIOS_A = 'A'
+_MINIOS_B = 'B'
 
 # Time to wait for new kernel to be marked successful after auto update.
 _KERNEL_UPDATE_TIMEOUT = 120
@@ -93,22 +95,27 @@
     """
     return _cgpt('-T', kernel, host)
 
-def verify_kernel_state_after_update(host=None):
+
+def verify_kernel_state_after_update(host=None, inactive_kernel=True):
     """
-    Ensure the next kernel to boot is the currently inactive kernel.
+    Ensure the next kernel to boot is the expected kernel.
 
     This is useful for checking after completing an update.
 
     @param host: The DUT to execute the command on. None to execute locally.
-    @returns the inactive kernel.
+    @param inactive_kernel: Indicates if the expected kernel is the inactive
+                            kernel (True) or the active kernel (False).
+    @returns the next kernel.
 
     """
-    inactive_kernel = get_kernel_state(host)[1]
+    expected_kernel = get_kernel_state(host)[1 if inactive_kernel else 0]
     next_kernel = get_next_kernel(host)
-    if next_kernel != inactive_kernel:
-        raise Exception('The kernel for next boot is %s, but %s was expected.'
-                        % (next_kernel['name'], inactive_kernel['name']))
-    return inactive_kernel
+    if next_kernel != expected_kernel:
+        raise Exception(
+                'The kernel for next boot is %s, but %s was expected.' %
+                (next_kernel['name'], expected_kernel['name']))
+    return next_kernel
+
 
 def verify_boot_expectations(expected_kernel, error_message=_BOOT_ERR_MSG,
                              host=None):
@@ -165,3 +172,36 @@
         else:
             raise Exception('update-engine failed to call '
                             'chromeos-setgoodkernel')
+
+
+def get_minios_priority(host=None):
+    """
+    Returns the (<active>, <inactive>) MiniOS partition as a pair.
+
+    @param host: The DUT to execute the command on. None to execute locally.
+
+    """
+    active = _run(['crossystem', 'minios_priority'], host).stdout.strip()
+    if active != _MINIOS_A and active != _MINIOS_B:
+        raise Exception('Encountered unknown MiniOS partition: %s' % active)
+    return (active, _MINIOS_B if active == _MINIOS_A else _MINIOS_A)
+
+
+def verify_minios_priority_after_update(host=None, expected=None):
+    """
+    Ensure the next MiniOS to boot is the expected one.
+
+    This is useful for checking after completing an update.
+
+    @param host: The DUT to execute the command on. None to execute locally.
+    @param expected: The expected MiniOS partition for the next boot.
+    """
+    active = _run(['crossystem', 'minios_priority'], host).stdout.strip()
+    if active != expected:
+        raise Exception(
+                'The MiniOS partition for next boot is %s, but %s was expected.'
+                % (active, expected))
+    else:
+        logging.debug(
+                'The MiniOS partition for next boot is %s, matches expected.',
+                active)
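A minimal usage sketch of the updated kernel_utils helpers, assuming a server-side test with a host object for the DUT (the calling test code is illustrative, not part of this change):

from autotest_lib.client.common_lib.cros import kernel_utils

# Default behaviour: after an update written to the inactive slot, the next
# boot should come from that slot.
next_kernel = kernel_utils.verify_kernel_state_after_update(host)

# New option: when the update targets the currently active slot, verify that
# the active kernel is still the one scheduled to boot.
kernel_utils.verify_kernel_state_after_update(host, inactive_kernel=False)

# MiniOS: record the (active, inactive) pair before updating, then confirm the
# priority flipped to the previously inactive partition.
active, inactive = kernel_utils.get_minios_priority(host)
kernel_utils.verify_minios_priority_after_update(host, expected=inactive)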
diff --git a/client/common_lib/cros/manual/cfm_helper.py b/client/common_lib/cros/manual/cfm_helper.py
index 5103453..88b8719 100644
--- a/client/common_lib/cros/manual/cfm_helper.py
+++ b/client/common_lib/cros/manual/cfm_helper.py
@@ -8,6 +8,7 @@
 
 import logging
 import re
+import six
 import time
 import common
 from autotest_lib.client.common_lib.cros.manual import get_usb_devices
@@ -110,11 +111,11 @@
             get_usb_devices.get_controller_mimo)
     for get_devices in get_devices_funcs:
         device_list = get_devices(usb_data)
-        for pid_vid, device_count in device_list.iteritems():
+        for pid_vid, device_count in six.iteritems(device_list):
             if device_count > 0:
                 peripheral_map[pid_vid] = device_count
 
-    for pid_vid, device_count in peripheral_map.iteritems():
+    for pid_vid, device_count in six.iteritems(peripheral_map):
         logging.info('---device: %s (%s), count: %d',
                      pid_vid, get_usb_devices.get_device_prod(pid_vid),
                      device_count)
@@ -158,8 +159,8 @@
         return False
 
     if len(type_controller) == 0:
-       logging.info('No controller is found on CfM.')
-       return False
+        logging.info('No controller is found on CfM.')
+        return False
 
 
     if not len(type_controller) == 1:
@@ -175,7 +176,7 @@
         return False
 
     # check CfM have only one camera, huddly and mimo
-    for pid_vid, device_count in peripheral_map.iteritems():
+    for pid_vid, device_count in six.iteritems(peripheral_map):
         if device_count > 1:
             logging.info('Number of device %s connected to CfM : %d',
                          get_usb_devices.get_device_prod(pid_vid),
@@ -268,15 +269,15 @@
     @returns True
     """
     for device in device_list:
-       vid, pid  = device.split(':')
-       logging.info('---going to powercyle device %s:%s', vid, pid)
-       try:
+        vid, pid = device.split(':')
+        logging.info('---going to power cycle device %s:%s', vid, pid)

+        try:
             power_cycle_usb_util.power_cycle_usb_vidpid(dut, board,
                                                         vid, pid, pause)
-       except Exception as e:
-           errmsg = 'Fail to power cycle device.'
-           logging.exception('%s.', errmsg)
-           return False, errmsg
+        except Exception as e:
+            errmsg = 'Fail to power cycle device.'
+            logging.exception('%s.', errmsg)
+            return False, errmsg
 
     return True, None
 
@@ -326,7 +327,7 @@
             last_lines['atrus'] = dut.run_output(cmd).strip().split()[0]
     except Exception as e:
         logging.exception('Fail to get the last line from log files.')
-    for item, timestamp in last_lines.iteritems():
+    for item, timestamp in six.iteritems(last_lines):
         logging.debug('---%s: %s', item, timestamp)
     return last_lines
 
@@ -341,7 +342,7 @@
     if logfile == "ui":
         cmd ='awk \'/{}/,0\' /var/log/ui/ui.LATEST'.format(lastlines[logfile])
     if logfile == 'atrus':
-         cmd ='awk \'/{}/,0\' /var/log/atrus.log'.format(lastlines[logfile])
+        cmd = 'awk \'/{}/,0\' /var/log/atrus.log'.format(lastlines[logfile])
     logging.info('---cmd = %s', cmd)
     try:
         output =  dut.run_output(cmd).split('\n')
@@ -372,7 +373,7 @@
     logging.info('---now check log %s in file %s', checkitem, logfile)
     output = collect_log_since_last_check(dut, timestamp, logfile)
     for _error in error_list[checkitem]:
-         error_log_list.extend([s for s in output if _error in str(s)])
+        error_log_list.extend([s for s in output if _error in str(s)])
     if not error_log_list:
         return True, None
     else:
diff --git a/client/common_lib/cros/manual/cfm_helper_unittest.py b/client/common_lib/cros/manual/cfm_helper_unittest.py
index 148bc04..4e50e6b 100644
--- a/client/common_lib/cros/manual/cfm_helper_unittest.py
+++ b/client/common_lib/cros/manual/cfm_helper_unittest.py
@@ -5,8 +5,8 @@
 
 import unittest
 
-import cfm_helper
-import get_usb_devices
+from autotest_lib.client.common_lib.cros.manual import cfm_helper
+from autotest_lib.client.common_lib.cros.manual import get_usb_devices
 
 SPEAKERS = 'speakers'
 CAMERAS = 'cameras'
diff --git a/client/common_lib/cros/manual/get_usb_devices.py b/client/common_lib/cros/manual/get_usb_devices.py
index 6ef83c2..b2479f3 100644
--- a/client/common_lib/cros/manual/get_usb_devices.py
+++ b/client/common_lib/cros/manual/get_usb_devices.py
@@ -10,7 +10,8 @@
 # 5. Check usb devices's interface.
 # 6. Retrieve usb device based on product and manufacture.
 #
-import cStringIO
+
+from six import StringIO
 from autotest_lib.client.common_lib.cros import textfsm
 
 USB_DEVICES_TPLT = (
@@ -91,7 +92,7 @@
     """
     usbdata = []
     rawdata += '\n'
-    re_table = textfsm.TextFSM(cStringIO.StringIO(USB_DEVICES_TPLT))
+    re_table = textfsm.TextFSM(StringIO(USB_DEVICES_TPLT))
     fsm_results = re_table.ParseText(rawdata)
     usbdata = [dict(zip(re_table.header, row)) for row in fsm_results]
     return usbdata
@@ -121,7 +122,7 @@
     audio_device_list = []
     for _data in usbdata:
         if "snd-usb-audio" in _data['intdriver']:
-           audio_device_list.append(_data)
+            audio_device_list.append(_data)
     return audio_device_list
 
 
@@ -133,7 +134,7 @@
     video_device_list = []
     for _data in usbdata:
         if "uvcvideo" in _data['intdriver']:
-             video_device_list.append(_data)
+            video_device_list.append(_data)
     return video_device_list
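With the six-based StringIO import in the hunk above, the template string is wrapped in an in-memory text stream before being handed to textfsm. A short sketch of the parsing step; the raw usb-devices output is an illustrative placeholder:

from six import StringIO
from autotest_lib.client.common_lib.cros import textfsm

raw_usb_output = '...'  # output of `usb-devices` collected from the DUT (placeholder)
re_table = textfsm.TextFSM(StringIO(USB_DEVICES_TPLT))
rows = re_table.ParseText(raw_usb_output + '\n')
usbdata = [dict(zip(re_table.header, row)) for row in rows]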
 
 
diff --git a/client/common_lib/cros/memory_eater.py b/client/common_lib/cros/memory_eater.py
index fd56e10..e38004d 100644
--- a/client/common_lib/cros/memory_eater.py
+++ b/client/common_lib/cros/memory_eater.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,6 +8,8 @@
 import time
 import threading
 
+import common
+
 from autotest_lib.client.bin import utils
 
 class MemoryEater(object):
diff --git a/client/common_lib/cros/network/ap_constants.py b/client/common_lib/cros/network/ap_constants.py
index 0a4945a..5289fa9 100644
--- a/client/common_lib/cros/network/ap_constants.py
+++ b/client/common_lib/cros/network/ap_constants.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/common_lib/cros/network/chrome_net_constants.py b/client/common_lib/cros/network/chrome_net_constants.py
index 0a58b69..7a4cdc9 100644
--- a/client/common_lib/cros/network/chrome_net_constants.py
+++ b/client/common_lib/cros/network/chrome_net_constants.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/common_lib/cros/network/interface.py b/client/common_lib/cros/network/interface.py
index f6f6dfc..c74bdad 100644
--- a/client/common_lib/cros/network/interface.py
+++ b/client/common_lib/cros/network/interface.py
@@ -133,13 +133,15 @@
             raise error.TestFail('Failed to find ethernet interface.')
 
 
-    def __init__(self, name, host=None):
+    def __init__(self, name, host=None, netns=None):
         self._name = name
         if host is None:
             self.host = local_host.LocalHost()
         else:
             self.host = host
         self._run = self.host.run
+        self._namespace = netns
+        self._ns_exec = 'ip netns exec %s ' % netns if netns else ''
 
 
     @property
@@ -164,7 +166,8 @@
         # We extract the second column from any entry for which the first
         # column is an address type we are interested in.  For example,
         # for "inet 172.22.73.124/22 ...", we will capture "172.22.73.124/22".
-        result = self._run('ip addr show %s 2> /dev/null' % self._name,
+        result = self._run(self._ns_exec +
+                           'ip addr show %s 2> /dev/null' % self._name,
                            ignore_status=True)
         address_info = result.stdout
         if result.exit_status != 0:
@@ -320,7 +323,8 @@
         #       valid_lft forever preferred_lft forever
         #
         # We only cares about the flags in the first line.
-        result = self._run('ip addr show %s 2> /dev/null' % self._name,
+        result = self._run(self._ns_exec +
+                           'ip addr show %s 2> /dev/null' % self._name,
                            ignore_status=True)
         address_info = result.stdout
         if result.exit_status != 0:
@@ -352,7 +356,7 @@
         """@return True if RFC 2683 IfOperStatus is UP (i.e., is able to pass
         packets).
         """
-        command = 'ip link show %s' % self._name
+        command = self._ns_exec + 'ip link show %s' % self._name
         result = self._run(command, ignore_status=True)
         if result.exit_status:
             return False
@@ -529,7 +533,8 @@
         #
         # We extract the 'mtu' value (in this example "1500")
         try:
-            result = self._run('ip addr show %s 2> /dev/null' % self._name)
+            result = self._run(self._ns_exec +
+                               'ip addr show %s 2> /dev/null' % self._name)
             address_info = result.stdout
         except error.CmdError as e:
             # The "ip" command will return non-zero if the interface does
@@ -609,7 +614,9 @@
     return [Interface(nic.strip()) for nic in os.listdir(DEVICE_INFO_ROOT)]
 
 
-def get_prioritized_default_route(host=None, interface_name_regex=None):
+def get_prioritized_default_route(host=None,
+                                  interface_name_regex=None,
+                                  namespace=None):
     """
     Query a local or remote host for its prioritized default interface
     and route.
@@ -621,7 +628,10 @@
     # Build a list of default routes, filtered by interface if requested.
     # Example command output: 'default via 172.23.188.254 dev eth0  metric 2'
     run = host.run if host is not None else utils.run
-    output = run('ip route show').stdout
+    command = 'ip route show'
+    if namespace:
+        command = 'ip netns exec %s ' % namespace + command
+    output = run(command).stdout
     output_regex_str = 'default\s+via\s+(\S+)\s+dev\s+(\S+)\s+metric\s+(\d+)'
     output_regex = re.compile(output_regex_str)
     defaults = []
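A brief sketch of the new network-namespace support, assuming a namespace named 'testns' and a veth interface already exist on the DUT (the names and the surrounding host object are illustrative):

from autotest_lib.client.common_lib.cros.network import interface

# Address and link queries for this interface now run via 'ip netns exec testns'.
veth = interface.Interface('veth0', host=host, netns='testns')

# The default-route lookup can likewise be scoped to a namespace.
route = interface.get_prioritized_default_route(host=host,
                                                interface_name_regex='veth.*',
                                                namespace='testns')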
diff --git a/client/common_lib/cros/network/iw_runner_unittest.py b/client/common_lib/cros/network/iw_runner_unittest.py
index 61daac1..ab6eed9 100755
--- a/client/common_lib/cros/network/iw_runner_unittest.py
+++ b/client/common_lib/cros/network/iw_runner_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/common_lib/cros/network/xmlrpc_datatypes.py b/client/common_lib/cros/network/xmlrpc_datatypes.py
index f29eff6..25f06a4 100644
--- a/client/common_lib/cros/network/xmlrpc_datatypes.py
+++ b/client/common_lib/cros/network/xmlrpc_datatypes.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/common_lib/cros/network/xmlrpc_security_types.py b/client/common_lib/cros/network/xmlrpc_security_types.py
index 83269b9..e23bffa 100644
--- a/client/common_lib/cros/network/xmlrpc_security_types.py
+++ b/client/common_lib/cros/network/xmlrpc_security_types.py
@@ -185,10 +185,13 @@
 class WPAConfig(SecurityConfig):
     """Abstracts security configuration for a WPA encrypted WiFi network."""
 
-    # We have the option of turning on WPA, WPA2, or both via a bitfield.
+    # We have the option of turning on combinations of WPA, WPA2, or WPA3 via a
+    # bitfield.
     MODE_PURE_WPA = 1
     MODE_PURE_WPA2 = 2
+    MODE_PURE_WPA3 = 4
     MODE_MIXED_WPA = MODE_PURE_WPA | MODE_PURE_WPA2
+    MODE_MIXED_WPA3 = MODE_PURE_WPA2 | MODE_PURE_WPA3
     MODE_DEFAULT = MODE_MIXED_WPA
 
     # WPA2 mandates the use of AES in CCMP mode.
@@ -221,7 +224,7 @@
         @param wpa_gtk_rekey_period int number of second between GTK rekeys.
         @param wpa_gmk_rekey_period int number of seconds between GMK rekeys.
                 The GMK is a key internal to hostapd used to generate GTK.
-                It is the 'master' key.
+                It is the 'main' key.
         @param use_strict_rekey bool True iff hostapd should refresh the GTK
                 whenever any client leaves the group.
         @param ft_mode int one of the FT_MODE_* in SecurityConfig.
@@ -249,11 +252,18 @@
 
     def get_hostapd_config(self):
         """@return dict fragment of hostapd configuration for security."""
-        if not self.wpa_mode:
+        mode = 0
+        # WPA2 and WPA3 are both RSN, so hostapd lumps these together for wpa=.
+        if self.wpa_mode & (self.MODE_PURE_WPA2 | self.MODE_PURE_WPA3):
+            mode |= self.MODE_PURE_WPA2
+        # WPA.
+        if self.wpa_mode & self.MODE_PURE_WPA:
+            mode |= self.MODE_PURE_WPA
+        if not mode:
             raise error.TestFail('Cannot configure WPA unless we know which '
                                  'mode to use.')
 
-        if self.MODE_PURE_WPA & self.wpa_mode and not self.wpa_ciphers:
+        if mode & self.MODE_PURE_WPA and not self.wpa_ciphers:
             raise error.TestFail('Cannot configure WPA unless we know which '
                                  'ciphers to use.')
 
@@ -261,12 +271,20 @@
             raise error.TestFail('Cannot configure WPA2 unless we have some '
                                  'ciphers.')
 
-        ret = {'wpa': self.wpa_mode,
-               'wpa_key_mgmt': 'WPA-PSK'}
-        if self.ft_mode == self.FT_MODE_PURE:
-            ret['wpa_key_mgmt'] = 'FT-PSK'
-        elif self.ft_mode == self.FT_MODE_MIXED:
-            ret['wpa_key_mgmt'] = 'WPA-PSK FT-PSK'
+        key_mgmt = []
+        if self.ft_mode & self.FT_MODE_NONE:
+            if self.wpa_mode & self.MODE_MIXED_WPA:
+                key_mgmt += ['WPA-PSK']
+            if self.wpa_mode & self.MODE_PURE_WPA3:
+                key_mgmt += ['SAE']
+        if self.ft_mode & self.FT_MODE_PURE:
+            if self.wpa_mode & self.MODE_MIXED_WPA:
+                key_mgmt += ['FT-PSK']
+            if self.wpa_mode & self.MODE_PURE_WPA3:
+                key_mgmt += ['FT-SAE']
+
+        ret = {'wpa': mode, 'wpa_key_mgmt': ' '.join(key_mgmt)}
+
         if len(self.psk) == 64:
             ret['wpa_psk'] = self.psk
         else:
@@ -299,15 +317,24 @@
         protos = []
         if self.wpa_mode & self.MODE_PURE_WPA:
             protos.append('WPA')
-        if self.wpa_mode & self.MODE_PURE_WPA2:
+        if self.wpa_mode & (self.MODE_PURE_WPA2 | self.MODE_PURE_WPA3):
             protos.append('RSN')
-        properties.update({'psk': '\\"%s\\"' % self.psk,
-                           'key_mgmt': 'WPA-PSK',
-                           'proto': ' '.join(protos)})
-        if self.ft_mode == self.FT_MODE_PURE:
-            properties['key_mgmt'] = 'FT-PSK'
-        elif self.ft_mode == self.FT_MODE_MIXED:
-            properties['key_mgmt'] = 'WPA-PSK FT-PSK'
+        key_mgmt = []
+        if self.ft_mode & self.FT_MODE_NONE:
+            if self.wpa_mode & self.MODE_MIXED_WPA:
+                key_mgmt += ['WPA-PSK']
+            if self.wpa_mode & self.MODE_PURE_WPA3:
+                key_mgmt += ['SAE']
+        if self.ft_mode & self.FT_MODE_PURE:
+            if self.wpa_mode & self.MODE_MIXED_WPA:
+                key_mgmt += ['FT-PSK']
+            if self.wpa_mode & self.MODE_PURE_WPA3:
+                key_mgmt += ['FT-SAE']
+        properties.update({
+                'psk': '\\"%s\\"' % self.psk,
+                'key_mgmt': ' '.join(key_mgmt),
+                'proto': ' '.join(protos)
+        })
         return properties
 
 
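As an aside, here is a minimal standalone sketch of the bitfield-to-hostapd mapping that the new get_hostapd_config() above implements: WPA2 and WPA3 both set the RSN bit of wpa=, while wpa_key_mgmt picks WPA-PSK/SAE (or their FT- variants) depending on the fast-transition mode. The MODE_* constants mirror WPAConfig above; FT_MODE_NONE=1 and FT_MODE_PURE=2 are assumptions about SecurityConfig's flags, and the helper name is ours, not part of the module.

    # Sketch only -- not part of xmlrpc_security_types.py.
    MODE_PURE_WPA, MODE_PURE_WPA2, MODE_PURE_WPA3 = 1, 2, 4
    FT_MODE_NONE, FT_MODE_PURE = 1, 2  # assumed SecurityConfig flag values

    def hostapd_security_fragment(wpa_mode, ft_mode=FT_MODE_NONE):
        """Return the wpa=/wpa_key_mgmt= fragment hostapd expects."""
        wpa = 0
        if wpa_mode & (MODE_PURE_WPA2 | MODE_PURE_WPA3):
            wpa |= MODE_PURE_WPA2          # WPA2 and WPA3 are both RSN.
        if wpa_mode & MODE_PURE_WPA:
            wpa |= MODE_PURE_WPA
        key_mgmt = []
        if ft_mode & FT_MODE_NONE:
            if wpa_mode & (MODE_PURE_WPA | MODE_PURE_WPA2):
                key_mgmt.append('WPA-PSK')
            if wpa_mode & MODE_PURE_WPA3:
                key_mgmt.append('SAE')
        if ft_mode & FT_MODE_PURE:
            if wpa_mode & (MODE_PURE_WPA | MODE_PURE_WPA2):
                key_mgmt.append('FT-PSK')
            if wpa_mode & MODE_PURE_WPA3:
                key_mgmt.append('FT-SAE')
        return {'wpa': wpa, 'wpa_key_mgmt': ' '.join(key_mgmt)}

    # WPA2/WPA3 mixed mode ends up as RSN with both PSK and SAE key management:
    # hostapd_security_fragment(MODE_PURE_WPA2 | MODE_PURE_WPA3)
    #   -> {'wpa': 2, 'wpa_key_mgmt': 'WPA-PSK SAE'}
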
diff --git a/client/common_lib/cros/path_utils.py b/client/common_lib/cros/path_utils.py
index caef195..3ebd468 100644
--- a/client/common_lib/cros/path_utils.py
+++ b/client/common_lib/cros/path_utils.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/common_lib/cros/policy.py b/client/common_lib/cros/policy.py
index d22dc7f..487cb89 100644
--- a/client/common_lib/cros/policy.py
+++ b/client/common_lib/cros/policy.py
@@ -2,7 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import dbus, gobject, os, sys
+import dbus, os, sys
+# AU tests use ToT client code but may run a ToT-3 client image, so fall back to gobject when gi is unavailable.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 
 import common
 from autotest_lib.client.common_lib import error
@@ -130,7 +135,7 @@
 
     @raises error.TestFail if policy push failed.
     """
-    listener = session_manager.OwnershipSignalListener(gobject.MainLoop())
+    listener = session_manager.OwnershipSignalListener(GObject.MainLoop())
     listener.listen_for_new_policy()
     descriptor = session_manager.make_device_policy_descriptor()
     sm.StorePolicyEx(descriptor,
diff --git a/client/common_lib/cros/power_cycle_usb_util.py b/client/common_lib/cros/power_cycle_usb_util.py
index 3c907d7..10c36f7 100755
--- a/client/common_lib/cros/power_cycle_usb_util.py
+++ b/client/common_lib/cros/power_cycle_usb_util.py
@@ -1,17 +1,22 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Power cycle a usb port on DUT(device under test)."""
 
+from __future__ import absolute_import
+from __future__ import division
 from __future__ import print_function
 
-from autotest_lib.client.common_lib.cros.cfm.usb import usb_port_manager
-
 import logging
 import os
+from six.moves import zip
 import time
 
+from autotest_lib.client.common_lib.cros.cfm.usb import usb_port_manager
+
+
 TOKEN_NEW_BUS = '/:  '
 TOKEN_ROOT_DEVICE = '\n    |__ '
 
diff --git a/client/common_lib/cros/power_cycle_usb_util_unittest.py b/client/common_lib/cros/power_cycle_usb_util_unittest.py
index ee64147..ec78135 100755
--- a/client/common_lib/cros/power_cycle_usb_util_unittest.py
+++ b/client/common_lib/cros/power_cycle_usb_util_unittest.py
@@ -1,11 +1,11 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import unittest
 
-import power_cycle_usb_util
+from autotest_lib.client.common_lib.cros import power_cycle_usb_util
 
 
 class PowerCycleUsbUtilTest(unittest.TestCase):
@@ -58,4 +58,4 @@
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()
diff --git a/client/common_lib/cros/power_load_util.py b/client/common_lib/cros/power_load_util.py
index baaf38e..3d23199 100644
--- a/client/common_lib/cros/power_load_util.py
+++ b/client/common_lib/cros/power_load_util.py
@@ -2,6 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import logging
+import random
 import tempfile
 
 from autotest_lib.client.common_lib import file_utils
@@ -9,8 +11,8 @@
 _URL_BASE = ('https://sites.google.com/a/chromium.org/dev/chromium-os'
              '/testing/power-testing/pltp')
 _PLTG_URL = _URL_BASE + '/pltg'
-_PLTU_URL = _URL_BASE + '/pltu'
-_PLTP_URL = _URL_BASE + '/pltp'
+_PLTU_URL = _URL_BASE + '/pltu_rand'
+_PLTP_URL = _URL_BASE + '/pltp_rand'
 _MEETU_URL = _URL_BASE + '/meetu'
 _MEETP_URL = _URL_BASE + '/meetp'
 
@@ -26,7 +28,8 @@
     """
     with tempfile.NamedTemporaryFile() as named_file:
         file_utils.download_file(url, named_file.name)
-        return named_file.read().rstrip()
+        # Need decode() since the tempfile is opened in binary mode.
+        return named_file.read().rstrip().decode()
 
 
 def use_gaia_login():
@@ -37,7 +40,10 @@
 
 def get_username():
     """Returns username for load testing."""
-    return _get_content(_PLTU_URL)
+    names = _get_content(_PLTU_URL).splitlines()
+    name = random.choice(names).rstrip()
+    logging.info('power_load_util.get_username: %s', name)
+    return name
 
 
 def get_password():
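
For context, get_username() now expects the pltu_rand blob to be a newline-separated list and picks one entry at random; because NamedTemporaryFile is opened in binary mode, the bytes must be decoded first. A hedged sketch of that pattern with a placeholder downloader (urlretrieve stands in for file_utils.download_file, and the URL is illustrative only):

    import logging
    import random
    import tempfile
    import urllib.request  # placeholder for file_utils.download_file

    def pick_random_line(url):
        """Download a newline-separated list and return one random entry."""
        with tempfile.NamedTemporaryFile() as named_file:
            urllib.request.urlretrieve(url, named_file.name)
            # NamedTemporaryFile is binary by default, so decode() the bytes.
            names = named_file.read().rstrip().decode().splitlines()
        name = random.choice(names).rstrip()
        logging.info('picked: %s', name)
        return name

    # e.g. pick_random_line('https://example.com/usernames.txt')
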
diff --git a/client/common_lib/cros/retry.py b/client/common_lib/cros/retry.py
index 386305e..1678562 100644
--- a/client/common_lib/cros/retry.py
+++ b/client/common_lib/cros/retry.py
@@ -1,10 +1,16 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import random
 import signal
+import six
 import sys
 import threading
 import time
@@ -253,7 +259,7 @@
             # Raise the cached exception with original backtrace.
             if exception_to_raise:
                 raise exception_to_raise('%s: %s' % (exc_info[0], exc_info[1]))
-            raise exc_info[0], exc_info[1], exc_info[2]
+            six.reraise(exc_info[0], exc_info[1], exc_info[2])
 
 
         return func_retry  # true decorator
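
The deleted `raise exc_info[0], exc_info[1], exc_info[2]` is Python 2-only syntax; six.reraise() is the portable replacement that re-raises the cached exception with its original traceback. A small illustrative sketch (the function name is ours, not retry.py's):

    import sys
    import six

    def run_then_reraise(func):
        """Run func, cache any exception, and re-raise it with its traceback."""
        exc_info = None
        try:
            func()
        except Exception:
            exc_info = sys.exc_info()
        # ... cleanup or retries could happen here ...
        if exc_info:
            # Equivalent to the old py2 `raise type, value, traceback` statement.
            six.reraise(exc_info[0], exc_info[1], exc_info[2])

    # run_then_reraise(lambda: 1 / 0)  # ZeroDivisionError, original traceback kept
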
diff --git a/client/common_lib/cros/retry_unittest.py b/client/common_lib/cros/retry_unittest.py
index 0282102..8f8a5c1 100755
--- a/client/common_lib/cros/retry_unittest.py
+++ b/client/common_lib/cros/retry_unittest.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python2
-
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,19 +5,18 @@
 """Unit tests for client/common_lib/cros/retry.py."""
 
 import itertools
-import mox
+import signal
 import time
 import unittest
-import signal
-
-import mock
+from unittest import mock
 
 import common
+
 from autotest_lib.client.common_lib.cros import retry
 from autotest_lib.client.common_lib import error
 
 
-class RetryTest(mox.MoxTestBase):
+class RetryTest(unittest.TestCase):
     """Unit tests for retry decorators.
 
     @var _FLAKY_FLAG: for use in tests that need to simulate random failures.
@@ -39,20 +36,19 @@
         self._time_mock = patcher.start()
         self.addCleanup(patcher.stop)
 
-
     def testRetryDecoratorSucceeds(self):
-        """Tests that a wrapped function succeeds without retrying."""
+        """Test that a wrapped function succeeds without retrying."""
         @retry.retry(Exception)
         def succeed():
             return True
         self.assertTrue(succeed())
         self.assertFalse(self._sleep_mock.called)
 
-
     def testRetryDecoratorFlakySucceeds(self):
-        """Tests that a wrapped function can retry and succeed."""
+        """Test that a wrapped function can retry and succeed."""
         delay_sec = 10
         self._time_mock.side_effect = itertools.count(delay_sec)
+
         @retry.retry(Exception, delay_sec=delay_sec)
         def flaky_succeed():
             if self._FLAKY_FLAG:
@@ -61,48 +57,43 @@
             raise Exception()
         self.assertTrue(flaky_succeed())
 
-
     def testRetryDecoratorFails(self):
-        """Tests that a wrapped function retries til the timeout, then fails."""
+        """Test that a wrapped function retries til the timeout, then fails."""
         delay_sec = 10
         self._time_mock.side_effect = itertools.count(delay_sec)
+
         @retry.retry(Exception, delay_sec=delay_sec)
         def fail():
             raise Exception()
         self.assertRaises(Exception, fail)
 
-
     def testRetryDecoratorRaisesCrosDynamicSuiteException(self):
-        """Tests that dynamic_suite exceptions raise immediately, no retry."""
+        """Test that dynamic_suite exceptions raise immediately, no retry."""
         @retry.retry(Exception)
         def fail():
             raise error.ControlFileNotFound()
         self.assertRaises(error.ControlFileNotFound, fail)
 
 
-
-
 class ActualRetryTest(unittest.TestCase):
     """Unit tests for retry decorators with real sleep."""
 
     def testRetryDecoratorFailsWithTimeout(self):
-        """Tests that a wrapped function retries til the timeout, then fails."""
+        """Test that a wrapped function retries til the timeout, then fails."""
         @retry.retry(Exception, timeout_min=0.02, delay_sec=0.1)
         def fail():
             time.sleep(2)
             return True
         self.assertRaises(error.TimeoutException, fail)
 
-
     def testRetryDecoratorSucceedsBeforeTimeout(self):
-        """Tests that a wrapped function succeeds before the timeout."""
+        """Test that a wrapped function succeeds before the timeout."""
         @retry.retry(Exception, timeout_min=0.02, delay_sec=0.1)
         def succeed():
             time.sleep(0.1)
             return True
         self.assertTrue(succeed())
 
-
     def testRetryDecoratorSucceedsWithExistingSignal(self):
         """Tests that a wrapped function succeeds before the timeout and
         previous signal being restored."""
@@ -120,34 +111,26 @@
             time.sleep(1.5)
 
         def testHandler(signum, frame):
-            """
-            Register a handler for the timeout.
-            """
+            """Register a handler for the timeout."""
             raise TestTimeoutException('Expected timed out.')
 
         signal.signal(signal.SIGALRM, testHandler)
         signal.alarm(1)
         self.assertRaises(TestTimeoutException, testFunc)
 
-
     def testRetryDecoratorWithNoAlarmLeak(self):
         """Tests that a wrapped function throws exception before the timeout
         and no signal is leaked."""
-
         def testFunc():
             @retry.retry(Exception, timeout_min=0.06, delay_sec=0.1)
             def fail():
                 time.sleep(0.1)
                 raise Exception()
 
-
             def testHandler(signum, frame):
-                """
-                Register a handler for the timeout.
-                """
+                """Register a handler for the timeout."""
                 self.alarm_leaked = True
 
-
             # Set handler for signal.SIGALRM to catch any leaked alarm.
             self.alarm_leaked = False
             signal.signal(signal.SIGALRM, testHandler)
diff --git a/client/common_lib/cros/site_eap_certs.py b/client/common_lib/cros/site_eap_certs.py
index 0dd52dd..946d787 100644
--- a/client/common_lib/cros/site_eap_certs.py
+++ b/client/common_lib/cros/site_eap_certs.py
@@ -184,98 +184,89 @@
 # The following certs are generated with a different CA (both
 # the private key and the Common Name of the CA are different)
 ca_cert_2 = """-----BEGIN CERTIFICATE-----
-MIIDOjCCAqOgAwIBAgIJAMMPy9yYn8UZMA0GCSqGSIb3DQEBBQUAMGsxCzAJBgNV
-BAYTAlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBW
-aWV3MS8wLQYDVQQDEyZub3QtY2hyb21lbGFiLWNlcnQtcm9vdC5tdHYuZ29vZ2xl
-LmNvbTAeFw0xMTAzMTYwMDExMzFaFw0yMTAzMTMwMDExMzFaMGsxCzAJBgNVBAYT
-AlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3
-MS8wLQYDVQQDEyZub3QtY2hyb21lbGFiLWNlcnQtcm9vdC5tdHYuZ29vZ2xlLmNv
-bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAvm3+VEApDTqXVTjGedO7KRXu
-5XpjsSUnXPgBjRr3adkEpBs5r86X1csf0V6exKRmLNpDFeacI7OzD2QLtOoO3Dn5
-6Km682voZcsRB1DE8v1C21+KeiibPaOzzjkMyOz+a/JyKjtkxR0a+gAUA/K10eup
-9/OmOrnsVR5hu4Fo5YcCAwEAAaOB5TCB4jAdBgNVHQ4EFgQUevdn8FuR7u26wCbB
-r/+GpMZQwqMwgZ0GA1UdIwSBlTCBkoAUevdn8FuR7u26wCbBr/+GpMZQwqOhb6Rt
-MGsxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1N
-b3VudGFpbiBWaWV3MS8wLQYDVQQDEyZub3QtY2hyb21lbGFiLWNlcnQtcm9vdC5t
-dHYuZ29vZ2xlLmNvbYIJAMMPy9yYn8UZMAwGA1UdEwQFMAMBAf8wEwYDVR0lBAww
-CgYIKwYBBQUHAwMwDQYJKoZIhvcNAQEFBQADgYEAjYtsd2a0haPOq3wbrOhLQQuu
-TlRIomjZUcUsFulleNZtLC5nsRTTu4XhFCix9ivVQGQGKBpMvNYDwE2lONui/1Vf
-3FpYsQWpHqD2RbhhR0aAXrWyt7n8jn1d07A5LE3D/s/dYYohbrqHfe0rrf7gkauL
-6AgVE8VyR7FRuU4/zKk=
+MIICxzCCAjCgAwIBAgIUJE5XXZBXcmCk7JQffAQ2t+WmGdEwDQYJKoZIhvcNAQEF
+BQAwbzELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcM
+DU1vdW50YWluIFZpZXcxMzAxBgNVBAMMKmNocm9tZWxhYi13aWZpLXRlc3RiZWQt
+cm9vdC5tdHYuZ29vZ2xlLmNvbTAeFw0yMTAzMTUwNDA5MjNaFw0zMTAzMTMwNDA5
+MjNaMG8xCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRYwFAYDVQQH
+DA1Nb3VudGFpbiBWaWV3MTMwMQYDVQQDDCpjaHJvbWVsYWItd2lmaS10ZXN0YmVk
+LXJvb3QubXR2Lmdvb2dsZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB
+ALj2BlvARAdemtZZSC04zfbFDIrKGwrd0C+JVcZA7s+s9YetU1q+PBVqfQVx2kJa
+NWkE1Ofksq+BUO4YR8PDD2afwfHAclLC4+mD+EWxflwoECFflQzQAtX1iSSmSb/z
+Woj0axYtpZawjdj1aBP3nOWqi8eTzplcI7m73xfMSSPnAgMBAAGjYDBeMB0GA1Ud
+DgQWBBQDT7/NH1pf9+137heqjOLqJNkZKzAfBgNVHSMEGDAWgBQDT7/NH1pf9+13
+7heqjOLqJNkZKzAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjANBgkqhkiG
+9w0BAQUFAAOBgQBU8zITx9H84PQBc0sqpjVGI8HlJrLFhgdf4G1k3ldSAwXYvuhL
+Oftu1WyEuEQEuu8r0QmY8aVBylIJpn/S7YWP11O8/FO2ztWD2MTl0fr1qAdv1AZc
+sIjPdnbvXoyoy4MKJ97M7kIWBvN8vkUbcP8Rtm13dKSyPVNd6Ufc/JAg3g==
 -----END CERTIFICATE-----
 """
 server_cert_2 = """-----BEGIN CERTIFICATE-----
-MIIDVDCCAr2gAwIBAgIDEAABMA0GCSqGSIb3DQEBBAUAMGsxCzAJBgNVBAYTAlVT
-MRMwEQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MS8w
-LQYDVQQDEyZub3QtY2hyb21lbGFiLWNlcnQtcm9vdC5tdHYuZ29vZ2xlLmNvbTAe
-Fw0xMTAzMTYwMDExMzFaFw0yMTAzMTMwMDExMzFaMG0xCzAJBgNVBAYTAlVTMRMw
-EQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MTEwLwYD
-VQQDEyhub3QtY2hyb21lbGFiLWNlcnQtc2VydmVyLm10di5nb29nbGUuY29tMIGf
-MA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDL7Jh2grPIe+Yoz2moIa84iXUbvnIR
-tSfYEak46xvvTO+XgduEKpsUj6LGGL/uzog5xxkbQ7TrtzEnPpwQCFulQoRC8liD
-8DDkHIBunOtrYpvvWo6Yk+wWlkTjw7AjVk+iIXVe9Cwm7D3aEWlw5qrjBAx+DmFy
-pCAIt20HK02V0wIDAQABo4IBAjCB/zAJBgNVHRMEAjAAMBEGCWCGSAGG+EIBAQQE
-AwIGQDAdBgNVHQ4EFgQUvrWFiKmCwCOJ8gkMXcfdD8X/eHQwgZ0GA1UdIwSBlTCB
-koAUevdn8FuR7u26wCbBr/+GpMZQwqOhb6RtMGsxCzAJBgNVBAYTAlVTMRMwEQYD
-VQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MS8wLQYDVQQD
-EyZub3QtY2hyb21lbGFiLWNlcnQtcm9vdC5tdHYuZ29vZ2xlLmNvbYIJAMMPy9yY
-n8UZMAsGA1UdDwQEAwIFoDATBgNVHSUEDDAKBggrBgEFBQcDATANBgkqhkiG9w0B
-AQQFAAOBgQAWtVBFEdbyj93qqGRXXJm1Yf9roxYZUDiOnl6us00f2sIAp3cp5Rq5
-kkQj9y5QfBq6ct5mAZQhCXw4enhQ/b0J6zR/Lh7nBaBzDOyT28+L2bM53y1HnCOr
-hyUyhm/WsshH/f63nzyTliIV4cPDX+Od6XUX9E7gFrYxxnm1AQmGZA==
+MIICxTCCAi6gAwIBAgIBAjANBgkqhkiG9w0BAQsFADBvMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEzMDEG
+A1UEAwwqY2hyb21lbGFiLXdpZmktdGVzdGJlZC1yb290Lm10di5nb29nbGUuY29t
+MB4XDTIxMDMxNTA0MDkyM1oXDTMxMDMxMzA0MDkyM1owcTELMAkGA1UEBhMCVVMx
+EzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDU1vdW50YWluIFZpZXcxNTAz
+BgNVBAMMLGNocm9tZWxhYi13aWZpLXRlc3RiZWQtc2VydmVyLm10di5nb29nbGUu
+Y29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDGTK8KIl0SgzmR24Z+BSl3
+I/hXasAEBp8lTsL8y2rfWJ62eNsA/Nq2R1UiTijIiVQ9bKyqWnYU6rr+Y2moKrI/
+44A566B1AOhSHF8X6M490bPzBrbv9mGgm+cN9oHp9ClilHZAauBQt5t7iZAoAYr0
+37p+3NdiH5m81oR6zQc+DwIDAQABo28wbTAdBgNVHQ4EFgQUREgHP7l+/Kyo/WtL
+9Q7f6oc6S/AwHwYDVR0jBBgwFoAUA0+/zR9aX/ftd+4Xqozi6iTZGSswCQYDVR0T
+BAIwADALBgNVHQ8EBAMCBaAwEwYDVR0lBAwwCgYIKwYBBQUHAwEwDQYJKoZIhvcN
+AQELBQADgYEAMkf4cfuZsTHP7Xat+gER95S76owtooQ0tUtEgjvye5rHcDIIUWhR
+gmZu+S2uwVIhpXkM67UlqtVrT1GfN5lEkYxQKra6f8xn8tyPcRr0Ko2l6sPuZ6Vr
+91mZvmi58PuxqvL1IqpbcunD/0/aymWu0ELmt7GVeCzW47mLMtsvYxs=
 -----END CERTIFICATE-----
 """
 server_private_key_2 = """-----BEGIN RSA PRIVATE KEY-----
-MIICXQIBAAKBgQDL7Jh2grPIe+Yoz2moIa84iXUbvnIRtSfYEak46xvvTO+XgduE
-KpsUj6LGGL/uzog5xxkbQ7TrtzEnPpwQCFulQoRC8liD8DDkHIBunOtrYpvvWo6Y
-k+wWlkTjw7AjVk+iIXVe9Cwm7D3aEWlw5qrjBAx+DmFypCAIt20HK02V0wIDAQAB
-AoGALdY5kvHgDWFkI6ozeppPs2qaSykspLROh2+41NVsCwcFF84VlIirIR0EHnjz
-s27zCeGp0AoRAabEwqh9FdF7H6xQ22v8u/mlN6drfI1n6KyyV9UnW46lWL2b40YQ
-MbHIfUKpli979omdPXccCYNzPf8SDy5qepKf6sq773iVEWkCQQD5K3V8W31pB+IU
-dFYS2lzx8SRbBPQuMBVcptiwdi2h5IReA0ldWxGc8sEtKf8djZHe6EB/cfoi5rhE
-2sCvATh1AkEA0YOgl8ByAZq+azAoBnhRJRseAXsRGUV+zTNTNp4vTAGcmuwMSl3v
-Iw1Q4OKUz9Oo71Za62BpoIxo/eXp4oaMJwJBAJI0bPiuWnUhugfUh5kCZl1U6Mc7
-refYMQCvqiBJB9eh1gWwGgFcaYZVbwKITPjTVA+e6hGeGG8YQHGuhjdqGO0CQQC6
-z1Wq2XIN0i9FHawikLcxkatAgL3vPZLhXoks4wCjEbDURfJzgKwQIfhifEVZxY0U
-0nfA9bdY6rlW4eCYFYXzAkAjNAgtfelk9b9nT7JAIodaCXKyoSaID0/etu+NCqlt
-opbkSIzq136j+PzIJX9RLZ0VXiIwM7MtOEvxZCBjAo8M
+MIICWwIBAAKBgQDGTK8KIl0SgzmR24Z+BSl3I/hXasAEBp8lTsL8y2rfWJ62eNsA
+/Nq2R1UiTijIiVQ9bKyqWnYU6rr+Y2moKrI/44A566B1AOhSHF8X6M490bPzBrbv
+9mGgm+cN9oHp9ClilHZAauBQt5t7iZAoAYr037p+3NdiH5m81oR6zQc+DwIDAQAB
+AoGAVuNK3znnZjExjIOQDq/cV2PfcQ5JTHpKSWKVdYb8MRj3kiSqcWhZvPzMhNeS
+bywdSTzQ+2Pf0ZJ4lPaiWRk8OJPfWrF4QdjUc33Lx5kshS602173o+tfawaDOgUi
+s6PRLA5i8fxnNdim44YRi275mq8weAp5K/nAN5IXXU+GpiECQQD4x1F+GtpXxz9+
+ufaWmyVzTgUiWDM0XHnr9ahnLMTUvMRCdwzcMlkeo5nbNqQzcclidSALugktmrzn
+Jubtmgo9AkEAzA5BCNQh8X9jr4RYukDNmPf46ZXOPWh6L0yAjQtAYT6eZUMO57X4
+dGABhDnoN7+UCT4IB+mehtZfKUAgzyqKOwJAdiKoRxrXTlGWjEYgm7oG2a9V7s41
+WosDdpJMKtpuiZsuE2XXHIVoHo2P1TIoXSmOeQuncQohq59MTvQBOxkn/QJANAM6
+Hob78/O1tCp291rHTUN/5gJyZIK6Ck5kwg7pJSBx/Xu/U961auyB1nNwNq2VjVmL
+cIDjEJmZ/gjDlCrSuwJAaZ8gqOSwOhoeLX4ki7I1eFc/Q/jnb6CFQjZ+Nb38sYDv
+/O79yd2xBUAsL4sNkbukmMFEuK85bhNieUWe7O0B+g==
 -----END RSA PRIVATE KEY-----
 """
 client_cert_2 = """-----BEGIN CERTIFICATE-----
-MIIDQDCCAqmgAwIBAgIDEAACMA0GCSqGSIb3DQEBBAUAMGsxCzAJBgNVBAYTAlVT
-MRMwEQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MS8w
-LQYDVQQDEyZub3QtY2hyb21lbGFiLWNlcnQtcm9vdC5tdHYuZ29vZ2xlLmNvbTAe
-Fw0xMTAzMTYwMDExMzJaFw0yMTAzMTMwMDExMzJaMG0xCzAJBgNVBAYTAlVTMRMw
-EQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MTEwLwYD
-VQQDEyhub3QtY2hyb21lbGFiLWNlcnQtY2xpZW50Lm10di5nb29nbGUuY29tMIGf
-MA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDxOl9JFvH8aeNj07pPF6PtENx90Lm9
-uo3XwxwsHEIqfmr3rmbq8a2OerCbVAeCqvFOdswXjfNgraYjQntwzjoTdp23Cvon
-M7vb8mO8vp3xpYb6D6IdzJZjQUnsd5jPWzMEIp7oBj8UTWsxVDGy5QI6SB64O8dn
-tsUaLOdIdqE1QwIDAQABo4HvMIHsMAkGA1UdEwQCMAAwHQYDVR0OBBYEFKnxsjiH
-G/CQ7/E07dODtCVzl25CMIGdBgNVHSMEgZUwgZKAFHr3Z/Bbke7tusAmwa//hqTG
-UMKjoW+kbTBrMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQG
-A1UEBxMNTW91bnRhaW4gVmlldzEvMC0GA1UEAxMmbm90LWNocm9tZWxhYi1jZXJ0
-LXJvb3QubXR2Lmdvb2dsZS5jb22CCQDDD8vcmJ/FGTALBgNVHQ8EBAMCBaAwEwYD
-VR0lBAwwCgYIKwYBBQUHAwIwDQYJKoZIhvcNAQEEBQADgYEAcJGyJvWHLofcMr70
-k8UO5iaiYDgoR7NbsXwo/rAUktDgeInYKA2KJhiQREgSquKWEaCVK5tZkzGla+2G
-7JtT1s6aAH6iLBiqH8Z+ptBJORpd+Dkrggp003g/WZcYSJy9u3iKRiAlXf+C3YqT
-jOsSVKrlZHZlvjDSRl1Fv+ODljk=
+MIICxTCCAi6gAwIBAgIBBDANBgkqhkiG9w0BAQsFADBvMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEzMDEG
+A1UEAwwqY2hyb21lbGFiLXdpZmktdGVzdGJlZC1yb290Lm10di5nb29nbGUuY29t
+MB4XDTIxMDMxNTA0MDkyM1oXDTMxMDMxMzA0MDkyM1owcTELMAkGA1UEBhMCVVMx
+EzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDU1vdW50YWluIFZpZXcxNTAz
+BgNVBAMMLGNocm9tZWxhYi13aWZpLXRlc3RiZWQtY2xpZW50Lm10di5nb29nbGUu
+Y29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8l/vKjyCiaeFGDg3wsiGb
+7q2Q3nivANib0WD9ks1bgh5BUCrn6N3TSuopP4ComsxcWcJD2BP7oKj8ZAL1LFPd
+UriSXAfN7/kZPyzoSE7S4zQRcsPOLJU6L5NyY0i6MkmRStrZpv+S5vQT6WKr+Xud
+cXw2oWN4tEg0K7PBR0wcCQIDAQABo28wbTAdBgNVHQ4EFgQUagtrm9EKcYNSmUHC
+PVQZDDX5sOMwHwYDVR0jBBgwFoAUA0+/zR9aX/ftd+4Xqozi6iTZGSswCQYDVR0T
+BAIwADALBgNVHQ8EBAMCBaAwEwYDVR0lBAwwCgYIKwYBBQUHAwIwDQYJKoZIhvcN
+AQELBQADgYEAcdKKxc4Zl+of2DULOqk7VsIIdjHfGrivs5fROMeM+oQoAE3iwfuz
+1/QJqh3DnTxHCEa7q5Rn62lhDxLrWJM2RANfKbXL8RWia3Yawi0EhTbteUvssWlx
+5u9ayCfnEOpwmcYx0Klzr9hU1Q4r2qMLBb4u8xiWDUwkY4Y8rfh9/XU=
 -----END CERTIFICATE-----
 """
 client_private_key_2 = """-----BEGIN RSA PRIVATE KEY-----
-MIICXgIBAAKBgQDxOl9JFvH8aeNj07pPF6PtENx90Lm9uo3XwxwsHEIqfmr3rmbq
-8a2OerCbVAeCqvFOdswXjfNgraYjQntwzjoTdp23CvonM7vb8mO8vp3xpYb6D6Id
-zJZjQUnsd5jPWzMEIp7oBj8UTWsxVDGy5QI6SB64O8dntsUaLOdIdqE1QwIDAQAB
-AoGACHVdhfiR/6HO4V85NzDDBLsEoDzDZo22D4y93iYXg9nu8kRhQ3e5+DcBHlEx
-+jj4QSIVQfYuRIOSQcZBAdGZY6flibiDyQVa4hYkAZ50q2joXBsO4aVH+vzgS3ER
-hCLAXlIjVH9Hq8zWzXti5mka/afbwu39PegknrCoFbr6c7ECQQD6f7TxViVqWVn+
-19gKwPPi375ThtNW2wwSOxCFceiEVZX1j4EA6puOQN2EieK4XdBN/S//Lw4c/p2R
-kzsVImy3AkEA9oaLkDoBNsqeWyOmCytuxK8kLr1polV9aRENQZXyLCRiiKu9LWpo
-pevoLTCi8eOa5iTJUqIqNljGb1mUR9pH1QJBAJ5CotG6iGb5TpdoTND2eN9EVp20
-177PI4A9GsIEZk99NwHuiSVIE8E7D/jVmIy8kIdFR9akJvmAj5JSf4hpuF8CQQCW
-mbMAtK6xLi53Ntvkzu+oL+ZGt2ekx90JJmWWv2dmGAQpweCbsfmkAu2fTar9YjHq
-jmZ+mJp5ANqYg3zWOD7RAkEAtIt9Whx/izCLLJKZGY8QaeN1+SCyg2Z5Rw8qiPbB
-fvnf1pvhiHBjmbJLYOPR9hslU4pg0HzRMaDaG10Sp9047A==
+MIICXgIBAAKBgQC8l/vKjyCiaeFGDg3wsiGb7q2Q3nivANib0WD9ks1bgh5BUCrn
+6N3TSuopP4ComsxcWcJD2BP7oKj8ZAL1LFPdUriSXAfN7/kZPyzoSE7S4zQRcsPO
+LJU6L5NyY0i6MkmRStrZpv+S5vQT6WKr+XudcXw2oWN4tEg0K7PBR0wcCQIDAQAB
+AoGAIp8IAyx5QjeLum+UEY6x6s1LIaAlqYLPQPOUCZjsiBL/+N99sOsn5cUH7aCR
+vuIECCQWLJw7NuNMgmDdxiJoFpTlzNbbIwMnALQWxYJaAKvp9R42nO7DqSeOtxls
+D0GSJGtr92cdgqSPI8XeqxK0JqWpTtJNfMTHWD7HeB2cfr0CQQD5vonnOOOQ8DR5
+thdIRU7c/OaxuL192wkswsOw70zS3de42WdjBGJJ4AZoLoVRSXz+ggzFl2xHAXl0
+621RZH4bAkEAwVFSjgt/cPjaUlv+MVl6tMUSXY0i/sOOP4Ztn+BWwv68wAXFA+9o
+Jg1CBP4VejrSd8l9V4YwrfXJM7FOmMGgqwJBANm2E2uA+UiBTY2IglVbLfuRb94f
+6PiM+DuP8Z4NjhAImiSnSOxVhwVViOBLJacOBCut3UpmzVo8289j6GFa1F8CQQCQ
+ONhrKn7ITI+sU/8+1++of0HlU08hW8l68RJF7TstPJiadXN3LRu5D7kKOnjReKLe
+RQ7ixjT5A14413W4lvQDAkEA7KEjzXOB7fMuBXs2PdcVcXzIUTyZFz1EQTf4r1Dh
+icTOSZFRHreNuAmApg6B9s5depqncuR91kCaZFxxgY6ePA==
 -----END RSA PRIVATE KEY-----
 """
 dh1024_pem_key_1 = """-----BEGIN DH PARAMETERS-----
diff --git a/client/common_lib/cros/string_utils_unittest.py b/client/common_lib/cros/string_utils_unittest.py
index aa200d8..543731b 100755
--- a/client/common_lib/cros/string_utils_unittest.py
+++ b/client/common_lib/cros/string_utils_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # -*- coding: utf-8 -*-
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -9,8 +9,8 @@
 from __future__ import division
 from __future__ import print_function
 
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.client.common_lib.cros import string_utils
diff --git a/client/common_lib/cros/system_metrics_collector.py b/client/common_lib/cros/system_metrics_collector.py
index 3388562..2c2ad35 100644
--- a/client/common_lib/cros/system_metrics_collector.py
+++ b/client/common_lib/cros/system_metrics_collector.py
@@ -222,6 +222,27 @@
     def collect_metric(self):
         self._store_sample(self.system_facade.get_current_temperature_max())
 
+
+class EnergyUsageMetric(Metric):
+    """
+    Metric that collects the amount of energy used.
+    """
+
+    def __init__(self, system_facade):
+        super(EnergyUsageMetric, self).__init__('energy_usage',
+                                                units='microjoules')
+        self.system_facade = system_facade
+        self.initial_energy = int(self.system_facade.get_energy_usage())
+
+    def collect_metric(self):
+        self._store_sample(
+                int(self.system_facade.get_energy_usage()) -
+                self.initial_energy)
+
+    def _aggregate(self, samples):
+        return samples[-1]
+
+
 def create_default_metric_set(system_facade):
     """
     Creates the default set of metrics.
@@ -238,15 +259,13 @@
     peak_mem = PeakMetric.from_metric(mem)
     peak_temperature = PeakMetric.from_metric(temperature)
     sum_storage_written_amount = SumMetric.from_metric(storage_written_amount)
-    return [cpu,
-            mem,
-            file_handles,
-            storage_written_amount,
-            temperature,
-            peak_cpu,
-            peak_mem,
-            peak_temperature,
-            sum_storage_written_amount]
+    energy = EnergyUsageMetric(system_facade)
+    return [
+            cpu, mem, file_handles, storage_written_amount, temperature,
+            peak_cpu, peak_mem, peak_temperature, sum_storage_written_amount,
+            energy
+    ]
+
 
 class SystemMetricsCollector(object):
     """
@@ -257,7 +276,7 @@
         Initialize with facade and metric classes.
 
         @param system_facade The system facade to use for querying the system,
-                e.g. system_facade_native.SystemFacadeNative for client tests.
+                e.g. system_facade.SystemFacadeLocal for client tests.
         @param metrics List of metric instances. If None, the default set will
                 be created.
         """
diff --git a/client/common_lib/cros/system_metrics_collector_unittest.py b/client/common_lib/cros/system_metrics_collector_unittest.py
index 8cb999c..a5f5c87 100644
--- a/client/common_lib/cros/system_metrics_collector_unittest.py
+++ b/client/common_lib/cros/system_metrics_collector_unittest.py
@@ -64,6 +64,15 @@
         self.assertEqual(3, metric.values[0])
         self.assertEqual(4, metric.values[1])
 
+    def test_energy_usage_metric(self):
+        metric = system_metrics_collector.EnergyUsageMetric(FakeSystemFacade())
+        metric.collect_metric()
+        self.assertEqual(1, metric.values)
+        metric.collect_metric()
+        self.assertEqual(2, metric.values)
+        metric.collect_metric()
+        self.assertEqual(3, metric.values)
+
     def test_collector(self):
         collector = system_metrics_collector.SystemMetricsCollector(
                 FakeSystemFacade(), [TestMetric()])
@@ -197,6 +206,7 @@
             'written_kb': 188458,
         }
         self.bg_worker_output = ''
+        self.energy_usage = 0
 
     def get_mem_total(self):
         return self.mem_total_mb
@@ -228,6 +238,11 @@
     def stop_bg_worker(self):
         pass
 
+    def get_energy_usage(self):
+        self.energy_usage += 1
+        return str(self.energy_usage)
+
+
 class TestMetric(system_metrics_collector.Metric):
     def __init__(self):
         super(TestMetric, self).__init__(
diff --git a/client/common_lib/cros/test_webrtc_peer_connection.py b/client/common_lib/cros/test_webrtc_peer_connection.py
index 35eea4c..3b3616b 100644
--- a/client/common_lib/cros/test_webrtc_peer_connection.py
+++ b/client/common_lib/cros/test_webrtc_peer_connection.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 import logging
 import os
 import time
@@ -8,7 +9,7 @@
 from autotest_lib.client.common_lib.cros import system_metrics_collector
 from autotest_lib.client.common_lib.cros import webrtc_utils
 from autotest_lib.client.cros.graphics import graphics_utils
-from autotest_lib.client.cros.multimedia import system_facade_native
+from autotest_lib.client.cros.multimedia import system_facade
 from autotest_lib.client.cros.video import helper_logger
 from telemetry.util import image_util
 
@@ -232,8 +233,8 @@
         try:
             full_filename = screenshot_name + '_graphics_utils'
             graphics_utils.take_screenshot(self.debugdir, full_filename)
-        except StandardError as e:
-            logging.warn('Screenshot using graphics_utils failed', exc_info = e)
+        except Exception as e:
+            logging.warning('Screenshot using graphics_utils failed', exc_info = e)
 
     def take_browser_tab_screenshot(self, screenshot_name):
         """
@@ -250,11 +251,11 @@
             except Exception:
                 # This can for example occur if Chrome crashes. It will
                 # cause the Screenshot call to timeout.
-                logging.warn(
+                logging.warning(
                         'Screenshot using telemetry tab.Screenshot failed',
                         exc_info=True)
         else:
-            logging.warn(
+            logging.warning(
                     'Screenshot using telemetry tab.Screenshot() not supported')
 
 
@@ -298,7 +299,7 @@
                   iteration_delay_millis,
                   perf_before_start_hook)
           self.collector = system_metrics_collector.SystemMetricsCollector(
-                system_facade_native.SystemFacadeNative())
+                system_facade.SystemFacadeLocal())
           # TODO(crbug/784365): If this proves to work fine, move to a separate
           # module and make more generic.
           delay = 5
diff --git a/client/common_lib/cros/textfsm.py b/client/common_lib/cros/textfsm.py
index ffad097..1aebb0d 100644
--- a/client/common_lib/cros/textfsm.py
+++ b/client/common_lib/cros/textfsm.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2010 Google Inc. All Rights Reserved.
 #
diff --git a/client/common_lib/cros/tpm_utils.py b/client/common_lib/cros/tpm_utils.py
index e7a5f4a..4021e08 100644
--- a/client/common_lib/cros/tpm_utils.py
+++ b/client/common_lib/cros/tpm_utils.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -12,10 +13,14 @@
 _RM_FILES = ['/home/chronos/.oobe_completed',
              '/home/chronos/Local\ State',
              '/var/cache/shill/default.profile']
-_RM_DIRS = ['/home/.shadow/*',
-            os.path.join(constants.ALLOWLIST_DIR, '*'),
-            '/var/cache/app_pack',
-            '/var/lib/tpm']
+# TODO(b/187793661) Delete /var/lib/whitelist once migration is finished.
+_RM_DIRS = [
+        '/home/.shadow/*',
+        os.path.join(constants.DEVICESETTINGS_DIR, '*'),
+        '/var/lib/whitelist/*',
+        '/var/cache/app_pack',
+        '/var/lib/tpm',
+]
 
 
 class NoTPMPasswordException(Exception):
@@ -37,35 +42,22 @@
             continue
         if len(item) == 1:
             item.append('')
-        item = map(lambda x : x.strip(), item)
+        item = [x.strip() for x in item]
         item[1] = True if item[1] == 'true' else item[1]
         item[1] = False if item[1] == 'false' else item[1]
         status[item[0]] = item[1]
     return status
 
 
-def IsTPMAvailable(client):
-    """Returns True if the TPM is unowned and enabled.
-
-    @param client: client object to run commands on.
-    """
-    status = TPMStatus(client)
-    return status['is_enabled'] and not status['is_owned']
-
-
 def ClearTPMServer(client, out_dir):
     """Clears the TPM and reboots from a server-side autotest.
 
     @param client: client object to run commands on.
     @param out_dir: temporary directory.
     """
-    if IsTPMAvailable(client):
-        logging.debug('TPM is not owned')
-        return
-
     client.run('stop ui')
-    client.run('crossystem clear_tpm_owner_request=1')
-    CleanupAndReboot(client)
+    ClearTPMOwnerRequest(client)
+
 
 def ClearTPMOwnerRequest(client, wait_for_ready=False, timeout=60):
     """Clears the TPM using crossystem command.
@@ -74,25 +66,25 @@
     @param wait_for_ready: wait until the TPM status is ready
     @param timeout: number of seconds to wait for the TPM to become ready.
     """
-    if not client.run('crossystem clear_tpm_owner_request=1',
-                      ignore_status=True).exit_status == 0:
-        raise error.TestFail('Unable to clear TPM.')
+    ownership_id = client.run('hwsec-ownership-id id')
+    if not ownership_id.exit_status == 0:
+        raise error.TestFail('Unable to get ownership ID.')
 
+    ownership_id = ownership_id.stdout.strip()
+
+    logging.info('Sending Clear TPM owner request')
+    client.run('crossystem clear_tpm_owner_request=1')
     CleanupAndReboot(client)
 
     if wait_for_ready:
-        status = ''
+        status = 1
         end_time = time.time() + timeout
-        # Wait for tpm_manager to send a successful reply.
-        while 'STATUS_SUCCESS' not in status and time.time() < end_time:
-            status = client.run('tpm_manager_client status --nonsensitive',
-                                ignore_status=True).stdout.strip()
-            logging.debug(status)
+        # Wait for the ownership ID changed.
+        while status != 0 and time.time() < end_time:
+            status = client.run('hwsec-ownership-id diff id=' + ownership_id,
+                                ignore_status=True).exit_status
             time.sleep(1)
-        # Verify if the TPM is unowned.
-        tpm_status = TPMStatus(client)
-        logging.info('TPM status: %s', tpm_status)
-        if tpm_status['is_owned']:
+        if status != 0:
             raise error.TestFail('Failed to clear TPM.')
 
 
@@ -116,3 +108,14 @@
     client.run(full_rm, ignore_status=True)
     client.run('sync', ignore_status=True)
     client.reboot()
+
+
+def FwmpIsAllZero(get_fwmp_output):
+    """Check if firmware management parameters are all zero.
+
+    @param get_fwmp_output: output from the command
+        'cryptohome --action=get_firmware_management_parameters'.
+    """
+    return ('flags=0x00000000' in get_fwmp_output and
+            'hash=0000000000000000000000000000000000000000000000000000000000000000'
+            in get_fwmp_output)
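
The rewritten ClearTPMOwnerRequest() waits for the clear to take effect by snapshotting `hwsec-ownership-id id` and then polling `hwsec-ownership-id diff id=<old>` until the exit status becomes 0 (treated above as "the ownership ID has changed"). A standalone sketch of that polling loop, with run_cmd standing in for client.run and RuntimeError standing in for error.TestFail:

    import logging
    import time

    def wait_for_ownership_change(run_cmd, old_id, timeout=60):
        """Poll hwsec-ownership-id until the ownership ID differs from old_id."""
        end_time = time.time() + timeout
        status = 1
        while status != 0 and time.time() < end_time:
            status = run_cmd('hwsec-ownership-id diff id=' + old_id,
                             ignore_status=True).exit_status
            time.sleep(1)
        if status != 0:
            raise RuntimeError('TPM ownership ID unchanged after %ds' % timeout)
        logging.info('TPM ownership ID changed; clear succeeded')
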
diff --git a/client/common_lib/cros/virtual_ethernet_pair.py b/client/common_lib/cros/virtual_ethernet_pair.py
index d2749d4..306e019 100644
--- a/client/common_lib/cros/virtual_ethernet_pair.py
+++ b/client/common_lib/cros/virtual_ethernet_pair.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -9,11 +10,8 @@
 unconfigured, simply pass None.  You may also specify the subnet of your ip
 addresses.  Failing to do so leaves them with default in ifconfig.
 
-# TODO b:169251326 terms below are set outside of this codebase
-# and should be updated when possible. ("master" -> "main", "slave" -> "node")
-
 Example usage:
-vif = virtual_ethernet_pair.VirtualEthernetPair(interface_name="master",
+vif = virtual_ethernet_pair.VirtualEthernetPair(interface_name="main",
                                                 peer_interface_name="peer",
                                                 interface_ip="10.9.8.1/24",
                                                 peer_interface_ip=None)
@@ -43,20 +41,22 @@
 """
 
 import logging
+import re
 
-from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros.network import interface
 
 class VirtualEthernetPair(object):
     """ Class for configuring virtual ethernet device pair. """
 
     def __init__(self,
-                 interface_name='veth_master',
-                 peer_interface_name='veth_slave',
+                 interface_name='veth_main',
+                 peer_interface_name='veth_secondary',
                  interface_ip='10.9.8.1/24',
                  peer_interface_ip='10.9.8.2/24',
                  interface_ipv6=None,
                  peer_interface_ipv6=None,
+                 interface_ns=None,
                  ignore_shutdown_errors=False,
                  host=None):
         """
@@ -76,17 +76,31 @@
         self._peer_interface_ip = peer_interface_ip
         self._interface_ipv6 = interface_ipv6
         self._peer_interface_ipv6 = peer_interface_ipv6
+        self._interface_ns = interface_ns
+        self._ns_exec = ''
+        if interface_ns:
+            self._ns_exec = 'ip netns exec %s ' % self._interface_ns
         self._ignore_shutdown_errors = ignore_shutdown_errors
         self._run = utils.run
         self._host = host
         if host is not None:
             self._run = host.run
+        (self._eth_name, self._eth_ip) = self._get_ipv4_config()
 
+    def _get_ipv4_config(self):
+        """@return Tuple with interface name and IP address used for
+        external communication."""
+        route = utils.system_output("ip route get 8.8.8.8")
+        # Only first line is interesting - match it for interface and
+        # IP address
+        m = re.search(r"dev (\S+) .*? src ((?:\d+\.){3}\d+)",
+                      route[:route.find('\n')])
+        return (m.group(1), m.group(2)) if m else (None, None)
 
     def setup(self):
         """
         Installs a virtual ethernet interface and configures one side with an IP
-        address.  First does some sanity checking and tries to remove an
+        address.  First does some confidence checking and tries to remove an
         existing interface by the same name, and logs messages on failures.
         """
         self._is_healthy = False
@@ -100,7 +114,8 @@
                 return
 
         self._create_test_interface()
-        if not self._interface_exists(self._interface_name):
+        if not self._interface_exists(self._interface_name,
+                                      self._interface_ns):
             logging.error('Failed to create main test interface.')
             return
 
@@ -111,23 +126,67 @@
         # get any IP traffic through.  Since this is basically a loopback
         # device, just allow all traffic.
         for name in (self._interface_name, self._peer_interface_name):
-            status = self._run('iptables -w -I INPUT -i %s -j ACCEPT' % name,
-                               ignore_status=True)
+            command = 'iptables -w -I INPUT -i %s -j ACCEPT' % name
+            if name == self._interface_name and self._interface_ns:
+                status = self._run(self._ns_exec + command, ignore_status=True)
+            else:
+                status = self._run(command, ignore_status=True)
             if status.exit_status != 0:
                 logging.error('iptables rule addition failed for interface %s: '
                               '%s', name, status.stderr)
+        # In addition to INPUT, also configure FORWARDing for the case where
+        # the interface is moved to its own namespace, so that there is still
+        # contact with "the world" from within that namespace.
+        if self._interface_ns and self._eth_ip:
+            command = 'iptables -w -I FORWARD -i %s -j ACCEPT' \
+                      % self._peer_interface_name
+            status = self._run(command, ignore_status=True)
+            if status.exit_status != 0:
+                logging.warning(
+                        'failed to configure forwarding rule for %s: '
+                        '%s', self._peer_interface_name, status.stderr)
+            command = 'iptables -w -t nat -I POSTROUTING ' \
+                      '--src %s -o %s -j MASQUERADE' % \
+                      (self._interface_ip, self._eth_name)
+            status = self._run(command, ignore_status=True)
+            if status.exit_status != 0:
+                logging.warning('failed to configure nat rule for %s: '
+                                '%s', self._peer_interface_name, status.stderr)
+            # Add a default route in the namespace via the address used for
+            # outbound traffic.
+            commands = [
+                    'ip r add %s dev %s', 'ip route add default via %s dev %s'
+            ]
+            for command in commands:
+                command = command % (self._eth_ip, self._interface_name)
+                status = self._run(self._ns_exec + command, ignore_status=True)
+                if status.exit_status != 0:
+                    logging.warning(
+                            'failed to configure GW route for %s: '
+                            '%s', self._interface_name, status.stderr)
         self._is_healthy = True
 
 
     def teardown(self):
         """
         Removes the interface installed by VirtualEthernetPair.setup(), with
-        some simple sanity checks that print warnings when either the interface
-        isn't there or fails to be removed.
+        some simple confidence checks that print warnings when either the
+        interface isn't there or fails to be removed.
         """
         for name in (self._interface_name, self._peer_interface_name):
-            self._run('iptables -w -D INPUT -i %s -j ACCEPT' % name,
+            command = 'iptables -w -D INPUT -i %s -j ACCEPT' % name
+            if name == self._interface_name and self._interface_ns:
+                self._run(self._ns_exec + command, ignore_status=True)
+            else:
+                self._run(command, ignore_status=True)
+        if self._interface_ns and self._eth_ip:
+            self._run('iptables -w -D FORWARD -i %s -j ACCEPT' %
+                      self._peer_interface_name,
                       ignore_status=True)
+            command = 'iptables -w -t nat -D POSTROUTING ' \
+                      '--src %s -o %s -j MASQUERADE' % \
+                      (self._interface_ip, self._eth_name)
+            self._run(command, ignore_status=True)
         if not self._either_interface_exists():
             logging.warning('VirtualEthernetPair.teardown() called, '
                             'but no interface was found.')
@@ -159,7 +218,8 @@
     @property
     def interface_ip(self):
         """@return string IPv4 address of the interface."""
-        return interface.Interface(self.interface_name).ipv4_address
+        return interface.Interface(self.interface_name,
+                                   netns=self._interface_ns).ipv4_address
 
 
     @property
@@ -171,13 +231,15 @@
     @property
     def interface_subnet_mask(self):
         """@return string IPv4 subnet mask of the interface."""
-        return interface.Interface(self.interface_name).ipv4_subnet_mask
+        return interface.Interface(self.interface_name,
+                                   netns=self._interface_ns).ipv4_subnet_mask
 
 
     @property
     def interface_prefix(self):
         """@return int IPv4 prefix length."""
-        return interface.Interface(self.interface_name).ipv4_prefix
+        return interface.Interface(self.interface_name,
+                                   netns=self._interface_ns).ipv4_prefix
 
 
     @property
@@ -189,7 +251,8 @@
     @property
     def interface_mac(self):
         """@return string MAC address of the interface."""
-        return interface.Interface(self.interface_name).mac_address
+        return interface.Interface(self.interface_name,
+                                   netns=self._interface_ns).mac_address
 
 
     @property
@@ -197,6 +260,10 @@
         """@return string MAC address of the peer interface."""
         return interface.Interface(self._peer_interface_name).mac_address
 
+    @property
+    def interface_namespace(self):
+        """@return interface name space if configured, None otherwise."""
+        return self._interface_ns
 
     def __enter__(self):
         self.setup()
@@ -207,16 +274,19 @@
         self.teardown()
 
 
-    def _interface_exists(self, interface_name):
+    def _interface_exists(self, interface_name, netns=None):
         """
         Returns True iff we found an interface with name |interface_name|.
         """
-        return interface.Interface(interface_name, host=self._host).exists
+        return interface.Interface(interface_name,
+                                   host=self._host,
+                                   netns=netns).exists
 
 
     def _either_interface_exists(self):
-        return (self._interface_exists(self._interface_name) or
-                self._interface_exists(self._peer_interface_name))
+        return (self._interface_exists(self._interface_name,
+                                       self._interface_ns)
+                or self._interface_exists(self._peer_interface_name))
 
 
     def _remove_test_interface(self):
@@ -224,11 +294,12 @@
         Remove the virtual ethernet device installed by
         _create_test_interface().
         """
-        self._run('ip link set %s down' % self._interface_name,
+        self._run(self._ns_exec + 'ip link set %s down' % self._interface_name,
                   ignore_status=self._ignore_shutdown_errors)
         self._run('ip link set %s down' % self._peer_interface_name,
                   ignore_status=self._ignore_shutdown_errors)
-        self._run('ip link delete %s >/dev/null 2>&1' % self._interface_name,
+        self._run(self._ns_exec +
+                  'ip link delete %s >/dev/null 2>&1' % self._interface_name,
                   ignore_status=self._ignore_shutdown_errors)
 
         # Under most normal circumstances a successful deletion of
@@ -238,6 +309,9 @@
         self._run('ip link delete %s >/dev/null 2>&1' %
                   self._peer_interface_name, ignore_status=True)
 
+        if self._interface_ns:
+            self._run('ip netns del %s' % self._interface_ns,
+                      ignore_status=True)
 
     def _create_test_interface(self):
         """
@@ -247,17 +321,22 @@
         self._run('ip link add name %s '
                   'type veth peer name %s >/dev/null 2>&1' %
                   (self._interface_name, self._peer_interface_name))
-        self._run('ip link set %s up' % self._interface_name)
+        if self._interface_ns:
+            self._run('ip netns add %s' % self._interface_ns,
+                      ignore_status=True)
+            self._run('ip link set dev %s netns %s' %
+                      (self._interface_name, self._interface_ns))
+        self._run(self._ns_exec + 'ip link set %s up' % self._interface_name)
         self._run('ip link set %s up' % self._peer_interface_name)
         if self._interface_ip is not None:
-            self._run('ip addr add %s dev %s' % (self._interface_ip,
-                                                 self._interface_name))
+            self._run(self._ns_exec + 'ip addr add %s dev %s' %
+                      (self._interface_ip, self._interface_name))
         if self._peer_interface_ip is not None:
             self._run('ip addr add %s dev %s' % (self._peer_interface_ip,
                                                  self._peer_interface_name))
         if self._interface_ipv6 is not None:
-            self._run('ip -6 addr add %s dev %s' % (self._interface_ipv6,
-                                                    self._interface_name))
+            self._run(self._ns_exec + 'ip -6 addr add %s dev %s' %
+                      (self._interface_ipv6, self._interface_name))
         if self._peer_interface_ipv6 is not None:
             self._run('ip -6 addr add %s dev %s' % (self._peer_interface_ipv6,
                                                     self._peer_interface_name))
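
The new _get_ipv4_config() derives the outward-facing interface and source IP by parsing the first line of `ip route get 8.8.8.8`. A self-contained sketch of just that parse, fed a canned output line (the addresses are illustrative only):

    import re

    def parse_route_get(route_output):
        """Extract (device, source IP) from the first line of `ip route get`."""
        first_line = route_output.split('\n', 1)[0]
        m = re.search(r"dev (\S+) .*? src ((?:\d+\.){3}\d+)", first_line)
        return (m.group(1), m.group(2)) if m else (None, None)

    sample = '8.8.8.8 via 192.168.1.1 dev eth0 src 192.168.1.23 uid 0\n    cache\n'
    assert parse_route_get(sample) == ('eth0', '192.168.1.23')
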
diff --git a/client/common_lib/cros/vpd_utils.py b/client/common_lib/cros/vpd_utils.py
index cd7cb2d..3efe714 100644
--- a/client/common_lib/cros/vpd_utils.py
+++ b/client/common_lib/cros/vpd_utils.py
@@ -1,9 +1,10 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 from autotest_lib.client.common_lib import error
-from chromite.lib import retry_util
+from autotest_lib.utils.frozen_chromite.lib import retry_util
 
 
 _VPD_BASE_CMD = 'vpd -i %s %s %s'
diff --git a/client/common_lib/cros/webrtc_scripts/loopback-peerconnection.js b/client/common_lib/cros/webrtc_scripts/loopback-peerconnection.js
deleted file mode 100644
index 34fe2b4..0000000
--- a/client/common_lib/cros/webrtc_scripts/loopback-peerconnection.js
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Copyright 2017 The Chromium Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-/*jshint esversion: 6 */
-
-/**
- * A loopback peer connection with one or more streams.
- */
-class PeerConnection {
-  /**
-   * Creates a loopback peer connection. One stream per supplied resolution is
-   * created.
-   * @param {!Element} videoElement the video element to render the feed on.
-   * @param {!Array<!{x: number, y: number}>} resolutions. A width of -1 will
-   *     result in disabled video for that stream.
-   * @param {?boolean=} cpuOveruseDetection Whether to enable
-   *     googCpuOveruseDetection (lower video quality if CPU usage is high).
-   *     Default is null which means that the constraint is not set at all.
-   */
-  constructor(videoElement, resolutions, cpuOveruseDetection=null) {
-    this.localConnection = null;
-    this.remoteConnection = null;
-    this.remoteView = videoElement;
-    this.streams = [];
-    // Ensure sorted in descending order to conveniently request the highest
-    // resolution first through GUM later.
-    this.resolutions = resolutions.slice().sort((x, y) => y.w - x.w);
-    this.activeStreamIndex = resolutions.length - 1;
-    this.badResolutionsSeen = 0;
-    if (cpuOveruseDetection !== null) {
-      this.pcConstraints = {
-        'optional': [{'googCpuOveruseDetection': cpuOveruseDetection}]
-      };
-    }
-    this.rtcConfig = {'sdpSemantics': 'plan-b'};
-  }
-
-  /**
-   * Starts the connections. Triggers GetUserMedia and starts
-   * to render the video on {@code this.videoElement}.
-   * @return {!Promise} a Promise that resolves when everything is initalized.
-   */
-  start() {
-    // getUserMedia fails if we first request a low resolution and
-    // later a higher one. Hence, sort resolutions above and
-    // start with the highest resolution here.
-    const promises = this.resolutions.map((resolution) => {
-      const constraints = createMediaConstraints(resolution);
-      return navigator.mediaDevices
-        .getUserMedia(constraints)
-        .then((stream) => this.streams.push(stream));
-    });
-    return Promise.all(promises).then(() => {
-      // Start with the smallest video to not overload the machine instantly.
-      return this.onGetUserMediaSuccess_(this.streams[this.activeStreamIndex]);
-    })
-  };
-
-  /**
-   * Verifies that the state of the streams are good. The state is good if all
-   * streams are active and their video elements report the resolution the
-   * stream is in. Video elements are allowed to report bad resolutions
-   * numSequentialBadResolutionsForFailure times before failure is reported
-   * since video elements occasionally report bad resolutions during the tests
-   * when we manipulate the streams frequently.
-   * @param {number=} numSequentialBadResolutionsForFailure number of bad
-   *     resolution observations in a row before failure is reported.
-   * @param {number=} allowedDelta allowed difference between expected and
-   *     actual resolution. We have seen videos assigned a resolution one pixel
-   *     off from the requested.
-   * @throws {Error} in case the state is not-good.
-   */
-  verifyState(numSequentialBadResolutionsForFailure=10, allowedDelta=1) {
-    this.verifyAllStreamsActive_();
-    const expectedResolution = this.resolutions[this.activeStreamIndex];
-    if (expectedResolution.w < 0 || expectedResolution.h < 0) {
-      // Video is disabled.
-      return;
-    }
-    if (!isWithin(
-            this.remoteView.videoWidth, expectedResolution.w, allowedDelta) ||
-        !isWithin(
-            this.remoteView.videoHeight, expectedResolution.h, allowedDelta)) {
-      this.badResolutionsSeen++;
-    } else if (
-        this.badResolutionsSeen < numSequentialBadResolutionsForFailure) {
-      // Reset the count, but only if we have not yet reached the limit. If the
-      // limit is reached, let keep the error state.
-      this.badResolutionsSeen = 0;
-    }
-    if (this.badResolutionsSeen >= numSequentialBadResolutionsForFailure) {
-      throw new Error(
-          'Expected video resolution ' +
-          resStr(expectedResolution.w, expectedResolution.h) +
-          ' but got another resolution ' + this.badResolutionsSeen +
-          ' consecutive times. Last resolution was: ' +
-          resStr(this.remoteView.videoWidth, this.remoteView.videoHeight));
-    }
-  }
-
-  verifyAllStreamsActive_() {
-    if (this.streams.some((x) => !x.active)) {
-      throw new Error('At least one media stream is not active')
-    }
-  }
-
-  /**
-   * Switches to a random stream, i.e., use a random resolution of the
-   * resolutions provided to the constructor.
-   * @return {!Promise} A promise that resolved when everything is initialized.
-   */
-  switchToRandomStream() {
-    const localStreams = this.localConnection.getLocalStreams();
-    const track = localStreams[0];
-    if (track != null) {
-      this.localConnection.removeStream(track);
-      const newStreamIndex = Math.floor(Math.random() * this.streams.length);
-      return this.addStream_(this.streams[newStreamIndex])
-          .then(() => this.activeStreamIndex = newStreamIndex);
-    } else {
-      return Promise.resolve();
-    }
-  }
-
-  onGetUserMediaSuccess_(stream) {
-    this.localConnection = new RTCPeerConnection(this.rtcConfig,
-      this.pcConstraints);
-    this.localConnection.onicecandidate = (event) => {
-      this.onIceCandidate_(this.remoteConnection, event);
-    };
-    this.remoteConnection = new RTCPeerConnection(this.rtcConfig,
-      this.pcConstraints);
-    this.remoteConnection.onicecandidate = (event) => {
-      this.onIceCandidate_(this.localConnection, event);
-    };
-    this.remoteConnection.onaddstream = (e) => {
-      this.remoteView.srcObject = e.stream;
-    };
-    return this.addStream_(stream);
-  }
-
-  addStream_(stream) {
-    this.localConnection.addStream(stream);
-    return this.localConnection
-        .createOffer({offerToReceiveAudio: 1, offerToReceiveVideo: 1})
-        .then((desc) => this.onCreateOfferSuccess_(desc), logError);
-  }
-
-  onCreateOfferSuccess_(desc) {
-    this.localConnection.setLocalDescription(desc);
-    this.remoteConnection.setRemoteDescription(desc);
-    return this.remoteConnection.createAnswer().then(
-        (desc) => this.onCreateAnswerSuccess_(desc), logError);
-  };
-
-  onCreateAnswerSuccess_(desc) {
-    this.remoteConnection.setLocalDescription(desc);
-    this.localConnection.setRemoteDescription(desc);
-  };
-
-  onIceCandidate_(connection, event) {
-    if (event.candidate) {
-      connection.addIceCandidate(new RTCIceCandidate(event.candidate));
-    }
-  };
-}
-
-/**
- * Checks if a value is within an expected value plus/minus a delta.
- * @param {number} actual
- * @param {number} expected
- * @param {number} delta
- * @return {boolean}
- */
-function isWithin(actual, expected, delta) {
-  return actual <= expected + delta && actual >= actual - delta;
-}
-
-/**
- * Creates constraints for use with GetUserMedia.
- * @param {!{x: number, y: number}} widthAndHeight Video resolution.
- */
-function createMediaConstraints(widthAndHeight) {
-  let constraint;
-  if (widthAndHeight.w < 0) {
-    constraint = false;
-  } else {
-    constraint = {
-      width: {exact: widthAndHeight.w},
-      height: {exact: widthAndHeight.h}
-    };
-  }
-  return {
-    audio: true,
-    video: constraint
-  };
-}
-
-function resStr(width, height) {
-  return `${width}x${height}`
-}
-
-function logError(err) {
-  console.error(err);
-}
diff --git a/client/common_lib/cros/xmlrpc_types.py b/client/common_lib/cros/xmlrpc_types.py
index 7d95b3a..f4bb87b 100644
--- a/client/common_lib/cros/xmlrpc_types.py
+++ b/client/common_lib/cros/xmlrpc_types.py
@@ -1,9 +1,17 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import inspect
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+try:
+    from inspect import getfullargspec as get_args
+except ImportError:
+    from inspect import getargspec as get_args
 import logging
+import six
 import sys
 
 
@@ -35,7 +43,7 @@
     if module is None:
         module = sys.modules[__name__]
     klass = getattr(module, serialized[TYPE_KEY])
-    constructor_args = inspect.getargspec(klass.__init__)
+    constructor_args = get_args(klass.__init__)
     optional_args = []
     if constructor_args.defaults:
         # Valid args should now be a list of all the parameters that have
@@ -50,8 +58,7 @@
             return None
 
         args.append(serialized[arg])
-    kwargs = dict(filter(lambda (k, v): k in optional_args,
-                         serialized.iteritems()))
+    kwargs = dict((k, v) for k, v in six.iteritems(serialized)
+                  if k in optional_args)
     logging.debug('Constructing %s object with args=%r, kwargs=%r',
                   serialized[TYPE_KEY], args, kwargs)
     return klass(*args, **kwargs)
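
For reference, a minimal sketch of the argspec-inspection pattern used above, run against a hypothetical Example class (not part of this patch), showing how the optional constructor arguments and the matching kwargs are derived:

    try:
        from inspect import getfullargspec as get_args  # Python 3
    except ImportError:
        from inspect import getargspec as get_args  # Python 2

    class Example(object):
        def __init__(self, required, optional_a=1, optional_b=2):
            self.required = required
            self.optional_a = optional_a
            self.optional_b = optional_b

    spec = get_args(Example.__init__)
    # Parameters with defaults are the trailing len(spec.defaults) entries.
    optional_args = spec.args[-len(spec.defaults):] if spec.defaults else []
    serialized = {'required': 'x', 'optional_b': 5, 'ignored': None}
    kwargs = dict((k, v) for k, v in serialized.items() if k in optional_args)
    print(optional_args)  # ['optional_a', 'optional_b']
    print(kwargs)         # {'optional_b': 5}
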
diff --git a/client/common_lib/decorators.py b/client/common_lib/decorators.py
index e09feb5..3881d0b 100644
--- a/client/common_lib/decorators.py
+++ b/client/common_lib/decorators.py
@@ -102,13 +102,13 @@
         @return: The function to call based on the value of `module`
         """
 
-        def dummy_func(*args, **kargs):
-            """A dummy function silently pass."""
+        def stub_func(*args, **kargs):
+            """A stub function silently pass."""
             logging.debug('Module %s is not found. Call %s is skipped.', module,
                           f)
             if raise_error:
                 raise ImportError('Module %s is not found.' % module)
 
-        return f if module else dummy_func
+        return f if module else stub_func
 
     return decorator
diff --git a/client/common_lib/decorators_unittest.py b/client/common_lib/decorators_unittest.py
index 2000e90..0b4ef4e 100644
--- a/client/common_lib/decorators_unittest.py
+++ b/client/common_lib/decorators_unittest.py
@@ -1,13 +1,14 @@
-#!/usr/bin/env python2
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+"""Unit tests for client/common_lib/decorators.py."""
+
+
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-import mox
 from six.moves import range
 import threading
 import time
@@ -17,7 +18,7 @@
 from autotest_lib.client.common_lib import decorators
 
 
-class InContextTest(mox.MoxTestBase):
+class InContextTest(unittest.TestCase):
     """ Unit tests for the in_context decorator. """
 
     @decorators.in_context('lock')
@@ -29,15 +30,15 @@
 
 
     def testDecorator(self):
-        """ Test that the decorator works by using it with a lock. """
+        """ Test that the decorator works by using it with a lock."""
         self.count = 0
         self.lock = threading.RLock()
         iters = 100
         num_threads = 20
-        # Note that it is important for us to go through all this bother to call
-        # a method in_context N times rather than call a method in_context that
-        # does something N times, because by doing the former, we acquire the
-        # context N times (1 time for the latter).
+        # Note that it is important for us to go through all this bother to
+        # call a method in_context N times rather than call a method in_context
+        # that does something N times, because by doing the former, we acquire
+        # the context N times (1 time for the latter).
         thread_body = lambda f, n: [f() for i in range(n)]
         threads = [threading.Thread(target=thread_body,
                                     args=(self.inc_count, iters))
@@ -50,8 +51,10 @@
 
 
 class CachedPropertyTest(unittest.TestCase):
+    """Unit tests for the cached property decorator."""
+
     def testIt(self):
-        """cached_property"""
+        """cached_property."""
         class Example(object):
             def __init__(self, v=0):
                 self.val = v
diff --git a/client/common_lib/enum.py b/client/common_lib/enum.py
deleted file mode 100644
index 0af6d30..0000000
--- a/client/common_lib/enum.py
+++ /dev/null
@@ -1,80 +0,0 @@
-"""\
-Generic enumeration support.
-"""
-
-__author__ = 'showard@google.com (Steve Howard)'
-
-class Enum(object):
-    """
-
-    TODO: b/170215553, this file will be delete prior to completion of Python 3
-    migrations in Q4 2020/ Q1 2021. Instead import/use
-    autotest_lib.client.common_lib.autotest_enum.AutotestEnum.
-
-
-    Utility class to implement Enum-like functionality.
-
-    >>> e = Enum('String one', 'String two')
-    >>> e.STRING_ONE
-    0
-    >>> e.STRING_TWO
-    1
-    >>> e.choices()
-    [(0, 'String one'), (1, 'String two')]
-    >>> e.get_value('String one')
-    0
-    >>> e.get_string(0)
-    'String one'
-
-    >>> e = Enum('Hello', 'Goodbye', string_values=True)
-    >>> e.HELLO, e.GOODBYE
-    ('Hello', 'Goodbye')
-
-    >>> e = Enum('One', 'Two', start_value=1)
-    >>> e.ONE
-    1
-    >>> e.TWO
-    2
-    """
-    def __init__(self, *names, **kwargs):
-        self.string_values = kwargs.get('string_values')
-        start_value = kwargs.get('start_value', 0)
-        step = kwargs.get('step', 1)
-        self.names = names
-        self.values = []
-        for i, name in enumerate(names):
-            if self.string_values:
-                value = name
-            else:
-                value = i * step + start_value
-            self.values.append(value)
-            setattr(self, self.get_attr_name(name), value)
-
-
-    @staticmethod
-    def get_attr_name(string):
-        return string.upper().replace(' ', '_')
-
-
-    def choices(self):
-        'Return choice list suitable for Django model choices.'
-        return zip(self.values, self.names)
-
-
-    def get_value(self, name):
-        """\
-        Convert a string name to it's corresponding value.  If a value
-        is passed in, it is returned.
-        """
-        if isinstance(name, (int, long)) and not self.string_values:
-            # name is already a value
-            return name
-        return getattr(self, self.get_attr_name(name))
-
-
-    def get_string(self, value):
-        ' Given a value, get the string name for it.'
-        if value not in self.values:
-            raise ValueError('Value %s not in this enum' % value)
-        index = self.values.index(value)
-        return self.names[index]
diff --git a/client/common_lib/error.py b/client/common_lib/error.py
index 52350c2..7f5063f 100644
--- a/client/common_lib/error.py
+++ b/client/common_lib/error.py
@@ -258,6 +258,10 @@
     """Indicates that a SSH permission denied error was encountered."""
 
 
+class AutoservSshDnsError(AutoservRunError):
+    """Indicates that a DNS resolution error was encountered."""
+
+
 class AutoservUnsupportedError(AutoservError):
     """Error raised when you try to use an unsupported optional feature"""
 
@@ -278,6 +282,10 @@
     """SSH ping failed"""
 
 
+class AutoservSSPError(AutoservHostError):
+    """SSP setup failed"""
+
+
 class AutoservDiskFullHostError(AutoservHostError):
     """Not enough free disk space on host"""
 
@@ -511,6 +519,10 @@
     """Exception raised when the disk space could not be determined."""
 
 
+class TLSConnectionError(AutoservError):
+    """Indicates that a TLS connection error was encountered."""
+
+
 # This MUST remain at the end of the file.
 # Limit 'from error import *' to only import the exception instances.
 for _name, _thing in list(locals().items()):
diff --git a/client/common_lib/global_config.py b/client/common_lib/global_config.py
index 42eda8a..cf09b8b 100644
--- a/client/common_lib/global_config.py
+++ b/client/common_lib/global_config.py
@@ -17,9 +17,12 @@
 import collections
 import os
 import re
+import six
 import six.moves.configparser as ConfigParser
 import sys
 
+from six.moves import StringIO
+
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import lsbrelease_utils
 from autotest_lib.client.common_lib import seven
@@ -290,12 +293,46 @@
                 val = override_config.get(section, option)
                 self.config.set(section, option, val)
 
+    def _load_config_file(self, config_file):
+        """
+        Load the config_file into a StringIO buffer parsable by the current py
+        version.
+
+        TODO b:179407161, when running only in Python 3, force config files
+        to be correct, and remove this special parsing.
+
+        When in Python 3, this will change instances of %, not followed
+        immediately by (, to %%. Thus:
+            "%foo" --> "%%foo"
+            "%(foo" --> "%(foo"
+            "%%foo" --> "%%foo"
+        In Python 2, we will do the opposite, and change instances of %%, to %.
+            "%%foo" --> "%foo"
+            "%%(foo" --> "%(foo"
+            "%foo" --> "%foo"
+        """
+        with open(config_file) as cf:
+            config_file_str = cf.read()
+        if six.PY3:
+            config_file_str = re.sub(r"([^%]|^)%([^%(]|$)", r"\1%%\2",
+                                     config_file_str)
+        else:
+            config_file_str = config_file_str.replace('%%', '%')
+        return StringIO(config_file_str)
+
+    def _read_config(self, config, buf):
+        """Read the provided io buffer, into the specified config."""
+        if six.PY3:
+            config.read_file(buf)
+        else:
+            config.readfp(buf)
 
     def parse_config_file(self):
         """Parse config files."""
         self.config = seven.config_parser()
         if self.config_file and os.path.exists(self.config_file):
-            self.config.read(self.config_file)
+            buf = self._load_config_file(self.config_file)
+            self._read_config(self.config, buf)
         else:
             raise ConfigError('%s not found' % (self.config_file))
 
@@ -304,7 +341,8 @@
         if (lsbrelease_utils.is_moblab() and self.moblab_file and
             os.path.exists(self.moblab_file)):
             moblab_config = seven.config_parser()
-            moblab_config.read(self.moblab_file)
+            mob_buf = self._load_config_file(self.moblab_file)
+            self._read_config(moblab_config, mob_buf)
             # now we merge moblab into global
             self.merge_configs(moblab_config)
 
@@ -313,7 +351,8 @@
         # other config
         if self.shadow_file and os.path.exists(self.shadow_file):
             shadow_config = seven.config_parser()
-            shadow_config.read(self.shadow_file)
+            shadow_buf = self._load_config_file(self.shadow_file)
+            self._read_config(shadow_config, shadow_buf)
             # now we merge shadow into global
             self.merge_configs(shadow_config)
 
diff --git a/client/common_lib/global_config_unittest.py b/client/common_lib/global_config_unittest.py
index a8491d4..b032bfd 100755
--- a/client/common_lib/global_config_unittest.py
+++ b/client/common_lib/global_config_unittest.py
@@ -1,12 +1,12 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import collections
-import os
-import mox
-import types
+import six
 import unittest
+from unittest.mock import patch
 
 import common
+
 from autotest_lib.client.common_lib import autotemp
 from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.common_lib import lsbrelease_utils
@@ -40,8 +40,17 @@
 random: 1
 wireless_ssid_1.2.3.4/24: ssid_1
 wireless_ssid_4.3.2.1/16: ssid_2
+
+[SECTION_F]
+value_7: %sexample
+value_8: %%sexample
+value_9: %%\(example\)
+value_10: %\(branch\)s
+value_11: %%\(branch\)s
+
 """
 
+
 moblab_config_ini_contents = """
 [SECTION_C]
 value_1: moblab@remotehost
@@ -59,18 +68,21 @@
 def create_config_files():
     """Create config files to be used for test."""
     global_temp = autotemp.tempfile("global", ".ini", text=True)
-    os.write(global_temp.fd, global_config_ini_contents)
+    with open(global_temp.name, 'w') as gt:
+        gt.write(global_config_ini_contents)
 
     moblab_temp = autotemp.tempfile("moblab", ".ini", text=True)
-    os.write(moblab_temp.fd, moblab_config_ini_contents)
+    with open(moblab_temp.name, 'w') as mt:
+        mt.write(moblab_config_ini_contents)
 
     shadow_temp = autotemp.tempfile("shadow", ".ini", text=True)
-    os.write(shadow_temp.fd, shadow_config_ini_contents)
+    with open(shadow_temp.name, 'w') as st:
+        st.write(shadow_config_ini_contents)
 
     return (global_temp, shadow_temp, moblab_temp)
 
 
-class global_config_test(mox.MoxTestBase):
+class global_config_test(unittest.TestCase):
     """Test class"""
     # grab the singleton
     conf = global_config.global_config
@@ -117,23 +129,23 @@
     def test_string(self):
         """Test converting string value."""
         val = self.conf.get_config_value("SECTION_A", "value_2")
-        self.assertEquals(type(val),bytes)
+        self.assertTrue(isinstance(val, six.string_types))
         self.assertEquals(val, "hello")
 
 
-    def setIsMoblab(self, is_moblab):
+    def setIsMoblab(self, value):
         """Set lsbrelease_utils.is_moblab result.
 
-        @param is_moblab: Value to have lsbrelease_utils.is_moblab to return.
+        @param value: Value to have lsbrelease_utils.is_moblab to return.
         """
-        self.mox.StubOutWithMock(lsbrelease_utils, 'is_moblab')
-        lsbrelease_utils.is_moblab().AndReturn(is_moblab)
-
+        patcher = patch.object(lsbrelease_utils, 'is_moblab')
+        is_moblab = patcher.start()
+        self.addCleanup(patcher.stop)
+        is_moblab.return_value = value
 
     def test_override_non_moblab(self):
         """Test value overriding works in non-moblab setup."""
         self.setIsMoblab(False)
-        self.mox.ReplayAll()
 
         self.conf.reset_config_values()
 
@@ -149,7 +161,6 @@
     def test_override_moblab(self):
         """Test value overriding works in moblab setup."""
         self.setIsMoblab(True)
-        self.mox.ReplayAll()
 
         self.conf.reset_config_values()
 
@@ -184,6 +195,24 @@
         self.assertEquals(val, False)
 
 
+    def test_special(self):
+        """Test converting special instances of '%, %%, %s, %%(), %()'."""
+        val7 = self.conf.get_config_value("SECTION_F", "value_7")
+        val8 = self.conf.get_config_value("SECTION_F", "value_8")
+        val9 = self.conf.get_config_value("SECTION_F", "value_9")
+        val10 = self.conf.get_config_value("SECTION_F", "value_10")
+        val11 = self.conf.get_config_value("SECTION_F", "value_11")
+
+        # This is the same parsing done within dev_server and other libs...
+        val10 = (val10.replace('\\', '') % {'branch': 'test_str'})
+        val11 = (val11.replace('\\', '') % {'branch': 'test_str'})
+
+        self.assertEquals(val7, '%sexample')
+        self.assertEquals(val8, '%sexample')
+        self.assertEquals(val9, '%\(example\)')
+        self.assertEquals(val10, 'test_str')
+        self.assertEquals(val11, 'test_str')
+
     def test_defaults(self):
         """Test default value works."""
         val = self.conf.get_config_value("MISSING", "foo", float, 3.6)
diff --git a/client/common_lib/gtest_parser.py b/client/common_lib/gtest_parser.py
index d310678..fe2537c 100644
--- a/client/common_lib/gtest_parser.py
+++ b/client/common_lib/gtest_parser.py
@@ -12,8 +12,6 @@
     """This class knows how to understand GTest test output.
 
     The code was borrowed with minor changes from chrome utility gtest_command.
-        http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/master/
-        log_parser/gtest_command.py?view=markup
     """
 
     def __init__(self):
@@ -49,21 +47,22 @@
         # This regexp also matches SomeName.SomeTest/1, which should be
         # harmless.
         test_name_regexp = r'((\w+/)?\w+\.\w+(\.\w+)?(/\d+)?)'
-        self._test_start = re.compile('\[\s+RUN\s+\] ' + test_name_regexp)
-        self._test_ok = re.compile('\[\s+OK\s+\] ' + test_name_regexp)
-        self._test_fail = re.compile('\[\s+FAILED\s+\] ' + test_name_regexp)
+        self._test_start = re.compile(r'\[\s+RUN\s+\] ' + test_name_regexp)
+        self._test_ok = re.compile(r'\[\s+OK\s+\] ' + test_name_regexp)
+        self._test_fail = re.compile(r'\[\s+FAILED\s+\] ' + test_name_regexp)
         self._test_timeout = re.compile(
-            'Test timeout \([0-9]+ ms\) exceeded for ' + test_name_regexp)
-        self._disabled = re.compile('  YOU HAVE (\d+) DISABLED TEST')
-        self._flaky = re.compile('  YOU HAVE (\d+) FLAKY TEST')
+                r'Test timeout \([0-9]+ ms\) exceeded for ' + test_name_regexp)
+        self._disabled = re.compile(r'  YOU HAVE (\d+) DISABLED TEST')
+        self._flaky = re.compile(r'  YOU HAVE (\d+) FLAKY TEST')
 
         self._suppression_start = re.compile(
-            'Suppression \(error hash=#([0-9A-F]+)#\):')
-        self._suppression_end = re.compile('^}\s*$')
+                r'Suppression \(error hash=#([0-9A-F]+)#\):')
+        self._suppression_end = re.compile(r'^}\s*$')
 
         # TODO b:169251326 terms below are set outside of this codebase
         # and should be updated when possible. ("master" -> "main")
-        self._main_name_re = re.compile('\[Running for master: "([^"]*)"')
+        self._main_name_re = re.compile(
+                r'\[Running for master: "([^"]*)"')  # nocheck
         self.main_name = ''
 
         self._error_logging_start_re = re.compile('=' * 70)
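
The raw-string conversions above keep the GTest status-line patterns unchanged; as a small sanity sketch, the same expressions match typical '[ RUN ]' / '[ OK ]' lines like this (the sample log lines are illustrative):

    import re

    test_name_regexp = r'((\w+/)?\w+\.\w+(\.\w+)?(/\d+)?)'
    test_start = re.compile(r'\[\s+RUN\s+\] ' + test_name_regexp)
    test_ok = re.compile(r'\[\s+OK\s+\] ' + test_name_regexp)

    print(test_start.search('[ RUN      ] SomeSuite.SomeTest/1').group(1))
    # SomeSuite.SomeTest/1
    print(test_ok.search('[       OK ] SomeSuite.SomeTest/1 (12 ms)').group(1))
    # SomeSuite.SomeTest/1
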
diff --git a/client/common_lib/hosts/base_classes_unittest.py b/client/common_lib/hosts/base_classes_unittest.py
index 9b773e6..07bdaf7 100755
--- a/client/common_lib/hosts/base_classes_unittest.py
+++ b/client/common_lib/hosts/base_classes_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import common
 import os
diff --git a/client/common_lib/hosts/repair.py b/client/common_lib/hosts/repair.py
index 5c61099..e785106 100644
--- a/client/common_lib/hosts/repair.py
+++ b/client/common_lib/hosts/repair.py
@@ -30,7 +30,7 @@
 from autotest_lib.client.common_lib import error
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     from autotest_lib.client.bin.utils import metrics_mock as metrics
 
@@ -320,7 +320,12 @@
                 type(self).__name__)
 
     def _get_node_by_tag(self, tag):
-        """Find verifier by tag, recursive."""
+        """Find verifier by tag, recursive.
+
+        @param tag  Node identifier.
+
+        @returns:   _DependencyNode instance associated with tag
+        """
         if self._tag == tag:
             return self
         for child in self._dependency_list:
@@ -407,10 +412,12 @@
         @param host     The host to be tested for a problem.
         @param silent   If true, don't log host status records.
         """
+        self._verify_dependencies(host, silent)
         try:
             if not self._is_applicable(host):
-                logging.info('Verify %s is not applicable to %s, skipping...',
-                             self.description, host.hostname)
+                logging.info(
+                        'Verify "%s:%s" is not applicable to %s, skipping...',
+                        self.tag, self.description, host.hostname)
                 return
         except Exception as e:
             logging.error('Skipping %s verifier due to unexpect error during'
@@ -423,8 +430,7 @@
             elif self._result:
                 return              # cached success
 
-        self._verify_dependencies(host, silent)
-        logging.info('Verifying this condition: %s', self.description)
+        logging.info('Verifying %s:%s', self.tag, self.description)
         try:
             logging.debug('Start verify task: %s.', type(self).__name__)
             self.verify(host)
@@ -643,6 +649,13 @@
         #
         # If we're blocked by a failed dependency, we exit with an
         # exception.  So set status to 'blocked' first.
+        self.status = 'blocked'
+        try:
+            self._verify_dependencies(host, silent)
+        except Exception as e:
+            self._send_failure_metrics(host, e, 'dep')
+            raise
+
         self.status = 'skipped'
         try:
             if not self._is_applicable(host):
@@ -653,13 +666,6 @@
             logging.error('Skipping %s repair action due to unexpect error'
                           ' during check applicability; %s', self.tag, e)
             return
-
-        self.status = 'blocked'
-        try:
-            self._verify_dependencies(host, silent)
-        except Exception as e:
-            self._send_failure_metrics(host, e, 'dep')
-            raise
         # This is a defensive action.  Every path below should overwrite
         # this setting, but if it doesn't, we want our status to reflect
         # a coding error.
@@ -1006,7 +1012,7 @@
             None - verifier did not run because it is not applicable
                    or blocked due to dependency failure
         """
-        verifier = self._verify_root._get_node_by_tag(tag)
+        verifier = self.node_by_tag(tag)
         if verifier is not None:
             result = verifier._is_good()
             logging.debug('Verifier with associated tag: %s found', tag)
@@ -1021,6 +1027,21 @@
         logging.debug('Verifier with associated tag: %s not found', tag)
         return None
 
+    def node_by_tag(self, tag):
+        """Find and return node by searched tag.
+
+        @param tag: key to be associated with node
+
+        @returns: _DependencyNode instance associated with tag
+        """
+        node = self._verify_root._get_node_by_tag(tag)
+        if node is None:
+            for n in self._repair_actions:
+                node = n._get_node_by_tag(tag)
+                if node is not None:
+                    break
+        return node
+
 
 def _filter_metrics_hostname(host):
     """
diff --git a/client/common_lib/hosts/repair_unittest.py b/client/common_lib/hosts/repair_unittest.py
index 81a0722..4c11a2b 100644
--- a/client/common_lib/hosts/repair_unittest.py
+++ b/client/common_lib/hosts/repair_unittest.py
@@ -25,20 +25,38 @@
 
 class _GoodVerifier(hosts.Verifier):
     """Verifier is always good"""
+
+    def __init__(self, tag, dependencies):
+        super(_GoodVerifier, self).__init__(tag, dependencies)
+        self._count = 0
+
     def verify(self, host):
+        self._count += 1
         pass
 
 
 class _BadVerifier(hosts.Verifier):
     """Verifier is always fail"""
+
+    def __init__(self, tag, dependencies):
+        super(_BadVerifier, self).__init__(tag, dependencies)
+        self._count = 0
+
     def verify(self, host):
+        self._count += 1
         raise Exception('Just not your day')
 
 
 class _SkipVerifier(hosts.Verifier):
     """Verifier is always not applicable"""
+
+    def __init__(self, tag, dependencies):
+        super(_SkipVerifier, self).__init__(tag, dependencies)
+        self._count = 0
+
     def verify(self, host):
-        pass
+        # this point should not be reached
+        self._count += 1
 
     def _is_applicable(self, host):
         return False
@@ -1397,11 +1415,11 @@
     def test_run_verifier_with_dependencies(self):
         """Check the result if dependency fail or not applicable."""
         verify_data = [
-            (_GoodVerifier, 'v1', []),
-            (_BadVerifier, 'v2', []),
-            (_SkipVerifier, 'v3', []),
-            (_GoodVerifier, 'v4', ['v2']),
-            (_GoodVerifier, 'v5', ['v3']),
+                (_GoodVerifier, 'v1', []),
+                (_BadVerifier, 'v2', []),
+                (_SkipVerifier, 'v3', []),
+                (_GoodVerifier, 'v4', ['v2']),
+                (_GoodVerifier, 'v5', ['v3']),
         ]
         strategy = hosts.RepairStrategy(verify_data, (), 'unittest')
         try:
@@ -1423,6 +1441,33 @@
                          strategy.verifier_is_good('v5'))
         self.assertEqual(repair.VERIFY_NOT_RUN,
                          strategy.verifier_is_good('v6'))
+        # Check how many times each verifier ran.
+        self.assertEqual(1, strategy.node_by_tag('v1')._count)
+        self.assertEqual(1, strategy.node_by_tag('v2')._count)
+        self.assertEqual(0, strategy.node_by_tag('v3')._count)
+        self.assertEqual(0, strategy.node_by_tag('v4')._count)
+        self.assertEqual(1, strategy.node_by_tag('v5')._count)
+
+    def test_run_verifier_count_with_dependencies(self):
+        """Check the verifier will run only once."""
+        verify_data = [
+                (_GoodVerifier, 'v1', []),
+                (_GoodVerifier, 'v2', ['v1']),
+                (_GoodVerifier, 'v3', ['v1']),
+                (_GoodVerifier, 'v4', ['v2', 'v3']),
+                (_GoodVerifier, 'v5', ['v2', 'v3', 'v4']),
+        ]
+        strategy = hosts.RepairStrategy(verify_data, (), 'unittest')
+        try:
+            strategy.verify(self._fake_host, silent=True)
+        except Exception as e:
+            pass
+        # Check how many times each verifier ran.
+        self.assertEqual(1, strategy.node_by_tag('v1')._count)
+        self.assertEqual(1, strategy.node_by_tag('v2')._count)
+        self.assertEqual(1, strategy.node_by_tag('v3')._count)
+        self.assertEqual(1, strategy.node_by_tag('v4')._count)
+        self.assertEqual(1, strategy.node_by_tag('v5')._count)
 
 
 if __name__ == '__main__':
diff --git a/client/common_lib/logging_manager.py b/client/common_lib/logging_manager.py
index 643bbba..5a66611 100644
--- a/client/common_lib/logging_manager.py
+++ b/client/common_lib/logging_manager.py
@@ -48,7 +48,6 @@
 import six
 import sys
 import time
-import warnings
 
 # primary public APIs
 
@@ -129,11 +128,6 @@
     return rv
 
 
-if sys.version_info[:2] > (2, 7):
-    warnings.warn('This module has not been reviewed for Python %s' %
-                  sys.version)
-
-
 # Monkey patch our way around logging's design...
 _original_logger__find_caller = logging.Logger.findCaller
 # Do not overwrite in Python 3 and on. It breaks the "<module>:<line num>|"
diff --git a/client/common_lib/logging_manager_test.py b/client/common_lib/logging_manager_test.py
index 1eaa76e..7ae29c1 100755
--- a/client/common_lib/logging_manager_test.py
+++ b/client/common_lib/logging_manager_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 from __future__ import absolute_import
 from __future__ import division
@@ -108,11 +108,11 @@
 """
 
 
-class DummyLoggingConfig(logging_config.LoggingConfig):
+class StubLoggingConfig(logging_config.LoggingConfig):
     console_formatter = logging.Formatter(LOGGING_FORMAT)
 
     def __init__(self):
-        super(DummyLoggingConfig, self).__init__()
+        super(StubLoggingConfig, self).__init__()
         self.log = PipedStringIO()
 
 
@@ -142,7 +142,7 @@
         logging.basicConfig(level=logging.INFO, format=LOGGING_FORMAT,
                             stream=self.stdout)
 
-        self._config_object = DummyLoggingConfig()
+        self._config_object = StubLoggingConfig()
         logging_manager.LoggingManager.logging_config_object = (
                 self._config_object)
 
diff --git a/client/common_lib/lsbrelease_utils.py b/client/common_lib/lsbrelease_utils.py
index 5050cd0..df15ce9 100644
--- a/client/common_lib/lsbrelease_utils.py
+++ b/client/common_lib/lsbrelease_utils.py
@@ -31,6 +31,9 @@
     if lsb_release_content is None:
         with open(constants.LSB_RELEASE) as lsb_release_file:
             lsb_release_content = lsb_release_file.read()
+
+    if isinstance(lsb_release_content, bytes):
+        lsb_release_content = lsb_release_content.decode("utf-8")
     for line in lsb_release_content.split('\n'):
         m = re.match(regex, line)
         if m:
@@ -111,12 +114,7 @@
 
     @return the board string if this is a Moblab device or None if it is not.
     """
-    if lsb_release_content is not None:
-        return _lsbrelease_search(r'.*moblab',
-                                  lsb_release_content=lsb_release_content)
-
-    if os.path.exists(constants.LSB_RELEASE):
-        return _lsbrelease_search(r'.*moblab')
+    return 'MOBLAB' in os.environ
 
 
 def is_jetstream(lsb_release_content=None):
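
is_moblab now keys off the MOBLAB environment variable rather than parsing /etc/lsb-release. A small sketch of testing that behavior with unittest.mock.patch.dict, as an alternative to swapping os.environ wholesale as the unit test below does (the local is_moblab mirrors the one-line check above):

    import os
    import unittest
    from unittest.mock import patch

    def is_moblab():
        # Mirrors the new check in lsbrelease_utils.is_moblab().
        return 'MOBLAB' in os.environ

    class IsMoblabTest(unittest.TestCase):
        def test_without_moblab_env(self):
            with patch.dict(os.environ, {}, clear=True):
                self.assertFalse(is_moblab())

        def test_with_moblab_env(self):
            with patch.dict(os.environ, {'MOBLAB': '1'}):
                self.assertTrue(is_moblab())

    if __name__ == '__main__':
        unittest.main()
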
diff --git a/client/common_lib/lsbrelease_utils_unittest.py b/client/common_lib/lsbrelease_utils_unittest.py
index 4a2e977..6b0dbbd 100755
--- a/client/common_lib/lsbrelease_utils_unittest.py
+++ b/client/common_lib/lsbrelease_utils_unittest.py
@@ -1,10 +1,10 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Unittests for the lsbrelease_utils module."""
-
+import os
 import unittest
 
 import common
@@ -25,7 +25,7 @@
 CHROMEOS_RELEASE_TRACK=testimage-channel
 CHROMEOS_RELEASE_DESCRIPTION=9641.0.0 (Official Build) dev-channel guado_moblab test
 CHROMEOS_RELEASE_BUILD_TYPE=Official Build
-CHROMEOS_RELEASE_NAME=Chrome OS
+CHROMEOS_RELEASE_NAME=ChromeOS
 CHROMEOS_RELEASE_VERSION=9641.0.0
 CHROMEOS_AUSERVER=https://tools.google.com/service/update2
 """
@@ -43,7 +43,7 @@
 CHROMEOS_RELEASE_TRACK=testimage-channel
 CHROMEOS_RELEASE_DESCRIPTION=9641.0.0 (Official Build) dev-channel link test
 CHROMEOS_RELEASE_BUILD_TYPE=Official Build
-CHROMEOS_RELEASE_NAME=Chrome OS
+CHROMEOS_RELEASE_NAME=ChromeOS
 CHROMEOS_RELEASE_VERSION=9641.0.0
 CHROMEOS_AUSERVER=https://tools.google.com/service/update2
 """
@@ -62,7 +62,7 @@
 CHROMEOS_RELEASE_TRACK=testimage-channel
 CHROMEOS_RELEASE_DESCRIPTION=9641.0.0 (Official Build) dev-channel gale test
 CHROMEOS_RELEASE_BUILD_TYPE=Official Build
-CHROMEOS_RELEASE_NAME=Chrome OS
+CHROMEOS_RELEASE_NAME=ChromeOS
 CHROMEOS_RELEASE_VERSION=9641.0.0
 CHROMEOS_AUSERVER=https://tools.google.com/service/update2
 """
@@ -82,7 +82,7 @@
 CHROMEOS_RELEASE_TRACK=testimage-channel
 CHROMEOS_RELEASE_DESCRIPTION=9641.0.0 (Official Build) dev-channel whirlwind test
 CHROMEOS_RELEASE_BUILD_TYPE=Official Build
-CHROMEOS_RELEASE_NAME=Chrome OS
+CHROMEOS_RELEASE_NAME=ChromeOS
 CHROMEOS_RELEASE_VERSION=9641.0.0
 CHROMEOS_AUSERVER=https://tools.google.com/service/update2
 """
@@ -111,19 +111,17 @@
         self.assertTrue(lsbrelease_utils.is_jetstream(
             _WHIRLWIND_LSB_RELEASE_REDACTED))
 
-    def test_is_moblab_with_empty_lsbrelease(self):
-        """is_moblab correctly validates trivial lsb-release information."""
-        self.assertFalse(lsbrelease_utils.is_moblab(''))
-
-    def test_is_moblab_with_link_lsbrelease(self):
+    def test_is_moblab_with_lsbrelease(self):
         """is_moblab correctly validates the contents from some other board."""
-        self.assertFalse(lsbrelease_utils.is_moblab(
-                _LINK_LSB_RELEASE_REDACTED))
+        environ_store = os.environ
+        os.environ = {}
+        self.assertFalse(lsbrelease_utils.is_moblab())
+        os.environ = environ_store
 
     def test_is_moblab_with_moblab_lsbrelease(self):
         """is_moblab correctly validates the contents from a moblab device."""
-        self.assertTrue(lsbrelease_utils.is_moblab(
-                _GUADO_MOBLAB_LSB_RELEASE_REDACTED))
+        os.environ['MOBLAB'] = "1"
+        self.assertTrue(lsbrelease_utils.is_moblab())
 
     def test_get_chromeos_release_version(self):
         """Test helper function."""
diff --git a/client/common_lib/magic.py b/client/common_lib/magic.py
index 3dad00b..d954d4e 100755
--- a/client/common_lib/magic.py
+++ b/client/common_lib/magic.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 """
 Library used to determine a file MIME type by its magic number, it doesn't have
 any external dependencies. Based on work of Jason Petrone (jp_py@jsnp.net),
@@ -1053,9 +1053,9 @@
             return 'Data'
 
     # ASCII, do some text tests
-    if string.find('The', data, 0, 8192) > -1:
+    if data.find('The', 0, 8192) > -1:
         return 'English text'
-    if string.find('def', data, 0, 8192) > -1:
+    if data.find('def', 0, 8192) > -1:
         return 'Python Source'
     return 'ASCII text'
 
diff --git a/client/common_lib/mail_unittest.py b/client/common_lib/mail_unittest.py
deleted file mode 100755
index ba13c92..0000000
--- a/client/common_lib/mail_unittest.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/python2
-
-import unittest
-import mail, email
-
-class test_data:
-    mail_host = None
-    mail_port = None
-    mail_connect = False
-    mail_from_address = None
-    mail_to_address = None
-    mail_message = None
-
-
-# we define our needed mock SMTP
-class SMTP:
-    def __init__(self, host=None, port=25):
-        test_data.mail_host = host
-        test_data.mail_port = port
-
-        if test_data.mail_host:
-            self.connect(test_data.mail_host, test_data.mail_port)
-
-
-    def connect(self, host, port):
-        test_data.mail_connect = True
-
-
-    def quit(self):
-        test_data.mail_connect = False
-
-
-    def sendmail(self, from_address, to_address, message):
-        test_data.mail_from_address = from_address
-        test_data.mail_to_address = to_address
-        test_data.mail_message = message
-
-
-class mail_test(unittest.TestCase):
-    cached_SMTP = None
-
-    def setUp(self):
-        # now perform the slip
-        self.cached_SMTP = mail.smtplib.SMTP
-        mail.smtplib.SMTP = SMTP
-
-
-    def tearDown(self):
-        # now put things back
-        mail.smtplib.SMTP = self.cached_SMTP
-
-
-    def test_send_message(self):
-        message = email.Message.Message()
-        message["To"] = "you"
-        message["Cc"] = "them"
-        message["From"] = "me"
-        message["Subject"] = "hello"
-        message.set_payload("Hello everybody!")
-
-        mail.send("me", "you", "them", "hello", "Hello everybody!")
-        self.assertEquals("me", test_data.mail_from_address)
-        self.assertEquals(["you","them"], test_data.mail_to_address)
-        self.assertEquals(message.as_string(), test_data.mail_message)
-
-
-# this is so the test can be run in standalone mode
-if __name__ == '__main__':
-    unittest.main()
diff --git a/client/common_lib/packages.py b/client/common_lib/packages.py
index e1f623e..2f51825 100644
--- a/client/common_lib/packages.py
+++ b/client/common_lib/packages.py
@@ -342,7 +342,7 @@
 
     def repo_check(self, repo):
         '''
-        Check to make sure the repo is in a sane state:
+        Check to make sure the repo is in a valid state:
         ensure we have at least XX amount of free space
         Make sure we can write to the repo
         '''
diff --git a/client/common_lib/perf_expectations/expectation_checker.py b/client/common_lib/perf_expectations/expectation_checker.py
deleted file mode 100644
index e5dd9b6..0000000
--- a/client/common_lib/perf_expectations/expectation_checker.py
+++ /dev/null
@@ -1,146 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Automated performance regression detection tool for ChromeOS perf tests.
-
-   Refer to the instruction on how to use this tool at
-   https://sites.google.com/a/chromium.org/dev/perf-regression-detection.
-"""
-
-import logging
-import os
-import re
-
-import common
-from autotest_lib.client.common_lib import utils
-
-
-class TraceNotFound(RuntimeError):
-    """Catch the error when an expectation is not defined for a trace."""
-    pass
-
-
-def divide(x, y):
-    if y == 0:
-        return float('inf')
-    return float(x) / y
-
-
-class perf_expectation_checker(object):
-    """Check performance results against expectations."""
-
-    def __init__(self, test_name, board=None,
-                 expectation_file_path=None):
-        """Initialize a perf expectation checker.
-
-           @param test_name: the name of the performance test,
-               will be used to load the expectation.
-           @param board: an alternative board name, will be used
-               to load the expectation. Defaults to the board name
-               in /etc/lsb-release.
-           @expectation_file_path: an alternative expectation file.
-               Defaults to perf_expectations.json under the same folder
-               of this file.
-        """
-        self._expectations = {}
-        if expectation_file_path:
-            self._expectation_file_path = expectation_file_path
-        else:
-            self._expectation_file_path = os.path.abspath(
-                os.path.join(os.path.dirname(__file__),
-                    'perf_expectations.json'))
-        self._board = board or utils.get_current_board()
-        self._test_name = test_name
-        assert self._board, 'Failed to get board name.'
-        assert self._test_name, (
-               'You must specify a test name when initialize'
-               ' perf_expectation_checker.')
-        self._load_perf_expectations_file()
-
-    def _load_perf_expectations_file(self):
-        """Load perf expectation file."""
-        try:
-            expectation_file = open(self._expectation_file_path)
-        except IOError, e:
-            logging.error('I/O Error reading expectations %s(%s): %s',
-                          self._expectation_file_path, e.errno, e.strerror)
-            raise e
-        # Must import here to make it work with autotest.
-        import json
-        try:
-            self._expectations = json.load(expectation_file)
-        except ValueError, e:
-            logging.error('ValueError parsing expectations %s(%s): %s',
-                          self._expectation_file_path, e.errno, e.strerror)
-            raise e
-        finally:
-            expectation_file.close()
-
-        if not self._expectations:
-            # Will skip checking the perf values against expectations
-            # when no expecation is defined.
-            logging.info('No expectation data found in %s.',
-                         self._expectation_file_path)
-            return
-
-    def compare_one_trace(self, trace, trace_perf_value):
-        """Compare a performance value of a trace with the expectation.
-
-        @param trace: the name of the trace
-        @param trace_perf_value: the performance value of the trace.
-        @return a tuple like one of the below
-            ('regress', 2.3), ('improve', 3.2), ('accept', None)
-            where the float numbers are regress/improve ratios,
-            or None if expectation for trace is not defined.
-        """
-        perf_key = '/'.join([self._board, self._test_name, trace])
-        if perf_key not in self._expectations:
-            raise TraceNotFound('Expectation for trace %s not defined' % trace)
-        perf_data = self._expectations[perf_key]
-        regress = float(perf_data['regress'])
-        improve = float(perf_data['improve'])
-        if (('better' in perf_data and perf_data['better'] == 'lower') or
-            ('better' not in perf_data and regress > improve)):
-            # The "lower is better" case.
-            if trace_perf_value < improve:
-                ratio = 1 - divide(trace_perf_value, improve)
-                return 'improve', ratio
-            elif trace_perf_value > regress:
-                ratio = divide(trace_perf_value, regress) - 1
-                return 'regress', ratio
-        else:
-            # The "higher is better" case.
-            if trace_perf_value > improve:
-                ratio = divide(trace_perf_value, improve) - 1
-                return 'improve', ratio
-            elif trace_perf_value < regress:
-                ratio = 1 - divide(trace_perf_value, regress)
-                return 'regress', ratio
-        return 'accept', None
-
-    def compare_multiple_traces(self, perf_results):
-        """Compare multiple traces with corresponding expectations.
-
-        @param perf_results: a dictionary from trace name to value in float,
-            e.g {"milliseconds_NewTabCalendar": 1231.000000
-                 "milliseconds_NewTabDocs": 889.000000}.
-
-        @return a dictionary of regressions, improvements, and acceptances
-            of the format below:
-            {'regress': [('trace_1', 2.35), ('trace_2', 2.83)...],
-             'improve': [('trace_3', 2.55), ('trace_3', 52.33)...],
-             'accept':  ['trace_4', 'trace_5'...]}
-            where the float number is the regress/improve ratio.
-        """
-        ret_val = {'regress':[], 'improve':[], 'accept':[]}
-        for trace in perf_results:
-            try:
-                # (key, ratio) is like ('regress', 2.83)
-                key, ratio = self.compare_one_trace(trace, perf_results[trace])
-                ret_val[key].append((trace, ratio))
-            except TraceNotFound:
-                logging.debug(
-                    'Skip checking %s/%s/%s, expectation not defined.',
-                    self._board, self._test_name, trace)
-        return ret_val
diff --git a/client/common_lib/perf_expectations/perf_expectations.json b/client/common_lib/perf_expectations/perf_expectations.json
deleted file mode 100644
index 8c06d13..0000000
--- a/client/common_lib/perf_expectations/perf_expectations.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-"stumpy/desktopui_PyAutoPerfTests/FPS_ScrollBlankPage": {"improve": "415.564", "regress": "307.156", "better":"higher"},
-"stumpy/desktopui_PyAutoPerfTests/FPS_ScrollTextPage": {"improve": "406.1455", "regress": "300.1945", "better":"higher"},
-"stumpy/desktopui_PyAutoPerfTests/FPS_ScrollGooglePlusPage": {"improve": "51.382", "regress": "37.978", "better":"higher"},
-"stumpy/desktopui_PyAutoPerfTests/FPS_ScrollGmail": {"improve": "30.015", "regress": "22.185", "better":"higher"}
-}
diff --git a/client/common_lib/perf_expectations/perf_expectations_test.json b/client/common_lib/perf_expectations/perf_expectations_test.json
deleted file mode 100644
index 7cb0360..0000000
--- a/client/common_lib/perf_expectations/perf_expectations_test.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-"stumpy/desktopui_PyAutoPerfTests/milliseconds_NewTabCalendar": {"improve": "1230.000000", "regress": "1248.000000", "better":"lower"},
-"stumpy/desktopui_PyAutoPerfTests/milliseconds_NewTabCalendar": {"improve": "870.000000", "regress": "880.000000", "better":"lower"},
-"stumpy/test_1/higher_is_better_trace": {"improve": "200.0", "regress": "100.0", "better":"higher"},
-"stumpy/test_1/lower_is_better_trace": {"improve": "100.0", "regress": "200.0", "better":"lower"}
-}
diff --git a/client/common_lib/pexpect.py b/client/common_lib/pexpect.py
index 63a77bd..247f566 100644
--- a/client/common_lib/pexpect.py
+++ b/client/common_lib/pexpect.py
@@ -429,7 +429,7 @@
             self.use_native_pty_fork = True
 
 
-        # allow dummy instances for subclasses that may not use command or args.
+        # allow stub instances for subclasses that may not use command or args.
         if command is None:
             self.command = None
             self.args = None
diff --git a/client/common_lib/profiler_manager_unittest.py b/client/common_lib/profiler_manager_unittest.py
index 7c5b2b4..f2e9ab5 100755
--- a/client/common_lib/profiler_manager_unittest.py
+++ b/client/common_lib/profiler_manager_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import unittest
 import common
diff --git a/client/common_lib/pxssh.py b/client/common_lib/pxssh.py
index d289afd..eab0aef 100644
--- a/client/common_lib/pxssh.py
+++ b/client/common_lib/pxssh.py
@@ -233,7 +233,7 @@
         elif i==3: # permission denied -- password was bad.
             self.close()
             raise ExceptionPxssh ('permission denied')
-        elif i==4: # terminal type again? WTF?
+        elif i == 4:  # terminal type again?
             self.close()
             raise ExceptionPxssh ('Weird error. Got "terminal type" prompt twice.')
         elif i==5: # Timeout
diff --git a/client/common_lib/revision_control_unittest.py b/client/common_lib/revision_control_unittest.py
index 5aaef01..a1a9f9b 100755
--- a/client/common_lib/revision_control_unittest.py
+++ b/client/common_lib/revision_control_unittest.py
@@ -1,9 +1,14 @@
-#!/usr/bin/python2
-import logging, mox, os, shutil, tempfile, unittest, utils
+import logging
+import os
+import shutil
+import tempfile
+import unittest
 
 # This makes autotest_lib imports available.
 import common
+
 from autotest_lib.client.common_lib import revision_control
+from autotest_lib.client.common_lib import utils
 
 
 class GitRepoManager(object):
@@ -112,14 +117,14 @@
         If the dependent repo is empty pull from main.
         """
         # TODO b:169251326 terms below are set outside of this codebase
-        # and should be updated when possible. ("master" -> "main")
+        # and should be updated when possible. ("master" -> "main") # nocheck
         # Currently (but I believe it will eventually) does not support
-        # `reset --hard origin/main` (must be origin/master).
-        self.git_repo_manager.reinit_repo_at('master')
+        # `reset --hard origin/main` (must be origin/master). # nocheck
+        self.git_repo_manager.reinit_repo_at('master')  # nocheck
         self.commit_hash = self.git_repo_manager.get_latest_commit_hash()
 
 
-class RevisionControlUnittest(mox.MoxTestBase):
+class RevisionControlUnittest(unittest.TestCase):
     """
     A unittest to exercise build_externals.py's usage
     of revision_control.py's Git wrappers.
@@ -199,4 +204,4 @@
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()
diff --git a/client/common_lib/seven.py b/client/common_lib/seven.py
index 13e10dc..75eb7b6 100644
--- a/client/common_lib/seven.py
+++ b/client/common_lib/seven.py
@@ -64,15 +64,15 @@
     return six.exec_(code_obj, globals_, locals_)
 
 
-def config_parser():
+def config_parser(args=None):
     """config_parser returns a non-strict config parser.
 
     Unfortunately, in six configparser is not same between 2/3. For our .ini's
     we do not want it to be strict (ie, error upon duplicates).
     """
     if six.PY3:
-        return six.moves.configparser.ConfigParser(strict=False)
-    return six.moves.configparser.ConfigParser()
+        return six.moves.configparser.ConfigParser(args, strict=False)
+    return six.moves.configparser.ConfigParser(args)
 
 
 def ensure_text(s, encoding='utf-8', errors='strict'):
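
The strict=False passed above matters because, as the docstring notes, the project's .ini files may contain duplicate options. A tiny sketch of the difference on Python 3 (standard library configparser only):

    import configparser

    ini = "[SECTION]\nvalue: 1\nvalue: 2\n"

    strict = configparser.ConfigParser()  # strict=True is the default
    try:
        strict.read_string(ini)
    except configparser.DuplicateOptionError as e:
        print('strict parser rejects duplicates:', e)

    lenient = configparser.ConfigParser(strict=False)
    lenient.read_string(ini)
    print(lenient.get('SECTION', 'value'))  # 2 (the last value wins)
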
diff --git a/client/common_lib/seven_unittest.py b/client/common_lib/seven_unittest.py
index d66473a..c53a15c 100755
--- a/client/common_lib/seven_unittest.py
+++ b/client/common_lib/seven_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2020 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -11,10 +11,12 @@
 import tempfile
 import unittest
 
-import seven
+from autotest_lib.client.common_lib import seven
 
 
 class TestExecCompileFile(unittest.TestCase):
+    """Unittests for Seven helpers."""
+
     def _remove_tempfile(self):
         if hasattr(self, "tempfile"):
             try:
diff --git a/client/common_lib/smogcheck_ina219.py b/client/common_lib/smogcheck_ina219.py
index c0985b8..c7ac526 100644
--- a/client/common_lib/smogcheck_ina219.py
+++ b/client/common_lib/smogcheck_ina219.py
@@ -127,7 +127,7 @@
           measure: a string, 'current' or 'voltage'.
 
         Returns:
-          a float, measurement in native units. Or None if error.
+          a float, the measurement value. Or None if error.
 
         Raises:
           InaError: if error reading requested measurement.
diff --git a/client/common_lib/smogcheck_ttci.py b/client/common_lib/smogcheck_ttci.py
index 49f5326..c7dfe56 100644
--- a/client/common_lib/smogcheck_ttci.py
+++ b/client/common_lib/smogcheck_ttci.py
@@ -26,7 +26,7 @@
 from autotest_lib.client.common_lib import smogcheck_ina219, smogcheck_pca9555
 
 
-# I2C slave addresses of INA219 module
+# I2C follower addresses of INA219 module
 INA219_BPWR_SLV = 0x40  # Backup Power
 INA219_MPWR_SLV = 0x44  # Main Power
 
diff --git a/client/common_lib/software_manager.py b/client/common_lib/software_manager.py
index 5d40550..7631c7d 100755
--- a/client/common_lib/software_manager.py
+++ b/client/common_lib/software_manager.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 """
 Software package management library.
 
diff --git a/client/common_lib/test.py b/client/common_lib/test.py
index 5898e62..b7b0cef 100644
--- a/client/common_lib/test.py
+++ b/client/common_lib/test.py
@@ -44,7 +44,7 @@
 from autotest_lib.client.common_lib import utils as client_utils
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = client_utils.metrics_mock
 
@@ -88,6 +88,8 @@
         # Flag to indicate if the test has succeeded or failed.
         self.success = False
 
+        # Flag to indicate if full test results should be gathered when the
+        # test passes.
+        self.collect_full_logs = False
 
     def configure_crash_handler(self):
         pass
@@ -895,10 +897,12 @@
 
     try:
         mytest = global_namespace['mytest']
+        if hasattr(job, 'force_full_log_collection'):
+            mytest.force_full_log_collection = job.force_full_log_collection
         if override_test_in_prog_file:
             mytest.test_in_prog_file = override_test_in_prog_file
         mytest.success = False
-        if not job.fast and before_test_hook:
+        if before_test_hook:
             logging.info('Starting before_hook for %s', mytest.tagged_testname)
             with metrics.SecondsTimer(
                     'chromeos/autotest/job/before_hook_duration'):
@@ -915,7 +919,7 @@
         mytest.success = True
     finally:
         os.chdir(pwd)
-        if after_test_hook and (not mytest.success or not job.fast):
+        if after_test_hook:
             logging.info('Starting after_hook for %s', mytest.tagged_testname)
             with metrics.SecondsTimer(
                     'chromeos/autotest/job/after_hook_duration'):
diff --git a/client/common_lib/test_unittest.py b/client/common_lib/test_unittest.py
index 8f3ef9f..044d402 100755
--- a/client/common_lib/test_unittest.py
+++ b/client/common_lib/test_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #pylint: disable-msg=C0111
 """Unit Tests for autotest.client.common_lib.test"""
 
@@ -15,6 +15,7 @@
 import mock as pymock
 import os
 import shutil
+import six
 from six.moves import range
 
 from autotest_lib.client.common_lib import test
@@ -127,7 +128,10 @@
         self.test.postprocess.expect_call()
         self.test.process_failed_constraints.expect_call()
 
-        fake_time = iter(range(4)).next
+        if six.PY2:
+            fake_time = iter(range(4)).next
+        else:
+            fake_time = iter(range(4)).__next__
         self.test.execute(iterations=1, test_length=3, _get_time=fake_time)
         self.god.check_playback()
 
diff --git a/client/common_lib/perf_expectations/common.py b/client/common_lib/test_utils/common.py
similarity index 100%
rename from client/common_lib/perf_expectations/common.py
rename to client/common_lib/test_utils/common.py
diff --git a/client/common_lib/test_utils/comparators.py b/client/common_lib/test_utils/comparators.py
new file mode 100644
index 0000000..b7ae9ea
--- /dev/null
+++ b/client/common_lib/test_utils/comparators.py
@@ -0,0 +1,71 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Comparators that be used inplace of mox.<comparator>."""
+
+
+class IsA():
+    """Helper class to check whether a class is an instance of another class.
+
+    Class to help replace mox.IsA. Defines the __eq__ and equals.
+    Use to compare to str to see if the other string contains this substr.
+    Example:
+        foo = IsA(host)
+        print(host == foo)
+        >>> True
+    """
+
+    def __init__(self, arg):
+        self.arg = arg
+
+    def __eq__(self, other):
+        return self.arg == other
+
+    def equals(self, other):
+        """Wrapper for __eq__."""
+        return self.__eq__(other)
+
+
+class Substrings:
+    """Class for to simplify multiple substring checks."""
+
+    def __init__(self, substrings):
+        self._substrings = substrings
+
+    def __eq__(self, rhs):
+        """Return true iff all of _substrings are in the other string."""
+        if not isinstance(rhs, str):
+            return False
+        return all(substr in rhs for substr in self._substrings)
+
+
+class Substring:
+    """Helper class to check whether a substring exists in a string parameter.
+
+    Class to help replace mox.StrContains. Defines the __eq__ and equals.
+    Use to compare to str to see if the other string contains this substr.
+    Example:
+        foo = Substring("foobar")
+        print(foo == "foobarfizzbuzz")
+        >>> True
+        print(foo == "fizzfoobarbuzz")
+        >>> True
+        print(foo == "barfoofizzbuzz")
+        >>> False
+    """
+
+    def __init__(self, _substr):
+        if not isinstance(_substr, str):
+            raise TypeError("Substring must be of type str")
+
+        self._substr = _substr
+
+    def __eq__(self, rhs):
+        if not isinstance(rhs, str):
+            return False
+        return self._substr in str(rhs)
+
+    def equals(self, rhs):
+        """Wrapper for __eq__."""
+        return self.__eq__(rhs)
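
Because the comparators implement __eq__, they can stand in for exact arguments in unittest.mock assertions much like the mox versions did. A brief usage sketch, assuming the autotest_lib import path is set up as in comparators_unittest.py below (the run() call and its argument are made up):

    from unittest import mock

    from autotest_lib.client.common_lib.test_utils import comparators

    runner = mock.Mock()
    runner.run('ssh host.name.cros uptime')

    # Passes as long as the recorded argument contains every substring.
    runner.run.assert_called_with(
            comparators.Substrings(['ssh', 'host.name', 'uptime']))
    # Passes as long as the recorded argument contains the one substring.
    runner.run.assert_called_with(comparators.Substring('uptime'))
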
diff --git a/client/common_lib/test_utils/comparators_unittest.py b/client/common_lib/test_utils/comparators_unittest.py
new file mode 100644
index 0000000..2a22718
--- /dev/null
+++ b/client/common_lib/test_utils/comparators_unittest.py
@@ -0,0 +1,46 @@
+#!/usr/bin/python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Unit tests for Comparators."""
+
+import unittest
+
+import common
+
+from autotest_lib.client.common_lib.test_utils import comparators
+
+
+class TestComparators(unittest.TestCase):
+    """Unittests for Seven comparator helpers."""
+
+    def testIsA(self):
+        class MockedClass(object):
+            pass
+
+        class FooClass(object):
+            pass
+
+        foo = comparators.IsA(MockedClass)
+        self.assertTrue(foo == MockedClass)
+        self.assertFalse(foo == FooClass)
+
+    def testAnyStringWith(self):
+        """Test AnyStringWith evaluates properly."""
+        f = comparators.Substring("foobar")
+        self.assertTrue(f == "foobarfizz")
+        self.assertTrue("foobarfizz" == f)
+        self.assertTrue(f == "fizzfoobar")
+        self.assertTrue(f == "foobar")
+        self.assertFalse(f == "fizzfoobuzzbar")
+        self.assertFalse(f == "foo")
+
+    def testAndComparitor(self):
+        """Test AndComparator evaluates properly."""
+        foo = comparators.Substrings(["host", "name", "cros"])
+        self.assertTrue(foo == "a.host.name.cros")
+        self.assertFalse(foo == "a.host.cros")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/client/common_lib/test_utils/functools_24.py b/client/common_lib/test_utils/functools_24.py
index 05d93a4..585d1ac 100644
--- a/client/common_lib/test_utils/functools_24.py
+++ b/client/common_lib/test_utils/functools_24.py
@@ -31,7 +31,7 @@
 def compose(*args):
 
     if len(args) < 1:
-        raise TypeError, 'compose expects at least one argument'
+        raise TypeError('compose expects at least one argument')
     fs = args[-2::-1]
     g = args[-1]
 
@@ -48,7 +48,7 @@
     try:
         fun = sargs[0]
     except IndexError:
-        raise TypeError, 'fastcut requires at least one argument'
+        raise TypeError('fastcut requires at least one argument')
     sargs = sargs[1:]
 
     def fastcutcall(*args, **kw):
diff --git a/client/common_lib/test_utils/mock.py b/client/common_lib/test_utils/mock.py
index 3952db7..7c4adba 100644
--- a/client/common_lib/test_utils/mock.py
+++ b/client/common_lib/test_utils/mock.py
@@ -1,7 +1,14 @@
+# Lint as: python2, python3
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 __author__ = "raphtee@google.com (Travis Miller)"
 
 
-import re, collections, StringIO, sys, unittest
+import re, collections, sys, unittest
+
+import six
+from six.moves import zip
 
 
 class StubNotFoundError(Exception):
@@ -14,7 +21,7 @@
     pass
 
 
-class SaveDataAfterCloseStringIO(StringIO.StringIO):
+class SaveDataAfterCloseStringIO(six.StringIO):
     """Saves the contents in a final_data property when close() is called.
 
     Useful as a mock output file object to test both that the file was
@@ -24,13 +31,46 @@
       final_data: Set to the StringIO's getvalue() data when close() is
           called.  None if close() has not been called.
     """
+
     final_data = None
 
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        self.close()
+
     def close(self):
         self.final_data = self.getvalue()
-        StringIO.StringIO.close(self)
+        six.StringIO.close(self)
 
 
+class SaveDataAfterCloseBytesIO(six.BytesIO):
+    """Saves the contents in a final_data property when close() is called.
+
+    Useful as a mock output file object to test both that the file was
+    closed and what was written.
+
+    Properties:
+      final_data: Set to the BytesIO's getvalue() data when close() is
+          called.  None if close() has not been called.
+    """
+    final_data = None
+
+
+    def __enter__(self):
+        return self
+
+
+    def __exit__(self, type, value, traceback):
+        self.close()
+
+
+    def close(self):
+        self.final_data = self.getvalue()
+        six.BytesIO.close(self)
+
 
 class argument_comparator(object):
     def is_satisfied_by(self, parameter):
@@ -44,7 +84,8 @@
 
     @staticmethod
     def _types_match(arg1, arg2):
-        if isinstance(arg1, basestring) and isinstance(arg2, basestring):
+        if isinstance(arg1, six.string_types) and isinstance(
+                arg2, six.string_types):
             return True
         return type(arg1) == type(arg2)
 
@@ -68,7 +109,7 @@
             if not cls._compare(sorted(actual_arg.keys()),
                                 sorted(expected_arg.keys())):
                 return False
-            for key, value in actual_arg.iteritems():
+            for key, value in six.iteritems(actual_arg):
                 if not cls._compare(value, expected_arg[key]):
                     return False
         elif actual_arg != expected_arg:
@@ -102,7 +143,7 @@
 
 class is_string_comparator(argument_comparator):
     def is_satisfied_by(self, parameter):
-        return isinstance(parameter, basestring)
+        return isinstance(parameter, six.string_types)
 
 
     def __str__(self):
@@ -137,7 +178,7 @@
         self.symbol = symbol
         self.args = [equality_comparator(arg) for arg in args]
         self.dargs = dict((key, equality_comparator(value))
-                          for key, value in dargs.iteritems())
+                          for key, value in six.iteritems(dargs))
         self.error = None
 
 
@@ -150,14 +191,14 @@
                 return False
 
         # check for incorrect dargs
-        for key, value in dargs.iteritems():
+        for key, value in six.iteritems(dargs):
             if key not in self.dargs:
                 return False
             if not self.dargs[key].is_satisfied_by(value):
                 return False
 
         # check for missing dargs
-        for key in self.dargs.iterkeys():
+        for key in six.iterkeys(self.dargs):
             if key not in dargs:
                 return False
 
@@ -294,6 +335,21 @@
         playback = self.__method_playback
         errors = self.errors
 
+
+        class RecordingMockMeta(type):
+            """Metaclass to override default class invocation behavior.
+
+            Normally, calling a class like a function creates and initializes an
+            instance of that class. This metaclass causes class invocation to
+            have no side effects and to return nothing, instead recording the
+            call in the mock_god object to be inspected or asserted against as a
+            part of a test.
+            """
+            def __call__(self, *args, **kwargs):
+                return playback(name, *args, **kwargs)
+
+
+        @six.add_metaclass(RecordingMockMeta)
         class cls_sub(cls):
             cls_count = 0
 
@@ -310,14 +366,9 @@
                 return obj
 
 
-            def __new__(typ, *args, **dargs):
-                return playback(name, *args, **dargs)
-
-
             @classmethod
             def make_new(typ, *args, **dargs):
-                obj = super(cls_sub, typ).__new__(typ, *args,
-                                                  **dargs)
+                obj = super(cls_sub, typ).__new__(typ, *args, **dargs)
 
                 typ.cls_count += 1
                 obj_name = "%s_%s" % (name, typ.cls_count)
@@ -461,8 +512,9 @@
 
     def __method_playback(self, symbol, *args, **dargs):
         if self._debug:
-            print >> sys.__stdout__, (' * Mock call: ' +
-                                      _dump_function_call(symbol, args, dargs))
+            print((' * Mock call: ' +
+                   _dump_function_call(symbol, args, dargs)),
+                  file=sys.__stdout__)
 
         if len(self.recording) != 0:
             func_call = self.recording[0]
@@ -499,7 +551,7 @@
 
     def _append_error(self, error):
         if self._debug:
-            print >> sys.__stdout__, ' *** ' + error
+            print(' *** ' + error, file=sys.__stdout__)
         if self._fail_fast:
             raise CheckPlaybackError(error)
         self.errors.append(error)
@@ -512,9 +564,9 @@
         """
         if len(self.errors) > 0:
             if self._debug:
-                print '\nPlayback errors:'
+                print('\nPlayback errors:')
             for error in self.errors:
-                print >> sys.__stdout__, error
+                print(error, file=sys.__stdout__)
 
             if self._ut:
                 self._ut.fail('\n'.join(self.errors))
@@ -525,7 +577,7 @@
             for func_call in self.recording:
                 error = "%s not called" % (func_call,)
                 errors.append(error)
-                print >> sys.__stdout__, error
+                print(error, file=sys.__stdout__)
 
             if self._ut:
                 self._ut.fail('\n'.join(errors))
@@ -539,8 +591,8 @@
         self.orig_stdout = sys.stdout
         self.orig_stderr = sys.stderr
 
-        self.mock_streams_stdout = StringIO.StringIO('')
-        self.mock_streams_stderr = StringIO.StringIO('')
+        self.mock_streams_stdout = six.StringIO('')
+        self.mock_streams_stderr = six.StringIO('')
 
         sys.stdout = self.mock_streams_stdout
         sys.stderr = self.mock_streams_stderr
@@ -569,6 +621,6 @@
     arg_vec = []
     for arg in args:
         arg_vec.append(_arg_to_str(arg))
-    for key, val in dargs.iteritems():
+    for key, val in six.iteritems(dargs):
         arg_vec.append("%s=%s" % (key, _arg_to_str(val)))
     return "%s(%s)" % (symbol, ', '.join(arg_vec))
diff --git a/client/common_lib/test_utils/mock_demo.py b/client/common_lib/test_utils/mock_demo.py
index 71c27c9..65e808b 100755
--- a/client/common_lib/test_utils/mock_demo.py
+++ b/client/common_lib/test_utils/mock_demo.py
@@ -1,5 +1,7 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
+from __future__ import division
+from __future__ import print_function
 __author__ = "raphtee@google.com (Travis Miller)"
 
 import mock, mock_demo_MUT
@@ -51,34 +53,34 @@
 
 # say we want to test that do_stuff is doing what we think it is doing
 def do_stuff(a, b, func):
-    print b.method1()
-    print b.method3(10)
-    print func("how many")
-    print a.method2(5)
-    print b.method1()
-    print b.method4(1, 4)
-    print b.method2(3)
-    print b.method2("hello")
+    print(b.method1())
+    print(b.method3(10))
+    print(func("how many"))
+    print(a.method2(5))
+    print(b.method1())
+    print(b.method4(1, 4))
+    print(b.method2(3))
+    print(b.method2("hello"))
 
 
 def do_more_stuff(d):
-    print d.method6(False)
+    print(d.method6(False))
     try:
         d.method6(True)
     except:
-        print "caught error"
+        print("caught error")
 
 
 def main():
     god = mock.mock_god()
 
     m1 = god.create_mock_class(A, "A")
-    print m1.var
+    print(m1.var)
     m2 = god.create_mock_class(B, "B")
     f = god.create_mock_function("func")
 
-    print dir(m1)
-    print dir(m2)
+    print(dir(m1))
+    print(dir(m2))
 
     # sets up the "recording"
     m2.method1.expect_call().and_return(1)
@@ -92,7 +94,7 @@
 
     # check the recording order
     for func_call in god.recording:
-        print func_call
+        print(func_call)
 
     # once we start making calls into the methods we are in
     # playback mode
@@ -115,7 +117,7 @@
     answer = c.method5.run_original_function()
 
     # check playback
-    print "answer = %s" % (answer)
+    print("answer = %s" % (answer))
     god.check_playback()
 
     # check exception returns too
diff --git a/client/common_lib/test_utils/mock_demo_MUT.py b/client/common_lib/test_utils/mock_demo_MUT.py
index b2fde77..daf7d1d 100644
--- a/client/common_lib/test_utils/mock_demo_MUT.py
+++ b/client/common_lib/test_utils/mock_demo_MUT.py
@@ -1,5 +1,9 @@
+# Lint as: python2, python3
+
+from __future__ import division
+from __future__ import print_function
 from mock_demo import E
 
 def do_create_stuff():
     obj = E(val=7)
-    print obj.method1()
+    print(obj.method1())
diff --git a/client/common_lib/time_utils.py b/client/common_lib/time_utils.py
index 9ea84cd..8d187da 100644
--- a/client/common_lib/time_utils.py
+++ b/client/common_lib/time_utils.py
@@ -13,20 +13,6 @@
 import six
 import time
 
-from autotest_lib.client.common_lib import decorators
-
-
-try:
-    import pytz
-except ImportError:
-    pytz = None
-
-
-try:
-    import tzlocal
-except ImportError:
-    tzlocal = None
-
 
 # This format is used to parse datetime value in MySQL database and should not
 # be modified.
@@ -106,22 +92,3 @@
         raise ValueError('Value should be a datetime object, string or a '
                          'number. Unexpected value: %s.' % value)
     return value
-
-
-@decorators.test_module_available(pytz, raise_error=True)
-@decorators.test_module_available(tzlocal, raise_error=True)
-def to_utc_timestamp(datetime_val):
-    """Transforms a datetime object into a utc timestamp.
-
-    @param datetime_val: A datetime timestamp.
-
-    @returns A datetime as a UTC floating point timestamp in seconds since
-             epoch.
-    """
-    if datetime_val is None:
-        return None
-
-    epoch = datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)
-    local_datetime = datetime_val.replace(tzinfo=tzlocal.get_localzone())
-    utc_datetime = local_datetime.astimezone(tz=pytz.utc)
-    return (utc_datetime - epoch).total_seconds()
diff --git a/client/common_lib/ui_utils.py b/client/common_lib/ui_utils.py
index 0054a68..5bc6b4f 100644
--- a/client/common_lib/ui_utils.py
+++ b/client/common_lib/ui_utils.py
@@ -43,6 +43,9 @@
                 })
             })'''
 
+    _GET_ON_SCREEN_ITEMS = "findAll({attributes:{role: 'staticText'},state:{" \
+                           "offscreen: false}}).map(node => node.name)"
+
     def __init__(self):
         self.screenshoter = UIScreenshoter()
 
diff --git a/client/common_lib/utils.py b/client/common_lib/utils.py
index 8df0cf2..b5f1987 100644
--- a/client/common_lib/utils.py
+++ b/client/common_lib/utils.py
@@ -20,7 +20,7 @@
 import datetime
 import errno
 import inspect
-import itertools
+import json
 import logging
 import os
 import pickle
@@ -316,7 +316,9 @@
 
         data = os.read(pipe.fileno(), 1024)
         if isinstance(data, bytes) and six.PY3:
-            return data.decode()
+            # On rare occasion, an invalid byte will be read, causing this to
+            # crash. Ignoring these errors seems like the best option for now.
+            return data.decode(errors='ignore')
         return data
 
     def cleanup(self):
@@ -431,12 +433,13 @@
     open_write_close(filename, str(line).rstrip('\n') + '\n')
 
 
-def open_write_close(filename, data):
-    f = open(filename, 'w')
-    try:
+def open_write_close(filename, data, is_binary=False):
+    open_mode = 'w'
+    if is_binary:
+        open_mode = 'wb'
+
+    with open(filename, open_mode) as f:
         f.write(data)
-    finally:
-        f.close()
 
 
 def locate_file(path, base_dir=None):
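A small usage sketch (outside the diff, file paths hypothetical) of the reworked open_write_close: the default mode stays 'w' for text, and is_binary=True switches to 'wb' for bytes.

from autotest_lib.client.common_lib import utils

utils.open_write_close('/tmp/example.txt', 'one line of text\n')
utils.open_write_close('/tmp/example.bin', b'\x00\x01\xff', is_binary=True)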
@@ -462,49 +465,6 @@
     return path
 
 
-def matrix_to_string(matrix, header=None):
-    """
-    Return a pretty, aligned string representation of a nxm matrix.
-
-    This representation can be used to print any tabular data, such as
-    database results. It works by scanning the lengths of each element
-    in each column, and determining the format string dynamically.
-
-    @param matrix: Matrix representation (list with n rows of m elements).
-    @param header: Optional tuple or list with header elements to be displayed.
-    """
-    if type(header) is list:
-        header = tuple(header)
-    lengths = []
-    if header:
-        for column in header:
-            lengths.append(len(column))
-    for row in matrix:
-        for i, column in enumerate(row):
-            column = six.ensure_binary(six.text_type(column), "utf-8")
-            cl = len(column)
-            try:
-                ml = lengths[i]
-                if cl > ml:
-                    lengths[i] = cl
-            except IndexError:
-                lengths.append(cl)
-
-    lengths = tuple(lengths)
-    format_string = ""
-    for length in lengths:
-        format_string += "%-" + str(length) + "s "
-    format_string += "\n"
-
-    matrix_str = ""
-    if header:
-        matrix_str += format_string % header
-    for row in matrix:
-        matrix_str += format_string % tuple(row)
-
-    return matrix_str
-
-
 def read_keyval(path, type_tag=None):
     """
     Read a key-value pair format file into a dictionary, and return it.
@@ -698,8 +658,8 @@
     versionfile = os.path.join(srcdir, '.version')
     install_needed = True
 
-    if os.path.exists(versionfile):
-        old_version = pickle.load(open(versionfile))
+    if os.path.exists(versionfile) and os.path.getsize(versionfile) > 0:
+        old_version = pickle.load(open(versionfile, 'rb'))
         if old_version == new_version:
             install_needed = False
 
@@ -708,7 +668,7 @@
             shutil.rmtree(srcdir)
         install(*args, **dargs)
         if os.path.exists(srcdir):
-            pickle.dump(new_version, open(versionfile, 'w'))
+            pickle.dump(new_version, open(versionfile, 'wb'))
 
 
 def get_stderr_level(stderr_is_expected, stdout_level=DEFAULT_STDOUT_LEVEL):
@@ -921,7 +881,7 @@
             read_ready, write_ready, _ = select.select(read_list, write_list,
                                                        [], SELECT_TIMEOUT)
         except select.error as v:
-            if v[0] == errno.EINTR:
+            if v.args[0] == errno.EINTR:
                 logging.warning(v)
                 continue
             else:
@@ -936,7 +896,10 @@
             # we can write PIPE_BUF bytes without blocking
             # POSIX requires PIPE_BUF is >= 512
             bg_job = reverse_dict[file_obj]
-            file_obj.write(bg_job.string_stdin[:512])
+            string_stdin = bg_job.string_stdin[:512]
+            if isinstance(string_stdin, six.text_type):
+                string_stdin = string_stdin.encode('utf-8', 'strict')
+            file_obj.write(string_stdin)
             bg_job.string_stdin = bg_job.string_stdin[512:]
             # no more input data, close stdin, remove it from the select set
             if not bg_job.string_stdin:
@@ -1406,16 +1369,16 @@
             fn(*args, **dargs)
 
 
-def import_site_module(path, module, dummy=None, modulefile=None):
+def import_site_module(path, module, placeholder=None, modulefile=None):
     """
     Try to import the site specific module if it exists.
 
     @param path full filename of the source file calling this (ie __file__)
     @param module full module name
-    @param dummy dummy value to return in case there is no symbol to import
+    @param placeholder value to return in case there is no symbol to import
     @param modulefile module filename
 
-    @return site specific module or dummy
+    @return site specific module or placeholder
 
     @raises ImportError if the site file exists but imports fails
     """
@@ -1426,33 +1389,33 @@
 
     if os.path.exists(os.path.join(os.path.dirname(path), modulefile)):
         return __import__(module, {}, {}, [short_module])
-    return dummy
+    return placeholder
 
 
-def import_site_symbol(path, module, name, dummy=None, modulefile=None):
+def import_site_symbol(path, module, name, placeholder=None, modulefile=None):
     """
     Try to import site specific symbol from site specific file if it exists
 
     @param path full filename of the source file calling this (ie __file__)
     @param module full module name
     @param name symbol name to be imported from the site file
-    @param dummy dummy value to return in case there is no symbol to import
+    @param placeholder value to return in case there is no symbol to import
     @param modulefile module filename
 
-    @return site specific symbol or dummy
+    @return site specific symbol or placeholder
 
     @raises ImportError if the site file exists but imports fails
     """
     module = import_site_module(path, module, modulefile=modulefile)
     if not module:
-        return dummy
+        return placeholder
 
     # special unique value to tell us if the symbol can't be imported
     cant_import = object()
 
     obj = getattr(module, name, cant_import)
     if obj is cant_import:
-        return dummy
+        return placeholder
 
     return obj
 
@@ -1489,7 +1452,7 @@
     return res
 
 
-def import_site_function(path, module, funcname, dummy, modulefile=None):
+def import_site_function(path, module, funcname, placeholder, modulefile=None):
     """
     Try to import site specific function from site specific file if it exists
 
@@ -1497,15 +1460,15 @@
         path: full filename of the source file calling this (ie __file__)
         module: full module name
         funcname: function name to be imported from site file
-        dummy: dummy function to return in case there is no function to import
+        placeholder: function to return in case there is no function to import
         modulefile: module filename
 
-    Returns: site specific function object or dummy
+    Returns: site specific function object or placeholder
 
     Raises: ImportError if the site file exists but imports fails
     """
 
-    return import_site_symbol(path, module, funcname, dummy, modulefile)
+    return import_site_symbol(path, module, funcname, placeholder, modulefile)
 
 
 def _get_pid_path(program_name):
@@ -1723,6 +1686,18 @@
     return system(cmd, timeout=timeout, ignore_status=ignore_status)
 
 
+def _cmp(x, y):
+    """
+    Replacement for built-in function cmp that was removed in Python 3
+
+    Compare the two objects x and y and return an integer according to
+    the outcome. The return value is negative if x < y, zero if x == y
+    and strictly positive if x > y.
+    """
+
+    return (x > y) - (x < y)
+
+
 def compare_versions(ver1, ver2):
     """Version number comparison between ver1 and ver2 strings.
 
@@ -1752,10 +1727,10 @@
         cx = ax.pop(0)
         cy = ay.pop(0)
         maxlen = max(len(cx), len(cy))
-        c = cmp(cx.zfill(maxlen), cy.zfill(maxlen))
+        c = _cmp(cx.zfill(maxlen), cy.zfill(maxlen))
         if c != 0:
             return c
-    return cmp(len(ax), len(ay))
+    return _cmp(len(ax), len(ay))
 
 
 def args_to_dict(args):
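For illustration, the _cmp helper reproduces the Python 2 cmp contract, and compare_versions feeds it zero-filled, dot-separated components; the sample return values assume the behaviour described in the docstrings above.

from autotest_lib.client.common_lib import utils

utils._cmp(1, 2)      # -1
utils._cmp('a', 'a')  # 0
utils._cmp(5, 3)      # 1
# '10' is zero-filled against '9' ('10' vs '09'), so it compares greater.
utils.compare_versions('1.2.10', '1.2.9')  # 1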
@@ -1830,7 +1805,7 @@
     """
     Reads an x86 MSR from the specified CPU, returns as long integer.
     """
-    with open('/dev/cpu/%s/msr' % cpu, 'r', 0) as fd:
+    with open('/dev/cpu/%s/msr' % cpu, 'rb', 0) as fd:
         fd.seek(address)
         return struct.unpack('=Q', fd.read(8))[0]
 
@@ -1932,24 +1907,65 @@
 _MOBLAB_ETH_0 = 'eth0'
 _MOBLAB_ETH_1 = 'eth1'
 
+
+def _parse_subnet(subnet_str):
+    """Parse a subnet string to a (ip, mask) tuple."""
+    ip, mask = subnet_str.split('/')
+    return ip, int(mask)
+
+
 # A list of subnets that requires dedicated devserver and drone in the same
 # subnet. Each item is a tuple of (subnet_ip, mask_bits), e.g.,
 # ('192.168.0.0', 24))
 RESTRICTED_SUBNETS = []
 
+
 def _setup_restricted_subnets():
     restricted_subnets_list = CONFIG.get_config_value(
             'CROS', 'restricted_subnets', type=list, default=[])
-    # TODO(dshi): Remove the code to split subnet with `:` after R51 is
-    # off stable channel, and update shadow config to use `/` as
-    # delimiter for consistency.
-    for subnet in restricted_subnets_list:
-        ip, mask_bits = subnet.split('/') if '/' in subnet \
-                        else subnet.split(':')
-        RESTRICTED_SUBNETS.append((ip, int(mask_bits)))
+    global RESTRICTED_SUBNETS
+    RESTRICTED_SUBNETS = [_parse_subnet(s) for s in restricted_subnets_list]
+
 
 _setup_restricted_subnets()
 
+
+# A two-level list of subnets, e.g. '[["1.1.1.0/24","1.1.2.0/24"],
+# ["1.2.1.0/24", "1.2.2.0/24"]]'. Each element of it is either a singleton list
+# of a restricted subnet, or a list of subnets which can communicate with each
+# other (i.e. p2p subnets).
+ALL_SUBNETS = []
+
+
+def _setup_all_subnets():
+    all_subnets_raw = CONFIG.get_config_value('CROS',
+                                              'p2p_subnets',
+                                              default='[]')
+    all_subnets = json.loads(all_subnets_raw)
+    for subnet_group in all_subnets:
+        ALL_SUBNETS.append([_parse_subnet(s) for s in subnet_group])
+
+    if not RESTRICTED_SUBNETS:
+        _setup_restricted_subnets()
+    for subnet in RESTRICTED_SUBNETS:
+        ALL_SUBNETS.append([subnet])
+
+
+_setup_all_subnets()
+
+
+def get_all_restricted_subnets():
+    """Returns all restricted subnets in a flat list, including subnets that
+    are part of a p2p group.
+
+    This helps us to check if a host is in a restricted subnet."""
+    result = []
+    for s in ALL_SUBNETS:
+        result.extend(s)
+
+    return result
+
+
 # regex pattern for CLIENT/wireless_ssid_ config. For example, global config
 # can have following config in CLIENT section to indicate that hosts in subnet
 # 192.168.0.1/24 should use wireless ssid of `ssid_1`
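A sketch of how the new p2p_subnets handling groups subnets; the JSON string stands in for a hypothetical CROS.p2p_subnets config value.

import json

p2p_subnets_raw = '[["192.168.0.0/24", "192.168.1.0/24"]]'

def parse_subnet(subnet_str):
    # Same shape as _parse_subnet above: ('ip', mask_bits).
    ip, mask = subnet_str.split('/')
    return ip, int(mask)

all_subnets = [[parse_subnet(s) for s in group]
               for group in json.loads(p2p_subnets_raw)]
# Restricted subnets that are not part of a p2p group are appended as
# singleton lists, so flattening all_subnets yields every restricted subnet.
print(all_subnets)  # [[('192.168.0.0', 24), ('192.168.1.0', 24)]]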
@@ -1964,13 +1980,13 @@
     present, however fallback is the ethernet mac address.
     """
     for vpd_key in ['serial_number', 'ethernet_mac']:
-      try:
-          cmd_result = run('sudo vpd -g %s' % vpd_key)
-          if cmd_result and cmd_result.stdout:
-            return cmd_result.stdout
-      except error.CmdError as e:
-          logging.error(str(e))
-          logging.info(vpd_key)
+        try:
+            cmd_result = run('sudo vpd -g %s' % vpd_key)
+            if cmd_result and cmd_result.stdout:
+                return cmd_result.stdout
+        except error.CmdError as e:
+            logging.error(str(e))
+            logging.info(vpd_key)
     return 'NoSerialNumber'
 
 
@@ -1979,7 +1995,8 @@
          tries=None,
          timeout=60,
          ignore_timeout=False,
-         user=None):
+         user=None,
+         interface=None):
     """Attempt to ping |host|.
 
     Shell out to 'ping' if host is an IPv4 addres or 'ping6' if host is an
@@ -2001,6 +2018,7 @@
     @param timeout: number of seconds after which to kill 'ping' command.
     @param ignore_timeout: If true, timeouts won't raise CmdTimeoutError.
     @param user: Run as a specific user
+    @param interface: Run on a specific network interface
     @return exit code of ping command.
     """
     args = [host]
@@ -2010,6 +2028,8 @@
         args.append('-w%d' % deadline)
     if tries:
         args.append('-c%d' % tries)
+    if interface:
+        args.append('-I%s' % interface)
 
     if user != None:
         args = [user, '-c', ' '.join([cmd] + args)]
@@ -2086,7 +2106,7 @@
     @param hostname: The hostname to check.
     @returns True if hostname match power lab hostname, otherwise False.
     """
-    pattern = r'chromeos\d+-power-host\d+(\.cros(\.corp(\.google\.com)?)?)?$'
+    pattern = r'chromeos\d.*power.*(\.cros(\.corp(\.google\.com)?)?)?$'
     return re.match(pattern, hostname) is not None
 
 
@@ -2572,7 +2592,7 @@
         run('sudo -n true')
         return False
     except error.CmdError:
-        logging.warn('sudo command requires password.')
+        logging.warning('sudo command requires password.')
         return True
 
 
@@ -3328,3 +3348,21 @@
     for i in buf:
         rv = _table_crc8[ (rv ^ i) & 0xff ]
     return rv
+
+
+def send_msg_to_terminal(job, msg):
+    """Send from the client side to the terminal.
+
+    ONLY to be used on non-scheduled tests (aka local runs).
+    Do not send anything which could be confused with a status.
+    See server/autotest.py client_logger for examples of statuses NOT to use.
+
+    @param job: The client job obj. Can be accessed from anything built off
+        test.test via self.job
+    @param msg: the msg to send.
+    """
+    status = os.fdopen(3, 'w', 2)
+    try:
+        status.write(msg + '\n')
+    finally:
+        status.flush()
diff --git a/client/common_lib/utils_unittest.py b/client/common_lib/utils_unittest.py
index 10571dc..f30ae07 100755
--- a/client/common_lib/utils_unittest.py
+++ b/client/common_lib/utils_unittest.py
@@ -1,14 +1,11 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # pylint: disable=missing-docstring
 
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
+from __future__ import absolute_import, division, print_function
 
-from six.moves import range
-import six
 import errno
+import io
 import itertools
 import logging
 import os
@@ -17,17 +14,18 @@
 import subprocess
 import time
 import unittest
-from six.moves import urllib
-
-import common
-from autotest_lib.client.common_lib import autotemp
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.test_utils import mock
 
 # mock 1.0.0 (in site-packages/chromite/third_party/mock.py)
 # which is an ancestor of Python's default library starting from Python 3.3.
 # See https://docs.python.org/3/library/unittest.mock.html
 import mock as pymock
+import six
+from six.moves import range, urllib
+
+import common
+
+from autotest_lib.client.common_lib import autotemp, utils
+from autotest_lib.client.common_lib.test_utils import mock
 
 metrics = utils.metrics_mock
 
@@ -171,6 +169,14 @@
         self.god.check_playback()
         self.assertEqual(data, test_file.final_data)
 
+    def test_binary_functionality(self):
+        data = bytearray([0, 1, 3, 23, 0, 71, 254, 255, 127, 128])
+        test_file = mock.SaveDataAfterCloseBytesIO()
+        utils.open.expect_call("filename", "wb").and_return(test_file)
+        utils.open_write_close("filename", data, is_binary=True)
+        self.god.check_playback()
+        self.assertEqual(data, test_file.final_data)
+
 
 class test_read_keyval(unittest.TestCase):
     def setUp(self):
@@ -458,8 +464,8 @@
         data = object()
         timeout = 10
 
-        src_file = self.god.create_mock_class(file, "file")
-        dest_file = self.god.create_mock_class(file, "file")
+        src_file = self.god.create_mock_class(io.IOBase, "file")
+        dest_file = self.god.create_mock_class(io.IOBase, "file")
 
         (utils.urlopen.expect_call(url, data=data, timeout=timeout)
                 .and_return(src_file))
@@ -605,7 +611,7 @@
                                 stderr=open(os.devnull, 'w'))
         stdout, _ = proc.communicate()
         self.assertEqual(proc.returncode, 0)
-        self.assertEqual(stdout[:-1], text)
+        self.assertEqual(stdout[:-1].decode(), text)
 
 
     def test_normal_string(self):
@@ -687,7 +693,7 @@
         quoted_word = utils.sh_quote_word(text)
         echoed_value = subprocess.check_output('echo %s' % quoted_word,
                                                shell=True)
-        self.assertEqual(echoed_value, text + '\n')
+        self.assertEqual(echoed_value.decode(), text + '\n')
 
 
 class test_nested_sh_quote_word(test_sh_quote_word):
@@ -701,7 +707,7 @@
         nested_command = 'echo ' + utils.sh_quote_word(command)
         produced_command = subprocess.check_output(nested_command, shell=True)
         echoed_value = subprocess.check_output(produced_command, shell=True)
-        self.assertEqual(echoed_value, text + '\n')
+        self.assertEqual(echoed_value.decode(), text + '\n')
 
 
 class test_run(unittest.TestCase):
@@ -1553,7 +1559,7 @@
             "Waiting for condition. Reason: Exception('illegal input',)",
             str(e))
         self.assertIsInstance(e.reason, Exception)
-        self.assertEqual('illegal input', e.reason.message)
+        self.assertEqual('illegal input', str(e.reason))
 
         # Positional message argument for backward compatibility.
         e = utils.TimeoutError('Waiting for condition',
@@ -1562,7 +1568,7 @@
             "Waiting for condition. Reason: Exception('illegal input',)",
             str(e))
         self.assertIsInstance(e.reason, Exception)
-        self.assertEqual('illegal input', e.reason.message)
+        self.assertEqual('illegal input', str(e.reason))
 
 
 
diff --git a/client/cros/OWNERS b/client/cros/OWNERS
new file mode 100644
index 0000000..e8a7df8
--- /dev/null
+++ b/client/cros/OWNERS
@@ -0,0 +1,3 @@
+include chromiumos/config:/owners/testservice
+include /ENGPROD_OWNERS
+include /INFRA_OWNERS
diff --git a/client/cros/audio/alsa_utils.py b/client/cros/audio/alsa_utils.py
index 0bdd37e..09d11e4 100644
--- a/client/cros/audio/alsa_utils.py
+++ b/client/cros/audio/alsa_utils.py
@@ -377,6 +377,9 @@
     '''
     cmd = [ACONNECT_PATH, '-io']
     output = cmd_utils.execute(cmd, stdout=subprocess.PIPE, run_as='chronos')
+
+    # py3 migration: decode the bytes output to str.
+    output = output.decode()
     num_clients = 0
     for line in output.splitlines():
         match = CLIENT_NUM_RE.match(line)
diff --git a/client/cros/audio/audio_analysis_unittest.py b/client/cros/audio/audio_analysis_unittest.py
index 8e8b4bb..81472c1 100755
--- a/client/cros/audio/audio_analysis_unittest.py
+++ b/client/cros/audio/audio_analysis_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
@@ -19,7 +19,7 @@
         numpy.random.seed(0)
 
 
-    def dummy_peak_detection(self, array, window_size):
+    def stub_peak_detection(self, array, window_size):
         """Detects peaks in an array in simple way.
 
         A point (i, array[i]) is a peak if array[i] is the maximum among
@@ -79,12 +79,12 @@
     def testPeakDetectionLarge(self):
         array = numpy.random.uniform(0, 1, 1000000)
         window_size = 100
-        logging.debug('Test large array using dummy peak detection')
-        dummy_answer = self.dummy_peak_detection(array, window_size)
+        logging.debug('Test large array using stub peak detection')
+        stub_answer = self.stub_peak_detection(array, window_size)
         logging.debug('Test large array using improved peak detection')
         improved_answer = audio_analysis.peak_detection(array, window_size)
         logging.debug('Compare the result')
-        self.assertEqual(dummy_answer, improved_answer)
+        self.assertEqual(stub_answer, improved_answer)
 
 
     def testSpectralAnalysis(self):
@@ -94,7 +94,7 @@
         freq_2 = 60.0
         coeff_1 = 1
         coeff_2 = 0.3
-        samples = length_in_secs * rate
+        samples = int(length_in_secs * rate)
         noise = numpy.random.standard_normal(samples) * 0.005
         x = numpy.linspace(0.0, (samples - 1) * 1.0 / rate, samples)
         y = (coeff_1 * numpy.sin(freq_1 * 2.0 * numpy.pi * x) +
@@ -116,7 +116,7 @@
         """This unittest checks the spectral analysis works on real data."""
         file_path = os.path.join(
                 os.path.dirname(__file__), 'test_data', '1k_2k.raw')
-        binary = open(file_path, 'r').read()
+        binary = open(file_path, 'rb').read()
         data = audio_data.AudioRawData(binary, 2, 'S32_LE')
         saturate_value = audio_data.get_maximum_value_from_sample_format(
                 'S32_LE')
@@ -135,7 +135,7 @@
         """Checks that sepectral analysis handles un-meaningful data."""
         rate = 48000
         length_in_secs = 0.5
-        samples = length_in_secs * rate
+        samples = int(length_in_secs * rate)
         noise_amplitude = audio_analysis.MEANINGFUL_RMS_THRESHOLD * 0.5
         noise = numpy.random.standard_normal(samples) * noise_amplitude
         results = audio_analysis.spectral_analysis(noise, rate)
@@ -167,7 +167,7 @@
         self.rate = 48000
         self.freq = 440
         length_in_secs = 0.25
-        self.samples = length_in_secs * self.rate
+        self.samples = int(length_in_secs * self.rate)
         x = numpy.linspace(
                 0.0, (self.samples - 1) * 1.0 / self.rate, self.samples)
         self.y = numpy.sin(self.freq * 2.0 * numpy.pi * x)
@@ -197,8 +197,8 @@
         self.anomaly_start_secs = 0.1
         self.anomaly_duration_secs = 0.005
         anomaly_append_secs = self.anomaly_start_secs + self.anomaly_duration_secs
-        anomaly_start_index = self.anomaly_start_secs * self.rate
-        anomaly_append_index = anomaly_append_secs * self.rate
+        anomaly_start_index = int(self.anomaly_start_secs * self.rate)
+        anomaly_append_index = int(anomaly_append_secs * self.rate)
         self.y = numpy.append(self.y[:anomaly_start_index], self.y[anomaly_append_index:])
 
 
diff --git a/client/cros/audio/audio_data.py b/client/cros/audio/audio_data.py
index 8232327..f73405b 100644
--- a/client/cros/audio/audio_data.py
+++ b/client/cros/audio/audio_data.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/audio/audio_helper.py b/client/cros/audio/audio_helper.py
index a55d835..26944a1 100644
--- a/client/cros/audio/audio_helper.py
+++ b/client/cros/audio/audio_helper.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -448,13 +448,13 @@
     cras_utils.set_capture_mute(False)
 
 
-def dump_rms_postmortem(result_dir):
-    """Dumps postmortem for rms tests."""
+def dump_rms_retrospective(result_dir):
+    """Dumps retrospective for rms tests."""
     try:
         dump_audio_diagnostics(
                 os.path.join(result_dir, "audio_diagnostics.txt"))
     except Exception:
-        logging.exception('Error while generating postmortem report')
+        logging.exception('Error while generating retrospective report')
 
 
 def dump_audio_diagnostics(file_path=None):
diff --git a/client/cros/audio/audio_quality_measurement_unittest.py b/client/cros/audio/audio_quality_measurement_unittest.py
index 681a3f3..5e32fda 100755
--- a/client/cros/audio/audio_quality_measurement_unittest.py
+++ b/client/cros/audio/audio_quality_measurement_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/audio/audio_spec.py b/client/cros/audio/audio_spec.py
index 5b41d36..ea41ee6 100644
--- a/client/cros/audio/audio_spec.py
+++ b/client/cros/audio/audio_spec.py
@@ -69,8 +69,8 @@
     return board_name
 
 BOARDS_WITH_HOTWORDING = [
-        'atlas', 'coral', 'eve', 'kevin', 'nami', 'nocturne', 'pyro', 'rammus',
-        'samus'
+        'atlas', 'coral', 'eve', 'kevin', 'nami', 'nocturne', 'rammus',
+        'samus', 'volteer'
 ]
 
 
@@ -95,16 +95,21 @@
 
     """
     board_name = strip_kernelnext_suffix(board_name)
-    return board_name in ['nocturne', 'atlas']
+    return board_name in ['nocturne', 'atlas', 'volteer']
 
 
 BoardInfo = collections.namedtuple('BoardInfo', ['board', 'model', 'sku'])
 
+BOARDS_WITH_FOUR_INTERNAL_SPEAKERS = [
+        BoardInfo('strongbad', 'homestar', ''),
+]
+
 BOARDS_WITH_TWO_INTERNAL_MICS = [
         BoardInfo('coral', 'babytiger', ''),
         BoardInfo('coral', 'nasher360', ''),
         BoardInfo('coral', 'rabbid', ''),
         BoardInfo('coral', 'robo360', ''),
+        BoardInfo('dedede', 'boten', ''),
         BoardInfo('grunt', 'treeya360', '175'),
         BoardInfo('hatch', 'kohaku', ''),
         BoardInfo('octopus', 'ampton', ''),
@@ -120,14 +125,39 @@
         BoardInfo('snappy', 'snappy', '8'),
         BoardInfo('zork', 'dalboz', ''),
         BoardInfo('zork', 'ezkinil', ''),
-        BoardInfo('zork', 'morphius', ''),
+        # b/232791346 clarifies zork-morphius SKU 1510014998
+        # has a front mic ONLY. Other SKUs have both UFC and WFC,
+        # for which the following line would be valid.
+        #BoardInfo('zork', 'morphius', ''),
         BoardInfo('zork', 'vilboz360', '1518534658'),
         BoardInfo('zork', 'vilboz360', '1518534660'),
         BoardInfo('zork', 'vilboz360', '1518534661'),
         BoardInfo('zork', 'vilboz360', '1518534662'),
+        BoardInfo('keeby', 'lalala', ''),
+        BoardInfo('dedede', 'drawcia', ''),
 ]
 
 
+def get_internal_speaker_channel_count(board_type, board, model, sku):
+    """Gets the channel count of internal speakers.
+    @param board_type: board type string. E.g. CHROMEBOX, CHROMEBIT, etc.
+    @param board: board name of the DUT.
+    @param model: model name of the DUT.
+    @param sku: sku number string of the DUT.
+
+    @returns: The channel count of internal speakers.
+
+    """
+    if not has_internal_speaker(board_type, board):
+        return 0
+
+    for b in BOARDS_WITH_FOUR_INTERNAL_SPEAKERS:
+        if b.board == board and b.model == model:
+            if b.sku == '' or b.sku == sku:
+                return 4
+    return 2
+
+
 def get_num_internal_microphone(board_type, board, model, sku):
     """Gets the number of internal microphones.
 
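Illustrative calls for the new channel-count helper; the board_type value and the assumption that these boards report an internal speaker are mine, not from the diff.

from autotest_lib.client.cros.audio import audio_spec

# homestar/strongbad is listed in BOARDS_WITH_FOUR_INTERNAL_SPEAKERS.
audio_spec.get_internal_speaker_channel_count(
        'CHROMEBOOK', 'strongbad', 'homestar', '')  # 4, if it has a speaker
# Any other speaker-equipped model falls back to stereo.
audio_spec.get_internal_speaker_channel_count(
        'CHROMEBOOK', 'atlas', 'atlas', '')         # 2, if it has a speaker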
diff --git a/client/cros/audio/audio_test_data.py b/client/cros/audio/audio_test_data.py
index 257e6b6..1fc19be 100644
--- a/client/cros/audio/audio_test_data.py
+++ b/client/cros/audio/audio_test_data.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/audio/check_quality.py b/client/cros/audio/check_quality.py
index bdd5b34..f50f581 100755
--- a/client/cros/audio/check_quality.py
+++ b/client/cros/audio/check_quality.py
@@ -1,5 +1,5 @@
-#!/usr/bin/python2
-
+#!/usr/bin/env python3
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -443,7 +443,7 @@
         rate = wavefile.rate
     elif args.filename.endswith('.raw'):
         binary = None
-        with open(args.filename, 'r') as f:
+        with open(args.filename, 'rb') as f:
             binary = f.read()
 
         raw_data = audio_data.AudioRawData(
diff --git a/client/cros/audio/cmd_utils.py b/client/cros/audio/cmd_utils.py
index 4a9e442..d175a9e 100644
--- a/client/cros/audio/cmd_utils.py
+++ b/client/cros/audio/cmd_utils.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -88,7 +88,7 @@
             self._loggers.remove(logger)
             return
 
-        for line in data.split('\n'):
+        for line in data.split(b'\n'):
             logging.log(logger._level, '%s%s', logger._prefix, line)
 
 
@@ -100,7 +100,7 @@
         """
         logger = _PipeLogger(level=level, prefix=prefix)
         self._loggers.append(logger)
-        os.write(self._pipe[1], '\0')
+        os.write(self._pipe[1], b'\0')
         return _LoggerProxy(logger)
 
 
diff --git a/client/cros/audio/cras_dbus_utils.py b/client/cros/audio/cras_dbus_utils.py
index 2279a50..cab7ddb 100644
--- a/client/cros/audio/cras_dbus_utils.py
+++ b/client/cros/audio/cras_dbus_utils.py
@@ -9,6 +9,12 @@
 import multiprocessing
 import pprint
 
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
+
 from autotest_lib.client.cros.audio import cras_utils
 
 
@@ -27,25 +33,6 @@
         raise
     dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
 
-
-def _get_gobject():
-    """Tries to import gobject.
-
-    @returns: The imported gobject module.
-
-    @raises: ImportError if gobject can not be imported.
-
-    """
-    try:
-        import gobject
-    except ImportError as e:
-        logging.exception(
-                'Can not import gobject: %s. This method should only be '
-                'called on Cros device.', e)
-        raise
-    return gobject
-
-
 class CrasDBusMonitorError(Exception):
     """Error in CrasDBusMonitor."""
     pass
@@ -58,7 +45,7 @@
         # Acquires a new Cras interface through a new dbus.SystemBus instance
         # which has default main loop.
         self._iface = cras_utils.get_cras_control_interface(private=True)
-        self._loop = _get_gobject().MainLoop()
+        self._loop = GObject.MainLoop()
         self._count = 0
 
 
@@ -82,7 +69,7 @@
         self._target_signal_count = target_signal_count
         signal_match = self._iface.connect_to_signal(
                 'NodesChanged', self._nodes_changed_handler)
-        _get_gobject().timeout_add(
+        GObject.timeout_add(
                 timeout_secs * 1000, self._timeout_quit_main_loop)
 
         # Blocks here until _nodes_changed_handler or _timeout_quit_main_loop
@@ -207,7 +194,7 @@
 
         signal_match = self._iface.connect_to_signal(
                 self._signal_name, self._signal_handler)
-        _get_gobject().timeout_add(
+        GObject.timeout_add(
                  int(self._CHECK_QUIT_PERIOD_SECS * 1000),
                  self._check_quit_main_loop)
 
diff --git a/client/cros/audio/cras_utils.py b/client/cros/audio/cras_utils.py
index f065e44..5929cc5 100644
--- a/client/cros/audio/cras_utils.py
+++ b/client/cros/audio/cras_utils.py
@@ -20,6 +20,30 @@
     pass
 
 
+def dump_audio_thread():
+    """Dumps audio thread info.
+
+    @returns: A list of cras audio information.
+    """
+    proc = subprocess.Popen([_CRAS_TEST_CLIENT, '--dump_a'],
+                            stdout=subprocess.PIPE)
+
+    output, err = proc.communicate()
+    if err:
+        raise CrasUtilsError(err)
+    return output.decode().splitlines()
+
+
+def get_audio_thread_summary():
+    """Gets stream summary info.
+
+    @returns: A list of stream summary information.
+    """
+
+    lines = dump_audio_thread()
+    return [l for l in lines if l.startswith('Summary:')]
+
+
 def playback(blocking=True, stdin=None, *args, **kargs):
     """A helper function to execute the playback_cmd.
 
@@ -319,10 +343,11 @@
 # Cras node types reported from Cras DBus control API.
 CRAS_OUTPUT_NODE_TYPES = ['HEADPHONE', 'INTERNAL_SPEAKER', 'HDMI', 'USB',
                           'BLUETOOTH', 'LINEOUT', 'UNKNOWN', 'ALSA_LOOPBACK']
-CRAS_INPUT_NODE_TYPES = ['MIC', 'INTERNAL_MIC', 'USB', 'BLUETOOTH',
-                         'POST_DSP_LOOPBACK', 'POST_MIX_LOOPBACK', 'UNKNOWN',
-                         'KEYBOARD_MIC', 'HOTWORD', 'FRONT_MIC', 'REAR_MIC',
-                         'ECHO_REFERENCE']
+CRAS_INPUT_NODE_TYPES = [
+        'MIC', 'INTERNAL_MIC', 'USB', 'BLUETOOTH', 'POST_DSP_DELAYED_LOOPBACK',
+        'POST_DSP_LOOPBACK', 'POST_MIX_LOOPBACK', 'UNKNOWN', 'KEYBOARD_MIC',
+        'HOTWORD', 'FRONT_MIC', 'REAR_MIC', 'ECHO_REFERENCE'
+]
 CRAS_NODE_TYPES = CRAS_OUTPUT_NODE_TYPES + CRAS_INPUT_NODE_TYPES
 
 
@@ -677,6 +702,38 @@
     raise CrasUtilsError('Cannot find active output node.')
 
 
+def get_noise_cancellation_supported():
+    """Gets whether the device supports Noise Cancellation.
+
+    @returns: True if supported; False otherwise.
+    """
+    return bool(get_cras_control_interface().IsNoiseCancellationSupported())
+
+
+def set_bypass_block_noise_cancellation(bypass):
+    """Sets CRAS to bypass the blocking logic of Noise Cancellation.
+
+    @param bypass: True for bypass; False for un-bypass.
+    """
+    get_cras_control_interface().SetBypassBlockNoiseCancellation(bypass)
+
+
+def set_noise_cancellation_enabled(enabled):
+    """Sets the state to enable or disable Noise Cancellation.
+
+    @param enabled: True to enable; False to disable.
+    """
+    get_cras_control_interface().SetNoiseCancellationEnabled(enabled)
+
+
+def set_floss_enabled(enabled):
+    """Sets whether CRAS stack expects to use Floss.
+
+    @param enabled: True for Floss, False for Bluez.
+    """
+    get_cras_control_interface().SetFlossEnabled(enabled)
+
+
 class CrasTestClient(object):
     """An object to perform cras_test_client functions."""
 
@@ -745,7 +802,7 @@
                     sleep_interval=0.5,
                     desc='Waiting for subprocess to terminate')
         except Exception:
-            logging.warn('Killing subprocess due to timeout')
+            logging.warning('Killing subprocess due to timeout')
             proc.kill()
             proc.wait()
 
@@ -952,6 +1009,19 @@
         return True
 
 
+    def _encode_length_for_dbus(self, length):
+        """Encode length as Int64 for |SetPlayerMetadata|."""
+        try:
+            import dbus
+        except ImportError as e:
+            logging.exception(
+                    'Can not import dbus: %s. This method should only be '
+                    'called on Cros device.', e)
+            raise
+
+        length_variant = dbus.types.Int64(length, variant_level=1)
+        return dbus.Dictionary({'length': length_variant}, signature='sv')
+
     def set_player_length(self, length):
         """Set metadata length for the registered media player.
 
@@ -960,11 +1030,12 @@
         be int32 by default. Separate it from the metadata function to help
         prepare the data differently.
 
-        @param metadata: DBUS dictionary that contains a variant of int64.
+        @param length: Integer value that will be encoded for dbus.
 
         """
         try:
-            get_cras_control_interface().SetPlayerMetadata(length)
+            length_dbus = self._encode_length_for_dbus(length)
+            get_cras_control_interface().SetPlayerMetadata(length_dbus)
         except Exception as e:
             logging.error('Failed to set player length: %s', e)
             return False
diff --git a/client/cros/audio/pack_audio_quality.py b/client/cros/audio/pack_audio_quality.py
index ba0cafe..67ba711 100755
--- a/client/cros/audio/pack_audio_quality.py
+++ b/client/cros/audio/pack_audio_quality.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/cros/audio/sox_utils.py b/client/cros/audio/sox_utils.py
index bb0d3e4..ee5edd9 100644
--- a/client/cros/audio/sox_utils.py
+++ b/client/cros/audio/sox_utils.py
@@ -207,7 +207,7 @@
     stat = _SOX_STAT()
 
     for line in stat_output.splitlines():
-        match = _RE_STAT_LINE.match(line)
+        match = _RE_STAT_LINE.match(line.decode('utf-8'))
         if not match:
             continue
         key, value = (_remove_redundant_spaces(x) for x in match.groups())
@@ -303,8 +303,11 @@
     cmd_utils.execute(sox_cmd)
 
 
-def trim_silence_from_wav_file(path_src, path_dst, new_duration, volume=1,
-                               duration_threshold=0):
+def trim_silence_from_wav_file(path_src,
+                               path_dst,
+                               new_duration,
+                               volume=1,
+                               duration_threshold=0.1):
     """Trim silence from beginning of a file.
 
     Trim silence from beginning of file, and trim remaining audio to
@@ -317,7 +320,7 @@
                    which sox will consider silence, defaults to 1 (1%).
     @param duration_threshold: [Optional] A float of the duration in seconds of
                                sound above volume parameter required to consider
-                               end of silence. Defaults to 0 (0 seconds).
+                               end of silence. Defaults to 0.1 (0.1 seconds).
     """
     mins, secs = divmod(new_duration, 60)
     hrs, mins = divmod(mins, 60)
@@ -331,6 +334,61 @@
     cmd_utils.execute(sox_cmd)
 
 
+def mix_two_wav_files(path_src1, path_src2, path_dst, input_volume=None):
+    """Generate the mixed WAV file from two input WAV files.
+
+    Use "man sox" for more details on the mixing.
+
+    @param path_src1: Path to the first source.
+    @param path_src2: Path to the second source.
+    @param path_dst: Path for the generated mixed file.
+    @param input_volume: The volume (0.0~1.0) of input sources on mixing. If not
+                         given, the default value for sox is 1 / (# of sources).
+    """
+    sox_cmd = [SOX_PATH]
+    sox_cmd += ['--combine', 'mix']
+
+    if isinstance(input_volume, (int, float)):
+        input_volume = min(1.0, max(0.0, input_volume))
+        sox_cmd += ['-v', '{:.3f}'.format(input_volume)]
+
+    sox_cmd += [path_src1, path_src2, path_dst]
+
+    cmd_utils.execute(sox_cmd)
+
+
+def get_infos_from_wav_file(file_path):
+    """Get the information set from the header of the input WAV file.
+
+    It returns None if the input file is not WAV format.
+
+    @param file_path: Path to the WAV file.
+
+    @returns: A dict with the following elements:
+        'duration': The length of the audio (in seconds).
+        'channels': The number of channels.
+        'bits': The number of bits of each sample.
+        'rate': The sampling rate.
+    """
+    sox_cmd = [SOX_PATH]
+    sox_cmd += ['--i', None, file_path]  # sox_cmd[2] is placeholder
+
+    def _execute_sox_cmd_info(info_arg):
+        sox_cmd_info = sox_cmd[:2] + [info_arg] + sox_cmd[3:]
+        return cmd_utils.execute(
+                sox_cmd_info, stdout=subprocess.PIPE).decode('utf-8').strip()
+
+    format_output = _execute_sox_cmd_info('-t')
+    if format_output != 'wav':
+        logging.error('the input file format: %s', format_output)
+        return None
+
+    return dict(duration=float(_execute_sox_cmd_info('-D')),
+                channels=int(_execute_sox_cmd_info('-c')),
+                bits=int(_execute_sox_cmd_info('-b')),
+                rate=int(_execute_sox_cmd_info('-r')))
+
+
 def get_file_length(file_path, channels, bits, rate):
     """Get the length in seconds of an audio file.
 
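A usage sketch for the two new sox helpers (file paths are hypothetical):

from autotest_lib.client.cros.audio import sox_utils

# Mix two recordings at 40% input volume each, then inspect the result.
sox_utils.mix_two_wav_files('/tmp/a.wav', '/tmp/b.wav', '/tmp/mixed.wav',
                            input_volume=0.4)
info = sox_utils.get_infos_from_wav_file('/tmp/mixed.wav')
if info is not None:
    print(info['duration'], info['channels'], info['bits'], info['rate'])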
diff --git a/client/cros/audio/visqol_utils.py b/client/cros/audio/visqol_utils.py
new file mode 100644
index 0000000..4d2b507
--- /dev/null
+++ b/client/cros/audio/visqol_utils.py
@@ -0,0 +1,96 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import subprocess
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.bluetooth.bluetooth_audio_test_data import (
+        VISQOL_PATH, VISQOL_SIMILARITY_MODEL)
+
+
+def parse_visqol_output(stdout, stderr, log_dir):
+    """
+    Parses the stdout and stderr strings from VISQOL output into
+    a float score.
+
+    On error, stderr contains the error message; otherwise it is None.
+    On success, stdout is a string: the first line is the VISQOL version,
+    followed by an indication of speech mode, the paths to the reference
+    and degraded files, and a float MOS-LQO score (which is what we're
+    interested in), followed by more detailed charts about the scoring of
+    specific segments of the files. Stdout is None on error.
+
+    @param stdout: The stdout bytes from commandline output of VISQOL.
+    @param stderr: The stderr bytes from commandline output of VISQOL.
+    @param log_dir: Directory path for storing VISQOL log.
+
+    @returns: A tuple of the stderr string (empty if there was no error)
+            and a float MOS-LQO score (-1.0 if no score could be parsed).
+    """
+    stdout = '' if stdout is None else stdout.decode('utf-8')
+    stderr = '' if stderr is None else stderr.decode('utf-8')
+
+    # Log verbose VISQOL output:
+    log_file = os.path.join(log_dir, 'VISQOL_LOG.txt')
+    with open(log_file, 'a+') as f:
+        f.write('String Error:\n{}\n'.format(stderr))
+        f.write('String Out:\n{}\n'.format(stdout))
+
+    # pattern matches the first float or int after 'MOS-LQO:' in stdout,
+    # e.g. it would match the line 'MOS-LQO:       2.3'
+    score_pattern = re.compile(r'.*MOS-LQO:\s*(\d+\.?\d*)')
+    score_search = re.search(score_pattern, stdout)
+
+    # re.search returns None if no match was found; otherwise group 1 of
+    # the match object holds just the float score.
+    score = float(score_search.group(1)) if score_search else -1.0
+    return stderr, score
+
+
+def get_visqol_score(ref_file,
+                     deg_file,
+                     log_dir,
+                     speech_mode=True,
+                     verbose=True):
+    """
+    Runs VISQOL using the subprocess library on the provided reference file
+    and degraded file and returns the VISQOL score.
+
+    Note that the difference between the durations of the reference and
+    degraded audio must be smaller than 1.0 second.
+
+    @param ref_file: File path to the reference wav file.
+    @param deg_file: File path to the degraded wav file.
+    @param log_dir: Directory path for storing VISQOL log.
+    @param speech_mode: [Optional] Defaults to True, accepts 16k sample
+            rate files and ignores frequencies > 8kHz for scoring.
+    @param verbose: [Optional] Defaults to True, outputs more details.
+
+    @returns: A float score for the tested file.
+    """
+    visqol_cmd = [VISQOL_PATH]
+    visqol_cmd += ['--reference_file', ref_file]
+    visqol_cmd += ['--degraded_file', deg_file]
+    visqol_cmd += ['--similarity_to_quality_model', VISQOL_SIMILARITY_MODEL]
+
+    if speech_mode:
+        visqol_cmd.append('--use_speech_mode')
+    if verbose:
+        visqol_cmd.append('--verbose')
+
+    visqol_process = subprocess.Popen(visqol_cmd,
+                                      stdout=subprocess.PIPE,
+                                      stderr=subprocess.PIPE)
+    stdout, stderr = visqol_process.communicate()
+
+    err, score = parse_visqol_output(stdout, stderr, log_dir)
+
+    if err:
+        raise error.TestError(err)
+    elif score < 0.0:
+        raise error.TestError('Failed to parse score, got {}'.format(score))
+
+    return score
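A hedged example of the new helper (paths are placeholders; the binaries referenced by VISQOL_PATH and the similarity model must exist on the device):

from autotest_lib.client.cros.audio import visqol_utils

score = visqol_utils.get_visqol_score(ref_file='/tmp/ref_16k.wav',
                                      deg_file='/tmp/recorded_16k.wav',
                                      log_dir='/tmp/visqol_logs',
                                      speech_mode=True)
print('MOS-LQO score: %.3f' % score)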
diff --git a/client/cros/bluetooth/OWNERS b/client/cros/bluetooth/OWNERS
new file mode 100644
index 0000000..3c5c8a3
--- /dev/null
+++ b/client/cros/bluetooth/OWNERS
@@ -0,0 +1 @@
+include /BLUETOOTH_OWNERS
diff --git a/client/cros/bluetooth/adv_monitor_helper.py b/client/cros/bluetooth/adv_monitor_helper.py
index df92374..6fe6e08 100644
--- a/client/cros/bluetooth/adv_monitor_helper.py
+++ b/client/cros/bluetooth/adv_monitor_helper.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,7 +8,11 @@
 import dbus
 import dbus.mainloop.glib
 import dbus.service
-import gobject
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 import logging
 
 from multiprocessing import Process, Pipe
@@ -34,6 +39,11 @@
 
     """
 
+    # Refer doc/advertisement-monitor-api.txt for more info about unset values.
+    UNSET_RSSI = 127
+    UNSET_TIMEOUT = 0
+    UNSET_SAMPLING_PERIOD = 256
+
     # Indexes of the Monitor object parameters in a monitor data list.
     MONITOR_TYPE = 0
     RSSI_FILTER = 1
@@ -44,6 +54,7 @@
     RSSI_H_TIMEOUT = 1
     RSSI_L_THRESH = 2
     RSSI_L_TIMEOUT = 3
+    SAMPLING_PERIOD = 4
 
     # Indexes of the Patterns filter parameters in a monitor data list.
     PATTERN_START_POS = 0
@@ -67,6 +78,8 @@
         self.events['DeviceFound'] = 0
         self.events['DeviceLost'] = 0
 
+        self.target_devices = []
+
         self._set_type(monitor_data[self.MONITOR_TYPE])
         self._set_rssi(monitor_data[self.RSSI_FILTER])
         self._set_patterns(monitor_data[self.PATTERNS])
@@ -91,8 +104,16 @@
         """
         properties = dict()
         properties['Type'] = dbus.String(self.monitor_type)
-        properties['RSSIThresholdsAndTimers'] = dbus.Struct(self.rssi,
-                                                            signature='nqnq')
+        if self.rssi_h_thresh != self.UNSET_RSSI:
+            properties['RSSIHighThreshold'] = dbus.Int16(self.rssi_h_thresh)
+        if self.rssi_h_timeout != self.UNSET_TIMEOUT:
+            properties['RSSIHighTimeout'] = dbus.UInt16(self.rssi_h_timeout)
+        if self.rssi_l_thresh != self.UNSET_RSSI:
+            properties['RSSILowThreshold'] = dbus.Int16(self.rssi_l_thresh)
+        if self.rssi_l_timeout != self.UNSET_TIMEOUT:
+            properties['RSSILowTimeout'] = dbus.UInt16(self.rssi_l_timeout)
+        if self.sampling_period != self.UNSET_SAMPLING_PERIOD:
+            properties['RSSISamplingPeriod'] = dbus.UInt16(self.sampling_period)
         properties['Patterns'] = dbus.Array(self.patterns, signature='(yyay)')
         return {ADV_MONITOR_IFACE: properties}
 
@@ -112,11 +133,11 @@
         @param rssi: the list of rssi threshold and timeout values.
 
         """
-        h_thresh = dbus.Int16(rssi[self.RSSI_H_THRESH])
-        h_timeout = dbus.UInt16(rssi[self.RSSI_H_TIMEOUT])
-        l_thresh = dbus.Int16(rssi[self.RSSI_L_THRESH])
-        l_timeout = dbus.UInt16(rssi[self.RSSI_L_TIMEOUT])
-        self.rssi = (h_thresh, h_timeout, l_thresh, l_timeout)
+        self.rssi_h_thresh = rssi[self.RSSI_H_THRESH]
+        self.rssi_h_timeout = rssi[self.RSSI_H_TIMEOUT]
+        self.rssi_l_thresh = rssi[self.RSSI_L_THRESH]
+        self.rssi_l_timeout = rssi[self.RSSI_L_TIMEOUT]
+        self.sampling_period = rssi[self.SAMPLING_PERIOD]
 
 
     def _set_patterns(self, patterns):
@@ -190,6 +211,17 @@
         return False
 
 
+    def set_target_devices(self, devices):
+        """Set the target devices to the given monitor.
+
+        DeviceFound and DeviceLost will only be counted if they are triggered
+        by a target device.
+
+        @param devices: a list of device dbus object paths
+
+        """
+        self.target_devices = devices
+
     @dbus.service.method(DBUS_PROP_IFACE,
                          in_signature='s',
                          out_signature='a{sv}')
@@ -238,7 +270,10 @@
 
         """
         logging.info('%s: %s Device Found!', self.path, device)
-        self._update_event_count('DeviceFound')
+        if device in self.target_devices:
+            self._update_event_count('DeviceFound')
+        else:
+            logging.debug('Found an uninteresting device: %s', device)
 
 
     @dbus.service.method(ADV_MONITOR_IFACE,
@@ -251,7 +286,10 @@
 
         """
         logging.info('%s: %s Device Lost!', self.path, device)
-        self._update_event_count('DeviceLost')
+        if device in self.target_devices:
+            self._update_event_count('DeviceLost')
+        else:
+            logging.debug('Lost an uninteresting device: %s', device)
 
 
 class AdvMonitorApp(dbus.service.Object):
@@ -328,7 +366,7 @@
 
         # Emit the InterfacesRemoved signal before removing the Monitor object.
         self.InterfacesRemoved(monitor.get_path(),
-                               monitor.get_properties().keys())
+                               list(monitor.get_properties().keys()))
 
         monitor.remove_monitor()
 
@@ -367,6 +405,23 @@
         return self.monitors[monitor_id].reset_event_count(event)
 
 
+    def set_target_devices(self, monitor_id, devices):
+        """Set the target devices to the given monitor.
+
+        DeviceFound and DeviceLost will only be counted if they are triggered
+        by a target device.
+
+        @param monitor_id: the monitor id.
+        @param devices: a list of device dbus object paths
+
+        @returns: True on success, False otherwise.
+        """
+        if monitor_id not in self.monitors:
+            return False
+
+        self.monitors[monitor_id].set_target_devices(devices)
+        return True
+
     def _mainloop_thread(self):
         """Run the dbus mainloop thread.
 
@@ -515,6 +570,7 @@
     CMD_REMOVE_MONITOR = 7
     CMD_GET_EVENT_COUNT = 8
     CMD_RESET_EVENT_COUNT = 9
+    CMD_SET_TARGET_DEVICES = 10
 
     def __init__(self):
         """Construction of applications manager object."""
@@ -610,11 +666,11 @@
         @param app_id: the app id of this test app process.
 
         """
-        # Initialize threads in gobject/dbus-glib before creating local threads.
-        gobject.threads_init()
+        # Initialize threads in GObject/dbus-glib before creating local threads.
+        GObject.threads_init()
         dbus.mainloop.glib.threads_init()
 
-        # Arrange for the GLib main loop to be the default.
+        # Arrange for the GObject main loop to be the default.
         dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
 
         def get_advmon_mgr(bus):
@@ -630,7 +686,7 @@
             return None
 
         bus = dbus.SystemBus()
-        mainloop = gobject.MainLoop()
+        mainloop = GObject.MainLoop()
         advmon_mgr = get_advmon_mgr(bus)
 
         app = AdvMonitorApp(bus, mainloop, advmon_mgr, app_id)
@@ -662,6 +718,9 @@
             elif cmd == self.CMD_RESET_EVENT_COUNT:
                 ret = app.reset_event_count(*data)
 
+            elif cmd == self.CMD_SET_TARGET_DEVICES:
+                ret = app.set_target_devices(*data)
+
             helper_conn.send(ret)
 
 
@@ -822,6 +881,25 @@
                                     (monitor_id, event))
 
 
+    def set_target_devices(self, app_id, monitor_id, devices):
+        """Set the target devices to the given monitor.
+
+        DeviceFound and DeviceLost will only be counted if they are triggered
+        by a target device.
+
+        @param app_id: the app id.
+        @param monitor_id: the monitor id.
+        @param devices: a list of device dbus object paths
+
+        @returns: True on success, False otherwise.
+        """
+        if app_id not in self.apps:
+            return False
+
+        self._send_to_helper(self.CMD_SET_TARGET_DEVICES, app_id,
+                             (monitor_id, devices))
+        return True
+
     def destroy(self):
         """Clean up the helper process and test app processes."""
 
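
As a rough illustration of the data layout consumed by the AdvMonitor helpers
above (purely a sketch; the field values are made up), a monitor data list and
its RSSI filter look like this, with the sentinel values marking fields that
get omitted from the exported D-Bus properties:

    # [MONITOR_TYPE, RSSI_FILTER, PATTERNS]; the RSSI filter is
    # [high_thresh, high_timeout, low_thresh, low_timeout, sampling_period].
    UNSET_RSSI, UNSET_TIMEOUT, UNSET_SAMPLING_PERIOD = 127, 0, 256

    monitor_data = [
            'or_patterns',                             # MONITOR_TYPE
            [-60, 5, -80, 5, UNSET_SAMPLING_PERIOD],   # RSSI_FILTER
            [[0, 0x03, [0x12, 0x18]]],                 # PATTERNS: start, AD type, value
    ]

    # get_properties() only exports RSSI fields that differ from the sentinels,
    # so this monitor sets thresholds/timeouts but no RSSISamplingPeriod.
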
diff --git a/client/cros/bluetooth/advertisement.py b/client/cros/bluetooth/advertisement.py
index c6a539e..39b081e 100755
--- a/client/cros/bluetooth/advertisement.py
+++ b/client/cros/bluetooth/advertisement.py
@@ -2,7 +2,6 @@
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Construction of an Advertisement object from an advertisement data
 dictionary.
 
@@ -13,30 +12,60 @@
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
-import dbus
-import dbus.mainloop.glib
-import dbus.service
+from gi.repository import GLib
+
+# TODO(b/215715213) - Wait until ebuild runs as python3 to remove this try
+try:
+    import pydbus
+except:
+    pydbus = {}
+
 import logging
 
-
-DBUS_PROP_IFACE = 'org.freedesktop.DBus.Properties'
 LE_ADVERTISEMENT_IFACE = 'org.bluez.LEAdvertisement1'
 
 
-class Advertisement(dbus.service.Object):
-    """An advertisement object."""
+def InvalidArgsException():
+    return GLib.gerror_new_literal(0, 'org.freedesktop.DBus.Error.InvalidArgs',
+                                   0)
 
+
+class Advertisement:
+    """An advertisement object."""
     def __init__(self, bus, advertisement_data):
         """Construction of an Advertisement object.
 
         @param bus: a dbus system bus.
         @param advertisement_data: advertisement data dictionary.
-
         """
-        self.bus = bus
         self._get_advertising_data(advertisement_data)
-        super(Advertisement, self).__init__(self.bus, self.path)
 
+        # Register self on bus and hold object for unregister
+        self.obj = bus.register_object(self.path, self, None)
+
+    # D-Bus service definition (required by pydbus).
+    dbus = """
+    <node>
+        <interface name="org.bluez.LEAdvertisement1">
+            <method name="Release" />
+        </interface>
+        <interface name="org.freedesktop.DBus.Properties">
+            <method name="Set">
+                <arg type="s" name="interface" direction="in" />
+                <arg type="s" name="prop" direction="in" />
+                <arg type="v" name="value" direction="in" />
+            </method>
+            <method name="GetAll">
+                <arg type="s" name="interface" direction="in" />
+                <arg type="a{sv}" name="properties" direction="out" />
+            </method>
+        </interface>
+    </node>
+    """
+
+    def unregister(self):
+        """Unregister self from bus."""
+        self.obj.unregister()
 
     def _get_advertising_data(self, advertisement_data):
         """Get advertising data from the advertisement_data dictionary.
@@ -63,32 +92,47 @@
         #     device_properties in src/third_party/bluez/src/device.c
         # For explanation about signature types, refer to
         #     https://dbus.freedesktop.org/doc/dbus-specification.html
-        self.manufacturer_data = dbus.Dictionary({}, signature='qv')
+        self.manufacturer_data = {}  # Signature = a{qv}
         manufacturer_data = advertisement_data.get('ManufacturerData', {})
         for key, value in manufacturer_data.items():
-            self.manufacturer_data[int(key, 16)] = dbus.Array(value,
-                                                              signature='y')
+            self.manufacturer_data[int(key, 16)] = GLib.Variant('ay', value)
 
-        self.service_data = dbus.Dictionary({}, signature='sv')
+        self.service_data = {}  # Signature = a{sv}
         service_data = advertisement_data.get('ServiceData', {})
         for uuid, data in service_data.items():
-            self.service_data[uuid] = dbus.Array(data, signature='y')
+            self.service_data[uuid] = GLib.Variant('ay', data)
 
         self.include_tx_power = advertisement_data.get('IncludeTxPower')
 
+        self.discoverable = advertisement_data.get('Discoverable')
+
         self.scan_response = advertisement_data.get('ScanResponseData')
 
+        self.min_interval = advertisement_data.get('MinInterval')
+        self.max_interval = advertisement_data.get('MaxInterval')
+
+        self.tx_power = advertisement_data.get('TxPower')
+
     def get_path(self):
         """Get the dbus object path of the advertisement.
 
         @returns: the advertisement object path.
 
         """
-        return dbus.ObjectPath(self.path)
+        return self.path
 
+    def Set(self, interface, prop, value):
+        """Called when bluetoothd Sets a property on our advertising object
 
-    @dbus.service.method(DBUS_PROP_IFACE, in_signature='s',
-                         out_signature='a{sv}')
+        @param interface: String interface, i.e. org.bluez.LEAdvertisement1
+        @param prop: String name of the property being set
+        @param value: Value of the property being set
+        """
+        logging.info('Setting prop {} value to {}'.format(prop, value))
+
+        if prop == 'TxPower':
+            self.tx_power = value
+
     def GetAll(self, interface):
         """Get the properties dictionary of the advertisement.
 
@@ -101,82 +145,93 @@
             raise InvalidArgsException()
 
         properties = dict()
-        properties['Type'] = dbus.String(self.type)
+        properties['Type'] = GLib.Variant('s', self.type)
 
         if self.service_uuids is not None:
-            properties['ServiceUUIDs'] = dbus.Array(self.service_uuids,
-                                                    signature='s')
+            properties['ServiceUUIDs'] = GLib.Variant('as', self.service_uuids)
         if self.solicit_uuids is not None:
-            properties['SolicitUUIDs'] = dbus.Array(self.solicit_uuids,
-                                                    signature='s')
+            properties['SolicitUUIDs'] = GLib.Variant('as', self.solicit_uuids)
         if self.manufacturer_data is not None:
-            properties['ManufacturerData'] = dbus.Dictionary(
-                self.manufacturer_data, signature='qv')
+            properties['ManufacturerData'] = GLib.Variant(
+                    'a{qv}', self.manufacturer_data)
 
         if self.service_data is not None:
-            properties['ServiceData'] = dbus.Dictionary(self.service_data,
-                                                        signature='sv')
+            properties['ServiceData'] = GLib.Variant('a{sv}',
+                                                     self.service_data)
+        if self.discoverable is not None:
+            properties['Discoverable'] = GLib.Variant('b', self.discoverable)
+
         if self.include_tx_power is not None:
-            properties['IncludeTxPower'] = dbus.Boolean(self.include_tx_power)
+            properties['IncludeTxPower'] = GLib.Variant(
+                    'b', self.include_tx_power)
 
         # Note here: Scan response data is an int (tag) -> array (value) mapping
         # but autotest's xmlrpc server can only accept string keys. For this
         # reason, the scan response key is encoded as a hex string, and then
         # re-mapped here before the advertisement is registered.
         if self.scan_response is not None:
-            scan_rsp = dbus.Dictionary({}, signature='yv')
+            scan_rsp = {}
             for key, value in self.scan_response.items():
-                scan_rsp[int(key, 16)] = dbus.Array(value, signature='y')
+                scan_rsp[int(key, 16)] = GLib.Variant('ay', value)
 
-            properties['ScanResponseData'] = scan_rsp
+            properties['ScanResponseData'] = GLib.Variant('a{yv}', scan_rsp)
+
+        if self.min_interval is not None:
+            properties['MinInterval'] = GLib.Variant('u', self.min_interval)
+
+        if self.max_interval is not None:
+            properties['MaxInterval'] = GLib.Variant('u', self.max_interval)
+
+        if self.tx_power is not None:
+            properties['TxPower'] = GLib.Variant('n', self.tx_power)
 
         return properties
 
-
-    @dbus.service.method(LE_ADVERTISEMENT_IFACE, in_signature='',
-                         out_signature='')
     def Release(self):
         """The method callback at release."""
         logging.info('%s: Advertisement Release() called.', self.path)
 
 
-def example_advertisement():
+def example_advertisement(bus):
     """A demo example of creating an Advertisement object.
 
+    @param bus: a dbus system bus.
     @returns: the Advertisement object.
 
     """
     ADVERTISEMENT_DATA = {
-        'Path': '/org/bluez/test/advertisement1',
+            'Path': '/org/bluez/test/advertisement1',
 
-        # Could be 'central' or 'peripheral'.
-        'Type': 'peripheral',
+            # Could be 'central' or 'peripheral'.
+            'Type': 'peripheral',
 
-        # Refer to the specification for a list of service assgined numbers:
-        # https://www.bluetooth.com/specifications/gatt/services
-        # e.g., 180D represents "Heart Reate" service, and
-        #       180F "Battery Service".
-        'ServiceUUIDs': ['180D', '180F'],
+            # Refer to the specification for a list of service assigned numbers:
+            # https://www.bluetooth.com/specifications/gatt/services
+            # e.g., 180D represents "Heart Rate" service, and
+            #       180F "Battery Service".
+            'ServiceUUIDs': ['180D', '180F'],
 
-        # Service solicitation UUIDs.
-        'SolicitUUIDs': [],
+            # Service solicitation UUIDs.
+            'SolicitUUIDs': [],
 
-        # Two bytes of manufacturer id followed by manufacturer specific data.
-        'ManufacturerData': {'0xff00': [0xa1, 0xa2, 0xa3, 0xa4, 0xa5]},
+            # Two bytes of manufacturer id followed by manufacturer specific data.
+            'ManufacturerData': {
+                    '0xff00': [0xa1, 0xa2, 0xa3, 0xa4, 0xa5]
+            },
 
-        # service UUID followed by additional service data.
-        'ServiceData': {'9999': [0x10, 0x20, 0x30, 0x40, 0x50]},
+            # service UUID followed by additional service data.
+            'ServiceData': {
+                    '9999': [0x10, 0x20, 0x30, 0x40, 0x50]
+            },
 
-        # Does it include transmit power level?
-        'IncludeTxPower': True}
+            # Does it include transmit power level?
+            'IncludeTxPower': True
+    }
 
     return Advertisement(bus, ADVERTISEMENT_DATA)
 
 
 if __name__ == '__main__':
-    # It is required to set the mainloop before creating the system bus object.
-    dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
-    bus = dbus.SystemBus()
-
-    adv = example_advertisement()
+    bus = pydbus.SystemBus()
+    adv = example_advertisement(bus)
     print(adv.GetAll(LE_ADVERTISEMENT_IFACE))
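
A minimal usage sketch of the pydbus-based Advertisement class above (it
mirrors the module's __main__ block; it needs pydbus, PyGObject and access to
the system bus, so treat it as illustrative rather than code the tests run
as-is):

    import pydbus

    from autotest_lib.client.cros.bluetooth.advertisement import (
            Advertisement, LE_ADVERTISEMENT_IFACE)

    ADVERTISEMENT_DATA = {
            'Path': '/org/bluez/test/advertisement2',
            'Type': 'peripheral',
            'ServiceUUIDs': ['180F'],
            'ManufacturerData': {'0xff00': [0x01, 0x02]},
            'IncludeTxPower': True,
    }

    bus = pydbus.SystemBus()
    adv = Advertisement(bus, ADVERTISEMENT_DATA)  # registers itself on the bus
    print(adv.GetAll(LE_ADVERTISEMENT_IFACE))     # properties as GLib.Variant values
    adv.unregister()                              # remove the exported object
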
diff --git a/client/cros/bluetooth/bluetooth_audio_test_data.py b/client/cros/bluetooth/bluetooth_audio_test_data.py
index c7b44ef..6924de9 100644
--- a/client/cros/bluetooth/bluetooth_audio_test_data.py
+++ b/client/cros/bluetooth/bluetooth_audio_test_data.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -13,6 +14,12 @@
 from autotest_lib.client.bin import utils
 
 
+# Chameleon device's data storing path.
+DEVICE_AUDIO_RECORD_DIR = '/tmp/audio'
+# Refer to TEST_DATA_DIR in the chameleon/deploy/deploy file.
+DEVICE_AUDIO_DATA_DIR = '/usr/share/autotest/audio-test-data'
+
+
 DIST_FILES = 'gs://chromeos-localmirror/distfiles'
 DOWNLOAD_TIMEOUT = 90 # timeout for gsutil downloads
 DATA_DIR = '/tmp'
@@ -27,9 +34,8 @@
 # There are several available models for VISQOL; since these VISQOL-based tests
 # are primarily for voice quality, this model is more tuned for voice quality.
 # Experimentally, the scores have been fairly similar to the default model
-# TODO b:169251326 terms below are set outside of this codebase
-# and should be updated when possible. ("master" -> "main")
-# 'libsvm_nu_svr_model.txt'. Details: github.com/google/visqol/tree/master/model
+# 'libsvm_nu_svr_model.txt'. Details:
+# github.com/google/visqol/tree/61cdced26b7a03098f0c78f7ab71c25dc2e461f5/model
 VISQOL_SIMILARITY_MODEL = os.path.join(
         VISQOL_FOLDER, 'visqol.runfiles', '__main__', 'model',
         'tcdvoip_nu.568_c5.31474325639_g3.17773760038_model.txt')
@@ -51,20 +57,16 @@
 
 
 A2DP = 'a2dp'
+A2DP_MEDIUM = 'a2dp_medium'
 A2DP_LONG = 'a2dp_long'
 AVRCP = 'avrcp'
 HFP_NBS = 'hfp_nbs'
+HFP_NBS_MEDIUM = 'hfp_nbs_medium'
 HFP_WBS = 'hfp_wbs'
+HFP_WBS_MEDIUM = 'hfp_wbs_medium'
 VISQOL_BUFFER_LENGTH = 10.0
 
 
-common_test_data = {
-    'bit_width': 16,
-    'format': 'S16_LE',
-    'duration': 5,
-}
-
-
 def download_file_from_bucket(dir, file_address, verify_download):
     """Extract tarball specified by tar_path to directory dir.
 
@@ -185,6 +187,21 @@
                                      'hfp_nbs_recorded_by_peer.wav'),
     'recorded_by_dut': os.path.join(AUDIO_RECORD_DIR,
                                     'hfp_nbs_recorded_by_dut.raw'),
+    'chunk_in_secs': 1,
+    'bit_width': 16,
+    'format': 'S16_LE',
+    'duration': 5,
+    'chunk_checking_duration': 5,
+
+    # Device side data used by StartPlayingAudioSubprocess function in
+    # bluetooth_audio.py.
+    'device_file': os.path.join(DEVICE_AUDIO_DATA_DIR,
+                                'sine_3500hz_rate8000_ch1_5secs.wav'),
+
+    # Device side data used by HandleOneChunk function in bluetooth_audio.py.
+    'chunk_file': os.path.join(DEVICE_AUDIO_RECORD_DIR,
+                               'hfp_nbs_recorded_by_peer_%d.raw'),
+
     'visqol_test_files': [
         {
             'file': os.path.join(AUDIO_TEST_DATA_DIR,
@@ -196,18 +213,24 @@
             'channels': 1,
             'rate': 8000,
             'duration': 26.112 + VISQOL_BUFFER_LENGTH,
+            'chunk_checking_duration': 26.112 + VISQOL_BUFFER_LENGTH,
             'bit_width': 16,
             'format': 'S16_LE',
             # convenient way to differentiate ViSQOL tests from regular tests
             'visqol_test': True,
             'encoding': 'signed-integer',
             'speech_mode': True,
-            # Passing scored are determined mostly experimentally, the DUT as
-            # sink direction has issues and so for now the score set low.
-            # Ideally both scores should be set to >= 4.0 in fully functioning
-            # scenario.
-            'sink_passing_score': 0.0,
-            'source_passing_score': 4.0,
+            # Passing scores are determined mostly experimentally.
+            # TODO(b/179501232) - NBS is currently not uniformly >= 4.0 on all
+            # devices so reduce the passing score.
+            'sink_passing_score': 3.5,
+            'source_passing_score': 3.5,
+            'reporting_type': 'voice-8k',
+
+            # Device side data used by StartPlayingAudioSubprocess function in
+            # bluetooth_audio.py.
+            'device_file': os.path.join(DEVICE_AUDIO_DATA_DIR,
+                                        'voice_8k.wav'),
         },
         {
             'file': os.path.join(AUDIO_TEST_DATA_DIR,
@@ -219,6 +242,7 @@
             'channels': 1,
             'rate': 8000,
             'duration': 5.0 + VISQOL_BUFFER_LENGTH,
+            'chunk_checking_duration': 5.0 + VISQOL_BUFFER_LENGTH,
             'bit_width': 16,
             'format': 'S16_LE',
             # convenient way to differentiate ViSQOL tests from regular tests
@@ -230,12 +254,19 @@
             # file because it's a good reference; it makes it easy to see
             # degradation and verify that this is transmitting the frequency
             # range we would expect
+            # TODO(b/179501232) - NBS is currently not uniformly >= 2.0 on all
+            # devices so reduce the passing score.
             'sink_passing_score': 1.0,
-            'source_passing_score': 2.0,
+            'source_passing_score': 1.0,
+            'reporting_type': 'sine-3.5k',
+
+            # Device side data used by StartPlayingAudioSubprocess function in
+            # bluetooth_audio.py.
+            'device_file': os.path.join(DEVICE_AUDIO_DATA_DIR,
+                                        'sine_3500hz_rate8000_ch1_5secs.wav'),
         }
     ]
 }
-hfp_nbs_test_data.update(common_test_data)
 
 
 # Audio test data for hfp wide band speech
@@ -250,6 +281,21 @@
                                      'hfp_wbs_recorded_by_peer.wav'),
     'recorded_by_dut': os.path.join(AUDIO_RECORD_DIR,
                                     'hfp_wbs_recorded_by_dut.raw'),
+    'chunk_in_secs': 1,
+    'bit_width': 16,
+    'format': 'S16_LE',
+    'duration': 5,
+    'chunk_checking_duration': 5,
+
+    # Device side data used by StartPlayingAudioSubprocess function in
+    # bluetooth_audio.py.
+    'device_file': os.path.join(DEVICE_AUDIO_DATA_DIR,
+                                'sine_7000hz_rate16000_ch1_5secs.wav'),
+
+    # Device side data used by HandleOneChunk function in bluetooth_audio.py.
+    'chunk_file': os.path.join(DEVICE_AUDIO_RECORD_DIR,
+                               'hfp_wbs_recorded_by_peer_%d.raw'),
+
     'visqol_test_files': [
         {
             'file': os.path.join(AUDIO_TEST_DATA_DIR,
@@ -261,18 +307,22 @@
             'channels': 1,
             'rate': 16000,
             'duration': 26.112 + VISQOL_BUFFER_LENGTH,
+            'chunk_checking_duration': 26.112 + VISQOL_BUFFER_LENGTH,
             'bit_width': 16,
             'format': 'S16_LE',
             # convenient way to differentiate ViSQOL tests from regular tests
             'visqol_test': True,
             'encoding': 'signed-integer',
             'speech_mode': True,
-            # Passing scored are determined mostly experimentally, the DUT as
-            # sink direction has issues and so for now the score set low.
-            # Ideally both scores should be set to >= 4.0 in fully functioning
-            # scenario.
-            'sink_passing_score': 0.0,
+            # Passing scores are determined mostly experimentally.
+            'sink_passing_score': 4.0,
             'source_passing_score': 4.0,
+            'reporting_type': 'voice-16k',
+
+            # Device side data used by StartPlayingAudioSubprocess function in
+            # bluetooth_audio.py.
+            'device_file': os.path.join(DEVICE_AUDIO_DATA_DIR,
+                                        'voice.wav'),
         },
         {
             'file': os.path.join(AUDIO_TEST_DATA_DIR,
@@ -284,22 +334,78 @@
             'channels': 1,
             'rate': 16000,
             'duration': 5.0 + VISQOL_BUFFER_LENGTH,
+            'chunk_checking_duration': 5.0 + VISQOL_BUFFER_LENGTH,
             'bit_width': 16,
             'format': 'S16_LE',
             # convenient way to differentiate ViSQOL tests from regular tests
             'visqol_test': True,
             'encoding': 'signed-integer',
             'speech_mode': True,
-            # Passing scored are determined mostly experimentally, the DUT as
-            # sink direction has issues and so for now the score set low.
-            # Ideally both scores should be set to >= 4.0 in fully functioning
-            # scenario.
-            'sink_passing_score': 0.0,
+            # Passing scores are determined mostly experimentally.
+            'sink_passing_score': 4.0,
             'source_passing_score': 4.0,
+            'reporting_type': 'sine-7k',
+
+            # Device side data used by StartPlayingAudioSubprocess function in
+            # bluetooth_audio.py.
+            'device_file': os.path.join(DEVICE_AUDIO_DATA_DIR,
+                                        'sine_7000hz_rate16000_ch1_5secs.wav'),
         }
     ]
 }
-hfp_wbs_test_data.update(common_test_data)
+
+# Audio test data for hfp nbs medium test.
+hfp_nbs_medium_test_data = {
+    'rate': 8000,
+    'channels': 1,
+    'frequencies': (3500,),
+    'file': os.path.join(AUDIO_TEST_DIR,
+                         'sine_3500hz_rate8000_ch1_60secs.raw'),
+    'recorded_by_peer': os.path.join(AUDIO_RECORD_DIR,
+                                     'hfp_nbs_medium_recorded_by_peer.raw'),
+    'recorded_by_dut': os.path.join(AUDIO_RECORD_DIR,
+                                    'hfp_nbs_medium_recorded_by_dut.raw'),
+    'chunk_in_secs': 1,
+    'bit_width': 16,
+    'format': 'S16_LE',
+    'duration': 60,
+    'chunk_checking_duration': 5,
+
+    # Device side data used by StartPlayingAudioSubprocess function in
+    # bluetooth_audio.py.
+    'device_file': os.path.join(DEVICE_AUDIO_DATA_DIR,
+                                'sine_3500hz_rate8000_ch1_60secs.wav'),
+    # Device side data used by HandleOneChunk function in bluetooth_audio.py.
+    'chunk_file': os.path.join(DEVICE_AUDIO_RECORD_DIR,
+                               'hfp_nbs_medium_recorded_by_peer_%d.raw'),
+}
+
+
+# Audio test data for hfp wbs medium test.
+hfp_wbs_medium_test_data = {
+    'rate': 16000,
+    'channels': 1,
+    'frequencies': (7000,),
+    'file': os.path.join(AUDIO_TEST_DIR,
+                         'sine_7000hz_rate16000_ch1_60secs.raw'),
+    'recorded_by_peer': os.path.join(AUDIO_RECORD_DIR,
+                                     'hfp_wbs_medium_recorded_by_peer.raw'),
+    'recorded_by_dut': os.path.join(AUDIO_RECORD_DIR,
+                                    'hfp_wbs_medium_recorded_by_dut.raw'),
+    'chunk_in_secs': 1,
+    'bit_width': 16,
+    'format': 'S16_LE',
+    'duration': 60,
+    'chunk_checking_duration': 5,
+
+    # Device side data used by StartPlayingAudioSubprocess function in
+    # bluetooth_audio.py.
+    'device_file': os.path.join(DEVICE_AUDIO_DATA_DIR,
+                                'sine_7000hz_rate16000_ch1_60secs.wav'),
+    # Device side data used by HandleOneChunk function in bluetooth_audio.py.
+    'chunk_file': os.path.join(DEVICE_AUDIO_RECORD_DIR,
+                               'hfp_wbs_medium_recorded_by_peer_%d.raw'),
+}
 
 
 # Audio test data for a2dp
@@ -312,8 +418,14 @@
     'recorded_by_peer': os.path.join(AUDIO_RECORD_DIR,
                                      'a2dp_recorded_by_peer.raw'),
     'chunk_in_secs': 5,
+    'bit_width': 16,
+    'format': 'S16_LE',
+    'duration': 5,
+
+    # Device side data used by HandleOneChunk function in bluetooth_audio.py.
+    'chunk_file': os.path.join(DEVICE_AUDIO_RECORD_DIR,
+                               'a2dp_recorded_by_peer_%d.raw'),
 }
-a2dp_test_data.update(common_test_data)
 
 
 # Audio test data for a2dp long test. The file and duration attributes
@@ -324,12 +436,32 @@
                                      'a2dp_long_recorded_by_peer.raw'),
     'duration': 0,       # determined at run time
     'chunk_in_secs': 1,
+    # Device side data used by HandleOneChunk function in bluetooth_audio.py.
+    'chunk_file': os.path.join(DEVICE_AUDIO_RECORD_DIR,
+                               'a2dp_long_recorded_by_peer_%d.raw'),
+})
+
+
+# Audio test data for a2dp medium test.
+a2dp_medium_test_data = a2dp_test_data.copy()
+a2dp_medium_test_data.update({
+    'recorded_by_peer': os.path.join(AUDIO_RECORD_DIR,
+                                     'a2dp_medium_recorded_by_peer.raw'),
+    'duration': 60,
+    'chunk_in_secs': 1,
+    'chunk_checking_duration': 5,
+    # Device side data used by HandleOneChunk function in bluetooth_audio.py.
+    'chunk_file': os.path.join(DEVICE_AUDIO_RECORD_DIR,
+                               'a2dp_medium_recorded_by_peer_%d.raw'),
 })
 
 
 audio_test_data = {
     A2DP: a2dp_test_data,
+    A2DP_MEDIUM: a2dp_medium_test_data,
     A2DP_LONG: a2dp_long_test_data,
     HFP_WBS: hfp_wbs_test_data,
+    HFP_WBS_MEDIUM: hfp_wbs_medium_test_data,
     HFP_NBS: hfp_nbs_test_data,
+    HFP_NBS_MEDIUM: hfp_nbs_medium_test_data,
 }
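
A short sketch (hypothetical consumer code, not taken from bluetooth_audio.py)
of how the per-profile dictionaries above are typically consumed:

    from autotest_lib.client.cros.bluetooth.bluetooth_audio_test_data import (
            audio_test_data, HFP_NBS_MEDIUM)

    data = audio_test_data[HFP_NBS_MEDIUM]

    # The peer records in chunk_in_secs-sized chunks; chunk_file is a
    # printf-style template that takes the chunk index.
    first_chunk = data['chunk_file'] % 0
    num_chunks = int(data['chunk_checking_duration'] // data['chunk_in_secs'])

    print(first_chunk)  # /tmp/audio/hfp_nbs_medium_recorded_by_peer_0.raw
    print(num_chunks)   # 5
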
diff --git a/client/cros/bluetooth/bluetooth_device_xmlrpc_server.py b/client/cros/bluetooth/bluetooth_device_xmlrpc_server.py
index 44f46f6..f74df9f 100755
--- a/client/cros/bluetooth/bluetooth_device_xmlrpc_server.py
+++ b/client/cros/bluetooth/bluetooth_device_xmlrpc_server.py
@@ -1,5 +1,5 @@
-#!/usr/bin/env python2
-
+#!/usr/bin/env python
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -14,15 +14,14 @@
 import common
 from autotest_lib.client.cros import constants
 from autotest_lib.client.cros import xmlrpc_server
-from autotest_lib.client.cros.multimedia import bluetooth_facade_native
+from autotest_lib.client.cros.multimedia import bluetooth_facade
 
 
-class BluetoothDeviceXmlRpcDelegate(
-        xmlrpc_server.XmlRpcDelegate,
-        bluetooth_facade_native.BluetoothFacadeNative):
+class BluetoothDeviceXmlRpcDelegate(xmlrpc_server.XmlRpcDelegate,
+                                    bluetooth_facade.BluezFacadeLocal):
     """Exposes DUT methods called remotely during Bluetooth autotests.
 
-    The delegate inherits from BluetoothFacadeNative where all native calls
+    The delegate inherits from BluezFacadeLocal where all native calls
     should be kept. This XmlRpcDelegate is kept around for when Bluetooth needs
     to be called without using the MultimediaRpcDelegate.
 
diff --git a/client/cros/bluetooth/common.py b/client/cros/bluetooth/common.py
index 3bae9bd..d36a6a4 100644
--- a/client/cros/bluetooth/common.py
+++ b/client/cros/bluetooth/common.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/nfc/__init__.py b/client/cros/bluetooth/floss/__init__.py
similarity index 100%
rename from client/cros/nfc/__init__.py
rename to client/cros/bluetooth/floss/__init__.py
diff --git a/client/cros/bluetooth/floss/adapter_client.py b/client/cros/bluetooth/floss/adapter_client.py
new file mode 100644
index 0000000..12a6e1d
--- /dev/null
+++ b/client/cros/bluetooth/floss/adapter_client.py
@@ -0,0 +1,665 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Client class to access the Floss adapter interface."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from enum import IntEnum
+from gi.repository import GLib
+import logging
+import math
+import random
+
+from autotest_lib.client.cros.bluetooth.floss.observer_base import ObserverBase
+from autotest_lib.client.cros.bluetooth.floss.utils import (glib_call,
+                                                            glib_callback,
+                                                            PropertySet)
+
+
+class BondState(IntEnum):
+    """Bluetooth bonding state."""
+    NOT_BONDED = 0
+    BONDING = 1
+    BONDED = 2
+
+
+class Transport(IntEnum):
+    """Bluetooth transport type."""
+    AUTO = 0
+    BREDR = 1
+    LE = 2
+
+
+class SspVariant(IntEnum):
+    """Bluetooth SSP variant type."""
+    PASSKEY_CONFIRMATION = 0
+    PASSKEY_ENTRY = 1
+    CONSENT = 2
+    PASSKEY_NOTIFICATION = 3
+
+
+class BluetoothCallbacks:
+    """Callbacks for the Adapter Interface.
+
+    Implement this to observe these callbacks when exporting callbacks via
+    register_callback.
+    """
+    def on_address_changed(self, addr):
+        """Adapter address changed.
+
+        @param addr: New address of the adapter.
+        """
+        pass
+
+    def on_device_found(self, remote_device):
+        """Device found via discovery.
+
+        @param remote_device: Remote device found during discovery session.
+        """
+        pass
+
+    def on_discovering_changed(self, discovering):
+        """Discovering state has changed.
+
+        @param discovering: Whether discovery enabled or disabled.
+        """
+        pass
+
+    def on_ssp_request(self, remote_device, class_of_device, variant, passkey):
+        """Simple secure pairing request for agent to reply.
+
+        @param remote_device: Remote device that is being paired.
+        @param class_of_device: Class of device as described in HCI spec.
+        @param variant: SSP variant (0-3). [Confirmation, Entry, Consent, Notification]
+        @param passkey: Passkey to display (so user can confirm or type it).
+        """
+        pass
+
+    def on_bond_state_changed(self, status, device_address, state):
+        """Bonding/Pairing state has changed for a device.
+
+        @param status: Success (0) or failure reason for bonding.
+        @param device_address: This notification is for this BDADDR.
+        @param state: Bonding state. 0 = Not bonded, 1 = Bonding, 2 = Bonded.
+        """
+        pass
+
+
+class BluetoothConnectionCallbacks:
+    """Callbacks for the Device Connection interface.
+
+    Implement this to observe these callbacks when exporting callbacks via
+    register_connection_callback.
+    """
+    def on_device_connected(self, remote_device):
+        """Notification that a device has completed HCI connection.
+
+        @param remote_device: Remote device that completed HCI connection.
+        """
+        pass
+
+    def on_device_disconnected(self, remote_device):
+        """Notification that a device has completed HCI disconnection.
+
+        @param remote_device: Remote device that completed HCI disconnection.
+        """
+        pass
+
+
+class FlossAdapterClient(BluetoothCallbacks, BluetoothConnectionCallbacks):
+    """Handles method calls to and callbacks from the Adapter interface."""
+
+    ADAPTER_SERVICE = 'org.chromium.bluetooth'
+    ADAPTER_INTERFACE = 'org.chromium.bluetooth.Bluetooth'
+    ADAPTER_OBJECT_PATTERN = '/org/chromium/bluetooth/hci{}/adapter'
+    ADAPTER_CB_INTF = 'org.chromium.bluetooth.BluetoothCallback'
+    ADAPTER_CB_OBJ_PATTERN = '/org/chromium/bluetooth/hci{}/test_adapter_client{}'
+    ADAPTER_CONN_CB_INTF = 'org.chromium.bluetooth.BluetoothConnectionCallback'
+    ADAPTER_CONN_CB_OBJ_PATTERN = '/org/chromium/bluetooth/hci{}/test_connection_client{}'
+
+    @staticmethod
+    def parse_dbus_device(remote_device_dbus):
+        """Parse a dbus variant dict as a remote device.
+
+        @param remote_device_dbus: Variant dict with signature a{sv}.
+
+        @return Parsing success, BluetoothDevice tuple
+        """
+        if 'address' in remote_device_dbus and 'name' in remote_device_dbus:
+            return True, (str(remote_device_dbus['address']),
+                          str(remote_device_dbus['name']))
+
+        return False, None
+
+    class ExportedAdapterCallbacks(ObserverBase):
+        """
+        <node>
+            <interface name="org.chromium.bluetooth.BluetoothCallback">
+                <method name="OnAddressChanged">
+                    <arg type="s" name="addr" direction="in" />
+                </method>
+                <method name="OnDeviceFound">
+                    <arg type="a{sv}" name="remote_device_dbus" direction="in" />
+                </method>
+                <method name="OnDiscoveringChanged">
+                    <arg type="b" name="discovering" direction="in" />
+                </method>
+                <method name="OnSspRequest">
+                    <arg type="a{sv}" name="remote_device_dbus" direction="in" />
+                    <arg type="u" name="class_of_device" direction="in" />
+                    <arg type="u" name="variant" direction="in" />
+                    <arg type="u" name="passkey" direction="in" />
+                </method>
+                <method name="OnBondStateChanged">
+                    <arg type="u" name="status" direction="in" />
+                    <arg type="s" name="address" direction="in" />
+                    <arg type="u" name="state" direction="in" />
+                </method>
+            </interface>
+        </node>
+        """
+
+        def __init__(self):
+            """Construct exported callbacks object.
+            """
+            ObserverBase.__init__(self)
+
+        def OnAddressChanged(self, addr):
+            """Handle address changed callbacks."""
+            for observer in self.observers.values():
+                observer.on_address_changed(addr)
+
+        def OnDeviceFound(self, remote_device_dbus):
+            """Handle device found from discovery."""
+            parsed, remote_device = FlossAdapterClient.parse_dbus_device(
+                    remote_device_dbus)
+            if not parsed:
+                logging.debug('OnDeviceFound parse error: {}'.format(
+                        remote_device_dbus))
+                return
+
+            for observer in self.observers.values():
+                observer.on_device_found(remote_device)
+
+        def OnDiscoveringChanged(self, discovering):
+            """Handle discovering state changed."""
+            for observer in self.observers.values():
+                observer.on_discovering_changed(bool(discovering))
+
+        def OnSspRequest(self, remote_device_dbus, class_of_device, variant,
+                         passkey):
+            """Handle pairing/bonding request to agent."""
+            parsed, remote_device = FlossAdapterClient.parse_dbus_device(
+                    remote_device_dbus)
+            if not parsed:
+                logging.debug('OnSspRequest parse error: {}'.format(
+                        remote_device_dbus))
+                return
+
+            for observer in self.observers.values():
+                observer.on_ssp_request(remote_device, class_of_device,
+                                        variant, passkey)
+
+        def OnBondStateChanged(self, status, address, state):
+            """Handle bond state changed callbacks."""
+            for observer in self.observers.values():
+                observer.on_bond_state_changed(status, address, state)
+
+    class ExportedConnectionCallbacks(ObserverBase):
+        """
+        <node>
+            <interface name="org.chromium.bluetooth.BluetoothConnectionCallback">
+                <method name="OnDeviceConnected">
+                    <arg type="a{sv}" name="remote_device_dbus" direction="in" />
+                </method>
+                <method name="OnDeviceDisconnected">
+                    <arg type="a{sv}" name="remote_device_dbus" direction="in" />
+                </method>
+            </interface>
+        </node>
+        """
+
+        def __init__(self, bus, object_path):
+            """Construct exported connection callbacks object.
+            """
+            ObserverBase.__init__(self)
+
+        def OnDeviceConnected(self, remote_device_dbus):
+            """Handle device connected."""
+            parsed, remote_device = FlossAdapterClient.parse_dbus_device(
+                    remote_device_dbus)
+            if not parsed:
+                logging.debug('OnDeviceConnected parse error: {}'.format(
+                        remote_device_dbus))
+                return
+
+            for observer in self.observers.values():
+                observer.on_device_connected(remote_device)
+
+        def OnDeviceDisconnected(self, remote_device_dbus):
+            """Handle device disconnected."""
+            parsed, remote_device = FlossAdapterClient.parse_dbus_device(
+                    remote_device_dbus)
+            if not parsed:
+                logging.debug('OnDeviceDisconnected parse error: {}'.format(
+                        remote_device_dbus))
+                return
+
+            for observer in self.observers.values():
+                observer.on_device_disconnected(remote_device)
+
+    def __init__(self, bus, hci):
+        """Construct the client.
+
+        @param bus: DBus bus over which we'll establish connections.
+        @param hci: HCI adapter index. Get this value from `get_default_adapter`
+                    on FlossManagerClient.
+        """
+        self.bus = bus
+        self.hci = hci
+        self.objpath = self.ADAPTER_OBJECT_PATTERN.format(hci)
+
+        # We don't register callbacks by default.
+        self.callbacks = None
+        self.connection_callbacks = None
+
+        # Locally cached values
+        self.known_devices = {}
+        self.discovering = False
+
+        # Initialize properties when registering callbacks (we know proxy is
+        # valid at this point).
+        self.properties = None
+        self.remote_properties = None
+
+    def __del__(self):
+        """Destructor"""
+        del self.callbacks
+        del self.connection_callbacks
+
+    def _make_device(self,
+                     address,
+                     name,
+                     bond_state=BondState.NOT_BONDED,
+                     connected=False):
+        """Make a device dict."""
+        return {
+                'address': address,
+                'name': name,
+                'bond_state': bond_state,
+                'connected': connected,
+        }
+
+    @glib_callback()
+    def on_device_found(self, remote_device):
+        """Remote device was found as part of discovery."""
+        address, name = remote_device
+
+        # Update a new device
+        if not address in self.known_devices:
+            self.known_devices[address] = self._make_device(address, name)
+        # Update name if previous cached value didn't have a name
+        elif not self.known_devices[address]['name']:
+            self.known_devices[address]['name'] = name
+
+    @glib_callback()
+    def on_discovering_changed(self, discovering):
+        """Discovering state has changed."""
+        # Ignore a no-op
+        if self.discovering == discovering:
+            return
+
+        # Cache the value
+        self.discovering = discovering
+
+        # If we are freshly starting discovery, clear all locally cached known
+        # devices (that are not bonded or connected)
+        if discovering:
+            # Filter known devices to currently bonded or connected devices
+            self.known_devices = {
+                    key: value
+                    for key, value in self.known_devices.items()
+                    if value.get('bond_state', 0) > 0
+                    or value.get('connected', False)
+            }
+
+    @glib_callback()
+    def on_bond_state_changed(self, status, address, state):
+        """Bond state has changed."""
+        # A bond state change can arrive for an unknown device if previously bonded
+        if not address in self.known_devices:
+            self.known_devices[address] = self._make_device(
+                    address,
+                    '',
+                    bond_state=state,
+            )
+        else:
+            self.known_devices[address]['bond_state'] = state
+
+    @glib_callback()
+    def on_device_connected(self, remote_device):
+        """Remote device connected hci."""
+        address, name = remote_device
+        if not address in self.known_devices:
+            self.known_devices[address] = self._make_device(address,
+                                                            name,
+                                                            connected=True)
+        else:
+            self.known_devices[address]['connected'] = True
+
+    @glib_callback()
+    def on_device_disconnected(self, remote_device):
+        """Remote device disconnected hci."""
+        address, name = remote_device
+        if not address in self.known_devices:
+            self.known_devices[address] = self._make_device(address,
+                                                            name,
+                                                            connected=False)
+        else:
+            self.known_devices[address]['connected'] = False
+
+    def _make_dbus_device(self, address, name):
+        return {
+                'address': GLib.Variant('s', address),
+                'name': GLib.Variant('s', name)
+        }
+
+    @glib_call(False)
+    def has_proxy(self):
+        """Checks whether adapter proxy can be acquired."""
+        return bool(self.proxy())
+
+    def proxy(self):
+        """Gets proxy object to adapter interface for method calls."""
+        return self.bus.get(self.ADAPTER_SERVICE,
+                            self.objpath)[self.ADAPTER_INTERFACE]
+
+    # TODO(b/227405934): Not sure we want GetRemoteRssi on adapter api since
+    #                    it's unlikely to be accurate over time. Use a mock for
+    #                    testing for now.
+    def get_mock_remote_rssi(self, device):
+        """Gets mock value for remote device rssi."""
+        return -50
+
+    def register_properties(self):
+        """Registers a property set for this client."""
+        self.properties = PropertySet({
+                'Address': (self.proxy().GetAddress, None),
+                'Name': (self.proxy().GetName, self.proxy().SetName),
+                'Class': (self.proxy().GetBluetoothClass,
+                          self.proxy().SetBluetoothClass),
+                'Uuids': (self.proxy().GetUuids, None),
+                'Discoverable':
+                (self.proxy().GetDiscoverable, self.proxy().SetDiscoverable),
+        })
+
+        self.remote_properties = PropertySet({
+                'Name': (self.proxy().GetRemoteName, None),
+                'Type': (self.proxy().GetRemoteType, None),
+                'Alias': (self.proxy().GetRemoteAlias, None),
+                'Class': (self.proxy().GetRemoteClass, None),
+                'RSSI': (self.get_mock_remote_rssi, None),
+        })
+
+    @glib_call(False)
+    def register_callbacks(self):
+        """Registers callbacks for this client.
+
+        This will also initialize properties and populate the list of bonded
+        devices since this should be the first thing that gets called after we
+        know that the adapter client has a valid proxy object.
+        """
+        # Make sure properties are registered
+        if not self.properties:
+            self.register_properties()
+
+        # Prevent callback registration multiple times
+        if self.callbacks and self.connection_callbacks:
+            return True
+
+        # Generate a random number between 1 and 1000
+        rnumber = math.floor(random.random() * 1000 + 1)
+
+        # Reset known devices to just bonded devices and their connection
+        # states.
+        self.known_devices.clear()
+        bonded_devices = self.proxy().GetBondedDevices()
+        for device in bonded_devices:
+            (success, devtuple) = FlossAdapterClient.parse_dbus_device(device)
+            if success:
+                (address, name) = devtuple
+                cstate = self.proxy().GetConnectionState(
+                        self._make_dbus_device(address, name))
+                logging.info('[%s:%s] initially bonded. Connected = %d',
+                             address, name, cstate)
+                self.known_devices[address] = self._make_device(
+                        address,
+                        name,
+                        bond_state=BondState.BONDED,
+                        connected=bool(cstate > 0))
+
+        if not self.callbacks:
+            # Create and publish callbacks
+            self.callbacks = self.ExportedAdapterCallbacks()
+            self.callbacks.add_observer('adapter_client', self)
+            objpath = self.ADAPTER_CB_OBJ_PATTERN.format(self.hci, rnumber)
+            self.bus.register_object(objpath, self.callbacks, None)
+
+            # Register published callback with adapter daemon
+            self.proxy().RegisterCallback(objpath)
+
+        if not self.connection_callbacks:
+            self.connection_callbacks = self.ExportedConnectionCallbacks(
+                    self.bus, objpath)
+            self.connection_callbacks.add_observer('adapter_client', self)
+            objpath = self.ADAPTER_CONN_CB_OBJ_PATTERN.format(
+                    self.hci, rnumber)
+            self.bus.register_object(objpath, self.connection_callbacks, None)
+
+            self.proxy().RegisterConnectionCallback(objpath)
+
+        return True
+
+    def register_callback_observer(self, name, observer):
+        """Add an observer for all callbacks.
+
+        @param name: Name of the observer.
+        @param observer: Observer that implements all callback classes.
+        """
+        if isinstance(observer, BluetoothCallbacks):
+            self.callbacks.add_observer(name, observer)
+
+        if isinstance(observer, BluetoothConnectionCallbacks):
+            self.connection_callbacks.add_observer(name, observer)
+
+    def unregister_callback_observer(self, name, observer):
+        """Remove an observer for all callbacks.
+
+        @param name: Name of the observer.
+        @param observer: Observer that implements all callback classes.
+        """
+        if isinstance(observer, BluetoothCallbacks):
+            self.callbacks.remove_observer(name, observer)
+
+        if isinstance(observer, BluetoothConnectionCallbacks):
+            self.connection_callbacks.remove_observer(name, observer)
+
+    @glib_call('')
+    def get_address(self):
+        """Gets the adapter's current address."""
+        return str(self.proxy().GetAddress())
+
+    @glib_call('')
+    def get_name(self):
+        """Gets the adapter's name."""
+        return str(self.proxy().GetName())
+
+    @glib_call(None)
+    def get_property(self, prop_name):
+        """Gets property by name."""
+        return self.properties.get(prop_name)
+
+    @glib_call(None)
+    def get_remote_property(self, address, prop_name):
+        """Gets remote device property by name."""
+        name = 'Test device'
+        if address in self.known_devices:
+            name = self.known_devices[address]['name']
+
+        remote_device = self._make_dbus_device(address, name)
+        return self.remote_properties.get(prop_name, remote_device)
+
+    @glib_call(None)
+    def set_property(self, prop_name, *args):
+        """Sets property by name."""
+        return self.properties.set(prop_name, *args)
+
+    @glib_call(None)
+    def set_remote_property(self, address, prop_name, *args):
+        """Sets remote property by name."""
+        name = 'Test device'
+        if address in self.known_devices:
+            name = self.known_devices[address]['name']
+
+        remote_device = self._make_dbus_device(address, name)
+        return self.properties.set(prop_name, remote_device, *args)
+
+    @glib_call(False)
+    def start_discovery(self):
+        """Starts discovery session."""
+        return bool(self.proxy().StartDiscovery())
+
+    @glib_call(False)
+    def stop_discovery(self):
+        """Stops discovery session."""
+        return bool(self.proxy().CancelDiscovery())
+
+    @glib_call(False)
+    def is_discovering(self):
+        """Is adapter discovering?"""
+        return bool(self.discovering)
+
+    @glib_call(False)
+    def has_device(self, address):
+        """Checks to see if device with address is known."""
+        return address in self.known_devices
+
+    def is_bonded(self, address):
+        """Checks if the given address is currently fully bonded."""
+        return address in self.known_devices and self.known_devices[
+                address].get('bond_state',
+                             BondState.NOT_BONDED) == BondState.BONDED
+
+    @glib_call(False)
+    def create_bond(self, address, transport):
+        """Creates bond with target address.
+        """
+        name = 'Test bond'
+        if address in self.known_devices:
+            name = self.known_devices[address]['name']
+
+        remote_device = self._make_dbus_device(address, name)
+        return bool(self.proxy().CreateBond(remote_device, int(transport)))
+
+    @glib_call(False)
+    def cancel_bond(self, address):
+        """Call cancel bond with no additional checks. Prefer |forget_device|.
+
+        @param address: Device to cancel bond.
+        @returns Result of |CancelBondProcess|.
+        """
+        name = 'Test bond'
+        if address in self.known_devices:
+            name = self.known_devices[address]['name']
+
+        remote_device = self._make_dbus_device(address, name)
+        return bool(self.proxy().CancelBond(remote_device))
+
+    @glib_call(False)
+    def remove_bond(self, address):
+        """Call remove bond with no additional checks. Prefer |forget_device|.
+
+        @param address: Device to remove bond.
+        @returns Result of |RemoveBond|.
+        """
+        name = 'Test bond'
+        if address in self.known_devices:
+            name = self.known_devices[address]['name']
+
+        remote_device = self._make_dbus_device(address, name)
+        return bool(self.proxy().RemoveBond(remote_device))
+
+    @glib_call(False)
+    def forget_device(self, address):
+        """Forgets device from local cache and removes bonding.
+
+        If a device is currently bonding or bonded, it will cancel or remove the
+        bond to totally remove this device.
+
+        @returns:
+            True if device was known and was removed.
+            False if device was unknown or removal failed.
+        """
+        if address not in self.known_devices:
+            return False
+
+        # Remove the device from known devices first
+        device = self.known_devices[address]
+        del self.known_devices[address]
+
+        remote_device = self._make_dbus_device(device['address'],
+                                               device['name'])
+
+        # Extra actions if bond state is not NOT_BONDED
+        if device['bond_state'] == BondState.BONDING:
+            return bool(self.proxy().CancelBondProcess(remote_device))
+        elif device['bond_state'] == BondState.BONDED:
+            return bool(self.proxy().RemoveBond(remote_device))
+
+        return True
+
+    @glib_call(False)
+    def set_pairing_confirmation(self, address, accept):
+        """Confirm that a pairing should be completed on a bonding device."""
+        # Device should be known or already `Bonding`
+        if address not in self.known_devices:
+            logging.debug('[%s] Unknown device in set_pairing_confirmation',
+                          address)
+            return False
+
+        device = self.known_devices[address]
+        remote_device = self._make_dbus_device(address, device['name'])
+
+        return bool(self.proxy().SetPairingConfirmation(remote_device, accept))
+
+    def get_connected_devices_count(self):
+        """Gets the number of known, connected devices."""
+        return sum([
+                1 for x in self.known_devices.values()
+                if x.get('connected', False)
+        ])
+
+    def is_connected(self, address):
+        """Checks whether a device is connected."""
+        return address in self.known_devices and self.known_devices[
+                address].get('connected', False)
+
+    @glib_call(False)
+    def connect_all_enabled_profiles(self, address):
+        """Connect all enabled profiles for target address."""
+        device = self._make_dbus_device(
+                address,
+                self.known_devices.get(address, {}).get('name', 'Test device'))
+        return bool(self.proxy().ConnectAllEnabledProfiles(device))
+
+    @glib_call(False)
+    def disconnect_all_enabled_profiles(self, address):
+        """Disconnect all enabled profiles for target address."""
+        device = self._make_dbus_device(
+                address,
+                self.known_devices.get(address, {}).get('name', 'Test device'))
+        return bool(self.proxy().DisconnectAllEnabledProfiles(device))
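For orientation, a minimal sketch of how the bonding helpers above are meant to compose. The adapter_client instance, the address, and the transport value are illustrative assumptions, and a real flow waits on adapter callbacks rather than polling:

# Illustrative sketch; 'adapter_client' is an already-constructed client with
# callbacks registered and a GLib mainloop running in the 'glib' thread.
ADDRESS = '00:11:22:33:44:55'  # placeholder address

adapter_client.start_discovery()
# ... wait for the device to appear via the discovery callbacks ...
if adapter_client.has_device(ADDRESS):
    adapter_client.stop_discovery()
    adapter_client.create_bond(ADDRESS, transport=2)  # transport value assumed
    # ... wait for the bond_state callback to report BONDED ...
    if adapter_client.is_bonded(ADDRESS):
        adapter_client.connect_all_enabled_profiles(ADDRESS)

# forget_device() drops the cached entry and cancels or removes the bond as
# needed.
adapter_client.forget_device(ADDRESS)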
diff --git a/client/cros/bluetooth/floss/manager_client.py b/client/cros/bluetooth/floss/manager_client.py
new file mode 100644
index 0000000..a983e8a
--- /dev/null
+++ b/client/cros/bluetooth/floss/manager_client.py
@@ -0,0 +1,204 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Client class to access the Floss manager interface."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import math
+import random
+
+from autotest_lib.client.cros.bluetooth.floss.observer_base import ObserverBase
+from autotest_lib.client.cros.bluetooth.floss.utils import glib_call, glib_callback
+
+
+class ManagerCallbacks:
+    """Callbacks for the Manager Interface.
+
+    Implement this to observe these callbacks when exporting callbacks via
+    register_callback.
+    """
+    def on_hci_device_changed(self, hci, present):
+        """Hci device presence is updated.
+
+        @param hci: Hci interface number.
+        @param present: Whether this hci interface is appearing or disappearing.
+        """
+        pass
+
+    def on_hci_enabled_changed(self, hci, enabled):
+        """Hci device is being enabled or disabled.
+
+        @param hci: Hci interface number.
+        @param enabled: Whether this hci interface is being enabled or disabled.
+        """
+        pass
+
+
+class FlossManagerClient(ManagerCallbacks):
+    """ Handles method calls to and callbacks from the Manager interface."""
+
+    MGR_SERVICE = 'org.chromium.bluetooth.Manager'
+    MGR_INTERFACE = 'org.chromium.bluetooth.Manager'
+    MGR_OBJECT = '/org/chromium/bluetooth/Manager'
+
+    # Exported callback interface and objects
+    CB_EXPORTED_INTF = 'org.chromium.bluetooth.ManagerCallbacks'
+    CB_EXPORTED_OBJ = '/org/chromium/bluetooth/test_manager_client{}'
+
+    class AdaptersNotParseable(Exception):
+        """An entry in the result of GetAvailableAdapters was not parseable."""
+        pass
+
+    class ExportedManagerCallbacks(ObserverBase):
+        """
+        <node>
+            <interface name="org.chromium.bluetooth.ManagerCallbacks">
+                <method name="OnHciDeviceChanged">
+                    <arg type="i" name="hci" direction="in" />
+                    <arg type="b" name="present" direction="in" />
+                </method>
+                <method name="OnHciEnabledChanged">
+                    <arg type="i" name="hci" direction="in" />
+                    <arg type="b" name="enabled" direction="in" />
+                </method>
+            </interface>
+        </node>
+        """
+        def __init__(self):
+            """Construct exported callbacks object.
+            """
+            ObserverBase.__init__(self)
+
+        def OnHciDeviceChanged(self, hci, present):
+            """Handle device presence callbacks."""
+            for observer in self.observers.values():
+                observer.on_hci_device_changed(hci, present)
+
+        def OnHciEnabledChanged(self, hci, enabled):
+            """Handle device enabled callbacks."""
+            for observer in self.observers.values():
+                observer.on_hci_enabled_changed(hci, enabled)
+
+    def __init__(self, bus):
+        """ Construct the client.
+
+        @param bus: DBus bus over which we'll establish connections.
+        """
+        self.bus = bus
+
+        # We don't register callbacks by default. The client owner must call
+        # register_callbacks to do so.
+        self.callbacks = None
+
+        # Initialize hci devices and their power states
+        self.adapters = {}
+
+    def __del__(self):
+        """Destructor"""
+        del self.callbacks
+
+    @glib_call(False)
+    def has_proxy(self):
+        """Checks whether manager proxy can be acquired."""
+        return bool(self.proxy())
+
+    def proxy(self):
+        """Gets proxy object to manager interface for method calls."""
+        return self.bus.get(self.MGR_SERVICE,
+                            self.MGR_OBJECT)[self.MGR_INTERFACE]
+
+    @glib_call(False)
+    def register_callbacks(self):
+        """Registers manager callbacks for this client if one doesn't already exist.
+        """
+        # Callbacks already registered
+        if self.callbacks:
+            return True
+
+        # Generate a random number between 1-1000
+        rnumber = math.floor(random.random() * 1000 + 1)
+
+        # Create and publish callbacks
+        self.callbacks = self.ExportedManagerCallbacks()
+        self.callbacks.add_observer('manager_client', self)
+        objpath = self.CB_EXPORTED_OBJ.format(rnumber)
+        self.bus.register_object(objpath, self.callbacks, None)
+
+        # Register published callbacks with manager daemon
+        self.proxy().RegisterCallback(objpath)
+
+        return True
+
+    @glib_callback()
+    def on_hci_device_changed(self, hci, present):
+        """Handle device presence change."""
+        if present:
+            self.adapters[hci] = self.adapters.get(hci, False)
+        elif hci in self.adapters:
+            del self.adapters[hci]
+
+    @glib_callback()
+    def on_hci_enabled_changed(self, hci, enabled):
+        """Handle device enabled change."""
+        self.adapters[hci] = enabled
+
+    def get_default_adapter(self):
+        """Get the default adapter in use by the manager."""
+        # TODO(abps): The default adapter is hci0 until we support multiple
+        #             adapters.
+        return 0
+
+    def has_default_adapter(self):
+        """Checks whether the default adapter exists on this system."""
+        return self.get_default_adapter() in self.adapters
+
+    @glib_call()
+    def start(self, hci):
+        """Start a specific adapter."""
+        self.proxy().Start(hci)
+
+    @glib_call()
+    def stop(self, hci):
+        """Stop a specific adapter."""
+        self.proxy().Stop(hci)
+
+    @glib_call(False)
+    def get_adapter_enabled(self, hci):
+        """Checks whether a specific adapter is enabled (i.e. started)."""
+        return bool(self.proxy().GetAdapterEnabled(hci))
+
+    @glib_call(False)
+    def get_floss_enabled(self):
+        """Gets whether Floss is enabled."""
+        return bool(self.proxy().GetFlossEnabled())
+
+    @glib_call()
+    def set_floss_enabled(self, enabled):
+        """Sets whether Floss should be enabled."""
+        self.proxy().SetFlossEnabled(enabled)
+
+    @glib_call([])
+    def get_available_adapters(self):
+        """Gets a list of currently available adapters and if they are enabled.
+        """
+        all_adapters = []
+        dbus_result = self.proxy().GetAvailableAdapters()
+
+        for d in dbus_result:
+            if 'hci_interface' in d and 'enabled' in d:
+                all_adapters.append(
+                        (int(d['hci_interface']), bool(d['enabled'])))
+            else:
+                raise FlossManagerClient.AdaptersNotParseable(
+                        'Could not parse: {}'.format(d))
+
+        # This function call overwrites any existing cached values of
+        # self.adapters that we may have gotten from observers.
+        self.adapters = {}
+        for (hci, enabled) in all_adapters:
+            self.adapters[hci] = enabled
+
+        return all_adapters
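A usage sketch for FlossManagerClient. It assumes a pydbus SystemBus (consistent with the bus.get()/register_object() calls above) and a GLib mainloop running in a thread named 'glib', as required by the glib_call decorator in floss/utils.py; everything outside this file is an assumption:

import threading

from gi.repository import GLib
from pydbus import SystemBus

from autotest_lib.client.cros.bluetooth.floss.manager_client import (
        FlossManagerClient)

# glib_call() requires the mainloop to run in a thread named 'glib'.
loop = GLib.MainLoop()
glib_thread = threading.Thread(name='glib', target=loop.run)
glib_thread.daemon = True
glib_thread.start()

manager = FlossManagerClient(SystemBus())
manager.register_callbacks()

# Query the daemon and start the default adapter if it is not running yet.
adapters = manager.get_available_adapters()  # e.g. [(0, False)]
default_hci = manager.get_default_adapter()
if not manager.get_adapter_enabled(default_hci):
    manager.start(default_hci)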
diff --git a/client/cros/bluetooth/floss/observer_base.py b/client/cros/bluetooth/floss/observer_base.py
new file mode 100644
index 0000000..9237380
--- /dev/null
+++ b/client/cros/bluetooth/floss/observer_base.py
@@ -0,0 +1,36 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Simple observer base class."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import logging
+
+class ObserverBase:
+    """Simple observer base class that provides the observer pattern."""
+    def __init__(self):
+        self.observers = {}
+
+    def add_observer(self, name, observer):
+        """Add named observer if it doesn't already exist.
+
+        @param name: Unique name for the observer.
+        @param observer: Object that implements the observer callbacks.
+
+        @return True if observer was added.
+        """
+        if name not in self.observers:
+            self.observers[name] = observer
+            return True
+
+        logging.warning('Observer %s already exists, not adding', name)
+        return False
+
+    def remove_observer(self, name, observer):
+        """Remove named observer."""
+        if name in self.observers:
+            del self.observers[name]
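A short sketch of the intended pattern: a concrete notifier subclasses ObserverBase and fans callbacks out to whatever observers were registered. The notifier and observer classes here are hypothetical:

from autotest_lib.client.cros.bluetooth.floss.observer_base import ObserverBase


class PresenceNotifier(ObserverBase):
    """Hypothetical notifier that fans presence events out to observers."""

    def notify_presence(self, hci, present):
        for observer in self.observers.values():
            observer.on_hci_device_changed(hci, present)


class LoggingObserver:
    """Hypothetical observer implementing the callback of interest."""

    def on_hci_device_changed(self, hci, present):
        print('hci{}: present={}'.format(hci, present))


notifier = PresenceNotifier()
notifier.add_observer('logger', LoggingObserver())  # returns True
notifier.notify_presence(0, True)  # prints 'hci0: present=True'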
diff --git a/client/cros/bluetooth/floss/utils.py b/client/cros/bluetooth/floss/utils.py
new file mode 100644
index 0000000..3499708
--- /dev/null
+++ b/client/cros/bluetooth/floss/utils.py
@@ -0,0 +1,216 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Simple observer base class."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import functools
+from gi.repository import GLib
+import logging
+import threading
+
+# All GLIB method calls should wait this many seconds by default
+GLIB_METHOD_CALL_TIMEOUT = 2
+
+# GLib thread name that will run the mainloop.
+GLIB_THREAD_NAME = 'glib'
+
+
+class GlibDeadlockException(Exception):
+    """Detected a situation that will cause a deadlock in GLib.
+
+    This exception should be emitted when we detect that a deadlock is likely to
+    occur. For example, a method call running in the mainloop context is making
+    a function call that is wrapped with @glib_call.
+    """
+    pass
+
+
+def glib_call(default_result=None,
+              timeout=GLIB_METHOD_CALL_TIMEOUT,
+              thread_name=GLIB_THREAD_NAME):
+    """Threads method call to glib thread and waits for result.
+
+    The dbus-python package does not support multi-threaded access. As a result,
+    we pipe all dbus function to the mainloop using GLib.idle_add which runs the
+    method as part of the mainloop.
+
+    @param default_result: The default return value from the function call if it
+                           fails or times out.
+    @param timeout: How long to wait for the method call to complete.
+    @param thread_name: Name of the thread that should be running GLib.Mainloop.
+    """
+
+    def decorator(method):
+        """Internal wrapper."""
+
+        def call_and_signal(data):
+            """Calls a function and signals completion.
+
+            This method is called by GLib and added via GLib.idle_add. It will
+            be run in the same thread as the GLib mainloop.
+
+            @param data: Dict containing data to be passed. Must have keys:
+                         event, method, args, kwargs and result. The value for
+                         result should be the default value and will be set
+                         before return.
+
+            @return False so that glib doesn't reschedule this to run again.
+            """
+            (event, method, args, kwargs) = (data['event'], data['method'],
+                                             data['args'], data['kwargs'])
+            logging.info('%s: Running %s',
+                         threading.current_thread().name, str(method))
+            err = None
+            try:
+                data['result'] = method(*args, **kwargs)
+            except Exception as e:
+                logging.error('Exception during %s: %s', str(method), str(e))
+                err = e
+
+            event.set()
+
+            # If method callback is set, this will call that method with results
+            # of this method call and any error that may have resulted.
+            if 'method_callback' in data:
+                data['method_callback'](err, data['result'])
+
+            return False
+
+        @functools.wraps(method)
+        def wrapper(*args, **kwargs):
+            """Sends method call to GLib and waits for its completion.
+
+            @param args: Positional arguments to method.
+            @param kwargs: Keyword arguments to method. Some special keywords:
+                |method_callback|: Returns result via callback without blocking.
+            """
+            method_callback = None
+            # If a method callback is given, we will not block on the completion
+            # of the call but expect the response in the callback instead. The
+            # callback has the signature: def callback(err, result)
+            if 'method_callback' in kwargs:
+                method_callback = kwargs['method_callback']
+                del kwargs['method_callback']
+
+            # Make sure we're not scheduling in the GLib thread since that'll
+            # cause a deadlock. An exception is if we have a method callback
+            # which is async.
+            current_thread_name = threading.current_thread().name
+            if current_thread_name == thread_name and not method_callback:
+                raise GlibDeadlockException(
+                        '{} called in GLib thread'.format(method))
+
+            done_event = threading.Event()
+            data = {
+                    'event': done_event,
+                    'method': method,
+                    'args': args,
+                    'kwargs': kwargs,
+                    'result': default_result,
+            }
+            if method_callback:
+                data['method_callback'] = method_callback
+
+            logging.info('%s: Adding %s to GLib.idle_add',
+                         threading.current_thread().name, str(method))
+            GLib.idle_add(call_and_signal, data)
+
+            if not method_callback:
+                # Wait for the result from the GLib call
+                if not done_event.wait(timeout=timeout):
+                    logging.warning('%s timed out after %d s', str(method),
+                                    timeout)
+
+            return data['result']
+
+        return wrapper
+
+    return decorator
+
+
+def glib_callback(thread_name=GLIB_THREAD_NAME):
+    """Marks callbacks that are called by GLib and checks for errors.
+    """
+
+    def _decorator(method):
+        @functools.wraps(method)
+        def _wrapper(*args, **kwargs):
+            current_thread_name = threading.current_thread().name
+            if current_thread_name != thread_name:
+                raise GlibDeadlockException(
+                        '{} should be called by GLib'.format(method))
+
+            return method(*args, **kwargs)
+
+        return _wrapper
+
+    return _decorator
+
+
+class PropertySet:
+    """Helper class with getters and setters for properties. """
+
+    class MissingProperty(Exception):
+        """Raised when property is missing in PropertySet."""
+        pass
+
+    class PropertyGetterMissing(Exception):
+        """Raised when get is called on a property that doesn't support it."""
+        pass
+
+    class PropertySetterMissing(Exception):
+        """Raised when set is called on a property that doesn't support it."""
+        pass
+
+    def __init__(self, property_set):
+        """Constructor.
+
+        @param property_set: Dictionary with proxy methods for get/set of named
+                             properties. These are NOT normal DBus properties
+                             that are implemented via
+                             org.freedesktop.DBus.Properties.
+        """
+        self.pset = property_set
+
+    def get(self, prop_name, *args):
+        """Calls the getter function for a property if it exists.
+
+        @param prop_name: The property name to call the getter function on.
+        @param args: Any positional arguments to pass to getter function.
+
+        @return Result from calling the getter function with given args.
+        """
+        if prop_name not in self.pset:
+            raise self.MissingProperty('{} is unknown.'.format(prop_name))
+
+        (getter, _) = self.pset[prop_name]
+
+        if not getter:
+            raise self.PropertyGetterMissing(
+                    '{} has no getter.'.format(prop_name))
+
+        return getter(*args)
+
+    def set(self, prop_name, *args):
+        """Calls the setter function for a property if it exists.
+
+        @param prop_name: The property name to call the setter function on.
+        @param args: Any positional arguments to pass to the setter function.
+
+        @return Result from calling the setter function with given args.
+        """
+        if prop_name not in self.pset:
+            raise self.MissingProperty('{} is unknown.'.format(prop_name))
+
+        (_, setter) = self.pset[prop_name]
+
+        if not setter:
+            raise self.PropertySetterMissing(
+                    '{} has no setter.'.format(prop_name))
+
+        return setter(*args)
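To make the threading contract concrete, a minimal sketch of glib_call in both its blocking and callback forms. The mainloop thread setup and the decorated function are assumptions for illustration:

import threading

from gi.repository import GLib

from autotest_lib.client.cros.bluetooth.floss.utils import glib_call

# glib_call() expects the mainloop to run in a thread named 'glib'.
loop = GLib.MainLoop()
glib_thread = threading.Thread(name='glib', target=loop.run)
glib_thread.daemon = True
glib_thread.start()


@glib_call(default_result=False, timeout=5)
def do_dbus_work(value):
    """Hypothetical D-Bus call; the body runs inside the GLib mainloop."""
    return value > 0


# Blocking form: waits up to 5 s; returns False on timeout or exception.
ok = do_dbus_work(42)


def on_done(err, result):
    """Receives the result when the non-blocking form is used."""
    print('err={}, result={}'.format(err, result))


# Non-blocking form: returns immediately; on_done runs in the 'glib' thread.
do_dbus_work(42, method_callback=on_done)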
diff --git a/client/cros/bluetooth/hcitool.py b/client/cros/bluetooth/hcitool.py
new file mode 100644
index 0000000..7f7b24f
--- /dev/null
+++ b/client/cros/bluetooth/hcitool.py
@@ -0,0 +1,750 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Module to execute hcitool commands according to Bluetooth Core Spec v5.2."""
+
+import btsocket
+import logging
+import struct
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+
+
+class Hcitool(object):
+    """Executes hcitool commands according to Bluetooth Core Spec v5.2."""
+    CONTROLLER_PASS_CODE_VALUE = 0
+    HCI_COMMAND_COMPLETE_EVENT = '0x0e'
+
+    def _execute_hcitool_cmd(self, ogf, ocf, *parameter):
+        """Executes hcitool commands using 'hcitool cmd ... '
+
+        NOTE: the returned list layout follows the Bluetooth Core Spec.
+
+        @param ogf: btsocket.OGF_... (int value).
+        @param ocf: btsocket.OCF_... (int value).
+        @param *parameter: parameters as hex strings, e.g., '1A', 'FA'.
+
+        @return: list of values parsed from the hcitool output. In case
+                of failure, returns [status].
+        """
+        params = ['hcitool', 'cmd', hex(ogf), hex(ocf)]
+        params.extend(parameter)
+        cmd = ' '.join(params)
+        logging.debug('Running "%s"', cmd)
+        # Output format of hcitool command:
+        # < HCI Command: ogf 0xXX, ocf 0xXXXX, plen X
+        # > HCI Event: 0xXX plen XX
+        #   XX XX XX XX XX XX XX XX XX XX ...
+        output = utils.system_output(cmd, retain_output=True)
+        output_parse_value = HciToolParser.parse_output(output)
+        event_type, plen_value, status, event_bytearray = output_parse_value
+        if event_type != self.HCI_COMMAND_COMPLETE_EVENT:
+            raise error.TestError(
+                    'Expect Command complete event with value: ' +
+                    self.HCI_COMMAND_COMPLETE_EVENT + ' but got ' + event_type)
+
+        if len(event_bytearray) != plen_value:
+            raise error.TestError('Expect plen value of ' + str(plen_value) +
+                                  ' but got ' + str(len(event_bytearray)))
+
+        if status != self.CONTROLLER_PASS_CODE_VALUE:
+            return [status]
+
+        return HciToolParser.parse_payload(event_bytearray, ogf, ocf)
+
+    @staticmethod
+    def filter_with_mask(names, mask):
+        """Picks the supported names base on the given mask.
+
+        @param names: List of names like feature,commands,...
+        @param mask: A bitmask (8 bit little-endian) or a list of bitmasks.
+
+        @return: List of supported names (features/commands/...).
+        """
+
+        if isinstance(mask, list):
+            # Convert masks to bitstring in little-endian.
+            mask = ''.join('{0:08b}'.format(m)[::-1] for m in mask)
+        else:
+            mask = '{:b}'.format(mask)
+            mask = mask[::-1]
+        return [names[i] for i, m in enumerate(mask) if m == '1']
+
+    def _execute_hcitool_cmd_or_raise(self, ogf, ocf, *parameter):
+        result = self._execute_hcitool_cmd(ogf, ocf, *parameter)
+        status = result[0]
+        if status != self.CONTROLLER_PASS_CODE_VALUE:
+            raise error.TestError(
+                    'Unexpected command output, the status code is ' +
+                    str(status))
+        return result
+
+    def read_buffer_size(self):
+        """Reads the buffer size of the BT controller.
+
+        @returns: (status, acl_data_packet_length,
+                synchronous_data_packet_length, total_num_acl_data_packets,
+                total_num_synchronous_data_packets).
+        """
+        return self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_INFO_PARAM, btsocket.OCF_READ_BUFFER_SIZE)
+
+    def read_local_supported_features(self):
+        """Reads local supported features for BR/EDR.
+
+        @returns: (status, [features_name_list]).
+        """
+        execute_command_result = self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_INFO_PARAM, btsocket.OCF_READ_LOCAL_FEATURES)
+        status = execute_command_result[0]
+        lmp_features_mask = execute_command_result[1]
+        supported_features = SupportedFeatures.SUPPORTED_FEATURES_PAGE_ZERO
+        final_result = self.filter_with_mask(supported_features,
+                                             lmp_features_mask)
+        return status, final_result
+
+    def read_local_extended_features(self, page_number):
+        """Reads local supported extended features for BR/EDR.
+
+        @param page_number: Page number (0, 1, or 2).
+
+        @returns: (status, return_page_number,
+                maximum_page_number, [features_name_list]).
+        """
+        if page_number not in (0, 1, 2):
+            raise error.TestError(
+                    'Invalid page_number: want (0, 1, 2), actual: ' +
+                    str(page_number))
+        execute_command_result = self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_INFO_PARAM, btsocket.OCF_READ_LOCAL_EXT_FEATURES,
+                str(page_number))
+
+        status = execute_command_result[0]
+        return_page_number = execute_command_result[1]
+        maximum_page_number = execute_command_result[2]
+        extended_mask = execute_command_result[3]
+        supported_features = []
+        if page_number == 0:
+            supported_features = SupportedFeatures.SUPPORTED_FEATURES_PAGE_ZERO
+        elif page_number == 1:
+            supported_features = SupportedFeatures.SUPPORTED_FEATURES_PAGE_ONE
+        elif page_number == 2:
+            supported_features = SupportedFeatures.SUPPORTED_FEATURES_PAGE_TWO
+
+        final_result = self.filter_with_mask(supported_features, extended_mask)
+
+        return status, return_page_number, maximum_page_number, final_result
+
+    def read_le_local_supported_features(self):
+        """Reads LE (Low Energy) supported features.
+
+        @return: (status, [LE_features_name_list]).
+        """
+
+        execute_command_result = self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_LE_CTL,
+                btsocket.OCF_LE_READ_LOCAL_SUPPORTED_FEATURES)
+
+        status = execute_command_result[0]
+        le_features_mask = execute_command_result[1]
+        le_supported_features = SupportedFeatures.LE_SUPPORTED_FEATURE
+        final_result = self.filter_with_mask(le_supported_features,
+                                             le_features_mask)
+
+        return status, final_result
+
+    def set_event_filter(self, filter_type, filter_condition_type, condition):
+        """Sets event filter.
+
+        @param filter_type: Filter type (hex string).
+        @param filter_condition_type: Filter condition type (hex string).
+        @param condition: Condition value (hex string).
+
+        @return: [status].
+        """
+        execute_command_result = self._execute_hcitool_cmd(
+                btsocket.OGF_HOST_CTL, btsocket.OCF_SET_EVENT_FLT, filter_type,
+                filter_condition_type, condition)
+
+        return execute_command_result
+
+    def read_local_supported_commands(self):
+        """Reads local supported commands.
+
+        @return: (status, [supported_commands_name_list]).
+        """
+        execute_command_result = self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_INFO_PARAM, btsocket.OCF_READ_LOCAL_COMMANDS)
+        status = execute_command_result[0]
+        commands_mask = list(execute_command_result[1:])
+        commands = SupportedCommands.SUPPORTED_COMMANDS
+        final_result = self.filter_with_mask(commands, commands_mask)
+
+        return status, final_result
+
+    def check_command_supported(self, command_name):
+        """Check if the given command name is supported.
+
+        @param command_name: Command name as a string, e.g., 'HCI_Inquiry'.
+
+        @return: True if the command is supported, False otherwise.
+        """
+        supported_commands = self.read_local_supported_commands()[1]
+
+        return command_name in supported_commands
+
+    def le_read_accept_list_size(self):
+        """Reads accept list size of the BT LE controller.
+
+        @returns: (status, accept_list_size).
+        """
+        return self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_LE_CTL, btsocket.OCF_LE_READ_ACCEPT_LIST_SIZE)
+
+    def le_read_maximum_data_length(self):
+        """Reads packet data length of the BT LE controller.
+
+        @returns: (status, supported_max_tx_octets, supported_max_tx_time,
+                supported_max_rx_octets, supported_max_rx_time).
+        """
+        return self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_LE_CTL,
+                HciToolParser.OCF_LE_READ_MAXIMUM_DATA_LENGTH)
+
+    def le_read_resolving_list_size(self):
+        """Reads resolving list size of the BT LE controller.
+        @returns: (status, resolving_list_size).
+        """
+        return self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_LE_CTL,
+                HciToolParser.OCF_LE_READ_RESOLVING_LIST_SIZE)
+
+    def le_read_number_of_supported_advertising_sets(self):
+        """Reads number of supported advertisement sets.
+
+        @returns: (status, num_supported_advertising_sets).
+        """
+        return self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_LE_CTL,
+                HciToolParser.OCF_LE_READ_NUMBER_OF_SUPPORTED_ADVERTISING_SETS)
+
+    def vs_msft_read_supported_features(self, msft_ocf):
+        """Reads VS MSFT supported features.
+
+        @param msft_ocf: The msft_ocf for different chipset.
+
+        @returns: (status, subcommand_opcode, [vs_msft_features_name_list],
+                microsoft_event_prefix_length, microsoft_event_prefix)
+        """
+        VS_MSFT_READ_SUPPORTED_FEATURES_SUBCOMMAND_OPCODE = '00'
+        execute_command_result = self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_VENDOR_CMD, msft_ocf,
+                VS_MSFT_READ_SUPPORTED_FEATURES_SUBCOMMAND_OPCODE)
+        status = execute_command_result[0]
+        vs_msft_features_mask = execute_command_result[2]
+        vs_msft_supported_features = (
+                SupportedFeatures.VS_MSFT_SUPPORTED_FEATURES)
+        final_result = self.filter_with_mask(vs_msft_supported_features,
+                                             vs_msft_features_mask)
+        (_, subcommand_opcode, _, microsoft_event_prefix_length,
+         microsoft_event_prefix) = execute_command_result
+        return (status, subcommand_opcode, final_result,
+                microsoft_event_prefix_length, microsoft_event_prefix)
+
+    def le_get_vendor_capabilities_command(self):
+        """Gets AOSP LE vendor capabilities.
+
+        @returns: (status, max_advt_instances(deprecated),
+                offloaded_resolution_of_private-address(deprecated),
+                total_scan_results_storage, max_irk_list_sz, filtering_support,
+                max_filter, activity_energy_info_support, version_supported,
+                total_num_of_advt_tracked, extended_scan_support,
+                debug_logging_supported,
+                LE_address_generation_offloading_support(deprecated),
+                A2DP_source_offload_capability_mask,
+                bluetooth_quality_report_support, dynamic_audio_buffer_support).
+        """
+        execute_command_result = self._execute_hcitool_cmd_or_raise(
+                btsocket.OGF_VENDOR_CMD,
+                HciToolParser.OCF_LE_GET_VENDOR_CAPABILITIES_COMMAND)
+        pack_format = '<{}B'.format(len(execute_command_result))
+        execute_command_result = struct.pack(pack_format,
+                                             *execute_command_result)
+        aosp_formats = [
+                '<BBBHBBBBHHBB',  # v0.95
+                '<BBBHBBBBHHBBB',  # v0.96
+                '<BBBHBBBBHHBBBIB',  # v0.98
+                '<BBBHBBBBHHBBBIBI',  # v1.00
+        ]
+
+        for f in aosp_formats:
+            if struct.calcsize(f) == len(execute_command_result):
+                return struct.unpack(f, execute_command_result)
+        raise error.TestError(
+                'Invalid output of AOSP capability command, length = ' +
+                str(len(execute_command_result)))
+
+
+class HciToolParser:
+    """Parser of hcitool command output based on the hcitool parameters."""
+    OCF_LE_READ_MAXIMUM_DATA_LENGTH = 0x002F
+    OCF_LE_READ_RESOLVING_LIST_SIZE = 0x002A
+    OCF_LE_READ_NUMBER_OF_SUPPORTED_ADVERTISING_SETS = 0x003B
+    OCF_MSFT_INTEL_CHIPSET = 0x001e
+    OCF_MSFT_MEDIATEK_CHIPSET = 0x0130
+    OCF_MSFT_QCA_CHIPSET = 0x0170
+    OCF_LE_GET_VENDOR_CAPABILITIES_COMMAND = 0x0153
+
+    FORMATS = {
+            ################## OGF=0X03 (OGF_HOST_CTL) ##################
+            # Set Event Filter command
+            (btsocket.OGF_HOST_CTL, btsocket.OCF_SET_EVENT_FLT):
+            '<B',
+
+            ################## OGF=0X04 (OGF_INFO_PARAM) ##################
+            # Read Local Supported Commands command
+            (btsocket.OGF_INFO_PARAM, btsocket.OCF_READ_LOCAL_COMMANDS):
+            '<B64B',
+            # Read Local Supported Features command
+            (btsocket.OGF_INFO_PARAM, btsocket.OCF_READ_LOCAL_FEATURES):
+            '<BQ',
+            # Read Local Extended Features command
+            (btsocket.OGF_INFO_PARAM, btsocket.OCF_READ_LOCAL_EXT_FEATURES):
+            '<BBBQ',
+            # Read Buffer Size command
+            (btsocket.OGF_INFO_PARAM, btsocket.OCF_READ_BUFFER_SIZE):
+            '<BHBHH',
+
+            ################## OGF=0X08 (OGF_LE_CTL) ##################
+            # LE Read Local Supported Features command
+            (btsocket.OGF_LE_CTL, btsocket.OCF_LE_READ_LOCAL_SUPPORTED_FEATURES):
+            '<BQ',
+            # LE Set Advertising Data command
+            (btsocket.OGF_LE_CTL, btsocket.OCF_LE_SET_ADVERTISING_DATA):
+            '<B',
+            # Read Data Packet Size
+            (btsocket.OGF_LE_CTL, OCF_LE_READ_MAXIMUM_DATA_LENGTH):
+            '<BHHHH',
+            # LE Read Number of Supported Advertising Sets command
+            (btsocket.OGF_LE_CTL, OCF_LE_READ_NUMBER_OF_SUPPORTED_ADVERTISING_SETS):
+            '<BB',
+            # LE Read Resolving List Size
+            (btsocket.OGF_LE_CTL, OCF_LE_READ_RESOLVING_LIST_SIZE):
+            '<BB',
+            # LE Read Accept List Size command
+            (btsocket.OGF_LE_CTL, btsocket.OCF_LE_READ_ACCEPT_LIST_SIZE):
+            '<BB',
+
+            ################## OGF=0X3f (OGF_VENDOR_CMD) ##################
+            # LE_Get_Vendor_Capabilities_Command
+            (btsocket.OGF_VENDOR_CMD, OCF_LE_GET_VENDOR_CAPABILITIES_COMMAND):
+            None,
+            # HCI_VS_MSFT_Intel_Read_Supported_Features
+            (btsocket.OGF_VENDOR_CMD, OCF_MSFT_INTEL_CHIPSET):
+            '<BBQBB',
+            # HCI_VS_MSFT_QCA_Read_Supported_Features
+            (btsocket.OGF_VENDOR_CMD, OCF_MSFT_QCA_CHIPSET):
+            '<BBQBB',
+            # HCI_VS_MSFT_Mediatek_Read_Supported_Features
+            (btsocket.OGF_VENDOR_CMD, OCF_MSFT_MEDIATEK_CHIPSET):
+            '<BBQBB'
+    }
+
+    @staticmethod
+    def get_parsing_format(ogf, ocf):
+        """Gets the format string to unpack the hcitool command output.
+
+        @param ogf: Opcode Group Field.
+        @param ocf: Opcode Command Field.
+
+        @return: opcode output format according to Bluetooth Core Spec v5.2.
+        """
+        return HciToolParser.FORMATS[(ogf, ocf)]
+
+    @staticmethod
+    def parse_output(output):
+        """Parse hcitool output.
+        @param output: hcitool command output.
+
+        @return: event_type, plen_value, status, event_bytearray.
+        """
+        hci_event = output.split('HCI Event:')[1].strip()
+        event_type, *_, plen_value = hci_event.split('\n')[0].split(' ')
+
+        # For example, hci_event_values = XX XX XX XX XX XX XX XX XX XX ...
+        # Sometimes hci_event_values spans multiple lines.
+        hci_event_values = hci_event.split('\n')[1:]
+        hci_event_values_as_string = ''.join([
+                v for v in hci_event_values
+        ]).strip().replace("'", '').replace(' ', '')
+        status = int(hci_event_values_as_string[6:8], 16)
+        event_bytearray = bytearray.fromhex(hci_event_values_as_string[6:])
+        # The first 3 octets are not in 'event_bytearray'; drop them from plen.
+        plen_value = int(plen_value) - 3
+        return event_type, plen_value, status, event_bytearray
+
+    @staticmethod
+    def parse_payload(payload, ogf, ocf):
+        """Parse hcitool payload.
+
+        @param payload: hcitool event payload (as bytearray).
+        @param ogf: btsocket.OGF_... (int value).
+        @param ocf: btsocket.OCF_... (int value).
+
+        @return: parsed result of the hcitool payload based on (ogf, ocf).
+        If it cannot be parsed, returns the payload as bytes.
+        """
+        cmd_output_format = HciToolParser.get_parsing_format(ogf, ocf)
+        if cmd_output_format is None:
+            cmd_output_format = '<{}B'.format(len(payload))
+        return struct.unpack(cmd_output_format, payload)
+
+
+class SupportedFeatures:
+    """List supported features names from BT core spec 5.2."""
+    VS_MSFT_SUPPORTED_FEATURES = [
+            'RSSI Monitoring feature for BR/EDR',
+            'RSSI Monitoring feature for LE connections',
+            'RSSI Monitoring of LE advertisements',
+            'Advertising Monitoring of LE advertisements',
+            'Verifying the validity of P-192 and P-256 keys',
+            'Continuous Advertising Monitoring'
+    ]
+    SUPPORTED_FEATURES_PAGE_ZERO = [
+            '3 slot packets', '5 slot packets', 'Encryption', 'Slot offset',
+            'Timing accuracy', 'Role switch', 'Hold mode', 'Sniff mode',
+            'Previously used', 'Power control requests',
+            'Channel quality driven data rate (CQDDR)', 'SCO link',
+            'HV2 packets', 'HV3 packets', 'u-law log synchronous data',
+            'A-law log synchronous data', 'CVSD synchronous data',
+            'Paging parameter negotiation', 'Power control',
+            'Transparent synchronous data',
+            'Flow control lag (least significant bit)',
+            'Flow control lag (middle bit)',
+            'Flow control lag (most significant bit)', 'Broadcast Encryption',
+            'Reserved for future use', 'Enhanced Data Rate ACL 2 Mb/s mode',
+            'Enhanced Data Rate ACL 3 Mb/s mode', 'Enhanced inquiry scan',
+            'Interlaced inquiry scan', 'Interlaced page scan',
+            'RSSI with inquiry results', 'Extended SCO link (EV3 packets)',
+            'EV4 packets', 'EV5 packets', 'Reserved for future use',
+            'AFH capable slave', 'AFH classification slave',
+            'BR/EDR Not Supported', 'LE Supported (Controller)',
+            '3-slot Enhanced Data Rate ACL packets',
+            '5-slot Enhanced Data Rate ACL packets', 'Sniff subrating',
+            'Pause encryption', 'AFH capable master',
+            'AFH classification master', 'Enhanced Data Rate eSCO 2 Mb/s mode',
+            'Enhanced Data Rate eSCO 3 Mb/s mode',
+            '3-slot Enhanced Data Rate eSCO packets',
+            'Extended Inquiry Response',
+            'Simultaneous LE and BR/EDR to Same Device Capable (Controller)',
+            'Reserved for future use',
+            'Secure Simple Pairing (Controller Support)', 'Encapsulated PDU',
+            'Erroneous Data Reporting', 'Non-flushable Packet Boundary Flag',
+            'Reserved for future use',
+            'HCI_Link_Supervision_Timeout_Changed event',
+            'Variable Inquiry TX Power Level', 'Enhanced Power Control',
+            'Reserved for future use', 'Reserved for future use',
+            'Reserved for future use', 'Reserved for future use',
+            'Extended features'
+    ]
+
+    SUPPORTED_FEATURES_PAGE_ONE = [
+            'Secure Simple Pairing (Host Support)', 'LE Supported (Host)',
+            'Simultaneous LE and BR/EDR to Same Device Capable (Host)',
+            'Secure Connections (Host Support)'
+    ]
+
+    SUPPORTED_FEATURES_PAGE_TWO = [
+            'Connectionless Slave Broadcast – Master Operation',
+            'Connectionless Slave Broadcast – Slave Operation',
+            'Synchronization Train', 'Synchronization Scan',
+            'HCI_Inquiry_Response_Notification event',
+            'Generalized interlaced scan', 'Coarse Clock Adjustment',
+            'Reserved for future use',
+            'Secure Connections (Controller Support)', 'Ping',
+            'Slot Availability Mask', 'Train nudging'
+    ]
+
+    LE_SUPPORTED_FEATURE = [
+            'LE Encryption', 'Connection Parameters Request Procedure',
+            'Extended Reject Indication', 'Slave-initiated Features Exchange',
+            'LE Ping', 'LE Data Packet Length Extension', 'LL Privacy',
+            'Extended Scanner Filter Policies', 'LE 2M PHY',
+            'Stable Modulation Index - Transmitter',
+            'Stable Modulation Index - Receiver', 'LE Coded PHY',
+            'LE Extended Advertising', 'LE Periodic Advertising',
+            'Channel Selection Algorithm #2', 'LE Power Class 1',
+            'Minimum Number of Used Channels Procedure',
+            'Connection CTE Request', 'Connection CTE Response',
+            'Connectionless CTE Transmitter', 'Connectionless CTE Receiver',
+            'Antenna Switching During CTE Transmission (AoD)',
+            'Antenna Switching During CTE Reception (AoA)',
+            'Receiving Constant Tone Extensions',
+            'Periodic Advertising Sync Transfer Sender',
+            'Periodic Advertising Sync Transfer Recipient',
+            'Sleep Clock Accuracy Updates', 'Remote Public Key Validation',
+            'Connected Isochronous Stream Master',
+            'Connected Isochronous Stream Slave', 'Isochronous Broadcaster',
+            'Synchronized Receiver', 'Isochronous Channels (Host Support)',
+            'LE Power Control Request', 'LE Power Change Indication',
+            'LE Path Loss Monitoring'
+    ]
+
+
+class SupportedCommands:
+    """List supported command from BT core spec 5.2."""
+    SUPPORTED_COMMANDS = [
+            "HCI_Inquiry", "HCI_Inquiry_Cancel", "HCI_Periodic_Inquiry_Mode",
+            "HCI_Exit_Periodic_Inquiry_Mode", "HCI_Create_Connection",
+            "HCI_Disconnect", "HCI_Add_SCO_Connection",
+            "HCI_Create_Connection_Cancel", "HCI_Accept_Connection_Request",
+            "HCI_Reject_Connection_Request", "HCI_Link_Key_Request_Reply",
+            "HCI_Link_Key_Request_Negative_Reply",
+            "HCI_PIN_Code_Request_Reply",
+            "HCI_PIN_Code_Request_Negative_Reply",
+            "HCI_Change_Connection_Packet_Type",
+            "HCI_Authentication_Requested", "HCI_Set_Connection_Encryption",
+            "HCI_Change_Connection_Link_Key", "HCI_Master_Link_Key",
+            "HCI_Remote_Name_Request", "HCI_Remote_Name_Request_Cancel",
+            "HCI_Read_Remote_Supported_Features",
+            "HCI_Read_Remote_Extended_Features",
+            "HCI_Read_Remote_Version_Information", "HCI_Read_Clock_Offset",
+            "HCI_Read_LMP_Handle", "Reserved for future use",
+            "Reserved for future use", "Reserved for future use",
+            "Reserved for future use", "Reserved for future use",
+            "Reserved for future use", "Reserved for future use",
+            "HCI_Hold_Mode", "HCI_Sniff_Mode", "HCI_Exit_Sniff_Mode",
+            "Previously used", "Previously used", "HCI_QoS_Setup",
+            "HCI_Role_Discovery", "HCI_Switch_Role",
+            "HCI_Read_Link_Policy_Settings", "HCI_Write_Link_Policy_Settings",
+            "HCI_Read_Default_Link_Policy_Settings",
+            "HCI_Write_Default_Link_Policy_Settings", "HCI_Flow_Specification",
+            "HCI_Set_Event_Mask", "HCI_Reset", "HCI_Set_Event_Filter",
+            "HCI_Flush", "HCI_Read_PIN_Type", "HCI_Write_PIN_Type",
+            "Previously used", "HCI_Read_Stored_Link_Key",
+            "HCI_Write_Stored_Link_Key", "HCI_Delete_Stored_Link_Key",
+            "HCI_Write_Local_Name", "HCI_Read_Local_Name",
+            "HCI_Read_Connection_Accept_Timeout",
+            "HCI_Write_Connection_Accept_Timeout", "HCI_Read_Page_Timeout",
+            "HCI_Write_Page_Timeout", "HCI_Read_Scan_Enable",
+            "HCI_Write_Scan_Enable", "HCI_Read_Page_Scan_Activity",
+            "HCI_Write_Page_Scan_Activity", "HCI_Read_Inquiry_Scan_Activity",
+            "HCI_Write_Inquiry_Scan_Activity",
+            "HCI_Read_Authentication_Enable",
+            "HCI_Write_Authentication_Enable", "HCI_Read_Encryption_Mode",
+            "HCI_Write_Encryption_Mode", "HCI_Read_Class_Of_Device",
+            "HCI_Write_Class_Of_Device", "HCI_Read_Voice_Setting",
+            "HCI_Write_Voice_Setting", "HCI_Read_Automatic_Flush_Timeout",
+            "HCI_Write_Automatic_Flush_Timeout",
+            "HCI_Read_Num_Broadcast_Retransmissions",
+            "HCI_Write_Num_Broadcast_Retransmissions",
+            "HCI_Read_Hold_Mode_Activity", "HCI_Write_Hold_Mode_Activity",
+            "HCI_Read_Transmit_Power_Level",
+            "HCI_Read_Synchronous_Flow_Control_Enable",
+            "HCI_Write_Synchronous_Flow_Control_Enable",
+            "HCI_Set_Controller_To_Host_Flow_Control", "HCI_Host_Buffer_Size",
+            "HCI_Host_Number_Of_Completed_Packets",
+            "HCI_Read_Link_Supervision_Timeout",
+            "HCI_Write_Link_Supervision_Timeout",
+            "HCI_Read_Number_Of_Supported_IAC", "HCI_Read_Current_IAC_LAP",
+            "HCI_Write_Current_IAC_LAP", "HCI_Read_Page_Scan_Mode_Period",
+            "HCI_Write_Page_Scan_Mode_Period", "HCI_Read_Page_Scan_Mode",
+            "HCI_Write_Page_Scan_Mode",
+            "HCI_Set_AFH_Host_Channel_Classification",
+            "Reserved for future use", "Reserved for future use",
+            "HCI_Read_Inquiry_Scan_Type", "HCI_Write_Inquiry_Scan_Type",
+            "HCI_Read_Inquiry_Mode", "HCI_Write_Inquiry_Mode",
+            "HCI_Read_Page_Scan_Type", "HCI_Write_Page_Scan_Type",
+            "HCI_Read_AFH_Channel_Assessment_Mode",
+            "HCI_Write_AFH_Channel_Assessment_Mode", "Reserved for future use",
+            "Reserved for future use", "Reserved for future use",
+            "Reserved for future use", "Reserved for future use",
+            "Reserved for future use", "Reserved for future use",
+            "HCI_Read_Local_Version_Information", "Reserved for future use",
+            "HCI_Read_Local_Supported_Features",
+            "HCI_Read_Local_Extended_Features", "HCI_Read_Buffer_Size",
+            "HCI_Read_Country_Code", "HCI_Read_BD_ADDR",
+            "HCI_Read_Failed_Contact_Counter",
+            "HCI_Reset_Failed_Contact_Counter", "HCI_Read_Link_Quality",
+            "HCI_Read_RSSI", "HCI_Read_AFH_Channel_Map", "HCI_Read_Clock",
+            "HCI_Read_Loopback_Mode", "HCI_Write_Loopback_Mode",
+            "HCI_Enable_Device_Under_Test_Mode",
+            "HCI_Setup_Synchronous_Connection_Request",
+            "HCI_Accept_Synchronous_Connection_Request",
+            "HCI_Reject_Synchronous_Connection_Request",
+            "Reserved for future use", "Reserved for future use",
+            "HCI_Read_Extended_Inquiry_Response",
+            "HCI_Write_Extended_Inquiry_Response",
+            "HCI_Refresh_Encryption_Key", "Reserved for future use",
+            "HCI_Sniff_Subrating", "HCI_Read_Simple_Pairing_Mode",
+            "HCI_Write_Simple_Pairing_Mode", "HCI_Read_Local_OOB_Data",
+            "HCI_Read_Inquiry_Response_Transmit_Power_Level",
+            "HCI_Write_Inquiry_Transmit_Power_Level",
+            "HCI_Read_Default_Erroneous_Data_Reporting",
+            "HCI_Write_Default_Erroneous_Data_Reporting",
+            "Reserved for future use", "Reserved for future use",
+            "Reserved for future use", "HCI_IO_Capability_Request_Reply",
+            "HCI_User_Confirmation_Request_Reply",
+            "HCI_User_Confirmation_Request_Negative_Reply",
+            "HCI_User_Passkey_Request_Reply",
+            "HCI_User_Passkey_Request_Negative_Reply",
+            "HCI_Remote_OOB_Data_Request_Reply",
+            "HCI_Write_Simple_Pairing_Debug_Mode", "HCI_Enhanced_Flush",
+            "HCI_Remote_OOB_Data_Request_Negative_Reply",
+            "Reserved for future use", "Reserved for future use",
+            "HCI_Send_Keypress_Notification",
+            "HCI_IO_Capability_Request_Negative_Reply",
+            "HCI_Read_Encryption_Key_Size", "Reserved for future use",
+            "Reserved for future use", "Reserved for future use",
+            "HCI_Create_Physical_Link", "HCI_Accept_Physical_Link",
+            "HCI_Disconnect_Physical_Link", "HCI_Create_Logical_Link",
+            "HCI_Accept_Logical_Link", "HCI_Disconnect_Logical_Link",
+            "HCI_Logical_Link_Cancel", "HCI_Flow_Spec_Modify",
+            "HCI_Read_Logical_Link_Accept_Timeout",
+            "HCI_Write_Logical_Link_Accept_Timeout",
+            "HCI_Set_Event_Mask_Page_2", "HCI_Read_Location_Data",
+            "HCI_Write_Location_Data", "HCI_Read_Local_AMP_Info",
+            "HCI_Read_Local_AMP_ASSOC", "HCI_Write_Remote_AMP_ASSOC",
+            "HCI_Read_Flow_Control_Mode", "HCI_Write_Flow_Control_Mode",
+            "HCI_Read_Data_Block_Size", "Reserved for future use",
+            "Reserved for future use", "HCI_Enable_AMP_Receiver_Reports",
+            "HCI_AMP_Test_End", "HCI_AMP_Test",
+            "HCI_Read_Enhanced_Transmit_Power_Level",
+            "Reserved for future use", "HCI_Read_Best_Effort_Flush_Timeout",
+            "HCI_Write_Best_Effort_Flush_Timeout", "HCI_Short_Range_Mode",
+            "HCI_Read_LE_Host_Support", "HCI_Write_LE_Host_Support",
+            "Reserved for future use", "HCI_LE_Set_Event_Mask",
+            "HCI_LE_Read_Buffer_Size [v1]",
+            "HCI_LE_Read_Local_Supported_Features", "Reserved for future use",
+            "HCI_LE_Set_Random_Address", "HCI_LE_Set_Advertising_Parameters",
+            "HCI_LE_Read_Advertising_Physical_Channel_Tx_Power",
+            "HCI_LE_Set_Advertising_Data", "HCI_LE_Set_Scan_Response_Data",
+            "HCI_LE_Set_Advertising_Enable", "HCI_LE_Set_Scan_Parameters",
+            "HCI_LE_Set_Scan_Enable", "HCI_LE_Create_Connection",
+            "HCI_LE_Create_Connection_Cancel", "HCI_LE_Read_White_List_Size",
+            "HCI_LE_Clear_White_List", "HCI_LE_Add_Device_To_White_List",
+            "HCI_LE_Remove_Device_From_White_List", "HCI_LE_Connection_Update",
+            "HCI_LE_Set_Host_Channel_Classification",
+            "HCI_LE_Read_Channel_Map", "HCI_LE_Read_Remote_Features",
+            "HCI_LE_Encrypt", "HCI_LE_Rand", "HCI_LE_Enable_Encryption",
+            "HCI_LE_Long_Term_Key_Request_Reply",
+            "HCI_LE_Long_Term_Key_Request_Negative_Reply",
+            "HCI_LE_Read_Supported_States", "HCI_LE_Receiver_Test [v1]",
+            "HCI_LE_Transmitter_Test [v1]", "HCI_LE_Test_End",
+            "Reserved for future use", "Reserved for future use",
+            "Reserved for future use", "Reserved for future use",
+            "HCI_Enhanced_Setup_Synchronous_Connection",
+            "HCI_Enhanced_Accept_Synchronous_Connection",
+            "HCI_Read_Local_Supported_Codecs",
+            "HCI_Set_MWS_Channel_Parameters",
+            "HCI_Set_External_Frame_Configuration", "HCI_Set_MWS_Signaling",
+            "HCI_Set_MWS_Transport_Layer", "HCI_Set_MWS_Scan_Frequency_Table",
+            "HCI_Get_MWS_Transport_Layer_Configuration",
+            "HCI_Set_MWS_PATTERN_Configuration",
+            "HCI_Set_Triggered_Clock_Capture", "HCI_Truncated_Page",
+            "HCI_Truncated_Page_Cancel",
+            "HCI_Set_Connectionless_Slave_Broadcast",
+            "HCI_Set_Connectionless_Slave_Broadcast_Receive",
+            "HCI_Start_Synchronization_Train",
+            "HCI_Receive_Synchronization_Train", "HCI_Set_Reserved_LT_ADDR",
+            "HCI_Delete_Reserved_LT_ADDR",
+            "HCI_Set_Connectionless_Slave_Broadcast_Data",
+            "HCI_Read_Synchronization_Train_Parameters",
+            "HCI_Write_Synchronization_Train_Parameters",
+            "HCI_Remote_OOB_Extended_Data_Request_Reply",
+            "HCI_Read_Secure_Connections_Host_Support",
+            "HCI_Write_Secure_Connections_Host_Support",
+            "HCI_Read_Authenticated_Payload_Timeout",
+            "HCI_Write_Authenticated_Payload_Timeout",
+            "HCI_Read_Local_OOB_Extended_Data",
+            "HCI_Write_Secure_Connections_Test_Mode",
+            "HCI_Read_Extended_Page_Timeout",
+            "HCI_Write_Extended_Page_Timeout",
+            "HCI_Read_Extended_Inquiry_Length",
+            "HCI_Write_Extended_Inquiry_Length",
+            "HCI_LE_Remote_Connection_Parameter_Request_Reply",
+            "HCI_LE_Remote_Connection_Parameter_Request_Negative_Reply",
+            "HCI_LE_Set_Data_Length",
+            "HCI_LE_Read_Suggested_Default_Data_Length",
+            "HCI_LE_Write_Suggested_Default_Data_Length",
+            "HCI_LE_Read_Local_P-256_Public_Key", "HCI_LE_Generate_DHKey [v1]",
+            "HCI_LE_Add_Device_To_Resolving_List",
+            "HCI_LE_Remove_Device_From_Resolving_List",
+            "HCI_LE_Clear_Resolving_List", "HCI_LE_Read_Resolving_List_Size",
+            "HCI_LE_Read_Peer_Resolvable_Address",
+            "HCI_LE_Read_Local_Resolvable_Address",
+            "HCI_LE_Set_Address_Resolution_Enable",
+            "HCI_LE_Set_Resolvable_Private_Address_Timeout",
+            "HCI_LE_Read_Maximum_Data_Length", "HCI_LE_Read_PHY",
+            "HCI_LE_Set_Default_PHY", "HCI_LE_Set_PHY",
+            "HCI_LE_Receiver_Test [v2]", "HCI_LE_Transmitter_Test [v2]",
+            "HCI_LE_Set_Advertising_Set_Random_Address",
+            "HCI_LE_Set_Extended_Advertising_Parameters",
+            "HCI_LE_Set_Extended_Advertising_Data",
+            "HCI_LE_Set_Extended_Scan_Response_Data",
+            "HCI_LE_Set_Extended_Advertising_Enable",
+            "HCI_LE_Read_Maximum_Advertising_Data_Length",
+            "HCI_LE_Read_Number_of_Supported_Advertising_Sets",
+            "HCI_LE_Remove_Advertising_Set", "HCI_LE_Clear_Advertising_Sets",
+            "HCI_LE_Set_Periodic_Advertising_Parameters",
+            "HCI_LE_Set_Periodic_Advertising_Data",
+            "HCI_LE_Set_Periodic_Advertising_Enable",
+            "HCI_LE_Set_Extended_Scan_Parameters",
+            "HCI_LE_Set_Extended_Scan_Enable",
+            "HCI_LE_Extended_Create_Connection",
+            "HCI_LE_Periodic_Advertising_Create_Sync",
+            "HCI_LE_Periodic_Advertising_Create_Sync_Cancel",
+            "HCI_LE_Periodic_Advertising_Terminate_Sync",
+            "HCI_LE_Add_Device_To_Periodic_Advertiser_List",
+            "HCI_LE_Remove_Device_From_Periodic_Advertiser_List",
+            "HCI_LE_Clear_Periodic_Advertiser_List",
+            "HCI_LE_Read_Periodic_Advertiser_List_Size",
+            "HCI_LE_Read_Transmit_Power", "HCI_LE_Read_RF_Path_Compensation",
+            "HCI_LE_Write_RF_Path_Compensation", "HCI_LE_Set_Privacy_Mode",
+            "HCI_LE_Receiver_Test [v3]", "HCI_LE_Transmitter_Test [v3]",
+            "HCI_LE_Set_Connectionless_CTE_Transmit_Parameters",
+            "HCI_LE_Set_Connectionless_CTE_Transmit_Enable",
+            "HCI_LE_Set_Connectionless_IQ_Sampling_Enable",
+            "HCI_LE_Set_Connection_CTE_Receive_Parameters",
+            "HCI_LE_Set_Connection_CTE_Transmit_Parameters",
+            "HCI_LE_Connection_CTE_Request_Enable",
+            "HCI_LE_Connection_CTE_Response_Enable",
+            "HCI_LE_Read_Antenna_Information",
+            "HCI_LE_Set_Periodic_Advertising_Receive_Enable",
+            "HCI_LE_Periodic_Advertising_Sync_Transfer",
+            "HCI_LE_Periodic_Advertising_Set_Info_Transfer",
+            "HCI_LE_Set_Periodic_Advertising_Sync_Transfer_Parameters",
+            "HCI_LE_Set_Default_Periodic_Advertising_Sync_Transfer_Parameters",
+            "HCI_LE_Generate_DHKey [v2]",
+            "HCI_Read_Local_Simple_Pairing_Options",
+            "HCI_LE_Modify_Sleep_Clock_Accuracy",
+            "HCI_LE_Read_Buffer_Size [v2]", "HCI_LE_Read_ISO_TX_Sync",
+            "HCI_LE_Set_CIG_Parameters", "HCI_LE_Set_CIG_Parameters_Test",
+            "HCI_LE_Create_CIS", "HCI_LE_Remove_CIG",
+            "HCI_LE_Accept_CIS_Request", "HCI_LE_Reject_CIS_Request",
+            "HCI_LE_Create_BIG", "HCI_LE_Create_BIG_Test",
+            "HCI_LE_Terminate_BIG", "HCI_LE_BIG_Create_Sync",
+            "HCI_LE_BIG_Terminate_Sync", "HCI_LE_Request_Peer_SCA",
+            "HCI_LE_Setup_ISO_Data_Path", "HCI_LE_Remove_ISO_Data_Path",
+            "HCI_LE_ISO_Transmit_Test", "HCI_LE_ISO_Receive_Test",
+            "HCI_LE_ISO_Read_Test_Counters", "HCI_LE_ISO_Test_End",
+            "HCI_LE_Set_Host_Feature", "HCI_LE_Read_ISO_Link_Quality",
+            "HCI_LE_Enhanced_Read_Transmit_Power_Level",
+            "HCI_LE_Read_Remote_Transmit_Power_Level",
+            "HCI_LE_Set_Path_Loss_Reporting_Parameters",
+            "HCI_LE_Set_Path_Loss_Reporting_Enable",
+            "HCI_LE_Set_Transmit_Power_Reporting_Enable",
+            "HCI_LE_Transmitter_Test [v4]", "HCI_Set_Ecosystem_Base_Interval",
+            "HCI_Read_Local_Supported_Codecs [v2]",
+            "HCI_Read_Local_Supported_Codec_Capabilities",
+            "HCI_Read_Local_Supported_Controller_Delay",
+            "HCI_Configure_Data_Path", "Reserved for future use",
+            "Reserved for future use"
+    ]
+    DEPRECATED_COMMANDS = [
+            "HCI_Add_SCO_Connection", "HCI_Read_Encryption_Mode",
+            "HCI_Write_Encryption_Mode", "HCI_Read_Page_Scan_Mode_Period",
+            "HCI_Write_Page_Scan_Mode_Period", "HCI_Read_Page_Scan_Mode",
+            "HCI_Write_Page_Scan_Mode", "HCI_Read_Country_Code"
+    ]
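Since most readers above reduce to mapping a bitmask onto a name list, a quick worked example of filter_with_mask with illustrative values:

from autotest_lib.client.cros.bluetooth.hcitool import Hcitool

NAMES = ['feature0', 'feature1', 'feature2', 'feature3']

# Integer mask: bits 0 and 2 are set, selecting features 0 and 2.
Hcitool.filter_with_mask(NAMES, 0b0101)
# -> ['feature0', 'feature2']

# List-of-bytes mask (as used for the 64-byte supported-commands bitmap):
# the single byte 0x03 has bits 0 and 1 set, selecting features 0 and 1.
Hcitool.filter_with_mask(NAMES, [0x03])
# -> ['feature0', 'feature1']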
diff --git a/client/cros/bluetooth/hcitool_unittest.py b/client/cros/bluetooth/hcitool_unittest.py
new file mode 100644
index 0000000..e3c4cd5
--- /dev/null
+++ b/client/cros/bluetooth/hcitool_unittest.py
@@ -0,0 +1,28 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+from autotest_lib.client.cros.bluetooth.hcitool import HciToolParser
+
+
+class HciToolParserTest(unittest.TestCase):
+    """Unit test for class HciToolParser."""
+
+    def test_parse_output(self):
+        VALID_OUTPUT = ('< HCI Command: ogf 0x04, ocf 0x0003, plen 0\n'
+                        '> HCI Event: 0x0e plen 12\n'
+                        '  01 03 10 00 BF FE 0F FE DB FF 7B 87')
+
+        VALID_EVENT_TYPE = '0x0e'
+        VALID_PLEN_VALUE = 9
+        VALID_PASS_STATUS_CODE = 0
+        VALID_PAYLOAD = bytearray.fromhex('00 BF FE 0F FE DB FF 7B 87')
+
+        parser_output = HciToolParser.parse_output(VALID_OUTPUT)
+        event_type, plen_value, status, payload = parser_output
+        self.assertEqual(event_type, VALID_EVENT_TYPE)
+        self.assertEqual(plen_value, VALID_PLEN_VALUE)
+        self.assertEqual(status, VALID_PASS_STATUS_CODE)
+        self.assertEqual(payload, VALID_PAYLOAD)
diff --git a/client/cros/bluetooth/logger_helper.py b/client/cros/bluetooth/logger_helper.py
new file mode 100644
index 0000000..994d495
--- /dev/null
+++ b/client/cros/bluetooth/logger_helper.py
@@ -0,0 +1,254 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helper class to manage bluetooth logs"""
+
+from datetime import datetime
+import logging
+import os
+import re
+import subprocess
+import time
+
+
+SYSLOG_PATH = '/var/log/messages'
+
+
+class LogManager(object):
+    """The LogManager class helps to collect logs without a listening thread"""
+
+    DEFAULT_ENCODING = 'utf-8'
+
+    class LoggingException(Exception):
+        """A stub exception class for LogManager class."""
+        pass
+
+    def __init__(self, log_path=SYSLOG_PATH, raise_missing=False):
+        """Initialize log manager object
+
+        @param log_path: string path to log file to manage
+        @param raise_missing: raise an exception if the log file is missing
+
+        @raises: LogManager.LoggingException on non-existent log file
+        """
+        if not os.path.isfile(log_path):
+            msg = 'Requested log file {} does not exist'.format(log_path)
+            if raise_missing:
+                raise LogManager.LoggingException(msg)
+            else:
+                self._LogErrorToSyslog(msg)
+
+        self.log_path = log_path
+
+        self.ResetLogMarker()
+        self._bin_log_contents = []
+
+    def _LogErrorToSyslog(self, message):
+        """Create a new syslog file and add a message to syslog."""
+        subprocess.call(['reload', 'syslog'])
+        subprocess.call(['logger', message])
+
+    def _GetSize(self):
+        """Get the size of the log"""
+        try:
+            return os.path.getsize(self.log_path)
+        except Exception as e:
+            logging.error('Failed to get log size: {}'.format(e))
+            return 0
+
+    def ResetLogMarker(self, now_size=None):
+        """Reset the start-of-log marker for later comparison"""
+        if now_size is None:
+            now_size = self._GetSize()
+        self.initial_log_size = now_size
+
+    def StartRecording(self):
+        """Mark initial log size for later comparison"""
+
+        self._bin_log_contents = []
+
+    def StopRecording(self):
+        """Gather the logs since StartRecording was called
+
+        @raises: LogManager.LoggingException if:
+                - Log file disappeared since StartRecording was called
+                - Log file is smaller than when logging began
+                - StartRecording was never called
+        """
+        initial_size = self.initial_log_size
+        now_size = self._GetSize()
+
+        if not os.path.isfile(self.log_path):
+            msg = 'File {} disappeared unexpectedly'.format(self.log_path)
+            raise LogManager.LoggingException(msg)
+
+        if now_size < initial_size:
+            msg = 'Log became smaller unexpectedly'
+            raise LogManager.LoggingException(msg)
+
+        with open(self.log_path, 'rb') as mf:
+            # Skip to the point where we started recording
+            mf.seek(self.initial_log_size)
+
+            readsize = now_size - self.initial_log_size
+            self._bin_log_contents = mf.read(readsize).split(b'\n')
+
+        # Re-set start of log marker
+        self.ResetLogMarker(now_size)
+
+    def LogContains(self, search_str):
+        """Performs simple string checking on each line from the collected log
+
+        @param search_str: string to be located within log contents. This arg
+                is expected not to span multiple lines in the log
+
+        @returns: True if search_str was located in the collected log contents,
+                False otherwise
+        """
+        pattern = re.compile(search_str.encode(self.DEFAULT_ENCODING))
+        for line in self._bin_log_contents:
+            if pattern.search(line):
+                return True
+
+        return False
+
+    def FilterOut(self, rm_reg_exp):
+        """Remove lines with specified pattern from the log file
+
+        @param rm_reg_exp: regular expression of the lines to be removed
+        """
+        # If log_path doesn't exist, there's nothing to do
+        if not os.path.isfile(self.log_path):
+            return
+
+        rm_line_cnt = 0
+        initial_size = self._GetSize()
+        rm_pattern = re.compile(rm_reg_exp.encode(self.DEFAULT_ENCODING))
+
+        with open(self.log_path, 'rb+') as mf:
+            lines = mf.readlines()
+            mf.seek(0)
+            for line in lines:
+                if rm_pattern.search(line):
+                    rm_line_cnt += 1
+                else:
+                    mf.write(line)
+            mf.truncate()
+
+        # Some tracebacks point here as the cause of /var/log/messages going
+        # missing, but we don't have many clues. Add a check and a log here.
+        if not os.path.isfile(self.log_path):
+            msg = '{} does not exist after FilterOut'.format(self.log_path)
+            logging.warning(msg)
+            self._LogErrorToSyslog(msg)
+
+        new_size = self._GetSize()
+        rm_byte = initial_size - new_size
+        logging.info('Removed %d lines, reduced log size by %d bytes',
+                     rm_line_cnt, rm_byte)
+
+        # Note the new size of the log
+        self.ResetLogMarker(new_size)
+
+
+class InterleaveLogger(LogManager):
+    """LogManager class that focus on interleave scan"""
+
+    # Example bluetooth kernel log:
+    # "2020-11-23T07:52:31.395941Z DEBUG kernel: [ 6469.811135] Bluetooth: "
+    # "cancel_interleave_scan() hci0: cancelling interleave scan"
+    KERNEL_LOG_PATTERN = (r'([^ ]+) DEBUG kernel: \[.*\] Bluetooth: '
+                          r'{FUNCTION}\(\) hci0: {LOG_STR}')
+    STATE_PATTERN = KERNEL_LOG_PATTERN.format(
+            FUNCTION='hci_req_add_le_interleaved_scan',
+            LOG_STR='next state: (.+)')
+    CANCEL_PATTERN = KERNEL_LOG_PATTERN.format(
+            FUNCTION='cancel_interleave_scan',
+            LOG_STR='cancelling interleave scan')
+    SYSTIME_LENGTH = len('2020-12-18T00:11:22.345678')
+
+    def __init__(self):
+        """ Initialize object
+        """
+        self.reset()
+        self.state_pattern = re.compile(
+                self.STATE_PATTERN.encode(self.DEFAULT_ENCODING))
+        self.cancel_pattern = re.compile(
+                self.CANCEL_PATTERN.encode(self.DEFAULT_ENCODING))
+        super(InterleaveLogger, self).__init__()
+
+    def reset(self):
+        """ Clear data between each log collection attempt
+        """
+        self.records = []
+        self.cancel_events = []
+
+    def StartRecording(self):
+        """ Reset the previous data and start recording.
+        """
+        self.reset()
+        super(InterleaveLogger, self).ResetLogMarker()
+        super(InterleaveLogger, self).StartRecording()
+
+    def StopRecording(self):
+        """ Stop recording and parse logs
+            The following data will be set after this call
+
+            - self.records: a list of records, each a dict holding the
+                            interleave |state| and the |time| the state starts.
+                            |state| could be {'no filter', 'allowlist'}
+                            |time| is system time in sec
+
+            - self.cancel_events: a list of |time| values at which an
+                                  interleave cancel event log was found
+                                  |time| is system time in sec
+
+            @returns: True if StopRecording succeeds, False otherwise
+
+        """
+        try:
+            super(InterleaveLogger, self).StopRecording()
+        except Exception as e:
+            logging.error(e)
+            return False
+
+        # A mutable flag lets the nested helper below report parse failures
+        # back to this scope (works on both python 2 and 3).
+        success = [True]
+
+        def sys_time_to_timestamp(time_str):
+            """ Return timestamp of time_str """
+
+            # This is to remove the suffix of time string, in some cases the
+            # time string ends with an extra 'Z', in other cases, the string
+            # ends with time zone (ex. '+08:00')
+            time_str = time_str[:self.SYSTIME_LENGTH]
+
+            try:
+                dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%f")
+            except Exception as e:
+                logging.error(e)
+                success[0] = False
+                return 0
+
+            return time.mktime(dt.timetuple()) + dt.microsecond * (10**-6)
+
+        for line in self._bin_log_contents:
+            line = line.strip().replace(b'\\r\\n', b'')
+            state_pattern = self.state_pattern.search(line)
+            cancel_pattern = self.cancel_pattern.search(line)
+
+            if cancel_pattern:
+                time_str = cancel_pattern.groups()[0].decode(
+                        self.DEFAULT_ENCODING)
+                time_sec = sys_time_to_timestamp(time_str)
+                self.cancel_events.append(time_sec)
+
+            if state_pattern:
+                time_str, state = [
+                        x.decode(self.DEFAULT_ENCODING)
+                        for x in state_pattern.groups()
+                ]
+                time_sec = sys_time_to_timestamp(time_str)
+                self.records.append({'time': time_sec, 'state': state})
+
+        return success[0]
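A minimal usage sketch of the two classes added above, assuming the usual autotest import path; the search string and the points where the test stimulus runs are illustrative:

from autotest_lib.client.cros.bluetooth import logger_helper

# Collect only the syslog lines emitted while the stimulus runs.
log_manager = logger_helper.LogManager()
log_manager.StartRecording()
# ... trigger the bluetooth activity under test here ...
log_manager.StopRecording()
if log_manager.LogContains('Bluetooth: hci0'):
    print('Saw bluetooth kernel messages')

# Interleave-scan specific collection and parsing.
interleave_logger = logger_helper.InterleaveLogger()
interleave_logger.StartRecording()
# ... wait for interleave scan activity ...
if interleave_logger.StopRecording():
    print(interleave_logger.records, interleave_logger.cancel_events)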
diff --git a/client/cros/bluetooth/output_recorder.py b/client/cros/bluetooth/output_recorder.py
index e41f8b4..09b32a7 100644
--- a/client/cros/bluetooth/output_recorder.py
+++ b/client/cros/bluetooth/output_recorder.py
@@ -15,6 +15,7 @@
 import pty
 import re
 import subprocess
+import sys
 import threading
 import time
 
@@ -24,12 +25,21 @@
     pass
 
 
+def _may_append_encoding_kwargs(kwargs):
+    """Appends encoding kwarg if it is run in Python 3+.
+
+    @param kwargs: dict of kwargs.
+    """
+    if sys.version_info.major > 2:
+        kwargs['encoding'] = 'utf-8'
+
+
 class OutputRecorder(object):
     """A class used to record the output of command line program.
 
     A thread is dedicated to performing non-blocking reading of the
     command output in this class. Other possible approaches include
-    1. using gobject.io_add_watch() to register a callback and
+    1. using GObject.io_add_watch() to register a callback and
        reading the output when available, or
     2. using select.select() with a short timeout, and reading
        the output if available.
@@ -58,13 +68,14 @@
         """Construction of output recorder.
 
         @param cmd: the command of which the output is to record.
+                This may be a list or a string.
         @param open_mode: the open mode for writing output to save_file.
                 Could be either 'w' or 'a'.
         @param stop_delay_secs: the delay time before stopping the cmd.
         @param save_file: the file to save the output.
 
         """
-        self.cmd = cmd
+        self.cmd = [cmd] if isinstance(cmd, str) else cmd
         self.open_mode = open_mode
         self.start_delay_secs = start_delay_secs
         self.stop_delay_secs = stop_delay_secs
@@ -77,7 +88,9 @@
 
         # Use pseudo terminal to prevent buffering of the program output.
         self._main, self._node = pty.openpty()
-        self._output = os.fdopen(self._main)
+        fdopen_kwargs = {}
+        _may_append_encoding_kwargs(fdopen_kwargs)
+        self._output = os.fdopen(self._main, **fdopen_kwargs)
 
         # Set non-blocking flag.
         fcntl.fcntl(self._output, fcntl.F_SETFL, os.O_NONBLOCK)
@@ -85,14 +98,20 @@
 
     def record(self):
         """Record the output of the cmd."""
-        logging.info('Recording output of "%s".', self.cmd)
+        logging.info('Recording output of "%s".', ' '.join(self.cmd))
         try:
-            self._recorder = subprocess.Popen(
-                    self.cmd, stdout=self._node, stderr=self._node)
+            popen_kwargs = {'stdout': self._node, 'stderr': self._node}
+            _may_append_encoding_kwargs(popen_kwargs)
+            self._recorder = subprocess.Popen(self.cmd, **popen_kwargs)
         except:
-            raise OutputRecorderError('Failed to run "%s"' % self.cmd)
+            raise OutputRecorderError('Failed to run "%s"' %
+                                      ' '.join(self.cmd))
 
-        with open(self.save_file, self.open_mode) as output_f:
+        ansi_escape_re = re.compile(r'\x1b\[[^m]*m')
+
+        open_kwargs = {}
+        _may_append_encoding_kwargs(open_kwargs)
+        with open(self.save_file, self.open_mode, **open_kwargs) as output_f:
             output_f.write(os.linesep + '*' * 80 + os.linesep)
             while True:
                 try:
@@ -103,13 +122,11 @@
                     line = ''
 
                 if line:
+                    # Remove ANSI escape sequences so that the XML converter can work.
+                    line = ansi_escape_re.sub('', line)
                     output_f.write(line)
                     output_f.flush()
-                    # The output, e.g. the output of btmon, may contain some
-                    # special unicode such that we would like to escape.
-                    # In this way, regular expression search could be conducted
-                    # properly.
-                    self.contents.append(line.encode('unicode-escape'))
+                    self.contents.append(line)
                 elif self._stop_recording_thread_event.is_set():
                     self._stop_recording_thread_event.clear()
                     break
@@ -198,7 +215,7 @@
 
 if __name__ == '__main__':
     # A demo using the btmon tool to monitor bluetoothd activity.
-    cmd = 'btmon'
+    cmd = ['btmon', '-c', 'never']
     recorder = OutputRecorder(cmd)
 
     if True:
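The _may_append_encoding_kwargs helper introduced above keeps a single call site working under both interpreters by only passing the encoding argument on Python 3. A small standalone illustration of the same pattern (names here are illustrative):

import subprocess
import sys


def _text_kwargs():
    """Return kwargs that request decoded text output on Python 3 only."""
    return {'encoding': 'utf-8'} if sys.version_info.major > 2 else {}


# On Python 3 communicate() returns str thanks to the encoding kwarg; on
# Python 2 it already returns str without it.
proc = subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE,
                        **_text_kwargs())
print(proc.communicate()[0])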
diff --git a/client/cros/cellular/base_station_8960.py b/client/cros/cellular/base_station_8960.py
index c111975..d3b4226 100644
--- a/client/cros/cellular/base_station_8960.py
+++ b/client/cros/cellular/base_station_8960.py
@@ -1,11 +1,21 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import re
+import six
 import time
+
+from six.moves import zip
+
 import common
+
 from autotest_lib.client.cros.cellular import cellular_logging
 from autotest_lib.client.cros.cellular import cellular_system_error
 from autotest_lib.client.cros.cellular import air_state_verifier
@@ -13,6 +23,7 @@
 from autotest_lib.client.cros.cellular import cellular
 from autotest_lib.client.bin import utils
 
+
 POLL_SLEEP = 0.2
 
 log = cellular_logging.SetupCellularLogging('base_station_8960')
@@ -70,8 +81,8 @@
             result_text = self.c.Query('CALL:COUNT:DTMonitor:%s:DRATe?' %
                                        counter)
             result = [float(x) for x in result_text.rstrip().split(',')]
-            output[counter] = dict(zip(['Mean', 'Current', 'Max', 'Total'],
-                                       result))
+            output[counter] = dict(list(zip(['Mean', 'Current', 'Max', 'Total'],
+                                       result)))
         logging.info('Data counters: %s', output)
         return output
 
@@ -313,7 +324,7 @@
     # Put each value in "" marks to quote it for GPIB
     TECHNOLOGY_TO_FORMAT = dict([
         (x, '"%s"' % y) for
-        x, y in TECHNOLOGY_TO_FORMAT_RAW.iteritems()])
+        x, y in six.iteritems(TECHNOLOGY_TO_FORMAT_RAW)])
 
     TECHNOLOGY_TO_CONFIG_STANZA = {
         cellular.Technology.CDMA_2000: ConfigStanzas.CDMA_2000_MAX,
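The conversions in this file follow the usual six-based pattern for keeping dict iteration and zip() behavior identical on Python 2 and 3; a small standalone illustration with made-up data (the variable names are illustrative):

import six
from six.moves import zip

technology_to_format = {'LTE': 'FDD LTE', 'CDMA_2000': 'IS-2000'}

# six.iteritems() works on both interpreters without building a list on py2.
quoted = dict([(key, '"%s"' % value)
               for key, value in six.iteritems(technology_to_format)])

# zip() returns an iterator on python 3; wrapping it in list() before dict()
# keeps the py2 semantics explicit.
counters = dict(list(zip(['Mean', 'Current', 'Max', 'Total'],
                         [1.0, 2.0, 3.0, 4.0])))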
diff --git a/client/cros/cellular/base_station_pxt.py b/client/cros/cellular/base_station_pxt.py
index 6e97013..9f37755 100644
--- a/client/cros/cellular/base_station_pxt.py
+++ b/client/cros/cellular/base_station_pxt.py
@@ -1,11 +1,17 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import re
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import six
 import time
 
 import common
+
 from autotest_lib.client.bin import utils
 from autotest_lib.client.cros.cellular import air_state_verifier
 from autotest_lib.client.cros.cellular import base_station_interface
@@ -297,7 +303,7 @@
     # Put each value in "" marks to quote it for GPIB
     TECHNOLOGY_TO_FORMAT = dict([
         (x, '"%s"' % y) for
-        x, y in TECHNOLOGY_TO_FORMAT_RAW.iteritems()])
+        x, y in six.iteritems(TECHNOLOGY_TO_FORMAT_RAW)])
 
     TECHNOLOGY_TO_CONFIG_STANZA = {
         cellular.Technology.CDMA_2000: ConfigStanzas.CDMA_2000_MAX,
diff --git a/client/cros/cellular/base_station_pxt_test_noautorun.py b/client/cros/cellular/base_station_pxt_test_noautorun.py
index ad20459..6761402 100644
--- a/client/cros/cellular/base_station_pxt_test_noautorun.py
+++ b/client/cros/cellular/base_station_pxt_test_noautorun.py
@@ -1,16 +1,22 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 
-import scpi
-import cellular_logging
-import unittest
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 
+from autotest_lib.client.cros.cellular import scpi
+from autotest_lib.client.cros.cellular import cellular_logging
+
+import unittest
 import common
+
 from autotest_lib.client.cros.cellular import labconfig
-import base_station_pxt
-import prologix_scpi_driver
+from autotest_lib.client.cros.cellular import base_station_pxt
+from autotest_lib.client.cros.cellular import prologix_scpi_driver
 
 log = cellular_logging.SetupCellularLogging('base_station_pxt_test')
 
@@ -46,7 +52,7 @@
         """Test this function on the PXT class"""
         self._call_box_init()
         self.call_box.SetTechnology('Technology:LTE')
-        print self.call_box.GetRatUeDataStatus()
+        print(self.call_box.GetRatUeDataStatus())
         self._call_box_close()
 
 
diff --git a/client/cros/cellular/cellular_logging.py b/client/cros/cellular/cellular_logging.py
index da5c386..1ac7ae1 100644
--- a/client/cros/cellular/cellular_logging.py
+++ b/client/cros/cellular/cellular_logging.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/cellular/emulator_config.py b/client/cros/cellular/emulator_config.py
index 44b8679..9669cd0 100644
--- a/client/cros/cellular/emulator_config.py
+++ b/client/cros/cellular/emulator_config.py
@@ -61,7 +61,7 @@
         tp = cfg['type']
         if call_box_name_part in tp:
             bs_config = cfg
-            log.info('Using this call box: %s ' % cfg)
+            log.info('Using this call box: %s ', cfg)
             break
     if bs_config is None:
         raise Error(
@@ -76,8 +76,7 @@
     rf_switch = _CreateRfSwitch(config)
     if rf_switch:
         port = config.get_rf_switch_port()
-        log.info(
-            'Changing switch port from %s to %s' % (rf_switch.Query(), port))
+        log.info('Changing switch port from %s to %s', rf_switch.Query(), port)
         rf_switch.SelectPort(port)
 
     with bs.checker_context:
diff --git a/client/cros/cellular/ether_io_rf_switch.py b/client/cros/cellular/ether_io_rf_switch.py
index a674ee1..d23847f 100755
--- a/client/cros/cellular/ether_io_rf_switch.py
+++ b/client/cros/cellular/ether_io_rf_switch.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -12,12 +12,19 @@
 to the module
 """
 
-import cellular_logging
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import collections
 import socket
 import struct
 import sys
 
+from six.moves import range
+
+from autotest_lib.client.cros.cellular import cellular_logging
+
 log = cellular_logging.SetupCellularLogging('ether_io_rf_switch')
 
 
@@ -39,7 +46,7 @@
 
     def SendOperation(self, opcode, list_bytes):
         """Sends the specified opcode with [list_bytes] as an argument."""
-        payload = opcode + struct.pack(('=%dB' % len(list_bytes)), *list_bytes)
+        payload = opcode + struct.pack(('=%dB' % len(list_bytes)), *list_bytes).decode('utf-8')
         self.SendPayload(payload)
         return payload
 
@@ -48,7 +55,7 @@
         then reads to make sure command was executed."""
         if read_opcode is None:
             read_opcode = write_opcode.lower()
-        for _ in xrange(3):
+        for _ in range(3):
             write_sent = self.SendOperation(write_opcode, list_bytes)
             self.SendOperation(read_opcode, list_bytes)
             try:
@@ -83,8 +90,8 @@
         decode = [0xe, 0xd, 0xb, 0x7]
 
         self.port_mapping = []
-        for upper in xrange(3):
-            for lower in xrange(4):
+        for upper in range(3):
+            for lower in range(4):
                 self.port_mapping.append(decode[upper] << 4 | decode[lower])
 
     def SelectPort(self, n):
@@ -120,16 +127,16 @@
     def Query(switch, unused_remaining_args):
         (raw_status, port, direction) = switch.Query()
         if direction != 0x00:
-            print 'Warning: Direction register is %x, should be 0x00' % \
-                  direction
+            print('Warning: Direction register is %x, should be 0x00' % \
+                  direction)
         if port is None:
             port_str = 'Invalid'
         else:
             port_str = str(port)
-        print 'Port %s  (0x%x)' % (port_str, raw_status)
+        print('Port %s  (0x%x)' % (port_str, raw_status))
 
     def Usage():
-        print 'usage:  %s hostname {query|select portnumber}' % sys.argv[0]
+        print('usage:  %s hostname {query|select portnumber}' % sys.argv[0])
         exit(1)
 
     try:
diff --git a/client/cros/cellular/forward_8960_screen b/client/cros/cellular/forward_8960_screen
deleted file mode 100755
index 59cc5da..0000000
--- a/client/cros/cellular/forward_8960_screen
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# A local web server that sets up SSH port-forwarding to display the
-# front-panel display of an 8960
-
-import BaseHTTPServer
-import subprocess
-import sys
-
-import labconfig
-
-DOCUMENTATION="""
-This will start up an SSH to port-forward connections to the 8960.
-and then a web server to offer a simple UI to fetch images of the
-8960 front panel display. It will print a localhost URL to visit.
-When you visit that URL, you'll see the front-panel display from
-the instrument. If the image is stale, the display greys out.
-"""
-
-PAGE="""
-<html>
-  <head>
-  </head>
-  <script type="text/javascript">
-    var port = %(ssh_tunnel_port)s;
-    var lastTimestamp = 0;
-    function onTimer() {
-      var imageSpan = document.getElementById('image_span');
-      var newImage = document.createElement('image');
-      var tag = new Date().getTime();
-
-      if (tag - lastTimestamp > 3000) {
-        imageSpan.style.opacity=0.3;
-      }
-
-      newImage.src = 'http://localhost:' + port + '/screen.gif?' + tag;
-      newImage.onload = function () {
-        imageSpan.replaceChild(newImage, imageSpan.children[0]);
-        lastTimestamp = tag;
-        imageSpan.style.opacity=1;
-      }
-      t = setTimeout("onTimer()", 1000);
-    }
-
-    setTimeout("onTimer()", 0);
-  </script>
-
-  <body>
-    <div>8960 in test cell <strong>%(cell)s</strong></div>
-    <span id="image_span">
-      <span>
-        <!-- Placeholder -->
-        8960 screen should go here. <br>
-      </span>
-    </span>
-  </body>
-</html>
-"""
-
-
-try:
-    [cell] = sys.argv[1:]
-except ValueError:
-    print 'Usage: %s [cell-name]' % sys.argv[0]
-    print DOCUMENTATION
-    exit(1)
-
-ssh_tunnel_port = 1839
-http_server_port = 8192
-
-c = labconfig.Configuration(['--cell=%s' % (cell)])
-
-basestation_ip = c.cell['basestations'][0]['bs_addresses'][0]
-bastion_ip = c.cell['perfserver']['address']
-
-ssh_forwarding_configuration = 'localhost:%s:%s:80' % (
-    ssh_tunnel_port, basestation_ip)
-
-
-class PopenContext(object):
-    def __init__(self, *args, **kwargs):
-        self.args = args
-        self.kwargs = kwargs
-
-    def __enter__(self):
-        self.process = subprocess.Popen(*self.args, **self.kwargs)
-        return self.process
-
-    def __exit__(self, exception, value, traceback):
-        self.process.kill()
-
-
-class PageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-    def do_GET(self):
-        self.send_response(200)
-        self.end_headers()
-        self.wfile.write(PAGE % {'ssh_tunnel_port': ssh_tunnel_port,
-                                 'cell': cell})
-
-with PopenContext(
-    ['/usr/bin/ssh',
-     '-N',                  # Forward ports only
-     '-l','root',
-     '-L', ssh_forwarding_configuration,
-     bastion_ip,]) as ssh:
-
-    httpd = BaseHTTPServer.HTTPServer(('', http_server_port), PageHandler)
-    print DOCUMENTATION
-    print 'http://localhost:%s/8960.html' % http_server_port
-    httpd.serve_forever()
diff --git a/client/cros/cellular/hermes_constants.py b/client/cros/cellular/hermes_constants.py
new file mode 100644
index 0000000..182dc21
--- /dev/null
+++ b/client/cros/cellular/hermes_constants.py
@@ -0,0 +1,97 @@
+# Lint as: python2, python3
+# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This module provides bindings for HermesManager DBus constants, such as
+interface names, enumerations, and errors.
+
+"""
+
+# Hermes DBus binding errors
+DBUS_HERMES_UNKNOWN = 'org.chromium.Hermes.Error.Unknown'
+DBUS_HERMES_UNSUPPORTED = 'org.chromium.Hermes.Error.Unsupported'
+DBUS_HERMES_WRONGSTATE = 'org.chromium.Hermes.Error.WrongState'
+
+# Other Hermes DBus errors
+DBUS_HERMES_PROFILE_ALREADY_DISABLED = 'org.chromium.Hermes.Error.AlreadyDisabled'
+DBUS_HERMES_PROFILE_ALREADY_ENABLED = 'org.chromium.Hermes.Error.AlreadyEnabled'
+DBUS_HERMES_BAD_NOTIFICATION = 'org.chromium.Hermes.Error.BadNotification'
+DBUS_HERMES_BAD_REQUEST = 'org.chromium.Hermes.Error.BadRequest'
+DBUS_HERMES_INTERNAL_LPA_FAILURE = 'org.chromium.Hermes.Error.InternalLpaFailure'
+DBUS_HERMES_INVALID_ACTIVATION_CODE = 'org.chromium.Hermes.Error.InvalidActivationCode'
+DBUS_HERMES_INVALID_ICCID = 'org.chromium.Hermes.Error.InvalidIccid'
+DBUS_HERMES_INVALID_PARAM = 'org.chromium.Hermes.Error.InvalidParameter'
+DBUS_HERMES_MALFORMED_RESPONSE = 'org.chromium.Hermes.Error.MalformedResponse'
+DBUS_HERMES_NEED_CONFIRMATION_CODE = 'org.chromium.Hermes.Error.NeedConfirmationCode'
+DBUS_HERMES_NO_RESPONSE = 'org.chromium.Hermes.Error.NoResponse'
+DBUS_HERMES_PENDING_PROFILE = 'org.chromium.Hermes.Error.PendingProfile'
+DBUS_HERMES_SEND_APDU_FAILURE = 'org.chromium.Hermes.Error.SendApduFailure'
+DBUS_HERMES_SEND_HTTP_FAILURE = 'org.chromium.Hermes.Error.SendHttpsFailure'
+DBUS_HERMES_SEND_NOTIFICATION_FAILURE = 'org.chromium.Hermes.Error.SendNotificationFailure'
+DBUS_HERMES_TEST_PROFILE_INPROD = 'org.chromium.Hermes.Error.TestProfileInProd'
+
+# Interfaces
+# Standard Interfaces
+I_PROPERTIES = 'org.freedesktop.DBus.Properties'
+I_INTROSPECTABLE = 'org.freedesktop.DBus.Introspectable'
+I_OBJECT_MANAGER = 'org.freedesktop.DBus.ObjectManager'
+
+#
+# For eSIM interactions.
+#
+HERMES_SERVICE = 'org.chromium.Hermes'
+HERMES_OBJECT = '/org/chromium/Hermes'
+HERMES_MANAGER_OBJECT = '/org/chromium/Hermes/Manager'
+HERMES_MANAGER_IFACE = 'org.chromium.Hermes.Manager'
+
+HERMES_EUICC_OBJECT = '/org/chromium/Hermes/Euicc'
+HERMES_EUICC_IFACE = 'org.chromium.Hermes.Euicc'
+
+HERMES_PROFILE_OBJECT = '/org/chromium/Hermes/Profile'
+HERMES_PROFILE_IFACE = 'org.chromium.Hermes.Profile'
+
+
+EUICC_ENUMERATION_TIMEOUT = 20
+EUICC_ENABLE_DISABLE_TIMEOUT = 10
+PROFILE_ENABLE_DISABLE_TIMEOUT = 10
+PROFILE_REFRESH_TIMEOUT = 10
+# Amount of time to wait between attempts to connect to HermesManager.
+CONNECT_WAIT_INTERVAL_SECONDS = 20
+HERMES_RESTART_WAIT_SECONDS   = 30
+# DBus method reply timeout in milliseconds
+HERMES_DBUS_METHOD_REPLY_TIMEOUT = 120 * 1000
+
+def ProfileStateToString(state):
+    """
+    Returns a string for the given state.
+
+    @param state: Profile state value.
+
+    @return A string that describes the given state.
+
+    """
+    PROFILE_STATE_STRINGS = [
+        'PENDING',
+        'INACTIVE',
+        'ACTIVE'
+    ]
+    return PROFILE_STATE_STRINGS[state]
+
+
+def ProfileClassToString(pclass):
+    """
+    Returns a string for the given class.
+
+    @param pclass: Profile class value.
+
+    @return A string that describes the given class.
+
+    """
+    PROFILE_CLASS_STRINGS = [
+        'TESTING',
+        'PROVISIONING',
+        'OPERATIONAL'
+    ]
+    return PROFILE_CLASS_STRINGS[pclass]
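A hedged sketch of how these constants are typically consumed from dbus-python, assuming Hermes is running on the system bus (the property dump is for illustration only):

import dbus

from autotest_lib.client.cros.cellular import hermes_constants

bus = dbus.SystemBus()
manager = bus.get_object(hermes_constants.HERMES_SERVICE,
                         hermes_constants.HERMES_MANAGER_OBJECT)
properties = dbus.Interface(manager, hermes_constants.I_PROPERTIES)
# Dump every property exported on the Manager interface.
print(properties.GetAll(hermes_constants.HERMES_MANAGER_IFACE))

# The helpers map raw enum values reported on the Profile interface.
print(hermes_constants.ProfileStateToString(1))   # 'INACTIVE'
print(hermes_constants.ProfileClassToString(2))   # 'OPERATIONAL'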
diff --git a/client/cros/cellular/hermes_utils.py b/client/cros/cellular/hermes_utils.py
new file mode 100644
index 0000000..5524758
--- /dev/null
+++ b/client/cros/cellular/hermes_utils.py
@@ -0,0 +1,519 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import dbus
+import logging
+import random
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.cellular import hermes_constants
+from autotest_lib.client.cros.networking import hermes_proxy
+from autotest_lib.client.cros.networking import mm1_proxy
+
+# Helper functions
+def connect_to_hermes():
+    """
+    Attempts to connect to a DBus object.
+
+    @raise: error.TestFail if connection fails.
+
+    """
+    hermes_manager = None
+    try:
+        hermes_manager = \
+            hermes_proxy.HermesManagerProxy().get_hermes_manager()
+    except dbus.DBusException as e:
+        logging.error('get_hermes_manager error:%s', e)
+        raise error.TestFail('Connect to Hermes failed')
+    if hermes_manager is None:
+        raise error.TestFail('Could not get connection to Hermes')
+    return hermes_manager
+
+def request_installed_profiles(euicc_path, hermes_manager):
+    """
+    Check euicc at given path
+
+    @param euicc_path: path of the given sim
+    @param hermes_manager: hermes manager object
+    @return a tuple of (euicc, dict of installed profile objects); the dict
+            may be empty if no profile is found
+    @raise: error.TestFail if no euicc matches the given path
+
+    """
+    euicc = hermes_manager.get_euicc(euicc_path)
+    if not euicc:
+        raise error.TestFail('No euicc found at:', euicc_path)
+
+    euicc.request_installed_profiles()
+    installed_profiles = euicc.get_installed_profiles()
+    if not installed_profiles:
+        logging.info('No installed profiles on euicc:%s', euicc_path)
+    return euicc, installed_profiles
+
+def install_profile(euicc_path, hermes_manager, is_prod_ci):
+    """
+    Install a profile on the euicc at euicc_path.
+
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @param is_prod_ci: true if it is prodci test and false for testci
+    @return iccid: iccid of the installed profile or None
+
+    """
+    if not is_prod_ci:
+        is_smds_test = random.choice([True, False])
+        logging.info('is_smds_test %s', is_smds_test)
+        if is_smds_test:
+            installed_iccid = install_pending_profile_test(
+                    euicc_path, hermes_manager)
+        else:
+            installed_iccid = install_profile_test(
+                    euicc_path, hermes_manager)
+    else:
+        installed_iccid = get_profile(
+            euicc_path, hermes_manager, False)
+    return installed_iccid
+
+def uninstall_all_profiles(euicc_path, hermes_manager):
+    """
+    Uninstalls all installed test profiles
+
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @raise error.TestFail if any dbus exception happens
+
+    """
+    try:
+        euicc, installed_profiles = \
+            request_installed_profiles(euicc_path, hermes_manager)
+
+        profiles_count = len(installed_profiles)
+        if profiles_count == 0:
+            return
+
+        # Also skip iccid 89010000001234567882 (R&S) as it is a TESTING profile
+        for profile in installed_profiles.keys():
+            if ((hermes_constants.ProfileStateToString(
+                installed_profiles[profile].state) == 'INACTIVE') and
+                (hermes_constants.ProfileClassToString(
+                installed_profiles[profile].profileclass) !=
+                    'TESTING')):
+
+                logging.info('Uninstalling profile - iccid:%s',
+                            installed_profiles[profile].iccid)
+                euicc.uninstall_profile(profile)
+        logging.info('Uninstall done')
+    except dbus.DBusException as e:
+        logging.error('Failed to uninstall a profile error:%s', e)
+        raise error.TestFail('Failed to uninstall profile')
+
+
+def initialize_test(is_prod_ci_test):
+    """
+    Initialize euicc paths, connect to hermes, set test mode
+
+    @param is_prod_ci_test:  true if it is prodci test and false for testci
+
+    """
+    logging.info('===initialize_test started===')
+    mm_proxy = mm1_proxy.ModemManager1Proxy.get_proxy()
+
+    logging.info('Connect to Hermes')
+    hermes_manager = connect_to_hermes()
+
+    # euicc/0 is always the prod one and euicc/1 is for test esim profiles.
+    # We prefer to hardcode euicc/0, since it acts as a check that Hermes
+    # is able to initialize without any error. If Hermes encounters an
+    # error, it will start exposing objects such as
+    # self.prod_euicc_path = "/org/chromium/Hermes/euicc/22"
+    # self.test_euicc_path = "/org/chromium/Hermes/euicc/23"
+
+    euicc = None
+    euicc_path = None
+    for path in hermes_manager.get_available_euiccs():
+        logging.info("Found euicc at %s", path)
+        is_prod_euicc = not hermes_manager.get_euicc(path).is_test_euicc()
+        if is_prod_euicc == is_prod_ci_test:
+            euicc_path = path
+            euicc = hermes_manager.get_euicc(euicc_path)
+            break
+
+    if not euicc:
+        raise error.TestFail("Initialize test failed, " +
+                             "prod" if is_prod_ci_test else "test" +
+                             " euicc not found")
+
+    euicc.use_test_certs(not is_prod_ci_test)
+
+    if not is_prod_ci_test:
+        uninstall_all_profiles(euicc_path, hermes_manager)
+    logging.info('===initialize_test done===\n')
+    return mm_proxy, hermes_manager, euicc_path
+
+def validate_profile_state(euicc_path, hermes_manager, iccid, is_enable):
+    """
+    Validates given profile(iccid) state
+
+    Check state of changed profile
+
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @param iccid: iccid of the profile enabled/disabled
+    @param is_enable: true to enable profile and false to disable
+    @raise error.TestFail if any dbus exception happens
+
+    """
+    try:
+        target_state = 'ACTIVE' if is_enable else 'INACTIVE'
+        _, installed_profiles = \
+        request_installed_profiles(euicc_path, hermes_manager)
+
+        # Check that the profile with the given iccid is in target_state;
+        # profiles other than the given one are not validated here.
+        for profile in installed_profiles.values():
+            if iccid == profile.iccid:
+                if not (hermes_constants.ProfileStateToString(profile.state) ==
+                    target_state):
+                    logging.error('profile:%s not in %s state',
+                    profile.iccid, target_state)
+                    raise error.TestFail('validate_profile_state failed')
+
+        logging.info('validate_profile_state succeeded')
+    except dbus.DBusException as e:
+        logging.error('Profile %s error:%s', target_state, e)
+        raise error.TestFail('validate_profile_state failed')
+
+def set_profile_state(
+    is_active, euicc_path=None, hermes_manager=None,  iccid=None, profile=None):
+    """
+    Enable or Disable already enabled/disabled profile
+
+    @param is_active: True to enable, False to disable profile
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @param iccid: profile iccid to enable
+    @param profile: profile object to enable/disable
+    @raise error.TestFail if expected error not resulted
+
+    """
+    logging.info('set_profile_state start')
+    if euicc_path and iccid:
+        euicc = hermes_manager.get_euicc(euicc_path)
+        profile = euicc.get_profile_from_iccid(iccid)
+
+    if is_active:
+        profile.enable()
+    else:
+        profile.disable()
+    logging.info('set_profile_state done')
+
+def get_profile_state(euicc_path, hermes_manager, iccid):
+    """
+    get profile state
+
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @param iccid: profile iccid to find state
+    @return True if profile state is Active and False if state is Inactive
+
+    """
+    if euicc_path and iccid:
+        euicc = hermes_manager.get_euicc(euicc_path)
+        profile = euicc.get_profile_from_iccid(iccid)
+
+    return (hermes_constants.ProfileStateToString(profile.state) ==
+            'ACTIVE')
+
+def get_profile(euicc_path, hermes_manager, is_active):
+    """
+    Returns an active/inactive profile on the given euicc
+
+    This is to get an already enabled or disabled profile. If no enabled
+    profile is found, enable an inactive one; if no disabled profile is
+    found, disable an active one.
+
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @param is_active: True to get active profile, False to get inactive profile
+    @return iccid: iccid of the active/inactive profile as requested
+    @raise error.TestFail if any dbus exception happens
+
+    """
+    try:
+        _, installed_profiles = \
+            request_installed_profiles(euicc_path, hermes_manager)
+
+        profile_found = False
+        iccid = None
+        profile_needed = 'Enabled' if is_active else 'Disabled'
+        # Find active/inactive profile
+        target_state = 'ACTIVE' if is_active else 'INACTIVE'
+
+        for profile in installed_profiles.values():
+            # skipping TESTING profiles to prevent install/uninstall operations
+            if (hermes_constants.ProfileClassToString(
+                                profile.profileclass) == 'TESTING'):
+                continue
+
+            if not (hermes_constants.ProfileStateToString(profile.state) ==
+                                target_state):
+                set_profile_state(is_active, profile=profile)
+
+            profile_found = True
+            return profile.iccid
+
+        if not profile_found:
+            logging.error('No installed profile which is %s', profile_needed)
+        return iccid
+    except dbus.DBusException as e:
+        raise error.TestFail('get_profile failed :', repr(e))
+
+def get_iccid_of_disabled_profile(euicc_path, hermes_manager, is_prod_ci):
+    """
+    Get profile with disabled status and return its iccid
+
+    For a test esim, install a new profile and return its iccid. For a prod
+    esim, having two profiles is a prerequisite; return the disabled profile.
+
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @param is_prod_ci:  true if it is prodci test and false for testci
+    @return iccid: iccid of the installed profile or None
+
+    """
+    if not is_prod_ci:
+        installed_iccid = install_profile_test(euicc_path, hermes_manager)
+    else:
+        # Get a disabled profile on a prod esim; if none exists, disable one
+        _, installed_profiles = \
+        request_installed_profiles(euicc_path, hermes_manager)
+        for profile in installed_profiles.values():
+            if (hermes_constants.ProfileClassToString(profile.profileclass) ==
+                    'TESTING'):
+                continue
+
+            if (hermes_constants.ProfileStateToString(profile.state) ==
+                    'INACTIVE'):
+                return profile.iccid
+
+        installed_iccid = get_profile(euicc_path, hermes_manager, False)
+
+    return installed_iccid
+
+# Test functions
+def enable_or_disable_profile_test(
+    euicc_path, hermes_manager, iccid, is_enable):
+    """
+    Validates enable/disable profile api DBus call
+
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @param iccid: iccid of the profile to be enabled/disabled
+    @param is_enable: true to enable profile and false to disable
+    @raise error.TestFail if any dbus exception happens
+
+    """
+    try:
+        logging.info('===enable_or_disable_profile_test started===')
+        profile_action = 'Enable' if is_enable else 'Disable'
+        logging.info('%s :', profile_action)
+        euicc, installed_profiles = \
+            request_installed_profiles(euicc_path, hermes_manager)
+        # Profile objects may be stale if IsActive is false
+        # Switch to the euicc we are interested in before
+        # performing an op.
+
+        profile_found = False
+        target_state = 'ACTIVE' if is_enable else 'INACTIVE'
+        # Find active or inactive profile to enable/disable
+        for profile in installed_profiles.values():
+            if not (hermes_constants.ProfileStateToString(profile.state) ==
+                    target_state):
+                if iccid is None or iccid == profile.iccid:
+                    logging.info('Profile to %s:%s', profile_action,
+                                profile.iccid)
+                    profile_found = True
+                    set_profile_state(is_enable, profile=profile)
+                    logging.info('===enable_or_disable_profile_test '
+                                'succeeded===\n')
+                    break
+        if not profile_found:
+            raise error.TestFail('enable_or_disable_profile_test failed - '
+                    'No profile to ' + profile_action)
+        # Check profile state
+        validate_profile_state(euicc_path, hermes_manager, iccid, is_enable)
+    except dbus.DBusException as e:
+        logging.error('Profile %s error:%s', profile_action, e)
+        raise error.TestFail('enable_or_disable_profile_test Failed')
+
+def install_profile_test(euicc_path, hermes_manager):
+    """
+    Validates InstallProfileFromActivationCode api on test euicc
+
+    Uses SMDS calls to find the iccid and activation code of a pending
+    profile and installs it. This requires profiles generated for the EID
+    of the test esims in lab devices.
+
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @return iccid: iccid of the installed profile or None
+    @raise error.TestFail if any dbus exception happens
+
+    """
+    try:
+        # get all pending profiles which are generated on DUT EID
+        # Read all profiles activation code from pending profile dict
+        # Install a profile from activation code, have iccid and
+        # Check the presence of this profile after installation
+
+        logging.info('===install_profile_test started===')
+        activation_code = None
+        confirmation_code = ""
+        iccid = None
+        euicc = None
+
+        euicc, installed_profiles = \
+            request_installed_profiles(euicc_path, hermes_manager)
+
+        euicc.request_pending_profiles(dbus.String('prod.smds.rsp.goog'))
+        logging.info('euicc chosen:%s', euicc_path)
+        profiles_pending = euicc.get_pending_profiles()
+        if not profiles_pending:
+            logging.error('install_profile_test: pending profile not found')
+            raise error.TestFail('No pending profile found on euicc:',
+                                 euicc_path)
+
+        profile_path_to_install, profile_to_install = \
+            list(profiles_pending.items())[0]
+        logging.debug('First pending profile:%s', profile_path_to_install)
+
+        iccid = profile_to_install.iccid
+        activation_code = profile_to_install.activationcode
+
+        logging.info('Installing iccid:%s act_code:%s conf_code:%s',
+                     iccid, activation_code, confirmation_code)
+        # Install
+        euicc.install_profile_from_activation_code(
+            activation_code, confirmation_code)
+
+        # Check if iccid found in installed profiles, installation success
+        installed_profiles = euicc.get_installed_profiles()
+
+        if ((installed_profiles[profile_path_to_install] is None) or
+            (installed_profiles[profile_path_to_install].iccid !=
+             profile_to_install.iccid)):
+            logging.error('install_profile_test failed. Test Failed.')
+            raise error.TestFail('No installed profile found on euicc:',
+                                 euicc_path)
+
+        logging.info('===install_profile_test succeeded===\n')
+        return iccid
+    except dbus.DBusException as e:
+        logging.error('Failed to install a pending profile')
+        raise error.TestFail('install_profile_test failed with ',
+                             repr(e))
+
+def install_pending_profile_test(euicc_path, hermes_manager):
+    """
+    Validates InstallPendingProfile api on test euicc
+    Find a profile from list of esim pending profiles which is not
+    installed yet and install that profile
+
+    Pending profiles must be created for each EID (euicc sim) in the lab DUT.
+    Create profiles from Stork using the lab device's EID; they appear in the
+    pending state for that euicc when RequestPendingProfiles is called.
+
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @return iccid: iccid of the installed profile or None
+    @raise error.TestFail if any dbus exception happens
+
+    """
+    logging.info('===install_pending_profile_test started===')
+    profile_to_install = None
+
+    euicc, installed_profiles = \
+            request_installed_profiles(euicc_path, hermes_manager)
+
+    euicc.request_pending_profiles(dbus.String('prod.smds.rsp.goog'))
+    profiles_pending = euicc.get_pending_profiles()
+    if not profiles_pending:
+        logging.error(
+            'install_pending_profile_test: pending profile not found')
+        raise error.TestFail('No pending profile found on euicc:',
+                             euicc_path)
+
+    profile_path_to_install, profile_to_install = list(profiles_pending.items())[0]
+    iccid = profile_to_install.iccid
+    activation_code = profile_to_install.activationcode
+
+    logging.info('Installing profile:%s iccid:%s act_code:%s',
+                 profile_path_to_install, iccid, activation_code)
+
+    try:
+        # Install
+        profile = euicc.install_pending_profile(
+            profile_path_to_install, "")
+        logging.info('Installed pending profile is %s', profile)
+        if not profile:
+            logging.error('No profile object returned after install')
+            return None
+    except dbus.DBusException as e:
+        logging.error('Failed to install pending profile:%s', e)
+        raise error.TestFail('Failed to install pending profile',
+                                   repr(e))
+
+    # Find above installed profile, if not exists raise test failure
+    installed_profiles = euicc.get_installed_profiles()
+    if ((installed_profiles[profile_path_to_install] is None) or
+        (installed_profiles[profile_path_to_install].iccid !=
+         profile_to_install.iccid)):
+        raise error.TestFail('Install pending profile failed :',
+                             profile_path_to_install)
+
+    logging.info('===install_pending_profile_test succeeded===\n')
+    return iccid
+
+def uninstall_profile_test(euicc_path, hermes_manager, iccid):
+    """
+    Validates the UninstallProfile api by uninstalling the installed
+    profile with the given iccid
+
+    @param euicc_path: esim path based on testci/prodci
+    @param hermes_manager: hermes manager object
+    @param iccid: iccid of the profile to be uninstalled
+    @raise error.TestFail if any dbus exception happens
+
+    """
+    logging.info('===uninstall_profile_test started===')
+    # Get the installed profiles and uninstall the one with the given iccid
+    try:
+        euicc, installed_profiles = \
+            request_installed_profiles(euicc_path, hermes_manager)
+
+        profile_to_uninstall = euicc.get_profile_from_iccid(iccid)
+        if not profile_to_uninstall:
+            raise error.TestFail('No valid profile found at:', euicc_path)
+
+        profile_path = profile_to_uninstall.path
+        uninstalled_profile = None
+
+        # Hermes does not support uninstalling test profiles yet.
+        if hermes_constants.ProfileClassToString(
+                profile_to_uninstall.profileclass) != 'TESTING':
+            logging.info('profile to uninstall is:%s', profile_path)
+            euicc.uninstall_profile(profile_path)
+            uninstalled_profile = profile_path
+            logging.info('uninstall_profile_test succeeded')
+
+        if not uninstalled_profile:
+            raise error.TestFail(
+                'uninstall_profile_test failed - No uninstallable profile')
+
+        # Try to find the uninstalled profile, if exists raise test failure
+        profiles_installed = euicc.get_installed_profiles()
+        for profile in profiles_installed.keys():
+            if uninstalled_profile in profile:
+                raise error.TestFail('uninstall_profile_test profile Failed')
+        logging.info('===uninstall_profile_test succeeded===\n')
+    except dbus.DBusException as e:
+        raise error.TestFail('Failed to uninstall profile', e)
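Taken together, the helpers above support a flow like the following rough sketch of a test-CI run; it is an outline under assumed lab prerequisites (pending Stork profiles for the DUT's EID), not an actual control file:

from autotest_lib.client.cros.cellular import hermes_utils

is_prod_ci = False
mm_proxy, hermes_manager, euicc_path = hermes_utils.initialize_test(is_prod_ci)

# Install a profile (via SM-DS or an activation code) and exercise it.
iccid = hermes_utils.install_profile(euicc_path, hermes_manager, is_prod_ci)
hermes_utils.enable_or_disable_profile_test(
        euicc_path, hermes_manager, iccid, True)
hermes_utils.enable_or_disable_profile_test(
        euicc_path, hermes_manager, iccid, False)

# Clean up the profile and anything left over from earlier runs.
hermes_utils.uninstall_profile_test(euicc_path, hermes_manager, iccid)
hermes_utils.uninstall_all_profiles(euicc_path, hermes_manager)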
diff --git a/client/cros/cellular/labconfig.py b/client/cros/cellular/labconfig.py
index 3c50111..ca86e64 100644
--- a/client/cros/cellular/labconfig.py
+++ b/client/cros/cellular/labconfig.py
@@ -1,14 +1,21 @@
+# Lint as: python2, python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import optparse
 import pickle
 import re
+import six
 import subprocess
 
 import common
+
 from autotest_lib.client.cros.cellular import cellular
 from autotest_lib.client.cros.cellular import cellular_logging
 from autotest_lib.client.cros.cellular import labconfig_data
@@ -33,10 +40,13 @@
     stdout = subprocess.Popen(['ip', '-4', 'addr', 'show', 'dev', interface],
                               stdout=subprocess.PIPE).communicate()[0]
 
-    match = re.search(r'inet ([0-9.]+)[/ ]', stdout)
+    if six.PY2:
+        # stdout is a string in py2, but we need it to match a byte pattern.
+        stdout = stdout.encode('ascii')
+    match = re.search(b'inet ([0-9.]+)[/ ]', stdout)
     if not match:
         return None
-    return match.group(1)
+    return match.group(1).decode()
 
 
 class Configuration(object):
@@ -76,7 +86,7 @@
         if name not in labconfig_data.CELLS:
             raise LabConfigError(
                 'Could not find cell %s, valid cells are %s' % (
-                    name, labconfig_data.CELLS.keys()))
+                    name, list(labconfig_data.CELLS.keys())))
 
         return labconfig_data.CELLS[name]
 
@@ -96,15 +106,15 @@
             machine = self.ip
         ifconfig = ''
         if not machine:
-            log.debug('self.ip is : %s ' % self.ip)
+            log.debug('self.ip is : %s ', self.ip)
             # TODO(byronk): use sysfs to find network interface
             possible_interfaces = ['eth0', 'eth1', 'eth_test']
-            log.debug('Looking for an up network interface in : %s' %
+            log.debug('Looking for an up network interface in : %s',
                       possible_interfaces)
             for interface in possible_interfaces:
                 machine = get_interface_ip(interface)
                 if machine:
-                    log.debug('Got an IP address: %s Stopping the search.. ' %
+                    log.debug('Got an IP address: %s Stopping the search.. ',
                               machine)
                     self.ip = machine
                     break
@@ -152,7 +162,7 @@
         @param machine: machine to get rf switch port for
         """
         dut = self._get_dut(machine)
-        print dut
+        print(dut)
         return dut['rf_switch_port']
 
     def get_pickle(self):
diff --git a/client/cros/cellular/labconfig_data.py b/client/cros/cellular/labconfig_data.py
index 49f1c45..383221d 100755
--- a/client/cros/cellular/labconfig_data.py
+++ b/client/cros/cellular/labconfig_data.py
@@ -1,10 +1,15 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Configuration for cell emulator tests."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import copy, unittest
+import six
 
 CELLS = {}
 
@@ -30,7 +35,7 @@
     """Combines two dict-of-dict trees, favoring the second."""
     try:
         a = copy.copy(a_original)
-        for (key_b, value_b) in b.iteritems():
+        for (key_b, value_b) in six.iteritems(b):
             a[key_b] = combine_trees(a.get(key_b, None), value_b)
     except AttributeError:  # one argument wasn't a dict.  B wins.
         return b
diff --git a/client/cros/cellular/labconfig_test.py b/client/cros/cellular/labconfig_test.py
index 8922616..be9f375 100755
--- a/client/cros/cellular/labconfig_test.py
+++ b/client/cros/cellular/labconfig_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,8 +6,8 @@
 # pylint: disable-msg=C0111
 
 import unittest
-import cellular
-import labconfig
+from autotest_lib.client.cros.cellular import cellular
+from autotest_lib.client.cros.cellular import labconfig
 # Use the same import line to keep this global on the same key
 from autotest_lib.client.cros.cellular import labconfig_data
 
@@ -74,4 +74,4 @@
                          c.get_rf_switch_port('one_two_three_four'))
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()
diff --git a/client/cros/cellular/labconfig_write_stanzas b/client/cros/cellular/labconfig_write_stanzas
deleted file mode 100755
index bb19e7e..0000000
--- a/client/cros/cellular/labconfig_write_stanzas
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/python2
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-DOCS="""Print DHCP and /etc/hosts stanzas for hosts in a specified cell."""
-
-import exceptions, io, sys
-import labconfig_data
-
-
-def usage(message=''):
-    print '%s:\n\t%s\n' % (sys.argv[0], DOCS)
-    print '%susage: %s CELLNAME' % (message, sys.argv[0])
-    sys.exit(1)
-
-
-def find_names(visitor, root):
-    """Traverse config tree, calling visitor on dicts with 'name' field."""
-    if type(root) == dict and 'name' in root:
-        visitor(root)
-    if type(root) == dict:
-        for child in root.values():
-            find_names(visitor, child)
-    elif hasattr(root, '__iter__'):
-        for entry in root:
-            find_names(visitor, entry)
-
-
-class Formatter(object):
-    def __init__(self):
-        self.dns = io.StringIO()
-        self.dhcp = io.StringIO()
-
-    def Visit(self, d):
-        if 'address' in d and 'name' in d:
-            self.dns.write(u'%(address)s\t%(name)s\n' % d)
-        else:
-            return
-        if 'ethernet_mac' in d:
-            self.dhcp.write((u'host %(name)s {\n' +
-                              '\thardware ethernet %(ethernet_mac)s;\n' +
-                              '\tfixed-address %(address)s;\n' +
-                              '}\n') % d)
-
-
-if __name__ == '__main__':
-    if len(sys.argv) < 2:
-        usage()
-
-    [cell] = sys.argv[1:]
-    if cell not in labconfig_data.CELLS:
-        usage('Could not find cell %s\n' % cell)
-
-    f = Formatter()
-    find_names(f.Visit, labconfig_data.CELLS[cell])
-
-    print f.dhcp.getvalue()
-    print '\n'
-    print f.dns.getvalue()
diff --git a/client/cros/cellular/mbim_compliance/mbim_channel.py b/client/cros/cellular/mbim_compliance/mbim_channel.py
index 9c95996..a487869 100644
--- a/client/cros/cellular/mbim_compliance/mbim_channel.py
+++ b/client/cros/cellular/mbim_compliance/mbim_channel.py
@@ -4,14 +4,15 @@
 
 import logging
 import multiprocessing
-import Queue
 import struct
 import time
 
 import common
+from six.moves.queue import Empty
+
 from autotest_lib.client.bin import utils
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_channel_endpoint
+from autotest_lib.client.cros.cellular.mbim_compliance import \
+    mbim_channel_endpoint
 from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
 
 
@@ -287,7 +288,7 @@
             try:
                 first_fragment = self._response_queue.get(
                         True, self.FRAGMENT_TIMEOUT_S)
-            except Queue.Empty:
+            except Empty:
                 # *Don't fail* Just return nothing.
                 return fragments
 
@@ -306,7 +307,7 @@
             try:
                 fragment = self._response_queue.get(True,
                                                     self.FRAGMENT_TIMEOUT_S)
-            except Queue.Empty:
+            except Empty:
                 # *Don't fail* Just return the fragments we got so far.
                 break
 
diff --git a/client/cros/cellular/mbim_compliance/mbim_channel_endpoint.py b/client/cros/cellular/mbim_compliance/mbim_channel_endpoint.py
index 67fa171..d216cc9 100644
--- a/client/cros/cellular/mbim_compliance/mbim_channel_endpoint.py
+++ b/client/cros/cellular/mbim_compliance/mbim_channel_endpoint.py
@@ -3,18 +3,17 @@
 # found in the LICENSE file.
 
 import logging
-import Queue
 import signal
 import struct
 import time
-import numpy
-
 from collections import namedtuple
-from usb import core
 
 import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
+import numpy
+from six.moves.queue import Queue
+from usb import core
 
+from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
 
 USBNotificationPacket = namedtuple(
         'USBNotificationPacket',
diff --git a/client/cros/cellular/mbim_compliance/mbim_channel_unittest.py b/client/cros/cellular/mbim_compliance/mbim_channel_unittest.py
index d150bc7..810c083 100644
--- a/client/cros/cellular/mbim_compliance/mbim_channel_unittest.py
+++ b/client/cros/cellular/mbim_compliance/mbim_channel_unittest.py
@@ -2,14 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+
 import array
 import logging
-import mox
 import multiprocessing
 import struct
 import unittest
+from unittest.mock import patch
 
 import common
+
 from autotest_lib.client.cros.cellular.mbim_compliance import mbim_channel
 from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
 
@@ -25,16 +28,11 @@
         self._in_buffer_size = 100
 
         self._setup_mock_subprocess()
-        self._mox = mox.Mox()
 
-        # Reach into |MBIMChannel| and mock out the request queue, so we can set
-        # expectations on it.
-        # |multiprocessing.Queue| is actually a function that returns some
-        # hidden |multiprocessing.queues.Queue| class. We'll grab the class from
-        # a temporary object so we can mock it.
-        some_queue = multiprocessing.Queue()
-        queue_class = some_queue.__class__
-        self._mock_request_queue = self._mox.CreateMock(queue_class)
+        patcher = patch('multiprocessing.Queue')
+        self._mock_request_queue = patcher.start()
+        self.addCleanup(patcher.stop)
+
         self._channel._request_queue = self._mock_request_queue
 
         # On the other hand, just grab the real response queue.
@@ -47,7 +45,6 @@
 
     def tearDown(self):
         self._channel.close()
-        self._subprocess_mox.VerifyAll()
 
 
     def _setup_mock_subprocess(self):
@@ -58,22 +55,17 @@
         |tearDown|.
 
         """
-        self._subprocess_mox = mox.Mox()
-        mock_process = self._subprocess_mox.CreateMock(multiprocessing.Process)
-        mock_process(target=mox.IgnoreArg(),
-                     args=mox.IgnoreArg()).AndReturn(mock_process)
-        mock_process.start()
+        patcher = patch.object(multiprocessing, 'Process')
+        mock_process = patcher.start()
+        self.addCleanup(patcher.stop)
+        mock_process.return_value = mock_process
 
         # Each API call into MBIMChannel results in an aliveness ping to the
         # subprocess.
         # Finally, when |self._channel| is destructed, it will attempt to
         # terminate the |mock_process|, with increasingly drastic actions.
-        mock_process.is_alive().MultipleTimes().AndReturn(True)
-        mock_process.join(mox.IgnoreArg())
-        mock_process.is_alive().AndReturn(True)
-        mock_process.terminate()
+        mock_process.is_alive.return_value = True
 
-        self._subprocess_mox.ReplayAll()
         self._channel = mbim_channel.MBIMChannel(
                 self._device,
                 self._interface_number,
@@ -84,7 +76,7 @@
 
     def test_creation(self):
         """ A trivial test that we mocked out the |Process| class correctly. """
-        pass
+        self._setup_mock_subprocess()
 
 
     def test_unfragmented_packet_successful(self):
@@ -264,26 +256,26 @@
         response = self._get_unfragmented_packet(1)
         notification_1 = self._get_fragment(0, 1, 0)
         self._response_queue.put_nowait(notification_1)
-        self._mock_request_queue.qsize().AndReturn(1)
-        self._mock_request_queue.empty().AndReturn(False)
-        self._mock_request_queue.empty().WithSideEffects(
-                self._response_queue.put_nowait(response)).AndReturn(True)
-        self._mox.ReplayAll()
+        self._mock_request_queue.qsize.return_value = 1
+        self._mock_request_queue.empty.return_value = False
+
+        def put_response():
+            """Side effect for mock"""
+            self._response_queue.put_nowait(response)
+
+        self._mock_request_queue.empty.side_effect = [None, put_response]
         self._channel.flush()
-        self._mox.VerifyAll()
         self.assertEqual(0, self._response_queue.qsize())
 
 
     def test_flush_failed(self):
         """ Test the case when the request queue fails to empty out. """
         packet = self._get_unfragmented_packet(1)
-        self._mock_request_queue.qsize().AndReturn(1)
-        self._mock_request_queue.empty().MultipleTimes().AndReturn(False)
-        self._mox.ReplayAll()
+        self._mock_request_queue.qsize.return_value = 1
+        self._mock_request_queue.empty.return_value = False
         self.assertRaises(
                 mbim_errors.MBIMComplianceChannelError,
                 self._channel.flush)
-        self._mox.VerifyAll()
 
 
     def _queue_responses(self, responses):
@@ -303,14 +295,14 @@
         """
 
         last_request = requests[len(requests) - 1]
-        earlier_requests = requests[:len(requests) - 1]
-        for request in earlier_requests:
-            self._mock_request_queue.put_nowait(request)
         if responses:
-            self._mock_request_queue.put_nowait(last_request).WithSideEffects(
-                    lambda _: self._queue_responses(responses))
-        else:
-            self._mock_request_queue.put_nowait(last_request)
+
+            def put_if_last_request(msg):
+                """Side effect for mock"""
+                if msg == last_request:
+                    self._queue_responses(responses)
+
+            self._mock_request_queue.put_nowait.side_effect = put_if_last_request
 
 
     def _verify_transaction_successful(self, requests, responses):
@@ -320,10 +312,8 @@
         @param requests: List of packets sent.
         @param responses: List of packets expected back.
         """
-        self._mox.ReplayAll()
         self.assertEqual(responses,
                          self._channel.bidirectional_transaction(*requests))
-        self._mox.VerifyAll()
 
 
     def _verify_transaction_failed(self, requests):
@@ -333,11 +323,9 @@
         @param requests: List of packets sent.
 
         """
-        self._mox.ReplayAll()
         self.assertRaises(mbim_errors.MBIMComplianceChannelError,
                           self._channel.bidirectional_transaction,
                           *requests)
-        self._mox.VerifyAll()
 
 
     def _get_unfragmented_packet(self, transaction_id):
@@ -371,7 +359,7 @@
 
     def _create_buffer(self, size):
         """ Create an array of the give size initialized to 0x00. """
-        return array.array('B', '\x00' * size)
+        return array.array('B', b'\x00' * size)
 
 
 if __name__ == '__main__':
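The hunks above replace mox record/replay expectations with unittest.mock patchers. A condensed, self-contained sketch of that pattern follows; the Worker class is a stand-in for MBIMChannel, not the real code.

import multiprocessing
import unittest
from unittest.mock import patch


class Worker(object):
    """Stand-in for code that spawns and monitors a subprocess."""

    def __init__(self):
        self._process = multiprocessing.Process(target=lambda: None)
        self._process.start()

    def alive(self):
        return self._process.is_alive()


class WorkerTestCase(unittest.TestCase):
    def setUp(self):
        # patch.object() swaps out multiprocessing.Process for the
        # duration of the test; addCleanup() restores it, replacing the
        # old mox ReplayAll()/VerifyAll() bookkeeping.
        patcher = patch.object(multiprocessing, 'Process')
        self._mock_process = patcher.start()
        self.addCleanup(patcher.stop)
        # Have the constructor return the mock itself so calls made on
        # the "instance" are recorded on the same object.
        self._mock_process.return_value = self._mock_process
        self._mock_process.is_alive.return_value = True

    def test_alive(self):
        worker = Worker()
        self.assertTrue(worker.alive())
        self._mock_process.start.assert_called_once_with()


if __name__ == '__main__':
    unittest.main()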
diff --git a/client/cros/cellular/mbim_compliance/mbim_data_transfer.py b/client/cros/cellular/mbim_compliance/mbim_data_transfer.py
index 2d33464..1b61568 100644
--- a/client/cros/cellular/mbim_compliance/mbim_data_transfer.py
+++ b/client/cros/cellular/mbim_compliance/mbim_data_transfer.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,15 +7,22 @@
 from IP packets and for extracting IP packets from received MBIM NTB frames.
 
 """
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import array
 import struct
 from collections import namedtuple
 
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_data_channel
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
+import six
 
+from six.moves import range
+from six.moves import zip
+
+from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
+from autotest_lib.client.cros.cellular.mbim_compliance import mbim_data_channel
+from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
 
 NTH_SIGNATURE_32 = 0x686D636E  # "ncmh"
 NDP_SIGNATURE_IPS_32 = 0x00737069  # "ips0"
@@ -190,9 +198,10 @@
         @returns offset to place the next payload at.
 
         """
-        next_payload_offset = (
-                (((current_offset + (ntb_divisor - 1)) / ntb_divisor) *
-                 ntb_divisor) + ntb_payload_remainder)
+        next_payload_offset = (((
+                (current_offset +
+                 (ntb_divisor - 1)) // ntb_divisor) * ntb_divisor) +
+                               ntb_payload_remainder)
         return next_payload_offset
 
 
@@ -324,8 +333,8 @@
         # Read the NDP header to find the number of packets in the entry
         self.ndp = self._ndp_class(raw_data=raw_ntb_frame[ndp_offset:])
         num_ndp_entries = (
-               (self.ndp.length - self._ndp_class.get_struct_len()) /
-               self._ndp_entry_class.get_struct_len())
+                (self.ndp.length - self._ndp_class.get_struct_len()) //
+                self._ndp_entry_class.get_struct_len())
         ndp_entries_offset = ndp_offset + self._ndp_class.get_struct_len()
         self.payload = []
         self.ndp_entries = []
@@ -383,7 +392,7 @@
             mbim_errors.log_and_raise(
                     mbim_errors.MBIMComplianceDataTransferError,
                     'Unexpected fields (%s) in %s' % (
-                            kwargs.keys(), cls.__name__))
+                            list(kwargs.keys()), cls.__name__))
     obj = super(cls, cls).__new__(cls, *field_values)
     return obj
 
@@ -407,7 +416,7 @@
             mbim_errors.log_and_raise(
                     mbim_errors.MBIMComplianceDataTransfer,
                     '%s header must have some fields defined' % name)
-        _, field_names = zip(*fields)
+        _, field_names = list(zip(*fields))
         attrs['__new__'] = header_class_new
         header_class = namedtuple(name, field_names)
         # Prepend the class created via namedtuple to |bases| in order to
@@ -418,7 +427,7 @@
         return cls
 
 
-class MBIMNtbHeaders(object):
+class MBIMNtbHeaders(six.with_metaclass(MBIMNtbHeadersMeta, object)):
     """
     Base class for all NTB headers.
 
@@ -430,7 +439,6 @@
         For ex: reserved fields
 
     """
-    __metaclass__ = MBIMNtbHeadersMeta
 
     @classmethod
     def get_fields(cls):
@@ -451,7 +459,7 @@
         @returns The field names of the header structure.
 
         """
-        _, field_names = zip(*cls.get_fields())
+        _, field_names = list(zip(*cls.get_fields()))
         return field_names
 
 
@@ -463,7 +471,7 @@
         @returns The format of fields of the header structure.
 
         """
-        field_formats, _ = zip(*cls.get_fields())
+        field_formats, _ = list(zip(*cls.get_fields()))
         return field_formats
 
 
@@ -548,4 +556,3 @@
     """ The class for MBIM NTH32 objects. """
     _FIELDS = (('I', 'datagram_index'),
                ('I', 'datagram_length'))
-
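Two of the patterns above in isolation: '//' keeps the NTB offset arithmetic integral under Python 3's true division, and six.with_metaclass() expresses the __metaclass__ attribute in a form both interpreters accept. A minimal sketch with an invented header class:

import six


def align_offset(current_offset, ntb_divisor, ntb_payload_remainder):
    # '/' would yield a float on Python 3; '//' keeps the rounded-up
    # offset arithmetic integral, as the hunk above requires.
    return (((current_offset + (ntb_divisor - 1)) // ntb_divisor) *
            ntb_divisor) + ntb_payload_remainder


class HeaderMeta(type):
    def __new__(mcs, name, bases, attrs):
        attrs.setdefault('fields', ())
        return super(HeaderMeta, mcs).__new__(mcs, name, bases, attrs)


class Header(six.with_metaclass(HeaderMeta, object)):
    """Base header; six.with_metaclass replaces the __metaclass__ attribute."""


print(align_offset(13, 4, 0))  # 16
print(Header.fields)           # ()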
diff --git a/client/cros/cellular/mbim_compliance/mbim_data_transfer_unittest.py b/client/cros/cellular/mbim_compliance/mbim_data_transfer_unittest.py
index 023d32f..515bdba 100644
--- a/client/cros/cellular/mbim_compliance/mbim_data_transfer_unittest.py
+++ b/client/cros/cellular/mbim_compliance/mbim_data_transfer_unittest.py
@@ -11,15 +11,15 @@
 from autotest_lib.client.cros.cellular.mbim_compliance import mbim_data_transfer
 
 class TestMbimDeviceContext(object):
-    """ Dummy device context. """
+    """ Stub device context. """
     pass
 
 class TestMbimDescriptorCache(object):
-    """ Dummy MBIM descriptor cache. """
+    """ Stub MBIM descriptor cache. """
     pass
 
 class TestMbimEndpointDescriptor(object):
-    """ Dummy MBIM endpoint descriptor. """
+    """ Stub MBIM endpoint descriptor. """
     pass
 
 class MBIMMessageTestCase(unittest.TestCase):
diff --git a/client/cros/cellular/mbim_compliance/mbim_errors.py b/client/cros/cellular/mbim_compliance/mbim_errors.py
index 86b5cd6..b7922c4 100644
--- a/client/cros/cellular/mbim_compliance/mbim_errors.py
+++ b/client/cros/cellular/mbim_compliance/mbim_errors.py
@@ -405,5 +405,5 @@
     trace = traceback.format_stack()
     # Get rid of the current frame from trace
     trace = trace[:len(trace)-1]
-    logging.error('Traceback:\n' + ''.join(trace))
+    logging.error('Traceback:\n%s', ''.join(trace))
     raise error_object
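A small illustration of the logging change above: passing the traceback as a formatting argument defers the % interpolation to the logging framework (and keeps lint checks such as logging-not-lazy quiet), instead of concatenating the full string with '+'.

import logging
import traceback

logging.basicConfig(level=logging.ERROR)


def report_traceback():
    trace = traceback.format_stack()
    trace = trace[:len(trace) - 1]  # drop the current frame
    # logging interpolates 'Traceback:\n%s' with the joined trace when
    # the record is handled, rather than the caller building the string
    # eagerly with '+'.
    logging.error('Traceback:\n%s', ''.join(trace))


report_traceback()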
diff --git a/client/cros/cellular/mbim_compliance/mbim_message.py b/client/cros/cellular/mbim_compliance/mbim_message.py
index f4af3f8..4f07008 100644
--- a/client/cros/cellular/mbim_compliance/mbim_message.py
+++ b/client/cros/cellular/mbim_compliance/mbim_message.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -27,12 +28,20 @@
                                               |
                                               |>MBIMHostError
 """
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import array
 import logging
 import struct
-import sys
 from collections import namedtuple
 
+import six
+
+from six.moves import map
+from six.moves import zip
+
 from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
 
 
@@ -132,7 +141,7 @@
             mbim_errors.log_and_raise(
                     mbim_errors.MBIMComplianceControlMessageError,
                     'Unexpected fields (%s) in %s' % (
-                            kwargs.keys(), cls.__name__))
+                            list(kwargs.keys()), cls.__name__))
         return obj
 
 
@@ -186,7 +195,7 @@
             if hasattr(base_class, '_CONSOLIDATED_DEFAULTS'):
                 defaults = getattr(base_class, '_CONSOLIDATED_DEFAULTS').copy()
         if '_FIELDS' in attrs:
-            fields = fields + map(list, attrs['_FIELDS'])
+            fields = fields + list(map(list, attrs['_FIELDS']))
         if '_DEFAULTS' in attrs:
             defaults.update(attrs['_DEFAULTS'])
         attrs['_CONSOLIDATED_FIELDS'] = fields
@@ -198,7 +207,7 @@
                     '%s message must have some fields defined' % name)
 
         attrs['__new__'] = message_class_new
-        _, field_names, _ = zip(*fields)
+        _, field_names, _ = list(zip(*fields))
         message_class = namedtuple(name, field_names)
         # Prepend the class created via namedtuple to |bases| in order to
         # correctly resolve the __new__ method while preserving the class
@@ -208,15 +217,13 @@
         return cls
 
 
-class MBIMControlMessage(object):
+class MBIMControlMessage(six.with_metaclass(MBIMControlMessageMeta, object)):
     """
     MBIMControlMessage base class.
 
     This class should not be instantiated or used directly.
 
     """
-    __metaclass__ = MBIMControlMessageMeta
-
     _NEXT_TRANSACTION_ID = 0X00000000
 
 
@@ -316,7 +323,7 @@
         @returns The field names of the message structure.
 
         """
-        _, field_names, _ = zip(*cls.get_fields(get_all=get_all))
+        _, field_names, _ = list(zip(*cls.get_fields(get_all=get_all)))
         return field_names
 
 
@@ -328,7 +335,7 @@
         @returns The format of fields of the message structure.
 
         """
-        field_formats, _, _ = zip(*cls.get_fields(get_all=get_all))
+        field_formats, _, _ = list(zip(*cls.get_fields(get_all=get_all)))
         return field_formats
 
 
@@ -389,7 +396,7 @@
         @returns The transaction id for control message delivery.
 
         """
-        if MBIMControlMessage._NEXT_TRANSACTION_ID > (sys.maxint - 2):
+        if MBIMControlMessage._NEXT_TRANSACTION_ID > (six.MAXSIZE - 2):
             MBIMControlMessage._NEXT_TRANSACTION_ID = 0x00000000
         MBIMControlMessage._NEXT_TRANSACTION_ID += 1
         return MBIMControlMessage._NEXT_TRANSACTION_ID
@@ -434,7 +441,7 @@
                     mbim_errors.MBIMComplianceControlMessageError,
                     "Erorr in finding payload len field in message: %s" %
                     self.__class__.__name__)
-        return payload_len_fields.values()[0]
+        return list(payload_len_fields.values())[0]
 
 
     def get_total_len(self):
@@ -452,7 +459,7 @@
                     mbim_errors.MBIMComplianceControlMessageError,
                     "Erorr in finding total len field in message: %s" %
                     self.__class__.__name__)
-        return total_len_fields.values()[0]
+        return list(total_len_fields.values())[0]
 
 
     def get_num_fragments(self):
@@ -469,7 +476,7 @@
                     mbim_errors.MBIMComplianceControlMessageError,
                     "Erorr in finding num fragments field in message: %s" %
                     self.__class__.__name__)
-        return num_fragment_fields.values()[0]
+        return list(num_fragment_fields.values())[0]
 
 
     def find_payload_class(self):
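The sys.maxint removal in the hunk above is handled with six.MAXSIZE; in isolation the rollover logic looks like this (a module-level counter simplified from the class attribute):

import six

_NEXT_TRANSACTION_ID = 0x00000000


def get_next_transaction_id():
    """Roll the counter over before it approaches the platform bound."""
    global _NEXT_TRANSACTION_ID
    # six.MAXSIZE stands in for the removed sys.maxint.
    if _NEXT_TRANSACTION_ID > (six.MAXSIZE - 2):
        _NEXT_TRANSACTION_ID = 0x00000000
    _NEXT_TRANSACTION_ID += 1
    return _NEXT_TRANSACTION_ID


print(get_next_transaction_id())  # 1
print(get_next_transaction_id())  # 2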
diff --git a/client/cros/cellular/mbim_compliance/mbim_message_request.py b/client/cros/cellular/mbim_compliance/mbim_message_request.py
index ac32ccc..af9cea4 100644
--- a/client/cros/cellular/mbim_compliance/mbim_message_request.py
+++ b/client/cros/cellular/mbim_compliance/mbim_message_request.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -11,8 +12,13 @@
         http://www.usb.org/developers/docs/devclass_docs/
         MBIM10Errata1_073013.zip
 """
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import math
+from six.moves import range
 
 from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
 from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
diff --git a/client/cros/cellular/mbim_compliance/mbim_message_unittest.py b/client/cros/cellular/mbim_compliance/mbim_message_unittest.py
index 3c1f0ae..af157da 100644
--- a/client/cros/cellular/mbim_compliance/mbim_message_unittest.py
+++ b/client/cros/cellular/mbim_compliance/mbim_message_unittest.py
@@ -7,15 +7,16 @@
 import unittest
 
 import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
+
 from autotest_lib.client.cros.cellular.mbim_compliance import \
-        mbim_command_message
+    mbim_command_message
+from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
 from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
 from autotest_lib.client.cros.cellular.mbim_compliance import mbim_message
 from autotest_lib.client.cros.cellular.mbim_compliance import \
-        mbim_message_request
+    mbim_message_request
 from autotest_lib.client.cros.cellular.mbim_compliance import \
-        mbim_message_response
+    mbim_message_response
 
 
 class TestMessage(mbim_message.MBIMControlMessage):
@@ -52,7 +53,7 @@
         with self.assertRaisesRegexp(
                 mbim_errors.MBIMComplianceControlMessageError,
                 '^Missing field value'):
-                message = TestMessage()
+            message = TestMessage()
 
 
     def test_argument_mismatch(self):
@@ -63,7 +64,7 @@
         with self.assertRaisesRegexp(
                 mbim_errors.MBIMComplianceControlMessageError,
                 '^Unexpected fields'):
-                message = TestMessage(message_type=4, fake=5)
+            message = TestMessage(message_type=4, fake=5)
 
 
     def test_message_default_value_set(self):
@@ -195,9 +196,10 @@
         self.assertEqual(message.transaction_id, 1)
         self.assertEqual(message.total_fragments, 2)
         self.assertEqual(message.current_fragment, 0)
-        self.assertEqual(message.device_service_id,
-                         '\x02\x00\x06\xEE\x00\x00\x00\x00\x80\x40\x20\x10'
-                         '\x00\xAA\xBB\xCC')
+        self.assertEqual(
+                message.device_service_id,
+                b'\x02\x00\x06\xEE\x00\x00\x00\x00\x80\x40\x20\x10'
+                b'\x00\xAA\xBB\xCC')
         self.assertEqual(message.cid, 1)
         self.assertEqual(message.status_codes,
                          mbim_constants.MBIM_STATUS_SUCCESS)
@@ -268,9 +270,10 @@
         self.assertEqual(message.transaction_id, 1)
         self.assertEqual(message.total_fragments, 5)
         self.assertEqual(message.current_fragment, 0)
-        self.assertEqual(message.device_service_id,
-                         '\xA2\x89\xCC3\xBC\xBB\x8BO\xB6\xB0\x13>\xC2\xAA\xE6'
-                         '\xDF')
+        self.assertEqual(
+                message.device_service_id,
+                b'\xA2\x89\xCC3\xBC\xBB\x8BO\xB6\xB0\x13>\xC2\xAA\xE6'
+                b'\xDF')
         self.assertEqual(message.cid, 1)
         self.assertEqual(message.status_codes,
                          mbim_constants.MBIM_STATUS_SUCCESS)
diff --git a/client/cros/cellular/mbim_compliance/sequences/open_sequence.py b/client/cros/cellular/mbim_compliance/sequences/open_sequence.py
index 5b1afc7..e1ac440 100644
--- a/client/cros/cellular/mbim_compliance/sequences/open_sequence.py
+++ b/client/cros/cellular/mbim_compliance/sequences/open_sequence.py
@@ -1,9 +1,15 @@
+# Lint as: python2, python3
 # Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import struct
+from six.moves import zip
 from usb import control
 
 import common
@@ -34,14 +40,14 @@
 
 
     def __init__(self, *args):
-        _, field_names = zip(*self._FIELDS)
+        _, field_names = list(zip(*self._FIELDS))
         if len(args) != len(field_names):
             mbim_errors.log_and_raise(
                     mbim_errors.MBIMComplianceError,
                     'Expected %d arguments for %s constructor, got %d.' % (
                             len(field_names),self.__class__.__name__,len(args)))
 
-        fields = zip(field_names, args)
+        fields = list(zip(field_names, args))
         for field in fields:
             setattr(self, field[0], field[1])
 
@@ -51,7 +57,7 @@
         """
         @returns The format string composed of concatenated field formats.
         """
-        field_formats, _ = zip(*cls._FIELDS)
+        field_formats, _ = list(zip(*cls._FIELDS))
         return ''.join(field_format for field_format in field_formats)
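This file and the ones that follow wrap zip() in list(): on Python 3 zip() returns a one-shot iterator, so list() keeps the transposed field tuples indexable and reusable. A tiny sketch with a made-up _FIELDS table:

from six.moves import zip

_FIELDS = (('B', 'bLength'), ('B', 'bDescriptorType'), ('H', 'wTotalLength'))

# list() materializes the transposed pairs so they can be unpacked now
# and iterated again later.
field_formats, field_names = list(zip(*_FIELDS))
print(field_names)                    # ('bLength', 'bDescriptorType', 'wTotalLength')
print('<' + ''.join(field_formats))   # '<BBH'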
 
 
diff --git a/client/cros/cellular/mbim_compliance/tests/dummy.py b/client/cros/cellular/mbim_compliance/tests/dummy.py
deleted file mode 100644
index 7404920..0000000
--- a/client/cros/cellular/mbim_compliance/tests/dummy.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance.tests import test
-
-
-class DummyTest(test.Test):
-    """ A dummy test that always passes. """
-
-    def run(self):
-        """ Always passes. """
-        pass
diff --git a/client/cros/cellular/mbim_compliance/tests/stub.py b/client/cros/cellular/mbim_compliance/tests/stub.py
new file mode 100644
index 0000000..3789399
--- /dev/null
+++ b/client/cros/cellular/mbim_compliance/tests/stub.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+from autotest_lib.client.cros.cellular.mbim_compliance.tests import test
+
+
+class StubTest(test.Test):
+    """ A stub test that always passes. """
+
+    def run(self):
+        """ Always passes. """
+        pass
diff --git a/client/cros/cellular/mbim_compliance/usb_descriptors.py b/client/cros/cellular/mbim_compliance/usb_descriptors.py
index 8edbc95..de9dcdc 100644
--- a/client/cros/cellular/mbim_compliance/usb_descriptors.py
+++ b/client/cros/cellular/mbim_compliance/usb_descriptors.py
@@ -1,10 +1,19 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import struct
 from collections import namedtuple
 
+import six
+
+from six.moves import zip
+
 from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
 
 # All the MBIM_ONLY_* maps are filters for MBIM only function. These maps
@@ -103,7 +112,7 @@
             raise mbim_errors.MBIMComplianceFrameworkError(
                     '%s must define a _FIELDS attribute' % name)
 
-        field_formats, field_names = zip(*attrs['_FIELDS'])
+        field_formats, field_names = list(zip(*attrs['_FIELDS']))
         # USB descriptor data are in the little-endian format.
         data_format = '<' + ''.join(field_formats)
         unpack_length = struct.calcsize(data_format)
@@ -168,14 +177,13 @@
         return cls
 
 
-class Descriptor(object):
+class Descriptor(six.with_metaclass(DescriptorMeta, object)):
     """
     USB Descriptor base class.
 
     This class should not be instantiated or used directly.
 
     """
-    __metaclass__ = DescriptorMeta
 
 
 class UnknownDescriptor(Descriptor):
@@ -346,7 +354,7 @@
     def __iter__(self):
         return self
 
-    def next(self):
+    def __next__(self):
         """
         Returns the next descriptor found in the descriptor data.
 
@@ -382,6 +390,10 @@
         self._descriptor_index += 1
         return descriptor
 
+    def next(self):
+        """Stub for python2, remove once py2 support is not needed."""
+        return self.__next__()
+
 
 def filter_descriptors(descriptor_type, descriptors):
     """
@@ -395,8 +407,7 @@
     """
     if not descriptors:
         return []
-    return filter(lambda descriptor: isinstance(descriptor, descriptor_type),
-                  descriptors)
+    return [descriptor for descriptor in descriptors if isinstance(descriptor, descriptor_type)]
 
 
 def has_distinct_descriptors(descriptors):
@@ -453,14 +464,13 @@
         @returns True if all fields match, False otherwise.
 
         """
-        for key, value in interface_type.iteritems():
+        for key, value in six.iteritems(interface_type):
             if (not hasattr(interface, key) or
                 getattr(interface, key) != value):
                 return False
         return True
 
-    return filter(lambda descriptor: _match_all_fields(descriptor),
-                  descriptors)
+    return [descriptor for descriptor in descriptors if _match_all_fields(descriptor)]
 
 
 def has_bulk_in_and_bulk_out(endpoints):
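A minimal iterator sketch (not the real parser) showing the __next__/next() compatibility shim from the hunk above: Python 3's next(obj) calls __next__(), while the plain next() method is kept as an alias for Python 2 callers.

class FakeDescriptorParser(object):
    """Toy parser illustrating the iterator-protocol shim."""

    def __init__(self, descriptors):
        self._descriptors = list(descriptors)
        self._index = 0

    def __iter__(self):
        return self

    def __next__(self):
        # Python 3 spelling of the iterator protocol, used by next(obj).
        if self._index >= len(self._descriptors):
            raise StopIteration
        descriptor = self._descriptors[self._index]
        self._index += 1
        return descriptor

    def next(self):
        """Alias kept so Python 2 callers and parser.next() still work."""
        return self.__next__()


parser = FakeDescriptorParser(['configuration', 'interface'])
print(next(parser))   # configuration
print(parser.next())  # interface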
diff --git a/client/cros/cellular/mbim_compliance/usb_descriptors_unittest.py b/client/cros/cellular/mbim_compliance/usb_descriptors_unittest.py
index 8cc01b5..d407851 100644
--- a/client/cros/cellular/mbim_compliance/usb_descriptors_unittest.py
+++ b/client/cros/cellular/mbim_compliance/usb_descriptors_unittest.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -175,7 +176,7 @@
                                       0x02, 0x00, 0x02, 0x00])
         parser = DescriptorParser(descriptor_data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, ConfigurationDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
         self.assertEquals(9, descriptor.bLength)
@@ -190,7 +191,7 @@
                                      0xa0, 0xfa]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, InterfaceAssociationDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
         self.assertEquals(8, descriptor.bLength)
@@ -205,7 +206,7 @@
                                      0x00]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, InterfaceDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
         self.assertEquals(9, descriptor.bLength)
@@ -221,7 +222,7 @@
                                      0x00, 0x05]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, HeaderFunctionalDescriptor)
         self.assertIsInstance(descriptor, FunctionalDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
@@ -232,7 +233,7 @@
         self.assertEqual(array('B', [0x05, 0x24, 0x00, 0x20, 0x01]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, MBIMFunctionalDescriptor)
         self.assertIsInstance(descriptor, FunctionalDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
@@ -249,7 +250,7 @@
                                      0x20, 0x80, 0x96, 0x05, 0x00]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, MBIMExtendedFunctionalDescriptor)
         self.assertIsInstance(descriptor, FunctionalDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
@@ -263,7 +264,7 @@
                                      0x05]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, UnionFunctionalDescriptor)
         self.assertIsInstance(descriptor, FunctionalDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
@@ -275,7 +276,7 @@
         self.assertEqual(array('B', [0x05, 0x24, 0x06, 0x00, 0x01]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, EndpointDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
         self.assertEquals(7, descriptor.bLength)
@@ -287,7 +288,7 @@
         self.assertEqual(array('B', [0x07, 0x05, 0x81, 0x03, 0x40, 0x00, 0x05]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, InterfaceDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
         self.assertEquals(9, descriptor.bLength)
@@ -303,7 +304,7 @@
                                      0x02, 0x06]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, InterfaceDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
         self.assertEquals(9, descriptor.bLength)
@@ -319,7 +320,7 @@
                                      0x02, 0x07]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, EndpointDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
         self.assertEquals(7, descriptor.bLength)
@@ -331,7 +332,7 @@
         self.assertEqual(array('B', [0x07, 0x05, 0x82, 0x02, 0x00, 0x02, 0x00]),
                          descriptor.data)
 
-        descriptor = parser.next()
+        descriptor = next(parser)
         self.assertIsInstance(descriptor, EndpointDescriptor)
         self.assertIsInstance(descriptor, Descriptor)
         self.assertEquals(7, descriptor.bLength)
@@ -344,7 +345,7 @@
                          descriptor.data)
 
         with self.assertRaises(StopIteration):
-            descriptor = parser.next()
+            descriptor = next(parser)
 
 
 if __name__ == '__main__':
diff --git a/client/cros/cellular/mm.py b/client/cros/cellular/mm.py
index e523846..b7aa980 100644
--- a/client/cros/cellular/mm.py
+++ b/client/cros/cellular/mm.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -28,13 +29,13 @@
     for provider in MMPROVIDERS:
         try:
             return modem.ModemManager(provider)
-        except dbus.exceptions.DBusException, e:
+        except dbus.exceptions.DBusException as e:
             if e._dbus_error_name != SERVICE_UNKNOWN:
                 raise
 
     try:
         return modem1.ModemManager()
-    except dbus.exceptions.DBusException, e:
+    except dbus.exceptions.DBusException as e:
         if e._dbus_error_name != SERVICE_UNKNOWN:
             raise
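The substantive change here is the Python 3 "except ... as e" spelling. A stand-alone sketch of the same swallow-only-ServiceUnknown pattern, using a stand-in exception class instead of dbus.exceptions.DBusException:

SERVICE_UNKNOWN = 'org.freedesktop.DBus.Error.ServiceUnknown'


class FakeDBusError(Exception):
    """Stand-in for dbus.exceptions.DBusException in this sketch."""

    def __init__(self, error_name):
        super(FakeDBusError, self).__init__(error_name)
        self._dbus_error_name = error_name


def pick_one_provider(providers):
    for provider in providers:
        try:
            raise FakeDBusError(SERVICE_UNKNOWN)  # pretend the service is absent
        except FakeDBusError as e:
            # Only 'service unknown' is swallowed; anything else re-raises.
            if e._dbus_error_name != SERVICE_UNKNOWN:
                raise
    return None


print(pick_one_provider(['org.chromium.ModemManager',
                         'org.freedesktop.ModemManager1']))  # None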
 
diff --git a/client/cros/cellular/mm1.py b/client/cros/cellular/mm1.py
index 2672e5c..dfc37a7 100644
--- a/client/cros/cellular/mm1.py
+++ b/client/cros/cellular/mm1.py
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/cellular/mm1_constants.py b/client/cros/cellular/mm1_constants.py
index 6c77aba..5fe12ca 100644
--- a/client/cros/cellular/mm1_constants.py
+++ b/client/cros/cellular/mm1_constants.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -395,3 +396,18 @@
 MM_SMS_VALIDITY_TYPE_RELATIVE = 1
 MM_SMS_VALIDITY_TYPE_ABSOLUTE = 2
 MM_SMS_VALIDITY_TYPE_ENHANCED = 3
+
+# enum MMSimEsimStatus
+MM_SIM_ESIM_STATUS_UNKNOWN = 0
+MM_SIM_ESIM_STATUS_NO_PROFILES = 1
+MM_SIM_ESIM_STATUS_WITH_PROFILES = 2
+
+MM_INHIBIT_PROCESSING_TIME = 30
+MM_REPROBE_PROCESSING_TIME = 90
+MM_UNINHIBIT_PROCESSING_TIME = MM_INHIBIT_PROCESSING_TIME
+
+# D-Bus path for empty sim slots
+MM_EMPTY_SLOT_PATH = '/'
+
+# Wait times for modem at Modemmanager operations
+MM_MODEM_POLL_TIME = 60
diff --git a/client/cros/cellular/mmtest.py b/client/cros/cellular/mmtest.py
index d548376..795f74a 100644
--- a/client/cros/cellular/mmtest.py
+++ b/client/cros/cellular/mmtest.py
@@ -1,13 +1,12 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import cellular_logging
 import dbus, os, subprocess, time
 
 from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import flimflam_test_path
+from autotest_lib.client.cros.cellular import cellular_logging
 from autotest_lib.client.cros.cellular import modem
 
 log = cellular_logging.SetupCellularLogging('mm_test')
diff --git a/client/cros/cellular/modem.py b/client/cros/cellular/modem.py
index 536dba1..87faabc 100644
--- a/client/cros/cellular/modem.py
+++ b/client/cros/cellular/modem.py
@@ -1,12 +1,18 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import os
 
 from autotest_lib.client.cros.cellular import cellular
+
 import dbus
+import six
 
 MODEM_TIMEOUT=60
 
@@ -125,7 +131,7 @@
     def _CopyPropertiesCheckUnique(src, dest):
         """Copies properties from |src| to |dest| and makes sure there are no
            duplicate properties that have different values."""
-        for key, value in src.iteritems():
+        for key, value in six.iteritems(src):
             if key in dest and value != dest[key]:
                 raise KeyError('Duplicate property %s, different values '
                                '("%s", "%s")' % (key, value, dest[key]))
diff --git a/client/cros/cellular/modem1.py b/client/cros/cellular/modem1.py
index 63384af..b9ce06f 100644
--- a/client/cros/cellular/modem1.py
+++ b/client/cros/cellular/modem1.py
@@ -1,15 +1,22 @@
-#!/usr/bin/python2
+
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 """Implement a modem proxy to talk to a ModemManager1 modem."""
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.cellular import cellular
+from autotest_lib.client.cros.cellular import cellular_logging
 from autotest_lib.client.cros.cellular import mm1
 from autotest_lib.client.cros.cellular import mm1_constants
+
 import dbus
-import cellular_logging
+import six
 
 log = cellular_logging.SetupCellularLogging('modem1')
 
@@ -88,7 +95,7 @@
     def _CopyPropertiesCheckUnique(src, dest):
         """Copies properties from |src| to |dest| and makes sure there are no
            duplicate properties that have different values."""
-        for key, value in src.iteritems():
+        for key, value in six.iteritems(src):
             if key in dest and value != dest[key]:
                 raise KeyError('Duplicate property %s, different values '
                                '("%s", "%s")' % (key, value, dest[key]))
@@ -254,7 +261,7 @@
 
     def EnumerateDevices(self):
         devices = self.objectmanager.GetManagedObjects()
-        return devices.keys()
+        return list(devices.keys())
 
     def GetModem(self, path):
         return Modem(self, path)
diff --git a/client/cros/cellular/modem_utils.py b/client/cros/cellular/modem_utils.py
deleted file mode 100644
index 4e99bf2..0000000
--- a/client/cros/cellular/modem_utils.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.cellular import mm
-
-
-def ClearGobiModemFaultInjection():
-    """If a Gobi modem is present, try to clear its fault-injection state."""
-    try:
-        modem_manager, modem_path = mm.PickOneModem('Gobi')
-    except error.TestError:
-        # Did not find a Gobi modem. Simply return.
-        return
-
-    modem = modem_manager.GetModem(modem_path).GobiModem()
-    if modem:
-        modem.InjectFault('ClearFaults', 1)
diff --git a/client/cros/cellular/net_interface.py b/client/cros/cellular/net_interface.py
index 29f9f38..b47e6eb 100644
--- a/client/cros/cellular/net_interface.py
+++ b/client/cros/cellular/net_interface.py
@@ -1,10 +1,15 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import os
-import urlparse
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 
+import os
+import six
+import six.moves.urllib.parse
 
 import common
 from autotest_lib.client.bin import utils
@@ -69,11 +74,11 @@
                 (PseudoNetInterface.IFACE_IP_BASE,
                  PseudoNetInterface.IFACE_IP_BASE))
         test_fetch_url_host = \
-                urlparse.urlparse(network.FETCH_URL_PATTERN_FOR_TEST).netloc
+                six.moves.urllib.parse.urlparse(network.FETCH_URL_PATTERN_FOR_TEST).netloc
         dns_lookup_table = {
                 PseudoNetInterface.SHILL_PORTAL_DETECTION_SERVER: peer_ip,
                 test_fetch_url_host: peer_ip }
-        for host, ip in dns_lookup_table.iteritems():
+        for host, ip in six.iteritems(dns_lookup_table):
             dnsmasq_command += '--address=/%s/%s ' % (host, ip)
         return dnsmasq_command
 
@@ -140,4 +145,3 @@
         """
         self.Teardown()
         self.Setup()
-
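The urlparse import move in isolation: six.moves.urllib.parse resolves to urlparse on Python 2 and urllib.parse on Python 3. The URL below is made up for the sketch and merely stands in for network.FETCH_URL_PATTERN_FOR_TEST.

import six.moves.urllib.parse

# Hypothetical fetch-URL pattern used only for this example.
fetch_url_pattern = 'http://downloads.example.com/check?size=%d'
host = six.moves.urllib.parse.urlparse(fetch_url_pattern).netloc
print(host)  # downloads.example.com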
diff --git a/client/cros/cellular/prologix_scpi_driver.py b/client/cros/cellular/prologix_scpi_driver.py
index 005e66e..d049be1 100644
--- a/client/cros/cellular/prologix_scpi_driver.py
+++ b/client/cros/cellular/prologix_scpi_driver.py
@@ -1,10 +1,11 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import cellular_system_error
-import cellular_logging
+from autotest_lib.client.cros.cellular import cellular_system_error
+from autotest_lib.client.cros.cellular import cellular_logging
+
 import os
 import select
 import socket
@@ -50,7 +51,7 @@
 
         self.connection_key = "%s:%s" % (hostname, port)
         self.connection_data = {self.connection_key: traceback.format_stack()}
-        if self.connection_key in self.all_open_connections.keys():
+        if self.connection_key in list(self.all_open_connections.keys()):
             raise cellular_system_error.BadState(
               'IP network connection to '
               'prologix is already in use. : %s ' % self.all_open_connections)
diff --git a/client/cros/cellular/prologix_scpi_driver_test_noautorun.py b/client/cros/cellular/prologix_scpi_driver_test_noautorun.py
index 62f1a27..e8e5dfc 100644
--- a/client/cros/cellular/prologix_scpi_driver_test_noautorun.py
+++ b/client/cros/cellular/prologix_scpi_driver_test_noautorun.py
@@ -1,16 +1,17 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 
 import copy
-import mock
-import prologix_scpi_driver
-import scpi
 import unittest
-import cellular_logging
-import cellular_system_error
+from unittest import mock
+
+from autotest_lib.client.cros.cellular import cellular_logging
+from autotest_lib.client.cros.cellular import cellular_system_error
+from autotest_lib.client.cros.cellular import prologix_scpi_driver
+from autotest_lib.client.cros.cellular import scpi
 
 log = cellular_logging.SetupCellularLogging('scpi_test')
 
@@ -184,7 +185,7 @@
     def _get_idns_and_verify(self, instruments, opc=False):
         """
         Get the idn string from all the instruments, and check that it
-        contains the desired substring. This is a quick sanity check only.
+        contains the desired substring. This is a quick confidence check only.
         """
         for instr in instruments:
             scpi_connection = self._open_prologix(instr, opc_on_stanza=opc)
diff --git a/client/cros/cellular/pseudo_modem.py b/client/cros/cellular/pseudo_modem.py
index d4de0c9..0d804cf 100755
--- a/client/cros/cellular/pseudo_modem.py
+++ b/client/cros/cellular/pseudo_modem.py
@@ -1,5 +1,4 @@
-#!/usr/bin/env python2
-
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -21,6 +20,10 @@
    * implement CDMA modems
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 from optparse import OptionParser
 import logging
 import os
@@ -39,8 +42,13 @@
 from dbus.types import Struct
 from dbus.types import UInt32
 import glib
-import gobject
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 import mm1
+from six.moves import range
 
 
 # Miscellaneous delays to simulate a modem
@@ -547,8 +555,8 @@
                          out_signature='ao')
     def List(self, *args, **kwargs):
         logging.info('Modem.Messaging: List: %s',
-                     ', '.join(self.smses.keys()))
-        return self.smses.keys()
+                     ', '.join(list(self.smses.keys())))
+        return list(self.smses.keys())
 
     @dbus.service.method(mm1.MODEM_MESSAGING_INTERFACE, in_signature='o',
                          out_signature='')
@@ -620,7 +628,7 @@
         """Removes a modem device from the list of managed devices."""
         logging.info('ModemManager: remove %s', device.name)
         self.devices.remove(device)
-        interfaces = device.InterfacesAndProperties().keys()
+        interfaces = list(device.InterfacesAndProperties().keys())
         self.InterfacesRemoved(device.path, interfaces)
 
     @dbus.service.method(mm1.OFDOM, out_signature='a{oa{sa{sv}}}')
@@ -629,7 +637,7 @@
         results = {}
         for device in self.devices:
             results[device.path] = device.InterfacesAndProperties()
-        logging.info('GetManagedObjects: %s', ', '.join(results.keys()))
+        logging.info('GetManagedObjects: %s', ', '.join(list(results.keys())))
         return results
 
     @dbus.service.signal(mm1.OFDOM, signature='oa{sa{sv}}')
@@ -678,7 +686,7 @@
     parser.add_option('-c', '--carrier', dest='carrier_name',
                       metavar='<carrier name>',
                       help='<carrier name> := %s' % ' | '.join(
-                          SIM.CARRIERS.keys()))
+                          list(SIM.CARRIERS.keys())))
     parser.add_option('-s', '--smscount', dest='sms_count',
                       default=0,
                       metavar='<smscount>',
@@ -723,12 +731,12 @@
                     sms = SMS(manager, name='/SMS/%s' % index, text=line)
                     modem.AddSMS(sms)
         else:
-            for index in xrange(int(options.sms_count)):
+            for index in range(int(options.sms_count)):
                 sms = SMS(manager, name='/SMS/%s' % index,
                           text=options.sms_text)
                 modem.AddSMS(sms)
 
-        mainloop = gobject.MainLoop()
+        mainloop = GObject.MainLoop()
 
         def SignalHandler(signum, frame):
             logging.info('Signal handler called with signal: %s', signum)
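A sketch of the GObject import fallback used above, with an extra guard so the snippet also runs where neither binding is installed, plus the six.moves.range replacement for xrange:

# Prefer the GObject Introspection binding; fall back to the legacy
# module, and finally to None purely so this sketch stays importable.
try:
    from gi.repository import GObject
except ImportError:
    try:
        import gobject as GObject
    except ImportError:
        GObject = None

from six.moves import range  # xrange on Python 2, range on Python 3


def sms_paths(count):
    return ['/SMS/%s' % index for index in range(count)]


print(sms_paths(3))  # ['/SMS/0', '/SMS/1', '/SMS/2']
if GObject is not None:
    mainloop = GObject.MainLoop()  # would be run until a signal handler quits it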
diff --git a/client/cros/cellular/pseudomodem/bearer.py b/client/cros/cellular/pseudomodem/bearer.py
index bea74dd..b522e9e 100644
--- a/client/cros/cellular/pseudomodem/bearer.py
+++ b/client/cros/cellular/pseudomodem/bearer.py
@@ -1,13 +1,14 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import dbus
 
-import dbus_std_ifaces
-import utils
-
 import common
+from autotest_lib.client.cros.cellular.pseudomodem import dbus_std_ifaces
+from autotest_lib.client.cros.cellular.pseudomodem import utils
+
 from autotest_lib.client.cros.cellular import mm1_constants
 from autotest_lib.client.cros.cellular import net_interface
 
diff --git a/client/cros/cellular/pseudomodem/cdma_activate_machine.py b/client/cros/cellular/pseudomodem/cdma_activate_machine.py
index 556f307..acde1cb 100644
--- a/client/cros/cellular/pseudomodem/cdma_activate_machine.py
+++ b/client/cros/cellular/pseudomodem/cdma_activate_machine.py
@@ -2,11 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import gobject
 import logging
 
-import pm_errors
-import state_machine
+import common
+
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
+from . import pm_errors
+from . import state_machine
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
@@ -56,7 +62,7 @@
         def _DelayedStep():
             self.Step()
             return False
-        gobject.timeout_add(self._step_delay * 1000, _DelayedStep)
+        GObject.timeout_add(self._step_delay * 1000, _DelayedStep)
 
     def _HandleInvalidState(self):
         state = self._modem.Get(mm1_constants.I_MODEM, 'State')
diff --git a/client/cros/cellular/pseudomodem/client.py b/client/cros/cellular/pseudomodem/client.py
index 519243d..6f5fdd8 100755
--- a/client/cros/cellular/pseudomodem/client.py
+++ b/client/cros/cellular/pseudomodem/client.py
@@ -1,17 +1,26 @@
-#!/usr/bin/env python2
 
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import cmd
 import dbus
 import dbus.types
 import dbus.exceptions
 
-import pm_constants
+import six
+
+from six.moves import input
 
 import common
+
+from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
+
 from autotest_lib.client.cros.cellular import mm1_constants
 
 class PseudoModemClient(cmd.Cmd):
@@ -39,7 +48,7 @@
         Starts the interactive shell.
 
         """
-        print '\nWelcome to the PseudoModemManager shell!\n'
+        print('\nWelcome to the PseudoModemManager shell!\n')
         self.cmdloop()
 
 
@@ -56,14 +65,14 @@
 
         """
         if args:
-            print '\nCommand "is_alive" expects no arguments.\n'
+            print('\nCommand "is_alive" expects no arguments.\n')
             return
-        print self._get_proxy().IsAlive(dbus_interface=pm_constants.I_TESTING)
+        print(self._get_proxy().IsAlive(dbus_interface=pm_constants.I_TESTING))
 
 
     def help_is_alive(self):
         """ Handles the 'help is_alive' command. """
-        print '\nChecks that pseudomodem child process is alive.\n'
+        print('\nChecks that pseudomodem child process is alive.\n')
 
 
     def do_properties(self, args):
@@ -74,26 +83,26 @@
 
         """
         if args:
-            print '\nCommand "properties" expects no arguments.\n'
+            print('\nCommand "properties" expects no arguments.\n')
             return
         try:
             props = self._get_proxy().GetAll(
                             pm_constants.I_TESTING,
                             dbus_interface=mm1_constants.I_PROPERTIES)
-            print '\nProperties: '
-            for k, v in props.iteritems():
-                print '   ' + k + ': ' + str(v)
-            print
+            print('\nProperties: ')
+            for k, v in six.iteritems(props):
+                print('   ' + k + ': ' + str(v))
+            print()
         except dbus.exceptions.DBusException as e:
-            print ('\nAn error occurred while communicating with '
+            print(('\nAn error occurred while communicating with '
                    'PseudoModemManager: ' + e.get_dbus_name() + ' - ' +
-                   e.message + '\n')
+                   e.message + '\n'))
         return False
 
 
     def help_properties(self):
         """Handles the 'help properties' command."""
-        print '\nReturns the properties under the testing interface.\n'
+        print('\nReturns the properties under the testing interface.\n')
 
 
     def do_sms(self, args):
@@ -116,23 +125,23 @@
         """
         arglist = args.split(' ', 1)
         if len(arglist) != 2:
-            print '\nMalformed SMS args: ' + args + '\n'
+            print('\nMalformed SMS args: ' + args + '\n')
             return
         try:
             self._get_proxy().ReceiveSms(
                     arglist[0], arglist[1],
                     dbus_interface=pm_constants.I_TESTING)
-            print '\nSMS sent!\n'
+            print('\nSMS sent!\n')
         except dbus.exceptions.DBusException as e:
-            print ('\nAn error occurred while communicating with '
+            print(('\nAn error occurred while communicating with '
                    'PseudoModemManager: ' + e.get_dbus_name() + ' - ' +
-                   e.message + '\n')
+                   e.message + '\n'))
         return False
 
 
     def help_sms(self):
         """Handles the 'help sms' command."""
-        print '\nUsage: sms <sender phone #> <message text>\n'
+        print('\nUsage: sms <sender phone #> <message text>\n')
 
 
     def do_set(self, args):
@@ -147,13 +156,13 @@
         """
         arglist = args.split(' ')
         if len(arglist) < 1:
-            print '\nInvalid command: set ' + args + '\n'
+            print('\nInvalid command: set ' + args + '\n')
             return
         if arglist[0] == 'pco':
             if len(arglist) == 1:
                 arglist.append('')
             elif len(arglist) != 2:
-                print '\nExpected: pco <pco-value>. Found: ' + args + '\n'
+                print('\nExpected: pco <pco-value>. Found: ' + args + '\n')
                 return
             pco_value = arglist[1]
             try:
@@ -164,13 +173,13 @@
                     signature='ubay')]
                 self._get_proxy().UpdatePco(
                         pco_list, dbus_interface=pm_constants.I_TESTING)
-                print '\nPCO value updated!\n'
+                print('\nPCO value updated!\n')
             except dbus.exceptions.DBusException as e:
-                print ('\nAn error occurred while communicating with '
+                print('\nAn error occurred while communicating with '
                        'PseudoModemManager: ' + e.get_dbus_name() + ' - ' +
-                       e.message + '\n')
+                       e.get_dbus_message() + '\n')
         else:
-            print '\nUnknown command: set ' + args + '\n'
+            print('\nUnknown command: set ' + args + '\n')
         return False
 
 
@@ -183,13 +192,13 @@
     def _get_state_machine(self, args):
         arglist = args.split()
         if len(arglist) != 1:
-            print '\nExpected one argument: Name of state machine\n'
+            print('\nExpected one argument: Name of state machine\n')
             return None
         try:
             return self._get_ism_proxy(arglist[0])
         except dbus.exceptions.DBusException as e:
-            print '\nNo such interactive state machine.\n'
-            print 'Error obtained: |%s|\n' % repr(e)
+            print('\nNo such interactive state machine.\n')
+            print('Error obtained: |%s|\n' % repr(e))
             return None
 
 
@@ -208,11 +217,11 @@
         try:
             is_waiting = ism.IsWaiting(
                     dbus_interface=pm_constants.I_TESTING_ISM)
-            print ('\nState machine is %swaiting.\n' %
-                   ('' if is_waiting else 'not '))
+            print('\nState machine is %swaiting.\n' %
+                  ('' if is_waiting else 'not '))
         except dbus.exceptions.DBusException as e:
-            print ('\nCould not determine if |%s| is waiting: |%s|\n' %
-                   (machine, repr(e)))
+            print('\nCould not determine if |%s| is waiting: |%s|\n' %
+                  (machine, repr(e)))
         return False
 
 
@@ -239,9 +248,9 @@
 
         try:
             success = ism.Advance(dbus_interface=pm_constants.I_TESTING_ISM)
-            print ('\nAdvanced!\n' if success else '\nCould not advance.\n')
+            print('\nAdvanced!\n' if success else '\nCould not advance.\n')
         except dbus.exceptions.DBusException as e:
-            print '\nError while advancing state machine: |%s|\n' % repr(e)
+            print('\nError while advancing state machine: |%s|\n' % repr(e))
         return False
 
 
@@ -261,14 +270,14 @@
 
         """
         if args:
-            print '\nCommand "exit" expects no arguments.\n'
+            print('\nCommand "exit" expects no arguments.\n')
             return
-        resp = raw_input('Are you sure? (yes/no): ')
+        resp = input('Are you sure? (yes/no): ')
         if resp == 'yes':
-            print '\nGoodbye!\n'
+            print('\nGoodbye!\n')
             return True
         if resp != 'no':
-            print '\nDid not understand: ' + resp + '\n'
+            print('\nDid not understand: ' + resp + '\n')
         return False
 
 
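For reference, a minimal standalone sketch of the Python 2/3 print/input idiom applied throughout the hunk above: print is used as a function and raw_input is reached through six.moves.input. This is illustrative only and not part of the patch; the prompt text is hypothetical.

    from __future__ import print_function

    from six.moves import input  # raw_input on Python 2, input on Python 3


    def confirm(prompt='Are you sure? (yes/no): '):
        """Return True only when the user answers 'yes'."""
        resp = input(prompt)
        if resp == 'yes':
            print('\nConfirmed.\n')
            return True
        if resp != 'no':
            print('\nDid not understand: ' + resp + '\n')
        return False
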
diff --git a/client/cros/cellular/pseudomodem/connect_cdma_machine.py b/client/cros/cellular/pseudomodem/connect_cdma_machine.py
index 4b844d0..9943443 100644
--- a/client/cros/cellular/pseudomodem/connect_cdma_machine.py
+++ b/client/cros/cellular/pseudomodem/connect_cdma_machine.py
@@ -4,8 +4,8 @@
 
 import logging
 
-import connect_machine
-import pm_errors
+from . import connect_machine
+from . import pm_errors
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
diff --git a/client/cros/cellular/pseudomodem/connect_machine.py b/client/cros/cellular/pseudomodem/connect_machine.py
index f0be2df..e428848 100644
--- a/client/cros/cellular/pseudomodem/connect_machine.py
+++ b/client/cros/cellular/pseudomodem/connect_machine.py
@@ -1,13 +1,19 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import subprocess
 
-import pm_errors
-import state_machine
+import six
 
+from autotest_lib.client.cros.cellular.pseudomodem import pm_errors
+from autotest_lib.client.cros.cellular.pseudomodem import state_machine
 from autotest_lib.client.cros.cellular import mm1_constants
 
 class ConnectMachine(state_machine.StateMachine):
@@ -117,9 +123,9 @@
         bearer = None
         bearer_path = None
         bearer_props = {}
-        for p, b in self._modem.bearers.iteritems():
+        for p, b in six.iteritems(self._modem.bearers):
             # assemble bearer props
-            for key, val in self.connect_props.iteritems():
+            for key, val in six.iteritems(self.connect_props):
                 if key in modem.ALLOWED_BEARER_PROPERTIES:
                     bearer_props[key] = val
             if (b.bearer_properties == bearer_props):
diff --git a/client/cros/cellular/pseudomodem/dbus_std_ifaces.py b/client/cros/cellular/pseudomodem/dbus_std_ifaces.py
index 847b4bb..4d64910 100644
--- a/client/cros/cellular/pseudomodem/dbus_std_ifaces.py
+++ b/client/cros/cellular/pseudomodem/dbus_std_ifaces.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,10 +16,9 @@
 import dbus.types
 import logging
 
-import pm_errors
-import utils
-
 from autotest_lib.client.cros.cellular import mm1_constants
+from autotest_lib.client.cros.cellular.pseudomodem import pm_errors
+from autotest_lib.client.cros.cellular.pseudomodem import utils
 
 class MMPropertyError(pm_errors.MMError):
     """
@@ -282,7 +282,7 @@
         """
         old_props = self._properties.get(interface, None)
         if old_props:
-            invalidated = old_props.keys()
+            invalidated = list(old_props.keys())
         else:
             invalidated = []
         self._properties[interface] = properties
@@ -365,7 +365,7 @@
         """
         if device in self.devices:
             self.devices.remove(device)
-        interfaces = device.GetInterfacesAndProperties().keys()
+        interfaces = list(device.GetInterfacesAndProperties().keys())
         self.InterfacesRemoved(device.path, interfaces)
         device.remove_from_connection()
 
@@ -386,7 +386,7 @@
             results[dbus.types.ObjectPath(device.path)] = (
                     device.GetInterfacesAndProperties())
         logging.info('%s: GetManagedObjects: %s', self.path,
-                     ', '.join(results.keys()))
+                     ', '.join(list(results.keys())))
         return results
 
 
diff --git a/client/cros/cellular/pseudomodem/disable_machine.py b/client/cros/cellular/pseudomodem/disable_machine.py
index f54cc8b..256c468 100644
--- a/client/cros/cellular/pseudomodem/disable_machine.py
+++ b/client/cros/cellular/pseudomodem/disable_machine.py
@@ -4,8 +4,10 @@
 
 import logging
 
-import pm_errors
-import state_machine
+from . import pm_errors
+from . import state_machine
+
+import common
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
diff --git a/client/cros/cellular/pseudomodem/disconnect_machine.py b/client/cros/cellular/pseudomodem/disconnect_machine.py
index 0367fdd..5966a19 100644
--- a/client/cros/cellular/pseudomodem/disconnect_machine.py
+++ b/client/cros/cellular/pseudomodem/disconnect_machine.py
@@ -4,8 +4,8 @@
 
 import logging
 
-import pm_errors
-import state_machine
+from . import pm_errors
+from . import state_machine
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
diff --git a/client/cros/cellular/pseudomodem/enable_machine.py b/client/cros/cellular/pseudomodem/enable_machine.py
index 9a8ea71..bbf72a6 100644
--- a/client/cros/cellular/pseudomodem/enable_machine.py
+++ b/client/cros/cellular/pseudomodem/enable_machine.py
@@ -4,8 +4,8 @@
 
 import logging
 
-import pm_errors
-import state_machine
+from . import pm_errors
+from . import state_machine
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
diff --git a/client/cros/cellular/pseudomodem/logging_setup.py b/client/cros/cellular/pseudomodem/logging_setup.py
index c3e186e..524d447 100644
--- a/client/cros/cellular/pseudomodem/logging_setup.py
+++ b/client/cros/cellular/pseudomodem/logging_setup.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/cellular/pseudomodem/messaging.py b/client/cros/cellular/pseudomodem/messaging.py
index 03d51b4..14db5e6 100644
--- a/client/cros/cellular/pseudomodem/messaging.py
+++ b/client/cros/cellular/pseudomodem/messaging.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,7 +6,7 @@
 import dbus
 import dbus.service
 
-import utils
+from autotest_lib.client.cros.cellular.pseudomodem import utils
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
diff --git a/client/cros/cellular/pseudomodem/modem.py b/client/cros/cellular/pseudomodem/modem.py
index 9a4b46f..11dd384 100644
--- a/client/cros/cellular/pseudomodem/modem.py
+++ b/client/cros/cellular/pseudomodem/modem.py
@@ -1,25 +1,37 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus
 import dbus.service
 import dbus.types
-import gobject
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 import logging
 import random
 
-import bearer
-import dbus_std_ifaces
-import messaging
-import modem_simple
-import pm_constants
-import pm_errors
-import sms_handler
-import state_machine_factory as smf
-import utils
+import six
 
 import common
+
+from autotest_lib.client.cros.cellular.pseudomodem import bearer
+from autotest_lib.client.cros.cellular.pseudomodem import dbus_std_ifaces
+from autotest_lib.client.cros.cellular.pseudomodem import messaging
+from autotest_lib.client.cros.cellular.pseudomodem import modem_simple
+from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
+from autotest_lib.client.cros.cellular.pseudomodem import pm_errors
+from autotest_lib.client.cros.cellular.pseudomodem import sms_handler
+from autotest_lib.client.cros.cellular.pseudomodem import state_machine_factory as smf
+from autotest_lib.client.cros.cellular.pseudomodem import utils
+
 from autotest_lib.client.cros.cellular import mm1_constants
 from autotest_lib.client.cros.cellular import net_interface
 
@@ -474,7 +486,7 @@
         @raises: MMCoreError, if one or more properties are invalid.
 
         """
-        for key in properties.iterkeys():
+        for key in six.iterkeys(properties):
             if key not in ALLOWED_BEARER_PROPERTIES:
                 raise pm_errors.MMCoreError(
                         pm_errors.MMCoreError.INVALID_ARGS,
@@ -674,10 +686,10 @@
                         self.Enable(True)
                     return False
 
-                gobject.timeout_add(1000, _DelayedEnable)
+                GObject.timeout_add(1000, _DelayedEnable)
                 return False
 
-            gobject.timeout_add(2000, _DelayedReappear)
+            GObject.timeout_add(2000, _DelayedReappear)
 
         def _ErrorCallback(error):
             raise error
@@ -686,7 +698,7 @@
                 mm1_constants.MM_MODEM_STATE_CONNECTED):
             self.Disconnect('/', _ResetFunc, _ErrorCallback)
         else:
-            gobject.idle_add(_ResetFunc)
+            GObject.idle_add(_ResetFunc)
 
 
     @utils.log_dbus_method()
@@ -858,6 +870,6 @@
 
         """
         bearers = dbus.Array(
-                [dbus.types.ObjectPath(key) for key in self.bearers.iterkeys()],
+                [dbus.types.ObjectPath(key) for key in six.iterkeys(self.bearers)],
                 signature='o')
         self.Set(mm1_constants.I_MODEM, 'Bearers', bearers)
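For reference, a minimal standalone sketch of the two migration idioms used in modem.py above: the gi.repository/gobject import fallback and six.iteritems for dictionary iteration. Illustrative only; the property dict and callback below are hypothetical.

    # AU tests use ToT client code, but ToT -3 client version.
    try:
        from gi.repository import GObject
    except ImportError:
        import gobject as GObject

    import six


    def log_and_schedule(props, callback):
        """Log every property, then run |callback| once the main loop is idle."""
        for key, value in six.iteritems(props):
            print('%s: %s' % (key, value))
        GObject.idle_add(callback)
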
diff --git a/client/cros/cellular/pseudomodem/modem_3gpp.py b/client/cros/cellular/pseudomodem/modem_3gpp.py
index 7092f1f..bb67c4d 100644
--- a/client/cros/cellular/pseudomodem/modem_3gpp.py
+++ b/client/cros/cellular/pseudomodem/modem_3gpp.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,10 +7,10 @@
 import dbus.types
 import logging
 
-import modem
-import pm_constants
-import pm_errors
-import utils
+from autotest_lib.client.cros.cellular.pseudomodem import modem
+from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
+from autotest_lib.client.cros.cellular.pseudomodem import pm_errors
+from autotest_lib.client.cros.cellular.pseudomodem import utils
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
diff --git a/client/cros/cellular/pseudomodem/modem_cdma.py b/client/cros/cellular/pseudomodem/modem_cdma.py
index a3150ae..9fd5974 100644
--- a/client/cros/cellular/pseudomodem/modem_cdma.py
+++ b/client/cros/cellular/pseudomodem/modem_cdma.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,9 +7,9 @@
 import dbus.types
 import logging
 
-import modem
-import pm_constants
-import utils
+from autotest_lib.client.cros.cellular.pseudomodem import modem
+from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
+from autotest_lib.client.cros.cellular.pseudomodem import utils
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
diff --git a/client/cros/cellular/pseudomodem/modemmanager.py b/client/cros/cellular/pseudomodem/modemmanager.py
index 43e9255..f242e93 100644
--- a/client/cros/cellular/pseudomodem/modemmanager.py
+++ b/client/cros/cellular/pseudomodem/modemmanager.py
@@ -5,8 +5,8 @@
 import dbus
 import dbus.service
 
-import dbus_std_ifaces
-import pm_errors
+from . import dbus_std_ifaces
+from . import pm_errors
 import utils
 
 from autotest_lib.client.cros.cellular import mm1_constants
diff --git a/client/cros/cellular/pseudomodem/pseudomodem.py b/client/cros/cellular/pseudomodem/pseudomodem.py
index 6470cfa..e5a9ff3 100755
--- a/client/cros/cellular/pseudomodem/pseudomodem.py
+++ b/client/cros/cellular/pseudomodem/pseudomodem.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,10 +8,18 @@
 # |pseudomodem_context| module that provides a way to launch pseudomodem in a
 # child process.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import argparse
 import dbus
 import dbus.mainloop.glib
-import gobject
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 import imp
 import json
 import logging
@@ -19,18 +27,19 @@
 import os.path
 import signal
 import sys
-import testing
 import traceback
-
-import logging_setup
-import modem_cdma
-import modem_3gpp
-import modemmanager
-import sim
-import state_machine_factory as smf
+from six.moves import range
 
 import common
+
 from autotest_lib.client.cros.cellular import mm1_constants
+from autotest_lib.client.cros.cellular.pseudomodem import testing
+from autotest_lib.client.cros.cellular.pseudomodem import logging_setup
+from autotest_lib.client.cros.cellular.pseudomodem import modem_cdma
+from autotest_lib.client.cros.cellular.pseudomodem import modem_3gpp
+from autotest_lib.client.cros.cellular.pseudomodem import modemmanager
+from autotest_lib.client.cros.cellular.pseudomodem import sim
+from autotest_lib.client.cros.cellular.pseudomodem import state_machine_factory as smf
 
 # Flags used by pseudomodem modules only that are defined below in
 # ParserArguments.
@@ -82,7 +91,7 @@
         context.
 
         """
-        self._mainloop = gobject.MainLoop()
+        self._mainloop = GObject.MainLoop()
         self._mainloop.run()
 
 
@@ -146,7 +155,7 @@
                         '00100' + str(i + 1),
                         network_available,
                         technology_gsm)
-                        for i in xrange(self._opts.roaming_networks)]
+                        for i in range(self._opts.roaming_networks)]
                 # TODO(armansito): Support "not activated" initialization option
                 # for 3GPP carriers.
                 self._modem = modem_3gpp.Modem3gpp(
@@ -425,7 +434,7 @@
 
     opts = parser.parse_args(arg_string)
 
-    # Extra sanity checks.
+    # Extra confidence checks on the parsed arguments.
     if opts.family == 'CDMA' and opts.roaming_networks > 0:
         raise argparse.ArgumentTypeError('CDMA networks do not support '
                                          'roaming networks.')
@@ -518,7 +527,7 @@
                       dump_file_path, str(e))
         return
 
-    dump_file.write(str(exc) + '\n')
+    dump_file.write((str(exc) + '\n').encode('utf-8'))
     dump_file.writelines(traceback.format_exc())
     dump_file.close()
 
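For reference on the encode() change above: under Python 3 a file handle opened in binary mode only accepts bytes, so the exception text is encoded explicitly. A small hedged sketch, assuming a binary file handle; the path below is hypothetical.

    import traceback


    def dump_exception(exc, path='/tmp/pseudomodem_crash.txt'):
        """Append the exception and its traceback to |path| as UTF-8 bytes."""
        with open(path, 'ab') as dump_file:
            dump_file.write((str(exc) + '\n').encode('utf-8'))
            dump_file.write(traceback.format_exc().encode('utf-8'))
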
diff --git a/client/cros/cellular/pseudomodem/pseudomodem_context.py b/client/cros/cellular/pseudomodem/pseudomodem_context.py
index e80d6dc..96c127c 100644
--- a/client/cros/cellular/pseudomodem/pseudomodem_context.py
+++ b/client/cros/cellular/pseudomodem/pseudomodem_context.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,26 +11,34 @@
 # command line. To avoid confusion, please use the shell script run_pseudomodem
 # to run pseudomodem from command line.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus
 import json
 import logging
 import os
 import pwd
 import signal
+import six
 import stat
 import sys
 import subprocess
 import tempfile
 
 import common
+
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros import service_stopper
 from autotest_lib.client.cros.cellular import mm1_constants
 from autotest_lib.client.cros.cellular import net_interface
 
-import pm_constants
-import pseudomodem
+
+from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
+from autotest_lib.client.cros.cellular.pseudomodem import pseudomodem
+
 
 # TODO(pprabhu) Move this to the right utils file.
 # pprabhu: I haven't yet figured out which of the myriad utils files I should
@@ -252,7 +261,7 @@
 
         """
         cmd_line_flags = []
-        for key, value in flags_map.iteritems():
+        for key, value in six.iteritems(flags_map):
             cmd_line_flags.append('--' + key)
             if key in self.TEST_OBJECT_ARG_FLAGS:
                 cmd_line_flags.append(self._DumpArgToFile(value))
diff --git a/client/cros/cellular/pseudomodem/register_cdma_machine.py b/client/cros/cellular/pseudomodem/register_cdma_machine.py
index 7b38a5b..64654a4 100644
--- a/client/cros/cellular/pseudomodem/register_cdma_machine.py
+++ b/client/cros/cellular/pseudomodem/register_cdma_machine.py
@@ -4,8 +4,8 @@
 
 import logging
 
-import pm_errors
-import register_machine
+from . import pm_errors
+from . import register_machine
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
diff --git a/client/cros/cellular/pseudomodem/register_machine.py b/client/cros/cellular/pseudomodem/register_machine.py
index 39f8f4e..149ebbc 100644
--- a/client/cros/cellular/pseudomodem/register_machine.py
+++ b/client/cros/cellular/pseudomodem/register_machine.py
@@ -4,8 +4,8 @@
 
 import logging
 
-import pm_errors
-import state_machine
+from . import pm_errors
+from . import state_machine
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
diff --git a/client/cros/cellular/pseudomodem/run_pseudomodem.py b/client/cros/cellular/pseudomodem/run_pseudomodem.py
index 822c526..5fa5c58 100755
--- a/client/cros/cellular/pseudomodem/run_pseudomodem.py
+++ b/client/cros/cellular/pseudomodem/run_pseudomodem.py
@@ -1,15 +1,19 @@
-#!/usr/bin/env python2
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import sys
 import time
 
-import client
-import pseudomodem
-import pseudomodem_context
+from autotest_lib.client.cros.cellular.pseudomodem import client
+from autotest_lib.client.cros.cellular.pseudomodem import pseudomodem
+from autotest_lib.client.cros.cellular.pseudomodem import pseudomodem_context
 
 def main():
     """ Entry function to run pseudomodem standalone. """
@@ -40,7 +44,7 @@
             while True:
                 time.sleep(30)
     except KeyboardInterrupt:
-        print 'Terminating on user request.'
+        print('Terminating on user request.')
     finally:
         # This is always hit, even when SIGINT is received.
         if pmc:
diff --git a/client/cros/cellular/pseudomodem/sim.py b/client/cros/cellular/pseudomodem/sim.py
index 2b3e6d8..07f6aff 100644
--- a/client/cros/cellular/pseudomodem/sim.py
+++ b/client/cros/cellular/pseudomodem/sim.py
@@ -1,17 +1,23 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Lint as: python2, python3
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus
 import dbus.service
 import logging
 
-import dbus_std_ifaces
-import pm_constants
-import pm_errors
-import utils
+import six
 
 from autotest_lib.client.cros.cellular import mm1_constants
+from autotest_lib.client.cros.cellular.pseudomodem import dbus_std_ifaces
+from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
+from autotest_lib.client.cros.cellular.pseudomodem import pm_errors
+from autotest_lib.client.cros.cellular.pseudomodem import utils
 
 class IncorrectPasswordError(pm_errors.MMMobileEquipmentError):
     """ Wrapper around MM_MOBILE_EQUIPMENT_ERROR_INCORRECT_PASSWORD. """
@@ -194,7 +200,7 @@
         retries = dbus.Dictionary(signature='uu')
         if not self._show_retries:
             return retries
-        for k, v in self._lock_data.iteritems():
+        for k, v in six.iteritems(self._lock_data):
             retries[dbus.types.UInt32(k)] = dbus.types.UInt32(v['retries'])
         return retries
 
diff --git a/client/cros/cellular/pseudomodem/sms.py b/client/cros/cellular/pseudomodem/sms.py
index 4290180..ea2f6e1 100644
--- a/client/cros/cellular/pseudomodem/sms.py
+++ b/client/cros/cellular/pseudomodem/sms.py
@@ -1,10 +1,17 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus
 
-import dbus_std_ifaces
+import six
+
+from autotest_lib.client.cros.cellular.pseudomodem import dbus_std_ifaces
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
@@ -72,8 +79,8 @@
                     'Properties: ' + repr(keyset.difference(params)) + ' are '
                     'not settable.')
 
-        for key, value in params.iteritems():
-            if value == 'default' and cls._props_template.has_key(key):
+        for key, value in six.iteritems(params):
+            if value == 'default' and key in cls._props_template:
                 cls._props_template.pop(key)
             else:
                 cls._props_template[key] = value
diff --git a/client/cros/cellular/pseudomodem/sms_handler.py b/client/cros/cellular/pseudomodem/sms_handler.py
index 974570d..ea5e6ac 100644
--- a/client/cros/cellular/pseudomodem/sms_handler.py
+++ b/client/cros/cellular/pseudomodem/sms_handler.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,7 +8,7 @@
 import dbus.types
 import logging
 
-import sms
+from autotest_lib.client.cros.cellular.pseudomodem import sms
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
@@ -89,7 +90,7 @@
         Returns a list of DBus object paths belonging to stored SMS messages.
 
         """
-        return self._messages.keys()
+        return list(self._messages.keys())
 
 
     def get_message_with_path(self, path):
diff --git a/client/cros/cellular/pseudomodem/state_machine.py b/client/cros/cellular/pseudomodem/state_machine.py
index 3b213d8..fb297c7 100644
--- a/client/cros/cellular/pseudomodem/state_machine.py
+++ b/client/cros/cellular/pseudomodem/state_machine.py
@@ -4,12 +4,16 @@
 
 import dbus
 import dbus.service
-import gobject
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 import logging
 
-import pm_errors
-import pm_constants
-import utils
+from . import pm_errors
+from . import pm_constants
+from . import utils
 
 from autotest_lib.client.cros.cellular import mm1_constants
 
@@ -189,7 +193,7 @@
         delays.
 
         """
-        gobject.idle_add(StateMachine.Step, self)
+        GObject.idle_add(StateMachine.Step, self)
 
 
     def _GetIsmObjectName(self):
diff --git a/client/cros/cellular/pseudomodem/state_machine_factory.py b/client/cros/cellular/pseudomodem/state_machine_factory.py
index f68e085..a57a1c4 100644
--- a/client/cros/cellular/pseudomodem/state_machine_factory.py
+++ b/client/cros/cellular/pseudomodem/state_machine_factory.py
@@ -2,15 +2,15 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import cdma_activate_machine
-import connect_cdma_machine
-import connect_machine
-import disable_machine
-import disconnect_machine
-import enable_machine
-import pm_constants
-import register_cdma_machine
-import register_machine
+from . import cdma_activate_machine
+from . import connect_cdma_machine
+from . import connect_machine
+from . import disable_machine
+from . import disconnect_machine
+from . import enable_machine
+from . import pm_constants
+from . import register_cdma_machine
+from . import register_machine
 
 class StateMachineFactory(object):
     """
diff --git a/client/cros/cellular/pseudomodem/testing.py b/client/cros/cellular/pseudomodem/testing.py
index 292f436..2ac200c 100644
--- a/client/cros/cellular/pseudomodem/testing.py
+++ b/client/cros/cellular/pseudomodem/testing.py
@@ -5,11 +5,10 @@
 import dbus
 import dbus.service
 
-import dbus_std_ifaces
-import pm_constants
-import utils
-
 from autotest_lib.client.cros.cellular import mm1_constants
+from autotest_lib.client.cros.cellular.pseudomodem import dbus_std_ifaces
+from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
+from autotest_lib.client.cros.cellular.pseudomodem import utils
 
 class Testing(dbus_std_ifaces.DBusProperties):
     """
diff --git a/client/cros/cellular/pseudomodem/utils.py b/client/cros/cellular/pseudomodem/utils.py
index 7584982..de5f83a 100644
--- a/client/cros/cellular/pseudomodem/utils.py
+++ b/client/cros/cellular/pseudomodem/utils.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/cros/cellular/scpi.py b/client/cros/cellular/scpi.py
index 6cbe41c..23ca27a 100644
--- a/client/cros/cellular/scpi.py
+++ b/client/cros/cellular/scpi.py
@@ -1,10 +1,10 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import cellular_logging
-import cellular_system_error
+from autotest_lib.client.cros.cellular import cellular_logging
+from autotest_lib.client.cros.cellular import cellular_system_error
 
 log = cellular_logging.SetupCellularLogging('scpi_driver')
 
diff --git a/client/cros/cellular/scpi_shell b/client/cros/cellular/scpi_shell
index 83ee800..1979d7b 100755
--- a/client/cros/cellular/scpi_shell
+++ b/client/cros/cellular/scpi_shell
@@ -1,16 +1,18 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Interact with a SCPI device, checking for errors each time."""
 
-import cellular_system_error
 import logging
-import prologix_scpi_driver
-import scpi
 import sys
 
+from autotest_lib.client.cros.cellular import cellular_system_error
+from autotest_lib.client.cros.cellular import prologix_scpi_driver
+from autotest_lib.client.cros.cellular import scpi
+
+
 try:
     [target] = sys.argv[1:]
 except ValueError:
diff --git a/client/cros/cellular/sms.py b/client/cros/cellular/sms.py
index f1996e7..f0dff7d 100644
--- a/client/cros/cellular/sms.py
+++ b/client/cros/cellular/sms.py
@@ -1,9 +1,16 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus, logging
+
+from six.moves import zip
+
 from autotest_lib.client.common_lib import error
 
 sample = {
@@ -82,7 +89,7 @@
 
     def _sms_regen_list(self):
         response = ''
-        keys = self.smsdict.keys()
+        keys = list(self.smsdict.keys())
         keys.sort()
         for i in keys:
             pdu = self.smsdict[i]
@@ -121,7 +128,7 @@
     def test_get(self, index, expected):
         try:
             sms = self.gsmsms.Get(index)
-        except dbus.DBusException, db:
+        except dbus.DBusException as db:
             if expected is not None:
                 raise
             return
@@ -141,7 +148,7 @@
             if expected_success == False:
                 raise error.TestFail('SMS.Delete(%d) succeeded unexpectedly' %
                                      index)
-        except dbus.DBusException, db:
+        except dbus.DBusException as db:
             if expected_success:
                 raise
 
diff --git a/client/cros/cellular/test_endpoint.py b/client/cros/cellular/test_endpoint.py
index dfc86f9..9b58c97 100755
--- a/client/cros/cellular/test_endpoint.py
+++ b/client/cros/cellular/test_endpoint.py
@@ -1,13 +1,16 @@
-#!/usr/bin/env python2
-
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import BaseHTTPServer
-import urlparse
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 
-class TestEndpointHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+import six.moves.BaseHTTPServer
+import six.moves.urllib.parse
+
+class TestEndpointHandler(six.moves.BaseHTTPServer.BaseHTTPRequestHandler):
     """
     A web server that is used by cellular tests.  It serves up the following
     pages:
@@ -24,12 +27,12 @@
 
     def do_GET(self):
         """Handles GET requests."""
-        url = urlparse.urlparse(self.path)
-        print 'URL: %s' % url.path
+        url = six.moves.urllib.parse.urlparse(self.path)
+        print('URL: %s' % url.path)
         if url.path == self.GENERATE_204_PATH:
             self.send_response(204)
         elif url.path == self.DOWNLOAD_URL_PATH:
-            parsed_query = urlparse.parse_qs(url.query)
+            parsed_query = six.moves.urllib.parse.parse_qs(url.query)
             if self.SIZE_PARAM not in parsed_query:
                 pass
             self.send_response(200)
@@ -37,13 +40,13 @@
             self.end_headers()
             self.wfile.write('0' * int(parsed_query[self.SIZE_PARAM][0]))
         else:
-            print 'Unsupported URL path: %s' % url.path
+            print('Unsupported URL path: %s' % url.path)
 
 
 def main():
     """Main entry point when this script is run from the command line."""
     try:
-        server = BaseHTTPServer.HTTPServer(('', 80), TestEndpointHandler)
+        server = six.moves.BaseHTTPServer.HTTPServer(('', 80), TestEndpointHandler)
         server.serve_forever()
     except KeyboardInterrupt:
         server.socket.close()
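For reference, a minimal sketch of the six.moves redirection used in test_endpoint.py above: BaseHTTPServer and urlparse moved to http.server and urllib.parse in Python 3, and six.moves resolves to the right module on either interpreter. The URL is only an example.

    from __future__ import print_function

    import six.moves.BaseHTTPServer      # http.server on Python 3
    import six.moves.urllib.parse        # urlparse on Python 2

    url = six.moves.urllib.parse.urlparse('http://localhost/download?size=10')
    query = six.moves.urllib.parse.parse_qs(url.query)
    print(url.path, query.get('size'))
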
diff --git a/client/cros/cellular/test_environment.py b/client/cros/cellular/test_environment.py
index e4a0ffe..d115880 100644
--- a/client/cros/cellular/test_environment.py
+++ b/client/cros/cellular/test_environment.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,16 +7,19 @@
 import dbus
 import logging
 import sys
+import time
 import traceback
 
 import common
+from autotest_lib.client.bin import local_host
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import backchannel
+from autotest_lib.client.common_lib.cros import crash_detector
 from autotest_lib.client.cros import upstart
 from autotest_lib.client.cros.cellular import mm
-from autotest_lib.client.cros.cellular.pseudomodem import pseudomodem_context
+from autotest_lib.client.cros.cellular import mm1_constants
 from autotest_lib.client.cros.networking import cellular_proxy
+from autotest_lib.client.cros.networking import mm1_proxy
 from autotest_lib.client.cros.networking import shill_context
 from autotest_lib.client.cros.networking import shill_proxy
 
@@ -24,7 +28,6 @@
     """Setup and verify cellular test environment.
 
     This context manager configures the following:
-        - Sets up backchannel.
         - Shuts down other devices except cellular.
         - Shill and MM logging is enabled appropriately for cellular.
         - Initializes members that tests should use to access test environment
@@ -33,7 +36,6 @@
           us.
 
     Then it verifies the following is valid:
-        - The backchannel is using an Ethernet device.
         - The SIM is inserted and valid.
         - There is one and only one modem in the device.
         - The modem is registered to the network.
@@ -52,14 +54,18 @@
 
     """
 
-    def __init__(self, use_backchannel=True, shutdown_other_devices=True,
-                 modem_pattern='', skip_modem_reset=False):
+    def __init__(self,
+                 shutdown_other_devices=True,
+                 modem_pattern='',
+                 skip_modem_reset=False,
+                 is_esim_test=False,
+                 enable_temp_containments=True):
         """
-        @param use_backchannel: Set up the backchannel that can be used to
-                communicate with the DUT.
         @param shutdown_other_devices: If True, shutdown all devices except
                 cellular.
         @param modem_pattern: Search string used when looking for the modem.
+        @param enable_temp_containments: Enable temporary containments to avoid
+                failures on tests with known problems.
 
         """
         # Tests should use this main loop instead of creating their own.
@@ -70,21 +76,25 @@
         self.modem_manager = None
         self.modem = None
         self.modem_path = None
-        self._backchannel = None
 
         self._modem_pattern = modem_pattern
         self._skip_modem_reset = skip_modem_reset
+        self._is_esim_test = is_esim_test
+        self._enable_temp_containments = enable_temp_containments
+        self._system_service_order = ''
+        self._test_service_order = 'cellular,ethernet'
 
         self._nested = None
         self._context_managers = []
-        if use_backchannel:
-            self._backchannel = backchannel.Backchannel()
-            self._context_managers.append(self._backchannel)
+        self.detect_crash = crash_detector.CrashDetector(
+                local_host.LocalHost())
+        self.detect_crash.remove_crash_files()
         if shutdown_other_devices:
             self._context_managers.append(
-                    shill_context.AllowedTechnologiesContext(
-                            [shill_proxy.ShillProxy.TECHNOLOGY_CELLULAR]))
-
+                    shill_context.AllowedTechnologiesContext([
+                            shill_proxy.ShillProxy.TECHNOLOGY_CELLULAR,
+                            shill_proxy.ShillProxy.TECHNOLOGY_ETHERNET
+                    ]))
 
     @contextlib.contextmanager
     def _disable_shill_autoconnect(self):
@@ -92,17 +102,48 @@
         yield
         self._enable_shill_cellular_autoconnect(True)
 
-
     def __enter__(self):
         try:
+            # Wait for system daemons to stabilize before beginning the test.
+            # Modemfwd, Chrome, Shill and Hermes might be active before the test
+            # begins, and interrupting them abruptly during test setup might
+            # lead to flaky tests. The modem might also appear/disappear
+            # multiple times during this period. Ideally, we would wait for a
+            # green signal from these daemons before performing test setup.
+            with open('/proc/uptime') as uptime_file:
+                uptime = float(uptime_file.readline().split()[0])
+            if uptime < 60:
+                logging.info(
+                        "Waiting %.1f seconds to reach uptime of 1 minute before "
+                        "starting test", 60 - uptime)
+                time.sleep(60 - uptime)
+
             if upstart.has_service('modemfwd') and upstart.is_running('modemfwd'):
+                # Due to b/179796133, stopping modemfwd right after it was
+                # started by a previous test, can wedge the modem. In many
+                # devices, a ~1 second delay solves the problem.
+                time.sleep(4)
                 upstart.stop_job('modemfwd')
             # Temporarily disable shill autoconnect to cellular service while
             # the test environment is setup to prevent a race condition
             # between disconnecting the modem in _verify_cellular_service()
             # and shill autoconnect.
             with self._disable_shill_autoconnect():
-                self._nested = contextlib.nested(*self._context_managers)
+                try:
+                    from contextlib import nested # Python 2
+                except ImportError:
+                    from contextlib import ExitStack, contextmanager
+
+                    @contextmanager
+                    def nested(*contexts):
+                        """ Implementation of nested for python3"""
+                        with ExitStack() as stack:
+                            for ctx in contexts:
+                                stack.enter_context(ctx)
+                            yield contexts
+
+                self._nested = nested(*self._context_managers)
+
                 self._nested.__enter__()
 
                 self._initialize_shill()
@@ -115,8 +156,8 @@
 
                 self._setup_logging()
 
-                self._verify_backchannel()
-                self._wait_for_modem_registration()
+                if not self._is_esim_test:
+                    self._wait_for_modem_registration()
                 self._verify_cellular_service()
 
                 return self
@@ -125,7 +166,7 @@
             except_type, except_value, except_traceback = sys.exc_info()
             lines = traceback.format_exception(except_type, except_value,
                                                except_traceback)
-            logging.error('Error during test initialization:\n' +
+            logging.error('Error during test initialization:\n%s',
                           ''.join(lines))
             self.__exit__(*sys.exc_info())
             raise error.TestError('INIT_ERROR: %s' % str(e))
@@ -133,10 +174,34 @@
             self.__exit__(*sys.exc_info())
             raise
 
-
     def __exit__(self, exception, value, traceback):
-        if upstart.has_service('modemfwd'):
-            upstart.restart_job('modemfwd')
+        exception_on_restore_state = None
+        try:
+            self._restore_state()
+        except Exception as ex:
+            # Exceptions thrown by _restore_state() should be ignored if a
+            # previous exception exist, otherwise the root cause of the test
+            # failure will be overwritten by the clean up error in
+            # _restore_state, and that is not useful.
+            if exception is None:
+                exception_on_restore_state = ex
+
+        # If a test fails and a crash is detected, the crash error takes
+        # priority over the previous failure.
+        crash_files = self.detect_crash.get_new_crash_files()
+        if any(cf for cf in crash_files if any(pr in cf for pr in [
+                'ModemManager', 'shill', 'qmi', 'mbim', 'hermes', 'modemfwd'
+        ])):
+            logging.info(
+                    'A crash was encountered. '
+                    'Overriding the previous error: %s', value)
+            raise error.TestError(
+                    'One or more daemon crashes were detected. '
+                    'See crash dumps: {}'.format(crash_files))
+
+        if exception_on_restore_state is not None:
+            raise exception_on_restore_state
+
         if self._nested:
             return self._nested.__exit__(exception, value, traceback)
         self.shill = None
@@ -144,6 +209,13 @@
         self.modem = None
         self.modem_path = None
 
+    def _restore_state(self):
+        """Try to restore the test environment to a good state.
+        """
+        if upstart.has_service('modemfwd'):
+            upstart.restart_job('modemfwd')
+        if self.shill:
+            self._set_service_order(self._system_service_order)
 
     def _get_shill_cellular_device_object(self):
         return utils.poll_for_condition(
@@ -152,6 +224,24 @@
                                       'Is the modem plugged in?'),
             timeout=shill_proxy.ShillProxy.DEVICE_ENUMERATION_TIMEOUT)
 
+    def _get_service_order(self):
+        """Get the shill service order.
+
+        @return string service order on success, None otherwise.
+
+        """
+        return str(self.shill.manager.GetServiceOrder())
+
+    def _set_service_order(self, order):
+        """Set the shill service order.
+
+        @param order string comma-delimited service order
+        (eg. 'cellular,ethernet')
+        @return bool True on success, False otherwise.
+
+        """
+        self.shill.manager.SetServiceOrder(dbus.String(order))
+        return True
 
     def _enable_modem(self):
         modem_device = self._get_shill_cellular_device_object()
@@ -165,17 +255,15 @@
         utils.poll_for_condition(
             lambda: modem_device.GetProperties()['Powered'],
             exception=error.TestError(
-                    'Failed to enable modem.'),
+                'Failed to enable modem.'),
             timeout=shill_proxy.ShillProxy.DEVICE_ENABLE_DISABLE_TIMEOUT)
 
-
     def _enable_shill_cellular_autoconnect(self, enable):
         shill = cellular_proxy.CellularProxy.get_proxy(self.bus)
         shill.manager.SetProperty(
-                shill_proxy.ShillProxy.
-                MANAGER_PROPERTY_NO_AUTOCONNECT_TECHNOLOGIES,
-                '' if enable else 'cellular')
-
+            shill_proxy.ShillProxy.
+            MANAGER_PROPERTY_NO_AUTOCONNECT_TECHNOLOGIES,
+            '' if enable else 'cellular')
 
     def _is_unsupported_error(self, e):
         return (e.get_dbus_name() ==
@@ -184,7 +272,6 @@
                  shill_proxy.ShillProxy.ERROR_FAILURE and
                  'operation not supported' in e.get_dbus_message()))
 
-
     def _reset_modem(self):
         modem_device = self._get_shill_cellular_device_object()
         try:
@@ -194,7 +281,6 @@
             if not self._is_unsupported_error(e):
                 raise
 
-
     def _initialize_shill(self):
         """Get access to shill."""
         # CellularProxy.get_proxy() checks to see if shill is running and
@@ -203,6 +289,8 @@
         if self.shill is None:
             raise error.TestError('Cannot connect to shill, is shill running?')
 
+        self._system_service_order = self._get_service_order()
+        self._set_service_order(self._test_service_order)
 
     def _initialize_modem_components(self):
         """Reset the modem and get access to modem components."""
@@ -215,18 +303,16 @@
         # PickOneModem() makes sure there's a modem manager and that there is
         # one and only one modem.
         self.modem_manager, self.modem_path = \
-                mm.PickOneModem(self._modem_pattern)
+            mm.PickOneModem(self._modem_pattern)
         self.modem = self.modem_manager.GetModem(self.modem_path)
         if self.modem is None:
             raise error.TestError('Cannot get modem object at %s.' %
                                   self.modem_path)
 
-
     def _setup_logging(self):
         self.shill.set_logging_for_cellular_test()
         self.modem_manager.SetDebugLogging()
 
-
     def _verify_sim(self):
         """Verify SIM is valid.
 
@@ -235,12 +321,17 @@
         @raise error.TestError if SIM does not exist or is locked.
 
         """
+        # check modem SIM slot and properties and switch slot as needed
+        modem_proxy = self._check_for_modem_with_sim()
+        if modem_proxy is None:
+            raise error.TestError('There is no modem with a non-empty SIM path.')
+
         modem_device = self._get_shill_cellular_device_object()
         props = modem_device.GetProperties()
 
         # No SIM in CDMA modems.
         family = props[
-                cellular_proxy.CellularProxy.DEVICE_PROPERTY_TECHNOLOGY_FAMILY]
+            cellular_proxy.CellularProxy.DEVICE_PROPERTY_TECHNOLOGY_FAMILY]
         if (family ==
                 cellular_proxy.CellularProxy.
                 DEVICE_PROPERTY_TECHNOLOGY_FAMILY_CDMA):
@@ -252,33 +343,74 @@
 
         # Make sure SIM is not locked.
         lock_status = props.get(
-                cellular_proxy.CellularProxy.DEVICE_PROPERTY_SIM_LOCK_STATUS,
-                None)
+            cellular_proxy.CellularProxy.DEVICE_PROPERTY_SIM_LOCK_STATUS,
+            None)
         if lock_status is None:
             raise error.TestError('Failed to read SIM lock status.')
         locked = lock_status.get(
-                cellular_proxy.CellularProxy.PROPERTY_KEY_SIM_LOCK_ENABLED,
-                None)
+            cellular_proxy.CellularProxy.PROPERTY_KEY_SIM_LOCK_ENABLED,
+            None)
         if locked is None:
             raise error.TestError('Failed to read SIM LockEnabled status.')
         elif locked:
             raise error.TestError(
-                    'SIM is locked, test requires an unlocked SIM.')
+                'SIM is locked, test requires an unlocked SIM.')
 
+    def _check_for_modem_with_sim(self):
+        """
+        Make sure the modem has an active SIM and a non-empty SIM path.
 
-    def _verify_backchannel(self):
-        """Verify backchannel is on an ethernet device.
+        Switches slots if needed to get a non-empty SIM path and active slot.
 
-        @raise error.TestError if backchannel is not on an ethernet device.
+        @return The active modem proxy on success, None otherwise.
 
         """
-        if self._backchannel is None:
-            return
+        mm_proxy = mm1_proxy.ModemManager1Proxy.get_proxy()
+        if mm_proxy is None:
+            raise error.TestError('Modem manager is not initialized')
 
-        if not self._backchannel.is_using_ethernet():
-            raise error.TestError('An ethernet connection is required between '
-                                  'the test server and the device under test.')
+        modem_proxy = mm_proxy.wait_for_modem(mm1_constants.MM_MODEM_POLL_TIME)
+        if modem_proxy is None:
+            raise error.TestError('Modem not initialized')
 
+        primary_slot = modem_proxy.get_primary_sim_slot()
+        # Get SIM path from modem SIM properties
+        modem_props = modem_proxy.properties(mm1_constants.I_MODEM)
+        sim_path = modem_props['Sim']
+
+        logging.info('Device SIM values: path: %s, primary slot: %d',
+                     sim_path, primary_slot)
+
+        def is_usable_sim(path):
+            """Check if sim at path can be used to establish a connection"""
+            if path == mm1_constants.MM_EMPTY_SLOT_PATH:
+                return False
+            sim_proxy = modem_proxy.get_sim_at_path(path)
+            sim_props = sim_proxy.properties()
+            return sim_props[
+                    'EsimStatus'] != mm1_constants.MM_SIM_ESIM_STATUS_NO_PROFILES
+
+        # Check current SIM path value and status
+        if is_usable_sim(sim_path):
+            return modem_proxy
+
+        slots = modem_props['SimSlots']
+        logging.info('DUT not in expected state; '
+                     'current SIM path: %s, slots: %s', sim_path, slots)
+
+        for idx, path in enumerate(slots):
+            if not is_usable_sim(path):
+                continue
+            logging.info('Primary slot does not have a SIM, '
+                        'switching slot to %d', idx+1)
+
+            if (primary_slot != idx + 1):
+                logging.info('setting slot:%d path:%s', idx+1, path)
+                modem_proxy.set_primary_slot(idx+1)
+                modem_proxy = \
+                    mm_proxy.wait_for_modem(mm1_constants.MM_MODEM_POLL_TIME)
+                return modem_proxy
+        return None
 
     def _wait_for_modem_registration(self):
         """Wait for the modem to register with the network.
@@ -289,10 +421,9 @@
         utils.poll_for_condition(
             self.modem.ModemIsRegistered,
             exception=error.TestError(
-                    'Modem failed to register with the network.'),
+                'Modem failed to register with the network.'),
             timeout=cellular_proxy.CellularProxy.SERVICE_REGISTRATION_TIMEOUT)
 
-
     def _verify_cellular_service(self):
         """Make sure a cellular service exists.
 
@@ -311,26 +442,30 @@
                     cellular_proxy.CellularProxy.ERROR_NOT_CONNECTED):
                 raise
         success, state, _ = self.shill.wait_for_property_in(
-                service,
-                cellular_proxy.CellularProxy.SERVICE_PROPERTY_STATE,
-                ('idle',),
-                cellular_proxy.CellularProxy.SERVICE_DISCONNECT_TIMEOUT)
+            service,
+            cellular_proxy.CellularProxy.SERVICE_PROPERTY_STATE,
+            ('idle',),
+            cellular_proxy.CellularProxy.SERVICE_DISCONNECT_TIMEOUT)
         if not success:
             raise error.TestError(
-                    'Cellular service needs to start in the "idle" state. '
-                    'Current state is "%s". '
-                    'Modem disconnect may have failed.' %
-                    state)
+                'Cellular service needs to start in the "idle" state. '
+                'Current state is "%s". '
+                'Modem disconnect may have failed.' %
+                state)
 
 
 class CellularOTATestEnvironment(CellularTestEnvironment):
     """Setup and verify cellular over-the-air (OTA) test environment. """
+
     def __init__(self, **kwargs):
         super(CellularOTATestEnvironment, self).__init__(**kwargs)
 
-
+# Pseudomodem tests were disabled in b/180627893; cleanup of all pseudomodem-
+# related files and imports is tracked in b/205769777.
+'''
 class CellularPseudoMMTestEnvironment(CellularTestEnvironment):
     """Setup and verify cellular pseudomodem test environment. """
+
     def __init__(self, pseudomm_args=None, **kwargs):
         """
         @param pseudomm_args: Tuple of arguments passed to the pseudomodem, see
@@ -341,5 +476,14 @@
         kwargs["skip_modem_reset"] = True
         super(CellularPseudoMMTestEnvironment, self).__init__(**kwargs)
         self._context_managers.append(
-                pseudomodem_context.PseudoModemManagerContext(
-                        True, bus=self.bus, *pseudomm_args))
+            pseudomodem_context.PseudoModemManagerContext(
+                True, bus=self.bus, *pseudomm_args))
+'''
+
+class CellularESIMTestEnvironment(CellularTestEnvironment):
+    """Setup cellular eSIM test environment. """
+
+    def __init__(self, esim_arguments=None, **kwargs):
+        kwargs["skip_modem_reset"] = True
+        kwargs["is_esim_test"] = True
+        super(CellularESIMTestEnvironment, self).__init__(**kwargs)
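For reference, a standalone sketch of the contextlib.nested replacement introduced in test_environment.py above; contextlib.nested was removed in Python 3, so an ExitStack-based shim is used instead. The example context managers below are hypothetical.

    from __future__ import print_function

    try:
        from contextlib import nested  # Python 2
    except ImportError:
        from contextlib import ExitStack, contextmanager

        @contextmanager
        def nested(*contexts):
            """Enter each context manager in order; exit them in reverse."""
            with ExitStack() as stack:
                for ctx in contexts:
                    stack.enter_context(ctx)
                yield contexts

    with nested(open(__file__), open(__file__)) as (first, second):
        print(first.readline().strip(), second.readline().strip())
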
diff --git a/client/cros/chameleon/audio_test_utils.py b/client/cros/chameleon/audio_test_utils.py
index 8da519f..a2857f5 100644
--- a/client/cros/chameleon/audio_test_utils.py
+++ b/client/cros/chameleon/audio_test_utils.py
@@ -807,7 +807,7 @@
                                            input_type=None):
     """Check the target types are available, and set them to be active nodes.
 
-    @param audio_facade: An AudioFacadeNative or AudioFacadeAdapter object.
+    @param audio_facade: An AudioFacadeLocal or AudioFacadeAdapter object.
     @output_type: An output node type defined in cras_utils.CRAS_NODE_TYPES.
                  None to skip.
     @input_type: An input node type defined in cras_utils.CRAS_NODE_TYPES.
diff --git a/client/cros/chameleon/audio_widget.py b/client/cros/chameleon/audio_widget.py
index 0d80e79..5416907 100644
--- a/client/cros/chameleon/audio_widget.py
+++ b/client/cros/chameleon/audio_widget.py
@@ -771,7 +771,7 @@
 
         with tempfile.NamedTemporaryFile(prefix='recorded_') as f:
             self._audio_facade.get_recorded_file(remote_path, f.name)
-            return open(f.name).read()
+            return open(f.name, "rb").read()
 
 
 class CrosUSBInputWidgetHandler(CrosInputWidgetHandler):
diff --git a/client/cros/chameleon/avsync_probe_utils.py b/client/cros/chameleon/avsync_probe_utils.py
index 9683d93..e66ce01 100644
--- a/client/cros/chameleon/avsync_probe_utils.py
+++ b/client/cros/chameleon/avsync_probe_utils.py
@@ -417,13 +417,13 @@
                 # Treat the frame as corrupted frame if the frame duration is
                 # less than 2 video frame duration.
                 if v[TIME_DIFF_INDEX] < 2 * self._video_duration:
-                    logging.warn('Corrupted frame near %s', str(v))
+                    logging.warning('Corrupted frame near %s', str(v))
                     # Correct the code.
                     code = current_code + 1
                     corrupted_frame_count += 1
                     frame_diff = 1
                 else:
-                    logging.warn('Dropped frame near %s', str(v))
+                    logging.warning('Dropped frame near %s', str(v))
                     dropped_frame_count += (frame_diff - 1)
 
             cumulative_frame_count += frame_diff
diff --git a/client/cros/chameleon/chameleon.py b/client/cros/chameleon/chameleon.py
index 873fd6c..70d6287 100644
--- a/client/cros/chameleon/chameleon.py
+++ b/client/cros/chameleon/chameleon.py
@@ -8,6 +8,7 @@
 from __future__ import print_function
 import atexit
 import six.moves.http_client
+import six
 import logging
 import os
 import socket
@@ -483,6 +484,12 @@
         """
         return self._chameleond_proxy.bluetooth_keyboard
 
+    def get_ble_fast_pair(self):
+        """Gets the emulated Bluetooth Fast Pair device on Chameleon.
+
+        @return: A RaspiBLEFastPair object.
+        """
+        return self._chameleond_proxy.ble_fast_pair
 
     def get_bluetooth_ref_controller(self):
         """Gets the emulated BluetoothRefController.
@@ -508,14 +515,6 @@
         return self._chameleond_proxy.motor_board
 
 
-    def get_usb_printer(self):
-        """Gets the printer device on Chameleon.
-
-        @return: A printer object.
-        """
-        return self._chameleond_proxy.printer
-
-
     def get_mac_address(self):
         """Gets the MAC address of Chameleon.
 
@@ -835,7 +834,12 @@
 
         @return An Image object.
         """
-        return Image.fromstring(
+        if six.PY2:
+            return Image.fromstring(
+                    'RGB',
+                    self.get_resolution(),
+                    self.chameleond_proxy.DumpPixels(self.port_id).data)
+        return Image.frombytes(
                 'RGB',
                 self.get_resolution(),
                 self.chameleond_proxy.DumpPixels(self.port_id).data)
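
For context on the hunk above: PIL's Image.fromstring() was removed in newer
Pillow releases, so only the Python 2 path can still rely on it, while
Image.frombytes() is the Python 3 replacement. A minimal stand-alone sketch of
the same branch, with resolution and pixel_data as hypothetical inputs:

    import six
    from PIL import Image

    def pixels_to_image(resolution, pixel_data):
        """Build an RGB image from a raw pixel buffer on Python 2 or 3."""
        if six.PY2:
            # Old PIL/Pillow constructor, removed in current Pillow releases.
            return Image.fromstring('RGB', resolution, pixel_data)
        # Pillow's replacement constructor; expects a bytes object.
        return Image.frombytes('RGB', resolution, pixel_data)

    # e.g. pixels_to_image((1920, 1080), raw_rgb_bytes_from_chameleon)
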
diff --git a/client/cros/chameleon/chameleon_port_finder.py b/client/cros/chameleon/chameleon_port_finder.py
index c32efc9..c06fb14 100755
--- a/client/cros/chameleon/chameleon_port_finder.py
+++ b/client/cros/chameleon/chameleon_port_finder.py
@@ -184,7 +184,7 @@
                         old_value=False)
 
                 if not output:
-                    logging.warn('Maybe flaky that no display detected. Retry.')
+                    logging.warning('Maybe flaky that no display detected. Retry.')
                     video_port.unplug()
                     time.sleep(self.REPLUG_DELAY_SEC)
                     video_port.plug()
@@ -204,7 +204,7 @@
                        failed_ports.append(video_port)
                     logging.error('CrOS failed to see any external display')
                     if not video_stable:
-                        logging.warn('Chameleon timed out waiting CrOS video')
+                        logging.warning('Chameleon timed out waiting CrOS video')
             finally:
                 # Unplug the port not to interfere with other tests.
                 video_port.unplug()
diff --git a/client/cros/chameleon/chameleon_screen_test.py b/client/cros/chameleon/chameleon_screen_test.py
index 1f98cb5..70cbba0 100644
--- a/client/cros/chameleon/chameleon_screen_test.py
+++ b/client/cros/chameleon/chameleon_screen_test.py
@@ -166,7 +166,8 @@
                     retry_count = retry_count - 1
                     try:
                         self.load_test_image(test_image_size)
-                        error = self.test_screen(expected_resolution, test_mirrored)
+                        # TODO(kalin): Re-enable test_screen after b/232143841.
+                        error = None  # self.test_screen(expected_resolution, test_mirrored)
                         if error is None:
                             return error
                         elif retry_count > 0:
diff --git a/client/cros/chameleon/chameleon_stream_server.py b/client/cros/chameleon/chameleon_stream_server.py
index 0984ee1..981984c 100644
--- a/client/cros/chameleon/chameleon_stream_server.py
+++ b/client/cros/chameleon/chameleon_stream_server.py
@@ -178,7 +178,7 @@
             content += recv_content
 
         if error_code != ErrorCode.OK:
-            logging.warn('Receive error code %d, %r', error_code, content)
+            logging.warning('Receive error code %d, %r', error_code, content)
 
         return (message_type, error_code, length, content)
 
diff --git a/client/cros/chameleon/chameleon_video_capturer.py b/client/cros/chameleon/chameleon_video_capturer.py
index cb0981f..9b13233 100644
--- a/client/cros/chameleon/chameleon_video_capturer.py
+++ b/client/cros/chameleon/chameleon_video_capturer.py
@@ -48,7 +48,7 @@
         Captures frames upto max_frame_count, saves the image with filename
         same as the index of the frame in the frame buffer.
 
-        @param player: object, VimeoPlayer or NativeHTML5Player
+        @param player: object, VimeoPlayer or BuiltinHtml5Player
         @param max_frame_count: int, maximum total number of frames to capture.
         @param box: int tuple, left, upper, right, lower pixel coordinates.
                     Defines the rectangular boundary within which to compare.
@@ -68,7 +68,7 @@
         number of frames captured is equal or more than max_frame_count. Does
         save the images, gets only the checksums.
 
-        @param player: VimeoPlayer or NativeHTML5Player.
+        @param player: VimeoPlayer or BuiltinHtml5Player.
         @param max_frame_count: int, the maximum number of frames we want.
         @param box: int tuple, left, upper, right, lower pixel coordinates.
                     Defines the rectangular boundary within which to compare.
diff --git a/client/cros/chameleon/edid.py b/client/cros/chameleon/edid.py
index 8ab8c3d..9de0666 100644
--- a/client/cros/chameleon/edid.py
+++ b/client/cros/chameleon/edid.py
@@ -14,6 +14,7 @@
 import six
 from six.moves import map
 from six.moves import range
+import sys
 
 
 # TODO: This is a quick workaround; some of our arm devices so far only
@@ -78,8 +79,12 @@
 
         for start in range(0, data_len, Edid.BLOCK_SIZE):
             # Each block (128-byte) has a checksum at the last byte.
-            checksum = reduce(operator.add,
+            if sys.version_info.major < 3:
+                checksum = reduce(operator.add,
                               list(map(ord, data[start:start+Edid.BLOCK_SIZE])))
+            else:
+                checksum = reduce(operator.add,
+                                  list(data[start:start+Edid.BLOCK_SIZE]))
             if checksum % 256 != 0:
                 logging.debug('Wrong checksum in the block %d of EDID',
                               start // Edid.BLOCK_SIZE)
@@ -103,7 +108,7 @@
             data = reduce(operator.add,
                           [codecs.decode(s.strip(), 'hex') for s in open(filename).readlines()])
         else:
-            data = open(filename).read()
+            data = open(filename, 'rb').read()
         return cls(data, skip_verify)
 
 
diff --git a/client/cros/chameleon/screen_capture.py b/client/cros/chameleon/screen_capture.py
index 440bad1..5f8fdeb 100644
--- a/client/cros/chameleon/screen_capture.py
+++ b/client/cros/chameleon/screen_capture.py
@@ -86,7 +86,7 @@
 
 
 class CrosExternalScreenCapturer(object):
-    """A class to capture the external screen on Chrome OS.
+    """A class to capture the external screen on ChromeOS.
 
     Calling its member method capture() captures the screen.
 
@@ -108,7 +108,7 @@
 
 
 class CrosInternalScreenCapturer(object):
-    """A class to capture the internal screen on Chrome OS.
+    """A class to capture the internal screen on ChromeOS.
 
     Calling its member method capture() captures the screen.
 
@@ -130,7 +130,7 @@
 
 
 class CrosCalibrationImageCapturer(object):
-    """A class to capture the calibration image on Chrome OS.
+    """A class to capture the calibration image on ChromeOS.
 
     Calling its member method capture() captures the image.
 
diff --git a/client/cros/chameleon/screen_utility_factory.py b/client/cros/chameleon/screen_utility_factory.py
index 4e4f945..94cfb82 100644
--- a/client/cros/chameleon/screen_utility_factory.py
+++ b/client/cros/chameleon/screen_utility_factory.py
@@ -57,7 +57,7 @@
 
 
     def create_cros_screen_capturer(self, internal_screen=False):
-        """Creates an Chrome OS screen capturer.
+        """Creates an ChromeOS screen capturer.
 
         @param internal_screen: True to compare the internal screen on CrOS.
         """
diff --git a/client/cros/chrome_binary_test.py b/client/cros/chrome_binary_test.py
index 9300066..212845a 100755
--- a/client/cros/chrome_binary_test.py
+++ b/client/cros/chrome_binary_test.py
@@ -1,8 +1,8 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import logging
 import os
 import re
 import shutil
@@ -10,6 +10,7 @@
 import xml.etree.ElementTree as ET
 
 import common
+
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import file_utils
@@ -26,32 +27,12 @@
     CHROME_SANDBOX = '/opt/google/chrome/chrome-sandbox'
     COMPONENT_LIB = '/opt/google/chrome/lib'
     home_dir = None
-    cr_source_dir = None
-    test_binary_dir = None
-
-    def setup(self):
-        """
-        Sets up a test.
-        """
-        self.job.setup_dep([self.CHROME_TEST_DEP])
+    test_binary_dir = '/usr/local/libexec/chrome-binary-tests'
 
     def initialize(self):
         """
         Initializes members after setup().
         """
-        test_dep_dir = os.path.join(self.autodir, 'deps', self.CHROME_TEST_DEP)
-        self.job.install_pkg(self.CHROME_TEST_DEP, 'dep', test_dep_dir)
-
-        self.cr_source_dir = '%s/test_src' % test_dep_dir
-        self.test_binary_dir = '%s/out/Release' % self.cr_source_dir
-        # If chrome is a component build then need to create a symlink such
-        # that the _unittest binaries can find the chrome component libraries.
-        Release_lib = os.path.join(self.test_binary_dir, 'lib')
-        if os.path.isdir(self.COMPONENT_LIB):
-            logging.info('Detected component build. This assumes binary '
-                         'compatibility between chrome and *unittest.')
-            if not os.path.islink(Release_lib):
-                os.symlink(self.COMPONENT_LIB, Release_lib)
         self.home_dir = tempfile.mkdtemp()
 
     def cleanup(self):
@@ -134,7 +115,6 @@
         binary_path = self.get_chrome_binary_path(binary_to_run)
         env_vars = ' '.join([
             'HOME=' + self.home_dir,
-            'CR_SOURCE_ROOT=' + self.cr_source_dir,
             'CHROME_DEVEL_SANDBOX=' + self.CHROME_SANDBOX,
             'GTEST_OUTPUT=xml:' + gtest_xml,
             ])
diff --git a/client/cros/constants.py b/client/cros/constants.py
index a41ad0f..609b9a1 100644
--- a/client/cros/constants.py
+++ b/client/cros/constants.py
@@ -2,13 +2,11 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# The names of expected mount-points, devices, magic files, etc on chrome os.
+# The names of expected mount-points, devices, magic files, etc on ChromeOS.
 
 # Constants used by other constants.
 USER_DATA_DIR = '/home/chronos'
-# TODO b:169251326 terms below are set outside of this codebase
-# and should be updated when possible. ("whitelist" -> "allowlist")
-ALLOWLIST_DIR = '/var/lib/whitelist'
+DEVICESETTINGS_DIR = '/var/lib/devicesettings'
 LOG_DIR = '/var/log'
 
 # Rest of constants.
@@ -46,9 +44,17 @@
 SHADOW_ROOT = '/home/.shadow'
 
 CRYPTOHOME_DEV_REGEX_ANY = r'.*'
+CRYPTOHOME_DEV_REGEX_REGULAR_USER_DMCRYPT_DEVICE = r'^/dev/mapper/.*$'
+CRYPTOHOME_DEV_REGEX_REGULAR_USER_LOOP_DEVICE = r'^/dev/(?!loop[0-9]+$)[^/]*$'
 CRYPTOHOME_DEV_REGEX_REGULAR_USER_SHADOW = r'^/home/\.shadow/.*/vault$'
-CRYPTOHOME_DEV_REGEX_REGULAR_USER_DEVICE = r'^/dev/(?!loop[0-9]+$)[^/]*$'
 CRYPTOHOME_DEV_REGEX_REGULAR_USER_EPHEMERAL = r'^ephemeralfs/.*$'
+
+
+# Cryptohome mounts are either backed by a dm-crypt device or a loop device.
+CRYPTOHOME_DEV_REGEX_REGULAR_USER_DEVICE = r'(%s|%s)' % (
+        CRYPTOHOME_DEV_REGEX_REGULAR_USER_DMCRYPT_DEVICE,
+        CRYPTOHOME_DEV_REGEX_REGULAR_USER_LOOP_DEVICE)
+
 # Ecryptfs-based user home directory mounts the SHADOW encrypted directory,
 # while ext4-crypto based user home is a bind-mount to an encrypted directory
 # part of a ext4 filesystem that mounts the main disk device. Both can be
@@ -110,7 +116,7 @@
 OAUTH2_GET_AUTH_CODE_URL = '/o/oauth2/programmatic_auth'
 OAUTH2_GET_TOKEN_URL = '/o/oauth2/token'
 
-OWNER_KEY_FILE = ALLOWLIST_DIR + '/owner.key'
+OWNER_KEY_FILE = DEVICESETTINGS_DIR + '/owner.key'
 
 SERVICE_LOGIN_URL = '/accounts/ServiceLogin'
 SERVICE_LOGIN_NEW_URL = '/ServiceLogin'
@@ -118,7 +124,7 @@
 SERVICE_LOGIN_AUTH_ERROR = 'The username or password you entered is incorrect.'
 
 SESSION_MANAGER = 'session_manager'
-SIGNED_POLICY_FILE = ALLOWLIST_DIR + '/policy'
+SIGNED_POLICY_FILE = DEVICESETTINGS_DIR + '/policy'
 SPECIAL_CASE_DOMAIN = 'gmail.com'
 USER_POLICY_DIR = '/run/user_policy'
 USER_POLICY_KEY_FILENAME = 'policy.pub'
@@ -153,7 +159,7 @@
 BLUETOOTH_DEVICE_XMLRPC_SERVER_PORT = 9990
 BLUETOOTH_DEVICE_XMLRPC_SERVER_COMMAND = (
         'cd /usr/local/autotest/cros/bluetooth; '
-        './bluetooth_device_xmlrpc_server.py')
+        './bluetooth_device_xmlrpc_server.py --py_version=3')
 BLUETOOTH_DEVICE_XMLRPC_SERVER_CLEANUP_PATTERN = (
         'bluetooth_device_xmlrpc_server')
 BLUETOOTH_DEVICE_XMLRPC_SERVER_READY_METHOD = 'ready'
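
On the combined CRYPTOHOME_DEV_REGEX_REGULAR_USER_DEVICE pattern added above: a
user vault mount is accepted when it is backed either by a dm-crypt mapper
device or by a plain (non-loopN) block device. A small stand-alone check, with
hypothetical device paths, showing what the alternation accepts:

    import re

    # Local names for illustration only; the real constants live in constants.py.
    DMCRYPT = r'^/dev/mapper/.*$'
    NON_LOOP = r'^/dev/(?!loop[0-9]+$)[^/]*$'
    COMBINED = r'(%s|%s)' % (DMCRYPT, NON_LOOP)

    assert re.match(COMBINED, '/dev/mapper/dmcrypt-user-data')  # dm-crypt backed
    assert re.match(COMBINED, '/dev/sda1')                      # plain block device
    assert not re.match(COMBINED, '/dev/loop7')                 # rejected by the lookahead
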
diff --git a/client/cros/crash/crash_test.py b/client/cros/crash/crash_test.py
index 96ad863..07b6847 100644
--- a/client/cros/crash/crash_test.py
+++ b/client/cros/crash/crash_test.py
@@ -16,6 +16,32 @@
 from autotest_lib.client.cros import constants, cros_logging
 
 
+_CRASH_RUN_STATE_DIR = '/run/crash_reporter'
+
+
+class FilterOut:
+    """contextmanager-compatible class to block certain crashes during tests."""
+
+    def __init__(self, name):
+        self._FILTER_OUT = _CRASH_RUN_STATE_DIR + '/filter-out'
+        self.name = name
+
+    def __enter__(self):
+        """Writes the given parameter to the filter-out file.
+
+        This is used to ignore crashes in which we have no interest.
+        """
+        utils.open_write_close(self._FILTER_OUT, self.name)
+
+    def __exit__(self, ex_type, value, traceback):
+        """Remove the filter-out file.
+
+        Next time the crash reporter is invoked, it will not filter crashes."""
+        os.remove(self._FILTER_OUT)
+        # Do *not* handle any exception
+        return False
+
+
 class CrashTest(test.test):
     """
     This class deals with running crash tests, which are tests which crash a
@@ -78,7 +104,6 @@
     _CRASH_SENDER_PATH = '/sbin/crash_sender'
     _CRASH_SENDER_RATE_DIR = '/var/lib/crash_sender'
     _CRASH_SENDER_LOCK_PATH = '/run/lock/crash_sender'
-    _CRASH_RUN_STATE_DIR = '/run/crash_reporter'
     _CRASH_TEST_IN_PROGRESS = _CRASH_RUN_STATE_DIR + '/crash-test-in-progress'
     _MOCK_CRASH_SENDING = _CRASH_RUN_STATE_DIR + '/mock-crash-sending'
     _FILTER_IN = _CRASH_RUN_STATE_DIR + '/filter-in'
@@ -178,7 +203,7 @@
         """
         autotest_cros_dir = os.path.join(os.path.dirname(__file__), '..')
         if has_consent:
-            if os.path.isdir(constants.ALLOWLIST_DIR):
+            if os.path.isdir(constants.DEVICESETTINGS_DIR):
                 # Create policy file that enables metrics/consent.
                 shutil.copy('%s/mock_metrics_on.policy' % autotest_cros_dir,
                             constants.SIGNED_POLICY_FILE)
@@ -196,7 +221,7 @@
             shutil.move(temp_file, self._CONSENT_FILE)
             logging.info('Created %s', self._CONSENT_FILE)
         else:
-            if os.path.isdir(constants.ALLOWLIST_DIR):
+            if os.path.isdir(constants.DEVICESETTINGS_DIR):
                 # Create policy file that disables metrics/consent.
                 shutil.copy('%s/mock_metrics_off.policy' % autotest_cros_dir,
                             constants.SIGNED_POLICY_FILE)
@@ -325,13 +350,18 @@
         This writes a file to _SYSTEM_CRASH_DIR with the given name. This is
         used to insert new crash dump files for testing purposes.
 
+        If contents is not a string, binary data is assumed.
+
         @param name: Name of file to write.
-        @param contents: String to write to the file.
+        @param contents: String/binary data to write to the file.
         """
         entry = self.get_crash_dir_name(name)
         if not os.path.exists(self._SYSTEM_CRASH_DIR):
             os.makedirs(self._SYSTEM_CRASH_DIR)
-        utils.open_write_close(entry, contents)
+
+        is_binary = not isinstance(contents, str)
+        utils.open_write_close(entry, contents, is_binary)
+
         return entry
 
 
@@ -382,8 +412,10 @@
         if report is None:
             # Use the same file format as crash does normally:
             # <basename>.#.#.#.meta
-            payload = self.write_crash_dir_entry(
-                '%s.dmp' % self._FAKE_TEST_BASENAME, self._get_dmp_contents())
+            payload = os.path.basename(
+                    self.write_crash_dir_entry(
+                            '%s.dmp' % self._FAKE_TEST_BASENAME,
+                            self._get_dmp_contents()))
             report = self.write_fake_meta(
                 '%s.meta' % self._FAKE_TEST_BASENAME, 'fake', payload)
         return report
@@ -592,7 +624,7 @@
     def enable_crash_filtering(self, name):
         """Writes the given parameter to the filter-in file.
 
-        This is used to ignore crashes in which we have no interest.
+        This is used to collect only crashes in which we have an interest.
 
         @param new_parameter: The filter to write to the file, if any.
         """
@@ -666,7 +698,7 @@
             self._push_consent()
 
         if must_run_all:
-            # Sanity check test_names is complete
+            # Check that test_names is complete
             for attr in dir(self):
                 if attr.find('_test_') == 0:
                     test_name = attr[6:]
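
Since FilterOut implements __enter__/__exit__ directly, it drops straight into a
with-statement: the filter file is created on entry and removed on exit even if
the body raises. A self-contained sketch of the same protocol, using a temporary
directory instead of /run/crash_reporter so it runs anywhere (the crash name is
a placeholder):

    import os
    import tempfile

    class DemoFilterOut(object):
        """Minimal re-creation of FilterOut's enter/exit behaviour."""

        def __init__(self, name, state_dir):
            self._path = os.path.join(state_dir, 'filter-out')
            self._name = name

        def __enter__(self):
            # crash_test.py does this write via utils.open_write_close().
            with open(self._path, 'w') as f:
                f.write(self._name)

        def __exit__(self, ex_type, value, traceback):
            os.remove(self._path)
            return False  # never swallow exceptions from the body

    state_dir = tempfile.mkdtemp()
    with DemoFilterOut('anomaly_detector', state_dir):
        pass  # crashes matching the filter would be ignored inside this block
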
diff --git a/client/cros/crash/user_crash_test.py b/client/cros/crash/user_crash_test.py
index 2245c74..585d431 100644
--- a/client/cros/crash/user_crash_test.py
+++ b/client/cros/crash/user_crash_test.py
@@ -12,7 +12,7 @@
 import stat
 import subprocess
 
-import crash_test
+from autotest_lib.client.cros.crash import crash_test
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 
@@ -50,7 +50,7 @@
 
         @param expected_tag: Expected tag in crash_reporter log message.
         @param expected_version: Expected version included in the crash report,
-                                 or None to use the Chrome OS version.
+                                 or None to use the ChromeOS version.
         @param force_user_crash_dir: Always look for crash reports in the crash
                                      directory of the current user session, or
                                      the fallback directory if no sessions.
@@ -92,9 +92,7 @@
         os.mkdir(self._symbol_dir)
 
         basename = os.path.basename(self._crasher_path)
-        utils.system('/usr/bin/dump_syms %s > %s.sym' %
-                     (self._crasher_path,
-                      basename))
+        utils.system('dump_syms %s > %s.sym' % (self._crasher_path, basename))
         sym_name = '%s.sym' % basename
         symbols = utils.read_file(sym_name)
         # First line should be like:
@@ -183,9 +181,6 @@
         """Runs the crasher process.
 
         Will wait up to 10 seconds for crash_reporter to report the crash.
-        crash_reporter_caught will be marked as true when the "Received crash
-        notification message..." appears. While associated logs are likely to be
-        available at this point, the function does not guarantee this.
 
         @param username: Unix user of the crasher process.
         @param cause_crash: Whether the crasher should crash.
@@ -211,7 +206,6 @@
           A dictionary with keys:
             returncode: return code of the crasher
             crashed: did the crasher return segv error code
-            crash_reporter_caught: did crash_reporter catch a segv
             output: stderr output of the crasher process
         """
         if crasher_path is None:
@@ -254,7 +248,7 @@
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
 
-            output = crasher.communicate()[1]
+            output = crasher.communicate()[1].decode()
             exit_code = crasher.returncode
             pid = None
 
@@ -298,18 +292,7 @@
                 'Timeout waiting for crash_reporter to finish: ' +
                 self._log_reader.get_logs()))
 
-        is_caught = False
-        try:
-            utils.poll_for_condition(
-                lambda: self._log_reader.can_find(expected_message),
-                timeout=5,
-                desc='Logs contain crash_reporter message: ' + expected_message)
-            is_caught = True
-        except utils.TimeoutError:
-            pass
-
         result = {'crashed': exit_code == expected_exit_code,
-                  'crash_reporter_caught': is_caught,
                   'output': output,
                   'returncode': exit_code}
         logging.debug('Crasher process result: %s', result)
@@ -350,7 +333,7 @@
 
     def _check_minidump_stackwalk(self, minidump_path, basename,
                                   from_crash_reporter):
-        stack = utils.system_output('/usr/bin/minidump_stackwalk %s %s' %
+        stack = utils.system_output('minidump_stackwalk %s %s' %
                                     (minidump_path, self._symbol_dir))
         self._verify_stack(stack, basename, from_crash_reporter)
 
@@ -406,7 +389,7 @@
             expected_uid=expected_uid, expected_gid=expected_gid,
             expected_exit_code=expected_exit_code)
 
-        if not result['crashed'] or not result['crash_reporter_caught']:
+        if not result['crashed']:
             return result
 
         crash_dir = self._get_crash_dir(username, self._force_user_crash_dir)
@@ -491,17 +474,6 @@
         return result
 
 
-    def _check_crashed_and_caught(self, result):
-        if not result['crashed']:
-            raise error.TestFail('Crasher returned %d instead of crashing' %
-                                 result['returncode'])
-
-        if not result['crash_reporter_caught']:
-            logging.debug('Logs do not contain crash_reporter message:\n%s',
-                          self._log_reader.get_logs())
-            raise error.TestFail('crash_reporter did not catch crash')
-
-
     def _check_crashing_process(self,
                                 username,
                                 consent=True,
@@ -519,8 +491,9 @@
             expected_gid=expected_gid,
             expected_exit_code=expected_exit_code)
 
-        self._check_crashed_and_caught(result)
-
+        if not result['crashed']:
+            raise error.TestFail('Crasher returned %d instead of crashing' %
+                                 result['returncode'])
         if not consent:
             return
 
@@ -533,10 +506,6 @@
         if not result['minidump']:
             raise error.TestFail('crash reporter did not generate minidump')
 
-        if not self._log_reader.can_find('Stored minidump to ' +
-                                         result['minidump']):
-            raise error.TestFail('crash reporter did not announce minidump')
-
         self._check_minidump_stackwalk(result['minidump'],
                                        result['basename'],
                                        from_crash_reporter=True)
diff --git a/client/cros/cros_disks.py b/client/cros/cros_disks.py
index 5d35e2a..8b51f83 100644
--- a/client/cros/cros_disks.py
+++ b/client/cros/cros_disks.py
@@ -8,8 +8,13 @@
 from __future__ import division
 from __future__ import absolute_import
 
-import dbus, gobject, logging, os, stat
+import dbus, logging, os, stat
 from dbus.mainloop.glib import DBusGMainLoop
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 import six
 from six.moves import zip
 
@@ -62,7 +67,7 @@
 class DBusClient(object):
     """ A base class of a DBus proxy client to test a DBus server.
 
-    This class is expected to be used along with a GLib main loop and provides
+    This class is expected to be used along with a GObject main loop and provides
     some convenient functions for testing the DBus API exposed by a DBus server.
     """
 
@@ -70,7 +75,7 @@
         """Initializes the instance.
 
         Args:
-            main_loop: The GLib main loop.
+            main_loop: The GObject main loop.
             bus: The bus where the DBus server is connected to.
             bus_name: The bus name owned by the DBus server.
             object_path: The object path of the DBus server.
@@ -209,7 +214,7 @@
         """Initializes the instance.
 
         Args:
-            main_loop: The GLib main loop.
+            main_loop: The GObject main loop.
             bus: The bus where the DBus server is connected to.
             timeout_seconds: Maximum time in seconds to wait for the DBus
                              connection.
@@ -423,7 +428,7 @@
     def __init__(self, test):
         bus_loop = DBusGMainLoop(set_as_default=True)
         self.bus = dbus.SystemBus(mainloop=bus_loop)
-        self.main_loop = gobject.MainLoop()
+        self.main_loop = GObject.MainLoop()
         super(CrosDisksTester, self).__init__(test, self.main_loop)
         self.cros_disks = CrosDisksClient(self.main_loop, self.bus)
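
The import fallback above (and the identical one in dark_resume_listener.py
further down) lets the same module run against both the PyGObject binding and
the legacy gobject module shipped on older client images. A minimal sketch of
how that main loop is wired together with dbus-python:

    import dbus
    from dbus.mainloop.glib import DBusGMainLoop

    # Prefer gi.repository on newer images; fall back to legacy gobject.
    try:
        from gi.repository import GObject
    except ImportError:
        import gobject as GObject

    bus_loop = DBusGMainLoop(set_as_default=True)  # deliver D-Bus signals via GLib
    bus = dbus.SystemBus(mainloop=bus_loop)
    main_loop = GObject.MainLoop()                 # call main_loop.run() to pump events
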
 
diff --git a/client/cros/cros_logging.py b/client/cros/cros_logging.py
index 102fb3c..9065076 100644
--- a/client/cros/cros_logging.py
+++ b/client/cros/cros_logging.py
@@ -127,11 +127,14 @@
         Start line is set by set_start_* functions or
         since the start of the file if none were called.
 
+        All lines must be decoded to strings, since Python 3
+        differentiates between bytes and strings.
+
         @return string of contents of file since start line.
         """
         logs = []
         for line in self.read_all_logs():
-            logs.append(line)
+            logs.append(line.decode())
         return ''.join(logs)
 
 
diff --git a/client/cros/cryptohome.py b/client/cros/cryptohome.py
index 61f6698..14a1c1b 100644
--- a/client/cros/cryptohome.py
+++ b/client/cros/cryptohome.py
@@ -7,16 +7,15 @@
 from __future__ import division
 from __future__ import print_function
 
-import dbus, gobject, logging, os, random, re, shutil, string, sys, time
-from dbus.mainloop.glib import DBusGMainLoop
-from six.moves import map
+import logging, os, random, re, shutil, string, time
 
 import common
 
 from autotest_lib.client.cros import constants
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.cros_disks import DBusClient
+from autotest_lib.client.common_lib.cros import tpm_utils
+from autotest_lib.client.cros.tpm import *
 
 ATTESTATION_CMD = '/usr/bin/attestation_client'
 CRYPTOHOME_CMD = '/usr/sbin/cryptohome'
@@ -29,14 +28,8 @@
 
 DBUS_PROTOS_DEP = 'dbus_protos'
 
+LEC_KEY = 'low_entropy_credentials_supported'
 
-class ChromiumOSError(error.TestError):
-    """Generic error for ChromiumOS-specific exceptions."""
-    pass
-
-def __run_cmd(cmd):
-    return utils.system_output(cmd + ' 2>&1', retain_output=True,
-                               ignore_status=True).strip()
 
 def get_user_hash(user):
     """Get the user hash for the given user."""
@@ -83,75 +76,23 @@
     mount_vault(user, password, create=True)
 
 
-def get_tpm_status():
-    """Get the TPM status.
-
-    Returns:
-        A TPM status dictionary, for example:
-        { 'Enabled': True,
-          'Owned': True,
-          'Ready': True
-        }
-    """
-    out = __run_cmd(TPM_MANAGER_CMD + ' status --nonsensitive')
-    status = {}
-    for field in ['is_enabled', 'is_owned']:
-        match = re.search('%s: (true|false)' % field, out)
-        if not match:
-            raise ChromiumOSError('Invalid TPM status: "%s".' % out)
-        status[field] = match.group(1) == 'true'
-    status['Enabled'] = status['is_enabled']
-    status['Owned'] = status['is_owned']
-    status['Ready'] = status['is_enabled'] and status['is_owned']
-    return status
-
-
 def get_tpm_password():
     """Get the TPM password.
 
     Returns:
         A TPM password
     """
-    out = __run_cmd(TPM_MANAGER_CMD + ' status')
+    out = run_cmd(TPM_MANAGER_CMD + ' status')
     match = re.search('owner_password: (\w*)', out)
     password = ''
     if match:
-        hex_pass = match.group(1).decode("hex")
+        hex_pass = match.group(1)
         password = ''.join(
                 chr(int(hex_pass[i:i + 2], 16))
                 for i in range(0, len(hex_pass), 2))
     return password
 
 
-def get_tpm_da_info():
-    """Get the TPM dictionary attack information.
-    Returns:
-        A TPM dictionary attack status dictionary, for example:
-        {
-          'dictionary_attack_counter': 0,
-          'dictionary_attack_threshold': 200,
-          'dictionary_attack_lockout_in_effect': False,
-          'dictionary_attack_lockout_seconds_remaining': 0
-        }
-    """
-    status = {}
-    out = __run_cmd(TPM_MANAGER_CMD + ' get_da_info')
-    for line in out.splitlines()[1:-1]:
-        items = line.strip().split(':')
-        if len(items) != 2:
-            continue
-        if items[1].strip() == 'false':
-            value = False
-        elif items[1].strip() == 'true':
-            value = True
-        elif items[1].split('(')[0].strip().isdigit():
-            value = int(items[1].split('(')[0].strip())
-        else:
-            value = items[1].strip(' "')
-        status[items[0].strip()] = value
-    return status
-
-
 def get_fwmp(cleared_fwmp=False):
     """Get the firmware management parameters.
 
@@ -172,23 +113,25 @@
     Raises:
          ChromiumOSError if any expected field is not found in the cryptohome
          output. This would typically happen when FWMP state does not match
-         'clreared_fwmp'
+         'cleared_fwmp'
     """
-    out = __run_cmd(CRYPTOHOME_CMD +
+    out = run_cmd(CRYPTOHOME_CMD +
                     ' --action=get_firmware_management_parameters')
 
     if cleared_fwmp:
+        if tpm_utils.FwmpIsAllZero(out):
+            return {}
         fields = ['error']
     else:
         fields = ['flags', 'developer_key_hash']
 
     status = {}
     for field in fields:
-        match = re.search('%s: (\S+)\n' % field, out)
+        match = re.search('%s: (\S+)\s' % field, out)
         if not match:
             raise ChromiumOSError('Invalid FWMP field %s: "%s".' %
                                   (field, out))
-        status[field] = match.group(1)
+        status[field] = match.group(1).strip()
     return status
 
 
@@ -207,7 +150,7 @@
     if developer_key_hash:
         cmd += ' --developer_key_hash=' + developer_key_hash
 
-    out = __run_cmd(cmd)
+    out = run_cmd(cmd)
     if 'SetFirmwareManagementParameters success' not in out:
         raise ChromiumOSError('failed to set FWMP: %s' % out)
 
@@ -223,13 +166,11 @@
 
     Returns:
         A login status dictionary containing:
-        { 'owner_user_exists': True|False,
-          'boot_lockbox_finalized': True|False
-        }
+        { 'owner_user_exists': True|False }
     """
-    out = __run_cmd(CRYPTOHOME_CMD + ' --action=get_login_status')
+    out = run_cmd(CRYPTOHOME_CMD + ' --action=get_login_status')
     status = {}
-    for field in ['owner_user_exists', 'boot_lockbox_finalized']:
+    for field in ['owner_user_exists']:
         match = re.search('%s: (true|false)' % field, out)
         if not match:
             raise ChromiumOSError('Invalid login status: "%s".' % out)
@@ -248,14 +189,47 @@
           "VALID"
           "INVALID"
     """
-    out = __run_cmd(CRYPTOHOME_CMD + ' --action=install_attributes_get_status')
+    out = run_cmd(CRYPTOHOME_CMD + ' --action=install_attributes_get_status')
     return out.strip()
 
 
+def lock_install_attributes(attrs):
+    """Set and lock install attributes for the device.
+
+    @param attrs: dict of install attributes.
+    """
+
+    take_tpm_ownership()
+    wait_for_install_attributes_ready()
+    for name, value in attrs.items():
+        args = [
+                CRYPTOHOME_CMD, '--action=install_attributes_set',
+                '--name="%s"' % name,
+                '--value="%s"' % value
+        ]
+        cmd = ' '.join(args)
+        if (utils.system(cmd, ignore_status=True) != 0):
+            return False
+
+    out = run_cmd(CRYPTOHOME_CMD + ' --action=install_attributes_finalize')
+    return (out.strip() == 'InstallAttributesFinalize(): 1')
+
+
+def wait_for_install_attributes_ready():
+    """Wait until install attributes are ready.
+    """
+    cmd = CRYPTOHOME_CMD + ' --action=install_attributes_is_ready'
+    utils.poll_for_condition(
+            lambda: run_cmd(cmd).strip() == 'InstallAttributesIsReady(): 1',
+            timeout=300,
+            exception=error.TestError(
+                    'Timeout waiting for install attributes to be ready'))
+
+
 def get_tpm_attestation_status():
     """Get the TPM attestation status.  Works similar to get_tpm_status().
     """
-    out = __run_cmd(ATTESTATION_CMD + ' status')
+    out = run_cmd(ATTESTATION_CMD + ' status')
     status = {}
     for field in ['prepared_for_enrollment', 'enrolled']:
         match = re.search('%s: (true|false)' % field, out)
@@ -266,12 +240,12 @@
 
 
 def take_tpm_ownership(wait_for_ownership=True):
-    """Take TPM owernship.
+    """Take TPM ownership.
 
     Args:
         wait_for_ownership: block until TPM is owned if true
     """
-    __run_cmd(CRYPTOHOME_CMD + ' --action=tpm_take_ownership')
+    run_cmd(CRYPTOHOME_CMD + ' --action=tpm_take_ownership')
     if wait_for_ownership:
         # Note that waiting for the 'Ready' flag is more correct than waiting
         # for the 'Owned' flag, as the latter is set by cryptohomed before some
@@ -297,7 +271,7 @@
     user_hash = get_user_hash(user)
     logging.debug('Removing vault for user %s with hash %s', user, user_hash)
     cmd = CRYPTOHOME_CMD + ' --action=remove --force --user=%s' % user
-    __run_cmd(cmd)
+    run_cmd(cmd)
     # Ensure that the vault does not exist.
     if os.path.exists(os.path.join(constants.SHADOW_ROOT, user_hash)):
         raise ChromiumOSError('Cryptohome could not remove the user\'s vault.')
@@ -329,7 +303,7 @@
             key_label = 'bar'
     if key_label is not None:
         args += ['--key_label=%s' % key_label]
-    logging.info(__run_cmd(' '.join(args)))
+    logging.info(run_cmd(' '.join(args)))
     # Ensure that the vault exists in the shadow directory.
     user_hash = get_user_hash(user)
     if not os.path.exists(os.path.join(constants.SHADOW_ROOT, user_hash)):
@@ -338,7 +312,7 @@
         while retry < MOUNT_RETRY_COUNT and not mounted:
             time.sleep(1)
             logging.info("Retry %s", str(retry + 1))
-            __run_cmd(' '.join(args))
+            run_cmd(' '.join(args))
             # TODO: Remove this additional call to get_user_hash(user) when
             # crbug.com/690994 is fixed
             user_hash = get_user_hash(user)
@@ -355,7 +329,7 @@
 def mount_guest():
     """Mount the guest vault."""
     args = [CRYPTOHOME_CMD, '--action=mount_guest_ex']
-    logging.info(__run_cmd(' '.join(args)))
+    logging.info(run_cmd(' '.join(args)))
     # Ensure that the guest vault is mounted.
     if not is_guest_vault_mounted(allow_fail=True):
         raise ChromiumOSError('Cryptohome did not mount guest vault.')
@@ -365,7 +339,7 @@
     """Test key auth."""
     cmd = [CRYPTOHOME_CMD, '--action=check_key_ex', '--user=%s' % user,
            '--password=%s' % password, '--async']
-    out = __run_cmd(' '.join(cmd))
+    out = run_cmd(' '.join(cmd))
     logging.info(out)
     return 'Key authenticated.' in out
 
@@ -376,7 +350,7 @@
             '--user=%s' % user, '--password=%s' % password,
             '--new_key_label=%s' % new_key_label,
             '--new_password=%s' % new_password]
-    logging.info(__run_cmd(' '.join(args)))
+    logging.info(run_cmd(' '.join(args)))
 
 
 def remove_key(user, password, remove_key_label):
@@ -384,29 +358,38 @@
     args = [CRYPTOHOME_CMD, '--action=remove_key_ex', '--user=%s' % user,
             '--password=%s' % password,
             '--remove_key_label=%s' % remove_key_label]
-    logging.info(__run_cmd(' '.join(args)))
+    logging.info(run_cmd(' '.join(args)))
 
 
-def get_supported_key_policies():
+def get_supported_key_policies(host=None):
     """Get supported key policies."""
     args = [CRYPTOHOME_CMD, '--action=get_supported_key_policies']
-    out = __run_cmd(' '.join(args))
+    if host is not None:
+        out = host.run(args).stdout
+    else:
+        out = run_cmd(' '.join(args))
     logging.info(out)
     policies = {}
     for line in out.splitlines():
-        match = re.search('  ([^:]+): (true|false)', line)
+        match = re.search('([^:]+): (true|false)', line.strip())
         if match:
             policies[match.group(1)] = match.group(2) == 'true'
     return policies
 
 
+def is_low_entropy_credentials_supported(host=None):
+    """ Returns True if low entropy credentials are supported."""
+    key_policies = get_supported_key_policies(host)
+    return LEC_KEY in key_policies and key_policies[LEC_KEY]
+
+
 def unmount_vault(user=None):
     """Unmount the given user's vault.
 
     Once unmounting for a specific user is supported, the user parameter will
     name the target user. See crosbug.com/20778.
     """
-    __run_cmd(CRYPTOHOME_CMD + ' --action=unmount')
+    run_cmd(CRYPTOHOME_CMD + ' --action=unmount')
     # Ensure that the vault is not mounted.
     if user is not None and is_vault_mounted(user, allow_fail=True):
         raise ChromiumOSError('Cryptohome did not unmount the user.')
@@ -464,6 +447,7 @@
             __get_mount_info(mount_point=system_path(user),
                              allow_fail=allow_fail)]
 
+
 def is_vault_mounted(user, regexes=None, allow_fail=False):
     """Check whether a vault is mounted for the given user.
 
@@ -514,28 +498,30 @@
        or be backed by tmpfs.
     """
     return is_vault_mounted(
-        user=GUEST_USER_NAME,
-        regexes={
-            # Remove tmpfs support when it becomes unnecessary as all guest
-            # modes will use ext4 on a loop device.
-            constants.CRYPTOHOME_FS_REGEX_EXT4 :
-                constants.CRYPTOHOME_DEV_REGEX_LOOP_DEVICE,
-            constants.CRYPTOHOME_FS_REGEX_TMPFS :
-                constants.CRYPTOHOME_DEV_REGEX_GUEST,
-        },
-        allow_fail=allow_fail)
+            user=GUEST_USER_NAME,
+            regexes={
+                    # Remove tmpfs support when it becomes unnecessary as all guest
+                    # modes will use ext4 on a loop device.
+                    constants.CRYPTOHOME_FS_REGEX_EXT4:
+                    constants.CRYPTOHOME_DEV_REGEX_LOOP_DEVICE,
+                    constants.CRYPTOHOME_FS_REGEX_TMPFS:
+                    constants.CRYPTOHOME_DEV_REGEX_GUEST,
+            },
+            allow_fail=allow_fail)
+
 
 def is_permanent_vault_mounted(user, allow_fail=False):
     """Check if user is mounted over ecryptfs or ext4 crypto. """
     return is_vault_mounted(
-        user=user,
-        regexes={
-            constants.CRYPTOHOME_FS_REGEX_ECRYPTFS :
-                constants.CRYPTOHOME_DEV_REGEX_REGULAR_USER_SHADOW,
-            constants.CRYPTOHOME_FS_REGEX_EXT4 :
-                constants.CRYPTOHOME_DEV_REGEX_REGULAR_USER_DEVICE,
-        },
-        allow_fail=allow_fail)
+            user=user,
+            regexes={
+                    constants.CRYPTOHOME_FS_REGEX_ECRYPTFS:
+                    constants.CRYPTOHOME_DEV_REGEX_REGULAR_USER_SHADOW,
+                    constants.CRYPTOHOME_FS_REGEX_EXT4:
+                    constants.CRYPTOHOME_DEV_REGEX_REGULAR_USER_DEVICE,
+            },
+            allow_fail=allow_fail)
+
 
 def get_mounted_vault_path(user, allow_fail=False):
     """Get the path where the decrypted data for the user is located."""
@@ -568,7 +554,7 @@
 def crash_cryptohomed():
     """Let cryptohome crash."""
     # Try to kill cryptohomed so we get something to work with.
-    pid = __run_cmd('pgrep cryptohomed')
+    pid = run_cmd('pgrep cryptohomed')
     try:
         pid = int(pid)
     except ValueError as e:  # empty or invalid string
@@ -603,11 +589,14 @@
             '--key_label=foo',
             '--ecryptfs',
             '--create']
-    logging.info(__run_cmd(' '.join(args)))
-    if not is_vault_mounted(user, regexes={
-        constants.CRYPTOHOME_FS_REGEX_ECRYPTFS :
-            constants.CRYPTOHOME_DEV_REGEX_REGULAR_USER_SHADOW
-    }, allow_fail=True):
+    logging.info(run_cmd(' '.join(args)))
+    if not is_vault_mounted(
+            user,
+            regexes={
+                    constants.CRYPTOHOME_FS_REGEX_ECRYPTFS:
+                    constants.CRYPTOHOME_DEV_REGEX_REGULAR_USER_SHADOW
+            },
+            allow_fail=True):
         raise ChromiumOSError('Ecryptfs home could not be created')
 
 
@@ -626,11 +615,11 @@
             '--to_migrate_from_ecryptfs',
             '--user=%s' % user,
             '--password=%s' % password]
-    logging.info(__run_cmd(' '.join(args)))
+    logging.info(run_cmd(' '.join(args)))
     if not __get_mount_info(temporary_mount_path(user), allow_fail=True):
         raise ChromiumOSError('Failed to mount home for migration')
     args = [CRYPTOHOME_CMD, '--action=migrate_to_dircrypto', '--user=%s' % user]
-    logging.info(__run_cmd(' '.join(args)))
+    logging.info(run_cmd(' '.join(args)))
     utils.poll_for_condition(
         lambda: not __get_mount_info(
                 temporary_mount_path(user), allow_fail=True),
@@ -647,201 +636,7 @@
             '--user=%s' % user,
             '--old_password=%s' % password,
             '--password=%s' % new_password]
-    out = __run_cmd(' '.join(args))
+    out = run_cmd(' '.join(args))
     logging.info(out)
     if 'Key migration succeeded.' not in out:
         raise ChromiumOSError('Key migration failed.')
-
-
-class CryptohomeProxy(DBusClient):
-    """A DBus proxy client for testing the Cryptohome DBus server.
-    """
-    CRYPTOHOME_BUS_NAME = 'org.chromium.Cryptohome'
-    CRYPTOHOME_OBJECT_PATH = '/org/chromium/Cryptohome'
-    CRYPTOHOME_INTERFACE = 'org.chromium.CryptohomeInterface'
-    ASYNC_CALL_STATUS_SIGNAL = 'AsyncCallStatus'
-    ASYNC_CALL_STATUS_SIGNAL_ARGUMENTS = (
-        'async_id', 'return_status', 'return_code'
-    )
-    DBUS_PROPERTIES_INTERFACE = 'org.freedesktop.DBus.Properties'
-
-    # Default timeout in seconds for the D-Bus connection.
-    DEFAULT_DBUS_TIMEOUT = 30
-
-    def __init__(self, bus_loop=None, autodir=None, job=None,
-                 timeout=DEFAULT_DBUS_TIMEOUT):
-        if autodir and job:
-            # Install D-Bus protos necessary for some methods.
-            dep_dir = os.path.join(autodir, 'deps', DBUS_PROTOS_DEP)
-            job.install_pkg(DBUS_PROTOS_DEP, 'dep', dep_dir)
-            sys.path.append(dep_dir)
-
-        # Set up D-Bus main loop and interface.
-        self.main_loop = gobject.MainLoop()
-        if bus_loop is None:
-            bus_loop = DBusGMainLoop(set_as_default=True)
-        self.bus = dbus.SystemBus(mainloop=bus_loop)
-        super(CryptohomeProxy, self).__init__(self.main_loop, self.bus,
-                                              self.CRYPTOHOME_BUS_NAME,
-                                              self.CRYPTOHOME_OBJECT_PATH,
-                                              timeout)
-        self.iface = dbus.Interface(self.proxy_object,
-                                    self.CRYPTOHOME_INTERFACE)
-        self.properties = dbus.Interface(self.proxy_object,
-                                         self.DBUS_PROPERTIES_INTERFACE)
-        self.handle_signal(self.CRYPTOHOME_INTERFACE,
-                           self.ASYNC_CALL_STATUS_SIGNAL,
-                           self.ASYNC_CALL_STATUS_SIGNAL_ARGUMENTS)
-
-
-    # Wrap all proxied calls to catch cryptohomed failures.
-    def __call(self, method, *args):
-        try:
-            return method(*args, timeout=180)
-        except dbus.exceptions.DBusException as e:
-            if e.get_dbus_name() == 'org.freedesktop.DBus.Error.NoReply':
-                logging.error('Cryptohome is not responding. Sending ABRT')
-                crash_cryptohomed()
-                raise ChromiumOSError('cryptohomed aborted. Check crashes!')
-            raise e
-
-
-    def __wait_for_specific_signal(self, signal, data):
-        """Wait for the |signal| with matching |data|
-          Returns the resulting dict on success or {} on error.
-        """
-        # Do not bubble up the timeout here, just return {}.
-        result = {}
-        try:
-            result = self.wait_for_signal(signal)
-        except utils.TimeoutError:
-            return {}
-        for k in data.keys():
-            if k not in result or result[k] != data[k]:
-                return {}
-        return result
-
-
-    # Perform a data-less async call.
-    # TODO(wad) Add __async_data_call.
-    def __async_call(self, method, *args):
-        # Clear out any superfluous async call signals.
-        self.clear_signal_content(self.ASYNC_CALL_STATUS_SIGNAL)
-        out = self.__call(method, *args)
-        logging.debug('Issued call ' + str(method) +
-                      ' with async_id ' + str(out))
-        result = {}
-        try:
-            # __wait_for_specific_signal has a 10s timeout
-            result = utils.poll_for_condition(
-                lambda: self.__wait_for_specific_signal(
-                    self.ASYNC_CALL_STATUS_SIGNAL, {'async_id' : out}),
-                timeout=180,
-                desc='matching %s signal' % self.ASYNC_CALL_STATUS_SIGNAL)
-        except utils.TimeoutError as e:
-            logging.error('Cryptohome timed out. Sending ABRT.')
-            crash_cryptohomed()
-            raise ChromiumOSError('cryptohomed aborted. Check crashes!')
-        return result
-
-
-    def mount(self, user, password, create=False, key_label='bar'):
-        """Mounts a cryptohome.
-
-        Returns True if the mount succeeds or False otherwise.
-        """
-        import rpc_pb2
-
-        acc = rpc_pb2.AccountIdentifier()
-        acc.account_id = user
-
-        auth = rpc_pb2.AuthorizationRequest()
-        auth.key.secret = password
-        auth.key.data.label = key_label
-
-        mount_req = rpc_pb2.MountRequest()
-        if create:
-            mount_req.create.copy_authorization_key = True
-
-        out = self.__call(self.iface.MountEx, acc.SerializeToString(),
-            auth.SerializeToString(), mount_req.SerializeToString())
-        parsed_out = rpc_pb2.BaseReply()
-        parsed_out.ParseFromString(''.join(map(chr, out)))
-        return parsed_out.error == rpc_pb2.CRYPTOHOME_ERROR_NOT_SET
-
-
-    def unmount(self, user):
-        """Unmounts a cryptohome.
-
-        Returns True if the unmount suceeds or false otherwise.
-        """
-        import rpc_pb2
-
-        req = rpc_pb2.UnmountRequest()
-
-        out = self.__call(self.iface.UnmountEx, req.SerializeToString())
-        parsed_out = rpc_pb2.BaseReply()
-        parsed_out.ParseFromString(''.join(map(chr, out)))
-        return parsed_out.error == rpc_pb2.CRYPTOHOME_ERROR_NOT_SET
-
-
-    def is_mounted(self, user):
-        """Tests whether a user's cryptohome is mounted."""
-        return (utils.is_mountpoint(user_path(user))
-                and utils.is_mountpoint(system_path(user)))
-
-
-    def require_mounted(self, user):
-        """Raises a test failure if a user's cryptohome is not mounted."""
-        utils.require_mountpoint(user_path(user))
-        utils.require_mountpoint(system_path(user))
-
-
-    def remove(self, user):
-        """Removes a users cryptohome.
-
-        Returns True if the operation succeeds or False otherwise.
-        """
-        import rpc_pb2
-
-        acc = rpc_pb2.AccountIdentifier()
-        acc.account_id = user
-
-        out = self.__call(self.iface.RemoveEx, acc.SerializeToString())
-        parsed_out = rpc_pb2.BaseReply()
-        parsed_out.ParseFromString(''.join(map(chr, out)))
-        return parsed_out.error == rpc_pb2.CRYPTOHOME_ERROR_NOT_SET
-
-
-    def ensure_clean_cryptohome_for(self, user, password=None):
-        """Ensure a fresh cryptohome exists for user.
-
-        @param user: user who needs a shiny new cryptohome.
-        @param password: if unset, a random password will be used.
-        """
-        if not password:
-            password = ''.join(random.sample(string.ascii_lowercase, 6))
-        self.remove(user)
-        self.mount(user, password, create=True)
-
-    def lock_install_attributes(self, attrs):
-        """Set and lock install attributes for the device.
-
-        @param attrs: dict of install attributes.
-        """
-        take_tpm_ownership()
-        self.wait_for_install_attributes_ready()
-        for key, value in attrs.items():
-            if not self.__call(self.iface.InstallAttributesSet, key,
-                               dbus.ByteArray(value + '\0')):
-                return False
-        return self.__call(self.iface.InstallAttributesFinalize)
-
-    def wait_for_install_attributes_ready(self):
-        """Wait until install attributes are ready.
-        """
-        utils.poll_for_condition(
-            lambda: self.__call(self.iface.InstallAttributesIsReady),
-            timeout=300,
-            exception=error.TestError(
-                    'Timeout waiting for install attributes are ready'))
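
A hedged usage sketch of the new module-level lock_install_attributes() helper;
it assumes the imports already present in cryptohome.py, and the attribute
names and values below are placeholders rather than anything cryptohome
requires:

    # lock_install_attributes() takes TPM ownership, waits for the install
    # attributes to become ready, sets each name/value pair, then finalizes.
    attrs = {
            'enterprise.owned': 'true',      # placeholder name/value
            'enterprise.mode': 'enterprise',
    }
    if not lock_install_attributes(attrs):
        raise error.TestError('Failed to set and finalize install attributes')
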
diff --git a/client/cros/dark_resume_listener.py b/client/cros/dark_resume_listener.py
index 0860a96..bb67fde 100644
--- a/client/cros/dark_resume_listener.py
+++ b/client/cros/dark_resume_listener.py
@@ -7,7 +7,11 @@
 
 import dbus
 import dbus.mainloop.glib
-import gobject
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 import os
 
 from autotest_lib.client.cros.input_playback import keyboard
@@ -23,7 +27,7 @@
 
     def __init__(self):
         dbus.mainloop.glib.threads_init()
-        gobject.threads_init()
+        GObject.threads_init()
 
         dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
         self._bus = dbus.SystemBus()
@@ -39,7 +43,7 @@
             # will never finish. Instead, we process events as they come in. This
             # thread is set to daemon below, which means that the program will exit
             # when the main thread exits.
-            loop = gobject.MainLoop()
+            loop = GObject.MainLoop()
             context = loop.get_context()
             while True:
                 context.iteration(True)
diff --git a/client/cros/dark_resume_xmlrpc_server.py b/client/cros/dark_resume_xmlrpc_server.py
index 77df236..e7347a6 100755
--- a/client/cros/dark_resume_xmlrpc_server.py
+++ b/client/cros/dark_resume_xmlrpc_server.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/cros/dbus_util.py b/client/cros/dbus_util.py
index 9e91445..8653eba 100644
--- a/client/cros/dbus_util.py
+++ b/client/cros/dbus_util.py
@@ -42,7 +42,7 @@
     elif isinstance(value, str):
         return str(value)
     elif isinstance(value, six.text_type):
-        return str(value)
+        return str(value.encode('utf-8'))
     elif isinstance(value, list):
         return [dbus2primitive(x) for x in value]
     elif isinstance(value, tuple):
diff --git a/client/cros/dhcp_handling_rule.py b/client/cros/dhcp_handling_rule.py
index 823bcb7..1e620fb 100644
--- a/client/cros/dhcp_handling_rule.py
+++ b/client/cros/dhcp_handling_rule.py
@@ -395,7 +395,15 @@
 
         self.logger.info("Received REQUEST packet, checking fields...")
         server_ip = query_packet.get_option(dhcp_packet.OPTION_SERVER_ID)
-        requested_ip = query_packet.get_option(dhcp_packet.OPTION_REQUESTED_IP)
+        if dhcp_packet.OPTION_REQUESTED_IP in self.options:
+            requested_ip = query_packet.get_option(
+                    dhcp_packet.OPTION_REQUESTED_IP)
+        else:
+            cli_ip = query_packet.get_field(dhcp_packet.FIELD_CLIENT_IP)
+            if cli_ip != dhcp_packet.IPV4_NULL_ADDRESS:
+                requested_ip = cli_ip
+            else:
+                requested_ip = None
         server_ip_provided = server_ip is not None
         if ((server_ip_provided != self._expect_server_ip_set) or
             (requested_ip is None)):
@@ -446,7 +454,7 @@
     """
     This handler is a lot like DhcpHandlingRule_RespondToRequest except that it
     expects request packets like those sent after the T2 deadline (see RFC
-    2131).  This is the only time that you can find a request packet without the
+    2131).  This is a case where you can find a request packet without the
     SERVER_ID option.  It responds to packets in exactly the same way.
     """
     def __init__(self,
@@ -461,14 +469,15 @@
 
         |additional_options| is handled as explained by DhcpHandlingRule.
         """
-        super(DhcpHandlingRule_RespondToPostT2Request, self).__init__(
-                expected_requested_ip,
-                None,
-                additional_options,
-                custom_fields,
-                should_respond=should_respond,
-                response_server_ip=response_server_ip,
-                response_granted_ip=response_granted_ip)
+        super(DhcpHandlingRule_RespondToPostT2Request,
+              self).__init__(expected_requested_ip,
+                             None,
+                             additional_options,
+                             custom_fields,
+                             should_respond=should_respond,
+                             response_server_ip=response_server_ip,
+                             response_granted_ip=response_granted_ip,
+                             expect_server_ip_set=False)
 
     def handle_impl(self, query_packet):
         if not self.is_our_message_type(query_packet):
@@ -480,8 +489,14 @@
                              "is not expected to have, discarding.")
             return RESPONSE_NO_ACTION
 
-        requested_ip = query_packet.get_option(dhcp_packet.OPTION_REQUESTED_IP)
-        if requested_ip is None:
+        if query_packet.get_option(
+                dhcp_packet.OPTION_REQUESTED_IP) is not None:
+            self.logger.info("REQUEST packet had a REQUESTED_IP_ID option, "
+                             "which it is not expected to have, discarding.")
+            return RESPONSE_NO_ACTION
+
+        requested_ip = query_packet.get_field(dhcp_packet.FIELD_CLIENT_IP)
+        if requested_ip == dhcp_packet.IPV4_NULL_ADDRESS:
             self.logger.info("REQUEST packet did not have the expected "
                              "request ip option at all, discarding.")
             return RESPONSE_NO_ACTION
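
On the dhcp_handling_rule.py hunks above: per RFC 2131, a RENEWING/REBINDING
(post-T1/T2) REQUEST carries the client's address in the ciaddr header field
and omits the REQUESTED_IP option, which is why the rules now fall back to the
client IP field. A simplified stand-alone sketch of that selection logic, with
plain dicts and strings standing in for the dhcp_packet accessors:

    IPV4_NULL_ADDRESS = '0.0.0.0'

    def requested_address(options, client_ip):
        """Return the address a REQUEST asks for, or None if it names none."""
        if 'requested_ip' in options:       # SELECTING / INIT-REBOOT style REQUEST
            return options['requested_ip']
        if client_ip != IPV4_NULL_ADDRESS:  # RENEWING / REBINDING: address in ciaddr
            return client_ip
        return None

    assert requested_address({'requested_ip': '10.0.0.5'}, '0.0.0.0') == '10.0.0.5'
    assert requested_address({}, '10.0.0.5') == '10.0.0.5'
    assert requested_address({}, '0.0.0.0') is None
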
diff --git a/client/cros/dhcp_packet.py b/client/cros/dhcp_packet.py
index 1facaa8..e938680 100644
--- a/client/cros/dhcp_packet.py
+++ b/client/cros/dhcp_packet.py
@@ -35,11 +35,46 @@
 import collections
 import logging
 import random
+import six
 from six.moves import range
 import socket
 import struct
 
 
+def get_ord(value):
+    """
+    Helper method for getting the ordinal value of a character in a byte string
+    during the Python 2 to Python 3 migration.
+
+    In Python 2, the function ord() provides the ordinal value of a character.
+    In Python 3, the byte is its own ordinal value.
+    """
+    if six.PY2:
+        return ord(value)
+    return value
+
+
+def get_bytes(value):
+    """
+    Helper method for converting a string into a byte string during the Python 2
+    to Python 3 migration.
+    """
+    if six.PY2:
+        return value
+    return value.encode('ISO-8859-1')
+
+
+def get_string(value):
+    """
+    Helper method for converting a byte string into a string during the Python 2
+    to Python 3 migration.
+    """
+
+    if six.PY2:
+        return value
+    return value.decode('ISO-8859-1')
+
+
 def CreatePacketPieceClass(super_class, field_format):
     class PacketPiece(super_class):
         @staticmethod
@@ -88,7 +123,7 @@
 class IpListOption(Option):
     @staticmethod
     def pack(value):
-        return "".join([socket.inet_aton(addr) for addr in value])
+        return b"".join([socket.inet_aton(addr) for addr in value])
 
     @staticmethod
     def unpack(byte_string):
@@ -99,21 +134,25 @@
 class RawOption(Option):
     @staticmethod
     def pack(value):
-        return value
+        return get_bytes(value)
 
     @staticmethod
     def unpack(byte_string):
-        return byte_string
+        return get_string(byte_string)
 
 
 class ByteListOption(Option):
     @staticmethod
     def pack(value):
-        return "".join(chr(v) for v in value)
+        if six.PY2:
+            return "".join(chr(v) for v in value)
+        return bytes(value)
 
     @staticmethod
     def unpack(byte_string):
-        return [ord(c) for c in byte_string]
+        if six.PY2:
+            return [ord(c) for c in byte_string]
+        return byte_string
 
 
 class ClasslessStaticRoutesOption(Option):
@@ -126,9 +165,9 @@
     @staticmethod
     def pack(value):
         route_list = value
-        byte_string = ""
+        byte_string = b""
         for prefix_size, destination, router in route_list:
-            byte_string += chr(prefix_size)
+            byte_string += get_bytes(chr(prefix_size))
             # Encode only the significant octets of the destination
             # that fall within the prefix.
             destination_address_count = (prefix_size + 7) // 8
@@ -143,7 +182,7 @@
         route_list = []
         offset = 0
         while offset < len(byte_string):
-            prefix_size = ord(byte_string[offset])
+            prefix_size = get_ord(byte_string[offset])
             destination_address_count = (prefix_size + 7) // 8
             entry_end = offset + 1 + destination_address_count + 4
             if entry_end > len(byte_string):
@@ -153,7 +192,7 @@
             destination_address = byte_string[offset:destination_address_end]
             # Pad the destination address bytes with zero byte octets to
             # fill out an IPv4 address.
-            destination_address += '\x00' * (4 - destination_address_count)
+            destination_address += b'\x00' * (4 - destination_address_count)
             router_address = byte_string[destination_address_end:entry_end]
             route_list.append((prefix_size,
                                socket.inet_ntoa(destination_address),
@@ -179,16 +218,18 @@
     @staticmethod
     def pack(value):
         domain_list = value
-        byte_string = ""
+        byte_string = b""
         for domain in domain_list:
             for part in domain.split("."):
-                byte_string += chr(len(part))
-                byte_string += part
-            byte_string += "\x00"
+                byte_string += get_bytes(chr(len(part)))
+                byte_string += get_bytes(part)
+            byte_string += b"\x00"
         return byte_string
 
     @staticmethod
     def unpack(byte_string):
+        if six.PY3:
+            byte_string = byte_string.decode('ISO-8859-1')
         domain_list = []
         offset = 0
         try:
@@ -512,7 +553,7 @@
         # reason, DHCP allocated 12 bytes to this field.  Ease the burden on
         # developers and hide this detail.
         while len(hwmac_addr) < 12:
-            hwmac_addr += chr(OPTION_PAD)
+            hwmac_addr += get_bytes(chr(OPTION_PAD))
 
         packet = DhcpPacket()
         packet.set_field(FIELD_OP, FIELD_VALUE_OP_CLIENT_REQUEST)
@@ -664,13 +705,14 @@
                                                         field.offset +
                                                         field.size])
         offset = OPTIONS_START_OFFSET
-        domain_search_list_byte_string = ""
-        while offset < len(byte_str) and ord(byte_str[offset]) != OPTION_END:
-            data_type = ord(byte_str[offset])
+        domain_search_list_byte_string = b""
+        while offset < len(byte_str) and get_ord(
+                byte_str[offset]) != OPTION_END:
+            data_type = get_ord(byte_str[offset])
             offset += 1
             if data_type == OPTION_PAD:
                 continue
-            data_length = ord(byte_str[offset])
+            data_length = get_ord(byte_str[offset])
             offset += 1
             data = byte_str[offset: offset + data_length]
             offset += data_length
@@ -750,7 +792,7 @@
         if not self.is_valid:
             return None
-        # A list of byte strings to be joined into a single string at the end.
-        data = []
+        # Accumulate the serialized packet into a single byte string.
+        data = b""
         offset = 0
         for field in DHCP_ALL_FIELDS:
             if field not in self._fields:
@@ -759,9 +801,9 @@
             while offset < field.offset:
                 # This should only happen when we're padding the fields because
                 # we're not filling in legacy BOOTP stuff.
-                data.append("\x00")
+                data += b"\x00"
                 offset += 1
-            data.append(field_data)
+            data += field_data
             offset += field.size
         # Last field processed is the magic cookie, so we're ready for options.
         # Have to process options
@@ -770,18 +812,16 @@
             if option_value is None:
                 continue
             serialized_value = option.pack(option_value)
-            data.append(struct.pack("BB",
-                                    option.number,
-                                    len(serialized_value)))
+            data += struct.pack("BB", option.number, len(serialized_value))
             offset += 2
-            data.append(serialized_value)
+            data += serialized_value
             offset += len(serialized_value)
-        data.append(chr(OPTION_END))
+        data += get_bytes(chr(OPTION_END))
         offset += 1
         while offset < DHCP_MIN_PACKET_SIZE:
-            data.append(chr(OPTION_PAD))
+            data += get_bytes(chr(OPTION_PAD))
             offset += 1
-        return "".join(data)
+        return data
 
     def __str__(self):
         options = [k.name + "=" + str(v) for k, v in self._options.items()]
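
A side note on the six helpers added above: under Python 3 every value that used to be assembled as str (option payloads, the serialized packet body) is now bytes, so indexing yields ints and concatenation needs byte literals. A small stand-alone round-trip sketch, with get_bytes/get_ord re-declared locally to mirror the definitions added to dhcp_packet.py:

import struct

import six


def get_bytes(value):
    # str stays str on Python 2; encode to bytes on Python 3.
    return value if six.PY2 else value.encode('ISO-8859-1')


def get_ord(value):
    # Indexing a byte string gives a 1-char str on Python 2, an int on 3.
    return ord(value) if six.PY2 else value


# Pack an option header (tag 53, length 1) followed by a one-byte value.
serialized = struct.pack("BB", 53, 1) + get_bytes(chr(1))
assert get_ord(serialized[0]) == 53  # same answer on either interpreter
assert get_ord(serialized[-1]) == 1
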
diff --git a/client/cros/dhcp_test_base.py b/client/cros/dhcp_test_base.py
index 0ca81e2..27f445b 100644
--- a/client/cros/dhcp_test_base.py
+++ b/client/cros/dhcp_test_base.py
@@ -16,6 +16,7 @@
 from __future__ import print_function
 
 import logging
+import six
 from six.moves import filter
 from six.moves import range
 import socket
@@ -54,6 +55,10 @@
     """Parent class for tests that work verify DHCP behavior."""
     version = 1
 
+    def __init__(self, job, bindir, outputdir, namespace='autotest'):
+        test.test.__init__(self, job, bindir, outputdir)
+        self._namespace = namespace
+
     @staticmethod
     def rewrite_ip_suffix(subnet_mask, ip_in_subnet, ip_suffix):
         """
@@ -119,7 +124,10 @@
         if device is None:
             return []
 
-        device_properties = device.GetProperties(utf8_strings=True)
+        if six.PY2:
+            device_properties = device.GetProperties(utf8_strings=True)
+        else:
+            device_properties = device.GetProperties()
         proxy = self.shill_proxy
 
         ipconfig_object = proxy.DBUS_TYPE_IPCONFIG
@@ -142,18 +150,21 @@
         """
         dhcp_properties = None
         for ipconfig in self.get_interface_ipconfig_objects(interface_name):
-          logging.info('Looking at ipconfig %r', ipconfig)
-          ipconfig_properties = ipconfig.GetProperties(utf8_strings=True)
-          if 'Method' not in ipconfig_properties:
-              logging.info('Found ipconfig object with no method field')
-              continue
-          if ipconfig_properties['Method'] != 'dhcp':
-              logging.info('Found ipconfig object with method != dhcp')
-              continue
-          if dhcp_properties != None:
-              raise error.TestFail('Found multiple ipconfig objects '
-                                   'with method == dhcp')
-          dhcp_properties = ipconfig_properties
+            logging.info('Looking at ipconfig %r', ipconfig)
+            if six.PY2:
+                ipconfig_properties = ipconfig.GetProperties(utf8_strings=True)
+            else:
+                ipconfig_properties = ipconfig.GetProperties()
+            if 'Method' not in ipconfig_properties:
+                logging.info('Found ipconfig object with no method field')
+                continue
+            if ipconfig_properties['Method'] != 'dhcp':
+                logging.info('Found ipconfig object with method != dhcp')
+                continue
+            if dhcp_properties != None:
+                raise error.TestFail('Found multiple ipconfig objects '
+                                     'with method == dhcp')
+            dhcp_properties = ipconfig_properties
         if dhcp_properties is None:
             logging.info('Did not find IPConfig object with method == dhcp')
             return None
@@ -169,6 +180,7 @@
         self._shill_proxy = shill_proxy.ShillProxy()
         try:
             self._ethernet_pair = virtual_ethernet_pair.VirtualEthernetPair(
+                    interface_ns=self._namespace,
                     peer_interface_name='pseudoethernet0',
                     peer_interface_ip=None)
             self._ethernet_pair.setup()
@@ -176,7 +188,9 @@
                 raise error.TestFail('Could not create virtual ethernet pair.')
             self._server_ip = self._ethernet_pair.interface_ip
             self._server = dhcp_test_server.DhcpTestServer(
-                    self._ethernet_pair.interface_name)
+                    interface=self._ethernet_pair.interface_name,
+                    ingress_address='',
+                    namespace=self._namespace)
             self._server.start()
             if not self._server.is_healthy:
                 raise error.TestFail('Could not start DHCP test server.')
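
The interpreter branches around GetProperties() above exist because the Python 3 dbus bindings dropped the utf8_strings keyword. One way to keep the call sites flat is a small wrapper like the sketch below; get_dbus_properties is a hypothetical helper, not something this change adds:

import six


def get_dbus_properties(dbus_object):
    """Return the property dict of a D-Bus proxy on either Python version."""
    if six.PY2:
        # dbus-python on Python 2 needs this flag to return plain strings.
        return dbus_object.GetProperties(utf8_strings=True)
    return dbus_object.GetProperties()
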
diff --git a/client/cros/dhcp_test_data/README b/client/cros/dhcp_test_data/README
index aa4f8a7..c774502 100644
--- a/client/cros/dhcp_test_data/README
+++ b/client/cros/dhcp_test_data/README
@@ -2,4 +2,4 @@
 conversation where the client asks for an address on 10.9.8.xxx and the server
 grants such an address.
 
-We use these logs as part of sanity checking that DhcpPacket parsing works.
+We use these logs as part of confidence checking that DhcpPacket parsing works.
diff --git a/client/cros/dhcp_test_server.py b/client/cros/dhcp_test_server.py
index 4b0ff9e..cceeedd 100644
--- a/client/cros/dhcp_test_server.py
+++ b/client/cros/dhcp_test_server.py
@@ -54,6 +54,7 @@
 from __future__ import print_function
 
 import logging
+import six
 from six.moves import range
 import socket
 import threading
@@ -66,13 +67,34 @@
 # From socket.h
 SO_BINDTODEVICE = 25
 
+# These imports are purely for handling of namespaces
+import os
+import subprocess
+from ctypes import CDLL, get_errno
+from ctypes.util import find_library
+
+
+# Let's throw an exception (with formatted error message) in case of
+# 'setns' failure instead of returning an error code
+def errcheck(ret, func, args):
+    if ret == -1:
+        e = get_errno()
+        raise OSError(e, os.strerror(e))
+
+
+libc = CDLL(find_library('c'))
+libc.setns.errcheck = errcheck
+CLONE_NEWNET = 0x40000000
+
+
 class DhcpTestServer(threading.Thread):
     def __init__(self,
                  interface=None,
                  ingress_address="<broadcast>",
                  ingress_port=67,
                  broadcast_address="255.255.255.255",
-                 broadcast_port=68):
+                 broadcast_port=68,
+                 namespace=None):
         super(DhcpTestServer, self).__init__()
         self._mutex = threading.Lock()
         self._ingress_address = ingress_address
@@ -81,6 +103,7 @@
         self._broadcast_address = broadcast_address
         self._socket = None
         self._interface = interface
+        self._namespace = namespace
         self._stopped = False
         self._test_in_progress = False
         self._last_test_passed = False
@@ -126,28 +149,53 @@
             return False
         self._logger.info("DhcpTestServer started; opening sockets.")
         try:
+            if self._namespace:
+                self._logger.info("Moving to namespace %s.", self._namespace)
+                # Figure out where the mount bind is - ChromeOS does not
+                # follow standard /var/run/netns path so lets try to be more
+                # generic and get it from runtime
+                tgtpath = subprocess.check_output('mount | grep "netns/%s"' %
+                                                  self._namespace,
+                                                  shell=True).split()[2]
+                self._tgtns = open(tgtpath)
+                self._myns = open('/proc/self/ns/net')
+                libc.setns(self._tgtns.fileno(), CLONE_NEWNET)
+            self._logger.info("Opening socket on '%s' port %d.",
+                              self._ingress_address, self._ingress_port)
             self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-            self._logger.info("Opening socket on '%s' port %d." %
-                              (self._ingress_address, self._ingress_port))
             self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
             self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
             if self._interface is not None:
-                self._logger.info("Binding to %s" % self._interface)
-                self._socket.setsockopt(socket.SOL_SOCKET,
-                                        SO_BINDTODEVICE,
-                                        self._interface)
+                self._logger.info("Binding to %s", self._interface)
+                if six.PY2:
+                    self._socket.setsockopt(socket.SOL_SOCKET, SO_BINDTODEVICE,
+                                            self._interface)
+                else:
+                    self._socket.setsockopt(
+                            socket.SOL_SOCKET, SO_BINDTODEVICE,
+                            self._interface.encode('ISO-8859-1'))
             self._socket.bind((self._ingress_address, self._ingress_port))
             # Wait 100 ms for a packet, then return, thus keeping the thread
             # active but mostly idle.
             self._socket.settimeout(0.1)
         except socket.error as socket_error:
-            self._logger.error("Socket error: %s." % str(socket_error))
+            self._logger.error("Socket error: %s.", str(socket_error))
             self._logger.error(traceback.format_exc())
             if not self._socket is None:
                 self._socket.close()
             self._socket = None
             self._logger.error("Failed to open server socket.  Aborting.")
             return
+        except OSError as os_err:
+            self._logger.error("System error: %s.", str(os_err))
+            self._logger.error(traceback.format_exc())
+            self._logger.error("Failed to change namespace.  Aborting.")
+            return
+        finally:
+            if self._namespace:
+                self._tgtns.close()
+                libc.setns(self._myns.fileno(), CLONE_NEWNET)
+                self._myns.close()
         super(DhcpTestServer, self).start()
 
     def stop(self):
@@ -213,7 +261,7 @@
         if packet is None:
             self._logger.error("Handling rule failed to return a packet.")
             return False
-        self._logger.debug("Sending response: %s" % packet)
+        self._logger.debug("Sending response: %s", packet)
         binary_string = packet.to_binary_string()
         if binary_string is None or len(binary_string) < 1:
             self._logger.error("Packet failed to serialize to binary string.")
@@ -232,8 +280,8 @@
                 self._end_test_unsafe(False)
             try:
                 data, _ = self._socket.recvfrom(1024)
-                self._logger.info("Server received packet of length %d." %
-                                   len(data))
+                self._logger.info("Server received packet of length %d.",
+                                  len(data))
             except socket.timeout:
                 # No packets available, lets return and see if the server has
                 # been shut down in the meantime.
@@ -250,16 +298,16 @@
                                      "DHCP port?")
                 return
 
-            logging.debug("Server received a DHCP packet: %s." % packet)
+            logging.debug("Server received a DHCP packet: %s.", packet)
             if len(self._handling_rules) < 1:
-                self._logger.info("No handling rule for packet: %s." %
+                self._logger.info("No handling rule for packet: %s.",
                                   str(packet))
                 self._end_test_unsafe(False)
                 return
 
             handling_rule = self._handling_rules[0]
             response_code = handling_rule.handle(packet)
-            logging.info("Handler gave response: %d" % response_code)
+            logging.info("Handler gave response: %d", response_code)
             if response_code & dhcp_handling_rule.RESPONSE_POP_HANDLER:
                 self._handling_rules.pop(0)
 
@@ -274,7 +322,7 @@
                         return
 
             if response_code & dhcp_handling_rule.RESPONSE_TEST_FAILED:
-                self._logger.info("Handling rule %s rejected packet %s." %
+                self._logger.info("Handling rule %s rejected packet %s.",
                                   (handling_rule, packet))
                 self._end_test_unsafe(False)
                 return
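
The namespace handling above follows the usual setns() pattern: remember the current namespace via /proc/self/ns/net, open the target namespace's bind mount, call setns() before creating the socket, and switch back in the finally block so the rest of the process stays where it started. A compact, self-contained sketch of that pattern, using the conventional /var/run/netns/<name> path rather than the mount-table lookup the server performs (needs CAP_SYS_ADMIN):

import os
from ctypes import CDLL, get_errno
from ctypes.util import find_library

CLONE_NEWNET = 0x40000000
libc = CDLL(find_library('c'), use_errno=True)


def run_in_netns(ns_name, func, *args, **kwargs):
    """Run func() with the network namespace ns_name active, then restore."""
    with open('/var/run/netns/%s' % ns_name) as target, \
         open('/proc/self/ns/net') as original:
        if libc.setns(target.fileno(), CLONE_NEWNET) == -1:
            err = get_errno()
            raise OSError(err, os.strerror(err))
        try:
            return func(*args, **kwargs)
        finally:
            libc.setns(original.fileno(), CLONE_NEWNET)
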
diff --git a/client/cros/dhcp_unittest.py b/client/cros/dhcp_unittest.py
index 8d0fa02..201b1a5 100755
--- a/client/cros/dhcp_unittest.py
+++ b/client/cros/dhcp_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -23,8 +23,8 @@
 TEST_DATA_PATH_PREFIX = "client/cros/dhcp_test_data/"
 
 TEST_CLASSLESS_STATIC_ROUTE_DATA = \
-        "\x12\x0a\x09\xc0\xac\x1f\x9b\x0a" \
-        "\x00\xc0\xa8\x00\xfe"
+        b"\x12\x0a\x09\xc0\xac\x1f\x9b\x0a" \
+        b"\x00\xc0\xa8\x00\xfe"
 
 TEST_CLASSLESS_STATIC_ROUTE_LIST_PARSED = [
         (18, "10.9.192.0", "172.31.155.10"),
@@ -32,20 +32,21 @@
         ]
 
 TEST_DOMAIN_SEARCH_LIST_COMPRESSED = \
-        "\x03eng\x06google\x03com\x00\x09marketing\xC0\x04"
+        b"\x03eng\x06google\x03com\x00\x09marketing\xC0\x04"
 
 TEST_DOMAIN_SEARCH_LIST_PARSED = ("eng.google.com", "marketing.google.com")
 
 # At this time, we don't support the compression allowed in the RFC.
 # This is correct and sufficient for our purposes.
 TEST_DOMAIN_SEARCH_LIST_EXPECTED = \
-        "\x03eng\x06google\x03com\x00\x09marketing\x06google\x03com\x00"
+        b"\x03eng\x06google\x03com\x00\x09marketing\x06google\x03com\x00"
 
 TEST_DOMAIN_SEARCH_LIST1 = \
-        "w\x10\x03eng\x06google\x03com\x00"
+        b"w\x10\x03eng\x06google\x03com\x00"
 
 TEST_DOMAIN_SEARCH_LIST2 = \
-        "w\x16\x09marketing\x06google\x03com\x00"
+        b"w\x16\x09marketing\x06google\x03com\x00"
+
 
 def bin2hex(byte_str, justification=20):
     """
@@ -128,7 +129,7 @@
     return True
 
 def test_broken_domain_search_list_parsing():
-    byte_string = '\x00' * 240 + TEST_DOMAIN_SEARCH_LIST1 + TEST_DOMAIN_SEARCH_LIST2 + '\xff'
+    byte_string = b'\x00' * 240 + TEST_DOMAIN_SEARCH_LIST1 + TEST_DOMAIN_SEARCH_LIST2 + b'\xff'
     packet = dhcp_packet.DhcpPacket(byte_str=byte_string)
     if len(packet._options) != 1:
         print("Expected domain list of length 1")
@@ -167,7 +168,7 @@
     server_ip = "127.0.0.1"
     lease_time_seconds = 60
     test_timeout = 3.0
-    mac_addr = "\x01\x02\x03\x04\x05\x06"
+    mac_addr = b"\x01\x02\x03\x04\x05\x06"
     # Build up our packets and have them request some default option values,
     # like the IP we're being assigned and the address of the server assigning
     # it.
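
The switch to byte-literal test constants above is not cosmetic: under Python 3 everything these values are concatenated with or compared against (socket.inet_aton output, struct.pack output, the serialized packet) is bytes, and mixing str with bytes raises TypeError. A two-line illustration:

import socket

payload = b"\x12" + socket.inet_aton("10.9.192.0")[:3]  # bytes + bytes: fine
# "\x12" + socket.inet_aton("10.9.192.0") would raise TypeError on Python 3.
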
diff --git a/client/cros/dhcpv6_test_base.py b/client/cros/dhcpv6_test_base.py
index 990d255..71f685b 100644
--- a/client/cros/dhcpv6_test_base.py
+++ b/client/cros/dhcpv6_test_base.py
@@ -16,6 +16,7 @@
 from __future__ import print_function
 
 import logging
+import six
 from six.moves import filter
 from six.moves import range
 import time
@@ -83,7 +84,10 @@
         if device is None:
             return []
 
-        device_properties = device.GetProperties(utf8_strings=True)
+        if six.PY2:
+            device_properties = device.GetProperties(utf8_strings=True)
+        else:
+            device_properties = device.GetProperties()
         proxy = self.shill_proxy
 
         ipconfig_object = proxy.DBUS_TYPE_IPCONFIG
@@ -106,18 +110,21 @@
         """
         dhcp_properties = None
         for ipconfig in self.get_interface_ipconfig_objects(interface_name):
-          logging.info('Looking at ipconfig %r', ipconfig)
-          ipconfig_properties = ipconfig.GetProperties(utf8_strings=True)
-          if 'Method' not in ipconfig_properties:
-              logging.info('Found ipconfig object with no method field')
-              continue
-          if ipconfig_properties['Method'] != 'dhcp6':
-              logging.info('Found ipconfig object with method != dhcp6')
-              continue
-          if dhcp_properties != None:
-              raise error.TestFail('Found multiple ipconfig objects '
-                                   'with method == dhcp6')
-          dhcp_properties = ipconfig_properties
+            logging.info('Looking at ipconfig %r', ipconfig)
+            if six.PY2:
+                ipconfig_properties = ipconfig.GetProperties(utf8_strings=True)
+            else:
+                ipconfig_properties = ipconfig.GetProperties()
+            if 'Method' not in ipconfig_properties:
+                logging.info('Found ipconfig object with no method field')
+                continue
+            if ipconfig_properties['Method'] != 'dhcp6':
+                logging.info('Found ipconfig object with method != dhcp6')
+                continue
+            if dhcp_properties != None:
+                raise error.TestFail('Found multiple ipconfig objects '
+                                     'with method == dhcp6')
+            dhcp_properties = ipconfig_properties
         if dhcp_properties is None:
             logging.info('Did not find IPConfig object with method == dhcp6')
             return None
diff --git a/client/cros/ec.py b/client/cros/ec.py
index b350c7f..2d2e016 100644
--- a/client/cros/ec.py
+++ b/client/cros/ec.py
@@ -101,7 +101,7 @@
     HELLO_RE = "EC says hello"
     GET_FANSPEED_RE = "Current fan RPM: ([0-9]*)"
     SET_FANSPEED_RE = "Fan target RPM set."
-    TEMP_SENSOR_TEMP_RE = "Reading temperature...([0-9]*)"
+    TEMP_SENSOR_TEMP_RE = "([0-9]+) K"
     # <sensor idx>: <sensor type> <sensor name>
     TEMP_SENSOR_INFO_RE = "(\d+):\s+(\d+)\s+([a-zA-Z_0-9]+)"
     TOGGLE_AUTO_FAN_RE = "Automatic fan control is now on"
diff --git a/client/cros/enterprise/device_policy_lookup.py b/client/cros/enterprise/device_policy_lookup.py
index 5fc5cf7..e354ea1 100644
--- a/client/cros/enterprise/device_policy_lookup.py
+++ b/client/cros/enterprise/device_policy_lookup.py
@@ -6,13 +6,13 @@
 """
 # TODO b:169251326 terms below are set outside of this codebase
 # and should be updated when possible.
-# ("whitelist" -> "allowlist", "blacklist" --> "blocklist" or "denylist")
+# ("whitelist" -> "allowlist", "blacklist" --> "blocklist" or "denylist") # nocheck
 DEVICE_POLICY_DICT = {
     'DeviceGuestModeEnabled': 'guest_mode_enabled.guest_mode_enabled',
     'DeviceRebootOnShutdown': 'reboot_on_shutdown.reboot_on_shutdown',
     'DeviceShowUserNamesOnSignin': 'show_user_names.show_user_names',
     'DeviceAllowNewUsers': 'allow_new_users.allow_new_users',
-    'DeviceUserWhitelist': 'user_whitelist.user_whitelist',
+    'DeviceUserWhitelist': 'user_whitelist.user_whitelist', # nocheck
     'DeviceEphemeralUsersEnabled': 'ephemeral_users_enabled.ephemeral_users_enabled',
     'LoginAuthenticationBehavior': 'login_authentication_behavior.login_authentication_behavior',
     'DeviceAllowBluetooth': 'allow_bluetooth.allow_bluetooth',
@@ -52,8 +52,10 @@
     'DeviceLoginScreenIsolateOrigins': 'device_login_screen_isolate_origins.isolate_origins',
     'DeviceLoginScreenSitePerProcess': 'device_login_screen_site_per_process.site_per_process',
     'DeviceMachinePasswordChangeRate': 'device_machine_password_change_rate.rate_days',
-    'DeviceNativePrintersBlacklist': 'native_device_printers_blacklist.blacklist',
-    'DeviceNativePrintersWhitelist': 'native_device_printers_whitelist.whitelist',
+    'DeviceNativePrintersBlacklist': 'native_device_printers_blacklist.blacklist', # nocheck
+    'DeviceNativePrintersWhitelist': 'native_device_printers_whitelist.whitelist', # nocheck
+    'DevicePrintersBlocklist': 'device_printers_blocklist.blocklist',
+    'DevicePrintersAllowlist': 'device_printers_allowlist.allowlist',
     'HeartbeatEnabled': 'device_heartbeat_settings.heartbeat_enabled',
     'HeartbeatFrequency': 'device_heartbeat_settings.heartbeat_frequency',
     'ChromeOsReleaseChannel': 'release_channel.release_channel',
diff --git a/client/cros/enterprise/enterprise_fake_dmserver.py b/client/cros/enterprise/enterprise_fake_dmserver.py
index 51de394..79eba2d 100755
--- a/client/cros/enterprise/enterprise_fake_dmserver.py
+++ b/client/cros/enterprise/enterprise_fake_dmserver.py
@@ -4,9 +4,10 @@
 
 import os
 import sys
-import urllib2
 from multiprocessing import Process
 
+from six.moves import urllib
+
 from autotest_lib.client.bin import utils
 
 policy_testserver = None
@@ -17,19 +18,14 @@
 
     def __init__(self):
         """
-        Import the DM testserver from chrome source.
+        Import the DM testserver from chromeos-base/policy-testserver.
 
         """
         self.server_url = None
-        telemetry_src = '/usr/local/telemetry/src'
-        # TODO(976424): Remove 'chrome/browser/policy/test' when CL:1660660 is
-        # available in chrome in Chromium OS.
-        for path in ['chrome/browser/policy/test',
-                     'components/policy/test_support',
-                     'net/tools/testserver',
-                     'third_party/protobuf/python/google',
-                     'third_party/tlslite']:
-            sys.path.append(os.path.join(telemetry_src, path))
+        sys.path.append('/usr/local/share/policy_testserver')
+        sys.path.append('/usr/local/share/policy_testserver/proto_bindings')
+        sys.path.append('/usr/local/share/policy_testserver/testserver')
+        sys.path.append('/usr/local/share/policy_testserver/tlslite')
         global policy_testserver
         import policy_testserver
 
@@ -61,8 +57,8 @@
 
     def stop(self):
         """Terminate the fake DM server instance."""
-        if urllib2.urlopen('%stest/ping' % self.server_url).getcode() == 200:
-            urllib2.urlopen('%sconfiguration/test/exit' % self.server_url)
+        if urllib.request.urlopen('%stest/ping' % self.server_url).getcode() == 200:
+            urllib.request.urlopen('%sconfiguration/test/exit' % self.server_url)
         if self.process.is_alive():
             self.process.join()
 
diff --git a/client/cros/enterprise/enterprise_policy_base.py b/client/cros/enterprise/enterprise_policy_base.py
index eb03bbf..b8e4030 100755
--- a/client/cros/enterprise/enterprise_policy_base.py
+++ b/client/cros/enterprise/enterprise_policy_base.py
@@ -223,7 +223,7 @@
         If the AutoTest fake DM Server is used, make a JSON policy blob
         and upload it to the fake DM server.
 
-        Launch Chrome and sign in to Chrome OS. Examine the user's
+        Launch Chrome and sign in to ChromeOS. Examine the user's
         cryptohome vault, to confirm user is signed in successfully.
 
         @param user_policies: dict of mandatory user policies in
@@ -375,7 +375,7 @@
         Assumes start_arc() was run with use_clouddpc_test.
 
         Determines the policy values to pass to the test from those set in
-        Chrome OS.
+        ChromeOS.
 
         @raises error.TestFail if the test does not pass.
 
diff --git a/client/cros/enterprise/enterprise_policy_utils.py b/client/cros/enterprise/enterprise_policy_utils.py
index a55ec2e..eae8bf9 100644
--- a/client/cros/enterprise/enterprise_policy_utils.py
+++ b/client/cros/enterprise/enterprise_policy_utils.py
@@ -14,7 +14,10 @@
 import json
 import time
 
+import six
+
 from autotest_lib.client.common_lib import error
+
 # Default settings for managed user policies
 
 
@@ -101,7 +104,7 @@
             # No data
             continue
         if 'value' in v:
-            if type(v['value']) == unicode:
+            if type(v['value']) == six.text_type:
                 _remove_visual_formatting(v)
         elif isinstance(v, dict):
             _reformat_policies(v)
diff --git a/client/cros/enterprise/enterprise_policy_utils_unittest.py b/client/cros/enterprise/enterprise_policy_utils_unittest.py
index 60b33db..4953eb9 100644
--- a/client/cros/enterprise/enterprise_policy_utils_unittest.py
+++ b/client/cros/enterprise/enterprise_policy_utils_unittest.py
@@ -3,9 +3,9 @@
 # found in the LICENSE file.
 import ast
 import copy
-from mock import patch
 import os
 import unittest
+from unittest.mock import patch
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.enterprise import enterprise_policy_utils as epu
diff --git a/client/cros/enterprise/network_config.py b/client/cros/enterprise/network_config.py
index efbaa41..45fa5ee 100644
--- a/client/cros/enterprise/network_config.py
+++ b/client/cros/enterprise/network_config.py
@@ -5,10 +5,12 @@
 import random
 import string
 
+from autotest_lib.client.common_lib import error
+
 
 def generate_random_guid():
     """Create a random 16 character GUID."""
-    return ''.join(random.choice(string.hexdigits) for _ in xrange(16))
+    return ''.join(random.choice(string.hexdigits) for _ in range(16))
 
 
 class NetworkConfig(object):
diff --git a/client/cros/enterprise/policy_manager_unittest.py b/client/cros/enterprise/policy_manager_unittest.py
index 96634da..3b3992f 100644
--- a/client/cros/enterprise/policy_manager_unittest.py
+++ b/client/cros/enterprise/policy_manager_unittest.py
@@ -2,9 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 import json
-from mock import patch
 import unittest
-
+from unittest.mock import patch
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.enterprise import policy_group
diff --git a/client/cros/ethernet/network_EthCaps/_control b/client/cros/ethernet/network_EthCaps/_control
new file mode 100644
index 0000000..67efcef
--- /dev/null
+++ b/client/cros/ethernet/network_EthCaps/_control
@@ -0,0 +1,25 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to: go/tauto-py3-migration
+# To re-enable migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = "ChromeOS Team"
+NAME = "network_EthCaps"
+PURPOSE = 'Verify that LAN devices have the required capabilities.'
+CRITERIA = """
+See server/site_tests/network_EthCapsServer/control for details
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+
+DOC = """
+See server/site_tests/network_EthCapsServer/control for details
+"""
+
+job.run_test('network_EthCaps', ethname="eth0")
diff --git a/client/cros/ethernet/network_EthCaps/network_EthCaps.py b/client/cros/ethernet/network_EthCaps/network_EthCaps.py
new file mode 100644
index 0000000..7e9e3b4
--- /dev/null
+++ b/client/cros/ethernet/network_EthCaps/network_EthCaps.py
@@ -0,0 +1,241 @@
+# Copyright (c) 2011-2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections, logging, os
+
+from autotest_lib.client.bin import test, utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros import rtc
+from autotest_lib.client.cros.power import sys_power
+
+# TODO(tbroch) WOL:
+# - Should we test any of the other modes?  I chose magic as it meant that only
+#   the target device should be awaken.
+
+
+class network_EthCaps(test.test):
+    """Base class of EthCaps test.
+
+    Verify Capabilities advertised by an ethernet device work.
+    We can't verify much in reality though. But we can verify
+    WOL for built-in devices which is expected to work.
+
+    @param test.test: test instance
+    """
+    version = 1
+
+    # If WOL setting changed during test then restore to original during cleanup
+    _restore_wol = False
+
+    def _is_usb(self):
+        """Determine if device is USB (or not)
+
+        Add-on USB devices won't report the same 'Supports Wake-on' value
+        as built-in (ie PCI) ethernet devices.
+        """
+        if not self._bus_info:
+            cmd = "ethtool -i %s | awk '/bus-info/ {print $2}'" % self._ethname
+            self._bus_info = utils.system_output(cmd)
+            logging.debug("bus_info is %s", self._bus_info)
+            if not self._bus_info:
+                logging.error("ethtool -i %s has no bus-info", self._ethname)
+
+        # Two bus_info formats are reported by different device drivers:
+        # 1) "usb-0000:00:1d.0-1.2"
+        #    "0000:00:1d.0" is the "platform" info of the USB host controller
+        #    But it's obvious it's USB since that's the prefix. :)
+        if self._bus_info.startswith('usb-'):
+            return True
+
+        # 2) "2-1.2" where "2-" is USB host controller instance
+        return os.path.exists("/sys/bus/usb/devices/%s" % self._bus_info)
+
+    def _parse_ethtool_caps(self):
+        """Retrieve ethernet capabilities.
+
+        Executes ethtool command and parses various capabilities into a
+        dictionary.
+        """
+        caps = collections.defaultdict(list)
+
+        cmd = "ethtool %s" % self._ethname
+        prev_keyname = None
+        for ln in utils.system_output(cmd).splitlines():
+            cap_str = ln.strip()
+            try:
+                (keyname, value) = cap_str.split(': ')
+                caps[keyname].extend(value.split())
+                prev_keyname = keyname
+            except ValueError:
+                # keyname from previous line, add there
+                if prev_keyname:
+                    caps[prev_keyname].extend(cap_str.split())
+
+        for keyname in caps:
+            logging.debug("cap['%s'] = %s", keyname, caps[keyname])
+
+        self._caps = caps
+
+    def _check_eth_caps(self):
+        """Check necessary LAN capabilities are present.
+
+        Hardware and driver should support the following functionality:
+          1000baseT, 100baseT, 10baseT, half-duplex, full-duplex, auto-neg, WOL
+
+        Raises:
+          error.TestError if above LAN capabilities are NOT supported.
+        """
+        default_eth_caps = {
+                'Supported link modes': [
+                        '10baseT/Half', '100baseT/Half', '1000baseT/Half',
+                        '10baseT/Full', '100baseT/Full', '1000baseT/Full'
+                ],
+                'Supports auto-negotiation': ['Yes'],
+                # TODO(tbroch): Other WOL caps: 'a': arp and 's': magicsecure are
+                # they important?  Are any of these undesirable/security holes?
+                'Supports Wake-on': ['pumbg']
+        }
+        errors = 0
+
+        for keyname in default_eth_caps:
+            if keyname not in self._caps:
+                logging.error("\'%s\' not a capability of %s", keyname,
+                              self._ethname)
+                errors += 1
+                continue
+
+            for value in default_eth_caps[keyname]:
+                if value not in self._caps[keyname]:
+                    # WOL not required for USB Ethernet plug-in devices
+                    # But all USB Ethernet devices to date report "pg".
+                    # Enforce that.
+                    # RTL8153 can report 'pumbag'.
+                    # AX88178 can report 'pumbg'.
+                    if self._is_usb() and keyname == 'Supports Wake-on':
+                        if (self._caps[keyname][0].find('p') >= 0) and \
+                            (self._caps[keyname][0].find('g') >= 0):
+                            continue
+
+                    logging.error(
+                            "\'%s\' not a supported mode in \'%s\' of %s",
+                            value, keyname, self._ethname)
+                    errors += 1
+
+        if errors:
+            raise error.TestError("Eth capability checks.  See errors")
+
+    def _test_wol_magic_packet(self):
+        """Check the Wake-on-LAN (WOL) magic packet capabilities of a device.
+
+        Raises:
+          error.TestError if WOL functionality fails
+        """
+        # Magic number WOL supported
+        capname = 'Supports Wake-on'
+        if self._caps[capname][0].find('g') != -1:
+            logging.info("%s support magic number WOL", self._ethname)
+        else:
+            raise error.TestError('%s should support magic number WOL' %
+                                  self._ethname)
+
+        # Check that WOL works
+        if self._caps['Wake-on'][0] != 'g':
+            utils.system_output("ethtool -s %s wol g" % self._ethname)
+            self._restore_wol = True
+
+        # Set RTC as backup to WOL
+        before_secs = rtc.get_seconds()
+        alarm_secs = before_secs + self._suspend_secs + self._threshold_secs
+        rtc.set_wake_alarm(alarm_secs)
+
+        sys_power.do_suspend(self._suspend_secs)
+
+        after_secs = rtc.get_seconds()
+        # flush RTC as it may not work subsequently if wake was not RTC
+        rtc.set_wake_alarm(0)
+
+        suspended_secs = after_secs - before_secs
+        if suspended_secs >= (self._suspend_secs + self._threshold_secs):
+            raise error.TestError("Device woke due to RTC not WOL")
+
+    def _verify_wol_magic(self):
+        """If possible identify wake source was caused by WOL.
+
+        The bits identifying the wake source may be cleared by the time
+        userspace gets a chance to query the kernel.  However, firmware
+        might have a log and expose the wake source.  Attempt to interrogate
+        the wake source details if they are present on the system.
+
+        Returns:
+          True if verified or unable to verify due to system limitations
+          False otherwise
+        """
+        fw_log = "/sys/firmware/log"
+        if not os.path.isfile(fw_log):
+            logging.warning(
+                    "Unable to verify wake in s/w due to missing log %s",
+                    fw_log)
+            return True
+
+        log_info_str = utils.system_output("egrep '(SMI|PM1|GPE0)_STS:' %s" %
+                                           fw_log)
+        status_dict = {}
+        for ln in log_info_str.splitlines():
+            logging.debug("f/w line = %s", ln)
+            try:
+                (status_reg, status_values) = ln.strip().split(":")
+                status_dict[status_reg] = status_values.split()
+            except ValueError:
+                # no bits asserted ... empty list
+                status_dict[status_reg] = list()
+
+        for status_reg in status_dict:
+            logging.debug("status_dict[%s] = %s", status_reg,
+                          status_dict[status_reg])
+
+        return ('PM1' in status_dict['SMI_STS']) and \
+            ('WAK' in status_dict['PM1_STS']) and \
+            ('PCIEXPWAK' in status_dict['PM1_STS']) and \
+            len(status_dict['GPE0_STS']) == 0
+
+    def cleanup(self):
+        if self._restore_wol:
+            utils.system_output("ethtool -s %s wol %s" %
+                                (self._ethname, self._caps['Wake-on'][0]))
+
+    def run_once(self, ethname=None, suspend_secs=5, threshold_secs=10):
+        """Run the test.
+
+        Args:
+          ethname: string of ethernet device under test
+          suspend_secs: integer of seconds to suspend the device
+          threshold_secs: seconds of margin for deciding WOL versus RTC wake
+        """
+        if not ethname:
+            raise error.TestError("Name of ethernet device must be declared")
+
+        self._ethname = ethname
+        self._threshold_secs = threshold_secs
+        self._suspend_secs = suspend_secs
+        self._bus_info = None
+
+        self._parse_ethtool_caps()
+        self._check_eth_caps()
+
+        # ChromeOS does not require WOL support for any USB Ethernet Adapters.
+        # In fact, WoL is only known to work for PCIe Ethernet devices.
+        # We know _some_ platforms power off all USB ports when suspended.
+        # USB adapters with "pg" capabilities _might_ WoL on _some_ platforms.
+        # Allow-listing/deny-listing of platforms would be required to test
+        # WoL against USB dongles in the future.
+        if self._is_usb():
+            logging.debug("Skipping WOL test on USB Ethernet device.")
+            return
+
+        self._test_wol_magic_packet()
+        # TODO(tbroch) There is evidence in the filesystem of the wake source
+        # for coreboot but its still being flushed out.  For now only produce a
+        # warning for this check.
+        if not self._verify_wol_magic():
+            logging.warning("Unable to see evidence of WOL wake in filesystem")
diff --git a/client/cros/ethernet/network_EthernetStressPlug/control b/client/cros/ethernet/network_EthernetStressPlug/control
new file mode 100644
index 0000000..73b21f4
--- /dev/null
+++ b/client/cros/ethernet/network_EthernetStressPlug/control
@@ -0,0 +1,41 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "network_EthernetStressPlug"
+PURPOSE = "Stress-test Ethernet plug/unplug"
+CRITERIA = """
+This test fails if device fails to obtain dhcp through ethernet.
+"""
+PY_VERSION = 3
+
+# Note: This test is currently only intended for manual runs in
+# a "well controlled" enviroment. In other words, document the
+# configuration (DUT mfg/model, Ethernet mfg/model, OS version, etc.)
+# and test results (histogram).
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+
+DOC = """
+  Stress-tests simulating plugging/unplugging the ethernet dongle.
+"""
+
+# We want the default number of loops per test run
+# to be 100.
+num_iterations = 100
+interface=None # autodetect interface
+
+# Parse comma-separated args.
+for arg in args:
+    for item in arg.split(','):
+        key, val = item.split('=')
+        if key == 'num_iterations':
+            num_iterations = int(val)
+        if key == 'interface':
+            interface = val
+
+job.run_test('network_EthernetStressPlug', num_iterations=num_iterations,
+    interface=interface)
diff --git a/client/cros/ethernet/network_EthernetStressPlug/network_EthernetStressPlug.py b/client/cros/ethernet/network_EthernetStressPlug/network_EthernetStressPlug.py
new file mode 100644
index 0000000..77cbb8f
--- /dev/null
+++ b/client/cros/ethernet/network_EthernetStressPlug/network_EthernetStressPlug.py
@@ -0,0 +1,539 @@
+# Lint as python2, python3
+# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fcntl
+import logging
+import os
+import pyudev
+import random
+import re
+import socket
+import struct
+import subprocess
+import sys
+import time
+
+from autotest_lib.client.bin import test, utils
+from autotest_lib.client.common_lib import error
+
+
+class EthernetDongle(object):
+    """ Used for definining the desired module expect states. """
+
+    def __init__(self, expect_speed='100', expect_duplex='full'):
+        # Expected values for parameters.
+        self.expected_parameters = {
+                'ifconfig_status': 0,
+                'duplex': expect_duplex,
+                'speed': expect_speed,
+                'mac_address': None,
+                'ipaddress': None,
+        }
+
+    def GetParam(self, parameter):
+        """ pylint wants a docstring. """
+        return self.expected_parameters[parameter]
+
+
+class network_EthernetStressPlug(test.test):
+    """ base class for test """
+    version = 1
+
+    def initialize(self, interface=None):
+        """ Determines and defines the bus information and interface info. """
+
+        self.link_speed_failures = 0
+        sysnet = os.path.join('/', 'sys', 'class', 'net')
+
+        def get_ethernet_interface(interface):
+            """ Valid interface requires link and duplex status."""
+            avail_eth_interfaces = []
+            if interface is None:
+                # This is not the (bridged) eth dev we are looking for.
+                for x in os.listdir(sysnet):
+                    sysdev = os.path.join(sysnet, x, 'device')
+                    syswireless = os.path.join(sysnet, x, 'wireless')
+                    if os.path.exists(
+                            sysdev) and not os.path.exists(syswireless):
+                        avail_eth_interfaces.append(x)
+            else:
+                sysdev = os.path.join(sysnet, interface, 'device')
+                if os.path.exists(sysdev):
+                    avail_eth_interfaces.append(interface)
+                else:
+                    raise error.TestError(
+                            'Network Interface %s is not a device' % interface)
+
+            link_status = 'unknown'
+            duplex_status = 'unknown'
+            iface = 'unknown'
+
+            for iface in avail_eth_interfaces:
+                syslink = os.path.join(sysnet, iface, 'operstate')
+                try:
+                    link_file = open(syslink)
+                    link_status = link_file.readline().strip()
+                    link_file.close()
+                except:
+                    pass
+
+                sysduplex = os.path.join(sysnet, iface, 'duplex')
+                try:
+                    duplex_file = open(sysduplex)
+                    duplex_status = duplex_file.readline().strip()
+                    duplex_file.close()
+                except:
+                    pass
+
+                if link_status == 'up':
+                    return iface
+
+            raise error.TestError('Network Interface %s not usable (%s, %s)' %
+                                  (iface, link_status, duplex_status))
+
+        def get_net_device_path(device=''):
+            """ Uses udev to get the path of the desired internet device.
+            Args:
+                device: look for the /sys entry for this ethX device
+            Returns:
+                /sys pathname for the found ethX device or raises an error.
+            """
+            net_list = pyudev.Context().list_devices(subsystem='net')
+            for dev in net_list:
+                if dev.sys_path.endswith('net/%s' % device):
+                    return dev.sys_path
+
+            raise error.TestError('Could not find /sys device path for %s' %
+                                  device)
+
+        self.interface = get_ethernet_interface(interface)
+        self.eth_syspath = get_net_device_path(self.interface)
+        self.eth_flagspath = os.path.join(self.eth_syspath, 'flags')
+
+        # USB Dongles: "authorized" file will disable the USB port and
+        # in some cases powers off the port. In either case, net/eth* goes
+        # away. And thus "../../.." won't be valid to access "authorized".
+        # Build the pathname that goes directly to authpath.
+        auth_path = os.path.join(self.eth_syspath, '../../../authorized')
+        if os.path.exists(auth_path):
+            # now rebuild the path w/o use of '..'
+            auth_path = os.path.split(self.eth_syspath)[0]
+            auth_path = os.path.split(auth_path)[0]
+            auth_path = os.path.split(auth_path)[0]
+
+            self.eth_authpath = os.path.join(auth_path, 'authorized')
+        else:
+            self.eth_authpath = None
+
+        # Stores the status of the most recently run iteration.
+        self.test_status = {
+                'ipaddress': None,
+                'eth_state': None,
+                'reason': None,
+                'last_wait': 0
+        }
+
+        self.secs_before_warning = 10
+
+        # Represents the current number of instances in which ethernet
+        # took longer than dhcp_warning_level to come up.
+        self.warning_count = 0
+
+        # The percentage of test warnings before we fail the test.
+        self.warning_threshold = .25
+
+    def GetIPAddress(self):
+        """ Obtains the ipaddress of the interface. """
+        try:
+            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+            return socket.inet_ntoa(
+                    fcntl.ioctl(
+                            s.fileno(),
+                            0x8915,  # SIOCGIFADDR
+                            struct.pack('256s', self.interface[:15].encode()))[20:24])
+        except:
+            return None
+
+    def GetEthernetStatus(self):
+        """
+        Updates self.test_status with the status of the ethernet interface.
+
+        Returns:
+            True if the ethernet device is up.  False otherwise.
+        """
+
+        def ReadEthVal(param):
+            """ Reads the network parameters of the interface. """
+            eth_path = os.path.join('/', 'sys', 'class', 'net', self.interface,
+                                    param)
+            val = None
+            try:
+                fp = open(eth_path)
+                val = fp.readline().strip()
+                fp.close()
+            except:
+                pass
+            return val
+
+        eth_out = self.ParseEthTool()
+        ethernet_status = {
+                'ifconfig_status':
+                utils.system('ifconfig %s' % self.interface,
+                             ignore_status=True),
+                'duplex':
+                eth_out.get('Duplex'),
+                'speed':
+                eth_out.get('Speed'),
+                'mac_address':
+                ReadEthVal('address'),
+                'ipaddress':
+                self.GetIPAddress()
+        }
+
+        self.test_status['ipaddress'] = ethernet_status['ipaddress']
+
+        for param, val in list(ethernet_status.items()):
+            if self.dongle.GetParam(param) is None:
+                # For parameters whose expected value is None, only check
+                # that some value exists.
+                if not bool(val):
+                    self.test_status['eth_state'] = False
+                    self.test_status['reason'] = '%s is not ready: %s == %s' \
+                                                 % (self.interface, param, val)
+                    return False
+            else:
+                if val != self.dongle.GetParam(param):
+                    self.test_status['eth_state'] = False
+                    self.test_status['reason'] = '%s is not ready. (%s)\n' \
+                                                 "  Expected: '%s'\n" \
+                                                 "  Received: '%s'" \
+                                                 % (self.interface, param,
+                                                 self.dongle.GetParam(param),
+                                                 val)
+                    return False
+
+        self.test_status['eth_state'] = True
+        self.test_status['reason'] = None
+        return True
+
+    def _PowerEthernet(self, power=1):
+        """ Sends command to change the power state of ethernet.
+        Args:
+          power: 0 to unplug, 1 to plug.
+        """
+
+        if self.eth_authpath:
+            try:
+                fp = open(self.eth_authpath, 'w')
+                fp.write('%d' % power)
+                fp.close()
+            except:
+                raise error.TestError('Could not write %d to %s' %
+                                      (power, self.eth_authpath))
+
+        # Linux can set network link state by frobbing "flags" bitfields.
+        # Bit fields are documented in include/uapi/linux/if.h.
+        # Bit 0 is IFF_UP (link up=1 or down=0).
+        elif os.path.exists(self.eth_flagspath):
+            try:
+                fp = open(self.eth_flagspath, mode='r')
+                val = int(fp.readline().strip(), 16)
+                fp.close()
+            except:
+                raise error.TestError('Could not read %s' % self.eth_flagspath)
+
+            if power:
+                newval = val | 1
+            else:
+                newval = val & ~1
+
+            if val != newval:
+                try:
+                    fp = open(self.eth_flagspath, mode='w')
+                    fp.write('0x%x' % newval)
+                    fp.close()
+                except:
+                    raise error.TestError('Could not write 0x%x to %s' %
+                                          (newval, self.eth_flagspath))
+                logging.debug("eth flags: 0x%x to 0x%x", val, newval)
+
+        # else use ifconfig eth0 up/down to switch
+        else:
+            logging.warning(
+                    'plug/unplug event control not found. '
+                    'Use ifconfig %s %s instead', self.interface,
+                    'up' if power else 'down')
+            result = subprocess.check_call(
+                    ['ifconfig', self.interface, 'up' if power else 'down'])
+            if result:
+                raise error.TestError('Fail to change the power state of %s' %
+                                      self.interface)
+
+    def TestPowerEthernet(self, power=1, timeout=45):
+        """ Tests enabling or disabling the ethernet.
+        Args:
+            power: 0 to unplug, 1 to plug.
+            timeout: Indicates approximately the number of seconds to timeout
+                     how long we should check for the success of the ethernet
+                     state change.
+
+        Returns:
+            The time in seconds required for device to transfer to the desired
+            state.
+
+        Raises:
+            error.TestFail if the ethernet status is not in the desired state.
+        """
+
+        start_time = time.time()
+        end_time = start_time + timeout
+
+        power_str = ['off', 'on']
+        self._PowerEthernet(power)
+
+        while time.time() < end_time:
+            status = self.GetEthernetStatus()
+
+            # If GetEthernetStatus() detects the wrong link rate, "bouncing"
+            # the link _should_ recover. Keep count of how many times this
+            # happens. The test should fail if this happens "frequently".
+            if power and not status and 'speed' in self.test_status['reason']:
+                self._PowerEthernet(0)
+                time.sleep(1)
+                self._PowerEthernet(power)
+                self.link_speed_failures += 1
+                logging.warning('Link Renegotiated %s',
+                                self.test_status['reason'])
+
+            # If ethernet is enabled  and has an IP, OR
+            # if ethernet is disabled and does not have an IP,
+            # then we are in the desired state.
+            # Return the number of "seconds" for this to happen.
+            # (translated to an approximation of the number of seconds)
+            if (power and status and \
+                self.test_status['ipaddress'] is not None) \
+                or \
+                (not power and not status and \
+                self.test_status['ipaddress'] is None):
+                return time.time() - start_time
+
+            time.sleep(1)
+
+        logging.debug(self.test_status['reason'])
+        raise error.TestFail(
+                'ERROR: TIMEOUT : %s IP is %s after setting '
+                'power %s (last_wait = %.2f seconds)' %
+                (self.interface, self.test_status['ipaddress'],
+                 power_str[power], self.test_status['last_wait']))
+
+    def RandSleep(self, min_sleep, max_sleep):
+        """ Sleeps for a random duration.
+
+        Args:
+            min_sleep: Minimum sleep parameter in milliseconds.
+            max_sleep: Maximum sleep parameter in milliseconds.
+        """
+        duration = random.randint(min_sleep, max_sleep) / 1000.0
+        self.test_status['last_wait'] = duration
+        time.sleep(duration)
+
+    def _ParseEthTool_LinkModes(self, line):
+        """ Parses ethtool link mode entries.
+
+        Args:
+            line: Space-separated string of link modes that have the format
+                  (\d+)baseT/(Half|Full) (eg. 100baseT/Full).
+
+        Returns:
+            List of dictionaries where each dictionary has the format
+            { 'Speed': '<speed>', 'Duplex': '<duplex>' }
+        """
+        parameters = []
+
+        # QCA ESS EDMA driver doesn't report "Supported link modes:"
+        if 'Not reported' in line:
+            return parameters
+
+        for speed_to_parse in line.split():
+            speed_duplex = speed_to_parse.split('/')
+            parameters.append({
+                    'Speed':
+                    re.search(r'(\d*)', speed_duplex[0]).groups()[0],
+                    'Duplex':
+                    speed_duplex[1],
+            })
+        return parameters
+
+    def ParseEthTool(self):
+        """
+        Parses the output of ethtool into a dictionary and returns it,
+        with the following cleanup applied:
+            Speed: Remove the unit of speed.
+            Supported link modes: Construct a list of dictionaries.
+                                  The list is ordered (relying on ethtool)
+                                  and each dictionary contains a Speed
+                                  key-value pair and a Duplex key-value pair.
+            Advertised link modes: Same as 'Supported link modes'.
+
+        Sample Ethtool Output:
+            Supported ports: [ TP MII ]
+            Supported link modes:   10baseT/Half 10baseT/Full
+                                    100baseT/Half 100baseT/Full
+                                    1000baseT/Half 1000baseT/Full
+            Supports auto-negotiation: Yes
+            Advertised link modes:  10baseT/Half 10baseT/Full
+                                    100baseT/Half 100baseT/Full
+                                    1000baseT/Full
+            Advertised auto-negotiation: Yes
+            Speed: 1000Mb/s
+            Duplex: Full
+            Port: MII
+            PHYAD: 2
+            Transceiver: internal
+            Auto-negotiation: on
+            Supports Wake-on: pg
+            Wake-on: d
+            Current message level: 0x00000007 (7)
+            Link detected: yes
+
+        Returns:
+          A dictionary representation of the above ethtool output, or an empty
+          dictionary if no ethernet dongle is present.
+          Eg.
+            {
+              'Supported ports': '[ TP MII ]',
+              'Supported link modes': [{'Speed': '10', 'Duplex': 'Half'},
+                                       {...},
+                                       {'Speed': '1000', 'Duplex': 'Full'}],
+              'Supports auto-negotiation': 'Yes',
+              'Advertised link modes': [{'Speed': '10', 'Duplex': 'Half'},
+                                        {...},
+                                        {'Speed': '1000', 'Duplex': 'Full'}],
+              'Advertised auto-negotiation': 'Yes',
+              'Speed': '1000',
+              'Duplex': 'Full',
+              'Port': 'MII',
+              'PHYAD': '2',
+              'Transceiver': 'internal',
+              'Auto-negotiation': 'on',
+              'Supports Wake-on': 'pg',
+              'Wake-on': 'd',
+              'Current message level': '0x00000007 (7)',
+              'Link detected': 'yes',
+            }
+        """
+        parameters = {}
+        ethtool_out = os.popen('ethtool %s' %
+                               self.interface).read().split('\n')
+        if 'No data available' in ethtool_out:
+            return parameters
+
+        # bridged interfaces only have two lines of ethtool output.
+        if len(ethtool_out) < 3:
+            return parameters
+
+        # For multiline entries, keep track of the key they belong to.
+        current_key = ''
+        for line in ethtool_out:
+            current_line = line.strip().partition(':')
+            if current_line[1] == ':':
+                current_key = current_line[0]
+
+                # Assumes speed does not span more than one line.
+                # Also assigns empty string if speed field
+                # is not available.
+                if current_key == 'Speed':
+                    speed = re.search(r'^\s*(\d*)', current_line[2])
+                    parameters[current_key] = ''
+                    if speed:
+                        parameters[current_key] = speed.groups()[0]
+                elif (current_key == 'Supported link modes'
+                      or current_key == 'Advertised link modes'):
+                    parameters[current_key] = []
+                    parameters[current_key] += \
+                        self._ParseEthTool_LinkModes(current_line[2])
+                else:
+                    parameters[current_key] = current_line[2].strip()
+            else:
+                if (current_key == 'Supported link modes'
+                            or current_key == 'Advertised link modes'):
+                    parameters[current_key] += \
+                        self._ParseEthTool_LinkModes(current_line[0])
+                else:
+                    parameters[current_key] += current_line[0].strip()
+
+        return parameters
+
+    def GetDongle(self):
+        """ Returns the ethernet dongle object associated with what's connected.
+
+        Dongle uniqueness is retrieved from the 'product' file that is
+        associated with each usb dongle in
+        /sys/devices/pci.*/0000.*/usb.*/.*-.*/product.  The correct
+        dongle object is determined and returned.
+
+        Returns:
+          Object of type EthernetDongle.
+
+        Raises:
+          error.TestFail if ethernet dongle is not found.
+        """
+        ethtool_dict = self.ParseEthTool()
+
+        if not ethtool_dict:
+            raise error.TestFail('Unable to parse ethtool output for %s.' %
+                                 self.interface)
+
+        # Ethtool output is ordered in terms of speed so this obtains the
+        # fastest speed supported by dongle.
+        # QCA ESS EDMA driver doesn't report "Supported link modes".
+        max_link = ethtool_dict['Advertised link modes'][-1]
+
+        return EthernetDongle(expect_speed=max_link['Speed'],
+                              expect_duplex=max_link['Duplex'])
+
+    def run_once(self, num_iterations=1):
+        try:
+            self.dongle = self.GetDongle()
+
+            # Sleep for a random duration between .5 and 2 seconds
+            # for unplug and plug scenarios.
+            for i in range(num_iterations):
+                logging.debug('Iteration: %d start', i)
+                linkdown_time = self.TestPowerEthernet(power=0)
+                linkdown_wait = self.test_status['last_wait']
+                if linkdown_time > self.secs_before_warning:
+                    self.warning_count += 1
+
+                self.RandSleep(500, 2000)
+
+                linkup_time = self.TestPowerEthernet(power=1)
+                linkup_wait = self.test_status['last_wait']
+
+                if linkup_time > self.secs_before_warning:
+                    self.warning_count += 1
+
+                self.RandSleep(500, 2000)
+                logging.debug('Iteration: %d end (down:%f/%d up:%f/%d)', i,
+                              linkdown_wait, linkdown_time, linkup_wait,
+                              linkup_time)
+
+                if self.warning_count > num_iterations * self.warning_threshold:
+                    raise error.TestFail(
+                            'ERROR: %.2f%% of total runs (%d) '
+                            'took longer than %d seconds for '
+                            'ethernet to come up.' %
+                            (self.warning_threshold * 100, num_iterations,
+                             self.secs_before_warning))
+
+            # Link speed failures are secondary.
+            # Report after all iterations complete.
+            if self.link_speed_failures > 1:
+                raise error.TestFail('ERROR: %s : Link Renegotiated %d times' %
+                                     (self.interface,
+                                      self.link_speed_failures))
+
+        except Exception:
+            # Make sure ethernet is powered back on before re-raising.
+            self._PowerEthernet(1)
+            raise
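
For reference, the link-mode parsing and the "fastest advertised mode" choice used by GetDongle() above reduce to a small amount of standalone logic. The following is a minimal sketch that mirrors _ParseEthTool_LinkModes(); the sample input line is taken from the ethtool output shown in the ParseEthTool() docstring and is illustrative only.

import re

def parse_link_modes(line):
    """Parse '10baseT/Half 10baseT/Full ...' into speed/duplex dicts."""
    modes = []
    if 'Not reported' in line:  # e.g. QCA ESS EDMA driver
        return modes
    for token in line.split():
        speed, duplex = token.split('/')
        modes.append({'Speed': re.search(r'(\d*)', speed).groups()[0],
                      'Duplex': duplex})
    return modes

advertised = parse_link_modes('10baseT/Half 10baseT/Full '
                              '100baseT/Half 100baseT/Full 1000baseT/Full')
# ethtool lists modes slowest-first, so the last entry is the fastest mode
# the dongle advertises -- the same assumption GetDongle() relies on.
assert advertised[-1] == {'Speed': '1000', 'Duplex': 'Full'}
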
diff --git a/client/cros/factory_setup_modules.py b/client/cros/factory_setup_modules.py
index 4f8c79f..dac0d34 100644
--- a/client/cros/factory_setup_modules.py
+++ b/client/cros/factory_setup_modules.py
@@ -14,7 +14,7 @@
 extra_path = ([os.path.join(sysroot, 'usr/local/factory/py_pkg')]
               if sysroot else [])
 
-# Try to import cros, or just create a dummy module if it doesn't
+# Try to import cros, or just create a stub module if it doesn't
 # exist.
 try:
     import cros
diff --git a/client/cros/faft/config.py b/client/cros/faft/config.py
index ab31b69..c47efd7 100644
--- a/client/cros/faft/config.py
+++ b/client/cros/faft/config.py
@@ -14,3 +14,4 @@
     rpc_quit_call = 'quit'
     rpc_timeout = 120
     rpc_logfile = '/var/log/faft_xmlrpc_server.log'
+    rpc_request_timeout = 180
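
The new rpc_request_timeout value is presumably consumed by the code that builds the host-side XML-RPC proxy, which is not part of this diff. A minimal sketch of how a per-request timeout can be applied with the standard library follows; the URL/port and the way the config value is threaded through are assumptions, not taken from this patch.

from six.moves import xmlrpc_client as xmlrpclib

class TimeoutTransport(xmlrpclib.Transport):
    """Transport that applies a socket timeout to each HTTP connection."""

    def __init__(self, timeout, use_datetime=False):
        xmlrpclib.Transport.__init__(self, use_datetime=use_datetime)
        self._timeout = timeout

    def make_connection(self, host):
        connection = xmlrpclib.Transport.make_connection(self, host)
        connection.timeout = self._timeout  # honored when the socket connects
        return connection

# Hypothetical usage, mirroring rpc_request_timeout = 180 above:
proxy = xmlrpclib.ServerProxy('http://localhost:9990',
                              transport=TimeoutTransport(180),
                              allow_none=True)
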
diff --git a/client/cros/faft/rpc_functions.py b/client/cros/faft/rpc_functions.py
index 48ead03..82531e0 100644
--- a/client/cros/faft/rpc_functions.py
+++ b/client/cros/faft/rpc_functions.py
@@ -11,15 +11,12 @@
 from __future__ import print_function
 
 import binascii
-import httplib
+from six.moves import http_client as httplib
 import logging
 import os
 import signal
-import six
-import sys
 import tempfile
-import traceback
-import xmlrpclib
+from six.moves import xmlrpc_client as xmlrpclib
 
 from autotest_lib.client.common_lib import lsbrelease_utils
 from autotest_lib.client.common_lib.cros import cros_config
@@ -57,6 +54,7 @@
         self.cgpt = CgptServicer(os_if)
         self.ec = EcServicer(os_if)
         self.kernel = KernelServicer(os_if)
+        self.minios_kernel = KernelServicer(os_if, is_minios=True)
         self.rootfs = RootfsServicer(os_if)
         self.rpc_settings = RpcSettingsServicer(os_if)
         self.system = SystemServicer(os_if)
@@ -68,6 +66,7 @@
                 'cgpt': self.cgpt,
                 'ec': self.ec,
                 'kernel': self.kernel,
+                'minios': self.minios_kernel,
                 'rpc_settings': self.rpc_settings,
                 'rootfs': self.rootfs,
                 'system': self.system,
@@ -84,7 +83,6 @@
         """
         logging.debug("%s: Serving FAFT functions", self.__class__.__name__)
         self._ready = True
-        self._os_if.start_file_logging()
 
     def __exit__(self, exception, value, traceback):
         """Exit the delegate context (when XmlRpcServer.run() finishes).
@@ -93,7 +91,6 @@
         the wrong server when quitting one instance and starting another.
         """
         self._ready = False
-        self._os_if.stop_file_logging()
         logging.debug("%s: Done.", self.__class__.__name__)
 
     def quit(self):
@@ -109,47 +106,19 @@
         """
         return self._ready
 
-    def _report_error(self, fault_code, message, exc_info=False):
-        """Raise the given RPC error text, including information about last
-        exception from sys.exc_info().  The log file gets the traceback in text;
-        the raised exception keeps the old traceback (but not in text).
-
-        Note: this must be called right after the original exception, or it may
-        report the wrong exception.
-
-        @raise: xmlrpclib.Fault
+    def _report_error(self, fault_code, message):
+        """Raise the given RPC error text.
 
         @param fault_code: the status code to use
         @param message: the string message to include before exception text
-        @param exc_info: true to use the tuple from sys.exc_info()
         @return the exception to raise
 
         @type fault_code: int
         @type message: str
-        @type exc_info: bool
         @rtype: Exception
         """
-        if exc_info:
-            tb = None
-            try:
-                (exc_class, exc, tb) = sys.exc_info()
-
-                tb_str = ''.join(
-                        traceback.format_exception(exc_class, exc, tb))
-                self._os_if.log('Error: %s.\n%s' % (message, tb_str.rstrip()))
-
-                if not isinstance(exc, xmlrpclib.Fault):
-                    exc_str = ''.join(
-                            traceback.format_exception_only(exc_class, exc))
-                    exc = xmlrpclib.Fault(
-                            fault_code, '%s. %s' % (message, exc_str.rstrip()))
-                six.reraise(exc, None, tb)
-            finally:
-                del exc_info
-                del tb
-        else:
-            self._os_if.log('Error: %s' % message)
-            return xmlrpclib.Fault(fault_code, message)
+        logging.error(message)
+        return xmlrpclib.Fault(fault_code, message)
 
     def _dispatch(self, called_method, params):
         """
@@ -164,7 +133,7 @@
 
         @raise: xmlrpclib.Fault (using http error codes for fault codes)
         """
-        self._os_if.log('Called: %s%s' % (called_method, params))
+        logging.info('Called: %s%s', called_method, params)
 
         name_pieces = called_method.split('.')
 
@@ -220,18 +189,15 @@
         try:
             method = getattr(holder, method_name)
 
-        except AttributeError:
-            raise self._report_error(
-                    httplib.NOT_IMPLEMENTED,
-                    'RPC method not found: "%s"' % called_method, exc_info=True)
-
+        except AttributeError as e:
+            logging.exception(e)
+            raise
         try:
             return method(*params)
 
-        except Exception:
-            raise self._report_error(
-                    httplib.INTERNAL_SERVER_ERROR,
-                    'RPC call failed: %s()' % called_method, exc_info=True)
+        except Exception as e:
+            logging.exception(e)
+            raise
 
 
 class BiosServicer(object):
@@ -318,62 +284,62 @@
         """
         return self._bios_handler.get_section_fwid(section)
 
-    def corrupt_sig(self, section):
-        """Corrupt the requested firmware section signature.
+    def get_sig_one_byte(self, section):
+        """Get a specific byte of firmware signature of the section.
 
         @param section: A firmware section, either 'a' or 'b'.
+        @return: Tuple of (offset, byte).
         """
-        self._bios_handler.corrupt_firmware(section)
+        return self._bios_handler.get_firmware_sig_one_byte(section)
 
-    def restore_sig(self, section):
-        """Restore the previously corrupted firmware section signature.
+    def modify_sig(self, section, offset, value):
+        """Modify a byte of firmware signature of the section.
 
         @param section: A firmware section, either 'a' or 'b'.
+        @param offset: Offset of section to be modified.
+        @param value: The byte value.
         """
-        self._bios_handler.restore_firmware(section)
+        return self._bios_handler.modify_firmware_sig(section, offset, value)
 
-    def corrupt_body(self, section, corrupt_all=False):
-        """Corrupt the requested firmware section body.
+    def get_body_one_byte(self, section):
+        """Get a specific byte of firmware body of the section.
 
         @param section: A firmware section, either 'a' or 'b'.
-        @param corrupt_all (optional): Corrupt all bytes of the fw section,
-                                       rather than just one byte.
+        @return: Tuple of (offset, byte).
         """
-        self._bios_handler.corrupt_firmware_body(section, corrupt_all)
+        return self._bios_handler.get_firmware_body_one_byte(section)
 
-    def restore_body(self, section):
-        """Restore the previously corrupted firmware section body.
+    def modify_body(self, section, offset, value):
+        """Modify a byte of firmware body of the section.
 
         @param section: A firmware section, either 'a' or 'b'.
+        @param offset: Offset of section to be modified.
+        @param value: The byte value.
         """
-        self._bios_handler.restore_firmware_body(section)
+        return self._bios_handler.modify_firmware_body(section, offset, value)
 
-    def _modify_version(self, section, delta):
-        """Modify firmware version for the requested section, by adding delta.
+    def corrupt_mrc_cache(self):
+        """Corrupt MRC cache.
 
-        The passed in delta, a positive or a negative number, is added to the
-        original firmware version.
+        NOTE: This method is not idempotent. A second call will still change the
+        flashrom content of the client.
         """
-        original_version = self.get_version(section)
-        new_version = original_version + delta
-        flags = self._bios_handler.get_section_flags(section)
-        self._os_if.log('Setting firmware section %s version from %d to %d' %
-                        (section, original_version, new_version))
-        self._bios_handler.set_section_version(
-                section, new_version, flags, write_through=True)
-
-    def move_version_backward(self, section):
-        """Decrement firmware version for the requested section."""
-        self._modify_version(section, -1)
-
-    def move_version_forward(self, section):
-        """Increase firmware version for the requested section."""
-        self._modify_version(section, 1)
+        self._bios_handler.corrupt_mrc_cache()
 
     def get_version(self, section):
         """Retrieve firmware version of a section."""
         return self._bios_handler.get_section_version(section)
 
+    def set_version(self, section, version):
+        """Set firmware version of a section."""
+        flags = self._bios_handler.get_section_flags(section)
+        logging.info('Setting firmware section %s version to %d', section,
+                     version)
+        self._bios_handler.set_section_version(section,
+                                               version,
+                                               flags,
+                                               write_through=True)
+
     def get_datakey_version(self, section):
         """Return firmware data key version."""
         return self._bios_handler.get_section_datakey_version(section)
@@ -471,7 +437,7 @@
         """Set kernel attributes for either partition (or both)."""
         partitions = {'A': a, 'B': b}
         rootdev = self._os_if.get_root_dev()
-        modifiable_attributes = self._cgpt_handler.ATTR_TO_COMMAND.keys()
+        modifiable_attributes = list(self._cgpt_handler.ATTR_TO_COMMAND.keys())
         for partition_name in partitions.keys():
             partition = partitions[partition_name]
             if partition is None:
@@ -504,7 +470,7 @@
                     '/usr/share/vboot/devkeys', 'ec')
 
         else:
-            self._os_if.log('No EC is reported by mosys (rc=%s).' % ec_status)
+            logging.info('No EC is reported by mosys (rc=%s).', ec_status)
 
     @property
     def _ec_handler(self):
@@ -524,13 +490,46 @@
         """Reload the firmware image that may be changed."""
         self._ec_handler.new_image()
 
-    def get_version(self):
-        """Get EC version via mosys.
+    def get_version(self, target=None):
+        """Get the requested EC version.
 
-        @return: A string of the EC version.
+        @param target: 'ro'/'rw', or None to signify the active fw.
+                       On a Wilco EC, this would be ignored, since Wilco
+                       doesn't use ro/rw/active versions.
+        @return: A string of the requested EC version, or '' if DUT has no EC.
         """
-        return self._os_if.run_shell_command_get_output(
-                'mosys ec info | sed "s/.*| //"')[0]
+        CROS_EC_FILE = '/dev/cros_ec'
+        WILCO_VERSION_FILE = '/sys/bus/platform/devices/GOOG000C:00/version'
+
+        # If DUT has a Chrome EC, parse `ectool version` for the target.
+        if self._os_if.path_exists(CROS_EC_FILE):
+            out = self._os_if.run_shell_command_get_output('ectool version')
+            keyvals = dict([line.split(':', 1) for line in out])
+            ro = keyvals['RO version'].strip()
+            rw = keyvals['RW version'].strip()
+            active = keyvals['Firmware copy'].strip()
+            if target is None:
+                if active == 'RO':
+                    return ro
+                elif active == 'RW':
+                    return rw
+                raise ValueError(
+                        'Unexpected active FW type: want RO/RW; got ' + active)
+            elif target.lower() == 'ro':
+                return ro
+            elif target.lower() == 'rw':
+                return rw
+            raise ValueError(
+                    'Invalid EC version target: want ro/rw/None; got ' +
+                    target)
+        # If DUT has a Wilco EC, read sysfs for the EC version.
+        # Wilco doesn't use RO/RW/active, so ignore target.
+        elif self._os_if.path_exists(WILCO_VERSION_FILE):
+            with open(WILCO_VERSION_FILE, "r") as f:
+                return f.read().strip()
+        # If DUT doesn't have an EC, return the empty string.
+        else:
+            return ''
 
     def get_active_hash(self):
         """Get hash of active EC RW firmware."""
@@ -555,9 +554,12 @@
     def corrupt_body(self, section):
         """Corrupt the requested EC section body.
 
+        NOTE: This method is not idempotent. A second call will still change the
+        flashrom content of the client.
+
         @param section: An EC section, either 'a' or 'b'.
         """
-        self._ec_handler.corrupt_firmware_body(section, corrupt_all=True)
+        self._ec_handler.corrupt_firmware_body(section)
 
     def dump_firmware(self, ec_path):
         """Dump the current EC firmware to a file, specified by ec_path.
@@ -582,7 +584,10 @@
         @return: {'enabled': True/False, 'start': '0x0', 'length': '0x0', ...}
         @rtype: dict
         """
-        return self._ec_handler.get_write_protect_status()
+        logging.debug("Calling self._ec_handler.get_write_protect_status")
+        rec = self._ec_handler.get_write_protect_status()
+        logging.debug("Returning %s", rec)
+        return rec
 
     def is_efs(self):
         """Return True if the EC supports EFS."""
@@ -616,12 +621,14 @@
 class KernelServicer(object):
     """Class to service all Kernel RPCs"""
 
-    def __init__(self, os_if):
+    def __init__(self, os_if, is_minios=False):
         """
         @type os_if: os_interface.OSInterface
+        @param is_minios: True if it is a MiniOS kernel; otherwise, False.
         """
         self._os_if = os_if
-        self._real_kernel_handler = kernel_handler.KernelHandler(self._os_if)
+        self._real_kernel_handler = kernel_handler.KernelHandler(
+                self._os_if, is_minios)
 
     @property
     def _kernel_handler(self):
@@ -657,8 +664,8 @@
         """
         original_version = self._kernel_handler.get_version(section)
         new_version = original_version + delta
-        self._os_if.log('Setting kernel section %s version from %d to %d' %
-                        (section, original_version, new_version))
+        logging.info('Setting kernel section %s version from %d to %d',
+                     section, original_version, new_version)
         self._kernel_handler.set_version(section, new_version)
 
     def move_version_backward(self, section):
@@ -783,14 +790,6 @@
         """
         return True
 
-    def dump_log(self, remove_log=False):
-        """Dump the log file.
-
-        @param remove_log: Remove the log file after dump.
-        @return: String of the log file content.
-        """
-        return self._os_if.dump_log(remove_log=remove_log)
-
     def run_shell_command(self, command, block=True):
         """Run shell command.
 
@@ -861,6 +860,26 @@
         return self._os_if.run_shell_command_get_output(
                 'crossystem %s' % key)[0]
 
+    def get_boot_mode(self):
+        """Get the current firmware boot mode.
+
+        @return: Either 'normal', 'dev', or 'rec'.
+        @raise: ValueError if mainfw_type and devsw_boot do not correspond to
+                an expected boot mode combination.
+        """
+        mainfw_type = self._os_if.cs.mainfw_type
+        devsw_boot = self._os_if.cs.devsw_boot
+        if mainfw_type == 'normal' and devsw_boot == '0':
+            return 'normal'
+        elif mainfw_type == 'developer' and devsw_boot == '1':
+            return 'dev'
+        elif mainfw_type == 'recovery':
+            return 'rec'
+        else:
+            raise ValueError('Unexpected mainfw_type/devsw_boot combination: '
+                             'mainfw_type=%s, devsw_boot=%s' %
+                             (mainfw_type, devsw_boot))
+
     def get_root_dev(self):
         """Get the name of root device without partition number.
 
@@ -892,6 +911,21 @@
         if count:
             self._os_if.cs.fw_try_count = count
 
+    def get_minios_priority(self):
+        """Get minios_priority value, which denotes the minios image to try
+        first. (A or B)
+
+        @return: 'A' or 'B'
+        """
+        return self._os_if.cs.minios_priority
+
+    def set_minios_priority(self, priority):
+        """Set minios_priority to A or B.
+
+        @param priority: MiniOS partition to try first (A or B)
+        """
+        self._os_if.cs.minios_priority = priority
+
     def get_fw_vboot2(self):
         """Get fw_vboot2."""
         try:
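
With corrupt_sig/restore_sig removed, the equivalent round trip is now expressed through the new one-byte getters and setters. A sketch of how a caller holding a BiosServicer (or its RPC proxy) might use them; the function name, the section choice, and the placeholder comment are illustrative, not part of this patch.

def flip_and_restore_sig_byte(bios_servicer, section='a'):
    """Corrupt one byte of a firmware signature, then put it back."""
    offset, original = bios_servicer.get_sig_one_byte(section)
    try:
        # modify_sig() wraps the value modulo 0x100, so +1 is always safe.
        bios_servicer.modify_sig(section, offset, original + 1)
        # ... reboot the DUT / verify the expected fallback behavior here ...
    finally:
        # Write the original byte back so the section verifies again.
        bios_servicer.modify_sig(section, offset, original)
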
diff --git a/client/cros/faft/rpc_server.py b/client/cros/faft/rpc_server.py
index 132cb6e..2a52011 100755
--- a/client/cros/faft/rpc_server.py
+++ b/client/cros/faft/rpc_server.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/faft/utils/cgpt_handler.py b/client/cros/faft/utils/cgpt_handler.py
index 52782fc..3a50c49 100644
--- a/client/cros/faft/utils/cgpt_handler.py
+++ b/client/cros/faft/utils/cgpt_handler.py
@@ -118,7 +118,7 @@
 
         current = self.get_partition(device, partition_name)
         options = []
-        for prop, value in partition_value.iteritems():
+        for prop, value in partition_value.items():
             try:
                 if value == current[prop]:
                     continue
diff --git a/client/cros/faft/utils/firmware_check_keys.py b/client/cros/faft/utils/firmware_check_keys.py
index 4054211..2dc6a04 100644
--- a/client/cros/faft/utils/firmware_check_keys.py
+++ b/client/cros/faft/utils/firmware_check_keys.py
@@ -2,6 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import print_function
+
 import glob
 import logging
 import pprint
@@ -22,7 +24,7 @@
         for evdev in glob.glob("/dev/input/event*"):
             device = InputDevice(evdev)
             if device.is_keyboard():
-                print 'keyboard device %s' % evdev
+                print('keyboard device %s' % evdev)
                 self.device = device
 
     def _keyboard_input(self):
@@ -31,8 +33,8 @@
         while True:
             self.ev.read(self.device.f)
             if self.ev.code != KEY_RESERVED:
-                print "EventCode is %d value is %d" % (self.ev.code,
-                                                       self.ev.value)
+                print("EventCode is %d value is %d" %
+                      (self.ev.code, self.ev.value))
                 if self.ev.type == 0 or self.ev.type == 1:
                     self.actual_output.append(self.ev.code)
                     index = index + 1
@@ -55,9 +57,13 @@
         # the down and up events.  We're not interested in precisely how many
         # repeats of the key there is, just what is the sequence of keys,
         # so, we will make the list unique.
-        uniq_actual_output = sorted(list(set(self.actual_output)))
+        uniq_actual_output = []
+        for i, key in enumerate(self.actual_output):
+            if key not in self.actual_output[:i]:
+                uniq_actual_output.append(key)
+
         if uniq_actual_output != expected_sequence:
-            print 'Keys mismatched %s' % pprint.pformat(uniq_actual_output)
+            print('Keys mismatched %s' % pprint.pformat(uniq_actual_output))
             return -1
-        print 'Key match expected: %s' % pprint.pformat(uniq_actual_output)
+        print('Key match expected: %s' % pprint.pformat(uniq_actual_output))
         return len(uniq_actual_output)
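
The rewritten de-duplication above deliberately keeps the order in which key codes were first seen, which the previous sorted(list(set(...))) did not. A small standalone illustration (the key codes are made up):

def dedup_preserve_order(codes):
    """Drop repeated codes while keeping first-seen order, as above."""
    uniq = []
    for i, code in enumerate(codes):
        if code not in codes[:i]:
            uniq.append(code)
    return uniq

events = [44, 30, 44, 2]                             # hypothetical key codes
assert dedup_preserve_order(events) == [44, 30, 2]   # arrival order kept
assert sorted(set(events)) == [2, 30, 44]            # sorting loses it
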
diff --git a/client/cros/faft/utils/firmware_updater.py b/client/cros/faft/utils/firmware_updater.py
index 435cf52..634b390 100644
--- a/client/cros/faft/utils/firmware_updater.py
+++ b/client/cros/faft/utils/firmware_updater.py
@@ -7,7 +7,9 @@
 """
 import array
 import json
+import logging
 import os
+import six
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import chip_utils
@@ -168,13 +170,13 @@
 
     def stop_daemon(self):
         """Stop update-engine daemon."""
-        self.os_if.log('Stopping %s...' % self.DAEMON)
+        logging.info('Stopping %s...', self.DAEMON)
         cmd = 'status %s | grep stop || stop %s' % (self.DAEMON, self.DAEMON)
         self.os_if.run_shell_command(cmd)
 
     def start_daemon(self):
         """Start update-engine daemon."""
-        self.os_if.log('Starting %s...' % self.DAEMON)
+        logging.info('Starting %s...', self.DAEMON)
         cmd = 'status %s | grep start || start %s' % (self.DAEMON, self.DAEMON)
         self.os_if.run_shell_command(cmd)
 
@@ -203,7 +205,7 @@
         handler.new_image(image_path)
         fwid = handler.get_section_fwid(section)
         if fwid is not None:
-            return str(fwid)
+            return str(fwid, 'utf-8')
         else:
             return None
 
@@ -363,14 +365,11 @@
             work_path = self._work_path
         self.os_if.run_shell_command(
                 '/usr/share/vboot/bin/resign_firmwarefd.sh '
-                '%s %s %s %s %s %s %s %s' %
+                '%s %s %s %s %s %s' %
                 (os.path.join(work_path, self._bios_path),
                  os.path.join(self._temp_path, 'output.bin'),
                  os.path.join(self._keys_path, 'firmware_data_key.vbprivk'),
                  os.path.join(self._keys_path, 'firmware.keyblock'),
-                 os.path.join(self._keys_path,
-                              'dev_firmware_data_key.vbprivk'),
-                 os.path.join(self._keys_path, 'dev_firmware.keyblock'),
                  os.path.join(self._keys_path, 'kernel_subkey.vbpubk'),
                  ('%d' % version) if version is not None else ''))
         self.os_if.copy_file(
@@ -526,7 +525,7 @@
         def _has_emulate(option):
             return option == '--emulate' or option.startswith('--emulate=')
 
-        if self.os_if.test_mode and not filter(_has_emulate, options):
+        if self.os_if.test_mode and not list(filter(_has_emulate, options)):
             # if in test mode, forcibly use --emulate, if not already used.
             fake_bios = os.path.join(self._temp_path, 'rpc-test-fake-bios.bin')
             if not os.path.exists(fake_bios):
@@ -557,7 +556,7 @@
 
         Finds bios.bin on the DUT and sets up a temp dir to operate on
         bios.bin.  If a bios.bin was specified, it is copied to the DUT
-        and used instead of the native bios.bin.
+        and used instead of the built-in bios.bin.
 
         @return: The cbfs work directory path.
         """
@@ -633,8 +632,8 @@
         try:
             self.os_if.run_shell_command(extract_cmd)
             if not self.os_if.path_exists(local_filename):
-                self.os_if.log("Warning: file does not exist after extracting:"
-                               " %s" % local_filename)
+                logging.warning("File does not exist after extracting:"
+                                " %s", local_filename)
             return os.path.abspath(local_filename)
         except error.CmdError:
             # already logged by run_shell_command()
@@ -820,7 +819,7 @@
         @type filename: str
         @rtype: str
         """
-        if not isinstance(filename, basestring):
+        if not isinstance(filename, six.string_types):
             raise FirmwareUpdaterError("Filename must be a string: %s" %
                                        repr(filename))
         src_bios = os.path.join(self._work_path, self._bios_path)
@@ -880,4 +879,3 @@
             handler = self._get_handler('bios')
         handler.set_gbb_flags(flags)
         handler.dump_whole(filename)
-
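
With the Python 3 conversion above, FWIDs come back from the flashrom handler as NUL-padded bytes and are decoded at the updater layer. A one-line illustration of that decode path, assuming strip_null behavior as in the handler; the FWID value itself is made up.

raw_fwid = b'Google_Board.12345.0.0\x00\x00\x00'   # hypothetical padded FWID
fwid = str(raw_fwid.rstrip(b'\0'), 'utf-8')        # same decode as above
assert fwid == 'Google_Board.12345.0.0'
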
diff --git a/client/cros/faft/utils/flashrom_handler.py b/client/cros/faft/utils/flashrom_handler.py
index cf5a4e8..f5c90e6 100644
--- a/client/cros/faft/utils/flashrom_handler.py
+++ b/client/cros/faft/utils/flashrom_handler.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -11,12 +11,15 @@
 """
 
 import hashlib
+import logging
 import os
 import struct
 import tempfile
 
 import six
 
+import common
+
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import chip_utils
 from autotest_lib.client.cros.faft.utils import saft_flashrom_util
@@ -272,9 +275,9 @@
                     % (self.target, self._unavailable_err))
 
         if image_file and allow_fallback and not os.path.isfile(image_file):
-            self.os_if.log(
-                    "Using %s flash contents instead of missing image: %s"
-                    % (self.target.upper(), image_file))
+            logging.info(
+                    "Using %s flash contents instead of missing image: %s",
+                    self.target.upper(), image_file)
             image_file = None
 
         self.new_image(image_file)
@@ -316,7 +319,7 @@
 
         self.os_if.create_dir(self.section_file())
 
-        for section in self.fv_sections.itervalues():
+        for section in self.fv_sections.values():
             for subsection_name in section.names():
                 if not subsection_name:
                     continue
@@ -362,15 +365,16 @@
 
         gbb_section = self.fum.get_section(self.image, 'FV_GBB')
 
-        # do some sanity checks
+        # do some confidence checks
         try:
             sig, _, rootk_offs, rootk_size = struct.unpack_from(
                     gbb_header_format, gbb_section)
-        except struct.error, e:
+        except struct.error as e:
             raise FlashromHandlerError(e)
 
-        if sig != '$GBB' or (rootk_offs + rootk_size) > len(gbb_section):
-            raise FlashromHandlerError('Bad gbb header')
+        if sig != b'$GBB' or (rootk_offs + rootk_size) > len(gbb_section):
+            raise FlashromHandlerError("Bad gbb header sig:%s len:%s" %
+                                       (sig, len(gbb_section)))
 
         key_body_offset, key_body_size = struct.unpack_from(
                 pubk_header_format, gbb_section, rootk_offs)
@@ -387,7 +391,7 @@
 
         # All checks passed, let's store the key in a file.
         self.pub_key_file = self.os_if.state_dir_file(self.PUB_KEY_FILE_NAME)
-        with open(self.pub_key_file, 'w') as key_f:
+        with open(self.pub_key_file, 'wb') as key_f:
             key = gbb_section[rootk_offs:rootk_offs + key_body_offset +
                               key_body_size]
             key_f.write(key)
@@ -403,7 +407,7 @@
         exception with the appropriate error message text.
         """
 
-        for section in self.fv_sections.itervalues():
+        for section in self.fv_sections.values():
             if section.get_sig_name():
                 cmd = 'vbutil_firmware --verify %s --signpubkey %s  --fv %s' % (
                         self.section_file(section.get_sig_name()),
@@ -411,24 +415,8 @@
                         self.section_file(section.get_body_name()))
                 self.os_if.run_shell_command(cmd)
 
-    def _modify_section(self,
-                        section,
-                        delta,
-                        body_or_sig=False,
-                        corrupt_all=False):
-        """Modify a firmware section inside the image, either body or signature.
-
-        If corrupt_all is set, the passed in delta is added to all bytes in the
-        section. Otherwise, the delta is added to the value located at 2% offset
-        into the section blob, either body or signature.
-
-        Calling this function again for the same section the complimentary
-        delta value would restore the section contents.
-        """
-
-        if not self.image:
-            raise FlashromHandlerError(
-                    'Attempt at using an uninitialized object')
+    def _get_subsection_name(self, section, body_or_sig):
+        """Get the subsection name of body or signature."""
         if section not in self.fv_sections:
             raise FlashromHandlerError('Unknown FW section %s' % section)
 
@@ -437,80 +425,64 @@
             subsection_name = self.fv_sections[section].get_body_name()
         else:
             subsection_name = self.fv_sections[section].get_sig_name()
-        blob = self.fum.get_section(self.image, subsection_name)
-
-        # Modify the byte in it within 2% of the section blob.
-        modified_index = len(blob) / 50
-        if corrupt_all:
-            blob_list = [('%c' % ((ord(x) + delta) % 0x100)) for x in blob]
-        else:
-            blob_list = list(blob)
-            blob_list[modified_index] = (
-                    '%c' % ((ord(blob[modified_index]) + delta) % 0x100))
-        self.image = self.fum.put_section(self.image, subsection_name,
-                                          ''.join(blob_list))
-
         return subsection_name
 
-    def corrupt_section(self, section, corrupt_all=False):
-        """Corrupt a section signature of the image"""
+    def _get_subsection_one_byte(self, subsection):
+        """Get a specific byte within 2% of the subsection."""
+        if not self.image:
+            raise FlashromHandlerError(
+                    'Attempt at using an uninitialized object')
+        blob = self.fum.get_section(self.image, subsection)
+        offset = len(blob) // 50
+        return offset, blob[offset]
 
-        return self._modify_section(
-                section,
-                self.DELTA,
-                body_or_sig=False,
-                corrupt_all=corrupt_all)
+    def get_firmware_sig_one_byte(self, section):
+        """Get a specific byte of firmware signature of the section."""
+        subsection = self._get_subsection_name(section, body_or_sig=False)
+        return self._get_subsection_one_byte(subsection)
 
-    def corrupt_section_body(self, section, corrupt_all=False):
-        """Corrupt a section body of the image"""
+    def get_firmware_body_one_byte(self, section):
+        """Get a specific byte of firmware body of the section."""
+        subsection = self._get_subsection_name(section, body_or_sig=True)
+        return self._get_subsection_one_byte(subsection)
 
-        return self._modify_section(
-                section, self.DELTA, body_or_sig=True, corrupt_all=corrupt_all)
+    def _modify_subsection(self, subsection, offset, value):
+        """Modify a byte of subsection in the FLASHROM."""
+        if not self.image:
+            raise FlashromHandlerError(
+                    'Attempt at using an uninitialized object')
+        blob = self.fum.get_section(self.image, subsection)
+        blob_list = list(blob)
+        blob_list[offset] = value % 0x100
+        self.image = self.fum.put_section(self.image, subsection,
+                                          bytes(blob_list))
+        self.fum.write_partial(self.image, (subsection, ))
 
-    def restore_section(self, section, restore_all=False):
-        """Restore a previously corrupted section signature of the image."""
+    def modify_firmware_sig(self, section, offset, value):
+        """Modify a byte in firmware signature in the FLASHROM."""
+        subsection = self._get_subsection_name(section, body_or_sig=False)
+        self._modify_subsection(subsection, offset, value)
 
-        return self._modify_section(
-                section,
-                -self.DELTA,
-                body_or_sig=False,
-                corrupt_all=restore_all)
+    def modify_firmware_body(self, section, offset, value):
+        """Modify a byte in firmware body in the FLASHROM."""
+        subsection = self._get_subsection_name(section, body_or_sig=True)
+        self._modify_subsection(subsection, offset, value)
 
-    def restore_section_body(self, section, restore_all=False):
-        """Restore a previously corrupted section body of the image."""
+    def corrupt_firmware_body(self, section):
+        """Corrupt the whole firmware body in the FLASHROM."""
+        subsection = self._get_subsection_name(section, body_or_sig=True)
+        if not self.image:
+            raise FlashromHandlerError(
+                    'Attempt at using an uninitialized object')
+        blob = self.fum.get_section(self.image, subsection)
+        blob_list = [(x + self.DELTA) % 0x100 for x in blob]
+        self.image = self.fum.put_section(self.image, subsection,
+                                          bytes(blob_list))
+        self.fum.write_partial(self.image, (subsection, ))
 
-        return self._modify_section(
-                section,
-                -self.DELTA,
-                body_or_sig=True,
-                corrupt_all=restore_all)
-
-    def corrupt_firmware(self, section, corrupt_all=False):
-        """Corrupt a section signature in the FLASHROM!!!"""
-
-        subsection_name = self.corrupt_section(
-                section, corrupt_all=corrupt_all)
-        self.fum.write_partial(self.image, (subsection_name, ))
-
-    def corrupt_firmware_body(self, section, corrupt_all=False):
-        """Corrupt a section body in the FLASHROM!!!"""
-
-        subsection_name = self.corrupt_section_body(
-                section, corrupt_all=corrupt_all)
-        self.fum.write_partial(self.image, (subsection_name, ))
-
-    def restore_firmware(self, section, restore_all=False):
-        """Restore the previously corrupted section sig in the FLASHROM!!!"""
-
-        subsection_name = self.restore_section(
-                section, restore_all=restore_all)
-        self.fum.write_partial(self.image, (subsection_name, ))
-
-    def restore_firmware_body(self, section, restore_all=False):
-        """Restore the previously corrupted section body in the FLASHROM!!!"""
-
-        subsection_name = self.restore_section_body(section, restore_all=False)
-        self.fum.write_partial(self.image, (subsection_name, ))
+    def corrupt_mrc_cache(self):
+        """Corrupt MRC cache in the FLASHROM."""
+        self.corrupt_firmware_body('rec')
 
     def firmware_sections_equal(self):
         """Check if firmware sections A and B are equal.
@@ -577,7 +549,7 @@
         if not self.image:
             raise FlashromHandlerError(
                     'Attempt at using an uninitialized object')
-        open(filename, 'w').write(self.image)
+        open(filename, 'wb').write(self.image)
 
     def dump_partial(self, subsection_name, filename):
         """Write the subsection part into a file."""
@@ -586,7 +558,7 @@
             raise FlashromHandlerError(
                     'Attempt at using an uninitialized object')
         blob = self.fum.get_section(self.image, subsection_name)
-        open(filename, 'w').write(blob)
+        open(filename, 'wb').write(blob)
 
     def dump_section_body(self, section, filename):
         """Write the body of a firmware section into a file"""
@@ -610,7 +582,7 @@
         gbb_section = self.fum.get_section(self.image, 'FV_GBB')
         try:
             _, gbb_flags = struct.unpack_from(gbb_header_format, gbb_section)
-        except struct.error, e:
+        except struct.error as e:
             raise FlashromHandlerError(e)
         return gbb_flags
 
@@ -621,7 +593,7 @@
         gbb_section = self.fum.get_section(self.image, section_name)
         try:
             formatted_flags = struct.pack(gbb_header_format, flags)
-        except struct.error, e:
+        except struct.error as e:
             raise FlashromHandlerError(e)
         gbb_section = gbb_section[:12] + formatted_flags + gbb_section[16:]
         self.write_partial(section_name, gbb_section, write_through)
@@ -743,7 +715,7 @@
 
         @type section: str
         @type strip_null: bool
-        @rtype: str | None
+        @rtype: bytes | None
 
         """
         subsection_name = self.fv_sections[section].get_fwid_name()
@@ -751,7 +723,7 @@
             return None
         blob = self.fum.get_section(self.image, subsection_name)
         if strip_null:
-            blob = blob.rstrip('\0')
+            blob = blob.rstrip(b'\0')
         return blob
 
     def set_section_body(self, section, blob, write_through=False):
@@ -788,6 +760,9 @@
         """
         if (self.get_section_version(section) == version
                     and self.get_section_flags(section) == flags):
+            logging.info(
+                    'Nothing to do: section already at version %s, flags %s',
+                    self.get_section_version(section),
+                    self.get_section_flags(section))
             return  # No version or flag change, nothing to do.
         if version < 0:
             raise FlashromHandlerError(
@@ -812,13 +787,13 @@
         self.os_if.run_shell_command(cmd)
 
         #  Pad the new signature.
-        with open(sig_name, 'a') as sig_f:
+        with open(sig_name, 'ab') as sig_f:
             f_size = os.fstat(sig_f.fileno()).st_size
-            pad = '\0' * (sig_size - f_size)
+            pad = b'\0' * (sig_size - f_size)
             sig_f.write(pad)
 
         # Inject the new signature block into the image
-        with open(sig_name, 'r') as sig_f:
+        with open(sig_name, 'rb') as sig_f:
             new_sig = sig_f.read()
         self.write_partial(fv_section.get_sig_name(), new_sig, write_through)
 
@@ -845,8 +820,8 @@
                     "FWID (%s, %s) is empty: %s" %
                     (self.target.upper(), section.upper(), repr(fwid)))
 
-        fwid = fwid.rstrip('\0')
-        suffix = self.FWID_MOD_DELIMITER + section.upper()
+        fwid = fwid.rstrip(b'\0')
+        suffix = bytes(self.FWID_MOD_DELIMITER + section.upper(), 'utf-8')
 
         if suffix in fwid:
             raise FlashromHandlerError(
@@ -858,7 +833,7 @@
             fwid = fwid[:fwid_size - len(suffix)]
         fwid += suffix
 
-        padded_fwid = fwid.ljust(fwid_size, '\0')
+        padded_fwid = fwid.ljust(fwid_size, b'\0')
         self.set_section_fwid(section, padded_fwid)
         return fwid
 
@@ -886,14 +861,15 @@
                     "FWID (%s, %s) is empty: %s" %
                     (self.target.upper(), section.upper(), repr(fwid)))
 
-        fwid = fwid.rstrip('\0')
-        mod_indicator = self.FWID_MOD_DELIMITER + section.upper()
+        fwid = fwid.rstrip(b'\0')
+        mod_indicator = bytes(self.FWID_MOD_DELIMITER + section.upper(),
+                              'utf-8')
 
         # Remove any suffix, and return the suffix if found.
         if mod_indicator in fwid:
             (stripped_fwid, remainder) = fwid.split(mod_indicator, 1)
 
-            padded_fwid = stripped_fwid.ljust(fwid_size, '\0')
+            padded_fwid = stripped_fwid.ljust(fwid_size, b'\0')
             self.set_section_fwid(section, padded_fwid, write_through)
 
             return fwid
diff --git a/client/cros/faft/utils/flashrom_handler_unittest.py b/client/cros/faft/utils/flashrom_handler_unittest.py
index 9d74df1..4c56c8e 100644
--- a/client/cros/faft/utils/flashrom_handler_unittest.py
+++ b/client/cros/faft/utils/flashrom_handler_unittest.py
@@ -1,7 +1,8 @@
+#!/usr/bin/python3
 """Unit tests for flashrom_handler.py."""
 
-import mock
 import unittest
+from unittest import mock
 
 from autotest_lib.client.common_lib import autotemp
 from autotest_lib.client.cros.faft.utils import (os_interface,
@@ -22,7 +23,7 @@
         self.good_flashrom = mock.Mock()
         attrs = {
                 'communicate.return_value':
-                ('working flashrom stdout', 'working flashrom stderr')
+                (b'working flashrom stdout', b'working flashrom stderr')
         }
         self.good_flashrom.configure_mock(**attrs)
         self.good_flashrom.returncode = 0
@@ -30,7 +31,7 @@
         self.bad_flashrom = mock.Mock()
         attrs = {
                 'communicate.return_value':
-                ('broken flashrom stdout', 'broken flashrom stderr')
+                (b'broken flashrom stdout', b'broken flashrom stderr')
         }
         self.bad_flashrom.configure_mock(**attrs)
         self.bad_flashrom.returncode = 1
diff --git a/client/cros/faft/utils/kernel_handler.py b/client/cros/faft/utils/kernel_handler.py
index 549a6d4..d7b0035 100644
--- a/client/cros/faft/utils/kernel_handler.py
+++ b/client/cros/faft/utils/kernel_handler.py
@@ -7,8 +7,6 @@
 import os
 import re
 
-TMP_FILE_NAME = 'kernel_header_dump'
-
 # Types of kernel modifications.
 KERNEL_BODY_MOD = 1
 KERNEL_VERSION_MOD = 2
@@ -27,6 +25,7 @@
     (designated by the partition name, A or B.
 
     @type os_if: autotest_lib.client.cros.faft.utils.os_interface.OSInterface
+    @param is_minios: True if it is a MiniOS kernel; otherwise, False.
     """
 
     # This value is used to alter contents of a byte in the appropriate kernel
@@ -37,12 +36,21 @@
     # The maximum kernel size in MB.
     KERNEL_SIZE_MB = 16
 
-    def __init__(self, os_if):
+    def __init__(self, os_if, is_minios=False):
         self.os_if = os_if
         self.dump_file_name = None
         self.partition_map = {}
         self.root_dev = None
         self.initialized = False
+        if is_minios:
+            self.kernel_type = 'MINIOS'
+            self.data_key = 'minios_kernel_data_key.vbprivk'
+            self.keyblock = 'minios_kernel.keyblock'
+        else:
+            self.kernel_type = 'KERN'
+            self.data_key = 'kernel_data_key.vbprivk'
+            self.keyblock = 'kernel.keyblock'
+        self.tmp_file_name = 'kernel_header_dump_%s' % self.kernel_type
 
     def _get_version(self, device):
         """Get version of the kernel hosted on the passed in partition."""
@@ -68,7 +76,7 @@
         else:
             target_device = self.root_dev
 
-        kernel_partitions = re.compile('KERN-([AB])')
+        kernel_partitions = re.compile('%s-([AB])' % self.kernel_type)
         disk_map = self.os_if.run_shell_command_get_output(
                 'cgpt show %s' % target_device)
 
@@ -128,8 +136,8 @@
         self.dump_kernel(section, self.dump_file_name)
         data = list(self.os_if.read_file(self.dump_file_name))
         if modification_type == KERNEL_BODY_MOD:
-            data[0] = '%c' % ((ord(data[0]) + delta) % 0x100)
-            self.os_if.write_file(self.dump_file_name, ''.join(data))
+            data[0] = (data[0] + delta) % 0x100
+            self.os_if.write_file(self.dump_file_name, bytes(data))
             kernel_to_write = self.dump_file_name
         elif modification_type == KERNEL_VERSION_MOD:
             new_version = delta
@@ -139,8 +147,7 @@
                     '--signprivate %s --oldblob %s' %
                     (kernel_to_write, new_version,
                      os.path.join(self.dev_key_path,
-                                  'kernel_data_key.vbprivk'),
-                     self.dump_file_name))
+                                  self.data_key), self.dump_file_name))
         elif modification_type == KERNEL_RESIGN_MOD:
             if key_path and self.os_if.is_dir(key_path):
                 resign_key_path = key_path
@@ -152,9 +159,9 @@
                     'vbutil_kernel --repack %s '
                     '--signprivate %s --oldblob %s --keyblock %s' %
                     (kernel_to_write,
-                     os.path.join(resign_key_path, 'kernel_data_key.vbprivk'),
-                     self.dump_file_name,
-                     os.path.join(resign_key_path, 'kernel.keyblock')))
+                     os.path.join(resign_key_path,
+                                  self.data_key), self.dump_file_name,
+                     os.path.join(resign_key_path, self.keyblock)))
         else:
             return  # Unsupported mode, ignore.
         self.write_kernel(section, kernel_to_write)
@@ -200,6 +207,6 @@
         """
         self.dev_key_path = dev_key_path
         self.root_dev = self.os_if.get_root_dev()
-        self.dump_file_name = self.os_if.state_dir_file(TMP_FILE_NAME)
+        self.dump_file_name = self.os_if.state_dir_file(self.tmp_file_name)
         self._get_partition_map(internal_disk)
         self.initialized = True
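
The MiniOS variant only changes which partition label prefix the handler looks for in `cgpt show` output. A minimal sketch of that matching, using the same '%s-([AB])' pattern as _get_partition_map(); the sample cgpt lines are fabricated for illustration.

import re

def find_kernel_partitions(cgpt_lines, kernel_type='MINIOS'):
    """Return the A/B letters found for the given partition label prefix."""
    pattern = re.compile('%s-([AB])' % kernel_type)
    found = []
    for line in cgpt_lines:
        match = pattern.search(line)
        if match:
            found.append(match.group(1))
    return found

sample = ['  4096  65536   2  Label: "KERN-A"',
          '135168  65536   9  Label: "MINIOS-A"',
          '200704  65536  10  Label: "MINIOS-B"']
assert find_kernel_partitions(sample, 'KERN') == ['A']
assert find_kernel_partitions(sample, 'MINIOS') == ['A', 'B']
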
diff --git a/client/cros/faft/utils/os_interface.py b/client/cros/faft/utils/os_interface.py
index 9bee2a9..4602c5f 100644
--- a/client/cros/faft/utils/os_interface.py
+++ b/client/cros/faft/utils/os_interface.py
@@ -2,14 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 """A module to provide interface to OS services."""
-import datetime
-import errno
 import logging
 import os
 import re
 import struct
 
-import shell_wrapper
+from autotest_lib.client.cros.faft.utils import shell_wrapper
 
 
 class OSInterfaceError(Exception):
@@ -53,13 +51,12 @@
 class OSInterface(object):
     """An object to encapsulate OS services functions."""
 
-    def __init__(self, state_dir=None, log_file=None, test_mode=False):
+    def __init__(self, state_dir=None, test_mode=False):
         """Object initialization (side effect: creates the state_dir)
 
         @param state_dir: the name of the directory to use for storing state.
                             The contents of this directory persist over system
                             restarts and power cycles.
-        @param log_file: the name of the log file kept in the state directory.
         @param test_mode: if true, skip (and just log) any shell call
                           marked with modifies_device=True
         """
@@ -68,18 +65,9 @@
         if state_dir is None:
             state_dir = '/usr/local/tmp/faft'
 
-        if log_file is None:
-            log_file = 'faft_client.log'
-
-        if not os.path.isabs(log_file):
-            log_file = os.path.join(state_dir, log_file)
-
         self.state_dir = state_dir
-        self.log_file = log_file
         self.test_mode = test_mode
 
-        self._use_log_file = False
-
         self.shell = shell_wrapper.LocalShell(self)
         self.host_shell = None
 
@@ -100,7 +88,7 @@
         @raise autotest_lib.client.common_lib.error.CmdError: if command fails
         """
         if self.test_mode and modifies_device:
-            self.log('[SKIPPED] %s' % cmd)
+            logging.info('[SKIPPED] %s', cmd)
         else:
             self.shell.run_command(cmd, block=block)
 
@@ -194,95 +182,6 @@
         """Get a full path of a file in the state directory."""
         return os.path.join(self.state_dir, file_name)
 
-    def log(self, text):
-        """Write text to the log file and print it on the screen, if enabled.
-
-        The entire log (kept across reboots) can be found in self.log_file.
-        """
-        if not self._use_log_file:
-            # Called during init, during shutdown, or after a log write fails.
-            logging.info('%s', text)
-            return
-
-        timestamp = datetime.datetime.strftime(datetime.datetime.now(),
-                                               '%I:%M:%S %p:')
-
-        try:
-            with open(self.log_file, 'a') as log_f:
-                log_f.write('%s %s\n' % (timestamp, text))
-                log_f.flush()
-                os.fdatasync(log_f.fileno())
-        except EnvironmentError:
-            logging.info('%s', text)
-            logging.warn("Couldn't write RPC Log: %s", self.log_file,
-                         exc_info=True)
-            # Report error only once.
-            self._use_log_file = False
-
-    def start_file_logging(self):
-        """Create and start using using the log file (or report failure)"""
-        if self._use_log_file:
-            return
-
-        try:
-
-            with open(self.log_file, 'a'):
-                self._use_log_file = True
-
-            # log to stderr, showing the filename (extra newline to add a gap)
-            logging.debug('Begin RPC Log: %s\n', self.log_file)
-
-            # log into the file, to indicate the start time
-            self.log('Begin RPC Log: %s (this file)' % self.log_file)
-
-        except EnvironmentError:
-            logging.warn("Couldn't write RPC Log: %s", self.log_file,
-                         exc_info=True)
-            self._use_log_file = False
-
-    def stop_file_logging(self):
-        """Stop using the log file (switch back to stderr)."""
-        if not self._use_log_file:
-            return
-
-        # log to the file, to indicate when done (extra newline to add a gap)
-        self.log('End RPC Log.\n')
-
-        self._use_log_file = False
-
-        # log to stderr, to tie timestamps together
-        logging.debug('End RPC Log.')
-
-    def remove_log_file(self):
-        """Delete the log file."""
-        if not self.test_mode:
-            # Test mode shouldn't be able to actually remove the log.
-            try:
-                os.remove(self.log_file)
-            except EnvironmentError as e:
-                if e.errno != errno.ENOENT:
-                    self.log("Could not remove log file: %s" % e)
-
-    def dump_log(self, remove_log=False):
-        """Dump the log file.
-
-        @param remove_log: Remove the log file after dump
-        @return: String of the log file content.
-        """
-        if remove_log and not self.test_mode:
-            # Make sure "end RPC log" is printed before grabbing the log
-            self.stop_file_logging()
-
-        try:
-            with open(self.log_file, 'r') as f:
-                log = f.read()
-        except EnvironmentError as e:
-            log = '<%s>' % e
-
-        if remove_log and not self.test_mode:
-            self.remove_log_file()
-        return log
-
     def is_removable_device(self, device):
         """Check if a certain storage device is removable.
 
@@ -317,7 +216,7 @@
                     devicetype = '/sys/block/%s/device/type' % p.split('/')[2]
                     if (not self.path_exists(devicetype)
                         or self.read_file(devicetype).strip() != 'SD'):
-                         return p
+                        return p
             return '/dev/sda'
         else:
             return self.strip_part(device)
@@ -352,7 +251,8 @@
         preamble_format = '<40sQ'
         magic, _, kb_size = struct.unpack_from(header_format, blob)
 
-        if magic != 'CHROMEOS':
+        if magic != b'CHROMEOS':
+            logging.error(f"Incorrect magic string {magic}")
             return -1  # This could be a corrupted version case.
 
         _, version = struct.unpack_from(preamble_format, blob, kb_size)
@@ -366,7 +266,8 @@
         """
         header_format = '<8s96sQ'
         magic, _, version = struct.unpack_from(header_format, blob)
-        if magic != 'CHROMEOS':
+        if magic != b'CHROMEOS':
+            logging.error(f"Incorrect magic string {magic}")
             return -1  # This could be a corrupted version case.
         return version
 
@@ -380,7 +281,8 @@
         preamble_format = '<72sQ'
         magic, _, kb_size = struct.unpack_from(header_format, blob)
 
-        if magic != 'CHROMEOS':
+        if magic != b'CHROMEOS':
+            logging.error(f"Incorrect magic string {magic}")
             return -1
 
         _, version = struct.unpack_from(preamble_format, blob, kb_size)
@@ -396,7 +298,8 @@
         preamble_format = '<32sII64sI'
         magic, _, kb_size = struct.unpack_from(header_format, blob)
 
-        if magic != 'CHROMEOS':
+        if magic != b'CHROMEOS':
+            logging.error(f"Incorrect magic string {magic}")
             return -1  # This could be a corrupted version case.
 
         _, ver, subver, _, flags = struct.unpack_from(preamble_format, blob,
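
The retyped magic checks above follow from Python 3's struct module: unpack_from() on a bytes blob returns bytes for 's' fields, so a comparison against the str 'CHROMEOS' would never match. A minimal standalone sketch (the 8s/8s/Q layout here is illustrative, not the real vboot header format):

import struct

# Illustrative header: an 8-byte magic, 8 bytes of padding, and a 64-bit size.
header_format = '<8s8sQ'
blob = struct.pack(header_format, b'CHROMEOS', b'\x00' * 8, 2048)

magic, _, size = struct.unpack_from(header_format, blob)
assert magic == b'CHROMEOS'   # bytes compare equal to bytes
assert magic != 'CHROMEOS'    # a str comparison silently fails under Python 3
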
diff --git a/client/cros/faft/utils/saft_flashrom_util.py b/client/cros/faft/utils/saft_flashrom_util.py
index 8570bff..021905d 100644
--- a/client/cros/faft/utils/saft_flashrom_util.py
+++ b/client/cros/faft/utils/saft_flashrom_util.py
@@ -20,6 +20,7 @@
 For more information, see help(saft_flashrom_util.flashrom_util).
 """
 import re
+import logging
 
 
 class TestError(Exception):
@@ -85,8 +86,8 @@
             if section_base <= base or section_end + 1 < section_base:
                 # Overlapped section is possible, like the fwid which is
                 # inside the main fw section.
-                self.os_if.log('overlapped section at 0x%x..0x%x' %
-                               (section_base, section_end))
+                logging.info('overlapped section at 0x%x..0x%x', section_base,
+                             section_end)
             base = section_end
         if base > file_size:
             raise TestError('Section end 0x%x exceeds file size %x' %
@@ -221,7 +222,8 @@
         ]
         layout_text.sort()  # XXX unstable if range exceeds 2^32
         tmpfn = self._get_temp_filename('lay_')
-        self.os_if.write_file(tmpfn, '\n'.join(layout_text) + '\n')
+        with open(tmpfn, "w") as file:
+            file.write('\n'.join(layout_text) + '\n')
         return tmpfn
 
     def check_target(self):
@@ -253,7 +255,7 @@
         # the data to sign. Make it the same way as firmware creation.
         if section_name in ('FVMAIN', 'FVMAINB', 'ECMAINA', 'ECMAINB'):
             align = 4
-            pad = blob[-1]
+            pad = blob[-1:]
             blob = blob.rstrip(pad)
             blob = blob + ((align - 1) - (len(blob) - 1) % align) * pad
         return blob
@@ -272,7 +274,7 @@
             if (len(data) < pos[1] - pos[0] + 1
                         and section_name in ('FVMAIN', 'FVMAINB', 'ECMAINA',
                                              'ECMAINB', 'RW_FWID')):
-                pad = base_image[pos[1]]
+                pad = base_image[pos[1]:pos[1] + 1]
                 data = data + pad * (pos[1] - pos[0] + 1 - len(data))
             else:
                 raise TestError('INTERNAL ERROR: unmatched data size.')
@@ -334,7 +336,7 @@
         @param enabled: If True, run --wp-enable; if False, run --wp-disable.
                         If None (default), don't specify either one.
         """
-        cmd = 'flashrom %s --verbose --wp-range %s %s' % (
+        cmd = 'flashrom %s --verbose --wp-range %s,%s' % (
                 self._target_command, start, length)
         if enabled is not None:
             cmd += ' '
@@ -361,6 +363,8 @@
 
         output = self.os_if.run_shell_command_get_output(
                 'flashrom %s --wp-status' % self._target_command)
+        logging.debug('`flashrom %s --wp-status` returned %s',
+                      self._target_command, output)
 
         wp_status = {}
         for line in output:
@@ -396,7 +400,7 @@
     def dump_flash(self, filename):
         """Read the flash device's data into a file, but don't parse it."""
         cmd = 'flashrom %s -r "%s"' % (self._target_command, filename)
-        self.os_if.log('flashrom_util.dump_flash(): %s' % cmd)
+        logging.info('flashrom_util.dump_flash(): %s', cmd)
         self.os_if.run_shell_command(cmd)
 
     def read_whole(self):
@@ -406,7 +410,7 @@
         """
         tmpfn = self._get_temp_filename('rd_')
         cmd = 'flashrom %s -r "%s"' % (self._target_command, tmpfn)
-        self.os_if.log('flashrom_util.read_whole(): %s' % cmd)
+        logging.info('flashrom_util.read_whole(): %s', cmd)
         self.os_if.run_shell_command(cmd)
         result = self.os_if.read_file(tmpfn)
         self.set_firmware_layout(tmpfn)
@@ -433,7 +437,7 @@
         write_cmd = 'flashrom %s -l "%s" -i %s -w "%s"' % (
                 self._target_command, layout_fn, ' -i '.join(write_list),
                 tmpfn)
-        self.os_if.log('flashrom.write_partial(): %s' % write_cmd)
+        logging.info('flashrom.write_partial(): %s', write_cmd)
         self.os_if.run_shell_command(write_cmd, modifies_device=True)
 
         # clean temporary resources
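
The pad-handling change above (blob[-1] to blob[-1:]) is a Python 3 bytes detail: indexing a bytes object yields an int, which rstrip() rejects, while a one-element slice stays a bytes object. A small sketch of the same alignment logic with invented data:

# Strip trailing pad bytes, then re-pad to a 4-byte boundary, mirroring the
# FVMAIN/ECMAIN handling above (the payload here is made up).
blob = b'payload\xff\xff\xff'
align = 4

pad = blob[-1:]                     # b'\xff'; blob[-1] would be the int 255
blob = blob.rstrip(pad)             # b'payload'
blob = blob + ((align - 1) - (len(blob) - 1) % align) * pad
assert len(blob) % align == 0       # 8 bytes after re-padding
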
diff --git a/client/cros/faft/utils/saft_flashrom_util_unittest.py b/client/cros/faft/utils/saft_flashrom_util_unittest.py
index 4665ad6..6959bfa 100644
--- a/client/cros/faft/utils/saft_flashrom_util_unittest.py
+++ b/client/cros/faft/utils/saft_flashrom_util_unittest.py
@@ -1,7 +1,8 @@
+#!/usr/bin/python3
 """Unit tests for saft_flashrom_util.py."""
 
-import mock
 import unittest
+from unittest import mock
 
 from autotest_lib.client.common_lib import autotemp
 from autotest_lib.client.common_lib import error
@@ -27,7 +28,7 @@
         bad_flashrom = mock.Mock()
         attrs = {
                 'communicate.return_value':
-                ('broken flashrom stdout', 'broken flashrom stderr')
+                (b'broken flashrom stdout', b'broken flashrom stderr')
         }
         bad_flashrom.configure_mock(**attrs)
         bad_flashrom.returncode = 1
@@ -43,7 +44,7 @@
         good_flashrom = mock.Mock()
         attrs = {
                 'communicate.return_value':
-                ('working flashrom stdout', 'working flashrom stderr')
+                (b'working flashrom stdout', b'working flashrom stderr')
         }
         good_flashrom.configure_mock(**attrs)
         good_flashrom.returncode = 0
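
The unit-test updates above hand the fake flashrom process bytes rather than str, matching what a real Popen.communicate() returns on Python 3. The mocking pattern in isolation, with placeholder output strings:

from unittest import mock

# A Mock standing in for a subprocess handle; communicate() is configured to
# return a (stdout, stderr) tuple of bytes.
fake_flashrom = mock.Mock()
fake_flashrom.configure_mock(
        **{'communicate.return_value': (b'fake stdout', b'fake stderr')})
fake_flashrom.returncode = 0

stdout, stderr = fake_flashrom.communicate()
assert stdout == b'fake stdout' and fake_flashrom.returncode == 0
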
diff --git a/client/cros/faft/utils/shell_wrapper.py b/client/cros/faft/utils/shell_wrapper.py
index 3ec57d4..d6daa5a 100644
--- a/client/cros/faft/utils/shell_wrapper.py
+++ b/client/cros/faft/utils/shell_wrapper.py
@@ -3,6 +3,7 @@
 # found in the LICENSE file.
 """A module to abstract the shell execution environment on DUT."""
 
+import logging
 import subprocess
 
 import time
@@ -37,7 +38,7 @@
                         'use block=True instead, '
                         'refer to b/172325331 for more details' % cmd)
             raise UnsupportedSuccessToken(errormsg)
-        self._os_if.log('Executing: %s' % cmd)
+        logging.debug('Executing: %s', cmd)
         process = subprocess.Popen(
                 cmd,
                 shell=True,
@@ -45,6 +46,8 @@
                 stderr=subprocess.PIPE)
         if block:
             stdout, stderr = process.communicate()
+            stdout = stdout.decode('utf-8')
+            stderr = stderr.decode('utf-8')
         return process, stdout, stderr
 
     def run_command(self, cmd, block=True):
@@ -64,7 +67,7 @@
             returncode = process.returncode
             duration = time.time() - start_time
             result = utils.CmdResult(cmd, stdout, stderr, returncode, duration)
-            self._os_if.log('Command failed.\n%s' % result)
+            logging.error('Command failed.\n%s', result)
             raise error.CmdError(cmd, result)
 
     def run_command_get_result(self, cmd, ignore_status=False):
@@ -84,10 +87,10 @@
         result = utils.CmdResult(cmd, stdout, stderr, returncode, duration)
 
         if returncode and not ignore_status:
-            self._os_if.log('Command failed:\n%s' % result)
+            logging.error('Command failed:\n%s', result)
             raise error.CmdError(cmd, result)
 
-        self._os_if.log('Command result:\n%s' % result)
+        logging.info('Command result:\n%s', result)
         return result
 
     def run_command_check_output(self, cmd, success_token):
@@ -109,7 +112,7 @@
         if '\n' in success_token:
             raise UnsupportedSuccessToken()
         cmd_stdout = ''.join(self.run_command_get_output(cmd))
-        self._os_if.log('Checking for %s in %s' % (success_token, cmd_stdout))
+        logging.info('Checking for %s in %s', success_token, cmd_stdout)
         return success_token in cmd_stdout
 
     def run_command_get_status(self, cmd):
@@ -134,15 +137,15 @@
 
     def read_file(self, path):
         """Read the content of the file."""
-        with open(path) as f:
+        with open(path, "rb") as f:
             return f.read()
 
     def write_file(self, path, data):
         """Write the data to the file."""
-        with open(path, 'w') as f:
+        with open(path, 'wb') as f:
             f.write(data)
 
     def append_file(self, path, data):
         """Append the data to the file."""
-        with open(path, 'a') as f:
+        with open(path, 'ab') as f:
             f.write(data)
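
Two Python 3 conventions drive the shell_wrapper changes above: Popen pipes produce bytes, so the blocking path decodes them to str, and the file helpers switch to binary modes so callers control encoding themselves. A self-contained sketch of the decode step (the echo command is just an example):

import subprocess

process = subprocess.Popen('echo hello', shell=True,
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()   # both are bytes under Python 3
stdout = stdout.decode('utf-8')
stderr = stderr.decode('utf-8')
assert stdout.strip() == 'hello'
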
diff --git a/client/cros/faft/utils/shell_wrapper_unittest.py b/client/cros/faft/utils/shell_wrapper_unittest.py
index bb426f1..bfdab65 100644
--- a/client/cros/faft/utils/shell_wrapper_unittest.py
+++ b/client/cros/faft/utils/shell_wrapper_unittest.py
@@ -1,7 +1,8 @@
+#!/usr/bin/python3
 """Unit tests for shell_wrapper.py."""
 
-import mock
 import unittest
+from unittest import mock
 
 from autotest_lib.client.cros.faft.utils import shell_wrapper
 
@@ -15,7 +16,9 @@
         success_token = 'unexpected'
         mock_process = mock.Mock()
         mock_subproc_popen.return_value = mock_process
-        attrs = {'communicate.return_value': ('sucessfully executed foo', '')}
+        attrs = {
+                'communicate.return_value': (b'successfully executed foo', b'')
+        }
         mock_process.configure_mock(**attrs)
         os_if = mock.Mock()
         local_shell = shell_wrapper.LocalShell(os_if)
@@ -30,7 +33,7 @@
         mock_subproc_popen.return_value = mock_process
         attrs = {
                 'communicate.return_value':
-                ('successfully executed bar. expected is expected.', '')
+                (b'successfully executed bar. expected is expected.', b'')
         }
         mock_process.configure_mock(**attrs)
         os_if = mock.Mock()
@@ -44,7 +47,9 @@
         success_token = 'malformed token \n'
         mock_process = mock.Mock()
         mock_subproc_popen.return_value = mock_process
-        attrs = {'communicate.return_value': ('successfully executed baz', '')}
+        attrs = {
+                'communicate.return_value': (b'successfully executed baz', b'')
+        }
         mock_process.configure_mock(**attrs)
         os_if = mock.Mock()
         local_shell = shell_wrapper.LocalShell(os_if)
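
These tests patch subprocess.Popen itself, so run_command_check_output never spawns a real shell. A reduced sketch of the pattern (the command string and output are placeholders):

import subprocess
from unittest import mock

@mock.patch('subprocess.Popen')
def check_token(mock_subproc_popen):
    # Configure the fake process and make the patched Popen return it.
    mock_process = mock.Mock()
    mock_process.communicate.return_value = (b'successfully executed foo', b'')
    mock_process.returncode = 0
    mock_subproc_popen.return_value = mock_process

    proc = subprocess.Popen('foo', shell=True)   # returns the Mock above
    stdout, _ = proc.communicate()
    return b'executed' in stdout

assert check_token()
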
diff --git a/client/cros/faft/utils/tpm_handler.py b/client/cros/faft/utils/tpm_handler.py
index 4d6128e..6b7c9c1 100644
--- a/client/cros/faft/utils/tpm_handler.py
+++ b/client/cros/faft/utils/tpm_handler.py
@@ -143,7 +143,7 @@
         actually uses it.
         """
         self.stop_daemon()
-        for nvram in self.nvrams.itervalues():
+        for nvram in self.nvrams.values():
             nvram.init()
         self.restart_daemon()
         self.initialized = True
diff --git a/client/cros/flimflam_test_path.py b/client/cros/flimflam_test_path.py
index a81b9aa..5c51151 100644
--- a/client/cros/flimflam_test_path.py
+++ b/client/cros/flimflam_test_path.py
@@ -1,10 +1,11 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import os, sys
 
-import constants
+from autotest_lib.client.cros import constants
 
 sys.path.append(os.environ.get("SYSROOT", "/usr/local/") +
                 constants.FLIMFLAM_TEST_PATH)
diff --git a/client/cros/gpio.py b/client/cros/gpio.py
index b838516..363d3d9 100644
--- a/client/cros/gpio.py
+++ b/client/cros/gpio.py
@@ -1,12 +1,12 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-'''Chrome OS device GPIO library
+'''ChromeOS device GPIO library
 
 This module provides a convenient way to detect, setup, and access to GPIO
-values on a Chrome OS compatible device.
+values on a ChromeOS compatible device.
 
 See help(Gpio) for more information.
 '''
diff --git a/client/cros/graphics/graphics_uinput.py b/client/cros/graphics/graphics_uinput.py
index 9516d6e..72e7e0c 100644
--- a/client/cros/graphics/graphics_uinput.py
+++ b/client/cros/graphics/graphics_uinput.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/graphics/graphics_utils.py b/client/cros/graphics/graphics_utils.py
index 3dab92e..ca8e935 100644
--- a/client/cros/graphics/graphics_utils.py
+++ b/client/cros/graphics/graphics_utils.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -27,9 +28,9 @@
 
 # The uinput module might not be available at SDK test time.
 try:
-  from autotest_lib.client.cros.graphics import graphics_uinput
+    from autotest_lib.client.cros.graphics import graphics_uinput
 except ImportError:
-  graphics_uinput = None
+    graphics_uinput = None
 
 
 class GraphicsTest(test.test):
@@ -154,9 +155,10 @@
                 pass
             self.Foo('test_name') # call Foo with unnamed args
          """
-        def decorator(fn):
+
+        def _decorator(fn):
             @wraps(fn)
-            def wrapper(*args, **kwargs):
+            def _wrapper(*args, **kwargs):
                 if len(args) > 1:
                     raise error.TestError('Unnamed arguments is not accepted. '
                                           'Please apply this decorator to '
@@ -168,8 +170,10 @@
                     # Cherry pick the arguments for the wrapped function.
                     d_args, d_kwargs = utils.cherry_pick_args(fn, args, kwargs)
                     return fn(instance, *d_args, **d_kwargs)
-            return wrapper
-        return decorator
+
+            return _wrapper
+
+        return _decorator
 
     def add_failures(self, name, subtest=None):
         """
@@ -223,9 +227,9 @@
 
         total_failures = 0
         # Report subtests failures
-        for failure in self._failures_by_description.values():
+        for failure in list(self._failures_by_description.values()):
             if len(failure['names']) > 0:
-                logging.debug('GraphicsTest failure: %s' % failure['names'])
+                logging.debug('GraphicsTest failure: %s', failure['names'])
                 total_failures += len(failure['names'])
 
             if not self._test_failure_report_subtest:
@@ -262,7 +266,7 @@
         """
         Get currently recorded failures list.
         """
-        return [name for failure in self._failures_by_description.values()
+        return [name for failure in list(self._failures_by_description.values())
                 for name in failure['names']]
 
     def open_vt1(self):
@@ -709,7 +713,7 @@
     This is a crude way to figure out if the device will not be able to promote
     video frames to overlays at all, which happens for example on Broadwell.
     """
-    modetest_output = utils.system_output('modetest -p')
+    modetest_output = utils.system_output('modetest -p', retain_output=True)
     return "nv12" in modetest_output.lower()
 
 def get_modetest_output_state():
@@ -772,6 +776,14 @@
     return bool(get_internal_connector_name())
 
 
+def has_external_display():
+    """Checks whether the DUT is equipped with an external display.
+
+    @return True if external display is present; False otherwise.
+    """
+    return bool(get_external_connector_name())
+
+
 def get_external_resolution():
     """Gets the resolution of the external display.
 
@@ -829,7 +841,7 @@
             Otherwise, return False.
     """
     outputs = get_display_output_state()
-    for output in outputs.iterkeys():
+    for output in list(outputs.keys()):
         if outputs[output] and (output.startswith('HDMI')
                 or output.startswith('DP')
                 or output.startswith('DVI')
@@ -845,7 +857,7 @@
             Otherwise, return False.
     """
     outputs = get_display_output_state()
-    for output in outputs.iterkeys():
+    for output in list(outputs.keys()):
         # reference: chromium_org/chromeos/display/output_util.cc
         if (output.startswith('eDP')
                 or output.startswith('LVDS')
@@ -1144,13 +1156,12 @@
             if not self._run_on_sw_rasterizer and is_sw_rasterizer():
                 raise error.TestFail('Refusing to run on SW rasterizer.')
             logging.info('Initialize: Checking for old GPU hangs...')
-            messages = open(self._MESSAGES_FILE, 'r')
-            for line in messages:
-                for hang in self._HANGCHECK:
-                    if hang in line:
-                        logging.info(line)
-                        self.existing_hangs[line] = line
-            messages.close()
+            with open(self._MESSAGES_FILE, 'r', encoding='utf-8') as messages:
+                for line in messages:
+                    for hang in self._HANGCHECK:
+                        if hang in line:
+                            logging.info(line)
+                            self.existing_hangs[line] = line
 
     def finalize(self):
         """
@@ -1163,21 +1174,22 @@
         new_gpu_warning = False
         if utils.get_cpu_arch() != 'arm':
             logging.info('Cleanup: Checking for new GPU hangs...')
-            messages = open(self._MESSAGES_FILE, 'r')
-            for line in messages:
-                for hang in self._HANGCHECK:
-                    if hang in line:
-                        if not line in self.existing_hangs.keys():
-                            logging.info(line)
-                            for warn in self._HANGCHECK_WARNING:
-                                if warn in line:
-                                    new_gpu_warning = True
-                                    logging.warning(
-                                        'Saw GPU hang warning during test.')
-                                else:
-                                    logging.warning('Saw GPU hang during test.')
-                                    new_gpu_hang = True
-            messages.close()
+            with open(self._MESSAGES_FILE, 'r', encoding='utf-8') as messages:
+                for line in messages:
+                    for hang in self._HANGCHECK:
+                        if hang in line:
+                            if not line in list(self.existing_hangs.keys()):
+                                logging.info(line)
+                                for warn in self._HANGCHECK_WARNING:
+                                    if warn in line:
+                                        new_gpu_warning = True
+                                        logging.warning(
+                                                'Saw GPU hang warning during test.'
+                                        )
+                                    else:
+                                        logging.warning(
+                                                'Saw GPU hang during test.')
+                                        new_gpu_hang = True
 
             if not self._run_on_sw_rasterizer and is_sw_rasterizer():
                 logging.warning('Finished test on SW rasterizer.')
@@ -1269,6 +1281,10 @@
         )
         return executable
 
+    def get_deqp_dir(self):
+        """Return the base path to deqp."""
+        return self.DEQP_BASEDIR
+
 # Possible paths of the kernel DRI debug text file.
 _DRI_DEBUG_FILE_PATH_0 = "/sys/kernel/debug/dri/0/state"
 _DRI_DEBUG_FILE_PATH_1 = "/sys/kernel/debug/dri/1/state"
@@ -1348,7 +1364,7 @@
     for dev_path in glob.glob(_DEV_DRI_CARD_PATH):
         try:
             logging.debug('trying device %s', dev_path);
-            with open(dev_path, 'rw') as dev:
+            with open(dev_path, 'w') as dev:
                 # Pack a struct drm_set_client_cap: two u64.
                 drm_pack = struct.pack("QQ", _DRM_CLIENT_CAP_ATOMIC, 1)
                 result = fcntl.ioctl(dev, _DRM_IOCTL_SET_CLIENT_CAP, drm_pack)
@@ -1387,4 +1403,4 @@
     possible_crtcs = [[int(bit) for bit in bin(crtc)[2:].zfill(16)]
                          for crtc in packed_possible_crtcs]
     # Accumulate the CRTCs indexes and return the maximum number of 'votes'.
-    return max(map(sum, zip(*possible_crtcs)))
+    return max(list(map(sum, list(zip(*possible_crtcs)))))
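
The reshaped decorator in GraphicsTest above follows the usual three-layer factory: the outer function takes the decorator's own arguments, _decorator receives the target, and _wrapper (kept transparent via functools.wraps) runs at call time. A standalone sketch of that shape, with a print standing in for add_failures():

from functools import wraps

def failure_report(name):
    """Decorator factory: 'name' is the subtest label (illustrative only)."""
    def _decorator(fn):
        @wraps(fn)                       # keep fn's __name__ and docstring
        def _wrapper(*args, **kwargs):
            try:
                return fn(*args, **kwargs)
            except Exception:
                print('failure recorded for %s' % name)
                raise
        return _wrapper
    return _decorator

@failure_report('test_name')
def subtest():
    return 'ok'

assert subtest() == 'ok'
assert subtest.__name__ == 'subtest'     # preserved by functools.wraps
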
diff --git a/client/cros/graphics/graphics_utils_unittest.py b/client/cros/graphics/graphics_utils_unittest.py
index 885b4cd..74f6b5d 100644
--- a/client/cros/graphics/graphics_utils_unittest.py
+++ b/client/cros/graphics/graphics_utils_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/httpd_unittest.py b/client/cros/httpd_unittest.py
index aa908bc..1047f43 100755
--- a/client/cros/httpd_unittest.py
+++ b/client/cros/httpd_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/cros/image_comparison/comparison_result.py b/client/cros/image_comparison/comparison_result.py
index 72d0744..35c5769 100644
--- a/client/cros/image_comparison/comparison_result.py
+++ b/client/cros/image_comparison/comparison_result.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/image_comparison/pdiff_image_comparer.py b/client/cros/image_comparison/pdiff_image_comparer.py
index 1b382da..5a3c254 100644
--- a/client/cros/image_comparison/pdiff_image_comparer.py
+++ b/client/cros/image_comparison/pdiff_image_comparer.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -104,4 +105,4 @@
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
-        pass
\ No newline at end of file
+        pass
diff --git a/client/cros/input_playback/input_playback.py b/client/cros/input_playback/input_playback.py
index 98c9f98..09ac4fb 100644
--- a/client/cros/input_playback/input_playback.py
+++ b/client/cros/input_playback/input_playback.py
@@ -100,6 +100,7 @@
             'BRIGHTNESS_CYCLE', 'BRIGHTNESS_AUTO', 'BRIGHTNESS_ZERO',
             'DISPLAY_OFF', 'WWAN', 'WIMAX', 'RFKILL', 'MICMUTE']
 
+    _WACOM_VENDOR_ID = '2d1f'
 
     def __init__(self):
         self.devices = {}
@@ -183,7 +184,7 @@
         @return: string of properties.
 
         """
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w+') as temp_file:
             filename = temp_file.name
             evtest_process = subprocess.Popen(['evtest', device],
                                               stdout=temp_file)
@@ -250,6 +251,20 @@
         return utils.run('cat %s' % filepath).stdout.strip()
 
 
+    def _get_vendor_id(self, node_dir):
+        """Gets the vendor ID of an input device, given its node directory.
+
+        @param node_dir: the directory for the input node in sysfs (e.g.
+                         /sys/class/input/event1)
+
+        @returns: the vendor ID, as a string of four lower-case hex digits.
+        """
+        vendor_id_path = os.path.join(node_dir, 'device/id/vendor')
+        if not os.path.exists(vendor_id_path):
+            raise error.TestError('Could not read vendor ID for ' + node_dir)
+        return self._get_contents_of_file(vendor_id_path).lower()
+
+
     def _find_input_name(self, device_dir, name=None):
         """Find the associated input* name for the given device directory.
 
@@ -274,15 +289,27 @@
         raise error.TestError('Could not match input* to this device!')
 
 
-    def _find_device_ids_for_styluses(self, device_dir, name=None):
+    def _find_device_ids_for_styluses(self, node_dir, device_dir, name=None):
         """Find the fw_id and hw_id for the stylus in the given directory.
 
+        @param node_dir: the directory for the input node in sysfs (e.g.
+                         /sys/class/input/event1)
         @param device_dir: the device directory.
         @param name: the device name.
 
-        @returns: firmware id, hardware id for this device.
+        @returns: firmware ID, hardware ID for this device. Since styluses don't
+                  really have hardware IDs, this will actually be 'usi' or
+                  'wacom' depending on the stylus type. Firmware ID may be None.
 
         """
+        if self._get_vendor_id(node_dir) != self._WACOM_VENDOR_ID:
+            # The stylus device only has a distinct hardware and firmware ID if
+            # it's a Wacom digitizer. Otherwise, a USI stylus is being used, in
+            # which case it's handled by the touchscreen controller. So, there's
+            # no point in looking for a firmware ID unless the stylus has a
+            # Wacom vendor ID.
+            return None, 'usi'
+
         hw_id = 'wacom' # Wacom styluses don't actually have hwids.
         fw_id = None
 
@@ -310,12 +337,14 @@
         return fw_id, hw_id
 
 
-    def _find_device_ids(self, device_dir, input_type, name):
+    def _find_device_ids(self, node_dir, device_dir, input_type, name):
         """Find the fw_id and hw_id for the given device directory.
 
         Finding fw_id and hw_id applicable only for touchpads, touchscreens,
         and styluses.
 
+        @param node_dir: the directory for the input node in sysfs (e.g.
+                         /sys/class/input/event1)
         @param device_dir: the device directory.
         @param input_type: string of input type.
         @param name: string of input name.
@@ -329,7 +358,8 @@
                                                 'stylus']:
             return fw_id, hw_id
         if input_type == 'stylus':
-            return self._find_device_ids_for_styluses(device_dir, name)
+            return self._find_device_ids_for_styluses(node_dir, device_dir,
+                                                      name)
 
         # Touch devices with custom drivers usually save this info as a file.
         fw_filenames = ['fw_version', 'firmware_version', 'firmware_id']
@@ -429,7 +459,8 @@
                 if os.path.exists(device_dir):
                     new_device.device_dir = device_dir
                     new_device.fw_id, new_device.hw_id = self._find_device_ids(
-                            device_dir, input_type, new_device.name)
+                            class_folder, device_dir, input_type,
+                            new_device.name)
 
                 if new_device.emulated:
                     self._emulated_device = new_device
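
The new stylus handling above keys off the input node's vendor ID in sysfs: only a Wacom digitizer gets a firmware/hardware ID lookup, while anything else is treated as a USI stylus handled by the touchscreen controller. A hedged sketch of that check (stylus_kind and the event path are hypothetical; the vendor ID is the one used in the patch):

import os

_WACOM_VENDOR_ID = '2d1f'

def stylus_kind(node_dir):
    """Return 'wacom' or 'usi' for the input node at node_dir."""
    vendor_id_path = os.path.join(node_dir, 'device/id/vendor')
    with open(vendor_id_path) as f:
        vendor_id = f.read().strip().lower()
    return 'wacom' if vendor_id == _WACOM_VENDOR_ID else 'usi'

# e.g. stylus_kind('/sys/class/input/event1')
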
diff --git a/client/cros/input_playback/keyboard_ctrl+a_backspace b/client/cros/input_playback/keyboard_ctrl+a_backspace
new file mode 100644
index 0000000..c5f18da
--- /dev/null
+++ b/client/cros/input_playback/keyboard_ctrl+a_backspace
@@ -0,0 +1,18 @@
+E: 1649804445.476288 0004 0004 29
+E: 1649804445.476288 0001 001d 1
+E: 1649804445.476288 0000 0000 0
+E: 1649804445.693952 0004 0004 30
+E: 1649804445.693952 0001 001e 1
+E: 1649804445.693952 0000 0000 0
+E: 1649804445.843161 0004 0004 30
+E: 1649804445.843161 0001 001e 0
+E: 1649804445.843161 0000 0000 0
+E: 1649804445.856315 0004 0004 29
+E: 1649804445.856315 0001 001d 0
+E: 1649804445.856315 0000 0000 0
+E: 1649804446.711596 0004 0004 14
+E: 1649804446.711596 0001 000e 1
+E: 1649804446.711596 0000 0000 0
+E: 1649804446.796671 0004 0004 14
+E: 1649804446.796671 0001 000e 0
+E: 1649804446.796671 0000 0000 0
\ No newline at end of file
diff --git a/client/cros/kernel_config.py b/client/cros/kernel_config.py
index 5a9fa2e..ccf4dd9 100644
--- a/client/cros/kernel_config.py
+++ b/client/cros/kernel_config.py
@@ -145,7 +145,7 @@
         if not os.path.exists(filename):
             utils.system("modprobe configs", ignore_status=True)
         if os.path.exists(filename):
-            with gzip.open(filename, "r") as rf:
+            with gzip.open(filename, "rt") as rf:
                 return rf.readlines()
 
         filename = '/boot/config-%s' % utils.system_output('uname -r')
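
The kernel_config change above is another bytes-vs-str fix: gzip.open() in mode 'r' yields bytes lines on Python 3, so the config is opened in text mode ('rt') before readlines(). In isolation (the /proc/config.gz path is the usual location, assumed here):

import gzip

def read_kernel_config(path='/proc/config.gz'):
    """Return the kernel config as a list of str lines."""
    with gzip.open(path, 'rt') as rf:    # 'rt' decodes to str; 'r' would yield bytes
        return rf.readlines()
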
diff --git a/client/cros/liststorage.py b/client/cros/liststorage.py
index 9484b22..3ecbae0 100644
--- a/client/cros/liststorage.py
+++ b/client/cros/liststorage.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/cros/mainloop.py b/client/cros/mainloop.py
index 091b564..ff07f45 100644
--- a/client/cros/mainloop.py
+++ b/client/cros/mainloop.py
@@ -3,7 +3,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import gobject, logging, sys, traceback
+import logging, sys, traceback
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 
 import common
 
@@ -62,12 +67,12 @@
     self.main_loop.quit()
 
   def run(self):
-    gobject.idle_add(self.idle)
+    GObject.idle_add(self.idle)
     if self.timeout_s > 0:
-      timeout_source = gobject.timeout_add(self.timeout_s * 1000, self._timeout)
+      timeout_source = GObject.timeout_add(self.timeout_s * 1000, self._timeout)
     self.main_loop.run()
     if self.timeout_s > 0:
-      gobject.source_remove(timeout_source)
+      GObject.source_remove(timeout_source)
 
     if self._forwarded_exception:
       raise self._forwarded_exception
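
The mainloop import shuffle above keeps the module usable both with PyGObject and with the older client environment mentioned in the comment. The fallback in isolation; both bindings expose the scheduling helpers the loop relies on:

# Prefer the PyGObject binding; fall back to the legacy gobject module.
try:
    from gi.repository import GObject
except ImportError:
    import gobject as GObject

# Either way, the same scheduling API is available:
#   GObject.idle_add(callback)
#   GObject.timeout_add(milliseconds, callback)
#   GObject.source_remove(source_id)
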
diff --git a/client/cros/mock_policies/README b/client/cros/mock_policies/README
deleted file mode 100644
index 129add1..0000000
--- a/client/cros/mock_policies/README
+++ /dev/null
@@ -1,14 +0,0 @@
-This directory contains the ingredients necessary to regenerate
-signed mock policies and the corresponding public keys.  See here
-for details on how to use this code:
-
-https://sites.google.com/a/google.com/chromeos/archive/testing-tools-for-device-policies
-
-Please use ../mock_owner_private.key as the private key.
-Please use madmax@managedchrome.com as the user for generating
-mock_metrics_{on,off}.policy
-protoc --decode_raw is useful for verifying your work by comparing old
-proto and the new.
-
-If you are having to regenerate these files, most likely you have to
-change the code in this directory.  Please keep the code up-to-date in git.
diff --git a/client/cros/mock_policies/asn1der.py b/client/cros/mock_policies/asn1der.py
deleted file mode 100644
index 9040bfe..0000000
--- a/client/cros/mock_policies/asn1der.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/python2.7
-# Copyright 2019 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Helper module for ASN.1/DER encoding."""
-
-import binascii
-import struct
-
-# Tags as defined by ASN.1.
-INTEGER = 2
-BIT_STRING = 3
-NULL = 5
-OBJECT_IDENTIFIER = 6
-SEQUENCE = 0x30
-
-def Data(tag, data):
-  """Generic type-length-value encoder.
-
-  Args:
-    tag: the tag.
-    data: the data for the given tag.
-  Returns:
-    encoded TLV value.
-  """
-  if len(data) < 128:
-    return struct.pack(">BB", tag, len(data)) + data;
-  assert len(data) <= 0xffff;
-  return struct.pack(">BBH", tag, 0x82, len(data)) + data;
-
-def Integer(value):
-  """Encodes an integer.
-
-  Args:
-    value: the long value.
-  Returns:
-    encoded TLV value.
-  """
-  data = '%x' % value
-  if (len(data) % 2 == 1):
-    # Odd number of non-zero bytes - pad out our data to a full number of bytes.
-    data = '0' + data
-
-  # If the high bit is set, need to prepend a null byte to denote a positive
-  # number.
-  if (int(data[0], 16) >= 8):
-    data = '00' + data
-
-  return Data(INTEGER, binascii.unhexlify(data))
-
-def Bitstring(value):
-  """Encodes a bit string.
-
-  Args:
-    value: a string holding the binary data.
-  Returns:
-    encoded TLV value.
-  """
-  return Data(BIT_STRING, '\x00' + value)
-
-def Sequence(values):
-  """Encodes a sequence of other values.
-
-  Args:
-    values: the list of values, must be strings holding already encoded data.
-  Returns:
-    encoded TLV value.
-  """
-  return Data(SEQUENCE, ''.join(values))
diff --git a/client/cros/mock_policies/create_blob.py b/client/cros/mock_policies/create_blob.py
deleted file mode 100755
index 8adf140..0000000
--- a/client/cros/mock_policies/create_blob.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/python2.7
-# Copyright 2019 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This implements a simple tool to create policy blobs signed with a given
-key.  It can create both device and user policies. The output will consist of
-two files a policy file and the owner.key file which contains the policy
-signature.
-
-The input file is JSON. The root dictionary contains a list under the
-key "managed_users". Keys in the root dictionary identify request scopes.
-The user-request scope is described by a dictionary that holds two
-sub-dictionaries: "mandatory" and "recommended". Both these hold the policy
-definitions as key/value stores, their format is identical to what the Linux
-implementation reads from /etc.
-The device-scope holds the policy-definition directly as key/value stores
-in the protobuf-format.
-
-Example:
-
-{
-  "google/chromeos/device" : {
-      "guest_mode_enabled" : false
-  },
-  "google/chromeos/user" : {
-     "mandatory" : {
-      "HomepageLocation" : "http://www.chromium.org",
-      "IncognitoEnabled" : false
-    },
-     "recommended" : {
-      "JavascriptEnabled": false
-    }
-  }
-}
-
-"""
-
-import optparse
-import os
-import re
-import sys
-import time
-import tlslite
-import tlslite.api
-import tlslite.utils
-
-# The name and availability of the json module varies in python versions.
-try:
-  import simplejson as json
-except ImportError:
-  try:
-    import json
-  except ImportError:
-    json = None
-
-import asn1der
-import device_management_backend_pb2 as dm
-import cloud_policy_pb2 as cp
-import chrome_device_policy_pb2 as dp
-
-# ASN.1 object identifier for PKCS#1/RSA.
-PKCS1_RSA_OID = '\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01'
-
-def SetProtobufMessageField(group_message, field, field_value):
-  '''Sets a field in a protobuf message.
-
-  Args:
-    group_message: The protobuf message.
-    field: The field of the message to set, it shuold be a member of
-        group_message.DESCRIPTOR.fields.
-    field_value: The value to set.
-  '''
-  if field.label == field.LABEL_REPEATED:
-    assert type(field_value) == list
-    entries = group_message.__getattribute__(field.name)
-    for list_item in field_value:
-      entries.append(list_item)
-    return
-  elif field.type == field.TYPE_BOOL:
-    assert type(field_value) == bool
-  elif field.type == field.TYPE_STRING:
-    assert type(field_value) == str or type(field_value) == unicode
-  elif field.type == field.TYPE_INT64:
-    assert type(field_value) == int
-  elif (field.type == field.TYPE_MESSAGE and
-        field.message_type.name == 'StringList'):
-    assert type(field_value) == list
-    entries = group_message.__getattribute__(field.name).entries
-    for list_item in field_value:
-      entries.append(list_item)
-    return
-  else:
-    raise Exception('Unknown field type %s' % field.type)
-  group_message.__setattr__(field.name, field_value)
-
-def GatherDevicePolicySettings(settings, policies):
-  '''Copies all the policies from a dictionary into a protobuf of type
-  CloudDeviceSettingsProto.
-
-  Args:
-    settings: The destination ChromeDeviceSettingsProto protobuf.
-    policies: The source dictionary containing policies in JSON format.
-  '''
-  for group in settings.DESCRIPTOR.fields:
-    # Create protobuf message for group.
-    group_message = eval('dp.' + group.message_type.name + '()')
-    # Indicates if at least one field was set in |group_message|.
-    got_fields = False
-    # Iterate over fields of the message and feed them from the
-    # policy config file.
-    for field in group_message.DESCRIPTOR.fields:
-      field_value = None
-      if field.name in policies:
-        got_fields = True
-        field_value = policies[field.name]
-        SetProtobufMessageField(group_message, field, field_value)
-    if got_fields:
-      settings.__getattribute__(group.name).CopyFrom(group_message)
-
-def GatherUserPolicySettings(settings, policies):
-  '''Copies all the policies from a dictionary into a protobuf of type
-  CloudPolicySettings.
-
-  Args:
-    settings: The destination: a CloudPolicySettings protobuf.
-    policies: The source: a dictionary containing policies under keys
-        'recommended' and 'mandatory'.
-  '''
-  for group in settings.DESCRIPTOR.fields:
-    # Create protobuf message for group.
-    group_message = eval('cp.' + group.message_type.name + '()')
-    # We assume that this policy group will be recommended, and only switch
-    # it to mandatory if at least one of its members is mandatory.
-    group_message.policy_options.mode = cp.PolicyOptions.RECOMMENDED
-    # Indicates if at least one field was set in |group_message|.
-    got_fields = False
-    # Iterate over fields of the message and feed them from the
-    # policy config file.
-    for field in group_message.DESCRIPTOR.fields:
-      field_value = None
-      if field.name in policies['mandatory']:
-        group_message.policy_options.mode = cp.PolicyOptions.MANDATORY
-        field_value = policies['mandatory'][field.name]
-      elif field.name in policies['recommended']:
-        field_value = policies['recommended'][field.name]
-      if field_value != None:
-        got_fields = True
-        SetProtobufMessageField(group_message, field, field_value)
-    if got_fields:
-      settings.__getattribute__(group.name).CopyFrom(group_message)
-
-def ProcessCloudPolicy(policy_type,
-                       policy_def, policy_key,
-                       username,
-                       output_path):
-  """Creates a policy blob.
-
-  Encodes the policy into protobuf representation, signs it and saves it.
-
-  Args:
-    policy_type: can be 'google/chromeos/user' or 'google/chromeos/device'.
-    policy_def: The JSON file containing the policy definition.
-    policy_key: A private key to be used to sign the blob.
-    username: Username to be integrated in the policy blob.
-    output_path: A directory where to put the output files.
-  """
-  policy = json.loads(open(policy_def).read())
-  policy_value = ''
-  if (policy_type in policy):
-    if policy_type == 'google/chromeos/user':
-      settings = cp.CloudPolicySettings()
-      GatherUserPolicySettings(settings, policy[policy_type])
-      policy_value = settings.SerializeToString()
-    elif policy_type == 'google/chromeos/device':
-      settings = dp.ChromeDeviceSettingsProto()
-      GatherDevicePolicySettings(settings, policy[policy_type])
-      policy_value = settings.SerializeToString()
-
-  key = tlslite.api.parsePEMKey(open(policy_key).read(), private=True)
-
-  algorithm = asn1der.Sequence(
-      [ asn1der.Data(asn1der.OBJECT_IDENTIFIER, PKCS1_RSA_OID),
-        asn1der.Data(asn1der.NULL, '') ])
-  rsa_pubkey = asn1der.Sequence([ asn1der.Integer(key.n),
-                                  asn1der.Integer(key.e) ])
-  pubkey = asn1der.Sequence([ algorithm, asn1der.Bitstring(rsa_pubkey) ])
-  key_version = 1
-
-  # Fill the policy data protobuf.
-  policy_data = dm.PolicyData()
-  policy_data.policy_type = policy_type
-  policy_data.timestamp = int(time.time() * 1000)
-  policy_data.request_token = "DEV_TOKEN"
-  policy_data.policy_value = policy_value
-  policy_data.machine_name = "MEAN_MACHINE"
-  policy_data.public_key_version = 1
-  policy_data.username = username
-  policy_data.device_id = "1337_1D"
-  signed_data = policy_data.SerializeToString()
-
-  response = dm.DeviceManagementResponse()
-  fetch_response = response.policy_response.responses.add()
-  fetch_response.policy_data = signed_data
-  fetch_response.policy_data_signature = bytes(
-      key.hashAndSign(signed_data))
-  fetch_response.new_public_key = pubkey
-
-  open("%s/policy" % output_path,"wb").
-      write(fetch_response.SerializeToString());
-  open("%s/owner.key" % output_path,"wb").write(pubkey);
-
-def main(options):
-  ProcessCloudPolicy(options.policy_type,
-                     options.policy_def, options.policy_key,
-                     options.policy_user,
-                     options.output_path);
-
-if __name__ == '__main__':
-  option_parser = optparse.OptionParser()
-  option_parser.add_option('-k', '--policy-key', default="mykey",
-                           dest='policy_key',
-                           help='Specify a path to a PEM-encoded private key '
-                           'to use for policy signing.')
-  option_parser.add_option('-p', '--policy-def', default="device_management",
-                           dest='policy_def',
-                           help='Specify a path to a PEM-encoded private key '
-                           'to use for policy signing.')
-  option_parser.add_option('-u', '--policy-user', default='user@example.com',
-                           dest='policy_user',
-                           help='Specify the user name the server should '
-                           'report back to the client as the user owning the '
-                           'token used for making the policy request.')
-  option_parser.add_option('-o', '--output-path', default='.',
-                           dest='output_path',
-                           help='Specifies the directory to output policy '
-                           'files to.')
-  option_parser.add_option('-t', '--type', default='google/chromeos/device',
-                           dest='policy_type',
-                           help='Specifies the type of policy to create.')
-  options, args = option_parser.parse_args()
-
-  sys.exit(main(options))
diff --git a/client/cros/mock_policies/mock_metrics_off.txt b/client/cros/mock_policies/mock_metrics_off.txt
deleted file mode 100644
index d3a72a0..0000000
--- a/client/cros/mock_policies/mock_metrics_off.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "google/chromeos/device" : {
-   "metrics_enabled": false
- }
-}
diff --git a/client/cros/mock_policies/mock_metrics_on.txt b/client/cros/mock_policies/mock_metrics_on.txt
deleted file mode 100644
index 2e72c72..0000000
--- a/client/cros/mock_policies/mock_metrics_on.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "google/chromeos/device" : {
-   "metrics_enabled": true
- }
-}
diff --git a/client/cros/multimedia/OWNERS b/client/cros/multimedia/OWNERS
new file mode 100644
index 0000000..3c5c8a3
--- /dev/null
+++ b/client/cros/multimedia/OWNERS
@@ -0,0 +1 @@
+include /BLUETOOTH_OWNERS
diff --git a/client/cros/multimedia/assistant_facade.py b/client/cros/multimedia/assistant_facade.py
new file mode 100644
index 0000000..8a64fa0
--- /dev/null
+++ b/client/cros/multimedia/assistant_facade.py
@@ -0,0 +1,65 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import logging
+
+from autotest_lib.client.common_lib.cros import assistant_util
+# TODO (crbug.com/949874): Remove this when we make sure assistant_util_private
+# is available.
+try:
+    from autotest_lib.client.common_lib.cros import assistant_util_private
+except ImportError:
+    logging.error("Failed to import assistant_util_private")
+
+class AssistantNativeError(Exception):
+    """Error in AssistantFacadeLocal."""
+    pass
+
+class AssistantFacadeLocal(object):
+    """Facade to access the assistant-related functionality.
+
+    The methods inside this class only accept Python native types.
+
+    """
+    def __init__(self, resource):
+        self._resource = resource
+
+
+    def restart_chrome_for_assistant(self, enable_dsp_hotword=True):
+        """Restarts Chrome with Google assistant enabled.
+
+        @param enable_dsp_hotword: A bool to control the usage of dsp for
+                hotword.
+        """
+        # TODO (paulhsia): Remove this when voice command is ready for non
+        # gaia_login environment.
+        cred = assistant_util_private.get_login_credential()
+        custom_chrome_setup = {
+                "autotest_ext": True,
+                "gaia_login": True,
+                "enable_assistant": True,
+                "username": cred.username,
+                "password": cred.password,
+        }
+
+        if enable_dsp_hotword:
+            custom_chrome_setup["extra_browser_args"] = (
+                ["--enable-features=EnableDspHotword"])
+        self._resource.start_custom_chrome(custom_chrome_setup)
+
+
+    def send_text_query(self, text):
+        """Sends text query to Google assistant and gets response.
+
+        @param text: A str object for text query.
+
+        @returns: A str object for query response.
+        """
+        ext = self._resource.get_extension()
+        return assistant_util.send_text_query(ext, text)
+
+
+    def enable_hotword(self):
+        """Enables hotword in Google assistant."""
+        ext = self._resource.get_extension()
+        assistant_util.enable_hotword(ext)
diff --git a/client/cros/multimedia/assistant_facade_native.py b/client/cros/multimedia/assistant_facade_native.py
deleted file mode 100644
index d02c89f..0000000
--- a/client/cros/multimedia/assistant_facade_native.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import logging
-
-from autotest_lib.client.common_lib.cros import assistant_util
-# TODO (crbug.com/949874): Remove this when we make sure assistant_util_private
-# is available.
-try:
-    from autotest_lib.client.common_lib.cros import assistant_util_private
-except ImportError:
-    logging.error("Failed to import assistant_util_private")
-
-class AssistantNativeError(Exception):
-    """Error in AssistantFacadeNative."""
-    pass
-
-class AssistantFacadeNative(object):
-    """Facade to access the assistant-related functionality.
-
-    The methods inside this class only accept Python native types.
-
-    """
-    def __init__(self, resource):
-        self._resource = resource
-
-
-    def restart_chrome_for_assistant(self, enable_dsp_hotword=True):
-        """Restarts Chrome with Google assistant enabled.
-
-        @param enable_dsp_hotword: A bool to control the usage of dsp for
-                hotword.
-        """
-        # TODO (paulhsia): Remove this when voice command is ready for non
-        # gaia_login environment.
-        cred = assistant_util_private.get_login_credential()
-        custom_chrome_setup = {
-                "autotest_ext": True,
-                "gaia_login": True,
-                "enable_assistant": True,
-                "username": cred.username,
-                "password": cred.password,
-        }
-
-        if enable_dsp_hotword:
-            custom_chrome_setup["extra_browser_args"] = (
-                ["--enable-features=EnableDspHotword"])
-        self._resource.start_custom_chrome(custom_chrome_setup)
-
-
-    def send_text_query(self, text):
-        """Sends text query to Google assistant and gets response.
-
-        @param text: A str object for text qeury.
-
-        @returns: A str object for query response.
-        """
-        ext = self._resource.get_extension()
-        return assistant_util.send_text_query(ext, text)
-
-
-    def enable_hotword(self):
-        """Enables hotword in Google assistant."""
-        ext = self._resource.get_extension()
-        assistant_util.enable_hotword(ext)
diff --git a/client/cros/multimedia/audio_facade.py b/client/cros/multimedia/audio_facade.py
new file mode 100644
index 0000000..657b5e9
--- /dev/null
+++ b/client/cros/multimedia/audio_facade.py
@@ -0,0 +1,726 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Facade to access the audio-related functionality."""
+
+import functools
+import glob
+import logging
+import numpy as np
+import os
+import tempfile
+
+from autotest_lib.client.cros import constants
+from autotest_lib.client.cros.audio import audio_helper
+from autotest_lib.client.cros.audio import cmd_utils
+from autotest_lib.client.cros.audio import cras_dbus_utils
+from autotest_lib.client.cros.audio import cras_utils
+from autotest_lib.client.cros.audio import alsa_utils
+from autotest_lib.client.cros.multimedia import audio_extension_handler
+
+
+class AudioFacadeLocalError(Exception):
+    """Error in AudioFacadeLocal."""
+    pass
+
+
+def check_arc_resource(func):
+    """Decorator function for ARC related functions in AudioFacadeLocal."""
+    @functools.wraps(func)
+    def wrapper(instance, *args, **kwargs):
+        """Wrapper for the methods to check _arc_resource.
+
+        @param instance: Object instance.
+
+        @raises: AudioFacadeLocalError if there is no ARC resource.
+
+        """
+        if not instance._arc_resource:
+            raise AudioFacadeLocalError('There is no ARC resource.')
+        return func(instance, *args, **kwargs)
+    return wrapper
+
+
+def file_contains_all_zeros(path):
+    """Reads a file and checks whether the file contains all zeros."""
+    with open(path, 'rb') as f:
+        binary = f.read()
+        # Assume data is in 16 bit signed int format. The real format
+        # does not matter though since we only care if there is nonzero data.
+        np_array = np.fromstring(binary, dtype='<i2')
+        return not np.any(np_array)
+
+
+class AudioFacadeLocal(object):
+    """Facede to access the audio-related functionality.
+
+    The methods inside this class only accept Python native types.
+
+    """
+    _CAPTURE_DATA_FORMATS = [
+            dict(file_type='raw', sample_format='S16_LE',
+                 channel=1, rate=48000),
+            dict(file_type='raw', sample_format='S16_LE',
+                 channel=2, rate=48000)]
+
+    _PLAYBACK_DATA_FORMAT = dict(
+            file_type='raw', sample_format='S16_LE', channel=2, rate=48000)
+
+    _LISTEN_DATA_FORMATS = [
+            dict(file_type='raw', sample_format='S16_LE',
+                 channel=1, rate=16000)]
+
+    def __init__(self, resource, arc_resource=None):
+        """Initializes an audio facade.
+
+        @param resource: A FacadeResource object.
+        @param arc_resource: An ArcResource object.
+
+        """
+        self._resource = resource
+        self._listener = None
+        self._recorders = {}
+        self._player = None
+        self._counter = None
+        self._loaded_extension_handler = None
+        self._arc_resource = arc_resource
+
+
+    @property
+    def _extension_handler(self):
+        """Multimedia test extension handler."""
+        if not self._loaded_extension_handler:
+            extension = self._resource.get_extension(
+                    constants.AUDIO_TEST_EXTENSION)
+            logging.debug('Loaded extension: %s', extension)
+            self._loaded_extension_handler = (
+                    audio_extension_handler.AudioExtensionHandler(extension))
+        return self._loaded_extension_handler
+
+
+    def get_audio_availability(self):
+        """Returns the availability of chrome.audio API.
+
+        @returns: True if chrome.audio exists
+        """
+        return self._extension_handler.get_audio_api_availability()
+
+
+    def get_audio_devices(self):
+        """Returns the audio devices from chrome.audio API.
+
+        @returns: See the docstring of get_audio_devices in AudioExtensionHandler.
+
+        """
+        return self._extension_handler.get_audio_devices()
+
+
+    def set_chrome_active_volume(self, volume):
+        """Sets the active audio output volume using chrome.audio API.
+
+        @param volume: Volume to set (0~100).
+
+        """
+        self._extension_handler.set_active_volume(volume)
+
+
+    def set_chrome_active_input_gain(self, gain):
+        """Sets the active audio input gain using chrome.audio API.
+
+        @param gain: Gain to set (0~100).
+
+        """
+        self._extension_handler.set_active_input_gain(gain)
+
+
+    def set_chrome_mute(self, mute):
+        """Mutes the active audio output using chrome.audio API.
+
+        @param mute: True to mute. False otherwise.
+
+        """
+        self._extension_handler.set_mute(mute)
+
+
+    def get_chrome_active_volume_mute(self):
+        """Gets the volume state of active audio output using chrome.audio API.
+
+        @returns: A tuple (volume, mute), where volume is 0~100, and mute is
+                  True if the node is muted, False otherwise.
+
+        """
+        return self._extension_handler.get_active_volume_mute()
+
+
+    def set_chrome_active_node_type(self, output_node_type, input_node_type):
+        """Sets active node type through chrome.audio API.
+
+        The node types are defined in cras_utils.CRAS_NODE_TYPES.
+        The current active node will be disabled first if the new active node
+        is different from the current one.
+
+        @param output_node_type: A node type defined in
+                                 cras_utils.CRAS_NODE_TYPES. None to skip.
+        @param input_node_type: A node type defined in
+                                 cras_utils.CRAS_NODE_TYPES. None to skip.
+
+        """
+        if output_node_type:
+            node_id = cras_utils.get_node_id_from_node_type(
+                    output_node_type, False)
+            self._extension_handler.set_active_node_id(node_id)
+        if input_node_type:
+            node_id = cras_utils.get_node_id_from_node_type(
+                    input_node_type, True)
+            self._extension_handler.set_active_node_id(node_id)
+
+
+    def check_audio_stream_at_selected_device(self):
+        """Checks the audio output is at expected node"""
+        output_device_name = cras_utils.get_selected_output_device_name()
+        output_device_type = cras_utils.get_selected_output_device_type()
+        logging.info("Output device name is %s", output_device_name)
+        logging.info("Output device type is %s", output_device_type)
+        alsa_utils.check_audio_stream_at_selected_device(output_device_name,
+                                                         output_device_type)
+
+
+    def cleanup(self):
+        """Clean up the temporary files."""
+        for path in glob.glob('/tmp/playback_*'):
+            os.unlink(path)
+
+        for path in glob.glob('/tmp/capture_*'):
+            os.unlink(path)
+
+        for path in glob.glob('/tmp/listen_*'):
+            os.unlink(path)
+
+        if self._recorders:
+            for recorder in self._recorders.values():
+                recorder.cleanup()
+        self._recorders.clear()
+
+        if self._player:
+            self._player.cleanup()
+        if self._listener:
+            self._listener.cleanup()
+
+        if self._arc_resource:
+            self._arc_resource.cleanup()
+
+
+    def playback(self, file_path, data_format, blocking=False, node_type=None,
+                 block_size=None):
+        """Playback a file.
+
+        @param file_path: The path to the file.
+        @param data_format: A dict containing data format including
+                            file_type, sample_format, channel, and rate.
+                            file_type: file type e.g. 'raw' or 'wav'.
+                            sample_format: One of the keys in
+                                           audio_data.SAMPLE_FORMAT.
+                            channel: number of channels.
+                            rate: sampling rate.
+        @param blocking: Blocks this call until playback finishes.
+        @param node_type: A Cras node type defined in cras_utils.CRAS_NODE_TYPES
+                          to pin the playback to. None to play on the active
+                          selected device.
+        @param block_size: The number of frames per callback.
+
+        @returns: True.
+
+        @raises: AudioFacadeLocalError if data format is not supported.
+
+        """
+        logging.info('AudioFacadeLocal playback file: %r. format: %r',
+                     file_path, data_format)
+
+        if data_format != self._PLAYBACK_DATA_FORMAT:
+            raise AudioFacadeLocalError(
+                    'data format %r is not supported' % data_format)
+
+        device_id = None
+        if node_type:
+            device_id = int(cras_utils.get_device_id_from_node_type(
+                    node_type, False))
+
+        self._player = Player()
+        self._player.start(file_path, blocking, device_id, block_size)
+
+        return True
+
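+    # Usage sketch (illustrative only, not part of the original change): a
+    # caller would pass the exact _PLAYBACK_DATA_FORMAT dict, e.g.
+    #
+    #   facade.playback('/tmp/playback_tone.raw',
+    #                   dict(file_type='raw', sample_format='S16_LE',
+    #                        channel=2, rate=48000),
+    #                   blocking=False)
+    #   ...
+    #   facade.stop_playback()
+    #
+    # The facade name and the file path are assumptions used for illustration.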
+
+    def stop_playback(self):
+        """Stops playback process."""
+        self._player.stop()
+
+
+    def start_recording(self, data_format, node_type=None, block_size=None):
+        """Starts recording an audio file.
+
+        Currently only the formats specified in _CAPTURE_DATA_FORMATS are
+        supported.
+
+        @param data_format: A dict containing:
+                            file_type: 'raw'.
+                            sample_format: 'S16_LE' for 16-bit signed integer in
+                                           little-endian.
+                            channel: channel number.
+                            rate: sampling rate.
+        @param node_type: A Cras node type defined in cras_utils.CRAS_NODE_TYPES
+                          to pin the recording to. None to record from the
+                          active selected device.
+        @param block_size: The number of frames per callback.
+
+        @returns: True
+
+        @raises: AudioFacadeLocalError if data format is not supported, no
+                 active selected node or the specified node is occupied.
+
+        """
+        logging.info('AudioFacadeLocal record format: %r', data_format)
+
+        if data_format not in self._CAPTURE_DATA_FORMATS:
+            raise AudioFacadeLocalError(
+                    'data format %r is not supported' % data_format)
+
+        if node_type is None:
+            device_id = None
+            node_type = cras_utils.get_selected_input_device_type()
+            if node_type is None:
+                raise AudioFacadeLocalError('No active selected input node.')
+        else:
+            device_id = int(cras_utils.get_device_id_from_node_type(
+                    node_type, True))
+
+        if node_type in self._recorders:
+            raise AudioFacadeLocalError(
+                    'Node %s is already occupied' % node_type)
+
+        self._recorders[node_type] = Recorder()
+        self._recorders[node_type].start(data_format, device_id, block_size)
+
+        return True
+
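+    # Usage sketch (illustrative only, not part of the original change): a
+    # caller records with one of the supported capture formats and then
+    # retrieves the recorded path, e.g.
+    #
+    #   facade.start_recording(dict(file_type='raw', sample_format='S16_LE',
+    #                               channel=2, rate=48000))
+    #   ...
+    #   path = facade.stop_recording()  # None if the capture was all zeros
+    #
+    # The facade name is an assumption used purely for illustration.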
+
+    def stop_recording(self, node_type=None):
+        """Stops recording an audio file.
+        @param node_type: A Cras node type defined in cras_utils.CRAS_NODE_TYPES
+                          that we like to pin at. None to have the recording
+                          from active selected device.
+
+        @returns: The path to the recorded file.
+                  None if capture device is not functional.
+
+        @raises: AudioFacadeLocalError if no recording is started on
+                 corresponding node.
+        """
+        if node_type is None:
+            device_id = None
+            node_type = cras_utils.get_selected_input_device_type()
+            if node_type is None:
+                raise AudioFacadeLocalError('No active selected input node.')
+        else:
+            device_id = int(cras_utils.get_device_id_from_node_type(
+                    node_type, True))
+
+
+        if node_type not in self._recorders:
+            raise AudioFacadeLocalError(
+                    'No recording is started on node %s' % node_type)
+
+        recorder = self._recorders[node_type]
+        recorder.stop()
+        del self._recorders[node_type]
+
+        file_path = recorder.file_path
+        if file_contains_all_zeros(recorder.file_path):
+            logging.error('Recorded file contains all zeros. '
+                          'Capture device is not functional')
+            return None
+
+        return file_path
+
+
+    def start_listening(self, data_format):
+        """Starts listening to hotword for a given format.
+
+        Currently only the formats specified in _LISTEN_DATA_FORMATS are
+        supported.
+
+        @param data_format: A dict containing:
+                            file_type: 'raw'.
+                            sample_format: 'S16_LE' for 16-bit signed integer in
+                                           little-endian.
+                            channel: channel number.
+                            rate: sampling rate.
+
+
+        @returns: True
+
+        @raises: AudioFacadeLocalError if data format is not supported.
+
+        """
+        logging.info('AudioFacadeLocal listen format: %r', data_format)
+
+        if data_format not in self._LISTEN_DATA_FORMATS:
+            raise AudioFacadeLocalError(
+                    'data format %r is not supported' % data_format)
+
+        self._listener = Listener()
+        self._listener.start(data_format)
+
+        return True
+
+
+    def stop_listening(self):
+        """Stops listening to hotword.
+
+        @returns: The path to the recorded file.
+                  None if hotwording is not functional.
+
+        """
+        self._listener.stop()
+        if file_contains_all_zeros(self._listener.file_path):
+            logging.error('Recorded file contains all zeros. '
+                          'Hotwording device is not functional')
+            return None
+        return self._listener.file_path
+
+
+    def set_selected_output_volume(self, volume):
+        """Sets the selected output volume.
+
+        @param volume: The volume to be set (0-100).
+
+        """
+        cras_utils.set_selected_output_node_volume(volume)
+
+
+    def set_selected_node_types(self, output_node_types, input_node_types):
+        """Set selected node types.
+
+        The node types are defined in cras_utils.CRAS_NODE_TYPES.
+
+        @param output_node_types: A list of output node types.
+                                  None to skip setting.
+        @param input_node_types: A list of input node types.
+                                 None to skip setting.
+
+        """
+        cras_utils.set_selected_node_types(output_node_types, input_node_types)
+
+
+    def get_selected_node_types(self):
+        """Gets the selected output and input node types.
+
+        @returns: A tuple (output_node_types, input_node_types) where each
+                  field is a list of selected node types defined in
+                  cras_utils.CRAS_NODE_TYPES.
+
+        """
+        return cras_utils.get_selected_node_types()
+
+
+    def get_plugged_node_types(self):
+        """Gets the plugged output and input node types.
+
+        @returns: A tuple (output_node_types, input_node_types) where each
+                  field is a list of plugged node types defined in
+                  cras_utils.CRAS_NODE_TYPES.
+
+        """
+        return cras_utils.get_plugged_node_types()
+
+
+    def dump_diagnostics(self, file_path):
+        """Dumps audio diagnostics results to a file.
+
+        @param file_path: The path to dump results.
+
+        """
+        audio_helper.dump_audio_diagnostics(file_path)
+
+
+    def start_counting_signal(self, signal_name):
+        """Starts counting DBus signal from Cras.
+
+        @param signal_name: Signal of interest.
+
+        """
+        if self._counter:
+            raise AudioFacadeLocalError('There is an ongoing counting.')
+        self._counter = cras_dbus_utils.CrasDBusBackgroundSignalCounter()
+        self._counter.start(signal_name)
+
+
+    def stop_counting_signal(self):
+        """Stops counting DBus signal from Cras.
+
+        @returns: Number of signals starting from last start_counting_signal
+                  call.
+
+        """
+        if not self._counter:
+            raise AudioFacadeLocalError('Should start counting signal first')
+        result = self._counter.stop()
+        self._counter = None
+        return result
+
+
+    def wait_for_unexpected_nodes_changed(self, timeout_secs):
+        """Waits for unexpected nodes changed signal.
+
+        @param timeout_secs: Timeout in seconds for waiting.
+
+        """
+        cras_dbus_utils.wait_for_unexpected_nodes_changed(timeout_secs)
+
+
+    def get_noise_cancellation_supported(self):
+        """Gets whether the device supports Noise Cancellation.
+
+        @returns: True if supported; False otherwise.
+
+        """
+        return cras_utils.get_noise_cancellation_supported()
+
+
+    def set_bypass_block_noise_cancellation(self, bypass):
+        """Sets CRAS to bypass the blocking logic of Noise Cancellation.
+
+        @param bypass: True for bypass; False for un-bypass.
+
+        """
+        cras_utils.set_bypass_block_noise_cancellation(bypass)
+
+
+    def set_noise_cancellation_enabled(self, enabled):
+        """Sets the state to enable or disable Noise Cancellation.
+
+        @param enabled: True to enable; False to disable.
+
+        """
+        cras_utils.set_noise_cancellation_enabled(enabled)
+
+    @check_arc_resource
+    def start_arc_recording(self):
+        """Starts recording using microphone app in container."""
+        self._arc_resource.microphone.start_microphone_app()
+
+
+    @check_arc_resource
+    def stop_arc_recording(self):
+        """Checks the recording is stopped and gets the recorded path.
+
+        The recording duration of microphone app is fixed, so this method just
+        copies the recorded result from container to a path on Cros device.
+
+        """
+        _, file_path = tempfile.mkstemp(prefix='capture_', suffix='.amr-nb')
+        self._arc_resource.microphone.stop_microphone_app(file_path)
+        return file_path
+
+
+    @check_arc_resource
+    def set_arc_playback_file(self, file_path):
+        """Copies the audio file to be played into container.
+
+        The user should call this method to put the file into the container
+        before calling start_arc_playback.
+
+        @param file_path: Path to the file to be played on Cros host.
+
+        @returns: Path to the file in container.
+
+        """
+        return self._arc_resource.play_music.set_playback_file(file_path)
+
+
+    @check_arc_resource
+    def start_arc_playback(self, path):
+        """Start playback through Play Music app.
+
+        Before calling this method, user should call set_arc_playback_file to
+        put the file into container.
+
+        @param path: Path to the file in container.
+
+        """
+        self._arc_resource.play_music.start_playback(path)
+
+
+    @check_arc_resource
+    def stop_arc_playback(self):
+        """Stop playback through Play Music app."""
+        self._arc_resource.play_music.stop_playback()
+
+
+class RecorderError(Exception):
+    """Error in Recorder."""
+    pass
+
+
+class Recorder(object):
+    """The class to control recording subprocess.
+
+    Properties:
+        file_path: The path to recorded file. It should be accessed after
+                   stop() is called.
+
+    """
+    def __init__(self):
+        """Initializes a Recorder."""
+        _, self.file_path = tempfile.mkstemp(prefix='capture_', suffix='.raw')
+        self._capture_subprocess = None
+
+
+    def start(self, data_format, pin_device, block_size):
+        """Starts recording.
+
+        Starts recording subprocess. It can be stopped by calling stop().
+
+        @param data_format: A dict containing:
+                            file_type: 'raw'.
+                            sample_format: 'S16_LE' for 16-bit signed integer in
+                                           little-endian.
+                            channel: channel number.
+                            rate: sampling rate.
+        @param pin_device: An integer device id to record from.
+        @param block_size: The number of frames per callback.
+        """
+        self._capture_subprocess = cmd_utils.popen(
+                cras_utils.capture_cmd(
+                        capture_file=self.file_path, duration=None,
+                        channels=data_format['channel'],
+                        rate=data_format['rate'],
+                        pin_device=pin_device, block_size=block_size))
+
+
+    def stop(self):
+        """Stops recording subprocess."""
+        if self._capture_subprocess.poll() is None:
+            self._capture_subprocess.terminate()
+        else:
+            raise RecorderError(
+                    'Recording process was terminated unexpectedly.')
+
+
+    def cleanup(self):
+        """Cleanup the resources.
+
+        Terminates the recording process if needed.
+
+        """
+        if self._capture_subprocess and self._capture_subprocess.poll() is None:
+            self._capture_subprocess.terminate()
+
+
+class PlayerError(Exception):
+    """Error in Player."""
+    pass
+
+
+class Player(object):
+    """The class to control audio playback subprocess.
+
+    Properties:
+        file_path: The path to the file to play.
+
+    """
+    def __init__(self):
+        """Initializes a Player."""
+        self._playback_subprocess = None
+
+
+    def start(self, file_path, blocking, pin_device, block_size):
+        """Starts playing.
+
+        Starts playing subprocess. It can be stopped by calling stop().
+
+        @param file_path: The path to the file.
+        @param blocking: Blocks this call until playback finishes.
+        @param pin_device: An integer device id to play on.
+        @param block_size: The number of frames per callback.
+
+        """
+        self._playback_subprocess = cras_utils.playback(
+                blocking, playback_file=file_path, pin_device=pin_device,
+                block_size=block_size)
+
+
+    def stop(self):
+        """Stops playback subprocess."""
+        cmd_utils.kill_or_log_returncode(self._playback_subprocess)
+
+
+    def cleanup(self):
+        """Cleanup the resources.
+
+        Terminates the playback process if needed.
+
+        """
+        self.stop()
+
+
+class ListenerError(Exception):
+    """Error in Listener."""
+    pass
+
+
+class Listener(object):
+    """The class to control listening subprocess.
+
+    Properties:
+        file_path: The path to recorded file. It should be accessed after
+                   stop() is called.
+
+    """
+    def __init__(self):
+        """Initializes a Listener."""
+        _, self.file_path = tempfile.mkstemp(prefix='listen_', suffix='.raw')
+        self._capture_subprocess = None
+
+
+    def start(self, data_format):
+        """Starts listening.
+
+        Starts listening subprocess. It can be stopped by calling stop().
+
+        @param data_format: A dict containing:
+                            file_type: 'raw'.
+                            sample_format: 'S16_LE' for 16-bit signed integer in
+                                           little-endian.
+                            channel: channel number.
+                            rate: sampling rate.
+
+        @raises: ListenerError: If listening subprocess is terminated
+                 unexpectedly.
+
+        """
+        self._capture_subprocess = cmd_utils.popen(
+                cras_utils.listen_cmd(
+                        capture_file=self.file_path, duration=None,
+                        channels=data_format['channel'],
+                        rate=data_format['rate']))
+
+
+    def stop(self):
+        """Stops listening subprocess."""
+        if self._capture_subprocess.poll() is None:
+            self._capture_subprocess.terminate()
+        else:
+            raise ListenerError(
+                    'Listening process was terminated unexpectedly.')
+
+
+    def cleanup(self):
+        """Cleanup the resources.
+
+        Terminates the listening process if needed.
+
+        """
+        if self._capture_subprocess and self._capture_subprocess.poll() is None:
+            self._capture_subprocess.terminate()
diff --git a/client/cros/multimedia/audio_facade_native.py b/client/cros/multimedia/audio_facade_native.py
deleted file mode 100644
index 815c151..0000000
--- a/client/cros/multimedia/audio_facade_native.py
+++ /dev/null
@@ -1,700 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Facade to access the audio-related functionality."""
-
-import functools
-import glob
-import logging
-import numpy as np
-import os
-import tempfile
-
-from autotest_lib.client.cros import constants
-from autotest_lib.client.cros.audio import audio_helper
-from autotest_lib.client.cros.audio import cmd_utils
-from autotest_lib.client.cros.audio import cras_dbus_utils
-from autotest_lib.client.cros.audio import cras_utils
-from autotest_lib.client.cros.audio import alsa_utils
-from autotest_lib.client.cros.multimedia import audio_extension_handler
-
-
-class AudioFacadeNativeError(Exception):
-    """Error in AudioFacadeNative."""
-    pass
-
-
-def check_arc_resource(func):
-    """Decorator function for ARC related functions in AudioFacadeNative."""
-    @functools.wraps(func)
-    def wrapper(instance, *args, **kwargs):
-        """Wrapper for the methods to check _arc_resource.
-
-        @param instance: Object instance.
-
-        @raises: AudioFacadeNativeError if there is no ARC resource.
-
-        """
-        if not instance._arc_resource:
-            raise AudioFacadeNativeError('There is no ARC resource.')
-        return func(instance, *args, **kwargs)
-    return wrapper
-
-
-def file_contains_all_zeros(path):
-    """Reads a file and checks whether the file contains all zeros."""
-    with open(path) as f:
-        binary = f.read()
-        # Assume data is in 16 bit signed int format. The real format
-        # does not matter though since we only care if there is nonzero data.
-        np_array = np.fromstring(binary, dtype='<i2')
-        return not np.any(np_array)
-
-
-class AudioFacadeNative(object):
-    """Facede to access the audio-related functionality.
-
-    The methods inside this class only accept Python native types.
-
-    """
-    _CAPTURE_DATA_FORMATS = [
-            dict(file_type='raw', sample_format='S16_LE',
-                 channel=1, rate=48000),
-            dict(file_type='raw', sample_format='S16_LE',
-                 channel=2, rate=48000)]
-
-    _PLAYBACK_DATA_FORMAT = dict(
-            file_type='raw', sample_format='S16_LE', channel=2, rate=48000)
-
-    _LISTEN_DATA_FORMATS = [
-            dict(file_type='raw', sample_format='S16_LE',
-                 channel=1, rate=16000)]
-
-    def __init__(self, resource, arc_resource=None):
-        """Initializes an audio facade.
-
-        @param resource: A FacadeResource object.
-        @param arc_resource: An ArcResource object.
-
-        """
-        self._resource = resource
-        self._listener = None
-        self._recorders = {}
-        self._player = None
-        self._counter = None
-        self._loaded_extension_handler = None
-        self._arc_resource = arc_resource
-
-
-    @property
-    def _extension_handler(self):
-        """Multimedia test extension handler."""
-        if not self._loaded_extension_handler:
-            extension = self._resource.get_extension(
-                    constants.AUDIO_TEST_EXTENSION)
-            logging.debug('Loaded extension: %s', extension)
-            self._loaded_extension_handler = (
-                    audio_extension_handler.AudioExtensionHandler(extension))
-        return self._loaded_extension_handler
-
-
-    def get_audio_availability(self):
-        """Returns the availability of chrome.audio API.
-
-        @returns: True if chrome.audio exists
-        """
-        return self._extension_handler.get_audio_api_availability()
-
-
-    def get_audio_devices(self):
-        """Returns the audio devices from chrome.audio API.
-
-        @returns: Checks docstring of get_audio_devices of AudioExtensionHandler.
-
-        """
-        return self._extension_handler.get_audio_devices()
-
-
-    def set_chrome_active_volume(self, volume):
-        """Sets the active audio output volume using chrome.audio API.
-
-        @param volume: Volume to set (0~100).
-
-        """
-        self._extension_handler.set_active_volume(volume)
-
-
-    def set_chrome_active_input_gain(self, gain):
-        """Sets the active audio input gain using chrome.audio API.
-
-        @param volume: Gain to set (0~100).
-
-        """
-        self._extension_handler.set_active_input_gain(gain)
-
-
-    def set_chrome_mute(self, mute):
-        """Mutes the active audio output using chrome.audio API.
-
-        @param mute: True to mute. False otherwise.
-
-        """
-        self._extension_handler.set_mute(mute)
-
-
-    def get_chrome_active_volume_mute(self):
-        """Gets the volume state of active audio output using chrome.audio API.
-
-        @param returns: A tuple (volume, mute), where volume is 0~100, and mute
-                        is True if node is muted, False otherwise.
-
-        """
-        return self._extension_handler.get_active_volume_mute()
-
-
-    def set_chrome_active_node_type(self, output_node_type, input_node_type):
-        """Sets active node type through chrome.audio API.
-
-        The node types are defined in cras_utils.CRAS_NODE_TYPES.
-        The current active node will be disabled first if the new active node
-        is different from the current one.
-
-        @param output_node_type: A node type defined in
-                                 cras_utils.CRAS_NODE_TYPES. None to skip.
-        @param input_node_type: A node type defined in
-                                 cras_utils.CRAS_NODE_TYPES. None to skip
-
-        """
-        if output_node_type:
-            node_id = cras_utils.get_node_id_from_node_type(
-                    output_node_type, False)
-            self._extension_handler.set_active_node_id(node_id)
-        if input_node_type:
-            node_id = cras_utils.get_node_id_from_node_type(
-                    input_node_type, True)
-            self._extension_handler.set_active_node_id(node_id)
-
-
-    def check_audio_stream_at_selected_device(self):
-        """Checks the audio output is at expected node"""
-        output_device_name = cras_utils.get_selected_output_device_name()
-        output_device_type = cras_utils.get_selected_output_device_type()
-        logging.info("Output device name is %s", output_device_name)
-        logging.info("Output device type is %s", output_device_type)
-        alsa_utils.check_audio_stream_at_selected_device(output_device_name,
-                                                         output_device_type)
-
-
-    def cleanup(self):
-        """Clean up the temporary files."""
-        for path in glob.glob('/tmp/playback_*'):
-            os.unlink(path)
-
-        for path in glob.glob('/tmp/capture_*'):
-            os.unlink(path)
-
-        for path in glob.glob('/tmp/listen_*'):
-            os.unlink(path)
-
-        if self._recorders:
-            for _, recorder in self._recorders:
-                recorder.cleanup()
-        self._recorders.clear()
-
-        if self._player:
-            self._player.cleanup()
-        if self._listener:
-            self._listener.cleanup()
-
-        if self._arc_resource:
-            self._arc_resource.cleanup()
-
-
-    def playback(self, file_path, data_format, blocking=False, node_type=None,
-                 block_size=None):
-        """Playback a file.
-
-        @param file_path: The path to the file.
-        @param data_format: A dict containing data format including
-                            file_type, sample_format, channel, and rate.
-                            file_type: file type e.g. 'raw' or 'wav'.
-                            sample_format: One of the keys in
-                                           audio_data.SAMPLE_FORMAT.
-                            channel: number of channels.
-                            rate: sampling rate.
-        @param blocking: Blocks this call until playback finishes.
-        @param node_type: A Cras node type defined in cras_utils.CRAS_NODE_TYPES
-                          that we like to pin at. None to have the playback on
-                          active selected device.
-        @param block_size: The number for frames per callback.
-
-        @returns: True.
-
-        @raises: AudioFacadeNativeError if data format is not supported.
-
-        """
-        logging.info('AudioFacadeNative playback file: %r. format: %r',
-                     file_path, data_format)
-
-        if data_format != self._PLAYBACK_DATA_FORMAT:
-            raise AudioFacadeNativeError(
-                    'data format %r is not supported' % data_format)
-
-        device_id = None
-        if node_type:
-            device_id = int(cras_utils.get_device_id_from_node_type(
-                    node_type, False))
-
-        self._player = Player()
-        self._player.start(file_path, blocking, device_id, block_size)
-
-        return True
-
-
-    def stop_playback(self):
-        """Stops playback process."""
-        self._player.stop()
-
-
-    def start_recording(self, data_format, node_type=None, block_size=None):
-        """Starts recording an audio file.
-
-        Currently the format specified in _CAPTURE_DATA_FORMATS is the only
-        formats.
-
-        @param data_format: A dict containing:
-                            file_type: 'raw'.
-                            sample_format: 'S16_LE' for 16-bit signed integer in
-                                           little-endian.
-                            channel: channel number.
-                            rate: sampling rate.
-        @param node_type: A Cras node type defined in cras_utils.CRAS_NODE_TYPES
-                          that we like to pin at. None to have the recording
-                          from active selected device.
-        @param block_size: The number for frames per callback.
-
-        @returns: True
-
-        @raises: AudioFacadeNativeError if data format is not supported, no
-                 active selected node or the specified node is occupied.
-
-        """
-        logging.info('AudioFacadeNative record format: %r', data_format)
-
-        if data_format not in self._CAPTURE_DATA_FORMATS:
-            raise AudioFacadeNativeError(
-                    'data format %r is not supported' % data_format)
-
-        if node_type is None:
-            device_id = None
-            node_type = cras_utils.get_selected_input_device_type()
-            if node_type is None:
-                raise AudioFacadeNativeError('No active selected input node.')
-        else:
-            device_id = int(cras_utils.get_device_id_from_node_type(
-                    node_type, True))
-
-        if node_type in self._recorders:
-            raise AudioFacadeNativeError(
-                    'Node %s is already ocuppied' % node_type)
-
-        self._recorders[node_type] = Recorder()
-        self._recorders[node_type].start(data_format, device_id, block_size)
-
-        return True
-
-
-    def stop_recording(self, node_type=None):
-        """Stops recording an audio file.
-        @param node_type: A Cras node type defined in cras_utils.CRAS_NODE_TYPES
-                          that we like to pin at. None to have the recording
-                          from active selected device.
-
-        @returns: The path to the recorded file.
-                  None if capture device is not functional.
-
-        @raises: AudioFacadeNativeError if no recording is started on
-                 corresponding node.
-        """
-        if node_type is None:
-            device_id = None
-            node_type = cras_utils.get_selected_input_device_type()
-            if node_type is None:
-                raise AudioFacadeNativeError('No active selected input node.')
-        else:
-            device_id = int(cras_utils.get_device_id_from_node_type(
-                    node_type, True))
-
-
-        if node_type not in self._recorders:
-            raise AudioFacadeNativeError(
-                    'No recording is started on node %s' % node_type)
-
-        recorder = self._recorders[node_type]
-        recorder.stop()
-        del self._recorders[node_type]
-
-        file_path = recorder.file_path
-        if file_contains_all_zeros(recorder.file_path):
-            logging.error('Recorded file contains all zeros. '
-                          'Capture device is not functional')
-            return None
-
-        return file_path
-
-
-    def start_listening(self, data_format):
-        """Starts listening to hotword for a given format.
-
-        Currently the format specified in _CAPTURE_DATA_FORMATS is the only
-        formats.
-
-        @param data_format: A dict containing:
-                            file_type: 'raw'.
-                            sample_format: 'S16_LE' for 16-bit signed integer in
-                                           little-endian.
-                            channel: channel number.
-                            rate: sampling rate.
-
-
-        @returns: True
-
-        @raises: AudioFacadeNativeError if data format is not supported.
-
-        """
-        logging.info('AudioFacadeNative record format: %r', data_format)
-
-        if data_format not in self._LISTEN_DATA_FORMATS:
-            raise AudioFacadeNativeError(
-                    'data format %r is not supported' % data_format)
-
-        self._listener = Listener()
-        self._listener.start(data_format)
-
-        return True
-
-
-    def stop_listening(self):
-        """Stops listening to hotword.
-
-        @returns: The path to the recorded file.
-                  None if hotwording is not functional.
-
-        """
-        self._listener.stop()
-        if file_contains_all_zeros(self._listener.file_path):
-            logging.error('Recorded file contains all zeros. '
-                          'Hotwording device is not functional')
-            return None
-        return self._listener.file_path
-
-
-    def set_selected_output_volume(self, volume):
-        """Sets the selected output volume.
-
-        @param volume: the volume to be set(0-100).
-
-        """
-        cras_utils.set_selected_output_node_volume(volume)
-
-
-    def set_selected_node_types(self, output_node_types, input_node_types):
-        """Set selected node types.
-
-        The node types are defined in cras_utils.CRAS_NODE_TYPES.
-
-        @param output_node_types: A list of output node types.
-                                  None to skip setting.
-        @param input_node_types: A list of input node types.
-                                 None to skip setting.
-
-        """
-        cras_utils.set_selected_node_types(output_node_types, input_node_types)
-
-
-    def get_selected_node_types(self):
-        """Gets the selected output and input node types.
-
-        @returns: A tuple (output_node_types, input_node_types) where each
-                  field is a list of selected node types defined in
-                  cras_utils.CRAS_NODE_TYPES.
-
-        """
-        return cras_utils.get_selected_node_types()
-
-
-    def get_plugged_node_types(self):
-        """Gets the plugged output and input node types.
-
-        @returns: A tuple (output_node_types, input_node_types) where each
-                  field is a list of plugged node types defined in
-                  cras_utils.CRAS_NODE_TYPES.
-
-        """
-        return cras_utils.get_plugged_node_types()
-
-
-    def dump_diagnostics(self, file_path):
-        """Dumps audio diagnostics results to a file.
-
-        @param file_path: The path to dump results.
-
-        """
-        audio_helper.dump_audio_diagnostics(file_path)
-
-
-    def start_counting_signal(self, signal_name):
-        """Starts counting DBus signal from Cras.
-
-        @param signal_name: Signal of interest.
-
-        """
-        if self._counter:
-            raise AudioFacadeNativeError('There is an ongoing counting.')
-        self._counter = cras_dbus_utils.CrasDBusBackgroundSignalCounter()
-        self._counter.start(signal_name)
-
-
-    def stop_counting_signal(self):
-        """Stops counting DBus signal from Cras.
-
-        @returns: Number of signals starting from last start_counting_signal
-                  call.
-
-        """
-        if not self._counter:
-            raise AudioFacadeNativeError('Should start counting signal first')
-        result = self._counter.stop()
-        self._counter = None
-        return result
-
-
-    def wait_for_unexpected_nodes_changed(self, timeout_secs):
-        """Waits for unexpected nodes changed signal.
-
-        @param timeout_secs: Timeout in seconds for waiting.
-
-        """
-        cras_dbus_utils.wait_for_unexpected_nodes_changed(timeout_secs)
-
-
-    @check_arc_resource
-    def start_arc_recording(self):
-        """Starts recording using microphone app in container."""
-        self._arc_resource.microphone.start_microphone_app()
-
-
-    @check_arc_resource
-    def stop_arc_recording(self):
-        """Checks the recording is stopped and gets the recorded path.
-
-        The recording duration of microphone app is fixed, so this method just
-        copies the recorded result from container to a path on Cros device.
-
-        """
-        _, file_path = tempfile.mkstemp(prefix='capture_', suffix='.amr-nb')
-        self._arc_resource.microphone.stop_microphone_app(file_path)
-        return file_path
-
-
-    @check_arc_resource
-    def set_arc_playback_file(self, file_path):
-        """Copies the audio file to be played into container.
-
-        User should call this method to put the file into container before
-        calling start_arc_playback.
-
-        @param file_path: Path to the file to be played on Cros host.
-
-        @returns: Path to the file in container.
-
-        """
-        return self._arc_resource.play_music.set_playback_file(file_path)
-
-
-    @check_arc_resource
-    def start_arc_playback(self, path):
-        """Start playback through Play Music app.
-
-        Before calling this method, user should call set_arc_playback_file to
-        put the file into container.
-
-        @param path: Path to the file in container.
-
-        """
-        self._arc_resource.play_music.start_playback(path)
-
-
-    @check_arc_resource
-    def stop_arc_playback(self):
-        """Stop playback through Play Music app."""
-        self._arc_resource.play_music.stop_playback()
-
-
-class RecorderError(Exception):
-    """Error in Recorder."""
-    pass
-
-
-class Recorder(object):
-    """The class to control recording subprocess.
-
-    Properties:
-        file_path: The path to recorded file. It should be accessed after
-                   stop() is called.
-
-    """
-    def __init__(self):
-        """Initializes a Recorder."""
-        _, self.file_path = tempfile.mkstemp(prefix='capture_', suffix='.raw')
-        self._capture_subprocess = None
-
-
-    def start(self, data_format, pin_device, block_size):
-        """Starts recording.
-
-        Starts recording subprocess. It can be stopped by calling stop().
-
-        @param data_format: A dict containing:
-                            file_type: 'raw'.
-                            sample_format: 'S16_LE' for 16-bit signed integer in
-                                           little-endian.
-                            channel: channel number.
-                            rate: sampling rate.
-        @param pin_device: A integer of device id to record from.
-        @param block_size: The number for frames per callback.
-        """
-        self._capture_subprocess = cmd_utils.popen(
-                cras_utils.capture_cmd(
-                        capture_file=self.file_path, duration=None,
-                        channels=data_format['channel'],
-                        rate=data_format['rate'],
-                        pin_device=pin_device, block_size=block_size))
-
-
-    def stop(self):
-        """Stops recording subprocess."""
-        if self._capture_subprocess.poll() is None:
-            self._capture_subprocess.terminate()
-        else:
-            raise RecorderError(
-                    'Recording process was terminated unexpectedly.')
-
-
-    def cleanup(self):
-        """Cleanup the resources.
-
-        Terminates the recording process if needed.
-
-        """
-        if self._capture_subprocess and self._capture_subprocess.poll() is None:
-            self._capture_subprocess.terminate()
-
-
-class PlayerError(Exception):
-    """Error in Player."""
-    pass
-
-
-class Player(object):
-    """The class to control audio playback subprocess.
-
-    Properties:
-        file_path: The path to the file to play.
-
-    """
-    def __init__(self):
-        """Initializes a Player."""
-        self._playback_subprocess = None
-
-
-    def start(self, file_path, blocking, pin_device, block_size):
-        """Starts playing.
-
-        Starts playing subprocess. It can be stopped by calling stop().
-
-        @param file_path: The path to the file.
-        @param blocking: Blocks this call until playback finishes.
-        @param pin_device: A integer of device id to play on.
-        @param block_size: The number for frames per callback.
-
-        """
-        self._playback_subprocess = cras_utils.playback(
-                blocking, playback_file=file_path, pin_device=pin_device,
-                block_size=block_size)
-
-
-    def stop(self):
-        """Stops playback subprocess."""
-        cmd_utils.kill_or_log_returncode(self._playback_subprocess)
-
-
-    def cleanup(self):
-        """Cleanup the resources.
-
-        Terminates the playback process if needed.
-
-        """
-        self.stop()
-
-
-class ListenerError(Exception):
-    """Error in Listener."""
-    pass
-
-
-class Listener(object):
-    """The class to control listening subprocess.
-
-    Properties:
-        file_path: The path to recorded file. It should be accessed after
-                   stop() is called.
-
-    """
-    def __init__(self):
-        """Initializes a Listener."""
-        _, self.file_path = tempfile.mkstemp(prefix='listen_', suffix='.raw')
-        self._capture_subprocess = None
-
-
-    def start(self, data_format):
-        """Starts listening.
-
-        Starts listening subprocess. It can be stopped by calling stop().
-
-        @param data_format: A dict containing:
-                            file_type: 'raw'.
-                            sample_format: 'S16_LE' for 16-bit signed integer in
-                                           little-endian.
-                            channel: channel number.
-                            rate: sampling rate.
-
-        @raises: ListenerError: If listening subprocess is terminated
-                 unexpectedly.
-
-        """
-        self._capture_subprocess = cmd_utils.popen(
-                cras_utils.listen_cmd(
-                        capture_file=self.file_path, duration=None,
-                        channels=data_format['channel'],
-                        rate=data_format['rate']))
-
-
-    def stop(self):
-        """Stops listening subprocess."""
-        if self._capture_subprocess.poll() is None:
-            self._capture_subprocess.terminate()
-        else:
-            raise ListenerError(
-                    'Listening process was terminated unexpectedly.')
-
-
-    def cleanup(self):
-        """Cleanup the resources.
-
-        Terminates the listening process if needed.
-
-        """
-        if self._capture_subprocess and self._capture_subprocess.poll() is None:
-            self._capture_subprocess.terminate()
diff --git a/client/cros/multimedia/bluetooth_facade.py b/client/cros/multimedia/bluetooth_facade.py
new file mode 100644
index 0000000..a96e849
--- /dev/null
+++ b/client/cros/multimedia/bluetooth_facade.py
@@ -0,0 +1,4748 @@
+# Lint as: python2, python3
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Facade to access the bluetooth-related functionality."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import base64
+import binascii
+import collections
+from datetime import datetime, timedelta
+import glob
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GLib, GObject
+except ImportError:
+    import gobject as GObject
+import json
+import logging
+import logging.handlers
+import os
+
+# TODO(b/215715213) - Wait until ebuild runs as python3 to remove this try
+try:
+    import pydbus
+except Exception as e:
+    import platform
+    logging.error('Unable to import pydbus at version=%s: %s',
+                  platform.python_version(), e)
+    pydbus = {}
+
+import re
+import subprocess
+import functools
+import time
+import threading
+import traceback
+
+import common
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib.cros.bluetooth import bluetooth_socket
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.udev_helpers import UdevadmInfo, UdevadmTrigger
+from autotest_lib.client.cros.audio import (audio_test_data as
+                                            audio_test_data_module)
+from autotest_lib.client.cros.audio import check_quality
+from autotest_lib.client.cros.audio import cras_utils
+from autotest_lib.client.cros.audio.sox_utils import (
+        convert_format, convert_raw_file, get_file_length,
+        trim_silence_from_wav_file)
+from autotest_lib.client.cros.bluetooth import advertisement
+from autotest_lib.client.cros.bluetooth import adv_monitor_helper
+from autotest_lib.client.cros.bluetooth import output_recorder
+from autotest_lib.client.cros.bluetooth import logger_helper
+from autotest_lib.client.cros.bluetooth.floss.adapter_client import (
+        FlossAdapterClient, BluetoothCallbacks, BluetoothConnectionCallbacks,
+        BondState, SspVariant, Transport)
+from autotest_lib.client.cros.bluetooth.floss.manager_client import FlossManagerClient
+from autotest_lib.client.cros.bluetooth.floss.utils import GLIB_THREAD_NAME
+from autotest_lib.client.cros.power import sys_power
+import six
+from six.moves import map
+from six.moves import range
+
+CheckQualityArgsClass = collections.namedtuple(
+        'args_type', ['filename', 'rate', 'channel', 'bit_width'])
+
+
+def _dbus_byte_array_to_b64_string(dbus_byte_array):
+    """Base64 encodes a dbus byte array for use with the xml rpc proxy.
+
+    The input byte array is base64-encoded into bytes, which are then decoded
+    into a string.
+    """
+    return base64.standard_b64encode(bytearray(dbus_byte_array)).decode()
+
+
+def _b64_string_to_dbus_byte_array(b64_string):
+    """Base64 decodes a dbus byte array for use with the xml rpc proxy."""
+    dbus_array = []
+    bytes = bytearray(base64.standard_b64decode(b64_string))
+    for byte in bytes:
+        dbus_array.append(byte)
+    return dbus_array
+
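+# Round-trip sketch (illustrative, not part of the original change): the two
+# helpers above are inverses, so a byte array handed to the xml rpc proxy can
+# be recovered unchanged on the other side:
+#
+#   encoded = _dbus_byte_array_to_b64_string([0x01, 0x02, 0xff])
+#   _b64_string_to_dbus_byte_array(encoded)   # -> [1, 2, 255]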
+
+def dbus_safe(default_return_value, return_error=False):
+    """Catch all DBus exceptions and return a default value instead.
+
+    Wrap a function with a try block that catches DBus exceptions and
+    returns the error with the specified return status. The exception is logged
+    to aid in debugging.
+
+    If |return_error| is set, the call will return a tuple with
+    (default_return_value, str(error)).
+
+    @param default_return_value: What value to return in case of errors.
+    @param return_error: Whether to return the error string as well.
+
+    @return Either the return value from the method call if successful, the
+            |default_return_value| on a DBus error, or a tuple
+            (default_return_value, str(error)) when |return_error| is set.
+    """
+
+    def decorator(wrapped_function):
+        """Call a function and catch DBus errors.
+
+        @param wrapped_function function to call in dbus safe context.
+        @return function return value or default_return_value on failure.
+
+        """
+
+        @functools.wraps(wrapped_function)
+        def wrapper(*args, **kwargs):
+            """Pass args and kwargs to a dbus safe function.
+
+            @param args formal python arguments.
+            @param kwargs keyword python arguments.
+            @return function return value or default_return_value on failure.
+
+            """
+            logging.debug('%s()', wrapped_function.__name__)
+            try:
+                return wrapped_function(*args, **kwargs)
+            except GLib.Error as e:
+                logging.debug('Exception while performing operation %s: %s',
+                              wrapped_function.__name__, e)
+
+                if return_error:
+                    return (default_return_value, str(e))
+                else:
+                    return default_return_value
+            except Exception as e:
+                logging.debug('Exception in %s: %s', wrapped_function.__name__,
+                              e)
+                logging.debug(traceback.format_exc())
+                raise
+
+        return wrapper
+
+    return decorator
+
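+# Usage sketch (illustrative, not part of the original change): a helper that
+# reads a property over D-Bus can be wrapped so that a GLib.Error yields a
+# safe default instead of propagating, e.g.
+#
+#   @dbus_safe(None)
+#   def _get_address(adapter_properties):
+#       # adapter_properties is an assumed D-Bus properties proxy.
+#       return adapter_properties.Get('org.bluez.Adapter1', 'Address')
+#
+# Any GLib.Error raised inside returns None, or (None, str(error)) when the
+# decorator is created with return_error=True; other exceptions still raise.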
+
+def raw_dbus_call_sync(bus,
+                       proxy,
+                       iface,
+                       method,
+                       variant_in_args,
+                       variant_out_type,
+                       timeout_ms=None):
+    """Makes a raw D-Bus call and returns the unpacked result.
+
+    @param bus: System bus object.
+    @param proxy: Proxy object.
+    @param iface: D-Bus interface that exposes this method.
+    @param method: Name of method to call.
+    @param variant_in_args: A Glib.Variant that corresponds to the method's
+                            inputs.
+    @param variant_out_type: A Glib.VariantType that describes the output. This
+                             is the type that will be unpacked from the result.
+    @param timeout_ms: Timeout in milliseconds for this method call.
+
+    @returns: Unpacked result from the method call.
+    """
+    if timeout_ms is None:
+        timeout_ms = GLib.MAXINT
+
+    return bus.con.call_sync(proxy._bus_name, proxy._path, iface, method,
+                             variant_in_args, variant_out_type, 0, timeout_ms,
+                             None).unpack()
+
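+# Call sketch (illustrative, not part of the original change): invoking a
+# method that takes an adapter index and returns a boolean could look like
+#
+#   result = raw_dbus_call_sync(bus, proxy,
+#                               'org.chromium.bluetooth.Manager',
+#                               'GetAdapterEnabled',
+#                               GLib.Variant('(i)', (0,)),
+#                               GLib.VariantType.new('(b)'))
+#
+# where bus is the pydbus system bus and proxy a pydbus proxy object; the
+# interface and method names are assumptions. Because the result is unpacked,
+# a '(b)' reply comes back as a one-element Python tuple such as (True,).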
+
+def unpack_if_variant(value):
+    """If given value is GLib.Variant, unpack it to the actual type."""
+    if isinstance(value, GLib.Variant):
+        return value.unpack()
+
+    return value
+
+
+class UpstartClient:
+    """Upstart D-Bus client that allows actions on upstart targets."""
+
+    UPSTART_MANAGER_SERVICE = 'com.ubuntu.Upstart'
+    UPSTART_MANAGER_PATH = '/com/ubuntu/Upstart'
+    UPSTART_MANAGER_IFACE = 'com.ubuntu.Upstart0_6'
+    UPSTART_JOB_IFACE = 'com.ubuntu.Upstart0_6.Job'
+
+    UPSTART_ERROR_UNKNOWNINSTANCE = (
+            'com.ubuntu.Upstart0_6.Error.UnknownInstance')
+    UPSTART_ERROR_ALREADYSTARTED = (
+            'com.ubuntu.Upstart0_6.Error.AlreadyStarted')
+
+    @classmethod
+    def _get_job(cls, job_name):
+        """Get job by name."""
+        bus = pydbus.SystemBus()
+        obj = bus.get(cls.UPSTART_MANAGER_SERVICE, cls.UPSTART_MANAGER_PATH)
+        job_path = obj[cls.UPSTART_MANAGER_IFACE].GetJobByName(job_name)
+
+        return bus.get(cls.UPSTART_MANAGER_SERVICE,
+                       job_path)[cls.UPSTART_JOB_IFACE]
+
+    @staticmethod
+    def _convert_instance_args(source):
+        """Convert instance args dict to array."""
+        return ['{}={}'.format(k, v) for k, v in source.items()]
+
+    @classmethod
+    def start(cls, job_name, instance_args={}):
+        """Starts a job.
+
+        @param job_name: Name of upstart job to start.
+        @param instance_args: Instance arguments. Will be converted to array of
+                              "key=value".
+
+        @return True if job start was sent successfully.
+        """
+        try:
+            job = cls._get_job(job_name)
+            converted_args = cls._convert_instance_args(instance_args)
+            job.Start(converted_args, True)
+        except TypeError as t:
+            # Can occur if cls._get_job fails
+            logging.error('Error starting {}: {}'.format(job_name, t))
+            return False
+        except GLib.Error as e:
+            # An already started error is ok. All other dbus errors should
+            # return False.
+            if cls.UPSTART_ERROR_ALREADYSTARTED not in str(e):
+                logging.error('Error starting {}: {}'.format(job_name, e))
+                return False
+
+        return True
+
+    @classmethod
+    def stop(cls, job_name, instance_args={}):
+        """Stops a job.
+
+        @param job_name: Name of upstart job to stop.
+        @param instance_args: Instance arguments. Will be converted to
+                              array of "key=value".
+
+        @return True if job stop was sent successfully.
+        """
+        try:
+            job = cls._get_job(job_name)
+            converted_args = cls._convert_instance_args(instance_args)
+            job.Stop(converted_args, True)
+        except TypeError as t:
+            # Can occur if cls._get_job fails
+            logging.error('Error stopping {}: {}'.format(job_name, t))
+            return False
+        except GLib.Error as e:
+            # If the job was already stopped, we will see an UnknownInstance
+            # exception. All other failure reasons should be treated as
+            # a failure to stop.
+            if cls.UPSTART_ERROR_UNKNOWNINSTANCE not in str(e):
+                logging.error('Error stopping {}: {}'.format(job_name, e))
+                return False
+
+        return True
+
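+# Usage sketch (illustrative, not part of the original change): restarting the
+# ChromeOS audio daemon through this client looks like
+#
+#   UpstartClient.stop('cras')
+#   UpstartClient.start('cras')
+#
+# Both calls return True when the request was accepted (or the job was already
+# in the requested state) and False on any other D-Bus failure.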
+
+class BluetoothBaseFacadeLocal(object):
+    """Base facade shared by Bluez and Floss daemons. This takes care of any
+    functionality that is common across the two daemons.
+    """
+
+    # Both bluez and floss share the same lib dir for configuration and cache
+    BLUETOOTH_LIBDIR = '/var/lib/bluetooth'
+
+    SYSLOG_LEVELS = [
+            'EMERG', 'ALERT', 'CRIT', 'ERR', 'WARNING', 'NOTICE', 'INFO',
+            'DEBUG'
+    ]
+
+    # How long to wait for hid device
+    HID_TIMEOUT = 15
+    HID_CHECK_SECS = 2
+
+    # Due to problems transferring a date object, we convert dates to strings
+    # first. This is the standard format that we will use.
+    # This is the standard format that we will use.
+    OUT_DATE_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
+
+    # Upstart job name for the Floss Manager daemon
+    MANAGER_JOB = "btmanagerd"
+    # File path for btmanagerd
+    BTMANGERD_FILE_PATH = '/usr/bin/btmanagerd'
+    # How long we wait for the manager daemon to come up after we start it
+    DAEMON_TIMEOUT_SEC = 5
+
+    # Upstart job name for ChromeOS Audio daemon
+    CRAS_JOB = "cras"
+
+    CHIPSET_TO_VIDPID = {
+            'MVL-8897': [(('0x02df', '0x912d'), 'SDIO')],
+            'MVL-8997': [(('0x1b4b', '0x2b42'), 'USB')],
+            'QCA-6174A-5-USB': [(('0x168c', '0x003e'), 'USB')],
+            'QCA-6174A-3-UART': [(('0x0271', '0x050a'), 'UART')],
+            'QCA-WCN6856': [(('0x17cb', '0x1103'), 'USB')],
+            'Intel-AX200': [(('0x8086', '0x2723'), 'USB')],  # CcP2
+            'Intel-AX201': [
+                    (('0x8086', '0x02f0'), 'USB'),
+                    (('0x8086', '0x4df0'), 'USB'),
+                    (('0x8086', '0xa0f0'), 'USB'),
+            ],  # HrP2
+            'Intel-AC9260': [(('0x8086', '0x2526'), 'USB')],  # ThP2
+            'Intel-AC9560': [
+                    (('0x8086', '0x31dc'), 'USB'),  # JfP2
+                    (('0x8086', '0x9df0'), 'USB')
+            ],
+            'Intel-AC7260': [
+                    (('0x8086', '0x08b1'), 'USB'),  # WP2
+                    (('0x8086', '0x08b2'), 'USB')
+            ],
+            'Intel-AC7265': [
+                    (('0x8086', '0x095a'), 'USB'),  # StP2
+                    (('0x8086', '0x095b'), 'USB')
+            ],
+            'Realtek-RTL8822C-USB': [(('0x10ec', '0xc822'), 'USB')],
+            'Realtek-RTL8822C-UART': [(('0x10ec', '0xc822'), 'UART')],
+            'Realtek-RTL8852A-USB': [(('0x10ec', '0x8852'), 'USB')],
+            'Mediatek-MTK7921-USB': [(('0x14c3', '0x7961'), 'USB')],
+            'Mediatek-MTK7921-SDIO': [(('0x037a', '0x7901'), 'SDIO')]
+
+            # The following doesn't expose vid:pid
+            # 'WCN3991-UART'
+    }
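+    # Illustrative example: a WLAN device reporting vid:pid
+    # ('0x8086', '0x2723') over USB maps to 'Intel-AX200' in
+    # get_chipset_name() below.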
+
+    def __init__(self):
+        # Initialize a messages object to record general logging.
+        self.messages = logger_helper.LogManager()
+
+        # Set up cras test client for audio tests
+        self._cras_test_client = cras_utils.CrasTestClient()
+
+    def configure_floss(self, enabled):
+        """Start and configure the Floss manager daemon.
+
+        In order to manage whether we use bluez or floss, we need to start the
+        Floss manager daemon and then set floss enabled. This exists in the base
+        implementation because bluez tests will need to start the manager to
+        disable Floss.
+
+        @param enabled: Whether to enable Floss
+
+        @return Whether Floss was configured successfully.
+        """
+        # Start manager daemon or exit early
+        if not UpstartClient.start(self.MANAGER_JOB):
+            return False
+
+        # Since we've just started the manager daemon, we also need to recreate
+        # the client.
+        self.manager_client = FlossManagerClient(self.bus)
+
+        # Wait for the manager daemon to come up
+        try:
+            utils.poll_for_condition(
+                    condition=(lambda: self.manager_client.has_proxy()),
+                    desc='Wait for manager daemon to come up',
+                    sleep_interval=0.5,
+                    timeout=self.DAEMON_TIMEOUT_SEC)
+        except Exception as e:
+            logging.error('timeout: error starting manager daemon: %s', e)
+
+        # We need to observe callbacks for proper operation.
+        if not self.manager_client.register_callbacks():
+            logging.error('manager_client: Failed to register callbacks')
+            return False
+
+        # Floss may not yet be enabled so make sure to enable it here.
+        if self.manager_client.get_floss_enabled() != enabled:
+            self.manager_client.set_floss_enabled(enabled)
+            default_adapter = self.manager_client.get_default_adapter()
+            try:
+                utils.poll_for_condition(
+                        condition=(lambda: self.manager_client.
+                                   get_adapter_enabled(default_adapter
+                                                       ) == enabled),
+                        desc='Wait for set floss enabled to complete',
+                        sleep_interval=0.5,
+                        timeout=self.DAEMON_TIMEOUT_SEC)
+            except Exception as e:
+                logging.error(
+                        'timeout: error waiting for set_floss_enabled: %s', e)
+
+        # Also configure cras to enable/disable floss
+        self.configure_cras_floss(enabled)
+
+        return True
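+    # Illustrative usage (hypothetical call site): a Floss test would call
+    # configure_floss(enabled=True), while Bluez tests call
+    # configure_floss(enabled=False) before starting bluetoothd (see
+    # BluezFacadeLocal.start_bluetoothd below).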
+
+    def configure_cras_floss(self, enabled):
+        """Configure whether CRAS has floss enabled."""
+        cras_utils.set_floss_enabled(enabled)
+
+    def _restart_cras(self, enable_floss=False):
+        """Restarts CRAS and sets whether Floss is enabled."""
+        UpstartClient.stop(self.CRAS_JOB)
+        started = UpstartClient.start(self.CRAS_JOB)
+
+        def _set_floss():
+            try:
+                self.configure_cras_floss(enable_floss)
+                return True
+            except:
+                return False
+
+        try:
+            if started:
+                utils.poll_for_condition(
+                        condition=_set_floss,
+                        desc='Wait for CRAS to come up and configure floss',
+                        sleep_interval=1,
+                        timeout=self.DAEMON_TIMEOUT_SEC)
+        except Exception as e:
+            logging.error(
+                    'timeout: error waiting to set floss on cras: %s', e)
+            return False
+
+        # Did we successfully start the cras daemon?
+        return started
+
+    def log_message(self, msg):
+        """ log a message to /var/log/messages."""
+        try:
+            cmd = ['logger', msg]
+            subprocess.call(cmd)
+        except Exception as e:
+            logging.error("log_message %s failed with %s", cmd, str(e))
+
+    def messages_start(self):
+        """Start messages monitoring.
+
+        @returns: True if logging started successfully, else False
+        """
+
+        try:
+            self.messages.StartRecording()
+            return True
+
+        except Exception as e:
+            logging.error('Failed to start log recording with error: %s', e)
+
+        return False
+
+    def messages_stop(self):
+        """Stop messages monitoring.
+
+        @returns: True if logs were successfully gathered since logging started,
+                else False
+        """
+        try:
+            self.messages.StopRecording()
+            return True
+
+        except Exception as e:
+            logging.error('Failed to stop log recording with error: %s', e)
+
+        return False
+
+    def messages_find(self, pattern_str):
+        """Find if a pattern string exists in messages output.
+
+        @param pattern_str: the pattern string to find.
+
+        @returns: True on success. False otherwise.
+
+        """
+        return self.messages.LogContains(pattern_str)
+
+    def clean_bluetooth_kernel_log(self, log_level):
+        """Remove Bluetooth kernel logs in /var/log/messages with loglevel
+           equal to or greater than |log_level|
+
+        @param log_level: int in range [0..7]
+        """
+        reg_exp = r'[^ ]+ ({LEVEL}) kernel: \[.*\] Bluetooth: .*'.format(
+                LEVEL='|'.join(self.SYSLOG_LEVELS[log_level:]))
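+        # Illustrative example: log_level=4 produces the pattern
+        #   '[^ ]+ (WARNING|NOTICE|INFO|DEBUG) kernel: \[.*\] Bluetooth: .*'
+        # so Bluetooth kernel lines at WARNING or any less severe level are
+        # filtered out.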
+
+        logging.debug('Set kernel filter to level %d', log_level)
+
+        self.messages.FilterOut(reg_exp)
+
+    def _encode_base64_json(self, data):
+        """Base64 encode and json encode the data.
+        Required to handle non-ascii data
+
+        @param data: data to be base64 and JSON encoded
+
+        @return: base64 and JSON encoded data
+
+        """
+        logging.debug('_encode_base64_json raw data is %s', data)
+        b64_encoded = utils.base64_recursive_encode(data)
+        logging.debug('base64 encoded data is %s', b64_encoded)
+        json_encoded = json.dumps(b64_encoded)
+        logging.debug('JSON encoded data is %s', json_encoded)
+        return json_encoded
+
+    def is_wrt_supported(self):
+        """Check if Bluetooth adapter support WRT logs
+
+        WRT is supported on Intel adapters other than (StP2 and WP2)
+
+        @returns : True if adapter is Intel made.
+        """
+        # Dict of Intel Adapters that support WRT and vid:pid
+        vid_pid_dict = {
+                'HrP2': '8086:02f0',
+                'ThP2': '8086:2526',
+                'JfP2': '8086:31dc',
+                'JfP2-2': '8086:9df0'
+        }  # On Sarien/Arcada
+
+        def _get_lspci_vid_pid(output):
+            """Parse the output of 'lspci -knn' and get the vid:pid.
+
+            output is of the form '01:00.0 Network controller [0280]:
+            Intel Corporation Device [8086:2526] (rev 29)\n'
+
+            @returns: 'vid:pid' or None
+            """
+            try:
+                for i in output.split('\n'):
+                    if 'Network controller' in i:
+                        logging.debug('Got line %s', i)
+                        if 'Intel Corporation' in i:
+                            return i.split('[')[2].split(']')[0]
+                return None
+            except Exception as e:
+                logging.debug('Exception in _get_lspci_vid_pid %s', str(e))
+                return None
+
+        try:
+            cmd = ['lspci', '-knn']
+            output = subprocess.check_output(cmd, encoding='UTF-8')
+            vid_pid = _get_lspci_vid_pid(output)
+            logging.debug("got vid_pid %s", vid_pid)
+            if vid_pid is not None:
+                if vid_pid in list(vid_pid_dict.values()):
+                    return True
+        except Exception as e:
+            logging.error('is_wrt_supported: %s failed with %s', cmd,
+                          str(e))
+        return False
+
+    def enable_wrt_logs(self):
+        """ Enable WRT logs for Intel Bluetooth adapters.
+
+            This is applicable only to Intel adapters.
+            Execute a series of custom hcitool commands to
+            set up WRT log collection
+
+            Precondition :
+                1) Check if the DUT has Intel controller other than StP2
+                2) Make sure the controller is powered on
+        """
+        fw_trace_cmd = (
+                'hcitool cmd 3f 7c 01 10 00 00 00 FE 81 02 80 04 00 00'
+                ' 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00'
+                ' 00 00 00 00 00 00 00')
+        ddc_read_cmd = 'hcitool cmd 3f 8c 28 01'
+        ddc_write_cmd_prefix = 'hcitool cmd 3f 8b 03 28 01'
+        hw_trace_cmd = (
+                'hcitool cmd 3f 6f 01 08 00 00 00 00 00 00 00 00 01 00'
+                ' 00 03 01 03 03 03 10 03 6A 0A 6A 0A 6A 0A 6A 0A 00 00'
+                ' 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00'
+                ' 00 00 00 00 00 00')
+        multi_comm_trace_str = ('000000F600000000005002000000003F3F3F3'
+                                'F3F003F000000000000000001000000000000000000'
+                                '000000000000000000000000000000000000000000'
+                                '00000000000000000000000000000000000000000'
+                                '00000000000000000')
+        multi_comm_trace_file = ('/sys/kernel/debug/ieee80211'
+                                 '/phy0/iwlwifi/iwlmvm/send_hcmd')
+
+        def _execute_cmd(cmd_str, msg=''):
+            """Wrapper around subprocess.check_output.
+
+            @param cmd_str: Command to be executed as a string
+            @param msg: Optional description of the command
+
+            @returns: (True, output) if execution succeeded
+                      (False, None) if execution failed
+
+            """
+            try:
+                logging.info('Executing %s cmd', msg)
+                cmd = cmd_str.split(' ')
+                logging.debug('command is "%s"', cmd)
+                output = subprocess.check_output(cmd, encoding='UTF-8')
+                logging.info('%s cmd successfully executed', msg)
+                logging.debug('output is %s', output)
+                return (True, output)
+            except Exception as e:
+                logging.error('Exception %s while executing %s command',
+                              str(e), msg)
+                return (False, None)
+
+        def _get_ddc_write_cmd(ddc_read_result, ddc_write_cmd_prefix):
+            """ Create ddc_write_cmd from read command
+
+           This function performs the following
+           1) Take the output of ddc_read_cmd which is in following form
+              '< HCI Command: ogf 0x3f, ocf 0x008c, plen 1\n
+               01 \n>
+               HCI Event: 0x0e plen 6\n  01 8C FC 12 00 18 \n'
+           2) Take the last value of the output
+              01 8C FC 12 00 ===>> 18 <====
+           3) Bitwise or with 0x40
+              0x18 | 0x40 = 0x58
+           4) Add it to the end of the ddc_write_cmd
+              'hcitool 01 8C FC 00 28 01 ===> 58 <===='
+
+           """
+            last_line = [
+                    i for i in ddc_read_result.strip().split('\n') if i != ''
+            ][-1]
+            last_byte = [i for i in last_line.split(' ') if i != ''][-1]
+            processed_byte = hex(int(last_byte, 16) | 0x40).split('0x')[1]
+            cmd = ddc_write_cmd_prefix + ' ' + processed_byte
+            logging.debug('ddc_write_cmd is %s', cmd)
+            return cmd
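+        # Worked example (illustrative): if the DDC read output ends in
+        # '18', then hex(0x18 | 0x40) yields '58' and the write command
+        # becomes 'hcitool cmd 3f 8b 03 28 01 58'.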
+
+        try:
+            logging.info('Enabling WRT logs')
+            status, _ = _execute_cmd(fw_trace_cmd, 'FW trace cmd')
+            if not status:
+                logging.info('FW trace command execution failed')
+                return False
+
+            status, ddc_read_result = _execute_cmd(ddc_read_cmd, 'DDC Read')
+            if not status:
+                logging.info('DDC Read command execution failed')
+                return False
+
+            ddc_write_cmd = _get_ddc_write_cmd(ddc_read_result,
+                                               ddc_write_cmd_prefix)
+            logging.debug('DDC Write command is %s', ddc_write_cmd)
+            status, _ = _execute_cmd(ddc_write_cmd, 'DDC Write')
+            if not status:
+                logging.info('DDC Write command execution failed')
+                return False
+
+            status, hw_trace_result = _execute_cmd(hw_trace_cmd, 'HW trace')
+            if not status:
+                logging.info('HW Trace command execution failed')
+                return False
+
+            logging.debug('Executing the multi_comm_trace cmd %s to file %s',
+                          multi_comm_trace_str, multi_comm_trace_file)
+            with open(multi_comm_trace_file, 'w') as f:
+                f.write(multi_comm_trace_str + '\n')
+                f.flush()
+
+            logging.info('WRT Logs enabled')
+            return True
+        except Exception as e:
+            logging.error('Exception %s while enabling WRT logs', str(e))
+            return False
+
+    def collect_wrt_logs(self):
+        """Collect the WRT logs for Intel Bluetooth adapters
+
+           This is applicable only to Intel adapters.
+           Execute following command to collect WRT log. The logs are
+           copied to /var/spool/crash/
+
+           'echo 1 > sudo tee /sys/kernel/debug/ieee80211/phy0'
+                           '/iwlwifi/iwlmvm/fw_dbg_collect'
+           This is to be called only after enable_wrt_logs is called
+
+
+           Precondition:
+                 1) enable_wrt_logs has been called
+        """
+
+        def _collect_logs():
+            """Execute command to collect wrt logs."""
+            try:
+                with open(
+                        '/sys/kernel/debug/ieee80211/phy0/iwlwifi/'
+                        'iwlmvm/fw_dbg_collect', 'w') as f:
+                    f.write('1')
+                    f.flush()
+                # There is some flakiness in log collection. This sleep
+                # works around that flakiness.
+                time.sleep(10)
+                return True
+            except Exception as e:
+                logging.error('Exception %s in _collect_logs', str(e))
+                return False
+
+        def _get_num_log_files():
+            """Return number of WRT log files."""
+            try:
+                return len(glob.glob('/var/spool/crash/devcoredump_iwlwifi*'))
+            except Exception as e:
+                logging.debug('Exception %s raised in _get_num_log_files',
+                              str(e))
+                return 0
+
+        try:
+            logging.info('Collecting WRT logs')
+            #
+            # The command to trigger the logs does not always seem to work.
+            # As a workaround for this flakiness, execute it multiple times
+            # until a new log is created
+            #
+            num_logs_present = _get_num_log_files()
+            logging.debug('%s logs present', num_logs_present)
+            for i in range(10):
+                time.sleep(1)
+                logging.debug('Executing command to collect WRT logs ')
+                if _collect_logs():
+                    logging.debug('Command to collect WRT logs executed')
+                else:
+                    logging.debug('Command to collect WRT logs failed')
+                    continue
+
+                if _get_num_log_files() > num_logs_present:
+                    logging.info('Successfully collected WRT logs ')
+                    return True
+                else:
+                    logging.debug('Log file not written. Trying again')
+
+            logging.info('Unable to collect WRT logs')
+            return False
+        except Exception as e:
+            logging.error('Exception %s while collecting WRT logs', str(e))
+            return False
+
+    def _get_wake_enabled_path(self):
+        # Walk up the parents from hci0 sysfs path and find the first one with
+        # a power/wakeup property. Return that path (including power/wakeup).
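+        # Illustrative example (assumed layout for a USB controller): hci0
+        # resolves to .../usb1/1-4/1-4:1.0/bluetooth/hci0 and walking up the
+        # parents finds .../usb1/1-4/power/wakeup.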
+
+        # Resolve hci path to get full device path (i.e. w/ usb or uart)
+        search_at = os.path.realpath('/sys/class/bluetooth/hci0')
+
+        # Exit early if path doesn't exist
+        if not os.path.exists(search_at):
+            return None
+
+        # Walk up parents and try to find one with 'power/wakeup'
+        for _ in range(search_at.count('/') - 1):
+            search_at = os.path.normpath(os.path.join(search_at, '..'))
+            try:
+                path = os.path.join(search_at, 'power', 'wakeup')
+                with open(path, 'r') as f:
+                    return path
+            except IOError:
+                # No power wakeup at the given location so keep going
+                continue
+
+        return None
+
+    def _is_wake_enabled(self):
+        search_at = self._get_wake_enabled_path()
+
+        if search_at is not None:
+            try:
+                with open(search_at, 'r') as f:
+                    value = f.read()
+                    logging.info('Power/wakeup found at {}: {}'.format(
+                            search_at, value))
+                    return 'enabled' in value
+            except IOError:
+                # Path was not readable
+                return False
+
+        logging.debug('No power/wakeup path found')
+        return False
+
+    def _set_wake_enabled(self, value):
+        path = self._get_wake_enabled_path()
+        if path is not None:
+            try:
+                with open(path, 'w') as f:
+                    f.write('enabled' if value else 'disabled')
+                    return True
+            except IOError:
+                # Path was not writeable
+                return False
+
+        return False
+
+    def is_wake_enabled(self):
+        """Checks whether the bluetooth adapter has wake enabled.
+
+        This will walk through all parents of the hci0 sysfs path and try to
+        find one with a 'power/wakeup' entry and return whether its value is
+        'enabled'.
+
+        @return True if 'power/wakeup' of an hci0 parent is 'enabled'
+        """
+        enabled = self._is_wake_enabled()
+        return enabled
+
+    def set_wake_enabled(self, value):
+        """Sets wake enabled to the value if path exists.
+
+        This will walk through all parents of the hci0 sysfs path and write the
+        value to the first one it finds.
+
+        @param value: Sets power/wakeup to "enabled" if value is true, else
+                   "disabled"
+
+        @return True if it wrote value to a power/wakeup, False otherwise
+        """
+        return self._set_wake_enabled(value)
+
+    def wait_for_hid_device(self, device_address, timeout, sleep_interval):
+        """Waits for hid device with given device address.
+
+        @param device_address: Peripheral address
+        @param timeout: maximum number of seconds to wait
+        @param sleep_interval: time to sleep between polls
+
+        @return True if hid device found, False otherwise
+        """
+
+        def _match_hid_to_device(hidpath, device_address):
+            """Check if given hid syspath is for the given device address """
+            # If the syspath has a uniq property that matches the peripheral
+            # device's address, then it has matched
+            props = UdevadmInfo.GetProperties(hidpath)
+            if (props.get(b'uniq', b'').lower().decode() == device_address):
+                logging.info('Found hid device for address {} at {}'.format(
+                        device_address, hidpath))
+                return True
+            else:
+                logging.info('Path {} is not the right device.'.format(
+                        hidpath))
+
+            return False
+
+        def _hid_is_created(device_address):
+            existing_inputs = UdevadmTrigger(
+                    subsystem_match=['input']).DryRun()
+            for entry in existing_inputs:
+                entry = entry.decode()
+                bt_hid = any([t in entry for t in ['uhid', 'hci']])
+                logging.info('udevadm trigger entry is {}: {}'.format(
+                        bt_hid, entry))
+
+                if (bt_hid and _match_hid_to_device(entry,
+                                                    device_address.lower())):
+                    return True
+
+            return False
+
+        if timeout is None:
+            timeout = self.HID_TIMEOUT
+        if sleep_interval is None:
+            sleep_interval = self.HID_CHECK_SECS
+
+        method_name = 'wait_for_hid_device'
+        try:
+            utils.poll_for_condition(
+                    condition=(lambda: _hid_is_created(device_address)),
+                    timeout=timeout,
+                    sleep_interval=sleep_interval,
+                    desc=('Waiting for HID device to be created from %s' %
+                          device_address))
+            return True
+        except utils.TimeoutError as e:
+            logging.error('%s: %s', method_name, e)
+        except Exception as e:
+            logging.error('%s: unexpected error: %s', method_name, e)
+
+        return False
+
+    def _powerd_last_resume_details(self, before=5, after=0):
+        """ Look at powerd logs for last suspend/resume attempt.
+
+        Note that logs are in reverse order (chronologically). Keep that in mind
+        for the 'before' and 'after' parameters.
+
+        @param before: Number of context lines before search item to show.
+        @param after: Number of context lines after search item to show.
+
+        @return Most recent lines containing suspend resume details or ''.
+        """
+        event_file = '/var/log/power_manager/powerd.LATEST'
+
+        # Each powerd_suspend wakeup has a log "powerd_suspend returned 0",
+        # with the return code of the suspend. We search for the last
+        # occurrence in the log, and then find the collocated event_count log,
+        # indicating the wakeup cause. -B option for grep will actually grab the
+        # *next* 5 logs in time, since we are piping the powerd file backwards
+        # with tac command
+        resume_indicator = 'powerd_suspend returned'
+        cmd = 'tac {} | grep -A {} -B {} -m1 "{}"'.format(
+                event_file, after, before, resume_indicator)
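+        # With the default arguments this builds (illustrative):
+        #   tac /var/log/power_manager/powerd.LATEST | \
+        #       grep -A 0 -B 5 -m1 "powerd_suspend returned"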
+
+        try:
+            return utils.run(cmd).stdout
+        except error.CmdError:
+            logging.error('Could not locate recent suspend')
+
+        return ''
+
+    def bt_caused_last_resume(self):
+        """Checks if last resume from suspend was caused by bluetooth
+
+        @return: True if BT wake path was cause of resume, False otherwise
+        """
+
+        # When the resume cause is printed to powerd log, it omits the
+        # /power/wakeup portion of wake path
+        bt_wake_path = self._get_wake_enabled_path()
+
+        # If bluetooth does not have a valid wake path, it could not have caused
+        # the resume
+        if not bt_wake_path:
+            return False
+
+        bt_wake_path = bt_wake_path.replace('/power/wakeup', '')
+
+        last_resume_details = self._powerd_last_resume_details().rstrip(
+                '\n ').split('\n')
+        logging.debug('/var/log/power_manager/powerd.LATEST: 5 lines after '
+                      'powerd_suspend returns:')
+        for l in last_resume_details[::-1]:
+            logging.debug(l)
+        # If BT caused wake, there will be a line describing the bt wake
+        # path's event_count before and after the resume
+        for line in last_resume_details:
+            if 'event_count' in line:
+                logging.info('Checking wake event: {}'.format(line))
+                if bt_wake_path in line:
+                    logging.debug('BT event woke the DUT')
+                    return True
+
+        return False
+
+    def find_last_suspend_via_powerd_logs(self):
+        """ Finds the last suspend attempt via powerd logs.
+
+        Finds the last suspend attempt using powerd logs by searching backwards
+        through the logs to find the latest entries with 'powerd_suspend'. If we
+        can't find a suspend attempt, we return None.
+
+        @return: Tuple (suspend start time, suspend end time, suspend result) or
+                None if we can't find a suspend attempt
+        """
+        # Logs look like this (ignore newline):
+        # 2021-02-11T18:53:43.561880Z INFO powerd:
+        #       [daemon.cc(724)] powerd_suspend returned 0
+        # ... stuff in between ...
+        # 2021-02-11T18:53:13.277695Z INFO powerd:
+        #       [suspender.cc(574)] Starting suspend
+
+        # Date format for strptime and strftime
+        date_format = '%Y-%m-%dT%H:%M:%S.%fZ'
+        date_group_re = (r'(?P<date>[0-9]+-[0-9]+-[0-9]+T'
+                         r'[0-9]+:[0-9]+:[0-9]+[.][0-9]+Z)\s')
+
+        finish_suspend_re = re.compile(
+                '^{date_regex}'
+                '.*daemon.*powerd_suspend returned '
+                '(?P<exitcode>[0-9]+)'.format(date_regex=date_group_re))
+        start_suspend_re = re.compile(
+                '^{date_regex}.*suspender.*'
+                'Starting suspend'.format(date_regex=date_group_re))
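+        # Against the log excerpt above (illustrative), finish_suspend_re
+        # matches the 'powerd_suspend returned 0' line (exitcode '0') and
+        # start_suspend_re matches the 'Starting suspend' line.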
+
+        now = datetime.now()
+        last_resume_details = self._powerd_last_resume_details(before=0,
+                                                               after=8)
+        if last_resume_details:
+            start_time, end_time, ret = None, None, None
+            try:
+                for line in last_resume_details.split('\n'):
+                    logging.debug('Last suspend search: %s', line)
+                    m = finish_suspend_re.match(line)
+                    if m:
+                        logging.debug('Found suspend end: date(%s) ret(%s)',
+                                      m.group('date'), m.group('exitcode'))
+                        end_time = datetime.strptime(
+                                m.group('date'),
+                                date_format).replace(year=now.year)
+                        ret = int(m.group('exitcode'))
+
+                    m = start_suspend_re.match(line)
+                    if m:
+                        logging.debug('Found suspend start: date(%s)',
+                                      m.group('date'))
+                        start_time = datetime.strptime(
+                                m.group('date'),
+                                date_format).replace(year=now.year)
+                        break
+
+                if all([x is not None for x in [start_time, end_time, ret]]):
+                    # Return dates in string format due to inconsistency between
+                    # python2/3 usage on host and dut
+                    return (start_time.strftime(self.OUT_DATE_FORMAT),
+                            end_time.strftime(self.OUT_DATE_FORMAT), ret)
+                else:
+                    logging.error(
+                            'Failed to parse details from last suspend. %s %s %s',
+                            str(start_time), str(end_time), str(ret))
+            except Exception as e:
+                logging.error('Failed to parse last suspend: %s', str(e))
+        else:
+            logging.error('No powerd_suspend attempt found')
+
+        return None
+
+    def do_suspend(self, seconds, expect_bt_wake):
+        """Suspend DUT using the power manager.
+
+        @param seconds: The number of seconds to suspend the device.
+        @param expect_bt_wake: Whether we expect bluetooth to wake us from
+            suspend. If true, we expect this resume will occur early
+
+        @throws: SuspendFailure on resume with unexpected timing or wake source.
+            The raised exception will be handled as a non-zero retcode over the
+            RPC, signalling for the test to fail.
+        """
+        early_wake = False
+        try:
+            sys_power.do_suspend(seconds)
+
+        except sys_power.SpuriousWakeupError:
+            logging.info('Early resume detected...')
+            early_wake = True
+
+        # Handle error conditions based on test expectations, whether resume
+        # was early, and cause of the resume
+        bt_caused_wake = self.bt_caused_last_resume()
+        logging.info('Cause for resume: {}'.format(
+                'BT' if bt_caused_wake else 'Not BT'))
+
+        if not expect_bt_wake and bt_caused_wake:
+            raise sys_power.SuspendFailure('BT woke us unexpectedly')
+
+        # TODO(b/160803597) - Uncomment when BT wake reason is correctly
+        # captured in powerd log.
+        #
+        # if expect_bt_wake and not bt_caused_wake:
+        #   raise sys_power.SuspendFailure('BT should have woken us')
+        #
+        # if bt_caused_wake and not early_wake:
+        #   raise sys_power.SuspendFailure('BT wake did not come early')
+
+        return True
+
+    def get_wlan_vid_pid(self):
+        """ Return vendor id and product id of the wlan chip on BT/WiFi module
+
+        @returns: (vid,pid) on success; (None,None) on failure
+        """
+        vid = None
+        pid = None
+        path_template = '/sys/class/net/%s/device/'
+        for dev_name in ['wlan0', 'mlan0']:
+            if os.path.exists(path_template % dev_name):
+                path_v = path_template % dev_name + 'vendor'
+                path_d = path_template % dev_name + 'device'
+                logging.debug('Paths are %s %s', path_v, path_d)
+                try:
+                    with open(path_v) as f:
+                        vid = f.read().strip('\n')
+                    with open(path_d) as f:
+                        pid = f.read().strip('\n')
+                    break
+                except Exception as e:
+                    logging.error('Exception %s while reading vid/pid', str(e))
+        logging.debug('returning vid:%s pid:%s', vid, pid)
+        return (vid, pid)
+
+    def get_bt_transport(self):
+        """ Return transport (UART/USB/SDIO) used by BT module
+
+        @returns: USB/UART/SDIO on success; None on failure
+        """
+        try:
+            transport_str = os.path.realpath(
+                    '/sys/class/bluetooth/hci0/device/driver/module')
+            logging.debug('transport is %s', transport_str)
+            transport = transport_str.split('/')[-1]
+            if transport == 'btusb':
+                return 'USB'
+            elif transport == 'hci_uart':
+                return 'UART'
+            elif transport in ['btmrvl_sdio', 'btmtksdio']:
+                return 'SDIO'
+            else:
+                return None
+        except Exception as e:
+            logging.error('Exception %s in get_bt_transport', str(e))
+            return None
+
+    def get_bt_module_name(self):
+        """ Return bluetooth module name for non-USB devices
+
+        @returns: '' on failure. On success, the chipset name if found in the
+                 dict, otherwise the raw string read.
+        """
+        # map the string read from device to chipset name
+        chipset_string_dict = {
+                'qcom,wcn3991-bt\x00': 'WCN3991',
+                'qcom,wcn6750-bt\x00': 'WCN6750',
+        }
+
+        hci_device = '/sys/class/bluetooth/hci0'
+        real_path = os.path.realpath(hci_device)
+
+        logging.debug('real path is %s', real_path)
+        if 'usb' in real_path:
+            return ''
+
+        device_path = os.path.join(real_path, 'device', 'of_node',
+                                   'compatible')
+        try:
+            chipset_string = open(device_path).read()
+            logging.debug('read string %s from %s', chipset_string,
+                          device_path)
+        except Exception as e:
+            logging.error('Exception %s while reading from file %s', str(e),
+                          device_path)
+            return ''
+
+        if chipset_string in chipset_string_dict:
+            return chipset_string_dict[chipset_string]
+        else:
+            logging.debug("Chipset not known. Returning %s", chipset_string)
+            return chipset_string
+
+    def get_chipset_name(self):
+        """ Get the name of BT/WiFi chipset on this host
+
+        @returns chipset name if successful else ''
+        """
+        (vid, pid) = self.get_wlan_vid_pid()
+        logging.debug('Bluetooth module vid pid is %s %s', vid, pid)
+        transport = self.get_bt_transport()
+        logging.debug('Bluetooth transport is %s', transport)
+        if vid is None or pid is None:
+            # Controllers that aren't WLAN+BT combo chips do not expose
+            # Vendor ID/Product ID. Use the alternate method.
+            # This will return a known chipset name (e.g. 'WCN3991'), '' on
+            # failure, or the raw string read from the DUT.
+            return self.get_bt_module_name()
+        for name, l in self.CHIPSET_TO_VIDPID.items():
+            if ((vid, pid), transport) in l:
+                return name
+        return ''
+
+    def get_bt_usb_device_strs(self):
+        """ Return the usb endpoints for the bluetooth device, if they exist
+
+        We wish to be able to identify usb disconnect events that affect our
+        bluetooth operation. To do so, we must first identify the usb endpoint
+        that is associated with our bluetooth device.
+
+        @returns: Relevant usb endpoints for the bluetooth device,
+                  i.e. ['1-1','1-1.2'] if they exist,
+                  [] otherwise
+        """
+
+        hci_device = '/sys/class/bluetooth/hci0'
+        real_path = os.path.realpath(hci_device)
+
+        # real_path for a usb bluetooth controller will look something like:
+        # ../../devices/pci0000:00/0000:00:14.0/usb1/1-4/1-4:1.0/bluetooth/hci0
+        if 'usb' not in real_path:
+            return []
+
+        logging.debug('Searching for usb path: {}'.format(real_path))
+
+        # Grab all numbered entries between 'usb' and 'bluetooth' descriptors
+        m = re.search(r'usb(.*)bluetooth', real_path)
+
+        if not m:
+            logging.error(
+                    'Unable to extract usb dev from {}'.format(real_path))
+            return []
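+        # For the usb path shown above (illustrative), m.group(1) is
+        # '1/1-4/1-4:1.0/', so the returned list is
+        # ['1', '1-4', '1-4:1.0', ''].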
+
+        # Return the path as a list of individual usb descriptors
+        return m.group(1).split('/')
+
+    def get_bt_usb_disconnect_str(self):
+        """ Return the expected log error on USB disconnect
+
+        Locate the descriptor that will be used from the list of all usb
+        descriptors associated with our bluetooth chip, and format into the
+        expected string error for USB disconnect
+
+        @returns: string representing expected usb disconnect log entry if usb
+                  device could be identified, None otherwise
+        """
+        disconnect_log_template = 'usb {}: USB disconnect'
+        descriptors = self.get_bt_usb_device_strs()
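+        # Continuing the example above (illustrative): the longest descriptor
+        # without ':' is '1-4', yielding 'usb 1-4: USB disconnect'.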
+
+        # The usb disconnect log message seems to use the most detailed
+        # descriptor that does not use the ':1.0' entry
+        for d in sorted(descriptors, key=len, reverse=True):
+            if ':' not in d:
+                return disconnect_log_template.format(d)
+
+        return None
+
+    def get_device_utc_time(self):
+        """ Get the current device time in UTC. """
+        return datetime.utcnow().strftime(self.OUT_DATE_FORMAT)
+
+    def create_audio_record_directory(self, audio_record_dir):
+        """Create the audio recording directory.
+
+        @param audio_record_dir: the audio recording directory
+
+        @returns: True on success. False otherwise.
+        """
+        try:
+            if not os.path.exists(audio_record_dir):
+                os.makedirs(audio_record_dir)
+            return True
+        except Exception as e:
+            logging.error('Failed to create %s on the DUT: %s',
+                          audio_record_dir, e)
+            return False
+
+    def start_capturing_audio_subprocess(self, audio_data, recording_device):
+        """Start capturing audio in a subprocess.
+
+        @param audio_data: the audio test data
+        @param recording_device: which device recorded the audio,
+                possible values are 'recorded_by_dut' or 'recorded_by_peer'
+
+        @returns: True on success. False otherwise.
+        """
+        audio_data = json.loads(audio_data)
+        return self._cras_test_client.start_capturing_subprocess(
+                audio_data[recording_device],
+                sample_format=audio_data['format'],
+                channels=audio_data['channels'],
+                rate=audio_data['rate'],
+                duration=audio_data['duration'])
+
+    def stop_capturing_audio_subprocess(self):
+        """Stop capturing audio.
+
+        @returns: True on success. False otherwise.
+        """
+        return self._cras_test_client.stop_capturing_subprocess()
+
+    def _generate_playback_file(self, audio_data):
+        """Generate the playback file if it does not exist yet.
+
+        Some audio test files may be large. Generate them on the fly
+        to save storage space in the source tree.
+
+        @param audio_data: the audio test data
+        """
+        if not os.path.exists(audio_data['file']):
+            data_format = dict(file_type='raw',
+                               sample_format='S16_LE',
+                               channel=audio_data['channels'],
+                               rate=audio_data['rate'])
+
+            # Make the audio file a bit longer to handle any delay
+            # issue in capturing.
+            duration = audio_data['duration'] + 3
+            audio_test_data_module.GenerateAudioTestData(
+                    data_format=data_format,
+                    path=audio_data['file'],
+                    duration_secs=duration,
+                    frequencies=audio_data['frequencies'])
+            logging.debug("Raw file generated: %s", audio_data['file'])
+
+    def start_playing_audio_subprocess(self, audio_data, pin_device=None):
+        """Start playing audio in a subprocess.
+
+        @param audio_data: the audio test data.
+        @param pin_device: the device id to play audio.
+
+        @returns: True on success. False otherwise.
+        """
+        audio_data = json.loads(audio_data)
+        self._generate_playback_file(audio_data)
+        try:
+            return self._cras_test_client.start_playing_subprocess(
+                    audio_data['file'],
+                    pin_device=pin_device,
+                    channels=audio_data['channels'],
+                    rate=audio_data['rate'],
+                    duration=audio_data['duration'])
+        except Exception as e:
+            logging.error("start_playing_subprocess() failed: %s", str(e))
+            return False
+
+    def stop_playing_audio_subprocess(self):
+        """Stop playing audio in the subprocess.
+
+        @returns: True on success. False otherwise.
+        """
+        return self._cras_test_client.stop_playing_subprocess()
+
+    def play_audio(self, audio_data):
+        """Play audio.
+
+        It blocks until it has completed playing back the audio.
+
+        @param audio_data: the audio test data
+
+        @returns: True on success. False otherwise.
+        """
+        audio_data = json.loads(audio_data)
+        self._generate_playback_file(audio_data)
+        return self._cras_test_client.play(audio_data['file'],
+                                           channels=audio_data['channels'],
+                                           rate=audio_data['rate'],
+                                           duration=audio_data['duration'])
+
+    def check_audio_frames_legitimacy(self, audio_test_data, recording_device,
+                                      recorded_file):
+        """Get the number of frames in the recorded audio file.
+
+        @param audio_test_data: the audio test data
+        @param recording_device: which device recorded the audio,
+                possible values are 'recorded_by_dut' or 'recorded_by_peer'
+        @param recorded_file: the recorded file name
+
+        @returns: True if audio frames are legitimate.
+        """
+        if bool(recorded_file):
+            recorded_filename = recorded_file
+        else:
+            audio_test_data = json.loads(audio_test_data)
+            recorded_filename = audio_test_data[recording_device]
+
+        if recorded_filename.endswith('.raw'):
+            # Make sure that the recorded file does not contain all zeros.
+            filesize = os.path.getsize(recorded_filename)
+            cmd_str = 'cmp -s -n %d %s /dev/zero' % (filesize,
+                                                     recorded_filename)
+            try:
+                result = subprocess.call(cmd_str.split())
+                return result != 0
+            except Exception as e:
+                logging.error("Failed: %s (%s)", cmd_str, str(e))
+                return False
+        else:
+            # The recorded wav file should not be empty.
+            wav_file = check_quality.WaveFile(recorded_filename)
+            return wav_file.get_number_frames() > 0
+
+    def convert_audio_sample_rate(self, input_file, out_file, test_data,
+                                  new_rate):
+        """Convert audio file to new sample rate.
+
+        @param input_file: Path to file to upsample.
+        @param out_file: Path to create upsampled file.
+        @param test_data: Dictionary with information about file.
+        @param new_rate: New rate to upsample file to.
+
+        @returns: True if upsampling succeeded, False otherwise.
+        """
+        test_data = json.loads(test_data)
+        logging.debug('Resampling file {} to new rate {}'.format(
+                input_file, new_rate))
+
+        convert_format(input_file,
+                       test_data['channels'],
+                       test_data['bit_width'],
+                       test_data['rate'],
+                       out_file,
+                       test_data['channels'],
+                       test_data['bit_width'],
+                       new_rate,
+                       1.0,
+                       use_src_header=True,
+                       use_dst_header=True)
+
+        return os.path.isfile(out_file)
+
+    def trim_wav_file(self,
+                      in_file,
+                      out_file,
+                      new_duration,
+                      test_data,
+                      tolerance=0.1):
+        """Trim long file to desired length.
+
+        Trims audio file to length by cutting out silence from beginning and
+        end.
+
+        @param in_file: Path to audio file to be trimmed.
+        @param out_file: Path to trimmed audio file to create.
+        @param new_duration: A float representing the desired duration of
+                the resulting trimmed file.
+        @param test_data: Dictionary containing information about the test file.
+        @param tolerance: (optional) A float representing the allowable
+                difference between trimmed file length and desired duration
+
+        @returns: True if file was trimmed successfully, False otherwise.
+        """
+        test_data = json.loads(test_data)
+        trim_silence_from_wav_file(in_file, out_file, new_duration)
+        measured_length = get_file_length(out_file, test_data['channels'],
+                                          test_data['bit_width'],
+                                          test_data['rate'])
+        return abs(measured_length - new_duration) <= tolerance
+
+    def unzip_audio_test_data(self, tar_path, data_dir):
+        """Unzip audio test data files.
+
+        @param tar_path: Path to audio test data tarball on DUT.
+        @param data_dir: Path to the directory to extract the test data into.
+
+        @returns: True if audio test data folder exists, False otherwise.
+        """
+        logging.debug('Downloading audio test data on DUT')
+        # Creates the path of the dir to extract test data to by taking the
+        # name of the tarball without the extension, e.g. <dir>/file.ext maps
+        # to data_dir/file/
+        audio_test_dir = os.path.join(
+                data_dir,
+                os.path.split(tar_path)[1].split('.', 1)[0])
+
+        unzip_cmd = 'tar -xf {0} -C {1}'.format(tar_path, data_dir)
+
+        unzip_proc = subprocess.Popen(unzip_cmd.split(),
+                                      stdout=subprocess.PIPE,
+                                      stderr=subprocess.PIPE)
+        _, stderr = unzip_proc.communicate()
+
+        if stderr:
+            logging.error('Error occurred in unzipping audio data: {}'.format(
+                    str(stderr)))
+            return False
+
+        return unzip_proc.returncode == 0 and os.path.isdir(audio_test_dir)
+
+    def convert_raw_to_wav(self, input_file, output_file, test_data):
+        """Convert raw audio file to wav file.
+
+        @param input_file: the location of the raw file
+        @param output_file: the location to place the resulting wav file
+        @param test_data: the data for the file being converted
+
+        @returns: True if conversion was successful, False otherwise
+        """
+        test_data = json.loads(test_data)
+        convert_raw_file(input_file, test_data['channels'],
+                         test_data['bit_width'], test_data['rate'],
+                         output_file)
+
+        return os.path.isfile(output_file)
+
+    def get_primary_frequencies(self, audio_test_data, recording_device,
+                                recorded_file):
+        """Get primary frequencies of the audio test file.
+
+        @param audio_test_data: the audio test data
+        @param recording_device: which device recorded the audio,
+                possible values are 'recorded_by_dut' or 'recorded_by_peer'
+        @param recorded_file: the recorded file name
+
+        @returns: a list of primary frequencies of channels in the audio file
+        """
+        audio_test_data = json.loads(audio_test_data)
+
+        if bool(recorded_file):
+            recorded_filename = recorded_file
+        else:
+            recorded_filename = audio_test_data[recording_device]
+
+        args = CheckQualityArgsClass(filename=recorded_filename,
+                                     rate=audio_test_data['rate'],
+                                     channel=audio_test_data['channels'],
+                                     bit_width=16)
+        raw_data, rate = check_quality.read_audio_file(args)
+        checker = check_quality.QualityChecker(raw_data, rate)
+        # The highest frequency recorded would be near 24 kHz
+        # as the max sample rate is 48000 in our tests.
+        # So let's set ignore_high_freq to be 48000.
+        checker.do_spectral_analysis(ignore_high_freq=48000,
+                                     check_quality=False,
+                                     quality_params=None)
+        spectra = checker._spectrals
+        primary_freq = [
+                float(spectra[i][0][0]) if spectra[i] else 0
+                for i in range(len(spectra))
+        ]
+        primary_freq.sort()
+        return primary_freq
+
+    def enable_wbs(self, value):
+        """Enable or disable wideband speech (wbs) per the value.
+
+        @param value: True to enable wbs.
+
+        @returns: True if the operation succeeds.
+        """
+        return self._cras_test_client.enable_wbs(value)
+
+    def set_player_playback_status(self, status):
+        """Set playback status for the registered media player.
+
+        @param status: playback status in string.
+
+        """
+        return self._cras_test_client.set_player_playback_status(status)
+
+    def set_player_position(self, position):
+        """Set media position for the registered media player.
+
+        @param position: position in micro seconds.
+
+        """
+        return self._cras_test_client.set_player_position(position)
+
+    def set_player_metadata(self, metadata):
+        """Set metadata for the registered media player.
+
+        @param metadata: dictionary of media metadata.
+
+        """
+        return self._cras_test_client.set_player_metadata(metadata)
+
+    def set_player_length(self, length):
+        """Set media length for the registered media player.
+
+        Media length is a part of the metadata information. However, without
+        specifying its type as int64, dbus-python will guess the variant type
+        to be int32 by default. It is separated from the metadata function so
+        the data can be prepared differently.
+
+        @param length: length in micro seconds.
+
+        """
+        return self._cras_test_client.set_player_length(length)
+
+    def select_input_device(self, device_name):
+        """Select the audio input device.
+
+        @param device_name: the name of the Bluetooth peer device
+
+        @returns: True if the operation succeeds.
+        """
+        return self._cras_test_client.select_input_device(device_name)
+
+    @dbus_safe(None)
+    def select_output_node(self, node_type):
+        """Select the audio output node.
+
+        @param node_type: the node type of the Bluetooth peer device
+
+        @returns: True if the operation succeeds.
+        """
+        return cras_utils.set_single_selected_output_node(node_type)
+
+    @dbus_safe(None)
+    def get_selected_output_device_type(self):
+        """Get the selected audio output node type.
+
+        @returns: the node type of the selected output device.
+        """
+        # Note: should convert the dbus.String to the regular string.
+        return str(cras_utils.get_selected_output_device_type())
+
+    @dbus_safe(None)
+    def get_device_id_from_node_type(self, node_type, is_input):
+        """Gets device id from node type.
+
+        @param node_type: a node type defined in CRAS_NODE_TYPES.
+        @param is_input: True if the node is input. False otherwise.
+
+        @returns: a string for device id.
+        """
+        return cras_utils.get_device_id_from_node_type(node_type, is_input)
+
+    def get_audio_thread_summary(self):
+        """Dumps audio thread info.
+
+        @returns: a list of cras audio information.
+        """
+        return cras_utils.get_audio_thread_summary()
+
+    def is_btmanagerd_present(self):
+        """ Check if /usr/bin/btmanagerd file is present
+
+        @returns: True if /usr/bin/btmanagerd is present and False if not
+        """
+        return os.path.exists(self.BTMANGERD_FILE_PATH)
+
+
+class BluezPairingAgent:
+    """The agent handling the authentication process of bluetooth pairing.
+
+    BluezPairingAgent overrides the RequestPinCode method to return a given
+    pin code. Users can use this agent to pair a bluetooth device which has a
+    known pin code.
+
+    TODO (josephsih): more pairing modes other than pin code would be
+    supported later.
+
+    """
+
+    def __init__(self, bus, path, pin):
+        """Constructor.
+
+        @param bus: system bus object.
+        @param path: Object path to register.
+        @param pin: Pin to respond with for |RequestPinCode|.
+        """
+        self._pin = pin
+        self.path = path
+        self.obj = bus.register_object(path, self, None)
+
+    # D-Bus service definition (required by pydbus).
+    dbus = """
+        <node>
+            <interface name="org.bluez.Agent1">
+                <method name="RequestPinCode">
+                    <arg type="o" name="device_path" direction="in" />
+                    <arg type="s" name="response" direction="out" />
+                </method>
+                <method name="AuthorizeService">
+                    <arg type="o" name="device_path" direction="in" />
+                    <arg type="s" name="uuid" direction="in" />
+                    <arg type="b" name="response" direction="out" />
+                </method>
+            </interface>
+        </node>
+        """
+
+    def unregister(self):
+        """Unregisters self from bus."""
+        self.obj.unregister()
+
+    def RequestPinCode(self, device_path):
+        """Requests pin code for a device.
+
+        Returns the known pin code for the request.
+
+        @param device_path: The object path of the device.
+
+        @returns: The known pin code.
+
+        """
+        logging.info('RequestPinCode for %s; return %s', device_path,
+                     self._pin)
+        return self._pin
+
+    def AuthorizeService(self, device_path, uuid):
+        """Authorize given service for device.
+
+        @param device_path: The object path of the device.
+        @param uuid: The service that needs to be authorized.
+
+        @returns: True (we authorize everything since this is a test)
+        """
+        return True
+
+
+class BluezFacadeLocal(BluetoothBaseFacadeLocal):
+    """Exposes DUT methods called remotely during Bluetooth autotests for the
+    Bluez daemon.
+
+    All instance methods of this object without a preceding '_' are exposed via
+    an XML-RPC server. This is not a stateless handler object, which means that
+    if you store state inside the delegate, that state will remain around for
+    future calls.
+    """
+
+    BLUETOOTHD_JOB = 'bluetoothd'
+
+    DBUS_ERROR_SERVICEUNKNOWN = 'org.freedesktop.DBus.Error.ServiceUnknown'
+
+    BLUEZ_SERVICE_NAME = 'org.bluez'
+    BLUEZ_MANAGER_PATH = '/'
+    BLUEZ_DEBUG_LOG_PATH = '/org/chromium/Bluetooth'
+    BLUEZ_DEBUG_LOG_IFACE = 'org.chromium.Bluetooth.Debug'
+    BLUEZ_MANAGER_IFACE = 'org.freedesktop.DBus.ObjectManager'
+    BLUEZ_ADAPTER_IFACE = 'org.bluez.Adapter1'
+    BLUEZ_ADMIN_POLICY_SET_IFACE = 'org.bluez.AdminPolicySet1'
+    BLUEZ_ADMIN_POLICY_STATUS_IFACE = 'org.bluez.AdminPolicyStatus1'
+    BLUEZ_BATTERY_IFACE = 'org.bluez.Battery1'
+    BLUEZ_DEVICE_IFACE = 'org.bluez.Device1'
+    BLUEZ_GATT_SERV_IFACE = 'org.bluez.GattService1'
+    BLUEZ_GATT_CHAR_IFACE = 'org.bluez.GattCharacteristic1'
+    BLUEZ_GATT_DESC_IFACE = 'org.bluez.GattDescriptor1'
+    BLUEZ_LE_ADVERTISING_MANAGER_IFACE = 'org.bluez.LEAdvertisingManager1'
+    BLUEZ_ADV_MONITOR_MANAGER_IFACE = 'org.bluez.AdvertisementMonitorManager1'
+    BLUEZ_AGENT_MANAGER_PATH = '/org/bluez'
+    BLUEZ_AGENT_MANAGER_IFACE = 'org.bluez.AgentManager1'
+    BLUEZ_PROFILE_MANAGER_PATH = '/org/bluez'
+    BLUEZ_PROFILE_MANAGER_IFACE = 'org.bluez.ProfileManager1'
+    BLUEZ_ERROR_ALREADY_EXISTS = 'org.bluez.Error.AlreadyExists'
+    BLUEZ_PLUGIN_DEVICE_IFACE = 'org.chromium.BluetoothDevice'
+    DBUS_PROP_IFACE = 'org.freedesktop.DBus.Properties'
+    AGENT_PATH = '/test/agent'
+
+    BTMON_STOP_DELAY_SECS = 3
+
+    # Timeout for how long we'll wait for BlueZ and the Adapter to show up
+    # after reset.
+    ADAPTER_TIMEOUT = 30
+
+    # How long we should wait for property update signal before we cancel it.
+    PROPERTY_UPDATE_TIMEOUT_MILLI_SECS = 5000
+
+    # How often we should check for property update exit.
+    PROPERTY_UPDATE_CHECK_MILLI_SECS = 500
+
+    def __init__(self):
+        # Init the BaseFacade first
+        super(BluezFacadeLocal, self).__init__()
+
+        # Open the Bluetooth Raw socket to the kernel, which provides us
+        # direct, raw access to the HCI controller.
+        self._raw = bluetooth_socket.BluetoothRawSocket()
+
+        # Open the Bluetooth Control socket to the kernel which provides us
+        # raw management access to the Bluetooth Host Subsystem. Read the list
+        # of adapter indexes to determine whether this device has a
+        # Bluetooth Adapter.
+        self._control = bluetooth_socket.BluetoothControlSocket()
+        self._has_adapter = len(self._control.read_index_list()) > 0
+
+        # Create an Advertisement Monitor App Manager instance.
+        # This needs to be created before making any dbus connections as
+        # AdvMonitorAppMgr internally forks a new helper process and due to
+        # a limitation of python, it is not possible to fork a new process
+        # once any dbus connections are established.
+        self.advmon_appmgr = adv_monitor_helper.AdvMonitorAppMgr()
+
+        # Set up the connection to the D-Bus System Bus, get the object for
+        # the Bluetooth Userspace Daemon (BlueZ) and that daemon's object for
+        # the Bluetooth Adapter, and the advertising manager.
+        self.bus = pydbus.SystemBus()
+        self._update_bluez()
+        self._update_adapter()
+        self._update_advertising()
+        self._update_adv_monitor_manager()
+
+        # The agent to handle pin code request, which will be
+        # created when user calls pair_legacy_device method.
+        self._pairing_agent = None
+        # The default capability of the agent.
+        self._capability = 'KeyboardDisplay'
+
+        # Initialize a btmon object to record bluetoothd's activity.
+        self.btmon = output_recorder.OutputRecorder(
+                ['btmon', '-c', 'never'],
+                stop_delay_secs=self.BTMON_STOP_DELAY_SECS)
+
+        self.advertisements = []
+        self.advmon_interleave_logger = logger_helper.InterleaveLogger()
+        self._chrc_property = None
+        self._timeout_id = 0
+        self._signal_watch = None
+        self._dbus_mainloop = GObject.MainLoop()
+
+    @dbus_safe(False)
+    def set_debug_log_levels(self, bluez_vb, kernel_vb):
+        """Enable or disable the debug logs of bluetooth
+
+        @param bluez_vb: verbosity of bluez debug log, either 0 or 1
+        @param kernel_vb: verbosity of kernel debug log, either 0 or 1
+
+        """
+        debug_object = self.bus.get(self.BLUEZ_SERVICE_NAME,
+                                    self.BLUEZ_DEBUG_LOG_PATH)
+
+        # Make a raw synchronous call using GLib (pydbus doesn't correctly
+        # serialize '(yy)').
+        raw_dbus_call_sync(self.bus, debug_object, self.BLUEZ_DEBUG_LOG_IFACE,
+                           'SetLevels',
+                           GLib.Variant('(yy)', (bluez_vb, kernel_vb)),
+                           GLib.VariantType.new('()'))
+        return
+
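+    # A minimal illustration (not part of the facade) of the '(yy)' packing
+    # used above: GLib.Variant('(yy)', ...) wraps the two verbosity bytes into
+    # a single tuple variant, which is what the raw GLib call needs where
+    # pydbus' automatic marshalling falls short.
+    #
+    #     from gi.repository import GLib
+    #     levels = GLib.Variant('(yy)', (1, 0))     # bluez_vb=1, kernel_vb=0
+    #     assert levels.get_type_string() == '(yy)'
+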
+    @dbus_safe(False)
+    def set_quality_debug_log(self, enable):
+        """Enable or disable bluez quality debug log in the DUT
+        @param enable: True to enable all of the debug log,
+                       False to disable all of the debug log.
+        """
+        bluez_debug = self.bus.get(
+                self.BLUEZ_SERVICE_NAME, self.BLUEZ_DEBUG_LOG_PATH)[
+                        self.BLUEZ_DEBUG_LOG_IFACE]
+        bluez_debug.SetQualityDebug(enable)
+
+    @dbus_safe(False)
+    def start_bluetoothd(self):
+        """start bluetoothd.
+
+        This includes powering up the adapter.
+
+        @returns: True if bluetoothd is started correctly.
+                  False otherwise.
+
+        """
+        # Always start bluez tests with Floss disabled
+        self.configure_floss(enabled=False)
+
+        # Start the daemon and exit if that fails.
+        if not UpstartClient.start(self.BLUETOOTHD_JOB):
+            return False
+
+        logging.debug('waiting for bluez start')
+        try:
+            utils.poll_for_condition(condition=self._update_bluez,
+                                     desc='Bluetooth Daemon has started.',
+                                     timeout=self.ADAPTER_TIMEOUT)
+        except Exception as e:
+            logging.error('timeout: error starting bluetoothd: %s', e)
+            return False
+
+        # Waiting for the self._adapter object.
+        # This does not mean that the adapter is powered on.
+        logging.debug('waiting for bluez to obtain adapter information')
+        try:
+            utils.poll_for_condition(
+                    condition=self._update_adapter,
+                    desc='Bluetooth Daemon has adapter information.',
+                    timeout=self.ADAPTER_TIMEOUT)
+        except Exception as e:
+            logging.error('timeout: error starting adapter: %s', e)
+            return False
+
+        # Waiting for the self._advertising interface object.
+        logging.debug('waiting for bluez to obtain interface manager.')
+        try:
+            utils.poll_for_condition(
+                    condition=self._update_advertising,
+                    desc='Bluetooth Daemon has advertising interface.',
+                    timeout=self.ADAPTER_TIMEOUT)
+        except utils.TimeoutError:
+            logging.error('timeout: error getting advertising interface')
+            return False
+
+        # Register the pairing agent so we can authorize connections
+        logging.debug('registering default pairing agent')
+        self._setup_pairing_agent(0)
+
+        return True
+
+    @dbus_safe(False)
+    def stop_bluetoothd(self):
+        """stop bluetoothd.
+
+        @returns: True if bluetoothd is stopped correctly.
+                  False otherwise.
+
+        """
+
+        def bluez_stopped():
+            """Checks the bluetooth daemon status.
+
+            @returns: True if bluez is stopped. False otherwise.
+
+            """
+            return not self._update_bluez()
+
+        # Stop the daemon and exit if that fails.
+        if not UpstartClient.stop(self.BLUETOOTHD_JOB):
+            return False
+
+        logging.debug('waiting for bluez stop')
+        try:
+            utils.poll_for_condition(condition=bluez_stopped,
+                                     desc='Bluetooth Daemon has stopped.',
+                                     timeout=self.ADAPTER_TIMEOUT)
+            bluetoothd_stopped = True
+        except Exception as e:
+            logging.error('timeout: error stopping bluetoothd: %s', e)
+            bluetoothd_stopped = False
+
+        return bluetoothd_stopped
+
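+    # Hedged usage sketch, assuming 'facade' is a BluezFacadeLocal instance
+    # created by the test harness: restarting the daemon for a clean state.
+    # Both calls poll with ADAPTER_TIMEOUT, so the sequence may block for up
+    # to roughly a minute.
+    #
+    #     facade = BluezFacadeLocal()
+    #     if facade.stop_bluetoothd() and facade.start_bluetoothd():
+    #         assert facade.is_bluetoothd_running()
+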
+    def restart_cras(self):
+        """Restarts the cras daemon."""
+        return self._restart_cras()
+
+    def is_bluetoothd_running(self):
+        """Is bluetoothd running?
+
+        @returns: True if bluetoothd is running
+
+        """
+        return bool(self._get_dbus_proxy_for_bluetoothd())
+
+    def is_bluetoothd_proxy_valid(self):
+        """Checks whether the proxy object for bluetoothd is ok.
+
+        The dbus proxy object (self._bluez) can become unusable if bluetoothd
+        crashes or restarts for any reason. This method checks whether this has
+        happened by attempting to use the object proxy. If bluetoothd has
+        restarted (or is not available), then the session will no longer be
+        valid and this will result in a dbus exception (GLib.Error).
+
+        Returns:
+            True if the bluez proxy is still usable. False otherwise.
+        """
+
+        try:
+            return self.is_bluetoothd_running() and bool(
+                    self._objmgr_proxy) and bool(
+                            self._objmgr_proxy.GetManagedObjects())
+        except GLib.Error:
+            return False
+
+    def _update_bluez(self):
+        """Store a D-Bus proxy for the Bluetooth daemon in self._bluez.
+
+        This may be called in a loop until it returns True to wait for the
+        daemon to be ready after it has been started.
+
+        @return True on success, False otherwise.
+
+        """
+        self._bluez = self._get_dbus_proxy_for_bluetoothd()
+        return bool(self._bluez)
+
+    @property
+    def _objmgr_proxy(self):
+        """Returns proxy object to object manager if bluez is valid."""
+        if self._bluez:
+            return self._bluez[self.BLUEZ_MANAGER_IFACE]
+
+        return None
+
+    @dbus_safe(False)
+    def _get_dbus_proxy_for_bluetoothd(self):
+        """Get the D-Bus proxy for the Bluetooth daemon.
+
+        @return the D-Bus proxy for bluetoothd on success, None otherwise.
+
+        """
+        bluez = None
+        try:
+            bluez = self.bus.get(self.BLUEZ_SERVICE_NAME,
+                                 self.BLUEZ_MANAGER_PATH)
+            logging.debug('bluetoothd is running')
+        except GLib.Error as e:
+            # When bluetoothd is not running, the exception looks like
+            #     org.freedesktop.DBus.Error.ServiceUnknown: The name org.bluez
+            #     was not provided by any .service files
+            if self.DBUS_ERROR_SERVICEUNKNOWN in str(e):
+                logging.debug('bluetoothd is not running')
+            else:
+                logging.error('Error getting dbus proxy for Bluez: %s', e)
+        return bluez
+
+    def _update_adapter(self):
+        """Store a D-Bus proxy for the local adapter in self._adapter.
+
+        This may be called in a loop until it returns True, in order to wait
+        for the daemon to be ready and to have obtained the adapter
+        information after it has been started.
+
+        Since not all devices will have adapters, this will also return True
+        in the case where we have obtained an empty adapter index list from the
+        kernel.
+
+        Note that this method does not power on the adapter.
+
+        @return True on success, including if there is no local adapter,
+            False otherwise.
+
+        """
+        self._adapter = None
+        self._adapter_path = None
+
+        # Re-check kernel to make sure adapter is available
+        self._has_adapter = len(self._control.read_index_list()) > 0
+
+        if self._bluez is None:
+            logging.warning('Bluez not found!')
+            return False
+        if not self._has_adapter:
+            logging.debug('Device has no adapter; returning')
+            return True
+        (self._adapter, self._adapter_path) = self._get_adapter()
+        return bool(self._adapter)
+
+    def _update_advertising(self):
+        """Store a D-Bus proxy for the local advertising interface manager.
+
+        This may be called repeatedly in a loop until True is returned;
+        otherwise we wait for bluetoothd to start. After bluetoothd starts, we
+        check the existence of a local adapter and proceed to get the
+        advertisement interface manager.
+
+        Since not all devices will have adapters, this will also return True
+        in the case where there is no adapter.
+
+        @return True on success, including if there is no local adapter,
+                False otherwise.
+
+        """
+        self._advertising = None
+        if self._bluez is None:
+            logging.warning('Bluez not found!')
+            return False
+        if not self._has_adapter:
+            logging.debug('Device has no adapter; returning')
+            return True
+        self._advertising = self._advertising_proxy
+        return bool(self._advertising)
+
+    def _update_adv_monitor_manager(self):
+        """Store a D-Bus proxy for the local advertisement monitor manager.
+
+        This may be called repeatedly in a loop until True is returned;
+        otherwise we wait for bluetoothd to start. After bluetoothd starts, we
+        check the existence of a local adapter and proceed to get the
+        advertisement monitor manager interface.
+
+        Since not all devices will have adapters, this will also return True
+        in the case where there is no adapter.
+
+        @return True on success, including if there is no local adapter,
+                False otherwise.
+
+        """
+        self._adv_monitor_manager = None
+        if self._bluez is None:
+            logging.warning('Bluez not found!')
+            return False
+        if not self._has_adapter:
+            logging.debug('Device has no adapter; returning without '
+                          'advertisement monitor manager')
+            return True
+        self._adv_monitor_manager = self._get_adv_monitor_manager()
+        return bool(self._adv_monitor_manager)
+
+    @dbus_safe(False)
+    def _get_adapter(self):
+        """Get the D-Bus proxy for the local adapter.
+
+        @return Tuple of (adapter, object_path) on success else (None, None).
+
+        """
+        objects = self._objmgr_proxy.GetManagedObjects()
+        for path, ifaces in six.iteritems(objects):
+            logging.debug('%s -> %r', path, list(ifaces.keys()))
+            if self.BLUEZ_ADAPTER_IFACE in ifaces:
+                logging.debug('using adapter %s', path)
+                adapter = self.bus.get(self.BLUEZ_SERVICE_NAME, path)
+                return (adapter, path)
+        logging.warning('No adapter found in interface!')
+        return (None, None)
+
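+    # Rough illustration (paths and values are hypothetical) of the
+    # GetManagedObjects() mapping consumed above: object path ->
+    # {interface name -> properties}. The adapter is the path that exposes
+    # org.bluez.Adapter1.
+    #
+    #     {
+    #         '/org/bluez/hci0': {'org.bluez.Adapter1': {...}},
+    #         '/org/bluez/hci0/dev_AA_BB_CC_DD_EE_FF': {
+    #                 'org.bluez.Device1': {...}},
+    #     }
+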
+    @property
+    def _adapter_proxy(self):
+        """Returns proxy object to adapter interface if adapter is valid."""
+        if self._adapter:
+            return self._adapter[self.BLUEZ_ADAPTER_IFACE]
+
+        return None
+
+    @property
+    def _property_proxy(self):
+        """Returns proxy object to adapter properties if adapter is valid."""
+        if self._adapter:
+            return self._adapter[self.DBUS_PROP_IFACE]
+
+        return None
+
+    @property
+    def _advertising_proxy(self):
+        """Returns proxy object to advertising interface if adapter is valid."""
+        if self._adapter:
+            return self._adapter[self.BLUEZ_LE_ADVERTISING_MANAGER_IFACE]
+
+        return None
+
+    @dbus_safe(False)
+    def _get_adv_monitor_manager(self):
+        """Get the D-Bus proxy for the local advertisement monitor manager.
+
+        @return the advertisement monitor manager interface object.
+
+        """
+        return self._adapter[self.BLUEZ_ADV_MONITOR_MANAGER_IFACE]
+
+    @dbus_safe(False)
+    def reset_on(self):
+        """Reset the adapter and settings and power up the adapter.
+
+        @return True on success, False otherwise.
+
+        """
+        return self._reset(set_power=True)
+
+    @dbus_safe(False)
+    def reset_off(self):
+        """Reset the adapter and settings, leave the adapter powered off.
+
+        @return True on success, False otherwise.
+
+        """
+        return self._reset(set_power=False)
+
+    def has_adapter(self):
+        """Return if an adapter is present.
+
+        This will only return True if we have determined both that there is
+        a Bluetooth adapter on this device (kernel adapter index list is not
+        empty) and that the Bluetooth daemon has exported an object for it.
+
+        @return True if an adapter is present, False if not.
+
+        """
+        return self._has_adapter and self._adapter is not None
+
+    def _reset(self, set_power=False):
+        """Remove remote devices and set adapter to set_power state.
+
+        Do not restart bluetoothd here, as that may have side effects:
+        an unhappy Chrome may disable the adapter at random.
+
+        @param set_power: adapter power state to set (True or False).
+
+        @return True on success, False otherwise.
+
+        """
+        logging.debug('_reset')
+
+        if not self._adapter:
+            logging.warning('Adapter not found!')
+            return False
+
+        objects = self._objmgr_proxy.GetManagedObjects()
+
+        devices = []
+        for path, ifaces in six.iteritems(objects):
+            if self.BLUEZ_DEVICE_IFACE in ifaces:
+                devices.append(objects[path][self.BLUEZ_DEVICE_IFACE])
+
+        # Turn on the adapter in order to remove all remote devices.
+        if not self.is_powered_on():
+            if not self.set_powered(True):
+                logging.warning('Unable to power on the adapter')
+                return False
+
+        for device in devices:
+            logging.debug('removing %s', device.get('Address'))
+            self.remove_device_object(device.get('Address'))
+
+        # Toggle power to the adapter.
+        if not self.set_powered(False):
+            logging.warning('Unable to power off adapter')
+            return False
+        if set_power and not self.set_powered(True):
+            logging.warning('Unable to power on adapter')
+            return False
+
+        return True
+
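+    # Hedged usage note (using the hypothetical 'facade' instance from the
+    # earlier sketch): tests would normally go through reset_on()/reset_off()
+    # above rather than calling _reset() directly; both remove every known
+    # remote device and leave the adapter powered on or off respectively.
+    #
+    #     assert facade.reset_on()     # clean state, adapter powered on
+    #     assert facade.reset_off()    # clean state, adapter powered off
+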
+    @dbus_safe(False)
+    def is_discoverable(self):
+        """Returns whether the adapter is discoverable."""
+        return bool(self._get_adapter_properties().get('Discoverable') == 1)
+
+    @dbus_safe(False)
+    def set_powered(self, powered):
+        """Set the adapter power state.
+
+        @param powered: adapter power state to set (True or False).
+
+        @return True on success, False otherwise.
+
+        """
+        if not self._adapter:
+            if not powered:
+                # Return success if we are trying to power off an adapter that's
+                # missing or gone away, since the expected result has happened.
+                return True
+            else:
+                logging.warning('Adapter not found!')
+                return False
+
+        logging.debug('_set_powered %r', powered)
+        self._property_proxy.Set(self.BLUEZ_ADAPTER_IFACE, 'Powered',
+                                 GLib.Variant('b', powered))
+
+        return True
+
+    @dbus_safe(False)
+    def set_discoverable(self, discoverable):
+        """Set the adapter discoverable state.
+
+        @param discoverable: adapter discoverable state to set (True or False).
+
+        @return True on success, False otherwise.
+
+        """
+        if not discoverable and not self._adapter:
+            # Return success if we are trying to make an adapter that's
+            # missing or gone away, undiscoverable, since the expected result
+            # has happened.
+            return True
+        self._property_proxy.Set(self.BLUEZ_ADAPTER_IFACE, 'Discoverable',
+                                 GLib.Variant('b', discoverable))
+        return True
+
+    @dbus_safe(False)
+    def get_discoverable_timeout(self):
+        """Get the adapter discoverable_timeout.
+
+        @return the DiscoverableTimeout value (in seconds) on success,
+            False otherwise.
+
+        """
+        return int(
+                self._property_proxy.Get(self.BLUEZ_ADAPTER_IFACE,
+                                         'DiscoverableTimeout'))
+
+    @dbus_safe(False)
+    def set_discoverable_timeout(self, discoverable_timeout):
+        """Set the adapter discoverable_timeout property.
+
+        @param discoverable_timeout: adapter discoverable_timeout value
+               in seconds to set (Integer).
+
+        @return True on success, False otherwise.
+
+        """
+        self._property_proxy.Set(self.BLUEZ_ADAPTER_IFACE,
+                                 'DiscoverableTimeout',
+                                 GLib.Variant('u', discoverable_timeout))
+        return True
+
+    @dbus_safe(False)
+    def get_pairable_timeout(self):
+        """Get the adapter pairable_timeout.
+
+        @return the PairableTimeout value (in seconds) on success,
+            False otherwise.
+
+        """
+        return int(
+                self._property_proxy.Get(self.BLUEZ_ADAPTER_IFACE,
+                                         'PairableTimeout'))
+
+    @dbus_safe(False)
+    def set_pairable_timeout(self, pairable_timeout):
+        """Set the adapter pairable_timeout property.
+
+        @param pairable_timeout: adapter pairable_timeout value
+               in seconds to set (Integer).
+
+        @return True on success, False otherwise.
+
+        """
+        self._property_proxy.Set(self.BLUEZ_ADAPTER_IFACE, 'PairableTimeout',
+                                 GLib.Variant('u', pairable_timeout))
+        return True
+
+    @dbus_safe(False)
+    def get_pairable(self):
+        """Gets the adapter pairable state.
+
+        @return Pairable property value.
+        """
+        return bool(
+                self._property_proxy.Get(self.BLUEZ_ADAPTER_IFACE, 'Pairable'))
+
+    @dbus_safe(False)
+    def set_pairable(self, pairable):
+        """Set the adapter pairable state.
+
+        @param pairable: adapter pairable state to set (True or False).
+
+        @return True on success, False otherwise.
+
+        """
+        self._property_proxy.Set(self.BLUEZ_ADAPTER_IFACE, 'Pairable',
+                                 GLib.Variant('b', pairable))
+        return True
+
+    @dbus_safe(False)
+    def set_adapter_alias(self, alias):
+        """Set the adapter alias.
+
+        @param alias: adapter alias to set with type String
+
+        @return True on success, False otherwise.
+        """
+        self._property_proxy.Set(self.BLUEZ_ADAPTER_IFACE, 'Alias',
+                                 GLib.Variant('s', alias))
+        return True
+
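+    # The getters/setters above all follow the same org.freedesktop.DBus
+    # Properties pattern; for reference, a condensed sketch of the underlying
+    # calls (variant type codes: 'b' boolean, 'u' uint32, 's' string):
+    #
+    #     self._property_proxy.Set(self.BLUEZ_ADAPTER_IFACE, 'Powered',
+    #                              GLib.Variant('b', True))
+    #     self._property_proxy.Get(self.BLUEZ_ADAPTER_IFACE, 'Alias')
+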
+    def _get_adapter_properties(self):
+        """Read the adapter properties from the Bluetooth Daemon.
+
+        @return the adapter properties as a dictionary on success,
+            an empty dictionary otherwise.
+
+        """
+
+        @dbus_safe({})
+        def get_props():
+            """Get props from dbus."""
+            objects = self._objmgr_proxy.GetManagedObjects()
+            return objects[self._adapter_path][self.BLUEZ_ADAPTER_IFACE]
+
+        if self._bluez and self._adapter:
+            props = get_props().copy()
+        else:
+            props = {}
+        logging.debug('get_adapter_properties')
+        for i in props.items():
+            logging.debug(i)
+        return props
+
+    def get_adapter_properties(self):
+        return json.dumps(self._get_adapter_properties())
+
+    def is_powered_on(self):
+        """Checks whether the adapter is currently powered."""
+        return bool(self._get_adapter_properties().get('Powered'))
+
+    def get_address(self):
+        """Gets the current bluez adapter address."""
+        return str(self._get_adapter_properties()['Address'])
+
+    def get_bluez_version(self):
+        """Get the BlueZ version.
+
+        Returns:
+            Bluez version like 'BlueZ 5.39'.
+        """
+        return str(self._get_adapter_properties()['Name'])
+
+    def get_bluetooth_class(self):
+        """Get the bluetooth class of the adapter.
+
+        Example for Chromebook: 4718852
+
+        Returns:
+            Class of device for the adapter.
+        """
+        return str(self._get_adapter_properties()['Class'])
+
+    def read_version(self):
+        """Read the version of the management interface from the Kernel.
+
+        @return the information as a JSON-encoded tuple of:
+          ( version, revision )
+
+        """
+        #TODO(howardchung): resolve 'cannot allocate memory' error when
+        #                   BluetoothControlSocket idle too long(about 3 secs)
+        #                   (b:137603211)
+        _control = bluetooth_socket.BluetoothControlSocket()
+        return json.dumps(_control.read_version())
+
+    def read_supported_commands(self):
+        """Read the set of supported commands from the Kernel.
+
+        @return the information as a JSON-encoded tuple of:
+          ( commands, events )
+
+        """
+        #TODO(howardchung): resolve 'cannot allocate memory' error when
+        #                   BluetoothControlSocket idle too long(about 3 secs)
+        #                   (b:137603211)
+        _control = bluetooth_socket.BluetoothControlSocket()
+        return json.dumps(_control.read_supported_commands())
+
+    def read_index_list(self):
+        """Read the list of currently known controllers from the Kernel.
+
+        @return the information as a JSON-encoded array of controller indexes.
+
+        """
+        #TODO(howardchung): resolve 'cannot allocate memory' error when
+        #                   BluetoothControlSocket idle too long(about 3 secs)
+        #                   (b:137603211)
+        _control = bluetooth_socket.BluetoothControlSocket()
+        return json.dumps(_control.read_index_list())
+
+    def read_info(self):
+        """Read the adapter information from the Kernel.
+
+        @return the information as a JSON-encoded tuple of:
+          ( address, bluetooth_version, manufacturer_id,
+            supported_settings, current_settings, class_of_device,
+            name, short_name )
+
+        """
+        #TODO(howardchung): resolve 'cannot allocate memory' error when
+        #                   BluetoothControlSocket idle too long(about 3 secs)
+        #                   (b:137603211)
+        _control = bluetooth_socket.BluetoothControlSocket()
+        return json.dumps(_control.read_info(0))
+
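+    # These read_* helpers return JSON strings so the values survive the
+    # xmlrpc boundary; a hedged sketch of how a host-side caller (the 'facade'
+    # proxy name is hypothetical) might decode them:
+    #
+    #     import json
+    #     version, revision = json.loads(facade.read_version())
+    #     indexes = json.loads(facade.read_index_list())
+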
+    def add_device(self, address, address_type, action):
+        """Add a device to the Kernel action list.
+
+        @param address: Address of the device to add.
+        @param address_type: Type of device in @address.
+        @param action: Action to take.
+
+        @return on success, a JSON-encoded tuple of:
+          ( address, address_type ), None on failure.
+
+        """
+        #TODO(howardchung): resolve 'cannot allocate memory' error when
+        #                   BluetoothControlSocket idle too long(about 3 secs)
+        #                   (b:137603211)
+        _control = bluetooth_socket.BluetoothControlSocket()
+        return json.dumps(_control.add_device(0, address, address_type,
+                                              action))
+
+    def remove_device(self, address, address_type):
+        """Remove a device from the Kernel action list.
+
+        @param address: Address of the device to remove.
+        @param address_type: Type of device in @address.
+
+        @return on success, a JSON-encoded tuple of:
+          ( address, address_type ), None on failure.
+
+        """
+        #TODO(howardchung): resolve 'cannot allocate memory' error when
+        #                   BluetoothControlSocket idle too long(about 3 secs)
+        #                   (b:137603211)
+        _control = bluetooth_socket.BluetoothControlSocket()
+        return json.dumps(_control.remove_device(0, address, address_type))
+
+    @dbus_safe(False)
+    def _get_devices(self):
+        """Read information about remote devices known to the adapter.
+
+        @return the properties of each device in a list
+
+        """
+        objects = self._objmgr_proxy.GetManagedObjects()
+        devices = []
+        for path, ifaces in six.iteritems(objects):
+            if self.BLUEZ_DEVICE_IFACE in ifaces:
+                devices.append(objects[path][self.BLUEZ_DEVICE_IFACE])
+        return devices
+
+    def _encode_json(self, data):
+        """Encodes input data as JSON object.
+
+        Note that for bytes elements in the input data, they are decoded as
+        unicode string.
+
+        @param data: data to be JSON encoded
+
+        @return: JSON encoded data
+        """
+        logging.debug('_encode_json raw data is %s', data)
+        str_data = utils.bytes_to_str_recursive(data)
+        json_encoded = json.dumps(str_data)
+        logging.debug('JSON encoded data is %s', json_encoded)
+        return json_encoded
+
+    def get_devices(self):
+        """Read information about remote devices known to the adapter.
+
+        @return the properties of each device as a JSON-encoded array of
+            dictionaries on success, the value False otherwise.
+
+        """
+        devices = self._get_devices()
+        # Note that bluetooth facade now runs in Python 3.
+        # Refer to crrev.com/c/3268347.
+        return self._encode_json(devices)
+
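+    # Hedged sketch of consuming get_devices() on the host side: the JSON
+    # decodes to a list of property dictionaries keyed by BlueZ Device1
+    # property names such as 'Address' and 'Connected'.
+    #
+    #     devices = json.loads(facade.get_devices())
+    #     connected = [d['Address'] for d in devices if d.get('Connected')]
+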
+    def get_num_connected_devices(self):
+        """ Return number of remote devices currently connected to the DUT.
+
+        @returns: The number of devices known to bluez with the Connected
+            property active
+        """
+        num_connected_devices = 0
+        for dev in self._get_devices():
+            if dev and dev.get('Connected', False):
+                num_connected_devices += 1
+
+        return num_connected_devices
+
+    @dbus_safe(None)
+    def get_device_property(self, address, prop_name):
+        """Read a property of BT device by directly querying device dbus object
+
+        @param address: Address of the device to query
+        @param prop_name: Property to be queried
+
+        @return JSON repr of the property if the device is found and has the
+                property, otherwise None on failure. The JSON encoding is
+                recursive, automatically converting dbus types to python
+                natives so the value can be passed over xmlrpc. Decoding is
+                done in bluetooth_device.py.
+        """
+
+        prop_val = None
+
+        # Grab dbus object, _find_device will catch any thrown dbus error
+        device_obj = self._find_device(address)
+
+        if device_obj:
+            # Query dbus object for property
+            prop_val = unpack_if_variant(device_obj[self.DBUS_PROP_IFACE].Get(
+                    self.BLUEZ_DEVICE_IFACE, prop_name))
+
+        return self._encode_json(prop_val)
+
+    @dbus_safe(None)
+    def get_battery_property(self, address, prop_name):
+        """Read a property from Battery1 interface.
+
+        @param address: Address of the device to query
+        @param prop_name: Property to be queried
+
+        @return The battery percentage value, or None if does not exist.
+        """
+
+        prop_val = None
+
+        # Grab dbus object, _find_battery will catch any thrown dbus error
+        battery_obj = self._find_battery(address)
+
+        if battery_obj:
+            # Query dbus object for property
+            prop_val = unpack_if_variant(battery_obj[self.DBUS_PROP_IFACE].Get(
+                    self.BLUEZ_BATTERY_IFACE, prop_name))
+
+        return prop_val
+
+    @dbus_safe(False)
+    def set_discovery_filter(self, filter):
+        """Set the discovery filter.
+
+        @param filter: The discovery filter to set.
+
+        @return True on success, False otherwise.
+
+        """
+        if not self._adapter:
+            return False
+
+        converted_filter = {}
+        for key in filter:
+            converted_filter[key] = GLib.Variant('s', filter[key])
+
+        self._adapter_proxy.SetDiscoveryFilter(converted_filter)
+        return True
+
+    @dbus_safe(False, return_error=True)
+    def start_discovery(self):
+        """Start discovery of remote devices.
+
+        Obtain the discovered device information using get_devices(), and call
+        stop_discovery() when done.
+
+        @return True on success, False otherwise.
+
+        """
+        if not self._adapter:
+            return (False, "Adapter Not Found")
+        self._adapter_proxy.StartDiscovery()
+        return (True, None)
+
+    @dbus_safe(False, return_error=True)
+    def stop_discovery(self):
+        """Stop discovery of remote devices.
+
+        @return True on success, False otherwise.
+
+        """
+        if not self._adapter:
+            return (False, "Adapter Not Found")
+        self._adapter_proxy.StopDiscovery()
+        return (True, None)
+
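+    # start_discovery()/stop_discovery() above return a (status, error) pair
+    # rather than a bare boolean, since they are wrapped with
+    # dbus_safe(..., return_error=True); a hedged caller sketch:
+    #
+    #     started, err = facade.start_discovery()
+    #     if not started:
+    #         logging.error('StartDiscovery failed: %s', err)
+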
+    def is_discovering(self):
+        """Check if adapter is discovering."""
+        return self._get_adapter_properties().get('Discovering', 0) == 1
+
+    def get_dev_info(self):
+        """Read raw HCI device information.
+
+        @return JSON-encoded tuple of:
+                (index, name, address, flags, device_type, bus_type,
+                       features, pkt_type, link_policy, link_mode,
+                       acl_mtu, acl_pkts, sco_mtu, sco_pkts,
+                       err_rx, err_tx, cmd_tx, evt_rx, acl_tx, acl_rx,
+                       sco_tx, sco_rx, byte_rx, byte_tx) on success,
+                None on failure.
+
+        """
+        return json.dumps(self._raw.get_dev_info(0))
+
+    @dbus_safe(None, return_error=True)
+    def get_supported_capabilities(self):
+        """ Get supported capabilities of the adapter
+
+        @returns (capabilities, None) on Success. (None, <error>) on failure
+        """
+        value = self._adapter_proxy.GetSupportedCapabilities()
+        return (json.dumps(value), None)
+
+    @dbus_safe(False)
+    def register_profile(self, path, uuid, options):
+        """Register new profile (service).
+
+        @param path: Path to the profile object.
+        @param uuid: Service Class ID of the service as string.
+        @param options: Dictionary of options for the new service, compliant
+                        with BlueZ D-Bus Profile API standard.
+
+        @return True on success, False otherwise.
+
+        """
+        converted_options = {}
+        if 'ServiceRecord' in options:
+            converted_options['ServiceRecord'] = GLib.Variant(
+                    's', options['ServiceRecord'])
+
+        profile_manager = self.bus.get(
+                self.BLUEZ_SERVICE_NAME, self.BLUEZ_PROFILE_MANAGER_PATH)[
+                        self.BLUEZ_PROFILE_MANAGER_IFACE]
+        profile_manager.RegisterProfile(path, uuid, converted_options)
+        return True
+
+    def has_device(self, address):
+        """Checks if the device with a given address exists.
+
+        @param address: Address of the device.
+
+        @returns: True if there is an interface object with that address.
+                  False if the device is not found.
+
+        @raises: Exception if a D-Bus error is encountered.
+
+        """
+        result = self._find_device(address)
+        logging.debug('has_device result: %s', str(result))
+
+        # The result being False indicates that there is a D-Bus error.
+        if result is False:
+            raise Exception('dbus.Interface error')
+
+        # Return True if the result is not None, e.g. a D-Bus interface object;
+        # False otherwise.
+        return bool(result)
+
+    @dbus_safe(False)
+    def _find_device(self, address):
+        """Finds the device with a given address.
+
+        Finds the device with a given address and returns the
+        device interface.
+
+        @param address: Address of the device.
+
+        @returns: An 'org.bluez.Device1' interface to the device.
+                  None if device can not be found.
+        """
+        path = self._get_device_path(address)
+        if path:
+            return self.bus.get(self.BLUEZ_SERVICE_NAME, path)
+        logging.info('Device not found')
+        return None
+
+    @dbus_safe(None)
+    def _find_battery(self, address):
+        """Finds the battery with a given address.
+
+        Finds the battery with a given address and returns the
+        battery interface.
+
+        @param address: Address of the device.
+
+        @returns: An 'org.bluez.Battery1' interface to the device.
+                  None if device can not be found.
+        """
+        path = self._get_device_path(address)
+        if path:
+            try:
+                obj = self.bus.get(self.BLUEZ_SERVICE_NAME, path)
+                if obj[self.BLUEZ_BATTERY_IFACE] is not None:
+                    return obj
+            except Exception:
+                # The device object may not expose the Battery1 interface or
+                # may have vanished; either way, treat it as no battery found.
+                pass
+        logging.info('Battery not found')
+        return None
+
+    @dbus_safe(False)
+    def _get_device_path(self, address):
+        """Gets the path for a device with a given address.
+
+        Finds the device with a given address and returns the
+        path for the device.
+
+        @param address: Address of the device.
+
+        @returns: The path to the address of the device, or None if device is
+            not found in the object tree.
+
+        """
+
+        # Create device path, i.e. '/org/bluez/hci0/dev_AA_BB_CC_DD_EE_FF' based
+        # on path assignment scheme used in bluez
+        address_up = address.replace(':', '_')
+        device_path = '{}/dev_{}'.format(self._adapter_path, address_up)
+
+        # Verify the Address property agrees to confirm we have the device
+        try:
+            device = self.bus.get(self.BLUEZ_SERVICE_NAME, device_path)
+            found_addr = device[self.DBUS_PROP_IFACE].Get(
+                    self.BLUEZ_DEVICE_IFACE, 'Address')
+
+            if found_addr == address:
+                logging.info('Device found at {}'.format(device_path))
+                return device_path
+
+        except KeyError as ke:
+            logging.debug('Couldn\'t reach device: %s: %s', address, ke)
+        except GLib.Error as e:
+            log_msg = 'Couldn\'t reach device: {}'.format(str(e))
+            logging.debug(log_msg)
+
+        logging.debug('No device found at {}'.format(device_path))
+        return None
+
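+    # Illustration of the path scheme used by _get_device_path() above,
+    # assuming the adapter path is '/org/bluez/hci0' (address is a
+    # placeholder):
+    #
+    #     'AA:BB:CC:DD:EE:FF' -> '/org/bluez/hci0/dev_AA_BB_CC_DD_EE_FF'
+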
+    @dbus_safe(False)
+    def _setup_pairing_agent(self, pin):
+        """Initializes and resiters a BluezPairingAgent to handle authentication.
+
+        @param pin: The pin code this agent will answer.
+
+        """
+        if self._pairing_agent:
+            logging.info(
+                    'Removing the old agent before initializing a new one')
+            self._pairing_agent.unregister()
+            self._pairing_agent = None
+
+        # Create and register pairing agent
+        self._pairing_agent = BluezPairingAgent(self.bus, self.AGENT_PATH, pin)
+
+        agent_manager = self.bus.get(
+                self.BLUEZ_SERVICE_NAME,
+                self.BLUEZ_AGENT_MANAGER_PATH)[self.BLUEZ_AGENT_MANAGER_IFACE]
+        try:
+            # Make sure agent is accessible on bus
+            #agent_obj = self.bus.get(self.BLUEZ_SERVICE_NAME, self.AGENT_PATH)
+            agent_manager.RegisterAgent(self.AGENT_PATH, str(self._capability))
+        except GLib.Error as e:
+            if self.BLUEZ_ERROR_ALREADY_EXISTS in str(e):
+                logging.info('Unregistering old agent and registering the new')
+                agent_manager.UnregisterAgent(self.AGENT_PATH)
+                agent_manager.RegisterAgent(self.AGENT_PATH,
+                                            str(self._capability))
+            else:
+                logging.error('Error setting up pin agent: %s', e)
+                raise
+        except Exception as e:
+            logging.debug('Setup pairing agent: %s', str(e))
+            raise
+        logging.info('Agent registered: %s', self.AGENT_PATH)
+
+    @dbus_safe(False)
+    def _is_paired(self, device):
+        """Checks if a device is paired.
+
+        @param device: An 'org.bluez.Device1' interface to the device.
+
+        @returns: True if device is paired. False otherwise.
+
+        """
+        props = device[self.DBUS_PROP_IFACE]
+        paired = props.Get(self.BLUEZ_DEVICE_IFACE, 'Paired')
+        return bool(paired)
+
+    @dbus_safe(False)
+    def device_is_paired(self, address):
+        """Checks if a device is paired.
+
+        @param address: address of the device.
+
+        @returns: True if device is paired. False otherwise.
+
+        """
+        device = self._find_device(address)
+        if not device:
+            logging.error('Device not found')
+            return False
+        return self._is_paired(device)
+
+    @dbus_safe(False)
+    def _is_connected(self, device):
+        """Checks if a device is connected.
+
+        @param device: An 'org.bluez.Device1' interface to the device.
+
+        @returns: True if device is connected. False otherwise.
+
+        """
+        props = device[self.DBUS_PROP_IFACE]
+        connected = props.Get(self.BLUEZ_DEVICE_IFACE, 'Connected')
+        logging.info('Got connected = %r', connected)
+        return bool(connected)
+
+    @dbus_safe(False)
+    def _set_trusted_by_device(self, device, trusted=True):
+        """Set the device trusted by device object.
+
+        @param device: the device object to set trusted.
+        @param trusted: True or False indicating whether to set trusted or not.
+
+        @returns: True if successful. False otherwise.
+
+        """
+        try:
+            properties = device[self.DBUS_PROP_IFACE]
+            properties.Set(self.BLUEZ_DEVICE_IFACE, 'Trusted',
+                           GLib.Variant('b', trusted))
+            return True
+        except Exception as e:
+            logging.error('_set_trusted_by_device: %s', e)
+        except:
+            logging.error('_set_trusted_by_device: unexpected error')
+        return False
+
+    @dbus_safe(False)
+    def _set_trusted_by_path(self, device_path, trusted=True):
+        """Set the device trusted by the device path.
+
+        @param device_path: the object path of the device.
+        @param trusted: True or False indicating whether to set trusted or not.
+
+        @returns: True if successful. False otherwise.
+
+        """
+        try:
+            device = self.bus.get(self.BLUEZ_SERVICE_NAME, device_path)
+            return self._set_trusted_by_device(device, trusted)
+        except Exception as e:
+            logging.error('_set_trusted_by_path: %s', e)
+        except:
+            logging.error('_set_trusted_by_path: unexpected error')
+        return False
+
+    @dbus_safe(False)
+    def set_trusted(self, address, trusted=True):
+        """Set the device trusted by address.
+
+        @param address: The bluetooth address of the device.
+        @param trusted: True or False indicating whether to set trusted or not.
+
+        @returns: True if successful. False otherwise.
+
+        """
+        try:
+            device = self._find_device(address)
+            return self._set_trusted_by_device(device, trusted)
+        except Exception as e:
+            logging.error('set_trusted: %s', e)
+        except:
+            logging.error('set_trusted: unexpected error')
+        return False
+
+    @dbus_safe(False)
+    def pair_legacy_device(self, address, pin, trusted, timeout=60):
+        """Pairs a device with a given pin code.
+
+        Registers an agent that handles pin code requests and
+        pairs a device with a known pin code. After pairing, this function will
+        automatically connect to the device as well (prevents timing issues
+        between pairing and connect and reduces overall test execution time).
+
+        @param address: Address of the device to pair.
+        @param pin: The pin code of the device to pair.
+        @param trusted: indicating whether to set the device trusted.
+        @param timeout: The timeout in seconds for pairing.
+
+        @returns: True on success. False otherwise.
+
+        """
+
+        def connect_reply():
+            """Handler when connect succeeded."""
+            logging.info('Device connected: %s', device_path)
+
+        def connect_error(error):
+            """Handler when connect failed.
+
+            @param error: one of the errors defined in org.bluez.Error
+            representing the error in connect.
+            """
+            logging.error('Connect device failed: %s', error)
+
+        def pair_reply():
+            """Handler when pairing succeeded."""
+            logging.info('Device paired: %s', device_path)
+            if trusted:
+                self._set_trusted_by_path(device_path, trusted=True)
+                logging.info('Device trusted: %s', device_path)
+
+            # On finishing pairing, also connect
+            self.dbus_method_with_handlers(device.Connect,
+                                           connect_reply,
+                                           connect_error,
+                                           timeout=timeout * 1000)
+
+        def pair_error(error):
+            """Handler when pairing failed.
+
+            @param error: one of errors defined in org.bluez.Error representing
+                          the error in pairing.
+
+            """
+            if 'org.freedesktop.DBus.Error.NoReply' in str(error):
+                logging.error('Timed out after %d secs. Cancelling pairing.',
+                              timeout)
+                device.CancelPairing()
+            else:
+                logging.error('Pairing device failed: %s', error)
+
+        device = self._find_device(address)
+        if not device:
+            logging.error('Device not found')
+            return False
+
+        device_path = self._get_device_path(address)
+        logging.info('Device %s is found.', device_path)
+
+        self._setup_pairing_agent(pin)
+
+        try:
+            if not self._is_paired(device):
+                logging.info('Device is not paired. Pair and Connect.')
+                self.dbus_method_with_handlers(device.Pair,
+                                               pair_reply,
+                                               pair_error,
+                                               timeout=timeout * 1000)
+            elif not self._is_connected(device):
+                logging.info('Device is already paired. Connect.')
+                self.dbus_method_with_handlers(device.Connect,
+                                               connect_reply,
+                                               connect_error,
+                                               timeout=timeout * 1000)
+        except Exception as e:
+            logging.error('Exception %s in pair_legacy_device', e)
+            return False
+
+        return self._is_paired(device) and self._is_connected(device)
+
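+    # Hedged usage sketch (address and pin are placeholders): pairing also
+    # connects, so a single call is usually enough for a test to reach a
+    # paired-and-connected state.
+    #
+    #     ok = facade.pair_legacy_device('AA:BB:CC:DD:EE:FF', '0000',
+    #                                    trusted=True, timeout=60)
+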
+    @dbus_safe(False)
+    def remove_device_object(self, address):
+        """Removes a device object and the pairing information.
+
+        Calls RemoveDevice method to remove remote device
+        object and the pairing information.
+
+        @param address: Address of the device to unpair.
+
+        @returns: True on success. False otherwise.
+
+        """
+        device = self._find_device(address)
+        if not device:
+            logging.error('Device not found')
+            return False
+        self._adapter_proxy.RemoveDevice(self._get_device_path(address))
+        return True
+
+    @dbus_safe(False)
+    def connect_device(self, address):
+        """Connects a device.
+
+        Connects a device if it is not connected.
+
+        @param address: Address of the device to connect.
+
+        @returns: True on success. False otherwise.
+
+        """
+        device = self._find_device(address)
+        if not device:
+            logging.error('Device not found')
+            return False
+        if self._is_connected(device):
+            logging.info('Device is already connected')
+            return True
+        device.Connect()
+        return self._is_connected(device)
+
+    @dbus_safe(False)
+    def device_is_connected(self, address):
+        """Checks if a device is connected.
+
+        @param address: Address of the device to connect.
+
+        @returns: True if device is connected. False otherwise.
+
+        """
+        device = self._find_device(address)
+        if not device:
+            logging.error('Device not found')
+            return False
+        return self._is_connected(device)
+
+    @dbus_safe(False)
+    def disconnect_device(self, address):
+        """Disconnects a device.
+
+        Disconnects a device if it is connected.
+
+        @param address: Address of the device to disconnect.
+
+        @returns: True on success. False otherwise.
+
+        """
+        device = self._find_device(address)
+        if not device:
+            logging.error('Device not found')
+            return False
+        if not self._is_connected(device):
+            logging.info('Device is not connected')
+            return True
+        device.Disconnect()
+        return not self._is_connected(device)
+
+    @dbus_safe(False)
+    def _device_services_resolved(self, device):
+        """Checks if services are resolved.
+
+        @param device: An 'org.bluez.Device1' interface to the device.
+
+        @returns: True if the device's services are resolved. False otherwise.
+
+        """
+        logging.info('device for services resolved: %s', device)
+        props = device[self.DBUS_PROP_IFACE]
+        resolved = props.Get(self.BLUEZ_DEVICE_IFACE, 'ServicesResolved')
+        logging.info('Services resolved = %r', resolved)
+        return bool(resolved)
+
+    @dbus_safe(False)
+    def device_services_resolved(self, address):
+        """Checks if service discovery is complete on a device.
+
+        Checks whether service discovery has been completed.
+
+        @param address: Address of the remote device.
+
+        @returns: True on success. False otherwise.
+
+        """
+        device = self._find_device(address)
+        if not device:
+            logging.error('Device not found')
+            return False
+
+        if not self._is_connected(device):
+            logging.info('Device is not connected')
+            return False
+
+        return self._device_services_resolved(device)
+
+    def btmon_start(self):
+        """Start btmon monitoring."""
+        self.btmon.start()
+
+    def btmon_stop(self):
+        """Stop btmon monitoring."""
+        self.btmon.stop()
+
+    def btmon_get(self, search_str, start_str):
+        """Get btmon output contents.
+
+        @param search_str: only lines with search_str would be kept.
+        @param start_str: all lines before the occurrence of start_str would be
+                filtered.
+
+        @returns: the recorded btmon output.
+
+        """
+        return self.btmon.get_contents(search_str=search_str,
+                                       start_str=start_str)
+
+    def btmon_find(self, pattern_str):
+        """Find if a pattern string exists in btmon output.
+
+        @param pattern_str: the pattern string to find.
+
+        @returns: True on success. False otherwise.
+
+        """
+        return self.btmon.find(pattern_str)
+
+    def dbus_method_with_handlers(self, dbus_method, reply_handler,
+                                  error_handler, *args, **kwargs):
+        """Run an async dbus method.
+
+        @param dbus_method: the dbus async method to invoke.
+        @param reply_handler: the reply handler for the dbus method.
+        @param error_handler: the error handler for the dbus method.
+        @param *args: additional arguments for the dbus method.
+        @param **kwargs: additional keyword arguments for the dbus method.
+
+        @returns: an empty string '' on success; or
+                  an error string if the dbus method fails or an exception
+                  occurs.
+
+        """
+
+        def successful_cb():
+            """Called when the dbus_method completed successfully."""
+            reply_handler()
+            self.dbus_cb_msg = ''
+
+        def error_cb(error):
+            """Called when the dbus_method failed."""
+            error_handler(error)
+            self.dbus_cb_msg = str(error)
+
+        # Successful dbus calls will have a non-throwing result and error
+        # results will throw GLib.Error.
+        try:
+            _ = dbus_method(*args, **kwargs)
+            successful_cb()
+        except GLib.Error as e:
+            error_cb(e)
+        except Exception as e:
+            logging.error('Exception %s in dbus_method_with_handlers ', e)
+            return str(e)
+
+        return self.dbus_cb_msg
+
+    def advmon_check_manager_interface_exist(self):
+        """Check if AdvertisementMonitorManager1 interface is available.
+
+        @returns: True if Manager interface is available, False otherwise.
+
+        """
+        objects = self._objmgr_proxy.GetManagedObjects()
+        for _, ifaces in six.iteritems(objects):
+            if self.BLUEZ_ADV_MONITOR_MANAGER_IFACE in ifaces:
+                return True
+
+        return False
+
+    def advmon_read_supported_types(self):
+        """Read the Advertisement Monitor supported monitor types.
+
+        Reads the value of 'SupportedMonitorTypes' property of the
+        AdvertisementMonitorManager1 interface on the adapter.
+
+        @returns: the list of the supported monitor types.
+
+        """
+        return unpack_if_variant(
+                self._property_proxy.Get(self.BLUEZ_ADV_MONITOR_MANAGER_IFACE,
+                                         'SupportedMonitorTypes'))
+
+    def advmon_read_supported_features(self):
+        """Read the Advertisement Monitor supported features.
+
+        Reads the value of 'SupportedFeatures' property of the
+        AdvertisementMonitorManager1 interface on the adapter.
+
+        @returns: the list of the supported features.
+
+        """
+        return unpack_if_variant(
+                self._property_proxy.Get(self.BLUEZ_ADV_MONITOR_MANAGER_IFACE,
+                                         'SupportedFeatures'))
+
+    def advmon_create_app(self):
+        """Create an advertisement monitor app.
+
+        @returns: app id, once the app is created.
+
+        """
+        return self.advmon_appmgr.create_app()
+
+    def advmon_exit_app(self, app_id):
+        """Exit an advertisement monitor app.
+
+        @param app_id: the app id.
+
+        @returns: True on success, False otherwise.
+
+        """
+        return self.advmon_appmgr.exit_app(app_id)
+
+    def advmon_kill_app(self, app_id):
+        """Kill an advertisement monitor app by sending SIGKILL.
+
+        @param app_id: the app id.
+
+        @returns: True on success, False otherwise.
+
+        """
+        return self.advmon_appmgr.kill_app(app_id)
+
+    def advmon_register_app(self, app_id):
+        """Register an advertisement monitor app.
+
+        @param app_id: the app id.
+
+        @returns: True on success, False otherwise.
+
+        """
+        return self.advmon_appmgr.register_app(app_id)
+
+    def advmon_unregister_app(self, app_id):
+        """Unregister an advertisement monitor app.
+
+        @param app_id: the app id.
+
+        @returns: True on success, False otherwise.
+
+        """
+        return self.advmon_appmgr.unregister_app(app_id)
+
+    def advmon_add_monitor(self, app_id, monitor_data):
+        """Create an Advertisement Monitor object.
+
+        @param app_id: the app id.
+        @param monitor_data: the list containing monitor type, RSSI filter
+                             values and patterns.
+
+        @returns: monitor id, once the monitor is created, None otherwise.
+
+        """
+        return self.advmon_appmgr.add_monitor(app_id, monitor_data)
+
+    def advmon_remove_monitor(self, app_id, monitor_id):
+        """Remove the Advertisement Monitor object.
+
+        @param app_id: the app id.
+        @param monitor_id: the monitor id.
+
+        @returns: True on success, False otherwise.
+
+        """
+        return self.advmon_appmgr.remove_monitor(app_id, monitor_id)
+
+    def advmon_get_event_count(self, app_id, monitor_id, event):
+        """Read the count of a particular event on the given monitor.
+
+        @param app_id: the app id.
+        @param monitor_id: the monitor id.
+        @param event: name of the specific event or 'All' for all events.
+
+        @returns: count of the specific event or dict of counts of all events.
+
+        """
+        return self.advmon_appmgr.get_event_count(app_id, monitor_id, event)
+
+    def advmon_reset_event_count(self, app_id, monitor_id, event):
+        """Reset the count of a particular event on the given monitor.
+
+        @param app_id: the app id.
+        @param monitor_id: the monitor id.
+        @param event: name of the specific event or 'All' for all events.
+
+        @returns: True on success, False otherwise.
+
+        """
+        return self.advmon_appmgr.reset_event_count(app_id, monitor_id, event)
+
+    def advmon_set_target_devices(self, app_id, monitor_id, devices):
+        """Set the target devices to the given monitor.
+
+        DeviceFound and DeviceLost will only be counted if it is triggered by a
+        target device.
+
+        @param app_id: the app id.
+        @param monitor_id: the monitor id.
+        @param devices: a list of device MAC addresses.
+
+        @returns: True on success, False otherwise.
+
+        """
+        paths = []
+        for addr in devices:
+            paths.append('{}/dev_{}'.format(self._adapter_path,
+                                            addr.replace(':', '_')))
+
+        return self.advmon_appmgr.set_target_devices(app_id, monitor_id, paths)
+
+    def advmon_interleave_scan_logger_start(self):
+        """ Start interleave logger recording
+        """
+        self.advmon_interleave_logger.StartRecording()
+
+    def advmon_interleave_scan_logger_stop(self):
+        """ Stop interleave logger recording
+
+        @returns: True if logs were successfully collected,
+                  False otherwise.
+
+        """
+        return self.advmon_interleave_logger.StopRecording()
+
+    def advmon_interleave_scan_logger_get_records(self):
+        """ Get records in previous log collections
+
+        @returns: a list of records, where each item is a record of
+                  interleave |state| and the |time| the state starts.
+                  |state| could be {'no filter', 'allowlist'}
+                  |time| is system time in sec
+
+        """
+        return self.advmon_interleave_logger.records
+
+    def advmon_interleave_scan_logger_get_cancel_events(self):
+        """ Get cancel events in previous log collections
+
+        @returns: a list of cancel |time| when an interleave cancel event log
+                  was found.
+                  |time| is system time in sec
+
+        """
+        return self.advmon_interleave_logger.cancel_events
+
+    def register_advertisement(self, advertisement_data):
+        """Register an advertisement.
+
+        Note that rpc supports only conformable types. Hence, a
+        dict about the advertisement is passed as a parameter such
+        that the advertisement object could be constructed on the host.
+
+        @param advertisement_data: a dict of the advertisement to register.
+
+        @returns: True on success. False otherwise.
+
+        """
+        adv = advertisement.Advertisement(self.bus, advertisement_data)
+        self.advertisements.append(adv)
+        return self.dbus_method_with_handlers(
+                self._advertising.RegisterAdvertisement,
+                # reply handler
+                lambda: logging.info('register_advertisement: succeeded.'),
+                # error handler
+                lambda error: logging.error(
+                        'register_advertisement: failed: %s', str(error)),
+                # other arguments
+                adv.get_path(),
+                {})
+
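+    # Illustrative sketch of the advertisement_data dict: only the 'Path' key
+    # is relied upon directly in this file (see unregister_advertisement); the
+    # other keys below mirror what the advertisement module typically accepts
+    # and are shown as assumptions, not a definitive schema.
+    #
+    #   example_advertisement_data = {
+    #       'Path': '/org/bluez/test/advertisement1',
+    #       'Type': 'peripheral',
+    #       'ServiceUUIDs': ['180d', '180f'],
+    #   }
+    #   register_advertisement(example_advertisement_data)
+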
+    def unregister_advertisement(self, advertisement_data):
+        """Unregister an advertisement.
+
+        Note that to unregister an advertisement, the same self._advertising
+        interface manager must be used. This is because bluez only allows
+        the original sender to invoke the UnregisterAdvertisement method.
+        Hence, make sure that bluetoothd is not restarted and that
+        self.start_bluetoothd() is not executed between the time an
+        advertisement is registered and the time it is unregistered.
+
+        @param advertisement_data: a dict of the advertisements to unregister.
+
+        @returns: True on success. False otherwise.
+
+        """
+        path = advertisement_data.get('Path')
+        for index, adv in enumerate(self.advertisements):
+            if adv.get_path() == path:
+                break
+        else:
+            logging.error('Failed to find the advertisement under the path: %s',
+                          path)
+            return False
+
+        result = self.dbus_method_with_handlers(
+                self._advertising.UnregisterAdvertisement,
+                # reply handler
+                lambda: logging.info('unregister_advertisement: succeeded.'),
+                # error handler
+                lambda error: logging.error(
+                        'unregister_advertisement: failed: %s', str(error)),
+                # other arguments
+                adv.get_path())
+
+        # Call unregister() so that the same path could be reused.
+        adv.unregister()
+        del self.advertisements[index]
+
+        return result
+
+    def set_advertising_intervals(self, min_adv_interval_ms,
+                                  max_adv_interval_ms):
+        """Set advertising intervals.
+
+        @param min_adv_interval_ms: the min advertising interval in ms.
+        @param max_adv_interval_ms: the max advertising interval in ms.
+
+        @returns: True on success. False otherwise.
+
+        """
+        return self.dbus_method_with_handlers(
+                self._advertising.SetAdvertisingIntervals,
+                # reply handler
+                lambda: logging.info('set_advertising_intervals: succeeded.'),
+                # error handler
+                lambda error: logging.error(
+                        'set_advertising_intervals: failed: %s', str(error)),
+                # other arguments
+                min_adv_interval_ms,
+                max_adv_interval_ms)
+
+    def get_advertisement_property(self, adv_path, prop_name):
+        """Grab property of an advertisement registered on the DUT
+
+        The service on the DUT registers a dbus object and holds it. During the
+        test, some properties on the object may change, so this allows the test
+        access to the properties at run-time.
+
+        @param adv_path: string path of the dbus object
+        @param prop_name: string name of the property required
+
+        @returns: the value of the property in standard (non-dbus) type if the
+                    property exists, else None
+        """
+        for adv in self.advertisements:
+            if str(adv.get_path()) == adv_path:
+                adv_props = adv.GetAll('org.bluez.LEAdvertisement1')
+                return unpack_if_variant(adv_props.get(prop_name, None))
+
+        return None
+
+    def get_advertising_manager_property(self, prop_name):
+        """Grab property of the bluez advertising manager
+
+        This allows us to understand the DUT's advertising capabilities, for
+        instance the maximum number of advertising instances supported, so that
+        we can test these capabilities.
+
+        @param prop_name: string name of the property required
+
+        @returns: the value of the property in standard (non-dbus) type if the
+                    property exists, else None
+        """
+
+        return unpack_if_variant(
+                self._property_proxy.Get(
+                        self.BLUEZ_LE_ADVERTISING_MANAGER_IFACE, prop_name))
+
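+    # Usage sketch: the BlueZ LEAdvertisingManager1 interface commonly exposes
+    # properties such as 'SupportedInstances' and 'ActiveInstances'. The
+    # property name below is an illustrative assumption about BlueZ, not
+    # something defined in this file.
+    #
+    #   max_advs = self.get_advertising_manager_property('SupportedInstances')
+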
+    def reset_advertising(self):
+        """Reset advertising.
+
+        This includes unregistering all advertisements, resetting advertising
+        intervals, and disabling advertising.
+
+        @returns: True on success. False otherwise.
+
+        """
+        # It is required to execute unregister() to unregister the
+        # object-path handler of each advertisement. In this way, we could
+        # register an advertisement with the same path repeatedly.
+        for adv in self.advertisements:
+            adv.unregister()
+        del self.advertisements[:]
+
+        return self.dbus_method_with_handlers(
+                self._advertising.ResetAdvertising,
+                # reply handler
+                lambda: logging.info('reset_advertising: succeeded.'),
+                # error handler
+                lambda error: logging.error('reset_advertising: failed: %s',
+                                            str(error)))
+
+    def get_gatt_attributes_map(self, address):
+        """Return a JSON formatted string of the GATT attributes of a device,
+        keyed by UUID
+        @param address: a string of the MAC address of the device
+
+        @return: JSON formatted string storing the nested structure of the
+        attributes. Each attribute has 'path' and
+        ['characteristics' | 'descriptors'], which store its object path and
+        children respectively.
+
+        """
+        attribute_map = dict()
+
+        device_object_path = self._get_device_path(address)
+        objects = self._objmgr_proxy.GetManagedObjects()
+        service_map = self._get_service_map(device_object_path, objects)
+
+        servs = dict()
+        attribute_map['services'] = servs
+
+        for uuid, path in service_map.items():
+
+            servs[uuid] = dict()
+            serv = servs[uuid]
+
+            serv['path'] = path
+            serv['characteristics'] = dict()
+            chrcs = serv['characteristics']
+
+            chrcs_map = self._get_characteristic_map(path, objects)
+            for uuid, path in chrcs_map.items():
+                chrcs[uuid] = dict()
+                chrc = chrcs[uuid]
+
+                chrc['path'] = path
+                chrc['descriptors'] = dict()
+                descs = chrc['descriptors']
+
+                descs_map = self._get_descriptor_map(path, objects)
+
+                for uuid, path in descs_map.items():
+                    descs[uuid] = dict()
+                    desc = descs[uuid]
+
+                    desc['path'] = path
+
+        return json.dumps(attribute_map)
+
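+    # For reference, the JSON produced above has the following nested shape
+    # (UUIDs and paths below are placeholders):
+    #
+    #   {"services": {
+    #       "<service-uuid>": {
+    #           "path": "<service-path>",
+    #           "characteristics": {
+    #               "<chrc-uuid>": {
+    #                   "path": "<chrc-path>",
+    #                   "descriptors": {
+    #                       "<desc-uuid>": {"path": "<desc-path>"}}}}}}}
+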
+    def _get_gatt_interface(self, uuid, object_path, interface):
+        """Get dbus interface by uuid
+        @param uuid: a string of uuid
+        @param object_path: a string of the object path of the service
+
+        @return: a dbus interface
+        """
+
+        return self.bus.get(self.BLUEZ_SERVICE_NAME, object_path)[interface]
+
+    def get_gatt_service_property(self, object_path, property_name):
+        """Get property from a service attribute
+        @param object_path: a string of the object path of the service
+        @param property_name: a string of a property, ex: 'Value', 'UUID'
+
+        @return: the property on success,
+                 None otherwise
+
+        """
+        return self.get_gatt_attribute_property(object_path,
+                                                self.BLUEZ_GATT_SERV_IFACE,
+                                                property_name)
+
+    def get_gatt_characteristic_property(self, object_path, property_name):
+        """Get property from a characteristic attribute
+        @param object_path: a string of the object path of the characteristic
+        @param property_name: a string of a property, ex: 'Value', 'UUID'
+
+        @return: the property on success,
+                 None otherwise
+
+        """
+        return self.get_gatt_attribute_property(object_path,
+                                                self.BLUEZ_GATT_CHAR_IFACE,
+                                                property_name)
+
+    def get_gatt_descriptor_property(self, object_path, property_name):
+        """Get property from descriptor attribute
+        @param object_path: a string of the object path of the descriptor
+        @param property_name: a string of a property, ex: 'Value', 'UUID'
+
+        @return: the property on success,
+                 None otherwise
+
+        """
+        return self.get_gatt_attribute_property(object_path,
+                                                self.BLUEZ_GATT_DESC_IFACE,
+                                                property_name)
+
+    @dbus_safe(None)
+    def get_gatt_attribute_property(self, object_path, interface,
+                                    property_name):
+        """Get property from attribute
+        @param object_path: a string of the bject path
+        @param property_name: a string of a property, ex: 'Value', 'UUID'
+
+        @return: the property if success,
+                 none otherwise
+
+        """
+        gatt_object = self.bus.get(self.BLUEZ_SERVICE_NAME, object_path)
+        prop = self._get_dbus_object_property(gatt_object, interface,
+                                              property_name)
+        logging.info(prop)
+        if isinstance(prop, bytearray):
+            return _dbus_byte_array_to_b64_string(prop)
+        if isinstance(prop, bool):
+            return bool(prop)
+        if isinstance(prop, list):
+            return list(map(str, prop))
+        return prop
+
+    @dbus_safe(None)
+    def gatt_characteristic_read_value(self, uuid, object_path):
+        """Perform method ReadValue on a characteristic attribute
+        @param uuid: a string of uuid
+        @param object_path: a string of the object path of the characteristic
+
+        @return: base64 string of dbus bytearray
+        """
+
+        dbus_interface = self._get_gatt_interface(uuid, object_path,
+                                                  self.BLUEZ_GATT_CHAR_IFACE)
+        value = dbus_interface.ReadValue({})
+        return _dbus_byte_array_to_b64_string(value)
+
+    @dbus_safe(None)
+    def gatt_descriptor_read_value(self, uuid, object_path):
+        """Perform method ReadValue on a descriptor attribute
+        @param uuid: a string of uuid
+        @param object_path: a string of the object path of the descriptor
+
+        @return: base64 string of dbus bytearray
+        """
+
+        dbus_interface = self._get_gatt_interface(uuid, object_path,
+                                                  self.BLUEZ_GATT_DESC_IFACE)
+        value = dbus_interface.ReadValue({})
+        return _dbus_byte_array_to_b64_string(value)
+
+    @dbus_safe(False)
+    def _get_attribute_map(self, object_path, dbus_interface, objects):
+        """Gets a map of object paths under an object path.
+
+        Walks the object tree, and returns a map of UUIDs to object paths for
+        all resolved gatt objects.
+
+        @param object_path: The object path of the attribute to retrieve
+            gatt UUIDs and paths from.
+        @param dbus_interface: The dbus interface of the attributes to map.
+        @param objects: The managed objects.
+
+        @returns: A dictionary of object paths, keyed by UUID.
+
+        """
+        attr_map = {}
+
+        if object_path:
+            for path, ifaces in six.iteritems(objects):
+                if (dbus_interface in ifaces and path.startswith(object_path)):
+                    uuid = ifaces[dbus_interface]['UUID'].lower()
+                    attr_map[uuid] = path
+
+        else:
+            logging.warning('object_path %s is not valid', object_path)
+
+        return attr_map
+
+    def _get_service_map(self, device_path, objects):
+        """Gets a map of service paths for a device.
+
+        @param device_path: the object path of the device.
+        @param objects: The managed objects.
+        """
+        return self._get_attribute_map(device_path, self.BLUEZ_GATT_SERV_IFACE,
+                                       objects)
+
+    def _get_characteristic_map(self, serv_path, objects):
+        """Gets a map of characteristic paths for a service.
+
+        @param serv_path: the object path of the service.
+        @param objects: The managed objects.
+        """
+        return self._get_attribute_map(serv_path, self.BLUEZ_GATT_CHAR_IFACE,
+                                       objects)
+
+    def _get_descriptor_map(self, chrc_path, objects):
+        """Gets a map of descriptor paths for a characteristic.
+
+        @param chrc_path: the object path of the characteristic.
+        @param objects: The managed objects.
+        """
+        return self._get_attribute_map(chrc_path, self.BLUEZ_GATT_DESC_IFACE,
+                                       objects)
+
+    @dbus_safe(None)
+    def _get_dbus_object_property(self, dbus_object, dbus_interface,
+                                  dbus_property):
+        """Get the property in an object.
+
+        @param dbus_object: a dbus object
+        @param dbus_interface: a dbus interface where the property exists
+        @param dbus_property: a dbus property of the dbus object, as a string
+
+        @return: a dbus-typed object on success, e.g. dbus.Boolean, dbus.String;
+                 None otherwise
+
+        """
+        return dbus_object[self.DBUS_PROP_IFACE].Get(dbus_interface,
+                                                     dbus_property)
+
+    @dbus_safe(False)
+    def get_characteristic_map(self, address):
+        """Gets a map of characteristic paths for a device.
+
+        Walks the object tree, and returns a map of uuids to object paths for
+        all resolved gatt characteristics.
+
+        @param address: The MAC address of the device to retrieve
+            gatt characteristic uuids and paths from.
+
+        @returns: A dictionary of characteristic paths, keyed by uuid.
+
+        """
+        device_path = self._get_device_path(address)
+        char_map = {}
+
+        if device_path:
+            objects = self._objmgr_proxy.GetManagedObjects()
+
+            for path, ifaces in six.iteritems(objects):
+                if (self.BLUEZ_GATT_CHAR_IFACE in ifaces
+                            and path.startswith(device_path)):
+                    uuid = ifaces[self.BLUEZ_GATT_CHAR_IFACE]['UUID'].lower()
+                    char_map[uuid] = path
+        else:
+            logging.warning('Device %s not in object tree.', address)
+
+        return char_map
+
+    @dbus_safe(None)
+    def _get_char_object(self, uuid, address):
+        """Gets a characteristic object.
+
+        Gets a characteristic object for a given UUID and address.
+
+        @param uuid: The UUID of the characteristic, as a string.
+        @param address: The MAC address of the remote device.
+
+        @returns: A dbus interface for the characteristic if the uuid/address
+                      is in the object tree.
+                  None if the address/uuid is not found in the object tree.
+
+        """
+        path = self.get_characteristic_map(address).get(uuid)
+        if not path:
+            logging.error("path not found: %s %s", uuid, address)
+            return None
+        return self.bus.get(self.BLUEZ_SERVICE_NAME,
+                            path)[self.BLUEZ_GATT_CHAR_IFACE]
+
+    @dbus_safe(None)
+    def read_characteristic(self, uuid, address):
+        """Reads the value of a gatt characteristic.
+
+        Reads the current value of a gatt characteristic. Base64 encoding is
+        used for compatibility with the XML RPC interface.
+
+        @param uuid: The uuid of the characteristic to read, as a string.
+        @param address: The MAC address of the remote device.
+
+        @returns: A b64 encoded version of a byte array containing the value
+                      if the uuid/address is in the object tree.
+                  None if the uuid/address was not found in the object tree, or
+                      if a DBus exception was raised by the read operation.
+
+        """
+        char_obj = self._get_char_object(uuid, address)
+        if char_obj is None:
+            return None
+        value = char_obj.ReadValue({})
+        return _dbus_byte_array_to_b64_string(value)
+
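+    # Note for callers: the value returned above is a base64 string. On the
+    # receiving side it can be decoded back to raw bytes with the standard
+    # library, e.g. (illustrative):
+    #
+    #   import base64
+    #   raw_bytes = base64.standard_b64decode(returned_value)
+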
+    @dbus_safe(None)
+    def write_characteristic(self, uuid, address, value):
+        """Performs a write operation on a gatt characteristic.
+
+        Writes to a GATT characteristic on a remote device. Base64 encoding is
+        used for compatibility with the XML RPC interface.
+
+        @param uuid: The uuid of the characteristic to write to, as a string.
+        @param address: The MAC address of the remote device, as a string.
+        @param value: A byte array containing the data to write.
+
+        @returns: True if the write operation does not raise an exception.
+                  None if the uuid/address was not found in the object tree, or
+                      if a DBus exception was raised by the write operation.
+
+        """
+        char_obj = self._get_char_object(uuid, address)
+        if char_obj is None:
+            return None
+        dbus_value = _b64_string_to_dbus_byte_array(value)
+        char_obj.WriteValue(dbus_value, {})
+        return True
+
+    @dbus_safe(None)
+    def exchange_messages(self, tx_object_path, rx_object_path, value):
+        """Performs a write operation on a gatt characteristic and wait for
+        the response on another characteristic.
+
+        @param tx_object_path: the object path of the characteristic to write.
+        @param rx_object_path: the object path of the characteristic to read.
+        @param value: A byte array containing the data to write.
+
+        @returns: The base64-encoded value read from the rx characteristic.
+                  None if the uuid/address was not found in the object tree, or
+                      if a DBus exception was raised by the write operation.
+
+        """
+        tx_obj = self._get_gatt_characteristic_object(tx_object_path)
+
+        if tx_obj is None:
+            return None
+
+        self._chrc_property = ''.encode('utf-8')
+
+        value = str(value)
+        proxy = self.bus.get(self.BLUEZ_SERVICE_NAME, rx_object_path)[self.DBUS_PROP_IFACE]
+        self._signal_watch = proxy.PropertiesChanged.connect(self._property_changed)
+
+        # Start timeout source
+        self._timeout_start = time.time()
+        self._timeout_early = False
+        self._timeout_id = GObject.timeout_add(
+                self.PROPERTY_UPDATE_CHECK_MILLI_SECS,
+                self._property_wait_timeout)
+
+        write_value = _b64_string_to_dbus_byte_array(value)
+        tx_obj.WriteValue(write_value, {})
+
+        self._dbus_mainloop.run()
+
+        return _dbus_byte_array_to_b64_string(self._chrc_property)
+
+    def _property_changed(self, *args, **kwargs):
+        """Handler for properties changed signal."""
+        # We don't cancel the timeout here due to a problem with the GLib
+        # mainloop. See |_property_wait_timeout| for a full explanation.
+        self._timeout_early = True
+        self._signal_watch.disconnect()
+        changed_prop = args
+
+        logging.info(changed_prop)
+        prop_dict = changed_prop[1]
+        self._chrc_property = prop_dict['Value']
+        if self._dbus_mainloop.is_running():
+            self._dbus_mainloop.quit()
+
+    def _property_wait_timeout(self):
+        """Timeout handler when waiting for properties update signal."""
+        # Sometimes, GLib.Mainloop doesn't exit after |mainloop.quit()| is
+        # called. This seems to occur only if a timeout source was active and
+        # was removed before it had a chance to run. To mitigate this, we don't
+        # cancel the timeout but mark an early completion instead.
+        # See b/222364364#comment3 for more information.
+        if not self._timeout_early and int(
+                (time.time() - self._timeout_start) *
+                1000) <= self.PROPERTY_UPDATE_TIMEOUT_MILLI_SECS:
+            # Returning True means this will be called again.
+            return True
+
+        self._signal_watch.disconnect()
+        if self._dbus_mainloop.is_running():
+            logging.warning("quit main loop due to timeout")
+            self._dbus_mainloop.quit()
+        # Return false so that this method will not be called again.
+        return False
+
+    @dbus_safe(False)
+    def _get_gatt_characteristic_object(self, object_path):
+        return self.bus.get(self.BLUEZ_SERVICE_NAME,
+                            object_path)[self.BLUEZ_GATT_CHAR_IFACE]
+
+    @dbus_safe(False)
+    def start_notify(self, object_path, cccd_value):
+        """Starts the notification session on the gatt characteristic.
+
+        @param object_path: the object path of the characteristic.
+        @param cccd_value: Possible CCCD values include
+               0x00 - inferred from the remote characteristic's properties
+               0x01 - notification
+               0x02 - indication
+
+        @returns: True if the operation succeeds.
+                  False if the characteristic is not found, or
+                      if a DBus exception was raised by the operation.
+
+        """
+        char_obj = self._get_gatt_characteristic_object(object_path)
+        if char_obj is None:
+            logging.error("characteristic not found: %s %s", object_path)
+            return False
+
+        try:
+            char_obj.StartNotify(cccd_value)
+            return True
+        except Exception as e:
+            logging.error('start_notify: %s', e)
+        except:
+            logging.error('start_notify: unexpected error')
+        return False
+
+    @dbus_safe(False)
+    def stop_notify(self, object_path):
+        """Stops the notification session on the gatt characteristic.
+
+        @param object_path: the object path of the characteristic.
+
+        @returns: True if the operation succeeds.
+                  False if the characteristic is not found, or
+                      if a DBus exception was raised by the operation.
+
+        """
+        char_obj = self._get_gatt_characteristic_object(object_path)
+        if char_obj is None:
+            logging.error("characteristic not found: %s %s", object_path)
+            return False
+
+        try:
+            char_obj.StopNotify()
+            return True
+        except Exception as e:
+            logging.error('stop_notify: %s', e)
+        except:
+            logging.error('stop_notify: unexpected error')
+        return False
+
+    @dbus_safe(False)
+    def is_notifying(self, object_path):
+        """Is the GATT characteristic in a notifying session?
+
+        @param object_path: the object path of the characteristic.
+
+        @return True if it is in a notification session. False otherwise.
+
+        """
+
+        return self.get_gatt_characteristic_property(object_path, 'Notifying')
+
+    @dbus_safe(False)
+    def is_characteristic_path_resolved(self, uuid, address):
+        """Checks whether a characteristic is in the object tree.
+
+        Checks whether a characteristic is currently found in the object tree.
+
+        @param uuid: The uuid of the characteristic to search for.
+        @param address: The MAC address of the device on which to search for
+            the characteristic.
+
+        @returns: True if the characteristic is found.
+                  False if the characteristic path is not found.
+
+        """
+        return bool(self.get_characteristic_map(address).get(uuid))
+
+    @dbus_safe(False)
+    def get_connection_info(self, address):
+        """Get device connection info.
+
+        @param address: The MAC address of the device.
+
+        @returns: On success, a JSON-encoded tuple of:
+                      ( RSSI, transmit_power, max_transmit_power )
+                  None otherwise.
+
+        """
+        plugin_device = self._get_plugin_device_interface(address)
+        if plugin_device is None:
+            return None
+
+        try:
+            connection_info = plugin_device.GetConnInfo()
+            return json.dumps(connection_info)
+        except Exception as e:
+            logging.error('get_connection_info: %s', e)
+        except:
+            logging.error('get_connection_info: unexpected error')
+        return None
+
+    def has_connection_info(self, address):
+        """Checks whether the address has connection info.
+
+        @param address: The MAC address of the device.
+        @returns True if connection info can be found.
+        """
+        return self.get_connection_info(address) is not None
+
+    @dbus_safe(False)
+    def set_le_connection_parameters(self, address, parameters):
+        """Set the LE connection parameters.
+
+        @param address: The MAC address of the device.
+        @param parameters: The LE connection parameters to set.
+
+        @return: True on success. False otherwise.
+
+        """
+        plugin_device = self._get_plugin_device_interface(address)
+        if plugin_device is None:
+            return False
+
+        return not self.dbus_method_with_handlers(
+                plugin_device.SetLEConnectionParameters,
+                # reply handler
+                lambda: logging.info('set_le_connection_parameters: succeeded.'
+                                     ),
+                # error handler
+                lambda error: logging.
+                error('set_le_connection_parameters: failed: %s', str(error)),
+                # other arguments
+                parameters)
+
+    @dbus_safe(False)
+    def _get_plugin_device_interface(self, address):
+        """Get the BlueZ Chromium device plugin interface.
+
+        This interface can be used to issue dbus requests such as
+        GetConnInfo and SetLEConnectionParameters.
+
+        @param address: The MAC address of the device.
+
+        @return: On success, the BlueZ Chromium device plugin interface
+                 None otherwise.
+
+        """
+        path = self._get_device_path(address)
+        if path is None:
+            return None
+
+        return self.bus.get(self.BLUEZ_SERVICE_NAME,
+                            path)[self.BLUEZ_PLUGIN_DEVICE_IFACE]
+
+    @dbus_safe(False)
+    def policy_get_service_allow_list(self):
+        """Get the service allow list for enterprise policy.
+
+        @returns: array of strings representing the allowed service UUIDs.
+        """
+        uuids = unpack_if_variant(
+                self._property_proxy.Get(self.BLUEZ_ADMIN_POLICY_STATUS_IFACE,
+                                         'ServiceAllowList'))
+        logging.debug('ServiceAllowList: %s', uuids)
+        return uuids
+
+    @dbus_safe(False, return_error=True)
+    def policy_set_service_allow_list(self, uuids):
+        """Set the service allow list for enterprise policy.
+
+        @param uuids: a string representing the uuids; e.g., "1234,0xabcd" or ""
+
+        @returns: (True, '') on success, (False, '<error>') on failure.
+        """
+        dbus_array = []
+        if bool(uuids.strip()):
+            for uuid in uuids.split(','):
+                dbus_array.append(uuid.strip())
+
+        logging.debug('policy_set_service_allow_list: %s', dbus_array)
+        self._adapter[self.BLUEZ_ADMIN_POLICY_SET_IFACE].SetServiceAllowList(
+                dbus_array)
+        return (True, '')
+
+    @dbus_safe(False, return_error=True)
+    def policy_get_device_affected(self, device_address):
+        """Check if the device is affected by enterprise policy.
+
+        @param device_address: address of the device
+                               e.g. '6C:29:95:1A:D4:6F'
+
+        @returns: True if the device is affected by the enterprise policy.
+                  False if not. None if the device is not found.
+        """
+        device = self._find_device(device_address)
+        if not device:
+            logging.debug('Failed to find device %s', device_address)
+            return None
+
+        affected = unpack_if_variant(device[self.DBUS_PROP_IFACE].Get(
+                self.BLUEZ_ADMIN_POLICY_STATUS_IFACE, 'AffectedByPolicy'))
+        logging.debug('policy_get_device_affected(%s): %s', device_address,
+                      affected)
+        return affected
+
+    def cleanup(self):
+        """Cleanup before exiting the client xmlrpc process."""
+
+        self.advmon_appmgr.destroy()
+
+    def get_sysconfig(self):
+        """Helper function to get default controller parameters
+
+        @returns: dict of type to values, both are in string form,
+                  None if the operation read-sysconfig failed.
+        """
+        tlv_re = re.compile(r'Type: (0x[0-9A-Fa-f]{4})\s+'
+                            r'Length: ([0-9A-Fa-f]{2})\s+'
+                            r'Value: ([0-9A-Fa-f]+)')
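+        # The regex above expects TLV lines of the form emitted by
+        # 'btmgmt read-sysconfig', e.g. (illustrative values):
+        #   Type: 0x001d  Length: 02  Value: 2c01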
+
+        cmd = 'btmgmt read-sysconfig'
+        # btmgmt needs stdin, otherwise it won't output anything.
+        # Please refer to
+        # third_party/bluez/current/src/shared/shell.c:bt_shell_printf
+        # for more information
+        output = subprocess.check_output(cmd.split(),
+                                         stdin=subprocess.PIPE,
+                                         encoding='UTF-8')
+
+        if output is None:
+            logging.warning('Unable to retrieve output of %s', cmd)
+            return None
+
+        sysconfig = dict()
+
+        for line in output.splitlines():
+            try:
+                m = tlv_re.match(line)
+                t, l, v = m.groups()
+                sysconfig[int(t, 16)] = v
+            except Exception as e:
+                logging.warning('Unexpected error %s at "%s"', str(e), line)
+
+        logging.debug("default controller parameters: %s", sysconfig)
+        return sysconfig
+
+    def _le_hex_to_int(self, le_hex):
+        """Convert a little-endian hex-string to an unsigned integer.
+        For example, _le_hex_to_int('0102') returns the same value as
+        int('0201', 16). Note that the input must not carry a '0x' prefix,
+        since bytearray.fromhex() rejects it.
+        """
+        if le_hex is None:
+            return None
+
+        ba = bytearray.fromhex(le_hex)
+        ba.reverse()
+        return int(binascii.hexlify(ba), 16)
+
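+    # Worked example of the conversion above: _le_hex_to_int('2c01') builds
+    # bytearray([0x2c, 0x01]), reverses it to [0x01, 0x2c], and returns
+    # int('012c', 16) == 300. It is used below to decode the little-endian
+    # duration values read from the sysconfig TLVs.
+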
+    def get_advmon_interleave_durations(self):
+        """Get durations of allowlist scan and no filter scan
+
+        @returns: a dict of {'allowlist': allowlist_duration,
+                             'no filter': no_filter_duration},
+                  or None if something went wrong
+        """
+
+        sysconfig = self.get_sysconfig()
+
+        if sysconfig is None:
+            return None
+
+        AllowlistScanDuration = self._le_hex_to_int(sysconfig.get(
+                0x001d, None))
+        NoFilterScanDuration = self._le_hex_to_int(sysconfig.get(0x001e, None))
+
+        return {
+                'allowlist': AllowlistScanDuration,
+                'no filter': NoFilterScanDuration
+        }
+
+
+class FlossFacadeLocal(BluetoothBaseFacadeLocal):
+    """Exposes DUT methods called remotely during Bluetooth autotests for the
+    Floss daemon.
+
+    All instance methods of this object without a preceding '_' are exposed via
+    an XML-RPC server. This is not a stateless handler object, which means that
+    if you store state inside the delegate, that state will remain around for
+    future calls.
+    """
+
+    # Default to this adapter during init. We will initialize to the correct
+    # default adapter after the manager client is initialized.
+    DEFAULT_ADAPTER = 0
+
+    # How long we wait for the adapter to come up after we start it
+    ADAPTER_DAEMON_TIMEOUT_SEC = 20
+
+    # Floss stops discovery after ~12s after starting. To improve discovery
+    # chances in tests, we need to keep restarting discovery. This timeout
+    # tracks how long an overall discovery session should be.
+    DISCOVERY_TIMEOUT_SEC = 60
+
+    class DiscoveryObserver(BluetoothCallbacks):
+        """ Discovery observer that restarts discovery until a timeout.
+
+        By default, the Floss stack stops discovery after ~12s. This can be an
+        insufficient amount of time to discover a device, especially classic
+        devices. To mimic Bluez, we have this observer restart discovery each
+        time it stops, up until a given timeout.
+        """
+
+        def __init__(self, adapter_client, timeout_secs):
+            """Constructor.
+
+            @param adapter_client: Already initialized client instance.
+            @param timeout_secs: How long to continue refreshing discovery.
+            """
+            self.adapter_client = adapter_client
+            self.deadline = datetime.now() + timedelta(seconds=timeout_secs)
+            self.adapter_client.register_callback_observer(
+                    'DiscoveryObserver', self)
+            self.discovering = None
+
+        def __del__(self):
+            if self.adapter_client:
+                self.cleanup()
+
+        def cleanup(self):
+            """Clean up after this observer."""
+            self.adapter_client.unregister_callback_observer(
+                    'DiscoveryObserver', self)
+            self.adapter_client = None
+
+        def on_discovering_changed(self, discovering):
+            """Discovering has changed."""
+
+            logging.info('Discovering changed to %s', discovering)
+
+            prev = self.discovering
+            self.discovering = discovering
+
+            # No-op if this is the same notification sent multiple times.
+            if prev == discovering:
+                pass
+            # If discovering ended, check if the observer has timed out yet. If
+            # not, re-start the discovery.
+            elif not discovering and datetime.now() < self.deadline:
+                self.adapter_client.start_discovery(
+                        method_callback=self.start_discovery_rsp)
+
+        def start_discovery_rsp(self, err, result):
+            """Result to |adapter_client.start_discovery|."""
+            # Log any errors that may have occurred
+            if err:
+                logging.error('Error on start_discovery: %s', err)
+            elif result:
+                logging.error('Error on start_discovery: Status=%s', result)
+
+    def __init__(self):
+        # Init the BaseFacade first
+        super(FlossFacadeLocal, self).__init__()
+
+        # Start mainloop thread in background. This will also initialize a few
+        # other variables (self.bus, self.mainloop, self.event_context) that may
+        # be necessary for proper operation.
+        self.mainloop_quit = threading.Event()
+        self.mainloop_ready = threading.Event()
+        self.thread = threading.Thread(
+                name=GLIB_THREAD_NAME,
+                target=FlossFacadeLocal.mainloop_thread,
+                args=(self, ))
+        self.thread.start()
+
+        # Wait for mainloop to be ready
+        if not self.mainloop_ready.wait(timeout=5):
+            raise Exception('Unable to initialize GLib mainloop')
+
+        # Always initialize the manager client since there is a single instance.
+        self.manager_client = FlossManagerClient(self.bus)
+        self.adapter_client = FlossAdapterClient(self.bus,
+                                                 self.DEFAULT_ADAPTER)
+
+        self.is_clean = False
+
+        # Discovery needs to last longer than the default 12s. Keep an observer
+        # that re-enables discovery up to some timeout.
+        self.discovery_observer = None
+
+        # Cache some mock properties for testing. These may be properties that
+        # are required in bluez but don't carry over well into Floss.
+        self.mock_properties = {}
+
+    def __del__(self):
+        if not self.is_clean:
+            self.cleanup()
+
+    def cleanup(self):
+        """Clean up the mainloop thread."""
+        self.mainloop_quit.set()
+        self.mainloop.quit()
+        self.is_clean = True
+
+    @staticmethod
+    def mainloop_thread(self):
+        """Runs GLib mainloop until we signal that we should quit."""
+
+        # Set up mainloop. All subsequent buses and connections will use this
+        # mainloop. We also use a separate main context to avoid multithreading
+        # issues.
+        #self.event_context = GLib.MainContext()
+        #self.mainloop = GLib.MainLoop(context=self.event_context)
+        GLib.threads_init()
+        self.mainloop = GLib.MainLoop()
+
+        # Set up bus connection
+        self.bus = pydbus.SystemBus()
+
+        # Set thread ready
+        self.mainloop_ready.set()
+
+        while not self.mainloop_quit.is_set():
+            self.mainloop.run()
+
+    def get_floss_enabled(self):
+        """Is Floss enabled right now?
+
+        Returns:
+            True if Floss is enabled, False if Bluez is enabled.
+        """
+        return self.manager_client.get_floss_enabled()
+
+    def set_floss_enabled(self, enabled):
+        """Enable or disable Floss."""
+        self.manager_client.set_floss_enabled(enabled)
+
+    def start_bluetoothd(self):
+        """Starts Floss. This includes enabling the adapter.
+
+        Returns:
+            True if default adapter is enabled successfully. False otherwise.
+        """
+        # Start manager and enable Floss
+        if not self.configure_floss(enabled=True):
+            return False
+
+        # Restarts the default adapter
+        if not self.reset_on():
+            return False
+
+        # If we need to wait for any other interfaces, add below here:
+        # ------------------------------------------------------------
+
+        return True
+
+    def stop_bluetoothd(self):
+        """Stops Floss. This includes disabling btmanagerd.
+
+        Returns:
+            True if adapter daemon and manager daemon are both off.
+        """
+        # First power off the adapter
+        if not self.reset_off():
+            logging.warning('Failed to stop btadapterd')
+            return False
+
+        if not UpstartClient.stop(self.MANAGER_JOB):
+            logging.warning('Failed to stop btmanagerd')
+            return False
+
+        def _daemon_stopped():
+            return all([
+                    not self.manager_client.has_proxy(),
+                    not self.adapter_client.has_proxy(),
+            ])
+
+        try:
+            utils.poll_for_condition(condition=_daemon_stopped,
+                                     desc='Bluetooth daemons have stopped',
+                                     timeout=self.DAEMON_TIMEOUT_SEC)
+            daemon_stopped = True
+        except Exception as e:
+            logging.error('timeout: error stopping floss daemons: %s', e)
+            daemon_stopped = False
+
+        return daemon_stopped
+
+    def restart_cras(self):
+        """Restarts the cras daemon."""
+        self._restart_cras(enable_floss=True)
+
+    def is_bluetoothd_proxy_valid(self):
+        """Checks whether the proxy objects for Floss are ok."""
+        return all([
+                self.manager_client.has_proxy(),
+                self.adapter_client.has_proxy()
+        ])
+
+    def is_bluetoothd_running(self):
+        """Checks whether Floss daemon is running."""
+        # This API doesn't require that the adapter is powered, so we only
+        # check that the manager proxy is up.
+        return self.manager_client.has_proxy()
+
+    def has_adapter(self):
+        """Checks whether an adapter exists."""
+        return len(self.manager_client.get_available_adapters()) > 0
+
+    def set_debug_log_levels(self, bluez_vb, kernel_vb):
+        """Enables verbose logging."""
+        # TODO(abps) - This will be necessary for Floss but may not need to
+        #              touch the kernel. This needs to be implemented at the
+        #              daemon level still.
+        return False
+
+    def start_discovery(self):
+        """Start discovery of remote devices."""
+        if not self.adapter_client.has_proxy():
+            return (False, 'Adapter not found')
+
+        if self.discovery_observer:
+            self.discovery_observer.cleanup()
+
+        self.discovery_observer = self.DiscoveryObserver(
+                self.adapter_client, self.DISCOVERY_TIMEOUT_SEC)
+        return (self.adapter_client.start_discovery(), '')
+
+    def stop_discovery(self):
+        """Stop discovery of remote deviecs."""
+        if not self.adapter_client.has_proxy():
+            return (False, 'Adapter not found')
+
+        if self.discovery_observer:
+            self.discovery_observer.cleanup()
+            self.discovery_observer = None
+
+        return (self.adapter_client.stop_discovery(), '')
+
+    def is_discovering(self):
+        """Check if adapter is discovering."""
+        return self.adapter_client.is_discovering()
+
+    def is_powered_on(self):
+        """Gets whether the default adapter is enabled."""
+        default_adapter = self.manager_client.get_default_adapter()
+        return self.manager_client.get_adapter_enabled(default_adapter)
+
+    def set_powered(self, powered):
+        """Sets the default adapter's enabled state."""
+        default_adapter = self.manager_client.get_default_adapter()
+
+        if powered and not self.manager_client.has_default_adapter():
+            logging.warning('set_powered: Default adapter not available.')
+            return False
+
+        if powered:
+            self.manager_client.start(default_adapter)
+        else:
+            self.manager_client.stop(default_adapter)
+
+        return True
+
+    def reset_on(self):
+        """Reset the default adapter into an ON state."""
+        return self.do_reset(True)
+
+    def reset_off(self):
+        """Reset the default adapter into an OFF state."""
+        return self.do_reset(False)
+
+    def do_reset(self, power_on):
+        """Resets the default adapter."""
+        # Start manager and enable Floss if not already up
+        if not self.configure_floss(enabled=True):
+            return False
+
+        default_adapter = self.manager_client.get_default_adapter()
+
+        def _is_adapter_down(client):
+            return lambda: not client.has_proxy()
+
+        def _is_adapter_ready(client):
+            return lambda: client.has_proxy() and client.get_address()
+
+        self.manager_client.stop(default_adapter)
+        try:
+            condition = _is_adapter_down(self.adapter_client)
+            utils.poll_for_condition(condition=condition,
+                                     desc='Wait for adapter stop',
+                                     sleep_interval=0.5,
+                                     timeout=self.ADAPTER_DAEMON_TIMEOUT_SEC)
+        except Exception as e:
+            logging.error('timeout: error stopping adapter daemon: %s', e)
+            logging.error(traceback.format_exc())
+            return False
+
+        if not power_on:
+            logging.debug('do_reset: Completed with power_on=False')
+            return True
+
+        # Start the client again
+        self.manager_client.start(default_adapter)
+        self.adapter_client = FlossAdapterClient(self.bus, default_adapter)
+
+        try:
+            condition = _is_adapter_ready(self.adapter_client)
+            utils.poll_for_condition(condition=condition,
+                                     desc='Wait for adapter start',
+                                     sleep_interval=0.5,
+                                     timeout=self.ADAPTER_DAEMON_TIMEOUT_SEC)
+        except Exception as e:
+            logging.error('timeout: error starting adapter daemon: %s', e)
+            logging.error(traceback.format_exc())
+            return False
+
+        # We need to observe callbacks for proper operation.
+        if not self.adapter_client.register_callbacks():
+            logging.error('adapter_client: Failed to register callbacks')
+            return False
+
+        logging.debug('do_reset: Completed with power_on=True')
+        return True
+
+    def policy_get_service_allow_list(self):
+        """Gets the service allow list for enterprise policy."""
+        # TODO(abps) - Actually implement this
+        return []
+
+    def policy_set_service_allow_list(self, uuids):
+        """Sets the service allow list for enterprise policy."""
+        # TODO(abps) - Actually implement this
+        return (True, '')
+
+    def get_address(self):
+        """Gets the default adapter address."""
+        return self.adapter_client.get_address()
+
+    def has_device(self, address):
+        """Checks if adapter knows the device."""
+        return self.adapter_client.has_device(address)
+
+    def remove_device_object(self, address):
+        """Removes a known device object."""
+        return self.adapter_client.forget_device(address)
+
+    def connect_device(self, address):
+        """Connect a specific address."""
+        return self.adapter_client.connect_all_enabled_profiles(address)
+
+    def disconnect_device(self, address):
+        """Disconnect a specific address."""
+        return self.adapter_client.disconnect_all_enabled_profiles(address)
+
+    def get_device_property(self, address, prop_name):
+        """Read a property from a remote device.
+
+        @param address: Address of the device to query
+        @param prop_name: Property to be queried
+
+        @return Base64 encoded json if property exists or None.
+        """
+        prop_val = None
+
+        if self.adapter_client.has_device(address):
+            prop_val = self.adapter_client.get_remote_property(
+                    address, prop_name)
+
+        return self._encode_base64_json(prop_val)
+
+    def get_pairable(self):
+        """Gets whether the default adapter is pairable.
+
+        @return True if default adapter is pairable.
+        """
+        # TODO(abps) - Control pairable setting on adapter
+        return self.mock_properties.get('Pairable', False)
+
+    def set_pairable(self, pairable):
+        """Sets default adapter as pairable.
+
+        @param pairable: Control pairable property of the adapter.
+
+        @return True on success.
+        """
+        # TODO(abps) - Control pairable setting on adapter
+        self.mock_properties['Pairable'] = pairable
+        return True
+
+    def pair_legacy_device(self, address, pin, trusted, timeout=60):
+        """Pairs a peer device.
+
+        @param address: BT address of the peer device.
+        @param pin: What pin to use for pairing.
+        @param trusted: Unused by Floss.
+        @param timeout: How long to wait for pairing to complete.
+        """
+
+        class PairingObserver(BluetoothCallbacks,
+                              BluetoothConnectionCallbacks):
+            """Observer of certain callbacks for pairing."""
+
+            def __init__(self, adapter_client, done_event, address, pin):
+                self.adapter_client = adapter_client
+                self.adapter_client.register_callback_observer(
+                        'PairingObserver' + address, self)
+
+                # Event to trigger once we are paired and connected.
+                self.done_event = done_event
+                self.address = address
+                self.pin = pin
+                self.bond_state = BondState.NOT_BONDED
+                self.connected = self.adapter_client.is_connected(address)
+
+            def __del__(self):
+                """Destructor"""
+                if self.adapter_client:
+                    self.cleanup()
+
+            def cleanup(self):
+                """Clean up after this observer."""
+                self.adapter_client.unregister_callback_observer(
+                        'PairingObserver' + address, self)
+                self.adapter_client = None
+
+            def on_bond_state_changed(self, status, device_address, state):
+                """Handle bond state change."""
+                logging.info('[%s] bond state=%d', device_address, state)
+
+                if device_address != self.address:
+                    return
+
+                # If we have a non-zero status, bonding failed in some way.
+                # Report it and unblock the main thread.
+                if status != 0:
+                    logging.error('[%s] failed to bond. Status=%d, State=%d',
+                                  device_address, status, state)
+                    self.done_event.set()
+                    return
+
+                self.bond_state = state
+                logging.info('[%s] bond state=%d', device_address, state)
+
+                # We've completed bonding. Make sure to connect
+                if state == BondState.BONDED:
+                    # If not connected, connect profiles and wait for connected
+                    # callback. Else, unblock the main thread.
+                    if not self.connected:
+                        if not self.adapter_client.connect_all_enabled_profiles(
+                                self.address):
+                            logging.error(
+                                    '[%s] failed on connect_all_enabled_profiles',
+                                    self.address)
+                            self.done_event.set()
+                    else:
+                        self.done_event.set()
+
+            def on_ssp_request(self, remote_device, class_of_device, variant,
+                               passkey):
+                """Handle SSP request."""
+                (remote_address, remote_name) = remote_device
+
+                if remote_address != self.address:
+                    return
+
+                logging.info('Ssp: [%s: %s]: Class=%d, Variant=%d, Passkey=%d',
+                             remote_address, remote_name, class_of_device,
+                             variant, passkey)
+
+                if variant == int(SspVariant.CONSENT):
+                    self.adapter_client.set_pairing_confirmation(
+                            remote_address,
+                            True,
+                            method_callback=self.on_set_pairing_confirmation)
+
+                logging.info('Exited ssp request.')
+
+            def on_set_pairing_confirmation(self, err, result):
+                """Handle async method result from set pairing confirmation."""
+                if err or not result:
+                    logging.error(
+                            'Pairing confirmation failed: err[%s], result[%s]',
+                            err, result)
+                    self.done_event.set()
+
+            def on_device_connected(self, remote_device):
+                """Handle device connection."""
+                (remote_address, _) = remote_device
+
+                logging.info('[%s] connected', remote_address)
+
+                if remote_address != self.address:
+                    return
+
+                self.connected = True
+
+                # If we're already bonded, unblock the main thread.
+                if self.bond_state == BondState.BONDED:
+                    self.done_event.set()
+
+        # Start pairing process in main thread
+
+        done_evt = threading.Event()
+
+        # First we need an observer that watches for callbacks
+        pairing_observer = PairingObserver(self.adapter_client, done_evt,
+                                           address, pin)
+
+        # Pair and connect. If either action fails, mark the done event so that
+        # we fall through without blocking.
+        if not self.device_is_paired(address):
+            if not self.adapter_client.create_bond(address, Transport.AUTO):
+                done_evt.set()
+        elif not self.device_is_connected(address):
+            if not self.adapter_client.connect_all_enabled_profiles(address):
+                done_evt.set()
+
+        done_evt.wait(timeout=timeout)
+        if not done_evt.is_set():
+            logging.error('Timed out waiting for pairing to complete.')
+
+        is_paired = self.device_is_paired(address)
+        is_connected = self.device_is_connected(address)
+
+        # If pairing and hci connection is complete, also trigger all profile
+        # connections here. This is necessary because device connection doesn't
+        # always imply profile connection.
+        if is_paired and is_connected:
+            self.adapter_client.connect_all_enabled_profiles(address)
+
+        logging.info('Pairing result: paired(%s) connected(%s)', is_paired,
+                     is_connected)
+
+        return is_paired and is_connected
+
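+    # Usage sketch (address and pin below are placeholders): pairing and
+    # profile connection are driven by the PairingObserver callbacks above,
+    # so a caller simply blocks on the returned result, e.g.:
+    #
+    #   ok = self.pair_legacy_device('AA:BB:CC:DD:EE:FF', pin='0000',
+    #                                trusted=True, timeout=60)
+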
+    def device_is_connected(self, address):
+        """Checks whether a device is connected.
+
+        @param address: BT address of peer device.
+        @return True if connected.
+        """
+        return self.adapter_client.is_connected(address)
+
+    def has_connection_info(self, address):
+        """Same as |device_is_connected| on Floss.
+
+        Bluez has a separate ConnectionInfo tuple that is read from the kernel
+        but Floss doesn't have this. We have this function simply for
+        compatibility.
+
+        @param address: BT address of peer device.
+        @return True if connected.
+        """
+        return self.device_is_connected(address)
+
+    def get_num_connected_devices(self):
+        """ Return number of remote devices currently connected to the DUT.
+
+        @returns: The number of devices known to bluez with the Connected
+            property active
+        """
+        return self.adapter_client.get_connected_devices_count()
+
+    def device_is_paired(self, address):
+        """Checks if a device is paired.
+
+        @param address: address of the device.
+        @returns: True if device is paired. False otherwise.
+        """
+        return self.adapter_client.is_bonded(address)
+
+    def is_discoverable(self):
+        """Return whether the adapter is currently discoverable."""
+        return self.adapter_client.get_property('Discoverable')
+
+    def set_discoverable(self, discoverable, duration=60):
+        """Sets the adapter as discoverable for given duration in seconds."""
+        return self.adapter_client.set_property('Discoverable', discoverable,
+                                                duration)
+
+    def get_supported_capabilities(self):
+        """" Get supported capabilities of the adapter."""
+        return (json.dumps({}), 'Not yet implemented')
diff --git a/client/cros/multimedia/bluetooth_facade_native.py b/client/cros/multimedia/bluetooth_facade_native.py
deleted file mode 100644
index 63ba703..0000000
--- a/client/cros/multimedia/bluetooth_facade_native.py
+++ /dev/null
@@ -1,3574 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Facade to access the bluetooth-related functionality."""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import base64
-import collections
-from datetime import datetime
-import dbus
-import dbus.mainloop.glib
-import dbus.service
-import glob
-import gobject
-import json
-import logging
-import logging.handlers
-import os
-import re
-import subprocess
-import functools
-import time
-
-import common
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib.cros.bluetooth import bluetooth_socket
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import dbus_util
-from autotest_lib.client.cros.udev_helpers import UdevadmInfo, UdevadmTrigger
-from autotest_lib.client.cros import xmlrpc_server
-from autotest_lib.client.cros.audio import (audio_test_data as
-                                            audio_test_data_module)
-from autotest_lib.client.cros.audio import check_quality
-from autotest_lib.client.cros.audio import cras_utils
-from autotest_lib.client.cros.audio.sox_utils import (
-        convert_format, convert_raw_file, get_file_length,
-        trim_silence_from_wav_file)
-from autotest_lib.client.cros.bluetooth import advertisement
-from autotest_lib.client.cros.bluetooth import adv_monitor_helper
-from autotest_lib.client.cros.bluetooth import output_recorder
-from autotest_lib.client.cros.power import sys_power
-import six
-from six.moves import map
-from six.moves import range
-
-CheckQualityArgsClass = collections.namedtuple(
-        'args_type', ['filename', 'rate', 'channel', 'bit_width'])
-
-
-def _dbus_byte_array_to_b64_string(dbus_byte_array):
-    """Base64 encodes a dbus byte array for use with the xml rpc proxy."""
-    return base64.standard_b64encode(bytearray(dbus_byte_array))
-
-
-def _b64_string_to_dbus_byte_array(b64_string):
-    """Base64 decodes a dbus byte array for use with the xml rpc proxy."""
-    dbus_array = dbus.Array([], signature=dbus.Signature('y'))
-    bytes = bytearray(base64.standard_b64decode(b64_string))
-    for byte in bytes:
-        dbus_array.append(dbus.Byte(byte))
-    return dbus_array
-
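As an aside, a quick round-trip sketch of the two helpers above (assuming python-dbus is installed and the helpers are in scope, as they are in this module):

import dbus

# A dbus byte array survives base64 encoding for the XML-RPC proxy and
# decoding back into dbus types.
original = dbus.Array([dbus.Byte(0x01), dbus.Byte(0xfe)],
                      signature=dbus.Signature('y'))
encoded = _dbus_byte_array_to_b64_string(original)   # b'Af4='
decoded = _b64_string_to_dbus_byte_array(encoded)
assert [int(b) for b in decoded] == [0x01, 0xfe]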
-
-def dbus_print_error(default_return_value=False):
-    """Catch all DBus exceptions and return the error.
-
-    Wrap a function with a try block that catches DBus exceptions and
-    returns the error with the specified return status. The exception is logged
-    to aid in debugging.
-
-    @param default_return_value: value to return, paired with the error
-        string, when the wrapped function raises a DBus exception.
-
-    """
-
-    def decorator(wrapped_function):
-        """Call a function and catch DBus errors.
-
-        @param wrapped_function function to call in dbus safe context.
-        @return function return value or default_return_value on failure.
-
-        """
-
-        @functools.wraps(wrapped_function)
-        def wrapper(*args, **kwargs):
-            """Pass args and kwargs to a dbus safe function.
-
-            @param args formal python arguments.
-            @param kwargs keyword python arguments.
-            @return function return value or default_return_value on failure.
-
-            """
-            logging.debug('%s()', wrapped_function.__name__)
-            try:
-                return wrapped_function(*args, **kwargs)
-
-            except dbus.exceptions.DBusException as e:
-                logging.debug(
-                        'Exception while performing operation %s: %s: %s',
-                        wrapped_function.__name__, e.get_dbus_name(),
-                        e.get_dbus_message())
-                return (default_return_value, str(e))
-
-        return wrapper
-
-    return decorator
-
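A small usage sketch of the decorator above: on a D-Bus failure the wrapper logs the exception and returns the default value paired with the error string. The decorated function here is made up purely for illustration.

import dbus.exceptions

@dbus_print_error(default_return_value=False)
def flaky_dbus_call(should_fail):
    """Hypothetical D-Bus call used only to exercise the decorator."""
    if should_fail:
        raise dbus.exceptions.DBusException('something went wrong')
    return True

print(flaky_dbus_call(False))  # -> True
print(flaky_dbus_call(True))   # -> (False, 'something went wrong')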
-
-class LogRecorder(object):
-    """The LogRecorder class helps to collect logs without a listening thread"""
-
-    class LoggingException(Exception):
-        """A dummy exception class for LogRecorder class."""
-        pass
-
-    def __init__(self, log_path):
-        """Initialize log recorder object
-
-        @param log_path: string path to log file to record
-
-        @raises: LogRecorder.LoggingException on non-existent log file
-        """
-        if not os.path.isfile(log_path):
-            msg = 'Requested log file {} does not exist'.format(log_path)
-            raise LogRecorder.LoggingException(msg)
-
-        self.log_path = log_path
-
-        self.initial_log_size = -1
-        self.log_contents = []
-
-    def StartRecording(self):
-        """Mark initial log size for later comparison"""
-
-        self.initial_log_size = os.path.getsize(self.log_path)
-        self.log_contents = []
-
-    def StopRecording(self):
-        """Gather the logs since StartRecording was called
-
-        @raises: LogRecorder.LoggingException if:
-                - Log file disappeared since StartRecording was called
-                - Log file is smaller than when logging began
-                - StartRecording was never called
-        """
-        if not os.path.isfile(self.log_path):
-            msg = 'File {} disappeared unexpectedly'.format(self.log_path)
-            raise LogRecorder.LoggingException(msg)
-
-        now_size = os.path.getsize(self.log_path)
-
-        if now_size < self.initial_log_size:
-            msg = 'Log became smaller unexpectedly'
-            raise LogRecorder.LoggingException(msg)
-
-        if self.initial_log_size < 0:
-            msg = 'Recording stopped before it started'
-            raise LogRecorder.LoggingException(msg)
-
-        with open(self.log_path, 'r') as mf:
-            # Skip to the point where we started recording
-            mf.seek(self.initial_log_size)
-
-            readsize = now_size - self.initial_log_size
-            self.log_contents = mf.read(readsize).split('\n')
-
-    def LogContains(self, search_str):
-        """Performs simple string checking on each line from the collected log
-
-        @param search_str: string to be located within log contents. This arg
-                is expected not to span multiple lines in the log
-
-        @returns: True if search_str was located in the collected log contents,
-                False otherwise
-        """
-
-        for line in self.log_contents:
-            if search_str in line:
-                return True
-
-        return False
-
-
-class InterleaveLogger(LogRecorder):
-    """LogRecorder class that focus on interleave scan"""
-
-    SYSLOG_PATH = '/var/log/messages'
-
-    # Example bluetooth kernel log:
-    # "2020-11-23T07:52:31.395941Z DEBUG kernel: [ 6469.811135] Bluetooth: "
-    # "cancel_interleave_scan() hci0: hci0 cancelling interleave scan"
-    KERNEL_LOG_PATTERN = ('([^ ]+) DEBUG kernel: \[.*\] Bluetooth: '
-                          '{FUNCTION}\(\) hci0: {LOG_STR}')
-    STATE_PATTERN = KERNEL_LOG_PATTERN.format(
-            FUNCTION='add_le_interleave_adv_monitor_scan',
-            LOG_STR='next state: (.+)')
-    CANCEL_PATTERN = KERNEL_LOG_PATTERN.format(
-            FUNCTION='cancel_interleave_scan',
-            LOG_STR='hci0 cancelling interleave scan')
-    SYSTIME_LENGTH = len('2020-12-18T00:11:22.345678')
-
-    def __init__(self):
-        """ Initialize object
-        """
-        self.reset()
-        self.state_pattern = re.compile(self.STATE_PATTERN)
-        self.cancel_pattern = re.compile(self.CANCEL_PATTERN)
-        super(InterleaveLogger, self).__init__(self.SYSLOG_PATH)
-
-    def reset(self):
-        """ Clear data between each log collection attempt
-        """
-        self.records = []
-        self.cancel_events = []
-
-    def StartRecording(self):
-        """ Reset the previous data and start recording.
-        """
-        self.reset()
-        super(InterleaveLogger, self).StartRecording()
-
-    def StopRecording(self):
-        """ Stop recording and parse logs
-            The following data will be set after this call
-
-            - self.records: a list where each item is a record of
-                            interleave |state| and the |time| the state starts.
-                            |state| could be {'no filter', 'allowlist'}
-                            |time| is system time in sec
-
-            - self.cancel_events: a list of |time| when an interleave cancel
-                                  event log was found
-                                  |time| is system time in sec
-
-            @returns: True if StopRecording succeeded, False otherwise
-
-        """
-        try:
-            super(InterleaveLogger, self).StopRecording()
-        except Exception as e:
-            logging.error(e)
-            return False
-
-        # Use a one-element list so the nested helper below can flag failure.
-        success = [True]
-
-        def sys_time_to_timestamp(time_str):
-            """ Return timestamp of time_str """
-
-            # Strip the suffix of the time string: in some cases it ends with
-            # an extra 'Z', in other cases it ends with a time zone offset
-            # (ex. '+08:00').
-            time_str = time_str[:self.SYSTIME_LENGTH]
-
-            try:
-                dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%f")
-            except Exception as e:
-                logging.error(e)
-                success[0] = False
-                return 0
-
-            return time.mktime(dt.timetuple()) + dt.microsecond * (10**-6)
-
-        for line in self.log_contents:
-            line = line.strip().replace('\\r\\n', '')
-            state_pattern = self.state_pattern.search(line)
-            cancel_pattern = self.cancel_pattern.search(line)
-
-            if cancel_pattern:
-                time_str = cancel_pattern.groups()[0]
-                time_sec = sys_time_to_timestamp(time_str)
-                self.cancel_events.append(time_sec)
-
-            if state_pattern:
-                time_str, state = state_pattern.groups()
-                time_sec = sys_time_to_timestamp(time_str)
-                self.records.append({'time': time_sec, 'state': state})
-
-        return success[0]
-
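To make the kernel-log regexes above concrete, here is a self-contained sketch that matches them against sample lines. The cancel line is the example quoted in the class comment; the state line is a hypothetical one built in the same format.

import re

# Patterns reproduced from InterleaveLogger above.
KERNEL_LOG_PATTERN = ('([^ ]+) DEBUG kernel: \\[.*\\] Bluetooth: '
                      '{FUNCTION}\\(\\) hci0: {LOG_STR}')
CANCEL_PATTERN = KERNEL_LOG_PATTERN.format(
        FUNCTION='cancel_interleave_scan',
        LOG_STR='hci0 cancelling interleave scan')
STATE_PATTERN = KERNEL_LOG_PATTERN.format(
        FUNCTION='add_le_interleave_adv_monitor_scan',
        LOG_STR='next state: (.+)')

# Sample cancel line quoted in the class comment above.
cancel_line = ('2020-11-23T07:52:31.395941Z DEBUG kernel: [ 6469.811135] '
               'Bluetooth: cancel_interleave_scan() hci0: hci0 cancelling '
               'interleave scan')
match = re.search(CANCEL_PATTERN, cancel_line)
assert match.groups()[0] == '2020-11-23T07:52:31.395941Z'

# Hypothetical state-change line built in the same format.
state_line = ('2020-11-23T07:52:30.100000Z DEBUG kernel: [ 6468.501234] '
              'Bluetooth: add_le_interleave_adv_monitor_scan() hci0: '
              'next state: allowlist')
time_str, state = re.search(STATE_PATTERN, state_line).groups()
assert state == 'allowlist'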
-
-class PairingAgent(dbus.service.Object):
-    """The agent handling the authentication process of bluetooth pairing.
-
-    PairingAgent overrides RequestPinCode method to return a given pin code.
-    User can use this agent to pair bluetooth device which has a known
-    pin code.
-
-    TODO (josephsih): pairing modes other than pin code may be
-    supported later.
-
-    """
-
-    def __init__(self, pin, *args, **kwargs):
-        super(PairingAgent, self).__init__(*args, **kwargs)
-        self._pin = pin
-
-    @dbus.service.method('org.bluez.Agent1',
-                         in_signature='o',
-                         out_signature='s')
-    def RequestPinCode(self, device_path):
-        """Requests pin code for a device.
-
-        Returns the known pin code for the request.
-
-        @param device_path: The object path of the device.
-
-        @returns: The known pin code.
-
-        """
-        logging.info('RequestPinCode for %s; return %s', device_path,
-                     self._pin)
-        return self._pin
-
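A sketch of how an agent like this is typically wired up, assuming the standard org.bluez.AgentManager1 API and a GLib main loop. The pin code is made up; the agent path ('/test/agent') and capability ('KeyboardDisplay') mirror the constants used elsewhere in this file, but this is not the facade's actual registration code.

import dbus
import dbus.mainloop.glib

dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
bus = dbus.SystemBus()

# Export the agent on the system bus at the path the facade uses.
agent = PairingAgent('0000', bus, '/test/agent')

# Register it with BlueZ's agent manager so RequestPinCode() gets called
# during legacy pairing.
manager = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'),
                         'org.bluez.AgentManager1')
manager.RegisterAgent(dbus.ObjectPath('/test/agent'), 'KeyboardDisplay')
# ... pair the device here; BlueZ invokes agent.RequestPinCode(device_path) ...
manager.UnregisterAgent(dbus.ObjectPath('/test/agent'))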
-
-class BluetoothFacadeNative(object):
-    """Exposes DUT methods called remotely during Bluetooth autotests.
-
-    All instance methods of this object without a preceding '_' are exposed via
-    an XML-RPC server. This is not a stateless handler object, which means that
-    if you store state inside the delegate, that state will remain around for
-    future calls.
-    """
-
-    UPSTART_PATH = 'unix:abstract=/com/ubuntu/upstart'
-    UPSTART_MANAGER_PATH = '/com/ubuntu/Upstart'
-    UPSTART_MANAGER_IFACE = 'com.ubuntu.Upstart0_6'
-    UPSTART_JOB_IFACE = 'com.ubuntu.Upstart0_6.Job'
-
-    UPSTART_ERROR_UNKNOWNINSTANCE = \
-            'com.ubuntu.Upstart0_6.Error.UnknownInstance'
-    UPSTART_ERROR_ALREADYSTARTED = \
-            'com.ubuntu.Upstart0_6.Error.AlreadyStarted'
-
-    BLUETOOTHD_JOB = 'bluetoothd'
-
-    DBUS_ERROR_SERVICEUNKNOWN = 'org.freedesktop.DBus.Error.ServiceUnknown'
-
-    BLUETOOTH_SERVICE_NAME = 'org.chromium.Bluetooth'
-    BLUEZ_SERVICE_NAME = 'org.bluez'
-    BLUEZ_MANAGER_PATH = '/'
-    BLUEZ_DEBUG_LOG_PATH = '/org/chromium/Bluetooth'
-    BLUEZ_DEBUG_LOG_IFACE = 'org.chromium.Bluetooth.Debug'
-    BLUEZ_MANAGER_IFACE = 'org.freedesktop.DBus.ObjectManager'
-    BLUEZ_ADAPTER_IFACE = 'org.bluez.Adapter1'
-    BLUEZ_BATTERY_IFACE = 'org.bluez.Battery1'
-    BLUEZ_DEVICE_IFACE = 'org.bluez.Device1'
-    BLUEZ_GATT_SERV_IFACE = 'org.bluez.GattService1'
-    BLUEZ_GATT_CHAR_IFACE = 'org.bluez.GattCharacteristic1'
-    BLUEZ_GATT_DESC_IFACE = 'org.bluez.GattDescriptor1'
-    BLUEZ_LE_ADVERTISING_MANAGER_IFACE = 'org.bluez.LEAdvertisingManager1'
-    BLUEZ_ADV_MONITOR_MANAGER_IFACE = 'org.bluez.AdvertisementMonitorManager1'
-    BLUEZ_AGENT_MANAGER_PATH = '/org/bluez'
-    BLUEZ_AGENT_MANAGER_IFACE = 'org.bluez.AgentManager1'
-    BLUEZ_PROFILE_MANAGER_PATH = '/org/bluez'
-    BLUEZ_PROFILE_MANAGER_IFACE = 'org.bluez.ProfileManager1'
-    BLUEZ_ERROR_ALREADY_EXISTS = 'org.bluez.Error.AlreadyExists'
-    BLUEZ_PLUGIN_DEVICE_IFACE = 'org.chromium.BluetoothDevice'
-    DBUS_PROP_IFACE = 'org.freedesktop.DBus.Properties'
-    AGENT_PATH = '/test/agent'
-
-    BLUETOOTH_LIBDIR = '/var/lib/bluetooth'
-    BTMON_STOP_DELAY_SECS = 3
-
-    # Due to problems transferring a date object, we convert to a string first.
-    # This is the standard format that we will use.
-    OUT_DATE_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
-
-    # Timeout for how long we'll wait for BlueZ and the Adapter to show up
-    # after reset.
-    ADAPTER_TIMEOUT = 30
-
-    # How long to wait for hid device
-    HID_TIMEOUT = 15
-    HID_CHECK_SECS = 2
-
-    # How long we should wait for property update signal before we cancel it
-    PROPERTY_UPDATE_TIMEOUT_MILLI_SECS = 5000
-
-    def __init__(self):
-        # Open the Bluetooth Raw socket to the kernel which provides us direct,
-        # raw, access to the HCI controller.
-        self._raw = bluetooth_socket.BluetoothRawSocket()
-
-        # Open the Bluetooth Control socket to the kernel which provides us
-        # raw management access to the Bluetooth Host Subsystem. Read the list
-        # of adapter indexes to determine whether this device has a
-        # Bluetooth adapter.
-        self._control = bluetooth_socket.BluetoothControlSocket()
-        self._has_adapter = len(self._control.read_index_list()) > 0
-
-        # Create an Advertisement Monitor App Manager instance.
-        # This needs to be created before making any dbus connections as
-        # AdvMonitorAppMgr internally forks a new helper process and due to
-        # a limitation of python, it is not possible to fork a new process
-        # once any dbus connections are established.
-        self.advmon_appmgr = adv_monitor_helper.AdvMonitorAppMgr()
-
-        # Set up the connection to Upstart so we can start and stop services
-        # and fetch the bluetoothd job.
-        self._upstart_conn = dbus.connection.Connection(self.UPSTART_PATH)
-        self._upstart = self._upstart_conn.get_object(
-                None, self.UPSTART_MANAGER_PATH)
-
-        bluetoothd_path = self._upstart.GetJobByName(
-                self.BLUETOOTHD_JOB, dbus_interface=self.UPSTART_MANAGER_IFACE)
-        self._bluetoothd = self._upstart_conn.get_object(None, bluetoothd_path)
-
-        # Arrange for the GLib main loop to be the default.
-        dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
-
-        # Set up the connection to the D-Bus System Bus, get the object for
-        # the Bluetooth Userspace Daemon (BlueZ) and that daemon's object for
-        # the Bluetooth Adapter, and the advertising manager.
-        self._system_bus = dbus.SystemBus()
-        self._update_bluez()
-        self._update_adapter()
-        self._update_advertising()
-        self._update_adv_monitor_manager()
-
-        # The agent to handle pin code request, which will be
-        # created when user calls pair_legacy_device method.
-        self._pairing_agent = None
-        # The default capability of the agent.
-        self._capability = 'KeyboardDisplay'
-
-        # Initialize a btmon object to record bluetoothd's activity.
-        self.btmon = output_recorder.OutputRecorder(
-                'btmon', stop_delay_secs=self.BTMON_STOP_DELAY_SECS)
-
-        # Initialize a messages object to record general logging.
-        self.messages = LogRecorder('/var/log/messages')
-
-        self._cras_test_client = cras_utils.CrasTestClient()
-
-        self.advertisements = []
-        self.advmon_interleave_logger = InterleaveLogger()
-        self._chrc_property = None
-        self._timeout_id = 0
-        self._signal_watch = None
-        self._dbus_mainloop = gobject.MainLoop()
-
-    @xmlrpc_server.dbus_safe(False)
-    def set_debug_log_levels(self, dispatcher_vb, newblue_vb, bluez_vb,
-                             kernel_vb):
-        """Enable or disable the debug logs of bluetooth
-
-        @param dispatcher_vb: verbosity of btdispatcher debug log, either 0 or 1
-        @param newblue_vb: verbosity of newblued debug log, either 0 or 1
-        @param bluez_vb: verbosity of bluez debug log, either 0 or 1
-        @param kernel_vb: verbosity of kernel debug log, either 0 or 1
-
-        """
-
-        # TODO(b/145163508, b/145749798): update when debug logs are migrated to
-        #                                 bluez.
-        debug_object = self._system_bus.get_object(self.BLUETOOTH_SERVICE_NAME,
-                                                   self.BLUEZ_DEBUG_LOG_PATH)
-        debug_object.SetLevels(dbus.Byte(dispatcher_vb),
-                               dbus.Byte(newblue_vb),
-                               dbus.Byte(bluez_vb),
-                               dbus.Byte(kernel_vb),
-                               dbus_interface=self.BLUEZ_DEBUG_LOG_IFACE)
-        return
-
-    def log_message(self, msg):
-        """ log a message to /var/log/messages."""
-        try:
-            cmd = ['logger', msg]
-            subprocess.call(cmd)
-        except Exception as e:
-            logging.error("log_message %s failed with %s", cmd, str(e))
-
-    def is_wrt_supported(self):
-        """Check if Bluetooth adapter support WRT logs
-
-        WRT is supported on Intel adapters other than (StP2 and WP2)
-
-        @returns : True if adapter is Intel made.
-        """
-        # Dict of Intel Adapters that support WRT and vid:pid
-        vid_pid_dict = {
-                'HrP2': '8086:02f0',
-                'ThP2': '8086:2526',
-                'JfP2': '8086:31dc',
-                'JfP2-2': '8086:9df0'
-        }  # On Sarien/Arcada
-
-        def _get_lspci_vid_pid(output):
-            """ parse output of lspci -knn and get the vid:pid
-
-            output is of the form '01:00.0 Network controller [0280]:
-            Intel Corporation Device [8086:2526] (rev 29)\n'
-
-            @returns : 'vid:pid' or None
-            """
-            try:
-                for i in output.split(b'\n'):
-                    if 'Network controller' in i.decode('utf-8'):
-                        logging.debug('Got line %s', i)
-                        if 'Intel Corporation' in i.decode('utf-8'):
-                            return i.split(b'[')[2].split(b']')[0]
-                return None
-            except Exception as e:
-                logging.debug('Exception in _get_lspci_vidpid %s', str(e))
-                return None
-
-        try:
-            cmd = ['lspci', '-knn']
-            output = subprocess.check_output(cmd)
-            vid_pid = _get_lspci_vid_pid(output)
-            logging.debug("got vid_pid %s", vid_pid)
-            if vid_pid is not None:
-                if vid_pid in list(vid_pid_dict.values()):
-                    return True
-        except Exception as e:
-            logging.error('is_wrt_supported: %s failed with %s', cmd, str(e))
-            return False
-
-    def enable_wrt_logs(self):
-        """ Enable WRT logs for Intel Bluetooth adapters.
-
-            This is applicable only to Intel adapters.
-            Execute a series of custom hciconfig commands to
-            setup WRT log collection
-
-            Precondition :
-                1) Check if the DUT has Intel controller other than StP2
-                2) Make sure the controller is powered on
-        """
-        fw_trace_cmd = (
-                'hcitool cmd 3f 7c 01 10 00 00 00 FE 81 02 80 04 00 00'
-                ' 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00'
-                ' 00 00 00 00 00 00 00')
-        ddc_read_cmd = 'hcitool cmd 3f 8c 28 01'
-        ddc_write_cmd_prefix = 'hcitool cmd 3f 8b 03 28 01'
-        hw_trace_cmd = (
-                'hcitool cmd 3f 6f 01 08 00 00 00 00 00 00 00 00 01 00'
-                ' 00 03 01 03 03 03 10 03 6A 0A 6A 0A 6A 0A 6A 0A 00 00'
-                ' 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00'
-                ' 00 00 00 00 00 00')
-        multi_comm_trace_str = ('000000F600000000005002000000003F3F3F3'
-                                'F3F003F000000000000000001000000000000000000'
-                                '000000000000000000000000000000000000000000'
-                                '00000000000000000000000000000000000000000'
-                                '00000000000000000')
-        multi_comm_trace_file = ('/sys/kernel/debug/ieee80211'
-                                 '/phy0/iwlwifi/iwlmvm/send_hcmd')
-
-        def _execute_cmd(cmd_str, msg=''):
-            """Wrapper around subprocess.check_output.
-
-            @param cmd_str: Command to be executed as a string
-            @param msg: Optional description of the command
-
-            @returns: (True, output) if execution succeeded
-                  (False, None) if execution failed
-
-            """
-            try:
-                logging.info('Executing %s cmd', msg)
-                cmd = cmd_str.split(' ')
-                logging.debug('command is "%s"', cmd)
-                output = subprocess.check_output(cmd)
-                logging.info('%s cmd successfully executed', msg)
-                logging.debug('output is %s', output)
-                return (True, output)
-            except Exception as e:
-                logging.error('Exception %s while executing %s command',
-                              str(e), msg)
-                return (False, None)
-
-        def _get_ddc_write_cmd(ddc_read_result, ddc_write_cmd_prefix):
-            """ Create ddc_write_cmd from read command
-
-           This function performs the following
-           1) Take the output of ddc_read_cmd which is in following form
-              '< HCI Command: ogf 0x3f, ocf 0x008c, plen 1\n
-               01 \n>
-               HCI Event: 0x0e plen 6\n  01 8C FC 12 00 18 \n'
-           2) Take the last value of the output
-              01 8C FC 12 00 ===>> 18 <====
-           3) Bitwise or with 0x40
-              0x18 | 0x40 = 0x58
-           4) Add it to the end of the ddc_write_cmd
-              'hcitool 01 8C FC 00 28 01 ===> 58 <===='
-
-           """
-            last_line = [
-                    i for i in ddc_read_result.strip().split(b'\n') if i != ''
-            ][-1]
-            last_byte = [i for i in last_line.split(b' ') if i != ''][-1]
-            processed_byte = hex(int(last_byte, 16) | 0x40).split('0x')[1]
-            cmd = ddc_write_cmd_prefix + ' ' + processed_byte
-            logging.debug('ddc_write_cmd is %s', cmd)
-            return cmd
-
-        try:
-            logging.info('Enabling WRT logs')
-            status, _ = _execute_cmd(fw_trace_cmd, 'FW trace cmd')
-            if not status:
-                logging.info('FW trace command execution failed')
-                return False
-
-            status, ddc_read_result = _execute_cmd(ddc_read_cmd, 'DDC Read')
-            if not status:
-                logging.info('DDC Read command  execution failed')
-                return False
-
-            ddc_write_cmd = _get_ddc_write_cmd(ddc_read_result,
-                                               ddc_write_cmd_prefix)
-            logging.debug('DDC Write command  is %s', ddc_write_cmd)
-            status, _ = _execute_cmd(ddc_write_cmd, 'DDC Write')
-            if not status:
-                logging.info('DDC Write command execution failed')
-                return False
-
-            status, hw_trace_result = _execute_cmd(hw_trace_cmd, 'HW trace')
-            if not status:
-                logging.info('HW Trace command  execution failed')
-                return False
-
-            logging.debug('Executing the multi_comm_trace cmd %s to file %s',
-                          multi_comm_trace_str, multi_comm_trace_file)
-            with open(multi_comm_trace_file, 'w') as f:
-                f.write(multi_comm_trace_str + '\n')
-                f.flush()
-
-            logging.info('WRT Logs enabled')
-            return True
-        except Exception as e:
-            logging.error('Exception %s while enabling WRT logs', str(e))
-            return False
-
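To make the DDC byte manipulation described in _get_ddc_write_cmd above concrete, here is a standalone sketch of the same steps on a made-up hcitool reply; only the arithmetic mirrors the docstring, the reply text is illustrative.

# Hypothetical output of the DDC read command ('hcitool cmd 3f 8c 28 01').
ddc_read_result = (b'< HCI Command: ogf 0x3f, ocf 0x008c, plen 1\n'
                   b'  01 \n'
                   b'> HCI Event: 0x0e plen 6\n'
                   b'  01 8C FC 12 00 18 \n')
ddc_write_cmd_prefix = 'hcitool cmd 3f 8b 03 28 01'

# 1) Take the last non-empty line of the reply.
last_line = [ln for ln in ddc_read_result.strip().split(b'\n') if ln != b''][-1]
# 2) Take its last byte ('18' here).
last_byte = [tok for tok in last_line.split(b' ') if tok != b''][-1]
# 3) OR it with 0x40: 0x18 | 0x40 == 0x58.
processed_byte = hex(int(last_byte, 16) | 0x40).split('0x')[1]
# 4) Append it to the write command prefix.
ddc_write_cmd = ddc_write_cmd_prefix + ' ' + processed_byte
assert ddc_write_cmd == 'hcitool cmd 3f 8b 03 28 01 58'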
-    def collect_wrt_logs(self):
-        """Collect the WRT logs for Intel Bluetooth adapters
-
-           This is applicable only to Intel adapters.
-           Execute following command to collect WRT log. The logs are
-           copied to /var/spool/crash/
-
-           'echo 1 | sudo tee /sys/kernel/debug/ieee80211/phy0'
-                           '/iwlwifi/iwlmvm/fw_dbg_collect'
-           This is to be called only after enable_wrt_logs is called
-
-
-           Precondition:
-                 1) enable_wrt_logs has been called
-        """
-
-        def _collect_logs():
-            """Execute command to collect wrt logs."""
-            try:
-                with open(
-                        '/sys/kernel/debug/ieee80211/phy0/iwlwifi/'
-                        'iwlmvm/fw_dbg_collect', 'w') as f:
-                    f.write('1')
-                    f.flush()
-                # There is some flakiness in log collection; this sleep
-                # works around it.
-                time.sleep(10)
-                return True
-            except Exception as e:
-                logging.error('Exception %s in _collect_logs', str(e))
-                return False
-
-        def _get_num_log_files():
-            """Return number of WRT log files."""
-            try:
-                return len(glob.glob('/var/spool/crash/devcoredump_iwlwifi*'))
-            except Exception as e:
-                logging.debug('Exception %s raised in _get_num_log_files',
-                              str(e))
-                return 0
-
-        try:
-            logging.info('Collecting WRT logs')
-            #
-            # The command to trigger the logs does not always work.
-            # As a workaround for this flakiness, execute it multiple times
-            # until a new log is created
-            #
-            num_logs_present = _get_num_log_files()
-            logging.debug('%s logs present', num_logs_present)
-            for i in range(10):
-                time.sleep(1)
-                logging.debug('Executing command to collect WRT logs ')
-                if _collect_logs():
-                    logging.debug('Command to collect WRT logs executed')
-                else:
-                    logging.debug('Command to collect WRT logs failed')
-                    continue
-
-                if _get_num_log_files() > num_logs_present:
-                    logging.info('Successfully collected WRT logs ')
-                    return True
-                else:
-                    logging.debug('Log file not written. Trying again')
-
-            logging.info('Unable to collect WRT logs')
-            return False
-        except Exception as e:
-            logging.error('Exception %s while collecting WRT logs', str(e))
-            return False
-
-    @xmlrpc_server.dbus_safe(False)
-    def start_bluetoothd(self):
-        """start bluetoothd.
-
-        This includes powering up the adapter.
-
-        @returns: True if bluetoothd is started correctly.
-                  False otherwise.
-
-        """
-        try:
-            self._bluetoothd.Start(dbus.Array(signature='s'),
-                                   True,
-                                   dbus_interface=self.UPSTART_JOB_IFACE)
-        except dbus.exceptions.DBusException as e:
-            # if bluetoothd was already started, the exception looks like
-            #     dbus.exceptions.DBusException:
-            #     com.ubuntu.Upstart0_6.Error.AlreadyStarted: Job is already
-            #     running: bluetoothd
-            if e.get_dbus_name() != self.UPSTART_ERROR_ALREADYSTARTED:
-                logging.error('Error starting bluetoothd: %s', e)
-                return False
-
-        logging.debug('waiting for bluez start')
-        try:
-            utils.poll_for_condition(condition=self._update_bluez,
-                                     desc='Bluetooth Daemon has started.',
-                                     timeout=self.ADAPTER_TIMEOUT)
-        except Exception as e:
-            logging.error('timeout: error starting bluetoothd: %s', e)
-            return False
-
-        # Waiting for the self._adapter object.
-        # This does not mean that the adapter is powered on.
-        logging.debug('waiting for bluez to obtain adapter information')
-        try:
-            utils.poll_for_condition(
-                    condition=self._update_adapter,
-                    desc='Bluetooth Daemon has adapter information.',
-                    timeout=self.ADAPTER_TIMEOUT)
-        except Exception as e:
-            logging.error('timeout: error starting adapter: %s', e)
-            return False
-
-        # Waiting for the self._advertising interface object.
-        logging.debug('waiting for bluez to obtain interface manager.')
-        try:
-            utils.poll_for_condition(
-                    condition=self._update_advertising,
-                    desc='Bluetooth Daemon has advertising interface.',
-                    timeout=self.ADAPTER_TIMEOUT)
-        except utils.TimeoutError:
-            logging.error('timeout: error getting advertising interface')
-            return False
-
-        return True
-
-    @xmlrpc_server.dbus_safe(False)
-    def stop_bluetoothd(self):
-        """stop bluetoothd.
-
-        @returns: True if bluetoothd is stopped correctly.
-                  False otherwise.
-
-        """
-
-        def bluez_stopped():
-            """Checks the bluetooth daemon status.
-
-            @returns: True if bluez is stopped. False otherwise.
-
-            """
-            return not self._update_bluez()
-
-        try:
-            self._bluetoothd.Stop(dbus.Array(signature='s'),
-                                  True,
-                                  dbus_interface=self.UPSTART_JOB_IFACE)
-        except dbus.exceptions.DBusException as e:
-            # If bluetoothd was stopped already, the exception looks like
-            #    dbus.exceptions.DBusException:
-            #    com.ubuntu.Upstart0_6.Error.UnknownInstance: Unknown instance:
-            if e.get_dbus_name() != self.UPSTART_ERROR_UNKNOWNINSTANCE:
-                logging.error('Error stopping bluetoothd!')
-                return False
-
-        logging.debug('waiting for bluez stop')
-        try:
-            utils.poll_for_condition(condition=bluez_stopped,
-                                     desc='Bluetooth Daemon has stopped.',
-                                     timeout=self.ADAPTER_TIMEOUT)
-            bluetoothd_stopped = True
-        except Exception as e:
-            logging.error('timeout: error stopping bluetoothd: %s', e)
-            bluetoothd_stopped = False
-
-        return bluetoothd_stopped
-
-    def is_bluetoothd_running(self):
-        """Is bluetoothd running?
-
-        @returns: True if bluetoothd is running
-
-        """
-        return bool(self._get_dbus_proxy_for_bluetoothd())
-
-    def is_bluetoothd_proxy_valid(self):
-        """Checks whether the proxy object for bluetoothd is ok.
-
-        The dbus proxy object (self._bluez) can become unusable if bluetoothd
-        crashes or restarts for any reason. This method checks whether this has
-        happened by attempting to use the object proxy. If bluetoothd has
-        restarted (or is not available), then the session will no longer be
-        valid and this will result in a dbus exception.
-
-        Returns:
-            True if the bluez proxy is still usable. False otherwise.
-        """
-
-        try:
-            _ = self._bluez.GetManagedObjects(
-                    dbus_interface=self.BLUEZ_MANAGER_IFACE)
-        except dbus.exceptions.DBusException:
-            return False
-
-        return True
-
-    def _update_bluez(self):
-        """Store a D-Bus proxy for the Bluetooth daemon in self._bluez.
-
-        This may be called in a loop until it returns True to wait for the
-        daemon to be ready after it has been started.
-
-        @return True on success, False otherwise.
-
-        """
-        self._bluez = self._get_dbus_proxy_for_bluetoothd()
-        return bool(self._bluez)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _get_dbus_proxy_for_bluetoothd(self):
-        """Get the D-Bus proxy for the Bluetooth daemon.
-
-        @return the D-Bus proxy object on success, None otherwise.
-
-        """
-        bluez = None
-        try:
-            bluez = self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                                self.BLUEZ_MANAGER_PATH)
-            logging.debug('bluetoothd is running')
-        except dbus.exceptions.DBusException as e:
-            # When bluetoothd is not running, the exception looks like
-            #     dbus.exceptions.DBusException:
-            #     org.freedesktop.DBus.Error.ServiceUnknown: The name org.bluez
-            #     was not provided by any .service files
-            if e.get_dbus_name() == self.DBUS_ERROR_SERVICEUNKNOWN:
-                logging.debug('bluetoothd is not running')
-            else:
-                logging.error('Error getting dbus proxy for Bluez: %s', e)
-        return bluez
-
-    def _update_adapter(self):
-        """Store a D-Bus proxy for the local adapter in self._adapter.
-
-        This may be called in a loop until it returns True to wait for the
-        daemon to be ready, and have obtained the adapter information itself,
-        after it has been started.
-
-        Since not all devices will have adapters, this will also return True
-        in the case where we have obtained an empty adapter index list from the
-        kernel.
-
-        Note that this method does not power on the adapter.
-
-        @return True on success, including if there is no local adapter,
-            False otherwise.
-
-        """
-        self._adapter = None
-        if self._bluez is None:
-            logging.warning('Bluez not found!')
-            return False
-        if not self._has_adapter:
-            logging.debug('Device has no adapter; returning')
-            return True
-        self._adapter = self._get_adapter()
-        return bool(self._adapter)
-
-    def _update_advertising(self):
-        """Store a D-Bus proxy for the local advertising interface manager.
-
-        This may be called repeatedly in a loop until True is returned;
-        otherwise we wait for bluetoothd to start. After bluetoothd starts, we
-        check the existence of a local adapter and proceed to get the
-        advertisement interface manager.
-
-        Since not all devices will have adapters, this will also return True
-        in the case where there is no adapter.
-
-        @return True on success, including if there is no local adapter,
-                False otherwise.
-
-        """
-        self._advertising = None
-        if self._bluez is None:
-            logging.warning('Bluez not found!')
-            return False
-        if not self._has_adapter:
-            logging.debug('Device has no adapter; returning')
-            return True
-        self._advertising = self._get_advertising()
-        return bool(self._advertising)
-
-    def _update_adv_monitor_manager(self):
-        """Store a D-Bus proxy for the local advertisement monitor manager.
-
-        This may be called repeatedly in a loop until True is returned;
-        otherwise we wait for bluetoothd to start. After bluetoothd starts, we
-        check the existence of a local adapter and proceed to get the
-        advertisement monitor manager interface.
-
-        Since not all devices will have adapters, this will also return True
-        in the case where there is no adapter.
-
-        @return True on success, including if there is no local adapter,
-                False otherwise.
-
-        """
-        self._adv_monitor_manager = None
-        if self._bluez is None:
-            logging.warning('Bluez not found!')
-            return False
-        if not self._has_adapter:
-            logging.debug('Device has no adapter; returning without '
-                          'advertisement monitor manager')
-            return True
-        self._adv_monitor_manager = self._get_adv_monitor_manager()
-        return bool(self._adv_monitor_manager)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _get_adapter(self):
-        """Get the D-Bus proxy for the local adapter.
-
-        @return the adapter on success. None otherwise.
-
-        """
-        objects = self._bluez.GetManagedObjects(
-                dbus_interface=self.BLUEZ_MANAGER_IFACE)
-        for path, ifaces in six.iteritems(objects):
-            logging.debug('%s -> %r', path, list(ifaces.keys()))
-            if self.BLUEZ_ADAPTER_IFACE in ifaces:
-                logging.debug('using adapter %s', path)
-                adapter = self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                                      path)
-                return adapter
-        else:
-            logging.warning('No adapter found in interface!')
-            return None
-
-    @xmlrpc_server.dbus_safe(False)
-    def _get_advertising(self):
-        """Get the D-Bus proxy for the local advertising interface.
-
-        @return the advertising interface object.
-
-        """
-        return dbus.Interface(self._adapter,
-                              self.BLUEZ_LE_ADVERTISING_MANAGER_IFACE)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _get_adv_monitor_manager(self):
-        """Get the D-Bus proxy for the local advertisement monitor manager.
-
-        @return the advertisement monitor manager interface object.
-
-        """
-        return dbus.Interface(self._adapter,
-                              self.BLUEZ_ADV_MONITOR_MANAGER_IFACE)
-
-    @xmlrpc_server.dbus_safe(False)
-    def reset_on(self):
-        """Reset the adapter and settings and power up the adapter.
-
-        @return True on success, False otherwise.
-
-        """
-        return self._reset(set_power=True)
-
-    @xmlrpc_server.dbus_safe(False)
-    def reset_off(self):
-        """Reset the adapter and settings, leave the adapter powered off.
-
-        @return True on success, False otherwise.
-
-        """
-        return self._reset(set_power=False)
-
-    def has_adapter(self):
-        """Return if an adapter is present.
-
-        This will only return True if we have determined both that there is
-        a Bluetooth adapter on this device (kernel adapter index list is not
-        empty) and that the Bluetooth daemon has exported an object for it.
-
-        @return True if an adapter is present, False if not.
-
-        """
-        return self._has_adapter and self._adapter is not None
-
-    def is_wake_enabled(self):
-        """Checks whether the bluetooth adapter has wake enabled.
-
-        This will walk through all parents of the hci0 sysfs path and try to
-        find one with a 'power/wakeup' entry and returns whether its value is
-        'enabled'.
-
-        @return True if 'power/wakeup' of an hci0 parent is 'enabled'
-        """
-        enabled = self._is_wake_enabled()
-        return enabled
-
-    def set_wake_enabled(self, value):
-        """Sets wake enabled to the value if path exists.
-
-        This will walk through all parents of the hci0 sysfs path and write the
-        value to the first one it finds.
-
-        Args:
-            value: Sets power/wakeup to "enabled" if value is true, else
-                   "disabled"
-
-        @return True if it wrote value to a power/wakeup, False otherwise
-        """
-        return self._set_wake_enabled(value)
-
-    def wait_for_hid_device(self, device_address):
-        """Waits for hid device with given device address.
-
-        Args:
-            device_address: Peripheral address
-        """
-
-        def match_hid_to_device(hidpath, device_address):
-            """Check if given hid syspath is for the given device address """
-            # If the syspath has a uniq property that matches the peripheral
-            # device's address, then it has matched
-            props = UdevadmInfo.GetProperties(hidpath)
-            if props.get('uniq', '').lower() == device_address.lower():
-                logging.info('Found hid device for address {} at {}'.format(
-                        device_address, hidpath))
-                return True
-            else:
-                logging.info('Path {} is not right device.'.format(hidpath))
-
-            return False
-
-        start = datetime.now()
-
-        # Keep scanning udev for correct hid device
-        while (datetime.now() - start).seconds <= self.HID_TIMEOUT:
-            existing_inputs = UdevadmTrigger(
-                    subsystem_match=['input']).DryRun()
-            for entry in existing_inputs:
-                bt_hid = any([t in entry for t in ['uhid', 'hci']])
-                logging.info('udevadm trigger entry is {}: {}'.format(
-                        bt_hid, entry))
-
-                if bt_hid and match_hid_to_device(entry, device_address):
-                    return True
-
-            time.sleep(self.HID_CHECK_SECS)
-
-        return False
-
-    def _reset(self, set_power=False):
-        """Remove remote devices and set adapter to set_power state.
-
-        Do not restart bluetoothd, as this may have side effects:
-        an unhappy Chrome may disable the adapter randomly.
-
-        @param set_power: adapter power state to set (True or False).
-
-        @return True on success, False otherwise.
-
-        """
-        logging.debug('_reset')
-
-        if not self._adapter:
-            logging.warning('Adapter not found!')
-            return False
-
-        objects = self._bluez.GetManagedObjects(
-                dbus_interface=self.BLUEZ_MANAGER_IFACE, byte_arrays=True)
-
-        devices = []
-        for path, ifaces in six.iteritems(objects):
-            if self.BLUEZ_DEVICE_IFACE in ifaces:
-                devices.append(objects[path][self.BLUEZ_DEVICE_IFACE])
-
-        # Turn on the adapter in order to remove all remote devices.
-        if not self._is_powered_on():
-            if not self._set_powered(True):
-                logging.warning('Unable to power on the adapter')
-                return False
-
-        for device in devices:
-            logging.debug('removing %s', device.get('Address'))
-            self.remove_device_object(device.get('Address'))
-
-        # Toggle power to the adapter.
-        if not self._set_powered(False):
-            logging.warning('Unable to power off adapter')
-            return False
-        if set_power and not self._set_powered(True):
-            logging.warning('Unable to power on adapter')
-            return False
-
-        return True
-
-    @xmlrpc_server.dbus_safe(False)
-    def set_powered(self, powered):
-        """Set the adapter power state.
-
-        @param powered: adapter power state to set (True or False).
-
-        @return True on success, False otherwise.
-
-        """
-        if not self._adapter:
-            if not powered:
-                # Return success if we are trying to power off an adapter that's
-                # missing or gone away, since the expected result has happened.
-                return True
-            else:
-                logging.warning('Adapter not found!')
-                return False
-        return self._set_powered(powered)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _set_powered(self, powered):
-        """Set the adapter power state.
-
-        @param powered: adapter power state to set (True or False).
-
-        """
-        logging.debug('_set_powered %r', powered)
-        self._adapter.Set(self.BLUEZ_ADAPTER_IFACE,
-                          'Powered',
-                          dbus.Boolean(powered, variant_level=1),
-                          dbus_interface=dbus.PROPERTIES_IFACE)
-        return True
-
-    @xmlrpc_server.dbus_safe(False)
-    def set_discoverable(self, discoverable):
-        """Set the adapter discoverable state.
-
-        @param discoverable: adapter discoverable state to set (True or False).
-
-        @return True on success, False otherwise.
-
-        """
-        if not discoverable and not self._adapter:
-            # Return success if we are trying to make an adapter that is
-            # missing or has gone away undiscoverable, since the expected
-            # result has already happened.
-            return True
-        self._adapter.Set(self.BLUEZ_ADAPTER_IFACE,
-                          'Discoverable',
-                          dbus.Boolean(discoverable, variant_level=1),
-                          dbus_interface=dbus.PROPERTIES_IFACE)
-        return True
-
-    @xmlrpc_server.dbus_safe(False)
-    def get_discoverable_timeout(self):
-        """Get the adapter discoverable_timeout.
-
-        @return the discoverable timeout in seconds on success, False otherwise.
-
-        """
-        return int(
-                self._adapter.Get(self.BLUEZ_ADAPTER_IFACE,
-                                  'DiscoverableTimeout',
-                                  dbus_interface=dbus.PROPERTIES_IFACE))
-
-    @xmlrpc_server.dbus_safe(False)
-    def set_discoverable_timeout(self, discoverable_timeout):
-        """Set the adapter discoverable_timeout property.
-
-        @param discoverable_timeout: adapter discoverable_timeout value
-               in seconds to set (Integer).
-
-        @return True on success, False otherwise.
-
-        """
-        self._adapter.Set(self.BLUEZ_ADAPTER_IFACE,
-                          'DiscoverableTimeout',
-                          dbus.UInt32(discoverable_timeout, variant_level=1),
-                          dbus_interface=dbus.PROPERTIES_IFACE)
-        return True
-
-    @xmlrpc_server.dbus_safe(False)
-    def get_pairable_timeout(self):
-        """Get the adapter pairable_timeout.
-
-        @return the pairable timeout in seconds on success, False otherwise.
-
-        """
-        return int(
-                self._adapter.Get(self.BLUEZ_ADAPTER_IFACE,
-                                  'PairableTimeout',
-                                  dbus_interface=dbus.PROPERTIES_IFACE))
-
-    @xmlrpc_server.dbus_safe(False)
-    def set_pairable_timeout(self, pairable_timeout):
-        """Set the adapter pairable_timeout property.
-
-        @param pairable_timeout: adapter pairable_timeout value
-               in seconds to set (Integer).
-
-        @return True on success, False otherwise.
-
-        """
-        self._adapter.Set(self.BLUEZ_ADAPTER_IFACE,
-                          'PairableTimeout',
-                          dbus.UInt32(pairable_timeout, variant_level=1),
-                          dbus_interface=dbus.PROPERTIES_IFACE)
-        return True
-
-    @xmlrpc_server.dbus_safe(False)
-    def set_pairable(self, pairable):
-        """Set the adapter pairable state.
-
-        @param pairable: adapter pairable state to set (True or False).
-
-        @return True on success, False otherwise.
-
-        """
-        self._adapter.Set(self.BLUEZ_ADAPTER_IFACE,
-                          'Pairable',
-                          dbus.Boolean(pairable, variant_level=1),
-                          dbus_interface=dbus.PROPERTIES_IFACE)
-        return True
-
-    @xmlrpc_server.dbus_safe(False)
-    def set_adapter_alias(self, alias):
-        """Set the adapter alias.
-
-        @param alias: adapter alias to set with type String
-
-        @return True on success, False otherwise.
-        """
-        self._adapter.Set(self.BLUEZ_ADAPTER_IFACE,
-                          'Alias',
-                          dbus.String(alias),
-                          dbus_interface=dbus.PROPERTIES_IFACE)
-        return True
-
-    @xmlrpc_server.dbus_safe(False)
-    def _get_adapter_properties(self):
-        """Read the adapter properties from the Bluetooth Daemon.
-
-        @return the properties as a JSON-encoded dictionary on success,
-            the value False otherwise.
-
-        """
-        if self._bluez and self._adapter:
-            objects = self._bluez.GetManagedObjects(
-                    dbus_interface=self.BLUEZ_MANAGER_IFACE)
-            props = objects[self._adapter.object_path][
-                    self.BLUEZ_ADAPTER_IFACE]
-        else:
-            props = {}
-        logging.debug('get_adapter_properties')
-        for i in props.items():
-            logging.debug(i)
-        return props
-
-    def get_adapter_properties(self):
-        return json.dumps(self._get_adapter_properties())
-
-    def _is_powered_on(self):
-        return bool(self._get_adapter_properties().get(u'Powered'))
-
-    def _get_wake_enabled_path(self):
-        # Walk up the parents from hci0 sysfs path and find the first one with
-        # a power/wakeup property. Return that path (including power/wakeup).
-
-        # Resolve hci path to get full device path (i.e. w/ usb or uart)
-        search_at = os.path.realpath('/sys/class/bluetooth/hci0')
-
-        # Exit early if path doesn't exist
-        if not os.path.exists(search_at):
-            return None
-
-        # Walk up parents and try to find one with 'power/wakeup'
-        for _ in range(search_at.count('/') - 1):
-            search_at = os.path.normpath(os.path.join(search_at, '..'))
-            try:
-                path = os.path.join(search_at, 'power', 'wakeup')
-                with open(path, 'r') as f:
-                    return path
-            except IOError:
-                # No power wakeup at the given location so keep going
-                continue
-
-        return None
-
-    def _is_wake_enabled(self):
-        search_at = self._get_wake_enabled_path()
-
-        if search_at is not None:
-            try:
-                with open(search_at, 'r') as f:
-                    value = f.read()
-                    logging.info('Power/wakeup found at {}: {}'.format(
-                            search_at, value))
-                    return 'enabled' in value
-            except IOError:
-                # Path was not readable
-                return False
-
-        logging.debug('No power/wakeup path found')
-        return False
-
-    def _set_wake_enabled(self, value):
-        path = self._get_wake_enabled_path()
-        if path is not None:
-            try:
-                with open(path, 'w') as f:
-                    f.write('enabled' if value else 'disabled')
-                    return True
-            except IOError:
-                # Path was not writeable
-                return False
-
-        return False
-
-    def read_version(self):
-        """Read the version of the management interface from the Kernel.
-
-        @return the information as a JSON-encoded tuple of:
-          ( version, revision )
-
-        """
-        #TODO(howardchung): resolve 'cannot allocate memory' error when
-        #                   BluetoothControlSocket idle too long(about 3 secs)
-        #                   (b:137603211)
-        _control = bluetooth_socket.BluetoothControlSocket()
-        return json.dumps(_control.read_version())
-
-    def read_supported_commands(self):
-        """Read the set of supported commands from the Kernel.
-
-        @return the information as a JSON-encoded tuple of:
-          ( commands, events )
-
-        """
-        #TODO(howardchung): resolve 'cannot allocate memory' error when
-        #                   BluetoothControlSocket idle too long(about 3 secs)
-        #                   (b:137603211)
-        _control = bluetooth_socket.BluetoothControlSocket()
-        return json.dumps(_control.read_supported_commands())
-
-    def read_index_list(self):
-        """Read the list of currently known controllers from the Kernel.
-
-        @return the information as a JSON-encoded array of controller indexes.
-
-        """
-        #TODO(howardchung): resolve 'cannot allocate memory' error when
-        #                   BluetoothControlSocket idle too long(about 3 secs)
-        #                   (b:137603211)
-        _control = bluetooth_socket.BluetoothControlSocket()
-        return json.dumps(_control.read_index_list())
-
-    def read_info(self):
-        """Read the adapter information from the Kernel.
-
-        @return the information as a JSON-encoded tuple of:
-          ( address, bluetooth_version, manufacturer_id,
-            supported_settings, current_settings, class_of_device,
-            name, short_name )
-
-        """
-        #TODO(howardchung): resolve 'cannot allocate memory' error when
-        #                   BluetoothControlSocket idle too long(about 3 secs)
-        #                   (b:137603211)
-        _control = bluetooth_socket.BluetoothControlSocket()
-        return json.dumps(_control.read_info(0))
-
-    def add_device(self, address, address_type, action):
-        """Add a device to the Kernel action list.
-
-        @param address: Address of the device to add.
-        @param address_type: Type of device in @address.
-        @param action: Action to take.
-
-        @return on success, a JSON-encoded tuple of:
-          ( address, address_type ), None on failure.
-
-        """
-        #TODO(howardchung): resolve 'cannot allocate memory' error when
-        #                   BluetoothControlSocket idle too long(about 3 secs)
-        #                   (b:137603211)
-        _control = bluetooth_socket.BluetoothControlSocket()
-        return json.dumps(_control.add_device(0, address, address_type,
-                                              action))
-
-    def remove_device(self, address, address_type):
-        """Remove a device from the Kernel action list.
-
-        @param address: Address of the device to remove.
-        @param address_type: Type of device in @address.
-
-        @return on success, a JSON-encoded tuple of:
-          ( address, address_type ), None on failure.
-
-        """
-        #TODO(howardchung): resolve 'cannot allocate memory' error when
-        #                   BluetoothControlSocket idle too long(about 3 secs)
-        #                   (b:137603211)
-        _control = bluetooth_socket.BluetoothControlSocket()
-        return json.dumps(_control.remove_device(0, address, address_type))
-
-    @xmlrpc_server.dbus_safe(False)
-    def _get_devices(self):
-        """Read information about remote devices known to the adapter.
-
-        @return the properties of each device in a list
-
-        """
-        objects = self._bluez.GetManagedObjects(
-                dbus_interface=self.BLUEZ_MANAGER_IFACE, byte_arrays=True)
-        devices = []
-        for path, ifaces in six.iteritems(objects):
-            if self.BLUEZ_DEVICE_IFACE in ifaces:
-                devices.append(objects[path][self.BLUEZ_DEVICE_IFACE])
-        return devices
-
-    def _encode_base64_json(self, data):
-        """Base64 encode and json encode the data.
-        Required to handle non-ascii data
-
-        @param data: data to be base64 and JSON encoded
-
-        @return: base64 and JSON encoded data
-
-        """
-        logging.debug('_encode_base64_json raw data is %s', data)
-        b64_encoded = utils.base64_recursive_encode(data)
-        logging.debug('base64 encoded data is %s', b64_encoded)
-        json_encoded = json.dumps(b64_encoded)
-        logging.debug('JSON encoded data is %s', json_encoded)
-        return json_encoded
-
-    def get_devices(self):
-        """Read information about remote devices known to the adapter.
-
-        @return the properties of each device as a JSON-encoded array of
-            dictionaries on success, the value False otherwise.
-
-        """
-        devices = self._get_devices()
-        return self._encode_base64_json(devices)
-
-    @xmlrpc_server.dbus_safe(None)
-    def get_device_property(self, address, prop_name):
-        """Read a property of BT device by directly querying device dbus object
-
-        @param address: Address of the device to query
-        @param prop_name: Property to be queried
-
-        @return Base 64 JSON repr of property if device is found and has
-                property, otherwise None on failure. JSON is a recursive
-                converter that automatically converts dbus types to python
-                natives, and base64 allows us to pass special characters
-                over xmlrpc.
-                Decode is done in bluetooth_device.py
-        """
-
-        prop_val = None
-
-        # Grab dbus object, _find_device will catch any thrown dbus error
-        device_obj = self._find_device(address)
-
-        if device_obj:
-            # Query dbus object for property
-            prop_val = device_obj.Get(self.BLUEZ_DEVICE_IFACE,
-                                      prop_name,
-                                      dbus_interface=dbus.PROPERTIES_IFACE)
-
-        return self._encode_base64_json(prop_val)
-
-    @xmlrpc_server.dbus_safe(None)
-    def get_battery_property(self, address, prop_name):
-        """Read a property from Battery1 interface.
-
-        @param address: Address of the device to query
-        @param prop_name: Property to be queried
-
-        @return The battery percentage value, or None if it does not exist.
-        """
-
-        prop_val = None
-
-        # Grab dbus object, _find_battery will catch any thrown dbus error
-        battery_obj = self._find_battery(address)
-
-        if battery_obj:
-            # Query dbus object for property
-            prop_val = battery_obj.Get(self.BLUEZ_BATTERY_IFACE,
-                                       prop_name,
-                                       dbus_interface=dbus.PROPERTIES_IFACE)
-
-        return dbus_util.dbus2primitive(prop_val)
-
-    @xmlrpc_server.dbus_safe(False)
-    def set_discovery_filter(self, filter):
-        """Set the discovery filter.
-
-        @param filter: The discovery filter to set.
-
-        @return True on success, False otherwise.
-
-        """
-        if not self._adapter:
-            return False
-        self._adapter.SetDiscoveryFilter(
-                filter, dbus_interface=self.BLUEZ_ADAPTER_IFACE)
-        return True
-
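-    # Illustrative sketch (assumption, not from the original code): per the
-    # BlueZ Adapter API, the discovery filter is a dict of properties such as
-    # 'Transport', 'UUIDs' and 'RSSI', e.g.:
-    #
-    #     self.set_discovery_filter({'Transport': 'le',
-    #                                'RSSI': dbus.Int16(-80)})
-    #
-    # The exact keys and value types are defined by the BlueZ D-Bus API.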
-    @xmlrpc_server.dbus_safe(False)
-    @dbus_print_error()
-    def start_discovery(self):
-        """Start discovery of remote devices.
-
-        Obtain the discovered device information using get_devices(); call
-        stop_discovery() when done.
-
-        @return (True, None) on success, (False, <error string>) otherwise.
-
-        """
-        if not self._adapter:
-            return (False, "Adapter Not Found")
-        self._adapter.StartDiscovery(dbus_interface=self.BLUEZ_ADAPTER_IFACE)
-        return (True, None)
-
-    @dbus_print_error()
-    def stop_discovery(self):
-        """Stop discovery of remote devices.
-
-        @return (True, None) on success, (False, <error string>) otherwise.
-
-        """
-        if not self._adapter:
-            return (False, "Adapter Not Found")
-        self._adapter.StopDiscovery(dbus_interface=self.BLUEZ_ADAPTER_IFACE)
-        return (True, None)
-
-    def get_dev_info(self):
-        """Read raw HCI device information.
-
-        @return JSON-encoded tuple of:
-                (index, name, address, flags, device_type, bus_type,
-                       features, pkt_type, link_policy, link_mode,
-                       acl_mtu, acl_pkts, sco_mtu, sco_pkts,
-                       err_rx, err_tx, cmd_tx, evt_rx, acl_tx, acl_rx,
-                       sco_tx, sco_rx, byte_rx, byte_tx) on success,
-                None on failure.
-
-        """
-        return json.dumps(self._raw.get_dev_info(0))
-
-    @dbus_print_error(None)
-    def get_supported_capabilities(self):
-        """ Get supported capabilities of the adapter
-
-        @returns (capabilities, None) on success, (None, <error>) on failure.
-        """
-        value = self._adapter.GetSupportedCapabilities(
-                dbus_interface=self.BLUEZ_ADAPTER_IFACE)
-        return (json.dumps(value), None)
-
-    @xmlrpc_server.dbus_safe(False)
-    def register_profile(self, path, uuid, options):
-        """Register new profile (service).
-
-        @param path: Path to the profile object.
-        @param uuid: Service Class ID of the service as string.
-        @param options: Dictionary of options for the new service, compliant
-                        with BlueZ D-Bus Profile API standard.
-
-        @return True on success, False otherwise.
-
-        """
-        profile_manager = dbus.Interface(
-                self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                            self.BLUEZ_PROFILE_MANAGER_PATH),
-                self.BLUEZ_PROFILE_MANAGER_IFACE)
-        dbus_object = self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                                  path)
-        profile_manager.RegisterProfile(
-                dbus_object, uuid, dbus.Dictionary(options, signature='sv'))
-        return True
-
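-    # Illustrative sketch (assumption, with hypothetical values): the options
-    # dict follows the BlueZ D-Bus Profile API, e.g. something like:
-    #
-    #     self.register_profile('/test/spp_profile',
-    #                           '00001101-0000-1000-8000-00805f9b34fb',
-    #                           {'Name': 'TestSPP', 'Role': 'server'})
-    #
-    # Valid option keys ('Name', 'Role', 'Channel', ...) are defined by BlueZ.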
-    def has_device(self, address):
-        """Checks if the device with a given address exists.
-
-        @param address: Address of the device.
-
-        @returns: True if there is an interface object with that address.
-                  False if the device is not found.
-
-        @raises: Exception if a D-Bus error is encountered.
-
-        """
-        result = self._find_device(address)
-        logging.debug('has_device result: %s', str(result))
-
-        # The result being False indicates that there is a D-Bus error.
-        if result is False:
-            raise Exception('dbus.Interface error')
-
-        # Return True if the result is not None, e.g. a D-Bus interface object;
-        # False otherwise.
-        return bool(result)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _find_device(self, address):
-        """Finds the device with a given address.
-
-        Find the device with a given address and returns the
-        device interface.
-
-        @param address: Address of the device.
-
-        @returns: An 'org.bluez.Device1' interface to the device.
-                  None if device can not be found.
-        """
-        path = self._get_device_path(address)
-        if path:
-            obj = self._system_bus.get_object(self.BLUEZ_SERVICE_NAME, path)
-            return dbus.Interface(obj, self.BLUEZ_DEVICE_IFACE)
-        logging.info('Device not found')
-        return None
-
-    @xmlrpc_server.dbus_safe(None)
-    def _find_battery(self, address):
-        """Finds the battery with a given address.
-
-        Find the battery with a given address and returns the
-        battery interface.
-
-        @param address: Address of the device.
-
-        @returns: An 'org.bluez.Battery1' interface to the device.
-                  None if device can not be found.
-        """
-        path = self._get_device_path(address)
-        if path:
-            obj = self._system_bus.get_object(self.BLUEZ_SERVICE_NAME, path)
-            return dbus.Interface(obj, self.BLUEZ_BATTERY_IFACE)
-        logging.info('Battery not found')
-        return None
-
-    @xmlrpc_server.dbus_safe(False)
-    def _get_device_path(self, address):
-        """Gets the path for a device with a given address.
-
-        Find the device with a given address and returns the
-        path for the device.
-
-        @param address: Address of the device.
-
-        @returns: The D-Bus object path of the device, or None if the device
-            is not found in the object tree.
-
-        """
-
-        # Create device path, i.e. '/org/bluez/hci0/dev_AA_BB_CC_DD_EE_FF' based
-        # on path assignment scheme used in bluez
-        address_up = address.replace(':', '_')
-        device_path = '{}/dev_{}'.format(self._adapter.object_path, address_up)
-
-        # Verify the Address property agrees to confirm we have the device
-        try:
-            device = self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                                 device_path)
-            found_addr = device.Get(self.BLUEZ_DEVICE_IFACE,
-                                    'Address',
-                                    dbus_interface=dbus.PROPERTIES_IFACE)
-
-            if found_addr == address:
-                logging.info('Device found at {}'.format(device_path))
-                return device_path
-
-        except dbus.exceptions.DBusException as e:
-            log_msg = 'Couldn\'t reach device: {}'.format(str(e))
-            logging.debug(log_msg)
-
-        logging.debug('No device found at {}'.format(device_path))
-        return None
-
-    @xmlrpc_server.dbus_safe(False)
-    def _setup_pairing_agent(self, pin):
-        """Initializes and resiters a PairingAgent to handle authentication.
-
-        @param pin: The pin code this agent will answer.
-
-        """
-        if self._pairing_agent:
-            logging.info(
-                    'Removing the old agent before initializing a new one')
-            self._pairing_agent.remove_from_connection()
-            self._pairing_agent = None
-        self._pairing_agent = PairingAgent(pin, self._system_bus,
-                                           self.AGENT_PATH)
-        agent_manager = dbus.Interface(
-                self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                            self.BLUEZ_AGENT_MANAGER_PATH),
-                self.BLUEZ_AGENT_MANAGER_IFACE)
-        try:
-            agent_obj = self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                                    self.AGENT_PATH)
-            agent_manager.RegisterAgent(agent_obj,
-                                        dbus.String(self._capability))
-        except dbus.exceptions.DBusException as e:
-            if e.get_dbus_name() == self.BLUEZ_ERROR_ALREADY_EXISTS:
-                logging.info('Unregistering old agent and registering the new')
-                agent_manager.UnregisterAgent(agent_obj)
-                agent_manager.RegisterAgent(agent_obj,
-                                            dbus.String(self._capability))
-            else:
-                logging.error('Error setting up pin agent: %s', e)
-                raise
-        logging.info('Agent registered: %s', self.AGENT_PATH)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _is_paired(self, device):
-        """Checks if a device is paired.
-
-        @param device: An 'org.bluez.Device1' interface to the device.
-
-        @returns: True if device is paired. False otherwise.
-
-        """
-        props = dbus.Interface(device, dbus.PROPERTIES_IFACE)
-        paired = props.Get(self.BLUEZ_DEVICE_IFACE, 'Paired')
-        return bool(paired)
-
-    @xmlrpc_server.dbus_safe(False)
-    def device_is_paired(self, address):
-        """Checks if a device is paired.
-
-        @param address: address of the device.
-
-        @returns: True if device is paired. False otherwise.
-
-        """
-        device = self._find_device(address)
-        if not device:
-            logging.error('Device not found')
-            return False
-        return self._is_paired(device)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _is_connected(self, device):
-        """Checks if a device is connected.
-
-        @param device: An 'org.bluez.Device1' interface to the device.
-
-        @returns: True if device is connected. False otherwise.
-
-        """
-        props = dbus.Interface(device, dbus.PROPERTIES_IFACE)
-        connected = props.Get(self.BLUEZ_DEVICE_IFACE, 'Connected')
-        logging.info('Got connected = %r', connected)
-        return bool(connected)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _set_trusted_by_device(self, device, trusted=True):
-        """Set the device trusted by device object.
-
-        @param device: the device object to set trusted.
-        @param trusted: True or False indicating whether to set trusted or not.
-
-        @returns: True if successful. False otherwise.
-
-        """
-        try:
-            properties = dbus.Interface(device, self.DBUS_PROP_IFACE)
-            properties.Set(self.BLUEZ_DEVICE_IFACE, 'Trusted',
-                           dbus.Boolean(trusted, variant_level=1))
-            return True
-        except Exception as e:
-            logging.error('_set_trusted_by_device: %s', e)
-        except:
-            logging.error('_set_trusted_by_device: unexpected error')
-        return False
-
-    @xmlrpc_server.dbus_safe(False)
-    def _set_trusted_by_path(self, device_path, trusted=True):
-        """Set the device trusted by the device path.
-
-        @param device_path: the object path of the device.
-        @param trusted: True or False indicating whether to set trusted or not.
-
-        @returns: True if successful. False otherwise.
-
-        """
-        try:
-            device = self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                                 device_path)
-            return self._set_trusted_by_device(device, trusted)
-        except Exception as e:
-            logging.error('_set_trusted_by_path: %s', e)
-        except:
-            logging.error('_set_trusted_by_path: unexpected error')
-        return False
-
-    @xmlrpc_server.dbus_safe(False)
-    def set_trusted(self, address, trusted=True):
-        """Set the device trusted by address.
-
-        @param address: The bluetooth address of the device.
-        @param trusted: True or False indicating whether to set trusted or not.
-
-        @returns: True if successful. False otherwise.
-
-        """
-        try:
-            device = self._find_device(address)
-            return self._set_trusted_by_device(device, trusted)
-        except Exception as e:
-            logging.error('set_trusted: %s', e)
-        except:
-            logging.error('set_trusted: unexpected error')
-        return False
-
-    @xmlrpc_server.dbus_safe(False)
-    def pair_legacy_device(self, address, pin, trusted, timeout=60):
-        """Pairs a device with a given pin code.
-
-        Registers an agent that handles the pin code request and
-        pairs a device with known pin code. After pairing, this function will
-        automatically connect to the device as well (prevents timing issues
-        between pairing and connect and reduces overall test execution time).
-
-        @param address: Address of the device to pair.
-        @param pin: The pin code of the device to pair.
-        @param trusted: indicating whether to set the device trusted.
-        @param timeout: The timeout in seconds for pairing.
-
-        @returns: True on success. False otherwise.
-
-        """
-
-        def connect_reply():
-            """Handler when connect succeeded."""
-            logging.info('Device connected: %s', device_path)
-            mainloop.quit()
-
-        def connect_error(error):
-            """Handler when connect failed.
-
-            @param error: one of the errors defined in org.bluez.Error
-            representing the error in connect.
-            """
-            logging.error('Connect device failed: %s', error)
-            mainloop.quit()
-
-        def pair_reply():
-            """Handler when pairing succeeded."""
-            logging.info('Device paired: %s', device_path)
-            if trusted:
-                self._set_trusted_by_path(device_path, trusted=True)
-                logging.info('Device trusted: %s', device_path)
-
-            # On finishing pairing, also connect; let connect result exit
-            # mainloop instead
-            device.Connect(reply_handler=connect_reply,
-                           error_handler=connect_error,
-                           timeout=timeout * 1000)
-
-        def pair_error(error):
-            """Handler when pairing failed.
-
-            @param error: one of errors defined in org.bluez.Error representing
-                          the error in pairing.
-
-            """
-            try:
-                error_name = error.get_dbus_name()
-                if error_name == 'org.freedesktop.DBus.Error.NoReply':
-                    logging.error('Timed out after %d s. Cancelling pairing.',
-                                  timeout)
-                    device.CancelPairing()
-                else:
-                    logging.error('Pairing device failed: %s', error)
-            finally:
-                mainloop.quit()
-
-        device = self._find_device(address)
-        if not device:
-            logging.error('Device not found')
-            return False
-
-        device_path = device.object_path
-        logging.info('Device %s is found.', device.object_path)
-
-        self._setup_pairing_agent(pin)
-        mainloop = gobject.MainLoop()
-
-        try:
-            if not self._is_paired(device):
-                logging.info('Device is not paired. Pair and Connect.')
-                device.Pair(reply_handler=pair_reply,
-                            error_handler=pair_error,
-                            timeout=timeout * 1000)
-                mainloop.run()
-            elif not self._is_connected(device):
-                logging.info('Device is already paired. Connect.')
-                device.Connect(reply_handler=connect_reply,
-                               error_handler=connect_error,
-                               timeout=timeout * 1000)
-                mainloop.run()
-        except Exception as e:
-            logging.error('Exception %s in pair_legacy_device', e)
-            return False
-
-        return self._is_paired(device) and self._is_connected(device)
-
-    @xmlrpc_server.dbus_safe(False)
-    def remove_device_object(self, address):
-        """Removes a device object and the pairing information.
-
-        Calls RemoveDevice method to remove remote device
-        object and the pairing information.
-
-        @param address: Address of the device to unpair.
-
-        @returns: True on success. False otherwise.
-
-        """
-        device = self._find_device(address)
-        if not device:
-            logging.error('Device not found')
-            return False
-        self._adapter.RemoveDevice(device.object_path,
-                                   dbus_interface=self.BLUEZ_ADAPTER_IFACE)
-        return True
-
-    @xmlrpc_server.dbus_safe(False)
-    def connect_device(self, address):
-        """Connects a device.
-
-        Connects a device if it is not connected.
-
-        @param address: Address of the device to connect.
-
-        @returns: True on success. False otherwise.
-
-        """
-        device = self._find_device(address)
-        if not device:
-            logging.error('Device not found')
-            return False
-        if self._is_connected(device):
-            logging.info('Device is already connected')
-            return True
-        device.Connect()
-        return self._is_connected(device)
-
-    @xmlrpc_server.dbus_safe(False)
-    def device_is_connected(self, address):
-        """Checks if a device is connected.
-
-        @param address: Address of the device to connect.
-
-        @returns: True if device is connected. False otherwise.
-
-        """
-        device = self._find_device(address)
-        if not device:
-            logging.error('Device not found')
-            return False
-        return self._is_connected(device)
-
-    @xmlrpc_server.dbus_safe(False)
-    def disconnect_device(self, address):
-        """Disconnects a device.
-
-        Disconnects a device if it is connected.
-
-        @param address: Address of the device to disconnect.
-
-        @returns: True on success. False otherwise.
-
-        """
-        device = self._find_device(address)
-        if not device:
-            logging.error('Device not found')
-            return False
-        if not self._is_connected(device):
-            logging.info('Device is not connected')
-            return True
-        device.Disconnect()
-        return not self._is_connected(device)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _device_services_resolved(self, device):
-        """Checks if services are resolved.
-
-        @param device: An 'org.bluez.Device1' interface to the device.
-
-        @returns: True if services are resolved. False otherwise.
-
-        """
-        logging.info('device for services resolved: %s', device)
-        props = dbus.Interface(device, dbus.PROPERTIES_IFACE)
-        resolved = props.Get(self.BLUEZ_DEVICE_IFACE, 'ServicesResolved')
-        logging.info('Services resolved = %r', resolved)
-        return bool(resolved)
-
-    @xmlrpc_server.dbus_safe(False)
-    def device_services_resolved(self, address):
-        """Checks if service discovery is complete on a device.
-
-        Checks whether service discovery has been completed.
-
-        @param address: Address of the remote device.
-
-        @returns: True on success. False otherwise.
-
-        """
-        device = self._find_device(address)
-        if not device:
-            logging.error('Device not found')
-            return False
-
-        if not self._is_connected(device):
-            logging.info('Device is not connected')
-            return False
-
-        return self._device_services_resolved(device)
-
-    def btmon_start(self):
-        """Start btmon monitoring."""
-        self.btmon.start()
-
-    def btmon_stop(self):
-        """Stop btmon monitoring."""
-        self.btmon.stop()
-
-    def btmon_get(self, search_str, start_str):
-        """Get btmon output contents.
-
-        @param search_str: only lines with search_str would be kept.
-        @param start_str: all lines before the occurrence of start_str would be
-                filtered.
-
-        @returns: the recorded btmon output.
-
-        """
-        return self.btmon.get_contents(search_str=search_str,
-                                       start_str=start_str)
-
-    def btmon_find(self, pattern_str):
-        """Find if a pattern string exists in btmon output.
-
-        @param pattern_str: the pattern string to find.
-
-        @returns: True on success. False otherwise.
-
-        """
-        return self.btmon.find(pattern_str)
-
-    def messages_start(self):
-        """Start messages monitoring.
-
-        @returns: True if logging started successfully, else False
-        """
-
-        try:
-            self.messages.StartRecording()
-            return True
-
-        except Exception as e:
-            logging.error('Failed to start log recording with error: %s', e)
-
-        return False
-
-    def messages_stop(self):
-        """Stop messages monitoring.
-
-        @returns: True if logs were successfully gathered since logging started,
-                else False
-        """
-        try:
-            self.messages.StopRecording()
-            return True
-
-        except Exception as e:
-            logging.error('Failed to stop log recording with error: %s', e)
-
-        return False
-
-    def messages_find(self, pattern_str):
-        """Find if a pattern string exists in messages output.
-
-        @param pattern_str: the pattern string to find.
-
-        @returns: True on success. False otherwise.
-
-        """
-        return self.messages.LogContains(pattern_str)
-
-    @xmlrpc_server.dbus_safe(False)
-    def dbus_async_method(self, dbus_method, reply_handler, error_handler,
-                          *args):
-        """Run an async dbus method.
-
-        @param dbus_method: the dbus async method to invoke.
-        @param reply_handler: the reply handler for the dbus method.
-        @param error_handler: the error handler for the dbus method.
-        @param *args: additional arguments for the dbus method.
-
-        @returns: an empty string '' on success;
-                  None if there is no _advertising interface manager; and
-                  an error string if the dbus method fails or exception occurs
-
-        """
-
-        def successful_cb():
-            """Called when the dbus_method completed successfully."""
-            reply_handler()
-            self.dbus_cb_msg = ''
-            self._dbus_mainloop.quit()
-
-        def error_cb(error):
-            """Called when the dbus_method failed."""
-            error_handler(error)
-            self.dbus_cb_msg = str(error)
-            self._dbus_mainloop.quit()
-
-        # Call dbus_method with handlers.
-        try:
-            dbus_method(*args,
-                        reply_handler=successful_cb,
-                        error_handler=error_cb)
-        except Exception as e:
-            logging.error('Exception %s in dbus_async_method ', e)
-            return str(e)
-
-        self._dbus_mainloop.run()
-
-        return self.dbus_cb_msg
-
-
-    def advmon_check_manager_interface_exist(self):
-        """Check if AdvertisementMonitorManager1 interface is available.
-
-        @returns: True if Manager interface is available, False otherwise.
-
-        """
-        objects = self._bluez.GetManagedObjects(
-                dbus_interface=self.BLUEZ_MANAGER_IFACE)
-        for _, ifaces in six.iteritems(objects):
-            if self.BLUEZ_ADV_MONITOR_MANAGER_IFACE in ifaces:
-                return True
-
-        return False
-
-
-    def advmon_read_supported_types(self):
-        """Read the Advertisement Monitor supported monitor types.
-
-        Reads the value of 'SupportedMonitorTypes' property of the
-        AdvertisementMonitorManager1 interface on the adapter.
-
-        @returns: the list of the supported monitor types.
-
-        """
-        types = self._adapter.Get(self.BLUEZ_ADV_MONITOR_MANAGER_IFACE,
-                                  'SupportedMonitorTypes',
-                                  dbus_interface=self.DBUS_PROP_IFACE)
-        return dbus_util.dbus2primitive(types)
-
-    def advmon_read_supported_features(self):
-        """Read the Advertisement Monitor supported features.
-
-        Reads the value of 'SupportedFeatures' property of the
-        AdvertisementMonitorManager1 interface on the adapter.
-
-        @returns: the list of the supported features.
-
-        """
-        features = self._adapter.Get(self.BLUEZ_ADV_MONITOR_MANAGER_IFACE,
-                                     'SupportedFeatures',
-                                     dbus_interface=self.DBUS_PROP_IFACE)
-        return dbus_util.dbus2primitive(features)
-
-    def advmon_create_app(self):
-        """Create an advertisement monitor app.
-
-        @returns: app id, once the app is created.
-
-        """
-        return self.advmon_appmgr.create_app()
-
-    def advmon_exit_app(self, app_id):
-        """Exit an advertisement monitor app.
-
-        @param app_id: the app id.
-
-        @returns: True on success, False otherwise.
-
-        """
-        return self.advmon_appmgr.exit_app(app_id)
-
-    def advmon_kill_app(self, app_id):
-        """Kill an advertisement monitor app by sending SIGKILL.
-
-        @param app_id: the app id.
-
-        @returns: True on success, False otherwise.
-
-        """
-        return self.advmon_appmgr.kill_app(app_id)
-
-    def advmon_register_app(self, app_id):
-        """Register an advertisement monitor app.
-
-        @param app_id: the app id.
-
-        @returns: True on success, False otherwise.
-
-        """
-        return self.advmon_appmgr.register_app(app_id)
-
-    def advmon_unregister_app(self, app_id):
-        """Unregister an advertisement monitor app.
-
-        @param app_id: the app id.
-
-        @returns: True on success, False otherwise.
-
-        """
-        return self.advmon_appmgr.unregister_app(app_id)
-
-    def advmon_add_monitor(self, app_id, monitor_data):
-        """Create an Advertisement Monitor object.
-
-        @param app_id: the app id.
-        @param monitor_data: the list containing monitor type, RSSI filter
-                             values and patterns.
-
-        @returns: monitor id, once the monitor is created, None otherwise.
-
-        """
-        return self.advmon_appmgr.add_monitor(app_id, monitor_data)
-
-    def advmon_remove_monitor(self, app_id, monitor_id):
-        """Remove the Advertisement Monitor object.
-
-        @param app_id: the app id.
-        @param monitor_id: the monitor id.
-
-        @returns: True on success, False otherwise.
-
-        """
-        return self.advmon_appmgr.remove_monitor(app_id, monitor_id)
-
-    def advmon_get_event_count(self, app_id, monitor_id, event):
-        """Read the count of a particular event on the given monitor.
-
-        @param app_id: the app id.
-        @param monitor_id: the monitor id.
-        @param event: name of the specific event or 'All' for all events.
-
-        @returns: count of the specific event or dict of counts of all events.
-
-        """
-        return self.advmon_appmgr.get_event_count(app_id, monitor_id, event)
-
-    def advmon_reset_event_count(self, app_id, monitor_id, event):
-        """Reset the count of a particular event on the given monitor.
-
-        @param app_id: the app id.
-        @param monitor_id: the monitor id.
-        @param event: name of the specific event or 'All' for all events.
-
-        @returns: True on success, False otherwise.
-
-        """
-        return self.advmon_appmgr.reset_event_count(app_id, monitor_id, event)
-
-    def advmon_interleave_scan_logger_start(self):
-        """ Start interleave logger recording
-        """
-        self.advmon_interleave_logger.StartRecording()
-
-    def advmon_interleave_scan_logger_stop(self):
-        """ Stop interleave logger recording
-
-        @returns: True if logs were successfully collected,
-                  False otherwise.
-
-        """
-        return self.advmon_interleave_logger.StopRecording()
-
-    def advmon_interleave_scan_logger_get_records(self):
-        """ Get records in previous log collections
-
-        @returns: a list of records, where each item is a record of
-                  interleave |state| and the |time| the state starts.
-                  |state| could be {'no filter', 'allowlist'}
-                  |time| is system time in sec
-
-        """
-        return self.advmon_interleave_logger.records
-
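-    # Illustrative example only (the exact field layout is defined by the
-    # interleave logger, so treat this as an assumption):
-    #
-    #     [{'time': 1632212400.123, 'state': 'allowlist'},
-    #      {'time': 1632212401.456, 'state': 'no filter'}]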
-    def advmon_interleave_scan_logger_get_cancel_events(self):
-        """ Get cancel events in previous log collections
-
-        @returns: a list of cancel |time| when an interleave cancel event log
-                  was found.
-                  |time| is system time in sec
-
-        """
-        return self.advmon_interleave_logger.cancel_events
-
-    def register_advertisement(self, advertisement_data):
-        """Register an advertisement.
-
-        Note that rpc supports only conformable types. Hence, a
-        dict about the advertisement is passed as a parameter such
-        that the advertisement object could be constructed on the host.
-
-        @param advertisement_data: a dict of the advertisement to register.
-
-        @returns: True on success. False otherwise.
-
-        """
-        adv = advertisement.Advertisement(self._system_bus, advertisement_data)
-        self.advertisements.append(adv)
-        return self.dbus_async_method(
-                self._advertising.RegisterAdvertisement,
-                # reply handler
-                lambda: logging.info('register_advertisement: succeeded.'),
-                # error handler
-                lambda error: logging.error(
-                        'register_advertisement: failed: %s', str(error)),
-                # other arguments
-                adv.get_path(),
-                dbus.Dictionary({}, signature='sv'))
-
-    def unregister_advertisement(self, advertisement_data):
-        """Unregister an advertisement.
-
-        Note that to unregister an advertisement, it is required to use
-        the same self._advertising interface manager. This is because
-        bluez only allows the same sender to invoke the
-        UnregisterAdvertisement method. Hence, make sure that bluetoothd is
-        not restarted and that self.start_bluetoothd() is not executed between
-        registering and unregistering an advertisement.
-
-        @param advertisement_data: a dict of the advertisements to unregister.
-
-        @returns: True on success. False otherwise.
-
-        """
-        path = advertisement_data.get('Path')
-        for index, adv in enumerate(self.advertisements):
-            if adv.get_path() == path:
-                break
-        else:
-            logging.error('Fail to find the advertisement under the path: %s',
-                          path)
-            return False
-
-        result = self.dbus_async_method(
-                self._advertising.UnregisterAdvertisement,
-                # reply handler
-                lambda: logging.info('unregister_advertisement: succeeded.'),
-                # error handler
-                lambda error: logging.error(
-                        'unregister_advertisement: failed: %s', str(error)),
-                # other arguments
-                adv.get_path())
-
-        # Call remove_from_connection() so that the same path could be reused.
-        adv.remove_from_connection()
-        del self.advertisements[index]
-
-        return result
-
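-    # Usage sketch (hypothetical names, not from the original code): register
-    # and unregister must go through the same facade instance so that the
-    # same self._advertising sender is used, e.g.:
-    #
-    #     facade.register_advertisement(adv_data)
-    #     ...  # do not restart bluetoothd in between
-    #     facade.unregister_advertisement(adv_data)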
-    def set_advertising_intervals(self, min_adv_interval_ms,
-                                  max_adv_interval_ms):
-        """Set advertising intervals.
-
-        @param min_adv_interval_ms: the min advertising interval in ms.
-        @param max_adv_interval_ms: the max advertising interval in ms.
-
-        @returns: True on success. False otherwise.
-
-        """
-        return self.dbus_async_method(
-                self._advertising.SetAdvertisingIntervals,
-                # reply handler
-                lambda: logging.info('set_advertising_intervals: succeeded.'),
-                # error handler
-                lambda error: logging.error(
-                        'set_advertising_intervals: failed: %s', str(error)),
-                # other arguments
-                dbus.UInt16(min_adv_interval_ms),
-                dbus.UInt16(max_adv_interval_ms))
-
-    def reset_advertising(self):
-        """Reset advertising.
-
-        This includes un-registering all advertisements, reset advertising
-        intervals, and disable advertising.
-
-        @returns: True on success. False otherwise.
-
-        """
-        # It is required to execute remove_from_connection() to unregister the
-        # object-path handler of each advertisement. In this way, we could
-        # register an advertisement with the same path repeatedly.
-        for adv in self.advertisements:
-            adv.remove_from_connection()
-        del self.advertisements[:]
-
-        return self.dbus_async_method(
-                self._advertising.ResetAdvertising,
-                # reply handler
-                lambda: logging.info('reset_advertising: succeeded.'),
-                # error handler
-                lambda error: logging.error('reset_advertising: failed: %s',
-                                            str(error)))
-
-    def create_audio_record_directory(self, audio_record_dir):
-        """Create the audio recording directory.
-
-        @param audio_record_dir: the audio recording directory
-
-        @returns: True on success. False otherwise.
-        """
-        try:
-            if not os.path.exists(audio_record_dir):
-                os.makedirs(audio_record_dir)
-            return True
-        except Exception as e:
-            logging.error('Failed to create %s on the DUT: %s',
-                          audio_record_dir, e)
-            return False
-
-    def start_capturing_audio_subprocess(self, audio_data, recording_device):
-        """Start capturing audio in a subprocess.
-
-        @param audio_data: the audio test data
-        @param recording_device: which device recorded the audio,
-                possible values are 'recorded_by_dut' or 'recorded_by_peer'
-
-        @returns: True on success. False otherwise.
-        """
-        audio_data = json.loads(audio_data)
-        return self._cras_test_client.start_capturing_subprocess(
-                audio_data[recording_device],
-                sample_format=audio_data['format'],
-                channels=audio_data['channels'],
-                rate=audio_data['rate'],
-                duration=audio_data['duration'])
-
-    def stop_capturing_audio_subprocess(self):
-        """Stop capturing audio.
-
-        @returns: True on success. False otherwise.
-        """
-        return self._cras_test_client.stop_capturing_subprocess()
-
-    def _generate_playback_file(self, audio_data):
-        """Generate the playback file if it does not exist yet.
-
-        Some audio test files may be large. Generate them on the fly
-        to save the storage of the source tree.
-
-        @param audio_data: the audio test data
-        """
-        if not os.path.exists(audio_data['file']):
-            data_format = dict(file_type='raw',
-                               sample_format='S16_LE',
-                               channel=audio_data['channels'],
-                               rate=audio_data['rate'])
-
-            # Make the audio file a bit longer to handle any delay
-            # issue in capturing.
-            duration = audio_data['duration'] + 3
-            audio_test_data_module.GenerateAudioTestData(
-                    data_format=data_format,
-                    path=audio_data['file'],
-                    duration_secs=duration,
-                    frequencies=audio_data['frequencies'])
-            logging.debug("Raw file generated: %s", audio_data['file'])
-
-    def start_playing_audio_subprocess(self, audio_data):
-        """Start playing audio in a subprocess.
-
-        @param audio_data: the audio test data
-
-        @returns: True on success. False otherwise.
-        """
-        audio_data = json.loads(audio_data)
-        self._generate_playback_file(audio_data)
-        try:
-            return self._cras_test_client.start_playing_subprocess(
-                    audio_data['file'],
-                    channels=audio_data['channels'],
-                    rate=audio_data['rate'],
-                    duration=audio_data['duration'])
-        except Exception as e:
-            logging.error("start_playing_subprocess() failed: %s", str(e))
-            return False
-
-    def stop_playing_audio_subprocess(self):
-        """Stop playing audio in the subprocess.
-
-        @returns: True on success. False otherwise.
-        """
-        return self._cras_test_client.stop_playing_subprocess()
-
-    def play_audio(self, audio_data):
-        """Play audio.
-
-        It blocks until it has completed playing back the audio.
-
-        @param audio_data: the audio test data
-
-        @returns: True on success. False otherwise.
-        """
-        audio_data = json.loads(audio_data)
-        self._generate_playback_file(audio_data)
-        return self._cras_test_client.play(audio_data['file'],
-                                           channels=audio_data['channels'],
-                                           rate=audio_data['rate'],
-                                           duration=audio_data['duration'])
-
-    def check_audio_frames_legitimacy(self, audio_test_data, recording_device,
-                                      recorded_file):
-        """Get the number of frames in the recorded audio file.
-
-        @param audio_test_data: the audio test data
-        @param recording_device: which device recorded the audio,
-                possible values are 'recorded_by_dut' or 'recorded_by_peer'
-        @param recorded_file: the recorded file name
-
-        @returns: True if audio frames are legitimate.
-        """
-        if bool(recorded_file):
-            recorded_filename = recorded_file
-        else:
-            audio_test_data = json.loads(audio_test_data)
-            recorded_filename = audio_test_data[recording_device]
-
-        if recorded_filename.endswith('.raw'):
-            # Make sure that the recorded file does not contain all zeros.
-            filesize = os.path.getsize(recorded_filename)
-            cmd_str = 'cmp -s -n %d %s /dev/zero' % (filesize,
-                                                     recorded_filename)
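-            # Note: 'cmp -s -n N <file> /dev/zero' exits 0 only when the first
-            # N bytes of the file are all zero, so a non-zero exit code means
-            # the recording contains non-silent data.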
-            try:
-                result = subprocess.call(cmd_str.split())
-                return result != 0
-            except Exception as e:
-                logging.error("Failed: %s (%s)", cmd_str, str(e))
-                return False
-        else:
-            # The recorded wav file should not be empty.
-            wav_file = check_quality.WaveFile(recorded_filename)
-            return wav_file.get_number_frames() > 0
-
-    def convert_audio_sample_rate(self, input_file, out_file, test_data,
-                                  new_rate):
-        """Convert audio file to new sample rate.
-
-        @param input_file: Path to file to upsample.
-        @param out_file: Path to create upsampled file.
-        @param test_data: Dictionary with information about file.
-        @param new_rate: New rate to upsample file to.
-
-        @returns: True if upsampling succeeded, False otherwise.
-        """
-        test_data = json.loads(test_data)
-        logging.debug('Resampling file {} to new rate {}'.format(
-                input_file, new_rate))
-
-        convert_format(input_file,
-                       test_data['channels'],
-                       test_data['bit_width'],
-                       test_data['rate'],
-                       out_file,
-                       test_data['channels'],
-                       test_data['bit_width'],
-                       new_rate,
-                       1.0,
-                       use_src_header=True,
-                       use_dst_header=True)
-
-        return os.path.isfile(out_file)
-
-    def trim_wav_file(self,
-                      in_file,
-                      out_file,
-                      new_duration,
-                      test_data,
-                      tolerance=0.1):
-        """Trim long file to desired length.
-
-        Trims audio file to length by cutting out silence from beginning and
-        end.
-
-        @param in_file: Path to audio file to be trimmed.
-        @param out_file: Path to trimmed audio file to create.
-        @param new_duration: A float representing the desired duration of
-                the resulting trimmed file.
-        @param test_data: Dictionary containing information about the test file.
-        @param tolerance: (optional) A float representing the allowable
-                difference between trimmed file length and desired duration
-
-        @returns: True if file was trimmed successfully, False otherwise.
-        """
-        test_data = json.loads(test_data)
-        trim_silence_from_wav_file(in_file, out_file, new_duration)
-        measured_length = get_file_length(out_file, test_data['channels'],
-                                          test_data['bit_width'],
-                                          test_data['rate'])
-        return abs(measured_length - new_duration) <= tolerance
-
-    def unzip_audio_test_data(self, tar_path, data_dir):
-        """Unzip audio test data files.
-
-        @param tar_path: Path to audio test data tarball on DUT.
-        @param data_dir: Path to the directory to extract the test data into.
-
-        @returns: True if audio test data folder exists, False otherwise.
-        """
-        logging.debug('Downloading audio test data on DUT')
-        # Build the path of the directory the test data is extracted to from
-        # the tarball name without its extension, e.g. <dir>/file.ext is
-        # extracted to data_dir/file/
-        audio_test_dir = os.path.join(
-                data_dir,
-                os.path.split(tar_path)[1].split('.', 1)[0])
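-        # For illustration (hypothetical paths): tar_path '/tmp/audio_data.tar'
-        # with data_dir '/tmp' yields audio_test_dir '/tmp/audio_data'.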
-
-        unzip_cmd = 'tar -xf {0} -C {1}'.format(tar_path, data_dir)
-
-        unzip_proc = subprocess.Popen(unzip_cmd.split(),
-                                      stdout=subprocess.PIPE,
-                                      stderr=subprocess.PIPE)
-        _, stderr = unzip_proc.communicate()
-
-        if stderr:
-            logging.error('Error occurred in unzipping audio data: {}'.format(
-                    str(stderr)))
-            return False
-
-        return unzip_proc.returncode == 0 and os.path.isdir(audio_test_dir)
-
-    def convert_raw_to_wav(self, input_file, output_file, test_data):
-        """Convert raw audio file to wav file.
-
-        @param input_file: the location of the raw file
-        @param output_file: the location to place the resulting wav file
-        @param test_data: the data for the file being converted
-
-        @returns: True if conversion was successful, False otherwise
-        """
-        test_data = json.loads(test_data)
-        convert_raw_file(input_file, test_data['channels'],
-                         test_data['bit_width'], test_data['rate'],
-                         output_file)
-
-        return os.path.isfile(output_file)
-
-    def get_primary_frequencies(self, audio_test_data, recording_device,
-                                recorded_file):
-        """Get primary frequencies of the audio test file.
-
-        @param audio_test_data: the audio test data
-        @param recording_device: which device recorded the audio,
-                possible values are 'recorded_by_dut' or 'recorded_by_peer'
-        @param recorded_file: the recorded file name
-
-        @returns: a list of primary frequencies of channels in the audio file
-        """
-        audio_test_data = json.loads(audio_test_data)
-
-        if bool(recorded_file):
-            recorded_filename = recorded_file
-        else:
-            recorded_filename = audio_test_data[recording_device]
-
-        args = CheckQualityArgsClass(filename=recorded_filename,
-                                     rate=audio_test_data['rate'],
-                                     channel=audio_test_data['channels'],
-                                     bit_width=16)
-        raw_data, rate = check_quality.read_audio_file(args)
-        checker = check_quality.QualityChecker(raw_data, rate)
-        # The highest frequency recorded would be near 24 kHz
-        # as the max sample rate is 48000 in our tests.
-        # So let's set ignore_high_freq to 48000.
-        checker.do_spectral_analysis(ignore_high_freq=48000,
-                                     check_quality=False,
-                                     quality_params=None)
-        spectra = checker._spectrals
-        primary_freq = [
-                float(spectra[i][0][0]) if spectra[i] else 0
-                for i in range(len(spectra))
-        ]
-        primary_freq.sort()
-        return primary_freq
-
-    def enable_wbs(self, value):
-        """Enable or disable wideband speech (wbs) per the value.
-
-        @param value: True to enable wbs.
-
-        @returns: True if the operation succeeds.
-        """
-        return self._cras_test_client.enable_wbs(value)
-
-    def set_player_playback_status(self, status):
-        """Set playback status for the registered media player.
-
-        @param status: playback status in string.
-
-        """
-        return self._cras_test_client.set_player_playback_status(status)
-
-    def set_player_position(self, position):
-        """Set media position for the registered media player.
-
-        @param position: position in micro seconds.
-
-        """
-        return self._cras_test_client.set_player_position(position)
-
-    def set_player_metadata(self, metadata):
-        """Set metadata for the registered media player.
-
-        @param metadata: dictionary of media metadata.
-
-        """
-        return self._cras_test_client.set_player_metadata(metadata)
-
-    def set_player_length(self, length):
-        """Set media length for the registered media player.
-
-        Media length is part of the metadata. However, without specifying its
-        type as int64, dbus-python will guess the variant type to be int32 by
-        default. It is kept separate from the metadata function so the data
-        can be prepared differently.
-
-        @param length: length in micro seconds.
-
-        """
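-        # Without the explicit Int64 below, dbus-python would marshal the
-        # value as int32, which overflows for lengths over 2**31 microseconds
-        # (roughly 36 minutes of media).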
-        length_variant = dbus.types.Int64(length, variant_level=1)
-        length_dict = dbus.Dictionary({'length': length_variant},
-                                      signature='sv')
-        return self._cras_test_client.set_player_length(length_dict)
-
-    def select_input_device(self, device_name):
-        """Select the audio input device.
-
-        @param device_name: the name of the Bluetooth peer device
-
-        @returns: True if the operation succeeds.
-        """
-        return self._cras_test_client.select_input_device(device_name)
-
-    @xmlrpc_server.dbus_safe(None)
-    def select_output_node(self, node_type):
-        """Select the audio output node.
-
-        @param node_type: the node type of the Bluetooth peer device
-
-        @returns: True if the operation succeeds.
-        """
-        return cras_utils.set_single_selected_output_node(node_type)
-
-    @xmlrpc_server.dbus_safe(None)
-    def get_selected_output_device_type(self):
-        """Get the selected audio output node type.
-
-        @returns: the node type of the selected output device.
-        """
-        # Note: should convert the dbus.String to the regular string.
-        return str(cras_utils.get_selected_output_device_type())
-
-    def get_gatt_attributes_map(self, address):
-        """Return a JSON formatted string of the GATT attributes of a device,
-        keyed by UUID
-        @param address: a string of the MAC address of the device
-
-        @return: JSON formatted string storing the nested structure of the
-        attributes. Each attribute has 'path' and
-        ['characteristics' | 'descriptors'], which store their object path and
-        children respectively.
-
-        """
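-        # Illustrative shape of the returned structure (values elided):
-        #
-        #     {'services': {<uuid>: {'path': ..., 'characteristics':
-        #             {<uuid>: {'path': ..., 'descriptors':
-        #                     {<uuid>: {'path': ...}}}}}}}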
-        attribute_map = dict()
-
-        device_object_path = self._get_device_path(address)
-        objects = self._bluez.GetManagedObjects(
-                dbus_interface=self.BLUEZ_MANAGER_IFACE, byte_arrays=False)
-        service_map = self._get_service_map(device_object_path, objects)
-
-        servs = dict()
-        attribute_map['services'] = servs
-
-        for uuid, path in service_map.items():
-
-            servs[uuid] = dict()
-            serv = servs[uuid]
-
-            serv['path'] = path
-            serv['characteristics'] = dict()
-            chrcs = serv['characteristics']
-
-            chrcs_map = self._get_characteristic_map(path, objects)
-            for uuid, path in chrcs_map.items():
-                chrcs[uuid] = dict()
-                chrc = chrcs[uuid]
-
-                chrc['path'] = path
-                chrc['descriptors'] = dict()
-                descs = chrc['descriptors']
-
-                descs_map = self._get_descriptor_map(path, objects)
-
-                for uuid, path in descs_map.items():
-                    descs[uuid] = dict()
-                    desc = descs[uuid]
-
-                    desc['path'] = path
-
-        return json.dumps(attribute_map)
-
-    def _get_gatt_interface(self, uuid, object_path, interface):
-        """Get dbus interface by uuid
-        @param uuid: a string of uuid
-        @param object_path: a string of the object path of the service
-        @param interface: a string of the D-Bus interface name to get
-
-        @return: a dbus interface
-        """
-
-        return dbus.Interface(
-                self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                            object_path), interface)
-
-    def get_gatt_service_property(self, object_path, property_name):
-        """Get property from a service attribute
-        @param object_path: a string of the object path of the service
-        @param property_name: a string of a property, ex: 'Value', 'UUID'
-
-        @return: the property on success,
-                 None otherwise
-
-        """
-        return self.get_gatt_attribute_property(object_path,
-                                                self.BLUEZ_GATT_SERV_IFACE,
-                                                property_name)
-
-    def get_gatt_characteristic_property(self, object_path, property_name):
-        """Get property from a characteristic attribute
-        @param object_path: a string of the object path of the characteristic
-        @param property_name: a string of a property, ex: 'Value', 'UUID'
-
-        @return: the property on success,
-                 None otherwise
-
-        """
-        return self.get_gatt_attribute_property(object_path,
-                                                self.BLUEZ_GATT_CHAR_IFACE,
-                                                property_name)
-
-    def get_gatt_descriptor_property(self, object_path, property_name):
-        """Get property from descriptor attribute
-        @param object_path: a string of the object path of the descriptor
-        @param property_name: a string of a property, ex: 'Value', 'UUID'
-
-        @return: the property on success,
-                 None otherwise
-
-        """
-        return self.get_gatt_attribute_property(object_path,
-                                                self.BLUEZ_GATT_DESC_IFACE,
-                                                property_name)
-
-    @xmlrpc_server.dbus_safe(None)
-    def get_gatt_attribute_property(self, object_path, interface,
-                                    property_name):
-        """Get property from attribute
-        @param object_path: a string of the object path
-        @param property_name: a string of a property, ex: 'Value', 'UUID'
-
-        @return: the property on success,
-                 None otherwise
-
-        """
-        gatt_object = self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                                  object_path)
-        prop = self._get_dbus_object_property(gatt_object, interface,
-                                              property_name)
-        logging.info(prop)
-        if isinstance(prop, dbus.ByteArray):
-            return _dbus_byte_array_to_b64_string(prop)
-        if isinstance(prop, dbus.Boolean):
-            return bool(prop)
-        if isinstance(prop, dbus.String):
-            return str(prop)
-        if isinstance(prop, dbus.ObjectPath):
-            return str(prop)
-        if isinstance(prop, dbus.Array):
-            return list(map(str, prop))
-        return prop
-
-    @xmlrpc_server.dbus_safe(None)
-    def gatt_characteristic_read_value(self, uuid, object_path):
-        """Perform method ReadValue on a characteristic attribute
-        @param uuid: a string of uuid
-        @param object_path: a string of the object path of the characteristic
-
-        @return: base64 string of dbus bytearray
-        """
-
-        dbus_interface = self._get_gatt_interface(uuid, object_path,
-                                                  self.BLUEZ_GATT_CHAR_IFACE)
-        value = dbus_interface.ReadValue(dbus.Dictionary({}, signature='sv'))
-        return _dbus_byte_array_to_b64_string(value)
-
-    @xmlrpc_server.dbus_safe(None)
-    def gatt_descriptor_read_value(self, uuid, object_path):
-        """Perform method ReadValue on a descriptor attribute
-        @param uuid: a string of uuid
-        @param object_path: a string of the object path of the descriptor
-
-        @return: base64 string of dbus bytearray
-        """
-
-        dbus_interface = self._get_gatt_interface(uuid, object_path,
-                                                  self.BLUEZ_GATT_DESC_IFACE)
-        value = dbus_interface.ReadValue(dbus.Dictionary({}, signature='sv'))
-        return _dbus_byte_array_to_b64_string(value)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _get_attribute_map(self, object_path, dbus_interface, objects):
-        """Gets a map of object paths under an object path.
-
-        Walks the object tree, and returns a map of UUIDs to object paths for
-        all resolved gatt objects.
-
-        @param object_path: The object path of the attribute to retrieve
-            gatt UUIDs and paths from.
-        @param objects: The managed objects.
-
-        @returns: A dictionary of object paths, keyed by UUID.
-
-        """
-        attr_map = {}
-
-        if object_path:
-            for path, ifaces in six.iteritems(objects):
-                if (dbus_interface in ifaces and path.startswith(object_path)):
-                    uuid = ifaces[dbus_interface]['UUID'].lower()
-                    attr_map[uuid] = path
-
-        else:
-            logging.warning('object_path %s is not valid', object_path)
-
-        return attr_map
-
-    def _get_service_map(self, device_path, objects):
-        """Gets a map of service paths for a device.
-
-        @param device_path: the object path of the device.
-        @param objects: The managed objects.
-        """
-        return self._get_attribute_map(device_path, self.BLUEZ_GATT_SERV_IFACE,
-                                       objects)
-
-    def _get_characteristic_map(self, serv_path, objects):
-        """Gets a map of characteristic paths for a service.
-
-        @param serv_path: the object path of the service.
-        @param objects: The managed objects.
-        """
-        return self._get_attribute_map(serv_path, self.BLUEZ_GATT_CHAR_IFACE,
-                                       objects)
-
-    def _get_descriptor_map(self, chrc_path, objects):
-        """Gets a map of descriptor paths for a characteristic.
-
-        @param chrc_path: the object path of the characteristic.
-        @param objects: The managed objects.
-        """
-        return self._get_attribute_map(chrc_path, self.BLUEZ_GATT_DESC_IFACE,
-                                       objects)
-
-    @xmlrpc_server.dbus_safe(None)
-    def _get_dbus_object_property(self, dbus_object, dbus_interface,
-                                  dbus_property):
-        """Get the property in an object.
-
-        @param dbus_object: a dbus object
-        @param dbus_property: a dbus property of the dbus object, as a string
-
-        @return: dbus type object if it succeeds, e.g. dbus.Boolean,
-                 dbus.String; None otherwise
-
-        """
-        return dbus_object.Get(dbus_interface,
-                               dbus_property,
-                               dbus_interface=dbus.PROPERTIES_IFACE)
-
-    @xmlrpc_server.dbus_safe(False)
-    def get_characteristic_map(self, address):
-        """Gets a map of characteristic paths for a device.
-
-        Walks the object tree, and returns a map of uuids to object paths for
-        all resolved gatt characteristics.
-
-        @param address: The MAC address of the device to retrieve
-            gatt characteristic uuids and paths from.
-
-        @returns: A dictionary of characteristic paths, keyed by uuid.
-
-        """
-        device_path = self._get_device_path(address)
-        char_map = {}
-
-        if device_path:
-            objects = self._bluez.GetManagedObjects(
-                    dbus_interface=self.BLUEZ_MANAGER_IFACE, byte_arrays=False)
-
-            for path, ifaces in six.iteritems(objects):
-                if (self.BLUEZ_GATT_CHAR_IFACE in ifaces
-                            and path.startswith(device_path)):
-                    uuid = ifaces[self.BLUEZ_GATT_CHAR_IFACE]['UUID'].lower()
-                    char_map[uuid] = path
-        else:
-            logging.warning('Device %s not in object tree.', address)
-
-        return char_map
-
-    @xmlrpc_server.dbus_safe(None)
-    def _get_char_object(self, uuid, address):
-        """Gets a characteristic object.
-
-        Gets a characteristic object for a given UUID and address.
-
-        @param uuid: The UUID of the characteristic, as a string.
-        @param address: The MAC address of the remote device.
-
-        @returns: A dbus interface for the characteristic if the uuid/address
-                      is in the object tree.
-                  None if the address/uuid is not found in the object tree.
-
-        """
-        path = self.get_characteristic_map(address).get(uuid)
-        if not path:
-            logging.error("path not found: %s %s", uuid, address)
-            return None
-        return dbus.Interface(
-                self._system_bus.get_object(self.BLUEZ_SERVICE_NAME, path),
-                self.BLUEZ_GATT_CHAR_IFACE)
-
-    @xmlrpc_server.dbus_safe(None)
-    def read_characteristic(self, uuid, address):
-        """Reads the value of a gatt characteristic.
-
-        Reads the current value of a gatt characteristic. Base64 encoding is
-        used for compatibility with the XML RPC interface.
-
-        @param uuid: The uuid of the characteristic to read, as a string.
-        @param address: The MAC address of the remote device.
-
-        @returns: A b64 encoded version of a byte array containing the value
-                      if the uuid/address is in the object tree.
-                  None if the uuid/address was not found in the object tree, or
-                      if a DBus exception was raised by the read operation.
-
-        """
-        char_obj = self._get_char_object(uuid, address)
-        if char_obj is None:
-            return None
-        value = char_obj.ReadValue(dbus.Dictionary({}, signature='sv'))
-        return _dbus_byte_array_to_b64_string(value)
-
-    @xmlrpc_server.dbus_safe(None)
-    def write_characteristic(self, uuid, address, value):
-        """Performs a write operation on a gatt characteristic.
-
-        Writes to a GATT characteristic on a remote device. Base64 encoding is
-        used for compatibility with the XML RPC interface.
-
-        @param uuid: The uuid of the characteristic to write to, as a string.
-        @param address: The MAC address of the remote device, as a string.
-        @param value: A byte array containing the data to write.
-
-        @returns: True if the write operation does not raise an exception.
-                  None if the uuid/address was not found in the object tree, or
-                      if a DBus exception was raised by the write operation.
-
-        """
-        char_obj = self._get_char_object(uuid, address)
-        if char_obj is None:
-            return None
-        dbus_value = _b64_string_to_dbus_byte_array(value)
-        char_obj.WriteValue(dbus_value, dbus.Dictionary({}, signature='sv'))
-        return True
-
-    @xmlrpc_server.dbus_safe(None)
-    def exchange_messages(self, tx_object_path, rx_object_path, value):
-        """Performs a write operation on a gatt characteristic and wait for
-        the response on another characteristic.
-
-        @param tx_object_path: the object path of the characteristic to write.
-        @param rx_object_path: the object path of the characteristic to read.
-        @param value: A byte array containing the data to write.
-
-        @returns: The value of the characteristic to read from.
-                  None if the uuid/address was not found in the object tree, or
-                      if a DBus exception was raised by the write operation.
-
-        """
-        tx_obj = self._get_gatt_characteristic_object(tx_object_path)
-
-        if tx_obj is None:
-            return None
-
-        self._chrc_property = None
-
-        self._signal_watch = self._system_bus.add_signal_receiver(
-                self._property_changed,
-                signal_name='PropertiesChanged',
-                path=rx_object_path)
-
-        self._timeout_id = gobject.timeout_add(
-                self.PROPERTY_UPDATE_TIMEOUT_MILLI_SECS,
-                self._property_wait_timeout)
-
-        write_value = _b64_string_to_dbus_byte_array(value)
-        tx_obj.WriteValue(write_value, dbus.Dictionary({}, signature='sv'))
-
-        self._dbus_mainloop.run()
-
-        return _dbus_byte_array_to_b64_string(self._chrc_property)
-
-    def _property_changed(self, *args, **kwargs):
-        """Handler for properties changed signal."""
-        gobject.source_remove(self._timeout_id)
-        self._signal_watch.remove()
-        changed_prop = args
-
-        logging.info(changed_prop)
-        prop_dict = changed_prop[1]
-        self._chrc_property = prop_dict['Value']
-        if self._dbus_mainloop.is_running():
-            self._dbus_mainloop.quit()
-
-    def _property_wait_timeout(self):
-        """Timeout handler when waiting for properties update signal."""
-        self._signal_watch.remove()
-        if self._dbus_mainloop.is_running():
-            logging.warning("quit main loop due to timeout")
-            self._dbus_mainloop.quit()
-        # Return false so that this method will not be called again.
-        return False
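
Taken together, exchange_messages(), _property_changed() and _property_wait_timeout() implement a write-then-wait handshake: write to the TX characteristic, then spin a main loop until either PropertiesChanged fires on the RX path or the timeout expires. A rough, dependency-free analogy of that control flow, using threading.Event instead of a GLib main loop and D-Bus signal watch (all names here are illustrative, not part of the module):

import threading

class ExchangeDemo(object):
    PROPERTY_UPDATE_TIMEOUT_SECS = 2

    def __init__(self):
        self._got_value = threading.Event()
        self._chrc_property = None

    def on_properties_changed(self, value):
        # Stands in for the PropertiesChanged handler (_property_changed).
        self._chrc_property = value
        self._got_value.set()

    def exchange(self, write_fn, value):
        self._got_value.clear()
        self._chrc_property = None
        write_fn(value)  # stands in for WriteValue() on the TX characteristic
        if not self._got_value.wait(self.PROPERTY_UPDATE_TIMEOUT_SECS):
            print('quit wait due to timeout')  # mirrors _property_wait_timeout
        return self._chrc_property

demo = ExchangeDemo()

def fake_write(value):
    # Simulate the peer answering 50 ms after the write.
    threading.Timer(0.05, demo.on_properties_changed, args=[b'pong']).start()

print(demo.exchange(fake_write, b'ping'))  # b'pong'
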
-
-    @xmlrpc_server.dbus_safe(False)
-    def _get_gatt_characteristic_object(self, object_path):
-        return dbus.Interface(
-                self._system_bus.get_object(self.BLUEZ_SERVICE_NAME,
-                                            object_path),
-                self.BLUEZ_GATT_CHAR_IFACE)
-
-    @xmlrpc_server.dbus_safe(False)
-    def start_notify(self, object_path, cccd_value):
-        """Starts the notification session on the gatt characteristic.
-
-        @param object_path: the object path of the characteristic.
-        @param cccd_value: Possible CCCD values include
-               0x00 - inferred from the remote characteristic's properties
-               0x01 - notification
-               0x02 - indication
-
-        @returns: True if the operation succeeds.
-                  False if the characteristic is not found, or
-                      if a DBus exception was raised by the operation.
-
-        """
-        char_obj = self._get_gatt_characteristic_object(object_path)
-        if char_obj is None:
-            logging.error("characteristic not found: %s %s", object_path)
-            return False
-
-        try:
-            char_obj.StartNotify(dbus.Byte(cccd_value))
-            return True
-        except Exception as e:
-            logging.error('start_notify: %s', e)
-        except:
-            logging.error('start_notify: unexpected error')
-        return False
-
-    @xmlrpc_server.dbus_safe(False)
-    def stop_notify(self, object_path):
-        """Stops the notification session on the gatt characteristic.
-
-        @param object_path: the object path of the characteristic.
-
-        @returns: True if the operation succeeds.
-                  False if the characteristic is not found, or
-                      if a DBus exception was raised by the operation.
-
-        """
-        char_obj = self._get_gatt_characteristic_object(object_path)
-        if char_obj is None:
-            logging.error("characteristic not found: %s %s", object_path)
-            return False
-
-        try:
-            char_obj.StopNotify()
-            return True
-        except Exception as e:
-            logging.error('stop_notify: %s', e)
-        except:
-            logging.error('stop_notify: unexpected error')
-        return False
-
-    @xmlrpc_server.dbus_safe(False)
-    def is_notifying(self, object_path):
-        """Is the GATT characteristic in a notifying session?
-
-        @param object_path: the object path of the characteristic.
-
-        @return True if it is in a notification session. False otherwise.
-
-        """
-
-        return self.get_gatt_characteristic_property(object_path, 'Notifying')
-
-    @xmlrpc_server.dbus_safe(False)
-    def is_characteristic_path_resolved(self, uuid, address):
-        """Checks whether a characteristic is in the object tree.
-
-        Checks whether a characteristic is currently found in the object tree.
-
-        @param uuid: The uuid of the characteristic to search for.
-        @param address: The MAC address of the device on which to search for
-            the characteristic.
-
-        @returns: True if the characteristic is found.
-                  False if the characteristic path is not found.
-
-        """
-        return bool(self.get_characteristic_map(address).get(uuid))
-
-    @xmlrpc_server.dbus_safe(False)
-    def get_connection_info(self, address):
-        """Get device connection info.
-
-        @param address: The MAC address of the device.
-
-        @returns: On success, a JSON-encoded tuple of:
-                      ( RSSI, transmit_power, max_transmit_power )
-                  None otherwise.
-
-        """
-        plugin_device = self._get_plugin_device_interface(address)
-        if plugin_device is None:
-            return None
-
-        try:
-            connection_info = plugin_device.GetConnInfo()
-            return json.dumps(connection_info)
-        except Exception as e:
-            logging.error('get_connection_info: %s', e)
-        except:
-            logging.error('get_connection_info: unexpected error')
-        return None
-
-    @xmlrpc_server.dbus_safe(False)
-    def set_le_connection_parameters(self, address, parameters):
-        """Set the LE connection parameters.
-
-        @param address: The MAC address of the device.
-        @param parameters: The LE connection parameters to set.
-
-        @return: True on success. False otherwise.
-
-        """
-        plugin_device = self._get_plugin_device_interface(address)
-        if plugin_device is None:
-            return False
-
-        return not self.dbus_async_method(
-                plugin_device.SetLEConnectionParameters,
-                # reply handler
-                lambda: logging.info(
-                        'set_le_connection_parameters: succeeded.'),
-                # error handler
-                lambda err: logging.error(
-                        'set_le_connection_parameters: failed: %s', str(err)),
-                # other arguments
-                parameters)
-
-    @xmlrpc_server.dbus_safe(False)
-    def _get_plugin_device_interface(self, address):
-        """Get the BlueZ Chromium device plugin interface.
-
-        This interface can be used to issue dbus requests such as
-        GetConnInfo and SetLEConnectionParameters.
-
-        @param address: The MAC address of the device.
-
-        @return: On success, the BlueZ Chromium device plugin interface
-                 None otherwise.
-
-        """
-        path = self._get_device_path(address)
-        if path is None:
-            return None
-
-        return dbus.Interface(
-                self._system_bus.get_object(self.BLUEZ_SERVICE_NAME, path),
-                self.BLUEZ_PLUGIN_DEVICE_IFACE)
-
-    def _powerd_last_resume_details(self, before=5, after=0):
-        """ Look at powerd logs for last suspend/resume attempt.
-
-        Note that logs are in reverse order (chronologically). Keep that in mind
-        for the 'before' and 'after' parameters.
-
-        @param before: Number of context lines before search item to show.
-        @param after: Number of context lines after search item to show.
-
-        @return Most recent lines containing suspend resume details or ''.
-        """
-        event_file = '/var/log/power_manager/powerd.LATEST'
-
-        # Each powerd_suspend wakeup has a log "powerd_suspend returned 0",
-        # with the return code of the suspend. We search for the last
-        # occurrence in the log, and then find the collocated event_count log,
-        # indicating the wakeup cause. The -B option for grep will actually
-        # grab the *next* 5 logs in time, since we are piping the powerd file
-        # backwards with the tac command.
-        resume_indicator = 'powerd_suspend returned'
-        cmd = 'tac {} | grep -A {} -B {} -m1 "{}"'.format(
-                event_file, after, before, resume_indicator)
-
-        try:
-            return utils.run(cmd).stdout
-        except error.CmdError:
-            logging.error('Could not locate recent suspend')
-
-        return ''
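
Because the log is piped through tac, grep's -B (before) context actually yields the chronologically later lines, as the comment notes. For reference, this is the exact shell pipeline the method builds for its defaults (path and marker copied from the code above):

# Shell pipeline built by _powerd_last_resume_details() for the default
# before=5, after=0. 'tac' reverses the log, so grep -B grabs the
# chronologically *later* context lines.
event_file = '/var/log/power_manager/powerd.LATEST'
resume_indicator = 'powerd_suspend returned'
before, after = 5, 0
cmd = 'tac {} | grep -A {} -B {} -m1 "{}"'.format(
        event_file, after, before, resume_indicator)
print(cmd)
# tac /var/log/power_manager/powerd.LATEST | grep -A 0 -B 5 -m1 "powerd_suspend returned"
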
-
-    def bt_caused_last_resume(self):
-        """Checks if last resume from suspend was caused by bluetooth
-
-        @return: True if BT wake path was cause of resume, False otherwise
-        """
-
-        # When the resume cause is printed to powerd log, it omits the
-        # /power/wakeup portion of wake path
-        bt_wake_path = self._get_wake_enabled_path()
-
-        # If bluetooth does not have a valid wake path, it could not have caused
-        # the resume
-        if not bt_wake_path:
-            return False
-
-        bt_wake_path = bt_wake_path.replace('/power/wakeup', '')
-
-        last_resume_details = self._powerd_last_resume_details()
-
-        # If BT caused wake, there will be a line describing the bt wake
-        # path's event_count before and after the resume
-        for line in last_resume_details.split('\n'):
-            if 'event_count' in line:
-                logging.info('Checking wake event: {}'.format(line))
-                if bt_wake_path in line:
-                    return True
-
-        return False
-
-    def find_last_suspend_via_powerd_logs(self):
-        """ Finds the last suspend attempt via powerd logs.
-
-        Finds the last suspend attempt using powerd logs by searching backwards
-        through the logs to find the latest entries with 'powerd_suspend'. If we
-        can't find a suspend attempt, we return None.
-
-        @return: Tuple (suspend start time, suspend end time, suspend result) or
-                None if we can't find a suspend attempt
-        """
-        # Logs look like this:
-        # [1102/202036.973853:INFO:daemon.cc(704)] powerd_suspend returned 0
-        # ... stuff in between ...
-        # [1102/202025.785372:INFO:suspender.cc(574)] Starting suspend
-
-        # Date format for strptime and strftime
-        date_format = '%m%d/%H%M%S.%f'
-        date_group_re = '(?P<date>[0-9]+/[0-9]+[.][0-9]+)'
-
-        finish_suspend_re = re.compile(
-                '^\\[{date_regex}'
-                '.*daemon.*powerd_suspend returned '
-                '(?P<exitcode>[0-9]+)'.format(date_regex=date_group_re))
-        start_suspend_re = re.compile(
-                '^\\[{date_regex}.*suspender.*'
-                'Starting suspend'.format(date_regex=date_group_re))
-
-        now = datetime.now()
-        last_resume_details = self._powerd_last_resume_details(before=0,
-                                                               after=8)
-        if last_resume_details:
-            start_time, end_time, ret = None, None, None
-            try:
-                for line in last_resume_details.split('\n'):
-                    logging.debug('Last suspend search: %s', line)
-                    m = finish_suspend_re.match(line)
-                    if m:
-                        logging.debug('Found suspend end: date(%s) ret(%s)',
-                                      m.group('date'), m.group('exitcode'))
-                        end_time = datetime.strptime(
-                                m.group('date'),
-                                date_format).replace(year=now.year)
-                        ret = int(m.group('exitcode'))
-
-                    m = start_suspend_re.match(line)
-                    if m:
-                        logging.debug('Found suspend start: date(%s)',
-                                      m.group('date'))
-                        start_time = datetime.strptime(
-                                m.group('date'),
-                                date_format).replace(year=now.year)
-                        break
-
-                if all([x is not None for x in [start_time, end_time, ret]]):
-                    # Return dates in string format due to inconsistency between
-                    # python2/3 usage on host and dut
-                    return (start_time.strftime(self.OUT_DATE_FORMAT),
-                            end_time.strftime(self.OUT_DATE_FORMAT), ret)
-                else:
-                    logging.error(
-                            'Failed to parse details from last suspend. %s %s %s',
-                            str(start_time), str(end_time), str(ret))
-            except Exception as e:
-                logging.error('Failed to parse last suspend: %s', str(e))
-        else:
-            logging.error('No powerd_suspend attempt found')
-
-        return None
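
The two regexes above can be exercised directly against the sample powerd lines quoted in the comment, which is a quick way to sanity-check the date and exit-code extraction without powerd logs on disk (a standalone sketch, not part of the module):

import re
from datetime import datetime

date_group_re = '(?P<date>[0-9]+/[0-9]+[.][0-9]+)'
finish_suspend_re = re.compile(
        '^\\[{d}.*daemon.*powerd_suspend returned '
        '(?P<exitcode>[0-9]+)'.format(d=date_group_re))
start_suspend_re = re.compile(
        '^\\[{d}.*suspender.*Starting suspend'.format(d=date_group_re))

sample_lines = [
    '[1102/202036.973853:INFO:daemon.cc(704)] powerd_suspend returned 0',
    '[1102/202025.785372:INFO:suspender.cc(574)] Starting suspend',
]
for line in sample_lines:
    m = finish_suspend_re.match(line) or start_suspend_re.match(line)
    stamp = datetime.strptime(m.group('date'), '%m%d/%H%M%S.%f')
    print(stamp.replace(year=datetime.now().year), m.groupdict())
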
-
-    def do_suspend(self, seconds, expect_bt_wake):
-        """Suspend DUT using the power manager.
-
-        @param seconds: The number of seconds to suspend the device.
-        @param expect_bt_wake: Whether we expect bluetooth to wake us from
-            suspend. If true, we expect this resume will occur early
-
-        @throws: SuspendFailure on resume with unexpected timing or wake source.
-            The raised exception will be handled as a non-zero retcode over the
-            RPC, signalling for the test to fail.
-        """
-        early_wake = False
-        try:
-            sys_power.do_suspend(seconds)
-
-        except sys_power.SpuriousWakeupError:
-            logging.info('Early resume detected...')
-            early_wake = True
-
-        # Handle error conditions based on test expectations, whether resume
-        # was early, and cause of the resume
-        bt_caused_wake = self.bt_caused_last_resume()
-        logging.info('Cause for resume: {}'.format(
-                'BT' if bt_caused_wake else 'Not BT'))
-
-        if not expect_bt_wake and bt_caused_wake:
-            raise sys_power.SuspendFailure('BT woke us unexpectedly')
-
-        # TODO(b/160803597) - Uncomment when BT wake reason is correctly
-        # captured in powerd log.
-        #
-        # if expect_bt_wake and not bt_caused_wake:
-        #   raise sys_power.SuspendFailure('BT should have woken us')
-        #
-        # if bt_caused_wake and not early_wake:
-        #   raise sys_power.SuspendFailure('BT wake did not come early')
-
-        return True
-
-    def get_wlan_vid_pid(self):
-        """ Return vendor id and product id of the wlan chip on BT/WiFi module
-
-        @returns: (vid,pid) on success; (None,None) on failure
-        """
-        vid = None
-        pid = None
-        path_template = '/sys/class/net/%s/device/'
-        for dev_name in ['wlan0', 'mlan0']:
-            if os.path.exists(path_template % dev_name):
-                path_v = path_template % dev_name + 'vendor'
-                path_d = path_template % dev_name + 'device'
-                logging.debug('Paths are %s %s', path_v, path_d)
-                try:
-                    vid = open(path_v).read().strip('\n')
-                    pid = open(path_d).read().strip('\n')
-                    break
-                except Exception as e:
-                    logging.error('Exception %s while reading vid/pid', str(e))
-        logging.debug('returning vid:%s pid:%s', vid, pid)
-        return (vid, pid)
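
The lookup above just reads two sysfs nodes next to the wlan netdev. A standalone sketch of the same read, which falls back to (None, None) on a machine without wlan0/mlan0 (the example output in the trailing comment is illustrative):

import os

def get_wlan_vid_pid():
    path_template = '/sys/class/net/%s/device/'
    for dev_name in ('wlan0', 'mlan0'):
        base = path_template % dev_name
        if not os.path.exists(base):
            continue
        try:
            with open(base + 'vendor') as f:
                vid = f.read().strip()
            with open(base + 'device') as f:
                pid = f.read().strip()
            return vid, pid
        except (IOError, OSError):
            pass
    return None, None

print(get_wlan_vid_pid())  # e.g. ('0x8086', '0x2526') on an Intel PCIe module
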
-
-    def get_bt_module_name(self):
-        """ Return bluetooth module name for non-USB devices
-
-        @returns '' on failure. On success, returns the chipset name if it is
-                 found in the dict; otherwise returns the raw string read.
-        """
-        # map the string read from device to chipset name
-        chipset_string_dict = {'qcom,wcn3991-bt\x00': 'WCN3991'}
-
-        hci_device = '/sys/class/bluetooth/hci0'
-        real_path = os.path.realpath(hci_device)
-
-        logging.debug('real path is %s', real_path)
-        if 'usb' in real_path:
-            return ''
-
-        device_path = os.path.join(real_path, 'device', 'of_node',
-                                   'compatible')
-        try:
-            chipset_string = open(device_path).read()
-            logging.debug('read string %s from %s', chipset_string,
-                          device_path)
-        except Exception as e:
-            logging.error('Exception %s while reading from file %s', str(e),
-                          device_path)
-            return ''
-
-        if chipset_string in chipset_string_dict:
-            return chipset_string_dict[chipset_string]
-        else:
-            logging.debug("Chipset not known. Returning %s", chipset_string)
-            return chipset_string
-
-    def get_device_time(self):
-        """ Get the current device time. """
-        return datetime.now().strftime(self.OUT_DATE_FORMAT)
-
-    def cleanup(self):
-        """Cleanup before exiting the client xmlrpc process."""
-
-        self.advmon_appmgr.destroy()
diff --git a/client/cros/multimedia/browser_facade.py b/client/cros/multimedia/browser_facade.py
new file mode 100644
index 0000000..f448a27
--- /dev/null
+++ b/client/cros/multimedia/browser_facade.py
@@ -0,0 +1,140 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An interface to access the local browser facade."""
+
+import logging
+
+class BrowserFacadeLocalError(Exception):
+    """Error in BrowserFacadeLocal."""
+    pass
+
+
+class BrowserFacadeLocal(object):
+    """Facade to access the browser-related functionality."""
+
+    def __init__(self, resource):
+        """Initializes the USB facade.
+
+        @param resource: A FacadeResource object.
+
+        """
+        self._resource = resource
+
+
+    def start_custom_chrome(self, kwargs):
+        """Start a custom Chrome with given arguments.
+
+        @param kwargs: A dict of keyword arguments passed to Chrome.
+        @return: True on success, False otherwise.
+
+        """
+        return self._resource.start_custom_chrome(kwargs)
+
+
+    def start_default_chrome(self, restart=False, extra_browser_args=None,
+                             disable_arc=False):
+        """Start the default Chrome.
+
+        @param restart: True to start Chrome without clearing previous state.
+        @param extra_browser_args: A list containing extra browser args passed
+                                   to Chrome in addition to default ones.
+        @param disable_arc: True to disable ARC++.
+        @return: True on success, False otherwise.
+
+        """
+        return self._resource.start_default_chrome(restart, extra_browser_args,
+                                                   disable_arc)
+
+
+    def set_http_server_directories(self, directories):
+        """Starts an HTTP server.
+
+        @param directories: Directories to start serving.
+
+        @return True on success. False otherwise.
+
+        """
+        return self._resource.set_http_server_directories(directories)
+
+
+    def http_server_url_of(self, fullpath):
+        """Converts a path to a URL.
+
+        @param fullpath: String containing the full path to the content.
+
+        @return the URL for the provided path.
+
+        """
+        return self._resource.http_server_url_of(fullpath)
+
+
+    def new_tab(self, url):
+        """Opens a new tab and loads URL.
+
+        @param url: The URL to load.
+        @return a str, the tab descriptor of the opened tab.
+
+        """
+        logging.debug('Load URL %s', url)
+        return self._resource.load_url(url)
+
+
+    def close_tab(self, tab_descriptor):
+        """Closes a previously opened tab.
+
+        @param tab_descriptor: Indicate which tab to close.
+
+        """
+        tab = self._resource.get_tab_by_descriptor(tab_descriptor)
+        logging.debug('Closing URL %s', tab.url)
+        self._resource.close_tab(tab_descriptor)
+
+
+    def wait_for_javascript_expression(
+            self, tab_descriptor, expression, timeout):
+        """Waits for the given JavaScript expression to be True on the
+        given tab.
+
+        @param tab_descriptor: Indicate on which tab to wait for the expression.
+        @param expression: Indicate for what expression to wait.
+        @param timeout: Indicate the timeout of the expression.
+        """
+        self._resource.wait_for_javascript_expression(
+                tab_descriptor, expression, timeout)
+
+
+    def execute_javascript(self, tab_descriptor, statement, timeout):
+        """Executes a JavaScript statement on the given tab.
+
+        @param tab_descriptor: Indicate on which tab to execute the statement.
+        @param statement: Indicate what statement to execute.
+        @param timeout: Indicate the timeout of the statement.
+        """
+        self._resource.execute_javascript(
+                tab_descriptor, statement, timeout)
+
+
+    def evaluate_javascript(self, tab_descriptor, expression, timeout):
+        """Evaluates a JavaScript expression on the given tab.
+
+        @param tab_descriptor: Indicate on which tab to evaluate the expression.
+        @param expression: Indicate what expression to evaluate.
+        @param timeout: Indicate the timeout of the expression.
+        @return the JSONized result of the given expression
+        """
+        return self._resource.evaluate_javascript(
+                tab_descriptor, expression, timeout)
+
+
+    def get_tab_urls(self):
+        """Gets urls from current Chrome tabs.
+
+        @returns: A list of str objects which contain urls from current Chrome
+        tabs.
+        """
+        logging.info("Getting tab objects from Chrome...")
+        tabs = self._resource.get_tabs()
+
+        return [tab.url for tab in tabs]
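
For context, BrowserFacadeLocal is a thin delegation layer: new_tab(), close_tab() and get_tab_urls() forward to load_url(), close_tab() and get_tabs() on the FacadeResource. The sketch below shows that resource contract with a hand-rolled fake; FakeResource and FakeTab are stand-ins, not real autotest classes:

# Sketch of the FacadeResource contract BrowserFacadeLocal relies on for
# new_tab/close_tab/get_tab_urls. FakeResource/FakeTab are stand-ins.
class FakeTab(object):
    def __init__(self, url):
        self.url = url

class FakeResource(object):
    def __init__(self):
        self._tabs = {}

    def load_url(self, url):
        descriptor = 'tab-%d' % len(self._tabs)
        self._tabs[descriptor] = FakeTab(url)
        return descriptor

    def get_tab_by_descriptor(self, descriptor):
        return self._tabs[descriptor]

    def close_tab(self, descriptor):
        del self._tabs[descriptor]

    def get_tabs(self):
        return list(self._tabs.values())

resource = FakeResource()
descriptor = resource.load_url('http://localhost:8000/test.html')
print(resource.get_tab_by_descriptor(descriptor).url)
print([tab.url for tab in resource.get_tabs()])
resource.close_tab(descriptor)
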
diff --git a/client/cros/multimedia/browser_facade_native.py b/client/cros/multimedia/browser_facade_native.py
deleted file mode 100644
index e67ebb6..0000000
--- a/client/cros/multimedia/browser_facade_native.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""An interface to access the local browser facade."""
-
-import logging
-
-class BrowserFacadeNativeError(Exception):
-    """Error in BrowserFacadeNative."""
-    pass
-
-
-class BrowserFacadeNative(object):
-    """Facade to access the browser-related functionality."""
-
-    def __init__(self, resource):
-        """Initializes the USB facade.
-
-        @param resource: A FacadeResource object.
-
-        """
-        self._resource = resource
-
-
-    def start_custom_chrome(self, kwargs):
-        """Start a custom Chrome with given arguments.
-
-        @param kwargs: A dict of keyword arguments passed to Chrome.
-        @return: True on success, False otherwise.
-
-        """
-        return self._resource.start_custom_chrome(kwargs)
-
-
-    def start_default_chrome(self, restart=False, extra_browser_args=None,
-                             disable_arc=False):
-        """Start the default Chrome.
-
-        @param restart: True to start Chrome without clearing previous state.
-        @param extra_browser_args: A list containing extra browser args passed
-                                   to Chrome in addition to default ones.
-        @param disable_arc: True to disable ARC++.
-        @return: True on success, False otherwise.
-
-        """
-        return self._resource.start_default_chrome(restart, extra_browser_args,
-                                                   disable_arc)
-
-
-    def set_http_server_directories(self, directories):
-        """Starts an HTTP server.
-
-        @param directories: Directories to start serving.
-
-        @return True on success. False otherwise.
-
-        """
-        return self._resource.set_http_server_directories(directories)
-
-
-    def http_server_url_of(self, fullpath):
-        """Converts a path to a URL.
-
-        @param fullpath: String containing the full path to the content.
-
-        @return the URL for the provided path.
-
-        """
-        return self._resource.http_server_url_of(fullpath)
-
-
-    def new_tab(self, url):
-        """Opens a new tab and loads URL.
-
-        @param url: The URL to load.
-        @return a str, the tab descriptor of the opened tab.
-
-        """
-        logging.debug('Load URL %s', url)
-        return self._resource.load_url(url)
-
-
-    def close_tab(self, tab_descriptor):
-        """Closes a previously opened tab.
-
-        @param tab_descriptor: Indicate which tab to close.
-
-        """
-        tab = self._resource.get_tab_by_descriptor(tab_descriptor)
-        logging.debug('Closing URL %s', tab.url)
-        self._resource.close_tab(tab_descriptor)
-
-
-    def wait_for_javascript_expression(
-            self, tab_descriptor, expression, timeout):
-        """Waits for the given JavaScript expression to be True on the
-        given tab.
-
-        @param tab_descriptor: Indicate on which tab to wait for the expression.
-        @param expression: Indicate for what expression to wait.
-        @param timeout: Indicate the timeout of the expression.
-        """
-        self._resource.wait_for_javascript_expression(
-                tab_descriptor, expression, timeout)
-
-
-    def execute_javascript(self, tab_descriptor, statement, timeout):
-        """Executes a JavaScript statement on the given tab.
-
-        @param tab_descriptor: Indicate on which tab to execute the statement.
-        @param statement: Indicate what statement to execute.
-        @param timeout: Indicate the timeout of the statement.
-        """
-        self._resource.execute_javascript(
-                tab_descriptor, statement, timeout)
-
-
-    def evaluate_javascript(self, tab_descriptor, expression, timeout):
-        """Evaluates a JavaScript expression on the given tab.
-
-        @param tab_descriptor: Indicate on which tab to evaluate the expression.
-        @param expression: Indicate what expression to evaluate.
-        @param timeout: Indicate the timeout of the expression.
-        @return the JSONized result of the given expression
-        """
-        return self._resource.evaluate_javascript(
-                tab_descriptor, expression, timeout)
-
-
-    def get_tab_urls(self):
-        """Gets urls from current Chrome tabs.
-
-        @returns: A list of str objects which contain urls from current Chrome
-        tabs.
-        """
-        logging.info("Getting tab objects from Chrome...")
-        tabs = self._resource.get_tabs()
-
-        return [tab.url for tab in tabs]
diff --git a/client/cros/multimedia/cfm_facade.py b/client/cros/multimedia/cfm_facade.py
new file mode 100644
index 0000000..b8563fe
--- /dev/null
+++ b/client/cros/multimedia/cfm_facade.py
@@ -0,0 +1,582 @@
+# Lint as: python2, python3
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Facade to access the CFM functionality."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import glob
+import logging
+import os
+import time
+import six
+import six.moves.urllib.parse
+import six.moves.xmlrpc_client
+
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import cfm_hangouts_api
+from autotest_lib.client.common_lib.cros import cfm_meetings_api
+from autotest_lib.client.common_lib.cros import enrollment
+from autotest_lib.client.common_lib.cros import kiosk_utils
+from autotest_lib.client.cros.graphics import graphics_utils
+
+
+class TimeoutException(Exception):
+    """Timeout Exception class."""
+    pass
+
+
+class CFMFacadeLocal(object):
+    """Facade to access the CFM functionality.
+
+    The methods inside this class only accept Python native types.
+    """
+    _USER_ID = 'cr0s-cfm-la6-aut0t3st-us3r@croste.tv'
+    _PWD = 'test0000'
+    _EXT_ID = 'ikfcpmgefdpheiiomgmhlmmkihchmdlj'
+    _ENROLLMENT_DELAY = 45
+    _DEFAULT_TIMEOUT = 30
+
+    # Log file locations
+    _BASE_DIR = '/home/chronos/user/Storage/ext/'
+    _CALLGROK_LOGS_PATTERN = _BASE_DIR + _EXT_ID + '/0*/File System/000/t/00/0*'
+    _PA_LOGS_PATTERN = _BASE_DIR + _EXT_ID + '/def/File System/primary/p/00/0*'
+
+
+    def __init__(self, resource, screen):
+        """Initializes a CFMFacadeLocal.
+
+        @param resource: A FacadeResource object.
+        """
+        self._resource = resource
+        self._screen = screen
+
+
+    def enroll_device(self):
+        """Enroll device into CFM."""
+        logging.info('Enrolling device...')
+        extra_browser_args = ["--force-devtools-available"]
+        self._resource.start_custom_chrome({
+            "auto_login": False,
+            "disable_gaia_services": False,
+            "extra_browser_args": extra_browser_args})
+
+        enrollment.RemoraEnrollment(self._resource._browser, self._USER_ID,
+                self._PWD)
+        # Timeout to allow for the device to stabilize and go back to the
+        # OOB screen before proceeding. The device may restart the app a couple
+        # of times before it reaches the OOB screen.
+        time.sleep(self._ENROLLMENT_DELAY)
+        logging.info('Enrollment completed.')
+
+
+    def restart_chrome_for_cfm(self, extra_chrome_args=None):
+        """Restart chrome with custom values for CFM.
+
+        @param extra_chrome_args: a list with extra command line arguments for
+                Chrome.
+        """
+        logging.info('Restarting chrome for CfM...')
+        custom_chrome_setup = {"clear_enterprise_policy": False,
+                               "dont_override_profile": True,
+                               "disable_gaia_services": False,
+                               "disable_default_apps": False,
+                               "auto_login": False}
+        custom_chrome_setup["extra_browser_args"] = (
+            ["--force-devtools-available"])
+        if extra_chrome_args:
+            custom_chrome_setup["extra_browser_args"].extend(extra_chrome_args)
+        self._resource.start_custom_chrome(custom_chrome_setup)
+        logging.info('Chrome process restarted in CfM mode.')
+
+
+    def check_hangout_extension_context(self):
+        """Check to make sure hangout app launched.
+
+        @raises error.TestFail if any of the URL checks fails.
+        """
+        logging.info('Verifying extension contexts...')
+        ext_contexts = kiosk_utils.wait_for_kiosk_ext(
+                self._resource._browser, self._EXT_ID)
+        ext_urls = [context.EvaluateJavaScript('location.href;')
+                        for context in ext_contexts]
+        expected_urls = ['chrome-extension://' + self._EXT_ID + '/' + path
+                         for path in ['hangoutswindow.html?windowid=0',
+                                      'hangoutswindow.html?windowid=1',
+                                      'hangoutswindow.html?windowid=2',
+                                      '_generated_background_page.html']]
+        for url in ext_urls:
+            logging.info('Extension URL %s', url)
+            if url not in expected_urls:
+                raise error.TestFail(
+                    'Unexpected extension context urls, expected one of %s, '
+                    'got %s' % (expected_urls, url))
+        logging.info('Hangouts extension contexts verified.')
+
+
+    def take_screenshot(self, screenshot_name):
+        """
+        Takes a screenshot of what is currently displayed in png format.
+
+        The screenshot is stored in /tmp. Uses the low level graphics_utils API.
+
+        @param screenshot_name: Name of the screenshot file.
+        @returns The path to the screenshot or None.
+        """
+        try:
+            return graphics_utils.take_screenshot('/tmp', screenshot_name)
+        except Exception as e:
+            logging.warning('Taking screenshot failed', exc_info = e)
+            return None
+
+
+    def get_latest_callgrok_file_path(self):
+        """
+        @return The path to the latest callgrok log file, if any.
+        """
+        try:
+            return max(glob.iglob(self._CALLGROK_LOGS_PATTERN),
+                       key=os.path.getctime)
+        except ValueError as e:
+            logging.exception('Error while searching for callgrok logs.')
+            return None
+
+
+    def get_latest_pa_logs_file_path(self):
+        """
+        @return The path to the latest packaged app log file, if any.
+        """
+        try:
+            return max(self.get_all_pa_logs_file_path(), key=os.path.getctime)
+        except ValueError as e:
+            logging.exception('Error while searching for packaged app logs.')
+            return None
+
+
+    def get_all_pa_logs_file_path(self):
+        """
+        @return The paths to all packaged app log files, if any.
+        """
+        return glob.glob(self._PA_LOGS_PATTERN)
+
+    def reboot_device_with_chrome_api(self):
+        """Reboot device using chrome runtime API."""
+        ext_contexts = kiosk_utils.wait_for_kiosk_ext(
+                self._resource._browser, self._EXT_ID)
+        for context in ext_contexts:
+            context.WaitForDocumentReadyStateToBeInteractiveOrBetter()
+            ext_url = context.EvaluateJavaScript('document.URL')
+            background_url = ('chrome-extension://' + self._EXT_ID +
+                              '/_generated_background_page.html')
+            if ext_url in background_url:
+                context.ExecuteJavaScript('chrome.runtime.restart();')
+
+
+    def _get_webview_context_by_screen(self, screen):
+        """Get webview context that matches the screen param in the url.
+
+        @param screen: Value of the screen param, e.g. 'hotrod' or 'control'.
+        """
+        def _get_context():
+            try:
+                ctxs = kiosk_utils.get_webview_contexts(self._resource._browser,
+                                                        self._EXT_ID)
+                for ctx in ctxs:
+                    parse_result = six.moves.urllib.parse.urlparse(ctx.GetUrl())
+                    url_path = parse_result.path
+                    logging.info('Webview path: "%s"', url_path)
+                    url_query = parse_result.query
+                    logging.info('Webview query: "%s"', url_query)
+                    params = six.moves.urllib.parse.parse_qs(url_query,
+                                               keep_blank_values = True)
+                    is_oobe_node_screen = (
+                        # Hangouts Classic
+                        ('nooobestatesync' in params and 'oobedone' in params)
+                        # Hangouts Meet
+                        or ('oobesecondary' in url_path))
+                    if is_oobe_node_screen:
+                        # Skip the oobe node screen. Not doing this can cause
+                        # the wrong webview context to be returned.
+                        continue
+                    if 'screen' in params and params['screen'][0] == screen:
+                        return ctx
+            except Exception as e:
+                # Having a MIMO attached to the DUT causes a couple of webview
+                # destruction/construction operations during OOBE. If we query a
+                # destructed webview it will throw an exception. Instead of
+                # failing the test, we just swallow the exception.
+                logging.exception(
+                    "Exception occurred while querying the webview contexts.")
+            return None
+
+        return utils.poll_for_condition(
+                    _get_context,
+                    exception=error.TestFail(
+                        'Webview with screen param "%s" not found.' % screen),
+                    timeout=self._DEFAULT_TIMEOUT,
+                    sleep_interval = 1)
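
The screen lookup above boils down to parsing each webview URL's query string and skipping the OOBE node screens. A standalone sketch of that matching logic, using urllib.parse in place of six.moves.urllib.parse (the sample URLs are made up):

from urllib import parse as urlparse  # mirrors six.moves.urllib.parse on Python 3

def matches_screen(url, screen):
    parsed = urlparse.urlparse(url)
    params = urlparse.parse_qs(parsed.query, keep_blank_values=True)
    is_oobe_node_screen = (
            ('nooobestatesync' in params and 'oobedone' in params)
            or 'oobesecondary' in parsed.path)
    if is_oobe_node_screen:
        return False
    return params.get('screen', [''])[0] == screen

urls = [
    'chrome-extension://abc/oobesecondary.html?screen=hotrod',
    'chrome-extension://abc/main.html?screen=hotrod',
    'chrome-extension://abc/main.html?screen=control',
]
print([matches_screen(url, 'hotrod') for url in urls])  # [False, True, False]
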
+
+
+    def skip_oobe_after_enrollment(self):
+        """Skips oobe and goes to the app landing page after enrollment."""
+        # Due to a varying number of app restarts before we reach the OOB page
+        # we need to restart Chrome in order to make sure we have the devtools
+        # handle available and up-to-date.
+        self.restart_chrome_for_cfm()
+        self.check_hangout_extension_context()
+        self.wait_for_telemetry_commands()
+        self.wait_for_oobe_start_page()
+        self.skip_oobe_screen()
+
+
+    @property
+    def _webview_context(self):
+        """Get webview context object."""
+        return self._get_webview_context_by_screen(self._screen)
+
+
+    @property
+    def _cfmApi(self):
+        """Instantiate appropriate cfm api wrapper"""
+        if self._webview_context.EvaluateJavaScript(
+                "typeof window.hrRunDiagnosticsForTest == 'function'"):
+            return cfm_hangouts_api.CfmHangoutsAPI(self._webview_context)
+        if self._webview_context.EvaluateJavaScript(
+                "typeof window.hrTelemetryApi != 'undefined'"):
+            return cfm_meetings_api.CfmMeetingsAPI(self._webview_context)
+        raise error.TestFail('No hangouts or meet telemetry API available. '
+                             'Current url is "%s"' %
+                             self._webview_context.GetUrl())
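
The _cfmApi property feature-detects which telemetry API the page exposes and picks the matching wrapper. A standalone sketch of that dispatch with a fake webview context; only the two JavaScript probes are taken from the code above, everything else is a stand-in:

class FakeWebview(object):
    def __init__(self, has_hangouts=False, has_meet=False):
        self._results = {
            "typeof window.hrRunDiagnosticsForTest == 'function'": has_hangouts,
            "typeof window.hrTelemetryApi != 'undefined'": has_meet,
        }

    def EvaluateJavaScript(self, expr):
        return self._results.get(expr, False)

def pick_api(webview):
    # Mirrors the probe order in _cfmApi: Hangouts first, then Meet.
    if webview.EvaluateJavaScript(
            "typeof window.hrRunDiagnosticsForTest == 'function'"):
        return 'CfmHangoutsAPI'
    if webview.EvaluateJavaScript(
            "typeof window.hrTelemetryApi != 'undefined'"):
        return 'CfmMeetingsAPI'
    raise RuntimeError('No hangouts or meet telemetry API available.')

print(pick_api(FakeWebview(has_meet=True)))      # CfmMeetingsAPI
print(pick_api(FakeWebview(has_hangouts=True)))  # CfmHangoutsAPI
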
+
+
+    def wait_for_telemetry_commands(self):
+        """Wait for telemetry commands."""
+        logging.info('Wait for Hangouts telemetry commands')
+        self._webview_context.WaitForJavaScriptCondition(
+            """typeof window.hrOobIsStartPageForTest == 'function'
+               || typeof window.hrTelemetryApi != 'undefined'
+            """,
+            timeout=self._DEFAULT_TIMEOUT)
+
+
+    def wait_for_meetings_in_call_page(self):
+        """Waits for the in-call page to launch."""
+        self.wait_for_telemetry_commands()
+        self._cfmApi.wait_for_meetings_in_call_page()
+
+
+    def wait_for_meetings_landing_page(self):
+        """Waits for the landing page screen."""
+        self.wait_for_telemetry_commands()
+        self._cfmApi.wait_for_meetings_landing_page()
+
+
+    # UI commands/functions
+    def wait_for_oobe_start_page(self):
+        """Wait for oobe start screen to launch."""
+        logging.info('Waiting for OOBE screen')
+        self._cfmApi.wait_for_oobe_start_page()
+
+
+    def skip_oobe_screen(self):
+        """Skip Chromebox for Meetings oobe screen."""
+        logging.info('Skipping OOBE screen')
+        self._cfmApi.skip_oobe_screen()
+
+
+    def is_oobe_start_page(self):
+        """Check if device is on CFM oobe start screen.
+
+        @return a boolean, based on oobe start page status.
+        """
+        return self._cfmApi.is_oobe_start_page()
+
+
+    # Hangouts commands/functions
+    def start_new_hangout_session(self, session_name):
+        """Start a new hangout session.
+
+        @param session_name: Name of the hangout session.
+        """
+        self._cfmApi.start_new_hangout_session(session_name)
+
+
+    def end_hangout_session(self):
+        """End current hangout session."""
+        self._cfmApi.end_hangout_session()
+
+
+    def is_in_hangout_session(self):
+        """Check if device is in hangout session.
+
+        @return a boolean, for hangout session state.
+        """
+        return self._cfmApi.is_in_hangout_session()
+
+
+    def is_ready_to_start_hangout_session(self):
+        """Check if device is ready to start a new hangout session.
+
+        @return a boolean for hangout session ready state.
+        """
+        return self._cfmApi.is_ready_to_start_hangout_session()
+
+
+    def join_meeting_session(self, session_name):
+        """Joins a meeting.
+
+        @param session_name: Name of the meeting session.
+        """
+        self._cfmApi.join_meeting_session(session_name)
+
+
+    def start_meeting_session(self):
+        """Start a meeting.
+
+        @return code for the started meeting
+        """
+        return self._cfmApi.start_meeting_session()
+
+
+    def end_meeting_session(self):
+        """End current meeting session."""
+        self._cfmApi.end_meeting_session()
+
+
+    def get_participant_count(self):
+        """Gets the total participant count in a call."""
+        return self._cfmApi.get_participant_count()
+
+
+    # Diagnostics commands/functions
+    def is_diagnostic_run_in_progress(self):
+        """Check if hotrod diagnostics is running.
+
+        @return a boolean for diagnostic run state.
+        """
+        return self._cfmApi.is_diagnostic_run_in_progress()
+
+
+    def wait_for_diagnostic_run_to_complete(self):
+        """Wait for hotrod diagnostics to complete."""
+        self._cfmApi.wait_for_diagnostic_run_to_complete()
+
+
+    def run_diagnostics(self):
+        """Run hotrod diagnostics."""
+        self._cfmApi.run_diagnostics()
+
+
+    def get_last_diagnostics_results(self):
+        """Get latest hotrod diagnostics results.
+
+        @return a dict with diagnostic test results.
+        """
+        return self._cfmApi.get_last_diagnostics_results()
+
+
+    # Mic audio commands/functions
+    def is_mic_muted(self):
+        """Check if mic is muted.
+
+        @return a boolean for mic mute state.
+        """
+        return self._cfmApi.is_mic_muted()
+
+
+    def mute_mic(self):
+        """Local mic mute from toolbar."""
+        self._cfmApi.mute_mic()
+
+
+    def unmute_mic(self):
+        """Local mic unmute from toolbar."""
+        self._cfmApi.unmute_mic()
+
+
+    def remote_mute_mic(self):
+        """Remote mic mute request from cPanel."""
+        self._cfmApi.remote_mute_mic()
+
+
+    def remote_unmute_mic(self):
+        """Remote mic unmute request from cPanel."""
+        self._cfmApi.remote_unmute_mic()
+
+
+    def get_mic_devices(self):
+        """Get all mic devices detected by hotrod.
+
+        @return a list of mic devices.
+        """
+        return self._cfmApi.get_mic_devices()
+
+
+    def get_preferred_mic(self):
+        """Get mic preferred for hotrod.
+
+        @return a str with preferred mic name.
+        """
+        return self._cfmApi.get_preferred_mic()
+
+
+    def set_preferred_mic(self, mic):
+        """Set preferred mic for hotrod.
+
+        @param mic: String with mic name.
+        """
+        self._cfmApi.set_preferred_mic(mic)
+
+
+    # Speaker commands/functions
+    def get_speaker_devices(self):
+        """Get all speaker devices detected by hotrod.
+
+        @return a list of speaker devices.
+        """
+        return self._cfmApi.get_speaker_devices()
+
+
+    def get_preferred_speaker(self):
+        """Get speaker preferred for hotrod.
+
+        @return a str with preferred speaker name.
+        """
+        return self._cfmApi.get_preferred_speaker()
+
+
+    def set_preferred_speaker(self, speaker):
+        """Set preferred speaker for hotrod.
+
+        @param speaker: String with speaker name.
+        """
+        self._cfmApi.set_preferred_speaker(speaker)
+
+
+    def set_speaker_volume(self, volume_level):
+        """Set speaker volume.
+
+        @param volume_level: String value ranging from 0-100 to set volume to.
+        """
+        self._cfmApi.set_speaker_volume(volume_level)
+
+
+    def get_speaker_volume(self):
+        """Get current speaker volume.
+
+        @return a str value with speaker volume level 0-100.
+        """
+        return self._cfmApi.get_speaker_volume()
+
+
+    def play_test_sound(self):
+        """Play test sound."""
+        self._cfmApi.play_test_sound()
+
+
+    # Camera commands/functions
+    def get_camera_devices(self):
+        """Get all camera devices detected by hotrod.
+
+        @return a list of camera devices.
+        """
+        return self._cfmApi.get_camera_devices()
+
+
+    def get_preferred_camera(self):
+        """Get camera preferred for hotrod.
+
+        @return a str with preferred camera name.
+        """
+        return self._cfmApi.get_preferred_camera()
+
+
+    def set_preferred_camera(self, camera):
+        """Set preferred camera for hotrod.
+
+        @param camera: String with camera name.
+        """
+        self._cfmApi.set_preferred_camera(camera)
+
+
+    def is_camera_muted(self):
+        """Check if camera is muted (turned off).
+
+        @return a boolean for camera muted state.
+        """
+        return self._cfmApi.is_camera_muted()
+
+
+    def mute_camera(self):
+        """Turned camera off."""
+        self._cfmApi.mute_camera()
+
+
+    def unmute_camera(self):
+        """Turned camera on."""
+        self._cfmApi.unmute_camera()
+
+    def move_camera(self, camera_motion):
+        """Move camera(PTZ commands).
+
+        @param camera_motion: Set of allowed commands
+            defined in cfmApi.move_camera.
+        """
+        self._cfmApi.move_camera(camera_motion)
+
+    def _convert_large_integers(self, o):
+        if type(o) is list:
+            return [self._convert_large_integers(x) for x in o]
+        elif type(o) is dict:
+            return {
+                    k: self._convert_large_integers(v)
+                    for k, v in six.iteritems(o)
+            }
+        else:
+            if type(o) is int and o > six.moves.xmlrpc_client.MAXINT:
+                return float(o)
+            else:
+                return o
+
+    def get_media_info_data_points(self):
+        """
+        Gets media info data points containing media stats.
+
+        These are exported on the window object when the
+        ExportMediaInfo mod is enabled.
+
+        @returns A list with dictionaries of media info data points.
+        @raises RuntimeError if the data point API is not available.
+        """
+        is_api_available_script = (
+                '"realtime" in window '
+                '&& "media" in realtime '
+                '&& "getMediaInfoDataPoints" in realtime.media')
+        if not self._webview_context.EvaluateJavaScript(
+                is_api_available_script):
+            raise RuntimeError(
+                    'realtime.media.getMediaInfoDataPoints not available. '
+                    'Is the ExportMediaInfo mod active? '
+                    'The mod is only available for Meet.')
+
+        # Sanitize the timestamp on the JS side to work around crbug.com/851482.
+        # Use JSON stringify/parse to create a deep copy of the data point.
+        get_data_points_js_script = """
+            var dataPoints = window.realtime.media.getMediaInfoDataPoints();
+            dataPoints.map((point) => {
+                var sanitizedPoint = JSON.parse(JSON.stringify(point));
+                sanitizedPoint["timestamp"] /= 1000.0;
+                return sanitizedPoint;
+            });"""
+
+        data_points = self._webview_context.EvaluateJavaScript(
+            get_data_points_js_script)
+        # XML RPC gives overflow errors when trying to send too large
+        # integers or longs so we convert media stats to floats.
+        data_points = self._convert_large_integers(data_points)
+        return data_points
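
The conversion helper exists because XML-RPC cannot marshal integers above the i4 limit, so anything larger is downgraded to a float before crossing the RPC boundary. A standalone check of that behaviour (xmlrpc.client.MAXINT is what six.moves.xmlrpc_client.MAXINT resolves to on Python 3; the sample data point is made up):

from xmlrpc.client import MAXINT  # 2**31 - 1

def convert_large_integers(o):
    if isinstance(o, list):
        return [convert_large_integers(x) for x in o]
    if isinstance(o, dict):
        return {k: convert_large_integers(v) for k, v in o.items()}
    if isinstance(o, int) and o > MAXINT:
        return float(o)
    return o

data_point = {'timestamp': 1651234567.890, 'bytesSent': 5 * 10**9}
print(convert_large_integers([data_point]))
# [{'timestamp': 1651234567.89, 'bytesSent': 5000000000.0}]
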
diff --git a/client/cros/multimedia/cfm_facade_native.py b/client/cros/multimedia/cfm_facade_native.py
deleted file mode 100644
index 2ee0663..0000000
--- a/client/cros/multimedia/cfm_facade_native.py
+++ /dev/null
@@ -1,582 +0,0 @@
-# Lint as: python2, python3
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Facade to access the CFM functionality."""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import glob
-import logging
-import os
-import time
-import six
-import six.moves.urllib.parse
-import six.moves.xmlrpc_client
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import cfm_hangouts_api
-from autotest_lib.client.common_lib.cros import cfm_meetings_api
-from autotest_lib.client.common_lib.cros import enrollment
-from autotest_lib.client.common_lib.cros import kiosk_utils
-from autotest_lib.client.cros.graphics import graphics_utils
-
-
-class TimeoutException(Exception):
-    """Timeout Exception class."""
-    pass
-
-
-class CFMFacadeNative(object):
-    """Facade to access the CFM functionality.
-
-    The methods inside this class only accept Python native types.
-    """
-    _USER_ID = 'cr0s-cfm-la6-aut0t3st-us3r@croste.tv'
-    _PWD = 'test0000'
-    _EXT_ID = 'ikfcpmgefdpheiiomgmhlmmkihchmdlj'
-    _ENROLLMENT_DELAY = 45
-    _DEFAULT_TIMEOUT = 30
-
-    # Log file locations
-    _BASE_DIR = '/home/chronos/user/Storage/ext/'
-    _CALLGROK_LOGS_PATTERN = _BASE_DIR + _EXT_ID + '/0*/File System/000/t/00/0*'
-    _PA_LOGS_PATTERN = _BASE_DIR + _EXT_ID + '/def/File System/primary/p/00/0*'
-
-
-    def __init__(self, resource, screen):
-        """Initializes a CFMFacadeNative.
-
-        @param resource: A FacadeResource object.
-        """
-        self._resource = resource
-        self._screen = screen
-
-
-    def enroll_device(self):
-        """Enroll device into CFM."""
-        logging.info('Enrolling device...')
-        extra_browser_args = ["--force-devtools-available"]
-        self._resource.start_custom_chrome({
-            "auto_login": False,
-            "disable_gaia_services": False,
-            "extra_browser_args": extra_browser_args})
-
-        enrollment.RemoraEnrollment(self._resource._browser, self._USER_ID,
-                self._PWD)
-        # Timeout to allow for the device to stabilize and go back to the
-        # OOB screen before proceeding. The device may restart the app a couple
-        # of times before it reaches the OOB screen.
-        time.sleep(self._ENROLLMENT_DELAY)
-        logging.info('Enrollment completed.')
-
-
-    def restart_chrome_for_cfm(self, extra_chrome_args=None):
-        """Restart chrome with custom values for CFM.
-
-        @param extra_chrome_args: a list with extra command line arguments for
-                Chrome.
-        """
-        logging.info('Restarting chrome for CfM...')
-        custom_chrome_setup = {"clear_enterprise_policy": False,
-                               "dont_override_profile": True,
-                               "disable_gaia_services": False,
-                               "disable_default_apps": False,
-                               "auto_login": False}
-        custom_chrome_setup["extra_browser_args"] = (
-            ["--force-devtools-available"])
-        if extra_chrome_args:
-            custom_chrome_setup["extra_browser_args"].extend(extra_chrome_args)
-        self._resource.start_custom_chrome(custom_chrome_setup)
-        logging.info('Chrome process restarted in CfM mode.')
-
-
-    def check_hangout_extension_context(self):
-        """Check to make sure hangout app launched.
-
-        @raises error.TestFail if any of the URL checks fails.
-        """
-        logging.info('Verifying extension contexts...')
-        ext_contexts = kiosk_utils.wait_for_kiosk_ext(
-                self._resource._browser, self._EXT_ID)
-        ext_urls = [context.EvaluateJavaScript('location.href;')
-                        for context in ext_contexts]
-        expected_urls = ['chrome-extension://' + self._EXT_ID + '/' + path
-                         for path in ['hangoutswindow.html?windowid=0',
-                                      'hangoutswindow.html?windowid=1',
-                                      'hangoutswindow.html?windowid=2',
-                                      '_generated_background_page.html']]
-        for url in ext_urls:
-            logging.info('Extension URL %s', url)
-            if url not in expected_urls:
-                raise error.TestFail(
-                    'Unexpected extension context urls, expected one of %s, '
-                    'got %s' % (expected_urls, url))
-        logging.info('Hangouts extension contexts verified.')
-
-
-    def take_screenshot(self, screenshot_name):
-        """
-        Takes a screenshot of what is currently displayed in png format.
-
-        The screenshot is stored in /tmp. Uses the low level graphics_utils API.
-
-        @param screenshot_name: Name of the screenshot file.
-        @returns The path to the screenshot or None.
-        """
-        try:
-            return graphics_utils.take_screenshot('/tmp', screenshot_name)
-        except Exception as e:
-            logging.warning('Taking screenshot failed', exc_info = e)
-            return None
-
-
-    def get_latest_callgrok_file_path(self):
-        """
-        @return The path to the latest callgrok log file, if any.
-        """
-        try:
-            return max(glob.iglob(self._CALLGROK_LOGS_PATTERN),
-                       key=os.path.getctime)
-        except ValueError as e:
-            logging.exception('Error while searching for callgrok logs.')
-            return None
-
-
-    def get_latest_pa_logs_file_path(self):
-        """
-        @return The path to the latest packaged app log file, if any.
-        """
-        try:
-            return max(self.get_all_pa_logs_file_path(), key=os.path.getctime)
-        except ValueError as e:
-            logging.exception('Error while searching for packaged app logs.')
-            return None
-
-
-    def get_all_pa_logs_file_path(self):
-        """
-        @return The paths to all packaged app log files, if any.
-        """
-        return glob.glob(self._PA_LOGS_PATTERN)
-
-    def reboot_device_with_chrome_api(self):
-        """Reboot device using chrome runtime API."""
-        ext_contexts = kiosk_utils.wait_for_kiosk_ext(
-                self._resource._browser, self._EXT_ID)
-        for context in ext_contexts:
-            context.WaitForDocumentReadyStateToBeInteractiveOrBetter()
-            ext_url = context.EvaluateJavaScript('document.URL')
-            background_url = ('chrome-extension://' + self._EXT_ID +
-                              '/_generated_background_page.html')
-            if ext_url in background_url:
-                context.ExecuteJavaScript('chrome.runtime.restart();')
-
-
-    def _get_webview_context_by_screen(self, screen):
-        """Get webview context that matches the screen param in the url.
-
-        @param screen: Value of the screen param, e.g. 'hotrod' or 'control'.
-        """
-        def _get_context():
-            try:
-                ctxs = kiosk_utils.get_webview_contexts(self._resource._browser,
-                                                        self._EXT_ID)
-                for ctx in ctxs:
-                    parse_result = six.moves.urllib.parse.urlparse(ctx.GetUrl())
-                    url_path = parse_result.path
-                    logging.info('Webview path: "%s"', url_path)
-                    url_query = parse_result.query
-                    logging.info('Webview query: "%s"', url_query)
-                    params = six.moves.urllib.parse.parse_qs(url_query,
-                                               keep_blank_values = True)
-                    is_oobe_node_screen = (
-                        # Hangouts Classic
-                        ('nooobestatesync' in params and 'oobedone' in params)
-                        # Hangouts Meet
-                        or ('oobesecondary' in url_path))
-                    if is_oobe_node_screen:
-                        # Skip the oobe node screen. Not doing this can cause
-                        # the wrong webview context to be returned.
-                        continue
-                    if 'screen' in params and params['screen'][0] == screen:
-                        return ctx
-            except Exception as e:
-                # Having a MIMO attached to the DUT causes a couple of webview
-                # destruction/construction operations during OOBE. If we query a
-                # destructed webview it will throw an exception. Instead of
-                # failing the test, we just swallow the exception.
-                logging.exception(
-                    "Exception occured while querying the webview contexts.")
-            return None
-
-        return utils.poll_for_condition(
-                    _get_context,
-                    exception=error.TestFail(
-                        'Webview with screen param "%s" not found.' % screen),
-                    timeout=self._DEFAULT_TIMEOUT,
-                    sleep_interval = 1)
-
-
-    def skip_oobe_after_enrollment(self):
-        """Skips oobe and goes to the app landing page after enrollment."""
-        # Due to a varying number of app restarts before we reach the OOB page
-        # we need to restart Chrome in order to make sure we have the devtools
-        # handle available and up-to-date.
-        self.restart_chrome_for_cfm()
-        self.check_hangout_extension_context()
-        self.wait_for_telemetry_commands()
-        self.wait_for_oobe_start_page()
-        self.skip_oobe_screen()
-
-
-    @property
-    def _webview_context(self):
-        """Get webview context object."""
-        return self._get_webview_context_by_screen(self._screen)
-
-
-    @property
-    def _cfmApi(self):
-        """Instantiate appropriate cfm api wrapper"""
-        if self._webview_context.EvaluateJavaScript(
-                "typeof window.hrRunDiagnosticsForTest == 'function'"):
-            return cfm_hangouts_api.CfmHangoutsAPI(self._webview_context)
-        if self._webview_context.EvaluateJavaScript(
-                "typeof window.hrTelemetryApi != 'undefined'"):
-            return cfm_meetings_api.CfmMeetingsAPI(self._webview_context)
-        raise error.TestFail('No hangouts or meet telemetry API available. '
-                             'Current url is "%s"' %
-                             self._webview_context.GetUrl())
-
-
-    def wait_for_telemetry_commands(self):
-        """Wait for telemetry commands."""
-        logging.info('Wait for Hangouts telemetry commands')
-        self._webview_context.WaitForJavaScriptCondition(
-            """typeof window.hrOobIsStartPageForTest == 'function'
-               || typeof window.hrTelemetryApi != 'undefined'
-            """,
-            timeout=self._DEFAULT_TIMEOUT)
-
-
-    def wait_for_meetings_in_call_page(self):
-        """Waits for the in-call page to launch."""
-        self.wait_for_telemetry_commands()
-        self._cfmApi.wait_for_meetings_in_call_page()
-
-
-    def wait_for_meetings_landing_page(self):
-        """Waits for the landing page screen."""
-        self.wait_for_telemetry_commands()
-        self._cfmApi.wait_for_meetings_landing_page()
-
-
-    # UI commands/functions
-    def wait_for_oobe_start_page(self):
-        """Wait for oobe start screen to launch."""
-        logging.info('Waiting for OOBE screen')
-        self._cfmApi.wait_for_oobe_start_page()
-
-
-    def skip_oobe_screen(self):
-        """Skip Chromebox for Meetings oobe screen."""
-        logging.info('Skipping OOBE screen')
-        self._cfmApi.skip_oobe_screen()
-
-
-    def is_oobe_start_page(self):
-        """Check if device is on CFM oobe start screen.
-
-        @return a boolean, based on oobe start page status.
-        """
-        return self._cfmApi.is_oobe_start_page()
-
-
-    # Hangouts commands/functions
-    def start_new_hangout_session(self, session_name):
-        """Start a new hangout session.
-
-        @param session_name: Name of the hangout session.
-        """
-        self._cfmApi.start_new_hangout_session(session_name)
-
-
-    def end_hangout_session(self):
-        """End current hangout session."""
-        self._cfmApi.end_hangout_session()
-
-
-    def is_in_hangout_session(self):
-        """Check if device is in hangout session.
-
-        @return a boolean, for hangout session state.
-        """
-        return self._cfmApi.is_in_hangout_session()
-
-
-    def is_ready_to_start_hangout_session(self):
-        """Check if device is ready to start a new hangout session.
-
-        @return a boolean for hangout session ready state.
-        """
-        return self._cfmApi.is_ready_to_start_hangout_session()
-
-
-    def join_meeting_session(self, session_name):
-        """Joins a meeting.
-
-        @param session_name: Name of the meeting session.
-        """
-        self._cfmApi.join_meeting_session(session_name)
-
-
-    def start_meeting_session(self):
-        """Start a meeting.
-
-        @return code for the started meeting
-        """
-        return self._cfmApi.start_meeting_session()
-
-
-    def end_meeting_session(self):
-        """End current meeting session."""
-        self._cfmApi.end_meeting_session()
-
-
-    def get_participant_count(self):
-        """Gets the total participant count in a call."""
-        return self._cfmApi.get_participant_count()
-
-
-    # Diagnostics commands/functions
-    def is_diagnostic_run_in_progress(self):
-        """Check if hotrod diagnostics is running.
-
-        @return a boolean for diagnostic run state.
-        """
-        return self._cfmApi.is_diagnostic_run_in_progress()
-
-
-    def wait_for_diagnostic_run_to_complete(self):
-        """Wait for hotrod diagnostics to complete."""
-        self._cfmApi.wait_for_diagnostic_run_to_complete()
-
-
-    def run_diagnostics(self):
-        """Run hotrod diagnostics."""
-        self._cfmApi.run_diagnostics()
-
-
-    def get_last_diagnostics_results(self):
-        """Get latest hotrod diagnostics results.
-
-        @return a dict with diagnostic test results.
-        """
-        return self._cfmApi.get_last_diagnostics_results()
-
-
-    # Mic audio commands/functions
-    def is_mic_muted(self):
-        """Check if mic is muted.
-
-        @return a boolean for mic mute state.
-        """
-        return self._cfmApi.is_mic_muted()
-
-
-    def mute_mic(self):
-        """Local mic mute from toolbar."""
-        self._cfmApi.mute_mic()
-
-
-    def unmute_mic(self):
-        """Local mic unmute from toolbar."""
-        self._cfmApi.unmute_mic()
-
-
-    def remote_mute_mic(self):
-        """Remote mic mute request from cPanel."""
-        self._cfmApi.remote_mute_mic()
-
-
-    def remote_unmute_mic(self):
-        """Remote mic unmute request from cPanel."""
-        self._cfmApi.remote_unmute_mic()
-
-
-    def get_mic_devices(self):
-        """Get all mic devices detected by hotrod.
-
-        @return a list of mic devices.
-        """
-        return self._cfmApi.get_mic_devices()
-
-
-    def get_preferred_mic(self):
-        """Get mic preferred for hotrod.
-
-        @return a str with preferred mic name.
-        """
-        return self._cfmApi.get_preferred_mic()
-
-
-    def set_preferred_mic(self, mic):
-        """Set preferred mic for hotrod.
-
-        @param mic: String with mic name.
-        """
-        self._cfmApi.set_preferred_mic(mic)
-
-
-    # Speaker commands/functions
-    def get_speaker_devices(self):
-        """Get all speaker devices detected by hotrod.
-
-        @return a list of speaker devices.
-        """
-        return self._cfmApi.get_speaker_devices()
-
-
-    def get_preferred_speaker(self):
-        """Get speaker preferred for hotrod.
-
-        @return a str with preferred speaker name.
-        """
-        return self._cfmApi.get_preferred_speaker()
-
-
-    def set_preferred_speaker(self, speaker):
-        """Set preferred speaker for hotrod.
-
-        @param speaker: String with speaker name.
-        """
-        self._cfmApi.set_preferred_speaker(speaker)
-
-
-    def set_speaker_volume(self, volume_level):
-        """Set speaker volume.
-
-        @param volume_level: String value ranging from 0-100 to set volume to.
-        """
-        self._cfmApi.set_speaker_volume(volume_level)
-
-
-    def get_speaker_volume(self):
-        """Get current speaker volume.
-
-        @return a str value with speaker volume level 0-100.
-        """
-        return self._cfmApi.get_speaker_volume()
-
-
-    def play_test_sound(self):
-        """Play test sound."""
-        self._cfmApi.play_test_sound()
-
-
-    # Camera commands/functions
-    def get_camera_devices(self):
-        """Get all camera devices detected by hotrod.
-
-        @return a list of camera devices.
-        """
-        return self._cfmApi.get_camera_devices()
-
-
-    def get_preferred_camera(self):
-        """Get camera preferred for hotrod.
-
-        @return a str with preferred camera name.
-        """
-        return self._cfmApi.get_preferred_camera()
-
-
-    def set_preferred_camera(self, camera):
-        """Set preferred camera for hotrod.
-
-        @param camera: String with camera name.
-        """
-        self._cfmApi.set_preferred_camera(camera)
-
-
-    def is_camera_muted(self):
-        """Check if camera is muted (turned off).
-
-        @return a boolean for camera muted state.
-        """
-        return self._cfmApi.is_camera_muted()
-
-
-    def mute_camera(self):
-        """Turned camera off."""
-        self._cfmApi.mute_camera()
-
-
-    def unmute_camera(self):
-        """Turned camera on."""
-        self._cfmApi.unmute_camera()
-
-    def move_camera(self, camera_motion):
-        """Move camera(PTZ commands).
-
-        @param camera_motion: Set of allowed commands
-            defined in cfmApi.move_camera.
-        """
-        self._cfmApi.move_camera(camera_motion)
-
-    def _convert_large_integers(self, o):
-        if type(o) is list:
-            return [self._convert_large_integers(x) for x in o]
-        elif type(o) is dict:
-            return {
-                    k: self._convert_large_integers(v)
-                    for k, v in six.iteritems(o)
-            }
-        else:
-            if type(o) is int and o > six.moves.xmlrpc_client.MAXINT:
-                return float(o)
-            else:
-                return o
-
-    def get_media_info_data_points(self):
-        """
-        Gets media info data points containing media stats.
-
-        These are exported on the window object when the
-        ExportMediaInfo mod is enabled.
-
-        @returns A list with dictionaries of media info data points.
-        @raises RuntimeError if the data point API is not available.
-        """
-        is_api_available_script = (
-                '"realtime" in window '
-                '&& "media" in realtime '
-                '&& "getMediaInfoDataPoints" in realtime.media')
-        if not self._webview_context.EvaluateJavaScript(
-                is_api_available_script):
-            raise RuntimeError(
-                    'realtime.media.getMediaInfoDataPoints not available. '
-                    'Is the ExportMediaInfo mod active? '
-                    'The mod is only available for Meet.')
-
-        # Sanitize the timestamp on the JS side to work around crbug.com/851482.
-        # Use JSON stringify/parse to create a deep copy of the data point.
-        get_data_points_js_script = """
-            var dataPoints = window.realtime.media.getMediaInfoDataPoints();
-            dataPoints.map((point) => {
-                var sanitizedPoint = JSON.parse(JSON.stringify(point));
-                sanitizedPoint["timestamp"] /= 1000.0;
-                return sanitizedPoint;
-            });"""
-
-        data_points = self._webview_context.EvaluateJavaScript(
-            get_data_points_js_script)
-        # XML RPC gives overflow errors when trying to send too large
-        # integers or longs, so we convert media stats to floats.
-        data_points = self._convert_large_integers(data_points)
-        return data_points
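The float conversion above exists because the XML-RPC integer type is limited
to the signed 32-bit range, so larger media-stat values cannot be marshalled.
A minimal standalone sketch of that limit using Python 3's xmlrpc.client (the
facade itself goes through six.moves.xmlrpc_client; the helper name below is
illustrative only):

import xmlrpc.client

def to_xmlrpc_safe(value):
    # MAXINT/MININT bound the XML-RPC <int> type to the signed 32-bit range;
    # anything outside it raises OverflowError when marshalled.
    if isinstance(value, int) and not (
            xmlrpc.client.MININT <= value <= xmlrpc.client.MAXINT):
        return float(value)
    return value

try:
    xmlrpc.client.dumps((2**31,))  # one past MAXINT
except OverflowError:
    print('int exceeds XML-RPC limits')
print(xmlrpc.client.dumps((to_xmlrpc_safe(2**31),)))  # marshals as <double>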
diff --git a/client/cros/multimedia/cfm_facade_native_unittest.py b/client/cros/multimedia/cfm_facade_native_unittest.py
deleted file mode 100644
index b2a6d53..0000000
--- a/client/cros/multimedia/cfm_facade_native_unittest.py
+++ /dev/null
@@ -1,143 +0,0 @@
-"""
-Unit tests for cfm_facade_native.py.
-
-To execute them run:
-utils/unittest_suite.py \
-    autotest_lib.client.cros.multimedia.cfm_facade_native_unittest
-"""
-
-# pylint: disable=missing-docstring
-
-import mock
-import unittest
-
-from autotest_lib.client.common_lib import error
-# Mock cros and graphics modules as they import telemetry which is not available
-# in unit tests.
-cros_mock = mock.Mock()
-graphics_mock = mock.Mock()
-modules = {'autotest_lib.client.common_lib.cros': cros_mock,
-           'autotest_lib.client.cros.graphics': graphics_mock}
-with mock.patch.dict('sys.modules', modules):
-    from autotest_lib.client.cros.multimedia import cfm_facade_native
-
-BACKGROUD_PAGE = '_generated_background_page.html'
-HANGOUT_WINDOW_0 = 'hangoutswindow.html?windowid=0'
-
-
-def create_mock_context(url):
-    ctx = mock.Mock()
-    ctx.GetUrl.return_value = url
-    return ctx
-
-
-class CfmFacadeNativeUnitTest(unittest.TestCase):
-
-    def setUp(self):
-        self.facade_resource = mock.Mock()
-        self.browser = self.facade_resource._browser
-        self.screen = 'hotrod'
-        self.cfm_facade = cfm_facade_native.CFMFacadeNative(
-            self.facade_resource, self.screen)
-        cfm_facade_native.CFMFacadeNative._DEFAULT_TIMEOUT = 1
-        self.extension_path = 'chrome-extension://' + self.cfm_facade._EXT_ID
-
-    @mock.patch.object(cfm_facade_native, 'kiosk_utils')
-    def test_check_hangout_extension_context(self, mock_kiosk_utils):
-        dummy_ctx = create_mock_context('foo.bar?screen=dummy')
-        dummy_ctx.EvaluateJavaScript.return_value = (
-            '%s/%s' % (self.extension_path, HANGOUT_WINDOW_0))
-
-        mock_kiosk_utils.wait_for_kiosk_ext.return_value = [dummy_ctx]
-        self.cfm_facade.check_hangout_extension_context()
-        mock_kiosk_utils.wait_for_kiosk_ext.assert_called_with(self.browser,
-            self.cfm_facade._EXT_ID)
-
-    @mock.patch.object(cfm_facade_native, 'kiosk_utils')
-    def test_webview_context_property(self, mock_kiosk_utils):
-        dummy_ctx = create_mock_context('foo.bar?screen=dummy')
-        hotrod_ctx = create_mock_context('www.qbc?screen=%s' % self.screen)
-        mock_kiosk_utils.get_webview_contexts.return_value = [dummy_ctx,
-                                                              hotrod_ctx]
-        self.assertEqual(self.cfm_facade._webview_context, hotrod_ctx)
-        mock_kiosk_utils.get_webview_contexts.assert_called_with(self.browser,
-            self.cfm_facade._EXT_ID)
-
-    @mock.patch.object(cfm_facade_native, 'kiosk_utils')
-    def test_get_webview_context_by_screen_two_screens(self, mock_kiosk_utils):
-        screen_param = 'foo'
-        dummy_ctx = create_mock_context('foo.bar?screen=dummy')
-        hotrod_ctx = create_mock_context('www.qbc?screen=%s' % screen_param)
-        mock_kiosk_utils.get_webview_contexts.return_value = [dummy_ctx,
-                                                              hotrod_ctx]
-        found_ctx = self.cfm_facade._get_webview_context_by_screen(screen_param)
-        self.assertEqual(found_ctx, hotrod_ctx)
-
-    @mock.patch.object(cfm_facade_native, 'kiosk_utils')
-    def test_get_webview_context_by_screen_only_hotrod_screen(self,
-                                                              mock_kiosk_utils):
-        screen_param = 'foo'
-        dummy_ctx = create_mock_context('foo.bar?screen=dummy')
-        hotrod_ctx = create_mock_context('www.qbc?screen=%s' % screen_param)
-        mock_kiosk_utils.get_webview_contexts.return_value = [hotrod_ctx]
-        found_ctx = self.cfm_facade._get_webview_context_by_screen(screen_param)
-        self.assertEqual(found_ctx, hotrod_ctx)
-
-    @mock.patch.object(cfm_facade_native, 'kiosk_utils')
-    def test_get_webview_context_by_screen_with_mimo_and_main_screen(
-            self, mock_kiosk_utils):
-        screen_param = 'foo'
-        mimo_ctx = create_mock_context('www.qbc?screen=control')
-        hotrod_ctx = create_mock_context('www.qbc?screen=%s' % screen_param)
-        mock_kiosk_utils.get_webview_contexts.return_value = [hotrod_ctx,
-                                                              mimo_ctx]
-        found_ctx = self.cfm_facade._get_webview_context_by_screen(screen_param)
-        self.assertEqual(found_ctx, hotrod_ctx)
-
-    @mock.patch.object(cfm_facade_native, 'kiosk_utils')
-    def test_get_webview_context_during_oobe_with_two_screens(self,
-                                                              mock_kiosk_utils):
-        screen_param = 'foo'
-        node_screen_ctx = create_mock_context(
-            'node.screen.com?screen=hotrod&nooobestatesync&oobedone')
-        main_screen_ctx = create_mock_context(
-            'mimo.screen.com?screen=%s' % screen_param)
-        mock_kiosk_utils.get_webview_contexts.return_value = [
-            node_screen_ctx, main_screen_ctx]
-        found_ctx = self.cfm_facade._get_webview_context_by_screen(screen_param)
-        self.assertEqual(found_ctx, main_screen_ctx)
-
-    @mock.patch.object(cfm_facade_native, 'kiosk_utils')
-    def test_get_webview_context_no_screen_found(self, mock_kiosk_utils):
-        foo_ctx = create_mock_context('node.screen.com?screen=foo')
-        bar_ctx = create_mock_context('mimo.screen.com?screen=bar')
-        mock_kiosk_utils.get_webview_contexts.return_value = [foo_ctx, bar_ctx]
-        with self.assertRaises(error.TestFail):
-            self.cfm_facade._get_webview_context_by_screen('unknown_param')
-
-    @mock.patch.object(cfm_facade_native, 'kiosk_utils')
-    def test_reboot_device_with_chrome_api(self, mock_kiosk_utils):
-        dummy_ctx = create_mock_context('foo.bar?screen=dummy')
-        dummy_ctx.EvaluateJavaScript.return_value = (
-            '%s/%s' % (self.extension_path, BACKGROUD_PAGE))
-        mock_kiosk_utils.wait_for_kiosk_ext.return_value = [dummy_ctx]
-        self.cfm_facade.reboot_device_with_chrome_api()
-        dummy_ctx.ExecuteJavaScript.assert_called_with(
-            'chrome.runtime.restart();')
-
-    @mock.patch.object(cfm_facade_native, 'kiosk_utils')
-    def test_large_integers_in_media_info_data_points(self, mock_kiosk_utils):
-        hotrod_ctx = create_mock_context('www.qbc?screen=%s' % self.screen)
-        mock_kiosk_utils.get_webview_contexts.return_value = [hotrod_ctx]
-        hotrod_ctx.EvaluateJavaScript.return_value = [{
-                'a': 123,
-                'b': {
-                        'c': 2**31 - 1,
-                        'd': 2**31
-                }
-        }, [-123]]
-        data_points = self.cfm_facade.get_media_info_data_points()
-        self.assertIsInstance(data_points[0]['a'], int)
-        self.assertIsInstance(data_points[0]['b']['c'], int)
-        self.assertIsInstance(data_points[0]['b']['d'], float)
-        self.assertIsInstance(data_points[1][0], int)
diff --git a/client/cros/multimedia/cfm_facade_unittest.py b/client/cros/multimedia/cfm_facade_unittest.py
new file mode 100644
index 0000000..86b3965
--- /dev/null
+++ b/client/cros/multimedia/cfm_facade_unittest.py
@@ -0,0 +1,145 @@
+"""
+Unit tests for cfm_facade.py.
+
+To execute them run:
+utils/unittest_suite.py \
+    autotest_lib.client.cros.multimedia.cfm_facade_unittest
+"""
+
+# pylint: disable=missing-docstring
+
+import unittest
+from unittest import mock
+
+from autotest_lib.client.common_lib import error
+# Mock cros and graphics modules as they import telemetry which is not available
+# in unit tests.
+cros_mock = mock.Mock()
+graphics_mock = mock.Mock()
+modules = {'autotest_lib.client.common_lib.cros': cros_mock,
+           'autotest_lib.client.cros.graphics': graphics_mock}
+with mock.patch.dict('sys.modules', modules):
+    from autotest_lib.client.cros.multimedia import cfm_facade
+
+BACKGROUND_PAGE = '_generated_background_page.html'
+HANGOUT_WINDOW_0 = 'hangoutswindow.html?windowid=0'
+
+
+def create_mock_context(url):
+    ctx = mock.Mock()
+    ctx.GetUrl.return_value = url
+    return ctx
+
+
+class CfmFacadeLocalUnitTest(unittest.TestCase):
+
+    def setUp(self):
+        self.facade_resource = mock.Mock()
+        self.browser = self.facade_resource._browser
+        self.screen = 'hotrod'
+        self.cfm_facade = cfm_facade.CFMFacadeLocal(
+            self.facade_resource, self.screen)
+        cfm_facade.CFMFacadeLocal._DEFAULT_TIMEOUT = 1
+        self.extension_path = 'chrome-extension://' + self.cfm_facade._EXT_ID
+
+    @mock.patch.object(cfm_facade, 'kiosk_utils')
+    def test_check_hangout_extension_context(self, mock_kiosk_utils):
+        stub_ctx = create_mock_context('foo.bar?screen=stub')
+        stub_ctx.EvaluateJavaScript.return_value = (
+                '%s/%s' % (self.extension_path, HANGOUT_WINDOW_0))
+
+        mock_kiosk_utils.wait_for_kiosk_ext.return_value = [stub_ctx]
+        self.cfm_facade.check_hangout_extension_context()
+        mock_kiosk_utils.wait_for_kiosk_ext.assert_called_with(self.browser,
+            self.cfm_facade._EXT_ID)
+
+    @mock.patch.object(cfm_facade, 'kiosk_utils')
+    def test_webview_context_property(self, mock_kiosk_utils):
+        stub_ctx = create_mock_context('foo.bar?screen=stub')
+        hotrod_ctx = create_mock_context('www.qbc?screen=%s' % self.screen)
+        mock_kiosk_utils.get_webview_contexts.return_value = [
+                stub_ctx, hotrod_ctx
+        ]
+        self.assertEqual(self.cfm_facade._webview_context, hotrod_ctx)
+        mock_kiosk_utils.get_webview_contexts.assert_called_with(self.browser,
+            self.cfm_facade._EXT_ID)
+
+    @mock.patch.object(cfm_facade, 'kiosk_utils')
+    def test_get_webview_context_by_screen_two_screens(self, mock_kiosk_utils):
+        screen_param = 'foo'
+        stub_ctx = create_mock_context('foo.bar?screen=stub')
+        hotrod_ctx = create_mock_context('www.qbc?screen=%s' % screen_param)
+        mock_kiosk_utils.get_webview_contexts.return_value = [
+                stub_ctx, hotrod_ctx
+        ]
+        found_ctx = self.cfm_facade._get_webview_context_by_screen(screen_param)
+        self.assertEqual(found_ctx, hotrod_ctx)
+
+    @mock.patch.object(cfm_facade, 'kiosk_utils')
+    def test_get_webview_context_by_screen_only_hotrod_screen(self,
+                                                              mock_kiosk_utils):
+        screen_param = 'foo'
+        stub_ctx = create_mock_context('foo.bar?screen=stub')
+        hotrod_ctx = create_mock_context('www.qbc?screen=%s' % screen_param)
+        mock_kiosk_utils.get_webview_contexts.return_value = [hotrod_ctx]
+        found_ctx = self.cfm_facade._get_webview_context_by_screen(screen_param)
+        self.assertEqual(found_ctx, hotrod_ctx)
+
+    @mock.patch.object(cfm_facade, 'kiosk_utils')
+    def test_get_webview_context_by_screen_with_mimo_and_main_screen(
+            self, mock_kiosk_utils):
+        screen_param = 'foo'
+        mimo_ctx = create_mock_context('www.qbc?screen=control')
+        hotrod_ctx = create_mock_context('www.qbc?screen=%s' % screen_param)
+        mock_kiosk_utils.get_webview_contexts.return_value = [hotrod_ctx,
+                                                              mimo_ctx]
+        found_ctx = self.cfm_facade._get_webview_context_by_screen(screen_param)
+        self.assertEqual(found_ctx, hotrod_ctx)
+
+    @mock.patch.object(cfm_facade, 'kiosk_utils')
+    def test_get_webview_context_during_oobe_with_two_screens(self,
+                                                              mock_kiosk_utils):
+        screen_param = 'foo'
+        node_screen_ctx = create_mock_context(
+            'node.screen.com?screen=hotrod&nooobestatesync&oobedone')
+        main_screen_ctx = create_mock_context(
+            'mimo.screen.com?screen=%s' % screen_param)
+        mock_kiosk_utils.get_webview_contexts.return_value = [
+            node_screen_ctx, main_screen_ctx]
+        found_ctx = self.cfm_facade._get_webview_context_by_screen(screen_param)
+        self.assertEqual(found_ctx, main_screen_ctx)
+
+    @mock.patch.object(cfm_facade, 'kiosk_utils')
+    def test_get_webview_context_no_screen_found(self, mock_kiosk_utils):
+        foo_ctx = create_mock_context('node.screen.com?screen=foo')
+        bar_ctx = create_mock_context('mimo.screen.com?screen=bar')
+        mock_kiosk_utils.get_webview_contexts.return_value = [foo_ctx, bar_ctx]
+        with self.assertRaises(error.TestFail):
+            self.cfm_facade._get_webview_context_by_screen('unknown_param')
+
+    @mock.patch.object(cfm_facade, 'kiosk_utils')
+    def test_reboot_device_with_chrome_api(self, mock_kiosk_utils):
+        stub_ctx = create_mock_context('foo.bar?screen=stub')
+        stub_ctx.EvaluateJavaScript.return_value = (
+                '%s/%s' % (self.extension_path, BACKGROUND_PAGE))
+        mock_kiosk_utils.wait_for_kiosk_ext.return_value = [stub_ctx]
+        self.cfm_facade.reboot_device_with_chrome_api()
+        stub_ctx.ExecuteJavaScript.assert_called_with(
+                'chrome.runtime.restart();')
+
+    @mock.patch.object(cfm_facade, 'kiosk_utils')
+    def test_large_integers_in_media_info_data_points(self, mock_kiosk_utils):
+        hotrod_ctx = create_mock_context('www.qbc?screen=%s' % self.screen)
+        mock_kiosk_utils.get_webview_contexts.return_value = [hotrod_ctx]
+        hotrod_ctx.EvaluateJavaScript.return_value = [{
+                'a': 123,
+                'b': {
+                        'c': 2**31 - 1,
+                        'd': 2**31
+                }
+        }, [-123]]
+        data_points = self.cfm_facade.get_media_info_data_points()
+        self.assertIsInstance(data_points[0]['a'], int)
+        self.assertIsInstance(data_points[0]['b']['c'], int)
+        self.assertIsInstance(data_points[0]['b']['d'], float)
+        self.assertIsInstance(data_points[1][0], int)
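The test above can only import cfm_facade after the telemetry-backed modules
have been replaced in sys.modules. A minimal sketch of that patching pattern,
independent of autotest ('telemetry' stands in for any unavailable
dependency):

from unittest import mock

# Install a mock under the dependency's name before importing code that needs
# it; any import executed inside the block resolves to the mock object.
fake_dependency = mock.Mock()
with mock.patch.dict('sys.modules', {'telemetry': fake_dependency}):
    import telemetry
    assert telemetry is fake_dependency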
diff --git a/client/cros/multimedia/display_facade.py b/client/cros/multimedia/display_facade.py
new file mode 100644
index 0000000..c6b51f2
--- /dev/null
+++ b/client/cros/multimedia/display_facade.py
@@ -0,0 +1,807 @@
+# Lint as: python2, python3
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Facade to access the display-related functionality."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import logging
+import multiprocessing
+import numpy
+import os
+import re
+import shutil
+import time
+import json
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import utils as common_utils
+from autotest_lib.client.common_lib.cros import retry
+from autotest_lib.client.cros import constants
+from autotest_lib.client.cros.graphics import graphics_utils
+from autotest_lib.client.cros.multimedia import facade_resource
+from autotest_lib.client.cros.multimedia import image_generator
+from autotest_lib.client.cros.power import sys_power
+from six.moves import range
+from telemetry.internal.browser import web_contents
+
+class TimeoutException(Exception):
+    """Timeout Exception class."""
+    pass
+
+
+_FLAKY_CALL_RETRY_TIMEOUT_SEC = 60
+_FLAKY_DISPLAY_CALL_RETRY_DELAY_SEC = 2
+
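+# Retries flaky display calls that raise KeyError or error.CmdError, waiting
+# 2 seconds between attempts for up to 60 seconds.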
+_retry_display_call = retry.retry(
+        (KeyError, error.CmdError),
+        timeout_min=_FLAKY_CALL_RETRY_TIMEOUT_SEC / 60.0,
+        delay_sec=_FLAKY_DISPLAY_CALL_RETRY_DELAY_SEC)
+
+
+class DisplayFacadeLocal(object):
+    """Facade to access the display-related functionality.
+
+    The methods inside this class only accept Python core types.
+    """
+
+    CALIBRATION_IMAGE_PATH = '/tmp/calibration.png'
+    MINIMUM_REFRESH_RATE_EXPECTED = 25.0
+    DELAY_TIME = 3
+    MAX_TYPEC_PORT = 6
+
+    def __init__(self, resource):
+        """Initializes a DisplayFacadeLocal.
+
+        @param resource: A FacadeResource object.
+        """
+        self._resource = resource
+        self._image_generator = image_generator.ImageGenerator()
+
+
+    @facade_resource.retry_chrome_call
+    def get_display_info(self):
+        """Gets the display info from Chrome.system.display API.
+
+        @return array of dict for display info.
+        """
+        extension = self._resource.get_extension(
+                constants.DISPLAY_TEST_EXTENSION)
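+        # chrome.system.display.getInfo is asynchronous; stash its result on
+        # the window object and poll until the callback has populated it.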
+        extension.ExecuteJavaScript('window.__display_info = null;')
+        extension.ExecuteJavaScript(
+                "chrome.system.display.getInfo(function(info) {"
+                "window.__display_info = info;})")
+        utils.wait_for_value(lambda: (
+                extension.EvaluateJavaScript("window.__display_info") != None),
+                expected_value=True)
+        return extension.EvaluateJavaScript("window.__display_info")
+
+
+    @facade_resource.retry_chrome_call
+    def get_window_info(self):
+        """Gets the current window info from Chrome.system.window API.
+
+        @return a dict for the information of the current window.
+        """
+        extension = self._resource.get_extension()
+        extension.ExecuteJavaScript('window.__window_info = null;')
+        extension.ExecuteJavaScript(
+                "chrome.windows.getCurrent(function(info) {"
+                "window.__window_info = info;})")
+        utils.wait_for_value(lambda: (
+                extension.EvaluateJavaScript("window.__window_info") != None),
+                expected_value=True)
+        return extension.EvaluateJavaScript("window.__window_info")
+
+
+    @facade_resource.retry_chrome_call
+    def create_window(self, url='chrome://newtab'):
+        """Creates a new window from chrome.windows.create API.
+
+        @param url: Optional URL for the new window.
+
+        @return Identifier for the new window.
+
+        @raise TimeoutException if it fails.
+        """
+        extension = self._resource.get_extension()
+
+        extension.ExecuteJavaScript(
+                """
+                var __new_window_id = null;
+                chrome.windows.create(
+                        {url: '%s'},
+                        function(win) {
+                            __new_window_id = win.id});
+                """ % (url)
+        )
+        extension.WaitForJavaScriptCondition(
+                "__new_window_id !== null",
+                timeout=web_contents.DEFAULT_WEB_CONTENTS_TIMEOUT)
+
+        return extension.EvaluateJavaScript("__new_window_id")
+
+
+    @facade_resource.retry_chrome_call
+    def update_window(self, window_id, state=None, bounds=None):
+        """Updates an existing window using the chrome.windows.update API.
+
+        @param window_id: Identifier for the window to update.
+        @param state: Optional string to set the state such as 'normal',
+                      'maximized', or 'fullscreen'.
+        @param bounds: Optional dictionary with keys top, left, width, and
+                       height to reposition the window.
+
+        @return True if success.
+
+        @raise TimeoutException if it fails.
+        """
+        extension = self._resource.get_extension()
+        params = {}
+
+        if state:
+            params['state'] = state
+        if bounds:
+            params['top'] = bounds['top']
+            params['left'] = bounds['left']
+            params['width'] = bounds['width']
+            params['height'] = bounds['height']
+
+        if not params:
+            logging.info('Nothing to update for window_id={}'.format(window_id))
+            return True
+
+        extension.ExecuteJavaScript(
+                """
+                var __status = 'Running';
+                chrome.windows.update(%d, %s,
+                        function(win) {
+                            __status = 'Done'});
+                """ % (window_id, json.dumps(params))
+        )
+        extension.WaitForJavaScriptCondition(
+                "__status == 'Done'",
+                timeout=web_contents.DEFAULT_WEB_CONTENTS_TIMEOUT)
+
+        return True
+
+
+    def _get_display_by_id(self, display_id):
+        """Gets a display by ID.
+
+        @param display_id: id of the display.
+
+        @return: A dict of various display info.
+        """
+        for display in self.get_display_info():
+            if display['id'] == display_id:
+                return display
+        raise RuntimeError('Cannot find display ' + display_id)
+
+
+    def get_display_modes(self, display_id):
+        """Gets all the display modes for the specified display.
+
+        @param display_id: id of the display to get modes from.
+
+        @return: A list of DisplayMode dicts.
+        """
+        display = self._get_display_by_id(display_id)
+        return display['modes']
+
+
+    def get_display_rotation(self, display_id):
+        """Gets the display rotation for the specified display.
+
+        @param display_id: id of the display to get the rotation from.
+
+        @return: Degree of rotation.
+        """
+        display = self._get_display_by_id(display_id)
+        return display['rotation']
+
+
+    def get_display_notifications(self):
+        """Gets the display notifications
+
+        @return: Returns a list of display related notifications only.
+        """
+        display_notifications = []
+        for notification in self._resource.get_visible_notifications():
+            if notification['id'] == 'chrome://settings/display':
+                display_notifications.append(notification)
+        return display_notifications
+
+
+    def set_display_rotation(self, display_id, rotation,
+                             delay_before_rotation=0, delay_after_rotation=0):
+        """Sets the display rotation for the specified display.
+
+        @param display_id: id of the display to set the rotation for.
+        @param rotation: degrees of rotation
+        @param delay_before_rotation: time in seconds to delay before rotation
+        @param delay_after_rotation: time in seconds to delay after rotation
+        """
+        time.sleep(delay_before_rotation)
+        extension = self._resource.get_extension(
+                constants.DISPLAY_TEST_EXTENSION)
+        extension.ExecuteJavaScript(
+                """
+                window.__set_display_rotation_has_error = null;
+                chrome.system.display.setDisplayProperties('%(id)s',
+                    {"rotation": %(rotation)d}, () => {
+                    if (chrome.runtime.lastError) {
+                        console.error('Failed to set display rotation',
+                            chrome.runtime.lastError);
+                        window.__set_display_rotation_has_error = "failure";
+                    } else {
+                        window.__set_display_rotation_has_error = "success";
+                    }
+                });
+                """
+                % {'id': display_id, 'rotation': rotation}
+        )
+        utils.wait_for_value(lambda: (
+                extension.EvaluateJavaScript(
+                    'window.__set_display_rotation_has_error') != None),
+                expected_value=True)
+        time.sleep(delay_after_rotation)
+        result = extension.EvaluateJavaScript(
+                'window.__set_display_rotation_has_error')
+        if result != 'success':
+            raise RuntimeError('Failed to set display rotation: %r' % result)
+
+
+    def get_available_resolutions(self, display_id):
+        """Gets the resolutions from the specified display.
+
+        @return a list of (width, height) tuples.
+        """
+        display = self._get_display_by_id(display_id)
+        modes = display['modes']
+        if 'widthInNativePixels' not in modes[0]:
+            raise RuntimeError('Cannot find widthInNativePixels attribute')
+        if display['isInternal']:
+            logging.info("Getting resolutions of internal display")
+            return list(set([(mode['width'], mode['height']) for mode in
+                             modes]))
+        return list(set([(mode['widthInNativePixels'],
+                          mode['heightInNativePixels']) for mode in modes]))
+
+
+    def has_internal_display(self):
+        """Returns whether the device has an internal display.
+
+        @return whether the device has an internal display
+        """
+        return len([d for d in self.get_display_info() if d['isInternal']]) > 0
+
+
+    def get_internal_display_id(self):
+        """Gets the internal display id.
+
+        @return the id of the internal display.
+        """
+        for display in self.get_display_info():
+            if display['isInternal']:
+                return display['id']
+        raise RuntimeError('Cannot find internal display')
+
+
+    def get_first_external_display_id(self):
+        """Gets the first external display id.
+
+        @return the id of the first external display; -1 if not found.
+        """
+        # Get the first external and enabled display
+        for display in self.get_display_info():
+            if display['isEnabled'] and not display['isInternal']:
+                return display['id']
+        return -1
+
+
+    def set_resolution(self, display_id, width, height, timeout=3):
+        """Sets the resolution of the specified display.
+
+        @param display_id: id of the display to set resolution for.
+        @param width: width of the resolution
+        @param height: height of the resolution
+        @param timeout: maximal time in seconds waiting for the new resolution
+                to settle in.
+        @raise TimeoutException when the operation is timed out.
+        """
+
+        extension = self._resource.get_extension(
+                constants.DISPLAY_TEST_EXTENSION)
+        extension.ExecuteJavaScript(
+                """
+                window.__set_resolution_progress = null;
+                chrome.system.display.getInfo((info_array) => {
+                    var mode;
+                    for (var info of info_array) {
+                        if (info['id'] == '%(id)s') {
+                            for (var m of info['modes']) {
+                                if (m['width'] == %(width)d &&
+                                    m['height'] == %(height)d) {
+                                    mode = m;
+                                    break;
+                                }
+                            }
+                            break;
+                        }
+                    }
+                    if (mode === undefined) {
+                        console.error('Failed to select the resolution ' +
+                            '%(width)dx%(height)d');
+                        window.__set_resolution_progress = "mode not found";
+                        return;
+                    }
+
+                    chrome.system.display.setDisplayProperties('%(id)s',
+                        {'displayMode': mode}, () => {
+                            if (chrome.runtime.lastError) {
+                                window.__set_resolution_progress = "failed: " +
+                                    chrome.runtime.lastError.message;
+                            } else {
+                                window.__set_resolution_progress = "succeeded";
+                            }
+                        }
+                    );
+                });
+                """
+                % {'id': display_id, 'width': width, 'height': height}
+        )
+        utils.wait_for_value(lambda: (
+                extension.EvaluateJavaScript(
+                    'window.__set_resolution_progress') != None),
+                expected_value=True)
+        result = extension.EvaluateJavaScript(
+                'window.__set_resolution_progress')
+        if result != 'succeeded':
+            raise RuntimeError('Failed to set resolution: %r' % result)
+
+
+    @_retry_display_call
+    def get_external_resolution(self):
+        """Gets the resolution of the external screen.
+
+        @return The resolution tuple (width, height)
+        """
+        return graphics_utils.get_external_resolution()
+
+    def get_internal_resolution(self):
+        """Gets the resolution of the internal screen.
+
+        @return The resolution tuple (width, height) or None if internal screen
+                is not available
+        """
+        for display in self.get_display_info():
+            if display['isInternal']:
+                bounds = display['bounds']
+                return (bounds['width'], bounds['height'])
+        return None
+
+
+    def set_content_protection(self, state):
+        """Sets the content protection of the external screen.
+
+        @param state: One of the states 'Undesired', 'Desired', or 'Enabled'
+        """
+        connector = self.get_external_connector_name()
+        graphics_utils.set_content_protection(connector, state)
+
+
+    def get_content_protection(self):
+        """Gets the state of the content protection.
+
+        @return: A string of the state, like 'Undesired', 'Desired', or
+                 'Enabled'. False if not supported.
+        """
+        connector = self.get_external_connector_name()
+        return graphics_utils.get_content_protection(connector)
+
+
+    def get_external_crtc_id(self):
+        """Gets the external crtc.
+
+        @return The id of the external crtc."""
+        return graphics_utils.get_external_crtc_id()
+
+
+    def get_internal_crtc_id(self):
+        """Gets the internal crtc.
+
+        @return The id of the internal crtc."""
+        return graphics_utils.get_internal_crtc_id()
+
+
+    def take_internal_screenshot(self, path):
+        """Takes internal screenshot.
+
+        @param path: path to image file.
+        """
+        self.take_screenshot_crtc(path, self.get_internal_crtc_id())
+
+
+    def take_external_screenshot(self, path):
+        """Takes external screenshot.
+
+        @param path: path to image file.
+        """
+        self.take_screenshot_crtc(path, self.get_external_crtc_id())
+
+
+    def take_screenshot_crtc(self, path, id):
+        """Captures the DUT screenshot, use id for selecting screen.
+
+        @param path: path to image file.
+        @param id: The id of the crtc to screenshot.
+        """
+
+        graphics_utils.take_screenshot_crop(path, crtc_id=id)
+        return True
+
+
+    def save_calibration_image(self, path):
+        """Save the calibration image to the given path.
+
+        @param path: path to image file.
+        """
+        shutil.copy(self.CALIBRATION_IMAGE_PATH, path)
+        return True
+
+
+    def take_tab_screenshot(self, output_path, url_pattern=None):
+        """Takes a screenshot of the tab specified by the given url pattern.
+
+        @param output_path: A path of the output file.
+        @param url_pattern: A string of url pattern used to search for tabs.
+                            Default is to look for .svg image.
+        """
+        if url_pattern is None:
+            # If no URL pattern is provided, defaults to capture the first
+            # tab that shows SVG image.
+            url_pattern = '.svg'
+
+        tabs = self._resource.get_tabs()
+        for i in range(0, len(tabs)):
+            if url_pattern in tabs[i].url:
+                data = tabs[i].Screenshot(timeout=5)
+                # Flip the colors from BGR to RGB.
+                data = numpy.fliplr(data.reshape(-1, 3)).reshape(data.shape)
+                data.tofile(output_path)
+                break
+        return True
+
+
+    def toggle_mirrored(self):
+        """Toggles mirrored."""
+        graphics_utils.screen_toggle_mirrored()
+        return True
+
+
+    def hide_cursor(self):
+        """Hides mouse cursor."""
+        graphics_utils.hide_cursor()
+        return True
+
+
+    def hide_typing_cursor(self):
+        """Hides typing cursor."""
+        graphics_utils.hide_typing_cursor()
+        return True
+
+
+    def is_mirrored_enabled(self):
+        """Checks the mirrored state.
+
+        @return True if mirrored mode is enabled.
+        """
+        return bool(self.get_display_info()[0]['mirroringSourceId'])
+
+
+    def set_mirrored(self, is_mirrored):
+        """Sets mirrored mode.
+
+        @param is_mirrored: True or False to indicate mirrored state.
+        @return True if success, False otherwise.
+        """
+        if self.is_mirrored_enabled() == is_mirrored:
+            return True
+
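+        # Toggling mirrored mode does not always take effect immediately, so
+        # retry up to 4 times, waiting up to 3 seconds for the state to match.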
+        retries = 4
+        while retries > 0:
+            self.toggle_mirrored()
+            result = utils.wait_for_value(self.is_mirrored_enabled,
+                                          expected_value=is_mirrored,
+                                          timeout_sec=3)
+            if result == is_mirrored:
+                return True
+            retries -= 1
+        return False
+
+
+    def is_display_primary(self, internal=True):
+        """Checks if internal screen is primary display.
+
+        @param internal: is internal/external screen primary status requested
+        @return boolean True if internal display is primary.
+        """
+        for info in self.get_display_info():
+            if info['isInternal'] == internal and info['isPrimary']:
+                return True
+        return False
+
+
+    def suspend_resume(self, suspend_time=10):
+        """Suspends the DUT for a given time in second.
+
+        @param suspend_time: Suspend time in second.
+        """
+        sys_power.do_suspend(suspend_time)
+        return True
+
+
+    def suspend_resume_bg(self, suspend_time=10):
+        """Suspends the DUT for a given time in second in the background.
+
+        @param suspend_time: Suspend time in second.
+        """
+        process = multiprocessing.Process(target=self.suspend_resume,
+                                          args=(suspend_time,))
+        process.start()
+        return True
+
+
+    @_retry_display_call
+    def get_external_connector_name(self):
+        """Gets the name of the external output connector.
+
+        @return The external output connector name as a string, if any.
+                Otherwise, return False.
+        """
+        return graphics_utils.get_external_connector_name()
+
+
+    def get_internal_connector_name(self):
+        """Gets the name of the internal output connector.
+
+        @return The internal output connector name as a string, if any.
+                Otherwise, return False.
+        """
+        return graphics_utils.get_internal_connector_name()
+
+
+    def wait_external_display_connected(self, display):
+        """Waits for the specified external display to be connected.
+
+        @param display: The display name as a string, like 'HDMI1', or
+                        False if no external display is expected.
+        @return: True if display is connected; False otherwise.
+        """
+        result = utils.wait_for_value(self.get_external_connector_name,
+                                      expected_value=display)
+        return result == display
+
+
+    @facade_resource.retry_chrome_call
+    def move_to_display(self, display_id):
+        """Moves the current window to the indicated display.
+
+        @param display_id: The id of the indicated display.
+        @return True if success.
+
+        @raise TimeoutException if it fails.
+        """
+        display_info = self._get_display_by_id(display_id)
+        if not display_info['isEnabled']:
+            raise RuntimeError('Cannot find the indicated display')
+        target_bounds = display_info['bounds']
+
+        extension = self._resource.get_extension()
+        # If the area of bounds is empty (here we achieve this by setting
+        # width and height to zero), the window_sizer will automatically
+        # determine an area which is visible and fits on the screen.
+        # For more details, see chrome/browser/ui/window_sizer.cc
+        # Without setting state to 'normal', if the current state is
+        # 'minimized', 'maximized' or 'fullscreen', the setting of
+        # 'left', 'top', 'width' and 'height' will be ignored.
+        # For more details, see chrome/browser/extensions/api/tabs/tabs_api.cc
+        extension.ExecuteJavaScript(
+                """
+                var __status = 'Running';
+                chrome.windows.update(
+                        chrome.windows.WINDOW_ID_CURRENT,
+                        {left: %d, top: %d, width: 0, height: 0,
+                         state: 'normal'},
+                        function(info) {
+                            if (info.left == %d && info.top == %d &&
+                                info.state == 'normal')
+                                __status = 'Done'; });
+                """
+                % (target_bounds['left'], target_bounds['top'],
+                   target_bounds['left'], target_bounds['top'])
+        )
+        extension.WaitForJavaScriptCondition(
+                "__status == 'Done'",
+                timeout=web_contents.DEFAULT_WEB_CONTENTS_TIMEOUT)
+        return True
+
+
+    def is_fullscreen_enabled(self):
+        """Checks the fullscreen state.
+
+        @return True if fullscreen mode is enabled.
+        """
+        return self.get_window_info()['state'] == 'fullscreen'
+
+
+    def set_fullscreen(self, is_fullscreen):
+        """Sets the current window to full screen.
+
+        @param is_fullscreen: True or False to indicate fullscreen state.
+        @return True if success, False otherwise.
+        """
+        extension = self._resource.get_extension()
+        if not extension:
+            raise RuntimeError('Autotest extension not found')
+
+        if is_fullscreen:
+            window_state = "fullscreen"
+        else:
+            window_state = "normal"
+        extension.ExecuteJavaScript(
+                """
+                var __status = 'Running';
+                chrome.windows.update(
+                        chrome.windows.WINDOW_ID_CURRENT,
+                        {state: '%s'},
+                        function() { __status = 'Done'; });
+                """
+                % window_state)
+        utils.wait_for_value(lambda: (
+                extension.EvaluateJavaScript('__status') == 'Done'),
+                expected_value=True)
+        return self.is_fullscreen_enabled() == is_fullscreen
+
+
+    def load_url(self, url):
+        """Loads the given url in a new tab. The new tab will be active.
+
+        @param url: The url to load as a string.
+        @return a str, the tab descriptor of the opened tab.
+        """
+        return self._resource.load_url(url)
+
+
+    def load_calibration_image(self, resolution):
+        """Opens a new tab and loads a full screen calibration
+           image generated locally on the DUT.
+
+        @param resolution: A tuple (width, height) of resolution.
+        @return a str, the tab descriptor of the opened tab.
+        """
+        path = self.CALIBRATION_IMAGE_PATH
+        self._image_generator.generate_image(resolution[0], resolution[1], path)
+        os.chmod(path, 0o644)
+        tab_descriptor = self.load_url('file://%s' % path)
+        return tab_descriptor
+
+
+    def load_color_sequence(self, tab_descriptor, color_sequence):
+        """Displays a series of colors on full screen on the tab.
+        tab_descriptor is returned by any open tab API of display facade.
+        e.g.,
+        tab_descriptor = load_url('about:blank')
+        load_color_sequence(tab_descriptor, color)
+
+        @param tab_descriptor: Indicate which tab to test.
+        @param color_sequence: An integer list for switching colors.
+        @return A list of the timestamp for each switch.
+        """
+        tab = self._resource.get_tab_by_descriptor(tab_descriptor)
+        color_sequence_for_java_script = (
+                'var color_sequence = [' +
+                ','.join("'#%06X'" % x for x in color_sequence) +
+                '];')
+        # Paints are synchronized to the refresh rate of the screen by
+        # window.requestAnimationFrame.
+        tab.ExecuteJavaScript(color_sequence_for_java_script + """
+            function render(timestamp) {
+                window.timestamp_list.push(timestamp);
+                if (window.count < color_sequence.length) {
+                    document.body.style.backgroundColor =
+                            color_sequence[count];
+                    window.count++;
+                    window.requestAnimationFrame(render);
+                }
+            }
+            window.count = 0;
+            window.timestamp_list = [];
+            window.requestAnimationFrame(render);
+            """)
+
+        # The waiting time is decided by the following concerns:
+        # 1. MINIMUM_REFRESH_RATE_EXPECTED: the minimum refresh rate
+        #    we expect. The real refresh rate depends not only on the
+        #    hardware but also on drivers and browsers. Most graphics
+        #    devices support at least 60fps for a single monitor; under
+        #    mirror mode both frame buffers need to be updated for each
+        #    input frame, so the refresh rate drops by half. We therefore
+        #    set the expectation a little below 30 (= 60/2) to make the
+        #    timeout more tolerant.
+        # 2. DELAY_TIME: extra wait time added to the timeout.
+        tab.WaitForJavaScriptCondition(
+                'window.count == color_sequence.length',
+                timeout=(
+                    (len(color_sequence) / self.MINIMUM_REFRESH_RATE_EXPECTED)
+                    + self.DELAY_TIME))
+        return tab.EvaluateJavaScript("window.timestamp_list")
+
+
+    def close_tab(self, tab_descriptor):
+        """Disables fullscreen and closes the tab of the given tab descriptor.
+        tab_descriptor is returned by any open tab API of display facade.
+        e.g.,
+        1.
+        tab_descriptor = load_url(url)
+        close_tab(tab_descriptor)
+
+        2.
+        tab_descriptor = load_calibration_image(resolution)
+        close_tab(tab_descriptor)
+
+        @param tab_descriptor: Indicates which tab to close.
+        """
+        if tab_descriptor:
+            # set_fullscreen(False) is necessary here because of a bug in
+            # tabs.Close(). If the current state is fullscreen and we call
+            # close_tab() without setting the state back to normal, it
+            # cancels fullscreen mode without changing the system
+            # configuration, so the next time someone calls
+            # set_fullscreen(True) the function finds that the current state
+            # is already 'fullscreen' (though it is not) and does nothing,
+            # which breaks all of the following tests.
+            self.set_fullscreen(False)
+            self._resource.close_tab(tab_descriptor)
+        else:
+            logging.error('close_tab: not a valid tab_descriptor')
+
+        return True
+
+
+    def reset_connector_if_applicable(self, connector_type):
+        """Resets Type-C video connector from host end if applicable.
+
+        This is a workaround sequence, since the Type-C dongle sometimes
+        becomes corrupted and needs to be re-plugged.
+
+        @param connector_type: A string, like "VGA", "DVI", "HDMI", or "DP".
+        """
+        if connector_type != 'HDMI' and connector_type != 'DP':
+            return
+        # Decide if we need to add --name=cros_pd
+        usbpd_command = 'ectool --name=cros_pd usbpd'
+        try:
+            common_utils.run('%s 0' % usbpd_command)
+        except error.CmdError:
+            usbpd_command = 'ectool usbpd'
+
+        port = 0
+        while port < self.MAX_TYPEC_PORT:
+            # We use usbpd to get Role information and then power cycle the
+            # SRC one.
+            command = '%s %d' % (usbpd_command, port)
+            try:
+                output = common_utils.run(command).stdout
+                if re.compile('Role.*SRC').search(output):
+                    logging.info('power-cycle Type-C port %d', port)
+                    common_utils.run('%s sink' % command)
+                    common_utils.run('%s auto' % command)
+                port += 1
+            except error.CmdError:
+                break
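
The color-sequence and calibration-image methods above are normally chained together from a client test. The sketch below is a minimal illustration of that flow, not part of this change: it assumes the autotest client environment, a logged-in chrome.Chrome session with the autotest extension, and an illustrative resolution and color list. The facade and resource class names are the ones introduced by this diff.

from autotest_lib.client.common_lib.cros import chrome
from autotest_lib.client.cros.multimedia import display_facade
from autotest_lib.client.cros.multimedia import facade_resource

with chrome.Chrome(autotest_ext=True) as cr:
    facade = display_facade.DisplayFacadeLocal(
            facade_resource.FacadeResource(chrome_object=cr))
    # Show a full-screen calibration image (resolution chosen for illustration).
    tab = facade.load_calibration_image((1920, 1080))
    facade.set_fullscreen(True)
    # Flash three colors; the wait inside load_color_sequence() is bounded by
    # len(colors) / MINIMUM_REFRESH_RATE_EXPECTED + DELAY_TIME seconds.
    timestamps = facade.load_color_sequence(tab, [0xFF0000, 0x00FF00, 0x0000FF])
    # close_tab() drops out of fullscreen first to avoid the tabs.Close()
    # state bug described in the comment above.
    facade.close_tab(tab)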
diff --git a/client/cros/multimedia/display_facade_adapter.py b/client/cros/multimedia/display_facade_adapter.py
index 130cb8c..fb7c80e 100644
--- a/client/cros/multimedia/display_facade_adapter.py
+++ b/client/cros/multimedia/display_facade_adapter.py
@@ -14,7 +14,7 @@
 from PIL import Image
 
 from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.multimedia import display_facade_native
+from autotest_lib.client.cros.multimedia import display_facade
 from autotest_lib.client.cros.multimedia import facade_resource
 from autotest_lib.client.cros.multimedia.display_info import DisplayInfo
 from autotest_lib.client.cros.power import sys_power
@@ -25,7 +25,7 @@
     """DisplayFacadeLocalAdapter is an adapter to control the local display.
 
     Methods with non-native-type arguments go to this class and do some
-    conversion; otherwise, go to the DisplayFacadeNative class.
+    conversion; otherwise, go to the DisplayFacadeLocal class.
     """
 
     def __init__(self, chrome):
@@ -33,9 +33,9 @@
 
         @param chrome: A Chrome object.
         """
-        # Create a DisplayFacadeNative object as a component such that this
+        # Create a DisplayFacadeLocal object as a component such that this
         # class can expose and manipulate its interfaces.
-        self._display_component = display_facade_native.DisplayFacadeNative(
+        self._display_component = display_facade.DisplayFacadeLocal(
                 facade_resource.FacadeResource(chrome_object=chrome))
 
 
diff --git a/client/cros/multimedia/display_facade_native.py b/client/cros/multimedia/display_facade_native.py
deleted file mode 100644
index 4691412..0000000
--- a/client/cros/multimedia/display_facade_native.py
+++ /dev/null
@@ -1,807 +0,0 @@
-# Lint as: python2, python3
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Facade to access the display-related functionality."""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-import logging
-import multiprocessing
-import numpy
-import os
-import re
-import shutil
-import time
-import json
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils as common_utils
-from autotest_lib.client.common_lib.cros import retry
-from autotest_lib.client.cros import constants
-from autotest_lib.client.cros.graphics import graphics_utils
-from autotest_lib.client.cros.multimedia import facade_resource
-from autotest_lib.client.cros.multimedia import image_generator
-from autotest_lib.client.cros.power import sys_power
-from six.moves import range
-from telemetry.internal.browser import web_contents
-
-class TimeoutException(Exception):
-    """Timeout Exception class."""
-    pass
-
-
-_FLAKY_CALL_RETRY_TIMEOUT_SEC = 60
-_FLAKY_DISPLAY_CALL_RETRY_DELAY_SEC = 2
-
-_retry_display_call = retry.retry(
-        (KeyError, error.CmdError),
-        timeout_min=_FLAKY_CALL_RETRY_TIMEOUT_SEC / 60.0,
-        delay_sec=_FLAKY_DISPLAY_CALL_RETRY_DELAY_SEC)
-
-
-class DisplayFacadeNative(object):
-    """Facade to access the display-related functionality.
-
-    The methods inside this class only accept Python native types.
-    """
-
-    CALIBRATION_IMAGE_PATH = '/tmp/calibration.png'
-    MINIMUM_REFRESH_RATE_EXPECTED = 25.0
-    DELAY_TIME = 3
-    MAX_TYPEC_PORT = 6
-
-    def __init__(self, resource):
-        """Initializes a DisplayFacadeNative.
-
-        @param resource: A FacadeResource object.
-        """
-        self._resource = resource
-        self._image_generator = image_generator.ImageGenerator()
-
-
-    @facade_resource.retry_chrome_call
-    def get_display_info(self):
-        """Gets the display info from Chrome.system.display API.
-
-        @return array of dict for display info.
-        """
-        extension = self._resource.get_extension(
-                constants.DISPLAY_TEST_EXTENSION)
-        extension.ExecuteJavaScript('window.__display_info = null;')
-        extension.ExecuteJavaScript(
-                "chrome.system.display.getInfo(function(info) {"
-                "window.__display_info = info;})")
-        utils.wait_for_value(lambda: (
-                extension.EvaluateJavaScript("window.__display_info") != None),
-                expected_value=True)
-        return extension.EvaluateJavaScript("window.__display_info")
-
-
-    @facade_resource.retry_chrome_call
-    def get_window_info(self):
-        """Gets the current window info from Chrome.system.window API.
-
-        @return a dict for the information of the current window.
-        """
-        extension = self._resource.get_extension()
-        extension.ExecuteJavaScript('window.__window_info = null;')
-        extension.ExecuteJavaScript(
-                "chrome.windows.getCurrent(function(info) {"
-                "window.__window_info = info;})")
-        utils.wait_for_value(lambda: (
-                extension.EvaluateJavaScript("window.__window_info") != None),
-                expected_value=True)
-        return extension.EvaluateJavaScript("window.__window_info")
-
-
-    @facade_resource.retry_chrome_call
-    def create_window(self, url='chrome://newtab'):
-        """Creates a new window from chrome.windows.create API.
-
-        @param url: Optional URL for the new window.
-
-        @return Identifier for the new window.
-
-        @raise TimeoutException if it fails.
-        """
-        extension = self._resource.get_extension()
-
-        extension.ExecuteJavaScript(
-                """
-                var __new_window_id = null;
-                chrome.windows.create(
-                        {url: '%s'},
-                        function(win) {
-                            __new_window_id = win.id});
-                """ % (url)
-        )
-        extension.WaitForJavaScriptCondition(
-                "__new_window_id !== null",
-                timeout=web_contents.DEFAULT_WEB_CONTENTS_TIMEOUT)
-
-        return extension.EvaluateJavaScript("__new_window_id")
-
-
-    @facade_resource.retry_chrome_call
-    def update_window(self, window_id, state=None, bounds=None):
-        """Updates an existing window using the chrome.windows.update API.
-
-        @param window_id: Identifier for the window to update.
-        @param state: Optional string to set the state such as 'normal',
-                      'maximized', or 'fullscreen'.
-        @param bounds: Optional dictionary with keys top, left, width, and
-                       height to reposition the window.
-
-        @return True if success.
-
-        @raise TimeoutException if it fails.
-        """
-        extension = self._resource.get_extension()
-        params = {}
-
-        if state:
-            params['state'] = state
-        if bounds:
-            params['top'] = bounds['top']
-            params['left'] = bounds['left']
-            params['width'] = bounds['width']
-            params['height'] = bounds['height']
-
-        if not params:
-            logging.info('Nothing to update for window_id={}'.format(window_id))
-            return True
-
-        extension.ExecuteJavaScript(
-                """
-                var __status = 'Running';
-                chrome.windows.update(%d, %s,
-                        function(win) {
-                            __status = 'Done'});
-                """ % (window_id, json.dumps(params))
-        )
-        extension.WaitForJavaScriptCondition(
-                "__status == 'Done'",
-                timeout=web_contents.DEFAULT_WEB_CONTENTS_TIMEOUT)
-
-        return True
-
-
-    def _get_display_by_id(self, display_id):
-        """Gets a display by ID.
-
-        @param display_id: id of the display.
-
-        @return: A dict of various display info.
-        """
-        for display in self.get_display_info():
-            if display['id'] == display_id:
-                return display
-        raise RuntimeError('Cannot find display ' + display_id)
-
-
-    def get_display_modes(self, display_id):
-        """Gets all the display modes for the specified display.
-
-        @param display_id: id of the display to get modes from.
-
-        @return: A list of DisplayMode dicts.
-        """
-        display = self._get_display_by_id(display_id)
-        return display['modes']
-
-
-    def get_display_rotation(self, display_id):
-        """Gets the display rotation for the specified display.
-
-        @param display_id: id of the display to get modes from.
-
-        @return: Degree of rotation.
-        """
-        display = self._get_display_by_id(display_id)
-        return display['rotation']
-
-
-    def get_display_notifications(self):
-        """Gets the display notifications
-
-        @return: Returns a list of display related notifications only.
-        """
-        display_notifications = []
-        for notification in self._resource.get_visible_notifications():
-            if notification['id'] == 'chrome://settings/display':
-                display_notifications.append(notification)
-        return display_notifications
-
-
-    def set_display_rotation(self, display_id, rotation,
-                             delay_before_rotation=0, delay_after_rotation=0):
-        """Sets the display rotation for the specified display.
-
-        @param display_id: id of the display to get modes from.
-        @param rotation: degree of rotation
-        @param delay_before_rotation: time in second for delay before rotation
-        @param delay_after_rotation: time in second for delay after rotation
-        """
-        time.sleep(delay_before_rotation)
-        extension = self._resource.get_extension(
-                constants.DISPLAY_TEST_EXTENSION)
-        extension.ExecuteJavaScript(
-                """
-                window.__set_display_rotation_has_error = null;
-                chrome.system.display.setDisplayProperties('%(id)s',
-                    {"rotation": %(rotation)d}, () => {
-                    if (chrome.runtime.lastError) {
-                        console.error('Failed to set display rotation',
-                            chrome.runtime.lastError);
-                        window.__set_display_rotation_has_error = "failure";
-                    } else {
-                        window.__set_display_rotation_has_error = "success";
-                    }
-                });
-                """
-                % {'id': display_id, 'rotation': rotation}
-        )
-        utils.wait_for_value(lambda: (
-                extension.EvaluateJavaScript(
-                    'window.__set_display_rotation_has_error') != None),
-                expected_value=True)
-        time.sleep(delay_after_rotation)
-        result = extension.EvaluateJavaScript(
-                'window.__set_display_rotation_has_error')
-        if result != 'success':
-            raise RuntimeError('Failed to set display rotation: %r' % result)
-
-
-    def get_available_resolutions(self, display_id):
-        """Gets the resolutions from the specified display.
-
-        @return a list of (width, height) tuples.
-        """
-        display = self._get_display_by_id(display_id)
-        modes = display['modes']
-        if 'widthInNativePixels' not in modes[0]:
-            raise RuntimeError('Cannot find widthInNativePixels attribute')
-        if display['isInternal']:
-            logging.info("Getting resolutions of internal display")
-            return list(set([(mode['width'], mode['height']) for mode in
-                             modes]))
-        return list(set([(mode['widthInNativePixels'],
-                          mode['heightInNativePixels']) for mode in modes]))
-
-
-    def has_internal_display(self):
-        """Returns whether the device has an internal display.
-
-        @return whether the device has an internal display
-        """
-        return len([d for d in self.get_display_info() if d['isInternal']]) > 0
-
-
-    def get_internal_display_id(self):
-        """Gets the internal display id.
-
-        @return the id of the internal display.
-        """
-        for display in self.get_display_info():
-            if display['isInternal']:
-                return display['id']
-        raise RuntimeError('Cannot find internal display')
-
-
-    def get_first_external_display_id(self):
-        """Gets the first external display id.
-
-        @return the id of the first external display; -1 if not found.
-        """
-        # Get the first external and enabled display
-        for display in self.get_display_info():
-            if display['isEnabled'] and not display['isInternal']:
-                return display['id']
-        return -1
-
-
-    def set_resolution(self, display_id, width, height, timeout=3):
-        """Sets the resolution of the specified display.
-
-        @param display_id: id of the display to set resolution for.
-        @param width: width of the resolution
-        @param height: height of the resolution
-        @param timeout: maximal time in seconds waiting for the new resolution
-                to settle in.
-        @raise TimeoutException when the operation is timed out.
-        """
-
-        extension = self._resource.get_extension(
-                constants.DISPLAY_TEST_EXTENSION)
-        extension.ExecuteJavaScript(
-                """
-                window.__set_resolution_progress = null;
-                chrome.system.display.getInfo((info_array) => {
-                    var mode;
-                    for (var info of info_array) {
-                        if (info['id'] == '%(id)s') {
-                            for (var m of info['modes']) {
-                                if (m['width'] == %(width)d &&
-                                    m['height'] == %(height)d) {
-                                    mode = m;
-                                    break;
-                                }
-                            }
-                            break;
-                        }
-                    }
-                    if (mode === undefined) {
-                        console.error('Failed to select the resolution ' +
-                            '%(width)dx%(height)d');
-                        window.__set_resolution_progress = "mode not found";
-                        return;
-                    }
-
-                    chrome.system.display.setDisplayProperties('%(id)s',
-                        {'displayMode': mode}, () => {
-                            if (chrome.runtime.lastError) {
-                                window.__set_resolution_progress = "failed: " +
-                                    chrome.runtime.lastError.message;
-                            } else {
-                                window.__set_resolution_progress = "succeeded";
-                            }
-                        }
-                    );
-                });
-                """
-                % {'id': display_id, 'width': width, 'height': height}
-        )
-        utils.wait_for_value(lambda: (
-                extension.EvaluateJavaScript(
-                    'window.__set_resolution_progress') != None),
-                expected_value=True)
-        result = extension.EvaluateJavaScript(
-                'window.__set_resolution_progress')
-        if result != 'succeeded':
-            raise RuntimeError('Failed to set resolution: %r' % result)
-
-
-    @_retry_display_call
-    def get_external_resolution(self):
-        """Gets the resolution of the external screen.
-
-        @return The resolution tuple (width, height)
-        """
-        return graphics_utils.get_external_resolution()
-
-    def get_internal_resolution(self):
-        """Gets the resolution of the internal screen.
-
-        @return The resolution tuple (width, height) or None if internal screen
-                is not available
-        """
-        for display in self.get_display_info():
-            if display['isInternal']:
-                bounds = display['bounds']
-                return (bounds['width'], bounds['height'])
-        return None
-
-
-    def set_content_protection(self, state):
-        """Sets the content protection of the external screen.
-
-        @param state: One of the states 'Undesired', 'Desired', or 'Enabled'
-        """
-        connector = self.get_external_connector_name()
-        graphics_utils.set_content_protection(connector, state)
-
-
-    def get_content_protection(self):
-        """Gets the state of the content protection.
-
-        @param output: The output name as a string.
-        @return: A string of the state, like 'Undesired', 'Desired', or 'Enabled'.
-                 False if not supported.
-        """
-        connector = self.get_external_connector_name()
-        return graphics_utils.get_content_protection(connector)
-
-
-    def get_external_crtc_id(self):
-        """Gets the external crtc.
-
-        @return The id of the external crtc."""
-        return graphics_utils.get_external_crtc_id()
-
-
-    def get_internal_crtc_id(self):
-        """Gets the internal crtc.
-
-        @retrun The id of the internal crtc."""
-        return graphics_utils.get_internal_crtc_id()
-
-
-    def take_internal_screenshot(self, path):
-        """Takes internal screenshot.
-
-        @param path: path to image file.
-        """
-        self.take_screenshot_crtc(path, self.get_internal_crtc_id())
-
-
-    def take_external_screenshot(self, path):
-        """Takes external screenshot.
-
-        @param path: path to image file.
-        """
-        self.take_screenshot_crtc(path, self.get_external_crtc_id())
-
-
-    def take_screenshot_crtc(self, path, id):
-        """Captures the DUT screenshot, use id for selecting screen.
-
-        @param path: path to image file.
-        @param id: The id of the crtc to screenshot.
-        """
-
-        graphics_utils.take_screenshot_crop(path, crtc_id=id)
-        return True
-
-
-    def save_calibration_image(self, path):
-        """Save the calibration image to the given path.
-
-        @param path: path to image file.
-        """
-        shutil.copy(self.CALIBRATION_IMAGE_PATH, path)
-        return True
-
-
-    def take_tab_screenshot(self, output_path, url_pattern=None):
-        """Takes a screenshot of the tab specified by the given url pattern.
-
-        @param output_path: A path of the output file.
-        @param url_pattern: A string of url pattern used to search for tabs.
-                            Default is to look for .svg image.
-        """
-        if url_pattern is None:
-            # If no URL pattern is provided, defaults to capture the first
-            # tab that shows SVG image.
-            url_pattern = '.svg'
-
-        tabs = self._resource.get_tabs()
-        for i in range(0, len(tabs)):
-            if url_pattern in tabs[i].url:
-                data = tabs[i].Screenshot(timeout=5)
-                # Flip the colors from BGR to RGB.
-                data = numpy.fliplr(data.reshape(-1, 3)).reshape(data.shape)
-                data.tofile(output_path)
-                break
-        return True
-
-
-    def toggle_mirrored(self):
-        """Toggles mirrored."""
-        graphics_utils.screen_toggle_mirrored()
-        return True
-
-
-    def hide_cursor(self):
-        """Hides mouse cursor."""
-        graphics_utils.hide_cursor()
-        return True
-
-
-    def hide_typing_cursor(self):
-        """Hides typing cursor."""
-        graphics_utils.hide_typing_cursor()
-        return True
-
-
-    def is_mirrored_enabled(self):
-        """Checks the mirrored state.
-
-        @return True if mirrored mode is enabled.
-        """
-        return bool(self.get_display_info()[0]['mirroringSourceId'])
-
-
-    def set_mirrored(self, is_mirrored):
-        """Sets mirrored mode.
-
-        @param is_mirrored: True or False to indicate mirrored state.
-        @return True if success, False otherwise.
-        """
-        if self.is_mirrored_enabled() == is_mirrored:
-            return True
-
-        retries = 4
-        while retries > 0:
-            self.toggle_mirrored()
-            result = utils.wait_for_value(self.is_mirrored_enabled,
-                                          expected_value=is_mirrored,
-                                          timeout_sec=3)
-            if result == is_mirrored:
-                return True
-            retries -= 1
-        return False
-
-
-    def is_display_primary(self, internal=True):
-        """Checks if internal screen is primary display.
-
-        @param internal: is internal/external screen primary status requested
-        @return boolean True if internal display is primary.
-        """
-        for info in self.get_display_info():
-            if info['isInternal'] == internal and info['isPrimary']:
-                return True
-        return False
-
-
-    def suspend_resume(self, suspend_time=10):
-        """Suspends the DUT for a given time in second.
-
-        @param suspend_time: Suspend time in second.
-        """
-        sys_power.do_suspend(suspend_time)
-        return True
-
-
-    def suspend_resume_bg(self, suspend_time=10):
-        """Suspends the DUT for a given time in second in the background.
-
-        @param suspend_time: Suspend time in second.
-        """
-        process = multiprocessing.Process(target=self.suspend_resume,
-                                          args=(suspend_time,))
-        process.start()
-        return True
-
-
-    @_retry_display_call
-    def get_external_connector_name(self):
-        """Gets the name of the external output connector.
-
-        @return The external output connector name as a string, if any.
-                Otherwise, return False.
-        """
-        return graphics_utils.get_external_connector_name()
-
-
-    def get_internal_connector_name(self):
-        """Gets the name of the internal output connector.
-
-        @return The internal output connector name as a string, if any.
-                Otherwise, return False.
-        """
-        return graphics_utils.get_internal_connector_name()
-
-
-    def wait_external_display_connected(self, display):
-        """Waits for the specified external display to be connected.
-
-        @param display: The display name as a string, like 'HDMI1', or
-                        False if no external display is expected.
-        @return: True if display is connected; False otherwise.
-        """
-        result = utils.wait_for_value(self.get_external_connector_name,
-                                      expected_value=display)
-        return result == display
-
-
-    @facade_resource.retry_chrome_call
-    def move_to_display(self, display_id):
-        """Moves the current window to the indicated display.
-
-        @param display_id: The id of the indicated display.
-        @return True if success.
-
-        @raise TimeoutException if it fails.
-        """
-        display_info = self._get_display_by_id(display_id)
-        if not display_info['isEnabled']:
-            raise RuntimeError('Cannot find the indicated display')
-        target_bounds = display_info['bounds']
-
-        extension = self._resource.get_extension()
-        # If the area of bounds is empty (here we achieve this by setting
-        # width and height to zero), the window_sizer will automatically
-        # determine an area which is visible and fits on the screen.
-        # For more details, see chrome/browser/ui/window_sizer.cc
-        # Without setting state to 'normal', if the current state is
-        # 'minimized', 'maximized' or 'fullscreen', the setting of
-        # 'left', 'top', 'width' and 'height' will be ignored.
-        # For more details, see chrome/browser/extensions/api/tabs/tabs_api.cc
-        extension.ExecuteJavaScript(
-                """
-                var __status = 'Running';
-                chrome.windows.update(
-                        chrome.windows.WINDOW_ID_CURRENT,
-                        {left: %d, top: %d, width: 0, height: 0,
-                         state: 'normal'},
-                        function(info) {
-                            if (info.left == %d && info.top == %d &&
-                                info.state == 'normal')
-                                __status = 'Done'; });
-                """
-                % (target_bounds['left'], target_bounds['top'],
-                   target_bounds['left'], target_bounds['top'])
-        )
-        extension.WaitForJavaScriptCondition(
-                "__status == 'Done'",
-                timeout=web_contents.DEFAULT_WEB_CONTENTS_TIMEOUT)
-        return True
-
-
-    def is_fullscreen_enabled(self):
-        """Checks the fullscreen state.
-
-        @return True if fullscreen mode is enabled.
-        """
-        return self.get_window_info()['state'] == 'fullscreen'
-
-
-    def set_fullscreen(self, is_fullscreen):
-        """Sets the current window to full screen.
-
-        @param is_fullscreen: True or False to indicate fullscreen state.
-        @return True if success, False otherwise.
-        """
-        extension = self._resource.get_extension()
-        if not extension:
-            raise RuntimeError('Autotest extension not found')
-
-        if is_fullscreen:
-            window_state = "fullscreen"
-        else:
-            window_state = "normal"
-        extension.ExecuteJavaScript(
-                """
-                var __status = 'Running';
-                chrome.windows.update(
-                        chrome.windows.WINDOW_ID_CURRENT,
-                        {state: '%s'},
-                        function() { __status = 'Done'; });
-                """
-                % window_state)
-        utils.wait_for_value(lambda: (
-                extension.EvaluateJavaScript('__status') == 'Done'),
-                expected_value=True)
-        return self.is_fullscreen_enabled() == is_fullscreen
-
-
-    def load_url(self, url):
-        """Loads the given url in a new tab. The new tab will be active.
-
-        @param url: The url to load as a string.
-        @return a str, the tab descriptor of the opened tab.
-        """
-        return self._resource.load_url(url)
-
-
-    def load_calibration_image(self, resolution):
-        """Opens a new tab and loads a full screen calibration
-           image from the HTTP server.
-
-        @param resolution: A tuple (width, height) of resolution.
-        @return a str, the tab descriptor of the opened tab.
-        """
-        path = self.CALIBRATION_IMAGE_PATH
-        self._image_generator.generate_image(resolution[0], resolution[1], path)
-        os.chmod(path, 0o644)
-        tab_descriptor = self.load_url('file://%s' % path)
-        return tab_descriptor
-
-
-    def load_color_sequence(self, tab_descriptor, color_sequence):
-        """Displays a series of colors on full screen on the tab.
-        tab_descriptor is returned by any open tab API of display facade.
-        e.g.,
-        tab_descriptor = load_url('about:blank')
-        load_color_sequence(tab_descriptor, color)
-
-        @param tab_descriptor: Indicate which tab to test.
-        @param color_sequence: An integer list for switching colors.
-        @return A list of the timestamp for each switch.
-        """
-        tab = self._resource.get_tab_by_descriptor(tab_descriptor)
-        color_sequence_for_java_script = (
-                'var color_sequence = [' +
-                ','.join("'#%06X'" % x for x in color_sequence) +
-                '];')
-        # Paints are synchronized to the fresh rate of the screen by
-        # window.requestAnimationFrame.
-        tab.ExecuteJavaScript(color_sequence_for_java_script + """
-            function render(timestamp) {
-                window.timestamp_list.push(timestamp);
-                if (window.count < color_sequence.length) {
-                    document.body.style.backgroundColor =
-                            color_sequence[count];
-                    window.count++;
-                    window.requestAnimationFrame(render);
-                }
-            }
-            window.count = 0;
-            window.timestamp_list = [];
-            window.requestAnimationFrame(render);
-            """)
-
-        # Waiting time is decided by following concerns:
-        # 1. MINIMUM_REFRESH_RATE_EXPECTED: the minimum refresh rate
-        #    we expect it to be. Real refresh rate is related to
-        #    not only hardware devices but also drivers and browsers.
-        #    Most graphics devices support at least 60fps for a single
-        #    monitor, and under mirror mode, since the both frames
-        #    buffers need to be updated for an input frame, the refresh
-        #    rate will decrease by half, so here we set it to be a
-        #    little less than 30 (= 60/2) to make it more tolerant.
-        # 2. DELAY_TIME: extra wait time for timeout.
-        tab.WaitForJavaScriptCondition(
-                'window.count == color_sequence.length',
-                timeout=(
-                    (len(color_sequence) / self.MINIMUM_REFRESH_RATE_EXPECTED)
-                    + self.DELAY_TIME))
-        return tab.EvaluateJavaScript("window.timestamp_list")
-
-
-    def close_tab(self, tab_descriptor):
-        """Disables fullscreen and closes the tab of the given tab descriptor.
-        tab_descriptor is returned by any open tab API of display facade.
-        e.g.,
-        1.
-        tab_descriptor = load_url(url)
-        close_tab(tab_descriptor)
-
-        2.
-        tab_descriptor = load_calibration_image(resolution)
-        close_tab(tab_descriptor)
-
-        @param tab_descriptor: Indicate which tab to be closed.
-        """
-        if tab_descriptor:
-            # set_fullscreen(False) is necessary here because currently there
-            # is a bug in tabs.Close(). If the current state is fullscreen and
-            # we call close_tab() without setting state back to normal, it will
-            # cancel fullscreen mode without changing system configuration, and
-            # so that the next time someone calls set_fullscreen(True), the
-            # function will find that current state is already 'fullscreen'
-            # (though it is not) and do nothing, which will break all the
-            # following tests.
-            self.set_fullscreen(False)
-            self._resource.close_tab(tab_descriptor)
-        else:
-            logging.error('close_tab: not a valid tab_descriptor')
-
-        return True
-
-
-    def reset_connector_if_applicable(self, connector_type):
-        """Resets Type-C video connector from host end if applicable.
-
-        It's the workaround sequence since sometimes Type-C dongle becomes
-        corrupted and needs to be re-plugged.
-
-        @param connector_type: A string, like "VGA", "DVI", "HDMI", or "DP".
-        """
-        if connector_type != 'HDMI' and connector_type != 'DP':
-            return
-        # Decide if we need to add --name=cros_pd
-        usbpd_command = 'ectool --name=cros_pd usbpd'
-        try:
-            common_utils.run('%s 0' % usbpd_command)
-        except error.CmdError:
-            usbpd_command = 'ectool usbpd'
-
-        port = 0
-        while port < self.MAX_TYPEC_PORT:
-            # We use usbpd to get Role information and then power cycle the
-            # SRC one.
-            command = '%s %d' % (usbpd_command, port)
-            try:
-                output = common_utils.run(command).stdout
-                if re.compile('Role.*SRC').search(output):
-                    logging.info('power-cycle Type-C port %d', port)
-                    common_utils.run('%s sink' % command)
-                    common_utils.run('%s auto' % command)
-                port += 1
-            except error.CmdError:
-                break
diff --git a/client/cros/multimedia/facade_resource.py b/client/cros/multimedia/facade_resource.py
index 3b40e87..4fb75c1 100644
--- a/client/cros/multimedia/facade_resource.py
+++ b/client/cros/multimedia/facade_resource.py
@@ -5,14 +5,15 @@
 
 """A module providing common resources for different facades."""
 
-import exceptions
 import logging
 import time
 
 from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import chrome
 from autotest_lib.client.common_lib.cros import retry
 from autotest_lib.client.cros import constants
+from telemetry.internal.backends.chrome_inspector import devtools_http
 
 import py_utils
 
@@ -20,7 +21,7 @@
 _FLAKY_CHROME_CALL_RETRY_DELAY_SEC = 1
 
 retry_chrome_call = retry.retry(
-        (chrome.Error, exceptions.IndexError, exceptions.Exception),
+        (chrome.Error, IndexError, Exception),
         timeout_min=_FLAKY_CALL_RETRY_TIMEOUT_SEC / 60.0,
         delay_sec=_FLAKY_CHROME_CALL_RETRY_DELAY_SEC)
 
@@ -190,7 +191,7 @@
                 try:
                     tab.Close()
                 except py_utils.TimeoutException:
-                    logging.warn('close tab timeout %r, %s', tab, tab.url)
+                    logging.warning('close tab timeout %r, %s', tab, tab.url)
 
 
     @retry_chrome_call
@@ -346,3 +347,112 @@
             raise RuntimeError('There is no tab for %s' % tab_descriptor)
         return self._tabs[tab_descriptor].EvaluateJavaScript(
                 expression, timeout=timeout)
+
+class Application(FacadeResource):
+    """ This class provides access to WebStore Applications"""
+
+    APP_NAME_IDS = {
+        'camera' : 'njfbnohfdkmbmnjapinfcopialeghnmh',
+        'files' : 'hhaomjibdihmijegdhdafkllkbggdgoj'
+    }
+    # Time in seconds to load the app
+    LOAD_TIME = 5
+
+    def __init__(self, chrome_object=None):
+        super(Application, self).__init__(chrome_object)
+
+    @retry_chrome_call
+    def evaluate_javascript(self, code):
+        """Executes javascript and returns some result.
+
+        Occasionally calls to EvaluateJavascript on the autotest_ext will fail
+        to find the extension. Instead of wrapping every call in a try/except,
+        calls will go through this function instead.
+
+        @param code: The javascript string to execute
+
+        """
+        try:
+            result = self._chrome.autotest_ext.EvaluateJavaScript(code)
+            return result
+        except KeyError:
+            logging.exception('Could not find autotest_ext')
+        except (devtools_http.DevToolsClientUrlError,
+                devtools_http.DevToolsClientConnectionError):
+            logging.exception('Could not connect to DevTools')
+
+        raise error.TestError("Could not execute %s" % code)
+
+    def click_on(self, ui, name, isRegex=False, role=None):
+        """
+        Clicks on the node whose name and role match the given values.
+
+        @param ui: ui_utils object.
+        @param name: item node name.
+        @param isRegex: True if name is given as a regular expression,
+                        False otherwise.
+        @param role: role of the element, e.g. button or window.
+        @raise error.TestError if the given node is not found in time.
+        """
+        if not ui.item_present(name, isRegex=isRegex, role=role):
+            raise error.TestError("name=%s, role=%s did not appeared with in "
+                                 "time" % (name, role))
+        ui.doDefault_on_obj(name, isRegex=isRegex, role=role)
+
+    def is_app_opened(self, name):
+        """
+        Checks whether the given Web Store app is currently shown.
+
+        @param name: Name of the app to check.
+        @return True if the app is shown, False otherwise.
+        """
+        self.evaluate_javascript("var isShown = null;")
+        is_app_shown_js = """
+            chrome.autotestPrivate.isAppShown('%s',
+            function(appShown){isShown = appShown});
+            """ % self.APP_NAME_IDS[name.lower()]
+        self.evaluate_javascript(is_app_shown_js)
+        return self.evaluate_javascript('isShown')
+
+    def launch_app(self, name):
+        """
+        Launch the app/extension by its ID and verify that it opens.
+
+        @param name: Name of the app to launch.
+
+        """
+        logging.info("Launching %s app" % name)
+        if name == "camera":
+            webapps_js = "chrome.autotestPrivate.waitForSystemWebAppsInstall(" \
+                     "function(){})"
+            self.evaluate_javascript(webapps_js)
+            launch_js = "chrome.autotestPrivate.launchSystemWebApp('%s', '%s', " \
+                    "function(){})" % ("Camera",
+                                       "chrome://camera-app/views/main.html")
+        else:
+            launch_js = "chrome.autotestPrivate.launchApp('%s', function(){})" \
+                         % self.APP_NAME_IDS[name.lower()]
+        self.evaluate_javascript(launch_js)
+        def is_app_opened():
+            return self.is_app_opened(name)
+        utils.poll_for_condition(condition=is_app_opened,
+                    desc="%s app is not launched" % name,
+                    timeout=self.LOAD_TIME)
+        logging.info('%s app is launched', name)
+
+    def close_app(self, name):
+        """
+        Close the app/extension by its ID and verify that it closes.
+
+        @param name: Name of the app to close.
+
+        """
+        close_js = "chrome.autotestPrivate.closeApp('%s', function(){})" \
+                    % self.APP_NAME_IDS[name.lower()]
+        self.evaluate_javascript(close_js)
+        def is_app_closed():
+            return not self.is_app_opened(name)
+        utils.poll_for_condition(condition=is_app_closed,
+                    desc="%s app is not closed" % name,
+                    timeout=self.LOAD_TIME)
+        logging.info('%s app is closed', name)
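
A short usage sketch for the new Application helper follows; it is illustrative only, not part of this change. It assumes a logged-in Chrome session with the autotest extension, and uses the 'files' entry from APP_NAME_IDS above.

from autotest_lib.client.common_lib.cros import chrome
from autotest_lib.client.cros.multimedia import facade_resource

with chrome.Chrome(autotest_ext=True) as cr:
    app = facade_resource.Application(chrome_object=cr)
    app.launch_app('files')   # polls is_app_opened() for up to LOAD_TIME seconds
    app.close_app('files')    # polls until the app is reported closed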
diff --git a/client/cros/multimedia/graphics_facade.py b/client/cros/multimedia/graphics_facade.py
new file mode 100644
index 0000000..1c6af27
--- /dev/null
+++ b/client/cros/multimedia/graphics_facade.py
@@ -0,0 +1,42 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An interface to access the local graphics facade."""
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.graphics import graphics_utils
+
+
+class GraphicsFacadeLocal(object):
+    """Facade to access graphics utilities for catching GPU hangs."""
+
+
+    def __init__(self):
+        """Initializes the graphics facade.
+
+        Graphics state checker is initialized with a dedicated function to
+        control timing for the initial set of errors extracted from logs.
+
+        """
+        self._graphics_state_checker = None
+
+
+    def graphics_state_checker_initialize(self):
+        """Create and initialize the graphics state checker object.
+
+        This will establish existing errors and take a snapshot of graphics
+        kernel memory.
+
+        """
+        self._graphics_state_checker = graphics_utils.GraphicsStateChecker()
+
+
+    def graphics_state_checker_finalize(self):
+        """Throw an error on new GPU hang messages in system logs.
+
+        @raises TestError: Finalize was called before initialize.
+        """
+        if self._graphics_state_checker is None:
+            raise error.TestError('Graphics state checker initialize not called.')
+        self._graphics_state_checker.finalize()
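
The state checker is meant to be used as an initialize/finalize pair around the workload under test. A minimal sketch, assuming the autotest client environment; the workload function is a hypothetical placeholder.

from autotest_lib.client.cros.multimedia import graphics_facade


def run_gpu_workload():
    """Placeholder for the graphics workload under test."""
    pass


facade = graphics_facade.GraphicsFacadeLocal()
facade.graphics_state_checker_initialize()   # snapshot pre-existing errors
run_gpu_workload()                           # hypothetical test body
facade.graphics_state_checker_finalize()     # raises if new GPU hangs were logged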
diff --git a/client/cros/multimedia/graphics_facade_native.py b/client/cros/multimedia/graphics_facade_native.py
deleted file mode 100644
index 77ba41e..0000000
--- a/client/cros/multimedia/graphics_facade_native.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""An interface to access the local graphics facade."""
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.graphics import graphics_utils
-
-
-class GraphicsFacadeNative(object):
-    """Facade to access graphics utilities for catching GPU hangs."""
-
-
-    def __init__(self):
-        """Initializes the graphics facade.
-
-        Graphics state checker is initialized with a dedicated function to
-        control timing for the initial set of errors extracted from logs.
-
-        """
-        self._graphics_state_checker = None
-
-
-    def graphics_state_checker_initialize(self):
-        """Create and initialize the graphics state checker object.
-
-        This will establish existing errors and take a snapshot of graphics
-        kernel memory.
-
-        """
-        self._graphics_state_checker = graphics_utils.GraphicsStateChecker()
-
-
-    def graphics_state_checker_finalize(self):
-        """Throw an error on new GPU hang messages in system logs.
-
-        @raises TestError: Finalize was called before initialize.
-        """
-        if self._graphics_state_checker is None:
-             raise error.TestError('Graphics state checker initialize not called.')
-        self._graphics_state_checker.finalize()
diff --git a/client/cros/multimedia/image_generator.py b/client/cros/multimedia/image_generator.py
index 22e18b9..ade874a 100755
--- a/client/cros/multimedia/image_generator.py
+++ b/client/cros/multimedia/image_generator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/multimedia/input_facade.py b/client/cros/multimedia/input_facade.py
new file mode 100644
index 0000000..f1cbf00
--- /dev/null
+++ b/client/cros/multimedia/input_facade.py
@@ -0,0 +1,127 @@
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An interface to access the local input facade."""
+
+
+import json
+import logging
+import threading
+
+from autotest_lib.client.bin.input import input_event_recorder
+from autotest_lib.client.cros.input_playback import input_playback
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.graphics import graphics_utils
+
+
+class InputFacadeLocalError(Exception):
+    """Error in InputFacadeLocal."""
+    pass
+
+
+class InputFacadeLocal(object):
+    """Facade to access the record input events."""
+
+    def __init__(self):
+        """Initializes the input facade."""
+        self.recorders_lock = threading.Lock()
+        self.recorders = dict()
+
+    def initialize_input_playback(self, input_type='keyboard', property_file=None):
+        """Initialize for input events simulation.
+
+        @param input_type: the name of the input device.
+        @param property_file: Property file of device to be emulated.
+        """
+        self._player = input_playback.InputPlayback()
+        self._player.emulate(input_type=input_type, property_file=property_file)
+        self._player.find_connected_inputs()
+
+    def initialize_input_recorder(self, device_name, uniq):
+        """Initialize an input event recorder object.
+
+        @param device_name: the name of the input device to record.
+        @param uniq: Unique address of input device (None if not used)
+
+        """
+        with self.recorders_lock:
+            self.recorders[device_name] = \
+                input_event_recorder.InputEventRecorder(device_name, uniq)
+            logging.info('input event device: %s [uniq=%s] (%s)',
+                         self.recorders[device_name].device_name,
+                         self.recorders[device_name].uniq,
+                         self.recorders[device_name].device_node)
+
+
+    def clear_input_events(self, device_name):
+        """Clear the event list.
+
+        @param device_name: the name of the input device to record.
+
+        """
+        with self.recorders_lock:
+            if self.recorders[device_name] is None:
+                raise error.TestError(
+                    'input facade: input device name not given')
+            self.recorders[device_name].clear_events()
+
+
+    def start_input_recorder(self, device_name):
+        """Start the recording thread.
+
+        @param device_name: the name of the input device to record.
+
+        """
+        with self.recorders_lock:
+            if self.recorders[device_name] is None:
+                raise error.TestError(
+                    'input facade: input device name not given')
+            self.recorders[device_name].start()
+
+
+    def stop_input_recorder(self, device_name):
+        """Stop the recording thread.
+
+        @param device_name: the name of the input device to record.
+
+        """
+        with self.recorders_lock:
+            if self.recorders[device_name] is None:
+                raise error.TestError(
+                    'input facade: input device name not given')
+            self.recorders[device_name].stop()
+
+
+    def get_input_events(self, device_name):
+        """Get the bluetooth device input events.
+
+        @param device_name: the name of the input device to record.
+
+        @returns: the recorded input events.
+
+        """
+        with self.recorders_lock:
+            if self.recorders[device_name] is None:
+                raise error.TestError(
+                    'input facade: input device name not given')
+            events = self.recorders[device_name].get_events()
+        return json.dumps(events)
+
+
+    def press_keys(self, key_list):
+        """ Simulating key press
+
+        @param key_list: A list of key strings, e.g. ['LEFTCTRL', 'F4']
+        """
+        graphics_utils.press_keys(key_list)
+
+
+    def blocking_playback_of_default_file(self, input_type, filename):
+        """Simulate events
+
+        @param input_type: input device name
+        @param filename: input events
+        """
+        self._player.blocking_playback_of_default_file(input_type=input_type,
+                                                       filename=filename)
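
A sketch of the recorder lifecycle exposed above, assuming the autotest client environment; the device name is a placeholder and the gesture step is elided.

import json

from autotest_lib.client.cros.multimedia import input_facade

DEVICE = 'Atmel maXTouch Touchpad'   # placeholder device name

facade = input_facade.InputFacadeLocal()
facade.initialize_input_recorder(DEVICE, uniq=None)
facade.clear_input_events(DEVICE)
facade.start_input_recorder(DEVICE)
# ... perform or play back the gesture under test here ...
facade.stop_input_recorder(DEVICE)
events = json.loads(facade.get_input_events(DEVICE))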
diff --git a/client/cros/multimedia/input_facade_native.py b/client/cros/multimedia/input_facade_native.py
deleted file mode 100644
index ca59a94..0000000
--- a/client/cros/multimedia/input_facade_native.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""An interface to access the local input facade."""
-
-
-import json
-import logging
-import threading
-
-from autotest_lib.client.bin.input import input_event_recorder
-from autotest_lib.client.cros.input_playback import input_playback
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.graphics import graphics_utils
-
-
-class InputFacadeNativeError(Exception):
-    """Error in InputFacadeNative."""
-    pass
-
-
-class InputFacadeNative(object):
-    """Facade to access the record input events."""
-
-    def __init__(self):
-        """Initializes the input facade."""
-        self.recorders_lock = threading.Lock()
-        self.recorders = dict()
-
-    def initialize_input_playback(self, input_type='keyboard', property_file=None):
-        """Initialize for input events simulation.
-
-        @param input_type: the name of the input device.
-        @param property_file: Property file of device to be emulated.
-        """
-        self._player = input_playback.InputPlayback()
-        self._player.emulate(input_type=input_type, property_file=property_file)
-        self._player.find_connected_inputs()
-
-    def initialize_input_recorder(self, device_name, uniq):
-        """Initialize an input event recorder object.
-
-        @param device_name: the name of the input device to record.
-        @param uniq: Unique address of input device (None if not used)
-
-        """
-        with self.recorders_lock:
-            self.recorders[device_name] = \
-                input_event_recorder.InputEventRecorder(device_name, uniq)
-            logging.info('input event device: %s [uniq=%s] (%s)',
-                         self.recorders[device_name].device_name,
-                         self.recorders[device_name].uniq,
-                         self.recorders[device_name].device_node)
-
-
-    def clear_input_events(self, device_name):
-        """Clear the event list.
-
-        @param device_name: the name of the input device to record.
-
-        """
-        with self.recorders_lock:
-            if self.recorders[device_name] is None:
-                raise error.TestError(
-                    'input facade: input device name not given')
-            self.recorders[device_name].clear_events()
-
-
-    def start_input_recorder(self, device_name):
-        """Start the recording thread.
-
-        @param device_name: the name of the input device to record.
-
-        """
-        with self.recorders_lock:
-            if self.recorders[device_name] is None:
-                raise error.TestError(
-                    'input facade: input device name not given')
-            self.recorders[device_name].start()
-
-
-    def stop_input_recorder(self, device_name):
-        """Stop the recording thread.
-
-        @param device_name: the name of the input device to record.
-
-        """
-        with self.recorders_lock:
-            if self.recorders[device_name] is None:
-                raise error.TestError(
-                    'input facade: input device name not given')
-            self.recorders[device_name].stop()
-
-
-    def get_input_events(self, device_name):
-        """Get the bluetooth device input events.
-
-        @param device_name: the name of the input device to record.
-
-        @returns: the recorded input events.
-
-        """
-        with self.recorders_lock:
-            if self.recorders[device_name] is None:
-                raise error.TestError(
-                    'input facade: input device name not given')
-            events = self.recorders[device_name].get_events()
-        return json.dumps(events)
-
-
-    def press_keys(self, key_list):
-        """ Simulating key press
-
-        @param key_list: A list of key strings, e.g. ['LEFTCTRL', 'F4']
-        """
-        graphics_utils.press_keys(key_list)
-
-
-    def blocking_playback_of_default_file(self, input_type, filename):
-        """Simulate events
-
-        @param input_type: input device name
-        @param filename: input events
-        """
-        self._player.blocking_playback_of_default_file(input_type=input_type,
-                                                       filename=filename)
diff --git a/client/cros/multimedia/kiosk_facade.py b/client/cros/multimedia/kiosk_facade.py
new file mode 100644
index 0000000..bb70c85
--- /dev/null
+++ b/client/cros/multimedia/kiosk_facade.py
@@ -0,0 +1,38 @@
+# Copyright 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import kiosk_utils
+
+
+class KioskFacadeLocal(object):
+    """Facade to access the Kiosk functionality."""
+
+
+    def __init__(self, resource):
+        """
+        Initializes a KioskFacadeLocal.
+
+        @param resource: A FacadeResource object.
+
+        """
+        self._resource = resource
+
+
+    def config_rise_player(self, ext_id, app_config_id):
+        """
+        Configure Rise Player app with a specific display id.
+
+        @param ext_id: extension id of the Rise Player Kiosk App.
+        @param app_config_id: display id for the Rise Player app.
+
+        """
+        custom_chrome_setup = {"clear_enterprise_policy": False,
+                               "dont_override_profile": True,
+                               "disable_gaia_services": False,
+                               "disable_default_apps": False,
+                               "auto_login": False}
+        self._resource.start_custom_chrome(custom_chrome_setup)
+        kiosk_utils.config_riseplayer(
+                self._resource._browser, ext_id, app_config_id)
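
config_rise_player() restarts Chrome with the custom options above and then hands off to kiosk_utils. A hedged sketch of how a caller might use it, assuming the FacadeResource is created as the multimedia XML-RPC server normally does; the extension id and display id are placeholders.

from autotest_lib.client.cros.multimedia import facade_resource
from autotest_lib.client.cros.multimedia import kiosk_facade

resource = facade_resource.FacadeResource()   # normally shared across facades
facade = kiosk_facade.KioskFacadeLocal(resource)
facade.config_rise_player('<rise player extension id>', '<display config id>')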
diff --git a/client/cros/multimedia/kiosk_facade_native.py b/client/cros/multimedia/kiosk_facade_native.py
deleted file mode 100644
index 516cda1..0000000
--- a/client/cros/multimedia/kiosk_facade_native.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import kiosk_utils
-
-
-class KioskFacadeNative(object):
-    """Facade to access the Kiosk functionality."""
-
-
-    def __init__(self, resource):
-        """
-        Initializes a KioskFacadeNative.
-
-        @param resource: A FacadeResource object.
-
-        """
-        self._resource = resource
-
-
-    def config_rise_player(self, ext_id, app_config_id):
-        """
-        Configure Rise Player app with a specific display id.
-
-        @param ext_id: extension id of the Rise Player Kiosk App.
-        @param app_config_id: display id for the Rise Player app.
-
-        """
-        custom_chrome_setup = {"clear_enterprise_policy": False,
-                               "dont_override_profile": True,
-                               "disable_gaia_services": False,
-                               "disable_default_apps": False,
-                               "auto_login": False}
-        self._resource.start_custom_chrome(custom_chrome_setup)
-        kiosk_utils.config_riseplayer(
-                self._resource._browser, ext_id, app_config_id)
diff --git a/client/cros/multimedia/multimedia_xmlrpc_server.py b/client/cros/multimedia/multimedia_xmlrpc_server.py
index e77997e..7706a88 100755
--- a/client/cros/multimedia/multimedia_xmlrpc_server.py
+++ b/client/cros/multimedia/multimedia_xmlrpc_server.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,6 +10,7 @@
 import logging
 import os
 import six.moves.xmlrpc_client
+import sys
 import traceback
 
 import common   # pylint: disable=unused-import
@@ -18,19 +19,22 @@
 from autotest_lib.client.cros import constants
 from autotest_lib.client.cros import upstart
 from autotest_lib.client.cros import xmlrpc_server
-from autotest_lib.client.cros.multimedia import assistant_facade_native
-from autotest_lib.client.cros.multimedia import audio_facade_native
-from autotest_lib.client.cros.multimedia import bluetooth_facade_native
-from autotest_lib.client.cros.multimedia import browser_facade_native
-from autotest_lib.client.cros.multimedia import cfm_facade_native
-from autotest_lib.client.cros.multimedia import display_facade_native
+from autotest_lib.client.cros.multimedia import assistant_facade
+from autotest_lib.client.cros.multimedia import audio_facade
+from autotest_lib.client.cros.multimedia import browser_facade
+from autotest_lib.client.cros.multimedia import cfm_facade
+from autotest_lib.client.cros.multimedia import display_facade
 from autotest_lib.client.cros.multimedia import facade_resource
-from autotest_lib.client.cros.multimedia import graphics_facade_native
-from autotest_lib.client.cros.multimedia import input_facade_native
-from autotest_lib.client.cros.multimedia import kiosk_facade_native
-from autotest_lib.client.cros.multimedia import system_facade_native
-from autotest_lib.client.cros.multimedia import usb_facade_native
-from autotest_lib.client.cros.multimedia import video_facade_native
+from autotest_lib.client.cros.multimedia import graphics_facade
+from autotest_lib.client.cros.multimedia import input_facade
+from autotest_lib.client.cros.multimedia import kiosk_facade
+from autotest_lib.client.cros.multimedia import system_facade
+from autotest_lib.client.cros.multimedia import usb_facade
+from autotest_lib.client.cros.multimedia import video_facade
+
+# Python3 required for the following:
+if sys.version_info[0] >= 3:
+    from autotest_lib.client.cros.multimedia import bluetooth_facade
 
 
 class MultimediaXmlRpcDelegate(xmlrpc_server.XmlRpcDelegate):
@@ -50,47 +54,53 @@
 
         self._facades = {
                 'assistant':
-                assistant_facade_native.AssistantFacadeNative(resource),
+                assistant_facade.AssistantFacadeLocal(resource),
                 'audio':
-                audio_facade_native.AudioFacadeNative(resource,
+                audio_facade.AudioFacadeLocal(resource,
                                                       arc_resource=arc_res),
-                'bluetooth':
-                bluetooth_facade_native.BluetoothFacadeNative(),
                 'video':
-                video_facade_native.VideoFacadeNative(resource,
+                video_facade.VideoFacadeLocal(resource,
                                                       arc_resource=arc_res),
                 'display':
-                display_facade_native.DisplayFacadeNative(resource),
+                display_facade.DisplayFacadeLocal(resource),
                 'system':
-                system_facade_native.SystemFacadeNative(),
+                system_facade.SystemFacadeLocal(),
                 'usb':
-                usb_facade_native.USBFacadeNative(),
+                usb_facade.USBFacadeLocal(),
                 'browser':
-                browser_facade_native.BrowserFacadeNative(resource),
+                browser_facade.BrowserFacadeLocal(resource),
                 'input':
-                input_facade_native.InputFacadeNative(),
+                input_facade.InputFacadeLocal(),
                 'cfm_main_screen':
-                cfm_facade_native.CFMFacadeNative(resource, 'hotrod'),
+                cfm_facade.CFMFacadeLocal(resource, 'hotrod'),
                 'cfm_mimo_screen':
-                cfm_facade_native.CFMFacadeNative(resource, 'control'),
+                cfm_facade.CFMFacadeLocal(resource, 'control'),
                 'kiosk':
-                kiosk_facade_native.KioskFacadeNative(resource),
+                kiosk_facade.KioskFacadeLocal(resource),
                 'graphics':
-                graphics_facade_native.GraphicsFacadeNative()
+                graphics_facade.GraphicsFacadeLocal()
         }
 
+        # Limit some facades to python3
+        if sys.version_info[0] >= 3:
+            self._facades['bluetooth'] = bluetooth_facade.BluezFacadeLocal()
+            self._facades['floss'] = bluetooth_facade.FlossFacadeLocal()
 
     def __exit__(self, exception, value, traceback):
         """Clean up the resources."""
         self._facades['audio'].cleanup()
 
+        if 'floss' in self._facades:
+            self._facades['floss'].cleanup()
 
     def _dispatch(self, method, params):
         """Dispatches the method to the proper facade.
 
         We turn off allow_dotted_names option. The method handles the dot
         and dispatches the method to the proper native facade, like
-        DisplayFacadeNative.
+        DisplayFacadeLocal.
 
         """
         try:
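For readers unfamiliar with the dispatcher, the dotted-name handling described in the docstring above can be approximated by a sketch like the following (a simplification for illustration, not the server's actual code):

    def dispatch(facades, method, params):
        # e.g. method == 'system.get_mem_total' -> facade 'system', no params.
        facade_name, method_name = method.split('.', 1)
        if facade_name not in facades:
            raise KeyError('Unknown facade: %s' % facade_name)
        return getattr(facades[facade_name], method_name)(*params)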
diff --git a/client/cros/multimedia/system_facade.py b/client/cros/multimedia/system_facade.py
new file mode 100644
index 0000000..9d271bd
--- /dev/null
+++ b/client/cros/multimedia/system_facade.py
@@ -0,0 +1,244 @@
+# Lint as: python2, python3
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Facade to access the system-related functionality."""
+
+import six
+import os
+import threading
+import time
+
+from autotest_lib.client.bin import utils
+
+
+class SystemFacadeLocalError(Exception):
+    """Error in SystemFacadeLocal."""
+    pass
+
+
+class SystemFacadeLocal(object):
+    """Facede to access the system-related functionality.
+
+    The methods inside this class only accept Python native types.
+
+    """
+    SCALING_GOVERNOR_MODES = [
+            'performance',
+            'powersave',
+            'userspace',
+            'ondemand',
+            'conservative',
+            'schedutil',
+            'interactive', # deprecated since kernel v4.14
+            'sched' # deprecated since kernel v4.14
+            ]
+
+    def __init__(self):
+        self._bg_worker = None
+
+    def set_scaling_governor_mode(self, index, mode):
+        """Set mode of CPU scaling governor on one CPU.
+
+        @param index: CPU index starting from 0.
+
+        @param mode: Mode of the scaling governor; must be one of
+                     SCALING_GOVERNOR_MODES, e.g. 'ondemand' or 'performance'.
+
+        @returns: The original mode.
+
+        """
+        if mode not in self.SCALING_GOVERNOR_MODES:
+            raise SystemFacadeLocalError('mode %s is invalid' % mode)
+
+        governor_path = os.path.join(
+                '/sys/devices/system/cpu/cpu%d' % index,
+                'cpufreq/scaling_governor')
+        if not os.path.exists(governor_path):
+            raise SystemFacadeLocalError(
+                    'scaling governor of CPU %d is not available' % index)
+
+        original_mode = utils.read_one_line(governor_path)
+        utils.open_write_close(governor_path, mode)
+
+        return original_mode
+
+
+    def get_cpu_usage(self):
+        """Returns machine's CPU usage.
+
+        Returns:
+            A dictionary with 'user', 'nice', 'system' and 'idle' values.
+            Sample dictionary:
+            {
+                'user': 254544,
+                'nice': 9,
+                'system': 254768,
+                'idle': 2859878,
+            }
+        """
+        return utils.get_cpu_usage()
+
+
+    def compute_active_cpu_time(self, cpu_usage_start, cpu_usage_end):
+        """Computes the fraction of CPU time spent non-idling.
+
+        This function should be invoked using before/after values from calls to
+        get_cpu_usage().
+        """
+        return utils.compute_active_cpu_time(cpu_usage_start,
+                                                  cpu_usage_end)
+
+
+    def get_mem_total(self):
+        """Returns the total memory available in the system in MBytes."""
+        return utils.get_mem_total()
+
+
+    def get_mem_free(self):
+        """Returns the currently free memory in the system in MBytes."""
+        return utils.get_mem_free()
+
+    def get_mem_free_plus_buffers_and_cached(self):
+        """
+        Returns the free memory in MBytes, counting buffers and cached as free.
+
+        This is most often the most interesting number since buffers and cached
+        memory can be reclaimed on demand. Note, however, that there are cases
+        where this is misleading as well; for example, used tmpfs space
+        counts as Cached but cannot be reclaimed on demand.
+        See https://www.kernel.org/doc/Documentation/filesystems/tmpfs.txt.
+        """
+        return utils.get_mem_free_plus_buffers_and_cached()
+
+    def get_ec_temperatures(self):
+        """Uses ectool to return a list of all sensor temperatures in Celsius.
+        """
+        return utils.get_ec_temperatures()
+
+    def get_current_temperature_max(self):
+        """
+        Returns the highest reported board temperature (all sensors) in Celsius.
+        """
+        return utils.get_current_temperature_max()
+
+    def get_current_board(self):
+        """Returns the current device board name."""
+        return utils.get_current_board()
+
+
+    def get_chromeos_release_version(self):
+        """Returns chromeos version in device under test as string. None on
+        fail.
+        """
+        return utils.get_chromeos_release_version()
+
+    def get_num_allocated_file_handles(self):
+        """
+        Returns the number of currently allocated file handles.
+        """
+        return utils.get_num_allocated_file_handles()
+
+    def get_storage_statistics(self, device=None):
+        """
+        Fetches statistics for a storage device.
+        """
+        return utils.get_storage_statistics(device)
+
+    def get_energy_usage(self):
+        """
+        Gets the energy counter value as a string.
+        """
+        return utils.get_energy_usage()
+
+    def start_bg_worker(self, command):
+        """
+        Start executing the command in a background worker.
+        """
+        self._bg_worker = BackgroundWorker(command, do_process_output=True)
+        self._bg_worker.start()
+
+    def get_and_discard_bg_worker_output(self):
+        """
+        Returns the output collected so far since the last call to this method.
+        """
+        if self._bg_worker is None:
+            raise SystemFacadeLocalError(
+                    'Background worker has not been started.')
+
+        return self._bg_worker.get_and_discard_output()
+
+    def stop_bg_worker(self):
+        """
+        Stop the worker.
+        """
+        if self._bg_worker is None:
+            raise SystemFacadeLocalError(
+                    'Background worker has not been started.')
+
+        self._bg_worker.stop()
+        self._bg_worker = None
+
+
+class BackgroundWorker(object):
+    """
+    Worker intended for executing a command in the background and collecting its
+    output.
+    """
+
+    def __init__(self, command, do_process_output=False):
+        self._bg_job = None
+        self._command = command
+        self._do_process_output = do_process_output
+        self._output_lock = threading.Lock()
+        self._process_output_thread = None
+        self._stdout = six.StringIO()
+
+    def start(self):
+        """
+        Start executing the command.
+        """
+        self._bg_job = utils.BgJob(self._command, stdout_tee=self._stdout)
+        self._bg_job.sp.poll()
+        if self._bg_job.sp.returncode is not None:
+            self._exit_bg_job()
+
+        if self._do_process_output:
+            self._process_output_thread = threading.Thread(
+                    target=self._process_output)
+            self._process_output_thread.start()
+
+    def _process_output(self, sleep_interval=0.01):
+        while self._do_process_output:
+            with self._output_lock:
+                self._bg_job.process_output()
+            time.sleep(sleep_interval)
+
+    def get_and_discard_output(self):
+        """
+        Returns the output collected so far and then clears the output buffer.
+        In other words, subsequent calls to this method will not include output
+        that has already been returned before.
+        """
+        output = ""
+        with self._output_lock:
+            self._stdout.flush()
+            output = self._stdout.getvalue()
+            self._stdout.truncate(0)
+            self._stdout.seek(0)
+        return output
+
+    def stop(self):
+        """
+        Stop executing the command.
+        """
+        if self._do_process_output:
+            self._do_process_output = False
+            self._process_output_thread.join(1)
+        self._exit_bg_job()
+
+    def _exit_bg_job(self):
+        utils.nuke_subprocess(self._bg_job.sp)
+        utils.join_bg_jobs([self._bg_job])
+        if self._bg_job.result.exit_status > 0:
+            raise SystemFacadeLocalError('Background job failed: %s' %
+                                          self._bg_job.result.command)
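As a usage illustration for the background-worker API above, here is a minimal sketch; the command and timing are illustrative only, and it must run on the DUT where these modules are available.

    import time
    from autotest_lib.client.cros.multimedia import system_facade

    facade = system_facade.SystemFacadeLocal()
    facade.start_bg_worker('top -b -d 1')   # illustrative command
    time.sleep(2)                           # let some output accumulate
    print(facade.get_and_discard_bg_worker_output())
    facade.stop_bg_worker()                 # nukes the job and joins it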
diff --git a/client/cros/multimedia/system_facade_native.py b/client/cros/multimedia/system_facade_native.py
deleted file mode 100644
index ded8a90..0000000
--- a/client/cros/multimedia/system_facade_native.py
+++ /dev/null
@@ -1,238 +0,0 @@
-# Lint as: python2, python3
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Facade to access the system-related functionality."""
-
-import six
-import os
-import threading
-import time
-
-from autotest_lib.client.bin import utils
-
-
-class SystemFacadeNativeError(Exception):
-    """Error in SystemFacadeNative."""
-    pass
-
-
-class SystemFacadeNative(object):
-    """Facede to access the system-related functionality.
-
-    The methods inside this class only accept Python native types.
-
-    """
-    SCALING_GOVERNOR_MODES = [
-            'performance',
-            'powersave',
-            'userspace',
-            'ondemand',
-            'conservative',
-            'schedutil',
-            'interactive', # deprecated since kernel v4.14
-            'sched' # deprecated since kernel v4.14
-            ]
-
-    def __init__(self):
-        self._bg_worker = None
-
-    def set_scaling_governor_mode(self, index, mode):
-        """Set mode of CPU scaling governor on one CPU.
-
-        @param index: CPU index starting from 0.
-
-        @param mode: Mode of scaling governor, accept 'interactive' or
-                     'performance'.
-
-        @returns: The original mode.
-
-        """
-        if mode not in self.SCALING_GOVERNOR_MODES:
-            raise SystemFacadeNativeError('mode %s is invalid' % mode)
-
-        governor_path = os.path.join(
-                '/sys/devices/system/cpu/cpu%d' % index,
-                'cpufreq/scaling_governor')
-        if not os.path.exists(governor_path):
-            raise SystemFacadeNativeError(
-                    'scaling governor of CPU %d is not available' % index)
-
-        original_mode = utils.read_one_line(governor_path)
-        utils.open_write_close(governor_path, mode)
-
-        return original_mode
-
-
-    def get_cpu_usage(self):
-        """Returns machine's CPU usage.
-
-        Returns:
-            A dictionary with 'user', 'nice', 'system' and 'idle' values.
-            Sample dictionary:
-            {
-                'user': 254544,
-                'nice': 9,
-                'system': 254768,
-                'idle': 2859878,
-            }
-        """
-        return utils.get_cpu_usage()
-
-
-    def compute_active_cpu_time(self, cpu_usage_start, cpu_usage_end):
-        """Computes the fraction of CPU time spent non-idling.
-
-        This function should be invoked using before/after values from calls to
-        get_cpu_usage().
-        """
-        return utils.compute_active_cpu_time(cpu_usage_start,
-                                                  cpu_usage_end)
-
-
-    def get_mem_total(self):
-        """Returns the total memory available in the system in MBytes."""
-        return utils.get_mem_total()
-
-
-    def get_mem_free(self):
-        """Returns the currently free memory in the system in MBytes."""
-        return utils.get_mem_free()
-
-    def get_mem_free_plus_buffers_and_cached(self):
-        """
-        Returns the free memory in MBytes, counting buffers and cached as free.
-
-        This is most often the most interesting number since buffers and cached
-        memory can be reclaimed on demand. Note however, that there are cases
-        where this as misleading as well, for example used tmpfs space
-        count as Cached but can not be reclaimed on demand.
-        See https://www.kernel.org/doc/Documentation/filesystems/tmpfs.txt.
-        """
-        return utils.get_mem_free_plus_buffers_and_cached()
-
-    def get_ec_temperatures(self):
-        """Uses ectool to return a list of all sensor temperatures in Celsius.
-        """
-        return utils.get_ec_temperatures()
-
-    def get_current_temperature_max(self):
-        """
-        Returns the highest reported board temperature (all sensors) in Celsius.
-        """
-        return utils.get_current_temperature_max()
-
-    def get_current_board(self):
-        """Returns the current device board name."""
-        return utils.get_current_board()
-
-
-    def get_chromeos_release_version(self):
-        """Returns chromeos version in device under test as string. None on
-        fail.
-        """
-        return utils.get_chromeos_release_version()
-
-    def get_num_allocated_file_handles(self):
-        """
-        Returns the number of currently allocated file handles.
-        """
-        return utils.get_num_allocated_file_handles()
-
-    def get_storage_statistics(self, device=None):
-        """
-        Fetches statistics for a storage device.
-        """
-        return utils.get_storage_statistics(device)
-
-    def start_bg_worker(self, command):
-        """
-        Start executing the command in a background worker.
-        """
-        self._bg_worker = BackgroundWorker(command, do_process_output=True)
-        self._bg_worker.start()
-
-    def get_and_discard_bg_worker_output(self):
-        """
-        Returns the output collected so far since the last call to this method.
-        """
-        if self._bg_worker is None:
-            SystemFacadeNativeError('Background worker has not been started.')
-
-        return self._bg_worker.get_and_discard_output()
-
-    def stop_bg_worker(self):
-        """
-        Stop the worker.
-        """
-        if self._bg_worker is None:
-            SystemFacadeNativeError('Background worker has not been started.')
-
-        self._bg_worker.stop()
-        self._bg_worker = None
-
-
-class BackgroundWorker(object):
-    """
-    Worker intended for executing a command in the background and collecting its
-    output.
-    """
-
-    def __init__(self, command, do_process_output=False):
-        self._bg_job = None
-        self._command = command
-        self._do_process_output = do_process_output
-        self._output_lock = threading.Lock()
-        self._process_output_thread = None
-        self._stdout = six.StringIO()
-
-    def start(self):
-        """
-        Start executing the command.
-        """
-        self._bg_job = utils.BgJob(self._command, stdout_tee=self._stdout)
-        self._bg_job.sp.poll()
-        if self._bg_job.sp.returncode is not None:
-            self._exit_bg_job()
-
-        if self._do_process_output:
-            self._process_output_thread = threading.Thread(
-                    target=self._process_output)
-            self._process_output_thread.start()
-
-    def _process_output(self, sleep_interval=0.01):
-        while self._do_process_output:
-            with self._output_lock:
-                self._bg_job.process_output()
-            time.sleep(sleep_interval)
-
-    def get_and_discard_output(self):
-        """
-        Returns the output collected so far and then clears the output buffer.
-        In other words, subsequent calls to this method will not include output
-        that has already been returned before.
-        """
-        output = ""
-        with self._output_lock:
-            self._stdout.flush()
-            output = self._stdout.getvalue()
-            self._stdout.truncate(0)
-            self._stdout.seek(0)
-        return output
-
-    def stop(self):
-        """
-        Stop executing the command.
-        """
-        if self._do_process_output:
-            self._do_process_output = False
-            self._process_output_thread.join(1)
-        self._exit_bg_job()
-
-    def _exit_bg_job(self):
-        utils.nuke_subprocess(self._bg_job.sp)
-        utils.join_bg_jobs([self._bg_job])
-        if self._bg_job.result.exit_status > 0:
-            raise SystemFacadeNativeError('Background job failed: %s' %
-                                          self._bg_job.result.command)
diff --git a/client/cros/multimedia/usb_facade.py b/client/cros/multimedia/usb_facade.py
new file mode 100644
index 0000000..c6a3342
--- /dev/null
+++ b/client/cros/multimedia/usb_facade.py
@@ -0,0 +1,415 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An interface to access the local USB facade."""
+
+import glob
+import logging
+import os
+import time
+
+from autotest_lib.client.bin import utils
+from autotest_lib.client.cros.audio import cras_dbus_utils
+from autotest_lib.client.cros.audio import cras_utils
+
+
+class USBFacadeLocalError(Exception):
+    """Error in USBFacadeLocal."""
+    pass
+
+
+class USBFacadeLocal(object):
+    """Facade to access the USB-related functionality.
+
+    Property:
+      _drivers_manager: A USBDeviceDriversManager object used to manage the
+                        status of drivers associated with the USB audio gadget
+                        on the host side.
+
+    """
+    _DEFAULT_DEVICE_PRODUCT_NAME = 'Linux USB Audio Gadget'
+    _TIMEOUT_FINDING_USB_DEVICE_SECS = 10
+    _TIMEOUT_CRAS_NODES_CHANGE_SECS = 30
+
+    def __init__(self):
+        """Initializes the USB facade.
+
+        The _drivers_manager is set with a USBDeviceDriversManager, which is
+        used to control the visibility and availability of a USB device on a
+        host Cros device.
+
+        """
+        self._drivers_manager = USBDeviceDriversManager()
+
+
+    def _reenumerate_usb_devices(self):
+        """Resets host controller to re-enumerate usb devices."""
+        self._drivers_manager.reset_host_controller()
+
+
+    def plug(self):
+        """Sets and plugs the USB device into the host.
+
+        The USB device is initially set to one with the default product name,
+        which is assumed to be the name of the USB audio gadget on Chameleon.
+        This method blocks until Cras enumerates USB nodes within a timeout
+        specified in _wait_for_nodes_changed.
+
+        """
+        # Only supports controlling one USB device of default name.
+        device_name = self._DEFAULT_DEVICE_PRODUCT_NAME
+
+        def find_usb_device():
+            """Find USB device with name device_name.
+
+            @returns: True if the device is found, False otherwise.
+
+            """
+            try:
+                self._drivers_manager.find_usb_device(device_name)
+                return True
+            except USBDeviceDriversManagerError:
+                logging.debug('Can not find %s yet', device_name)
+                return False
+
+        if self._drivers_manager.has_found_device(device_name):
+            if self._drivers_manager.drivers_are_bound():
+                return
+            self._drivers_manager.bind_usb_drivers()
+            self._wait_for_nodes_changed()
+        else:
+            # If driver manager has not found device yet, re-enumerate USB
+            # devices. The correct USB driver will be bound automatically.
+            self._reenumerate_usb_devices()
+            self._wait_for_nodes_changed()
+            # Wait some time for paths and fields in sysfs to be created.
+            utils.poll_for_condition(
+                    condition=find_usb_device,
+                    desc='Find USB device',
+                    timeout=self._TIMEOUT_FINDING_USB_DEVICE_SECS)
+
+
+    def unplug(self):
+        """Unplugs the USB device from the host."""
+        self._drivers_manager.unbind_usb_drivers()
+
+
+    def _wait_for_nodes_changed(self):
+        """Waits for Cras to enumerate USB nodes.
+
+        USB nodes will be plugged, but not necessarily selected.
+
+        """
+        def find_usb_node():
+            """Checks if USB input and output nodes are plugged.
+
+            @returns: True if USB input and output nodes are plugged. False
+                      otherwise.
+            """
+            out_nodes, in_nodes = cras_utils.get_plugged_node_types()
+            logging.info('Cras nodes: output: %s, input: %s',
+                         out_nodes, in_nodes)
+            return 'USB' in out_nodes and 'USB' in in_nodes
+
+        utils.poll_for_condition(
+                condition=find_usb_node,
+                desc='Find USB node',
+                timeout=self._TIMEOUT_CRAS_NODES_CHANGE_SECS)
+
+
+class USBDeviceDriversManagerError(Exception):
+    """Error in USBDeviceDriversManager."""
+    pass
+
+
+class HostControllerDriver(object):
+    """Abstract a host controller driver.
+
+    This class stores an id and a path like:
+    path: /sys/bus/pci/drivers/ehci_hcd
+    id: 0000:00:1a.0
+    Then, it can bind/unbind the driver by writing
+    0000:00:1a.0 to /sys/bus/pci/drivers/ehci_hcd/bind
+    and /sys/bus/pci/drivers/ehci_hcd/unbind.
+
+    """
+    def __init__(self, hcd_id, hcd_path):
+        """Inits an HostControllerDriver object.
+
+        @param hcd_id: The HCD id, e.g. 0000:00:1a.0
+        @param hcd_path: The path to HCD, e.g. /sys/bus/pci/drivers/echi_hcd.
+
+        """
+        logging.debug('hcd id: %s, hcd path: %s', hcd_id, hcd_path)
+        self._hcd_id = hcd_id
+        self._hcd_path = hcd_path
+
+
+    def reset(self):
+        """Resets HCD by unbinding and binding driver."""
+        utils.open_write_close(
+            os.path.join(self._hcd_path, 'unbind'), self._hcd_id)
+        utils.open_write_close(
+            os.path.join(self._hcd_path, 'bind'), self._hcd_id)
+
+
+class USBDeviceDriversManager(object):
+    """The class to control the USB drivers associated with a USB device.
+
+    By binding/unbinding a certain USB driver, we can emulate the plug/unplug
+    action on that bus. However, this method only applies when the USB driver
+    has already been bound once.
+    To solve the above problem, we can unbind and then bind the USB host
+    controller driver (HCD); the HCD will then re-enumerate all the USB
+    devices. This method has the side effect that all the USB devices will be
+    disconnected for several seconds, so we should only do it if needed.
+    Note that there might be multiple HCDs, e.g. 0000:00:1a.0 for bus1 and
+    0000:00:1b.0 for bus2.
+
+    Properties:
+        _device_product_name: The product name given to the USB device.
+        _device_bus_id: The bus ID of the USB device in the host.
+        _hcd_ids: The host controller driver IDs.
+        _hcds: A list of HostControllerDrivers.
+
+    """
+    # The file to write to bind USB drivers of specified device
+    _USB_BIND_FILE_PATH = '/sys/bus/usb/drivers/usb/bind'
+    # The file to write to unbind USB drivers of specified device
+    _USB_UNBIND_FILE_PATH = '/sys/bus/usb/drivers/usb/unbind'
+    # The file path that exists when drivers are bound for current device
+    _USB_BOUND_DRIVERS_FILE_PATH = '/sys/bus/usb/drivers/usb/%s/driver'
+    # The pattern to glob usb drivers
+    _USB_DRIVER_GLOB_PATTERN = '/sys/bus/usb/drivers/usb/usb?/'
+    # The path to search for HCD on PCI or platform bus.
+    # The HCD id should be filled in the end.
+    _HCD_GLOB_PATTERNS = [
+            '/sys/bus/pci/drivers/*/%s',
+            '/sys/bus/platform/drivers/*/%s']
+
+
+    def __init__(self):
+        """Initializes the manager.
+
+        _device_product_name and _device_bus_id are initially set to None.
+
+        """
+        self._device_product_name = None
+        self._device_bus_id = None
+        self._hcd_ids = None
+        self._hcds = None
+        self._find_hcd_ids()
+        self._create_hcds()
+
+
+    def _find_hcd_ids(self):
+        """Finds host controller driver ids for USB.
+
+        We can find the HCD id for USB from driver's realpath.
+        E.g. On ARM device:
+        /sys/bus/usb/drivers/usb/usb1 links to
+        /sys/devices/soc0/70090000.usb/xhci-hcd.0.auto/usb1
+        => HCD id is xhci-hcd.0.auto
+
+        E.g. On X86 device:
+        /sys/bus/usb/drivers/usb/usb1 links to
+        /sys/devices/pci0000:00/0000:00:14.0/usb1
+        => HCD id is 0000:00:14.0
+
+        There might be multiple HCD ids like 0000:00:1a.0 for usb1,
+        and 0000:00:1d.0 for usb2.
+
+        @raises: USBDeviceDriversManagerError if HCD id can not be found.
+
+        """
+        def _get_dir_name(path):
+            return os.path.basename(os.path.dirname(path))
+
+        hcd_ids = set()
+
+        for search_root_path in glob.glob(self._USB_DRIVER_GLOB_PATTERN):
+            hcd_id = _get_dir_name(os.path.realpath(search_root_path))
+            hcd_ids.add(hcd_id)
+
+        if not hcd_ids:
+            raise USBDeviceDriversManagerError('Can not find HCD id')
+
+        self._hcd_ids = hcd_ids
+        logging.debug('Found HCD ids: %s', self._hcd_ids)
+
+
+    def _create_hcds(self):
+        """Finds HCD paths from HCD id and create HostControllerDrivers.
+
+        HCD is under /sys/bus/pci/drivers/ for x86 boards, and under
+        /sys/bus/platform/drivers/ for ARM boards.
+
+        For each HCD id, finds HCD by checking HCD id under it, e.g.
+        /sys/bus/pci/drivers/ehci_hcd has 0000:00:1a.0 under it.
+        Then, create a HostControllerDriver and store it in self._hcds.
+
+        @raises: USBDeviceDriversManagerError if more than one HCD path is
+                 found for a given HCD id.
+
+        @raises: USBDeviceDriversManagerError if no HostControllerDriver is found.
+
+        """
+        self._hcds = []
+
+        for hcd_id in self._hcd_ids:
+            for glob_pattern in self._HCD_GLOB_PATTERNS:
+                glob_pattern = glob_pattern % hcd_id
+                hcd_id_paths = glob.glob(glob_pattern)
+                if not hcd_id_paths:
+                    continue
+                if len(hcd_id_paths) > 1:
+                    raise USBDeviceDriversManagerError(
+                            'More than 1 HCD id path found: %s' % hcd_id_paths)
+                hcd_id_path = hcd_id_paths[0]
+
+                # Gets /sys/bus/pci/drivers/ehci_hcd from
+                # /sys/bus/pci/drivers/ehci_hcd/0000:00:1a.0
+                hcd_path = os.path.dirname(hcd_id_path)
+                self._hcds.append(
+                        HostControllerDriver(hcd_id=hcd_id, hcd_path=hcd_path))
+
+
+    def reset_host_controller(self):
+        """Resets host controller by unbinding then binding HCD.
+
+        @raises: USBDeviceDriversManagerError if there is no HCD to control.
+
+        """
+        if not self._hcds:
+            raise USBDeviceDriversManagerError('HCD is not found yet')
+        for hcd in self._hcds:
+            hcd.reset()
+
+
+    def _find_usb_device_bus_id(self, product_name):
+        """Finds the bus ID of the USB device with the given product name.
+
+        @param product_name: The product name of the USB device as it appears
+                             to the host.
+
+        @returns: The bus ID of the USB device if it is detected by the host
+                  successfully; or None if there is no such device with the
+                  given product name.
+
+        """
+        def product_matched(path):
+            """Checks if the product field matches expected product name.
+
+            @returns: True if the product name matches, False otherwise.
+
+            """
+            read_product_name = utils.read_one_line(path)
+            logging.debug('Read product at %s = %s', path, read_product_name)
+            return read_product_name == product_name
+
+        # Find product field at these possible paths:
+        # '/sys/bus/usb/drivers/usb/usbX/X-Y/product' => bus id is X-Y.
+        # '/sys/bus/usb/drivers/usb/usbX/X-Y/X-Y.Z/product' => bus id is X-Y.Z.
+
+        for search_root_path in glob.glob(self._USB_DRIVER_GLOB_PATTERN):
+            logging.debug('search_root_path: %s', search_root_path)
+            for root, dirs, _ in os.walk(search_root_path):
+                logging.debug('root: %s', root)
+                for bus_id in dirs:
+                    logging.debug('bus_id: %s', bus_id)
+                    product_path = os.path.join(root, bus_id, 'product')
+                    logging.debug('product_path: %s', product_path)
+                    if not os.path.exists(product_path):
+                        continue
+                    if not product_matched(product_path):
+                        continue
+                    logging.debug(
+                            'Bus ID of %s found: %s', product_name, bus_id)
+                    return bus_id
+
+        logging.error('Bus ID of %s not found', product_name)
+        return None
+
+
+    def has_found_device(self, product_name):
+        """Checks if the device has been found.
+
+        @param product_name: The product name of the USB device as it appears
+                             to the host.
+
+        @returns: True if device has been found, False otherwise.
+
+        """
+        return self._device_product_name == product_name
+
+
+    def find_usb_device(self, product_name):
+        """Sets _device_product_name and _device_bus_id if it can be found.
+
+        @param product_name: The product name of the USB device as it appears
+                             to the host.
+
+        @raises: USBDeviceDriversManagerError if device bus ID cannot be found
+                 for the device with the given product name.
+
+        """
+        device_bus_id = self._find_usb_device_bus_id(product_name)
+        if device_bus_id is None:
+            error_message = 'Cannot find device with product name: %s'
+            raise USBDeviceDriversManagerError(error_message % product_name)
+        else:
+            self._device_product_name = product_name
+            self._device_bus_id = device_bus_id
+
+
+    def drivers_are_bound(self):
+        """Checks whether the drivers with the of current device are bound.
+
+        If the drivers are already bound, calling bind_usb_drivers will be
+        redundant and also result in an error.
+
+        @return: True if the path to the drivers exists, meaning the drivers
+                 are already bound. False otherwise.
+
+        """
+        if self._device_bus_id is None:
+            raise USBDeviceDriversManagerError('USB Bus ID is not set yet.')
+        driver_path = self._USB_BOUND_DRIVERS_FILE_PATH % self._device_bus_id
+        return os.path.exists(driver_path)
+
+
+    def bind_usb_drivers(self):
+        """Binds the USB driver(s) of the current device to the host.
+
+        This is applied to all the drivers associated with and listed under
+        the USB device with the current _device_product_name and _device_bus_id.
+
+        @raises: USBDeviceDriversManagerError if device bus ID for this instance
+                 has not been set yet.
+
+        """
+        if self._device_bus_id is None:
+            raise USBDeviceDriversManagerError('USB Bus ID is not set yet.')
+        if self.drivers_are_bound():
+            return
+        utils.open_write_close(self._USB_BIND_FILE_PATH,
+                self._device_bus_id)
+
+
+    def unbind_usb_drivers(self):
+        """Unbinds the USB driver(s) of the current device from the host.
+
+        This is applied to all the drivers associated with and listed under
+        the USB device with the current _device_product_name and _device_bus_id.
+
+        @raises: USBDeviceDriversManagerError if device bus ID for this instance
+                 has not been set yet.
+
+        """
+        if self._device_bus_id is None:
+            raise USBDeviceDriversManagerError('USB Bus ID is not set yet.')
+        if not self.drivers_are_bound():
+            return
+        utils.open_write_close(self._USB_UNBIND_FILE_PATH,
+                                    self._device_bus_id)
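A minimal sketch of the plug/unplug flow exposed by USBFacadeLocal; it assumes a USB audio gadget with the default product name is attached to the DUT and CRAS is running.

    from autotest_lib.client.cros.multimedia import usb_facade

    usb = usb_facade.USBFacadeLocal()
    usb.plug()     # binds drivers (or resets the HCD) and waits for USB nodes
    usb.unplug()   # unbinds the drivers to emulate an unplug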
diff --git a/client/cros/multimedia/usb_facade_native.py b/client/cros/multimedia/usb_facade_native.py
deleted file mode 100644
index 550560a..0000000
--- a/client/cros/multimedia/usb_facade_native.py
+++ /dev/null
@@ -1,415 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""An interface to access the local USB facade."""
-
-import glob
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.cros.audio import cras_dbus_utils
-from autotest_lib.client.cros.audio import cras_utils
-
-
-class USBFacadeNativeError(Exception):
-    """Error in USBFacadeNative."""
-    pass
-
-
-class USBFacadeNative(object):
-    """Facade to access the USB-related functionality.
-
-    Property:
-      _drivers_manager: A USBDeviceDriversManager object used to manage the
-                        status of drivers associated with the USB audio gadget
-                        on the host side.
-
-    """
-    _DEFAULT_DEVICE_PRODUCT_NAME = 'Linux USB Audio Gadget'
-    _TIMEOUT_FINDING_USB_DEVICE_SECS = 10
-    _TIMEOUT_CRAS_NODES_CHANGE_SECS = 30
-
-    def __init__(self):
-        """Initializes the USB facade.
-
-        The _drivers_manager is set with a USBDeviceDriversManager, which is
-        used to control the visibility and availability of a USB device on a
-        host Cros device.
-
-        """
-        self._drivers_manager = USBDeviceDriversManager()
-
-
-    def _reenumerate_usb_devices(self):
-        """Resets host controller to re-enumerate usb devices."""
-        self._drivers_manager.reset_host_controller()
-
-
-    def plug(self):
-        """Sets and plugs the USB device into the host.
-
-        The USB device is initially set to one with the default product name,
-        which is assumed to be the name of the USB audio gadget on Chameleon.
-        This method blocks until Cras enumerate USB nodes within a timeout
-        specified in _wait_for_nodes_changed.
-
-        """
-        # Only supports controlling one USB device of default name.
-        device_name = self._DEFAULT_DEVICE_PRODUCT_NAME
-
-        def find_usb_device():
-            """Find USB device with name device_name.
-
-            @returns: True if succeed to find the device, False otherwise.
-
-            """
-            try:
-                self._drivers_manager.find_usb_device(device_name)
-                return True
-            except USBDeviceDriversManagerError:
-                logging.debug('Can not find %s yet' % device_name)
-                return False
-
-        if self._drivers_manager.has_found_device(device_name):
-            if self._drivers_manager.drivers_are_bound():
-                return
-            self._drivers_manager.bind_usb_drivers()
-            self._wait_for_nodes_changed()
-        else:
-            # If driver manager has not found device yet, re-enumerate USB
-            # devices. The correct USB driver will be binded automatically.
-            self._reenumerate_usb_devices()
-            self._wait_for_nodes_changed()
-            # Wait some time for paths and fields in sysfs to be created.
-            utils.poll_for_condition(
-                    condition=find_usb_device,
-                    desc='Find USB device',
-                    timeout=self._TIMEOUT_FINDING_USB_DEVICE_SECS)
-
-
-    def unplug(self):
-        """Unplugs the USB device from the host."""
-        self._drivers_manager.unbind_usb_drivers()
-
-
-    def _wait_for_nodes_changed(self):
-        """Waits for Cras to enumerate USB nodes.
-
-        USB nodes will be plugged, but not necessarily selected.
-
-        """
-        def find_usb_node():
-            """Checks if USB input and output nodes are plugged.
-
-            @returns: True if USB input and output nodes are plugged. False
-                      otherwise.
-            """
-            out_nodes, in_nodes = cras_utils.get_plugged_node_types()
-            logging.info('Cras nodes: output: %s, input: %s',
-                         out_nodes, in_nodes)
-            return 'USB' in out_nodes and 'USB' in in_nodes
-
-        utils.poll_for_condition(
-                condition=find_usb_node,
-                desc='Find USB node',
-                timeout=self._TIMEOUT_CRAS_NODES_CHANGE_SECS)
-
-
-class USBDeviceDriversManagerError(Exception):
-    """Error in USBDeviceDriversManager."""
-    pass
-
-
-class HostControllerDriver(object):
-    """Abstract a host controller driver.
-
-    This class stores id and path like:
-    path: /sys/bus/pci/drivers/echi_hcd
-    id: 0000:00:1a.0
-    Then, it can bind/unbind driver by writing
-    0000:00:1a.0 to /sys/bus/pci/drivers/echi_hcd/bind
-    and /sys/bus/pci/drivers/echi_hcd/unbind.
-
-    """
-    def __init__(self, hcd_id, hcd_path):
-        """Inits an HostControllerDriver object.
-
-        @param hcd_id: The HCD id, e.g. 0000:00:1a.0
-        @param hcd_path: The path to HCD, e.g. /sys/bus/pci/drivers/echi_hcd.
-
-        """
-        logging.debug('hcd id: %s, hcd path: %s', hcd_id, hcd_path)
-        self._hcd_id = hcd_id
-        self._hcd_path = hcd_path
-
-
-    def reset(self):
-        """Resets HCD by unbinding and binding driver."""
-        utils.open_write_close(
-            os.path.join(self._hcd_path, 'unbind'), self._hcd_id)
-        utils.open_write_close(
-            os.path.join(self._hcd_path, 'bind'), self._hcd_id)
-
-
-class USBDeviceDriversManager(object):
-    """The class to control the USB drivers associated with a USB device.
-
-    By binding/unbinding certain USB driver, we can emulate the plug/unplug
-    action on that bus. However, this method only applies when the USB driver
-    has already been binded once.
-    To solve above problem, we can unbind then bind USB host controller driver
-    (HCD), then, HCD will re-enumerate all the USB devices. This method has
-    a side effect that all the USB devices will be disconnected for several
-    seconds, so we should only do it if needed.
-    Note that there might be multiple HCDs, e.g. 0000:00:1a.0 for bus1 and
-    0000:00:1b.0 for bus2.
-
-    Properties:
-        _device_product_name: The product name given to the USB device.
-        _device_bus_id: The bus ID of the USB device in the host.
-        _hcd_ids: The host controller driver IDs.
-        _hcds: A list of HostControllerDrivers.
-
-    """
-    # The file to write to bind USB drivers of specified device
-    _USB_BIND_FILE_PATH = '/sys/bus/usb/drivers/usb/bind'
-    # The file to write to unbind USB drivers of specified device
-    _USB_UNBIND_FILE_PATH = '/sys/bus/usb/drivers/usb/unbind'
-    # The file path that exists when drivers are bound for current device
-    _USB_BOUND_DRIVERS_FILE_PATH = '/sys/bus/usb/drivers/usb/%s/driver'
-    # The pattern to glob usb drivers
-    _USB_DRIVER_GLOB_PATTERN = '/sys/bus/usb/drivers/usb/usb?/'
-    # The path to search for HCD on PCI or platform bus.
-    # The HCD id should be filled in the end.
-    _HCD_GLOB_PATTERNS = [
-            '/sys/bus/pci/drivers/*/%s',
-            '/sys/bus/platform/drivers/*/%s']
-
-
-    def __init__(self):
-        """Initializes the manager.
-
-        _device_product_name and _device_bus_id are initially set to None.
-
-        """
-        self._device_product_name = None
-        self._device_bus_id = None
-        self._hcd_ids = None
-        self._hcds = None
-        self._find_hcd_ids()
-        self._create_hcds()
-
-
-    def _find_hcd_ids(self):
-        """Finds host controller driver ids for USB.
-
-        We can find the HCD id for USB from driver's realpath.
-        E.g. On ARM device:
-        /sys/bus/usb/drivers/usb/usb1 links to
-        /sys/devices/soc0/70090000.usb/xhci-hcd.0.auto/usb1
-        => HCD id is xhci-hcd.0.auto
-
-        E.g. On X86 device:
-        /sys/bus/usb/drivers/usb/usb1 links to
-        /sys/devices/pci0000:00/0000:00:14.0/usb1
-        => HCD id is 0000:00:14.0
-
-        There might be multiple HCD ids like 0000:00:1a.0 for usb1,
-        and 0000:00:1d.0 for usb2.
-
-        @raises: USBDeviceDriversManagerError if HCD id can not be found.
-
-        """
-        def _get_dir_name(path):
-            return os.path.basename(os.path.dirname(path))
-
-        hcd_ids = set()
-
-        for search_root_path in glob.glob(self._USB_DRIVER_GLOB_PATTERN):
-            hcd_id = _get_dir_name(os.path.realpath(search_root_path))
-            hcd_ids.add(hcd_id)
-
-        if not hcd_ids:
-            raise USBDeviceDriversManagerError('Can not find HCD id')
-
-        self._hcd_ids = hcd_ids
-        logging.debug('Found HCD ids: %s', self._hcd_ids)
-
-
-    def _create_hcds(self):
-        """Finds HCD paths from HCD id and create HostControllerDrivers.
-
-        HCD is under /sys/bus/pci/drivers/ for x86 boards, and under
-        /sys/bus/platform/drivers/ for ARM boards.
-
-        For each HCD id, finds HCD by checking HCD id under it, e.g.
-        /sys/bus/pci/drivers/ehci_hcd has 0000:00:1a.0 under it.
-        Then, create a HostControllerDriver and store it in self._hcds.
-
-        @raises: USBDeviceDriversManagerError if there are multiple
-                 HCD path found for a given HCD id.
-
-        @raises: USBDeviceDriversManagerError if no HostControllerDriver is found.
-
-        """
-        self._hcds = []
-
-        for hcd_id in self._hcd_ids:
-            for glob_pattern in self._HCD_GLOB_PATTERNS:
-                glob_pattern = glob_pattern % hcd_id
-                hcd_id_paths = glob.glob(glob_pattern)
-                if not hcd_id_paths:
-                    continue
-                if len(hcd_id_paths) > 1:
-                    raise USBDeviceDriversManagerError(
-                            'More than 1 HCD id path found: %s' % hcd_id_paths)
-                hcd_id_path = hcd_id_paths[0]
-
-                # Gets /sys/bus/pci/drivers/echi_hcd from
-                # /sys/bus/pci/drivers/echi_hcd/0000:00:1a.0
-                hcd_path = os.path.dirname(hcd_id_path)
-                self._hcds.append(
-                        HostControllerDriver(hcd_id=hcd_id, hcd_path=hcd_path))
-
-
-    def reset_host_controller(self):
-        """Resets host controller by unbinding then binding HCD.
-
-        @raises: USBDeviceDriversManagerError if there is no HCD to control.
-
-        """
-        if not self._hcds:
-            raise USBDeviceDriversManagerError('HCD is not found yet')
-        for hcd in self._hcds:
-            hcd.reset()
-
-
-    def _find_usb_device_bus_id(self, product_name):
-        """Finds the bus ID of the USB device with the given product name.
-
-        @param product_name: The product name of the USB device as it appears
-                             to the host.
-
-        @returns: The bus ID of the USB device if it is detected by the host
-                  successfully; or None if there is no such device with the
-                  given product name.
-
-        """
-        def product_matched(path):
-            """Checks if the product field matches expected product name.
-
-            @returns: True if the product name matches, False otherwise.
-
-            """
-            read_product_name = utils.read_one_line(path)
-            logging.debug('Read product at %s = %s', path, read_product_name)
-            return read_product_name == product_name
-
-        # Find product field at these possible paths:
-        # '/sys/bus/usb/drivers/usb/usbX/X-Y/product' => bus id is X-Y.
-        # '/sys/bus/usb/drivers/usb/usbX/X-Y/X-Y.Z/product' => bus id is X-Y.Z.
-
-        for search_root_path in glob.glob(self._USB_DRIVER_GLOB_PATTERN):
-            logging.debug('search_root_path: %s', search_root_path)
-            for root, dirs, _ in os.walk(search_root_path):
-                logging.debug('root: %s', root)
-                for bus_id in dirs:
-                    logging.debug('bus_id: %s', bus_id)
-                    product_path = os.path.join(root, bus_id, 'product')
-                    logging.debug('product_path: %s', product_path)
-                    if not os.path.exists(product_path):
-                        continue
-                    if not product_matched(product_path):
-                        continue
-                    logging.debug(
-                            'Bus ID of %s found: %s', product_name, bus_id)
-                    return bus_id
-
-        logging.error('Bus ID of %s not found', product_name)
-        return None
-
-
-    def has_found_device(self, product_name):
-        """Checks if the device has been found.
-
-        @param product_name: The product name of the USB device as it appears
-                             to the host.
-
-        @returns: True if device has been found, False otherwise.
-
-        """
-        return self._device_product_name == product_name
-
-
-    def find_usb_device(self, product_name):
-        """Sets _device_product_name and _device_bus_id if it can be found.
-
-        @param product_name: The product name of the USB device as it appears
-                             to the host.
-
-        @raises: USBDeviceDriversManagerError if device bus ID cannot be found
-                 for the device with the given product name.
-
-        """
-        device_bus_id = self._find_usb_device_bus_id(product_name)
-        if device_bus_id is None:
-            error_message = 'Cannot find device with product name: %s'
-            raise USBDeviceDriversManagerError(error_message % product_name)
-        else:
-            self._device_product_name = product_name
-            self._device_bus_id = device_bus_id
-
-
-    def drivers_are_bound(self):
-        """Checks whether the drivers with the of current device are bound.
-
-        If the drivers are already bound, calling bind_usb_drivers will be
-        redundant and also result in an error.
-
-        @return: True if the path to the drivers exist, meaning the drivers
-                 are already bound. False otherwise.
-
-        """
-        if self._device_bus_id is None:
-            raise USBDeviceDriversManagerError('USB Bus ID is not set yet.')
-        driver_path = self._USB_BOUND_DRIVERS_FILE_PATH % self._device_bus_id
-        return os.path.exists(driver_path)
-
-
-    def bind_usb_drivers(self):
-        """Binds the USB driver(s) of the current device to the host.
-
-        This is applied to all the drivers associated with and listed under
-        the USB device with the current _device_product_name and _device_bus_id.
-
-        @raises: USBDeviceDriversManagerError if device bus ID for this instance
-                 has not been set yet.
-
-        """
-        if self._device_bus_id is None:
-            raise USBDeviceDriversManagerError('USB Bus ID is not set yet.')
-        if self.drivers_are_bound():
-            return
-        utils.open_write_close(self._USB_BIND_FILE_PATH,
-                self._device_bus_id)
-
-
-    def unbind_usb_drivers(self):
-        """Unbinds the USB driver(s) of the current device from the host.
-
-        This is applied to all the drivers associated with and listed under
-        the USB device with the current _device_product_name and _device_bus_id.
-
-        @raises: USBDeviceDriversManagerError if device bus ID for this instance
-                 has not been set yet.
-
-        """
-        if self._device_bus_id is None:
-            raise USBDeviceDriversManagerError('USB Bus ID is not set yet.')
-        if not self.drivers_are_bound():
-            return
-        utils.open_write_close(self._USB_UNBIND_FILE_PATH,
-                                    self._device_bus_id)
diff --git a/client/cros/multimedia/video_facade.py b/client/cros/multimedia/video_facade.py
new file mode 100644
index 0000000..a289399
--- /dev/null
+++ b/client/cros/multimedia/video_facade.py
@@ -0,0 +1,164 @@
+# Copyright 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Facade to access the video-related functionality."""
+
+import functools
+import glob
+import os
+
+from autotest_lib.client.bin import utils
+from autotest_lib.client.cros.multimedia import display_facade
+from autotest_lib.client.cros.video import builtin_html5_player
+
+
+class VideoFacadeLocalError(Exception):
+    """Error in VideoFacadeLocal."""
+    pass
+
+
+def check_arc_resource(func):
+    """Decorator function for ARC related functions in VideoFacadeLocal."""
+    @functools.wraps(func)
+    def wrapper(instance, *args, **kwargs):
+        """Wrapper for the methods to check _arc_resource.
+
+        @param instance: Object instance.
+
+        @raises: VideoFacadeLocalError if there is no ARC resource.
+
+        """
+        if not instance._arc_resource:
+            raise VideoFacadeLocalError('There is no ARC resource.')
+        return func(instance, *args, **kwargs)
+    return wrapper
+
+
+class VideoFacadeLocal(object):
+    """Facede to access the video-related functionality.
+
+    The methods inside this class only accept Python native types.
+
+    """
+
+    def __init__(self, resource, arc_resource=None):
+        """Initializes an video facade.
+
+        @param resource: A FacadeResource object.
+        @param arc_resource: An ArcResource object.
+
+        """
+        self._resource = resource
+        self._player = None
+        self._arc_resource = arc_resource
+        self._display_facade = display_facade.DisplayFacadeLocal(
+                resource)
+        self.bindir = os.path.dirname(os.path.realpath(__file__))
+
+
+    def cleanup(self):
+        """Clean up the temporary files."""
+        for path in glob.glob('/tmp/playback_*'):
+            os.unlink(path)
+
+        if self._arc_resource:
+            self._arc_resource.cleanup()
+
+
+    def prepare_playback(self, file_path, fullscreen=True):
+        """Copies the html file to /tmp and loads the webpage.
+
+        @param file_path: The path to the file.
+        @param fullscreen: Plays the video in fullscreen.
+
+        """
+        # Copies the html file to /tmp to make it accessible.
+        utils.get_file(
+                os.path.join(self.bindir, 'video.html'),
+                '/tmp/playback_video.html')
+
+        html_path = 'file:///tmp/playback_video.html'
+
+        tab = self._resource._browser.tabs.New()
+        tab.Navigate(html_path)
+        self._player = builtin_html5_player.BuiltinHtml5Player(
+                tab=tab,
+                full_url=html_path,
+                video_id='video',
+                video_src_path=file_path)
+        self._player.load_video()
+
+        if fullscreen:
+            self._display_facade.set_fullscreen(True)
+
+
+    def start_playback(self, blocking=False):
+        """Starts video playback on the webpage.
+
+        Before calling this method, user should call prepare_playback to
+        put the files to /tmp and load the webpage.
+
+        @param blocking: Blocks this call until playback finishes.
+
+        """
+        self._player.play()
+        if blocking:
+            self._player.wait_video_ended()
+
+
+    def pause_playback(self):
+        """Pauses playback on the webpage."""
+        self._player.pause()
+
+
+    def dropped_frame_count(self):
+        """
+        Gets the number of dropped frames.
+
+        @returns: An integer indicating the number of dropped frames.
+
+        """
+        return self._player.dropped_frame_count()
+
+
+    @check_arc_resource
+    def prepare_arc_playback(self, file_path, fullscreen=True):
+        """Copies the video file to be played into container and starts the app.
+
+        User should call this method to put the file into container before
+        calling start_arc_playback.
+
+        @param file_path: Path to the file to be played on Cros host.
+        @param fullscreen: Plays the video in fullscreen.
+
+        """
+        self._arc_resource.play_video.prepare_playback(file_path, fullscreen)
+
+
+    @check_arc_resource
+    def start_arc_playback(self, blocking_secs=None):
+        """Starts playback through Play Video app.
+
+        Before calling this method, user should call prepare_arc_playback to
+        put the file into the container and start the app.
+
+        @param blocking_secs: A positive number of seconds to wait for the
+                              playback to finish. Set to None to make the call
+                              non-blocking.
+
+        """
+        self._arc_resource.play_video.start_playback(blocking_secs)
+
+
+    @check_arc_resource
+    def pause_arc_playback(self):
+        """Pauses playback through Play Video app."""
+        self._arc_resource.play_video.pause_playback()
+
+
+    @check_arc_resource
+    def stop_arc_playback(self):
+        """Stops playback through Play Video app."""
+        self._arc_resource.play_video.stop_playback()
diff --git a/client/cros/multimedia/video_facade_native.py b/client/cros/multimedia/video_facade_native.py
deleted file mode 100644
index 8665d5c..0000000
--- a/client/cros/multimedia/video_facade_native.py
+++ /dev/null
@@ -1,164 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Facade to access the video-related functionality."""
-
-import functools
-import glob
-import os
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.cros.multimedia import display_facade_native
-from autotest_lib.client.cros.video import native_html5_player
-
-
-class VideoFacadeNativeError(Exception):
-    """Error in VideoFacadeNative."""
-    pass
-
-
-def check_arc_resource(func):
-    """Decorator function for ARC related functions in VideoFacadeNative."""
-    @functools.wraps(func)
-    def wrapper(instance, *args, **kwargs):
-        """Wrapper for the methods to check _arc_resource.
-
-        @param instance: Object instance.
-
-        @raises: VideoFacadeNativeError if there is no ARC resource.
-
-        """
-        if not instance._arc_resource:
-            raise VideoFacadeNativeError('There is no ARC resource.')
-        return func(instance, *args, **kwargs)
-    return wrapper
-
-
-class VideoFacadeNative(object):
-    """Facede to access the video-related functionality.
-
-    The methods inside this class only accept Python native types.
-
-    """
-
-    def __init__(self, resource, arc_resource=None):
-        """Initializes an video facade.
-
-        @param resource: A FacadeResource object.
-        @param arc_resource: An ArcResource object.
-
-        """
-        self._resource = resource
-        self._player = None
-        self._arc_resource = arc_resource
-        self._display_facade = display_facade_native.DisplayFacadeNative(
-                resource)
-        self.bindir = os.path.dirname(os.path.realpath(__file__))
-
-
-    def cleanup(self):
-        """Clean up the temporary files."""
-        for path in glob.glob('/tmp/playback_*'):
-            os.unlink(path)
-
-        if self._arc_resource:
-            self._arc_resource.cleanup()
-
-
-    def prepare_playback(self, file_path, fullscreen=True):
-        """Copies the html file to /tmp and loads the webpage.
-
-        @param file_path: The path to the file.
-        @param fullscreen: Plays the video in fullscreen.
-
-        """
-        # Copies the html file to /tmp to make it accessible.
-        utils.get_file(
-                os.path.join(self.bindir, 'video.html'),
-                '/tmp/playback_video.html')
-
-        html_path = 'file:///tmp/playback_video.html'
-
-        tab = self._resource._browser.tabs.New()
-        tab.Navigate(html_path)
-        self._player = native_html5_player.NativeHtml5Player(
-                tab=tab,
-                full_url=html_path,
-                video_id='video',
-                video_src_path=file_path)
-        self._player.load_video()
-
-        if fullscreen:
-            self._display_facade.set_fullscreen(True)
-
-
-    def start_playback(self, blocking=False):
-        """Starts video playback on the webpage.
-
-        Before calling this method, user should call prepare_playback to
-        put the files to /tmp and load the webpage.
-
-        @param blocking: Blocks this call until playback finishes.
-
-        """
-        self._player.play()
-        if blocking:
-            self._player.wait_video_ended()
-
-
-    def pause_playback(self):
-        """Pauses playback on the webpage."""
-        self._player.pause()
-
-
-    def dropped_frame_count(self):
-        """
-        Gets the number of dropped frames.
-
-        @returns: An integer indicates the number of dropped frame.
-
-        """
-        return self._player.dropped_frame_count()
-
-
-    @check_arc_resource
-    def prepare_arc_playback(self, file_path, fullscreen=True):
-        """Copies the video file to be played into container and starts the app.
-
-        User should call this method to put the file into container before
-        calling start_arc_playback.
-
-        @param file_path: Path to the file to be played on Cros host.
-        @param fullscreen: Plays the video in fullscreen.
-
-        """
-        self._arc_resource.play_video.prepare_playback(file_path, fullscreen)
-
-
-    @check_arc_resource
-    def start_arc_playback(self, blocking_secs=None):
-        """Starts playback through Play Video app.
-
-        Before calling this method, user should call set_arc_playback_file to
-        put the file into container and start the app.
-
-        @param blocking_secs: A positive number indicates the timeout to wait
-                              for the playback is finished. Set None to make
-                              it non-blocking.
-
-
-        """
-        self._arc_resource.play_video.start_playback(blocking_secs)
-
-
-    @check_arc_resource
-    def pause_arc_playback(self):
-        """Pauses playback through Play Video app."""
-        self._arc_resource.play_video.pause_playback()
-
-
-    @check_arc_resource
-    def stop_arc_playback(self):
-        """Stops playback through Play Video app."""
-        self._arc_resource.play_video.stop_playback()
diff --git a/client/cros/multimedia/webrtc_utils.py b/client/cros/multimedia/webrtc_utils.py
index d739330..976f84b 100644
--- a/client/cros/multimedia/webrtc_utils.py
+++ b/client/cros/multimedia/webrtc_utils.py
@@ -64,7 +64,7 @@
     def __init__(self, browser_facade):
         """Initializes an AppRTCController.
 
-        @param browser_facade: A BrowserFacadeNative (for client side) or
+        @param browser_facade: A BrowserFacadeLocal (for client side) or
                                BrowserFacadeAdapter (for server side).
 
         """
diff --git a/client/cros/netprotos/fake_host.py b/client/cros/netprotos/fake_host.py
index f76a189..9211a1e 100644
--- a/client/cros/netprotos/fake_host.py
+++ b/client/cros/netprotos/fake_host.py
@@ -59,3 +59,6 @@
         self._bind_port = port
         self._bind_recv_callback = recv_callback
 
+    def close(self):
+        """Mock close for python3 unit tests."""
+        pass
diff --git a/client/cros/netprotos/zeroconf.py b/client/cros/netprotos/zeroconf.py
index 66382ca..abca7e2 100644
--- a/client/cros/netprotos/zeroconf.py
+++ b/client/cros/netprotos/zeroconf.py
@@ -5,6 +5,7 @@
 import collections
 import dpkt
 import logging
+import six
 import socket
 import time
 
@@ -40,13 +41,13 @@
         if not hasattr(rra, key):
             continue
         if key == 'cls':
-          # cls attribute should be masked for the cache flush bit.
-          if (getattr(rra, key) & ~DNS_CACHE_FLUSH !=
-                getattr(rrb, key) & ~DNS_CACHE_FLUSH):
-              return False
+            # cls attribute should be masked for the cache flush bit.
+            if (getattr(rra, key) & ~DNS_CACHE_FLUSH !=
+                        getattr(rrb, key) & ~DNS_CACHE_FLUSH):
+                return False
         else:
-          if getattr(rra, key) != getattr(rrb, key):
-              return False
+            if getattr(rra, key) != getattr(rrb, key):
+                return False
     return True
 
 
@@ -436,7 +437,7 @@
         if not rrtype in self._peer_records[rrname]:
             return []
         res = []
-        for data, data_ts in self._peer_records[rrname][rrtype].iteritems():
+        for data, data_ts in six.iteritems(self._peer_records[rrname][rrtype]):
             if data_ts >= timestamp:
                 res.append(DnsRecord(rrname, rrtype, data, data_ts))
         return res
diff --git a/client/cros/netprotos/zeroconf_unittest.py b/client/cros/netprotos/zeroconf_unittest.py
index deeab13..6c107c5 100644
--- a/client/cros/netprotos/zeroconf_unittest.py
+++ b/client/cros/netprotos/zeroconf_unittest.py
@@ -1,3 +1,4 @@
+#!/usr/bin/python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,9 +6,12 @@
 import unittest
 
 import dpkt
-import fake_host
 import socket
-import zeroconf
+
+import common
+
+from autotest_lib.client.cros.netprotos import fake_host
+from autotest_lib.client.cros.netprotos import zeroconf
 
 
 FAKE_HOSTNAME = 'fakehost1'
@@ -110,18 +114,17 @@
         # Build the mDNS packet with two TXT records.
         domain_name = 'other_host.local'
         answers = [
-                dpkt.dns.DNS.RR(
-                        type = dpkt.dns.DNS_TXT,
-                        cls = dpkt.dns.DNS_IN,
-                        ttl = 120,
-                        name = domain_name,
-                        text = ['one', 'two']),
-                dpkt.dns.DNS.RR(
-                        type = dpkt.dns.DNS_TXT,
-                        cls = dpkt.dns.DNS_IN,
-                        ttl = 120,
-                        name = domain_name,
-                        text = ['two'])]
+                dpkt.dns.DNS.RR(type=dpkt.dns.DNS_TXT,
+                                cls=dpkt.dns.DNS_IN,
+                                ttl=120,
+                                name=domain_name,
+                                text=['one'.encode(), 'two'.encode()]),
+                dpkt.dns.DNS.RR(type=dpkt.dns.DNS_TXT,
+                                cls=dpkt.dns.DNS_IN,
+                                ttl=120,
+                                name=domain_name,
+                                text=['two'.encode()])
+        ]
         # The packet is a query packet, with extra answers on the autoritative
         # section.
         mdns = dpkt.dns.DNS(
@@ -138,7 +141,7 @@
         # Send the packet to the registered callback.
         sock = self._host._sockets[0]
         cbk = sock._bind_recv_callback
-        cbk(str(mdns), '1234', 5353)
+        cbk(bytes(mdns), 1234, 5353)
 
         # Check that the answers callback is called with all the answers in the
         # received order.
diff --git a/client/cros/network.py b/client/cros/network.py
index 33c6481..03b7a20 100644
--- a/client/cros/network.py
+++ b/client/cros/network.py
@@ -4,70 +4,58 @@
 # found in the LICENSE file.
 
 import logging
-import re
 from six.moves import urllib
 import socket
 import time
 
-import common
-
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 
 
-def CheckInterfaceForDestination(host, expected_interface,
-                                 family=socket.AF_UNSPEC):
+def CheckThatInterfaceCanAccessDestination(host,
+                                           interface,
+                                           families=[socket.AF_UNSPEC]):
     """
-    Checks that routes for host go through a given interface.
-
-    The concern here is that our network setup may have gone wrong
-    and our test connections may go over some other network than
-    the one we're trying to test.  So we take all the IP addresses
-    for the supplied host and make sure they go through the given
-    network interface.
+    Checks that we can access a host using a specific interface.
 
     @param host: Destination host
-    @param expected_interface: Expected interface name
-    @raises: error.TestFail if the routes for the given host go through
-            a different interface than the expected one.
+    @param interface: Name of the network interface to be used
+    @param families: List of address families used to resolve |host|;
+            defaults to [socket.AF_UNSPEC].
+    @raises: error.TestFail if none of the resolved addresses can be reached
+            over |interface|.
 
     """
-    def _MatchesRoute(address, expected_interface):
-        """
-        Returns whether or not |expected_interface| is used to reach |address|.
-
-        @param address: string containing an IP (v4 or v6) address.
-        @param expected_interface: string containing an interface name.
-
-        """
-        output = utils.run('ip route get %s' % address).stdout
-
-        if re.search(r'unreachable', output):
-            return False
-
-        match = re.search(r'\sdev\s(\S+)', output)
-        if match is None:
-            return False
-        interface = match.group(1)
-
-        logging.info('interface for %s: %s', address, interface)
-        if interface != expected_interface:
-            raise error.TestFail('Target server %s uses interface %s'
-                                 '(%s expected).' %
-                                 (address, interface, expected_interface))
-        return True
-
+    logging.debug('Check connection to %s', host)
     # addrinfo records: (family, type, proto, canonname, (addr, port))
-    server_addresses = [record[4][0]
-                        for record in socket.getaddrinfo(host, 80, family)]
-    for address in server_addresses:
+    server_addresses = []
+    for family in families:
+        try:
+            records = socket.getaddrinfo(host, 80, family)
+        except socket.gaierror:
+            # Just ignore this family.
+            continue
+        server_addresses.extend(record[4][0] for record in records)
+
+    found_route = False
+    failing_addresses = []
+    for address in set(server_addresses):
         # Routes may not always be up by this point. Note that routes for v4 or
         # v6 may come up before the other, so we simply do this poll for all
         # addresses.
-        utils.poll_for_condition(
-            condition=lambda: _MatchesRoute(address, expected_interface),
-            exception=error.TestFail('No route to %s' % address),
-            timeout=1)
+        try:
+            utils.poll_for_condition(condition=lambda: utils.ping(
+                    address, interface=interface, tries=2, timeout=3) == 0,
+                                     exception=Exception('No route to %s' %
+                                                         address),
+                                     timeout=2)
+        except Exception as e:
+            logging.info(e)
+            failing_addresses.append(address)
+        else:
+            found_route = True
+
+    if not found_route:
+        raise error.TestFail('Interface %s cannot connect to %s' % (interface,
+                             failing_addresses))
+
 
 FETCH_URL_PATTERN_FOR_TEST = \
     'http://testing-chargen.appspot.com/download?size=%d'
diff --git a/client/cros/networking/OWNERS b/client/cros/networking/OWNERS
new file mode 100644
index 0000000..97e5eb6
--- /dev/null
+++ b/client/cros/networking/OWNERS
@@ -0,0 +1 @@
+include /WIFI_OWNERS
diff --git a/client/cros/networking/apmanager_proxy.py b/client/cros/networking/apmanager_proxy.py
index a125e35..2700938 100644
--- a/client/cros/networking/apmanager_proxy.py
+++ b/client/cros/networking/apmanager_proxy.py
@@ -1,9 +1,15 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus
 import dbus.mainloop.glib
+import six
 import time
 
 from autotest_lib.client.common_lib.cros.network import apmanager_constants
@@ -173,7 +179,7 @@
                                         self.DBUS_SERVICE_INTERFACE,
                                         self.SERVICE_PROPERTY_CONFIG))
         # Set configuration properties.
-        for name, value in config_params.iteritems():
+        for name, value in six.iteritems(config_params):
             if name in self.CONFIG_PROPERTY_DBUS_TYPE_MAPPING:
                 func = self.CONFIG_PROPERTY_DBUS_TYPE_MAPPING[name]
                 self._set_dbus_property(service_config,
diff --git a/client/cros/networking/apmanager_xmlrpc_server.py b/client/cros/networking/apmanager_xmlrpc_server.py
index 5e066f1..4255d21 100755
--- a/client/cros/networking/apmanager_xmlrpc_server.py
+++ b/client/cros/networking/apmanager_xmlrpc_server.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -8,6 +8,7 @@
 import logging.handlers
 
 import common
+
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros import constants
 from autotest_lib.client.cros import xmlrpc_server
diff --git a/client/cros/networking/cellular_proxy.py b/client/cros/networking/cellular_proxy.py
index 37bd3bd..d16ec77 100644
--- a/client/cros/networking/cellular_proxy.py
+++ b/client/cros/networking/cellular_proxy.py
@@ -1,10 +1,10 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import dbus
 import logging
-import time
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.cros.networking import shill_proxy
@@ -61,7 +61,10 @@
                 service found.
 
         """
-        return self.find_object('Service', {'Type': self.TECHNOLOGY_CELLULAR})
+        return self.find_object('Service', {
+                'Type': self.TECHNOLOGY_CELLULAR,
+                'Connectable': True
+        })
 
 
     def wait_for_cellular_service_object(
@@ -123,7 +126,7 @@
         """
         logging.info('Resetting modem')
         # Obtain identifying information about the modem.
-        properties = modem.GetProperties(utf8_strings=True)
+        properties = modem.GetProperties()
         # NOTE: Using the Model ID means that this will break if we have two
         # identical cellular modems in a DUT. Fortunately, we only support one
         # modem at a time.
@@ -141,45 +144,43 @@
         if not manufacturer:
             raise shill_proxy.ShillProxyError(
                     'Failed to get the manufacturer for the modem.')
-        if "QUALCOMM" in manufacturer:
-            logging.info(
-                    'Qualcomm modem found. Bypassing modem reset (b/168113309)'
-            )
-            new_modem = modem
-        else:
-            modem.Reset()
 
-            # (1) Wait for the old modem to disappear
-            utils.poll_for_condition(lambda: self._is_old_modem_gone(
-                    old_modem_path, old_modem_mm_object),
-                                     exception=shill_proxy.
-                                     ShillProxyTimeoutError(
-                                             'Old modem disappeared'),
-                                     timeout=60)
+        # On Qualcomm modems, rebooting the modem causes ModemManager to also
+        # restart, because when the Qrtr services are removed, qc-netmgr
+        # restarts ModemManager.
+        mm_rebooted = "QUALCOMM" in manufacturer
+        modem.Reset()
 
-            # (2) Wait for the device to reappear
-            if not expect_device:
-                return None, None
-            # The timeout here should be sufficient for our slowest modem to
-            # reappear.
-            new_modem = utils.poll_for_condition(
-                    lambda: self._get_reappeared_modem(model_id,
-                                                       old_modem_mm_object),
-                    exception=shill_proxy.ShillProxyTimeoutError(
-                            'The modem reappeared after reset.'),
-                    timeout=60)
+        # (1) Wait for the old modem to disappear
+        utils.poll_for_condition(lambda: self._is_old_modem_gone(
+                old_modem_path, old_modem_mm_object),
+                                 exception=shill_proxy.ShillProxyTimeoutError(
+                                         'Old modem disappeared'),
+                                 timeout=60)
 
-            # (3) Check powered state of the device
-            if not expect_powered:
-                return new_modem, None
-            success, _, _ = self.wait_for_property_in(
-                    new_modem,
-                    self.DEVICE_PROPERTY_POWERED, [self.VALUE_POWERED_ON],
-                    timeout_seconds=10)
-            if not success:
-                raise shill_proxy.ShillProxyError(
-                        'After modem reset, new modem failed to enter powered '
-                        'state.')
+        # (2) Wait for the device to reappear
+        if not expect_device:
+            return None, None
+        # The timeout here should be sufficient for our slowest modem to
+        # reappear.
+        new_modem = utils.poll_for_condition(
+                lambda: self._get_reappeared_modem(
+                        model_id, old_modem_mm_object, mm_rebooted),
+                exception=shill_proxy.ShillProxyTimeoutError(
+                        'The modem reappeared after reset.'),
+                timeout=60)
+
+        # (3) Check powered state of the device
+        if not expect_powered:
+            return new_modem, None
+        success, _, _ = self.wait_for_property_in(new_modem,
+                                                  self.DEVICE_PROPERTY_POWERED,
+                                                  [self.VALUE_POWERED_ON],
+                                                  timeout_seconds=15)
+        if not success:
+            raise shill_proxy.ShillProxyError(
+                    'After modem reset, new modem failed to enter powered '
+                    'state.')
 
         # (4) Check that service reappears
         if not expect_service:
@@ -248,12 +249,15 @@
             return False
 
 
-    def _get_reappeared_modem(self, model_id, old_modem_mm_object):
-        """Check that a vanished modem reappers.
+    def _get_reappeared_modem(self, model_id, old_modem_mm_object, mm_reboot):
+        """Check that a vanished modem reappeers.
 
         @param model_id: The model ID reported by the vanished modem.
         @param old_modem_mm_object: The previously reported modemmanager object
                 path for this modem.
+        @param mm_reboot: indicates whether ModemManager was rebooted.
+                When ModemManager reboots, the previous modem object path is
+                no longer meaningful.
 
         @return The reappeared DBus object, if any. None otherwise.
 
@@ -263,9 +267,11 @@
         device = self.find_cellular_device_object()
         if not device:
             return None
-        properties = device.GetProperties(utf8_strings=True)
+        properties = device.GetProperties()
         if (model_id == properties.get(self.DEVICE_PROPERTY_MODEL_ID) and
-            (old_modem_mm_object !=
-             properties.get(self.DEVICE_PROPERTY_DBUS_OBJECT))):
+            (mm_reboot or
+             (old_modem_mm_object != properties.get(
+                     self.DEVICE_PROPERTY_DBUS_OBJECT)
+              and '/' in properties.get(self.DEVICE_PROPERTY_DBUS_OBJECT)))):
             return device
         return None
diff --git a/client/cros/networking/chrome_testing/chrome_networking_test_api.py b/client/cros/networking/chrome_testing/chrome_networking_test_api.py
index a44c72a..1f19ac9 100644
--- a/client/cros/networking/chrome_testing/chrome_networking_test_api.py
+++ b/client/cros/networking/chrome_testing/chrome_networking_test_api.py
@@ -1,10 +1,17 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import time
 
+from six.moves import range
+
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.networking.chrome_testing import test_utils
 
@@ -98,7 +105,7 @@
     def scan_for_networks(self, timeout=SHORT_TIMEOUT):
         """Scan for all the available networks
 
-        @param timeout int seconds to sleep while scanning for networks 
+        @param timeout int seconds to sleep while scanning for networks
 
         """
         self._chrome_testing.call_test_function_async('requestNetworkScan')
diff --git a/client/cros/networking/chrome_testing/chrome_networking_test_context.py b/client/cros/networking/chrome_testing/chrome_networking_test_context.py
index 4822bcf..83ad20d 100644
--- a/client/cros/networking/chrome_testing/chrome_networking_test_context.py
+++ b/client/cros/networking/chrome_testing/chrome_networking_test_context.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/networking/chrome_testing/network_test_ext/manifest.json b/client/cros/networking/chrome_testing/network_test_ext/manifest.json
index a91e8e0..bc9d6c5 100644
--- a/client/cros/networking/chrome_testing/network_test_ext/manifest.json
+++ b/client/cros/networking/chrome_testing/network_test_ext/manifest.json
@@ -2,7 +2,7 @@
   "background": {
      "scripts": [ "background.js" ]
   },
-  "description": "Extension that enables end to end UI testing of Chrome OS network functionality",
+  "description": "Extension that enables end to end UI testing of ChromeOS network functionality",
   "key": "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDCtCBRrosxnJ4X1+0N1kYM7koUvklF9oWqVDnIJJ6mERYFNiS+c2b9pVUCW3aeFvMUzIRZAOFP4iY1LTuKATbqWU90zg7jTah1AFfyHPoyRwxobj4NdSLjxCmc13+zbL+Cwjl7lH2Rjkop6whsbb+30lWju3lac9PCVFlGwHubZQIDAQAB",
   "manifest_version": 2,
   "name": "ChromeOS Networking Telemetry AutoTest Extension",
diff --git a/client/cros/networking/chrome_testing/test_utils.py b/client/cros/networking/chrome_testing/test_utils.py
index 3ae2a75..4d54a73 100644
--- a/client/cros/networking/chrome_testing/test_utils.py
+++ b/client/cros/networking/chrome_testing/test_utils.py
@@ -84,8 +84,10 @@
             timeout)
 
 
-def simple_network_sanity_check(
-        network, expected_name, expected_type, check_name_prefix=True):
+def simple_network_check(network,
+                         expected_name,
+                         expected_type,
+                         check_name_prefix=True):
     """
     Simple check to ensure that the network type and name match the expected
     values.
diff --git a/client/cros/networking/common.py b/client/cros/networking/common.py
index 3bae9bd..d36a6a4 100644
--- a/client/cros/networking/common.py
+++ b/client/cros/networking/common.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/cros/networking/hermes_proxy.py b/client/cros/networking/hermes_proxy.py
new file mode 100644
index 0000000..9d65a57
--- /dev/null
+++ b/client/cros/networking/hermes_proxy.py
@@ -0,0 +1,504 @@
+# Lint as: python2, python3
+# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This module provides bindings for Hermes.
+
+"""
+import dbus
+import logging
+import dbus.mainloop.glib
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.cellular import cellular_logging
+from autotest_lib.client.cros.cellular import hermes_constants
+from autotest_lib.client.cros.cellular import mm1_constants
+
+log = cellular_logging.SetupCellularLogging('Hermes')
+
+def _is_unknown_dbus_binding_exception(e):
+    return (isinstance(e, dbus.exceptions.DBusException) and
+            e.get_dbus_name() in [mm1_constants.DBUS_SERVICE_UNKNOWN,
+                                  mm1_constants.DBUS_UNKNOWN_METHOD,
+                                  mm1_constants.DBUS_UNKNOWN_OBJECT,
+                                  mm1_constants.DBUS_UNKNOWN_INTERFACE])
+
+class HermesManagerProxyError(Exception):
+    """Exceptions raised by HermesManager1ProxyError and it's children."""
+    pass
+
+class HermesManagerProxy(object):
+    """A wrapper around a DBus proxy for HermesManager."""
+
+    @classmethod
+    def get_hermes_manager(cls, bus=None, timeout_seconds=10):
+        """Connect to HermesManager over DBus, retrying if necessary.
+
+        After connecting to HermesManager, this method will verify that
+        HermesManager is answering RPCs.
+
+        @param bus: D-Bus bus to use, or specify None and this object will
+            create a mainloop and bus.
+        @param timeout_seconds: float number of seconds to try connecting
+            A value <= 0 will cause the method to return immediately,
+            without trying to connect.
+        @return a HermesManagerProxy instance if we connected, or None
+            otherwise.
+        @raise HermesManagerProxyError if it fails to connect to
+            HermesManager.
+
+        """
+        def _connect_to_hermes_manager(bus):
+            try:
+                # We create instance of class on which this classmethod was
+                # called. This way, calling get_hermes_manager
+                # SubclassOfHermesManagerProxy._connect_to_hermes_manager()
+                # will get a proxy of the right type
+                return cls(bus=bus)
+            except dbus.exceptions.DBusException as e:
+                if _is_unknown_dbus_binding_exception(e):
+                    return None
+                raise HermesManagerProxyError(
+                    'Error connecting to HermesManager. DBus error: |%s|',
+                    repr(e))
+
+        utils.poll_for_condition(
+            condition=lambda: _connect_to_hermes_manager(bus) is not None,
+            exception=HermesManagerProxyError(
+                'Timed out connecting to HermesManager dbus'),
+            desc='Waiting for hermes to start',
+            timeout=timeout_seconds,
+            sleep_interval=hermes_constants.CONNECT_WAIT_INTERVAL_SECONDS)
+        connection = _connect_to_hermes_manager(bus)
+        return connection
+
+    def __init__(self, bus=None):
+        if bus is None:
+            dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
+            bus = dbus.SystemBus()
+        self._bus = bus
+        self._manager = dbus.Interface(
+            self._bus.get_object(hermes_constants.HERMES_SERVICE,
+                                 hermes_constants.HERMES_MANAGER_OBJECT),
+            hermes_constants.HERMES_MANAGER_IFACE)
+
+    @property
+    def manager(self):
+        """@return the DBus Hermes Manager object."""
+        return self._manager
+
+    @property
+    def iface_properties(self):
+        """@return org.freedesktop.DBus.Properties DBus interface."""
+        return dbus.Interface(self._manager, hermes_constants.I_PROPERTIES)
+
+    def properties(self, iface=hermes_constants.HERMES_MANAGER_IFACE):
+        """
+        Return the properties associated with the specified interface.
+
+        @param iface: Name of interface to retrieve the properties from.
+        @return array of properties.
+
+        """
+        return self.iface_properties.GetAll(iface)
+
+    def get_available_euiccs(self):
+        """
+        Return AvailableEuiccs property from manager interface
+
+        @return array of euicc paths
+
+        """
+        available_euiccs = self.properties()
+        if not available_euiccs:
+            return None
+
+        return available_euiccs.get('AvailableEuiccs')
+
+    def get_first_inactive_euicc(self):
+        """
+        Read all euicc objects in a loop and get a non-active euicc object.
+
+        @return non-active euicc proxy object
+
+        """
+        try:
+            euiccs = self.get_available_euiccs()
+            euicc_obj = None
+            for euicc in euiccs:
+                euicc_obj = self.get_euicc(euicc)
+                props = euicc_obj.properties()
+                if not props.get('IsActive'):
+                    break
+            return euicc_obj
+        except dbus.DBusException as e:
+            logging.error('get non active euicc failed with error:%s', e)
+
+    def get_first_active_euicc(self):
+        """
+        Read all euiccs and get the active euicc object
+        by reading the IsActive property of each euicc object.
+
+        @return active euicc proxy object
+
+        """
+        try:
+            euiccs = self.get_available_euiccs()
+            euicc_obj = None
+            for euicc in euiccs:
+                euicc_obj = self.get_euicc(euicc)
+                props = euicc_obj.properties()
+                if props.get('IsActive'):
+                    break
+            return euicc_obj
+        except dbus.DBusException as e:
+            logging.error('get active euicc failed with error:%s', e)
+
+    def get_euicc(self, euicc_path):
+        """
+        Create a proxy object for given euicc path
+
+        @param euicc_path: available euicc dbus path as string
+        @return euicc proxy dbus object
+
+        """
+        if not euicc_path:
+            logging.debug('No euicc path given for %s', euicc_path)
+            raise error.TestFail('No euicc path given for ' + euicc_path)
+
+        try:
+            euicc_proxy = EuiccProxy(self._bus, euicc_path)
+            props = euicc_proxy.properties()
+            if not props:
+                raise error.TestFail('No euicc props found for ' + euicc_path)
+            return euicc_proxy
+        except dbus.exceptions.DBusException as e:
+            if _is_unknown_dbus_binding_exception(e):
+                return None
+            raise HermesManagerProxyError(
+                'Failed to obtain dbus object for the euicc. DBus error: '
+                '|%s|', repr(e))
+
+    def get_profile_from_iccid(self, iccid):
+        """
+        Generic function to get profile based on given iccid
+
+        @return tuple of (euicc proxy, profile proxy), or None if not found
+
+        """
+        logging.debug('Get profile from given iccid:%s', iccid)
+        euiccs = self.get_available_euiccs()
+        for euicc in euiccs:
+            euicc_obj = self.get_euicc(euicc)
+            profile = euicc_obj.get_profile_from_iccid(iccid)
+            if profile is not None:
+                return euicc_obj, profile
+        return None
+
+    def set_debug_logging(self):
+        """Sets the Hermes daemon logging level to DEBUG."""
+        self.manager.SetLogging('DEBUG')
+
+    def get_profile_iccid(self, profile_path):
+        """Returns the Iccid property of the profile at |profile_path|."""
+        profile_proxy = ProfileProxy(self._bus, profile_path)
+        props = profile_proxy.properties()
+        return props.get('Iccid')
+
+# End of Manager class
+
+class ProfileProxy(object):
+    """A wrapper around a DBus proxy for Hermes profile object."""
+
+    # Amount of time to wait for a state transition.
+    STATE_TRANSITION_WAIT_SECONDS = 10
+
+    def __init__(self, bus, path):
+        self._bus = bus
+        self._path = path
+        self._profile = self._bus.get_object(
+            hermes_constants.HERMES_SERVICE, path)
+
+    def enable(self):
+        """ Enables a profile """
+        profile_interface = dbus.Interface(
+            self.profile, hermes_constants.HERMES_PROFILE_IFACE)
+        logging.debug('ProfileProxy Manager enable_profile')
+        return profile_interface.Enable(
+                    timeout=hermes_constants.HERMES_DBUS_METHOD_REPLY_TIMEOUT)
+
+    def disable(self):
+        """ Disables a profile """
+        profile_interface = dbus.Interface(
+            self.profile, hermes_constants.HERMES_PROFILE_IFACE)
+        logging.debug('ProfileProxy Manager disable_profile')
+        return profile_interface.Disable(
+                    timeout=hermes_constants.HERMES_DBUS_METHOD_REPLY_TIMEOUT)
+
+    @property
+    def profile(self):
+        """@return the DBus profiles object."""
+        return self._profile
+
+    @property
+    def path(self):
+        """@return profile path."""
+        return self._path
+
+    @property
+    def iface_properties(self):
+        """@return org.freedesktop.DBus.Properties DBus interface."""
+        return dbus.Interface(self._profile, dbus.PROPERTIES_IFACE)
+
+    def iface_profile(self):
+        """@return org.freedesktop.HermesManager.Profile DBus interface."""
+        return dbus.Interface(self._profile,
+                              hermes_constants.HERMES_PROFILE_IFACE)
+
+    def properties(self, iface=hermes_constants.HERMES_PROFILE_IFACE):
+        """Return the properties associated with the specified interface.
+        @param iface: Name of interface to retrieve the properties from.
+        @return array of properties.
+        """
+        return self.iface_properties.GetAll(iface)
+
+    # Property getters, one per profile property:
+    # "Iccid", "ServiceProvider", "MccMnc", "ActivationCode", "State",
+    # "ProfileClass", "Name", "Nickname"
+    @property
+    def iccid(self):
+        """ @return iccid of profile also confirmation code """
+        props = self.properties(hermes_constants.HERMES_PROFILE_IFACE)
+        return props.get('Iccid')
+
+    @property
+    def serviceprovider(self):
+        """ @return serviceprovider of profile """
+        props = self.properties(hermes_constants.HERMES_PROFILE_IFACE)
+        return props.get('ServiceProvider')
+
+    @property
+    def mccmnc(self):
+        """ @return mccmnc of profile """
+        props = self.properties(hermes_constants.HERMES_PROFILE_IFACE)
+        return props.get('MccMnc')
+
+    @property
+    def activationcode(self):
+        """ @return activationcode of profile """
+        props = self.properties(hermes_constants.HERMES_PROFILE_IFACE)
+        return props.get('ActivationCode')
+
+    @property
+    def state(self):
+        """ @return state of profile """
+        props = self.properties(hermes_constants.HERMES_PROFILE_IFACE)
+        return props.get('State')
+
+    @property
+    def profileclass(self):
+        """ @return profileclass of profile """
+        props = self.properties(hermes_constants.HERMES_PROFILE_IFACE)
+        return props.get('ProfileClass')
+
+    @property
+    def name(self):
+        """ @return name of profile """
+        props = self.properties(hermes_constants.HERMES_PROFILE_IFACE)
+        return props.get('Name')
+
+    @property
+    def nickname(self):
+        """ @return nickname of profile """
+        props = self.properties(hermes_constants.HERMES_PROFILE_IFACE)
+        return props.get('Nickname')
+
+class EuiccProxy(object):
+    """A wrapper around a DBus proxy for Hermes euicc object."""
+
+    def __init__(self, bus, path):
+        self._bus = bus
+        self._euicc = self._bus.get_object(
+            hermes_constants.HERMES_SERVICE, path)
+
+    @property
+    def euicc(self):
+        """@return the DBus Euicc object."""
+        return self._euicc
+
+    @property
+    def iface_properties(self):
+        """@return org.freedesktop.DBus.Properties DBus interface."""
+        return dbus.Interface(self._euicc, dbus.PROPERTIES_IFACE)
+
+    @property
+    def iface_euicc(self):
+        """@return org.freedesktop.HermesManager.Euicc DBus interface."""
+        return dbus.Interface(self._euicc, hermes_constants.HERMES_EUICC_IFACE)
+
+    def properties(self, iface=hermes_constants.HERMES_EUICC_IFACE):
+        """
+        Return the properties associated with the specified interface.
+
+        @param iface: Name of interface to retrieve the properties from.
+        @return array of properties.
+
+        """
+        return self.iface_properties.GetAll(iface)
+
+    def request_installed_profiles(self):
+        """Refreshes/Loads current euicc object profiles.
+        """
+        self.iface_euicc.RequestInstalledProfiles(
+                    timeout=hermes_constants.HERMES_DBUS_METHOD_REPLY_TIMEOUT)
+
+    def request_pending_profiles(self, root_smds):
+        """Refreshes/Loads current euicc object pending profiles.
+        @return profile objects
+        """
+        logging.debug('Request pending profile call here for %s bus %s',
+                      self._euicc, self._bus)
+        return self.iface_euicc.RequestPendingProfiles(
+                    dbus.String(root_smds),
+                    timeout=hermes_constants.HERMES_DBUS_METHOD_REPLY_TIMEOUT)
+
+    def is_test_euicc(self):
+        """
+        Returns whether the eUICC is a test eSIM. Automatically chooses the
+        correct TLS certs to use for the eUICC.
+        """
+        try:
+            logging.info('Calling Euicc.IsTestEuicc')
+            return self.iface_euicc.IsTestEuicc()
+        except dbus.DBusException as e:
+            logging.error('IsTestEuicc failed with error: %s', e)
+
+    def use_test_certs(self, is_test_certs):
+        """
+        Sets Hermes daemon to test mode, required to run autotests
+
+        Set to true if downloading profiles from an SMDX with a test
+        certificate. This method is used to download profiles to an eSIM from
+        a test CI.
+
+        @param is_test_certs boolean to set true or false
+
+        """
+        try:
+            logging.info('Hermes call UseTestCerts')
+            self.iface_euicc.UseTestCerts(dbus.Boolean(is_test_certs))
+        except dbus.DBusException as e:
+            logging.error('Hermes UseTestCerts failed with error:%s', e)
+
+    def install_profile_from_activation_code(self, act_code, conf_code):
+        """ Install the profile from given act code, confirmation code """
+        profile = self.iface_euicc.InstallProfileFromActivationCode(
+                    act_code,
+                    conf_code,
+                    timeout=hermes_constants.HERMES_DBUS_METHOD_REPLY_TIMEOUT)
+        return profile
+
+    def install_pending_profile(self, profile_path, conf_code):
+        """ Install the profile from given confirmation code"""
+        profile = self.iface_euicc.InstallPendingProfile(
+                    profile_path,
+                    conf_code,
+                    timeout=hermes_constants.HERMES_DBUS_METHOD_REPLY_TIMEOUT)
+        return profile
+
+    def uninstall_profile(self, profile_path):
+        """ uninstall the given profile"""
+        self.iface_euicc.UninstallProfile(
+                    profile_path,
+                    timeout=hermes_constants.HERMES_DBUS_METHOD_REPLY_TIMEOUT)
+
+    def get_installed_profiles(self):
+        """
+        Return all the available profiles objects.
+
+        Every call to |get_installed_profiles| obtains a fresh DBus proxy
+        for the profiles. So, if the profiles DBus object has changed between
+        two calls to this method, the proxy returned will be for the currently
+        available profiles.
+
+        @return a dict of profile objects. Returns None if no profile is found.
+        @raise HermesManagerProxyError if any corrupted profile found.
+
+        """
+        if self.installedprofiles is None:
+            return None
+        try:
+            profiles_dict = {}
+            for profile in self.installedprofiles:
+                profile_proxy = ProfileProxy(self._bus, profile)
+                profiles_dict[profile] = profile_proxy
+            logging.debug('Get installed profiles for current euicc')
+            return profiles_dict
+        except dbus.exceptions.DBusException as e:
+            if _is_unknown_dbus_binding_exception(e):
+                return None
+            raise HermesManagerProxyError(
+                'Failed to obtain dbus object for the profiles. DBus error: '
+                '|%s|', repr(e))
+
+    def get_profile_from_iccid(self, iccid):
+        """@return profile object having given iccid or none if not found"""
+        profiles = self.installedprofiles
+        for profile in profiles:
+            profile_proxy = ProfileProxy(self._bus, profile)
+            props = profile_proxy.properties()
+            if props.get('Iccid') == iccid:
+                return profile_proxy
+        return None
+
+    def get_pending_profiles(self):
+        """
+        Read all pending profiles of current euicc and create & return dict of
+        all pending profiles
+
+        @return dictionary of pending profiles proxy dbus objects
+
+        """
+        try:
+            logging.debug('Hermes euicc getting pending profiles')
+
+            if self.pendingprofiles is None:
+                return None
+
+            profiles_dict = {}
+            # Read & Create each profile object and add to dictionary
+            for profile in self.pendingprofiles:
+                profile_proxy = ProfileProxy(self._bus, profile)
+                profiles_dict[profile] = profile_proxy
+                logging.debug('Hermes euicc pending profile: %s', profile)
+            return profiles_dict
+        except dbus.exceptions.DBusException as e:
+            if _is_unknown_dbus_binding_exception(e):
+                return None
+            raise HermesManagerProxyError(
+                'Failed to obtain dbus object for the profiles. DBus error: '
+                '|%s|', repr(e))
+
+    @property
+    def get_eid(self):
+        """@return Eid string property of euicc"""
+        props = self.properties()
+        return props.get('Eid')
+
+    @property
+    def installedprofiles(self):
+        """@return the installedprofiles ao property of euicc"""
+        props = self.properties()
+        return props.get('InstalledProfiles')
+
+    @property
+    def isactive(self):
+        """@return the isactive property of euicc"""
+        props = self.properties()
+        return props.get('IsActive')
+
+    @property
+    def pendingprofiles(self):
+        """@return the pendingprofiles ao property of euicc"""
+        props = self.properties()
+        return props.get('PendingProfiles')
diff --git a/client/cros/networking/mm1_proxy.py b/client/cros/networking/mm1_proxy.py
index 3f55c93..496934d 100644
--- a/client/cros/networking/mm1_proxy.py
+++ b/client/cros/networking/mm1_proxy.py
@@ -1,4 +1,5 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -9,6 +10,8 @@
 
 import dbus
 import dbus.mainloop.glib
+import logging
+import time
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.cros.cellular import mm1_constants
@@ -62,13 +65,13 @@
                 if _is_unknown_dbus_binding_exception(e):
                     return None
                 raise ModemManager1ProxyError(
-                        'Error connecting to ModemManager1. DBus error: |%s|',
-                        repr(e))
+                    'Error connecting to ModemManager1. DBus error: |%s|',
+                    repr(e))
 
         utils.poll_for_condition(
             lambda: _connect_to_mm1(bus) is not None,
             exception=ModemManager1ProxyError(
-                    'Timed out connecting to ModemManager1'),
+                'Timed out connecting to ModemManager1'),
             timeout=timeout_seconds,
             sleep_interval=ModemManager1Proxy.CONNECT_WAIT_INTERVAL_SECONDS)
         connection = _connect_to_mm1(bus)
@@ -79,23 +82,78 @@
 
         return connection
 
-
     def __init__(self, bus=None):
         if bus is None:
             dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
             bus = dbus.SystemBus()
         self._bus = bus
         self._manager = dbus.Interface(
-                self._bus.get_object(mm1_constants.I_MODEM_MANAGER,
-                                     mm1_constants.MM1),
-                mm1_constants.I_MODEM_MANAGER)
-
+            self._bus.get_object(mm1_constants.I_MODEM_MANAGER,
+                                 mm1_constants.MM1),
+            mm1_constants.I_MODEM_MANAGER)
+        self._device = None
 
     @property
     def manager(self):
         """@return the DBus ModemManager1 Manager object."""
         return self._manager
 
+    def inhibit_device(self, inhibit):
+        """
+
+        Uses Modem Manager InhibitDevice DBus API to inhibit/uninhibit
+        @param inhibit: true to inhibit the modem and false to uninhibit it.
+
+        InhibitDevice API:
+        @uid: the unique ID of the physical device, given in the
+              #org.freedesktop.ModemManager1.Modem:Device property.
+        @inhibit: %TRUE to inhibit the modem and %FALSE to uninhibit it.
+
+        Inhibit or uninhibit the device.
+
+        When the modem is inhibited ModemManager will close all its ports and
+        unexport it from the bus, so that users of the interface are no longer
+        able to operate with it.
+
+        This operation binds the inhibition request to the existence of the
+        caller on the DBus bus. If the caller disappears from the bus, the
+        inhibition will be automatically removed.
+        """
+        try:
+            if not self._manager:
+                raise ModemManager1ProxyError(
+                    'Failed to obtain dbus manager object. - No manager')
+            if not inhibit and not self._device:
+                raise ModemManager1ProxyError(
+                    'Uninhibit called before inhibit %s' % self._device)
+
+            if inhibit:
+                modem = self.get_modem()
+                if not modem:
+                    raise ModemManager1ProxyError(
+                        'Failed to obtain dbus manager object. - No modem')
+
+                self._device = modem.properties(
+                        mm1_constants.I_MODEM).get('Device')
+
+            logging.debug('device to be inhibited/uninhibited %s', self._device)
+            self._manager.InhibitDevice(dbus.String(self._device), inhibit)
+
+            logging.debug('inhibit=%r done with %s', inhibit, self._device)
+
+            if inhibit:
+                time.sleep(mm1_constants.MM_INHIBIT_PROCESSING_TIME)
+            else:
+                result = self.wait_for_modem(
+                    mm1_constants.MM_UNINHIBIT_PROCESSING_TIME)
+
+                time.sleep(mm1_constants.MM_REPROBE_PROCESSING_TIME)
+                if result is None:
+                    raise ModemManager1ProxyError('No modem after uninhibit')
+        except dbus.exceptions.DBusException as e:
+            raise ModemManager1ProxyError(
+                'Failed to obtain dbus object for the modem. '
+                'DBus error: %s' % repr(e))
 
     def get_modem(self):
         """
@@ -123,16 +181,16 @@
             modems = object_manager.GetManagedObjects()
         except dbus.exceptions.DBusException as e:
             raise ModemManager1ProxyError(
-                    'Failed to list the available modems. DBus error: |%s|',
-                    repr(e))
+                'Failed to list the available modems. DBus error: %s' %
+                repr(e))
 
         if not modems:
             return None
         elif len(modems) > 1:
             raise ModemManager1ProxyError(
-                    'Expected one modem object, found %d', len(modems))
+                'Expected one modem object, found %d' % len(modems))
 
-        modem_proxy = ModemProxy(self._bus, modems.keys()[0])
+        modem_proxy = ModemProxy(self._bus, list(modems.keys())[0])
         # Check that this object is valid
         try:
             modem_proxy.modem.GetAll(mm1_constants.I_MODEM,
@@ -142,9 +200,8 @@
             if _is_unknown_dbus_binding_exception(e):
                 return None
             raise ModemManager1ProxyError(
-                    'Failed to obtain dbus object for the modem. DBus error: '
-                    '|%s|', repr(e))
-
+                'Failed to obtain dbus object for the modem. DBus error: %s' %
+                repr(e))
 
     def wait_for_modem(self, timeout_seconds):
         """
@@ -159,58 +216,51 @@
 
         """
         return utils.poll_for_condition(
-                self.get_modem,
-                exception=ModemManager1ProxyError('No modem found'),
-                timeout=timeout_seconds)
+            self.get_modem,
+            exception=ModemManager1ProxyError('No modem found'),
+            timeout=timeout_seconds)
 
 
 class ModemProxy(object):
     """A wrapper around a DBus proxy for ModemManager1 modem object."""
 
     # Amount of time to wait for a state transition.
-    STATE_TRANSITION_WAIT_SECONDS = 10
+    STATE_TRANSITION_WAIT_SECONDS = 60
 
     def __init__(self, bus, path):
         self._bus = bus
         self._modem = self._bus.get_object(mm1_constants.I_MODEM_MANAGER, path)
 
-
     @property
     def modem(self):
         """@return the DBus modem object."""
         return self._modem
 
-
     @property
     def iface_modem(self):
         """@return org.freedesktop.ModemManager1.Modem DBus interface."""
         return dbus.Interface(self._modem, mm1_constants.I_MODEM)
 
-
     @property
     def iface_simple_modem(self):
         """@return org.freedesktop.ModemManager1.Simple DBus interface."""
         return dbus.Interface(self._modem, mm1_constants.I_MODEM_SIMPLE)
 
-
     @property
     def iface_gsm_modem(self):
         """@return org.freedesktop.ModemManager1.Modem3gpp DBus interface."""
         return dbus.Interface(self._modem, mm1_constants.I_MODEM_3GPP)
 
-
     @property
     def iface_cdma_modem(self):
         """@return org.freedesktop.ModemManager1.ModemCdma DBus interface."""
         return dbus.Interface(self._modem, mm1_constants.I_MODEM_CDMA)
 
-
     @property
     def iface_properties(self):
         """@return org.freedesktop.DBus.Properties DBus interface."""
         return dbus.Interface(self._modem, dbus.PROPERTIES_IFACE)
 
-
     def properties(self, iface):
         """Return the properties associated with the specified interface.
 
@@ -220,7 +270,6 @@
         """
         return self.iface_properties.GetAll(iface)
 
-
     def get_sim(self):
         """
         Return the SIM proxy object associated with this modem.
@@ -231,6 +280,15 @@
         sim_path = self.properties(mm1_constants.I_MODEM).get('Sim')
         if not sim_path:
             return None
+        return self.get_sim_at_path(sim_path)
+
+    def get_sim_at_path(self, sim_path):
+        """
+        Return the SIM proxy object at the given DBus path.
+
+        @param sim_path: DBus object path of the SIM.
+        @return SimProxy object or None if no valid SIM exists at that path.
+
+        """
         sim_proxy = SimProxy(self._bus, sim_path)
         # Check that this object is valid
         try:
@@ -240,9 +298,67 @@
             if _is_unknown_dbus_binding_exception(e):
                 return None
             raise ModemManager1ProxyError(
-                    'Failed to obtain dbus object for the SIM. DBus error: '
-                    '|%s|', repr(e))
+                'Failed to obtain dbus object for the SIM. DBus error: %s' %
+                repr(e))
 
+    def get_sim_slots(self):
+        """
+        Return the list of SIM slots available in the system, including the
+        SIM object paths when cards are present. If the SIM slot at a given
+        index does not have a SIM card available, an empty object path is
+        given for that index.
+
+        The length of this list equals the number of available SIM slots in
+        the system, and the position in the list is the slot index.
+
+        This list includes the SIM object considered as primary active SIM slot
+        (#org.freedesktop.ModemManager1.Modem.Sim) at index
+        #org.freedesktop.ModemManager1.Modem.ActiveSimSlot.
+
+        @return list of SimSlot paths
+
+        """
+        return self.properties(mm1_constants.I_MODEM).get('SimSlots')
+
+    def get_primary_sim_slot(self):
+        """
+        Return the index of the primary active SIM slot in the
+        #org.freedesktop.ModemManager1.Modem.SimSlots array, given in the
+        [1,N] range.
+
+        If multiple SIM slots are not supported, this property reports None.
+
+        In a Multi SIM Single Standby setup, this index identifies the only SIM
+        that is currently active. All the remaining slots will be inactive.
+
+        In a Multi SIM Multi Standby setup, this index identifies the active SIM
+        that is considered primary, i.e. the one that will be used when a data
+        connection is setup.
+
+        @return current primary slot index
+
+        """
+        return self.properties(mm1_constants.I_MODEM).get('PrimarySimSlot')
+
+    def set_primary_slot(self, sim_slot):
+        """
+        Select which SIM slot is considered primary, on devices that expose
+        multiple slots in the #org.freedesktop.ModemManager1.Modem.SimSlots
+        property.
+
+        When the switch happens the modem may require a full device reprobe,
+        so the modem object in DBus will get removed, and recreated once the
+        selected SIM slot is in use.
+
+        There is no limitation on which SIM slot to select, so the user may
+        also set as primary a slot that doesn't currently have any valid SIM
+        card inserted.
+
+        @param sim_slot: SIM slot number to set as primary.
+        @return: None on success; a DBus error is raised on failure.
+
+        """
+        self.iface_modem.SetPrimarySimSlot(dbus.UInt32(sim_slot))
 
     def wait_for_states(self, states,
                         timeout_seconds=STATE_TRANSITION_WAIT_SECONDS):
@@ -266,19 +382,18 @@
                          mm1_constants.MM_MODEM_STATE_DISCONNECTING,
                          mm1_constants.MM_MODEM_STATE_CONNECTING]:
                 raise ModemManager1ProxyError(
-                        'wait_for_states() does not support transitory states.')
+                    'wait_for_states() does not support transitory states.')
 
         utils.poll_for_condition(
-                lambda: self.properties(mm1_constants.I_MODEM)[
-                        mm1_constants.MM_MODEM_PROPERTY_NAME_STATE] in states,
-                exception=ModemManager1ProxyError(
-                        'Timed out waiting for modem to enter one of these '
-                        'states: %s, current state=%s',
-                        states,
-                        self.properties(mm1_constants.I_MODEM)[
-                                mm1_constants.MM_MODEM_PROPERTY_NAME_STATE]),
-                timeout=timeout_seconds)
-
+            lambda: self.properties(mm1_constants.I_MODEM)[
+                mm1_constants.MM_MODEM_PROPERTY_NAME_STATE] in states,
+            exception=ModemManager1ProxyError(
+                'Timed out waiting for modem to enter one of these '
+                'states: %s, current state=%s' %
+                (states,
+                 self.properties(mm1_constants.I_MODEM)[
+                     mm1_constants.MM_MODEM_PROPERTY_NAME_STATE])),
+            timeout=timeout_seconds)
 
 class SimProxy(object):
     """A wrapper around a DBus proxy for ModemManager1 SIM object."""
@@ -287,25 +402,21 @@
         self._bus = bus
         self._sim = self._bus.get_object(mm1_constants.I_MODEM_MANAGER, path)
 
-
     @property
     def sim(self):
         """@return the DBus SIM object."""
         return self._sim
 
-
     @property
     def iface_properties(self):
         """@return org.freedesktop.DBus.Properties DBus interface."""
         return dbus.Interface(self._sim, dbus.PROPERTIES_IFACE)
 
-
     @property
     def iface_sim(self):
         """@return org.freedesktop.ModemManager1.Sim DBus interface."""
         return dbus.Interface(self._sim, mm1_constants.I_SIM)
 
-
     def properties(self, iface=mm1_constants.I_SIM):
         """Return the properties associated with the specified interface.
 
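A minimal usage sketch of the multi-SIM helpers added above, assuming a ModemManager1Proxy instance `mm1` has already been constructed and a multi-slot modem is present (names other than the proxy methods shown in this file are illustrative):

    modem = mm1.get_modem()
    slots = modem.get_sim_slots()            # one SIM object path per slot
    primary = modem.get_primary_sim_slot()   # 1-based index, or None if unsupported
    if slots and primary is not None and len(slots) > 1:
        # Switching slots may force a full modem reprobe, so the DBus object
        # can disappear and reappear; wait for it to come back.
        modem.set_primary_slot(2 if primary == 1 else 1)
        mm1.wait_for_modem(timeout_seconds=60)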
diff --git a/client/cros/networking/pm_proxy.py b/client/cros/networking/pm_proxy.py
index 55fca67..d32f7c9 100644
--- a/client/cros/networking/pm_proxy.py
+++ b/client/cros/networking/pm_proxy.py
@@ -10,7 +10,9 @@
 import dbus
 import logging
 
-import mm1_proxy
+import common
+
+from autotest_lib.client.cros.networking import mm1_proxy
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.cros.cellular import mm1_constants
diff --git a/client/cros/networking/shill_context.py b/client/cros/networking/shill_context.py
index cdfe47f..99d1b8d 100644
--- a/client/cros/networking/shill_context.py
+++ b/client/cros/networking/shill_context.py
@@ -6,6 +6,7 @@
 
 import dbus
 import logging
+import six
 
 from contextlib import contextmanager
 
@@ -146,7 +147,11 @@
         if not service_properties[
                 shill_proxy.ShillProxy.SERVICE_PROPERTY_PROFILE]:
             shill = shill_proxy.ShillProxy.get_proxy()
-            manager_properties = shill.manager.GetProperties(utf8_strings=True)
+            if six.PY2:
+                manager_properties = shill.manager.GetProperties(
+                        utf8_strings=True)
+            else:
+                manager_properties = shill.manager.GetProperties()
             active_profile = manager_properties[
                     shill_proxy.ShillProxy.MANAGER_PROPERTY_ACTIVE_PROFILE]
             logging.info('ServiceAutoConnectContext: change cellular service '
diff --git a/client/cros/networking/shill_proxy.py b/client/cros/networking/shill_proxy.py
index 538ea02..57acdc8 100644
--- a/client/cros/networking/shill_proxy.py
+++ b/client/cros/networking/shill_proxy.py
@@ -1,16 +1,29 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import collections
 import dbus
 import dbus.mainloop.glib
-import gobject
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 import time
+import six
+
+from six.moves import map
+from six.moves import range
+from six import PY2
 
 from autotest_lib.client.cros import dbus_util
 
-
 class ShillProxyError(Exception):
     """Exceptions raised by ShillProxy and its children."""
     pass
@@ -82,6 +95,7 @@
     DEVICE_ENUMERATION_TIMEOUT = 30
     DEVICE_ENABLE_DISABLE_TIMEOUT = 60
     SERVICE_DISCONNECT_TIMEOUT = 5
+    SERVICE_READY_TIMEOUT = 5
 
     SERVICE_PROPERTY_AUTOCONNECT = 'AutoConnect'
     SERVICE_PROPERTY_DEVICE = 'Device'
@@ -275,11 +289,11 @@
 
         """
         dbus_dict = {}
-        for key, value in in_dict.iteritems():
-                if key not in ShillProxy.SERVICE_PROPERTY_MAP:
-                        raise ShillProxyError('Unsupported property %s' % (key))
-                (dbus_type, kwargs) = ShillProxy.SERVICE_PROPERTY_MAP[key]
-                dbus_dict[key] = dbus_type(value, variant_level=1, **kwargs)
+        for key, value in list(in_dict.items()):
+            if key not in ShillProxy.SERVICE_PROPERTY_MAP:
+                raise ShillProxyError('Unsupported property %s' % (key))
+            (dbus_type, kwargs) = ShillProxy.SERVICE_PROPERTY_MAP[key]
+            dbus_dict[key] = dbus_type(value, variant_level=1, **kwargs)
         return dbus_dict
 
 
@@ -302,7 +316,7 @@
         @return python typed object representing property value or None
 
         """
-        properties = interface.GetProperties(utf8_strings=True)
+        properties = interface.GetProperties()
         if property_key in properties:
             return ShillProxy.dbus2primitive(properties[property_key])
         else:
@@ -318,7 +332,7 @@
         @param value string value to set for property on interface from string
 
         """
-        properties = interface.GetProperties(utf8_strings=True)
+        properties = interface.GetProperties()
         if property_key not in properties:
             raise ShillProxyError('No property %s found in %s' %
                     (property_key, interface.object_path))
@@ -342,8 +356,7 @@
 
         """
         if property_key not in ShillProxy.MANAGER_OPTIONAL_PROPERTY_MAP:
-                raise ShillProxyError('Unsupported property %s' %
-                                      (property_key))
+            raise ShillProxyError('Unsupported property %s' % (property_key))
         else:
             dbus_class = ShillProxy.MANAGER_OPTIONAL_PROPERTY_MAP[property_key]
             interface.SetProperty(property_key,
@@ -509,7 +522,7 @@
             # Check to make sure we're not already in a target state.
             try:
                 properties = self.dbus2primitive(
-                        dbus_object.GetProperties(utf8_strings=True))
+                        dbus_object.GetProperties())
                 last_value = properties.get(property_name, '(no value found)')
                 if last_value in expected_values:
                     return True, last_value, duration()
@@ -517,15 +530,15 @@
             except dbus.exceptions.DBusException:
                 return False, '(object reference became invalid)', duration()
 
-            context = gobject.MainLoop().get_context()
+            context = GObject.MainLoop().get_context()
             while duration() < timeout_seconds:
                 # Dispatch all pending events.
                 while context.iteration(False):
                     pass
 
                 while update_queue:
-                    updated_property, value = map(self.dbus2primitive,
-                                                  update_queue.popleft())
+                    updated_property, value = list(
+                            map(self.dbus2primitive, update_queue.popleft()))
                     if property_name != updated_property:
                         continue
 
@@ -554,7 +567,7 @@
         @return dbus object representing the active profile.
 
         """
-        properties = self.manager.GetProperties(utf8_strings=True)
+        properties = self.manager.GetProperties()
         return self.get_dbus_object(
                 self.DBUS_TYPE_PROFILE,
                 properties[self.MANAGER_PROPERTY_ACTIVE_PROFILE])
@@ -576,14 +589,14 @@
 
     def get_devices(self):
         """Return the list of devices as dbus Interface objects"""
-        properties = self.manager.GetProperties(utf8_strings=True)
+        properties = self.manager.GetProperties()
         return [self.get_dbus_object(self.DBUS_TYPE_DEVICE, path)
                 for path in properties[self.MANAGER_PROPERTY_DEVICES]]
 
 
     def get_profiles(self):
         """Return the list of profiles as dbus Interface objects"""
-        properties = self.manager.GetProperties(utf8_strings=True)
+        properties = self.manager.GetProperties()
         return [self.get_dbus_object(self.DBUS_TYPE_PROFILE, path)
                 for path in properties[self.MANAGER_PROPERTY_PROFILES]]
 
@@ -610,7 +623,7 @@
                 otherwise.
 
         """
-        properties = self.manager.GetProperties(utf8_strings=True)
+        properties = self.manager.GetProperties()
         all_services = properties.get(self.MANAGER_PROPERTY_ALL_SERVICES,
                                       None)
         if not all_services:
@@ -619,7 +632,7 @@
         for service_path in all_services:
             service = self.get_dbus_object(self.DBUS_TYPE_SERVICE,
                                            service_path)
-            properties = service.GetProperties(utf8_strings=True)
+            properties = service.GetProperties()
             device_path = properties.get(self.SERVICE_PROPERTY_DEVICE, None)
             if device_path == device.object_path:
                 return service
@@ -646,19 +659,19 @@
             return None
 
         dbus_type, manager_property = self.OBJECT_TYPE_PROPERTY_MAP[object_type]
-        manager_properties = self.manager.GetProperties(utf8_strings=True)
+        manager_properties = self.manager.GetProperties()
         for path in manager_properties[manager_property]:
             try:
                 test_object = self.get_dbus_object(dbus_type, path)
-                object_properties = test_object.GetProperties(utf8_strings=True)
-                for name, value in properties.iteritems():
+                object_properties = test_object.GetProperties()
+                for name, value in list(properties.items()):
                     if (name not in object_properties or
                         self.dbus2primitive(object_properties[name]) != value):
                         break
                 else:
                     return test_object
 
-            except dbus.exceptions.DBusException, e:
+            except dbus.exceptions.DBusException as _:
                 # This could happen if for instance, you're enumerating services
                 # and test_object was removed in shill between the call to get
                 # the manager properties and the call to get the service
@@ -698,10 +711,18 @@
         except dbus.exceptions.DBusException as e:
             if e.get_dbus_name() != self.ERROR_ALREADY_CONNECTED:
                 raise e
-        success, _, _ = self.wait_for_property_in(
-                service, self.SERVICE_PROPERTY_STATE,
-                self.SERVICE_CONNECTED_STATES,
-                timeout_seconds=timeout_seconds)
+
+        # 'ready' may be a transient state; poll for a stable connected state.
+        for _ in range(self.SERVICE_READY_TIMEOUT):
+            success, state, _ = self.wait_for_property_in(
+                    service,
+                    self.SERVICE_PROPERTY_STATE,
+                    self.SERVICE_CONNECTED_STATES,
+                    timeout_seconds=timeout_seconds)
+            if state != 'ready':
+                break
+            time.sleep(1)
+
         return success
 
 
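For reference, the compatibility pattern shill_proxy.py now relies on, shown as a self-contained sketch: import PyGObject's GObject when available, fall back to the legacy gobject binding on older Python 2 images, and drain pending DBus events without blocking:

    try:
        from gi.repository import GObject
    except ImportError:
        import gobject as GObject

    context = GObject.MainLoop().get_context()
    # Dispatch any queued DBus events without blocking the caller.
    while context.iteration(False):
        pass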
diff --git a/client/cros/networking/shill_xmlrpc_server.py b/client/cros/networking/shill_xmlrpc_server.py
index 175dfa3..d876dbb 100755
--- a/client/cros/networking/shill_xmlrpc_server.py
+++ b/client/cros/networking/shill_xmlrpc_server.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -8,8 +8,10 @@
 import logging
 import logging.handlers
 import multiprocessing
+import six
 
 import common
+
 from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
 from autotest_lib.client.cros import xmlrpc_server
@@ -112,8 +114,12 @@
         """
         while True:
             active_profile = self._wifi_proxy.get_active_profile()
+            if six.PY2:
+                profile_props = active_profile.GetProperties(utf8_strings=True)
+            else:
+                profile_props = active_profile.GetProperties()
             profile_name = self._wifi_proxy.dbus2primitive(
-                    active_profile.GetProperties(utf8_strings=True)['Name'])
+                    profile_props['Name'])
             if profile_name == 'default':
                 return True
             self._wifi_proxy.manager.PopProfile(profile_name)
@@ -214,8 +220,11 @@
         """
         shill = self._wifi_proxy
         for profile in shill.get_profiles():
-            profile_properties = shill.dbus2primitive(
-                    profile.GetProperties(utf8_strings=True))
+            if six.PY2:
+                profile_properties = profile.GetProperties(utf8_strings=True)
+            else:
+                profile_properties = profile.GetProperties()
+            profile_properties = shill.dbus2primitive(profile_properties)
             entry_ids = profile_properties[shill.PROFILE_PROPERTY_ENTRIES]
             for entry_id in entry_ids:
                 entry = profile.GetEntry(entry_id)
@@ -252,8 +261,11 @@
         ret = []
         devices = self._wifi_proxy.get_devices()
         for device in devices:
-            properties = self._wifi_proxy.dbus2primitive(
-                    device.GetProperties(utf8_strings=True))
+            if six.PY2:
+                properties = device.GetProperties(utf8_strings=True)
+            else:
+                properties = device.GetProperties()
+            properties = self._wifi_proxy.dbus2primitive(properties)
             if properties[self._wifi_proxy.DEVICE_PROPERTY_TYPE] != 'wifi':
                 continue
             ret.append(properties[self._wifi_proxy.DEVICE_PROPERTY_NAME])
@@ -322,7 +334,7 @@
         """Get a dict of properties for a service.
 
         @param ssid string service to get properties for.
-        @return dict of Python friendly native types or None on failures.
+        @return dict of Python-friendly built-in types or None on failure.
 
         """
         discovery_params = {self._wifi_proxy.SERVICE_PROPERTY_TYPE: 'wifi',
@@ -331,14 +343,21 @@
                 discovery_params)
         service_object = self._wifi_proxy.get_dbus_object(
                 self._wifi_proxy.DBUS_TYPE_SERVICE, service_path)
-        service_properties = service_object.GetProperties(
-                utf8_strings=True)
+        if six.PY2:
+            service_properties = service_object.GetProperties(
+                    utf8_strings=True)
+        else:
+            service_properties = service_object.GetProperties()
         return self._wifi_proxy.dbus2primitive(service_properties)
 
 
     @xmlrpc_server.dbus_safe(None)
     def get_manager_properties(self):
-        manager_props = self._wifi_proxy.manager.GetProperties(utf8_strings=True)
+        if six.PY2:
+            manager_props = self._wifi_proxy.manager.GetProperties(
+                    utf8_strings=True)
+        else:
+            manager_props = self._wifi_proxy.manager.GetProperties()
         return self._wifi_proxy.dbus2primitive(manager_props)
 
 
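The Python 2/3 guard around GetProperties() recurs in several hunks above; a hypothetical helper (its name is not part of this change) that factors out the pattern might look like:

    import six

    def get_dbus_properties(dbus_obj):
        """Call GetProperties, passing utf8_strings only where Python 2 dbus needs it."""
        if six.PY2:
            return dbus_obj.GetProperties(utf8_strings=True)
        return dbus_obj.GetProperties()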
diff --git a/client/cros/networking/wifi_proxy.py b/client/cros/networking/wifi_proxy.py
index 2c23cd5..9db6a71 100644
--- a/client/cros/networking/wifi_proxy.py
+++ b/client/cros/networking/wifi_proxy.py
@@ -1,9 +1,15 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus
 import logging
+import six
 import time
 
 from autotest_lib.client.common_lib import utils
@@ -29,7 +35,7 @@
         """Iterate over all pushed profiles and remove WiFi entries."""
         profiles = self.get_profiles()
         for profile in profiles:
-            profile_properties = profile.GetProperties(utf8_strings=True)
+            profile_properties = profile.GetProperties()
             entries = profile_properties[self.PROFILE_PROPERTY_ENTRIES]
             for entry_id in entries:
                 try:
@@ -167,8 +173,7 @@
             service_object = self.find_matching_service(discovery_params)
             if service_object:
                 try:
-                    service_properties = service_object.GetProperties(
-                            utf8_strings=True)
+                    service_properties = service_object.GetProperties()
                 except dbus.exceptions.DBusException:
                     # This usually means the service handle has become invalid.
                     # Which is sort of like not getting a handle back from
@@ -192,7 +197,7 @@
         # to connect it, and watch the states roll by.
         logging.info('Connecting...')
         try:
-            for service_property, value in security_parameters.iteritems():
+            for service_property, value in six.iteritems(security_parameters):
                 service_object.SetProperty(service_property, value)
             if guid is not None:
                 service_object.SetProperty(self.SERVICE_PROPERTY_GUID, guid)
@@ -201,7 +206,7 @@
                                            autoconnect)
             service_object.Connect()
             logging.info('Called connect on service')
-        except dbus.exceptions.DBusException, e:
+        except dbus.exceptions.DBusException as e:
             logging.error('Caught an error while trying to connect: %s',
                           e.get_dbus_message())
             return (False, discovery_time, association_time,
@@ -296,7 +301,7 @@
                       'BgscanMethod': (dbus.String, method),
                       'BgscanShortInterval': (dbus.UInt16, short_interval),
                       'BgscanSignalThreshold': (dbus.Int32, signal)}
-        for k, (type_cast, value) in attributes.iteritems():
+        for k, (type_cast, value) in six.iteritems(attributes):
             if value is None:
                 continue
 
@@ -312,14 +317,14 @@
 
     def get_active_wifi_SSIDs(self):
         """@return list of string SSIDs with at least one BSS we've scanned."""
-        properties = self.manager.GetProperties(utf8_strings=True)
+        properties = self.manager.GetProperties()
         services = [self.get_dbus_object(self.DBUS_TYPE_SERVICE, path)
                     for path in properties[self.MANAGER_PROPERTY_SERVICES]]
         wifi_services = []
         for service in services:
             try:
-                service_properties = self.dbus2primitive(service.GetProperties(
-                        utf8_strings=True))
+                service_properties = self.dbus2primitive(
+                        service.GetProperties())
             except dbus.exceptions.DBusException:
                 pass  # Probably the service disappeared before GetProperties().
             logging.debug('Considering service with properties: %r',
@@ -331,7 +336,7 @@
                 # is not a valid ASCII string.
                 ssid = service_properties[self.SERVICE_PROPERTY_HEX_SSID]
                 logging.info('Found active WiFi service: %s', ssid)
-                wifi_services.append(ssid.decode('hex'))
+                wifi_services.append(six.ensure_text(ssid, 'hex'))
         return wifi_services
 
 
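Hex-encoded SSIDs (the HexSSID service property) can also be decoded portably with the standard library; a minimal sketch, assuming the property value is an ASCII hex string (the helper name is illustrative):

    import binascii

    def hex_ssid_to_text(hex_ssid):
        # e.g. '68656c6c6f' -> 'hello'; undecodable bytes are replaced rather than raising.
        return binascii.unhexlify(hex_ssid).decode('utf-8', 'replace')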
diff --git a/client/cros/nfc/console.py b/client/cros/nfc/console.py
deleted file mode 100755
index 96302cd..0000000
--- a/client/cros/nfc/console.py
+++ /dev/null
@@ -1,671 +0,0 @@
-#!/usr/bin/env python2
-
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import cmd
-import dbus
-import dbus.exceptions
-import dbus.mainloop.glib
-import gobject
-import threading
-
-from functools import wraps
-
-
-DBUS_ERROR = 'org.freedesktop.DBus.Error'
-NEARD_PATH = '/org/neard/'
-PROMPT = 'NFC> '
-
-class NfcClientException(Exception):
-    """Exception class for exceptions thrown by NfcClient."""
-
-
-def print_message(message, newlines=2):
-    """
-    Prints the given message with extra wrapping newline characters.
-
-    @param message: Message to print.
-    @param newlines: Integer, specifying the number of '\n' characters that
-            should be padded at the beginning and end of |message| before
-            being passed to "print".
-
-    """
-    padding = newlines * '\n'
-    message = padding + message + padding
-    print message
-
-
-def handle_errors(func):
-    """
-    Decorator for handling exceptions that are commonly raised by many of the
-    methods in NfcClient.
-
-    @param func: The function this decorator is wrapping.
-
-    """
-    @wraps(func)
-    def _error_handler(*args):
-        try:
-            return func(*args)
-        except dbus.exceptions.DBusException as e:
-            if e.get_dbus_name() == DBUS_ERROR + '.ServiceUnknown':
-                print_message('neard may have crashed or disappeared. '
-                              'Check if neard is running and run "initialize" '
-                              'from this shell.')
-                return
-            if e.get_dbus_name() == DBUS_ERROR + '.UnknownObject':
-                print_message('Could not find object.')
-                return
-            print_message(str(e))
-        except Exception as e:
-            print_message(str(e))
-    return _error_handler
-
-
-class NfcClient(object):
-    """
-    neard D-Bus client
-
-    """
-    NEARD_SERVICE_NAME = 'org.neard'
-    IMANAGER = NEARD_SERVICE_NAME + '.Manager'
-    IADAPTER = NEARD_SERVICE_NAME + '.Adapter'
-    ITAG = NEARD_SERVICE_NAME + '.Tag'
-    IRECORD = NEARD_SERVICE_NAME + '.Record'
-    IDEVICE = NEARD_SERVICE_NAME + '.Device'
-
-    def __init__(self):
-        self._mainloop = None
-        self._mainloop_thread = None
-        self._adapters = {}
-        self._adapter_property_handler_matches = {}
-
-    def begin(self):
-        """
-        Starts the D-Bus client.
-
-        """
-        # Here we run a GLib MainLoop in its own thread, so that the client can
-        # listen to D-Bus signals while keeping the console interactive.
-        self._dbusmainloop = dbus.mainloop.glib.DBusGMainLoop(
-                set_as_default=True)
-        dbus.mainloop.glib.threads_init()
-        gobject.threads_init()
-
-        def _mainloop_thread_func():
-            self._mainloop = gobject.MainLoop()
-            context = self._mainloop.get_context()
-            self._run_loop = True
-            while self._run_loop:
-                context.iteration(True)
-        self._mainloop_thread = threading.Thread(None, _mainloop_thread_func)
-        self._mainloop_thread.start()
-
-        self._bus = dbus.SystemBus()
-        self.setup_manager()
-
-    def end(self):
-        """
-        Stops the D-Bus client.
-
-        """
-        self._run_loop = False
-        self._mainloop.quit()
-        self._mainloop_thread.join()
-
-    def restart(self):
-        """Reinitializes the NFC client."""
-        self.setup_manager()
-
-    @handle_errors
-    def _get_manager_proxy(self):
-        return dbus.Interface(
-                self._bus.get_object(self.NEARD_SERVICE_NAME, '/'),
-                self.IMANAGER)
-
-    @handle_errors
-    def _get_adapter_proxy(self, adapter):
-        return dbus.Interface(
-                self._bus.get_object(self.NEARD_SERVICE_NAME, adapter),
-                self.IADAPTER)
-
-    def _get_cached_adapter_proxy(self, adapter):
-        adapter_proxy = self._adapters.get(adapter, None)
-        if not adapter_proxy:
-            raise NfcClientException('Adapter "' + adapter + '" not found.')
-        return adapter_proxy
-
-
-    @handle_errors
-    def _get_tag_proxy(self, tag):
-        return dbus.Interface(
-                self._bus.get_object(self.NEARD_SERVICE_NAME, tag),
-                self.ITAG)
-
-    @handle_errors
-    def _get_device_proxy(self, device):
-        return dbus.Interface(
-                self._bus.get_object(self.NEARD_SERVICE_NAME, device),
-                self.IDEVICE)
-
-    @handle_errors
-    def _get_record_proxy(self, record):
-        return dbus.Interface(
-                self._bus.get_object(self.NEARD_SERVICE_NAME, record),
-                self.IRECORD)
-
-    @handle_errors
-    def _get_adapter_properties(self, adapter):
-        adapter_proxy = self._get_cached_adapter_proxy(adapter)
-        return adapter_proxy.GetProperties()
-
-    def _get_adapters(self):
-        props = self._manager.GetProperties()
-        return props.get('Adapters', None)
-
-    def setup_manager(self):
-        """
-        Creates a manager proxy and subscribes to adapter signals. This method
-        will also initialize proxies for adapters if any are available.
-
-        """
-        # Create the manager proxy.
-        self._adapters.clear()
-        self._manager = self._get_manager_proxy()
-        if not self._manager:
-            print_message('Failed to create a proxy to the Manager interface.')
-            return
-
-        # Listen to the adapter added and removed signals.
-        self._manager.connect_to_signal(
-                'AdapterAdded',
-                lambda adapter: self.register_adapter(str(adapter)))
-        self._manager.connect_to_signal(
-                'AdapterRemoved',
-                lambda adapter: self.unregister_adapter(str(adapter)))
-
-        # See if there are any adapters and create proxies for each.
-        adapters = self._get_adapters()
-        if adapters:
-            for adapter in adapters:
-                self.register_adapter(adapter)
-
-    def register_adapter(self, adapter):
-        """
-        Registers an adapter proxy with the given object path and subscribes to
-        adapter signals.
-
-        @param adapter: string, containing the adapter's D-Bus object path.
-
-        """
-        print_message('Added adapter: ' + adapter)
-        adapter_proxy = self._get_adapter_proxy(adapter)
-        self._adapters[adapter] = adapter_proxy
-
-        # Tag found/lost currently don't get fired. Monitor property changes
-        # instead.
-        if self._adapter_property_handler_matches.get(adapter, None) is None:
-            self._adapter_property_handler_matches[adapter] = (
-                    adapter_proxy.connect_to_signal(
-                            'PropertyChanged',
-                            (lambda name, value:
-                                    self._adapter_property_changed_signal(
-                                            adapter, name, value))))
-
-    def unregister_adapter(self, adapter):
-        """
-        Removes the adapter proxy for the given object path from the internal
-        cache of adapters.
-
-        @param adapter: string, containing the adapter's D-Bus object path.
-
-        """
-        print_message('Removed adapter: ' + adapter)
-        match = self._adapter_property_handler_matches.get(adapter, None)
-        if match is not None:
-            match.remove()
-            self._adapter_property_handler_matches.pop(adapter)
-        self._adapters.pop(adapter)
-
-    def _adapter_property_changed_signal(self, adapter, name, value):
-        if name == 'Tags' or name == 'Devices':
-            print_message('Found ' + name + ': ' +
-                          self._dbus_array_to_string(value))
-
-    @handle_errors
-    def show_adapters(self):
-        """
-        Prints the D-Bus object paths of all adapters that are available.
-
-        """
-        adapters = self._get_adapters()
-        if not adapters:
-            print_message('No adapters found.')
-            return
-        for adapter in adapters:
-            print_message('  ' + str(adapter), newlines=0)
-        print
-
-    def _dbus_array_to_string(self, array):
-        string = '[ '
-        for value in array:
-            string += ' ' + str(value) + ', '
-        string += ' ]'
-        return string
-
-    def print_adapter_status(self, adapter):
-        """
-        Prints the properties of the given adapter.
-
-        @param adapter: string, containing the adapter's D-Bus object path.
-
-        """
-        props = self._get_adapter_properties(adapter)
-        if not props:
-            return
-        print_message('Status ' + adapter + ': ', newlines=0)
-        for key, value in props.iteritems():
-            if type(value) == dbus.Array:
-                value = self._dbus_array_to_string(value)
-            else:
-                value = str(value)
-            print_message('  ' + key + ' = ' + value, newlines=0)
-        print
-
-    @handle_errors
-    def set_powered(self, adapter, powered):
-        """
-        Enables or disables the adapter.
-
-        @param adapter: string, containing the adapter's D-Bus object path.
-        @param powered: boolean that dictates whether the adapter will be
-                enabled or disabled.
-
-        """
-        adapter_proxy = self._get_cached_adapter_proxy(adapter)
-        if not adapter_proxy:
-            return
-        adapter_proxy.SetProperty('Powered', powered)
-
-    @handle_errors
-    def start_polling(self, adapter):
-        """
-        Starts polling for nearby tags and devices in "Initiator" mode.
-
-        @param adapter: string, containing the adapter's D-Bus object path.
-
-        """
-        adapter_proxy = self._get_cached_adapter_proxy(adapter)
-        adapter_proxy.StartPollLoop('Initiator')
-        print_message('Started polling.')
-
-    @handle_errors
-    def stop_polling(self, adapter):
-        """
-        Stops polling for nearby tags and devices.
-
-        @param adapter: string, containing the adapter's D-Bus object path.
-
-        """
-        adapter_proxy = self._get_cached_adapter_proxy(adapter)
-        adapter_proxy.StopPollLoop()
-        self._polling_stopped = True
-        print_message('Stopped polling.')
-
-    @handle_errors
-    def show_tag_data(self, tag):
-        """
-        Prints the properties of the given tag, as well as the contents of any
-        records associated with it.
-
-        @param tag: string, containing the tag's D-Bus object path.
-
-        """
-        tag_proxy = self._get_tag_proxy(tag)
-        if not tag_proxy:
-            print_message('Tag "' + tag + '" not found.')
-            return
-        props = tag_proxy.GetProperties()
-        print_message('Tag ' + tag + ': ', newlines=1)
-        for key, value in props.iteritems():
-            if key != 'Records':
-                print_message('  ' + key + ' = ' + str(value), newlines=0)
-        records = props['Records']
-        if not records:
-            return
-        print_message('Records: ', newlines=1)
-        for record in records:
-            self.show_record_data(str(record))
-        print
-
-    @handle_errors
-    def show_device_data(self, device):
-        """
-        Prints the properties of the given device, as well as the contents of
-        any records associated with it.
-
-        @param device: string, containing the device's D-Bus object path.
-
-        """
-        device_proxy = self._get_device_proxy(device)
-        if not device_proxy:
-            print_message('Device "' + device + '" not found.')
-            return
-        records = device_proxy.GetProperties()['Records']
-        if not records:
-            print_message('No records on device.')
-            return
-        print_message('Records: ', newlines=1)
-        for record in records:
-            self.show_record_data(str(record))
-        print
-
-    @handle_errors
-    def show_record_data(self, record):
-        """
-        Prints the contents of the given record.
-
-        @param record: string, containing the record's D-Bus object path.
-
-        """
-        record_proxy = self._get_record_proxy(record)
-        if not record_proxy:
-            print_message('Record "' + record + '" not found.')
-            return
-        props = record_proxy.GetProperties()
-        print_message('Record ' + record + ': ', newlines=1)
-        for key, value in props.iteritems():
-            print '  ' + key + ' = ' + value
-        print
-
-    def _create_record_data(self, record_type, params):
-        if record_type == 'Text':
-            possible_keys = [ 'Encoding', 'Language', 'Representation' ]
-            tag_data = { 'Type': 'Text' }
-        elif record_type == 'URI':
-            possible_keys = [ 'URI' ]
-            tag_data = { 'Type': 'URI' }
-        else:
-            print_message('Writing record type "' + record_type +
-                          '" currently not supported.')
-            return None
-        for key, value in params.iteritems():
-            if key in possible_keys:
-                tag_data[key] = value
-        return tag_data
-
-    @handle_errors
-    def write_tag(self, tag, record_type, params):
-        """
-        Writes an NDEF record to the given tag.
-
-        @param tag: string, containing the tag's D-Bus object path.
-        @param record_type: The type of the record, e.g. Text or URI.
-        @param params: dictionary, containing the parameters of the NDEF.
-
-        """
-        tag_data = self._create_record_data(record_type, params)
-        if not tag_data:
-            return
-        tag_proxy = self._get_tag_proxy(tag)
-        if not tag_proxy:
-            print_message('Tag "' + tag + '" not found.')
-            return
-        tag_proxy.Write(tag_data)
-        print_message('Tag written!')
-
-    @handle_errors
-    def push_to_device(self, device, record_type, params):
-        """
-        Pushes an NDEF record to the given device.
-
-        @param device: string, containing the device's D-Bus object path.
-        @param record_type: The type of the record, e.g. Text or URI.
-        @param params: dictionary, containing the parameters of the NDEF.
-
-        """
-        record_data = self._create_record_data(record_type, params)
-        if not record_data:
-            return
-        device_proxy = self._get_device_proxy(device)
-        if not device_proxy:
-            print_message('Device "' + device + '" not found.')
-            return
-        device_proxy.Push(record_data)
-        print_message('NDEF pushed to device!')
-
-
-class NfcConsole(cmd.Cmd):
-    """
-    Interactive console to interact with the NFC daemon.
-
-    """
-    def __init__(self):
-        cmd.Cmd.__init__(self)
-        self.prompt = PROMPT
-
-    def begin(self):
-        """
-        Starts the interactive shell.
-
-        """
-        print_message('NFC console! Run "help" for a list of commands.',
-                      newlines=1)
-        self._nfc_client = NfcClient()
-        self._nfc_client.begin()
-        self.cmdloop()
-
-    def can_exit(self):
-        """Override"""
-        return True
-
-    def do_initialize(self, args):
-        """Handles "initialize"."""
-        if args:
-            print_message('Command "initialize" expects no arguments.')
-            return
-        self._nfc_client.restart()
-
-    def help_initialize(self):
-        """Prints the help message for "initialize"."""
-        print_message('Initializes the neard D-Bus client. This can be '
-                      'run many times to restart the client in case of '
-                      'neard failures or crashes.')
-
-    def do_adapters(self, args):
-        """Handles "adapters"."""
-        if args:
-            print_message('Command "adapters" expects no arguments.')
-            return
-        self._nfc_client.show_adapters()
-
-    def help_adapters(self):
-        """Prints the help message for "adapters"."""
-        print_message('Displays the D-Bus object paths of the available '
-                      'adapter objects.')
-
-    def do_adapter_status(self, args):
-        """Handles "adapter_status"."""
-        args = args.strip().split(' ')
-        if len(args) != 1 or not args[0]:
-            print_message('Usage: adapter_status <adapter>')
-            return
-        self._nfc_client.print_adapter_status(NEARD_PATH + args[0])
-
-    def help_adapter_status(self):
-        """Prints the help message for "adapter_status"."""
-        print_message('Returns the properties of the given NFC adapter.\n\n'
-                      '    Ex: "adapter_status nfc0"')
-
-    def do_enable_adapter(self, args):
-        """Handles "enable_adapter"."""
-        args = args.strip().split(' ')
-        if len(args) != 1 or not args[0]:
-            print_message('Usage: enable_adapter <adapter>')
-            return
-        self._nfc_client.set_powered(NEARD_PATH + args[0], True)
-
-    def help_enable_adapter(self):
-        """Prints the help message for "enable_adapter"."""
-        print_message('Powers up the adapter. Ex: "enable_adapter nfc0"')
-
-    def do_disable_adapter(self, args):
-        """Handles "disable_adapter"."""
-        args = args.strip().split(' ')
-        if len(args) != 1 or not args[0]:
-            print_message('Usage: disable_adapter <adapter>')
-            return
-        self._nfc_client.set_powered(NEARD_PATH + args[0], False)
-
-    def help_disable_adapter(self):
-        """Prints the help message for "disable_adapter"."""
-        print_message('Powers down the adapter. Ex: "disable_adapter nfc0"')
-
-    def do_start_poll(self, args):
-        """Handles "start_poll"."""
-        args = args.strip().split(' ')
-        if len(args) != 1 or not args[0]:
-            print_message('Usage: start_poll <adapter>')
-            return
-        self._nfc_client.start_polling(NEARD_PATH + args[0])
-
-    def help_start_poll(self):
-        """Prints the help message for "start_poll"."""
-        print_message('Initiates a poll loop.\n\n    Ex: "start_poll nfc0"')
-
-    def do_stop_poll(self, args):
-        """Handles "stop_poll"."""
-        args = args.split(' ')
-        if len(args) != 1 or not args[0]:
-            print_message('Usage: stop_poll <adapter>')
-            return
-        self._nfc_client.stop_polling(NEARD_PATH + args[0])
-
-    def help_stop_poll(self):
-        """Prints the help message for "stop_poll"."""
-        print_message('Stops a poll loop.\n\n    Ex: "stop_poll nfc0"')
-
-    def do_read_tag(self, args):
-        """Handles "read_tag"."""
-        args = args.strip().split(' ')
-        if len(args) != 1 or not args[0]:
-            print_message('Usage read_tag <tag>')
-            return
-        self._nfc_client.show_tag_data(NEARD_PATH + args[0])
-
-    def help_read_tag(self):
-        """Prints the help message for "read_tag"."""
-        print_message('Reads the contents of a tag.  Ex: read_tag nfc0/tag0')
-
-    def _parse_record_args(self, record_type, args):
-        if record_type == 'Text':
-            if len(args) < 5:
-                print_message('Usage: write_tag <tag> Text <encoding> '
-                              '<language> <representation>')
-                return None
-            if args[2] not in [ 'UTF-8', 'UTF-16' ]:
-                print_message('Encoding must be one of "UTF-8" or "UTF-16".')
-                return None
-            return {
-                'Encoding': args[2],
-                'Language': args[3],
-                'Representation': ' '.join(args[4:])
-            }
-        if record_type == 'URI':
-            if len(args) != 3:
-                print_message('Usage: write_tag <tag> URI <uri>')
-                return None
-            return {
-                'URI': args[2]
-            }
-        print_message('Only types "Text" and "URI" are supported by this '
-                      'script.')
-        return None
-
-    def do_write_tag(self, args):
-        """Handles "write_tag"."""
-        args = args.strip().split(' ')
-        if len(args) < 3:
-            print_message('Usage: write_tag <tag> [params]')
-            return
-        record_type = args[1]
-        params = self._parse_record_args(record_type, args)
-        if not params:
-            return
-        self._nfc_client.write_tag(NEARD_PATH + args[0],
-                                   record_type, params)
-
-    def help_write_tag(self):
-        """Prints the help message for "write_tag"."""
-        print_message('Writes the given data to a tag. Usage:\n'
-                      '  write_tag <tag> Text <encoding> <language> '
-                      '<representation>\n  write_tag <tag> URI <uri>')
-
-    def do_read_device(self, args):
-        """Handles "read_device"."""
-        args = args.strip().split(' ')
-        if len(args) != 1 or not args[0]:
-            print_message('Usage read_device <device>')
-            return
-        self._nfc_client.show_device_data(NEARD_PATH + args[0])
-
-    def help_read_device(self):
-        """Prints the help message for "read_device"."""
-        print_message('Reads the contents of a device.  Ex: read_device '
-                      'nfc0/device0')
-
-    def do_push_to_device(self, args):
-        """Handles "push_to_device"."""
-        args = args.strip().split(' ')
-        if len(args) < 3:
-            print_message('Usage: push_to_device <device> [params]')
-            return
-        record_type = args[1]
-        params = self._parse_record_args(record_type, args)
-        if not params:
-            return
-        self._nfc_client.push_to_device(NEARD_PATH + args[0],
-                                        record_type, params)
-
-    def help_push_to_device(self):
-        """Prints the help message for "push_to_device"."""
-        print_message('Pushes the given data to a device. Usage:\n'
-                      '  push_to_device <device> Text <encoding> <language> '
-                      '<representation>\n  push_to_device <device> URI <uri>')
-
-    def do_exit(self, args):
-        """
-        Handles the 'exit' command.
-
-        @param args: Arguments to the command. Unused.
-
-        """
-        if args:
-            print_message('Command "exit" expects no arguments.')
-            return
-        resp = raw_input('Are you sure? (yes/no): ')
-        if resp == 'yes':
-            print_message('Goodbye!')
-            self._nfc_client.end()
-            return True
-        if resp != 'no':
-            print_message('Did not understand: ' + resp)
-        return False
-
-    def help_exit(self):
-        """Handles the 'help exit' command."""
-        print_message('Exits the console.')
-
-    do_EOF = do_exit
-    help_EOF = help_exit
-
-
-def main():
-    """Main function."""
-    NfcConsole().begin()
-
-
-if __name__ == '__main__':
-    main()
diff --git a/client/cros/ownership.py b/client/cros/ownership.py
index de87aaa..75aab9b 100644
--- a/client/cros/ownership.py
+++ b/client/cros/ownership.py
@@ -1,13 +1,17 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging, os, shutil, tempfile
 
-import common, constants, cryptohome
+import common
+
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import autotemp, error
+from autotest_lib.client.cros import constants
 from autotest_lib.client.cros import cros_ui
+from autotest_lib.client.cros import cryptohome
 
 
 PK12UTIL = 'pk12util'
diff --git a/client/cros/pkcs11.py b/client/cros/pkcs11.py
deleted file mode 100644
index 9b229cb..0000000
--- a/client/cros/pkcs11.py
+++ /dev/null
@@ -1,271 +0,0 @@
-# Lint as: python2, python3
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions used for PKCS#11 library testing."""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import grp, logging, os, pwd, re, stat, sys, shutil, pwd, grp
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-
-USER_TOKEN_PREFIX = 'User TPM Token '
-TMP_CHAPS_DIR = '/tmp/chaps'
-CHAPS_DIR_PERM = 0o750
-SYSTEM_TOKEN_NAME = 'System TPM Token'
-SYSTEM_TOKEN_DIR = '/var/lib/chaps'
-INVALID_SLOT_ID = '100'
-
-
-def __run_cmd(cmd, ignore_status=False):
-    """Runs a command and returns the output from both stdout and stderr."""
-    return utils.system_output(cmd + ' 2>&1', retain_output=True,
-                               ignore_status=ignore_status).strip()
-
-def __get_token_paths(exclude_system_token):
-    """Return a list with a path for each PKCS #11 token currently loaded."""
-    token_paths = []
-    for line in __run_cmd('chaps_client --list').split('\n'):
-        match = re.search(r'Slot \d+: (/.*)$', line)
-        if match:
-            if exclude_system_token and match.group(1) == SYSTEM_TOKEN_DIR:
-                continue
-            token_paths.append(match.group(1))
-    return token_paths
-
-def __get_pkcs11_file_list(token_path):
-    """Return string with PKCS#11 file paths and their associated metadata."""
-    find_args = '-printf "\'%p\', \'%u:%g\', 0%m\n"'
-    file_list_output = __run_cmd('find %s ' % token_path + find_args)
-    return file_list_output
-
-def __get_token_slot_by_path(token_path):
-    token_list = __run_cmd('p11_replay --list_tokens')
-    for line in token_list.split('\n'):
-        match = re.search(r'^Slot (\d+): ' + token_path, line)
-        if not match:
-            continue
-        return match.group(1)
-    return INVALID_SLOT_ID
-
-def __verify_tokenname(token_path):
-    """Verify that the TPM token name is correct."""
-    # The token path is expected to be of the form:
-    # /run/daemon-store/chaps/<obfuscated_user_id>
-    match = re.search(r'/run/daemon-store/chaps/(.*)', token_path)
-    if not match:
-        return False
-    obfuscated_user = match.group(1)
-    # We expect the token label to contain first 16 characters of the obfuscated
-    # user id. This is the same value we extracted from |token_path|.
-    expected_user_token_label = USER_TOKEN_PREFIX + obfuscated_user[:16]
-    # The p11_replay tool will list tokens in the following form:
-    # Slot 1: <token label>
-    token_list = __run_cmd('p11_replay --list_tokens')
-    for line in token_list.split('\n'):
-        match = re.search(r'^Slot \d+: (.*)$', line)
-        if not match:
-            continue
-        token_label = match.group(1).rstrip()
-        if (token_label == expected_user_token_label):
-            return True
-        # Ignore the system token label.
-        if token_label == SYSTEM_TOKEN_NAME:
-            continue
-        logging.error('Unexpected token label: |%s|', token_label)
-    logging.error('Invalid or missing PKCS#11 token label!')
-    return False
-
-def __verify_permissions(token_path):
-    """Verify that the permissions on the initialized token dir are correct."""
-    # List of 3-tuples consisting of (path, user:group, octal permissions).
-    # Can be generated (for example), by:
-    # find <token_path>/chaps -printf "'%p', '%u:%g', 0%m\n"
-    expected_permissions = [
-        (token_path, 'chaps:chronos-access', CHAPS_DIR_PERM),
-        ('%s/database' % token_path, 'chaps:chronos-access', CHAPS_DIR_PERM)]
-    for item in expected_permissions:
-        path = item[0]
-        (user, group) = item[1].split(':')
-        perms = item[2]
-        stat_buf = os.lstat(path)
-        if not stat_buf:
-            logging.error('Could not stat %s while checking for permissions.',
-                          path)
-            return False
-        # Check ownership.
-        path_user = pwd.getpwuid(stat_buf.st_uid).pw_name
-        path_group = grp.getgrgid(stat_buf.st_gid).gr_name
-        if path_user != user or path_group != group:
-            logging.error('Ownership of %s does not match! Got = (%s, %s)'
-                          ', Expected = (%s, %s)', path, path_user, path_group,
-                          user, group)
-            return False
-
-        # Check permissions.
-        path_perms = stat.S_IMODE(stat_buf.st_mode)
-        if path_perms != perms:
-            logging.error('Permissions for %s do not match! (Got = %s'
-                          ', Expected = %s)', path, oct(path_perms), oct(perms))
-            return False
-
-    return True
-
-def verify_pkcs11_initialized():
-    """Checks if the PKCS#11 token is initialized properly."""
-    token_path_list = __get_token_paths(exclude_system_token=True)
-    if len(token_path_list) != 1:
-        logging.error('Expecting a single signed-in user with a token.')
-        return False
-
-    verify_cmd = ('cryptohome --action=pkcs11_is_user_token_ok')
-    __run_cmd(verify_cmd)
-
-    verify_result = True
-    # Do additional sanity tests.
-    if not __verify_tokenname(token_path_list[0]):
-        logging.error('Verification of token name failed!')
-        verify_result = False
-    if not __verify_permissions(token_path_list[0]):
-        logging.error('PKCS#11 file list:\n%s',
-                      __get_pkcs11_file_list(token_path_list[0]))
-        logging.error(
-            'Verification of PKCS#11 subsystem and token permissions failed!')
-        verify_result = False
-    return verify_result
-
-def load_p11_test_token(auth_data='1234'):
-    """Loads the test token onto a slot.
-
-    @param auth_data: The authorization data to use for the token.
-    """
-    utils.system('sudo chaps_client --load --path=%s --auth="%s"' %
-                 (TMP_CHAPS_DIR, auth_data))
-
-def change_p11_test_token_auth_data(auth_data, new_auth_data):
-    """Changes authorization data for the test token.
-
-    @param auth_data: The current authorization data.
-    @param new_auth_data: The new authorization data.
-    """
-    utils.system('sudo chaps_client --change_auth --path=%s --auth="%s" '
-                 '--new_auth="%s"' % (TMP_CHAPS_DIR, auth_data, new_auth_data))
-
-def unload_p11_test_token():
-    """Unloads a loaded test token."""
-    utils.system('sudo chaps_client --unload --path=%s' % TMP_CHAPS_DIR)
-
-def copytree_with_ownership(src, dst):
-    """Like shutil.copytree but also copies owner and group attributes.
-    @param src: Source directory.
-    @param dst: Destination directory.
-    """
-    utils.system('cp -rp %s %s' % (src, dst))
-
-def setup_p11_test_token(unload_user_tokens, auth_data='1234'):
-    """Configures a PKCS #11 token for testing.
-
-    Any existing test token will be automatically cleaned up.
-
-    @param unload_user_tokens: Whether to unload all user tokens.
-    @param auth_data: Initial token authorization data.
-    """
-    cleanup_p11_test_token()
-    if unload_user_tokens:
-        for path in __get_token_paths(exclude_system_token=False):
-            utils.system('sudo chaps_client --unload --path=%s' % path)
-    os.makedirs(TMP_CHAPS_DIR)
-    uid = pwd.getpwnam('chaps')[2]
-    gid = grp.getgrnam('chronos-access')[2]
-    os.chown(TMP_CHAPS_DIR, uid, gid)
-    os.chmod(TMP_CHAPS_DIR, CHAPS_DIR_PERM)
-    load_p11_test_token(auth_data)
-    unload_p11_test_token()
-    copytree_with_ownership(TMP_CHAPS_DIR, '%s_bak' % TMP_CHAPS_DIR)
-
-def restore_p11_test_token():
-    """Restores a PKCS #11 test token to its initial state."""
-    shutil.rmtree(TMP_CHAPS_DIR)
-    copytree_with_ownership('%s_bak' % TMP_CHAPS_DIR, TMP_CHAPS_DIR)
-
-def get_p11_test_token_db_path():
-    """Returns the test token database path."""
-    return '%s/database' % TMP_CHAPS_DIR
-
-def verify_p11_test_token():
-    """Verifies that a test token is working and persistent."""
-    output = __run_cmd('p11_replay --generate --replay_wifi',
-                       ignore_status=True)
-    if not re.search('Sign: CKR_OK', output):
-        print(output, file=sys.stderr)
-        return False
-    unload_p11_test_token()
-    load_p11_test_token()
-    output = __run_cmd('p11_replay --replay_wifi --cleanup',
-                       ignore_status=True)
-    if not re.search('Sign: CKR_OK', output):
-        print(output, file=sys.stderr)
-        return False
-    return True
-
-def cleanup_p11_test_token():
-    """Deletes the test token."""
-    unload_p11_test_token()
-    shutil.rmtree(TMP_CHAPS_DIR, ignore_errors=True)
-    shutil.rmtree('%s_bak' % TMP_CHAPS_DIR, ignore_errors=True)
-
-def wait_for_pkcs11_token():
-    """Waits for the PKCS #11 token to be available.
-
-    This should be called only after a login and is typically called immediately
-    after a login.
-
-    Returns:
-        True if the token is available.
-    """
-    try:
-        utils.poll_for_condition(
-            lambda: utils.system('cryptohome --action=pkcs11_is_user_token_ok',
-                                 ignore_status=True) == 0,
-            desc='PKCS #11 token.',
-            timeout=300)
-    except utils.TimeoutError:
-        return False
-    return True
-
-def __p11_replay_on_user_token(extra_args=''):
-    """Executes a typical command replay on the current user token.
-
-    Args:
-        extra_args: Additional arguments to pass to p11_replay.
-
-    Returns:
-        The command output.
-    """
-    if not wait_for_pkcs11_token():
-       raise error.TestError('Timeout while waiting for pkcs11 token')
-    return __run_cmd('p11_replay --slot=%s %s'
-                     % (__get_token_slot_by_path(USER_TOKEN_PREFIX),
-                        extra_args),
-                     ignore_status=True)
-
-def inject_and_test_key():
-    """Injects a key into a PKCS #11 token and tests that it can sign."""
-    output = __p11_replay_on_user_token('--replay_wifi --inject')
-    return re.search('Sign: CKR_OK', output)
-
-def test_and_cleanup_key():
-    """Tests a PKCS #11 key before deleting it."""
-    output = __p11_replay_on_user_token('--replay_wifi --cleanup')
-    return re.search('Sign: CKR_OK', output)
-
-def generate_user_key():
-    """Generates a key in the current user token."""
-    output = __p11_replay_on_user_token('--replay_wifi --generate')
-    return re.search('Sign: CKR_OK', output)
-
diff --git a/client/cros/power/OWNERS b/client/cros/power/OWNERS
new file mode 100644
index 0000000..850e02e
--- /dev/null
+++ b/client/cros/power/OWNERS
@@ -0,0 +1,2 @@
+include /POWER_OWNERS
+include ../OWNERS
diff --git a/client/cros/power/force_discharge_utils.py b/client/cros/power/force_discharge_utils.py
new file mode 100644
index 0000000..90ca1e8
--- /dev/null
+++ b/client/cros/power/force_discharge_utils.py
@@ -0,0 +1,127 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helper class for power autotests that force DUT to discharge with EC."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import logging
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros import ec
+from autotest_lib.client.cros.power import power_utils
+from six.moves import range
+
+_FORCE_DISCHARGE_SETTINGS = ['false', 'true', 'optional']
+
+
+def _parse(force_discharge):
+    """
+    Parse and return force discharge setting.
+
+    @param force_discharge: setting that controls whether to tell the EC to
+            discharge the battery even when the charger is plugged in. 'false'
+            means do not force discharge; 'true' means force discharge and
+            raise an error when it fails; 'optional' means force discharge when
+            possible but do not raise an error when it fails, which is
+            friendlier to devices without a battery.
+
+    @return: string representing a valid force discharge setting.
+
+    @raise error.TestError: for invalid force discharge setting.
+
+    """
+    setting = str(force_discharge).lower()
+    if setting not in _FORCE_DISCHARGE_SETTINGS:
+        raise error.TestError(
+                'Force discharge setting \'%s\' needs to be one of %s.' %
+                (str(force_discharge), _FORCE_DISCHARGE_SETTINGS))
+    return setting
+
+
+def _wait_for_battery_discharge(status):
+    """
+    Poll every 100 ms, for up to 2 seconds, until the battery reports that it
+    is discharging. This normally takes about 350 ms.
+
+    @param status: DUT power status object.
+
+    @return: boolean indicating force discharge success.
+    """
+    for _ in range(20):
+        status.refresh()
+        if status.battery_discharging():
+            return True
+        time.sleep(0.1)
+    return False
+
+
+def process(force_discharge, status):
+    """
+    Perform force discharge steps.
+
+    @param force_discharge: setting that controls whether to tell the EC to
+            discharge the battery even when the charger is plugged in. 'false'
+            means do not force discharge; 'true' means force discharge and
+            raise an error when it fails; 'optional' means force discharge when
+            possible but do not raise an error when it fails, which is
+            friendlier to devices without a battery.
+    @param status: DUT power status object.
+
+    @return: bool indicating whether the force discharge steps succeeded. Note
+            that the DUT cannot force discharge if it is not connected to AC.
+
+    @raise error.TestError: for invalid force discharge setting.
+    @raise error.TestNAError: when force_discharge is 'true' and the DUT is
+            incapable of forcing discharge.
+    @raise error.TestError: when force_discharge is 'true' and the DUT command
+            to force discharge fails.
+    """
+    force_discharge = _parse(force_discharge)
+
+    if force_discharge == 'true':
+        if not status.battery:
+            raise error.TestNAError('DUT does not have a battery. '
+                                    'Could not force discharge.')
+        if not ec.has_cros_ec():
+            raise error.TestNAError('DUT does not have a CrOS EC. '
+                                    'Could not force discharge.')
+        if not power_utils.charge_control_by_ectool(False):
+            raise error.TestError('Could not run battery force discharge.')
+        if not _wait_for_battery_discharge(status):
+            logging.warning('Battery does not report discharging state.')
+        return True
+    elif force_discharge == 'optional':
+        if not status.battery:
+            logging.warning('DUT does not have a battery. '
+                            'Not forcing discharge.')
+            return False
+        if not ec.has_cros_ec():
+            logging.warning('DUT does not have a CrOS EC. '
+                            'Not forcing discharge.')
+            return False
+        if not power_utils.charge_control_by_ectool(False):
+            logging.warning('Could not run battery force discharge. '
+                            'Not forcing discharge.')
+            return False
+        if not _wait_for_battery_discharge(status):
+            logging.warning('Battery does not report discharging state.')
+        return True
+    elif force_discharge == 'false':
+        return False
+
+
+def restore(force_discharge_success):
+    """
+    Set DUT back to charging.
+
+    @param force_discharge_success: whether the DUT previously forced discharge
+            successfully; if True, set the DUT back to charging.
+    """
+    if force_discharge_success:
+        if not power_utils.charge_control_by_ectool(True):
+            logging.warning('Cannot restore from force discharge.')
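
A minimal usage sketch of the helpers above, assuming they are imported as
force_discharge_utils inside a client power test that already holds a
power_status status object (the surrounding test code here is hypothetical):

from autotest_lib.client.cros.power import force_discharge_utils
from autotest_lib.client.cros.power import power_status

# Hypothetical test body: force discharge during the measurement, then restore.
status = power_status.get_status()
discharged = force_discharge_utils.process('optional', status)
try:
    pass  # run the actual power measurement here
finally:
    # Re-enable charging only if force discharge actually took effect.
    force_discharge_utils.restore(discharged)

With 'optional', a device without a battery or CrOS EC only logs a warning and
returns False, so restore() becomes a no-op.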
diff --git a/client/cros/power/power_dashboard.py b/client/cros/power/power_dashboard.py
index 9c4b6dd..7e9ac92 100644
--- a/client/cros/power/power_dashboard.py
+++ b/client/cros/power/power_dashboard.py
@@ -1,7 +1,12 @@
+# Lint as: python2, python3
 # Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import collections
 import json
 import logging
@@ -10,8 +15,8 @@
 import os
 import re
 import time
-import urllib
-import urllib2
+from six.moves import range
+from six.moves import urllib
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
@@ -19,6 +24,7 @@
 from autotest_lib.client.common_lib.cros import retry
 from autotest_lib.client.cros.power import power_status
 from autotest_lib.client.cros.power import power_utils
+from six.moves import zip
 
 _HTML_CHART_STR = '''
 <!DOCTYPE html>
@@ -27,12 +33,13 @@
 <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js">
 </script>
 <script type="text/javascript">
-    google.charts.load('current', {{'packages':['corechart']}});
+    google.charts.load('current', {{'packages':['corechart', 'table']}});
     google.charts.setOnLoadCallback(drawChart);
     function drawChart() {{
-        var data = google.visualization.arrayToDataTable([
+        var dataArray = [
 {data}
-        ]);
+        ];
+        var data = google.visualization.arrayToDataTable(dataArray);
         var numDataCols = data.getNumberOfColumns() - 1;
         var unit = '{unit}';
         var options = {{
@@ -56,6 +63,15 @@
                         '#f0f4c3', '#c8e6c9', '#cddc39', '#81c784', '#43a047'];
             }}
             chart = new google.visualization.SteppedAreaChart(element);
+        }} else if (data.getNumberOfRows() == 2 && unit == 'point') {{
+            var newArray = [['key', 'value']];
+            for (var i = 1; i < dataArray[0].length; i++) {{
+                newArray.push([dataArray[0][i], dataArray[1][i]]);
+            }}
+            data = google.visualization.arrayToDataTable(newArray);
+            delete options.width;
+            delete options.height;
+            chart = new google.visualization.Table(element);
         }} else {{
             chart = new google.visualization.LineChart(element);
         }}
@@ -69,17 +85,27 @@
 </html>
 '''
 
-_HTML_LINK_STR = '''
-<!DOCTYPE html>
-<html>
-<body>
-<a href="http://chrome-power.appspot.com/dashboard?board={board}&test={test}&datetime={datetime}">
-  Link to power dashboard
-</a>
-</body>
-</html>
+_HWID_LINK_STR = '''
+<a href="http://goto.google.com/pdash-hwid?query={hwid}">
+  Link to hwid lookup.
+</a><br />
 '''
 
+_PDASH_LINK_STR = '''
+<a href="http://chrome-power.appspot.com/dashboard?board={board}&test={test}&datetime={datetime}">
+  Link to power dashboard.
+</a><br />
+'''
+
+_TDASH_LINK_STR = '''
+<a href="http://chrome-power.appspot.com/thermal_dashboard?note={note}">
+  Link to thermal dashboard.
+</a><br />
+'''
+
+# Global flag to avoid a duplicate dashboard link in BaseDashboard._save_html.
+generated_dashboard_link = False
+
 
 class BaseDashboard(object):
     """Base class that implements method for prepare and upload data to power
@@ -115,11 +141,12 @@
             A dictionary of powerlog
         """
         powerlog_dict = {
-            'format_version': 5,
-            'timestamp': self._start_ts,
-            'test': self._testname,
-            'dut': self._create_dut_info_dict(raw_measurement['data'].keys()),
-            'power': raw_measurement,
+                'format_version': 6,
+                'timestamp': self._start_ts,
+                'test': self._testname,
+                'dut': self._create_dut_info_dict(
+                        list(raw_measurement['data'].keys())),
+                'power': raw_measurement,
         }
 
         return powerlog_dict
@@ -152,9 +179,37 @@
         json_str = json.dumps(powerlog_dict, indent=4, separators=(',', ': '),
                               ensure_ascii=False)
         json_str = utils.strip_non_printable(json_str)
-        with file(filename, 'a') as f:
+        with open(filename, 'a') as f:
             f.write(json_str)
 
+    def _generate_dashboard_link(self, powerlog_dict):
+        """Generate link to power and thermal dashboard"""
+        # Use global variable to generate this only once.
+        global generated_dashboard_link
+        if generated_dashboard_link:
+            return ''
+        generated_dashboard_link = True
+
+        board = powerlog_dict['dut']['board']
+        test = powerlog_dict['test']
+        datetime = time.strftime('%Y%m%d%H%M',
+                                 time.gmtime(powerlog_dict['timestamp']))
+        hwid = powerlog_dict['dut']['sku']['hwid']
+        note = powerlog_dict['dut']['note']
+
+        html_str = '<!DOCTYPE html><html><body>'
+        html_str += _HWID_LINK_STR.format(hwid=hwid)
+        html_str += _PDASH_LINK_STR.format(board=board,
+                                           test=test,
+                                           datetime=datetime)
+
+        if re.match(r'ThermalQual\.(full|lab).*', note):
+            html_str += _TDASH_LINK_STR.format(note=note)
+
+        html_str += '</body></html>'
+
+        return html_str
+
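
For illustration, a rough sketch of the link block produced by
_generate_dashboard_link() for a hypothetical run (the board, test, hwid,
timestamp, and note values below are made up):

import time

from autotest_lib.client.cros.power import power_dashboard

# Made-up inputs for illustration only.
hwid = 'SAMPLEHWID A1B-C2D'
board, test = 'octopus', 'power_ThermalLoad'
ts = 1640995200.0  # 2022-01-01 00:00 UTC -> datetime string '202201010000'
note = 'ThermalQual.full.sample'

html = power_dashboard._HWID_LINK_STR.format(hwid=hwid)
html += power_dashboard._PDASH_LINK_STR.format(
        board=board, test=test,
        datetime=time.strftime('%Y%m%d%H%M', time.gmtime(ts)))
# The thermal link is added only when the note matches ThermalQual.(full|lab).
html += power_dashboard._TDASH_LINK_STR.format(note=note)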
     def _save_html(self, powerlog_dict, resultsdir, filename='power_log.html'):
         """Convert powerlog dict to chart in HTML page and append to
         <resultsdir>/<filename>.
@@ -167,19 +222,11 @@
             resultsdir: directory to save HTML page
             filename: filename to append to
         """
-        # Generate link to power dashboard,
-        board = powerlog_dict['dut']['board']
-        test = powerlog_dict['test']
-        datetime = time.strftime('%Y%m%d%H%M',
-                                 time.gmtime(powerlog_dict['timestamp']))
-
-        html_str = _HTML_LINK_STR.format(board=board,
-                                         test=test,
-                                         datetime=datetime)
+        html_str = self._generate_dashboard_link(powerlog_dict)
 
         # Create dict from type to sorted list of rail names.
         rail_type = collections.defaultdict(list)
-        for r, t in powerlog_dict['power']['type'].iteritems():
+        for r, t in powerlog_dict['power']['type'].items():
             rail_type[t].append(r)
         for t in rail_type:
             rail_type[t] = sorted(rail_type[t])
@@ -208,7 +255,7 @@
         if not os.path.exists(resultsdir):
             raise error.TestError('resultsdir %s does not exist.' % resultsdir)
         filename = os.path.join(resultsdir, filename)
-        with file(filename, 'a') as f:
+        with open(filename, 'a') as f:
             f.write(html_str)
 
     def _upload(self, powerlog_dict, uploadurl):
@@ -220,13 +267,16 @@
         """
         json_str = json.dumps(powerlog_dict, ensure_ascii=False)
         data_obj = {'data': utils.strip_non_printable(json_str)}
-        encoded = urllib.urlencode(data_obj)
-        req = urllib2.Request(uploadurl, encoded)
+        encoded = urllib.parse.urlencode(data_obj).encode('utf-8')
+        req = urllib.request.Request(uploadurl, encoded)
 
-        @retry.retry(urllib2.URLError, raiselist=[urllib2.HTTPError],
-                     timeout_min=5.0, delay_sec=1, backoff=2)
+        @retry.retry(urllib.error.URLError,
+                     raiselist=[urllib.error.HTTPError],
+                     timeout_min=5.0,
+                     delay_sec=1,
+                     backoff=2)
         def _do_upload():
-            urllib2.urlopen(req)
+            urllib.request.urlopen(req)
 
         _do_upload()
 
@@ -257,7 +307,7 @@
         # Create list of check point event tuple.
         # Tuple format: (checkpoint_name:str, event_time:float, is_start:bool)
         checkpoint_event_list = []
-        for name, intervals in checkpoint_dict.iteritems():
+        for name, intervals in checkpoint_dict.items():
             for start, finish in intervals:
                 checkpoint_event_list.append((name, start, True))
                 checkpoint_event_list.append((name, finish, False))
@@ -265,7 +315,7 @@
         checkpoint_event_list = sorted(checkpoint_event_list,
                                        key=operator.itemgetter(1))
 
-        # Add dummy check point at 1e9 seconds.
+        # Add a placeholder checkpoint at 1e9 seconds.
         checkpoint_event_list.append(('dummy', 1e9, True))
 
         interval_set = set()
@@ -352,27 +402,30 @@
             board += '_hammer'
 
         dut_info_dict = {
-            'board': board,
-            'version': {
-                'hw': utils.get_hardware_revision(),
-                'milestone': lsbrelease_utils.get_chromeos_release_milestone(),
-                'os': lsbrelease_utils.get_chromeos_release_version(),
-                'channel': lsbrelease_utils.get_chromeos_channel(),
-                'firmware': utils.get_firmware_version(),
-                'ec': utils.get_ec_version(),
-                'kernel': utils.get_kernel_version(),
-            },
-            'sku': {
-                'cpu': utils.get_cpu_name(),
-                'memory_size': utils.get_mem_total_gb(),
-                'storage_size': utils.get_disk_size_gb(utils.get_root_device()),
-                'display_resolution': utils.get_screen_resolution(),
-            },
-            'ina': {
-                'version': 0,
-                'ina': power_rails,
-            },
-            'note': self._note,
+                'board': board,
+                'version': {
+                        'hw': utils.get_hardware_revision(),
+                        'milestone':
+                        lsbrelease_utils.get_chromeos_release_milestone(),
+                        'os': lsbrelease_utils.get_chromeos_release_version(),
+                        'channel': lsbrelease_utils.get_chromeos_channel(),
+                        'firmware': utils.get_firmware_version(),
+                        'ec': utils.get_ec_version(),
+                        'kernel': utils.get_kernel_version(),
+                },
+                'sku': {
+                        'cpu': utils.get_cpu_name(),
+                        'memory_size': utils.get_mem_total_gb(),
+                        'storage_size':
+                        utils.get_disk_size_gb(utils.get_root_device()),
+                        'display_resolution': utils.get_screen_resolution(),
+                        'hwid': utils.get_hardware_id(),
+                },
+                'ina': {
+                        'version': 0,
+                        'ina': power_rails,
+                },
+                'note': self._note,
         }
 
         if power_utils.has_battery():
@@ -437,7 +490,7 @@
             raw measurement dictionary or None if no readings
         """
         if len(self._logger.readings) == 0:
-            logging.warn('No readings in logger ... ignoring')
+            logging.warning('No readings in logger ... ignoring')
             return None
 
         power_dict = collections.defaultdict(dict, {
@@ -498,10 +551,12 @@
     See power_SpeedoMeter2 for implementation example.
     """
 
-    def __init__(self, start_ts, end_ts):
+    def __init__(self, start_ts, end_ts=None):
         # Do not call parent constructor to avoid making a new thread.
         self.times = [start_ts]
-        self._duration_secs = end_ts - start_ts
+        self._start_ts = start_ts
+        self._fixed_end_ts = end_ts  # prefer this (end time set by tests)
+        self._updating_end_ts = time.time()  # updated when a new item is added
         self.keys = []
         self.values = []
         self.units = []
@@ -529,6 +584,17 @@
         self.values.append(value)
         self.units.append(unit)
         self.types.append(type_)
+        self._updating_end_ts = time.time()
+
+    def set_end(self, end_ts):
+        """Set the end timestamp.
+
+        If the end timestamp is not set explicitly by tests, use the timestamp
+        of the last added item instead.
+
+        @param end_ts: end timestamp for KeyvalLogger.
+        """
+        self._fixed_end_ts = end_ts
 
     def calc(self, mtype=None):
         return {}
@@ -542,16 +608,26 @@
 
     def _convert(self):
         """Convert KeyvalLogger data to power dict."""
-        power_dict =  {
-            # 2 samples to show flat value spanning across duration of the test.
-            'sample_count': 2,
-            'sample_duration': self._logger._duration_secs,
-            'average': dict(zip(self._logger.keys, self._logger.values)),
-            'data': dict(zip(self._logger.keys,
-                             ([v, v] for v in self._logger.values))),
-            'unit': dict(zip(self._logger.keys, self._logger.units)),
-            'type': dict(zip(self._logger.keys, self._logger.types)),
-            'checkpoint': [[self._testname], [self._testname]],
+        power_dict = {
+                # Two samples to show a flat value spanning the test duration.
+                'sample_count':
+                2,
+                'sample_duration':
+                (self._logger._fixed_end_ts -
+                 self._logger._start_ts) if self._logger._fixed_end_ts else
+                (self._logger._updating_end_ts - self._logger._start_ts),
+                'average':
+                dict(list(zip(self._logger.keys, self._logger.values))),
+                'data':
+                dict(
+                        list(
+                                zip(self._logger.keys,
+                                    ([v, v] for v in self._logger.values)))),
+                'unit':
+                dict(list(zip(self._logger.keys, self._logger.units))),
+                'type':
+                dict(list(zip(self._logger.keys, self._logger.types))),
+                'checkpoint': [[self._testname], [self._testname]],
         }
         return power_dict
 
@@ -583,6 +659,8 @@
 
     def _convert(self):
         power_dict = super(CPUStatsLoggerDashboard, self)._convert()
+        if not power_dict or not power_dict['data']:
+            return None
         remove_rail = []
         for rail in power_dict['data']:
             if rail.startswith('wavg_cpu'):
@@ -663,6 +741,19 @@
         self._unit = 'rpm'
         self._type = 'fan'
 
+
+class FreeMemoryLoggerDashboard(MeasurementLoggerDashboard):
+    """Dashboard class for power_status.FreeMemoryLogger."""
+
+    def __init__(self, logger, testname, resultsdir, uploadurl, note):
+        # Don't upload to dashboard
+        uploadurl = None
+        super(FreeMemoryLoggerDashboard,
+              self).__init__(logger, testname, resultsdir, uploadurl, note)
+        self._unit = 'point'
+        self._type = 'mem'
+
+
 dashboard_factory = None
 def get_dashboard_factory():
     global dashboard_factory
@@ -674,12 +765,13 @@
     """Class to generate client test dashboard object from logger."""
 
     loggerToDashboardDict = {
-        power_status.CPUStatsLogger: CPUStatsLoggerDashboard,
-        power_status.PowerLogger:    PowerLoggerDashboard,
-        power_status.TempLogger:     TempLoggerDashboard,
-        power_status.VideoFpsLogger: VideoFpsLoggerDashboard,
-        power_status.FanRpmLogger:   FanRpmLoggerDashboard,
-        KeyvalLogger:                KeyvalLoggerDashboard,
+            power_status.CPUStatsLogger: CPUStatsLoggerDashboard,
+            power_status.PowerLogger: PowerLoggerDashboard,
+            power_status.TempLogger: TempLoggerDashboard,
+            power_status.VideoFpsLogger: VideoFpsLoggerDashboard,
+            power_status.FanRpmLogger: FanRpmLoggerDashboard,
+            power_status.FreeMemoryLogger: FreeMemoryLoggerDashboard,
+            KeyvalLogger: KeyvalLoggerDashboard,
     }
 
     def registerDataType(self, logger_type, dashboard_type):
@@ -697,3 +789,33 @@
             uploadurl = 'http://chrome-power.appspot.com/rapl'
         dashboard = self.loggerToDashboardDict[type(logger)]
         return dashboard(logger, testname, resultsdir, uploadurl, note)
+
+
+def generate_parallax_report(output_dir):
+    """Generate parallax report in the result directory."""
+    parallax_url = 'http://crospower.page.link/parallax'
+    local_dir = '/usr/local'
+    parallax_tar = os.path.join(local_dir, 'parallax.tar.xz')
+    parallax_dir = os.path.join(local_dir, 'report_analysis')
+    parallax_exe = os.path.join(parallax_dir, 'process.py')
+    results_dir = os.path.join(output_dir, 'results')
+    parallax_html = os.path.join(results_dir, 'parallax.html')
+
+    # Download the source
+    cmd = ' '.join(['wget', parallax_url, '-O', parallax_tar])
+    utils.run(cmd)
+
+    # Extract the tool
+    cmd = ' '.join(['tar', 'xf', parallax_tar, '-C', local_dir])
+    utils.run(cmd)
+
+    # Run the tool
+    cmd = ' '.join([
+            'python', parallax_exe, '-t', 'PowerQual', '-p', output_dir, '-o',
+            parallax_html
+    ])
+    utils.run(cmd)
+
+    # Clean up the tool
+    cmd = ' '.join(['rm', '-rf', parallax_tar, parallax_dir])
+    utils.run(cmd)
diff --git a/client/cros/power/power_rapl.py b/client/cros/power/power_rapl.py
index 8d29a97..eb6a206 100644
--- a/client/cros/power/power_rapl.py
+++ b/client/cros/power/power_rapl.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -248,7 +249,7 @@
             with open(root + '/name', 'r') as fn:
                 name = fn.read().rstrip()
                 rapl_map[name] = root
-    powercaps = [Powercap(name, root) for name, root in rapl_map.iteritems()]
+    powercaps = [Powercap(name, root) for name, root in rapl_map.items()]
 
     pl1_path = os.path.join(powercap, 'intel-rapl:0',
                             'constraint_0_power_limit_uw')
diff --git a/client/cros/power/power_status.py b/client/cros/power/power_status.py
index 681506e..6f40c30 100644
--- a/client/cros/power/power_status.py
+++ b/client/cros/power/power_status.py
@@ -1,7 +1,12 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import collections
 import contextlib
 import ctypes
@@ -25,14 +30,12 @@
 from autotest_lib.client.common_lib.utils import poll_for_condition_ex
 from autotest_lib.client.cros import kernel_trace
 from autotest_lib.client.cros.power import power_utils
+from collections import namedtuple
+from six.moves import range
+from six.moves import zip
 
 BatteryDataReportType = autotest_enum.AutotestEnum('CHARGE', 'ENERGY')
 
-# For devices whose full capacity is significantly lower than design full
-# capacity, scale down their design full capacity.
-BATTERY_DESIGN_FULL_SCALE = {'jinlon': 0.95, # b/161307060
-                             'berknip': 0.94, # b/172625511
-                             }
 # battery data reported at 1e6 scale
 BATTERY_DATA_SCALE = 1e6
 # number of times to retry reading the battery in the case of bad data
@@ -56,7 +59,7 @@
         """
         Reset all class fields to None to mark their status as unknown.
         """
-        for field in self.fields.iterkeys():
+        for field in self.fields.keys():
             setattr(self, field, None)
 
 
@@ -79,7 +82,7 @@
     def read_all_vals(self):
         """Read all values.
         """
-        for field, prop in self.fields.iteritems():
+        for field, prop in self.fields.items():
             if prop[0]:
                 val = self.read_val(prop[0], prop[1])
                 setattr(self, field, val)
@@ -123,6 +126,26 @@
         # Browse the thermal folder for trip point fields.
         self.num_trip_points = 0
 
+        if path is None:
+            path = ThermalStatACPI.path
+
+        self.zones = {}
+        thermal_zones = glob.glob(path)
+        for (i, zone) in enumerate(thermal_zones):
+            desc_path = os.path.join(zone, 'device/description')
+            desc = ''
+            if os.path.exists(desc_path):
+                desc = utils.read_one_line(desc_path)
+
+            # If there's no description, use the type to create one.
+            if desc == '':
+                domain_path = os.path.join(zone, 'type')
+                domain = utils.read_one_line(domain_path)
+                desc = '%s%d' % (domain, i)
+
+            desc = desc.replace(' ', '_')
+            self.zones[desc] = os.path.join(zone, 'temp')
+
         thermal_fields = glob.glob(path + '/*')
         for file in thermal_fields:
             field = file[len(path + '/'):]
@@ -157,7 +180,8 @@
 
 class ThermalStatHwmon(DevStat):
     """
-    hwmon-based thermal status.
+    hwmon-based thermal status. Excludes overlaps with thermal zones by default
+    since thermal zones generally provide a more usable description.
 
     Fields:
     int   <tname>_temp<num>_input: Current temperature in millidegrees Celsius
@@ -169,7 +193,17 @@
     path = '/sys/class/hwmon'
 
     thermal_fields = {}
-    def __init__(self, rootpath=None):
+
+    def __init__(self, rootpath=None, exclude_tz=True):
+        excluded_domains = set()
+        if exclude_tz:
+            thermal_zones = glob.glob('/sys/class/thermal/thermal_zone*')
+            for zone in thermal_zones:
+                domain_path = os.path.join(zone, 'type')
+                domain = utils.read_one_line(domain_path)
+
+                excluded_domains.add(domain)
+
         if not rootpath:
             rootpath = self.path
         for subpath1 in glob.glob('%s/hwmon*' % rootpath):
@@ -179,10 +213,13 @@
                     bname = os.path.basename(gpath)
                     field_path = os.path.join(subpath1, subpath2, bname)
 
-                    tname_path = os.path.join(os.path.dirname(gpath), "name")
-                    tname = utils.read_one_line(tname_path)
+                    domain_path = os.path.join(os.path.dirname(gpath), "name")
+                    domain = utils.read_one_line(domain_path)
 
-                    field_key = "%s_%s" % (tname, bname)
+                    if domain in excluded_domains:
+                        continue
+
+                    field_key = "%s_%s" % (domain, bname)
                     self.thermal_fields[field_key] = [field_path, int]
 
         super(ThermalStatHwmon, self).__init__(self.thermal_fields, rootpath)
@@ -255,34 +292,44 @@
     float charge_full_design: Full capacity by design [Ah]
     float charge_now:         Remaining charge [Ah]
     float current_now:        Battery discharge rate [A]
+    int   cycle_count:        Battery cycle count
     float energy:             Current battery charge [Wh]
     float energy_full:        Last full capacity reached [Wh]
     float energy_full_design: Full capacity by design [Wh]
     float energy_rate:        Battery discharge rate [W]
+    str   manufacturer:       Battery manufacturer
+    str   model_name:         Battery model name
     float power_now:          Battery discharge rate [W]
+    int   present:            Whether battery is present
     float remaining_time:     Remaining discharging time [h]
+    str   serial_number:      Battery serial number
+    str   status:             Charging status
     float voltage_min_design: Minimum voltage by design [V]
     float voltage_max_design: Maximum voltage by design [V]
     float voltage_now:        Voltage now [V]
     """
 
     battery_fields = {
-        'status':               ['status', str],
-        'charge_full':          ['charge_full', float],
-        'charge_full_design':   ['charge_full_design', float],
-        'charge_now':           ['charge_now', float],
-        'current_now':          ['current_now', float],
-        'voltage_min_design':   ['voltage_min_design', float],
-        'voltage_max_design':   ['voltage_max_design', float],
-        'voltage_now':          ['voltage_now', float],
-        'energy':               ['energy_now', float],
-        'energy_full':          ['energy_full', float],
-        'energy_full_design':   ['energy_full_design', float],
-        'power_now':            ['power_now', float],
-        'present':              ['present', int],
-        'energy_rate':          ['', ''],
-        'remaining_time':       ['', '']
-        }
+            'status': ['status', str],
+            'charge_full': ['charge_full', float],
+            'charge_full_design': ['charge_full_design', float],
+            'charge_now': ['charge_now', float],
+            'current_now': ['current_now', float],
+            'cycle_count': ['cycle_count', int],
+            'voltage_min_design': ['voltage_min_design', float],
+            'voltage_max_design': ['voltage_max_design', float],
+            'voltage_now': ['voltage_now', float],
+            'energy': ['energy_now', float],
+            'energy_full': ['energy_full', float],
+            'energy_full_design': ['energy_full_design', float],
+            'power_now': ['power_now', float],
+            'present': ['present', int],
+            'manufacturer': ['manufacturer', str],
+            'model_name': ['model_name', str],
+            'serial_number': ['serial_number', str],
+            'energy_rate': ['', ''],
+            'remaining_time': ['', '']
+    }
 
     def __init__(self, path=None):
         super(BatteryStat, self).__init__(self.battery_fields, path)
@@ -290,14 +337,14 @@
 
 
     def update(self):
-        for _ in xrange(BATTERY_RETRY_COUNT):
+        for _ in range(BATTERY_RETRY_COUNT):
             try:
                 self._read_battery()
                 return
             except error.TestError as e:
-                logging.warn(e)
-                for field, prop in self.battery_fields.iteritems():
-                    logging.warn(field + ': ' + repr(getattr(self, field)))
+                logging.warning(e)
+                for field, prop in self.battery_fields.items():
+                    logging.warning(field + ': ' + repr(getattr(self, field)))
                 continue
         raise error.TestError('Failed to read battery state')
 
@@ -319,12 +366,6 @@
             raise error.TestError('Failed to determine battery voltage')
 
         battery_design_full_scale = 1
-        model = utils.get_platform()
-        if model in BATTERY_DESIGN_FULL_SCALE:
-            battery_design_full_scale = BATTERY_DESIGN_FULL_SCALE.get(model)
-            logging.info(
-                    'Apply %f scale to design full battery capacity for model '
-                    '%s', battery_design_full_scale, model)
 
         # Since charge data is present, calculate parameters based upon
         # reported charge data.
@@ -390,10 +431,10 @@
             raise error.TestError('Unreasonable charge_now value')
 
 
-class LineStatDummy(DevStat):
+class LineStatPlaceholder(DevStat):
     """
-    Dummy line stat for devices which don't provide power_supply related sysfs
-    interface.
+    Placeholder line stat for devices which don't provide power_supply related
+    sysfs interface.
     """
     def __init__(self):
         self.online = True
@@ -485,7 +526,7 @@
         for path in self.linepower_path:
             self.linepower.append(LineStat(path))
         if not self.linepower:
-            self.linepower = [ LineStatDummy() ]
+            self.linepower = [ LineStatPlaceholder() ]
 
         temp_str = self.thermal.get_temps()
         if temp_str:
@@ -518,7 +559,7 @@
                 return True
 
         if not self.battery_path:
-            logging.warn('Unable to determine battery charge status')
+            logging.warning('Unable to determine battery charge status')
             return False
 
         return self.battery.status.rstrip() == 'Charging'
@@ -529,7 +570,7 @@
         Returns true if battery is currently discharging or false otherwise.
         """
         if not self.battery_path:
-            logging.warn('Unable to determine battery discharge status')
+            logging.warning('Unable to determine battery discharge status')
             return False
 
         return self.battery.status.rstrip() == 'Discharging'
@@ -539,7 +580,7 @@
         Returns true if battery is currently full or false otherwise.
         """
         if not self.battery_path:
-            logging.warn('Unable to determine battery fullness status')
+            logging.warning('Unable to determine battery fullness status')
             return False
 
         return self.battery.status.rstrip() == 'Full'
@@ -680,10 +721,10 @@
         """
         Turns a dict with absolute time values into a dict with percentages.
         """
-        total = sum(stats.itervalues())
+        total = sum(stats.values())
         if total == 0:
             return {k: 0 for k in stats}
-        return dict((k, v * 100.0 / total) for (k, v) in stats.iteritems())
+        return dict((k, v * 100.0 / total) for (k, v) in stats.items())
 
 
     @staticmethod
@@ -691,7 +732,7 @@
         """
         Returns a dict with value deltas from two dicts with matching keys.
         """
-        return dict((k, new[k] - old.get(k, 0)) for k in new.iterkeys())
+        return dict((k, new[k] - old.get(k, 0)) for k in new.keys())
 
 
     @staticmethod
@@ -739,11 +780,11 @@
         if self.incremental:
             stats = self.do_diff(stats, self._first_stats)
 
-        total = sum(stats.itervalues())
+        total = sum(stats.values())
         if total == 0:
             return None
 
-        return sum(float(k) * v / total for k, v in stats.iteritems())
+        return sum(float(k) * v / total for k, v in stats.items())
 
     def _supports_automatic_weighted_average(self):
         """
@@ -935,10 +976,10 @@
         Turns a dict with absolute time values into a dict with percentages.
         Ignore the |non_c0_stat_name| which is aggegate stat in the total count.
         """
-        total = sum(v for k, v in stats.iteritems() if k != self._non_c0_stat)
+        total = sum(v for k, v in stats.items() if k != self._non_c0_stat)
         if total == 0:
             return {k: 0 for k in stats}
-        return {k: v * 100.0 / total for k, v in stats.iteritems()}
+        return {k: v * 100.0 / total for k, v in stats.items()}
 
 
 class CPUIdleStats(CPUCStateStats):
@@ -1040,6 +1081,7 @@
                 'Silvermont':   self.SILVERMONT,
                 'Skylake':      self.BROADWELL,
                 'Tiger Lake':   self.BROADWELL,
+                'Alder Lake':   self.BROADWELL,
                 'Tremont':      self.GOLDMONT,
                 'Westmere':     self.NEHALEM,
                 }.get(cpu_uarch, None)
@@ -1067,7 +1109,7 @@
             packages.add(package)
 
             stats['C0_C1'] += utils.rdmsr(0x10, cpu) # TSC
-            for (state, msr) in self._platform_states.iteritems():
+            for (state, msr) in self._platform_states.items():
                 ticks = utils.rdmsr(msr, cpu)
                 stats[state] += ticks
                 stats['non-C0_C1'] += ticks
@@ -1244,7 +1286,8 @@
                         max_mhz = result[0]
                         continue
                 if min_mhz and max_mhz:
-                    for i in xrange(int(min_mhz), int(max_mhz) + 1):
+                    real_min_mhz = min(int(min_mhz), int(cur_mhz))
+                    for i in range(real_min_mhz, int(max_mhz) + 1):
                         if i % 100 in self._I915_FREQ_STEPS:
                             self._freqs.append(str(i))
 
@@ -1438,7 +1481,8 @@
     cpufreq_stat_class = CPUFreqStats
     # assumes cpufreq driver for CPU0 is the same as the others.
     cpufreq_driver = '/sys/devices/system/cpu/cpu0/cpufreq/scaling_driver'
-    if utils.read_one_line(cpufreq_driver) == 'intel_pstate':
+    if (os.path.exists(cpufreq_driver) and
+        utils.read_one_line(cpufreq_driver) == 'intel_pstate'):
         logging.debug('intel_pstate driver active')
         cpufreq_stat_class = CPUFreqStatsPState
 
@@ -1484,7 +1528,7 @@
             if stat_obj.name is 'gpu':
                 # TODO(tbroch) remove this once GPU freq stats have proved
                 # reliable
-                stats_secs = sum(stat_obj._stats.itervalues())
+                stats_secs = sum(stat_obj._stats.values())
                 if stats_secs < (tot_secs * 0.9) or \
                         stats_secs > (tot_secs * 1.1):
                     logging.warning('%s stats dont look right.  Not publishing.',
@@ -1701,7 +1745,7 @@
             start_time = self._start_time
 
         checkpoint_dict = {}
-        for tname, tlist in self.checkpoint_data.iteritems():
+        for tname, tlist in self.checkpoint_data.items():
             checkpoint_dict[tname] = [(tstart - start_time, tend - start_time)
                     for tstart, tend in tlist]
 
@@ -1715,7 +1759,7 @@
             fname: String, name of file to write results to
         """
         fname = os.path.join(resultsdir, fname)
-        with file(fname, 'wt') as f:
+        with open(fname, 'wt') as f:
             json.dump(self.checkpoint_data, f, indent=4, separators=(',', ': '))
 
     def load_checkpoint_data(self, resultsdir,
@@ -1733,8 +1777,9 @@
                                                  object_hook=to_checkpoint_data)
                 # Set start time to the earliest start timestamp in file.
                 self._start_time = min(
-                        ts_pair[0] for ts_pair in itertools.chain.from_iterable(
-                                self.checkpoint_data.itervalues()))
+                        ts_pair[0]
+                        for ts_pair in itertools.chain.from_iterable(
+                                list(self.checkpoint_data.values())))
         except Exception as exc:
             logging.warning('Failed to load checkpoint data from json file %s, '
                             'see exception: %s', fname, exc)
@@ -1749,7 +1794,7 @@
             fname: String, name of file to load results from
         """
         fname = os.path.join(resultsdir, fname)
-        with file(fname, 'r') as f:
+        with open(fname, 'r') as f:
             checkpoint_data = json.load(f)
         return checkpoint_data
 
@@ -1763,7 +1808,7 @@
         a defaultdict in CheckpointLogger data format
     """
     checkpoint_data = collections.defaultdict(list)
-    for tname, tlist in json_dict.iteritems():
+    for tname, tlist in json_dict.items():
         checkpoint_data[tname].extend([tuple(ts_pair) for ts_pair in tlist])
     return checkpoint_data
 
@@ -1882,8 +1927,9 @@
 
     def run(self):
         """Threads run method."""
-        loop = 0
+        loop = 1
         start_time = time.time()
+        time.sleep(self.seconds_period)
         while(not self.done):
             # TODO (dbasehore): We probably need proper locking in this file
             # since there have been race conditions with modifying and accessing
@@ -1972,7 +2018,8 @@
                               ', '.join(self.domains))
                 raise
 
-            for tname, tlist in self._checkpoint_logger.checkpoint_data.iteritems():
+            for tname, tlist in \
+                    self._checkpoint_logger.checkpoint_data.items():
                 if tname:
                     prefix = '%s_%s' % (tname, domain)
                 else:
@@ -1988,7 +2035,7 @@
                     # is not fixed.
                     try:
                         masks.append(numpy.logical_and(tstart < t, t < tend))
-                    except ValueError, e:
+                    except ValueError as e:
                         logging.debug('Error logging measurements: %s', str(e))
                         logging.debug('timestamps %d %s', t.len, t)
                         logging.debug('timestamp start, end %f %f', tstart, tend)
@@ -2073,20 +2120,23 @@
         self._stats = get_available_cpu_stats()
         self._stats.append(GPUFreqStats())
         self.domains = []
+        self._refresh_count = 0
+        self._last_wavg = collections.defaultdict(int)
+
+    def _set_domains(self):
+        self.domains = []
         for stat in self._stats:
             self.domains.extend([stat.name + '_' + str(state_name)
                                  for state_name in stat.refresh()])
             if stat.weighted_average():
                 self.domains.append('wavg_' + stat.name)
-        self._refresh_count = 0
-        self._last_wavg = collections.defaultdict(int)
 
     def refresh(self):
         self._refresh_count += 1
         count = self._refresh_count
         ret = []
         for stat in self._stats:
-            ret.extend(stat.refresh().values())
+            ret.extend(list(stat.refresh().values()))
             wavg = stat.weighted_average()
             if wavg:
                 if stat.incremental:
@@ -2100,6 +2150,15 @@
                     ret.append(wavg * count - last_wavg * (count - 1))
                 else:
                     ret.append(wavg)
+        if not self.domains:
+            self._set_domains()
+        elif len(self.domains) != len(ret):
+            # This may jumble the data, but that is better than an IndexError.
+            # Log the domain lists to help detect the root cause.
+            logging.warning('b:162610351 len(self.domains) != len(ret)')
+            logging.warning('old_domains: (%s)', ', '.join(self.domains))
+            self._set_domains()
+            logging.warning('new_domains: (%s)', ', '.join(self.domains))
         return ret
 
     def save_results(self, resultsdir, fname_prefix=None):
@@ -2186,7 +2245,26 @@
             float, temperature in degrees Celsius.
         """
         result = utils.run(self._path, timeout=5, ignore_status=True)
-        return float(result.stdout)
+
+        value = float(result.stdout)
+
+        # `battery_temp` reported the value in degrees Celsius.
+        if 0 < value < 100:
+            return round(value, 1)
+
+        # `battery_temp` reported the value in Kelvin.
+        if 273 < value < 373:
+            return round(value - 273.15, 1)
+
+        # `battery_temp` reported the value in millidegrees Celsius.
+        if 1000 < value < 100000:
+            return round(value / 1000., 1)
+
+        # `battery_temp` reported the value in millikelvin.
+        if 273150 < value < 373150:
+            return round(value / 1000. - 273.15, 1)
+
+        raise ValueError('Unexpected battery_temp value: %r' % value)
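
For illustration only, a tiny standalone check of the unit heuristic above,
using made-up raw readings:

# Celsius, Kelvin, millidegrees-Celsius, and millikelvin inputs that all
# describe the same temperature normalize to the same value.
for raw in (45.0, 318.15, 45000.0, 318150.0):
    if 0 < raw < 100:  # degrees Celsius
        celsius = raw
    elif 273 < raw < 373:  # Kelvin
        celsius = raw - 273.15
    elif 1000 < raw < 100000:  # millidegrees Celsius
        celsius = raw / 1000.
    elif 273150 < raw < 373150:  # millikelvin
        celsius = raw / 1000. - 273.15
    else:
        raise ValueError(raw)
    print(round(celsius, 1))  # 45.0 in every case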
 
 
 def has_battery_temp():
@@ -2219,37 +2297,26 @@
 
     def create_measurements(self):
         """Create measurements for TempLogger."""
-        domains = set()
         measurements = []
+
+        zstats = ThermalStatACPI()
+        for desc, fpath in zstats.zones.items():
+            new_meas = TempMeasurement(desc, fpath)
+            measurements.append(new_meas)
+
         tstats = ThermalStatHwmon()
         for kname in tstats.fields:
             match = re.match(r'(\S+)_temp(\d+)_input', kname)
             if not match:
                 continue
-            domain = match.group(1) + '-t' + match.group(2)
+            desc = match.group(1) + '-t' + match.group(2)
             fpath = tstats.fields[kname][0]
-            new_meas = TempMeasurement(domain, fpath)
+            new_meas = TempMeasurement(desc, fpath)
             measurements.append(new_meas)
-            domains.add(domain)
 
         if has_battery_temp():
             measurements.append(BatteryTempMeasurement())
 
-        sysfs_paths = '/sys/class/thermal/thermal_zone*'
-        paths = glob.glob(sysfs_paths)
-        for path in paths:
-            domain_path = os.path.join(path, 'type')
-            temp_path = os.path.join(path, 'temp')
-
-            domain = utils.read_one_line(domain_path)
-
-            # Skip when thermal_zone and hwmon have same domain.
-            if domain in domains:
-                continue
-
-            domain = domain.replace(' ', '_')
-            new_meas = TempMeasurement(domain, temp_path)
-            measurements.append(new_meas)
         return measurements
 
     def save_results(self, resultsdir, fname_prefix=None):
@@ -2282,7 +2349,7 @@
             float, number of seconds elasped until condition met.
 
         Raises:
-            py_utils.TimeoutException if condition are not met by timeout.
+            error.TestFail if the condition is not met before the timeout.
         """
         start_time = time.time()
 
@@ -2294,7 +2361,21 @@
         c = ('Math.min(...Array.from(document.getElementsByTagName("video"))'
              '.map(v => v.currentTime)) >= 0.001')
         timeout_left = timeout - (time.time() - start_time)
-        tab.WaitForJavaScriptCondition(c, timeout=timeout_left)
+        try:
+            tab.WaitForJavaScriptCondition(c, timeout=timeout_left)
+        # Catch a broad exception because py_utils.TimeoutException would
+        # require libchrome to import.
+        except Exception:
+            times = tab.EvaluateJavaScript(
+                    'Array.from(document.getElementsByTagName("video"))'
+                    '.map(v => v.currentTime)')
+            # Not a timeout: re-raise the original exception.
+            if min(times) > 0.001:
+                raise
+            videos = tab.EvaluateJavaScript(
+                    'Array.from(document.getElementsByTagName("video"))'
+                    '.map(v => v.id)')
+            failed_videos = [v for v, t in zip(videos, times) if t < 0.001]
+            raise error.TestFail('Media playback failed: %s' % failed_videos)
         return time.time() - start_time
 
     def __init__(self, tab, seconds_period=1.0, checkpoint_logger=None):
@@ -2400,6 +2481,31 @@
         return super(FanRpmLogger, self).calc(mtype)
 
 
+class FreeMemoryLogger(MeasurementLogger):
+    """Class to measure free memory from /proc/meminfo in KB unit."""
+
+    def __init__(self, seconds_period=1.0, checkpoint_logger=None):
+        """Initialize a FreeMemoryLogger."""
+        super(FreeMemoryLogger, self).__init__([], seconds_period,
+                                               checkpoint_logger)
+        self.domains = ['MemFree', 'MemAvailable']
+        self.refresh()
+
+    def refresh(self):
+        return [
+                utils.read_from_meminfo('MemFree'),
+                utils.read_from_meminfo('MemAvailable')
+        ]
+
+    def save_results(self, resultsdir, fname_prefix=None):
+        if not fname_prefix:
+            fname_prefix = 'free_memory_results_%.0f' % time.time()
+        super(FreeMemoryLogger, self).save_results(resultsdir, fname_prefix)
+
+    def calc(self, mtype='kB'):
+        return super(FreeMemoryLogger, self).calc(mtype)
+
+
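
A rough standalone sketch of the kind of lookup utils.read_from_meminfo
performs (the real helper's implementation may differ; this only shows where
the MemFree and MemAvailable numbers come from):

import re

def read_meminfo_kb(key):
    """Return the value of a /proc/meminfo field in kB (illustrative sketch)."""
    with open('/proc/meminfo') as f:
        for line in f:
            match = re.match(r'%s:\s+(\d+)\s+kB' % re.escape(key), line)
            if match:
                return int(match.group(1))
    raise KeyError(key)

print(read_meminfo_kb('MemFree'), read_meminfo_kb('MemAvailable'))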
 def create_measurement_loggers(seconds_period=20.0, checkpoint_logger=None):
     """Create loggers for power test that is not test-specific.
 
@@ -2411,9 +2517,10 @@
         list of loggers created.
     """
     loggers = [
-        PowerLogger(None, seconds_period, checkpoint_logger),
-        TempLogger(None, seconds_period, checkpoint_logger),
-        CPUStatsLogger(seconds_period, checkpoint_logger),
+            PowerLogger(None, seconds_period, checkpoint_logger),
+            TempLogger(None, seconds_period, checkpoint_logger),
+            CPUStatsLogger(seconds_period, checkpoint_logger),
+            FreeMemoryLogger(seconds_period, checkpoint_logger),
     ]
     if has_fan():
         loggers.append(FanRpmLogger(seconds_period, checkpoint_logger))
@@ -2521,7 +2628,7 @@
         try:
             with open(self._device_path, 'r') as dev:
                 result = fcntl.ioctl(dev, 0x2285, sgio_header)
-        except IOError, e:
+        except IOError as e:
             raise error.TestError('ioctl(SG_IO) error: %s' % str(e))
         _, _, _, _, status, host_status, driver_status = \
             struct.unpack("4x4xxx2x4xPPP4x4x4xPBxxxHH4x4x4x", result)
@@ -2566,7 +2673,7 @@
                 else:
                     self._stats[state] = new_time - self._time
                 self._time = new_time
-        except error.TestError, e:
+        except error.TestError as e:
             self._error = e
             self._running = False
 
@@ -2583,6 +2690,25 @@
         """Returns the _error exception... please only call after result()."""
         return self._error
 
+S0ixAmdStats = namedtuple('S0ixAmdStats', ['entry', 'exit', 'residency'])
+
+def parse_amd_pmc_s0ix_residency_info():
+    """
+    Parses S0ix residency for AMD systems.
+
+    @returns S0ixAmdStats
+    @raises error.TestNAError if the debugfs file is not found.
+    """
+    s = []
+    try:
+        with open('/sys/kernel/debug/amd_pmc/s0ix_stats', 'r') as f:
+            for line in f:
+                if ':' in line:
+                    val = line.split(": ")
+                    s.append(int(val[1]))
+    except IOError:
+        raise error.TestNAError('AMD S0ix residency not supported')
+    return S0ixAmdStats(entry=s[0], exit=s[1], residency=s[2])
+
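
The parser above only relies on lines of the form "<label>: <integer>"; a
debugfs file shaped roughly like the sample below (field names are
illustrative, not verified against any particular kernel) yields entry, exit,
and residency in that order:

# Illustrative s0ix_stats contents; only the "<label>: <integer>" shape matters.
sample = [
        '=== S0ix statistics ===\n',
        'S0ix Entry Time: 123456\n',
        'S0ix Exit Time: 234567\n',
        'Residency Time: 111111\n',
]
values = [int(line.split(': ')[1]) for line in sample if ':' in line]
# values == [123456, 234567, 111111]
# -> S0ixAmdStats(entry=123456, exit=234567, residency=111111)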
 def parse_pmc_s0ix_residency_info():
     """
     Parses S0ix residency for PMC based Intel systems
@@ -2609,13 +2735,23 @@
     Measures the S0ix residency of a given board over time.
     """
     def __init__(self):
-        self._initial_residency = parse_pmc_s0ix_residency_info()
+        if "amd" in utils.get_cpu_soc_family():
+            self._initial_residency = parse_amd_pmc_s0ix_residency_info()
+        else:
+            self._initial_residency = parse_pmc_s0ix_residency_info()
 
     def get_accumulated_residency_secs(self):
         """
         @returns S0ix Residency since the class has been initialized.
         """
-        return parse_pmc_s0ix_residency_info() - self._initial_residency
+        if "amd" in utils.get_cpu_soc_family():
+            s0ix = parse_amd_pmc_s0ix_residency_info()
+            if s0ix != self._initial_residency:
+                return s0ix.residency
+            else:
+                return 0
+        else:
+            return parse_pmc_s0ix_residency_info() - self._initial_residency
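
A minimal sketch of how a suspend test might use S0ixResidencyStats to confirm
the system actually reached S0ix (the suspend step itself is omitted):

from autotest_lib.client.common_lib import error
from autotest_lib.client.cros.power import power_status

stats = power_status.S0ixResidencyStats()
# ... suspend and resume the DUT here ...
if stats.get_accumulated_residency_secs() <= 0:
    raise error.TestFail('System did not reach S0ix during suspend.')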
 
 
 class S2IdleStateStats(object):
@@ -2693,8 +2829,8 @@
     total_usecs = 0
 
     all_stats = get_s2idle_stats()
-    for stats in all_stats.itervalues():
-        for st in stats.itervalues():
+    for stats in all_stats.values():
+        for st in stats.values():
             total_usecs += st.time
 
     return total_usecs
@@ -2918,7 +3054,7 @@
         if on_ip:
             on_ip_in_warn_list = on_ip & S0IX_WARNLIST
             if on_ip_in_warn_list:
-                logging.warn('Found PCH IP that may be able to powergate: %s',
+                logging.warning('Found PCH IP that may be able to powergate: %s',
                              ', '.join(on_ip_in_warn_list))
             on_ip -= S0IX_WARNLIST
 
diff --git a/client/cros/power/power_suspend.py b/client/cros/power/power_suspend.py
index 255e86b..2e0d12d 100644
--- a/client/cros/power/power_suspend.py
+++ b/client/cros/power/power_suspend.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -13,6 +14,8 @@
 #pylint: disable=W0611
 from autotest_lib.client.cros import flimflam_test_path
 import flimflam
+from six.moves import range
+
 
 class Suspender(object):
     """Class for suspend/resume measurements.
@@ -47,31 +50,11 @@
         _identify_driver: Return the driver name of a device (or "unknown").
     """
 
-    # board-specific "time to suspend" values determined empirically
-    # TODO: migrate to separate file with http://crosbug.com/38148
-    _DEFAULT_SUSPEND_DELAY = 5
-    _SUSPEND_DELAY = {
-        # TODO: Reevaluate this when http://crosbug.com/38460 is fixed
-        'daisy': 6,
-        'daisy_spring': 6,
-        'peach_pit': 6,
-
-        # TODO: Reevaluate these when http://crosbug.com/38225 is fixed
-        'x86-mario': 6,
-        'x86-alex': 5,
-
-        # Lumpy and Stumpy need high values, because it seems to mitigate their
-        # RTC interrupt problem. See http://crosbug.com/36004
-        'lumpy': 5,
-        'stumpy': 5,
-
-        # RTS5209 card reader has a really bad staging driver, can take ~1 sec
-        'butterfly': 4,
-
-        # Hard disk sync and overall just slow
-        'parrot': 8,
-        'kiev': 9,
-    }
+    _DEFAULT_SUSPEND_DELAY = 15
+    # Dictionary of board-specific "time to suspend" values determined
+    # empirically, where the key is <board> and the value is the suspend delay
+    # in seconds.
+    _SUSPEND_DELAY = {}
 
     # alarm/not_before value guaranteed to raise SpuriousWakeup in _hwclock_ts
     _ALARM_FORCE_EARLY_WAKEUP = 2147483647
@@ -94,7 +77,7 @@
     # enough to include ACPI Wake Reason... 10 should be far on the safe side.
     _RELEVANT_EVENTLOG_LINES = 10
 
-    # Sanity check value to catch overlong resume times (from missed RTC wakes)
+    # Check value to catch overlong resume times (from missed RTC wakes)
     _MAX_RESUME_TIME = 10
 
     # File written by powerd_suspend containing the hwclock time at resume.
@@ -203,8 +186,12 @@
         """
         Returns timestamp of last matching line or None
         """
-        with open(filename) as f:
-            lines = f.readlines()
+        try:
+            with open(filename) as f:
+                lines = f.readlines()
+        except IOError:
+            logging.info('Cannot open %s to retrieve the latest ts.', filename)
+        else:
             for line in reversed(lines):
                 if re.search(pattern, line):
                     matches = re.search(self._POWERD_TS_RE, line)
@@ -237,7 +224,9 @@
         """Throw away cached log lines and reset log pointer to current end."""
         if self._log_file:
             self._log_file.close()
-        self._log_file = open('/var/log/messages')
+        self._log_file = open('/var/log/messages',
+                              mode='r+',
+                              **power_utils.encoding_kwargs())
         self._log_file.seek(0, os.SEEK_END)
         self._logs = []
 
@@ -253,7 +242,7 @@
         false if took too long
         """
         finished_regex = re.compile(r'powerd_suspend\[\d+\]: Resume finished')
-        for retry in xrange(retries + 1):
+        for retry in range(retries + 1):
             lines = self._log_file.readlines()
             if lines:
                 if self._logs and self._logs[-1][-1] != '\n':
@@ -271,7 +260,7 @@
     def _ts(self, name, retries=11):
         """Searches logs for last timestamp with a given suspend message."""
         # Occasionally need to retry due to races from process wakeup order
-        for retry in xrange(retries + 1):
+        for retry in range(retries + 1):
             try:
                 f = open(self._TIMINGS_FILE)
                 for line in f:
@@ -307,8 +296,9 @@
                 early_wakeup = True
         if early_wakeup:
             logging.debug('Early wakeup, dumping eventlog if it exists:\n')
-            elog = utils.system_output('mosys eventlog list | tail -n %d' %
-                    self._RELEVANT_EVENTLOG_LINES, ignore_status=True)
+            elog = utils.system_output('elogtool list | tail -n %d' %
+                                       self._RELEVANT_EVENTLOG_LINES,
+                                       ignore_status=True)
             wake_elog = (['unknown'] + re.findall(r'Wake Source.*', elog))[-1]
             for line in reversed(self._logs):
                 match = re.search(r'PM1_STS: WAK.*', line)
@@ -365,23 +355,23 @@
         phase_times = []
         regex = re.compile(r'PM: (\w+ )?(resume|suspend) of devices complete')
         for line in self._logs:
-          match = regex.search(line)
-          if match:
-            ts = cros_logging.extract_kernel_timestamp(line)
-            phase = match.group(1)
-            if not phase:
-              phase = 'REG'
-            phase_times.append((phase.upper(), ts))
+            match = regex.search(line)
+            if match:
+                ts = cros_logging.extract_kernel_timestamp(line)
+                phase = match.group(1)
+                if not phase:
+                    phase = 'REG'
+                phase_times.append((phase.upper(), ts))
         return sorted(phase_times, key = lambda entry: entry[1])
 
 
     def _get_phase(self, ts, phase_table, dev):
-      for entry in phase_table:
-        #checking if timestamp was before that phase's cutoff
-        if ts < entry[1]:
-          return entry[0]
-      raise error.TestError('Device %s has a timestamp after all devices %s',
-                            dev, 'had already resumed')
+        for entry in phase_table:
+            #checking if timestamp was before that phase's cutoff
+            if ts < entry[1]:
+                return entry[0]
+        raise error.TestError('Device %s has a timestamp after all devices '
+                              'had already resumed' % dev)
 
 
     def _individual_device_times(self, start_resume):
@@ -391,35 +381,35 @@
         regex = re.compile(r'call ([^ ]+)\+ returned 0 after ([0-9]+) usecs')
         phase_table = self._get_phase_times()
         for line in self._logs:
-          match = regex.search(line)
-          if match:
-            device = match.group(1).replace(':', '-')
-            key = 'seconds_dev_' + device
-            secs = float(match.group(2)) / 1e6
-            ts = cros_logging.extract_kernel_timestamp(line)
-            if ts > start_resume:
-              key += '_resume'
-            else:
-              key += '_suspend'
-            #looking if we're in a special phase
-            phase = self._get_phase(ts, phase_table, device)
-            dev = dev_details[key]
-            if phase in dev:
-              logging.warning('Duplicate %s entry for device %s, +%f', phase,
-                              device, secs)
-              dev[phase] += secs
-            else:
-              dev[phase] = secs
+            match = regex.search(line)
+            if match:
+                device = match.group(1).replace(':', '-')
+                key = 'seconds_dev_' + device
+                secs = float(match.group(2)) / 1e6
+                ts = cros_logging.extract_kernel_timestamp(line)
+                if ts > start_resume:
+                    key += '_resume'
+                else:
+                    key += '_suspend'
+                #looking if we're in a special phase
+                phase = self._get_phase(ts, phase_table, device)
+                dev = dev_details[key]
+                if phase in dev:
+                    logging.warning('Duplicate %s entry for device %s, +%f',
+                                    phase, device, secs)
+                    dev[phase] += secs
+                else:
+                    dev[phase] = secs
 
-        for dev_key, dev in dev_details.iteritems():
-          total_secs = sum(dev.values())
-          self.device_times[-1][dev_key] = total_secs
-          report = '%s: %f TOT' % (dev_key, total_secs)
-          for phase in dev.keys():
-            if phase is 'REG':
-              continue
-            report += ', %f %s' % (dev[phase], phase)
-          logging.debug(report)
+        for dev_key, dev in dev_details.items():
+            total_secs = sum(dev.values())
+            self.device_times[-1][dev_key] = total_secs
+            report = '%s: %f TOT' % (dev_key, total_secs)
+            for phase in dev.keys():
+                if phase == 'REG':
+                    continue
+                report += ', %f %s' % (dev[phase], phase)
+            logging.debug(report)
 
 
     def _identify_driver(self, device):
@@ -458,7 +448,7 @@
 
         # TODO(scottz): warning_monitor crosbug.com/38092
         log_len = len(self._logs)
-        for i in xrange(log_len):
+        for i in range(log_len):
             line = self._logs[i]
             if warning_regex.search(line):
                 # match the source file from the WARNING line, and the
@@ -476,7 +466,7 @@
                         break
                 else:
                     if ignore_kernel_warns:
-                        logging.warn('Non-allowlisted KernelError: %s', src)
+                        logging.warning('Non-allowlisted KernelError: %s', src)
                     else:
                         raise sys_power.KernelError("%s\n%s" % (src, text))
             if abort_regex.search(line):
@@ -519,7 +509,7 @@
                   ' ArcPowerManagerService:D"'
         regex_resume = re.compile(r'^\s*(\d+\.\d+).*ArcPowerManagerService: '
                                   'Device finished resuming$')
-        for retry in xrange(retries + 1):
+        for retry in range(retries + 1):
             arc_logcat = utils.system_output(command, ignore_status=False)
             arc_logcat = arc_logcat.splitlines()
             for line in arc_logcat:
@@ -536,8 +526,8 @@
 
 
     def get_suspend_delay(self):
-            return self._SUSPEND_DELAY.get(self._get_board(),
-                                           self._DEFAULT_SUSPEND_DELAY)
+        return self._SUSPEND_DELAY.get(self._get_board(),
+                                       self._DEFAULT_SUSPEND_DELAY)
 
 
     def suspend(self, duration=10, ignore_kernel_warns=False,
@@ -566,7 +556,7 @@
         try:
             iteration = len(self.failures) + len(self.successes) + 1
             # Retry suspend in case we hit a known (allowlisted) bug
-            for _ in xrange(10):
+            for _ in range(10):
                 # Clear powerd_suspend RTC timestamp, to avoid stale results.
                 utils.open_write_close(self.HWCLOCK_FILE, '')
                 self._reset_logs()
@@ -605,7 +595,7 @@
 
                 if not self._check_resume_finished():
                     if not self._aborted_due_to_locking():
-                        raise error.TestError("Sanity check failed: did not try to suspend.")
+                        raise error.TestError("Confidence check failed: did not try to suspend.")
                     logging.warning('Aborted suspend due to power override, will retry\n')
                     continue
                 if not self._check_for_errors(ignore_kernel_warns):
@@ -637,7 +627,7 @@
                 # can be missing on non-SMP machines
                 cpu_up = None
             if total_up > self._MAX_RESUME_TIME:
-                raise error.TestError('Sanity check failed: missed RTC wakeup.')
+                raise error.TestError('Confidence check failed: missed RTC wakeup.')
 
             logging.info('Success(%d): %g down, %g up, %g board, %g firmware, '
                          '%g kernel, %g cpu, %g devices',
@@ -653,7 +643,7 @@
                     msg = 'S0ix residency did not change.'
                     if cpu_uarch not in self._IGNORE_S0IX_RESIDENCY_CHECK:
                         raise sys_power.S0ixResidencyNotChanged(msg)
-                    logging.warn(msg)
+                    logging.warning(msg)
                 logging.info('S0ix residency : %d secs.', s0ix_residency_secs)
             elif hasattr(self, '_s2idle_residency_stats'):
                 s2idle_residency_usecs = \
diff --git a/client/cros/power/power_telemetry_utils.py b/client/cros/power/power_telemetry_utils.py
index 1448e08..7b00228 100644
--- a/client/cros/power/power_telemetry_utils.py
+++ b/client/cros/power/power_telemetry_utils.py
@@ -1,13 +1,19 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Helper class for power autotests requiring telemetry devices."""
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import time
 
 import numpy
+from six.moves import range
 
 CUSTOM_START = 'PowerTelemetryLogger custom start.'
 CUSTOM_END = 'PowerTelemetryLogger custom end.'
@@ -56,7 +62,7 @@
     if max_sample_gap is not None or max_sample_time_gap is not None:
         # Flag to keep track whether the loop is in a measurement gap (NaN).
         consecutive_nan_start = None
-        # Add a dummy at the end to make sure the iteration covers all real
+        # Add a stub at the end to make sure the iteration covers all real
         # examples.
         for i, isnan in enumerate(numpy.append(nan_data, False)):
             if isnan and consecutive_nan_start is None:
@@ -93,7 +99,7 @@
     if not len(sample_idx):
         raise TelemetryUtilsError('Data has no valid readings. Cannot '
                                   'interpolate.')
-    output = numpy.interp(range(len(data)), sample_idx, sample_vals)
+    output = numpy.interp(list(range(len(data))), sample_idx, sample_vals)
     return [round(x, INTERPOLATION_RESOLUTION) for x in output]
 
 def log_event_ts(message=None, timestamp=None, offset=0):
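For reference, a minimal standalone sketch of the linear-fill behavior exercised by the unit tests that follow; it illustrates the same approach built directly on numpy.interp and is not the autotest helper itself:

    import numpy

    def interpolate_sketch(data):
        """Fill NaN gaps linearly, matching the expectations in the tests below."""
        arr = numpy.array(data, dtype=float)
        valid = ~numpy.isnan(arr)
        if not valid.any():
            raise ValueError('Data has no valid readings. Cannot interpolate.')
        sample_idx = numpy.flatnonzero(valid)
        sample_vals = arr[valid]
        return list(numpy.interp(range(len(arr)), sample_idx, sample_vals))

    # e.g. [1.2, 3.6, nan, nan, 2.7] -> [1.2, 3.6, 3.3, 3.0, 2.7]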
diff --git a/client/cros/power/power_telemetry_utils_unittest.py b/client/cros/power/power_telemetry_utils_unittest.py
index 7e3326b..396c055 100644
--- a/client/cros/power/power_telemetry_utils_unittest.py
+++ b/client/cros/power/power_telemetry_utils_unittest.py
@@ -1,3 +1,4 @@
+#!/usr/bin/python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,142 +7,153 @@
 
 import unittest
 
-import power_telemetry_utils
+import common
+
+from autotest_lib.client.cros.power import power_telemetry_utils
 
 
 class TestInterpolateData(unittest.TestCase):
-  """Collection of tests to test smooten_data function in utils."""
+    """Collection of tests to test smooten_data function in utils."""
 
-  def test_Interpolate(self):
-      """Test that regular smoothening of data works."""
-      data = [1.2, 3.6, float('nan'), float('nan'), 2.7]
-      expected_interp_data = [1.2, 3.6, 3.3, 3.0, 2.7]
-      interp_data = power_telemetry_utils.interpolate_missing_data(data)
-      self.assertListEqual(interp_data, expected_interp_data)
+    def test_Interpolate(self):
+        """Test that regular smoothening of data works."""
+        data = [1.2, 3.6, float('nan'), float('nan'), 2.7]
+        expected_interp_data = [1.2, 3.6, 3.3, 3.0, 2.7]
+        interp_data = power_telemetry_utils.interpolate_missing_data(data)
+        self.assertListEqual(interp_data, expected_interp_data)
 
-  def test_InterpolateAllNaN(self):
-      """Test that a full NaN array cannot be smoothed."""
-      data = [float('nan'), float('nan'), float('nan'), float('nan')]
-      with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
-                                   'Data has no valid readings.'):
-          power_telemetry_utils.interpolate_missing_data(data)
+    def test_InterpolateAllNaN(self):
+        """Test that a full NaN array cannot be smoothed."""
+        data = [float('nan'), float('nan'), float('nan'), float('nan')]
+        with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
+                                     'Data has no valid readings.'):
+            power_telemetry_utils.interpolate_missing_data(data)
 
-  def test_InterpolateGapStartAtBeginning(self):
-      """Test that a gap starting at the start gets the first known value."""
-      data = [float('nan'), float('nan'), 2.6]
-      expected_interp_data = [2.6, 2.6, 2.6]
-      interp_data = power_telemetry_utils.interpolate_missing_data(data)
-      self.assertListEqual(interp_data, expected_interp_data)
+    def test_InterpolateGapStartAtBeginning(self):
+        """Test that a gap starting at the start gets the first known value."""
+        data = [float('nan'), float('nan'), 2.6]
+        expected_interp_data = [2.6, 2.6, 2.6]
+        interp_data = power_telemetry_utils.interpolate_missing_data(data)
+        self.assertListEqual(interp_data, expected_interp_data)
 
-  def test_InterpolateGapEndsAtEnd(self):
-      """Test that a gap that ends at the end receives the last known value."""
-      data = [2.6, float('nan'), float('nan')]
-      expected_interp_data = [2.6, 2.6, 2.6]
-      interp_data = power_telemetry_utils.interpolate_missing_data(data)
-      self.assertListEqual(interp_data, expected_interp_data)
+    def test_InterpolateGapEndsAtEnd(self):
+        """Test that a gap that ends at the end receives the last known value."""
+        data = [2.6, float('nan'), float('nan')]
+        expected_interp_data = [2.6, 2.6, 2.6]
+        interp_data = power_telemetry_utils.interpolate_missing_data(data)
+        self.assertListEqual(interp_data, expected_interp_data)
 
-  def test_InterpolateTwoGaps(self):
-      """Test that two distinct gaps receive distinct values."""
-      data = [2.6, float('nan'), 3.4, 2.0 , float('nan'), 2.5]
-      expected_interp_data = [2.6, 3.0, 3.4, 2.0, 2.25, 2.5]
-      interp_data = power_telemetry_utils.interpolate_missing_data(data)
-      self.assertListEqual(interp_data, expected_interp_data)
+    def test_InterpolateTwoGaps(self):
+        """Test that two distinct gaps receive distinct values."""
+        data = [2.6, float('nan'), 3.4, 2.0, float('nan'), 2.5]
+        expected_interp_data = [2.6, 3.0, 3.4, 2.0, 2.25, 2.5]
+        interp_data = power_telemetry_utils.interpolate_missing_data(data)
+        self.assertListEqual(interp_data, expected_interp_data)
 
-  def test_InterpolateHandlesIntegerDivision(self):
-      """Test that integer division does not cause issues."""
-      data = [2, float('nan'), 3]
-      expected_interp_data = [2, 2.5, 3]
-      interp_data = power_telemetry_utils.interpolate_missing_data(data)
-      self.assertListEqual(interp_data, expected_interp_data)
+    def test_InterpolateHandlesIntegerDivision(self):
+        """Test that integer division does not cause issues."""
+        data = [2, float('nan'), 3]
+        expected_interp_data = [2, 2.5, 3]
+        interp_data = power_telemetry_utils.interpolate_missing_data(data)
+        self.assertListEqual(interp_data, expected_interp_data)
 
-  def test_AcceptableNaNRatio(self):
-      """Validation succeeds if the ratio of NaN is below the threshold."""
-      data = [2, float('nan'), 3, 4, 5, 6]
-      # This should pass as there are only 1/6 NaN in the data.
-      max_nan_ratio = 0.3
-      args = {'max_nan_ratio': max_nan_ratio}
-      interp_data = power_telemetry_utils.interpolate_missing_data(data,
-                                                                   **args)
+    def test_AcceptableNaNRatio(self):
+        """Validation succeeds if the ratio of NaN is below the threshold."""
+        data = [2, float('nan'), 3, 4, 5, 6]
+        # This should pass as there are only 1/6 NaN in the data.
+        max_nan_ratio = 0.3
+        args = {'max_nan_ratio': max_nan_ratio}
+        interp_data = power_telemetry_utils.interpolate_missing_data(
+                data, **args)
 
-  def test_ExcessiveNaNRatio(self):
-      """Validation fails if the ratio of NaN to valid readings is too high."""
-      data = [2, float('nan'), 3, 4, 5, 6]
-      # This should fail as there are 1/6 NaN in the data.
-      max_nan_ratio = 0.1
-      args = {'max_nan_ratio': max_nan_ratio}
-      with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
-                                   'NaN ratio of'):
-          interp_data = power_telemetry_utils.interpolate_missing_data(data,
-                                                                       **args)
+    def test_ExcessiveNaNRatio(self):
+        """Validation fails if the ratio of NaN to valid readings is too high."""
+        data = [2, float('nan'), 3, 4, 5, 6]
+        # This should fail as the 1/6 NaN ratio exceeds the 0.1 limit.
+        max_nan_ratio = 0.1
+        args = {'max_nan_ratio': max_nan_ratio}
+        with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
+                                     'NaN ratio of'):
+            interp_data = power_telemetry_utils.interpolate_missing_data(
+                    data, **args)
 
-  def test_ExcessiveNaNSampleGap(self):
-      """Validation fails on too many consecutive NaN samples."""
-      data = [2, float('nan'), float('nan'), float('nan'), 3, 4, 5, 6]
-      # This should fail as there is a 3 NaN gap.
-      max_sample_gap = 2
-      args = {'max_sample_gap': max_sample_gap}
-      with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
-                                   'Too many consecutive NaN samples:'):
-          interp_data = power_telemetry_utils.interpolate_missing_data(data,
-                                                                       **args)
+    def test_ExcessiveNaNSampleGap(self):
+        """Validation fails on too many consecutive NaN samples."""
+        data = [2, float('nan'), float('nan'), float('nan'), 3, 4, 5, 6]
+        # This should fail as there is a 3 NaN gap.
+        max_sample_gap = 2
+        args = {'max_sample_gap': max_sample_gap}
+        with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
+                                     'Too many consecutive NaN samples:'):
+            interp_data = power_telemetry_utils.interpolate_missing_data(
+                    data, **args)
 
-  def test_ExcessiveNaNSampleGapAtBeginning(self):
-      """Validation fails on too many consecutive NaN samples at the start."""
-      data = [float('nan'), float('nan'), float('nan'), 2]
-      # This should fail as there is a 3 NaN gap.
-      max_sample_gap = 2
-      args = {'max_sample_gap': max_sample_gap}
-      with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
-                                   'Too many consecutive NaN samples:'):
-          interp_data = power_telemetry_utils.interpolate_missing_data(data,
-                                                                       **args)
+    def test_ExcessiveNaNSampleGapAtBeginning(self):
+        """Validation fails on too many consecutive NaN samples at the start."""
+        data = [float('nan'), float('nan'), float('nan'), 2]
+        # This should fail as there is a 3 NaN gap.
+        max_sample_gap = 2
+        args = {'max_sample_gap': max_sample_gap}
+        with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
+                                     'Too many consecutive NaN samples:'):
+            interp_data = power_telemetry_utils.interpolate_missing_data(
+                    data, **args)
 
-  def test_ExcessiveNaNSampleGapAtEnd(self):
-      """Validation fails on too many consecutive NaN samples at the end."""
-      data = [2, float('nan'), float('nan'), float('nan')]
-      # This should fail as there is a 3 NaN gap.
-      max_sample_gap = 2
-      args = {'max_sample_gap': max_sample_gap}
-      with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
-                                   'Too many consecutive NaN samples:'):
-          interp_data = power_telemetry_utils.interpolate_missing_data(data,
-                                                                       **args)
+    def test_ExcessiveNaNSampleGapAtEnd(self):
+        """Validation fails on too many consecutive NaN samples at the end."""
+        data = [2, float('nan'), float('nan'), float('nan')]
+        # This should fail as there is a 3 NaN gap.
+        max_sample_gap = 2
+        args = {'max_sample_gap': max_sample_gap}
+        with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
+                                     'Too many consecutive NaN samples:'):
+            interp_data = power_telemetry_utils.interpolate_missing_data(
+                    data, **args)
 
-  def test_AcceptableNaNTimeSampleGap(self):
-      """Validation succeeds if NaN gap is below threshold given a timeline."""
-      data = [2, float('nan'), float('nan'), 3, 4, 5, 6]
-      # Timeline is s for the data above.
-      timeline = [1, 4, 7, 10, 13, 16, 19]
-      # This should not fail as there is only 9s gap.
-      max_sample_time_gap = 10
-      args = {'max_sample_time_gap': max_sample_time_gap,
-              'timeline': timeline}
-      interp_data = power_telemetry_utils.interpolate_missing_data(data, **args)
+    def test_AcceptableNaNTimeSampleGap(self):
+        """Validation succeeds if NaN gap is below threshold given a timeline."""
+        data = [2, float('nan'), float('nan'), 3, 4, 5, 6]
+        # Timeline is in seconds for the data above.
+        timeline = [1, 4, 7, 10, 13, 16, 19]
+        # This should not fail as there is only a 9s gap.
+        max_sample_time_gap = 10
+        args = {
+                'max_sample_time_gap': max_sample_time_gap,
+                'timeline': timeline
+        }
+        interp_data = power_telemetry_utils.interpolate_missing_data(
+                data, **args)
 
-  def test_ExcessiveNaNTimeSampleGap(self):
-      """Validation fails if NaN gap is too long on a given timeline."""
-      data = [2, float('nan'), float('nan'), 3, 4, 5, 6]
-      # Timeline is s for the data above.
-      timeline = [1, 4, 7, 10, 13, 16, 19]
-      # This should fail as there 9s of gap.
-      max_sample_time_gap = 8
-      args = {'max_sample_time_gap': max_sample_time_gap,
-              'timeline': timeline}
-      with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
-                                   'Excessively long sample gap'):
-          interp_data = power_telemetry_utils.interpolate_missing_data(data,
-                                                                       **args)
+    def test_ExcessiveNaNTimeSampleGap(self):
+        """Validation fails if NaN gap is too long on a given timeline."""
+        data = [2, float('nan'), float('nan'), 3, 4, 5, 6]
+        # Timeline is in seconds for the data above.
+        timeline = [1, 4, 7, 10, 13, 16, 19]
+        # This should fail as there is a 9s gap.
+        max_sample_time_gap = 8
+        args = {
+                'max_sample_time_gap': max_sample_time_gap,
+                'timeline': timeline
+        }
+        with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
+                                     'Excessively long sample gap'):
+            interp_data = power_telemetry_utils.interpolate_missing_data(
+                    data, **args)
 
-  def test_NaNTimeSampleGapRequiresTimeline(self):
-      """|timeline| arg is required if checking for sample gap time."""
-      data = [2, float('nan'), float('nan'), 3, 4, 5, 6]
-      # Timeline is s for the data above.
-      timeline = [1, 4, 7, 10, 13, 16, 19]
-      # This should fail the timeline is not provided in the args but the check
-      # is requested.
-      max_sample_time_gap = 2
-      args = {'max_sample_time_gap': max_sample_time_gap}
-      with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
-                                   'Supplying max_sample_time_gap'):
-          interp_data = power_telemetry_utils.interpolate_missing_data(data,
-                                                                       **args)
+    def test_NaNTimeSampleGapRequiresTimeline(self):
+        """|timeline| arg is required if checking for sample gap time."""
+        data = [2, float('nan'), float('nan'), 3, 4, 5, 6]
+        # Timeline is in seconds for the data above.
+        timeline = [1, 4, 7, 10, 13, 16, 19]
+        # This should fail because the timeline is not provided in the args
+        # but the check is requested.
+        max_sample_time_gap = 2
+        args = {'max_sample_time_gap': max_sample_time_gap}
+        with self.assertRaisesRegexp(power_telemetry_utils.TelemetryUtilsError,
+                                     'Supplying max_sample_time_gap'):
+            interp_data = power_telemetry_utils.interpolate_missing_data(
+                    data, **args)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/client/cros/power/power_test.py b/client/cros/power/power_test.py
index de89c69..7976044 100644
--- a/client/cros/power/power_test.py
+++ b/client/cros/power/power_test.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,12 +7,14 @@
 import time
 
 from autotest_lib.client.bin import test
+from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import arc_common
 from autotest_lib.client.common_lib.cros import retry
 from autotest_lib.client.common_lib.cros.network import interface
-from autotest_lib.client.cros import ec
 from autotest_lib.client.cros import service_stopper
 from autotest_lib.client.cros.camera import camera_utils
+from autotest_lib.client.cros.power import force_discharge_utils
 from autotest_lib.client.cros.power import power_dashboard
 from autotest_lib.client.cros.power import power_status
 from autotest_lib.client.cros.power import power_telemetry_utils
@@ -26,15 +29,24 @@
     histogram_re = 'Histogram: %s recorded (\d+) samples, mean = (\d+\.\d+)'
     hist_percentile_re = '^(\d+).+\{(\d+)\.\d+\%\}'
 
-    def initialize(self, seconds_period=20., pdash_note='',
-                   force_discharge=False,
-                   check_network=False):
+    def initialize(self,
+                   seconds_period=20.,
+                   pdash_note='',
+                   force_discharge='false',
+                   check_network=False,
+                   run_arc=True):
         """Perform necessary initialization prior to power test run.
 
         @param seconds_period: float of probing interval in seconds.
         @param pdash_note: note of the current run to send to power dashboard.
-        @param force_discharge: force battery to discharge during the test.
+        @param force_discharge: string controlling whether to tell the EC to
+                discharge the battery even when the charger is plugged in.
+                'false' means no forced discharge; 'true' means forcing
+                discharge and raising an error when it fails; 'optional' means
+                forcing discharge when possible but not raising an error when
+                it fails, which is friendlier to devices without a battery.
         @param check_network: check that Ethernet interface is not running.
+        @param run_arc: bool, whether to run with ARC (if available)
 
         @var backlight: power_utils.Backlight object.
         @var keyvals: dictionary of result keyvals.
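A minimal sketch of how the three force_discharge values described above could be dispatched. The real handling lives in force_discharge_utils.process(), which is not shown in this diff, so the helper below is hypothetical and purely illustrative:

    def process_force_discharge_sketch(mode, try_force_discharge):
        """Hypothetical helper: returns True if discharge was actually forced.

        mode: 'false', 'true' or 'optional', as documented above.
        try_force_discharge: callable returning True on success (e.g. via the EC).
        """
        if mode == 'false':
            return False
        forced = try_force_discharge()
        if mode == 'true' and not forced:
            raise RuntimeError('Could not force battery discharge.')
        # 'optional' tolerates failure, e.g. on devices without a battery.
        return forced

    # e.g. process_force_discharge_sketch('optional', lambda: False) -> False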
@@ -49,24 +61,17 @@
         @var _meas_logs: list of power_status.MeasurementLoggers
         """
         super(power_Test, self).initialize()
-        self.backlight = power_utils.Backlight()
-        self.backlight.set_default()
         self.keyvals = dict()
         self.status = power_status.get_status()
 
         self._checkpoint_logger = power_status.CheckpointLogger()
         self._seconds_period = seconds_period
 
-        self._force_discharge = force_discharge
-        if force_discharge:
-            if not self.status.battery:
-                raise error.TestNAError('DUT does not have battery. '
-                                        'Could not force discharge.')
-            if not ec.has_cros_ec():
-                raise error.TestNAError('DUT does not have CrOS EC. '
-                                        'Could not force discharge.')
-            if not power_utils.charge_control_by_ectool(False):
-                raise error.TestError('Could not run battery force discharge.')
+        self._force_discharge_success = force_discharge_utils.process(
+                force_discharge, self.status)
+        self.backlight = power_utils.Backlight(
+                force_battery=self._force_discharge_success)
+        self.backlight.set_default()
 
         ifaces = [iface for iface in interface.get_interfaces()
                 if (not iface.is_wifi_device() and
@@ -91,6 +96,12 @@
             logging.debug('%s: %s', type(log).__name__, ", ".join(log.domains))
 
         self._pdash_note = pdash_note
+        self._failure_messages = []
+
+        self._arc_mode = arc_common.ARC_MODE_DISABLED
+        if run_arc and utils.is_arc_available():
+            self._arc_mode = arc_common.ARC_MODE_ENABLED
+        self.keyvals['arc_mode'] = self._arc_mode
 
     def get_extra_browser_args_for_camera_test(self):
         """Return Chrome args for camera power test."""
@@ -101,6 +112,12 @@
                 '--force-tablet-mode=clamshell',
                 # Prefer using constant frame rate for camera streaming.
                 '--enable-features=PreferConstantFrameRate',
+                # Bypass HID detection for Chromebox / Chromebase.
+                '--disable-hid-detection-on-oobe',
+                # Disable test account info sync, eg. Wi-Fi credentials,
+                # so that each test run does not remember info from last test
+                # run.
+                '--disable-sync'
         ]
 
         # Use fake camera for DUT without camera, e.g. chromebox.
@@ -127,6 +144,7 @@
         self._start_time = time.time()
         if self.status.battery:
             self._start_energy = self.status.battery.energy
+        self._keyvallogger = power_dashboard.KeyvalLogger(self._start_time)
         power_telemetry_utils.start_measurement()
 
     def loop_sleep(self, loop, sleep_secs):
@@ -160,7 +178,6 @@
 
         keypress_histogram_end = histogram_verifier.get_histogram(
             cr, self.keypress_histogram)
-        logger = power_dashboard.KeyvalLogger(self._start_time, time.time())
         matches = re.search((self.histogram_re % self.keypress_histogram),
                             keypress_histogram_end)
 
@@ -175,9 +192,10 @@
             self.output_perf_value(description='keypress_latency_us_avg',
                                    value=mean_latency,
                                    higher_is_better=False)
-            logger.add_item('keypress_cnt', count, 'point', 'keypress')
-            logger.add_item('keypress_latency_us_avg', mean_latency, 'point',
-                            'keypress')
+            self._keyvallogger.add_item('keypress_cnt', count, 'point',
+                                        'keypress')
+            self._keyvallogger.add_item('keypress_latency_us_avg',
+                                        mean_latency, 'point', 'keypress')
 
         # Capture the first bucket >= 90th percentile
         for s in keypress_histogram_end.splitlines():
@@ -194,25 +212,26 @@
                     self.output_perf_value(
                         description='keypress_high_percentile', value=perc,
                         higher_is_better=False)
-                    logger.add_item('keypress_latency_us_high', lat, 'point',
-                                    'keypress')
-                    logger.add_item('keypress_high_percentile', perc, 'point',
-                                    'keypress')
+                    self._keyvallogger.add_item('keypress_latency_us_high',
+                                                lat, 'point', 'keypress')
+                    self._keyvallogger.add_item('keypress_high_percentile',
+                                                perc, 'point', 'keypress')
                     break
 
-        self._meas_logs.append(logger)
-
     def publish_keyvals(self):
         """Publish power result keyvals."""
         keyvals = self._stats.publish()
         keyvals['level_backlight_max'] = self.backlight.get_max_level()
         keyvals['level_backlight_current'] = self.backlight.get_level()
 
-        # record battery stats if not on AC
-        if not self._force_discharge and self.status.on_ac():
-            keyvals['b_on_ac'] = 1
-        else:
-            keyvals['b_on_ac'] = 0
+        # record battery stats if battery exists
+        keyvals['b_on_ac'] = int(not self._force_discharge_success
+                                 and self.status.on_ac())
+        keyvals['force_discharge'] = int(self._force_discharge_success)
+        for key in [
+                'b_on_ac', 'force_discharge', 'percent_usb_suspended_time'
+        ]:
+            self._keyvallogger.add_item(key, keyvals[key], 'point', 'perf')
 
         if self.status.battery:
             keyvals['ah_charge_full'] = self.status.battery.charge_full
@@ -230,18 +249,30 @@
             runtime_minutes = (time.time() - self._start_time) / 60.
             keyvals['wh_energy_used'] = energy_used
             keyvals['minutes_tested'] = runtime_minutes
+            self._keyvallogger.add_item('minutes_tested',
+                                        keyvals['minutes_tested'], 'point',
+                                        'perf')
 
             low_batt = power_utils.get_low_battery_shutdown_percent()
             keyvals['percent_sys_low_battery'] = low_batt
 
             if energy_used > 0 and runtime_minutes > 1:
                 keyvals['w_energy_rate'] = energy_used * 60. / runtime_minutes
+                self._keyvallogger.add_item('w_energy_rate',
+                                            keyvals['w_energy_rate'], 'point',
+                                            'perf')
                 energy_avail = self.status.battery.energy_full_design * \
                     ((100. - low_batt) / 100.)
                 keyvals['minutes_battery_life'] = energy_avail / energy_used * \
                     runtime_minutes
+                self._keyvallogger.add_item('minutes_battery_life',
+                                            keyvals['minutes_battery_life'],
+                                            'point', 'perf')
                 keyvals['hours_battery_life'] = \
                     keyvals['minutes_battery_life'] / 60.
+                self._keyvallogger.add_item('hours_battery_life',
+                                            keyvals['hours_battery_life'],
+                                            'point', 'perf')
 
             keyvals['v_voltage_min_design'] = \
                                 self.status.battery.voltage_min_design
@@ -262,23 +293,26 @@
         self.publish_keyvals()
 
         # publish power values
-        for key, values in self.keyvals.iteritems():
+        for key, values in self.keyvals.items():
             if key.endswith('pwr_avg'):
                 self.output_perf_value(description=key, value=values, units='W',
                         higher_is_better=False, graph='power')
 
         # publish temperature values
-        for key, values in self.keyvals.iteritems():
+        for key, values in self.keyvals.items():
             if key.endswith('temp_avg'):
                 self.output_perf_value(description=key, value=values, units='C',
                         higher_is_better=False, graph='temperature')
 
         # publish fps values
-        for key, values in self.keyvals.iteritems():
+        for key, values in self.keyvals.items():
             if key.endswith('fps_avg'):
                 self.output_perf_value(description=key, value=values,
                         units='fps', higher_is_better=True, graph='fps')
 
+        # include KeyvalLogger in dashboard
+        self._meas_logs.append(self._keyvallogger)
+
         # publish to power dashboard
         dashboard_factory = power_dashboard.get_dashboard_factory()
         for log in self._meas_logs:
@@ -301,12 +335,14 @@
         super(power_Test, self).postprocess_iteration()
         self.publish_dashboard()
         self._save_results()
+        power_dashboard.generate_parallax_report(self.outputdir)
+        if self._failure_messages:
+            raise error.TestFail('Test has failed with messages: %s' %
+                                 self._failure_messages)
 
     def cleanup(self):
         """Reverse setting change in initialization."""
-        if self._force_discharge:
-            if not power_utils.charge_control_by_ectool(True):
-                logging.warn('Can not restore from force discharge.')
+        force_discharge_utils.restore(self._force_discharge_success)
         if self.backlight:
             self.backlight.restore()
         self._services.restore_services()
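The _failure_messages list introduced above lets individual checks record problems during the run and defer the single TestFail until postprocess_iteration(), so cleanup and dashboard upload still happen. A rough standalone sketch of that pattern; the check name and threshold are invented for illustration:

    # Sketch only: a check appends a message instead of raising immediately;
    # postprocess_iteration() then raises one TestFail with all of them.
    def record_if_drain_too_low(failure_messages, energy_used_wh, min_wh=0.1):
        if energy_used_wh < min_wh:
            failure_messages.append(
                    'Only %.3f Wh used; expected at least %.3f Wh.' %
                    (energy_used_wh, min_wh))

    failures = []
    record_if_drain_too_low(failures, 0.02)
    if failures:
        print('Test has failed with messages: %s' % failures)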
diff --git a/client/cros/power/power_utils.py b/client/cros/power/power_utils.py
index c8acfac..268563f 100644
--- a/client/cros/power/power_utils.py
+++ b/client/cros/power/power_utils.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,11 +7,13 @@
 import os
 import re
 import shutil
+import sys
 import time
 from autotest_lib.client.bin import utils
 from autotest_lib.client.bin.input.input_device import InputDevice
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros import upstart
+from six.moves import range
 
 
 # Possible display power settings. Copied from chromeos::DisplayPowerState
@@ -146,11 +149,9 @@
              'power:AC_only' when the device has no battery at all.
     """
     try:
-        psu = utils.system_output('mosys psu type')
+        psu = utils.system_output('cros_config /hardware-properties psu-type')
     except Exception:
-        # The psu command for mosys is not included for all platforms. The
-        # assumption is that the device will have a battery if the command
-        # is not found.
+        # Assume battery if unspecified in cros_config.
         return 'power:battery'
 
     psu_str = psu.strip()
@@ -176,24 +177,7 @@
     Returns:
         Boolean, False if known not to have battery, True otherwise.
     """
-    rv = True
-    power_supply = get_power_supply()
-    if power_supply == 'power:battery':
-        # TODO(tbroch) if/when 'power:battery' param is reliable
-        # remove board type logic.  Also remove verbose mosys call.
-        _NO_BATTERY_BOARD_TYPE = ['CHROMEBOX', 'CHROMEBIT', 'CHROMEBASE']
-        board_type = utils.get_board_type()
-        if board_type in _NO_BATTERY_BOARD_TYPE:
-            logging.warn('Do NOT believe type %s has battery. '
-                         'See debug for mosys details', board_type)
-            psu = utils.system_output('mosys -vvvv psu type',
-                                      ignore_status=True)
-            logging.debug(psu)
-            rv = False
-    elif power_supply == 'power:AC_only':
-        rv = False
-
-    return rv
+    return get_power_supply() == 'power:battery'
 
 
 def get_low_battery_shutdown_percent():
@@ -267,7 +251,7 @@
     Raises:
       error.CmdError: if ectool returns non-zero exit status.
     """
-    for i in xrange(ECTOOL_CHARGECONTROL_RETRY_TIMES):
+    for i in range(ECTOOL_CHARGECONTROL_RETRY_TIMES):
         if _charge_control_by_ectool(is_charge, ignore_status):
             return True
 
@@ -295,7 +279,17 @@
                          .*_[cg]pu(freq(_\d+)+)?_\d{3,}_.*|
                          .*cpu(idle|pkg)[ABD-Za-z0-9_\-]+C[^0].*
                          """, re.X)
-    return {k: v for k, v in keyvals.iteritems() if not matcher.match(k)}
+    return {k: v for k, v in keyvals.items() if not matcher.match(k)}
+
+
+# TODO(b/220192766): Remove when Python 2 completely phase out.
+def encoding_kwargs():
+    """Use encoding kwarg if it is running in Python 3+.
+    """
+    if sys.version_info.major > 2:
+        return {'encoding': 'utf-8'}
+    else:
+        return {}
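A short usage sketch of encoding_kwargs(): callers splice it into open() so the same call works under both Python 2 and Python 3, as power_suspend.py does above for /var/log/messages.

    from autotest_lib.client.cros.power import power_utils

    # Opens the file as UTF-8 text under Python 3 and as plain text under Python 2.
    with open('/var/log/messages', **power_utils.encoding_kwargs()) as f:
        last_line = f.readlines()[-1]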
 
 
 class BacklightException(Exception):
@@ -308,9 +302,6 @@
     Public methods:
        set_level: Set backlight level to the given brightness.
        set_percent: Set backlight level to the given brightness percent.
-       set_resume_level: Set backlight level on resume to the given brightness.
-       set_resume_percent: Set backlight level on resume to the given brightness
-                           percent.
        set_default: Set backlight to CrOS default.
 
        get_level: Get backlight level currently.
@@ -319,7 +310,10 @@
        restore: Restore backlight to initial level when instance created.
 
     Public attributes:
-        default_brightness_percent: float of default brightness
+        default_brightness_percent: float of default brightness.
+        force_battery: bool; if True, force backlight_tool to assume that the
+                       device is on battery with AC disconnected; if False,
+                       use the device's real power source.
 
     Private methods:
         _try_bl_cmd: run a backlight command.
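A brief usage sketch of the new force_battery knob: power_test.py above constructs the backlight this way so the default brightness matches battery-powered behavior when discharge is forced.

    from autotest_lib.client.cros.power import power_utils

    # Assume the initial-brightness lookup should behave as if on battery power.
    backlight = power_utils.Backlight(force_battery=True)
    backlight.set_default()
    # ... run the measurement ...
    backlight.restore()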
@@ -333,7 +327,7 @@
     # See http://www.chromium.org/chromium-os/testing/power-testing for more
     # details.
 
-    def __init__(self, default_brightness_percent=0):
+    def __init__(self, default_brightness_percent=0, force_battery=False):
         """Constructor.
 
         attributes:
@@ -352,14 +346,16 @@
             return
 
         if not self.default_brightness_percent:
-            cmd = \
-                "backlight_tool --get_initial_brightness --lux=150 2>/dev/null"
+            force_battery_arg = "--force_battery " if force_battery else ""
+            cmd = ("backlight_tool --get_initial_brightness --lux=150 " +
+                   force_battery_arg + "2>/dev/null")
             try:
                 level = float(utils.system_output(cmd).rstrip())
                 self.default_brightness_percent = \
                     (level / self.get_max_level()) * 100
-                logging.info("Default backlight brightness percent = %f",
-                             self.default_brightness_percent)
+                logging.info("Default backlight brightness percent = %f%s",
+                             self.default_brightness_percent,
+                             " with force battery" if force_battery else "")
             except error.CmdError:
                 self.default_brightness_percent = 40.0
                 logging.warning("Unable to determine default backlight "
@@ -403,22 +399,6 @@
         """
         self._try_bl_cmd('--set_brightness_percent=%f' % (percent))
 
-    def set_resume_level(self, level):
-        """Set backlight level on resume to the given brightness.
-
-        Args:
-          level: integer of brightness to set
-        """
-        self._try_bl_cmd('--set_resume_brightness=%d' % (level))
-
-    def set_resume_percent(self, percent):
-        """Set backlight level on resume to the given brightness percent.
-
-        Args:
-          percent: float between 0 and 100
-        """
-        self._try_bl_cmd('--set_resume_brightness_percent=%f' % (percent))
-
     def set_default(self):
         """Set backlight to CrOS default.
         """
@@ -500,8 +480,8 @@
             raise KbdBacklightException('Keyboard backlight support' +
                                         'is not enabled')
         try:
-            cmd = \
-                "backlight_tool --keyboard --get_initial_brightness 2>/dev/null"
+            cmd = ("backlight_tool --keyboard --get_initial_brightness "
+                   "--lux=0 2>/dev/null")
             self._default_backlight_level = int(
                 utils.system_output(cmd).rstrip())
             logging.info("Default keyboard backlight brightness level = %d",
@@ -534,8 +514,7 @@
         @param percent: float value in the range [0.0, 100.0]
                         to set keyboard backlight to.
         """
-        cmd = ('backlight_tool --keyboard --set_brightness_percent=' +
-               str(percent))
+        cmd = 'backlight_tool --keyboard --set_brightness_percent=%f' % percent
         utils.system(cmd)
 
     def set_level(self, level):
@@ -544,7 +523,7 @@
         Args:
         @param level: level to set keyboard backlight to.
         """
-        cmd = 'backlight_tool --keyboard --set_brightness=' + str(level)
+        cmd = 'backlight_tool --keyboard --set_brightness=%d' % level
         utils.system(cmd)
 
 
@@ -655,7 +634,7 @@
 
     def __init__(self, prefs):
         shutil.copytree(self._PREFDIR, self._TEMPDIR)
-        for name, value in prefs.iteritems():
+        for name, value in prefs.items():
             utils.write_one_line('%s/%s' % (self._TEMPDIR, name), value)
         utils.system('mount --bind %s %s' % (self._TEMPDIR, self._PREFDIR))
         upstart.restart_job('powerd')
@@ -742,7 +721,7 @@
 
     def _verify_registers(self, reg_name, read_fn, match_list):
         errors = 0
-        for k, v in match_list.iteritems():
+        for k, v in match_list.items():
             r = read_fn(k)
             for item in v:
                 good = self._shift_mask_match(reg_name, r, item)
@@ -762,7 +741,7 @@
         @param match_list: match list
         """
         errors = 0
-        for cpu_id in xrange(0, max(utils.count_cpus(), 1)):
+        for cpu_id in range(0, max(utils.count_cpus(), 1)):
             self._cpu_id = cpu_id
             errors += self._verify_registers('msr', self._read_msr, match_list)
         return errors
@@ -862,8 +841,8 @@
         self._alist_file = \
             '/etc/laptop-mode/conf.d/board-specific/usb-autosuspend.conf'
         # TODO b:169251326 terms below are set outside of this codebase
-        # and should be updated when possible. ("WHITELIST" -> "ALLOWLIST")
-        self._alist_vname = '$AUTOSUSPEND_USBID_WHITELIST'
+        # and should be updated when possible. ("WHITELIST" -> "ALLOWLIST") # nocheck
+        self._alist_vname = '$AUTOSUSPEND_USBID_WHITELIST' # nocheck
         self._allowlisted = None
         self.devices = []
 
@@ -969,9 +948,12 @@
         return int(count) if count else None
 
     def _calc_residency(self):
-        """Calculate the PSR residency."""
+        """Calculate the PSR residency.
+
+        @returns: PSR residency in percent or -1 if not able to calculate.
+        """
         if not self.supported:
-            return 0
+            return -1
 
         tdelta = time.time() - self._init_time
         cdelta = self._get_counter() - self._init_counter
diff --git a/client/cros/power/power_videotest.py b/client/cros/power/power_videotest.py
index c1813842..85dd887 100644
--- a/client/cros/power/power_videotest.py
+++ b/client/cros/power/power_videotest.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -31,11 +32,11 @@
 
 
     def initialize(self, seconds_period=3, pdash_note='',
-                   force_discharge=False):
+                   force_discharge=False, run_arc=True):
         """Create and mount ram disk to download video."""
         super(power_VideoTest, self).initialize(
                 seconds_period=seconds_period, pdash_note=pdash_note,
-                force_discharge=force_discharge)
+                force_discharge=force_discharge, run_arc=run_arc)
         utils.run('mkdir -p %s' % self._RAMDISK)
         # Don't throw an exception on errors.
         result = utils.run('mount -t ramfs -o context=u:object_r:tmpfs:s0 '
@@ -104,15 +105,24 @@
         @param secs_per_video: time in seconds to play video and measure power.
         @param use_hw_decode: if False, disable hw video decoding.
         """
-        extra_browser_args = []
+        # --disable-sync disables test account info sync, eg. Wi-Fi credentials,
+        # so that each test run does not remember info from last test run.
+        extra_browser_args = ['--disable-sync']
+        # b/228256145 to avoid powerd restart
+        extra_browser_args.append('--disable-features=FirmwareUpdaterApp')
         if not use_hw_decode:
             extra_browser_args.append(self._DISABLE_HW_VIDEO_DECODE_ARGS)
 
         with chrome.Chrome(extra_browser_args=extra_browser_args,
-                           init_network_controller=True) as self.cr:
-            tab = self.cr.browser.tabs.New()
+                           init_network_controller=True,
+                           arc_mode=self._arc_mode) as self.cr:
+            # Chrome always starts with an empty tab, so we just use that one.
+            tab = self.cr.browser.tabs[0]
             tab.Activate()
 
+            # Stop services again as Chrome might have restarted them.
+            self._services.stop_services()
+
             # Just measure power in full-screen.
             fullscreen = tab.EvaluateJavaScript('document.webkitIsFullScreen')
             if not fullscreen:
diff --git a/client/cros/power/sys_power.py b/client/cros/power/sys_power.py
index f749cfc..036e1b4 100644
--- a/client/cros/power/sys_power.py
+++ b/client/cros/power/sys_power.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -61,12 +62,22 @@
 class FirmwareError(SuspendFailure):
     """String 'ERROR' found in firmware log after resume."""
     ALLOWLIST = [
-        # crosbug.com/36762: no one knows, but it has always been there
-        ('^stumpy', r'PNP: 002e\.4 70 irq size: 0x0000000001 not assigned'),
-        # crbug.com/221538: no one knows what ME errors mean anyway
-        ('^parrot', r'ME failed to respond'),
-        # b/64684441: eve SKU without eMMC
-        ('^eve', r'Card did not respond to voltage select!'),
+            # crosbug.com/36762: no one knows, but it has always been there
+            ('^stumpy', r'PNP: 002e\.4 70 irq size: 0x0000000001 not assigned'
+             ),
+            # crbug.com/221538: no one knows what ME errors mean anyway
+            ('^parrot', r'ME failed to respond'),
+            # b/64684441: eve SKU without eMMC
+            ('^eve', r'Card did not respond to voltage select!'),
+            # b/187561710#comment6: waive mcache error from volteer
+            ('^volteer', r'mcache overflow, should increase CBFS_MCACHE size!'
+             ),
+            # b/221113302: Guybrush does not save the firmware log to cbmem during
+            #              resume, so all errors seen actually occurred during boot.
+            #              This workaround can be removed when b/221231786 is fixed.
+            ('^guybrush', r'.*'),
+            # b/195336611: waive the eMMC error for the non-emmc sku
+            ('^brya', r'No known Realtek reader found'),
     ]
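The allowlist entries above pair a board-name regex with a firmware-log regex. The code that consumes them is not part of this diff, so the following is only a hedged sketch of how such a pair would presumably be matched:

    import re

    def is_allowlisted_sketch(board, log_line, allowlist):
        """Illustrative only: True if any (board_re, message_re) pair matches."""
        return any(re.match(board_re, board) and re.search(msg_re, log_line)
                   for board_re, msg_re in allowlist)

    # e.g. is_allowlisted_sketch('parrot', '... ME failed to respond ...',
    #                            FirmwareError.ALLOWLIST) -> True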
 
 
diff --git a/client/cros/radvd_server.py b/client/cros/radvd_server.py
index a0704d8..e6dc509 100644
--- a/client/cros/radvd_server.py
+++ b/client/cros/radvd_server.py
@@ -57,12 +57,13 @@
     manages startup and cleanup of the process.
     """
 
-    def __init__(self, interface = None):
+    def __init__(self, interface=None, namespace=None):
         if not os.path.exists(RADVD_EXECUTABLE):
             raise error.TestNAError('Could not find executable %s; '
                                     'this is likely an old version of '
                                     'ChromiumOS' %
                                     RADVD_EXECUTABLE)
+        self._namespace = namespace
         self._options = {
             OPTION_INTERFACE: interface,
             OPTION_ADV_ON_LINK: RADVD_DEFAULT_ADV_ON_LINK,
@@ -131,7 +132,10 @@
         """
         self._cleanup()
         self._write_config_file()
-        utils.system('%s -p %s -C %s' %
+        cmd = '%s -p %s -C %s'
+        if self._namespace:
+            cmd = ('ip netns exec %s ' % self._namespace) + cmd
+        utils.system(cmd %
                      (RADVD_EXECUTABLE, RADVD_PID_FILE, RADVD_CONFIG_FILE))
 
     def stop_server(self):
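The namespace support added above simply prefixes the radvd invocation with `ip netns exec <namespace>`. A small sketch of the resulting command construction, with placeholder paths standing in for the module constants:

    def build_radvd_command_sketch(namespace=None,
                                   radvd='/usr/sbin/radvd',
                                   pid_file='/tmp/radvd_test.pid',
                                   config_file='/tmp/radvd_test.conf'):
        """Illustrative only: mirrors the start_server() logic above."""
        cmd = '%s -p %s -C %s' % (radvd, pid_file, config_file)
        if namespace:
            cmd = 'ip netns exec %s %s' % (namespace, cmd)
        return cmd

    # build_radvd_command_sketch('netns0')
    # -> 'ip netns exec netns0 /usr/sbin/radvd -p /tmp/radvd_test.pid -C /tmp/radvd_test.conf'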
diff --git a/client/cros/scripts/device b/client/cros/scripts/device
deleted file mode 100644
index 7b4231a..0000000
--- a/client/cros/scripts/device
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import pprint
-import sys
-
-import common
-from autotest_lib.client.cros.networking import shill_proxy
-
-def usage():
-    """ Prints a script usage message. """
-    cmd = sys.argv[0]
-    print 'Usage: %s <command> [more parameters]' % cmd
-    print 'Example uses:'
-    print cmd, 'list - List devices and their properties.'
-    print cmd, 'get-property <devname> [propname] - List device property.'
-    print cmd, 'set-property <devname> <propname> <value>'
-    print '     Set property on devname to value'
-    return False
-
-
-def set_device_property(device, property_key, value):
-    """Sets a property on a device
-
-    @param device Interface representing a device
-    @param property_key string name of property
-    @param value string value of property to set
-
-    """
-    shill_proxy.ShillProxy.set_dbus_property(device, property_key, value)
-    return True
-
-
-def print_device_properties(device, property_key):
-    """Prints one or all properties on a device
-
-    @param device Interface representing a device
-    @param property_key string name of property or None
-
-    """
-    shill = shill_proxy.ShillProxy()
-    if property_key is None:
-        pprint.pprint(
-                shill.dbus2primitive(device.GetProperties(utf8_strings=True)),
-                indent=2)
-    else:
-        pprint.pprint({property_key:
-                shill_proxy.ShillProxy.get_dbus_property(device, property_key)},
-                indent=2)
-    return True
-
-
-def list_devices():
-    """ Display detailed device information. """
-    shill = shill_proxy.ShillProxy()
-    for device in shill.get_devices():
-        print 'Device: %s' % device.object_path
-        print_device_properties(device, None)
-        print
-    return True
-
-
-def main():
-    """ Main entry point for the device script. """
-    if len(sys.argv) < 2:
-        return usage()
-
-    command = sys.argv[1]
-
-    if command == 'list':
-      return list_devices()
-
-    if len(sys.argv) > 2:
-        shill = shill_proxy.ShillProxy()
-        device = shill.find_object('Device', {'Name': sys.argv[2]})
-        if device is None:
-            print "No device named %s found" % sys.argv[2]
-            return usage()
-
-        if command == 'get-property':
-            return print_device_properties(
-                    device,
-                    None if len(sys.argv) < 4 else sys.argv[3])
-
-        if command == 'set-property' and len(sys.argv) == 5:
-            return set_device_property(
-                    device,
-                    sys.argv[3],
-                    sys.argv[4])
-
-    return usage()
-
-
-if __name__ == '__main__':
-    if not main():
-        sys.exit(1)
diff --git a/client/cros/scripts/profile b/client/cros/scripts/profile
deleted file mode 100755
index 124dd18..0000000
--- a/client/cros/scripts/profile
+++ /dev/null
@@ -1,171 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import pprint
-import sys
-
-import common
-from autotest_lib.client.cros.networking import shill_proxy
-
-def usage():
-    """ Prints a script usage message. """
-    cmd = sys.argv[0]
-    print 'Usage: %s <command> [more parameters]' % cmd
-    print 'Example uses:'
-    print cmd, 'create <name> - Create a profile called |name|.'
-    print cmd, 'push <name> - Push a previously created profile called |name|.'
-    print cmd, 'pop <name> - Pop a profile called |name|.'
-    print cmd, 'pop - Pop the top-most profile.'
-    print cmd, 'remove <name> - Remove a profile called |name| from disk.'
-    print cmd, 'clean - Pop and remove profiles above the default profile.'
-    print cmd, 'list - List profiles and their properties.'
-    print cmd, 'list-entries - List profile entries.'
-    print cmd, 'delete-entry <entry> [name] - Delete an entry from a ' \
-        'profile called |name| or all profiles if no name is given.'
-    return False
-
-
-def print_profile_path(profile_path, active_path):
-    """ Print profile_path and indicate if it is the active profile. """
-    if profile_path == active_path:
-        print 'Profile: %s  <== active' % profile_path
-    else:
-        print 'Profile: %s' % profile_path
-
-
-def clean_profiles():
-    """ Pop and remove all profiles until 'default' is found. """
-    shill = shill_proxy.ShillProxy()
-    while True:
-        active_profile = shill.get_active_profile()
-        properties = active_profile.GetProperties(utf8_strings=True)
-        active_name = shill.dbus2primitive(
-                properties[shill.PROFILE_PROPERTY_NAME])
-        if active_name == 'default':
-            return True
-        else:
-            print 'Removing profile: %s' % active_name
-            shill.manager.PopProfile(active_name)
-            shill.manager.RemoveProfile(active_name)
-
-
-def delete_entry():
-    """
-    Remove an entry from the specified profile, or all profiles if no profile
-    is given.
-
-    """
-    if len(sys.argv) <= 2:
-        return usage()
-    identifier = sys.argv[2]
-    profile_path = None
-    if len(sys.argv) > 3:
-      profile_path = sys.argv[3]
-    shill = shill_proxy.ShillProxy()
-    properties = shill.dbus2primitive(
-        shill.manager.GetProperties(utf8_strings=True))
-    active_profile = shill.get_active_profile()
-    for path in properties[shill.MANAGER_PROPERTY_PROFILES]:
-        print_profile_path(path, active_profile.object_path)
-        if profile_path and path != profile_path:
-            continue
-
-        profile = shill.get_dbus_object(shill.DBUS_TYPE_PROFILE, path)
-        try:
-            profile.DeleteEntry(identifier)
-            print " -> delete succeeded"
-        except:
-            print " -> delete failed"
-
-
-def list_entries():
-    """ Display detailed profile entry information. """
-    shill = shill_proxy.ShillProxy()
-    active_profile = shill.get_active_profile()
-    for profile in shill.get_profiles():
-        print_profile_path(profile.object_path, active_profile.object_path)
-        properties = shill.dbus2primitive(
-                profile.GetProperties(utf8_strings=True))
-        if not shill.PROFILE_PROPERTY_ENTRIES in properties:
-            continue
-        for ident in properties[shill.PROFILE_PROPERTY_ENTRIES]:
-            print 'Entry: %s' % ident
-            pprint.pprint(shill.dbus2primitive(profile.GetEntry(ident)),
-                          indent=2)
-        print
-    return True
-
-
-def list_profiles():
-    """ List shill profiles and their properties. """
-    shill = shill_proxy.ShillProxy()
-    active_profile = shill.get_active_profile()
-    for profile in shill.get_profiles():
-        print_profile_path(profile.object_path, active_profile.object_path)
-        properties = shill.dbus2primitive(
-                profile.GetProperties(utf8_strings=True))
-        pprint.pprint(properties, indent=2)
-        print
-    return True
-
-
-def main():
-    """ Main entry point for the profile script. """
-    if len(sys.argv) < 2:
-        return usage()
-
-    command = sys.argv[1]
-    shill = shill_proxy.ShillProxy()
-
-    if command == 'clean':
-        return clean_profiles()
-
-    if command == 'delete-entry':
-        return delete_entry()
-
-    if command == 'list':
-      return list_profiles()
-
-    if command == 'list-entries':
-        return list_entries()
-
-    if command == 'pop' and len(sys.argv) == 2:
-        shill.manager.PopAnyProfile()
-        print 'Popped profile.'
-        return True
-
-    # All the remaining operations require a profile name.
-    if len(sys.argv) < 3:
-        return usage()
-
-    name = sys.argv[2]
-
-    if command == 'pop':
-        shill.manager.PopProfile(name)
-        print 'Popped profile %s.' % name
-        return True
-
-    if command == 'create':
-        path = shill.manager.CreateProfile(name)
-        print 'New profile created at %s.' % path
-        return True
-
-    if command == 'push':
-        path = shill.manager.PushProfile(name)
-        print 'Pushed profile %s.' % path
-        return True
-
-    if command == 'remove':
-        shill.manager.RemoveProfile(name)
-        print 'Removed profile %s.' % name
-        return True
-
-    return usage()
-
-
-if __name__ == '__main__':
-    if not main():
-        sys.exit(1)
diff --git a/client/cros/scripts/wifi b/client/cros/scripts/wifi
index 4ba3593..111f872 100755
--- a/client/cros/scripts/wifi
+++ b/client/cros/scripts/wifi
@@ -1,52 +1,56 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import os
 import sys
 
+# Prevent Autotest smarts from trying to switch us back to python2.
+os.environ['PY_VERSION'] = '3'
+
 import common
 from autotest_lib.client.cros.networking import wifi_proxy
 
 SERVICE_PROP_PARSERS = {
-    'EAP.AnonymousIdentity': unicode,
-    'EAP.CACertID': unicode,
-    'EAP.CACertNSS': unicode,
-    'EAP.CACertPEM': unicode,
-    'EAP.CertID': unicode,
-    'EAP.ClientCert': unicode,
-    'EAP.EAP': unicode,
-    'EAP.Identity': unicode,
-    'EAP.InnerEAP': unicode,
-    'EAP.KeyID': unicode,
-    'EAP.KeyMgmt': unicode,
-    'EAP.Password': unicode,
-    'EAP.PIN': unicode,
-    'EAP.SubjectMatch': unicode,
+    'EAP.AnonymousIdentity': str,
+    'EAP.CACertID': str,
+    'EAP.CACertNSS': str,
+    'EAP.CACertPEM': str,
+    'EAP.CertID': str,
+    'EAP.ClientCert': str,
+    'EAP.EAP': str,
+    'EAP.Identity': str,
+    'EAP.InnerEAP': str,
+    'EAP.KeyID': str,
+    'EAP.KeyMgmt': str,
+    'EAP.Password': str,
+    'EAP.PIN': str,
+    'EAP.SubjectMatch': str,
     'EAP.UseSystemCAs': bool,
-    wifi_proxy.WifiProxy.SERVICE_PROPERTY_SECURITY_CLASS: unicode,
+    wifi_proxy.WifiProxy.SERVICE_PROPERTY_SECURITY_CLASS: str,
     }
 
 
 def usage():
     """ Prints a usage message and returns False. """
     cmd = sys.argv[0]
-    print 'Usage:'
-    print cmd, 'connect <ssid> [passphrase] [security]'
-    print '    |security| defaults to "psk" when |passphrase|',
-    print 'is given without |security|'
-    print
-    print cmd, 'disconnect <ssid> [timeout seconds]'
-    print
-    print cmd, 'connect_with_props <ssid> <timeout seconds>'
-    print '    <Security=[none|psk|802_1x]> [Property=Value ...]'
-    print '    for Property in:'
-    print '\n'.join(['\t\t' + x for x in sorted(SERVICE_PROP_PARSERS.keys())])
-    print
-    print cmd, 'configure <ssid> [passphrase] [security]'
-    print '    |security| defaults to "psk" when |passphrase|',
-    print 'is given without |security|'
+    print('Usage:')
+    print(cmd, 'connect <ssid> [passphrase] [security]')
+    print('    |security| defaults to "psk" when |passphrase|', end=' ')
+    print('is given without |security|')
+    print()
+    print(cmd, 'disconnect <ssid> [timeout seconds]')
+    print()
+    print(cmd, 'connect_with_props <ssid> <timeout seconds>')
+    print('    <SecurityClass=[none|psk|802_1x]> [Property=Value ...]')
+    print('    for Property in:')
+    print('\n'.join(['\t\t' + x for x in sorted(SERVICE_PROP_PARSERS.keys())]))
+    print()
+    print(cmd, 'configure <ssid> [passphrase] [security]')
+    print('    |security| defaults to "psk" when |passphrase|', end=' ')
+    print('is given without |security|')
     return False
 
 
@@ -58,9 +62,9 @@
     successful = wifi.configure_wifi_service(ssid, security,
                                              security_parameters)
     if successful:
-        print 'Operation succeeded.'
+        print('Operation succeeded.')
     else:
-        print 'Operation failed.'
+        print('Operation failed.')
     return successful
 
 
@@ -88,12 +92,12 @@
             configuration_timeout_seconds=timeout)
     (successful, discovery, association, configuration, reason) = result
     if successful:
-        print 'Operation succeeded.'
+        print('Operation succeeded.')
     else:
-        print 'Operation failed. (%s)' % reason
-    print 'Discovery time: %f.' % discovery
-    print 'Association time: %f.' % association
-    print 'Configuration time: %f.' % configuration
+        print('Operation failed. (%s)' % reason)
+    print('Discovery time: %f.' % discovery)
+    print('Association time: %f.' % association)
+    print('Configuration time: %f.' % configuration)
     return successful
 
 
@@ -113,31 +117,31 @@
     result = wifi.disconnect_from_wifi_network(ssid, timeout)
     (successful, duration, reason) = result
     if successful:
-        print 'Operation succeeded.'
+        print('Operation succeeded.')
     else:
-        print 'Operation failed: %s.' % reason
-    print 'Disconnect time: %f.' % duration
+        print('Operation failed: %s.' % reason)
+    print('Disconnect time: %f.' % duration)
     return successful
 
 
-def parse_security_from_credentials(credentials):
-    """Parses SERVICE_PROPERTY_SECURITY from credentials.
+def parse_security_class_from_credentials(credentials):
+    """Parses SERVICE_PROPERTY_SECURITY_CLASS from credentials.
 
     @param credentials dict of service properties that includes credentials
             like the passphrase for psk security.
-    @return SERVICE_PROPERTY_SECURITY value from credentials,
+    @return SERVICE_PROPERTY_SECURITY_CLASS value from credentials,
             or exit if no such key/value in credentials.
 
     """
     security = credentials.pop(
-            wifi_proxy.WifiProxy.SERVICE_PROPERTY_SECURITY, None)
+            wifi_proxy.WifiProxy.SERVICE_PROPERTY_SECURITY_CLASS, None)
     if security is None:
-        print "Error: security type not provided"
+        print("Error: security type not provided")
         usage()
         sys.exit(1)
 
     if security not in ['none', 'wep', 'psk', '802_1x']:
-        print "Error: invalid security type %s" % security
+        print("Error: invalid security type %s" % security)
         usage()
         sys.exit(1)
 
@@ -154,14 +158,14 @@
     property_name, raw_value = property_string.split('=', 1)
 
     if not property_name in SERVICE_PROP_PARSERS:
-        print '%s is not a recognized service property' % property_name
+        print('%s is not a recognized service property' % property_name)
         usage()
         sys.exit(1)
 
     try:
         return property_name, SERVICE_PROP_PARSERS[property_name](raw_value)
     except:
-        print 'Failed parsing value from %s' % property_string
+        print('Failed parsing value from %s' % property_string)
         usage()
         sys.exit(1)
 
@@ -205,9 +209,9 @@
         timeout = float(args[2])
         credentials = {}
         if len(args) > 3:
-            for i in xrange(3, len(args)):
+            for i in range(3, len(args)):
                 credentials.update((parse_service_property(args[i]),))
-        security = parse_security_from_credentials(credentials)
+        security = parse_security_class_from_credentials(credentials)
         return connect(ssid, security, credentials, save_credentials, timeout)
 
     if command == 'disconnect':
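Under python3 the EAP string properties are parsed with str rather than unicode; SERVICE_PROP_PARSERS simply maps each recognized property name to a callable that converts the raw 'Property=Value' string from the command line. A standalone sketch of the same parsing idea (unlike the script above, it raises instead of printing usage and exiting):

    # Standalone illustration of the Property=Value parsing used by the script.
    SERVICE_PROP_PARSERS = {
        'EAP.Identity': str,
        'EAP.UseSystemCAs': bool,  # note: bool('false') is True in Python
    }

    def parse_service_property(property_string):
        name, raw_value = property_string.split('=', 1)
        if name not in SERVICE_PROP_PARSERS:
            raise ValueError('%s is not a recognized service property' % name)
        return name, SERVICE_PROP_PARSERS[name](raw_value)

    print(parse_service_property('EAP.Identity=user@example.org'))
    # -> ('EAP.Identity', 'user@example.org')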
diff --git a/client/cros/service_stopper.py b/client/cros/service_stopper.py
index 2464f4c..b793341 100644
--- a/client/cros/service_stopper.py
+++ b/client/cros/service_stopper.py
@@ -44,7 +44,7 @@
       _services_stopped: list of services that were successfully stopped
     """
 
-    POWER_DRAW_SERVICES = ['powerd', 'update-engine', 'vnc']
+    POWER_DRAW_SERVICES = ['fwupd', 'powerd', 'update-engine', 'vnc']
 
     # List of thermal throttling services that should be disabled.
     # - temp_metrics for link.
diff --git a/client/cros/storage_tests/fio_test.py b/client/cros/storage_tests/fio_test.py
index e613124..cef4eab 100644
--- a/client/cros/storage_tests/fio_test.py
+++ b/client/cros/storage_tests/fio_test.py
@@ -1,10 +1,16 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import fcntl, logging, os, re, stat, struct, time
 from autotest_lib.client.bin import fio_util, test, utils
 from autotest_lib.client.common_lib import error
+import six
 
 
 class FioTest(test.test):
@@ -20,6 +26,7 @@
     DEFAULT_FILE_SIZE = 1024 * 1024 * 1024
     VERIFY_OPTION = 'v'
     CONTINUE_ERRORS = 'verify'
+    DEVICE_REGEX = r'.*(sd[a-z]|mmcblk[0-9]+|nvme[0-9]+n[0-9]+|loop[0-9]|dm\-[0-9]+)p?[0-9]*'
     REMOVABLE = False
 
     # Initialize fail counter used to determine test pass/fail.
@@ -49,10 +56,11 @@
         # Then read the vendor and model name in its grand-parent directory.
 
         # Obtain the device name by stripping the partition number.
-        # For example, sda3 => sda; mmcblk1p3 => mmcblk1, nvme0n1p3 => nvme0n1.
-        device = re.match(r'.*(sd[a-z]|mmcblk[0-9]+|nvme[0-9]+n[0-9]+)p?[0-9]*',
-                          self.__filename).group(1)
-        findsys = utils.run('find /sys/devices -name %s | grep -v virtual'
+        # For example, sda3 => sda; mmcblk1p3 => mmcblk1, nvme0n1p3 => nvme0n1,
+        # loop1p1 => loop1; dm-1 => dm-1 (no partitions/multipath device
+        # support for device mapper).
+        device = re.match(self.DEVICE_REGEX, self.__filename).group(1)
+        findsys = utils.run('find /sys/devices -name %s'
                             % device)
         device_path = findsys.stdout.rstrip()
 
@@ -183,10 +191,12 @@
            stat.S_ISBLK(os.stat(self.__filename).st_mode) and \
            self.__filesize != 0 and blkdiscard:
             try:
+                logging.info("Doing a blkdiscard using ioctl %s",
+                             self.IOCTL_TRIM_CMD)
                 fd = os.open(self.__filename, os.O_RDWR)
                 fcntl.ioctl(fd, self.IOCTL_TRIM_CMD,
                             struct.pack('QQ', 0, self.__filesize))
-            except IOError, err:
+            except IOError as err:
                 logging.info("blkdiscard failed %s", err)
                 pass
             finally:
@@ -220,13 +230,13 @@
                                 'device': self.__description})
         logging.info('Device Description: %s', self.__description)
         self.write_perf_keyval(results)
-        for k, v in results.iteritems():
+        for k, v in six.iteritems(results):
             if k.endswith('_error'):
                 self._error_code = int(v)
                 if self._error_code != 0 and self._fail_count == 0:
                     self._fail_count = 1
             elif k.endswith('_total_err'):
-                self._fail_count = int(v)
+                self._fail_count += int(v)
         if self._fail_count > 0:
             if self.REMOVABLE and not self.__verify_only:
                 raise error.TestWarn('%s failed verifications, '
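The widened DEVICE_REGEX also strips partition suffixes from loop devices and passes device-mapper nodes through unchanged, matching the updated comment. A quick standalone check of what the pattern (copied from the class constant above) extracts:

    import re

    DEVICE_REGEX = r'.*(sd[a-z]|mmcblk[0-9]+|nvme[0-9]+n[0-9]+|loop[0-9]|dm\-[0-9]+)p?[0-9]*'

    for name in ('/dev/sda3', '/dev/mmcblk1p3', '/dev/nvme0n1p3',
                 '/dev/loop1p1', '/dev/dm-1'):
        print(name, '=>', re.match(DEVICE_REGEX, name).group(1))
    # Prints sda, mmcblk1, nvme0n1, loop1 and dm-1 respectively.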
diff --git a/client/cros/storage_tests/seq_rw_verify b/client/cros/storage_tests/seq_rw_verify
new file mode 100644
index 0000000..fa1e10a
--- /dev/null
+++ b/client/cros/storage_tests/seq_rw_verify
@@ -0,0 +1,27 @@
+; Copyright 2021 The Chromium Authors. All rights reserved.
+; Use of this source code is governed by a BSD-style license that can be
+; found in the LICENSE file.
+;
+; Sequential write performance at the front of the disk
+;
+
+[seq_rw_verify]
+filename=${FILENAME}
+size=${FILESIZE}
+verify_only=${VERIFY_ONLY}
+
+time_based
+runtime=5m
+
+ioengine=libaio
+iodepth=1
+direct=1
+
+readwrite=rw
+rwmixread=10
+bs=512k
+
+do_verify=1
+verify=md5
+verify_dump=1
+continue_on_error=${CONTINUE_ERRORS}
\ No newline at end of file
diff --git a/client/cros/tpm.py b/client/cros/tpm.py
new file mode 100644
index 0000000..1793247
--- /dev/null
+++ b/client/cros/tpm.py
@@ -0,0 +1,82 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities to interact with the TPM on a CrOS device."""
+
+import logging
+import re
+
+import common
+
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+
+CRYPTOHOME_CMD = '/usr/sbin/cryptohome'
+UNAVAILABLE_ACTION = 'Unknown action or no action given.'
+TPM_MANAGER_CMD = '/usr/bin/tpm_manager_client'
+
+
+class ChromiumOSError(error.TestError):
+    """Generic error for ChromiumOS-specific exceptions."""
+
+    pass
+
+
+def get_tpm_status():
+    """Get the TPM status.
+
+    Returns:
+        A TPM status dictionary, for example:
+        { 'Enabled': True,
+          'Owned': True,
+          'Ready': True
+        }
+    """
+    out = run_cmd(TPM_MANAGER_CMD + ' status')
+    status = {}
+    for field in ['enabled', 'owned']:
+        match = re.search('%s: (true|false)' % field, out)
+        if not match:
+            raise ChromiumOSError('Invalid TPM status: "%s".' % out)
+        status[field] = match.group(1) == 'true'
+    status['Enabled'] = status['enabled']
+    status['Owned'] = status['owned']
+    status['Ready'] = status['enabled'] and status['owned']
+    return status
+
+
+def get_tpm_da_info():
+    """Get the TPM dictionary attack information.
+    Returns:
+        A TPM dictionary attack status dictionary, for example:
+        {
+          'dictionary_attack_counter': 0,
+          'dictionary_attack_threshold': 200,
+          'dictionary_attack_lockout_in_effect': False,
+          'dictionary_attack_lockout_seconds_remaining': 0
+        }
+    """
+    status = {}
+    out = run_cmd(TPM_MANAGER_CMD + ' get_da_info')
+    for line in out.splitlines()[1:-1]:
+        items = line.strip().split(':')
+        if len(items) != 2:
+            continue
+        if items[1].strip() == 'false':
+            value = False
+        elif items[1].strip() == 'true':
+            value = True
+        elif items[1].split('(')[0].strip().isdigit():
+            value = int(items[1].split('(')[0].strip())
+        else:
+            value = items[1].strip(' "')
+        status[items[0].strip()] = value
+    return status
+
+
+
+def run_cmd(cmd):
+    """Run a command on utils.system_output, and append '2>&1'."""
+    return utils.system_output(cmd + ' 2>&1', retain_output=True,
+                               ignore_status=True).strip()
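The new client/cros/tpm.py helpers shell out to tpm_manager_client and parse its plain-text output into dictionaries. A short usage sketch for a client test running on the DUT (the printed values are only illustrative, mirroring the docstring examples above):

    import common
    from autotest_lib.client.cros import tpm

    status = tpm.get_tpm_status()
    if not status['Ready']:
        # 'Ready' is True only when the TPM is both enabled and owned.
        print('TPM not ready: %r' % status)

    da_info = tpm.get_tpm_da_info()
    # e.g. {'dictionary_attack_counter': 0,
    #       'dictionary_attack_threshold': 200, ...}
    if da_info.get('dictionary_attack_lockout_in_effect'):
        print('TPM is in dictionary-attack lockout')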
diff --git a/client/cros/update_engine/dlc_util.py b/client/cros/update_engine/dlc_util.py
index fd4f9b5..bf12881 100644
--- a/client/cros/update_engine/dlc_util.py
+++ b/client/cros/update_engine/dlc_util.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -37,6 +38,14 @@
                          overwritten to run remotely from a server test.
 
         """
+        self.set_run(run_func)
+
+    def set_run(self, run_func):
+        """Initializes the run function if it has been changed.
+
+        @param run_func: See __init__.
+
+        """
         self._run = run_func
 
 
@@ -141,4 +150,4 @@
         @return True if the DLC is installed, False if it's not.
 
         """
-        return dlc_id in self.list().keys()
+        return dlc_id in self.list()
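set_run() exists so a server-side test can re-point an already constructed utility object at a remote run function, per the __init__ docstring above. A hedged fragment (the 'dlc' instance, the server-test 'host' object, and the 'pita' DLC id are all illustrative):

    # 'dlc' is an instance of the utility class in dlc_util.py, originally
    # constructed with a local run function; 'host' is a server-test host.
    dlc.set_run(host.run)  # execute dlc commands on the DUT from now on
    if not dlc.is_installed('pita'):
        raise RuntimeError('expected DLC pita to be installed')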
diff --git a/client/cros/update_engine/nebraska_wrapper.py b/client/cros/update_engine/nebraska_wrapper.py
index f4a9b60..bf486b2 100644
--- a/client/cros/update_engine/nebraska_wrapper.py
+++ b/client/cros/update_engine/nebraska_wrapper.py
@@ -1,13 +1,20 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import errno
 import json
 import logging
 import os
 import requests
 import subprocess
-import urlparse
+import six
+import six.moves.urllib.parse
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import autotemp
@@ -20,6 +27,11 @@
 KEY_METADATA_SIZE='metadata_size'
 KEY_SHA256='sha256_hex'
 
+# Path to the startup config file.
+NEBRASKA_DIR = '/usr/local/nebraska'
+NEBRASKA_CONFIG = os.path.join(NEBRASKA_DIR, 'config.json')
+NEBRASKA_METADATA_DIR = os.path.join(NEBRASKA_DIR, 'metadata')
+
 
 class NebraskaWrapper(object):
     """
@@ -30,7 +42,11 @@
 
     """
 
-    def __init__(self, log_dir=None, payload_url=None, **props_to_override):
+    def __init__(self,
+                 log_dir=None,
+                 payload_url=None,
+                 persist_metadata=False,
+                 **props_to_override):
         """
         Initializes the NebraskaWrapper module.
 
@@ -40,6 +56,12 @@
                             or a list of URLs to return multiple payload URLs
                             (such as a platform payload + DLC payloads) in the
                             responses.
+        @param persist_metadata: True to store the update and install metadata
+                                 in a location that will survive a reboot. Use
+                                 this if you plan on starting nebraska at
+                                 system startup using a conf file. If False,
+                                 the metadata will be stored in /tmp and will
+                                 not persist after rebooting the device.
         @param props_to_override: Dictionary of key/values to use in responses
                 instead of the default values in payload_url's properties file.
 
@@ -59,14 +81,23 @@
         self._install_metadata_dir = None
         self._install_payloads_address = None
 
-        # Create a temporary directory for the metadata and download the
-        # metadata files.
+        # Download the metadata files and save them in a tempdir for general
+        # use, or in a directory that will survive reboot if we want nebraska
+        # to be up after a reboot. If saving to a tempdir, save a reference
+        # to it to ensure its reference count does not go to zero causing the
+        # directory to be deleted.
         if payload_url:
             # Normalize payload_url to be a list.
             if not isinstance(payload_url, list):
                 payload_url = [payload_url]
 
-            self._update_metadata_dir = autotemp.tempdir()
+            if persist_metadata:
+                self._create_nebraska_dir(metadata=True)
+                self._update_metadata_dir = NEBRASKA_METADATA_DIR
+            else:
+                self._tempdir = autotemp.tempdir()
+                self._update_metadata_dir = self._tempdir.name
+
             self._update_payloads_address = ''.join(
                 payload_url[0].rpartition('/')[0:2])
             # We can reuse _update_metadata_dir and _update_payloads_address
@@ -81,9 +112,9 @@
             self._install_payloads_address = self._update_payloads_address
 
             for url in payload_url:
-                self.get_payload_properties_file(
-                    url, self._update_metadata_dir.name,
-                    **props_to_override)
+                self.get_payload_properties_file(url,
+                                                 self._update_metadata_dir,
+                                                 **props_to_override)
 
     def __enter__(self):
         """So that NebraskaWrapper can be used as a Context Manager."""
@@ -114,11 +145,11 @@
         if self._log_dir:
             cmd += ['--log-file', os.path.join(self._log_dir, 'nebraska.log')]
         if self._update_metadata_dir:
-            cmd += ['--update-metadata', self._update_metadata_dir.name]
+            cmd += ['--update-metadata', self._update_metadata_dir]
         if self._update_payloads_address:
             cmd += ['--update-payloads-address', self._update_payloads_address]
         if self._install_metadata_dir:
-            cmd += ['--install-metadata', self._install_metadata_dir.name]
+            cmd += ['--install-metadata', self._install_metadata_dir]
         if self._install_payloads_address:
             cmd += ['--install-payloads-address',
                     self._install_payloads_address]
@@ -168,12 +199,13 @@
         """
 
         query = '&'.join('%s=%s' % (k, v) for k, v in kwargs.items())
-        url = urlparse.SplitResult(scheme='http',
-                                   netloc='127.0.0.1:%d' % self._port,
-                                   path='/update',
-                                   query=query,
-                                   fragment='')
-        return urlparse.urlunsplit(url)
+        url = six.moves.urllib.parse.SplitResult(scheme='http',
+                                                 netloc='127.0.0.1:%d' %
+                                                 self._port,
+                                                 path='/update',
+                                                 query=query,
+                                                 fragment='')
+        return six.moves.urllib.parse.urlunsplit(url)
 
     def get_payload_properties_file(self, payload_url, target_dir, **kwargs):
         """
@@ -190,7 +222,7 @@
         try:
             response = json.loads(requests.get(payload_props_url).text)
             # Override existing keys if any.
-            for k, v in kwargs.iteritems():
+            for k, v in six.iteritems(kwargs):
                 # Don't set default None values. We don't want to override good
                 # values to None.
                 if v is not None:
@@ -204,3 +236,59 @@
             raise error.TestError(
                 'Failed to get update payload properties: %s with error: %s' %
                 (payload_props_url, err))
+
+    def update_config(self, **kwargs):
+        """
+        Updates the current running nebraska's config.
+
+        @param kwargs: A dictionary of key/values to update the nebraska's
+                       config.  See platform/dev/nebraska/nebraska.py for more
+                       information.
+
+        """
+        requests.post('http://127.0.0.1:%d/update_config' % self._port,
+                      json=kwargs)
+
+    def _create_nebraska_dir(self, metadata=True):
+        """
+        Creates /usr/local/nebraska for storing the startup conf and
+        persistent metadata files.
+
+        @param metadata: True to create a subdir for metadata.
+
+        """
+        dir_to_make = NEBRASKA_DIR
+        if metadata:
+            dir_to_make = NEBRASKA_METADATA_DIR
+        try:
+            os.makedirs(dir_to_make)
+        except OSError as e:
+            if errno.EEXIST != e.errno:
+                raise error.TestError('Failed to create %s with error: %s' %
+                                      (dir_to_make, e))
+
+    def create_startup_config(self, **kwargs):
+        """
+        Creates a nebraska startup config file. If this file is present, nebraska
+        will start before update_engine does during system startup.
+
+        @param kwargs: A dictionary of key/values for nebraska config options.
+                       See platform/dev/nebraska/nebraska.py for more info.
+
+        """
+        conf = {}
+        if self._update_metadata_dir:
+            conf['update_metadata'] = self._update_metadata_dir
+        if self._update_payloads_address:
+            conf['update_payloads_address'] = self._update_payloads_address
+        if self._install_metadata_dir:
+            conf['install_metadata'] = self._install_metadata_dir
+        if self._install_payloads_address:
+            conf['install_payloads_address'] = self._install_payloads_address
+
+        for k, v in six.iteritems(kwargs):
+            conf[k] = v
+
+        self._create_nebraska_dir()
+        with open(NEBRASKA_CONFIG, 'w') as fp:
+            json.dump(conf, fp)
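persist_metadata=True together with create_startup_config() is what lets nebraska survive a reboot: payload metadata goes under /usr/local/nebraska/metadata and the startup config to /usr/local/nebraska/config.json, which is read at boot so nebraska comes up before update_engine. A minimal sketch, assuming the wrapper's context manager starts the server; the payload URL and the critical_update key are illustrative (see platform/dev/nebraska/nebraska.py for accepted config keys):

    import common
    from autotest_lib.client.cros.update_engine import nebraska_wrapper

    payload_url = 'http://127.0.0.1:8082/static/payload.bin'  # illustrative

    with nebraska_wrapper.NebraskaWrapper(
            log_dir='/var/log', payload_url=payload_url,
            persist_metadata=True) as nebraska:
        # Writes /usr/local/nebraska/config.json; with it present, nebraska
        # is started again at boot, before update_engine.
        nebraska.create_startup_config(critical_update=True)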
diff --git a/client/cros/update_engine/update_engine_event.py b/client/cros/update_engine/update_engine_event.py
index ed26ef4..4484cd1 100644
--- a/client/cros/update_engine/update_engine_event.py
+++ b/client/cros/update_engine/update_engine_event.py
@@ -1,8 +1,14 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 # Update event types.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import six
+
 EVENT_TYPE_DOWNLOAD_COMPLETE = 1
 EVENT_TYPE_INSTALL_COMPLETE = 2
 EVENT_TYPE_UPDATE_COMPLETE = 3
@@ -65,8 +71,10 @@
 
     def __str__(self):
         """Returns a comma separated list of the event data."""
-        return '{%s}' % ', '.join(['%s:%s' % (k, v) for k, v in
-                                   self._expected_attrs.iteritems()])
+        return '{%s}' % ', '.join([
+                '%s:%s' % (k, v)
+                for k, v in six.iteritems(self._expected_attrs)
+        ])
 
     def equals(self, actual_event):
         """
@@ -81,11 +89,10 @@
 
         """
         mismatched_attrs = []
-        for expected_name, expected_val in self._expected_attrs.iteritems():
+        for expected_name, expected_val in six.iteritems(self._expected_attrs):
             actual_val = actual_event.get(expected_name)
             if (expected_val and (actual_val is None or
                                   expected_val != actual_val)):
                 mismatched_attrs.append(expected_name)
 
         return mismatched_attrs if mismatched_attrs else None
-
diff --git a/client/cros/update_engine/update_engine_test.py b/client/cros/update_engine/update_engine_test.py
index c3c39d7..bf6a846 100644
--- a/client/cros/update_engine/update_engine_test.py
+++ b/client/cros/update_engine/update_engine_test.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,7 +6,7 @@
 import logging
 import shutil
 import time
-import urlparse
+import six.moves.urllib.parse
 
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
@@ -97,17 +98,16 @@
             raise error.TestFail('Disabling the internet connection failed.')
 
 
-    def _disconnect_reconnect_network_test(self, update_url,
-                                          time_without_network=120,
-                                          accepted_movement=0.015):
+    def _disconnect_reconnect_network_test(self,
+                                           time_without_network=25,
+                                           accepted_movement=0.015,
+                                           ping_server='google.com'):
         """
         Disconnects the network for a period of time, verifies that the update
         pauses, reconnects the network, and ensures that the update picks up
         from where it left off. This will be used as a part of
         autoupdate_ForcedOOBEUpdate.interrupt and autoupdate_Interruptions.
 
-        @param update_url: The update url used by the test. We will ping it to
-                           check whether we are online/offline.
         @param time_without_network: Duration of the network disconnection in
                                      seconds.
         @param accepted_movement: Acceptable movement of update_engine
@@ -115,17 +115,17 @@
                                   Sometimes when network is disabled
                                   update_engine progress will move a little,
                                   which can cause false positives.
+        @param ping_server: The server to ping to check we are online.
 
         """
         logging.info('Starting network interruption check.')
         if self._is_update_finished_downloading():
             raise error.TestFail('The update has already finished before we '
                                  'can disconnect network.')
-        self._update_server = urlparse.urlparse(update_url).hostname
         self._disable_internet()
 
         # Check that we are offline.
-        result = utils.ping(self._update_server, deadline=5, timeout=5)
+        result = utils.ping(ping_server, deadline=5, timeout=5)
         if result != 2:
             raise error.TestFail('Ping succeeded even though we were offline.')
 
diff --git a/client/cros/update_engine/update_engine_util.py b/client/cros/update_engine/update_engine_util.py
index ce7aab7..40808db 100644
--- a/client/cros/update_engine/update_engine_util.py
+++ b/client/cros/update_engine/update_engine_util.py
@@ -1,18 +1,28 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from ctypes import c_size_t
 import datetime
+import json
 import logging
 import os
 import re
 import shutil
 import time
-import urlparse
+import six
+from six.moves import range
+import six.moves.urllib_parse as urlparse
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros import kernel_utils
+from autotest_lib.client.cros.update_engine import nebraska_wrapper
 from autotest_lib.client.cros.update_engine import update_engine_event
 
 _DEFAULT_RUN = utils.run
@@ -59,6 +69,7 @@
     _UPDATE_ENGINE_LOG_DIR = '/var/log/update_engine/'
     _CUSTOM_LSB_RELEASE = '/mnt/stateful_partition/etc/lsb-release'
     _UPDATE_ENGINE_PREFS_DIR = '/var/lib/update_engine/prefs/'
+    _STATEFUL_MOUNT_DIR = '/mnt/stateful_partition/'
 
     # Update engine prefs
     _UPDATE_CHECK_RESPONSE_HASH = 'update-check-response-hash'
@@ -78,6 +89,15 @@
     _BEFORE_INTERRUPT_FILENAME = 'before_interrupt.png'
     _AFTER_INTERRUPT_FILENAME = 'after_interrupt.png'
 
+    # Test name
+    _CLIENT_TEST = 'autoupdate_CannedOmahaUpdate'
+
+    # Feature name
+    _REPEATED_UPDATES_FEATURE = 'feature-repeated-updates'
+
+    # Credentials to use for the fake login in login tests.
+    _LOGIN_TEST_USERNAME = 'autotest'
+    _LOGIN_TEST_PASSWORD = 'password'
 
     def __init__(self, run_func=_DEFAULT_RUN, get_file=_DEFAULT_COPY):
         """
@@ -188,10 +208,32 @@
         """
         self._wait_for_update_status(self._UPDATE_STATUS_UPDATED_NEED_REBOOT)
         if check_kernel_after_update:
-          kernel_utils.verify_kernel_state_after_update(
-              self._host if hasattr(self, '_host') else None)
+            kernel_utils.verify_kernel_state_after_update(
+                    self._host if hasattr(self, '_host') else None)
 
 
+    def _wait_for_update_to_idle(self,
+                                 check_kernel_after_update=False,
+                                 inactive_kernel=False):
+        """
+        Wait for update status to reach IDLE.
+
+        @param check_kernel_after_update: True to also verify kernel state after
+                                          the update is the expected kernel.
+        @param inactive_kernel: True to indicate the expected kernel is the
+                                inactive kernel.
+
+        """
+        while True:
+            status = self._get_update_engine_status()
+            if self._is_update_engine_idle(status):
+                break
+            time.sleep(1)
+        if check_kernel_after_update:
+            kernel_utils.verify_kernel_state_after_update(
+                    self._host if hasattr(self, '_host') else None,
+                    inactive_kernel)
+
     def _wait_for_update_status(self, status_to_wait_for):
         """
         Wait for the update to reach a certain status.
@@ -245,8 +287,11 @@
         return status_dict
 
 
-    def _check_update_engine_log_for_entry(self, entry, raise_error=False,
+    def _check_update_engine_log_for_entry(self,
+                                           entry,
+                                           raise_error=False,
                                            err_str=None,
+                                           min_count=1,
                                            update_engine_log=None):
         """
         Checks for entries in the update_engine log.
@@ -254,6 +299,8 @@
         @param entry: String or tuple of strings to search for.
         @param raise_error: Fails tests if log doesn't contain entry.
         @param err_str: The error string to raise if we cannot find entry.
+        @param min_count: The minimum number of times each item should be
+                          found in the log. Default one.
         @param update_engine_log: Update engine log string you want to
                                   search. If None, we will read from the
                                   current update engine log.
@@ -268,7 +315,7 @@
         if not update_engine_log:
             update_engine_log = self._get_update_engine_log()
 
-        if all(msg in update_engine_log for msg in entry):
+        if all(update_engine_log.count(msg) >= min_count for msg in entry):
             return True
 
         if not raise_error:
@@ -280,6 +327,20 @@
         raise error.TestFail(err_str if err_str else error_str)
 
 
+    def _set_feature(self, feature_name, enable=True):
+        """
+        Enables or disables feature from update engine client.
+        @param feature_name: Name of the feature to enable or disable
+        @param enable: Enables feature if true, disables if false.
+                       Default True.
+        """
+        if not enable:
+            feature_request = '--disable_feature=' + feature_name
+        else:
+            feature_request = '--enable_feature=' + feature_name
+        self._run([self._UPDATE_ENGINE_CLIENT_CMD, feature_request])
+
+
     def _is_update_finished_downloading(self, status=None):
         """
         Checks if the update has moved to the final stages.
@@ -379,7 +440,7 @@
             logging.debug('Comparing %d and %d', int(before_match[i]),
                           int(after_match[i]))
             if int(before_match[i]) > int(after_match[i]):
-              return False
+                return False
         return True
 
 
@@ -525,20 +586,34 @@
 
         @param update_url: String of url to use for update check.
         @param build: String of the build number to use. Represents the
-                      Chrome OS build this device thinks it is on.
+                      ChromeOS build this device thinks it is on.
         @param kwargs: A dictionary of key/values to be made into a query string
                        and appended to the update_url
 
         """
         update_url = self._append_query_to_url(update_url, kwargs)
+        release_version = 'CHROMEOS_RELEASE_VERSION=%s' % build
+        auserver = 'CHROMEOS_AUSERVER=%s' % update_url
 
         self._run(['mkdir', os.path.dirname(self._CUSTOM_LSB_RELEASE)],
                   ignore_status=True)
         self._run(['touch', self._CUSTOM_LSB_RELEASE])
-        self._run(['echo', 'CHROMEOS_RELEASE_VERSION=%s' % build, '>',
-                   self._CUSTOM_LSB_RELEASE])
-        self._run(['echo', 'CHROMEOS_AUSERVER=%s' % update_url, '>>',
-                   self._CUSTOM_LSB_RELEASE])
+        self._run(['echo', release_version, '>', self._CUSTOM_LSB_RELEASE])
+        self._run(['echo', auserver, '>>', self._CUSTOM_LSB_RELEASE])
+
+        # Confirm the custom lsb-release file was created successfully.
+        def custom_lsb_created():
+            """
+            Checks if the custom lsb-release file exists and has the correct
+            contents.
+
+            @returns: True if the file exists with the expected contents
+                      False otherwise
+            """
+            contents = self._run(['cat', self._CUSTOM_LSB_RELEASE]).stdout
+            return auserver in contents and release_version in contents
+
+        utils.poll_for_condition(condition=custom_lsb_created)
 
 
     def _clear_custom_lsb_release(self):
@@ -551,16 +626,34 @@
         self._run(['rm', self._CUSTOM_LSB_RELEASE], ignore_status=True)
 
 
-    def _remove_update_engine_pref(self, pref):
+    def _remove_update_engine_pref(self, pref, is_dir=False):
         """
-        Delete an update_engine pref file.
+        Delete an update_engine pref file or directory.
 
         @param pref: The pref file to delete
+        @param is_dir: True for removing a whole pref subdirectory.
 
         """
         pref_file = os.path.join(self._UPDATE_ENGINE_PREFS_DIR, pref)
-        self._run(['rm', pref_file], ignore_status=True)
+        self._run(['rm', '-r' if is_dir else '', pref_file],
+                  ignore_status=True)
 
+    def _create_update_engine_pref(self, pref_name, pref_val="", sub_dir=None):
+        """
+        Create an update_engine pref file.
+
+        @param pref_name: The name of pref file to create.
+        @param pref_val: The content string in pref file.
+        @param sub_dir: The sub directory for the pref.
+
+        """
+        pref_dir = self._UPDATE_ENGINE_PREFS_DIR
+        if sub_dir:
+            pref_dir = os.path.join(pref_dir, sub_dir)
+            self._run(['mkdir', '-p', pref_dir], ignore_status=True)
+
+        pref_file = os.path.join(pref_dir, pref_name)
+        self._run(['echo', '-n', pref_val, '>', pref_file])
 
     def _get_update_requests(self):
         """
@@ -586,22 +679,43 @@
         """
         update_log = self._get_update_engine_log()
 
-        # Matches any single line with "MMDD/HHMMSS ... Request ... xml", e.g.
-        # "[0723/133526:INFO:omaha_request_action.cc(794)] Request: <?xml".
-        result = re.findall(r'([0-9]{4}/[0-9]{6}).*Request.*xml', update_log)
-        if not result:
-            return None
+        # Matches any line with "YYYY-MM-DDTHH:MM:SS ... Request ... xml",
+        # e.g.
+        # "2021-01-28T10:14:33.998217Z INFO update_engine: \
+        # [omaha_request_action.cc(794)] Request: <?xml"
+        pattern = r'(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}).* Request:.*xml'
+        LOG_TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%S'
 
-        LOG_TIMESTAMP_FORMAT = '%m%d/%H%M%S'
-        match = result[-1]
+        result = re.findall(pattern, update_log)
 
-        # The log does not include the year, so set it as this year.
-        # This assumption could cause incorrect behavior, but is unlikely to.
-        current_year = datetime.datetime.now().year
-        log_datetime = datetime.datetime.strptime(match, LOG_TIMESTAMP_FORMAT)
-        log_datetime = log_datetime.replace(year=current_year)
+        if result:
+            match = result[-1]
+            log_datetime = datetime.datetime.strptime(match,
+                                                      LOG_TIMESTAMP_FORMAT)
+            epoch = datetime.datetime(1970, 1, 1)
 
-        return time.mktime(log_datetime.timetuple())
+            # Since log_datetime is in UTC, simply take the diff from epoch.
+            return (log_datetime - epoch).total_seconds()
+        else:
+            # If no match for new timestamp, try old timestamp format.
+            # "[0723/133526:INFO:omaha_request_action.cc(794)] Request: <?xml".
+            pattern_old = r'([0-9]{4}/[0-9]{6}).*Request.*xml'
+            LOG_TIMESTAMP_FORMAT_OLD = '%m%d/%H%M%S'
+
+            result = re.findall(pattern_old, update_log)
+            if not result:
+                return None
+
+            match = result[-1]
+
+            # The old format does not include the year, so set it as this year.
+            # This could cause incorrect behavior, but is unlikely to.
+            current_year = datetime.datetime.now().year
+            log_datetime = datetime.datetime.strptime(
+                    match, LOG_TIMESTAMP_FORMAT_OLD)
+            log_datetime = log_datetime.replace(year=current_year)
+
+            return time.mktime(log_datetime.timetuple())
 
 
     def _take_screenshot(self, filename):
@@ -643,9 +757,9 @@
         targets = [line for line in log if err_str in line]
         logging.debug('Error lines found: %s', targets)
         if not targets:
-          return None
+            return None
         else:
-          return targets[-1].rpartition(err_str)[2]
+            return targets[-1].rpartition(err_str)[2]
 
 
     def _get_latest_initial_request(self):
@@ -669,7 +783,7 @@
             return None
 
         MATCH_STR = r'eventtype="(.*?)"'
-        for i in xrange(len(requests) - 1, -1, -1):
+        for i in range(len(requests) - 1, -1, -1):
             search = re.search(MATCH_STR, requests[i])
             if (not search or
                 (search.group(1) ==
@@ -677,3 +791,54 @@
                 return requests[i]
 
         return None
+
+    def _edit_nebraska_startup_config(self, **kwargs):
+        """
+        Edits an existing nebraska startup config file.
+
+        @param kwargs: A dictionary of key/values for nebraska config options.
+                       See platform/dev/nebraska/nebraska.py for more info.
+
+        """
+        conf = json.loads(
+                self._run(['cat', nebraska_wrapper.NEBRASKA_CONFIG]).stdout)
+        for k, v in six.iteritems(kwargs):
+            conf[k] = v
+        self._run([
+                'echo',
+                json.dumps(conf), '>', nebraska_wrapper.NEBRASKA_CONFIG
+        ])
+
+    def _clear_nebraska_dir(self):
+        """
+        Clears the nebraska dir on the DUT where the nebraska config and payload
+        metadata files are stored.
+
+        """
+        self._run(['rm', '-rf', '/usr/local/nebraska'])
+
+    def _get_nebraska_update_url(self):
+        """
+        Gets the update URL for an active nebraska server. Assumes nebraska is
+        up and running.
+
+        @returns: string of the update URL for the active nebraska.
+
+        """
+        nebraska_port = self._run(['cat', '/run/nebraska/port']).stdout
+        return 'http://localhost:%s/update' % nebraska_port
+
+    def _get_exclusion_name(self, payload_url):
+        """
+        Get the exclusion name of a payload url by calculating its hash in the
+        same way of base::StringPieceHash in libchrome.
+
+        @param payload_url: The payload url to be excluded.
+
+        @returns: The payload URL hash string as the exclusion name.
+
+        """
+        result = c_size_t(0)
+        for c in payload_url:
+            result = c_size_t((result.value * 131) + ord(c))
+        return str(result.value)
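_get_exclusion_name() reproduces base::StringPieceHash from libchrome: a multiply-by-131 polynomial hash accumulated in a size_t, so intermediate values wrap at the platform's word size. The computation can be exercised on its own like this (a standalone mirror of the method above; the URL is illustrative):

    from ctypes import c_size_t

    def string_piece_hash(payload_url):
        """Hashes a string the way base::StringPieceHash does."""
        result = c_size_t(0)
        for c in payload_url:
            # c_size_t truncates, so this wraps modulo 2**64 (or 2**32).
            result = c_size_t((result.value * 131) + ord(c))
        return str(result.value)

    print(string_piece_hash('http://127.0.0.1:8082/static/payload.bin'))
    # Prints a decimal string; the exact value depends on the URL and the
    # platform's size_t width.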
diff --git a/client/cros/verity_utils.py b/client/cros/verity_utils.py
index b76047c..92aa897 100644
--- a/client/cros/verity_utils.py
+++ b/client/cros/verity_utils.py
@@ -130,7 +130,7 @@
         self.reset()
 
     def _create_image(self):
-        """Creates a dummy file."""
+        """Creates a placeholder file."""
         # TODO(wad) replace with python
         utils.system_output(self.dd_cmd % (self.file, self.blocks))
 
diff --git a/client/cros/video/builtin_html5_player.py b/client/cros/video/builtin_html5_player.py
new file mode 100644
index 0000000..d9c1fd8
--- /dev/null
+++ b/client/cros/video/builtin_html5_player.py
@@ -0,0 +1,199 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+import py_utils
+
+from autotest_lib.client.cros.video import video_player
+
+
+class BuiltinHtml5Player(video_player.VideoPlayer):
+    """
+    Provides an interface to interact with native html5 player in chrome.
+
+    """
+
+
+    def inject_source_file(self):
+        """
+        Injects the path to the video file under test into the html doc.
+
+
+        """
+        self.tab.ExecuteJavaScript(
+            'loadVideoSource("%s")' % self.video_src_path)
+
+
+    def is_video_ready(self):
+        """
+        Determines if a native html5 video is ready by using javascript.
+
+        returns: bool, True if video is ready, else False.
+
+        """
+        return self.tab.EvaluateJavaScript('canplay()')
+
+
+    def is_javascript_ready(self):
+        """
+        returns: True if the javascript variables and functions have been
+                 defined, else False.
+
+
+        """
+        return self.tab.EvaluateJavaScript(
+                    'typeof script_ready!="undefined" && script_ready == true')
+
+
+    def play(self):
+        """
+        Plays the video.
+
+        """
+        self.tab.ExecuteJavaScript('play()')
+
+
+    def pause(self):
+        """
+        Pauses the video.
+
+        """
+        self.tab.ExecuteJavaScript('pause()')
+
+
+    def paused(self):
+        """
+        Checks whether the video is paused.
+
+        """
+        cmd = '%s.paused' % self.video_id
+        return self.tab.EvaluateJavaScript(cmd)
+
+
+    def ended(self):
+        """
+        Checks whether the video has ended.
+
+        """
+        cmd = '%s.ended' % self.video_id
+        return self.tab.EvaluateJavaScript(cmd)
+
+
+    def currentTime(self):
+        """
+        Returns the current time of the video element.
+
+        """
+        return self.tab.EvaluateJavaScript('currentTime()')
+
+
+    def seek_to(self, t):
+        """
+        Seeks a video to a time stamp.
+
+        @param t: timedelta, time value to seek to.
+
+        """
+        cmd = '%s.currentTime=%.3f' % (self.video_id, t.total_seconds())
+        self.tab.ExecuteJavaScript(cmd)
+
+
+    def has_video_finished_seeking(self):
+        """
+        Determines if the video has finished seeking.
+
+        """
+        return self.tab.EvaluateJavaScript('finishedSeeking()')
+
+
+    def wait_for_error(self):
+        """
+        Waits until the video reports an error (up to 30 seconds).
+
+        """
+        return self.tab.WaitForJavaScriptCondition('errorDetected();',
+                                                   timeout=30)
+
+
+    def reload_page(self):
+        """
+        Reloads the current page.
+
+        """
+        self.tab.ExecuteJavaScript('location.reload()')
+
+
+    def enable_VideoControls(self):
+        """
+        Enables the video controls.
+
+        """
+        self.tab.ExecuteJavaScript('setControls()')
+
+
+    def dropped_frame_count(self):
+        """
+        Gets the number of dropped frames.
+
+        @returns: An integer indicating the number of dropped frames.
+
+        """
+        cmd = '%s.webkitDroppedFrameCount' % self.video_id
+        return self.tab.EvaluateJavaScript(cmd)
+
+
+    def duration(self):
+        """
+        Gets the duration of the video.
+
+        @returns: A number indicating the duration of the video in seconds.
+
+        """
+        cmd = '%s.duration' % self.video_id
+        return self.tab.EvaluateJavaScript(cmd)
+
+
+    def wait_video_ended(self):
+        """
+        Waits until the video playback has ended.
+
+        """
+        cmd = '%s.ended' % self.video_id
+        self.tab.WaitForJavaScriptCondition(cmd, timeout=(self.duration() * 2))
+
+
+    def wait_ended_or_error(self):
+        """
+        Waits until the video ends or an error happens.
+
+        """
+        try:
+            # unit of timeout is second.
+            self.tab.WaitForJavaScriptCondition('endOrError()',
+                                                timeout=(self.duration() + 30))
+        except py_utils.TimeoutException:
+            logging.error('Timeout in waiting endOrError()')
+            raise
+
+
+    def check_error(self):
+        """
+        Checks whether an error has occurred.
+
+        """
+        return self.tab.EvaluateJavaScript('errorDetected()')
+
+
+    def get_error_info(self):
+        """
+        Gets the error code and message.
+        @returns string, string: the error code and message.
+
+        """
+        error_code = self.tab.EvaluateJavaScript(
+                          '%s.error.code' % self.video_id)
+        error_message = self.tab.EvaluateJavaScript(
+                          '%s.error.message' % self.video_id)
+        return error_code, error_message
diff --git a/client/cros/video/device_capability.py b/client/cros/video/device_capability.py
index de97026..6df3bf5 100644
--- a/client/cros/video/device_capability.py
+++ b/client/cros/video/device_capability.py
@@ -61,7 +61,7 @@
                                          'managed-capabilities.yaml')
         if not os.path.exists(managed_cap_fpath):
             raise error.TestFail("%s is not installed" % managed_cap_fpath)
-        managed_caps = yaml.load(file(managed_cap_fpath))
+        managed_caps = yaml.safe_load(open(managed_cap_fpath))
 
         cap_files = [f for f in os.listdir(settings_path)
                      if re.match(r'^[0-9]+-.*\.yaml$', f)]
@@ -72,7 +72,7 @@
         for fname in cap_files:
             logging.debug('Processing caps: %s', fname)
             fname = os.path.join(settings_path, fname)
-            for rule in yaml.load(file(fname)):
+            for rule in yaml.safe_load(open(fname)):
                 # The type of rule is string or dict
                 # If the type is a string, it is a capability (e.g. webcam).
                 # If a specific condition (e.g. kepler, cpu type) is required,
diff --git a/client/cros/video/histogram_verifier.py b/client/cros/video/histogram_verifier.py
index dc9f093..351bb6f 100644
--- a/client/cros/video/histogram_verifier.py
+++ b/client/cros/video/histogram_verifier.py
@@ -5,37 +5,36 @@
 import collections
 import logging
 import re
+import six
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 
 
 def get_histogram_text(tab, histogram_name):
-     """
+    """
      This returns contents of the given histogram.
 
      @param tab: object, Chrome tab instance
      @param histogram_name: string, name of the histogram
      @returns string: contents of the histogram
      """
-     docEle = 'document.documentElement'
-     tab.Navigate('chrome://histograms/%s' % histogram_name)
-     tab.WaitForDocumentReadyStateToBeComplete()
-     raw_text = tab.EvaluateJavaScript(
-          '{0} && {0}.innerText'.format(docEle))
-     # extract the contents of the histogram
-     histogram = raw_text[raw_text.find('Histogram:'):].strip()
-     if histogram:
-          logging.debug('chrome://histograms/%s:\n%s', histogram_name,
-                        histogram)
-     else:
-          logging.debug('No histogram is shown in chrome://histograms/%s',
-                        histogram_name)
-     return histogram
+    docEle = 'document.documentElement'
+    tab.Navigate('chrome://histograms/%s' % histogram_name)
+    tab.WaitForDocumentReadyStateToBeComplete()
+    raw_text = tab.EvaluateJavaScript('{0} && {0}.innerText'.format(docEle))
+    # extract the contents of the histogram
+    histogram = raw_text[raw_text.find('Histogram:'):].strip()
+    if histogram:
+        logging.debug('chrome://histograms/%s:\n%s', histogram_name, histogram)
+    else:
+        logging.debug('No histogram is shown in chrome://histograms/%s',
+                      histogram_name)
+    return histogram
 
 
 def loaded(tab, histogram_name, pattern):
-     """
+    """
      Checks if the histogram page has been fully loaded.
 
      @param tab: object, Chrome tab instance
@@ -45,11 +44,11 @@
               None otherwise
 
      """
-     return re.search(pattern, get_histogram_text(tab, histogram_name))
+    return re.search(pattern, get_histogram_text(tab, histogram_name))
 
 
 def  verify(cr, histogram_name, histogram_bucket_value):
-     """
+    """
      Verifies histogram string and success rate in a parsed histogram bucket.
      The histogram buckets are outputted in debug log regardless of the
      verification result.
@@ -63,19 +62,19 @@
      @raises error.TestError if histogram is not successful
 
      """
-     bucket_pattern = '\n'+ str(histogram_bucket_value) +'.*100\.0%.*'
-     error_msg_format = ('{} not loaded or histogram bucket not found '
-                         'or histogram bucket found at < 100%')
-     tab = cr.browser.tabs.New()
-     msg = error_msg_format.format(histogram_name)
-     utils.poll_for_condition(lambda : loaded(tab, histogram_name,
-                                              bucket_pattern),
-                              exception=error.TestError(msg),
-                              sleep_interval=1)
+    bucket_pattern = '\n' + str(histogram_bucket_value) + r'.*100\.0%.*'
+    error_msg_format = ('{} not loaded or histogram bucket not found '
+                        'or histogram bucket found at < 100%')
+    tab = cr.browser.tabs.New()
+    msg = error_msg_format.format(histogram_name)
+    utils.poll_for_condition(lambda: loaded(tab, histogram_name,
+                                            bucket_pattern),
+                             exception=error.TestError(msg),
+                             sleep_interval=1)
 
 
 def is_bucket_present(cr,histogram_name, histogram_bucket_value):
-     """
+    """
      This returns histogram success or failure to the calling function.
 
      @param cr: object, the Chrome instance
@@ -85,16 +84,16 @@
               False otherwise
 
      """
-     try:
-          verify(cr,histogram_name, histogram_bucket_value)
-     except error.TestError:
-          return False
-     else:
-          return True
+    try:
+        verify(cr, histogram_name, histogram_bucket_value)
+    except error.TestError:
+        return False
+    else:
+        return True
 
 
 def is_histogram_present(cr, histogram_name):
-     """
+    """
      This checks if the given histogram is present and non-zero.
 
      @param cr: object, the Chrome instance
@@ -103,22 +102,22 @@
               False otherwise
 
      """
-     histogram_pattern = 'Histogram: '+ histogram_name + ' recorded ' + \
-                         r'[1-9][0-9]*' + ' samples'
-     tab = cr.browser.tabs.New()
-     try:
-          utils.poll_for_condition(lambda : loaded(tab, histogram_name,
-                                                   histogram_pattern),
-                                   timeout=2,
-                                   sleep_interval=0.1)
-          return True
-     except utils.TimeoutError:
-          # the histogram is not present, and then returns false
-          return False
+    histogram_pattern = 'Histogram: '+ histogram_name + ' recorded ' + \
+                        r'[1-9][0-9]*' + ' samples'
+    tab = cr.browser.tabs.New()
+    try:
+        utils.poll_for_condition(lambda: loaded(tab, histogram_name,
+                                                histogram_pattern),
+                                 timeout=2,
+                                 sleep_interval=0.1)
+        return True
+    except utils.TimeoutError:
+        # The histogram is not present; return False.
+        return False
 
 
 def get_histogram(cr, histogram_name):
-     """
+    """
      This returns contents of the given histogram.
 
      @param cr: object, the Chrome instance
@@ -126,34 +125,33 @@
      @returns string: contents of the histogram
 
      """
-     tab = cr.browser.tabs.New()
-     return get_histogram_text(tab, histogram_name)
+    tab = cr.browser.tabs.New()
+    return get_histogram_text(tab, histogram_name)
 
 
 def parse_histogram(histogram_text):
-     """
+    """
      Parses histogram text into bucket structure.
 
      @param histogram_text: histogram raw text.
      @returns dict(bucket_value, bucket_count)
      """
-     # Match separator line, e.g. "1   ..."
-     RE_SEPEARTOR = re.compile(r'\d+\s+\.\.\.')
-     # Match bucket line, e.g. "2  --O  (46 = 1.5%) {46.1%}"
-     RE_BUCKET = re.compile(
-          r'(\d+)\s+\-*O\s+\((\d+) = (\d+\.\d+)%\).*')
-     result = {}
-     for line in histogram_text.splitlines():
-          if RE_SEPEARTOR.match(line):
-               continue
-          m = RE_BUCKET.match(line)
-          if m:
-               result[int(m.group(1))] = int(m.group(2))
-     return result
+    # Match separator line, e.g. "1   ..."
+    RE_SEPARATOR = re.compile(r'\d+\s+\.\.\.')
+    # Match bucket line, e.g. "2  --O  (46 = 1.5%) {46.1%}"
+    RE_BUCKET = re.compile(r'(\d+)\s+\-*O\s+\((\d+) = (\d+\.\d+)%\).*')
+    result = {}
+    for line in histogram_text.splitlines():
+        if RE_SEPARATOR.match(line):
+            continue
+        m = RE_BUCKET.match(line)
+        if m:
+            result[int(m.group(1))] = int(m.group(2))
+    return result
 
 
 def subtract_histogram(minuend, subtrahend):
-     """
+    """
      Subtracts histogram: minuend - subtrahend
 
      @param minuend: histogram bucket dict from which another is to be
@@ -162,17 +160,17 @@
      @result difference of the two histograms in bucket dict. Note that
              zero-counted buckets are removed.
      """
-     result = collections.defaultdict(int, minuend)
-     for k, v in subtrahend.iteritems():
-          result[k] -= v
+    result = collections.defaultdict(int, minuend)
+    for k, v in six.iteritems(subtrahend):
+        result[k] -= v
 
-     # Remove zero counted buckets.
-     return {k: v for k, v in result.iteritems() if v}
+    # Remove zero counted buckets.
+    return {k: v for k, v in six.iteritems(result) if v}
 
 
 def expect_sole_bucket(histogram_differ, bucket, bucket_name, timeout=10,
                        sleep_interval=1):
-     """
+    """
      Returns true if the given bucket solely exists in histogram differ.
 
      @param histogram_differ: a HistogramDiffer instance used to get histogram
@@ -184,25 +182,25 @@
      @returns True if the given bucket solely exists in histogram.
      @raises TestError if bucket doesn't exist or other buckets exist.
      """
-     timer = utils.Timer(timeout)
-     histogram = {}
-     histogram_name = histogram_differ.histogram_name
-     while timer.sleep(sleep_interval):
-          histogram = histogram_differ.end()
-          if histogram:
-               break
+    timer = utils.Timer(timeout)
+    histogram = {}
+    histogram_name = histogram_differ.histogram_name
+    while timer.sleep(sleep_interval):
+        histogram = histogram_differ.end()
+        if histogram:
+            break
 
-     if bucket not in histogram:
-          raise error.TestError('Expect %s has %s. Histogram: %r' %
-                                (histogram_name, bucket_name, histogram))
-     if len(histogram) > 1:
-          raise error.TestError('%s has bucket other than %s. Histogram: %r' %
-                                (histogram_name, bucket_name, histogram))
-     return True
+    if bucket not in histogram:
+        raise error.TestError('Expect %s has %s. Histogram: %r' %
+                              (histogram_name, bucket_name, histogram))
+    if len(histogram) > 1:
+        raise error.TestError('%s has bucket other than %s. Histogram: %r' %
+                              (histogram_name, bucket_name, histogram))
+    return True
 
 
 def poll_histogram_grow(histogram_differ, timeout=2, sleep_interval=0.1):
-     """
+    """
      Polls histogram to see if it grows within |timeout| seconds.
 
      @param histogram_differ: a HistogramDiffer instance used to get histogram
@@ -212,16 +210,16 @@
      @returns (True, histogram_diff) if the histogram grows.
               (False, {}) if it does not grow in |timeout| seconds.
      """
-     timer = utils.Timer(timeout)
-     while timer.sleep(sleep_interval):
-          histogram_diff = histogram_differ.end()
-          if histogram_diff:
-               return (True, histogram_diff)
-     return (False, {})
+    timer = utils.Timer(timeout)
+    while timer.sleep(sleep_interval):
+        histogram_diff = histogram_differ.end()
+        if histogram_diff:
+            return (True, histogram_diff)
+    return (False, {})
 
 
 class HistogramDiffer(object):
-     """
+    """
      Calculates a histogram's progress between begin() and end().
 
      Usage:
@@ -229,54 +227,55 @@
        ....
        diff_gvd_error = differ.end()
      """
-     def __init__(self, cr, histogram_name, begin=True):
-          """
+
+    def __init__(self, cr, histogram_name, begin=True):
+        """
           Constructor.
 
           @param: cr: object, the Chrome instance
           @param: histogram_name: string, name of the histogram
           @param: begin: if set, calls begin().
           """
-          self.cr = cr
-          self.histogram_name = histogram_name
-          self.begin_histogram_text = ''
-          self.end_histogram_text = ''
-          self.begin_histogram = {}
-          self.end_histogram = {}
-          if begin:
-               self.begin()
+        self.cr = cr
+        self.histogram_name = histogram_name
+        self.begin_histogram_text = ''
+        self.end_histogram_text = ''
+        self.begin_histogram = {}
+        self.end_histogram = {}
+        if begin:
+            self.begin()
 
-     def _get_histogram(self):
-          """
+    def _get_histogram(self):
+        """
           Gets current histogram bucket.
 
           @returns (dict(bucket_value, bucket_count), histogram_text)
           """
-          tab = self.cr.browser.tabs.New()
-          text = get_histogram_text(tab, self.histogram_name)
-          tab.Close()
-          return (parse_histogram(text), text)
+        tab = self.cr.browser.tabs.New()
+        text = get_histogram_text(tab, self.histogram_name)
+        tab.Close()
+        return (parse_histogram(text), text)
 
-     def begin(self):
-          """
+    def begin(self):
+        """
           Takes a histogram snapshot as begin_histogram.
           """
-          (self.begin_histogram,
-           self.begin_histogram_text) = self._get_histogram()
-          logging.debug('begin histograms/%s: %r\nraw_text: %s',
-                        self.histogram_name, self.begin_histogram,
-                        self.begin_histogram_text)
+        (self.begin_histogram,
+         self.begin_histogram_text) = self._get_histogram()
+        logging.debug('begin histograms/%s: %r\nraw_text: %s',
+                      self.histogram_name, self.begin_histogram,
+                      self.begin_histogram_text)
 
-     def end(self):
-          """
+    def end(self):
+        """
           Takes a histogram snapshot as end_histogram.
 
           @returns self.diff()
           """
-          self.end_histogram, self.end_histogram_text = self._get_histogram()
-          logging.debug('end histograms/%s: %r\nraw_text: %s',
-                        self.histogram_name, self.end_histogram,
-                        self.end_histogram_text)
-          diff = subtract_histogram(self.end_histogram, self.begin_histogram)
-          logging.debug('histogram diff: %r', diff)
-          return diff
+        self.end_histogram, self.end_histogram_text = self._get_histogram()
+        logging.debug('end histograms/%s: %r\nraw_text: %s',
+                      self.histogram_name, self.end_histogram,
+                      self.end_histogram_text)
+        diff = subtract_histogram(self.end_histogram, self.begin_histogram)
+        logging.debug('histogram diff: %r', diff)
+        return diff
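The comments in parse_histogram() above show the chrome://histograms text format the regexes expect. A minimal usage sketch of parse_histogram() and subtract_histogram() against that format; the sample text is illustrative, and the import assumes an autotest checkout so autotest_lib resolves.

from autotest_lib.client.cros.video import histogram_verifier

# Sample text in the format shown above: a separator line ("0   ...") and
# bucket lines such as "2  --O  (46 = 95.8%) {0.0%}".
raw = ('Histogram: Media.GpuVideoDecoderError recorded 48 samples\n'
       '0   ...\n'
       '2  --O  (46 = 95.8%) {0.0%}\n'
       '5  -O   (2 = 4.2%) {95.8%}\n')

before = histogram_verifier.parse_histogram(raw)   # {2: 46, 5: 2}
after = {2: 46, 5: 3}
# Only buckets whose counts changed survive the subtraction.
print(histogram_verifier.subtract_histogram(after, before))  # {5: 1}

HistogramDiffer, shown in the hunks above, wraps the same begin/end subtraction against a live Chrome instance instead of hand-built text.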
diff --git a/client/cros/video/histogram_verifier_unittest.py b/client/cros/video/histogram_verifier_unittest.py
index 6dee212..714f0b0 100755
--- a/client/cros/video/histogram_verifier_unittest.py
+++ b/client/cros/video/histogram_verifier_unittest.py
@@ -1,16 +1,16 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
 import unittest
+from unittest import mock
 
 import common
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.video import histogram_verifier
-import mock
 
 
 class HistogramVerifierTest(unittest.TestCase):
diff --git a/client/cros/video/native_html5_player.py b/client/cros/video/native_html5_player.py
deleted file mode 100644
index 0dcd706..0000000
--- a/client/cros/video/native_html5_player.py
+++ /dev/null
@@ -1,199 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-from autotest_lib.client.cros.video import video_player
-
-import py_utils
-import logging
-
-
-class NativeHtml5Player(video_player.VideoPlayer):
-    """
-    Provides an interface to interact with native html5 player in chrome.
-
-    """
-
-
-    def inject_source_file(self):
-        """
-        Injects the path to the video file under test into the html doc.
-
-
-        """
-        self.tab.ExecuteJavaScript(
-            'loadVideoSource("%s")' % self.video_src_path)
-
-
-    def is_video_ready(self):
-        """
-        Determines if a native html5 video is ready by using javascript.
-
-        returns: bool, True if video is ready, else False.
-
-        """
-        return self.tab.EvaluateJavaScript('canplay()')
-
-
-    def is_javascript_ready(self):
-        """
-        returns: True if javascript variables and functions have been defined,
-
-        else False.
-
-        """
-        return self.tab.EvaluateJavaScript(
-                    'typeof script_ready!="undefined" && script_ready == true')
-
-
-    def play(self):
-        """
-        Plays the video.
-
-        """
-        self.tab.ExecuteJavaScript('play()')
-
-
-    def pause(self):
-        """
-        Pauses the video.
-
-        """
-        self.tab.ExecuteJavaScript('pause()')
-
-
-    def paused(self):
-        """
-        Checks whether video paused.
-
-        """
-        cmd = '%s.paused' % self.video_id
-        return self.tab.EvaluateJavaScript(cmd)
-
-
-    def ended(self):
-        """
-        Checks whether video paused.
-
-        """
-        cmd = '%s.ended' % self.video_id
-        return self.tab.EvaluateJavaScript(cmd)
-
-
-    def currentTime(self):
-        """
-        Returns the current time of the video element.
-
-        """
-        return self.tab.EvaluateJavaScript('currentTime()')
-
-
-    def seek_to(self, t):
-        """
-        Seeks a video to a time stamp.
-
-        @param t: timedelta, time value to seek to.
-
-        """
-        cmd = '%s.currentTime=%.3f' % (self.video_id, t.total_seconds())
-        self.tab.ExecuteJavaScript(cmd)
-
-
-    def has_video_finished_seeking(self):
-        """
-        Determines if the video has finished seeking.
-
-        """
-        return self.tab.EvaluateJavaScript('finishedSeeking()')
-
-
-    def wait_for_error(self):
-        """
-        Determines if the video has any errors
-
-        """
-        return self.tab.WaitForJavaScriptCondition('errorDetected();',
-                                                   timeout=30)
-
-
-    def reload_page(self):
-        """
-        Reloads current page
-
-        """
-        self.tab.ExecuteJavaScript('location.reload()')
-
-
-    def enable_VideoControls(self):
-        """
-        For enabling controls
-
-        """
-        self.tab.ExecuteJavaScript('setControls()')
-
-
-    def dropped_frame_count(self):
-        """
-        Gets the number of dropped frames.
-
-        @returns: An integer indicates the number of dropped frame.
-
-        """
-        cmd = '%s.webkitDroppedFrameCount' % self.video_id
-        return self.tab.EvaluateJavaScript(cmd)
-
-
-    def duration(self):
-        """
-        Gets the duration of the video.
-
-        @returns: An number indicates the duration of the video.
-
-        """
-        cmd = '%s.duration' % self.video_id
-        return self.tab.EvaluateJavaScript(cmd)
-
-
-    def wait_video_ended(self):
-        """
-        Waits until the video playback is ended.
-
-        """
-        cmd = '%s.ended' % self.video_id
-        self.tab.WaitForJavaScriptCondition(cmd, timeout=(self.duration() * 2))
-
-
-    def wait_ended_or_error(self):
-        """
-        Waits until the video ends or an error happens.
-
-        """
-        try:
-            # unit of timeout is second.
-            self.tab.WaitForJavaScriptCondition('endOrError()',
-                                                timeout=(self.duration() + 30))
-        except py_utils.TimeoutException:
-            logging.error('Timeout in waiting endOrError()')
-            raise
-
-
-    def check_error(self):
-        """
-        Check whether an error happens.
-
-        """
-        return self.tab.EvaluateJavaScript('errorDetected()')
-
-
-    def get_error_info(self):
-        """
-        Get error code and message
-        @returns string,string: error code and message
-
-        """
-        error_code = self.tab.EvaluateJavaScript(
-                          '%s.error.code' % self.video_id)
-        error_message = self.tab.EvaluateJavaScript(
-                          '%s.error.message' % self.video_id)
-        return error_code, error_message
diff --git a/client/cros/vpn_server.py b/client/cros/vpn_server.py
deleted file mode 100644
index da02d11..0000000
--- a/client/cros/vpn_server.py
+++ /dev/null
@@ -1,316 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.cros import network_chroot
-from autotest_lib.client.common_lib.cros import site_eap_certs
-
-class VPNServer(object):
-    """Context enclosing the use of a VPN server instance."""
-
-    def __enter__(self):
-        self.start_server()
-        return self
-
-
-    def __exit__(self, exception, value, traceback):
-        logging.info('Log contents: %s', self.get_log_contents())
-        self.stop_server()
-
-
-class L2TPIPSecVPNServer(VPNServer):
-    """Implementation of an L2TP/IPSec VPN.  Uses ipsec starter and xl2tpd."""
-    ROOT_DIRECTORIES = ('etc/ipsec.d', 'etc/ipsec.d/cacerts',
-                        'etc/ipsec.d/certs', 'etc/ipsec.d/crls',
-                        'etc/ipsec.d/private', 'etc/ppp', 'etc/xl2tpd')
-    CHAP_USER = 'chapuser'
-    CHAP_SECRET = 'chapsecret'
-    IPSEC_COMMAND = '/usr/sbin/ipsec'
-    IPSEC_LOGFILE = 'var/log/charon.log'
-    IPSEC_PRESHARED_KEY = 'preshared-key'
-    IPSEC_CA_CERTIFICATE = 'etc/ipsec.d/cacerts/ca.cert'
-    IPSEC_SERVER_CERTIFICATE = 'etc/ipsec.d/certs/server.cert'
-    PPPD_PID_FILE = 'run/ppp0.pid'
-    XAUTH_USER = 'xauth_user'
-    XAUTH_PASSWORD = 'xauth_password'
-    XAUTH_SECONDARY_AUTHENTICATION_STANZA = 'rightauth2=xauth'
-    XL2TPD_COMMAND = '/usr/sbin/xl2tpd'
-    XL2TPD_CONFIG_FILE = 'etc/xl2tpd/xl2tpd.conf'
-    XL2TPD_PID_FILE = 'run/xl2tpd.pid'
-    SERVER_IP_ADDRESS = '192.168.1.99'
-    IPSEC_COMMON_CONFIGS = {
-        'etc/strongswan.conf' :
-            'charon {\n'
-            '  filelog {\n'
-            '    test_vpn {\n'
-            '      path = %(charon-logfile)s\n'
-            '      default = 3\n'
-            '      time_format = %%b %%e %%T\n'
-            '    }\n'
-            '  }\n'
-            '  install_routes = no\n'
-            '  ignore_routing_tables = 0\n'
-            '  routing_table = 0\n'
-            '}\n',
-
-        'etc/passwd' :
-            'root:x:0:0:root:/root:/bin/bash\n'
-            'ipsec:*:212:212::/dev/null:/bin/false\n',
-
-        'etc/group' :
-            'ipsec:x:212:\n',
-
-        XL2TPD_CONFIG_FILE :
-            '[global]\n'
-            '\n'
-            '[lns default]\n'
-            '  ip range = 192.168.1.128-192.168.1.254\n'
-            '  local ip = 192.168.1.99\n'
-            '  require chap = yes\n'
-            '  refuse pap = yes\n'
-            '  require authentication = yes\n'
-            '  name = LinuxVPNserver\n'
-            '  ppp debug = yes\n'
-            '  pppoptfile = /etc/ppp/options.xl2tpd\n'
-            '  length bit = yes\n',
-
-        'etc/xl2tpd/l2tp-secrets' :
-            '*      them    l2tp-secret',
-
-        'etc/ppp/chap-secrets' :
-            '%(chap-user)s        *       %(chap-secret)s      *',
-
-        'etc/ppp/options.xl2tpd' :
-            'ipcp-accept-local\n'
-            'ipcp-accept-remote\n'
-            'noccp\n'
-            'auth\n'
-            'crtscts\n'
-            'idle 1800\n'
-            'mtu 1410\n'
-            'mru 1410\n'
-            'nodefaultroute\n'
-            'debug\n'
-            'lock\n'
-            'proxyarp\n'
-    }
-    IPSEC_TYPED_CONFIGS = {
-        'psk': {
-            'etc/ipsec.conf' :
-                'config setup\n'
-                '  charondebug="%(charon-debug-flags)s"\n'
-                'conn L2TP\n'
-                '  keyexchange=ikev1\n'
-                '  ike=aes128-sha1-modp2048!\n'
-                '  esp=3des-sha1!\n'
-                '  type=transport\n'
-                '  authby=psk\n'
-                '  %(xauth-stanza)s\n'
-                '  rekey=no\n'
-                '  left=%(local-ip)s\n'
-                '  leftprotoport=17/1701\n'
-                '  right=%%any\n'
-                '  rightprotoport=17/%%any\n'
-                '  auto=add\n',
-
-            'etc/ipsec.secrets' :
-              '%(local-ip)s %%any : PSK "%(preshared-key)s"\n'
-              '%(xauth-user)s : XAUTH "%(xauth-password)s"\n',
-        },
-        'cert': {
-            'etc/ipsec.conf' :
-                'config setup\n'
-                '  charondebug="%(charon-debug-flags)s"\n'
-                'conn L2TP\n'
-                '  keyexchange=ikev1\n'
-                '  ike=aes128-sha1-modp2048!\n'
-                '  esp=3des-sha1!\n'
-                '  type=transport\n'
-                '  left=%(local-ip)s\n'
-                '  leftcert=server.cert\n'
-                '  leftid="C=US, ST=California, L=Mountain View, '
-                'CN=chromelab-wifi-testbed-server.mtv.google.com"\n'
-                '  leftprotoport=17/1701\n'
-                '  right=%%any\n'
-                '  rightca="C=US, ST=California, L=Mountain View, '
-                'CN=chromelab-wifi-testbed-root.mtv.google.com"\n'
-                '  rightprotoport=17/%%any\n'
-                '  auto=add\n',
-
-            'etc/ipsec.secrets' : ': RSA server.key ""\n',
-
-            IPSEC_SERVER_CERTIFICATE : site_eap_certs.server_cert_1,
-            IPSEC_CA_CERTIFICATE : site_eap_certs.ca_cert_1,
-            'etc/ipsec.d/private/server.key' :
-                site_eap_certs.server_private_key_1,
-        },
-    }
-
-    """Implementation of an L2TP/IPSec server instance."""
-    def __init__(self, auth_type, interface_name, address, network_prefix,
-                 perform_xauth_authentication=False,
-                 local_ip_is_public_ip=False):
-        self._auth_type = auth_type
-        self._chroot = network_chroot.NetworkChroot(interface_name,
-                                                    address, network_prefix)
-        self._perform_xauth_authentication = perform_xauth_authentication
-
-        if local_ip_is_public_ip:
-            self.IPSEC_COMMON_CONFIGS[self.XL2TPD_CONFIG_FILE] = \
-                self.IPSEC_COMMON_CONFIGS[self.XL2TPD_CONFIG_FILE].replace(
-                    self.SERVER_IP_ADDRESS, address)
-            self.SERVER_IP_ADDRESS = address
-
-
-    def start_server(self):
-        """Start VPN server instance"""
-        if self._auth_type not in self.IPSEC_TYPED_CONFIGS:
-            raise RuntimeError('L2TP/IPSec type %s is not define' %
-                               self._auth_type)
-        chroot = self._chroot
-        chroot.add_root_directories(self.ROOT_DIRECTORIES)
-        chroot.add_config_templates(self.IPSEC_COMMON_CONFIGS)
-        chroot.add_config_templates(self.IPSEC_TYPED_CONFIGS[self._auth_type])
-        chroot.add_config_values({
-            'chap-user': self.CHAP_USER,
-            'chap-secret': self.CHAP_SECRET,
-            'charon-debug-flags': 'dmn 2, mgr 2, ike 2, net 2',
-            'charon-logfile': self.IPSEC_LOGFILE,
-            'preshared-key': self.IPSEC_PRESHARED_KEY,
-            'xauth-user': self.XAUTH_USER,
-            'xauth-password': self.XAUTH_PASSWORD,
-            'xauth-stanza': self.XAUTH_SECONDARY_AUTHENTICATION_STANZA
-                    if self._perform_xauth_authentication else '',
-        })
-        chroot.add_startup_command('%s start' % self.IPSEC_COMMAND)
-        chroot.add_startup_command('%s -c /%s -C /tmp/l2tpd.control' %
-                                   (self.XL2TPD_COMMAND,
-                                    self.XL2TPD_CONFIG_FILE))
-        chroot.startup()
-
-
-    def stop_server(self):
-        """Start VPN server instance"""
-        chroot = self._chroot
-        chroot.run([self.IPSEC_COMMAND, 'stop'], ignore_status=True)
-        chroot.kill_pid_file(self.XL2TPD_PID_FILE, missing_ok=True)
-        chroot.kill_pid_file(self.PPPD_PID_FILE, missing_ok=True)
-        chroot.shutdown()
-
-
-    def get_log_contents(self):
-        """Return all logs related to the chroot."""
-        return self._chroot.get_log_contents()
-
-
-class OpenVPNServer(VPNServer):
-    """Implementation of an OpenVPN service."""
-    PRELOAD_MODULES = ('tun',)
-    ROOT_DIRECTORIES = ('etc/openvpn',)
-    CA_CERTIFICATE_FILE = 'etc/openvpn/ca.crt'
-    SERVER_CERTIFICATE_FILE = 'etc/openvpn/server.crt'
-    SERVER_KEY_FILE = 'etc/openvpn/server.key'
-    DIFFIE_HELLMAN_FILE = 'etc/openvpn/diffie-hellman.pem'
-    OPENVPN_COMMAND = '/usr/sbin/openvpn'
-    OPENVPN_CONFIG_FILE = 'etc/openvpn/openvpn.conf'
-    OPENVPN_PID_FILE = 'run/openvpn.pid'
-    OPENVPN_STATUS_FILE = 'tmp/openvpn.status'
-    AUTHENTICATION_SCRIPT = 'etc/openvpn_authentication_script.sh'
-    EXPECTED_AUTHENTICATION_FILE = 'etc/openvpn_expected_authentication.txt'
-    PASSWORD = 'password'
-    USERNAME = 'username'
-    SERVER_IP_ADDRESS = '10.11.12.1'
-    # TODO b:169251326 terms below are set outside of this codebase
-    # and should be updated when possible. ("blacklist" -> "blocklist")
-    CONFIGURATION = {
-        'etc/ssl/blacklist' : '',
-        CA_CERTIFICATE_FILE : site_eap_certs.ca_cert_1,
-        SERVER_CERTIFICATE_FILE : site_eap_certs.server_cert_1,
-        SERVER_KEY_FILE : site_eap_certs.server_private_key_1,
-        DIFFIE_HELLMAN_FILE : site_eap_certs.dh1024_pem_key_1,
-        AUTHENTICATION_SCRIPT :
-            '#!/bin/bash\n'
-            'diff -q $1 %(expected-authentication-file)s\n',
-        EXPECTED_AUTHENTICATION_FILE : '%(username)s\n%(password)s\n',
-        OPENVPN_CONFIG_FILE :
-            'ca /%(ca-cert)s\n'
-            'cert /%(server-cert)s\n'
-            'dev tun\n'
-            'dh /%(diffie-hellman-params-file)s\n'
-            'keepalive 10 120\n'
-            'local %(local-ip)s\n'
-            'log /var/log/openvpn.log\n'
-            'ifconfig-pool-persist /tmp/ipp.txt\n'
-            'key /%(server-key)s\n'
-            'persist-key\n'
-            'persist-tun\n'
-            'port 1194\n'
-            'proto udp\n'
-            'server 10.11.12.0 255.255.255.0\n'
-            'status /%(status-file)s\n'
-            'verb 5\n'
-            'writepid /%(pid-file)s\n'
-            '%(optional-user-verification)s\n'
-    }
-
-    def __init__(self, interface_name, address, network_prefix,
-                 perform_username_authentication=False):
-        self._chroot = network_chroot.NetworkChroot(interface_name,
-                                                    address, network_prefix)
-        self._perform_username_authentication = perform_username_authentication
-
-
-    def start_server(self):
-        """Start VPN server instance"""
-        chroot = self._chroot
-        chroot.add_root_directories(self.ROOT_DIRECTORIES)
-        # Create a configuration template from the key-value pairs.
-        chroot.add_config_templates(self.CONFIGURATION)
-        config_values = {
-            'ca-cert': self.CA_CERTIFICATE_FILE,
-            'diffie-hellman-params-file': self.DIFFIE_HELLMAN_FILE,
-            'expected-authentication-file': self.EXPECTED_AUTHENTICATION_FILE,
-            'optional-user-verification': '',
-            'password': self.PASSWORD,
-            'pid-file': self.OPENVPN_PID_FILE,
-            'server-cert': self.SERVER_CERTIFICATE_FILE,
-            'server-key': self.SERVER_KEY_FILE,
-            'status-file': self.OPENVPN_STATUS_FILE,
-            'username': self.USERNAME,
-        }
-        if self._perform_username_authentication:
-            config_values['optional-user-verification'] = (
-                    'auth-user-pass-verify /%s via-file\nscript-security 2' %
-                    self.AUTHENTICATION_SCRIPT)
-        chroot.add_config_values(config_values)
-        chroot.add_startup_command('chmod 755 %s' % self.AUTHENTICATION_SCRIPT)
-        chroot.add_startup_command('%s --config /%s &' %
-                                   (self.OPENVPN_COMMAND,
-                                    self.OPENVPN_CONFIG_FILE))
-        chroot.add_environment({
-                'OPENSSL_CONF': '/etc/ssl/openssl.cnf.compat',
-                'OPENSSL_CHROMIUM_SKIP_TRUSTED_PURPOSE_CHECK': '1'
-            });
-        self.preload_modules()
-        chroot.startup()
-
-
-    def preload_modules(self):
-        """Pre-load modules since they can't be loaded from chroot."""
-        for module in self.PRELOAD_MODULES:
-            utils.system('modprobe %s' % module)
-
-
-    def get_log_contents(self):
-        """Return all logs related to the chroot."""
-        return self._chroot.get_log_contents()
-
-
-    def stop_server(self):
-        """Start VPN server instance"""
-        chroot = self._chroot
-        chroot.kill_pid_file(self.OPENVPN_PID_FILE, missing_ok=True)
-        chroot.shutdown()
diff --git a/client/cros/webstore_test.py b/client/cros/webstore_test.py
deleted file mode 100644
index 91a6b8e..0000000
--- a/client/cros/webstore_test.py
+++ /dev/null
@@ -1,286 +0,0 @@
-# Lint as: python2, python3
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This module allows tests to interact with the Chrome Web Store (CWS)
-using ChromeDriver. They should inherit from the webstore_test class,
-and should override the run() method.
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import logging
-import six
-from six.moves import range
-from six.moves import zip
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chromedriver
-from autotest_lib.client.common_lib.global_config import global_config
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support import expected_conditions
-from selenium.webdriver.support.ui import WebDriverWait
-
-# How long to wait, in seconds, for an app to launch. This is larger
-# than it needs to be, because it might be slow on older Chromebooks
-_LAUNCH_DELAY = 4
-
-# How long to wait before entering the password when logging in to the CWS
-_ENTER_PASSWORD_DELAY = 2
-
-# How long to wait before entering payment info
-_PAYMENT_DELAY = 5
-
-def enum(*enumNames):
-    """
-    Creates an enum. Returns an enum object with a value for each enum
-    name, as well as from_string and to_string mappings.
-
-    @param enumNames: The strings representing the values of the enum
-    """
-    enums = dict(zip(enumNames, list(range(len(enumNames)))))
-    reverse = dict((value, key) for key, value in six.iteritems(enums))
-    enums['from_string'] = enums
-    enums['to_string'] = reverse
-    return type('Enum', (), enums)
-
-# TODO: staging and PNL don't work in these tests (crbug/396660)
-TestEnv = enum('staging', 'pnl', 'prod', 'sandbox')
-
-ItemType = enum(
-    'hosted_app',
-    'packaged_app',
-    'chrome_app',
-    'extension',
-    'theme',
-)
-
-# NOTE: paid installs don't work right now
-InstallType = enum(
-    'free',
-    'free_trial',
-    'paid',
-)
-
-def _labeled_button(label):
-    """
-    Returns a button with the class webstore-test-button-label and the
-    specified label
-
-    @param label: The label on the button
-    """
-    return ('//div[contains(@class,"webstore-test-button-label") '
-            'and text()="' + label + '"]')
-
-def _install_type_click_xpath(item_type, install_type):
-    """
-    Returns the XPath of the button to install an item of the given type.
-
-    @param item_type: The type of the item to install
-    @param install_type: The type of installation being used
-    """
-    if install_type == InstallType.free:
-        return _labeled_button('Free')
-    elif install_type == InstallType.free_trial:
-        # Both of these cases return buttons that say "Add to Chrome",
-        # but they are actually different buttons with only one being
-        # visible at a time.
-        if item_type == ItemType.hosted_app:
-            return ('//div[@id="cxdialog-install-paid-btn" and '
-                    '@aria-label="Add to Chrome"]')
-        else:
-            return _labeled_button('Add to Chrome')
-    else:
-        return ('//div[contains(@aria-label,"Buy for") '
-                'and not(contains(@style,"display: none"))]')
-
-def _get_chrome_flags(test_env):
-    """
-    Returns the Chrome flags for the given test environment.
-    """
-    flags = ['--apps-gallery-install-auto-confirm-for-tests=accept']
-    if test_env == TestEnv.prod:
-        return flags
-
-    url_middle = {
-            TestEnv.staging: 'staging.corp',
-            TestEnv.sandbox: 'staging.sandbox',
-            TestEnv.pnl: 'prod-not-live.corp'
-            }[test_env]
-    download_url_middle = {
-            TestEnv.staging: 'download-staging.corp',
-            TestEnv.sandbox: 'download-staging.sandbox',
-            TestEnv.pnl: 'omaha.sandbox'
-            }[test_env]
-    flags.append('--apps-gallery-url=https://webstore-' + url_middle +
-            '.google.com')
-    flags.append('--apps-gallery-update-url=https://' + download_url_middle +
-            '.google.com/service/update2/crx')
-    logging.info('Using flags %s', flags)
-    return flags
-
-
-class webstore_test(test.test):
-    """
-    The base class for tests that interact with the web store.
-
-    Subclasses must define run(), but should not override run_once().
-    Subclasses should use methods in this module such as install_item,
-    but they can also use the driver directly if they need to.
-    """
-
-    def initialize(self, test_env=TestEnv.sandbox,
-                   account='cwsbotdeveloper1@gmail.com'):
-        """
-        Initialize the test.
-
-        @param test_env: The test environment to use
-        """
-        super(webstore_test, self).initialize()
-
-        self.username = account
-        self.password = global_config.get_config_value(
-                'CLIENT', 'webstore_test_password', type=str)
-
-        self.test_env = test_env
-        self._chrome_flags = _get_chrome_flags(test_env)
-        self.webstore_url = {
-                TestEnv.staging:
-                    'https://webstore-staging.corp.google.com',
-                TestEnv.sandbox:
-                    'https://webstore-staging.sandbox.google.com/webstore',
-                TestEnv.pnl:
-                    'https://webstore-prod-not-live.corp.google.com/webstore',
-                TestEnv.prod:
-                    'https://chrome.google.com/webstore'
-                }[test_env]
-
-
-    def build_url(self, page):
-        """
-        Builds a webstore URL for the specified page.
-
-        @param page: the page to build a URL for
-        """
-        return self.webstore_url + page + "?gl=US"
-
-
-    def detail_page(self, item_id):
-        """
-        Returns the URL of the detail page for the given item
-
-        @param item_id: The item ID
-        """
-        return self.build_url("/detail/" + item_id)
-
-
-    def wait_for(self, xpath):
-        """
-        Waits until the element specified by the given XPath is visible
-
-        @param xpath: The xpath of the element to wait for
-        """
-        self._wait.until(expected_conditions.visibility_of_element_located(
-                (By.XPATH, xpath)))
-
-
-    def run_once(self, **kwargs):
-        with chromedriver.chromedriver(
-                username=self.username,
-                password=self.password,
-                extra_chrome_flags=self._chrome_flags) \
-                as chromedriver_instance:
-            self.driver = chromedriver_instance.driver
-            self.driver.implicitly_wait(15)
-            self._wait = WebDriverWait(self.driver, 20)
-            logging.info('Running test on test environment %s',
-                    TestEnv.to_string[self.test_env])
-            self.run(**kwargs)
-
-
-    def run(self):
-        """
-        Runs the test. Should be overridden by subclasses.
-        """
-        raise error.TestError('The test needs to override run()')
-
-
-    def install_item(self, item_id, item_type, install_type):
-        """
-        Installs an item from the CWS.
-
-        @param item_id: The ID of the item to install
-                (a 32-char string of letters)
-        @param item_type: The type of the item to install
-        @param install_type: The type of installation
-                (free, free trial, or paid)
-        """
-        logging.info('Installing item %s of type %s with install_type %s',
-                item_id, ItemType.to_string[item_type],
-                InstallType.to_string[install_type])
-
-        # We need to go to the CWS home page before going to the detail
-        # page due to a bug in the CWS
-        self.driver.get(self.webstore_url)
-        self.driver.get(self.detail_page(item_id))
-
-        install_type_click_xpath = _install_type_click_xpath(
-                item_type, install_type)
-        if item_type == ItemType.extension or item_type == ItemType.theme:
-            post_install_xpath = (
-                '//div[@aria-label="Added to Chrome" '
-                ' and not(contains(@style,"display: none"))]')
-        else:
-            post_install_xpath = _labeled_button('Launch app')
-
-        # In this case we need to sign in again
-        if install_type != InstallType.free:
-            button_xpath = _labeled_button('Sign in to add')
-            logging.info('Clicking button %s', button_xpath)
-            self.driver.find_element_by_xpath(button_xpath).click()
-            time.sleep(_ENTER_PASSWORD_DELAY)
-            password_field = self.driver.find_element_by_xpath(
-                    '//input[@id="Passwd"]')
-            password_field.send_keys(self.password)
-            self.driver.find_element_by_xpath('//input[@id="signIn"]').click()
-
-        logging.info('Clicking %s', install_type_click_xpath)
-        self.driver.find_element_by_xpath(install_type_click_xpath).click()
-
-        if install_type == InstallType.paid:
-            handle = self.driver.current_window_handle
-            iframe = self.driver.find_element_by_xpath(
-                '//iframe[contains(@src, "sandbox.google.com/checkout")]')
-            self.driver.switch_to_frame(iframe)
-            self.driver.find_element_by_id('purchaseButton').click()
-            time.sleep(_PAYMENT_DELAY) # Wait for animation to finish
-            self.driver.find_element_by_id('finishButton').click()
-            self.driver.switch_to_window(handle)
-
-        self.wait_for(post_install_xpath)
-
-
-    def launch_app(self, app_id):
-        """
-        Launches an app. Verifies that it launched by verifying that
-        a new tab/window was opened.
-
-        @param app_id: The ID of the app to run
-        """
-        logging.info('Launching app %s', app_id)
-        num_handles_before = len(self.driver.window_handles)
-        self.driver.get(self.webstore_url)
-        self.driver.get(self.detail_page(app_id))
-        launch_button = self.driver.find_element_by_xpath(
-            _labeled_button('Launch app'))
-        launch_button.click();
-        time.sleep(_LAUNCH_DELAY) # Wait for the app to launch
-        num_handles_after = len(self.driver.window_handles)
-        if num_handles_after <= num_handles_before:
-            raise error.TestError('App failed to launch')
diff --git a/client/cros/xmlrpc_server.py b/client/cros/xmlrpc_server.py
index bceb1bb..5a9685e 100644
--- a/client/cros/xmlrpc_server.py
+++ b/client/cros/xmlrpc_server.py
@@ -3,7 +3,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import contextlib
+# contextlib.nested was deprecated and then removed in Python 3. Since the
+# replacement APIs are quite different, check whether contextlib.nested is
+# available and take different code paths accordingly.
+try:
+    from contextlib import nested
+    use_nested = True
+except ImportError:
+    import contextlib
+    use_nested = False
+
 import dbus
 import errno
 import functools
@@ -24,8 +33,8 @@
     line.  This should avoid including processes such as editors and 'tail' of
     logs, which might match a simple pkill.
 
-    exe=/usr/local/bin/python2.7
-    cmdline=['/usr/bin/python2', '-u', '/usr/local/autotest/.../rpc_server.py']
+    exe=/usr/local/bin/python3
+    cmdline=['/usr/bin/python3', '-u', '/usr/local/autotest/.../rpc_server.py']
 
     @param script_name: The filename of the main script, used to match processes
     @param sigterm_timeout: Wait N seconds after SIGTERM before trying SIGKILL.
@@ -73,13 +82,13 @@
         except psutil.NoSuchProcess as e:
             logging.debug('%s: %s', e, proc)
         except psutil.Error as e:
-            logging.warn('%s: %s', e, proc)
+            logging.warning('%s: %s', e, proc)
 
     (terminated, running) = psutil.wait_procs(running, sigterm_timeout)
     if not running:
         return
 
-    running.sort()
+    running.sort(key=lambda p: p.pid)
     logging.info('Trying SIGKILL: pids=%s', [p.pid for p in running])
     for proc in running:
         try:
@@ -87,12 +96,12 @@
         except psutil.NoSuchProcess as e:
             logging.debug('%s: %s', e, proc)
         except psutil.Error as e:
-            logging.warn('%s: %s', e, proc)
+            logging.warning('%s: %s', e, proc)
 
     (sigkilled, running) = psutil.wait_procs(running, sigkill_timeout)
     if running:
-        running.sort()
-        logging.warn('Found leftover processes %s; address may be in use!',
+        running.sort(key=lambda p: p.pid)
+        logging.warning('Found leftover processes %s; address may be in use!',
                      [p.pid for p in running])
     else:
         logging.debug('Leftover processes have exited.')
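The docstring in the hunks above describes matching the target process by both its exe (a python interpreter) and its cmdline (the script name), so that editors or a tail of the logs do not get killed by a broad pkill. A sketch of that matching with psutil; the function name is hypothetical, and it assumes a psutil recent enough for process_iter() to accept an attrs list.

import psutil

def find_rpc_server_procs(script_name):
    """Finds processes whose interpreter is python and whose command line
    mentions script_name, mirroring the exe/cmdline check described above."""
    matches = []
    for proc in psutil.process_iter(['exe', 'cmdline']):
        exe = proc.info['exe'] or ''
        cmdline = proc.info['cmdline'] or []
        # Require a python interpreter *and* the script on the command line,
        # so an editor or 'tail -f' open on the script does not match.
        if 'python' in exe and any(script_name in arg for arg in cmdline):
            matches.append(proc)
    return matches

# Survivors would then go through the SIGTERM/SIGKILL passes shown above,
# e.g. (terminated, running) = psutil.wait_procs(matches, timeout=5).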
@@ -101,7 +110,7 @@
 class XmlRpcServer(threading.Thread):
     """Simple XMLRPC server implementation.
 
-    In theory, Python should provide a sane XMLRPC server implementation as
+    In theory, Python should provide a usable XMLRPC server implementation as
     part of its standard library.  In practice the provided implementation
     doesn't handle signals, not even EINTR.  As a result, we have this class.
 
@@ -154,13 +163,12 @@
         self._server.register_instance(delegate)
         self._delegates.append(delegate)
 
-
     def run(self):
         """Block and handle many XmlRpc requests."""
         logging.info('XmlRpcServer starting...')
-        # TODO(wiley) nested is deprecated, but we can't use the replacement
-        #       until we move to Python 3.0.
-        with contextlib.nested(*self._delegates):
+
+        def stack_inner():
+            """Handle requests to server until asked to stop running."""
             while self._keep_running:
                 try:
                     self._server.handle_request()
@@ -170,6 +178,14 @@
                     if v[0] != errno.EINTR:
                         raise
 
+        if use_nested:
+            with nested(*self._delegates):
+                stack_inner()
+        else:
+            with contextlib.ExitStack() as stack:
+                delegates = [stack.enter_context(d) for d in self._delegates]
+                stack_inner()
+
         for delegate in self._delegates:
             if hasattr(delegate, 'cleanup'):
                 delegate.cleanup()
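The comment added at the top of this file explains the dual code path: contextlib.nested on Python 2 versus contextlib.ExitStack on Python 3. A standalone sketch of the ExitStack side, with hypothetical delegate context managers, showing that a variable number of delegates is entered up front and unwound in reverse order when the block exits.

import contextlib

class Delegate(object):
    """Hypothetical delegate implementing the context-manager protocol the
    server expects from its delegates."""
    def __init__(self, name):
        self.name = name
    def __enter__(self):
        print('setup', self.name)
        return self
    def __exit__(self, exc_type, exc, tb):
        print('teardown', self.name)

delegates = [Delegate('wifi'), Delegate('bluetooth')]

with contextlib.ExitStack() as stack:
    for d in delegates:
        stack.enter_context(d)
    print('serving requests')   # all delegates are set up here
# On exit, ExitStack tears the delegates down in reverse order, as nested() did.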
diff --git a/client/deps/camera_hal3/camera_hal3.py b/client/deps/camera_hal3/camera_hal3.py
index f3b3137..f60be69 100755
--- a/client/deps/camera_hal3/camera_hal3.py
+++ b/client/deps/camera_hal3/camera_hal3.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/deps/dbus_protos/dbus_protos.py b/client/deps/dbus_protos/dbus_protos.py
index 276bd29..8f4f265 100644
--- a/client/deps/dbus_protos/dbus_protos.py
+++ b/client/deps/dbus_protos/dbus_protos.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright 2018 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/deps/fakegudev/fakegudev.py b/client/deps/fakegudev/fakegudev.py
index 234930d..ea45cd9 100755
--- a/client/deps/fakegudev/fakegudev.py
+++ b/client/deps/fakegudev/fakegudev.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/deps/fakegudev/src/Makefile b/client/deps/fakegudev/src/Makefile
index 6ea8442..82426fa 100644
--- a/client/deps/fakegudev/src/Makefile
+++ b/client/deps/fakegudev/src/Makefile
@@ -9,7 +9,7 @@
 PKGS_FAKESYSCALLS := glib-2.0
 PKG_CFLAGS_FAKESYSCALLS := $(shell ${PKG_CONFIG} --cflags $(PKGS_FAKESYSCALLS))
 PKG_LIBS_FAKESYSCALLS := $(shell ${PKG_CONFIG} --libs $(PKGS_FAKESYSCALLS))
-WARN := -Werror -Wall
+WARN := -Werror -Wall -Wno-deprecated-non-prototype
 DEBUG := -g -DFAKE_G_UDEV_DEBUG -DFAKE_SYSCALLS_DEBUG
 OPT := -O2
 
diff --git a/client/deps/fakemodem/fakemodem.py b/client/deps/fakemodem/fakemodem.py
index 234930d..ea45cd9 100755
--- a/client/deps/fakemodem/fakemodem.py
+++ b/client/deps/fakemodem/fakemodem.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/deps/fakemodem/src/fakemodem.c b/client/deps/fakemodem/src/fakemodem.c
index e0d1643..d921996 100644
--- a/client/deps/fakemodem/src/fakemodem.c
+++ b/client/deps/fakemodem/src/fakemodem.c
@@ -20,7 +20,7 @@
 #include <dbus/dbus-glib.h>
 
 GIOChannel* ioc;
-int masterfd;
+int primaryfd;
 
 typedef struct {
   GRegex *command;
@@ -61,7 +61,7 @@
 {
 }
 
-static gboolean master_read (GIOChannel *source, GIOCondition condition,
+static gboolean primary_read (GIOChannel *source, GIOCondition condition,
                              gpointer data);
 
 static const gchar *handle_cmd (FakeModem *fakemodem, const gchar *cmd);
@@ -216,7 +216,7 @@
   DBusGConnection *bus;
   DBusGProxy *proxy;
   GMainLoop* loop;
-  const char *slavedevice;
+  const char *helperdevice;
   struct termios t;
   FakeModem *fakemodem;
   GOptionContext *opt_ctx;
@@ -287,26 +287,26 @@
                                        "/",
                                        G_OBJECT (fakemodem));
 
-  masterfd = posix_openpt (O_RDWR | O_NOCTTY);
+  primaryfd = posix_openpt (O_RDWR | O_NOCTTY);
 
-  if (masterfd == -1
-      || grantpt (masterfd) == -1
-      || unlockpt (masterfd) == -1
-      || (slavedevice = ptsname (masterfd)) == NULL)
+  if (primaryfd == -1
+      || grantpt (primaryfd) == -1
+      || unlockpt (primaryfd) == -1
+      || (helperdevice = ptsname (primaryfd)) == NULL)
     exit (1);
 
-  printf ("%s\n", slavedevice);
+  printf ("%s\n", helperdevice);
   fflush (stdout);
 
   /* Echo is actively harmful here */
-  tcgetattr (masterfd, &t);
+  tcgetattr (primaryfd, &t);
   t.c_lflag &= ~ECHO;
-  tcsetattr (masterfd, TCSANOW, &t);
+  tcsetattr (primaryfd, TCSANOW, &t);
 
-  ioc = g_io_channel_unix_new (masterfd);
+  ioc = g_io_channel_unix_new (primaryfd);
   g_io_channel_set_encoding (ioc, NULL, NULL);
   g_io_channel_set_line_term (ioc, "\r", 1);
-  g_io_add_watch (ioc, G_IO_IN, master_read, fakemodem);
+  g_io_add_watch (ioc, G_IO_IN, primary_read, fakemodem);
 
   g_main_loop_run (loop);
 
@@ -346,7 +346,7 @@
 #undef VALUE
 #undef CVALUE
 
-static gboolean master_read (GIOChannel *source, GIOCondition condition,
+static gboolean primary_read (GIOChannel *source, GIOCondition condition,
                              gpointer data)
 {
   FakeModem *fakemodem = data;
@@ -388,9 +388,9 @@
   printf ("Line: '%s'\n", line);
 
   if (fakemodem->echo) {
-    rval = write (masterfd, line, term);
+    rval = write (primaryfd, line, term);
     assert(term == rval);
-    rval = write (masterfd, "\r\n", 2);
+    rval = write (primaryfd, "\r\n", 2);
     assert(2 == rval);
   }
 
@@ -433,13 +433,13 @@
     if (response == NULL)
       response = "OK";
     rstr = g_strdup_printf("\r\n%s\r\n", response);
-    rval = write (masterfd, rstr, strlen (rstr));
+    rval = write (primaryfd, rstr, strlen (rstr));
     assert(strlen(rstr) == rval);
     g_free (rstr);
   } else {
     gchar *rstr;
     rstr = g_strdup_printf("%s\n", response);
-    rval = write (masterfd, rstr, strlen (rstr));
+    rval = write (primaryfd, rstr, strlen (rstr));
     assert(strlen(rstr) == rval);
     g_free (rstr);
   }
@@ -495,9 +495,9 @@
   if (pat->reply && pat->reply[0]) {
     int rval;
     printf (" Reply: '%s'\n", pat->reply);
-    rval = write (masterfd, pat->reply, strlen (pat->reply));
+    rval = write (primaryfd, pat->reply, strlen (pat->reply));
     assert(strlen(pat->reply) == rval);
-    rval = write (masterfd, "\r\n", 2);
+    rval = write (primaryfd, "\r\n", 2);
     assert(2 == rval);
   }
 
@@ -510,10 +510,10 @@
 {
   int rval;
 
-  rval = write (masterfd, "\r\n", 2);
-  rval = write (masterfd, text, strlen (text));
+  rval = write (primaryfd, "\r\n", 2);
+  rval = write (primaryfd, text, strlen (text));
   assert(strlen(text) == rval);
-  rval = write (masterfd, "\r\n", 2);
+  rval = write (primaryfd, "\r\n", 2);
   assert(2 == rval);
 
   return TRUE;
diff --git a/client/deps/glmark2/glmark2.py b/client/deps/glmark2/glmark2.py
index f3b549a..443d5ab 100755
--- a/client/deps/glmark2/glmark2.py
+++ b/client/deps/glmark2/glmark2.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/deps/graphics/graphics.py b/client/deps/graphics/graphics.py
index 2c6e533..65f38f2 100755
--- a/client/deps/graphics/graphics.py
+++ b/client/deps/graphics/graphics.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/deps/gtest/gtest.py b/client/deps/gtest/gtest.py
index 455a1ec..26b7d11 100755
--- a/client/deps/gtest/gtest.py
+++ b/client/deps/gtest/gtest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/deps/iwcap/iwcap.py b/client/deps/iwcap/iwcap.py
index 3ab3ad6..ff0bee9 100755
--- a/client/deps/iwcap/iwcap.py
+++ b/client/deps/iwcap/iwcap.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import os, common
 from autotest_lib.client.bin import utils
diff --git a/client/deps/lansim/lansim.py b/client/deps/lansim/lansim.py
index 65599de..030666f 100644
--- a/client/deps/lansim/lansim.py
+++ b/client/deps/lansim/lansim.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/deps/libnet/libnet.py b/client/deps/libnet/libnet.py
index 3322004..fa7c50a 100755
--- a/client/deps/libnet/libnet.py
+++ b/client/deps/libnet/libnet.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import os
 from autotest_lib.client.bin import utils
diff --git a/client/deps/mysql/mysql.py b/client/deps/mysql/mysql.py
index e5f1d08..73b1377 100755
--- a/client/deps/mysql/mysql.py
+++ b/client/deps/mysql/mysql.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import os
 from autotest_lib.client.bin import utils
diff --git a/client/deps/nvmap_compactor/README b/client/deps/nvmap_compactor/README
index dcbb23c..aad9e2d 100644
--- a/client/deps/nvmap_compactor/README
+++ b/client/deps/nvmap_compactor/README
@@ -3,5 +3,5 @@
 # found in the LICENSE file.
 
 This dep installs ChromeOS nvmap_compactor into an image. Tests that depend
-on nvmap_compactor should add this as a dep in the setup. It leverages the Chrome OS
+on nvmap_compactor should add this as a dep in the setup. It leverages the ChromeOS
 build system and installs the package that gets created for nvmap_compactor.
diff --git a/client/deps/nvmap_compactor/nvmap_compactor.py b/client/deps/nvmap_compactor/nvmap_compactor.py
index 44c6fde..eeb7238 100755
--- a/client/deps/nvmap_compactor/nvmap_compactor.py
+++ b/client/deps/nvmap_compactor/nvmap_compactor.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/deps/pgpool/pgpool.py b/client/deps/pgpool/pgpool.py
index f1e2922..c6eb19f 100755
--- a/client/deps/pgpool/pgpool.py
+++ b/client/deps/pgpool/pgpool.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import os
 from autotest_lib.client.bin import utils
diff --git a/client/deps/pgsql/pgsql.py b/client/deps/pgsql/pgsql.py
index c980701..83d4305 100755
--- a/client/deps/pgsql/pgsql.py
+++ b/client/deps/pgsql/pgsql.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import os
 from autotest_lib.client.bin import utils
diff --git a/client/deps/policy_protos/policy_protos.py b/client/deps/policy_protos/policy_protos.py
index 74c860b..418c80f 100644
--- a/client/deps/policy_protos/policy_protos.py
+++ b/client/deps/policy_protos/policy_protos.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright 2018 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -25,6 +25,9 @@
         'usr/share/protofiles/device_management_backend.proto',
         'usr/share/protofiles/cloud_policy.proto',
         'usr/share/protofiles/chrome_extension_policy.proto',
+        'usr/share/protofiles/private_membership_rlwe.proto',
+        'usr/share/protofiles/private_membership.proto',
+        'usr/share/protofiles/serialization.proto',
         'usr/include/chromeos/dbus/login_manager/policy_descriptor.proto',
 ]
 
diff --git a/client/deps/webgl_mpd/webgl_mpd.py b/client/deps/webgl_mpd/webgl_mpd.py
index c3092a9..7ac0e28 100755
--- a/client/deps/webgl_mpd/webgl_mpd.py
+++ b/client/deps/webgl_mpd/webgl_mpd.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/client/profilers/cmdprofile/__init__.py b/client/profilers/cmdprofile/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/profilers/cmdprofile/__init__.py
+++ /dev/null
diff --git a/client/profilers/cmdprofile/cmdprofile.py b/client/profilers/cmdprofile/cmdprofile.py
deleted file mode 100644
index e119c3c..0000000
--- a/client/profilers/cmdprofile/cmdprofile.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Lint as: python2, python3
-"""
-Sets up a subprocess to run any generic command in the background every
-few seconds (by default the interval is 60 secs)
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import time, os, subprocess
-from six.moves import zip
-
-from autotest_lib.client.bin import profiler
-from autotest_lib.client.common_lib import utils, error
-
-class cmdprofile(profiler.profiler):
-    version = 2
-    supports_reboot = True
-
-
-    def initialize(self, cmds=['ps'], interval=60, outputfile='cmdprofile',
-                   outputfiles=None):
-
-        # do some basic sanity checking on the parameters
-        if not outputfiles and not outputfile:
-            raise error.TestError(
-                'cmdprofile cannot run if neither outputfile nor outputfile '
-                'is specified')
-        elif outputfiles and len(outputfiles) != len(cmds):
-            raise error.TestError(
-                'cmdprofile paramter outputfiles has length %d and cmds has '
-                'length %d, but both lists must have the same length' %
-                (len(outputfiles), len(cmds)))
-
-        self.interval = interval
-        self.cmds = cmds
-        if outputfiles:
-            # outputfiles overrides outputfile
-            self.outputfiles = outputfiles
-        else:
-            self.outputfiles = [outputfile] * len(cmds)
-
-
-    def start(self, test):
-        self.pid = os.fork()
-        if self.pid:  # parent
-            return
-        else:  # child
-            while True:
-                for cmd, outputfile in zip(self.cmds, self.outputfiles):
-                    logfile = open(os.path.join(test.profdir, outputfile), 'a')
-                    utils.run(cmd, stdout_tee=logfile, stderr_tee=logfile)
-                    logfile.write('\n')
-                    logfile.close()
-                time.sleep(self.interval)
-
-
-    def stop(self, test):
-        utils.nuke_pid(self.pid)
diff --git a/client/profilers/cmdprofile/control b/client/profilers/cmdprofile/control
deleted file mode 100644
index 557f6b1..0000000
--- a/client/profilers/cmdprofile/control
+++ /dev/null
@@ -1,3 +0,0 @@
-job.profilers.add('cmdprofile')
-job.run_test('sleeptest', seconds=5)
-job.profilers.delete('cmdprofile')
diff --git a/client/profilers/cpistat/__init__.py b/client/profilers/cpistat/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/profilers/cpistat/__init__.py
+++ /dev/null
diff --git a/client/profilers/cpistat/control b/client/profilers/cpistat/control
deleted file mode 100644
index 7eac61e..0000000
--- a/client/profilers/cpistat/control
+++ /dev/null
@@ -1,3 +0,0 @@
-job.profilers.add('cpistat')
-job.run_test('sleeptest', seconds=10)
-job.profilers.delete('cpistat')
diff --git a/client/profilers/cpistat/cpistat b/client/profilers/cpistat/cpistat
deleted file mode 100755
index 1ba23ff..0000000
--- a/client/profilers/cpistat/cpistat
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/python2
-
-"""
-python-libpfm4 provides python bindings to the libpfm4
-library and the perf_event kernel subsystem. This
-script builds on them to provide a *stat like interface
-to CPU performance counters.
-
-Run as: ./cpistat -c cpulist -e eventlist
-
-Depends on libpfm4: http://perfmon2.sf.net/
-
-git://perfmon2.git.sourceforge.net/gitroot/perfmon2/libpfm4
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import sys, os, optparse, time, struct, perfmon
-from six.moves import range
-
-if __name__ == '__main__':
-    parser = optparse.OptionParser()
-    parser.add_option('-e', '--events', help='Events to use',
-                       action='store', dest='events')
-    parser.add_option('-c', '--cpulist', help='CPUs to monitor',
-                       action='store', dest='cpulist')
-    parser.set_defaults(events='PERF_COUNT_HW_CPU_CYCLES,' +
-                               'PERF_COUNT_HW_INSTRUCTIONS')
-    (options, args) = parser.parse_args()
-
-    show_per_cpu = False
-    if not options.cpulist:
-        ncpus = os.sysconf('SC_NPROCESSORS_ONLN')
-        cpus = list(range(0, ncpus))
-    else:
-        cpus = options.cpulist.split(',')
-        cpus = [ int(c) for c in cpus ]
-        show_per_cpu = True
-
-    if options.events:
-        events = options.events.split(',')
-    else:
-        raise ValueError('You need to specify events to monitor')
-
-    s = perfmon.SystemWideSession(cpus, events)
-
-    s.start()
-    # Measuring loop
-    interval = 1
-    iters = -1
-    infinite = True
-    if len(args) == 2:
-        interval = int(args[0])
-        iters = int(args[1])
-        infinite = False
-
-    delta = {}
-    last = {}
-    sum = {}
-    for e in events:
-        delta[e] = {}
-        last[e] = {}
-        sum[e] = {}
-        for c in cpus:
-            delta[e][c] = 0
-            last[e][c] = 0
-
-    while infinite or iters:
-        for i in range(0, len(events)):
-          e = events[i]
-          sum[e] = 0
-          for c in cpus:
-              count = struct.unpack('L', s.read(c, i))[0]
-              delta[e][c] = count - last[e][c]
-              last[e][c] = count
-              if show_per_cpu:
-                  print('''CPU%d: %s\t%lu''' % (c, e, delta[e][c]))
-              sum[e] += delta[e][c]
-
-        cycles = sum['PERF_COUNT_HW_CPU_CYCLES']
-        instructions = sum['PERF_COUNT_HW_INSTRUCTIONS']
-        CPI = cycles * 1.0/instructions
-        print('cycles: %12lu, instructions: %12lu, CPI: %2.4f'
-               % (cycles, instructions, CPI))
-        sys.stdout.flush()
-        time.sleep(interval)
-        iters = iters - 1
diff --git a/client/profilers/cpistat/cpistat.py b/client/profilers/cpistat/cpistat.py
deleted file mode 100644
index 1a4328d..0000000
--- a/client/profilers/cpistat/cpistat.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-Uses perf_events to count cycles and instructions
-
-Defaults options:
-job.profilers.add('cpistat', interval=1)
-"""
-import time, os, subprocess
-from autotest_lib.client.bin import profiler
-
-class cpistat(profiler.profiler):
-    version = 1
-
-    def initialize(self, interval = 1):
-        self.interval = interval
-
-
-    def start(self, test):
-        cmd = os.path.join(self.bindir, 'site_cpistat')
-        if not os.path.exists(cmd):
-            cmd = os.path.join(self.bindir, 'cpistat')
-        logfile = open(os.path.join(test.profdir, "cpistat"), 'w')
-        p = subprocess.Popen(cmd, stdout=logfile,
-                             stderr=subprocess.STDOUT)
-        self.pid = p.pid
-
-
-    def stop(self, test):
-        os.kill(self.pid, 15)
diff --git a/client/profilers/lttng/__init__.py b/client/profilers/lttng/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/profilers/lttng/__init__.py
+++ /dev/null
diff --git a/client/profilers/lttng/control b/client/profilers/lttng/control
deleted file mode 100644
index a760649..0000000
--- a/client/profilers/lttng/control
+++ /dev/null
@@ -1,5 +0,0 @@
-# trace syscall entry/exit, irq entry/exit, trap entry/exit,
-# and context switches
-job.profilers.add('lttng')
-job.run_test('sleeptest', seconds=1)
-job.profilers.delete('lttng')
diff --git a/client/profilers/lttng/ltt-control-0.51-12082008.tar.gz b/client/profilers/lttng/ltt-control-0.51-12082008.tar.gz
deleted file mode 100644
index 428c287..0000000
--- a/client/profilers/lttng/ltt-control-0.51-12082008.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/profilers/lttng/lttng.py b/client/profilers/lttng/lttng.py
deleted file mode 100644
index c1eaead..0000000
--- a/client/profilers/lttng/lttng.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Lint as: python2, python3
-"""
-Trace kernel events with Linux Tracing Toolkit (lttng).
-You need to install the lttng patched kernel in order to use the profiler.
-
-Examples:
-    job.profilers.add('lttng', tracepoints = None): enable all trace points.
-    job.profilers.add('lttng', tracepoints = []): disable all trace points.
-    job.profilers.add('lttng', tracepoints = ['kernel_arch_syscall_entry',
-                                              'kernel_arch_syscall_exit'])
-                               will only trace syscall events.
-Take a look at /proc/ltt for the list of the tracing events currently
-supported by lttng and their output formats.
-
-To view the collected traces, copy results/your-test/profiler/lttng
-to a machine that has Linux Tracing Toolkit Viewer (lttv) installed:
-    test$ scp -r results/your-test/profiler/lttng user@localmachine:/home/tmp/
-Then you can examine the traces either in text mode or in GUI:
-    localmachine$ lttv -m textDump -t /home/tmp/lttng
-or
-    localmachine$ lttv-gui -t /home/tmp/lttng &
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import os, shutil, time
-
-from autotest_lib.client.bin import utils, profiler
-from autotest_lib.client.common_lib import error
-
-class lttng(profiler.profiler):
-    version = 1
-
-    # http://ltt.polymtl.ca/lttng/ltt-control-0.51-12082008.tar.gz
-    def setup(self, tarball='ltt-control-0.51-12082008.tar.gz', **dargs):
-        self.tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(self.tarball, self.srcdir)
-        os.chdir(self.srcdir)
-
-        utils.configure()
-        utils.make()
-
-
-    # tracepoints: list of trace points to enable
-    # outputsize: size limit for lttng output file. -1: no limit.
-    def initialize(self, outputsize=1048576, tracepoints=None, **dargs):
-        self.job.require_gcc()
-
-        self.tracepoints = tracepoints
-        self.ltt_bindir = os.path.join(self.srcdir, 'lttctl')
-        self.lttctl = os.path.join(self.ltt_bindir, 'lttctl')
-        self.lttd = os.path.join(self.srcdir, 'lttd', 'lttd')
-        self.armall = os.path.join(self.ltt_bindir, 'ltt-armall')
-        self.disarmall = os.path.join(self.ltt_bindir, 'ltt-disarmall')
-        self.mountpoint = '/mnt/debugfs'
-        self.outputsize = outputsize
-
-        os.putenv('LTT_DAEMON', self.lttd)
-
-        if not os.path.exists(self.mountpoint):
-            os.mkdir(self.mountpoint)
-
-        utils.system('mount -t debugfs debugfs ' + self.mountpoint,
-                                                            ignore_status=True)
-        utils.system('modprobe ltt-control')
-        utils.system('modprobe ltt-statedump')
-        # clean up from any tracing we left running
-        utils.system(self.lttctl + ' -n test -R', ignore_status=True)
-        utils.system(self.disarmall, ignore_status=True)
-
-        if tracepoints is None:
-            utils.system(self.armall, ignore_status=True)
-        else:
-            for tracepoint in self.tracepoints:
-                if tracepoint in ('list_process_state',
-                                  'user_generic_thread_brand', 'fs_exec',
-                                  'kernel_process_fork', 'kernel_process_free',
-                                  'kernel_process_exit',
-                                  'kernel_arch_kthread_create',
-                                  'list_statedump_end', 'list_vm_map'):
-                    channel = 'processes'
-                elif tracepoint in ('list_interrupt',
-                                    'statedump_idt_table',
-                                    'statedump_sys_call_table'):
-                    channel = 'interrupts'
-                elif tracepoint in ('list_network_ipv4_interface',
-                                    'list_network_ip_interface'):
-                    channel = 'network'
-                elif tracepoint in ('kernel_module_load', 'kernel_module_free'):
-                    channel = 'modules'
-                else:
-                    channel = ''
-                print('Connecting ' + tracepoint)
-                utils.write_one_line('/proc/ltt', 'connect ' + tracepoint
-                                     + ' default dynamic ' + channel)
-
-    def start(self, test):
-        self.output = os.path.join(test.profdir, 'lttng')
-        utils.system('%s -n test -d -l %s/ltt -t %s' %
-                                  (self.lttctl, self.mountpoint, self.output))
-
-
-    def stop(self, test):
-        utils.system(self.lttctl + ' -n test -R')
-        time.sleep(10)
-        if self.outputsize != -1:
-            # truncate lttng output file to the specified limit
-            for filename in os.listdir(self.output):
-                file_path = os.path.join(self.output, filename)
-                if os.path.isdir(file_path):
-                    continue
-                size = os.stat(file_path)[6] # grab file size
-                if size > self.outputsize:
-                    f = open(file_path, 'r+')
-                    f.truncate(self.outputsize)
-                    f.close()
-        tarball = os.path.join(test.profdir, 'lttng.tar.bz2')
-        utils.system("tar -cvjf %s -C %s %s" % (tarball, test.profdir, 'lttng'))
-        utils.system('rm -rf ' + self.output)
diff --git a/client/profilers/mpstat/__init__.py b/client/profilers/mpstat/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/profilers/mpstat/__init__.py
+++ /dev/null
diff --git a/client/profilers/mpstat/control b/client/profilers/mpstat/control
deleted file mode 100644
index d922eff..0000000
--- a/client/profilers/mpstat/control
+++ /dev/null
@@ -1,3 +0,0 @@
-job.profilers.add('mpstat')
-job.run_test('sleeptest', seconds=5)
-job.profilers.delete('mpstat')
diff --git a/client/profilers/mpstat/mpstat.py b/client/profilers/mpstat/mpstat.py
deleted file mode 100644
index 74fc43f..0000000
--- a/client/profilers/mpstat/mpstat.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""
-Sets up a subprocess to run mpstat on a specified interval, default 1 second
-"""
-import time, os, subprocess
-from autotest_lib.client.bin import profiler
-
-
-class mpstat(profiler.profiler):
-    version = 1
-
-
-    def initialize(self, interval = 1):
-        self.interval = interval
-
-
-    def start(self, test):
-        cmd = "mpstat -P ALL %d" % self.interval
-        logfile = open(os.path.join(test.profdir, "mpstat"), 'w')
-        p = subprocess.Popen(cmd, shell=True, stdout=logfile,
-                                        stderr=subprocess.STDOUT)
-        self.pid = p.pid
-
-
-    def stop(self, test):
-        os.kill(self.pid, 15)
-
-
-    def report(self, test):
-        return None
diff --git a/client/profilers/powertop/src/Changelog b/client/profilers/powertop/src/Changelog
index 04a659a..4f8b013 100644
--- a/client/profilers/powertop/src/Changelog
+++ b/client/profilers/powertop/src/Changelog
@@ -17,7 +17,7 @@
 	devices
 	* Add support for using the sysfs power supply class
 	  (Donnie Berkholz)
-	* Fix USB suspend suggestion to take semi blacklisted stuff into
+	* Fix USB suspend suggestion to take semi denylisted stuff into
 	  account
 	* Add support for Intel's 4965 wireless power saving mode
 	* On new enough cpus (and kernel 2.6.25), report which C-states the
diff --git a/client/profilers/sar/__init__.py b/client/profilers/sar/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/profilers/sar/__init__.py
+++ /dev/null
diff --git a/client/profilers/sar/control b/client/profilers/sar/control
deleted file mode 100644
index ffda506..0000000
--- a/client/profilers/sar/control
+++ /dev/null
@@ -1,3 +0,0 @@
-job.profilers.add('sar', 2)
-job.run_test('sleeptest', seconds=5)
-job.profilers.delete('sar')
diff --git a/client/profilers/sar/sar.py b/client/profilers/sar/sar.py
deleted file mode 100644
index 7ee28b3..0000000
--- a/client/profilers/sar/sar.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""
-Sets up a subprocess to run sar from the sysstat suite
-
-Default options:
-sar -A -f
-"""
-import os, shutil, subprocess, time
-from autotest_lib.client.bin import utils, profiler, os_dep
-
-
-class sar(profiler.profiler):
-    """
-    The sar command writes to standard output the contents of selected
-    cumulative activity counters in the operating system. This profiler
-    executes sar and redirects its output in a file located in the profiler
-    results dir.
-    """
-    version = 1
-
-    def initialize(self, interval=1):
-        """
-        Set sar interval and verify what flags the installed sar supports.
-
-        @param interval: Interval used by sar to produce system data.
-        """
-        self.interval = interval
-        self.sar_path = os_dep.command('sar')
-        # If using older versions of sar, command below means: Measure default
-        # params using interval of 1 second continuously. For newer versions,
-        # behavior has changed - to generate reports continuously just omit the
-        # count parameter.
-        t_cmd = self.sar_path + " 1 0"
-        t_process = subprocess.Popen(t_cmd, shell=True,
-                                     stdout=subprocess.PIPE,
-                                     stderr=subprocess.PIPE)
-        # Wait a little to see if process is going to fail or work
-        time.sleep(3)
-        if t_process.poll():
-            # Sar process returned, so 0 doesn't mean generate continuously
-            self.cmd = self.sar_path + " -o %s %d"
-        else:
-            # Sar process didn't return, so 0 means generate continuously
-            # Just terminate the process
-            self.cmd = self.sar_path + " -o %s %d 0"
-            os.kill(t_process.pid, 15)
-
-
-    def start(self, test):
-        """
-        Starts sar subprocess.
-
-        @param test: Autotest test on which this profiler will operate on.
-        """
-        logfile = open(os.path.join(test.profdir, "sar"), 'w')
-        # Save the sar data as binary, convert to text after the test.
-        raw = os.path.join(test.profdir, "sar.raw")
-        cmd = self.cmd % (raw, self.interval)
-        self.sar_process = subprocess.Popen(cmd, shell=True, stdout=logfile,
-                                            stderr=subprocess.STDOUT)
-
-
-    def stop(self, test):
-        """
-        Stops profiler execution by sending a SIGTERM to sar process.
-
-        @param test: Autotest test on which this profiler will operate on.
-        """
-        try:
-            os.kill(self.sar_process.pid, 15)
-        except OSError:
-            pass
-
-    def report(self, test):
-        """
-        Report function. Convert the binary sar data to text.
-
-        @param test: Autotest test on which this profiler will operate on.
-        """
-        raw = os.path.join(test.profdir, "sar.raw")
-        output = os.path.join(test.profdir, "sar")
-        utils.system('/usr/bin/sar -A -f %s > %s' % (raw, output))
diff --git a/client/samples/control.fs b/client/samples/control.fs
deleted file mode 100644
index a0a65c7..0000000
--- a/client/samples/control.fs
+++ /dev/null
@@ -1,32 +0,0 @@
-AUTHOR = "Autotest Team <autotest@test.kernel.org>"
-TIME = "MEDIUM"
-NAME = "Sample - Filesystem tests with different filesystems"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-
-DOC = """
-Runs a series of filesystem tests on a loopback partition using different
-filesystem types. his shows some features of the job.partition method, such as
-creating loopback partitions instead of using real disk partitions, looping.
-"""
-
-partition = job.partition('/tmp/looped', 1024, job.tmpdir)
-# You can use also 'real' partitions, just comment the above and uncomment
-# the below
-#partition = job.partition('/dev/sdb1', job.tmpdir)
-
-def test_fs():
-    partition.mkfs(fstype)
-    partition.mount()
-    try:
-        job.run_test('fsx', dir=partition.mountpoint, tag=fstype)
-        job.run_test('iozone', dir=partition.mountpoint, tag=fstype)
-        job.run_test('dbench', dir=partition.mountpoint, tag=fstype)
-    finally:
-        partition.unmount()
-        partition.fsck()
-
-
-for fstype in ('ext2', 'ext3', 'jfs', 'xfs', 'reiserfs'):
-    job.run_group(test_fs)
diff --git a/client/samples/control.fs_options b/client/samples/control.fs_options
deleted file mode 100644
index e126c3c..0000000
--- a/client/samples/control.fs_options
+++ /dev/null
@@ -1,36 +0,0 @@
-AUTHOR = "Autotest Team <autotest@test.kernel.org>"
-TIME = "MEDIUM"
-NAME = "Sample - Filesystem tests with different fs options"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-
-DOC = """
-Runs a series of filesystem tests on a loopback partition. This shows some
-features of the job.partition method, such as creating loopback partitions
-instead of using real disk partitions, looping and tags.
-"""
-
-partition = job.partition(device='/tmp/looped', loop_size=1024,
-                          mountpoint=job.tmpdir)
-# You can use also 'real' partitions, just comment the above and uncomment
-# the below
-#partition = job.partition('/dev/sdb1', job.tmpdir)
-
-iters = 10
-
-for fstype, mountopts, tag in (('ext2', '', 'ext2'),
-                               ('ext3', '-o data=writeback', 'ext3writeback'),
-                               ('ext3', '-o data=ordered', 'ext3ordered'),
-                               ('ext3', '-o data=journal', 'ext3journal'),
-                               ('ext4', '-o data=ordered', 'ext4ordered'),
-                               ('ext4', '-o data=journal', 'ext4journal'),):
-    partition.mkfs(fstype)
-    partition.mount(args=mountopts)
-    try:
-        job.run_test('fsx', dir=job.tmpdir, tag=tag)
-        job.run_test('iozone', dir=job.tmpdir, iterations=iters, tag=tag)
-        job.run_test('dbench', iterations=iters, dir=job.tmpdir, tag=tag)
-        job.run_test('tiobench', dir=job.tmpdir, tag=tag)
-    finally:
-        partition.unmount()
diff --git a/client/samples/control.interactive_console b/client/samples/control.interactive_console
deleted file mode 100644
index 75b09ab..0000000
--- a/client/samples/control.interactive_console
+++ /dev/null
@@ -1,25 +0,0 @@
-AUTHOR = "Steve Howard <showard@google.com>"
-TIME = "SHORT"
-NAME = "Sample - Autotest console"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-
-DOC = """
-Gives you an interactive interpreter within an autotest control file.
-
-If you install IPython (http://ipython.scipy.org/, Ubuntu and Fedora's package
-"ipython"), you'll get a snazzy IPython console with readline and completion
-and all that. Otherwise you'll get a simple python console.
-
-The point of this control file is to give you an interactive interpreter with
-all autotest 'magic' loaded in, so you can inspect objects and have fun.
-"""
-
-try:
-    import IPython
-    ipshell = IPython.Shell.IPShellEmbed(argv=[], banner='autotest console')
-    ipshell()
-except ImportError:
-    import code
-    code.interact('autotest console', raw_input)
diff --git a/client/samples/control.oprofile b/client/samples/control.oprofile
deleted file mode 100644
index db5730a..0000000
--- a/client/samples/control.oprofile
+++ /dev/null
@@ -1,24 +0,0 @@
-AUTHOR = "Autotest Team <autotest@test.kernel.org>"
-TIME = "MEDIUM"
-NAME = "Sample - Using profilers"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-
-DOC = """
-Runs our sleeptest (bogus test that only sleeps for a given amount of time),
-while running the oprofile profilers, with and without special parameters
-passed to the profiler.
-"""
-
-import logging
-
-logging.info("Testing default event")
-job.profilers.add('oprofile')
-job.run_test('sleeptest', seconds=5, tag='default')
-job.profilers.delete('oprofile')
-
-logging.info("Testing ICACHE_MISSES")
-job.profilers.add('oprofile', 'ICACHE_MISSES:100000')
-job.run_test('sleeptest', seconds=5, tag='icache_misses')
-job.profilers.delete('oprofile')
diff --git a/client/samples/control.oprofile_power5 b/client/samples/control.oprofile_power5
deleted file mode 100644
index 18eecc9..0000000
--- a/client/samples/control.oprofile_power5
+++ /dev/null
@@ -1,43 +0,0 @@
-AUTHOR = "Autotest Team <autotest@test.kernel.org>"
-TIME = "MEDIUM"
-NAME = "Sample - Using oprofile - specific power5 options"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-
-DOC = """
-Runs our sleeptest (bogus test that only sleeps for a given amount of time),
-while running the oprofile profilers, with and without special parameters
-passed to the profiler. This particular control shows special features of
-oprofile under power 5 cpu architecture.
-"""
-
-import logging
-
-logging.info("Testing default event")
-job.profilers.add('oprofile')
-job.run_test('sleeptest', seconds=5, tag='default')
-job.profilers.delete('oprofile')
-
-logging.info("Testing specified vmlinux")
-job.profilers.add('oprofile', '/boot/vmlinux-autotest')
-job.run_test('sleeptest', seconds=5, tag='vmlinux')
-job.profilers.delete('oprofile')
-
-logging.info("Testing one event")
-job.profilers.add('oprofile', None, ['PM_RUN_CYC_GRP153:100000'])
-job.run_test('sleeptest', seconds=5, tag='one')
-job.profilers.delete('oprofile')
-
-logging.info("Testing multiple events")
-job.profilers.add('oprofile', None,
-                  ['PM_RUN_CYC_GRP153:100000', 'PM_INST_CMPL_GRP153:10000'])
-job.run_test('sleeptest', seconds=5, tag='multi')
-job.profilers.delete('oprofile')
-
-logging.info("Testing other args")
-job.profilers.add('oprofile', None,
-                  ['PM_RUN_CYC_GRP153:150000', 'PM_INST_CMPL_GRP153:150000'],
-                  '--callgraph=3')
-job.run_test('sleeptest', seconds=5, tag='other')
-job.profilers.delete('oprofile')
diff --git a/client/samples/control.parallel b/client/samples/control.parallel
deleted file mode 100644
index 47a5ac9..0000000
--- a/client/samples/control.parallel
+++ /dev/null
@@ -1,20 +0,0 @@
-AUTHOR = "Autotest Team <autotest@test.kernel.org>"
-TIME = "SHORT"
-NAME = "Sample - Parallel test execution"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-
-DOC = """
-Runs 2 client tests in parallel, with different options.
-"""
-
-def kernbench():
-    job.run_test('kernbench', iterations=2, threads=5)
-
-
-def dbench():
-    job.run_test('dbench')
-
-
-job.parallel([kernbench], [dbench])
diff --git a/client/samples/control.profilers b/client/samples/control.profilers
deleted file mode 100644
index 5ed545f..0000000
--- a/client/samples/control.profilers
+++ /dev/null
@@ -1,24 +0,0 @@
-AUTHOR = "Autotest Team <autotest@test.kernel.org>"
-TIME = "SHORT"
-NAME = "Sample - More profilers"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-
-DOC = """
-Runs sleeptest with differnt profilers present in the autotest tree. Also,
-it shows the convenience logging methods, such as logging.info and
-logging.error.
-"""
-
-import logging
-
-for profiler in ('readprofile', 'oprofile', 'catprofile', 'lockmeter'):
-    try:
-        logging.info("Testing profiler %s", profiler)
-        job.profilers.add(profiler)
-        job.run_test('sleeptest', seconds=5, tag=profiler)
-        job.profilers.delete(profiler)
-    except:
-        logging.error("Test of profiler %s failed", profiler)
-        raise
diff --git a/client/samples/control.soft_reboot b/client/samples/control.soft_reboot
deleted file mode 100644
index 30f3403..0000000
--- a/client/samples/control.soft_reboot
+++ /dev/null
@@ -1,29 +0,0 @@
-AUTHOR = "Autotest Team <autotest@test.kernel.org>"
-TIME = 'MEDIUM'
-NAME = "Sample - Machine reboot"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-
-DOC = """
-This test will reboot the machine ITERATIONS number of times.  Note that if you
-use this, you'll need to use the step engine for any tests that occur after
-this one.  This means that this may not play well when run from the autotest
-front end.
-"""
-
-ITERATIONS = 5
-
-tries = job.get_state('soft_reboot_tries', 0) + 1
-job.set_state("soft_reboot_tries", tries)
-
-if tries < ITERATIONS:
-    import sys
-    this_functions_name = sys._getframe().f_code.co_name
-    if this_functions_name != "?":
-        # If we're not in a function (i.e. we get "?") then we're
-        # not using the step engine and thus no next step is
-        # necessary
-        job.next_step_prepend(this_functions_name)
-
-if tries <= ITERATIONS:
-    job.reboot()
diff --git a/client/samples/filesystem b/client/samples/filesystem
deleted file mode 100644
index 2fd1b9d..0000000
--- a/client/samples/filesystem
+++ /dev/null
@@ -1,25 +0,0 @@
-# Uncomment this line, and replace the device with something sensible
-# for you ...
-# fs = job.partition('/dev/hda2', job.tmpdir)
-# or ...
-
-part = job.partition('/tmp/looped', 1024, job.tmpdir)
-
-# dbench 1024, ltp, 1024-byte blocksize, a few other things.  Lots of fscking.
-# I haven't tested nobh mode yet, 
-# and I have yet to point run-bash-shared-mapping at it.
-# (different mount options for ext3)
-
-def test_fs():
-	part.mkfs(fstype)
-	part.mount()
-	try:
-		job.run_test('fsx', dir=part.mountpoint, tag=fstype)
-		job.run_test('iozone', dir=part.mountpoint, tag=fstype)
-		job.run_test('dbench', dir=part.mountpoint, tag=fstype)
-	finally:
-		part.unmount()
-		part.fsck()
-
-for fstype in ('ext2', 'ext3', 'jfs', 'xfs', 'reiserfs'):
-	job.run_group(test_fs)
diff --git a/client/setup_modules.py b/client/setup_modules.py
index 9cb59eb..ef5c51e 100644
--- a/client/setup_modules.py
+++ b/client/setup_modules.py
@@ -1,67 +1,74 @@
-__author__ = "jadmanski@google.com (John Admanski)"
-
-import os, sys
+import os
+import re
+import six
+import sys
 
 # This must run on Python versions less than 2.4.
 dirname = os.path.dirname(sys.modules[__name__].__file__)
-common_dir = os.path.abspath(os.path.join(dirname, "common_lib"))
+common_dir = os.path.abspath(os.path.join(dirname, 'common_lib'))
 sys.path.insert(0, common_dir)
 import check_version
 sys.path.pop(0)
-check_version.check_python_version()
-
-import glob, traceback, types
 
 
-def _create_module(name):
-    """Create a single top-level module"""
-    module = types.ModuleType(name)
-    sys.modules[name] = module
-    return module
+def _get_pyversion_from_args():
+    """Extract, format, & pop the current py_version from args, if provided."""
+    py_version = 3
+    py_version_re = re.compile(r'--py_version=(\w+)\b')
+
+    version_found = False
+    for i, arg in enumerate(sys.argv):
+        if not arg.startswith('--py_version'):
+            continue
+        result = py_version_re.search(arg)
+        if result:
+            if version_found:
+                raise ValueError('--py_version may only be specified once.')
+            py_version = result.group(1)
+            version_found = True
+            if py_version not in ('2', '3'):
+                raise ValueError('Python version must be "2" or "3".')
+
+            # Remove the arg so other argparsers don't get grumpy.
+            sys.argv.pop(i)
+
+    return py_version
 
 
-def _create_module_and_parents(name):
-    """Create a module, and all the necessary parents"""
-    parts = name.split(".")
-    # first create the top-level module
-    parent = _create_module(parts[0])
-    created_parts = [parts[0]]
-    parts.pop(0)
-    # now, create any remaining child modules
-    while parts:
-        child_name = parts.pop(0)
-        module = types.ModuleType(child_name)
-        setattr(parent, child_name, module)
-        created_parts.append(child_name)
-        sys.modules[".".join(created_parts)] = module
-        parent = module
+def _desired_version():
+    """
+    Returns desired python version.
+
+    If the PY_VERSION env var is set, just return that. This is the case
+    when autoserv kicks off autotest on the server side via a job.run(), or
+    when a process has created a subprocess.
+
+    Otherwise, parse & pop the sys.argv for the '--py_version' flag. If no
+    flag is set, default to python 3.
+
+    """
+    # Even if the arg is in the env vars, we will attempt to get it from the
+    # args, so that it can be popped before any other argparsers run.
+    py_version = _get_pyversion_from_args()
+
+    if os.getenv('PY_VERSION'):
+        return int(os.getenv('PY_VERSION'))
+
+    os.environ['PY_VERSION'] = str(py_version)
+    return int(py_version)
 
 
-def _import_children_into_module(parent_module_name, path):
-    """Import all the packages on a path into a parent module"""
-    # find all the packages at 'path'
-    names = []
-    for filename in os.listdir(path):
-        full_name = os.path.join(path, filename)
-        if not os.path.isdir(full_name):
-            continue   # skip files
-        if "." in filename:
-            continue   # if "." is in the name it's not a valid package name
-        if not os.access(full_name, os.R_OK | os.X_OK):
-            continue   # need read + exec access to make a dir importable
-        if "__init__.py" in os.listdir(full_name):
-            names.append(filename)
-    # import all the packages and insert them into 'parent_module'
-    sys.path.insert(0, path)
-    for name in names:
-        module = __import__(name)
-        # add the package to the parent
-        parent_module = sys.modules[parent_module_name]
-        setattr(parent_module, name, module)
-        full_name = parent_module_name + "." + name
-        sys.modules[full_name] = module
-    # restore the system path
-    sys.path.pop(0)
+desired_version = _desired_version()
+if desired_version == sys.version_info.major:
+    os.environ['AUTOTEST_NO_RESTART'] = 'True'
+else:
+    # There are cases where this can be set (e.g. by test_that), but a
+    # subprocess is launched with the incorrect version.
+    if os.getenv('AUTOTEST_NO_RESTART'):
+        del os.environ['AUTOTEST_NO_RESTART']
+    check_version.check_python_version(desired_version)
+
+import glob, traceback
 
 
 def import_module(module, from_where):
@@ -91,10 +98,10 @@
 
 def _monkeypatch_logging_handle_error():
     # Hack out logging.py*
-    logging_py = os.path.join(os.path.dirname(__file__), "common_lib",
-                              "logging.py*")
+    logging_py = os.path.join(os.path.dirname(__file__), 'common_lib',
+                              'logging.py*')
     if glob.glob(logging_py):
-        os.system("rm -f %s" % logging_py)
+        os.system('rm -f %s' % logging_py)
 
     # Monkey patch our own handleError into the logging module's StreamHandler.
     # A nicer way of doing this -might- be to have our own logging module define
@@ -105,35 +112,153 @@
     logging.Handler.handleError = _autotest_logging_handle_error
 
 
-def setup(base_path, root_module_name=""):
+def _insert_site_packages(root):
+    # Allow locally installed third-party packages to be found
+    # before any that are installed on the system itself when not
+    # running as a client.
+    # This is primarily for the benefit of frontend and tko so that they
+    # may use libraries other than those available as system packages.
+    if six.PY2:
+        sys.path.insert(0, os.path.join(root, 'site-packages'))
+
+
+import importlib
+
+ROOT_MODULE_NAME_ALLOW_LIST = (
+        'autotest_lib',
+        'autotest_lib.client',
+)
+
+
+def _setup_top_level_symlink(base_path):
+    """Create a self pointing symlink in the base_path)."""
+    if os.path.islink(os.path.join(base_path, 'autotest_lib')):
+        return
+    os.chdir(base_path)
+    os.symlink('.', 'autotest_lib')
+
+
+def _setup_client_symlink(base_path):
+    """Setup the client symlink for the DUT.
+
+    Creates a "autotest_lib" folder in client, then creates a symlink called
+    "client" pointing back to ../, as well as an __init__ for the folder.
     """
-    Perform all the necessary setup so that all the packages at
-    'base_path' can be imported via "import root_module_name.package".
-    If root_module_name is empty, then all the packages at base_path
-    are inserted as top-level packages.
 
-    Also, setup all the common.* aliases for modules in the common
-    library.
+    def _create_client_symlink():
+        os.chdir(autotest_lib_dir)
+        with open('__init__.py', 'w'):
+            pass
+        os.symlink('../', 'client')
 
-    The setup must be different if you are running on an Autotest server
-    or on a test machine that just has the client directories installed.
-    """
-    # Hack... Any better ideas?
-    if (root_module_name == 'autotest_lib.client' and
-        os.path.exists(os.path.join(os.path.dirname(__file__),
-                                    '..', 'server'))):
-        root_module_name = 'autotest_lib'
-        base_path = os.path.abspath(os.path.join(base_path, '..'))
+    autotest_lib_dir = os.path.join(base_path, 'autotest_lib')
+    link_path = os.path.join(autotest_lib_dir, 'client')
 
-    _create_module_and_parents(root_module_name)
-    _import_children_into_module(root_module_name, base_path)
+    # TODO: Use os.makedirs(..., exist_ok=True) after switching to Python 3
+    if not os.path.isdir(autotest_lib_dir):
+        try:
+            os.mkdir(autotest_lib_dir)
+        except FileExistsError as e:
+            if not os.path.isdir(autotest_lib_dir):
+                raise e
 
+    if os.path.islink(link_path):
+        return
+
+    try:
+        _create_client_symlink()
+    # It's possible 2 autotest processes are running at once, and one
+    # creates the symlink in the time between checking and creating.
+    # Thus if the symlink did not exist but we could not create it, check
+    # again for its existence and return if it now exists.
+    except FileExistsError as e:
+        if os.path.islink(link_path):
+            return
+        raise e
+
+
+def _symlink_check(base_path, root_dir):
+    """Verify the required symlinks are present, and add them if not."""
+    # Note the starting cwd to later change back to it.
+    starting_dir = os.getcwd()
+    if root_dir == 'autotest_lib':
+        _setup_top_level_symlink(base_path)
+    elif root_dir == 'autotest_lib.client':
+        _setup_client_symlink(base_path)
+
+    os.chdir(starting_dir)
+
+
+def setup(base_path, root_module_name):
+    _symlink_check(base_path, root_module_name)
+    if root_module_name not in ROOT_MODULE_NAME_ALLOW_LIST:
+        raise Exception('Unexpected root module: ' + root_module_name)
+
+    _insert_site_packages(base_path)
+
+    # I.e. server (or just not /client).
     if root_module_name == 'autotest_lib':
-        # Allow locally installed third party packages to be found
-        # before any that are installed on the system itself when not.
-        # running as a client.
-        # This is primarily for the benefit of frontend and tko so that they
-        # may use libraries other than those available as system packages.
-        sys.path.insert(0, os.path.join(base_path, "site-packages"))
+        # Base path is just x/x/x/x/autotest/files
+        _setup_autotest_lib(base_path)
+        _preimport_top_level_packages(os.path.join(base_path, 'autotest_lib'),
+                                      parent='autotest_lib')
+    else:  # aka, in /client/
+        if os.path.exists(os.path.join(os.path.dirname(base_path), 'server')):
+
+            # Takes you from /client/ to /files, because on the DUT
+            # there is no files/client directory.
+            autotest_base_path = os.path.dirname(base_path)
+
+        else:
+            autotest_base_path = base_path
+
+        _setup_autotest_lib(autotest_base_path)
+        _preimport_top_level_packages(os.path.join(autotest_base_path,
+                                                   'autotest_lib'),
+                                      parent='autotest_lib')
+        _preimport_top_level_packages(
+                os.path.join(autotest_base_path, 'autotest_lib', 'client'),
+                parent='autotest_lib.client',
+        )
 
     _monkeypatch_logging_handle_error()
+
+
+def _setup_autotest_lib(path):
+    sys.path.insert(0, path)
+    # This is a symlink back to the root directory, which makes the whole tree importable.
+    importlib.import_module('autotest_lib')
+    sys.path.pop(0)
+
+
+def _preimport_top_level_packages(root, parent):
+    # The old package-setup code used to fetch the top-level packages inside
+    # autotest_lib. We keep that behaviour to avoid having to add import
+    # statements for the top-level packages all over the codebase.
+    #
+    # e.g.,
+    #  import common
+    #  from autotest_lib.server import utils
+    #
+    # must continue to work. The _right_ way to do that import would be:
+    #
+    #  import common
+    #  import autotest_lib.server
+    #  from autotest_lib.server import utils
+    names = []
+    for filename in os.listdir(root):
+        path = os.path.join(root, filename)
+        if not os.path.isdir(path):
+            continue  # skip files
+        if '.' in filename:
+            continue  # if "." is in the name it's not a valid package name
+        if not os.access(path, os.R_OK | os.X_OK):
+            continue  # need read + exec access to make a dir importable
+        if '__init__.py' in os.listdir(path):
+            names.append(filename)
+
+    for name in names:
+        pname = parent + '.' + name
+        importlib.import_module(pname)
+        if name != 'autotest_lib':
+            sys.modules[name] = sys.modules[pname]
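The rewritten setup_modules.py above leans on two ideas: the interpreter major version comes from the PY_VERSION environment variable or a --py_version flag (defaulting to 3), and a symlink that points back at its own directory lets the source tree be imported as the autotest_lib package. Below is a minimal standalone sketch of both mechanisms, not the patched module itself; the temp directory and the package name demo_pkg are invented for illustration.

    #!/usr/bin/python3
    # Sketch only: demo_pkg and the temp directory are made up for illustration.
    import importlib
    import os
    import sys
    import tempfile

    def pick_py_version(argv):
        """Env var wins, then --py_version=N (popped from argv), else 3."""
        if os.getenv('PY_VERSION'):
            return int(os.getenv('PY_VERSION'))
        for i, arg in enumerate(list(argv)):
            if arg.startswith('--py_version='):
                version = arg.split('=', 1)[1]
                argv.pop(i)  # keep later argument parsers from seeing the flag
                return int(version)
        return 3

    # A directory containing a symlink to itself can be imported as a package,
    # so dotted names resolve without rearranging any files.
    root = tempfile.mkdtemp()
    os.makedirs(os.path.join(root, 'sub'))
    for init in ('__init__.py', os.path.join('sub', '__init__.py')):
        open(os.path.join(root, init), 'w').close()
    os.symlink('.', os.path.join(root, 'demo_pkg'))  # the "self" symlink

    sys.path.insert(0, root)
    importlib.import_module('demo_pkg')
    sub = importlib.import_module('demo_pkg.sub')
    print(pick_py_version(sys.argv), sub.__name__)

Running this with --py_version=2 prints 2 and leaves sys.argv without the flag, mirroring how _get_pyversion_from_args() keeps later argument parsers clean.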
diff --git a/client/setup_modules_unittest.py b/client/setup_modules_unittest.py
index e3ce795..89f63fe 100755
--- a/client/setup_modules_unittest.py
+++ b/client/setup_modules_unittest.py
@@ -1,38 +1,38 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
-import cStringIO, logging, os, sys, unittest
+import logging, os, sys, unittest
+from six.moves import StringIO
 
 # direct imports; autotest_lib has not been setup while testing this.
-from common_lib.test_utils import mock
-import setup_modules
+from client.common_lib.test_utils import mock
+from client import setup_modules
 
 
 class LoggingErrorStderrTests(unittest.TestCase):
+    """Test the setup_modules."""
+
     def setUp(self):
         autotest_dir = os.path.abspath(os.path.join(setup_modules.dirname,
                                                     '..'))
         setup_modules.setup(autotest_dir, root_module_name='autotest_lib')
         self.god = mock.mock_god()
-        self.test_stderr = cStringIO.StringIO()
+        self.test_stderr = StringIO()
         self.god.stub_with(sys, 'stderr', self.test_stderr)
         self.old_root_logging_level = logging.root.level
         logging.basicConfig(level=logging.ERROR)
         # _autotest_logging_handle_error unsets this after being called once.
         logging.raiseExceptions = 1
 
-
     def tearDown(self):
         self.god.unstub_all()
         # Undo the setUp logging.basicConfig call.
         logging.basicConfig(level=self.old_root_logging_level)
 
-
     def assert_autotest_logging_handle_error_called(self):
         self.stderr_str = self.test_stderr.getvalue()
         self.assertTrue('Exception occurred formatting' in self.stderr_str,
                         repr(self.stderr_str))
 
-
     def test_autotest_logging_handle_error(self):
         record = logging.LogRecord(
                 'test', logging.DEBUG, __file__, 0, 'MESSAGE', 'ARGS', None)
@@ -54,21 +54,18 @@
         # Make sure this was turned off by our handle_error.
         self.assertFalse(logging.raiseExceptions)
 
-
     def test_logging_monkey_patch_wrong_number_of_args(self):
         logging.error('logging unittest %d %s', 32)
         self.assert_autotest_logging_handle_error_called()
         self.assertTrue('logging unittest' in self.stderr_str,
                         repr(self.stderr_str))
 
-
     def test_logging_monkey_patch_wrong_type_of_arg(self):
         logging.error('logging unittest %d', 'eighteen')
         self.assert_autotest_logging_handle_error_called()
         self.assertTrue('logging unittest' in self.stderr_str,
                         repr(self.stderr_str))
 
-
     def test_logging_no_error(self):
         logging.error('logging unittest.  %s %s', 'meep', 'meep!')
         self.assertEqual('', self.test_stderr.getvalue())
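The unittest above swaps cStringIO for six.moves.StringIO so the same stderr-capture pattern works on both interpreters. A small standalone sketch of that pattern with stock Python 3 logging follows; the real test exercises autotest's monkeypatched handleError instead, so the captured text differs.

    #!/usr/bin/python3
    # Sketch only: stock logging behaviour, not autotest's patched handleError.
    import logging
    import sys
    from six.moves import StringIO

    captured = StringIO()
    old_stderr, sys.stderr = sys.stderr, captured
    try:
        logger = logging.getLogger('demo')
        logger.addHandler(logging.StreamHandler(sys.stderr))
        logger.error('logging unittest %d %s', 32)  # mismatched format args
    finally:
        sys.stderr = old_stderr

    # Stock Python 3 logging writes a "--- Logging error ---" traceback to
    # stderr, so the captured buffer contains the failure report.
    print(repr(captured.getvalue()))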
diff --git a/client/site_tests/OWNERS b/client/site_tests/OWNERS
new file mode 100644
index 0000000..f9bd0f4
--- /dev/null
+++ b/client/site_tests/OWNERS
@@ -0,0 +1,3 @@
+include /INFRA_OWNERS
+include /ENGPROD_OWNERS
+*
diff --git a/client/site_tests/accessibility_ChromeVoxSound/accessibility_ChromeVoxSound.py b/client/site_tests/accessibility_ChromeVoxSound/accessibility_ChromeVoxSound.py
deleted file mode 100644
index 58486fa..0000000
--- a/client/site_tests/accessibility_ChromeVoxSound/accessibility_ChromeVoxSound.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.a11y import a11y_test_base
-from autotest_lib.client.cros.audio import cras_utils
-from autotest_lib.client.cros.audio import sox_utils
-
-
-class accessibility_ChromeVoxSound(a11y_test_base.a11y_test_base):
-    """Check whether ChromeVox makes noise on real hardware."""
-    version = 1
-
-    _audio_chunk_size = 1 # Length of chunk size in seconds.
-    _detect_time = 40 # Max length of time to spend detecting audio in seconds.
-
-
-    def _detect_audio(self, name, min_time):
-        """Detects whether audio was heard and returns the approximate time.
-
-        Runs for at most self._detect_time, checking each chunk for sound.
-        After first detecting a chunk that has audio, counts the subsequent
-        chunks that also do.
-
-        Finally, check whether the found audio matches the expected length.
-
-        @param name: a string representing which sound is expected.
-        @param min_time: the minimum allowed sound length in seconds.
-
-        @raises: error.TestFail if the observed behavior doesn't match
-                 expected: either no sound or sound of bad length.
-
-        """
-        count = 0
-        counting = False
-        saw_sound_end = False
-
-        for i in xrange(self._detect_time / self._audio_chunk_size):
-            rms = self._rms_of_next_audio_chunk()
-            if rms > 0:
-                logging.info('Found passing chunk: %d.', i)
-                if not counting:
-                    start_time = time.time()
-                    counting = True
-                count += 1
-            elif counting:
-                audio_length = time.time() - start_time
-                saw_sound_end = True
-                break
-        if not counting:
-            raise error.TestFail('No audio for %s was found!' % name)
-        if not saw_sound_end:
-            raise error.TestFail('Audio for %s was more than % seconds!' % (
-                    name, self._detect_time))
-
-        logging.info('Time taken - %s: %f', name, audio_length)
-        if audio_length < min_time:
-            raise error.TestFail(
-                    '%s audio was only %f seconds long!' % (name, audio_length))
-        return
-
-
-    def _rms_of_next_audio_chunk(self):
-        """Finds the sox_stats values of the next chunk of audio."""
-        cras_utils.loopback(self._loopback_file, channels=1,
-                            duration=self._audio_chunk_size)
-        stat_output = sox_utils.get_stat(self._loopback_file)
-        logging.info(stat_output)
-        return vars(stat_output)['rms']
-
-
-    def _check_chromevox_sound(self, cr):
-        """Test contents.
-
-        Enable ChromeVox, navigate to a new page, and open a new tab.  Check
-        the audio output at each point.
-
-        @param cr: the chrome.Chrome() object
-
-        """
-        chromevox_start_time = time.time()
-        self._toggle_chromevox()
-        self._confirm_chromevox_state(True)
-
-        # TODO: this sound doesn't play for Telemetry user.  crbug.com/590403
-        # Welcome ding
-        # self._detect_audio('enable ChromeVox ding', 1)
-
-        # "ChromeVox Spoken Feedback is ready!"
-        self._detect_audio('welcome message', 2)
-        chromevox_open_time = time.time() - chromevox_start_time
-        logging.info('ChromeVox took %f seconds to start.')
-
-        # New tab sound.
-        tab = cr.browser.tabs.New()
-        self._detect_audio('new tab ding', 2)
-
-        # Page navigation sound.
-        tab.Navigate('chrome://version')
-        self._detect_audio('page navigation sound', 2)
-
-
-    def run_once(self):
-        """Entry point of this test."""
-        self._loopback_file = os.path.join(self.bindir, 'cras_loopback.wav')
-        extension_path = self._get_extension_path()
-
-        with chrome.Chrome(extension_paths=[extension_path]) as cr:
-            self._extension = cr.get_extension(extension_path)
-            cr.browser.tabs[0].WaitForDocumentReadyStateToBeComplete()
-            self._confirm_chromevox_state(False)
-            self._check_chromevox_sound(cr)
-
-
-    def _child_test_cleanup(self):
-        try:
-            os.remove(self._loopback_file)
-        except OSError:
-            pass
diff --git a/client/site_tests/accessibility_ChromeVoxSound/control b/client/site_tests/accessibility_ChromeVoxSound/control
deleted file mode 100644
index f84b7d7..0000000
--- a/client/site_tests/accessibility_ChromeVoxSound/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kathrelkeld"
-NAME = "accessibility_ChromeVoxSound"
-PURPOSE = "Enable ChromeVox and check for sound."
-CRITERIA = """
-This test will fail if ChromeVox is not running or produces insufficient sound.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "accessibility"
-TEST_TYPE = "client"
-DEPENDENCIES = 'audio_loopback_dongle'
-
-DOC = """
-Uses audio loopback to record snippets of audio, checking whether there was any
-actual sound.  Test performs actions such as enabling Chromevox, navigating to
-a page, and opening a new tab - checking for audio after each.
-"""
-
-job.run_test('accessibility_ChromeVoxSound')
diff --git a/client/site_tests/accessibility_Sanity/accessibility_Sanity.py b/client/site_tests/accessibility_Sanity/accessibility_Sanity.py
deleted file mode 100644
index 217c4d0..0000000
--- a/client/site_tests/accessibility_Sanity/accessibility_Sanity.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.a11y import a11y_test_base
-
-
-class accessibility_Sanity(a11y_test_base.a11y_test_base):
-    """Enables then disables all a11y features via accessibilityFeatures API."""
-    version = 1
-
-    # Features that do not have their own separate tests
-    _FEATURE_LIST = [
-        'largeCursor',
-        'stickyKeys',
-        'highContrast',
-        'screenMagnifier',
-        'autoclick',
-        'virtualKeyboard'
-    ]
-
-
-    def _check_chromevox(self):
-        """Run ChromeVox specific checks.
-
-        Check the reported state of ChromeVox before/after enable and disable.
-
-        """
-        # ChromeVox is initially off.
-        self._confirm_chromevox_state(False)
-
-        # Turn ChromeVox on and check that all the pieces work.
-        self._toggle_chromevox()
-        self._confirm_chromevox_state(True)
-
-        # Turn ChromeVox off.
-        self._toggle_chromevox()
-        self._confirm_chromevox_state(False)
-
-
-    def run_once(self):
-        """Entry point of this test."""
-        extension_path = self._get_extension_path()
-
-        with chrome.Chrome(extension_paths=[extension_path]) as cr:
-            self._extension = cr.get_extension(extension_path)
-
-            # Check specific features.
-            self._check_chromevox()
-
-            # Enable then disable all other accessibility features.
-            for value in [True, False]:
-                for feature in self._FEATURE_LIST:
-                    logging.info('Setting %s to %s.', feature, value)
-                    self._set_feature(feature, value)
-                    time.sleep(1)
diff --git a/client/site_tests/accessibility_Sanity/control b/client/site_tests/accessibility_Sanity/control
deleted file mode 100644
index 6f7480a..0000000
--- a/client/site_tests/accessibility_Sanity/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kathrelkeld"
-NAME = "accessibility_Sanity"
-PURPOSE = "Sanity test for accessibility features."
-CRITERIA = """
-This test will fail if features are not set or if Chrome crashes.  It will
-also fail if ChromeVox extension is not enabled/disabled or if ChromeVox
-indicators are not present.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "accessibility"
-TEST_TYPE = "client"
-
-DOC = """
-This is a test which enables then disables accessibility features via the
-accessibilityFeatures API.  It also performs specific tests for ChromeVox by
-looking at the value of management.get.enabled and checking for the presence
-of cvox_indicator_container elements.
-"""
-
-job.run_test('accessibility_Sanity')
diff --git a/client/site_tests/audio_Aconnect/audio_Aconnect.py b/client/site_tests/audio_Aconnect/audio_Aconnect.py
index 3054eee..d2788c2 100644
--- a/client/site_tests/audio_Aconnect/audio_Aconnect.py
+++ b/client/site_tests/audio_Aconnect/audio_Aconnect.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/audio_Aconnect/control b/client/site_tests/audio_Aconnect/control
index aa915ce..3eac15b 100644
--- a/client/site_tests/audio_Aconnect/control
+++ b/client/site_tests/audio_Aconnect/control
@@ -9,10 +9,12 @@
 CRITERIA = """
 Fails if /dev/snd/seq is inaccessible.
 """
+ATTRIBUTES = 'suite:infra_qual'
 TIME='SHORT'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "audio"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Check that aconnect can open and read information from /dev/snd/seq.
diff --git a/client/site_tests/audio_ActiveStreamStress/audio.m4a b/client/site_tests/audio_ActiveStreamStress/audio.m4a
deleted file mode 100644
index 943fccd..0000000
--- a/client/site_tests/audio_ActiveStreamStress/audio.m4a
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/audio_ActiveStreamStress/audio.mp3 b/client/site_tests/audio_ActiveStreamStress/audio.mp3
deleted file mode 100644
index 5f80a68..0000000
--- a/client/site_tests/audio_ActiveStreamStress/audio.mp3
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/audio_ActiveStreamStress/audio.wav b/client/site_tests/audio_ActiveStreamStress/audio.wav
deleted file mode 100644
index dbf7683..0000000
--- a/client/site_tests/audio_ActiveStreamStress/audio.wav
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/audio_ActiveStreamStress/audio_ActiveStreamStress.py b/client/site_tests/audio_ActiveStreamStress/audio_ActiveStreamStress.py
deleted file mode 100644
index b80738d..0000000
--- a/client/site_tests/audio_ActiveStreamStress/audio_ActiveStreamStress.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import random
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.audio import cras_utils
-
-_STRESS_ITERATIONS = 100 # Total number of iterations.
-_MAX_OPENED_TAB = 20     # Max number of tabs open and playing audio.
-_RETAIN_TAB = 5          # In case we hit the _MAX_OPENED_TAB limit,
-                         # close all except last 5 tabs.
-_MAX_TABS_TO_OPEN = 10   # Max number of tabs can be opened in one iteration.
-_CRASH_PATH = '/var/spool/crash'
-
-
-class audio_ActiveStreamStress(test.test):
-    """Verifies the active audio streams."""
-
-    version = 1
-
-    _active_stream_count = 0
-    _existing_cras_reports = []
-    _cr = None
-    # TODO(rohitbm): add more(including video) file types and download them from gs://.
-    _streams = ('audio.mp3', 'audio.wav', 'audio.m4a')
-    _stream_index = 0
-    _tab_count = 0
-
-    def run_once(self):
-
-        # Collect existing cras crash reports.
-        self._existing_cras_reports = self.collect_cras_crash()
-
-        with chrome.Chrome(init_network_controller=True) as self._cr:
-            self._cr.browser.platform.SetHTTPServerDirectories(self.bindir)
-            self.push_new_stream(self._cr.browser.tabs[0])
-            # TODO(rohitbm): decide whether to perform verification on each
-            # open/close or at end of the iteration.
-            self.verify_active_streams()
-            push_count = 0
-            pop_count = 0
-
-            # Stress test logic:
-            # Test runs for n number of iterations. For one iteration,
-            # a = random(10) tabs(streams) are created and
-            # b = random(a) tabs are closed. If the next iteration finds that,
-            # total number of opened tabs are more than _MAX_OPENED_TAB,
-            # test will close (total opened tabs - 5) tabs.
-            # This will balance number of opened tabs and will allow to close
-            # tabs in a control manner.
-
-            for count in xrange(1, _STRESS_ITERATIONS):
-                if self._tab_count > _MAX_OPENED_TAB:
-                     for i in xrange(1, (self._tab_count - _RETAIN_TAB)):
-                         pop_count += 1
-                         self.pop_stream()
-                         logging.info('Total streams closed: %d', pop_count)
-                random_tab = random.randint(1, 10)
-                for i in xrange(1, random_tab):
-                    push_count += 1
-                    self.push_new_stream(self._cr.browser.tabs.New())
-                    logging.info('Total new streams created: %d', push_count)
-                time.sleep(5) # Delay for active streams to play.
-                for i in xrange(1, random.randint(1, random_tab)):
-                    pop_count += 1
-                    self.pop_stream()
-                    logging.info('Total streams closed: %d', pop_count)
-
-
-    def get_stream_index(self):
-        if self._stream_index == len(self._streams):
-            # Reset the stream index if the index reached to the end.
-            self._stream_index = 0
-        return self._stream_index
-
-
-    def push_new_stream(self, tab):
-        """Starts next audio stream from self._streams list.
-
-        @param tab: tab to open an audio stream.
-        """
-        self._tab_count += 1
-        tab.Navigate(self._cr.browser.platform.http_server.UrlOf(
-                    os.path.join(self.bindir,
-                                 self._streams[self.get_stream_index()])))
-        tab.ExecuteJavaScript(
-                "document.getElementsByTagName('video')[0].loop=true")
-        # TODO(rohitbm): add playback verification.
-        self._stream_index += 1
-        self._active_stream_count += 1
-        time.sleep(1) # Adding a delay so cras can update the active count.
-        self.verify_active_streams()
-
-
-    def pop_stream(self):
-        """Turns off the first available stream by closing the first tab."""
-        if len(self._cr.browser.tabs) > 0:
-            self._cr.browser.tabs[0].Close()
-            self._tab_count -= 1
-            self._active_stream_count -= 1
-        time.sleep(1) # Adding delay so cras can update the active count.
-        self.verify_active_streams()
-
-
-    def verify_active_streams(self):
-        """Verifies test active audio streams with cras active streams."""
-        cras_stream_count = cras_utils.get_active_stream_count()
-        if self._active_stream_count != cras_stream_count:
-            cras_crash_reports = self.collect_cras_crash()
-            new_reports = list(set(cras_crash_reports) -
-                               set(self._existing_cras_reports))
-            error_msg = ('Active stream count: %d is not matching with '
-                         'cras active stream count: %d. '
-                         'Number of cras crashes %d : %s' %
-                         (self._active_stream_count, cras_stream_count,
-                         len(new_reports), new_reports))
-            raise error.TestError(error_msg)
-
-
-    def collect_cras_crash(self):
-        """Check for cras crashes.
-
-        @return a list of cras crash reports found.
-        """
-
-        crash_reports = []
-        if not os.path.isdir(_CRASH_PATH):
-            logging.debug('No cras crash detected!')
-        else:
-            cras_reports = os.listdir(_CRASH_PATH)
-            crash_reports = [report for report in cras_reports
-                             if report.startswith('cras')]
-        return crash_reports
diff --git a/client/site_tests/audio_ActiveStreamStress/control b/client/site_tests/audio_ActiveStreamStress/control
deleted file mode 100644
index 395d323..0000000
--- a/client/site_tests/audio_ActiveStreamStress/control
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "rohitbm@chromium.org"
-NAME = "audio_ActiveStreamStress"
-PURPOSE = "Verifies active audio streams"
-CRITERIA = """
-Test will fail if cras active stream count doesn't match to open active streams.
-"""
-# TODO(paulhsia): Add this test to an appropriate suite.
-TIME = "MEDIUM"
-TEST_CATEGORY = "General"
-TEST_CLASS = "audio"
-TEST_TYPE = "client"
-
-BUG_TEMPLATE = {
-    'owner': 'rohitbm@chromium.org',
-    'labels': ['OS-Chrome'],
-    'components': ['OS>Kernel>Video'],
-}
-
-DOC = """
-Test opens/closes audio streams and verifies active stream.
-
-This is a stress test to ensure that cras works flawlessly while adding/removing
-streams for a long time.
-"""
-
-job.run_test('audio_ActiveStreamStress')
diff --git a/client/site_tests/audio_AlsaAPI/audio_AlsaAPI.py b/client/site_tests/audio_AlsaAPI/audio_AlsaAPI.py
deleted file mode 100644
index 610e30b..0000000
--- a/client/site_tests/audio_AlsaAPI/audio_AlsaAPI.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import re
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.audio import alsa_utils
-
-class audio_AlsaAPI(test.test):
-    """Checks that simple ALSA API functions correctly."""
-    version = 2
-    _SND_DEV_DIR = '/dev/snd/'
-    _PLAYBACK_DEVICE_NAME = '^pcmC(\d+)D(\d+)p$'
-    # A list of boards that do not correctly implement snd_pcm_drop, see
-    # crosbug.com/p/51882
-    _BOARDS_WITHOUT_DROP_SUPPORT = ['banon', 'elm', 'samus', 'squawks']
-    # A dict of list of (card name, device) to be skipped on some boards.
-    _DEVICES_TO_BE_SKIPPED = {
-        # On the following boards, devices 4,5,6 are HDMI devices.
-        'asuka': {'sklnau8825max': [4, 5, 6]},
-        'cave': {'sklnau8825max': [4, 5, 6]},
-        'snappy': {'bxtda7219max': [4, 5, 6]},
-        # Chell's HDMI device 4 can not be used without being plugged.
-        # Also, its HDMI devices 5 and 6 are dummy devices.
-        'chell': {'sklnau8825adi': [4, 5, 6]},
-        # Kevin's device 3 is a DisplayPort device.
-        'kevin': {'rk3399-gru-sound': [3]},
-    }
-
-    def run_once(self, to_test):
-        """Run alsa_api_test binary and verify its result.
-
-        Checks the source code of alsa_api_test in audiotest repo for detail.
-
-        @param to_test: support these test items:
-                        move: Checks snd_pcm_forward API.
-                        fill: Checks snd_pcm_mmap_begin API.
-                        drop: Checks snd_pcm_drop API.
-
-        """
-        # Skip test_drop on boards that do not implement snd_pcm_drop
-        # correctly, as it cannot pass.
-        board = utils.get_board().lower()
-        if to_test == 'drop' and \
-            board.replace('-kernelnext', '') in \
-            self._BOARDS_WITHOUT_DROP_SUPPORT:
-            logging.info('Skipping test_drop for unsupported board: %s', board)
-            return
-
-        self._cardnames = alsa_utils.get_soundcard_names()
-        self._devices = []
-        self._find_sound_devices()
-        method_name = '_test_' + to_test
-        method = getattr(self, method_name)
-
-        # Stop CRAS to make sure the audio device won't be occupied.
-        utils.stop_service('cras', ignore_status=True)
-
-        try:
-            for card_index, device_index in self._devices:
-                device = 'hw:%s,%s' % (card_index, device_index)
-                method(device)
-        finally:
-            # Restart CRAS.
-            utils.start_service('cras', ignore_status=True)
-
-
-    def _skip_device(self, card_device):
-        """Skips devices on some boards.
-
-        @param card_device: A tuple of (card index, device index).
-
-        @returns: True if the device should be skipped. False otherwise.
-
-        """
-        card_name = self._cardnames[card_device[0]]
-
-        return card_device[1] in self._DEVICES_TO_BE_SKIPPED.get(
-                utils.get_board().lower(), {}).get(card_name, [])
-
-
-    def _find_sound_devices(self):
-        """Finds playback devices in sound device directory.
-
-        @raises: error.TestError if there is no playback device.
-        """
-        filenames = os.listdir(self._SND_DEV_DIR)
-        for filename in filenames:
-            search = re.match(self._PLAYBACK_DEVICE_NAME, filename)
-            if search:
-                card_device = (search.group(1), int(search.group(2)))
-                if not self._skip_device(card_device):
-                    self._devices.append(card_device)
-        if not self._devices:
-            raise error.TestError('There is no playback device')
-
-
-    def _make_alsa_api_test_command(self, option, device):
-        """Makes command for alsa_api_test.
-
-        @param option: same as to_test in run_once.
-        @param device: device in hw:<card index>:<device index> format.
-
-        @returns: The command in a list of args.
-
-        """
-        return ['alsa_api_test', '--device', device, '--%s' % option]
-
-
-    def _test_move(self, device):
-        """Runs alsa_api_test command and checks the return code.
-
-        Test snd_pcm_forward can move appl_ptr to hw_ptr.
-
-        @param device: device in hw:<card index>:<device index> format.
-
-        @raises error.TestError if command fails.
-
-        """
-        ret = utils.system(
-                command=self._make_alsa_api_test_command('move', device),
-                ignore_status=True)
-        if ret:
-            raise error.TestError(
-                    'ALSA API failed to move appl_ptr on device %s' % device)
-
-
-    def _test_fill(self, device):
-        """Runs alsa_api_test command and checks the return code.
-
-        Test snd_pcm_mmap_begin can provide the access to the buffer, and memset
-        can fill it with zeros without using snd_pcm_mmap_commit.
-
-        @param device: device in hw:<card index>:<device index> format.
-
-        @raises error.TestError if command fails.
-
-        """
-        ret = utils.system(
-                command=self._make_alsa_api_test_command('fill', device),
-                ignore_status=True)
-        if ret:
-            raise error.TestError(
-                    'ALSA API failed to fill buffer on device %s' % device)
-
-
-    def _test_drop(self, device):
-        """Runs alsa_api_test command and checks the return code.
-
-        Test snd_pcm_drop can stop playback and reset hw_ptr to 0 in hardware.
-
-        @param device: device in hw:<card index>:<device index> format.
-
-        @raises error.TestError if command fails.
-
-        """
-        ret = utils.system(
-                command=self._make_alsa_api_test_command('drop', device),
-                ignore_status=True)
-        if ret:
-            raise error.TestError(
-                    'ALSA API failed to drop playback and reset hw_ptr'
-                    'on device %s' % device)
diff --git a/client/site_tests/audio_AlsaAPI/control.drop b/client/site_tests/audio_AlsaAPI/control.drop
deleted file mode 100644
index a552719..0000000
--- a/client/site_tests/audio_AlsaAPI/control.drop
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'The Chromium OS Authors,chromeos-audio@google.com'
-NAME = 'audio_AlsaAPI.drop'
-ATTRIBUTES = ""
-PURPOSE = 'Test that simple ALSA API succeeds to drop playback and reset hw_ptr.'
-CRITERIA = """
-Check that the ALSA API succeeds.
-"""
-TIME='SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = "audio"
-TEST_TYPE = 'client'
-
-DOC = """
-Check ALSA API succeeds to drop playback and reset hw_ptr.
-"""
-
-job.run_test('audio_AlsaAPI', to_test='drop', tag='drop')
diff --git a/client/site_tests/audio_AlsaAPI/control.fill b/client/site_tests/audio_AlsaAPI/control.fill
deleted file mode 100644
index f14a9ba..0000000
--- a/client/site_tests/audio_AlsaAPI/control.fill
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'The Chromium OS Authors,chromeos-audio@google.com'
-NAME = 'audio_AlsaAPI.fill'
-ATTRIBUTES = ""
-PURPOSE = 'Test that simple ALSA API succeeds to fill audio buffer with zeros.'
-CRITERIA = """
-Check that the ALSA API succeeds.
-"""
-TIME='SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = "audio"
-TEST_TYPE = 'client'
-
-DOC = """
-Check ALSA API succeeds to fill buffer with zeros.
-"""
-
-job.run_test('audio_AlsaAPI', to_test='fill', tag='fill')
diff --git a/client/site_tests/audio_AlsaAPI/control.move b/client/site_tests/audio_AlsaAPI/control.move
deleted file mode 100644
index e6a7681..0000000
--- a/client/site_tests/audio_AlsaAPI/control.move
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'The Chromium OS Authors,chromeos-audio@google.com'
-NAME = 'audio_AlsaAPI.move'
-ATTRIBUTES = ""
-PURPOSE = 'Test that simple ALSA API succeeds to move appl_ptr.'
-CRITERIA = """
-Check that the ALSA API succeeds.
-"""
-TIME='SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = "audio"
-TEST_TYPE = 'client'
-
-DOC = """
-Check ALSA API succeeds to move appl_ptr.
-"""
-
-job.run_test('audio_AlsaAPI', to_test='move', tag='move')
diff --git a/client/site_tests/audio_Aplay/audio_Aplay.py b/client/site_tests/audio_Aplay/audio_Aplay.py
index 4470a3b..4dfa3c4 100644
--- a/client/site_tests/audio_Aplay/audio_Aplay.py
+++ b/client/site_tests/audio_Aplay/audio_Aplay.py
@@ -1,24 +1,27 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import os
 import logging
 import time
 
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros import upstart
 from autotest_lib.client.cros.audio import alsa_utils
+from autotest_lib.client.cros.audio import audio_helper
 from autotest_lib.client.cros.audio import audio_spec
 from autotest_lib.client.cros.audio import cras_utils
 
-APLAY_FILE = '/dev/zero' # raw data
+APLAY_FILE = '/dev/zero'  # raw data
 
 # Expected results of 'aplay -v' commands.
-APLAY_EXPECTED = set([
-      ('stream', 'PLAYBACK')])
+APLAY_EXPECTED = set([('stream', 'PLAYBACK')])
 
 
-def _play_audio(device_name, duration=1):
+def _play_audio(device_name, duration=1, channel_count=2):
     """Play a tone and try to ensure it played properly.
 
     Sample output from aplay -v:
@@ -54,28 +57,33 @@
     @return String output from the command (may be empty).
     @raises CmdError when cmd returns <> 0.
     """
-    cmd = ['aplay',
-           '-v', # show verbose details
-           '-D %s' % device_name,
-           '-d %d' % duration,
-           '-f cd', # format
-           APLAY_FILE,
-           '2>&1'] # verbose details
+    cmd = [
+            'aplay',
+            '-v',  # show verbose details
+            '-D %s' % device_name,
+            '-d %d' % duration,
+            '-c %d' % channel_count,
+            '-r 44100',
+            '-f S16_LE',
+            APLAY_FILE,
+            '2>&1'  # capture the verbose details printed to stderr
+    ]
     return utils.system_output(' '.join(cmd)).strip()
 
 
-def _check_play(device_name, duration, expected):
+def _check_play(device_name, duration, channel_count, expected):
     """Runs aplay command and checks the output against an expected result.
 
     The expected results are compared as sets of tuples.
 
     @param device_name: The output device for aplay.
     @param duration: Duration supplied to aplay.
+    @param channel_count: Channel count supplied to aplay.
     @param expected: The set of expected tuples.
     @raises error.TestError for invalid output or invalidly matching expected.
     """
     error_msg = 'invalid response from aplay'
-    results = _play_audio(device_name, duration)
+    results = _play_audio(device_name, duration, channel_count)
     if not results.startswith("Playing raw data '%s' :" % APLAY_FILE):
         raise error.TestError('%s: %s' % (error_msg, results))
     result_set = utils.set_from_keyval_output(results, '[\s]*:[\s]*')
@@ -89,6 +97,14 @@
     """Checks that simple aplay functions correctly."""
     version = 1
 
+    def initialize(self):
+        """Stop ui while running the test."""
+        upstart.stop_job('ui')
+
+    def cleanup(self):
+        """Start ui back after the test."""
+        upstart.restart_job('ui')
+
     def run_once(self, duration=1, test_headphone=False):
         """Run aplay and verify its output is as expected.
 
@@ -114,8 +130,12 @@
 
         if test_headphone:
             output_node = audio_spec.get_headphone_node(utils.get_board())
+            channel_count = 2
         else:
             output_node = "INTERNAL_SPEAKER"
+            channel_count = audio_spec.get_internal_speaker_channel_count(
+                    utils.get_board_type(), utils.get_board(),
+                    utils.get_platform(), utils.get_sku())
         logging.debug("Test output device %s", output_node)
 
         cras_utils.set_single_selected_output_node(output_node)
@@ -123,11 +143,15 @@
         cras_device_type = cras_utils.get_selected_output_device_type()
         logging.debug("Selected output device type=%s", cras_device_type)
         if cras_device_type != output_node:
+            audio_helper.dump_audio_diagnostics(
+                    os.path.join(self.resultsdir, "audio_diagnostics.txt"))
             raise error.TestFail("Fail to select output device.")
 
         cras_device_name = cras_utils.get_selected_output_device_name()
         logging.debug("Selected output device name=%s", cras_device_name)
         if cras_device_name is None:
+            audio_helper.dump_audio_diagnostics(
+                    os.path.join(self.resultsdir, "audio_diagnostics.txt"))
             raise error.TestFail("Fail to get selected output device.")
 
         alsa_device_name = alsa_utils.convert_device_name(cras_device_name)
@@ -135,7 +159,8 @@
         # Stop CRAS to make sure the audio device won't be occupied.
         utils.stop_service('cras', ignore_status=True)
         try:
-            _check_play(alsa_device_name, duration, APLAY_EXPECTED)
+            _check_play(alsa_device_name, duration, channel_count,
+                        APLAY_EXPECTED)
         finally:
             #Restart CRAS
             utils.start_service('cras', ignore_status=True)
diff --git a/client/site_tests/audio_Aplay/control b/client/site_tests/audio_Aplay/control
index 1d23fe1..7c91471 100644
--- a/client/site_tests/audio_Aplay/control
+++ b/client/site_tests/audio_Aplay/control
@@ -4,7 +4,7 @@
 
 AUTHOR = 'The Chromium OS Authors,chromeos-audio@google.com'
 NAME = 'audio_Aplay'
-ATTRIBUTES = "suite:kernel_per-build_regression"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel"
 PURPOSE = """
 Test that simple Aplay with internal speaker succeeds. Used in other tests.
 """
@@ -15,6 +15,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "audio,qav"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Check that the system plays audio via internal speaker.
diff --git a/client/site_tests/audio_Aplay/control.headphone b/client/site_tests/audio_Aplay/control.headphone
index 9c45b71..48f556f 100644
--- a/client/site_tests/audio_Aplay/control.headphone
+++ b/client/site_tests/audio_Aplay/control.headphone
@@ -14,6 +14,7 @@
 TEST_CLASS = "audio,qav"
 TEST_TYPE = 'client'
 DEPENDENCIES = 'audio_loopback_dongle'
+PY_VERSION = 3
 
 DOC = """
 Check that the system plays audio via headphone.
diff --git a/client/site_tests/audio_AudioCorruption/audio.html b/client/site_tests/audio_AudioCorruption/audio.html
deleted file mode 100644
index 059330f..0000000
--- a/client/site_tests/audio_AudioCorruption/audio.html
+++ /dev/null
@@ -1,24 +0,0 @@
-<!-- This is a test html file for audio test. -->
-<html>
-<body>
-    <audio id='testaudio' controls>
-    </audio>
-</body>
-
-<script type="text/javascript">
-var corruption = false;
-var testaudio = document.getElementById('testaudio');
-
-function loadSourceAndRunCorruptionTest(audio) {
-  // Listen to the error event for playing corrupted audio.
-  testaudio.addEventListener("error", function() { corruption = true; });
-
-  testaudio.src = audio;
-  testaudio.play();
-}
-
-function corruptionDetected() {
-  return corruption;
-}
-</script>
-</html>
diff --git a/client/site_tests/audio_AudioCorruption/audio_AudioCorruption.py b/client/site_tests/audio_AudioCorruption/audio_AudioCorruption.py
deleted file mode 100755
index 7ce13f8..0000000
--- a/client/site_tests/audio_AudioCorruption/audio_AudioCorruption.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-
-WAIT_TIMEOUT_S = 30
-
-class audio_AudioCorruption(test.test):
-    """This test verifies playing corrupted audio in Chrome."""
-    version = 1
-
-    def run_once(self, audio):
-        """Tests whether Chrome handles corrupted audio gracefully.
-
-        @param audio: Sample corrupted audio file to be played in Chrome.
-        """
-        with chrome.Chrome(init_network_controller=True) as cr:
-            cr.browser.platform.SetHTTPServerDirectories(self.bindir)
-            tab = cr.browser.tabs[0]
-            tab.Navigate(cr.browser.platform.http_server.UrlOf(
-                    os.path.join(self.bindir, 'audio.html')))
-            tab.WaitForDocumentReadyStateToBeComplete()
-
-            tab.EvaluateJavaScript(
-                    'loadSourceAndRunCorruptionTest("%s")' % audio)
-
-            # Expect corruption being detected after playing corrupted audio.
-            utils.poll_for_condition(
-                    lambda: tab.EvaluateJavaScript('corruptionDetected()'),
-                    exception=error.TestError('Corruption test is timeout'),
-                    timeout=WAIT_TIMEOUT_S,
-                    sleep_interval=1)
diff --git a/client/site_tests/audio_AudioCorruption/control b/client/site_tests/audio_AudioCorruption/control
deleted file mode 100644
index f2a9418..0000000
--- a/client/site_tests/audio_AudioCorruption/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Project, chromeos-video@google.com"
-NAME = "audio_AudioCorruption"
-PURPOSE = "Verify that Chrome can handle corrupted mp3 audio"
-CRITERIA = """
-This test will fail if Chrome can't catch error for playing corrupted mp3 audio.
-"""
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "audio"
-TEST_TYPE = "client"
-
-DOC = """
-This test verifies Chrome can catch error for playing corrupted mp3 audio.
-"""
-
-audio = 'http://commondatastorage.googleapis.com/chromiumos-test-assets-public/audio_AudioCorruption/corrupted.mp3'
-job.run_test('audio_AudioCorruption', audio=audio)
diff --git a/client/site_tests/audio_AudioInputGain/audio_AudioInputGain.py b/client/site_tests/audio_AudioInputGain/audio_AudioInputGain.py
deleted file mode 100644
index 7324ece..0000000
--- a/client/site_tests/audio_AudioInputGain/audio_AudioInputGain.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import constants as cros_constants
-from autotest_lib.client.cros.audio import audio_helper
-from autotest_lib.client.cros.audio import audio_test_data
-from autotest_lib.client.cros.audio import check_quality
-from autotest_lib.client.cros.audio import cmd_utils
-from autotest_lib.client.cros.audio import cras_utils
-from autotest_lib.client.cros.audio import sox_utils
-from autotest_lib.client.cros.multimedia import audio_facade_native
-
-
-CheckQualityArgsClass = collections.namedtuple(
-        'args_type', ['filename', 'rate', 'channel', 'bit_width'])
-
-
-class audio_AudioInputGain(audio_helper.cras_rms_test):
-    """Verifies input capture gain of chrome.audio API."""
-    version = 1
-
-    ALOOP_CRAS_NODE_TYPE = 'ALSA_LOOPBACK'
-    ALOOP_MODULE_NAME = 'snd-aloop'
-    CAPTURE_DURATION = 1
-    # 25: -20 dB
-    # 75: 10 dB
-    # Expected gain: 10 * sqrt(10)
-    LOW_GAIN = 25
-    HIGH_GAIN = 75
-    EXPECTED_GAIN = 31.62
-    FREQ_TOLERANCE = 1
-    SECOND_PEAK_RATIO_TOLERANCE = 0.05
-    GAIN_TOLERANCE = 10
-
-    def run_once(self):
-        """Entry point of this test."""
-        def cras_playback_record(gain_level):
-            """Do capture record at CRAS level.
-
-            @param gain_level: The input gain level.
-
-            @returns: A string for the recorded file path.
-
-            """
-            # Sine raw file lasts 5 seconds
-            raw_path = os.path.join(self.bindir, '5SEC.raw')
-            raw_file = audio_test_data.GenerateAudioTestData(
-                    path=raw_path,
-                    duration_secs=5,
-                    frequencies=[440, 440],
-                    volume_scale=0.05)
-
-            recorded_file = os.path.join(self.resultsdir,
-                                         'cras_recorded_%d.raw' % gain_level)
-
-            # Note: we've found that a couple of seconds after Chrome is up,
-            #       there may be a ~30-second-long output stream sourced from
-            #       "What's New In Your Chromebook", and it plays no sound.
-            #       Just ignore it and continue testing.
-            p = cmd_utils.popen(cras_utils.playback_cmd(raw_file.path))
-            try:
-                cras_utils.capture(recorded_file,
-                                   duration=self.CAPTURE_DURATION)
-                # Make sure the audio is still playing.
-                if p.poll() != None:
-                    raise error.TestError('playback stopped')
-            finally:
-                cmd_utils.kill_or_log_returncode(p)
-                raw_file.delete()
-            return recorded_file
-
-        # Check CRAS server is alive. If not, restart it and wait a second to
-        # get server ready.
-        if utils.get_service_pid('cras') == 0:
-            logging.debug('CRAS server is down. Restart it.')
-            utils.start_service('cras', ignore_status=True)
-            time.sleep(1)
-
-        utils.load_module(self.ALOOP_MODULE_NAME)
-
-        try:
-            with chrome.Chrome(
-                    extension_paths=[cros_constants.AUDIO_TEST_EXTENSION],
-                    autotest_ext=True) as cr:
-                audio_facade = audio_facade_native.AudioFacadeNative(cr)
-                audio_facade.set_chrome_active_node_type(
-                        self.ALOOP_CRAS_NODE_TYPE, self.ALOOP_CRAS_NODE_TYPE)
-
-                rms_value = []
-                for gain in [self.LOW_GAIN, self.HIGH_GAIN]:
-                    logging.debug('Start testing loopback with gain %d.', gain)
-                    audio_facade.set_chrome_active_input_gain(gain)
-                    recorded_file = cras_playback_record(gain)
-                    args = CheckQualityArgsClass(filename=recorded_file,
-                                                 rate=48000,
-                                                 channel=1,
-                                                 bit_width=16)
-                    raw_data, rate = check_quality.read_audio_file(args)
-                    checker = check_quality.QualityChecker(raw_data, rate)
-                    # The highest frequency recorded would be near 24 Khz
-                    # as the max sample rate is 48000 in our tests.
-                    # So let's set ignore_high_freq to be 48000.
-                    checker.do_spectral_analysis(ignore_high_freq=48000,
-                                                 check_quality=False,
-                                                 quality_params=None)
-                    spectra = checker._spectrals
-                    primary_freq = float(spectra[0][0][0])
-                    if abs(primary_freq - 440.0) > self.FREQ_TOLERANCE:
-                        raise error.TestFail(
-                                'Primary freq is beyond the expectation: '
-                                'got %.2f, expected 440.00, tolerance %f' %
-                                        (primary_freq, self.FREQ_TOLERANCE))
-
-                    if len(spectra[0]) > 1:
-                        peak_ratio = (float(spectra[0][1][1]) /
-                                float(spectra[0][0][1]))
-                        if peak_ratio > self.SECOND_PEAK_RATIO_TOLERANCE:
-                            raise error.TestFail(
-                                    'The second peak is not negligible: '
-                                    'f %.2f, peak_ratio %f (tolerance %f)' %
-                                            (float(spectra[0][1][0]),
-                                             peak_ratio,
-                                             self.SECOND_PEAK_RATIO_TOLERANCE))
-
-                    sox_stat = sox_utils.get_stat(input=recorded_file,
-                                                  channels=1,
-                                                  bits=16,
-                                                  rate=48000)
-                    rms_value.append(float(sox_stat.rms))
-                    logging.debug('signal RMS from sox = %f', rms_value[-1])
-
-                gain = rms_value[1] / rms_value[0]
-                if abs(gain - self.EXPECTED_GAIN) > self.GAIN_TOLERANCE:
-                    raise error.TestFail(
-                        'Gain is beyond the expectation: '
-                        'got %.2f, expected %.2f, tolerance %f' %
-                                (gain, self.EXPECTED_GAIN, self.GAIN_TOLERANCE))
-        finally:
-            utils.stop_service('cras', ignore_status=True)
-            utils.unload_module(self.ALOOP_MODULE_NAME)
-            utils.start_service('cras', ignore_status=True)
diff --git a/client/site_tests/audio_AudioInputGain/control b/client/site_tests/audio_AudioInputGain/control
deleted file mode 100644
index f59b340..0000000
--- a/client/site_tests/audio_AudioInputGain/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'The Chromium OS Audio Team, chromeos-audio@google.com'
-NAME = 'audio_AudioInputGain'
-PURPOSE = 'Test that the input capture gain is controllable.'
-CRITERIA = '''
-Control the input capture gain by chrome.audio API, and check the actual gain
-from the recorded wave of ALSA loopback.
-'''
-ATTRIBUTES = 'suite:audio_essential'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'audio'
-TEST_TYPE = 'client'
-DEPENDENCIES = ''
-
-DOC = '''
-Test that the input capture gain is controllable.
-
-NOTE: For this test kernel config is required to have CONFIG_SND_ALOOP=m which
-makes snd-aloop is manually probeable.
-'''
-
-job.run_test('audio_AudioInputGain')
diff --git a/client/site_tests/audio_CRASFormatConversion/audio_CRASFormatConversion.py b/client/site_tests/audio_CRASFormatConversion/audio_CRASFormatConversion.py
index 74af7c2..4fc86e6 100755
--- a/client/site_tests/audio_CRASFormatConversion/audio_CRASFormatConversion.py
+++ b/client/site_tests/audio_CRASFormatConversion/audio_CRASFormatConversion.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/audio_CRASFormatConversion/control b/client/site_tests/audio_CRASFormatConversion/control
index 99f7b16..87e2b39 100644
--- a/client/site_tests/audio_CRASFormatConversion/control
+++ b/client/site_tests/audio_CRASFormatConversion/control
@@ -12,8 +12,9 @@
 TEST_CATEGORY = 'PLAYBACKCAPTURE'
 TEST_CLASS = 'audio'
 TEST_TYPE = 'client'
-ATTRIBUTES = "suite:audio"
+ATTRIBUTES = ''
 DEPENDENCIES = 'audio_loopback_dongle'
+PY_VERSION = 3
 
 DOC = """
 NOTE: For this test to pass you need to have the line out looped back to mic-in.
diff --git a/client/site_tests/audio_CrasAec/audio_CrasAec.py b/client/site_tests/audio_CrasAec/audio_CrasAec.py
new file mode 100644
index 0000000..b238010
--- /dev/null
+++ b/client/site_tests/audio_CrasAec/audio_CrasAec.py
@@ -0,0 +1,156 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import time
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.audio import audio_helper
+from autotest_lib.client.cros.audio import sox_utils
+
+
+class audio_CrasAec(test.test):
+    """Verifies echo cancellation functions well."""
+    version = 1
+
+    INT_SPK_CRAS_NODE_TYPE = 'INTERNAL_SPEAKER'
+    INT_MIC_CRAS_NODE_TYPE = 'INTERNAL_MIC'
+
+    # (sample rate, channels, rms threshold)
+    # The rms_threshold value is determined by experiments.
+    TEST_DATA = [
+            (48000, 1, 0.015),
+            (44100, 1, 0.015),
+            (16000, 1, 0.015),
+            (44100, 2, 0.015),
+            (48000, 2, 0.015),
+            (16000, 2, 0.015),
+    ]
+
+    def play_sound(self):
+        """Plays the given audio content."""
+        cmd = [
+                'cras_test_client', '--playback_file',
+                os.path.join(self.bindir, 'human-voice.raw')
+        ]
+        self._play_sound_proc = subprocess.Popen(cmd)
+
+    def record_aec(self, rate, channels):
+        """Records the looped audio with AEC processing. """
+        file_name = os.path.join(self.resultsdir,
+                                 'record-%d-ch%d.raw' % (rate, channels))
+        cmd = [
+                'cras_test_client', '--loopback_file', file_name, '--effects',
+                'aec', '--rate',
+                str(rate), '--post_dsp', '2', '--num_channels',
+                str(channels)
+        ]
+        self._record_aec_proc = subprocess.Popen(cmd)
+        return file_name
+
+    def aecdump(self, stream_id, rate, channels):
+        """Do the AEC dump parallelly."""
+
+        file_name = os.path.join(self.resultsdir,
+                                 'aecdump-%d-ch%d.raw' % (rate, channels))
+        cmd = [
+                'cras_test_client', '--aecdump', file_name, '--stream_id',
+                str(stream_id), '--duration',
+                str(10)
+        ]
+        self._dump_aec_proc = subprocess.Popen(cmd)
+
+    def setup_test_procs(self):
+        """Initializes process variables for this test."""
+        self._dump_aec_proc = None
+        self._record_aec_proc = None
+        self._play_sound_proc = None
+
+    def cleanup_test_procs(self):
+        """Cleans up all cras_test_client processes used in test."""
+        if self._dump_aec_proc:
+            self._dump_aec_proc.kill()
+        if self._record_aec_proc:
+            self._record_aec_proc.kill()
+        if self._play_sound_proc:
+            self._play_sound_proc.kill()
+
+    def get_aec_stream_id(self):
+        """Gets the first AEC stream id in decimal. """
+        proc = subprocess.Popen(['cras_test_client', '--dump_a'],
+                                stdout=subprocess.PIPE)
+        output, err = proc.communicate()
+        lines = output.decode().split('\n')
+        # Filter through the summary lines by effects 0x0001 to find
+        # the stream id.
+        for line in lines:
+            words = line.split(' ')
+            if words[0] != 'Summary:':
+                continue
+
+            logging.debug("audio dump summaries: %s", line)
+            if words[8] == '0x0001':
+                return int(words[3], 16)
+
+        return None
+
+    def test_sample_rate_and_channels(self, rate, channels):
+        """
+        Configures CRAS to use aloop as the input and output option.
+        Plays the given audio content, then records it through aloop.
+        Expects AEC to cancel the signal well because the two-way data
+        are the same except for scaling and time shift.
+
+        @param rate: the sample rate of the capture stream.
+        @param channels: the number of channels of the capture stream.
+
+        @returns: the rms value reported by sox util.
+        """
+        self.setup_test_procs()
+
+        try:
+            self.play_sound()
+            recorded_file = self.record_aec(rate, channels)
+
+            # Wait at most 2 seconds for AEC stream to be ready for aecdump.
+            stream_id = utils.poll_for_condition(self.get_aec_stream_id,
+                                                 timeout=2,
+                                                 sleep_interval=0.1)
+
+            self.aecdump(stream_id, rate, channels)
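+            # Let the playback, AEC recording and aecdump run concurrently
+            # for a few seconds before tearing everything down.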
+            time.sleep(3)
+        except utils.TimeoutError:
+            # An error has possibly occurred in the capture process.
+            audio_helper.dump_audio_diagnostics(
+                    os.path.join(self.resultsdir, "audio_diagnostics.txt"))
+            raise error.TestFail("Fail to find aec stream's id")
+        finally:
+            self.cleanup_test_procs()
+
+        sox_stat = sox_utils.get_stat(recorded_file,
+                                      channels=channels,
+                                      rate=rate)
+        return sox_stat.rms
+
+    def run_once(self):
+        """Entry point of this test."""
+        rms_results = []
+        test_pass = True
+        try:
+            for sample_rate, channels, rms_threshold in self.TEST_DATA:
+                rms = self.test_sample_rate_and_channels(sample_rate, channels)
+                if rms > rms_threshold:
+                    test_pass = False
+                rms_results.append(rms)
+        finally:
+            logging.debug("rms results: %s", rms_results)
+
+        if not test_pass:
+            raise error.TestFail("rms too high in at least one case %s" %
+                                 rms_results)
diff --git a/client/site_tests/audio_CrasAec/control b/client/site_tests/audio_CrasAec/control
new file mode 100644
index 0000000..905c919
--- /dev/null
+++ b/client/site_tests/audio_CrasAec/control
@@ -0,0 +1,25 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'The Chromium OS Audio Team, chromeos-audio@google.com'
+NAME = 'audio_CrasAec'
+PURPOSE = 'Verifies echo cancellation function'
+CRITERIA = '''
+Check that the echo cancellation works well under various recording formats.
+'''
+ATTRIBUTES = 'suite:audio'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'audio'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+
+DOC = '''
+Tests the echo cancellation function using the post-DSP delayed loopback
+device in CRAS. Since the loopback content is expected to be the same as the
+fixed human-voice playback, the processed recording is expected to have a
+very low average rms value.
+'''
+
+job.run_test('audio_CrasAec')
diff --git a/client/site_tests/audio_CrasAec/human-voice.raw b/client/site_tests/audio_CrasAec/human-voice.raw
new file mode 100644
index 0000000..e41e836
--- /dev/null
+++ b/client/site_tests/audio_CrasAec/human-voice.raw
Binary files differ
diff --git a/client/site_tests/audio_CrasCheck/audio_CrasCheck.py b/client/site_tests/audio_CrasCheck/audio_CrasCheck.py
new file mode 100644
index 0000000..75eb33b
--- /dev/null
+++ b/client/site_tests/audio_CrasCheck/audio_CrasCheck.py
@@ -0,0 +1,134 @@
+# Lint as: python2, python3
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import time
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib.cros import chrome
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.audio import cras_utils
+
+_CRASH_PATH = '/var/spool/crash'
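+# Web Audio snippet: starting an oscillator gives the tab an active CRAS
+# output stream that the test can count.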
+_JS = """
+var c = new AudioContext();
+var o = c.createOscillator();
+o.connect(c.destination); o.start();
+"""
+
+
+class audio_CrasCheck(test.test):
+    """Verifies cras using its status, active streams and crashes"""
+
+    version = 1
+    _check = {
+            'crashes_on_boot': False,
+            'stream_activation': False,
+            'cras_status': False,
+            'crashes_at_end': False
+    }
+
+    def run_once(self):
+        # Check for existing cras crashes which might occur during UI bring up.
+        # TODO: (rohitbm) check if we need to reboot the DUT before the test
+        #       start to verify cras crashes during boot.
+        existing_crash_reports = self.collect_cras_crash()
+        if len(existing_crash_reports) == 0:
+            self._check['crashes_on_boot'] = True
+
+        # Capturing cras pid before starting the test.
+        cras_pid_1 = utils.get_oldest_pid_by_name('/usr/bin/cras')
+
+        with chrome.Chrome(init_network_controller=True) as self._cr:
+            # Push the 1st stream
+            self.push_new_stream(self._cr.browser.tabs.New())
+
+            # Capturing cras pid before opening a new set of audio streams.
+            cras_pid_2 = utils.get_oldest_pid_by_name('/usr/bin/cras')
+
+            # Push the 2nd stream
+            self.push_new_stream(self._cr.browser.tabs.New())
+
+            # Play audio for some time to ensure that
+            # long playback works well.
+            time.sleep(10)
+
+            total_tests = 2
+            active_streams = cras_utils.get_active_stream_count()
+            logging.debug(
+                    'Number of active streams after opening all tabs: %d.',
+                    active_streams)
+            if active_streams >= total_tests:
+                self._check['stream_activation'] = True
+
+            # Capturing cras pid after opening all audio/video streams.
+            cras_pid_3 = utils.get_oldest_pid_by_name('/usr/bin/cras')
+
+            # Close all open audio streams.
+            while total_tests > 0:
+                self._cr.browser.tabs[total_tests].Close()
+                total_tests -= 1
+                time.sleep(1)
+            active_streams = cras_utils.get_active_stream_count()
+            logging.debug(
+                    'Number of active streams after closing all tabs: %d.',
+                    active_streams)
+
+            # Capturing cras pid after closing all audio/video streams.
+            cras_pid_4 = utils.get_oldest_pid_by_name('/usr/bin/cras')
+
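+            # CRAS should keep the same pid throughout; a change implies the
+            # daemon crashed and was respawned while streams were added or
+            # removed.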
+            if cras_pid_1 == cras_pid_2 == cras_pid_3 == cras_pid_4:
+                self._check['cras_status'] = True
+
+        new_crash_reports = self.collect_cras_crash()
+        new_reports = list(
+                set(new_crash_reports) - set(existing_crash_reports))
+        if len(new_reports) == 0:
+            self._check['crashes_at_end'] = True
+
+        err_msg = ''
+        if list(self._check.values()).count(False) > 0:
+            if not self._check['crashes_on_boot']:
+                err_msg = ('1. Found cras crashes on boot: %s.\n' %
+                           existing_crash_reports)
+            if not self._check['stream_activation']:
+                err_msg += ('2. CRAS stream count does not match the '
+                            'number of open streams.\n')
+            if not self._check['cras_status']:
+                err_msg += ('3. CRAS PID changed during the test. CRAS might be '
+                            'crashing while adding/removing streams.\n')
+            if not self._check['crashes_at_end']:
+                err_msg += ('4. Found cras crashes at the end of the test: %s.' %
+                            new_reports)
+            raise error.TestError(err_msg)
+
+    def push_new_stream(self, tab):
+        """Starts next audio stream from self._streams list.
+
+        @param tab: tab to open an audio stream.
+        """
+        tab.Activate()
+        tab.Navigate("file:///")
+        tab.ExecuteJavaScript(_JS)
+        time.sleep(1)  # Adding a delay so cras can update the active count.
+
+    def collect_cras_crash(self):
+        """Check for cras crashes.
+
+        @return a list of cras crash reports found.
+        """
+
+        crash_reports = []
+        if not os.path.isdir(_CRASH_PATH):
+            logging.debug('No cras crash detected!')
+        else:
+            cras_reports = os.listdir(_CRASH_PATH)
+            crash_reports = [
+                    report for report in cras_reports
+                    if report.startswith('cras')
+            ]
+        return crash_reports
diff --git a/client/site_tests/audio_CrasCheck/control b/client/site_tests/audio_CrasCheck/control
new file mode 100644
index 0000000..2957b32
--- /dev/null
+++ b/client/site_tests/audio_CrasCheck/control
@@ -0,0 +1,33 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'rohitbm@chromium.org, ChromeOS Audio'
+NAME = 'audio_CrasCheck'
+PURPOSE = 'Performs basic audio check for active streams and crashes'
+CRITERIA = '''
+Test will fail if 1. cras is not running, 2. cras crashes, or 3. the active
+stream count does not match.
+'''
+# Temporarily remove the test to unblock PFQ. See crbug/718171 for details.
+#ATTRIBUTES = "suite:bvt-cq, suite:partners"
+ATTRIBUTES = "suite:bvt-perbuild"
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'audio'
+TEST_TYPE = 'client'
+JOB_RETRIES = 2
+PY_VERSION = 3
+
+BUG_TEMPLATE = {
+    'owner': 'vsuley@chromium.org',
+    'labels': ['OS-Chrome'],
+    'components': ['OS>Kernel>Audio'],
+}
+
+DOC = '''
+Test checks basic audio health by verifying cras status, crashes and
+active streams.
+'''
+
+job.run_test('audio_CrasCheck')
diff --git a/client/site_tests/audio_CrasDevSwitchStress/audio_CrasDevSwitchStress.py b/client/site_tests/audio_CrasDevSwitchStress/audio_CrasDevSwitchStress.py
index c048c4d..e1b1d6c 100755
--- a/client/site_tests/audio_CrasDevSwitchStress/audio_CrasDevSwitchStress.py
+++ b/client/site_tests/audio_CrasDevSwitchStress/audio_CrasDevSwitchStress.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -106,7 +107,7 @@
         proc = subprocess.Popen(['cras_test_client', '--dump_a'],
                                 stdout=subprocess.PIPE)
         output, err = proc.communicate()
-        self._last_log_time = self._get_time(output.split('\n')[-2])
+        self._last_log_time = self._get_time(output.decode().split('\n')[-2])
 
     def _get_buffer_level(self, match_str, dev_id):
         """
@@ -123,7 +124,7 @@
                                 stdout=subprocess.PIPE)
         output, err = proc.communicate()
         buffer_level = 0
-        lines = output.split('\n')
+        lines = output.decode().split('\n')
         start = False
         for line in lines:
             if not line or not start:
@@ -170,7 +171,7 @@
                                   ignore_status=True).strip()
         try:
             pid = int(pid)
-        except ValueError, e:  # empty or invalid string
+        except ValueError as e:  # empty or invalid string
             raise error.TestFail('CRAS not running')
 
     def _switch_to_node(self, node):
diff --git a/client/site_tests/audio_CrasDevSwitchStress/control b/client/site_tests/audio_CrasDevSwitchStress/control
index 07f7c6d..be6509c 100644
--- a/client/site_tests/audio_CrasDevSwitchStress/control
+++ b/client/site_tests/audio_CrasDevSwitchStress/control
@@ -13,6 +13,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "audio"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Test device buffer can stay at reasonable level under repeated switching.
diff --git a/client/site_tests/audio_CrasDevSwitchStress/control.input b/client/site_tests/audio_CrasDevSwitchStress/control.input
index 72f61bf..6996fe5 100644
--- a/client/site_tests/audio_CrasDevSwitchStress/control.input
+++ b/client/site_tests/audio_CrasDevSwitchStress/control.input
@@ -13,6 +13,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "audio"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Test device buffer can stay at reasonable level under repeated switching.
diff --git a/client/site_tests/audio_CrasGetNodes/audio_CrasGetNodes.py b/client/site_tests/audio_CrasGetNodes/audio_CrasGetNodes.py
index 8be476e..c52281a 100644
--- a/client/site_tests/audio_CrasGetNodes/audio_CrasGetNodes.py
+++ b/client/site_tests/audio_CrasGetNodes/audio_CrasGetNodes.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -11,7 +12,7 @@
 from autotest_lib.client.common_lib.cros import chrome
 from autotest_lib.client.cros import constants as cros_constants
 from autotest_lib.client.cros.audio import cras_utils
-from autotest_lib.client.cros.multimedia import audio_facade_native
+from autotest_lib.client.cros.multimedia import audio_facade
 
 class audio_CrasGetNodes(test.test):
     """Verifies dbus GetNodes API of CRAS."""
@@ -35,8 +36,8 @@
             with chrome.Chrome(
                     extension_paths=[cros_constants.AUDIO_TEST_EXTENSION],
                     autotest_ext=True) as cr:
-                audio_facade = audio_facade_native.AudioFacadeNative(cr)
-                audio_facade.set_chrome_active_node_type(
+                audio_facade_local = audio_facade.AudioFacadeLocal(cr)
+                audio_facade_local.set_chrome_active_node_type(
                         self.ALOOP_CRAS_NODE_TYPE, self.ALOOP_CRAS_NODE_TYPE)
 
             # Checks active output and input node types are correct.
diff --git a/client/site_tests/audio_CrasGetNodes/control b/client/site_tests/audio_CrasGetNodes/control
index 4dd12fe..f57ecf4 100644
--- a/client/site_tests/audio_CrasGetNodes/control
+++ b/client/site_tests/audio_CrasGetNodes/control
@@ -15,6 +15,7 @@
 TEST_CLASS = 'audio'
 TEST_TYPE = 'client'
 DEPENDENCIES = ''
+PY_VERSION = 3
 
 DOC = '''
 Test that dbus GetNodes API of CRAS is working.
diff --git a/client/site_tests/audio_CrasPinnedStream/audio_CrasPinnedStream.py b/client/site_tests/audio_CrasPinnedStream/audio_CrasPinnedStream.py
index 127aa76..d22d59e 100755
--- a/client/site_tests/audio_CrasPinnedStream/audio_CrasPinnedStream.py
+++ b/client/site_tests/audio_CrasPinnedStream/audio_CrasPinnedStream.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/audio_CrasPinnedStream/control b/client/site_tests/audio_CrasPinnedStream/control
index ea8bac1..3fa5952 100644
--- a/client/site_tests/audio_CrasPinnedStream/control
+++ b/client/site_tests/audio_CrasPinnedStream/control
@@ -12,12 +12,13 @@
 Check if the internal mic can record samples while the headset is also
 recording.
 """
-ATTRIBUTES = "suite:audio, suite:partners"
+ATTRIBUTES = ""
 TIME = 'SHORT'
 TEST_CATEGORY = 'PLAYBACKCAPTURE'
 TEST_CLASS = "audio"
 TEST_TYPE = 'client'
 DEPENDENCIES = 'audio_loopback_dongle'
+PY_VERSION = 3
 
 DOC = """
 Test that we can record from multiple devices simultaneously.
diff --git a/client/site_tests/audio_CrasSanity/audio_CrasSanity.py b/client/site_tests/audio_CrasSanity/audio_CrasSanity.py
deleted file mode 100644
index b80e412..0000000
--- a/client/site_tests/audio_CrasSanity/audio_CrasSanity.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.audio import cras_utils
-
-_CRASH_PATH = '/var/spool/crash'
-_JS = """
-var c = new AudioContext();
-var o = c.createOscillator();
-o.connect(c.destination); o.start();
-"""
-
-class audio_CrasSanity(test.test):
-    """Verifies cras sanity using its status, active streams and crashes"""
-
-    version = 1
-    _check = {'crashes_on_boot': False,
-              'stream_activation': False,
-              'cras_status': False,
-              'crashes_at_end': False
-             }
-
-    def run_once(self):
-        # Check for existing cras crashes which might occur during UI bring up.
-        # TODO: (rohitbm) check if we need to reboot the DUT before the test
-        #       start to verify cras crashes during boot.
-        existing_crash_reports = self.collect_cras_crash()
-        if len(existing_crash_reports) == 0:
-            self._check['crashes_on_boot'] = True
-
-        # Capturing cras pid before startig the test.
-        cras_pid_1 = utils.get_oldest_pid_by_name('/usr/bin/cras')
-
-        with chrome.Chrome(init_network_controller=True) as self._cr:
-            # Push the 1st stream
-            self.push_new_stream(self._cr.browser.tabs.New())
-
-            # Capturing cras pid before opening a new set of audio streams.
-            cras_pid_2 = utils.get_oldest_pid_by_name('/usr/bin/cras')
-
-            # Push the 2nd stream
-            self.push_new_stream(self._cr.browser.tabs.New())
-
-            # Let's play audio for sometime to ensure that
-            # long playback is good.
-            time.sleep(10)
-
-            total_tests = 2
-            active_streams = cras_utils.get_active_stream_count()
-            logging.debug(
-                'Number of active streams after opening all tabs: %d.',
-                active_streams)
-            if active_streams >= total_tests:
-                self._check['stream_activation'] = True
-
-            # Capturing cras pid after opening all audio/video streams.
-            cras_pid_3 = utils.get_oldest_pid_by_name('/usr/bin/cras')
-
-            # Close all open audio streams.
-            while total_tests > 0:
-                self._cr.browser.tabs[total_tests].Close()
-                total_tests -= 1
-                time.sleep(1)
-            active_streams = cras_utils.get_active_stream_count()
-            logging.debug(
-                'Number of active streams after closing all tabs: %d.',
-                active_streams)
-
-            # Capturing cras pid after closing all audio/stream streams.
-            cras_pid_4 = utils.get_oldest_pid_by_name('/usr/bin/cras')
-
-            if cras_pid_1 == cras_pid_2 == cras_pid_3 == cras_pid_4:
-                self._check['cras_status'] = True
-
-        new_crash_reports = self.collect_cras_crash()
-        new_reports = list(set(new_crash_reports) -
-                           set(existing_crash_reports))
-        if len(new_reports) == 0:
-            self._check['crashes_at_end'] = True
-
-        err_msg = ''
-        if self._check.values().count(False) > 0:
-            if not self._check['crashes_on_boot']:
-                err_msg = ('1. Found cras crashes on boot: %s.\n'
-                           % existing_crash_reports)
-            if not self._check['stream_activation']:
-                err_msg += ('2. CRAS stream count is not matching with '
-                            'number of streams.\n')
-            if not self._check['cras_status']:
-                err_msg += ('CRAS PID changed during the test. CRAS might be '
-                            'crashing while adding/removing streams.\n')
-            if not self._check['crashes_at_end']:
-                err_msg += ('Found cras crashes at the end of the test : %s.' %
-                            new_reports)
-            raise error.TestError(err_msg)
-
-
-    def push_new_stream(self, tab):
-        """Starts next audio stream from self._streams list.
-
-        @param tab: tab to open an audio stream.
-        """
-        tab.Activate()
-        tab.Navigate("file:///")
-        tab.ExecuteJavaScript(_JS)
-        time.sleep(1) # Adding a delay so cras can update the active count.
-
-
-    def collect_cras_crash(self):
-        """Check for cras crashes.
-
-        @return a list of cras crash reports found.
-        """
-
-        crash_reports = []
-        if not os.path.isdir(_CRASH_PATH):
-            logging.debug('No cras crash detected!')
-        else:
-            cras_reports = os.listdir(_CRASH_PATH)
-            crash_reports = [report for report in cras_reports
-                             if report.startswith('cras')]
-        return crash_reports
diff --git a/client/site_tests/audio_CrasSanity/control b/client/site_tests/audio_CrasSanity/control
deleted file mode 100644
index e3a71bd..0000000
--- a/client/site_tests/audio_CrasSanity/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rohitbm@chromium.org, ChromeOS Audio'
-NAME = 'audio_CrasSanity'
-PURPOSE = 'Performs basic audio check for active streams and crashes'
-CRITERIA = '''
-Test will fail if 1. cras is not running 2. cras crashes 3. active stream count
-does not match.
-'''
-# Temporarily remove the test to unblock PFQ.  See crbug/718171 in detail.
-#ATTRIBUTES = "suite:bvt-cq, suite:partners"
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = 'MEDIUM'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'audio'
-TEST_TYPE = 'client'
-JOB_RETRIES = 2
-BUG_TEMPLATE = {
-    'owner': 'vsuley@chromium.org',
-    'labels': ['OS-Chrome'],
-    'components': ['OS>Kernel>Audio'],
-}
-
-DOC = '''
-Test checks for basic audio sanity by checking cras status, crashes and active
-streams.
-'''
-
-job.run_test('audio_CrasSanity')
diff --git a/client/site_tests/audio_CrasStress/audio_CrasStress.py b/client/site_tests/audio_CrasStress/audio_CrasStress.py
index f6c1361..fde84b4 100755
--- a/client/site_tests/audio_CrasStress/audio_CrasStress.py
+++ b/client/site_tests/audio_CrasStress/audio_CrasStress.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -147,7 +148,7 @@
                                 stdout=subprocess.PIPE)
         output, err = proc.communicate()
         buffer_level = 0
-        for line in output.split('\n'):
+        for line in output.decode().split('\n'):
             search = re.match(match_str, line)
             if search:
                 tmp = int(search.group(1))
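The change above adapts the test to Python 3, where subprocess.Popen.communicate() returns bytes rather than str, so the captured output has to be decoded before str operations such as split() can be used. A minimal, self-contained sketch of that pattern (the echoed text below is a placeholder, not taken from the test):

    import subprocess

    # Capture stdout of a throwaway command; communicate() yields bytes here.
    proc = subprocess.Popen(['echo', 'min buffer level: 42'],
                            stdout=subprocess.PIPE)
    output, _ = proc.communicate()

    # Decode bytes to str before splitting into lines, as the hunk above does.
    for line in output.decode().split('\n'):
        print(line)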
diff --git a/client/site_tests/audio_CrasStress/control b/client/site_tests/audio_CrasStress/control
index aa05d8a..9ab9728 100644
--- a/client/site_tests/audio_CrasStress/control
+++ b/client/site_tests/audio_CrasStress/control
@@ -13,6 +13,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "audio"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Test device buffer can stay at reasonable level by running input and output
diff --git a/client/site_tests/audio_CrasStress/control.input_only b/client/site_tests/audio_CrasStress/control.input_only
index 0671474..0a5342c 100644
--- a/client/site_tests/audio_CrasStress/control.input_only
+++ b/client/site_tests/audio_CrasStress/control.input_only
@@ -13,6 +13,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "audio"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Test input buffer can stay at reasonable level
diff --git a/client/site_tests/audio_CrasStress/control.output_only b/client/site_tests/audio_CrasStress/control.output_only
index c173fe6..db13287 100644
--- a/client/site_tests/audio_CrasStress/control.output_only
+++ b/client/site_tests/audio_CrasStress/control.output_only
@@ -13,6 +13,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "audio"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Test output buffer can stay at reasonable level
diff --git a/client/site_tests/audio_PlaybackPower/audio_PlaybackPower.py b/client/site_tests/audio_PlaybackPower/audio_PlaybackPower.py
deleted file mode 100644
index b961288..0000000
--- a/client/site_tests/audio_PlaybackPower/audio_PlaybackPower.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import hashlib, logging, os, time
-
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error, file_utils
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import service_stopper
-from autotest_lib.client.cros.power import power_status, power_utils
-
-_DOWNLOAD_BASE = ('http://commondatastorage.googleapis.com/'
-                  'chromiumos-test-assets-public/audio_power/')
-
-# Minimum battery charge percentage to run the test
-BATTERY_INITIAL_CHARGED_MIN = 10
-
-# Measurement duration in seconds.
-MEASUREMENT_DURATION = 150
-
-POWER_DESCRIPTION = 'avg_energy_rate_'
-
-# Time to exclude from calculation after playing audio [seconds].
-STABILIZATION_DURATION = 10
-
-
-class audio_PlaybackPower(test.test):
-    """Captures power usage for audio playback."""
-
-    version = 1
-
-
-    def initialize(self):
-        self._service_stopper = None
-        self._backlight = None
-
-    def run_power_test(self, audio_type):
-        """
-        Captures power usage and reports it to the perf dashboard.
-
-        @param audio_type: audio format label to attach with perf keyval.
-        """
-
-        self._backlight = power_utils.Backlight()
-        self._backlight.set_default()
-
-        self._service_stopper = service_stopper.ServiceStopper(
-                service_stopper.ServiceStopper.POWER_DRAW_SERVICES)
-        self._service_stopper.stop_services()
-
-        self._power_status = power_status.get_status()
-        # Verify that we are running on battery and the battery is sufficiently
-        # charged.
-        self._power_status.assert_battery_state(BATTERY_INITIAL_CHARGED_MIN)
-
-        measurements = [power_status.SystemPower(
-                self._power_status.battery_path)]
-
-        def get_power():
-            power_logger = power_status.PowerLogger(measurements)
-            power_logger.start()
-            time.sleep(STABILIZATION_DURATION)
-            start_time = time.time()
-            time.sleep(MEASUREMENT_DURATION)
-            power_logger.checkpoint('result', start_time)
-            keyval = power_logger.calc()
-            logging.info('Power output %s', keyval)
-            return keyval['result_' + measurements[0].domain + '_pwr_avg']
-
-        energy_rate = get_power()
-        perf_keyval = {}
-        perf_keyval[POWER_DESCRIPTION + audio_type] = energy_rate
-        self.output_perf_value(description=POWER_DESCRIPTION + audio_type,
-                               value=energy_rate, units='W',
-                               higher_is_better=False)
-        self.write_perf_keyval(perf_keyval)
-
-
-    def run_once(self, test_file, checksum):
-        local_path = os.path.join(self.bindir, '%s' % test_file)
-        file_utils.download_file(_DOWNLOAD_BASE + test_file, local_path)
-        logging.info('Downloaded file: %s. Expected checksum: %s',
-                     local_path, checksum)
-        with open(local_path, 'r') as r:
-            md5sum = hashlib.md5(r.read()).hexdigest()
-            if md5sum != checksum:
-                raise error.TestError('unmatched md5 sum: %s' % md5sum)
-        with chrome.Chrome(init_network_controller=True) as cr:
-            cr.browser.platform.SetHTTPServerDirectories(self.bindir)
-            url = cr.browser.platform.http_server.UrlOf(local_path)
-            self.play_audio(cr.browser.tabs[0], url)
-            self.run_power_test(url.split('.')[-1])
-
-
-    def play_audio(self, tab, url):
-        """Navigates to an audio file over http and plays it in loop.
-
-        @param tab: tab to open an audio stream.
-        @param url: audio/video test url.
-        """
-        tab.Navigate(url)
-        tab.ExecuteJavaScript(
-                "document.getElementsByTagName('video')[0].loop=true")
-        tab.ExecuteJavaScript(
-                "document.getElementsByTagName('video')[0].volume=1")
-
-
-    def cleanup(self):
-        # cleanup() is run by common_lib/test.py.
-        if self._backlight:
-            self._backlight.restore()
-        if self._service_stopper:
-            self._service_stopper.restore_services()
-
-        super(audio_PlaybackPower, self).cleanup()
diff --git a/client/site_tests/audio_PlaybackPower/control b/client/site_tests/audio_PlaybackPower/control
deleted file mode 100644
index 7434a3b..0000000
--- a/client/site_tests/audio_PlaybackPower/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rohitbm@chromium.org, ChromeOS Audio'
-NAME = 'audio_PlaybackPower'
-PURPOSE = 'Records power consumption for audio playback'
-CRITERIA = '''
-This is a perf test. Test only fails if there is a problem with the test setup.
-'''
-TIME = 'MEDIUM'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'audio'
-TEST_TYPE = 'client'
-
-DOC = '''
-Test verifies audio power consumption and reports it to the perf dashboard.
-'''
-
-VIDEO_NAME = 'polka_crowd_128kbps_44_1khz.mp3'
-CHECKSUM = '7171529bb34c6e17dd163b03aa2b7c9c'
-
-job.run_test('audio_PlaybackPower', test_file=VIDEO_NAME, checksum=CHECKSUM)
diff --git a/client/site_tests/audio_SeekAudioFeedback/audio_SeekAudioFeedback.py b/client/site_tests/audio_SeekAudioFeedback/audio_SeekAudioFeedback.py
deleted file mode 100755
index 98d012b..0000000
--- a/client/site_tests/audio_SeekAudioFeedback/audio_SeekAudioFeedback.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import tempfile
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.audio import audio_helper
-from autotest_lib.client.cros.audio import cmd_utils
-from autotest_lib.client.cros.audio import cras_utils
-from autotest_lib.client.cros.audio import sox_utils
-
-
-NUMBER_OF_SEEKING = 6
-SEEKING_TIME = 50
-
-class audio_SeekAudioFeedback(audio_helper.chrome_rms_test):
-    """Verifies audio output on seeking forward/back."""
-
-    version = 1
-
-    def run_once(self, test_file, test_duration):
-        self._rms_values = {}
-        noise_file = os.path.join(self.resultsdir, 'noise.wav')
-        noiseprof_file = tempfile.NamedTemporaryFile()
-
-        # Record a sample of "silence" to use as a noise profile.
-        cras_utils.capture(noise_file, duration=2)
-        sox_utils.noise_profile(noise_file, noiseprof_file.name)
-
-        # Open the test page
-        self.chrome.browser.platform.SetHTTPServerDirectories(self.bindir)
-        tab = self.chrome.browser.tabs[0]
-        tab.Navigate(self.chrome.browser.platform.http_server.UrlOf(
-                os.path.join(self.bindir, 'play.html')))
-        tab.WaitForDocumentReadyStateToBeComplete()
-
-        # Test audio file.
-        self.rms_test(tab, test_file, noiseprof_file.name, test_duration)
-        self.write_perf_keyval(self._rms_values)
-
-
-    def rms_test(self, tab, test_file, noiseprof_file, test_duration):
-        logging.info('rms test on media file %s.', test_file)
-        recorded_file = os.path.join(self.resultsdir, 'recorded.wav')
-        loopback_file = os.path.join(self.resultsdir, 'loopback.wav')
-
-
-        # Plays the test_file in the browser and seek 6 times.
-        for x in range(0,NUMBER_OF_SEEKING):
-            self.play_media(tab, test_file, x)
-            # Record the audio output and also the CRAS loopback output.
-            p1 = cmd_utils.popen(cras_utils.capture_cmd(
-                    recorded_file, duration=test_duration ))
-            p2 = cmd_utils.popen(cras_utils.loopback_cmd(
-                    loopback_file, duration=test_duration ))
-            cmd_utils.wait_and_check_returncode(p1, p2)
-
-            # See if we recorded something.
-
-            # We captured two channels of audio in the CRAS loopback.
-            # The RMS values are for debugging only.
-            loopback_stats = [audio_helper.get_channel_sox_stat(
-                    loopback_file, i) for i in (1, 2)]
-            logging.info('loopback stat: %s', [str(s) for s in loopback_stats])
-
-            reduced_file = tempfile.NamedTemporaryFile()
-            sox_utils.noise_reduce(
-                    recorded_file, reduced_file.name, noiseprof_file)
-            rms = audio_helper.get_rms(reduced_file.name)[0]
-
-            self._rms_values['%s_rms_value' % test_file.replace('.', '_')]=rms
-
-
-    def wait_player_end(self, tab):
-        """Wait for player ends playing."""
-        utils.poll_for_condition(
-            condition=lambda: tab.EvaluateJavaScript('player.ended'),
-            exception=error.TestError('Player never end until timeout.'))
-
-
-    def play_media(self, tab, test_file, value):
-        """Plays a media file in Chromium.
-
-        @param test_file: Media file to test.
-        @param value: Index of the loop
-        """
-        tab.EvaluateJavaScript('play("%s")' % test_file)
-        def get_current_time():
-            return tab.EvaluateJavaScript('player.currentTime')
-
-        if value <=(NUMBER_OF_SEEKING/2):
-            new_seek = (value * SEEKING_TIME)
-        else:
-            new_seek = (((NUMBER_OF_SEEKING - value) * SEEKING_TIME))
-        # Make sure the audio is being played
-        old_time = get_current_time()
-        utils.poll_for_condition(
-            condition=lambda: get_current_time() > old_time,
-            exception=error.TestError('Player never start until timeout.'))
-        tab.EvaluateJavaScript('player.currentTime = %d' % new_seek)
diff --git a/client/site_tests/audio_SeekAudioFeedback/audio_SeekAudioFeedbackaudio.mp3 b/client/site_tests/audio_SeekAudioFeedback/audio_SeekAudioFeedbackaudio.mp3
deleted file mode 100755
index 5f80a68..0000000
--- a/client/site_tests/audio_SeekAudioFeedback/audio_SeekAudioFeedbackaudio.mp3
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/audio_SeekAudioFeedback/control b/client/site_tests/audio_SeekAudioFeedback/control
deleted file mode 100755
index c036435..0000000
--- a/client/site_tests/audio_SeekAudioFeedback/control
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Project"
-NAME = "audio_SeekAudioFeedback"
-PURPOSE = "Verifies audio output for seeking forward and back"
-CRITERIA = """
-This test will fail if audio file sound is not audible after seek.
-"""
-ATTRIBUTES = "suite:audio"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "audio"
-TEST_TYPE = "client"
-DEPENDENCIES = 'audio_loopback_dongle'
-
-DOC = """
-Test that the sound from media files can be heard by recording from mic in
-by seeking forward and back
-"""
-
-# Media formats to be tested.
-TEST_DURATION = 5
-TEST_FILE = 'audio_SeekAudioFeedbackaudio.mp3'
-
-constraints = ['%s_rms_value > 0.001' % TEST_FILE.replace('.', '_') ]
-
-job.run_test('audio_SeekAudioFeedback',
-             test_file=TEST_FILE,
-             test_duration=TEST_DURATION,
-             constraints=constraints)
diff --git a/client/site_tests/audio_SeekAudioFeedback/play.html b/client/site_tests/audio_SeekAudioFeedback/play.html
deleted file mode 100755
index 4556ea5..0000000
--- a/client/site_tests/audio_SeekAudioFeedback/play.html
+++ /dev/null
@@ -1,11 +0,0 @@
-<html>
-<script>
-  function play(media_file) {
-    player.src = media_file;
-    player.play();
-  }
-</script>
-<body>
-  <audio controls id="player"></audio>
-</body>
-</html>
diff --git a/client/site_tests/audio_WebRtcAudioLoopback/audio_WebRtcAudioLoopback.py b/client/site_tests/audio_WebRtcAudioLoopback/audio_WebRtcAudioLoopback.py
deleted file mode 100644
index 000ac59..0000000
--- a/client/site_tests/audio_WebRtcAudioLoopback/audio_WebRtcAudioLoopback.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.video import helper_logger
-from autotest_lib.client.cros.audio import audio_helper
-from autotest_lib.client.cros.audio import cras_utils
-
-# Suppress the media Permission Dialog.
-EXTRA_BROWSER_ARGS = [
-    '--use-fake-ui-for-media-stream',  # Suppress the Permission Dialog
-    '--use-fake-device-for-media-stream'  # Use fake audio & video
-]
-
-AUDIO_LOOPBACK_PAGE = 'audio_loopback.html'
-
-# The test's runtime.
-TEST_RUNTIME_SECONDS = 10
-
-# Number of peer connections to use.
-NUM_PEER_CONNECTIONS = 1
-
-# Polling timeout.
-TIMEOUT = TEST_RUNTIME_SECONDS + 10
-
-
-class audio_WebRtcAudioLoopback(test.test):
-    """Tests a WebRTC call with a fake audio."""
-    version = 1
-
-    def start_test(self, cr, recorded_file):
-        """Opens the WebRTC audio loopback page and records audio output.
-
-        @param cr: Autotest Chrome instance.
-        @param recorded_file: File to record the audio output to.
-        """
-        cr.browser.platform.SetHTTPServerDirectories(self.bindir)
-
-        self.tab = cr.browser.tabs[0]
-        self.tab.Navigate(cr.browser.platform.http_server.UrlOf(
-            os.path.join(self.bindir, AUDIO_LOOPBACK_PAGE)))
-        self.tab.WaitForDocumentReadyStateToBeComplete()
-        self.tab.EvaluateJavaScript(
-            "run(%d, %d)" % (TEST_RUNTIME_SECONDS, NUM_PEER_CONNECTIONS))
-        self.wait_for_active_stream_count(1)
-        cras_utils.capture(recorded_file, duration=TEST_RUNTIME_SECONDS)
-
-    def wait_test_completed(self, timeout_secs):
-        """Waits until the test is done.
-
-        @param timeout_secs Max time to wait in seconds.
-
-        @raises TestError on timeout, or javascript eval fails.
-        """
-        def _test_done():
-            status = self.tab.EvaluateJavaScript('testRunner.getStatus()')
-            logging.info(status)
-            return status == 'ok-done'
-
-        utils.poll_for_condition(
-                _test_done, timeout=timeout_secs, sleep_interval=1,
-                desc='audio.html reports itself as finished')
-
-    @staticmethod
-    def wait_for_active_stream_count(expected_count):
-        """Waits for the expected number of active streams.
-
-        @param expected_count: expected count of active streams.
-        """
-        utils.poll_for_condition(
-            lambda: cras_utils.get_active_stream_count() == expected_count,
-            exception=error.TestError(
-                'Timeout waiting active stream count to become "%d",'
-                'current value is "%d"' % (
-                    expected_count, cras_utils.get_active_stream_count())))
-
-    @helper_logger.video_log_wrapper
-    def run_once(self):
-        """Runs the audio_WebRtcAudioLoopback test."""
-        # Record a sample of "silence" to use as a noise profile.
-        noise_file = os.path.join(self.resultsdir, 'cras_noise.wav')
-        cras_utils.capture(noise_file, duration=1)
-
-        # Create a file for the audio recording.
-        recorded_file = os.path.join(self.resultsdir, 'cras_recorded.wav')
-
-        self.wait_for_active_stream_count(0)
-        with chrome.Chrome(extra_browser_args=EXTRA_BROWSER_ARGS +\
-                            [helper_logger.chrome_vmodule_flag()],
-                           init_network_controller=True) as cr:
-            self.start_test(cr, recorded_file)
-            self.wait_test_completed(TIMEOUT)
-            self.print_result(recorded_file, noise_file)
-
-    def print_result(self, recorded_file, noise_file):
-        """Prints results unless status is different from ok-done.
-
-        @raises TestError if the test failed outright.
-        @param recorded_file: File to record the audio output to.
-        @param noise_file: Noise recording, used for comparison.
-        """
-        status = self.tab.EvaluateJavaScript('testRunner.getStatus()')
-        if status != 'ok-done':
-            raise error.TestFail('Failed: %s' % status)
-
-        results = self.tab.EvaluateJavaScript('testRunner.getResults()')
-        logging.info('runTimeSeconds: %.2f', results['runTimeSeconds'])
-
-        rms_value = audio_helper.reduce_noise_and_get_rms(
-                recorded_file, noise_file)[0]
-        logging.info('rms_value: %f', rms_value)
-        self.output_perf_value(
-                description='rms_value',
-                value=rms_value,
-                units='', higher_is_better=True)
-
diff --git a/client/site_tests/audio_WebRtcAudioLoopback/audio_loopback.html b/client/site_tests/audio_WebRtcAudioLoopback/audio_loopback.html
deleted file mode 100644
index 3eaa530..0000000
--- a/client/site_tests/audio_WebRtcAudioLoopback/audio_loopback.html
+++ /dev/null
@@ -1,10 +0,0 @@
-<!DOCTYPE html>
-<html>
-<head><title>Loopback test</title></head>
-<body>
-  <p>Status: <span id="status">not-started</span></p>
-  <table border="0" id="test-table"></table>
-  <script src="audio_loopback_test.js"></script>
-</body>
-</html>
-
diff --git a/client/site_tests/audio_WebRtcAudioLoopback/audio_loopback_test.js b/client/site_tests/audio_WebRtcAudioLoopback/audio_loopback_test.js
deleted file mode 100644
index e6aaa83..0000000
--- a/client/site_tests/audio_WebRtcAudioLoopback/audio_loopback_test.js
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
- * Copyright 2017 The Chromium Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-/*jshint esversion: 6 */
-
-'use strict';
-
-const $ = document.getElementById.bind(document);
-
-function logError(err) {
-  console.error(err);
-}
-
-
-class FeedTable {
-  constructor() {
-    this.numCols = 5;
-    this.col = 0;
-    this.testTable = document.getElementById('test-table');
-    this.row = this.testTable.insertRow(-1);
-  }
-
-  addNewAudioCell() {
-    if (this.col == this.numCols) {
-      this.row = this.testTable.insertRow(-1);
-      this.col = 0;
-    }
-    var newCell = this.row.insertCell(-1);
-    var audio = document.createElement('audio');
-    audio.autoplay = false;
-    newCell.appendChild(audio);
-    this.col++;
-    return audio;
-  }
-}
-
-
-class PeerConnection {
-  constructor(audioElement) {
-    this.localConnection = null;
-    this.remoteConnection = null;
-    this.remoteAudio = audioElement;
-  }
-
-  start() {
-    const onGetUserMediaSuccess = this.onGetUserMediaSuccess.bind(this);
-    return navigator.mediaDevices
-        .getUserMedia({audio: true, video: true})
-        .then(onGetUserMediaSuccess);
-  }
-
-  onGetUserMediaSuccess(stream) {
-    this.localConnection = new RTCPeerConnection(null);
-    this.localConnection.onicecandidate = (event) => {
-      this.onIceCandidate(this.remoteConnection, event);
-    };
-    this.localConnection.addStream(stream);
-
-    this.remoteConnection = new RTCPeerConnection(null);
-    this.remoteConnection.onicecandidate = (event) => {
-      this.onIceCandidate(this.localConnection, event);
-    };
-    this.remoteConnection.onaddstream = (e) => {
-      this.remoteAudio.srcObject = e.stream;
-    };
-
-    var onCreateOfferSuccess = this.onCreateOfferSuccess.bind(this);
-    this.localConnection
-        .createOffer({offerToReceiveAudio: 1, offerToReceiveVideo: 1})
-        .then(onCreateOfferSuccess, logError);
-  }
-
-  onCreateOfferSuccess(desc) {
-    this.localConnection.setLocalDescription(desc);
-    this.remoteConnection.setRemoteDescription(desc);
-
-    var onCreateAnswerSuccess = this.onCreateAnswerSuccess.bind(this);
-    this.remoteConnection.createAnswer().then(onCreateAnswerSuccess, logError);
-  }
-
-  onCreateAnswerSuccess(desc) {
-    this.remoteConnection.setLocalDescription(desc);
-    this.localConnection.setRemoteDescription(desc);
-  }
-
-  onIceCandidate(connection, event) {
-    if (event.candidate) {
-      connection.addIceCandidate(new RTCIceCandidate(event.candidate));
-    }
-  }
-}
-
-
-class TestRunner {
-  constructor(runtimeSeconds) {
-    this.runtimeSeconds = runtimeSeconds;
-    this.audioElements = [];
-    this.peerConnections = [];
-    this.feedTable = new FeedTable();
-    this.iteration = 0;
-    this.startTime;
-    this.lastIterationTime;
-  }
-
-  addPeerConnection() {
-    const audioElement = this.feedTable.addNewAudioCell();
-    this.audioElements.push(audioElement);
-    this.peerConnections.push(new PeerConnection(audioElement));
-  }
-
-  startTest() {
-    this.startTime = Date.now();
-    let promises = testRunner.peerConnections.map((conn) => conn.start());
-    Promise.all(promises)
-        .then(() => {
-          this.startTime = Date.now();
-          this.audioElements.forEach((feed) => feed.play());
-          this.pauseAndPlayLoop();
-        })
-        .catch((e) => {throw e});
-  }
-
-  pauseAndPlayLoop() {
-    this.iteration++;
-    const status = this.getStatus();
-    this.lastIterationTime = Date.now();
-    $('status').textContent = status
-    if (status != 'ok-done') {
-      setTimeout(() => this.pauseAndPlayLoop());
-    } else {
-      // Finished, pause the audio.
-      this.audioElements.forEach((feed) => feed.pause());
-    }
-  }
-
-  getStatus() {
-    if (this.iteration == 0) {
-      return 'not-started';
-    }
-    const timeSpent = Date.now() - this.startTime;
-    if (timeSpent >= this.runtimeSeconds * 1000) {
-      return 'ok-done';
-    } else {
-      return `running, iteration: ${this.iteration}`;
-    }
-  }
-
-  getResults() {
-    const runTimeMillis = this.lastIterationTime - this.startTime;
-    return {'runTimeSeconds': runTimeMillis / 1000};
-  }
-}
-
-
-let testRunner;
-
-function run(runtimeSeconds, numPeerConnections) {
-  testRunner = new TestRunner(runtimeSeconds);
-  for (let i = 0; i < numPeerConnections; i++) {
-    testRunner.addPeerConnection();
-  }
-  testRunner.startTest();
-}
-
diff --git a/client/site_tests/audio_WebRtcAudioLoopback/control b/client/site_tests/audio_WebRtcAudioLoopback/control
deleted file mode 100644
index f8b6d7f..0000000
--- a/client/site_tests/audio_WebRtcAudioLoopback/control
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dtosic@google.com, hangouts-engprod-sto@google.com"
-NAME = "audio_WebRtcAudioLoopback"
-PURPOSE = "Measures the RMS score for a local audio loopback call"
-CRITERIA = "This test will fail if can't compute an RMS score."
-ATTRIBUTES = "suite:hotrod"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "audio"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-    "components": ["Blink>WebRTC>Audio"],
-}
-
-DOC = """
-Test that a local WebRTC audio loopback works.
-
-This test starts a local WebRTC call with two peer
-connections. It records the audio output to recording
-file and computes the RMS which is reported to the
-performance dashboard.
-"""
-
-job.run_test("audio_WebRtcAudioLoopback")
-
diff --git a/client/site_tests/audio_YoutubePlayback/audio_YoutubePlayback.py b/client/site_tests/audio_YoutubePlayback/audio_YoutubePlayback.py
deleted file mode 100644
index 8403a76..0000000
--- a/client/site_tests/audio_YoutubePlayback/audio_YoutubePlayback.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.audio import audio_helper
-from autotest_lib.client.cros.audio import cmd_utils
-from autotest_lib.client.cros.audio import cras_utils
-
-TEST_DURATION = 15
-
-class audio_YoutubePlayback(audio_helper.chrome_rms_test):
-    """Verifies if youtube playback can be captured."""
-    version = 1
-
-    def play_video(self, tab, video_url):
-        """Plays a Youtube video to record audio samples.
-
-           @param tab: the tab to load and play the video.
-        """
-        tab.Navigate(video_url)
-
-        def player_is_ready():
-            """Returns whether the player is ready."""
-            return tab.EvaluateJavaScript('typeof player != "undefined"')
-
-        utils.poll_for_condition(
-            condition=player_is_ready,
-            exception=error.TestError('Failed to load the Youtube player'))
-
-        tab.ExecuteJavaScript('player.playVideo()')
-
-        # Make sure the video is playing
-        def get_current_time():
-            """Returns current time."""
-            return tab.EvaluateJavaScript('player.getCurrentTime()')
-
-        old_time = get_current_time()
-        utils.poll_for_condition(
-            condition=lambda: get_current_time() > old_time,
-            exception=error.TestError('Video is not played until timeout'))
-
-
-    def run_once(self):
-        """Entry point of this test."""
-        self.chrome.browser.platform.SetHTTPServerDirectories(self.bindir)
-
-        video_url = self.chrome.browser.platform.http_server.UrlOf(
-                os.path.join(self.bindir, 'youtube.html'))
-        logging.info('Playing back youtube media file %s.', video_url)
-        noise_file = os.path.join(self.resultsdir, "noise.wav")
-        recorded_file = os.path.join(self.resultsdir, "recorded.wav")
-        loopback_file = os.path.join(self.resultsdir, "loopback.wav")
-
-        # Record a sample of "silence" to use as a noise profile.
-        cras_utils.capture(noise_file, duration=3)
-
-        # Play a video and record the audio output
-        self.play_video(self.chrome.browser.tabs[0], video_url)
-
-        p1 = cmd_utils.popen(cras_utils.capture_cmd(
-                recorded_file, duration=TEST_DURATION))
-        p2 = cmd_utils.popen(cras_utils.loopback_cmd(
-                loopback_file, duration=TEST_DURATION))
-
-        cmd_utils.wait_and_check_returncode(p1, p2)
-
-        # See if we recorded something
-        loopback_stats = [audio_helper.get_channel_sox_stat(
-                loopback_file, i) for i in (1, 2)]
-        logging.info('loopback stats: %s', [str(s) for s in loopback_stats])
-        rms_value = audio_helper.reduce_noise_and_get_rms(
-            recorded_file, noise_file)[0]
-
-        self.write_perf_keyval({'rms_value': rms_value})
diff --git a/client/site_tests/audio_YoutubePlayback/control b/client/site_tests/audio_YoutubePlayback/control
deleted file mode 100644
index a8d382b..0000000
--- a/client/site_tests/audio_YoutubePlayback/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Project"
-NAME = "audio_YoutubePlayback"
-PURPOSE = "Verify youtube video sound is audible"
-CRITERIA = """
-This test will fail if youtube video sound is not audible.
-"""
-ATTRIBUTES = "suite:audio"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "audio"
-TEST_TYPE = "client"
-DEPENDENCIES = 'audio_loopback_dongle'
-
-DOC = """
-Test that youtube video sound can be heard by arecord at mic in.
-
-NOTE: For this test to pass you need to have the line out looped back to mic-in.
-This can be through a 3.5mm male-to-male cable or a loopback dongle depending on
-your device.
-"""
-
-job.run_test('audio_YoutubePlayback', constraints=['rms_value > 0.05'])
diff --git a/client/site_tests/audio_YoutubePlayback/youtube.html b/client/site_tests/audio_YoutubePlayback/youtube.html
deleted file mode 100644
index 948d2a3..0000000
--- a/client/site_tests/audio_YoutubePlayback/youtube.html
+++ /dev/null
@@ -1,26 +0,0 @@
-<!-- This is a test html file for Youtube video tests. -->
-<html>
-  <body>
-    <iframe id="yt_frame" type="text/html" width="640" height="390"
-        src="http://www.youtube.com/embed/MNsabRmyH-Y?enablejsapi=1"
-        frameborder="0"></iframe>
-    <br>
-    <script>
-      var tag = document.createElement('script');
-      tag.src = "http://www.youtube.com/iframe_api";
-      var firstScriptTag = document.getElementsByTagName('script')[0];
-      firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
-      var player;
-      function onYouTubeIframeAPIReady() {
-        new YT.Player('yt_frame', {
-          events: {
-            'onReady': onPlayerReady,
-          }
-        });
-      }
-      function onPlayerReady(event) {
-        player = event.target;
-      }
-    </script>
-  </body>
-</html>
diff --git a/client/site_tests/autoupdate_Backoff/autoupdate_Backoff.py b/client/site_tests/autoupdate_Backoff/autoupdate_Backoff.py
index ff05b9a..c521855 100644
--- a/client/site_tests/autoupdate_Backoff/autoupdate_Backoff.py
+++ b/client/site_tests/autoupdate_Backoff/autoupdate_Backoff.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/autoupdate_Backoff/control b/client/site_tests/autoupdate_Backoff/control
index bf00bbb..b92b3a2 100644
--- a/client/site_tests/autoupdate_Backoff/control
+++ b/client/site_tests/autoupdate_Backoff/control
@@ -14,5 +14,6 @@
 TEST_CLASS = 'platform'
 TEST_CATEGORY = 'Functional'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 job.run_test('autoupdate_Backoff')
diff --git a/client/site_tests/autoupdate_BadMetadata/autoupdate_BadMetadata.py b/client/site_tests/autoupdate_BadMetadata/autoupdate_BadMetadata.py
index 35a08f6..8d05a49 100644
--- a/client/site_tests/autoupdate_BadMetadata/autoupdate_BadMetadata.py
+++ b/client/site_tests/autoupdate_BadMetadata/autoupdate_BadMetadata.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/autoupdate_BadMetadata/control b/client/site_tests/autoupdate_BadMetadata/control
index 94e1643..c1daf0d 100644
--- a/client/site_tests/autoupdate_BadMetadata/control
+++ b/client/site_tests/autoupdate_BadMetadata/control
@@ -9,6 +9,7 @@
 TEST_TYPE = "client"
 PURPOSE = "Test an update with bad metadata in the omaha response."
 TIME = "SHORT"
+PY_VERSION = 3
 
 DOC = """
 Test an update with bad metadata in the omaha response.
diff --git a/client/site_tests/autoupdate_CannedOmahaUpdate/autoupdate_CannedOmahaUpdate.py b/client/site_tests/autoupdate_CannedOmahaUpdate/autoupdate_CannedOmahaUpdate.py
index 2175bc9..d2c2a2d 100644
--- a/client/site_tests/autoupdate_CannedOmahaUpdate/autoupdate_CannedOmahaUpdate.py
+++ b/client/site_tests/autoupdate_CannedOmahaUpdate/autoupdate_CannedOmahaUpdate.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,18 +16,21 @@
     version = 1
 
 
-    def run_canned_update(self, allow_failure, update_url):
+    def run_canned_update(self, allow_failure, update_url, interactive):
         """
         Performs the update.
 
         @param allow_failure: True if we dont raise an error on failure.
         @param update_url: The URL to get an update.
+        @param interactive: Whether the update is interactive or not.
 
         """
 
         try:
-            self._check_for_update(update_url, critical_update=True,
-                                   wait_for_completion=True)
+            self._check_for_update(update_url,
+                                   critical_update=True,
+                                   wait_for_completion=True,
+                                   interactive=interactive)
         except error.CmdError as e:
             if not allow_failure:
                 raise error.TestFail('Update attempt failed: %s' %
@@ -35,8 +39,12 @@
                 logging.info('Ignoring failed update. Failure reason: %s', e)
 
 
-    def run_once(self, payload_url, allow_failure=False, public_key=None,
-                 use_cellular=False):
+    def run_once(self,
+                 payload_url,
+                 allow_failure=False,
+                 public_key=None,
+                 use_cellular=False,
+                 interactive=True):
         """
         Runs an update with a canned response using Nebraska.
 
@@ -44,6 +52,7 @@
         @param allow_failure: If true, failing the update is expected.
         @param public_key: The public key to serve to the update client.
         @param use_cellular: True if this test uses cellular.
+        @param interactive: Whether the update is interactive or not.
 
         """
 
@@ -52,7 +61,8 @@
             public_key=public_key) as nebraska:
 
             if not use_cellular:
-                self.run_canned_update(allow_failure, nebraska.get_update_url())
+                self.run_canned_update(allow_failure,
+                                       nebraska.get_update_url(), interactive)
                 return
 
             # Setup DUT so that we have ssh over ethernet but DUT uses
@@ -67,7 +77,8 @@
                     test_env.shill.connect_service_synchronous(
                             service, CONNECT_TIMEOUT)
                     self.run_canned_update(allow_failure,
-                                           nebraska.get_update_url())
+                                           nebraska.get_update_url(),
+                                           interactive)
             except error.TestError as e:
                 # Raise as test failure instead of test error so it is
                 # propagated to the server test's failure message.
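The hunk above threads a new interactive flag from run_once() down to _check_for_update(), so callers can request a background-style (non-interactive) update check. A hypothetical control-file invocation illustrating how the flag could be passed; the payload URL is a placeholder and not part of this change:

    job.run_test('autoupdate_CannedOmahaUpdate',
                 payload_url='gs://example-bucket/example-payload.bin',
                 allow_failure=False,
                 interactive=False)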
diff --git a/client/site_tests/autoupdate_CannedOmahaUpdate/control b/client/site_tests/autoupdate_CannedOmahaUpdate/control
index 3f31e7f..4614ee6 100644
--- a/client/site_tests/autoupdate_CannedOmahaUpdate/control
+++ b/client/site_tests/autoupdate_CannedOmahaUpdate/control
@@ -16,5 +16,6 @@
 TEST_CLASS = 'platform'
 TEST_CATEGORY = 'Functional'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 job.run_test('autoupdate_CannedOmahaUpdate')
diff --git a/client/site_tests/autoupdate_EOL/autoupdate_EOL.py b/client/site_tests/autoupdate_EOL/autoupdate_EOL.py
index 504e6f5..73aa869 100644
--- a/client/site_tests/autoupdate_EOL/autoupdate_EOL.py
+++ b/client/site_tests/autoupdate_EOL/autoupdate_EOL.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -78,7 +79,11 @@
         tab.Navigate('chrome://os-settings/help/details')
         tab.WaitForDocumentReadyStateToBeComplete()
         eol_js = '''
-            settings.AboutPageBrowserProxyImpl.getInstance().getEndOfLifeInfo()
+            async function getEOL() {
+                return await import('chrome://os-settings/chromeos/os_settings.js').then(m =>
+                    m.AboutPageBrowserProxyImpl.getInstance().getEndOfLifeInfo());
+            }
+            getEOL();
         '''
         eol_promise = tab.EvaluateJavaScript(eol_js, promise=True)
         expected_eol_date = self._get_expected_eol_date(eol_date)
diff --git a/client/site_tests/autoupdate_EOL/control b/client/site_tests/autoupdate_EOL/control
index 0dd120a..0fe1b05 100644
--- a/client/site_tests/autoupdate_EOL/control
+++ b/client/site_tests/autoupdate_EOL/control
@@ -10,6 +10,7 @@
 ATTRIBUTES = "suite:au-perbuild"
 PURPOSE = "Tests End of Life (EOL) / Autoupdate Expiration (AUE)"
 TIME = "SHORT"
+PY_VERSION = 3
 
 DOC = """
 This test will check that the DUT behaves correctly in an end of life scenario.
diff --git a/client/site_tests/autoupdate_EOL/control.approaching_eol b/client/site_tests/autoupdate_EOL/control.approaching_eol
index efdf37a..8699e73 100644
--- a/client/site_tests/autoupdate_EOL/control.approaching_eol
+++ b/client/site_tests/autoupdate_EOL/control.approaching_eol
@@ -10,6 +10,7 @@
 ATTRIBUTES = "suite:au-perbuild"
 PURPOSE = "Tests Approaching End of Life (EOL) / Autoupdate Expiration (AUE)"
 TIME = "SHORT"
+PY_VERSION = 3
 
 DOC = """
 This test will check that the DUT behaves correctly in an approaching end of
@@ -22,4 +23,4 @@
 import datetime
 job.run_test('autoupdate_EOL',
              eol_date=(datetime.datetime.utcnow()
-                       - datetime.datetime(1970,1,1)).days + 1)
\ No newline at end of file
+                       - datetime.datetime(1970,1,1)).days + 1)
diff --git a/client/site_tests/autoupdate_EOL/control.future_eol b/client/site_tests/autoupdate_EOL/control.future_eol
index 96cd958..77b23c7 100644
--- a/client/site_tests/autoupdate_EOL/control.future_eol
+++ b/client/site_tests/autoupdate_EOL/control.future_eol
@@ -10,6 +10,7 @@
 ATTRIBUTES = "suite:au-perbuild"
 PURPOSE = "Tests Approaching End of Life (EOL) / Autoupdate Expiration (AUE)"
 TIME = "SHORT"
+PY_VERSION = 3
 
 DOC = """
 This test will check that the DUT behaves correctly in a future end of life
diff --git a/client/site_tests/autoupdate_InstallAndUpdateDLC/autoupdate_InstallAndUpdateDLC.py b/client/site_tests/autoupdate_InstallAndUpdateDLC/autoupdate_InstallAndUpdateDLC.py
index 9908c3b..20857b8 100644
--- a/client/site_tests/autoupdate_InstallAndUpdateDLC/autoupdate_InstallAndUpdateDLC.py
+++ b/client/site_tests/autoupdate_InstallAndUpdateDLC/autoupdate_InstallAndUpdateDLC.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -12,7 +13,7 @@
     """Tests installing DLCs and updating them along with the OS. """
     version = 1
 
-    def run_once(self, payload_urls, full_payload=True):
+    def run_once(self, payload_urls, interactive=True):
         """
         Install DLC and perform an update, using nebraska.
 
@@ -23,7 +24,7 @@
                              install the DLC. In case of a delta update, both
                              full and delta DLC payloads should be included in
                              payload_urls.
-        @param full_payload: True for a full payload, False for delta.
+        @param interactive: Whether the update should be interactive.
 
         """
         with nebraska_wrapper.NebraskaWrapper(
@@ -34,7 +35,9 @@
             self._dlc_util.install(self._dlc_util._SAMPLE_DLC_ID, nebraska_url)
 
             if not self._dlc_util.is_installed(self._dlc_util._SAMPLE_DLC_ID):
-                raise error.TestFail('Dummy DLC was not installed.')
+                raise error.TestFail('Test DLC was not installed.')
 
             logging.debug('Updating OS and DLC')
-            self._check_for_update(nebraska_url, wait_for_completion=True)
+            self._check_for_update(nebraska_url,
+                                   wait_for_completion=True,
+                                   interactive=interactive)
diff --git a/client/site_tests/autoupdate_InstallAndUpdateDLC/control b/client/site_tests/autoupdate_InstallAndUpdateDLC/control
index dcb99b1..1278fb8 100644
--- a/client/site_tests/autoupdate_InstallAndUpdateDLC/control
+++ b/client/site_tests/autoupdate_InstallAndUpdateDLC/control
@@ -9,6 +9,7 @@
 TEST_TYPE = "client"
 PURPOSE = "Tests installing a DLC and doing an N-to-N update with DLC present."
 TIME = "SHORT"
+PY_VERSION = 3
 
 DOC = """
 This test will first install a DLC and check that it was installed
diff --git a/client/site_tests/autoupdate_InvalidateSuccessfulUpdate/autoupdate_InvalidateSuccessfulUpdate.py b/client/site_tests/autoupdate_InvalidateSuccessfulUpdate/autoupdate_InvalidateSuccessfulUpdate.py
new file mode 100644
index 0000000..dcee779
--- /dev/null
+++ b/client/site_tests/autoupdate_InvalidateSuccessfulUpdate/autoupdate_InvalidateSuccessfulUpdate.py
@@ -0,0 +1,65 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.update_engine import nebraska_wrapper
+from autotest_lib.client.cros.update_engine import update_engine_test
+
+
+class autoupdate_InvalidateSuccessfulUpdate(
+        update_engine_test.UpdateEngineTest):
+    """Tests installing an update and then invalidating it."""
+
+    version = 1
+
+    def _apply_update(self, update_url):
+        """
+        Performs the update and ensures it is successful.
+
+        @param update_url: The URL to get an update.
+
+        """
+        try:
+            self._check_for_update(update_url,
+                                   critical_update=True,
+                                   wait_for_completion=True)
+        except error.CmdError as e:
+            raise error.TestFail('Update attempt failed: %s' %
+                                 self._get_last_error_string())
+
+    def _check_invalidated_update(self, update_url):
+        """
+        Performs an update check and confirms that it results
+        in an invalidated update.
+
+        @param update_url: The URL to get an update.
+
+        """
+        try:
+            self._check_for_update(update_url,
+                                   critical_update=True,
+                                   wait_for_completion=False)
+            self._wait_for_update_to_idle(check_kernel_after_update=True,
+                                          inactive_kernel=False)
+        except error.CmdError as e:
+            raise error.TestFail('Invalidate attempt failed: %s' %
+                                 self._get_last_error_string())
+        self._check_update_engine_log_for_entry(
+                'Invalidating previous update.',
+                raise_error=True,
+                err_str='Failed to invalidate previous update')
+
+    def run_once(self, payload_url):
+        """
+        Runs an update and then invalidates it using Nebraska.
+
+        @param payload_url: Path to a payload on Google storage.
+
+        """
+        with nebraska_wrapper.NebraskaWrapper(
+                log_dir=self.resultsdir, payload_url=payload_url) as nebraska:
+            self._apply_update(nebraska.get_update_url())
+            nebraska.update_config(invalidate_last_update=True)
+            self._check_invalidated_update(nebraska.get_update_url())
diff --git a/client/site_tests/autoupdate_InvalidateSuccessfulUpdate/control b/client/site_tests/autoupdate_InvalidateSuccessfulUpdate/control
new file mode 100644
index 0000000..8f97c81
--- /dev/null
+++ b/client/site_tests/autoupdate_InvalidateSuccessfulUpdate/control
@@ -0,0 +1,20 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "hbarnor, Chromium OS"
+NAME = "autoupdate_InvalidateSuccessfulUpdate"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "client"
+PURPOSE = "Tests installing an update and invalidating it before it is booted."
+TIME = "SHORT"
+PY_VERSION = 3
+
+DOC = """
+This test will first apply an update and check that it was applied successfully
+and DUT is in NEED_REBOOT state. Then it will invalidate the update and check
+that DUT invalidated the update by looking at state and boot priority.
+"""
+
+job.run_test('autoupdate_InvalidateSuccessfulUpdate')
diff --git a/client/site_tests/autoupdate_LoginStartUpdateLogout/autoupdate_LoginStartUpdateLogout.py b/client/site_tests/autoupdate_LoginStartUpdateLogout/autoupdate_LoginStartUpdateLogout.py
index ee9c3cd..7097cea 100644
--- a/client/site_tests/autoupdate_LoginStartUpdateLogout/autoupdate_LoginStartUpdateLogout.py
+++ b/client/site_tests/autoupdate_LoginStartUpdateLogout/autoupdate_LoginStartUpdateLogout.py
@@ -1,9 +1,12 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import chrome
+from autotest_lib.client.cros.update_engine import nebraska_wrapper
 from autotest_lib.client.cros.update_engine import update_engine_test
 
 class autoupdate_LoginStartUpdateLogout(update_engine_test.UpdateEngineTest):
@@ -15,14 +18,17 @@
     """
     version = 1
 
-    def run_once(self, update_url, progress_to_complete, full_payload=True,
+    def run_once(self,
+                 payload_url,
+                 progress_to_complete,
+                 full_payload=True,
                  interrupt_network=False):
         """
         Login, start an update, and logout. If specified, this test will also
         disconnect the internet upon reaching a target update progress,
         wait a while, and reconnect the internet before logging out.
 
-        @param update_url: The omaha url to call.
+        @param payload_url: Payload url to pass to Nebraska.
         @param progress_to_complete: If interrupt_network is
                                      True, the internet will be disconnected
                                      when the update reaches this progress.
@@ -35,21 +41,39 @@
 
         """
         # Login as regular user. Start an update. Then Logout
-        with chrome.Chrome(logged_in=True):
-            self._check_for_update(update_url, critical_update=True,
-                                   full_payload=full_payload)
-            if interrupt_network:
-                self._wait_for_progress(progress_to_complete)
-                completed = self._get_update_progress()
-                self._disconnect_reconnect_network_test(update_url)
 
-                if self._is_update_engine_idle():
-                    raise error.TestFail(
-                        'The update was IDLE after interrupt.')
-                if not self._update_continued_where_it_left_off(completed):
-                    raise error.TestFail('The update did not continue where '
-                                         'it left off after interruption.')
+        with nebraska_wrapper.NebraskaWrapper(
+                log_dir=self.resultsdir,
+                payload_url=payload_url,
+                persist_metadata=True) as nebraska:
 
-        # Log in and out with a new user during the update.
-        with chrome.Chrome(logged_in=True, dont_override_profile=False):
-            pass
+            config = {'critical_update': True, 'full_payload': full_payload}
+            nebraska.update_config(**config)
+            update_url = nebraska.get_update_url()
+            # Create a nebraska config, which causes nebraska to start up
+            # before update_engine. This will allow nebraska to be up right
+            # after system startup so it can be used in the reboot
+            # interruption test.
+            nebraska.create_startup_config(**config)
+
+            with chrome.Chrome(logged_in=True):
+                self._check_for_update(update_url)
+                # Wait for the update to start.
+                utils.poll_for_condition(self._is_update_started, timeout=30)
+
+                if interrupt_network:
+                    self._wait_for_progress(progress_to_complete)
+                    completed = self._get_update_progress()
+                    self._disconnect_reconnect_network_test()
+
+                    if self._is_update_engine_idle():
+                        raise error.TestFail(
+                                'The update was IDLE after interrupt.')
+                    if not self._update_continued_where_it_left_off(completed):
+                        raise error.TestFail(
+                                'The update did not continue where '
+                                'it left off after interruption.')
+
+            # Log in and out with a new user during the update.
+            with chrome.Chrome(logged_in=True, dont_override_profile=False):
+                pass
diff --git a/client/site_tests/autoupdate_LoginStartUpdateLogout/control b/client/site_tests/autoupdate_LoginStartUpdateLogout/control
index 10943ae..b8b802a 100644
--- a/client/site_tests/autoupdate_LoginStartUpdateLogout/control
+++ b/client/site_tests/autoupdate_LoginStartUpdateLogout/control
@@ -9,6 +9,7 @@
 TEST_TYPE = "client"
 PURPOSE = "Tests logging in and out during an update."
 TIME = "SHORT"
+PY_VERSION = 3
 DOC = """ Tests logging in and out during an update. """
 
 job.run_test('autoupdate_LoginStartUpdateLogout')
diff --git a/client/site_tests/autoupdate_PeriodicCheck/autoupdate_PeriodicCheck.py b/client/site_tests/autoupdate_PeriodicCheck/autoupdate_PeriodicCheck.py
index c0251ad..3458f29 100644
--- a/client/site_tests/autoupdate_PeriodicCheck/autoupdate_PeriodicCheck.py
+++ b/client/site_tests/autoupdate_PeriodicCheck/autoupdate_PeriodicCheck.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -43,8 +44,8 @@
                 log_dir=self.resultsdir, payload_url=payload_url) as nebraska:
 
             logging.info('Setting first update response to return no update.')
-            self._create_custom_lsb_release(
-                    nebraska.get_update_url(no_update=True))
+            nebraska.update_config(no_update=True)
+            self._create_custom_lsb_release(nebraska.get_update_url())
             self._restart_update_engine()
 
             # Wait for the first update check.
@@ -60,14 +61,17 @@
             logging.info('First periodic update was initiated.')
 
             logging.info('Setting the next update response to be an update.')
-            self._create_custom_lsb_release(nebraska.get_update_url())
+            nebraska.update_config(no_update=False)
 
-            # Wait for the second update check.
+            # Wait for the subsequent update checks.
             try:
-                utils.poll_for_condition(
-                    lambda: len(self._get_update_requests()) == 2,
-                    desc='2nd periodic update check.',
-                    timeout=2 * periodic_interval)
+                utils.poll_for_condition(
+                        lambda: len(self._get_update_requests()) > 1,
+                        desc='2nd periodic update check.',
+                        timeout=2 * periodic_interval)
+                logging.info(
+                        'Setting further update responses back to no update.')
+                nebraska.update_config(no_update=True)
             except utils.TimeoutError:
                 raise error.TestFail('2nd periodic check not found.')
             logging.info('Second periodic update was initiated.')
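
The reworked polling above accepts any number of requests beyond the first and immediately switches the response back to no-update so later periodic checks stay idle. A condensed sketch, assuming the same helpers (_get_update_requests, update_config) as in the hunk:

# Sketch only: utils and error are the autotest client libraries; 'nebraska'
# is the NebraskaWrapper instance and 'test' is the client test object.
from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib import error


def wait_for_second_periodic_check(test, nebraska, periodic_interval):
    # Serve an update for the next periodic check.
    nebraska.update_config(no_update=False)
    try:
        utils.poll_for_condition(
                lambda: len(test._get_update_requests()) > 1,
                desc='2nd periodic update check.',
                timeout=2 * periodic_interval)
        # Flip further responses back to no-update so later periodic checks
        # do not start another update.
        nebraska.update_config(no_update=True)
    except utils.TimeoutError:
        raise error.TestFail('2nd periodic check not found.')
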
diff --git a/client/site_tests/autoupdate_PeriodicCheck/control b/client/site_tests/autoupdate_PeriodicCheck/control
index 1360ce8..3146826 100644
--- a/client/site_tests/autoupdate_PeriodicCheck/control
+++ b/client/site_tests/autoupdate_PeriodicCheck/control
@@ -11,5 +11,6 @@
 TEST_CLASS = 'platform'
 TEST_CATEGORY = 'Functional'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 job.run_test('autoupdate_PeriodicCheck')
diff --git a/client/site_tests/autoupdate_StartOOBEUpdate/autoupdate_StartOOBEUpdate.py b/client/site_tests/autoupdate_StartOOBEUpdate/autoupdate_StartOOBEUpdate.py
index 9cd5b4d..5895176 100644
--- a/client/site_tests/autoupdate_StartOOBEUpdate/autoupdate_StartOOBEUpdate.py
+++ b/client/site_tests/autoupdate_StartOOBEUpdate/autoupdate_StartOOBEUpdate.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,7 +16,7 @@
 class autoupdate_StartOOBEUpdate(update_engine_test.UpdateEngineTest):
     """Starts a forced update at OOBE.
 
-    Chrome OS will restart when the update is complete so this test will just
+    ChromeOS will restart when the update is complete so this test will just
     start the update. The rest of the processing will be done in a server
     side test.
     """
@@ -28,26 +29,55 @@
         self._clear_custom_lsb_release()
 
 
-    def _skip_to_oobe_update_screen(self):
-        """Skips to the OOBE update check screen."""
-        self._oobe.WaitForJavaScriptCondition("typeof Oobe == 'function' && "
-                                              "Oobe.readyForTesting",
-                                              timeout=30)
-        self._oobe.ExecuteJavaScript('Oobe.skipToUpdateForTesting()')
+    def _navigate_to_oobe_update_screen(self):
+        """Navigates to the OOBE update check screen."""
+        timeout = 30
+        self._oobe.WaitForJavaScriptCondition(
+                "typeof Oobe == 'function' && typeof OobeAPI == 'object' && "
+                "Oobe.readyForTesting",
+                timeout=timeout)
+        self._oobe.WaitForJavaScriptCondition(
+                "OobeAPI.screens.WelcomeScreen.isVisible()", timeout=timeout)
+        self._oobe.ExecuteJavaScript(
+                "OobeAPI.screens.WelcomeScreen.clickNext()")
 
+        if not self._oobe.EvaluateJavaScript(
+                "OobeAPI.screens.NetworkScreen.shouldSkip()"):
+            self._oobe.WaitForJavaScriptCondition(
+                    "OobeAPI.screens.NetworkScreen.isVisible()",
+                    timeout=timeout)
+            self._oobe.ExecuteJavaScript(
+                    "OobeAPI.screens.NetworkScreen.clickNext()")
 
-    def _start_oobe_update(self, update_url, critical_update, full_payload):
+        if not self._oobe.EvaluateJavaScript(
+                "OobeAPI.screens.EulaScreen.shouldSkip()"):
+            self._oobe.WaitForJavaScriptCondition(
+                    "OobeAPI.screens.EulaScreen.isVisible()", timeout=timeout)
+            self._oobe.WaitForJavaScriptCondition(
+                    "OobeAPI.screens.EulaScreen.nextButton.isEnabled()",
+                    timeout=timeout)
+            self._oobe.ExecuteJavaScript(
+                    "OobeAPI.screens.EulaScreen.clickNext()")
+
+        # TODO(yunkez): remove this check after M92 is in stable
+        if self._oobe.EvaluateJavaScript(
+                "typeof OobeAPI.screens.UpdateScreen == 'object'"):
+            self._oobe.WaitForJavaScriptCondition(
+                    "OobeAPI.screens.UpdateScreen.isVisible()",
+                    timeout=timeout)
+        else:
+            self._oobe.WaitForJavaScriptCondition("!$('oobe-update').hidden",
+                                                  timeout=timeout)
+
+    def _start_oobe_update(self, update_url, critical_update):
         """
         Jump to the update check screen at OOBE and wait for update to start.
 
         @param update_url: The omaha update URL we expect to call.
         @param critical_update: True if the update is critical.
-        @param full_payload: Whether we want the full payload or delta.
 
         """
-        self._create_custom_lsb_release(update_url,
-                                        critical_update=critical_update,
-                                        full_payload=full_payload)
+        self._create_custom_lsb_release(update_url)
         # Start chrome instance to interact with OOBE.
         extra_browser_args = []
         if lsbrelease_utils.get_device_type() != 'CHROMEBOOK':
@@ -55,7 +85,7 @@
         self._chrome = chrome.Chrome(auto_login=False,
                                      extra_browser_args=extra_browser_args)
         self._oobe = self._chrome.browser.oobe
-        self._skip_to_oobe_update_screen()
+        self._navigate_to_oobe_update_screen()
 
         timeout = 180
         err_str = 'Update did not start within %d seconds.' % timeout
@@ -76,15 +106,17 @@
                     raise e
 
 
-    def run_once(self, update_url=None, payload_url=None, cellular=False,
-                 critical_update=True, full_payload=None,
-                 interrupt_network=False, interrupt_progress=0.0):
+    def run_once(self,
+                 payload_url=None,
+                 cellular=False,
+                 critical_update=True,
+                 full_payload=None,
+                 interrupt_network=False,
+                 interrupt_progress=0.0):
         """
         Test that will start a forced update at OOBE.
 
-        @param update_url: The omaha URL to call from the OOBE update screen.
-        @param payload_url: Payload url to pass to Nebraska for non-critical
-                            and cellular tests.
+        @param payload_url: Payload url to pass to Nebraska.
         @param cellular: True if we should run this test using a sim card.
         @param critical_update: True if we should have deadline:now in omaha
                                 response.
@@ -100,36 +132,40 @@
 
         """
 
-        if critical_update and not cellular:
-            self._start_oobe_update(update_url, critical_update, full_payload)
-            if interrupt_network:
-                self._wait_for_progress(interrupt_progress)
-                self._take_screenshot(self._BEFORE_INTERRUPT_FILENAME)
-                completed = self._get_update_progress()
-                self._disconnect_reconnect_network_test(update_url)
-                self._take_screenshot(self._AFTER_INTERRUPT_FILENAME)
-
-                if self._is_update_engine_idle():
-                    raise error.TestFail(
-                        'The update was IDLE after interrupt.')
-                if not self._update_continued_where_it_left_off(completed):
-                    raise error.TestFail('The update did not continue where '
-                                         'it left off after interruption.')
-
-                # Remove screenshots since the interrupt test succeeded.
-                self._remove_screenshots()
-            return
-
-        # Setup a Nebraska instance on the DUT for cellular tests and
-        # non-critical updates. Ceullar tests cannot reach devservers.
-        # Non-critical tests don't need a devserver.
         with nebraska_wrapper.NebraskaWrapper(
-            log_dir=self.resultsdir, payload_url=payload_url) as nebraska:
+                log_dir=self.resultsdir,
+                payload_url=payload_url,
+                persist_metadata=True) as nebraska:
 
-            update_url = nebraska.get_update_url(
-                critical_update=critical_update)
+            config = {
+                    'critical_update': critical_update,
+                    'full_payload': full_payload
+            }
+            nebraska.update_config(**config)
+            update_url = nebraska.get_update_url()
+            # Create a nebraska startup config so nebraska starts up before
+            # update_engine after reboot, ready for the interruption tests.
+            nebraska.create_startup_config(**config)
+
             if not cellular:
-                self._start_oobe_update(update_url, critical_update, None)
+                self._start_oobe_update(update_url, critical_update)
+                if interrupt_network:
+                    self._wait_for_progress(interrupt_progress)
+                    self._take_screenshot(self._BEFORE_INTERRUPT_FILENAME)
+                    completed = self._get_update_progress()
+                    self._disconnect_reconnect_network_test()
+                    self._take_screenshot(self._AFTER_INTERRUPT_FILENAME)
+
+                    if self._is_update_engine_idle():
+                        raise error.TestFail(
+                                'The update was IDLE after interrupt.')
+                    if not self._update_continued_where_it_left_off(completed):
+                        raise error.TestFail(
+                                'The update did not continue where '
+                                'it left off after interruption.')
+
+                    # Remove screenshots since the interrupt test succeeded.
+                    self._remove_screenshots()
                 return
 
             try:
@@ -141,10 +177,13 @@
                     test_env.shill.connect_service_synchronous(service,
                                                                connect_timeout)
 
-                    self._start_oobe_update(update_url, critical_update, None)
+                    self._start_oobe_update(update_url, critical_update)
 
-                    # Remove the custom omaha server from lsb release because
-                    # after we reboot it will no longer be running.
+                    # Set no_update=True in the nebraska startup config so
+                    # the post-reboot update check returns no update, in case
+                    # the server-side test does not take over in time.
+                    config['no_update'] = True
+                    nebraska.create_startup_config(**config)
                     self._clear_custom_lsb_release()
 
                     # Need to return from this client test before OOBE reboots
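
For the cellular path above, the key hand-off is that the startup config is rewritten with no_update=True before OOBE reboots the DUT, so the post-reboot update check stays harmless until the server-side test resumes. A sketch using the same names as the hunk ('test', 'nebraska' and 'config' are the objects from run_once):

# Sketch only: config is the dict previously passed to update_config().
def prepare_for_oobe_reboot(test, nebraska, config):
    # The post-reboot Nebraska should answer "no update" so update_engine
    # does not restart the update before the server-side test takes over.
    config['no_update'] = True
    nebraska.create_startup_config(**config)
    # Drop the custom lsb-release entry, as done in the hunk above.
    test._clear_custom_lsb_release()
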
diff --git a/client/site_tests/autoupdate_StartOOBEUpdate/control b/client/site_tests/autoupdate_StartOOBEUpdate/control
index 162fff6..4805264 100644
--- a/client/site_tests/autoupdate_StartOOBEUpdate/control
+++ b/client/site_tests/autoupdate_StartOOBEUpdate/control
@@ -9,6 +9,7 @@
 TEST_TYPE = "client"
 PURPOSE = "Start a forced autoupdate at OOBE."
 TIME = "SHORT"
+PY_VERSION = 3
 
 DOC = """
 This test will start a forced autoupdate at OOBE.
diff --git a/client/site_tests/autoupdate_UpdateFromUI/autoupdate_UpdateFromUI.py b/client/site_tests/autoupdate_UpdateFromUI/autoupdate_UpdateFromUI.py
index 19e2803..7d75d32 100644
--- a/client/site_tests/autoupdate_UpdateFromUI/autoupdate_UpdateFromUI.py
+++ b/client/site_tests/autoupdate_UpdateFromUI/autoupdate_UpdateFromUI.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -12,7 +13,7 @@
 from telemetry.core import exceptions
 
 class autoupdate_UpdateFromUI(update_engine_test.UpdateEngineTest):
-    """Starts an update from the Chrome OS Settings app. """
+    """Starts an update from the ChromeOS Settings app. """
     version = 1
 
     _NOTIFICATION_INTERVAL = 1
@@ -52,7 +53,7 @@
 
     def run_once(self, payload_url):
         """
-        Tests that a Chrome OS software update can be completed from the UI,
+        Tests that a ChromeOS software update can be completed from the UI,
         and that the post-update notification appears when the update is
         complete.
 
@@ -81,9 +82,15 @@
                 tab.Navigate('chrome://os-settings/help')
                 tab.WaitForDocumentReadyStateToBeComplete()
                 self._take_screenshot('before_check_for_updates.png')
+                request_update_js = '''
+                    async function checkForUpdate() {
+                        return await import('chrome://os-settings/chromeos/os_settings.js').then(m =>
+                          m.AboutPageBrowserProxyImpl.getInstance().requestUpdate());
+                    }
+                    checkForUpdate();
+                '''
                 try:
-                    tab.EvaluateJavaScript('settings.AboutPageBrowserProxyImpl'
-                                           '.getInstance().requestUpdate()')
+                    tab.EvaluateJavaScript(request_update_js)
                 except exceptions.EvaluateException:
                     raise error.TestFail(
                         'Failed to find and click Check For Updates button.')
diff --git a/client/site_tests/autoupdate_UpdateFromUI/control b/client/site_tests/autoupdate_UpdateFromUI/control
index 0737a57..a67d29d 100644
--- a/client/site_tests/autoupdate_UpdateFromUI/control
+++ b/client/site_tests/autoupdate_UpdateFromUI/control
@@ -9,6 +9,7 @@
 TEST_TYPE = "client"
 PURPOSE = "Tests triggering an update from the UI."
 TIME = "SHORT"
+PY_VERSION = 3
 DOC = """ Tests triggering an update from the UI. """
 
 job.run_test('autoupdate_UpdateFromUI')
diff --git a/client/site_tests/autoupdate_UrlSwitch/autoupdate_UrlSwitch.py b/client/site_tests/autoupdate_UrlSwitch/autoupdate_UrlSwitch.py
index 945a399..50a660a 100644
--- a/client/site_tests/autoupdate_UrlSwitch/autoupdate_UrlSwitch.py
+++ b/client/site_tests/autoupdate_UrlSwitch/autoupdate_UrlSwitch.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/autoupdate_UrlSwitch/control b/client/site_tests/autoupdate_UrlSwitch/control
index 5e9ad16..bc553b7 100644
--- a/client/site_tests/autoupdate_UrlSwitch/control
+++ b/client/site_tests/autoupdate_UrlSwitch/control
@@ -9,6 +9,7 @@
 TEST_TYPE = "client"
 PURPOSE = "Start a forced autoupdate at OOBE."
 TIME = "SHORT"
+PY_VERSION = 3
 
 DOC = """
 Omaha returns a response with two URLs. We want to test that when we disable
diff --git a/client/site_tests/autoupdate_UserData/autoupdate_UserData.py b/client/site_tests/autoupdate_UserData/autoupdate_UserData.py
index e4ff51b..d347f38 100644
--- a/client/site_tests/autoupdate_UserData/autoupdate_UserData.py
+++ b/client/site_tests/autoupdate_UserData/autoupdate_UserData.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -81,7 +82,9 @@
     def _perform_after_update_checks(self):
         """Check the user preferences and files are the same."""
         with chrome.Chrome(dont_override_profile=True,
-                           autotest_ext=True) as cr:
+                           autotest_ext=True,
+                           username=self._LOGIN_TEST_USERNAME,
+                           password=self._LOGIN_TEST_PASSWORD) as cr:
             # Check test file is still present.
             if not os.path.exists(self._TEST_FILE):
                 raise error.TestFail('Test file was not present after update.')
@@ -112,7 +115,9 @@
         if payload_url:
             with nebraska_wrapper.NebraskaWrapper(
                 log_dir=self.resultsdir, payload_url=payload_url) as nebraska:
-                with chrome.Chrome(autotest_ext=True) as cr:
+                with chrome.Chrome(autotest_ext=True,
+                                   username=self._LOGIN_TEST_USERNAME,
+                                   password=self._LOGIN_TEST_PASSWORD) as cr:
                     self._cr = cr
                     utils.run(['echo', 'hello', '>', self._TEST_FILE])
                     self._modify_input_methods()
diff --git a/client/site_tests/autoupdate_UserData/control b/client/site_tests/autoupdate_UserData/control
index f070345..9b88e4e 100644
--- a/client/site_tests/autoupdate_UserData/control
+++ b/client/site_tests/autoupdate_UserData/control
@@ -9,6 +9,7 @@
 TEST_TYPE = "client"
 PURPOSE = "Tests logging in and out during an update."
 TIME = "SHORT"
+PY_VERSION = 3
 DOC = """ Tests logging in and out during an update. """
 
 job.run_test('autoupdate_UserData')
diff --git a/client/site_tests/bluetooth_AVLHCI/bluetooth_AVLHCI.py b/client/site_tests/bluetooth_AVLHCI/bluetooth_AVLHCI.py
new file mode 100644
index 0000000..f44e00d
--- /dev/null
+++ b/client/site_tests/bluetooth_AVLHCI/bluetooth_AVLHCI.py
@@ -0,0 +1,510 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+from autotest_lib.client.bin import test
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.bluetooth.hcitool import Hcitool
+from autotest_lib.client.common_lib.cros.bluetooth import chipinfo
+from autotest_lib.client.cros.multimedia import bluetooth_facade
+
+
+class bluetooth_AVLHCI(test.test):
+    """Test bluetooth avl HCI requirements."""
+    version = 1
+    MIN_ACL_BUFFER_SIZE = 1021
+    ACL_DATA_PACKET_LENGTH_VALUE_INDEX = 1
+    MIN_ACL_PACKETS_NUMBER = 4
+    MIN_ACL_PACKETS_NUMBER_OPTIONAL = 6
+    TOTAL_NUM_ACL_DATA_PACKETS_VALUE_INDEX = 3
+    MIN_SCO_PACKETS_NUMBER = 6
+    TOTAL_NUM_SYNCHRONOUS_DATA_PACKETS_VALUE_INDEX = 4
+    NON_FLUSHABLE_PACKET_BOUNDARY_FEATURE = 'Non-flushable Packet Boundary Flag'
+    ERRONEOUS_DATA_REPORTING_FEATURE = 'Erroneous Data Reporting'
+    MAC_EVENT_FILTERS = [['1', '2', '00 17 C9 AA AA AA'],
+                         ['1', '2', '00 17 9B AA AA AA'],
+                         ['1', '2', '00 17 94 AA AA AA'],
+                         ['1', '2', '00 17 95 AA AA AA'],
+                         ['1', '2', '00 17 B0 AA AA AA'],
+                         ['1', '2', '00 17 C0 AA AA AA'],
+                         ['1', '2', '00 17 08 AA AA AA'],
+                         ['1', '2', '00 16 EA AA AA AA']]
+
+    CONTROLLER_MEMORY_FULL_STATUS_VALUE = 7
+    CONTROLLER_SUCCESS_STATUS_VALUE = 0
+    SCO_BUFFER_SIZE_VALUE_INDEX = 2
+    MIN_SCO_BUFFER_SIZE = 60
+    LE_CONTROLLER_FEATURE = 'LE Supported (Controller)'
+    BR_EDR_NOT_SUPPORT_FEATURE = 'BR/EDR Not Supported'
+    LE_AND_BR_EDR_CONTROLLER_FEATURE = (
+            'Simultaneous LE and BR/EDR to Same Device Capable (Controller)')
+    MIN_ACCEPT_LIST_SIZE_ENTRIES = 8
+    BR_SECURE_CONNECTION_FEATURE = 'Secure Connections (Controller Support)'
+    LE_DATA_PACKETS_LENGTH_EXTENSION_FEATURE = 'LE Data Packet Length Extension'
+    LE_LINK_LAYER_PRIVACY_FEATURE = 'LL Privacy'
+    MAX_PACKET_LENGTH = 251
+    MIN_RESOLVING_LIST_SIZE_ENTRIES = 8
+    LE_EXTENDED_ADVERTISING_FEATURE = 'LE Extended Advertising'
+    LE_TWO_MEGA_PHYSICAL_CHANNEL_FEATURE = 'LE 2M PHY'
+    MIN_ADVERTISEMENT_SETS_NUMBER = 10
+    LE_ISOCHRONOUS_CHANNELS_FEATURE = 'Isochronous Channels (Host Support)'
+    LE_POWER_CONTROL_REQUEST_FEATURE = 'LE Power Control Request'
+    LE_POWER_CHANGE_INDICATION_FEATURE = 'LE Power Change Indication'
+    GOOGLE_FEATURE_SPECIFICATION_VERSION = 98
+    LE_ADV_RSSI_MONITORING = 'RSSI Monitoring of LE advertisements'
+    LE_ADV_MONITORING = 'Advertising Monitoring of LE advertisements'
+
+    def initialize(self):
+        """Initializes Autotest."""
+        self.hcitool = Hcitool()
+        self.facade = bluetooth_facade.BluezFacadeLocal()
+
+    def spec_legacy_test(self):
+        """Checks Bluetooth legacy specification."""
+        logging.info('* Running Bluetooth spec_legacy_test:')
+        self.test_flushable_data_packets()
+        self.test_erroneous_data_reporting()
+        self.test_event_filter_size()
+        self.test_acl_min_buffer_number()
+        self.test_acl_min_buffer_number_optional()
+        self.test_acl_min_buffer_size()
+        self.test_sco_min_buffer_number()
+        self.test_sco_min_buffer_size()
+
+    def spec_4_0_test(self):
+        """Checks Bluetooth version 4.0 specification."""
+        logging.info('* Running Bluetooth spec_4_0_test:')
+        self.test_low_energy_feature()
+        self.test_accept_list_size()
+
+    def spec_4_1_test(self):
+        """Checks Bluetooth version 4.1 specification."""
+        logging.info('* Running Bluetooth spec_4_1_test:')
+        self.test_le_dual_mode_topology_feature()
+        self.test_br_edr_controller_secure_connection_feature()
+
+    def spec_4_2_test(self):
+        """Checks Bluetooth version 4.2 specification."""
+        logging.info('* Running Bluetooth spec_4_2_test:')
+        self.test_le_data_packet_length_extension_feature()
+        self.test_packet_data_length()
+        self.test_le_link_layer_privacy_feature()
+        self.test_resolving_list_size()
+
+    def spec_5_0_test(self):
+        """Check Bluetooth version 5.0 specification."""
+        logging.info('* Running Bluetooth spec_5_0_test:')
+        self.test_le_extended_advertising_feature()
+        self.test_advertisement_sets_number()
+        self.test_le_two_mega_physical_channel_feature()
+
+    def spec_5_2_test(self):
+        """Checks Bluetooth version 5.0 specification."""
+        logging.info('* Running Bluetooth spec_5_2_test:')
+        self.test_le_isochronous_channels_feature()
+        self.test_le_power_control_feature()
+
+    def hci_ext_msft_test(self):
+        """Checks Microsoft Bluetooth HCI command execution."""
+        logging.info('* Running Bluetooth hci_ext_msft_test:')
+        self.test_hci_vs_msft_read_supported_features()
+
+    def hci_ext_aosp_test(self):
+        """Checks Android Bluetooth HCI command execution."""
+        logging.info('* Running Bluetooth hci_ext_aosp_test:')
+        self.test_aosp_quality_report()
+        self.test_le_apcf()
+        self.test_le_batch_scan_and_events()
+        self.test_le_extended_set_scan_parameters()
+        self.test_le_get_controller_activity_energy_info()
+        self.test_get_controller_debug_info_sub_event()
+
+    def assert_not_support(self, feature, supported_features):
+        """Verifies that the feature is not supported.
+
+        @param feature: The feature which should be unsupported.
+        @param supported_features: List of supported features.
+
+        @raise error.TestFail: If the feature is supported.
+        """
+        if feature in supported_features:
+            raise error.TestFail(feature + ' should not be supported')
+        logging.info('%s is not supported as expected.', feature)
+
+    def assert_support(self, feature, supported_features):
+        """Verifies that the feature is supported.
+
+        @param feature: The feature which should be supported.
+        @param supported_features: List of supported features.
+
+        @raise error.TestFail: If the feature is unsupported.
+        """
+        if feature not in supported_features:
+            raise error.TestFail(feature + ' should be supported')
+        logging.info('%s is supported.', feature)
+
+    def assert_equal(self, actual, expected, value_name):
+        """Verifies that actual value is equal to expected value.
+
+        @param actual: The value we got.
+        @param expected: The value we expected.
+        @param value_name: The name of the value. It is used for TestFail
+        message.
+
+        @raise error.TestFail: If the values are unequal.
+        """
+        if actual != expected:
+            raise error.TestFail('%s: Got %s, expected %s' %
+                                 (value_name, actual, expected))
+        logging.info('%s = %d, which is expected.' % (value_name, actual))
+
+    def assert_greater_equal(self, value, threshold, value_name):
+        """Verifies that value is greater than or equal to threshold.
+
+        @param value: The value we got.
+        @param threshold: The threshold of the value.
+        @param value_name: The name of the value. It is used for TestFail
+        message.
+
+        @raise error.TestFail: If the value is less than threshold.
+        """
+        if value < threshold:
+            raise error.TestFail('%s: %s is below the threshold %s' %
+                                 (value_name, value, threshold))
+        logging.info('%s = %d, which is >= %d.' %
+                     (value_name, value, threshold))
+
+    def test_flushable_data_packets(self):
+        """Checks the Bluetooth controller must support flushable data packets.
+
+        Note: As long as the chips are verified by SIG, setting the
+                'Non-flushable Packet Boundary Flag' bit guarantees the related
+                functionalities.
+        """
+        logging.info('** Running Bluetooth flushable data packets test:')
+        supported_features = self.hcitool.read_local_supported_features()[1]
+        self.assert_support(self.NON_FLUSHABLE_PACKET_BOUNDARY_FEATURE,
+                            supported_features)
+
+    def test_erroneous_data_reporting(self):
+        """Checks the Bluetooth controller supports Erroneous Data Reporting."""
+        logging.info('** Running Bluetooth erroneous data reporting test:')
+        supported_features = self.hcitool.read_local_supported_features()[1]
+        self.assert_support(self.ERRONEOUS_DATA_REPORTING_FEATURE,
+                            supported_features)
+
+    def test_event_filter_size(self):
+        """Checks the Bluetooth controller event filter entries count.
+
+        Checks the Bluetooth controller event filter has at least 8 entries.
+        """
+        logging.info('** Running Bluetooth event filter size test:')
+        number_of_added_filters = 0
+        for event_filter in self.MAC_EVENT_FILTERS:
+            set_filter_result = self.hcitool.set_event_filter(
+                    event_filter[0], event_filter[1], event_filter[2])[0]
+            if set_filter_result == self.CONTROLLER_MEMORY_FULL_STATUS_VALUE:
+                self.facade.reset_on()
+                raise error.TestFail('Filter ' + ''.join(event_filter) +
+                                     ' failed to apply. Only ' +
+                                     str(number_of_added_filters) +
+                                     ' filters were added')
+
+            elif set_filter_result != self.CONTROLLER_SUCCESS_STATUS_VALUE:
+                self.facade.reset_on()
+                raise error.TestError(
+                        'Failed to apply filter, status code is ' +
+                        str(set_filter_result))
+            number_of_added_filters += 1
+        logging.info(
+                'All 8 event filters were set successfully with values %s',
+                self.MAC_EVENT_FILTERS)
+        # Reset filter after done with test
+        if not self.hcitool.set_event_filter('0', '0', '0'):
+            logging.error('Unable to clear filter, reset bluetooth')
+            self.facade.reset_on()
+        else:
+            logging.debug('Filter cleared')
+
+    def test_acl_min_buffer_number(self):
+        """Checks if ACL minimum buffers count(number of data packets) >=4."""
+        logging.info('** Running Bluetooth acl min buffer number test:')
+        acl_buffers_count = self.hcitool.read_buffer_size()[
+                self.TOTAL_NUM_ACL_DATA_PACKETS_VALUE_INDEX]
+        self.assert_greater_equal(acl_buffers_count,
+                                  self.MIN_ACL_PACKETS_NUMBER,
+                                  'ACL buffers count')
+
+    def test_acl_min_buffer_number_optional(self):
+        """Checks if ACL minimum buffers count(number of data packets) >=6."""
+        logging.info(
+                '** Running Bluetooth acl min buffer number test (optional)":')
+        acl_buffers_count = self.hcitool.read_buffer_size()[
+                self.TOTAL_NUM_ACL_DATA_PACKETS_VALUE_INDEX]
+        if acl_buffers_count < self.MIN_ACL_PACKETS_NUMBER_OPTIONAL:
+            raise error.TestWarn(
+                    'ACL buffers count: %d is below the optional threshold %d'
+                    %
+                    (acl_buffers_count, self.MIN_ACL_PACKETS_NUMBER_OPTIONAL))
+        logging.info('ACL buffers count = %d, which is >= %d.' %
+                     (acl_buffers_count, self.MIN_ACL_PACKETS_NUMBER_OPTIONAL))
+
+    def test_acl_min_buffer_size(self):
+        """Checks if ACL minimum buffers size >=1021."""
+        logging.info('** Running Bluetooth acl min buffer size test:')
+        acl_buffer_size = self.hcitool.read_buffer_size()[
+                self.ACL_DATA_PACKET_LENGTH_VALUE_INDEX]
+        self.assert_greater_equal(acl_buffer_size, self.MIN_ACL_BUFFER_SIZE,
+                                  'ACL buffer size')
+
+    def test_sco_min_buffer_number(self):
+        """Checks if SCO minimum buffer size(number of data packets) >=6."""
+        logging.info('** Running Bluetooth sco min buffer number test:')
+        sco_buffers_count = self.hcitool.read_buffer_size()[
+                self.TOTAL_NUM_SYNCHRONOUS_DATA_PACKETS_VALUE_INDEX]
+        self.assert_greater_equal(sco_buffers_count,
+                                  self.MIN_SCO_PACKETS_NUMBER,
+                                  'SCO buffers count')
+
+    def test_sco_min_buffer_size(self):
+        """Checks if SCO minimum buffer size >=60."""
+        logging.info('** Running Bluetooth SCO min buffer size test:')
+        sco_buffer_size = self.hcitool.read_buffer_size()[
+                self.SCO_BUFFER_SIZE_VALUE_INDEX]
+        self.assert_greater_equal(sco_buffer_size, self.MIN_SCO_BUFFER_SIZE,
+                                  'SCO buffer size')
+
+    def test_low_energy_feature(self):
+        """Checks if Bluetooth controller must use support
+        Bluetooth Low Energy (BLE)."""
+        logging.info(
+                '** Running support Bluetooth Low Energy (BLE) feature test:')
+        supported_features = self.hcitool.read_local_supported_features()[1]
+        self.assert_support(self.LE_CONTROLLER_FEATURE, supported_features)
+
+    def test_accept_list_size(self):
+        """Checks if accept list size >= 8 entries."""
+        logging.info('** Running accept list size test:')
+        accept_list_entries_count = self.hcitool.le_read_accept_list_size()[1]
+        self.assert_greater_equal(accept_list_entries_count,
+                                  self.MIN_ACCEPT_LIST_SIZE_ENTRIES,
+                                  'Accept list size')
+
+    def test_le_dual_mode_topology_feature(self):
+        """Checks if Bluetooth controller supports LE dual mode topology."""
+        logging.info('** Running LE dual mode topology feature test:')
+        supported_features = self.hcitool.read_local_supported_features()[1]
+        self.assert_not_support(self.BR_EDR_NOT_SUPPORT_FEATURE,
+                                supported_features)
+        self.assert_support(self.LE_CONTROLLER_FEATURE, supported_features)
+        self.assert_support(self.LE_AND_BR_EDR_CONTROLLER_FEATURE,
+                            supported_features)
+
+    def test_br_edr_controller_secure_connection_feature(self):
+        """Checks if Bluetooth controller supports BR/EDR secure connections."""
+        logging.info('** Running BR/EDR controller secure connection feature '
+                     'test:')
+        supported_features = self.hcitool.read_local_extended_features(2)[3]
+        self.assert_support(self.BR_SECURE_CONNECTION_FEATURE,
+                            supported_features)
+
+    def test_le_data_packet_length_extension_feature(self):
+        """Checks LE data packet length extension support."""
+        logging.info('** Running LE data packet length extension test:')
+        supported_features = self.hcitool.read_le_local_supported_features()[1]
+        self.assert_support(self.LE_DATA_PACKETS_LENGTH_EXTENSION_FEATURE,
+                            supported_features)
+
+    def test_packet_data_length(self):
+        """Checks if data packet length <= 251."""
+        logging.info('** Running packet data length test:')
+        packet_data_length = self.hcitool.le_read_maximum_data_length()[1]
+        self.assert_equal(packet_data_length, self.MAX_PACKET_LENGTH,
+                          'Packet data length')
+
+    def test_le_link_layer_privacy_feature(self):
+        """Checks if Bluetooth controller supports link layer privacy."""
+        logging.info('** Running link layer privacy test:')
+        supported_features = self.hcitool.read_le_local_supported_features()[1]
+        self.assert_support(self.LE_LINK_LAYER_PRIVACY_FEATURE,
+                            supported_features)
+
+    def test_resolving_list_size(self):
+        """Checks if resolving list size >= 8 entries."""
+        logging.info('** Running resolving list size test:')
+        resolving_list_entries_count = (
+                self.hcitool.le_read_resolving_list_size()[1])
+        self.assert_greater_equal(resolving_list_entries_count,
+                                  self.MIN_RESOLVING_LIST_SIZE_ENTRIES,
+                                  'Resolving list size')
+
+    def test_le_extended_advertising_feature(self):
+        """Checks if Bluetooth controller supports LE advertising extension."""
+        logging.info('** Running LE extended advertising feature test:')
+        supported_features = self.hcitool.read_le_local_supported_features()[1]
+        self.assert_support(self.LE_EXTENDED_ADVERTISING_FEATURE,
+                            supported_features)
+
+    def test_advertisement_sets_number(self):
+        """Checks if number of advertisement sets >= 10."""
+        logging.info('** Running advertisement sets number feature test:')
+        advertisement_sets_number = (
+                self.hcitool.le_read_number_of_supported_advertising_sets()[1])
+        self.assert_greater_equal(advertisement_sets_number,
+                                  self.MIN_ADVERTISEMENT_SETS_NUMBER,
+                                  'Advertisement sets number')
+
+    def test_le_two_mega_physical_channel_feature(self):
+        """Checks if Bluetooth controller supports 2 Msym/s PHY for LE."""
+        logging.info('** Running LE two mega physical channel feature test:')
+        supported_features = self.hcitool.read_le_local_supported_features()[1]
+        self.assert_support(self.LE_TWO_MEGA_PHYSICAL_CHANNEL_FEATURE,
+                            supported_features)
+
+    def test_le_isochronous_channels_feature(self):
+        """Checks if ISO channels feature is supported."""
+        logging.info('** Running LE isochronous channels feature test:')
+        supported_features = self.hcitool.read_le_local_supported_features()[1]
+        self.assert_support(self.LE_ISOCHRONOUS_CHANNELS_FEATURE,
+                            supported_features)
+
+    def test_le_power_control_feature(self):
+        """Checks if Bluetooth controller supports LE power control."""
+        logging.info('** Running LE power control feature test:')
+        supported_features = self.hcitool.read_le_local_supported_features()[1]
+        self.assert_support(self.LE_POWER_CONTROL_REQUEST_FEATURE,
+                            supported_features)
+        self.assert_support(self.LE_POWER_CHANGE_INDICATION_FEATURE,
+                            supported_features)
+
+    def test_hci_vs_msft_read_supported_features(self):
+        """Checks if Bluetooth controller supports VS MSFT features."""
+        logging.info('** Running hci VS MSFT read supported features:')
+        chipset_name = self.facade.get_chipset_name()
+        chip_info = chipinfo.query(chipset_name)
+        if not chip_info.msft_support:
+            raise error.TestNAError('Chipset ' + chipset_name +
+                                    ' does not support MSFT HCI extensions')
+        vs_msft_supported_features = (
+                self.hcitool.vs_msft_read_supported_features(
+                        chip_info.msft_ocf)[2])
+        self.assert_support(self.LE_ADV_RSSI_MONITORING,
+                            vs_msft_supported_features)
+        self.assert_support(self.LE_ADV_MONITORING, vs_msft_supported_features)
+
+    def assert_aosp_hci(self):
+        """Checks if a chipset supports AOSP HCI extensions."""
+        chipset_name = self.facade.get_chipset_name()
+        chip_info = chipinfo.query(chipset_name)
+        if not chip_info.aosp_support:
+            raise error.TestNAError('Chipset ' + chipset_name +
+                                    ' does not support AOSP HCI extensions')
+
+    def test_aosp_quality_report(self):
+        """Checks if Bluetooth controller supports AOSP quality report."""
+        logging.info('** Running aosp quality report test:')
+        self.assert_aosp_hci()
+        version_supported = (
+                self.hcitool.le_get_vendor_capabilities_command()[8])
+        if version_supported < self.GOOGLE_FEATURE_SPECIFICATION_VERSION:
+            raise error.TestFail('Version supported = %s but expected >= %s' %
+                                 (version_supported,
+                                  self.GOOGLE_FEATURE_SPECIFICATION_VERSION))
+        bluetooth_quality_report_support = (
+                self.hcitool.le_get_vendor_capabilities_command()[14])
+        if not bluetooth_quality_report_support:
+            raise error.TestFail('AOSP Quality Report is not supported')
+        logging.info(
+                'With bluetooth_quality_report_support =%d and '
+                'version_supported >=%s, the controller supports the '
+                'Android HCI Extension Bluetooth Quality Report.',
+                bluetooth_quality_report_support, version_supported)
+
+    def test_le_apcf(self):
+        """Checks if APCF filtering feature is supported."""
+        logging.info('** Running LE APCF test:')
+        self.assert_aosp_hci()
+        filtering_support = (
+                self.hcitool.le_get_vendor_capabilities_command()[5])
+        if not filtering_support:
+            raise error.TestFail('LE APCF feature is not supported')
+        logging.info('LE APCF feature is supported.')
+
+    def test_le_batch_scan_and_events(self):
+        """Checks if LE batch scan and events feature is supported."""
+        logging.info('** Running LE batch scan and events test:')
+        self.assert_aosp_hci()
+        total_scan_result_storage = (
+                self.hcitool.le_get_vendor_capabilities_command()[3])
+        if total_scan_result_storage == 0:
+            raise error.TestFail(
+                    'LE batch scan and events feature is not supported')
+        logging.info('LE batch scan and events feature is supported.')
+
+    def test_le_extended_set_scan_parameters(self):
+        """Checks if LE extended set scan parameters feature is supported."""
+        logging.info('** Running LE extended set scan parameters test:')
+        self.assert_aosp_hci()
+        extended_scan_support = (
+                self.hcitool.le_get_vendor_capabilities_command()[10])
+        if not extended_scan_support:
+            raise error.TestFail(
+                    'LE extended set scan parameters feature is not supported')
+        logging.info('LE extended set scan parameters feature is supported.')
+
+    def test_le_get_controller_activity_energy_info(self):
+        """Checks if LE get controller activity energy info feature is
+        supported. """
+        logging.info('** Running LE get controller activity energy info test:')
+        self.assert_aosp_hci()
+        activity_energy_info_support = (
+                self.hcitool.le_get_vendor_capabilities_command()[7])
+        if not activity_energy_info_support:
+            raise error.TestFail(
+                    'LE get controller activity energy info feature is '
+                    'not supported')
+        logging.info(
+                'LE get controller activity energy info feature is supported.')
+
+    def test_get_controller_debug_info_sub_event(self):
+        """Checks if get controller debug info and sub-event features is
+        supported. """
+        logging.info('** Running get controller debug info sub-event test:')
+        self.assert_aosp_hci()
+        debug_logging_support = (
+                self.hcitool.le_get_vendor_capabilities_command()[11])
+        if not debug_logging_support:
+            raise error.TestFail(
+                    'Get controller debug info and sub-event features are not '
+                    'supported')
+        logging.info(
+                'Get controller debug info and sub-event features are supported.'
+        )
+
+    def avl_hci_batch_run(self, test_name=None):
+        """Runs bluetooth_AVLHCI test batch (all test).
+
+        @param test_name: test name as string from control file.
+        """
+        if test_name is None:
+            self.spec_legacy_test()
+            self.spec_4_0_test()
+            self.spec_4_1_test()
+            self.spec_4_2_test()
+            self.spec_5_0_test()
+            self.spec_5_2_test()
+            self.hci_ext_msft_test()
+            self.hci_ext_aosp_test()
+        else:
+            getattr(self, test_name)()
+
+    def run_once(self, test_name=None):
+        """Runs bluetooth_AVLHCI.
+
+        @param test_name: test name as string from control file.
+        """
+        self.facade.reset_on()
+        self.avl_hci_batch_run(test_name)
+        self.facade.reset_on()
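
Several of the checks above index into the list returned by hcitool.read_buffer_size(); the layout implied by the *_VALUE_INDEX constants is summarized in this sketch (inferred from the constants in this file, not from separate hcitool documentation):

# Sketch only: assumes read_buffer_size() returns
# [status, acl_packet_length, sco_packet_length, num_acl, num_sco].
def summarize_buffer_size(hcitool):
    result = hcitool.read_buffer_size()
    return {
            # ACL_DATA_PACKET_LENGTH_VALUE_INDEX
            'acl_packet_length': result[1],
            # SCO_BUFFER_SIZE_VALUE_INDEX
            'sco_packet_length': result[2],
            # TOTAL_NUM_ACL_DATA_PACKETS_VALUE_INDEX
            'acl_packet_count': result[3],
            # TOTAL_NUM_SYNCHRONOUS_DATA_PACKETS_VALUE_INDEX
            'sco_packet_count': result[4],
    }
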
diff --git a/client/site_tests/bluetooth_AVLHCI/control b/client/site_tests/bluetooth_AVLHCI/control
new file mode 100644
index 0000000..92fcaa5
--- /dev/null
+++ b/client/site_tests/bluetooth_AVLHCI/control
@@ -0,0 +1,38 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Mohammad Sabri <mohammad.kh.sabri@exalt.ps>'
+NAME = 'bluetooth_AVLHCI'
+ATTRIBUTES = ''
+PURPOSE = (
+    'Batch of bluetooth AVL standard specifications tests'
+)
+CRITERIA = 'all tests passed'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = "bluetooth"
+TEST_TYPE = 'client'
+PY_VERSION = 3
+
+DOC = """
+    A batch of bluetooth_AVLHCI tests. This test is written as a batch of tests
+    in order to reduce test time.
+
+    This class can be called to run the entire test batch or to run a specific
+    test only.
+
+    Currently, the batch contains the following tests:
+    - spec_legacy_test
+    - spec_4_0_test
+    - spec_4_1_test
+    - spec_4_2_test
+    - spec_5_0_test
+    - spec_5_2_test
+    - hci_ext_msft_test
+    - hci_ext_aosp_test
+
+    """
+
+job.run_test('bluetooth_AVLHCI')
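
The DOC above notes that the batch can also run a single test group; the per-subtest control files that follow do this by deriving test_name from NAME, which avl_hci_batch_run() dispatches via getattr(). An equivalent direct invocation in a control file would look like this sketch:

# Sketch: run only the Bluetooth 5.0 checks instead of the whole batch.
job.run_test('bluetooth_AVLHCI', test_name='spec_5_0_test')
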
diff --git a/client/site_tests/bluetooth_AVLHCI/control.hci_ext_aosp_test b/client/site_tests/bluetooth_AVLHCI/control.hci_ext_aosp_test
new file mode 100644
index 0000000..27c5e58
--- /dev/null
+++ b/client/site_tests/bluetooth_AVLHCI/control.hci_ext_aosp_test
@@ -0,0 +1,32 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Rajaa Abdallah <rajaa.abdallah@exalt.ps>'
+NAME = 'bluetooth_AVLHCI.hci_ext_aosp_test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+PURPOSE = (
+    'Tests the Android HCI extension part in the Bluetooth AVL requirements'
+)
+CRITERIA = 'all subtests passed'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+
+DOC = """
+    This class can be called to run bluetooth_AVLHCI.hci_ext_aosp_test
+
+    This test contains these subtests:
+       test_aosp_quality_report()
+       test_le_apcf()
+       test_le_batch_scan_and_events()
+       test_le_extended_set_scan_parameters()
+       test_le_get_controller_activity_energy_info()
+       test_get_controller_debug_info_sub_event()
+
+    """
+
+job.run_test('bluetooth_AVLHCI', test_name=NAME.split('.')[1])
diff --git a/client/site_tests/bluetooth_AVLHCI/control.hci_ext_msft_test b/client/site_tests/bluetooth_AVLHCI/control.hci_ext_msft_test
new file mode 100644
index 0000000..aa8b10d
--- /dev/null
+++ b/client/site_tests/bluetooth_AVLHCI/control.hci_ext_msft_test
@@ -0,0 +1,27 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Rajaa Abdallah <rajaa.abdallah@exalt.ps>'
+NAME = 'bluetooth_AVLHCI.hci_ext_msft_test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+PURPOSE = (
+    'Tests the Microsoft HCI extension part in the Bluetooth AVL requirements'
+)
+CRITERIA = 'all subtests passed'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+
+DOC = """
+    This class can be called to run bluetooth_AVLHCI.hci_ext_msft_test
+
+    This test contains these subtests:
+        test_hci_vs_msft_read_supported_features()
+
+    """
+
+job.run_test('bluetooth_AVLHCI', test_name=NAME.split('.')[1])
diff --git a/client/site_tests/bluetooth_AVLHCI/control.spec_4_0_test b/client/site_tests/bluetooth_AVLHCI/control.spec_4_0_test
new file mode 100644
index 0000000..c7f5e08
--- /dev/null
+++ b/client/site_tests/bluetooth_AVLHCI/control.spec_4_0_test
@@ -0,0 +1,26 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Rajaa Abdallah <rajaa.abdallah@exalt.ps>'
+NAME = 'bluetooth_AVLHCI.spec_4_0_test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+PURPOSE = 'Tests the Bluetooth v4.0 specification of Bluetooth AVL requirements'
+CRITERIA = 'all subtests passed'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+
+DOC = """
+    This class can be called to run bluetooth_AVLHCI.spec_4_0_test
+
+    This test contains these subtests:
+        test_low_energy_feature()
+        test_accept_list_size()
+
+    """
+
+job.run_test('bluetooth_AVLHCI', test_name=NAME.split('.')[1])
diff --git a/client/site_tests/bluetooth_AVLHCI/control.spec_4_1_test b/client/site_tests/bluetooth_AVLHCI/control.spec_4_1_test
new file mode 100644
index 0000000..16fbaba
--- /dev/null
+++ b/client/site_tests/bluetooth_AVLHCI/control.spec_4_1_test
@@ -0,0 +1,28 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = (
+    'Mohammad Sabri <mohammad.kh.sabri@exalt.ps>, '
+    'Rajaa Abdallah <rajaa.abdallah@exalt.ps>')
+NAME = 'bluetooth_AVLHCI.spec_4_1_test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+PURPOSE = 'Tests the Bluetooth v4.1 specification of Bluetooth AVL requirements'
+CRITERIA = 'all subtests passed'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+
+DOC = """
+    This class can be called to run bluetooth_AVLHCI.spec_4_1_test
+
+    This test contains these subtests:
+        test_le_dual_mode_topology_feature()
+        test_br_edr_controller_secure_connection_feature()
+
+    """
+
+job.run_test('bluetooth_AVLHCI', test_name=NAME.split('.')[1])
diff --git a/client/site_tests/bluetooth_AVLHCI/control.spec_4_2_test b/client/site_tests/bluetooth_AVLHCI/control.spec_4_2_test
new file mode 100644
index 0000000..9489de1
--- /dev/null
+++ b/client/site_tests/bluetooth_AVLHCI/control.spec_4_2_test
@@ -0,0 +1,28 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Rajaa Abdallah <rajaa.abdallah@exalt.ps>'
+NAME = 'bluetooth_AVLHCI.spec_4_2_test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+PURPOSE = 'Tests the Bluetooth v4.2 specification of Bluetooth AVL requirements'
+CRITERIA = 'all subtests passed'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+
+DOC = """
+    This class can be called to run bluetooth_AVLHCI.spec_4_2_test
+
+    This test contains these subtests:
+        test_le_data_packet_length_extension_feature()
+        test_packet_data_length()
+        test_le_link_layer_privacy_feature()
+        test_resolving_list_size()
+
+    """
+
+job.run_test('bluetooth_AVLHCI', test_name=NAME.split('.')[1])
diff --git a/client/site_tests/bluetooth_AVLHCI/control.spec_5_0_test b/client/site_tests/bluetooth_AVLHCI/control.spec_5_0_test
new file mode 100644
index 0000000..e30ce5b
--- /dev/null
+++ b/client/site_tests/bluetooth_AVLHCI/control.spec_5_0_test
@@ -0,0 +1,29 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = (
+    'Rajaa Abdallah <rajaa.abdallah@exalt.ps>, '
+    'Mohammad Sabri <mohammad.kh.sabri@exalt.ps>')
+NAME = 'bluetooth_AVLHCI.spec_5_0_test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+PURPOSE = 'Tests the Bluetooth v5.0 specification of Bluetooth AVL requirements'
+CRITERIA = 'all subtests passed'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+
+DOC = """
+    This class can be called to run bluetooth_AVLHCI.spec_5_0_test
+
+    This test contains these subtests:
+        test_le_extended_advertising_feature()
+        test_advertisement_sets_number()
+        test_le_two_mega_physical_channel_feature()
+
+    """
+
+job.run_test('bluetooth_AVLHCI', test_name=NAME.split('.')[1])
diff --git a/client/site_tests/bluetooth_AVLHCI/control.spec_5_2_test b/client/site_tests/bluetooth_AVLHCI/control.spec_5_2_test
new file mode 100644
index 0000000..cf1621e
--- /dev/null
+++ b/client/site_tests/bluetooth_AVLHCI/control.spec_5_2_test
@@ -0,0 +1,26 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Rajaa Abdallah <rajaa.abdallah@exalt.ps>'
+NAME = 'bluetooth_AVLHCI.spec_5_2_test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+PURPOSE = 'Tests the Bluetooth v5.2 specification of Bluetooth AVL requirements'
+CRITERIA = 'all subtests passed'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+
+DOC = """
+    This class can be called to run bluetooth_AVLHCI.spec_5_2_test
+
+    This test contains these subtests:
+        test_le_isochronous_channels_feature()
+        test_le_power_control_feature()
+
+    """
+
+job.run_test('bluetooth_AVLHCI', test_name=NAME.split('.')[1])
diff --git a/client/site_tests/bluetooth_AVLHCI/control.spec_legacy_test b/client/site_tests/bluetooth_AVLHCI/control.spec_legacy_test
new file mode 100644
index 0000000..b86c277
--- /dev/null
+++ b/client/site_tests/bluetooth_AVLHCI/control.spec_legacy_test
@@ -0,0 +1,34 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Mohammad Sabri <mohammad.kh.sabri@exalt.ps>'
+NAME = 'bluetooth_AVLHCI.spec_legacy_test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+PURPOSE = (
+    'Tests the legacy standard specifications of bluetooth AVL requirements'
+)
+CRITERIA = 'all subtests passed'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = "bluetooth"
+TEST_TYPE = 'client'
+PY_VERSION = 3
+
+DOC = """
+    This class can be called to run bluetooth_AVLHCI.spec_legacy_test
+
+    This test contains these subtests:
+        test_flushable_data_packets()
+        test_erroneous_data_reporting()
+        test_event_filter_size()
+        test_acl_min_buffer_number()
+        test_acl_min_buffer_number_optional()
+        test_acl_min_buffer_size()
+        test_sco_min_buffer_number()
+        test_sco_min_buffer_size()
+
+    """
+
+job.run_test('bluetooth_AVLHCI', test_name=NAME.split('.')[1])
diff --git a/client/site_tests/bluetooth_AdapterHealth/bluetooth_AdapterHealth.py b/client/site_tests/bluetooth_AdapterHealth/bluetooth_AdapterHealth.py
deleted file mode 100644
index 62a045e..0000000
--- a/client/site_tests/bluetooth_AdapterHealth/bluetooth_AdapterHealth.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.bluetooth import bluetooth_semiauto_helper
-
-
-class bluetooth_AdapterHealth(
-        bluetooth_semiauto_helper.BluetoothSemiAutoHelper):
-    """Checks whether the Bluetooth adapter is present and working."""
-    version = 1
-
-    def _find_kernel_errors(self):
-        """Fail test for any suspicious log entries from kernel.
-
-        Ignore some known errors in order to find new ones.
-
-        """
-        fail_terms = ['[^a-z]err[^a-z]']
-        ignore_terms = ['RFKILL control',
-                        '"Service Changed" characteristic',
-                        'Unknown Evt ID: 19',
-                        'Failed to set privacy: Rejected']
-
-        log_cmd = 'grep -i bluetooth /var/log/messages'
-        for term in ignore_terms:
-            log_cmd += ' | grep -v \'%s\'' % term
-
-        for term in fail_terms:
-            search_cmd = '%s | grep -i \'%s\'' % (log_cmd, term)
-            log_entries = utils.run(search_cmd, ignore_status=True).stdout
-            if len(log_entries) > 0:
-                log_entries = [l for l in log_entries.split('\n') if l != '']
-                logging.info(log_entries)
-                self.collect_logs('Bluetooth kernel error')
-
-                # Add snippet of the log to the error message
-                # unless there are many errors (>5)
-                # This is helpful when looking at stainless results
-                error_str = 'Bluetooth kernel error found!'
-                if len(log_entries) <= 5:
-                    error_str = error_str + ' | '
-                    for l in log_entries:
-                        error_str = error_str +  l.split('ERR')[1] + ' | '
-
-                raise error.TestFail(error_str)
-
-    def warmup(self):
-        """Overwrite parent warmup; no need to log in."""
-        pass
-
-    def run_once(self):
-        """Entry point of this test."""
-        if not self.supports_bluetooth():
-            return
-
-        # Start btmon running.
-        self.start_dump()
-
-        self.poll_adapter_presence()
-
-        # Enable then disable adapter.
-        self.set_adapter_power(True)
-        self.set_adapter_power(False)
-
-        # Check for errors in logs.
-        self._find_kernel_errors()
diff --git a/client/site_tests/bluetooth_AdapterHealth/control b/client/site_tests/bluetooth_AdapterHealth/control
deleted file mode 100644
index e3293bd..0000000
--- a/client/site_tests/bluetooth_AdapterHealth/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kathrelkeld"
-NAME = "bluetooth_AdapterHealth"
-PURPOSE = "Basic health test for Bluetooth adapter."
-CRITERIA = """
-This test will fail if there are any basic problems with the Bluetooth adapter.
-"""
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "touch"
-TEST_TYPE = "client"
-DEPENDENCIES = 'bluetooth'
-
-DOC = """
-Uses dbus commands to enable the adapter then disable it.  Test will fail if
-the adapter is not found.  Older boards without Bluetooth are ignored.
-"""
-
-job.run_test('bluetooth_AdapterHealth')
diff --git a/client/site_tests/bluetooth_FastPairUI/bluetooth_FastPairUI.py b/client/site_tests/bluetooth_FastPairUI/bluetooth_FastPairUI.py
new file mode 100644
index 0000000..6600bba
--- /dev/null
+++ b/client/site_tests/bluetooth_FastPairUI/bluetooth_FastPairUI.py
@@ -0,0 +1,126 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import ui_utils
+from autotest_lib.client.common_lib.cros import chrome
+from autotest_lib.client.cros.graphics import graphics_utils
+from autotest_lib.client.cros.bluetooth import bluetooth_device_xmlrpc_server
+
+
+class bluetooth_FastPairUI(graphics_utils.GraphicsTest):
+    """Click through the Fast Pair pairing flow UI"""
+
+    version = 1
+
+    # Notification IDs
+    DISCOVERY_GUEST_ID = 'cros_fast_pair_discovery_guest_notification_id'
+    DISCOVERY_USER_ID = 'cros_fast_pair_discovery_user_notification_id'
+    PAIRING_ID = 'cros_fast_pair_pairing_notification_id'
+    ERROR_ID = 'cros_fast_pair_error_notification_id'
+
+    # Node roles
+    BUTTON_ROLE = 'button'
+
+    # Node names
+    CONNECT = 'CONNECT'
+
+    # Number of seconds to wait for notifications to show/disappear
+    NOTIFICATION_WAIT_TIMEOUT = 30
+
+    def initialize(self):
+        """Autotest initialize function"""
+        self.xmlrpc_delegate = \
+            bluetooth_device_xmlrpc_server.BluetoothDeviceXmlRpcDelegate()
+        super(bluetooth_FastPairUI, self).initialize(raise_error_on_hang=True)
+
+    def cleanup(self):
+        """Autotest cleanup function"""
+        if self._GSC:
+            keyvals = self._GSC.get_memory_difference_keyvals()
+            for key, val in keyvals.items():
+                self.output_perf_value(description=key,
+                                       value=val,
+                                       units='bytes',
+                                       higher_is_better=False)
+            self.write_perf_keyval(keyvals)
+        super(bluetooth_FastPairUI, self).cleanup()
+
+    def find_notification(self, expected_id):
+        """Returns True if notification with expected_id is found"""
+        notifications = self._cr.get_visible_notifications()
+        return any([n['id'] == expected_id for n in (notifications or [])])
+
+    def wait_for_notification_to_show(self, expected_id):
+        """Wait for the notification with expected_id to show"""
+        logging.info('Waiting for notification with id:%s to show',
+                     expected_id)
+        utils.poll_for_condition(
+                condition=lambda: self.find_notification(expected_id),
+                exception=error.TestError(
+                        'Timed out waiting for {} notification to '
+                        'show'.format(expected_id)),
+                timeout=self.NOTIFICATION_WAIT_TIMEOUT)
+
+    def wait_for_notification_to_disappear(self, expected_id):
+        """Wait for the notification with expected_id to disappear"""
+        logging.info('Waiting for notification with id:%s to disappear',
+                     expected_id)
+        utils.poll_for_condition(
+                condition=lambda: not self.find_notification(expected_id),
+                exception=error.TestError(
+                        'Timed out waiting for {} notification to '
+                        'disappear'.format(expected_id)),
+                timeout=self.NOTIFICATION_WAIT_TIMEOUT)
+
+    def wait_for_discovery_notification(self):
+        """Wait for an instance of the discovery notification to show"""
+        logging.info('Waiting for discovery notification to show.')
+        utils.poll_for_condition(
+                condition=lambda: (self.find_notification(
+                        self.DISCOVERY_GUEST_ID) or self.find_notification(
+                                self.DISCOVERY_USER_ID)),
+                exception=error.TestError(
+                        'Timed out waiting for discovery notification to show'),
+                timeout=self.NOTIFICATION_WAIT_TIMEOUT)
+
+    def run_once(self, username, password):
+        """Click through the Fast Pair pairing flow UI"""
+        try:
+            # (b/221155928) Remove enable_features when it is on by default.
+            with chrome.Chrome(autotest_ext=True,
+                               enable_features='FastPair',
+                               gaia_login=True,
+                               username=username,
+                               password=password) as cr:
+                ui = ui_utils.UI_Handler()
+                ui.start_ui_root(cr)
+                self._cr = cr
+
+                # Wait for the initial discovery notification to show.
+                self.wait_for_discovery_notification()
+
+                # Click 'connect' on the discovery notification.
+                ui.doDefault_on_obj(name=self.CONNECT,
+                                    isRegex=False,
+                                    role=self.BUTTON_ROLE)
+
+                # Wait for the pairing notification to show and then disappear.
+                self.wait_for_notification_to_show(self.PAIRING_ID)
+                self.wait_for_notification_to_disappear(self.PAIRING_ID)
+
+                # Check if the error notification is shown.
+                if self.find_notification(self.ERROR_ID):
+                    raise error.TestFail('Pairing failed.')
+        except error.TestFail:
+            raise
+        except Exception as e:
+            logging.error('Exception "%s" seen during test', e)
+            raise error.TestFail('Exception "%s" seen during test' % e)
+        finally:
+            self.xmlrpc_delegate.reset_on()
diff --git a/client/site_tests/bluetooth_FastPairUI/control b/client/site_tests/bluetooth_FastPairUI/control
new file mode 100644
index 0000000..b247dd1
--- /dev/null
+++ b/client/site_tests/bluetooth_FastPairUI/control
@@ -0,0 +1,23 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "shafit, Chromium OS, chromeos-cross-device-eng@google.com"
+NAME = "bluetooth_FastPairUI"
+PURPOSE = "To test the Fast Pair feature"
+CRITERIA = "Fail if any hang or crash"
+ATTRIBUTES = ''
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "Bluetooth"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test is used in conjunction with the bluetoothFastPair server test. The
+server test sets up the environment (e.g. creating a Fast Pair device and starting
+scanning), and then invokes this test to run through the UI and attempt to pair
+the device.
+"""
+
+job.run_test("bluetooth_FastPairUI")
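The DOC above says a server-side Fast Pair test prepares the environment and then hands off
to this client test. A minimal sketch of how an autotest server test typically launches a
client test on the DUT (the helper name and the exact arguments the real server test passes
are assumptions; host is the server test's DUT object):

from autotest_lib.server import autotest

def run_fast_pair_ui_on_dut(host, username, password):
    # Install and run the client-side test on the DUT, collecting its results.
    client_at = autotest.Autotest(host)
    client_at.run_test('bluetooth_FastPairUI',
                       username=username,
                       password=password)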
diff --git a/client/site_tests/bluetooth_RegressionClient/bluetooth_RegressionClient.py b/client/site_tests/bluetooth_RegressionClient/bluetooth_RegressionClient.py
deleted file mode 100644
index c7586f2..0000000
--- a/client/site_tests/bluetooth_RegressionClient/bluetooth_RegressionClient.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, time
-
-from autotest_lib.client.cros.bluetooth import bluetooth_semiauto_helper
-
-
-class bluetooth_RegressionClient(
-        bluetooth_semiauto_helper.BluetoothSemiAutoHelper):
-    """Implement Bluetooth Regression Tests with some interaction."""
-    version = 1
-
-    def _test_init(self, test_type):
-        """Init test by collecting initial logs, starting dump, etc.
-
-        @param: test_type: short string label for log files and messages
-        """
-        self._test_type = test_type
-        logging.info('Beginning test of type %s.', test_type)
-        self.start_dump()
-        self.collect_logs(message=('Before %s.' % test_type))
-
-    def _power_off(self):
-        self._test_init('power_off')
-
-    def _os_idle(self):
-        self._test_init('os_idle')
-        self.ask_user('OS Idle test: after pressing PASS, the OS will idle '
-                      'after a short delay.  Do not prevent it from idling.'
-                      '<br>After OS has idled for at least 10 seconds, use '
-                      'a Bluetooth device to wake machine (or use onboard '
-                      'inputs if no Bluetooth device is capable).<br>'
-                      'Make sure audio continues to play over Bluetooth.')
-        self.os_idle_time_set()
-        self.tell_user('Going to sleep now...')
-        time.sleep(20)
-        self.check_working()
-        self.os_idle_time_set(reset=True)
-        self.collect_logs(message='After idle.')
-
-    def _suspend(self):
-        self._test_init('suspend')
-        self.ask_user('OS Suspend test: after pressing PASS, the OS will '
-                      'suspend.<br>It will wake on its own after some time.'
-                      '<br>Audio will stop playing.')
-        self.os_suspend()
-        self.check_working()
-        self.collect_logs(message='After suspend.')
-
-    def _log_off(self):
-        self._test_init('log_off')
-        self.close_browser()
-        self.login_and_open_browser()
-        self.check_working()
-        self.collect_logs(message='After login.')
-
-    def _disconnect(self):
-        self._test_init('disconnect')
-        self.tell_user('Please disconnect all Bluetooth devices using (x).')
-        self.wait_for_adapter(adapter_status=True)
-        self.wait_for_connections(paired_status=True, connected_status=False)
-        self.ask_user('Audio NOT playing through onboard speakers?<br>'
-                      'Audio NOT playing through Bluetooth device?')
-        self.collect_logs(message='After disconnect.')
-        self.check_working()
-        self.collect_logs(message='After reconnect.')
-
-    def _device_off(self):
-        self._test_init('device_off')
-        self.tell_user('Please turn off all Bluetooth devices.<br>'
-                       'Disconnect them on the Settings page if needed.')
-        self.wait_for_adapter(adapter_status=True)
-        self.wait_for_connections(paired_status=True, connected_status=False)
-        self.ask_user('Audio NOT playing through onboard speakers?')
-        self.collect_logs(message='After device turned off.')
-        self.check_working(message='Please turn devices back on and connect.')
-        self.collect_logs(message='After device on.')
-
-    def _unpair(self):
-        self._test_init('unpair')
-        self.tell_user('Please unpair all Bluetooth devices (using (x))')
-        self.wait_for_adapter(adapter_status=True)
-        self.wait_for_connections(paired_status=False, connected_status=False)
-        self.ask_user('No Bluetooth devices work.<br> Audio is NOT playing '
-                      'through onboard speakers or wired headphones.')
-        self.collect_logs(message='After unpair.')
-        self.check_working(message='Please re-pair and connect devices.')
-        self.collect_logs(message='After re-pair.')
-
-    def _disable(self):
-        self._test_init('disable')
-        self.tell_user('Please disable Bluetooth (uncheck Enable Bluetooth).')
-        self.wait_for_adapter(adapter_status=False)
-        self.collect_logs(message='While disabled')
-        self.wait_for_connections(paired_status=True, connected_status=False)
-        self.ask_user('No Bluetooth devices work?<br> Audio is NOT playing '
-                      'through onboard speakers or wired headphones?')
-        self.tell_user('Please enable Bluetooth (check Enable Bluetooth).<br>'
-                       'Make sure all devices are still listed after enable.')
-        self.wait_for_adapter(adapter_status=True)
-        self.check_working()
-        self.collect_logs(message='After re-enable.')
-
-    def run_once(self):
-        """Runs Regression tests for Bluetooth.
-
-        Two phases: before and after reboot by server. Called by run_test.
-        """
-        self.check_working()
-
-        if self._test_phase == 'reboot':
-            self._disable()
-            self._power_off()
-        elif self._test_phase == 'client':
-            self._power_off()
-            self._os_idle()
-            self._suspend()
-            self._log_off()
-            self._disconnect()
-            self._device_off()
-            self._unpair()
-
diff --git a/client/site_tests/bluetooth_RegressionClient/control b/client/site_tests/bluetooth_RegressionClient/control
deleted file mode 100644
index de7ab6d..0000000
--- a/client/site_tests/bluetooth_RegressionClient/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "bluetooth_RegressionClient"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "bluetooth"
-TEST_TYPE = "client"
-
-
-DOC = """
-Client Side Bluetooth Semi-Auto Regression Tests.
-
-Implements most of the Bluetooth Regression Tests with some tester
-interaction (putting external devices in pairing mode, verifying audio
-quality, etc.).
-"""
-
-job.run_test('bluetooth_RegressionClient', args)
-
diff --git a/client/site_tests/bluetooth_TurnOnOffUI/bluetooth_TurnOnOffUI.py b/client/site_tests/bluetooth_TurnOnOffUI/bluetooth_TurnOnOffUI.py
deleted file mode 100644
index 0e04077..0000000
--- a/client/site_tests/bluetooth_TurnOnOffUI/bluetooth_TurnOnOffUI.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import ui_utils
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.graphics import graphics_utils
-from autotest_lib.client.cros.bluetooth import bluetooth_device_xmlrpc_server
-
-
-class bluetooth_TurnOnOffUI(graphics_utils.GraphicsTest):
-
-    """Go to Status Tray and turn BT On/Off"""
-    version = 1
-
-    # Node roles
-    BUTTON_ROLE = "button"
-    SWITCH_ROLE = "switch"
-
-    # Node names
-    STATUS_TRAY_REGEXP = "/Status tray, /i"
-    SHOW_BLUETOOTH_SETTINGS = "/Show Bluetooth settings./i"
-    BLUETOOTH = "Bluetooth"
-    DELAY_BW_TOGGLE_ON_OFF = 5
-
-    def initialize(self):
-        """Autotest initialize function"""
-        self.xmlrpc_delegate = \
-            bluetooth_device_xmlrpc_server.BluetoothDeviceXmlRpcDelegate()
-        super(bluetooth_TurnOnOffUI, self).initialize(raise_error_on_hang=True)
-
-    def cleanup(self):
-        """Autotest cleanup function"""
-        if self._GSC:
-            keyvals = self._GSC.get_memory_difference_keyvals()
-            for key, val in keyvals.iteritems():
-                self.output_perf_value(
-                    description=key,
-                    value=val,
-                    units='bytes',
-                    higher_is_better=False)
-            self.write_perf_keyval(keyvals)
-        super(bluetooth_TurnOnOffUI, self).cleanup()
-
-    def open_status_tray(self, ui):
-        """Open status tray
-
-        @param ui: ui object
-        """
-        logging.info("Opening status tray")
-        ui.doDefault_on_obj(self.STATUS_TRAY_REGEXP, True, self.BUTTON_ROLE)
-        ui.wait_for_ui_obj(self.SHOW_BLUETOOTH_SETTINGS, True,
-                           role=self.BUTTON_ROLE)
-
-    def open_bluetooth_page(self, ui):
-        """Opens bluetooth settings in tray
-
-        @param ui: ui object
-        """
-        logging.info("Opening bluetooth settings in tray")
-        ui.doDefault_on_obj(self.SHOW_BLUETOOTH_SETTINGS, True,
-                            self.BUTTON_ROLE)
-        ui.wait_for_ui_obj(self.BLUETOOTH, False, role=self.SWITCH_ROLE)
-
-    def is_bluetooth_enabled(self):
-        """Returns True if bluetoothd is powered on, otherwise False"""
-
-        return self.xmlrpc_delegate._is_powered_on()
-
-    def turn_on_bluetooth(self, ui):
-        """Turn on BT in status tray
-
-        @param ui: ui object
-        """
-        if self.is_bluetooth_enabled():
-            logging.info('Bluetooth is turned on already..')
-        else:
-            logging.info("Turning on bluetooth")
-            ui.doDefault_on_obj(self.BLUETOOTH, False, self.SWITCH_ROLE)
-            time.sleep(self.DELAY_BW_TOGGLE_ON_OFF)
-            if self.is_bluetooth_enabled():
-                logging.info('Turned on BT successfully..')
-            else:
-                raise error.TestFail('BT is not turned on..')
-
-    def turn_off_bluetooth(self, ui):
-        """Turn off BT in status tray
-
-        @param ui: ui object
-        """
-        if not self.is_bluetooth_enabled():
-            logging.info('Bluetooth is turned off already')
-        else:
-            logging.info("Turning off bluetooth")
-            ui.doDefault_on_obj(self.BLUETOOTH, False, self.SWITCH_ROLE)
-            time.sleep(self.DELAY_BW_TOGGLE_ON_OFF)
-            if not self.is_bluetooth_enabled():
-                logging.info('Turned off BT successfully..')
-            else:
-                raise error.TestFail('Bluetooth is not turned off within time')
-
-    def run_once(self, iteration_count=3):
-        """Turn on/off bluetooth in status tray
-
-        @param iteration_count: Number of iterations to toggle on/off
-
-        """
-        try:
-            with chrome.Chrome(autotest_ext=True) as cr:
-                ui = ui_utils.UI_Handler()
-                ui.start_ui_root(cr)
-                self.open_status_tray(ui)
-                self.open_bluetooth_page(ui)
-                logging.info("Turning off bluetooth before start test")
-                self.turn_off_bluetooth(ui)
-                for iteration in range(1, iteration_count + 1):
-                    logging.info("Iteration: %d", iteration)
-                    self.turn_on_bluetooth(ui)
-                    self.turn_off_bluetooth(ui)
-        except error.TestFail:
-            raise
-        except Exception as e:
-            logging.error('Exception "%s" seen during test', e)
-            raise error.TestFail('Exception "%s" seen during test' % e)
-        finally:
-            self.xmlrpc_delegate.reset_on()
diff --git a/client/site_tests/bluetooth_TurnOnOffUI/control b/client/site_tests/bluetooth_TurnOnOffUI/control
deleted file mode 100644
index 0f565bd..0000000
--- a/client/site_tests/bluetooth_TurnOnOffUI/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Intel"
-NAME = "bluetooth_TurnOnOffUI"
-PURPOSE = "To test the DUT state on BT turned on/off through UI"
-CRITERIA = "Fail if any hang or crash"
-ATTRIBUTES = 'suite:bluetooth_flaky'
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Bluetooth"
-TEST_TYPE = "client"
-DOC = """
-Go to status tray and turn on/off BT
-Command to execute is
-test_that -b $BOARD $DUT_IP $TEST
-"""
-
-job.run_test("bluetooth_TurnOnOffUI", iteration_count=30)
diff --git a/client/site_tests/camera_HAL3/camera_HAL3.py b/client/site_tests/camera_HAL3/camera_HAL3.py
deleted file mode 100644
index 51ddb58..0000000
--- a/client/site_tests/camera_HAL3/camera_HAL3.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A test which verifies the camera function with HAL3 interface."""
-
-import contextlib
-import json
-import logging
-import os
-import xml.etree.ElementTree
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import service_stopper
-from autotest_lib.client.cros.camera import camera_utils
-from autotest_lib.client.cros.video import device_capability
-from sets import Set
-
-
-class camera_HAL3(test.test):
-    """
-    This test is a wrapper of the test binary cros_camera_test.
-    """
-
-    version = 1
-    test_binary = 'cros_camera_test'
-    dep = 'camera_hal3'
-    cros_camera_service = 'cros-camera'
-    media_profiles_path = os.path.join('vendor', 'etc', 'media_profiles.xml')
-    tablet_board_list = ['scarlet', 'nocturne']
-    test_config_path = '/var/cache/camera/test_config.json'
-
-    def setup(self):
-        """
-        Run common setup steps.
-        """
-        self.dep_dir = os.path.join(self.autodir, 'deps', self.dep)
-        self.job.setup_dep([self.dep])
-        logging.debug('mydep is at %s', self.dep_dir)
-
-    @contextlib.contextmanager
-    def set_test_config(self, test_config):
-        with open(self.test_config_path, 'w') as fp:
-            json.dump(test_config, fp)
-        yield
-        os.remove(self.test_config_path)
-
-    def get_recording_params(self):
-        """
-        Get recording parameters from media profiles
-        """
-        xml_content = utils.system_output([
-            'android-sh', '-c',
-            'cat "%s"' % utils.sh_escape(self.media_profiles_path)
-        ])
-        root = xml.etree.ElementTree.fromstring(xml_content)
-        recording_params = Set()
-        for camcorder_profiles in root.findall('CamcorderProfiles'):
-            for encoder_profile in camcorder_profiles.findall('EncoderProfile'):
-                video = encoder_profile.find('Video')
-                recording_params.add('%s:%s:%s:%s' % (
-                    camcorder_profiles.get('cameraId'), video.get('width'),
-                    video.get('height'), video.get('frameRate')))
-        return '--recording_params=' + ','.join(recording_params)
-
-    def run_once(self,
-                 cmd_timeout=600,
-                 camera_hals=None,
-                 options=None,
-                 capability=None,
-                 test_config=None):
-        """
-        Entry point of this test.
-
-        @param cmd_timeout: Seconds. Timeout for running the test command.
-        @param camera_hals: The camera HALs to be tested. e.g. ['usb.so']
-        @param options: Option strings passed to test command. e.g. ['--v=1']
-        @param capability: Capability required for executing this test.
-        """
-        if options is None:
-            options = []
-
-        if test_config is None:
-            test_config = {}
-
-        if capability:
-            device_capability.DeviceCapability().ensure_capability(capability)
-
-        self.job.install_pkg(self.dep, 'dep', self.dep_dir)
-
-        camera_hal_paths = camera_utils.get_camera_hal_paths_for_test()
-        if camera_hals is not None:
-            name_map = dict((os.path.basename(path), path)
-                            for path in camera_hal_paths)
-            camera_hal_paths = []
-            for name in camera_hals:
-                path = name_map.get(name)
-                if path is None:
-                    msg = 'HAL %r is not available for test' % name
-                    raise error.TestNAError(msg)
-                camera_hal_paths.append(path)
-
-        binary_path = os.path.join(self.dep_dir, 'bin', self.test_binary)
-
-        with service_stopper.ServiceStopper([self.cros_camera_service]), \
-                self.set_test_config(test_config):
-            has_facing_option = False
-            cmd = ['sudo', '--user=arc-camera', binary_path]
-            for option in options:
-                if 'gtest_filter' in option:
-                    filters = option.split('=')[1]
-                    if 'Camera3DeviceTest' in filters.split('-')[0]:
-                        if utils.get_current_board() in self.tablet_board_list:
-                            option += (':' if '-' in filters else '-')
-                            option += '*SensorOrientationTest/*'
-                    if any(name in filters.split('-')[0] for name in
-                           ('Camera3ModuleFixture', 'Camera3RecordingFixture')):
-                        cmd.append(self.get_recording_params())
-                elif 'camera_facing' in option:
-                    has_facing_option = True
-                cmd.append(option)
-
-            if has_facing_option:
-                utils.system(cmd, timeout=cmd_timeout)
-            else:
-                for camera_hal_path in camera_hal_paths:
-                    logging.info('Run test with %r', camera_hal_path)
-                    cmd.append('--camera_hal_path=%s' % camera_hal_path)
-                    utils.system(cmd, timeout=cmd_timeout)
-                    cmd.pop()
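The deleted get_recording_params above walks the CamcorderProfiles/EncoderProfile/Video
nodes of media_profiles.xml to build the --recording_params flag. A self-contained sketch
of that parsing (the sample XML below is a made-up, heavily trimmed stand-in for the real
media_profiles.xml):

import xml.etree.ElementTree as ET

def recording_params(xml_content):
    root = ET.fromstring(xml_content)
    params = set()
    for profiles in root.findall('CamcorderProfiles'):
        for encoder_profile in profiles.findall('EncoderProfile'):
            video = encoder_profile.find('Video')
            # cameraId:width:height:frameRate, one entry per encoder profile.
            params.add('%s:%s:%s:%s' % (
                    profiles.get('cameraId'), video.get('width'),
                    video.get('height'), video.get('frameRate')))
    return '--recording_params=' + ','.join(sorted(params))

sample = '''<MediaSettings>
  <CamcorderProfiles cameraId="0">
    <EncoderProfile quality="480p">
      <Video width="640" height="480" frameRate="30"/>
    </EncoderProfile>
  </CamcorderProfiles>
</MediaSettings>'''
print(recording_params(sample))  # --recording_params=0:640:480:30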
diff --git a/client/site_tests/camera_HAL3/control.device b/client/site_tests/camera_HAL3/control.device
deleted file mode 100644
index b902138..0000000
--- a/client/site_tests/camera_HAL3/control.device
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team, chromeos-video@google.com"
-NAME = "camera_HAL3.device"
-PURPOSE = "Verify Chromium camera device function with HAL3 interface."
-CRITERIA = """
-This test will fail if any of the device tests in cros_camera_test fails.
-"""
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-  'labels': ['OS-Chrome', 'VideoTestFailure'],
-  'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This is a wrapper test for cros_camera_test.
-For more information on HAL3 see:
-https://source.android.com/devices/camera/camera3.html
-https://android.googlesource.com/platform/hardware/libhardware/+/master/include/hardware/camera3.h
-"""
-
-job.run_test('camera_HAL3', options=['--gtest_filter=Camera3DeviceTest/*'])
diff --git a/client/site_tests/camera_HAL3/control.frame b/client/site_tests/camera_HAL3/control.frame
deleted file mode 100644
index f1854ba..0000000
--- a/client/site_tests/camera_HAL3/control.frame
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "Chrome OS Team, chromeos-video@google.com"
-NAME = "camera_HAL3.frame"
-PURPOSE = "Verify Chromium camera frame function with HAL3 interface."
-CRITERIA = """
-This test will fail if any of the frame tests in cros_camera_test
-fails.
-"""
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    'labels': ['OS-Chrome', 'VideoTestFailure'],
-    'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This is a wrapper test for cros_camera_test.
-For more information on HAL3 see:
-https://source.android.com/devices/camera/camera3.html
-https://android.googlesource.com/platform/hardware/libhardware/+/master/include/hardware/camera3.h
-"""
-
-facing_options = [
-    '--%s=%s' % kv
-    for kv in utils.args_to_dict(args).items()
-    if kv[0] == 'camera_facing'
-]
-
-job.run_test(
-    'camera_HAL3',
-    cmd_timeout=900,
-    options=['--gtest_filter=Camera3FrameTest/*'] + facing_options)
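The facing_options construction above (repeated in several of the camera_HAL3 control files
below) forwards a camera_facing=... argument from test_that into a --camera_facing flag. A
plain-Python stand-in for utils.args_to_dict illustrates the transformation (the real helper
may normalize keys differently):

def args_to_dict(args):
    # 'key=value' strings -> dict, mirroring how the control files use it.
    return dict(arg.split('=', 1) for arg in args if '=' in arg)

args = ['camera_facing=front', 'unrelated=ignored']
facing_options = ['--%s=%s' % kv
                  for kv in args_to_dict(args).items()
                  if kv[0] == 'camera_facing']
print(facing_options)  # ['--camera_facing=front']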
diff --git a/client/site_tests/camera_HAL3/control.jda b/client/site_tests/camera_HAL3/control.jda
deleted file mode 100644
index 14bf509..0000000
--- a/client/site_tests/camera_HAL3/control.jda
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "Chrome OS Team, chromeos-video@google.com"
-NAME = "camera_HAL3.jda"
-PURPOSE = "Verify Jpeg decode accelerator works in USB HALv3."
-CRITERIA = """
-This test will fail if there is a JDA decode error.
-"""
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    'labels': ['OS-Chrome', 'VideoTestFailure'],
-    'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This test runs camera3 test to verify JDA function.
-"""
-
-facing_options = [
-    '--%s=%s' % kv
-    for kv in utils.args_to_dict(args).items()
-    if kv[0] == 'camera_facing'
-]
-
-job.run_test(
-    'camera_HAL3',
-    cmd_timeout=5,
-    camera_hals=['usb.so'],
-    options=['--gtest_filter=*/Camera3SingleFrameTest.GetFrame/0'] +
-    facing_options,
-    capability='hw_dec_jpeg',
-    test_config={'force_jpeg_hw_dec': True})
diff --git a/client/site_tests/camera_HAL3/control.jea b/client/site_tests/camera_HAL3/control.jea
deleted file mode 100644
index 472ff9f..0000000
--- a/client/site_tests/camera_HAL3/control.jea
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "Chrome OS Team, chromeos-video@google.com"
-NAME = "camera_HAL3.jea"
-PURPOSE = "Verify Jpeg encode accelerator works in USB HALv3."
-CRITERIA = """
-This test will fail if there is a JEA encode error.
-"""
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    'labels': ['OS-Chrome', 'VideoTestFailure'],
-    'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This test runs camera3 test to verify JEA function.
-"""
-
-facing_options = [
-    '--%s=%s' % kv
-    for kv in utils.args_to_dict(args).items()
-    if kv[0] == 'camera_facing'
-]
-
-job.run_test(
-    'camera_HAL3',
-    cmd_timeout=10,
-    options=[
-        '--gtest_filter=*/Camera3SimpleStillCaptureTest.TakePictureTest/0'
-    ] + facing_options,
-    capability='hw_enc_jpeg',
-    test_config={'force_jpeg_hw_enc': True})
diff --git a/client/site_tests/camera_HAL3/control.module b/client/site_tests/camera_HAL3/control.module
deleted file mode 100644
index b2893ac..0000000
--- a/client/site_tests/camera_HAL3/control.module
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team, chromeos-video@google.com"
-NAME = "camera_HAL3.module"
-PURPOSE = "Verify Chromium camera module function with HAL3 interface."
-CRITERIA = """
-This test will fail if any of the module tests in cros_camera_test fails.
-"""
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-  'labels': ['OS-Chrome', 'VideoTestFailure'],
-  'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This is a wrapper test for cros_camera_test.
-For more information on HAL3 see:
-https://source.android.com/devices/camera/camera3.html
-https://android.googlesource.com/platform/hardware/libhardware/+/master/include/hardware/camera3.h
-"""
-
-job.run_test('camera_HAL3', options=['--gtest_filter=Camera3ModuleFixture.*'])
diff --git a/client/site_tests/camera_HAL3/control.preview b/client/site_tests/camera_HAL3/control.preview
deleted file mode 100644
index a55ed55..0000000
--- a/client/site_tests/camera_HAL3/control.preview
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "Chrome OS Team, chromeos-video@google.com"
-NAME = "camera_HAL3.preview"
-PURPOSE = "Verify Chromium camera preview function with HAL3 interface."
-CRITERIA = """
-This test will fail if any of the preview tests in cros_camera_test fails.
-"""
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    'labels': ['OS-Chrome', 'VideoTestFailure'],
-    'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This is a wrapper test for cros_camera_test.
-For more information on HAL3 see:
-https://source.android.com/devices/camera/camera3.html
-https://android.googlesource.com/platform/hardware/libhardware/+/master/include/hardware/camera3.h
-"""
-
-facing_options = [
-    '--%s=%s' % kv
-    for kv in utils.args_to_dict(args).items()
-    if kv[0] == 'camera_facing'
-]
-
-job.run_test(
-    'camera_HAL3',
-    options=['--gtest_filter=Camera3PreviewTest/*'] + facing_options)
diff --git a/client/site_tests/camera_HAL3/control.recording b/client/site_tests/camera_HAL3/control.recording
deleted file mode 100644
index 49f406c..0000000
--- a/client/site_tests/camera_HAL3/control.recording
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "Chrome OS Team, chromeos-video@google.com"
-NAME = "camera_HAL3.recording"
-PURPOSE = "Verify Chromium camera recording function with HAL3 interface."
-CRITERIA = """
-This test will fail if any of the recording tests in cros_camera_test fails.
-"""
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    'labels': ['OS-Chrome', 'VideoTestFailure'],
-    'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This is a wrapper test for cros_camera_test.
-For more information on HAL3 see:
-https://source.android.com/devices/camera/camera3.html
-https://android.googlesource.com/platform/hardware/libhardware/+/master/include/hardware/camera3.h
-"""
-
-facing_options = [
-    '--%s=%s' % kv
-    for kv in utils.args_to_dict(args).items()
-    if kv[0] == 'camera_facing'
-]
-
-job.run_test(
-    'camera_HAL3',
-    options=['--gtest_filter=Camera3RecordingFixture/*'] + facing_options)
diff --git a/client/site_tests/camera_HAL3/control.still_capture b/client/site_tests/camera_HAL3/control.still_capture
deleted file mode 100644
index 34f9cc5..0000000
--- a/client/site_tests/camera_HAL3/control.still_capture
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "Chrome OS Team, chromeos-video@google.com"
-NAME = "camera_HAL3.still_capture"
-PURPOSE = "Verify Chromium camera still capture function with HAL3 interface."
-CRITERIA = """
-This test will fail if any of the still capture tests in cros_camera_test
-fails.
-"""
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    'labels': ['OS-Chrome', 'VideoTestFailure'],
-    'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This is a wrapper test for cros_camera_test.
-For more information on HAL3 see:
-https://source.android.com/devices/camera/camera3.html
-https://android.googlesource.com/platform/hardware/libhardware/+/master/include/hardware/camera3.h
-"""
-
-facing_options = [
-    '--%s=%s' % kv
-    for kv in utils.args_to_dict(args).items()
-    if kv[0] == 'camera_facing'
-]
-
-job.run_test(
-    'camera_HAL3',
-    options=['--gtest_filter=Camera3StillCaptureTest/*'] + facing_options)
diff --git a/client/site_tests/camera_HAL3/control.stream b/client/site_tests/camera_HAL3/control.stream
deleted file mode 100644
index 6a25955..0000000
--- a/client/site_tests/camera_HAL3/control.stream
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "Chrome OS Team, chromeos-video@google.com"
-NAME = "camera_HAL3.stream"
-PURPOSE = "Verify Chromium camera stream function with HAL3 interface."
-CRITERIA = """
-This test will fail if any of the stream tests in cros_camera_test fails.
-"""
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    'labels': ['OS-Chrome', 'VideoTestFailure'],
-    'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This is a wrapper test for cros_camera_test.
-For more information on HAL3 see:
-https://source.android.com/devices/camera/camera3.html
-https://android.googlesource.com/platform/hardware/libhardware/+/master/include/hardware/camera3.h
-"""
-
-facing_options = [
-    '--%s=%s' % kv
-    for kv in utils.args_to_dict(args).items()
-    if kv[0] == 'camera_facing'
-]
-
-job.run_test(
-    'camera_HAL3',
-    options=['--gtest_filter=Camera3StreamTest/*'] + facing_options)
diff --git a/client/site_tests/camera_HAL3Perf/camera_HAL3Perf.py b/client/site_tests/camera_HAL3Perf/camera_HAL3Perf.py
index 267f0ce..51fd3cc 100644
--- a/client/site_tests/camera_HAL3Perf/camera_HAL3Perf.py
+++ b/client/site_tests/camera_HAL3Perf/camera_HAL3Perf.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,11 +11,15 @@
     4. Shot to shot time
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import os, logging
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros import service_stopper
 from autotest_lib.client.cros.camera import camera_utils
+from six.moves import map
 
 
 class camera_HAL3Perf(test.test):
@@ -65,7 +70,7 @@
                     else:
                         msg = 'Error in parsing the log file (%s)' % log_file
                         raise error.TestFail(msg)
-        except IOError, err:
+        except IOError as err:
             msg = 'Error in reading the log file (%s): %s' % (log_file, err)
             raise error.TestFail(msg)
 
diff --git a/client/site_tests/camera_HAL3Perf/control b/client/site_tests/camera_HAL3Perf/control
index 37d3c3a..03d921a 100644
--- a/client/site_tests/camera_HAL3Perf/control
+++ b/client/site_tests/camera_HAL3Perf/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team, chromeos-video@google.com"
+AUTHOR = "ChromeOS Team, chromeos-video@google.com"
 NAME = "camera_HAL3Perf"
 PURPOSE = "Monitor the performance of the camera HAL3."
 CRITERIA = """
@@ -18,6 +18,7 @@
     'labels': ['OS-Chrome', 'VideoTestFailure'],
     'cc': ['chromeos-video-test-failures@google.com'],
 }
+PY_VERSION = 3
 
 DOC = """
 This test utilizes the cros_camera_test to measure the performance of the
diff --git a/client/site_tests/camera_V4L2/camera_V4L2.py b/client/site_tests/camera_V4L2/camera_V4L2.py
deleted file mode 100644
index 632570a..0000000
--- a/client/site_tests/camera_V4L2/camera_V4L2.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.video import device_capability
-
-
-class camera_V4L2(test.test):
-    version = 1
-    preserve_srcdir = True
-
-    def run_once(self, capability=None, test_list=None):
-        if capability is not None:
-            device_capability.DeviceCapability().ensure_capability(capability)
-        # Enable USB camera HW timestamp
-        path = "/sys/module/uvcvideo/parameters/hwtimestamps"
-        if os.path.exists(path):
-            utils.system("echo 1 > %s" % path)
-
-        if test_list is None:
-            test_list = "halv3" if self.should_test_halv3() else "default"
-        self.test_list = test_list
-
-        self.find_video_capture_devices()
-
-        for device in self.v4l2_devices:
-            self.run_v4l2_test(device)
-
-    def should_test_halv3(self):
-        has_v3 = os.path.exists('/usr/bin/cros_camera_service')
-        has_v1 = os.path.exists('/usr/bin/arc_camera_service')
-        return has_v3 and not has_v1
-
-    def find_video_capture_devices(self):
-        cmd = ["media_v4l2_test", "--list_usbcam"]
-        stdout = utils.system_output(cmd, retain_output=True)
-        self.v4l2_devices = stdout.splitlines()
-        if not self.v4l2_devices:
-            raise error.TestFail("No V4L2 devices found!")
-
-    def run_v4l2_test(self, device):
-        cmd = [
-                "media_v4l2_test",
-                "--device_path=%s" % device,
-        ]
-        if self.test_list:
-            cmd.append("--test_list=%s" % self.test_list)
-
-        logging.info("Running %s", cmd)
-        stdout = utils.system_output(cmd, retain_output=True)
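The deleted camera_V4L2 wrapper above first lists capture devices with
media_v4l2_test --list_usbcam and then re-runs the binary once per device with
--device_path and --test_list. A rough standalone sketch of that loop using the standard
library (the binary and its flags come from the deleted code and are assumed to exist on
the DUT):

import subprocess

def run_v4l2_tests(test_list='default'):
    listing = subprocess.run(['media_v4l2_test', '--list_usbcam'],
                             capture_output=True, text=True, check=True)
    devices = [line for line in listing.stdout.splitlines() if line]
    if not devices:
        raise RuntimeError('No V4L2 devices found!')
    for device in devices:
        # One full test pass per detected capture device.
        subprocess.run(['media_v4l2_test',
                        '--device_path=%s' % device,
                        '--test_list=%s' % test_list],
                       check=True)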
diff --git a/client/site_tests/camera_V4L2/control b/client/site_tests/camera_V4L2/control
deleted file mode 100644
index de3ea50..0000000
--- a/client/site_tests/camera_V4L2/control
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'The Chromium OS Authors'
-NAME = 'camera_V4L2'
-PURPOSE = 'Exercises v4l2 camera devices to verify required operations.'
-CRITERIA = """
-This is a complex test and exercises many different functions. This test will fail
-if any of the following conditions occur:
-  - No v4L2 device is found
-  - If a mandatory control is not supported
-  - If streaming is not supported
-  - If a required resolution is not supported when capturing a stream
-"""
-# Do not run it in bvt-perbuild suite since it's already migrated to Tast.
-# The Autotest version is kept for moblab AVL usage (https://crbug.com/1001983).
-ATTRIBUTES = ""
-TIME='MEDIUM'
-TEST_CATEGORY = 'V4L2'
-TEST_TYPE = 'client'
-BUG_TEMPLATE = {
-    'labels': ['OS-Chrome', 'VideoTestFailure'],
-    'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This test executes media_v4l2_test and media_v4l2_unittest binaries.
-For more information on V4L2 see:
-http://www.linuxfordevices.com/c/a/Linux-For-Devices-Articles/Intro-to-V4L2/
-"""
-
-capability='builtin_usb_camera'
-job.run_test('camera_V4L2', capability=capability)
diff --git a/client/site_tests/camera_V4L2/control.certification b/client/site_tests/camera_V4L2/control.certification
deleted file mode 100644
index 458227d..0000000
--- a/client/site_tests/camera_V4L2/control.certification
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'The Chromium OS Authors'
-NAME = 'camera_V4L2.certification'
-PURPOSE = 'Exercises v4l2 camera devices to verify required operations.'
-CRITERIA = """
-This is used for third-party lab to verify new camera modules.
-It is a complex test and tests many different functions. This test will fail
-if any of the following conditions occur:
-  - No camera configuration file is found
-  - No v4L2 device is found
-  - If a mandatory control is not supported
-  - If streaming is not supported
-  - If a required resolution is not supported when capturing a stream
-  - If frame rate is not constant
-  - If cropping doesn't meet android cropping requirement
-"""
-ATTRIBUTES = "suite:usb-camera"
-TIME='MEDIUM'
-TEST_CATEGORY = 'V4L2'
-TEST_TYPE = 'client'
-BUG_TEMPLATE = {
-    'labels': ['OS-Chrome', 'VideoTestFailure'],
-    'cc': ['chromeos-video-test-failures@google.com'],
-}
-
-DOC = """
-This test executes media_v4l2_test and media_v4l2_unittest binaries.
-For more information on V4L2 see:
-http://www.linuxfordevices.com/c/a/Linux-For-Devices-Articles/Intro-to-V4L2/
-
-This is used for third-party lab to verify camera module. The camera module is
-an external camera device to run all test cases.
-"""
-
-job.run_test('camera_V4L2', test_list='certification', tag='certification')
diff --git a/client/site_tests/cellular_ActivateLTE/cellular_ActivateLTE.py b/client/site_tests/cellular_ActivateLTE/cellular_ActivateLTE.py
deleted file mode 100644
index 8afad98..0000000
--- a/client/site_tests/cellular_ActivateLTE/cellular_ActivateLTE.py
+++ /dev/null
@@ -1,253 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.cellular import mm1_constants
-from autotest_lib.client.cros.cellular import test_environment
-from autotest_lib.client.cros.networking import pm_proxy
-
-I_ACTIVATION_TEST = 'Interface.LTEActivationTest'
-TEST_MODEMS_MODULE_PATH = os.path.join(os.path.dirname(__file__), 'files',
-                                       'modems.py')
-
-LONG_TIMEOUT = 20
-SHORT_TIMEOUT = 10
-
-class ActivationTest(object):
-    """
-    Super class that implements setup code that is common to the individual
-    tests.
-
-    """
-    def __init__(self, test):
-        self.test = test
-
-
-    def Cleanup(self):
-        """
-        Makes the modem look like it has been activated to satisfy the test
-        end condition.
-
-        """
-        # Set the MDN to a non-zero value, so that shill removes the ICCID from
-        # activating_iccid_store.profile. This way, individual test runs won't
-        # interfere with each other.
-        modem = self.test.pseudomm.wait_for_modem(timeout_seconds=LONG_TIMEOUT)
-        modem.iface_properties.Set(mm1_constants.I_MODEM,
-                                   'OwnNumbers',
-                                   ['1111111111'])
-        # Put the modem in the unknown subscription state so that the mdn value is
-        # used to remove the iccid entry
-        self.test.pseudomm.iface_testing.SetSubscriptionState(
-                mm1_constants.MM_MODEM_3GPP_SUBSCRIPTION_STATE_UNKNOWN)
-        time.sleep(5)
-        self.test.CheckServiceActivationState('activated')
-
-
-    def Run(self):
-        """
-        Configures the pseudomodem to run with the test modem, runs the test
-        and cleans up.
-
-        """
-        self.RunTest()
-        self.Cleanup()
-
-
-    def RunTest(self):
-        """
-        Runs the body of the test. Should be implemented by the subclass.
-
-        """
-        raise NotImplementedError()
-
-
-class ActivationResetTest(ActivationTest):
-    """
-    This test verifies that the modem resets after online payment.
-
-    """
-    def RunTest(self):
-        # Service should appear as 'not-activated'.
-        self.test.CheckServiceActivationState('not-activated')
-        self.test.CheckResetCalled(False)
-
-        # Call 'CompleteActivation' on the device. The service will become
-        # 'activating' and the modem should reset immediately.
-        # Not checking for the intermediate 'activating' state because it makes
-        # the test too fragile
-        service = self.test.FindCellularService()
-        service.CompleteCellularActivation()
-        time.sleep(SHORT_TIMEOUT)
-        self.test.CheckResetCalled(True)
-
-
-class ActivationDueToMdnTest(ActivationTest):
-    """
-    This test verifies that a valid MDN should cause the service to get marked
-    as 'activated' when the modem is in unknown subscription state.
-
-    """
-    def RunTest(self):
-        # Service should appear as 'not-activated'.
-        self.test.CheckServiceActivationState('not-activated')
-
-        # Update the MDN. The service should get marked as activated.
-        modem = self.test.pseudomm.get_modem()
-        modem.iface_properties.Set(mm1_constants.I_MODEM,
-                                   'OwnNumbers',
-                                   ['1111111111'])
-        # Put the modem in the unknown subscription state so that the mdn value is
-        # used to determine the service activation status.
-        self.test.pseudomm.iface_testing.SetSubscriptionState(
-                mm1_constants.MM_MODEM_3GPP_SUBSCRIPTION_STATE_UNKNOWN)
-        time.sleep(SHORT_TIMEOUT)
-        self.test.CheckServiceActivationState('activated')
-
-
-class cellular_ActivateLTE(test.test):
-    """
-    After an online payment to activate a network, shill keeps track of service
-    activation by monitoring changes to network registration and MDN updates
-    combined with a modem reset. The test checks that the
-    Cellular.ActivationState property of the service has the correct value
-    associated with it by simulating possible scenarios using the pseudo modem
-    manager.
-
-    """
-    version = 1
-
-    def GetModemState(self):
-        """Returns the current ModemManager modem state."""
-        modem = self.pseudomm.get_modem()
-        props = modem.properties(mm1_constants.I_MODEM)
-        return props['State']
-
-
-    def SetResetCalled(self, value):
-        """
-        Sets the value of the "ResetCalled" property of the current
-        modem.
-
-        @param value: Value to set in the property.
-
-        """
-        modem = self.pseudomm.get_modem()
-        if modem is None:
-            return
-        modem.iface_properties.Set(
-                I_ACTIVATION_TEST,
-                'ResetCalled',
-                dbus.types.Boolean(value))
-
-
-    def GetResetCalled(self, modem):
-        """
-        Returns the current value of the "ResetCalled" property of the current
-        modem.
-
-        @param modem: Modem proxy to send the query to.
-
-        """
-        return modem.properties(I_ACTIVATION_TEST)['ResetCalled']
-
-
-    def _CheckResetCalledHelper(self, expected_value):
-        modem = self.pseudomm.get_modem()
-        if modem is None:
-            return False
-        try:
-            return self.GetResetCalled(modem) == expected_value
-        except dbus.exceptions.DBusException as e:
-            name = e.get_dbus_name()
-            if (name == mm1_constants.DBUS_UNKNOWN_METHOD or
-                name == mm1_constants.DBUS_UNKNOWN_OBJECT):
-                return False
-            raise e
-
-
-    def CheckResetCalled(self, expected_value):
-        """
-        Checks that the ResetCalled property on the modem matches the expect
-        value.
-
-        @param expected_value: The expected value of ResetCalled.
-
-        """
-        utils.poll_for_condition(
-            lambda: self._CheckResetCalledHelper(expected_value),
-            exception=error.TestFail("\"ResetCalled\" did not match: " +
-                                     str(expected_value)),
-            timeout=LONG_TIMEOUT)
-
-
-    def CheckServiceActivationState(self, expected_state):
-        """
-        Asserts that the service activation state matches |expected_state|
-        within SHORT_TIMEOUT.
-
-        @param expected_state: The expected service activation state.
-
-        """
-        logging.info('Checking for service activation state: %s',
-                     expected_state)
-        service = self.FindCellularService()
-        success, state, duration = self.test_env.shill.wait_for_property_in(
-            service,
-            'Cellular.ActivationState',
-            [expected_state],
-            SHORT_TIMEOUT)
-        if not success and state != expected_state:
-            raise error.TestError(
-                'Service activation state should be \'%s\', but it is \'%s\'.'
-                % (expected_state, state))
-
-
-    def FindCellularService(self, check_not_none=True):
-        """
-        Returns the current cellular service.
-
-        @param check_not_none: If True, an error will be raised if no service
-                was found.
-
-        """
-        if check_not_none:
-            utils.poll_for_condition(
-                    lambda: (self.test_env.shill.find_cellular_service_object()
-                             is not None),
-                    exception=error.TestError(
-                            'Could not find cellular service within timeout.'),
-                    timeout=LONG_TIMEOUT);
-
-        service = self.test_env.shill.find_cellular_service_object()
-
-        # Check once more, to make sure it's valid.
-        if check_not_none and not service:
-            raise error.TestError('Could not find cellular service.')
-        return service
-
-
-    def run_once(self):
-        tests = [
-            ActivationResetTest(self),
-            ActivationDueToMdnTest(self),
-        ]
-
-        for test in tests:
-            logging.info("Running sub-test %s", test.__class__.__name__)
-            self.test_env = test_environment.CellularPseudoMMTestEnvironment(
-                    pseudomm_args = ({'family' : '3GPP',
-                                      'test-module' : TEST_MODEMS_MODULE_PATH,
-                                      'test-modem-class' : 'TestModem',
-                                      'test-sim-class' : 'TestSIM'},))
-            with self.test_env:
-                self.pseudomm = pm_proxy.PseudoMMProxy.get_proxy()
-                test.Run()
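The deleted CheckServiceActivationState and CheckResetCalled helpers above both poll a
property until it reaches an expected value within a timeout. A library-free sketch of that
wait pattern (get_value is a hypothetical callable standing in for the shill or pseudomodem
property read):

import time

def wait_for_value(get_value, expected, timeout=10, interval=0.5):
    deadline = time.time() + timeout
    value = get_value()
    while value != expected and time.time() < deadline:
        time.sleep(interval)
        value = get_value()
    if value != expected:
        raise AssertionError('got %r, expected %r' % (value, expected))
    return value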
diff --git a/client/site_tests/cellular_ActivateLTE/control.pseudomodem b/client/site_tests/cellular_ActivateLTE/control.pseudomodem
deleted file mode 100644
index 91025bb..0000000
--- a/client/site_tests/cellular_ActivateLTE/control.pseudomodem
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_ActivateLTE.pseudomodem"
-PURPOSE = """
-Test various scenarios that may arise during the post-payment LTE
-activation process.
-"""
-CRITERIA = """
-Test will fail if the cellular service is found to be in an
-unexpected state.
-"""
-ATTRIBUTES = "suite:cellular_pseudomodem"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DOC = """
-  After an online payment to activate a network, shill keeps track of service
-  activation by monitoring changes to network registration and MDN updates
-  combined with a modem reset. The test checks that the Cellular.ActivationState
-  property of the service has the correct value associated with it by simulating
-  possible scenarios using the pseudo modem manager.
-"""
-
-job.run_test('cellular_ActivateLTE')
diff --git a/client/site_tests/cellular_ActivateLTE/files/__init__.py b/client/site_tests/cellular_ActivateLTE/files/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/site_tests/cellular_ActivateLTE/files/__init__.py
+++ /dev/null
diff --git a/client/site_tests/cellular_ActivateLTE/files/common.py b/client/site_tests/cellular_ActivateLTE/files/common.py
deleted file mode 100644
index fcba34e..0000000
--- a/client/site_tests/cellular_ActivateLTE/files/common.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# setup the environment so that autotest_lib can be imported when this file is
-# run as an executable
-
-import os, sys
-
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-client_dir = os.path.abspath(os.path.join(dirname, "..", "..", ".."))
-sys.path.insert(0, client_dir)
-
-import setup_modules
-
-sys.path.pop(0)
-setup_modules.setup(base_path=client_dir,
-                    root_module_name="autotest_lib.client")
diff --git a/client/site_tests/cellular_ActivateLTE/files/modems.py b/client/site_tests/cellular_ActivateLTE/files/modems.py
deleted file mode 100644
index 3800570..0000000
--- a/client/site_tests/cellular_ActivateLTE/files/modems.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import dbus.types
-import time
-
-from autotest_lib.client.cros.cellular import mm1_constants
-from autotest_lib.client.cros.cellular.pseudomodem import modem_3gpp
-from autotest_lib.client.cros.cellular.pseudomodem import sim
-from autotest_lib.client.cros.cellular.pseudomodem import utils as pm_utils
-
-I_ACTIVATION_TEST = 'Interface.LTEActivationTest'
-
-class TestModem(modem_3gpp.Modem3gpp):
-    """
-    Base class for the custom 3GPP fake modems that are defined in this test.
-    This modem boots up as unprovisioned & becomes activated only if it has
-    been explicitly activated by calling CompleteCellularActivation
-
-    """
-    def __init__(self,
-                 state_machine_factory=None,
-                 bus=None,
-                 device='pseudomodem0',
-                 index=0,
-                 roaming_networks=None,
-                 config=None):
-        super(TestModem, self).__init__(state_machine_factory,
-                                        bus=bus,
-                                        device=device,
-                                        roaming_networks=roaming_networks,
-                                        config=config)
-        # Update the registered subscription state as unprovisioned
-        # for this activation test
-        self._cached_registered_subscription_state = (
-                mm1_constants.MM_MODEM_3GPP_SUBSCRIPTION_STATE_UNPROVISIONED)
-
-
-    def _InitializeProperties(self):
-        props = modem_3gpp.Modem3gpp._InitializeProperties(self)
-        modem_props = props[mm1_constants.I_MODEM]
-        modem_props['OwnNumbers'] = ['0000000000']
-        modem_props['AccessTechnologies'] = dbus.types.UInt32(
-            mm1_constants.MM_MODEM_ACCESS_TECHNOLOGY_LTE)
-        modem_props['ModemCapabilities'] = dbus.types.UInt32(
-            mm1_constants.MM_MODEM_CAPABILITY_LTE)
-        modem_props['CurrentCapabilities'] = dbus.types.UInt32(
-            mm1_constants.MM_MODEM_CAPABILITY_LTE)
-
-        # For the purposes of this test, introduce a property to help
-        # verify that a reset has taken place. Expose this under a test
-        # specific interface.
-        if hasattr(self, '_properties'):
-            reset_called = \
-                self._properties[I_ACTIVATION_TEST]['ResetCalled']
-        else:
-            reset_called = False
-        props[I_ACTIVATION_TEST] = {
-            'ResetCalled' : dbus.types.Boolean(reset_called)
-        }
-        return props
-
-
-    @pm_utils.log_dbus_method()
-    def Reset(self):
-        self.Set(
-            I_ACTIVATION_TEST, 'ResetCalled', dbus.types.Boolean(True))
-        modem_3gpp.Modem3gpp.Reset(self)
-
-
-class TestSIM(sim.SIM):
-    """ SIM instantiated with the default test network, for ease of use. """
-    def __init__(self):
-        # Shill's activating ICCID store tracks which SIM identifiers are in
-        # the process of activation. If we use the same SIM identifier for
-        # every test pass, then a failed test may leave a stale entry in the
-        # activating ICCD store which will erroneously mark the SIM as pending
-        # activation. So, to avoid this, try to use a unique SIM identifier
-        # each time.
-        sim_identifier = int(time.time())
-        sim.SIM.__init__(
-                self,
-                sim.SIM.Carrier('test'),
-                mm1_constants.MM_MODEM_ACCESS_TECHNOLOGY_LTE,
-                msin=str(sim_identifier))
diff --git a/client/site_tests/cellular_ConnectFailure/cellular_ConnectFailure.py b/client/site_tests/cellular_ConnectFailure/cellular_ConnectFailure.py
index cc9426b..6c21513 100644
--- a/client/site_tests/cellular_ConnectFailure/cellular_ConnectFailure.py
+++ b/client/site_tests/cellular_ConnectFailure/cellular_ConnectFailure.py
@@ -1,10 +1,17 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus
 import logging
 
+from six.moves import range
+
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.cellular.pseudomodem import modem_3gpp
@@ -93,7 +100,7 @@
     def run_once(self, test_env, connect_count=4):
         with test_env:
             self.test_env = test_env
-            for count in xrange(connect_count):
+            for count in range(connect_count):
                 logging.info('Connect attempt %d', count + 1)
                 self._connect_to_3g_network(config_timeout=
                         cellular_proxy.CellularProxy.SERVICE_CONNECT_TIMEOUT)
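The hunk above ports cellular_ConnectFailure to run under both Python 2 and 3 by importing range from six.moves, so the old xrange-style lazy iteration keeps the same behaviour on either interpreter. A short sketch of the pattern; the loop body is illustrative only, not taken from the test.

    # six.moves.range resolves to xrange on Python 2 and to the built-in range
    # on Python 3, so the loop never materialises a full list on either one.
    from six.moves import range

    for attempt in range(4):
        print('Connect attempt %d' % (attempt + 1))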
diff --git a/client/site_tests/cellular_ConnectFailure/control b/client/site_tests/cellular_ConnectFailure/control
index 2705e13..70a8ffc 100644
--- a/client/site_tests/cellular_ConnectFailure/control
+++ b/client/site_tests/cellular_ConnectFailure/control
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
   Tests that 3G connect failures are handled by cromo & flimflam properly
diff --git a/client/site_tests/cellular_ConnectFailure/control.pseudomodem b/client/site_tests/cellular_ConnectFailure/control.pseudomodem
index 4b7ba9b..589a84f 100644
--- a/client/site_tests/cellular_ConnectFailure/control.pseudomodem
+++ b/client/site_tests/cellular_ConnectFailure/control.pseudomodem
@@ -9,11 +9,11 @@
 This test will fail if a connect failure does not immediately cause the
 service to enter the Failed state.
 """
-ATTRIBUTES = "suite:cellular_pseudomodem"
 TIME = "FAST"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
   Tests that 3G connect failures are handled by cromo & flimflam properly
diff --git a/client/site_tests/cellular_DeferredRegistration/cellular_DeferredRegistration.py b/client/site_tests/cellular_DeferredRegistration/cellular_DeferredRegistration.py
deleted file mode 100644
index 9f6df23..0000000
--- a/client/site_tests/cellular_DeferredRegistration/cellular_DeferredRegistration.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import logging
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-from autotest_lib.client.cros.cellular import mm1_constants
-from autotest_lib.client.cros.cellular import test_environment
-from autotest_lib.client.cros.networking import pm_proxy
-
-class cellular_DeferredRegistration(test.test):
-    """
-    Tests that shill can handle temporary registration loss without
-    disconnecting the service because some modems periodically go searching for
-    a better signal while still connected to the network.  Conversely, make
-    sure that shill still disconnects a service that has suffered a
-    registration loss for an extended period of time (>15s).
-
-    """
-    version = 1
-
-    DEFERRED_REGISTRATION_TIMEOUT_SECONDS = 15
-
-    def _init(self):
-        self.pseudomm = pm_proxy.PseudoMMProxy.get_proxy()
-        service = self.test_env.shill.find_cellular_service_object()
-        self.test_env.shill.connect_service_synchronous(
-                service,
-                timeout_seconds=self.test_env.shill.SERVICE_CONNECT_TIMEOUT)
-
-
-    def _set_modem_registration_state(self, state):
-        self.pseudomm.get_modem().iface_properties.Set(
-                mm1_constants.I_MODEM_3GPP,
-                'RegistrationState',
-                dbus.types.UInt32(state))
-
-
-    def _test_temporary_registration_loss(self):
-        logging.info('Verifying temporary loss of registration behavior')
-        self._set_modem_registration_state(
-                mm1_constants.MM_MODEM_3GPP_REGISTRATION_STATE_SEARCHING)
-        time.sleep(self.DEFERRED_REGISTRATION_TIMEOUT_SECONDS / 2)
-        self._set_modem_registration_state(
-                mm1_constants.MM_MODEM_3GPP_REGISTRATION_STATE_HOME)
-        time.sleep(self.DEFERRED_REGISTRATION_TIMEOUT_SECONDS * 2)
-        if self.test_env.shill.find_cellular_service_object() is None:
-            raise error.TestFail('Cellular service should not have been '
-                                 'destroyed after temporary registration loss.')
-        logging.info('Successfully verified temporary loss of registration '
-                     'behavior')
-
-
-    def _test_permanent_registration_loss(self):
-        logging.info('Verifying permanent loss of registration behavior')
-        self._set_modem_registration_state(
-                mm1_constants.MM_MODEM_3GPP_REGISTRATION_STATE_SEARCHING)
-        time.sleep(self.DEFERRED_REGISTRATION_TIMEOUT_SECONDS * 2)
-        if self.test_env.shill.find_cellular_service_object() is not None:
-            raise error.TestFail('Cellular service should have been destroyed '
-                                 'after permanent registration loss.')
-        logging.info('Successfully verified permanent loss of registration '
-                     'behavior')
-
-
-    def run_once(self):
-        """Called by autotest to run this test."""
-
-        with test_environment.CellularPseudoMMTestEnvironment(
-                pseudomm_args=({'family': '3GPP'},)) as self.test_env:
-            self._init()
-
-            tests = [self._test_temporary_registration_loss,
-                     self._test_permanent_registration_loss]
-
-            for test in tests:
-                test()
diff --git a/client/site_tests/cellular_DeferredRegistration/control.pseudomodem b/client/site_tests/cellular_DeferredRegistration/control.pseudomodem
deleted file mode 100644
index 5086e73..0000000
--- a/client/site_tests/cellular_DeferredRegistration/control.pseudomodem
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_DeferredRegistration.pseudomodem"
-PURPOSE = "Verify shill deferred regisrtration behavior."
-CRITERIA = """
-This test will fail if one of the following conditions occurs:
-  - Shill destroys a service due to temporary registration loss
-  - Shill never destroys a service after permanent registration loss
-"""
-ATTRIBUTES = "suite:cellular_pseudomodem"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DOC = """
-  Tests that shill can handle temporary registration loss without destroying
-  the service because some modems periodically go searching for a better signal
-  while still connected to the network.  Additionally, make sure that shill
-  still destroys a service that has suffered a registration loss for an
-  extended period of time (>15s).
-"""
-
-job.run_test('cellular_DeferredRegistration')
diff --git a/client/site_tests/cellular_DisableWhileConnecting/cellular_DisableWhileConnecting.py b/client/site_tests/cellular_DisableWhileConnecting/cellular_DisableWhileConnecting.py
index 42cc75c..536d97d 100644
--- a/client/site_tests/cellular_DisableWhileConnecting/cellular_DisableWhileConnecting.py
+++ b/client/site_tests/cellular_DisableWhileConnecting/cellular_DisableWhileConnecting.py
@@ -1,14 +1,18 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import gobject
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 import logging
 import time
 
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.cellular import modem_utils
 from autotest_lib.client.cros.mainloop import ExceptionForward
 from autotest_lib.client.cros.mainloop import GenericTesterMainLoop
 from autotest_lib.client.cros.networking import shill_proxy
@@ -29,7 +33,7 @@
         self.test_kwargs.get('delay_before_disable_ms', 0) +
         self.test.iteration *
         self.test_kwargs.get('disable_delay_per_iteration_ms', 0))
-    gobject.timeout_add(disable_delay_ms, self._start_disable)
+    GObject.timeout_add(disable_delay_ms, self._start_disable)
     self._start_test()
 
   @ExceptionForward
@@ -59,14 +63,12 @@
     logging.info('Got status')
     self.requirement_completed('get_status', warn_if_already_completed=False)
     if self.status_delay_ms:
-      gobject.timeout_add(self.status_delay_ms, self._start_get_status)
+      GObject.timeout_add(self.status_delay_ms, self._start_get_status)
 
   def after_main_loop(self):
     """Called by GenericTesterMainLoop after the main loop has exited."""
     enabled = self._enabled()
     logging.info('Modem enabled: %s', enabled)
-    # Will return happily if no Gobi present
-    modem_utils.ClearGobiModemFaultInjection()
 
 
 class ShillDisableTester(DisableTester):
@@ -188,7 +190,7 @@
     if self._is_gobi():
       self.remaining_requirements.add('get_status')
       self.status_delay_ms = self.test_kwargs.get('status_delay_ms', 200)
-      gobject.timeout_add(self.status_delay_ms, self._start_get_status)
+      GObject.timeout_add(self.status_delay_ms, self._start_get_status)
 
     self._start_connect()
 
@@ -262,7 +264,7 @@
   def run_once(self, test_env, **kwargs):
     self.test_env = test_env
     timeout_s = kwargs.get('timeout_s', DEFAULT_TEST_TIMEOUT_S)
-    gobject_main_loop = gobject.MainLoop()
+    gobject_main_loop = GObject.MainLoop()
 
     with test_env:
       logging.info('Shill-level test')
@@ -272,11 +274,8 @@
       shill_level_test.run(**kwargs)
 
     with test_env:
-      try:
-        logging.info('Modem-level test')
-        modem_level_test = ModemDisableTester(self,
-                                              gobject_main_loop,
-                                              timeout_s=timeout_s)
-        modem_level_test.run(**kwargs)
-      finally:
-        modem_utils.ClearGobiModemFaultInjection()
+      logging.info('Modem-level test')
+      modem_level_test = ModemDisableTester(self,
+                                            gobject_main_loop,
+                                            timeout_s=timeout_s)
+      modem_level_test.run(**kwargs)
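The hunk above moves cellular_DisableWhileConnecting off the legacy gobject module onto GObject from gi.repository, keeping a fallback for older client images, and routes the existing timeout_add and MainLoop calls through the new name. A minimal, self-contained sketch of that pattern; the _on_timeout helper and the 200 ms interval are illustrative, not taken from the test.

    # Prefer the gi.repository binding; fall back to the legacy module on
    # older images, mirroring the import guard added above.
    try:
        from gi.repository import GObject
    except ImportError:
        import gobject as GObject

    def _on_timeout(loop):
        # Returning False removes this timeout source after it fires once.
        loop.quit()
        return False

    loop = GObject.MainLoop()
    # Schedule a callback 200 ms after the loop starts, the same mechanism the
    # test uses to schedule its delayed Disable and GetStatus calls.
    GObject.timeout_add(200, _on_timeout, loop)
    loop.run()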
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control b/client/site_tests/cellular_DisableWhileConnecting/control
index 8a7a02c..1577970 100644
--- a/client/site_tests/cellular_DisableWhileConnecting/control
+++ b/client/site_tests/cellular_DisableWhileConnecting/control
@@ -14,6 +14,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Check that the 3G modem can handle a disconnect while connecting.
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.amarisoft b/client/site_tests/cellular_DisableWhileConnecting/control.amarisoft
new file mode 100644
index 0000000..5f4e5fb
--- /dev/null
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.amarisoft
@@ -0,0 +1,47 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_DisableWhileConnecting.amarisoft"
+PURPOSE = "Check that the 3G/4G modem can handle a disconnect while connecting."
+CRITERIA = """
+This test will fail if a disconnect request while the modem is
+connecting is not promptly executed.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:amarisoft"
+PY_VERSION = 3
+
+DOC = """
+Check that the 3G/4G modem can handle a disconnect while connecting.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=1
+
+test_env = test_environment.CellularOTATestEnvironment()
+# Disabling this test. More details @ b/174480387
+"""
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=0,
+             iterations=ITERATIONS_PER_TEST,
+             tag='instant_disable') """
+
+# Since dbus-python emits our calls asynchronously, it is sometimes
+# (often?) the case that the disable arrives before the connect.
+# There's no good way to track what actually gets sent first, so we
+# sleep.
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=int(1000.0 / ITERATIONS_PER_TEST),
+             iterations=ITERATIONS_PER_TEST,
+             tag='disable_delay_ramp')
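The 'disable_delay_ramp' invocation above relies on cellular_DisableWhileConnecting.py computing the wait before each Disable as delay_before_disable_ms + iteration * disable_delay_per_iteration_ms. A small sketch of how that delay grows across iterations; compute_disable_delay_ms and the four-iteration example are hypothetical, written only to illustrate the arithmetic.

    # Hypothetical helper mirroring the per-iteration delay computation used by
    # the 'disable_delay_ramp' run configured above.
    def compute_disable_delay_ms(iteration, delay_before_disable_ms,
                                 disable_delay_per_iteration_ms):
        # The wait before the Disable call grows linearly with the iteration.
        return (delay_before_disable_ms +
                iteration * disable_delay_per_iteration_ms)

    iterations = 4  # illustrative; the control file above uses 1
    per_iteration_ms = int(1000.0 / iterations)
    for i in range(iterations):
        # Prints delays of 0, 250, 500 and 750 ms for the four iterations.
        print('iteration %d: disable after %d ms'
              % (i, compute_disable_delay_ms(i, 0, per_iteration_ms)))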
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.att b/client/site_tests/cellular_DisableWhileConnecting/control.att
index ba14ce6..3e2c0a6 100644
--- a/client/site_tests/cellular_DisableWhileConnecting/control.att
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.att
@@ -15,6 +15,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:att"
+PY_VERSION = 3
 
 DOC = """
 Check that the 3G modem can handle a disconnect while connecting.
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.docomo b/client/site_tests/cellular_DisableWhileConnecting/control.docomo
new file mode 100644
index 0000000..845a94e
--- /dev/null
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.docomo
@@ -0,0 +1,47 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_DisableWhileConnecting.docomo"
+PURPOSE = "Check that the 3G modem can handle a disconnect while connecting."
+CRITERIA = """
+This test will fail if a disconnect request while the modem is
+connecting is not promptly executed.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:docomo"
+PY_VERSION = 3
+
+DOC = """
+Check that the 3G modem can handle a disconnect while connecting.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=1
+
+test_env = test_environment.CellularOTATestEnvironment()
+# Disabling this test. More details @ b/174480387
+"""
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=0,
+             iterations=ITERATIONS_PER_TEST,
+             tag='instant_disable') """
+
+# Since dbus-python emits our calls asynchronously, it is sometimes
+# (often?) the case that the disable arrives before the connect.
+# There's no good way to track what actually gets sent first, so we
+# sleep.
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=int(1000.0 / ITERATIONS_PER_TEST),
+             iterations=ITERATIONS_PER_TEST,
+             tag='disable_delay_ramp')
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.ee b/client/site_tests/cellular_DisableWhileConnecting/control.ee
new file mode 100644
index 0000000..a408c73
--- /dev/null
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.ee
@@ -0,0 +1,47 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_DisableWhileConnecting.ee"
+PURPOSE = "Check that the 3G modem can handle a disconnect while connecting."
+CRITERIA = """
+This test will fail if a disconnect request while the modem is
+connecting is not promptly executed.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:ee"
+PY_VERSION = 3
+
+DOC = """
+Check that the 3G modem can handle a disconnect while connecting.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=1
+
+test_env = test_environment.CellularOTATestEnvironment()
+# Disabling this test. More details @ b/174480387
+"""
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=0,
+             iterations=ITERATIONS_PER_TEST,
+             tag='instant_disable') """
+
+# Since dbus-python emits our calls asynchronously, it is sometimes
+# (often?) the case that the disable arrives before the connect.
+# There's no good way to track what actually gets sent first, so we
+# sleep.
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=int(1000.0 / ITERATIONS_PER_TEST),
+             iterations=ITERATIONS_PER_TEST,
+             tag='disable_delay_ramp')
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.kddi b/client/site_tests/cellular_DisableWhileConnecting/control.kddi
new file mode 100644
index 0000000..bb04fab
--- /dev/null
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.kddi
@@ -0,0 +1,47 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_DisableWhileConnecting.kddi"
+PURPOSE = "Check that the 3G modem can handle a disconnect while connecting."
+CRITERIA = """
+This test will fail if a disconnect request while the modem is
+connecting is not promptly executed.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:kddi"
+PY_VERSION = 3
+
+DOC = """
+Check that the 3G modem can handle a disconnect while connecting.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=1
+
+test_env = test_environment.CellularOTATestEnvironment()
+# Disabling this test. More details @ b/174480387
+"""
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=0,
+             iterations=ITERATIONS_PER_TEST,
+             tag='instant_disable') """
+
+# Since dbus-python emits our calls asynchronously, it is sometimes
+# (often?) the case that the disable arrives before the connect.
+# There's no good way to track what actually gets sent first, so we
+# sleep.
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=int(1000.0 / ITERATIONS_PER_TEST),
+             iterations=ITERATIONS_PER_TEST,
+             tag='disable_delay_ramp')
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.pseudomodem b/client/site_tests/cellular_DisableWhileConnecting/control.pseudomodem
index bbac237..6344692 100644
--- a/client/site_tests/cellular_DisableWhileConnecting/control.pseudomodem
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.pseudomodem
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Check that the 3G modem can handle a disconnect while connecting.
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.rakuten b/client/site_tests/cellular_DisableWhileConnecting/control.rakuten
new file mode 100644
index 0000000..edaa006
--- /dev/null
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.rakuten
@@ -0,0 +1,47 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_DisableWhileConnecting.rakuten"
+PURPOSE = "Check that the 3G modem can handle a disconnect while connecting."
+CRITERIA = """
+This test will fail if a disconnect request while the modem is
+connecting is not promptly executed.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:rakuten"
+PY_VERSION = 3
+
+DOC = """
+Check that the 3G modem can handle a disconnect while connecting.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=1
+
+test_env = test_environment.CellularOTATestEnvironment()
+# Disabling this test. More details @ b/174480387
+"""
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=0,
+             iterations=ITERATIONS_PER_TEST,
+             tag='instant_disable') """
+
+# Since dbus-python emits our calls asynchronously, it is sometimes
+# (often?) the case that the disable arrives before the connect.
+# There's no good way to track what actually gets sent first, so we
+# sleep.
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=int(1000.0 / ITERATIONS_PER_TEST),
+             iterations=ITERATIONS_PER_TEST,
+             tag='disable_delay_ramp')
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.softbank b/client/site_tests/cellular_DisableWhileConnecting/control.softbank
new file mode 100644
index 0000000..f53a52e
--- /dev/null
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.softbank
@@ -0,0 +1,47 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_DisableWhileConnecting.softbank"
+PURPOSE = "Check that the 3G modem can handle a disconnect while connecting."
+CRITERIA = """
+This test will fail if a disconnect request while the modem is
+connecting is not promptly executed.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:softbank"
+PY_VERSION = 3
+
+DOC = """
+Check that the 3G modem can handle a disconnect while connecting.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=1
+
+test_env = test_environment.CellularOTATestEnvironment()
+# Disabling this test. More details @ b/174480387
+"""
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=0,
+             iterations=ITERATIONS_PER_TEST,
+             tag='instant_disable') """
+
+# Since dbus-python emits our calls asynchronously, it is sometimes
+# (often?) the case that the disable arrives before the connect.
+# There's no good way to track what actually gets sent first, so we
+# sleep.
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=int(1000.0 / ITERATIONS_PER_TEST),
+             iterations=ITERATIONS_PER_TEST,
+             tag='disable_delay_ramp')
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.sprint b/client/site_tests/cellular_DisableWhileConnecting/control.sprint
index 81b3695..29de355 100644
--- a/client/site_tests/cellular_DisableWhileConnecting/control.sprint
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.sprint
@@ -15,6 +15,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:sprint"
+PY_VERSION = 3
 
 DOC = """
 Check that the 3G modem can handle a disconnect while connecting.
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.tmobile b/client/site_tests/cellular_DisableWhileConnecting/control.tmobile
index ca28011..d1c1b5e 100644
--- a/client/site_tests/cellular_DisableWhileConnecting/control.tmobile
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.tmobile
@@ -15,6 +15,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:tmobile"
+PY_VERSION = 3
 
 DOC = """
 Check that the 3G modem can handle a disconnect while connecting.
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.verizon b/client/site_tests/cellular_DisableWhileConnecting/control.verizon
index 13ec8e4..2e5b0c0 100644
--- a/client/site_tests/cellular_DisableWhileConnecting/control.verizon
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.verizon
@@ -15,6 +15,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:verizon"
+PY_VERSION = 3
 
 DOC = """
 Check that the 3G modem can handle a disconnect while connecting.
diff --git a/client/site_tests/cellular_DisableWhileConnecting/control.vodafone b/client/site_tests/cellular_DisableWhileConnecting/control.vodafone
new file mode 100644
index 0000000..4e00911
--- /dev/null
+++ b/client/site_tests/cellular_DisableWhileConnecting/control.vodafone
@@ -0,0 +1,47 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_DisableWhileConnecting.vodafone"
+PURPOSE = "Check that the 3G modem can handle a disconnect while connecting."
+CRITERIA = """
+This test will fail if a disconnect request while the modem is
+connecting is not promptly executed.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:vodafone"
+PY_VERSION = 3
+
+DOC = """
+Check that the 3G modem can handle a disconnect while connecting.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=1
+
+test_env = test_environment.CellularOTATestEnvironment()
+# Disabling this test. More details @ b/174480387
+"""
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=0,
+             iterations=ITERATIONS_PER_TEST,
+             tag='instant_disable') """
+
+# Since dbus-python emits our calls asynchronously, it is sometimes
+# (often?) the case that the disable arrives before the connect.
+# There's no good way to track what actually gets sent first, so we
+# sleep.
+job.run_test('cellular_DisableWhileConnecting',
+             test_env=test_env,
+             delay_before_disable_ms=0,
+             disable_delay_per_iteration_ms=int(1000.0 / ITERATIONS_PER_TEST),
+             iterations=ITERATIONS_PER_TEST,
+             tag='disable_delay_ramp')
diff --git a/client/site_tests/cellular_DisconnectFailure/cellular_DisconnectFailure.py b/client/site_tests/cellular_DisconnectFailure/cellular_DisconnectFailure.py
index cfdc7cf..fed28c2 100644
--- a/client/site_tests/cellular_DisconnectFailure/cellular_DisconnectFailure.py
+++ b/client/site_tests/cellular_DisconnectFailure/cellular_DisconnectFailure.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -155,7 +156,7 @@
 
         """
         service = self.test_env.shill.find_cellular_service_object()
-        properties = service.GetProperties(utf8_strings=True)
+        properties = service.GetProperties()
         state = properties.get('State', None)
         return state in ['portal', 'online']
 
@@ -166,7 +167,7 @@
 
         """
         service = self.test_env.shill.find_cellular_service_object()
-        properties = service.GetProperties(utf8_strings=True)
+        properties = service.GetProperties()
         state = properties.get('State', None)
         return state == 'idle'
 
diff --git a/client/site_tests/cellular_DisconnectFailure/control.pseudomodem b/client/site_tests/cellular_DisconnectFailure/control.pseudomodem
index d711960..4cb803e 100644
--- a/client/site_tests/cellular_DisconnectFailure/control.pseudomodem
+++ b/client/site_tests/cellular_DisconnectFailure/control.pseudomodem
@@ -19,6 +19,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 The test uses the pseudo modem manager to simulate two failure scenarios of a
diff --git a/client/site_tests/cellular_HermesMM_InstallEnable/cellular_HermesMM_InstallEnable.py b/client/site_tests/cellular_HermesMM_InstallEnable/cellular_HermesMM_InstallEnable.py
new file mode 100644
index 0000000..6ecbaac
--- /dev/null
+++ b/client/site_tests/cellular_HermesMM_InstallEnable/cellular_HermesMM_InstallEnable.py
@@ -0,0 +1,105 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import dbus
+import logging
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.cellular import cellular_logging
+from autotest_lib.client.cros.cellular import hermes_utils
+from autotest_lib.client.cros.cellular import mm1_constants
+
+log = cellular_logging.SetupCellularLogging('HermesMMInstallEnable')
+
+class cellular_HermesMM_InstallEnable(test.test):
+    """
+    Tests Install & Enable functions on active/inactive Euicc and
+    validates the same on Modem Manager
+
+    This test fails when fails to Install/Enable a given Euicc profile
+    or not reflecting same on Modem Manager
+
+    Prerequisites
+
+    1) For test CI:
+       Before running this test on test CI, a profile needs to be created on
+    go/stork-profile. The profile needs to be linked to the EID of the dut.
+    Profiles with class=operational and type=Android GTS test are known to work
+    well with this test.
+
+       We rely on the SMDS event to find the activation code for the test.
+    There is a limit of 99 downloads before the profile needs to be deleted and
+    recreated. (b/181723689)
+
+    2) For prod CI:
+       Install a production profile before running the test.
+
+    """
+    version = 1
+
+    def _validate_sim_data(self, euicc_path):
+        """
+        Validate the SIM details that Modem Manager reads from the modem.
+
+        Checks that the installed profile is present on the eUICC and that the
+        enabled profile's SIM details are reflected on MM.
+
+        @param euicc_path: available eUICC dbus path as a string
+        @raise error.TestFail: if a dbus exception occurs or validation fails
+
+        """
+        try:
+            logging.info('validate_sim_data start')
+            euicc = self.hermes_manager.get_euicc(euicc_path)
+            if not euicc:
+                logging.error('No Euicc enumerated')
+                raise error.TestFail('Validation of profile installation on MM'
+                                    ' failed as no euicc enumerated')
+
+            modem_proxy = self.mm_proxy.wait_for_modem(
+                    mm1_constants.MM_MODEM_POLL_TIME)
+            if not modem_proxy:
+                logging.info('No modem object yet; cannot validate')
+                raise error.TestFail('Validation of profile installation on MM'
+                                    ' failed as no modem')
+
+            # Read MM SIM properties and validate with installed profile data
+            sim_proxy = modem_proxy.get_sim()
+            sim_props = sim_proxy.properties()
+
+            logging.info('MM-SIM properties are SimIdentifier:%s Active:%s'
+                          ' Imsi:%s', sim_props['SimIdentifier'],
+                          sim_props['Active'], sim_props['Imsi'])
+
+            if (sim_props['SimIdentifier'] == self.installed_iccid):
+                logging.info('===validate_sim_data succeeded===\n')
+            else:
+                raise error.TestFail('Validation of profile Installation on MM'
+                                    ' failed:' + self.installed_iccid)
+
+            return True
+        except dbus.DBusException as e:
+            logging.error('Resulted Modem Manager Validation error:%s', e)
+            raise error.TestFail('MM validation failed')
+
+    def run_once(self, test_env, is_prod_ci=False):
+        """ Install & Enable Euicc by enabling a profile """
+        self.is_prod_ci = is_prod_ci
+
+        self.mm_proxy, self.hermes_manager, euicc_path = \
+                    hermes_utils.initialize_test(is_prod_ci)
+
+        logging.info('Getting profile to enable')
+        self.installed_iccid = hermes_utils.get_iccid_of_disabled_profile(
+            euicc_path, self.hermes_manager, self.is_prod_ci)
+
+        hermes_utils.enable_or_disable_profile_test(
+        euicc_path, self.hermes_manager, self.installed_iccid, True)
+
+        # Validate esim profile enabled is same as MM sim profile
+        self._validate_sim_data(euicc_path)
+
+        logging.info('HermesMMInstallEnableTest Completed')
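_validate_sim_data above reads the Modem Manager SIM properties once and compares SimIdentifier against the ICCID that was just enabled. If those properties can lag the profile switch, polling is one option; the sketch below is a hypothetical variant under that assumption, reusing the poll_for_condition pattern that appears elsewhere in these tests (the import path, helper name and 30 s timeout are assumptions, not part of the test above).

    # Hypothetical polling variant: wait until Modem Manager reports the
    # expected ICCID instead of checking the SIM properties a single time.
    from autotest_lib.client.bin import utils
    from autotest_lib.client.common_lib import error

    def wait_for_iccid(modem_proxy, expected_iccid, timeout_seconds=30):
        """Polls the MM SIM properties until SimIdentifier matches."""
        def _iccid_matches():
            sim_props = modem_proxy.get_sim().properties()
            return sim_props.get('SimIdentifier') == expected_iccid

        utils.poll_for_condition(
                _iccid_matches,
                exception=error.TestFail(
                        'MM never reported ICCID %s' % expected_iccid),
                timeout=timeout_seconds)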
diff --git a/client/site_tests/cellular_HermesMM_InstallEnable/control.prodci b/client/site_tests/cellular_HermesMM_InstallEnable/control.prodci
new file mode 100644
index 0000000..af32a8e
--- /dev/null
+++ b/client/site_tests/cellular_HermesMM_InstallEnable/control.prodci
@@ -0,0 +1,32 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_HermesMM_InstallEnable.prodci"
+PURPOSE = "Verify that a failed hermes Install/Enable attempt reported"
+CRITERIA = """
+This test will fail if could not able to Install/Enable a given Euicc through
+Hermes dbus daemon or the enabled profile SIM details not appeared on MM
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:esim"
+PY_VERSION = 3
+DOC = """
+  Tests Hermes eUICC profile Install & Enable.
+
+  This test will fail if it fails to enable the eUICC profile, cannot find and
+  connect to the Hermes dbus daemon, or the SIM detail validation fails on MM.
+  It requires a dut with a modem and an eUICC.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularESIMTestEnvironment()
+job.run_test('cellular_HermesMM_InstallEnable', test_env=test_env,
+            is_prod_ci=True)
+
diff --git a/client/site_tests/cellular_HermesMM_InstallEnable/control.testci b/client/site_tests/cellular_HermesMM_InstallEnable/control.testci
new file mode 100644
index 0000000..e165328
--- /dev/null
+++ b/client/site_tests/cellular_HermesMM_InstallEnable/control.testci
@@ -0,0 +1,33 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_HermesMM_InstallEnable.testci"
+PURPOSE = "Verify that a failed hermes MM Install Enable attempt reported"
+CRITERIA = """
+This test will fail if could not able to Install & Enable given Euicc through
+Hermes dbus daemon or the enabled profile SIM details not appeared on MM
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:testesim"
+PY_VERSION = 3
+DOC = """
+  Tests Hermes profile Install & Enable/Disable and validates the same on
+  Modem Manager.
+
+  This test will fail if it fails to Install & Enable the profile, cannot
+  connect to the Hermes dbus daemon, or the SIM detail validation fails on MM.
+  It requires a dut with a modem and an eUICC.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularESIMTestEnvironment()
+job.run_test('cellular_HermesMM_InstallEnable', test_env=test_env,
+            is_prod_ci=False)
+
diff --git a/client/site_tests/cellular_Hermes_MultiProfile/cellular_Hermes_MultiProfile.py b/client/site_tests/cellular_Hermes_MultiProfile/cellular_Hermes_MultiProfile.py
new file mode 100644
index 0000000..3ff52ef
--- /dev/null
+++ b/client/site_tests/cellular_Hermes_MultiProfile/cellular_Hermes_MultiProfile.py
@@ -0,0 +1,111 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import logging
+import random
+
+from six.moves import range
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.cellular import cellular_logging
+from autotest_lib.client.cros.cellular import hermes_utils
+
+log = cellular_logging.SetupCellularLogging('HermesMultiProfile')
+
+class cellular_Hermes_MultiProfile(test.test):
+    """
+    Test that Hermes can perform enable/disable operations on multiple profiles
+
+    Prerequisites
+
+    1) For test CI:
+       Before running this test on test CI, two profiles need to be created on
+    go/stork-profile. The profiles need to be linked to the EID of the dut.
+    Profiles with class=operational and type=Android GTS test are known to work
+    well with this test.
+
+       We rely on the SMDS event to find the activation codes for the test.
+    There is a limit of 99 downloads before the profiles need to be deleted and
+    recreated. (b/181723689)
+
+    2) For prod CI:
+       Install two production profiles before running the test.
+
+    """
+    version = 1
+
+    def run_once(self, test_env, is_prod_ci=False):
+        """ Enable/Disable a profile """
+        self.is_prod_ci = is_prod_ci
+
+        self.mm_proxy, self.hermes_manager, euicc_path = \
+                    hermes_utils.initialize_test(is_prod_ci)
+        # Separate testci and prodci procedures to get 2 iccids
+        if not is_prod_ci:
+            first_iccid = hermes_utils.install_profile_test(euicc_path, self.hermes_manager)
+            second_iccid = hermes_utils.install_profile_test(euicc_path, self.hermes_manager)
+        else:
+            _, installed_profiles = \
+            hermes_utils.request_installed_profiles(euicc_path, self.hermes_manager)
+
+            profiles_count = len(installed_profiles)
+            if profiles_count < 2:
+                raise error.TestError('Two distinct profiles need to be '
+                        'installed before test begins but count is %d'
+                        % profiles_count)
+
+            first_iccid = list(installed_profiles.values())[0].iccid
+            second_iccid = list(installed_profiles.values())[1].iccid
+
+        if not first_iccid or not second_iccid:
+            fail_iccid = 'first' if not first_iccid else 'second'
+            raise error.TestError('Could not get ' + fail_iccid + ' iccid')
+
+        if first_iccid == second_iccid:
+            raise error.TestError('Two distinct profiles need to be installed '
+                'before test begins. Got only ' + first_iccid)
+
+        # With both profiles available, disable the first profile if it is
+        # enabled and enable the second profile if it is disabled.
+        logging.info('Disabling first profile to prevent enabling already '
+                    'enabled profile in next stress loop. first_iccid:%s, '
+                    'second_iccid:%s', first_iccid, second_iccid)
+        # Get each profile's state to make sure it is in the expected state.
+        first_state = hermes_utils.get_profile_state(
+        euicc_path, self.hermes_manager, first_iccid)
+
+        if first_state:
+            hermes_utils.set_profile_state(
+            False, euicc_path, self.hermes_manager, first_iccid, None)
+
+        second_state = hermes_utils.get_profile_state(
+        euicc_path, self.hermes_manager, second_iccid)
+
+        if not second_state:
+            hermes_utils.set_profile_state(
+            True, euicc_path, self.hermes_manager, second_iccid, None)
+
+        logging.info('Stress enable/disable profiles')
+        for i in range(1,5):
+            logging.info('Iteration :: %d', i)
+            for iccid in [first_iccid, second_iccid]:
+                if not hermes_utils.get_profile_state(
+                    euicc_path, self.hermes_manager, iccid):
+                    logging.info('Enabling profile:%s', iccid)
+                    hermes_utils.enable_or_disable_profile_test(
+                        euicc_path, self.hermes_manager, iccid, True)
+                explicitly_disable_profile = random.choice([True,False])
+                if (explicitly_disable_profile):
+                    if hermes_utils.get_profile_state(
+                        euicc_path, self.hermes_manager, iccid):
+                        logging.info('Disabling profile:%s', iccid)
+                        hermes_utils.enable_or_disable_profile_test(
+                            euicc_path, self.hermes_manager, iccid, False)
+
+        logging.info('HermesMultiProfileTest Completed')
diff --git a/client/site_tests/cellular_Hermes_MultiProfile/control.prodci b/client/site_tests/cellular_Hermes_MultiProfile/control.prodci
new file mode 100644
index 0000000..932373c
--- /dev/null
+++ b/client/site_tests/cellular_Hermes_MultiProfile/control.prodci
@@ -0,0 +1,32 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Hermes_MultiProfile.prodci"
+PURPOSE = "Verify that a failed hermes Enable/Disable attempt reported"
+CRITERIA = """
+This test will fail if could not able to Enable/Disable a given profile through
+Hermes dbus daemon
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:esim"
+PY_VERSION = 3
+DOC = """
+  Tests that profile Enable & Disable works on multiple prod profiles
+
+  This test will fail if failed to Enable profile OR could not able to
+  find and connect to Hermes dbus daemon OR validation of profile status
+  It requires a dut with a modem and euicc
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularESIMTestEnvironment()
+job.run_test('cellular_Hermes_MultiProfile', test_env=test_env,
+            is_prod_ci=True)
+
diff --git a/client/site_tests/cellular_Hermes_MultiProfile/control.testci b/client/site_tests/cellular_Hermes_MultiProfile/control.testci
new file mode 100644
index 0000000..daa65c9
--- /dev/null
+++ b/client/site_tests/cellular_Hermes_MultiProfile/control.testci
@@ -0,0 +1,32 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Hermes_MultiProfile.testci"
+PURPOSE = "Verify that a failed hermes Enable/Disable attempt reported"
+CRITERIA = """
+This test will fail if could not able to Enable/Disable given profile through
+Hermes dbus daemon
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:testesim"
+PY_VERSION = 3
+DOC = """
+  Tests that profile Enable & Disable works on multiple test profiles
+
+  This test will fail if failed to Enable/Disable Profile OR could not able
+  to connect to Hermes dbus daemon OR validation of profile status
+  It requires a dut with a modem and euicc
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularESIMTestEnvironment()
+job.run_test('cellular_Hermes_MultiProfile', test_env=test_env,
+            is_prod_ci=False)
+
diff --git a/client/site_tests/cellular_Hermes_Restart_SlotSwitch/cellular_Hermes_Restart_SlotSwitch.py b/client/site_tests/cellular_Hermes_Restart_SlotSwitch/cellular_Hermes_Restart_SlotSwitch.py
new file mode 100644
index 0000000..ec974f4
--- /dev/null
+++ b/client/site_tests/cellular_Hermes_Restart_SlotSwitch/cellular_Hermes_Restart_SlotSwitch.py
@@ -0,0 +1,200 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import dbus
+import logging
+import re
+import time
+import subprocess
+
+from six.moves import range
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros import upstart
+from autotest_lib.client.cros.cellular import cellular_logging
+from autotest_lib.client.cros.cellular import hermes_constants
+from autotest_lib.client.cros.cellular import hermes_utils
+from autotest_lib.client.cros.cellular import mm1_constants
+from autotest_lib.client.cros.networking import mm1_proxy
+
+log = cellular_logging.SetupCellularLogging('HermesRestartSlotSwitchTest')
+class cellular_Hermes_Restart_SlotSwitch(test.test):
+    """
+    This test restarts hermes or switches slots in between hermes
+    operations.
+
+    The test fails if any of the hermes operations fail.
+
+    Prerequisites
+
+    1) For test CI:
+       Before running this test on test CI, a profile needs to be created on
+    go/stork-profile. The profile needs to be linked to the EID of the dut.
+    Profiles with class=operational and type=Android GTS test are known to work
+    well with this test.
+
+       We rely on the SMDS event to find the activation code for the test.
+    There is a limit of 99 downloads before the profile needs to be deleted and
+    recreated. (b/181723689)
+
+    2) For prod CI:
+       Install a production profile before running the test.
+
+    """
+    version = 1
+
+    def restart_hermes(self):
+        """ Restarts Hermes daemon """
+        logging.info('->restart_hermes start')
+        upstart.restart_job('hermes')
+
+        # Hermes takes 15+ sec to load all APIs; waiting on the Hermes dbus
+        # service alone is not enough.
+        time.sleep(hermes_constants.HERMES_RESTART_WAIT_SECONDS)
+        self.hermes_manager = hermes_utils.connect_to_hermes()
+        if not self.hermes_manager:
+            logging.error('restart_hermes failed, no hermes daemon')
+            raise error.TestFail('restart_hermes operation failed')
+
+        euicc = self.hermes_manager.get_euicc(self.euicc_path)
+        if not euicc:
+            raise error.TestFail('restart_hermes operation failed - no euicc')
+        euicc.use_test_certs(not self.is_prod_ci)
+        logging.info('restart_hermes done')
+
+    def qmi_get_active_slot(self):
+        """
+        Gets the currently active slot.
+
+        @return: the active slot number parsed from the qmicli slot status
+                 output, or None if no active slot is found
+
+        sample slot status:
+        [qrtr://0] Successfully got slots status
+        [qrtr://0] 2 physical slots found:
+        Physical slot 1:
+            Card status: present
+            Slot status: inactive
+            Logical slot: 1
+            ICCID: unknown
+            Protocol: uicc
+            Num apps: 3
+            Is eUICC: yes
+            EID: 89033023425120000000001236712288
+        Physical slot 2:
+            Card status: present
+            Slot status: active
+            ICCID: unknown
+            Protocol: uicc
+            Num apps: 0
+            Is eUICC: yes
+            EID: 89033023425120000000000024200260
+        """
+        # Read qmi slot status and parse to return current active slot no
+        status_cmd = 'qmicli -p -d qrtr://0 --uim-get-slot-status'
+        slot_status = subprocess.check_output(status_cmd, shell=True)
+        slot_status_list = re.findall('.*active.*', slot_status.decode('utf-8'), re.I)
+        for slot_num, status in enumerate(slot_status_list):
+            if "inactive" not in status:
+                logging.info('active slot is %d', slot_num+1)
+                return slot_num+1
+
+    def qmi_switch_slot(self, slot):
+        """
+        Performs a switch to the given slot using qmicli commands.
+
+        Command usage:
+
+        localhost ~ # qmicli -d qrtr://0 --uim-switch-slot 1
+        error: couldn't switch slots: QMI protocol error (26): 'NoEffect'
+        localhost ~ # echo $?
+        1
+        localhost ~ # qmicli -d qrtr://0 --uim-switch-slot 2
+        [qrtr://0] Successfully switched slots
+        localhost ~ # echo $?
+        0
+        """
+        # switch to given slot using qmicli command
+        switch_cmd = 'qmicli -d qrtr://0 --uim-switch-slot ' + str(slot)
+        if (self.qmi_get_active_slot() == slot):
+            logging.info('slot switch not needed, same slot %d is active', slot)
+            return
+        logging.info('call qmicli cmd to switch to:%s cmd:%s', slot, switch_cmd)
+        ret = subprocess.call(switch_cmd, shell=True)
+        # As we are not testing slot switching here, the delay just makes sure
+        # all eUICCs are powered up, allowing the modem FW to switch slots and
+        # load the eUICCs.
+        time.sleep(8)
+        logging.info(switch_cmd + ':return value is %d', ret)
+        if ret != 0:
+            raise error.TestFail('qmi switch slot failed:', slot)
+
+    def hermes_operations_test(self):
+        """
+        Perform Hermes operations with a restart and slot switch in between.
+
+        Runs Install, Enable, Disable and Uninstall operations, interleaved
+        with Hermes restarts and SIM slot switches.
+
+        @raise error.TestFail: if any operation fails
+
+        """
+        try:
+            logging.info('hermes_operations_test start')
+
+            for slot in range(1,3):
+                # Do restart, slotswitch and install, enable, disable, uninstall
+                self.qmi_switch_slot(slot)
+                self.restart_hermes()
+                logging.info('Iteration on slot %d', slot)
+                if not self.is_prod_ci:
+                    installed_iccid = None
+                    logging.info('INSTALL:\n')
+                    installed_iccid = hermes_utils.install_profile(
+                    self.euicc_path, self.hermes_manager, self.is_prod_ci)
+                else:
+                    installed_iccid = hermes_utils.get_iccid_of_disabled_profile(
+                    self.euicc_path, self.hermes_manager, self.is_prod_ci)
+
+                self.qmi_switch_slot(slot)
+                self.restart_hermes()
+                logging.info('ENABLE:\n')
+                hermes_utils.enable_or_disable_profile_test(
+                self.euicc_path, self.hermes_manager, installed_iccid, True)
+
+                self.qmi_switch_slot(slot)
+                self.restart_hermes()
+                logging.info('DISABLE:\n')
+                hermes_utils.enable_or_disable_profile_test(
+                self.euicc_path, self.hermes_manager, installed_iccid, False)
+
+                if not self.is_prod_ci:
+                    self.qmi_switch_slot(slot)
+                    self.restart_hermes()
+                    logging.info('UNINSTALL:\n')
+                    hermes_utils.uninstall_profile_test(
+                    self.euicc_path, self.hermes_manager, installed_iccid)
+
+            logging.info('===hermes_operations_test succeeded===\n')
+        except dbus.DBusException as e:
+            logging.error('hermes_operations_test failed')
+            raise error.TestFail('hermes_operations_test failed: %s' % e)
+
+    def run_once(self, test_env, is_prod_ci=False):
+        """
+        Perform hermes operations with hermes restarts and SIM slot switches
+
+        """
+        self.test_env = test_env
+        self.is_prod_ci = is_prod_ci
+
+        self.mm_proxy, self.hermes_manager, self.euicc_path = \
+                    hermes_utils.initialize_test(is_prod_ci)
+
+        self.hermes_operations_test()
+        logging.info('HermesRestartSlotSwitchTest Completed')
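
A note on the fixed 8 second sleep in qmi_switch_slot above: the wait could be made
deterministic by polling instead. The following is a minimal sketch only, not part of
this patch; it assumes the qmi_get_active_slot helper defined in this file and the
utils.poll_for_condition helper from autotest_lib.client.bin.utils, and the
wait_for_active_slot name is hypothetical.

from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib import error

def wait_for_active_slot(test, slot, timeout=30):
    """Poll qmicli until |slot| is reported active, instead of sleeping.

    |test| is an instance of the test class above that provides
    qmi_get_active_slot().
    """
    utils.poll_for_condition(
            lambda: test.qmi_get_active_slot() == slot,
            exception=error.TestFail('slot %d did not become active' % slot),
            timeout=timeout,
            sleep_interval=2)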
diff --git a/client/site_tests/cellular_Hermes_Restart_SlotSwitch/control.prodci b/client/site_tests/cellular_Hermes_Restart_SlotSwitch/control.prodci
new file mode 100644
index 0000000..685f315
--- /dev/null
+++ b/client/site_tests/cellular_Hermes_Restart_SlotSwitch/control.prodci
@@ -0,0 +1,35 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Hermes_Restart_SlotSwitch.prodci"
+PURPOSE = "Verify that a failed hermes random esim operations attempt reported"
+CRITERIA = """
+  This test will fail if failed to Install, Enable, Disable, Uninstall Profile
+  OR could not able to connect to Hermes dbus daemon.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:esim, board:trogdor"
+PY_VERSION = 3
+
+DOC = """
+  Defines randomly occuring operations:
+  RS = Restart Hermes + Slot Switch
+  Do test RS-> Install -> RS -> Enable -> RS-> Disable -> RS -> Uninstall
+
+  This test will fail if failed to Install, Enable, Disable, Uninstall Profile
+  OR could not able to connect to Hermes dbus daemon.
+  It requires a dut with a modem and euicc
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularESIMTestEnvironment()
+job.run_test('cellular_Hermes_Restart_SlotSwitch', test_env=test_env,
+            is_prod_ci=True)
+
diff --git a/client/site_tests/cellular_Hermes_Restart_SlotSwitch/control.testci b/client/site_tests/cellular_Hermes_Restart_SlotSwitch/control.testci
new file mode 100644
index 0000000..18becc4
--- /dev/null
+++ b/client/site_tests/cellular_Hermes_Restart_SlotSwitch/control.testci
@@ -0,0 +1,35 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Hermes_Restart_SlotSwitch.testci"
+PURPOSE = "Verify that a failed hermes random esim operations attempt reported"
+CRITERIA = """
+  This test will fail if failed to Install, Enable, Disable, Uninstall Profile
+  OR could not able to connect to Hermes dbus daemon.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:testesim, board:trogdor"
+PY_VERSION = 3
+
+DOC = """
+  Defines randomly occuring operations:
+  RS = Restart Hermes + Slot Switch
+  Do test RS-> Install -> RS -> Enable -> RS-> Disable -> RS -> Uninstall
+
+  This test will fail if failed to Install, Enable, Disable, Uninstall Profile
+  OR could not able to connect to Hermes dbus daemon.
+  It requires a dut with a modem and euicc
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularESIMTestEnvironment()
+job.run_test('cellular_Hermes_Restart_SlotSwitch', test_env=test_env,
+            is_prod_ci=False)
+
diff --git a/client/site_tests/cellular_Hermes_SingleProfile/cellular_Hermes_SingleProfile.py b/client/site_tests/cellular_Hermes_SingleProfile/cellular_Hermes_SingleProfile.py
new file mode 100644
index 0000000..0eaaef1
--- /dev/null
+++ b/client/site_tests/cellular_Hermes_SingleProfile/cellular_Hermes_SingleProfile.py
@@ -0,0 +1,60 @@
+# Lint as: python2, python3
+# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.cros.cellular import cellular_logging
+from autotest_lib.client.cros.cellular import hermes_utils
+
+log = cellular_logging.SetupCellularLogging('HermesSingleProfileTest')
+
+class cellular_Hermes_SingleProfile(test.test):
+    """
+    Tests the Enable and Disable functions on the eUICC present on the DUT
+
+    This test fails if it is not able to Enable/Disable a given eUICC profile
+
+    Prerequisites
+
+    1) For test CI:
+       Before running this test on test CI, a profile needs to be created on
+    go/stork-profile and linked to the EID of the DUT. Profiles with
+    class=operational and type=Android GTS test are known to work well with
+    this test.
+
+       We rely on the SMDS event to find the activation code for the test.
+    There is a limit of 99 downloads before the profile needs to be deleted
+    and recreated (b/181723689).
+
+    2) For prod CI:
+       Install a production profile before running the test.
+
+    """
+    version = 1
+
+    def run_once(self, is_prod_ci=False):
+        """ Enable Disable Euicc by enabling or disabling a profile """
+        self.is_prod_ci = is_prod_ci
+
+        self.mm_proxy, self.hermes_manager, euicc_path = \
+            hermes_utils.initialize_test(is_prod_ci)
+
+        self.installed_iccid = None
+
+        self.installed_iccid = hermes_utils.install_profile(
+        euicc_path, self.hermes_manager, self.is_prod_ci)
+
+        hermes_utils.enable_or_disable_profile_test(
+        euicc_path, self.hermes_manager, self.installed_iccid, True)
+
+        hermes_utils.enable_or_disable_profile_test(
+        euicc_path, self.hermes_manager, self.installed_iccid, False)
+
+        if not self.is_prod_ci:
+            hermes_utils.uninstall_profile_test(
+            euicc_path, self.hermes_manager, self.installed_iccid)
+
+        logging.info('HermesSingleProfileTest Completed')
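
Unlike the Restart_SlotSwitch test earlier in this patch, the test above calls the
hermes_utils helpers without catching dbus errors. The sketch below is illustrative
only and not part of this patch; it shows how the same dbus.DBusException handling
pattern could be applied, assuming the hermes_utils API used in this diff. The
toggle_profile name is hypothetical.

import dbus

from autotest_lib.client.common_lib import error
from autotest_lib.client.cros.cellular import hermes_utils

def toggle_profile(euicc_path, hermes_manager, iccid):
    """Enable then disable |iccid|, converting dbus errors to test failures."""
    try:
        hermes_utils.enable_or_disable_profile_test(
                euicc_path, hermes_manager, iccid, True)
        hermes_utils.enable_or_disable_profile_test(
                euicc_path, hermes_manager, iccid, False)
    except dbus.DBusException as e:
        raise error.TestFail('profile toggle failed: %s' % e)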
diff --git a/client/site_tests/cellular_Hermes_SingleProfile/control.prodci b/client/site_tests/cellular_Hermes_SingleProfile/control.prodci
new file mode 100644
index 0000000..5eed926
--- /dev/null
+++ b/client/site_tests/cellular_Hermes_SingleProfile/control.prodci
@@ -0,0 +1,31 @@
+# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Hermes_SingleProfile.prodci"
+PURPOSE = "Verify that a failed hermes connect attempt reported"
+CRITERIA = """
+This test will fail if could not able to Enable/Disable a given Euicc through
+Hermes dbus daemon
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:esim"
+PY_VERSION = 3
+DOC = """
+  Tests that Hermes Euicc Enable and Disable
+
+  This test will fail if failed to Enable/Disable Euicc OR could not able to
+  find and connect to Hermes dbus daemon
+  It requires a dut with a modem and euicc
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularESIMTestEnvironment()
+job.run_test('cellular_Hermes_SingleProfile', test_env=test_env,
+            is_prod_ci=True)
diff --git a/client/site_tests/cellular_Hermes_SingleProfile/control.testci b/client/site_tests/cellular_Hermes_SingleProfile/control.testci
new file mode 100644
index 0000000..d324118
--- /dev/null
+++ b/client/site_tests/cellular_Hermes_SingleProfile/control.testci
@@ -0,0 +1,31 @@
+# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Hermes_SingleProfile.testci"
+PURPOSE = "Verify that a failed hermes connect attempt reported"
+CRITERIA = """
+This test will fail if could not able to Enable/Disable a given Euicc through
+Hermes dbus daemon
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:testesim"
+PY_VERSION = 3
+DOC = """
+  Tests that Hermes Euicc Enable and Disable
+
+  This test will fail if failed to Enable/Disable Euicc OR could not able to
+  find and connect to Hermes dbus daemon
+  It requires a dut with a modem and euicc
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularESIMTestEnvironment()
+job.run_test('cellular_Hermes_SingleProfile', test_env=test_env,
+            is_prod_ci=False)
diff --git a/client/site_tests/cellular_Identifiers/cellular_Identifiers.py b/client/site_tests/cellular_Identifiers/cellular_Identifiers.py
index 52448b3..fb90eea 100644
--- a/client/site_tests/cellular_Identifiers/cellular_Identifiers.py
+++ b/client/site_tests/cellular_Identifiers/cellular_Identifiers.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,6 +6,7 @@
 import logging
 
 from autotest_lib.client.bin import test
+from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.cellular import mm1_constants
 from autotest_lib.client.cros.cellular.pseudomodem import sim
@@ -90,11 +92,16 @@
         """Called by autotest to run this test."""
         with test_env:
             device = test_env.shill.find_cellular_device_object()
+            device_props = device.GetProperties()
             service = test_env.shill.find_cellular_service_object()
-            device_props = device.GetProperties(utf8_strings=True)
-            service_props = service.GetProperties(utf8_strings=True)
+            service_props = service.GetProperties()
             self.is_modemmanager = 'freedesktop' in device_props['DBus.Service']
 
+            utils.poll_for_condition(
+                test_env.modem.ModemIsRegistered,
+                exception=error.TestFail(
+                    'Modem failed to register with the network'),
+                timeout=SERVICE_REGISTRATION_TIMEOUT)
             modem_props = test_env.modem.GetModemProperties()
 
             logging.debug('shill service properties: %s', service_props)
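
SERVICE_REGISTRATION_TIMEOUT is referenced by the new polling call but is not part of
this hunk; it is presumably a module-level constant defined elsewhere in
cellular_Identifiers.py. A minimal standalone sketch of the same pattern, with an
illustrative timeout value and a hypothetical wait_for_registration name:

from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib import error

SERVICE_REGISTRATION_TIMEOUT = 60  # seconds; illustrative value only

def wait_for_registration(modem):
    """Block until the modem reports network registration, or fail the test."""
    utils.poll_for_condition(
            modem.ModemIsRegistered,
            exception=error.TestFail(
                    'Modem failed to register with the network'),
            timeout=SERVICE_REGISTRATION_TIMEOUT)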
diff --git a/client/site_tests/cellular_Identifiers/control b/client/site_tests/cellular_Identifiers/control
index f146446..77656d3 100644
--- a/client/site_tests/cellular_Identifiers/control
+++ b/client/site_tests/cellular_Identifiers/control
@@ -1,4 +1,4 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -9,11 +9,12 @@
 This test will fail if one of the following conditions occur:
   - Cellular modem does not provide an identifier
 """
-ATTRIBUTES = "suite:cellular_qual"
+ATTRIBUTES = "suite:cellular_ota_avl"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 DOC = """
   Tests that a cellular modem provides correct identification information
 
diff --git a/client/site_tests/cellular_Identifiers/control.amarisoft b/client/site_tests/cellular_Identifiers/control.amarisoft
new file mode 100644
index 0000000..2e93c07
--- /dev/null
+++ b/client/site_tests/cellular_Identifiers/control.amarisoft
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Identifiers.amarisoft"
+PURPOSE = "Verify cellular modem provides correct identification information."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - Cellular modem does not provide an identifier
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:cellular_ota_flaky"
+DEPENDENCIES = "carrier:amarisoft"
+PY_VERSION = 3
+DOC = """
+  Tests that a cellular modem provides correct identification information
+
+  The test queries the modem for the following identifiers:
+    - MEID
+    - IMEI
+    - IMSI
+    - ICCID
+    - SIM operator ID
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Identifiers', test_env=test_env)
diff --git a/client/site_tests/cellular_Identifiers/control.att b/client/site_tests/cellular_Identifiers/control.att
index 06a4407..aa6cee5 100644
--- a/client/site_tests/cellular_Identifiers/control.att
+++ b/client/site_tests/cellular_Identifiers/control.att
@@ -13,8 +13,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_ota"
+ATTRIBUTES = "suite:cellular_ota, suite:cellular-cq"
 DEPENDENCIES = "carrier:att"
+PY_VERSION = 3
 DOC = """
   Tests that a cellular modem provides correct identification information
 
diff --git a/client/site_tests/cellular_Identifiers/control.docomo b/client/site_tests/cellular_Identifiers/control.docomo
new file mode 100644
index 0000000..0e35977
--- /dev/null
+++ b/client/site_tests/cellular_Identifiers/control.docomo
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Identifiers.docomo"
+PURPOSE = "Verify cellular modem provides correct identification information."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - Cellular modem does not provide an identifier
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:cellular_ota_flaky"
+DEPENDENCIES = "carrier:docomo"
+PY_VERSION = 3
+DOC = """
+  Tests that a cellular modem provides correct identification information
+
+  The test queries the modem for the following identifiers:
+    - MEID
+    - IMEI
+    - IMSI
+    - ICCID
+    - SIM operator ID
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Identifiers', test_env=test_env)
diff --git a/client/site_tests/cellular_Identifiers/control.ee b/client/site_tests/cellular_Identifiers/control.ee
new file mode 100644
index 0000000..28af9a9
--- /dev/null
+++ b/client/site_tests/cellular_Identifiers/control.ee
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Identifiers.ee"
+PURPOSE = "Verify cellular modem provides correct identification information."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - Cellular modem does not provide an identifier
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:cellular_ota_flaky"
+DEPENDENCIES = "carrier:ee"
+PY_VERSION = 3
+DOC = """
+  Tests that a cellular modem provides correct identification information
+
+  The test queries the modem for the following identifiers:
+    - MEID
+    - IMEI
+    - IMSI
+    - ICCID
+    - SIM operator ID
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Identifiers', test_env=test_env)
diff --git a/client/site_tests/cellular_Identifiers/control.kddi b/client/site_tests/cellular_Identifiers/control.kddi
new file mode 100644
index 0000000..213aa20
--- /dev/null
+++ b/client/site_tests/cellular_Identifiers/control.kddi
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Identifiers.kddi"
+PURPOSE = "Verify cellular modem provides correct identification information."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - Cellular modem does not provide an identifier
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:cellular_ota_flaky"
+DEPENDENCIES = "carrier:kddi"
+PY_VERSION = 3
+DOC = """
+  Tests that a cellular modem provides correct identification information
+
+  The test queries the modem for the following identifiers:
+    - MEID
+    - IMEI
+    - IMSI
+    - ICCID
+    - SIM operator ID
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Identifiers', test_env=test_env)
diff --git a/client/site_tests/cellular_Identifiers/control.pseudomodem b/client/site_tests/cellular_Identifiers/control.pseudomodem
index 2a606df..0bc369d 100644
--- a/client/site_tests/cellular_Identifiers/control.pseudomodem
+++ b/client/site_tests/cellular_Identifiers/control.pseudomodem
@@ -9,11 +9,11 @@
 This test will fail if one of the following conditions occur:
   - Cellular modem does not provide an identifier
 """
-ATTRIBUTES = "suite:cellular_pseudomodem"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 DOC = """
   Tests that a cellular modem provides correct identification information
 
diff --git a/client/site_tests/cellular_Identifiers/control.rakuten b/client/site_tests/cellular_Identifiers/control.rakuten
new file mode 100644
index 0000000..aeb795a
--- /dev/null
+++ b/client/site_tests/cellular_Identifiers/control.rakuten
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Identifiers.rakuten"
+PURPOSE = "Verify cellular modem provides correct identification information."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - Cellular modem does not provide an identifier
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:cellular_ota_flaky"
+DEPENDENCIES = "carrier:rakuten"
+PY_VERSION = 3
+DOC = """
+  Tests that a cellular modem provides correct identification information
+
+  The test queries the modem for the following identifiers:
+    - MEID
+    - IMEI
+    - IMSI
+    - ICCID
+    - SIM operator ID
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Identifiers', test_env=test_env)
diff --git a/client/site_tests/cellular_Identifiers/control.softbank b/client/site_tests/cellular_Identifiers/control.softbank
new file mode 100644
index 0000000..de4fbb9
--- /dev/null
+++ b/client/site_tests/cellular_Identifiers/control.softbank
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Identifiers.softbank"
+PURPOSE = "Verify cellular modem provides correct identification information."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - Cellular modem does not provide an identifier
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:cellular_ota_flaky"
+DEPENDENCIES = "carrier:softbank"
+PY_VERSION = 3
+DOC = """
+  Tests that a cellular modem provides correct identification information
+
+  The test queries the modem for the following identifiers:
+    - MEID
+    - IMEI
+    - IMSI
+    - ICCID
+    - SIM operator ID
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Identifiers', test_env=test_env)
diff --git a/client/site_tests/cellular_Identifiers/control.sprint b/client/site_tests/cellular_Identifiers/control.sprint
index 44c4102..e15ac89 100644
--- a/client/site_tests/cellular_Identifiers/control.sprint
+++ b/client/site_tests/cellular_Identifiers/control.sprint
@@ -15,6 +15,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:sprint"
+PY_VERSION = 3
 DOC = """
   Tests that a cellular modem provides correct identification information
 
diff --git a/client/site_tests/cellular_Identifiers/control.tmobile b/client/site_tests/cellular_Identifiers/control.tmobile
index 0ab609a..8cc612b 100644
--- a/client/site_tests/cellular_Identifiers/control.tmobile
+++ b/client/site_tests/cellular_Identifiers/control.tmobile
@@ -13,8 +13,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_ota_flaky"
+ATTRIBUTES = "suite:cellular_ota, suite:infra_qual_cellular"
 DEPENDENCIES = "carrier:tmobile"
+PY_VERSION = 3
 DOC = """
   Tests that a cellular modem provides correct identification information
 
diff --git a/client/site_tests/cellular_Identifiers/control.verizon b/client/site_tests/cellular_Identifiers/control.verizon
index 4b1846e..407673e 100644
--- a/client/site_tests/cellular_Identifiers/control.verizon
+++ b/client/site_tests/cellular_Identifiers/control.verizon
@@ -13,8 +13,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_ota"
+ATTRIBUTES = "suite:cellular_ota, suite:cellular-cq"
 DEPENDENCIES = "carrier:verizon"
+PY_VERSION = 3
 DOC = """
   Tests that a cellular modem provides correct identification information
 
diff --git a/client/site_tests/cellular_Identifiers/control.vodafone b/client/site_tests/cellular_Identifiers/control.vodafone
new file mode 100644
index 0000000..d34d994
--- /dev/null
+++ b/client/site_tests/cellular_Identifiers/control.vodafone
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Identifiers.vodafone"
+PURPOSE = "Verify cellular modem provides correct identification information."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - Cellular modem does not provide an identifier
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:cellular_ota_flaky"
+DEPENDENCIES = "carrier:vodafone"
+PY_VERSION = 3
+DOC = """
+  Tests that a cellular modem provides correct identification information
+
+  The test queries the modem for the following identifiers:
+    - MEID
+    - IMEI
+    - IMSI
+    - ICCID
+    - SIM operator ID
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Identifiers', test_env=test_env)
diff --git a/client/site_tests/cellular_MbimComplianceControlCommand/cellular_MbimComplianceCID01.py b/client/site_tests/cellular_MbimComplianceControlCommand/cellular_MbimComplianceCID01.py
deleted file mode 100644
index 82e44c3..0000000
--- a/client/site_tests/cellular_MbimComplianceControlCommand/cellular_MbimComplianceCID01.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_cid_device_caps_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCID01(mbim_test_base.MbimTestBase):
-    """
-    Validation of IP flags for functions that support CDMA.
-
-    This test verifies that a function that supports CDMA specifies at least
-    one of the following IP flags: MBIMCtrlCapsCdmaMobileIP,
-    MBIMCtrlCapsCdmaSimpleIP.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 52
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run the CM_01 test. """
-        # Precondition.
-        desc_sequence = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context)
-        descriptors = desc_sequence.run()
-        self.device_context.update_descriptor_cache(descriptors)
-        open_sequence = mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context)
-        open_sequence.run()
-        caps_sequence = mbim_cid_device_caps_sequence.MBIMCIDDeviceCapsSequence(
-                self.device_context)
-        _, caps_response = caps_sequence.run()
-
-        # Step1
-        if (caps_response.cellular_class &
-            mbim_constants.CELLULAR_CLASS_MASK_CDMA):
-            if not ((caps_response.control_caps &
-                     mbim_constants.CTRL_CAPS_MASK_CDMA_MOBILE_IP) or
-                    (caps_response.control_caps &
-                     mbim_constants.CTRL_CAPS_MASK_CDMA_SIMPLE_IP)):
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'mbim1.0:10.5.1.3#1')
diff --git a/client/site_tests/cellular_MbimComplianceControlCommand/cellular_MbimComplianceControlCommand.py b/client/site_tests/cellular_MbimComplianceControlCommand/cellular_MbimComplianceControlCommand.py
deleted file mode 100644
index 197f838..0000000
--- a/client/site_tests/cellular_MbimComplianceControlCommand/cellular_MbimComplianceControlCommand.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_runner
-
-class cellular_MbimComplianceControlCommand(mbim_test_runner.MbimTestRunner):
-    """
-    Main test runner for all the tests within this directory. This just a
-    harness for invoking various tests within this directory which is not
-    currently supported by Autotest.
-
-    """
-    _TEST_AREA_FOLDER = os.path.dirname(__file__)
diff --git a/client/site_tests/cellular_MbimComplianceControlCommand/control.CID01 b/client/site_tests/cellular_MbimComplianceControlCommand/control.CID01
deleted file mode 100644
index d0b1cbd..0000000
--- a/client/site_tests/cellular_MbimComplianceControlCommand/control.CID01
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlCommand.CID01"
-PURPOSE = """
-MBIM Compliance Test: Validation of IP flags for functions that support CDMA.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CID_01 Test listed in the MBIM Compliance Test Suite specification.
-Validation of IP flags for functions that support CDMA.
-"""
-
-job.run_test('cellular_MbimComplianceControlCommand',
-             subtest_name='cellular_MbimComplianceCID01')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM01.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM01.py
deleted file mode 100644
index dd72b9d..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM01.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM01(mbim_test_base.MbimTestBase):
-    """
-    CM_01 Validation of |transaction_id| and |status_codes| in modem's
-    response to MBIM_OPEN_MSG.
-
-    This test verifies that MBIM_OPEN_DONE message is issued by the function
-    in response to MBIM_OPEN_MSG message and checks TransactionId and
-    Status fields.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 38
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run the CM_01 test. """
-        # Precondition.
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-
-        # Step 1
-        open_message, response_message = (
-                mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                        self.device_context).run())
-
-        # TODO(rpius): Complete the rest of the test
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM02.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM02.py
deleted file mode 100644
index f4513b8a..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM02.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM02(mbim_test_base.MbimTestBase):
-    """
-    CM_02 Validation of Message Length of the response to MBIM_OPEN_MSG.
-
-    This test validates MessageLength field in MBIM_MESSAGE_HEADER.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 38
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run the CM_02 test. """
-        # Precondition.
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-
-        # Step 1
-        _, response_message = (
-                mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                        self.device_context).run())
-
-        # Validate message length of response to MBIM_OPEN_MESSAGE.
-        if response_message.message_length < 0x0C:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.1#2')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM03.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM03.py
deleted file mode 100644
index 94ba196..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM03.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM03(mbim_test_base.MbimTestBase):
-    """
-    CM_03 Validation of Function's Behavior for an Unsynchronized MBIM_OPEN_MSG.
-
-    This test validates function's behavior in case of an unsynchronized open
-    operation.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 38
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_03 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-
-        # Step 1
-        _, response_message = (
-                mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                        self.device_context).run())
-
-        # Validate function's behaviour for an unsynchronized MBIM_OPEN_MSG.
-        if response_message.message_type == mbim_constants.MBIM_CLOSE_DONE:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.3.1#1')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM04.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM04.py
deleted file mode 100644
index cfe4af0..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM04.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_cid_device_caps_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM04(mbim_test_base.MbimTestBase):
-    """
-    CM_04 Validation of |transaction_id| in modem's response to MBIM_COMMAND_MSG
-
-    This section contains tests that validate the specifics of MBIM_COMMAND_MSG
-    request and the function's response.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 38
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_04 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-
-        # Step 1
-        command_message, response_message = (
-                mbim_cid_device_caps_sequence.MBIMCIDDeviceCapsSequence(
-                        self.device_context).run())
-        # Validate |transaction_id| in the response to MBIM_COMMAND_MSG.
-        if response_message.transaction_id != command_message.transaction_id:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.4.3')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM05.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM05.py
deleted file mode 100644
index e03b9b9..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM05.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import common
-from autotest_lib.client.bin import utils
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_channel
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_command_message
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_message_request
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_message_response
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM05(mbim_test_base.MbimTestBase):
-    """
-    CM_05 Validation for modem's responses to two consecutive MBIM command
-    messages are correct with regards to |transaction_id|, |service_id| and
-    |cid|.
-
-    This test verifies that the function uses separate transactions to deliver
-    control message responses.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 39
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_05 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-
-        device_context = self.device_context
-        descriptor_cache = device_context.descriptor_cache
-        self.channel = mbim_channel.MBIMChannel(
-                device_context._device,
-                descriptor_cache.mbim_communication_interface.bInterfaceNumber,
-                descriptor_cache.interrupt_endpoint.bEndpointAddress,
-                device_context.max_control_transfer_size)
-
-        # Step 1
-        caps_command_message = mbim_command_message.MBIMDeviceCapsQuery()
-        caps_packets = mbim_message_request.generate_request_packets(
-                caps_command_message,
-                device_context.max_control_transfer_size)
-        self.caps_transaction_id = caps_command_message.transaction_id
-        self.channel.unidirectional_transaction(*caps_packets)
-
-        # Step 2
-        services_command_message = (
-                mbim_command_message.MBIMDeviceServicesQuery())
-        services_packets = mbim_message_request.generate_request_packets(
-                services_command_message,
-                device_context.max_control_transfer_size)
-        self.services_transaction_id = services_command_message.transaction_id
-        self.channel.unidirectional_transaction(*services_packets)
-
-        # Step 3
-        utils.poll_for_condition(
-                self._get_response_packets,
-                timeout=5,
-                exception=mbim_errors.MBIMComplianceChannelError(
-                        'Failed to retrieve the response packets to specific '
-                        'control messages.'))
-        self.channel.close()
-
-        caps_response_message = self.caps_response
-        services_response_message = self.services_response
-        is_caps_message_valid = isinstance(
-                caps_response_message,
-                mbim_command_message.MBIMDeviceCapsInfo)
-        is_services_message_valid = isinstance(
-                services_response_message,
-                mbim_command_message.MBIMDeviceServicesInfo)
-        if not ((is_caps_message_valid and is_services_message_valid) and
-                (caps_response_message.transaction_id ==
-                 caps_command_message.transaction_id) and
-                (caps_response_message.device_service_id ==
-                 caps_command_message.device_service_id) and
-                caps_response_message.cid == caps_command_message.cid and
-                (services_command_message.transaction_id ==
-                 services_response_message.transaction_id) and
-                (services_command_message.device_service_id ==
-                 services_response_message.device_service_id) and
-                services_command_message.cid == services_response_message.cid):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:8.1.2#2')
-
-
-    def _get_response_packets(self):
-        """
-        Condition method for |poll_for_condition| to check the retrieval of
-        target packets.
-
-        @returns True if both caps response packet and services response packet
-                are received, False otherwise.
-
-        """
-        packets = self.channel.get_outstanding_packets()
-        self.caps_response = None
-        self.services_response = None
-        for packet in packets:
-            message_response = mbim_message_response.parse_response_packets(
-                    packet)
-            if message_response.transaction_id == self.caps_transaction_id:
-                self.caps_response = message_response
-            elif message_response.transaction_id == self.services_transaction_id:
-                self.services_response = message_response
-            if self.caps_response and self.services_response:
-                return True
-        return False
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM06.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM06.py
deleted file mode 100644
index 47335a2..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM06.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_cid_device_caps_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-class cellular_MbimComplianceCM06(mbim_test_base.MbimTestBase):
-    """
-    CM_06 Validation of |status_codes| in modem's response to MBIM_COMMAND_MSG.
-
-    This test verifies that the function returns MBIM_STATUS_SUCCESS in Status
-    field of MBIM_COMMAND_DONE response in case of a successfully executed
-    command.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 39
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_06 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-
-        # Step 1
-        _, response_message = (
-                mbim_cid_device_caps_sequence.MBIMCIDDeviceCapsSequence(
-                        self.device_context).run())
-        if response_message.status_codes != mbim_constants.MBIM_STATUS_SUCCESS:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.4.5#1')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM07.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM07.py
deleted file mode 100644
index adf37cd..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM07.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_channel
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_message_request
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_message_response
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM07(mbim_test_base.MbimTestBase):
-    """
-    CM_07 Validation of status in case of an unsupported CID in MBIM_COMMAND_MSG.
-
-    This test verifies that the function returns MBIM_STATUS_NO_DEVICE_SUPPORT
-    in Status field of the MBIM_COMMAND_DONE response when a command is not
-    supported by the function.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 40
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_07 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-
-        # Step 1
-        # 255 is an unsupported CID.
-        device_context = self.device_context
-        descriptor_cache = device_context.descriptor_cache
-        command_message = mbim_message_request.MBIMCommand(
-                device_service_id=mbim_constants.UUID_BASIC_CONNECT.bytes,
-                cid=255,
-                command_type=mbim_constants.COMMAND_TYPE_QUERY,
-                information_buffer_length=0)
-        packets = mbim_message_request.generate_request_packets(
-                command_message,
-                device_context.max_control_transfer_size)
-        channel = mbim_channel.MBIMChannel(
-                device_context._device,
-                descriptor_cache.mbim_communication_interface.bInterfaceNumber,
-                descriptor_cache.interrupt_endpoint.bEndpointAddress,
-                device_context.max_control_transfer_size)
-        response_packets = channel.bidirectional_transaction(*packets)
-        channel.close()
-
-        # Step 2
-        response_message = mbim_message_response.parse_response_packets(
-                response_packets)
-
-        # Step 3
-        if (response_message.message_type != mbim_constants.MBIM_COMMAND_DONE or
-            (response_message.status_codes !=
-             mbim_constants.MBIM_STATUS_NO_DEVICE_SUPPORT)):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.4.5#2')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM08.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM08.py
deleted file mode 100644
index f1a1421..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM08.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import array
-import struct
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_channel
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_message_request
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_message_response
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM08(mbim_test_base.MbimTestBase):
-    """
-    CM_08 Validation of InformationBuffer in case of a failure in
-    MBIM_COMMAND_MSG.
-
-    This test verifies that in case of a command failure the buffer in the
-    MBIM_COMMAND_DONE response is empty.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 40
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_08 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-
-        # Step 1
-        device_context = self.device_context
-        descriptor_cache = device_context.descriptor_cache
-        command_message = mbim_message_request.MBIMCommand(
-                device_service_id=mbim_constants.UUID_BASIC_CONNECT.bytes,
-                cid=mbim_constants.MBIM_CID_RADIO_STATE,
-                command_type=mbim_constants.COMMAND_TYPE_SET,
-                information_buffer_length=4,
-                payload_buffer=array.array('B', struct.pack('I', 2)))
-        packets = mbim_message_request.generate_request_packets(
-                command_message,
-                device_context.max_control_transfer_size)
-        channel = mbim_channel.MBIMChannel(
-                device_context._device,
-                descriptor_cache.mbim_communication_interface.bInterfaceNumber,
-                descriptor_cache.interrupt_endpoint.bEndpointAddress,
-                device_context.max_control_transfer_size)
-        response_packets = channel.bidirectional_transaction(*packets)
-        channel.close()
-
-        # Step 2
-        response_message = mbim_message_response.parse_response_packets(
-                response_packets)
-
-        # Step 3
-        if ((response_message.message_type !=
-             mbim_constants.MBIM_COMMAND_DONE) or
-            (response_message.status_codes ==
-             mbim_constants.MBIM_STATUS_SUCCESS) or
-            response_message.information_buffer_length != 0):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.4.5#3')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM09.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM09.py
deleted file mode 100644
index 2863852..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM09.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import connect_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM09(mbim_test_base.MbimTestBase):
-    """
-    CM_09 Validation of TransactionId for notifications received after connect
-    sequence.
-
-    This test verifies that TransactionId for notifications is zero.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 41
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_09 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-
-        # Step 1
-        _, _, notifications = (
-                connect_sequence.ConnectSequence(self.device_context).run())
-
-        # Step 2
-        for notification in notifications:
-            if notification.transaction_id != 0:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'mbim1.0:9.1#1')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM10.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM10.py
deleted file mode 100644
index 51fb5b8..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM10.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_close_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM10(mbim_test_base.MbimTestBase):
-    """
-    CM_10 Validation of Modem's Response to MBIM_CLOSE_MSG.
-
-    This test verifies that an MBIM_CLOSE_DONE message is issued by the
-    function in response to an MBIM_CLOSE_MSG message and checks TransactionId
-    and Status fields.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 41
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_10 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-
-        # Step 1
-        close_message, response_message = mbim_close_sequence.MBIMCloseSequence(
-                self.device_context).run()
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM13.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM13.py
deleted file mode 100644
index 434d7d3..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM13.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_channel
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_command_message
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_message_request
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_message_response
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import connect_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_close_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM13(mbim_test_base.MbimTestBase):
-    """
-    CM_13 Validation of active context termination on function's closing.
-
-    This test verifies that no active context remains after the function is
-    closed.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 42
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_13 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-        connect_sequence.ConnectSequence(self.device_context).run()
-        mbim_close_sequence.MBIMCloseSequence(self.device_context).run()
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-
-        # Step 1
-        device_context = self.device_context
-        descriptor_cache = device_context.descriptor_cache
-        command_message = mbim_command_message.MBIMConnectQuery(session_id=0)
-        packets = mbim_message_request.generate_request_packets(
-                command_message,
-                device_context.max_control_transfer_size)
-        channel = mbim_channel.MBIMChannel(
-                device_context._device,
-                descriptor_cache.mbim_communication_interface.bInterfaceNumber,
-                descriptor_cache.interrupt_endpoint.bEndpointAddress,
-                device_context.max_control_transfer_size)
-        response_packets = channel.bidirectional_transaction(*packets)
-        channel.close()
-
-        # Step 2
-        response_message = mbim_message_response.parse_response_packets(
-                response_packets)
-
-        # Step 3
-        if (response_message.status_codes !=
-            mbim_constants.MBIM_STATUS_CONTEXT_NOT_ACTIVATED):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.3.2#3')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM14.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM14.py
deleted file mode 100644
index e1ed35c..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM14.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import connect_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_close_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM14(mbim_test_base.MbimTestBase):
-    """
-    CM_14 Validation of not sending data payload in error messages.
-
-    This test verifies that an MBIM_FUNCTION_ERROR_MSG does not contain a data
-    payload.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 43
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_14 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context).run()
-        mbim_close_sequence.MBIMCloseSequence(self.device_context).run()
-
-        # Step 1
-        _, response_message, _ = (
-                connect_sequence.ConnectSequence(self.device_context).run(
-                        raise_exception_on_failure=False))
-
-        # Step 2
-        if ((response_message.message_type !=
-             mbim_constants.MBIM_FUNCTION_ERROR_MSG) or
-            (response_message.message_length != 16)):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.3.4#2')
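
The 16-byte length checked in the CM_14 test above corresponds to an MBIM_FUNCTION_ERROR_MSG that carries only its message header (MessageType, MessageLength, TransactionId, ErrorStatusCode) and no InformationBuffer. A minimal parsing sketch, assuming a raw little-endian control-transfer buffer; the field layout follows MBIM 1.0 and the names are illustrative:

import struct

MBIM_FUNCTION_ERROR_MSG_TYPE = 0x80000004  # MessageType value per MBIM 1.0

def parse_function_error_msg(raw):
    """Parse a header-only MBIM_FUNCTION_ERROR_MSG (exactly 16 bytes)."""
    if len(raw) != 16:
        raise ValueError('expected a header-only error message (16 bytes)')
    message_type, message_length, transaction_id, error_status_code = (
            struct.unpack('<4I', raw))
    return message_type, message_length, transaction_id, error_status_code
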
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM15.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM15.py
deleted file mode 100644
index f0af3b3..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM15.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_command_message
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_cid_device_caps_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM15(mbim_test_base.MbimTestBase):
-    """
-    CM_15 Validation of message fragmentation ability.
-
-    This test verifies that the function follows the rules of control message
-    fragmentation.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 43
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_15 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        open_sequence = mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context)
-        open_sequence.run(max_control_transfer_size=64)
-
-        # Step 1
-        caps_sequence = mbim_cid_device_caps_sequence.MBIMCIDDeviceCapsSequence(
-                self.device_context)
-        _, response_message = caps_sequence.run()
-        if not isinstance(response_message,
-                          mbim_command_message.MBIMDeviceCapsInfo):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.2')
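
CM_15 above forces fragmentation by capping the control transfer size at 64 bytes, so any control message longer than that limit has to be split across multiple transfers. A minimal sketch of the size-based split only, with hypothetical names (real MBIM fragments also carry a fragment header with TotalFragments/CurrentFragment fields, which is omitted here):

def split_control_message(message_bytes, max_control_transfer_size):
    """Split a control message into transfer-sized chunks."""
    return [message_bytes[offset:offset + max_control_transfer_size]
            for offset in range(0, len(message_bytes),
                                max_control_transfer_size)]
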
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM16.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM16.py
deleted file mode 100644
index 707804a..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceCM16.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-import common
-from autotest_lib.client.bin import utils
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_channel
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_command_message
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_message_request
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_message_response
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceCM16(mbim_test_base.MbimTestBase):
-    """
-    CM_16 Validation of fragmented message transmission in case of multiple
-    fragmented messages.
-
-    This test verifies that fragmented messages sent from the function are not
-    intermixed. Note that this test is only applicable for devices that support
-    multiple outstanding commands.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 44
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run CM_16 test. """
-        # Precondition
-        desc_sequence = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context)
-        descriptors = desc_sequence.run()
-        self.device_context.update_descriptor_cache(descriptors)
-        open_sequence = mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context)
-        open_sequence.run(max_control_transfer_size=64)
-
-        device_context = self.device_context
-        descriptor_cache = device_context.descriptor_cache
-        self.channel = mbim_channel.MBIMChannel(
-                device_context.device,
-                descriptor_cache.mbim_communication_interface.bInterfaceNumber,
-                descriptor_cache.interrupt_endpoint.bEndpointAddress,
-                device_context.max_control_transfer_size)
-
-        # Step 1
-        caps_command_message = mbim_command_message.MBIMDeviceCapsQuery()
-        caps_packets = mbim_message_request.generate_request_packets(
-                caps_command_message,
-                device_context.max_control_transfer_size)
-        self.caps_transaction_id = caps_command_message.transaction_id
-
-        # Step 2
-        services_command_message = (
-                mbim_command_message.MBIMDeviceServicesQuery())
-        services_packets = mbim_message_request.generate_request_packets(
-                services_command_message,
-                device_context.max_control_transfer_size)
-        self.services_transaction_id = services_command_message.transaction_id
-
-        # Transmit the messages now
-        self.channel.unidirectional_transaction(*caps_packets)
-        self.channel.unidirectional_transaction(*services_packets)
-
-        # Step 3
-        utils.poll_for_condition(
-                self._get_response_packets,
-                timeout=5,
-                exception=mbim_errors.MBIMComplianceChannelError(
-                        'Failed to retrieve the response packets to specific '
-                        'control messages.'))
-        self.channel.close()
-
-        caps_response_message = self.caps_response
-        services_response_message = self.services_response
-        is_caps_message_valid = isinstance(
-                caps_response_message,
-                mbim_command_message.MBIMDeviceCapsInfo)
-        is_services_message_valid = isinstance(
-                services_response_message,
-                mbim_command_message.MBIMDeviceServicesInfo)
-        if not ((is_caps_message_valid and is_services_message_valid) and
-                (caps_response_message.transaction_id ==
-                 caps_command_message.transaction_id) and
-                (caps_response_message.device_service_id ==
-                 caps_command_message.device_service_id) and
-                caps_response_message.cid == caps_command_message.cid and
-                (services_command_message.transaction_id ==
-                 services_response_message.transaction_id) and
-                (services_command_message.device_service_id ==
-                 services_response_message.device_service_id) and
-                services_command_message.cid == services_response_message.cid):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.5#1')
-
-
-    def _get_response_packets(self):
-        """
-        Condition method for |poll_for_condition| to check the retrieval of
-        target packets.
-
-        @returns True if both caps response packet and services response packet
-                are received, False otherwise.
-
-        """
-        try:
-            packets = self.channel.get_outstanding_packets()
-        except mbim_errors.MBIMComplianceChannelError:
-            logging.debug("Error in receiving response fragments from the device")
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.5#1')
-        self.caps_response = None
-        self.services_response = None
-        for packet in packets:
-            try:
-                message_response = mbim_message_response.parse_response_packets(
-                        packet)
-            except mbim_errors.MBIMComplianceControlMessageError:
-                logging.debug("Error in parsing response fragments from the device")
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'mbim1.0:9.5#1')
-            if message_response.transaction_id == self.caps_transaction_id:
-                self.caps_response = message_response
-            elif (message_response.transaction_id ==
-                  self.services_transaction_id):
-                self.services_response = message_response
-            if self.caps_response and self.services_response:
-                return True
-        return False
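
The CM_16 test above keeps two command transactions outstanding and then polls until a well-formed response has been collected for each transaction id. A standalone sketch of that matching step, independent of the autotest channel and polling helpers (names are illustrative):

def match_responses_by_transaction(responses, expected_transaction_ids):
    """Map parsed responses to the outstanding requests they answer.

    Returns a dict keyed by transaction id, or None if any expected
    response is still missing.
    """
    by_id = {response.transaction_id: response for response in responses}
    if any(tid not in by_id for tid in expected_transaction_ids):
        return None
    return {tid: by_id[tid] for tid in expected_transaction_ids}
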
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceControlRequest.py b/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceControlRequest.py
deleted file mode 100644
index 1fe44bc..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/cellular_MbimComplianceControlRequest.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_runner
-
-class cellular_MbimComplianceControlRequest(mbim_test_runner.MbimTestRunner):
-    """
-    Main test runner for all the tests within this directory. This is just a
-    harness for invoking the various tests within this directory, which is not
-    currently supported directly by Autotest.
-
-    """
-    _TEST_AREA_FOLDER = os.path.dirname(__file__)
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM01 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM01
deleted file mode 100644
index b672d83..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM01
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM01"
-PURPOSE = """
-MBIM Compliance Test: Validation of |transaction_id| and |status_codes| in
-modem's response to MBIM_OPEN_MSG.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_01 Test listed in the MBIM Compliance Test Suite specification.
-Validation of |transaction_id| and |status_codes| in modem's response to
-MBIM_OPEN_MSG.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM01')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM02 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM02
deleted file mode 100644
index d45601f..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM02
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM02"
-PURPOSE = """
-MBIM Compliance Test: Validation of Message Length of the response to
-MBIM_OPEN_MSG.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_02 Test listed in the MBIM Compliance Test Suite specification.
-Validation of Message Length of the response to MBIM_OPEN_MSG.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM02')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM03 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM03
deleted file mode 100644
index f7a07f7..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM03
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM03"
-PURPOSE = """
-MBIM Compliance Test: Validation of function's behavior for an unsynchronized
-MBIM_OPEN_MSG.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_03 Test listed in the MBIM Compliance Test Suite specification.
-Validation of function's behavior for an unsynchronized MBIM_OPEN_MSG.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM03')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM04 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM04
deleted file mode 100644
index d33ee05..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM04
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM04"
-PURPOSE = """
-MBIM Compliance Test: Validation of |transaction_id| in modem's response to
-MBIM_COMMAND_MSG.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_04 Test listed in the MBIM Compliance Test Suite specification.
-Validation of |transaction_id| in modem's response to MBIM_COMMAND_MSG.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM04')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM05 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM05
deleted file mode 100644
index 519d5e2..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM05
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM05"
-PURPOSE = """
-MBIM Compliance Test: Validation that the modem's responses to two consecutive
-MBIM command messages are correct with regard to |transaction_id|,
-|service_id| and |cid|.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_05 Test listed in the MBIM Compliance Test Suite specification.
-Validation that the modem's responses to two consecutive MBIM command messages
-are correct with regard to |transaction_id|, |service_id| and |cid|.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM05')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM06 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM06
deleted file mode 100644
index 43914b1..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM06
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM06"
-PURPOSE = """
-MBIM Compliance Test: Validation of |status_codes| in modem's response to
-MBIM_COMMAND_MSG.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_06 Test listed in the MBIM Compliance Test Suite specification.
-Validation of |status_codes| in modem's response to MBIM_COMMAND_MSG.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM06')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM07 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM07
deleted file mode 100644
index a2dc4ee..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM07
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM07"
-PURPOSE = """
-MBIM Compliance Test: Validation of status in case of an unsupported CID in
-MBIM_COMMAND_MSG.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_07 Test listed in the MBIM Compliance Test Suite specification.
-Validation of status in case of an unsupported CID in MBIM_COMMAND_MSG.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM07')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM08 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM08
deleted file mode 100644
index f01a4ea..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM08
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM08"
-PURPOSE = """
-MBIM Compliance Test: Validation of InformationBuffer in case of a failure in
-MBIM_COMMAND_MSG.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_08 Test listed in the MBIM Compliance Test Suite specification.
-Validation of InformationBuffer in case of a failure in MBIM_COMMAND_MSG.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM08')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM09 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM09
deleted file mode 100644
index 41a2d15..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM09
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM09"
-PURPOSE = """
-MBIM Compliance Test: Validation of TransactionId for notifications received
-after connect sequence.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_09 Test listed in the MBIM Compliance Test Suite specification.
-MBIM Compliance Test: Validation of TransactionId for notifications received
-after connect sequence.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM09')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM10 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM10
deleted file mode 100644
index e102f2f..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM10
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM10"
-PURPOSE = """
-MBIM Compliance Test: Validation of modem's response to MBIM_CLOSE_MSG.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_10 Test listed in the MBIM Compliance Test Suite specification.
-Validation of modem's response to MBIM_CLOSE_MSG.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM10')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM13 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM13
deleted file mode 100644
index f99c4b1..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM13
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM13"
-PURPOSE = """
-MBIM Compliance Test: Validation of active context termination on function's
-closing.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_13 Test listed in the MBIM Compliance Test Suite specification.
-Validation of active context termination on function's closing.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM13')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM14 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM14
deleted file mode 100644
index 053d933..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM14
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM14"
-PURPOSE = """
-MBIM Compliance Test: Validation of not sending data payload in error messages.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_14 Test listed in the MBIM Compliance Test Suite specification.
-Validation of not sending data payload in error messages.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM14')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM15 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM15
deleted file mode 100644
index 1142aba..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM15
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM15"
-PURPOSE = """
-MBIM Compliance Test: Validation of message fragmentation ability.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_15 Test listed in the MBIM Compliance Test Suite specification.
-Validation of message fragmentation ability.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM15')
diff --git a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM16 b/client/site_tests/cellular_MbimComplianceControlRequest/control.CM16
deleted file mode 100644
index a1d830e..0000000
--- a/client/site_tests/cellular_MbimComplianceControlRequest/control.CM16
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceControlRequest.CM16"
-PURPOSE = """
-MBIM Compliance Test: Validation of fragmented message transmission in case
-of multiple fragmented messages.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-CM_16 Test listed in the MBIM Compliance Test Suite specification.
-Validation of fragmented message transmission in case of multiple fragmented
-messages.
-"""
-
-job.run_test('cellular_MbimComplianceControlRequest',
-             subtest_name='cellular_MbimComplianceCM16')
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS01.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS01.py
deleted file mode 100644
index 72f9e59..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS01.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS01(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation for alternate setting 1 of the communication interface.
-
-    This test validates data transfer operation for alternate setting 1 of the
-    Communication Interface.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 28
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run DTS_01 test. """
-        # Precondition
-        _, _, _ = self.run_precondition(mbim_constants.NTB_FORMAT_16)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        _, _, _, payload = loopback.run(ntb_format=mbim_constants.NTB_FORMAT_16)
-
-        # Step 2
-        # Let's check the first byte of the first received payload to verify
-        # that it is an IPv4 packet
-        if payload[0][0] != 0x45:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.1#5')
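
The 0x45 comparison in DTS_01 above packs two IPv4 facts into one byte: the high nibble is the IP version (4) and the low nibble is the header length in 32-bit words (5, i.e. a 20-byte header with no options). The same check written out, as a small illustrative helper:

def is_plain_ipv4_first_byte(first_byte):
    """True for version 4 with a 20-byte header (IHL == 5), i.e. 0x45."""
    version = first_byte >> 4
    ihl_words = first_byte & 0x0F
    return version == 4 and ihl_words == 5
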
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0208.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0208.py
deleted file mode 100644
index 58d3aba..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0208.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_data_transfer
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS0208(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of dwSignature.
-
-    This test validates 16-bit/32-bit NCM Transfer Header signature.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 28
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_02/DTS_08 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        nth, _, _, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if nth.signature != mbim_data_transfer.NTH_SIGNATURE_16:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.1#1')
-        else:
-            if nth.signature != mbim_data_transfer.NTH_SIGNATURE_32:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.2#1')
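
The NTH validated by the DTS_02/DTS_08 test above is a fixed little-endian structure: NTH16 is 12 bytes (dwSignature, wHeaderLength, wSequence, wBlockLength, wNdpIndex) and NTH32 is 16 bytes with 32-bit block length and NDP index fields. A minimal struct-based parsing sketch, assuming the raw NTB frame is available as a byte string:

import struct

def parse_nth16(ntb):
    """Unpack the 12-byte NTH16 at the start of an NTB16 frame.

    Returns (signature, header_length, sequence, block_length, ndp_index).
    """
    return struct.unpack_from('<IHHHH', ntb)

def parse_nth32(ntb):
    """Unpack the 16-byte NTH32 (32-bit block length and NDP index)."""
    return struct.unpack_from('<IHHII', ntb)
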
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0309.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0309.py
deleted file mode 100644
index 6602247..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0309.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS0309(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of wHeaderLength.
-
-    This test validates the value in wHeaderLength field of NTH16/NTH32.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 28
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_03/DTS_09 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        nth, _, _, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if nth.header_length != 12:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.1#2')
-        else:
-            if nth.header_length != 16:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.2#2')
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0410.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0410.py
deleted file mode 100644
index 64ab62f..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0410.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_data_transfer
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS0410(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of wSequence after function reset.
-
-    This test verifies that function reset properly re-initializes the sequence
-    number.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 28
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_04/DTS_10 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, open_sequence, connect_sequence = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        _, _, _, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        open_sequence.run(ntb_format=ntb_format)
-        connect_sequence.run()
-        mbim_data_transfer.MBIMNtb.reset_sequence_number()
-
-        # Step 3
-        nth, _, _, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 4
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if nth.sequence_number != 0:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.1#3')
-        else:
-            if nth.sequence_number != 0:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.2#3')
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0511.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0511.py
deleted file mode 100644
index 576f071..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0511.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS0511(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of wSequence increment.
-
-    This test verifies that the expected increment happens for wSequence.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 28
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_05/DTS_11 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        nth_1, _, _, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        nth_2, _, _, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 3
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if nth_2.sequence_number != nth_1.sequence_number + 1:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.1#4')
-        else:
-            if nth_2.sequence_number != nth_1.sequence_number + 1:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.2#4')
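
The DTS_05/DTS_11 check above compares consecutive wSequence values directly; on a long-running device the counter eventually wraps, since wSequence is a 16-bit field in both NTH16 and NTH32. A wrap-aware variant of the same comparison, as an illustrative sketch:

def is_next_sequence(previous, current):
    """True if |current| follows |previous| modulo the 16-bit wSequence field."""
    return current == (previous + 1) % 0x10000
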
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0612.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0612.py
deleted file mode 100644
index 75a5bed..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0612.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS0612(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of wBlockLength.
-
-    This test validates the value in wBlockLength field of NTH16/NTH32.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 28
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_06/DTS_12 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        nth, _, _, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if nth.block_length > self.device_context.max_in_data_transfer_size:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.1#5')
-        else:
-            if nth.block_length > self.device_context.max_in_data_transfer_size:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.2#5')
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0713.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0713.py
deleted file mode 100644
index ce6a14c..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS0713.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS0713(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of wNdpIndex.
-
-    This test validates the value in wNdpIndex field of NTH16/NTH32.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 28
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_07/DTS_13 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        nth, _, _, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if (nth.fp_index < 12) or (nth.fp_index % 4 != 0):
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.1#6')
-        else:
-            if (nth.fp_index < 16) or (nth.fp_index % 4 != 0):
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.2.2#6')
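
The rule exercised by DTS_07/DTS_13 above is that the first NDP must start past the NTH (at offset 12 or more for NTB16, 16 or more for NTB32) and on a 4-byte boundary. The same rule as a small helper, mirroring the deleted checks:

def ndp_index_is_valid(fp_index, is_ntb32):
    """Check wNdpIndex/dwNdpIndex placement against the NTH size and alignment."""
    minimum_offset = 16 if is_ntb32 else 12  # sizeof(NTH32) / sizeof(NTH16)
    return fp_index >= minimum_offset and fp_index % 4 == 0
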
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1420.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1420.py
deleted file mode 100644
index cba6538..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1420.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_data_transfer
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS1420(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of dwSignature for IP Stream.
-
-    This test validates 16/32-bit NCM Datagram Pointer signature for IP stream.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 33
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_14/DTS_20 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        _, ndp, _, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if ndp.signature != mbim_data_transfer.NDP_SIGNATURE_IPS_16:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'mbim1.0:7#1')
-        else:
-            if ndp.signature != mbim_data_transfer.NDP_SIGNATURE_IPS_32:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'mbim1.0:7#3')
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1521.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1521.py
deleted file mode 100644
index 8c799a9..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1521.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS1521(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of wLength.
-
-    This test validates the value in wLength field of NDP16/32.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 33
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_15/DTS_21 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        _, ndp, _, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if (ndp.length < 16) or (ndp.length % 4 != 0):
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.3.1#1')
-        else:
-            if (ndp.length < 32) or (ndp.length % 8 != 0):
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.3.2#1')
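
DTS_15/DTS_21 above apply the per-format NDP length rule: an NDP16 must be at least 16 bytes long and a multiple of 4, an NDP32 at least 32 bytes and a multiple of 8. A helper expressing the same rule, for illustration:

def ndp_length_is_valid(length, is_ntb32):
    """Check the NDP wLength per the DTS_15/DTS_21 constraints."""
    if is_ntb32:
        return length >= 32 and length % 8 == 0
    return length >= 16 and length % 4 == 0
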
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1622.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1622.py
deleted file mode 100644
index 01cf242..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1622.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS1622(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of wDatagramIndex[0].
-
-    This test validates the value in wDatagramIndex[0] field of NDP16/32.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 33
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_16/DTS_22 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        _, _, ndp_entries, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if ndp_entries[0].datagram_index < 12:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.3.1#2')
-        else:
-            if ndp_entries[0].datagram_index < 16:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.3.2#2')
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1723.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1723.py
deleted file mode 100644
index bfacbbc..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1723.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS1723(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of wDatagramLength[0].
-
-    This test validates the value in wDatagramLength[0] field of NDP16/32.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 33
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_17/DTS_23 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        _, _, ndp_entries, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if ndp_entries[0].datagram_length < 20:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.3.1#3')
-        else:
-            if ndp_entries[0].datagram_length < 20:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.3.2#3')
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1824.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1824.py
deleted file mode 100644
index bd80a11..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1824.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS1824(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of the Last wDatagramIndex.
-
-    This test validates the value in wDatagramIndex[(wLength-8)/4 - 1] field of
-    NDP16/32.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 34
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_18/DTS_24 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        _, _, ndp_entries, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if ndp_entries[-1].datagram_index != 0:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.3.1#4')
-        else:
-            if ndp_entries[-1].datagram_index != 0:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.3.2#4')
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1925.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1925.py
deleted file mode 100644
index 98e9ac2..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDTS1925.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_dts_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import loopback_sequence
-
-
-class cellular_MbimComplianceDTS1925(mbim_dts_test_base.MbimDtsTestBase):
-    """
-    Validation of the Last wDatagramLength.
-
-    This test validates the value in wDatagramLength[(wLength-8)/4 - 1] field of
-    NDP16/32.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 34
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self, ntb_format):
-        """
-        Run DTS_19/DTS_25 test.
-
-        @param ntb_format: Whether to send/receive an NTB16 or NTB32 frame.
-                Possible values: NTB_FORMAT_16, NTB_FORMAT_32 (mbim_constants)
-
-        """
-        # Precondition
-        _, _, _ = self.run_precondition(ntb_format)
-
-        # Step 1
-        loopback = loopback_sequence.LoopbackSequence(self.device_context)
-        _, _, ndp_entries, _ = loopback.run(ntb_format=ntb_format)
-
-        # Step 2
-        if ntb_format == mbim_constants.NTB_FORMAT_16:
-            if ndp_entries[-1].datagram_length != 0:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.3.1#5')
-        else:
-            if ndp_entries[-1].datagram_length != 0:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'ncm1.0:3.3.2#5')
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDataTransfer.py b/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDataTransfer.py
deleted file mode 100644
index 938f8ba..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/cellular_MbimComplianceDataTransfer.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_runner
-
-class cellular_MbimComplianceDataTransfer(mbim_test_runner.MbimTestRunner):
-    """
-    Main test runner for all the tests within this directory. This is just a
-    harness for invoking the various tests within this directory, which is
-    not currently supported by Autotest.
-
-    """
-    _TEST_AREA_FOLDER = os.path.dirname(__file__)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS01 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS01
deleted file mode 100644
index ac86564..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS01
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS01"
-PURPOSE = """
-MBIM Compliance Test: Validation for alternate setting 1 of the
-communication interface.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_01 Test listed in the MBIM Compliance Test Suite specification.
-Validation for alternate setting 1 of the communication interface.
-
-This test sets the data interface of the modem in loopback mode
-(specified by MBIM spec) and verifies that an ICMP packet can be
-routed through the data interface in alternate setting 1 of the
-communication interface.
-"""
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS01')
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS02 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS02
deleted file mode 100644
index b7ea4d5..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS02
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS02"
-PURPOSE = """
-MBIM Compliance Test: Validation of dwSignature.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_02 Test listed in the MBIM Compliance Test Suite specification.
-Validation of dwSignature.
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0208',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS03 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS03
deleted file mode 100644
index 9b799a0..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS03
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS03"
-PURPOSE = """
-MBIM Compliance Test: Validation of wHeaderLength.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_03 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wHeaderLength.
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0309',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS04 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS04
deleted file mode 100644
index daed982..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS04
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS04"
-PURPOSE = """
-MBIM Compliance Test: Validation of wSequence after function reset.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_04 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wSequence after function reset.
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0410',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS05 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS05
deleted file mode 100644
index 94af2b6..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS05
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS05"
-PURPOSE = """
-MBIM Compliance Test: Validation of wSequence increment.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_05 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wSequence increment.
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0511',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS06 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS06
deleted file mode 100644
index 0a73855..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS06
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS06"
-PURPOSE = """
-MBIM Compliance Test: Validation of wBlockLength.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_06 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wBlockLength.
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0612',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS07 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS07
deleted file mode 100644
index 7d31692..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS07
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS07"
-PURPOSE = """
-MBIM Compliance Test: Validation of wNdpIndex.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_07 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wNdpIndex.
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0713',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS08 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS08
deleted file mode 100644
index 5d5be30..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS08
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS08"
-PURPOSE = """
-MBIM Compliance Test: Validation of dwSignature.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_08 Test listed in the MBIM Compliance Test Suite specification.
-Validation of dwSignature.
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0208',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS09 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS09
deleted file mode 100644
index a6c3e27..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS09
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS09"
-PURPOSE = """
-MBIM Compliance Test: Validation of wHeaderLength.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_09 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wHeaderLength.
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0309',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS10 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS10
deleted file mode 100644
index ba29e47..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS10
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS10"
-PURPOSE = """
-MBIM Compliance Test: Validation of wSequence after function reset.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_10 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wSequence after function reset.
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0410',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS11 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS11
deleted file mode 100644
index d14a554..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS11
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS11"
-PURPOSE = """
-MBIM Compliance Test: Validation of wSequence increment.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_11 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wSequence increment.
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0511',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS12 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS12
deleted file mode 100644
index 3905ca3..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS12
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS12"
-PURPOSE = """
-MBIM Compliance Test: Validation of dwBlockLength.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_12 Test listed in the MBIM Compliance Test Suite specification.
-Validation of dwBlockLength.
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0612',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS13 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS13
deleted file mode 100644
index f907e9e..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS13
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS13"
-PURPOSE = """
-MBIM Compliance Test: Validation of dwNdpIndex.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_13 Test listed in the MBIM Compliance Test Suite specification.
-Validation of dwNdpIndex.
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS0713',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS14 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS14
deleted file mode 100644
index dfd76ba..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS14
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS14"
-PURPOSE = """
-MBIM Compliance Test: Validation of dwSignature for IP Stream.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_14 Test listed in the MBIM Compliance Test Suite specification.
-Validation of dwSignature for IP Stream.
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1420',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS15 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS15
deleted file mode 100644
index e40d46f..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS15
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS15"
-PURPOSE = """
-MBIM Compliance Test: Validation of wLength.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_15 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wLength.
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1521',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS16 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS16
deleted file mode 100644
index 6f37688..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS16
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS16"
-PURPOSE = """
-MBIM Compliance Test: Validation of wDatagramIndex[0].
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_16 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wDatagramIndex[0].
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1622',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS17 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS17
deleted file mode 100644
index 5d85025..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS17
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS17"
-PURPOSE = """
-MBIM Compliance Test: Validation of wDatagramLength[0].
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_17 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wDatagramLength[0].
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1723',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS18 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS18
deleted file mode 100644
index dc2992c..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS18
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS18"
-PURPOSE = """
-MBIM Compliance Test: Validation of the Last wDatagramIndex.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_18 Test listed in the MBIM Compliance Test Suite specification.
-Validation of the Last wDatagramIndex.
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1824',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS19 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS19
deleted file mode 100644
index 333e392..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS19
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS19"
-PURPOSE = """
-MBIM Compliance Test: Validation of the Last wDatagramLength.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_19 Test listed in the MBIM Compliance Test Suite specification.
-Validation of the Last wDatagramLength.
-
-This test verifies the MBIM NTB-16 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1925',
-             ntb_format=mbim_constants.NTB_FORMAT_16)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS20 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS20
deleted file mode 100644
index 0aa8651..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS20
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS20"
-PURPOSE = """
-MBIM Compliance Test: Validation of dwSignature for IP Stream.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_20 Test listed in the MBIM Compliance Test Suite specification.
-Validation of dwSignature for IP Stream.
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1420',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS21 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS21
deleted file mode 100644
index b2becef..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS21
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS21"
-PURPOSE = """
-MBIM Compliance Test: Validation of wLength.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_21 Test listed in the MBIM Compliance Test Suite specification.
-Validation of wLength.
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1521',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS22 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS22
deleted file mode 100644
index c8444d3..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS22
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS22"
-PURPOSE = """
-MBIM Compliance Test: Validation of dwDatagramIndex[0].
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_22 Test listed in the MBIM Compliance Test Suite specification.
-Validation of dwDatagramIndex[0].
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1622',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS23 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS23
deleted file mode 100644
index 226d0ab..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS23
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS23"
-PURPOSE = """
-MBIM Compliance Test: Validation of dwDatagramLength[0].
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_23 Test listed in the MBIM Compliance Test Suite specification.
-Validation of dwDatagramLength[0].
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1723',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS24 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS24
deleted file mode 100644
index cb88d0d..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS24
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS24"
-PURPOSE = """
-MBIM Compliance Test: Validation of the Last dwDatagramIndex.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_24 Test listed in the MBIM Compliance Test Suite specification.
-Validation of the Last dwDatagramIndex.
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1824',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS25 b/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS25
deleted file mode 100644
index e47a470..0000000
--- a/client/site_tests/cellular_MbimComplianceDataTransfer/control.DTS25
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDataTransfer.DTS25"
-PURPOSE = """
-MBIM Compliance Test: Validation of the Last dwDatagramLength.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DTS_25 Test listed in the MBIM Compliance Test Suite specification.
-Validation of the Last dwDatagramLength.
-
-This test verifies the MBIM NTB-32 data transfer capability of the device.
-"""
-
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-
-job.run_test('cellular_MbimComplianceDataTransfer',
-             subtest_name='cellular_MbimComplianceDTS1925',
-             ntb_format=mbim_constants.NTB_FORMAT_32)
diff --git a/client/site_tests/cellular_MbimComplianceDescriptor/cellular_MbimComplianceDES01.py b/client/site_tests/cellular_MbimComplianceDescriptor/cellular_MbimComplianceDES01.py
deleted file mode 100644
index 9f52f9b..0000000
--- a/client/site_tests/cellular_MbimComplianceDescriptor/cellular_MbimComplianceDES01.py
+++ /dev/null
@@ -1,214 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import usb_descriptors
-
-
-class cellular_MbimComplianceDES01(mbim_test_base.MbimTestBase):
-    """
-    DES_01 Descriptors Validation for NCM/MBIM Functions
-
-    This test validates descriptors for the combination NCM/MBIM functions.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 23
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run the DES_01 test. """
-        # Precondition.
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-
-        # Test step 1
-        # Get ncm communication interface and mbim communication interface.
-        interfaces = usb_descriptors.filter_descriptors(
-                usb_descriptors.InterfaceDescriptor, descriptors)
-
-        ncm_communication_interfaces = (
-                usb_descriptors.filter_interface_descriptors(
-                        interfaces,
-                        usb_descriptors.NCM_MBIM_COMMUNICATION_INTERFACE_NCM))
-
-        mbim_communication_interfaces = (
-                usb_descriptors.filter_interface_descriptors(
-                        interfaces,
-                        usb_descriptors.NCM_MBIM_COMMUNICATION_INTERFACE_MBIM))
-
-        # If we don't find both NCM and MBIM interfaces, then we should bail
-        # out of this test
-        if (not ncm_communication_interfaces or
-            not mbim_communication_interfaces):
-            return
-
-        if len(ncm_communication_interfaces) != 1:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.1#2')
-        ncm_communication_interface = ncm_communication_interfaces[0]
-
-        if len(mbim_communication_interfaces) != 1:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.1#3')
-        mbim_communication_interface = mbim_communication_interfaces[0]
-
-        if (ncm_communication_interface.bInterfaceNumber !=
-            mbim_communication_interface.bInterfaceNumber):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.1#1')
-
-        # Test step 2
-        if (ncm_communication_interface.index >
-            mbim_communication_interface.index):
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Alternate setting 1 of the interface must appear after '
-                    'alternate setting 0 of the interface.')
-
-        # Test step 3
-        # Get header functional descriptor, union functional descriptor,
-        # MBIM functional descriptor and MBIM extended functional
-        # descriptor from |ncm_communication_interface|[0].
-        ncm_communication_interface_bundle = (
-                usb_descriptors.get_descriptor_bundle(
-                        descriptors, ncm_communication_interface))
-
-        header_descriptors = usb_descriptors.filter_descriptors(
-                usb_descriptors.HeaderFunctionalDescriptor,
-                ncm_communication_interface_bundle)
-        union_descriptors = usb_descriptors.filter_descriptors(
-                usb_descriptors.UnionFunctionalDescriptor,
-                ncm_communication_interface_bundle)
-        mbim_descriptors = usb_descriptors.filter_descriptors(
-                usb_descriptors.MBIMFunctionalDescriptor,
-                ncm_communication_interface_bundle)
-        mbim_extended_descriptors = usb_descriptors.filter_descriptors(
-                usb_descriptors.MBIMExtendedFunctionalDescriptor,
-                ncm_communication_interface_bundle)
-        if not(header_descriptors and union_descriptors and mbim_descriptors):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.3#2')
-
-        # Test step 4
-        # Check header functional descriptor.
-        if usb_descriptors.has_distinct_descriptors(header_descriptors):
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Expected 1 unique header functional descriptor.')
-        header_descriptor = header_descriptors[0]
-        if not(header_descriptor.bDescriptorType == 0x24 and
-               header_descriptor.bDescriptorSubtype == 0x00 and
-               header_descriptor.bLength == 5 and
-               header_descriptor.bcdCDC >= 0x0120):
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Header functional descriptor: wrong value(s)')
-
-        # Test step 5
-        # Check union functional descriptor.
-        if usb_descriptors.has_distinct_descriptors(union_descriptors):
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Expected 1 unique union functional descriptor.')
-
-        union_descriptor = union_descriptors[0]
-        if union_descriptor.index < header_descriptor.index:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.3#3')
-        # Get no data data interface.
-        no_data_data_interfaces = usb_descriptors.filter_interface_descriptors(
-                interfaces, usb_descriptors.NCM_MBIM_DATA_INTERFACE_NO_DATA)
-
-        if len(no_data_data_interfaces) != 1:
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceAssertionError,
-                    'mbim1.0:3.2.2.4#2')
-
-        no_data_data_interface = no_data_data_interfaces[0]
-        no_data_data_interface_bundle = usb_descriptors.get_descriptor_bundle(
-                descriptors, no_data_data_interface)
-        endpoint_descriptors = (
-                usb_descriptors.filter_descriptors(
-                        usb_descriptors.EndpointDescriptor,
-                        no_data_data_interface_bundle))
-
-        if endpoint_descriptors:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.2.4#4')
-
-        # Get NCM data interface.
-        ncm_data_interfaces = (
-                usb_descriptors.filter_interface_descriptors(
-                        interfaces,
-                        usb_descriptors.NCM_MBIM_DATA_INTERFACE_NCM))
-
-        if len(ncm_data_interfaces) != 1:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.2.4#2')
-        ncm_data_interface = ncm_data_interfaces[0]
-        if ncm_data_interface.bNumEndpoints != 2:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.2.4#4')
-        ncm_data_interface_bundle = (
-                usb_descriptors.get_descriptor_bundle(descriptors,
-                                                      ncm_data_interface))
-        endpoint_descriptors = (
-                usb_descriptors.filter_descriptors(
-                        usb_descriptors.EndpointDescriptor,
-                        ncm_data_interface_bundle))
-        # Check endpoint descriptors in |ncm_data_interface_bundle|
-        # There should be one bulk OUT and one bulk IN.
-        if not usb_descriptors.has_bulk_in_and_bulk_out(endpoint_descriptors):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.2.4#4')
-
-        # Get MBIM data interface.
-        mbim_data_interfaces = usb_descriptors.filter_interface_descriptors(
-                interfaces, usb_descriptors.NCM_MBIM_DATA_INTERFACE_MBIM)
-
-        if len(mbim_data_interfaces) != 1:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.2.4#3')
-        mbim_data_interface = mbim_data_interfaces[0]
-        if mbim_data_interface.bNumEndpoints != 2:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.2.4#4')
-
-        mbim_data_interface_bundle = (
-                usb_descriptors.get_descriptor_bundle(descriptors,
-                                                      mbim_data_interface))
-        endpoint_descriptors = (
-                usb_descriptors.filter_descriptors(
-                        usb_descriptors.EndpointDescriptor,
-                        mbim_data_interface_bundle))
-        # Check endpoint descriptors in |mbim_data_interface_bundle|
-        # alternate setting 2. There should be one bulk OUT and one bulk IN.
-        if not usb_descriptors.has_bulk_in_and_bulk_out(endpoint_descriptors):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.2.4#4')
-
-        if not(no_data_data_interface.bInterfaceNumber ==
-               ncm_data_interface.bInterfaceNumber ==
-               mbim_data_interface.bInterfaceNumber):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:3.2.2.4#1')
-
-        if not(union_descriptor.bLength == 5 and
-               union_descriptor.bControlInterface == (
-                       ncm_communication_interface.bInterfaceNumber) and
-               union_descriptor.bSubordinateInterface0 == (
-                       no_data_data_interface.bInterfaceNumber)):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.3#4')
-        # TODO(mcchou): Continue the remaining test steps.
-
-    # End of run_internal()
diff --git a/client/site_tests/cellular_MbimComplianceDescriptor/cellular_MbimComplianceDES02.py b/client/site_tests/cellular_MbimComplianceDescriptor/cellular_MbimComplianceDES02.py
deleted file mode 100644
index 944fbf4..0000000
--- a/client/site_tests/cellular_MbimComplianceDescriptor/cellular_MbimComplianceDES02.py
+++ /dev/null
@@ -1,337 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance \
-        import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance import usb_descriptors
-
-
-class cellular_MbimComplianceDES02(mbim_test_base.MbimTestBase):
-    """
-    DES_02 Descriptors Validation for MBIM Only Functions
-
-    This test validates descriptors for MBIM only functions.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 26
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run the DES_02 test. """
-        # Precondition.
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-
-        # Test step 1
-        # Get MBIM communication interface.
-        interfaces = usb_descriptors.filter_descriptors(
-                usb_descriptors.InterfaceDescriptor, descriptors)
-
-        mbim_communication_interfaces = (
-                usb_descriptors.filter_interface_descriptors(
-                        interfaces,
-                        usb_descriptors.MBIM_ONLY_COMMUNICATION_INTERFACE))
-
-        if not mbim_communication_interfaces:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.3#1')
-
-        if len(mbim_communication_interfaces) > 1:
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Expected 1 mbim communication interface, got %d.' % (
-                            len(mbim_communication_interfaces)))
-        mbim_communication_interface = mbim_communication_interfaces[0]
-
-        # Test step 2
-        # Get header functional descriptor, union functional descriptor,
-        # MBIM functional descriptor and MBIM extended functional
-        # descriptor.
-        mbim_communication_interface_bundle = (
-                usb_descriptors.get_descriptor_bundle(
-                        descriptors, mbim_communication_interface))
-
-        header_descriptors = usb_descriptors.filter_descriptors(
-                usb_descriptors.HeaderFunctionalDescriptor,
-                mbim_communication_interface_bundle)
-        union_descriptors = usb_descriptors.filter_descriptors(
-                usb_descriptors.UnionFunctionalDescriptor,
-                mbim_communication_interface_bundle)
-        mbim_descriptors = usb_descriptors.filter_descriptors(
-                usb_descriptors.MBIMFunctionalDescriptor,
-                mbim_communication_interface_bundle)
-        mbim_extended_descriptors = usb_descriptors.filter_descriptors(
-                usb_descriptors.MBIMExtendedFunctionalDescriptor,
-                mbim_communication_interface_bundle)
-        if not(header_descriptors and union_descriptors and mbim_descriptors):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.3#2')
-
-        # Test step 3
-        # Check header functional descriptor.
-        if usb_descriptors.has_distinct_descriptors(header_descriptors):
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Expected 1 unique header functional descriptor.')
-        header_descriptor = header_descriptors[0]
-        if not(header_descriptor.bDescriptorType == 0x24 and
-               header_descriptor.bDescriptorSubtype == 0x00 and
-               header_descriptor.bLength == 5 and
-               header_descriptor.bcdCDC >= 0x0120):
-            mbim_errors.log_and_raise(
-                mbim_errors.MBIMComplianceGenericAssertionError,
-                'Header functional descriptor: wrong value(s)')
-
-        # Test step 4
-        # Check union functional descriptor.
-        if usb_descriptors.has_distinct_descriptors(union_descriptors):
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Expected 1 unique union functional descriptor.')
-        union_descriptor = union_descriptors[0]
-        if union_descriptor.index < header_descriptor.index:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.3#3')
-
-        # Get CDC no data data interface.
-        no_data_data_interfaces = usb_descriptors.filter_interface_descriptors(
-                interfaces, usb_descriptors.MBIM_ONLY_DATA_INTERFACE_NO_DATA)
-        if not no_data_data_interfaces:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.6#4')
-        if len(no_data_data_interfaces) > 1:
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Expected 1 CDC data interface, got %d.' % (
-                            len(no_data_data_interfaces)))
-        no_data_data_interface = no_data_data_interfaces[0]
-        no_data_data_interface_bundle = usb_descriptors.get_descriptor_bundle(
-                descriptors, no_data_data_interface)
-        data_endpoint_descriptors = (
-                usb_descriptors.filter_descriptors(
-                        usb_descriptors.EndpointDescriptor,
-                        no_data_data_interface_bundle))
-        if data_endpoint_descriptors:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.6#2')
-
-        # Get MBIM data interface.
-        mbim_data_interfaces = usb_descriptors.filter_interface_descriptors(
-                interfaces, usb_descriptors.MBIM_ONLY_DATA_INTERFACE_MBIM)
-        if not mbim_data_interfaces:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.6#4')
-        if len(mbim_data_interfaces) > 1:
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Expected 1 MBIM data interface, got %d.' % (
-                            len(mbim_data_interfaces)))
-        mbim_data_interface = mbim_data_interfaces[0]
-
-        # Check if there are two endpoint descriptors.
-        if mbim_data_interface.bNumEndpoints != 2:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.6#3')
-
-        mbim_data_interface_bundle = usb_descriptors.get_descriptor_bundle(
-                descriptors, mbim_data_interface)
-        data_endpoint_descriptors = usb_descriptors.filter_descriptors(
-                usb_descriptors.EndpointDescriptor,
-                mbim_data_interface_bundle)
-
-        # Check the values of fields in endpoint descriptors.
-        # There should be one bulk OUT and one bulk IN.
-        if not usb_descriptors.has_bulk_in_and_bulk_out(
-                data_endpoint_descriptors):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.6#3')
-
-        # The MBIM CDC data interface should provide both the no-data data
-        # interface and the MBIM data interface as alternate settings, so the
-        # two interface numbers should be the same.
-        if (no_data_data_interface.bInterfaceNumber !=
-            mbim_data_interface.bInterfaceNumber):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.6#1')
-
-        # Check the fields of union functional descriptor
-        if not(union_descriptor.bLength == 5 and
-               (union_descriptor.bControlInterface ==
-                mbim_communication_interface.bInterfaceNumber) and
-               (union_descriptor.bSubordinateInterface0 ==
-                mbim_data_interface.bInterfaceNumber)):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.3#4')
-
-        # Test step 5
-        # Get MBIM functional descriptor.
-        if usb_descriptors.has_distinct_descriptors(mbim_descriptors):
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Expected 1 unique MBIM functional descriptor.')
-        mbim_descriptor = mbim_descriptors[0]
-
-        if mbim_descriptor.index < header_descriptor.index:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.3#3')
-
-        if mbim_descriptor.bLength != 12:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.4#5')
-
-        if mbim_descriptor.bcdMBIMVersion != 0x0100:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.4#6')
-
-        if mbim_descriptor.wMaxControlMessage < 64:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.4#1')
-
-        if mbim_descriptor.bNumberFilters < 16:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.4#2')
-
-        if mbim_descriptor.bMaxFilterSize > 192:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.4#3')
-
-        # TODO(mcchou): Most of vendors set wMaxSegmentSize to be less than
-        # 1500, so this assertion is skipped for now.
-        #
-        #if not mbim_descriptor.wMaxSegmentSize >= 2048:
-        #    mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-        #                              'mbim1.0:6.4#4')
-
-        # Use a byte as the mask to check if D0, D1, D2, D4, D6 and D7 are
-        # zeros.
-        if (mbim_descriptor.bmNetworkCapabilities & 0b11010111) > 0:
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.4#7')
-
-        # Test step 6
-        # Get MBIM extended functional descriptor, which is optional.
-        if len(mbim_extended_descriptors) >= 1:
-            if usb_descriptors.has_distinct_descriptors(
-                    mbim_extended_descriptors):
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceGenericAssertionError,
-                        'Expected 1 unique MBIM extended functional '
-                        'descriptor.')
-            mbim_extended_descriptor = mbim_extended_descriptors[0]
-
-            if mbim_extended_descriptor.index < mbim_descriptor.index:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'mbim1.0:6.5#1')
-
-            if mbim_extended_descriptor.bLength != 8:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'mbim1.0:6.5#2')
-
-            if mbim_extended_descriptor.bcdMBIMExtendedVersion != 0x0100:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'mbim1.0:6.5#3')
-
-            if mbim_extended_descriptor.bMaxOutstandingCommandMessages == 0:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'mbim1.0:6.5#4')
-
-        # Test step 7
-        # Get the first endpoint for the communication interface.
-        interrupt_endpoint_descriptors = usb_descriptors.filter_descriptors(
-                usb_descriptors.EndpointDescriptor,
-                mbim_communication_interface_bundle)
-
-        if len(interrupt_endpoint_descriptors) != 1:
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Expected 1 endpoint, got %d.' % (
-                            len(interrupt_endpoint_descriptors)))
-        interrupt_endpoint_descriptor = interrupt_endpoint_descriptors[0]
-        if not (interrupt_endpoint_descriptor.bDescriptorType == 0x05 and
-                interrupt_endpoint_descriptor.bLength == 7 and
-                interrupt_endpoint_descriptor.bEndpointAddress >= 0x80 and
-                interrupt_endpoint_descriptor.bmAttributes == 0x03):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:6.3#5')
-
-        appear_before_functional_descriptors = False
-        if mbim_extended_descriptors:
-            if (mbim_extended_descriptor.index >
-                interrupt_endpoint_descriptor.index):
-                appear_before_functional_descriptors = True
-        else:
-            if (mbim_descriptor.index > interrupt_endpoint_descriptor.index or
-                union_descriptor.index > interrupt_endpoint_descriptor.index):
-                appear_before_functional_descriptors = True
-        if appear_before_functional_descriptors:
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'All functional descriptors must appear before endpoint '
-                    'descriptors.')
-
-        # Test step 8
-        # Get interface association descriptor.
-        interface_association_descriptors = (
-                usb_descriptors.filter_descriptors(
-                        usb_descriptors.InterfaceAssociationDescriptor,
-                        descriptors))
-
-        if usb_descriptors.has_distinct_descriptors(
-                interface_association_descriptors):
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceGenericAssertionError,
-                    'Expected 1 interface association descriptor, got %d.' % (
-                            len(interface_association_descriptors)))
-
-        for association_descriptor in interface_association_descriptors:
-            # Check the interface association descriptor if one of the
-            # following conditions is met:
-            # 1. bFirstInterface <= bControlInterface < (bFirstInterface +
-            #                                            bInterfaceCount)
-            # 2. bFirstInterface <= bSubordinateInterface0 < (
-            #            bFirstInterface + bInterfaceCount)
-            b_first_interface = association_descriptor.bFirstInterface
-            b_interface_count = association_descriptor.bInterfaceCount
-            b_control_interface = union_descriptor.bControlInterface
-            b_subordinate_interface_0 = (
-                    union_descriptor.bSubordinateInterface0)
-            check_interface_association_descriptor = False
-
-            if ((b_first_interface <= b_control_interface < (
-                         b_first_interface + b_interface_count)) or
-                (b_first_interface <= b_subordinate_interface_0 < (
-                         b_first_interface + b_interface_count))):
-                check_interface_association_descriptor = True
-
-            if not check_interface_association_descriptor:
-                mbim_errors.log_and_raise(
-                        mbim_errors.MBIMComplianceAssertionError,
-                        'mbim1.0:6.1#1')
-
-            if check_interface_association_descriptor:
-                if not((b_first_interface == b_control_interface or
-                        b_first_interface == b_subordinate_interface_0) and
-                       (b_interface_count == 2) and
-                       (b_subordinate_interface_0 == b_control_interface + 1 or
-                        b_subordinate_interface_0 ==
-                        b_control_interface - 1) and
-                       (association_descriptor.bFunctionClass == 0x02) and
-                       (association_descriptor.bFunctionSubClass == 0x0E) and
-                       (association_descriptor.bFunctionProtocol == 0x00)):
-                    mbim_errors.log_and_raise(
-                            mbim_errors.MBIMComplianceAssertionError,
-                            'mbim1.0:6.1#2')
-
-    # End of run_internal().
diff --git a/client/site_tests/cellular_MbimComplianceDescriptor/cellular_MbimComplianceDescriptor.py b/client/site_tests/cellular_MbimComplianceDescriptor/cellular_MbimComplianceDescriptor.py
deleted file mode 100644
index 29c3f78..0000000
--- a/client/site_tests/cellular_MbimComplianceDescriptor/cellular_MbimComplianceDescriptor.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_runner
-
-class cellular_MbimComplianceDescriptor(mbim_test_runner.MbimTestRunner):
-    """
-    Main test runner for all the tests within this directory. This is just a
-    harness for invoking the various tests within this directory, since that
-    is not currently supported directly by Autotest.
-
-    """
-    _TEST_AREA_FOLDER = os.path.dirname(__file__)
diff --git a/client/site_tests/cellular_MbimComplianceDescriptor/control.DES01 b/client/site_tests/cellular_MbimComplianceDescriptor/control.DES01
deleted file mode 100644
index 5e3b082..0000000
--- a/client/site_tests/cellular_MbimComplianceDescriptor/control.DES01
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDescriptor.DES01"
-PURPOSE = """
-MBIM Compliance Test: Descriptors Validation for NCM/MBIM Functions.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DES_01 Test listed in the MBIM Compliance Test Suite specification.
-Descriptors Validation for NCM/MBIM Functions.
-"""
-
-job.run_test('cellular_MbimComplianceDescriptor',
-             subtest_name='cellular_MbimComplianceDES01')
diff --git a/client/site_tests/cellular_MbimComplianceDescriptor/control.DES02 b/client/site_tests/cellular_MbimComplianceDescriptor/control.DES02
deleted file mode 100644
index 47000a9..0000000
--- a/client/site_tests/cellular_MbimComplianceDescriptor/control.DES02
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceDescriptor.DES02"
-PURPOSE = """
-MBIM Compliance Test: Descriptors Validation for MBIM Only Functions.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-DES_02 Test listed in the MBIM Compliance Test Suite specification.
-Descriptors Validation for MBIM Only Functions.
-"""
-
-job.run_test('cellular_MbimComplianceDescriptor',
-             subtest_name='cellular_MbimComplianceDES02')
diff --git a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR01.py b/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR01.py
deleted file mode 100644
index 597d286..0000000
--- a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR01.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import connect_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceERR01(mbim_test_base.MbimTestBase):
-    """
-    Validation of function's response to messages with variable-length encoding
-    errors.
-
-    This test verifies that incoming messages are rejected when variable-length
-    encoding rules are not followed.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 45
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run ERR_01 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        open_sequence = mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context)
-        open_sequence.run()
-
-        # Step 1
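-        # The connect request is sent with a deliberately corrupted
-        # variable-length field offset; the function is expected to reject it
-        # with MBIM_STATUS_INVALID_PARAMETERS (checked in step 3).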
-        request_message, response_message, _ = (
-                connect_sequence.ConnectSequence(self.device_context).run(
-                        introduce_error_in_access_offset=True,
-                        raise_exception_on_failure=False))
-
-        # Step 2
-        if ((response_message.transaction_id !=
-             request_message.transaction_id) or
-            (response_message.device_service_id !=
-             request_message.device_service_id) or
-            (response_message.cid != request_message.cid)):
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceTestError,
-                    'Mismatch in request/response message params: '
-                    '(transaction_id, service_id, cid). '
-                    'Request Message: (%s, %s, %s), '
-                    'Response Message: (%s, %s, %s)' % (
-                        request_message.transaction_id,
-                        request_message.device_service_id,
-                        request_message.cid,
-                        response_message.transaction_id,
-                        response_message.device_service_id,
-                        response_message.cid))
-
-        # Step 3
-        if ((response_message.message_type !=
-             mbim_constants.MBIM_COMMAND_DONE) or
-            (response_message.status_codes !=
-             mbim_constants.MBIM_STATUS_INVALID_PARAMETERS)):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:10.3#2')
diff --git a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR02.py b/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR02.py
deleted file mode 100644
index 730add3..0000000
--- a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR02.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import connect_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceERR02(mbim_test_base.MbimTestBase):
-    """
-    Validation of issuing the error message.
-
-    This test verifies that an error message with status code
-    MBIM_ERROR_FRAGMENT_OUT_OF_SEQUENCE is issued when fragments are received
-    in the wrong order.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 45
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run ERR_02 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        open_sequence = mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context)
-        open_sequence.run(max_control_transfer_size=64)
-
-        # Step 1
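-        # The 64-byte limit on control transfers set above forces the connect
-        # request to be fragmented; the fragments are deliberately reordered
-        # ([1, 0, 2]) so the function sees them out of sequence.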
-        request_message, response_message, _ = (
-                connect_sequence.ConnectSequence(self.device_context).run(
-                        introduce_error_in_packets_order=[1, 0, 2],
-                        raise_exception_on_failure=False))
-
-        # Step 2
-        if ((response_message.message_type !=
-             mbim_constants.MBIM_FUNCTION_ERROR_MSG) or
-            (response_message.error_status_code !=
-             mbim_constants.MBIM_ERROR_FRAGMENT_OUT_OF_SEQUENCE)):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.3.4#3')
diff --git a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR03.py b/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR03.py
deleted file mode 100644
index 2a15a47..0000000
--- a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR03.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import connect_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceERR03(mbim_test_base.MbimTestBase):
-    """
-    Validation of error message transaction Id.
-
-    This test verifies that the transaction Id of an error message with
-    status code MBIM_ERROR_FRAGMENT_OUT_OF_SEQUENCE is the same as the
-    transaction Id of the incorrectly fragmented message.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 45
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run ERR_03 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        open_sequence = mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context)
-        open_sequence.run(max_control_transfer_size=64)
-
-        # Step 1
-        request_message, response_message, _ = (
-                connect_sequence.ConnectSequence(self.device_context).run(
-                        introduce_error_in_packets_order=[1, 0, 2],
-                        raise_exception_on_failure=False))
-
-        # Step 2
-        if ((response_message.message_type !=
-             mbim_constants.MBIM_FUNCTION_ERROR_MSG) or
-            (response_message.transaction_id !=
-             request_message.transaction_id) or
-            (response_message.error_status_code !=
-             mbim_constants.MBIM_ERROR_FRAGMENT_OUT_OF_SEQUENCE)):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.3.4.2#2')
diff --git a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR04.py b/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR04.py
deleted file mode 100644
index 6fdc1b8..0000000
--- a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR04.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import connect_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceERR04(mbim_test_base.MbimTestBase):
-    """
-    Validation of discarding packets in case of an error.
-
-    This test verifies that, in case of an error message with status code
-    MBIM_ERROR_FRAGMENT_OUT_OF_SEQUENCE, all packets of the message that
-    caused the error are discarded by the function.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 46
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run ERR_04 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        open_sequence = mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context)
-        open_sequence.run(max_control_transfer_size=64)
-
-        # Step 1
-        request_message, response_message, _ = (
-                connect_sequence.ConnectSequence(self.device_context).run(
-                        introduce_error_in_packets_order=[1, 0, 2],
-                        raise_exception_on_failure=False))
-
-        # Step 2
-        if ((response_message.transaction_id !=
-             request_message.transaction_id) or
-            (response_message.message_type ==
-             mbim_constants.MBIM_COMMAND_DONE)):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.3.4.2#3')
diff --git a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR05.py b/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR05.py
deleted file mode 100644
index 89ea008..0000000
--- a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceERR05.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_constants
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_errors
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_base
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import connect_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import get_descriptors_sequence
-from autotest_lib.client.cros.cellular.mbim_compliance.sequences \
-        import mbim_open_generic_sequence
-
-
-class cellular_MbimComplianceERR05(mbim_test_base.MbimTestBase):
-    """
-    Validation of issuing a new error message.
-
-    This test verifies that another error message with status code
-    MBIM_ERROR_FRAGMENT_OUT_OF_SEQUENCE is issued when another message with
-    out-of-order fragments and the same TransactionId is received.
-
-    Reference:
-        [1] Universal Serial Bus Communication Class MBIM Compliance Testing: 46
-        http://www.usb.org/developers/docs/devclass_docs/MBIM-Compliance-1.0.pdf
-
-    """
-    version = 1
-
-    def run_internal(self):
-        """ Run ERR_05 test. """
-        # Precondition
-        descriptors = get_descriptors_sequence.GetDescriptorsSequence(
-                self.device_context).run()
-        self.device_context.update_descriptor_cache(descriptors)
-        open_sequence = mbim_open_generic_sequence.MBIMOpenGenericSequence(
-                self.device_context)
-        open_sequence.run(max_control_transfer_size=64)
-
-        # Step 1
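-        # The fragment order [1, 1] sends the same fragment twice, yielding
-        # out-of-sequence fragments with the same TransactionId; both the
-        # immediate response and the pending notification should report the
-        # error.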
-        request_message, first_response_message, notifications = (
-                connect_sequence.ConnectSequence(self.device_context).run(
-                        introduce_error_in_packets_order=[1, 1],
-                        raise_exception_on_failure=False))
-
-        if len(notifications) > 1:
-            mbim_errors.log_and_raise(
-                    mbim_errors.MBIMComplianceTestError,
-                    'Not expecting more than 1 pending response.')
-        second_response_message = notifications[0]
-
-        # Step 2
-        if (((first_response_message.transaction_id !=
-              request_message.transaction_id) or
-             (first_response_message.message_type !=
-              mbim_constants.MBIM_FUNCTION_ERROR_MSG) or
-             (first_response_message.error_status_code !=
-              mbim_constants.MBIM_ERROR_FRAGMENT_OUT_OF_SEQUENCE)) or
-            ((second_response_message.transaction_id !=
-              request_message.transaction_id) or
-             (second_response_message.message_type !=
-              mbim_constants.MBIM_FUNCTION_ERROR_MSG) or
-             (second_response_message.error_status_code !=
-              mbim_constants.MBIM_ERROR_FRAGMENT_OUT_OF_SEQUENCE))):
-            mbim_errors.log_and_raise(mbim_errors.MBIMComplianceAssertionError,
-                                      'mbim1.0:9.3.4.2#4')
diff --git a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceError.py b/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceError.py
deleted file mode 100644
index 40bdade..0000000
--- a/client/site_tests/cellular_MbimComplianceError/cellular_MbimComplianceError.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-import common
-from autotest_lib.client.cros.cellular.mbim_compliance import mbim_test_runner
-
-class cellular_MbimComplianceError(mbim_test_runner.MbimTestRunner):
-    """
-    Main test runner for all the tests within this directory. This is just a
-    harness for invoking the various tests within this directory, since that
-    is not currently supported directly by Autotest.
-
-    """
-    _TEST_AREA_FOLDER = os.path.dirname(__file__)
diff --git a/client/site_tests/cellular_MbimComplianceError/control.ERR01 b/client/site_tests/cellular_MbimComplianceError/control.ERR01
deleted file mode 100644
index 0df7a43..0000000
--- a/client/site_tests/cellular_MbimComplianceError/control.ERR01
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceError.ERR01"
-PURPOSE = """
-MBIM Compliance Test: Validation of function's response to messages
-with variable-length encoding errors.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-ERR_01 Test listed in the MBIM Compliance Test Suite specification.
-Validation of function's response to messages with variable-length encoding
-errors.
-"""
-
-job.run_test('cellular_MbimComplianceError',
-             subtest_name='cellular_MbimComplianceERR01')
diff --git a/client/site_tests/cellular_MbimComplianceError/control.ERR02 b/client/site_tests/cellular_MbimComplianceError/control.ERR02
deleted file mode 100644
index ff1cf52..0000000
--- a/client/site_tests/cellular_MbimComplianceError/control.ERR02
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceError.ERR02"
-PURPOSE = """
-MBIM Compliance Test: Validation of issuing the error message.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-ERR_02 Test listed in the MBIM Compliance Test Suite specification.
-Validation of issuing the error message.
-"""
-
-job.run_test('cellular_MbimComplianceError',
-             subtest_name='cellular_MbimComplianceERR02')
diff --git a/client/site_tests/cellular_MbimComplianceError/control.ERR03 b/client/site_tests/cellular_MbimComplianceError/control.ERR03
deleted file mode 100644
index 6ebd2e7..0000000
--- a/client/site_tests/cellular_MbimComplianceError/control.ERR03
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceError.ERR03"
-PURPOSE = """
-MBIM Compliance Test: Validation of error message transaction Id.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-ERR_03 Test listed in the MBIM Compliance Test Suite specification.
-Validation of error message transaction Id.
-"""
-
-job.run_test('cellular_MbimComplianceError',
-             subtest_name='cellular_MbimComplianceERR03')
diff --git a/client/site_tests/cellular_MbimComplianceError/control.ERR04 b/client/site_tests/cellular_MbimComplianceError/control.ERR04
deleted file mode 100644
index 3a3ef6f..0000000
--- a/client/site_tests/cellular_MbimComplianceError/control.ERR04
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceError.ERR04"
-PURPOSE = """
-MBIM Compliance Test: Validation of discarding packets in case of an error.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-ERR_04 Test listed in the MBIM Compliance Test Suite specification.
-Validation of discarding packets in case of an error.
-"""
-
-job.run_test('cellular_MbimComplianceError',
-             subtest_name='cellular_MbimComplianceERR04')
diff --git a/client/site_tests/cellular_MbimComplianceError/control.ERR05 b/client/site_tests/cellular_MbimComplianceError/control.ERR05
deleted file mode 100644
index 0979410..0000000
--- a/client/site_tests/cellular_MbimComplianceError/control.ERR05
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_MbimComplianceError.ERR05"
-PURPOSE = """
-MBIM Compliance Test: Validation of issuing a new error message.
-"""
-CRITERIA = """
-This test will fail if the modem is not MBIM compliant.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:cellular_mbim_compliance"
-
-DOC = """
-ERR_05 Test listed in the MBIM Compliance Test Suite specification.
-Validation of issuing a new error message.
-"""
-
-job.run_test('cellular_MbimComplianceError',
-             subtest_name='cellular_MbimComplianceERR05')
diff --git a/client/site_tests/cellular_ModemControl/cellular_ModemControl.py b/client/site_tests/cellular_ModemControl/cellular_ModemControl.py
deleted file mode 100644
index 6fef19c..0000000
--- a/client/site_tests/cellular_ModemControl/cellular_ModemControl.py
+++ /dev/null
@@ -1,423 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import contextlib
-import dbus
-import logging
-import random
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.cellular import cellular
-from autotest_lib.client.cros.networking import cellular_proxy
-from autotest_lib.client.cros.networking import shill_context
-from autotest_lib.client.cros.networking import shill_proxy
-
-# Number of seconds we wait for the cellular service to perform an action.
-DEVICE_TIMEOUT = 45
-SERVICE_TIMEOUT = 75
-
-# Number of times and seconds between modem state checks to ensure that the
-# modem is not in a temporary transition state.
-NUM_MODEM_STATE_CHECKS = 2
-MODEM_STATE_CHECK_PERIOD_SECONDS = 5
-
-# Number of seconds to sleep after a connect request in slow-connect mode.
-SLOW_CONNECT_WAIT_SECONDS = 20
-
-# Number of seconds to sleep after a disable request in slow-disable mode.
-SLOW_DISABLE_WAIT_SECONDS = 2
-
-
-class TechnologyCommands():
-    """Control the modem mostly using shill Technology interfaces."""
-    def __init__(self, shill, command_delegate, slow_disable):
-        self.shill = shill
-        self.command_delegate = command_delegate
-        self.slow_disable = slow_disable
-
-    def Enable(self):
-        self.shill.manager.EnableTechnology(
-                shill_proxy.ShillProxy.TECHNOLOGY_CELLULAR)
-
-    def Disable(self):
-        self.shill.manager.DisableTechnology(
-                shill_proxy.ShillProxy.TECHNOLOGY_CELLULAR)
-        if self.slow_disable:
-            time.sleep(SLOW_DISABLE_WAIT_SECONDS)
-            #TODO(pholla): Make ModemManager return a response only after
-            #QC Modem is confirmed to be idle (b/160446543)
-
-    def Connect(self, **kwargs):
-        self.command_delegate.Connect(**kwargs)
-
-    def Disconnect(self):
-        return self.command_delegate.Disconnect()
-
-    def __str__(self):
-        return 'Technology Commands'
-
-
-class ModemCommands():
-    """Control the modem using modem manager DBUS interfaces."""
-    def __init__(self, modem, slow_connect, slow_disable):
-        self.modem = modem
-        self.slow_connect = slow_connect
-        self.slow_disable = slow_disable
-
-    def Enable(self):
-        self.modem.Enable(True)
-
-    def Disable(self):
-        self.modem.Enable(False)
-        if self.slow_disable:
-            time.sleep(SLOW_DISABLE_WAIT_SECONDS)
-            #TODO(pholla): Make ModemManager return a response only after
-            #QC Modem is confirmed to be idle (b/160446543)
-
-    def Connect(self, simple_connect_props):
-        logging.debug('Connecting with properties: %r' % simple_connect_props)
-        self.modem.Connect(simple_connect_props)
-        if self.slow_connect:
-            time.sleep(SLOW_CONNECT_WAIT_SECONDS)
-
-    def Disconnect(self):
-        """
-        Disconnect Modem.
-
-        Returns:
-            True - to indicate that shill may autoconnect again.
-        """
-        try:
-            self.modem.Disconnect()
-        except dbus.DBusException as e:
-            if (e.get_dbus_name() !=
-                    'org.chromium.ModemManager.Error.OperationInitiated'):
-                raise e
-        return True
-
-    def __str__(self):
-        return 'Modem Commands'
-
-
-class DeviceCommands():
-    """Control the modem using shill device interfaces."""
-    def __init__(self, shill, device, slow_connect, slow_disable):
-        self.shill = shill
-        self.device = device
-        self.slow_connect = slow_connect
-        self.slow_disable = slow_disable
-        self.service = None
-
-    def GetService(self):
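-        """Return the cellular service, failing the test if it is missing."""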
-        service = self.shill.find_cellular_service_object()
-        if not service:
-            raise error.TestFail(
-                'Service failed to appear when using device commands.')
-        return service
-
-    def Enable(self):
-        self.device.Enable(timeout=DEVICE_TIMEOUT)
-
-    def Disable(self):
-        self.service = None
-        self.device.Disable(timeout=DEVICE_TIMEOUT)
-        if self.slow_disable:
-            time.sleep(SLOW_DISABLE_WAIT_SECONDS)
-            #TODO(pholla): Make ModemManager return a response only after
-            #QC Modem is confirmed to be idle (b/160446543)
-
-    def Connect(self, **kwargs):
-        self.GetService().Connect()
-        if self.slow_connect:
-            time.sleep(SLOW_CONNECT_WAIT_SECONDS)
-
-    def Disconnect(self):
-        """
-        Disconnect Modem.
-
-        Returns:
-            False - to indicate that shill may not autoconnect again.
-        """
-        self.GetService().Disconnect()
-        return False
-
-    def __str__(self):
-        return 'Device Commands'
-
-
-class MixedRandomCommands():
-    """Control the modem using a mixture of commands on device, modems, etc."""
-    def __init__(self, commands_list):
-        self.commands_list = commands_list
-
-    def PickRandomCommands(self):
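-        """Return one of the available command sets, chosen at random."""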
-        return random.choice(self.commands_list)
-
-    def Enable(self):
-        cmds = self.PickRandomCommands()
-        logging.info('Enable with %s' % cmds)
-        cmds.Enable()
-
-    def Disable(self):
-        cmds = self.PickRandomCommands()
-        logging.info('Disable with %s' % cmds)
-        cmds.Disable()
-
-    def Connect(self, **kwargs):
-        cmds = self.PickRandomCommands()
-        logging.info('Connect with %s' % cmds)
-        cmds.Connect(**kwargs)
-
-    def Disconnect(self):
-        cmds = self.PickRandomCommands()
-        logging.info('Disconnect with %s' % cmds)
-        return cmds.Disconnect()
-
-    def __str__(self):
-        return 'Mixed Commands'
-
-
-class cellular_ModemControl(test.test):
-    version = 1
-
-    def CompareModemPowerState(self, modem, expected_state):
-        """Compare modem manager power state of a modem to an expected state."""
-        return modem.IsEnabled() == expected_state
-
-    def CompareDevicePowerState(self, device, expected_state):
-        """Compare the shill device power state to an expected state."""
-        state = self.test_env.shill.get_dbus_property(
-                device, shill_proxy.ShillProxy.DEVICE_PROPERTY_POWERED)
-        logging.info('Device Enabled = %s' % state)
-        return state == expected_state
-
-    def CompareServiceState(self, service, expected_states):
-        """Compare the shill service state to a set of expected states."""
-        if not service:
-            logging.info('Service not found.')
-            return False
-
-        state = self.test_env.shill.get_dbus_property(
-                service, shill_proxy.ShillProxy.SERVICE_PROPERTY_STATE)
-        logging.info('Service State = %s' % state)
-        return state in expected_states
-
-    def EnsureNotConnectingOrDisconnecting(self):
-        """
-        Ensure modem is not connecting or disconnecting.
-
-        Raises:
-            error.TestFail if it timed out waiting for the modem to finish
-            connecting or disconnecting.
-        """
-        # Shill retries a failed connect attempt with a different APN so
-        # check a few times to ensure the modem is not in between connect
-        # attempts.
-        for _ in range(NUM_MODEM_STATE_CHECKS):
-            utils.poll_for_condition(
-                lambda: not self.test_env.modem.IsConnectingOrDisconnecting(),
-                error.TestFail('Timed out waiting for modem to finish ' +
-                               'connecting or disconnecting.'),
-                timeout=SERVICE_TIMEOUT)
-            time.sleep(MODEM_STATE_CHECK_PERIOD_SECONDS)
-
-    def EnsureDisabled(self):
-        """
-        Ensure modem disabled, device powered off, and no service.
-
-        Raises:
-            error.TestFail if the states are not consistent.
-        """
-        utils.poll_for_condition(
-            lambda: self.CompareModemPowerState(self.test_env.modem, False),
-            error.TestFail('Modem failed to enter state Disabled.'))
-        utils.poll_for_condition(
-            lambda: self.CompareDevicePowerState(self.device, False),
-            error.TestFail('Device failed to enter state Powered=False.'))
-        utils.poll_for_condition(
-            lambda: not self.test_env.shill.find_cellular_service_object(),
-            error.TestFail('Service should not be available.'),
-            timeout=SERVICE_TIMEOUT)
-
-    def EnsureEnabled(self, check_idle):
-        """
-        Ensure modem enabled, device powered and service exists.
-
-        Args:
-            check_idle: if True, then ensure that the service is idle
-                        (i.e. not connected) otherwise ignore the
-                        service state
-
-        Raises:
-            error.TestFail if the states are not consistent.
-        """
-        utils.poll_for_condition(
-            lambda: self.CompareModemPowerState(self.test_env.modem, True),
-            error.TestFail('Modem failed to enter state Enabled'))
-        utils.poll_for_condition(
-            lambda: self.CompareDevicePowerState(self.device, True),
-            error.TestFail('Device failed to enter state Powered=True.'),
-            timeout=30)
-
-        service = self.test_env.shill.wait_for_cellular_service_object()
-        if check_idle:
-            utils.poll_for_condition(
-                lambda: self.CompareServiceState(service, ['idle']),
-                error.TestFail('Service failed to enter idle state.'),
-                timeout=SERVICE_TIMEOUT)
-
-    def EnsureConnected(self):
-        """
-        Ensure modem connected, device powered on, service connected.
-
-        Raises:
-            error.TestFail if the states are not consistent.
-        """
-        self.EnsureEnabled(check_idle=False)
-        utils.poll_for_condition(
-            lambda: self.CompareServiceState(
-                    self.test_env.shill.find_cellular_service_object(),
-                    ['ready', 'portal', 'online']),
-            error.TestFail('Service failed to connect.'),
-            timeout=SERVICE_TIMEOUT)
-
-
-    def TestCommands(self, commands):
-        """
-        Manipulate the modem using modem, device or technology commands.
-
-        Changes the state of the modem in various ways including
-        disable while connected and then verifies the state of the
-        modem manager and shill.
-
-        Raises:
-            error.TestFail if the states are not consistent.
-
-        """
-        logging.info('Testing using %s' % commands)
-
-        logging.info('Enabling')
-        commands.Enable()
-        self.EnsureEnabled(check_idle=not self.autoconnect)
-
-        technology_family = self.test_env.modem.GetCurrentTechnologyFamily()
-        if technology_family == cellular.TechnologyFamily.CDMA:
-            simple_connect_props = {'number': r'#777'}
-        else:
-            simple_connect_props = {'number': r'#777', 'apn': self.FindAPN()}
-
-        # Icera modems behave weirdly if we cancel the operation while the
-        # modem is connecting. Work around the issue by waiting until the
-        # connect operation completes.
-        # TODO(benchan): Remove this workaround once the issue is addressed
-        # on the modem side.
-        self.EnsureNotConnectingOrDisconnecting()
-
-        logging.info('Disabling')
-        commands.Disable()
-        self.EnsureDisabled()
-
-        logging.info('Enabling again')
-        commands.Enable()
-        self.EnsureEnabled(check_idle=not self.autoconnect)
-
-        if not self.autoconnect:
-            logging.info('Connecting')
-            commands.Connect(simple_connect_props=simple_connect_props)
-        else:
-            logging.info('Expecting AutoConnect to connect')
-        self.EnsureConnected()
-
-        logging.info('Disconnecting')
-        will_autoreconnect = commands.Disconnect()
-
-        if not (self.autoconnect and will_autoreconnect):
-            # Icera modems behave weirdly if we cancel the operation while the
-            # modem is disconnecting. Work around the issue by waiting until
-            # the disconnect operation completes.
-            # TODO(benchan): Remove this workaround once the issue is addressed
-            # on the modem side.
-            self.EnsureNotConnectingOrDisconnecting()
-
-            self.EnsureEnabled(check_idle=True)
-            logging.info(
-                    'Connecting manually; AutoConnect will not reconnect')
-            commands.Connect(simple_connect_props=simple_connect_props)
-        self.EnsureConnected()
-
-        logging.info('Disabling')
-        commands.Disable()
-        self.EnsureDisabled()
-
-    def FindAPN(self):
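-        """Return the last good APN for the cellular service, or 'None'."""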
-        default = 'None'
-        service = self.test_env.shill.find_cellular_service_object()
-        last_good_apn = self.test_env.shill.get_dbus_property(
-                service,
-                cellular_proxy.CellularProxy.SERVICE_PROPERTY_LAST_GOOD_APN)
-        if not last_good_apn:
-            return default
-        return last_good_apn.get(
-                cellular_proxy.CellularProxy.APN_INFO_PROPERTY_APN, default)
-
-    def run_once(self, test_env, autoconnect, mixed_iterations=2,
-                 slow_connect=False, slow_disable=False):
-        self.test_env = test_env
-        self.autoconnect = autoconnect
-
-        with test_env:
-            self.device = self.test_env.shill.find_cellular_device_object()
-
-            modem_commands = ModemCommands(self.test_env.modem,
-                                           slow_connect,
-                                           slow_disable)
-            technology_commands = TechnologyCommands(self.test_env.shill,
-                                                     modem_commands,
-                                                     slow_disable)
-            device_commands = DeviceCommands(self.test_env.shill,
-                                             self.device,
-                                             slow_connect,
-                                             slow_disable)
-
-            # shill disables autoconnect on any cellular service before a user
-            # logs in (CL:851267). To test the autoconnect scenario, we need a
-            # user session to run the test.
-            chrome_context = chrome.Chrome()
-
-            # Set up the autoconnect context after starting a user session so
-            # that we ensure the autoconnect property is set on the cellular
-            # service that may be in the user profile.
-            autoconnect_context = shill_context.ServiceAutoConnectContext(
-                    self.test_env.shill.wait_for_cellular_service_object,
-                    self.autoconnect)
-
-            with contextlib.nested(chrome_context, autoconnect_context):
-                # Start with cellular disabled.
-                self.test_env.shill.manager.DisableTechnology(
-                        shill_proxy.ShillProxy.TECHNOLOGY_CELLULAR)
-                self.EnsureDisabled()
-
-                # Run the device commands test first to make sure we have
-                # a valid APN needed to connect using the modem commands.
-                self.TestCommands(device_commands)
-                self.TestCommands(technology_commands)
-                self.TestCommands(modem_commands)
-
-                # Run several times using commands mixed from each type
-                mixed = MixedRandomCommands([modem_commands,
-                                             technology_commands,
-                                             device_commands])
-                for _ in range(mixed_iterations):
-                    self.TestCommands(mixed)
-
-                # Ensure cellular is re-enabled in order to restore AutoConnect
-                # settings when ServiceAutoConnectContext exits.
-                # TODO(benchan): Refactor this logic into
-                # ServiceAutoConnectContext and update other users of
-                # ServiceAutoConnectContext.
-                self.test_env.shill.manager.EnableTechnology(
-                        shill_proxy.ShillProxy.TECHNOLOGY_CELLULAR)
-                self.EnsureEnabled(check_idle=False)
diff --git a/client/site_tests/cellular_ModemControl/control b/client/site_tests/cellular_ModemControl/control
deleted file mode 100644
index ed7c0d2..0000000
--- a/client/site_tests/cellular_ModemControl/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_ModemControl"
-PURPOSE = "Verify commands sent to a modem manager are reflected in flimflam."
-CRITERIA = """
-This test will fail if modem manager state differs from flimflam state.
-"""
-ATTRIBUTES = "suite:cellular_qual"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-
-DOC = """
-  Tests that commands sent to ModemManager are reflected in flimflam.
-
-  Issues many connect, disconnect, enable, and disable commands to ensure
-  that the modem state is always properly reflected in flimflam -- even
-  if the commands are sent to the modem manager instead of flimflam.
-"""
-
-from autotest_lib.client.cros.cellular import test_environment
-
-test_env = test_environment.CellularOTATestEnvironment()
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=False,
-             tag='no-autoconnect')
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=True,
-             tag='autoconnect')
diff --git a/client/site_tests/cellular_ModemControl/control.att b/client/site_tests/cellular_ModemControl/control.att
deleted file mode 100644
index 8f6ca34..0000000
--- a/client/site_tests/cellular_ModemControl/control.att
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_ModemControl.att"
-PURPOSE = "Verify commands sent to a modem manager are reflected in flimflam."
-CRITERIA = """
-This test will fail if modem manager state differs from flimflam state.
-"""
-ATTRIBUTES = "suite:cellular_ota_flaky"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DEPENDENCIES = "carrier:att"
-
-DOC = """
-  Tests that commands sent to ModemManager are reflected in flimflam.
-
-  Issues many connect, disconnect, enable, and disable commands to ensure
-  that the modem state is always properly reflected in flimflam -- even
-  if the commands are sent to the modem manager instead of flimflam.
-"""
-
-from autotest_lib.client.cros.cellular import test_environment
-
-test_env = test_environment.CellularOTATestEnvironment()
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=False,
-             tag='no-autoconnect')
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=True,
-             tag='autoconnect')
diff --git a/client/site_tests/cellular_ModemControl/control.pseudomodem b/client/site_tests/cellular_ModemControl/control.pseudomodem
deleted file mode 100644
index 62e5c63..0000000
--- a/client/site_tests/cellular_ModemControl/control.pseudomodem
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_ModemControl.pseudomodem"
-PURPOSE = "Verify commands sent to a modem manager are reflected in shill."
-CRITERIA = """
-This test will fail if modem manager state differs from shill state.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-
-DOC = """
-  Tests that commands sent to ModemManager1 are reflected in shill.
-
-  Issues many connect, disconnect, enable, and disable commands to
-  ensure that the modem state is always properly reflected in shill --
-  even if the commands are sent to the modem manager instead of shill.
-  Uses a pseudo modem to simulate modem manager so that this test can
-  be run on a virtual machine.
-"""
-
-from autotest_lib.client.cros.cellular import test_environment
-
-# 3GPP
-test_env = test_environment.CellularPseudoMMTestEnvironment(
-        pseudomm_args=({'family': '3GPP'},))
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=False,
-             tag='no-autoconnect-pseudomodem.3GPP')
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=True,
-             tag='autoconnect-pseudomodem.3GPP')
-
-# CDMA
-test_env = test_environment.CellularPseudoMMTestEnvironment(
-        pseudomm_args=({'family': 'CDMA'},))
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=False,
-             tag='no-autoconnect-pseudomodem.CDMA')
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=True,
-             tag='autoconnect-pseudomodem.CDMA')
diff --git a/client/site_tests/cellular_ModemControl/control.qualcomm b/client/site_tests/cellular_ModemControl/control.qualcomm
deleted file mode 100644
index 8877bc6..0000000
--- a/client/site_tests/cellular_ModemControl/control.qualcomm
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_ModemControl.qualcomm"
-PURPOSE = "Verify commands sent to a modem manager are reflected in flimflam."
-CRITERIA = """
-This test will fail if modem manager state differs from flimflam state.
-"""
-ATTRIBUTES = "suite:cellular_ota_att, suite:cellular_ota_sprint, suite:cellular_ota_verizon"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DEPENDENCIES = "modem:qualcomm"
-
-DOC = """
-  Tests that commands sent to ModemManager are reflected in flimflam.
-
-  Issues many connect, disconnect, enable, and disable commands to ensure
-  that the modem state is always properly reflected in flimflam -- even
-  if the commands are sent to the modem manager instead of flimflam.
-"""
-
-from autotest_lib.client.cros.cellular import test_environment
-
-test_env = test_environment.CellularOTATestEnvironment()
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=False, slow_disable=True,
-             tag='no-autoconnect-qualcomm')
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=True, slow_disable=True,
-             tag='autoconnect-qualcomm')
diff --git a/client/site_tests/cellular_ModemControl/control.sprint b/client/site_tests/cellular_ModemControl/control.sprint
deleted file mode 100644
index 3dc746e..0000000
--- a/client/site_tests/cellular_ModemControl/control.sprint
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_ModemControl.sprint"
-PURPOSE = "Verify commands sent to a modem manager are reflected in flimflam."
-CRITERIA = """
-This test will fail if modem manager state differs from flimflam state.
-"""
-ATTRIBUTES = "suite:suite:cellular_ota_flaky"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DEPENDENCIES = "carrier:sprint"
-
-DOC = """
-  Tests that commands sent to ModemManager are reflected in flimflam.
-
-  Issues many connect, disconnect, enable, and disable commands to ensure
-  that the modem state is always properly reflected in flimflam -- even
-  if the commands are sent to the modem manager instead of flimflam.
-"""
-
-from autotest_lib.client.cros.cellular import test_environment
-
-test_env = test_environment.CellularOTATestEnvironment()
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=False,
-             tag='no-autoconnect')
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=True,
-             tag='autoconnect')
diff --git a/client/site_tests/cellular_ModemControl/control.tmobile b/client/site_tests/cellular_ModemControl/control.tmobile
deleted file mode 100644
index dba3369..0000000
--- a/client/site_tests/cellular_ModemControl/control.tmobile
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_ModemControl.tmobile"
-PURPOSE = "Verify commands sent to a modem manager are reflected in flimflam."
-CRITERIA = """
-This test will fail if modem manager state differs from flimflam state.
-"""
-ATTRIBUTES = "suite:cellular_ota_flaky"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DEPENDENCIES = "carrier:tmobile"
-
-DOC = """
-  Tests that commands sent to ModemManager are reflected in flimflam.
-
-  Issues many connect, disconnect, enable, and disable commands to ensure
-  that the modem state is always properly reflected in flimflam -- even
-  if the commands are sent to the modem manager instead of flimflam.
-"""
-
-from autotest_lib.client.cros.cellular import test_environment
-
-test_env = test_environment.CellularOTATestEnvironment()
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=False,
-             tag='no-autoconnect')
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=True,
-             tag='autoconnect')
diff --git a/client/site_tests/cellular_ModemControl/control.verizon b/client/site_tests/cellular_ModemControl/control.verizon
deleted file mode 100644
index a515de0..0000000
--- a/client/site_tests/cellular_ModemControl/control.verizon
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_ModemControl.verizon"
-PURPOSE = "Verify commands sent to a modem manager are reflected in flimflam."
-CRITERIA = """
-This test will fail if modem manager state differs from flimflam state.
-"""
-ATTRIBUTES = "suite:cellular_ota_flaky"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DEPENDENCIES = "carrier:verizon"
-
-DOC = """
-  Tests that commands sent to ModemManager are reflected in flimflam.
-
-  Issues many connect, disconnect, enable, and disable commands to ensure
-  that the modem state is always properly reflected in flimflam -- even
-  if the commands are sent to the modem manager instead of flimflam.
-"""
-
-from autotest_lib.client.cros.cellular import test_environment
-
-test_env = test_environment.CellularOTATestEnvironment()
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=False,
-             tag='no-autoconnect')
-job.run_test('cellular_ModemControl', test_env=test_env, autoconnect=True,
-             tag='autoconnect')
diff --git a/client/site_tests/cellular_OutOfCreditsSubscriptionState/cellular_OutOfCreditsSubscriptionState.py b/client/site_tests/cellular_OutOfCreditsSubscriptionState/cellular_OutOfCreditsSubscriptionState.py
deleted file mode 100644
index 1a9c951..0000000
--- a/client/site_tests/cellular_OutOfCreditsSubscriptionState/cellular_OutOfCreditsSubscriptionState.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-
-from autotest_lib.client.cros.cellular import mm1_constants
-from autotest_lib.client.cros.cellular import test_environment
-from autotest_lib.client.cros.networking import cellular_proxy
-from autotest_lib.client.cros.networking import pm_proxy
-from autotest_lib.client.cros.networking import shill_context
-
-SHORT_TIMEOUT = 10
-
-
-class cellular_OutOfCreditsSubscriptionState(test.test):
-    """
-    This test verifies that shill out-of-credits behavior works properly based
-    on the modem subscription state.
-
-    """
-    version = 1
-
-    def _initialize_modem(self, subscription_state):
-        # Simulate an Altair 3100 modem since that modem supports subscription
-        # state information.
-        self.test_env.shill.disable_modem_for_test_setup()
-        # TODO(thieule): Set the modem model using the pseudomodem testing
-        # interface (crbug.com/343258).
-        self.modem.iface_properties.Set(
-                mm1_constants.I_MODEM,
-                mm1_constants.MM_MODEM_PROPERTY_NAME_PLUGIN,
-                'Altair LTE')
-        self.pseudomm.iface_testing.SetSubscriptionState(
-                subscription_state)
-        self.test_env.shill.manager.EnableTechnology(
-                cellular_proxy.CellularProxy.TECHNOLOGY_CELLULAR)
-        # Wait for a registered state.
-        self.modem.wait_for_states([mm1_constants.MM_MODEM_STATE_REGISTERED,
-                                    mm1_constants.MM_MODEM_STATE_CONNECTED])
-
-
-    def _is_out_of_credits(self, cellular_service):
-        properties = cellular_service.GetProperties(utf8_strings=True)
-        return properties[cellular_proxy.CellularProxy.
-                          DEVICE_PROPERTY_OUT_OF_CREDITS]
-
-
-    def _test_provisioned(self):
-        logging.info('Initialize modem with provisioned state')
-        self._initialize_modem(
-                mm1_constants.MM_MODEM_3GPP_SUBSCRIPTION_STATE_PROVISIONED)
-        logging.info('Verify out-of-credits is not set in cellular service')
-        cellular_service = \
-                self.test_env.shill.wait_for_cellular_service_object()
-        if self._is_out_of_credits(cellular_service):
-            error_msg = 'Service marked as out-of-credits when it ' \
-                        'should not be.'
-            logging.error(error_msg)
-            raise error.TestFail(error_msg)
-
-
-    def _test_out_of_credits_at_start(self):
-        logging.info('Initialize modem with out-of-credits state')
-        self._initialize_modem(
-                mm1_constants.MM_MODEM_3GPP_SUBSCRIPTION_STATE_OUT_OF_DATA)
-        logging.info('Verify out-of-credits is set in cellular service')
-        cellular_service = \
-                self.test_env.shill.wait_for_cellular_service_object()
-        if not self._is_out_of_credits(cellular_service):
-            error_msg = 'Service not marked out-of-credits when it ' \
-                        'should be.'
-            logging.error(error_msg)
-            raise error.TestFail(error_msg)
-
-
-    def _test_out_of_credits_while_connected(self):
-        logging.info('Initialize modem with provisioned state')
-        self._initialize_modem(
-                mm1_constants.MM_MODEM_3GPP_SUBSCRIPTION_STATE_PROVISIONED)
-        cellular_service = \
-                self.test_env.shill.wait_for_cellular_service_object()
-        logging.info('Mark modem as out-of-credits')
-        self.pseudomm.iface_testing.SetSubscriptionState(
-                mm1_constants.MM_MODEM_3GPP_SUBSCRIPTION_STATE_OUT_OF_DATA)
-        logging.info('Verify out-of-credits set in cellular service')
-        try:
-            utils.poll_for_condition(
-                    lambda: self._is_out_of_credits(cellular_service),
-                    exception=error.TestFail('Service failed to be marked as '
-                                             'out-of-credits.'),
-                    timeout=SHORT_TIMEOUT)
-        except error.TestFail as e:
-            logging.error(repr(e))
-            raise e
-
-
-    def run_once(self):
-        """Calls by autotest to run this test."""
-        self.test_env = test_environment.CellularPseudoMMTestEnvironment(
-                pseudomm_args=({'family': '3GPP'},))
-        with self.test_env, shill_context.ServiceAutoConnectContext(
-                self.test_env.shill.wait_for_cellular_service_object, False):
-            self.pseudomm = pm_proxy.PseudoMMProxy.get_proxy()
-            self.modem = self.pseudomm.get_modem()
-
-            self._test_provisioned()
-            self._test_out_of_credits_at_start()
-            self._test_out_of_credits_while_connected()
diff --git a/client/site_tests/cellular_OutOfCreditsSubscriptionState/control.pseudomodem b/client/site_tests/cellular_OutOfCreditsSubscriptionState/control.pseudomodem
deleted file mode 100644
index a96a5ec..0000000
--- a/client/site_tests/cellular_OutOfCreditsSubscriptionState/control.pseudomodem
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_OutOfCreditsSubscriptionState.pseudomodem"
-PURPOSE = "Verify shill out-of-credits behavior using subscription state."
-CRITERIA = """
-This test will fail if one of the following conditions occurs:
-  - Shill does not mark a cellular service as out-of-credits when the modem
-    reports it as so.
-  - Shill does not clear the out-of-credits flag when the subscription state
-    transitions to provisioned after being out-of-credits.
-"""
-ATTRIBUTES = "suite:cellular_pseudomodem"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DOC = """
-  Tests that shill marks a cellular service as out-of-credits according to the
-  modem subscription state by using a pseudomodem.
-"""
-
-job.run_test('cellular_OutOfCreditsSubscriptionState')
diff --git a/client/site_tests/cellular_SIMLocking/cellular_SIMLocking.py b/client/site_tests/cellular_SIMLocking/cellular_SIMLocking.py
index da932a5..435bcd1 100644
--- a/client/site_tests/cellular_SIMLocking/cellular_SIMLocking.py
+++ b/client/site_tests/cellular_SIMLocking/cellular_SIMLocking.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -82,7 +83,7 @@
 
     def _get_sim_lock_status(self):
         """ Helper method to safely obtain SIM lock status. """
-        properties = self.device.GetProperties(utf8_strings=True)
+        properties = self.device.GetProperties()
         sim_lock_status = properties.get(
                 self.test_env.shill.DEVICE_PROPERTY_SIM_LOCK_STATUS,
                 None)
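
The only functional change in this hunk is dropping utf8_strings=True from GetProperties(); under Python 3, dbus-python already maps D-Bus strings to str, so the keyword is no longer needed. A small sketch of the helper after the change, with a hypothetical fake proxy standing in for the real shill Device object and its DEVICE_PROPERTY_SIM_LOCK_STATUS constant (the property key below is only an assumed example value):

def get_sim_lock_status(device, key):
    """Return the SIM lock status property from a shill Device proxy, or None.

    |device| is any object exposing GetProperties() -> dict; |key| stands in
    for shill_proxy.ShillProxy.DEVICE_PROPERTY_SIM_LOCK_STATUS.
    """
    properties = device.GetProperties()
    return properties.get(key, None)

class FakeDevice(object):
    """Hypothetical stand-in used only to exercise the helper above."""
    def GetProperties(self):
        return {'Cellular.SIMLockStatus': {'LockType': 'sim-pin'}}

print(get_sim_lock_status(FakeDevice(), 'Cellular.SIMLockStatus'))
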
diff --git a/client/site_tests/cellular_SIMLocking/control.pseudomodem b/client/site_tests/cellular_SIMLocking/control.pseudomodem
index 1f11ad4..dac5da4 100644
--- a/client/site_tests/cellular_SIMLocking/control.pseudomodem
+++ b/client/site_tests/cellular_SIMLocking/control.pseudomodem
@@ -14,6 +14,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Test the SIM locking functionality of shill.
diff --git a/client/site_tests/cellular_SafetyDance/cellular_SafetyDance.py b/client/site_tests/cellular_SafetyDance/cellular_SafetyDance.py
index bf28a6b..7fe3e3f 100644
--- a/client/site_tests/cellular_SafetyDance/cellular_SafetyDance.py
+++ b/client/site_tests/cellular_SafetyDance/cellular_SafetyDance.py
@@ -1,17 +1,29 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus
 import logging
 import random
 import time
 
-from autotest_lib.client.bin import test
+from six.moves import range
+
+from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.cellular import mm1_constants
 from autotest_lib.client.cros.networking import cellular_proxy
 from autotest_lib.client.cros.networking import shill_context
 from autotest_lib.client.cros.networking import shill_proxy
+from autotest_lib.client.cros.networking import mm1_proxy
+
+SERVICE_DISABLE_TIMEOUT = 60
+SERVICE_ENABLE_TIMEOUT = 60
 
 
 class cellular_SafetyDance(test.test):
@@ -29,13 +41,35 @@
         v = None
         try:
             v = fn()
-        except dbus.exceptions.DBusException, error:
+        except dbus.exceptions.DBusException as error:
             if error.get_dbus_name() in self.okerrors:
                 return v, error.get_dbus_message()
             else:
                 raise error
         return v, ''
 
+    def _ensure_disabled(self):
+        """
+        Ensure modem is disabled.
+
+        Raises:
+            error.TestFail if the states are not consistent.
+        """
+
+        # b/188448918 : QC modems indicate that they are disabled even if they
+        # are enabled. There is no way to know when the disable completed until
+        # b/188448918 is fixed, and MM receives power state indications from the
+        # modem. The sleep can be removed once b/188448918 is fixed.
+        time.sleep(2)
+
+        utils.poll_for_condition(
+                lambda: not self.test_env.modem.IsEnabled(),
+                error.TestFail('Modem failed to enter state Disabled.'))
+        utils.poll_for_condition(
+                lambda: not self.test_env.shill.find_cellular_service_object(),
+                error.TestFail('Service should not be available.'),
+                timeout=SERVICE_DISABLE_TIMEOUT)
+
     def _enable(self):
         logging.info('Enable')
         self._filterexns(lambda:
@@ -45,6 +79,7 @@
         logging.info('Disable')
         self._filterexns(lambda:
             self.test_env.shill.manager.DisableTechnology('cellular'))
+        self._ensure_disabled()
 
     def _ignoring(self, reason):
         if ('AlreadyConnected' in reason or
@@ -68,6 +103,15 @@
         except shill_proxy.ShillProxyError:
             return
 
+        mm_proxy = mm1_proxy.ModemManager1Proxy.get_proxy()
+        if not mm_proxy:
+            raise error.TestFail('Could not get mm_proxy')
+        modem_proxy = mm_proxy.get_modem()
+        modem_proxy.wait_for_states([
+                mm1_constants.MM_MODEM_STATE_REGISTERED,
+                mm1_constants.MM_MODEM_STATE_CONNECTED
+        ])
+
         success, reason = self._filterexns(lambda:
                 self.test_env.shill.connect_service_synchronous(
                         service=service,
@@ -119,7 +163,7 @@
         self._disable()
         logging.info('Seed: %d', seed)
         random.seed(seed)
-        for _ in xrange(ops):
+        for _ in range(ops):
             self._op()
 
     def run_once(self, test_env, ops=30, seed=None):
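
Both the new _ensure_disabled() helper and the registered/connected wait added to _connect() rely on the same poll-until-condition idiom that utils.poll_for_condition provides. A minimal standalone sketch of that idiom, assuming only a generic no-argument is_enabled callable in place of the real shill and ModemManager proxies:

import time

def poll_until(condition, timeout_s=60, interval_s=0.5):
    """Poll |condition| until it returns a truthy value or |timeout_s| elapses."""
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        if condition():
            return True
        time.sleep(interval_s)
    return False

def wait_for_disable(is_enabled, timeout_s=60):
    """Fail if the modem (a stand-in callable here) never reports disabled."""
    # Mirrors _ensure_disabled: poll the negated enabled state until it holds.
    if not poll_until(lambda: not is_enabled(), timeout_s):
        raise RuntimeError('Modem failed to enter state Disabled.')

wait_for_disable(lambda: False)  # Reports disabled at once, so this returns immediately.

The deliberate two-second sleep in the real helper is a stopgap for b/188448918, where the modem can report itself disabled before the disable has actually completed, so the polled state alone cannot be trusted.
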
diff --git a/client/site_tests/cellular_SafetyDance/control b/client/site_tests/cellular_SafetyDance/control
index ec98a13..8df2200 100644
--- a/client/site_tests/cellular_SafetyDance/control
+++ b/client/site_tests/cellular_SafetyDance/control
@@ -1,24 +1,25 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 AUTHOR = "ChromeOS Team"
 NAME = "cellular_SafetyDance"
-PURPOSE = "Stress-test all connman 3G operations."
+PURPOSE = "Stress-test all common 4G operations."
 CRITERIA = """
 This test will fail if any DBus call times out or flimflam crashes.
 """
-ATTRIBUTES = "suite:cellular_qual"
+ATTRIBUTES = "suite:cellular_ota_avl"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
-  Stress-tests all connman 3G operations.
+  Stress-tests all common 4G operations.
 
-  This test runs a long series of 3G operations in pseudorandom order. All of
-  these 3G operations must return a convincing result (EINPROGRESS or no error).
+  This test runs a long series of 4G operations in pseudorandom order. All of
+  these 4G operations must return a convincing result (EINPROGRESS or no error).
 """
 
 from autotest_lib.client.cros.cellular import test_environment
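
As with the other carrier control files, this one only builds a CellularOTATestEnvironment and hands it to job.run_test; the pseudorandom sequencing the DOC describes lives in the test's seed handling and range(ops) loop shown above. A self-contained sketch of that sequencing, assuming each operation is a plain no-argument callable rather than a shill or ModemManager call:

import random

def run_random_ops(operations, ops=30, seed=None):
    """Run |ops| operations chosen pseudorandomly from |operations|."""
    if seed is None:
        seed = random.randint(0, 2 ** 32 - 1)
    # Logging the seed makes a failing sequence reproducible: rerunning with
    # the same seed replays exactly the same order of operations.
    print('Seed: %d' % seed)
    random.seed(seed)
    for _ in range(ops):
        random.choice(operations)()

run_random_ops([lambda: print('enable'),
                lambda: print('disable'),
                lambda: print('connect')], ops=5, seed=1)
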
diff --git a/client/site_tests/cellular_SafetyDance/control.amarisoft b/client/site_tests/cellular_SafetyDance/control.amarisoft
new file mode 100644
index 0000000..4eb759d
--- /dev/null
+++ b/client/site_tests/cellular_SafetyDance/control.amarisoft
@@ -0,0 +1,29 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SafetyDance.amarisoft"
+PURPOSE = "Stress-test all common 3G/4G operations."
+CRITERIA = """
+This test will fail if any DBus call times out or flimflam crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:amarisoft"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests all common 3G/4G operations.
+
+  This test runs a long series of 3G/4G operations in pseudorandom order. All of
+  these 3G/4G operations must return a convincing result (EINPROGRESS or no error).
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_SafetyDance', test_env=test_env)
diff --git a/client/site_tests/cellular_SafetyDance/control.att b/client/site_tests/cellular_SafetyDance/control.att
index 64f6b24..e0ae77d 100644
--- a/client/site_tests/cellular_SafetyDance/control.att
+++ b/client/site_tests/cellular_SafetyDance/control.att
@@ -14,6 +14,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:att"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests all connman 3G operations.
diff --git a/client/site_tests/cellular_SafetyDance/control.docomo b/client/site_tests/cellular_SafetyDance/control.docomo
new file mode 100644
index 0000000..2b64d9f
--- /dev/null
+++ b/client/site_tests/cellular_SafetyDance/control.docomo
@@ -0,0 +1,29 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SafetyDance.docomo"
+PURPOSE = "Stress-test all connman 3G operations."
+CRITERIA = """
+This test will fail if any DBus call times out or flimflam crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:docomo"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests all connman 3G operations.
+
+  This test runs a long series of 3G operations in pseudorandom order. All of
+  these 3G operations must return a convincing result (EINPROGRESS or no error).
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_SafetyDance', test_env=test_env)
diff --git a/client/site_tests/cellular_SafetyDance/control.ee b/client/site_tests/cellular_SafetyDance/control.ee
new file mode 100644
index 0000000..aaa7c66
--- /dev/null
+++ b/client/site_tests/cellular_SafetyDance/control.ee
@@ -0,0 +1,29 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SafetyDance.ee"
+PURPOSE = "Stress-test all connman 3G operations."
+CRITERIA = """
+This test will fail if any DBus call times out or flimflam crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:ee"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests all connman 3G operations.
+
+  This test runs a long series of 3G operations in pseudorandom order. All of
+  these 3G operations must return a convincing result (EINPROGRESS or no error).
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_SafetyDance', test_env=test_env)
diff --git a/client/site_tests/cellular_SafetyDance/control.kddi b/client/site_tests/cellular_SafetyDance/control.kddi
new file mode 100644
index 0000000..e9a816d
--- /dev/null
+++ b/client/site_tests/cellular_SafetyDance/control.kddi
@@ -0,0 +1,29 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SafetyDance.kddi"
+PURPOSE = "Stress-test all connman 3G operations."
+CRITERIA = """
+This test will fail if any DBus call times out or flimflam crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:kddi"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests all connman 3G operations.
+
+  This test runs a long series of 3G operations in pseudorandom order. All of
+  these 3G operations must return a convincing result (EINPROGRESS or no error).
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_SafetyDance', test_env=test_env)
diff --git a/client/site_tests/cellular_SafetyDance/control.pseudomodem b/client/site_tests/cellular_SafetyDance/control.pseudomodem
index 35922f6..3a49999 100644
--- a/client/site_tests/cellular_SafetyDance/control.pseudomodem
+++ b/client/site_tests/cellular_SafetyDance/control.pseudomodem
@@ -8,11 +8,11 @@
 CRITERIA = """
 This test will fail if any DBus call times out or flimflam crashes.
 """
-ATTRIBUTES = "suite:cellular_pseudomodem"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests all connman 3G operations.
diff --git a/client/site_tests/cellular_SafetyDance/control.rakuten b/client/site_tests/cellular_SafetyDance/control.rakuten
new file mode 100644
index 0000000..0637243
--- /dev/null
+++ b/client/site_tests/cellular_SafetyDance/control.rakuten
@@ -0,0 +1,29 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SafetyDance.rakuten"
+PURPOSE = "Stress-test all connman 3G operations."
+CRITERIA = """
+This test will fail if any DBus call times out or flimflam crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:rakuten"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests all connman 3G operations.
+
+  This test runs a long series of 3G operations in pseudorandom order. All of
+  these 3G operations must return a convincing result (EINPROGRESS or no error).
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_SafetyDance', test_env=test_env)
diff --git a/client/site_tests/cellular_SafetyDance/control.softbank b/client/site_tests/cellular_SafetyDance/control.softbank
new file mode 100644
index 0000000..2f84c9c
--- /dev/null
+++ b/client/site_tests/cellular_SafetyDance/control.softbank
@@ -0,0 +1,29 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SafetyDance.softbank"
+PURPOSE = "Stress-test all connman 3G operations."
+CRITERIA = """
+This test will fail if any DBus call times out or flimflam crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:softbank"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests all connman 3G operations.
+
+  This test runs a long series of 3G operations in pseudorandom order. All of
+  these 3G operations must return a convincing result (EINPROGRESS or no error).
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_SafetyDance', test_env=test_env)
diff --git a/client/site_tests/cellular_SafetyDance/control.sprint b/client/site_tests/cellular_SafetyDance/control.sprint
index 1fe94d7..1305235 100644
--- a/client/site_tests/cellular_SafetyDance/control.sprint
+++ b/client/site_tests/cellular_SafetyDance/control.sprint
@@ -14,6 +14,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:sprint"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests all connman 3G operations.
diff --git a/client/site_tests/cellular_SafetyDance/control.tmobile b/client/site_tests/cellular_SafetyDance/control.tmobile
index c22c474..74dd05d 100644
--- a/client/site_tests/cellular_SafetyDance/control.tmobile
+++ b/client/site_tests/cellular_SafetyDance/control.tmobile
@@ -14,6 +14,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:tmobile"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests all connman 3G operations.
diff --git a/client/site_tests/cellular_SafetyDance/control.verizon b/client/site_tests/cellular_SafetyDance/control.verizon
index 36b5028..8e69679 100644
--- a/client/site_tests/cellular_SafetyDance/control.verizon
+++ b/client/site_tests/cellular_SafetyDance/control.verizon
@@ -14,6 +14,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:verizon"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests all connman 3G operations.
diff --git a/client/site_tests/cellular_SafetyDance/control.vodafone b/client/site_tests/cellular_SafetyDance/control.vodafone
new file mode 100644
index 0000000..4bf6065
--- /dev/null
+++ b/client/site_tests/cellular_SafetyDance/control.vodafone
@@ -0,0 +1,29 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SafetyDance.vodafone"
+PURPOSE = "Stress-test all connman 3G operations."
+CRITERIA = """
+This test will fail if any DBus call times out or flimflam crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:vodafone"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests all connman 3G operations.
+
+  This test runs a long series of 3G operations in pseudorandom order. All of
+  these 3G operations must return a convincing result (EINPROGRESS or no error).
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_SafetyDance', test_env=test_env)
diff --git a/client/site_tests/cellular_ScanningProperty/cellular_ScanningProperty.py b/client/site_tests/cellular_ScanningProperty/cellular_ScanningProperty.py
deleted file mode 100644
index f5c1eb0..0000000
--- a/client/site_tests/cellular_ScanningProperty/cellular_ScanningProperty.py
+++ /dev/null
@@ -1,359 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.cellular import mm1_constants
-from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
-from autotest_lib.client.cros.cellular.pseudomodem import pseudomodem_context
-from autotest_lib.client.cros.networking import cellular_proxy
-
-# Used for software message propagation latencies.
-SHORT_TIMEOUT_SECONDS = 2
-STATE_MACHINE_SCAN = 'ScanMachine'
-TEST_MODEMS_MODULE_PATH = os.path.join(os.path.dirname(__file__), 'files',
-                                       'modems.py')
-
-class cellular_ScanningProperty(test.test):
-    """
-    Test that the |Scanning| Property of the shill cellular device object is
-    updated correctly in the following two scenarios:
-      (1) When a user requests a cellular network scan using the |RequestScan|
-          method of the shill Manager interface.
-      (2) During the initial modem enable-register-connect sequence.
-
-    """
-    version = 1
-
-    def _find_mm_modem(self):
-        """
-        Find the modemmanager modem object.
-
-        Assumption: There is only one modem in the system.
-
-        @raises: TestError unless exactly one modem is found.
-
-        """
-        object_manager = dbus.Interface(
-                self._bus.get_object(mm1_constants.I_MODEM_MANAGER,
-                                     mm1_constants.MM1),
-                mm1_constants.I_OBJECT_MANAGER)
-        try:
-            modems = object_manager.GetManagedObjects()
-        except dbus.exceptions.DBusException as e:
-            raise error.TestFail('Failed to list the available modems. '
-                                 'DBus error: |%s|', repr(e))
-        if len(modems) != 1:
-            raise error.TestFail('Expected one modem object, found %d' %
-                                 len(modems))
-
-        modem_path = modems.keys()[0]
-        modem_object = self._bus.get_object(mm1_constants.I_MODEM_MANAGER,
-                                            modem_path)
-        # Check that this object is valid
-        try:
-            modem_object.GetAll(mm1_constants.I_MODEM,
-                                dbus_interface=mm1_constants.I_PROPERTIES)
-        except dbus.exceptions.DBusException as e:
-            raise error.TestFail('Failed to obtain dbus object for the modem '
-                                 'DBus error: |%s|', repr(e))
-
-        return dbus.Interface(modem_object, mm1_constants.I_MODEM)
-
-
-    def _check_mm_state(self, modem, states):
-        """
-        Verify that the modemmanager state is |state|.
-
-        @param modem: A DBus object for the modemmanager modem.
-        @param states: The expected state of the modem. This is either a single
-                state, or a list of states.
-        @raises: TestError if the state differs.
-        """
-        if not isinstance(states, list):
-            states = [states]
-        properties = modem.GetAll(mm1_constants.I_MODEM,
-                                  dbus_interface=mm1_constants.I_PROPERTIES)
-        actual_state = properties[mm1_constants.MM_MODEM_PROPERTY_NAME_STATE]
-        if actual_state not in states:
-            state_names = [mm1_constants.ModemStateToString(x) for x in states]
-            raise error.TestFail(
-                    'Expected modemmanager modem state to be one of %s but '
-                    'found %s' %
-                    (state_names,
-                     mm1_constants.ModemStateToString(actual_state)))
-
-
-    def _check_shill_property_update(self, cellular_device, property_name,
-                                     old_state, new_state):
-        """
-        Check the value of property of shill.
-
-        @param cellular_device: The DBus proxy object for the cellular device.
-        @param property_name: Name of the property to check.
-        @param old_state: old value of property.
-        @param new_state: new expected value of property.
-        @raises: TestError if the property fails to enter the given state.
-
-        """
-        # If we don't expect a change in the value, there is a race between this
-        # check and a possible (erronous) update of the value. Allow some time
-        # for the property to be updated before checking.
-        if old_state == new_state:
-            time.sleep(SHORT_TIMEOUT_SECONDS)
-            polling_timeout = 0
-        else:
-            polling_timeout = SHORT_TIMEOUT_SECONDS
-        success, _, _ = self._cellular_proxy.wait_for_property_in(
-                cellular_device,
-                property_name,
-                (new_state,),
-                timeout_seconds=polling_timeout)
-        if not success:
-            raise error.TestFail('Shill failed to set |%s| to %s.' %
-                                 (property_name, str(new_state)))
-
-
-    def _itesting_machine(self, machine_name, timeout=SHORT_TIMEOUT_SECONDS):
-        """
-        Get the testing interface of the given interactive state machine.
-
-        @param machine_name: The name of the interactive state machine.
-        @return dbus.Interface for the testing interface of
-                InteractiveScanningMachine, if found. None otherwise.
-        @raises utils.TimeoutError if a valid dbus object can't be found.
-
-        """
-        def _get_machine():
-            machine = self._bus.get_object(
-                    mm1_constants.I_MODEM_MANAGER,
-                    '/'.join([pm_constants.TESTING_PATH, machine_name]))
-            if machine:
-                i_machine = dbus.Interface(machine, pm_constants.I_TESTING_ISM)
-                # Only way to know if this DBus object is valid is to call a
-                # method on it.
-                try:
-                    i_machine.IsWaiting()  # Ignore result.
-                    return i_machine
-                except dbus.exceptions.DBusException as e:
-                    logging.debug(e)
-                    return None
-
-        utils.poll_for_condition(_get_machine, timeout=timeout)
-        return _get_machine()
-
-
-    def test_user_initiated_cellular_scan(self):
-        """
-        Test that the |RequestScan| DBus method exported by shill Manager
-        interface correctly updates the cellular object |Scanning| property while
-        the scan is in progress.
-        """
-        with pseudomodem_context.PseudoModemManagerContext(
-                True,
-                {'test-module' : TEST_MODEMS_MODULE_PATH,
-                 'test-modem-class' : 'AsyncScanModem'}):
-            self._cellular_proxy = cellular_proxy.CellularProxy.get_proxy()
-            self._bus = dbus.SystemBus()
-            self._cellular_proxy.set_logging_for_cellular_test()
-
-            logging.info('Sanity check initial values')
-            utils.poll_for_condition(
-                    self._cellular_proxy.find_cellular_device_object,
-                    exception=error.TestFail(
-                            'Bad initial state: Failed to obtain a cellular '
-                            'device in pseudomodem context.'),
-                    timeout=SHORT_TIMEOUT_SECONDS)
-            device = self._cellular_proxy.find_cellular_device_object()
-            try:
-                self._itesting_machine(STATE_MACHINE_SCAN, 0)
-                raise error.TestFail('Bad initial state: scan machine created '
-                                     'by pseudomodem before scan is proposed.')
-            except utils.TimeoutError:
-                pass
-
-            self._check_shill_property_update(
-                    device,
-                    self._cellular_proxy.DEVICE_PROPERTY_SCANNING,
-                    False,
-                    False)
-
-            logging.info('Test actions and checks')
-            self._cellular_proxy.manager.RequestScan(
-                    self._cellular_proxy.TECHNOLOGY_CELLULAR)
-            try:
-                itesting_scan_machine = self._itesting_machine(
-                        STATE_MACHINE_SCAN)
-            except utils.TimeoutError:
-                raise error.TestFail('Pseudomodem failed to launch %s' %
-                                     STATE_MACHINE_SCAN)
-            utils.poll_for_condition(
-                    itesting_scan_machine.IsWaiting,
-                    exception=error.TestFail('Scan machine failed to enter '
-                                             'scan state'),
-                    timeout=SHORT_TIMEOUT_SECONDS)
-            self._check_shill_property_update(
-                    device,
-                    self._cellular_proxy.DEVICE_PROPERTY_SCANNING,
-                    False,
-                    True)
-
-            itesting_scan_machine.Advance()
-            utils.poll_for_condition(
-                    lambda: not itesting_scan_machine.IsWaiting(),
-                    exception=error.TestFail('Scan machine failed to exit '
-                                             'scan state'),
-                    timeout=SHORT_TIMEOUT_SECONDS)
-            self._check_shill_property_update(
-                    device,
-                    self._cellular_proxy.DEVICE_PROPERTY_SCANNING,
-                    True,
-                    False)
-
-
-    def test_activated_service_states(self):
-        """
-        Test that shill |Scanning| property is updated correctly when an
-        activated 3GPP service connects.
-        """
-        with pseudomodem_context.PseudoModemManagerContext(
-                True,
-                {'test-module' : TEST_MODEMS_MODULE_PATH,
-                 'test-state-machine-factory-class' :
-                        'InteractiveStateMachineFactory'}):
-            self._cellular_proxy = cellular_proxy.CellularProxy.get_proxy()
-            self._bus = dbus.SystemBus()
-            self._cellular_proxy.set_logging_for_cellular_test()
-
-            logging.info('Sanity check initial values')
-            enable_machine = self._itesting_machine(
-                    pm_constants.STATE_MACHINE_ENABLE)
-            utils.poll_for_condition(
-                    enable_machine.IsWaiting,
-                    exception=error.TestFail(
-                            'Bad initial state: Pseudomodem did not launch '
-                            'Enable machine'),
-                    timeout=SHORT_TIMEOUT_SECONDS)
-            utils.poll_for_condition(
-                    self._cellular_proxy.find_cellular_device_object,
-                    exception=error.TestFail(
-                            'Bad initial state: Failed to obtain a cellular '
-                            'device in pseudomodem context.'),
-                    timeout=SHORT_TIMEOUT_SECONDS)
-            device = self._cellular_proxy.find_cellular_device_object()
-            mm_modem = self._find_mm_modem()
-
-            logging.info('Test Connect sequence')
-            self._check_mm_state(mm_modem,
-                                 mm1_constants.MM_MODEM_STATE_DISABLED)
-            self._check_shill_property_update(
-                    device,
-                    self._cellular_proxy.DEVICE_PROPERTY_POWERED,
-                    False,
-                    False)
-            self._check_shill_property_update(
-                    device,
-                    self._cellular_proxy.DEVICE_PROPERTY_SCANNING,
-                    False,
-                    False)
-            logging.info('Expectation met: |Scanning| is False in MM state '
-                         'Disabled')
-            enable_machine.Advance()
-
-            # MM state: Enabling
-            utils.poll_for_condition(
-                    enable_machine.IsWaiting,
-                    exception=error.TestFail('EnableMachine failed to wait in '
-                                             'Enabling state'),
-                    timeout=SHORT_TIMEOUT_SECONDS)
-            self._check_mm_state(mm_modem,
-                                 mm1_constants.MM_MODEM_STATE_ENABLING)
-            self._check_shill_property_update(
-                    device,
-                    self._cellular_proxy.DEVICE_PROPERTY_SCANNING,
-                    False,
-                    True)
-            logging.info('Expectation met: |Scanning| is True in MM state '
-                         'Enabling')
-            enable_machine.Advance()
-
-            # MM state: Enabled
-            utils.poll_for_condition(
-                    enable_machine.IsWaiting,
-                    exception=error.TestFail('EnableMachine failed to wait in '
-                                             'Enabled state'),
-                    timeout=SHORT_TIMEOUT_SECONDS)
-            # Finish the enable call.
-            enable_machine.Advance()
-
-            self._check_mm_state(mm_modem, mm1_constants.MM_MODEM_STATE_ENABLED)
-            self._check_shill_property_update(
-                    device,
-                    self._cellular_proxy.DEVICE_PROPERTY_POWERED,
-                    False,
-                    True)
-            self._check_shill_property_update(
-                    device,
-                    self._cellular_proxy.DEVICE_PROPERTY_SCANNING,
-                    True,
-                    True)
-
-            register_machine = self._itesting_machine(
-                    pm_constants.STATE_MACHINE_REGISTER)
-            utils.poll_for_condition(
-                    register_machine.IsWaiting,
-                    exception=error.TestFail('SearchingMachine failed to wait '
-                                             'in Enabled state'),
-                    timeout=SHORT_TIMEOUT_SECONDS)
-            logging.info('Expectation met: |Scanning| is True in MM state '
-                         'Enabled')
-            register_machine.Advance()
-
-            # MM state: Searching
-            utils.poll_for_condition(
-                    register_machine.IsWaiting,
-                    exception=error.TestFail('SearchingMachine failed to wait '
-                                             'in Searching state'),
-                    timeout=SHORT_TIMEOUT_SECONDS)
-            self._check_mm_state(mm_modem,
-                                 mm1_constants.MM_MODEM_STATE_SEARCHING)
-            enable_machine.Advance()
-            self._check_shill_property_update(
-                    device,
-                    self._cellular_proxy.DEVICE_PROPERTY_SCANNING,
-                    True,
-                    True)
-            logging.info('Expectation met: |Scanning| is True in MM state '
-                         'Searching')
-            register_machine.Advance()
-
-            # MM state: >= Registered
-            utils.poll_for_condition(
-                    self._cellular_proxy.find_cellular_service_object,
-                    error.TestFail('Failed to create Cellular Service for a '
-                                   'registered modem'),
-                    timeout=SHORT_TIMEOUT_SECONDS)
-            self._check_mm_state(mm_modem,
-                                 [mm1_constants.MM_MODEM_STATE_REGISTERED,
-                                  mm1_constants.MM_MODEM_STATE_CONNECTING,
-                                  mm1_constants.MM_MODEM_STATE_CONNECTED])
-            self._check_shill_property_update(
-                    device,
-                    self._cellular_proxy.DEVICE_PROPERTY_SCANNING,
-                    True,
-                    False)
-            logging.info('Expectation met: |Scanning| is False in MM state '
-                         'Registered')
-
-
-    def run_once(self):
-        """ Autotest entry function """
-        self.test_user_initiated_cellular_scan()
-        self.test_activated_service_states()
diff --git a/client/site_tests/cellular_ScanningProperty/control.pseudomodem b/client/site_tests/cellular_ScanningProperty/control.pseudomodem
deleted file mode 100644
index 2d2c51a..0000000
--- a/client/site_tests/cellular_ScanningProperty/control.pseudomodem
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "cellular_ScanningProperty.pseudomodem"
-PURPOSE = "Verify cellular device 'Scanning' property is updated correctly."
-CRITERIA = """
-This test will fail if shill does not update the |Scanning| property correctly
-during the initial modem enable-register-connect sequence, or when the user
-requests a network scan using |RequestScan|.
-"""
-ATTRIBUTES = "suite:cellular_pseudomodem"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DOC = """
-Test that the |Scanning| Property of the shill cellular device object is
-updated correctly in the following two scenarios:
-  (1) When a user requests a network scan using the |RequestScan| method of
-      shill Manager interface.
-  (2) During the initial modem enable-register-connect sequence.
-"""
-
-job.run_test('cellular_ScanningProperty')
diff --git a/client/site_tests/cellular_ScanningProperty/files/__init__.py b/client/site_tests/cellular_ScanningProperty/files/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/site_tests/cellular_ScanningProperty/files/__init__.py
+++ /dev/null
diff --git a/client/site_tests/cellular_ScanningProperty/files/common.py b/client/site_tests/cellular_ScanningProperty/files/common.py
deleted file mode 100644
index fcba34e..0000000
--- a/client/site_tests/cellular_ScanningProperty/files/common.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# setup the environment so that autotest_lib can be imported when this file is
-# run as an executable
-
-import os, sys
-
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-client_dir = os.path.abspath(os.path.join(dirname, "..", "..", ".."))
-sys.path.insert(0, client_dir)
-
-import setup_modules
-
-sys.path.pop(0)
-setup_modules.setup(base_path=client_dir,
-                    root_module_name="autotest_lib.client")
diff --git a/client/site_tests/cellular_ScanningProperty/files/modems.py b/client/site_tests/cellular_ScanningProperty/files/modems.py
deleted file mode 100644
index bd21a31..0000000
--- a/client/site_tests/cellular_ScanningProperty/files/modems.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-import common
-from autotest_lib.client.cros.cellular.pseudomodem import modem_3gpp
-from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
-from autotest_lib.client.cros.cellular.pseudomodem import state_machine
-from autotest_lib.client.cros.cellular.pseudomodem import state_machine_factory
-
-class InteractiveStateMachineFactory(state_machine_factory.StateMachineFactory):
-    """ Run relevant state machines in interactive mode. """
-    def __init__(self):
-        super(InteractiveStateMachineFactory, self).__init__()
-        self.SetInteractive(pm_constants.STATE_MACHINE_ENABLE)
-        self.SetInteractive(pm_constants.STATE_MACHINE_REGISTER)
-
-
-class ScanMachine(state_machine.StateMachine):
-    """
-    Handle shill initiated 3GPP scan request.
-
-    A simple machine that allows the test to hook into the Scan asynchronous
-    call.
-
-    """
-    # State machine states.
-    SCAN_STATE = 'Scan'
-    DONE_STATE = 'Done'
-
-    def __init__(self, modem):
-        super(ScanMachine, self).__init__(modem)
-        self._state = ScanMachine.SCAN_STATE
-
-
-    def _HandleScanState(self):
-        """ The only real state in this machine. """
-        self._modem.DoScan()
-        self._state = ScanMachine.DONE_STATE
-        return True
-
-
-    def _GetCurrentState(self):
-        return self._state
-
-
-    def _GetModemStateFunctionMap(self):
-        return {
-                ScanMachine.SCAN_STATE: ScanMachine._HandleScanState,
-                # ScanMachine.DONE_STATE is the final state. So, no handler.
-        }
-
-
-    def _ShouldStartStateMachine(self):
-        return True
-
-
-class ScanStateMachineFactory(state_machine_factory.StateMachineFactory):
-    """ Extend StateMachineFactory to create an interactive ScanMachine. """
-    def ScanMachine(self, *args, **kwargs):
-        """ Create a ScanMachine when needed in the modem. """
-        machine = ScanMachine(*args, **kwargs)
-        machine.EnterInteractiveMode(self._bus)
-        return machine
-
-
-class AsyncScanModem(modem_3gpp.Modem3gpp):
-    """ 3GPP modem that uses ScanMachine for the Scan call. """
-    def __init__(self):
-        super(AsyncScanModem, self).__init__(
-                state_machine_factory=ScanStateMachineFactory())
-
-
-    def Scan(self, return_cb, raise_cb):
-        """ Overriden from Modem3gpp. """
-        # Stash away the scan_ok callback for when the Scan finishes.
-        logging.debug('Network scan initiated.')
-        self._scan_ok_callback = return_cb
-        self._scan_failed_callback = raise_cb
-        self._scan_machine = self._state_machine_factory.ScanMachine(self)
-        self._scan_machine.Start()
-
-
-    def DoScan(self):
-        """ Defer to Modem3gpp to take the original |SyncScan| action. """
-        # We're done scanning, drop |_scan_machine| reference.
-        self._scan_machine = None
-        try:
-            scan_result = super(AsyncScanModem, self).SyncScan()
-        except dbus.exceptions.DBusException as e:
-            logging.warning('Network scan failed')
-            self._scan_failed_callback(e)
-            return
-
-        logging.debug('Network scan completed.')
-        self._scan_ok_callback(scan_result)
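
The deleted ScanMachine is the simplest possible pseudomodem state machine: a single real state whose handler performs the scan and then transitions to a terminal Done state. A generic sketch of that shape, with a plain callable standing in for the modem's DoScan method:

class TwoStateMachine(object):
    """Sketch of the Scan/Done pattern: one working state, one terminal state."""
    SCAN, DONE = 'Scan', 'Done'

    def __init__(self, do_scan):
        self._state = self.SCAN
        self._do_scan = do_scan

    def step(self):
        """Advance one step; return True while there is still work to do."""
        if self._state == self.SCAN:
            self._do_scan()
            self._state = self.DONE
            return True
        return False  # DONE is terminal, so there is no handler for it.

machine = TwoStateMachine(lambda: print('scanning'))
while machine.step():
    pass
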
diff --git a/client/site_tests/cellular_ServiceName/cellular_ServiceName.py b/client/site_tests/cellular_ServiceName/cellular_ServiceName.py
deleted file mode 100644
index bc4efed..0000000
--- a/client/site_tests/cellular_ServiceName/cellular_ServiceName.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.cellular import mm1_constants
-from autotest_lib.client.cros.cellular import test_environment
-from autotest_lib.client.cros.cellular.pseudomodem import modem_3gpp
-from autotest_lib.client.cros.cellular.pseudomodem import modem_cdma
-from autotest_lib.client.cros.cellular.pseudomodem import sim
-
-TEST_MODEMS_MODULE_PATH = __file__
-
-# Use valid carrier info since shill looks this up in its database.
-TEST_3GPP_HOME_CARRIER = 'Orange'
-TEST_3GPP_HOME_CARRIER_MCC = '232'
-TEST_3GPP_HOME_CARRIER_MNC = '05'
-TEST_3GPP_ROAMING_CARRIER = 'T-Mobile'
-TEST_3GPP_ROAMING_OPERATOR_CODE = '23203'
-TEST_CDMA_CARRIER = 'Test Network'
-TEST_CDMA_SID = 99998
-
-class TestModemRoaming(modem_3gpp.Modem3gpp):
-    """
-    Test modem that registers with a roaming network.
-
-    """
-    def __init__(self):
-        roaming_networks = [modem_3gpp.Modem3gpp.GsmNetwork(
-                operator_long=TEST_3GPP_ROAMING_CARRIER,
-                operator_short=TEST_3GPP_ROAMING_CARRIER,
-                operator_code=TEST_3GPP_ROAMING_OPERATOR_CODE,
-                status=mm1_constants.
-                        MM_MODEM_3GPP_NETWORK_AVAILABILITY_AVAILABLE,
-                access_technology=mm1_constants.MM_MODEM_ACCESS_TECHNOLOGY_LTE)]
-        modem_3gpp.Modem3gpp.__init__(self, roaming_networks=roaming_networks)
-
-
-    def RegisterWithNetwork(
-        self, operator_id='', return_cb=None, raise_cb=None):
-        """ Overriden from superclass. """
-        logging.info('Force modem to register with roaming network |%s| '
-                     'instead of |%s|',
-                     TEST_3GPP_ROAMING_OPERATOR_CODE, operator_id)
-        modem_3gpp.Modem3gpp.RegisterWithNetwork(
-                self, TEST_3GPP_ROAMING_OPERATOR_CODE, return_cb, raise_cb)
-
-
-class TestSIM(sim.SIM):
-    """
-    Test SIM with a specific carrier name that the tests below are expecting.
-
-    """
-    def __init__(self):
-        carrier = sim.SIM.Carrier()
-        carrier.mcc = TEST_3GPP_HOME_CARRIER_MCC
-        carrier.mnc = TEST_3GPP_HOME_CARRIER_MNC
-        carrier.operator_name = TEST_3GPP_HOME_CARRIER
-        carrier.operator_id = carrier.mcc + carrier.mnc
-        sim.SIM.__init__(self, carrier,
-                         mm1_constants.MM_MODEM_ACCESS_TECHNOLOGY_LTE)
-
-
-class TestCdmaModem(modem_cdma.ModemCdma):
-    """
-    Test modem that simulates a CDMA modem.
-
-    """
-    def __init__(self):
-        network = modem_cdma.ModemCdma.CdmaNetwork(sid=TEST_CDMA_SID)
-        modem_cdma.ModemCdma.__init__(self, home_network=network)
-
-
-class cellular_ServiceName(test.test):
-    """
-    Verifies that shill reports the correct service name depending on the SIM
-    provider information and the network registration status.
-
-    """
-    version = 1
-
-    def _verify_service_name(self, expected_name):
-        """
-        Verifies the service name is as expected.
-
-        @param expected_name: Service name that is expected.
-        @raises error.TestFail() if the service name and expected name does not
-                match.
-
-        """
-        cellular_service = \
-                self.test_env.shill.wait_for_cellular_service_object()
-        service_name = cellular_service.GetProperties()['Name']
-        if service_name != expected_name:
-            raise error.TestFail('Expected service name: |%s|, '
-                                 'actual service name: |%s|' %
-                                 (expected_name, service_name))
-        logging.info('Successfully verified service name |%s|',
-                     expected_name)
-
-
-    def _test_3gpp_no_roaming(self):
-        """
-        Checks the service name when the SIM and the network is the same
-        carrier.
-
-        """
-        logging.info('Testing service name for 3GPP no roaming')
-        self.test_env = test_environment.CellularPseudoMMTestEnvironment(
-                pseudomm_args=({'family': '3GPP',
-                                'test-module': TEST_MODEMS_MODULE_PATH,
-                                'test-sim-class': 'TestSIM'},))
-        with self.test_env:
-            self._verify_service_name(TEST_3GPP_HOME_CARRIER)
-
-
-    def _test_3gpp_roaming(self):
-        """
-        Checks the service name when roaming.
-
-        The service name while roaming should be (per 3GPP TS 31.102 and
-        annex A of 122.101):
-                <home provider> | <serving operator>
-
-        """
-        logging.info('Testing service name for 3GPP roaming')
-        self.test_env = test_environment.CellularPseudoMMTestEnvironment(
-                pseudomm_args=({'family': '3GPP',
-                                'test-module': TEST_MODEMS_MODULE_PATH,
-                                'test-modem-class': 'TestModemRoaming',
-                                'test-sim-class': 'TestSIM'},))
-        with self.test_env:
-            expected_name = (TEST_3GPP_HOME_CARRIER + ' | ' +
-                             TEST_3GPP_ROAMING_CARRIER)
-            self._verify_service_name(expected_name)
-
-
-    def _test_cdma(self):
-        """ Checks the service name for a CDMA network. """
-        logging.info('Testing service name for CDMA')
-        self.test_env = test_environment.CellularPseudoMMTestEnvironment(
-                pseudomm_args=({'family': 'CDMA',
-                                'test-module': TEST_MODEMS_MODULE_PATH,
-                                'test-modem-class': 'TestCdmaModem'},))
-        with self.test_env:
-            self._verify_service_name(TEST_CDMA_CARRIER)
-
-
-    def run_once(self):
-        self._test_3gpp_no_roaming()
-        self._test_3gpp_roaming()
-        self._test_cdma()
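The roaming check in the deleted test expected the service name format '<home provider> | <serving operator>' described in its docstring. As a standalone illustration using the constants the file defined:

TEST_3GPP_HOME_CARRIER = 'Orange'
TEST_3GPP_ROAMING_CARRIER = 'T-Mobile'

# Per the deleted _test_3gpp_roaming(), the roaming service name is
# "<home provider> | <serving operator>".
expected_name = TEST_3GPP_HOME_CARRIER + ' | ' + TEST_3GPP_ROAMING_CARRIER
assert expected_name == 'Orange | T-Mobile'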
diff --git a/client/site_tests/cellular_ServiceName/control.pseudomodem b/client/site_tests/cellular_ServiceName/control.pseudomodem
deleted file mode 100644
index 8cbdcf7..0000000
--- a/client/site_tests/cellular_ServiceName/control.pseudomodem
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "cellular_ServiceName.pseudomodem"
-PURPOSE = "Verify the cellular service name is correctly populated"
-CRITERIA = """
-This test will fail if one of the following conditions occur:
-  - Shill reports an incorrect service name
-"""
-ATTRIBUTES = "suite:cellular_pseudomodem"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-
-DOC = """
-  Tests that shill reports the correct service name depending on the SIM
-  home provider name and current network registration.
-
-  The service name is what the UI displays to the user.
-"""
-
-job.run_test('cellular_ServiceName')
diff --git a/client/site_tests/cellular_Smoke/cellular_Smoke.py b/client/site_tests/cellular_Smoke/cellular_Smoke.py
index bb2b275..9e78d41 100644
--- a/client/site_tests/cellular_Smoke/cellular_Smoke.py
+++ b/client/site_tests/cellular_Smoke/cellular_Smoke.py
@@ -1,11 +1,19 @@
+# Lint as: python2, python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import socket
 import time
-import urlparse
+
+from six.moves import range
+
+import six.moves.urllib.parse
 
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error
@@ -18,6 +26,10 @@
 CONNECT_TIMEOUT = 120
 DISCONNECT_TIMEOUT = 60
 
+PORTAL_URL_PATTERN = ('https://quickaccess.verizonwireless.com/'
+                      'images_b2c/shared/nav/vz_logo_quickaccess.jpg?foo=%d')
+
+
 class cellular_Smoke(test.test):
     """
     Tests that 3G modem can connect to the network
@@ -32,13 +44,10 @@
 
 
     def run_once_internal(self):
-        """
-        Executes the test.
-
-        """
+        """Executes the test."""
         old_modem_info = self.test_env.modem.GetModemProperties()
 
-        for _ in xrange(self.connect_count):
+        for i in range(self.connect_count):
             device = self.test_env.shill.find_cellular_device_object()
             if not device:
                 raise error.TestError('No cellular device found.')
@@ -56,27 +65,29 @@
             logging.info('Service state = %s', state)
 
             if state == 'portal':
-                url_pattern = ('https://quickaccess.verizonwireless.com/'
-                               'images_b2c/shared/nav/'
-                               'vz_logo_quickaccess.jpg?foo=%d')
+                url_pattern = PORTAL_URL_PATTERN
                 bytes_to_fetch = 4476
-            else:
+            elif state == 'online':
                 url_pattern = network.FETCH_URL_PATTERN_FOR_TEST
                 bytes_to_fetch = 64 * 1024
+            else:
+                raise error.TestError('Unexpected cellular service state: %s' % state)
 
             interface = self.test_env.shill.get_dbus_property(
                     device, shill_proxy.ShillProxy.DEVICE_PROPERTY_INTERFACE)
             logging.info('Expected interface for %s: %s',
                          service.object_path, interface)
-            # TODO(b/114292737): Once IPv6 support is enabled on
-            # cellular, we should not need to limit this check to just
-            # AF_INET.
-            network.CheckInterfaceForDestination(
-                urlparse.urlparse(url_pattern).hostname,
-                interface, socket.AF_INET)
+            network.CheckThatInterfaceCanAccessDestination(
+                    six.moves.urllib.parse.urlparse(url_pattern).hostname, interface,
+                    [socket.AF_INET, socket.AF_INET6])
 
-            fetch_time = network.FetchUrl(url_pattern, bytes_to_fetch,
-                                          self.fetch_timeout)
+            try:
+                fetch_time = network.FetchUrl(url_pattern, bytes_to_fetch,
+                                              self.fetch_timeout)
+            except Exception:
+                raise error.TestError('FetchUrl timed out after %d seconds' %
+                                      self.fetch_timeout)
+
             self.write_perf_keyval({
                 'seconds_3G_fetch_time': fetch_time,
                 'bytes_3G_bytes_received': bytes_to_fetch,
@@ -99,8 +110,12 @@
                 time.sleep(self.sleep_kludge)
 
 
-    def run_once(self, test_env, connect_count=5, sleep_kludge=5,
-                 fetch_timeout=120):
+    def run_once(self,
+                 test_env,
+                 connect_count=5,
+                 sleep_kludge=5,
+                 fetch_timeout=30):
+        """ Runs the test once """
         with test_env, shill_context.ServiceAutoConnectContext(
                 test_env.shill.wait_for_cellular_service_object, False):
             self.test_env = test_env
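The hunk above replaces the Python 2-only urlparse import with six.moves and widens the interface reachability check to both IPv4 and IPv6. A minimal standalone sketch of the new parsing call, assuming only that the six package is installed:

import socket

import six.moves.urllib.parse

PORTAL_URL_PATTERN = ('https://quickaccess.verizonwireless.com/'
                      'images_b2c/shared/nav/vz_logo_quickaccess.jpg?foo=%d')

# six.moves resolves to urlparse on Python 2 and urllib.parse on Python 3.
hostname = six.moves.urllib.parse.urlparse(PORTAL_URL_PATTERN).hostname
assert hostname == 'quickaccess.verizonwireless.com'

# The updated reachability check passes both address families instead of
# limiting itself to AF_INET.
address_families = [socket.AF_INET, socket.AF_INET6]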
diff --git a/client/site_tests/cellular_Smoke/control b/client/site_tests/cellular_Smoke/control
index 19e014c..caa0aff 100644
--- a/client/site_tests/cellular_Smoke/control
+++ b/client/site_tests/cellular_Smoke/control
@@ -1,25 +1,27 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 AUTHOR = "ChromeOS Team"
 NAME = "cellular_Smoke"
-PURPOSE = "Verify 3G modem can connect to the network."
+PURPOSE = "Verify 4G modem can connect to the network."
 CRITERIA = """
 This test will fail if one of the following conditions occur:
-  - 3G modem fails to connect to network
+  - 4G modem fails to connect to network
   - the modem is not left in a working state
 """
-ATTRIBUTES = "suite:cellular_qual"
+ATTRIBUTES = "suite:cellular_ota_avl"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
-DOC = """
-  Tests that 3G modem can connect to the network
+PY_VERSION = 3
 
-  The test attempts to connect using the 3G network.  It assumes that
-  a 3G modem is plugged in and has a signal.  The test then
+DOC = """
+  Tests that 4G modem can connect to the network
+
+  The test attempts to connect using the 4G network.  It assumes that
+  a 4G modem is plugged in and has a signal.  The test then
   disconnects from the network, and verifies that the modem still
   responds to modem manager DBUS API calls.  It repeats the
   connect/disconnect sequence several times.
diff --git a/client/site_tests/cellular_Smoke/control.amarisoft b/client/site_tests/cellular_Smoke/control.amarisoft
new file mode 100644
index 0000000..2f0a43b
--- /dev/null
+++ b/client/site_tests/cellular_Smoke/control.amarisoft
@@ -0,0 +1,38 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Smoke.amarisoft"
+PURPOSE = "Verify 3G/4G modem can connect to the network."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - 3G modem fails to connect to network
+  - the modem is not left in a working state
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:amarisoft"
+PY_VERSION = 3
+
+# TODO(crbug.com/932661) Fix the root cause of the flakiness and
+# remove this hack.
+JOB_RETRIES=3
+
+DOC = """
+  Tests that 3G/4G modem can connect to the network
+
+  The test attempts to connect using the 3G/4G network.  It assumes that
+  a cellular modem is plugged in and has a signal.  The test then
+  disconnects from the network, and verifies that the modem still
+  responds to modem manager DBUS API calls.  It repeats the
+  connect/disconnect sequence several times.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Smoke', test_env=test_env)
diff --git a/client/site_tests/cellular_Smoke/control.att b/client/site_tests/cellular_Smoke/control.att
index a4bf712..fe7b51d 100644
--- a/client/site_tests/cellular_Smoke/control.att
+++ b/client/site_tests/cellular_Smoke/control.att
@@ -16,6 +16,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:att"
+PY_VERSION = 3
 
 # TODO(crbug.com/932661) Fix the root cause of the flakiness and
 # remove this hack.
diff --git a/client/site_tests/cellular_Smoke/control.docomo b/client/site_tests/cellular_Smoke/control.docomo
new file mode 100644
index 0000000..f2c89b6
--- /dev/null
+++ b/client/site_tests/cellular_Smoke/control.docomo
@@ -0,0 +1,38 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Smoke.docomo"
+PURPOSE = "Verify 3G modem can connect to the network."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - 3G modem fails to connect to network
+  - the modem is not left in a working state
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:docomo"
+PY_VERSION = 3
+
+# TODO(crbug.com/932661) Fix the root cause of the flakiness and
+# remove this hack.
+JOB_RETRIES=3
+
+DOC = """
+  Tests that 3G modem can connect to the network
+
+  The test attempts to connect using the 3G network.  It assumes that
+  a 3G modem is plugged in and has a signal.  The test then
+  disconnects from the network, and verifies that the modem still
+  responds to modem manager DBUS API calls.  It repeats the
+  connect/disconnect sequence several times.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Smoke', test_env=test_env)
diff --git a/client/site_tests/cellular_Smoke/control.ee b/client/site_tests/cellular_Smoke/control.ee
new file mode 100644
index 0000000..73a7d59
--- /dev/null
+++ b/client/site_tests/cellular_Smoke/control.ee
@@ -0,0 +1,38 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Smoke.ee"
+PURPOSE = "Verify 3G modem can connect to the network."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - 3G modem fails to connect to network
+  - the modem is not left in a working state
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:ee"
+PY_VERSION = 3
+
+# TODO(crbug.com/932661) Fix the root cause of the flakiness and
+# remove this hack.
+JOB_RETRIES=3
+
+DOC = """
+  Tests that 3G modem can connect to the network
+
+  The test attempts to connect using the 3G network.  It assumes that
+  a 3G modem is plugged in and has a signal.  The test then
+  disconnects from the network, and verifies that the modem still
+  responds to modem manager DBUS API calls.  It repeats the
+  connect/disconnect sequence several times.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Smoke', test_env=test_env)
diff --git a/client/site_tests/cellular_Smoke/control.kddi b/client/site_tests/cellular_Smoke/control.kddi
new file mode 100644
index 0000000..31bd086
--- /dev/null
+++ b/client/site_tests/cellular_Smoke/control.kddi
@@ -0,0 +1,38 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Smoke.kddi"
+PURPOSE = "Verify 3G modem can connect to the network."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - 3G modem fails to connect to network
+  - the modem is not left in a working state
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:kddi"
+PY_VERSION = 3
+
+# TODO(crbug.com/932661) Fix the root cause of the flakiness and
+# remove this hack.
+JOB_RETRIES=3
+
+DOC = """
+  Tests that 3G modem can connect to the network
+
+  The test attempts to connect using the 3G network.  It assumes that
+  a 3G modem is plugged in and has a signal.  The test then
+  disconnects from the network, and verifies that the modem still
+  responds to modem manager DBUS API calls.  It repeats the
+  connect/disconnect sequence several times.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Smoke', test_env=test_env)
diff --git a/client/site_tests/cellular_Smoke/control.pseudomodem b/client/site_tests/cellular_Smoke/control.pseudomodem
index 7889c24..caa7f33 100644
--- a/client/site_tests/cellular_Smoke/control.pseudomodem
+++ b/client/site_tests/cellular_Smoke/control.pseudomodem
@@ -10,11 +10,11 @@
   - 3G modem fails to connect to network
   - the modem is not left in a working state
 """
-ATTRIBUTES = "suite:cellular_pseudomodem"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 DOC = """
   Tests that 3G modem can connect to the network
 
diff --git a/client/site_tests/cellular_Smoke/control.rakuten b/client/site_tests/cellular_Smoke/control.rakuten
new file mode 100644
index 0000000..dc8691e
--- /dev/null
+++ b/client/site_tests/cellular_Smoke/control.rakuten
@@ -0,0 +1,38 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Smoke.rakuten"
+PURPOSE = "Verify 3G modem can connect to the network."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - 3G modem fails to connect to network
+  - the modem is not left in a working state
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:rakuten"
+PY_VERSION = 3
+
+# TODO(crbug.com/932661) Fix the root cause of the flakiness and
+# remove this hack.
+JOB_RETRIES=3
+
+DOC = """
+  Tests that 3G modem can connect to the network
+
+  The test attempts to connect using the 3G network.  It assumes that
+  a 3G modem is plugged in and has a signal.  The test then
+  disconnects from the network, and verifies that the modem still
+  responds to modem manager DBUS API calls.  It repeats the
+  connect/disconnect sequence several times.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Smoke', test_env=test_env)
diff --git a/client/site_tests/cellular_Smoke/control.softbank b/client/site_tests/cellular_Smoke/control.softbank
new file mode 100644
index 0000000..4fff560
--- /dev/null
+++ b/client/site_tests/cellular_Smoke/control.softbank
@@ -0,0 +1,38 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Smoke.softbank"
+PURPOSE = "Verify 3G modem can connect to the network."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - 3G modem fails to connect to network
+  - the modem is not left in a working state
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:softbank"
+PY_VERSION = 3
+
+# TODO(crbug.com/932661) Fix the root cause of the flakiness and
+# remove this hack.
+JOB_RETRIES=3
+
+DOC = """
+  Tests that 3G modem can connect to the network
+
+  The test attempts to connect using the 3G network.  It assumes that
+  a 3G modem is plugged in and has a signal.  The test then
+  disconnects from the network, and verifies that the modem still
+  responds to modem manager DBUS API calls.  It repeats the
+  connect/disconnect sequence several times.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Smoke', test_env=test_env)
diff --git a/client/site_tests/cellular_Smoke/control.sprint b/client/site_tests/cellular_Smoke/control.sprint
index 4ca0b3e..5ed6be7 100644
--- a/client/site_tests/cellular_Smoke/control.sprint
+++ b/client/site_tests/cellular_Smoke/control.sprint
@@ -16,6 +16,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:sprint"
+PY_VERSION = 3
 
 # TODO(crbug.com/932661) Fix the root cause of the flakiness and
 # remove this hack.
diff --git a/client/site_tests/cellular_Smoke/control.tmobile b/client/site_tests/cellular_Smoke/control.tmobile
index 805fd6d..f0e2603 100644
--- a/client/site_tests/cellular_Smoke/control.tmobile
+++ b/client/site_tests/cellular_Smoke/control.tmobile
@@ -16,10 +16,8 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:tmobile"
-
-# TODO(crbug.com/932661) Fix the root cause of the flakiness and
-# remove this hack.
-JOB_RETRIES=3
+PY_VERSION = 3
+JOB_RETRIES=1
 
 DOC = """
   Tests that 3G modem can connect to the network
diff --git a/client/site_tests/cellular_Smoke/control.verizon b/client/site_tests/cellular_Smoke/control.verizon
index e56cd88..da77297 100644
--- a/client/site_tests/cellular_Smoke/control.verizon
+++ b/client/site_tests/cellular_Smoke/control.verizon
@@ -10,12 +10,13 @@
   - 3G modem fails to connect to network
   - the modem is not left in a working state
 """
-ATTRIBUTES = "suite:cellular_ota_flaky"
+ATTRIBUTES = "suite:cellular_ota"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:verizon"
+PY_VERSION = 3
 
 # TODO(crbug.com/932661) Fix the root cause of the flakiness and
 # remove this hack.
diff --git a/client/site_tests/cellular_Smoke/control.vodafone b/client/site_tests/cellular_Smoke/control.vodafone
new file mode 100644
index 0000000..7c2763e
--- /dev/null
+++ b/client/site_tests/cellular_Smoke/control.vodafone
@@ -0,0 +1,38 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_Smoke.vodafone"
+PURPOSE = "Verify 3G modem can connect to the network."
+CRITERIA = """
+This test will fail if one of the following conditions occur:
+  - 3G modem fails to connect to network
+  - the modem is not left in a working state
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:vodafone"
+PY_VERSION = 3
+
+# TODO(crbug.com/932661) Fix the root cause of the flakiness and
+# remove this hack.
+JOB_RETRIES=3
+
+DOC = """
+  Tests that 3G modem can connect to the network
+
+  The test attempts to connect using the 3G network.  It assumes that
+  a 3G modem is plugged in and has a signal.  The test then
+  disconnects from the network, and verifies that the modem still
+  responds to modem manager DBUS API calls.  It repeats the
+  connect/disconnect sequence several times.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_Smoke', test_env=test_env)
diff --git a/client/site_tests/cellular_StressEnable/cellular_StressEnable.py b/client/site_tests/cellular_StressEnable/cellular_StressEnable.py
index 4dcbff8..5309abb 100644
--- a/client/site_tests/cellular_StressEnable/cellular_StressEnable.py
+++ b/client/site_tests/cellular_StressEnable/cellular_StressEnable.py
@@ -1,11 +1,18 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus
 import logging
 import time
 
+from six.moves import range
+
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.networking import shill_context
@@ -30,7 +37,7 @@
                 self.device.Enable(timeout=timeout)
             else:
                 self.device.Disable(timeout=timeout)
-        except dbus.exceptions.DBusException, err:
+        except dbus.exceptions.DBusException as err:
             if err.get_dbus_name() in cellular_StressEnable.okerrors:
                 return
             raise error.TestFail(err)
@@ -47,8 +54,8 @@
         with test_env, shill_context.ServiceAutoConnectContext(
                 test_env.shill.wait_for_cellular_service_object, False):
             self.device = test_env.shill.find_cellular_device_object()
-            for t in xrange(max, min, -1):
-                for n in xrange(cycles):
+            for t in range(max, min, -1):
+                for n in range(cycles):
                     # deciseconds are an awesome unit.
                     logging.info('Cycle %d: %f seconds delay.', n, t / 10.0)
                     self._test(t / 10.0)
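The loop above steps through enable/disable cycles with delays expressed in deciseconds. A small standalone sketch, with hypothetical bounds, of the delay schedule that range(max, min, -1) produces:

def decisecond_delays(max_ds, min_ds):
    """Delays, in seconds, visited by the range(max, min, -1) loop above."""
    return [t / 10.0 for t in range(max_ds, min_ds, -1)]


# With hypothetical bounds max_ds=10 and min_ds=5, each successive
# enable/disable cycle gets a shorter gap.
assert decisecond_delays(10, 5) == [1.0, 0.9, 0.8, 0.7, 0.6]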
diff --git a/client/site_tests/cellular_StressEnable/control b/client/site_tests/cellular_StressEnable/control
index 9f259fd..8a0e90f 100644
--- a/client/site_tests/cellular_StressEnable/control
+++ b/client/site_tests/cellular_StressEnable/control
@@ -1,4 +1,4 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,11 +8,12 @@
 CRITERIA = """
 This test fails if flimflam ever fails to respond to a DBus message or crashes.
 """
-ATTRIBUTES = "suite:cellular_qual"
+ATTRIBUTES = "suite:cellular_ota_avl"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests enabling and disabling a technology at short intervals.
diff --git a/client/site_tests/cellular_StressEnable/control.amarisoft b/client/site_tests/cellular_StressEnable/control.amarisoft
new file mode 100644
index 0000000..5d48481
--- /dev/null
+++ b/client/site_tests/cellular_StressEnable/control.amarisoft
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_StressEnable.amarisoft"
+PURPOSE = "Stress-test Connman enable/disable"
+CRITERIA = """
+This test fails if flimflam ever fails to respond to a DBus message or crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:amarisoft"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests enabling and disabling a technology at short intervals.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_StressEnable', test_env=test_env)
diff --git a/client/site_tests/cellular_StressEnable/control.att b/client/site_tests/cellular_StressEnable/control.att
index 0307426..fd9ad4e 100644
--- a/client/site_tests/cellular_StressEnable/control.att
+++ b/client/site_tests/cellular_StressEnable/control.att
@@ -8,12 +8,13 @@
 CRITERIA = """
 This test fails if flimflam ever fails to respond to a DBus message or crashes.
 """
-ATTRIBUTES = "suite:cellular_ota"
+ATTRIBUTES = "suite:cellular_ota, suite:cellular-cq"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:att"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests enabling and disabling a technology at short intervals.
diff --git a/client/site_tests/cellular_StressEnable/control.docomo b/client/site_tests/cellular_StressEnable/control.docomo
new file mode 100644
index 0000000..8ea2b96
--- /dev/null
+++ b/client/site_tests/cellular_StressEnable/control.docomo
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_StressEnable.docomo"
+PURPOSE = "Stress-test Connman enable/disable"
+CRITERIA = """
+This test fails if flimflam ever fails to respond to a DBus message or crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:docomo"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests enabling and disabling a technology at short intervals.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_StressEnable', test_env=test_env)
diff --git a/client/site_tests/cellular_StressEnable/control.ee b/client/site_tests/cellular_StressEnable/control.ee
new file mode 100644
index 0000000..8f0b0af
--- /dev/null
+++ b/client/site_tests/cellular_StressEnable/control.ee
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_StressEnable.ee"
+PURPOSE = "Stress-test Connman enable/disable"
+CRITERIA = """
+This test fails if flimflam ever fails to respond to a DBus message or crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:ee"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests enabling and disabling a technology at short intervals.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_StressEnable', test_env=test_env)
diff --git a/client/site_tests/cellular_StressEnable/control.kddi b/client/site_tests/cellular_StressEnable/control.kddi
new file mode 100644
index 0000000..967d4e2
--- /dev/null
+++ b/client/site_tests/cellular_StressEnable/control.kddi
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_StressEnable.kddi"
+PURPOSE = "Stress-test Connman enable/disable"
+CRITERIA = """
+This test fails if flimflam ever fails to respond to a DBus message or crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:kddi"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests enabling and disabling a technology at short intervals.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_StressEnable', test_env=test_env)
diff --git a/client/site_tests/cellular_StressEnable/control.pseudomodem b/client/site_tests/cellular_StressEnable/control.pseudomodem
index e17eff2..53010eb 100644
--- a/client/site_tests/cellular_StressEnable/control.pseudomodem
+++ b/client/site_tests/cellular_StressEnable/control.pseudomodem
@@ -8,11 +8,11 @@
 CRITERIA = """
 This test fails if flimflam ever fails to respond to a DBus message or crashes.
 """
-ATTRIBUTES = "suite:cellular_pseudomodem"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests enabling and disabling a technology at short intervals.
diff --git a/client/site_tests/cellular_StressEnable/control.rakuten b/client/site_tests/cellular_StressEnable/control.rakuten
new file mode 100644
index 0000000..3c9ddc1
--- /dev/null
+++ b/client/site_tests/cellular_StressEnable/control.rakuten
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_StressEnable.rakuten"
+PURPOSE = "Stress-test Connman enable/disable"
+CRITERIA = """
+This test fails if flimflam ever fails to respond to a DBus message or crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:rakuten"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests enabling and disabling a technology at short intervals.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_StressEnable', test_env=test_env)
diff --git a/client/site_tests/cellular_StressEnable/control.softbank b/client/site_tests/cellular_StressEnable/control.softbank
new file mode 100644
index 0000000..32fe8e2
--- /dev/null
+++ b/client/site_tests/cellular_StressEnable/control.softbank
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_StressEnable.softbank"
+PURPOSE = "Stress-test Connman enable/disable"
+CRITERIA = """
+This test fails if flimflam ever fails to respond to a DBus message or crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:softbank"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests enabling and disabling a technology at short intervals.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_StressEnable', test_env=test_env)
diff --git a/client/site_tests/cellular_StressEnable/control.sprint b/client/site_tests/cellular_StressEnable/control.sprint
index f9e9841..81af012 100644
--- a/client/site_tests/cellular_StressEnable/control.sprint
+++ b/client/site_tests/cellular_StressEnable/control.sprint
@@ -14,6 +14,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:sprint"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests enabling and disabling a technology at short intervals.
diff --git a/client/site_tests/cellular_StressEnable/control.tmobile b/client/site_tests/cellular_StressEnable/control.tmobile
index 0489d17..7331ad1 100644
--- a/client/site_tests/cellular_StressEnable/control.tmobile
+++ b/client/site_tests/cellular_StressEnable/control.tmobile
@@ -14,6 +14,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:tmobile"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests enabling and disabling a technology at short intervals.
diff --git a/client/site_tests/cellular_StressEnable/control.verizon b/client/site_tests/cellular_StressEnable/control.verizon
index 65b9c40..0f87d38 100644
--- a/client/site_tests/cellular_StressEnable/control.verizon
+++ b/client/site_tests/cellular_StressEnable/control.verizon
@@ -8,12 +8,13 @@
 CRITERIA = """
 This test fails if flimflam ever fails to respond to a DBus message or crashes.
 """
-ATTRIBUTES = "suite:cellular_ota"
+ATTRIBUTES = "suite:cellular_ota, suite:cellular-cq"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:verizon"
+PY_VERSION = 3
 
 DOC = """
   Stress-tests enabling and disabling a technology at short intervals.
diff --git a/client/site_tests/cellular_StressEnable/control.vodafone b/client/site_tests/cellular_StressEnable/control.vodafone
new file mode 100644
index 0000000..18f0a59
--- /dev/null
+++ b/client/site_tests/cellular_StressEnable/control.vodafone
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_StressEnable.vodafone"
+PURPOSE = "Stress-test Connman enable/disable"
+CRITERIA = """
+This test fails if flimflam ever fails to respond to a DBus message or crashes.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:vodafone"
+PY_VERSION = 3
+
+DOC = """
+  Stress-tests enabling and disabling a technology at short intervals.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+test_env = test_environment.CellularOTATestEnvironment()
+job.run_test('cellular_StressEnable', test_env=test_env)
diff --git a/client/site_tests/cellular_SuspendResume/cellular_SuspendResume.py b/client/site_tests/cellular_SuspendResume/cellular_SuspendResume.py
index 5cc2765..df85688 100644
--- a/client/site_tests/cellular_SuspendResume/cellular_SuspendResume.py
+++ b/client/site_tests/cellular_SuspendResume/cellular_SuspendResume.py
@@ -1,12 +1,21 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from functools import reduce
+
 import dbus
 import logging
 from random import choice, randint
 import time
 
+from six.moves import range
+
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import chrome
@@ -15,8 +24,10 @@
 
 # Special import to define the location of the flimflam library.
 from autotest_lib.client.cros import flimflam_test_path
+
 import flimflam
 
+
 SHILL_LOG_SCOPES = 'cellular+dbus+device+dhcp+manager+modem+portal+service'
 
 class cellular_SuspendResume(test.test):
@@ -58,7 +69,7 @@
     def filterexns(self, function, exn_list):
         try:
             function()
-        except dbus.exceptions.DBusException, e:
+        except dbus.exceptions.DBusException as e:
             if e._dbus_error_name not in exn_list:
                 raise e
 
@@ -73,14 +84,14 @@
         return None
 
     def get_powered(self, device):
-        properties = device.GetProperties(utf8_strings=True)
+        properties = device.GetProperties()
         logging.debug(properties)
         logging.info('Power state of mobile device is %s.',
                      ['off', 'on'][properties['Powered']])
         return properties['Powered']
 
     def _check_powered(self, device, check_enabled):
-        properties = device.GetProperties(utf8_strings=True)
+        properties = device.GetProperties()
         power_state = (properties['Powered'] == 1)
         return power_state if check_enabled else not power_state
 
@@ -133,7 +144,7 @@
         while properties is None and time.time() < timeout:
             try:
                 device = self.flim.FindCellularDevice(timeout)
-                properties = device.GetProperties(utf8_strings=True)
+                properties = device.GetProperties()
             except dbus.exceptions.DBusException:
                 logging.debug('Mobile device not ready yet')
                 device = None
@@ -227,13 +238,13 @@
         if not service:
             raise error.TestError('Unable to find mobile service')
 
-        props = service.GetProperties(utf8_strings=True)
+        props = service.GetProperties()
         if props['AutoConnect']:
             expected_states = ['ready', 'online', 'portal']
         else:
             expected_states = ['idle']
 
-        for _ in xrange(5):
+        for _ in range(5):
             # Must wait at least 20 seconds to ensure that the suspend occurs
             self.suspend_resume(20)
 
@@ -310,7 +321,7 @@
         with chrome.Chrome():
             # Replace the test type with the list of tests
             if (scenario_group not in
-                    cellular_SuspendResume.scenarios.keys()):
+                    list(cellular_SuspendResume.scenarios.keys())):
                 scenario_group = 'all'
             logging.info('Running scenario group: %s' % scenario_group)
             scenarios = cellular_SuspendResume.scenarios[scenario_group]
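Among the Python 3 fixes above is the 'except ... as e' form used by filterexns(), which re-raises only D-Bus errors whose name is not on an allow-list. A standalone sketch of that idiom, with plain exceptions and str(e) standing in for DBusException and _dbus_error_name so no dbus import is needed:

def filter_exceptions(function, allowed_names):
    """Run function(), swallowing exceptions whose text is allow-listed.

    Mirrors the shape of filterexns() above; plain exceptions and str(e)
    stand in for dbus.exceptions.DBusException and e._dbus_error_name.
    """
    try:
        function()
    except Exception as e:
        if str(e) not in allowed_names:
            raise


def _already_enabled():
    raise RuntimeError('AlreadyEnabled')


# Swallowed, because 'AlreadyEnabled' is on the allow-list; any other
# error name would propagate.
filter_exceptions(_already_enabled, ['AlreadyEnabled'])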
diff --git a/client/site_tests/cellular_SuspendResume/control b/client/site_tests/cellular_SuspendResume/control
index 76a8bf0..448829d 100644
--- a/client/site_tests/cellular_SuspendResume/control
+++ b/client/site_tests/cellular_SuspendResume/control
@@ -16,6 +16,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
     Verify that cellular can be enabled under all suspend/resume situations.
diff --git a/client/site_tests/cellular_SuspendResume/control.amarisoft b/client/site_tests/cellular_SuspendResume/control.amarisoft
new file mode 100644
index 0000000..a0b2274
--- /dev/null
+++ b/client/site_tests/cellular_SuspendResume/control.amarisoft
@@ -0,0 +1,29 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SuspendResume.amarisoft"
+PURPOSE = "Test cellular modem state after suspend/resume"
+CRITERIA = """
+    Check the cellular modem state of the device after suspend and resume.
+    Verify that the cellular modem can be enabled in all situations after the
+    device resumes from suspend, and that it autoconnects when autoconnect is on.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:amarisoft"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the cellular modem can be enabled under all suspend/resume scenarios.
+"""
+
+# Run all scenarios twice, first with autoconnect off, then with it on
+job.run_test('cellular_SuspendResume',
+             autoconnect=False, tag='autoconnect_off')
+job.run_test('cellular_SuspendResume',
+             autoconnect=True, tag='autoconnect_on')
diff --git a/client/site_tests/cellular_SuspendResume/control.att b/client/site_tests/cellular_SuspendResume/control.att
index 809277a..818e30c 100644
--- a/client/site_tests/cellular_SuspendResume/control.att
+++ b/client/site_tests/cellular_SuspendResume/control.att
@@ -17,6 +17,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:att"
+PY_VERSION = 3
 
 DOC = """
     Verify that 3g can be enabled under all suspend/resume situations.
diff --git a/client/site_tests/cellular_SuspendResume/control.docomo b/client/site_tests/cellular_SuspendResume/control.docomo
new file mode 100644
index 0000000..14b60bf
--- /dev/null
+++ b/client/site_tests/cellular_SuspendResume/control.docomo
@@ -0,0 +1,30 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SuspendResume.docomo"
+PURPOSE = "Test 3g modem state after suspend/resume"
+CRITERIA = """
+    Check the 3g state of the device after suspend and resume.  Verify
+    that 3g can be enabled in all situations after the device resumes.
+    Verify that, if autoconnect is turned on, the modem autoconnects
+    after resuming.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:docomo"
+PY_VERSION = 3
+
+DOC = """
+    Verify that 3g can be enabled under all suspend/resume situations.
+"""
+
+# Run all scenarios twice, first with autoconnect off, then with it on
+job.run_test('cellular_SuspendResume',
+             autoconnect=False, tag='autoconnect_off')
+job.run_test('cellular_SuspendResume',
+             autoconnect=True, tag='autoconnect_on')
diff --git a/client/site_tests/cellular_SuspendResume/control.ee b/client/site_tests/cellular_SuspendResume/control.ee
new file mode 100644
index 0000000..b7c29c0
--- /dev/null
+++ b/client/site_tests/cellular_SuspendResume/control.ee
@@ -0,0 +1,30 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SuspendResume.ee"
+PURPOSE = "Test 3g modem state after suspend/resume"
+CRITERIA = """
+    Check the 3g state of the device after suspend and resume.  Verify
+    that 3g can be enabled in all situations after the device resumes.
+    Verify that, if autoconnect is turned on, the modem autoconnects
+    after resuming.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:ee"
+PY_VERSION = 3
+
+DOC = """
+    Verify that 3g can be enabled under all suspend/resume situations.
+"""
+
+# Run all scenarios twice, first with autoconnect off, then with it on
+job.run_test('cellular_SuspendResume',
+             autoconnect=False, tag='autoconnect_off')
+job.run_test('cellular_SuspendResume',
+             autoconnect=True, tag='autoconnect_on')
diff --git a/client/site_tests/cellular_SuspendResume/control.kddi b/client/site_tests/cellular_SuspendResume/control.kddi
new file mode 100644
index 0000000..d432b24
--- /dev/null
+++ b/client/site_tests/cellular_SuspendResume/control.kddi
@@ -0,0 +1,30 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SuspendResume.kddi"
+PURPOSE = "Test 3g modem state after suspend/resume"
+CRITERIA = """
+    Check the 3g state of the device after suspend and resume.  Verify
+    that 3g can be enabled in all situations after the device resumes.
+    Verify that, if autoconnect is turned on, the modem autoconnects
+    after resuming.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:kddi"
+PY_VERSION = 3
+
+DOC = """
+    Verify that 3g can be enabled under all suspend/resume situations.
+"""
+
+# Run all scenarios twice, first with autoconnect off, then with it on
+job.run_test('cellular_SuspendResume',
+             autoconnect=False, tag='autoconnect_off')
+job.run_test('cellular_SuspendResume',
+             autoconnect=True, tag='autoconnect_on')
diff --git a/client/site_tests/cellular_SuspendResume/control.rakuten b/client/site_tests/cellular_SuspendResume/control.rakuten
new file mode 100644
index 0000000..bb325ab
--- /dev/null
+++ b/client/site_tests/cellular_SuspendResume/control.rakuten
@@ -0,0 +1,30 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SuspendResume.rakuten"
+PURPOSE = "Test 3g modem state after suspend/resume"
+CRITERIA = """
+    Check the 3g state of the device after suspend and resume.  Verify
+    that 3g can be enabled in all situations after the device resumes.
+    Verify that, if autoconnect is turned on, the modem autoconnects
+    after resuming.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:rakuten"
+PY_VERSION = 3
+
+DOC = """
+    Verify that 3g can be enabled under all suspend/resume situations.
+"""
+
+# Run all scenarios twice, first with autoconnect off, then with it on
+job.run_test('cellular_SuspendResume',
+             autoconnect=False, tag='autoconnect_off')
+job.run_test('cellular_SuspendResume',
+             autoconnect=True, tag='autoconnect_on')
diff --git a/client/site_tests/cellular_SuspendResume/control.softbank b/client/site_tests/cellular_SuspendResume/control.softbank
new file mode 100644
index 0000000..320b23a
--- /dev/null
+++ b/client/site_tests/cellular_SuspendResume/control.softbank
@@ -0,0 +1,30 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SuspendResume.softbank"
+PURPOSE = "Test 3g modem state after suspend/resume"
+CRITERIA = """
+    Check the 3g state of the device after suspend and resume.  Verify
+    that 3g can be enabled in all situations after the device resumes.
+    Verify that, if autoconnect is turned on, the modem autoconnects
+    after resuming.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:softbank"
+PY_VERSION = 3
+
+DOC = """
+    Verify that 3g can be enabled under all suspend/resume situations.
+"""
+
+# Run all scenarios twice, first with autoconnect off, then with it on
+job.run_test('cellular_SuspendResume',
+             autoconnect=False, tag='autoconnect_off')
+job.run_test('cellular_SuspendResume',
+             autoconnect=True, tag='autoconnect_on')
diff --git a/client/site_tests/cellular_SuspendResume/control.sprint b/client/site_tests/cellular_SuspendResume/control.sprint
index e926f7f..b44f810 100644
--- a/client/site_tests/cellular_SuspendResume/control.sprint
+++ b/client/site_tests/cellular_SuspendResume/control.sprint
@@ -17,6 +17,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:sprint"
+PY_VERSION = 3
 
 DOC = """
     Verify that 3g can be enabled under all suspend/resume situations.
diff --git a/client/site_tests/cellular_SuspendResume/control.stress b/client/site_tests/cellular_SuspendResume/control.stress
index 7009588..236718a 100644
--- a/client/site_tests/cellular_SuspendResume/control.stress
+++ b/client/site_tests/cellular_SuspendResume/control.stress
@@ -15,6 +15,7 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
     Verify that 3g can be enabled under all suspend/resume situations.
diff --git a/client/site_tests/cellular_SuspendResume/control.tmobile b/client/site_tests/cellular_SuspendResume/control.tmobile
index 97435e3..c79c03c 100644
--- a/client/site_tests/cellular_SuspendResume/control.tmobile
+++ b/client/site_tests/cellular_SuspendResume/control.tmobile
@@ -17,6 +17,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:tmobile"
+PY_VERSION = 3
 
 DOC = """
     Verify that 3g can be enabled under all suspend/resume situations.
diff --git a/client/site_tests/cellular_SuspendResume/control.verizon b/client/site_tests/cellular_SuspendResume/control.verizon
index a11e268..29a4000 100644
--- a/client/site_tests/cellular_SuspendResume/control.verizon
+++ b/client/site_tests/cellular_SuspendResume/control.verizon
@@ -17,6 +17,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 DEPENDENCIES = "carrier:verizon"
+PY_VERSION = 3
 
 DOC = """
     Verify that 3g can be enabled under all suspend/resume situations.
diff --git a/client/site_tests/cellular_SuspendResume/control.vodafone b/client/site_tests/cellular_SuspendResume/control.vodafone
new file mode 100644
index 0000000..f756b78
--- /dev/null
+++ b/client/site_tests/cellular_SuspendResume/control.vodafone
@@ -0,0 +1,30 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_SuspendResume.vodafone"
+PURPOSE = "Test 3g modem state after suspend/resume"
+CRITERIA = """
+    Check the 3g state of the device after suspend and resume.  Verify
+    that 3g can be enabled in all situations after the device resumes.
+    Verify that, if autoconnect is turned on, the modem autoconnects
+    after resuming.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:vodafone"
+PY_VERSION = 3
+
+DOC = """
+    Verify that 3g can be enabled under all suspend/resume situations.
+"""
+
+# Run all scenarios twice, first with autoconnect off, then with it on
+job.run_test('cellular_SuspendResume',
+             autoconnect=False, tag='autoconnect_off')
+job.run_test('cellular_SuspendResume',
+             autoconnect=True, tag='autoconnect_on')
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/cellular_ValidateTestEnvironment.py b/client/site_tests/cellular_ValidateTestEnvironment/cellular_ValidateTestEnvironment.py
new file mode 100644
index 0000000..deae38a
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/cellular_ValidateTestEnvironment.py
@@ -0,0 +1,20 @@
+# Lint as: python2, python3
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.bin import test
+
+
+class cellular_ValidateTestEnvironment(test.test):
+    """
+    Verify that the test setup common to all other tests has no failures.
+    """
+    version = 1
+
+    def run_once(self, test_env):
+        """ Runs the test once """
+        with test_env:
+            self.test_env = test_env
+            # Do nothing else. This is enough to initialize and terminate the
+            # test environment.
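The new test exercises only the context-manager protocol of the test environment: entering runs setup, leaving runs teardown, and a failure in either fails the test. An illustrative stand-in, assuming a hypothetical class rather than the real CellularTestEnvironment API:

class FakeTestEnvironment(object):
    """Hypothetical stand-in; not the real CellularTestEnvironment API."""

    def __enter__(self):
        # Setup work happens here (shill proxy, modem checks, ...).
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Teardown work happens here; never swallow exceptions.
        return False


# Entering and leaving the context is all the test body needs to do.
with FakeTestEnvironment():
    pass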
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control b/client/site_tests/cellular_ValidateTestEnvironment/control
new file mode 100644
index 0000000..fa8fb7f
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control
@@ -0,0 +1,36 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.amarisoft b/client/site_tests/cellular_ValidateTestEnvironment/control.amarisoft
new file mode 100644
index 0000000..7d63394
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.amarisoft
@@ -0,0 +1,38 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.amarisoft"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:amarisoft"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.att b/client/site_tests/cellular_ValidateTestEnvironment/control.att
new file mode 100644
index 0000000..08ff0e4
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.att
@@ -0,0 +1,38 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.att"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:att"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.docomo b/client/site_tests/cellular_ValidateTestEnvironment/control.docomo
new file mode 100644
index 0000000..ce5bb4b
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.docomo
@@ -0,0 +1,38 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.docomo"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:docomo"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.ee b/client/site_tests/cellular_ValidateTestEnvironment/control.ee
new file mode 100644
index 0000000..80614f4
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.ee
@@ -0,0 +1,38 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.ee"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:ee"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.kddi b/client/site_tests/cellular_ValidateTestEnvironment/control.kddi
new file mode 100644
index 0000000..b3ce7ff
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.kddi
@@ -0,0 +1,38 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.kddi"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:kddi"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.rakuten b/client/site_tests/cellular_ValidateTestEnvironment/control.rakuten
new file mode 100644
index 0000000..9410f9f
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.rakuten
@@ -0,0 +1,38 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.rakuten"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:rakuten"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.softbank b/client/site_tests/cellular_ValidateTestEnvironment/control.softbank
new file mode 100644
index 0000000..510392b
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.softbank
@@ -0,0 +1,38 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.softbank"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:softbank"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.sprint b/client/site_tests/cellular_ValidateTestEnvironment/control.sprint
new file mode 100644
index 0000000..000dae8
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.sprint
@@ -0,0 +1,38 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.sprint"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:sprint"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.tmobile b/client/site_tests/cellular_ValidateTestEnvironment/control.tmobile
new file mode 100644
index 0000000..e1d805e
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.tmobile
@@ -0,0 +1,38 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.tmobile"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:tmobile"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.verizon b/client/site_tests/cellular_ValidateTestEnvironment/control.verizon
new file mode 100644
index 0000000..d8e0fe0
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.verizon
@@ -0,0 +1,38 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.verizon"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:verizon"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cellular_ValidateTestEnvironment/control.vodafone b/client/site_tests/cellular_ValidateTestEnvironment/control.vodafone
new file mode 100644
index 0000000..a7a5bc8
--- /dev/null
+++ b/client/site_tests/cellular_ValidateTestEnvironment/control.vodafone
@@ -0,0 +1,38 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ValidateTestEnvironment.vodafone"
+PURPOSE = "Verify that the test setup common to all other tests has no failures."
+CRITERIA = """
+    Check that no errors occur when the setup and tear down of the
+    base class CellularTestEnvironment is executed multiple times.
+"""
+ATTRIBUTES = "suite:cellular_ota_flaky"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "client"
+DEPENDENCIES = "carrier:vodafone"
+PY_VERSION = 3
+
+DOC = """
+    Verify that the test setup common to all other tests has no failures.
+"""
+
+from autotest_lib.client.cros.cellular import test_environment
+
+ITERATIONS_PER_TEST=3
+
+test_env = test_environment.CellularTestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='Base',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
+
+test_env = test_environment.CellularOTATestEnvironment(enable_temp_containments=False)
+job.run_test('cellular_ValidateTestEnvironment',
+             tag='OTA',
+             test_env=test_env,
+             iterations=ITERATIONS_PER_TEST)
diff --git a/client/site_tests/cfm_AutotestSmokeTest/cfm_AutotestSmokeTest.py b/client/site_tests/cfm_AutotestSmokeTest/cfm_AutotestSmokeTest.py
deleted file mode 100644
index db06c61..0000000
--- a/client/site_tests/cfm_AutotestSmokeTest/cfm_AutotestSmokeTest.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib.cros import chrome
-
-
-class cfm_AutotestSmokeTest(test.test):
-    """
-    Starts a web browser and verifies that nothing crashes.
-    """
-    version = 1
-
-    def run_once(self):
-        """
-        Runs the test.
-        """
-        with chrome.Chrome(init_network_controller = True) as cr:
-            cr.browser.platform.SetHTTPServerDirectories(self.bindir)
-            self.tab = cr.browser.tabs[0]
-            self.tab.Navigate(cr.browser.platform.http_server.UrlOf(
-                os.path.join(self.bindir, 'smoke_test.html')))
-            self.tab.WaitForDocumentReadyStateToBeComplete()
diff --git a/client/site_tests/cfm_AutotestSmokeTest/control b/client/site_tests/cfm_AutotestSmokeTest/control
deleted file mode 100644
index 3bb914e..0000000
--- a/client/site_tests/cfm_AutotestSmokeTest/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "malmnas@google.com, chromeos-meetings@google.com"
-NAME = "cfm_AutotestSmokeTest"
-PURPOSE = ("Smoke test used for tracking the stability of the Autotest "
-        "framework.")
-CRITERIA = "Fails if the Autotest framework doesn't work as expected."
-ATTRIBUTES = "suite:hotrod, suite:bluestreak-pre-cq"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-}
-
-DOC = """
-This test tracks the stability of the Auotest framework. The test opens a
-web browser and verifies that nothing crashes.
-"""
-
-job.run_test("cfm_AutotestSmokeTest")
diff --git a/client/site_tests/cfm_AutotestSmokeTest/smoke_test.html b/client/site_tests/cfm_AutotestSmokeTest/smoke_test.html
deleted file mode 100644
index b5ce3e9..0000000
--- a/client/site_tests/cfm_AutotestSmokeTest/smoke_test.html
+++ /dev/null
@@ -1,3 +0,0 @@
-<html>
-  <title>AutotestSmokeTest</title>
-</html>
diff --git a/client/site_tests/desktopui_CheckRlzPingSent/control b/client/site_tests/desktopui_CheckRlzPingSent/control
index 89bc977..ce83e80 100644
--- a/client/site_tests/desktopui_CheckRlzPingSent/control
+++ b/client/site_tests/desktopui_CheckRlzPingSent/control
@@ -10,5 +10,6 @@
 PURPOSE = "Tests that the CAF and CAI rlz pings are sent after ping timeout."
 TIME = "SHORT"
 DOC = """ Tests that the CAF and CAI rlz pings are sent after ping timeout. """
+PY_VERSION = 3
 
 job.run_test('desktopui_CheckRlzPingSent')
diff --git a/client/site_tests/desktopui_CheckRlzPingSent/desktopui_CheckRlzPingSent.py b/client/site_tests/desktopui_CheckRlzPingSent/desktopui_CheckRlzPingSent.py
index 18fef0a..0e872ae 100644
--- a/client/site_tests/desktopui_CheckRlzPingSent/desktopui_CheckRlzPingSent.py
+++ b/client/site_tests/desktopui_CheckRlzPingSent/desktopui_CheckRlzPingSent.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/desktopui_ChromeCheck/control b/client/site_tests/desktopui_ChromeCheck/control
new file mode 100644
index 0000000..ecf9d00
--- /dev/null
+++ b/client/site_tests/desktopui_ChromeCheck/control
@@ -0,0 +1,42 @@
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST SOON TO BE DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate, see: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = "derat, chromeos-ui"
+NAME = "desktopui_ChromeCheck"
+ATTRIBUTES = "suite:bvt-perbuild"
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "desktopui"
+TEST_TYPE = "client"
+JOB_RETRIES = 0
+
+DOC = """
+Test that Chrome is at least marginally usable.
+
+Uses Telemetry to log in, load a simple HTML page, and log out. Along the way,
+tests that session_manager emits the expected SessionStateChanged D-Bus signal
+about the session starting and that the user's encrypted home directory is
+mounted.
+
+A failure can indicate several things:
+
+- Telemetry-to-Chrome communication isn't working (is Chrome crashing?).
+- Chrome and session_manager aren't coordinating login properly.
+- Chrome is internally broken and can't load web pages.
+- Chrome and cryptohome aren't coordinating user home dir mounting properly.
+
+That's a fairly broad range of causes. They're all covered here with the intent
+of providing a single fast-running, minimally-flaky test that can be used to
+validate new versions of Chrome before integrating them into ChromeOS.
+
+The info log provides high-level information about what the test is doing and
+can hopefully be used to narrow down the location of failure.
+"""
+
+job.run_test('desktopui_ChromeCheck')
diff --git a/client/site_tests/desktopui_ChromeCheck/desktopui_ChromeCheck.py b/client/site_tests/desktopui_ChromeCheck/desktopui_ChromeCheck.py
new file mode 100644
index 0000000..818a60d
--- /dev/null
+++ b/client/site_tests/desktopui_ChromeCheck/desktopui_ChromeCheck.py
@@ -0,0 +1,95 @@
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import time
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import chrome, session_manager
+from autotest_lib.client.cros import cryptohome
+from autotest_lib.client.cros.graphics import graphics_utils
+
+from dbus.mainloop.glib import DBusGMainLoop
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
+
+class desktopui_ChromeCheck(test.test):
+    """Performs basic integration testing for Chrome.
+
+    This test performs very basic tests to verify that Chrome is somewhat
+    usable in conjunction with the rest of the system.
+    """
+    version = 1
+
+    _CHECK_CHROME_TIMEOUT_SEC = 30
+    _SESSION_START_TIMEOUT_SEC = 20
+
+    _TEST_FILENAME = 'test.html'
+    _TEST_CONTENT = 'Page loaded successfully.'
+
+    _SCREENSHOT_DIR = '/usr/local/autotest/results/default/' \
+            'desktopui_ChromeCheck/results/'
+
+    def initialize(self):
+        super(desktopui_ChromeCheck, self).initialize()
+
+    def run_once(self):
+        """
+        Runs the test.
+        """
+        dbus_loop = DBusGMainLoop(set_as_default=True)
+        listener = session_manager.SessionSignalListener(GObject.MainLoop())
+        listener.listen_for_session_state_change('started')
+
+        logging.info('Logging in...')
+        with chrome.Chrome(init_network_controller=True) as cr:
+            # Check that Chrome asks session_manager to start a session.
+            listener.wait_for_signals(
+                    desc=('SessionStateChanged "started" D-Bus signal from '
+                          'session_manager'),
+                    timeout=self._SESSION_START_TIMEOUT_SEC)
+            logging.info('Successfully logged in as "%s"', cr.username)
+
+            # Check that the user's encrypted home directory was mounted.
+            if not cryptohome.is_vault_mounted(user=cr.username,
+                                               allow_fail=False):
+                raise error.TestFail(
+                        'Didn\'t find mounted cryptohome for "%s"' %
+                        cr.username)
+
+            # Check that Chrome is able to load a web page.
+            logging.info('Serving test page from bindir %s', self.bindir)
+            cr.browser.platform.SetHTTPServerDirectories(self.bindir)
+            url = cr.browser.platform.http_server.UrlOf(
+                    os.path.join(self.bindir, self._TEST_FILENAME))
+            logging.info('Loading %s...', url)
+
+            try:
+                tab = cr.browser.tabs.New()
+                tab.Navigate(url)
+                tab.WaitForDocumentReadyStateToBeComplete()
+                content = tab.EvaluateJavaScript(
+                        'document.documentElement.innerText')
+                if content != self._TEST_CONTENT:
+                    raise error.TestFail(
+                            'Expected page content "%s" but got "%s"' %
+                            (self._TEST_CONTENT, content))
+                logging.info('Saw expected content')
+            except Exception as e:
+                prefix = 'screenshot-%s' % time.strftime('%Y%m%d-%H%M%S')
+                logging.info('Got exception; saving screenshot to %s/%s',
+                             self._SCREENSHOT_DIR, prefix)
+                if not os.path.exists(self._SCREENSHOT_DIR):
+                    os.makedirs(self._SCREENSHOT_DIR)
+                graphics_utils.take_screenshot(self._SCREENSHOT_DIR, prefix)
+
+                if isinstance(e, error.TestFail):
+                    raise e
+                else:
+                    raise error.TestFail(str(e))
diff --git a/client/site_tests/desktopui_ChromeSanity/test.html b/client/site_tests/desktopui_ChromeCheck/test.html
similarity index 100%
rename from client/site_tests/desktopui_ChromeSanity/test.html
rename to client/site_tests/desktopui_ChromeCheck/test.html
diff --git a/client/site_tests/desktopui_ChromeSanity/control b/client/site_tests/desktopui_ChromeSanity/control
deleted file mode 100644
index 9604c7f..0000000
--- a/client/site_tests/desktopui_ChromeSanity/control
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "derat, chromeos-ui"
-NAME = "desktopui_ChromeSanity"
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "desktopui"
-TEST_TYPE = "client"
-JOB_RETRIES = 0
-
-DOC = """
-Test that Chrome is at least marginally usable.
-
-Uses Telemetry to log in, load a simple HTML page, and log out. Along the way,
-tests that session_manager emits the expected SessionStateChanged D-Bus signal
-about the session starting and that the user's encrypted home directory is
-mounted.
-
-A failure can indicate several things:
-
-- Telemetry-to-Chrome communication isn't working (is Chrome crashing?).
-- Chrome and session_manager aren't coordinating login properly.
-- Chrome is internally broken and can't load web pages.
-- Chrome and cryptohome aren't coordinating user home dir mounting properly.
-
-That's a fairly broad range of causes. They're all covered here with the intent
-of providing a single fast-running, minimally-flaky test that can be used to
-validate new versions of Chrome before integrating them into Chrome OS.
-
-The info log provides high-level information about what the test is doing and
-can hopefully be used to narrow down the location of failure.
-"""
-
-job.run_test('desktopui_ChromeSanity')
diff --git a/client/site_tests/desktopui_ChromeSanity/desktopui_ChromeSanity.py b/client/site_tests/desktopui_ChromeSanity/desktopui_ChromeSanity.py
deleted file mode 100644
index a36ac79..0000000
--- a/client/site_tests/desktopui_ChromeSanity/desktopui_ChromeSanity.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import gobject
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome, session_manager
-from autotest_lib.client.cros import cryptohome
-from autotest_lib.client.cros.graphics import graphics_utils
-
-from dbus.mainloop.glib import DBusGMainLoop
-
-
-class desktopui_ChromeSanity(test.test):
-    """Performs basic integration testing for Chrome.
-
-    This test performs very basic tests to verify that Chrome is somewhat
-    usable in conjunction with the rest of the system.
-    """
-    version = 1
-
-    _CHECK_CHROME_TIMEOUT_SEC = 30
-    _SESSION_START_TIMEOUT_SEC = 20
-
-    _TEST_FILENAME = 'test.html'
-    _TEST_CONTENT = 'Page loaded successfully.'
-
-    _SCREENSHOT_DIR = '/usr/local/autotest/results/default/' \
-            'desktopui_ChromeSanity/results/'
-
-
-    def initialize(self):
-        super(desktopui_ChromeSanity, self).initialize()
-
-
-    def run_once(self):
-        """
-        Runs the test.
-        """
-        dbus_loop = DBusGMainLoop(set_as_default=True)
-        listener = session_manager.SessionSignalListener(gobject.MainLoop())
-        listener.listen_for_session_state_change('started')
-
-        logging.info('Logging in...')
-        with chrome.Chrome(init_network_controller=True) as cr:
-            # Check that Chrome asks session_manager to start a session.
-            listener.wait_for_signals(
-                    desc=('SessionStateChanged "started" D-Bus signal from '
-                          'session_manager'),
-                    timeout=self._SESSION_START_TIMEOUT_SEC)
-            logging.info('Successfully logged in as "%s"', cr.username)
-
-            # Check that the user's encrypted home directory was mounted.
-            if not cryptohome.is_vault_mounted(user=cr.username,
-                                               allow_fail=False):
-                raise error.TestFail(
-                        'Didn\'t find mounted cryptohome for "%s"' %
-                        cr.username)
-
-            # Check that Chrome is able to load a web page.
-            cr.browser.platform.SetHTTPServerDirectories(self.bindir)
-            url = cr.browser.platform.http_server.UrlOf(
-                    os.path.join(self.bindir, self._TEST_FILENAME))
-            logging.info('Loading %s...', url)
-
-            try:
-                tab = cr.browser.tabs.New()
-                tab.Navigate(url)
-                tab.WaitForDocumentReadyStateToBeComplete()
-                content = tab.EvaluateJavaScript(
-                        'document.documentElement.innerText')
-                if content != self._TEST_CONTENT:
-                    raise error.TestFail(
-                            'Expected page content "%s" but got "%s"' %
-                            (self._TEST_CONTENT, content))
-                logging.info('Saw expected content')
-            except Exception as e:
-                prefix = 'screenshot-%s' % time.strftime('%Y%m%d-%H%M%S')
-                logging.info(
-                        'Got exception; saving screenshot to %s/%s',
-                        self._SCREENSHOT_DIR, prefix)
-                if not os.path.exists(self._SCREENSHOT_DIR):
-                    os.makedirs(self._SCREENSHOT_DIR)
-                graphics_utils.take_screenshot(self._SCREENSHOT_DIR, prefix)
-
-                if isinstance(e, error.TestFail):
-                    raise e
-                else:
-                    raise error.TestFail(str(e))
diff --git a/client/site_tests/desktopui_ConnectivityDiagnostics/control b/client/site_tests/desktopui_ConnectivityDiagnostics/control
index 5200af3..3ac4e12 100644
--- a/client/site_tests/desktopui_ConnectivityDiagnostics/control
+++ b/client/site_tests/desktopui_ConnectivityDiagnostics/control
@@ -2,9 +2,14 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST SOON TO BE DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate, see: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 AUTHOR = "zork, ebeach"
 NAME = "desktopui_ConnectivityDiagnostics"
-PURPOSE = "Basic sanity check of connectivity diagnostics in Chrome."
+PURPOSE = "Basic confidence check of connectivity diagnostics in Chrome."
 CRITERIA = "This test will fail if the connectivity diagnostics smoke tests fail."
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
diff --git a/client/site_tests/desktopui_ConnectivityDiagnostics/desktopui_ConnectivityDiagnostics.py b/client/site_tests/desktopui_ConnectivityDiagnostics/desktopui_ConnectivityDiagnostics.py
index 26df32a..c7cd657 100644
--- a/client/site_tests/desktopui_ConnectivityDiagnostics/desktopui_ConnectivityDiagnostics.py
+++ b/client/site_tests/desktopui_ConnectivityDiagnostics/desktopui_ConnectivityDiagnostics.py
@@ -8,7 +8,7 @@
 
 
 class desktopui_ConnectivityDiagnostics(test.test):
-    """Basic sanity check of connectivity diagnostics in Chrome."""
+    """Basic confidence check of connectivity diagnostics in Chrome."""
     version = 1
 
 
diff --git a/client/site_tests/desktopui_FontCache/control b/client/site_tests/desktopui_FontCache/control
index c7d52af..c511a48 100644
--- a/client/site_tests/desktopui_FontCache/control
+++ b/client/site_tests/desktopui_FontCache/control
@@ -2,7 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+# TEST SOON TO BE DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate, see: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = "ChromeOS Team"
 NAME = "desktopui_FontCache"
 PURPOSE = "Test font cache freshness."
 CRITERIA = """
diff --git a/client/site_tests/desktopui_GmailLatency/control b/client/site_tests/desktopui_GmailLatency/control
deleted file mode 100644
index f7686f6..0000000
--- a/client/site_tests/desktopui_GmailLatency/control
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Project"
-NAME = "desktopui_GmailLatency"
-PURPOSE = "Basic test of performance and latency of Gmail on Chrome OS."
-CRITERIA = """
-This test will fail if the URL fails to load or the test does not complete
-within the allotted time.
-"""
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "desktopui"
-TEST_TYPE = "client"
-
-DOC = """
-This test opens a new browser to the internal Gmail latency test URL and
-waits for the test to complete.
-TODO(sosa@chromium.org) - Must find way to bring over tests from image (add
-to chromium buildbot, etc).  url_fetch_test is from ~chromium/src/chrome/test
-TODO(sjg@chromium.org) - Create an externally-accessible URL for use by this
-test. At present the test can only be run by one dev, and is intended for use
-by the Gmail team.
-TODO(sjg@chromium.org) - Move this to use PyAuto or some other mechanism
-which uses the running Chrome and doesn't involve us including a binary with
-this test
-"""
-
-job.run_test('desktopui_GmailLatency')
diff --git a/client/site_tests/desktopui_GmailLatency/desktopui_GmailLatency.py b/client/site_tests/desktopui_GmailLatency/desktopui_GmailLatency.py
deleted file mode 100644
index 16bc5fd..0000000
--- a/client/site_tests/desktopui_GmailLatency/desktopui_GmailLatency.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os, time
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import constants, cros_ui
-from autotest_lib.client.cros.graphics import graphics_utils
-
-class desktopui_GmailLatency(test.test):
-    version = 1
-
-    def run_once(self):
-        url = 'http://azlaba29.mtv.corp.google.com:9380/auto/google3/java/'\
-                'com/google/caribou/ui/pinto/modules/auto/tests/'\
-                'latencytest_auto.html'
-        js_expr = 'domAutomationController.send(!!window.G_testRunner'\
-                '&& window.G_testRunner.isFinished())'
-
-        # timeout is in ms, so allow a 5 minute timeout
-        # as of jan-11 it normally takes about 2 minutes on x86-mario
-        timeout = 5 * 60 * 1000
-
-        os.chdir(self.bindir)
-
-        # Select correct binary.
-        cpuType = utils.get_cpu_arch()
-        url_fetch_test = 'url_fetch_test'
-        if cpuType == "arm":
-            url_fetch_test += '.arm'
-
-        # Stop chrome from restarting and kill login manager.
-        try:
-            orig_pid = utils.system_output('pgrep %s' %
-                constants.SESSION_MANAGER)
-            open(constants.DISABLE_BROWSER_RESTART_MAGIC_FILE, 'w').close()
-        except IOError, e:
-            logging.debug(e)
-            raise error.TestError('Failed to disable browser restarting.')
-
-        # We could kill with signal 9 so that the session manager doesn't exit.
-        # But this seems to leave the screen blank while the test is running.
-        # So do it this way (which means clean_exit is always False)
-        utils.nuke_process_by_name(name=constants.BROWSER)
-
-        clean_exit = False
-        try:
-            time.sleep(1)
-            new_pid = utils.system_output('pgrep %s' %
-                constants.SESSION_MANAGER)
-            if orig_pid != new_pid:
-                # This is expected behaviour of the session manager.
-                pass
-
-            # Copy over chrome, chrome.pak, locales, chromeos needed for test.
-            utils.system('cp -r %s/* .' % '/opt/google/chrome')
-
-            # Setup parameters
-            params = ('--url="%s" --wait_js_expr="%s" --wait_js_timeout=%d' %
-                        (url, js_expr, timeout))
-            utils.system('./%s %s' % (url_fetch_test, params))
-
-        except error.CmdError, e:
-            logging.debug(e)
-            raise error.TestFail('Gmail Latency test was unsuccessful in %s'
-                                 % os.getcwd())
-
-        finally:
-            # Allow chrome to be restarted again.
-            os.unlink(constants.DISABLE_BROWSER_RESTART_MAGIC_FILE)
-
-            # Reset the UI but only if we need to (avoid double reset).
-            if not clean_exit:
-                cros_ui.nuke()
diff --git a/client/site_tests/desktopui_GmailLatency/url_fetch_test b/client/site_tests/desktopui_GmailLatency/url_fetch_test
deleted file mode 100755
index 0303de5..0000000
--- a/client/site_tests/desktopui_GmailLatency/url_fetch_test
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/desktopui_GmailLatency/url_fetch_test.arm b/client/site_tests/desktopui_GmailLatency/url_fetch_test.arm
deleted file mode 100755
index 084ea46..0000000
--- a/client/site_tests/desktopui_GmailLatency/url_fetch_test.arm
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/desktopui_HangDetector/control b/client/site_tests/desktopui_HangDetector/control
deleted file mode 100644
index 0f07638..0000000
--- a/client/site_tests/desktopui_HangDetector/control
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "cmasone@chromium.org"
-NAME = "desktopui_HangDetector"
-ATTRIBUTES = "suite:regression"
-TIME = "FAST"
-TEST_TYPE = "client"
-
-DOC = """
-Enable and verify session_manager-driven browser hang detection.
-"""
-
-job.run_test('desktopui_HangDetector')
-
diff --git a/client/site_tests/desktopui_HangDetector/desktopui_HangDetector.py b/client/site_tests/desktopui_HangDetector/desktopui_HangDetector.py
deleted file mode 100644
index bac7368..0000000
--- a/client/site_tests/desktopui_HangDetector/desktopui_HangDetector.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os, signal, time
-
-import common
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import constants, cros_logging, cros_ui, login
-
-class desktopui_HangDetector(test.test):
-    """
-    This class enables browser process hang detection, simulates a hang
-    by sending a SIGSTOP to the browser, and then checks to see that it
-    got killed and restarted successfully -- without the UI getting bounced.
-    """
-    version = 1
-
-
-    def initialize(self):
-        self._pauser = cros_logging.LogRotationPauser()
-        self._pauser.begin()
-
-
-    def _get_oldest_pid_by_name(self, name):
-        try:
-            pid = utils.get_oldest_pid_by_name(name)
-            logging.debug('Found %d for %s', pid, name)
-        except error.CmdError as e:
-            raise error.TestError('Could not find pid of %s: %r' % (name, e))
-        except ValueError as e:
-            raise error.TestError('Got bad pid looking up %s: %r' % (name, e))
-        if not pid:
-            raise error.TestError('Got no pid looking up %s' % name)
-        return pid
-
-
-    def run_once(self):
-        # Create magic file to enable browser liveness checking and
-        # bounce the session manager to pick up the flag file.
-        cros_ui.stop()
-        os.mknod(constants.ENABLE_BROWSER_HANG_DETECTION_FILE)
-        cros_ui.start()
-
-        browser_pid = self._get_oldest_pid_by_name(constants.BROWSER)
-        sm_pid = self._get_oldest_pid_by_name(constants.SESSION_MANAGER)
-
-        # Reading the log is the best way to watch for the hang detector.
-        reader = cros_logging.LogReader()
-        reader.set_start_by_current()
-
-        # To simulate a hang, STOP the browser and wait for it to get
-        # hit by the session manager.  It won't actually exit until it gets
-        # a SIGCONT, though.
-        try:
-            os.kill(browser_pid, signal.SIGSTOP)  # Simulate hang.
-        except OSError as e:
-            raise error.TestError('Cannot STOP browser: %r' % e)
-
-        # Watch for hang detection.
-        utils.poll_for_condition(
-            condition=lambda: reader.can_find('Aborting browser process.'),
-            exception=utils.TimeoutError('Waiting for hang detector.'),
-            sleep_interval=5,
-            timeout=60)
-
-        try:
-            os.kill(browser_pid, signal.SIGCONT)  # Allow browser to die.
-        except OSError as e:
-            raise error.TestError('Cannot CONT browser: %r' % e)
-
-        # Wait for old browser process to be gone.
-        utils.poll_for_condition(
-            condition= lambda: utils.pid_is_alive(browser_pid),
-            exception=utils.TimeoutError(
-                'Browser does not seem to have restarted!'),
-            timeout=60)
-
-        # Wait for new browser to come up.
-        login.wait_for_browser()
-        if sm_pid != self._get_oldest_pid_by_name(constants.SESSION_MANAGER):
-            raise error.TestFail('session_manager seems to have restarted')
-
-
-    def cleanup(self):
-        if os.path.exists(constants.ENABLE_BROWSER_HANG_DETECTION_FILE):
-            os.remove(constants.ENABLE_BROWSER_HANG_DETECTION_FILE)
-        self._pauser.end()
diff --git a/client/site_tests/desktopui_MediaAudioFeedback/control b/client/site_tests/desktopui_MediaAudioFeedback/control
index 9c337b7..74c9613 100644
--- a/client/site_tests/desktopui_MediaAudioFeedback/control
+++ b/client/site_tests/desktopui_MediaAudioFeedback/control
@@ -2,13 +2,18 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST SOON TO BE DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate, see: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 AUTHOR = "Chromium OS Project"
 NAME = "desktopui_MediaAudioFeedback"
 PURPOSE = "Verify sound from media files is audible"
 CRITERIA = """
 This test will fail if media files sound is not audible.
 """
-ATTRIBUTES = "suite:bvt-perbuild"
+ATTRIBUTES = ""
 TIME = "SHORT"
 TEST_CATEGORY = "General"
 TEST_CLASS = "desktopui"
diff --git a/client/site_tests/desktopui_RootfsLacros/control b/client/site_tests/desktopui_RootfsLacros/control
new file mode 100644
index 0000000..2f7bf6e
--- /dev/null
+++ b/client/site_tests/desktopui_RootfsLacros/control
@@ -0,0 +1,16 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "desktopui_RootfsLacros"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "client"
+PURPOSE = "Get rootfs-lacros version."
+TIME = "SHORT"
+ATTRIBUTES = "suite:bvt-perbuild"
+PY_VERSION = 3
+DOC = """ Tests launching rootfs-lacros and checking version number. """
+
+job.run_test('desktopui_RootfsLacros')
diff --git a/client/site_tests/desktopui_RootfsLacros/desktopui_RootfsLacros.py b/client/site_tests/desktopui_RootfsLacros/desktopui_RootfsLacros.py
new file mode 100644
index 0000000..d4b39c4
--- /dev/null
+++ b/client/site_tests/desktopui_RootfsLacros/desktopui_RootfsLacros.py
@@ -0,0 +1,72 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import time
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import ui_utils
+from autotest_lib.client.common_lib.cros import chrome
+
+
+class desktopui_RootfsLacros(test.test):
+    """Tests logging in, opening lacros, and verifying the version number."""
+    version = 1
+
+    def is_lacros_running(self):
+        """ Return True if lacros is running. """
+        process = utils.run('pgrep -f /run/lacros/chrome',
+                            ignore_status=True).stdout
+        return len(process) > 0
+
+    def run_once(self, dont_override_profile=False):
+        """Check rootfs-lacros opens and its version number."""
+        # Use args to keep test as hermetic as possible.
+        # See crbug.com/1268252 and crbug.com/1268743 for details.
+        browser_args = [
+                '--lacros-selection=rootfs', '--enable-features=LacrosSupport',
+                '--enable-features=LacrosPrimary',
+                '--disable-lacros-keep-alive', '--disable-login-lacros-opening'
+        ]
+
+        with chrome.Chrome(autotest_ext=True,
+                           dont_override_profile=dont_override_profile,
+                           extra_browser_args=browser_args) as cr:
+            # Use chrome.automation API to drive UI.
+            self.ui = ui_utils.UI_Handler()
+            self.ui.start_ui_root(cr)
+
+            # Click the shelf button for Lacros.
+            self.ui.wait_for_ui_obj('Lacros', role='button')
+            self.ui.doDefault_on_obj('Lacros', role='button')
+
+            # Check that lacros process is running.
+            try:
+                utils.poll_for_condition(condition=self.is_lacros_running)
+            except utils.TimeoutError:
+                raise error.TestFail(
+                        'No Lacros processes running after clicking shelf icon'
+                )
+
+            # Get lacros version
+            res = utils.run('/run/lacros/chrome -version').stdout
+            version = str(utils.parse_chrome_version(res)[0])
+            logging.info('Lacros version is %s', version)
+
+            # Save lacros version for other uses.
+            save_file = os.path.join(self.resultsdir, 'lacros_version.txt')
+            tmp_file = '/tmp/lacros_version.txt'
+            utils.run(['echo', version, '>', save_file])
+            utils.run(['echo', version, '>', tmp_file])
+
+            # Wait to make sure lacros doesn't crash.
+            time.sleep(10)
+            try:
+                utils.poll_for_condition(condition=self.is_lacros_running)
+            except utils.TimeoutError:
+                raise error.TestFail('No Lacros processes running after 10s')
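A note on the version-saving step above: if utils.run is handed a list and does not route it through a shell, the '>' element is passed to echo as a literal argument rather than acting as a redirection. A hedged alternative sketch that writes the same two files with plain Python file I/O (assuming version and self.resultsdir as used in the test):

    import os

    def save_lacros_version(version, resultsdir):
        """Write the detected Lacros version to the results dir and /tmp."""
        save_file = os.path.join(resultsdir, 'lacros_version.txt')
        tmp_file = '/tmp/lacros_version.txt'
        for path in (save_file, tmp_file):
            with open(path, 'w') as f:
                f.write(version + '\n')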
diff --git a/client/site_tests/desktopui_ScreenLocker/control b/client/site_tests/desktopui_ScreenLocker/control
index 14537d6..af9481e 100644
--- a/client/site_tests/desktopui_ScreenLocker/control
+++ b/client/site_tests/desktopui_ScreenLocker/control
@@ -2,7 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+# TEST SOON TO BE DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate, see: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = "ChromeOS Team"
 NAME = "desktopui_ScreenLocker"
 PURPOSE = "Verify screen locker is functional."
 CRITERIA = """
@@ -24,5 +29,3 @@
 """
 
 job.run_test('desktopui_ScreenLocker')
-
-
diff --git a/client/site_tests/desktopui_ScreenLocker/desktopui_ScreenLocker.py b/client/site_tests/desktopui_ScreenLocker/desktopui_ScreenLocker.py
index 58d90f2..d82f093 100644
--- a/client/site_tests/desktopui_ScreenLocker/desktopui_ScreenLocker.py
+++ b/client/site_tests/desktopui_ScreenLocker/desktopui_ScreenLocker.py
@@ -2,7 +2,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import gobject
 import logging
 
 from autotest_lib.client.bin import test, utils
@@ -13,6 +12,11 @@
 
 from datetime import datetime
 from dbus.mainloop.glib import DBusGMainLoop
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 
 class desktopui_ScreenLocker(test.test):
     """This is a client side test that exercises the screenlocker."""
@@ -67,7 +71,7 @@
         if self.screen_locked:
             raise error.TestFail('Screen already locked')
         signal_listener = session_manager.ScreenIsLockedSignalListener(
-                gobject.MainLoop())
+                GObject.MainLoop())
         ext = self._chrome.autotest_ext
 
         start = datetime.now()
diff --git a/client/site_tests/desktopui_SetFieldsWithChromeDriver/control b/client/site_tests/desktopui_SetFieldsWithChromeDriver/control
index 0301897..fdcc444 100644
--- a/client/site_tests/desktopui_SetFieldsWithChromeDriver/control
+++ b/client/site_tests/desktopui_SetFieldsWithChromeDriver/control
@@ -2,6 +2,11 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST SOON TO BE DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate, see: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 AUTHOR = "beeps@chromium.org"
 NAME = "desktopui_SetFieldsWithChromeDriver"
 PURPOSE = "Verify that we can set fields with chromedriver."
diff --git a/client/site_tests/desktopui_SimpleLogin/control b/client/site_tests/desktopui_SimpleLogin/control
index dc223ca..a2e1238 100644
--- a/client/site_tests/desktopui_SimpleLogin/control
+++ b/client/site_tests/desktopui_SimpleLogin/control
@@ -12,6 +12,7 @@
 TEST_CATEGORY = "General"
 TEST_CLASS = "desktopui"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test logs in and waits forever (or until the exit flag is seen).
diff --git a/client/site_tests/desktopui_SimpleLogin/desktopui_SimpleLogin.py b/client/site_tests/desktopui_SimpleLogin/desktopui_SimpleLogin.py
index bc2d5a4..39c4f96 100755
--- a/client/site_tests/desktopui_SimpleLogin/desktopui_SimpleLogin.py
+++ b/client/site_tests/desktopui_SimpleLogin/desktopui_SimpleLogin.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/desktopui_SpeechSynthesisSemiAuto/control b/client/site_tests/desktopui_SpeechSynthesisSemiAuto/control
index ed45e7e..157565d 100644
--- a/client/site_tests/desktopui_SpeechSynthesisSemiAuto/control
+++ b/client/site_tests/desktopui_SpeechSynthesisSemiAuto/control
@@ -2,6 +2,11 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST SOON TO BE DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate, see: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 AUTHOR = "Chromium OS Team"
 NAME = "desktopui_SpeechSynthesisSemiAuto"
 PURPOSE = "Verify that text strings are synthesized correctly."
diff --git a/client/site_tests/desktopui_UrlFetchWithChromeDriver/control b/client/site_tests/desktopui_UrlFetchWithChromeDriver/control
deleted file mode 100644
index 7ba812d..0000000
--- a/client/site_tests/desktopui_UrlFetchWithChromeDriver/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dshi@chromium.org"
-NAME = "desktopui_UrlFetchWithChromeDriver"
-PURPOSE = "Verify URL loads and cookies are stored in Chrome."
-CRITERIA = """
-This test will fail if the URL fails to load or the cookie is not saved.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "desktopui"
-TEST_TYPE = "client"
-
-DOC = """
-This test opens a new browser to the given URL and waits for the specified
-cookie to be saved.
-"""
-
-job.run_test('desktopui_UrlFetchWithChromeDriver', live=True, tag='live')
-job.run_test('desktopui_UrlFetchWithChromeDriver', live=False, tag='not-live')
diff --git a/client/site_tests/desktopui_UrlFetchWithChromeDriver/control.bvt b/client/site_tests/desktopui_UrlFetchWithChromeDriver/control.bvt
deleted file mode 100644
index f45c58c..0000000
--- a/client/site_tests/desktopui_UrlFetchWithChromeDriver/control.bvt
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dshi@chromium.org"
-NAME = "desktopui_UrlFetchWithChromeDriver"
-PURPOSE = "Verify URL loads and cookies are stored in Chrome."
-CRITERIA = """
-This test will fail if the URL fails to load or the cookie is not saved.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-ATTRIBUTES = "suite:experimental"
-TEST_CLASS = "desktopui"
-TEST_TYPE = "client"
-
-DOC = """
-This test opens a new browser to the given URL and waits for the specified
-cookie to be saved.
-"""
-
-job.run_test('desktopui_UrlFetchWithChromeDriver', live=False, tag='not-live')
diff --git a/client/site_tests/desktopui_UrlFetchWithChromeDriver/desktopui_UrlFetchWithChromeDriver.py b/client/site_tests/desktopui_UrlFetchWithChromeDriver/desktopui_UrlFetchWithChromeDriver.py
deleted file mode 100755
index 801f14e..0000000
--- a/client/site_tests/desktopui_UrlFetchWithChromeDriver/desktopui_UrlFetchWithChromeDriver.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-import common
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chromedriver
-from autotest_lib.client.cros import httpd
-
-
-class desktopui_UrlFetchWithChromeDriver(test.test):
-    """Test fetching url and cookie using Chrome Driver."""
-    version = 1
-
-    def initialize(self, live=True):
-        """Initialize the test.
-
-        @param live: Set to True to access external websites. Otherwise, test
-                     with localhost http server. Default value is set to True.
-        """
-        self._live = live
-        super(desktopui_UrlFetchWithChromeDriver, self).initialize()
-
-        if self._live:
-            self._test_url = 'http://www.msn.com/'
-            self._expected_title = 'MSN.com'
-            self._domain = '.msn.com'
-        else:
-            self._test_url = 'http://localhost:8000/hello.html'
-            self._expected_title = 'Hello World'
-            self._domain = 'localhost'
-            self._testServer = httpd.HTTPListener(8000, docroot=self.bindir)
-            self._testServer.run()
-
-
-    def cleanup(self):
-        """Clean up the test environment, e.g., stop local http server."""
-        if not self._live and hasattr(self, '_testServer'):
-            self._testServer.stop()
-        super(desktopui_UrlFetchWithChromeDriver, self).cleanup()
-
-
-    def run_once(self):
-        """Run the test code."""
-        with chromedriver.chromedriver() as chromedriver_instance:
-            driver = chromedriver_instance.driver
-            driver.delete_all_cookies()
-            driver.get(self._test_url)
-
-            logging.info('Expected tab title: %s. Got: %s',
-                         self._expected_title, driver.title)
-            if driver.title != self._expected_title:
-                raise error.TestError('Getting title failed, got title: %s'
-                                      % driver.title)
-
-            cookie_found = any([cookie for cookie in
-                                driver.get_cookies()
-                                if cookie['domain'] == self._domain])
-            if not cookie_found:
-                raise error.TestError('Expected cookie for %s' % self._test_url)
-
diff --git a/client/site_tests/desktopui_UrlFetchWithChromeDriver/hello.html b/client/site_tests/desktopui_UrlFetchWithChromeDriver/hello.html
deleted file mode 100644
index 28e1fff..0000000
--- a/client/site_tests/desktopui_UrlFetchWithChromeDriver/hello.html
+++ /dev/null
@@ -1,13 +0,0 @@
-<html>
-
-<head>
-  <META HTTP-EQUIV="Set-Cookie" CONTENT="mykey=myval">
-  <title>Hello World</title>
-</head>
-
-<body>
-  <b><h1>Hello there!</h1></b>
-</body>
-
-</html>
-
diff --git a/client/site_tests/detachablebase_TriggerHammerd/control b/client/site_tests/detachablebase_TriggerHammerd/control
index 4bf9c82..5b519b8 100644
--- a/client/site_tests/detachablebase_TriggerHammerd/control
+++ b/client/site_tests/detachablebase_TriggerHammerd/control
@@ -3,7 +3,7 @@
 # found in the LICENSE file.
 
 NAME = "detachablebase_TriggerHammerd"
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 PURPOSE = "Check hammerd starts correctly on boot."
 CRITERIA = "This test will fail if hammerd returns non-zero value."
 TIME = "SHORT"
@@ -12,6 +12,7 @@
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:bvt-perbuild"
 DEPENDENCIES = "detachablebase"
+PY_VERSION = 3
 
 DOC = """
 The test checks hammerd upstart job is triggered on boot and exits normally.
diff --git a/client/site_tests/display_ClientChameleonConnection/control b/client/site_tests/display_ClientChameleonConnection/control
deleted file mode 100644
index 6d2693c..0000000
--- a/client/site_tests/display_ClientChameleonConnection/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'chromeos-chameleon'
-NAME = 'display_ClientChameleonConnection'
-PURPOSE = 'Chameleon connection test from client-side.'
-CRITERIA = 'This test fails if DUT and Chameleon are not connected properly.'
-ATTRIBUTES = "suite:chameleon_dp, suite:chameleon_dp_hdmi, suite:chameleon_hdmi, suite:chameleon_vga"
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'display'
-TEST_TYPE = 'client'
-DEPENDENCIES = 'chameleon'
-
-DOC = """
-This test checks the connection between DUT and Chameleon.
-"""
-
-host = next(iter(job.hosts))
-job.run_test('display_ClientChameleonConnection', host=host, args=args)
diff --git a/client/site_tests/display_ClientChameleonConnection/display_ClientChameleonConnection.py b/client/site_tests/display_ClientChameleonConnection/display_ClientChameleonConnection.py
deleted file mode 100755
index 7aac6bc..0000000
--- a/client/site_tests/display_ClientChameleonConnection/display_ClientChameleonConnection.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This is a client-side test to check the Chameleon connection."""
-
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import constants
-from autotest_lib.client.cros.chameleon import chameleon
-from autotest_lib.client.cros.chameleon import chameleon_port_finder
-from autotest_lib.client.cros.multimedia import local_facade_factory
-
-
-class display_ClientChameleonConnection(test.test):
-    """Chameleon connection client test.
-
-    This test talks to a Chameleon board from DUT. Try to plug the Chameleon
-    ports and see if DUT detects them.
-    """
-    version = 1
-
-    def run_once(self, host, args):
-        ext_paths = [constants.DISPLAY_TEST_EXTENSION]
-        with chrome.Chrome(extension_paths=ext_paths, autotest_ext=True) as cr:
-            factory = local_facade_factory.LocalFacadeFactory(cr)
-            display_facade = factory.create_display_facade()
-
-            chameleon_board = chameleon.create_chameleon_board(host.hostname,
-                                                               args)
-            chameleon_board.setup_and_reset(self.outputdir)
-            finder = chameleon_port_finder.ChameleonVideoInputFinder(
-                    chameleon_board, display_facade)
-            ports = finder.find_all_ports()
-
-            connected_ports = ports.connected
-            dut_failed_ports = ports.failed
-
-            msg = str(finder)
-            logging.debug(msg)
-
-            if dut_failed_ports or not connected_ports:
-                raise error.TestFail(msg)
diff --git a/client/site_tests/display_DisplayContainEdid/control b/client/site_tests/display_DisplayContainEdid/control
index 2979a32..9ae2097 100644
--- a/client/site_tests/display_DisplayContainEdid/control
+++ b/client/site_tests/display_DisplayContainEdid/control
@@ -11,6 +11,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'display'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Test that the internal display's EDID data is correctly plumbed up to
diff --git a/client/site_tests/display_DisplayContainEdid/display_DisplayContainEdid.py b/client/site_tests/display_DisplayContainEdid/display_DisplayContainEdid.py
index 40fb467..baf18d1 100755
--- a/client/site_tests/display_DisplayContainEdid/display_DisplayContainEdid.py
+++ b/client/site_tests/display_DisplayContainEdid/display_DisplayContainEdid.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -11,7 +12,7 @@
 
 class display_DisplayContainEdid(test.test):
     """
-    Verifies that display information returned from Chrome OS specific
+    Verifies that display information returned from the ChromeOS-specific
     chrome.system.display API contains EDID information.
     """
     version = 1
diff --git a/client/site_tests/display_InternalDisplayRotation/control b/client/site_tests/display_InternalDisplayRotation/control
index 9d1fdbf..fc2e26d 100644
--- a/client/site_tests/display_InternalDisplayRotation/control
+++ b/client/site_tests/display_InternalDisplayRotation/control
@@ -11,6 +11,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'display'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Change orientation of internal display
diff --git a/client/site_tests/display_InternalDisplayRotation/display_InternalDisplayRotation.py b/client/site_tests/display_InternalDisplayRotation/display_InternalDisplayRotation.py
index 59ee60a..7b95973 100755
--- a/client/site_tests/display_InternalDisplayRotation/display_InternalDisplayRotation.py
+++ b/client/site_tests/display_InternalDisplayRotation/display_InternalDisplayRotation.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,7 +8,7 @@
 
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.multimedia import display_facade_native
+from autotest_lib.client.cros.multimedia import display_facade
 from autotest_lib.client.cros.multimedia import facade_resource
 
 
@@ -33,7 +34,7 @@
         """Test to rotate internal display"""
         facade = facade_resource.FacadeResource()
         facade.start_default_chrome()
-        self.display_facade = display_facade_native.DisplayFacadeNative(facade)
+        self.display_facade = display_facade.DisplayFacadeLocal(facade)
         self.internal_display_id = self.display_facade.get_internal_display_id()
         logging.info("Internal display ID is %s", self.internal_display_id)
         rotation_before_starts = self.display_facade.get_display_rotation(
@@ -50,4 +51,4 @@
                     self.internal_display_id)
             logging.info("Internal display rotation is set to %s", rotation)
             if rotation != angle:
-                raise error.TestFail('Failed to set %d rotation' % angle)
\ No newline at end of file
+                raise error.TestFail('Failed to set %d rotation' % angle)
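
The hunk above replaces the removed display_facade_native.DisplayFacadeNative wrapper with display_facade.DisplayFacadeLocal. A condensed sketch of the new construction path, assuming only the calls that already appear in the hunk, is:

from autotest_lib.client.cros.multimedia import display_facade
from autotest_lib.client.cros.multimedia import facade_resource

# Old (removed): display_facade_native.DisplayFacadeNative(facade)
# New (added):   display_facade.DisplayFacadeLocal(facade)
facade = facade_resource.FacadeResource()
facade.start_default_chrome()
display = display_facade.DisplayFacadeLocal(facade)

display_id = display.get_internal_display_id()
rotation = display.get_display_rotation(display_id)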
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/amazon_mouse.prop b/client/site_tests/documentscan_AppTestWithFakeLorgnette/amazon_mouse.prop
deleted file mode 100644
index db73f40..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/amazon_mouse.prop
+++ /dev/null
@@ -1,24 +0,0 @@
-N: Amazon Test Mouse
-I: 0003 04f2 0939 0111
-P: 00 00 00 00 00 00 00 00
-B: 00 17 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 07 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 02 03 01 00 00 00 00 00 00
-B: 03 00 00 00 00 00 00 00 00
-B: 04 10 00 00 00 00 00 00 00
-B: 05 00 00 00 00 00 00 00 00
-B: 11 00 00 00 00 00 00 00 00
-B: 12 00 00 00 00 00 00 00 00
-B: 15 00 00 00 00 00 00 00 00
-B: 15 00 00 00 00 00 00 00 00
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/button_click.event b/client/site_tests/documentscan_AppTestWithFakeLorgnette/button_click.event
deleted file mode 100644
index 16c94b0..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/button_click.event
+++ /dev/null
@@ -1,1475 +0,0 @@
-E: 1429740164.606158 0002 0000 -4
-E: 1429740164.606158 0002 0001 -1
-E: 1429740164.606158 0000 0000 0
-E: 1429740164.614154 0002 0000 -8
-E: 1429740164.614154 0002 0001 -5
-E: 1429740164.614154 0000 0000 0
-E: 1429740164.622155 0002 0000 -7
-E: 1429740164.622155 0002 0001 -5
-E: 1429740164.622155 0000 0000 0
-E: 1429740164.630104 0002 0000 -8
-E: 1429740164.630104 0002 0001 -7
-E: 1429740164.630104 0000 0000 0
-E: 1429740164.638158 0002 0000 -8
-E: 1429740164.638158 0002 0001 -6
-E: 1429740164.638158 0000 0000 0
-E: 1429740164.646157 0002 0000 -8
-E: 1429740164.646157 0002 0001 -6
-E: 1429740164.646157 0000 0000 0
-E: 1429740164.654093 0002 0000 -7
-E: 1429740164.654093 0002 0001 -7
-E: 1429740164.654093 0000 0000 0
-E: 1429740164.662091 0002 0000 -6
-E: 1429740164.662091 0002 0001 -6
-E: 1429740164.662091 0000 0000 0
-E: 1429740164.670154 0002 0000 -7
-E: 1429740164.670154 0002 0001 -5
-E: 1429740164.670154 0000 0000 0
-E: 1429740164.678155 0002 0000 -7
-E: 1429740164.678155 0002 0001 -6
-E: 1429740164.678155 0000 0000 0
-E: 1429740164.686153 0002 0000 -8
-E: 1429740164.686153 0002 0001 -7
-E: 1429740164.686153 0000 0000 0
-E: 1429740164.694154 0002 0000 -10
-E: 1429740164.694154 0002 0001 -7
-E: 1429740164.694154 0000 0000 0
-E: 1429740164.702157 0002 0000 -9
-E: 1429740164.702157 0002 0001 -7
-E: 1429740164.702157 0000 0000 0
-E: 1429740164.710157 0002 0000 -11
-E: 1429740164.710157 0002 0001 -7
-E: 1429740164.710157 0000 0000 0
-E: 1429740164.718158 0002 0000 -10
-E: 1429740164.718158 0002 0001 -7
-E: 1429740164.718158 0000 0000 0
-E: 1429740164.726106 0002 0000 -9
-E: 1429740164.726106 0002 0001 -6
-E: 1429740164.726106 0000 0000 0
-E: 1429740164.734158 0002 0000 -10
-E: 1429740164.734158 0002 0001 -7
-E: 1429740164.734158 0000 0000 0
-E: 1429740164.742103 0002 0000 -12
-E: 1429740164.742103 0002 0001 -7
-E: 1429740164.742103 0000 0000 0
-E: 1429740164.750104 0002 0000 -11
-E: 1429740164.750104 0002 0001 -8
-E: 1429740164.750104 0000 0000 0
-E: 1429740164.758156 0002 0000 -11
-E: 1429740164.758156 0002 0001 -7
-E: 1429740164.758156 0000 0000 0
-E: 1429740164.766158 0002 0000 -12
-E: 1429740164.766158 0002 0001 -8
-E: 1429740164.766158 0000 0000 0
-E: 1429740164.774089 0002 0000 -11
-E: 1429740164.774089 0002 0001 -6
-E: 1429740164.774089 0000 0000 0
-E: 1429740164.782089 0002 0000 -10
-E: 1429740164.782089 0002 0001 -8
-E: 1429740164.782089 0000 0000 0
-E: 1429740164.790079 0002 0000 -11
-E: 1429740164.790079 0002 0001 -8
-E: 1429740164.790079 0000 0000 0
-E: 1429740164.798090 0002 0000 -8
-E: 1429740164.798090 0002 0001 -6
-E: 1429740164.798090 0000 0000 0
-E: 1429740164.806090 0002 0000 -10
-E: 1429740164.806090 0002 0001 -8
-E: 1429740164.806090 0000 0000 0
-E: 1429740164.814088 0002 0000 -10
-E: 1429740164.814088 0002 0001 -6
-E: 1429740164.814088 0000 0000 0
-E: 1429740164.822160 0002 0000 -11
-E: 1429740164.822160 0002 0001 -8
-E: 1429740164.822160 0000 0000 0
-E: 1429740164.830107 0002 0000 -12
-E: 1429740164.830107 0002 0001 -7
-E: 1429740164.830107 0000 0000 0
-E: 1429740164.838159 0002 0000 -11
-E: 1429740164.838159 0002 0001 -7
-E: 1429740164.838159 0000 0000 0
-E: 1429740164.846090 0002 0000 -12
-E: 1429740164.846090 0002 0001 -7
-E: 1429740164.846090 0000 0000 0
-E: 1429740164.854159 0002 0000 -10
-E: 1429740164.854159 0002 0001 -8
-E: 1429740164.854159 0000 0000 0
-E: 1429740164.862157 0002 0000 -13
-E: 1429740164.862157 0002 0001 -8
-E: 1429740164.862157 0000 0000 0
-E: 1429740164.870155 0002 0000 -10
-E: 1429740164.870155 0002 0001 -8
-E: 1429740164.870155 0000 0000 0
-E: 1429740164.878157 0002 0000 -10
-E: 1429740164.878157 0002 0001 -8
-E: 1429740164.878157 0000 0000 0
-E: 1429740164.886154 0002 0000 -10
-E: 1429740164.886154 0002 0001 -8
-E: 1429740164.886154 0000 0000 0
-E: 1429740164.894157 0002 0000 -10
-E: 1429740164.894157 0002 0001 -8
-E: 1429740164.894157 0000 0000 0
-E: 1429740164.902158 0002 0000 -9
-E: 1429740164.902158 0002 0001 -10
-E: 1429740164.902158 0000 0000 0
-E: 1429740164.910089 0002 0000 -9
-E: 1429740164.910089 0002 0001 -9
-E: 1429740164.910089 0000 0000 0
-E: 1429740164.918088 0002 0000 -8
-E: 1429740164.918088 0002 0001 -8
-E: 1429740164.918088 0000 0000 0
-E: 1429740164.926158 0002 0000 -8
-E: 1429740164.926158 0002 0001 -10
-E: 1429740164.926158 0000 0000 0
-E: 1429740164.934157 0002 0000 -9
-E: 1429740164.934157 0002 0001 -9
-E: 1429740164.934157 0000 0000 0
-E: 1429740164.942087 0002 0000 -8
-E: 1429740164.942087 0002 0001 -9
-E: 1429740164.942087 0000 0000 0
-E: 1429740164.950160 0002 0000 -8
-E: 1429740164.950160 0002 0001 -9
-E: 1429740164.950160 0000 0000 0
-E: 1429740164.958160 0002 0000 -7
-E: 1429740164.958160 0002 0001 -9
-E: 1429740164.958160 0000 0000 0
-E: 1429740164.966157 0002 0000 -6
-E: 1429740164.966157 0002 0001 -8
-E: 1429740164.966157 0000 0000 0
-E: 1429740164.974090 0002 0000 -7
-E: 1429740164.974090 0002 0001 -7
-E: 1429740164.974090 0000 0000 0
-E: 1429740164.982160 0002 0000 -5
-E: 1429740164.982160 0002 0001 -7
-E: 1429740164.982160 0000 0000 0
-E: 1429740164.990088 0002 0000 -6
-E: 1429740164.990088 0002 0001 -8
-E: 1429740164.990088 0000 0000 0
-E: 1429740164.998164 0002 0000 -6
-E: 1429740164.998164 0002 0001 -8
-E: 1429740164.998164 0000 0000 0
-E: 1429740165.006094 0002 0000 -7
-E: 1429740165.006094 0002 0001 -7
-E: 1429740165.006094 0000 0000 0
-E: 1429740165.014159 0002 0000 -6
-E: 1429740165.014159 0002 0001 -7
-E: 1429740165.014159 0000 0000 0
-E: 1429740165.022089 0002 0000 -7
-E: 1429740165.022089 0002 0001 -7
-E: 1429740165.022089 0000 0000 0
-E: 1429740165.030160 0002 0000 -7
-E: 1429740165.030160 0002 0001 -7
-E: 1429740165.030160 0000 0000 0
-E: 1429740165.038090 0002 0000 -7
-E: 1429740165.038090 0002 0001 -7
-E: 1429740165.038090 0000 0000 0
-E: 1429740165.046085 0002 0000 -7
-E: 1429740165.046085 0002 0001 -6
-E: 1429740165.046085 0000 0000 0
-E: 1429740165.054092 0002 0000 -8
-E: 1429740165.054092 0002 0001 -6
-E: 1429740165.054092 0000 0000 0
-E: 1429740165.062111 0002 0000 -7
-E: 1429740165.062111 0002 0001 -6
-E: 1429740165.062111 0000 0000 0
-E: 1429740165.070090 0002 0000 -7
-E: 1429740165.070090 0002 0001 -6
-E: 1429740165.070090 0000 0000 0
-E: 1429740165.078091 0002 0000 -7
-E: 1429740165.078091 0002 0001 -5
-E: 1429740165.078091 0000 0000 0
-E: 1429740165.086092 0002 0000 -5
-E: 1429740165.086092 0002 0001 -5
-E: 1429740165.086092 0000 0000 0
-E: 1429740165.094095 0002 0000 -5
-E: 1429740165.094095 0002 0001 -5
-E: 1429740165.094095 0000 0000 0
-E: 1429740165.102115 0002 0000 -5
-E: 1429740165.102115 0002 0001 -4
-E: 1429740165.102115 0000 0000 0
-E: 1429740165.110095 0002 0000 -6
-E: 1429740165.110095 0002 0001 -5
-E: 1429740165.110095 0000 0000 0
-E: 1429740165.118161 0002 0000 -6
-E: 1429740165.118161 0002 0001 -4
-E: 1429740165.118161 0000 0000 0
-E: 1429740165.126165 0002 0000 -5
-E: 1429740165.126165 0002 0001 -3
-E: 1429740165.126165 0000 0000 0
-E: 1429740165.134085 0002 0000 -4
-E: 1429740165.134085 0002 0001 -4
-E: 1429740165.134085 0000 0000 0
-E: 1429740165.142096 0002 0000 -4
-E: 1429740165.142096 0002 0001 -3
-E: 1429740165.142096 0000 0000 0
-E: 1429740165.150110 0002 0000 -4
-E: 1429740165.150110 0002 0001 -3
-E: 1429740165.150110 0000 0000 0
-E: 1429740165.158086 0002 0000 -5
-E: 1429740165.158086 0002 0001 -2
-E: 1429740165.158086 0000 0000 0
-E: 1429740165.166161 0002 0000 -4
-E: 1429740165.166161 0002 0001 -4
-E: 1429740165.166161 0000 0000 0
-E: 1429740165.174092 0002 0000 -4
-E: 1429740165.174092 0002 0001 -2
-E: 1429740165.174092 0000 0000 0
-E: 1429740165.182162 0002 0000 -4
-E: 1429740165.182162 0002 0001 -2
-E: 1429740165.182162 0000 0000 0
-E: 1429740165.190093 0002 0000 -4
-E: 1429740165.190093 0002 0001 -3
-E: 1429740165.190093 0000 0000 0
-E: 1429740165.198163 0002 0000 -4
-E: 1429740165.198163 0002 0001 -2
-E: 1429740165.198163 0000 0000 0
-E: 1429740165.206092 0002 0000 -4
-E: 1429740165.206092 0002 0001 -3
-E: 1429740165.206092 0000 0000 0
-E: 1429740165.214161 0002 0000 -4
-E: 1429740165.214161 0002 0001 -2
-E: 1429740165.214161 0000 0000 0
-E: 1429740165.222093 0002 0000 -4
-E: 1429740165.222093 0002 0001 -3
-E: 1429740165.222093 0000 0000 0
-E: 1429740165.230161 0002 0000 -4
-E: 1429740165.230161 0002 0001 -3
-E: 1429740165.230161 0000 0000 0
-E: 1429740165.238094 0002 0000 -4
-E: 1429740165.238094 0002 0001 -2
-E: 1429740165.238094 0000 0000 0
-E: 1429740165.246093 0002 0000 -3
-E: 1429740165.246093 0002 0001 -3
-E: 1429740165.246093 0000 0000 0
-E: 1429740165.254092 0002 0000 -2
-E: 1429740165.254092 0002 0001 -1
-E: 1429740165.254092 0000 0000 0
-E: 1429740165.262112 0002 0000 -2
-E: 1429740165.262112 0002 0001 -2
-E: 1429740165.262112 0000 0000 0
-E: 1429740165.270092 0002 0000 -1
-E: 1429740165.270092 0002 0001 -2
-E: 1429740165.270092 0000 0000 0
-E: 1429740165.278111 0002 0000 -2
-E: 1429740165.278111 0002 0001 -2
-E: 1429740165.278111 0000 0000 0
-E: 1429740165.286089 0002 0000 -2
-E: 1429740165.286089 0002 0001 -3
-E: 1429740165.286089 0000 0000 0
-E: 1429740165.294098 0002 0000 -2
-E: 1429740165.294098 0002 0001 -2
-E: 1429740165.294098 0000 0000 0
-E: 1429740165.302085 0002 0000 -1
-E: 1429740165.302085 0002 0001 -2
-E: 1429740165.302085 0000 0000 0
-E: 1429740165.310095 0002 0000 -2
-E: 1429740165.310095 0002 0001 -2
-E: 1429740165.310095 0000 0000 0
-E: 1429740165.318162 0002 0000 -2
-E: 1429740165.318162 0002 0001 -2
-E: 1429740165.318162 0000 0000 0
-E: 1429740165.326163 0002 0000 -3
-E: 1429740165.326163 0002 0001 -3
-E: 1429740165.326163 0000 0000 0
-E: 1429740165.334162 0002 0000 -2
-E: 1429740165.334162 0002 0001 -2
-E: 1429740165.334162 0000 0000 0
-E: 1429740165.342095 0002 0000 -2
-E: 1429740165.342095 0002 0001 -3
-E: 1429740165.342095 0000 0000 0
-E: 1429740165.350161 0002 0000 -2
-E: 1429740165.350161 0002 0001 -2
-E: 1429740165.350161 0000 0000 0
-E: 1429740165.358114 0002 0000 -2
-E: 1429740165.358114 0002 0001 -2
-E: 1429740165.358114 0000 0000 0
-E: 1429740165.366162 0002 0000 -2
-E: 1429740165.366162 0002 0001 -3
-E: 1429740165.366162 0000 0000 0
-E: 1429740165.374158 0002 0000 -2
-E: 1429740165.374158 0002 0001 -2
-E: 1429740165.374158 0000 0000 0
-E: 1429740165.382161 0002 0000 -3
-E: 1429740165.382161 0002 0001 -3
-E: 1429740165.382161 0000 0000 0
-E: 1429740165.390092 0002 0000 -3
-E: 1429740165.390092 0002 0001 -3
-E: 1429740165.390092 0000 0000 0
-E: 1429740165.398088 0002 0000 -4
-E: 1429740165.398088 0002 0001 -3
-E: 1429740165.398088 0000 0000 0
-E: 1429740165.406093 0002 0000 -4
-E: 1429740165.406093 0002 0001 -3
-E: 1429740165.406093 0000 0000 0
-E: 1429740165.414112 0002 0000 -3
-E: 1429740165.414112 0002 0001 -2
-E: 1429740165.414112 0000 0000 0
-E: 1429740165.422093 0002 0000 -3
-E: 1429740165.422093 0002 0001 -2
-E: 1429740165.422093 0000 0000 0
-E: 1429740165.430097 0002 0000 -3
-E: 1429740165.430097 0002 0001 -3
-E: 1429740165.430097 0000 0000 0
-E: 1429740165.438093 0002 0000 -3
-E: 1429740165.438093 0002 0001 -2
-E: 1429740165.438093 0000 0000 0
-E: 1429740165.446095 0002 0000 -4
-E: 1429740165.446095 0002 0001 -3
-E: 1429740165.446095 0000 0000 0
-E: 1429740165.454093 0002 0000 -2
-E: 1429740165.454093 0002 0001 -2
-E: 1429740165.454093 0000 0000 0
-E: 1429740165.462096 0002 0000 -4
-E: 1429740165.462096 0002 0001 -2
-E: 1429740165.462096 0000 0000 0
-E: 1429740165.470094 0002 0000 -2
-E: 1429740165.470094 0002 0001 -2
-E: 1429740165.470094 0000 0000 0
-E: 1429740165.478114 0002 0000 -3
-E: 1429740165.478114 0002 0001 -2
-E: 1429740165.478114 0000 0000 0
-E: 1429740165.486092 0002 0000 -2
-E: 1429740165.486092 0002 0001 -2
-E: 1429740165.486092 0000 0000 0
-E: 1429740165.494097 0002 0000 -3
-E: 1429740165.494097 0002 0001 -2
-E: 1429740165.494097 0000 0000 0
-E: 1429740165.502098 0002 0000 -3
-E: 1429740165.502098 0002 0001 -2
-E: 1429740165.502098 0000 0000 0
-E: 1429740165.510097 0002 0000 -3
-E: 1429740165.510097 0002 0001 -2
-E: 1429740165.510097 0000 0000 0
-E: 1429740165.518116 0002 0000 -3
-E: 1429740165.518116 0002 0001 -1
-E: 1429740165.518116 0000 0000 0
-E: 1429740165.526164 0002 0000 -3
-E: 1429740165.526164 0002 0001 -3
-E: 1429740165.526164 0000 0000 0
-E: 1429740165.534163 0002 0000 -4
-E: 1429740165.534163 0002 0001 -2
-E: 1429740165.534163 0000 0000 0
-E: 1429740165.542098 0002 0000 -2
-E: 1429740165.542098 0002 0001 -1
-E: 1429740165.542098 0000 0000 0
-E: 1429740165.550167 0002 0000 -3
-E: 1429740165.550167 0002 0001 -2
-E: 1429740165.550167 0000 0000 0
-E: 1429740165.558096 0002 0000 -3
-E: 1429740165.558096 0002 0001 -2
-E: 1429740165.558096 0000 0000 0
-E: 1429740165.566094 0002 0000 -3
-E: 1429740165.566094 0002 0001 -2
-E: 1429740165.566094 0000 0000 0
-E: 1429740165.574094 0002 0000 -3
-E: 1429740165.574094 0002 0001 -2
-E: 1429740165.574094 0000 0000 0
-E: 1429740165.582105 0002 0000 -4
-E: 1429740165.582105 0002 0001 -2
-E: 1429740165.582105 0000 0000 0
-E: 1429740165.590106 0002 0000 -2
-E: 1429740165.590106 0002 0001 -3
-E: 1429740165.590106 0000 0000 0
-E: 1429740165.598091 0002 0000 -4
-E: 1429740165.598091 0002 0001 -3
-E: 1429740165.598091 0000 0000 0
-E: 1429740165.606096 0002 0000 -3
-E: 1429740165.606096 0002 0001 -2
-E: 1429740165.606096 0000 0000 0
-E: 1429740165.614164 0002 0000 -4
-E: 1429740165.614164 0002 0001 -3
-E: 1429740165.614164 0000 0000 0
-E: 1429740165.622098 0002 0000 -4
-E: 1429740165.622098 0002 0001 -3
-E: 1429740165.622098 0000 0000 0
-E: 1429740165.630094 0002 0000 -4
-E: 1429740165.630094 0002 0001 -3
-E: 1429740165.630094 0000 0000 0
-E: 1429740165.638094 0002 0000 -4
-E: 1429740165.638094 0002 0001 -3
-E: 1429740165.638094 0000 0000 0
-E: 1429740165.646098 0002 0000 -4
-E: 1429740165.646098 0002 0001 -4
-E: 1429740165.646098 0000 0000 0
-E: 1429740165.654096 0002 0000 -4
-E: 1429740165.654096 0002 0001 -3
-E: 1429740165.654096 0000 0000 0
-E: 1429740165.662094 0002 0000 -4
-E: 1429740165.662094 0002 0001 -3
-E: 1429740165.662094 0000 0000 0
-E: 1429740165.670095 0002 0000 -3
-E: 1429740165.670095 0002 0001 -4
-E: 1429740165.670095 0000 0000 0
-E: 1429740165.678114 0002 0000 -3
-E: 1429740165.678114 0002 0001 -4
-E: 1429740165.678114 0000 0000 0
-E: 1429740165.686092 0002 0000 -3
-E: 1429740165.686092 0002 0001 -3
-E: 1429740165.686092 0000 0000 0
-E: 1429740165.694099 0002 0000 -3
-E: 1429740165.694099 0002 0001 -4
-E: 1429740165.694099 0000 0000 0
-E: 1429740165.702165 0002 0000 -3
-E: 1429740165.702165 0002 0001 -4
-E: 1429740165.702165 0000 0000 0
-E: 1429740165.710096 0002 0000 -3
-E: 1429740165.710096 0002 0001 -4
-E: 1429740165.710096 0000 0000 0
-E: 1429740165.718165 0002 0000 -3
-E: 1429740165.718165 0002 0001 -4
-E: 1429740165.718165 0000 0000 0
-E: 1429740165.726098 0002 0000 -4
-E: 1429740165.726098 0002 0001 -5
-E: 1429740165.726098 0000 0000 0
-E: 1429740165.734167 0002 0000 -3
-E: 1429740165.734167 0002 0001 -5
-E: 1429740165.734167 0000 0000 0
-E: 1429740165.742167 0002 0000 -4
-E: 1429740165.742167 0002 0001 -5
-E: 1429740165.742167 0000 0000 0
-E: 1429740165.750168 0002 0000 -4
-E: 1429740165.750168 0002 0001 -5
-E: 1429740165.750168 0000 0000 0
-E: 1429740165.758165 0002 0000 -4
-E: 1429740165.758165 0002 0001 -4
-E: 1429740165.758165 0000 0000 0
-E: 1429740165.766171 0002 0000 -4
-E: 1429740165.766171 0002 0001 -6
-E: 1429740165.766171 0000 0000 0
-E: 1429740165.774167 0002 0000 -4
-E: 1429740165.774167 0002 0001 -4
-E: 1429740165.774167 0000 0000 0
-E: 1429740165.782169 0002 0000 -5
-E: 1429740165.782169 0002 0001 -6
-E: 1429740165.782169 0000 0000 0
-E: 1429740165.790169 0002 0000 -6
-E: 1429740165.790169 0002 0001 -5
-E: 1429740165.790169 0000 0000 0
-E: 1429740165.798167 0002 0000 -5
-E: 1429740165.798167 0002 0001 -6
-E: 1429740165.798167 0000 0000 0
-E: 1429740165.806167 0002 0000 -6
-E: 1429740165.806167 0002 0001 -5
-E: 1429740165.806167 0000 0000 0
-E: 1429740165.814102 0002 0000 -5
-E: 1429740165.814102 0002 0001 -7
-E: 1429740165.814102 0000 0000 0
-E: 1429740165.822165 0002 0000 -6
-E: 1429740165.822165 0002 0001 -6
-E: 1429740165.822165 0000 0000 0
-E: 1429740165.830118 0002 0000 -6
-E: 1429740165.830118 0002 0001 -7
-E: 1429740165.830118 0000 0000 0
-E: 1429740165.838167 0002 0000 -7
-E: 1429740165.838167 0002 0001 -6
-E: 1429740165.838167 0000 0000 0
-E: 1429740165.846168 0002 0000 -5
-E: 1429740165.846168 0002 0001 -5
-E: 1429740165.846168 0000 0000 0
-E: 1429740165.854171 0002 0000 -6
-E: 1429740165.854171 0002 0001 -7
-E: 1429740165.854171 0000 0000 0
-E: 1429740165.862171 0002 0000 -7
-E: 1429740165.862171 0002 0001 -6
-E: 1429740165.862171 0000 0000 0
-E: 1429740165.870167 0002 0000 -6
-E: 1429740165.870167 0002 0001 -6
-E: 1429740165.870167 0000 0000 0
-E: 1429740165.878170 0002 0000 -6
-E: 1429740165.878170 0002 0001 -6
-E: 1429740165.878170 0000 0000 0
-E: 1429740165.886169 0002 0000 -5
-E: 1429740165.886169 0002 0001 -6
-E: 1429740165.886169 0000 0000 0
-E: 1429740165.894173 0002 0000 -5
-E: 1429740165.894173 0002 0001 -6
-E: 1429740165.894173 0000 0000 0
-E: 1429740165.902169 0002 0000 -5
-E: 1429740165.902169 0002 0001 -7
-E: 1429740165.902169 0000 0000 0
-E: 1429740165.910168 0002 0000 -5
-E: 1429740165.910168 0002 0001 -6
-E: 1429740165.910168 0000 0000 0
-E: 1429740165.918170 0002 0000 -6
-E: 1429740165.918170 0002 0001 -6
-E: 1429740165.918170 0000 0000 0
-E: 1429740165.926169 0002 0000 -5
-E: 1429740165.926169 0002 0001 -7
-E: 1429740165.926169 0000 0000 0
-E: 1429740165.934171 0002 0000 -6
-E: 1429740165.934171 0002 0001 -7
-E: 1429740165.934171 0000 0000 0
-E: 1429740165.942102 0002 0000 -6
-E: 1429740165.942102 0002 0001 -6
-E: 1429740165.942102 0000 0000 0
-E: 1429740165.950171 0002 0000 -5
-E: 1429740165.950171 0002 0001 -7
-E: 1429740165.950171 0000 0000 0
-E: 1429740165.958176 0002 0000 -5
-E: 1429740165.958176 0002 0001 -6
-E: 1429740165.958176 0000 0000 0
-E: 1429740165.966170 0002 0000 -6
-E: 1429740165.966170 0002 0001 -7
-E: 1429740165.966170 0000 0000 0
-E: 1429740165.974170 0002 0000 -5
-E: 1429740165.974170 0002 0001 -6
-E: 1429740165.974170 0000 0000 0
-E: 1429740165.982172 0002 0000 -5
-E: 1429740165.982172 0002 0001 -7
-E: 1429740165.982172 0000 0000 0
-E: 1429740165.990171 0002 0000 -5
-E: 1429740165.990171 0002 0001 -6
-E: 1429740165.990171 0000 0000 0
-E: 1429740165.998151 0002 0000 -4
-E: 1429740165.998151 0002 0001 -7
-E: 1429740165.998151 0000 0000 0
-E: 1429740166.006170 0002 0000 -5
-E: 1429740166.006170 0002 0001 -8
-E: 1429740166.006170 0000 0000 0
-E: 1429740166.014174 0002 0000 -5
-E: 1429740166.014174 0002 0001 -6
-E: 1429740166.014174 0000 0000 0
-E: 1429740166.022177 0002 0000 -4
-E: 1429740166.022177 0002 0001 -8
-E: 1429740166.022177 0000 0000 0
-E: 1429740166.030171 0002 0000 -5
-E: 1429740166.030171 0002 0001 -7
-E: 1429740166.030171 0000 0000 0
-E: 1429740166.038173 0002 0000 -4
-E: 1429740166.038173 0002 0001 -7
-E: 1429740166.038173 0000 0000 0
-E: 1429740166.046171 0002 0000 -5
-E: 1429740166.046171 0002 0001 -8
-E: 1429740166.046171 0000 0000 0
-E: 1429740166.054172 0002 0000 -4
-E: 1429740166.054172 0002 0001 -7
-E: 1429740166.054172 0000 0000 0
-E: 1429740166.062175 0002 0000 -4
-E: 1429740166.062175 0002 0001 -6
-E: 1429740166.062175 0000 0000 0
-E: 1429740166.070106 0002 0000 -5
-E: 1429740166.070106 0002 0001 -8
-E: 1429740166.070106 0000 0000 0
-E: 1429740166.078109 0002 0000 -4
-E: 1429740166.078109 0002 0001 -8
-E: 1429740166.078109 0000 0000 0
-E: 1429740166.086104 0002 0000 -5
-E: 1429740166.086104 0002 0001 -7
-E: 1429740166.086104 0000 0000 0
-E: 1429740166.094126 0002 0000 -5
-E: 1429740166.094126 0002 0001 -7
-E: 1429740166.094126 0000 0000 0
-E: 1429740166.102177 0002 0000 -3
-E: 1429740166.102177 0002 0001 -8
-E: 1429740166.102177 0000 0000 0
-E: 1429740166.110127 0002 0000 -4
-E: 1429740166.110127 0002 0001 -8
-E: 1429740166.110127 0000 0000 0
-E: 1429740166.118175 0002 0000 -5
-E: 1429740166.118175 0002 0001 -7
-E: 1429740166.118175 0000 0000 0
-E: 1429740166.126175 0002 0000 -4
-E: 1429740166.126175 0002 0001 -8
-E: 1429740166.126175 0000 0000 0
-E: 1429740166.134175 0002 0000 -3
-E: 1429740166.134175 0002 0001 -7
-E: 1429740166.134175 0000 0000 0
-E: 1429740166.142177 0002 0000 -4
-E: 1429740166.142177 0002 0001 -8
-E: 1429740166.142177 0000 0000 0
-E: 1429740166.150181 0002 0000 -4
-E: 1429740166.150181 0002 0001 -8
-E: 1429740166.150181 0000 0000 0
-E: 1429740166.158174 0002 0000 -3
-E: 1429740166.158174 0002 0001 -8
-E: 1429740166.158174 0000 0000 0
-E: 1429740166.166178 0002 0000 -4
-E: 1429740166.166178 0002 0001 -7
-E: 1429740166.166178 0000 0000 0
-E: 1429740166.174174 0002 0000 -3
-E: 1429740166.174174 0002 0001 -8
-E: 1429740166.174174 0000 0000 0
-E: 1429740166.182176 0002 0000 -3
-E: 1429740166.182176 0002 0001 -8
-E: 1429740166.182176 0000 0000 0
-E: 1429740166.190176 0002 0000 -3
-E: 1429740166.190176 0002 0001 -8
-E: 1429740166.190176 0000 0000 0
-E: 1429740166.198108 0002 0000 -3
-E: 1429740166.198108 0002 0001 -8
-E: 1429740166.198108 0000 0000 0
-E: 1429740166.206177 0002 0000 -3
-E: 1429740166.206177 0002 0001 -8
-E: 1429740166.206177 0000 0000 0
-E: 1429740166.214176 0002 0000 -3
-E: 1429740166.214176 0002 0001 -8
-E: 1429740166.214176 0000 0000 0
-E: 1429740166.222176 0002 0000 -4
-E: 1429740166.222176 0002 0001 -8
-E: 1429740166.222176 0000 0000 0
-E: 1429740166.230178 0002 0000 -4
-E: 1429740166.230178 0002 0001 -8
-E: 1429740166.230178 0000 0000 0
-E: 1429740166.238178 0002 0000 -3
-E: 1429740166.238178 0002 0001 -8
-E: 1429740166.238178 0000 0000 0
-E: 1429740166.246177 0002 0000 -4
-E: 1429740166.246177 0002 0001 -8
-E: 1429740166.246177 0000 0000 0
-E: 1429740166.254176 0002 0000 -4
-E: 1429740166.254176 0002 0001 -8
-E: 1429740166.254176 0000 0000 0
-E: 1429740166.262177 0002 0000 -3
-E: 1429740166.262177 0002 0001 -8
-E: 1429740166.262177 0000 0000 0
-E: 1429740166.270179 0002 0000 -3
-E: 1429740166.270179 0002 0001 -7
-E: 1429740166.270179 0000 0000 0
-E: 1429740166.278184 0002 0000 -3
-E: 1429740166.278184 0002 0001 -8
-E: 1429740166.278184 0000 0000 0
-E: 1429740166.286179 0002 0000 -3
-E: 1429740166.286179 0002 0001 -9
-E: 1429740166.286179 0000 0000 0
-E: 1429740166.294179 0002 0000 -4
-E: 1429740166.294179 0002 0001 -8
-E: 1429740166.294179 0000 0000 0
-E: 1429740166.302179 0002 0000 -3
-E: 1429740166.302179 0002 0001 -7
-E: 1429740166.302179 0000 0000 0
-E: 1429740166.310178 0002 0000 -4
-E: 1429740166.310178 0002 0001 -9
-E: 1429740166.310178 0000 0000 0
-E: 1429740166.318178 0002 0000 -4
-E: 1429740166.318178 0002 0001 -8
-E: 1429740166.318178 0000 0000 0
-E: 1429740166.326111 0002 0000 -5
-E: 1429740166.326111 0002 0001 -8
-E: 1429740166.326111 0000 0000 0
-E: 1429740166.334177 0002 0000 -5
-E: 1429740166.334177 0002 0001 -9
-E: 1429740166.334177 0000 0000 0
-E: 1429740166.342178 0002 0000 -5
-E: 1429740166.342178 0002 0001 -8
-E: 1429740166.342178 0000 0000 0
-E: 1429740166.350180 0002 0000 -5
-E: 1429740166.350180 0002 0001 -8
-E: 1429740166.350180 0000 0000 0
-E: 1429740166.358185 0002 0000 -5
-E: 1429740166.358185 0002 0001 -8
-E: 1429740166.358185 0000 0000 0
-E: 1429740166.366181 0002 0000 -4
-E: 1429740166.366181 0002 0001 -8
-E: 1429740166.366181 0000 0000 0
-E: 1429740166.374181 0002 0000 -5
-E: 1429740166.374181 0002 0001 -8
-E: 1429740166.374181 0000 0000 0
-E: 1429740166.382179 0002 0000 -4
-E: 1429740166.382179 0002 0001 -8
-E: 1429740166.382179 0000 0000 0
-E: 1429740166.390179 0002 0000 -4
-E: 1429740166.390179 0002 0001 -8
-E: 1429740166.390179 0000 0000 0
-E: 1429740166.398180 0002 0000 -5
-E: 1429740166.398180 0002 0001 -7
-E: 1429740166.398180 0000 0000 0
-E: 1429740166.406185 0002 0000 -5
-E: 1429740166.406185 0002 0001 -9
-E: 1429740166.406185 0000 0000 0
-E: 1429740166.414182 0002 0000 -4
-E: 1429740166.414182 0002 0001 -9
-E: 1429740166.414182 0000 0000 0
-E: 1429740166.422182 0002 0000 -6
-E: 1429740166.422182 0002 0001 -8
-E: 1429740166.422182 0000 0000 0
-E: 1429740166.430183 0002 0000 -6
-E: 1429740166.430183 0002 0001 -9
-E: 1429740166.430183 0000 0000 0
-E: 1429740166.438182 0002 0000 -5
-E: 1429740166.438182 0002 0001 -9
-E: 1429740166.438182 0000 0000 0
-E: 1429740166.446181 0002 0000 -4
-E: 1429740166.446181 0002 0001 -8
-E: 1429740166.446181 0000 0000 0
-E: 1429740166.454115 0002 0000 -5
-E: 1429740166.454115 0002 0001 -9
-E: 1429740166.454115 0000 0000 0
-E: 1429740166.462182 0002 0000 -4
-E: 1429740166.462182 0002 0001 -8
-E: 1429740166.462182 0000 0000 0
-E: 1429740166.470182 0002 0000 -5
-E: 1429740166.470182 0002 0001 -8
-E: 1429740166.470182 0000 0000 0
-E: 1429740166.478178 0002 0000 -4
-E: 1429740166.478178 0002 0001 -8
-E: 1429740166.478178 0000 0000 0
-E: 1429740166.486180 0002 0000 -5
-E: 1429740166.486180 0002 0001 -7
-E: 1429740166.486180 0000 0000 0
-E: 1429740166.494182 0002 0000 -4
-E: 1429740166.494182 0002 0001 -9
-E: 1429740166.494182 0000 0000 0
-E: 1429740166.502183 0002 0000 -5
-E: 1429740166.502183 0002 0001 -7
-E: 1429740166.502183 0000 0000 0
-E: 1429740166.510182 0002 0000 -4
-E: 1429740166.510182 0002 0001 -8
-E: 1429740166.510182 0000 0000 0
-E: 1429740166.518183 0002 0000 -4
-E: 1429740166.518183 0002 0001 -8
-E: 1429740166.518183 0000 0000 0
-E: 1429740166.526144 0002 0000 -3
-E: 1429740166.526144 0002 0001 -8
-E: 1429740166.526144 0000 0000 0
-E: 1429740166.534189 0002 0000 -4
-E: 1429740166.534189 0002 0001 -6
-E: 1429740166.534189 0000 0000 0
-E: 1429740166.542184 0002 0000 -3
-E: 1429740166.542184 0002 0001 -7
-E: 1429740166.542184 0000 0000 0
-E: 1429740166.550186 0002 0000 -4
-E: 1429740166.550186 0002 0001 -6
-E: 1429740166.550186 0000 0000 0
-E: 1429740166.558184 0002 0000 -3
-E: 1429740166.558184 0002 0001 -6
-E: 1429740166.558184 0000 0000 0
-E: 1429740166.566184 0002 0000 -5
-E: 1429740166.566184 0002 0001 -7
-E: 1429740166.566184 0000 0000 0
-E: 1429740166.574185 0002 0000 -3
-E: 1429740166.574185 0002 0001 -6
-E: 1429740166.574185 0000 0000 0
-E: 1429740166.582106 0002 0000 -5
-E: 1429740166.582106 0002 0001 -6
-E: 1429740166.582106 0000 0000 0
-E: 1429740166.590186 0002 0000 -4
-E: 1429740166.590186 0002 0001 -7
-E: 1429740166.590186 0000 0000 0
-E: 1429740166.598190 0002 0000 -4
-E: 1429740166.598190 0002 0001 -6
-E: 1429740166.598190 0000 0000 0
-E: 1429740166.606189 0002 0000 -5
-E: 1429740166.606189 0002 0001 -6
-E: 1429740166.606189 0000 0000 0
-E: 1429740166.614187 0002 0000 -5
-E: 1429740166.614187 0002 0001 -7
-E: 1429740166.614187 0000 0000 0
-E: 1429740166.622185 0002 0000 -5
-E: 1429740166.622185 0002 0001 -7
-E: 1429740166.622185 0000 0000 0
-E: 1429740166.630188 0002 0000 -5
-E: 1429740166.630188 0002 0001 -7
-E: 1429740166.630188 0000 0000 0
-E: 1429740166.638186 0002 0000 -4
-E: 1429740166.638186 0002 0001 -6
-E: 1429740166.638186 0000 0000 0
-E: 1429740166.646188 0002 0000 -4
-E: 1429740166.646188 0002 0001 -7
-E: 1429740166.646188 0000 0000 0
-E: 1429740166.654186 0002 0000 -4
-E: 1429740166.654186 0002 0001 -6
-E: 1429740166.654186 0000 0000 0
-E: 1429740166.662191 0002 0000 -3
-E: 1429740166.662191 0002 0001 -6
-E: 1429740166.662191 0000 0000 0
-E: 1429740166.670187 0002 0000 -4
-E: 1429740166.670187 0002 0001 -7
-E: 1429740166.670187 0000 0000 0
-E: 1429740166.678188 0002 0000 -4
-E: 1429740166.678188 0002 0001 -9
-E: 1429740166.678188 0000 0000 0
-E: 1429740166.686185 0002 0000 -4
-E: 1429740166.686185 0002 0001 -7
-E: 1429740166.686185 0000 0000 0
-E: 1429740166.694186 0002 0000 -4
-E: 1429740166.694186 0002 0001 -7
-E: 1429740166.694186 0000 0000 0
-E: 1429740166.702189 0002 0000 -4
-E: 1429740166.702189 0002 0001 -7
-E: 1429740166.702189 0000 0000 0
-E: 1429740166.710117 0002 0000 -4
-E: 1429740166.710117 0002 0001 -7
-E: 1429740166.710117 0000 0000 0
-E: 1429740166.718185 0002 0000 -3
-E: 1429740166.718185 0002 0001 -7
-E: 1429740166.718185 0000 0000 0
-E: 1429740166.726192 0002 0000 -4
-E: 1429740166.726192 0002 0001 -7
-E: 1429740166.726192 0000 0000 0
-E: 1429740166.734190 0002 0000 -3
-E: 1429740166.734190 0002 0001 -5
-E: 1429740166.734190 0000 0000 0
-E: 1429740166.742187 0002 0000 -3
-E: 1429740166.742187 0002 0001 -7
-E: 1429740166.742187 0000 0000 0
-E: 1429740166.750187 0002 0000 -3
-E: 1429740166.750187 0002 0001 -6
-E: 1429740166.750187 0000 0000 0
-E: 1429740166.758190 0002 0000 -4
-E: 1429740166.758190 0002 0001 -5
-E: 1429740166.758190 0000 0000 0
-E: 1429740166.766189 0002 0000 -3
-E: 1429740166.766189 0002 0001 -6
-E: 1429740166.766189 0000 0000 0
-E: 1429740166.774189 0002 0000 -3
-E: 1429740166.774189 0002 0001 -6
-E: 1429740166.774189 0000 0000 0
-E: 1429740166.782190 0002 0000 -4
-E: 1429740166.782190 0002 0001 -5
-E: 1429740166.782190 0000 0000 0
-E: 1429740166.790193 0002 0000 -2
-E: 1429740166.790193 0002 0001 -6
-E: 1429740166.790193 0000 0000 0
-E: 1429740166.798189 0002 0000 -3
-E: 1429740166.798189 0002 0001 -5
-E: 1429740166.798189 0000 0000 0
-E: 1429740166.806190 0002 0000 -2
-E: 1429740166.806190 0002 0001 -5
-E: 1429740166.806190 0000 0000 0
-E: 1429740166.814191 0002 0000 -2
-E: 1429740166.814191 0002 0001 -6
-E: 1429740166.814191 0000 0000 0
-E: 1429740166.822191 0002 0000 -3
-E: 1429740166.822191 0002 0001 -5
-E: 1429740166.822191 0000 0000 0
-E: 1429740166.830192 0002 0000 -3
-E: 1429740166.830192 0002 0001 -5
-E: 1429740166.830192 0000 0000 0
-E: 1429740166.838123 0002 0000 -2
-E: 1429740166.838123 0002 0001 -5
-E: 1429740166.838123 0000 0000 0
-E: 1429740166.846191 0002 0000 -3
-E: 1429740166.846191 0002 0001 -5
-E: 1429740166.846191 0000 0000 0
-E: 1429740166.854190 0002 0000 -3
-E: 1429740166.854190 0002 0001 -5
-E: 1429740166.854190 0000 0000 0
-E: 1429740166.862193 0002 0000 -2
-E: 1429740166.862193 0002 0001 -5
-E: 1429740166.862193 0000 0000 0
-E: 1429740166.870191 0002 0000 -2
-E: 1429740166.870191 0002 0001 -5
-E: 1429740166.870191 0000 0000 0
-E: 1429740166.878191 0002 0000 -3
-E: 1429740166.878191 0002 0001 -4
-E: 1429740166.878191 0000 0000 0
-E: 1429740166.886190 0002 0000 -2
-E: 1429740166.886190 0002 0001 -5
-E: 1429740166.886190 0000 0000 0
-E: 1429740166.894195 0002 0000 -2
-E: 1429740166.894195 0002 0001 -5
-E: 1429740166.894195 0000 0000 0
-E: 1429740166.902192 0002 0000 -2
-E: 1429740166.902192 0002 0001 -4
-E: 1429740166.902192 0000 0000 0
-E: 1429740166.910192 0002 0000 -2
-E: 1429740166.910192 0002 0001 -4
-E: 1429740166.910192 0000 0000 0
-E: 1429740166.918196 0002 0000 -3
-E: 1429740166.918196 0002 0001 -5
-E: 1429740166.918196 0000 0000 0
-E: 1429740166.926197 0002 0000 -2
-E: 1429740166.926197 0002 0001 -5
-E: 1429740166.926197 0000 0000 0
-E: 1429740166.934194 0002 0000 -3
-E: 1429740166.934194 0002 0001 -6
-E: 1429740166.934194 0000 0000 0
-E: 1429740166.942193 0002 0000 -3
-E: 1429740166.942193 0002 0001 -6
-E: 1429740166.942193 0000 0000 0
-E: 1429740166.950194 0002 0000 -2
-E: 1429740166.950194 0002 0001 -5
-E: 1429740166.950194 0000 0000 0
-E: 1429740166.958193 0002 0000 -3
-E: 1429740166.958193 0002 0001 -6
-E: 1429740166.958193 0000 0000 0
-E: 1429740166.966126 0002 0000 -1
-E: 1429740166.966126 0002 0001 -5
-E: 1429740166.966126 0000 0000 0
-E: 1429740166.974194 0002 0000 -2
-E: 1429740166.974194 0002 0001 -5
-E: 1429740166.974194 0000 0000 0
-E: 1429740166.982193 0002 0000 -2
-E: 1429740166.982193 0002 0001 -6
-E: 1429740166.982193 0000 0000 0
-E: 1429740166.990194 0002 0000 -2
-E: 1429740166.990194 0002 0001 -5
-E: 1429740166.990194 0000 0000 0
-E: 1429740166.998195 0002 0000 -1
-E: 1429740166.998195 0002 0001 -5
-E: 1429740166.998195 0000 0000 0
-E: 1429740167.006194 0002 0000 -2
-E: 1429740167.006194 0002 0001 -5
-E: 1429740167.006194 0000 0000 0
-E: 1429740167.014195 0002 0000 -2
-E: 1429740167.014195 0002 0001 -4
-E: 1429740167.014195 0000 0000 0
-E: 1429740167.022194 0002 0000 -2
-E: 1429740167.022194 0002 0001 -4
-E: 1429740167.022194 0000 0000 0
-E: 1429740167.030195 0002 0000 -1
-E: 1429740167.030195 0002 0001 -4
-E: 1429740167.030195 0000 0000 0
-E: 1429740167.038192 0002 0000 -1
-E: 1429740167.038192 0002 0001 -3
-E: 1429740167.038192 0000 0000 0
-E: 1429740167.046192 0002 0000 -2
-E: 1429740167.046192 0002 0001 -4
-E: 1429740167.046192 0000 0000 0
-E: 1429740167.054195 0002 0000 -1
-E: 1429740167.054195 0002 0001 -3
-E: 1429740167.054195 0000 0000 0
-E: 1429740167.062199 0002 0000 -2
-E: 1429740167.062199 0002 0001 -3
-E: 1429740167.062199 0000 0000 0
-E: 1429740167.070197 0002 0001 -3
-E: 1429740167.070197 0000 0000 0
-E: 1429740167.078148 0002 0000 -2
-E: 1429740167.078148 0002 0001 -2
-E: 1429740167.078148 0000 0000 0
-E: 1429740167.086127 0002 0001 -3
-E: 1429740167.086127 0000 0000 0
-E: 1429740167.094131 0002 0000 -2
-E: 1429740167.094131 0002 0001 -2
-E: 1429740167.094131 0000 0000 0
-E: 1429740167.102198 0002 0001 -2
-E: 1429740167.102198 0000 0000 0
-E: 1429740167.110202 0002 0000 -1
-E: 1429740167.110202 0002 0001 -2
-E: 1429740167.110202 0000 0000 0
-E: 1429740167.118199 0002 0000 -1
-E: 1429740167.118199 0002 0001 -3
-E: 1429740167.118199 0000 0000 0
-E: 1429740167.126199 0002 0001 -1
-E: 1429740167.126199 0000 0000 0
-E: 1429740167.134202 0002 0000 -1
-E: 1429740167.134202 0002 0001 -2
-E: 1429740167.134202 0000 0000 0
-E: 1429740167.142202 0002 0000 -1
-E: 1429740167.142202 0002 0001 -2
-E: 1429740167.142202 0000 0000 0
-E: 1429740167.150141 0002 0000 -1
-E: 1429740167.150141 0002 0001 -3
-E: 1429740167.150141 0000 0000 0
-E: 1429740167.158196 0002 0000 -1
-E: 1429740167.158196 0002 0001 -2
-E: 1429740167.158196 0000 0000 0
-E: 1429740167.166199 0002 0000 -1
-E: 1429740167.166199 0002 0001 -3
-E: 1429740167.166199 0000 0000 0
-E: 1429740167.174204 0002 0000 -2
-E: 1429740167.174204 0002 0001 -3
-E: 1429740167.174204 0000 0000 0
-E: 1429740167.182200 0002 0000 -1
-E: 1429740167.182200 0002 0001 -3
-E: 1429740167.182200 0000 0000 0
-E: 1429740167.190198 0002 0000 -2
-E: 1429740167.190198 0002 0001 -3
-E: 1429740167.190198 0000 0000 0
-E: 1429740167.198200 0002 0000 -1
-E: 1429740167.198200 0002 0001 -3
-E: 1429740167.198200 0000 0000 0
-E: 1429740167.206201 0002 0000 -3
-E: 1429740167.206201 0002 0001 -3
-E: 1429740167.206201 0000 0000 0
-E: 1429740167.214201 0002 0000 -2
-E: 1429740167.214201 0002 0001 -4
-E: 1429740167.214201 0000 0000 0
-E: 1429740167.222132 0002 0000 -2
-E: 1429740167.222132 0002 0001 -4
-E: 1429740167.222132 0000 0000 0
-E: 1429740167.230200 0002 0000 -2
-E: 1429740167.230200 0002 0001 -3
-E: 1429740167.230200 0000 0000 0
-E: 1429740167.238205 0002 0000 -1
-E: 1429740167.238205 0002 0001 -2
-E: 1429740167.238205 0000 0000 0
-E: 1429740167.246199 0002 0000 -2
-E: 1429740167.246199 0002 0001 -4
-E: 1429740167.246199 0000 0000 0
-E: 1429740167.254204 0002 0000 -1
-E: 1429740167.254204 0002 0001 -2
-E: 1429740167.254204 0000 0000 0
-E: 1429740167.262201 0002 0000 -1
-E: 1429740167.262201 0002 0001 -3
-E: 1429740167.262201 0000 0000 0
-E: 1429740167.270202 0002 0000 -2
-E: 1429740167.270202 0002 0001 -3
-E: 1429740167.270202 0000 0000 0
-E: 1429740167.278203 0002 0000 -2
-E: 1429740167.278203 0002 0001 -3
-E: 1429740167.278203 0000 0000 0
-E: 1429740167.286201 0002 0000 -1
-E: 1429740167.286201 0002 0001 -3
-E: 1429740167.286201 0000 0000 0
-E: 1429740167.294202 0002 0000 -3
-E: 1429740167.294202 0002 0001 -3
-E: 1429740167.294202 0000 0000 0
-E: 1429740167.302207 0002 0000 -1
-E: 1429740167.302207 0002 0001 -4
-E: 1429740167.302207 0000 0000 0
-E: 1429740167.310202 0002 0000 -2
-E: 1429740167.310202 0002 0001 -3
-E: 1429740167.310202 0000 0000 0
-E: 1429740167.318203 0002 0000 -3
-E: 1429740167.318203 0002 0001 -3
-E: 1429740167.318203 0000 0000 0
-E: 1429740167.326204 0002 0000 -1
-E: 1429740167.326204 0002 0001 -4
-E: 1429740167.326204 0000 0000 0
-E: 1429740167.334203 0002 0000 -2
-E: 1429740167.334203 0002 0001 -3
-E: 1429740167.334203 0000 0000 0
-E: 1429740167.342204 0002 0000 -2
-E: 1429740167.342204 0002 0001 -3
-E: 1429740167.342204 0000 0000 0
-E: 1429740167.350136 0002 0000 -1
-E: 1429740167.350136 0002 0001 -3
-E: 1429740167.350136 0000 0000 0
-E: 1429740167.358203 0002 0000 -2
-E: 1429740167.358203 0002 0001 -3
-E: 1429740167.358203 0000 0000 0
-E: 1429740167.366208 0002 0000 -1
-E: 1429740167.366208 0002 0001 -3
-E: 1429740167.366208 0000 0000 0
-E: 1429740167.374201 0002 0000 -2
-E: 1429740167.374201 0002 0001 -3
-E: 1429740167.374201 0000 0000 0
-E: 1429740167.382203 0002 0000 -1
-E: 1429740167.382203 0002 0001 -2
-E: 1429740167.382203 0000 0000 0
-E: 1429740167.390205 0002 0000 -2
-E: 1429740167.390205 0002 0001 -3
-E: 1429740167.390205 0000 0000 0
-E: 1429740167.398206 0002 0000 -2
-E: 1429740167.398206 0002 0001 -3
-E: 1429740167.398206 0000 0000 0
-E: 1429740167.406204 0002 0000 -1
-E: 1429740167.406204 0002 0001 -3
-E: 1429740167.406204 0000 0000 0
-E: 1429740167.414204 0002 0000 -1
-E: 1429740167.414204 0002 0001 -2
-E: 1429740167.414204 0000 0000 0
-E: 1429740167.422204 0002 0000 -1
-E: 1429740167.422204 0002 0001 -2
-E: 1429740167.422204 0000 0000 0
-E: 1429740167.430174 0002 0000 -1
-E: 1429740167.430174 0002 0001 -2
-E: 1429740167.430174 0000 0000 0
-E: 1429740167.438202 0002 0000 -1
-E: 1429740167.438202 0002 0001 -2
-E: 1429740167.438202 0000 0000 0
-E: 1429740167.446206 0002 0000 -1
-E: 1429740167.446206 0002 0001 -2
-E: 1429740167.446206 0000 0000 0
-E: 1429740167.454207 0002 0000 -1
-E: 1429740167.454207 0002 0001 -2
-E: 1429740167.454207 0000 0000 0
-E: 1429740167.470205 0002 0000 -1
-E: 1429740167.470205 0002 0001 -2
-E: 1429740167.470205 0000 0000 0
-E: 1429740167.478159 0002 0000 -1
-E: 1429740167.478159 0002 0001 -1
-E: 1429740167.478159 0000 0000 0
-E: 1429740167.486206 0002 0001 -1
-E: 1429740167.486206 0000 0000 0
-E: 1429740167.494207 0002 0001 -1
-E: 1429740167.494207 0000 0000 0
-E: 1429740167.502209 0002 0000 -1
-E: 1429740167.502209 0002 0001 -1
-E: 1429740167.502209 0000 0000 0
-E: 1429740167.542207 0002 0001 -1
-E: 1429740167.542207 0000 0000 0
-E: 1429740167.878214 0002 0000 1
-E: 1429740167.878214 0000 0000 0
-E: 1429740167.886164 0002 0000 1
-E: 1429740167.886164 0000 0000 0
-E: 1429740167.894215 0002 0000 1
-E: 1429740167.894215 0002 0001 1
-E: 1429740167.894215 0000 0000 0
-E: 1429740167.902215 0002 0000 1
-E: 1429740167.902215 0002 0001 1
-E: 1429740167.902215 0000 0000 0
-E: 1429740167.918215 0002 0000 1
-E: 1429740167.918215 0000 0000 0
-E: 1429740167.934213 0002 0000 1
-E: 1429740167.934213 0002 0001 1
-E: 1429740167.934213 0000 0000 0
-E: 1429740167.942216 0002 0000 1
-E: 1429740167.942216 0000 0000 0
-E: 1429740167.950214 0002 0000 1
-E: 1429740167.950214 0002 0001 1
-E: 1429740167.950214 0000 0000 0
-E: 1429740167.958219 0002 0000 1
-E: 1429740167.958219 0000 0000 0
-E: 1429740167.966217 0002 0000 1
-E: 1429740167.966217 0002 0001 1
-E: 1429740167.966217 0000 0000 0
-E: 1429740167.974215 0002 0000 1
-E: 1429740167.974215 0002 0001 1
-E: 1429740167.974215 0000 0000 0
-E: 1429740167.982216 0002 0000 1
-E: 1429740167.982216 0000 0000 0
-E: 1429740167.990150 0002 0000 1
-E: 1429740167.990150 0002 0001 1
-E: 1429740167.990150 0000 0000 0
-E: 1429740167.998216 0002 0000 1
-E: 1429740167.998216 0002 0001 1
-E: 1429740167.998216 0000 0000 0
-E: 1429740168.006218 0002 0000 1
-E: 1429740168.006218 0002 0001 1
-E: 1429740168.006218 0000 0000 0
-E: 1429740168.014218 0002 0000 2
-E: 1429740168.014218 0002 0001 1
-E: 1429740168.014218 0000 0000 0
-E: 1429740168.022218 0002 0000 1
-E: 1429740168.022218 0002 0001 1
-E: 1429740168.022218 0000 0000 0
-E: 1429740168.030218 0002 0000 1
-E: 1429740168.030218 0002 0001 2
-E: 1429740168.030218 0000 0000 0
-E: 1429740168.038213 0002 0000 2
-E: 1429740168.038213 0002 0001 1
-E: 1429740168.038213 0000 0000 0
-E: 1429740168.046218 0002 0000 2
-E: 1429740168.046218 0002 0001 1
-E: 1429740168.046218 0000 0000 0
-E: 1429740168.054218 0002 0000 2
-E: 1429740168.054218 0002 0001 2
-E: 1429740168.054218 0000 0000 0
-E: 1429740168.062218 0002 0000 3
-E: 1429740168.062218 0002 0001 1
-E: 1429740168.062218 0000 0000 0
-E: 1429740168.070150 0002 0000 2
-E: 1429740168.070150 0002 0001 1
-E: 1429740168.070150 0000 0000 0
-E: 1429740168.078218 0002 0000 2
-E: 1429740168.078218 0002 0001 1
-E: 1429740168.078218 0000 0000 0
-E: 1429740168.086149 0002 0000 1
-E: 1429740168.086149 0002 0001 1
-E: 1429740168.086149 0000 0000 0
-E: 1429740168.094165 0002 0000 3
-E: 1429740168.094165 0002 0001 1
-E: 1429740168.094165 0000 0000 0
-E: 1429740168.102219 0002 0000 1
-E: 1429740168.102219 0002 0001 1
-E: 1429740168.102219 0000 0000 0
-E: 1429740168.110152 0002 0000 2
-E: 1429740168.110152 0002 0001 1
-E: 1429740168.110152 0000 0000 0
-E: 1429740168.118155 0002 0000 1
-E: 1429740168.118155 0002 0001 1
-E: 1429740168.118155 0000 0000 0
-E: 1429740168.126219 0002 0000 1
-E: 1429740168.126219 0000 0000 0
-E: 1429740168.134220 0002 0000 1
-E: 1429740168.134220 0002 0001 1
-E: 1429740168.134220 0000 0000 0
-E: 1429740168.142220 0002 0000 2
-E: 1429740168.142220 0000 0000 0
-E: 1429740168.150168 0002 0000 1
-E: 1429740168.150168 0002 0001 1
-E: 1429740168.150168 0000 0000 0
-E: 1429740168.158151 0002 0000 2
-E: 1429740168.158151 0002 0001 1
-E: 1429740168.158151 0000 0000 0
-E: 1429740168.166219 0002 0000 1
-E: 1429740168.166219 0002 0001 1
-E: 1429740168.166219 0000 0000 0
-E: 1429740168.174151 0002 0000 2
-E: 1429740168.174151 0002 0001 1
-E: 1429740168.174151 0000 0000 0
-E: 1429740168.182222 0002 0000 1
-E: 1429740168.182222 0000 0000 0
-E: 1429740168.190152 0002 0000 2
-E: 1429740168.190152 0002 0001 1
-E: 1429740168.190152 0000 0000 0
-E: 1429740168.198221 0002 0000 1
-E: 1429740168.198221 0000 0000 0
-E: 1429740168.206153 0002 0000 1
-E: 1429740168.206153 0002 0001 1
-E: 1429740168.206153 0000 0000 0
-E: 1429740168.214150 0002 0000 1
-E: 1429740168.214150 0000 0000 0
-E: 1429740168.222151 0002 0000 2
-E: 1429740168.222151 0002 0001 1
-E: 1429740168.222151 0000 0000 0
-E: 1429740168.238151 0002 0000 1
-E: 1429740168.238151 0002 0001 1
-E: 1429740168.238151 0000 0000 0
-E: 1429740168.246154 0002 0000 1
-E: 1429740168.246154 0000 0000 0
-E: 1429740168.254150 0002 0000 2
-E: 1429740168.254150 0000 0000 0
-E: 1429740168.262222 0002 0000 2
-E: 1429740168.262222 0002 0001 1
-E: 1429740168.262222 0000 0000 0
-E: 1429740168.270149 0002 0000 1
-E: 1429740168.270149 0002 0001 2
-E: 1429740168.270149 0000 0000 0
-E: 1429740168.278220 0002 0000 2
-E: 1429740168.278220 0002 0001 1
-E: 1429740168.278220 0000 0000 0
-E: 1429740168.286149 0002 0000 2
-E: 1429740168.286149 0002 0001 1
-E: 1429740168.286149 0000 0000 0
-E: 1429740168.294222 0002 0000 1
-E: 1429740168.294222 0002 0001 1
-E: 1429740168.294222 0000 0000 0
-E: 1429740168.302220 0002 0000 2
-E: 1429740168.302220 0002 0001 1
-E: 1429740168.302220 0000 0000 0
-E: 1429740168.310170 0002 0000 1
-E: 1429740168.310170 0002 0001 1
-E: 1429740168.310170 0000 0000 0
-E: 1429740168.318222 0002 0000 2
-E: 1429740168.318222 0002 0001 2
-E: 1429740168.318222 0000 0000 0
-E: 1429740168.326155 0002 0000 3
-E: 1429740168.326155 0002 0001 2
-E: 1429740168.326155 0000 0000 0
-E: 1429740168.334200 0002 0000 3
-E: 1429740168.334200 0002 0001 3
-E: 1429740168.334200 0000 0000 0
-E: 1429740168.342155 0002 0000 3
-E: 1429740168.342155 0002 0001 3
-E: 1429740168.342155 0000 0000 0
-E: 1429740168.350151 0002 0000 4
-E: 1429740168.350151 0002 0001 2
-E: 1429740168.350151 0000 0000 0
-E: 1429740168.358161 0002 0000 3
-E: 1429740168.358161 0002 0001 3
-E: 1429740168.358161 0000 0000 0
-E: 1429740168.366176 0002 0000 3
-E: 1429740168.366176 0002 0001 3
-E: 1429740168.366176 0000 0000 0
-E: 1429740168.374146 0002 0000 2
-E: 1429740168.374146 0002 0001 3
-E: 1429740168.374146 0000 0000 0
-E: 1429740168.382190 0002 0000 2
-E: 1429740168.382190 0002 0001 2
-E: 1429740168.382190 0000 0000 0
-E: 1429740168.390153 0002 0000 2
-E: 1429740168.390153 0002 0001 2
-E: 1429740168.390153 0000 0000 0
-E: 1429740168.398225 0002 0000 2
-E: 1429740168.398225 0002 0001 2
-E: 1429740168.398225 0000 0000 0
-E: 1429740168.406154 0002 0000 1
-E: 1429740168.406154 0002 0001 2
-E: 1429740168.406154 0000 0000 0
-E: 1429740168.414154 0002 0000 1
-E: 1429740168.414154 0002 0001 1
-E: 1429740168.414154 0000 0000 0
-E: 1429740168.422153 0002 0000 1
-E: 1429740168.422153 0002 0001 1
-E: 1429740168.422153 0000 0000 0
-E: 1429740168.430224 0002 0000 1
-E: 1429740168.430224 0002 0001 1
-E: 1429740168.430224 0000 0000 0
-E: 1429740168.438152 0002 0000 2
-E: 1429740168.438152 0002 0001 2
-E: 1429740168.438152 0000 0000 0
-E: 1429740168.446172 0002 0000 3
-E: 1429740168.446172 0002 0001 2
-E: 1429740168.446172 0000 0000 0
-E: 1429740168.454153 0002 0000 2
-E: 1429740168.454153 0002 0001 2
-E: 1429740168.454153 0000 0000 0
-E: 1429740168.462153 0002 0000 2
-E: 1429740168.462153 0002 0001 2
-E: 1429740168.462153 0000 0000 0
-E: 1429740168.470154 0002 0000 2
-E: 1429740168.470154 0002 0001 2
-E: 1429740168.470154 0000 0000 0
-E: 1429740168.478219 0002 0000 1
-E: 1429740168.478219 0002 0001 1
-E: 1429740168.478219 0000 0000 0
-E: 1429740168.486151 0002 0000 2
-E: 1429740168.486151 0002 0001 1
-E: 1429740168.486151 0000 0000 0
-E: 1429740168.494224 0002 0000 1
-E: 1429740168.494224 0002 0001 1
-E: 1429740168.494224 0000 0000 0
-E: 1429740168.502160 0002 0000 1
-E: 1429740168.502160 0002 0001 2
-E: 1429740168.502160 0000 0000 0
-E: 1429740168.510157 0002 0000 2
-E: 1429740168.510157 0002 0001 1
-E: 1429740168.510157 0000 0000 0
-E: 1429740168.518221 0002 0000 1
-E: 1429740168.518221 0002 0001 2
-E: 1429740168.518221 0000 0000 0
-E: 1429740168.526155 0002 0000 1
-E: 1429740168.526155 0000 0000 0
-E: 1429740168.534154 0002 0000 1
-E: 1429740168.534154 0002 0001 1
-E: 1429740168.534154 0000 0000 0
-E: 1429740168.542157 0002 0001 1
-E: 1429740168.542157 0000 0000 0
-E: 1429740168.550170 0002 0000 1
-E: 1429740168.550170 0000 0000 0
-E: 1429740168.558158 0002 0000 1
-E: 1429740168.558158 0002 0001 1
-E: 1429740168.558158 0000 0000 0
-E: 1429740168.838229 0002 0000 1
-E: 1429740168.838229 0002 0001 1
-E: 1429740168.838229 0000 0000 0
-E: 1429740168.846229 0002 0000 2
-E: 1429740168.846229 0002 0001 2
-E: 1429740168.846229 0000 0000 0
-E: 1429740168.854228 0002 0000 1
-E: 1429740168.854228 0002 0001 1
-E: 1429740168.854228 0000 0000 0
-E: 1429740168.862177 0002 0000 3
-E: 1429740168.862177 0002 0001 3
-E: 1429740168.862177 0000 0000 0
-E: 1429740168.870228 0002 0000 1
-E: 1429740168.870228 0002 0001 2
-E: 1429740168.870228 0000 0000 0
-E: 1429740168.878224 0002 0000 2
-E: 1429740168.878224 0002 0001 3
-E: 1429740168.878224 0000 0000 0
-E: 1429740168.886154 0002 0000 1
-E: 1429740168.886154 0002 0001 1
-E: 1429740168.886154 0000 0000 0
-E: 1429740168.894228 0002 0000 1
-E: 1429740168.894228 0002 0001 1
-E: 1429740168.894228 0000 0000 0
-E: 1429740168.902174 0002 0000 1
-E: 1429740168.902174 0002 0001 1
-E: 1429740168.902174 0000 0000 0
-E: 1429740168.910158 0002 0001 1
-E: 1429740168.910158 0000 0000 0
-E: 1429740168.918177 0002 0000 1
-E: 1429740168.918177 0002 0001 1
-E: 1429740168.918177 0000 0000 0
-E: 1429740168.926230 0002 0001 1
-E: 1429740168.926230 0000 0000 0
-E: 1429740168.934230 0002 0000 1
-E: 1429740168.934230 0002 0001 1
-E: 1429740168.934230 0000 0000 0
-E: 1429740168.942230 0002 0000 1
-E: 1429740168.942230 0000 0000 0
-E: 1429740168.950229 0002 0001 1
-E: 1429740168.950229 0000 0000 0
-E: 1429740168.966231 0002 0001 1
-E: 1429740168.966231 0000 0000 0
-E: 1429740168.982176 0002 0000 1
-E: 1429740168.982176 0000 0000 0
-E: 1429740169.750252 0004 0004 589825
-E: 1429740169.750252 0001 0110 1
-E: 1429740169.750252 0000 0000 0
-E: 1429740170.070256 0004 0004 589825
-E: 1429740170.070256 0001 0110 0
-E: 1429740170.070256 0000 0000 0
-E: 1429740170.246260 0002 0000 -2
-E: 1429740170.246260 0000 0000 0
-E: 1429740170.254262 0002 0000 -4
-E: 1429740170.254262 0002 0001 1
-E: 1429740170.254262 0000 0000 0
-E: 1429740170.262211 0002 0000 -4
-E: 1429740170.262211 0002 0001 1
-E: 1429740170.262211 0000 0000 0
-E: 1429740170.270262 0002 0000 -5
-E: 1429740170.270262 0000 0000 0
-E: 1429740170.278261 0002 0000 -4
-E: 1429740170.278261 0002 0001 1
-E: 1429740170.278261 0000 0000 0
-E: 1429740170.286262 0002 0000 -3
-E: 1429740170.286262 0000 0000 0
-E: 1429740170.294210 0002 0000 -4
-E: 1429740170.294210 0002 0001 1
-E: 1429740170.294210 0000 0000 0
-E: 1429740170.302191 0002 0000 -4
-E: 1429740170.302191 0002 0001 1
-E: 1429740170.302191 0000 0000 0
-E: 1429740170.310261 0002 0000 -5
-E: 1429740170.310261 0000 0000 0
-E: 1429740170.318258 0002 0000 -6
-E: 1429740170.318258 0002 0001 1
-E: 1429740170.318258 0000 0000 0
-E: 1429740170.326261 0002 0000 -4
-E: 1429740170.326261 0002 0001 1
-E: 1429740170.326261 0000 0000 0
-E: 1429740170.350208 0002 0000 -1
-E: 1429740170.350208 0000 0000 0
-E: 1429740170.462265 0002 0001 1
-E: 1429740170.462265 0000 0000 0
-E: 1429740170.470264 0002 0001 1
-E: 1429740170.470264 0000 0000 0
-E: 1429740170.478259 0002 0001 1
-E: 1429740170.478259 0000 0000 0
-E: 1429740170.486266 0002 0000 1
-E: 1429740170.486266 0002 0001 2
-E: 1429740170.486266 0000 0000 0
-E: 1429740170.494195 0002 0001 1
-E: 1429740170.494195 0000 0000 0
-E: 1429740170.502264 0002 0000 1
-E: 1429740170.502264 0002 0001 1
-E: 1429740170.502264 0000 0000 0
-E: 1429740170.510265 0002 0001 1
-E: 1429740170.510265 0000 0000 0
-E: 1429740170.518201 0002 0001 1
-E: 1429740170.518201 0000 0000 0
-E: 1429740170.526211 0002 0000 1
-E: 1429740170.526211 0000 0000 0
-E: 1429740170.534264 0002 0001 1
-E: 1429740170.534264 0000 0000 0
-E: 1429740170.566266 0002 0000 1
-E: 1429740170.566266 0002 0001 1
-E: 1429740170.566266 0000 0000 0
-E: 1429740170.582211 0002 0001 1
-E: 1429740170.582211 0000 0000 0
-E: 1429740170.598266 0002 0000 1
-E: 1429740170.598266 0002 0001 1
-E: 1429740170.598266 0000 0000 0
-E: 1429740170.614265 0002 0001 1
-E: 1429740170.614265 0000 0000 0
-E: 1429740170.622265 0002 0000 1
-E: 1429740170.622265 0000 0000 0
-E: 1429740170.638265 0002 0001 1
-E: 1429740170.638265 0000 0000 0
-E: 1429740170.654265 0002 0001 1
-E: 1429740170.654265 0000 0000 0
-E: 1429740170.670267 0002 0000 1
-E: 1429740170.670267 0002 0001 1
-E: 1429740170.670267 0000 0000 0
-E: 1429740170.694226 0002 0001 1
-E: 1429740170.694226 0000 0000 0
-E: 1429740170.702271 0002 0000 1
-E: 1429740170.702271 0002 0001 1
-E: 1429740170.702271 0000 0000 0
-E: 1429740170.710200 0002 0001 1
-E: 1429740170.710200 0000 0000 0
-E: 1429740170.718264 0002 0000 1
-E: 1429740170.718264 0002 0001 1
-E: 1429740170.718264 0000 0000 0
-E: 1429740170.726266 0002 0000 1
-E: 1429740170.726266 0002 0001 2
-E: 1429740170.726266 0000 0000 0
-E: 1429740170.734268 0002 0000 2
-E: 1429740170.734268 0002 0001 2
-E: 1429740170.734268 0000 0000 0
-E: 1429740170.742196 0002 0000 1
-E: 1429740170.742196 0002 0001 2
-E: 1429740170.742196 0000 0000 0
-E: 1429740170.750266 0002 0000 2
-E: 1429740170.750266 0002 0001 2
-E: 1429740170.750266 0000 0000 0
-E: 1429740170.758195 0002 0000 1
-E: 1429740170.758195 0002 0001 2
-E: 1429740170.758195 0000 0000 0
-E: 1429740170.766215 0002 0000 2
-E: 1429740170.766215 0002 0001 1
-E: 1429740170.766215 0000 0000 0
-E: 1429740170.774213 0002 0001 1
-E: 1429740170.774213 0000 0000 0
-E: 1429740170.782215 0002 0000 1
-E: 1429740170.782215 0002 0001 1
-E: 1429740170.782215 0000 0000 0
-E: 1429740170.902274 0002 0001 1
-E: 1429740170.902274 0000 0000 0
-E: 1429740171.214278 0002 0000 1
-E: 1429740171.214278 0000 0000 0
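
The run of removed "E:" records above is the raw evemu recording (presumably
the button_click.event gesture referenced by the deleted documentscan test
below). Each record is "E: <sec.usec> <type> <code> <value>", where type and
code look like hex input-event codes: 0002/0000 and 0002/0001 are relative
X/Y motion, 0001/0110 is the left mouse button, and 0000/0000 marks a sync
event. A minimal parser sketch under that assumption (illustrative only, not
part of autotest):

    # Sketch: summarize an evemu recording such as the one removed above.
    # Assumes the standard "E: <sec.usec> <type> <code> <value>" layout with
    # type/code in hex; constants mirror linux/input-event-codes.h.
    EV_KEY, EV_REL = 0x01, 0x02
    REL_X, REL_Y, BTN_LEFT = 0x00, 0x01, 0x110

    def summarize(path):
        """Return (net_dx, net_dy, left_clicks) for an evemu recording."""
        dx = dy = clicks = 0
        with open(path) as f:
            for line in f:
                if not line.startswith('E:'):
                    continue  # skip the device-description header lines
                _tag, _ts, etype, code, value = line.split()
                etype, code, value = int(etype, 16), int(code, 16), int(value)
                if etype == EV_REL and code == REL_X:
                    dx += value
                elif etype == EV_REL and code == REL_Y:
                    dy += value
                elif etype == EV_KEY and code == BTN_LEFT and value == 1:
                    clicks += 1
        return dx, dy, clicks

Run against the recording above, summarize() would report the net pointer
motion plus a single left click, matching the move-then-click gesture the
test's control file describes.
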
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/control b/client/site_tests/documentscan_AppTestWithFakeLorgnette/control
deleted file mode 100644
index f74efa1..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/control
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, kathrelkeld'
-NAME = 'documentscan_AppTestWithFakeLorgnette'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-
-DOC = """
-This test verifies that the Chrome APIs work correctly to acquire
-image data from a mocked version of the lorgnette document image
-capture daemon.  This test fails if:
-
-  - The test extension cannot be installed
-  - The extension is unable to interact with the Chrome scanning API
-  - The Chrome scanning API cannot interact with the mock lorgnette
-
-This test is built up out of a number of interlocking components:
-
-  - A chrome app that accesses the Document Scan API (in the
-    "document_scan_test_app" directory).  The test launches this
-    app in fullscreen mode.
-
-  - An emulated mouse, implemented by subclassing touch_playback_test_base
-    and using the "amazon_mouse.prop" file.
-
-  - A mouse move and button click event stream stored in the
-    "button_click.event" file.  The pair of this and the emulated mouse
-    allows us to provide a gesture when clicking on the "Scan" button
-    within the App.  Since the Chrome document scan API requires a user
-    gesture, this method provides a viable option as opposed to
-    triggering the scan from javascript.
-
-  - The "mock_lorgnette" module that provides a fake lorgnette daemon
-    that the Chrome process will communicate with for the purposes of
-    this test.  It will accept the D-Bus RPC calls that Chrome makes
-    and provide the "lorgnette-test.png" image in response to a scan
-    request.
-
-"""
-
-job.run_test('documentscan_AppTestWithFakeLorgnette', iface_name='all', tag='all')
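
The DOC above explains that chrome.documentScan.scan must be triggered by a
user gesture, which is why the test replays a canned mouse recording through
an emulated mouse instead of starting the scan from JavaScript. Outside of
autotest's touch_playback helpers, roughly the same replay can be done with
the evemu tools; a hedged sketch, in which the device node, the file name,
and the stdin-driven evemu-play invocation are assumptions rather than
anything this test ships:

    # Sketch: replay a recorded gesture onto an existing input node.
    # Assumes evemu-play is installed and reads the recording from stdin.
    import subprocess

    def replay(recording='button_click.event', device='/dev/input/event5'):
        """Inject the recorded mouse gesture into the given event node."""
        with open(recording) as f:
            subprocess.check_call(['evemu-play', device], stdin=f)
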
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/README.md b/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/README.md
deleted file mode 100644
index 22c61ca..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/README.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# Document Scanning API Sample
-
-This demo interfaces with the Chrome document scanning API to acquire scanned
-images.
-
-## APIs
-
-* [Document scanning API](https://developer.chrome.com/apps/document_scan)
-* [Runtime](https://developer.chrome.com/apps/runtime)
-* [Window](https://developer.chrome.com/apps/app_window)
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/background.js b/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/background.js
deleted file mode 100644
index de01d19..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/background.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-chrome.app.runtime.onLaunched.addListener(function() {
-  chrome.app.window.create('scan.html', {
-    singleton: true,
-    id: "ChromeApps-Sample-Document-Scan",
-    bounds: {
-     'width': 480,
-     'height': 640
-    }
-  });
-});
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/manifest.json b/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/manifest.json
deleted file mode 100644
index b9d0269..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/manifest.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "name": "Document Scanning API Sample",
-  "version": "0.1",
-  "manifest_version": 2,
-  "minimum_chrome_version": "37",
-  "app": {
-    "background": {
-      "scripts": ["background.js"]
-    }
-  },
-  "permissions": [ "documentScan", "fullscreen" ]
-}
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/scan.css b/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/scan.css
deleted file mode 100644
index 7d5c95a..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/scan.css
+++ /dev/null
@@ -1,42 +0,0 @@
-/* Copyright 2015 The Chromium Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#waitAnimation {
-  position: absolute;
-  left: 0px;
-  top: 0px;
-  height:100%;
-  width:100%;
-  z-index:1000;
-  background-color:black;
-  opacity:0.6;
-}
-
-#waitSpinner {
-  position: absolute;
-  height:60px;
-  width:60px;
-  top: 50%;
-  left: 50%;
-  margin-left: -30px;
-  margin-top: -30px;
-  -webkit-animation: rotation .6s infinite linear;
-  animation: rotation .6s infinite linear;
-  border-left:6px solid rgba(180,174,239,.15);
-  border-right:6px solid rgba(180,174,239,.15);
-  border-bottom:6px solid rgba(180,174,239,.15);
-  border-top:6px solid rgba(180,174,239,.8);
-  border-radius:100%;
-}
-
-#scanButton {
-  width: 100%;
-  height: 200px;
-}
-
-@-webkit-keyframes rotation {
-  from {-webkit-transform: rotate(0deg);}
-  to {-webkit-transform: rotate(359deg);}
-}
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/scan.html b/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/scan.html
deleted file mode 100644
index a7b24b6..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/scan.html
+++ /dev/null
@@ -1,18 +0,0 @@
-<!DOCTYPE html>
-<html>
-  <head>
-    <title>Scanner Control</title>
-    <link rel="stylesheet" type="text/css" href="scan.css">
-  </head>
-  <body>
-    <div id="waitAnimation" style="display: none;">
-      <div id="waitSpinner"></div>
-    </div>
-    <button id="requestButton">Request App permissions</button>
-    <button id="scanButton">Scan</button>
-    <div id="scannedImages">
-    </div>
-    <script src="scan.js"></script>
-  </body>
-</html>
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/scan.js b/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/scan.js
deleted file mode 100644
index 211482f..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/document_scan_test_app/scan.js
+++ /dev/null
@@ -1,69 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-var requestButton = document.getElementById("requestButton");
-var scanButton = document.getElementById('scanButton');
-var scannedImages = document.getElementById('scannedImages');
-var waitAnimation = document.getElementById('waitAnimation');
-var imageMimeType;
-
-function setOnlyChild(parent, child) {
-  while (parent.firstChild) {
-    parent.removeChild(parent.firstChild);
-  }
-  parent.appendChild(child);
-}
-
-var gotPermission = function(result) {
-  waitAnimation.style.display = 'block';
-  requestButton.style.display = 'none';
-  scanButton.style.display = 'block';
-  console.log('App was granted the "documentScan" permission.');
-  waitAnimation.style.display = 'none';
-};
-
-var permissionObj = {permissions: ['documentScan']};
-
-requestButton.addEventListener('click', function() {
-  waitAnimation.style.display = 'block';
-  chrome.permissions.request( permissionObj, function(result) {
-    if (result) {
-      gotPermission();
-    } else {
-      console.log('App was not granted the "documentScan" permission.');
-      console.log(chrome.runtime.lastError);
-    }
-  });
-});
-
-var onScanCompleted = function(scan_results) {
-  waitAnimation.style.display = 'none';
-  if (chrome.runtime.lastError) {
-    console.log('Scan failed: ' + chrome.runtime.lastError.message);
-    return;
-  }
-  var numImages = scan_results.dataUrls.length;
-  console.log('Scan completed with ' + numImages + ' images.');
-  for (var i = 0; i < numImages; i++) {
-    var urlData = scan_results.dataUrls[i];
-    console.log('Scan ' + i + ' data length ' +
-                urlData.length + '.');
-    console.log('URL is ' + urlData);
-    var scannedImage = document.createElement('img');
-    scannedImage.src = urlData;
-    scannedImages.insertBefore(scannedImage, scannedImages.firstChild);
-  }
-};
-
-scanButton.addEventListener('click', function() {
-  var scanProperties = {};
-  waitAnimation.style.display = 'block';
-  chrome.documentScan.scan(scanProperties, onScanCompleted);
-});
-
-chrome.permissions.contains(permissionObj, function(result) {
-  if (result) {
-    gotPermission();
-  }
-});
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/documentscan_AppTestWithFakeLorgnette.py b/client/site_tests/documentscan_AppTestWithFakeLorgnette/documentscan_AppTestWithFakeLorgnette.py
deleted file mode 100644
index 829ade6..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/documentscan_AppTestWithFakeLorgnette.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import base64
-import mock_lorgnette
-import os
-
-from autotest_lib.client.cros import touch_playback_test_base
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-
-
-class documentscan_AppTestWithFakeLorgnette(
-        touch_playback_test_base.touch_playback_test_base):
-    """ Test that an extension using the DocumentScan Chrome API can
-        successfully retrieve a scanned document from a mocked version
-        of the lorgnette daemon.
-    """
-    version = 1
-
-    # Application ID of the test scan application.
-    _APP_ID = 'mljeglgkknlanoeffbeehogdhkhnaidk'
-
-    # Document to open in order to launch the scan application.
-    _APP_DOCUMENT = 'scan.html'
-
-    # Window ID that references the scan application window.
-    _APP_WINDOW_ID = 'ChromeApps-Sample-Document-Scan'
-
-    # Element within the scan application document that contains image scans.
-    _APP_SCANNED_IMAGE_ELEMENT = 'scannedImages'
-
-    # Description of the fake mouse we add to the system.
-    _MOUSE_DESCRIPTION = 'amazon_mouse.prop'
-
-    # This input file was created as follows:
-    #  - Insert USB mouse (in this case the Amazon mouse)
-    #  - head /sys/class/input/*/name | grep -iB1 mouse
-    #    This will give you the /sys/class/inputXX for the mouse.
-    #  - evemu-record /dev/input/eventXX -1 > /tmp/button_click.event
-    #    Move the mouse diagonally upwards to the upper left, move
-    #    down and right a bit then click.
-    _PLAYBACK_FILE = 'button_click.event'
-
-    # Image file to serve up to Chrome in response to a scan request.
-    _IMAGE_FILENAME = 'lorgnette-test.png'
-
-    # Expected prefix for the SRC tag of the scanned images.
-    _BASE64_IMAGE_HEADER = 'data:image/png;base64,'
-
-    def _play_events(self, event_filename):
-        """Simulate mouse events since the Chrome API enforces that
-        the scan action come from a user gesture.
-
-        @param event_filename string filename containing events to play back
-        """
-
-        file_path = os.path.join(self.bindir, event_filename)
-        self._blocking_playback(file_path, touch_type='mouse')
-
-
-    def _launch_app(self, chrome_instance):
-        """Launches the sample scanner Chrome app.
-
-        @param chrome_instance object of type chrome.Chrome
-        """
-
-        self._extension = chrome_instance.get_extension(self._extension_path)
-
-        # TODO(pstew): chrome.management.launchApp() would have been
-        # ideal here, but is not available even after adding the
-        # "management" permission to the app.  Instead, we perform
-        # the launch action of the extension directly.
-        cmd = '''
-            chrome.app.window.create('%s', {
-              singleton: true,
-              id: '%s',
-              state: 'fullscreen'
-            });
-        ''' % (self._APP_DOCUMENT, self._APP_WINDOW_ID)
-        self._extension.ExecuteJavaScript(cmd)
-
-
-    def _query_scan_element(self, query):
-        """Queries the "scannedImages" element within the app window.
-
-        @param query string javascript query to execute on the DIV element.
-        """
-
-        cmd = '''
-           app_window = chrome.app.window.get('%s');
-           element = app_window.contentWindow.document.getElementById('%s');
-           element.%s;
-        ''' % (self._APP_WINDOW_ID, self._APP_SCANNED_IMAGE_ELEMENT, query)
-        return self._extension.EvaluateJavaScript(cmd)
-
-
-    def _get_scan_count(self):
-        """Counts the number of successful scanned images displayed.
-
-        @return int number of scanned images shown in the app window.
-        """
-
-        result = self._query_scan_element('childNodes.length')
-
-        # Subtract 1 for the text node member of the DIV element.
-        return int(result) - 1
-
-
-    def _validate_image_data(self, expected_image_data):
-        """Validates that the scanned image displayed by the app is the same
-        as the image provided by the fake lorgnette daemon.
-        """
-
-        image_src = self._query_scan_element('childNodes[0].src')
-        if not image_src.startswith(self._BASE64_IMAGE_HEADER):
-            raise error.TestError(
-                    'Image SRC does not start with base64 data header: %s' %
-                    image_src)
-
-        base64_data = image_src[len(self._BASE64_IMAGE_HEADER):]
-        data = base64.b64decode(base64_data)
-        if expected_image_data != data:
-            raise error.TestError('Image data from tag is not the same as '
-                                  'the test image data')
-
-
-    def _validate_mock_method_calls(self, calls):
-        """Validate the method calls made on the lorgnette mock instance.
-
-        @param calls list of MethodCall named tuples from mock lorgnette.
-        """
-
-        if len(calls) != 2:
-            raise error.TestError('Expected 2 method calls but got: %r' % calls)
-
-        for index, method_name in enumerate(['ListScanners', 'ScanImage']):
-            if calls[index].method != method_name:
-                raise error.TestError('Call #%d was %s instead of expected %s' %
-                                      (index, calls[index].method, method_name))
-
-
-    def run_once(self):
-        """Entry point of this test."""
-        mouse_file = os.path.join(self.bindir, self._MOUSE_DESCRIPTION)
-        self._emulate_mouse(property_file=mouse_file)
-
-        self._extension_path = os.path.join(os.path.dirname(__file__),
-                                            'document_scan_test_app')
-
-        with chrome.Chrome(extension_paths=[self._extension_path],
-                           init_network_controller=True) as cr:
-            img = os.path.join(self.bindir, self._IMAGE_FILENAME)
-            with mock_lorgnette.MockLorgnette(img) as lorgnette_instance:
-                self._launch_app(cr)
-
-                self._play_events(self._PLAYBACK_FILE)
-
-                scan_count = self._get_scan_count()
-                if scan_count != 1:
-                    raise error.TestError('Scan count is %d instead of 1' %
-                                          scan_count)
-
-                self._validate_image_data(lorgnette_instance.image_data)
-                self._validate_mock_method_calls(
-                        lorgnette_instance.get_method_calls())
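
The comment block inside the deleted test above documents how the
button_click.event recording was captured: find the mouse's
/dev/input/eventXX node, then run evemu-record while performing the
move-and-click gesture. A sketch of that procedure wrapped in Python; the
sysfs lookup and output path here are illustrative, and evemu must be
installed on the machine:

    # Sketch: capture a new gesture recording the way the deleted test's
    # comment describes. Stop evemu-record with Ctrl-C once the gesture is
    # done; the resulting file can replace button_click.event.
    import glob
    import subprocess

    def find_mouse_event_node():
        """Return the /dev/input/eventN node whose device name mentions 'mouse'."""
        for name_file in glob.glob('/sys/class/input/event*/device/name'):
            with open(name_file) as f:
                if 'mouse' in f.read().lower():
                    return '/dev/input/' + name_file.split('/')[4]
        raise RuntimeError('no mouse-like input device found')

    def record_gesture(out_path='/tmp/button_click.event'):
        """Record events from the mouse node into out_path via evemu-record."""
        node = find_mouse_event_node()
        with open(out_path, 'w') as out:
            subprocess.call(['evemu-record', node], stdout=out)
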
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/lorgnette-test.png b/client/site_tests/documentscan_AppTestWithFakeLorgnette/lorgnette-test.png
deleted file mode 100644
index 0b29ddb..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/lorgnette-test.png
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/documentscan_AppTestWithFakeLorgnette/mock_lorgnette.py b/client/site_tests/documentscan_AppTestWithFakeLorgnette/mock_lorgnette.py
deleted file mode 100644
index 521ee93..0000000
--- a/client/site_tests/documentscan_AppTestWithFakeLorgnette/mock_lorgnette.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import dbus
-import dbus.service
-import dbus.mainloop.glib
-import gobject
-import logging
-import os
-import threading
-import time
-
-""" MockLorgnette provides mocked methods from the lorgnette
-    D-Bus API so that we can perform an image scan operation in
-    Chrome without access to a physical scanner. """
-
-MethodCall = collections.namedtuple("MethodCall", ["method", "argument"])
-
-class LorgnetteManager(dbus.service.Object):
-    """ The lorgnette DBus Manager object instance.  Methods in this
-        object are called whenever a DBus RPC method is invoked. """
-
-    SCANNER_NAME = 'scanner1'
-    SCANNER_MANUFACTURER = 'Chromascanner'
-    SCANNER_MODEL = 'Fakebits2000'
-    SCANNER_TYPE = 'Virtual'
-
-    def __init__(self, bus, object_path, scan_image_data):
-        dbus.service.Object.__init__(self, bus, object_path)
-        self.method_calls = []
-        self.scan_image_data = scan_image_data
-
-
-    @dbus.service.method('org.chromium.lorgnette.Manager',
-                         in_signature='', out_signature='a{sa{ss}}')
-    def ListScanners(self):
-        """Lists available scanners. """
-        self.add_method_call('ListScanners', '')
-        return { self.SCANNER_NAME: {
-                       'Manufacturer': self.SCANNER_MANUFACTURER,
-                       'Model': self.SCANNER_MODEL,
-                       'Type': self.SCANNER_TYPE }}
-
-
-    @dbus.service.method('org.chromium.lorgnette.Manager',
-                         in_signature='sha{sv}', out_signature='')
-    def ScanImage(self, device, out_fd, scan_properties):
-        """Writes test image data to |out_fd|.  Do so in chunks since the
-        entire dataset cannot be successfully written at once.
-
-        @param device string name of the device to scan from.
-        @param out_fd file handle for the output scan data.
-        @param scan_properties dict containing parameters for the scan.
-
-        """
-        self.add_method_call('ScanImage', (device, scan_properties))
-        scan_output_fd = out_fd.take()
-        os.write(scan_output_fd, self.scan_image_data)
-        os.close(scan_output_fd)
-
-        # TODO(pstew): Ensure the timing between return of this method
-        # and the EOF returned to Chrome at the end of this data stream
-        # are distinct.  This comes naturally with a real scanner.
-        time.sleep(1)
-
-
-    def add_method_call(self, method, arg):
-        """Note that a method call was made.
-
-        @param method string the method that was called.
-        @param arg tuple of arguments that |method| was called with.
-
-        """
-        logging.info("Mock Lorgnette method %s called with argument %s",
-                     method, arg)
-        self.method_calls.append(MethodCall(method, arg))
-
-
-    def get_method_calls(self):
-        """Provides the method call list and clears it internally.
-
-        @return list of MethodCall objects
-
-        """
-        method_calls = self.method_calls
-        self.method_calls = []
-        return method_calls
-
-
-class MockLorgnette(threading.Thread):
-    """This thread object instantiates a mock lorgnette manager and
-    runs a mainloop that receives DBus API messages. """
-    LORGNETTE = "org.chromium.lorgnette"
-    def __init__(self, image_file):
-        threading.Thread.__init__(self)
-        gobject.threads_init()
-        self.image_file = image_file
-
-
-    def __enter__(self):
-        self.start()
-        return self
-
-
-    def __exit__(self, type, value, tb):
-        self.quit()
-        self.join()
-
-
-    def run(self):
-        """Runs the main loop."""
-        dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
-        self.bus = dbus.SystemBus()
-        name = dbus.service.BusName(self.LORGNETTE, self.bus)
-        with open(self.image_file) as f:
-            self.image_data = f.read()
-        self.manager = LorgnetteManager(
-                self.bus, '/org/chromium/lorgnette/Manager', self.image_data)
-        self.mainloop = gobject.MainLoop()
-        self.mainloop.run()
-
-
-    def quit(self):
-        """Quits the main loop."""
-        self.mainloop.quit()
-
-
-    def get_method_calls(self):
-        """Returns the method calls that were called on the mock object.
-
-        @return list of MethodCall objects representing the methods called.
-
-         """
-        return self.manager.get_method_calls()
-
-
-if __name__ == '__main__':
-    # Standalone debugging entry point; expects the test image alongside.
-    MockLorgnette('lorgnette-test.png').run()
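
Because the deleted mock_lorgnette.py claims the org.chromium.lorgnette name
on the system bus, it can be poked directly with dbus-python to confirm the
fake daemon is answering, roughly the way Chrome's lorgnette client would.
A sketch, assuming the mock is already running and dbus-python is available:

    # Sketch: query the mock lorgnette daemon over D-Bus.
    import dbus

    def list_fake_scanners():
        """Return the scanner dictionary advertised by the mock daemon."""
        bus = dbus.SystemBus()
        obj = bus.get_object('org.chromium.lorgnette',
                             '/org/chromium/lorgnette/Manager')
        manager = dbus.Interface(obj, 'org.chromium.lorgnette.Manager')
        # Expected to contain 'scanner1' with the fake Chromascanner metadata.
        return dict(manager.ListScanners())
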
diff --git a/client/site_tests/dummy_Fail/control b/client/site_tests/dummy_Fail/control
index e8cc260..7cf3fda 100644
--- a/client/site_tests/dummy_Fail/control
+++ b/client/site_tests/dummy_Fail/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "dummy_Fail"
 PURPOSE = "Demonstrate failure methods of autotests."
 CRITERIA = "This test will never succeed."
@@ -15,6 +15,7 @@
 TEST_CLASS = "dummy"
 TEST_TYPE = "client"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This is a helper test that will fail in a number of ways.
@@ -25,4 +26,3 @@
 job.run_test('dummy_Fail', tag='Warn', to_throw='TestWarn')
 job.run_test('dummy_Fail', tag='NAError', to_throw='TestNAError')
 job.run_test('dummy_Fail', tag='Crash', to_throw=None)
-
diff --git a/client/site_tests/dummy_Fail/control.dependency b/client/site_tests/dummy_Fail/control.dependency
index 7dd5938..abb0e1f 100644
--- a/client/site_tests/dummy_Fail/control.dependency
+++ b/client/site_tests/dummy_Fail/control.dependency
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "dummy_Fail.dependency"
 PURPOSE = """
 Test if dependency is properly processed, the test should be shown as TEST_NA in
@@ -16,6 +16,7 @@
 TEST_CATEGORY = "General"
 TEST_CLASS = "dummy"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This is a helper test that will fail in a number of ways.
diff --git a/client/site_tests/dummy_Fail/control.naerror b/client/site_tests/dummy_Fail/control.naerror
index ea8f6b0..a97706f 100644
--- a/client/site_tests/dummy_Fail/control.naerror
+++ b/client/site_tests/dummy_Fail/control.naerror
@@ -9,6 +9,7 @@
 TEST_CATEGORY = "General"
 TEST_CLASS = "dummy"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This is a helper test that will always generate TEST_NA
diff --git a/client/site_tests/dummy_Fail/dummy_Fail.py b/client/site_tests/dummy_Fail/dummy_Fail.py
index 466e2db..bd275fd 100644
--- a/client/site_tests/dummy_Fail/dummy_Fail.py
+++ b/client/site_tests/dummy_Fail/dummy_Fail.py
@@ -28,7 +28,8 @@
         if retry_count == retry_success_count:
             return
         if to_throw:
-            if to_throw == 'TestFail': logging.error('It is an error!')
+            if to_throw == 'TestFail':
+                logging.error('It is an error!')
             raise getattr(error, to_throw)('always fail')
         else:  # Generate a crash to test that behavior.
             self.write_perf_keyval({'perf_key': 102.7})
diff --git a/client/site_tests/dummy_IdleSuspend/dummy_IdleSuspend.py b/client/site_tests/dummy_IdleSuspend/dummy_IdleSuspend.py
deleted file mode 100644
index 64f3ab9..0000000
--- a/client/site_tests/dummy_IdleSuspend/dummy_IdleSuspend.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os, time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.power import power_suspend, power_utils
-
-
-class dummy_IdleSuspend(test.test):
-    """
-    This is not a complete test. It is a dummy test that must be run in parallel
-    with power_SuspendStress(method='idle') to control powerd idle values and
-    perform a login.
-    """
-    version = 1
-
-    _IDLE_TIMINGS = {
-        'disable_idle_suspend': 0,
-        'ignore_external_policy': 1,
-        'unplugged_dim_ms': 4000,
-        'unplugged_off_ms': 6000,
-        'unplugged_suspend_ms': 8000,
-        'plugged_dim_ms': 4000,
-        'plugged_off_ms': 6000,
-        'plugged_suspend_ms': 8000,
-    }
-
-    # Don't wait longer than this to start... if power_SuspendStress died before
-    # creating the HWCLOCK_FILE, we might otherwise wait forever
-    _TEST_START_TIMEOUT = 70
-
-    def run_once(self):
-        with chrome.Chrome():
-            # Just idle while power_SuspendStress does all the work. Existence
-            # of the HWCLOCK_FILE tells us when it starts and when it's done.
-            for _ in xrange(self._TEST_START_TIMEOUT):
-                time.sleep(1)
-                if os.path.exists(power_suspend.Suspender.HWCLOCK_FILE):
-                    break
-            else:
-                raise error.TestError("Parallel test didn't create Suspender.")
-
-            # These must not be enabled too soon, or the system might suspend
-            # before a wakeup is scheduled. They must not be disabled too late
-            # either, or we might suspend again after the parallel test is done.
-            power_prefs = power_utils.PowerPrefChanger(self._IDLE_TIMINGS)
-
-            while os.path.exists(power_suspend.Suspender.HWCLOCK_FILE):
-                time.sleep(1)
-
-            power_prefs.finalize()
diff --git a/client/site_tests/dummy_Pass/control b/client/site_tests/dummy_Pass/control
deleted file mode 100644
index 46be50d..0000000
--- a/client/site_tests/dummy_Pass/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "dummy_Pass"
-PURPOSE = "Demonstrate success methods of autotests."
-CRITERIA = "This test will always succeed."
-ATTRIBUTES = (
-        "suite:dummy, suite:dummyclientretries, suite:push_to_prod,"
-        " suite:skylab_staging_test, suite:something_else,"
-        " suite:dev_drone_image_test, suite:infra_qual"
-)
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "client"
-MAX_RESULT_SIZE_KB = 5000
-
-DOC = """
-This is a helper test that will succeed.
-"""
-
-job.run_test('dummy_Pass')
diff --git a/client/site_tests/dummy_Pass/control.actionable b/client/site_tests/dummy_Pass/control.actionable
deleted file mode 100644
index 35bf803..0000000
--- a/client/site_tests/dummy_Pass/control.actionable
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "dummy_Pass.actionable"
-PURPOSE = "Demonstrate success methods of autotests."
-CRITERIA = "This test will always succeed."
-ATTRIBUTES = (
-        "suite:dummy, suite:dummyclientretries, suite:push_to_prod,"
-        " suite:skylab_staging_test, suite:something_else"
-)
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "client"
-DEPENDENCIES = "cleanup-reboot"
-
-DOC = """
-This is a helper test that will succeed.
-"""
-
-job.run_test('dummy_Pass', tag='actionable')
diff --git a/client/site_tests/dummy_Pass/control.bluetooth b/client/site_tests/dummy_Pass/control.bluetooth
deleted file mode 100755
index 5cf4f17..0000000
--- a/client/site_tests/dummy_Pass/control.bluetooth
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "dummy_Pass.bluetooth"
-PURPOSE = "Demonstrate DEPENDENCIES in autotests."
-CRITERIA = "This test will always succeed."
-DEPENDENCIES = "bluetooth"
-ATTRIBUTES = "suite:dummy, suite:push_to_prod, suite:skylab_staging_test"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "client"
-
-DOC = """
-This is a helper test that can only run on bluetooth devices,
-and should succeed trivially.
-"""
-
-job.run_test('dummy_Pass', tag='bluetooth')
diff --git a/client/site_tests/dummy_Pass/control.experimental b/client/site_tests/dummy_Pass/control.experimental
deleted file mode 100644
index 22744f1..0000000
--- a/client/site_tests/dummy_Pass/control.experimental
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "dummy_Pass.experimental"
-PURPOSE = "Demonstrate success methods of autotests."
-CRITERIA = "This test will always succeed."
-ATTRIBUTES = "suite:dummy, suite:dummyclientretries, suite:something_else"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "client"
-
-DOC = """
-This is a helper test that will succeed.
-"""
-
-job.run_test('dummy_Pass', tag='experimental')
-
-
diff --git a/client/site_tests/dummy_Pass/control.wifichaos b/client/site_tests/dummy_Pass/control.wifichaos
deleted file mode 100644
index bb6bd0d..0000000
--- a/client/site_tests/dummy_Pass/control.wifichaos
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "dummy_Pass.wifichaos"
-PURPOSE = "To re-image chaos_dut machines nightly."
-CRITERIA = "This test will always succeed."
-ATTRIBUTES = "suite:wifichaos"
-DEPENDENCIES = "chaos_dut"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "client"
-
-DOC = """
-This is a placeholder test for allowing the scheduler to install tests on the
-chaos_dut machines
-"""
-
-job.run_test('dummy_Pass', tag='wifichaos')
-
-
diff --git a/client/site_tests/dummy_Pass/def.star b/client/site_tests/dummy_Pass/def.star
deleted file mode 100644
index f62258a..0000000
--- a/client/site_tests/dummy_Pass/def.star
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-load("//metadata/test_common.star", "test_common")
-
-DOC = """
-This is a helper test that will succeed. Used to verify various
-autotest scheduling features, including pass results, dependencies, etc.
-"""
-
-TESTS = [
-    test_common.define_client_test(
-        test_name = "dummy_Pass",
-        purpose = "Demonstrate success methods of autotests.",
-        doc = DOC,
-        owner_emails = ["email_addr@chromium.org"],
-        owner_groups = ["team-mdb-group"],
-        suites = ["dummy", "dummyclientretries", "push_to_prod",
-            "skylab_staging_test", "something_else"],
-        #TODO: max_result_size_kb = 5000
-    ),
-
-    test_common.define_client_test(
-        test_name = "dummy_Pass.actionable",
-        purpose = "Demonstrate success methods of autotests",
-        doc = DOC,
-        owner_emails = ["email_addr@chromium.org"],
-        suites = ["dummy", "dummyclientretries", "push_to_prod",
-            "skylab_staging_test", "something_else"],
-        #TODO: common_deps = ["cleanup-reboot"],
-        named_args = {"tag": "actionable"},
-    ),
-
-    test_common.define_client_test(
-        test_name = "dummy_Pass.bluetooth",
-        purpose = "Demonstrate DEPENDENCIES in autotests.",
-        doc = DOC,
-        owner_emails = ["email_addr@chromium.org"],
-        suites = ["dummy", "push_to_prod", "skylab_staging_test"],
-        common_deps = ["bluetooth"],
-        named_args = {"tag": "bluetooth"},
-    ),
-
-    test_common.define_client_test(
-        test_name = "dummy_Pass.experimental",
-        purpose = "Demonstrate success methods of autotests.",
-        doc = DOC,
-        owner_emails = ["email_addr@chromium.org"],
-        suites = ["dummy", "dummyclientretries", "something_else"],
-        named_args = {"tag": "experimental"},
-    ),
-]
diff --git a/client/site_tests/dummy_Pass/dummy_Pass.py b/client/site_tests/dummy_Pass/dummy_Pass.py
deleted file mode 100644
index 208841a..0000000
--- a/client/site_tests/dummy_Pass/dummy_Pass.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-class dummy_Pass(test.test):
-    version = 1
-
-    def run_once(self):
-        return
diff --git a/client/site_tests/dummy_Pass/wifichaos.star b/client/site_tests/dummy_Pass/wifichaos.star
deleted file mode 100644
index aa975db..0000000
--- a/client/site_tests/dummy_Pass/wifichaos.star
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-load("//metadata/test_common.star", "test_common")
-
-DOC = """
-This is a placeholder test for allowing the scheduler to install tests on the
-chaos_dut machines.
-"""
-
-TESTS = [
-    test_common.define_client_test(
-        test_name = "dummy_Pass.wifichaos",
-        purpose = "To re-image chaos_dut machines nightly.",
-        doc = DOC,
-        # TODO: find real owners for this test.
-        owner_emails = ["email_addr@chromium.org"],
-        suites = ["wifichaos"],
-        # TODO: common_deps = ["chaos_dut"],
-    ),
-]
diff --git a/client/site_tests/dummy_SynchronousOffload/control b/client/site_tests/dummy_SynchronousOffload/control
deleted file mode 100644
index 6d81c37..0000000
--- a/client/site_tests/dummy_SynchronousOffload/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "jkop@google.com"
-NAME = "dummy_SynchronousOffload"
-PURPOSE = "Verify tests can offload output from DUTs."
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "client"
-# Disable this test until it can be fixed: http://b/171572182
-# ATTRIBUTES = "suite:offloads"
-
-DOC = """
-This test creates a file in $SYNCHRONOUS_OFFLOAD_DIR on the DUT and succeeds.
-The task will fail only if the creation or offload of that file fails.
-"""
-
-job.run_test('dummy_SynchronousOffload')
diff --git a/client/site_tests/dummy_SynchronousOffload/dummy_SynchronousOffload.py b/client/site_tests/dummy_SynchronousOffload/dummy_SynchronousOffload.py
deleted file mode 100644
index 9b6736f..0000000
--- a/client/site_tests/dummy_SynchronousOffload/dummy_SynchronousOffload.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-class dummy_SynchronousOffload(test.test):
-  version = 1
-
-  def initialize(self):
-    pass
-
-  def run_once(self):
-    DIR = os.getenv('SYNCHRONOUS_OFFLOAD_DIR', "")
-    if DIR == "":
-      raise error.TestFail("Did not find value for SYNCHRONOUS_OFFLOAD_DIR")
-    if not os.path.isdir(DIR):
-      raise error.TestFail("$SYNCHRONOUS_OFFLOAD_DIR=%s, which is not "
-                           "a valid directory." % DIR)
-    logging.debug("Writing to directory %s", DIR)
-    with open(os.path.join(DIR,"test_file"), "w") as f:
-      f.write("Test string which should be offloaded")
-      logging.debug("Wrote string to test file.")
-
-  def cleanup(self):
-    pass
diff --git a/client/site_tests/enterprise_CFM_ApolloPeripheralPowerCycle/control.viking b/client/site_tests/enterprise_CFM_ApolloPeripheralPowerCycle/control.viking
deleted file mode 100644
index 700eb78..0000000
--- a/client/site_tests/enterprise_CFM_ApolloPeripheralPowerCycle/control.viking
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "meet-devices-eng@google.com"
-NAME = "enterprise_CFM_ApolloPeripheralPowerCycle.viking"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-ATTRIBUTES = "suite:hotrod-remora"
-TEST_TYPE = "client"
-# TODO: Apollo peripherals should be added as Skylab inventory labels, e.g.:
-# DEPENDENCIES = "viking"
-
-DOC = """
-This test disables then re-enables Power over Ethernet for the connected Viking soundbar.
-"""
-
-# Viking is always port 0
-# (Our current Apollo has a Falcon Lite on 1 and a Houston on 2)
-job.run_test('enterprise_CFM_ApolloPeripheralPowerCycle', port=0)
diff --git a/client/site_tests/enterprise_CFM_ApolloPeripheralPowerCycle/enterprise_CFM_ApolloPeripheralPowerCycle.py b/client/site_tests/enterprise_CFM_ApolloPeripheralPowerCycle/enterprise_CFM_ApolloPeripheralPowerCycle.py
deleted file mode 100644
index ae016b2..0000000
--- a/client/site_tests/enterprise_CFM_ApolloPeripheralPowerCycle/enterprise_CFM_ApolloPeripheralPowerCycle.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.cfm.apollo import apollo_utils
-
-
-class enterprise_CFM_ApolloPeripheralPowerCycle(test.test):
-    """
-    Disables then re-enables Power over Ethernet for the given IP port.
-
-    NOTE: This test turns off the given peripheral, which could impact other Apollo tests.
-    (autotests shouldn't assume anything about the device state.)
-    """
-    version = 1
-
-    def run_once(self, port):
-        apollo_utils.ectool_pse_disable(port)
-        status = apollo_utils.ectool_pse_status(port)
-
-        if status != 'disabled':
-            raise error.TestError('IP port status not disabled')
-
-        # TODO: Is there meaningful check we can add here?
-        # (We may need to add a "peripheral" param as well)
-
-        apollo_utils.ectool_pse_enable(port)
-        status = apollo_utils.ectool_pse_status(port)
-
-        if status not in ['enabled', 'powered']:
-            raise error.TestError('IP port status not enabled')
diff --git a/client/site_tests/enterprise_CFM_AtrusUpdaterStress/control b/client/site_tests/enterprise_CFM_AtrusUpdaterStress/control
deleted file mode 100644
index aafa3a0..0000000
--- a/client/site_tests/enterprise_CFM_AtrusUpdaterStress/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dtosic@chromium.org, wirebrand@chromium.org"
-NAME = "enterprise_CFM_AtrusUpdaterStress"
-TIME = "LONG"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "enterprise"
-ATTRIBUTES = "suite:hotrod-remora"
-TEST_TYPE = "client"
-DEPENDENCIES = "atrus"
-
-DOC = """
-This test repeatedly runs a firmware upgrade of the device. During each
-upgrade, the transfer of the binary and the write of the binary to flash are
-validated.
-"""
-
-job.run_test('enterprise_CFM_AtrusUpdaterStress', repeat=10)
diff --git a/client/site_tests/enterprise_CFM_AtrusUpdaterStress/enterprise_CFM_AtrusUpdaterStress.py b/client/site_tests/enterprise_CFM_AtrusUpdaterStress/enterprise_CFM_AtrusUpdaterStress.py
deleted file mode 100644
index 4f38067..0000000
--- a/client/site_tests/enterprise_CFM_AtrusUpdaterStress/enterprise_CFM_AtrusUpdaterStress.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Auto test for Atrus firmware updater functionality."""
-
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.cfm.atrus import atrus_utils
-
-
-class enterprise_CFM_AtrusUpdaterStress(test.test):
-    """
-    Atrus firmware updater functionality test for Chrome Box for Meetings.
-
-    The procedure of the test is:
-    1. Trigger forced upgrade of the atrus via atrusctl with dbus.
-    2. Wait for the updater to finish, and check status of upgrade.
-            The upgrade will be successful if the transfer of the binary was
-            successful, and if the writing of the binary to flash was
-            successful.
-    3. Repeat
-    """
-
-    version = 1
-
-    def run_once(self, repeat=1):
-        """Main test procedure."""
-        successfull_upgrades = 0
-
-        # Check if Atrusctl is running and have dbus enabled
-        if not atrus_utils.check_dbus_available():
-            raise error.TestError('No DBus support in atrusd.')
-
-        for cycle in xrange(repeat):
-
-            atrus_utils.wait_for_atrus_enumeration()
-
-            if atrus_utils.force_upgrade_atrus():
-                successfull_upgrades += 1
-
-            logging.info('Successful attempts: {}/{}'
-                    .format(successfull_upgrades,cycle+1))
-
-        if successfull_upgrades < repeat:
-            raise error.TestFail('Upgrade failed in {}/{} of tries.'
-                    .format(repeat - successfull_upgrades, repeat))
diff --git a/client/site_tests/enterprise_FakeEnrollment/control b/client/site_tests/enterprise_FakeEnrollment/control
deleted file mode 100644
index efe2038..0000000
--- a/client/site_tests/enterprise_FakeEnrollment/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith"
-NAME = "enterprise_FakeEnrollment"
-TIME = "SHORT"
-TEST_CATEGORY = "Enterprise"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "client"
-# Reboot after test ends.
-DEPENDENCIES='cleanup-reboot'
-
-DOC = """
-This test enrolls a ChromeOS device without GAIA or a DMServer.
-"""
-
-job.run_test('enterprise_FakeEnrollment')
diff --git a/client/site_tests/enterprise_FakeEnrollment/enterprise_FakeEnrollment.py b/client/site_tests/enterprise_FakeEnrollment/enterprise_FakeEnrollment.py
deleted file mode 100644
index f9b9c05..0000000
--- a/client/site_tests/enterprise_FakeEnrollment/enterprise_FakeEnrollment.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-class enterprise_FakeEnrollment(enterprise_policy_base.EnterprisePolicyTest):
-    """Test to fake enroll and fake login using a fake dmserver."""
-    version = 1
-
-    def run_once(self):
-        """Enroll and login."""
-        self.setup_case(enroll=True, auto_login=True)
diff --git a/client/site_tests/enterprise_KioskEnrollment/control b/client/site_tests/enterprise_KioskEnrollment/control
deleted file mode 100644
index 7958df4..0000000
--- a/client/site_tests/enterprise_KioskEnrollment/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith, zelidrag"
-NAME = "enterprise_KioskEnrollment"
-TIME = "SHORT"
-TEST_CATEGORY = "Enterprise"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "client"
-# Reboot after test ends.
-DEPENDENCIES='cleanup-reboot'
-
-DOC = """
-This test enrolls a ChromeOS device in Kiosk mode.
-"""
-
-job.run_test('enterprise_KioskEnrollment')
diff --git a/client/site_tests/enterprise_KioskEnrollment/credentials.chromesign b/client/site_tests/enterprise_KioskEnrollment/credentials.chromesign
deleted file mode 100644
index 44e036e..0000000
--- a/client/site_tests/enterprise_KioskEnrollment/credentials.chromesign
+++ /dev/null
@@ -1 +0,0 @@
-kiosk.longevity.default@croste.tv:test0000
diff --git a/client/site_tests/enterprise_KioskEnrollment/credentials.riseplayer b/client/site_tests/enterprise_KioskEnrollment/credentials.riseplayer
deleted file mode 100644
index 1b79745..0000000
--- a/client/site_tests/enterprise_KioskEnrollment/credentials.riseplayer
+++ /dev/null
@@ -1 +0,0 @@
-riseplayer.longevity@croste.tv:test0000
diff --git a/client/site_tests/enterprise_KioskEnrollment/enterprise_KioskEnrollment.py b/client/site_tests/enterprise_KioskEnrollment/enterprise_KioskEnrollment.py
deleted file mode 100644
index 0187df9..0000000
--- a/client/site_tests/enterprise_KioskEnrollment/enterprise_KioskEnrollment.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.common_lib.cros import enrollment
-from autotest_lib.client.common_lib.cros import kiosk_utils
-from autotest_lib.client.common_lib import utils as utils2
-
-KIOSK_MODE = 'Starting kiosk mode...'
-
-
-class enterprise_KioskEnrollment(test.test):
-    """Enroll the device in enterprise."""
-    version = 1
-
-    APP_NAME = 'chromesign'
-    EXT_ID = 'odjaaghiehpobimgdjjfofmablbaleem'
-    EXT_PAGE = 'viewer.html'
-
-    def run_once(self, kiosk_app_attributes=None):
-        if kiosk_app_attributes:
-            self.APP_NAME, self.EXT_ID, self.EXT_PAGE = \
-                    kiosk_app_attributes.rstrip().split(':')
-        user_id, password = utils.get_signin_credentials(os.path.join(
-                os.path.dirname(os.path.realpath(__file__)),
-                'credentials.' + self.APP_NAME))
-        if not (user_id and password):
-            logging.warn('No credentials found - exiting test.')
-            return
-
-        with chrome.Chrome(auto_login=False,
-                           disable_gaia_services=False) as cr:
-            enrollment.EnterpriseEnrollment(cr.browser, user_id, password)
-
-        time.sleep(15)
-        running_apps = utils2.system_output(
-            'cat /var/log/messages | grep kiosk')
-        if KIOSK_MODE not in running_apps:
-            raise error.TestFail(
-                'DUT did not enter kiosk mode, but it should have.')
diff --git a/client/site_tests/enterprise_KioskPerf/control b/client/site_tests/enterprise_KioskPerf/control
deleted file mode 100644
index 17f586a..0000000
--- a/client/site_tests/enterprise_KioskPerf/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "rohitbm"
-NAME = "enterprise_KioskPerf"
-PURPOSE = 'Records cpu and memory usage for demo running in Kiosk mode.'
-TIME = "SHORT"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "client"
-
-DOC = """
-This test enterprise-enrolls a ChromeOS device, launches Kiosk mode, and
-records CPU/memory usage for the given duration.
-
-You need a credentials.txt file with user_id:password in this directory for
-this test to succeed. The credentials are used to enroll the device as a Remora
-device.
-"""
-
-job.run_test('enterprise_KioskPerf')
diff --git a/client/site_tests/enterprise_KioskPerf/enterprise_KioskPerf.py b/client/site_tests/enterprise_KioskPerf/enterprise_KioskPerf.py
deleted file mode 100644
index b405969..0000000
--- a/client/site_tests/enterprise_KioskPerf/enterprise_KioskPerf.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import csv, logging, os
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-
-# Measurement duration [seconds] for one iteration.
-MEASUREMENT_DURATION = 10
-
-TOTAL_TEST_DURATION = 600  # Total test duration [seconds]; raise for multi-day longevity runs.
-
-# Time to exclude from calculation after launching the demo [seconds].
-STABILIZATION_DURATION = 20
-
-_PERF_RESULT_FILE = '/tmp/perf.csv'
-
-class enterprise_KioskPerf(test.test):
-    """Enrolls to kiosk mode and monitors cpu/memory usage."""
-
-    version = 1
-
-
-    def test_cpu_usage(self):
-        """
-        Measures CPU usage over one MEASUREMENT_DURATION interval.
-
-        @returns active CPU time during the interval, as a percentage.
-        """
-        cpu_usage_start = utils.get_cpu_usage()
-        time.sleep(MEASUREMENT_DURATION)
-        cpu_usage_end = utils.get_cpu_usage()
-        return utils.compute_active_cpu_time(cpu_usage_start,
-                                                  cpu_usage_end) * 100
-
-
-    def used_mem(self):
-        """Returns total used memory in %."""
-        total_memory = utils.get_mem_total()
-        return (total_memory - utils.get_mem_free()) * 100 / total_memory
-
-    def verify_enrollment(self, user_id):
-        """Verifies enterprise enrollment using /home/.shadow config."""
-        with open('/home/.shadow/install_attributes.pb') as f:
-            if not user_id in f.read():
-                raise error.TestError('Device is not enrolled or '
-                                      'enterprise owned.')
-
-    def run_once(self):
-        user_id, password = utils.get_signin_credentials(os.path.join(
-                os.path.dirname(os.path.realpath(__file__)), 'credentials.txt'))
-        if not (user_id and password):
-            logging.warn('No credentials found - exiting test.')
-            return
-
-        with chrome.Chrome(auto_login=False) as cr:
-            cr.browser.oobe.NavigateGaiaLogin(
-                    user_id, password,
-                    enterprise_enroll=True,
-                    for_user_triggered_enrollment=True)
-            time.sleep(STABILIZATION_DURATION)
-            self.verify_enrollment(user_id)
-            start_time = time.time()
-            perf_keyval = {}
-            perf_file = open(_PERF_RESULT_FILE, 'w')
-            writer = csv.writer(perf_file)
-            writer.writerow(['cpu','memory', 'timestamp'])
-            while (time.time() - start_time) < TOTAL_TEST_DURATION:
-                perf_keyval['cpu_usage'] = self.test_cpu_usage()
-                perf_keyval['memory_usage'] = self.used_mem()
-                writer.writerow([perf_keyval['cpu_usage'],
-                                perf_keyval['memory_usage'],
-                                time.strftime('%Y/%m/%d %H:%M:%S')])
-                self.write_perf_keyval(perf_keyval)
-                time.sleep(10)
-            perf_file.close()
diff --git a/client/site_tests/enterprise_OnlineDemoModeEnrollment/control b/client/site_tests/enterprise_OnlineDemoModeEnrollment/control
deleted file mode 100644
index c50044a..0000000
--- a/client/site_tests/enterprise_OnlineDemoModeEnrollment/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "wzang"
-NAME = "enterprise_OnlineDemoModeEnrollment"
-TIME = "SHORT"
-TEST_CATEGORY = "Enterprise"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "client"
-# Reboot after test ends.
-DEPENDENCIES='cleanup-reboot'
-
-DOC = """
-This test enrolls a Chrome OS device into online Demo Mode.
-
-This test will not be run standalone. It will be kicked off from a server side
-test.
-"""
-
-job.run_test('enterprise_OnlineDemoModeEnrollment')
diff --git a/client/site_tests/enterprise_OnlineDemoModeEnrollment/enterprise_OnlineDemoModeEnrollment.py b/client/site_tests/enterprise_OnlineDemoModeEnrollment/enterprise_OnlineDemoModeEnrollment.py
deleted file mode 100644
index 04c553b..0000000
--- a/client/site_tests/enterprise_OnlineDemoModeEnrollment/enterprise_OnlineDemoModeEnrollment.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib.cros import chrome, enrollment
-from telemetry.core import exceptions
-
-# Id of the Infinite Painter app.
-_INFINITE_PAINTER_APP_ID = 'afihfgfghkmdmggakhkgnfhlikhdpima'
-
-def _launch_arc_app(autotest_ext, app_id):
-    try:
-        autotest_ext.ExecuteJavaScript('''
-            chrome.autotestPrivate.launchArcApp(
-              '%s', /* app_id */
-              '%s', /* intent */
-              function(app_launched) {
-                window.__app_launched = app_launched;
-            });
-        ''' % (app_id, 'intent'))
-        return autotest_ext.EvaluateJavaScript('window.__app_launched')
-    except exceptions.EvaluateException as e:
-        pass
-    return False
-
-class enterprise_OnlineDemoModeEnrollment(test.test):
-    """Enrolls to online demo mode."""
-    version = 1
-
-
-    def run_once(self):
-        """Starts online demo mode enrollment. Waits for active session to start
-           and launch an arc app.
-        """
-        with chrome.Chrome(
-                auto_login=False,
-                disable_gaia_services=False,
-                autotest_ext=True,
-                extra_browser_args='--force-devtools-available') as cr:
-            enrollment.OnlineDemoMode(cr.browser)
-            utils.poll_for_condition(
-                    condition=lambda: _launch_arc_app(cr.autotest_ext,
-                            _INFINITE_PAINTER_APP_ID),
-                    desc='Launching the app %s' %
-                            _INFINITE_PAINTER_APP_ID,
-                    timeout=300,
-                    sleep_interval=1)
\ No newline at end of file
diff --git a/client/site_tests/enterprise_PowerManagement/control b/client/site_tests/enterprise_PowerManagement/control
deleted file mode 100644
index 2389f36..0000000
--- a/client/site_tests/enterprise_PowerManagement/control
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kaliamoorthi, achuith"
-NAME = "enterprise_PowerManagement"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "client"
-
-DOC = """
-This test checks if setting the power management policies function as expected.
-"""
-
-job.run_test('enterprise_PowerManagement')
diff --git a/client/site_tests/enterprise_PowerManagement/enterprise_PowerManagement.py b/client/site_tests/enterprise_PowerManagement/enterprise_PowerManagement.py
deleted file mode 100644
index c1ad036..0000000
--- a/client/site_tests/enterprise_PowerManagement/enterprise_PowerManagement.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.common_lib.cros import policy
-from autotest_lib.client.cros import cryptohome
-from autotest_lib.client.cros.enterprise import enterprise_fake_dmserver
-from autotest_lib.client.cros.power import power_status
-
-
-class enterprise_PowerManagement(test.test):
-    """Verify the power management policy setting."""
-    version = 1
-
-    def initialize(self, percent_initial_charge_min=10):
-        """
-        Setup local variables and  init the fake DM server
-
-        @param percent_initial_charge_min: Minimum percentage of battery
-                                           required for the test to run.
-
-        """
-        # Username and password for the fake dm server can be anything
-        # they are not used to authenticate against GAIA.
-        self.username = 'fake-user@managedchrome.com'
-        self.password = 'fakepassword'
-
-        self._power_status = power_status.get_status()
-        if not self._power_status.on_ac():
-            # Ensure that the battery has some charge.
-            self._power_status.assert_battery_state(percent_initial_charge_min)
-        logging.info("Device power type is %s", self._power_type)
-
-        # Note: FakeDMServer requires policy protos to be installed.
-        policy.install_protobufs(self.autodir, self.job)
-        self.fake_dm_server = enterprise_fake_dmserver.FakeDMServer()
-        self.fake_dm_server.start(self.tmpdir, self.debugdir)
-
-    def cleanup(self):
-        """Close out anything used by this test."""
-        self.fake_dm_server.stop()
-
-    @property
-    def _power_type(self):
-        """
-        Returns appropriate power type based on whether DUT is on AC or not.
-
-        @returns string of power type.
-
-        """
-        if self._power_status.on_ac():
-            return "AC"
-
-        return "Battery"
-
-    def _setup_lock_policy(self):
-        """Setup policy to lock screen in 10 seconds of idle time."""
-        self._screen_lock_delay = 10
-        screen_lock_policy = '{ "%s": %d }' % (self._power_type,
-                                               self._screen_lock_delay * 1000)
-        policy_blob = """{
-            "google/chromeos/user": {
-                "mandatory": {
-                    "ScreenLockDelays": %s
-                }
-            },
-            "managed_users": [ "*" ],
-            "policy_user": "%s",
-            "current_key_index": 0,
-            "invalidation_source": 16,
-            "invalidation_name": "test_policy"
-        }""" % (json.dumps(screen_lock_policy), self.username)
-
-        self.fake_dm_server.setup_policy(policy_blob)
-
-    def _setup_logout_policy(self):
-        """Setup policy to logout in 10 seconds of idle time."""
-        self._screen_logout_delay = 10
-        idle_settings_policy = '''{
-            "%s": {
-                "Delays": {
-                    "ScreenDim": 2000,
-                    "ScreenOff": 3000,
-                    "IdleWarning": 4000,
-                    "Idle": %d
-                 },
-                 "IdleAction": "Logout"
-            }
-        }''' % (self._power_type, self._screen_logout_delay * 1000)
-
-        policy_blob = """{
-            "google/chromeos/user": {
-                "mandatory": {
-                    "PowerManagementIdleSettings": %s
-                }
-            },
-            "managed_users": [ "*" ],
-            "policy_user": "%s",
-            "current_key_index": 0,
-            "invalidation_source": 16,
-            "invalidation_name": "test_policy"
-        }""" % (json.dumps(idle_settings_policy), self.username)
-
-        self.fake_dm_server.setup_policy(policy_blob)
-
-    def _create_chrome(self):
-        """
-        Create an instance of chrome.
-
-        @returns a telemetry browser instance.
-
-        """
-        extra_browser_args = '--device-management-url=%s ' % (
-                self.fake_dm_server.server_url)
-        return chrome.Chrome(
-                extra_browser_args=extra_browser_args,
-                autotest_ext=True,
-                disable_gaia_services=False,
-                gaia_login=False,
-                username=self.username,
-                password=self.password,
-                expect_policy_fetch=True)
-
-    def run_once(self):
-        """Run the power management policy tests."""
-        self._setup_lock_policy()
-        with self._create_chrome() as cr:
-            utils.poll_for_condition(
-                    lambda: cr.login_status['isScreenLocked'],
-                    exception=error.TestFail('User is not locked'),
-                    timeout=self._screen_lock_delay * 2,
-                    sleep_interval=1,
-                    desc='Expects to find Chrome locked.')
-
-        self._setup_logout_policy()
-        with self._create_chrome() as cr:
-            utils.poll_for_condition(
-                    lambda: not cryptohome.is_vault_mounted(user=self.username,
-                            allow_fail=True),
-                            exception=error.TestFail('User is not logged out'),
-                            timeout=self._screen_logout_delay*2,
-                            sleep_interval=1,
-                            desc='Expects to find user logged out.')
diff --git a/client/site_tests/enterprise_RemoraRequisition/control b/client/site_tests/enterprise_RemoraRequisition/control
deleted file mode 100644
index 4fea538..0000000
--- a/client/site_tests/enterprise_RemoraRequisition/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith, zelidrag"
-NAME = "enterprise_RemoraRequisition"
-TIME = "SHORT"
-TEST_CATEGORY = "Enterprise"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "client"
-# Reboot after test ends.
-DEPENDENCIES='cleanup-reboot'
-
-DOC = """
-This test enrolls a Chrome device as a Remora device.
-
-You need a credentials.txt file with user_id:password in this directory for
-this test to succeed. The credentials are used to enroll the device as a Remora
-device.
-"""
-
-job.run_test('enterprise_RemoraRequisition')
diff --git a/client/site_tests/enterprise_RemoraRequisition/credentials.txt b/client/site_tests/enterprise_RemoraRequisition/credentials.txt
deleted file mode 100644
index ef73590..0000000
--- a/client/site_tests/enterprise_RemoraRequisition/credentials.txt
+++ /dev/null
@@ -1 +0,0 @@
-cr0s-cfm-la6-aut0t3st-us3r@croste.tv:test0000
diff --git a/client/site_tests/enterprise_RemoraRequisition/enterprise_RemoraRequisition.py b/client/site_tests/enterprise_RemoraRequisition/enterprise_RemoraRequisition.py
deleted file mode 100644
index cbdc8d0..0000000
--- a/client/site_tests/enterprise_RemoraRequisition/enterprise_RemoraRequisition.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib.cros import chrome, enrollment
-from autotest_lib.client.cros.multimedia import cfm_facade_native
-
-
-class enterprise_RemoraRequisition(test.test):
-    """Enroll as a Remora device."""
-    version = 1
-
-
-    def run_once(self):
-        """
-        Runs the test.
-        """
-        user_id, password = utils.get_signin_credentials(os.path.join(
-                os.path.dirname(os.path.realpath(__file__)), 'credentials.txt'))
-        if not (user_id and password):
-            logging.warn('No credentials found - exiting test.')
-            return
-
-        with chrome.Chrome(
-                auto_login=False,
-                disable_gaia_services=False,
-                extra_browser_args="--force-devtools-available") as cr:
-            enrollment.RemoraEnrollment(cr.browser, user_id, password)
-            self.cfm_facade = cfm_facade_native.CFMFacadeNative(cr, 'hotrod')
-            self.cfm_facade.check_hangout_extension_context()
diff --git a/client/site_tests/enterprise_RemoraRequisitionDisplayUsage/control b/client/site_tests/enterprise_RemoraRequisitionDisplayUsage/control
deleted file mode 100644
index f2fb2b7..0000000
--- a/client/site_tests/enterprise_RemoraRequisitionDisplayUsage/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "felixe"
-NAME = "enterprise_RemoraRequisitionDisplayUsage"
-TIME = "SHORT"
-TEST_CATEGORY = "Enterprise"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "client"
-DEPENDENCIES='mimo'
-ATTRIBUTES="suite:hotrod-remora"
-
-DOC = """
-This test verifies that the Chrome enrollment window is shown on the correct
-display in the Remora case.
-
-When there's a Mimo display present during OOBE it should be used to display
-the OOBE window.
-"""
-
-job.run_test('enterprise_RemoraRequisitionDisplayUsage')
diff --git a/client/site_tests/enterprise_RemoraRequisitionDisplayUsage/enterprise_RemoraRequisitionDisplayUsage.py b/client/site_tests/enterprise_RemoraRequisitionDisplayUsage/enterprise_RemoraRequisitionDisplayUsage.py
deleted file mode 100644
index caa2085..0000000
--- a/client/site_tests/enterprise_RemoraRequisitionDisplayUsage/enterprise_RemoraRequisitionDisplayUsage.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib.cros import chrome, enrollment
-from autotest_lib.client.common_lib import error
-from py_utils import TimeoutException
-
-
-class enterprise_RemoraRequisitionDisplayUsage(test.test):
-    """Start enrollment and ensure that the window is shown on a Mimo display"""
-    version = 1
-
-    def supports_display_fetching(self, oobe):
-        """Return whether Chromium supports fetching the primary display name"""
-        oobe.WaitForJavaScriptCondition(
-            'typeof Oobe !== \'undefined\'', timeout=10)
-
-        return oobe.EvaluateJavaScript(
-            '"getPrimaryDisplayNameForTesting" in Oobe')
-
-    def assert_mimo_is_primary(self, oobe):
-        """Fails the test if the Mimo is not the primary display"""
-        oobe.ExecuteJavaScript('window.__oobe_display = ""')
-
-        mimo_is_primary = ("Oobe.getPrimaryDisplayNameForTesting().then("
-            "display => window.__oobe_display = display);"
-            "window.__oobe_display.indexOf('MIMO') >= 0")
-
-        try:
-            oobe.WaitForJavaScriptCondition(mimo_is_primary, timeout=10)
-        except TimeoutException:
-            display = oobe.EvaluateJavaScript('window.__oobe_display')
-            raise error.TestFail(
-                'Primary display is {}, not Mimo'.format(display))
-
-    def run_once(self):
-        with chrome.Chrome(auto_login=False) as cr:
-            if not self.supports_display_fetching(cr.browser.oobe):
-                return
-
-            self.assert_mimo_is_primary(cr.browser.oobe)
diff --git a/client/site_tests/enterprise_SmbProviderDaemon/control b/client/site_tests/enterprise_SmbProviderDaemon/control
deleted file mode 100644
index fd42868..0000000
--- a/client/site_tests/enterprise_SmbProviderDaemon/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "allenvic"
-NAME = "enterprise_SmbProviderDaemon"
-TIME = "SHORT"
-TEST_CATEGORY = "Enterprise"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "client"
-
-DOC = """
-Runs commands on the smbprovider daemon. Connects to an actual SMB server.
-"""
-
-from autotest_lib.client.common_lib import base_utils
-
-args_dict = base_utils.args_to_dict(args)
-mount_path = args_dict.get('mount_path')
-
-job.run_test('enterprise_SmbProviderDaemon', mount_path=mount_path)
\ No newline at end of file
diff --git a/client/site_tests/enterprise_SmbProviderDaemon/enterprise_SmbProviderDaemon.py b/client/site_tests/enterprise_SmbProviderDaemon/enterprise_SmbProviderDaemon.py
deleted file mode 100644
index 3e2cbc9..0000000
--- a/client/site_tests/enterprise_SmbProviderDaemon/enterprise_SmbProviderDaemon.py
+++ /dev/null
@@ -1,412 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import sys
-
-from dbus.mainloop.glib import DBusGMainLoop
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.cros import smbprovider
-
-class enterprise_SmbProviderDaemon(test.test):
-    """
-    Test for SmbProvider Daemon.
-
-    """
-
-    version = 1
-
-    WORKGROUP = ''
-    USERNAME = ''
-    PASSWORD = ''
-
-    def setup(self):
-        """
-        Compiles protobufs for error type and input/output parameters.
-
-        """
-
-        os.chdir(self.srcdir)
-        utils.make('OUT_DIR=.')
-
-    def initialize(self):
-        """
-        Initializes the D-Bus loop and creates Python wrapper.
-
-        """
-
-        bus_loop = DBusGMainLoop(set_as_default=True)
-        self._smbprovider = smbprovider.SmbProvider(bus_loop, self.srcdir)
-
-        # Append path for directory_entry_pb2 imports.
-        sys.path.append(self.srcdir)
-
-    def run_once(self, mount_path):
-        """
-        Runs smbproviderd D-Bus commands.
-
-        @param mount_path: Address of the SMB share.
-        """
-
-        self.sanity_test(mount_path)
-
-    def _generate_random_id(self, size):
-        """
-        Generates a random string of size N.
-
-        @param size: Size of the generated string.
-
-        @return: Returns a random alphanumeric string of size N.
-
-        """
-
-        import string
-        import random
-
-        return ''.join(random.choice(string.ascii_uppercase +
-                                string.digits) for i in range(size))
-
-    def sanity_test(self, mount_path):
-        """
-        Sanity test that runs through all filesystem operations
-        on the SmbProvider Daemon.
-
-        @param mount_path: Address of the SMB share.
-
-        """
-
-        from directory_entry_pb2 import ERROR_EXISTS
-
-        # Mount the SMB share.
-        mount_id = self._check_mount(mount_path)
-
-        # Generate random directory.
-        rand_dir_id = self._generate_random_id(10)
-        test_dir = '/autotest_' + rand_dir_id + '/'
-        self._check_create_directory(mount_id, test_dir, False)
-
-        # Get metadata of a directory.
-        metadata = self._check_get_metadata(mount_id, test_dir)
-
-        # Check that GetMetadata has correct values of a directory.
-        self._check_metadata(test_dir[1:-1], 0, True, metadata)
-
-        # Create file inside directory.
-        test_file = test_dir + '1.txt'
-        self._check_create_file(mount_id, test_file)
-
-        # Open file with Read-Only privileges.
-        file_id = self._check_open_file(mount_id, test_file, False)
-        self._check_close_file(mount_id, file_id)
-
-        # Open + Close file with Read-Only privileges.
-        file_id = self._check_open_file(mount_id, test_file, False)
-        self._check_close_file(mount_id, file_id)
-
-        # Open file for writing.
-        file_id = self._check_open_file(mount_id, test_file, True)
-
-        # Write data to file.
-        data = 'Hello World!'
-        self._check_write_file(mount_id, file_id, 0, data)
-
-        # Read data from file.
-        read_data = self._check_read_file(mount_id, file_id, 0, len(data))
-
-        # Close file.
-        self._check_close_file(mount_id, file_id)
-
-        # Verify data is written to file correctly.
-        self._check_contents(data, read_data)
-
-        # Get the metadata of the file.
-        metadata = self._check_get_metadata(mount_id, test_file)
-
-        # Check that GetMetadeta has correct values of a file.
-        # TODO(jimmyxgong): len() only works properly for UTF-8. Find way to
-        # get size universally.
-        self._check_metadata('1.txt', len(data), False, metadata)
-
-        # Delete file.
-        self._check_delete_entry(mount_id, test_file, False)
-
-        # Create recursive directories.
-        recursive_dir = test_dir + 'test1/test2/'
-        self._check_create_directory(mount_id, recursive_dir, True)
-
-        # Create file within the new directory.
-        test_file2 = recursive_dir + '2.txt'
-        self._check_create_file(mount_id, test_file2)
-
-        # Check moving to existing entry is handled.
-        self._check_move_entry(mount_id, test_file2, test_dir, ERROR_EXISTS)
-
-        # Move file up to root test directory.
-        self._check_move_entry(mount_id, test_file2, test_dir + 'moved.txt')
-
-        # Move back down to original location.
-        self._check_move_entry(mount_id, test_dir + 'moved.txt', test_file2)
-
-        # TODO(jimmyxgong): Delete contents of autotest directory recursively.
-        self._check_delete_entry(mount_id, test_file2, False)
-        self._check_delete_entry(mount_id, test_dir + 'test1/test2/', False)
-        self._check_delete_entry(mount_id, test_dir + 'test1/', False)
-
-        # Delete autotest directory.
-        self._check_delete_entry(mount_id, test_dir, False)
-
-        # Unmount the SMB share.
-        self._check_unmount(mount_id)
-
-    def _check_mount(self, mount_path):
-        """
-        Checks that mount is working.
-
-        @param mount_path: Address of the SMB share.
-
-        @return mount_id: Unique identifier of the mount.
-
-        """
-
-        from directory_entry_pb2 import ERROR_OK
-
-        error, mount_id = self._smbprovider.mount(mount_path,
-                                                  self.WORKGROUP,
-                                                  self.USERNAME,
-                                                  self.PASSWORD)
-
-        if mount_id < 0 :
-            raise error.TestFail('Unexpected failure with mount id.')
-
-        self._check_result('Mount', error)
-        return mount_id
-
-    def _check_unmount(self, mount_id):
-        """
-        Checks that unmount is working.
-
-        @param mount_id: Unique identifier of the mount.
-
-        """
-
-        error = self._smbprovider.unmount(mount_id)
-
-        self._check_result('Unmount', error)
-
-    def _check_get_metadata(self, mount_id, entry_path):
-        """
-        Checks that get metadata is working.
-
-        @param mount_id: Unique identifier of the mount.
-        @param entry_path: Path of the entry.
-
-        @return: GetMetaDataEntryOptionsProto blob string returned by the D-Bus
-                 call.
-
-        """
-
-        error, metadata_blob = self._smbprovider.get_metadata(mount_id,
-                                                              entry_path)
-
-        self._check_result('Get Metadata', error)
-
-        return metadata_blob
-
-    def _check_metadata(self, entry_path, size, is_dir, metadata_blob):
-        """
-        Checks that metadata_blob has the correct values.
-
-        @param entry_path: File path of the entry we are checking.
-        @param size: Size of the entry in bytes.
-        @param is_dir: Boolean that indicates whether the entry is a directory.
-        @param metadata_blob: Blob that contains metadata of the entry.
-
-        """
-
-        if entry_path != metadata_blob.name or \
-                size != metadata_blob.size or \
-                is_dir != metadata_blob.is_directory:
-            logging.error('Failed: Metadata is incorrect')
-            raise error.TestFail('Unexpected error with metadata')
-
-    def _check_create_file(self, mount_id, file_path):
-        """
-        Checks that create file is working.
-
-        @param mount_id: Unique identifier of the mount.
-        @param file_path: Path of where the new file will be created.
-
-        """
-
-        error = self._smbprovider.create_file(mount_id, file_path)
-
-        self._check_result('Create File', error)
-
-    def _check_open_file(self, mount_id, file_path, writeable):
-        """
-        Checks that open file is working.
-
-        @param mount_id: Unique identifier of the mount.
-        @param file_path: Path of where the file is located.
-        @param writeable: Boolean to indicated whether the file should
-                be opened with write access.
-
-        """
-
-        error, file_id = self._smbprovider.open_file(mount_id,
-                                                     file_path,
-                                                     writeable)
-        if file_id < 0:
-            raise error.TestFail('Unexpected file id failure.')
-
-        self._check_result('Open File', error)
-
-        return file_id
-
-    def _check_close_file(self, mount_id, file_id):
-        """
-        Checks that close file is working.
-
-        @param mount_id: Unique identifier of the mount.
-        @param file_id: Unique identifier of the file.
-
-        """
-
-        error = self._smbprovider.close_file(mount_id, file_id)
-
-        self._check_result('Close File', error)
-
-    def _check_write_file(self, mount_id, file_id, offset, data):
-        """
-        Checks that write file is working.
-
-        @param mount_id: Unique identifier of the mount.
-        @param file_id: Unique identifier of the file.
-        @param offset: Offset of the file to start writing to.
-        @param data: Data to be written.
-
-        """
-
-        error = self._smbprovider.write_file(mount_id, file_id, offset, data)
-
-        self._check_result('Write File', error)
-
-    def _check_read_file(self, mount_id, file_id, offset, length):
-        """
-        Checks that read file is working.
-
-        @param mount_id: Unique identifier of the mount.
-        @param file_id: Unique identifier of the file.
-        @param offset: Offset of the file to start reading from.
-        @param length: Length of data to read in bytes.
-
-        @return A buffer containing the data read.
-
-        """
-
-        error, fd = self._smbprovider.read_file(mount_id, file_id, offset,
-                                                                   length)
-
-        self._check_result('Read File', error)
-
-        return fd
-
-    def _check_contents(self, data, read_data):
-        """
-        Checks that read_data is equal to data.
-
-        @param data: Original data to be compared to.
-        @param read_data: Data to be compared to the original data.
-
-        """
-
-        if data != read_data:
-            logging.error('Failed: Written data does not match Read data')
-            raise error.TestFail(
-                    'Unexpected mismatch of written data and read data.\
-                    Expected: %s , but got: %s' % (data, read_data))
-
-    def _check_create_directory(self, mount_id,
-                                      directory_path,
-                                      recursive):
-        """
-        Checks that create directory is working.
-
-        @param mount_id: Unique identifier of the mount.
-        @param directory_path: Path for the test directory.
-        @param recursive: Boolean to indicate whether directories should be
-                created recursively.
-
-        """
-
-        error = self._smbprovider.create_directory(mount_id,
-                                                   directory_path,
-                                                   recursive)
-
-        self._check_result('Create Directory', error)
-
-    def _check_delete_entry(self, mount_id, entry_path, recursive):
-        """
-        Checks that delete an entry works.
-
-        @param mount_id: Unique identifier of the mount.
-        @param entry_path: Path to the file/directory to delete.
-        @param recursive: Boolean to indicate recursive deletes.
-
-        """
-
-        error = self._smbprovider.delete_entry(mount_id,
-                                               entry_path,
-                                               recursive)
-
-        self._check_result('Delete Entry', error)
-
-    def _check_move_entry(self, mount_id, source_path, target_path,
-                                                       expected=None):
-        """
-        Checks that move entry is working.
-
-        @param mount_id: Unique identifier of the mount.
-        @param source_path: Path of the entry to be moved.
-        @param target_path: Path of the destination for the entry.
-        @param expected: Expected ErrorType. Default: None (ERROR_OK)
-
-        """
-
-        error = self._smbprovider.move_entry(mount_id,
-                                             source_path,
-                                             target_path)
-
-        self._check_result('Move Entry', error, expected)
-
-    def _check_result(self, method_name, result, expected=None):
-        """
-        Helper to check error codes and throw on mismatch.
-
-        Checks whether the returned ErrorType from a D-Bus call to smbproviderd
-        matches the expected ErrorType. In case of a mismatch, throws a
-        TestError.
-
-        @param method_name: Name of the D-Bus method that was called.
-        @param result: ErrorType returned from the D-Bus call.
-        @param expected: Expected ErrorType. Default: ErrorType.ERROR_OK.
-
-        """
-
-        from directory_entry_pb2 import ErrorType
-        from directory_entry_pb2 import ERROR_OK
-
-        if not expected:
-            expected = ERROR_OK
-
-        if result != expected:
-            logging.error('Failed to run %s', method_name)
-            raise error.TestFail(
-                    '%s failed with error %s (%s), expected %s (%s)' % (
-                    method_name, result, ErrorType.Name(result), expected,
-                    ErrorType.Name(expected)))
diff --git a/client/site_tests/enterprise_SmbProviderDaemon/src/Makefile b/client/site_tests/enterprise_SmbProviderDaemon/src/Makefile
deleted file mode 100644
index e5916dd..0000000
--- a/client/site_tests/enterprise_SmbProviderDaemon/src/Makefile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-ifndef SYSROOT
-  $(error Define SYSROOT)
-endif
-
-OUT_DIR ?= .
-PROTO_PATH = $(SYSROOT)/usr/include/chromeos/dbus/smbprovider
-PROTO_DEFS = $(PROTO_PATH)/directory_entry.proto
-PROTO_BINDINGS = $(OUT_DIR)/directory_entry_pb2.py
-
-all: $(PROTO_BINDINGS)
-
-$(PROTO_BINDINGS): $(PROTO_DEFS)
-	protoc --proto_path=$(PROTO_PATH) --python_out=$(OUT_DIR) $(PROTO_DEFS)
-
-clean:
-	rm -f $(PROTO_BINDINGS)
\ No newline at end of file
diff --git a/client/site_tests/example_UnitTest/control b/client/site_tests/example_UnitTest/control
index 7e4215d..dfe36f8 100644
--- a/client/site_tests/example_UnitTest/control
+++ b/client/site_tests/example_UnitTest/control
@@ -12,6 +12,7 @@
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = "example"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This is an example unit test.
diff --git a/client/site_tests/files_CopyFileToGoogleDriveUI/control b/client/site_tests/files_CopyFileToGoogleDriveUI/control
deleted file mode 100644
index db22115..0000000
--- a/client/site_tests/files_CopyFileToGoogleDriveUI/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Intel'
-NAME = "files_CopyFileToGoogleDriveUI"
-ATTRIBUTES = "suite:files"
-PURPOSE = "To test the copy operations to Google Drive"
-CRITERIA = """
-Fail if copy and sync in Google drive is not successful
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Files"
-TEST_TYPE = "client"
-DOC = """
-Copy file to Google drive and will check the sync.
-We need real username and password to sync the copied file.
-"""
-
-job.run_test('files_CopyFileToGoogleDriveUI')
diff --git a/client/site_tests/files_CopyFileToGoogleDriveUI/files_CopyFileToGoogleDriveUI.py b/client/site_tests/files_CopyFileToGoogleDriveUI/files_CopyFileToGoogleDriveUI.py
deleted file mode 100644
index 79e0aca..0000000
--- a/client/site_tests/files_CopyFileToGoogleDriveUI/files_CopyFileToGoogleDriveUI.py
+++ /dev/null
@@ -1,324 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-
-from commands import *
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chromedriver
-from selenium.webdriver.common.keys import Keys
-from autotest_lib.client.cros.graphics import graphics_utils
-from selenium.webdriver.common.action_chains import ActionChains
-from selenium.common.exceptions import WebDriverException
-
-TIMEOUT_TO_COPY = 1800  # in Secs. This timeout is for files beyond 1GB
-SEARCH_BUTTON_ID = "search-button"
-SEARCH_BOX_CSS = "div#search-box"
-PAPER_CONTAINTER = "paper-input-container"
-DELETE_BUTTON_ID = "delete-button"
-FILE_LIST_ID = "file-list"
-LABLE_ENTRY_CSS = "span.label.entry-name"
-CR_DIALOG_CLASS = "cr-dialog-ok"
-USER_LOCATION = "/home/chronos/user"
-# Using graphics_utils to simulate below keys
-OPEN_FILES_APPLICATION_KEYS = ["KEY_RIGHTSHIFT", "KEY_LEFTALT", "KEY_M"]
-SWITCH_TO_APP_KEY_COMBINATION = ["KEY_LEFTALT", 'KEY_TAB']
-SELECT_ALL_KEY_COMBINATION = ["KEY_LEFTCTRL", "KEY_A"]
-PASTE_KEY_COMBINATION = ["KEY_LEFTCTRL", "KEY_V"]
-GOOGLE_DRIVE = 'My Drive'
-
-
-class files_CopyFileToGoogleDriveUI(graphics_utils.GraphicsTest):
-
-    """Copy a file from Downloads folder to Google drive"""
-
-    version = 1
-    TIME_DELAY = 5
-    _WAIT_TO_LOAD = 5
-
-    def initialize(self):
-        """Autotest initialize function"""
-        super(files_CopyFileToGoogleDriveUI, self).initialize(
-                raise_error_on_hang=True)
-
-    def cleanup(self):
-        """Autotest cleanup function"""
-        if self._GSC:
-            keyvals = self._GSC.get_memory_difference_keyvals()
-            for key, val in keyvals.iteritems():
-                self.output_perf_value(
-                    description=key,
-                    value=val,
-                    units='bytes',
-                    higher_is_better=False)
-            self.write_perf_keyval(keyvals)
-        super(files_CopyFileToGoogleDriveUI, self).cleanup()
-        # If test fails then script will collect the screen shot to know at
-        # which instance failure occurred.
-        if not self.success:
-            graphics_utils.take_screenshot(os.path.join(self.debugdir),
-                                           "chrome")
-
-    def switch_to_app(self, driver, title):
-        """Switching to application using title
-
-        @param driver: chrome driver object
-        @param title: Title of the application
-        @return: True if the app is detected otherwise False
-        """
-        windows = driver.window_handles
-        logging.debug("Windows opened: %s", windows)
-        # Checking current window initially..
-        logging.debug("Current window is %s", driver.title)
-        if driver.title.strip().lower() == title.lower():
-            return True
-        # Switching to all opened windows to find out the required window
-        for window in windows:
-            try:
-                logging.debug("Switching to window")
-                driver.switch_to_window(window)
-                logging.debug("Switched to window: %s", driver.title)
-                time.sleep(2)
-                if driver.title.strip().lower() == title.lower():
-                    logging.info("%s application opened!", title)
-                    return True
-            except WebDriverException as we:
-                logging.debug("Webdriver exception occurred. Exception: %s",
-                              str(we))
-            except Exception as e:
-                logging.debug("Exception: %s", str(e))
-        return False
-
-    def open_files_application(self, driver):
-        """Open and switch to files application using graphics_utils.py
-
-        @param driver: chrome driver object
-        """
-        logging.info("Opening files application")
-        graphics_utils.press_keys(OPEN_FILES_APPLICATION_KEYS)
-        time.sleep(self._WAIT_TO_LOAD)
-        try:
-            self.switch_to_files(driver)
-        except Exception as e:
-            logging.error("Exception when switching files application.. %s",
-                          str(e))
-            logging.error("Failed to find files application. Trying again.")
-            graphics_utils.press_keys(OPEN_FILES_APPLICATION_KEYS)
-            time.sleep(self._WAIT_TO_LOAD)
-            self.switch_to_files(driver)
-
-    def switch_to_files(self, driver, title="Downloads"):
-        """Switch to files application
-
-        @param driver: chrome driver object
-        @param title: Title of the Files application
-        """
-        logging.debug("Switching/Focus on the Files app")
-        if self.switch_to_app(driver, title):
-            logging.info("Focused on Files application")
-            graphics_utils.press_keys(SWITCH_TO_APP_KEY_COMBINATION)
-            time.sleep(1)
-        else:
-            raise error.TestFail("Failed to open on Files application")
-
-    def check_folder_opened(self, driver, title):
-        """Check the selected folder is opened or not
-
-        @param driver: chrome driver object
-        @param title: Folder name
-        @return: Returns True if expected folder is opened otherwise False
-        """
-        logging.info("Actual files application title is %s", driver.title)
-        logging.info("Expected files application title is %s", title)
-        if driver.title == title:
-            return True
-        return False
-
-    def open_folder(self, driver, folder):
-        """Open given folder
-
-        @param driver: chrome driver object
-        @param folder: Directory name
-        """
-        folder_webelements = driver.find_elements_by_css_selector(
-            LABLE_ENTRY_CSS)
-        for element in folder_webelements:
-            try:
-                logging.debug("Found folder name: %s", element.text.strip())
-                if folder == element.text.strip():
-                    element.click()
-                    time.sleep(3)
-                    if self.check_folder_opened(driver, element.text.strip()):
-                        logging.info("Folder is opened!")
-                        return
-            except Exception as e:
-                logging.error("Exception when getting Files application "
-                              "folders %s", str(e))
-        raise error.TestError("Folder :%s is not opened or found", folder)
-
-    def list_files(self, driver):
-        """List files in the folder
-
-        @param driver: chrome driver object
-        @return: Returns list of files
-        """
-        return driver.find_element_by_id(
-            FILE_LIST_ID).find_elements_by_tag_name('li')
-
-    def search_file(self, driver, file_name):
-        """Search given file in Files application
-
-        @param driver: chrome driver object
-        @param file_name: Required file
-        """
-        driver.find_element_by_id(SEARCH_BUTTON_ID).click()
-        search_box_element = driver.find_element_by_css_selector(
-            SEARCH_BOX_CSS)
-        search_box_element.find_element_by_css_selector(
-            PAPER_CONTAINTER).find_element_by_tag_name('input').clear()
-        search_box_element.find_element_by_css_selector(
-            PAPER_CONTAINTER).find_element_by_tag_name('input').send_keys(
-            file_name)
-
-    def copy_file(self, driver, source, destination, file_name, clean=True):
-        """Copy file from one directory to another
-
-        @param driver: chrome driver object
-        @param source: Directory name from where to copy
-        @param destination: Directory name to where to copy
-        @param file_name: File to copy
-        @param clean: Cleans destination if True otherwise nothing
-        """
-        self.open_folder(driver, source)
-        self.search_file(driver, file_name)
-        files = self.list_files(driver)
-        action_chains = ActionChains(driver)
-
-        for item in files:
-            logging.info("Selecting file to copy in %s", file_name)
-            item.click()
-            file_size = item.text.split()[1].strip()
-            file_size_units = item.text.split()[2].strip()
-            logging.debug("Select copy")
-            action_chains.move_to_element(item) \
-                .click(item).key_down(Keys.CONTROL) \
-                .send_keys("c") \
-                .key_up(Keys.CONTROL) \
-                .perform()
-            self.open_folder(driver, destination)
-            if clean:
-                drive_files = self.list_files(driver)
-                if len(drive_files) != 0:
-                    logging.info("Removing existing files from %s",
-                                 destination)
-                    drive_files[0].click()
-                    logging.debug("Select all files/dirs")
-                    graphics_utils.press_keys(SELECT_ALL_KEY_COMBINATION)
-                    time.sleep(0.2)
-                    driver.find_element_by_id(DELETE_BUTTON_ID).click()
-                    driver.find_element_by_class_name(CR_DIALOG_CLASS).click()
-                    time.sleep(self.TIME_DELAY)
-            logging.debug("Pressing control+v to paste the file in required "
-                          "location")
-            graphics_utils.press_keys(PASTE_KEY_COMBINATION)
-            time.sleep(self.TIME_DELAY)
-            # Take dummy values initially
-            required_file_size = "0"
-            required_file_size_units = "KB"
-            required_file = None
-            # wait till the data copied
-            start_time = time.time()
-            while required_file_size != file_size and \
-                required_file_size_units != file_size_units and \
-                    (time.time() - start_time <= TIMEOUT_TO_COPY):
-                drive_files_during_copy = self.list_files(driver)
-                if len(drive_files_during_copy) == 0:
-                    raise error.TestError("File copy not started!")
-                for i_item in drive_files_during_copy:
-                    if i_item.text.strip().split()[0].strip() == file_name:
-                        logging.info("File found %s", i_item.text.split()[
-                            0].strip())
-                        required_file = file
-                if not required_file:
-                    raise error.TestError("No such file/directory in drive, "
-                                          "%s", required_file)
-                logging.info(required_file.text.split())
-                required_file_size = required_file.text.split()[1]
-                required_file_size_units = required_file.text.split()[2]
-                time.sleep(5)
-                logging.debug("%s %s data copied" % (required_file_size,
-                                                     required_file_size_units))
-            # Validation starts here
-            found = False
-            drive_files_after_copy = self.list_files(driver)
-            for copied_file in drive_files_after_copy:
-                logging.debug("File in destination: %s",
-                              copied_file.text.strip())
-                if copied_file.find_element_by_class_name(
-                        'entry-name').text.strip() == file_name:
-                    found = True
-                    break
-
-            if found:
-                logging.info("Copied the file successfully!")
-            else:
-                raise error.TestFail("File not transferred successfully!")
-
-    def catch_info_or_error_messages(self, driver):
-        """Logic to catch the error
-
-        @param driver: chrome driver object
-        """
-        errors = []
-        try:
-            driver.find_element_by_css_selector(
-                'div.button-frame').find_element_by_class_name('open').click()
-        except Exception as e:
-            logging.info("Error in open error messages")
-            logging.info(str(e))
-        error_elements = driver.find_elements_by_css_selector(
-            'div.progress-frame')
-        if len(error_elements) != 0:
-            for error_element in error_elements:
-                info_text = error_element.find_element_by_tag_name(
-                    'label').text
-                if info_text != "":
-                    errors.append(info_text)
-        return errors
-
-    def create_file(self, filename):
-        """Create a file"""
-        status, output = getstatusoutput('dd if=/dev/zero of=%s bs=%s '
-                                         'count=1 iflag=fullblock' %
-                                         (filename, 1024))
-        if status:
-            raise error.TestError("Failed to create file")
-
-    def run_once(self, username=None, password=None, source="Downloads",
-                 file_name='test.dat'):
-        """Copy file to Google Drive in Files application
-
-        @param username: Real user(Not default autotest user)
-        @param password: Password for the user.
-        @param source: From where to copy file
-        @param file_name: File name
-        """
-        self.success = False  # Used to capture the screenshot if the TC fails
-        with chromedriver.chromedriver(username=username,
-                                       password=password,
-                                       disable_default_apps=False,
-                                       gaia_login=True) as cr_instance:
-            driver = cr_instance.driver
-            self.open_files_application(driver)
-            self.create_file(os.path.join(os.path.join(USER_LOCATION,
-                                                       source), file_name))
-            self.copy_file(driver, source, GOOGLE_DRIVE, file_name)
-            errors = self.catch_info_or_error_messages(driver)
-            if len(errors):
-                raise error.TestFail("Test failed with the following"
-                                     " errors. %s", errors)
-        self.success = True
diff --git a/client/site_tests/firmware_CbfsMcache/control b/client/site_tests/firmware_CbfsMcache/control
new file mode 100644
index 0000000..44490f7
--- /dev/null
+++ b/client/site_tests/firmware_CbfsMcache/control
@@ -0,0 +1,27 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_CbfsMcache"
+PURPOSE = "Ensure the CBFS metadata cache did not overflow."
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv4"
+CRITERIA = "Fail if either RO or RW CBFS mcache overflowed."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+The CBFS metadata cache size in coreboot is configured by CONFIG_CBFS_MCACHE_SIZE
+(x86) or the size of the CBFS_MCACHE() region defined in memlayout (arm). If the
+mcache overflows, the platform will still boot securely but needs to read data
+from flash more times than necessary, leading to avoidable boot time impact. If
+this test fails, the mcache size should be increased until it no longer
+overflows. (If the RW mcache overflows by a large margin, increasing
+CONFIG_CBFS_MCACHE_RW_PERCENTAGE may also help.)
+
+"""
+
+job.run_test('firmware_CbfsMcache')
diff --git a/client/site_tests/firmware_CbfsMcache/firmware_CbfsMcache.py b/client/site_tests/firmware_CbfsMcache/firmware_CbfsMcache.py
new file mode 100644
index 0000000..26cff2d
--- /dev/null
+++ b/client/site_tests/firmware_CbfsMcache/firmware_CbfsMcache.py
@@ -0,0 +1,48 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import subprocess
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+
+
+class firmware_CbfsMcache(test.test):
+    """Validates that the CBFS metadata cache didn't overflow."""
+    version = 1
+
+    MCACHE_MAGIC_FULL = b'FULL'
+    MCACHE_MAGIC_END = b'$END'
+
+    CBMEM_RO_MCACHE = b'524d5346'
+    CBMEM_RW_MCACHE = b'574d5346'
+
+    def cbmem(self, *args):
+        """Runs 'cbmem' utility with specified arguments and returns output."""
+        # Cannot use utils.run because it force-decodes stdout as UTF-8.
+        return subprocess.check_output(('cbmem', ) + args)
+
+    def has_mcache(self):
+        """Returns true iff there's an RO MCACHE section in CBMEM."""
+        return self.CBMEM_RO_MCACHE in self.cbmem('-l')
+
+    def check_mcache(self, cbmem_id, name):
+        """Fail if the cbmem_id mcache wasn't terminated with an END token."""
+        mcache = self.cbmem('-r', cbmem_id)
+        if mcache[-4:] == self.MCACHE_MAGIC_FULL:
+            raise error.TestFail("CBFS %s mcache overflowed!" % name)
+        if mcache[-4:] != self.MCACHE_MAGIC_END:
+            raise error.TestError(
+                    "CBFS %s mcache ends with invalid token (%s)!" %
+                    (name, mcache[-4:]))
+
+    def run_once(self):
+        """Fail if an mcache exists but wasn't terminated with an END token."""
+        if utils.get_board() == 'volteer':
+            raise error.TestNAError("Skipped on Volteer, see b/187561710.")
+        if not self.has_mcache():
+            raise error.TestNAError("This platform doesn't use CBFS mcache.")
+        self.check_mcache(self.CBMEM_RO_MCACHE, "RO")
+        self.check_mcache(self.CBMEM_RW_MCACHE, "RW")
diff --git a/client/site_tests/firmware_CheckEOPState/control b/client/site_tests/firmware_CheckEOPState/control
new file mode 100644
index 0000000..5902d66
--- /dev/null
+++ b/client/site_tests/firmware_CheckEOPState/control
@@ -0,0 +1,41 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_CheckEOPState"
+PURPOSE = "Ensure that firmware told ME that the boot process is complete."
+ATTRIBUTES = "suite:faft_bios_ro_qual, suite:faft_bios_rw_qual"
+CRITERIA = "Fail if ME on Tiger Lake or later is not in post-boot state"
+TIME = "SHORT"
+TEST_CATEGORY = "Security"
+TEST_CLASS = "firmware"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+Intel x86 CPUs come with a Management Engine (going by more complicated
+names sometimes, look for ME, CSE or CSME) that coordinates various
+tasks during boot and after.
+
+That coprocessor needs to know when the boot process is complete so
+it can reject privileged boot-only requests until next reboot.
+
+This test PASSES if the ME reports that it's in Post-Boot State.
+
+This test FAILS if any of the following is true:
+ - The Intel microarchitecture isn't defined in client/bin/utils.py
+ - The ME reports it's not in Post-Boot State. This is the main condition.
+ - The ME's firmware supports a sufficiently new protocol but returns
+   failure to execute the query, failure to understand the query, responded
+   to a different query or returns an unexpectedly formatted response.
+ - The kernel doesn't provide a /dev/mei0 device even though we expect
+   it to be there.
+
+This test reports NA if any of the following is true:
+ - The test is executed on non-Intel CPUs (e.g. AMD or ARM)
+ - The ME reports an old protocol version where we can't query the
+   state. (At least Sky Lake-E and older)
+"""
+
+job.run_test('firmware_CheckEOPState')
diff --git a/client/site_tests/firmware_CheckEOPState/firmware_CheckEOPState.py b/client/site_tests/firmware_CheckEOPState/firmware_CheckEOPState.py
new file mode 100644
index 0000000..44b20ee
--- /dev/null
+++ b/client/site_tests/firmware_CheckEOPState/firmware_CheckEOPState.py
@@ -0,0 +1,84 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import array
+import fcntl
+import os
+import struct
+import uuid
+
+from autotest_lib.client.bin import test, utils
+from autotest_lib.client.common_lib import error
+
+
+class firmware_CheckEOPState(test.test):
+    """Validates that the ME has been told by firmware that POST is done"""
+    # Needed by autotest
+    version = 1
+
+    def read_post_boot_state(self):
+        """Fail if the ME can report EOP state but is not in Post-Boot State."""
+        HECI_MKHI = uuid.UUID('{8e6a6715-9abc-4043-88ef-9e39c6f63e0f}')
+        IOCTL_MEI_CONNECT_CLIENT = 0xc0104801  # _IOWR('H', 1, 16);
+
+        try:
+            mei_dev = os.open('/dev/mei0', os.O_RDWR)
+        except OSError:
+            raise error.TestFail('ME device not found, probably old kernel.')
+
+        # Connect to MKHI
+        buf = array.array('B', 16 * [0])
+        struct.pack_into('<16s', buf, 0, HECI_MKHI.bytes_le)
+        fcntl.ioctl(mei_dev, IOCTL_MEI_CONNECT_CLIENT, buf)
+        max_msg_length, protocol_version = struct.unpack_from('<IB', buf)
+
+        # Protocol 2 appears to be the minimum version that allows querying EOP
+        if protocol_version < 2:
+            os.close(mei_dev)
+            raise error.TestNAError(
+                    'ME protocol too old. Not checking for EOP.')
+
+        # query EOP State
+        group_id = 0xff
+        command = 0x1d
+        os.write(mei_dev, struct.pack('<BBBB', group_id, command, 0, 0))
+        inb = os.read(mei_dev, max_msg_length)
+        os.close(mei_dev)
+
+        if len(inb) != 8:
+            raise error.TestFail('Unknown response by ME.')
+
+        group_id_resp, command_plus_80, rsvd, result, eop_state = struct.unpack(
+                '<BBBBI', inb)
+
+        if (group_id_resp != group_id) or (command_plus_80 != command | 0x80):
+            raise error.TestFail('ME didn\'t respond to Query EOP State.')
+        if result == 0x8d:
+            raise error.TestFail('ME didn\'t understand Query EOP State.')
+        if result == 0x8e:
+            raise error.TestFail('ME reported failure on Query EOP State.')
+        if result != 0:
+            raise error.TestFail(
+                    'ME gave unknown response to Query EOP State.')
+
+        # if True, EOP has been issued by firmware and we're in Post-Boot State
+        eop_state = (eop_state & 0xff) == 0
+
+        return eop_state
+
+    def run_once(self):
+        """Fail unless ME returns Post-Boot State"""
+        cpu_family = utils.get_cpu_soc_family()
+        if cpu_family not in ('x86_64', 'i386'):
+            raise error.TestNAError(
+                    'This test is not applicable, '
+                    'because a non-Intel device has been detected. '
+                    'Such devices do not have an ME (Management Engine)')
+
+        if utils.is_intel_uarch_older_than('Tiger Lake'):
+            raise error.TestNAError('Skipping test on pre-TGL')
+        if utils.is_intel_uarch_older_than('Gracemont'):
+            raise error.TestNAError('Skipping test on production Atom designs')
+
+        if not self.read_post_boot_state():
+            raise error.TestFail('ME is not in Post-Boot State.')
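
The hard-coded IOCTL_MEI_CONNECT_CLIENT value above corresponds to _IOWR('H', 1, 16). A sketch of how that constant is derived under the standard Linux ioctl number encoding (shown for reference only; the test simply uses the literal):

    def _IOWR(type_char, nr, size):
        # dir (read|write = 3) << 30 | size << 16 | type << 8 | nr
        return (3 << 30) | (size << 16) | (ord(type_char) << 8) | nr

    assert _IOWR('H', 1, 16) == 0xc0104801
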
diff --git a/client/site_tests/firmware_Cr50VirtualNVRam/control b/client/site_tests/firmware_Cr50VirtualNVRam/control
index 234689c..b9c7136 100644
--- a/client/site_tests/firmware_Cr50VirtualNVRam/control
+++ b/client/site_tests/firmware_Cr50VirtualNVRam/control
@@ -7,6 +7,7 @@
 PURPOSE = "Test vNVRAM functionality in cr50"
 TIME = "SHORT"
 TEST_TYPE = "Client"
+PY_VERSION = 3
 
 DOC = """
 Verifies basic read functionality for vNVRAM indices, and checks
diff --git a/client/site_tests/firmware_Cr50VirtualNVRam/firmware_Cr50VirtualNVRam.py b/client/site_tests/firmware_Cr50VirtualNVRam/firmware_Cr50VirtualNVRam.py
index 3565cd8..2a7c1a5 100644
--- a/client/site_tests/firmware_Cr50VirtualNVRam/firmware_Cr50VirtualNVRam.py
+++ b/client/site_tests/firmware_Cr50VirtualNVRam/firmware_Cr50VirtualNVRam.py
@@ -173,7 +173,7 @@
                 '00 ' + pw_sz +               # password length
                 tpm_pw_hex)                   # password
 
-    def __definespace_sanity_check(self):
+    def __definespace_check(self):
         # A space outside the virtual range can be defined
         check_tpmc(self.__get_define_cmd('01 4f aa df', 12),
                    '(0x[0-9]{2} ){6}'
@@ -299,7 +299,7 @@
             raise error.TestNAError("TPM does not support vNVRAM")
 
         self.__readpublic_test()
-        self.__definespace_sanity_check()
+        self.__definespace_check()
         self.__definespace_tests()
         self.__undefinespace_tests()
         self.__readlock_test()
diff --git a/client/site_tests/firmware_LockedME/control b/client/site_tests/firmware_LockedME/control
index fb518e8..bc69424 100644
--- a/client/site_tests/firmware_LockedME/control
+++ b/client/site_tests/firmware_LockedME/control
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Security"
 TEST_CLASS = "firmware"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Intel x86 CPUs (Sandybridge and later) reserve a portion of the BIOS
diff --git a/client/site_tests/firmware_LockedME/firmware_LockedME.py b/client/site_tests/firmware_LockedME/firmware_LockedME.py
index aa8a43e..e05c2d6 100644
--- a/client/site_tests/firmware_LockedME/firmware_LockedME.py
+++ b/client/site_tests/firmware_LockedME/firmware_LockedME.py
@@ -65,7 +65,7 @@
                               'count=1', 'bs=%d' % (size)))
         self.flashrom(args=('-V', '-w', self.BIOS_FILE,
                             '-i' , '%s:%s' % (sectname, self.RANDOM_FILE),
-                            '--fast-verify'),
+                            '--noverify-all'),
                       ignore_status=True)
         self.flashrom(args=('-r',
                             '-i', '%s:%s' % (sectname, self.FLASHED_FILE)))
@@ -75,7 +75,7 @@
             logging.info('Oops, it worked! Put it back...')
             self.flashrom(args=('-w', self.BIOS_FILE,
                                 '-i', '%s:%s' % (sectname, sectname),
-                                '--fast-verify'),
+                                '--noverify-all'),
                           ignore_status=True)
             raise error.TestFail('%s is writable, ME is unlocked' % sectname)
 
diff --git a/client/site_tests/firmware_RomSize/control b/client/site_tests/firmware_RomSize/control
index eb0e713..f087b5a 100644
--- a/client/site_tests/firmware_RomSize/control
+++ b/client/site_tests/firmware_RomSize/control
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test relies on dmidecode to find the ROM Size.
diff --git a/client/site_tests/firmware_RomSize/control.hwqual b/client/site_tests/firmware_RomSize/control.hwqual
index 648a57b..0c11881 100644
--- a/client/site_tests/firmware_RomSize/control.hwqual
+++ b/client/site_tests/firmware_RomSize/control.hwqual
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test relies on dmidecode to find the ROM Size.
diff --git a/client/site_tests/firmware_SetFWMP/firmware_SetFWMP.py b/client/site_tests/firmware_SetFWMP/firmware_SetFWMP.py
index bf1a1f5..450e4c7 100644
--- a/client/site_tests/firmware_SetFWMP/firmware_SetFWMP.py
+++ b/client/site_tests/firmware_SetFWMP/firmware_SetFWMP.py
@@ -25,6 +25,7 @@
         raise error.TestFail('Failed to own the TPM %s' % status)
 
     def run_once(self, fwmp_cleared=True, flags=None, dev_key_hash=None):
+        """Own the TPM and set the FWMP."""
         # make sure the FWMP is in the expected state
         cryptohome.get_fwmp(fwmp_cleared)
         status = cryptohome.get_tpm_status()
@@ -32,10 +33,6 @@
         if not status['Owned']:
             status = self.own_tpm()
 
-        # Verify we have access to the password
-        if not status['Password']:
-            logging.warning('No access to the password')
-
         logging.info(status)
 
         # Set the FWMP flags using a dev key hash
diff --git a/client/site_tests/firmware_VbootCrypto/control b/client/site_tests/firmware_VbootCrypto/control
index 6542ea8..7ebb60a 100644
--- a/client/site_tests/firmware_VbootCrypto/control
+++ b/client/site_tests/firmware_VbootCrypto/control
@@ -22,6 +22,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test implements various RSA and SHA by creating and verifying various
diff --git a/client/site_tests/graphics_Chrome/control.ozone_gl_unittests b/client/site_tests/graphics_Chrome/control.ozone_gl_unittests
index c9aa733..ef04abf 100644
--- a/client/site_tests/graphics_Chrome/control.ozone_gl_unittests
+++ b/client/site_tests/graphics_Chrome/control.ozone_gl_unittests
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_Chrome.ozone_gl_unittests"
 TIME = "SHORT"
diff --git a/client/site_tests/graphics_Chrome/graphics_Chrome.py b/client/site_tests/graphics_Chrome/graphics_Chrome.py
index 03e72ce..79288c2 100644
--- a/client/site_tests/graphics_Chrome/graphics_Chrome.py
+++ b/client/site_tests/graphics_Chrome/graphics_Chrome.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_GLAPICheck/control b/client/site_tests/graphics_GLAPICheck/control
index e77acd5..4dc7df7 100644
--- a/client/site_tests/graphics_GLAPICheck/control
+++ b/client/site_tests/graphics_GLAPICheck/control
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 NAME = 'graphics_GLAPICheck'
 AUTHOR = 'chromeos-gfx'
 PURPOSE = 'Verify correctness of OpenGL/GLES.'
@@ -13,7 +14,7 @@
   - EGL version is less than 1.3
   - If GL extensions don't include needed extensions
 """
-ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system, suite:hwqual"
+ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system, suite:hwqual, suite:pvs-graphics"
 TIME='SHORT'
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = "graphics"
diff --git a/client/site_tests/graphics_GLAPICheck/graphics_GLAPICheck.py b/client/site_tests/graphics_GLAPICheck/graphics_GLAPICheck.py
index 86007ad..4353299 100644
--- a/client/site_tests/graphics_GLAPICheck/graphics_GLAPICheck.py
+++ b/client/site_tests/graphics_GLAPICheck/graphics_GLAPICheck.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_GLBench/control b/client/site_tests/graphics_GLBench/control
index 5e15548..5a34c77 100644
--- a/client/site_tests/graphics_GLBench/control
+++ b/client/site_tests/graphics_GLBench/control
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 NAME = 'graphics_GLBench'
 AUTHOR = 'chromeos-gfx'
 PURPOSE = 'Benchmark the graphics library performance.'
diff --git a/client/site_tests/graphics_GLBench/control.bvt b/client/site_tests/graphics_GLBench/control.bvt
index 6ee4185..783eda1 100644
--- a/client/site_tests/graphics_GLBench/control.bvt
+++ b/client/site_tests/graphics_GLBench/control.bvt
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 NAME = 'graphics_GLBench.bvt'
 AUTHOR = 'chromeos-gfx'
 PURPOSE = 'Benchmark the graphics library performance.'
@@ -19,7 +20,8 @@
 Note: it should nearly never be required to remove old versions of good/bad
 images from these directories.
 """
-ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system, suite:infra_qual"
+# TODO(b/227756677) Add suite:infra_qual back to attributes
+ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system"
 TIME='FAST'
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = "gl"
diff --git a/client/site_tests/graphics_GLBench/control.hwqual b/client/site_tests/graphics_GLBench/control.hwqual
index 93d82c8..1e33602 100644
--- a/client/site_tests/graphics_GLBench/control.hwqual
+++ b/client/site_tests/graphics_GLBench/control.hwqual
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 NAME = 'graphics_GLBench.hwqual'
 AUTHOR = 'chromeos-gfx'
 PURPOSE = 'Benchmark the graphics library performance.'
diff --git a/client/site_tests/graphics_GLBench/graphics_GLBench.py b/client/site_tests/graphics_GLBench/graphics_GLBench.py
index 894094c..b7c8f51 100644
--- a/client/site_tests/graphics_GLBench/graphics_GLBench.py
+++ b/client/site_tests/graphics_GLBench/graphics_GLBench.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -218,7 +219,7 @@
         # Don't throw an exception and remind there is a problem.
         keyvals[testname] = -1.0
         f.write('# knownbad [' + imagefile + '] (setting perf as -1.0)\n')
-        # This failure is whitelisted so don't add to failed_tests.
+        # This failure is allowlisted so don't add to failed_tests.
       elif imagefile in reference_imagenames:
         # Known good reference images (default).
         keyvals[testname] = testrating
diff --git a/client/site_tests/graphics_GLMark2/_control b/client/site_tests/graphics_GLMark2/_control
new file mode 100644
index 0000000..1cac2f2
--- /dev/null
+++ b/client/site_tests/graphics_GLMark2/_control
@@ -0,0 +1,28 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+PY_VERSION = 3
+NAME = 'graphics_GLMark2'
+AUTHOR = 'chromeos-gfx'
+PURPOSE = 'Benchmark the graphics library performance.'
+# TODO(ihf): reenable this test once b/213954429 is fixed.
+# ATTRIBUTES = "suite:graphics, suite:graphics_per-day, suite:graphics_system"
+TIME='MEDIUM'
+TEST_CATEGORY = 'Performance'
+TEST_CLASS = "gl"
+TEST_TYPE = 'client'
+# Reboot in the lab after the test ends.
+DEPENDENCIES='cleanup-reboot'
+BUG_TEMPLATE = {
+    'components': ['OS>Kernel>Graphics'],
+}
+
+DOC = """
+glmark2 is a benchmark for OpenGL (ES) 2.0. It uses only the subset of the
+OpenGL 2.0 API that is compatible with OpenGL ES 2.0.
+
+https://launchpad.net/glmark2
+"""
+
+job.run_test('graphics_GLMark2')
diff --git a/client/site_tests/graphics_GLMark2/_control.bvt b/client/site_tests/graphics_GLMark2/_control.bvt
new file mode 100644
index 0000000..5b6dc27
--- /dev/null
+++ b/client/site_tests/graphics_GLMark2/_control.bvt
@@ -0,0 +1,29 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+PY_VERSION = 3
+NAME = 'graphics_GLMark2.bvt'
+AUTHOR = 'chromeos-gfx'
+PURPOSE = 'Benchmark the graphics library performance.'
+# TODO(ihf): reenable this test once b/213954429 is fixed.
+# ATTRIBUTES = 'suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system, suite:infra_qual'
+TIME='MEDIUM'
+TEST_CATEGORY = 'Performance'
+TEST_CLASS = "gl"
+TEST_TYPE = 'client'
+JOB_RETRIES = 2
+# Reboot in the lab after the test ends.
+DEPENDENCIES='cleanup-reboot'
+BUG_TEMPLATE = {
+    'components': ['OS>Kernel>Graphics'],
+}
+
+DOC = """
+glmark2 is a benchmark for OpenGL (ES) 2.0. It uses only the subset of the
+OpenGL 2.0 API that is compatible with OpenGL ES 2.0.
+
+https://launchpad.net/glmark2
+"""
+
+job.run_test('graphics_GLMark2', hasty = True)
diff --git a/client/site_tests/graphics_GLMark2/control b/client/site_tests/graphics_GLMark2/control
deleted file mode 100644
index 519e587..0000000
--- a/client/site_tests/graphics_GLMark2/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'graphics_GLMark2'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Benchmark the graphics library performance.'
-ATTRIBUTES = "suite:graphics, suite:graphics_per-day, suite:graphics_system"
-TIME='MEDIUM'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = "gl"
-TEST_TYPE = 'client'
-# Reboot in the lab after the test ends.
-DEPENDENCIES='cleanup-reboot'
-BUG_TEMPLATE = {
-    'components': ['OS>Kernel>Graphics'],
-}
-
-DOC = """
-glmark2 is a benchmark for OpenGL (ES) 2.0. It uses only the subset of the
-OpenGL 2.0 API that is compatible with OpenGL ES 2.0.
-
-https://launchpad.net/glmark2
-"""
-
-job.run_test('graphics_GLMark2')
diff --git a/client/site_tests/graphics_GLMark2/control.bvt b/client/site_tests/graphics_GLMark2/control.bvt
deleted file mode 100644
index d3cb3c2..0000000
--- a/client/site_tests/graphics_GLMark2/control.bvt
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'graphics_GLMark2.bvt'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Benchmark the graphics library performance.'
-ATTRIBUTES = 'suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system, suite:infra_qual'
-TIME='MEDIUM'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = "gl"
-TEST_TYPE = 'client'
-JOB_RETRIES = 2
-# Reboot in the lab after the test ends.
-DEPENDENCIES='cleanup-reboot'
-BUG_TEMPLATE = {
-    'components': ['OS>Kernel>Graphics'],
-}
-
-DOC = """
-glmark2 is a benchmark for OpenGL (ES) 2.0. It uses only the subset of the
-OpenGL 2.0 API that is compatible with OpenGL ES 2.0.
-
-https://launchpad.net/glmark2
-"""
-
-job.run_test('graphics_GLMark2', hasty = True)
diff --git a/client/site_tests/graphics_GLMark2/graphics_GLMark2.py b/client/site_tests/graphics_GLMark2/graphics_GLMark2.py
index 7065d67..6590344 100644
--- a/client/site_tests/graphics_GLMark2/graphics_GLMark2.py
+++ b/client/site_tests/graphics_GLMark2/graphics_GLMark2.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,27 +9,14 @@
 import logging
 import os
 import re
-import string
 
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros import service_stopper
 from autotest_lib.client.cros.graphics import graphics_utils
 
-GLMARK2_TEST_RE = (
-    r'^\[(?P<scene>.*)\] (?P<options>.*): FPS: (?P<fps>\d+) FrameTime: '
-    r'(?P<frametime>\d+.\d+) ms$')
 GLMARK2_SCORE_RE = r'glmark2 Score: (\d+)'
 
-# perf value description strings may only contain letters, numbers, periods,
-# dashes and underscores.
-# But glmark2 test names are usually in the form:
-#   scene-name:opt=val:opt=v1,v2;v3,v4 or scene:<default>
-# which we convert to:
-#   scene-name.opt_val.opt_v1-v2_v3-v4 or scene.default
-description_table = string.maketrans(':,=;', '.-__')
-description_delete = '<>'
-
 
 class graphics_GLMark2(graphics_utils.GraphicsTest):
     """Runs glmark2, which benchmarks only calls compatible with OpenGL ES 2.0"""
diff --git a/client/site_tests/graphics_Gbm/control b/client/site_tests/graphics_Gbm/control
index 9f6fb48..09bbab3 100644
--- a/client/site_tests/graphics_Gbm/control
+++ b/client/site_tests/graphics_Gbm/control
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 NAME = 'graphics_Gbm'
 AUTHOR = 'chromeos-gfx'
 PURPOSE = 'Tests the Mesa graphics buffer management.'
diff --git a/client/site_tests/graphics_Gbm/graphics_Gbm.py b/client/site_tests/graphics_Gbm/graphics_Gbm.py
index 00df0e7..3fea342 100644
--- a/client/site_tests/graphics_Gbm/graphics_Gbm.py
+++ b/client/site_tests/graphics_Gbm/graphics_Gbm.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_Gbm/src/gbmtest.c b/client/site_tests/graphics_Gbm/src/gbmtest.c
index a780c27..2284ec9 100644
--- a/client/site_tests/graphics_Gbm/src/gbmtest.c
+++ b/client/site_tests/graphics_Gbm/src/gbmtest.c
@@ -51,115 +51,41 @@
 static struct gbm_device *gbm;
 
 static const uint32_t format_list[] = {
-	GBM_FORMAT_C8,
-	GBM_FORMAT_RGB332,
-	GBM_FORMAT_BGR233,
-	GBM_FORMAT_XRGB4444,
-	GBM_FORMAT_XBGR4444,
-	GBM_FORMAT_RGBX4444,
-	GBM_FORMAT_BGRX4444,
-	GBM_FORMAT_ARGB4444,
-	GBM_FORMAT_ABGR4444,
-	GBM_FORMAT_RGBA4444,
-	GBM_FORMAT_BGRA4444,
-	GBM_FORMAT_XRGB1555,
-	GBM_FORMAT_XBGR1555,
-	GBM_FORMAT_RGBX5551,
-	GBM_FORMAT_BGRX5551,
-	GBM_FORMAT_ARGB1555,
-	GBM_FORMAT_ABGR1555,
-	GBM_FORMAT_RGBA5551,
-	GBM_FORMAT_BGRA5551,
+	GBM_FORMAT_R8,
 	GBM_FORMAT_RGB565,
-	GBM_FORMAT_BGR565,
-	GBM_FORMAT_RGB888,
 	GBM_FORMAT_BGR888,
 	GBM_FORMAT_XRGB8888,
 	GBM_FORMAT_XBGR8888,
-	GBM_FORMAT_RGBX8888,
-	GBM_FORMAT_BGRX8888,
 	GBM_FORMAT_ARGB8888,
 	GBM_FORMAT_ABGR8888,
-	GBM_FORMAT_RGBA8888,
-	GBM_FORMAT_BGRA8888,
 	GBM_FORMAT_XRGB2101010,
 	GBM_FORMAT_XBGR2101010,
-	GBM_FORMAT_RGBX1010102,
-	GBM_FORMAT_BGRX1010102,
 	GBM_FORMAT_ARGB2101010,
 	GBM_FORMAT_ABGR2101010,
-	GBM_FORMAT_RGBA1010102,
-	GBM_FORMAT_BGRA1010102,
-	GBM_FORMAT_YUYV,
-	GBM_FORMAT_YVYU,
-	GBM_FORMAT_UYVY,
-	GBM_FORMAT_VYUY,
-	GBM_FORMAT_AYUV,
+	GBM_FORMAT_ABGR16161616F,
 	GBM_FORMAT_NV12,
 	GBM_FORMAT_YVU420,
 };
 
-struct plane_info {
+struct format_info {
+	uint32_t pixel_format;
 	uint32_t bits_per_pixel;
-	uint32_t subsample_rate;
 	uint32_t data_mask;
 };
 
-#define MAX_PLANES 3
-struct format_info {
-	uint32_t pixel_format;
-	int num_planes;
-	struct plane_info planes[MAX_PLANES];
-};
-
 /* Bits per pixel for each. */
-static const struct format_info format_info_list[] = {
-	{GBM_FORMAT_C8, 1, {{8, 1, 0xFF}}},
-	{GBM_FORMAT_RGB332, 1, {{8, 1, 0xFF}}},
-	{GBM_FORMAT_BGR233, 1, {{8, 1, 0xFF}}},
-	{GBM_FORMAT_XRGB4444, 1, {{16, 1, 0x0FFF}}},
-	{GBM_FORMAT_XBGR4444, 1, {{16, 1, 0x0FFF}}},
-	{GBM_FORMAT_RGBX4444, 1, {{16, 1, 0xFFF0}}},
-	{GBM_FORMAT_BGRX4444, 1, {{16, 1, 0xFFF0}}},
-	{GBM_FORMAT_ARGB4444, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_ABGR4444, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_RGBA4444, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_BGRA4444, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_XRGB1555, 1, {{16, 1, 0x7FFF}}},
-	{GBM_FORMAT_XBGR1555, 1, {{16, 1, 0x7FFF}}},
-	{GBM_FORMAT_RGBX5551, 1, {{16, 1, 0xFFFE}}},
-	{GBM_FORMAT_BGRX5551, 1, {{16, 1, 0xFFFE}}},
-	{GBM_FORMAT_ARGB1555, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_ABGR1555, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_RGBA5551, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_BGRA5551, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_RGB565, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_BGR565, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_RGB888, 1, {{24, 1, 0xFFFFFF}}},
-	{GBM_FORMAT_BGR888, 1, {{24, 1, 0xFFFFFF}}},
-	{GBM_FORMAT_XRGB8888, 1, {{32, 1, 0x00FFFFFF}}},
-	{GBM_FORMAT_XBGR8888, 1, {{32, 1, 0x00FFFFFF}}},
-	{GBM_FORMAT_RGBX8888, 1, {{32, 1, 0xFFFFFF00}}},
-	{GBM_FORMAT_BGRX8888, 1, {{32, 1, 0xFFFFFF00}}},
-	{GBM_FORMAT_ARGB8888, 1, {{32, 1, 0xFFFFFFFF}}},
-	{GBM_FORMAT_ABGR8888, 1, {{32, 1, 0xFFFFFFFF}}},
-	{GBM_FORMAT_RGBA8888, 1, {{32, 1, 0xFFFFFFFF}}},
-	{GBM_FORMAT_BGRA8888, 1, {{32, 1, 0xFFFFFFFF}}},
-	{GBM_FORMAT_XRGB2101010, 1, {{32, 1, 0x3FFFFFFF}}},
-	{GBM_FORMAT_XBGR2101010, 1, {{32, 1, 0x3FFFFFFF}}},
-	{GBM_FORMAT_RGBX1010102, 1, {{32, 1, 0xFFFFFFFC}}},
-	{GBM_FORMAT_BGRX1010102, 1, {{32, 1, 0xFFFFFFFC}}},
-	{GBM_FORMAT_ARGB2101010, 1, {{32, 1, 0xFFFFFFFF}}},
-	{GBM_FORMAT_ABGR2101010, 1, {{32, 1, 0xFFFFFFFF}}},
-	{GBM_FORMAT_RGBA1010102, 1, {{32, 1, 0xFFFFFFFF}}},
-	{GBM_FORMAT_BGRA1010102, 1, {{32, 1, 0xFFFFFFFF}}},
-	{GBM_FORMAT_YUYV, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_YVYU, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_UYVY, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_VYUY, 1, {{16, 1, 0xFFFF}}},
-	{GBM_FORMAT_AYUV, 1, {{32, 1, 0xFFFFFFFF}}},
-	{GBM_FORMAT_NV12, 2, {{8, 1, 0xFF}, {16, 2, 0xFFFF}}},
-	{GBM_FORMAT_YVU420, 3, {{8, 1, 0xFF}, {8, 2, 0xFF}, {8,2, 0xFF}}},
+static const struct format_info mappable_format_list[] = {
+	{GBM_FORMAT_R8, 8, 0xFF},
+	{GBM_FORMAT_RGB565, 16, 0xFFFF},
+	{GBM_FORMAT_BGR888, 24, 0xFFFFFF},
+	{GBM_FORMAT_XRGB8888, 32, 0x00FFFFFF},
+	{GBM_FORMAT_XBGR8888, 32, 0x00FFFFFF},
+	{GBM_FORMAT_ARGB8888, 32, 0xFFFFFFFF},
+	{GBM_FORMAT_ABGR8888, 32, 0xFFFFFFFF},
+	{GBM_FORMAT_XRGB2101010, 32, 0x3FFFFFFF},
+	{GBM_FORMAT_XBGR2101010, 32, 0x3FFFFFFF},
+	{GBM_FORMAT_ARGB2101010, 32, 0xFFFFFFFF},
+	{GBM_FORMAT_ABGR2101010, 32, 0xFFFFFFFF},
 };
 
 static const uint32_t usage_list[] = {
@@ -168,26 +94,13 @@
 	GBM_BO_USE_RENDERING,
 	GBM_BO_USE_LINEAR,
 	GBM_BO_USE_SW_READ_OFTEN,
-	GBM_BO_USE_SW_READ_RARELY,
 	GBM_BO_USE_SW_WRITE_OFTEN,
-	GBM_BO_USE_SW_WRITE_RARELY,
 };
 
-static const uint32_t buffer_list[] = {
-	GBM_BO_USE_SCANOUT | GBM_BO_USE_SW_READ_RARELY | GBM_BO_USE_SW_WRITE_RARELY,
-	GBM_BO_USE_RENDERING | GBM_BO_USE_SW_READ_RARELY | GBM_BO_USE_SW_WRITE_RARELY,
-	GBM_BO_USE_SW_READ_RARELY | GBM_BO_USE_SW_WRITE_RARELY,
-	GBM_BO_USE_SW_READ_RARELY | GBM_BO_USE_SW_WRITE_RARELY | GBM_BO_USE_TEXTURING,
-	GBM_BO_USE_SW_READ_RARELY | GBM_BO_USE_SW_WRITE_RARELY | GBM_BO_USE_TEXTURING,
-
-	GBM_BO_USE_RENDERING | GBM_BO_USE_SW_READ_RARELY | GBM_BO_USE_SW_WRITE_RARELY |
-	GBM_BO_USE_TEXTURING,
-
-	GBM_BO_USE_RENDERING | GBM_BO_USE_SCANOUT | GBM_BO_USE_SW_READ_RARELY |
-	GBM_BO_USE_SW_WRITE_RARELY,
-
-	GBM_BO_USE_RENDERING | GBM_BO_USE_SCANOUT | GBM_BO_USE_SW_READ_RARELY |
-	GBM_BO_USE_SW_WRITE_RARELY | GBM_BO_USE_TEXTURING,
+static const uint32_t mappable_usage_list[] = {
+	GBM_BO_USE_SCANOUT | GBM_BO_USE_SW_READ_OFTEN | GBM_BO_USE_SW_WRITE_OFTEN,
+	GBM_BO_USE_RENDERING | GBM_BO_USE_SW_READ_OFTEN | GBM_BO_USE_SW_WRITE_OFTEN,
+	GBM_BO_USE_TEXTURING | GBM_BO_USE_SW_READ_OFTEN | GBM_BO_USE_SW_WRITE_OFTEN,
 };
 
 static int check_bo(struct gbm_bo *bo)
@@ -219,19 +132,16 @@
 	CHECK(gbm_bo_get_handle_for_plane(bo, 0).u32 == gbm_bo_get_handle(bo).u32);
 
 	CHECK(gbm_bo_get_offset(bo, 0) == 0);
-	CHECK(gbm_bo_get_plane_size(bo, 0) >=
-		gbm_bo_get_width(bo) * gbm_bo_get_height(bo));
 	CHECK(gbm_bo_get_stride_for_plane(bo, 0) == gbm_bo_get_stride(bo));
 
 	for (plane = 0; plane < num_planes; plane++) {
 		CHECK(gbm_bo_get_handle_for_plane(bo, plane).u32);
 
-		fd = gbm_bo_get_plane_fd(bo, plane);
+		fd = gbm_bo_get_fd_for_plane(bo, plane);
 		CHECK(fd > 0);
 		close(fd);
 
 		gbm_bo_get_offset(bo, plane);
-		CHECK(gbm_bo_get_plane_size(bo, plane));
 		CHECK(gbm_bo_get_stride_for_plane(bo, plane));
 	}
 
@@ -317,61 +227,6 @@
 	return ENODRM;
 }
 
-static int drm_open_vgem()
-{
-	const char g_sys_card_path_format[] =
-		"/sys/bus/platform/devices/vgem/drm/card%d";
-	const char g_dev_card_path_format[] =
-		"/dev/dri/card%d";
-	char *name;
-	int i, fd;
-
-	for (i = 0; i < 16; i++) {
-		struct stat _stat;
-		int ret;
-		ret = asprintf(&name, g_sys_card_path_format, i);
-		assert(ret != -1);
-
-		if (stat(name, &_stat) == -1) {
-			free(name);
-			continue;
-		}
-
-		free(name);
-		ret = asprintf(&name, g_dev_card_path_format, i);
-		assert(ret != -1);
-
-		fd = open(name, O_RDWR);
-		free(name);
-		if (fd == -1) {
-			return -1;
-		}
-		return fd;
-	}
-	return -1;
-}
-
-static int create_vgem_bo(int fd, size_t size, uint32_t * handle)
-{
-	struct drm_mode_create_dumb create;
-	int ret;
-
-	memset(&create, 0, sizeof(create));
-	create.height = size;
-	create.width = 1;
-	create.bpp = 8;
-
-	ret = drmIoctl(fd, DRM_IOCTL_MODE_CREATE_DUMB, &create);
-	if (ret)
-		return ret;
-
-	assert(create.size >= size);
-
-	*handle = create.handle;
-
-	return 0;
-}
-
 /*
  * Tests initialization.
  */
@@ -591,43 +446,6 @@
 }
 
 /*
- * Tests prime import using VGEM sharing buffer.
- */
-static int test_import_vgem()
-{
-	struct gbm_import_fd_data fd_data;
-	int vgem_fd = drm_open_vgem();
-	struct drm_prime_handle prime_handle;
-	struct gbm_bo *bo;
-	const int width = 123;
-	const int height = 456;
-	const int bytes_per_pixel = 4;
-	const int size = width * height * bytes_per_pixel;
-
-	if (vgem_fd <= 0)
-		return 1;
-
-	CHECK(create_vgem_bo(vgem_fd, size, &prime_handle.handle) == 0);
-	prime_handle.flags = DRM_CLOEXEC;
-	CHECK(drmIoctl(vgem_fd, DRM_IOCTL_PRIME_HANDLE_TO_FD, &prime_handle) == 0);
-
-	fd_data.fd = prime_handle.fd;
-	fd_data.width = width;
-	fd_data.height = height;
-	fd_data.stride = width * bytes_per_pixel;
-	fd_data.format = GBM_FORMAT_XRGB8888;
-
-	bo = gbm_bo_import(gbm, GBM_BO_IMPORT_FD, &fd_data, GBM_BO_USE_RENDERING);
-	CHECK(check_bo(bo));
-	gbm_bo_destroy(bo);
-	close(prime_handle.fd);
-
-	close(vgem_fd);
-
-	return 1;
-}
-
-/*
  * Tests prime import using dma-buf API.
  */
 static int test_import_dmabuf()
@@ -687,7 +505,7 @@
 			fd_data.num_fds = num_planes;
 
 			for (p = 0; p < num_planes; p++) {
-				fd_data.fds[p] = gbm_bo_get_plane_fd(bo1, p);
+				fd_data.fds[p] = gbm_bo_get_fd_for_plane(bo1, p);
 				CHECK(fd_data.fds[p] >= 0);
 
 				fd_data.strides[p] = gbm_bo_get_stride_for_plane(bo1, p);
@@ -737,11 +555,11 @@
 	addr = map_data = NULL;
 
 	bo = gbm_bo_create(gbm, width, height, GBM_FORMAT_ARGB8888,
-			   GBM_BO_USE_SW_READ_RARELY | GBM_BO_USE_SW_WRITE_RARELY);
+			   GBM_BO_USE_SW_READ_OFTEN | GBM_BO_USE_SW_WRITE_OFTEN);
 	CHECK(check_bo(bo));
 
-	addr = gbm_bo_map2(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ_WRITE, &stride,
-			   &map_data, 0);
+	addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ_WRITE, &stride,
+			  &map_data);
 
 	CHECK(addr != MAP_FAILED);
 	CHECK(map_data);
@@ -757,8 +575,8 @@
 	stride = 0;
 	addr = map_data = NULL;
 
-	addr = gbm_bo_map2(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ_WRITE, &stride,
-			   &map_data, 0);
+	addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ_WRITE, &stride,
+			  &map_data);
 
 	CHECK(addr != MAP_FAILED);
 	CHECK(map_data);
@@ -792,7 +610,7 @@
 	CHECK(prime_fd > 0);
 
 	stride = gbm_bo_get_stride(bo);
-	length = gbm_bo_get_plane_size(bo, 0);
+	length = (uint32_t)lseek(prime_fd, 0, SEEK_END);
 	CHECK(stride > 0);
 	CHECK(length > 0);
 
@@ -850,8 +668,8 @@
 	ret = close(prime_fd);
 	CHECK(ret == 0);
 
-	addr = gbm_bo_map2(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ, &stride,
-			   &map_data, 0);
+	addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ, &stride,
+			  &map_data);
 
 	CHECK(addr != MAP_FAILED);
 	CHECK(map_data);
@@ -886,8 +704,8 @@
 	bo = gbm_bo_create(gbm, width, height, GBM_FORMAT_ARGB8888, buffer_create_flag);
 	CHECK(check_bo(bo));
 
-	addr = gbm_bo_map2(bo, 0, 0, width, height, GBM_BO_TRANSFER_WRITE, &stride,
-			   &map_data, 0);
+	addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_WRITE, &stride,
+			  &map_data);
 
 	CHECK(addr != MAP_FAILED);
 	CHECK(map_data);
@@ -907,8 +725,8 @@
 	stride = 0;
 	addr = map_data = NULL;
 
-	addr = gbm_bo_map2(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ, &stride,
-			   &map_data, 0);
+	addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ, &stride,
+			  &map_data);
 
 	CHECK(addr != MAP_FAILED);
 	CHECK(map_data);
@@ -928,19 +746,18 @@
 	return 1;
 }
 
-
 static int test_gem_map_format(int format_index,
 			       enum gbm_bo_flags buffer_create_flag)
 {
 	uint8_t *pixel;
 	struct gbm_bo *bo;
 	void *map_data, *addr;
-	uint32_t x, y, p, w, h, b, planes, bytes_per_pixel, pixel_data_mask, idx;
+	uint32_t x, y, b, bytes_per_pixel, pixel_data_mask, idx;
 	uint8_t byte_mask;
 	uint32_t stride = 0;
 	const int width = 333;
 	const int height = 444;
-	const uint32_t pixel_format = format_info_list[format_index].pixel_format;
+	const uint32_t pixel_format = mappable_format_list[format_index].pixel_format;
 
 	addr = map_data = NULL;
 	if (!gbm_device_is_format_supported(gbm, pixel_format, buffer_create_flag))
@@ -948,66 +765,54 @@
 
 	bo = gbm_bo_create(gbm, width, height, pixel_format, buffer_create_flag);
 	CHECK(check_bo(bo));
-	planes = gbm_bo_get_plane_count(bo);
-	CHECK(planes == format_info_list[format_index].num_planes);
 
-	for (p = 0; p < planes; ++p) {
-		w = width / format_info_list[format_index].planes[p].subsample_rate;
-		h = height / format_info_list[format_index].planes[p].subsample_rate;
-		addr = gbm_bo_map2(bo, 0, 0, w, h, GBM_BO_TRANSFER_WRITE, &stride,
-				   &map_data, p);
+	addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_WRITE, &stride,
+			   &map_data);
 
-		CHECK(addr != MAP_FAILED);
-		CHECK(map_data);
-		CHECK(stride > 0);
+	CHECK(addr != MAP_FAILED);
+	CHECK(map_data);
+	CHECK(stride > 0);
 
-		pixel = (uint8_t *)addr;
-		bytes_per_pixel = format_info_list[format_index].planes[p].bits_per_pixel / 8;
-		for (y = 0; y < h; ++y) {
-			for (x = 0; x < w; ++x) {
-				idx = y * stride + x * bytes_per_pixel;
-				for (b = 0; b < bytes_per_pixel; ++b)
-					pixel[idx + b] = y ^ x ^ b;
-			}
+	pixel = (uint8_t *)addr;
+	bytes_per_pixel = mappable_format_list[format_index].bits_per_pixel / 8;
+	for (y = 0; y < height; ++y) {
+		for (x = 0; x < width; ++x) {
+			idx = y * stride + x * bytes_per_pixel;
+			for (b = 0; b < bytes_per_pixel; ++b)
+				pixel[idx + b] = y ^ x ^ b;
 		}
-		gbm_bo_unmap(bo, map_data);
-		stride = 0;
-		addr = map_data = NULL;
 	}
+	gbm_bo_unmap(bo, map_data);
+	stride = 0;
+	addr = map_data = NULL;
 
 	/* Re-map and verify written previously data. */
-	for (p = 0; p < planes; ++p) {
-		w = width / format_info_list[format_index].planes[p].subsample_rate;
-		h = height / format_info_list[format_index].planes[p].subsample_rate;
-		addr = gbm_bo_map2(bo, 0, 0, w, h, GBM_BO_TRANSFER_READ, &stride,
-				   &map_data, p);
+	addr = gbm_bo_map(bo, 0, 0, width, height, GBM_BO_TRANSFER_READ, &stride,
+			  &map_data);
 
-		CHECK(addr != MAP_FAILED);
-		CHECK(map_data);
-		CHECK(stride > 0);
+	CHECK(addr != MAP_FAILED);
+	CHECK(map_data);
+	CHECK(stride > 0);
 
-		pixel = (uint8_t *)addr;
-		bytes_per_pixel = format_info_list[format_index].planes[p].bits_per_pixel / 8;
-		pixel_data_mask = format_info_list[format_index].planes[p].data_mask;
-		for (y = 0; y < h; ++y) {
-			for (x = 0; x < w; ++x) {
-				idx = y * stride + x * bytes_per_pixel;
-				for (b = 0; b < bytes_per_pixel; ++b) {
-					byte_mask = pixel_data_mask >> (8 * b);
-					CHECK((pixel[idx + b] & byte_mask) == ((uint8_t)(y ^ x ^ b) & byte_mask));
-				}
+	pixel = (uint8_t *)addr;
+	pixel_data_mask = mappable_format_list[format_index].data_mask;
+	for (y = 0; y < height; ++y) {
+		for (x = 0; x < width; ++x) {
+			idx = y * stride + x * bytes_per_pixel;
+			for (b = 0; b < bytes_per_pixel; ++b) {
+				byte_mask = pixel_data_mask >> (8 * b);
+				CHECK((pixel[idx + b] & byte_mask) == ((uint8_t)(y ^ x ^ b) & byte_mask));
 			}
 		}
-		gbm_bo_unmap(bo, map_data);
-		stride = 0;
-		addr = map_data = NULL;
 	}
+	gbm_bo_unmap(bo, map_data);
+	stride = 0;
+	addr = map_data = NULL;
 
 	gbm_bo_destroy(bo);
 	return 1;
 }
 
-
 int main(int argc, char *argv[])
 {
 	int result, i, j;
@@ -1025,17 +830,16 @@
 	result &= test_alloc_free_usage();
 	result &= test_user_data();
 	result &= test_export();
-	result &= test_import_vgem();
 	result &= test_import_dmabuf();
 	result &= test_import_modifier();
 	result &= test_gem_map();
 
 	// TODO(crbug.com/752669)
 	if (strcmp(gbm_device_get_backend_name(gbm), "tegra")) {
-		for (i = 0; i < ARRAY_SIZE(buffer_list); ++i) {
-			result &= test_gem_map_tiling(buffer_list[i]);
-			for (j = 0; j < ARRAY_SIZE(format_info_list); ++j)
-				result &= test_gem_map_format(j, buffer_list[i]);
+		for (i = 0; i < ARRAY_SIZE(mappable_usage_list); ++i) {
+			result &= test_gem_map_tiling(mappable_usage_list[i]);
+			for (j = 0; j < ARRAY_SIZE(mappable_format_list); ++j)
+				result &= test_gem_map_format(j, mappable_usage_list[i]);
 		}
 
 		result &= test_dmabuf_map();
diff --git a/client/site_tests/graphics_HwOverlays/canvas_2d_low_latency.html b/client/site_tests/graphics_HwOverlays/canvas_2d_low_latency.html
index 707a09e..c931724 100644
--- a/client/site_tests/graphics_HwOverlays/canvas_2d_low_latency.html
+++ b/client/site_tests/graphics_HwOverlays/canvas_2d_low_latency.html
@@ -53,7 +53,7 @@
     ctx.stroke(); // Draw it
     draw_passes_count++;
   }
-  setInterval(draw_pass, 1000);
+  setInterval(draw_pass, 33);
 
   function get_draw_passes_count() {
     return draw_passes_count;
diff --git a/client/site_tests/graphics_HwOverlays/canvas_3d.html b/client/site_tests/graphics_HwOverlays/canvas_3d.html
index a2206ed..0962d63 100644
--- a/client/site_tests/graphics_HwOverlays/canvas_3d.html
+++ b/client/site_tests/graphics_HwOverlays/canvas_3d.html
@@ -50,7 +50,7 @@
     ctx.clear(ctx.COLOR_BUFFER_BIT);
     draw_passes_count++;
   }
-  setInterval(draw_pass, 1000);
+  setInterval(draw_pass, 33);
 
   function get_draw_passes_count() {
     return draw_passes_count;
diff --git a/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.canvas2DLowLatency b/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.canvas2DLowLatency
index 902104d..7aad0d5 100644
--- a/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.canvas2DLowLatency
+++ b/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.canvas2DLowLatency
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_HwOverlays.SkiaRenderer.canvas2DLowLatency"
 TIME = "SHORT"
diff --git a/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.canvas3D b/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.canvas3D
index 4fd5fd8..7fc0ccc 100644
--- a/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.canvas3D
+++ b/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.canvas3D
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_HwOverlays.SkiaRenderer.canvas3D"
 TIME = "SHORT"
diff --git a/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.video b/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.video
index cdc4bdf..60dedbe 100644
--- a/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.video
+++ b/client/site_tests/graphics_HwOverlays/control.SkiaRenderer.video
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_HwOverlays.SkiaRenderer.video"
 TIME = "FAST"
diff --git a/client/site_tests/graphics_HwOverlays/control.canvas2DLowLatency b/client/site_tests/graphics_HwOverlays/control.canvas2DLowLatency
index 5e8399a..f71ad82 100644
--- a/client/site_tests/graphics_HwOverlays/control.canvas2DLowLatency
+++ b/client/site_tests/graphics_HwOverlays/control.canvas2DLowLatency
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_HwOverlays.canvas2DLowLatency"
 TIME = "SHORT"
diff --git a/client/site_tests/graphics_HwOverlays/control.canvas3D b/client/site_tests/graphics_HwOverlays/control.canvas3D
index f4f2499..cddd329 100644
--- a/client/site_tests/graphics_HwOverlays/control.canvas3D
+++ b/client/site_tests/graphics_HwOverlays/control.canvas3D
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_HwOverlays.canvas3D"
 TIME = "SHORT"
diff --git a/client/site_tests/graphics_HwOverlays/control.video b/client/site_tests/graphics_HwOverlays/control.video
index 89b9470..7181138 100644
--- a/client/site_tests/graphics_HwOverlays/control.video
+++ b/client/site_tests/graphics_HwOverlays/control.video
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_HwOverlays.video"
 TIME = "FAST"
@@ -22,5 +23,4 @@
 
 job.run_test('graphics_HwOverlays',
              html_file=HTML_TEST_FILE,
-             data_file_url=DATA_FILE_URL,
-             is_video=True)
+             data_file_url=DATA_FILE_URL)
diff --git a/client/site_tests/graphics_HwOverlays/graphics_HwOverlays.py b/client/site_tests/graphics_HwOverlays/graphics_HwOverlays.py
index 27f2942..9185ac5 100644
--- a/client/site_tests/graphics_HwOverlays/graphics_HwOverlays.py
+++ b/client/site_tests/graphics_HwOverlays/graphics_HwOverlays.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,18 +8,17 @@
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import chrome_binary_test
 from autotest_lib.client.cros.graphics import graphics_utils
 from autotest_lib.client.common_lib.cros import chrome
 from autotest_lib.client.cros import constants
-from autotest_lib.client.cros.multimedia import display_facade_native
+from autotest_lib.client.cros.multimedia import display_facade as display_facade_lib
 from autotest_lib.client.cros.multimedia import facade_resource
 
 EXTRA_BROWSER_ARGS = ['--enable-experimental-web-platform-features',
                       '--force-tablet-mode=clamshell']
 
-class graphics_HwOverlays(graphics_utils.GraphicsTest,
-                          chrome_binary_test.ChromeBinaryTest):
+
+class graphics_HwOverlays(graphics_utils.GraphicsTest):
     """Runs a given html and measures stuff."""
     version = 1
 
@@ -46,22 +46,23 @@
         logging.info("Internal display ID is %s", internal_display_id)
         display_facade.set_display_rotation(internal_display_id, rotation=0)
 
-    def run_once(self, html_file, data_file_url = None, is_video = False,
+    def run_once(self, html_file, data_file_url = None,
                  use_skia_renderer = False):
         """Normalizes the environment, starts a Chrome environment, and
         executes the test in `html_file`.
         """
         if not graphics_utils.is_drm_atomic_supported():
-            logging.info('Skipping test: platform does not support DRM atomic')
-            return
+            raise error.TestNAError(
+                    'Skipping test: platform does not support DRM atomic')
 
         if graphics_utils.get_max_num_available_drm_planes() <= 2:
-            logging.info('Skipping test: platform supports 2 or less planes')
-            return
+            raise error.TestNAError(
+                    'Skipping test: platform supports 2 or less planes')
 
+        is_video = "video" in html_file
         if is_video and not graphics_utils.is_nv12_supported_by_drm_planes():
-            logging.info('Skipping test: platform does not support NV12 planes')
-            return
+            raise error.TestNAError(
+                    'Skipping test: platform does not support NV12 planes')
 
         extra_browser_args = EXTRA_BROWSER_ARGS
         if use_skia_renderer:
@@ -75,12 +76,12 @@
                            autotest_ext=True,
                            init_network_controller=True) as cr:
             facade = facade_resource.FacadeResource(cr)
-            display_facade = display_facade_native.DisplayFacadeNative(facade)
+            display_facade = display_facade_lib.DisplayFacadeLocal(facade)
             # TODO(crbug.com/927103): Run on an external monitor if one is
             # present.
             if not display_facade.has_internal_display():
-                logging.info('Skipping test: platform has no internal display')
-                return
+                raise error.TestNAError(
+                        'Skipping test: platform has no internal display')
 
             self.set_rotation_to_zero(display_facade)
 
diff --git a/client/site_tests/graphics_Idle/control b/client/site_tests/graphics_Idle/control
index 8a87da4..a92d49f 100644
--- a/client/site_tests/graphics_Idle/control
+++ b/client/site_tests/graphics_Idle/control
@@ -2,9 +2,10 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = 'chromeos-gfx'
 NAME = "graphics_Idle"
-ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system"
+ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system, suite:pvs-graphics"
 PURPOSE = "Verify that graphics behaves as expected on idle."
 CRITERIA = """
 This test will fail if we don't see the appropriate GPU idle states.
diff --git a/client/site_tests/graphics_Idle/control.arc b/client/site_tests/graphics_Idle/control.arc
index a96fd47..b1b9c27 100644
--- a/client/site_tests/graphics_Idle/control.arc
+++ b/client/site_tests/graphics_Idle/control.arc
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = 'chromeos-gfx'
 NAME = "graphics_Idle.arc"
 ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system"
diff --git a/client/site_tests/graphics_Idle/graphics_Idle.py b/client/site_tests/graphics_Idle/graphics_Idle.py
index 8dff479..f37d3d5 100755
--- a/client/site_tests/graphics_Idle/graphics_Idle.py
+++ b/client/site_tests/graphics_Idle/graphics_Idle.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -25,7 +26,11 @@
 GEM_OBJECTS_PATHS = ['/sys/kernel/debug/dri/0/i915_gem_objects']
 GEM_PATHS = ['/sys/kernel/debug/dri/0/i915_gem_active']
 PSR_PATHS = ['/sys/kernel/debug/dri/0/i915_edp_psr_status']
-RC6_PATHS = ['/sys/kernel/debug/dri/0/i915_drpc_info']
+# Kernel 5.7+ has DRPC info in gt/ subdirectory
+RC6_PATHS = [
+    '/sys/kernel/debug/dri/0/i915_drpc_info',
+    '/sys/kernel/debug/dri/0/gt/drpc'
+]
 
 
 class graphics_Idle(graphics_utils.GraphicsTest):
@@ -429,6 +434,14 @@
         to become idle (i.e. the i915_gem_active list or i915_gem_objects
         client/process gem object counts need to go to 0);
         idle before doing so, and retry every second for 20 seconds."""
+        kernel_version = utils.get_kernel_version()[0:4].rstrip(".")
+        # Skip test on kernel 5.10 and above.
+        if common_utils.compare_versions(kernel_version, '5.10') != -1:
+            # The data needed for this test was removed in the 5.10 kernel.
+            # See b/179453336 for details.
+            logging.info('Skipping gem idle check on 5.10 and above')
+            return ''
+
         logging.info('Running verify_graphics_gem_idle')
         if utils.get_cpu_soc_family() == 'x86_64':
             tries = 0
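
The skip above assumes compare_versions() returns -1 only when the running kernel is older than 5.10. A standard-library sketch of the intended semantics (not what the test calls; it uses the autotest helper) clarifies that the comparison is numeric per component, not lexicographic:

    def is_at_least(version, minimum):
        """Numeric per-component comparison, e.g. '5.10' >= '5.4' (unlike string order)."""
        as_tuple = lambda v: tuple(int(p) for p in v.split('.'))
        return as_tuple(version) >= as_tuple(minimum)

    assert is_at_least('5.10', '5.4') and not is_at_least('4.19', '5.10')
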
diff --git a/client/site_tests/graphics_KernelConfig/control b/client/site_tests/graphics_KernelConfig/control
index f11bc49..a8cd1e1 100644
--- a/client/site_tests/graphics_KernelConfig/control
+++ b/client/site_tests/graphics_KernelConfig/control
@@ -2,11 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 NAME = "graphics_KernelConfig"
 AUTHOR = "The Chromium OS Authors"
 PURPOSE = "Verify various graphics kernel flags."
 CRITERIA = "Fails if kernel configuration is unexpected."
-ATTRIBUTES = 'suite:graphics_per-day, suite:graphics_system'
+ATTRIBUTES = 'suite:graphics_per-day, suite:graphics_system, suite:pvs-graphics'
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = 'graphics'
diff --git a/client/site_tests/graphics_KernelConfig/graphics_KernelConfig.py b/client/site_tests/graphics_KernelConfig/graphics_KernelConfig.py
index 67b7ac2..b523c65 100644
--- a/client/site_tests/graphics_KernelConfig/graphics_KernelConfig.py
+++ b/client/site_tests/graphics_KernelConfig/graphics_KernelConfig.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -16,28 +17,28 @@
     userspace_arch = None
 
     IS_BUILTIN = [
-        # Sanity checks; should be present in builds as builtins.
+            # Confidence checks; should be present in builds as builtins.
     ]
     IS_MODULE = [
-        # Sanity checks; should be present in builds as modules.
+            # Confidence checks; should be present in builds as modules.
     ]
     IS_ENABLED = [
-        # Sanity checks; should be enabled.
+            # Confidence checks; should be enabled.
     ]
     IS_MISSING = [
-        # Sanity checks; should be disabled.
-        'DRM_KMS_FB_HELPER'
-        'FB',
-        'FB_CFB_COPYAREA',
-        'FB_CFB_FILLRECT',
-        'FB_CFB_IMAGEBLIT',
-        'FB_CFB_REV_PIXELS_IN_BYTE',
-        'FB_SIMPLE',
-        'FB_SYS_COPYAREA',
-        'FB_SYS_FOPS',
-        'FB_SYS_FILLRECT',
-        'FB_SYS_IMAGEBLIT',
-        'FB_VIRTUAL'
+            # Confidence checks; should be disabled.
+            'DRM_KMS_FB_HELPER',
+            'FB',
+            'FB_CFB_COPYAREA',
+            'FB_CFB_FILLRECT',
+            'FB_CFB_IMAGEBLIT',
+            'FB_CFB_REV_PIXELS_IN_BYTE',
+            'FB_SIMPLE',
+            'FB_SYS_COPYAREA',
+            'FB_SYS_FOPS',
+            'FB_SYS_FILLRECT',
+            'FB_SYS_IMAGEBLIT',
+            'FB_VIRTUAL'
     ]
 
     def setup(self):
@@ -60,10 +61,10 @@
         logging.debug(config._config)
 
         # Run the static checks.
-        map(config.has_builtin, self.IS_BUILTIN)
-        map(config.has_module, self.IS_MODULE)
-        map(config.is_enabled, self.IS_ENABLED)
-        map(config.is_missing, self.IS_MISSING)
+        list(map(config.has_builtin, self.IS_BUILTIN))
+        list(map(config.has_module, self.IS_MODULE))
+        list(map(config.is_enabled, self.IS_ENABLED))
+        list(map(config.is_missing, self.IS_MISSING))
 
         # Raise a failure if anything unexpected was seen.
         if len(config.failures()):
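
The list(...) wrappers added above matter because Python 3's map() is lazy: without forcing the iterator, none of the checks would actually run. A small standalone illustration, independent of the autotest kconfig helper:

    seen = []
    map(seen.append, ['FB', 'FB_VIRTUAL'])        # Python 3: lazy, nothing runs
    assert seen == []
    list(map(seen.append, ['FB', 'FB_VIRTUAL']))  # forcing the iterator runs the side effects
    assert seen == ['FB', 'FB_VIRTUAL']
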
diff --git a/client/site_tests/graphics_KernelMemory/control b/client/site_tests/graphics_KernelMemory/control
index 31d2729..917a5c1 100644
--- a/client/site_tests/graphics_KernelMemory/control
+++ b/client/site_tests/graphics_KernelMemory/control
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = 'chromeos-gfx'
 NAME = "graphics_KernelMemory"
 PURPOSE = 'Verify that kernel exports graphics memory info through sysfs.'
@@ -16,7 +17,7 @@
 TEST_CLASS = "graphics"
 TEST_TYPE = "client"
 
-ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system"
+ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system, suite:pvs-graphics"
 BUG_TEMPLATE = {
     'components': ['OS>Kernel>Graphics'],
 }
diff --git a/client/site_tests/graphics_KernelMemory/graphics_KernelMemory.py b/client/site_tests/graphics_KernelMemory/graphics_KernelMemory.py
index 7886822..b2ca385 100644
--- a/client/site_tests/graphics_KernelMemory/graphics_KernelMemory.py
+++ b/client/site_tests/graphics_KernelMemory/graphics_KernelMemory.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_LibDRM/control b/client/site_tests/graphics_LibDRM/control
index b7d521d..1b6b554 100644
--- a/client/site_tests/graphics_LibDRM/control
+++ b/client/site_tests/graphics_LibDRM/control
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = 'chromeos-gfx'
 NAME = "graphics_LibDRM"
 PURPOSE = 'Verify libdrm is working correctly by running libdrm test apps.'
diff --git a/client/site_tests/graphics_LibDRM/graphics_LibDRM.py b/client/site_tests/graphics_LibDRM/graphics_LibDRM.py
index 49077ec..3c186fc 100644
--- a/client/site_tests/graphics_LibDRM/graphics_LibDRM.py
+++ b/client/site_tests/graphics_LibDRM/graphics_LibDRM.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_PerfControl/control b/client/site_tests/graphics_PerfControl/control
index 6bff9c7..e20d4ff 100644
--- a/client/site_tests/graphics_PerfControl/control
+++ b/client/site_tests/graphics_PerfControl/control
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 NAME = 'graphics_PerfControl'
 AUTHOR = 'chromeos-gfx'
 PURPOSE = 'Verifies that we are able to obtain a cool and idle machine.'
@@ -10,7 +11,7 @@
 This test will also fail if more than 10 percent of the system CPU is
 busy even after a wait of 60 seconds.
 """
-ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system"
+ATTRIBUTES = "suite:bvt-perbuild, suite:graphics, suite:graphics_per-day, suite:graphics_system, suite:pvs-graphics"
 TIME='fast'
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = "Performance"
diff --git a/client/site_tests/graphics_PerfControl/graphics_PerfControl.py b/client/site_tests/graphics_PerfControl/graphics_PerfControl.py
index 95fbb51..8348155 100644
--- a/client/site_tests/graphics_PerfControl/graphics_PerfControl.py
+++ b/client/site_tests/graphics_PerfControl/graphics_PerfControl.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_Power/control.dev b/client/site_tests/graphics_Power/control.dev
index 6d6c7c9..06a7bff 100644
--- a/client/site_tests/graphics_Power/control.dev
+++ b/client/site_tests/graphics_Power/control.dev
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_Power.dev"
 PURPOSE = "Execute the graphics_Power system logger for development/testing"
diff --git a/client/site_tests/graphics_Power/graphics_Power.py b/client/site_tests/graphics_Power/graphics_Power.py
index 13a76ad..6cd2bdc 100644
--- a/client/site_tests/graphics_Power/graphics_Power.py
+++ b/client/site_tests/graphics_Power/graphics_Power.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_SanAngeles/control b/client/site_tests/graphics_SanAngeles/control
index 3172009..cff2e12 100644
--- a/client/site_tests/graphics_SanAngeles/control
+++ b/client/site_tests/graphics_SanAngeles/control
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = 'chromeos-gfx'
 NAME = 'graphics_SanAngeles'
 PURPOSE = 'Benchmark OpenGL object rendering.'
diff --git a/client/site_tests/graphics_SanAngeles/control.bvt b/client/site_tests/graphics_SanAngeles/control.bvt
index da1b2c4..181cf8d 100644
--- a/client/site_tests/graphics_SanAngeles/control.bvt
+++ b/client/site_tests/graphics_SanAngeles/control.bvt
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = 'chromeos-gfx'
 NAME = 'graphics_SanAngeles.bvt'
 PURPOSE = 'Benchmark OpenGL object rendering.'
diff --git a/client/site_tests/graphics_SanAngeles/control.hwqual b/client/site_tests/graphics_SanAngeles/control.hwqual
index 5883274..8873eca 100644
--- a/client/site_tests/graphics_SanAngeles/control.hwqual
+++ b/client/site_tests/graphics_SanAngeles/control.hwqual
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = 'chromeos-gfx'
 NAME = 'graphics_SanAngeles.hwqual'
 PURPOSE = 'Benchmark OpenGL object rendering.'
diff --git a/client/site_tests/graphics_SanAngeles/graphics_SanAngeles.py b/client/site_tests/graphics_SanAngeles/graphics_SanAngeles.py
index 8ac4e49..1d3df33 100644
--- a/client/site_tests/graphics_SanAngeles/graphics_SanAngeles.py
+++ b/client/site_tests/graphics_SanAngeles/graphics_SanAngeles.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_Sanity/control b/client/site_tests/graphics_Sanity/control
deleted file mode 100644
index 868ee42..0000000
--- a/client/site_tests/graphics_Sanity/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'chromeos-gfx'
-NAME = 'graphics_Sanity'
-PURPOSE = 'Verifies we can draw and capture graphics from applications.'
-CRITERIA = """
-This test fails if application screen shots cannot capture the screen output.
-"""
-ATTRIBUTES = "suite:graphics, suite:graphics_per-day, suite:graphics_system"
-TIME='SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = "graphics"
-TEST_TYPE = 'client'
-JOB_RETRIES = 2
-BUG_TEMPLATE = {
-    'components': ['OS>Kernel>Graphics'],
-}
-
-DOC = """
-This test verifies we can capture graphics from applications. It is meant to be
-a basic check if the graphics system is alive.
-"""
-
-job.run_test('graphics_Sanity')
diff --git a/client/site_tests/graphics_Sanity/graphics_Sanity.py b/client/site_tests/graphics_Sanity/graphics_Sanity.py
deleted file mode 100644
index 84ca18e..0000000
--- a/client/site_tests/graphics_Sanity/graphics_Sanity.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import logging
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import service_stopper
-from autotest_lib.client.cros.graphics import graphics_utils
-
-# to run this test manually on a test target
-# ssh root@machine
-# cd /usr/local/glbench/bin
-# stop ui
-# ./windowmanagertest --screenshot1_sec 2 --screenshot2_sec 1 --cooldown_sec 1 \
-#    --screenshot1_cmd "screenshot screenshot1_generated.png" \
-#    --screenshot2_cmd "screenshot screenshot2_generated.png"
-# start ui
-
-
-class graphics_Sanity(graphics_utils.GraphicsTest):
-    """
-    This test is meant to be used as a quick sanity check for GL/GLES.
-    """
-    version = 1
-
-    # None-init vars used by cleanup() here, in case setup() fails
-    _services = None
-
-    def cleanup(self):
-        super(graphics_Sanity, self).cleanup()
-        if self._services:
-            self._services.restore_services()
-
-    def test_something_on_screen(self):
-        """Check if something is drawn on screen: i.e. not a black screen.
-
-        @raises TestFail if we cannot determine there was something on screen.
-        """
-
-        def can_take_screenshot():
-            """Check that taking a screenshot can succeed.
-
-            There are cases when trying to take a screenshot on the device
-            fails. e.g. the display has gone to sleep, we have logged out and
-            the UI has not come back up yet etc.
-            """
-            try:
-                graphics_utils.take_screenshot(self.resultsdir,
-                                               'temp screenshot')
-                return True
-            except:
-                return False
-
-        utils.poll_for_condition(
-            can_take_screenshot,
-            sleep_interval=1,
-            desc='Failed to take a screenshot. There may be an issue with this '
-            'ChromeOS image.')
-
-        w, h = graphics_utils.get_internal_resolution()
-        megapixels = (w * h) / 1000000
-        filesize_threshold = 15 * megapixels
-        screenshot1 = graphics_utils.take_screenshot(self.resultsdir,
-                                                     'oobe or signin')
-
-        with chrome.Chrome() as cr:
-            tab = cr.browser.tabs[0]
-            tab.Navigate('chrome://settings')
-            tab.WaitForDocumentReadyStateToBeComplete()
-
-            screenshot2 = graphics_utils.take_screenshot(
-                self.resultsdir, 'settings page')
-
-        for screenshot in [screenshot1, screenshot2]:
-            file_size_kb = os.path.getsize(screenshot) / 1000
-
-            # Use compressed file size to tell if anything is on screen.
-            if file_size_kb > filesize_threshold:
-                return
-
-        raise error.TestFail(
-            'Screenshot filesize is smaller than expected(%s <= %s). This '
-            'indicates that there is nothing on screen. This ChromeOS image '
-            'could be unusable. Check the screenshot in the results folder.' %
-            (file_size_kb, filesize_threshold))
-
-    def test_generated_screenshots_match_expectation(self):
-        """Draws a texture with a soft ellipse twice and captures each image.
-        Compares the output fuzzily against reference images.
-        """
-        self._services = service_stopper.ServiceStopper(['ui'])
-        self._services.stop_services()
-
-        screenshot1_reference = os.path.join(self.bindir,
-                                             'screenshot1_reference.png')
-        screenshot1_generated = os.path.join(self.resultsdir,
-                                             'screenshot1_generated.png')
-        screenshot1_resized = os.path.join(self.resultsdir,
-                                           'screenshot1_generated_resized.png')
-        screenshot2_reference = os.path.join(self.bindir,
-                                             'screenshot2_reference.png')
-        screenshot2_generated = os.path.join(self.resultsdir,
-                                             'screenshot2_generated.png')
-        screenshot2_resized = os.path.join(self.resultsdir,
-                                           'screenshot2_generated_resized.png')
-
-        exefile = os.path.join('/usr/local/', 'glbench', 'bin', 'windowmanagertest');
-
-        # Delay before screenshot: 1 second has caused failures.
-        options = ' --screenshot1_sec 2'
-        options += ' --screenshot2_sec 1'
-        options += ' --cooldown_sec 1'
-        # perceptualdiff can handle only 8 bit images.
-        screenshot_cmd = ' "screenshot %s"'
-        options += ' --screenshot1_cmd' + screenshot_cmd % screenshot1_generated
-        options += ' --screenshot2_cmd' + screenshot_cmd % screenshot2_generated
-
-        cmd = exefile + ' ' + options
-        utils.run(
-            cmd, stdout_tee=utils.TEE_TO_LOGS, stderr_tee=utils.TEE_TO_LOGS)
-
-        convert_cmd = ('convert -channel RGB -colorspace RGB -depth 8'
-                       " -resize '100x100!' %s %s")
-        utils.system(convert_cmd % (screenshot1_generated, screenshot1_resized))
-        utils.system(convert_cmd % (screenshot2_generated, screenshot2_resized))
-        os.remove(screenshot1_generated)
-        os.remove(screenshot2_generated)
-
-        diff_cmd = 'perceptualdiff -verbose %s %s'
-        utils.system(diff_cmd % (screenshot1_reference, screenshot1_resized))
-        utils.system(diff_cmd % (screenshot2_reference, screenshot2_resized))
-
-    def run_once(self):
-        if graphics_utils.get_display_resolution() is None:
-            logging.warning('Skipping test because there is no screen')
-            return
-        self.add_failures('graphics_Sanity')
-        self.wake_screen_with_keyboard()
-        self.test_something_on_screen()
-        self.test_generated_screenshots_match_expectation()
-        self.remove_failures('graphics_Sanity')
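Editor's note: the removed graphics_Sanity test decided whether anything was drawn by checking the size of a compressed screenshot, since a mostly black screen compresses to almost nothing. A standalone restatement of that heuristic is sketched below; the function name and the 15 kB-per-megapixel default simply mirror the deleted code and are illustrative, not a supported API.

    import os

    def screenshot_looks_blank(png_path, width, height, kb_per_megapixel=15):
        # Heuristic from the removed test: threshold the compressed PNG size
        # against the internal display resolution. A very small file suggests
        # an all-black (or near-empty) screen.
        megapixels = (width * height) / 1e6
        threshold_kb = kb_per_megapixel * megapixels
        file_size_kb = os.path.getsize(png_path) / 1000
        return file_size_kb <= threshold_kb

The deleted run_once wrapped this check with add_failures/remove_failures bookkeeping and a separate perceptualdiff comparison against the reference images shown above.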
diff --git a/client/site_tests/graphics_Sanity/screenshot1_reference.png b/client/site_tests/graphics_Sanity/screenshot1_reference.png
deleted file mode 100644
index 8a19aa9..0000000
--- a/client/site_tests/graphics_Sanity/screenshot1_reference.png
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/graphics_Sanity/screenshot2_reference.png b/client/site_tests/graphics_Sanity/screenshot2_reference.png
deleted file mode 100644
index a0c1d32..0000000
--- a/client/site_tests/graphics_Sanity/screenshot2_reference.png
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/graphics_Stress/control.50spirit b/client/site_tests/graphics_Stress/control.50spirit
deleted file mode 100644
index 11fa919..0000000
--- a/client/site_tests/graphics_Stress/control.50spirit
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "chromeos-gfx"
-NAME = "graphics_Stress.50spirit"
-PURPOSE = "Execute a graphics stress test."
-CRITERIA = "All tests must not crash/hang the GPU."
-# TODO(dbehr): reenable this test after rewriting as server test.
-#ATTRIBUTES = "suite:graphics, suite:graphics_browser, suite:graphics_per-day"
-TIME = "LENGTHY"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "graphics"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    'components': ['OS>Kernel>Graphics'],
-}
-
-DOC = """
-This test runs a graphics stress with 50 tabs SpiritBox.
-"""
-
-job.run_test('graphics_Stress', subtest='50spirit')
diff --git a/client/site_tests/graphics_Stress/control.blobaquariumyt b/client/site_tests/graphics_Stress/control.blobaquariumyt
index 16f066e..7a5f7e9 100644
--- a/client/site_tests/graphics_Stress/control.blobaquariumyt
+++ b/client/site_tests/graphics_Stress/control.blobaquariumyt
@@ -2,11 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_Stress.blobaquariumyt"
 PURPOSE = "Execute a graphics stress test."
 CRITERIA = "All tests must not crash/hang the GPU."
-ATTRIBUTES = "suite:graphics, suite:graphics_browser, suite:graphics_per-day"
+ATTRIBUTES = "suite:graphics, suite:graphics_browser, suite:graphics_per-week, suite:pvs-graphics"
 TIME = "LENGTHY"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "graphics"
diff --git a/client/site_tests/graphics_Stress/control.gmaps b/client/site_tests/graphics_Stress/control.gmaps
index d94cc51..e9044dd 100644
--- a/client/site_tests/graphics_Stress/control.gmaps
+++ b/client/site_tests/graphics_Stress/control.gmaps
@@ -2,11 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_Stress.gmaps"
 PURPOSE = "Execute a graphics stress test."
 CRITERIA = "All tests must not crash/hang the GPU."
-ATTRIBUTES = "suite:graphics, suite:graphics_browser, suite:graphics_per-day"
+ATTRIBUTES = "suite:graphics, suite:graphics_browser, suite:graphics_per-week, suite:pvs-graphics"
 TIME = "LENGTHY"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "graphics"
diff --git a/client/site_tests/graphics_Stress/control.restart b/client/site_tests/graphics_Stress/control.restart
index 91978d8..3a8936b 100644
--- a/client/site_tests/graphics_Stress/control.restart
+++ b/client/site_tests/graphics_Stress/control.restart
@@ -2,11 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_Stress.restart"
 PURPOSE = "Execute a graphics stress test."
 CRITERIA = "All tests must not crash/hang the GPU."
-ATTRIBUTES = "suite:graphics, suite:graphics_browser, suite:graphics_per-day"
+ATTRIBUTES = "suite:graphics, suite:graphics_browser, suite:graphics_per-week, suite:pvs-graphics"
 TIME = "LENGTHY"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "graphics"
diff --git a/client/site_tests/graphics_Stress/control.tabopenclose b/client/site_tests/graphics_Stress/control.tabopenclose
index b88bcfd..5a0fbd5 100644
--- a/client/site_tests/graphics_Stress/control.tabopenclose
+++ b/client/site_tests/graphics_Stress/control.tabopenclose
@@ -2,11 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_Stress.tabopenclose"
 PURPOSE = "Execute a graphics stress test."
 CRITERIA = "All tests must not crash/hang the GPU."
-ATTRIBUTES = "suite:graphics, suite:graphics_browser, suite:graphics_per-day"
+ATTRIBUTES = "suite:graphics, suite:graphics_browser, suite:graphics_per-week"
 TIME = "LENGTHY"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "graphics"
diff --git a/client/site_tests/graphics_Stress/control.ytvimeowebgl b/client/site_tests/graphics_Stress/control.ytvimeowebgl
deleted file mode 100644
index 153a3c5..0000000
--- a/client/site_tests/graphics_Stress/control.ytvimeowebgl
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "chromeos-gfx"
-NAME = "graphics_Stress.ytvimeowebgl"
-PURPOSE = "Execute a graphics stress test."
-CRITERIA = "All tests must not crash/hang the GPU."
-ATTRIBUTES = "suite:graphics, suite:graphics_browser, suite:graphics_per-day"
-TIME = "LENGTHY"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "graphics"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    'components': ['OS>Kernel>Graphics'],
-}
-
-DOC = """
-This test runs a graphics stress with YouTube, Vimeo and WebGL Aquarium.
-"""
-
-job.run_test('graphics_Stress', subtest='yt+vimeo+webgl')
diff --git a/client/site_tests/graphics_Stress/graphics_Stress.py b/client/site_tests/graphics_Stress/graphics_Stress.py
index 552cec0..80e4852 100644
--- a/client/site_tests/graphics_Stress/graphics_Stress.py
+++ b/client/site_tests/graphics_Stress/graphics_Stress.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_VTSwitch/control b/client/site_tests/graphics_VTSwitch/control
deleted file mode 100644
index 00c9f48..0000000
--- a/client/site_tests/graphics_VTSwitch/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'chromeos-gfx'
-NAME = "graphics_VTSwitch"
-PURPOSE = 'Verify that VT switching works.'
-CRITERIA = """
-Test fails if any of the following happen:
-- Unable to get a valid screenshot.
-- VT1 and VT2 are not sufficiently different.
-- VT1 does not remain the same between VT switches.
-- VT2 does not remain the same between VT switches.
-- Cannot login after repeated VT switching.
-"""
-TIME = "FAST"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "graphics"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:graphics_per-day, suite:bvt-perbuild"
-BUG_TEMPLATE = {
-    'components': ['OS>Kernel>Graphics'],
-}
-
-DOC = """
-Switches between VT1 and VT2 repeatedly and logs in/out of Chrome to make sure
-the VT switching was successful.  Also, checks and compares screenshots to make
-sure each VT remains the same between VT switches, and that VT1 and VT2 are
-sufficiently different.
-"""
-job.run_test("graphics_VTSwitch")
diff --git a/client/site_tests/graphics_VTSwitch/control.100 b/client/site_tests/graphics_VTSwitch/control.100
deleted file mode 100644
index d44edc9..0000000
--- a/client/site_tests/graphics_VTSwitch/control.100
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "sque"
-NAME = "graphics_VTSwitch.100"
-PURPOSE = 'Verify that VT switching works.'
-CRITERIA = """
-Test fails if any of the following happen:
-- Unable to get a valid screenshot.
-- VT1 and VT2 are not sufficiently different.
-- VT1 does not remain the same between VT switches.
-- VT2 does not remain the same between VT switches.
-- Cannot login after repeated VT switching.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "graphics"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    'components': ['OS>Kernel>Graphics'],
-}
-
-DOC = """
-Switches between VT1 and VT2 repeatedly and logs in/out of Chrome to make sure
-the VT switching was successful.  Also, checks and compares screenshots to make
-sure each VT remains the same between VT switches, and that VT1 and VT2 are
-sufficiently different.
-"""
-
-job.run_test("graphics_VTSwitch", num_iterations=100)
diff --git a/client/site_tests/graphics_VTSwitch/graphics_VTSwitch.py b/client/site_tests/graphics_VTSwitch/graphics_VTSwitch.py
deleted file mode 100644
index 92963f6..0000000
--- a/client/site_tests/graphics_VTSwitch/graphics_VTSwitch.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.graphics import graphics_utils
-from autotest_lib.client.cros.image_comparison import pdiff_image_comparer
-
-def get_percent_difference(file1, file2):
-    """
-    Performs pixel comparison of two files, given by their paths |file1|
-    and |file2| using terminal tool 'perceptualdiff' and returns percentage
-    difference of the total file size.
-
-    @param file1: path to image
-    @param file2: path to secondary image
-    @return: percentage difference of total file size.
-    @raise ValueError: if image dimensions are not the same
-    @raise OSError: if file does not exist or cannot be opened.
-
-    """
-    # Using pdiff image comparer to compare the two images. This class
-    # invokes the terminal tool perceptualdiff.
-    pdi = pdiff_image_comparer.PdiffImageComparer()
-    diff_bytes = pdi.compare(file1, file2)[0]
-    return round(100. * diff_bytes / os.path.getsize(file1))
-
-
-class graphics_VTSwitch(graphics_utils.GraphicsTest):
-    """
-    Verify that VT switching works.
-    """
-    version = 2
-    _WAIT = 5
-    # TODO(crosbug.com/36417): Need to handle more than one display screen.
-
-    @graphics_utils.GraphicsTest.failure_report_decorator('graphics_VTSwitch')
-    def run_once(self,
-                 num_iterations=2,
-                 similarity_percent_threshold=95,
-                 difference_percent_threshold=5):
-
-        # Check for chromebook type devices
-        if not utils.get_board_type() == 'CHROMEBOOK':
-            raise error.TestNAError('DUT is not Chromebook. Test Skipped.')
-
-        self._num_errors = 0
-        keyvals = {}
-
-        # Make sure we start in VT1.
-        self.open_vt1()
-
-        with chrome.Chrome():
-
-            # wait for Chrome to start before taking screenshot
-            time.sleep(10)
-
-            # Take screenshot of browser.
-            vt1_screenshot = self._take_current_vt_screenshot(1)
-
-            keyvals['num_iterations'] = num_iterations
-
-            # Go to VT2 and take a screenshot.
-            self.open_vt2()
-            vt2_screenshot = self._take_current_vt_screenshot(2)
-
-            # Make sure VT1 and VT2 are sufficiently different.
-            diff = get_percent_difference(vt1_screenshot, vt2_screenshot)
-            keyvals['percent_initial_VT1_VT2_difference'] = diff
-            if not diff >= difference_percent_threshold:
-                self._num_errors += 1
-                logging.error('VT1 and VT2 screenshots only differ by ' + \
-                              '%d %%: %s vs %s' %
-                              (diff, vt1_screenshot, vt2_screenshot))
-
-            num_identical_vt1_screenshots = 0
-            num_identical_vt2_screenshots = 0
-            max_vt1_difference_percent = 0
-            max_vt2_difference_percent = 0
-
-            # Repeatedly switch between VT1 and VT2.
-            for iteration in xrange(num_iterations):
-                logging.info('Iteration #%d', iteration)
-
-                # Go to VT1 and take a screenshot.
-                self.open_vt1()
-                current_vt1_screenshot = self._take_current_vt_screenshot(1)
-
-                # Make sure the current VT1 screenshot is the same as (or similar
-                # to) the original login screen screenshot.
-                diff = get_percent_difference(vt1_screenshot,
-                                              current_vt1_screenshot)
-                if not diff < similarity_percent_threshold:
-                    max_vt1_difference_percent = \
-                        max(diff, max_vt1_difference_percent)
-                    self._num_errors += 1
-                    logging.error('VT1 screenshots differ by %d %%: %s vs %s',
-                                  diff, vt1_screenshot,
-                                  current_vt1_screenshot)
-                else:
-                    num_identical_vt1_screenshots += 1
-
-                # Go to VT2 and take a screenshot.
-                self.open_vt2()
-                current_vt2_screenshot = self._take_current_vt_screenshot(2)
-
-                # Make sure the current VT2 screenshot is the same as (or
-                # similar to) the first VT2 screenshot.
-                diff = get_percent_difference(vt2_screenshot,
-                                              current_vt2_screenshot)
-                if not diff <= similarity_percent_threshold:
-                    max_vt2_difference_percent = \
-                        max(diff, max_vt2_difference_percent)
-                    self._num_errors += 1
-                    logging.error(
-                        'VT2 screenshots differ by %d %%: %s vs %s',
-                        diff, vt2_screenshot, current_vt2_screenshot)
-                else:
-                    num_identical_vt2_screenshots += 1
-
-        self.open_vt1()
-
-        keyvals['percent_VT1_screenshot_max_difference'] = \
-            max_vt1_difference_percent
-        keyvals['percent_VT2_screenshot_max_difference'] = \
-            max_vt2_difference_percent
-        keyvals['num_identical_vt1_screenshots'] = num_identical_vt1_screenshots
-        keyvals['num_identical_vt2_screenshots'] = num_identical_vt2_screenshots
-
-        self.write_perf_keyval(keyvals)
-
-        if self._num_errors > 0:
-            raise error.TestFail('Failed: saw %d VT switching errors.' %
-                                  self._num_errors)
-
-    def _take_current_vt_screenshot(self, current_vt):
-        """
-        Captures a screenshot of the current VT screen in PNG format.
-
-        @param current_vt: desired vt for screenshot.
-
-        @returns the path of the screenshot file.
-
-        """
-        return graphics_utils.take_screenshot(self.resultsdir,
-                                              'graphics_VTSwitch_VT%d' % current_vt)
-
-    def cleanup(self):
-        # Return to VT1 when done.  Ideally, the screen should already be in VT1
-        # but the test might fail and terminate while in VT2.
-        self.open_vt1()
-        super(graphics_VTSwitch, self).cleanup()
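Editor's note: the removed graphics_VTSwitch test reduced every screenshot comparison to a single rounded percentage, scaling the comparer's raw difference count by the size of the reference image. The arithmetic is restated below; PdiffImageComparer (autotest's wrapper around perceptualdiff) is not reimplemented here, so the difference count is passed in directly.

    import os

    def percent_difference(diff_count, reference_path):
        # Scale the raw difference reported by the image comparer by the
        # reference file's size and round to a whole percentage, as the
        # removed get_percent_difference() did.
        return round(100.0 * diff_count / os.path.getsize(reference_path))

In the deleted run_once, an error was counted when the initial VT1/VT2 pair differed by less than difference_percent_threshold, or when a repeated capture of the same VT differed from its first capture by at least similarity_percent_threshold.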
diff --git a/client/site_tests/graphics_VideoRenderingPower/control.h264 b/client/site_tests/graphics_VideoRenderingPower/control.h264
index 6c22566..11c11e0 100644
--- a/client/site_tests/graphics_VideoRenderingPower/control.h264
+++ b/client/site_tests/graphics_VideoRenderingPower/control.h264
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_VideoRenderingPower.h264"
 PURPOSE = "Measure power consumption of H264 (a.k.a AVC1) video rendering"
diff --git a/client/site_tests/graphics_VideoRenderingPower/control.vp8 b/client/site_tests/graphics_VideoRenderingPower/control.vp8
index 8fec487..27870e9 100644
--- a/client/site_tests/graphics_VideoRenderingPower/control.vp8
+++ b/client/site_tests/graphics_VideoRenderingPower/control.vp8
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_VideoRenderingPower.vp8"
 PURPOSE = "Measure power consumption of VP8 video rendering"
diff --git a/client/site_tests/graphics_VideoRenderingPower/control.vp9 b/client/site_tests/graphics_VideoRenderingPower/control.vp9
index f000e12..40d3b03 100644
--- a/client/site_tests/graphics_VideoRenderingPower/control.vp9
+++ b/client/site_tests/graphics_VideoRenderingPower/control.vp9
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-gfx"
 NAME = "graphics_VideoRenderingPower.vp9"
 PURPOSE = "Measure power consumption of VP9 video rendering"
diff --git a/client/site_tests/graphics_VideoRenderingPower/graphics_VideoRenderingPower.py b/client/site_tests/graphics_VideoRenderingPower/graphics_VideoRenderingPower.py
index 113c04d..58f46a1 100644
--- a/client/site_tests/graphics_VideoRenderingPower/graphics_VideoRenderingPower.py
+++ b/client/site_tests/graphics_VideoRenderingPower/graphics_VideoRenderingPower.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_WebGLAquarium/control b/client/site_tests/graphics_WebGLAquarium/control
index 85cba51..7daf6c5 100644
--- a/client/site_tests/graphics_WebGLAquarium/control
+++ b/client/site_tests/graphics_WebGLAquarium/control
@@ -2,11 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = 'chromeos-gfx'
 NAME = "graphics_WebGLAquarium"
 PURPOSE = "Execute the WebGL aquarium test suite."
 CRITERIA = "All suite tests must not crash/hang."
-ATTRIBUTES = "suite:crosbolt_perf_perbuild, suite:bvt-perbuild, suite:graphics, suite:graphics_browser, suite:graphics_per-day, suite:partners"
+ATTRIBUTES = "suite:crosbolt_perf_perbuild, suite:bvt-perbuild, suite:graphics, suite:graphics_browser, suite:graphics_per-day, suite:partners, suite:pvs-graphics"
 TIME = "medium"
 TEST_CATEGORY = "Performance"
 TEST_CLASS = "graphics"
diff --git a/client/site_tests/graphics_WebGLAquarium/control.memory_pressure b/client/site_tests/graphics_WebGLAquarium/control.memory_pressure
index 3da9d51..1d011fa 100644
--- a/client/site_tests/graphics_WebGLAquarium/control.memory_pressure
+++ b/client/site_tests/graphics_WebGLAquarium/control.memory_pressure
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "chromeos-performance"
 NAME = "graphics_WebGLAquarium.memory_pressure"
 PURPOSE = "Execute the WebGL aquarium test suite under memory pressure."
diff --git a/client/site_tests/graphics_WebGLAquarium/control.power b/client/site_tests/graphics_WebGLAquarium/control.power
index 0ddea5b..5a4bdc1 100644
--- a/client/site_tests/graphics_WebGLAquarium/control.power
+++ b/client/site_tests/graphics_WebGLAquarium/control.power
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "rohitbm@chromium.org, chromeos-gfx"
 NAME = "graphics_WebGLAquarium.power"
 PURPOSE = "Execute the WebGL aquarium test suite to measure power."
diff --git a/client/site_tests/graphics_WebGLAquarium/control.thermal b/client/site_tests/graphics_WebGLAquarium/control.thermal
index 263827c..1ba4263 100644
--- a/client/site_tests/graphics_WebGLAquarium/control.thermal
+++ b/client/site_tests/graphics_WebGLAquarium/control.thermal
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "rohitbm@chromium.org, chromeos-gfx"
 NAME = "graphics_WebGLAquarium.thermal"
 PURPOSE = "Execute the WebGL aquarium test suite to measure power."
diff --git a/client/site_tests/graphics_WebGLAquarium/graphics_WebGLAquarium.py b/client/site_tests/graphics_WebGLAquarium/graphics_WebGLAquarium.py
index e95a5c9..dcbf807 100644
--- a/client/site_tests/graphics_WebGLAquarium/graphics_WebGLAquarium.py
+++ b/client/site_tests/graphics_WebGLAquarium/graphics_WebGLAquarium.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -12,6 +13,8 @@
             function call to complete.
 """
 
+from __future__ import print_function
+
 import functools
 import logging
 import math
@@ -191,9 +194,9 @@
                 'rapl_%04d_fishes' % num_fishes)
             # Remove entries that we don't care about.
             rapl_rate = {key: rapl_rate[key]
-                         for key in rapl_rate.keys() if key.endswith('pwr')}
+                         for key in list(rapl_rate.keys()) if key.endswith('pwr')}
             # Report to chromeperf/ dashboard.
-            for key, values in rapl_rate.iteritems():
+            for key, values in list(rapl_rate.items()):
                 self.output_perf_value(
                     description=key,
                     value=values,
@@ -263,9 +266,9 @@
             now = time.time()
             results = {}
             info_str = ['\nfb_id wait_kds flipped']
-            for value in sampler_obj.frame_buffers.itervalues():
+            for value in list(sampler_obj.frame_buffers.values()):
                 results[value.fb] = {}
-                for state, stats in value.states.iteritems():
+                for state, stats in list(value.states.items()):
                     results[value.fb][state] = (stats.avg, stats.stdev)
                 info_str.append('%s: %s %s' % (value.fb,
                                                results[value.fb]['wait_kds'][0],
@@ -295,7 +298,7 @@
                     f.write('%s %s %s\n' %
                             (t, self.flip_stats[t]['avg_fps'],
                              self.flip_stats[t]['avg_render_time']))
-                for fb, stats in self.flip_stats[t].iteritems():
+                for fb, stats in list(self.flip_stats[t].items()):
                     if not isinstance(fb, int):
                         continue
                     f.write('%s %s ' % (t, fb))
@@ -317,7 +320,7 @@
         out_file = os.path.join(self.resultsdir, filename)
         with open(out_file, 'w') as f:
             for sample in samples:
-                print >> f, sample
+                print(sample, file=f)
 
     def run_fish_test_with_memory_pressure(
         self, browser, test_url, num_fishes, memory_pressure):
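Editor's note: the hunks above follow the Python 2-to-3 pattern used throughout this change: dict views are materialized with list() so iteration behaves the same under either interpreter, iteritems/itervalues become items/values, and the print statement becomes the print function. A self-contained sketch with made-up data:

    from __future__ import print_function  # no-op on Python 3, needed on Python 2

    rapl_rate = {"soc_pwr": 1.2, "soc_temp": 55.0}  # invented sample values

    # Python-2-only spellings removed above: rapl_rate.iteritems(),
    # frame_buffers.itervalues(), and "print >> f, sample".
    filtered = {key: rapl_rate[key]
                for key in list(rapl_rate.keys()) if key.endswith("pwr")}
    for key, value in list(filtered.items()):
        print(key, value)

    with open("/tmp/samples.txt", "w") as f:
        for sample in (1, 2, 3):
            print(sample, file=f)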
diff --git a/client/site_tests/graphics_WebGLAquarium/sampler.py b/client/site_tests/graphics_WebGLAquarium/sampler.py
index 79e24f8..a38eb22 100644
--- a/client/site_tests/graphics_WebGLAquarium/sampler.py
+++ b/client/site_tests/graphics_WebGLAquarium/sampler.py
@@ -185,7 +185,7 @@
 
     def calc_stat(self):
         """Calculate the statistics of state duration of all frame buffers"""
-        for fb in self.frame_buffers.values():
+        for fb in list(self.frame_buffers.values()):
             fb.calc_state_avg_stdev()
 
     def frame_buffer_unique_hash(self, pipe, fb):
@@ -248,7 +248,7 @@
     def calc_state_avg_stdev(self):
         """Call all states to compute its own average and standard deviation"""
         logging.debug("====pipe:%d, fb:%d====", self.pipe, self.fb)
-        for s in self.states.values():
+        for s in list(self.states.values()):
             s.calc_avg_stdev()
 
 
diff --git a/client/site_tests/graphics_WebGLAquarium/system_sampler.py b/client/site_tests/graphics_WebGLAquarium/system_sampler.py
index 5abd346..37ff801 100644
--- a/client/site_tests/graphics_WebGLAquarium/system_sampler.py
+++ b/client/site_tests/graphics_WebGLAquarium/system_sampler.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/graphics_WebGLManyPlanetsDeep/control b/client/site_tests/graphics_WebGLManyPlanetsDeep/control
index da4cf15..32f43b5 100644
--- a/client/site_tests/graphics_WebGLManyPlanetsDeep/control
+++ b/client/site_tests/graphics_WebGLManyPlanetsDeep/control
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = 'chromeos-gfx'
 NAME = "graphics_WebGLManyPlanetsDeep"
 PURPOSE = "Execute the WebGL many planets deep test suite."
diff --git a/client/site_tests/graphics_WebGLManyPlanetsDeep/graphics_WebGLManyPlanetsDeep.py b/client/site_tests/graphics_WebGLManyPlanetsDeep/graphics_WebGLManyPlanetsDeep.py
index 1132b72..b72b30a 100644
--- a/client/site_tests/graphics_WebGLManyPlanetsDeep/graphics_WebGLManyPlanetsDeep.py
+++ b/client/site_tests/graphics_WebGLManyPlanetsDeep/graphics_WebGLManyPlanetsDeep.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -74,7 +75,7 @@
     def calculate_perf_values(self):
         """Calculates all the perf values from the collected data."""
         arr = numpy.array([[v['frame_elapsed_time'], v['js_elapsed_time']]
-                           for v in self.frame_data.itervalues()])
+                           for v in list(self.frame_data.values())])
         std = arr.std(axis=0)
         mean = arr.mean(axis=0)
         avg_fps = 1000.0 / mean[0]
@@ -88,9 +89,9 @@
 
         # Remove entries that we don't care about.
         rapl_rate = {key: self.rapl_rate[key]
-                     for key in self.rapl_rate.keys() if key.endswith('pwr')}
+                     for key in list(self.rapl_rate.keys()) if key.endswith('pwr')}
         # Report to chromeperf/ dashboard.
-        for key, values in rapl_rate.iteritems():
+        for key, values in list(rapl_rate.items()):
             self.output_perf_value(
                 description=key,
                 value=values,
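Editor's note: the calculate_perf_values() hunk above summarizes per-frame timings with numpy by stacking frame and JS elapsed times into a two-column array, taking the column-wise mean and standard deviation, and deriving the average FPS from the mean frame time. A standalone sketch with invented numbers:

    import numpy

    # Invented per-frame samples in milliseconds, shaped like frame_data above.
    frame_data = {
        1: {"frame_elapsed_time": 16.7, "js_elapsed_time": 4.1},
        2: {"frame_elapsed_time": 16.9, "js_elapsed_time": 3.8},
        3: {"frame_elapsed_time": 33.4, "js_elapsed_time": 9.0},
    }

    arr = numpy.array([[v["frame_elapsed_time"], v["js_elapsed_time"]]
                       for v in list(frame_data.values())])
    mean = arr.mean(axis=0)          # column 0: frame time, column 1: JS time
    std = arr.std(axis=0)
    avg_fps = 1000.0 / mean[0]       # ms per frame -> frames per second
    print(avg_fps, mean, std)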
diff --git a/client/site_tests/graphics_dEQP/control.bvt b/client/site_tests/graphics_dEQP/control.bvt
deleted file mode 100644
index df85963..0000000
--- a/client/site_tests/graphics_dEQP/control.bvt
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Please do not edit this file! It has been created by generate_controlfiles.py.
-
-NAME = 'graphics_dEQP.bvt'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Run the drawElements Quality Program test suite.'
-CRITERIA = 'All of the individual tests must pass.'
-ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system, suite:infra_qual'
-TIME = 'FAST'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'graphics'
-TEST_TYPE = 'client'
-MAX_RESULT_SIZE_KB = 131072
-DOC = """
-This test runs the drawElements Quality Program test suite.
-"""
-job.run_test('graphics_dEQP',
-             tag = 'bvt',
-             opts = args + [
-                 'test_names_file=/usr/local/autotest/tests/graphics_dEQP/master/bvt.txt',
-                 'hasty=False',
-                 'perf_failure_description=Failures_BVT',
-                 'shard_number=0',
-                 'shard_count=1'
-             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_dEQP/control.filter_args b/client/site_tests/graphics_dEQP/control.filter_args
deleted file mode 100644
index 5a1564f..0000000
--- a/client/site_tests/graphics_dEQP/control.filter_args
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This control file runs no tests by default. It can be used to run specific
-# tests or test sets by specifying a filter. The filter is compared
-# to the beginning of each test name to determine what to run.
-# Example:
-# test_that --args="filter=dEQP-GLES3.info" <ip> graphics_dEQP.filter_args
-
-NAME = 'graphics_dEQP.filter_args'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Run the drawElements Quality Program test suite.'
-CRITERIA = 'All of the individual tests must pass.'
-#ATTRIBUTES = ''
-TIME = 'FAST'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'graphics'
-TEST_TYPE = 'client'
-DOC = """
-This test runs the drawElements Quality Program test suite.
-"""
-
-job.run_test('graphics_dEQP', opts = args)
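Editor's note: the removed control.filter_args explained that its filter is matched against the beginning of each dEQP test name. The prefix matching it describes is illustrated below; the names are invented, and the real lists live in the files under /usr/local/deqp/master/ referenced elsewhere in this change.

    # Invented names; real dEQP test lists are much larger.
    test_names = [
        "dEQP-GLES3.info.version",
        "dEQP-GLES3.functional.color_clear",
        "dEQP-GLES2.info.vendor",
    ]
    test_filter = "dEQP-GLES3.info"

    # The filter selects every test whose name starts with it, as the removed
    # control file described for --args="filter=...".
    selected = [name for name in test_names if name.startswith(test_filter)]
    print(selected)  # ['dEQP-GLES3.info.version']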
diff --git a/client/site_tests/graphics_dEQP/control.gles2-master b/client/site_tests/graphics_dEQP/control.gles2-master
deleted file mode 100644
index ca30af7..0000000
--- a/client/site_tests/graphics_dEQP/control.gles2-master
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Please do not edit this file! It has been created by generate_controlfiles.py.
-
-NAME = 'graphics_dEQP.gles2-master'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Run the drawElements Quality Program test suite.'
-CRITERIA = 'All of the individual tests must pass.'
-ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system'
-TIME = 'LENGTHY'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'graphics'
-TEST_TYPE = 'client'
-MAX_RESULT_SIZE_KB = 131072
-DOC = """
-This test runs the drawElements Quality Program test suite.
-"""
-job.run_test('graphics_dEQP',
-             tag = 'gles2-master',
-             opts = args + [
-                 'test_names_file=/usr/local/deqp/master/gles2-master.txt',
-                 'hasty=False',
-                 'perf_failure_description=Failures_GLES2',
-                 'shard_number=0',
-                 'shard_count=1'
-             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_dEQP/control.gles3-master b/client/site_tests/graphics_dEQP/control.gles3-master
deleted file mode 100644
index 4116fb6..0000000
--- a/client/site_tests/graphics_dEQP/control.gles3-master
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Please do not edit this file! It has been created by generate_controlfiles.py.
-
-NAME = 'graphics_dEQP.gles3-master'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Run the drawElements Quality Program test suite.'
-CRITERIA = 'All of the individual tests must pass.'
-ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system'
-TIME = 'LENGTHY'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'graphics'
-TEST_TYPE = 'client'
-MAX_RESULT_SIZE_KB = 131072
-DOC = """
-This test runs the drawElements Quality Program test suite.
-"""
-job.run_test('graphics_dEQP',
-             tag = 'gles3-master',
-             opts = args + [
-                 'test_names_file=/usr/local/deqp/master/gles3-master.txt',
-                 'hasty=False',
-                 'perf_failure_description=Failures_GLES3',
-                 'shard_number=0',
-                 'shard_count=1'
-             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_dEQP/control.gles3.accuracy b/client/site_tests/graphics_dEQP/control.gles3.accuracy
deleted file mode 100644
index 552227f..0000000
--- a/client/site_tests/graphics_dEQP/control.gles3.accuracy
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Please do not edit this file! It has been created by generate_controlfiles.py.
-
-NAME = 'graphics_dEQP.gles3.accuracy'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Run the drawElements Quality Program test suite.'
-CRITERIA = 'All of the individual tests must pass.'
-ATTRIBUTES = ''
-TIME = 'FAST'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'graphics'
-TEST_TYPE = 'client'
-MAX_RESULT_SIZE_KB = 131072
-DOC = """
-This test runs the drawElements Quality Program test suite.
-"""
-job.run_test('graphics_dEQP',
-             opts = args + [
-                 'filter=dEQP-GLES3.accuracy',
-                 'subset_to_run=Pass',
-                 'hasty=False',
-                 'shard_number=0',
-                 'shard_count=1'
-             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_dEQP/control.gles3.performance b/client/site_tests/graphics_dEQP/control.gles3.performance
deleted file mode 100644
index 3886abb..0000000
--- a/client/site_tests/graphics_dEQP/control.gles3.performance
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Please do not edit this file! It has been created by generate_controlfiles.py.
-
-NAME = 'graphics_dEQP.gles3.performance'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Run the drawElements Quality Program test suite.'
-CRITERIA = 'All of the individual tests must pass.'
-ATTRIBUTES = ''
-TIME = 'LONG'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'graphics'
-TEST_TYPE = 'client'
-MAX_RESULT_SIZE_KB = 131072
-DOC = """
-This test runs the drawElements Quality Program test suite.
-"""
-job.run_test('graphics_dEQP',
-             opts = args + [
-                 'filter=dEQP-GLES3.performance',
-                 'subset_to_run=Pass',
-                 'hasty=False',
-                 'shard_number=0',
-                 'shard_count=1'
-             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_dEQP/control.gles31-master b/client/site_tests/graphics_dEQP/control.gles31-master
deleted file mode 100644
index 7440e2a..0000000
--- a/client/site_tests/graphics_dEQP/control.gles31-master
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Please do not edit this file! It has been created by generate_controlfiles.py.
-
-NAME = 'graphics_dEQP.gles31-master'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Run the drawElements Quality Program test suite.'
-CRITERIA = 'All of the individual tests must pass.'
-ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system'
-TIME = 'LENGTHY'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'graphics'
-TEST_TYPE = 'client'
-MAX_RESULT_SIZE_KB = 131072
-DOC = """
-This test runs the drawElements Quality Program test suite.
-"""
-job.run_test('graphics_dEQP',
-             tag = 'gles31-master',
-             opts = args + [
-                 'test_names_file=/usr/local/deqp/master/gles31-master.txt',
-                 'hasty=False',
-                 'perf_failure_description=Failures_GLES31',
-                 'shard_number=0',
-                 'shard_count=1'
-             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_dEQP/control.vk-master b/client/site_tests/graphics_dEQP/control.vk-master
deleted file mode 100644
index d8928f7..0000000
--- a/client/site_tests/graphics_dEQP/control.vk-master
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Please do not edit this file! It has been created by generate_controlfiles.py.
-
-NAME = 'graphics_dEQP.vk-master'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Run the drawElements Quality Program test suite.'
-CRITERIA = 'All of the individual tests must pass.'
-ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system'
-TIME = 'LENGTHY'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'graphics'
-TEST_TYPE = 'client'
-MAX_RESULT_SIZE_KB = 131072
-DOC = """
-This test runs the drawElements Quality Program test suite.
-"""
-job.run_test('graphics_dEQP',
-             tag = 'vk-master',
-             opts = args + [
-                 'test_names_file=/usr/local/deqp/master/vk-master.txt',
-                 'hasty=True',
-                 'perf_failure_description=Failures_VK',
-                 'shard_number=0',
-                 'shard_count=1'
-             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_dEQP/generate_controlfiles.py b/client/site_tests/graphics_dEQP/generate_controlfiles.py
deleted file mode 100644
index 3e3afea..0000000
--- a/client/site_tests/graphics_dEQP/generate_controlfiles.py
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/usr/bin/env python2
-
-"""
-This script generates autotest control files for dEQP. It supports
-1) Generate control files for tests with Passing expectations.
-2) Generate control files to run tests that are not passing.
-3) Decomposing a test into shards. Ideally shard_count is chosen such that
-   each shard will run less than 1 minute. It mostly makes sense in
-   combination with "hasty".
-"""
-import os
-from collections import namedtuple
-# Use 'sudo pip install enum34' to install.
-from enum import Enum
-# Use 'sudo pip install jinja2' to install.
-from jinja2 import Template
-
-Test = namedtuple('Test', 'filter, suite, shards, time, hasty, tag, test_file, perf_failure_description')
-
-
-ATTRIBUTES_BVT_PB = (
-    'suite:deqp, suite:graphics_per-day, suite:graphics_system, '
-    'suite:bvt-perbuild'
-)
-ATTRIBUTES_DAILY = 'suite:deqp, suite:graphics_per-day, suite:graphics_system'
-
-class Suite(Enum):
-    none = 1
-    daily = 2
-    bvtcq = 3
-    bvtpb = 4
-
-test_file_folder = '/usr/local/deqp/master/'
-BVT_MASTER_FILE = '/usr/local/autotest/tests/graphics_dEQP/master/bvt.txt'
-GLES2_MASTER_FILE = os.path.join(test_file_folder, 'gles2-master.txt')
-GLES3_MASTER_FILE = os.path.join(test_file_folder, 'gles3-master.txt')
-GLES31_MASTER_FILE = os.path.join(test_file_folder, 'gles31-master.txt')
-VK_MASTER_FILE = os.path.join(test_file_folder, 'vk-master.txt')
-
-# List of tests' filter that should not append 'hasty' to its name.
-hasty_exclude_list = ['dEQP-VK-master']
-
-tests = [
-    Test('bvt',                    Suite.daily, shards=1,  hasty=False, time='FAST',     tag='bvt',           test_file=BVT_MASTER_FILE,    perf_failure_description='Failures_BVT'),
-    Test('dEQP-GLES2-master',      Suite.daily, shards=1,  hasty=False, time='LENGTHY',  tag='gles2-master',  test_file=GLES2_MASTER_FILE,  perf_failure_description='Failures_GLES2'),
-    # As we are following tot with dEQP the hasty shards have too much noise that is impossible to expect.
-    #Test('dEQP-GLES2-master',      Suite.bvtpb, shards=10, hasty=True,  time='FAST',     tag='gles2-master',  test_file=GLES2_MASTER_FILE,  perf_failure_description=None),
-    # The stress, accuracy and performance tests are not part of -master lists.
-    # Hence we create control files in case we want to run them. But there is
-    # no strict requirement to keep them passing.
-    Test('dEQP-GLES3.accuracy',    Suite.none,  shards=1,  hasty=False, time='FAST',     tag=None,            test_file=None,               perf_failure_description=None),
-    Test('dEQP-GLES3-master',      Suite.daily, shards=1,  hasty=False, time='LENGTHY',  tag='gles3-master',  test_file=GLES3_MASTER_FILE,  perf_failure_description='Failures_GLES3'),
-    #Test('dEQP-GLES3-master',      Suite.bvtpb, shards=10, hasty=True,  time='FAST',     tag='gles3-master',  test_file=GLES3_MASTER_FILE,  perf_failure_description=None),
-    Test('dEQP-GLES3.performance', Suite.none,  shards=1,  hasty=False, time='LONG',     tag=None,            test_file=None,               perf_failure_description=None),
-    # It is not worth running GLES3.stress in addition to GLES2.stress and GLES31.stress just to find stability issues.
-    Test('dEQP-GLES31-master',     Suite.daily, shards=1,  hasty=False, time='LENGTHY',  tag='gles31-master', test_file=GLES31_MASTER_FILE, perf_failure_description='Failures_GLES31'),
-    #Test('dEQP-GLES31-master',     Suite.bvtpb, shards=10, hasty=True,  time='FAST',     tag='gles31-master', test_file=GLES31_MASTER_FILE, perf_failure_description=None),
-    Test('dEQP-VK-master',         Suite.daily, shards=1,  hasty=True,  time='LENGTHY',  tag='vk-master',     test_file=VK_MASTER_FILE,     perf_failure_description='Failures_VK'),
-]
-
-CONTROLFILE_TEMPLATE = Template(
-"""\
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Please do not edit this file! It has been created by generate_controlfiles.py.
-
-NAME = '{{testname}}'
-AUTHOR = 'chromeos-gfx'
-PURPOSE = 'Run the drawElements Quality Program test suite.'
-CRITERIA = 'All of the individual tests must pass.'
-ATTRIBUTES = '{{attributes}}'
-TIME = '{{time}}'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'graphics'
-TEST_TYPE = 'client'
-MAX_RESULT_SIZE_KB = 131072
-DOC = \"\"\"
-This test runs the drawElements Quality Program test suite.
-\"\"\"
-job.run_test('graphics_dEQP',{% if tag != None %}
-             tag = '{{tag}}',{% endif %}
-             opts = args + [
-                 {% if test_file == None %}'filter={{filter}}',
-                 'subset_to_run={{subset}}',
-                 {% else %}'test_names_file={{test_file}}',
-                 {% endif %}'hasty={{hasty}}',
-                 {% if perf_failure_description %}'perf_failure_description={{perf_failure_description}}',
-                 {% endif %}'shard_number={{shard}}',
-                 'shard_count={{shards}}'
-             ])"""
-    )
-
-#Unlike the normal version it batches many tests in a single run
-#to reduce testing time. Unfortunately this is less robust and
-#can lead to spurious failures.
-
-
-def get_controlfilename(test, shard=0):
-    return 'control.%s' % get_name(test, shard)
-
-
-def get_attributes(test):
-    if test.suite == Suite.bvtpb:
-        return ATTRIBUTES_BVT_PB
-    if test.suite == Suite.daily:
-        return ATTRIBUTES_DAILY
-    return ''
-
-
-def get_time(test):
-    return test.time
-
-
-def get_name(test, shard):
-    name = test.filter.replace('dEQP-', '', 1).lower()
-    if test.hasty and test.filter not in hasty_exclude_list:
-        name = '%s.hasty' % name
-    if test.shards > 1:
-        name = '%s.%d' % (name, shard)
-    return name
-
-
-def get_testname(test, shard=0):
-    return 'graphics_dEQP.%s' % get_name(test, shard)
-
-
-def write_controlfile(filename, content):
-    print 'Writing %s.' % filename
-    with open(filename, 'w+') as f:
-        f.write(content)
-
-
-def write_controlfiles(test):
-    attributes = get_attributes(test)
-    time = get_time(test)
-
-    for shard in xrange(0, test.shards):
-        testname = get_testname(test, shard)
-        filename = get_controlfilename(test, shard)
-        content = CONTROLFILE_TEMPLATE.render(
-            testname=testname,
-            attributes=attributes,
-            time=time,
-            filter=test.filter,
-            subset='Pass',
-            hasty=test.hasty,
-            shard=shard,
-            shards=test.shards,
-            test_file=test.test_file,
-            tag=test.tag,
-            perf_failure_description=test.perf_failure_description
-        )
-        write_controlfile(filename, content)
-
-
-def main():
-    for test in tests:
-        write_controlfiles(test)
-
-if __name__ == "__main__":
-    main()
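Editor's note: the deleted generate_controlfiles.py rendered one control file per test (and per shard) from a Jinja2 template fed by a Test namedtuple. A trimmed sketch of that flow follows; the template keeps only a few of the real fields, filename handling is omitted, and jinja2 must be installed separately, as the deleted script itself noted.

    from collections import namedtuple
    from jinja2 import Template  # third-party; 'pip install jinja2'

    Test = namedtuple("Test", "time, hasty, tag, test_file, shards")

    TEMPLATE = Template(
        "NAME = 'graphics_dEQP.{{tag}}'\n"
        "TIME = '{{time}}'\n"
        "job.run_test('graphics_dEQP',\n"
        "             tag = '{{tag}}',\n"
        "             opts = args + ['test_names_file={{test_file}}',\n"
        "                            'hasty={{hasty}}',\n"
        "                            'shard_number={{shard}}',\n"
        "                            'shard_count={{shards}}'])\n")

    test = Test(time="LENGTHY", hasty=False, tag="gles2-master",
                test_file="/usr/local/deqp/master/gles2-master.txt", shards=1)

    # One rendered control file per shard; the real script also emitted
    # AUTHOR, ATTRIBUTES, DOC and the other fields, and wrote each result to disk.
    for shard in range(test.shards):
        print(TEMPLATE.render(time=test.time, tag=test.tag, hasty=test.hasty,
                              test_file=test.test_file, shard=shard,
                              shards=test.shards))

The rendered text matches the shape of the deleted control.gles2-master and its siblings above.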
diff --git a/client/site_tests/graphics_dEQP/graphics_dEQP.py b/client/site_tests/graphics_dEQP/graphics_dEQP.py
deleted file mode 100644
index d162df9..0000000
--- a/client/site_tests/graphics_dEQP/graphics_dEQP.py
+++ /dev/null
@@ -1,645 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import bz2
-import glob
-import json
-import logging
-import os
-import re
-import shutil
-import tempfile
-import time
-import xml.etree.ElementTree as et
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import cros_logging, service_stopper
-from autotest_lib.client.cros.graphics import graphics_utils
-
-RERUN_RATIO = 0.02  # Ratio to rerun failing test for hasty mode
-
-
-class graphics_dEQP(graphics_utils.GraphicsTest):
-    """Run the drawElements Quality Program test suite."""
-    version = 1
-    _services = None
-    _hasty = False
-    _hasty_batch_size = 100  # Batch size in hasty mode.
-    _shard_number = 0
-    _shard_count = 1
-    _board = None
-    _cpu_type = None
-    _gpu_type = None
-    _surface = None
-    _filter = None
-    _width = 256  # Use smallest width for which all tests run/pass.
-    _height = 256  # Use smallest height for which all tests run/pass.
-    _timeout = 70  # Larger than twice the dEQP watchdog timeout at 30s.
-    _test_names = None
-    _test_names_file = None
-    _log_path = None  # Location for detailed test output logs (in /tmp/).
-    _debug = False  # Analyze kernel messages.
-    _log_reader = None  # Reader to analyze (kernel) messages log.
-    _log_filter = re.compile('.* .* kernel:')  # kernel messages filter.
-    _env = None  # environment for test processes
-    DEQP_MODULES = {
-        'dEQP-EGL': 'egl',
-        'dEQP-GLES2': 'gles2',
-        'dEQP-GLES3': 'gles3',
-        'dEQP-GLES31': 'gles31',
-        'dEQP-VK': 'vk',
-    }
-    # We do not consider these results as failures.
-    TEST_RESULT_FILTER = [
-        'pass', 'notsupported', 'internalerror', 'qualitywarning',
-        'compatibilitywarning', 'skipped'
-    ]
-
-    def initialize(self):
-        super(graphics_dEQP, self).initialize()
-        self._api_helper = graphics_utils.GraphicsApiHelper()
-        self._board = utils.get_board()
-        self._cpu_type = utils.get_cpu_soc_family()
-        self._gpu_type = utils.get_gpu_family()
-
-        # deqp may depend on libraries that are present only on test images.
-        # Those libraries are installed in /usr/local.
-        self._env = os.environ.copy()
-        old_ld_path = self._env.get('LD_LIBRARY_PATH', '')
-        if old_ld_path:
-            self._env[
-                'LD_LIBRARY_PATH'] = '/usr/local/lib:/usr/local/lib64:' + old_ld_path
-        else:
-            self._env['LD_LIBRARY_PATH'] = '/usr/local/lib:/usr/local/lib64'
-
-        self._services = service_stopper.ServiceStopper(['ui', 'powerd'])
-        # Valid choices are fbo and pbuffer. The latter avoids dEQP assumptions.
-        self._surface = 'pbuffer'
-        self._services.stop_services()
-
-    def cleanup(self):
-        if self._services:
-            self._services.restore_services()
-        super(graphics_dEQP, self).cleanup()
-
-    def _archive_test_results(self, result_filename):
-        """Reduce space usage.
-
-        The default /tmp result file location is memory backed and capped at 1/2
-        of main memory. We have experienced out of storage situations. Avoid
-        this for instance by using compression.
-        """
-        os.system('gzip %s' % result_filename)
-
-    def _parse_test_results(self,
-                            result_filename,
-                            test_results=None,
-                            failing_test=None):
-        """Handles result files with one or more test results.
-
-        @param result_filename: log file to parse.
-        @param test_results: Result parsed will be appended to it.
-        @param failing_test: Tests considered failed will append to it.
-
-        @return: dictionary of parsed test results.
-        """
-        xml = ''
-        xml_start = False
-        xml_complete = False
-        xml_bad = False
-        result = 'ParseTestResultFail'
-
-        if test_results is None:
-            test_results = {}
-
-        if not os.path.isfile(result_filename):
-            logging.error('Did not find file %s', result_filename)
-            return test_results
-
-        with open(result_filename) as result_file:
-            for line in result_file.readlines():
-                # If the test terminates early, the XML will be incomplete
-                # and should not be parsed.
-                if line.startswith('#terminateTestCaseResult'):
-                    result = line.strip().split()[1]
-                    xml_bad = True
-                # Will only see #endTestCaseResult if the test does not
-                # terminate early.
-                elif line.startswith('#endTestCaseResult'):
-                    xml_complete = True
-                elif xml_start:
-                    xml += line
-                elif line.startswith('#beginTestCaseResult'):
-                    # If we see another begin before an end then something is
-                    # wrong.
-                    if xml_start:
-                        xml_bad = True
-                    else:
-                        xml_start = True
-                        test_case = line.split(' ')[1]
-
-                if xml_complete or xml_bad:
-                    if xml_complete:
-                        myparser = et.XMLParser(encoding='ISO-8859-1')
-                        root = et.fromstring(xml, parser=myparser)
-                        test_case = root.attrib['CasePath']
-                        result = root.find('Result').get('StatusCode').strip()
-                        xml_complete = False
-                    test_results[result] = test_results.get(result, 0) + 1
-                    if (result.lower() not in self.TEST_RESULT_FILTER and
-                            failing_test != None):
-                        failing_test.append(test_case)
-                    xml_bad = False
-                    xml_start = False
-                    result = 'ParseTestResultFail'
-                    xml = ''
-
-        return test_results
-
-    def _load_not_passing_cases(self, test_filter):
-        """Load all test cases that are in non-'Pass' expectations."""
-        not_passing_cases = []
-        expectations_dir = os.path.join(self.bindir, 'expectations',
-                                        self._gpu_type)
-        subset_spec = '%s.*' % test_filter
-        subset_paths = glob.glob(os.path.join(expectations_dir, subset_spec))
-        for subset_file in subset_paths:
-            # Filter against extra hasty failures only in hasty mode.
-            if (not '.Pass.bz2' in subset_file and
-                (self._hasty or '.hasty.' not in subset_file)):
-                not_passing_cases.extend(
-                    bz2.BZ2File(subset_file).read().splitlines())
-        not_passing_cases.sort()
-        return not_passing_cases
-
-    def _translate_name_to_api(self, name):
-        """Translate test_names or test_filter to api."""
-        test_prefix = name.split('.')[0]
-        if test_prefix in self.DEQP_MODULES:
-            api = self.DEQP_MODULES[test_prefix]
-        else:
-            raise error.TestFail('Failed: Invalid test name: %s' % name)
-        return api
-
-    def _get_executable(self, api):
-        """Return the executable path of the api."""
-        return self._api_helper.get_deqp_executable(api)
-
-    def _can_run(self, api):
-        """Check if specific api is supported in this board."""
-        return api in self._api_helper.get_supported_apis()
-
-    def _bootstrap_new_test_cases(self, test_filter):
-        """Ask dEQP for all test cases and removes non-Pass'ing ones.
-
-        This function will query dEQP for test cases and remove all cases that
-        are not in 'Pass'ing expectations from the list. This can be used
-        incrementally updating failing/hangin tests over several runs.
-
-        @param test_filter: string like 'dEQP-GLES2.info', 'dEQP-GLES3.stress'.
-
-        @return: List of dEQP tests to run.
-        """
-        test_cases = []
-        api = self._translate_name_to_api(test_filter)
-        if self._can_run(api):
-            executable = self._get_executable(api)
-        else:
-            return test_cases
-
-        # Must be in the executable directory when running for it to find its
-        # test data files!
-        os.chdir(os.path.dirname(executable))
-
-        not_passing_cases = self._load_not_passing_cases(test_filter)
-        # We did not find passing cases in expectations. Assume everything else
-        # that is there should not be run this time.
-        expectations_dir = os.path.join(self.bindir, 'expectations',
-                                        self._gpu_type)
-        subset_spec = '%s.*' % test_filter
-        subset_paths = glob.glob(os.path.join(expectations_dir, subset_spec))
-        for subset_file in subset_paths:
-            # Filter against hasty failures only in hasty mode.
-            if self._hasty or '.hasty.' not in subset_file:
-                not_passing_cases.extend(
-                    bz2.BZ2File(subset_file).read().splitlines())
-
-        # Now ask dEQP executable nicely for whole list of tests. Needs to be
-        # run in executable directory. Output file is plain text file named
-        # e.g. 'dEQP-GLES2-cases.txt'.
-        command = ('%s '
-                   '--deqp-runmode=txt-caselist '
-                   '--deqp-surface-type=%s '
-                   '--deqp-gl-config-name=rgba8888d24s8ms0 ' %
-                   (executable, self._surface))
-        logging.info('Running command %s', command)
-        utils.run(
-            command,
-            env=self._env,
-            timeout=60,
-            stderr_is_expected=False,
-            ignore_status=False,
-            stdin=None)
-
-        # Now read this caselist file.
-        caselist_name = '%s-cases.txt' % test_filter.split('.')[0]
-        caselist_file = os.path.join(os.path.dirname(executable), caselist_name)
-        if not os.path.isfile(caselist_file):
-            raise error.TestFail(
-                'Failed: No caselist file at %s!' % caselist_file)
-
-        # And remove non-Pass'ing expectations from caselist.
-        caselist = open(caselist_file).read().splitlines()
-        # Contains lines like "TEST: dEQP-GLES2.capability"
-        test_cases = []
-        match = 'TEST: %s' % test_filter
-        logging.info('Bootstrapping test cases matching "%s".', match)
-        for case in caselist:
-            if case.startswith(match):
-                case = case.split('TEST: ')[1]
-                test_cases.append(case)
-
-        test_cases = list(set(test_cases) - set(not_passing_cases))
-        if not test_cases:
-            raise error.TestFail(
-                'Failed: Unable to bootstrap %s!' % test_filter)
-
-        test_cases.sort()
-        return test_cases
-
-    def _get_test_cases_from_names_file(self):
-        if os.path.exists(self._test_names_file):
-            file_path = self._test_names_file
-            test_cases = [line.rstrip('\n') for line in open(file_path)]
-            return [test for test in test_cases if test and not test.isspace()]
-        return []
-
-    def _get_test_cases(self, test_filter, subset):
-        """Gets the test cases for 'Pass', 'Fail' etc.
-
-        expectations.
-
-        This function supports bootstrapping of new GPU families and dEQP
-        binaries. In particular if there are not 'Pass' expectations found for
-        this GPU family it will query the dEQP executable for a list of all
-        available tests. It will then remove known non-'Pass'ing tests from
-        this list to avoid getting into hangs/crashes etc.
-
-        @param test_filter: string like 'dEQP-GLES2.info', 'dEQP-GLES3.stress'.
-        @param subset: string from 'Pass', 'Fail', 'Timeout' etc.
-
-        @return: List of dEQP tests to run.
-        """
-        expectations_dir = os.path.join(self.bindir, 'expectations',
-                                        self._gpu_type)
-        subset_name = '%s.%s.bz2' % (test_filter, subset)
-        subset_path = os.path.join(expectations_dir, subset_name)
-        if not os.path.isfile(subset_path):
-            if subset == 'NotPass':
-                # TODO(ihf): Running hasty and NotPass together is an invitation
-                # for trouble (stability). Decide if it should be disallowed.
-                return self._load_not_passing_cases(test_filter)
-            if subset != 'Pass':
-                raise error.TestFail(
-                    'Failed: No subset file found for %s!' % subset_path)
-            # Ask dEQP for all cases and remove the failing ones.
-            return self._bootstrap_new_test_cases(test_filter)
-
-        test_cases = bz2.BZ2File(subset_path).read().splitlines()
-        if not test_cases:
-            raise error.TestFail(
-                'Failed: No test cases found in subset file %s!' % subset_path)
-        return test_cases
-
-    def _run_tests_individually(self, test_cases, failing_test=None):
-        """Runs tests as isolated from each other, but slowly.
-
-        This function runs each test case separately as a command.
-        This means a new context for each test etc. Failures will be more
-        isolated, but runtime quite high due to overhead.
-
-        @param test_cases: List of dEQP test case strings.
-        @param failing_test: Tests considered failed will be appended to it.
-
-        @return: dictionary of test results.
-        """
-        test_results = {}
-        width = self._width
-        height = self._height
-
-        i = 0
-        for test_case in test_cases:
-            i += 1
-            logging.info('[%d/%d] TestCase: %s', i, len(test_cases), test_case)
-            result_prefix = os.path.join(self._log_path, test_case)
-            log_file = '%s.log' % result_prefix
-            debug_file = '%s.debug' % result_prefix
-            api = self._translate_name_to_api(test_case)
-            if not self._can_run(api):
-                result = 'Skipped'
-                logging.info('Skipping on %s: %s', self._gpu_type, test_case)
-            else:
-                executable = self._get_executable(api)
-                command = ('%s '
-                           '--deqp-case=%s '
-                           '--deqp-surface-type=%s '
-                           '--deqp-gl-config-name=rgba8888d24s8ms0 '
-                           '--deqp-log-images=disable '
-                           '--deqp-watchdog=enable '
-                           '--deqp-surface-width=%d '
-                           '--deqp-surface-height=%d '
-                           '--deqp-log-filename=%s' %
-                           (executable, test_case, self._surface, width, height,
-                            log_file))
-                logging.debug('Running single: %s', command)
-
-                # Must be in the executable directory when running for it to find its
-                # test data files!
-                os.chdir(os.path.dirname(executable))
-
-                # Must initialize because some errors don't repopulate
-                # run_result, leaving old results.
-                run_result = {}
-                start_time = time.time()
-                try:
-                    run_result = utils.run(
-                        command,
-                        env=self._env,
-                        timeout=self._timeout,
-                        stderr_is_expected=False,
-                        ignore_status=True)
-                    result_counts = self._parse_test_results(
-                        log_file, failing_test=failing_test)
-                    self._archive_test_results(log_file)
-                    if result_counts:
-                        result = result_counts.keys()[0]
-                    else:
-                        result = 'Unknown'
-                except error.CmdTimeoutError:
-                    result = 'TestTimeout'
-                except error.CmdError:
-                    result = 'CommandFailed'
-                except Exception:
-                    result = 'UnexpectedError'
-                end_time = time.time()
-
-                if self._debug:
-                    # Collect debug info and save to json file.
-                    output_msgs = {
-                        'start_time': start_time,
-                        'end_time': end_time,
-                        'stdout': [],
-                        'stderr': [],
-                        'dmesg': []
-                    }
-                    logs = self._log_reader.get_logs()
-                    self._log_reader.set_start_by_current()
-                    output_msgs['dmesg'] = [
-                        msg for msg in logs.splitlines()
-                        if self._log_filter.match(msg)
-                    ]
-                    if run_result:
-                        output_msgs['stdout'] = run_result.stdout.splitlines()
-                        output_msgs['stderr'] = run_result.stderr.splitlines()
-                    with open(debug_file, 'w') as fd:
-                        json.dump(
-                            output_msgs,
-                            fd,
-                            indent=4,
-                            separators=(',', ' : '),
-                            sort_keys=True)
-
-            logging.info('Result: %s', result)
-            test_results[result] = test_results.get(result, 0) + 1
-
-        return test_results
-
-    def _run_tests_hasty(self, test_cases, failing_test=None):
-        """Runs tests as quickly as possible.
-
-        This function runs all the test cases, but does not isolate tests and
-        may take shortcuts/not run all tests to provide maximum coverage at
-        minimum runtime.
-
-        @param test_cases: List of dEQP test case strings.
-        @param failing_test: Tests considered failed will be appended to it.
-
-        @return: dictionary of test results.
-        """
-        # TODO(ihf): It saves half the test time to use 32*32 but a few tests
-        # fail as they need surfaces larger than 200*200.
-        width = self._width
-        height = self._height
-        results = {}
-
-        # All tests combined less than 1h in hasty.
-        batch_timeout = min(3600, self._timeout * self._hasty_batch_size)
-        num_test_cases = len(test_cases)
-
-        # We are dividing the number of tests into several shards but run them
-        # in smaller batches. We start and end at multiples of batch_size
-        # boundaries.
-        shard_start = self._hasty_batch_size * (
-            (self._shard_number *
-             (num_test_cases / self._shard_count)) / self._hasty_batch_size)
-        shard_end = self._hasty_batch_size * (
-            ((self._shard_number + 1) *
-             (num_test_cases / self._shard_count)) / self._hasty_batch_size)
-        # The last shard will be slightly larger than the others. Extend it to
-        # cover all test cases, avoiding rounding problems with the integer
-        # arithmetic done to compute shard_start and shard_end.
-        if self._shard_number + 1 == self._shard_count:
-            shard_end = num_test_cases
-
-        for batch in xrange(shard_start, shard_end, self._hasty_batch_size):
-            batch_to = min(batch + self._hasty_batch_size, shard_end)
-            batch_cases = '\n'.join(test_cases[batch:batch_to])
-            # This assumes all tests in the batch are kicked off via the same
-            # executable.
-            api = self._translate_name_to_api(test_cases[batch])
-            if not self._can_run(api):
-                logging.info('Skipping tests on %s: %s', self._gpu_type,
-                             batch_cases)
-            else:
-                executable = self._get_executable(api)
-                log_file = os.path.join(
-                    self._log_path, '%s_hasty_%d.log' % (self._filter, batch))
-                command = ('%s '
-                           '--deqp-stdin-caselist '
-                           '--deqp-surface-type=%s '
-                           '--deqp-gl-config-name=rgba8888d24s8ms0 '
-                           '--deqp-log-images=disable '
-                           '--deqp-visibility=hidden '
-                           '--deqp-watchdog=enable '
-                           '--deqp-surface-width=%d '
-                           '--deqp-surface-height=%d '
-                           '--deqp-log-filename=%s' %
-                           (executable, self._surface, width, height, log_file))
-
-                logging.info('Running tests %d...%d out of %d:\n%s\n%s',
-                             batch + 1, batch_to, num_test_cases, command,
-                             batch_cases)
-
-                # Must be in the executable directory when running for it to
-                # find its test data files!
-                os.chdir(os.path.dirname(executable))
-
-                try:
-                    utils.run(
-                        command,
-                        env=self._env,
-                        timeout=batch_timeout,
-                        stderr_is_expected=False,
-                        ignore_status=False,
-                        stdin=batch_cases)
-                except Exception:
-                    pass
-                # We are trying to handle all errors by parsing the log file.
-                results = self._parse_test_results(log_file, results,
-                                                   failing_test)
-                self._archive_test_results(log_file)
-                logging.info(results)
-        return results
-
-    def _run_once(self, test_cases):
-        """Run dEQP test_cases in individual/hasty mode.
-
-        @param test_cases: test cases to run.
-        """
-        failing_test = []
-        if self._hasty:
-            logging.info('Running in hasty mode.')
-            test_results = self._run_tests_hasty(test_cases, failing_test)
-        else:
-            logging.info('Running each test individually.')
-            test_results = self._run_tests_individually(test_cases,
-                                                        failing_test)
-        return test_results, failing_test
-
-    def run_once(self, opts=None):
-        options = dict(
-            filter='',
-            test_names='',  # e.g., dEQP-GLES3.info.version,
-            # dEQP-GLES2.functional,
-            # dEQP-GLES3.accuracy.texture, etc.
-            test_names_file='',
-            timeout=self._timeout,
-            subset_to_run='Pass',  # Pass, Fail, Timeout, NotPass...
-            hasty='False',
-            shard_number='0',
-            shard_count='1',
-            debug='False',
-            perf_failure_description=None)
-        if opts is None:
-            opts = []
-        options.update(utils.args_to_dict(opts))
-        logging.info('Test Options: %s', options)
-
-        self._hasty = (options['hasty'] == 'True')
-        self._timeout = int(options['timeout'])
-        self._test_names_file = options['test_names_file']
-        self._test_names = options['test_names']
-        self._shard_number = int(options['shard_number'])
-        self._shard_count = int(options['shard_count'])
-        self._debug = (options['debug'] == 'True')
-        if not (self._test_names_file or self._test_names):
-            self._filter = options['filter']
-            if not self._filter:
-                raise error.TestFail('Failed: No dEQP test filter specified')
-        if options['perf_failure_description']:
-            self._test_failure_description = options['perf_failure_description']
-        else:
-            # Do not report failure if failure description is not specified.
-            self._test_failure_report_enable = False
-
-        # Some information to help post-process logs into blacklists later.
-        logging.info('ChromeOS BOARD = %s', self._board)
-        logging.info('ChromeOS CPU family = %s', self._cpu_type)
-        logging.info('ChromeOS GPU family = %s', self._gpu_type)
-
-        # Create a place to put detailed test output logs.
-        filter_name = self._filter or os.path.basename(self._test_names_file)
-        logging.info('dEQP test filter = %s', filter_name)
-        self._log_path = os.path.join(tempfile.gettempdir(),
-                                      '%s-logs' % filter_name)
-        shutil.rmtree(self._log_path, ignore_errors=True)
-        os.mkdir(self._log_path)
-
-        # Load either tests specified by test_names_file, test_names or filter.
-        test_cases = []
-        if self._test_names_file:
-            test_cases = self._get_test_cases_from_names_file()
-        elif self._test_names:
-            test_cases = []
-            for name in self._test_names.split(','):
-                test_cases.extend(self._get_test_cases(name, 'Pass'))
-        elif self._filter:
-            test_cases = self._get_test_cases(self._filter,
-                                              options['subset_to_run'])
-
-        if self._debug:
-            # LogReader works on /var/log/messages by default.
-            self._log_reader = cros_logging.LogReader()
-            self._log_reader.set_start_by_current()
-
-        # Assume all tests failed at the beginning.
-        for test_case in test_cases:
-            self.add_failures(test_case)
-
-        test_results, failing_test = self._run_once(test_cases)
-        # Rerun the test if we are in hasty mode.
-        if self._hasty and len(failing_test) > 0:
-            if len(failing_test) < sum(test_results.values()) * RERUN_RATIO:
-                logging.info('Because we are in hasty mode, we will rerun the '
-                             'failing tests one at a time')
-                rerun_results, failing_test = self._run_once(failing_test)
-                # Reset the failing result counts and merge in the rerun results.
-                for result in test_results:
-                    if result.lower() not in self.TEST_RESULT_FILTER:
-                        test_results[result] = 0
-                for result in rerun_results:
-                    test_results[result] = (
-                        test_results.get(result, 0) + rerun_results[result])
-            else:
-                logging.info('There are too many failing tests. It would '
-                             'take too long to rerun them. Giving up.')
-
-        # Remove tests that no longer fail from the chrome perf dashboard records.
-        for test_case in test_cases:
-            if test_case not in failing_test:
-                self.remove_failures(test_case)
-
-        logging.info('Test results:')
-        logging.info(test_results)
-        logging.debug('Test Failed: %s', failing_test)
-        self.write_perf_keyval(test_results)
-
-        test_count = 0
-        test_failures = 0
-        test_passes = 0
-        test_skipped = 0
-        for result in test_results:
-            test_count += test_results[result]
-            if result.lower() in ['pass']:
-                test_passes += test_results[result]
-            if result.lower() not in self.TEST_RESULT_FILTER:
-                test_failures += test_results[result]
-            if result.lower() in ['skipped']:
-                test_skipped += test_results[result]
-        # The text "Completed all tests." is used by the process_log.py script
-        # and should always appear at the end of a completed test run.
-        logging.info(
-            'Completed all tests. Saw %d tests, %d passes and %d failures.',
-            test_count, test_passes, test_failures)
-
-        if self._filter and test_count == 0 and options[
-                'subset_to_run'] != 'NotPass':
-            logging.warning('No test cases found for filter: %s!', self._filter)
-
-        if test_failures:
-            raise error.TestFail('Failed: on %s %d/%d tests failed.' %
-                                 (self._gpu_type, test_failures, test_count))
-        if test_skipped > 0:
-            logging.info('On %s %d tests skipped, %d passes', self._gpu_type,
-                         test_skipped, test_passes)
diff --git a/client/site_tests/graphics_dEQP/master/bvt.txt b/client/site_tests/graphics_dEQP/master/bvt.txt
deleted file mode 100644
index a293672..0000000
--- a/client/site_tests/graphics_dEQP/master/bvt.txt
+++ /dev/null
@@ -1,35 +0,0 @@
-dEQP-GLES2.info.vendor
-dEQP-GLES2.info.renderer
-dEQP-GLES2.info.version
-dEQP-GLES2.info.shading_language_version
-dEQP-GLES2.info.extensions
-dEQP-GLES2.info.render_target
-dEQP-GLES2.functional.prerequisite.state_reset
-dEQP-GLES2.functional.prerequisite.clear_color
-dEQP-GLES2.functional.prerequisite.read_pixels
-dEQP-GLES3.info.vendor
-dEQP-GLES3.info.renderer
-dEQP-GLES3.info.version
-dEQP-GLES3.info.shading_language_version
-dEQP-GLES3.info.extensions
-dEQP-GLES3.info.render_target
-dEQP-GLES3.functional.prerequisite.state_reset
-dEQP-GLES3.functional.prerequisite.clear_color
-dEQP-GLES3.functional.prerequisite.read_pixels
-dEQP-GLES31.info.vendor
-dEQP-GLES31.info.renderer
-dEQP-GLES31.info.version
-dEQP-GLES31.info.shading_language_version
-dEQP-GLES31.info.extensions
-dEQP-GLES31.info.render_target
-dEQP-VK.info.build
-dEQP-VK.info.device
-dEQP-VK.info.platform
-dEQP-VK.info.memory_limits
-dEQP-VK.api.smoke.create_sampler
-dEQP-VK.api.smoke.create_shader
-dEQP-VK.api.smoke.triangle
-dEQP-VK.api.smoke.triangle_ext_structs
-dEQP-VK.api.smoke.asm_triangle
-dEQP-VK.api.smoke.asm_triangle_no_opname
-dEQP-VK.api.smoke.unused_resolve_attachment
diff --git a/client/site_tests/graphics_dEQP/scripts/failure_matrix.py b/client/site_tests/graphics_dEQP/scripts/failure_matrix.py
deleted file mode 100755
index 44411174..0000000
--- a/client/site_tests/graphics_dEQP/scripts/failure_matrix.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/python2
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# pylint: disable-msg=W0311
-
-import argparse
-import json
-import os
-
-
-gpu_list = [
-    #'pinetrail',
-    'sandybridge',
-    'ivybridge',
-    'baytrail',
-    'haswell',
-    'broadwell',
-    'braswell',
-    'skylake',
-    'broxton',
-    'mali-t604',
-    'mali-t628',
-    'mali-t760',
-    'mali-t860',
-    'rogue',
-    'tegra',
-]
-
-_PROBLEM_STATUS = ['Fail', 'Flaky']
-_UNKNOWN_STATUS = ['NotSupported', 'Skipped', 'Unknown', None]
-
-status_dict = {
-    'Fail': 'FAIL ',
-    'Flaky': 'flaky',
-    'Pass': '  +  ',
-    'NotSupported': ' --- ',
-    'Skipped': ' === ',
-    'QualityWarning': 'qw   ',
-    'CompatibilityWarning': 'cw   ',
-    'Unknown': ' ??? ',
-    None: ' n/a ',
-}
-
-def load_expectation_dict(json_file):
-  data = {}
-  if os.path.isfile(json_file):
-    with open(json_file, 'r') as f:
-      text = f.read()
-      data = json.loads(text)
-  return data
-
-
-def load_expectations(json_file):
-  data = load_expectation_dict(json_file)
-  expectations = {}
-  # Convert from dictionary of lists to dictionary of sets.
-  for key in data:
-    expectations[key] = set(data[key])
-  return expectations
-
-
-def get_problem_count(dict, gpu):
-  if gpu in dict:
-    if not dict[gpu]:
-      return None
-    count = 0
-    for status in dict[gpu]:
-      if status not in _UNKNOWN_STATUS:
-        count = count + len((dict[gpu])[status])
-    # If every result has an unknown status then don't return a count.
-    if count < 1:
-      return None
-    count = 0
-    # Return counts of truly problematic statuses.
-    for status in _PROBLEM_STATUS:
-      if status in dict[gpu]:
-        count = count + len((dict[gpu])[status])
-  else:
-    print 'Warning: %s not found in dict!' % gpu
-    # Avoid referencing an uninitialized count below.
-    return None
-  return count
-
-
-def get_problem_tests(dict):
-  tests = set([])
-  for gpu in dict:
-    for status in _PROBLEM_STATUS:
-      if status in dict[gpu]:
-        tests = tests.union((dict[gpu])[status])
-  return sorted(list(tests))
-
-
-def get_test_result(dict, test):
-  for key in dict:
-    if test in dict[key]:
-      return key
-  return None
-
-
-argparser = argparse.ArgumentParser(
-    description='Create a matrix of failing tests per GPU.')
-argparser.add_argument('interface',
-                       default='gles2',
-                       help='Interface for matrix (gles2, gles3, gles31).')
-args = argparser.parse_args()
-status = '%s-master.txt.json' % args.interface
-
-dict = {}
-for gpu in gpu_list:
-  filename = 'expectations/%s/%s' % (gpu, status)
-  dict[gpu] = load_expectations(filename)
-
-tests = get_problem_tests(dict)
-
-print 'Legend:'
-for key in status_dict:
-  print '%s  -->  %s' % (status_dict[key], key)
-print
-
-offset = ''
-for gpu in gpu_list:
-  print '%s%s' % (offset, gpu)
-  offset = '%s    |    ' % offset
-print offset
-
-text_count = ''
-text_del = ''
-for gpu in gpu_list:
-  problem_count = get_problem_count(dict, gpu)
-  if problem_count is None:
-    text_count = '%s  %s  ' % (text_count, status_dict[problem_count])
-  else:
-    text_count = '%s%5d    ' % (text_count, problem_count)
-  text_del = '%s=========' % text_del
-text_count = '%s  Total failure count (Fail + Flaky)' % text_count
-print text_del
-print text_count
-print text_del
-
-for test in tests:
-  text = ''
-  for gpu in gpu_list:
-    result = get_test_result(dict[gpu], test)
-    status = status_dict[result]
-    text = '%s  %s  ' % (text, status)
-  text = '%s  %s' % (text, test)
-  print text
-
-print text_del
-print '%s repeated' % text_count
-print text_del
-
diff --git a/client/site_tests/graphics_dEQP/scripts/process_logs.py b/client/site_tests/graphics_dEQP/scripts/process_logs.py
deleted file mode 100644
index 8284082..0000000
--- a/client/site_tests/graphics_dEQP/scripts/process_logs.py
+++ /dev/null
@@ -1,333 +0,0 @@
-#!/usr/bin/python2
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# pylint: disable-msg=W0311
-
-from collections import namedtuple
-import argparse
-import glob
-import json
-import os
-import pprint
-import re
-import subprocess
-
-_EXPECTATIONS_DIR = 'expectations'
-_AUTOTEST_RESULT_ID_TEMPLATE = 'gs://chromeos-autotest-results/%s-chromeos-test/chromeos*/graphics_dEQP/debug/graphics_dEQP.DEBUG'
-#_AUTOTEST_RESULT_TAG_TEMPLATE = 'gs://chromeos-autotest-results/%s/graphics_dEQP/debug/graphics_dEQP.DEBUG'
-_AUTOTEST_RESULT_TAG_TEMPLATE = 'gs://chromeos-autotest-results/%s/debug/client.0.DEBUG'
-# Use this template for tryjob results:
-#_AUTOTEST_RESULT_TEMPLATE = 'gs://chromeos-autotest-results/%s-ihf/*/graphics_dEQP/debug/graphics_dEQP.DEBUG'
-_BOARD_REGEX = re.compile(r'ChromeOS BOARD = (.+)')
-_CPU_FAMILY_REGEX = re.compile(r'ChromeOS CPU family = (.+)')
-_GPU_FAMILY_REGEX = re.compile(r'ChromeOS GPU family = (.+)')
-_TEST_FILTER_REGEX = re.compile(r'dEQP test filter = (.+)')
-_HASTY_MODE_REGEX = re.compile(r'\'hasty\': \'True\'|Running in hasty mode.')
-
-#04/23 07:30:21.624 INFO |graphics_d:0240| TestCase: dEQP-GLES3.functional.shaders.operator.unary_operator.bitwise_not.highp_ivec3_vertex
-#04/23 07:30:21.840 INFO |graphics_d:0261| Result: Pass
-_TEST_RESULT_REGEX = re.compile(r'TestCase: (.+?)$\n.+? Result: (.+?)$',
-                                re.MULTILINE)
-_HASTY_TEST_RESULT_REGEX = re.compile(
-    r'\[stdout\] Test case \'(.+?)\'..$\n'
-    r'.+?\[stdout\]   (Pass|NotSupported|QualityWarning|CompatibilityWarning|'
-    r'Fail|ResourceError|Crash|Timeout|InternalError|Skipped) \((.+)\)', re.MULTILINE)
-Logfile = namedtuple('Logfile', 'job_id name gs_path')
-
-
-def execute(cmd_list):
-  sproc = subprocess.Popen(cmd_list, stdout=subprocess.PIPE)
-  return sproc.communicate()[0]
-
-
-def get_metadata(s):
-  cpu = re.search(_CPU_FAMILY_REGEX, s).group(1)
-  gpu = re.search(_GPU_FAMILY_REGEX, s).group(1)
-  board = re.search(_BOARD_REGEX, s).group(1)
-  filter = re.search(_TEST_FILTER_REGEX, s).group(1)
-  hasty = False
-  if re.search(_HASTY_MODE_REGEX, s):
-    hasty = True
-  print('Found results from %s for GPU = %s, filter = %s and hasty = %r.' %
-        (board, gpu, filter, hasty))
-  return board, gpu, filter, hasty
-
-
-def copy_logs_from_gs_path(autotest_result_path):
-  logs = []
-  gs_paths = execute(['gsutil', 'ls', autotest_result_path]).splitlines()
-  for gs_path in gs_paths:
-    job_id = gs_path.split('/')[3].split('-')[0]
-    # DEBUG logs have more information than INFO logs, especially for hasty.
-    name = os.path.join('logs', job_id + '_graphics_dEQP.DEBUG')
-    logs.append(Logfile(job_id, name, gs_path))
-  for log in logs:
-    execute(['gsutil', 'cp', log.gs_path, log.name])
-  return logs
-
-
-def get_local_logs():
-  logs = []
-  for name in glob.glob(os.path.join('logs', '*_graphics_dEQP.INFO')):
-    job_id = name.split('_')[0]
-    logs.append(Logfile(job_id, name, name))
-  for name in glob.glob(os.path.join('logs', '*_graphics_dEQP.DEBUG')):
-    job_id = name.split('_')[0]
-    logs.append(Logfile(job_id, name, name))
-  return logs
-
-
-def get_all_tests(text):
-  tests = []
-  for test, result in re.findall(_TEST_RESULT_REGEX, text):
-    tests.append((test, result))
-  for test, result, details in re.findall(_HASTY_TEST_RESULT_REGEX, text):
-    tests.append((test, result))
-  return tests
-
-
-def get_not_passing_tests(text):
-  not_passing = []
-  for test, result in re.findall(_TEST_RESULT_REGEX, text):
-    if not (result == 'Pass' or result == 'NotSupported' or result == 'Skipped' or
-            result == 'QualityWarning' or result == 'CompatibilityWarning'):
-      not_passing.append((test, result))
-  for test, result, details in re.findall(_HASTY_TEST_RESULT_REGEX, text):
-    if result != 'Pass':
-      not_passing.append((test, result))
-  return not_passing
-
-
-def load_expectation_dict(json_file):
-  data = {}
-  if os.path.isfile(json_file):
-    print 'Loading file ' + json_file
-    with open(json_file, 'r') as f:
-      text = f.read()
-      data = json.loads(text)
-  return data
-
-
-def load_expectations(json_file):
-  data = load_expectation_dict(json_file)
-  expectations = {}
-  # Convert from dictionary of lists to dictionary of sets.
-  for key in data:
-    expectations[key] = set(data[key])
-  return expectations
-
-
-def expectation_list_to_dict(tests):
-  data = {}
-  tests = list(set(tests))
-  for test, result in tests:
-    if data.has_key(result):
-      new_list = list(set(data[result] + [test]))  # .append() returns None
-      data.pop(result)
-      data[result] = new_list
-    else:
-      data[result] = [test]
-  return data
-
-
-def save_expectation_dict(expectation_path, expectation_dict):
-  # Clean up obsolete expectations.
-  for file_name in glob.glob(expectation_path + '.*'):
-    if not '.hasty.' in file_name or '.hasty' in expectation_path:
-      os.remove(file_name)
-  # Dump json for next iteration.
-  with open(expectation_path + '.json', 'w') as f:
-    json.dump(expectation_dict,
-              f,
-              sort_keys=True,
-              indent=4,
-              separators=(',', ': '))
-  # Dump plain text for autotest.
-  for key in expectation_dict:
-    if expectation_dict[key]:
-      with open(expectation_path + '.' + key, 'w') as f:
-        for test in expectation_dict[key]:
-          f.write(test)
-          f.write('\n')
-
-
-# Figure out duplicates and move them to Flaky result set/list.
-def process_flaky(status_dict):
-  """Figure out duplicates and move them to Flaky result set/list."""
-  clean_dict = {}
-  flaky = set([])
-  if status_dict.has_key('Flaky'):
-    flaky = status_dict['Flaky']
-
-  # Flaky tests are tests with 2 distinct results.
-  for key1 in status_dict.keys():
-    for key2 in status_dict.keys():
-      if key1 != key2:
-        flaky |= status_dict[key1] & status_dict[key2]
-
-  # Remove Flaky tests from other statuses and convert to a dict of lists.
-  for key in status_dict.keys():
-    if key != 'Flaky':
-      not_flaky = list(status_dict[key] - flaky)
-      not_flaky.sort()
-      print 'Number of "%s" is %d.' % (key, len(not_flaky))
-      clean_dict[key] = not_flaky
-
-  # And finally process flaky list/set.
-  flaky_list = list(flaky)
-  flaky_list.sort()
-  clean_dict['Flaky'] = flaky_list
-
-  return clean_dict
-
-
-def merge_expectation_list(expectation_path, tests):
-  status_dict = {}
-  expectation_json = expectation_path + '.json'
-  if os.access(expectation_json, os.R_OK):
-    status_dict = load_expectations(expectation_json)
-  else:
-    print 'Could not load', expectation_json
-  for test, result in tests:
-    if status_dict.has_key(result):
-      new_set = status_dict[result]
-      new_set.add(test)
-      status_dict.pop(result)
-      status_dict[result] = new_set
-    else:
-      status_dict[result] = set([test])
-  clean_dict = process_flaky(status_dict)
-  save_expectation_dict(expectation_path, clean_dict)
-
-
-def load_log(name):
-  """Load test log and clean it from stderr spew."""
-  with open(name) as f:
-    lines = f.read().splitlines()
-  text = ''
-  for line in lines:
-    if ('dEQP test filter =' in line or 'ChromeOS BOARD = ' in line or
-        'ChromeOS CPU family =' in line or 'ChromeOS GPU family =' in line or
-        'TestCase: ' in line or 'Result: ' in line or
-        'Test Options: ' in line or 'Running in hasty mode.' in line or
-        # For hasty logs we have:
-        'Pass (' in line or 'NotSupported (' in line or 'Skipped (' in line or
-        'QualityWarning (' in line or 'CompatibilityWarning (' in line or
-        'Fail (' in line or 'ResourceError (' in line or 'Crash (' in line or
-        'Timeout (' in line or 'InternalError (' in line or
-        ' Test case \'' in line):
-      text += line + '\n'
-  # TODO(ihf): Warn about or reject log files missing the end marker.
-  return text
-
-
-def all_passing(tests):
-  for _, result in tests:
-    if not (result == 'Pass'):
-      return False
-  return True
-
-
-def process_logs(logs):
-  for log in logs:
-    text = load_log(log.name)
-    if text:
-      print '================================================================'
-      print 'Loading %s...' % log.name
-      try:
-        _, gpu, filter, hasty = get_metadata(text)
-        tests = get_all_tests(text)
-        print 'Found %d test results.' % len(tests)
-        if all_passing(tests):
-          # Delete logs that don't contain failures.
-          os.remove(log.name)
-        else:
-          # GPU family goes first in path to simplify adding/deleting families.
-          output_path = os.path.join(_EXPECTATIONS_DIR, gpu)
-          if not os.access(output_path, os.R_OK):
-            os.makedirs(output_path)
-          expectation_path = os.path.join(output_path, filter)
-          if hasty:
-            expectation_path = os.path.join(output_path, filter + '.hasty')
-          merge_expectation_list(expectation_path, tests)
-      except:
-        print 'Error processing %s' % log.name
-
-
-JOB_TAGS_ALL = (
-'select distinct job_tag from chromeos_autotest_db.tko_test_view_2 '
-'where not job_tag like "%%hostless" and '
-'test_name LIKE "graphics_dEQP%%" and '
-'build_version>="%s" and '
-'build_version<="%s" and '
-'((status = "FAIL" and not job_name like "%%.NotPass") or '
-'job_name like "%%.functional" or '
-'job_name like "%%-master")' )
-
-JOB_TAGS_MASTER = (
-'select distinct job_tag from chromeos_autotest_db.tko_test_view_2 '
-'where not job_tag like "%%hostless" and '
-'test_name LIKE "graphics_dEQP%%" and '
-'build_version>="%s" and '
-'build_version<="%s" and '
-'job_name like "%%-master"' )
-
-def get_result_paths_from_autotest_db(host, user, password, build_from,
-                                      build_to):
-  paths = []
-  # TODO(ihf): Introduce flag to toggle between JOB_TAGS_ALL and _MASTER.
-  sql = JOB_TAGS_MASTER % (build_from, build_to)
-  cmd = ['mysql', '-u%s' % user, '-p%s' % password, '--host', host, '-e', sql]
-  p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
-  for line in p.communicate()[0].splitlines():
-    # Skip over unrelated sql spew (really first line only):
-    if line and 'chromeos-test' in line:
-      paths.append(_AUTOTEST_RESULT_TAG_TEMPLATE % line.rstrip())
-  print 'Found %d potential results in the database.' % len(paths)
-  return paths
-
-
-def copy_logs_from_gs_paths(paths):
-  i = 1
-  for gs_path in paths:
-    print '[%d/%d] %s' % (i, len(paths), gs_path)
-    copy_logs_from_gs_path(gs_path)
-    i = i+1
-
-
-argparser = argparse.ArgumentParser(
-    description='Download from GS and process dEQP logs into expectations.')
-argparser.add_argument(
-    '--host',
-    dest='host',
-    default='173.194.81.83',
-    help='Host containing autotest result DB.')
-argparser.add_argument('--user', dest='user', help='Database user account.')
-argparser.add_argument(
-    '--password',
-    dest='password',
-    help='Password for user account.')
-argparser.add_argument(
-    '--from',
-    dest='build_from',
-    help='Lowest build revision to include. Example: R51-8100.0.0')
-argparser.add_argument(
-    '--to',
-    dest='build_to',
-    help='Highest build revision to include. Example: R51-8101.0.0')
-
-args = argparser.parse_args()
-
-print pprint.pformat(args)
-# This is somewhat optional. Remove existing expectations to start clean, but
-# feel free to process them incrementally.
-execute(['rm', '-rf', _EXPECTATIONS_DIR])
-
-copy_logs_from_gs_paths(get_result_paths_from_autotest_db(
-    args.host, args.user, args.password, args.build_from, args.build_to))
-
-# This will include the just downloaded logs from GS as well.
-logs = get_local_logs()
-process_logs(logs)
diff --git a/client/site_tests/graphics_parallel_dEQP/README.md b/client/site_tests/graphics_parallel_dEQP/README.md
new file mode 100644
index 0000000..96e6746
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/README.md
@@ -0,0 +1,2 @@
+Please maintain expectations for this test in
+~/chromiumos/src/platform/graphics/expectations/deqp/
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/alderlake-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/alderlake-fails.txt
new file mode 100644
index 0000000..8ad940d
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/alderlake-fails.txt
@@ -0,0 +1,42 @@
+dEQP-VK.api.tooling_info.validate_getter,Crash
+dEQP-VK.api.tooling_info.validate_tools_properties,Crash
+dEQP-VK.drm_format_modifiers.export_import.a4r4g4b4_unorm_pack16,Fail
+dEQP-VK.drm_format_modifiers.export_import.b4g4r4a4_unorm_pack16,Fail
+dEQP-VK.drm_format_modifiers.export_import.r16_sfloat,Fail
+dEQP-VK.drm_format_modifiers.export_import.r16_unorm,Fail
+dEQP-VK.drm_format_modifiers.export_import.r4g4b4a4_unorm_pack16,Fail
+dEQP-VK.drm_format_modifiers.export_import.r8_unorm,Fail
+dEQP-VK.info.instance_extensions,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_packed_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_packed_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_packed_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_packed_uu_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_uu_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_packed_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_packed_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_packed_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_packed_uu_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_uu_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_packed_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_packed_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_packed_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_packed_uu_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_uu_v4i8_out32,Fail
+
+# This test is buggy in the CTS and should be fixed on the next CTS release.
+# https://gitlab.khronos.org/Tracker/vk-gl-cts/-/issues/3505
+dEQP-VK.subgroups.multiple_dispatches.uniform_subgroup_size,Fail
+
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_allow_varying_subgroup_size_flags_spirv16,Fail
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_allow_varying_subgroup_size_spirv16,Fail
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_flags_spirv16,Fail
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_spirv16,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/alderlake-flakes.txt b/client/site_tests/graphics_parallel_dEQP/boards/alderlake-flakes.txt
new file mode 100644
index 0000000..b3ceb58
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/alderlake-flakes.txt
@@ -0,0 +1,3 @@
+dEQP-VK.api.version_check.version
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_le_inc_wrap_clear_254_ref_253_depthfail
+dEQP-VK.wsi.display_control.register_display_event
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/alderlake-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/alderlake-skips.txt
new file mode 100644
index 0000000..1c2430e
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/alderlake-skips.txt
@@ -0,0 +1,7 @@
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4641
+dEQP-VK.ssbo.phys.layout.random.16bit.scalar.13
+
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/all-chipsets-flakes.txt b/client/site_tests/graphics_parallel_dEQP/boards/all-chipsets-flakes.txt
new file mode 100644
index 0000000..8327a07
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/all-chipsets-flakes.txt
@@ -0,0 +1,3 @@
+# This test flakes across all Mesa drivers (Intel KBL+GLK, AMD, and Qualcomm, at least).
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4575
+dEQP-VK.wsi.display.get_display_plane_capabilities
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/all-chipsets-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/all-chipsets-skips.txt
new file mode 100644
index 0000000..d0a4d92
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/all-chipsets-skips.txt
@@ -0,0 +1,18 @@
+# We don't care to run the performance or stress tests from dEQP-GLES on
+# any boards -- they're very slow but don't generally fail.
+dEQP-GLES.*.performance.*
+dEQP-GLES.*.stress.*
+
+# This set of tests may emit warnings, but is not required for
+# conformance and I've never seen anyone pay attention to it.
+dEQP-GLES.*.accuracy.*
+
+# The deqp package ships an Android mustpass list instead of a normal
+# Linux one, which helps us on host check for some extended behavior
+# expectations from Android, but also has some expectations that host
+# dEQP should *not* be trying to enforce (maximum Vulkan version,
+# extensions exposed, layers exposed).  Skip until upstream dEQP can
+# get fixed.  Related: https://gerrit.khronos.org/c/vk-gl-cts/+/5715
+dEQP-VK.api.info.android.no_layers
+dEQP-VK.api.info.android.no_unknown_extensions
+
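The code that consumes these boards/*-skips.txt, *-flakes.txt and *-fails.txt lists lives in the graphics_parallel_dEQP test itself and is outside this diff; what is visible here is only the file format: blank lines and '#' comments are ignored, skips/flakes lines hold a dEQP case name or a regex-style pattern such as dEQP-GLES.*.performance.*, and fails lines add a comma-separated expected status. A minimal sketch of a reader under those assumptions, with hypothetical helper names load_patterns and is_listed, could look like:

import re

def load_patterns(path):
    """Return compiled regexes for each non-comment, non-blank line."""
    patterns = []
    with open(path) as f:
        for line in f:
            # A fails.txt line may carry ',Status'; keep only the pattern part.
            entry = line.strip().split(',')[0]
            if not entry or entry.startswith('#'):
                continue
            patterns.append(re.compile(entry + '$'))
    return patterns

def is_listed(test_name, patterns):
    """True if the dEQP case name matches any listed pattern."""
    return any(p.match(test_name) for p in patterns)

# Example (paths relative to the graphics_parallel_dEQP test directory):
# skips = load_patterns('boards/all-chipsets-skips.txt')
# is_listed('dEQP-VK.api.info.android.no_layers', skips)  # True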
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/asuka-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/asuka-fails.txt
new file mode 100644
index 0000000..5fe5a31
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/asuka-fails.txt
@@ -0,0 +1,2 @@
+dEQP-GLES31.functional.image_load_store.2d_array.atomic.exchange_r32f_return_value,Fail
+dEQP-GLES31.functional.image_load_store.3d.atomic.exchange_r32f_return_value,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/asuka-flakes.txt b/client/site_tests/graphics_parallel_dEQP/boards/asuka-flakes.txt
new file mode 100644
index 0000000..3ecfe7a
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/asuka-flakes.txt
@@ -0,0 +1,5 @@
+# Flakes seen in, for example:
+# dEQP-GLES31.functional.synchronization.inter_invocation.ssbo_atomic_write_read
+# dEQP-GLES31.functional.ssbo.layout.2_level_array.packed.column_major_mat3
+# dEQP-GLES31.functional.ssbo.layout.random.arrays_of_arrays.1
+dEQP-GLES31.functional.*ssbo.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/broadwell-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/broadwell-fails.txt
new file mode 100644
index 0000000..efbe429
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/broadwell-fails.txt
@@ -0,0 +1,2 @@
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4102
+dEQP-GLES31.functional.geometry_shading.layered.render_with_default_layer_cubemap,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/broadwell-flakes.txt b/client/site_tests/graphics_parallel_dEQP/boards/broadwell-flakes.txt
new file mode 100644
index 0000000..2bb1eec
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/broadwell-flakes.txt
@@ -0,0 +1,2 @@
+# Intermittently passes on lulu GT2, at least
+dEQP-GLES31.functional.geometry_shading.layered.render_with_default_layer_cubemap
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/broadwell-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/broadwell-skips.txt
new file mode 100644
index 0000000..1e2dc05
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/broadwell-skips.txt
@@ -0,0 +1,7 @@
+# Intermittent timeouts
+dEQP-GLES31.functional.ssbo.layout.random.all_shared_buffer.48
+
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/broxton-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/broxton-fails.txt
new file mode 100644
index 0000000..20f5676
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/broxton-fails.txt
@@ -0,0 +1,40 @@
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/2833
+dEQP-VK.rasterization.interpolation_multisample_16_bit.lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_16_bit.non_strict_lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_2_bit.lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_2_bit.non_strict_lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_4_bit.lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_4_bit.non_strict_lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_8_bit.lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_8_bit.non_strict_lines_wide,Fail
+
+dEQP-VK.api.image_clearing.core.partial_clear_color_attachment.single_layer.r16g16_sfloat_200x180,Fail
+dEQP-VK.api.image_clearing.dedicated_allocation.partial_clear_color_attachment.single_layer.a2r10g10b10_unorm_pack32,Fail
+dEQP-VK.clipping.user_defined.clip_cull_distance_dynamic_index.vert.4_4,Fail
+dEQP-VK.clipping.user_defined.clip_cull_distance.vert_geom.5_3,Fail
+dEQP-VK.clipping.user_defined.clip_distance_dynamic_index.vert_tess.6,Fail
+dEQP-VK.clipping.user_defined.clip_distance_dynamic_index.vert_tess.8,Fail
+dEQP-VK.clipping.user_defined.clip_distance.vert_geom.7_fragmentshader_read,Fail
+dEQP-VK.clipping.user_defined.clip_distance.vert_tess_geom.7,Fail
+dEQP-VK.fragment_operations.scissor.points.outside,Fail
+dEQP-VK.memory.pipeline_barrier.host_write_index_buffer.1024,Fail
+dEQP-VK.memory.pipeline_barrier.host_write_index_buffer.8192,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.multiset.storage_buffer.numcmdbuffers_2.reverseorder.numdescriptorsetbindings_1.numdynamicbindings_1.numnondynamicbindings_1,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.single_set.uniform_buffer.numcmdbuffers_2.reverseorder.numdescriptorsetbindings_1.numdynamicbindings_2.numnondynamicbindings_0,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.single_set.uniform_buffer.numcmdbuffers_2.reverseorder.numdescriptorsetbindings_1.numdynamicbindings_2.numnondynamicbindings_1,Fail
+dEQP-VK.pipeline.multisample.sampled_image.64x64_4.r32g32b32a32_sfloat.samples_8,Fail
+dEQP-VK.pipeline.multisample.sampled_image.64x64_4.r32_uint.samples_4,Fail
+dEQP-VK.pipeline.multisample.sampled_image.79x31_4.r32_uint.samples_16,Fail
+dEQP-VK.pipeline.render_to_image.core.2d_array.small.r32g32b32a32_sfloat_d16_unorm,Fail
+dEQP-VK.pipeline.render_to_image.core.2d.huge.height.r8g8b8a8_unorm,Fail
+dEQP-VK.pipeline.render_to_image.core.2d.huge.height.r8g8b8a8_unorm_s8_uint,Fail
+dEQP-VK.pipeline.render_to_image.core.cube.huge.width_height.r8g8b8a8_unorm,Fail
+dEQP-VK.pipeline.render_to_image.core.cube.small.r32_uint_s8_uint,Fail
+dEQP-VK.pipeline.render_to_image.dedicated_allocation.2d_array.small.a2b10g10r10_uint_pack32_d16_unorm,Fail
+dEQP-VK.pipeline.render_to_image.dedicated_allocation.2d_array.small.r32g32b32a32_sfloat_s8_uint,Fail
+dEQP-VK.pipeline.render_to_image.dedicated_allocation.cube.small.r32g32b32a32_sfloat_d16_unorm,Fail
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_32_wait_query_with_availability_draw_points,Fail
+dEQP-VK.renderpass.dedicated_allocation.formats.r8g8b8a8_unorm.clear.clear_draw,Fail
+dEQP-VK.tessellation.primitive_discard.quads_fractional_even_spacing_ccw,Fail
+dEQP-VK.tessellation.shader_input_output.patch_vertices_5_in_10_out,Fail
+dEQP-VK.tessellation.user_defined_io.per_patch_array.vertex_io_array_size_implicit.quads,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/broxton-flakes.txt b/client/site_tests/graphics_parallel_dEQP/boards/broxton-flakes.txt
new file mode 100644
index 0000000..ef71503
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/broxton-flakes.txt
@@ -0,0 +1,11 @@
+# There is clearly some flakiness around depth/stencil.
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face.*
+dEQP-VK.pipeline.stencil.format.*.states.*
+dEQP-VK.pipeline.depth.*.format.*
+
+# Something wrong with semaphores
+dEQP-VK.synchronization.cross_instance.*binary_semaphore.*
+
+# Some other flakes
+dEQP-VK.transform_feedback.simple.*
+dEQP-VK.pipeline.dynamic_offset.graphics.arrays.storage_buffer.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/broxton-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/broxton-skips.txt
new file mode 100644
index 0000000..1c2430e
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/broxton-skips.txt
@@ -0,0 +1,7 @@
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4641
+dEQP-VK.ssbo.phys.layout.random.16bit.scalar.13
+
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/cometlake-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/cometlake-fails.txt
new file mode 100644
index 0000000..6b7a044
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/cometlake-fails.txt
@@ -0,0 +1,371 @@
+dEQP-VK.query_pool.concurrent_queries.primary_command_buffer,Fail
+dEQP-VK.query_pool.concurrent_queries.secondary_command_buffer,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.32bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.primary.64bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.32bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary.64bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.32bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.fragment_shader_invocations.secondary_inherited.64bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.32bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.primary.64bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.32bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary.64bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.32bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_dstoffset_cmdcopyquerypoolresults_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_line_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_line_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_line_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_line_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_point_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_triangle_fan,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_triangle_list,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_triangle_list_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_triangle_strip,Fail
+dEQP-VK.query_pool.statistics_query.host_query_reset.fragment_shader_invocations.secondary_inherited.64bits_triangle_strip_with_adjacency,Fail
+dEQP-VK.query_pool.statistics_query.multiple_queries.input_assembly_vertex_fragment_cmdcopy_dstoffset,Fail
+dEQP-VK.query_pool.statistics_query.multiple_queries.input_assembly_vertex_fragment_cmdcopy,Fail
+dEQP-VK.query_pool.statistics_query.multiple_queries.input_assembly_vertex_fragment,Fail
+dEQP-VK.query_pool.statistics_query.multiple_queries.input_assembly_vertex_fragment_partial_cmdcopy_dstoffset,Fail
+dEQP-VK.query_pool.statistics_query.multiple_queries.input_assembly_vertex_fragment_partial_cmdcopy,Fail
+dEQP-VK.query_pool.statistics_query.multiple_queries.input_assembly_vertex_fragment_partial,Fail
+dEQP-VK.query_pool.statistics_query.multiple_queries.input_assembly_vertex_fragment_wait_cmdcopy_dstoffset,Fail
+dEQP-VK.query_pool.statistics_query.multiple_queries.input_assembly_vertex_fragment_wait_cmdcopy,Fail
+dEQP-VK.query_pool.statistics_query.multiple_queries.input_assembly_vertex_fragment_wait,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/cometlake-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/cometlake-skips.txt
new file mode 100644
index 0000000..4e4c17f
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/cometlake-skips.txt
@@ -0,0 +1,12 @@
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4641
+dEQP-VK.ssbo.phys.layout.random.16bit.scalar.13
+
+dEQP-VK.binding_model.descriptorset_random.sets8.constant.ubolimitlow.sbolimithigh.imglimithigh.iublimithigh.uab.frag.0
+dEQP-VK.binding_model.descriptorset_random.sets8.unifindexed.ubolimitlow.sbolimithigh.imglimithigh.iublimitlow.uab.frag.0
+dEQP-VK.binding_model.descriptorset_random.sets8.unifindexed.ubolimitlow.sbolimithigh.imglimithigh.iublimithigh.uab.frag.0
+dEQP-VK.binding_model.descriptorset_random.sets8.unifindexed.ubolimitlow.sbolimithigh.imglimithigh.noiub.uab.frag.0
+
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/geminilake-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/geminilake-fails.txt
new file mode 100644
index 0000000..0767da3
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/geminilake-fails.txt
@@ -0,0 +1,8 @@
+dEQP-VK.info.instance_extensions,Fail
+
+# This test is buggy in the CTS and should be fixed on the next CTS release.
+# https://gitlab.khronos.org/Tracker/vk-gl-cts/-/issues/3505
+dEQP-VK.subgroups.multiple_dispatches.uniform_subgroup_size,Fail
+
+# This should be fixed on the next Mesa uprev.
+dEQP-VK.wsi.display_control.register_device_event,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/geminilake-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/geminilake-skips.txt
new file mode 100644
index 0000000..50daaf1
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/geminilake-skips.txt
@@ -0,0 +1,20 @@
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4641
+dEQP-VK.ssbo.phys.layout.random.16bit.scalar.13
+
+# timeouts on octopus.  buffer_device_address has workarounds in VK-GL-CTS
+# to extend the watchdog during pipeline creation, so we're probably just
+# slow in the compiler.
+dEQP-VK.binding_model.buffer_device_address.set3.depth3.basessbo.convertcheck.*
+dEQP-VK.binding_model.buffer_device_address.set3.depth3.baseubo.convertcheck.*
+
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
+
+# We have random failures all over compute_shader_invocations.
+dEQP-VK.query_pool.statistics_query.compute_shader_invocations.*
+dEQP-VK.query_pool.statistics_query.host_query_reset.compute_shader_invocations.*
+
+# Test sometimes fails.
+dEQP-VK.wsi.display_control.register_display_event
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/jasperlake-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/jasperlake-fails.txt
new file mode 100644
index 0000000..927df63
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/jasperlake-fails.txt
@@ -0,0 +1,12 @@
+dEQP-VK.api.tooling_info.validate_getter,Crash
+dEQP-VK.api.tooling_info.validate_tools_properties,Crash
+dEQP-VK.info.instance_extensions,Fail
+
+# This test is buggy in the CTS and should be fixed on the next CTS release.
+# https://gitlab.khronos.org/Tracker/vk-gl-cts/-/issues/3505
+dEQP-VK.subgroups.multiple_dispatches.uniform_subgroup_size,Fail
+
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_allow_varying_subgroup_size_flags_spirv16,Fail
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_allow_varying_subgroup_size_spirv16,Fail
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_flags_spirv16,Fail
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_spirv16,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/jasperlake-flakes.txt b/client/site_tests/graphics_parallel_dEQP/boards/jasperlake-flakes.txt
new file mode 100644
index 0000000..00bed99
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/jasperlake-flakes.txt
@@ -0,0 +1,3 @@
+dEQP-VK.api.version_check.version
+dEQP-VK.ssbo.phys.layout.random.all_shared_buffer
+dEQP-VK.wsi.display_control.register_display_event
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/jasperlake-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/jasperlake-skips.txt
new file mode 100644
index 0000000..3091f86
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/jasperlake-skips.txt
@@ -0,0 +1,14 @@
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4641
+dEQP-VK.ssbo.phys.layout.random.16bit.scalar.13
+
+# timeouts on dedede.  buffer_device_address has workarounds in VK-GL-CTS
+# to extend the watchdog during pipeline creation, so we're probably just
+# slow in the compiler.
+dEQP-VK.binding_model.buffer_device_address.set3.depth3.basessbo.convertcheck.*
+dEQP-VK.binding_model.buffer_device_address.set3.depth3.baseubo.convertcheck.*
+
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
+
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/kabylake-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/kabylake-fails.txt
new file mode 100644
index 0000000..0767da3
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/kabylake-fails.txt
@@ -0,0 +1,8 @@
+dEQP-VK.info.instance_extensions,Fail
+
+# This test is buggy in the CTS and should be fixed on the next CTS release.
+# https://gitlab.khronos.org/Tracker/vk-gl-cts/-/issues/3505
+dEQP-VK.subgroups.multiple_dispatches.uniform_subgroup_size,Fail
+
+# This should be fixed on the next Mesa uprev.
+dEQP-VK.wsi.display_control.register_device_event,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/kabylake-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/kabylake-skips.txt
new file mode 100644
index 0000000..1c2430e
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/kabylake-skips.txt
@@ -0,0 +1,7 @@
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4641
+dEQP-VK.ssbo.phys.layout.random.16bit.scalar.13
+
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/mali-g57-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/mali-g57-fails.txt
new file mode 100644
index 0000000..66fbfc7
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/mali-g57-fails.txt
@@ -0,0 +1 @@
+dEQP-GLES31.functional.debug.negative_coverage.log.tessellation.single_tessellation_stage,Crash
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/mali-g57-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/mali-g57-skips.txt
new file mode 100644
index 0000000..6127dc2
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/mali-g57-skips.txt
@@ -0,0 +1,6 @@
+# The flush_finish tests emit warnings when observed behavior doesn't
+# match their expectations, but tiling GPUs don't behave the way their
+# timing setup assumes, so the tests run exceptionally long and mostly
+# waste developer time.  Being warnings-free is not required for
+# conformance.
+dEQP-GLES.*.functional.flush_finish.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/mali-g72-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/mali-g72-fails.txt
new file mode 100644
index 0000000..476912b
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/mali-g72-fails.txt
@@ -0,0 +1,7 @@
+dEQP-VK.api.device_init.create_device_unsupported_features,Fail
+dEQP-VK.api.version_check.version,Fail
+dEQP-VK.draw.renderpass.multiple_interpolation.separate.with_sample_decoration.4_samples,Fail
+dEQP-VK.info.instance_extensions,Fail
+dEQP-VK.pipeline.multisample_shader_builtin.write_sample_mask.1_samples,Fail
+dEQP-VK.transform_feedback.fuzz.2_level_struct_array.instance_array_vertex,Fail
+dEQP-VK.transform_feedback.fuzz.random_vertex.all_instance_array.76,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/mali-g72-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/mali-g72-skips.txt
new file mode 100644
index 0000000..144db95
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/mali-g72-skips.txt
@@ -0,0 +1,15 @@
+# Times out at 60 seconds
+dEQP-VK.ssbo.phys.layout.random.16bit.scalar.13
+dEQP-VK.api.device_init.create_instance_device_intentional_alloc_fail
+
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt, which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
+
+# The flush_finish tests throw warnings when behavior doesn't match
+# their timing expectations, but tiling GPUs don't behave the way the
+# tests assume, so these cases take exceptionally long to run and
+# only waste developer time.  Being warnings-free is not required
+# for conformance.
+dEQP-GLES.*.functional.flush_finish.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/mali-t760-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/mali-t760-fails.txt
new file mode 100644
index 0000000..552a309
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/mali-t760-fails.txt
@@ -0,0 +1,5 @@
+dEQP-GLES3.functional.shaders.matrix.inverse.dynamic.lowp_mat4_float_vertex,Fail
+dEQP-GLES3.functional.shaders.matrix.inverse.dynamic.mediump_mat3_float_vertex,Fail
+dEQP-GLES3.functional.shaders.matrix.inverse.dynamic.mediump_mat4_float_vertex,Fail
+dEQP-GLES3.functional.shaders.matrix.inverse.dynamic.lowp_mat3_float_vertex,Fail
+dEQP-GLES3.functional.state_query.indexed.transform_feedback_switching_buffer,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/mali-t860-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/mali-t860-fails.txt
new file mode 100644
index 0000000..d5c7160
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/mali-t860-fails.txt
@@ -0,0 +1,99 @@
+dEQP-GLES3.functional.state_query.indexed.transform_feedback_switching_buffer,Fail
+
+# Tracked in b/231478389.
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.packed_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.shared_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.per_block_buffer.std140_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.packed_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.shared_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_column_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_column_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_column_major_matrix_row_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_row_major_matrix_column_major_matrixarray_column_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_row_major_matrix_column_major_matrixarray_row_major_instance_array_vertex,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_both,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_fragment,Fail
+dEQP-GLES3.functional.ubo.single_nested_struct_mixed_matrix_packing.single_buffer.std140_block_row_major_matrix_row_major_matrixarray_column_major_instance_array_vertex,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/mali-t860-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/mali-t860-skips.txt
new file mode 100644
index 0000000..6127dc2
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/mali-t860-skips.txt
@@ -0,0 +1,6 @@
+# The flush_finish tests throw warnings when behavior doesn't match
+# their timing expectations, but tiling GPUs don't behave the way the
+# tests assume, so these cases take exceptionally long to run and
+# only waste developer time.  Being warnings-free is not required
+# for conformance.
+dEQP-GLES.*.functional.flush_finish.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/picasso-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/picasso-fails.txt
new file mode 100644
index 0000000..dec8b51
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/picasso-fails.txt
@@ -0,0 +1,62 @@
+dEQP-VK.api.buffer.invalid_buffer_features.vk_format_d16_unorm,Fail
+dEQP-VK.api.buffer.invalid_buffer_features.vk_format_d32_sfloat,Fail
+dEQP-VK.api.buffer.invalid_buffer_features.vk_format_s8_uint,Fail
+dEQP-VK.api.copy_and_blit.copy_commands2.resolve_image.diff_layout_copy_before_resolving.4_bit_general_general,Fail
+dEQP-VK.api.copy_and_blit.copy_commands2.resolve_image.diff_layout_copy_before_resolving.4_bit_transfer_src_optimal_general,Fail
+dEQP-VK.api.copy_and_blit.copy_commands2.resolve_image.diff_layout_copy_before_resolving.8_bit_general_general,Fail
+dEQP-VK.api.copy_and_blit.copy_commands2.resolve_image.diff_layout_copy_before_resolving.8_bit_transfer_src_optimal_general,Fail
+dEQP-VK.api.copy_and_blit.core.resolve_image.diff_layout_copy_before_resolving.4_bit_general_general,Fail
+dEQP-VK.api.copy_and_blit.core.resolve_image.diff_layout_copy_before_resolving.4_bit_transfer_src_optimal_general,Fail
+dEQP-VK.api.copy_and_blit.core.resolve_image.diff_layout_copy_before_resolving.8_bit_general_general,Fail
+dEQP-VK.api.copy_and_blit.core.resolve_image.diff_layout_copy_before_resolving.8_bit_transfer_src_optimal_general,Fail
+dEQP-VK.api.copy_and_blit.dedicated_allocation.resolve_image.diff_layout_copy_before_resolving.4_bit_general_general,Fail
+dEQP-VK.api.copy_and_blit.dedicated_allocation.resolve_image.diff_layout_copy_before_resolving.4_bit_transfer_src_optimal_general,Fail
+dEQP-VK.api.copy_and_blit.dedicated_allocation.resolve_image.diff_layout_copy_before_resolving.8_bit_general_general,Fail
+dEQP-VK.api.copy_and_blit.dedicated_allocation.resolve_image.diff_layout_copy_before_resolving.8_bit_transfer_src_optimal_general,Fail
+dEQP-VK.api.device_init.create_device_unsupported_features,Fail
+dEQP-VK.draw.multiple_interpolation.separate.4_samples,Fail
+dEQP-VK.draw.multiple_interpolation.structured.4_samples,Fail
+dEQP-VK.fragment_operations.transient_attachment_bit.stencil_load_store_op_test_local_bit,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.isampler1darray_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.isampler1d_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.isampler2darray_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.isampler2d_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.isampler3d_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.isamplercubearray_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.isamplercube_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler1darray_fixed_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler1darray_float_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler1darrayshadow_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler1d_fixed_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler1d_float_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler1dshadow_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler2darray_fixed_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler2darray_float_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler2darrayshadow_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler2d_fixed_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler2d_float_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler2dshadow_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler3d_fixed_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.sampler3d_float_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.samplercubearray_fixed_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.samplercubearray_float_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.samplercubearrayshadow_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.samplercube_fixed_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.samplercube_float_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.samplercubeshadow_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.usampler1darray_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.usampler1d_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.usampler2darray_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.usampler2d_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.usampler3d_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.usamplercubearray_zero_uv_width_fragment,Fail
+dEQP-VK.glsl.texture_functions.query.texturequerylod.usamplercube_zero_uv_width_fragment,Fail
+dEQP-VK.graphicsfuzz.cov-condition-bitfield-extract-integer,Fail
+dEQP-VK.info.instance_extensions,Fail
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_0_without_availability,Fail
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_0_without_availability,Fail
+dEQP-VK.renderpass2.depth_stencil_resolve.image_2d_49_13.samples_2.d32_sfloat_s8_uint_separate_layouts.depth_zero_stencil_max_testing_stencil_samplemask,Fail
+dEQP-VK.texture.filtering_anisotropy.single_level.anisotropy_2.mag_linear_min_linear,Fail
+dEQP-VK.texture.filtering_anisotropy.single_level.anisotropy_4.mag_linear_min_linear,Fail
+dEQP-VK.texture.filtering_anisotropy.single_level.anisotropy_8.mag_linear_min_linear,Fail
+dEQP-VK.texture.filtering_anisotropy.single_level.anisotropy_max.mag_linear_min_linear,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/picasso-flakes.txt b/client/site_tests/graphics_parallel_dEQP/boards/picasso-flakes.txt
new file mode 100644
index 0000000..710af82
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/picasso-flakes.txt
@@ -0,0 +1,2 @@
+# Intermittent failure in ~5% of runs on zork
+dEQP-VK.multiview.multisample.1_2_4_8
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/picasso-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/picasso-skips.txt
new file mode 100644
index 0000000..6cba30d
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/picasso-skips.txt
@@ -0,0 +1,4 @@
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt, which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/pyro-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/pyro-fails.txt
new file mode 100644
index 0000000..1e444f7
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/pyro-fails.txt
@@ -0,0 +1,1334 @@
+dEQP-VK.api.copy_and_blit.copy_commands2.resolve_image.diff_image_size.src_256_256_11_2_bit,Fail
+dEQP-VK.api.copy_and_blit.core.resolve_image.partial.2_bit,Fail
+dEQP-VK.api.copy_and_blit.core.resolve_image.with_regions.8_bit,Fail
+dEQP-VK.api.image_clearing.core.clear_color_image.2d.optimal.multiple_layers.b8g8r8a8_unorm_200x180_clamp_input_multiple_subresourcerange,Fail
+dEQP-VK.api.image_clearing.core.clear_color_image.2d.optimal.remaining_array_layers.r16g16b16a16_unorm_clamp_input_multiple_subresourcerange,Fail
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.always_not_equal_never_not_equal,Fail
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_greater_equal_equal,Fail
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_less_greater_or_equal_less_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_never_less_greater,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_greater_or_equal_greater_or_equal_less_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_less_greater_greater,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_never_equal_less,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.equal_not_equal_equal_equal,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_greater_not_equal_greater,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_never_greater_or_equal_equal,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_greater_or_equal_not_equal_less_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_greater_less_or_equal_never,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_not_equal_less_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_or_equal_less_not_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.never_greater_or_equal_never_equal,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_equal_equal_greater,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint_separate_layouts.compare_ops.always_equal_less_or_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint_separate_layouts.compare_ops.always_not_equal_never_not_equal,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint_separate_layouts.compare_ops.greater_never_always_less_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint_separate_layouts.compare_ops.greater_or_equal_always_less_never,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint_separate_layouts.compare_ops.greater_or_equal_less_or_equal_always_less,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint_separate_layouts.compare_ops.less_always_greater_or_equal_less,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint_separate_layouts.compare_ops.less_or_equal_less_or_equal_greater_or_equal_never,Fail
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint_separate_layouts.compare_ops.not_equal_never_never_never,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.always_less_or_equal_not_equal_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.equal_greater_or_equal_greater_greater,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_greater_or_equal_never_less,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_equal_never_less_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_greater_equal_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.never_not_equal_not_equal_never,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_always_always_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_not_equal_greater_always,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.equal_greater_or_equal_always_never,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_greater_not_equal_greater,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_less_or_equal_never_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_less_not_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_equal_equal_greater,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_less_or_equal_not_equal_greater,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.always_equal_less_or_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.always_not_equal_never_not_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.equal_always_equal_less_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.equal_equal_not_equal_less,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.greater_always_less_not_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.greater_equal_greater_or_equal_always,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.greater_less_equal_never,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.greater_or_equal_greater_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.less_greater_equal_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.less_never_not_equal_always,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.less_or_equal_equal_less_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.never_less_greater_or_equal_less_or_equal,Fail
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.not_equal_always_always_equal,Fail
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_greater_or_equal_greater_or_equal_less_or_equal,Fail
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_less_greater_greater,Fail
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.equal_less_never_always,Fail
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_less_or_equal_never_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_not_equal_greater_or_equal_greater,Fail
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_greater_less_or_equal_never,Fail
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_greater_never_less,Fail
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_never_greater_not_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d16_unorm.compare_ops.always_less_greater_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d16_unorm.compare_ops.equal_less_never_always,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d16_unorm.compare_ops.equal_less_or_equal_less_less_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d16_unorm.compare_ops.equal_not_equal_equal_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d16_unorm.compare_ops.greater_never_greater_or_equal_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d16_unorm.compare_ops.greater_or_equal_always_not_equal_always,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d16_unorm.compare_ops.less_not_equal_less_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d16_unorm.compare_ops.not_equal_greater_or_equal_greater_or_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint.compare_ops.always_greater_less_always,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint.compare_ops.always_less_or_equal_not_equal_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint.compare_ops.greater_never_always_less_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_greater_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_less_or_equal_always_less,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint.compare_ops.less_or_equal_equal_less_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint.compare_ops.less_or_equal_greater_or_equal_equal_always,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint.compare_ops.never_never_less_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint_separate_layouts.compare_ops.greater_greater_not_equal_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint_separate_layouts.compare_ops.greater_or_equal_always_not_equal_always,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint_separate_layouts.compare_ops.greater_or_equal_greater_or_equal_not_equal_less_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint_separate_layouts.compare_ops.greater_or_equal_not_equal_greater_or_equal_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint_separate_layouts.compare_ops.less_greater_less_or_equal_never,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint_separate_layouts.compare_ops.less_not_equal_less_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint_separate_layouts.compare_ops.less_or_equal_greater_always_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d24_unorm_s8_uint_separate_layouts.compare_ops.less_or_equal_greater_never_less,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat.compare_ops.always_always_never_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat.compare_ops.always_never_equal_less,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat.compare_ops.greater_less_or_equal_never_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat.compare_ops.greater_not_equal_greater_less,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat.compare_ops.greater_or_equal_equal_greater_not_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat.compare_ops.greater_or_equal_never_greater_never,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat.compare_ops.less_or_equal_never_greater_not_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat.compare_ops.not_equal_greater_or_equal_greater_or_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat.compare_ops.not_equal_less_or_equal_not_equal_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.always_equal_always_never,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.always_greater_less_always,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.always_not_equal_never_not_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.greater_equal_greater_or_equal_always,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.greater_less_equal_never,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_greater_or_equal_never_less,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.less_equal_never_less_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.less_greater_equal_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.less_never_not_equal_always,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_less_or_equal_greater_or_equal_never,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.never_equal_less_or_equal_less,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.never_never_less_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint.compare_ops.not_equal_always_always_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.always_always_never_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.equal_less_never_always,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.equal_less_or_equal_less_less_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.less_greater_or_equal_less_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.less_or_equal_greater_never_less,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.not_equal_equal_equal_greater,Fail
+dEQP-VK.pipeline.depth.nocolor.format.d32_sfloat_s8_uint_separate_layouts.compare_ops.not_equal_greater_or_equal_greater_or_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.x8_d24_unorm_pack32.compare_ops.always_equal_less_or_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.x8_d24_unorm_pack32.compare_ops.greater_greater_or_equal_less_or_equal_not_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_greater_equal_greater_or_equal,Fail
+dEQP-VK.pipeline.depth.nocolor.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_greater_or_equal_never_less,Fail
+dEQP-VK.pipeline.depth.nocolor.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_less_or_equal_always_less,Fail
+dEQP-VK.pipeline.depth.nocolor.format.x8_d24_unorm_pack32.compare_ops.less_always_greater_or_equal_less,Fail
+dEQP-VK.pipeline.depth.nocolor.format.x8_d24_unorm_pack32.compare_ops.not_equal_not_equal_greater_always,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.arrays.uniform_buffer.numcmdbuffers_2.reverseorder.numdescriptorsetbindings_1.numdynamicbindings_1.numnondynamicbindings_0,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.multiset.storage_buffer.numcmdbuffers_2.sameorder.numdescriptorsetbindings_1.numdynamicbindings_1.numnondynamicbindings_0,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.multiset.uniform_buffer.numcmdbuffers_1.sameorder.numdescriptorsetbindings_2.numdynamicbindings_1.numnondynamicbindings_0,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.multiset.uniform_buffer.numcmdbuffers_2.reverseorder.numdescriptorsetbindings_1.numdynamicbindings_1.numnondynamicbindings_0,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.single_set.storage_buffer.numcmdbuffers_1.sameorder.numdescriptorsetbindings_1.numdynamicbindings_1.numnondynamicbindings_0,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.single_set.storage_buffer.numcmdbuffers_1.sameorder.numdescriptorsetbindings_2.numdynamicbindings_1.numnondynamicbindings_0,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.single_set.storage_buffer.numcmdbuffers_2.reverseorder.numdescriptorsetbindings_1.numdynamicbindings_1.numnondynamicbindings_0,Fail
+dEQP-VK.pipeline.dynamic_offset.graphics.single_set.uniform_buffer.numcmdbuffers_1.sameorder.numdescriptorsetbindings_2.numdynamicbindings_1.numnondynamicbindings_0,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_back_eq_inc_wrap_clear_254_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_back_ge_dec_wrap_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_back_gt_keep_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_back_le_inc_wrap_clear_255_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_back_xf_dec_wrap_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_back_xf_inc_clamp_clear_254_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_back_xt_dec_clamp_clear_1_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_eq_dec_clamp_clear_1_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_ge_dec_clamp_clear_0_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_ge_dec_wrap_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_ge_zero_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_gt_replace_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_gt_zero_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_xf_inc_clamp_clear_254_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_xf_invert_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_xf_zero_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_xt_dec_clamp_clear_1_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_xt_dec_wrap_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_dual_xt_inc_clamp_clear_255_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_ge_inc_wrap_clear_254_ref_253_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_ge_inc_wrap_clear_254_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_ge_zero_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_gt_dec_wrap_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_gt_replace_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_le_dec_wrap_clear_1_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_le_inc_clamp_clear_255_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_lt_dec_wrap_clear_1_ref_2_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_xf_dec_clamp_clear_1_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_xt_invert_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_both_single_xt_keep_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_ge_dec_wrap_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_gt_inc_clamp_clear_254_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_gt_replace_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_gt_zero_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_lt_invert_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_xf_dec_wrap_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_xf_replace_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_xt_dec_clamp_clear_1_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_xt_invert_clear_102_ref_101_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_xt_invert_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_xt_zero_clear_102_ref_101_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.after_pipelines.stencil_state_face_front_xt_zero_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_ge_dec_clamp_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_gt_dec_wrap_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_gt_inc_clamp_clear_255_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_gt_invert_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_le_inc_wrap_clear_255_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_xf_dec_clamp_clear_0_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_xf_dec_wrap_clear_1_ref_2_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_xf_inc_clamp_clear_254_ref_253_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_xt_dec_clamp_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_xt_dec_clamp_clear_1_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_xt_dec_clamp_clear_1_ref_2_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_xt_dec_wrap_clear_0_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_xt_inc_clamp_clear_255_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_xt_inc_wrap_clear_254_ref_253_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_back_xt_inc_wrap_clear_254_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_dual_eq_dec_clamp_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_dual_eq_dec_wrap_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_dual_eq_replace_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_dual_gt_dec_wrap_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_dual_gt_inc_clamp_clear_255_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_dual_le_dec_wrap_clear_1_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_dual_lt_replace_clear_102_ref_101_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_dual_xt_dec_clamp_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_dual_xt_inc_clamp_clear_254_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_dual_xt_replace_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_single_gt_dec_clamp_clear_1_ref_2_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_single_gt_inc_wrap_clear_255_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_single_gt_keep_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_single_gt_replace_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_single_lt_dec_wrap_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_single_lt_keep_clear_102_ref_101_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_both_single_xt_keep_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_eq_dec_clamp_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_ge_dec_clamp_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_ge_dec_clamp_clear_1_ref_2_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_le_dec_clamp_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_le_invert_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_xf_zero_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_xt_dec_clamp_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_xt_dec_clamp_clear_1_ref_2_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_xt_inc_wrap_clear_255_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_back_eq_inc_wrap_clear_255_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_back_eq_replace_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_back_ge_invert_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_back_ge_keep_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_back_gt_dec_wrap_clear_1_ref_2_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_back_le_dec_clamp_clear_1_ref_2_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_back_lt_dec_clamp_clear_1_ref_2_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_back_lt_inc_clamp_clear_254_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_back_lt_invert_clear_102_ref_101_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_back_xf_dec_clamp_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_eq_inc_clamp_clear_255_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_eq_invert_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_eq_zero_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_ge_invert_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_ge_replace_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_gt_inc_wrap_clear_255_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_le_inc_wrap_clear_254_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_le_keep_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_lt_dec_wrap_clear_1_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_lt_zero_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_xf_dec_wrap_clear_0_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_dual_xf_inc_wrap_clear_255_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_single_eq_dec_clamp_clear_0_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_single_gt_dec_clamp_clear_0_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_single_le_zero_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_single_lt_dec_clamp_clear_1_ref_2_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_single_lt_invert_clear_102_ref_101_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_both_single_xf_keep_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_front_ge_invert_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_front_gt_dec_clamp_clear_0_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_front_gt_dec_clamp_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_front_gt_invert_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_front_le_keep_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_front_le_zero_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_front_lt_inc_clamp_clear_254_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_front_lt_inc_wrap_clear_254_ref_253_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_front_lt_keep_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.before_good_static.stencil_state_face_front_xf_dec_clamp_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_back_eq_invert_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_back_eq_replace_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_back_gt_dec_clamp_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_back_gt_dec_wrap_clear_1_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_back_gt_inc_wrap_clear_254_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_back_lt_dec_wrap_clear_0_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_back_lt_invert_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_back_xf_invert_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_back_xf_zero_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_dual_eq_dec_clamp_clear_1_ref_2_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_dual_gt_dec_wrap_clear_1_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_dual_gt_inc_wrap_clear_254_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_dual_gt_invert_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_dual_gt_keep_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_dual_le_inc_wrap_clear_254_ref_253_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_dual_lt_dec_clamp_clear_1_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_dual_lt_dec_wrap_clear_0_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_dual_xf_dec_wrap_clear_1_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_single_eq_dec_clamp_clear_1_ref_2_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_single_eq_inc_clamp_clear_254_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_single_eq_invert_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_single_ge_keep_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_single_ge_replace_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_single_gt_replace_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_single_lt_dec_clamp_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_single_lt_dec_clamp_clear_1_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_both_single_xf_dec_wrap_clear_1_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_front_eq_dec_clamp_clear_1_ref_2_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_front_le_zero_clear_102_ref_101_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.between_pipelines.stencil_state_face_front_lt_inc_wrap_clear_254_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.depth_test_enable,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_back_eq_inc_clamp_clear_254_ref_253_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_back_eq_inc_wrap_clear_255_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_back_ge_invert_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_back_ge_replace_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_back_le_keep_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_back_le_zero_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_back_xf_dec_clamp_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_back_xf_dec_wrap_clear_0_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_dual_eq_dec_clamp_clear_0_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_dual_eq_inc_clamp_clear_255_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_dual_eq_inc_wrap_clear_255_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_dual_gt_inc_wrap_clear_255_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_dual_lt_dec_clamp_clear_1_ref_2_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_dual_lt_dec_wrap_clear_1_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_dual_xf_replace_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_single_ge_invert_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_single_gt_dec_clamp_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_single_gt_dec_wrap_clear_1_ref_2_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_single_lt_inc_clamp_clear_255_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_single_lt_invert_clear_102_ref_101_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_single_lt_zero_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_single_xf_dec_clamp_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_single_xf_inc_clamp_clear_254_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_both_single_xf_keep_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_eq_inc_clamp_clear_255_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_eq_inc_wrap_clear_254_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_eq_inc_wrap_clear_255_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_eq_zero_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_ge_inc_clamp_clear_255_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_ge_keep_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_gt_dec_clamp_clear_0_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_gt_invert_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_lt_replace_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_xf_dec_clamp_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_state_face_front_xf_inc_wrap_clear_255_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.cmd_buffer_start.stencil_test_enable,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_eq_dec_wrap_clear_1_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_eq_zero_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_ge_dec_clamp_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_ge_dec_clamp_clear_1_ref_2_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_ge_dec_wrap_clear_0_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_ge_inc_clamp_clear_254_ref_253_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_ge_inc_wrap_clear_254_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_ge_zero_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_gt_replace_clear_102_ref_102_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_le_invert_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_xf_invert_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_xt_dec_clamp_clear_1_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_xt_dec_wrap_clear_0_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_xt_inc_wrap_clear_254_ref_253_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_xt_replace_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_back_xt_zero_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_eq_dec_clamp_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_ge_dec_clamp_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_gt_dec_wrap_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_gt_inc_wrap_clear_255_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_le_dec_clamp_clear_1_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_le_keep_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_xt_dec_clamp_clear_1_ref_2_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_xt_dec_wrap_clear_0_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_xt_inc_clamp_clear_255_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_xt_inc_wrap_clear_254_ref_253_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_xt_inc_wrap_clear_254_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_dual_xt_inc_wrap_clear_255_ref_255_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_eq_dec_clamp_clear_1_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_eq_dec_wrap_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_ge_dec_clamp_clear_1_ref_2_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_ge_inc_clamp_clear_254_ref_253_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_gt_inc_wrap_clear_255_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_le_dec_clamp_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_lt_inc_clamp_clear_255_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_lt_zero_clear_102_ref_101_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_xt_dec_wrap_clear_1_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_xt_invert_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_both_single_xt_keep_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_eq_dec_wrap_clear_1_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_eq_inc_clamp_clear_254_ref_255_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_eq_keep_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_eq_zero_clear_102_ref_103_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_ge_dec_clamp_clear_0_ref_1_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_ge_inc_wrap_clear_254_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_ge_zero_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_gt_inc_wrap_clear_254_ref_253_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_le_dec_clamp_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_le_dec_wrap_clear_1_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_le_inc_wrap_clear_255_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_lt_dec_wrap_clear_1_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_lt_zero_clear_102_ref_101_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stencil_state_face_front_xt_inc_clamp_clear_254_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_dynamic.stride_with_offset_and_padding,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_back_eq_dec_wrap_clear_0_ref_0_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_back_ge_replace_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_back_gt_keep_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_back_gt_zero_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_back_le_inc_wrap_clear_254_ref_253_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_back_lt_dec_clamp_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_back_lt_dec_wrap_clear_0_ref_0_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_back_xf_dec_wrap_clear_1_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_dual_eq_invert_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_dual_ge_inc_wrap_clear_255_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_dual_gt_inc_clamp_clear_255_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_dual_gt_invert_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_dual_lt_dec_clamp_clear_0_ref_1_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_dual_xf_keep_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_single_eq_inc_wrap_clear_254_ref_253_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_single_eq_replace_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_single_gt_invert_clear_102_ref_103_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_single_le_inc_clamp_clear_254_ref_254_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_single_le_inc_wrap_clear_254_ref_253_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_both_single_xf_inc_clamp_clear_255_ref_254_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_front_eq_replace_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_front_eq_zero_clear_102_ref_102_depthfail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_front_gt_keep_clear_102_ref_101_fail,Fail
+dEQP-VK.pipeline.extended_dynamic_state.two_draws_static.stencil_state_face_front_xf_inc_wrap_clear_254_ref_255_fail,Fail
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint32.triangle_strip_with_adjacency,Fail
+dEQP-VK.pipeline.input_assembly.primitive_topology.index_type_uint8.line_strip_with_adjacency,Fail
+dEQP-VK.pipeline.multisample.sampled_image.64x64_1.r16g16_sint.samples_8,Fail
+dEQP-VK.pipeline.multisample.sampled_image.64x64_1.r8g8b8a8_unorm.samples_8,Fail
+dEQP-VK.pipeline.multisample.sampled_image.64x64_4.r32g32b32a32_sfloat.samples_2,Fail
+dEQP-VK.pipeline.multisample.sampled_image.79x31_1.r32_uint.samples_2,Fail
+dEQP-VK.pipeline.multisample.sampled_image.79x31_1.r8g8b8a8_unorm.samples_8,Fail
+dEQP-VK.pipeline.multisample.sampled_image.79x31_4.r16g16_sint.samples_8,Fail
+dEQP-VK.pipeline.multisample.sampled_image.79x31_4.r32_uint.samples_2,Fail
+dEQP-VK.pipeline.multisample.sampled_image.79x31_4.r8g8b8a8_unorm.samples_8,Fail
+dEQP-VK.pipeline.render_to_image.core.2d_array.huge.width_height.r8g8b8a8_unorm,Fail
+dEQP-VK.pipeline.render_to_image.core.cube.small.r8g8b8a8_unorm,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_decc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_decc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_decw.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_decw.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_decw.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_decw.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_incc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_incc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_incc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_keep.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_repl.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_repl.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_zero.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_zero.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_zero.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_zero.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decc.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_decc.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_decc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_decw.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_decw.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_incc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_incc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_inv.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_inv.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_keep.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_keep.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_wrap.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_wrap.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_wrap.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_zero.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_zero.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_decw.pass_zero.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_decc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_decc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_decw.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_decw.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_decw.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_decw.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_inv.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_inv.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_keep.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_keep.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_zero.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_zero.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_incc.pass_zero.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_decc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_decw.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_incc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_incc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_inv.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_keep.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_repl.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_repl.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_repl.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_repl.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_wrap.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_wrap.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_inv.pass_zero.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_decc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_decc.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_decc.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_decc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_decc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_decc.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_decw.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_decw.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_decw.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_incc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_incc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_inv.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_inv.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_keep.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_repl.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_repl.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_repl.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_repl.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_wrap.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_zero.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_zero.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_keep.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_decc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_decw.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_decw.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_incc.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_incc.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_incc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_incc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_inv.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_inv.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_keep.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_repl.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_repl.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_repl.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_wrap.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_zero.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_zero.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_repl.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_decc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_decw.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_incc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_incc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_inv.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_keep.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_keep.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_repl.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_wrap.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_zero.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_zero.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_zero.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_wrap.pass_zero.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_decc.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_decw.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_decw.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_decw.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_decw.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_incc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_incc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_incc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_keep.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_keep.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_keep.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_repl.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_repl.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_wrap.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_zero.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint_separate_layouts.states.fail_zero.pass_zero.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_decc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_decc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_decw.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_decw.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_incc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_incc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_inv.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_keep.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_repl.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_repl.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_repl.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_wrap.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_zero.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_zero.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decc.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_decc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_decc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_decw.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_decw.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_incc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_inv.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_keep.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_keep.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_keep.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_repl.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_wrap.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_zero.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_decw.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_decc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_decc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_decw.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_decw.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_incc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_incc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_keep.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_wrap.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_wrap.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_zero.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_zero.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_zero.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_incc.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_decc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_decc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_decc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_decw.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_decw.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_incc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_incc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_inv.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_keep.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_keep.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_keep.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_repl.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_repl.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_wrap.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_wrap.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_wrap.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_wrap.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_zero.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_zero.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_zero.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_inv.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_decc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_decc.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_decc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_decc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_decc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_decw.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_incc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_incc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_incc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_inv.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_keep.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_keep.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_keep.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_keep.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_keep.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_repl.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_repl.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_repl.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_wrap.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_wrap.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_zero.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_zero.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_zero.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_keep.pass_zero.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_decc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_decc.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_decc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_incc.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_inv.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_inv.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_inv.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_keep.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_keep.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_keep.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_keep.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_repl.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_repl.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_zero.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_zero.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_zero.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_repl.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_decc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_decc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_decw.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_decw.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_incc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_keep.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_keep.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_repl.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_repl.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_repl.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_zero.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_zero.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_wrap.pass_zero.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_decw.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_incc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_incc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_incc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_keep.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_keep.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_keep.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_keep.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_repl.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_repl.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_zero.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_zero.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_zero.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.fail_zero.pass_zero.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_decc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_decc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_decc.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_incc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_incc.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_inv.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_keep.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_repl.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_wrap.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_wrap.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_wrap.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decc.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_decc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_decw.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_decw.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_decw.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_incc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_inv.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_inv.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_inv.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_inv.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_repl.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_repl.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_repl.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_wrap.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_wrap.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_zero.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_zero.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_zero.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_zero.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_decw.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_decc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_decw.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_decw.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_keep.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_keep.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_repl.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_incc.pass_zero.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_decc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_decc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_decc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_decw.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_decw.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_decw.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_incc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_incc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_incc.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_incc.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_incc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_inv.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_inv.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_repl.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_wrap.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_wrap.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_zero.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_inv.pass_zero.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_decc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_decc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_decw.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_decw.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_inv.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_inv.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_inv.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_keep.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_keep.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_keep.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_wrap.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_wrap.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_wrap.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_zero.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_keep.pass_zero.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_decw.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_incc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_incc.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_incc.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_incc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_inv.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_keep.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_keep.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_repl.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_wrap.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_repl.pass_zero.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_decc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_decw.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_decw.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_decw.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_decw.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_incc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_incc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_incc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_keep.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_keep.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_repl.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_repl.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_wrap.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_wrap.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_zero.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_zero.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_zero.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_wrap.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_decc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_decc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_decc.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_decw.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_decw.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_decw.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_incc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_incc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_inv.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_inv.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_keep.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_keep.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_repl.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_repl.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_repl.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_wrap.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_wrap.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_wrap.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_zero.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_zero.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_zero.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_zero.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint_separate_layouts.states.fail_zero.pass_zero.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_decc.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_decc.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_decw.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_decw.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_decw.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_decw.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_decw.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_incc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_inv.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_keep.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_keep.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_repl.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_repl.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_repl.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_repl.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_repl.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_wrap.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_wrap.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_zero.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_zero.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_zero.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decc.pass_zero.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_decc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_decc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_decc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_decw.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_decw.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_decw.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_incc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_incc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_incc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_incc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_inv.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_inv.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_keep.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_repl.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_repl.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_wrap.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_wrap.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_decw.pass_zero.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_decc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_decc.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_decc.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_decw.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_decw.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_incc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_incc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_inv.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_inv.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_keep.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_keep.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_keep.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_keep.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_repl.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_repl.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_wrap.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_wrap.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_incc.pass_zero.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_decc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_decw.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_incc.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_incc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_inv.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_inv.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_inv.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_keep.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_keep.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_repl.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_repl.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_wrap.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_wrap.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_zero.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_inv.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_decc.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_decc.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_decw.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_decw.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_incc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_incc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_incc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_incc.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_inv.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_inv.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_inv.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_keep.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_keep.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_repl.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_repl.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_repl.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_repl.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_wrap.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_wrap.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_keep.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_decc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_decc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_decc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_decw.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_decw.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_incc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_incc.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_incc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_inv.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_keep.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_keep.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_keep.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_keep.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_wrap.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_wrap.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_wrap.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_zero.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_repl.pass_zero.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_decc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_decw.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_incc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_incc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_inv.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_inv.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_keep.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_keep.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_keep.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_repl.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_repl.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_repl.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_repl.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_repl.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_repl.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_wrap.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_wrap.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_zero.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_zero.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_wrap.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_decc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_decc.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_decc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_decw.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_decw.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_decw.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_decw.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_incc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_keep.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_keep.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_repl.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_repl.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_wrap.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_wrap.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_wrap.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_zero.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_zero.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.fail_zero.pass_zero.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_decc.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_decc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_decc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_decw.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_decw.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_incc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_incc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_incc.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_incc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_inv.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_inv.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_inv.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_inv.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_repl.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_repl.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_repl.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_wrap.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_wrap.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_wrap.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_zero.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_zero.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_zero.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decc.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_decc.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_decc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_decc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_decw.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_decw.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_incc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_incc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_inv.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_keep.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_wrap.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_zero.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_decw.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_decc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_decc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_decc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_decc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_decc.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_decw.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_inv.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_inv.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_inv.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_keep.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_keep.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_repl.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_wrap.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_wrap.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_wrap.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_wrap.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_zero.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_zero.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_incc.pass_zero.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_decc.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_decc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_decc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_decw.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_decw.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_incc.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_incc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_inv.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_repl.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_repl.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_wrap.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_wrap.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_wrap.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_wrap.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_inv.pass_zero.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_decc.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_decc.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_decc.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_decw.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_decw.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_incc.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_incc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_incc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_incc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_inv.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_inv.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_inv.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_keep.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_keep.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_repl.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_repl.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_repl.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_wrap.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_keep.pass_zero.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_decc.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_decw.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_decw.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_decw.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_incc.dfail_incc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_incc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_incc.dfail_wrap.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_incc.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_inv.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_repl.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_repl.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_wrap.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_repl.pass_zero.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_decc.dfail_decw.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_decc.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_decc.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_decw.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_decw.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_inv.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_keep.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_keep.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_wrap.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_wrap.pass_zero.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_decc.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_decc.dfail_decw.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_decc.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_decc.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_decw.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_decw.dfail_zero.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_decw.dfail_zero.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_incc.dfail_inv.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_inv.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_inv.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_keep.dfail_inv.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_keep.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_repl.dfail_decc.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_repl.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_repl.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_repl.dfail_wrap.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_wrap.dfail_decc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_wrap.dfail_incc.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_wrap.dfail_keep.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_wrap.dfail_repl.comp_less,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_zero.dfail_keep.comp_not_equal,Fail
+dEQP-VK.pipeline.stencil.format.s8_uint.states.fail_zero.pass_zero.dfail_repl.comp_not_equal,Fail
+dEQP-VK.pipeline.timestamp.misc_tests.consistent_results,Fail
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_32_wait_queue_without_availability_draw_triangles,Fail
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_104_without_availability,Fail
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_16_with_availability,Fail
+dEQP-VK.transform_feedback.simple.query_copy_0_4_32bits,Fail
+dEQP-VK.transform_feedback.simple.query_copy_0_509_32bits,Fail
+dEQP-VK.transform_feedback.simple.query_copy_1_61_32bits,Fail
+dEQP-VK.transform_feedback.simple.query_copy_3_509_32bits,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/qualcomm-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/qualcomm-fails.txt
new file mode 100644
index 0000000..49f59a8
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/qualcomm-fails.txt
@@ -0,0 +1,15 @@
+dEQP-VK.info.instance_extensions,Fail
+
+dEQP-VK.spirv_assembly.instruction.compute.float_controls.fp16.generated_args.denorm_nclamp_nan_flush_to_zero_nostorage,Fail
+dEQP-VK.spirv_assembly.instruction.compute.float_controls.fp16.generated_args.denorm_nmax_nan_flush_to_zero_nostorage,Fail
+dEQP-VK.spirv_assembly.instruction.compute.float_controls.fp16.generated_args.denorm_nmin_nan_flush_to_zero_nostorage,Fail
+dEQP-VK.spirv_assembly.instruction.graphics.float_controls.fp16.generated_args.denorm_nclamp_nan_flush_to_zero_nostorage_frag,Fail
+dEQP-VK.spirv_assembly.instruction.graphics.float_controls.fp16.generated_args.denorm_nclamp_nan_flush_to_zero_nostorage_vert,Fail
+dEQP-VK.spirv_assembly.instruction.graphics.float_controls.fp16.generated_args.denorm_nmax_nan_flush_to_zero_nostorage_frag,Fail
+dEQP-VK.spirv_assembly.instruction.graphics.float_controls.fp16.generated_args.denorm_nmax_nan_flush_to_zero_nostorage_vert,Fail
+dEQP-VK.spirv_assembly.instruction.graphics.float_controls.fp16.generated_args.denorm_nmin_nan_flush_to_zero_nostorage_frag,Fail
+dEQP-VK.spirv_assembly.instruction.graphics.float_controls.fp16.generated_args.denorm_nmin_nan_flush_to_zero_nostorage_vert,Fail
+
+# This test is buggy in the CTS and should be fixed on the next CTS release.
+# https://gitlab.khronos.org/Tracker/vk-gl-cts/-/issues/3505
+dEQP-VK.subgroups.multiple_dispatches.uniform_subgroup_size,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/qualcomm-flakes.txt b/client/site_tests/graphics_parallel_dEQP/boards/qualcomm-flakes.txt
new file mode 100644
index 0000000..1951f9d
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/qualcomm-flakes.txt
@@ -0,0 +1,3 @@
+# https://gitlab.khronos.org/Tracker/vk-gl-cts/-/issues/3582
+dEQP-VK.image.sample_texture.128_bit_compressed_format_cubemap
+dEQP-VK.image.sample_texture.64_bit_compressed_format_cubemap
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/qualcomm-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/qualcomm-skips.txt
new file mode 100644
index 0000000..e43c64f
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/qualcomm-skips.txt
@@ -0,0 +1,11 @@
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
+
+# The flush_finish tests throw warnings when behavior doesn't match
+# their expectations, but tiling GPUs don't behave the way the tests'
+# timing setup expects, so the tests take exceptionally long while just
+# wasting developer time.  Being warnings-free is not required for
+# conformance.
+dEQP-GLES.*.functional.flush_finish.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/rogue-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/rogue-skips.txt
new file mode 100644
index 0000000..a100b95
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/rogue-skips.txt
@@ -0,0 +1,14 @@
+# These all time out after 60 seconds, some intermittently. b/232020872
+dEQP-GLES31.functional.debug.negative_coverage.*
+
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
+
+# The flush_finish tests throw warnings when behavior doesn't match
+# their expectations, but tiling GPUs don't behave the way the tests'
+# timing setup expects, so the tests take exceptionally long while just
+# wasting developer time.  Being warnings-free is not required for
+# conformance.
+dEQP-GLES.*.functional.flush_finish.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/skylake-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/skylake-fails.txt
new file mode 100644
index 0000000..be1cf04
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/skylake-fails.txt
@@ -0,0 +1,28 @@
+# Possibly related to https://gitlab.freedesktop.org/mesa/mesa/-/issues/1478
+dEQP-VK.glsl.builtin.precision.cos.highp.scalar,Fail
+dEQP-VK.glsl.builtin.precision.cos.mediump.scalar,Fail
+dEQP-VK.glsl.builtin.precision.cos.highp.vec2,Fail
+dEQP-VK.glsl.builtin.precision.cos.mediump.vec2,Fail
+dEQP-VK.glsl.builtin.precision.cos.highp.vec3,Fail
+dEQP-VK.glsl.builtin.precision.cos.mediump.vec3,Fail
+dEQP-VK.glsl.builtin.precision.cos.highp.vec4,Fail
+dEQP-VK.glsl.builtin.precision.cos.mediump.vec4,Fail
+dEQP-VK.glsl.builtin.precision.sin.highp.scalar,Fail
+dEQP-VK.glsl.builtin.precision.sin.mediump.scalar,Fail
+dEQP-VK.glsl.builtin.precision.sin.highp.vec2,Fail
+dEQP-VK.glsl.builtin.precision.sin.mediump.vec2,Fail
+dEQP-VK.glsl.builtin.precision.sin.highp.vec3,Fail
+dEQP-VK.glsl.builtin.precision.sin.mediump.vec3,Fail
+dEQP-VK.glsl.builtin.precision.sin.highp.vec4,Fail
+dEQP-VK.glsl.builtin.precision.sin.mediump.vec4,Fail
+
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/2833
+dEQP-VK.rasterization.interpolation_multisample_2_bit.lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_2_bit.non_strict_lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_4_bit.lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_4_bit.non_strict_lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_8_bit.lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_8_bit.non_strict_lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_16_bit.lines_wide,Fail
+dEQP-VK.rasterization.interpolation_multisample_16_bit.non_strict_lines_wide,Fail
+
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/skylake-flakes.txt b/client/site_tests/graphics_parallel_dEQP/boards/skylake-flakes.txt
new file mode 100644
index 0000000..59b1dfd
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/skylake-flakes.txt
@@ -0,0 +1,29 @@
+# Note that Skylake boards (asuka, at least) can have a GT1 and GT2
+# variant, and this can impact the set of tests that fail, but we
+# don't distinguish them in testing.  So, some failing cases end up
+# listed here as flakes even if they might reliably pass or fail on
+# a specific variant, because the results will be flaky depending
+# on ChromeOS infra choices.
+
+# These almost always fail (though occasionally flake to a Pass) on
+# asuka GT1 and lars GT1
+dEQP-GLES31.functional.geometry_shading.layered.render_with_default_layer_2d_array
+dEQP-GLES31.functional.geometry_shading.layered.render_with_default_layer_cubemap
+dEQP-GLES31.functional.geometry_shading.layered.render_with_default_layer_2d_multisample_array
+dEQP-GLES31.functional.geometry_shading.layered.render_with_default_layer_3d
+
+# Seen on both asuka (Skylake U, GT1 510 and GT2 520) and sentry ().  Not seen yet on cave or caroline (Skylake Y, GT2 515).
+dEQP-VK.subgroups.ballot_broadcast.compute.subgroupbroadcast_f16vec4
+dEQP-VK.subgroups.ballot_broadcast.compute.subgroupbroadcast_i16vec4
+dEQP-VK.subgroups.ballot_broadcast.compute.subgroupbroadcast_i8vec4
+dEQP-VK.subgroups.ballot_broadcast.compute.subgroupbroadcast_u16vec4
+dEQP-VK.subgroups.ballot_broadcast.compute.subgroupbroadcast_u8vec4
+dEQP-VK.subgroups.*.compute.subgroupclustered.*64vec4_requiredsubgroupsize
+dEQP-VK.subgroups.*.compute.subgroupclustered.*dvec4_requiredsubgroupsize
+
+# Seen on asuka GT1 and sentry, at least.
+dEQP-GLES31.functional.image_load_store.2d_array.atomic.exchange_r32f_return_value
+dEQP-GLES31.functional.image_load_store.3d.atomic.exchange_r32f_return_value
+dEQP-GLES31.functional.image_load_store.cube.atomic.exchange_r32f_return_value
+
+dEQP-VK.binding_model.descriptorset_random.sets8.noarray.ubolimitlow.sbolimitlow.sampledimglow.outimgtexlow.iublimitlow.uab.frag.noia.0
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/skylake-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/skylake-skips.txt
new file mode 100644
index 0000000..e330a2d
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/skylake-skips.txt
@@ -0,0 +1,10 @@
+# https://gitlab.freedesktop.org/mesa/mesa/-/issues/4641
+dEQP-VK.ssbo.phys.layout.random.16bit.scalar.13
+
+dEQP-VK.binding_model.buffer_device_address.set3.depth3.basessbo.convertcheck.*
+dEQP-VK.binding_model.buffer_device_address.set3.depth3.baseubo.convertcheck.*
+
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/stoney-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/stoney-fails.txt
new file mode 100644
index 0000000..0e71f12
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/stoney-fails.txt
@@ -0,0 +1,67 @@
+dEQP-VK.api.image_clearing.core.clear_depth_stencil_attachment.multiple_layers.d16_unorm_s8_uint_1x33,Fail
+dEQP-VK.api.image_clearing.core.clear_depth_stencil_attachment.multiple_layers.d16_unorm_s8_uint_separate_layouts_stencil_1x33,Fail
+dEQP-VK.api.image_clearing.core.clear_depth_stencil_image.multiple_layers.d16_unorm_s8_uint_1x33,Fail
+dEQP-VK.api.image_clearing.core.clear_depth_stencil_image.remaining_array_layers.d16_unorm_s8_uint_1x33,Fail
+dEQP-VK.api.image_clearing.core.clear_depth_stencil_image.remaining_array_layers_twostep.d16_unorm_s8_uint_1x33,Fail
+dEQP-VK.api.image_clearing.core.clear_depth_stencil_image.remaining_array_layers_twostep.d16_unorm_s8_uint_separate_layouts_stencil_1x33,Fail
+dEQP-VK.api.image_clearing.core.clear_depth_stencil_image.single_layer.d16_unorm_s8_uint_1x33,Fail
+dEQP-VK.api.image_clearing.dedicated_allocation.clear_depth_stencil_attachment.single_layer.d16_unorm_s8_uint_1x33,Fail
+dEQP-VK.api.image_clearing.dedicated_allocation.clear_depth_stencil_attachment.single_layer.d16_unorm_s8_uint_separate_layouts_stencil_1x33,Fail
+dEQP-VK.api.image_clearing.dedicated_allocation.clear_depth_stencil_image.multiple_layers.d16_unorm_s8_uint_1x33_multiple_subresourcerange,Fail
+dEQP-VK.api.image_clearing.dedicated_allocation.clear_depth_stencil_image.multiple_layers.d16_unorm_s8_uint_separate_layouts_stencil_1x33,Fail
+dEQP-VK.api.image_clearing.dedicated_allocation.clear_depth_stencil_image.remaining_array_layers.d16_unorm_s8_uint_1x33_multiple_subresourcerange,Fail
+dEQP-VK.api.image_clearing.dedicated_allocation.clear_depth_stencil_image.remaining_array_layers.d16_unorm_s8_uint_separate_layouts_stencil_1x33,Fail
+dEQP-VK.api.image_clearing.dedicated_allocation.clear_depth_stencil_image.single_layer.d16_unorm_s8_uint_1x33_multiple_subresourcerange,Fail
+dEQP-VK.api.image_clearing.dedicated_allocation.clear_depth_stencil_image.single_layer.d16_unorm_s8_uint_separate_layouts_stencil_1x33,Fail
+dEQP-VK.draw.multiple_interpolation.structured.4_samples,Fail
+dEQP-VK.pipeline.timestamp.calibrated.calibration_test,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_12.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_20.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_252.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_260.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_39.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_4.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_12.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_20.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_252.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_260.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_39.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_4.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_12.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_20.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_252.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_260.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_39.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_4.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_12.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_20.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_252.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_260.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_39.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rg32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_4.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_12.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_20.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_252.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_260.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_39.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_4.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_12.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_20.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_252.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_260.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_39.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32i.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_4.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_12.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_20.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_252.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_260.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_39.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.dontunroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_4.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_12.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_20.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_252.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_260.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_39.samples_1.1d.vert,Fail
+dEQP-VK.robustness.robustness2.bind.notemplate.rgba32ui.unroll.nonvolatile.vertex_attribute_fetch.no_fmt_qual.len_4.samples_1.1d.vert,Fail
+dEQP-VK.spirv_assembly.instruction.compute.float32.comparison_1.modfstruct,Crash
+dEQP-VK.spirv_assembly.instruction.compute.float32.comparison_3.modfstruct,Crash
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/stoney-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/stoney-skips.txt
new file mode 100644
index 0000000..6cba30d
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/stoney-skips.txt
@@ -0,0 +1,4 @@
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/tigerlake-fails.txt b/client/site_tests/graphics_parallel_dEQP/boards/tigerlake-fails.txt
new file mode 100644
index 0000000..8ad940d
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/tigerlake-fails.txt
@@ -0,0 +1,42 @@
+dEQP-VK.api.tooling_info.validate_getter,Crash
+dEQP-VK.api.tooling_info.validate_tools_properties,Crash
+dEQP-VK.drm_format_modifiers.export_import.a4r4g4b4_unorm_pack16,Fail
+dEQP-VK.drm_format_modifiers.export_import.b4g4r4a4_unorm_pack16,Fail
+dEQP-VK.drm_format_modifiers.export_import.r16_sfloat,Fail
+dEQP-VK.drm_format_modifiers.export_import.r16_unorm,Fail
+dEQP-VK.drm_format_modifiers.export_import.r4g4b4a4_unorm_pack16,Fail
+dEQP-VK.drm_format_modifiers.export_import.r8_unorm,Fail
+dEQP-VK.info.instance_extensions,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_packed_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_packed_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_packed_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_packed_uu_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.all_uu_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_packed_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_packed_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_packed_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_packed_uu_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.limits_uu_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_packed_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_packed_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_packed_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_packed_uu_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_ss_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_su_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_us_v4i8_out32,Fail
+dEQP-VK.spirv_assembly.instruction.compute.opudotaccsatkhr.small_uu_v4i8_out32,Fail
+
+# This test is buggy in the CTS and should be fixed on the next CTS release.
+# https://gitlab.khronos.org/Tracker/vk-gl-cts/-/issues/3505
+dEQP-VK.subgroups.multiple_dispatches.uniform_subgroup_size,Fail
+
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_allow_varying_subgroup_size_flags_spirv16,Fail
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_allow_varying_subgroup_size_spirv16,Fail
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_flags_spirv16,Fail
+dEQP-VK.subgroups.size_control.compute.require_full_subgroups_spirv16,Fail
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/tigerlake-flakes.txt b/client/site_tests/graphics_parallel_dEQP/boards/tigerlake-flakes.txt
new file mode 100644
index 0000000..a6c25b1
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/tigerlake-flakes.txt
@@ -0,0 +1,4 @@
+dEQP-VK.api.version_check.version
+dEQP-VK.drm_format_modifiers.export_import.a4r4g4b4_unorm_pack16_ext
+dEQP-VK.pipeline.extended_dynamic_state.before_draw.stencil_state_face_front_le_inc_wrap_clear_254_ref_253_depthfail
+dEQP-VK.ssbo.phys.layout.random.16bit.scalar.13
diff --git a/client/site_tests/graphics_parallel_dEQP/boards/tigerlake-skips.txt b/client/site_tests/graphics_parallel_dEQP/boards/tigerlake-skips.txt
new file mode 100644
index 0000000..6cba30d
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/boards/tigerlake-skips.txt
@@ -0,0 +1,4 @@
+# These tests should not exist. They are from renderpass-with-dynamic-rendering.txt which should
+# have been removed upstream.
+dEQP-VK.renderpass_with_dynamic_rendering.dedicated_allocation.*
+dEQP-VK.renderpass_with_dynamic_rendering.suballocation.*
diff --git a/client/site_tests/graphics_parallel_dEQP/control.gles2 b/client/site_tests/graphics_parallel_dEQP/control.gles2
new file mode 100644
index 0000000..a4d4bc3
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/control.gles2
@@ -0,0 +1,30 @@
+# Copyright 2015-2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Please do not edit this file! It has been created by generate_controlfiles.py.
+
+PY_VERSION = 3
+NAME = 'graphics_parallel_dEQP.gles2'
+AUTHOR = 'chromeos-gfx'
+PURPOSE = 'Run the drawElements Quality Program test suite with deqp-runner.'
+CRITERIA = 'All of the individual tests must pass unless marked as known failures.'
+ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system, suite:pvs-graphics'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'graphics'
+TEST_TYPE = 'client'
+MAX_RESULT_SIZE_KB = 131072
+EXTENDED_TIMEOUT = 86400
+DOC = """
+This test runs the drawElements Quality Program test suite.
+"""
+job.run_test('graphics_parallel_dEQP',
+             tag = 'gles2',
+             opts = args + [
+                 'api=gles2',
+                 'caselist=/usr/local/deqp/caselists/gles2.txt',
+                 'perf_failure_description=Failures_GLES2',
+                 'shard_number=0',
+                 'shard_count=1'
+             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_parallel_dEQP/control.gles3 b/client/site_tests/graphics_parallel_dEQP/control.gles3
new file mode 100644
index 0000000..1d01b1b
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/control.gles3
@@ -0,0 +1,30 @@
+# Copyright 2015-2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Please do not edit this file! It has been created by generate_controlfiles.py.
+
+PY_VERSION = 3
+NAME = 'graphics_parallel_dEQP.gles3'
+AUTHOR = 'chromeos-gfx'
+PURPOSE = 'Run the drawElements Quality Program test suite with deqp-runner.'
+CRITERIA = 'All of the individual tests must pass unless marked as known failures.'
+ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system, suite:pvs-graphics'
+TIME = 'LONG'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'graphics'
+TEST_TYPE = 'client'
+MAX_RESULT_SIZE_KB = 131072
+EXTENDED_TIMEOUT = 86400
+DOC = """
+This test runs the drawElements Quality Program test suite.
+"""
+job.run_test('graphics_parallel_dEQP',
+             tag = 'gles3',
+             opts = args + [
+                 'api=gles3',
+                 'caselist=/usr/local/deqp/caselists/gles3.txt',
+                 'perf_failure_description=Failures_GLES3',
+                 'shard_number=0',
+                 'shard_count=1'
+             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_parallel_dEQP/control.gles31 b/client/site_tests/graphics_parallel_dEQP/control.gles31
new file mode 100644
index 0000000..c31bceb
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/control.gles31
@@ -0,0 +1,30 @@
+# Copyright 2015-2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Please do not edit this file! It has been created by generate_controlfiles.py.
+
+PY_VERSION = 3
+NAME = 'graphics_parallel_dEQP.gles31'
+AUTHOR = 'chromeos-gfx'
+PURPOSE = 'Run the drawElements Quality Program test suite with deqp-runner.'
+CRITERIA = 'All of the individual tests must pass unless marked as known failures.'
+ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system, suite:pvs-graphics'
+TIME = 'LONG'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'graphics'
+TEST_TYPE = 'client'
+MAX_RESULT_SIZE_KB = 131072
+EXTENDED_TIMEOUT = 86400
+DOC = """
+This test runs the drawElements Quality Program test suite.
+"""
+job.run_test('graphics_parallel_dEQP',
+             tag = 'gles31',
+             opts = args + [
+                 'api=gles31',
+                 'caselist=/usr/local/deqp/caselists/gles31.txt',
+                 'perf_failure_description=Failures_GLES31',
+                 'shard_number=0',
+                 'shard_count=1'
+             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_parallel_dEQP/control.vk.0 b/client/site_tests/graphics_parallel_dEQP/control.vk.0
new file mode 100644
index 0000000..5523b00
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/control.vk.0
@@ -0,0 +1,30 @@
+# Copyright 2015-2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Please do not edit this file! It has been created by generate_controlfiles.py.
+
+PY_VERSION = 3
+NAME = 'graphics_parallel_dEQP.vk.0'
+AUTHOR = 'chromeos-gfx'
+PURPOSE = 'Run the drawElements Quality Program test suite with deqp-runner.'
+CRITERIA = 'All of the individual tests must pass unless marked as known failures.'
+ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system'
+TIME = 'LONG'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'graphics'
+TEST_TYPE = 'client'
+MAX_RESULT_SIZE_KB = 131072
+EXTENDED_TIMEOUT = 86400
+DOC = """
+This test runs the drawElements Quality Program test suite.
+"""
+job.run_test('graphics_parallel_dEQP',
+             tag = 'vk',
+             opts = args + [
+                 'api=vk',
+                 'caselist=/usr/local/deqp/caselists/vk.txt',
+                 'perf_failure_description=Failures_VK',
+                 'shard_number=0',
+                 'shard_count=4'
+             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_parallel_dEQP/control.vk.1 b/client/site_tests/graphics_parallel_dEQP/control.vk.1
new file mode 100644
index 0000000..4ee66e6
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/control.vk.1
@@ -0,0 +1,30 @@
+# Copyright 2015-2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Please do not edit this file! It has been created by generate_controlfiles.py.
+
+PY_VERSION = 3
+NAME = 'graphics_parallel_dEQP.vk.1'
+AUTHOR = 'chromeos-gfx'
+PURPOSE = 'Run the drawElements Quality Program test suite with deqp-runner.'
+CRITERIA = 'All of the individual tests must pass unless marked as known failures.'
+ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system'
+TIME = 'LONG'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'graphics'
+TEST_TYPE = 'client'
+MAX_RESULT_SIZE_KB = 131072
+EXTENDED_TIMEOUT = 86400
+DOC = """
+This test runs the drawElements Quality Program test suite.
+"""
+job.run_test('graphics_parallel_dEQP',
+             tag = 'vk',
+             opts = args + [
+                 'api=vk',
+                 'caselist=/usr/local/deqp/caselists/vk.txt',
+                 'perf_failure_description=Failures_VK',
+                 'shard_number=1',
+                 'shard_count=4'
+             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_parallel_dEQP/control.vk.2 b/client/site_tests/graphics_parallel_dEQP/control.vk.2
new file mode 100644
index 0000000..e1ff157
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/control.vk.2
@@ -0,0 +1,30 @@
+# Copyright 2015-2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Please do not edit this file! It has been created by generate_controlfiles.py.
+
+PY_VERSION = 3
+NAME = 'graphics_parallel_dEQP.vk.2'
+AUTHOR = 'chromeos-gfx'
+PURPOSE = 'Run the drawElements Quality Program test suite with deqp-runner.'
+CRITERIA = 'All of the individual tests must pass unless marked as known failures.'
+ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system'
+TIME = 'LONG'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'graphics'
+TEST_TYPE = 'client'
+MAX_RESULT_SIZE_KB = 131072
+EXTENDED_TIMEOUT = 86400
+DOC = """
+This test runs the drawElements Quality Program test suite.
+"""
+job.run_test('graphics_parallel_dEQP',
+             tag = 'vk',
+             opts = args + [
+                 'api=vk',
+                 'caselist=/usr/local/deqp/caselists/vk.txt',
+                 'perf_failure_description=Failures_VK',
+                 'shard_number=2',
+                 'shard_count=4'
+             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_parallel_dEQP/control.vk.3 b/client/site_tests/graphics_parallel_dEQP/control.vk.3
new file mode 100644
index 0000000..1d2ddc1
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/control.vk.3
@@ -0,0 +1,30 @@
+# Copyright 2015-2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Please do not edit this file! It has been created by generate_controlfiles.py.
+
+PY_VERSION = 3
+NAME = 'graphics_parallel_dEQP.vk.3'
+AUTHOR = 'chromeos-gfx'
+PURPOSE = 'Run the drawElements Quality Program test suite with deqp-runner.'
+CRITERIA = 'All of the individual tests must pass unless marked as known failures.'
+ATTRIBUTES = 'suite:deqp, suite:graphics_per-day, suite:graphics_system'
+TIME = 'LONG'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'graphics'
+TEST_TYPE = 'client'
+MAX_RESULT_SIZE_KB = 131072
+EXTENDED_TIMEOUT = 86400
+DOC = """
+This test runs the drawElements Quality Program test suite.
+"""
+job.run_test('graphics_parallel_dEQP',
+             tag = 'vk',
+             opts = args + [
+                 'api=vk',
+                 'caselist=/usr/local/deqp/caselists/vk.txt',
+                 'perf_failure_description=Failures_VK',
+                 'shard_number=3',
+                 'shard_count=4'
+             ])
\ No newline at end of file
diff --git a/client/site_tests/graphics_parallel_dEQP/generate_controlfiles.py b/client/site_tests/graphics_parallel_dEQP/generate_controlfiles.py
new file mode 100644
index 0000000..85a6707
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/generate_controlfiles.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+"""
+This script generates autotest control files for dEQP. It supports:
+1) Generating control files for tests with Passing expectations.
+2) Generating control files to run tests that are not passing.
+3) Decomposing a test into shards. Ideally shard_count is chosen such that
+   each shard runs in less than 1 minute. Sharding mostly makes sense in
+   combination with "hasty".
+"""
+import os
+from collections import namedtuple
+# Use 'sudo pip install enum34' to install.
+from enum import Enum
+# Use 'sudo pip install jinja2' to install.
+from jinja2 import Template
+
+Test = namedtuple(
+        'Test',
+        'filter, suite, shards, time, tag, api, caselist, perf_failure_description'
+)
+
+ATTRIBUTES_PVS = ('suite:deqp, suite:graphics_per-day, suite:graphics_system, '
+                  'suite:pvs-graphics')
+ATTRIBUTES_DAILY = 'suite:deqp, suite:graphics_per-day, suite:graphics_system'
+
+
+class Suite(Enum):
+    none = 1
+    daily = 2
+    pvs = 3
+
+
+deqp_dir = '/usr/local/deqp'
+caselists = 'caselists'
+GLES2_FILE = os.path.join(deqp_dir, caselists, 'gles2.txt')
+GLES3_FILE = os.path.join(deqp_dir, caselists, 'gles3.txt')
+GLES31_FILE = os.path.join(deqp_dir, caselists, 'gles31.txt')
+VK_FILE = os.path.join(deqp_dir, caselists, 'vk.txt')
+
+tests = [
+        Test('dEQP-GLES2',
+             Suite.pvs,
+             shards=1,
+             time='MEDIUM',
+             tag='gles2',
+             api='gles2',
+             caselist=GLES2_FILE,
+             perf_failure_description='Failures_GLES2'),
+        Test('dEQP-GLES3',
+             Suite.pvs,
+             shards=1,
+             time='LONG',
+             tag='gles3',
+             api='gles3',
+             caselist=GLES3_FILE,
+             perf_failure_description='Failures_GLES3'),
+        Test('dEQP-GLES31',
+             Suite.pvs,
+             shards=1,
+             time='LONG',
+             tag='gles31',
+             api='gles31',
+             caselist=GLES31_FILE,
+             perf_failure_description='Failures_GLES31'),
+        Test('dEQP-VK',
+             Suite.daily,
+             shards=4,
+             time='LONG',
+             tag='vk',
+             api='vk',
+             caselist=VK_FILE,
+             perf_failure_description='Failures_VK'),
+]
+
+CONTROLFILE_TEMPLATE = Template("""\
+# Copyright 2015-2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Please do not edit this file! It has been created by generate_controlfiles.py.
+
+PY_VERSION = 3
+NAME = '{{testname}}'
+AUTHOR = 'chromeos-gfx'
+PURPOSE = 'Run the drawElements Quality Program test suite with deqp-runner.'
+CRITERIA = 'All of the individual tests must pass unless marked as known failures.'
+ATTRIBUTES = '{{attributes}}'
+TIME = '{{time}}'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'graphics'
+TEST_TYPE = 'client'
+MAX_RESULT_SIZE_KB = 131072
+EXTENDED_TIMEOUT = 86400
+DOC = \"\"\"
+This test runs the drawElements Quality Program test suite.
+\"\"\"
+job.run_test('graphics_parallel_dEQP',{% if tag != None %}
+             tag = '{{tag}}',{% endif %}
+             opts = args + [
+                 'api={{api}}',
+                 'caselist={{caselist}}',
+                 {% if perf_failure_description %}'perf_failure_description={{perf_failure_description}}',{% endif %}
+                 'shard_number={{shard}}',
+                 'shard_count={{shards}}'
+             ])""")
+
+
+def get_controlfilename(test, shard=0):
+    return 'control.%s' % get_name(test, shard)
+
+
+def get_attributes(test):
+    if test.suite == Suite.pvs:
+        return ATTRIBUTES_PVS
+    if test.suite == Suite.daily:
+        return ATTRIBUTES_DAILY
+    return ''
+
+
+def get_time(test):
+    return test.time
+
+
+def get_name(test, shard):
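+    # For illustration: the 'dEQP-VK' entry above (shards=4) yields the names
+    # 'vk.0' .. 'vk.3', while single-shard entries yield just 'gles2', etc.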
+    name = test.filter.replace('dEQP-', '', 1).lower()
+    if test.shards > 1:
+        name = '%s.%d' % (name, shard)
+    return name
+
+
+def get_testname(test, shard=0):
+    return 'graphics_parallel_dEQP.%s' % get_name(test, shard)
+
+
+def write_controlfile(filename, content):
+    print('Writing %s.' % filename)
+    with open(filename, 'w+') as f:
+        f.write(content)
+
+
+def write_controlfiles(test):
+    attributes = get_attributes(test)
+    time = get_time(test)
+
+    for shard in range(0, test.shards):
+        testname = get_testname(test, shard)
+        filename = get_controlfilename(test, shard)
+        content = CONTROLFILE_TEMPLATE.render(
+                testname=testname,
+                attributes=attributes,
+                time=time,
+                subset='Pass',
+                shard=shard,
+                shards=test.shards,
+                api=test.api,
+                caselist=test.caselist,
+                tag=test.tag,
+                perf_failure_description=test.perf_failure_description)
+        write_controlfile(filename, content)
+
+
+def main():
+    for test in tests:
+        write_controlfiles(test)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/client/site_tests/graphics_parallel_dEQP/graphics_parallel_dEQP.py b/client/site_tests/graphics_parallel_dEQP/graphics_parallel_dEQP.py
new file mode 100644
index 0000000..083de80
--- /dev/null
+++ b/client/site_tests/graphics_parallel_dEQP/graphics_parallel_dEQP.py
@@ -0,0 +1,287 @@
+# Lint as: python3
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import shutil
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros import cros_logging, service_stopper
+from autotest_lib.client.cros.graphics import graphics_utils
+
+
+class graphics_parallel_dEQP(graphics_utils.GraphicsTest):
+    """Run the drawElements Quality Program test suite."""
+    version = 1
+    _services = None
+    _shard_number = 0
+    _shard_count = 1
+    _board = None
+    _cpu_type = None
+    _gpu_type = None
+    _surface = None
+    _filter = None
+    _width = 256  # Use smallest width for which all tests run/pass.
+    _height = 256  # Use smallest height for which all tests run/pass.
+    _caselist = None
+    _log_path = None  # Location for detailed test output logs
+    _debug = False  # Analyze kernel messages.
+    _log_reader = None  # Reader to analyze (kernel) messages log.
+    _log_filter = re.compile('.* .* kernel:')  # kernel messages filter.
+    _env = None  # environment for test processes
+    _skips = []
+    _fails = []
+    _flakes = []
+    _api_helper = None
+    # We do not consider these results as failures.
+    TEST_RESULT_FILTER = [
+        'pass', 'notsupported', 'internalerror', 'qualitywarning',
+        'compatibilitywarning', 'skipped'
+    ]
+    _expectations_dir = '/usr/local/graphics/expectations/deqp/'
+
+    def initialize(self):
+        """Initialize the test."""
+        super().initialize()
+        self._api_helper = graphics_utils.GraphicsApiHelper()
+        self._board = utils.get_board()
+        self._cpu_type = utils.get_cpu_soc_family()
+        self._gpu_type = utils.get_gpu_family()
+
+        # deqp may depend on libraries that are present only on test images.
+        # Those libraries are installed in /usr/local.
+        self._env = os.environ.copy()
+        old_ld_path = self._env.get('LD_LIBRARY_PATH', '')
+        if old_ld_path:
+            self._env[
+                'LD_LIBRARY_PATH'] = '/usr/local/lib:/usr/local/lib64:' + old_ld_path
+        else:
+            self._env['LD_LIBRARY_PATH'] = '/usr/local/lib:/usr/local/lib64'
+
+        self._services = service_stopper.ServiceStopper(['ui', 'powerd'])
+        # Valid choices are fbo and pbuffer. The latter avoids dEQP assumptions.
+        self._surface = 'pbuffer'
+        self._services.stop_services()
+
+    def cleanup(self):
+        """Clean up the test state from initialize()."""
+        if self._services:
+            self._services.restore_services()
+        super().cleanup()
+
+    def _get_executable(self, api):
+        """Return the executable path of the api."""
+        return self._api_helper.get_deqp_executable(api)
+
+    def _can_run(self, api):
+        """Check if specific api is supported in this board."""
+        return api in self._api_helper.get_supported_apis()
+
+    def read_file(self, filename):
+        """Board/GPU expectation file read helper."""
+        expects_path = os.path.join(self._expectations_dir, filename)
+        try:
+            with open(expects_path, encoding='utf-8') as file:
+                logging.debug(f'Reading board test list from {expects_path}')
+                return file.readlines()
+        except IOError:
+            logging.debug('No file found at %s', expects_path)
+            return []
+
+    def read_expectations(self, name):
+        """Appends the skips, fails and flakes files if they exist."""
+        self._skips += self.read_file(name + '-skips.txt')
+        self._fails += self.read_file(name + '-fails.txt')
+        self._flakes += self.read_file(name + '-flakes.txt')
+
+    def setup_case_list_filters(self):
+        """Set up the skip/flake/fails filter lists.
+
+        The expected fails list will be entries like
+        'dEQP-SUITE.test.name,Crash', such as you find in a failures.csv,
+        results.csv, or the "Some failures found:" stdout output of a previous
+        run.  Enter a test here when it has an expected state other than Pass or
+        Skip.
+
+        The skips list is a list of regexes to match test names to not run at
+        all. This is good for tests that are too slow or uninteresting to ever
+        see status for.
+
+        The flakes list is a list of regexes to match test names that may have
+        unreliable status.  Any unexpected result of that test will be marked
+        with the Flake status and not cause the test run to fail.  The runner
+        does automatic flake detection on its own to mitigate intermittent
+        failures, but even with that we see too many spurious failures in CI
+        when running across many boards and builds. Listing such tests here
+        lets them keep running without failing out CI runs until the source
+        of the flakiness can be resolved.
+
+        The primary source of board skip/flake/fails will be files in this test
+        directory under boards/, but we also list some common entries directly
+        in the code here to save repetition of the explanations.  The files may
+        contain empty lines or comments starting with '#'.
+
+        We could avoid adding filters for other apis than the one being tested,
+        but it's harmless to have unused tests in the lists and makes
+        copy-and-paste mistakes less likely.
+        """
+        # Add expectations common for all boards/chipsets.
+        self.read_expectations('all-chipsets')
+
+        # Add any chipset specific expectations. Most issues should be here.
+        self.read_expectations(self._gpu_type)
+
+        # Add any board-specific expectations. Let's hope we never need models.
+        self.read_expectations(self._board)
+
+    def add_filter_arg(self, command, tests, arg, filename):
+        """Adds an arg for xfail/skip/flake filtering if we made the file for it."""
+        if not tests:
+            return
+
+        path = os.path.join(self._log_path, filename)
+        with open(path, 'w', encoding='utf-8') as file:
+            for test in tests:
+                file.write(test + '\n')
+        command.append(arg + '=' + path)
+
+    def run_once(self, opts=None):
+        """Invokes deqp-runner to run a deqp test group."""
+        options = dict(
+            api=None,
+            caselist=None,
+            filter='',
+            subset_to_run='Pass',  # Pass, Fail, Timeout, NotPass...
+            shard_number='0',
+            shard_count='1',
+            debug='False',
+            perf_failure_description=None)
+        if opts is None:
+            opts = []
+        options.update(utils.args_to_dict(opts))
+        logging.info('Test Options: %s', options)
+
+        self._caselist = options['caselist']
+        self._shard_number = int(options['shard_number'])
+        self._shard_count = int(options['shard_count'])
+        self._debug = (options['debug'] == 'True')
+
+        api = options['api']
+
+        if not self._can_run(api):
+            logging.info('Skipping on %s due to lack of %s API support',
+                         self._gpu_type, api)
+            return
+
+        # Some information to help post-process logs.
+        logging.info('ChromeOS BOARD = %s', self._board)
+        logging.info('ChromeOS CPU family = %s', self._cpu_type)
+        logging.info('ChromeOS GPU family = %s', self._gpu_type)
+
+        self.setup_case_list_filters()
+
+        # Create a place to put detailed test output logs.
+        filter_name = self._filter or os.path.basename(self._caselist)
+        logging.info('dEQP test filter = %s', filter_name)
+        self._log_path = os.path.join(os.getcwd(), 'deqp-runner')
+        shutil.rmtree(self._log_path, ignore_errors=True)
+        os.mkdir(self._log_path)
+
+        if self._debug:
+            # LogReader works on /var/log/messages by default.
+            self._log_reader = cros_logging.LogReader()
+            self._log_reader.set_start_by_current()
+
+        executable = self._get_executable(api)
+        # We must run from the executable's directory so it can find its
+        # test data files.
+        os.chdir(os.path.dirname(executable))
+
+        command = ['deqp-runner', 'run']
+        command.append(f'--output={self._log_path}')
+        command.append(f'--deqp={executable}')
+        command.append('--testlog-to-xml=%s' % os.path.join(
+            self._api_helper.get_deqp_dir(), 'executor', 'testlog-to-xml'))
+        command.append(f'--caselist={self._caselist}')
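+        # Map the 0-based shard_number onto deqp-runner's 1-based fraction
+        # index (hence the +1); the default single shard needs no fraction
+        # flags at all.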
+        if self._shard_number != 0:
+            command.append(f'--fraction-start={self._shard_number + 1}')
+        if self._shard_count != 1:
+            command.append(f'--fraction={self._shard_count}')
+
+        self.add_filter_arg(command, self._flakes, '--flakes',
+                            'known_flakes.txt')
+        self.add_filter_arg(command, self._skips, '--skips', 'skips.txt')
+        self.add_filter_arg(command, self._fails, '--baseline',
+                            'expected-fails.txt')
+
+        command.append('--')
+        command.append(f'--deqp-surface-type={self._surface}')
+        command.append(f'--deqp-surface-width={self._width}')
+        command.append(f'--deqp-surface-height={self._height}')
+        command.append('--deqp-gl-config-name=rgba8888d24s8ms0')
+
+        # Must initialize because some errors don't repopulate
+        # run_result, leaving old results.
+        run_result = {}
+        try:
+            logging.info(command)
+            run_result = utils.run(
+                command,
+                env=self._env,
+                ignore_status=True,
+                stdout_tee=utils.TEE_TO_LOGS,
+                stdout_level=logging.INFO,
+                stderr_tee=utils.TEE_TO_LOGS)
+        except error.CmdError:
+            raise error.TestFail("Failed starting '%s'" % command)
+
+        # Update failing tests to the chrome perf dashboard records.
+        fails = []
+        try:
+            with open(
+                    os.path.join(self._log_path, 'failures.csv'),
+                    encoding='utf-8') as fails_file:
+                for line in fails_file.readlines():
+                    fails.append(line)
+                    self.add_failures(line)
+        except IOError:
+            # failures.csv is not created if there were no failures.
+            pass
+
+        include_css = False
+        for path in os.listdir(self._log_path):
+            path = os.path.join(self._log_path, path)
+            # Remove the large (~15 MB) temporary .shader_cache files generated by the
+            # dEQP runs so we don't upload them with the logs to stainless.
+            if path.endswith('.shader_cache') and os.path.isfile(path):
+                os.remove(path)
+
+            if path.endswith('.xml'):
+                include_css = True
+
+        # If we have any QPA XML files, then we'll want to include the CSS
+        # in the logs so you can view them.
+        if include_css:
+            stylesheet = os.path.join(self._api_helper.get_deqp_dir(),
+                                      'testlog-stylesheet')
+            for file in ['testlog.css', 'testlog.xsl']:
+                shutil.copy(os.path.join(stylesheet, file), self._log_path)
+
+        if fails:
+            if len(fails) == 1:
+                raise error.TestFail(f'Failed test: {fails[0]}')
+            # Format the failure message so it is not too long and stays
+            # reasonably stable even if a few tests are flaky; this simplifies
+            # triaging on stainless and testmon. Sort the failing tests and
+            # report the first and last failures.
+            fails.sort()
+            fail_msg = f'Failed {len(fails)} tests: '
+            fail_msg += fails[0].rstrip() + ', ..., ' + fails[-1].rstrip()
+            fail_msg += ' (see failures.csv)'
+            raise error.TestFail(fail_msg)
+        if run_result.exit_status != 0:
+            raise error.TestFail(
+                f'dEQP run failed with status code {run_result.exit_status}')
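run_once() above assembles its deqp-runner invocation from 'key=value' control-file arguments plus shard options. The following is a minimal standalone sketch of that flow with illustrative values; the small parser only mimics the key=value convention of utils.args_to_dict and is not the autotest helper itself.

# Sketch: parse 'key=value' options and build a deqp-runner command line
# the way run_once() does. All paths and values here are illustrative.
def args_to_dict(args):
    # Assumed to mirror utils.args_to_dict: 'key=value' strings -> dict.
    return dict(arg.split('=', 1) for arg in args if '=' in arg)

opts = ['api=vk', 'caselist=/tmp/vk-caselist.txt',
        'shard_number=1', 'shard_count=4']
options = dict(api=None, caselist=None, shard_number='0', shard_count='1')
options.update(args_to_dict(opts))

command = ['deqp-runner', 'run',
           '--output=/tmp/deqp-runner',
           '--caselist=' + options['caselist']]
shard_number = int(options['shard_number'])
shard_count = int(options['shard_count'])
if shard_number != 0:
    # Matches the +1 that run_once() applies when passing --fraction-start.
    command.append('--fraction-start=%d' % (shard_number + 1))
if shard_count != 1:
    command.append('--fraction=%d' % shard_count)
print(' '.join(command))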
diff --git a/client/site_tests/hardware_Backlight/control b/client/site_tests/hardware_Backlight/control
deleted file mode 100644
index ca99d47..0000000
--- a/client/site_tests/hardware_Backlight/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "hardware_Backlight"
-PURPOSE = 'Verify that the backlight can be adjusted in software.'
-CRITERIA = """
-If backlight_tool fails to adjust the backlight brightness this test will fail.
-"""
-ATTRIBUTES = "suite:hwqual"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "hardware"
-TEST_TYPE = "client"
-
-DOC = """
-This test will utilize the /sys/class/backlight interface to adjust the
-strength of the backlight.
-"""
-
-job.run_test("hardware_Backlight")
diff --git a/client/site_tests/hardware_Backlight/hardware_Backlight.py b/client/site_tests/hardware_Backlight/hardware_Backlight.py
deleted file mode 100644
index 7748fb9..0000000
--- a/client/site_tests/hardware_Backlight/hardware_Backlight.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import service_stopper
-from autotest_lib.client.cros.power import power_utils
-
-
-class hardware_Backlight(test.test):
-    version = 1
-
-    def initialize(self):
-        """Perform necessary initialization prior to test run.
-
-        Private Attributes:
-          _backlight: power_utils.Backlight object
-          _services: service_stopper.ServiceStopper object
-        """
-        super(hardware_Backlight, self).initialize()
-        self._backlight = None
-        # Stop powerd to avoid it adjusting backlight levels
-        self._services = service_stopper.ServiceStopper(['powerd'])
-        self._services.stop_services()
-
-
-    def run_once(self):
-        # optionally test keyboard backlight
-        kblight = None
-        kblight_errs = 0
-        try:
-            kblight = power_utils.KbdBacklight()
-        except power_utils.KbdBacklightException as e:
-            logging.info("Assuming no keyboard backlight due to %s", str(e))
-
-        if kblight:
-            init_percent = kblight.get_percent()
-            try:
-                for i in xrange(100, -1, -1):
-                    kblight.set_percent(i)
-                    result = int(kblight.get_percent())
-                    if i != result:
-                        logging.error('keyboard backlight set %d != %d get',
-                                      i, result)
-                        kblight_errs += 1
-            finally:
-                kblight.set_percent(init_percent)
-
-        if kblight_errs:
-            raise error.TestFail("%d errors testing keyboard backlight." % \
-                                 kblight_errs)
-
-        self._backlight = power_utils.Backlight()
-        backlight_errs = 0
-        backlight_max = self._backlight.get_max_level()
-        for i in xrange(backlight_max + 1):
-            self._backlight.set_level(i)
-            result = self._backlight.get_level()
-            if i != result:
-                # The kernel Documentation/ABI/stable/sysfs-class-backlight
-                # states that the requested brightness may not be the
-                # actual_brightness.
-                # Although not specified in the docs, let's allow the difference
-                # between requested brightness and actual_brightness percent be
-                # within a tolerance of 1 of each other.
-                actual_percent = self._backlight.get_percent()
-                expected_percent = float(i) / float(backlight_max) * 100.0
-                diff_percent = abs(actual_percent - expected_percent)
-                log_level_func = logging.warn
-                if diff_percent > 1:
-                    backlight_errs += 1
-                    log_level_func = logging.error
-                    log_level_func('backlight expected vs. actual exceeds error'
-                                   'tolerance')
-                log_level_func('backlight set %d != %d get', i, result)
-                log_level_func('backlight percent difference is %f%%',
-                               diff_percent)
-
-        if backlight_errs:
-            raise error.TestFail("%d errors testing backlight." % \
-                                 backlight_errs)
-
-
-    def cleanup(self):
-        if self._backlight:
-            self._backlight.restore()
-        self._services.restore_services()
-        super(hardware_Backlight, self).cleanup()
diff --git a/client/site_tests/hardware_Badblocks/control b/client/site_tests/hardware_Badblocks/control
deleted file mode 100644
index 1ab3d9c..0000000
--- a/client/site_tests/hardware_Badblocks/control
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'hardware_Badblocks'
-AUTHOR = 'jcasse, grundler'
-PURPOSE = 'Check for unusable blocks in unmounted partition in SSD storage.'
-TIME = 'LENGTHY'
-TEST_TYPE = 'client'
-
-DOC = """
-This test checks for unusable blocks in an unmounted partition of SSD by
-running the badblocks Linux utility.
-"""
-
-job.run_test('hardware_Badblocks')
diff --git a/client/site_tests/hardware_Badblocks/control.hwqual b/client/site_tests/hardware_Badblocks/control.hwqual
deleted file mode 100644
index 681a8e7..0000000
--- a/client/site_tests/hardware_Badblocks/control.hwqual
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'hardware_Badblocks'
-AUTHOR = 'jcasse, grundler'
-PURPOSE = 'Check for unusable blocks in unmounted partition in SSD storage.'
-TIME = 'LENGTHY'
-TEST_TYPE = 'client'
-ATTRIBUTES = "suite:ssdqual"
-
-DOC = """
-This test checks for unusable blocks in an unmounted partition of SSD by
-running the badblocks Linux utility.
-"""
-
-job.run_test('hardware_Badblocks', iters=24, timeout=60 * 60 * 24)
diff --git a/client/site_tests/hardware_Badblocks/control.storagequal b/client/site_tests/hardware_Badblocks/control.storagequal
deleted file mode 100644
index 91d3475..0000000
--- a/client/site_tests/hardware_Badblocks/control.storagequal
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'hardware_Badblocks'
-AUTHOR = 'jcasse, grundler'
-PURPOSE = 'Check for unusable blocks in unmounted partition in SSD storage.'
-TIME = 'LENGTHY'
-TEST_TYPE = 'client'
-ATTRIBUTES = "suite:storagequal"
-
-DOC = """
-This test checks for unusable blocks in an unmounted partition of SSD by
-running the badblocks Linux utility.
-"""
-
-job.run_test('hardware_Badblocks', iters=24, timeout=60 * 60 * 24)
diff --git a/client/site_tests/hardware_Badblocks/hardware_Badblocks.py b/client/site_tests/hardware_Badblocks/hardware_Badblocks.py
deleted file mode 100644
index 45bcf14..0000000
--- a/client/site_tests/hardware_Badblocks/hardware_Badblocks.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, re, subprocess, threading
-import common
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class hardware_Badblocks(test.test):
-    """
-    Runs badblocks on the root partition that is not being used.
-
-    """
-
-    version = 1
-
-    # Define output that is expected from a successful badblocks run.
-    _EXPECTED_BADBLOCKS_OUTPUT = (
-            'Pass completed, 0 bad blocks found. (0/0/0 errors)')
-
-    # Define Linux badblocks utility name.
-    _BADBLOCKS = 'badblocks'
-
-    # Define variables to store some statistics of the runs.
-    _pass_count = 0
-    _fail_count = 0
-    _longest_runtime = 0
-
-
-    def _get_sector_size(self, dev):
-        """
-        Finds block device's sector size in bytes.
-
-        @return the sector size.
-
-        """
-
-        argv = ('parted ' + dev + ' print | grep "Sector size" | awk -F ' +
-                '"/" \'{print $3}\' | sed \'$s/.$//\'')
-
-        return utils.system_output(argv)
-
-
-    def _timeout(self, badblocks_proc):
-        """
-        Timeout callback for the badblocks process.
-
-        Kills badblocks process if still running and fails test.
-
-        """
-
-        # Kill badblocks, report if not killed, log any exceptions.
-        if badblocks_proc.poll() == None:
-            try:
-                logging.info('badblocks taking too long---sending SIGKILL')
-                badblocks_proc.kill()
-            except Exception as e:
-                logging.info('%s', e)
-            finally:
-                # name of the kernel function in which the process is sleeping.
-                argv = ('ps eopid,fname,wchan | grep ' + self._BADBLOCKS +
-                        ' | awk \'{print $3}\'')
-                waiton = utils.system_output(argv)
-                if waiton:
-                    logging.info('badblocks is waiting on %s', waiton)
-                raise error.TestError('Error: badblocks timed out.')
-
-
-    def _run_badblocks(self, dev, sector_size, tmout):
-        """
-        Runs badblocks.
-
-        """
-
-        # Run badblocks on the selected partition, with parameters:
-        # -s = show progress
-        # -v = verbose (print error count)
-        # -w = destructive write+read test
-        # -b = block size (set equal to sector size)
-        argv = [self._BADBLOCKS, '-svw', '-d', str(sector_size), dev]
-        msg = 'Running: ' + ' '.join(map(str, argv))
-        logging.info(msg)
-        badblocks_proc = subprocess.Popen(
-                argv,
-                shell=False,
-                stderr=subprocess.STDOUT, # Combine stderr with stdout.
-                stdout=subprocess.PIPE)
-
-        # Start timeout timer thread.
-        t = threading.Timer(tmout, self._timeout, [badblocks_proc])
-        t.start()
-
-        # Get badblocks output.
-        stdout, _ = badblocks_proc.communicate()
-
-        # Stop timer if badblocks has finished.
-        t.cancel()
-
-        # Check badblocks exit status.
-        if badblocks_proc.returncode != 0:
-            raise error.TestError('badblocks returned with code: %s',
-                                  badblocks_proc.returncode)
-
-        # Parse and log badblocks output.
-        logging.info('badblocks output:')
-        lines = stdout.split('\n')
-        del lines[-1] # Remove blank line at end.
-        logging.info(lines[0])
-        logging.info(lines[1])
-        # Log the progress of badblocks (line 2 onwards, minus last line).
-        for line in lines[2:-1]:
-            # replace backspace characters with a newline character.
-            line = re.sub(r'[\b]+', '\n', line)
-            # Log test pattern info.
-            pattern_info = line[:line.find(':') + 1]
-            logging.info('%s', pattern_info)
-            sublines = line[line.find(':') + 2:].split('\n')
-            for subline in sublines:
-                logging.info('%s', subline)
-        # Log result (last line).
-        logging.info(lines[-1])
-
-        # Get run time in seconds.
-        min_sec = re.match(r'(\w+):(\w+)', lines[-2].split()[-4])
-        runtime = int(min_sec.group(1)) * 60 + int(min_sec.group(2))
-
-        # Update longest run time.
-        if self._longest_runtime < runtime:
-            self._longest_runtime = runtime
-
-        # Check badblocks result.
-        result = lines[-1].strip()
-        if result != self._EXPECTED_BADBLOCKS_OUTPUT:
-            self._fail_count += 1
-            return
-        self._pass_count += 1
-
-
-    def run_once(self, iters=1, tmout=60 * 60):
-        """
-        Executes test.
-
-        @param iters: Number of times to run badblocks.
-        @param tmout: Time allowed badblocks to run before killing it.
-                      (Default time is 60 minutes.)
-
-        """
-
-        # Log starting message.
-        logging.info('Statring hardware_Badblocks Test.')
-        logging.info('Iterations: %d', iters)
-        logging.info('badblocks Timeout (sec): %d', tmout)
-
-        # Determine which device and partition to use.
-        logging.info('Determine unused root partition to test on:')
-        dev = utils.get_free_root_partition()
-        logging.info('Testing on ' + dev)
-
-        # Get block device's sector size.
-        logging.info('Determine block device sector size:')
-        sector_size = self._get_sector_size(utils.get_root_device())
-        logging.info('Sector size (bytes): ' + sector_size)
-
-        # Get partition size.
-        logging.info('Determine partition size:')
-        part_size = utils.get_disk_size(dev)
-        logging.info('Partition size (bytes): %s', part_size)
-
-        # Run badblocks.
-        for i in range(iters):
-            logging.info('Starting iteration %d', i)
-            self._run_badblocks(dev, sector_size, tmout)
-
-        # Report statistics.
-        logging.info('Total pass: %d', self._pass_count)
-        logging.info('Total fail: %d', self._fail_count)
-        stats = {}
-        stats['ea_badblocks_runs'] = iters
-        stats['ea_passed_count'] = self._pass_count
-        stats['ea_failed_count'] = self._fail_count
-        stats['sec_longest_run'] = self._longest_runtime
-        # TODO: change write_perf_keyval() to output_perf_value() as soon as
-        # autotest is ready for it.
-        self.write_perf_keyval(stats)
-
-        # Report test pass/fail.
-        if self._pass_count != iters:
-            raise error.TestFail('One or more runs found bad blocks on'
-                                 ' storage device.')
diff --git a/client/site_tests/hardware_DiskFirmwareUpgrade/control b/client/site_tests/hardware_DiskFirmwareUpgrade/control
index 6dc6524..0419092 100644
--- a/client/site_tests/hardware_DiskFirmwareUpgrade/control
+++ b/client/site_tests/hardware_DiskFirmwareUpgrade/control
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "General"
 TEST_CLASS = "hardware"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This control files use the default parameters. It install the existing
diff --git a/client/site_tests/hardware_DiskFirmwareUpgrade/hardware_DiskFirmwareUpgrade.py b/client/site_tests/hardware_DiskFirmwareUpgrade/hardware_DiskFirmwareUpgrade.py
index b0726a9..79b7958 100644
--- a/client/site_tests/hardware_DiskFirmwareUpgrade/hardware_DiskFirmwareUpgrade.py
+++ b/client/site_tests/hardware_DiskFirmwareUpgrade/hardware_DiskFirmwareUpgrade.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/hardware_DiskSize/control b/client/site_tests/hardware_DiskSize/control
index e419a93..0de4f62 100644
--- a/client/site_tests/hardware_DiskSize/control
+++ b/client/site_tests/hardware_DiskSize/control
@@ -10,6 +10,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "hardware"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test will find the disk where /dev is located by searching /proc/cmdline,
diff --git a/client/site_tests/hardware_DiskSize/control.hwqual b/client/site_tests/hardware_DiskSize/control.hwqual
index 401190f..8557de4 100644
--- a/client/site_tests/hardware_DiskSize/control.hwqual
+++ b/client/site_tests/hardware_DiskSize/control.hwqual
@@ -11,6 +11,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "hardware"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test will find the disk where /dev is located by searching /proc/cmdline,
diff --git a/client/site_tests/hardware_DiskSize/hardware_DiskSize.py b/client/site_tests/hardware_DiskSize/hardware_DiskSize.py
index 05bc402..3d070bc 100644
--- a/client/site_tests/hardware_DiskSize/hardware_DiskSize.py
+++ b/client/site_tests/hardware_DiskSize/hardware_DiskSize.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -88,4 +89,3 @@
         if (gb < min_gb):
             raise error.TestError("DiskSize %.3f GB below minimum (%.3f GB)" \
                 % (gb, min_gb))
-
diff --git a/client/site_tests/hardware_EC/control b/client/site_tests/hardware_EC/control
index 1224f28..1db2d1f 100644
--- a/client/site_tests/hardware_EC/control
+++ b/client/site_tests/hardware_EC/control
@@ -11,6 +11,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "hardware"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This is a hardware test for EC. The test uses ectool to check if the EC can
diff --git a/client/site_tests/hardware_GPIOSwitches/control b/client/site_tests/hardware_GPIOSwitches/control
index 0772fc1..8133f17 100644
--- a/client/site_tests/hardware_GPIOSwitches/control
+++ b/client/site_tests/hardware_GPIOSwitches/control
@@ -12,6 +12,7 @@
 TEST_CATEGORY = 'factory'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test checks if the buttons and mode switches are set as expected
diff --git a/client/site_tests/hardware_GPS/control b/client/site_tests/hardware_GPS/control
index 4f0fc92..07cb92f 100644
--- a/client/site_tests/hardware_GPS/control
+++ b/client/site_tests/hardware_GPS/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "hardware_GPS"
 PURPOSE = "Verify basic functionality of a GPS device."
 CRITERIA = """
@@ -12,6 +12,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "hardware"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This is a an automated test that checks the basic functionality of a GPS
diff --git a/client/site_tests/hardware_GobiGPS/control b/client/site_tests/hardware_GobiGPS/control
index bd76581..4dbaea2 100644
--- a/client/site_tests/hardware_GobiGPS/control
+++ b/client/site_tests/hardware_GobiGPS/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "hardware_GobiGPS"
 PURPOSE = "Verify basic functionality of a Gobi GPS device."
 CRITERIA = """
@@ -15,6 +15,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "hardware"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This is a an automated test that checks the basic functionality of the
diff --git a/client/site_tests/hardware_Keyboard/control b/client/site_tests/hardware_Keyboard/control
index 43606a0..70579c4 100644
--- a/client/site_tests/hardware_Keyboard/control
+++ b/client/site_tests/hardware_Keyboard/control
@@ -6,10 +6,12 @@
 TEST_CLASS = 'Hardware'
 TEST_CATEGORY = 'Functional'
 TEST_TYPE = 'Client'
+PY_VERSION = 3
+
 DOC = """
 This test uses a modified version of evtest to probe for and test the keyboard.
 The test finds the keyboard event in /dev/input/, and queries to ensure that
-the driver presents all of the expected Chrome OS keyboard keys.
+the driver presents all of the expected ChromeOS keyboard keys.
 """
 
 job.run_test('hardware_Keyboard')
diff --git a/client/site_tests/hardware_Keyboard/hardware_Keyboard.py b/client/site_tests/hardware_Keyboard/hardware_Keyboard.py
index 3846cdd..f52ece8 100644
--- a/client/site_tests/hardware_Keyboard/hardware_Keyboard.py
+++ b/client/site_tests/hardware_Keyboard/hardware_Keyboard.py
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import glob, logging, os, sys, commands
+import glob, logging, os, subprocess
 
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
@@ -33,11 +33,13 @@
     def _supported(self, event, key_name):
         cmd = os.path.join(self.srcdir, 'evtest') + ' ' + event
         cmd += ' -s ' + key_name
-        (status, output) = commands.getstatusoutput(cmd)
+        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
+        output, _ = proc.communicate()
+        status = proc.returncode
         if status:
-            logging.error('Unsupported Key : %s' % key_name)
+            logging.error('Unsupported Key : %s', key_name)
             return False
-        logging.info('%s : %s' % (key_name, output))
+        logging.info('%s : %s', key_name, output)
         return True
 
     def run_once(self):
@@ -47,17 +49,19 @@
             # Find the event file with the most keys
             cmd = os.path.join(self.srcdir, 'evtest') + ' ' + event
             cmd += ' -n'
-            (status, output) = commands.getstatusoutput(cmd)
+            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
+            output, _ = proc.communicate()
+            status = proc.returncode
             if status:  ## bad event, log the command's output as a warning
-                logging.warning("Bad event. cmd : %s" % cmd)
+                logging.warning("Bad event. cmd : %s", cmd)
                 logging.warning(output)
                 continue
             num_keys = int(output)
             if (num_keys > high_key_count):
                 high_key_count = num_keys
                 high_key_event = event
-        logging.info('Event with most is %s with %d keys' % (high_key_event,
-                                                             high_key_count))
+        logging.info('Event with most is %s with %d keys', high_key_event,
+                     high_key_count)
         if (high_key_count < len(hardware_Keyboard.supported_keys)):
             raise error.TestError('No suitable keyboard found.')
         # Check that all necessary keyboard keys exist.
@@ -68,8 +72,10 @@
         # Test one live keystroke. Test will wait on user input.
         cmd = os.path.join(self.srcdir, 'evtest') + ' ' + high_key_event
         cmd += ' -k'
-        (status, output) = commands.getstatusoutput(cmd)
+        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
+        output, _ = proc.communicate()
+        status = proc.returncode
         if status:
             raise error.TestError('Key Capture Test failed : %s' % output);
-        if (output != hardware_Keyboard.live_test_key):
+        if (output.decode() != hardware_Keyboard.live_test_key):
             raise error.TestError('Incorrect key pressed : %s' % output);
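The conversion above swaps the Python 2-only commands module for subprocess. A small self-contained sketch of the same pattern follows; the echoed command stands in for the evtest calls and is purely illustrative.

import subprocess

def getstatusoutput(cmd):
    # Rough stand-in for commands.getstatusoutput() on Python 3: returns
    # (exit status, decoded stdout). Popen yields bytes, hence the decode()
    # mirrored by the output.decode() comparison in the test above.
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    output, _ = proc.communicate()
    return proc.returncode, output.decode().strip()

status, output = getstatusoutput('echo KEY_A')
if status:
    raise RuntimeError('command failed: %s' % output)
print(output)  # -> KEY_A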
diff --git a/client/site_tests/hardware_LightSensor/control b/client/site_tests/hardware_LightSensor/control
deleted file mode 100644
index e67af6b..0000000
--- a/client/site_tests/hardware_LightSensor/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "hardware_LightSensor"
-PURPOSE = "Verify the Light Sensor Device is supported."
-CRITERIA = """
-Fails if the light sensor kernel module is not loaded, or its sys files
-are not exported.
-"""
-ATTRIBUTES = "suite:hwqual"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "hardware"
-TEST_TYPE = "client"
-
-DOC = """
-This tests the interface provided by the light sensor kernel module for
-controlling various light sensors (TSL2560/1/2/3, ISL29018, etc)
-light-to-digital converters (ie, light sensor chips).
-"""
-
-job.run_test('hardware_LightSensor')
diff --git a/client/site_tests/hardware_LightSensor/hardware_LightSensor.py b/client/site_tests/hardware_LightSensor/hardware_LightSensor.py
deleted file mode 100644
index 0275582..0000000
--- a/client/site_tests/hardware_LightSensor/hardware_LightSensor.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os, glob
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-LIGHT_SENSOR_LOCATION = "/sys/bus/iio/devices/*/"
-# Match sensors files from
-# src/platform2/power_manager/powerd/system/ambient_light_sensor.cc
-LIGHT_SENSOR_FILES = [ "in_illuminance0_input",
-                       "in_illuminance_input",
-                       "in_illuminance0_raw",
-                       "in_illuminance_raw",
-                       "illuminance0_input",
-                     ]
-
-class hardware_LightSensor(test.test):
-    """
-    Test the system's Light Sensor device.
-    Failure to find the device likely indicates the kernel module is not loaded.
-    Or it could mean the I2C probe for the device failed because of an incorrect
-    I2C address or bus specification.
-    The ebuild scripts should properly load the udev rules for light sensor so
-    we can find its files in LIGHT_SENSOR_LOCATIONS depending
-    on the kernel version.
-    """
-    version = 1
-
-    def _waiver(self):
-        path = os.path.join(self.job.testdir, "hardware_LightSensor",
-                            "no_light_sensor_ok")
-        if os.path.exists(path):
-            return True
-        return False
-
-
-    def run_once(self):
-        if self._waiver():
-            raise error.TestNAError("Light sensor not required for this device")
-
-        found_light_sensor = 0
-        for location in glob.glob(LIGHT_SENSOR_LOCATION):
-            for fname in LIGHT_SENSOR_FILES:
-                path = location + fname
-                if os.path.exists(path):
-                    found_light_sensor = 1
-                    break
-                else:
-                    logging.info("Did not find light sensor reading at " + path)
-
-            if found_light_sensor:
-                break
-
-        if not found_light_sensor:
-            raise error.TestFail("No light sensor reading found.")
-        else:
-            logging.info("Found light sensor at " + path)
-
-        val = utils.read_one_line(path)
-        reading = int(val)
-        if reading < 0:
-            raise error.TestFail("Invalid light sensor reading (%s)" % val)
-        logging.debug("light sensor reading is %d", reading)
diff --git a/client/site_tests/hardware_MemoryLatency/control b/client/site_tests/hardware_MemoryLatency/control
index edc5d88..b2975d0 100644
--- a/client/site_tests/hardware_MemoryLatency/control
+++ b/client/site_tests/hardware_MemoryLatency/control
@@ -11,6 +11,8 @@
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = "hardware"
 TEST_TYPE = 'client'
+ATTRIBUTES = "suite:memory_qual2"
+PY_VERSION = 3
 
 DOC = """
 This test will use the lat_mem_rd benchmark from lmbench3 to measure memory
diff --git a/client/site_tests/hardware_MemoryLatency/hardware_MemoryLatency.py b/client/site_tests/hardware_MemoryLatency/hardware_MemoryLatency.py
index b3be26c..8fea2d6 100644
--- a/client/site_tests/hardware_MemoryLatency/hardware_MemoryLatency.py
+++ b/client/site_tests/hardware_MemoryLatency/hardware_MemoryLatency.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -88,4 +89,3 @@
     def cleanup(self):
         self._services.restore_services()
         super(hardware_MemoryLatency, self).cleanup()
-
diff --git a/client/site_tests/hardware_MemoryThroughput/control b/client/site_tests/hardware_MemoryThroughput/control
index d96e7f3..eeda262 100644
--- a/client/site_tests/hardware_MemoryThroughput/control
+++ b/client/site_tests/hardware_MemoryThroughput/control
@@ -11,6 +11,7 @@
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This uses the lmbench 3 bw_mem benchmark for reads, writes, and copy
diff --git a/client/site_tests/hardware_MemoryThroughput/control.cache_copy_1thread b/client/site_tests/hardware_MemoryThroughput/control.cache_copy_1thread
index 270ebbe..966b665 100644
--- a/client/site_tests/hardware_MemoryThroughput/control.cache_copy_1thread
+++ b/client/site_tests/hardware_MemoryThroughput/control.cache_copy_1thread
@@ -10,6 +10,7 @@
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This uses the lmbench 3 bw_mem benchmark for reads, writes, and copy
diff --git a/client/site_tests/hardware_MemoryThroughput/control.copy_1thread_full_range b/client/site_tests/hardware_MemoryThroughput/control.copy_1thread_full_range
index b9969a4..cdd1db5 100644
--- a/client/site_tests/hardware_MemoryThroughput/control.copy_1thread_full_range
+++ b/client/site_tests/hardware_MemoryThroughput/control.copy_1thread_full_range
@@ -10,6 +10,7 @@
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This uses the lmbench 3 bw_mem benchmark for reads, writes, and copy
diff --git a/client/site_tests/hardware_MemoryThroughput/control.copy_2thread_full_range b/client/site_tests/hardware_MemoryThroughput/control.copy_2thread_full_range
index 6b63aa0..d0cce25 100644
--- a/client/site_tests/hardware_MemoryThroughput/control.copy_2thread_full_range
+++ b/client/site_tests/hardware_MemoryThroughput/control.copy_2thread_full_range
@@ -10,6 +10,7 @@
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This uses the lmbench 3 bw_mem benchmark for reads, writes, and copy
diff --git a/client/site_tests/hardware_MemoryThroughput/control.memory_qual b/client/site_tests/hardware_MemoryThroughput/control.memory_qual
index 974255c..18e6d43 100644
--- a/client/site_tests/hardware_MemoryThroughput/control.memory_qual
+++ b/client/site_tests/hardware_MemoryThroughput/control.memory_qual
@@ -10,6 +10,9 @@
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+ATTRIBUTES = "suite:memory_qual2"
+EXTENDED_TIMEOUT = 7200 # 2 hours
+PY_VERSION = 3
 
 DOC = """
 This uses the lmbench 3 bw_mem benchmark for reads, writes, and copy
diff --git a/client/site_tests/hardware_MemoryThroughput/control.read_1thread_full_range b/client/site_tests/hardware_MemoryThroughput/control.read_1thread_full_range
index 8f3bcda..762d427 100644
--- a/client/site_tests/hardware_MemoryThroughput/control.read_1thread_full_range
+++ b/client/site_tests/hardware_MemoryThroughput/control.read_1thread_full_range
@@ -10,6 +10,7 @@
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This uses the lmbench 3 bw_mem benchmark for reads, writes, and copy
diff --git a/client/site_tests/hardware_MemoryThroughput/control.read_2thread_full_range b/client/site_tests/hardware_MemoryThroughput/control.read_2thread_full_range
index 57e513b..d51a2f7 100644
--- a/client/site_tests/hardware_MemoryThroughput/control.read_2thread_full_range
+++ b/client/site_tests/hardware_MemoryThroughput/control.read_2thread_full_range
@@ -10,6 +10,7 @@
 TEST_CATEGORY = 'Performance'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This uses the lmbench 3 bw_mem benchmark for reads, writes, and copy
diff --git a/client/site_tests/hardware_MemoryThroughput/hardware_MemoryThroughput.py b/client/site_tests/hardware_MemoryThroughput/hardware_MemoryThroughput.py
index 58708eb..c1e426d 100644
--- a/client/site_tests/hardware_MemoryThroughput/hardware_MemoryThroughput.py
+++ b/client/site_tests/hardware_MemoryThroughput/hardware_MemoryThroughput.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/hardware_MemoryZRAMThroughput/control b/client/site_tests/hardware_MemoryZRAMThroughput/control
index f8ca83d..dbb59ff 100644
--- a/client/site_tests/hardware_MemoryZRAMThroughput/control
+++ b/client/site_tests/hardware_MemoryZRAMThroughput/control
@@ -11,6 +11,7 @@
 TEST_CLASS = "hardware"
 TEST_TYPE = 'client'
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test uses AutoTest's utils.memtotal() and utils.swaptotal().
diff --git a/client/site_tests/hardware_MemoryZRAMThroughput/hardware_MemoryZRAMThroughput.py b/client/site_tests/hardware_MemoryZRAMThroughput/hardware_MemoryZRAMThroughput.py
index 8eb2483..9703521 100644
--- a/client/site_tests/hardware_MemoryZRAMThroughput/hardware_MemoryZRAMThroughput.py
+++ b/client/site_tests/hardware_MemoryZRAMThroughput/hardware_MemoryZRAMThroughput.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -55,7 +56,7 @@
         # First run memory-eater wth 60% of total memory size to measure the
         # page access throughput
         cmd = ("memory-eater --size %d --speed --repeat 4 --chunk 500 "
-               "--wait 0" % long(mem_size * 0.60 / 1024))
+               "--wait 0" % int(mem_size * 0.60 / 1024))
         logging.debug('cmd: %s', cmd)
         out = utils.system_output(cmd)
         self._log_results("60_Percent_RAM", out)
@@ -67,7 +68,7 @@
         # by half and the test itself will fork a child process to double the
         # memory usage. Each process will take turns to access 500 pages
         # (via --chunk) until all pages are accessed 4 times (via --repeat).
-        half_mem_pressure_size = long((mem_size+swap_size * 0.3) / 1024) / 2;
+        half_mem_pressure_size = int((mem_size+swap_size * 0.3) / 1024) / 2;
         cmd = ("memory-eater --size %d --speed --fork --repeat 4 --chunk 500"
                "--wait 0" % half_mem_pressure_size)
         logging.debug('cmd: %s', cmd)
@@ -76,4 +77,3 @@
 
     def cleanup(self):
         utils.system('start ui')
-
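The long()-to-int() edits above follow from Python 3 dropping the separate long type; int() is arbitrary precision there, so the expressions keep their meaning. A trivial illustration with a made-up size:

# Python 3 unified integers: long() is gone and int() is arbitrary precision,
# so long(mem_size * 0.60 / 1024) becomes int(mem_size * 0.60 / 1024).
mem_size = 8 * 1024 * 1024   # illustrative total memory, in KiB
print(int(mem_size * 0.60 / 1024))  # -> 4915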
diff --git a/client/site_tests/hardware_Memtester/control b/client/site_tests/hardware_Memtester/control
index fe6d866..c8a6ef0 100644
--- a/client/site_tests/hardware_Memtester/control
+++ b/client/site_tests/hardware_Memtester/control
@@ -9,6 +9,7 @@
 TIME='LENGTHY'
 TEST_CLASS = "hardware"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test uses memtester to find memory subsystem faults. Amount of memory
diff --git a/client/site_tests/hardware_Memtester/control.memory_qual b/client/site_tests/hardware_Memtester/control.memory_qual
index 850d120..e35df95 100644
--- a/client/site_tests/hardware_Memtester/control.memory_qual
+++ b/client/site_tests/hardware_Memtester/control.memory_qual
@@ -9,6 +9,9 @@
 TIME = 'LENGTHY'
 TEST_CLASS = "hardware"
 TEST_TYPE = 'client'
+ATTRIBUTES = "suite:memory_qual2"
+EXTENDED_TIMEOUT = 1209600 # 2 weeks
+PY_VERSION = 3
 
 DOC = """
 This test uses memtester to find memory subsystem faults. Amount of memory
diff --git a/client/site_tests/hardware_Memtester/control.quick b/client/site_tests/hardware_Memtester/control.quick
index 5736a4d..38d4f20 100644
--- a/client/site_tests/hardware_Memtester/control.quick
+++ b/client/site_tests/hardware_Memtester/control.quick
@@ -11,6 +11,7 @@
 TEST_TYPE = 'client'
 JOB_RETRIES = 2
 ATTRIBUTES = "suite:bvt-perbuild"
+PY_VERSION = 3
 
 DOC = """
 This test uses memtester to find memory subsystem faults. The quick test
diff --git a/client/site_tests/hardware_Memtester/hardware_Memtester.py b/client/site_tests/hardware_Memtester/hardware_Memtester.py
index ba65ae5..769d09a 100644
--- a/client/site_tests/hardware_Memtester/hardware_Memtester.py
+++ b/client/site_tests/hardware_Memtester/hardware_Memtester.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/hardware_MultiReader/control b/client/site_tests/hardware_MultiReader/control
deleted file mode 100644
index 0adca30..0000000
--- a/client/site_tests/hardware_MultiReader/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'hardware_MultiReader'
-AUTHOR = 'The Chromium OS Authors'
-PURPOSE = 'Verify a USB multi card reader basically works.'
-CRITERIA = 'Fails if no removable block devices are found.'
-TIME='SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = "hardware"
-TEST_TYPE = 'client'
-
-DOC = """
-This test checks for any removable block devices.
-It assumes a multi card reader is plugged in to USB and that it
-has media installed such as (for example) an SD card.
-"""
-
-job.run_test('hardware_MultiReader')
diff --git a/client/site_tests/hardware_MultiReader/hardware_MultiReader.py b/client/site_tests/hardware_MultiReader/hardware_MultiReader.py
deleted file mode 100644
index 73bd22e..0000000
--- a/client/site_tests/hardware_MultiReader/hardware_MultiReader.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob, logging, os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class hardware_MultiReader(test.test):
-    version = 1
-
-    def run_once(self):
-        blockdev_paths = glob.glob("/sys/block/*/removable")
-        for path in blockdev_paths:
-            removable = utils.read_one_line(path)
-            if removable == "1":
-                logging.info("Found removable block device %s",
-                             os.path.dirname(path))
-                return True
-
-        raise error.TestFail("No removable block devices are seen.")
diff --git a/client/site_tests/hardware_MultiReaderPowerConsumption/control b/client/site_tests/hardware_MultiReaderPowerConsumption/control
deleted file mode 100644
index 6c6731a..0000000
--- a/client/site_tests/hardware_MultiReaderPowerConsumption/control
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright (c) 2012 Collabora Ltd. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "hardware_MultiReaderPowerConsumption"
-AUTHOR = "Vivia Nikolaidou <vivia.nikolaidou@collabora.co.uk>"
-PURPOSE = """Test card reader CPU power consumption to be within acceptable
-range while performing random r/w"""
-CRITERIA = """Fails if power consumption readings during heavy-duty random r/w
-fall outside predefined ranges"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "hardware"
-TEST_TYPE = "client"
-
-DOC = """
-This test runs a heavy-duty random read/write test, which is defined as
-running a `dd if=/dev/urandom` in parallel with a `tail -f`. The test is
-run three times: once on a ramdisk with the SD card mounted, once on the
-SD card with the ramdisk unmounted, and once on the ramdisk with the SD
-card unmounted. Power consumption is measured after each test and then
-reported. Preconditions:
-
-1) User must not be logged on to the GUI
-2) An empty SD card must be inserted and formatted with a single mountable
-   partition
-3) No other usb storage devices must be inserted.
-
-@param ramdisk_size: size of the ramdisk (integer in MiB).
-@param file_size: test file size (integer in MiB).
-@param ramdisk_path: path to the ramdisk mount point.
-@param fs_uuid: the UUID for the attached card. Use this parameter is
-       autodetection does not work as expected.
-@param drain_limit: maximum ratio between the card reader energy consumption
-       and each of the two ramdisk read/write test energy consumption values.
-       1.00 means the card reader test may not consume more energy than either
-       ramdisk test, 0.9 means it may consume no more than 90% of the ramdisk
-       value, and so forth. default is 1.05.
-"""
-
-from autotest_lib.client.cros import storage as storage_mod
-
-volume_filter, args_dict = storage_mod.args_to_storage_dict(args)
-ramdisk_size = int(args_dict.get("ramdisk_size", 513)) # MiB
-file_size = int(args_dict.get("file_size", 512)) # MiB
-drain_limit = float(args_dict.get("drain_limit", 1.05))
-if not volume_filter:
-    volume_filter = {"bus": "usb"}
-
-job.run_test("hardware_MultiReaderPowerConsumption",
-             ramdisk_size=ramdisk_size,
-             file_size=file_size,
-             drain_limit=drain_limit,
-             volume_filter=volume_filter)
diff --git a/client/site_tests/hardware_MultiReaderPowerConsumption/hardware_MultiReaderPowerConsumption.py b/client/site_tests/hardware_MultiReaderPowerConsumption/hardware_MultiReaderPowerConsumption.py
deleted file mode 100644
index 93fbc91..0000000
--- a/client/site_tests/hardware_MultiReaderPowerConsumption/hardware_MultiReaderPowerConsumption.py
+++ /dev/null
@@ -1,178 +0,0 @@
-import os, logging
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error, autotemp
-from autotest_lib.client.cros import storage as storage_mod
-from autotest_lib.client.cros.power import power_status
-
-
-class hardware_MultiReaderPowerConsumption(storage_mod.StorageTester):
-    version = 1
-    _files_to_delete = []
-    _ramdisk_path = None
-    _storage = None
-
-
-    def initialize(self):
-        super(hardware_MultiReaderPowerConsumption, self).initialize()
-
-        # Make sure we're not on AC power
-        self.status = power_status.get_status()
-        if self.status.on_ac():
-            raise error.TestNAError(
-                  'This test needs to be run with the AC power offline')
-
-
-    def cleanup(self):
-        # Remove intermediate files
-        for path in self._files_to_delete:
-            utils.system('rm -f %s' % path)
-
-        if self._storage and os.path.ismount(self._storage['mountpoint']):
-            self.scanner.umount_volume(storage_dict=self._storage)
-
-        if self._ramdisk_path and os.path.ismount(self._ramdisk_path.name):
-            umount_ramdisk(self._ramdisk_path.name)
-            self._ramdisk_path.clean()
-
-        super(hardware_MultiReaderPowerConsumption, self).cleanup()
-
-
-    def readwrite_test(self, path, size, delete_file=False):
-        """Heavy-duty random read/write test. Run `dd` & `tail -f` in parallel
-
-        The random write is done by writing a file from /dev/urandom into the
-        given location, while the random read is done by concurrently reading
-        that file.
-
-        @param path: The directory that will create the test file.
-        @param size: Size of the test file, in MiB.
-        @param delete_file: Flag the file to be deleted on test exit.
-               Otherwise file deletion won't be performed.
-        """
-        # Calculate the parameters for dd
-        size = 1024*1024*size
-        blocksize = 8192
-
-        # Calculate the filename and full path, flag to delete if needed
-        filename = 'tempfile.%d.delete-me' % size
-        pathfile = os.path.join(path, filename)
-        if delete_file:
-            self._files_to_delete.append(pathfile)
-
-        pid = os.fork() # We need to run two processes in parallel
-        if pid:
-            # parent
-            utils.BgJob('tail -f %s --pid=%s > /dev/null'
-                        % (pathfile, pid))
-            # Reap the dd child so that tail does not wait for the zombie
-            os.waitpid(pid, 0)
-        else:
-            # child
-            utils.system('dd if=/dev/urandom of=%s bs=%d count=%s'
-                         % (pathfile, blocksize, (size//blocksize)))
-            # A forked child is exiting here, so we really do want os._exit:
-            os._exit(0)
-
-
-    def run_once(self, ramdisk_size=513, file_size=512, drain_limit=1.05,
-                 volume_filter={'bus': 'usb'}):
-        """Test card reader CPU power consumption to be within acceptable
-        range while performing random r/w
-
-        The random r/w is performed in the readwrite_test() method, by
-        concurrently running `dd if=/dev/urandom` and `tail -f`. It is run once
-        on a ramdisk with the SD card mounted, then on the SD card with the
-        ramdisk unmounted, and then on the SD card with the ramdisk unmounted.
-        The measured values are then reported.
-
-        @param ramdisk_size: Size of ramdisk (in MiB).
-        @param file_size: Size of test file (in MiB).
-        @param volume_filter: Where to find the card reader.
-        @param drain_limit: maximum ratio between the card reader
-                            energy consumption and each of the two
-                            ramdisk read/write test energy consumption
-                            values. 1.00 means the card reader test may
-                            not consume more energy than either ramdisk
-                            test, 0.9 means it may consume no more than
-                            90% of the ramdisk value, and so forth.
-        """
-        # Switch to VT2 so the screen turns itself off automatically instead of
-        # dimming, in order to reduce the battery consuption caused by other
-        # variables.
-        utils.system('chvt 2')
-
-        logging.debug('STEP 1: ensure SD card is inserted and mounted')
-        self._storage = self.wait_for_device(volume_filter, cycles=1,
-                                             mount_volume=True)[0]
-
-        logging.debug('STEP 2: mount the ramdisk')
-        self._ramdisk_path = autotemp.tempdir(unique_id='ramdisk',
-                                              dir=self.tmpdir)
-        mount_ramdisk(self._ramdisk_path.name, ramdisk_size)
-
-        # Read current charge, as well as maximum charge.
-        self.status.refresh()
-        max_charge = self.status.battery.charge_full_design
-        initial_charge = self.status.battery.charge_now
-
-        logging.debug('STEP 3: perform heavy-duty read-write test on ramdisk')
-        self.readwrite_test(self._ramdisk_path.name, file_size)
-        # Read current charge (reading A)
-        self.status.refresh()
-        charge_A = self.status.battery.charge_now
-
-        logging.debug('STEP 4: unmount ramdisk')
-        umount_ramdisk(self._ramdisk_path.name)
-
-        logging.debug('STEP 5: perform identical read write test on SD card')
-        self.readwrite_test(self._storage['mountpoint'], file_size,
-                            delete_file=True)
-        # Read current charge (reading B)
-        self.status.refresh()
-        charge_B = self.status.battery.charge_now
-
-        logging.debug('STEP 6: unmount card')
-        self.scanner.umount_volume(storage_dict=self._storage, args='-f -l')
-
-        logging.debug('STEP 7: perform ramdisk test again')
-        mount_ramdisk(self._ramdisk_path.name, ramdisk_size)
-        self.readwrite_test(self._ramdisk_path.name, file_size)
-        # Read current charge (reading C)
-        self.status.refresh()
-        charge_C = self.status.battery.charge_now
-
-        # Compute the results
-        ramdisk_plus = initial_charge - charge_A
-        sd_card_solo = charge_A - charge_B
-        ramdisk_solo = charge_B - charge_C
-
-        sd_card_drain_ratio_a = (sd_card_solo / ramdisk_plus)
-        sd_card_drain_ratio_b = (sd_card_solo / ramdisk_solo)
-
-        msg = None
-        if sd_card_drain_ratio_a > drain_limit:
-            msg = ('Card reader drain exceeds mounted baseline by > %f (%f)'
-                   % (drain_limit, sd_card_drain_ratio_a))
-        elif sd_card_drain_ratio_b > drain_limit:
-            msg = ('Card reader drain exceeds unmounted baseline by > %f (%f)'
-                   % (drain_limit, sd_card_drain_ratio_b))
-
-        if msg:
-            raise error.TestError(msg)
-        else:
-            fmt = 'Card reader drain ratio Ok: mounted %f; unmounted %f'
-            logging.info(fmt % (sd_card_drain_ratio_a, sd_card_drain_ratio_b))
-
-
-def mount_ramdisk(path, size):
-    utils.system('mount -t tmpfs none %s -o size=%sm' % (path, size))
-
-
-def umount_ramdisk(path):
-    """Umount ramdisk mounted at |path|
-
-    @param path: the mountpoint for the mountd RAM disk
-    """
-    utils.system('rm -rf %s/*' % path)
-    utils.system('umount -f -l %s' % path)
diff --git a/client/site_tests/hardware_ProbeComponents/control b/client/site_tests/hardware_ProbeComponents/control
deleted file mode 100644
index f28d768..0000000
--- a/client/site_tests/hardware_ProbeComponents/control
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "hardware_ProbeComponents"
-PURPOSE = "Probe all hardware components for HWID generation"
-CRITERIA = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Logging"
-TEST_TYPE = "client"
-DOC = "This test logs the output of gooftool probe command used for HWID."
-
-job.run_test('hardware_ProbeComponents')
diff --git a/client/site_tests/hardware_ProbeComponents/hardware_ProbeComponents.py b/client/site_tests/hardware_ProbeComponents/hardware_ProbeComponents.py
deleted file mode 100755
index 0d225c1..0000000
--- a/client/site_tests/hardware_ProbeComponents/hardware_ProbeComponents.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-hardware_ProbeComponents runs "gooftool probe" command. The results will be
-provided back to Google by the OEMs/ODMs who are qualifying new hardware
-components and used to generate a new HWID.
-"""
-
-import logging
-from autotest_lib.client.bin import test, utils
-
-class hardware_ProbeComponents(test.test):
-    """Logs "gooftool probe" command output"""
-    version = 1
-
-
-    def run_once(self):
-        probe_results = utils.system_output('gooftool probe')
-        logging.info(probe_results)
-        return
diff --git a/client/site_tests/hardware_RamFio/control b/client/site_tests/hardware_RamFio/control
index 2619fd6..de923fa 100644
--- a/client/site_tests/hardware_RamFio/control
+++ b/client/site_tests/hardware_RamFio/control
@@ -8,13 +8,12 @@
 TIME = 'SHORT'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
-ATTRIBUTES = "suite:bvt-perbuild, suite:crosbolt_perf_weekly"
+ATTRIBUTES = "suite:bvt-perbuild, suite:crosbolt_perf_weekly, suite:memory_qual2"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 Create ram disk and use hardware_StorageFio to test for ram throughput
 """
 
 job.run_test('hardware_RamFio', requirements=[('ram_suite', [])])
-
-
diff --git a/client/site_tests/hardware_RamFio/hardware_RamFio.py b/client/site_tests/hardware_RamFio/hardware_RamFio.py
index daf2aa4..4672d87 100644
--- a/client/site_tests/hardware_RamFio/hardware_RamFio.py
+++ b/client/site_tests/hardware_RamFio/hardware_RamFio.py
@@ -1,4 +1,5 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Lint as: python2, python3
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
diff --git a/client/site_tests/hardware_RealtekCardReader/control b/client/site_tests/hardware_RealtekCardReader/control
deleted file mode 100644
index f194e9b..0000000
--- a/client/site_tests/hardware_RealtekCardReader/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'hardware_RealtekCardReader'
-AUTHOR = 'The Chromium OS Authors'
-PURPOSE = 'Verify the Realtek card reader basically works.'
-CRITERIA = 'Fails if the Realtek card reader is not detected.'
-TIME='SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = "hardware"
-TEST_TYPE = 'client'
-
-DOC = """
-This test checks for the Realtek USB device and for any removable
-block devices.
-This test requires an SD card to be plugged in the SD slot.
-This test applies to x86 systems only.
-"""
-
-job.run_test('hardware_RealtekCardReader')
diff --git a/client/site_tests/hardware_RealtekCardReader/hardware_RealtekCardReader.py b/client/site_tests/hardware_RealtekCardReader/hardware_RealtekCardReader.py
deleted file mode 100644
index 8ad7809..0000000
--- a/client/site_tests/hardware_RealtekCardReader/hardware_RealtekCardReader.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob, logging, os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class hardware_RealtekCardReader(test.test):
-    version = 1
-
-    def run_once(self):
-        # Look for the Realtek USB card reader.
-        # This requires a plugged in SD card.
-        lsusb_output = utils.system_output("lsusb -t")
-        if not "Driver=ums-realtek" in lsusb_output:
-            raise error.TestFail("The Realtek card reader USB device was not "
-                                 "detected.  This test requires an SD card to "
-                                 "be inserted to detect the USB device.")
-
-        blockdevs = glob.glob("/sys/block/*")
-        for dev in blockdevs:
-            removable = utils.read_one_line(os.path.join(dev, "removable"))
-            if removable == "1":
-                logging.info("Found removable block device %s", dev)
-                return True
-
-        raise error.TestFail("The card reader USB device was detected, but "
-                             "no removable block devices are seen.")
diff --git a/client/site_tests/hardware_Resolution/control b/client/site_tests/hardware_Resolution/control
deleted file mode 100644
index 18aea41..0000000
--- a/client/site_tests/hardware_Resolution/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kdlucas@chromium.org (Kelly Lucas)"
-NAME = "hardware_Resolution"
-PURPOSE = "Determine if the current screen resolution is supported."
-CRITERIA = """
-Screen resolutions supported:
-  - 1280x800
-  - 1366x768
-"""
-ATTRIBUTES = "suite:hwqual"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "hardware"
-TEST_TYPE = "client"
-
-DOC = """
-This test uses xrandr to gather the current display resolution.
-"""
-
-job.run_test('hardware_Resolution')
diff --git a/client/site_tests/hardware_Resolution/hardware_Resolution.py b/client/site_tests/hardware_Resolution/hardware_Resolution.py
deleted file mode 100644
index d686d5c..0000000
--- a/client/site_tests/hardware_Resolution/hardware_Resolution.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-__author__ = 'kdlucas@chromium.org (Kelly Lucas)'
-
-import logging, re
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.graphics import graphics_utils
-
-_SUPPORTED_LVDS_RESOLUTIONS = ['1280x800', '1366x768']
-
-_MODETEST_COMMAND = 'modetest -c'
-_MODETEST_CONNECTED = 'connected'
-_MODETEST_CONNECTOR_LVDS = 'LVDS'
-# The list of connectors in this regex pattern comes from an array called
-# connector_type_names in the libdrm file caled modetest.c .
-_MODETEST_CONNECTOR_PATTERN = (r'\d+\s+\d+\s+(connected|disconnected)\s+'
-                               r'(unknown|VGA|DVI-I|DVI-D|DVI-A|composite|'
-                               r's-video|LVDS|component|9-pin DIN|HDMI-A|'
-                               r'HDMI-B|TV|eDP)\s+\d+x\d+\s+\d+\s+\d+')
-_MODETEST_MODE_PATTERN = (r'\s+.+\d+\s+(\d+)\s+\d+\s+\d+\s+\d+\s+(\d+)\s+\d+\s+'
-                          r'\d+\s+\d+\s+flags:')
-
-_LVDS_UNSUPPORTED_MESSAGE = '%s is not a supported LVDS resolution'
-
-class hardware_Resolution(test.test):
-    """
-    Verify the current screen resolution is supported.
-    """
-    version = 1
-
-    def is_lvds_res(self, res, xrandr_output):
-        """
-        Returns True if the supplied resolution is associated with
-        an LVDS connection.
-        """
-        search_str = r'LVDS\d+ connected ' + res
-        for line in xrandr_output:
-            if re.match(search_str, line):
-                return True
-
-        return False
-
-    def get_current_res(self, xrandr_output):
-        """
-        Get the current video resolution.
-        Returns:
-            string: represents the video resolution.
-        """
-        for line in xrandr_output:
-            if 'Screen 0' in line:
-                sections = line.split(',')
-                for item in sections:
-                    if 'current' in item:
-                        res = item.split()
-                        return '%s%s%s' % (res[1], res[2], res[3])
-
-        return None
-
-    def run_once(self):
-        modetest_output = utils.system_output(_MODETEST_COMMAND)
-        logging.info('modetest output: \n{0}'.format(modetest_output))
-
-        # True if the information being read is about a connected LVDS
-        # connector, False otherwise
-        connected_lvds = False
-
-        for line in modetest_output.splitlines():
-            connector_match = re.match(_MODETEST_CONNECTOR_PATTERN, line)
-            if connector_match is not None:
-                connected_lvds = False
-                if connector_match.group(1) == _MODETEST_CONNECTED:
-                    if connector_match.group(2) == _MODETEST_CONNECTOR_LVDS:
-                        connected_lvds = True
-
-            if connected_lvds:
-                mode_match = re.match(_MODETEST_MODE_PATTERN, line)
-                if mode_match is not None:
-                    res = '{0}x{1}'.format(int(mode_match.group(1)),
-                                           int(mode_match.group(2)))
-                    if res not in _SUPPORTED_LVDS_RESOLUTIONS:
-                        raise error.TestFail(_LVDS_UNSUPPORTED_MESSAGE % res)
diff --git a/client/site_tests/hardware_SAT/control b/client/site_tests/hardware_SAT/control
index c424a40..228e8c3 100644
--- a/client/site_tests/hardware_SAT/control
+++ b/client/site_tests/hardware_SAT/control
@@ -22,9 +22,10 @@
 http://code.google.com/p/stressapptest/
 """
 NAME = 'hardware_SAT'
-ATTRIBUTES = "suite:kernel_per-build_regression"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel"
 TEST_CLASS = "hardware"
 TEST_CATEGORY = 'Stress'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 job.run_test('hardware_SAT')
diff --git a/client/site_tests/hardware_SAT/control.hwqual b/client/site_tests/hardware_SAT/control.hwqual
index e06627d..5e13c14 100644
--- a/client/site_tests/hardware_SAT/control.hwqual
+++ b/client/site_tests/hardware_SAT/control.hwqual
@@ -26,5 +26,6 @@
 TEST_CLASS = 'hardware'
 TEST_CATEGORY = 'Stress'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 job.run_test('hardware_SAT', seconds=600)
diff --git a/client/site_tests/hardware_SAT/control.memory_qual b/client/site_tests/hardware_SAT/control.memory_qual
index da4450c..2be5dd5 100644
--- a/client/site_tests/hardware_SAT/control.memory_qual
+++ b/client/site_tests/hardware_SAT/control.memory_qual
@@ -28,5 +28,8 @@
 TEST_CLASS = 'hardware'
 TEST_CATEGORY = 'Stress'
 TEST_TYPE = 'client'
+ATTRIBUTES = "suite:memory_qual2"
+EXTENDED_TIMEOUT = 30600 # 8.5 hours
+PY_VERSION = 3
 
 job.run_test('hardware_SAT', tag='memory_qual', seconds=8 * HOUR_IN_SECS)
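
The memory_qual variant now pairs the 8-hour stressapptest run with EXTENDED_TIMEOUT = 30600, i.e. 8.5 hours. A standalone check of that arithmetic follows as a minimal sketch; the half-hour of scheduling headroom is an inference from the two numbers rather than a documented constant, and HOUR_IN_SECS is redefined locally so the snippet is self-contained:

HOUR_IN_SECS = 60 * 60
run_seconds = 8 * HOUR_IN_SECS           # the seconds= argument passed to hardware_SAT
extended_timeout = 30600                 # EXTENDED_TIMEOUT added above (8.5 hours)
assert extended_timeout == run_seconds + 30 * 60   # 30 minutes of headroom
print(extended_timeout / HOUR_IN_SECS)   # prints 8.5
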
diff --git a/client/site_tests/hardware_SAT/hardware_SAT.py b/client/site_tests/hardware_SAT/hardware_SAT.py
index 02341fa..9bc0858 100644
--- a/client/site_tests/hardware_SAT/hardware_SAT.py
+++ b/client/site_tests/hardware_SAT/hardware_SAT.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/hardware_Smartctl/control b/client/site_tests/hardware_Smartctl/control
deleted file mode 100644
index bcfb6fd..0000000
--- a/client/site_tests/hardware_Smartctl/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'hardware_Smartctl'
-AUTHOR = 'puthik'
-PURPOSE = 'Read S.M.A.R.T attribute for root device'
-TIME = 'SHORT'
-TEST_CLASS = 'hardware'
-TEST_TYPE = 'client'
-
-DOC = """
-Run smartctl to retrieve S.M.A.R.T attribute and report in keyval format.
-Note: Usage of this test case is to be called by server test during the
-storage stress test to determine status of the SSD
-"""
-
-job.run_test('hardware_Smartctl', iteration=1000)
-
-
diff --git a/client/site_tests/hardware_Smartctl/hardware_Smartctl.py b/client/site_tests/hardware_Smartctl/hardware_Smartctl.py
deleted file mode 100644
index 29bc24c..0000000
--- a/client/site_tests/hardware_Smartctl/hardware_Smartctl.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, re
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-
-
-class hardware_Smartctl(test.test):
-    """
-    Run smartctl to retrieve S.M.A.R.T attribute and report in keyval format.
-    """
-
-    version = 1
-
-    _SMARTCTL_DEVICE_MODEL_PATTERN = 'Device Model: *(?P<model>[^ ].*)$'
-    _SMARTCTL_RESULT_PATTERN = '.*[P-][O-][S-][R-][C-][K-].*'
-
-    # Temporary table: This value should be in smartctl in March 2014
-    # http://sourceforge.net/apps/trac/smartmontools/ticket/272
-    _SMARTCTL_LOOKUP_TABLE = {
-            'SanDisk SSD i100': {
-                    171 : 'Program_Fail_Count',
-                    172 : 'Erase_Fail_Count',
-                    173 : 'Average_Write_Erase_Count',
-                    174 : 'Unexpected_Power_Loss_Count',
-                    230 : 'Percent_Write_Erase_Count',
-                    234 : 'Percent_Write_Erase_Count_BC'
-            }
-    }
-
-    def run_once(self, iteration=1, dev=''):
-        """
-        Read S.M.A.R.T attribute from target device
-
-        @param dev:    target device
-        """
-        if dev == '':
-            logging.info('Run rootdev to determine boot device')
-            dev = utils.get_root_device()
-
-        logging.info(str('dev: %s' % dev))
-
-        # Skip this test if dev is an eMMC device without raising an error
-        if re.match('.*mmc.*', dev):
-            logging.info('Target device is an eMMC device. Skip testing')
-            self.write_perf_keyval({'device_model' : 'eMMC'})
-            return
-
-        last_result = ''
-
-
-        # run multiple time to test the firmware part that retrieve SMART value
-        for loop in range(1, iteration + 1):
-            cmd = 'smartctl -a -f brief %s' % dev
-            result = utils.run(cmd, ignore_status=True)
-            exit_status = result.exit_status
-            result_text = result.stdout
-            result_lines = result_text.split('\n')
-
-            # log all line if line count is different
-            # otherwise log only changed line
-            if result_text != last_result:
-                logging.info(str('Iteration #%d' % loop))
-                last_result_lines = last_result.split('\n')
-                if len(last_result_lines) != len(result_lines):
-                    for line in result_lines:
-                        logging.info(line)
-                else:
-                    for i, line in enumerate(result_lines):
-                        if line != last_result_lines[i]:
-                            logging.info(line)
-                last_result = result_text
-
-            # Ignore error other than first two bits
-            if exit_status & 0x3:
-                # Error message should be in 4th line of the output
-                msg = 'Test failed with error: %s' % result_lines[3]
-                raise error.TestFail(msg)
-
-        logging.info(str('smartctl exit status: 0x%x' % exit_status))
-
-        # find drive model
-        lookup_table = {}
-        pattern = re.compile(self._SMARTCTL_DEVICE_MODEL_PATTERN)
-        for line in result_lines:
-            if pattern.match(line):
-                model = pattern.match(line).group('model')
-                for known_model in self._SMARTCTL_LOOKUP_TABLE:
-                    if model.startswith(known_model):
-                        lookup_table = self._SMARTCTL_LOOKUP_TABLE[known_model]
-                        break
-                break
-        else:
-            raise error.TestFail('Can not find drive model')
-
-        # Example of smart ctl result
-        # ID# ATTRIBUTE_NAME          FLAGS    VALUE WORST THRESH FAIL RAW_VALUE
-        #  12 Power_Cycle_Count       -O----   100   100   000    -    204
-        # use flag field to find a valid line
-        pattern = re.compile(self._SMARTCTL_RESULT_PATTERN)
-        keyval = {}
-        fail = []
-        for line in result_lines:
-            if not pattern.match(line):
-                continue
-            field = line.split()
-
-            id = int(field[0])
-            if id in lookup_table:
-                # look up table overwrite smartctl name
-                key = lookup_table[id]
-            else:
-                key = field[1] # ATTRIBUTE_NAME
-                if key == 'Unknown_Attribute':
-                    key = "Smart_Attribute_ID_%d" % id
-
-            keyval[key] = field[7] # RAW_VALUE
-
-            # check for failing attribute
-            if field[6] != '-':
-                fail += [key]
-
-        if len(keyval) == 0:
-            raise error.TestFail(
-                    'Test failed with error: Can not parse smartctl keyval')
-
-        if len(fail) > 0:
-            keyval['fail'] = fail
-
-        keyval['exit_status'] = exit_status
-        keyval['device_model'] = model
-        self.write_perf_keyval(keyval)
-
diff --git a/client/site_tests/hardware_SsdDetection/control b/client/site_tests/hardware_SsdDetection/control
index c830b70..1566c76 100644
--- a/client/site_tests/hardware_SsdDetection/control
+++ b/client/site_tests/hardware_SsdDetection/control
@@ -11,6 +11,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = "hardware"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test uses hdparm to determine if the rotation rate properties match that of
diff --git a/client/site_tests/hardware_SsdDetection/hardware_SsdDetection.py b/client/site_tests/hardware_SsdDetection/hardware_SsdDetection.py
index e9fb673..514fdd4 100644
--- a/client/site_tests/hardware_SsdDetection/hardware_SsdDetection.py
+++ b/client/site_tests/hardware_SsdDetection/hardware_SsdDetection.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/hardware_StorageFio/control b/client/site_tests/hardware_StorageFio/control
index 7b3133c..28e0ef9 100644
--- a/client/site_tests/hardware_StorageFio/control
+++ b/client/site_tests/hardware_StorageFio/control
@@ -11,6 +11,7 @@
 TEST_CLASS = "hardware"
 TEST_CATEGORY = 'Performance'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test uses FIO to spawn a number of threads to perform a particular type of
diff --git a/client/site_tests/hardware_StorageFio/control.bvt b/client/site_tests/hardware_StorageFio/control.bvt
index d3ca538..8dc2ece 100644
--- a/client/site_tests/hardware_StorageFio/control.bvt
+++ b/client/site_tests/hardware_StorageFio/control.bvt
@@ -7,8 +7,9 @@
 PURPOSE = 'Make sure that hardware StorageFio work'
 TIME = 'FAST'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 ATTRIBUTES = "suite:experimental"
 JOB_RETRIES = 2
 
 DOC = """
 This test uses FIO to make sure that the test works correctly.
diff --git a/client/site_tests/hardware_StorageFio/control.hwqual b/client/site_tests/hardware_StorageFio/control.hwqual
index 6eddcea..eaa571e 100644
--- a/client/site_tests/hardware_StorageFio/control.hwqual
+++ b/client/site_tests/hardware_StorageFio/control.hwqual
@@ -11,6 +11,7 @@
 TEST_CLASS = "hardware"
 TEST_CATEGORY = 'Performance'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test uses FIO to spawn a number of threads to perform a particular type of
diff --git a/client/site_tests/hardware_StorageFio/control.integrity b/client/site_tests/hardware_StorageFio/control.integrity
index b720f25..3a2e1eb 100644
--- a/client/site_tests/hardware_StorageFio/control.integrity
+++ b/client/site_tests/hardware_StorageFio/control.integrity
@@ -7,6 +7,7 @@
 PURPOSE = 'Verify that data written to disk remains valid after 72 hours.'
 TIME = 'LENGTHY'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test uses FIO to spawn a number of threads to perform a particular type of
diff --git a/client/site_tests/hardware_StorageFio/control.integrity_full_disk b/client/site_tests/hardware_StorageFio/control.integrity_full_disk
index 74b1a36..ec9ef09 100644
--- a/client/site_tests/hardware_StorageFio/control.integrity_full_disk
+++ b/client/site_tests/hardware_StorageFio/control.integrity_full_disk
@@ -7,6 +7,7 @@
 PURPOSE = 'Verify that data written to disk remains valid after 72 hours.'
 TIME = 'LENGTHY'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Test full drive integrity for internal disk.
diff --git a/client/site_tests/hardware_StorageFio/control.quick_integrity b/client/site_tests/hardware_StorageFio/control.quick_integrity
index 8454d5a..fa241b1 100644
--- a/client/site_tests/hardware_StorageFio/control.quick_integrity
+++ b/client/site_tests/hardware_StorageFio/control.quick_integrity
@@ -7,7 +7,8 @@
 PURPOSE = 'Short integrity test to verify the integrity test'
 TIME = 'LENGTHY'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 ATTRIBUTES = "suite:storagequal"
 
 DOC = """
 This test uses FIO to spawn a number of threads to perform a particular type of
diff --git a/client/site_tests/hardware_StorageFio/control.quicktest b/client/site_tests/hardware_StorageFio/control.quicktest
index 5f9cb11..993d1b4 100644
--- a/client/site_tests/hardware_StorageFio/control.quicktest
+++ b/client/site_tests/hardware_StorageFio/control.quicktest
@@ -11,6 +11,7 @@
 TEST_CLASS = "hardware"
 TEST_CATEGORY = 'Performance'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test uses FIO to spawn a number of threads to perform a particular type of
diff --git a/client/site_tests/hardware_StorageFio/control.stress b/client/site_tests/hardware_StorageFio/control.stress
index e3a12ee..a402508 100644
--- a/client/site_tests/hardware_StorageFio/control.stress
+++ b/client/site_tests/hardware_StorageFio/control.stress
@@ -7,7 +7,8 @@
 PURPOSE = 'Test the root device, when booted from a USB stick'
 TIME = 'LENGTHY'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 ATTRIBUTES = "suite:storagequal"
 
 DOC = """
 Send stress pattern to the disk for 30 minutes: 64K random write with 15% of
diff --git a/client/site_tests/hardware_StorageFio/control.trim b/client/site_tests/hardware_StorageFio/control.trim
index b254849..c9f574b 100644
--- a/client/site_tests/hardware_StorageFio/control.trim
+++ b/client/site_tests/hardware_StorageFio/control.trim
@@ -7,6 +7,7 @@
 PURPOSE = 'Test the root device for trim command support'
 TIME = 'MEDIUM'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test uses FIO to test for trim command support for the root device.
diff --git a/client/site_tests/hardware_StorageFio/control.vendor b/client/site_tests/hardware_StorageFio/control.vendor
index 48b33c6..73f6150 100644
--- a/client/site_tests/hardware_StorageFio/control.vendor
+++ b/client/site_tests/hardware_StorageFio/control.vendor
@@ -7,6 +7,7 @@
 PURPOSE = 'Test the internal device for new storage vendor'
 TIME = 'LENGTHY'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test uses FIO to test a new storage vendor using the following workload.
diff --git a/client/site_tests/hardware_StorageFio/hardware_StorageFio.py b/client/site_tests/hardware_StorageFio/hardware_StorageFio.py
index b517e95..4a8e807 100644
--- a/client/site_tests/hardware_StorageFio/hardware_StorageFio.py
+++ b/client/site_tests/hardware_StorageFio/hardware_StorageFio.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/hardware_StorageFioOther/control b/client/site_tests/hardware_StorageFioOther/control
index 3b80f03..97d094c 100644
--- a/client/site_tests/hardware_StorageFioOther/control
+++ b/client/site_tests/hardware_StorageFioOther/control
@@ -10,6 +10,7 @@
 TEST_CLASS = 'hardware'
 TEST_CATEGORY = 'Performance'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test uses FIO to spawn a number of threads to perform a particular type of
@@ -23,4 +24,4 @@
                         '_seq_read_read_bw >= 50 * 1024',
                         '_seq_write_write_bw >= 30 * 1024',
                         '_16k_write_write_iops >= 250',
-               ])
\ No newline at end of file
+               ])
diff --git a/client/site_tests/hardware_StorageFioOther/control.verify_usb b/client/site_tests/hardware_StorageFioOther/control.verify_usb
new file mode 100644
index 0000000..4032a0b
--- /dev/null
+++ b/client/site_tests/hardware_StorageFioOther/control.verify_usb
@@ -0,0 +1,24 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+NAME = 'hardware_StorageFioOther.verify_usb'
+AUTHOR = 'The Chromium OS Authors'
+PURPOSE = 'Verify storage performance using an external device'
+TIME = 'MEDIUM'
+TEST_CLASS = 'hardware'
+TEST_CATEGORY = 'Performance'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+DEPENDENCIES = "servo_state:WORKING"
+
+DOC = """
+Run fio sequential and random workloads and verify the data written.
+"""
+
+job.run_test('hardware_StorageFioOther', wait=0,
+             requirements=[
+                     ('seq_rw_verify', []),
+                     ('seq_rw_verify', ['v']),
+                     ('write_stress', []),
+                     ('write_stress', ['v'])])
diff --git a/client/site_tests/hardware_StorageFioOther/hardware_StorageFioOther.py b/client/site_tests/hardware_StorageFioOther/hardware_StorageFioOther.py
index 7cb9cca..cd86aa2 100644
--- a/client/site_tests/hardware_StorageFioOther/hardware_StorageFioOther.py
+++ b/client/site_tests/hardware_StorageFioOther/hardware_StorageFioOther.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/hardware_StorageTrim/control b/client/site_tests/hardware_StorageTrim/control
index da86b80..28f1472 100644
--- a/client/site_tests/hardware_StorageTrim/control
+++ b/client/site_tests/hardware_StorageTrim/control
@@ -8,6 +8,7 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test does the following: fill the drive 2 times using random write.
@@ -15,5 +16,3 @@
 """
 
 job.run_test('hardware_StorageTrim')
-
-
diff --git a/client/site_tests/hardware_StorageTrim/control.mmc b/client/site_tests/hardware_StorageTrim/control.mmc
index fd0a75a..6bb1b51 100644
--- a/client/site_tests/hardware_StorageTrim/control.mmc
+++ b/client/site_tests/hardware_StorageTrim/control.mmc
@@ -8,6 +8,7 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test does the following: fill the drive 2 times using random write.
diff --git a/client/site_tests/hardware_StorageTrim/hardware_StorageTrim.py b/client/site_tests/hardware_StorageTrim/hardware_StorageTrim.py
index c678fb6..4b92150 100644
--- a/client/site_tests/hardware_StorageTrim/hardware_StorageTrim.py
+++ b/client/site_tests/hardware_StorageTrim/hardware_StorageTrim.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -72,4 +73,3 @@
                           filesize=0,
                           requirements=requirements,
                           tag='after_trim')
-
diff --git a/client/site_tests/hardware_StorageWearoutDetect/control b/client/site_tests/hardware_StorageWearoutDetect/control
index 7fe05bb..d65a21a 100644
--- a/client/site_tests/hardware_StorageWearoutDetect/control
+++ b/client/site_tests/hardware_StorageWearoutDetect/control
@@ -8,12 +8,13 @@
 TIME = 'FAST'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 # This test measures the hardware health of individual DUTs. If it fails the DUT
 # needs to be replaced. This test should not run in bvt-cq which will cause
 # flakes due to random machine allocation when a DUT nears end of life (and slow
 # bug filing/DUT replacement in lab).
 ATTRIBUTES = "suite:bvt-perbuild"
 JOB_RETRIES = 2
 
 DOC = """
 This test checks wear out status for storage devices available in SMART for SSD
diff --git a/client/site_tests/hardware_StorageWearoutDetect/control.non_cache b/client/site_tests/hardware_StorageWearoutDetect/control.non_cache
index bd5b475..a943545 100644
--- a/client/site_tests/hardware_StorageWearoutDetect/control.non_cache
+++ b/client/site_tests/hardware_StorageWearoutDetect/control.non_cache
@@ -8,6 +8,7 @@
 TIME = 'FAST'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This test checks wear out status for storage devices available in SMART for SSD
diff --git a/client/site_tests/hardware_StorageWearoutDetect/hardware_StorageWearoutDetect.py b/client/site_tests/hardware_StorageWearoutDetect/hardware_StorageWearoutDetect.py
index d3786cf..b401413 100644
--- a/client/site_tests/hardware_StorageWearoutDetect/hardware_StorageWearoutDetect.py
+++ b/client/site_tests/hardware_StorageWearoutDetect/hardware_StorageWearoutDetect.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/hardware_TPMCheck/control b/client/site_tests/hardware_TPMCheck/control
index f128c91..4f11050 100644
--- a/client/site_tests/hardware_TPMCheck/control
+++ b/client/site_tests/hardware_TPMCheck/control
@@ -4,16 +4,17 @@
 
 AUTHOR = "The Chromium OS Authors"
 NAME = "hardware_TPMCheck"
-ATTRIBUTES = "suite:kernel_per-build_regression"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel"
 PURPOSE = "Basic check of the TPM state"
-CRITERIA = "Check that the TPM is in the expected state for use in Chrome OS."
+CRITERIA = "Check that the TPM is in the expected state for use in ChromeOS."
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
-    A basic sanity check of the state of the TPM (Trusted Platform Module)
+    A basic confidence check of the state of the TPM (Trusted Platform Module)
 """
 
 job.run_test('hardware_TPMCheck')
diff --git a/client/site_tests/hardware_TPMCheck/hardware_TPMCheck.py b/client/site_tests/hardware_TPMCheck/hardware_TPMCheck.py
index 6bfe9fd..ca8ed24 100644
--- a/client/site_tests/hardware_TPMCheck/hardware_TPMCheck.py
+++ b/client/site_tests/hardware_TPMCheck/hardware_TPMCheck.py
@@ -27,7 +27,7 @@
 
 # Expected permissions for NV indexes.
 PERM_EXPECTED_1_2 = {'0x1007': '0x8001', '0x1008': '0x1'}
-PERM_EXPECTED_2_0 = {'0x1007': '0x60054c01', '0x1008': '0x60050001'}
+PERM_EXPECTED_2_0 = {'0x1007': '0x60054c01', '0x1008': '(0x60050001|0x60054001)'}
 
 def missing_firmware_version():
     """Check for empty fwid.
diff --git a/client/site_tests/hardware_TPMLoadKey/control b/client/site_tests/hardware_TPMLoadKey/control
index 852acf9..f6dacbb 100644
--- a/client/site_tests/hardware_TPMLoadKey/control
+++ b/client/site_tests/hardware_TPMLoadKey/control
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "Client"
+PY_VERSION = 3
 
 DOC = """
     This test determines that the TPM can execute a TPM LoadKey function.
diff --git a/client/site_tests/hardware_TPMTakeOwnership/control b/client/site_tests/hardware_TPMTakeOwnership/control
index b44b062..fb225d3 100644
--- a/client/site_tests/hardware_TPMTakeOwnership/control
+++ b/client/site_tests/hardware_TPMTakeOwnership/control
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "Client"
+PY_VERSION = 3
 
 DOC = """
 This test exercises TPM_TakeOwnership functionality of a TPM (Trusted Platform
diff --git a/client/site_tests/hardware_TPMTakeOwnership/hardware_TPMTakeOwnership.py b/client/site_tests/hardware_TPMTakeOwnership/hardware_TPMTakeOwnership.py
index 5cf49f7..debc1bf 100644
--- a/client/site_tests/hardware_TPMTakeOwnership/hardware_TPMTakeOwnership.py
+++ b/client/site_tests/hardware_TPMTakeOwnership/hardware_TPMTakeOwnership.py
@@ -12,6 +12,9 @@
 
 
 class hardware_TPMTakeOwnership(test.test):
+    """
+    Autotest test case to measure TPM_TakeOwnership timing data.
+    """
     version = 1
 
 
@@ -124,7 +127,7 @@
                 smogcheck_util.runInSubprocess(['tpmc', 'clear'])
 
             # Output timing measurements
-            for k, v in timestamps.iteritems():
+            for k, v in timestamps.items():
                 sec, ms = divmod(v/1000, 1000)
                 key = 'iteration_%d_delay_in_sec' % k
                 delay_float = float(v)/1000000
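
The iteritems()-to-items() change here is one instance of the Python 2-to-3 conversions that recur throughout these hunks, alongside "except Error as e", 0o-prefixed octal literals, and explicit floor division with //. A standalone sketch of the Python 3 spellings, illustrative only and not code from the tree:

import os
import tempfile

timestamps = {1: 1500000, 2: 2250000}          # sample values in microseconds
for k, v in timestamps.items():                # py2 iteritems() no longer exists
    sec, ms = divmod(v // 1000, 1000)          # // keeps the division integral
    print('iteration %d: %d.%03d s' % (k, sec, ms))

fd, path = tempfile.mkstemp()
os.close(fd)
try:
    fd = os.open(path, os.O_RDWR, 0o666)       # octal literals need the 0o prefix
    os.close(fd)
except OSError as e:                           # 'except OSError, e' is py2-only syntax
    print('open failed: %s' % e)
finally:
    os.remove(path)
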
diff --git a/client/site_tests/hardware_TPMtspi/control b/client/site_tests/hardware_TPMtspi/control
index 757b1a6..73f623a 100644
--- a/client/site_tests/hardware_TPMtspi/control
+++ b/client/site_tests/hardware_TPMtspi/control
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "Client"
+PY_VERSION = 3
 
 DOC = """
 This test exercises the basic functionalities of the TPM
diff --git a/client/site_tests/hardware_TPMtspi/hardware_TPMtspi.py b/client/site_tests/hardware_TPMtspi/hardware_TPMtspi.py
index bd8931f..0e1ac2b 100644
--- a/client/site_tests/hardware_TPMtspi/hardware_TPMtspi.py
+++ b/client/site_tests/hardware_TPMtspi/hardware_TPMtspi.py
@@ -8,6 +8,9 @@
 
 
 class hardware_TPMtspi(test.test):
+    """
+    Exercises basic TPM functionality via the Python wrapper for C libtspi.
+    """
     version = 1
 
     def setup(self):
@@ -21,7 +24,7 @@
         """
         try:
             return smogcheck_tpm.TpmController()
-        except smogcheck_tpm.SmogcheckError, e:
+        except smogcheck_tpm.SmogcheckError as e:
             raise error.TestFail('Error creating a TpmController: %s', e)
 
     def run_once(self):
@@ -60,7 +63,7 @@
             # TODO(tgao): uncomment to enable.
             #self.tpm_obj.setTpmClearable('force')
 
-        except smogcheck_tpm.SmogcheckError, e:
+        except smogcheck_tpm.SmogcheckError as e:
             raise error.TestFail('Error: %r' % e)
         finally:
             # Close TPM context
diff --git a/client/site_tests/hardware_TPMttci/control b/client/site_tests/hardware_TPMttci/control
index 74c677c..c98fb1f 100644
--- a/client/site_tests/hardware_TPMttci/control
+++ b/client/site_tests/hardware_TPMttci/control
@@ -5,14 +5,15 @@
 NAME = "hardware_TPMttci"
 AUTHOR = "The Chromium OS Authors"
 PURPOSE = "Exercise TPM TTCI board modules with TPM"
-CRITERIA = "Check TTCI module functionality and TPM sanity check"
+CRITERIA = "Check TTCI module functionality and TPM confidence check"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "Client"
+PY_VERSION = 3
 
 DOC = """
+This test exercises the two modules on the TTCI board and performs a basic confidence
+This test exercises the two modules on TTCI board and performs basic confidence
 check of the TPM (Trusted Platform Module) module (version & self-test).
 """
 
diff --git a/client/site_tests/hardware_TPMttci/hardware_TPMttci.py b/client/site_tests/hardware_TPMttci/hardware_TPMttci.py
index c8d70e5..c869756 100644
--- a/client/site_tests/hardware_TPMttci/hardware_TPMttci.py
+++ b/client/site_tests/hardware_TPMttci/hardware_TPMttci.py
@@ -2,12 +2,15 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import datetime, logging, subprocess, time
+import datetime, logging
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error, smogcheck_ttci, smogcheck_util
 
 
 class hardware_TPMttci(test.test):
+    """
+    Autotest test case that utilizes the C shared library for TPM SmogCheck.
+    """
     version = 1
 
     def setup(self):
@@ -26,7 +29,7 @@
         """
         try:
             self.ttci_obj = smogcheck_ttci.TtciController()
-        except smogcheck_ttci.TtciError, e:
+        except smogcheck_ttci.TtciError as e:
             raise error.TestFail('Error creating a TtciController: %s' % e)
 
     def _getMainPowerStatus(self):
diff --git a/client/site_tests/hardware_Thermal/control b/client/site_tests/hardware_Thermal/control
deleted file mode 100644
index 2359cb1..0000000
--- a/client/site_tests/hardware_Thermal/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'hardware_Thermal'
-AUTHOR = 'The Chromium OS Authors'
-PURPOSE = 'Ensure the temp sensor is working.'
-CRITERIA = 'Fails if the temp sensor is not in reasonable range.'
-TIME='SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = "hardware"
-TEST_TYPE = 'client'
-
-DOC = """
-This is a hardware test for temp sensor.  The test uses mosys to read temp
-sensor value and check it's in reasonable range.
-"""
-
-job.run_test('hardware_Thermal')
diff --git a/client/site_tests/hardware_Thermal/hardware_Thermal.py b/client/site_tests/hardware_Thermal/hardware_Thermal.py
deleted file mode 100644
index 5e89c9d..0000000
--- a/client/site_tests/hardware_Thermal/hardware_Thermal.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-# DESCRIPTION :
-#
-# Hardware test for temp sensor.  The test uses mosys to read temp sensor value
-# and check it's in reasonable range.
-
-
-import re
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-# Reasonable temp range for different temp units.
-TEMP_RANGE = {
-    'degrees C': (0, 100),
-}
-
-
-class TempSensor(object):
-    MOSYS_OUTPUT_RE = re.compile('(\w+)="(.*?)"')
-
-    def __init__(self, name):
-        self._name = name
-
-    def get_values(self):
-        values = {}
-        cmd = 'mosys -k sensor print thermal %s' % self._name
-        for kv in self.MOSYS_OUTPUT_RE.finditer(utils.system_output(cmd)):
-            key, value = kv.groups()
-            if key == 'reading':
-                value = int(value)
-            values[key] = value
-        return values
-
-    def get_units(self):
-        return self.get_values()['units']
-
-    def get_reading(self):
-        return self.get_values()['reading']
-
-
-class hardware_Thermal(test.test):
-    version = 1
-
-    def run_once(self, temp_sensor_names=['temp0']):
-        if not temp_sensor_names:
-            raise error.TestError('No temp sensor specified')
-
-        for name in temp_sensor_names:
-            ts = TempSensor(name)
-            units = ts.get_units()
-            try:
-                low, high = TEMP_RANGE[units]
-            except KeyError:
-                raise error.TestError('Unknown temp units of %s' % name)
-            if not low <= ts.get_reading() <= high:
-                raise error.TestError('Temperature of %s out of range' % name)
diff --git a/client/site_tests/hardware_TouchScreenPowerCycles/control b/client/site_tests/hardware_TouchScreenPowerCycles/control
deleted file mode 100644
index cffb3e6..0000000
--- a/client/site_tests/hardware_TouchScreenPowerCycles/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "hardware_TouchScreenPowerCycles"
-AUTHOR = "Chrome OS Team"
-PURPOSE = "Check whether there are spurious contacts."
-CRITERIA = "Fails if there are any spurious contacts."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "hardware"
-TEST_TYPE = "Client"
-
-DOC = """
-Check if there are any spurious contacts when power is cycled.
-"""
-
-job.run_test('hardware_TouchScreenPowerCycles')
diff --git a/client/site_tests/hardware_TouchScreenPowerCycles/hardware_TouchScreenPowerCycles.py b/client/site_tests/hardware_TouchScreenPowerCycles/hardware_TouchScreenPowerCycles.py
deleted file mode 100644
index 96a982b..0000000
--- a/client/site_tests/hardware_TouchScreenPowerCycles/hardware_TouchScreenPowerCycles.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import re
-import subprocess
-import tempfile
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.graphics import graphics_utils
-
-
-class hardware_TouchScreenPowerCycles(test.test):
-    """Check if there are any spurious contacts when power is cycled."""
-    version = 1
-
-    SCREEN_ON = 1
-    SCREEN_OFF = 0
-
-    def initialize(self):
-        self.touch_screen_device = self._probe_touch_screen_device()
-        logging.info('Touchscreen device: %s', self.touch_screen_device)
-        if self.touch_screen_device is None:
-            raise error.TestError('No touch screen device is found.')
-
-        # Make sure that the screen is turned on before conducting the test.
-        self._wakeup_screen()
-        self.touch_screen_status = self.SCREEN_ON
-
-    def _wakeup_screen(self):
-        """Wake up the screen if it is dark."""
-        graphics_utils.screen_wakeup()
-        time.sleep(2)
-
-    def _touch_screen_on(self, interval):
-        """Turn the touch screen on."""
-        graphics_utils.switch_screen_on(on=1)
-        self.touch_screen_status = self.SCREEN_ON
-        logging.info('Touchscreen is turned on')
-        time.sleep(interval)
-
-    def _touch_screen_off(self, interval):
-        """Turn the touch screen off."""
-        graphics_utils.switch_screen_on(on=0)
-        self.touch_screen_status = self.SCREEN_OFF
-        logging.info('Touchscreen is turned off')
-        time.sleep(interval)
-
-    def _probe_touch_screen_device(self):
-        """Probe the touch screen device file."""
-        device_info_file = '/proc/bus/input/devices'
-        if not os.path.exists(device_info_file):
-            return None
-        with open(device_info_file) as f:
-            device_info = f.read()
-
-        touch_screen_pattern = re.compile('name=.+%s' % 'Touchscreen', re.I)
-        event_pattern = re.compile('handlers=.*event(\d+)', re.I)
-        found_touch_screen = False
-        touch_screen_device_file = None
-        for line in device_info.splitlines():
-            if (not found_touch_screen and
-                touch_screen_pattern.search(line) is not None):
-                found_touch_screen = True
-            elif found_touch_screen:
-                result = event_pattern.search(line)
-                if result is not None:
-                    event_no = int(result.group(1))
-                    device_file = '/dev/input/event%d' % event_no
-                    if os.path.exists(device_file):
-                        touch_screen_device_file = device_file
-                    break
-        return touch_screen_device_file
-
-    def _begin_recording(self):
-        """Begin a recording process."""
-        record_program = 'evemu-record'
-        record_cmd = '%s %s -1' % (record_program, self.touch_screen_device)
-        self.event_file = tempfile.TemporaryFile()
-        self.rec_proc = subprocess.Popen(record_cmd.split(),
-                                         stdout=self.event_file)
-
-    def _end_recording(self):
-        """Terminate recording process, and read/close the temp event file."""
-        self.rec_proc.terminate()
-        self.rec_proc.wait()
-        self.event_file.seek(0)
-        self.events = self.event_file.readlines()
-        self.event_file.close()
-
-    def _get_timestamp(self, event):
-        """Get the timestamp of an event.
-
-        A device event looks like: "E: 1344225607.043493 0003 0036 202"
-        """
-        result = re.search('E:\s*(\d+(\.\d*)?|\.\d+)', event)
-        timestamp = float(result.group(1)) if result else None
-        return timestamp
-
-    def _get_number_touch_contacts(self):
-        """Get the number of touch contacts.
-
-        Count ABS_MT_TRACKING_ID with a positive ID number but not -1
-        For example:
-            count this event:          "E: 1365999572.107771 0003 0039 405"
-            do not count this event:   "E: 1365999572.107771 0003 0039 -1"
-        """
-        touch_pattern = re.compile('^E:.*\s*0003\s*0039\s*\d+')
-        count_contacts = len(filter(touch_pattern.search, self.events))
-        return count_contacts
-
-    def run_once(self, repeated_times=5, interval=30):
-        """Run through power cycles and check spurious contacts.
-
-        @param repeated_times: the number of power on/off cycles to check.
-        @param interval: the power on/off duration in seconds.
-
-        Turn the power on for 30 seconds, and then turn it off for another
-        30 seconds. Repeat it for 5 times.
-        """
-        count_contacts_list = []
-        count_rounds = 0
-        for _ in range(repeated_times):
-            self._begin_recording()
-            self._touch_screen_off(interval)
-            self._touch_screen_on(interval)
-            self._end_recording()
-            count_contacts = self._get_number_touch_contacts()
-            count_contacts_list.append(count_contacts)
-            if count_contacts > 0:
-                count_rounds += 1
-
-        if count_rounds > 0:
-            msg1 = ('Spurious contacts detected %d out of %d iterations.' %
-                    (count_rounds, repeated_times))
-            msg2 = 'Count of touch contacts: %s' % str(count_contacts_list)
-            ave = float(sum(count_contacts_list)) / len(count_contacts_list)
-            msg3 = 'Average count of touch contacts: %.2f' % ave
-            raise error.TestFail('\n'.join(['', msg1, msg2, msg3]))
diff --git a/client/site_tests/hardware_TrimIntegrity/control b/client/site_tests/hardware_TrimIntegrity/control
index 9107a32..96e54d9 100644
--- a/client/site_tests/hardware_TrimIntegrity/control
+++ b/client/site_tests/hardware_TrimIntegrity/control
@@ -2,11 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 NAME = 'hardware_TrimIntegrity'
 AUTHOR = 'puthik'
 PURPOSE = 'Perform data integrity trim test on an unmounted partition.'
 TIME = 'LENGTHY'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Performs data integrity trim test on an unmounted partition.
diff --git a/client/site_tests/hardware_TrimIntegrity/control.full_disk b/client/site_tests/hardware_TrimIntegrity/control.full_disk
index afc7f67..9d23389 100644
--- a/client/site_tests/hardware_TrimIntegrity/control.full_disk
+++ b/client/site_tests/hardware_TrimIntegrity/control.full_disk
@@ -2,11 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 NAME = 'hardware_TrimIntegrity.full_disk'
 AUTHOR = 'puthik'
 PURPOSE = 'Perform data integrity trim test on internal disk.'
 TIME = 'LENGTHY'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 ATTRIBUTES = "suite:mmc_qual"
 
 DOC = """
diff --git a/client/site_tests/hardware_TrimIntegrity/control.quick b/client/site_tests/hardware_TrimIntegrity/control.quick
index f80af29..71a79e2 100644
--- a/client/site_tests/hardware_TrimIntegrity/control.quick
+++ b/client/site_tests/hardware_TrimIntegrity/control.quick
@@ -2,11 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 NAME = 'hardware_TrimIntegrity.quick'
 AUTHOR = 'puthik'
 PURPOSE = 'Perform data integrity trim test on an unmounted partition.'
 TIME = 'SHORT'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 ATTRIBUTES = "suite:experimental"
 
 DOC = """
diff --git a/client/site_tests/hardware_TrimIntegrity/hardware_TrimIntegrity.py b/client/site_tests/hardware_TrimIntegrity/hardware_TrimIntegrity.py
index 880920c..95b9ceb 100644
--- a/client/site_tests/hardware_TrimIntegrity/hardware_TrimIntegrity.py
+++ b/client/site_tests/hardware_TrimIntegrity/hardware_TrimIntegrity.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -69,9 +70,9 @@
         @param size: size to try the trim command
         """
         try:
-            fd = os.open(self._filename, os.O_RDWR, 0666)
+            fd = os.open(self._filename, os.O_RDWR, 0o666)
             self._do_trim(fd, 0, size)
-        except IOError, err:
+        except IOError as err:
             if err.errno == self.IOCTL_NOT_SUPPORT_ERRNO:
                 reason = 'IOCTL Does not support trim.'
                 msg = utils.get_storage_error_msg(self._diskname, reason)
@@ -131,11 +132,6 @@
         # Make file size multiple of 4 * chunk size
         file_size -= file_size % (4 * chunk_size)
 
-        if fulldisk:
-            fio_file_size = 0
-        else:
-            fio_file_size = file_size
-
         logging.info('filename: %s, filesize: %d', self._filename, file_size)
 
         self._verify_trim_support(chunk_size)
@@ -152,7 +148,7 @@
         trim_hash = ""
 
         # Write random data to disk
-        chunk_count = file_size / chunk_size
+        chunk_count = file_size // chunk_size
         cmd = str('dd if=/dev/urandom of=%s bs=%d count=%d oflag=direct' %
                   (self._filename, chunk_size, chunk_count))
         utils.run(cmd)
@@ -162,7 +158,7 @@
         # Check read speed/latency when reading real data.
         self.job.run_test('hardware_StorageFio',
                           disable_sysinfo=True,
-                          filesize=fio_file_size,
+                          filesize=file_size,
                           blkdiscard=False,
                           requirements=[('4k_read_qd32', [])],
                           tag='before_trim')
@@ -187,7 +183,7 @@
             # Do trim
             begin_trim_chunk = int(last_ratio * chunk_count)
             end_trim_chunk = int(ratio * chunk_count)
-            fd = os.open(self._filename, os.O_RDWR, 0666)
+            fd = os.open(self._filename, os.O_RDWR, 0o666)
             for chunk in trim_order[begin_trim_chunk:end_trim_chunk]:
                 self._do_trim(fd, chunk * chunk_size, chunk_size)
                 trim_status[chunk] = True
@@ -230,7 +226,7 @@
         # Check read speed/latency when reading from trimmed data.
         self.job.run_test('hardware_StorageFio',
                           disable_sysinfo=True,
-                          filesize=fio_file_size,
+                          filesize=file_size,
                           blkdiscard=False,
                           requirements=[('4k_read_qd32', [])],
                           tag='after_trim')
diff --git a/client/site_tests/hardware_UnsafeMemory/control b/client/site_tests/hardware_UnsafeMemory/control
index 6b6ffa6..970c502 100644
--- a/client/site_tests/hardware_UnsafeMemory/control
+++ b/client/site_tests/hardware_UnsafeMemory/control
@@ -13,6 +13,9 @@
 # TODO(wad) once we have an arm and 32-bit build, we need it in hwqual.
 #SUITE = 'hwqual'
 JOB_RETRIES = 0
+ATTRIBUTES = "suite:memory_qual2"
+EXTENDED_TIMEOUT = 19800 # 5.5 hours
+PY_VERSION = 3
 
 DOC = """
 This test uses rowhammer-test to find memory faults that may lead to violations
diff --git a/client/site_tests/hardware_UnsafeMemory/control.quick b/client/site_tests/hardware_UnsafeMemory/control.quick
index ce78950..8114e63 100644
--- a/client/site_tests/hardware_UnsafeMemory/control.quick
+++ b/client/site_tests/hardware_UnsafeMemory/control.quick
@@ -11,6 +11,7 @@
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'client'
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test uses rowhammer-test to find memory faults that may lead to violations
diff --git a/client/site_tests/hardware_UnsafeMemory/hardware_UnsafeMemory.py b/client/site_tests/hardware_UnsafeMemory/hardware_UnsafeMemory.py
index 68062be..3fe27b1 100644
--- a/client/site_tests/hardware_UnsafeMemory/hardware_UnsafeMemory.py
+++ b/client/site_tests/hardware_UnsafeMemory/hardware_UnsafeMemory.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -36,7 +37,7 @@
                 '/sys/devices/virtual/thermal/thermal_zone1/temp')
         except:
             pass
-        return (therm0, therm1)
+        return ' '.join([therm0, therm1])
 
     def run_once(self, sec=(60*25)):
         """
@@ -46,17 +47,17 @@
         """
         self._hammer_path = os.path.join(self.srcdir, self._DIR_NAME,
                                          'rowhammer_test')
-        logging.info('cmd: %s %d' % (self._hammer_path, sec))
+        logging.info('cmd: %s %d', self._hammer_path, sec)
         # Grab the CPU temperature before hand if possible.
-        logging.info('start temp: %s %s' % self.get_thermals())
+        logging.info('start temp: %s', self.get_thermals())
         try:
             output = subprocess.check_output([self._hammer_path, '%d' % sec])
             logging.info("run complete. Output below:")
             logging.info(output)
-        except subprocess.CalledProcessError, e:
+        except subprocess.CalledProcessError as e:
             logging.error("Unsafe memory found!")
             logging.error(e.output)
-            logging.info('end temp: %s %s' % self.get_thermals())
+            logging.info('end temp: %s', self.get_thermals())
             raise error.TestFail('Unsafe memory found!')
-        logging.info('end temp: %s %s' % self.get_thermals())
+        logging.info('end temp: %s', self.get_thermals())
         return True
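
This hunk also stops pre-formatting log messages with the % operator and instead passes the arguments to logging directly, which defers interpolation until a record is actually emitted and keeps the calls in step with get_thermals() now returning a single joined string. A small sketch of the lazy form, with hypothetical values:

import logging
logging.basicConfig(level=logging.INFO)

hammer_path, sec = '/usr/local/rowhammer_test', 1500
logging.info('cmd: %s %d', hammer_path, sec)       # interpolated only when emitted
logging.debug('cmd: %s %d', hammer_path, sec)      # skipped entirely at INFO level
logging.info('cmd: %s %d' % (hammer_path, sec))    # eager form: string built regardless
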
diff --git a/client/site_tests/hardware_Usb30Throughput/control b/client/site_tests/hardware_Usb30Throughput/control
deleted file mode 100644
index c7f85b9..0000000
--- a/client/site_tests/hardware_Usb30Throughput/control
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2012 Collabora Ltd. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros import storage as storage_mod
-
-NAME = "hardware_Usb30Throughput"
-AUTHOR = "Cosimo Alfarano <cosimo.alfarano@collabora.co.uk>"
-PURPOSE = "Check that there are no transfer speed downgrade after suspend."
-CRITERIA = "Fails if transfer rate is below expectations"
-TIME="SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "hardware"
-TEST_TYPE = "client"
-
-DOC = """Measure transfer rates for a number of times.
-If any of the transfer rate is below expectation the test fails.
-
-This test need a high-speed USB 3.0 device connected, with a mountable file
-system on a single partition.
-No user should be logged in via GUI to avoid automounter interfere with the
-test.
-
-@param measurements: (10) number of times to repeat xfer rate measureaments
-@param size: (10Mb) the size of the file to transfer for each |measureaments|
-@param fs_uuid: UUID for USB storage define volume, if auto detection does not
-       work.
-@param min_speed: (300Mb/sec) a float number for the minimum speed accepted.
-       Any |measureaments| performing below it will make the test fail
-"""
-
-storage_filter, args_dict = storage_mod.args_to_storage_dict(args)
-if not storage_filter:
-    storage_filter = {'bus': 'usb'}
-measurements = int(args_dict.get('measurements', 10))
-size = int(args_dict.get('size', 10))
-min_speed = float(args_dict.get('min_speed', 300))
-
-job.run_test('hardware_Usb30Throughput', storage_filter=storage_filter,
-             measurements=measurements, size=size, min_speed=min_speed)
diff --git a/client/site_tests/hardware_Usb30Throughput/hardware_Usb30Throughput.py b/client/site_tests/hardware_Usb30Throughput/hardware_Usb30Throughput.py
deleted file mode 100644
index fcc8251..0000000
--- a/client/site_tests/hardware_Usb30Throughput/hardware_Usb30Throughput.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# Author: Cosimo Alfarano <cosimo.alfarano@collabora.co.uk>
-
-import datetime
-import logging
-import os
-
-from autotest_lib.client.cros import storage as storage_mod
-from autotest_lib.client.common_lib import autotemp, error
-from autotest_lib.client.bin import utils
-
-USECS_IN_SEC = 1000000.0
-
-class hardware_Usb30Throughput(storage_mod.StorageTester):
-    version = 1
-    preserve_srcdir = True
-    _autosrc = None
-    _autodst = None
-    results = {}
-
-
-    def cleanup(self):
-        if self._autosrc:
-            self._autosrc.clean()
-        if self._autodst:
-            self._autodst.clean()
-
-        self.scanner.unmount_all()
-
-        super(hardware_Usb30Throughput, self).cleanup()
-
-
-    def run_once(self, measurements=5, size=1, min_speed=300.0):
-        """
-        @param measurements: (int) the number of measurements to do.
-                For the test to fail at least one measurement needs to be
-                below |min_speed|
-        @param size: (int) size of the file to be copied for testing the
-                transfer rate, it represent the size in megabytes.
-                Generally speaking, the bigger is the file used for
-                |measurements| the slower the test will run and the more
-                accurate it will be.
-                e.g.: 10 is 10MB, 101 is 101MB
-        @param min_speed: (float) in Mbit/sec. It's the min throughput a USB 3.0
-                device should perform to be accepted. Conceptually it's the max
-                USB 3.0 throughput minus a tollerance.
-                Defaults to 300Mbit/sec (ie 350Mbits/sec minus ~15% tollerance)
-        """
-        volume_filter = {'bus': 'usb'}
-        storage = self.wait_for_device(volume_filter, cycles=1,
-                                       mount_volume=True)[0]
-
-        # in Megabytes (power of 10, to be consistent with the throughput unit)
-        size *= 1000*1000
-
-        self._autosrc = autotemp.tempfile(unique_id='autotest.src',
-                                          dir=storage['mountpoint'])
-        self._autodst = autotemp.tempfile(unique_id='autotest.dst',
-                                          dir=self.tmpdir)
-
-        # Create random file
-        storage_mod.create_file(self._autosrc.name, size)
-
-        num_failures = 0
-        for measurement in range(measurements):
-            xfer_rate = get_xfer_rate(self._autosrc.name, self._autodst.name)
-            key = 'Mbit_per_sec_measurement_%d' % measurement
-            self.results[key] = xfer_rate
-            logging.debug('xfer rate (measurement %d) %.2f (min=%.2f)',
-                          measurement, xfer_rate, min_speed)
-
-            if xfer_rate < min_speed:
-                num_failures += 1
-
-        # Apparently self.postprocess_iteration is not called on TestFail
-        # so we need to process data here in order to have some performance log
-        # even on TestFail
-        self.results['Mbit_per_sec_average'] = (sum(self.results.values()) /
-            len(self.results))
-        self.write_perf_keyval(self.results)
-
-        if num_failures > 0:
-            msg = ('%d/%d measured transfer rates under performed '
-                   '(min_speed=%.2fMbit/sec)' % (num_failures, measurements,
-                   min_speed))
-            raise error.TestFail(msg)
-
-
-def get_xfer_rate(src, dst):
-    """Compute transfer rate from src to dst as Mbit/sec
-
-    Execute a copy from |src| to |dst| and returns the file copy transfer rate
-    in Mbit/sec
-
-    @param src, dst: paths for source and destination
-
-    @return transfer rate (float) in Mbit/sec
-    """
-    assert os.path.isfile(src)
-    assert os.path.isfile(dst)
-
-    utils.drop_caches()
-    start = datetime.datetime.now()
-    utils.force_copy(src, dst)
-    end = datetime.datetime.now()
-    delta = end - start
-
-    # compute seconds (as float) from microsecs
-    delta_secs = delta.seconds + (delta.microseconds/USECS_IN_SEC)
-    # compute Mbit from bytes
-    size_Mbit = (os.path.getsize(src)*8.0)/(1000*1000)
-
-    logging.info('file transferred: size (Mbits): %f, start: %f, end: %f,'
-                 ' delta (secs): %f',
-                 size_Mbit,
-                 start.second+start.microsecond/USECS_IN_SEC,
-                 end.second+end.microsecond/USECS_IN_SEC,
-                 delta_secs)
-
-    # return the transfer rate in Mbit/sec
-    return size_Mbit / delta_secs
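
The arithmetic in get_xfer_rate() above amounts to dividing the file size in Mbit by the wall-clock duration of one copy. A minimal standalone sketch of the same idea, using only the standard library (time.monotonic() for timing and shutil for the copy); the paths in the usage comment are placeholders, not part of the test:

import os
import shutil
import time

def measure_copy_mbit_per_sec(src, dst):
    """Copy src to dst once and return the observed rate in Mbit/sec."""
    start = time.monotonic()          # monotonic clock, immune to RTC steps
    shutil.copyfile(src, dst)
    elapsed = time.monotonic() - start
    size_mbit = os.path.getsize(src) * 8.0 / (1000 * 1000)
    return size_mbit / elapsed

# Example (hypothetical paths):
# rate = measure_copy_mbit_per_sec('/media/usb/test.bin', '/tmp/test.bin')
# print('%.2f Mbit/sec' % rate)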
diff --git a/client/site_tests/hardware_UsbBasicFileOperations/control b/client/site_tests/hardware_UsbBasicFileOperations/control
deleted file mode 100644
index 3ef560a..0000000
--- a/client/site_tests/hardware_UsbBasicFileOperations/control
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2012 Collabora Ltd. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "hardware_UsbBasicFileOperations"
-AUTHOR = "Cosimo Alfarano <cosimo.alfarano@collabora.co.uk>"
-PURPOSE = "Test USB drive file-based operations"
-CRITERIA = "Fail if open/copy/move/remove and modify content operations fail"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "hardware"
-TEST_TYPE = "client"
-
-DOC = """
-Try to open a file, move/copy and remove it from the filesystem.
-Check that it's possible to change the file as well.
-
-This test requires a USB drive inserted before the test begins.
-It also needs a single partition on the drive, the file system type is not
-relevant as long as it's mountable by the system.
-
-@param device,bus,model,size,fs_uuid: @see client.cros.storage.StorageScanner
-                                      doc for parameter meaning.
-                                      A single filter key can be passed as
-                                      parameter.
-                                      Use those parameters only if the test
-                                      cannot detect your device by default.
-"""
-from autotest_lib.client.cros import storage as storage_mod
-
-volume_filter, args_dict = storage_mod.args_to_storage_dict(args)
-if not volume_filter:
-    volume_filter = {"bus": "usb"}
-
-job.run_test("hardware_UsbBasicFileOperations", volume_filter=volume_filter)
diff --git a/client/site_tests/hardware_UsbBasicFileOperations/hardware_UsbBasicFileOperations.py b/client/site_tests/hardware_UsbBasicFileOperations/hardware_UsbBasicFileOperations.py
deleted file mode 100644
index 3739d2a..0000000
--- a/client/site_tests/hardware_UsbBasicFileOperations/hardware_UsbBasicFileOperations.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-from autotest_lib.client.bin import utils
-from autotest_lib.client.cros import storage as storage_mod
-from autotest_lib.client.common_lib import autotemp, error
-
-
-class hardware_UsbBasicFileOperations(storage_mod.StorageTester):
-    version = 1
-    preserve_srcdir = True
-    _src, _dst = None, None
-
-
-    def run_once(self, volume_filter={'bus':'usb'}):
-        storage = self.wait_for_device(volume_filter, cycles=1,
-                                       mount_volume=True)[0]
-        mount_point = storage['mountpoint']
-
-        # 1 MiB
-        size = 1*1024*1024
-
-        self._src = autotemp.tempfile(unique_id='tmpfile',
-                                      dir=mount_point)
-        self._dst = autotemp.tempfile(unique_id='autotest',
-                                      dir=self.tmpdir)
-        # Step 1: check if file creation works
-        try:
-            storage_mod.create_file(self._src.name, size)
-        except error.CmdError, e:
-            msg = ('fatal error occurred during file creation: '
-                   'basic file operation failed: %s' % e)
-            raise error.TestFail(msg)
-
-        # not part of current check, remember the value for later use
-        src_md5 = storage_mod.checksum_file(self._src.name)
-
-        # Step 2: check if open works
-        try:
-            f = open(self._src.name, 'rb')
-        except Exception, e:
-            msg = ('fatal error occurred during open(): '
-                   'basic file operation failed: %s' % e)
-            raise error.TestFail(msg)
-
-        try:
-            f.read()
-        except Exception, e:
-            msg = ('fatal error occurred during read(): '
-                   'basic file operation failed: %s' % e)
-            raise error.TestFail(msg)
-
-        try:
-            f.close()
-        except Exception, e:
-            msg = ('fatal error occurred during close(): '
-                   'basic file operation failed: %s' % e)
-            raise error.TestFail(msg)
-
-
-        # Step 3: check if file copy works
-        try:
-            utils.force_copy(self._src.name, self._dst.name)
-        except Exception, e:
-            msg = ('fatal error occurred during a file copy: '
-                   'basic file operation failed: %s' % e)
-            raise error.TestFail(msg)
-
-        if src_md5 != storage_mod.checksum_file(self._dst.name):
-            msg = ('fatal error occurred during a file copy, '
-                   'md5 from origin and from destination are different: '
-                   'basic file operation failed')
-            raise error.TestFail(msg)
-
-
-        # Step 4: check if file removal works
-        try:
-            os.remove(self._src.name)
-        except OSError, e:
-            msg = ('fatal error occurred during file removal: '
-                   'basic file operation failed: %s' % e)
-            raise error.TestFail(msg)
-
-        if os.path.isfile(self._src.name):
-            msg = ('fatal error occurred during file removal: '
-                   'file still present after command, '
-                   'basic file operation failed')
-            raise error.TestFail(msg)
-
-        utils.drop_caches()
-
-        if os.path.isfile(self._src.name):
-            msg = ('fatal error occurred during file removal: '
-                   'file still present after command issued and '
-                   'disk caches flushed, '
-                   'basic file operation failed')
-            raise error.TestFail(msg)
-
-        # Step 5: check if modification to a file are persistent
-        # copy file, modify src and modify dst the same way, checksum
-        storage_mod.create_file(self._src.name, size)
-        utils.force_copy(self._src.name, self._dst.name)
-
-        # apply the same change to both files (which are identical in origin)
-        src_md5 = modify_file(self._src.name)
-        dst_md5 = modify_file(self._dst.name)
-
-        # both copies of the file have to be the same
-        if src_md5 != dst_md5:
-            msg = ('fatal error occurred after modifying src and dst: '
-                   'md5 checksums differ - %s / %s ,'
-                   'basic file operation failed' % (src_md5, dst_md5))
-            raise error.TestFail(msg)
-
-
-    def cleanup(self):
-        if self._src:
-            self._src.clean()
-        if self._dst:
-            self._dst.clean()
-
-        self.scanner.unmount_all()
-
-        super(hardware_UsbBasicFileOperations, self).cleanup()
-
-
-def modify_file(path):
-    '''Modify a file returning its new MD5
-
-    Open |path|, change a byte within the file and return the new md5.
-
-    The change applied to the file is based on the file content and size.
-    This means that identical files will result in identical changes and thus
-    will return the same MD5.
-
-    @param path: a path to the file to be modified
-    @return the MD5 of |path| after the modification
-    '''
-    position = os.path.getsize(path) / 2
-
-    # modifying the file means: read a byte, increase its value and write it back
-    # given the same file (identical in size and bytes) it will apply the same
-    # change
-    f = open(path, 'r+b')
-    f.seek(position)
-    c = f.read(1)
-    f.seek(position)
-    f.write(chr(ord(c)+1))
-    f.close()
-    return storage_mod.checksum_file(path)
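
storage_mod.create_file() and storage_mod.checksum_file() come from the autotest client library (client/cros/storage.py) and are not shown in this diff. A rough stand-in for the checksum helper, assuming an MD5 over the whole file read in chunks (the function name and chunk size here are illustrative, not the library's actual API):

import hashlib

def checksum_file(path, chunk_size=1 << 20):
    """Return the hex MD5 digest of the file at |path|, read in chunks."""
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()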
diff --git a/client/site_tests/hardware_UsbMount/control b/client/site_tests/hardware_UsbMount/control
deleted file mode 100644
index 42600a3..0000000
--- a/client/site_tests/hardware_UsbMount/control
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros import storage as storage_mod
-
-NAME = "hardware_UsbMount"
-AUTHOR = "Cosimo Alfarano <cosimo.alfarano@collabora.co.uk>"
-PURPOSE = "Verify a USB drive can mount/umount"
-CRITERIA = """Fails if it cannot issue mount/umount commands or if filesystem
-is not still mounted after suspend.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "hardware"
-TEST_TYPE = "client"
-
-DOC = """
-Mounts/unmounts a filesystem several times, creating a file and checking its
-presence/content integrity between mounting cycles.
-
-Also checks that after suspending with the file system mounted and resuming,
-the file system is still mounted.
-
-This test requires a USB drive inserted before the test begins.
-This test will prepare the USB drive formatting its partition before starting
-the test.
-"""
-
-job.run_test("hardware_UsbMount")
diff --git a/client/site_tests/hardware_UsbMount/hardware_UsbMount.py b/client/site_tests/hardware_UsbMount/hardware_UsbMount.py
deleted file mode 100644
index 1495e19..0000000
--- a/client/site_tests/hardware_UsbMount/hardware_UsbMount.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# Author: Cosimo Alfarano <cosimo.alfarano@collabora.co.uk>
-
-import logging, os
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import rtc
-from autotest_lib.client.cros.power import sys_power
-from autotest_lib.client.cros import storage as storage_mod
-
-
-class hardware_UsbMount(storage_mod.StorageTester):
-    version = 1
-    SECS_TO_SUSPEND = 10
-    _tmpfile = None
-
-
-    def cleanup(self):
-        # if the test fails with the device unmounted and before re-mounting,
-        # the test will be unable to properly cleanup, since we have no way to
-        # remove a tmp file from an unmounted device.
-        # For instance, this can happen if suspend won't work (e.g. it will
-        # reboot instead).
-        if self._tmpfile and os.path.isfile(self._tmpfile):
-            logging.debug('cleanup(): removing %s', self._tmpfile)
-            os.remove(self._tmpfile)
-
-        self.scanner.unmount_all()
-
-        super(hardware_UsbMount, self).cleanup()
-
-
-    def run_once(self, mount_cycles=10, filter_dict={'bus':'usb'}):
-        """
-        @param mount_cycles: how many times to mount/unmount. Default: 10.
-        @param filter_dict: storage dictionary filter.
-               Default: match any device connected on the USB bus.
-        """
-        # wait_for_device() returns (device_dictionary,
-        # time_spent_looking_for_it), and only the dictionary is relevant for
-        # this test
-        storage = self.wait_for_device(filter_dict, cycles=1,
-                                       mount_volume=True)[0]
-
-        if not os.path.ismount(storage['mountpoint']):
-            raise error.TestFail('filesystem %s mount failed' % filter_dict)
-
-        storage_filter = {'fs_uuid': storage['fs_uuid']}
-        # We cannot use autotemp.tempfile since we should close the descriptors
-        # every time the storage device is unmounted.
-        self._tmpfile = os.path.join(storage['mountpoint'],
-                                     'tempfile_usb_mount.tmp')
-
-        while mount_cycles:
-            mount_cycles -= 1
-            # Create a 1MiB random file and checksum it.
-            storage_mod.create_file(self._tmpfile, 1*1024*1024)
-            chksum = storage_mod.checksum_file(self._tmpfile)
-
-            logging.debug('storage to umount %s', storage)
-
-            # Umount the volume.
-            self.scanner.umount_volume(storage_dict=storage)
-            storage = self.wait_for_device(storage_filter,
-                                           mount_volume=False)[0]
-            if os.path.ismount(storage['mountpoint']):
-                raise error.TestFail('filesystem %s unmount failed ' %
-                                     storage_filter)
-
-            # Mount the volume back.
-            self.scanner.mount_volume(storage_dict=storage)
-            storage = self.wait_for_device(storage_filter,
-                                            mount_volume=False)[0]
-            if not os.path.ismount(storage['mountpoint']):
-                raise error.TestFail('filesystem %s mount failed' %
-                                     storage_filter)
-
-            # Check that the created file exists and has the same content.
-            if not os.path.isfile(self._tmpfile):
-                raise error.TestFail('%s: file not present after remounting' %
-                                     self._tmpfile)
-
-            if chksum != storage_mod.checksum_file(self._tmpfile):
-                raise error.TestFail('%s: file content changed after '
-                                     'remounting' % self._tmpfile)
-
-        # Mount it, suspend and verify that after suspend-to-ram the
-        # device is still mounted
-        self.scanner.mount_volume(storage_dict=storage)
-        storage = self.wait_for_device(storage_filter, mount_volume=False)[0]
-        if not os.path.ismount(storage['mountpoint']):
-            raise error.TestFail('filesystem %s mount failed ' % storage)
-
-        sys_power.do_suspend(self.SECS_TO_SUSPEND)
-
-        # mount_volume=False because we don't want the method to mount if
-        # unmounted: we need to check its actual status right after suspend
-        storage = self.wait_for_device(storage_filter, mount_volume=False)[0]
-
-        if not os.path.ismount(storage['mountpoint']):
-            raise error.TestFail('filesystem %s not mounted after suspend' %
-                                 storage_filter)
-
-        if not os.path.isfile(self._tmpfile):
-            raise error.TestFail('%s: file not present anymore after '
-                                 'remounting' % self._tmpfile)
-
-        if chksum != storage_mod.checksum_file(self._tmpfile):
-            raise error.TestFail('%s: file content changed after remounting' %
-                                 self._tmpfile)
diff --git a/client/site_tests/infra_PythonVersion/control.3 b/client/site_tests/infra_PythonVersion/control.3
new file mode 100644
index 0000000..8417d0f
--- /dev/null
+++ b/client/site_tests/infra_PythonVersion/control.3
@@ -0,0 +1,17 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "infra_PythonVersion.3"
+PURPOSE = "Verify the python version running the test"
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """Verify the python version running the test."""
+
+args_dict = utils.args_to_dict(args)
+
+job.run_test('infra_PythonVersion', case=3)
diff --git a/client/site_tests/infra_PythonVersion/infra_PythonVersion.py b/client/site_tests/infra_PythonVersion/infra_PythonVersion.py
new file mode 100644
index 0000000..9542624
--- /dev/null
+++ b/client/site_tests/infra_PythonVersion/infra_PythonVersion.py
@@ -0,0 +1,19 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.common_lib import error
+
+
+class infra_PythonVersion(test.test):
+    """Test to be run locally for checking Python version in Autotest."""
+    version = 1
+
+    def run_once(self, case):
+        """Verify the running Python Version is as expected."""
+        if sys.version_info.major != case:
+            raise error.TestFail("Not running in python version %s" % case)
+        return
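
Outside the autotest harness, the same interpreter check reduces to comparing sys.version_info.major against the expected value. A quick local sanity-check sketch (expected_major mirrors the |case| argument in the control file above):

import sys

expected_major = 3  # the value passed as |case| in the control file
if sys.version_info.major != expected_major:
    raise SystemExit('running under python %d, expected %d'
                     % (sys.version_info.major, expected_major))
print('python %d.%d OK' % sys.version_info[:2])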
diff --git a/client/site_tests/kernel_AsyncDriverProbe/control b/client/site_tests/kernel_AsyncDriverProbe/control
index 895357e..153a8a6 100644
--- a/client/site_tests/kernel_AsyncDriverProbe/control
+++ b/client/site_tests/kernel_AsyncDriverProbe/control
@@ -14,6 +14,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "kernel"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
     Checks that the kernel correctly implements asynchronous probing for
diff --git a/client/site_tests/kernel_CrosECSysfsAccel/control b/client/site_tests/kernel_CrosECSysfsAccel/control
deleted file mode 100644
index 508abed..0000000
--- a/client/site_tests/kernel_CrosECSysfsAccel/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'kernel_CrosECSysfsAccel'
-AUTHOR = 'alecaberg'
-PURPOSE = 'Test cros_ec_accel driver and sysfs interface'
-CRITERIA = 'Fails if sysfs accel interface is not present or data is invalid'
-TIME = 'SHORT'
-ATTRIBUTES = "suite:kernel_daily_regression"
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'kernel'
-TEST_TYPE = 'client'
-DEPENDENCIES = 'accel:cros-ec'
-
-DOC = 'Checks that the sysfs interface to the EC accels is present and working.'
-
-job.run_test('kernel_CrosECSysfsAccel')
diff --git a/client/site_tests/kernel_CrosECSysfsAccel/kernel_CrosECSysfsAccel.py b/client/site_tests/kernel_CrosECSysfsAccel/kernel_CrosECSysfsAccel.py
deleted file mode 100644
index c581d86..0000000
--- a/client/site_tests/kernel_CrosECSysfsAccel/kernel_CrosECSysfsAccel.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import logging, os
-import math
-from autotest_lib.client.bin import utils, test
-from autotest_lib.client.common_lib import error
-
-
-class kernel_CrosECSysfsAccel(test.test):
-    '''Make sure the EC sysfs accel interface provides meaningful output'''
-    version = 1
-
-
-    # For EC accelerometer, define the number of counts in 1G, and the number
-    # of counts that the magnitude of each sensor is allowed to be off from a
-    # magnitude of 1G. These values are not sensor dependent, they are based
-    # on the EC sysfs interface, which specifies number of counts in 1G.
-    _ACCEL_1G_IN_G = 1024
-    _ACCEL_1G_IN_MS2 = 9.8185
-    _ACCEL_MAG_VALID_OFFSET = .25
-
-    _ACCEL_BASE_LOC = 'base'
-    _ACCEL_LID_LOC = 'lid'
-    _ACCEL_LOCS = [_ACCEL_BASE_LOC, _ACCEL_LID_LOC]
-
-
-    sysfs_accel_search_path = '/sys/bus/iio/devices'
-    sysfs_accel_paths = {}
-    sysfs_accel_old_path = ''
-    new_sysfs_layout = True
-
-    @classmethod
-    def _read_sysfs_accel_file(cls, fullpath):
-        """
-        Read the contents of the given accel sysfs file or fail
-
-        @param fullpath Name of the file within the accel sysfs interface
-        directory
-        """
-        try:
-            content = utils.read_file(fullpath)
-        except Exception as err:
-            raise error.TestFail('sysfs file problem: %s' % err)
-        return content
-
-
-    def _find_sysfs_accel_dir(self):
-        """
-        Return the sysfs directory for accessing EC accels
-        """
-        for _, dirs, _ in os.walk(self.sysfs_accel_search_path):
-            for d in dirs:
-                dirpath = os.path.join(self.sysfs_accel_search_path, d)
-                namepath = os.path.join(dirpath, 'name')
-
-                try:
-                    content = utils.read_file(namepath)
-                except IOError as err:
-                    # errno 2 is code for file does not exist, which is ok
-                    # here, just continue on to next directory. Any other
-                    # error is a problem, raise an error.
-                    if err.errno == 2:
-                        continue
-                    raise error.TestFail('IOError %d while searching for accel '
-                                         'sysfs dir in %s' % (err.errno, namepath))
-
-                # Correct directory has a file called 'name' with contents
-                # 'cros-ec-accel'
-                if content.strip() != 'cros-ec-accel':
-                    continue
-
-                locpath = os.path.join(dirpath, 'location')
-                try:
-                    location = utils.read_file(locpath)
-                except IOError as err:
-                    if err.errno == 2:
-                        # We have an older scheme
-                        self.new_sysfs_layout = False
-                        self.sysfs_accel_old_path = dirpath
-                        return
-                    raise error.TestFail('IOError %d while reading %s' %
-                                         (err.errno, locpath))
-                loc = location.strip()
-                if loc in self._ACCEL_LOCS:
-                    self.sysfs_accel_paths[loc] = dirpath
-
-        if (not self.sysfs_accel_old_path and
-            len(self.sysfs_accel_paths) == 0):
-            raise error.TestFail('No sysfs interface to EC accels (cros-ec-accel)')
-
-    def _verify_accel_data(self, name):
-        """
-        Verify one of the EC accelerometers through the sysfs interface.
-        """
-        if self.new_sysfs_layout:
-            accel_scale = float(self._read_sysfs_accel_file(
-                os.path.join(self.sysfs_accel_paths[name],
-                             'scale')))
-            exp = self._ACCEL_1G_IN_MS2
-        else:
-            accel_scale = 1
-            exp = self._ACCEL_1G_IN_G
-
-        err = exp * self._ACCEL_MAG_VALID_OFFSET
-        value = {}
-        mag = 0
-        for axis in ['x', 'y', 'z']:
-            name_list = ['in', 'accel', axis]
-            if self.new_sysfs_layout:
-                base_path = self.sysfs_accel_paths[name]
-            else:
-                base_path = self.sysfs_accel_old_path
-                name_list.append(name)
-            name_list.append('raw')
-            axis_path = os.path.join(base_path, '_'.join(name_list))
-            value[axis] = int(self._read_sysfs_accel_file(axis_path))
-            value[axis] *= accel_scale
-            mag += value[axis] * value[axis]
-
-        mag = math.sqrt(mag)
-
-        # Accel data is out of range if magnitude is not close to 1G.
-        # Note, this means test will fail on the moon.
-        if abs(mag - exp) <= err:
-            logging.info("%s accel passed. Magnitude is %f.", name, mag)
-        else:
-            logging.info("%s accel bad data. Magnitude is %f, expected "
-                         "%f +/-%f. Raw data is x:%f, y:%f, z:%f.", name,
-                         mag, exp, err, value['x'], value['y'], value['z'])
-            raise error.TestFail("Accel magnitude out of range.")
-
-
-    def run_once(self):
-        """
-        Check for accelerometers, and if present, check data is valid
-        """
-        # First make sure that the motion sensors are active. If this
-        # check fails it means the EC motion sense task is not running and
-        # therefore not updating acceleration values in shared memory.
-        # Note that this check only works for x86 boards.
-        arch = utils.get_arch()
-        if arch.startswith('x86'):
-            active = utils.system_output('ectool motionsense active')
-            if active == "0":
-                raise error.TestFail("Motion sensing is inactive")
-
-        # Find the iio sysfs directory for EC accels
-        self._find_sysfs_accel_dir()
-
-        if self.sysfs_accel_old_path:
-            # Get all accelerometer data
-            accel_info = utils.system_output('ectool motionsense')
-            info = accel_info.splitlines()
-
-            # If the base accelerometer is present, then verify data
-            if 'None' not in info[1]:
-                self._verify_accel_data(self._ACCEL_BASE_LOC)
-
-            # If the lid accelerometer is present, then verify data
-            if 'None' not in info[2]:
-                self._verify_accel_data(self._ACCEL_LID_LOC)
-        else:
-            for loc in self.sysfs_accel_paths.keys():
-                self._verify_accel_data(loc)
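
_verify_accel_data() above boils down to checking that the vector magnitude of the three axis readings is within 25% of 1G. A self-contained sketch of that check with made-up sample readings (on a device the values come from the in_accel_*_raw sysfs files, scaled as in the test):

import math

ACCEL_1G_IN_MS2 = 9.8185
MAG_VALID_OFFSET = 0.25   # allow +/- 25% of 1G

def magnitude_ok(x, y, z, expected=ACCEL_1G_IN_MS2):
    """Return True if the (x, y, z) reading has a magnitude close to 1G."""
    mag = math.sqrt(x * x + y * y + z * z)
    return abs(mag - expected) <= expected * MAG_VALID_OFFSET

# A lid lying flat should read roughly (0, 0, 1G):
print(magnitude_ok(0.1, -0.2, 9.7))   # True
print(magnitude_ok(0.0, 0.0, 2.0))    # False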
diff --git a/client/site_tests/kernel_Delay/control b/client/site_tests/kernel_Delay/control
deleted file mode 100644
index beff606..0000000
--- a/client/site_tests/kernel_Delay/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-This test is to make sure that udelay() delays at least as long as
-requested (as compared to ktime()).
-"""
-NAME = "kernel_Delay"
-PURPOSE = ""
-CRITERIA = """
-Fails if udelay() returns early.
-"""
-ATTRIBUTES = "suite:kernel_daily_regression"
-TIME = "FAST"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "kernel"
-TEST_TYPE = "client"
-
-job.run_test('kernel_Delay')
diff --git a/client/site_tests/kernel_Delay/kernel_Delay.py b/client/site_tests/kernel_Delay/kernel_Delay.py
deleted file mode 100644
index 0ef265e..0000000
--- a/client/site_tests/kernel_Delay/kernel_Delay.py
+++ /dev/null
@@ -1,252 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import logging
-import os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class kernel_Delay(test.test):
-    """
-    Test to ensure that udelay() delays at least as long as requested
-    (as compared to ktime()).
-
-    Test a variety of delays at minimum and maximum CPU frequencies.
-
-    """
-    version = 1
-
-    # Module not present prior to 3.8.  From 4.4 on, module renamed.
-    MIN_KERNEL_VER = '3.8'
-    OLD_MODULE_NAME = 'udelay_test'
-    NEW_KERNEL_VER = '4.4'
-    NEW_MODULE_NAME = 'test_udelay'
-
-    UDELAY_PATH = '/sys/kernel/debug/udelay_test'
-    QUIET_GOVERNOR_PATH = '/sys/devices/system/cpu/cpuquiet/current_governor'
-    GOVERNOR_GLOB = '/sys/devices/system/cpu/cpu*/cpufreq/scaling_governor'
-    SETSPEED_GLOB = '/sys/devices/system/cpu/cpu*/cpufreq/scaling_setspeed'
-    CUR_FREQ_GLOB = '/sys/devices/system/cpu/cpu*/cpufreq/cpuinfo_cur_freq'
-    CPUFREQ_AVAIL_GOVERNORS_PATH = (
-            '/sys/devices/system/cpu/cpu0/cpufreq/'
-            'scaling_available_governors')
-    CPUFREQ_AVAIL_FREQS_PATH = (
-            '/sys/devices/system/cpu/cpu0/cpufreq/'
-            'scaling_available_frequencies')
-
-    # Test a variety of delays
-    # 1..200, 200..500 (by 10), 500..2000 (by 100)
-    DELAYS = range(1, 200) + range(200, 500, 10) + range(500, 2001, 100)
-    ITERATIONS = 100
-
-    _governor_paths = []
-    _setspeed_paths = []
-    _cur_freq_paths = []
-
-    def _set_file(self, contents, filename):
-        """
-        Write a string to a file.
-
-        @param contents: the contents to write to the file
-        @param filename: the filename to use
-
-        """
-        logging.debug('setting %s to %s', filename, contents)
-        with open(filename, 'w') as f:
-            f.write(contents)
-
-
-    def _get_file(self, filename):
-        """
-        Read a string from a file.
-
-        @returns: the contents of the file (string)
-
-        """
-        with open(filename, 'r') as f:
-            return f.read()
-
-
-    def _get_freqs(self):
-        """
-        Get the current CPU frequencies.
-
-        @returns: the CPU frequencies of each CPU (list of int)
-
-        """
-        return [int(self._get_file(p)) for p in self._cur_freq_paths]
-
-
-    def _get_freqs_string(self):
-        """
-        Get the current CPU frequencies.
-
-        @returns: the CPU frequencies of each CPU (string)
-
-        """
-        return ' '.join(str(x) for x in self._get_freqs())
-
-
-    def _get_governors(self):
-        """
-        Get the current CPU governors.
-
-        @returns: the CPU governors of each CPU (list of string)
-
-        """
-        return [self._get_file(p).rstrip() for p in self._governor_paths]
-
-
-    def _get_quiet_governor(self):
-        """
-        Get the current CPU quiet governor.
-
-        @returns: the CPU quiet governor or None if it does not exist (string)
-
-        """
-        if os.path.isfile(self.QUIET_GOVERNOR_PATH):
-            return self._get_file(self.QUIET_GOVERNOR_PATH).rstrip()
-        else:
-            return None
-
-
-    def _reset_freq(self, initial_governors, initial_quiet_governor):
-        """
-        Unlimit the CPU frequency.
-
-        @param initial_governors: list of initial governors to reset state to
-        @param initial_quiet_governor: initial quiet governor to reset state to
-
-        """
-        for p, g in zip(self._governor_paths, initial_governors):
-            self._set_file(g, p)
-        if initial_quiet_governor and os.path.isfile(self.QUIET_GOVERNOR_PATH):
-            self._set_file(initial_quiet_governor, self.QUIET_GOVERNOR_PATH)
-
-
-    def _set_freq(self, freq):
-        """
-        Set the CPU frequency.
-
-        @param freq: desired CPU frequency
-
-        """
-        # Prevent CPUs from going up and down during the test if the option
-        # is available.
-        if os.path.isfile(self.QUIET_GOVERNOR_PATH):
-            logging.info('changing to userspace cpuquiet governor')
-            self._set_file('userspace', self.QUIET_GOVERNOR_PATH)
-
-        for p in self._governor_paths:
-            self._set_file('userspace', p)
-        for p in self._setspeed_paths:
-            self._set_file(str(freq), p)
-        logging.info(
-                'cpu frequencies set to %s with userspace governor',
-                self._get_freqs_string())
-        self._check_freq(freq)
-
-
-    def _check_freq(self, freq):
-        """
-        Check the CPU frequencies are set as requested.
-
-        @param freq: desired CPU frequency
-
-        """
-        for p in self._governor_paths:
-            governor = self._get_file(p).rstrip()
-            if governor != 'userspace':
-                raise error.TestFail('governor changed from userspace to %s' % (
-                        governor))
-        for p in self._setspeed_paths:
-            speed = int(self._get_file(p))
-            if speed != freq:
-                raise error.TestFail('setspeed changed from %s to %s' % (
-                        freq, speed))
-        freqs = self._get_freqs()
-        for f in freqs:
-            if f != freq:
-                raise error.TestFail('frequency set to %s instead of %s' % (
-                        f, freq))
-
-
-    def _test_udelay(self, usecs):
-        """
-        Test udelay() for a given amount of time.
-
-        @param usecs: number of usecs to delay for each iteration
-
-        """
-        self._set_file('%d %d' % (usecs, self.ITERATIONS), self.UDELAY_PATH)
-        with open(self.UDELAY_PATH, 'r') as f:
-            for line in f:
-                line = line.rstrip()
-                logging.info('result: %s', line)
-                if 'FAIL' in line:
-                    raise error.TestFail('udelay failed: %s' % line)
-
-    def _test_all_delays(self):
-        """
-        Test udelay() over all configured delays.
-
-        """
-        for usecs in self.DELAYS:
-            self._test_udelay(usecs)
-
-    def _test_userspace(self):
-        """
-        Test udelay() using userspace governor.
-
-        """
-        logging.info('testing with userspace governor')
-        with open(self.CPUFREQ_AVAIL_FREQS_PATH, 'r') as f:
-            available_freqs = [int(x) for x in f.readline().split()]
-
-        max_freq = max(available_freqs)
-        min_freq = min(available_freqs)
-        logging.info('cpu frequency max %d min %d', max_freq, min_freq)
-
-        freqs = [ min_freq, max_freq ]
-        for freq in freqs:
-            self._set_freq(freq)
-            self._test_all_delays()
-            self._check_freq(freq)
-
-    def run_once(self):
-        kernel_ver = os.uname()[2]
-        if utils.compare_versions(kernel_ver, self.MIN_KERNEL_VER) < 0:
-            logging.info(
-                    'skipping test: old kernel %s (min %s) missing module %s',
-                    kernel_ver, self.MIN_KERNEL_VER, self.OLD_MODULE_NAME)
-            return
-
-        if utils.compare_versions(kernel_ver, self.NEW_KERNEL_VER) < 0:
-            module_name = self.OLD_MODULE_NAME
-        else:
-            module_name = self.NEW_MODULE_NAME
-
-        utils.load_module(module_name)
-
-        self._governor_paths = glob.glob(self.GOVERNOR_GLOB)
-        self._setspeed_paths = glob.glob(self.SETSPEED_GLOB)
-        self._cur_freq_paths = glob.glob(self.CUR_FREQ_GLOB)
-        initial_governors = self._get_governors()
-        initial_quiet_governor = self._get_quiet_governor()
-
-        with open(self.CPUFREQ_AVAIL_GOVERNORS_PATH, 'r') as f:
-            available_governors = set(f.readline().split())
-        logging.info('governors: %s', ' '.join(available_governors))
-
-        try:
-            if 'userspace' in available_governors:
-                self._test_userspace()
-            else:
-                logging.warning('testing with existing governor')
-                self._test_all_delays()
-        finally:
-            self._reset_freq(initial_governors, initial_quiet_governor)
-            utils.unload_module(module_name)
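
The DELAYS constant above relies on Python 2 list concatenation of range() results. Under Python 3, which the PY_VERSION = 3 control entries elsewhere in this change target, an equivalent list could be built with itertools.chain; this is only a sketch, not a change to the deleted test:

from itertools import chain

# 1..199, 200..490 step 10, 500..2000 step 100, as in the deleted test
DELAYS = list(chain(range(1, 200), range(200, 500, 10), range(500, 2001, 100)))
print(len(DELAYS), DELAYS[0], DELAYS[-1])   # 245 1 2000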
diff --git a/client/site_tests/kernel_FirmwareRequest/control b/client/site_tests/kernel_FirmwareRequest/control
deleted file mode 100644
index 031927b..0000000
--- a/client/site_tests/kernel_FirmwareRequest/control
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "kernel_FirmwareRequest"
-AUTHOR = "The Chromium OS Authors"
-PURPOSE = "Verify kernel correctly implements firmware request APIs"
-CRITERIA = """
-   Fails if a known-existing firmware file can't be loaded (e.g., due to
-   crashes, race conditions on kfree()'ing the name, etc.), or if
-   known-nonexistent firmware files can be loaded.
-"""
-TIME = "SHORT"
-ATTRIBUTES = "suite:kernel_daily_regression"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "kernel"
-TEST_TYPE = "client"
-
-DOC = """
-    This test will run the firmware request kernel self test (from upstream).
-    This tests that the request_firmware() and request_firmware_nowait() kernel
-    APIs are somewhat sane. It tries to load the empty filename ("") as well as
-    a small toy firmware, and checks that it matches. It also makes sure a
-    non-existent firmware cannot be found.
-
-    We rerun the same test several times to increase the probability of
-    catching errors.
-
-    Of course, a success doesn't mean the absence of race conditions or bugs.
-    We may have just won the race.
-"""
-
-job.run_test('kernel_FirmwareRequest')
diff --git a/client/site_tests/kernel_FirmwareRequest/kernel_FirmwareRequest.py b/client/site_tests/kernel_FirmwareRequest/kernel_FirmwareRequest.py
deleted file mode 100644
index 2f3dbc0..0000000
--- a/client/site_tests/kernel_FirmwareRequest/kernel_FirmwareRequest.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import kernel_config
-
-
-class kernel_FirmwareRequest(test.test):
-    """
-    Test asynchronous firmware loading
-    """
-    version = 1
-
-    def set_module_locking(self, enabled):
-        """
-        Enable/disable LSM request_firmware location locking
-        Inspired by security_ModuleLocking
-        """
-        sysctl = "/proc/sys/kernel/chromiumos/module_locking"
-        value = '1\n' if enabled else '0\n'
-
-        if os.path.exists(sysctl):
-            open(sysctl, "w").write(value)
-        else:
-            raise error.TestNAError("module locking sysctl not available; may not be able to load test FW")
-
-
-    def test_is_valid(self):
-        """
-        Check if this test is worth running, based on whether the kernel
-        .config has the right features
-        """
-        config = kernel_config.KernelConfig()
-        config.initialize()
-        config.is_enabled('TEST_FIRMWARE')
-        return len(config.failures()) == 0
-
-
-    def do_fw_test(self):
-        """
-        Run one iteration of the test
-        Return non-zero if failed
-        """
-        os.chdir(self.srcdir)
-        ret = utils.system("./fw_filesystem.sh", ignore_status=True)
-        if ret:
-            raise error.TestFail("FW request test failed: %d" % (ret))
-
-
-    def run_once(self):
-        """
-        This test will run the firmware request kernel self test (from
-        upstream). This tests that the request_firmware() and
-        request_firmware_nowait() kernel APIs are somewhat sane. It tries to
-        load the empty filename ("") as well as a small toy firmware, and
-        checks that it matches. It also makes sure a non-existent firmware
-        cannot be found.
-
-        We rerun the same test several times to increase the probability of
-        catching errors.
-
-        Needs to disable module locking so we can load test firmwares from
-        non-standard locations (e.g., /tmp)
-        """
-
-        num_loops = 50
-        module_name = "test_firmware"
-
-        if not self.test_is_valid():
-            raise error.TestNAError("FW test module is not available for this test")
-
-        utils.load_module(module_name)
-        if not utils.module_is_loaded(module_name):
-            raise error.TestNAError("FW test module is not available for this test")
-
-        try:
-            self.set_module_locking(False)
-
-            logging.info("iterations: %d", num_loops)
-
-            for i in range(0, num_loops):
-                self.do_fw_test()
-
-        finally:
-            self.set_module_locking(True)
-            utils.unload_module(module_name)
diff --git a/client/site_tests/kernel_FirmwareRequest/src/fw_filesystem.sh b/client/site_tests/kernel_FirmwareRequest/src/fw_filesystem.sh
deleted file mode 100755
index 5c495ad..0000000
--- a/client/site_tests/kernel_FirmwareRequest/src/fw_filesystem.sh
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/bin/sh
-# This validates that the kernel will load firmware out of its list of
-# firmware locations on disk. Since the user helper does similar work,
-# we reset the custom load directory to a location the user helper doesn't
-# know so we can be sure we're not accidentally testing the user helper.
-set -e
-
-modprobe test_firmware
-
-DIR=/sys/devices/virtual/misc/test_firmware
-
-# CONFIG_FW_LOADER_USER_HELPER exposes a sysfs class under /sys/class/firmware/.
-# These days almost no kernel enables it, so use the presence of that
-# directory as an indicator that CONFIG_FW_LOADER_USER_HELPER is enabled.
-HAS_FW_LOADER_USER_HELPER=$(if [ -d /sys/class/firmware/ ]; then echo yes; else echo no; fi)
-
-if [ "$HAS_FW_LOADER_USER_HELPER" = "yes" ]; then
-	OLD_TIMEOUT=$(cat /sys/class/firmware/timeout)
-fi
-
-OLD_FWPATH=$(cat /sys/module/firmware_class/parameters/path)
-
-FWPATH=$(mktemp -d)
-FW="$FWPATH/test-firmware.bin"
-
-test_finish()
-{
-	if [ "$HAS_FW_LOADER_USER_HELPER" = "yes" ]; then
-		echo "$OLD_TIMEOUT" >/sys/class/firmware/timeout
-	fi
-	echo -n "$OLD_FWPATH" >/sys/module/firmware_class/parameters/path
-	rm -f "$FW"
-	rmdir "$FWPATH"
-}
-
-trap "test_finish" EXIT
-
-if [ "$HAS_FW_LOADER_USER_HELPER" = "yes" ]; then
-	# Turn down the timeout so failures don't take so long.
-	echo 1 >/sys/class/firmware/timeout
-fi
-
-# Set the kernel search path.
-echo -n "$FWPATH" >/sys/module/firmware_class/parameters/path
-
-# This is an unlikely real-world firmware content. :)
-echo "ABCD0123" >"$FW"
-
-NAME=$(basename "$FW")
-
-if printf '\000' >"$DIR"/trigger_request; then
-	echo "$0: empty filename should not succeed" >&2
-	exit 1
-fi
-
-if printf '\000' >"$DIR"/trigger_async_request; then
-	echo "$0: empty filename should not succeed (async)" >&2
-	exit 1
-fi
-
-# Request a firmware that doesn't exist, it should fail.
-if echo -n "nope-$NAME" >"$DIR"/trigger_request; then
-	echo "$0: firmware shouldn't have loaded" >&2
-	exit 1
-fi
-if diff -q "$FW" /dev/test_firmware >/dev/null ; then
-	echo "$0: firmware was not expected to match" >&2
-	exit 1
-else
-	if [ "$HAS_FW_LOADER_USER_HELPER" = "yes" ]; then
-		echo "$0: timeout works"
-	fi
-fi
-
-# This should succeed via kernel load or will fail after 1 second after
-# being handed over to the user helper, which won't find the fw either.
-if ! echo -n "$NAME" >"$DIR"/trigger_request ; then
-	echo "$0: could not trigger request" >&2
-	exit 1
-fi
-
-# Verify the contents are what we expect.
-if ! diff -q "$FW" /dev/test_firmware >/dev/null ; then
-	echo "$0: firmware was not loaded" >&2
-	exit 1
-else
-	echo "$0: filesystem loading works"
-fi
-
-# Try the asynchronous version too
-if ! echo -n "$NAME" >"$DIR"/trigger_async_request ; then
-	echo "$0: could not trigger async request" >&2
-	exit 1
-fi
-
-# Verify the contents are what we expect.
-if ! diff -q "$FW" /dev/test_firmware >/dev/null ; then
-	echo "$0: firmware was not loaded (async)" >&2
-	exit 1
-else
-	echo "$0: async filesystem loading works"
-fi
-
-exit 0
diff --git a/client/site_tests/kernel_Ktime/control b/client/site_tests/kernel_Ktime/control
deleted file mode 100644
index 8ac4954..0000000
--- a/client/site_tests/kernel_Ktime/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-This test is to make sure that ktime and the RTC are reasonably consistent
-over a short amount of time.
-"""
-NAME = "kernel_Ktime"
-PURPOSE = ""
-CRITERIA = """
-Fails if ktime and RTC do not agree.
-"""
-ATTRIBUTES = "suite:kernel_daily_regression"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "kernel"
-TEST_TYPE = "client"
-
-job.run_test('kernel_Ktime')
diff --git a/client/site_tests/kernel_Ktime/kernel_Ktime.py b/client/site_tests/kernel_Ktime/kernel_Ktime.py
deleted file mode 100644
index afd9003..0000000
--- a/client/site_tests/kernel_Ktime/kernel_Ktime.py
+++ /dev/null
@@ -1,224 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import re
-from time import sleep
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class kernel_Ktime(test.test):
-    """
-    Test to ensure that ktime and the RTC clock are consistent.
-
-    """
-    version = 1
-
-    MIN_KERNEL_VER = '3.8'
-    KERNEL_VER = '3.18'
-    MODULE_NAME = 'udelay_test'
-    MODULE_NAME_NEW = 'test_udelay'
-    UDELAY_PATH = '/sys/kernel/debug/udelay_test'
-    RTC_PATH = '/sys/class/rtc/rtc0/since_epoch'
-
-    # How many iterations to run the test for; each iteration is usually
-    # a second, but might be more if the skew is too large when retrieving
-    # the RTC and ktime.
-    TEST_DURATION = 250
-
-    # Allowable drift (as a function of elapsed RTC time): 0.01%
-    ALLOWABLE_DRIFT = 0.0001
-
-    # Maximum skew between ktime readings when aligning RTC and ktime
-    MAX_SKEW = 0.050
-
-    # Diffs to average for the rolling display
-    DIFFS_TO_AVERAGE = 30
-
-    def _set_file(self, contents, filename):
-        """
-        Write a string to a file.
-
-        @param contents: the contents to write to the file
-        @param filename: the filename to use
-
-        """
-        logging.debug('setting %s to %s', filename, contents)
-        with open(filename, 'w') as f:
-            f.write(contents)
-
-
-    def _get_file(self, filename):
-        """
-        Read a string from a file.
-
-        @returns: the contents of the file (string)
-
-        """
-        with open(filename, 'r') as f:
-            return f.read()
-
-
-    def _get_rtc(self):
-        """
-        Get the current RTC time.
-
-        @returns: the current RTC time since epoch (int)
-
-        """
-        return int(self._get_file(self.RTC_PATH))
-
-
-    def _get_ktime(self):
-        """
-        Get the current ktime.
-
-        @returns: the current ktime (float)
-
-        """
-        # Writing a delay of 0 will return info including the current ktime.
-        self._set_file('0', self.UDELAY_PATH)
-        with open(self.UDELAY_PATH, 'r') as f:
-            for line in f:
-                line = line.rstrip()
-                logging.debug('result: %s', line)
-                m = re.search(r'kt=(\d+.\d+)', line)
-                if m:
-                    return float(m.group(1))
-        return 0.0
-
-
-    def _get_times(self):
-        """
-        Get the rtc and estimated ktime and max potential error.
-
-        Returns the RTC and a best guess of the ktime when the RTC actually
-        ticked over to the current value.  Also returns the maximum potential
-        error of how far they are off by.
-
-        RTC ticked in the range of [ktime - max_error, ktime + max_error]
-
-        @returns: list of the current rtc, estimated ktime, max error
-
-        """
-        # Times are read k1, r1, k2, r2, k3.  RTC ticks over somewhere between
-        # r1 and r2, but since we don't know exactly when that is, the best
-        # guess we have is between k1 and k3.
-        rtc_older = self._get_rtc()
-        ktime_older = self._get_ktime()
-        rtc_old = self._get_rtc()
-        ktime_old = self._get_ktime()
-
-        # Ensure that this function returns in a reasonable number of
-        # iterations.  If excessive skew occurs repeatedly (eg RTC is too
-        # slow), abort.
-        bad_skew = 0
-        while bad_skew < 10:
-            rtc = self._get_rtc()
-            ktime = self._get_ktime()
-            skew = ktime - ktime_older
-            if skew > self.MAX_SKEW:
-                # Time between successive calls to ktime was too slow to
-                # bound the error to a reasonable value.  A few occurrences
-                # isn't anything to be concerned about, but if it's happening
-                # every second, it's worth investigating and could indicate
-                # that the RTC is very slow and MAX_SKEW needs to be increased.
-                logging.info((
-                    'retrying excessive skew: '
-                    'rtc [%d %d %d] ktime [%f %f %f] skew %f'),
-                    rtc_older, rtc_old, rtc, ktime_older, ktime_old, ktime,
-                    skew)
-                bad_skew += 1
-            elif rtc != rtc_old:
-                if rtc_older != rtc_old or rtc != rtc_old + 1:
-                    # This could happen if we took more than one second per
-                    # loop and could be changed to a warning if legitimate.
-                    raise error.TestFail('rtc progressed from %u to %u to %u' %
-                            (rtc_older, rtc_old, rtc))
-                return rtc, ktime_older + skew / 2, skew / 2
-            rtc_older = rtc_old
-            ktime_older = ktime_old
-            rtc_old = rtc
-            ktime_old = ktime
-        raise error.TestFail('could not reach skew %f after %d attempts' % (
-                self.MAX_SKEW, bad_skew))
-
-
-    def run_once(self):
-        kernel_ver = os.uname()[2]
-        if utils.compare_versions(kernel_ver, self.MIN_KERNEL_VER) < 0:
-            logging.info(
-                    'skipping test: old kernel %s (min %s) missing module %s',
-                    kernel_ver, self.MIN_KERNEL_VER, self.MODULE_NAME)
-            return
-        elif utils.compare_versions(kernel_ver, self.KERNEL_VER) < 0:
-            utils.load_module(self.MODULE_NAME)
-        elif utils.compare_versions(kernel_ver, self.KERNEL_VER) > 0:
-            utils.load_module(self.MODULE_NAME_NEW)
-
-        start_rtc, start_ktime, start_error = self._get_times()
-        logging.info(
-                'start rtc %d ktime %f error %f',
-                start_rtc, start_ktime, start_error)
-
-        recent_diffs = []
-        max_diff = 0
-        sum_rtc = 0
-        sum_diff = 0
-        sum_rtc_rtc = 0
-        sum_rtc_diff = 0
-        sum_diff_diff = 0
-        for i in xrange(self.TEST_DURATION):
-            # Sleep some amount of time to avoid busy waiting the entire time
-            sleep((i % 10) * 0.1)
-
-            current_rtc, current_ktime, current_error = self._get_times()
-            elapsed_rtc = current_rtc - start_rtc
-            elapsed_ktime = current_ktime - start_ktime
-            elapsed_diff = float(elapsed_rtc) - elapsed_ktime
-
-            # Allow for inaccurate ktime off ALLOWABLE_DRIFT from elapsed RTC,
-            # and take into account start and current error in times gathering
-            max_error = start_error + current_error
-            drift_threshold = elapsed_rtc * self.ALLOWABLE_DRIFT + max_error
-
-            # Track rolling average and maximum diff
-            recent_diffs.append(elapsed_diff)
-            if len(recent_diffs) > self.DIFFS_TO_AVERAGE:
-                recent_diffs.pop(0)
-            rolling_diff = sum(recent_diffs) / len(recent_diffs)
-            if abs(elapsed_diff) > abs(max_diff):
-                max_diff = elapsed_diff
-
-            # Track linear regression
-            sum_rtc += elapsed_rtc
-            sum_diff += elapsed_diff
-            sum_rtc_rtc += elapsed_rtc * elapsed_rtc
-            sum_rtc_diff += elapsed_rtc * elapsed_diff
-            sum_diff_diff += elapsed_diff * elapsed_diff
-
-            logging.info((
-                    'current rtc %d ktime %f error %f; elapsed rtc %d '
-                    'ktime %f: threshold %f diff %+f rolling %+f'),
-                    current_rtc, current_ktime, current_error, elapsed_rtc,
-                    elapsed_ktime, drift_threshold, elapsed_diff, rolling_diff)
-
-            if abs(elapsed_diff) > drift_threshold:
-                raise error.TestFail((
-                        'elapsed rtc %d and ktime %f diff %f '
-                        'is greater than threshold %f') %
-                        (elapsed_rtc, elapsed_ktime, elapsed_diff,
-                        drift_threshold))
-
-        # Dump final statistics
-        logging.info('max_diff %f', max_diff)
-        mean_rtc = sum_rtc / self.TEST_DURATION
-        mean_diff = sum_diff / self.TEST_DURATION
-        slope = ((sum_rtc_diff - sum_rtc * mean_diff) /
-                (sum_rtc_rtc - sum_rtc * mean_rtc))
-        logging.info('drift %.9f', slope)
-
-        utils.unload_module(self.MODULE_NAME)
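
The final 'drift' figure logged above is the slope of an ordinary least-squares fit of elapsed_diff against elapsed_rtc, accumulated incrementally in the loop. The same formula in isolation, checked against a tiny synthetic data set (the numbers are made up so that the slope comes out to 0.0001, i.e. 0.01% drift):

rtc = [10, 20, 30, 40]                     # elapsed RTC seconds
diff = [0.001, 0.002, 0.003, 0.004]        # rtc - ktime at each sample

n = len(rtc)
sum_rtc = sum(rtc)
sum_diff = sum(diff)
sum_rtc_rtc = sum(r * r for r in rtc)
sum_rtc_diff = sum(r * d for r, d in zip(rtc, diff))

mean_rtc = sum_rtc / n
mean_diff = sum_diff / n
slope = (sum_rtc_diff - sum_rtc * mean_diff) / (sum_rtc_rtc - sum_rtc * mean_rtc)
print('drift %.9f' % slope)                # 0.000100000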
diff --git a/client/site_tests/kernel_Lmbench/control b/client/site_tests/kernel_Lmbench/control
deleted file mode 100644
index 7d90e5b..0000000
--- a/client/site_tests/kernel_Lmbench/control
+++ /dev/null
@@ -1,13 +0,0 @@
-NAME = "kernel_Lmbench"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_benchmarks"
-# Sept 2012: Full run takes 2m50s on Lumpy.
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "kernel"
-TEST_TYPE = "client"
-DOC = """
-Run the lmbench benchmarks
-"""
-
-job.run_test('kernel_Lmbench')
diff --git a/client/site_tests/kernel_Lmbench/kernel_Lmbench.py b/client/site_tests/kernel_Lmbench/kernel_Lmbench.py
deleted file mode 100644
index 530dfd8..0000000
--- a/client/site_tests/kernel_Lmbench/kernel_Lmbench.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import logging
-from autotest_lib.client.bin import test, utils
-
-
-class kernel_Lmbench(test.test):
-    """Run some benchmarks from the lmbench3 suite.
-
-    lmbench is a series of micro benchmarks intended to measure basic operating
-    system and hardware system metrics.
-
-    For further details about lmbench refer to:
-    http://lmbench.sourceforge.net/man/lmbench.8.html
-
-    This test is copied from client/tests to avoid depending on make and
-    perl. Here we can also tune the individual benchmarks to be more
-    deterministic using taskset, nice, etc.
-
-    Example benchmark runs and outputs on a Lumpy device:
-    ./lat_pagefault -N 100 -W 10000 /usr/local/zeros 2>&1
-    Pagefaults on /usr/local/zeros: 1.5215 microseconds
-
-    ./lat_syscall -N 100 -W 10000 null 2>&1
-    Simple syscall: 0.1052 microseconds
-
-    ./lat_syscall -N 100 -W 10000 read /usr/local/zeros 2>&1
-    Simple read: 0.2422 microseconds
-
-    ./lat_syscall -N 100 -W 10000 write /usr/local/zeros 2>&1
-    Simple write: 0.2036 microseconds
-
-    ./lat_proc -N 100 -W 10000 fork 2>&1
-    Process fork+exit: 250.9048 microseconds
-
-    ./lat_proc -N 100 -W 10000 exec 2>&1
-    Process fork+execve: 270.8000 microseconds
-
-    ./lat_mmap -N 100 -W 10000 128M /usr/local/zeros 2>&1
-    134.217728 1644
-
-    ./lat_mmap -P 2 -W 10000 128M /usr/local/zeros 2>&1
-    134.217728 2932
-
-    ./lat_pipe -N 100 -W 10000 2>&1
-    Pipe latency: 14.3242 microseconds
-
-    taskset 0x1 nice -20 ./lat_ctx -s 0 -W 10000  8 2>&1
-    "size=0k ovr=1.09
-    8 1.80
-    """
-
-    version = 1
-
-    def _run_benchmarks(self):
-        """Run the benchmarks.
-
-        For details and output format refer to individual benchmark man pages:
-        http://lmbench.sourceforge.net/man/
-
-        To improve determinism, we sometimes use taskset to pin to a CPU and
-        nice.
-        """
-
-        benchmarks = [
-            ('lat_pagefault',
-             'lat_pagefault -N %(N)d -W %(W)d %(fname)s 2>&1'),
-            ('lat_syscall_null',
-             'lat_syscall -N %(N)d -W %(W)d null 2>&1'),
-            ('lat_syscall_read',
-             'lat_syscall -N %(N)d -W %(W)d read %(fname)s 2>&1'),
-            ('lat_syscall_write',
-             'lat_syscall -N %(N)d -W %(W)d write %(fname)s 2>&1'),
-            ('lat_proc_fork',
-             'lat_proc -N %(N)d -W %(W)d fork 2>&1'),
-            ('lat_proc_exec',
-             'lat_proc -N %(N)d -W %(W)d exec 2>&1'),
-            ('lat_mmap',
-             ('lat_mmap -N %(N)d -W %(W)d '
-              '%(fsize)dM %(fname)s 2>&1')),
-            ('lat_mmap_P2',
-             'lat_mmap -P 2 -W %(W)d %(fsize)dM %(fname)s 2>&1'),
-            ('lat_pipe',
-             'lat_pipe -N %(N)d -W %(W)d 2>&1'),
-            ('lat_ctx_s0',
-             ('taskset 0x1 nice -20 '
-              'lat_ctx -s 0 -W %(W)d  %(procs)d 2>&1'))
-        ]
-
-        keyvals = {}
-
-        # Create a file with <fsize> MB of zeros in /usr/local
-        cmd = 'dd if=/dev/zero of=%(fname)s bs=1M count=%(fsize)d'
-        cmd = cmd % self.lmparams
-        utils.system(cmd)
-
-        for (bm, cmd) in benchmarks:
-            cmd = cmd % self.lmparams
-            logging.info('Running: %s, cmd: %s', bm, cmd)
-            out = utils.system_output(cmd)
-            logging.info('Output: %s', out)
-
-            # See class doc string for output examples
-            lst = out.split()
-            idx = -2
-            if '_mmap' in bm or '_ctx' in bm:
-                idx = -1
-            useconds = float(lst[idx])
-            keyvals['us_' + bm] = useconds
-
-        self.lmkeyvals.update(keyvals)
-
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.lmkeyvals = {}
-
-        # Common parameters for the benchmarks. More details here:
-        # http://lmbench.sourceforge.net/man/lmbench.8.html
-        # N - number of repetitions
-        # P - parallelism
-        # W - warmup time in microseconds
-        # fname - file to operate on
-        # fsize - size of the above file in MB
-        # procs - number of processes for context switch benchmark - lat_ctx
-        self.lmparams = {
-            'N':100,
-            'P':2,
-            'fname':'/usr/local/zeros',
-            'fsize':128,
-            'W':10000,
-            'procs':8}
-
-        # Write out the params as keyvals now to keep them even if the test fails
-        param_kvals = [('param_%s' % p,v) for (p,v) in self.lmparams.items()]
-        self.write_perf_keyval(dict(param_kvals))
-
-    def run_once(self):
-        self._run_benchmarks()
-        self.write_perf_keyval(self.lmkeyvals)
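For context, the keyval extraction above depends only on where the numeric field sits in each benchmark's output: second-to-last token for the latency benchmarks that print '<value> microseconds', last token for lat_mmap and lat_ctx. A minimal, autotest-free sketch of that parsing, using the sample outputs from the class docstring (the helper name is made up):

SAMPLE_OUTPUTS = {
    'lat_syscall_null': 'Simple syscall: 0.1052 microseconds',
    'lat_pipe': 'Pipe latency: 14.3242 microseconds',
    'lat_mmap': '134.217728 1644',
    'lat_ctx_s0': '"size=0k ovr=1.09\n8 1.80',
}

def extract_microseconds(benchmark, output):
    """Pull the latency figure out of one lmbench run's output."""
    tokens = output.split()
    # lat_mmap and lat_ctx print the value as the last token; the other
    # latency benchmarks end with '<value> microseconds'.
    index = -1 if ('_mmap' in benchmark or '_ctx' in benchmark) else -2
    return float(tokens[index])

for name, out in sorted(SAMPLE_OUTPUTS.items()):
    print('us_%s = %s' % (name, extract_microseconds(name, out)))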
diff --git a/client/site_tests/kernel_SchedBandwith/control b/client/site_tests/kernel_SchedBandwith/control
deleted file mode 100644
index 4149a5b..0000000
--- a/client/site_tests/kernel_SchedBandwith/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-Runs a CPU-hogging process in the CPU-bandwidth-limited Chrome renderer
-background cgroup and verifies that at least 90% of time slices are throttled.
-"""
-NAME = "kernel_SchedBandwith"
-PURPOSE = """
-Checks that /sys/fs/cgroup/cpu/chrome_renderers/background/cpu.cfs_quota_us
-is enforced.
-"""
-CRITERIA = """
-Fails if CONFIG_CFS_BANDWIDTH components do not exist or behave incorrectly.
-"""
-ATTRIBUTES = "suite:hwqual"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "kernel"
-TEST_TYPE = "client"
-
-job.run_test('kernel_SchedBandwith')
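For reference, the quota this DOC refers to is the cpu.cfs_quota_us / cpu.cfs_period_us pair; the allowed CPU fraction is their ratio, and -1 disables limiting. A small read-only sketch, assuming the cgroup-v1 path used by the deleted test:

import os

# Path copied from the deleted test; adjust for other cgroup layouts.
CG_CRB_DIR = '/sys/fs/cgroup/cpu/chrome_renderers/background'

def _read_int(name):
    """Read one integer from a cgroup control file."""
    with open(os.path.join(CG_CRB_DIR, name)) as control_file:
        return int(control_file.read().strip())

def allowed_cpu_fraction():
    """Return the CPU fraction this cgroup may use, or None if unthrottled."""
    quota_us = _read_int('cpu.cfs_quota_us')
    period_us = _read_int('cpu.cfs_period_us')
    if quota_us < 0:  # -1 means bandwidth limiting is disabled
        return None
    return quota_us / float(period_us)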
diff --git a/client/site_tests/kernel_SchedBandwith/kernel_SchedBandwith.py b/client/site_tests/kernel_SchedBandwith/kernel_SchedBandwith.py
deleted file mode 100644
index ab3a442..0000000
--- a/client/site_tests/kernel_SchedBandwith/kernel_SchedBandwith.py
+++ /dev/null
@@ -1,254 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import subprocess
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class kernel_SchedBandwith(test.test):
-    """Test kernel CFS_BANDWIDTH scheduler mechanism (/sys/fs/cgroup/...)"""
-    version = 1
-    # A 30 second (default) run should result in most of the time slices being
-    # throttled.  Set a conservative lower bound based on having an unknown
-    # system load.  Alex commonly yields numbers in the range 311..315, which
-    # includes test overhead and signal latency.
-    _MIN_SECS = 30
-
-    _CG_DIR = "/sys/fs/cgroup/cpu"
-    _CG_CRB_DIR = os.path.join(_CG_DIR, "chrome_renderers", "background")
-
-    def _parse_cpu_stats(self):
-        """Parse and return CFS bandwidth statistics.
-
-        From kernel/Documentation/scheduler/sched-bwc.txt
-
-        cpu.stat:
-        - nr_periods: Number of enforcement intervals that have elapsed.
-        - nr_throttled: Number of times the group has been throttled/limited.
-        - throttled_time: The total time duration (in nanoseconds) for which entities
-          of the group have been throttled.
-
-        Returns: tuple with nr_periods, nr_throttled, throttled_time.
-        """
-        nr_periods = None
-        nr_throttled = None
-        throttled_time = None
-
-        fd = open(os.path.join(self._CG_CRB_DIR, "cpu.stat"))
-
-        for ln in fd.readlines():
-            logging.debug(ln)
-            (name, val) = ln.split()
-            logging.debug("name = %s val = %s", name, val)
-            if name == 'nr_periods':
-                nr_periods = int(val)
-            if name == 'nr_throttled':
-                nr_throttled = int(val)
-            if name == 'throttled_time':
-                throttled_time = int(val)
-
-        fd.close()
-        return nr_periods, nr_throttled, throttled_time
-
-    @staticmethod
-    def _parse_pid_stats(pid):
-        """Parse process id stats to determin CPU utilization.
-
-           from: https://www.kernel.org/doc/Documentation/scheduler/sched-stats.txt
-
-           /proc/<pid>/schedstat
-           ----------------
-           schedstats also adds a new /proc/<pid>/schedstat file to include some
-           of the same information on a per-process level.  There are three
-           fields in this file correlating for that process to:
-                1) time spent on the cpu
-                2) time spent waiting on a runqueue
-                3) # of timeslices run on this cpu
-
-        Args:
-            pid: integer, process id to gather stats for.
-
-        Returns:
-            tuple with total_msecs and idle_msecs
-        """
-        idle_slices = 0
-        total_slices = 0
-
-        fname = "/proc/sys/kernel/sched_cfs_bandwidth_slice_us"
-        timeslice_ms = int(utils.read_one_line(fname).strip()) / 1000.
-
-        with open(os.path.join('/proc', str(pid), 'schedstat')) as fd:
-            values = list(int(val) for val in fd.readline().strip().split())
-            running_slices = values[0] / timeslice_ms
-            idle_slices = values[1] / timeslice_ms
-            total_slices = running_slices + idle_slices
-        return (total_slices, idle_slices)
-
-
-    def _cg_start_task(self, in_cgroup=True):
-        """Start a CPU hogging task and add to cgroup.
-
-        Args:
-            in_cgroup: Boolean, if true add to cgroup otherwise just start.
-
-        Returns:
-            integer of pid of task started
-        """
-        null_fd = open("/dev/null", "w")
-        cmd = ['seq', '0', '0', '0']
-        task = subprocess.Popen(cmd, stdout=null_fd)
-        self._tasks.append(task)
-
-        if in_cgroup:
-            utils.write_one_line(os.path.join(self._CG_CRB_DIR, "tasks"),
-                                 task.pid)
-        return task.pid
-
-
-    def _cg_stop_tasks(self):
-        """Stop CPU hogging task."""
-        if hasattr(self, '_tasks') and self._tasks:
-            for task in self._tasks:
-                task.kill()
-        self._tasks = []
-
-
-    def _cg_set_quota(self, quota=-1):
-        """Set CPU quota that can be used for cgroup
-
-        Default of -1 will disable throttling
-        """
-        utils.write_one_line(os.path.join(self._CG_CRB_DIR, "cpu.cfs_quota_us"),
-                             quota)
-        rd_quota = utils.read_one_line(os.path.join(self._CG_CRB_DIR,
-                                                    "cpu.cfs_quota_us"))
-        if int(rd_quota) != int(quota):
-            raise error.TestFail("Failed to set cpu quota to %s" % quota)
-
-
-    def _cg_total_shares(self):
-        if not hasattr(self, '_total_shares'):
-            self._total_shares = int(utils.read_one_line(
-                    os.path.join(self._CG_DIR, "cpu.shares")))
-        return self._total_shares
-
-
-    def _cg_set_shares(self, shares=None):
-        """Set CPU shares that can be used for cgroup
-
-        Default of None reads total shares for cpu group and assigns that so
-        there will be no throttling
-        """
-        if shares is None:
-            shares = self._cg_total_shares()
-        utils.write_one_line(os.path.join(self._CG_CRB_DIR, "cpu.shares"),
-                             shares)
-        rd_shares = utils.read_one_line(os.path.join(self._CG_CRB_DIR,
-                                                  "cpu.shares"))
-        if int(rd_shares) != int(shares):
-            raise error.TestFail("Failed to set cpu shares to %s" % shares)
-
-
-    def _cg_disable_throttling(self):
-        self._cg_set_quota()
-        self._cg_set_shares()
-
-
-    def _cg_test_quota(self):
-        stats = []
-        period_us = int(utils.read_one_line(os.path.join(self._CG_CRB_DIR,
-                                                     "cpu.cfs_period_us")))
-
-        stats.append(self._parse_cpu_stats())
-
-        self._cg_start_task()
-        self._cg_set_quota(int(period_us * 0.1))
-        time.sleep(self._MIN_SECS)
-
-        stats.append(self._parse_cpu_stats())
-
-        self._cg_stop_tasks()
-        return stats
-
-
-    def _cg_test_shares(self):
-        stats = []
-
-        self._cg_set_shares(2)
-        pid = self._cg_start_task()
-        stats.append(self._parse_pid_stats(pid))
-
-        # load system heavily
-        for _ in xrange(utils.count_cpus() * 2 + 1):
-            self._cg_start_task(in_cgroup=False)
-
-        time.sleep(self._MIN_SECS)
-
-        stats.append(self._parse_pid_stats(pid))
-
-        self._cg_stop_tasks()
-        return stats
-
-
-    @staticmethod
-    def _check_stats(name, stats, percent):
-        total = stats[1][0] - stats[0][0]
-        idle = stats[1][1] - stats[0][1]
-        logging.info("%s total:%d idle:%d",
-                     name, total, idle)
-
-        # make sure we idled at least X% of the slices
-        min_idle = int(percent * total)
-        if idle < min_idle:
-            logging.error("%s idle count %d < %d ", name, idle,
-                          min_idle)
-            return 1
-        return 0
-
-
-    def setup(self):
-        super(kernel_SchedBandwith, self).setup()
-        self._tasks = []
-        self._quota = None
-        self._shares = None
-
-
-    def run_once(self, test_quota=True, test_shares=True):
-        errors = 0
-        if not os.path.exists(self._CG_CRB_DIR):
-            raise error.TestError("Locating cgroup dir %s" % self._CG_CRB_DIR)
-
-        self._quota = utils.read_one_line(os.path.join(self._CG_CRB_DIR,
-                                                       "cpu.cfs_quota_us"))
-        self._shares = utils.read_one_line(os.path.join(self._CG_CRB_DIR,
-                                                        "cpu.shares"))
-        if test_quota:
-            self._cg_disable_throttling()
-            quota_stats = self._cg_test_quota()
-            errors += self._check_stats('quota', quota_stats, 0.9)
-
-        if test_shares:
-            self._cg_disable_throttling()
-            shares_stats = self._cg_test_shares()
-            errors += self._check_stats('shares', shares_stats, 0.6)
-
-        if errors:
-            error.TestFail("Cgroup bandwidth throttling not working")
-
-
-    def cleanup(self):
-        super(kernel_SchedBandwith, self).cleanup()
-        self._cg_stop_tasks()
-
-        if hasattr(self, '_quota') and self._quota is not None:
-            self._cg_set_quota(self._quota)
-
-        if hasattr(self, '_shares') and self._shares is not None:
-            self._cg_set_shares(self._shares)
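The quota part of the test above reduces to sampling cpu.stat before and after the workload and requiring that at least 90% of the elapsed enforcement periods were throttled. A standalone sketch of that calculation (field names follow sched-bwc.txt; the path is the one from the deleted test):

import os

CG_CRB_DIR = '/sys/fs/cgroup/cpu/chrome_renderers/background'

def parse_cpu_stat(path=os.path.join(CG_CRB_DIR, 'cpu.stat')):
    """Return a dict with nr_periods, nr_throttled and throttled_time."""
    stats = {}
    with open(path) as stat_file:
        for line in stat_file:
            name, value = line.split()
            stats[name] = int(value)
    return stats

def throttled_fraction(before, after):
    """Fraction of enforcement periods throttled between two samples."""
    periods = after['nr_periods'] - before['nr_periods']
    throttled = after['nr_throttled'] - before['nr_throttled']
    return float(throttled) / periods if periods else 0.0

# Usage: sample once, run the CPU hog for the test duration, sample again
# and require throttled_fraction(first, second) >= 0.9.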
diff --git a/client/site_tests/kernel_TPMStress/control b/client/site_tests/kernel_TPMStress/control
index c1ea4f8..01bc5b2 100644
--- a/client/site_tests/kernel_TPMStress/control
+++ b/client/site_tests/kernel_TPMStress/control
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "kernel"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
     Sends commands to the TPM in a tight loop for some length of time, in
diff --git a/client/site_tests/kernel_TPMStress/kernel_TPMStress.py b/client/site_tests/kernel_TPMStress/kernel_TPMStress.py
index 6526035..7fa7559 100644
--- a/client/site_tests/kernel_TPMStress/kernel_TPMStress.py
+++ b/client/site_tests/kernel_TPMStress/kernel_TPMStress.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/logging_AsanCrash/asan_crash_ext/background.js b/client/site_tests/logging_AsanCrash/asan_crash_ext/background.js
deleted file mode 100644
index a7c19b3..0000000
--- a/client/site_tests/logging_AsanCrash/asan_crash_ext/background.js
+++ /dev/null
@@ -1,4 +0,0 @@
-// Copyright (c) 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
diff --git a/client/site_tests/logging_AsanCrash/asan_crash_ext/manifest.json b/client/site_tests/logging_AsanCrash/asan_crash_ext/manifest.json
deleted file mode 100644
index 1eb454e..0000000
--- a/client/site_tests/logging_AsanCrash/asan_crash_ext/manifest.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "key": "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDuUZGKCDbff6IRaxa4Pue7PPkxwPaNhGT3JEqppEsNWFjM80imEdqMbf3lrWqEfaHgaNku7nlpwPO1mu3/4Hr+XdNa5MhfnOnuPee4hyTLwOs3Vzz81wpbdzUxZSi2OmqMyI5oTaBYICfNHLwcuc65N5dbt6WKGeKgTpp4v7j7zwIDAQAB",
-  "description": "Telemetry ChromeOS Autotest component extension",
-  "name": "Telemetry ChromeOS AutoTest Component Extension",
-  "background": {
-    "scripts": ["background.js"]
-  },
-  "manifest_version": 2,
-  "version": "0.1",
-  "permissions" : [
-    "autotestPrivate"
-  ]
-}
diff --git a/client/site_tests/logging_AsanCrash/control b/client/site_tests/logging_AsanCrash/control
deleted file mode 100644
index 24c1273..0000000
--- a/client/site_tests/logging_AsanCrash/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dpolukhin, fdeng, yunlian"
-NAME = "logging_AsanCrash"
-PURPOSE = "Verify Address Sanitizer does catch memory errors."
-CRITERIA = "Fails if no message appears in a log after simulating memory bug."
-# Normally all suite:smoke tests running in the VM should also run on hardware
-# in bvt-inline. This test is an exception, as ASAN is VM specific.
-ATTRIBUTES = "suite:smoke"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "logging"
-TEST_TYPE = "client"
-
-DOC = """
-This is a positive test for Address Sanitizer.
-Test that memory error causes Chrome to abort with descriptive message in a log.
-"""
-
-job.run_test('logging_AsanCrash')
diff --git a/client/site_tests/logging_AsanCrash/logging_AsanCrash.py b/client/site_tests/logging_AsanCrash/logging_AsanCrash.py
deleted file mode 100644
index af1ede6..0000000
--- a/client/site_tests/logging_AsanCrash/logging_AsanCrash.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import cros_logging
-
-
-class logging_AsanCrash(test.test):
-    """Verify Address Sanitizer does catch memory errors."""
-
-    version = 1
-
-
-    def run_once(self):
-        if not 'asan' in utils.read_file('/etc/ui_use_flags.txt'):
-            raise error.TestFail('Current image not built with ASAN')
-
-        extension_path = os.path.join(os.path.dirname(__file__),
-                                      'asan_crash_ext')
-
-        with chrome.Chrome(extension_paths=[extension_path]) as cr:
-            pid = utils.get_oldest_pid_by_name('chrome')
-            asan_log_name = '/var/log/chrome/asan_log.%d' % pid
-            logging.info('Browser PID under telemetry control is %d. '
-                         'So ASAN log is expected at %s.', pid, asan_log_name)
-
-            logging.info('Initiate simulating memory bug to be caught by ASAN.')
-            extension = cr.get_extension(extension_path)
-            if not extension:
-                raise error.TestFail('Failed to find extension %s'
-                                     % extension_path)
-
-            # Catch the exception raised when the browser crashes.
-            cr.did_browser_crash(lambda: extension.ExecuteJavaScript(
-                    'chrome.autotestPrivate.simulateAsanMemoryBug();'))
-
-            utils.poll_for_condition(
-                    lambda: os.path.isfile(asan_log_name),
-                    timeout=10,
-                    exception=error.TestFail(
-                            'Found no asan log file %s during 10s'
-                            % asan_log_name))
-            ui_log = cros_logging.LogReader(asan_log_name)
-            ui_log.read_all_logs()
-
-            # We must wait some time until the memory bug is simulated (it is
-            # triggered right after the call returns) and caught by ASAN.
-            try:
-                utils.poll_for_condition(
-                        lambda: ui_log.can_find('ERROR: AddressSanitizer'),
-                        timeout=10,
-                        exception=error.TestFail(
-                                'Found no asan log message about '
-                                'Address Sanitizer catch'))
-                # An example error string is like this
-                # 'testarray:228' <== Memory access at offset 52 overflows
-                # this variable
-                utils.poll_for_condition(
-                        lambda: ui_log.can_find("'testarray"),
-                        timeout=10,
-                        exception=error.TestFail(
-                                'ASAN caught bug but did not mention '
-                                'the cause in the log'))
-
-            except:
-                logging.debug('ASAN log content: ' + ui_log.get_logs())
-                raise
-
-            # The cbuildbot logic will look for asan logs and process them.
-            # Remove the simulated log file to avoid that.
-            os.remove(asan_log_name)
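Stripped of the autotest plumbing, the deleted test is two timed waits: one for the per-PID ASAN log file to appear and one for the 'ERROR: AddressSanitizer' line inside it. A rough standalone equivalent (the helper name and polling interval are made up):

import os
import time

def wait_for_log_marker(log_path, marker, timeout=10, interval=0.5):
    """Poll log_path until marker shows up in its contents, else return False."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if os.path.isfile(log_path):
            with open(log_path) as log_file:
                if marker in log_file.read():
                    return True
        time.sleep(interval)
    return False

# e.g. wait_for_log_marker('/var/log/chrome/asan_log.1234',
#                          'ERROR: AddressSanitizer')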
diff --git a/client/site_tests/logging_CrashSender/OWNERS b/client/site_tests/logging_CrashSender/OWNERS
deleted file mode 100644
index fb67bf6..0000000
--- a/client/site_tests/logging_CrashSender/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-include chromiumos/platform2:/crash-reporter/OWNERS
diff --git a/client/site_tests/logging_CrashSender/control b/client/site_tests/logging_CrashSender/control
deleted file mode 100644
index 448b762..0000000
--- a/client/site_tests/logging_CrashSender/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "logging_CrashSender"
-PURPOSE = "Verify crash sender works."
-CRITERIA = """
-Fails if any of the following conditions occur:
-  - crash log messages are incorrect
-  - crash is sent without user consent
-"""
-ATTRIBUTES = "suite:bvt-inline, suite:partners, suite:smoke"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "logging"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-DOC = """
-Test that crash sender works correctly.
-"""
-
-job.run_test('logging_CrashSender')
diff --git a/client/site_tests/logging_CrashSender/logging_CrashSender.py b/client/site_tests/logging_CrashSender/logging_CrashSender.py
deleted file mode 100644
index a577c40..0000000
--- a/client/site_tests/logging_CrashSender/logging_CrashSender.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os, re
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.crash import crash_test
-
-
-_SECONDS_SEND_SPREAD = 3600
-
-class logging_CrashSender(crash_test.CrashTest):
-    """
-      End-to-end test of crash_sender.
-    """
-    version = 1
-
-
-    def _check_hardware_info(self, result):
-        # Get board name
-        lsb_release = utils.read_file('/etc/lsb-release')
-        board_match = re.search(r'CHROMEOS_RELEASE_BOARD=(.*)', lsb_release)
-        if not ('Board: %s' % board_match.group(1)) in result['output']:
-            raise error.TestFail('Missing board name %s in output' %
-                                 board_match.group(1))
-        # Get hwid
-        with os.popen("crossystem hwid 2>/dev/null", "r") as hwid_proc:
-            hwclass = hwid_proc.read()
-        if not hwclass:
-            hwclass = 'undefined'
-        if not ('HWClass: %s' % hwclass) in result['output']:
-            raise error.TestFail('Expected hwclass %s in output' % hwclass)
-
-
-    def _check_send_result(self, result, report_kind, payload_name,
-                           exec_name):
-        if result['report_exists']:
-            raise error.TestFail('Test report was not deleted after sending')
-        if result['rate_count'] != 1:
-            raise error.TestFail('Rate limit was not updated properly: #%d' %
-                                 result['rate_count'])
-        if not result['send_attempt']:
-            raise error.TestFail('Sender did not attempt the send')
-        if not result['send_success']:
-            raise error.TestFail('Send did not complete successfully')
-        if (result['sleep_time'] < 0 or
-            result['sleep_time'] >= _SECONDS_SEND_SPREAD):
-            raise error.TestFail('Sender did not sleep for an appropriate '
-                                 'amount of time: #%d' % result['sleep_time'])
-        if result['report_kind'] != report_kind:
-            raise error.TestFail('Incorrect report kind "%s", expected "%s"' %
-                                 (result['report_kind'], report_kind))
-        desired_payload = self.get_crash_dir_name(payload_name)
-        if result['report_payload'] != desired_payload:
-            raise error.TestFail('Payload filename was incorrect, got "%s", '
-                                 'expected "%s"' % (result['report_payload'],
-                                                    desired_payload))
-        if result['exec_name'] != exec_name:
-            raise error.TestFail('ExecName was incorrect, expected "%s", '
-                                 'got "%s"' % (exec_name, result['exec_name']))
-
-
-    def _test_sender_simple_minidump(self):
-        """Test sending a single minidump crash report."""
-        result = self._call_sender_one_crash()
-        self._check_send_result(result, 'minidump',
-                                '%s.dmp' % self._FAKE_TEST_BASENAME, 'fake')
-        if (not 'Version: my_ver' in result['output']):
-            raise error.TestFail('Simple minidump send failed')
-        self._check_hardware_info(result)
-        # Also test "Image type" field.  Note that it will not be "dev" even
-        # on a dev build because crash-test-in-progress will exist.
-        if result['image_type']:
-            raise error.TestFail('Image type "%s" should not exist' %
-                                 result['image_type'])
-        # Also test "Boot mode" field.  Note that it will not be "dev" even
-        # when booting in dev mode because crash-test-in-progress will exist.
-        if result['boot_mode']:
-            raise error.TestFail('Boot mode "%s" should not exist' %
-                                 result['boot_mode'])
-
-
-    def _test_sender_reports_disabled(self):
-        """Test that when reporting is disabled, we don't send."""
-        result = self._call_sender_one_crash(reports_enabled=False)
-        if (result['report_exists'] or
-            not 'Crash reporting is disabled' in result['output'] or
-            result['send_attempt']):
-            raise error.TestFail('Sender did not handle reports disabled')
-
-
-    def run_once(self):
-        """ Run all tests once """
-        self.run_crash_tests([
-            'sender_simple_minidump',
-            'sender_reports_disabled'])
diff --git a/client/site_tests/logging_CrashServices/control b/client/site_tests/logging_CrashServices/control
index a6b44b2..89458bc 100644
--- a/client/site_tests/logging_CrashServices/control
+++ b/client/site_tests/logging_CrashServices/control
@@ -12,9 +12,10 @@
 JOB_RETRIES = 0
 ATTRIBUTES = "suite:bvt-perbuild, suite:jailed_build, suite:partners"
 DEPENDENCIES='cleanup-reboot'
+PY_VERSION = 3
 
 DOC = """
-Crash all valid Chrome OS processes from an allowlist, and confirm
+Crash all valid ChromeOS processes from an allowlist, and confirm
 that *.dmp, *.meta, and *.core files are created.
 """
 
diff --git a/client/site_tests/logging_CrashServices/logging_CrashServices.py b/client/site_tests/logging_CrashServices/logging_CrashServices.py
index 37af6ed..2396da6 100644
--- a/client/site_tests/logging_CrashServices/logging_CrashServices.py
+++ b/client/site_tests/logging_CrashServices/logging_CrashServices.py
@@ -8,6 +8,8 @@
 from autotest_lib.client.common_lib.cros import chrome
 from autotest_lib.client.cros.crash.crash_test import CrashTest
 
+
+# TODO(b/185707445): port this test to Tast.
 class logging_CrashServices(test.test):
     """Verifies crash collection for system services."""
     version = 3
@@ -55,7 +57,7 @@
         """Checks the creation of the the dump files with appropriate extensions.
            Also check for the file size of the dumps created.
 
-        @param path: Dirctory path where the dump files are expected.
+        @param path: Directory path where the dump files are expected.
         @param process_name: Name of the process.
         @param filetype: Extension of the dump file.
 
@@ -70,11 +72,37 @@
         for entry in entries:
             (filename, ext) = os.path.splitext(entry)
             if ext == filetype and filename.startswith(process_name):
-                logging.info('the path is %s', os.path)
-                if os.path.getsize(path + '/' + entry) > 0 :
+                file_path = path + '/' + entry
+                logging.info('the path is %s', file_path)
+                if os.path.getsize(file_path) > 0:
                     return entry
         return None
 
+    def _remove_crash_file(self, path, process_name):
+        """Remove crash dumps to prevent unnecessary crash reporting.
+
+        @param path: Directory path where the dump files are expected.
+        @param process_name: Name of the process.
+
+        """
+        try:
+            # sort by name so that we can find latest crash first
+            entries = sorted(os.listdir(path), reverse=True)
+        except OSError:
+            return
+
+        crash_name = None
+        for entry in entries:
+            (filename, _) = os.path.splitext(entry)
+            if filename.startswith(process_name):
+                crash_name = filename
+                break
+        if crash_name is None:
+            return
+
+        for entry in entries:
+            if entry.startswith(crash_name):
+                os.remove(path + '/' + entry)
 
     def _test_process(self, process_path, crash_extensions):
         """Calls a function to kill the process and then wait
@@ -96,6 +124,11 @@
                                                          crash_ext),
                 desc="Waiting for %s for %s" % (crash_ext, process_path))
 
+        self._remove_crash_file(CrashTest._SYSTEM_CRASH_DIR, process_name)
+        # tlsdated generates two groups of crash files for some unknown reason,
+        # so we need to remove both of them.
+        if process_name == "tlsdated":
+            self._remove_crash_file(CrashTest._SYSTEM_CRASH_DIR, process_name)
 
     def run_once(self, process_path=None, crash_extensions=None):
         if process_path:
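To make the cleanup added above concrete: each call removes only the newest group of files sharing one crash basename, which is why tlsdated needs a second call. A self-contained demo of that behaviour (the directory and file names are invented for the example):

import os
import tempfile

def remove_latest_crash_group(path, process_name):
    """Remove the newest group of crash files for process_name, mirroring
    the _remove_crash_file helper above."""
    try:
        entries = sorted(os.listdir(path), reverse=True)
    except OSError:
        return
    crash_name = next((os.path.splitext(e)[0] for e in entries
                       if e.startswith(process_name)), None)
    if crash_name is None:
        return
    for entry in entries:
        if entry.startswith(crash_name):
            os.remove(os.path.join(path, entry))

demo_dir = tempfile.mkdtemp()
for name in ('tlsdated.20210101.2.dmp', 'tlsdated.20210101.2.meta',
             'tlsdated.20210101.1.dmp', 'tlsdated.20210101.1.meta'):
    open(os.path.join(demo_dir, name), 'w').close()
remove_latest_crash_group(demo_dir, 'tlsdated')
print(sorted(os.listdir(demo_dir)))  # only the older .1 group remains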
diff --git a/client/site_tests/logging_FeedbackReport/control b/client/site_tests/logging_FeedbackReport/control
index 423dae1..e80d19c 100644
--- a/client/site_tests/logging_FeedbackReport/control
+++ b/client/site_tests/logging_FeedbackReport/control
@@ -9,10 +9,11 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "logging"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:bvt-perbuild, suite:usb_detect"
+ATTRIBUTES = "suite:bvt-perbuild"
+PY_VERSION = 3
 
 DOC = """
 Open feedback report and verify UI is correct
 """
 
-job.run_test("logging_FeedbackReport")
\ No newline at end of file
+job.run_test("logging_FeedbackReport")
diff --git a/client/site_tests/logging_FeedbackReport/logging_FeedbackReport.py b/client/site_tests/logging_FeedbackReport/logging_FeedbackReport.py
index 6513e3c..76fd338 100644
--- a/client/site_tests/logging_FeedbackReport/logging_FeedbackReport.py
+++ b/client/site_tests/logging_FeedbackReport/logging_FeedbackReport.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,7 +16,6 @@
 class logging_FeedbackReport(test.test):
     """Tests if feedback report can be opened with no crashes in browser."""
     version = 1
-    _FEEDBACK_ID = 'gfdkimpbcpahaombhbimeihdjnejgicl'
     _FEEDBACK_STATE_TIMEOUT = 40
     _WAIT = 2
     _FEEDBACK_SENT_URL = 'support.google.com/chromebook/answer/3142217'
@@ -33,61 +33,6 @@
         self._player.blocking_playback_of_default_file(
             input_type='keyboard', filename='keyboard_alt+shift+i')
 
-    def _check_feedback_elements(self):
-        """
-        Return whether feedback app is open or not.
-
-        @returns: True if all elements are present, else False.
-
-        """
-        # Verifying feedback app window is open.
-        if not self.feedback_app.EvaluateJavaScript('document.body != null'):
-            logging.info('Window not enabled.')
-            return False
-
-        # Verifying UI elements in window are enabled.
-        elements = ['cancel-button', 'send-report-button',
-                    'description-text']
-        for element in elements:
-            js = "document.getElementById('%s') != null" % element
-            if not self.feedback_app.EvaluateJavaScript(js):
-                logging.info("%s not enabled.", element)
-                return False
-
-        return True
-
-    def _check_feedback_extension_loaded(self):
-        """
-        Return whether feedback extension has loaded.
-
-        @returns: True if extension loaded, else False.
-
-        """
-
-        for extension in self.cr_exts.GetByExtensionId(self._FEEDBACK_ID):
-            url = extension.EvaluateJavaScript('location.href;')
-            if url.endswith('default.html'):
-                self.feedback_app = extension
-                return True
-        return False
-
-    def _confirm_feedback_state(self):
-        """
-        Fail test if feedback elements have not been found.
-
-        @raises: error.TestFail if feedback app not found.
-
-        """
-        utils.poll_for_condition(
-                lambda: self._check_feedback_extension_loaded(),
-                exception=error.TestError("Incorrect feedback id list."),
-                timeout=self._FEEDBACK_STATE_TIMEOUT)
-
-        utils.poll_for_condition(
-                lambda: self._check_feedback_elements(),
-                exception=error.TestFail('Feedback elements not enabled.'),
-                timeout=self._FEEDBACK_STATE_TIMEOUT)
-
     def _enter_feedback_text(self):
         """Enter Feedback message in the Text field"""
         time.sleep(self._WAIT)
@@ -106,6 +51,7 @@
 
     def _submit_feedback(self):
         """Click on Send button to submit Feedback Report using keyboard input"""
+        time.sleep(self._WAIT)
         self._enter_feedback_text()
         self._press_shift_tab()
         self._press_enter()
@@ -133,9 +79,6 @@
             # Open and confirm feedback app is working.
             time.sleep(self._WAIT)
             self._open_feedback()
-            self.cr_exts = self.cr.browser.extensions
-            self.feedback_app = None
-            self._confirm_feedback_state()
             self._submit_feedback()
 
             start_time = time.time()
diff --git a/client/site_tests/logging_UserCrash/control b/client/site_tests/logging_UserCrash/control
index 98b94cf..d9dbf48 100644
--- a/client/site_tests/logging_UserCrash/control
+++ b/client/site_tests/logging_UserCrash/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "logging_UserCrash"
 PURPOSE = "Verify user space crashes are handled appropriately."
 CRITERIA = """
@@ -14,7 +14,7 @@
   - core pattern is not recognized as 'core'
   - crash is sent without user consent
 """
-ATTRIBUTES = "suite:bvt-cq, suite:smoke"
+ATTRIBUTES = "suite:bvt-cq, suite:smoke, suite:satlab-qual-bvt-cq"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "logging"
@@ -23,6 +23,7 @@
 # at the end of the test, we should reboot to restore the state of the
 # device.
 DEPENDENCIES='cleanup-reboot'
+PY_VERSION = 3
 
 DOC = """
 Test that user space crashes are detected and handled appropriately.
diff --git a/client/site_tests/logging_UserCrash/logging_UserCrash.py b/client/site_tests/logging_UserCrash/logging_UserCrash.py
index 59446ef..37c8abb 100644
--- a/client/site_tests/logging_UserCrash/logging_UserCrash.py
+++ b/client/site_tests/logging_UserCrash/logging_UserCrash.py
@@ -2,15 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import logging, os, time
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros import cros_ui, upstart
 from autotest_lib.client.cros.crash import user_crash_test
 
 
-_COLLECTION_ERROR_SIGNATURE = 'crash_reporter-user-collection'
-_MAX_CRASH_DIRECTORY_SIZE = 32
 _CRASH_REPORTER_ENABLED_PATH = '/var/lib/crash_reporter/crash-handling-enabled'
 
 
@@ -26,10 +23,10 @@
         return uptime_seconds
 
 
-    # This test has a critical tast counterpart, but the tast version only
-    # performs one of the two functions that this test does. In particular,
-    # the tast variant does not verify that crash reporter state is valid before
-    # any tests run and re-initialize crash reporter.
+    # This test has a tast counterpart, but the tast version performs a
+    # slightly different function. Specifically, the tast variant does not
+    # verify that crash reporter state is valid before any tests run, nor does
+    # it re-initialize the crash reporter.
     # TODO(https://crbug.com/1085194): Write a tast test to verify that crash
     # reporter's state is good on a "clean" system.
     def _test_reporter_startup(self):
@@ -43,26 +40,10 @@
             raise error.TestFail('core pattern should have been %s, not %s' %
                                  (expected_core_pattern, output))
 
-        # Check that we wrote out the file indicating that crash_reporter is
-        # enabled AFTER the system was booted. This replaces the old technique
-        # of looking for the log message which was flakey when the logs got
-        # flooded.
-        # NOTE: This technique doesn't need to be highly accurate, we are only
-        # verifying that the flag was written after boot and there are multiple
-        # seconds between those steps, and a file from a prior boot will almost
-        # always have been written out much further back in time than our
-        # current boot time.
-        if not os.path.isfile(_CRASH_REPORTER_ENABLED_PATH):
-            raise error.TestFail(
-                'crash reporter enabled file flag is not present at %s' %
-                _CRASH_REPORTER_ENABLED_PATH)
-        flag_time = time.time() - os.path.getmtime(_CRASH_REPORTER_ENABLED_PATH)
-        uptime = self._get_uptime()
-        if (flag_time > uptime):
-            raise error.TestFail(
-                'user space crash handling was not started during last boot')
 
-
+    # This test has a critical tast counterpart, but we leave it here because
+    # it verifies that the in_progress_integration_test variable will be set in
+    # autotests.
     def _test_chronos_crasher(self):
         """Test a user space crash when running as chronos is handled."""
         self._check_crashing_process(
@@ -71,66 +52,6 @@
                 'logging_UserCrash')
 
 
-    def _test_chronos_crasher_no_consent(self):
-        """Test that without consent no files are stored."""
-        results = self._check_crashing_process('chronos', consent=False)
-
-
-    def _test_root_crasher(self):
-        """Test a user space crash when running as root is handled."""
-        self._check_crashing_process('root')
-
-
-    def _test_root_crasher_no_consent(self):
-        """Test that without consent no files are stored."""
-        results = self._check_crashing_process('root', consent=False)
-
-
-    def _test_max_enqueued_crashes(self):
-        """Test that _MAX_CRASH_DIRECTORY_SIZE is enforced."""
-        self._log_reader.set_start_by_current()
-        username = 'root'
-
-        crash_dir = self._get_crash_dir(username)
-        full_message = ('Crash directory %s already full with %d pending '
-                        'reports' % (crash_dir, _MAX_CRASH_DIRECTORY_SIZE))
-
-        # Fill up the queue.
-        for i in range(0, _MAX_CRASH_DIRECTORY_SIZE):
-            result = self._run_crasher_process(username)
-            if not result['crashed']:
-                raise error.TestFail('failure while setting up queue: %d' %
-                                     result['returncode'])
-            if self._log_reader.can_find(full_message):
-                raise error.TestFail('unexpected full message: ' +
-                                     full_message)
-
-        crash_dir_size = len(os.listdir(crash_dir))
-        # For debugging
-        utils.system('ls -l %s' % crash_dir)
-        logging.info('Crash directory had %d entries', crash_dir_size)
-
-        # Crash a bunch more times, but make sure no new reports
-        # are enqueued.
-        for i in range(0, 10):
-            self._log_reader.set_start_by_current()
-            result = self._run_crasher_process(username)
-            logging.info('New log messages: %s', self._log_reader.get_logs())
-            if not result['crashed']:
-                raise error.TestFail('failure after setting up queue: %d' %
-                                     result['returncode'])
-            utils.poll_for_condition(
-                    lambda: self._log_reader.can_find(full_message),
-                    timeout=20,
-                    exception=error.TestFail('expected full message: ' +
-                                             full_message))
-            if crash_dir_size != len(os.listdir(crash_dir)):
-                utils.system('ls -l %s' % crash_dir)
-                raise error.TestFail('expected no new files (now %d were %d)',
-                                     len(os.listdir(crash_dir)),
-                                     crash_dir_size)
-
-
     def initialize(self):
         user_crash_test.UserCrashTest.initialize(self)
 
@@ -140,9 +61,6 @@
             cros_ui.restart()
 
 
-    # TODO(kmixter): Test crashing a process as ntp or some other
-    # non-root, non-chronos user.
-
     def run_once(self):
         """ Run all tests once """
         self._prepare_crasher()
@@ -154,10 +72,5 @@
                               initialize_crash_reporter=False,
                               must_run_all=False)
 
-        self.run_crash_tests(['reporter_startup',
-                              'chronos_crasher',
-                              'chronos_crasher_no_consent',
-                              'root_crasher',
-                              'root_crasher_no_consent',
-                              'max_enqueued_crashes'],
-                              initialize_crash_reporter=True)
+        self.run_crash_tests(['reporter_startup', 'chronos_crasher'],
+                             initialize_crash_reporter=True)
diff --git a/client/site_tests/login_ChromeProfileSanitary/control b/client/site_tests/login_ChromeProfileSanitary/control
deleted file mode 100644
index 9cd6409..0000000
--- a/client/site_tests/login_ChromeProfileSanitary/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "cmasone, achuith"
-NAME = "login_ChromeProfileSanitary"
-ATTRIBUTES = "suite:regression"
-TIME = "FAST"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "login"
-TEST_TYPE = "client"
-
-DOC = """
-This test checks that a browser crash during a user session does not cause the
-browser to flip over to using the 'Default' profile, which is neither
-unprotected nor user-specific.
-
-This is a regression test for http://crbug.com/183736
-"""
-
-job.run_test('login_ChromeProfileSanitary')
diff --git a/client/site_tests/login_ChromeProfileSanitary/login_ChromeProfileSanitary.py b/client/site_tests/login_ChromeProfileSanitary/login_ChromeProfileSanitary.py
deleted file mode 100644
index 6a97fd3..0000000
--- a/client/site_tests/login_ChromeProfileSanitary/login_ChromeProfileSanitary.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import errno, os, stat
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import constants, httpd
-
-
-def _respond_with_cookies(handler, url_args):
-    """set_cookie response.
-
-    Responds with a Set-Cookie header to any GET request, and redirects to a
-    chosen URL.
-
-    @param handler: handler for set_cookie.
-    @param url_args: arguments passed through the url.
-
-    """
-    handler.send_response(303)
-    handler.send_header('Set-Cookie', 'name=value')
-    handler.send_header('Location', url_args['continue'][0])
-    handler.end_headers()
-    handler.wfile.write('Got form data:\n')
-    handler.wfile.write('%s:\n' % url_args)
-
-
-class login_ChromeProfileSanitary(test.test):
-    """Tests that the browser uses the correct profile after a crash."""
-    version = 1
-
-
-    def __get_cookies_mtime(self):
-        try:
-            cookies_info = os.stat(constants.LOGIN_PROFILE + '/Cookies')
-            return cookies_info[stat.ST_MTIME]
-        except OSError as e:
-            if e.errno == errno.ENOENT:
-                return None
-            raise
-
-
-    def initialize(self):
-        spec = 'http://localhost:8000'
-        path = '/set_cookie'
-        self._wait_path = '/test_over'
-        self._test_url = spec + path + '?continue=' + spec + self._wait_path
-        self._testServer = httpd.HTTPListener(8000, docroot=self.srcdir)
-        self._testServer.add_url_handler(path, _respond_with_cookies)
-        self._testServer.run()
-
-
-    def cleanup(self):
-        self._testServer.stop()
-
-
-    def run_once(self, timeout=10):
-        with chrome.Chrome() as cr:
-            # Get Default/Cookies mtime. None means no Cookies DB.
-            cookies_mtime = self.__get_cookies_mtime()
-
-            # Wait for chrome to show, then "crash" it.
-            utils.nuke_process_by_name(constants.BROWSER, with_prejudice=True)
-
-            cr.wait_for_browser_to_come_up()
-
-            latch = self._testServer.add_wait_url(self._wait_path)
-
-            # Navigate to site that leaves cookies.
-            cr.browser.tabs[0].Navigate(self._test_url)
-            latch.wait(timeout)
-            if not latch.is_set():
-                raise error.TestError('Never received callback from browser.')
-
-        # Ensure chrome writes state to disk.
-        with chrome.Chrome():
-            # Check mtime of Default/Cookies.  If changed, KABLOOEY.
-            new_cookies_mtime = self.__get_cookies_mtime()
-
-            if cookies_mtime != new_cookies_mtime:
-                if not cookies_mtime and new_cookies_mtime:
-                    raise error.TestFail('Cookies created in Default profile!')
-                raise error.TestFail('Cookies in Default profile changed!')
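The helper the deleted test served through httpd.HTTPListener answers every GET with a Set-Cookie header and a 303 redirect to the URL named in the 'continue' query argument. A self-contained sketch of the same behaviour with the standard library (port and cookie value match the test; everything else is illustrative):

from http.server import BaseHTTPRequestHandler, HTTPServer
from urllib.parse import parse_qs, urlparse

class SetCookieHandler(BaseHTTPRequestHandler):
    """Set a cookie, then redirect to the URL given in ?continue=..."""

    def do_GET(self):
        args = parse_qs(urlparse(self.path).query)
        target = args.get('continue', ['/test_over'])[0]
        self.send_response(303)
        self.send_header('Set-Cookie', 'name=value')
        self.send_header('Location', target)
        self.end_headers()

# Serve until interrupted, then exercise it with e.g.:
#   curl -v 'http://localhost:8000/set_cookie?continue=/test_over'
if __name__ == '__main__':
    HTTPServer(('localhost', 8000), SetCookieHandler).serve_forever()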
diff --git a/client/site_tests/login_CryptohomeDataLeak/control b/client/site_tests/login_CryptohomeDataLeak/control
deleted file mode 100644
index e40faec..0000000
--- a/client/site_tests/login_CryptohomeDataLeak/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "login_CryptohomeDataLeak"
-PURPOSE = "Verify decrypted user data is cleared once the session is over"
-CRITERIA = """
-This test will fail if user data is still visible after end of session.
-"""
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "Security"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This test checks whether a user data file created during a session is
-visible once the session is logged out.
-"""
-
-job.run_test('login_CryptohomeDataLeak')
diff --git a/client/site_tests/login_CryptohomeDataLeak/login_CryptohomeDataLeak.py b/client/site_tests/login_CryptohomeDataLeak/login_CryptohomeDataLeak.py
deleted file mode 100644
index f061551..0000000
--- a/client/site_tests/login_CryptohomeDataLeak/login_CryptohomeDataLeak.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import cryptohome
-
-
-class login_CryptohomeDataLeak(test.test):
-    """Verify decrypted user data is cleared after end of session.
-    """
-    version = 1
-
-    _CHAPS_LOCK_DIR = '/run/lock/power_override'
-    _CHAPS_LOCK_PREFIX = 'chapsd_token_init_slot_'
-    _CHAPS_INIT_TIMEOUT = 30
-
-    def run_once(self):
-        """Entry point of test"""
-        username = ''
-        test_file = ''
-
-        with chrome.Chrome() as cr:
-            username = cr.username
-            if not cryptohome.is_permanent_vault_mounted(username):
-                raise error.TestError('Expected to find a mounted vault.')
-
-            test_file =  '/home/.shadow/%s/mount/hello' \
-                         % cryptohome.get_user_hash(username)
-
-            logging.info("Test file: %s", test_file)
-            open(test_file, 'w').close()
-
-            # Check until chaps lock file disappear.
-            for _ in xrange(self._CHAPS_INIT_TIMEOUT):
-                time.sleep(1)
-                has_lock = False
-                for lock in os.listdir(self._CHAPS_LOCK_DIR):
-                    if lock.startswith(self._CHAPS_LOCK_PREFIX):
-                        has_lock = True
-                        break
-                if not has_lock:
-                    break
-            else:
-                raise error.TestError(
-                        'Timed out waiting for chaps to finish loading tokens.')
-
-        if cryptohome.is_vault_mounted(user=username, allow_fail=True):
-            raise error.TestError('Expected to not find a mounted vault.')
-
-        # At this point, the session is not active and the file name is expected
-        # to be encrypted again.
-
-        if os.path.isfile(test_file):
-            raise error.TestFail('File still visible after end of session.')
-
-        cryptohome.remove_vault(username)
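The for/else loop in the deleted test is just a bounded wait for all chapsd token-init lock files to disappear before the session is torn down. The same wait as a standalone helper (directory and prefix copied from the constants above; the function name is made up):

import os
import time

CHAPS_LOCK_DIR = '/run/lock/power_override'
CHAPS_LOCK_PREFIX = 'chapsd_token_init_slot_'

def wait_for_chaps_locks(timeout_secs=30):
    """Return True once no chapsd token-init lock files remain, else False."""
    for _ in range(timeout_secs):
        time.sleep(1)
        try:
            locks = os.listdir(CHAPS_LOCK_DIR)
        except OSError:
            locks = []
        if not any(name.startswith(CHAPS_LOCK_PREFIX) for name in locks):
            return True
    return False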
diff --git a/client/site_tests/login_CryptohomeIncognito/control b/client/site_tests/login_CryptohomeIncognito/control
index bf91473..7632a71 100644
--- a/client/site_tests/login_CryptohomeIncognito/control
+++ b/client/site_tests/login_CryptohomeIncognito/control
@@ -9,6 +9,7 @@
 TEST_CLASS = "login"
 TEST_TYPE = "client"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test will fail if guest cryptohome is not mounted when logged in as guest,
diff --git a/client/site_tests/login_CryptohomeIncognito/login_CryptohomeIncognito.py b/client/site_tests/login_CryptohomeIncognito/login_CryptohomeIncognito.py
index 034eba9..b3925ac 100644
--- a/client/site_tests/login_CryptohomeIncognito/login_CryptohomeIncognito.py
+++ b/client/site_tests/login_CryptohomeIncognito/login_CryptohomeIncognito.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/login_CryptohomeOwnerQuery/control b/client/site_tests/login_CryptohomeOwnerQuery/control
index ebd09e5..424f7a7 100644
--- a/client/site_tests/login_CryptohomeOwnerQuery/control
+++ b/client/site_tests/login_CryptohomeOwnerQuery/control
@@ -9,6 +9,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "login"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test verifies that the owner user query in cryptohome reports the proper
diff --git a/client/site_tests/login_CryptohomeOwnerQuery/login_CryptohomeOwnerQuery.py b/client/site_tests/login_CryptohomeOwnerQuery/login_CryptohomeOwnerQuery.py
index 496aeb9..c46c522 100644
--- a/client/site_tests/login_CryptohomeOwnerQuery/login_CryptohomeOwnerQuery.py
+++ b/client/site_tests/login_CryptohomeOwnerQuery/login_CryptohomeOwnerQuery.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,8 +6,12 @@
 # Most of this code is based on login_GuestAndActualSession, which performs
 # similar ownership clearing/checking tasks.
 
-import gobject
 from dbus.mainloop.glib import DBusGMainLoop
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error
@@ -26,7 +31,7 @@
         bus_loop = DBusGMainLoop(set_as_default=True)
         self._session_manager = session_manager.connect(bus_loop)
         self._listener = session_manager.OwnershipSignalListener(
-                gobject.MainLoop())
+                GObject.MainLoop())
         self._listener.listen_for_new_key_and_policy()
 
 
diff --git a/client/site_tests/login_GaiaLogin/control b/client/site_tests/login_GaiaLogin/control
index 1c42c32..9b0d9a6 100644
--- a/client/site_tests/login_GaiaLogin/control
+++ b/client/site_tests/login_GaiaLogin/control
@@ -9,13 +9,13 @@
 TEST_CATEGORY = "General"
 TEST_CLASS = "login"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test verifies that logging into production Gaia works correctly.
 
-It logs in using the telemetry gaia_login flag, and navigates to
-accounts.google.com to verify that we're logged in to gaia, as opposed
-to fake telemetry login.
+It logs in using the telemetry gaia_login flag, and navigates to google.com to
+verify that we're logged in to gaia, as opposed to fake telemetry login.
 """
 
 job.run_test('login_GaiaLogin',
diff --git a/client/site_tests/login_GaiaLogin/control.plt b/client/site_tests/login_GaiaLogin/control.plt
index 6b6c13a..12bf444 100644
--- a/client/site_tests/login_GaiaLogin/control.plt
+++ b/client/site_tests/login_GaiaLogin/control.plt
@@ -4,20 +4,20 @@
 
 from autotest_lib.client.common_lib.cros import power_load_util
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "login_GaiaLogin.plt"
 ATTRIBUTES = "suite:power_daily"
 TIME = "SHORT"
 TEST_CATEGORY = "General"
 TEST_CLASS = "login"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test verifies that logging into production Gaia works correctly.
 
-It logs in using the telemetry gaia_login flag, and navigates to
-accounts.google.com to verify that we're logged in to gaia, as opposed
-to fake telemetry login.
+It logs in using the telemetry gaia_login flag, and navigates to google.com to
+verify that we're logged in to gaia, as opposed to fake telemetry login.
 """
 
 username = power_load_util.get_username()
diff --git a/client/site_tests/login_GaiaLogin/login_GaiaLogin.py b/client/site_tests/login_GaiaLogin/login_GaiaLogin.py
index 282b3bb..4d2505f 100644
--- a/client/site_tests/login_GaiaLogin/login_GaiaLogin.py
+++ b/client/site_tests/login_GaiaLogin/login_GaiaLogin.py
@@ -1,8 +1,9 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from autotest_lib.client.bin import test
+from autotest_lib.client.bin import test, utils
 from autotest_lib.client.cros import cryptohome
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import chrome
@@ -16,9 +17,9 @@
     def run_once(self, username, password):
         """Test body."""
         if not username:
-          raise error.TestFail('User not set.')
+            raise error.TestFail('User not set.')
         if not password:
-          raise error.TestFail('Password not set.')
+            raise error.TestFail('Password not set.')
 
         with chrome.Chrome(gaia_login=True,
                            username=username,
@@ -27,23 +28,26 @@
                     user=chrome.NormalizeEmail(username)):
                 raise error.TestFail('Expected to find a mounted vault for %s'
                                      % username)
+
             tab = cr.browser.tabs.New()
-            # TODO(achuith): Use a better signal of being logged in, instead of
-            # parsing accounts.google.com.
-            tab.Navigate('http://accounts.google.com')
+            tab.Navigate('https://google.com')
             tab.WaitForDocumentReadyStateToBeComplete()
-            res = tab.EvaluateJavaScript('''
-                    var res = '',
-                        divs = document.getElementsByTagName('div');
-                    for (var i = 0; i < divs.length; i++) {
-                        res = divs[i].textContent;
-                        if (res.search('%s') > 1) {
-                            break;
-                        }
-                    }
-                    res;
-            ''' % username)
-            if not res:
-                raise error.TestFail('No references to %s on accounts page.'
-                                     % username)
-            tab.Close()
+
+            def _userLoggedIn(tab, username):
+                # TODO(achuith): Use a better signal of being logged in, instead
+                # of parsing google.com.
+                res = tab.EvaluateJavaScript('''
+                      var res = '',
+                          divs = document.getElementsByTagName('div');
+                      for (var i = 0; i < divs.length; i++) {
+                          res = divs[i].textContent;
+                          if (res.search('%s') > 1) {
+                              break;
+                          }
+                      }
+                      res;
+              ''' % username)
+                return res
+
+            utils.poll_for_condition(lambda: _userLoggedIn(tab, username),
+                                     timeout=20)
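The new login check above leans on utils.poll_for_condition instead of a single scrape of the page. For readers outside autotest, that helper behaves roughly like the sketch below (the real implementation lives in autotest_lib.client.bin.utils; this is only an approximation of its contract):

import time

def poll_for_condition(condition, timeout=20, sleep_interval=1,
                       exception=None, desc='condition'):
    """Call condition() until it returns a truthy value or timeout expires."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        value = condition()
        if value:
            return value
        time.sleep(sleep_interval)
    if exception is not None:
        raise exception
    raise TimeoutError('Timed out waiting for %s' % desc)

# Usage mirroring the test: poll until the page text mentions the user, e.g.
# poll_for_condition(lambda: _userLoggedIn(tab, username), timeout=20)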
diff --git a/client/site_tests/login_LoginPin/control b/client/site_tests/login_LoginPin/control
new file mode 100644
index 0000000..c78eb1f
--- /dev/null
+++ b/client/site_tests/login_LoginPin/control
@@ -0,0 +1,22 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "rsorokin"
+NAME = "login_LoginPin"
+ATTRIBUTES = "suite:bvt-perbuild"
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "login"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test consists of two parts:
+ * Log in as a new user and set up a PIN for that user
+ * Log in as an existing user with the PIN
+By default both parts run sequentially. To skip either part, set the
+`setup_pin` or `login_pin` boolean parameter to False.
+"""
+
+job.run_test('login_LoginPin')
diff --git a/client/site_tests/login_LoginPin/login_LoginPin.py b/client/site_tests/login_LoginPin/login_LoginPin.py
new file mode 100644
index 0000000..1b31ed9
--- /dev/null
+++ b/client/site_tests/login_LoginPin/login_LoginPin.py
@@ -0,0 +1,86 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from dbus.mainloop.glib import DBusGMainLoop
+from gi.repository import GObject
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.cros import cryptohome
+from autotest_lib.client.common_lib import error, lsbrelease_utils
+from autotest_lib.client.common_lib.cros import chrome, session_manager
+
+class login_LoginPin(test.test):
+    """Sets up a PIN for user and then logs in using the pin."""
+    version = 1
+
+    def run_once(self,
+                 username='autotest',
+                 password='password',
+                 pin='123456789',
+                 setup_pin=True,
+                 login_pin=True):
+        """Test body."""
+        if not cryptohome.is_low_entropy_credentials_supported():
+            if lsbrelease_utils.get_current_board() == 'hatch':
+                # Fail on a board where LEC must work so bisection could be run.
+                raise error.TestFail(
+                        'low entropy credentials must be ' +
+                        'supported on hatch, are the cryptohome utils wrong?')
+            raise error.TestNAError(
+                    'Skip test: No hardware support for PIN login')
+
+        username = chrome.NormalizeEmail(username)
+        if setup_pin:
+            with chrome.Chrome(username=username, password=password) as cr:
+                if not cryptohome.is_vault_mounted(username):
+                    raise error.TestFail(
+                            'Expected to find a mounted vault for %s' %
+                            username)
+
+                tab = cr.browser.tabs.New()
+                tab.Navigate('chrome://os-settings/osPrivacy/lockScreen')
+
+                tab.WaitForDocumentReadyStateToBeComplete()
+                setup_pin_js = '''
+                  const getAuthToken = new Promise((resolve, reject) => {
+                    chrome.quickUnlockPrivate.getAuthToken('%s', function(auth_token) { resolve(auth_token.token); })
+                  });
+                  function setModes(token) {
+                    return new Promise((resolve, reject) => {
+                      chrome.quickUnlockPrivate.setModes(token, [chrome.quickUnlockPrivate.QuickUnlockMode.PIN], ['%s'], resolve);
+                    })
+                  }
+                  function canAuthenticatePin() {
+                    return new Promise((resolve, reject) => {
+                      chrome.quickUnlockPrivate.canAuthenticatePin(resolve);
+                    })
+                  }
+
+                  getAuthToken.then(setModes).then(canAuthenticatePin);
+                  ''' % (password, pin)
+                pin_set = tab.EvaluateJavaScript(setup_pin_js, promise=True)
+                if not pin_set:
+                    raise error.TestFail('Failed to setup a pin')
+
+        if login_pin:
+            DBusGMainLoop(set_as_default=True)
+            listener = session_manager.SessionSignalListener(
+                    GObject.MainLoop())
+            listener.listen_for_session_state_change('started')
+            with chrome.Chrome(auto_login=False,
+                               clear_enterprise_policy=False,
+                               dont_override_profile=True,
+                               extra_browser_args=[
+                                       '--skip-force-online-signin-for-testing'
+                               ]) as cr:
+                oobe = cr.browser.oobe
+                oobe.WaitForJavaScriptCondition(
+                        "typeof Oobe == 'function' && "
+                        "typeof OobeAPI == 'object' && "
+                        "Oobe.readyForTesting",
+                        timeout=20)
+                oobe.ExecuteJavaScript("OobeAPI.loginWithPin('%s','%s')" %
+                                       (username, pin))
+                listener.wait_for_signals(desc='Session started.', timeout=20)
diff --git a/client/site_tests/login_LoginSuccess/control b/client/site_tests/login_LoginSuccess/control
index 5eb32e7..f168084 100644
--- a/client/site_tests/login_LoginSuccess/control
+++ b/client/site_tests/login_LoginSuccess/control
@@ -5,7 +5,7 @@
 AUTHOR = "cmasone, achuith"
 NAME = "login_LoginSuccess"
 ATTRIBUTES = (
-        "suite:bvt-inline, suite:push_to_prod, suite:skylab_staging_test,"
+        "suite:bvt-inline, suite:pvs-bvt-inline, suite:push_to_prod, suite:skylab_staging_test,"
         " suite:smoke, suite:dev_drone_image_test, suite:labqual"
 )
 TIME = "SHORT"
@@ -13,6 +13,7 @@
 TEST_CLASS = "login"
 TEST_TYPE = "client"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test verifies that signing in via the UI (using Telemetry) works correctly.
diff --git a/client/site_tests/login_LoginSuccess/control.stress b/client/site_tests/login_LoginSuccess/control.stress
index 5b59eed..3250978 100644
--- a/client/site_tests/login_LoginSuccess/control.stress
+++ b/client/site_tests/login_LoginSuccess/control.stress
@@ -9,6 +9,7 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "login"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test stress tests and verifies that signing in via the UI (using Telemetry)
diff --git a/client/site_tests/login_LoginSuccess/login_LoginSuccess.py b/client/site_tests/login_LoginSuccess/login_LoginSuccess.py
index 071b0d3..50225cd 100644
--- a/client/site_tests/login_LoginSuccess/login_LoginSuccess.py
+++ b/client/site_tests/login_LoginSuccess/login_LoginSuccess.py
@@ -1,10 +1,15 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import gobject
 from dbus.mainloop.glib import DBusGMainLoop
 
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
+
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib.cros import chrome, session_manager
 from autotest_lib.client.cros import asan
@@ -19,7 +24,7 @@
     # TODO(afakhry): Remove this timeout increase for asan bots once we figure
     # out why logging out is taking so long. See crbug.com/488291
     if asan.running_on_asan():
-      _SESSION_STOP_TIMEOUT *= 2
+        _SESSION_STOP_TIMEOUT *= 2
 
 
     def initialize(self):
@@ -28,24 +33,35 @@
         bus_loop = DBusGMainLoop(set_as_default=True)
         self._session_manager = session_manager.connect(bus_loop)
         self._listener = session_manager.SessionSignalListener(
-                gobject.MainLoop())
+                GObject.MainLoop())
 
 
-    def run_once(self, stress_run=False, arc_mode=None):
+    def run_once(self,
+                 stress_run=False,
+                 arc_mode=None,
+                 username=None,
+                 password=None,
+                 dont_override_profile=False):
         """
         Runs the test.
 
         @param stress_run: True if we are doing a stress run and want to
                            double the timeout.
+        @param username: Username to use instead of the default telemetry one.
+        @param password: Password to use instead of the default telemetry one.
         @param arc_mode: This value is passed to Chrome and determines how
                          the ARC/Android instance should start. Possible values
                          are defined in common_lib/cros/arc_common.py.
+        @param dont_override_profile: Don't delete cryptohome before login.
 
         """
         if stress_run:
             self._SESSION_STOP_TIMEOUT *= 2
         self._listener.listen_for_session_state_change('started')
-        with chrome.Chrome(arc_mode=arc_mode):
+        with chrome.Chrome(arc_mode=arc_mode,
+                           username=username,
+                           password=password,
+                           dont_override_profile=dont_override_profile):
             self._listener.wait_for_signals(desc='Session started.',
                                             timeout=self._SESSION_START_TIMEOUT)
             # To enable use as a 'helper test'.
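
With the new run_once parameters, a wrapping control file could drive this test as a helper that signs in a specific account and keeps its cryptohome between runs. A hedged, illustrative sketch; the account name and password are placeholders, not real credentials:

    # Hypothetical control snippet reusing login_LoginSuccess as a helper test.
    job.run_test('login_LoginSuccess',
                 username='helper-account@example.com',
                 password='placeholder-password',
                 dont_override_profile=True)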
diff --git a/client/site_tests/login_MultipleSessions/control b/client/site_tests/login_MultipleSessions/control
index 957e17e..ac9a54e 100644
--- a/client/site_tests/login_MultipleSessions/control
+++ b/client/site_tests/login_MultipleSessions/control
@@ -9,6 +9,7 @@
 TEST_CLASS = "login"
 TEST_TYPE = "client"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test is intended to ensure that the session manager can
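
A hedged, condensed sketch of the flow this test exercises; the calls and the first username are taken from the login_MultipleSessions hunks below, while sm stands in for the proxy returned by session_manager.connect() and the second account is illustrative:

    # Illustrative only: two back-to-back sessions through the session manager.
    cryptohome.ensure_clean_cryptohome_for('first_user@nowhere.com')
    sm.StartSession('first_user@nowhere.com', '')
    cryptohome.ensure_clean_cryptohome_for('second_user@nowhere.com')
    sm.StartSession('second_user@nowhere.com', '')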
diff --git a/client/site_tests/login_MultipleSessions/login_MultipleSessions.py b/client/site_tests/login_MultipleSessions/login_MultipleSessions.py
index df97885..41839b5 100644
--- a/client/site_tests/login_MultipleSessions/login_MultipleSessions.py
+++ b/client/site_tests/login_MultipleSessions/login_MultipleSessions.py
@@ -1,9 +1,14 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import gobject
 from dbus.mainloop.glib import DBusGMainLoop
+# AU tests use ToT client code, but ToT -3 client version.
+try:
+    from gi.repository import GObject
+except ImportError:
+    import gobject as GObject
 
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error
@@ -27,12 +32,9 @@
         self._bus_loop = DBusGMainLoop(set_as_default=True)
         self._session_manager = session_manager.connect(self._bus_loop)
         self._listener = session_manager.OwnershipSignalListener(
-                gobject.MainLoop())
+                GObject.MainLoop())
         self._listener.listen_for_new_key_and_policy()
 
-        self._cryptohome_proxy = cryptohome.CryptohomeProxy(
-            self._bus_loop, self.autodir, self.job)
-
 
     def run_once(self):
         expected_owner = 'first_user@nowhere.com'
@@ -64,7 +66,7 @@
 
 
     def __start_session_for(self, user):
-        """Call StartSession() for user, ensure he has clean on-device state
+        """Call StartSession() for user, ensure the user has clean on-device state
 
         Make a fresh cryptohome for user, and then start a session for him
         with the session manager.
@@ -73,7 +75,7 @@
 
         @raises error.TestFail: if the session cannot be started.
         """
-        self._cryptohome_proxy.ensure_clean_cryptohome_for(user)
+        cryptohome.ensure_clean_cryptohome_for(user)
         self._session_manager.StartSession(user, '')
 
 
diff --git a/client/site_tests/login_OobeLocalization/control b/client/site_tests/login_OobeLocalization/control
index 6dfa4f7..5ddfb75 100644
--- a/client/site_tests/login_OobeLocalization/control
+++ b/client/site_tests/login_OobeLocalization/control
@@ -2,19 +2,20 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = 'michaelpg, alemate'
+AUTHOR = 'rsorokin, alemate'
 NAME = 'login_OobeLocalization'
 PURPOSE = 'Tests different region configurations at OOBE.'
 CRITERIA = """
 Fails if the language or keyboard dropdowns in OOBE under a supported region
 configuration do not include the desired values.
 """
-ATTRIBUTES = "suite:experimental"
-TIME = 'MEDIUM'
+ATTRIBUTES = "suite:bvt-perbuild"
+TIME = 'LENGTHY'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'login'
 TEST_TYPE = 'client'
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test verifies the locale and keyboard options present at OOBE under
diff --git a/client/site_tests/login_OobeLocalization/login_OobeLocalization.py b/client/site_tests/login_OobeLocalization/login_OobeLocalization.py
index ec864ee..618548e 100644
--- a/client/site_tests/login_OobeLocalization/login_OobeLocalization.py
+++ b/client/site_tests/login_OobeLocalization/login_OobeLocalization.py
@@ -1,9 +1,15 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import json
 import logging
+from six.moves import map
 
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
@@ -14,17 +20,17 @@
     """Tests different region configurations at OOBE."""
     version = 1
 
-    _LANGUAGE_SELECT = 'language-select'
-    _KEYBOARD_SELECT = 'keyboard-select'
+    _LANGUAGE_SELECT = "document.getElementById('connect').$.languageSelect.$.select"
+    _KEYBOARD_SELECT = "document.getElementById('connect').$.keyboardSelect.$.select"
+    _KEYBOARD_ITEMS = "document.getElementById('connect').$.keyboardSelect.items"
     _FALLBACK_KEYBOARD = 'xkb:us::eng'
 
+    _VPD_CACHE_DIR = '/mnt/stateful_partition/unencrypted/cache/vpd'
     # dump_vpd_log reads the VPD cache in lieu of running `vpd -l`.
-    _VPD_FILENAME = '/var/cache/vpd/full-v2.txt'
+    _VPD_FILENAME = _VPD_CACHE_DIR + '/full-v2.txt'
     # The filtered cache is created from the cache by dump_vpd_log. It is read
-    # at startup if the device is not owned. (Otherwise /tmp/machine-info is
-    # created by dump_vpd_log and read. See
-    # /platform/login_manager/init/machine-info.conf.)
-    _FILTERED_VPD_FILENAME = '/var/log/vpd_2.0.txt'
+    # by Chrome to load region information.
+    _FILTERED_VPD_FILENAME = _VPD_CACHE_DIR + '/filtered.txt'
     # cros-regions.json has information for each region (locale, input method,
     # etc.) in JSON format.
     _REGIONS_FILENAME = '/usr/share/misc/cros-regions.json'
@@ -49,6 +55,10 @@
                              region['region_code'])
                 continue
 
+            # TODO(https://crbug.com/1256723): Reenable when the bug is fixed.
+            if region['region_code'] == 'kz':
+                continue
+
             # TODO(hungte) When OOBE supports cros-regions.json
             # (crosbug.com/p/34536) we can remove initial_locale,
             # initial_timezone, and keyboard_layout.
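
For reference, a hedged sketch of what a single entry returned by _get_regions() might look like, reduced to the two keys this test reads; real entries in /usr/share/misc/cros-regions.json carry additional fields and the values shown here are illustrative:

    # Hypothetical region entry; only region_code and keyboards are used above.
    region = {
            'region_code': 'us',
            'keyboards': ['xkb:us::eng'],
    }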
@@ -64,13 +74,22 @@
         cros_ui.stop()
         utils.run('rm /home/chronos/Local\ State', ignore_status=True)
         utils.run('dump_vpd_log --clean')
+        utils.run('dump_vpd_log')
 
 
     def _run_with_chrome(self, func, *args):
-        with chrome.Chrome(auto_login=False) as self._chrome:
-            utils.poll_for_condition(
-                    self._is_oobe_ready,
-                    exception=error.TestFail('OOBE not ready'))
+        with chrome.Chrome(
+                auto_login=False,
+                extra_browser_args=[
+                        "--disable-hid-detection-on-oobe",
+                        "--force-hwid-check-result-for-test=success",
+                        "--vmodule=login_display_host_webui=1"
+                ]) as self._chrome:
+            self._chrome.browser.oobe.WaitForJavaScriptCondition(
+                    "typeof Oobe == 'function' && "
+                    "typeof OobeAPI == 'object' && "
+                    "OobeAPI.screens.WelcomeScreen.isVisible()",
+                    timeout=30)
             return func(*args)
 
 
@@ -84,12 +103,12 @@
                 initial_locale,
                 alternate_values = self._resolve_language(initial_locale),
                 check_separator = True):
-            raise error.TestFail(
-                    'Language not found for region "%s".\n'
-                    'Actual value of %s:\n%s' % (
-                            region['region_code'],
-                            self._LANGUAGE_SELECT,
-                            self._dump_options(self._LANGUAGE_SELECT)))
+            raise error.TestFail('Language not found for region "%s".\n'
+                                 'Expected: %s.\n'
+                                 'Actual value of %s:\n%s' %
+                                 (region['region_code'], initial_locale,
+                                  self._LANGUAGE_SELECT,
+                                  self._dump_options(self._LANGUAGE_SELECT)))
 
         # We expect to see only login keyboards at OOBE.
         keyboards = region['keyboards']
@@ -116,7 +135,7 @@
         # Check that the fallback keyboard is present.
         if self._FALLBACK_KEYBOARD not in keyboards:
             if not self._verify_option_exists(
-                    self._KEYBOARD_SELECT,
+                    self._KEYBOARD_ITEMS,
                     self._comp_ime_prefix + self._FALLBACK_KEYBOARD):
                 raise error.TestFail(
                         'Fallback keyboard layout not found for region "%s".\n'
@@ -138,6 +157,7 @@
             for line in vpd_log:
                 # Extract "key"="value" pair.
                 key, _, value = line.replace('"', '').partition('=')
+                value = value.rstrip("\n")
                 vpd[key] = value
 
             vpd.update(vpd_settings);
@@ -177,7 +197,7 @@
         """
         js_expression = """
                 (function () {
-                  var select = document.querySelector('#%s');
+                  var select = %s;
                   if (!select || select.selectedIndex)
                     return false;
                   var values = '%s'.split(',');
@@ -193,9 +213,7 @@
                         'OPTGROUP';
                   }
                   return true;
-                })()""" % (select_id,
-                           values,
-                           alternate_values,
+                })()""" % (select_id, values, alternate_values,
                            check_separator)
 
         return self._chrome.browser.oobe.EvaluateJavaScript(js_expression)
@@ -213,8 +231,7 @@
         """
         js_expression = """
                 (function () {
-                  return !!document.querySelector(
-                      '#%s option[value=\\'%s\\']');
+                  return !!%s.find(el => el.value == '%s');
                 })()""" % (select_id, value)
 
         return self._chrome.browser.oobe.EvaluateJavaScript(js_expression)
@@ -234,7 +251,6 @@
 
 
     def _get_regions(self):
-        regions = {}
         with open(self._REGIONS_FILENAME, 'r') as regions_file:
             return json.load(regions_file).values()
 
@@ -243,8 +259,9 @@
         """Finds the xkb values' component extension id prefix, if any.
         @returns the prefix if found, or an empty string
         """
-        return self._chrome.browser.oobe.EvaluateJavaScript("""
-                var value = document.getElementById('%s').value;
+        return self._chrome.browser.oobe.EvaluateJavaScript(
+                """
+                var value = %s.value;
                 value.substr(0, value.lastIndexOf('xkb:'))""" %
                 self._KEYBOARD_SELECT)
 
@@ -274,23 +291,13 @@
         return ''
 
 
-    def _is_oobe_ready(self):
-        return (self._chrome.browser.oobe and
-                self._chrome.browser.oobe.EvaluateJavaScript(
-                        "var select = document.getElementById('%s');"
-                        "select && select.children.length >= 2" %
-                                self._LANGUAGE_SELECT))
-
-
     def _dump_options(self, select_id):
         js_expression = """
                 (function () {
-                  var selector = '#%s';
                   var divider = ',';
-                  var select = document.querySelector(selector);
+                  var select = %s;
                   if (!select)
-                    return 'document.querySelector(\\'' + selector +
-                        '\\') failed.';
+                    return 'selector failed.';
                   var dumpOptgroup = function(group) {
                     var result = '';
                     for (var i = 0; i < group.children.length; i++) {
diff --git a/client/site_tests/login_UnicornLogin/control b/client/site_tests/login_UnicornLogin/control
deleted file mode 100644
index 486d5d0..0000000
--- a/client/site_tests/login_UnicornLogin/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith"
-NAME = "login_UnicornLogin"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "login"
-TEST_TYPE = "client"
-# Disable until test can be fixed: https://crbug.com/1097949
-# ATTRIBUTES = "suite:bvt-perbuild"
-
-DOC = """
-This test verifies that logging into a Unicorn account works correctly.
-"""
-
-job.run_test('login_UnicornLogin')
diff --git a/client/site_tests/login_UnicornLogin/login_UnicornLogin.py b/client/site_tests/login_UnicornLogin/login_UnicornLogin.py
deleted file mode 100644
index cebdad7..0000000
--- a/client/site_tests/login_UnicornLogin/login_UnicornLogin.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.cros import cryptohome
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-try:
-    # Importing this private util fails on public boards (e.g amd64-generic)
-    from autotest_lib.client.common_lib.cros import password_util
-except ImportError:
-    logging.error('Failed to import password_util from autotest-private')
-
-
-class login_UnicornLogin(test.test):
-  """Sign into a unicorn account."""
-  version = 1
-
-
-  def run_once(self):
-    """Test function body."""
-
-
-    with chrome.Chrome(auto_login=False,
-                       disable_gaia_services=False) as cr:
-      parent = password_util.get_unicorn_parent_credentials()
-      child = password_util.get_unicorn_child_credentials()
-      cr.browser.oobe.NavigateUnicornLogin(
-          child_user=child.username, child_pass=child.password,
-          parent_user=parent.username, parent_pass=parent.password)
-      if not cryptohome.is_vault_mounted(
-          user=chrome.NormalizeEmail(child.username)):
-        raise error.TestFail('Expected to find a mounted vault for %s'
-                             % child.username)
-      tab = cr.browser.tabs.New()
-      # TODO(achuith): Use a better signal of being logged in, instead of
-      # parsing accounts.google.com.
-      tab.Navigate('http://accounts.google.com')
-      tab.WaitForDocumentReadyStateToBeComplete()
-      res = tab.EvaluateJavaScript(
-          '''
-              var res = '',
-              divs = document.getElementsByTagName('div');
-              for (var i = 0; i < divs.length; i++) {
-                res = divs[i].textContent;
-                if (res.search('%s') > 1) {
-                  break;
-                }
-              }
-              res;
-          ''' % child.username.lower())
-      if not res:
-        raise error.TestFail('No references to %s on accounts page.'
-                             % child.username)
-      tab.Close()
diff --git a/client/site_tests/login_UserPolicyKeys/control b/client/site_tests/login_UserPolicyKeys/control
deleted file mode 100644
index 078d980..0000000
--- a/client/site_tests/login_UserPolicyKeys/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "joaodasilva, cmasone"
-NAME = "login_UserPolicyKeys"
-ATTRIBUTES = "suite:bvt-inline, suite:smoke"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "login"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-DOC = """
-This test stores a user policy at the session_manager over dbus, and verifies
-that the user policy key was stored at the expected location and is readable
-by chronos.
-"""
-
-job.run_test('login_UserPolicyKeys')
diff --git a/client/site_tests/login_UserPolicyKeys/login_UserPolicyKeys.py b/client/site_tests/login_UserPolicyKeys/login_UserPolicyKeys.py
deleted file mode 100644
index cb69bbe..0000000
--- a/client/site_tests/login_UserPolicyKeys/login_UserPolicyKeys.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus, grp, os, pwd, stat
-from dbus.mainloop.glib import DBusGMainLoop
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import policy, session_manager
-from autotest_lib.client.cros import cros_ui, cryptohome, ownership
-
-
-class login_UserPolicyKeys(test.test):
-    """Verifies that, after user policy is pushed, the user policy key winds
-       up stored in the right place.
-    """
-    version = 1
-
-    def _can_read(self, uid, gid, info):
-        """Returns true if uid or gid can read a file with the info stat."""
-        if uid == info.st_uid:
-            return info.st_mode & stat.S_IRUSR
-        if gid == info.st_gid:
-            return info.st_mode & stat.S_IRGRP
-        return info.st_mode & stat.S_IROTH
-
-
-    def _can_execute(self, uid, gid, info):
-        """Returns true if uid or gid can execute a file with the info stat."""
-        if uid == info.st_uid:
-            return info.st_mode & stat.S_IXUSR
-        if gid == info.st_gid:
-            return info.st_mode & stat.S_IXGRP
-        return info.st_mode & stat.S_IXOTH
-
-
-    def _verify_key_file(self, key_file):
-        """Verifies that the key file has been created and is readable."""
-        if not os.path.isfile(key_file):
-            raise error.TestFail('%s does not exist!' % key_file)
-        # And is readable by chronos.
-        chronos_uid = pwd.getpwnam('chronos').pw_uid
-        chronos_gid = grp.getgrnam('chronos').gr_gid
-        info = os.stat(key_file)
-        if not stat.S_ISREG(info.st_mode):
-            raise error.TestFail('%s is not a regular file' % key_file)
-        if not self._can_read(chronos_uid, chronos_gid, info):
-            raise error.TestFail('chronos can\' read %s, mode is %s' %
-                                 (key_file, oct(info.st_mode)))
-        # All the parent directories must be executable by chronos.
-        current = key_file
-        parent = os.path.dirname(current)
-        while current != parent:
-            current = parent
-            parent = os.path.dirname(parent)
-            info = os.stat(current)
-            mode = stat.S_IMODE(info.st_mode)
-            if not self._can_execute(chronos_uid, chronos_gid, info):
-                raise error.TestFail('chronos can\'t execute %s, mode is %s' %
-                                     (current, oct(info.st_mode)))
-
-
-    def initialize(self):
-        super(login_UserPolicyKeys, self).initialize()
-        policy.install_protobufs(self.autodir, self.job)
-        self._bus_loop = DBusGMainLoop(set_as_default=True)
-        self._cryptohome_proxy = cryptohome.CryptohomeProxy(
-            self._bus_loop, self.autodir, self.job)
-
-        # Clear the user's vault, to make sure the test starts without any
-        # policy or key lingering around. At this stage the session isn't
-        # started and there's no user signed in.
-        ownership.restart_ui_to_clear_ownership_files()
-        self._cryptohome_proxy.remove(ownership.TESTUSER)
-
-
-    def run_once(self):
-        # Mount the vault, connect to session_manager and start the session.
-        self._cryptohome_proxy.mount(ownership.TESTUSER,
-                                     ownership.TESTPASS,
-                                     create=True)
-        sm = session_manager.connect(self._bus_loop)
-        sm.StartSession(ownership.TESTUSER, '')
-
-        # No policy stored yet.
-        retrieved_policy = sm.RetrievePolicyEx(
-                session_manager.make_user_policy_descriptor(ownership.TESTUSER),
-                byte_arrays=True)
-        if retrieved_policy:
-            raise error.TestError('session_manager already has user policy!')
-
-        # And no user key exists.
-        key_file = ownership.get_user_policy_key_filename(ownership.TESTUSER)
-        if os.path.exists(key_file):
-            raise error.TestFail('%s exists before storing user policy!' %
-                                 key_file)
-
-        # Now store a policy. This is building a device policy protobuf, but
-        # that's fine as far as the session_manager is concerned; it's the
-        # outer PolicyFetchResponse that contains the public_key.
-        public_key = ownership.known_pubkey()
-        private_key = ownership.known_privkey()
-        policy_data = policy.build_policy_data()
-        policy_response = policy.generate_policy(private_key,
-                                                 public_key,
-                                                 policy_data)
-        try:
-            sm.StorePolicyEx(
-                session_manager.make_user_policy_descriptor(ownership.TESTUSER),
-                dbus.ByteArray(policy_response))
-        except dbus.exceptions.DBusException as e:
-            raise error.TestFail('Failed to store user policy', e)
-
-        # The policy key should have been created now.
-        self._verify_key_file(key_file)
-
-        # Restart the ui; the key should be deleted.
-        self._cryptohome_proxy.unmount(ownership.TESTUSER)
-        cros_ui.restart()
-        if os.path.exists(key_file):
-            raise error.TestFail('%s exists after restarting ui!' %
-                                 key_file)
-
-        # Starting a new session will restore the key that was previously
-        # stored. Reconnect to the session_manager, since the restart killed it.
-        self._cryptohome_proxy.mount(ownership.TESTUSER,
-                                     ownership.TESTPASS,
-                                     create=True)
-        sm = session_manager.connect(self._bus_loop)
-        sm.StartSession(ownership.TESTUSER, '')
-        self._verify_key_file(key_file)
-
-
-    def cleanup(self):
-        cros_ui.restart()
-        self._cryptohome_proxy.remove(ownership.TESTUSER)
-        super(login_UserPolicyKeys, self).cleanup()
diff --git a/client/site_tests/longevity_Tracker/control.chromesign b/client/site_tests/longevity_Tracker/control.chromesign
deleted file mode 100644
index 92064ac..0000000
--- a/client/site_tests/longevity_Tracker/control.chromesign
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'longevity_Tracker.chromesign'
-PURPOSE = 'Tracks Chrome OS performance over long periods.'
-TIME = 'LONG'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = 'performance'
-TEST_TYPE = 'client'
-DEPENDENCIES = 'chromesign'
-
-DOC = '''
-This script tracks performance of Chrome OS running the specified Kiosk App.
-It captures device resource data, including cpu and memory usage, and device
-temperature in the background with the specified Kiosk App running in the
-foreground. The script presumes the device is running the desired Chrome OS
-version, is enrolled, and the target App is launched and signed in.
-
-The script cycle shall terminate itself after the set duration, or soon after
-a longevity_terminate flag file is seen in the /tmp/ directory.
-'''
-
-kiosk_app_attributes = (
-    'chromesign:odjaaghiehpobimgdjjfofmablbaleem:viewer.html')
-
-job.run_test('longevity_Tracker',
-             kiosk_app_attributes=kiosk_app_attributes,
-             tag='chromesign')
diff --git a/client/site_tests/longevity_Tracker/control.riseplayer b/client/site_tests/longevity_Tracker/control.riseplayer
deleted file mode 100644
index 5778e7c..0000000
--- a/client/site_tests/longevity_Tracker/control.riseplayer
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'longevity_Tracker.riseplayer'
-PURPOSE = 'Tracks Chrome OS performance over long periods.'
-TIME = 'LONG'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = 'performance'
-TEST_TYPE = 'client'
-DEPENDENCIES = 'riseplayer'
-
-DOC = '''
-This script tracks performance of Chrome OS running the specified Kiosk App.
-It captures device resource data, including cpu and memory usage, and device
-temperature in the background with the specified Kiosk App running in the
-foreground. The script presumes the device is running the desired Chrome OS
-version, is enrolled, and the target App is launched and signed in.
-
-The script cycle shall terminate itself after the set duration, or soon after
-a longevity_terminate flag file is seen in the /tmp/ directory.
-'''
-
-kiosk_app_attributes = (
-    'riseplayer:mfpgpdablffhbfofnhlpgmokokbahooi:index.html')
-
-job.run_test('longevity_Tracker',
-             kiosk_app_attributes=kiosk_app_attributes,
-             tag='riseplayer')
diff --git a/client/site_tests/longevity_Tracker/control.stratosmedia b/client/site_tests/longevity_Tracker/control.stratosmedia
deleted file mode 100644
index 41af598..0000000
--- a/client/site_tests/longevity_Tracker/control.stratosmedia
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'longevity_Tracker.stratosmedia'
-PURPOSE = 'Tracks Chrome OS performance over long periods.'
-TIME = 'LONG'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = 'performance'
-TEST_TYPE = 'client'
-DEPENDENCIES = 'stratosmedia'
-
-DOC = '''
-This script tracks performance of Chrome OS running the specified Kiosk App.
-It captures device resource data, including cpu and memory usage, and device
-temperature in the background with the specified Kiosk App running in the
-foreground. The script presumes the device is running the desired Chrome OS
-version, is enrolled, and the target App is launched and signed in.
-
-The script cycle shall terminate itself after the set duration, or soon after
-a longevity_terminate flag file is seen in the /tmp/ directory.
-'''
-
-kiosk_app_attributes = (
-    'stratosmedia:alhlkpgheiefedomljbenmkpconkffhk:index.html')
-
-job.run_test('longevity_Tracker',
-             kiosk_app_attributes=kiosk_app_attributes,
-             tag='stratosmedia')
diff --git a/client/site_tests/longevity_Tracker/longevity_Tracker.py b/client/site_tests/longevity_Tracker/longevity_Tracker.py
deleted file mode 100755
index d150bee..0000000
--- a/client/site_tests/longevity_Tracker/longevity_Tracker.py
+++ /dev/null
@@ -1,560 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# pylint: disable=module-missing-docstring
-# pylint: disable=docstring-section-name
-
-import csv
-import glob
-import httplib
-import json
-import logging
-import os
-import re
-import shutil
-import time
-import urllib
-import urllib2
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import constants
-
-# TODO(scunningham): Return to 72000 (20 hrs) after server-side stabilizes.
-TEST_DURATION = 10800  # Duration of test (3 hrs) in seconds.
-SAMPLE_INTERVAL = 60  # Length of measurement samples in seconds.
-METRIC_INTERVAL = 3600  # Length between metric calculation in seconds.
-STABILIZATION_DURATION = 60  # Time for test stabilization in seconds.
-TMP_DIRECTORY = '/tmp/'
-EXIT_FLAG_FILE = TMP_DIRECTORY + 'longevity_terminate'
-PERF_FILE_NAME_PREFIX = 'perf'
-OLD_FILE_AGE = 14400  # Age of old files to be deleted in minutes = 10 days.
-# The manifest.json file for a Chrome Extension contains the app name, id,
-# version, and other app info. It is accessible by the OS only when the app
-# is running, and thus it's cryptohome directory mounted. Only one Kiosk app
-# can be running at a time.
-MANIFEST_PATTERN = '/home/.shadow/*/mount/user/Extensions/%s/*/manifest.json'
-VERSION_PATTERN = r'^(\d+)\.(\d+)\.(\d+)\.(\d+)$'
-DASHBOARD_UPLOAD_URL = 'https://chromeperf.appspot.com/add_point'
-
-
-class PerfUploadingError(Exception):
-    """Exception raised in perf_uploader."""
-    pass
-
-
-class longevity_Tracker(test.test):
-    """Monitor device and App stability over long periods of time."""
-
-    version = 1
-
-    def initialize(self):
-        self.temp_dir = os.path.split(self.tmpdir)[0]
-
-    def _get_cpu_usage(self):
-        """Compute percent CPU in active use over the sample interval.
-
-        Note: This method introduces a sleep period into the test, equal to
-        90% of the sample interval.
-
-        @returns float of percent active use of CPU.
-        """
-        # Time between measurements is ~90% of the sample interval.
-        measurement_time_delta = SAMPLE_INTERVAL * 0.90
-        cpu_usage_start = utils.get_cpu_usage()
-        time.sleep(measurement_time_delta)
-        cpu_usage_end = utils.get_cpu_usage()
-        return utils.compute_active_cpu_time(cpu_usage_start,
-                                                  cpu_usage_end) * 100
-
-    def _get_mem_usage(self):
-        """Compute percent memory in active use.
-
-        @returns float of percent memory in use.
-        """
-        total_memory = utils.get_mem_total()
-        free_memory = utils.get_mem_free()
-        return ((total_memory - free_memory) / total_memory) * 100
-
-    def _get_max_temperature(self):
-        """Get temperature of hottest sensor in Celsius.
-
-        @returns float of temperature of hottest sensor.
-        """
-        temperature = utils.get_current_temperature_max()
-        if not temperature:
-            temperature = 0
-        return temperature
-
-    def _get_hwid(self):
-        """Get hwid of test device, e.g., 'WOLF C4A-B2B-A47'.
-
-        @returns string of hwid (Hardware ID) of device under test.
-        """
-        with os.popen('crossystem hwid 2>/dev/null', 'r') as hwid_proc:
-            hwid = hwid_proc.read()
-        if not hwid:
-            hwid = 'undefined'
-        return hwid
-
-    def elapsed_time(self, mark_time):
-        """Get time elapsed since |mark_time|.
-
-        @param mark_time: point in time from which elapsed time is measured.
-        @returns time elapsed since the marked time.
-        """
-        return time.time() - mark_time
-
-    def modulo_time(self, timer, interval):
-        """Get time eplased on |timer| for the |interval| modulus.
-
-        Value returned is used to adjust the timer so that it is synchronized
-        with the current interval.
-
-        @param timer: time on timer, in seconds.
-        @param interval: period of time in seconds.
-        @returns time elapsed from the start of the current interval.
-        """
-        return timer % int(interval)
-
-    def syncup_time(self, timer, interval):
-        """Get time remaining on |timer| for the |interval| modulus.
-
-        Value returned is used to induce sleep just long enough to put the
-        process back in sync with the timer.
-
-        @param timer: time on timer, in seconds.
-        @param interval: period of time in seconds.
-        @returns time remaining till the end of the current interval.
-        """
-        return interval - (timer % int(interval))
-
-    def _record_perf_measurements(self, perf_values, perf_writer):
-        """Record attribute performance measurements, and write to file.
-
-        @param perf_values: dict of attribute performance values.
-        @param perf_writer: file to write performance measurements.
-        """
-        # Get performance measurements.
-        cpu_usage = '%.3f' % self._get_cpu_usage()
-        mem_usage = '%.3f' % self._get_mem_usage()
-        max_temp = '%.3f' % self._get_max_temperature()
-
-        # Append measurements to attribute lists in perf values dictionary.
-        perf_values['cpu'].append(cpu_usage)
-        perf_values['mem'].append(mem_usage)
-        perf_values['temp'].append(max_temp)
-
-        # Write performance measurements to perf timestamped file.
-        time_stamp = time.strftime('%Y/%m/%d %H:%M:%S')
-        perf_writer.writerow([time_stamp, cpu_usage, mem_usage, max_temp])
-        logging.info('Time: %s, CPU: %s, Mem: %s, Temp: %s',
-                     time_stamp, cpu_usage, mem_usage, max_temp)
-
-    def _record_90th_metrics(self, perf_values, perf_metrics):
-        """Record 90th percentile metric of attribute performance values.
-
-        @param perf_values: dict attribute performance values.
-        @param perf_metrics: dict attribute 90%-ile performance metrics.
-        """
-        # Calculate 90th percentile for each attribute.
-        cpu_values = perf_values['cpu']
-        mem_values = perf_values['mem']
-        temp_values = perf_values['temp']
-        cpu_metric = sorted(cpu_values)[(len(cpu_values) * 9) // 10]
-        mem_metric = sorted(mem_values)[(len(mem_values) * 9) // 10]
-        temp_metric = sorted(temp_values)[(len(temp_values) * 9) // 10]
-        logging.info('== Performance values: %s', perf_values)
-        logging.info('== 90th percentile: cpu: %s, mem: %s, temp: %s',
-                     cpu_metric, mem_metric, temp_metric)
-
-        # Append 90th percentile to each attribute performance metric.
-        perf_metrics['cpu'].append(cpu_metric)
-        perf_metrics['mem'].append(mem_metric)
-        perf_metrics['temp'].append(temp_metric)
-
-    def _get_median_metrics(self, metrics):
-        """Returns median of each attribute performance metric.
-
-        If no metric values were recorded, return 0 for each metric.
-
-        @param metrics: dict of attribute performance metric lists.
-        @returns dict of attribute performance metric medians.
-        """
-        if len(metrics['cpu']):
-            cpu_metric = sorted(metrics['cpu'])[len(metrics['cpu']) // 2]
-            mem_metric = sorted(metrics['mem'])[len(metrics['mem']) // 2]
-            temp_metric = sorted(metrics['temp'])[len(metrics['temp']) // 2]
-        else:
-            cpu_metric = 0
-            mem_metric = 0
-            temp_metric = 0
-        logging.info('== Median: cpu: %s, mem: %s, temp: %s',
-                     cpu_metric, mem_metric, temp_metric)
-        return {'cpu': cpu_metric, 'mem': mem_metric, 'temp': temp_metric}
-
-    def _append_to_aggregated_file(self, ts_file, ag_file):
-        """Append contents of perf timestamp file to perf aggregated file.
-
-        @param ts_file: file handle for performance timestamped file.
-        @param ag_file: file handle for performance aggregated file.
-        """
-        next(ts_file)  # Skip fist line (the header) of timestamped file.
-        for line in ts_file:
-            ag_file.write(line)
-
-    def _copy_aggregated_to_resultsdir(self, aggregated_fpath):
-        """Copy perf aggregated file to results dir for AutoTest results.
-
-        Note: The AutoTest results default directory is located at /usr/local/
-        autotest/results/default/longevity_Tracker/results
-
-        @param aggregated_fpath: file path to Aggregated performance values.
-        """
-        results_fpath = os.path.join(self.resultsdir, 'perf.csv')
-        shutil.copy(aggregated_fpath, results_fpath)
-        logging.info('Copied %s to %s)', aggregated_fpath, results_fpath)
-
-    def _write_perf_keyvals(self, perf_results):
-        """Write perf results to keyval file for AutoTest results.
-
-        @param perf_results: dict of attribute performance metrics.
-        """
-        perf_keyval = {}
-        perf_keyval['cpu_usage'] = perf_results['cpu']
-        perf_keyval['memory_usage'] = perf_results['mem']
-        perf_keyval['temperature'] = perf_results['temp']
-        self.write_perf_keyval(perf_keyval)
-
-    def _write_perf_results(self, perf_results):
-        """Write perf results to results-chart.json file for Perf Dashboard.
-
-        @param perf_results: dict of attribute performance metrics.
-        """
-        cpu_metric = perf_results['cpu']
-        mem_metric = perf_results['mem']
-        ec_metric = perf_results['temp']
-        self.output_perf_value(description='cpu_usage', value=cpu_metric,
-                               units='%', higher_is_better=False)
-        self.output_perf_value(description='mem_usage', value=mem_metric,
-                               units='%', higher_is_better=False)
-        self.output_perf_value(description='max_temp', value=ec_metric,
-                               units='Celsius', higher_is_better=False)
-
-    def _read_perf_results(self):
-        """Read perf results from results-chart.json file for Perf Dashboard.
-
-        @returns dict of perf results, formatted as JSON chart data.
-        """
-        results_file = os.path.join(self.resultsdir, 'results-chart.json')
-        with open(results_file, 'r') as fp:
-            contents = fp.read()
-            chart_data = json.loads(contents)
-        return chart_data
-
-    def _get_point_id(self, cros_version, epoch_minutes):
-        """Compute point ID from ChromeOS version number and epoch minutes.
-
-        @param cros_version: String of ChromeOS version number.
-        @param epoch_minutes: String of minutes since 1970.
-
-        @return unique integer ID computed from given version and epoch.
-        """
-        # Number of digits from each part of the Chrome OS version string.
-        cros_version_col_widths = [0, 4, 3, 2]
-
-        def get_digits(version_num, column_widths):
-            if re.match(VERSION_PATTERN, version_num):
-                computed_string = ''
-                version_parts = version_num.split('.')
-                for i, version_part in enumerate(version_parts):
-                    if column_widths[i]:
-                        computed_string += version_part.zfill(column_widths[i])
-                return computed_string
-            else:
-                return None
-
-        cros_digits = get_digits(cros_version, cros_version_col_widths)
-        epoch_digits = epoch_minutes[-8:]
-        if not cros_digits:
-            return None
-        return int(epoch_digits + cros_digits)
-
-    def _get_kiosk_app_info(self, app_id):
-        """Get kiosk app name and version from manifest.json file.
-
-        Get the Kiosk App name and version strings from the manifest file of
-        the specified |app_id| Extension in the currently running session. If
-        |app_id| is empty or None, then return 'none' for the kiosk app info.
-
-        Raise an error if no manifest is found (ie, |app_id| is not running),
-        or if multiple manifest files are found (ie, |app_id| is running, but
-        the |app_id| dir contains multiple versions or manifest files).
-
-        @param app_id: string kiosk application identification.
-        @returns dict of Kiosk name and version number strings.
-        @raises: An error.TestError if single manifest is not found.
-        """
-        kiosk_app_info = {'name': 'none', 'version': 'none'}
-        if not app_id:
-            return kiosk_app_info
-
-        # Get path to manifest file of the running Kiosk app_id.
-        app_manifest_pattern = (MANIFEST_PATTERN % app_id)
-        logging.info('app_manifest_pattern: %s', app_manifest_pattern)
-        file_paths = glob.glob(app_manifest_pattern)
-        # Raise error if current session has no Kiosk Apps running.
-        if len(file_paths) == 0:
-            raise error.TestError('Kiosk App ID=%s is not running.' % app_id)
-        # Raise error if running Kiosk App has multiple manifest files.
-        if len(file_paths) > 1:
-            raise error.TestError('Kiosk App ID=%s has multiple manifest '
-                                  'files.' % app_id)
-        kiosk_manifest = open(file_paths[0], 'r').read()
-        manifest_json = json.loads(kiosk_manifest)
-        # If manifest is missing name or version key, set to 'undefined'.
-        kiosk_app_info['name'] = manifest_json.get('name', 'undefined')
-        kiosk_app_info['version'] = manifest_json.get('version', 'undefined')
-        return kiosk_app_info
-
-    def _format_data_for_upload(self, chart_data):
-        """Collect chart data into an uploadable data JSON object.
-
-        @param chart_data: performance results formatted as chart data.
-        """
-        perf_values = {
-            'format_version': '1.0',
-            'benchmark_name': self.test_suite_name,
-            'charts': chart_data,
-        }
-
-        dash_entry = {
-            'master': 'ChromeOS_Enterprise',
-            'bot': 'cros-%s' % self.board_name,
-            'point_id': self.point_id,
-            'versions': {
-                'cros_version': self.chromeos_version,
-                'chrome_version': self.chrome_version,
-            },
-            'supplemental': {
-                'default_rev': 'r_cros_version',
-                'hardware_identifier': 'a_' + self.hw_id,
-                'kiosk_app_name': 'a_' + self.kiosk_app_name,
-                'kiosk_app_version': 'r_' + self.kiosk_app_version
-            },
-            'chart_data': perf_values
-        }
-        return {'data': json.dumps(dash_entry)}
-
-    def _send_to_dashboard(self, data_obj):
-        """Send formatted perf data to the perf dashboard.
-
-        @param data_obj: data object as returned by _format_data_for_upload().
-
-        @raises PerfUploadingError if an exception was raised when uploading.
-        """
-        logging.debug('data_obj: %s', data_obj)
-        encoded = urllib.urlencode(data_obj)
-        req = urllib2.Request(DASHBOARD_UPLOAD_URL, encoded)
-        try:
-            urllib2.urlopen(req)
-        except urllib2.HTTPError as e:
-            raise PerfUploadingError('HTTPError: %d %s for JSON %s\n' %
-                                     (e.code, e.msg, data_obj['data']))
-        except urllib2.URLError as e:
-            raise PerfUploadingError('URLError: %s for JSON %s\n' %
-                                     (str(e.reason), data_obj['data']))
-        except httplib.HTTPException:
-            raise PerfUploadingError('HTTPException for JSON %s\n' %
-                                     data_obj['data'])
-
-    def _get_chrome_version(self):
-        """Get the Chrome version number and milestone as strings.
-
-        Invoke "chrome --version" to get the version number and milestone.
-
-        @return A tuple (chrome_ver, milestone) where "chrome_ver" is the
-            current Chrome version number as a string (in the form "W.X.Y.Z")
-            and "milestone" is the first component of the version number
-            (the "W" from "W.X.Y.Z").  If the version number cannot be parsed
-            in the "W.X.Y.Z" format, the "chrome_ver" will be the full output
-            of "chrome --version" and the milestone will be the empty string.
-        """
-        chrome_version = utils.system_output(constants.CHROME_VERSION_COMMAND,
-                                             ignore_status=True)
-        chrome_version = utils.parse_chrome_version(chrome_version)
-        return chrome_version
-
-    def _open_perf_file(self, file_path):
-        """Open a perf file. Write header line if new. Return file object.
-
-        If the file on |file_path| already exists, then open file for
-        appending only. Otherwise open for writing only.
-
-        @param file_path: file path for perf file.
-        @returns file object for the perf file.
-        """
-        # If file exists, open it for appending. Do not write header.
-        if os.path.isfile(file_path):
-            perf_file = open(file_path, 'a+')
-        # Otherwise, create it for writing. Write header on first line.
-        else:
-            perf_file = open(file_path, 'w')  # Erase if existing file.
-            perf_file.write('Time,CPU,Memory,Temperature (C)\r\n')
-        return perf_file
-
-    def _run_test_cycle(self):
-        """Track performance of Chrome OS over a long period of time.
-
-        This method collects performance measurements, and calculates metrics
-        to upload to the performance dashboard. It creates two files to
-        collect and store performance values and results: perf_<timestamp>.csv
-        and perf_aggregated.csv.
-
-        At the start, it creates a unique perf timestamped file in the test's
-        temp_dir. As the cycle runs, it saves a time-stamped performance
-        value after each sample interval. Periodically, it calculates
-        the 90th percentile performance metrics from these values.
-
-        The perf_<timestamp> files on the device will survive multiple runs
-        of the longevity_Tracker by the server-side test, and will also
-        survive multiple runs of the server-side test. The script will
-        delete them after 10 days, to prevent filling up the SSD.
-
-        At the end, it opens the perf aggregated file in the test's temp_dir,
-        and appends the contents of the perf timestamped file. It then
-        copies the perf aggregated file to the results directory as perf.csv.
-        This perf.csv file will be consumed by the AutoTest backend when the
-        server-side test ends.
-
-        Note that the perf_aggregated.csv file will grow larger with each run
-        of longevity_Tracker on the device by the server-side test. However,
-        the server-side test will delete file in the end.
-
-        This method also calculates 90th percentile and median metrics, and
-        returns the median metrics. Median metrics will be pushed to the perf
-        dashboard with a unique point_id.
-
-        @returns list of median performance metrics.
-        """
-        # Allow system to stabilize before start taking measurements.
-        test_start_time = time.time()
-        time.sleep(STABILIZATION_DURATION)
-
-        perf_values = {'cpu': [], 'mem': [], 'temp': []}
-        perf_metrics = {'cpu': [], 'mem': [], 'temp': []}
-
-        # Create perf_<timestamp> file and writer.
-        timestamp_fname = (PERF_FILE_NAME_PREFIX +
-                           time.strftime('_%Y-%m-%d_%H-%M') + '.csv')
-        timestamp_fpath = os.path.join(self.temp_dir, timestamp_fname)
-        timestamp_file = self._open_perf_file(timestamp_fpath)
-        timestamp_writer = csv.writer(timestamp_file)
-
-        # Align time of loop start with the sample interval.
-        test_elapsed_time = self.elapsed_time(test_start_time)
-        time.sleep(self.syncup_time(test_elapsed_time, SAMPLE_INTERVAL))
-        test_elapsed_time = self.elapsed_time(test_start_time)
-
-        metric_start_time = time.time()
-        metric_prev_time = metric_start_time
-
-        metric_elapsed_prev_time = self.elapsed_time(metric_prev_time)
-        offset = self.modulo_time(metric_elapsed_prev_time, METRIC_INTERVAL)
-        metric_timer = metric_elapsed_prev_time + offset
-        while self.elapsed_time(test_start_time) <= TEST_DURATION:
-            if os.path.isfile(EXIT_FLAG_FILE):
-                logging.info('Exit flag file detected. Exiting test.')
-                break
-            self._record_perf_measurements(perf_values, timestamp_writer)
-
-            # Periodically calculate and record 90th percentile metrics.
-            metric_elapsed_prev_time = self.elapsed_time(metric_prev_time)
-            metric_timer = metric_elapsed_prev_time + offset
-            if metric_timer >= METRIC_INTERVAL:
-                self._record_90th_metrics(perf_values, perf_metrics)
-                perf_values = {'cpu': [], 'mem': [], 'temp': []}
-
-                # Set previous time to current time.
-                metric_prev_time = time.time()
-                metric_elapsed_prev_time = self.elapsed_time(metric_prev_time)
-
-                # Calculate offset based on the original start time.
-                metric_elapsed_time = self.elapsed_time(metric_start_time)
-                offset = self.modulo_time(metric_elapsed_time, METRIC_INTERVAL)
-
-                # Set the timer to time elapsed plus offset to next interval.
-                metric_timer = metric_elapsed_prev_time + offset
-
-            # Sync the loop time to the sample interval.
-            test_elapsed_time = self.elapsed_time(test_start_time)
-            time.sleep(self.syncup_time(test_elapsed_time, SAMPLE_INTERVAL))
-
-        # Close perf timestamp file.
-        timestamp_file.close()
-
-        # Open perf timestamp file to read, and aggregated file to append.
-        timestamp_file = open(timestamp_fpath, 'r')
-        aggregated_fname = (PERF_FILE_NAME_PREFIX + '_aggregated.csv')
-        aggregated_fpath = os.path.join(self.temp_dir, aggregated_fname)
-        aggregated_file = self._open_perf_file(aggregated_fpath)
-
-        # Append contents of perf timestamp file to perf aggregated file.
-        self._append_to_aggregated_file(timestamp_file, aggregated_file)
-        timestamp_file.close()
-        aggregated_file.close()
-
-        # Copy perf aggregated file to test results directory.
-        self._copy_aggregated_to_resultsdir(aggregated_fpath)
-
-        # Return median of each attribute performance metric.
-        return self._get_median_metrics(perf_metrics)
-
-    def run_once(self, kiosk_app_attributes=None):
-        if kiosk_app_attributes:
-            app_name, app_id, ext_page = (
-                kiosk_app_attributes.rstrip().split(':'))
-        self.subtest_name = app_name
-        self.board_name = utils.get_board()
-        self.hw_id = self._get_hwid()
-        self.chrome_version = self._get_chrome_version()[0]
-        self.chromeos_version = '0.' + utils.get_chromeos_release_version()
-        self.epoch_minutes = str(int(time.time() / 60))  # Minutes since 1970.
-        self.point_id = self._get_point_id(self.chromeos_version,
-                                           self.epoch_minutes)
-
-        kiosk_info = self._get_kiosk_app_info(app_id)
-        self.kiosk_app_name = kiosk_info['name']
-        self.kiosk_app_version = kiosk_info['version']
-        self.test_suite_name = self.tagged_testname
-        if self.subtest_name:
-            self.test_suite_name += '.' + self.subtest_name
-
-        # Delete exit flag file at start of test run.
-        if os.path.isfile(EXIT_FLAG_FILE):
-            os.remove(EXIT_FLAG_FILE)
-
-        # Run a single test cycle.
-        self.perf_results = {'cpu': '0', 'mem': '0', 'temp': '0'}
-        self.perf_results = self._run_test_cycle()
-
-        # Write results for AutoTest to pick up at end of test.
-        self._write_perf_keyvals(self.perf_results)
-        self._write_perf_results(self.perf_results)
-
-        # Post perf results directly to performance dashboard. You may view
-        # uploaded data at https://chromeperf.appspot.com/new_points,
-        # with test path pattern=ChromeOS_Enterprise/cros-*/longevity*/*
-        chart_data = self._read_perf_results()
-        data_obj = self._format_data_for_upload(chart_data)
-        self._send_to_dashboard(data_obj)
-
-    def cleanup(self):
-        """Delete aged perf data files and the exit flag file."""
-        cmd = ('find %s -name %s* -type f -mmin +%s -delete' %
-               (self.temp_dir, PERF_FILE_NAME_PREFIX, OLD_FILE_AGE))
-        os.system(cmd)
-        if os.path.isfile(EXIT_FLAG_FILE):
-            os.remove(EXIT_FLAG_FILE)
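A minimal sketch of the per-run aggregation described in the docstring above: samples taken every SAMPLE_INTERVAL are reduced to a 90th-percentile value once per METRIC_INTERVAL, the run's timestamped CSV is appended to a rolling aggregate, and the medians of the per-interval metrics are what get uploaded. The helper names and CSV handling here are illustrative assumptions, not the test's actual code.

import math
import os
import shutil

def percentile_90(samples):
    """Nearest-rank 90th percentile of a non-empty list of samples."""
    ordered = sorted(samples)
    rank = max(int(math.ceil(0.9 * len(ordered))) - 1, 0)
    return ordered[rank]

def median(values):
    """Median of a non-empty list of values."""
    ordered = sorted(values)
    mid = len(ordered) // 2
    if len(ordered) % 2:
        return ordered[mid]
    return (ordered[mid - 1] + ordered[mid]) / 2.0

def publish_aggregate(timestamp_csv, aggregated_csv, results_dir):
    """Append one run's timestamped CSV to the rolling aggregate and copy
    the aggregate into the results directory as perf.csv."""
    with open(timestamp_csv) as src, open(aggregated_csv, 'a') as dst:
        dst.write(src.read())
    shutil.copy(aggregated_csv, os.path.join(results_dir, 'perf.csv'))

The real test keys these reductions by attribute ('cpu', 'mem', 'temp') and attaches a point_id derived from the ChromeOS version and the epoch minutes before uploading the medians.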
diff --git a/client/site_tests/network_CastTDLS/control.receiver_1 b/client/site_tests/network_CastTDLS/control.receiver_1
index a0fc5c3..c4faaf6 100644
--- a/client/site_tests/network_CastTDLS/control.receiver_1
+++ b/client/site_tests/network_CastTDLS/control.receiver_1
@@ -14,6 +14,7 @@
 TEST_CLASS = "network"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:wifi_tdls_cast"
+PY_VERSION = 3
 DOC = """
   Test a mirroring session between a TDLS enabled ChromeOS device and
   Chromecast. The test downloads Cast extension from Chrome Web Store,
diff --git a/client/site_tests/network_CastTDLS/network_CastTDLS.py b/client/site_tests/network_CastTDLS/network_CastTDLS.py
index e26f9b1..d70ceec 100644
--- a/client/site_tests/network_CastTDLS/network_CastTDLS.py
+++ b/client/site_tests/network_CastTDLS/network_CastTDLS.py
@@ -1,8 +1,12 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import json, logging, os, re, tempfile, time, urllib2, zipfile
+import json, logging, os, re, tempfile, time, zipfile
+
+from six.moves import urllib
+
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error, file_utils
 from autotest_lib.client.common_lib.cros import chromedriver
@@ -89,7 +93,7 @@
         @param output_file: The output file of the extension.
         """
         update_check_url = UPDATE_CHECK_URL % EXTENSION_ID_BETA
-        response = urllib2.urlopen(update_check_url).read()
+        response = urllib.request.urlopen(update_check_url).read()
         logging.info('Response: %s', response)
         pattern = r'codebase="(.*crx)"'
         regex = re.compile(pattern)
@@ -174,7 +178,7 @@
         @raises error.TestFail If TDLS status is invalid.
         @raises error.TestFail TDLS is not being used in the mirroring session.
         """
-        response = urllib2.urlopen(GET_LOG_URL % device_ip).read()
+        response = urllib.request.urlopen(GET_LOG_URL % device_ip).read()
         logging.info('Receiver log is under: %s', self.debugdir)
         with open(os.path.join(self.debugdir, RECEIVER_LOG), 'wb') as f:
             f.write(response)
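The hunk above swaps urllib2 for the six.moves compatibility layer as part of the Python 2/3 migration. A small, hedged sketch of the same pattern (the fetch helper and the decoding policy are illustrative, not part of the test):

from six.moves import urllib

def fetch_text(url):
    # six.moves.urllib.request resolves to urllib2 on Python 2 and to
    # urllib.request on Python 3. read() returns bytes on Python 3, so
    # decode before matching text patterns such as r'codebase="(.*crx)"'.
    data = urllib.request.urlopen(url).read()
    return data.decode('utf-8', 'replace') if isinstance(data, bytes) else data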
diff --git a/client/site_tests/network_ChromeCellularEndToEnd/control b/client/site_tests/network_ChromeCellularEndToEnd/control
index 8f31983..0c494ea 100644
--- a/client/site_tests/network_ChromeCellularEndToEnd/control
+++ b/client/site_tests/network_ChromeCellularEndToEnd/control
@@ -3,10 +3,10 @@
 # found in the LICENSE file.
 
 AUTHOR = 'harpreet'
-NAME = 'network_ChromeCelluarEndToEnd'
+NAME = 'network_ChromeCellularEndToEnd'
 TIME = 'FAST'
 TEST_TYPE = 'client'
-
+PY_VERSION = 3
 DOC = """
   Client side of the end to end Cellular test which is called by the
   cellular_ChromeEndToEnd server test that cold reboots the DUT before
diff --git a/client/site_tests/network_ChromeCellularEndToEnd/network_ChromeCellularEndToEnd.py b/client/site_tests/network_ChromeCellularEndToEnd/network_ChromeCellularEndToEnd.py
index 2c96d73..43d44e4 100644
--- a/client/site_tests/network_ChromeCellularEndToEnd/network_ChromeCellularEndToEnd.py
+++ b/client/site_tests/network_ChromeCellularEndToEnd/network_ChromeCellularEndToEnd.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/network_ChromeCellularNetworkPresent/control b/client/site_tests/network_ChromeCellularNetworkPresent/control
deleted file mode 100644
index 981e0d4..0000000
--- a/client/site_tests/network_ChromeCellularNetworkPresent/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "network_ChromeCellularNetworkPresent"
-PURPOSE = "Verify that a cellular network is visible via networkingPrivate"
-ATTRIBUTES = "suite:network_ui"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-
-DOC = """
-
-This test is meant as a simple example of client/cros/networking/chrome_testing.
-It uses telemetry and pseudomodem to setup a fake network and verify that it
-properly propagates to Chrome.
-
-"""
-
-job.run_test('network_ChromeCellularNetworkPresent', family='3GPP', tag='3GPP')
-job.run_test('network_ChromeCellularNetworkPresent', family='CDMA', tag='CDMA')
diff --git a/client/site_tests/network_ChromeCellularNetworkPresent/network_ChromeCellularNetworkPresent.py b/client/site_tests/network_ChromeCellularNetworkPresent/network_ChromeCellularNetworkPresent.py
deleted file mode 100644
index 2e0f9b2..0000000
--- a/client/site_tests/network_ChromeCellularNetworkPresent/network_ChromeCellularNetworkPresent.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
-from autotest_lib.client.cros.cellular.pseudomodem import pseudomodem_context
-from autotest_lib.client.cros.networking.chrome_testing \
-        import chrome_networking_test_context as cntc
-
-class network_ChromeCellularNetworkPresent(test.test):
-    """
-    This test is meant as a simple example using
-    client/cros/networking/chrome_testing. It uses telemetry and pseudomodem to
-    setup a fake network and verify that it properly propagates to Chrome.
-
-    """
-    version = 1
-
-    def run_once(self, family):
-        with pseudomodem_context.PseudoModemManagerContext(
-                True,
-                {'family' : family}):
-            with cntc.ChromeNetworkingTestContext() as test_context:
-                networks = test_context.find_cellular_networks()
-                if len(networks) != 1:
-                    raise error.TestFail(
-                            'Expected 1 cellular network, found ' +
-                            str(len(networks)))
-
-                network = networks[0]
-                if network["Type"] != test_context.CHROME_NETWORK_TYPE_CELLULAR:
-                    raise error.TestFail(
-                            'Expected network of type "Cellular", found ' +
-                            network["Type"])
-
-                if not network["Name"].startswith(
-                        pm_constants.DEFAULT_TEST_NETWORK_PREFIX):
-                    raise error.TestFail('Network name is incorrect: ' +
-                                         network["Name"])
diff --git a/client/site_tests/network_ChromeCellularNetworkProperties/control b/client/site_tests/network_ChromeCellularNetworkProperties/control
deleted file mode 100644
index cb7e143..0000000
--- a/client/site_tests/network_ChromeCellularNetworkProperties/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "network_ChromeCellularNetworkProperties"
-PURPOSE = "Verify that Chrome sees the correct cellular service properties."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DOC = """
-
-This test configures the cellular pseudomodem in various ways and makes sure
-that Service properties exposed by shill are propagated to Chrome. The API call
-that is under test is "chrome.networkingPrivate.getProperties".
-
-This test uses the pseudomodem for cellular, but it can also be extended to
-other technologies.
-
-"""
-
-job.run_test('network_ChromeCellularNetworkProperties',
-             family='3GPP', tag='3GPP')
-job.run_test('network_ChromeCellularNetworkProperties',
-             family='CDMA', tag='CDMA')
diff --git a/client/site_tests/network_ChromeCellularNetworkProperties/network_ChromeCellularNetworkProperties.py b/client/site_tests/network_ChromeCellularNetworkProperties/network_ChromeCellularNetworkProperties.py
deleted file mode 100644
index c32fd41..0000000
--- a/client/site_tests/network_ChromeCellularNetworkProperties/network_ChromeCellularNetworkProperties.py
+++ /dev/null
@@ -1,281 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus.types
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.cellular import mm1_constants
-from autotest_lib.client.cros.cellular import test_environment
-from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
-from autotest_lib.client.cros.networking import mm1_proxy
-from autotest_lib.client.cros.networking.chrome_testing \
-        import chrome_networking_test_context as cntc
-from autotest_lib.client.cros.networking.chrome_testing import test_utils
-
-class network_ChromeCellularNetworkProperties(test.test):
-    """
-    This test configures the cellular pseudomodem in various ways and makes sure
-    that Service properties exposed by shill are propagated to Chrome. The API
-    call that is under test is "chrome.networkingPrivate.getProperties".
-
-    This test uses the pseudomodem to mock out cellular, but it can also be
-    extended to other technologies.
-
-    """
-    version = 1
-
-    class SimplePropagationTest(object):
-        """
-        Test class for simple property propagation. This class helps compare
-        a read-only network property and a ModemManager property that have a 1:1
-        correspondence with each other by setting the ModemManager property to
-        a series of values and checking that the UI property value matches
-        the expectation.
-
-        An instance of this class takes in a specially formatted dictionary
-        that enumerates the corresponding ModemManager and UI property values:
-
-          {
-            "properties" : (<MM property>, <UI property>),
-            "values" : [
-                         ( <MM value 1>, <UI value 1> ),
-                         ( <MM value 2>, <UI value 2> ),
-                         ( <MM value 3>, <UI value 3> ),
-                         ...
-                       ]
-          }
-
-        The "properties" key maps to a tuple containing the ModemManager
-        property and the UI property that are under test. The test will go
-        through each of the tuples contained in "values", set the ModemManager
-        property to the first value and check that the UI property takes on
-        the second value.
-
-        UI properties allow "path expansion by '.'" meaning that a UI property
-        will be expanded at the first occurrence of the '.' character to allow
-        for nested dictionaries. For example:
-
-            property: "A.B.C"
-
-          corresponds to a UI property dictionary of the form:
-
-            {
-              ...
-              "A": {
-                     ...
-                     "B.C": value,
-                     ...
-                   },
-              ...
-            }
-
-        This class is called "Simple" because it only applies to properties
-        that are read-only and won't cause significant changes such as a
-        modem reset or service recreation.
-
-        """
-
-        def __init__(self, chrome_testing_context,
-                     property_map,
-                     mm_property_interface,
-                     dbus_type=None,
-                     initial_property_list=None):
-            """
-            @param chrome_testing_context: Instance of
-                    cntc.ChromeNetworkingTestContext.
-            @param property_map: Contains the property mapping that will be
-                    tested as described in the class docstring.
-            @param mm_property_interface: The ModemManager1 D-Bus interface
-                    that the property is listed under.
-            @param initial_property_list: Optional list of tuples containing
-                    ModemManager properties and values that will be assigned
-                    before the comparison checks are done.
-            @param dbus_type: The dbus.types instance that the property should
-                    be converted to, or None if it should be assigned as is.
-
-            """
-            self._chrome = chrome_testing_context
-            self._property_map = property_map
-            self._mm_iface = mm_property_interface
-            self._initial_list = initial_property_list
-            self._dbus_type = dbus_type
-
-
-        def _find_cellular_network(self):
-            """
-            Finds the current cellular network. Asserts that it matches the fake
-            network from pseudomodem and returns the network.
-
-            """
-            networks = self._chrome.find_cellular_networks()
-            if len(networks) != 1:
-                raise error.TestFail(
-                        'Expected 1 cellular network, found ' +
-                        str(len(networks)))
-            network = networks[0]
-            test_utils.simple_network_sanity_check(
-                    network,
-                    pm_constants.DEFAULT_TEST_NETWORK_PREFIX,
-                    self._chrome.CHROME_NETWORK_TYPE_CELLULAR)
-            return network
-
-
-        def compare(self):
-            """
-            Runs the property comparison checks.
-
-            """
-            # Get a modem proxy. This proxy should remain valid throughout the
-            # test.
-            self._modem = mm1_proxy.ModemManager1Proxy.get_proxy().get_modem()
-
-            # Perform the initial property assignments.
-            if self._initial_list:
-                for prop, value in self._initial_list:
-                    logging.info('Assigning initial property (%s, %s)',
-                                 prop, repr(value))
-                    self._modem.iface_properties.Set(self._mm_iface, prop,
-                                                     value)
-
-            # Store the GUID of the fake test network.
-            self._network_guid = self._find_cellular_network()['GUID']
-
-            # Run the checks.
-            mm_prop, ui_prop = self._property_map['properties']
-            logging.info('Testing ModemManager property "%s.%s" against UI '
-                         'property "%s".', self._mm_iface, mm_prop, ui_prop)
-            for mm_value, ui_value in self._property_map['values']:
-                logging.info('Setting ModemManager value to: %s',
-                             repr(mm_value))
-                if self._dbus_type:
-                    mm_value = self._dbus_type(mm_value)
-                self._modem.iface_properties.Set(self._mm_iface, mm_prop,
-                                                 mm_value)
-
-                logging.info('Checking UI property: %s', ui_prop)
-                test_utils.check_ui_property(
-                        self._chrome, self._network_guid,
-                        ui_prop, ui_value, 2)
-
-
-    def _run_once_internal(self):
-        name_prefix = pm_constants.DEFAULT_TEST_NETWORK_PREFIX
-        tests = [ self.SimplePropagationTest(
-                        self._chrome_testing,
-                        { 'properties': ('AccessTechnologies',
-                                         'Cellular.NetworkTechnology'),
-                          'values': [(mm1_constants.
-                                      MM_MODEM_ACCESS_TECHNOLOGY_LTE,
-                                      'LTE'),
-                                     (mm1_constants.
-                                      MM_MODEM_ACCESS_TECHNOLOGY_EVDO0,
-                                      'EVDO'),
-                                     (mm1_constants.
-                                      MM_MODEM_ACCESS_TECHNOLOGY_UMTS,
-                                      'UMTS'),
-                                     (mm1_constants.
-                                      MM_MODEM_ACCESS_TECHNOLOGY_GSM,
-                                      'GSM')]
-                        },
-                        mm1_constants.I_MODEM,
-                        dbus.types.UInt32)
-                ]
-
-        if self._family == '3GPP':
-            tests.extend([
-                self.SimplePropagationTest(
-                    self._chrome_testing,
-                    { 'properties': ('OperatorCode',
-                                     'Cellular.ServingOperator.Code'),
-                      'values': [('001001', '001001'),
-                                 ('001002', '001002'),
-                                 ('001003', '001003'),
-                                 ('001000', '001000')]
-                    },
-                    mm1_constants.I_MODEM_3GPP),
-                self.SimplePropagationTest(
-                    self._chrome_testing,
-                    { 'properties': ('RegistrationState',
-                                     'Cellular.RoamingState'),
-                      'values': [(mm1_constants.
-                                  MM_MODEM_3GPP_REGISTRATION_STATE_ROAMING,
-                                  'Roaming'),
-                                 (mm1_constants.
-                                  MM_MODEM_3GPP_REGISTRATION_STATE_HOME,
-                                  'Home')]
-                    },
-                    mm1_constants.I_MODEM_3GPP,
-                    dbus.types.UInt32)
-            ])
-        elif self._family == 'CDMA':
-            tests.extend([
-                self.SimplePropagationTest(
-                    self._chrome_testing,
-                    { 'properties': ('Sid',
-                                     'Cellular.ServingOperator.Code'),
-                      'values': [(99995, '99995'),
-                                 (99996, '99996'),
-                                 (99997, '99997'),
-                                 (99998, '99998')]
-                    },
-                    mm1_constants.I_MODEM_CDMA,
-                    dbus.types.UInt32),
-                self.SimplePropagationTest(
-                    self._chrome_testing,
-                    { 'properties': ('EvdoRegistrationState',
-                                     'Cellular.RoamingState'),
-                      'values': [(mm1_constants.
-                                  MM_MODEM_CDMA_REGISTRATION_STATE_ROAMING,
-                                  'Roaming'),
-                                 (mm1_constants.
-                                  MM_MODEM_CDMA_REGISTRATION_STATE_HOME,
-                                  'Home')]
-                    },
-                    mm1_constants.I_MODEM_CDMA,
-                    dbus.types.UInt32,
-                    [('EvdoRegistrationState',
-                      dbus.types.UInt32(
-                            mm1_constants.
-                            MM_MODEM_CDMA_REGISTRATION_STATE_HOME)),
-                     ('Cdma1xRegistrationState',
-                      dbus.types.UInt32(
-                            mm1_constants.
-                            MM_MODEM_CDMA_REGISTRATION_STATE_UNKNOWN))
-                    ]),
-                self.SimplePropagationTest(
-                    self._chrome_testing,
-                    { 'properties': ('Cdma1xRegistrationState',
-                                     'Cellular.RoamingState'),
-                      'values': [(mm1_constants.
-                                  MM_MODEM_CDMA_REGISTRATION_STATE_ROAMING,
-                                  'Roaming'),
-                                 (mm1_constants.
-                                  MM_MODEM_CDMA_REGISTRATION_STATE_HOME,
-                                  'Home')]
-                    },
-                    mm1_constants.I_MODEM_CDMA,
-                    dbus.types.UInt32,
-                    [('Cdma1xRegistrationState',
-                      dbus.types.UInt32(
-                            mm1_constants.
-                            MM_MODEM_CDMA_REGISTRATION_STATE_HOME)),
-                     ('EvdoRegistrationState',
-                      dbus.types.UInt32(
-                            mm1_constants.
-                            MM_MODEM_CDMA_REGISTRATION_STATE_UNKNOWN))
-                    ])
-            ])
-        for test in tests:
-            test.compare()
-
-
-    def run_once(self, family):
-        test_env = test_environment.CellularPseudoMMTestEnvironment(
-                pseudomm_args=({'family': family},))
-        self._chrome_testing = cntc.ChromeNetworkingTestContext()
-        with test_env, self._chrome_testing:
-            self._family = family
-            self._run_once_internal()
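The SimplePropagationTest docstring above defines a property map and a "path expansion by '.'" rule for UI property names. A minimal sketch of that lookup and of the map's shape (the helper name and the example values are assumptions for illustration):

def expand_ui_property(ui_properties, ui_property_name):
    """Resolve a UI property such as 'A.B.C' against a nested dictionary,
    expanding only at the first '.': 'A' is the outer key and 'B.C' stays
    a single inner key, matching the docstring's example."""
    if '.' in ui_property_name:
        outer, inner = ui_property_name.split('.', 1)
        return ui_properties[outer][inner]
    return ui_properties[ui_property_name]

# Shape of the mapping the test iterates over; the numeric values here are
# placeholders standing in for the mm1_constants access-technology enums.
property_map = {
    'properties': ('AccessTechnologies', 'Cellular.NetworkTechnology'),
    'values': [(14, 'LTE'), (5, 'UMTS')],
}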
diff --git a/client/site_tests/network_ChromeCellularSmokeTest/control b/client/site_tests/network_ChromeCellularSmokeTest/control
deleted file mode 100644
index bc08d8d..0000000
--- a/client/site_tests/network_ChromeCellularSmokeTest/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "network_ChromeCellularSmokeTest"
-PURPOSE = "Verify that chrome.networkingPrivate can connect to the network"
-CRITERIA = """
-  The test will fail if the Chrome fails to connect to the network or if shill
-  or the pseudomodem are not left in a working state.
-"""
-ATTRIBUTES = "suite:network_ui"
-TIME = "FAST"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-DOC = """
-  Tests that Chrome can bring the network to a connected state and effectively
-  access the internet through the cellular network. The test repeats a
-  connect/disconnect sequence several times and makes sure that Chrome can
-  always connect to the network via chrome.networkingPrivate.
-"""
-
-job.run_test('network_ChromeCellularSmokeTest', family='3GPP', tag='3GPP')
-job.run_test('network_ChromeCellularSmokeTest', family='CDMA', tag='CDMA')
-
diff --git a/client/site_tests/network_ChromeCellularSmokeTest/network_ChromeCellularSmokeTest.py b/client/site_tests/network_ChromeCellularSmokeTest/network_ChromeCellularSmokeTest.py
deleted file mode 100644
index d8726ed..0000000
--- a/client/site_tests/network_ChromeCellularSmokeTest/network_ChromeCellularSmokeTest.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.cellular import mm1_constants
-from autotest_lib.client.cros.cellular import test_environment
-from autotest_lib.client.cros.cellular.pseudomodem import pm_constants
-from autotest_lib.client.cros.networking import pm_proxy
-from autotest_lib.client.cros.networking.chrome_testing \
-        import chrome_networking_test_context as cntc
-from autotest_lib.client.cros.networking.chrome_testing import test_utils
-
-class network_ChromeCellularSmokeTest(test.test):
-    """
-    Tests that Chrome can bring the network to a connected state and effectively
-    access the internet through the cellular network. The test repeats a
-    connect/disconnect sequence several times and makes sure that Chrome can
-    always connect to the network via chrome.networkingPrivate.
-
-    """
-    version = 1
-
-    CONNECT_COUNT = 5
-
-    def _setup_modem_proxy(self):
-        pseudomm = pm_proxy.PseudoMMProxy.get_proxy()
-        self._modem = pseudomm.get_modem()
-
-
-    def _get_modem_state(self):
-        props = self._modem.properties(mm1_constants.I_MODEM)
-        return props[mm1_constants.MM_MODEM_PROPERTY_NAME_STATE]
-
-
-    def _get_cellular_network(self):
-        networks = self._chrome_testing.find_cellular_networks()
-        if len(networks) != 1:
-            raise error.TestFail(
-                    'Expected 1 cellular network, found ' + str(len(networks)))
-        network = networks[0]
-        test_utils.simple_network_sanity_check(
-                network, pm_constants.DEFAULT_TEST_NETWORK_PREFIX,
-                self._chrome_testing.CHROME_NETWORK_TYPE_CELLULAR)
-        return network
-
-
-    def _assert_modem_state(self, expected_state):
-        modem_state = self._get_modem_state()
-        if modem_state != expected_state:
-            raise error.TestFail(
-                    'Expected modem state to be "' +
-                    mm1_constants.ModemStateToString(expected_state) +
-                    '", found: ' +
-                    mm1_constants.ModemStateToString(modem_state))
-
-
-    def _ensure_network_status(self, network_id, status, timeout):
-        test_utils.check_ui_property(
-                self._chrome_testing, network_id, 'ConnectionState', status)
-
-
-    def _disconnect_cellular_network(self):
-        # Make sure that the network becomes disconnected.
-        network_id = self._network['GUID']
-        logging.info('Disconnecting from network: ' + network_id)
-        call_status = self._chrome_testing.call_test_function(
-                test_utils.LONG_TIMEOUT,
-                'disconnectFromNetwork',
-                '"' + network_id + '"')
-        logging.info('Checking that the network is disconnected.')
-        self._ensure_network_status(
-                network_id, 'NotConnected', test_utils.LONG_TIMEOUT)
-        logging.info('The network is disconnected. Checking that the modem is '
-                     'in the REGISTERED state.')
-        self._assert_modem_state(mm1_constants.MM_MODEM_STATE_REGISTERED)
-        logging.info('Modem is disconnected. Disconnect was successful.')
-
-
-    def _connect_cellular_network(self):
-        # Make sure that the network becomes connected.
-        network_id = self._network['GUID']
-        logging.info('Connecting to network: ' + network_id)
-        call_status = self._chrome_testing.call_test_function(
-                test_utils.LONG_TIMEOUT,
-                'connectToNetwork',
-                '"' + network_id + '"')
-        logging.info('Checking that the network is connected.')
-        self._ensure_network_status(
-                network_id, 'Connected', test_utils.LONG_TIMEOUT)
-        logging.info('The network is connected. Checking that the modem is in '
-                     'the CONNECTED state.')
-        self._assert_modem_state(mm1_constants.MM_MODEM_STATE_CONNECTED)
-        logging.info('Modem is connected. Connect was successful.')
-
-
-    def _run_once_internal(self):
-        # Set up a ModemManager proxy to use to verify the modem state.
-        self._setup_modem_proxy()
-
-        # Make sure that there is a single cellular network and it matches
-        # the data from pseudomm.
-        self._network = self._get_cellular_network()
-
-        # Disconnect from the network before doing any operations.
-        self._disconnect_cellular_network()
-
-        logging.info('Starting connect/disconnect sequence.')
-        for _ in xrange(self.CONNECT_COUNT):
-            self._connect_cellular_network()
-            self._disconnect_cellular_network()
-
-
-    def run_once(self, family):
-        test_env = test_environment.CellularPseudoMMTestEnvironment(
-                pseudomm_args=({'family': family},))
-        testing_context = cntc.ChromeNetworkingTestContext()
-        with test_env, testing_context:
-            self._chrome_testing = testing_context
-            self._run_once_internal()
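The deleted smoke test above repeatedly connects and disconnects, each time polling until the UI reports the expected ConnectionState and the pseudomodem reaches the matching state. A generic sketch of that poll-until-expected idiom (names and timeouts are illustrative):

import time

def wait_for_value(read_value, expected, timeout_seconds, poll_seconds=1):
    """Poll read_value() until it returns `expected`; True on success,
    False if the timeout expires first."""
    deadline = time.time() + timeout_seconds
    while time.time() < deadline:
        if read_value() == expected:
            return True
        time.sleep(poll_seconds)
    return False

# e.g. wait_for_value(lambda: connection_state(guid), 'Connected', 60)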
diff --git a/client/site_tests/network_ChromeWifiConfigure/control b/client/site_tests/network_ChromeWifiConfigure/control
index be7cdb3..f1ac37d 100644
--- a/client/site_tests/network_ChromeWifiConfigure/control
+++ b/client/site_tests/network_ChromeWifiConfigure/control
@@ -14,6 +14,8 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
+
 DOC = """
   Tests that Chrome can do the following using chrome.networkingPrivate:
   * Configure a new wifi network with Security = psk.
diff --git a/client/site_tests/network_ChromeWifiConfigure/network_ChromeWifiConfigure.py b/client/site_tests/network_ChromeWifiConfigure/network_ChromeWifiConfigure.py
index 2e43171..ebb0bdb 100644
--- a/client/site_tests/network_ChromeWifiConfigure/network_ChromeWifiConfigure.py
+++ b/client/site_tests/network_ChromeWifiConfigure/network_ChromeWifiConfigure.py
@@ -1,4 +1,5 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
diff --git a/client/site_tests/network_ChromeWifiEndToEnd/control b/client/site_tests/network_ChromeWifiEndToEnd/control
index 0a2bc0a..4034fb4 100644
--- a/client/site_tests/network_ChromeWifiEndToEnd/control
+++ b/client/site_tests/network_ChromeWifiEndToEnd/control
@@ -6,6 +6,7 @@
 NAME = 'network_ChromeWifiEndToEnd'
 TIME = 'FAST'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
   Client side of the end to end WiFi test which is called by the
diff --git a/client/site_tests/network_ChromeWifiEndToEnd/network_ChromeWifiEndToEnd.py b/client/site_tests/network_ChromeWifiEndToEnd/network_ChromeWifiEndToEnd.py
index b9c1b7f..218212c 100644
--- a/client/site_tests/network_ChromeWifiEndToEnd/network_ChromeWifiEndToEnd.py
+++ b/client/site_tests/network_ChromeWifiEndToEnd/network_ChromeWifiEndToEnd.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/network_DhcpBrokenDefaultGateway/control b/client/site_tests/network_DhcpBrokenDefaultGateway/control
deleted file mode 100644
index c34e9bb..0000000
--- a/client/site_tests/network_DhcpBrokenDefaultGateway/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, quiche'
-NAME = 'network_DhcpBrokenDefaultGateway'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that we can negotiate a lease on an IPv4 address via DHCP from
-  a server that provides a broken default gateway option.  Specifically,
-  the gateway provided is outside the broadcast domain specified by the
-  client IP address and prefix.  This test verifies that the client
-  mitigates this issue by creating a link-scoped route to the gateway,
-  or via some other method which allows the gateway route to be
-  successfully added.
-
-"""
-
-job.run_test('network_DhcpBrokenDefaultGateway')
diff --git a/client/site_tests/network_DhcpBrokenDefaultGateway/network_DhcpBrokenDefaultGateway.py b/client/site_tests/network_DhcpBrokenDefaultGateway/network_DhcpBrokenDefaultGateway.py
deleted file mode 100644
index 1c87580..0000000
--- a/client/site_tests/network_DhcpBrokenDefaultGateway/network_DhcpBrokenDefaultGateway.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import interface
-from autotest_lib.client.cros import dhcp_handling_rule
-from autotest_lib.client.cros import dhcp_packet
-from autotest_lib.client.cros import dhcp_test_base
-from autotest_lib.client.cros.networking import shill_proxy
-
-# Length of time the lease from the DHCP server is valid.
-LEASE_TIME_SECONDS = 60
-# We'll fill in the subnet and give this address to the client.
-INTENDED_IP_SUFFIX = '0.0.0.101'
-# We should be able to complete a DHCP negotiation in this amount of time.
-DHCP_NEGOTIATION_TIMEOUT_SECONDS = 10
-
-class network_DhcpBrokenDefaultGateway(dhcp_test_base.DhcpTestBase):
-    """Test application of broken gateway route from DHCP server."""
-
-    def check_shill_gateway_setup(self, interface_name, gateway_ip):
-        """Check that the ipconfig in the client shows the gateway IP.
-
-        @param interface_name string client network interface name.
-        @param gateway_ip string expected gateway IP address.
-
-        """
-        proxy = shill_proxy.ShillProxy()
-        device = proxy.find_object('Device', {'Name': interface_name})
-        if device is None:
-            raise error.TestFail('Device was not found.')
-        device_properties = device.GetProperties(utf8_strings=True)
-        ipconfig_path = device_properties['IPConfigs'][0]
-        ipconfig = proxy.get_dbus_object('org.chromium.flimflam.IPConfig',
-                                         ipconfig_path)
-        ipconfig_properties = ipconfig.GetProperties(utf8_strings=True)
-        ipconfig_gateway = ipconfig_properties['Gateway']
-        if ipconfig_gateway != gateway_ip:
-            raise error.TestFail('Shill gateway %s does '
-                                 'not match expected %s.' %
-                                 (ipconfig_gateway, gateway_ip))
-
-
-    def check_routing_table_gateway_setup(self, interface_name, gateway_ip):
-        """Check that the routing table in the client shows the gateway IP.
-
-        @param interface_name string client network interface name.
-        @param gateway_ip string expected gateway IP address.
-
-        """
-        default_route = interface.get_prioritized_default_route(
-            host=None, interface_name_regex=interface_name)
-        if not default_route:
-            raise error.TestFail('No default route found.')
-        if default_route.gateway != gateway_ip:
-            raise error.TestFail('Routing table gateway %s does '
-                                 'not match expected %s.' %
-                                 (default_route.gateway, gateway_ip))
-
-
-    def test_body(self):
-        """Main body of the test."""
-        subnet_mask = self.ethernet_pair.interface_subnet_mask
-        intended_ip = dhcp_test_base.DhcpTestBase.rewrite_ip_suffix(
-                subnet_mask,
-                self.server_ip,
-                INTENDED_IP_SUFFIX)
-        # Pick an address that's unlikely to be in the broadcast domain of the
-        # virtual network pair.
-        gateway_ip = "10.11.12.13"
-        # Two real name servers, and a bogus one to be unpredictable.
-        dns_servers = ['8.8.8.8', '8.8.4.4', '192.168.87.88']
-        vendor_options = 'ANDROID_METERED'
-        # This is the pool of information the server will give out to the client
-        # upon request.
-        dhcp_options = {
-                dhcp_packet.OPTION_SERVER_ID : self.server_ip,
-                dhcp_packet.OPTION_SUBNET_MASK : subnet_mask,
-                dhcp_packet.OPTION_IP_LEASE_TIME : LEASE_TIME_SECONDS,
-                dhcp_packet.OPTION_REQUESTED_IP : intended_ip,
-                dhcp_packet.OPTION_DNS_SERVERS : dns_servers,
-                dhcp_packet.OPTION_ROUTERS : [ gateway_ip ],
-                }
-        rules = [
-                dhcp_handling_rule.DhcpHandlingRule_RespondToDiscovery(
-                        intended_ip, self.server_ip, dhcp_options, {}),
-                dhcp_handling_rule.DhcpHandlingRule_RespondToRequest(
-                        intended_ip, self.server_ip, dhcp_options, {})
-                ]
-        rules[-1].is_final_handler = True
-
-        self.server.start_test(rules, DHCP_NEGOTIATION_TIMEOUT_SECONDS)
-        self.server.wait_for_test_to_finish()
-        if not self.server.last_test_passed:
-            raise error.TestFail('Test server didn\'t get all the messages it '
-                                 'was told to expect during negotiation.')
-
-        self.wait_for_dhcp_propagation()
-        interface_name = self.ethernet_pair.peer_interface_name
-        self.check_shill_gateway_setup(interface_name, gateway_ip)
-        self.check_routing_table_gateway_setup(interface_name, gateway_ip)
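The deleted test above checks the mitigation for a DHCP-provided gateway that sits outside the client's prefix: reach it through a link-scoped (on-link) host route installed ahead of the default route. A hedged iproute2 sketch of that mechanism, not of shill's actual implementation (interface and gateway values reuse the test's examples):

import subprocess

def add_offlink_default_route(ifname, gateway_ip):
    """Install a default route via a gateway outside the local subnet by
    first adding a link-scoped host route to the gateway itself."""
    subprocess.check_call(
        ['ip', 'route', 'add', gateway_ip, 'dev', ifname, 'scope', 'link'])
    subprocess.check_call(
        ['ip', 'route', 'add', 'default', 'via', gateway_ip, 'dev', ifname])

# add_offlink_default_route('eth0', '10.11.12.13')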
diff --git a/client/site_tests/network_DhcpClasslessStaticRoute/control b/client/site_tests/network_DhcpClasslessStaticRoute/control
index 66b066e..585d91c 100644
--- a/client/site_tests/network_DhcpClasslessStaticRoute/control
+++ b/client/site_tests/network_DhcpClasslessStaticRoute/control
@@ -7,6 +7,7 @@
 ATTRIBUTES = "suite:network_nightly"
 TIME = 'SHORT'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
   Tests that we can negotiate a lease on an IPv4 address via DHCP.
diff --git a/client/site_tests/network_DhcpFQDN/control b/client/site_tests/network_DhcpFQDN/control
deleted file mode 100644
index 4d6ef1f..0000000
--- a/client/site_tests/network_DhcpFQDN/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, quiche, wiley'
-NAME = 'network_DhcpFQDN'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that DHCP option 81 (Fully Qualified Domain Name) is successfully
-  accepted.  It was observed that this caused a crash in dhcpcd in some
-  releases.  This test ensures that the DHCP client does not regress to
-  repeat this failure by ensuring that the DHCP client succeeds long enough
-  to provide an IPConfig back to shill.
-
-"""
-
-job.run_test('network_DhcpFQDN')
diff --git a/client/site_tests/network_DhcpFQDN/network_DhcpFQDN.py b/client/site_tests/network_DhcpFQDN/network_DhcpFQDN.py
deleted file mode 100644
index 65a70f7..0000000
--- a/client/site_tests/network_DhcpFQDN/network_DhcpFQDN.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import interface
-from autotest_lib.client.cros import dhcp_handling_rule
-from autotest_lib.client.cros import dhcp_packet
-from autotest_lib.client.cros import dhcp_test_base
-from autotest_lib.client.cros.networking import shill_proxy
-
-# Length of time the lease from the DHCP server is valid.
-LEASE_TIME_SECONDS = 60
-# We'll fill in the subnet and give this address to the client.
-INTENDED_IP_SUFFIX = "0.0.0.101"
-# We should be able to complete a DHCP negotiation in this amount of time.
-DHCP_NEGOTIATION_TIMEOUT_SECONDS = 10
-
-class network_DhcpFQDN(dhcp_test_base.DhcpTestBase):
-    """Test implemenation of client completing negotiation with FQDN flag."""
-
-    def test_body(self):
-        """Main body of the test."""
-        subnet_mask = self.ethernet_pair.interface_subnet_mask
-        intended_ip = dhcp_test_base.DhcpTestBase.rewrite_ip_suffix(
-                subnet_mask,
-                self.server_ip,
-                INTENDED_IP_SUFFIX)
-        # It doesn't matter what is contained in this option value, except that
-        # the DHCP client does not crash decoding it or passing its
-        # interpretation of it back to shill.
-        fqdn_option = '\x03\xff\x00'
-        # This is the pool of information the server will give out to the client
-        # upon request.
-        dhcp_options = {
-                dhcp_packet.OPTION_SERVER_ID : self.server_ip,
-                dhcp_packet.OPTION_SUBNET_MASK : subnet_mask,
-                dhcp_packet.OPTION_IP_LEASE_TIME : LEASE_TIME_SECONDS,
-                dhcp_packet.OPTION_REQUESTED_IP : intended_ip,
-                dhcp_packet.OPTION_FULLY_QUALIFIED_DOMAIN_NAME : fqdn_option
-                }
-        rules = [
-                dhcp_handling_rule.DhcpHandlingRule_RespondToDiscovery(
-                        intended_ip, self.server_ip, dhcp_options, {}),
-                dhcp_handling_rule.DhcpHandlingRule_RespondToRequest(
-                        intended_ip, self.server_ip, dhcp_options, {})
-                ]
-        rules[-1].is_final_handler = True
-
-        # In some DHCP server implementations, the FQDN option is provided in
-        # the DHCP ACK response without the client requesting it.
-        rules[-1].force_reply_options = [
-                dhcp_packet.OPTION_FULLY_QUALIFIED_DOMAIN_NAME ]
-
-        self.server.start_test(rules, DHCP_NEGOTIATION_TIMEOUT_SECONDS)
-        self.server.wait_for_test_to_finish()
-        if not self.server.last_test_passed:
-            raise error.TestFail('Test server didn\'t get all the messages it '
-                                 'was told to expect during negotiation.')
-
-        # This test passes if the DHCP client lives long enough to send
-        # the network configuration to shill.
-        self.wait_for_dhcp_propagation()
-        self.check_dhcp_config(dhcp_options)
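For context on the deleted FQDN test above: DHCP option 81 (RFC 4702) carries a one-byte flags field, two one-byte RCODEs, and then the domain-name bytes, so the test's '\x03\xff\x00' decodes to flags 3 with an empty name. A minimal decoder sketch (not the dhcp_packet module's parser):

def decode_fqdn_option(raw):
    """Split a DHCP option 81 payload into (flags, rcode1, rcode2, name)
    per RFC 4702; no validation beyond a length check."""
    if len(raw) < 3:
        raise ValueError('FQDN option too short: %r' % (raw,))
    data = bytearray(raw)
    return data[0], data[1], data[2], bytes(data[3:])

# decode_fqdn_option(b'\x03\xff\x00') -> (3, 255, 0, b'')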
diff --git a/client/site_tests/network_DhcpFailureWithStaticIP/control b/client/site_tests/network_DhcpFailureWithStaticIP/control
deleted file mode 100644
index 2344c40..0000000
--- a/client/site_tests/network_DhcpFailureWithStaticIP/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, quiche, wiley'
-NAME = 'network_DhcpFailureWithStaticIP'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that it is possible to apply static IP configuration to a network
-  that has no DHCP server at all.
-
-  Note that shill's DHCP timeout handling depends on the device technology.
-  This test only validates shill's handling of DHCP timeouts for Ethernet.
-"""
-
-job.run_test('network_DhcpFailureWithStaticIP')
diff --git a/client/site_tests/network_DhcpFailureWithStaticIP/network_DhcpFailureWithStaticIP.py b/client/site_tests/network_DhcpFailureWithStaticIP/network_DhcpFailureWithStaticIP.py
deleted file mode 100644
index 454b431..0000000
--- a/client/site_tests/network_DhcpFailureWithStaticIP/network_DhcpFailureWithStaticIP.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import dhcp_test_base
-
-class network_DhcpFailureWithStaticIP(dhcp_test_base.DhcpTestBase):
-    """The DHCP Negotiation Timeout class.
-
-    Sets up a virtual ethernet pair, and stops the DHCP server on the
-    pair.  Static IP parameters are configured on the interface using
-    shill's StaticIP configuration.  Ensure that these parameters are
-    immediately applied to the ipconfig.
-
-    After the DHCP timeout interval, check to make sure that the same
-    IP config remains applied.
-
-    """
-    SHILL_DHCP_TIMEOUT_SECONDS = 30
-
-
-    def check_static_ip_config(self, ipconfig, static_ip_address, name_servers):
-        """Checks that the static IP configuration is applied to the
-        interface ipconfig.
-
-        @param ipconfig object representing the DBus IPConfig entity to check.
-        @param static_ip_address string IP address we expect to be configured.
-        @param name_servers list of string name servers we expect to be
-                configured on the interface.
-
-        """
-        ipconfig_properties = self.shill_proxy.dbus2primitive(
-                ipconfig.GetProperties(utf8_strings=True))
-
-        logging.info('IPConfig properties are %r', ipconfig_properties)
-        if static_ip_address != ipconfig_properties['Address']:
-            raise error.TestFail('Expected address %r but got %r' %
-                                 (static_ip_address,
-                                  ipconfig_properties['Address']))
-
-        if name_servers != ipconfig_properties['NameServers']:
-            raise error.TestFail('Expected name servers %r but got %r' %
-                                 (name_servers,
-                                  ipconfig_properties['NameServers']))
-
-
-    def get_ipconfig(self):
-        """Returns the first IPConfig object associated with the peer device."""
-        ipconfig_objects = (
-                self.get_interface_ipconfig_objects(
-                        self.ethernet_pair.peer_interface_name))
-        if len(ipconfig_objects) == 0:
-            raise error.TestFail('Failed to retrieve DHCP ipconfig object '
-                                 'from shill.')
-        return ipconfig_objects[0]
-
-
-    def test_body(self):
-        """Test main loop."""
-        self.server.stop()
-        service = self.find_ethernet_service(
-                self.ethernet_pair.peer_interface_name)
-
-        static_ip_address = '192.168.1.101'
-        name_servers = [ '10.10.10.10', '10.10.11.11' ]
-        config = {'Address' : static_ip_address,
-                  'Prefixlen' : 23,
-                  'NameServers' : name_servers}
-        service.SetProperty(self.shill_proxy.SERVICE_PROPERTY_STATIC_IP_CONFIG,
-                            dbus.Dictionary(config, signature='sv'))
-
-        ipconfig = self.get_ipconfig()
-        self.check_static_ip_config(ipconfig, static_ip_address, name_servers)
-
-        # Make sure configuration is still correct after DHCP timeout.
-        time.sleep(self.SHILL_DHCP_TIMEOUT_SECONDS + 2)
-        ipconfig = self.get_ipconfig()
-        self.check_static_ip_config(ipconfig, static_ip_address, name_servers)
diff --git a/client/site_tests/network_DhcpMTU/control b/client/site_tests/network_DhcpMTU/control
deleted file mode 100644
index 34d9061..0000000
--- a/client/site_tests/network_DhcpMTU/control
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, quiche, wiley'
-NAME = 'network_DhcpMTU'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that MTU parameters are successfully configured on the interface.
-  This test fails if the MTU parameter doesn't appear in the IPConfig, or
-  if the interface MTU is not set to the value negotiated.
-
-"""
-
-job.run_test('network_DhcpMTU')
diff --git a/client/site_tests/network_DhcpMTU/network_DhcpMTU.py b/client/site_tests/network_DhcpMTU/network_DhcpMTU.py
deleted file mode 100644
index 0748268e..0000000
--- a/client/site_tests/network_DhcpMTU/network_DhcpMTU.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import interface
-from autotest_lib.client.cros import dhcp_handling_rule
-from autotest_lib.client.cros import dhcp_packet
-from autotest_lib.client.cros import dhcp_test_base
-from autotest_lib.client.cros.networking import shill_proxy
-
-# Length of time the lease from the DHCP server is valid.
-LEASE_TIME_SECONDS = 60
-# We'll fill in the subnet and give this address to the client.
-INTENDED_IP_SUFFIX = "0.0.0.101"
-# We should be able to complete a DHCP negotiation in this amount of time.
-DHCP_NEGOTIATION_TIMEOUT_SECONDS = 10
-
-class network_DhcpMTU(dhcp_test_base.DhcpTestBase):
-    """Test implemenation of MTU including confirming the interface state."""
-
-    def check_mtu_config(self, mtu):
-        """Check that the ipconfig and interface in the client has correct MTU.
-
-        @param mtu int expected MTU value.
-
-        """
-        proxy = shill_proxy.ShillProxy()
-        device = proxy.find_object(
-                'Device',
-                {'Name': self.ethernet_pair.peer_interface_name})
-        if device is None:
-            raise error.TestFail('Device was not found.')
-        device_properties = device.GetProperties(utf8_strings=True)
-        ipconfig_path = device_properties['IPConfigs'][0]
-        ipconfig = proxy.get_dbus_object('org.chromium.flimflam.IPConfig',
-                                         ipconfig_path)
-        ipconfig_properties = ipconfig.GetProperties(utf8_strings=True)
-        ipconfig_mtu = ipconfig_properties['Mtu']
-        if ipconfig_mtu != mtu:
-            raise error.TestFail('Shill MTU %d does not match expected %d.' %
-                                 (ipconfig_mtu, mtu))
-
-        interface_mtu = interface.Interface(
-                self.ethernet_pair.peer_interface_name).mtu
-        if interface_mtu != mtu:
-            raise error.TestFail('Interface MTU %d does not match '
-                                 'expected %d.' % (interface_mtu, ipconfig_mtu))
-
-    def test_body(self):
-        """Main body of the test."""
-        subnet_mask = self.ethernet_pair.interface_subnet_mask
-        intended_ip = dhcp_test_base.DhcpTestBase.rewrite_ip_suffix(
-                subnet_mask,
-                self.server_ip,
-                INTENDED_IP_SUFFIX)
-        # Two real name servers, and a bogus one to be unpredictable.
-        dns_servers = ["8.8.8.8", "8.8.4.4", "192.168.87.88"]
-        interface_mtu = 1234
-        # This is the pool of information the server will give out to the client
-        # upon request.
-        dhcp_options = {
-                dhcp_packet.OPTION_SERVER_ID : self.server_ip,
-                dhcp_packet.OPTION_SUBNET_MASK : subnet_mask,
-                dhcp_packet.OPTION_IP_LEASE_TIME : LEASE_TIME_SECONDS,
-                dhcp_packet.OPTION_REQUESTED_IP : intended_ip,
-                dhcp_packet.OPTION_DNS_SERVERS : dns_servers,
-                dhcp_packet.OPTION_INTERFACE_MTU : interface_mtu
-                }
-        rules = [
-                dhcp_handling_rule.DhcpHandlingRule_RespondToDiscovery(
-                        intended_ip, self.server_ip, dhcp_options, {}),
-                dhcp_handling_rule.DhcpHandlingRule_RespondToRequest(
-                        intended_ip, self.server_ip, dhcp_options, {})
-                ]
-        rules[-1].is_final_handler = True
-        self.server.start_test(rules, DHCP_NEGOTIATION_TIMEOUT_SECONDS)
-        self.server.wait_for_test_to_finish()
-        if not self.server.last_test_passed:
-            raise error.TestFail("Test server didn't get all the messages it "
-                                 "was told to expect during negotiation.")
-
-        self.wait_for_dhcp_propagation()
-        self.check_mtu_config(interface_mtu)
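Outside shill, the interface-level half of the MTU check in the deleted test above can be read directly from sysfs; a small sketch (standard Linux path, not an autotest helper):

def read_interface_mtu(ifname):
    """Return the current MTU of a network interface from sysfs."""
    with open('/sys/class/net/%s/mtu' % ifname) as f:
        return int(f.read().strip())

# The deleted test expected 1234 on the virtual ethernet peer, e.g.:
# assert read_interface_mtu('veth_peer') == 1234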
diff --git a/client/site_tests/network_DhcpNak/control b/client/site_tests/network_DhcpNak/control
index 8527355..0eb1a9f 100644
--- a/client/site_tests/network_DhcpNak/control
+++ b/client/site_tests/network_DhcpNak/control
@@ -7,6 +7,7 @@
 ATTRIBUTES = "suite:network_nightly"
 TIME = 'SHORT'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
   Tests handling of DHCP NAK messages. In particular, tests that
diff --git a/client/site_tests/network_DhcpNegotiationSuccess/control b/client/site_tests/network_DhcpNegotiationSuccess/control
index d5ab1e3..96ecfb9 100644
--- a/client/site_tests/network_DhcpNegotiationSuccess/control
+++ b/client/site_tests/network_DhcpNegotiationSuccess/control
@@ -7,6 +7,7 @@
 ATTRIBUTES = "suite:network_nightly"
 TIME = 'SHORT'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
   Tests that we can negotiate a lease on an IPv4 address via DHCP.  This test
diff --git a/client/site_tests/network_DhcpNegotiationTimeout/control b/client/site_tests/network_DhcpNegotiationTimeout/control
index 641e8ec..f043c43 100644
--- a/client/site_tests/network_DhcpNegotiationTimeout/control
+++ b/client/site_tests/network_DhcpNegotiationTimeout/control
@@ -7,6 +7,7 @@
 ATTRIBUTES = "suite:network_nightly"
 TIME = 'SHORT'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
   Tests that shill handles DHCP timeout in a reasonable way. In particular,
diff --git a/client/site_tests/network_DhcpNonAsciiParameter/control b/client/site_tests/network_DhcpNonAsciiParameter/control
index 4bacd43..fae2452 100644
--- a/client/site_tests/network_DhcpNonAsciiParameter/control
+++ b/client/site_tests/network_DhcpNonAsciiParameter/control
@@ -7,6 +7,7 @@
 ATTRIBUTES = "suite:network_nightly"
 TIME = 'SHORT'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
   Tests that we can negotiate a lease on an IPv4 address via DHCP.
diff --git a/client/site_tests/network_DhcpNonAsciiParameter/network_DhcpNonAsciiParameter.py b/client/site_tests/network_DhcpNonAsciiParameter/network_DhcpNonAsciiParameter.py
index 4fe11ab..ebe7139 100644
--- a/client/site_tests/network_DhcpNonAsciiParameter/network_DhcpNonAsciiParameter.py
+++ b/client/site_tests/network_DhcpNonAsciiParameter/network_DhcpNonAsciiParameter.py
@@ -26,7 +26,7 @@
                 "zircon.encrusted.tweezers.google.com",
                 ]
         # Set a server name that is invalid as ASCII or UTF-8.
-        server_name = "\xff"
+        server_name = b"\xff"
         # This is the pool of information the server will give out to the client
         # upon request.
         dhcp_options = {
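The b"\xff" change above is the crux of the Python 3 port for this test: the text literal '\xff' is the character U+00FF and UTF-8-encodes to two bytes, while the option on the wire must be the single byte 0xff that is invalid as ASCII or UTF-8. A quick illustration:

# On Python 3, '\xff' is a one-character str, not a raw byte.
assert '\xff'.encode('utf-8') == b'\xc3\xbf'  # two bytes after UTF-8 encoding
assert len(b'\xff') == 1                      # the single raw byte the option needs
try:
    b'\xff'.decode('utf-8')
except UnicodeDecodeError:
    pass  # 0xff on its own is not valid UTF-8, which is the point of the test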
diff --git a/client/site_tests/network_DhcpRenew/control b/client/site_tests/network_DhcpRenew/control
index ea2b942..6413b2c 100644
--- a/client/site_tests/network_DhcpRenew/control
+++ b/client/site_tests/network_DhcpRenew/control
@@ -7,6 +7,7 @@
 ATTRIBUTES = "suite:network_nightly"
 TIME = 'SHORT'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
   Tests that shill can renew leases through dhcpcd, and deconfigures an
diff --git a/client/site_tests/network_DhcpRenew/network_DhcpRenew.py b/client/site_tests/network_DhcpRenew/network_DhcpRenew.py
index f31248e..2a70a1b 100644
--- a/client/site_tests/network_DhcpRenew/network_DhcpRenew.py
+++ b/client/site_tests/network_DhcpRenew/network_DhcpRenew.py
@@ -61,20 +61,24 @@
         lease_start_time = time.time()
         t1_deadline = lease_start_time + LEASE_T1_TIME
         t2_deadline = lease_start_time + LEASE_T2_TIME
+        # The DHCP standard forbids including the "server ID" and
+        # "requested IP" options during RENEW/REBIND (T1/T2 timeouts).
+        dhcp_opts_rr = dhcp_options.copy()
+        del dhcp_opts_rr[dhcp_packet.OPTION_SERVER_ID]
+        del dhcp_opts_rr[dhcp_packet.OPTION_REQUESTED_IP]
         # Ignore the T1 deadline packet.
         t1_handler = dhcp_handling_rule.DhcpHandlingRule_RespondToRequest(
                 intended_ip,
                 self.server_ip,
-                dhcp_options,
-                {},
-                should_respond=False)
+                dhcp_opts_rr, {},
+                should_respond=False,
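+                # The server ID option was removed above, so do not
+                # expect it in the intercepted RENEW request.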
+                expect_server_ip_set=False)
         t1_handler.target_time_seconds = t1_deadline
         t1_handler.allowable_time_delta_seconds = RENEWAL_TIME_DELTA_SECONDS
         t2_handler = dhcp_handling_rule.DhcpHandlingRule_RespondToPostT2Request(
                 intended_ip,
                 self.server_ip,
-                dhcp_options,
-                {},
+                dhcp_opts_rr, {},
                 should_respond=False)
         t2_handler.target_time_seconds = t2_deadline
         t2_handler.allowable_time_delta_seconds = RENEWAL_TIME_DELTA_SECONDS
diff --git a/client/site_tests/network_DhcpRenewWithOptionSubset/control b/client/site_tests/network_DhcpRenewWithOptionSubset/control
deleted file mode 100644
index a9105f7..0000000
--- a/client/site_tests/network_DhcpRenewWithOptionSubset/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_DhcpRenewWithOptionSubset'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that shill can renew leases through dhcpcd.  In particular,
-  this test replies to a DHCPREQUEST method with a subset of the
-  DHCP parameters it sent in the original DHCPOFFER, and observe
-  whether the client can retain those options itself.
-
-  This test fails if shill fails to renew the lease, or if the
-  parameters in the resulting ifconfig do not retain the values from
-  the DHCPOFFER.
-"""
-
-job.run_test('network_DhcpRenewWithOptionSubset')
diff --git a/client/site_tests/network_DhcpRenewWithOptionSubset/network_DhcpRenewWithOptionSubset.py b/client/site_tests/network_DhcpRenewWithOptionSubset/network_DhcpRenewWithOptionSubset.py
deleted file mode 100644
index df84bb8..0000000
--- a/client/site_tests/network_DhcpRenewWithOptionSubset/network_DhcpRenewWithOptionSubset.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import dhcp_handling_rule
-from autotest_lib.client.cros import dhcp_packet
-from autotest_lib.client.cros import dhcp_test_base
-
-# dhcpcd has a 20 second minimal accepted lease time
-LEASE_TIME_SECONDS = 20
-# We'll fill in the subnet and give this address to the client.
-INTENDED_IP_SUFFIX = "0.0.0.101"
-
-class network_DhcpRenewWithOptionSubset(dhcp_test_base.DhcpTestBase):
-    """Tests DHCP renewal process in the connection manager."""
-    def test_body(self):
-        subnet_mask = self.ethernet_pair.interface_subnet_mask
-        intended_ip = dhcp_test_base.DhcpTestBase.rewrite_ip_suffix(
-                subnet_mask,
-                self.server_ip,
-                INTENDED_IP_SUFFIX)
-        # Two real name servers, and a bogus one to be unpredictable.
-        dns_servers = ["8.8.8.8", "8.8.4.4", "192.168.87.88"]
-        domain_name = "corp.google.com"
-        dns_search_list = [
-                "corgie.google.com",
-                "lies.google.com",
-                "that.is.a.tasty.burger.google.com",
-                ]
-        # This is the pool of information the server will give out to the client
-        # upon request.
-        minimal_options = {
-                dhcp_packet.OPTION_SERVER_ID : self.server_ip,
-                dhcp_packet.OPTION_SUBNET_MASK : subnet_mask,
-                dhcp_packet.OPTION_IP_LEASE_TIME : LEASE_TIME_SECONDS,
-                dhcp_packet.OPTION_REQUESTED_IP : intended_ip,
-                dhcp_packet.OPTION_DNS_SERVERS : dns_servers,
-        }
-        dhcp_options = minimal_options.copy()
-        dhcp_options.update({
-                dhcp_packet.OPTION_DOMAIN_NAME : domain_name,
-                dhcp_packet.OPTION_DNS_DOMAIN_SEARCH_LIST : dns_search_list,
-                })
-        self.negotiate_and_check_lease(dhcp_options)
-
-        # At renewal time, respond without the search list, and with a
-        # different domain name from the original lease.
-        changed_options = {
-                dhcp_packet.OPTION_DOMAIN_NAME : "mail.google.com",
-        }
-        renew_options = minimal_options.copy()
-        renew_options.update(changed_options)
-        rules = [
-                dhcp_handling_rule.DhcpHandlingRule_RespondToRequest(
-                        intended_ip,
-                        self.server_ip,
-                        renew_options,
-                        {},
-                        should_respond=True,
-                        # Per RFC-2131, the server identifier must be false
-                        # during REBOOT.
-                        expect_server_ip_set=False)
-                ]
-        rules[-1].is_final_handler = True
-        self.server.start_test(
-                rules, dhcp_test_base.DHCP_NEGOTIATION_TIMEOUT_SECONDS)
-
-        # Trigger lease renewal on the client.
-        interface_name = self.ethernet_pair.peer_interface_name
-        self.get_device(interface_name).RenewDHCPLease()
-
-        self.server.wait_for_test_to_finish()
-        if not self.server.last_test_passed:
-            raise error.TestFail("Test server didn't get a renewal request.")
-
-        # Check to make sure the system retained the search list from the
-        # initial lease, but also has the domain name from the ACK of the
-        # DHCPREQUEST.
-        dhcp_options.update(changed_options)
-        self.check_dhcp_config(dhcp_options)
diff --git a/client/site_tests/network_DhcpRequestHostName/control b/client/site_tests/network_DhcpRequestHostName/control
deleted file mode 100644
index a991b05..0000000
--- a/client/site_tests/network_DhcpRequestHostName/control
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'mattwein'
-NAME = 'network_DhcpRequestHostName'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that we can allow a Device to request the hostname
-parameter and accept this hostname from a dhcp config and
-apply it locally.
-
-"""
-
-job.run_test('network_DhcpRequestHostName')
diff --git a/client/site_tests/network_DhcpRequestHostName/network_DhcpRequestHostName.py b/client/site_tests/network_DhcpRequestHostName/network_DhcpRequestHostName.py
deleted file mode 100644
index 500acfb..0000000
--- a/client/site_tests/network_DhcpRequestHostName/network_DhcpRequestHostName.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import utils
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import dhcp_packet
-from autotest_lib.client.cros import dhcp_test_base
-
-# Length of time the lease from the DHCP server is valid.
-LEASE_TIME_SECONDS = 60
-# We'll fill in the subnet and give this address to the client.
-INTENDED_IP_SUFFIX = '0.0.0.101'
-# Most ChromeOS devices are configured with this name.
-DEFAULT_HOSTNAME = 'localhost'
-# Hostname we'll provide to the device.
-TEST_HOSTNAME = 'britney-spears'
-
-class network_DhcpRequestHostName(dhcp_test_base.DhcpTestBase):
-    """Tests that we can supply a hostname to the shill over DHCP."""
-    def test_body(self):
-        # Make sure that shill is started with
-        # --accept-hostname-from=pseudoethernet0.
-        required_flag = '--accept-hostname-from=pseudoethernet0'
-        pid = utils.system_output('pgrep shill')
-        process_info = utils.system_output('ps %s' % pid)
-        if required_flag not in process_info:
-            raise error.TestNAError('Invalid Test. '
-                                    'Expected shill to be started with %s' %
-                                    required_flag)
-
-        # Keep track of the original hostname.
-        original_hostname = utils.system_output('hostname')
-        if original_hostname != DEFAULT_HOSTNAME:
-            logging.warning('Unexpected starting hostname %s (expected %s)',
-                            original_hostname, DEFAULT_HOSTNAME)
-            # Set the hostname to something we know.
-            utils.system('hostname %s' % DEFAULT_HOSTNAME)
-
-        subnet_mask = self.ethernet_pair.interface_subnet_mask
-        intended_ip = dhcp_test_base.DhcpTestBase.rewrite_ip_suffix(
-                subnet_mask,
-                self.server_ip,
-                INTENDED_IP_SUFFIX)
-        # Two real name servers, and a bogus one to be unpredictable.
-        dns_servers = ['8.8.8.8', '8.8.4.4', '192.168.87.88']
-        domain_name = 'corp.google.com'
-        dns_search_list = [
-                'corgie.google.com',
-                'lies.google.com',
-                'that.is.a.tasty.burger.google.com',
-                ]
-        # This is the pool of information the server will give out to the client
-        # upon request.
-        dhcp_options = {
-                dhcp_packet.OPTION_SERVER_ID : self.server_ip,
-                dhcp_packet.OPTION_SUBNET_MASK : subnet_mask,
-                dhcp_packet.OPTION_IP_LEASE_TIME : LEASE_TIME_SECONDS,
-                dhcp_packet.OPTION_REQUESTED_IP : intended_ip,
-                dhcp_packet.OPTION_DNS_SERVERS : dns_servers,
-                dhcp_packet.OPTION_DOMAIN_NAME : domain_name,
-                dhcp_packet.OPTION_HOST_NAME : TEST_HOSTNAME,
-                dhcp_packet.OPTION_DNS_DOMAIN_SEARCH_LIST : dns_search_list,
-                }
-
-        try:
-            self.negotiate_and_check_lease(dhcp_options)
-            system_hostname = utils.system_output('hostname')
-        finally:
-            # Set the hostname back to the original to avoid side effects.
-            utils.system_output('hostname %s' % original_hostname)
-
-        # Test that shill updated the system hostname correctly.
-        if system_hostname != TEST_HOSTNAME:
-            raise error.TestFail('Expected system host name to be set to '
-                                 '%s, but got %s instead.' %
-                                 (TEST_HOSTNAME, system_hostname))
diff --git a/client/site_tests/network_DhcpStaticIP/control b/client/site_tests/network_DhcpStaticIP/control
index 26a0398..18df52b 100644
--- a/client/site_tests/network_DhcpStaticIP/control
+++ b/client/site_tests/network_DhcpStaticIP/control
@@ -7,6 +7,7 @@
 ATTRIBUTES = "suite:network_nightly"
 TIME = 'SHORT'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
   Tests that we can negotiate a lease on an IPv4 address via DHCP,
diff --git a/client/site_tests/network_DhcpVendorEncapsulatedOptions/control b/client/site_tests/network_DhcpVendorEncapsulatedOptions/control
deleted file mode 100644
index da84062..0000000
--- a/client/site_tests/network_DhcpVendorEncapsulatedOptions/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_DhcpVendorEncapsulatedOptions'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that we can negotiate a lease on an IPv4 address via DHCP from
-  a server that provides the Vendor Encapsulated Options option.  Test
-  that a properly formatted Vendor Encapsulated Options field causes
-  the Ethernet service to report that it is tethered.
-
-"""
-
-job.run_test('network_DhcpVendorEncapsulatedOptions')
diff --git a/client/site_tests/network_DhcpVendorEncapsulatedOptions/network_DhcpVendorEncapsulatedOptions.py b/client/site_tests/network_DhcpVendorEncapsulatedOptions/network_DhcpVendorEncapsulatedOptions.py
deleted file mode 100644
index cf324bc..0000000
--- a/client/site_tests/network_DhcpVendorEncapsulatedOptions/network_DhcpVendorEncapsulatedOptions.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import dhcp_handling_rule
-from autotest_lib.client.cros import dhcp_packet
-from autotest_lib.client.cros import dhcp_test_base
-from autotest_lib.client.cros.networking import shill_proxy
-
-# Length of time the lease from the DHCP server is valid.
-LEASE_TIME_SECONDS = 60
-# We'll fill in the subnet and give this address to the client.
-INTENDED_IP_SUFFIX = '0.0.0.101'
-# We should be able to complete a DHCP negotiation in this amount of time.
-DHCP_NEGOTIATION_TIMEOUT_SECONDS = 10
-
-class network_DhcpVendorEncapsulatedOptions(dhcp_test_base.DhcpTestBase):
-    """Test implemenation of Vendor Enacapsulated Options in DHCP response."""
-
-    def check_vendor_encapsulated_options(self, option_string):
-        """Check that the ipconfig in the client shows the the vendor options.
-
-        @param option_string string expected value for vendor options.
-
-        """
-        proxy = shill_proxy.ShillProxy()
-        device = proxy.find_object(
-                'Device',
-                {'Name': self.ethernet_pair.peer_interface_name})
-        if device is None:
-            raise error.TestFail('Device was not found.')
-        device_properties = device.GetProperties(utf8_strings=True)
-        ipconfig_path = device_properties['IPConfigs'][0]
-        ipconfig = proxy.get_dbus_object('org.chromium.flimflam.IPConfig',
-                                         ipconfig_path)
-        ipconfig_properties = ipconfig.GetProperties(utf8_strings=True)
-        ipconfig_vendor_encapsulated_options = ''.join(map(chr,
-            ipconfig_properties['VendorEncapsulatedOptions']))
-        if ipconfig_vendor_encapsulated_options != option_string:
-            raise error.TestFail('Shill vendor encapsulated options %s does '
-                                 'not match expected %s.' %
-                                 (ipconfig_vendor_encapsulated_options,
-                                  option_string))
-
-        device_path = shill_proxy.ShillProxy.dbus2primitive(device.object_path)
-        service = proxy.find_object('Service', {'Device': device_path})
-        tethering = service.GetProperties()['Tethering']
-        expected_value = 'Confirmed'
-        if tethering != expected_value:
-            raise error.TestFail('Service tethering state %s does '
-                                 'not match expected %s.' %
-                                 (tethering, expected_value))
-
-
-    def test_body(self):
-        """Main body of the test."""
-        subnet_mask = self.ethernet_pair.interface_subnet_mask
-        intended_ip = dhcp_test_base.DhcpTestBase.rewrite_ip_suffix(
-                subnet_mask,
-                self.server_ip,
-                INTENDED_IP_SUFFIX)
-        # Two real name servers, and a bogus one to be unpredictable.
-        dns_servers = ['8.8.8.8', '8.8.4.4', '192.168.87.88']
-        vendor_options = 'ANDROID_METERED'
-        # This is the pool of information the server will give out to the client
-        # upon request.
-        dhcp_options = {
-                dhcp_packet.OPTION_SERVER_ID : self.server_ip,
-                dhcp_packet.OPTION_SUBNET_MASK : subnet_mask,
-                dhcp_packet.OPTION_IP_LEASE_TIME : LEASE_TIME_SECONDS,
-                dhcp_packet.OPTION_REQUESTED_IP : intended_ip,
-                dhcp_packet.OPTION_DNS_SERVERS : dns_servers,
-                dhcp_packet.OPTION_VENDOR_ENCAPSULATED_OPTIONS : vendor_options
-                }
-        rules = [
-                dhcp_handling_rule.DhcpHandlingRule_RespondToDiscovery(
-                        intended_ip, self.server_ip, dhcp_options, {}),
-                dhcp_handling_rule.DhcpHandlingRule_RespondToRequest(
-                        intended_ip, self.server_ip, dhcp_options, {})
-                ]
-        rules[-1].is_final_handler = True
-
-        # In some DHCP server implementations, the vendor encapsulated option
-        # is provided in the DHCP response without the client requesting it.
-        for rule in rules:
-            rule.force_reply_options = [
-                    dhcp_packet.OPTION_VENDOR_ENCAPSULATED_OPTIONS ]
-
-        self.server.start_test(rules, DHCP_NEGOTIATION_TIMEOUT_SECONDS)
-        self.server.wait_for_test_to_finish()
-        if not self.server.last_test_passed:
-            raise error.TestFail('Test server didn\'t get all the messages it '
-                                 'was told to expect during negotiation.')
-
-        self.wait_for_dhcp_propagation()
-        self.check_vendor_encapsulated_options(vendor_options)
diff --git a/client/site_tests/network_DhcpWpadNegotiation/control b/client/site_tests/network_DhcpWpadNegotiation/control
index 47162d3..fe32e30 100644
--- a/client/site_tests/network_DhcpWpadNegotiation/control
+++ b/client/site_tests/network_DhcpWpadNegotiation/control
@@ -7,6 +7,7 @@
 ATTRIBUTES = "suite:network_nightly"
 TIME = 'SHORT'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
   Tests that we can negotiate a lease on an IPv4 address via DHCP from a server
diff --git a/client/site_tests/network_DhcpWpadNegotiation/network_DhcpWpadNegotiation.py b/client/site_tests/network_DhcpWpadNegotiation/network_DhcpWpadNegotiation.py
index 067a3d2..74708b0 100644
--- a/client/site_tests/network_DhcpWpadNegotiation/network_DhcpWpadNegotiation.py
+++ b/client/site_tests/network_DhcpWpadNegotiation/network_DhcpWpadNegotiation.py
@@ -30,11 +30,11 @@
                 {'Name': self.ethernet_pair.peer_interface_name})
         if device is None:
             raise error.TestFail('Device was not found.')
-        device_properties = device.GetProperties(utf8_strings=True)
+        device_properties = device.GetProperties()
         ipconfig_path = device_properties['IPConfigs'][0]
         ipconfig = proxy.get_dbus_object('org.chromium.flimflam.IPConfig',
                                          ipconfig_path)
-        ipconfig_properties = ipconfig.GetProperties(utf8_strings=True)
+        ipconfig_properties = ipconfig.GetProperties()
         ipconfig_proxy_auto_config = ipconfig_properties[
                 'WebProxyAutoDiscoveryUrl']
         if ipconfig_proxy_auto_config != proxy_auto_config:
diff --git a/client/site_tests/network_Dhcpv6Basic/control b/client/site_tests/network_Dhcpv6Basic/control
deleted file mode 100644
index 04dfe8f..0000000
--- a/client/site_tests/network_Dhcpv6Basic/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "network_Dhcpv6Basic"
-PURPOSE = "Verify DHCPv6 negotiation can succeed in the most normal case"
-CRITERIA = """
-This test fails if the device cannot successfully negotiate a lease for
-non-temporary address and prefix delegation via DHCPv6 server.
-"""
-# TODO(zqiu): assign this test to an appropriate test suite.
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-
-DOC = """
-  Tests that we can acquire an DHCPv6 non-temporary address and prefix
-  delegation from DHCPv6 server.
-"""
-
-job.run_test('network_Dhcpv6Basic')
diff --git a/client/site_tests/network_Dhcpv6Basic/network_Dhcpv6Basic.py b/client/site_tests/network_Dhcpv6Basic/network_Dhcpv6Basic.py
deleted file mode 100644
index 22f703a..0000000
--- a/client/site_tests/network_Dhcpv6Basic/network_Dhcpv6Basic.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros import dhcpv6_test_base
-
-class network_Dhcpv6Basic(dhcpv6_test_base.Dhcpv6TestBase):
-    """
-    Tests DHCPv6 lease negotiation process.
-    """
-
-    def test_body(self):
-        """The main body for this test."""
-        self.check_dhcpv6_config()
diff --git a/client/site_tests/network_EthCaps/control b/client/site_tests/network_EthCaps/control
deleted file mode 100644
index 1515836..0000000
--- a/client/site_tests/network_EthCaps/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "network_EthCaps"
-PURPOSE = 'Verify that LAN devices have the required capabilities.'
-CRITERIA = """
-See server/site_tests/network_EthCapsServer/control for details
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-
-DOC = """
-See server/site_tests/network_EthCapsServer/control for details
-"""
-
-job.run_test('network_EthCaps', ethname="eth0")
diff --git a/client/site_tests/network_EthCaps/network_EthCaps.py b/client/site_tests/network_EthCaps/network_EthCaps.py
deleted file mode 100644
index 6ce482f..0000000
--- a/client/site_tests/network_EthCaps/network_EthCaps.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# Copyright (c) 2011-2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections, logging, os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import rtc
-from autotest_lib.client.cros.power import sys_power
-
-# TODO(tbroch) WOL:
-# - Should we test any of the other modes?  I chose magic as it meant that only
-#   the target device should be awaken.
-
-class network_EthCaps(test.test):
-    """Base class of EthCaps test.
-
-    Verify Capabilities advertised by an ethernet device work.
-    We can't verify much in reality though. But we can verify
-    WOL for built-in devices which is expected to work.
-
-    @param test.test: test instance
-    """
-    version = 1
-
-    # If WOL setting changed during test then restore to original during cleanup
-    _restore_wol = False
-
-
-    def _is_usb(self):
-        """Determine if device is USB (or not)
-
-        Add-on USB devices won't report the same 'Supports Wake-on' value
-        as built-in (ie PCI) ethernet devices.
-        """
-        if not self._bus_info:
-            cmd = "ethtool -i %s | awk '/bus-info/ {print $2}'" % self._ethname
-            self._bus_info = utils.system_output(cmd)
-            logging.debug("bus_info is %s", self._bus_info)
-            if not self._bus_info:
-                logging.error("ethtool -i %s has no bus-info", self._ethname)
-
-        # Two bus_info formats are reported by different device drivers:
-        # 1) "usb-0000:00:1d.0-1.2"
-        #    "0000:00:1d.0" is the "platform" info of the USB host controller
-        #    But it's obvious it's USB since that's the prefix. :)
-        if self._bus_info.startswith('usb-'):
-            return True
-
-        # 2) "2-1.2" where "2-" is USB host controller instance
-        return os.path.exists("/sys/bus/usb/devices/%s" % self._bus_info)
-
-    def _parse_ethtool_caps(self):
-        """Retrieve ethernet capabilities.
-
-        Executes ethtool command and parses various capabilities into a
-        dictionary.
-        """
-        caps = collections.defaultdict(list)
-
-        cmd = "ethtool %s" % self._ethname
-        prev_keyname = None
-        for ln in utils.system_output(cmd).splitlines():
-            cap_str = ln.strip()
-            try:
-                (keyname, value) = cap_str.split(': ')
-                caps[keyname].extend(value.split())
-                prev_keyname = keyname
-            except ValueError:
-                # keyname from previous line, add there
-                if prev_keyname:
-                    caps[prev_keyname].extend(cap_str.split())
-
-        for keyname in caps:
-            logging.debug("cap['%s'] = %s", keyname, caps[keyname])
-
-        self._caps = caps
-
-
-    def _check_eth_caps(self):
-        """Check necessary LAN capabilities are present.
-
-        Hardware and driver should support the following functionality:
-          1000baseT, 100baseT, 10baseT, half-duplex, full-duplex, auto-neg, WOL
-
-        Raises:
-          error.TestError if above LAN capabilities are NOT supported.
-        """
-        default_eth_caps = {
-            'Supported link modes': ['10baseT/Half', '100baseT/Half',
-                                      '1000baseT/Half', '10baseT/Full',
-                                      '100baseT/Full', '1000baseT/Full'],
-            'Supports auto-negotiation': ['Yes'],
-            # TODO(tbroch): Other WOL caps: 'a': arp and 's': magicsecure are
-            # they important?  Are any of these undesirable/security holes?
-            'Supports Wake-on': ['pumbg']
-            }
-        errors = 0
-
-        for keyname in default_eth_caps:
-            if keyname not in self._caps:
-                logging.error("\'%s\' not a capability of %s", keyname,
-                              self._ethname)
-                errors += 1
-                continue
-
-            for value in default_eth_caps[keyname]:
-                if value not in self._caps[keyname]:
-                    # WOL not required for USB Ethernet plug-in devices
-                    # But all USB Ethernet devices to date report "pg".
-                    # Enforce that.
-                    # RTL8153 can report 'pumbag'.
-                    # AX88178 can report 'pumbg'.
-                    if self._is_usb() and keyname == 'Supports Wake-on':
-                        if (self._caps[keyname][0].find('p') >= 0) and \
-                            (self._caps[keyname][0].find('g') >= 0):
-                            continue
-
-                    logging.error("\'%s\' not a supported mode in \'%s\' of %s",
-                                  value, keyname, self._ethname)
-                    errors += 1
-
-        if errors:
-            raise error.TestError("Eth capability checks.  See errors")
-
-
-    def _test_wol_magic_packet(self):
-        """Check the Wake-on-LAN (WOL) magic packet capabilities of a device.
-
-        Raises:
-          error.TestError if WOL functionality fails
-        """
-        # Magic number WOL supported
-        capname = 'Supports Wake-on'
-        if self._caps[capname][0].find('g') != -1:
-            logging.info("%s support magic number WOL", self._ethname)
-        else:
-            raise error.TestError('%s should support magic number WOL' %
-                            self._ethname)
-
-        # Check that WOL works
-        if self._caps['Wake-on'][0] != 'g':
-            utils.system_output("ethtool -s %s wol g" % self._ethname)
-            self._restore_wol = True
-
-        # Set RTC as backup to WOL
-        before_secs = rtc.get_seconds()
-        alarm_secs =  before_secs + self._suspend_secs + self._threshold_secs
-        rtc.set_wake_alarm(alarm_secs)
-
-        sys_power.do_suspend(self._suspend_secs)
-
-        after_secs = rtc.get_seconds()
-        # flush RTC as it may not work subsequently if wake was not RTC
-        rtc.set_wake_alarm(0)
-
-        suspended_secs = after_secs - before_secs
-        if suspended_secs >= (self._suspend_secs + self._threshold_secs):
-            raise error.TestError("Device woke due to RTC not WOL")
-
-
-    def _verify_wol_magic(self):
-        """If possible identify wake source was caused by WOL.
-
-        The bits identifying the wake source may be cleared by the time
-        userspace gets a chance to query the kernel.  However, firmware
-        might have a log and expose the wake source.  Attempt to interrogate
-        the wake source details if they are present on the system.
-
-        Returns:
-          True if verified or unable to verify due to system limitations
-          False otherwise
-        """
-        fw_log = "/sys/firmware/log"
-        if not os.path.isfile(fw_log):
-            logging.warning("Unable to verify wake in s/w due to missing log %s",
-                         fw_log)
-            return True
-
-        log_info_str = utils.system_output("egrep '(SMI|PM1|GPE0)_STS:' %s" %
-                                           fw_log)
-        status_dict = {}
-        for ln in log_info_str.splitlines():
-            logging.debug("f/w line = %s", ln)
-            try:
-                (status_reg, status_values) = ln.strip().split(":")
-                status_dict[status_reg] = status_values.split()
-            except ValueError:
-                # no bits asserted ... empty list
-                status_dict[status_reg] = list()
-
-        for status_reg in status_dict:
-            logging.debug("status_dict[%s] = %s", status_reg,
-                          status_dict[status_reg])
-
-        return ('PM1' in status_dict['SMI_STS']) and \
-            ('WAK' in status_dict['PM1_STS']) and \
-            ('PCIEXPWAK' in status_dict['PM1_STS']) and \
-            len(status_dict['GPE0_STS']) == 0
-
-
-    def cleanup(self):
-        if self._restore_wol:
-            utils.system_output("ethtool -s %s wol %s" %
-                                (self._ethname, self._caps['Wake-on'][0]))
-
-
-    def run_once(self, ethname=None, suspend_secs=5, threshold_secs=10):
-        """Run the test.
-
-        Args:
-          ethname: string of ethernet device under test
-          threshold_secs: integer of seconds to determine whether wake occurred
-            due to WOL versus RTC
-        """
-        if not ethname:
-            raise error.TestError("Name of ethernet device must be declared")
-
-        self._ethname = ethname
-        self._threshold_secs = threshold_secs
-        self._suspend_secs = suspend_secs
-        self._bus_info = None
-
-        self._parse_ethtool_caps()
-        self._check_eth_caps()
-
-        # ChromeOS does not require WOL support for any USB Ethernet Adapters.
-        # In fact, WoL only known to work for PCIe Ethernet devices.
-        # We know _some_ platforms power off all USB ports when suspended.
-        # USB adapters with "pg" capabilities _might_ WoL on _some_ platforms.
-        # White list/black listing of platforms will be required to test
-        # WoL against USB dongles in the future.
-        if self._is_usb():
-            logging.debug("Skipping WOL test on USB Ethernet device.")
-            return
-
-        self._test_wol_magic_packet()
-        # TODO(tbroch) There is evidence in the filesystem of the wake source
-        # for coreboot but its still being flushed out.  For now only produce a
-        # warning for this check.
-        if not self._verify_wol_magic():
-            logging.warning("Unable to see evidence of WOL wake in filesystem")
diff --git a/client/site_tests/network_EthernetStressPlug/control b/client/site_tests/network_EthernetStressPlug/control
deleted file mode 100644
index ab149f8..0000000
--- a/client/site_tests/network_EthernetStressPlug/control
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "network_EthernetStressPlug"
-PURPOSE = "Stress-test Ethernet plug/unplug"
-CRITERIA = """
-This test fails if device fails to obtain dhcp through ethernet.
-"""
-# Note: This is now only for manual run.
-TIME = "SHORT"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-
-DOC = """
-  Stress-tests simulating plugging/unplugging the ethernet dongle.
-"""
-
-# We want the default number of loops per test run
-# to be 100.
-num_iterations = 100
-interface=None # autodetect interface
-
-# Parse comma-separated args.
-for arg in args:
-    for item in arg.split(','):
-        key, val = item.split('=')
-        if key == 'num_iterations':
-            num_iterations = int(val)
-        if key == 'interface':
-            interface = val
-
-job.run_test('network_EthernetStressPlug', num_iterations=num_iterations,
-    interface=interface)
diff --git a/client/site_tests/network_EthernetStressPlug/network_EthernetStressPlug.py b/client/site_tests/network_EthernetStressPlug/network_EthernetStressPlug.py
deleted file mode 100644
index 158cd16..0000000
--- a/client/site_tests/network_EthernetStressPlug/network_EthernetStressPlug.py
+++ /dev/null
@@ -1,524 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import fcntl
-import logging
-import os
-import pyudev
-import random
-import re
-import socket
-import struct
-import subprocess
-import sys
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class EthernetDongle(object):
-    """ Used for definining the desired module expect states. """
-
-    def __init__(self, expect_speed='100', expect_duplex='full'):
-        # Expected values for parameters.
-        self.expected_parameters = {
-            'ifconfig_status': 0,
-            'duplex': expect_duplex,
-            'speed': expect_speed,
-            'mac_address': None,
-            'ipaddress': None,
-        }
-
-    def GetParam(self, parameter):
-        return self.expected_parameters[parameter]
-
-class network_EthernetStressPlug(test.test):
-    version = 1
-
-    def initialize(self, interface=None):
-        """ Determines and defines the bus information and interface info. """
-
-        self.link_speed_failures = 0
-        sysnet = os.path.join('/', 'sys', 'class', 'net')
-
-        def get_ethernet_interface(interface):
-            """ Valid interface requires link and duplex status."""
-            avail_eth_interfaces=[]
-            if interface is None:
-                # This is not the (bridged) eth dev we are looking for.
-                for x in os.listdir(sysnet):
-                    sysdev = os.path.join(sysnet,  x, 'device')
-                    syswireless = os.path.join(sysnet,  x, 'wireless')
-                    if os.path.exists(sysdev) and not os.path.exists(syswireless):
-                        avail_eth_interfaces.append(x)
-            else:
-                sysdev = os.path.join(sysnet,  interface, 'device')
-                if os.path.exists(sysdev):
-                    avail_eth_interfaces.append(interface)
-                else:
-                    raise error.TestError('Network Interface %s is not a device ' % iface)
-
-            link_status = 'unknown'
-            duplex_status = 'unknown'
-            iface = 'unknown'
-
-            for iface in avail_eth_interfaces:
-                syslink = os.path.join(sysnet, iface, 'operstate')
-                try:
-                    link_file = open(syslink)
-                    link_status = link_file.readline().strip()
-                    link_file.close()
-                except:
-                    pass
-
-                sysduplex = os.path.join(sysnet, iface, 'duplex')
-                try:
-                    duplex_file = open(sysduplex)
-                    duplex_status = duplex_file.readline().strip()
-                    duplex_file.close()
-                except:
-                    pass
-
-                if link_status == 'up' and duplex_status == 'full':
-                    return iface
-
-            raise error.TestError('Network Interface %s not usable (%s, %s)'
-                                  % (iface, link_status, duplex_status))
-
-        def get_net_device_path(device=''):
-            """ Uses udev to get the path of the desired internet device.
-            Args:
-                device: look for the /sys entry for this ethX device
-            Returns:
-                /sys pathname for the found ethX device or raises an error.
-            """
-            net_list = pyudev.Context().list_devices(subsystem='net')
-            for dev in net_list:
-                if dev.sys_path.endswith('net/%s' % device):
-                    return dev.sys_path
-
-            raise error.TestError('Could not find /sys device path for %s'
-                                  % device)
-
-        self.interface = get_ethernet_interface(interface)
-        self.eth_syspath = get_net_device_path(self.interface)
-        self.eth_flagspath = os.path.join(self.eth_syspath, 'flags')
-
-        # USB Dongles: "authorized" file will disable the USB port and
-        # in some cases powers off the port. In either case, net/eth* goes
-        # away. And thus "../../.." won't be valid to access "authorized".
-        # Build the pathname that goes directly to authpath.
-        auth_path = os.path.join(self.eth_syspath, '../../../authorized')
-        if os.path.exists(auth_path):
-            # now rebuild the path w/o use of '..'
-            auth_path = os.path.split(self.eth_syspath)[0]
-            auth_path = os.path.split(auth_path)[0]
-            auth_path = os.path.split(auth_path)[0]
-
-            self.eth_authpath = os.path.join(auth_path,'authorized')
-        else:
-            self.eth_authpath = None
-
-        # Stores the status of the most recently run iteration.
-        self.test_status = {
-            'ipaddress': None,
-            'eth_state': None,
-            'reason': None,
-            'last_wait': 0
-        }
-
-        self.secs_before_warning = 10
-
-        # Represents the current number of instances in which ethernet
-        # took longer than dhcp_warning_level to come up.
-        self.warning_count = 0
-
-        # The percentage of test warnings before we fail the test.
-        self.warning_threshold = .25
-
-    def GetIPAddress(self):
-        """ Obtains the ipaddress of the interface. """
-        try:
-            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-            return socket.inet_ntoa(fcntl.ioctl(
-                   s.fileno(), 0x8915,  # SIOCGIFADDR
-                   struct.pack('256s', self.interface[:15]))[20:24])
-        except:
-            return None
-
-    def GetEthernetStatus(self):
-        """
-        Updates self.test_status with the status of the ethernet interface.
-
-        Returns:
-            True if the ethernet device is up.  False otherwise.
-        """
-
-        def ReadEthVal(param):
-            """ Reads the network parameters of the interface. """
-            eth_path = os.path.join('/', 'sys', 'class', 'net', self.interface,
-                                    param)
-            val = None
-            try:
-                fp = open(eth_path)
-                val = fp.readline().strip()
-                fp.close()
-            except:
-                pass
-            return val
-
-        eth_out = self.ParseEthTool()
-        ethernet_status = {
-            'ifconfig_status': utils.system('ifconfig %s' % self.interface,
-                                            ignore_status=True),
-            'duplex': eth_out.get('Duplex'),
-            'speed': eth_out.get('Speed'),
-            'mac_address': ReadEthVal('address'),
-            'ipaddress': self.GetIPAddress()
-        }
-
-        self.test_status['ipaddress'] = ethernet_status['ipaddress']
-
-        for param, val in ethernet_status.iteritems():
-            if self.dongle.GetParam(param) is None:
-                # For parameters with expected values none, we check the
-                # existence of a value.
-                if not bool(val):
-                    self.test_status['eth_state'] = False
-                    self.test_status['reason'] = '%s is not ready: %s == %s' \
-                                                 % (self.interface, param, val)
-                    return False
-            else:
-                if val != self.dongle.GetParam(param):
-                    self.test_status['eth_state'] = False
-                    self.test_status['reason'] = '%s is not ready. (%s)\n' \
-                                                 "  Expected: '%s'\n" \
-                                                 "  Received: '%s'" \
-                                                 % (self.interface, param,
-                                                 self.dongle.GetParam(param),
-                                                 val)
-                    return False
-
-        self.test_status['eth_state'] = True
-        self.test_status['reason'] = None
-        return True
-
-    def _PowerEthernet(self, power=1):
-        """ Sends command to change the power state of ethernet.
-        Args:
-          power: 0 to unplug, 1 to plug.
-        """
-
-        if self.eth_authpath:
-            try:
-                fp = open(self.eth_authpath, 'w')
-                fp.write('%d' % power)
-                fp.close()
-            except:
-                raise error.TestError('Could not write %d to %s' %
-                                      (power, self.eth_authpath))
-
-        # Linux can set network link state by frobbing "flags" bitfields.
-        # Bit fields are documented in include/uapi/linux/if.h.
-        # Bit 0 is IFF_UP (link up=1 or down=0).
-        elif os.path.exists(self.eth_flagspath):
-            try:
-                fp = open(self.eth_flagspath, mode='r')
-                val= int(fp.readline().strip(), 16)
-                fp.close()
-            except:
-                raise error.TestError('Could not read %s' % self.eth_flagspath)
-
-            if power:
-                newval = val | 1
-            else:
-                newval = val &  ~1
-
-            if val != newval:
-                try:
-                    fp = open(self.eth_flagspath, mode='w')
-                    fp.write('0x%x' % newval)
-                    fp.close()
-                except:
-                    raise error.TestError('Could not write 0x%x to %s' %
-                                          (newval, self.eth_flagspath))
-                logging.debug("eth flags: 0x%x to 0x%x" % (val, newval))
-
-        # else use ifconfig eth0 up/down to switch
-        else:
-            logging.warning('plug/unplug event control not found. '
-                            'Use ifconfig %s %s instead' %
-                            (self.interface, 'up' if power else 'down'))
-            result = subprocess.check_call(['ifconfig', self.interface,
-                                            'up' if power else 'down'])
-            if result:
-                raise error.TestError('Fail to change the power state of %s' %
-                                      self.interface)
-
-    def TestPowerEthernet(self, power=1, timeout=45):
-        """ Tests enabling or disabling the ethernet.
-        Args:
-            power: 0 to unplug, 1 to plug.
-            timeout: Indicates approximately the number of seconds to timeout
-                     how long we should check for the success of the ethernet
-                     state change.
-
-        Returns:
-            The time in seconds required for device to transfer to the desired
-            state.
-
-        Raises:
-            error.TestFail if the ethernet status is not in the desired state.
-        """
-
-        start_time = time.time()
-        end_time = start_time + timeout
-
-        power_str = ['off', 'on']
-        self._PowerEthernet(power)
-
-        while time.time() < end_time:
-            status = self.GetEthernetStatus()
-
-
-            # If GetEthernetStatus() detects the wrong link rate, "bouncing"
-            # the link _should_ recover. Keep count of how many times this
-            # happens. Test should fail if happens "frequently".
-            if power and not status and 'speed' in self.test_status['reason']:
-                self._PowerEthernet(0)
-                time.sleep(1)
-                self._PowerEthernet(power)
-                self.link_speed_failures += 1
-                logging.warning('Link Renegotiated ' +
-                    self.test_status['reason'])
-
-            # If ethernet is enabled  and has an IP, OR
-            # if ethernet is disabled and does not have an IP,
-            # then we are in the desired state.
-            # Return the number of "seconds" for this to happen.
-            # (translated to an approximation of the number of seconds)
-            if (power and status and \
-                self.test_status['ipaddress'] is not None) \
-                or \
-                (not power and not status and \
-                self.test_status['ipaddress'] is None):
-                return time.time()-start_time
-
-            time.sleep(1)
-
-        logging.debug(self.test_status['reason'])
-        raise error.TestFail('ERROR: TIMEOUT : %s IP is %s after setting '
-                             'power %s (last_wait = %.2f seconds)' %
-                             (self.interface, self.test_status['ipaddress'],
-                             power_str[power], self.test_status['last_wait']))
-
-    def RandSleep(self, min_sleep, max_sleep):
-        """ Sleeps for a random duration.
-
-        Args:
-            min_sleep: Minimum sleep parameter in miliseconds.
-            max_sleep: Maximum sleep parameter in miliseconds.
-        """
-        duration = random.randint(min_sleep, max_sleep)/1000.0
-        self.test_status['last_wait'] = duration
-        time.sleep(duration)
-
-    def _ParseEthTool_LinkModes(self, line):
-        """ Parses Ethtool Link Mode Entries.
-        Inputs:
-            line: Space separated string of link modes that have the format
-                  (\d+)baseT/(Half|Full) (eg. 100baseT/Full).
-
-        Outputs:
-            List of dictionaries where each dictionary has the format
-            { 'Speed': '<speed>', 'Duplex': '<duplex>' }
-        """
-        parameters = []
-
-        # QCA ESS EDMA driver doesn't report "Supported link modes:"
-        if 'Not reported' in line:
-            return parameters
-
-        for speed_to_parse in line.split():
-            speed_duplex = speed_to_parse.split('/')
-            parameters.append(
-                {
-                    'Speed': re.search('(\d*)', speed_duplex[0]).groups()[0],
-                    'Duplex': speed_duplex[1],
-                }
-            )
-        return parameters
-
-    def ParseEthTool(self):
-        """
-        Parses the output of Ethtools into a dictionary and returns
-        the dictionary with some cleanup in the below areas:
-            Speed: Remove the unit of speed.
-            Supported link modes: Construct a list of dictionaries.
-                                  The list is ordered (relying on ethtool)
-                                  and each of the dictionaries contains a Speed
-                                  kvp and a Duplex kvp.
-            Advertised link modes: Same as 'Supported link modes'.
-
-        Sample Ethtool Output:
-            Supported ports: [ TP MII ]
-            Supported link modes:   10baseT/Half 10baseT/Full
-                                    100baseT/Half 100baseT/Full
-                                    1000baseT/Half 1000baseT/Full
-            Supports auto-negotiation: Yes
-            Advertised link modes:  10baseT/Half 10baseT/Full
-                                    100baseT/Half 100baseT/Full
-                                    1000baseT/Full
-            Advertised auto-negotiation: Yes
-            Speed: 1000Mb/s
-            Duplex: Full
-            Port: MII
-            PHYAD: 2
-            Transceiver: internal
-            Auto-negotiation: on
-            Supports Wake-on: pg
-            Wake-on: d
-            Current message level: 0x00000007 (7)
-            Link detected: yes
-
-        Returns:
-          A dictionary representation of the above ethtool output, or an empty
-          dictionary if no ethernet dongle is present.
-          Eg.
-            {
-              'Supported ports': '[ TP MII ]',
-              'Supported link modes': [{'Speed': '10', 'Duplex': 'Half'},
-                                       {...},
-                                       {'Speed': '1000', 'Duplex': 'Full'}],
-              'Supports auto-negotiation: 'Yes',
-              'Advertised link modes': [{'Speed': '10', 'Duplex': 'Half'},
-                                        {...},
-                                        {'Speed': '1000', 'Duplex': 'Full'}],
-              'Advertised auto-negotiation': 'Yes'
-              'Speed': '1000',
-              'Duplex': 'Full',
-              'Port': 'MII',
-              'PHYAD': '2',
-              'Transceiver': 'internal',
-              'Auto-negotiation': 'on',
-              'Supports Wake-on': 'pg',
-              'Wake-on': 'd',
-              'Current message level': '0x00000007 (7)',
-              'Link detected': 'yes',
-            }
-        """
-        parameters = {}
-        ethtool_out = os.popen('ethtool %s' % self.interface).read().split('\n')
-        if 'No data available' in ethtool_out:
-            return parameters
-
-        # bridged interfaces only have two lines of ethtool output.
-        if len(ethtool_out) < 3:
-            return parameters
-
-        # For multiline entries, keep track of the key they belong to.
-        current_key = ''
-        for line in ethtool_out:
-            current_line = line.strip().partition(':')
-            if current_line[1] == ':':
-                current_key = current_line[0]
-
-                # Assumes speed does not span more than one line.
-                # Also assigns empty string if speed field
-                # is not available.
-                if current_key == 'Speed':
-                    speed = re.search('^\s*(\d*)', current_line[2])
-                    parameters[current_key] = ''
-                    if speed:
-                        parameters[current_key] = speed.groups()[0]
-                elif (current_key == 'Supported link modes' or
-                      current_key == 'Advertised link modes'):
-                    parameters[current_key] = []
-                    parameters[current_key] += \
-                        self._ParseEthTool_LinkModes(current_line[2])
-                else:
-                    parameters[current_key] = current_line[2].strip()
-            else:
-              if (current_key == 'Supported link modes' or
-                  current_key == 'Advertised link modes'):
-                  parameters[current_key] += \
-                      self._ParseEthTool_LinkModes(current_line[0])
-              else:
-                  parameters[current_key]+=current_line[0].strip()
-
-        return parameters
-
-    def GetDongle(self):
-        """ Returns the ethernet dongle object associated with what's connected.
-
-        Dongle uniqueness is retrieved from the 'product' file that is
-        associated with each usb dongle in
-        /sys/devices/pci.*/0000.*/usb.*/.*-.*/product.  The correct
-        dongle object is determined and returned.
-
-        Returns:
-          Object of type EthernetDongle.
-
-        Raises:
-          error.TestFail if ethernet dongle is not found.
-        """
-        ethtool_dict = self.ParseEthTool()
-
-        if not ethtool_dict:
-            raise error.TestFail('Unable to parse ethtool output for %s.' %
-                                 self.interface)
-
-        # Ethtool output is ordered in terms of speed so this obtains the
-        # fastest speed supported by dongle.
-        # QCA ESS EDMA driver doesn't report "Supported link modes".
-        max_link = ethtool_dict['Advertised link modes'][-1]
-
-        return EthernetDongle(expect_speed=max_link['Speed'],
-                              expect_duplex=max_link['Duplex'])
-
-    def run_once(self, num_iterations=1):
-        try:
-            self.dongle = self.GetDongle()
-
-            #Sleep for a random duration between .5 and 2 seconds
-            #for unplug and plug scenarios.
-            for i in range(num_iterations):
-                logging.debug('Iteration: %d start' % i)
-                linkdown_time = self.TestPowerEthernet(power=0)
-                linkdown_wait = self.test_status['last_wait']
-                if linkdown_time > self.secs_before_warning:
-                    self.warning_count+=1
-
-                self.RandSleep(500, 2000)
-
-                linkup_time = self.TestPowerEthernet(power=1)
-                linkup_wait = self.test_status['last_wait']
-
-                if linkup_time > self.secs_before_warning:
-                    self.warning_count+=1
-
-                self.RandSleep(500, 2000)
-                logging.debug('Iteration: %d end (down:%f/%d up:%f/%d)' %
-                              (i, linkdown_wait, linkdown_time,
-                               linkup_wait, linkup_time))
-
-                if self.warning_count > num_iterations * self.warning_threshold:
-                    raise error.TestFail('ERROR: %.2f%% of total runs (%d) '
-                                         'took longer than %d seconds for '
-                                         'ethernet to come up.' %
-                                         (self.warning_threshold*100,
-                                          num_iterations,
-                                          self.secs_before_warning))
-
-            # Link speed failures are secondary.
-            # Report after all iterations complete.
-            if self.link_speed_failures > 1:
-                raise error.TestFail('ERROR: %s : Link Renegotiated %d times'
-                                % (self.interface, self.link_speed_failures))
-
-        except Exception as e:
-            exc_info = sys.exc_info()
-            self._PowerEthernet(1)
-            raise exc_info[0], exc_info[1], exc_info[2]
diff --git a/client/site_tests/network_FirewallHolePunch/control b/client/site_tests/network_FirewallHolePunch/control
deleted file mode 100644
index 4f88f87..0000000
--- a/client/site_tests/network_FirewallHolePunch/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "krisr@chromium.org"
-NAME = "network_FirewallHolePunch"
-PURPOSE = "Verifies the Chrome applications can punch a hole in the firewall"
-CRITERIA = """
-This test will fail if a Chrome application is unable to punch a hole in the
-firewall or if the socket is not closed when no longer in use.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-
-DOC = """
-This test verifies holes can be punched in the ChromeOS firewall.
-"""
-
-job.run_test('network_FirewallHolePunch')
-
diff --git a/client/site_tests/network_FirewallHolePunch/network_FirewallHolePunch.py b/client/site_tests/network_FirewallHolePunch/network_FirewallHolePunch.py
deleted file mode 100644
index 716a9b2..0000000
--- a/client/site_tests/network_FirewallHolePunch/network_FirewallHolePunch.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.common_lib.cros.network import interface
-
-_CLIENT_COMPLETE_FLAG = '/tmp/network_FirewallHolePunch'
-
-class network_FirewallHolePunch(test.test):
-    """Tests that controls an app that can open ports."""
-    version = 1
-    preserve_srcdir = True
-
-
-    def setup(self):
-        """Sets the current directory so the app can be accessed."""
-        os.chdir(self.srcdir)
-        self.extension_path = os.path.join(os.path.dirname(__file__),
-                                           'src/tcpserver')
-
-
-    def wait_for_server_command(self):
-        """Waits for the server to send a command.
-
-        @returns True if the server responds to the request; False otherwise.
-
-        """
-        for i in range(30):
-            result = utils.run('ls %s' %  _CLIENT_COMPLETE_FLAG,
-                               ignore_status=True)
-            if result.exit_status != 0:
-                return True
-            time.sleep(1)
-        return False
-
-
-    def interpret_command(self, command, test_error):
-        """Takes the string command and performs the appropriate action.
-
-        @param command: the command string
-        @param test_error: string of the test error message
-
-
-        @raises TestError if the server does not set the flag or if an
-                invalid command is passed.
-        """
-        if self.wait_for_server_command() is False:
-            raise error.TestError(test_error)
-
-        if command == 'launch app':
-            self.extension = self.cr.get_extension(self.extension_path)
-            self.extension.ExecuteJavaScript('tcpUI.create();')
-        elif command == 'start server':
-            script = str('tcpUI.startServer("%s", %d);' %
-                         (self.ip_address, self.port))
-            self.extension.ExecuteJavaScript(script)
-        elif command == 'stop server':
-            self.extension.ExecuteJavaScript('tcpUI.stopServer();')
-        elif command == 'exit app':
-            script = 'commandWindow.contentWindow.close();'
-            self.extension.ExecuteJavaScript(script)
-            self.extension.ExecuteJavaScript('close();')
-        elif command == 'logout':
-            self.cr.browser.Close()
-        elif command == 'login':
-            flag = '--enable-firewall-hole-punching'
-            self.cr = chrome.Chrome(extension_paths=[self.extension_path],
-                                    extra_browser_args=flag)
-        else:
-            raise error.TestError('Invalid client command passed.')
-
-        utils.run('touch %s' % _CLIENT_COMPLETE_FLAG)
-
-
-    def run_once(self, test_sequence=None, port=8888):
-        """Runs the integration test."""
-
-        # Throw if no test sequence
-        if not test_sequence:
-            raise error.TestError('No test sequence was passed to client.')
-
-        # Get the IP Address of the DUT
-        ethernet = interface.Interface.get_connected_ethernet_interface()
-        self.ip_address = ethernet.ipv4_address
-        self.port = port
-
-        self.cr = None
-        self.extension = None
-
-        for command in test_sequence:
-            self.interpret_command(command['client_command'],
-                                   command['client_error'])
-
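For reference, the flag-file handshake the deleted client test relied on (the server deletes /tmp/network_FirewallHolePunch to release the client, the client touches it again to acknowledge) reduces to the standalone sketch below; the 30 x 1 s polling matches the loop removed above.

import os
import time

_CLIENT_COMPLETE_FLAG = '/tmp/network_FirewallHolePunch'

def wait_for_server_command(attempts=30, poll_interval=1):
    """Return True once the server has removed the flag file."""
    for _ in range(attempts):
        if not os.path.exists(_CLIENT_COMPLETE_FLAG):
            return True
        time.sleep(poll_interval)
    return False

def acknowledge_command():
    """Recreate the flag file so the server knows the command finished."""
    open(_CLIENT_COMPLETE_FLAG, 'w').close()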
diff --git a/client/site_tests/network_FirewallHolePunch/src/README.txt b/client/site_tests/network_FirewallHolePunch/src/README.txt
deleted file mode 100644
index 85889ce..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/README.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-This extension was taken from the Google Chrome Apps sample:
-  https://github.com/GoogleChrome/chrome-app-samples/
-
-Modifications were made in main.js to make the extension usable
-via Telemetry.
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/README.md b/client/site_tests/network_FirewallHolePunch/src/tcpserver/README.md
deleted file mode 100644
index df5b41b..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/README.md
+++ /dev/null
@@ -1,16 +0,0 @@
-<a target="_blank" href="https://chrome.google.com/webstore/detail/ahlcocbkjpjkobcdpjcobmibmpbeecpg">![Try it now in CWS](https://raw.github.com/GoogleChrome/chrome-app-samples/master/tryitnowbutton.png "Click here to install this sample from the Chrome Web Store")</a>
-
-
-# Chrome Commando TCP server
-
-This is a sample that shows how you can run a TCP network server in a packaged app. It lets you start a server on an arbitrary address and port. Telnet to the listening port and you can remotely control your browser by sending commands such as `open` and `echo`.
-
-## APIs
-
-* [Sockets](https://developer.chrome.com/apps/sockets_tcp)
-* [Runtime](https://developer.chrome.com/apps/app_runtime)
-* [Window](https://developer.chrome.com/apps/app_window)
-     
-## Screenshot
-![screenshot](/samples/tcpserver/assets/screenshot_1280_800.png)
-
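The deleted README describes driving the sample server manually over telnet. A rough Python equivalent of that interaction, assuming the address and port the app window reports (127.0.0.1:8888 by default), might look like:

import socket

def send_command(host, port, command):
    """Send one newline-terminated command (e.g. 'echo hello') and return
    the first chunk of the reply, stripped."""
    with socket.create_connection((host, port), timeout=5) as sock:
        sock.sendall((command + '\n').encode())
        return sock.recv(4096).decode().strip()

# Example, assuming the app is serving on 127.0.0.1:8888:
# print(send_command('127.0.0.1', 8888, 'echo hello'))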
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/assets/screenshot_1280_800.png b/client/site_tests/network_FirewallHolePunch/src/tcpserver/assets/screenshot_1280_800.png
deleted file mode 100644
index c8b2ffa..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/assets/screenshot_1280_800.png
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/commands/BrowserCommands.js b/client/site_tests/network_FirewallHolePunch/src/tcpserver/commands/BrowserCommands.js
deleted file mode 100644
index bbd27ee..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/commands/BrowserCommands.js
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
-Copyright 2012 Google Inc.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Renato Mangini (mangini@chromium.org)
-*/
-
-(function(exports) {
-
-  function Commands() {
-  	this.commands={};
-  }
-
-  Commands.prototype.addCommand=function(name, help, runnable) {
-  	if (name in this.commands) {
-  		console.log("WARNING: ignoring duplicate command "+name);
-  		return;
-  	}
-  	this.commands[name] = {help: help, runnable: runnable};
-  }
-
-  Commands.prototype.help=function(name, args) {
-    var result='';
-    for (var command in this.commands) {
-      result+=command+'\t'+this.commands[command].help+"\n";
-    }
-    return result;
-  	/*if (! (name in this.commands)) {
-  		return "Unknown command "+name;
-  	}
-  	var context={out: out};
-  	return this.commands[name].help.apply(context, args);*/
-  }
-
-  Commands.prototype.run=function(name, args) {
-    if (name === 'help') {
-      return this.help(name, args);
-    }
-  	if (! (name in this.commands)) {
-  		throw 'Unknown command '+name+'. Try "help"';
-  	}
-  	var context={};
-  	return this.commands[name].runnable.call(context, args);
-  }
-
-  exports.Commands=new Commands();
-
-})(window);
-
-
-Commands.addCommand("echo", 
-	"Echo the arguments", 
-	function(args) {
-		return args.join(' ');
-	});
-
-Commands.addCommand("open", 
-  "Open the given URL", 
-  function(args) {
-    chrome.app.window.create('commands/webview.html', {innerBounds: {width: 600, height: 400}},
-      function(w) {
-        w.contentWindow.addEventListener("DOMContentLoaded", function() {
-          var doc=w.contentWindow.document;
-          var el=doc.querySelector("webview");
-          el.src=args[0];
-        });
-      });
-    return "ok, url "+args[0]+" open";
-  });
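The command registry deleted above is a small name-to-handler table with a built-in `help`. The same pattern in Python, for comparison only (not part of the extension):

class Commands:
    """Name -> (help, runnable) registry mirroring BrowserCommands.js."""

    def __init__(self):
        self._commands = {}

    def add_command(self, name, help_text, runnable):
        if name in self._commands:
            print('WARNING: ignoring duplicate command %s' % name)
            return
        self._commands[name] = (help_text, runnable)

    def run(self, name, args):
        if name == 'help':
            return '\n'.join('%s\t%s' % (n, h)
                             for n, (h, _) in self._commands.items())
        if name not in self._commands:
            raise KeyError('Unknown command %s. Try "help"' % name)
        return self._commands[name][1](args)

registry = Commands()
registry.add_command('echo', 'Echo the arguments', lambda args: ' '.join(args))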
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/commands/webview.html b/client/site_tests/network_FirewallHolePunch/src/tcpserver/commands/webview.html
deleted file mode 100644
index 60d4c43..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/commands/webview.html
+++ /dev/null
@@ -1,11 +0,0 @@
-<!DOCTYPE html>
-<html>
-<head>
-  <meta charset="utf-8">
-  <link href="/styles/webview.css" rel="stylesheet">
-</head>
-<body>
-  <webview autosize="on"></webview>
-</body>
-</html>
-
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/index.html b/client/site_tests/network_FirewallHolePunch/src/tcpserver/index.html
deleted file mode 100644
index f44906d..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/index.html
+++ /dev/null
@@ -1,25 +0,0 @@
-<!DOCTYPE html>
-<html>
-<head>
-  <meta charset="utf-8">
-  <title>Chrome Remote Comando</title>
-  <link href="styles/main.css" rel="stylesheet">
-</head>
-<body>
-  <section id="server">
-  <h1>Chrome Remote Commando</h1>
-  <div class="hide-when-connected">Serve at 
-    <select id="addresses"><option>127.0.0.1</option></select>:
-    <input type="number" id="serverPort" value="8888"></input>
-    <button id="serverStart">Start!</button>
-  </div>
-  <div class="hide-when-not-connected">
-    <p>Serving at <span class="serving-at"></span>
-      <button id="serverStop">Stop!</button></p>
-    <div id="serverlog"></div>
-  </div>
-  </section>
-  <script src="server.js"></script>
-</body>
-</html>
-
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/main.js b/client/site_tests/network_FirewallHolePunch/src/tcpserver/main.js
deleted file mode 100644
index 0b20e38..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/main.js
+++ /dev/null
@@ -1,104 +0,0 @@
-var tcpUI = tcpUI || {};
-var tcpServer;
-var commandWindow;
-
-tcpUI.create = function() {
-  if (commandWindow && !commandWindow.contentWindow.closed) {
-    commandWindow.focus();
-  } else {
-    chrome.app.window.create('index.html',
-        {id: "mainwin", innerBounds: {width: 500, height: 309, left: 0}},
-	    function(w) { commandWindow = w; });
-  }
-}
-
-/**
- * Listens for the app launching then creates the window
- *
- * @see https://developer.chrome.com/apps/app_runtime
- * @see https://developer.chrome.com/apps/app_window
- */
-chrome.app.runtime.onLaunched.addListener(tcpUI.create);
-
-// event logger
-var log = (function(){
-  var logLines = [];
-  var logListener = null;
-
-  var output=function(str) {
-    if (str.length>0 && str.charAt(str.length-1)!='\n') {
-      str+='\n'
-    }
-    logLines.push(str);
-    if (logListener) {
-      logListener(str);
-    }
-  };
-
-  var addListener=function(listener) {
-    logListener=listener;
-    // let's call the new listener with all the old log lines
-    for (var i=0; i<logLines.length; i++) {
-      logListener(logLines[i]);
-    }
-  };
-
-  return {output: output, addListener: addListener};
-})();
-
-function onAcceptCallback(tcpConnection, socketInfo) {
-  var info="["+socketInfo.peerAddress+":"+socketInfo.peerPort+"] Connection accepted!";
-  log.output(info);
-  console.log(socketInfo);
-  tcpConnection.addDataReceivedListener(function(data) {
-    var lines = data.split(/[\n\r]+/);
-    for (var i=0; i<lines.length; i++) {
-      var line=lines[i];
-      if (line.length>0) {
-        var info="["+socketInfo.peerAddress+":"+socketInfo.peerPort+"] "+line;
-        log.output(info);
-
-        var cmd=line.split(/\s+/);
-        try {
-          tcpConnection.sendMessage(Commands.run(cmd[0], cmd.slice(1)));
-        } catch (ex) {
-          tcpConnection.sendMessage(ex);
-        }
-      }
-    }
-  });
-};
-
-function startServer(addr, port) {
-  if (tcpServer) {
-    tcpServer.disconnect();
-  }
-  tcpServer = new TcpServer(addr, port);
-  tcpServer.listen(onAcceptCallback);
-}
-
-
-function stopServer() {
-  if (tcpServer) {
-    tcpServer.disconnect();
-    tcpServer=null;
-  }
-}
-
-function getServerState() {
-  if (tcpServer) {
-    return {isConnected: tcpServer.isConnected(),
-      addr: tcpServer.addr,
-      port: tcpServer.port};
-  } else {
-    return {isConnected: false};
-  }
-}
-
-tcpUI.startServer = function(addr, port) {
-  startServer(addr, port);
-}
-
-tcpUI.stopServer = function() {
-  stopServer();
-}
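The onAcceptCallback removed above splits incoming data on newlines, tokenizes each line, dispatches it through the command registry, and echoes any exception back to the peer. The same flow in Python, with run_command and send as hypothetical stand-ins for Commands.run and tcpConnection.sendMessage:

import re

def handle_incoming(data, run_command, send):
    """Split raw data into command lines and dispatch each one."""
    for line in re.split(r'[\n\r]+', data):
        if not line:
            continue
        tokens = line.split()
        try:
            send(run_command(tokens[0], tokens[1:]))
        except Exception as ex:  # the JS code echoes errors back to the peer
            send(str(ex))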
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/manifest.json b/client/site_tests/network_FirewallHolePunch/src/tcpserver/manifest.json
deleted file mode 100644
index 79a6b66..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/manifest.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "manifest_version": 2,
-  "name": "TCP Server Sample",
-  "version": "3.1",
-  "minimum_chrome_version": "33.0.1715.0",
-  "permissions": ["webview", "system.network"],
-  "app": {
-    "background": {
-      "scripts": ["tcp-server.js", "commands/BrowserCommands.js", "main.js"]
-    }
-  },
-  "sockets": {
-    "tcpServer": {
-      "listen": ""
-    }
-  },
-  "externally_connectable": {
-    "ids" : ["*"],
-    "matches": ["*://*.google.com/*"]
-  }
-}
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/sample_support_metadata.json b/client/site_tests/network_FirewallHolePunch/src/tcpserver/sample_support_metadata.json
deleted file mode 100644
index c9befc9..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/sample_support_metadata.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "sample": "tcpserver",
-  "files_with_snippets": [ ]
-}
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/server.js b/client/site_tests/network_FirewallHolePunch/src/tcpserver/server.js
deleted file mode 100644
index ed74d00..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/server.js
+++ /dev/null
@@ -1,60 +0,0 @@
-
-// quick terminal->textarea simulation
-var log = (function(){
-  var area=document.querySelector("#serverlog");
-  var output=function(str) {
-    if (str.length>0 && str.charAt(str.length-1)!='\n') {
-      str+='\n'
-    }
-    area.innerText=str+area.innerText;
-    if (console) console.log(str);
-  };
-  return {output: output};
-})();
-
-chrome.runtime.getBackgroundPage(function(bgPage) {
-
- bgPage.log.addListener(function(str) {
-    log.output(str);
-  });
-
- bgPage.TcpServer.getNetworkAddresses(function(list) {
-    var addr=document.querySelector("#addresses");
-    for (var i=0; i<list.length; i++) {
-      if (/^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/.test(list[i].address)) {
-        var option = document.createElement('option');
-        option.text = list[i].name+" ("+list[i].address+")";
-        option.value = list[i].address;
-        addr.appendChild(option);
-      }
-    };
-  });
-
-  function setConnectedState(addr, port) {
-    document.querySelector(".serving-at").innerText=addr+":"+port;
-    document.querySelector("#server").className="connected";
-  }
-    
-  function setStoppedState() {
-    document.querySelector("#server").className="";
-  }
-
-  document.getElementById('serverStart').addEventListener('click', function() {
-    var addr=document.getElementById("addresses").value;
-    var port=parseInt(document.getElementById("serverPort").value);
-    setConnectedState(addr, port);
-    bgPage.startServer(addr, port);
-  });
-
-  document.getElementById('serverStop').addEventListener('click', function() {
-    setStoppedState();
-    bgPage.stopServer();
-  })
-
-  var currentState=bgPage.getServerState();
-  if (currentState.isConnected) {
-    setConnectedState(currentState.addr, currentState.port);
-  }  
-
-})
-
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/styles/main.css b/client/site_tests/network_FirewallHolePunch/src/tcpserver/styles/main.css
deleted file mode 100644
index 915e467..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/styles/main.css
+++ /dev/null
@@ -1,54 +0,0 @@
-body {
-  font-family: Arial, sans-serif;
-  background: #333;
-  color: white;
-}
-
-.hide-when-not-connected {
-  display: none;
-}
-
-.connected .hide-when-not-connected {
-  display: block;
-}
-
-
-.connected .hide-when-not-connected p {
-  color: #33ff44
-}
-
-.connected .hide-when-connected {
-  display: none;
-}
-
-input[type="number"] {
-  width: 5em;
-}
-
-button {
-  cursor: pointer;
-  margin-left: 20px;
-}
-
-#serverStop {
-  background-color: #D14836;
-  background-image: -webkit-linear-gradient(top,#DD4B39,#D14836);
-  border: 1px solid transparent;
-  color: white;
-  border-radius: 2px;
-}
-
-#serverStop:hover {
-  background-color: #C53727;
-  background-image: -webkit-linear-gradient(top, #DD4B39, #C53727);
-  border: 1px solid #B0281A;
-}
-#serverlog {
-  background: #444;
-  color: #999;
-	width: 480px;
-	height: 180px;
-	border: 1px solid #666;
-	padding: 3px;
-	overflow: auto;
-}
\ No newline at end of file
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/styles/webview.css b/client/site_tests/network_FirewallHolePunch/src/tcpserver/styles/webview.css
deleted file mode 100644
index d2bb948..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/styles/webview.css
+++ /dev/null
@@ -1,8 +0,0 @@
-html, body, webview {
-  height: 100%;
-  width: 100%;
-}
-
-body {
-  margin: 0;
-}
diff --git a/client/site_tests/network_FirewallHolePunch/src/tcpserver/tcp-server.js b/client/site_tests/network_FirewallHolePunch/src/tcpserver/tcp-server.js
deleted file mode 100644
index 7e9c57e..0000000
--- a/client/site_tests/network_FirewallHolePunch/src/tcpserver/tcp-server.js
+++ /dev/null
@@ -1,389 +0,0 @@
-/*
-Copyright 2012 Google Inc.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Renato Mangini (mangini@chromium.org)
-*/
-
-const DEFAULT_MAX_CONNECTIONS=5;
-
-(function(exports) {
-
-  // Define some local variables here.
-  var socket = chrome.sockets.tcpServer;
-
-  /**
-   * Creates an instance of the server
-   *
-   * @param {String} addr The address to listen on
-   * @param {Number} port The port to listen on at that address
-   */
-  function TcpServer(addr, port, options) {
-    this.addr = addr;
-    this.port = port;
-    this.maxConnections = typeof(options) != 'undefined'
-        && options.maxConnections || DEFAULT_MAX_CONNECTIONS;
-
-    this._onAccept = this._onAccept.bind(this);
-    this._onAcceptError = this._onAcceptError.bind(this);
-
-    // Callback functions.
-    this.callbacks = {
-      listen: null,    // Called when socket is connected.
-      connect: null,    // Called when socket is connected.
-      disconnect: null, // Called when socket is disconnected.
-      recv: null,       // Called when client receives data from server.
-      sent: null        // Called when client sends data to server.
-    };
-
-    // Sockets open
-    this.openSockets=[];
-
-    // server socket (one server connection, accepts and opens one socket per client)
-    this.serverSocketId = null;
-
-    log('initialized tcp server, not listening yet');
-  }
-
-
-  /**
-   * Static method to return available network interfaces.
-   *
-   * @see https://developer.chrome.com/apps/system_network#method-getNetworkInterfaces
-   *
-   * @param {Function} callback The function to call with the available network
-   * interfaces. The callback parameter is an array of
-   * {name(string), address(string)} objects. Use the address property of the
-   * preferred network as the addr parameter on the TcpServer constructor.
-   */
-  TcpServer.getNetworkAddresses=function(callback) {
-    chrome.system.network.getNetworkInterfaces(callback);
-  }
-
-  TcpServer.prototype.isConnected=function() {
-    return this.serverSocketId > 0;
-  }
-
-  /**
-   * Connects to the TCP socket, and creates an open socket.
-   *
-   * @see https://developer.chrome.com/apps/sockets_tcpServer#method-create
-   * @param {Function} callback The function to call on connection
-   */
-  TcpServer.prototype.listen = function(callback) {
-    // Register connect callback.
-    this.callbacks.connect = callback;
-    socket.create({}, this._onCreate.bind(this));
-  };
-
-
-  /**
-   * Disconnects from the remote side
-   *
-   * @see https://developer.chrome.com/apps/sockets_tcpServer#method-disconnect
-   */
-  TcpServer.prototype.disconnect = function() {
-    if (this.serverSocketId) {
-      socket.onAccept.removeListener(this._onAccept);
-      socket.onAcceptError.removeListener(this._onAcceptError);
-      socket.close(this.serverSocketId);
-    }
-    for (var i=0; i<this.openSockets.length; i++) {
-      try {
-        this.openSockets[i].close();
-      } catch (ex) {
-        console.log(ex);
-      }
-    }
-    this.openSockets=[];
-    this.serverSocketId=0;
-  };
-
-  /**
-   * The callback function used for when we attempt to have Chrome
-   * create a socket. If the socket is successfully created
-   * we go ahead and start listening for incoming connections.
-   *
-   * @private
-   * @see https://developer.chrome.com/apps/sockets_tcpServer#method-listen
-   * @param {Object} createInfo The socket details
-   */
-  TcpServer.prototype._onCreate = function(createInfo) {
-    this.serverSocketId = createInfo.socketId;
-    if (this.serverSocketId > 0) {
-      socket.onAccept.addListener(this._onAccept);
-      socket.onAcceptError.addListener(this._onAcceptError);
-      socket.listen(this.serverSocketId, this.addr, this.port, 50,
-        this._onListenComplete.bind(this));
-      this.isListening = true;
-    } else {
-      error('Unable to create socket');
-    }
-  };
-
-  /**
-   * The callback function used for when we attempt to have Chrome
-   * connect to the remote side. If a successful connection is
-   * made then we accept it by opening it in a new socket (accept method)
-   *
-   * @private
-   */
-  TcpServer.prototype._onListenComplete = function(resultCode) {
-    if (resultCode !==0) {
-      error('Unable to listen to socket. Resultcode='+resultCode);
-    }
-  }
-
-  TcpServer.prototype._onAccept = function (info) {
-    if (info.socketId != this.serverSocketId)
-      return;
-
-    if (this.openSockets.length >= this.maxConnections) {
-      this._onNoMoreConnectionsAvailable(info.clientSocketId);
-      return;
-    }
-
-    var tcpConnection = new TcpConnection(info.clientSocketId);
-    this.openSockets.push(tcpConnection);
-
-    tcpConnection.requestSocketInfo(this._onSocketInfo.bind(this));
-    log('Incoming connection handled.');
-  }
-
-  TcpServer.prototype._onAcceptError = function(info) {
-    if (info.socketId != this.serverSocketId)
-      return;
-
-    error('Unable to accept incoming connection. Error code=' + info.resultCode);
-  }
-
-  TcpServer.prototype._onNoMoreConnectionsAvailable = function(socketId) {
-    var msg="No more connections available. Try again later\n";
-    _stringToArrayBuffer(msg, function(arrayBuffer) {
-      chrome.sockets.tcp.send(socketId, arrayBuffer,
-        function() {
-          chrome.sockets.tcp.close(socketId);
-        });
-    });
-  }
-
-  TcpServer.prototype._onSocketInfo = function(tcpConnection, socketInfo) {
-    if (this.callbacks.connect) {
-      this.callbacks.connect(tcpConnection, socketInfo);
-    }
-  }
-
-  /**
-   * Holds a connection to a client
-   *
-   * @param {number} socketId The ID of the server<->client socket
-   */
-  function TcpConnection(socketId) {
-    this.socketId = socketId;
-    this.socketInfo = null;
-
-    // Callback functions.
-    this.callbacks = {
-      disconnect: null, // Called when socket is disconnected.
-      recv: null,       // Called when client receives data from server.
-      sent: null        // Called when client sends data to server.
-    };
-
-    log('Established client connection. Listening...');
-
-  };
-
-  TcpConnection.prototype.setSocketInfo = function(socketInfo) {
-    this.socketInfo = socketInfo;
-  };
-
-  TcpConnection.prototype.requestSocketInfo = function(callback) {
-    chrome.sockets.tcp.getInfo(this.socketId,
-      this._onSocketInfo.bind(this, callback));
-  };
-
-  /**
-   * Add receive listeners for when a message is received
-   *
-   * @param {Function} callback The function to call when a message has arrived
-   */
-  TcpConnection.prototype.startListening = function(callback) {
-    this.callbacks.recv = callback;
-
-    // Add receive listeners.
-    this._onReceive = this._onReceive.bind(this);
-    this._onReceiveError = this._onReceiveError.bind(this);
-    chrome.sockets.tcp.onReceive.addListener(this._onReceive);
-    chrome.sockets.tcp.onReceiveError.addListener(this._onReceiveError);
-
-    chrome.sockets.tcp.setPaused(this.socketId, false);
-  };
-
-  /**
-   * Sets the callback for when a message is received
-   *
-   * @param {Function} callback The function to call when a message has arrived
-   */
-  TcpConnection.prototype.addDataReceivedListener = function(callback) {
-    // If this is the first time a callback is set, start listening for incoming data.
-    if (!this.callbacks.recv) {
-      this.startListening(callback);
-    } else {
-      this.callbacks.recv = callback;
-    }
-  };
-
-
-  /**
-   * Sends a message down the wire to the remote side
-   *
-   * @see https://developer.chrome.com/apps/sockets_tcp#method-send
-   * @param {String} msg The message to send
-   * @param {Function} callback The function to call when the message has been sent
-   */
-  TcpConnection.prototype.sendMessage = function(msg, callback) {
-    _stringToArrayBuffer(msg + '\n', function(arrayBuffer) {
-      chrome.sockets.tcp.send(this.socketId, arrayBuffer, this._onWriteComplete.bind(this));
-    }.bind(this));
-
-    // Register sent callback.
-    this.callbacks.sent = callback;
-  };
-
-
-  /**
-   * Disconnects from the remote side
-   *
-   * @see https://developer.chrome.com/apps/sockets_tcp#method-close
-   */
-  TcpConnection.prototype.close = function() {
-    if (this.socketId) {
-      chrome.sockets.tcp.onReceive.removeListener(this._onReceive);
-      chrome.sockets.tcp.onReceiveError.removeListener(this._onReceiveError);
-      chrome.sockets.tcp.close(this.socketId);
-    }
-  };
-
-
-  /**
-   * Callback function for when socket details (socketInfo) are received.
-   * Stores the socketInfo for future reference and passes it to the
-   * callback provided as a parameter.
-   *
-   * @private
-   */
-  TcpConnection.prototype._onSocketInfo = function(callback, socketInfo) {
-    if (callback && typeof(callback)!='function') {
-      throw "Illegal value for callback: "+callback;
-    }
-    this.socketInfo = socketInfo;
-    callback(this, socketInfo);
-  }
-
-  /**
-   * Callback function for when data has been read from the socket.
-   * Converts the array buffer that is read in to a string
-   * and sends it on for further processing by passing it to
-   * the previously assigned callback function.
-   *
-   * @private
-   * @see TcpConnection.prototype.addDataReceivedListener
-   * @param {Object} info The incoming receive info
-   */
-  TcpConnection.prototype._onReceive = function(info) {
-    if (this.socketId != info.socketId)
-      return;
-
-    // Call received callback if there's data in the response.
-    if (this.callbacks.recv) {
-      log('onDataRead');
-      // Convert ArrayBuffer to string.
-      _arrayBufferToString(info.data, this.callbacks.recv.bind(this));
-    }
-  };
-
-  TcpConnection.prototype._onReceiveError = function (info) {
-    if (this.socketId != info.socketId)
-      return;
-    this.close();
-  };
-
-  /**
-   * Callback for when data has been successfully
-   * written to the socket.
-   *
-   * @private
-   * @param {Object} writeInfo The outgoing message
-   */
-  TcpConnection.prototype._onWriteComplete = function(writeInfo) {
-    log('onWriteComplete');
-    // Call sent callback.
-    if (this.callbacks.sent) {
-      this.callbacks.sent(writeInfo);
-    }
-  };
-
-
-
-  /**
-   * Converts an array buffer to a string
-   *
-   * @private
-   * @param {ArrayBuffer} buf The buffer to convert
-   * @param {Function} callback The function to call when conversion is complete
-   */
-  function _arrayBufferToString(buf, callback) {
-    var bb = new Blob([new Uint8Array(buf)]);
-    var f = new FileReader();
-    f.onload = function(e) {
-      callback(e.target.result);
-    };
-    f.readAsText(bb);
-  }
-
-  /**
-   * Converts a string to an array buffer
-   *
-   * @private
-   * @param {String} str The string to convert
-   * @param {Function} callback The function to call when conversion is complete
-   */
-  function _stringToArrayBuffer(str, callback) {
-    var bb = new Blob([str]);
-    var f = new FileReader();
-    f.onload = function(e) {
-        callback(e.target.result);
-    };
-    f.readAsArrayBuffer(bb);
-  }
-
-
-  /**
-   * Wrapper function for logging
-   */
-  function log(msg) {
-    console.log(msg);
-  }
-
-  /**
-   * Wrapper function for error logging
-   */
-  function error(msg) {
-    console.error(msg);
-  }
-
-  exports.TcpServer = TcpServer;
-  exports.TcpConnection = TcpConnection;
-
-})(window);
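tcp-server.js, deleted above, is essentially a threaded accept loop plus per-connection line handling. A rough Python analogue using only the standard library, included just to summarize the shape of the code being removed (names and defaults are illustrative):

import socketserver

class LineHandler(socketserver.StreamRequestHandler):
    """Read newline-terminated commands and write one reply per line."""

    def handle(self):
        for raw in self.rfile:
            line = raw.decode(errors='replace').strip()
            if line:
                self.wfile.write(('got: %s\n' % line).encode())

def serve(addr='127.0.0.1', port=8888):
    """Serve until interrupted; roughly TcpServer.listen() plus disconnect()."""
    with socketserver.ThreadingTCPServer((addr, port), LineHandler) as server:
        server.serve_forever()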
diff --git a/client/site_tests/network_Ipv6SimpleNegotiation/control b/client/site_tests/network_Ipv6SimpleNegotiation/control
index 9b117fc..bc18067 100644
--- a/client/site_tests/network_Ipv6SimpleNegotiation/control
+++ b/client/site_tests/network_Ipv6SimpleNegotiation/control
@@ -15,6 +15,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
   Tests that we can acquire an IPv6 globally scoped address and IPv6 DNS
diff --git a/client/site_tests/network_Ipv6SimpleNegotiation/network_Ipv6SimpleNegotiation.py b/client/site_tests/network_Ipv6SimpleNegotiation/network_Ipv6SimpleNegotiation.py
index edc5e6a..ed62211 100644
--- a/client/site_tests/network_Ipv6SimpleNegotiation/network_Ipv6SimpleNegotiation.py
+++ b/client/site_tests/network_Ipv6SimpleNegotiation/network_Ipv6SimpleNegotiation.py
@@ -68,7 +68,7 @@
         for ipconfig in self.get_interface_ipconfig_objects(
                 self.ethernet_pair.peer_interface_name):
             ipconfig_properties = shill_proxy.ShillProxy.dbus2primitive(
-                    ipconfig.GetProperties(utf8_strings=True))
+                    ipconfig.GetProperties())
             if 'Method' not in ipconfig_properties:
                 continue
 
@@ -141,14 +141,14 @@
 
         for property, value in (('Address', address), ('Prefixlen', prefix)):
             if property not in ipconfig_properties:
-               raise error.TestError('IPv6 IPConfig entry does not '
-                                     'contain property %s' % property)
+                raise error.TestError('IPv6 IPConfig entry does not '
+                                      'contain property %s' % property)
             if ipconfig_properties[property] != value:
-               raise error.TestError('IPv6 IPConfig property %s does not '
-                                     'contain the expected value %s; '
-                                     'instead it is %s' %
-                                     (property, value,
-                                      ipconfig_properties[property]))
+                raise error.TestError(
+                        'IPv6 IPConfig property %s does not '
+                        'contain the expected value %s; '
+                        'instead it is %s' %
+                        (property, value, ipconfig_properties[property]))
 
 
     def verify_ipconfig_name_servers(self, name_servers):
@@ -169,7 +169,9 @@
 
     def test_body(self):
         """The main body for this test."""
-        server = radvd_server.RadvdServer(self.ethernet_pair.interface_name)
+        server = radvd_server.RadvdServer(
+                self.ethernet_pair.interface_name,
+                self.ethernet_pair.interface_namespace)
         server.start_server()
 
         try:
diff --git a/client/site_tests/network_NegotiatedLANSpeed/control b/client/site_tests/network_NegotiatedLANSpeed/control
deleted file mode 100644
index bc2d0f5..0000000
--- a/client/site_tests/network_NegotiatedLANSpeed/control
+++ /dev/null
@@ -1,14 +0,0 @@
-AUTHOR = "Chrome OS Team"
-NAME = "network_NegotiatedLANSpeed"
-PURPOSE = "Ensure wired network interface is running at least 1Gbps."
-CRITERIA = "Fails if wired network speed is less than 1Gbps."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Network"
-TEST_TYPE = "client"
-
-DOC = """
-This test checks that a wired network interface is running at 1000Mbps or faster.
-"""
-
-job.run_test('network_NegotiatedLANSpeed', iface_name = 'eth0', tag = 'eth0')
diff --git a/client/site_tests/network_NegotiatedLANSpeed/network_NegotiatedLANSpeed.py b/client/site_tests/network_NegotiatedLANSpeed/network_NegotiatedLANSpeed.py
deleted file mode 100644
index 76119a1..0000000
--- a/client/site_tests/network_NegotiatedLANSpeed/network_NegotiatedLANSpeed.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import logging, re, utils
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-class network_NegotiatedLANSpeed(test.test):
-    version = 1
-
-
-    def run_once(self, iface_name = 'eth0'):
-        # bring up the interface if its not already up
-        if not self.iface_up(iface_name):
-            utils.system('ifconfig %s up' % iface_name)
-            if not self.iface_up(iface_name):
-                raise error.TestFail('interface failed to come up')
-        # confirm negotiated bandwidth is acceptable
-        if not int(self.get_speed(iface_name)) >= 1000:
-            raise error.TestFail('interface failed to negotiate at 1000Mbps')
-
-
-    def iface_up(self, name):
-        try:
-            out = utils.system_output('ifconfig %s' % name)
-        except error.CmdError, e:
-            logging.info(e)
-            raise error.TestFail('test interface not found')
-        match = re.search('UP', out, re.S)
-        return match
-
-
-    def get_speed(self, name):
-        try:
-            out = utils.system_output('ethtool %s | grep Speed | \
-                sed s/^.*:.// | sed s/M.*$//' % name)
-        except error.CmdError, e:
-            logging.info(e)
-            raise error.TestFail('unable to determine negotiated link speed')
-        return out
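The deleted get_speed() shells out to ethtool through a sed pipeline and uses the Python 2 `except ..., e` syntax. Parsing the same output directly in Python avoids both; a sketch that assumes the usual 'Speed: 1000Mb/s' line in ethtool output:

import re
import subprocess

def link_speed_mbps(iface='eth0'):
    """Return the negotiated link speed in Mb/s by parsing ethtool output."""
    out = subprocess.run(['ethtool', iface], check=True,
                         capture_output=True, text=True).stdout
    match = re.search(r'Speed:\s*(\d+)\s*Mb/s', out)
    if match is None:
        raise RuntimeError('could not determine link speed for %s' % iface)
    return int(match.group(1))

# The old pass criterion, expressed against this helper:
# assert link_speed_mbps('eth0') >= 1000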
diff --git a/client/site_tests/network_PortalStress/control b/client/site_tests/network_PortalStress/control
deleted file mode 100644
index f1012f4..0000000
--- a/client/site_tests/network_PortalStress/control
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'The Chromium OS Authors'
-DOC = """
-This test puts the network interfaces through hundred(s) of
-connect/disconnect cycles and verifies that flimflam properly detects
-that the service is in either the online or the portal state.
-
-Use test_that with --args=service_name=cell to test the
-portal code on cellular devices.
-"""
-NAME = 'network_PortalStress'
-PURPOSE = 'Stress test the portal detection code.'
-CRITERIA = """
-    Fails if portal code detects portal state when
-    clients3.google.com is accessible or if portal code detects online
-    when clients3.google.com is not accessible.
-"""
-TIME = 'LONG'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'network'
-TEST_TYPE = 'client'
-
-import logging
-logging.info('args = %s' % args)
-
-if args:
-    args = args[0].split(':')
-
-if args:
-    dict_args = dict([x.split('=') for x in args])
-else:
-    dict_args = {}
-
-# Run online cycle for hundred(s) of iterations because this is fast
-job.run_test('network_Portal',
-             force_portal=False, tag='online', test_iterations=100, **dict_args)
-
-# Run the portal cycle for fewer iterations because each test must time
-# out and that makes it exceedingly slow (15 seconds/iteration)
-job.run_test('network_Portal',
-             force_portal=True, tag='portal', test_iterations=20, **dict_args)
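The args handling in the deleted control file turns a colon-separated --args string into keyword arguments. The same parsing as a standalone helper; split('=', 1) is a small hardening over the original split('='):

def parse_control_args(args):
    """Turn the test_that --args payload (e.g. ['service_name=cell:foo=bar'])
    into a dict, the same way the deleted control file does."""
    if not args:
        return {}
    pairs = args[0].split(':')
    return dict(pair.split('=', 1) for pair in pairs)

# parse_control_args(['service_name=cell']) -> {'service_name': 'cell'}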
diff --git a/client/site_tests/network_ProxyResolver/control b/client/site_tests/network_ProxyResolver/control
deleted file mode 100755
index df86950..0000000
--- a/client/site_tests/network_ProxyResolver/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-DOC = "Sets a proxy in Chrome and ensures it resolves over DBus"
-NAME = "network_ProxyResolver"
-PURPOSE = "Verify Chrome-based proxy resolving works"
-CRITERIA = "Fails if Chrome returns incorrect or malformed data"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-
-# TODO(wad) cases: OOBE with network configured,
-#                  OOBE when set by the user,
-#                  OOBE when set by policy
-job.run_test(
-  'network_ProxyResolver',
-  test_type='test_same_ip_proxy_at_signin_chrome_system_tlsdated',
-  service_name='Ethernet')
diff --git a/client/site_tests/network_ProxyResolver/network_ProxyResolver.py b/client/site_tests/network_ProxyResolver/network_ProxyResolver.py
deleted file mode 100755
index 08b44d2..0000000
--- a/client/site_tests/network_ProxyResolver/network_ProxyResolver.py
+++ /dev/null
@@ -1,364 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.;
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# This is an integration test which ensures that a proxy set on a
-# shared network connection is exposed via LibCrosSevice and used
-# by tlsdated during time synchronization.
-
-import dbus
-import gobject
-import logging
-import subprocess
-import threading
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import cros_ui
-from autotest_lib.client.cros.networking import shill_proxy
-
-from dbus.mainloop.glib import DBusGMainLoop
-from SocketServer import ThreadingTCPServer, StreamRequestHandler
-
-class ProxyHandler(StreamRequestHandler):
-    """Matching request handler for the ThreadedHitServer
-       that notes when an expected request is seen.
-    """
-    wbufsize = -1
-    def handle(self):
-        """Reads the first line, up to 40 characters, looking
-           for the CONNECT string that tlsdated sends. If it
-           is found, the server's hit() method is called.
-
-           All requests receive an HTTP 504 error.
-        """
-        # Read up to 40 characters
-        data = self.rfile.readline(40).strip()
-        logging.info('ProxyHandler::handle(): <%s>', data)
-        # TODO(wad) Add User-agent check when it lands in tlsdate.
-        # Also, abstract the time server and move this code into cros/.
-        if data.__contains__('CONNECT clients3.google.com:443 HTTP/1.1'):
-          self.server.hit()
-        self.wfile.write("HTTP/1.1 504 Gateway Timeout\r\n" +
-                         "Connection: close\r\n\r\n")
-
-class ThreadedHitServer(ThreadingTCPServer):
-    """A threaded TCP server which services requests
-       and allows the handler to track "hits".
-    """
-    def __init__(self, server_address, HandlerClass):
-        """Constructor
-
-        @param server_address: tuple of server IP and port to listen on.
-        @param HandlerClass: the RequestHandler class to instantiate per req.
-        """
-        self._hits = 0
-        ThreadingTCPServer.__init__(self, server_address, HandlerClass)
-
-    def hit(self):
-        """Increment the hit count. Usually called by the HandlerClass"""
-        self._hits += 1
-
-    def reset_hits(self):
-        """Set the hit count to 0"""
-        self._hits = 0
-
-    def hits(self):
-        """Get the number of matched requests
-        @return the count of matched requests
-        """
-        return self._hits
-
-class ProxyListener(object):
-    """A fake listener for tracking if an expected CONNECT request is
-       seen at the provided server address. Any hits are exposed to be
-       consumed by the caller.
-    """
-    def __init__(self, server_address):
-        """Constructor
-
-        @param server_address: tuple of server IP and port to listen on.
-        """
-        self._server = ThreadedHitServer(server_address, ProxyHandler)
-        self._thread = threading.Thread(target=self._server.serve_forever)
-
-    def run(self):
-        """Run the server on a thread"""
-        self._thread.start()
-
-    def stop(self):
-        """Stop the server and its threads"""
-        self._server.shutdown()
-        self._server.socket.close()
-        self._thread.join()
-
-    def reset_hits(self):
-        """Reset the number of matched requests to 0"""
-        return self._server.reset_hits()
-
-    def hits(self):
-        """Get the number of matched requests
-        @return the count of matched requests
-        """
-        return self._server.hits()
-
-class SignalListener(object):
-    """A class to listen for a DBus signal
-    """
-    DEFAULT_TIMEOUT = 60
-    _main_loop = None
-    _signals = { }
-
-    def __init__(self, g_main_loop):
-        """Constructor
-
-        @param g_main_loop: glib main loop object.
-        """
-        self._main_loop = g_main_loop
-
-
-    def listen_for_signal(self, signal, interface, path):
-        """Listen with a default handler
-        @param signal: signal name to listen for
-        @param interface: DBus interface to expect it from
-        @param path: DBus path associated with the signal
-        """
-        self.__listen_to_signal(self.__handle_signal, signal, interface, path)
-
-
-    def wait_for_signals(self, desc,
-                         timeout=DEFAULT_TIMEOUT):
-        """Block for |timeout| seconds waiting for the signals to come in.
-
-        @param desc: string describing the high-level reason you're waiting
-                     for the signals.
-        @param timeout: maximum seconds to wait for the signals.
-
-        @raises TimeoutError if the timeout is hit.
-        """
-        utils.poll_for_condition(
-            condition=lambda: self.__received_signals(),
-            desc=desc,
-            timeout=self.DEFAULT_TIMEOUT)
-        all_signals = self._signals.copy()
-        self.__reset_signal_state()
-        return all_signals
-
-
-    def __received_signals(self):
-        """Run main loop until all pending events are done, checks for signals.
-
-        Runs self._main_loop until it says it has no more events pending,
-        then returns the state of the internal variables tracking whether
-        desired signals have been received.
-
-        @return True if both signals have been handled, False otherwise.
-        """
-        context = self._main_loop.get_context()
-        while context.iteration(False):
-            pass
-        return len(self._signals) > 0
-
-
-    def __reset_signal_state(self):
-        """Resets internal signal tracking state."""
-        self._signals = { }
-
-
-    def __listen_to_signal(self, callback, signal, interface, path):
-        """Connect a callback to a given session_manager dbus signal.
-
-        Sets up a signal receiver for signal, and calls the provided callback
-        when it comes in.
-
-        @param callback: a callable to call when signal is received.
-        @param signal: the signal to listen for.
-        """
-        bus = dbus.SystemBus(mainloop=self._main_loop)
-        bus.add_signal_receiver(
-            handler_function=callback,
-            signal_name=signal,
-            dbus_interface=interface,
-            bus_name=None,
-            path=path,
-            member_keyword='signal_name')
-
-
-    def __handle_signal(self, *args, **kwargs):
-        """Callback to be used when a new key signal is received."""
-        signal_name = kwargs.pop('signal_name', '')
-        #signal_data = str(args[0])
-        logging.info("SIGNAL: " + signal_name + ", " + str(args));
-        if self._signals.has_key(signal_name):
-          self._signals[signal_name].append(args)
-        else:
-          self._signals[signal_name] = [args]
-
-
-class network_ProxyResolver(test.test):
-    """A test fixture for validating the integration of
-       shill, Chrome, and tlsdated's proxy resolution.
-    """
-    version = 1
-    auto_login = False
-    service_settings = { }
-
-    TIMEOUT = 360
-
-    def initialize(self):
-       """Constructor
-          Sets up the test such that all DBus signals can be
-          received and a fake proxy server can be instantiated.
-          Additionally, the UI is restarted to ensure consistent
-          shared network use.
-       """
-       super(network_ProxyResolver, self).initialize()
-       cros_ui.stop()
-       cros_ui.start()
-       DBusGMainLoop(set_as_default=True)
-       self._listener = SignalListener(gobject.MainLoop())
-       self._shill = shill_proxy.ShillProxy.get_proxy()
-       if self._shill is None:
-         raise error.TestFail('Could not connect to shill')
-       # Listen for network property changes
-       self._listener.listen_for_signal('PropertyChanged',
-                                        'org.chromium.flimflam.Service',
-                                        '/')
-       # Listen on the proxy port.
-       self._proxy_server = ProxyListener(('', 3128))
-
-    # Set the proxy with Shill. This only works for shared connections
-    # (like Eth).
-    def set_proxy(self, service_name, proxy_config):
-        """Changes the ProxyConfig property on the specified shill service.
-
-        @param service_name: the name, as a str, of the shill service
-        @param proxy_config: the ProxyConfig property value string
-
-        @raises TestFail if the service is not found.
-        """
-        shill = self._shill
-        service = shill.find_object('Service', { 'Name' : service_name })
-        if not service:
-            raise error.TestFail('Service ' + service_name +
-                                 ' not found to test proxy with.')
-        props = service.GetProperties()
-        old_proxy = ''
-        if props.has_key('ProxyConfig'):
-          old_proxy = props['ProxyConfig']
-        if self.service_settings.has_key(service_name) == False:
-          logging.info('Preexisting ProxyConfig: ' + service_name +
-                       ' -> ' + old_proxy)
-          self.service_settings[service_name] = old_proxy
-        logging.info('Setting proxy to ' + proxy_config)
-        service.SetProperties({'ProxyConfig': proxy_config})
-
-
-    def reset_services(self):
-        """Walks the dict of service->ProxyConfig values and sets the
-           proxy back to the originally observed value.
-        """
-        if len(self.service_settings) == 0:
-          return
-        for k,v in self.service_settings.items():
-          logging.info('Resetting ProxyConfig: ' + k + ' -> ' + v)
-          self.set_proxy(k, v)
-
-
-    def check_chrome(self, proxy_type, proxy_config, timeout):
-        """Check that Chrome has acknowledged the supplied proxy config
-           by asking for resolution over DBus.
-
-        @param proxy_type: PAC-style string type (e.g., 'PROXY', 'SOCKS')
-        @param proxy_config: PAC-style config string (e.g., 127.0.0.1:1234)
-        @param timeout: time in seconds to wait for Chrome to issue a signal.
-
-        @return True if a matching response is seen and False otherwise
-        """
-        bus = dbus.SystemBus()
-        dbus_proxy = bus.get_object('org.chromium.NetworkProxyService',
-                                    '/org/chromium/NetworkProxyService')
-        service = dbus.Interface(dbus_proxy,
-                                 'org.chromium.NetworkProxyServiceInterface')
-
-        attempts = timeout
-        while attempts > 0:
-          result, _ = service.ResolveProxy('https://clients3.google.com')
-          if str(result) == proxy_type + ' ' + proxy_config:
-            return True
-          attempts -= 1
-          time.sleep(1)
-        logging.error('Last response seen before giving up: ' + str(result))
-        return False
-
-    def check_tlsdated(self, timeout):
-        """Check that tlsdated uses the set proxy.
-        @param timeout: time in seconds to wait for tlsdate to restart and query
-        @return True if tlsdated hits the proxy server and False otherwise
-        """
-        # Restart tlsdated to force a network resync
-        # (The other option is to force it to think there is no network sync.)
-        try:
-            self._proxy_server.run()
-        except Exception as e:
-            logging.error("Proxy error =>" + str(e))
-            return False
-        logging.info("proxy started!")
-        status = subprocess.call(['initctl', 'restart', 'tlsdated'])
-        if status != 0:
-          logging.info("failed to restart tlsdated")
-          return False
-        attempts = timeout
-        logging.info("waiting for hits on the proxy server")
-        while attempts > 0:
-          if self._proxy_server.hits() > 0:
-            self._proxy_server.reset_hits()
-            return True
-          time.sleep(1)
-          attempts -= 1
-        logging.info("no hits")
-        return False
-
-
-    def cleanup(self):
-        """Reset all the service data and teardown the proxy."""
-        self.reset_services()
-        logging.info("tearing down the proxy server")
-        self._proxy_server.stop()
-        logging.info("proxy server down")
-        super(network_ProxyResolver, self).cleanup()
-
-
-    def test_same_ip_proxy_at_signin_chrome_system_tlsdated(
-                                                        self,
-                                                        service_name,
-                                                        test_timeout=TIMEOUT):
-        """ Set the user policy, waits for condition, then logs out.
-
-        @param service_name: shill service name to test on
-        @param test_timeout: the total time in seconds split among all timeouts.
-        """
-        proxy_type = 'http'
-        proxy_port = '3128'
-        proxy_host = '127.0.0.1'
-        proxy_url = proxy_type + '://' + proxy_host + ':' + proxy_port
-        # TODO(wad) Only do the below if it was a single protocol proxy.
-        # proxy_config = proxy_type + '=' + proxy_host + ':' + proxy_port
-        proxy_config = proxy_host + ':' + proxy_port
-        self.set_proxy(service_name, '{"mode":"fixed_servers","server":"' +
-                                     proxy_config + '"}')
-
-        logging.info("checking chrome")
-        if self.check_chrome('PROXY', proxy_config, test_timeout/3) == False:
-          raise error.TestFail('Chrome failed to resolve the proxy')
-
-        # Restart tlsdate to force a network fix
-        logging.info("checking tlsdated")
-        if self.check_tlsdated(test_timeout/3) == False:
-          raise error.TestFail('tlsdated never tried the proxy')
-        logging.info("done!")
-
-    def run_once(self, test_type, **params):
-        logging.info('client: Running client test %s', test_type)
-        getattr(self, test_type)(**params)
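Two notes on the deleted network_ProxyResolver test: the ProxyConfig value was pasted together by string concatenation, and proxy resolution went through the org.chromium.NetworkProxyService DBus interface shown above (also, the SocketServer import it used is Python 2 only; Python 3 has socketserver). A hedged sketch of both pieces, reusing only names that appear in the deleted code and assuming the same two-element return from ResolveProxy the test relied on:

import json
import dbus

def fixed_servers_proxy_config(host='127.0.0.1', port=3128):
    """Build the shill ProxyConfig string; json.dumps avoids manual quoting."""
    return json.dumps({'mode': 'fixed_servers',
                       'server': '%s:%d' % (host, port)})

def resolve_proxy(url='https://clients3.google.com'):
    """Ask Chrome's proxy service to resolve |url|, as check_chrome() did."""
    bus = dbus.SystemBus()
    proxy = bus.get_object('org.chromium.NetworkProxyService',
                           '/org/chromium/NetworkProxyService')
    iface = dbus.Interface(proxy,
                           'org.chromium.NetworkProxyServiceInterface')
    result, error_msg = iface.ResolveProxy(url)
    return str(result), str(error_msg)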
diff --git a/client/site_tests/network_RackWiFiConnect/control b/client/site_tests/network_RackWiFiConnect/control
deleted file mode 100644
index 0233e67..0000000
--- a/client/site_tests/network_RackWiFiConnect/control
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'tienchang'
-TIME = 'SHORT'
-NAME = 'network_RackWiFiConnect'
-TEST_TYPE = 'Client'
-
-DOC = """
-network_RackWiFiConnect uses enterprise managed users on the crostest.tv
-domain to connect to preconfigured network services on our WiFi rack.
-"""
-
-job.run_test('network_RackWiFiConnect',
-             test='all')
diff --git a/client/site_tests/network_RackWiFiConnect/control.EAP_TTLS b/client/site_tests/network_RackWiFiConnect/control.EAP_TTLS
deleted file mode 100644
index 8205d5f..0000000
--- a/client/site_tests/network_RackWiFiConnect/control.EAP_TTLS
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'tienchang'
-TIME = 'SHORT'
-NAME = 'network_RackWiFiConnect.EAP_TTLS'
-TEST_TYPE = 'Client'
-
-DOC = """
-network_RackWiFiConnect test uses enterprise managed users on the crostest.tv
-domain to connect to preconfigured network services on our WiFi rack, in this
-case an EAP-TTLS network.
-"""
-
-from autotest_lib.client.common_lib.cros.network import wifi_rack_constants
-
-job.run_test('network_RackWiFiConnect',
-             test=wifi_rack_constants.EAP_TTLS.testname)
diff --git a/client/site_tests/network_RackWiFiConnect/control.PEAP b/client/site_tests/network_RackWiFiConnect/control.PEAP
deleted file mode 100644
index 5d95217..0000000
--- a/client/site_tests/network_RackWiFiConnect/control.PEAP
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'tienchang'
-TIME = 'SHORT'
-NAME = 'network_RackWiFiConnect.PEAP'
-TEST_TYPE = 'Client'
-
-DOC = """
-network_RackWiFiConnect test uses enterprise managed users on the crostest.tv
-domain to connect to preconfigured network services on our WiFi rack, in this
-case a PEAP network.
-"""
-
-from autotest_lib.client.common_lib.cros.network import wifi_rack_constants
-
-job.run_test('network_RackWiFiConnect',
-             test=wifi_rack_constants.PEAP.testname)
diff --git a/client/site_tests/network_RackWiFiConnect/control.WEP b/client/site_tests/network_RackWiFiConnect/control.WEP
deleted file mode 100644
index 2e1290e..0000000
--- a/client/site_tests/network_RackWiFiConnect/control.WEP
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'tienchang'
-TIME = 'SHORT'
-NAME = 'network_RackWiFiConnect.WEP'
-TEST_TYPE = 'Client'
-
-DOC = """
-network_RackWiFiConnect test uses enterprise managed users on the crostest.tv
-domain to connect to preconfigured network services on our WiFi rack, in this
-case a WEP network.
-"""
-
-from autotest_lib.client.common_lib.cros.network import wifi_rack_constants
-
-job.run_test('network_RackWiFiConnect',
-             test=wifi_rack_constants.WEP.testname)
diff --git a/client/site_tests/network_RackWiFiConnect/control.WPA2 b/client/site_tests/network_RackWiFiConnect/control.WPA2
deleted file mode 100644
index 4d9b231..0000000
--- a/client/site_tests/network_RackWiFiConnect/control.WPA2
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'tienchang'
-TIME = 'SHORT'
-NAME = 'network_RackWiFiConnect.WPA2'
-TEST_TYPE = 'Client'
-
-DOC = """
-network_RackWiFiConnect test uses enterprise managed users on the crostest.tv
-domain to connect to preconfigured network services on our WiFi rack, in this
-case a WPA2 network.
-"""
-
-from autotest_lib.client.common_lib.cros.network import wifi_rack_constants
-
-job.run_test('network_RackWiFiConnect',
-             test=wifi_rack_constants.WPA2.testname)
diff --git a/client/site_tests/network_RackWiFiConnect/control.googleGuest b/client/site_tests/network_RackWiFiConnect/control.googleGuest
deleted file mode 100644
index 8ad96e9..0000000
--- a/client/site_tests/network_RackWiFiConnect/control.googleGuest
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'tienchang'
-TIME = 'SHORT'
-NAME = 'network_RackWiFiConnect.googleGuest'
-TEST_TYPE = 'Client'
-
-DOC = """
-network_RackWiFiConnect test uses enterprise managed users on the crostest.tv
-domain to connect to preconfigured network services, including GoogleGuest.
-"""
-
-from autotest_lib.client.common_lib.cros.network import wifi_rack_constants
-
-job.run_test('network_RackWiFiConnect',
-             test=wifi_rack_constants.GOOGLE_GUEST.testname)
diff --git a/client/site_tests/network_RackWiFiConnect/control.hiddenWEP b/client/site_tests/network_RackWiFiConnect/control.hiddenWEP
deleted file mode 100644
index 2fecb85..0000000
--- a/client/site_tests/network_RackWiFiConnect/control.hiddenWEP
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'tienchang'
-TIME = 'SHORT'
-NAME = 'network_RackWiFiConnect.hiddenWEP'
-TEST_TYPE = 'Client'
-
-DOC = """
-network_RackWiFiConnect test uses enterprise managed users on the crostest.tv
-domain to connect to preconfigured network services on our WiFi rack, in this
-case a hidden WEP network.
-"""
-
-from autotest_lib.client.common_lib.cros.network import wifi_rack_constants
-
-job.run_test('network_RackWiFiConnect',
-             test=wifi_rack_constants.HIDDEN_WEP.testname)
diff --git a/client/site_tests/network_RackWiFiConnect/control.hiddenWPA b/client/site_tests/network_RackWiFiConnect/control.hiddenWPA
deleted file mode 100644
index 59c71dd..0000000
--- a/client/site_tests/network_RackWiFiConnect/control.hiddenWPA
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'tienchang'
-TIME = 'SHORT'
-NAME = 'network_RackWiFiConnect.hiddenWPA'
-TEST_TYPE = 'Client'
-
-DOC = """
-network_RackWiFiConnect test uses enterprise managed users on the crostest.tv
-domain to connect to preconfigured network services on our WiFi rack, in this
-case a hidden WPA network.
-"""
-
-from autotest_lib.client.common_lib.cros.network import wifi_rack_constants
-
-job.run_test('network_RackWiFiConnect',
-             test=wifi_rack_constants.HIDDEN_WPA.testname)
diff --git a/client/site_tests/network_RackWiFiConnect/control.proxyNonAuth b/client/site_tests/network_RackWiFiConnect/control.proxyNonAuth
deleted file mode 100644
index 0aa924d..0000000
--- a/client/site_tests/network_RackWiFiConnect/control.proxyNonAuth
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'tienchang'
-TIME = 'SHORT'
-NAME = 'network_RackWiFiConnect.proxyNonAuth'
-TEST_TYPE = 'Client'
-
-DOC = """
-network_RackWiFiConnect test uses enterprise managed users on the crostest.tv
-domain to connect to preconfigured network services on our WiFi rack, in this
-case a non-authenticated proxy network.
-"""
-
-from autotest_lib.client.common_lib.cros.network import wifi_rack_constants
-
-job.run_test('network_RackWiFiConnect',
-             test=wifi_rack_constants.PROXY_NON_AUTH.testname)
diff --git a/client/site_tests/network_RackWiFiConnect/network_RackWiFiConnect.py b/client/site_tests/network_RackWiFiConnect/network_RackWiFiConnect.py
deleted file mode 100644
index 7c838eb..0000000
--- a/client/site_tests/network_RackWiFiConnect/network_RackWiFiConnect.py
+++ /dev/null
@@ -1,174 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import logging
-import tempfile
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network \
-         import wifi_rack_constants as constants
-from autotest_lib.client.cros.networking import wifi_proxy
-from autotest_lib.client.cros.networking.chrome_testing \
-        import chrome_networking_test_api as cnta
-from autotest_lib.client.cros.networking.chrome_testing \
-        import chrome_networking_test_context as cntc
-
-
-class network_RackWiFiConnect(test.test):
-    """Client test to connect to various network services on WiFi rack.
-
-    After connection, we assert access to pages only accessible through the
-    connected network.
-
-    """
-    version = 1
-
-
-    def _assert_access(self, test):
-        """Assert that the user can access the page.
-
-        Verification URLs are either pages on the WiFi rack's Apache server
-        or on the general Internet.
-
-        @param test string - testname of NetworkServices namedtuple
-
-        @return boolean - True if able to access, False otherwise
-
-        """
-        for service_test in constants.NETWORK_SERVICES_TESTS:
-            if test == service_test.testname:
-                url, pattern = service_test.url, service_test.pattern
-                break
-
-        # Since this test runs OTA, allow 15 seconds of leeway
-        time.sleep(15)
-
-        wget_cmd = 'wget -O /tmp/wget.log %s' % url
-        for retry in range(3):
-            exit_status = utils.system(wget_cmd, ignore_status=True)
-            if not exit_status:
-                logging.debug('Able to wget URL.')
-                break
-            logging.error('Could not wget URL; trying again.')
-        grep_url_cmd = 'cat /tmp/wget.log | grep %s' % pattern
-        output_status = utils.system(grep_url_cmd, ignore_status=True)
-        if output_status:
-            logging.debug('Unable to access correct URL for %s',
-                          service_test.testname)
-            return False
-        return True
-
-
-    def _connect(self, ssid, uname):
-        """Connect to particular network and assert access to page.
-
-        @param ssid string - predefined SSID from user's preferred networks
-        @param uname string - predefined username of managed user
-
-        @return boolean - True if able to connect, False otherwise
-
-        """
-        start_time = time.time()
-        with cntc.ChromeNetworkingTestContext(username=uname,
-                  password=constants.PASSWORD, gaia_login=True) as \
-                  testing_context:
-            net_provider = cnta.ChromeNetworkProvider(testing_context)
-            enabled_devices = net_provider.get_enabled_devices()
-            if net_provider.WIFI_DEVICE not in enabled_devices:
-                net_provider.enable_network_device(net_provider.WIFI_DEVICE)
-            logging.info('Scanning for networks')
-            connect_to_service = None
-            while time.time() - start_time < constants.SCAN_RETRY_TIMEOUT:
-                net_provider.scan_for_networks(timeout=20)
-                logging.info('Attempting to connect to %s', ssid)
-                networks = net_provider.get_wifi_networks()
-                for service in networks:
-                    if service['Name'] == ssid:
-                        connect_to_service = service
-                if not connect_to_service:
-                    logging.error('Unable to find %s', ssid)
-                    continue
-                try:
-                    net_provider.connect_to_network(connect_to_service)
-                    logging.info('Successfully connected to network %s', ssid)
-                    return True
-                except error.TestFail as e:
-                    logging.error('Unable to connect to %s', ssid)
-                    continue
-            return False
-
-
-        """Connect, assert page access, and write the results to results/.
-        """Verify connect and assert and write results to results/.
-
-        @param test string - testname of NetworkServices namedtuple
-        @param ssid string - predefined SSID from user's preferred networks
-        @param user string - predefined username of managed user
-
-        """
-        tf = tempfile.NamedTemporaryFile(suffix='.txt',
-                                         prefix='connect_%s_' % test,
-                                         dir=self.resultsdir,
-                                         delete=False)
-        with tf as results:
-            if not self._connect(ssid, user):
-                results.write('%s FAILED to connect to SSID\n\n' % test)
-            elif not self._assert_access(test):
-                results.write('%s FAILED to access\n\n' % test)
-            else:
-                results.write('%s passed\n\n' % test)
-
-
-    def _to_wifi(self, proxy):
-        """Set service order to WiFi before Ethernet.
-
-        @param proxy WiFi Proxy object
-
-        """
-        logging.info('Setting order to WiFi, prioritized over Ethernet.')
-        proxy.manager.SetServiceOrder(dbus.String('wifi,ethernet'))
-
-
-    def _to_ethernet(self, proxy):
-        """Set service order back to the default: Ethernet before WiFi.
-
-        @param proxy WiFi Proxy object
-
-        """
-        logging.info('Setting back to default service order.')
-        proxy.manager.SetServiceOrder(dbus.String('ethernet,wifi'))
-
-
-    def run_once(self, test):
-        """Run the test.
-
-        @param test string - Set by the client test control file
-
-        """
-        client_proxy = wifi_proxy.WifiProxy()
-        if test != 'all':
-            logging.info('Running an individual control file.')
-            self._to_wifi(client_proxy)
-            for service_test in constants.NETWORK_SERVICES_TESTS:
-                if service_test.testname == test:
-                    self._connect_and_assert(service_test.testname,
-                                             service_test.ssid,
-                                             service_test.user)
-                    self._to_ethernet(client_proxy)
-                    return
-        for service_test in constants.NETWORK_SERVICES_TESTS:
-            logging.info('==== Running current test %s ====',
-                         service_test.testname)
-            self._to_wifi(client_proxy)
-            self._connect_and_assert(service_test.testname,
-                                     service_test.ssid,
-                                     service_test.user)
-            self._to_ethernet(client_proxy)
-
-        # Ensure DUT returns to normal service state
-        self._to_ethernet(client_proxy)
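The deleted _assert_access() above boils down to a retry-and-verify pattern: fetch a verification URL a few times, then look for an expected marker in the body. Below is a minimal standalone sketch of that pattern, with urllib standing in for the wget/grep pipeline; the URL and pattern in the usage comment are placeholders, not values from the test.

import logging
import re
import time
import urllib.request


def assert_access(url, pattern, retries=3, delay=5):
    """Return True if `pattern` appears in the page fetched from `url`."""
    body = None
    for _ in range(retries):
        try:
            with urllib.request.urlopen(url, timeout=10) as resp:
                body = resp.read().decode('utf-8', errors='replace')
            break
        except OSError as e:
            # URLError/HTTPError/timeouts are all OSError subclasses.
            logging.error('Fetch failed (%s); retrying.', e)
            time.sleep(delay)
    if body is None:
        return False
    return re.search(pattern, body) is not None

# Example with placeholder values:
# assert_access('http://example.com/landing', r'Welcome')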
diff --git a/client/site_tests/network_RestartShill/control b/client/site_tests/network_RestartShill/control
deleted file mode 100644
index 99b6eb7..0000000
--- a/client/site_tests/network_RestartShill/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'quiche, pstew'
-NAME = 'network_RestartShill'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that we can restart shill. Used by the network_DiskFull
-  server test.
-"""
-
-job.run_test(
-    'network_RestartShill', remove_profile=True, tag='profile_missing')
-job.run_test(
-    'network_RestartShill', remove_profile=False, tag='profile_exists')
diff --git a/client/site_tests/network_RestartShill/network_RestartShill.py b/client/site_tests/network_RestartShill/network_RestartShill.py
deleted file mode 100644
index 4e37bb8..0000000
--- a/client/site_tests/network_RestartShill/network_RestartShill.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.cros.networking import shill_context
-from autotest_lib.client.cros.networking import shill_proxy
-
-class network_RestartShill(test.test):
-    """
-    Stop shill, restart it, check that we can talk to it.
-    """
-    DEFAULT_PROFILE_PATH = '/var/cache/shill/default.profile'
-    version = 1
-
-
-    def run_once(self, remove_profile):
-        """Test main loop."""
-        with shill_context.stopped_shill():
-            if remove_profile:
-                os.remove(self.DEFAULT_PROFILE_PATH)
-        shill = shill_proxy.ShillProxy.get_proxy()
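The deleted network_RestartShill test relies on shill_context.stopped_shill(), a context manager that stops shill on entry and restarts it on exit. A generic sketch of that stop/act/restart shape is below; the systemctl commands and the paths in the usage comment are placeholders (ChromeOS itself manages shill through its own init system), so this is only an illustration of the pattern, not the autotest helper.

import contextlib
import subprocess


@contextlib.contextmanager
def stopped_service(name):
    """Stop `name` for the duration of the with-block, then restart it."""
    subprocess.run(['systemctl', 'stop', name], check=True)
    try:
        yield
    finally:
        # Restart even if the body raised, mirroring stopped_shill().
        subprocess.run(['systemctl', 'start', name], check=True)

# Usage (placeholder service and profile path):
# with stopped_service('shill'):
#     os.remove('/var/cache/shill/default.profile')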
diff --git a/client/site_tests/network_RoamWifiEndToEnd/control b/client/site_tests/network_RoamWifiEndToEnd/control
index 4d1457c..1a728ea 100755
--- a/client/site_tests/network_RoamWifiEndToEnd/control
+++ b/client/site_tests/network_RoamWifiEndToEnd/control
@@ -6,6 +6,7 @@
 NAME = 'network_RoamWifiEndToEnd'
 TIME = 'FAST'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
   Client side of the network_WiFi_RoamEndToEnd server test that sets up the
diff --git a/client/site_tests/network_RoamWifiEndToEnd/network_RoamWifiEndToEnd.py b/client/site_tests/network_RoamWifiEndToEnd/network_RoamWifiEndToEnd.py
index a50443a..9f5a813 100755
--- a/client/site_tests/network_RoamWifiEndToEnd/network_RoamWifiEndToEnd.py
+++ b/client/site_tests/network_RoamWifiEndToEnd/network_RoamWifiEndToEnd.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/network_VPNConnect/control.l2tpipsec_cert b/client/site_tests/network_VPNConnect/control.l2tpipsec_cert
deleted file mode 100644
index eafbb99..0000000
--- a/client/site_tests/network_VPNConnect/control.l2tpipsec_cert
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, wiley, quiche'
-NAME = 'network_VPNConnect.l2tpipsec_cert'
-PURPOSE = 'Verify shill correctly authenticates to an L2TP/IPSec Cert VPN'
-ATTRIBUTES = "suite:bvt-perbuild, suite:network_nightly, suite:toolchain-tests"
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that we can complete L2TP/IPSec certificate VPN authentication.
-  This is a client-only test which verifies function of the L2TP/IPSec
-  client.  Since the server it is testing against is itself, this test
-  is best suited for verifying that the connection manager and VPN
-  manager are correctly passing parameters to the underlying VPN client
-  to have it properly connect.
-
-  This test fails if the DUT cannot authenticate to a locally running
-  L2TP/IPSec certificate-based VPN server running at the other end of a
-  virtual Ethernet interface.
-
-"""
-
-job.run_test('network_VPNConnect',
-             vpn_types=[ 'l2tpipsec-cert' ],
-             tag=NAME.split('.')[1])
diff --git a/client/site_tests/network_VPNConnect/control.l2tpipsec_psk b/client/site_tests/network_VPNConnect/control.l2tpipsec_psk
deleted file mode 100644
index a28ab0e..0000000
--- a/client/site_tests/network_VPNConnect/control.l2tpipsec_psk
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, wiley, quiche'
-NAME = 'network_VPNConnect.l2tpipsec_psk'
-ATTRIBUTES = "suite:bvt-perbuild, suite:network_nightly, suite:toolchain-tests"
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that we can complete L2TP/IPSec PSK VPN authentication.
-  This is a client-only test which verifies function of the L2TP/IPSec
-  client.  Since the server it is testing against is itself, this test
-  is best suited for verifying that the connection manager and VPN
-  manager are correctly passing parameters to the underlying VPN client
-  to have it properly connect.
-
-  This test fails if the DUT cannot authenticate to a locally running
-  L2TP/IPSec PSK VPN server running at the other end of a virtual Ethernet
-  interface.
-
-  The -evil variant simulates connecting to a VPN gateway that specifies
-  its public IP as the point-to-point (remote) PPP address.  This
-  configuration has caused routing loops in the past, so we test it
-  explicitly in order to catch regressions.
-
-"""
-
-job.run_test('network_VPNConnect',
-             vpn_types=[ 'l2tpipsec-psk',
-                         'l2tpipsec-psk-evil' ],
-             tag=NAME.split('.')[1])
diff --git a/client/site_tests/network_VPNConnect/control.l2tpipsec_xauth b/client/site_tests/network_VPNConnect/control.l2tpipsec_xauth
deleted file mode 100644
index 851ccd5..0000000
--- a/client/site_tests/network_VPNConnect/control.l2tpipsec_xauth
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, wiley, quiche'
-NAME = 'network_VPNConnect.l2tpipsec_xauth'
-ATTRIBUTES = "suite:bvt-perbuild, suite:toolchain-tests"
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that we can complete L2TP/IPSec PSK VPN authentication with XAUTH.
-  This is a client-only test which verifies function of the L2TP/IPSec
-  client.  Since the server it is testing against is itself, this test
-  is best suited for verifying that the connection manager and VPN
-  manager are correctly passing parameters to the underlying VPN client
-  to have it properly connect.
-
-  This test fails if the DUT cannot authenticate to a locally running
-  L2TP/IPSec PSK VPN server running at the other end of a virtual Ethernet
-  interface.
-
-"""
-
-job.run_test('network_VPNConnect',
-             vpn_types=[ 'l2tpipsec-psk-xauth',
-                         'l2tpipsec-psk-xauth-incorrect_user',
-                         'l2tpipsec-psk-xauth-incorrect_missing_user' ],
-             tag=NAME.split('.')[1])
diff --git a/client/site_tests/network_VPNConnect/control.openvpn b/client/site_tests/network_VPNConnect/control.openvpn
deleted file mode 100644
index dfbfbdd..0000000
--- a/client/site_tests/network_VPNConnect/control.openvpn
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, wiley, quiche'
-NAME = 'network_VPNConnect.openvpn'
-ATTRIBUTES = "suite:bvt-perbuild, suite:network_nightly"
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that we can complete OpenVPN authentication.  This is a client-only
-  test which verifies function of the OpenVPN client.  Since the server it
-  is testing against is itself, this test is best suited for verifying that
-  the connection manager is correctly passing parameters to the underlying
-  VPN client to have it properly connect.
-
-  This test fails if the DUT cannot authenticate to a locally running
-  OpenVPN server running at the other end of a virtual Ethernet interface.
-
-"""
-
-job.run_test('network_VPNConnect',
-             vpn_types=[ 'openvpn' ],
-             tag=NAME.split('.')[1])
diff --git a/client/site_tests/network_VPNConnect/control.openvpn_cert_verify b/client/site_tests/network_VPNConnect/control.openvpn_cert_verify
deleted file mode 100644
index 709450f..0000000
--- a/client/site_tests/network_VPNConnect/control.openvpn_cert_verify
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, wiley, quiche'
-NAME = 'network_VPNConnect.openvpn_cert_verify'
-ATTRIBUTES = "suite:bvt-perbuild, suite:network_nightly"
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that we can complete OpenVPN authentication with a server where
-  the client performs subject name and hash verification.  This is a
-  client-only test which verifies function of the OpenVPN client.  Since
-  the server it is testing against is itself, this test is best suited for
-  verifying that the connection manager is correctly passing parameters to
-  the underlying VPN client to have it properly connect.
-
-  This test fails if the DUT cannot successfully verify the remote VPN
-  server using the "verify-hash" and "verify-x509-name" parameters.  It
-  also tests using the "extra-certs" parameter.  The server runs at the
-  other end of a virtual Ethernet interface.
-
-"""
-
-vpn_types = [ '%s_%s' % (NAME.split('.')[1], subtest)
-              for subtest in ('', 'cn_only', 'incorrect_subject',
-                              'incorrect_hash', 'incorrect_cn') ]
-job.run_test('network_VPNConnect', vpn_types=vpn_types, tag=NAME.split('.')[1])
diff --git a/client/site_tests/network_VPNConnect/control.openvpn_user_pass b/client/site_tests/network_VPNConnect/control.openvpn_user_pass
deleted file mode 100644
index 1919521..0000000
--- a/client/site_tests/network_VPNConnect/control.openvpn_user_pass
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, wiley, quiche'
-NAME = 'network_VPNConnect.openvpn_user_pass'
-ATTRIBUTES = "suite:bvt-perbuild, suite:network_nightly"
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Tests that we can complete OpenVPN authentication with a server that
-  performs username/password authentication.  This is a client-only test
-  which verifies function of the OpenVPN client.  Since the server it is
-  testing against is itself, this test is best suited for verifying that
-  the connection manager is correctly passing parameters to the underlying
-  VPN client to have it properly connect.
-
-  This test fails if the DUT cannot authenticate to a locally running
-  OpenVPN server running at the other end of a virtual Ethernet interface.
-  This server performs username / password authentication.
-
-"""
-
-job.run_test('network_VPNConnect',
-             vpn_types=[ 'openvpn_user_pass' ],
-             tag=NAME.split('.')[1])
diff --git a/client/site_tests/network_VPNConnect/network_VPNConnect.py b/client/site_tests/network_VPNConnect/network_VPNConnect.py
deleted file mode 100644
index 01dd24e..0000000
--- a/client/site_tests/network_VPNConnect/network_VPNConnect.py
+++ /dev/null
@@ -1,254 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros import virtual_ethernet_pair
-from autotest_lib.client.cros import certificate_util
-from autotest_lib.client.cros import shill_temporary_profile
-from autotest_lib.client.cros import tpm_store
-from autotest_lib.client.cros import vpn_server
-from autotest_lib.client.cros.networking import shill_context
-from autotest_lib.client.cros.networking import shill_proxy
-
-class network_VPNConnect(test.test):
-    """The VPN authentication class.
-
-    Starts up a VPN server within a chroot on the other end of a virtual
-    ethernet pair and attempts a VPN association using shill.
-
-    """
-    CLIENT_INTERFACE_NAME = 'pseudoethernet0'
-    SERVER_INTERFACE_NAME = 'serverethernet0'
-    TEST_PROFILE_NAME = 'testVPN'
-    CONNECT_TIMEOUT_SECONDS = 15
-    version = 1
-    SERVER_ADDRESS = '10.9.8.1'
-    CLIENT_ADDRESS = '10.9.8.2'
-    NETWORK_PREFIX = 24
-
-    def get_device(self, interface_name):
-        """Finds the corresponding Device object for an ethernet
-        interface with the name |interface_name|.
-
-        @param interface_name string The name of the interface to check.
-
-        @return DBus interface object representing the associated device.
-
-        """
-        device = self._shill_proxy.find_object('Device',
-                                               {'Name': interface_name})
-        if device is None:
-            raise error.TestFail('Device was not found.')
-
-        return device
-
-
-    def find_ethernet_service(self, interface_name):
-        """Finds the corresponding service object for an ethernet
-        interface.
-
-        @param interface_name string The name of the associated interface
-
-        @return Service object representing the associated service.
-
-        """
-        device = self.get_device(interface_name)
-        device_path = shill_proxy.ShillProxy.dbus2primitive(device.object_path)
-        return self._shill_proxy.find_object('Service', {'Device': device_path})
-
-
-    def get_vpn_server(self):
-        """Returns a VPN server instance."""
-        if self._vpn_type.startswith('l2tpipsec-psk'):
-            return vpn_server.L2TPIPSecVPNServer(
-                'psk',
-                self.SERVER_INTERFACE_NAME,
-                self.SERVER_ADDRESS,
-                self.NETWORK_PREFIX,
-                perform_xauth_authentication = 'xauth' in self._vpn_type,
-                local_ip_is_public_ip = 'evil' in self._vpn_type)
-        elif self._vpn_type.startswith('l2tpipsec-cert'):
-            return vpn_server.L2TPIPSecVPNServer('cert',
-                                                 self.SERVER_INTERFACE_NAME,
-                                                 self.SERVER_ADDRESS,
-                                                 self.NETWORK_PREFIX)
-        elif self._vpn_type.startswith('openvpn'):
-            return vpn_server.OpenVPNServer(self.SERVER_INTERFACE_NAME,
-                                            self.SERVER_ADDRESS,
-                                            self.NETWORK_PREFIX,
-                                            'user_pass' in self._vpn_type)
-        else:
-            raise error.TestFail('Unknown vpn server type %s' % self._vpn_type)
-
-
-    def get_vpn_client_properties(self, tpm):
-        """Returns VPN configuration properties.
-
-        @param tpm object TPM store instance to add credentials if necessary.
-
-        """
-        if self._vpn_type.startswith('l2tpipsec-psk'):
-            params = {
-                'L2TPIPsec.Password': vpn_server.L2TPIPSecVPNServer.CHAP_SECRET,
-                'L2TPIPsec.PSK':
-                        vpn_server.L2TPIPSecVPNServer.IPSEC_PRESHARED_KEY,
-                'L2TPIPsec.User':vpn_server.L2TPIPSecVPNServer.CHAP_USER,
-                'Name': 'test-vpn-l2tp-psk',
-                'Provider.Host': self.SERVER_ADDRESS,
-                'Provider.Type': 'l2tpipsec',
-                'Type': 'vpn'
-            }
-            if 'xauth' in self._vpn_type:
-                if 'incorrect_user' in self._vpn_type:
-                    params['L2TPIPsec.XauthUser'] = 'wrong_user'
-                    params['L2TPIPsec.XauthPassword'] = 'wrong_password'
-                elif 'incorrect_missing_user' not in self._vpn_type:
-                    params['L2TPIPsec.XauthUser'] = (
-                            vpn_server.L2TPIPSecVPNServer.XAUTH_USER)
-                    params['L2TPIPsec.XauthPassword'] = (
-                            vpn_server.L2TPIPSecVPNServer.XAUTH_PASSWORD)
-            return params
-        elif self._vpn_type == 'l2tpipsec-cert':
-            tpm.install_certificate(site_eap_certs.client_cert_1,
-                                    site_eap_certs.cert_1_tpm_key_id)
-            tpm.install_private_key(site_eap_certs.client_private_key_1,
-                                    site_eap_certs.cert_1_tpm_key_id)
-            return {
-                'L2TPIPsec.CACertPEM': [ site_eap_certs.ca_cert_1 ],
-                'L2TPIPsec.ClientCertID': site_eap_certs.cert_1_tpm_key_id,
-                'L2TPIPsec.ClientCertSlot': tpm.SLOT_ID,
-                'L2TPIPsec.User':vpn_server.L2TPIPSecVPNServer.CHAP_USER,
-                'L2TPIPsec.Password': vpn_server.L2TPIPSecVPNServer.CHAP_SECRET,
-                'L2TPIPsec.PIN': tpm.PIN,
-                'Name': 'test-vpn-l2tp-cert',
-                'Provider.Host': self.SERVER_ADDRESS,
-                'Provider.Type': 'l2tpipsec',
-                'Type': 'vpn'
-            }
-        elif self._vpn_type.startswith('openvpn'):
-            tpm.install_certificate(site_eap_certs.client_cert_1,
-                                    site_eap_certs.cert_1_tpm_key_id)
-            tpm.install_private_key(site_eap_certs.client_private_key_1,
-                                    site_eap_certs.cert_1_tpm_key_id)
-            params = {
-                'Name': 'test-vpn-openvpn',
-                'Provider.Host': self.SERVER_ADDRESS,
-                'Provider.Type': 'openvpn',
-                'Type': 'vpn',
-                'OpenVPN.CACertPEM': [ site_eap_certs.ca_cert_1 ],
-                'OpenVPN.Pkcs11.ID': site_eap_certs.cert_1_tpm_key_id,
-                'OpenVPN.Pkcs11.PIN': tpm.PIN,
-                'OpenVPN.RemoteCertEKU': 'TLS Web Server Authentication',
-                'OpenVPN.Verb': '5'
-            }
-            if 'user_pass' in self._vpn_type:
-                params['OpenVPN.User'] = vpn_server.OpenVPNServer.USERNAME
-                params['OpenVPN.Password'] = vpn_server.OpenVPNServer.PASSWORD
-            if 'cert_verify' in self._vpn_type:
-                ca = certificate_util.PEMCertificate(site_eap_certs.ca_cert_1)
-                if 'incorrect_hash' in self._vpn_type:
-                    bogus_hash = ':'.join(['00'] * 20)
-                    params['OpenVPN.VerifyHash'] = bogus_hash
-                else:
-                    params['OpenVPN.VerifyHash'] = ca.fingerprint
-                server = certificate_util.PEMCertificate(
-                        site_eap_certs.server_cert_1)
-                if 'incorrect_subject' in self._vpn_type:
-                    params['OpenVPN.VerifyX509Name'] = 'bogus subject name'
-                elif 'incorrect_cn' in self._vpn_type:
-                    params['OpenVPN.VerifyX509Name'] = 'bogus cn'
-                    params['OpenVPN.VerifyX509Type'] = 'name'
-                elif 'cn_only' in self._vpn_type:
-                    params['OpenVPN.VerifyX509Name'] = server.subject_dict['CN']
-                    params['OpenVPN.VerifyX509Type'] = 'name'
-                else:
-                    # This is the form OpenVPN expects.
-                    params['OpenVPN.VerifyX509Name'] = ', '.join(server.subject)
-            return params
-        else:
-            raise error.TestFail('Unknown vpn client type %s' % self._vpn_type)
-
-
-    def connect_vpn(self):
-        """Connects the client to the VPN server."""
-        proxy = self._shill_proxy
-        with tpm_store.TPMStore() as tpm:
-            service = proxy.configure_service(
-                self.get_vpn_client_properties(tpm))
-            service.Connect()
-            result = proxy.wait_for_property_in(service,
-                                                proxy.SERVICE_PROPERTY_STATE,
-                                                ('ready', 'online'),
-                                                self.CONNECT_TIMEOUT_SECONDS)
-        (successful, _, _) = result
-        if not successful and self._expect_success:
-            raise error.TestFail('VPN connection failed')
-        if successful and not self._expect_success:
-            raise error.TestFail('VPN connection succeeded '
-                                 'when it should have failed')
-        return successful
-
-
-    def run_once(self, vpn_types=[]):
-        """Test main loop."""
-        self._shill_proxy = shill_proxy.ShillProxy()
-        for vpn_type in vpn_types:
-            self.run_vpn_test(vpn_type)
-
-
-    def run_vpn_test(self, vpn_type):
-        """Run a vpn test of |vpn_type|.
-
-        @param vpn_type string type of VPN test to run.
-
-        """
-        manager = self._shill_proxy.manager
-        server_address_and_prefix = '%s/%d' % (self.SERVER_ADDRESS,
-                                               self.NETWORK_PREFIX)
-        client_address_and_prefix = '%s/%d' % (self.CLIENT_ADDRESS,
-                                               self.NETWORK_PREFIX)
-        self._vpn_type = vpn_type
-        self._expect_success = 'incorrect' not in vpn_type
-
-        with shill_temporary_profile.ShillTemporaryProfile(
-                manager, profile_name=self.TEST_PROFILE_NAME):
-            with virtual_ethernet_pair.VirtualEthernetPair(
-                    interface_name=self.SERVER_INTERFACE_NAME,
-                    peer_interface_name=self.CLIENT_INTERFACE_NAME,
-                    peer_interface_ip=client_address_and_prefix,
-                    interface_ip=server_address_and_prefix,
-                    ignore_shutdown_errors=True) as ethernet_pair:
-                if not ethernet_pair.is_healthy:
-                    raise error.TestFail('Virtual ethernet pair failed.')
-
-                with self.get_vpn_server() as server:
-                    # We have to poll and wait for the service to be ready in
-                    # shill, because shill's update of "CLIENT_INTERFACE_NAME"
-                    # is asynchronous.
-                    service = utils.poll_for_condition(
-                        lambda: self.find_ethernet_service(
-                            self.CLIENT_INTERFACE_NAME))
-                    # When shill finds this ethernet interface, it will reset
-                    # its IP address and start a DHCP client.  We must configure
-                    # the static IP address through shill.
-                    static_ip_config = {'Address' : self.CLIENT_ADDRESS,
-                                        'Prefixlen' : self.NETWORK_PREFIX}
-                    with shill_context.StaticIPContext(service,
-                                                       static_ip_config):
-                        if self.connect_vpn():
-                            res = utils.ping(server.SERVER_IP_ADDRESS, tries=3,
-                                             user='chronos')
-                            if res != 0:
-                                raise error.TestFail('Error pinging server IP')
-
-                            # IPv6 should be blackholed, so ping returns
-                            # "other error"
-                            res = utils.ping("2001:db8::1", tries=1,
-                                             user='chronos')
-                            if res != 2:
-                                raise error.TestFail(
-                                        'IPv6 ping should have aborted')
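One detail worth noting in the deleted test: variants whose name contains 'incorrect' are negative tests, so a successful connection is itself a failure. The sketch below captures that pass/fail bookkeeping in isolation; connect() is a stand-in for the real Connect()/wait_for_property_in() sequence against shill, not an actual API.

def check_vpn_result(vpn_type, connect):
    """Apply the expect-success convention used by the deleted test."""
    expect_success = 'incorrect' not in vpn_type
    connected = connect(vpn_type)
    if expect_success and not connected:
        raise AssertionError('VPN connection failed for %s' % vpn_type)
    if connected and not expect_success:
        raise AssertionError('VPN connection succeeded for %s '
                             'when it should have failed' % vpn_type)
    return connected

# Example with a stub connector that always fails:
# check_vpn_result('l2tpipsec-psk-xauth-incorrect_user', lambda t: False)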
diff --git a/client/site_tests/network_WiFiCaps/control b/client/site_tests/network_WiFiCaps/control
deleted file mode 100644
index 241f308..0000000
--- a/client/site_tests/network_WiFiCaps/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "network_WiFiCaps"
-PURPOSE = 'Verify that WiFi devices have the required capabilities.'
-CRITERIA = """
-If the following requirements are not present the test case will fail:
-  - station mode
-  - 2.4GHz band
-  - 5 GHz band
-  - 802.11n
-  - HT40
-  - Short GI in HT40
-"""
-ATTRIBUTES = "suite:network_nightly"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "client"
-
-DOC = """
-The test uses the iw tool to query device capabilities.
-"""
-
-job.run_test('network_WiFiCaps')
diff --git a/client/site_tests/network_WiFiCaps/network_WiFiCaps.py b/client/site_tests/network_WiFiCaps/network_WiFiCaps.py
deleted file mode 100644
index f320f9a..0000000
--- a/client/site_tests/network_WiFiCaps/network_WiFiCaps.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os, re, string
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class network_WiFiCaps(test.test):
-    version = 1
-
-    def setup(self):
-        self.job.setup_dep(['iwcap'])
-        # create an empty srcdir to prevent the error that checks .version
-        if not os.path.exists(self.srcdir):
-            os.mkdir(self.srcdir)
-
-
-    def __parse_iwcap(self, lines):
-        """Parse the iwcap output"""
-
-        results = {}
-        parse_re = re.compile(r'([a-z0-9]*):[ ]*(.*)')
-        for line in lines.split('\n'):
-            line = line.rstrip()
-            logging.info('==> %s' % line)
-            match = parse_re.search(line)
-            if match:
-                results[match.group(1)] = match.group(2)
-                continue
-        return results
-
-
-    def __run_iwcap(self, phy, caps):
-        iwcapdir = os.path.join(self.autodir, 'deps', 'iwcap', 'iwcap')
-        iwcap = utils.run(iwcapdir + ' ' + phy + ' ' + string.join(caps))
-        return self.__parse_iwcap(iwcap.stdout)
-
-    def run_once(self):
-        phy = utils.system_output("iw list | awk '/^Wiphy/ {print $2}'")
-        if not phy or 'phy' not in phy:
-            raise error.TestFail('WiFi Physical interface not found')
-
-        requiredCaps = {
-            'sta'    : 'true',        # station mode
-
-            '24ghz'  : 'true',        # 2.4GHz band
-            '11b'    : 'true',
-            '11g'    : 'true',
-
-            '5ghz'   : 'true',        # 5GHz band
-            '11a'    : 'true',
-
-            '11n'    : 'true',        # 802.11n (both bands)
-            'ht40'   : 'true',        # HT40
-            'sgi40'  : 'true',        # Short GI in HT40
-        }
-
-        dep = 'iwcap'
-        dep_dir = os.path.join(self.autodir, 'deps', dep)
-        self.job.install_pkg(dep, 'dep', dep_dir)
-
-        results = self.__run_iwcap(phy, requiredCaps.keys())
-        for cap in requiredCaps:
-            if not cap in results:
-                raise error.TestFail('Internal error, ' +
-                    'capability "%s" not handled' % cap)
-            if results[cap] != requiredCaps[cap]:
-                raise error.TestFail('Requirement not met: ' +
-                    'cap "%s" is "%s" but expected "%s"'
-                    % (cap, results[cap], requiredCaps[cap]))
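For reference, the iwcap output parsing above translates almost directly to Python 3. The sketch below shows the same "key: value" split as a standalone function; the sample input is made up, since the real test fed it the stdout of the bundled iwcap dependency.

import re


def parse_iwcap(output):
    """Split 'key: value' lines from iwcap-style output into a dict."""
    results = {}
    parse_re = re.compile(r'([a-z0-9]+):\s*(.*)')
    for line in output.splitlines():
        match = parse_re.search(line.rstrip())
        if match:
            results[match.group(1)] = match.group(2)
    return results

print(parse_iwcap('sta: true\n24ghz: true\n11n: false'))
# -> {'sta': 'true', '24ghz': 'true', '11n': 'false'}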
diff --git a/client/site_tests/network_WiFiHECaps/control b/client/site_tests/network_WiFiHECaps/control
deleted file mode 100644
index 3f5eeb3..0000000
--- a/client/site_tests/network_WiFiHECaps/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pauletti'
-NAME = 'network_WiFiHECaps'
-ATTRIBUTES = 'suite:wifi_flaky'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-This test checks that a device which supports the 802.11ax WiFi standard is
-able to function on a High Efficiency 160 MHz wide channel (HE160). HE PHY
-implementation is expected on all devices which support 802.11ax frame formats
-(i.e. HE PPDU).
-"""
-
-from autotest_lib.client.common_lib.cros.network import iw_runner
-
-job.run_test('network_WiFiHECaps', features=[[iw_runner.HE_PHY_5HE160,
-                                              iw_runner.HE_PHY_5HE160_80_80]])
diff --git a/client/site_tests/network_WiFiHECaps/control.11ax_supported b/client/site_tests/network_WiFiHECaps/control.11ax_supported
deleted file mode 100644
index a664888..0000000
--- a/client/site_tests/network_WiFiHECaps/control.11ax_supported
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pauletti'
-NAME = 'network_WiFiHECaps.11ax_supported'
-ATTRIBUTES = 'suite:wifi_flaky'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-This test checks that a device supports the 802.11ax WiFi standard (i.e., that
-it is capable of receiving and transmitting High Efficiency (HE) PPDUs).
-"""
-
-job.run_test('network_WiFiHECaps', tag=NAME.split('.')[1])
diff --git a/client/site_tests/network_WiFiHECaps/control.HE160 b/client/site_tests/network_WiFiHECaps/control.HE160
deleted file mode 100644
index 935d346..0000000
--- a/client/site_tests/network_WiFiHECaps/control.HE160
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pauletti'
-NAME = 'network_WiFiHECaps.HE160'
-ATTRIBUTES = 'suite:wifi_flaky'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-This test checks that a device which supports the 802.11ax WiFi standard is
-able to function on a High Efficiency 160 MHz wide channel (HE160). HE PHY
-implementation is expected on all devices which support 802.11ax frame formats
-(i.e. HE PPDU).
-"""
-
-from autotest_lib.client.common_lib.cros.network import iw_runner
-
-job.run_test('network_WiFiHECaps', tag=NAME.split('.')[1],
-             features=[[iw_runner.HE_PHY_5HE160,
-                        iw_runner.HE_PHY_5HE160_80_80]])
diff --git a/client/site_tests/network_WiFiHECaps/control.MU_MIMO b/client/site_tests/network_WiFiHECaps/control.MU_MIMO
deleted file mode 100644
index b578a21..0000000
--- a/client/site_tests/network_WiFiHECaps/control.MU_MIMO
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pauletti'
-NAME = 'network_WiFiHECaps.MU_MIMO'
-ATTRIBUTES = 'suite:wifi_flaky'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-This test checks that a device which supports the 802.11ax WiFi standard also
-supports Multi-User MIMO (MU-MIMO) downlink (DL) and uplink (UL).
-"""
-
-from autotest_lib.client.common_lib.cros.network import iw_runner
-
-job.run_test('network_WiFiHECaps', tag=NAME.split('.')[1],
-        features=[[iw_runner.HE_PHY_FULL_BAND_UL_MU_MIMO,
-                   iw_runner.HE_PHY_PART_BAND_UL_MU_MIMO],
-                  [iw_runner.HE_PHY_PART_BAND_DL_MU_MIMO]])
-
diff --git a/client/site_tests/network_WiFiHECaps/network_WiFiHECaps.py b/client/site_tests/network_WiFiHECaps/network_WiFiHECaps.py
deleted file mode 100644
index 4c41560..0000000
--- a/client/site_tests/network_WiFiHECaps/network_WiFiHECaps.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import iw_runner
-
-class network_WiFiHECaps(test.test):
-    """Test the specified HE client capabilities."""
-    version = 1
-
-    def run_once(self, phy=None, features=None):
-        """
-        Check for support of the features specified in the control file.
-
-        Features are passed in as a list of lists containing string constants.
-        The test passes if the DUT supports at least one string in each inner
-        list; that is, the inner lists are joined with AND, while the elements
-        within each inner list are joined with OR.
-
-        @param phy string name of wifi phy to use, or None to allow the
-                test to choose.
-        @param features list of lists of string constants from iw_runner
-                specifying the HE features to check on the DUT.
-
-        """
-        iw = iw_runner.IwRunner()
-        if not phy:
-            phys = iw.list_phys()
-            if not phys:
-                raise error.TestError('No valid WiFi phy found')
-            phy = phys[0].name
-        if not iw.he_supported():
-            raise error.TestNAError('HE not supported by DUT')
-
-        phy_info = iw.get_info()
-        if not phy_info:
-            raise error.TestError('Could not get phy info using iw_runner')
-
-        if not features:
-            features = []
-
-        featurelist = [f for inner_list in features for f in inner_list]
-        is_supported = {f : False for f in featurelist}
-        values = {}
-
-        for line in phy_info.splitlines():
-            line = line.strip()
-            for f in featurelist:
-                if not is_supported[f] and f in line:
-                    is_supported[f] = True
-                    l = line.split(':', 1)
-                    if len(l) > 1:
-                        values[f] = l[1].strip()
-                    break
-
-        supported = ['These features are supported by the DUT:']
-        not_supported = ['These features are NOT supported by the DUT:']
-        for f in featurelist:
-            if is_supported[f]:
-                if values.get(f, None):
-                    f += ('; has value %s' % values[f])
-                supported.append(f)
-            else:
-                not_supported.append(f)
-        logging.info(' '.join(supported))
-        logging.info(' '.join(not_supported))
-
-        for inner_list in features:
-            list_passed = False
-            for f in inner_list:
-                if is_supported[f]:
-                    list_passed = True
-                    break
-            if not list_passed:
-                raise error.TestError('Test failed because none of %r are '
-                                      'supported by the DUT.' % inner_list)
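The feature check above is an AND of ORs: the DUT must support at least one feature from every group. A standalone sketch of that check is below; the feature names are placeholders, and `supported` stands in for the set parsed out of `iw phy info` in the real test.

def check_features(features, supported):
    """Raise if any group has no member present in `supported`."""
    for group in features:
        if not any(f in supported for f in group):
            raise AssertionError('None of %r are supported by the DUT'
                                 % (group,))

# Placeholder feature names; passes because each group has a match.
check_features([['HE160', 'HE160/80+80'], ['DL MU-MIMO']],
               supported={'HE160', 'DL MU-MIMO'})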
diff --git a/client/site_tests/network_WiFiResume/control b/client/site_tests/network_WiFiResume/control
index e331064..4fa4697 100644
--- a/client/site_tests/network_WiFiResume/control
+++ b/client/site_tests/network_WiFiResume/control
@@ -7,6 +7,7 @@
 ATTRIBUTES = ('suite:network_nightly, suite:bvt-perbuild')
 TIME = 'SHORT'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 Ensure wifi interface is found after suspend-resume.
diff --git a/client/site_tests/network_WiFiResume/network_WiFiResume.py b/client/site_tests/network_WiFiResume/network_WiFiResume.py
index d6eef00..f4fc5ed 100755
--- a/client/site_tests/network_WiFiResume/network_WiFiResume.py
+++ b/client/site_tests/network_WiFiResume/network_WiFiResume.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/network_WlanRegulatory/control b/client/site_tests/network_WlanRegulatory/control
deleted file mode 100644
index b115cff..0000000
--- a/client/site_tests/network_WlanRegulatory/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, quiche, wiley, briannorris'
-NAME = 'network_WlanRegulatory'
-ATTRIBUTES = 'suite:wifi_matfunc'
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-# To be safe, let's avoid setting non-default regulatory domains outside of an
-# RF chamber.
-DEPENDENCIES = 'wificell'
-
-DOC = """
-Ensure the regulatory database is sane and that we can successfully switch
-domains using the "iw" userspace utility.
-"""
-
-
-job.run_test('network_WlanRegulatory')
diff --git a/client/site_tests/network_WlanRegulatory/network_WlanRegulatory.py b/client/site_tests/network_WlanRegulatory/network_WlanRegulatory.py
deleted file mode 100755
index 865a669..0000000
--- a/client/site_tests/network_WlanRegulatory/network_WlanRegulatory.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import interface
-from autotest_lib.client.common_lib.cros.network import iw_runner
-
-
-class network_WlanRegulatory(test.test):
-    """
-    Ensure the regulatory database is sane and that we can successfully switch
-    domains using the "iw" userspace utility. We don't verify that the system
-    truly respects the rules, but only that it does not reject them.
-    Note that some drivers "self manage" their domain detection and so this
-    test can't apply reliably. For those drivers that do not self-manage, we
-    still ask for the phy-specific domain, to ensure that (if it differs from
-    the global domain) it still follows along with the global domain.
-    """
-    version = 1
-    # TODO(https://crbug.com/1000346): parse /lib/firmware/regulatory.db, once
-    # CRDA goes away.
-    REGULATORY_DATABASE = '/usr/lib/crda/regulatory.bin'
-
-    def get_regulatory_domains(self):
-        """Get the list of regulatory domains in the DUT's database."""
-        return utils.system_output('regdbdump %s | grep country | '
-                                   'sed -e s/^country.// -e s/:.*//' %
-                                   self.REGULATORY_DATABASE).split()
-
-    def assert_set_regulatory_domain(self, regdomain):
-        """Set the system regulatory domain, then assert that it is correctly
-        reflected for the wiphy.
-
-        @param regdomain string 2-letter country code of the regulatory
-            domain to set.
-
-        """
-        logging.info('Using iw to set regulatory domain to %s', regdomain)
-        self._iw.set_regulatory_domain(regdomain)
-
-        # The kernel handles the user hint asynchronously (either calling out
-        # to udev/CRDA, or to the in-kernel database). Wait a bit.
-        # TODO: poll instead, or watch for NL80211_CMD_REG_CHANGE.
-        time.sleep(1)
-
-        current_regdomain = self._iw.get_regulatory_domain(wiphy=self._wiphy)
-        if current_regdomain != regdomain:
-            raise error.TestFail('Expected iw to set regdomain %s but got %s' %
-                                 (regdomain, current_regdomain))
-
-    def run_once(self):
-        """Test main loop"""
-        self._iw = iw_runner.IwRunner()
-
-        # If the driver "self manages" (NL80211_ATTR_WIPHY_SELF_MANAGED_REG)
-        # its domain detection, we can't guarantee it will respect user-space
-        # settings.
-        if self._iw.is_regulatory_self_managed():
-            raise error.TestNAError('Wiphy is self-managed')
-
-        wlan_ifs = [nic for nic in interface.get_interfaces()
-                    if nic.is_wifi_device()]
-        if not wlan_ifs:
-            raise error.TestFail('No WiFi device found')
-        self._wiphy = wlan_ifs[0].wiphy_name
-
-        # Stash the global domain; we can only 'set' the global domain, and we
-        # want to restore it in the end if things go awry.
-        self._initial_regdomain = self._iw.get_regulatory_domain()
-        logging.info('Initial global regulatory domain is %s',
-                     self._initial_regdomain)
-
-        domain_list = self.get_regulatory_domains()
-        if not domain_list:
-            raise error.TestFail('Did not get a domain list from the database')
-
-        for domain in domain_list:
-            self.assert_set_regulatory_domain(domain)
-
-    def cleanup(self):
-        """Cleanup: restore device to original state."""
-        if hasattr(self, '_initial_regdomain'):
-            self.assert_set_regulatory_domain(self._initial_regdomain)
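The deleted test papers over the asynchronous regulatory change with a fixed one-second sleep; a polling loop is the more robust shape. The sketch below drives the iw CLI directly instead of iw_runner, and its output parsing is simplified (real `iw reg get` output has a global section and may list per-phy domains), so treat it as an illustration of the set-then-poll idea rather than a drop-in replacement.

import subprocess
import time


def get_domain():
    """Return the first country code reported by `iw reg get`, or None."""
    out = subprocess.run(['iw', 'reg', 'get'], check=True,
                         capture_output=True, text=True).stdout
    for line in out.splitlines():
        if line.startswith('country '):
            return line.split()[1].rstrip(':')
    return None


def assert_set_regulatory_domain(regdomain, timeout=5.0):
    """Set the global domain and poll until the kernel reflects it."""
    subprocess.run(['iw', 'reg', 'set', regdomain], check=True)
    deadline = time.time() + timeout
    while time.time() < deadline:
        if get_domain() == regdomain:
            return
        time.sleep(0.5)
    raise AssertionError('Expected regdomain %s but got %s'
                         % (regdomain, get_domain()))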
diff --git a/client/site_tests/p2p_ConsumeFiles/control b/client/site_tests/p2p_ConsumeFiles/control
deleted file mode 100644
index c54a3a3..0000000
--- a/client/site_tests/p2p_ConsumeFiles/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME='FAST'
-AUTHOR = 'deymo,zeuthen,chromeos-installer'
-BUG_TEMPLATE = {
-    'cc': ['chromeos-installer-alerts@google.com'],
-    'components': ['Internals>Installer'],
-}
-
-DOC = """\
-This test runs p2p-client on several common cases using a fake network of peers
-to check that it can discover files on this network. If p2p-client isn't able
-to discover and return URLs in this environment the test will fail. Among the
-tested scenarios, a case where p2p-client should hang (because there are too
-many connections) is included.
-"""
-NAME = 'p2p_ConsumeFiles'
-PURPOSE = 'Check that P2P can discover peers and download from them.'
-CRITERIA = 'Fails if p2p-client is not able to discover and get the URLs.'
-ATTRIBUTES = "suite:bvt-perbuild"
-TEST_CLASS = 'platform'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-JOB_RETRIES = 2
-
-job.run_test('p2p_ConsumeFiles')
diff --git a/client/site_tests/p2p_ConsumeFiles/p2p_ConsumeFiles.py b/client/site_tests/p2p_ConsumeFiles/p2p_ConsumeFiles.py
deleted file mode 100644
index 9d38bc8..0000000
--- a/client/site_tests/p2p_ConsumeFiles/p2p_ConsumeFiles.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import sys
-import tempfile
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error, utils
-from autotest_lib.client.common_lib.cros import avahi_utils
-from autotest_lib.client.cros import service_stopper
-from autotest_lib.client.cros.netprotos import cros_p2p, zeroconf
-
-
-P2P_CLIENT = '/usr/sbin/p2p-client'
-
-
-class p2p_ConsumeFiles(test.test):
-    """The P2P Client class tester.
-
-    Creates a fake network of peers with lansim and tests if p2p-client can
-    discover files on that network.
-    """
-    version = 1
-
-    def setup(self):
-        self.job.setup_dep(['lansim'])
-
-
-    def initialize(self):
-        dep = 'lansim'
-        dep_dir = os.path.join(self.autodir, 'deps', dep)
-        logging.info('lansim is at %s', dep_dir)
-        self.job.install_pkg(dep, 'dep', dep_dir)
-
-        # Import the lansim modules installed on lansim/build/
-        sys.path.append(os.path.join(dep_dir, 'build'))
-
-        self._services = None
-        self._tap = None
-
-
-    def cleanup(self):
-        avahi_utils.avahi_stop()
-
-        if self._tap:
-            self._tap.down()
-
-        if self._services:
-            self._services.restore_services()
-
-
-    def _setup_avahi(self):
-        """Initializes avahi daemon on a new tap interface."""
-        from lansim import tuntap
-        # Ensure p2p and avahi aren't running.
-        self._services = service_stopper.ServiceStopper(['p2p', 'avahi'])
-        self._services.stop_services()
-
-        # Initialize avahi-daemon listening only on the fake TAP interface.
-        self._tap = tuntap.TunTap(tuntap.IFF_TAP, name='faketap')
-
-        # The network 169.254/16 shouldn't clash with other real services. We
-        # use a /24 subnet of it here.
-        self._tap.set_addr('169.254.10.1', mask=24)
-        self._tap.up()
-
-        # Re-launch avahi-daemon on the tap interface.
-        avahi_utils.avahi_start_on_iface(self._tap.name)
-
-
-    def _run_p2p_client(self, args, timeout=10., ignore_status=False):
-        """Run p2p-client with the provided arguments.
-
-        @param args: list of strings, each one representing an argument.
-        @param timeout: Timeout for p2p-client in seconds before it's killed.
-        @return: the return value of the process and the stdout content.
-        """
-        fd, tempfn = tempfile.mkstemp(prefix='p2p-output')
-        ret = utils.run(
-                P2P_CLIENT, args=['-v=1'] + list(args), timeout=timeout,
-                ignore_timeout=True, ignore_status=True,
-                stdout_tee=open(tempfn, 'w'), stderr_tee=sys.stdout)
-        url = os.fdopen(fd).read()
-        os.unlink(tempfn)
-
-        if not ignore_status and ret is None:
-            self._join_simulator()
-            raise error.TestFail('p2p-client %s timeout.' % ' '.join(args))
-
-        if not ignore_status and ret.exit_status != 0:
-            self._join_simulator()
-            raise error.TestFail('p2p-client %s finished with value: %d' % (
-                                 ' '.join(args), ret.exit_status))
-
-        return None if ret is None else ret.exit_status, url
-
-
-    def _join_simulator(self):
-        """Stops the simulator and logs any exception generated there."""
-        self._sim.stop()
-        self._sim.join()
-        if self._sim.error:
-            logging.error('SimulatorThread exception: %r', self._sim.error)
-            logging.error(self._sim.traceback)
-
-
-    def run_once(self):
-        from lansim import simulator, host
-
-        # Setup the environment where avahi-daemon runs during the test.
-        self._setup_avahi()
-
-        self._sim = simulator.SimulatorThread(self._tap)
-        # Create three peers host-a, host-b and host-c sharing a set of files.
-        # This first block creates the fake hosts on the simulator. For clarity
-        # and easier debugging, note that the last octet of the IPv4 address is
-        # the ASCII code for a, b and c respectively.
-        peer_a = host.SimpleHost(self._sim, '94:EB:2C:00:00:61',
-                                 '169.254.10.97')
-        peer_b = host.SimpleHost(self._sim, '94:EB:2C:00:00:62',
-                                 '169.254.10.98')
-        peer_c = host.SimpleHost(self._sim, '94:EB:2C:00:00:63',
-                                 '169.254.10.99')
-
-        # Run a userspace implementation of avahi + p2p-server on the fake
-        # hosts. This announces the P2P service on each fake host.
-        zero_a = zeroconf.ZeroconfDaemon(peer_a, 'host-a')
-        zero_b = zeroconf.ZeroconfDaemon(peer_b, 'host-b')
-        zero_c = zeroconf.ZeroconfDaemon(peer_c, 'host-c')
-
-        cros_a = cros_p2p.CrosP2PDaemon(zero_a)
-        cros_b = cros_p2p.CrosP2PDaemon(zero_b)
-        cros_c = cros_p2p.CrosP2PDaemon(zero_c)
-
-        # Add files to each host. All three hosts share the file "everyone"
-        # with different sizes, used to test the minimum-size argument.
-        # host-a and host-b each share another file, only-a and only-b
-        # respectively, used to check that p2p-client picks the right peer.
-        cros_a.add_file('everyone', 1000)
-        cros_b.add_file('everyone', 10000)
-        cros_c.add_file('everyone', 20000)
-
-        cros_a.add_file('only-a', 5000)
-
-        cros_b.add_file('only-b', 8000)
-
-        # Initially set the number of connections on the network to a low value
-        # (two) that will later be increased to test that p2p-client hangs when
-        # there are too many connections.
-        cros_a.set_num_connections(1)
-        cros_c.set_num_connections(1)
-
-        self._sim.start()
-
-        ### Request a file shared from only one peer.
-        _ret, url = self._run_p2p_client(
-                args=('--get-url=only-a',), timeout=10.)
-
-        if url.strip() != 'http://169.254.10.97:16725/only-a':
-            self._join_simulator()
-            raise error.TestFail('Received unknown url: "%s"' % url)
-
-        ### Check that the num_connections is reported properly.
-        _ret, conns = self._run_p2p_client(args=('--num-connections',),
-                                          timeout=10.)
-        if conns.strip() != '2':
-            self._join_simulator()
-            raise error.TestFail('Wrong number of connections reported: %s' %
-                                 conns)
-
-        ### Request a file shared from a peer with enough of the file.
-        _ret, url = self._run_p2p_client(
-                args=('--get-url=everyone', '--minimum-size=15000'),
-                timeout=10.)
-
-        if url.strip() != 'http://169.254.10.99:16725/everyone':
-            self._join_simulator()
-            raise error.TestFail('Received unknown url: "%s"' % url)
-
-        ### Request too many bytes of an existing file.
-        ret, url = self._run_p2p_client(
-                args=('--get-url=only-b', '--minimum-size=10000'),
-                timeout=10., ignore_status=True)
-
-        if url:
-            self._join_simulator()
-            raise error.TestFail('Received url but expected none: "%s"' % url)
-        if ret == 0:
-            raise error.TestFail('p2p-client returned no URL, but without an '
-                                 'error.')
-
-        ### Check that p2p-client hangs while waiting for a peer when there are
-        ### too many connections.
-        self._sim.run_on_simulator(
-                lambda: cros_a.set_num_connections(99, announce=True))
-
-        # Repeat the query on the DUT until the new information is received.
-        for attempt in range(5):
-            _ret, conns = self._run_p2p_client(args=('--num-connections',),
-                                               timeout=10.)
-            conns = conns.strip()
-            if conns == '100':
-                break
-        if conns != '100':
-            self._join_simulator()
-            raise error.TestFail("p2p-client --num-connections doesn't reflect "
-                                 "the current number of connections on the "
-                                 "network, returned %s" % conns)
-
-        ret, url = self._run_p2p_client(
-                args=('--get-url=only-b',), timeout=5., ignore_status=True)
-        if not ret is None:
-            self._join_simulator()
-            raise error.TestFail('p2p-client finished but should have waited '
-                                 'for num_connections to drop.')
-
-        self._sim.stop()
-        self._sim.join()
-
-        if self._sim.error:
-            raise error.TestError('SimulatorThread ended with an exception: %r'
-                                  % self._sim.error)
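For reference, the check the deleted test performed against p2p-client can be reproduced with the standard library alone. A minimal sketch, assuming /usr/sbin/p2p-client is present on the device and some peer shares the requested file; the file name 'only-a' is purely illustrative.

# Minimal sketch: ask p2p-client for the URL of a shared file and validate it.
# Assumes /usr/sbin/p2p-client exists (as on a ChromeOS image); 'only-a' is an
# illustrative file name, not part of any real deployment.
import subprocess

P2P_CLIENT = '/usr/sbin/p2p-client'

def get_url(filename, minimum_size=None, timeout=10):
    """Return the URL printed by p2p-client, or None on timeout/error."""
    args = [P2P_CLIENT, '-v=1', '--get-url=%s' % filename]
    if minimum_size is not None:
        args.append('--minimum-size=%d' % minimum_size)
    try:
        proc = subprocess.run(args, capture_output=True, text=True,
                              timeout=timeout, check=True)
    except (OSError, subprocess.TimeoutExpired, subprocess.CalledProcessError):
        return None
    return proc.stdout.strip() or None

if __name__ == '__main__':
    print('peer URL:', get_url('only-a'))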
diff --git a/client/site_tests/performance_Tracker/control b/client/site_tests/performance_Tracker/control
deleted file mode 100644
index d592121..0000000
--- a/client/site_tests/performance_Tracker/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "rohitbm"
-NAME = "performance_Tracker"
-PURPOSE = "Records cpu and memory usage for a manually scheduled test."
-TIME = "SHORT"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "performance"
-TEST_TYPE = "client"
-
-DOC = """
-This test is intended to be used by any functional test which wants to
-measure cpu/memory usage. This test runs forever until the exit flag is seen.
-"""
-
-job.run_test("performance_Tracker")
diff --git a/client/site_tests/performance_Tracker/performance_Tracker.py b/client/site_tests/performance_Tracker/performance_Tracker.py
deleted file mode 100644
index 293b2fc..0000000
--- a/client/site_tests/performance_Tracker/performance_Tracker.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import csv
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-
-# Measurement duration [seconds] for one iteration.
-MEASUREMENT_DURATION = 10
-
-TERMINATE_PATH = "/tmp/terminate"
-
-# Time for initial test setup [seconds].
-STABILIZATION_DURATION = 60
-
-PERF_RESULT_FILE = '/tmp/perf.csv'
-
-class performance_Tracker(test.test):
-    """Monitors cpu/memory usage."""
-
-    version = 1
-
-    def get_cpu_usage(self):
-        """Computes current cpu usage in percentage.
-
-        @returns percentage cpu used as a float.
-
-        """
-        cpu_usage_start = utils.get_cpu_usage()
-        time.sleep(MEASUREMENT_DURATION)
-        cpu_usage_end = utils.get_cpu_usage()
-        return utils.compute_active_cpu_time(cpu_usage_start,
-                                                      cpu_usage_end) * 100
-
-
-    def used_mem(self):
-        """Computes used memory in percentage.
-
-        @returns percentage memory used as a float.
-
-        """
-        total_memory = utils.get_mem_total()
-        return (total_memory - utils.get_mem_free()) * 100 / total_memory
-
-
-    def run_once(self):
-        if os.path.isfile(TERMINATE_PATH):
-            os.remove(TERMINATE_PATH)
-
-        time.sleep(STABILIZATION_DURATION)
-        perf_keyval = {}
-        perf_file = open(PERF_RESULT_FILE, 'w')
-        writer = csv.writer(perf_file)
-        writer.writerow(['cpu', 'memory'])
-        while True:
-            # This test runs forever until the terminate file is created.
-            if os.path.isfile(TERMINATE_PATH):
-                logging.info('Exit flag detected; exiting.')
-                perf_file.close()
-                return
-            perf_keyval['cpu_usage'] = self.get_cpu_usage()
-            perf_keyval['memory_usage'] = self.used_mem()
-            writer.writerow([perf_keyval['cpu_usage'],
-                            perf_keyval['memory_usage']])
-            self.write_perf_keyval(perf_keyval)
-            time.sleep(MEASUREMENT_DURATION)
-        perf_file.close()
-
-
-    def cleanup(self):
-        # cleanup() is run by common_lib/test.py.
-        if os.path.isfile(TERMINATE_PATH):
-            os.remove(TERMINATE_PATH)
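The deleted test leaned on autotest's utils.get_cpu_usage() and compute_active_cpu_time(); the same measurement can be sketched directly from /proc/stat. A minimal sketch, assuming the usual Linux /proc/stat layout (first line: cpu user nice system idle iowait ...).

# Minimal sketch: percentage of non-idle CPU time between two /proc/stat samples.
# Assumes the standard Linux /proc/stat first line layout.
import time

def _cpu_times():
    with open('/proc/stat') as f:
        fields = [float(v) for v in f.readline().split()[1:]]
    idle = fields[3] + fields[4]  # idle + iowait count as inactive time
    return sum(fields), idle

def cpu_usage_percent(interval=10):
    total0, idle0 = _cpu_times()
    time.sleep(interval)
    total1, idle1 = _cpu_times()
    delta_total = total1 - total0
    delta_busy = delta_total - (idle1 - idle0)
    return 100.0 * delta_busy / delta_total if delta_total else 0.0

if __name__ == '__main__':
    print('cpu usage: %.1f%%' % cpu_usage_percent(interval=1))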
diff --git a/client/site_tests/platform_AccurateTime/control b/client/site_tests/platform_AccurateTime/control
deleted file mode 100644
index 5030f2b..0000000
--- a/client/site_tests/platform_AccurateTime/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_AccurateTime"
-PURPOSE = "Ensure system time is set correctly."
-CRITERIA = """
-This test will fail under any of the following conditions:
-  - NTP daemon is not running
-  - the NTP offset is not found
-  - if seconds_offset constraint is exceeded
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This test verifies that the system time is set within the allowable drift
-from the configured NTP server's time.
-"""
-
-job.run_test('platform_AccurateTime', constraints=['seconds_offset < 60'])
diff --git a/client/site_tests/platform_AccurateTime/platform_AccurateTime.py b/client/site_tests/platform_AccurateTime/platform_AccurateTime.py
deleted file mode 100644
index ff1030a..0000000
--- a/client/site_tests/platform_AccurateTime/platform_AccurateTime.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, math, re
-import subprocess
-import time
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import cros_logging
-
-OPENSSL = '/usr/bin/openssl'
-TLSDATE = '/usr/bin/tlsdate'
-
-class platform_AccurateTime(test.test):
-    version = 1
-
-    def serve(self):
-        self.ca = '%s/ca.pem' % self.srcdir
-        self.cert = '%s/cert.pem' % self.srcdir
-        self.key = '%s/cert.key' % self.srcdir
-        self.server = subprocess.Popen([OPENSSL, 's_server', '-www',
-                                        '-CAfile', self.ca, '-cert', self.cert,
-                                        '-key', self.key, '-port', '4433'])
-        time.sleep(1)
-
-    def tlsdate(self):
-        proc = subprocess.Popen([TLSDATE, '-H', '127.0.0.1', '-p', '4433',
-                                 '-C', self.srcdir,
-                                 '-nv'], stdout=subprocess.PIPE,
-                                 stderr=subprocess.PIPE)
-        (out,err) = proc.communicate()
-        return err
-
-    def run_once(self):
-        self.serve()
-        out = self.tlsdate()
-        print out
-        try:
-            if 'verification passed' not in out:
-                raise error.TestFail('ssl did not verify')
-            if 'difference is about' not in out:
-                raise error.TestFail('no time delta found')
-        finally:
-            self.server.terminate()
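The flow above (serve a local TLS endpoint with openssl, then ask tlsdate to verify it and report the offset) can be expressed with subprocess directly. A minimal sketch, assuming /usr/bin/openssl and /usr/bin/tlsdate are installed and that ca.pem, cert.pem and cert.key sit in srcdir, as they did in this test's src/ directory.

# Minimal sketch: start a local `openssl s_server`, point tlsdate at it, and
# check tlsdate's stderr for the strings the deleted test looked for.
import subprocess
import time

def tlsdate_against_local_server(srcdir='.', port='4433'):
    server = subprocess.Popen(
        ['/usr/bin/openssl', 's_server', '-www',
         '-CAfile', '%s/ca.pem' % srcdir,
         '-cert', '%s/cert.pem' % srcdir,
         '-key', '%s/cert.key' % srcdir, '-port', port])
    time.sleep(1)  # give s_server a moment to start listening
    try:
        proc = subprocess.run(
            ['/usr/bin/tlsdate', '-H', '127.0.0.1', '-p', port,
             '-C', srcdir, '-nv'], capture_output=True, text=True)
        err = proc.stderr
        return 'verification passed' in err and 'difference is about' in err
    finally:
        server.terminate()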
diff --git a/client/site_tests/platform_AccurateTime/src/ca.crt b/client/site_tests/platform_AccurateTime/src/ca.crt
deleted file mode 100644
index 254a348..0000000
--- a/client/site_tests/platform_AccurateTime/src/ca.crt
+++ /dev/null
@@ -1,21 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDjDCCAvWgAwIBAgIJAP6IycaMXlqqMA0GCSqGSIb3DQEBBQUAMIGLMQswCQYD
-VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTETMBEGA1UEChMKR29vZ2xlIElu
-YzESMBAGA1UECxMJQ2hyb21lIE9TMRgwFgYDVQQDEw9PcGVuU1NMIFRlc3QgQ0Ex
-JDAiBgkqhkiG9w0BCQEWFXNlY3VyaXR5QGNocm9taXVtLm9yZzAgFw0xMjA1MTcx
-OTMwMjFaGA8yMTEyMDQyMzE5MzAyMVowgYsxCzAJBgNVBAYTAlVTMRMwEQYDVQQI
-EwpDYWxpZm9ybmlhMRMwEQYDVQQKEwpHb29nbGUgSW5jMRIwEAYDVQQLEwlDaHJv
-bWUgT1MxGDAWBgNVBAMTD09wZW5TU0wgVGVzdCBDQTEkMCIGCSqGSIb3DQEJARYV
-c2VjdXJpdHlAY2hyb21pdW0ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
-gQDb+iDa/X6XNljxiOvplzKngIV6N8Ke9BCnrgmKANldzcHVNXVrI3SqQOReiy8U
-mWZBmo6TDBXWclfiW19B4qlSEGiF2frDxLxeLyx3/zw33bkxUEyTQkhfbN75h5lP
-rw2LyGaKTnSxPCiSwMfchw2vm9lZwo3Ih/6nEZbbmdL0LQIDAQABo4HzMIHwMB0G
-A1UdDgQWBBQLQTzz2YPtufZ9dwS0Dce9SKZX7TCBwAYDVR0jBIG4MIG1gBQLQTzz
-2YPtufZ9dwS0Dce9SKZX7aGBkaSBjjCBizELMAkGA1UEBhMCVVMxEzARBgNVBAgT
-CkNhbGlmb3JuaWExEzARBgNVBAoTCkdvb2dsZSBJbmMxEjAQBgNVBAsTCUNocm9t
-ZSBPUzEYMBYGA1UEAxMPT3BlblNTTCBUZXN0IENBMSQwIgYJKoZIhvcNAQkBFhVz
-ZWN1cml0eUBjaHJvbWl1bS5vcmeCCQD+iMnGjF5aqjAMBgNVHRMEBTADAQH/MA0G
-CSqGSIb3DQEBBQUAA4GBAKXO7qxV8z6d+IeXakcpXr4id+S0a9ADXyRsz2Mj4w0O
-tLAP0/7HcAsA5tC+WpQCr4aRpAyZNq/bx73bV0OKLnsOp9/8BE8ai52miMiU7V/U
-QXP648prYLoFVOELkwkY60L+RMdVZ2p2jTqpMeBW6TgkTWRZ10RwAI4u/TmqBbKk
------END CERTIFICATE-----
diff --git a/client/site_tests/platform_AccurateTime/src/ca.csr b/client/site_tests/platform_AccurateTime/src/ca.csr
deleted file mode 100644
index 516198b..0000000
--- a/client/site_tests/platform_AccurateTime/src/ca.csr
+++ /dev/null
@@ -1,13 +0,0 @@
------BEGIN CERTIFICATE REQUEST-----
-MIIB5DCCAU0CAQAwgaMxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlh
-MRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRMwEQYDVQQKEwpHb29nbGUgSW5jMRIw
-EAYDVQQLEwlDaHJvbWUgT1MxGDAWBgNVBAMTD09wZW5TU0wgVGVzdCBDQTEkMCIG
-CSqGSIb3DQEJARYVc2VjdXJpdHlAY2hyb21pdW0ub3JnMIGfMA0GCSqGSIb3DQEB
-AQUAA4GNADCBiQKBgQDb+iDa/X6XNljxiOvplzKngIV6N8Ke9BCnrgmKANldzcHV
-NXVrI3SqQOReiy8UmWZBmo6TDBXWclfiW19B4qlSEGiF2frDxLxeLyx3/zw33bkx
-UEyTQkhfbN75h5lPrw2LyGaKTnSxPCiSwMfchw2vm9lZwo3Ih/6nEZbbmdL0LQID
-AQABoAAwDQYJKoZIhvcNAQEFBQADgYEAiDjCWgqxIOK0qVJP9f2v+y5HOPTB9j0v
-pcsRcA1B6fqSLNQF6yDcbbyFp+nMvHcOozPpd2/VKWBcD90qUkabqh64AzWlnWZJ
-Ra0HOJ4lJHKidygNxfL2O50oLkF1U1QhODLk6SQTgMuZU06r25Z5alqzkDe8H2He
-fyEE9+YvfYQ=
------END CERTIFICATE REQUEST-----
diff --git a/client/site_tests/platform_AccurateTime/src/ca.key b/client/site_tests/platform_AccurateTime/src/ca.key
deleted file mode 100644
index 7171fbb..0000000
--- a/client/site_tests/platform_AccurateTime/src/ca.key
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIICXQIBAAKBgQDb+iDa/X6XNljxiOvplzKngIV6N8Ke9BCnrgmKANldzcHVNXVr
-I3SqQOReiy8UmWZBmo6TDBXWclfiW19B4qlSEGiF2frDxLxeLyx3/zw33bkxUEyT
-QkhfbN75h5lPrw2LyGaKTnSxPCiSwMfchw2vm9lZwo3Ih/6nEZbbmdL0LQIDAQAB
-AoGBAKb6OARjFbuMenllmMXoin/RNACw4N+tnhLaLc1+lYD51Z+VA7s+36j4tyD4
-/9TgGFnGC2Gdc9ojU4i0GRu3BRSor9zPoXNEWkC4QgxKTh68R62yodOpxUGmdGL0
-+7tWqz6tiCEkq2CKPbqBvBVjOWRRXigwYRk7URYkX6tTjIXhAkEA7p8FMAS40L1E
-Csx+9jSYNbOy7k+ntYzqLLBxBozoFdtznvhHUmrttI7ONkNPlSxdIjVaxgXs0CJE
-mItbdIlRJQJBAOv/f1a+P/08cUmiJzbJR3jEtdcMIAlIQbGsHXL7FNprMWt0k9hj
-i6/UvMyPGr4LlXO8VCvU5C5p50zOLpHMPGkCQFnNTs57eHJWSD0oKL/Pm1jagARf
-xeX1WkvnOyIT9WQEasbw+SZzR3IkLEcPneut/n7ZRYgwwqaxvs8J4ylAZGECQQCG
-+dNv3lIk2SwGLDr3Z7l8Bi3md+XeyTfhkYoqWBZ+Me7LAZsJmS2sfCUbbgGijmeF
-EUKm6xUywplq4UXbV495AkBivVBcDuYqjjdZjt7IYqbMYO3Jixa/n/gbuug/mhk5
-yM1LD6gjjFlRzsy88A2c9Q46EMdxQvfqgS5TcbXaHYgU
------END RSA PRIVATE KEY-----
diff --git a/client/site_tests/platform_AccurateTime/src/ca.pem b/client/site_tests/platform_AccurateTime/src/ca.pem
deleted file mode 100644
index 5b0ad69..0000000
--- a/client/site_tests/platform_AccurateTime/src/ca.pem
+++ /dev/null
@@ -1,65 +0,0 @@
-Certificate:
-    Data:
-        Version: 3 (0x2)
-        Serial Number:
-            fe:88:c9:c6:8c:5e:5a:aa
-        Signature Algorithm: sha1WithRSAEncryption
-        Issuer: C=US, ST=California, O=Google Inc, OU=Chrome OS, CN=OpenSSL Test CA/emailAddress=security@chromium.org
-        Validity
-            Not Before: May 17 19:30:21 2012 GMT
-            Not After : Apr 23 19:30:21 2112 GMT
-        Subject: C=US, ST=California, O=Google Inc, OU=Chrome OS, CN=OpenSSL Test CA/emailAddress=security@chromium.org
-        Subject Public Key Info:
-            Public Key Algorithm: rsaEncryption
-            RSA Public Key: (1024 bit)
-                Modulus (1024 bit):
-                    00:db:fa:20:da:fd:7e:97:36:58:f1:88:eb:e9:97:
-                    32:a7:80:85:7a:37:c2:9e:f4:10:a7:ae:09:8a:00:
-                    d9:5d:cd:c1:d5:35:75:6b:23:74:aa:40:e4:5e:8b:
-                    2f:14:99:66:41:9a:8e:93:0c:15:d6:72:57:e2:5b:
-                    5f:41:e2:a9:52:10:68:85:d9:fa:c3:c4:bc:5e:2f:
-                    2c:77:ff:3c:37:dd:b9:31:50:4c:93:42:48:5f:6c:
-                    de:f9:87:99:4f:af:0d:8b:c8:66:8a:4e:74:b1:3c:
-                    28:92:c0:c7:dc:87:0d:af:9b:d9:59:c2:8d:c8:87:
-                    fe:a7:11:96:db:99:d2:f4:2d
-                Exponent: 65537 (0x10001)
-        X509v3 extensions:
-            X509v3 Subject Key Identifier: 
-                0B:41:3C:F3:D9:83:ED:B9:F6:7D:77:04:B4:0D:C7:BD:48:A6:57:ED
-            X509v3 Authority Key Identifier: 
-                keyid:0B:41:3C:F3:D9:83:ED:B9:F6:7D:77:04:B4:0D:C7:BD:48:A6:57:ED
-                DirName:/C=US/ST=California/O=Google Inc/OU=Chrome OS/CN=OpenSSL Test CA/emailAddress=security@chromium.org
-                serial:FE:88:C9:C6:8C:5E:5A:AA
-
-            X509v3 Basic Constraints: 
-                CA:TRUE
-    Signature Algorithm: sha1WithRSAEncryption
-        a5:ce:ee:ac:55:f3:3e:9d:f8:87:97:6a:47:29:5e:be:22:77:
-        e4:b4:6b:d0:03:5f:24:6c:cf:63:23:e3:0d:0e:b4:b0:0f:d3:
-        fe:c7:70:0b:00:e6:d0:be:5a:94:02:af:86:91:a4:0c:99:36:
-        af:db:c7:bd:db:57:43:8a:2e:7b:0e:a7:df:fc:04:4f:1a:8b:
-        9d:a6:88:c8:94:ed:5f:d4:41:73:fa:e3:ca:6b:60:ba:05:54:
-        e1:0b:93:09:18:eb:42:fe:44:c7:55:67:6a:76:8d:3a:a9:31:
-        e0:56:e9:38:24:4d:64:59:d7:44:70:00:8e:2e:fd:39:aa:05:
-        b2:a4
------BEGIN CERTIFICATE-----
-MIIDjDCCAvWgAwIBAgIJAP6IycaMXlqqMA0GCSqGSIb3DQEBBQUAMIGLMQswCQYD
-VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTETMBEGA1UEChMKR29vZ2xlIElu
-YzESMBAGA1UECxMJQ2hyb21lIE9TMRgwFgYDVQQDEw9PcGVuU1NMIFRlc3QgQ0Ex
-JDAiBgkqhkiG9w0BCQEWFXNlY3VyaXR5QGNocm9taXVtLm9yZzAgFw0xMjA1MTcx
-OTMwMjFaGA8yMTEyMDQyMzE5MzAyMVowgYsxCzAJBgNVBAYTAlVTMRMwEQYDVQQI
-EwpDYWxpZm9ybmlhMRMwEQYDVQQKEwpHb29nbGUgSW5jMRIwEAYDVQQLEwlDaHJv
-bWUgT1MxGDAWBgNVBAMTD09wZW5TU0wgVGVzdCBDQTEkMCIGCSqGSIb3DQEJARYV
-c2VjdXJpdHlAY2hyb21pdW0ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
-gQDb+iDa/X6XNljxiOvplzKngIV6N8Ke9BCnrgmKANldzcHVNXVrI3SqQOReiy8U
-mWZBmo6TDBXWclfiW19B4qlSEGiF2frDxLxeLyx3/zw33bkxUEyTQkhfbN75h5lP
-rw2LyGaKTnSxPCiSwMfchw2vm9lZwo3Ih/6nEZbbmdL0LQIDAQABo4HzMIHwMB0G
-A1UdDgQWBBQLQTzz2YPtufZ9dwS0Dce9SKZX7TCBwAYDVR0jBIG4MIG1gBQLQTzz
-2YPtufZ9dwS0Dce9SKZX7aGBkaSBjjCBizELMAkGA1UEBhMCVVMxEzARBgNVBAgT
-CkNhbGlmb3JuaWExEzARBgNVBAoTCkdvb2dsZSBJbmMxEjAQBgNVBAsTCUNocm9t
-ZSBPUzEYMBYGA1UEAxMPT3BlblNTTCBUZXN0IENBMSQwIgYJKoZIhvcNAQkBFhVz
-ZWN1cml0eUBjaHJvbWl1bS5vcmeCCQD+iMnGjF5aqjAMBgNVHRMEBTADAQH/MA0G
-CSqGSIb3DQEBBQUAA4GBAKXO7qxV8z6d+IeXakcpXr4id+S0a9ADXyRsz2Mj4w0O
-tLAP0/7HcAsA5tC+WpQCr4aRpAyZNq/bx73bV0OKLnsOp9/8BE8ai52miMiU7V/U
-QXP648prYLoFVOELkwkY60L+RMdVZ2p2jTqpMeBW6TgkTWRZ10RwAI4u/TmqBbKk
------END CERTIFICATE-----
diff --git a/client/site_tests/platform_AccurateTime/src/cert.csr b/client/site_tests/platform_AccurateTime/src/cert.csr
deleted file mode 100644
index 1b22e38..0000000
--- a/client/site_tests/platform_AccurateTime/src/cert.csr
+++ /dev/null
@@ -1,13 +0,0 @@
------BEGIN CERTIFICATE REQUEST-----
-MIIB3jCCAUcCAQAwgZ0xCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlh
-MRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRMwEQYDVQQKEwpHb29nbGUgSW5jMRIw
-EAYDVQQLEwlDaHJvbWUgT1MxEjAQBgNVBAMTCTEyNy4wLjAuMTEkMCIGCSqGSIb3
-DQEJARYVc2VjdXJpdHlAY2hyb21pdW0ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQC5bxzyvNJFDmyThIGoFoZkN3rlQB8QoR80rS1u8pLyqW5Vk2A0pNOv
-cxPrUHAUTgWhikqzymz4a4XoLxat53H/t/XmRYwZ9GVNZoczQ4naWxtPyPqIBosM
-LnWu6FHUVO1lTdvhC6Pjw2i1S9Rq3dMsANU1IER4NR8XM+v6qBg1XQIDAQABoAAw
-DQYJKoZIhvcNAQEFBQADgYEAo/+1dblmxhfye7FQ2WBWTsHMU5rrvXK1uWj42Ugt
-fetQbLlrO0/dTW993ZYp4QqH3uGGp0ZHFxCOGUJAduqkQ7Hk5x/zRRL2kBxvgScA
-Tb0T70rgJiUYBFEY1c9FjLIIGwLe+JUntoDn1xdPiAvtGRUE5cINqyGN0ukd0cpN
-ObA=
------END CERTIFICATE REQUEST-----
diff --git a/client/site_tests/platform_AccurateTime/src/cert.key b/client/site_tests/platform_AccurateTime/src/cert.key
deleted file mode 100644
index 4192309..0000000
--- a/client/site_tests/platform_AccurateTime/src/cert.key
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIICXAIBAAKBgQC5bxzyvNJFDmyThIGoFoZkN3rlQB8QoR80rS1u8pLyqW5Vk2A0
-pNOvcxPrUHAUTgWhikqzymz4a4XoLxat53H/t/XmRYwZ9GVNZoczQ4naWxtPyPqI
-BosMLnWu6FHUVO1lTdvhC6Pjw2i1S9Rq3dMsANU1IER4NR8XM+v6qBg1XQIDAQAB
-AoGATJUBBZbJ2E8+tCXTTlIPKrBnBurWaY3BT4X974Yq/iDPOnRlFRjFe/4wycws
-nZYjaTWt6Fa/blEpulxIJ4YgY+HdcsTxXsVUL0qnVpTtNtFkaymEHaH0A+6PHd0d
-BQKI5LP98H99bgQo7l59IMUjgGIG9koL3HTFynfFqUcbHZkCQQDduglnOrF1RR0d
-xVjyt6NDwNfEcyv4Trig7LaPyZiYsHCxHkGNk5s1LJ6C+pl5z6nlf03E5lfGMiv6
-00kk0rrfAkEA1hjvvl3ITZyUQEumQR9IyqJnY/hu0gIVN4LC7D6W4VqHkWJ1bVKv
-j6AnSRK5+CEKsKHtUzaRK8TNvGpFdAVTQwJAaf5B+oAbTwUQCyhJAhZ5TK1GOBoa
-feQb7IncJ9ZmRS+rX3AoRCa4vm2rBNAJSCrWe+gkZCj88rkPPyN7xseaHwJBAK17
-J0ip1VSOO9Kdeenq36DSV9S5yZKB/Y8tdaTYUonoQVGc5mnfRkKrEyoy8l4M6AyY
-KSX0YU2F+u9hbUXFyuECQGmRPKHs5TRLI2qRarhJvKbWL5x2FzWGiJvjYHuS3jF9
-ZklH3ITWPQ3KFa/IK/a3BnGNapFO8M9UWJafJVE6Rp0=
------END RSA PRIVATE KEY-----
diff --git a/client/site_tests/platform_AccurateTime/src/cert.pem b/client/site_tests/platform_AccurateTime/src/cert.pem
deleted file mode 100644
index d0a7571..0000000
--- a/client/site_tests/platform_AccurateTime/src/cert.pem
+++ /dev/null
@@ -1,63 +0,0 @@
-Certificate:
-    Data:
-        Version: 3 (0x2)
-        Serial Number:
-            fe:88:c9:c6:8c:5e:5a:ac
-        Signature Algorithm: sha1WithRSAEncryption
-        Issuer: C=US, ST=California, O=Google Inc, OU=Chrome OS, CN=OpenSSL Test CA/emailAddress=security@chromium.org
-        Validity
-            Not Before: May 17 19:45:22 2012 GMT
-            Not After : Jan 14 19:45:22 2112 GMT
-        Subject: C=US, ST=California, L=Mountain View, O=Google Inc, OU=Chrome OS, CN=127.0.0.1/emailAddress=security@chromium.org
-        Subject Public Key Info:
-            Public Key Algorithm: rsaEncryption
-            RSA Public Key: (1024 bit)
-                Modulus (1024 bit):
-                    00:b9:6f:1c:f2:bc:d2:45:0e:6c:93:84:81:a8:16:
-                    86:64:37:7a:e5:40:1f:10:a1:1f:34:ad:2d:6e:f2:
-                    92:f2:a9:6e:55:93:60:34:a4:d3:af:73:13:eb:50:
-                    70:14:4e:05:a1:8a:4a:b3:ca:6c:f8:6b:85:e8:2f:
-                    16:ad:e7:71:ff:b7:f5:e6:45:8c:19:f4:65:4d:66:
-                    87:33:43:89:da:5b:1b:4f:c8:fa:88:06:8b:0c:2e:
-                    75:ae:e8:51:d4:54:ed:65:4d:db:e1:0b:a3:e3:c3:
-                    68:b5:4b:d4:6a:dd:d3:2c:00:d5:35:20:44:78:35:
-                    1f:17:33:eb:fa:a8:18:35:5d
-                Exponent: 65537 (0x10001)
-        X509v3 extensions:
-            X509v3 Basic Constraints: 
-                CA:FALSE
-            Netscape Comment: 
-                OpenSSL Generated Certificate
-            X509v3 Subject Key Identifier: 
-                F9:B8:AA:CB:9B:5B:44:69:54:34:BB:04:B6:39:B2:EE:FD:5E:62:AA
-            X509v3 Authority Key Identifier: 
-                keyid:0B:41:3C:F3:D9:83:ED:B9:F6:7D:77:04:B4:0D:C7:BD:48:A6:57:ED
-
-    Signature Algorithm: sha1WithRSAEncryption
-        d9:f2:71:a4:6c:03:6e:75:05:e5:9d:e6:bd:9f:35:fe:99:3c:
-        7d:54:1d:26:f5:79:53:dd:c4:03:6f:55:13:54:9a:14:0e:52:
-        e6:a6:12:cc:fd:54:7b:43:17:f9:1a:8c:2c:a8:e7:1f:e1:3a:
-        c8:4a:28:fa:46:03:f6:bc:da:82:7b:ce:45:e7:db:25:e3:3d:
-        d9:28:87:80:73:de:4a:6f:05:d6:74:2d:19:c9:63:a7:c8:b3:
-        2c:19:5c:2a:03:c7:c1:02:ca:c4:42:e6:6d:2f:f1:be:de:1c:
-        c8:76:8b:48:8c:b9:d2:52:eb:74:97:8a:b5:78:ea:ab:83:db:
-        1f:76
------BEGIN CERTIFICATE-----
-MIIDJTCCAo6gAwIBAgIJAP6IycaMXlqsMA0GCSqGSIb3DQEBBQUAMIGLMQswCQYD
-VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTETMBEGA1UEChMKR29vZ2xlIElu
-YzESMBAGA1UECxMJQ2hyb21lIE9TMRgwFgYDVQQDEw9PcGVuU1NMIFRlc3QgQ0Ex
-JDAiBgkqhkiG9w0BCQEWFXNlY3VyaXR5QGNocm9taXVtLm9yZzAgFw0xMjA1MTcx
-OTQ1MjJaGA8yMTEyMDExNDE5NDUyMlowgZ0xCzAJBgNVBAYTAlVTMRMwEQYDVQQI
-EwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRMwEQYDVQQKEwpH
-b29nbGUgSW5jMRIwEAYDVQQLEwlDaHJvbWUgT1MxEjAQBgNVBAMTCTEyNy4wLjAu
-MTEkMCIGCSqGSIb3DQEJARYVc2VjdXJpdHlAY2hyb21pdW0ub3JnMIGfMA0GCSqG
-SIb3DQEBAQUAA4GNADCBiQKBgQC5bxzyvNJFDmyThIGoFoZkN3rlQB8QoR80rS1u
-8pLyqW5Vk2A0pNOvcxPrUHAUTgWhikqzymz4a4XoLxat53H/t/XmRYwZ9GVNZocz
-Q4naWxtPyPqIBosMLnWu6FHUVO1lTdvhC6Pjw2i1S9Rq3dMsANU1IER4NR8XM+v6
-qBg1XQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NM
-IEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU+biqy5tbRGlUNLsEtjmy
-7v1eYqowHwYDVR0jBBgwFoAUC0E889mD7bn2fXcEtA3HvUimV+0wDQYJKoZIhvcN
-AQEFBQADgYEA2fJxpGwDbnUF5Z3mvZ81/pk8fVQdJvV5U93EA29VE1SaFA5S5qYS
-zP1Ue0MX+RqMLKjnH+E6yEoo+kYD9rzagnvORefbJeM92SiHgHPeSm8F1nQtGclj
-p8izLBlcKgPHwQLKxELmbS/xvt4cyHaLSIy50lLrdJeKtXjqq4PbH3Y=
------END CERTIFICATE-----
diff --git a/client/site_tests/platform_AccurateTime/src/fc92dea4.0 b/client/site_tests/platform_AccurateTime/src/fc92dea4.0
deleted file mode 120000
index a74ccf5..0000000
--- a/client/site_tests/platform_AccurateTime/src/fc92dea4.0
+++ /dev/null
@@ -1 +0,0 @@
-ca.crt
\ No newline at end of file
diff --git a/client/site_tests/platform_AesThroughput/control b/client/site_tests/platform_AesThroughput/control
deleted file mode 100644
index 5477d01..0000000
--- a/client/site_tests/platform_AesThroughput/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "platform_AesThroughput"
-AUTHOR = "The Chromium OS Authors"
-PURPOSE = "Benchmark processor performance using OpenSSL with AES options."
-CRITERIA = """
-This test is a benchmark.
-
-This test will fail if the output of openssl cannot be parsed.
-"""
-ATTRIBUTES = "suite:hwqual, suite:kernel_per-build_benchmarks"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This test uses the openssl program with AES options.
-"""
-job.run_test('platform_AesThroughput')
diff --git a/client/site_tests/platform_AesThroughput/platform_AesThroughput.py b/client/site_tests/platform_AesThroughput/platform_AesThroughput.py
deleted file mode 100644
index 0703063..0000000
--- a/client/site_tests/platform_AesThroughput/platform_AesThroughput.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os, re
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils as bin_utils
-from autotest_lib.client.common_lib import error, utils
-
-class platform_AesThroughput(test.test):
-    version = 1
-
-
-    def setup(self):
-        self.results = {'bytes_per_sec_ideal_min' : 20 * 1024 * 1024}
-
-
-    def run_once(self):
-        num_cpus = bin_utils.count_cpus()
-        logging.debug('Running using all cpus: %d' % num_cpus)
-        results = self.openssl_speed('aes-256-cbc', '-multi %d' % num_cpus)
-        parsed = self.parse_results(results)
-        self.update_stats(parsed)
-        self.export_stats()
-
-
-    def openssl_speed(self, cipher, options=''):
-        cmd = 'openssl speed %s -mr %s' % (cipher, options)
-        return utils.system_output(cmd, retain_output=True)
-
-
-    def parse_results(self, results, name=''):
-        # Split the results into lines.
-        # We really only want the final line for our purposes.
-        type, times = results.split("\n")[-1].split(' ')
-        # +F:num:aes-256 cbc -> aes_256_cbc
-        type = re.sub('[- ]', '_', type.split(':')[-1])
-        # cbc:time:time:time:... -> time, time, ...
-        times = times.split(':')[1:]
-
-        # Build the key names
-        if len(name) > 0:
-          name = name + '_'
-        key_prefix = 'bytes_per_sec_' + name + type + '_blocksz_'
-        keys = ['16_bytes', '64_bytes', '256_bytes', '1024_bytes', '8192_bytes']
-        keys = [key_prefix+k for k in keys]
-
-        if len(times) > len(keys):
-            logging.debug(results)
-            raise error.TestFail('openssl output format parsing failed')
-        return dict(zip(keys, times))
-
-
-    def update_stats(self, keyvals):
-        self.results.update(keyvals)
-
-
-    def export_stats(self):
-        self.write_perf_keyval(self.results)
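The machine-readable line that parse_results() consumed looks like '+F:22:aes-256 cbc:<16B>:<64B>:<256B>:<1024B>:<8192B>', one throughput figure per block size. A minimal standalone sketch of the same parsing; the example line and its numbers are made up.

# Minimal sketch: turn the final `-mr` line of `openssl speed` into a dict of
# bytes-per-second keyvals, mirroring the parsing in the deleted test.
import re

BLOCK_SIZES = ('16_bytes', '64_bytes', '256_bytes', '1024_bytes', '8192_bytes')

def parse_speed_line(last_line, name=''):
    head, times = last_line.strip().split(' ')   # "+F:22:aes-256", "cbc:v:v:v:v:v"
    cipher = re.sub('[- ]', '_', head.split(':')[-1])
    values = [float(v) for v in times.split(':')[1:]]   # drop the "cbc" token
    prefix = 'bytes_per_sec_' + (name + '_' if name else '') + cipher + '_blocksz_'
    return dict(zip([prefix + k for k in BLOCK_SIZES], values))

# Example with made-up numbers:
# parse_speed_line('+F:22:aes-256 cbc:100.1:200.2:300.3:400.4:500.5')
# -> {'bytes_per_sec_aes_256_blocksz_16_bytes': 100.1, ...}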
diff --git a/client/site_tests/platform_AnomalyDetector/control b/client/site_tests/platform_AnomalyDetector/control
deleted file mode 100644
index c0c10cf..0000000
--- a/client/site_tests/platform_AnomalyDetector/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "platform_AnomalyDetector"
-AUTHOR = "chromeos-kernel@google.com"
-PURPOSE = "Checks that the anomaly detector is collecting log anomalies"
-CRITERIA = "The detector must create a file when a test warning is produced"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Platform"
-TEST_TYPE = "Client"
-ATTRIBUTES = "suite:bvt-perbuild"
-
-DOC = """
-This checks that the anomaly detector is operational by generating
-a test kernel warning and verifying that the detector daemon collects it.
-More functionality is tested by the compile-time test.
-"""
-
-job.run_test('platform_AnomalyDetector')
diff --git a/client/site_tests/platform_AnomalyDetector/platform_AnomalyDetector.py b/client/site_tests/platform_AnomalyDetector/platform_AnomalyDetector.py
deleted file mode 100644
index eeb9f16..0000000
--- a/client/site_tests/platform_AnomalyDetector/platform_AnomalyDetector.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import time
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.cros import upstart
-
-class platform_AnomalyDetector(test.test):
-    "Tests the anomaly detector daemon"
-    version = 1
-
-    def run_once(self):
-        "Runs the test once"
-
-        # Restart the anomaly detector daemon (to clear its cache of
-        # "already-seen warnings" and ensure this one is logged)
-        upstart.restart_job("anomaly-detector")
-
-        # Give enough time for the anomaly detector to open the journal and scan
-        # to the end (otherwise it will miss the warning message).
-        # TODO(https://crbug.com/983725): Check that it's opened the journal.
-        time.sleep(0.5)
-
-        # Delete old kernel warning files to distinguish this one.
-        utils.system("rm -rf /var/spool/crash/kernel_warning*")
-
-        lkdtm = "/sys/kernel/debug/provoke-crash/DIRECT"
-        if os.path.exists(lkdtm):
-            utils.system("echo WARNING > %s" % (lkdtm))
-        else:
-            utils.system("echo warning > /proc/breakme")
-
-        cmd = "test -f /var/spool/crash/kernel_warning*.kcrash"
-        utils.poll_for_condition_ex(lambda: utils.system(cmd) == 0)
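The trigger-and-wait step above, without the autotest helpers, amounts to writing to the lkdtm node and polling for the resulting .kcrash file. A minimal sketch, assuming a ChromeOS DUT with lkdtm at /sys/kernel/debug/provoke-crash/DIRECT and crash reports landing in /var/spool/crash (both paths taken from the deleted test).

# Minimal sketch: provoke a test kernel warning and wait for the anomaly
# detector to produce a kernel_warning*.kcrash report.
import glob
import os
import time

LKDTM = '/sys/kernel/debug/provoke-crash/DIRECT'
CRASH_GLOB = '/var/spool/crash/kernel_warning*.kcrash'

def provoke_warning_and_wait(timeout=30):
    if os.path.exists(LKDTM):
        with open(LKDTM, 'w') as f:
            f.write('WARNING')
    else:
        with open('/proc/breakme', 'w') as f:  # fallback on kernels without lkdtm
            f.write('warning')
    deadline = time.time() + timeout
    while time.time() < deadline:
        if glob.glob(CRASH_GLOB):
            return True
        time.sleep(1)
    return False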
diff --git a/client/site_tests/platform_BootPerf/control b/client/site_tests/platform_BootPerf/control
index 0d7f4a2..b784cd2 100644
--- a/client/site_tests/platform_BootPerf/control
+++ b/client/site_tests/platform_BootPerf/control
@@ -2,7 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see: go/tauto-py3-migration
+# To re-enable it, migrate the test to Python 3.
+# If the test is not migrated by 1/14/22, it will be deleted.
+
+AUTHOR = "ChromeOS Team"
 NAME = "platform_BootPerf"
 PURPOSE = "Collect boot performance metrics from the last system reboot."
 CRITERIA = """
diff --git a/client/site_tests/platform_BootPerf/platform_BootPerf.py b/client/site_tests/platform_BootPerf/platform_BootPerf.py
index 5cdbe8b..353c629 100644
--- a/client/site_tests/platform_BootPerf/platform_BootPerf.py
+++ b/client/site_tests/platform_BootPerf/platform_BootPerf.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -97,8 +98,8 @@
 
     _UPTIME_PREFIX = 'uptime-'
     _DISK_PREFIX = 'disk-'
+    _FW_TIMESTAMPS = 'cbmem-timestamps'
 
-    _FIRMWARE_TIME_FILE = '/tmp/firmware-boot-time'
 
     _BOOTSTAT_ARCHIVE_GLOB = '/var/log/metrics/shutdown.[0-9]*'
     _UPTIME_FILE_GLOB = os.path.join('/tmp', _UPTIME_PREFIX + '*')
@@ -115,10 +116,6 @@
                             glob.glob(self._DISK_FILE_GLOB))
         for fname in statlist:
             shutil.copy(fname, self.resultsdir)
-        try:
-            shutil.copy(self._FIRMWARE_TIME_FILE, self.resultsdir)
-        except Exception:
-            pass
 
     def _copy_console_ramoops(self):
         """Copy console_ramoops from previous reboot."""
@@ -130,6 +127,12 @@
             except Exception:
                 pass
 
+    def _store_fw_timestamps(self):
+        """Store detailed firmware timestamps for debugging."""
+        with open(os.path.join(self.resultsdir, self._FW_TIMESTAMPS),
+                  'w') as f:
+            utils.run('cbmem -t', stdout_tee=f)
+
     def _parse_bootstat(self, filename, fieldnum, required=False):
         """Read values from a bootstat event file.
 
@@ -167,8 +170,8 @@
                     logging.warning("Waited %d seconds for bootstat file: %s", cnt, filename)
 
             with open(filename) as statfile:
-                values = map(lambda l: float(l.split()[fieldnum]),
-                             statfile.readlines())
+                values = list(map(lambda l: float(l.split()[fieldnum]),
+                             statfile.readlines()))
             return values
         except IOError:
             raise error.TestFail('Failed to read bootstat file "%s"' %
@@ -212,29 +215,19 @@
     def _gather_firmware_boot_time(self, results):
         """Read and report firmware startup time.
 
-        send-boot-metrics.conf writes the firmware startup time to the
-        file named in `_FIRMWARE_TIME_FILE`.  Read the time and record
-        it in `results` as the keyval seconds_power_on_to_kernel.
+        `cbmem -t` reports the firmware boot time in the format
+        'Total Time: {comma-separated microseconds}'. Read the time
+        and record it in `results` as the keyval
+        seconds_power_on_to_kernel.
 
         @param results  Keyvals dictionary.
 
         """
 
-        # crbug.com/1098635 - don't race with send-boot-metrics.conf
-        # TODO(grundler): directly read the firmware_time instead of depending
-        # on send-boot-metrics to create _FIRMWARE_TIME_FILE.
-        cnt = 1
-        while cnt < 60:
-            if  os.path.exists(self._FIRMWARE_TIME_FILE):
-                break
-            time.sleep(1)
-            cnt += 1
+        data = utils.system_output('cbmem -t | grep \'Total Time:\' |'
+                                   'awk \'{print $NF}\'')
+        firmware_time = round(float(data.replace(',', '')) / (1000 * 1000), 2)
 
-        # If the firmware boot time is not available, the file
-        # will not exist and we should throw an exception here.
-        data = utils.read_one_line(self._FIRMWARE_TIME_FILE)
-
-        firmware_time = float(data)
         boot_time = results['seconds_kernel_to_login']
         results['seconds_power_on_to_kernel'] = firmware_time
         results['seconds_power_on_to_login'] = (
@@ -459,6 +452,7 @@
 
         self._copy_timestamp_files()
         self._copy_console_ramoops()
+        self._store_fw_timestamps()
 
         self.write_perf_keyval(results)
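The shell pipeline added above (cbmem -t | grep | awk) can equally be done in Python. A minimal sketch of the same extraction, assuming the output contains a line of the form 'Total Time: 1,234,567' in microseconds, as the docstring describes.

# Minimal sketch: read the firmware boot time in seconds from `cbmem -t`.
# Assumes a "Total Time: <comma-separated microseconds>" line in the output.
import re
import subprocess

def firmware_boot_seconds():
    out = subprocess.run(['cbmem', '-t'], capture_output=True,
                         text=True, check=True).stdout
    match = re.search(r'Total Time:\s*([\d,]+)', out)
    if not match:
        raise ValueError('no "Total Time:" line in cbmem -t output')
    microseconds = float(match.group(1).replace(',', ''))
    return round(microseconds / 1e6, 2)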
 
diff --git a/client/site_tests/platform_CheckErrorsInLog/control b/client/site_tests/platform_CheckErrorsInLog/control
deleted file mode 100644
index e0a63e7..0000000
--- a/client/site_tests/platform_CheckErrorsInLog/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kdlucas@chromium.org (Kelly Lucas)"
-DOC = "Checks system logs for error messages."
-NAME = "platform_CheckErrorsInLog"
-PURPOSE = "Ensure there are no major errors in system logs."
-CRITERIA = """
-Fails if any of the following occurs:
-  - Finds the keywords fatal, oops, panic, or segfault in:
-    - kern.log
-    - syslog
-    - dmesg
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-job.run_test('platform_CheckErrorsInLog')
diff --git a/client/site_tests/platform_CheckErrorsInLog/platform_CheckErrorsInLog.py b/client/site_tests/platform_CheckErrorsInLog/platform_CheckErrorsInLog.py
deleted file mode 100644
index 496e05a..0000000
--- a/client/site_tests/platform_CheckErrorsInLog/platform_CheckErrorsInLog.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-__author__ = 'kdlucas@chromium.org (Kelly Lucas)'
-
-import logging, os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class platform_CheckErrorsInLog(test.test):
-    """
-    Check system logs for errors.
-    """
-    version = 1
-
-    def search_log(self, logfile):
-        """
-        Try to ping the remote host and report the status.
-        Args:
-            logfile: string, pathname of logfile to search.
-        Returns:
-            integer: number of errors found.
-        """
-        errors = 0
-        kerrors = ['fatal', 'oops', 'panic', 'segfault']
-        f = open(logfile, 'r')
-        log = f.readlines()
-        for line in log:
-            for key in kerrors:
-                if key in line:
-                    errors += 1
-                    logging.error('%s found in %s' ,line, logfile)
-        f.close()
-
-        return errors
-
-
-    def run_once(self):
-        errors = 0
-        logs = ['kern.log', 'syslog', 'dmesg']
-
-        for log in logs:
-            logfile = os.path.join('/var/log', log)
-            if os.path.isfile(logfile):
-                errors += self.search_log(logfile)
-            else:
-                logging.warning('%s does not exist' % logfile)
-
-        if errors:
-            raise error.TestFail('%d failures found in logs' % errors)
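The search the deleted test performed is a straightforward keyword scan over a few log files. A minimal sketch, reusing the keyword list and log names from the test itself.

# Minimal sketch: count log lines containing kernel error keywords.
import os

KEYWORDS = ('fatal', 'oops', 'panic', 'segfault')
LOGS = ('kern.log', 'syslog', 'dmesg')

def count_log_errors(log_dir='/var/log'):
    errors = 0
    for name in LOGS:
        path = os.path.join(log_dir, name)
        if not os.path.isfile(path):
            continue
        with open(path, errors='replace') as f:  # tolerate non-UTF-8 bytes
            for line in f:
                if any(keyword in line for keyword in KEYWORDS):
                    errors += 1
    return errors

if __name__ == '__main__':
    print('%d suspicious lines found' % count_log_errors())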
diff --git a/client/site_tests/platform_ChromeCgroups/control b/client/site_tests/platform_ChromeCgroups/control
deleted file mode 100644
index d60ed26..0000000
--- a/client/site_tests/platform_ChromeCgroups/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "abrestic, sonnyrao"
-NAME = "platform_ChromeCgroups"
-PURPOSE = "Verify that the chrome_renderers cgroups are created and used."
-CRITERIA = "Fails if the required cgroups do not exist or are empty."
-ATTRIBUTES = "suite:regression"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This test verifies that the chrome_renderers foreground and background
-cgroups exist and that tasks are placed in those cgroups when multiple
-tabs are open.
-"""
-
-job.run_test('platform_ChromeCgroups')
diff --git a/client/site_tests/platform_ChromeCgroups/platform_ChromeCgroups.py b/client/site_tests/platform_ChromeCgroups/platform_ChromeCgroups.py
deleted file mode 100755
index 74f6db8..0000000
--- a/client/site_tests/platform_ChromeCgroups/platform_ChromeCgroups.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-
-CGROUP_DIR = '/sys/fs/cgroup/cpu/chrome_renderers'
-FG_CGROUP_DIR = os.path.join(CGROUP_DIR, 'foreground')
-BG_CGROUP_DIR = os.path.join(CGROUP_DIR, 'background')
-
-class platform_ChromeCgroups(test.test):
-    version = 1
-
-    def _get_cgroup_tasks(self, cgroup_dir):
-        """
-        Returns the set of tasks in a cgroup.
-
-        @param cgroup_dir Directory containing the cgroup.
-        """
-        task_path = os.path.join(cgroup_dir, 'tasks')
-        task_file = open(task_path)
-        if not task_file:
-            raise error.TestError('failed to open %s' % task_path)
-        tasks = set(line.rstrip() for line in task_file.readlines())
-        task_file.close()
-        logging.info('tasks in cgroup %s: %s', cgroup_dir, ','.join(tasks))
-        return tasks
-
-    def run_once(self):
-        """
-        Check that the chrome_renderers cgroups are created and that tasks
-        are placed in them.
-        """
-        with chrome.Chrome() as cr:
-            # Make sure the cgroup directories actually exist.
-            if not os.path.isdir(CGROUP_DIR):
-                raise error.TestFail('chrome_renderers cgroup does not exist')
-            if not os.path.isdir(FG_CGROUP_DIR):
-                raise error.TestFail('foreground cgroup does not exist')
-            if not os.path.isdir(BG_CGROUP_DIR):
-                raise error.TestFail('background cgroup does not exist')
-
-            # Open up two tabs in the same window. One should be in the foreground
-            # while the other is in the background.
-            tab1 = cr.browser.tabs[0]
-            tab1.Navigate('about:blank')
-            tab1.WaitForDocumentReadyStateToBeComplete()
-            tab2 = cr.browser.tabs.New()
-            tab2.Navigate('chrome:system')
-            tab2.WaitForDocumentReadyStateToBeComplete()
-
-            # Make sure the foreground and background cgroups are non-empty.
-            if not self._get_cgroup_tasks(FG_CGROUP_DIR):
-                raise error.TestFail('no tasks in foreground cgroup')
-            if not self._get_cgroup_tasks(BG_CGROUP_DIR):
-                raise error.TestFail('no tasks in background cgroup')
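Reading a cgroup's membership, as _get_cgroup_tasks() did, is just reading its tasks file. A minimal sketch, assuming the cgroup v1 cpu hierarchy path used by the deleted test.

# Minimal sketch: return the set of task IDs currently in a cgroup.
import os

def cgroup_tasks(cgroup_dir='/sys/fs/cgroup/cpu/chrome_renderers/foreground'):
    with open(os.path.join(cgroup_dir, 'tasks')) as f:
        return {line.strip() for line in f if line.strip()}

# Usage idea: fail if the foreground cgroup is empty while Chrome has tabs open.
# assert cgroup_tasks(), 'no tasks in foreground cgroup'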
diff --git a/client/site_tests/platform_CleanShutdown/control b/client/site_tests/platform_CleanShutdown/control
deleted file mode 100644
index 60ecc40..0000000
--- a/client/site_tests/platform_CleanShutdown/control
+++ /dev/null
@@ -1,19 +0,0 @@
-TIME='SHORT'
-AUTHOR = 'The Chromium OS Authors'
-DOC = """
-Tests that the last shutdown was clean (without errors).
-
-NOTE: You need to shut down or reboot the device at least once before running
-this test for a given build, or else the test is useless.
-"""
-
-NAME = 'platform_CleanShutdown'
-PURPOSE = 'Verify the last shutdown was clean.'
-CRITERIA = """
-Fails if the stateful partition was not cleanly unmounted on the last shutdown.
-"""
-TEST_CLASS = 'platform'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-
-job.run_test('platform_CleanShutdown')
diff --git a/client/site_tests/platform_CleanShutdown/platform_CleanShutdown.py b/client/site_tests/platform_CleanShutdown/platform_CleanShutdown.py
deleted file mode 100644
index 86aba1a..0000000
--- a/client/site_tests/platform_CleanShutdown/platform_CleanShutdown.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os
-from shutil import copyfile
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-UMOUNT_FAIL_BASENAME = 'shutdown_stateful_umount_failure'
-SHUTDOWN_STATEFUL_UMOUNT_FAIL = ('/mnt/stateful_partition/' +
-                                 UMOUNT_FAIL_BASENAME)
-
-class platform_CleanShutdown(test.test):
-    """Checks for the presence of an unclean shutdown file."""
-    version = 1
-
-    def run_once(self):
-        if os.path.exists(SHUTDOWN_STATEFUL_UMOUNT_FAIL):
-            with open(SHUTDOWN_STATEFUL_UMOUNT_FAIL) as f:
-                logging.debug('Stateful unmount failure log:\n%s', f.read())
-
-            copyfile(SHUTDOWN_STATEFUL_UMOUNT_FAIL,
-                     os.path.join(self.resultsdir, UMOUNT_FAIL_BASENAME))
-
-            # Delete the file between each test run to see if the last reboot
-            # failed.
-            os.remove(SHUTDOWN_STATEFUL_UMOUNT_FAIL)
-            raise error.TestFail(
-                '{} exists!'.format(SHUTDOWN_STATEFUL_UMOUNT_FAIL))
diff --git a/client/site_tests/platform_Crossystem/control b/client/site_tests/platform_Crossystem/control
index 14867d8..b235828 100644
--- a/client/site_tests/platform_Crossystem/control
+++ b/client/site_tests/platform_Crossystem/control
@@ -13,6 +13,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
     Checks that the "crossystem" command basic functionality is present.
diff --git a/client/site_tests/platform_Crossystem/platform_Crossystem.py b/client/site_tests/platform_Crossystem/platform_Crossystem.py
index ab6f7c0..0d62077 100644
--- a/client/site_tests/platform_Crossystem/platform_Crossystem.py
+++ b/client/site_tests/platform_Crossystem/platform_Crossystem.py
@@ -10,7 +10,7 @@
 
 class _Matcher(object):
     """Extends regular expression with a match/do not match bit and
-    a saner definition of "match".
+    a more sensible definition of "match".
     """
 
 
diff --git a/client/site_tests/platform_Crouton/control b/client/site_tests/platform_Crouton/control
deleted file mode 100755
index 5e07245..0000000
--- a/client/site_tests/platform_Crouton/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "drinkcat,dnschneid"
-NAME = "platform_Crouton"
-TIME = "LENGTHY"
-TEST_TYPE = "client"
-
-DOC = """
-This test fetches a specific branch of crouton, and runs crouton tests.
-
-@param repo: (dnschneid/crouton) github repository to fetch from
-@param branch: (master) github branch
-@param runargs: (-R precise 00) parameters to pass to run.sh
-@param env: () Environment variables to set, semicolon-separated key=val pairs.
-            Only CROUTON_MIRROR_* can be set
-"""
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('platform_Crouton', args=args_dict)
diff --git a/client/site_tests/platform_Crouton/platform_Crouton.py b/client/site_tests/platform_Crouton/platform_Crouton.py
deleted file mode 100755
index 4b37a49..0000000
--- a/client/site_tests/platform_Crouton/platform_Crouton.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import file_utils
-from autotest_lib.client.common_lib import logging_manager
-
-
-class platform_Crouton(test.test):
-    """
-    Tests crouton
-    """
-    version = 1
-
-
-    def _parse_args(self, args):
-        self._repo = "dnschneid/crouton"
-        self._branch = "master"
-        self._runargs = "00"
-        self._env = ""
-
-        for option_name, value in args.iteritems():
-            if option_name == 'repo':
-                self._repo = value
-            elif option_name == 'branch':
-                self._branch = value
-            elif option_name == 'runargs':
-                self._runargs = value
-            elif option_name == 'env':
-                self._env = value
-
-
-    def run_once(self, args={}):
-        self._parse_args(args)
-
-        logging.info("Running crouton test:")
-        logging.info(" - repo: %s", self._repo);
-        logging.info(" - branch: %s", self._branch);
-        logging.info(" - runargs: %s", self._runargs);
-        logging.info(" - env:%s", self._env);
-        logging.debug(" - resultsdir: %s", self.resultsdir)
-        logging.debug(' - tmpdir: %s', self.tmpdir)
-
-        crouton_temp_file = os.path.join(self.tmpdir, "archive.tar.gz")
-        crouton_url = 'https://github.com/%s/archive/%s.tar.gz' \
-                                            % (self._repo, self._branch)
-
-        logging.info('Downloading crouton tarball: "%s".', crouton_url)
-        file_utils.download_file(crouton_url, crouton_temp_file)
-
-        os.chdir(self.tmpdir)
-        utils.system('tar xvf %s --strip-components 1' % crouton_temp_file)
-
-        # Set environment. Only allow setting CROUTON_MIRROR_* variables
-        for env_pair in self._env.split(";"):
-            keyval = env_pair.split("=")
-            if len(keyval) == 2 and keyval[0].find("CROUTON_MIRROR_") == 0:
-                logging.debug('Setting env %s=%s', keyval[0], keyval[1])
-                os.environ[keyval[0]] = keyval[1]
-
-        # Pass arguments separately to avoid problems with Little Bobby Tables.
-        args = ['test/run.sh', '-l', self.resultsdir] + self._runargs.split()
-        utils.run('sh', args=args,
-                  timeout=None, ignore_status=False,
-                  stderr_tee=logging_manager.LoggingFile(level=logging.INFO))
-
-
-    def cleanup(self):
-        # Reset hung task panic, see crbug.com/420094
-        utils.system('echo 1 > /proc/sys/kernel/hung_task_panic')
diff --git a/client/site_tests/platform_CryptohomeBadPerms/control b/client/site_tests/platform_CryptohomeBadPerms/control
deleted file mode 100644
index 7b2acd0..0000000
--- a/client/site_tests/platform_CryptohomeBadPerms/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Authors"
-NAME = "platform_CryptohomeBadPerms"
-PURPOSE = "Tests Cryptohome's protection against bad directory permissions."
-TIME = "SHORT"
-TEST_CATEGORY = "Security"
-TEST_CLASS = "functional"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:bvt-perbuild"
-
-DOC = """
-Tests Cryptohome's ability to detect directories with bad permissions or
-ownership in the mount path of a home directory.
-"""
-
-job.run_test('platform_CryptohomeBadPerms')
diff --git a/client/site_tests/platform_CryptohomeBadPerms/platform_CryptohomeBadPerms.py b/client/site_tests/platform_CryptohomeBadPerms/platform_CryptohomeBadPerms.py
deleted file mode 100644
index 9fb72e9..0000000
--- a/client/site_tests/platform_CryptohomeBadPerms/platform_CryptohomeBadPerms.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import contextlib
-import os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import cryptohome
-
-@contextlib.contextmanager
-def scoped_dir(path):
-    os.mkdir(path)
-    yield
-    os.rmdir(path)
-
-
-class platform_CryptohomeBadPerms(test.test):
-    """Tests Cryptohome's ability to detect directories with bad permissions or
-       ownership in the mount path of a home directory.
-    """
-    version = 1
-
-    def require_mount_fail(self, user):
-        """
-        Raise an error if the mount succeeded.
-        @param user: A random user created in run_once.
-        """
-        try:
-            cryptohome.mount_vault(user, 'test', create=True)
-        except:
-            pass
-        else:
-            raise error.TestFail('Mount unexpectedly succeeded for %s' % user)
-
-    def run_once(self):
-        # Leaf element of user path not owned by user.
-        user = utils.random_username()
-        path = cryptohome.user_path(user)
-        with scoped_dir(path):
-            os.chown(path, 0, 0)
-            self.require_mount_fail(user)
-
-        # Leaf element of system path not owned by root.
-        user = utils.random_username()
-        path = cryptohome.system_path(user)
-        with scoped_dir(path):
-            os.chown(path, 1, 1)
-            self.require_mount_fail(user)
-
-        # Leaf element of path too permissive.
-        user = utils.random_username()
-        path = cryptohome.user_path(user)
-        with scoped_dir(path):
-            os.chmod(path, 0777)
-            self.require_mount_fail(user)
-
-        # Non-leaf element of path not owned by root.
-        user = utils.random_username()
-        path = cryptohome.user_path(user)
-        parent_path = os.path.dirname(path)
-        os.chown(parent_path, 1, 1)
-        try:
-            self.require_mount_fail(user)
-        finally:
-            os.chown(parent_path, 0, 0)
-
-        # Non-leaf element of path too permissive.
-        user = utils.random_username()
-        path = cryptohome.user_path(user)
-        parent_path = os.path.dirname(path)
-        old_perm = os.stat(parent_path).st_mode & 0777
-        os.chmod(parent_path, 0777)
-        try:
-            self.require_mount_fail(user)
-        finally:
-            os.chmod(parent_path, old_perm)
-            os.chown(parent_path, 0, 0)
diff --git a/client/site_tests/platform_CryptohomeChangePassword/control b/client/site_tests/platform_CryptohomeChangePassword/control
deleted file mode 100644
index d55d4d7..0000000
--- a/client/site_tests/platform_CryptohomeChangePassword/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_CryptohomeChangePassword"
-PURPOSE = "Verify that cryptohome can change a password for an encrypted vault."
-CRITERIA = "This test will fail if cryptohome fails to re-wrap the vault key with the new password."
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This test checks that cryptohome can change a password for an encrypted vault.
-"""
-
-job.run_test('platform_CryptohomeChangePassword')
diff --git a/client/site_tests/platform_CryptohomeChangePassword/platform_CryptohomeChangePassword.py b/client/site_tests/platform_CryptohomeChangePassword/platform_CryptohomeChangePassword.py
deleted file mode 100644
index aa2db9f..0000000
--- a/client/site_tests/platform_CryptohomeChangePassword/platform_CryptohomeChangePassword.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error, utils
-from autotest_lib.client.cros import cryptohome
-
-
-def run_cmd(cmd):
-    return utils.system_output(cmd + ' 2>&1', retain_output=True,
-                               ignore_status=True)
-
-class platform_CryptohomeChangePassword(test.test):
-    version = 1
-
-
-    def run_once(self):
-        test_user = 'this_is_a_local_test_account@chromium.org'
-        test_password = 'this_is_a_test_password'
-
-        # Remove any old test user account
-        cryptohome.remove_vault(test_user)
-
-        # Create a fresh test user account
-        cryptohome.mount_vault(test_user, test_password, create=True)
-        cryptohome.unmount_vault(test_user)
-
-        # Try to migrate the password
-        new_password = 'this_is_a_new_password'
-        cryptohome.change_password(test_user, test_password, new_password)
-
-        # Mount the test user account with the new password
-        cryptohome.mount_vault(test_user, new_password)
-        cryptohome.unmount_vault(test_user)
-
-        # Ensure the old password doesn't work
-        try:
-            cryptohome.mount_vault(test_user, test_password)
-        except cryptohome.ChromiumOSError:
-            pass
-        else:
-            raise error.TestFail("Mount with old password worked")
-
-        # Remove the test user account
-        cryptohome.remove_vault(test_user)
diff --git a/client/site_tests/platform_CryptohomeFio/control b/client/site_tests/platform_CryptohomeFio/control
index 5902811..d142d14 100644
--- a/client/site_tests/platform_CryptohomeFio/control
+++ b/client/site_tests/platform_CryptohomeFio/control
@@ -15,6 +15,7 @@
 TEST_CLASS = 'platform'
 TEST_CATEGORY = 'Stress'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 ATTRIBUTES = "suite:bvt-perbuild"
 
 job.run_test('platform_CryptohomeFio',
diff --git a/client/site_tests/platform_CryptohomeFio/control.dirty_setting b/client/site_tests/platform_CryptohomeFio/control.dirty_setting
index a805cf5..926c19d 100644
--- a/client/site_tests/platform_CryptohomeFio/control.dirty_setting
+++ b/client/site_tests/platform_CryptohomeFio/control.dirty_setting
@@ -14,6 +14,7 @@
 TEST_CLASS = 'platform'
 TEST_CATEGORY = 'Stress'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 ATTRIBUTES = "suite:bvt-perbuild"
 
 sysctls_list = [
diff --git a/client/site_tests/platform_CryptohomeFio/control.stress b/client/site_tests/platform_CryptohomeFio/control.stress
index d96b49f..479bd70 100644
--- a/client/site_tests/platform_CryptohomeFio/control.stress
+++ b/client/site_tests/platform_CryptohomeFio/control.stress
@@ -14,6 +14,7 @@
 TEST_CLASS = 'platform'
 TEST_CATEGORY = 'Stress'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 ATTRIBUTES = "suite:bvt-perbuild"
 
 RUNTIME=60  # seconds.
diff --git a/client/site_tests/platform_CryptohomeFio/platform_CryptohomeFio.py b/client/site_tests/platform_CryptohomeFio/platform_CryptohomeFio.py
index 0d8d839..72d39b4 100644
--- a/client/site_tests/platform_CryptohomeFio/platform_CryptohomeFio.py
+++ b/client/site_tests/platform_CryptohomeFio/platform_CryptohomeFio.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -46,7 +47,7 @@
         for config in disk_configs:
             for sysctls in sysctls_list or [ {} ]:
                 graph_descr = ''
-                for key, val in sysctls.iteritems():
+                for key, val in list(sysctls.items()):
                     utils.sysctl(key, val)
                     graph_descr += '-'.join([os.path.basename(key), str(val)])
                 # Mount a test cryptohome vault.
diff --git a/client/site_tests/platform_CryptohomeKeyEviction/control b/client/site_tests/platform_CryptohomeKeyEviction/control
deleted file mode 100644
index 1cb179d..0000000
--- a/client/site_tests/platform_CryptohomeKeyEviction/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "usanghi"
-NAME = "platform_CryptohomeKeyEviction"
-PURPOSE = "Verify that cryptohome correctly reloads evicted TPM keys."
-CRITERIA = "Fails if any operations fail."
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-    Ensure that the cryptohome properly manages key eviction from
-    the tpm. This test verifies this behaviour by creating 30 keys using
-    chaps, and then remounting a user's cryptohome. Mount requires use of the
-    user's cryptohome key, and thus the mount only succeeds if the
-    cryptohome key was properly evicted and reloaded into the TPM.
-"""
-
-job.run_test('platform_CryptohomeKeyEviction')
diff --git a/client/site_tests/platform_CryptohomeKeyEviction/platform_CryptohomeKeyEviction.py b/client/site_tests/platform_CryptohomeKeyEviction/platform_CryptohomeKeyEviction.py
deleted file mode 100644
index 731fd3c..0000000
--- a/client/site_tests/platform_CryptohomeKeyEviction/platform_CryptohomeKeyEviction.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.cros import cryptohome, pkcs11
-
-
-class platform_CryptohomeKeyEviction(test.test):
-    """Ensure that the cryptohome properly manages key eviction from the tpm.
-       This test verifies this behaviour by creating 30 keys using chaps,
-       and then remounting a user's cryptohome. Mount requires use of the
-       user's cryptohome key, and thus the mount only succeeds if the
-       cryptohome key was properly evicted and reloaded into the TPM.
-    """
-    version = 1
-
-
-    def run_once(self):
-        # Make sure that the tpm is owned.
-        status = cryptohome.get_tpm_status()
-        if not status['Owned']:
-            cryptohome.take_tpm_ownership()
-
-        self.user = 'first_user@nowhere.com'
-        password = 'test_password'
-        cryptohome.ensure_clean_cryptohome_for(self.user, password)
-
-
-        # First we inject 30 tokens into chaps. This forces the cryptohome
-        # key to get evicted.
-        for i in range(30):
-            pkcs11.inject_and_test_key()
-
-        # Then we get a user to remount his cryptohome. This process uses
-        # the cryptohome key, and if the user was able to login, the
-        # cryptohome key was correctly reloaded.
-        cryptohome.unmount_vault(self.user)
-        cryptohome.mount_vault(self.user, password, create=True)
-
-
-    def cleanup(self):
-        cryptohome.unmount_vault(self.user)
-        cryptohome.remove_vault(self.user)
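For reference, the eviction test removed above boils down to a short pattern: load enough chaps keys to force the cryptohome key out of the TPM, then remount to prove it gets reloaded. A minimal sketch of that pattern, using the same autotest helpers the deleted test imported (illustrative only, not a replacement test):

from autotest_lib.client.cros import cryptohome, pkcs11

user, password = 'first_user@nowhere.com', 'test_password'
cryptohome.ensure_clean_cryptohome_for(user, password)

# Injecting many tokens through chaps forces the cryptohome key to be
# evicted from the limited TPM key slots.
for _ in range(30):
    pkcs11.inject_and_test_key()

# Remounting requires the cryptohome key; success implies it was
# correctly evicted and then reloaded by cryptohomed.
cryptohome.unmount_vault(user)
cryptohome.mount_vault(user, password, create=True)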
diff --git a/client/site_tests/platform_CryptohomeLECredentialManager/control b/client/site_tests/platform_CryptohomeLECredentialManager/control
deleted file mode 100644
index 06aa5ff..0000000
--- a/client/site_tests/platform_CryptohomeLECredentialManager/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "allenwebb"
-NAME = "platform_CryptohomeLECredentialManager"
-PURPOSE = "Validate PinWeaver functionality on Cr50"
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-Test the functionality of le_credential_manager. This feature handles
-low entropy credentials such as PINs and uses Cr50 to exchange the low
-entropy credential for a high entropy credential while enforcing limits
-on how often attempts can be made. The high entropy credential is needed
-to mount the user's home directory.
-"""
-
-job.run_test('platform_CryptohomeLECredentialManager')
diff --git a/client/site_tests/platform_CryptohomeLECredentialManager/platform_CryptohomeLECredentialManager.py b/client/site_tests/platform_CryptohomeLECredentialManager/platform_CryptohomeLECredentialManager.py
deleted file mode 100644
index 0a51f1d..0000000
--- a/client/site_tests/platform_CryptohomeLECredentialManager/platform_CryptohomeLECredentialManager.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import logging
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.cros import cryptohome
-from autotest_lib.client.common_lib import error
-
-
-class platform_CryptohomeLECredentialManager(test.test):
-    """Tests the le_credential_manager functionality of cryptohome.
-    """
-
-    version = 1
-
-    USER = 'testing@gmail.com'
-    USER2 = 'testing2@gmail.com'
-    KEY_LABEL = 'lecred0'
-    KEY_LABEL2 = 'lecred2'
-    GOOD_PIN = '123456'
-    BAD_PIN = '000000'
-    TEST_PASSWORD = '~'
-
-    def get_known_le_credentials(self):
-        """ Returns the set of LE credentials present on the device.
-        """
-        list_result = utils.run('ls /home/.shadow/low_entropy_creds')
-        labels_str = list_result.stdout
-        return set(labels_str.split())
-
-    def run_once(self, pre_reboot=None):
-        """Runs the platform_CryptohomeLECredentialManager test.
-        """
-        supported_policies = cryptohome.get_supported_key_policies()
-        if (not supported_policies or
-                not supported_policies.get('low_entropy_credentials', False)):
-            raise error.TestNAError(
-                'Low-entropy credentials are not supported.')
-
-        if pre_reboot is None or pre_reboot == True:
-            logging.info('Performing cleanup!')
-            utils.run('stop cryptohomed')
-            utils.run('rm -rf /home/.shadow/low_entropy_creds')
-            try:
-                cryptohome.remove_vault(self.USER)
-                cryptohome.remove_vault(self.USER2)
-            except cryptohome.ChromiumOSError:
-                pass
-            utils.run('start cryptohomed')
-
-            logging.info('Waiting on cryptohomed to startup!')
-            time.sleep(3)
-            # Cleanup any existing mounts
-
-            cryptohome.unmount_vault()
-
-            logging.info('Setting up LE credential!')
-            # The following operations shall all succeed:
-            cryptohome.mount_vault(user=self.USER, password=self.TEST_PASSWORD,
-                                   create=True, key_label='default')
-            cryptohome.add_le_key(
-                user=self.USER, password=self.TEST_PASSWORD,
-                new_key_label=self.KEY_LABEL, new_password=self.GOOD_PIN)
-            cryptohome.unmount_vault()
-
-        logging.info('Testing authentication!')
-        # The following operations shall all succeed:
-        cryptohome.mount_vault(user=self.USER, password=self.GOOD_PIN,
-                               key_label=self.KEY_LABEL)
-        cryptohome.unmount_vault()
-
-        logging.info('Testing lockout!')
-        # The following operations fail, as they attempt to use the wrong PIN 5
-        # times and then good PIN also stops working until reset:
-        for i in range(5):
-            try:
-                cryptohome.mount_vault(user=self.USER, password=self.BAD_PIN,
-                                       key_label=self.KEY_LABEL)
-                raise cryptohome.ChromiumOSError(
-                    'Mount succeeded where it should have failed (try %d)' % i)
-            except cryptohome.ChromiumOSError:
-                pass
-        try:
-            cryptohome.mount_vault(user=self.USER, password=self.GOOD_PIN,
-                                   key_label=self.KEY_LABEL)
-            raise cryptohome.ChromiumOSError(
-                'Mount succeeded where it should have failed')
-        except cryptohome.ChromiumOSError:
-            pass
-
-        logging.info('Testing reset!')
-        # The following operations shall all succeed:
-        cryptohome.mount_vault(user=self.USER, password=self.TEST_PASSWORD,
-                               key_label='default')
-        cryptohome.unmount_vault()
-        cryptohome.mount_vault(user=self.USER, password=self.GOOD_PIN,
-                               key_label=self.KEY_LABEL)
-        cryptohome.unmount_vault()
-
-        logging.info('Testing LE cred removal on user removal!')
-
-        # Create a new user to test removal.
-        cryptohome.mount_vault(user=self.USER2, password=self.TEST_PASSWORD,
-                               create=True, key_label='default')
-        lecreds_before_add = self.get_known_le_credentials()
-
-        cryptohome.add_le_key(
-            user=self.USER2, password=self.TEST_PASSWORD,
-            new_key_label=self.KEY_LABEL, new_password=self.GOOD_PIN)
-        cryptohome.add_le_key(
-            user=self.USER2, password=self.TEST_PASSWORD,
-            new_key_label=self.KEY_LABEL2, new_password=self.GOOD_PIN)
-        cryptohome.unmount_vault()
-        lecreds_after_add = self.get_known_le_credentials()
-
-        cryptohome.remove_vault(self.USER2)
-        lecreds_after_remove = self.get_known_le_credentials()
-
-        if lecreds_after_add == lecreds_before_add:
-            raise cryptohome.ChromiumOSError(
-                'LE creds not added successfully')
-
-        if lecreds_after_remove != lecreds_before_add:
-            raise cryptohome.ChromiumOSError(
-                'LE creds not deleted successfully on user deletion!')
-
-        if pre_reboot is None or pre_reboot == False:
-            logging.info('Testing remove credential!')
-            #The following operations shall all succeed:
-            cryptohome.remove_key(user=self.USER, password=self.TEST_PASSWORD,
-                                  remove_key_label=self.KEY_LABEL)
-            logging.info('Cleanup of test user!')
-            cryptohome.remove_vault(self.USER)
-
-        logging.info('Tests passed!')
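The PIN (low-entropy credential) flow the removed test exercised is worth keeping in mind: an LE key is added next to the password key, five wrong PIN attempts lock the PIN out, and a successful password mount resets the counter. A condensed sketch of that flow with the same cryptohome helpers, assuming a device whose get_supported_key_policies() reports low_entropy_credentials:

from autotest_lib.client.cros import cryptohome

USER, PASSWORD, PIN, BAD_PIN = 'testing@gmail.com', '~', '123456', '000000'

# Create the vault with a password key, then attach a PIN-backed LE key.
cryptohome.mount_vault(user=USER, password=PASSWORD, create=True,
                       key_label='default')
cryptohome.add_le_key(user=USER, password=PASSWORD,
                      new_key_label='lecred0', new_password=PIN)
cryptohome.unmount_vault()

# Five failed PIN mounts trip the Cr50-enforced lockout...
for _ in range(5):
    try:
        cryptohome.mount_vault(user=USER, password=BAD_PIN,
                               key_label='lecred0')
    except cryptohome.ChromiumOSError:
        pass

# ...and a successful password mount resets it, so the PIN works again.
cryptohome.mount_vault(user=USER, password=PASSWORD, key_label='default')
cryptohome.unmount_vault()
cryptohome.mount_vault(user=USER, password=PIN, key_label='lecred0')
cryptohome.unmount_vault()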
diff --git a/client/site_tests/platform_CryptohomeMigrateKey/control b/client/site_tests/platform_CryptohomeMigrateKey/control
deleted file mode 100644
index 0ba6dea..0000000
--- a/client/site_tests/platform_CryptohomeMigrateKey/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_CryptohomeMigrateKey"
-PURPOSE = "Verify that password changes work."
-CRITERIA = "This test will fail if password changes fail."
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-DOC = """
-This test checks that cryptohome can migrate cryptohomes from old keys to new
-keys. It first mounts and unmounts a vault using an old key, then migrates from
-an old key to a new key, and finally mounts and unmounts using the new key.
-"""
-
-job.run_test('platform_CryptohomeMigrateKey')
diff --git a/client/site_tests/platform_CryptohomeMigrateKey/platform_CryptohomeMigrateKey.py b/client/site_tests/platform_CryptohomeMigrateKey/platform_CryptohomeMigrateKey.py
deleted file mode 100644
index 680d2cf..0000000
--- a/client/site_tests/platform_CryptohomeMigrateKey/platform_CryptohomeMigrateKey.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import cryptohome
-
-class platform_CryptohomeMigrateKey(test.test):
-    version = 1
-
-    def good(self):
-        user = utils.random_username()
-        old_pass = 'old'
-        new_pass = 'new'
-
-        cryptohome.mount_vault(user, old_pass, create=True)
-        cryptohome.unmount_vault(user)
-        cryptohome.change_password(user, old_pass, new_pass)
-        try:
-            cryptohome.mount_vault(user, old_pass)
-        except:
-            pass
-        else:
-            raise error.TestFail('Old password still works.')
-        cryptohome.mount_vault(user, new_pass)
-        cryptohome.unmount_vault(user)
-        cryptohome.remove_vault(user)
-
-
-    def bad_password(self):
-        user = utils.random_username()
-        old_pass = 'old'
-        new_pass = 'new'
-        cryptohome.mount_vault(user, old_pass, create=True)
-        cryptohome.unmount_vault(user)
-        try:
-            cryptohome.change_password(user, 'bad', new_pass)
-        except:
-            pass
-        else:
-            raise error.TestFail('Migrated with bad password.')
-        cryptohome.remove_vault(user)
-
-
-    def nonexistent_user(self):
-        user = utils.random_username()
-        old_pass = 'old'
-        new_pass = 'new'
-        try:
-            cryptohome.change_password(user, old_pass, new_pass)
-        except:
-            pass
-        else:
-            raise error.TestFail('Migrated a nonexistent user.')
-
-    def run_once(self):
-        self.good()
-        self.bad_password()
-        self.nonexistent_user()
diff --git a/client/site_tests/platform_CryptohomeMount/control b/client/site_tests/platform_CryptohomeMount/control
deleted file mode 100644
index 3367ec5..0000000
--- a/client/site_tests/platform_CryptohomeMount/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "wad, dkrahn"
-NAME = "platform_CryptohomeMount"
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-DOC = """
-This test checks that cryptohome can create and mount an encrypted vault.
-"""
-
-job.run_test('platform_CryptohomeMount')
diff --git a/client/site_tests/platform_CryptohomeMount/platform_CryptohomeMount.py b/client/site_tests/platform_CryptohomeMount/platform_CryptohomeMount.py
deleted file mode 100644
index c6ad26f..0000000
--- a/client/site_tests/platform_CryptohomeMount/platform_CryptohomeMount.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import cryptohome
-
-class platform_CryptohomeMount(test.test):
-    """Validates basic cryptohome creation and mounting."""
-    version = 1
-
-
-    def run_once(self):
-        test_user = 'this_is_a_local_test_account@chromium.org';
-        test_password = 'this_is_a_test_password';
-
-        # Remove the test user account (if it exists), create it and
-        # mount it
-        cryptohome.ensure_clean_cryptohome_for(test_user, test_password)
-
-        # Unmount the vault and ensure it's not there
-        cryptohome.unmount_vault(test_user)
-
-        # Make sure that an incorrect password fails
-        incorrect_password = 'this_is_an_incorrect_password'
-        try:
-            cryptohome.mount_vault(test_user, incorrect_password)
-        except:
-            pass
-        else:
-            raise error.TestFail('Cryptohome mounted with a bad password')
-
-        # Ensure that the user directory is not mounted
-        if cryptohome.is_permanent_vault_mounted(test_user, allow_fail=True):
-            raise error.TestFail('Cryptohome mounted even though mount failed')
-
-        # Remove the test user account
-        cryptohome.remove_vault(test_user)
diff --git a/client/site_tests/platform_CryptohomeMultiple/control b/client/site_tests/platform_CryptohomeMultiple/control
deleted file mode 100644
index 136957d..0000000
--- a/client/site_tests/platform_CryptohomeMultiple/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Authors"
-NAME = "platform_CryptohomeMultiple"
-PURPOSE = "Tests the API for mounting multiple cryptohomes at once."
-TIME = "SHORT"
-TEST_CATEGORY = "Security"
-TEST_CLASS = "functional"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:bvt-perbuild"
-
-DOC = """
-Performs functional tests for cryptohome multi-mount.
-"""
-
-job.run_test('platform_CryptohomeMultiple')
diff --git a/client/site_tests/platform_CryptohomeMultiple/platform_CryptohomeMultiple.py b/client/site_tests/platform_CryptohomeMultiple/platform_CryptohomeMultiple.py
deleted file mode 100644
index cacedb8..0000000
--- a/client/site_tests/platform_CryptohomeMultiple/platform_CryptohomeMultiple.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.cros import cryptohome
-
-class platform_CryptohomeMultiple(test.test):
-    version = 1
-
-
-    def test_mount_single(self):
-        """
-        Tests mounting a single not-already-existing cryptohome. Ensures that
-        the infrastructure for multiple mounts is present and active.
-        """
-        user = utils.random_username()
-        cryptohome.mount_vault(user, 'test', create=True)
-        cryptohome.unmount_vault(user)
-
-
-    def run_once(self):
-        self.test_mount_single()
diff --git a/client/site_tests/platform_CryptohomeNonDirs/control b/client/site_tests/platform_CryptohomeNonDirs/control
deleted file mode 100644
index 264819f..0000000
--- a/client/site_tests/platform_CryptohomeNonDirs/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Authors"
-NAME = "platform_CryptohomeNonDirs"
-PURPOSE = "Tests Cryptohome's protection against non-directory mounts."
-TIME = "SHORT"
-TEST_CATEGORY = "Security"
-TEST_CLASS = "functional"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:bvt-perbuild"
-
-DOC = """
-Tests whether cryptohome correctly returns errors when non-directory components
-are present in the paths it needs to mount at.
-"""
-
-job.run_test('platform_CryptohomeNonDirs')
diff --git a/client/site_tests/platform_CryptohomeNonDirs/platform_CryptohomeNonDirs.py b/client/site_tests/platform_CryptohomeNonDirs/platform_CryptohomeNonDirs.py
deleted file mode 100644
index efbef05..0000000
--- a/client/site_tests/platform_CryptohomeNonDirs/platform_CryptohomeNonDirs.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import cryptohome
-
-class platform_CryptohomeNonDirs(test.test):
-    version = 1
-
-    def require_mount_fail(self, user):
-        try:
-            cryptohome.mount_vault(user, 'test', create=True)
-        except:
-            pass
-        else:
-            raise error.TestFail('Mount succeeded for %s' % user)
-
-
-    def replace(self, src, dest):
-        """Replaces dest with src.
-
-        Replaces the dirent at dest with the dirent at src, deleting dest first
-        if necessary. This is distinguished from os.rename() or shutil.move() by
-        the fact that it works even if dest is a non-directory dirent.
-        """
-        if os.path.exists(dest):
-            os.remove(dest)
-        os.rename(src, dest)
-
-    def run_once(self):
-        # Leaf element of user path is non-dir.
-        user = utils.random_username()
-        path = cryptohome.user_path(user)
-        utils.open_write_close(path, '')
-        try:
-            self.require_mount_fail(user)
-        finally:
-            os.remove(path)
-
-        # Leaf element of system path is non-dir.
-        user = utils.random_username()
-        path = cryptohome.system_path(user)
-        os.symlink('/etc', path)
-        try:
-            self.require_mount_fail(user)
-        finally:
-            os.remove(path)
-
-        # Non-leaf element of user path is non-dir.
-        user = utils.random_username()
-        path = cryptohome.user_path(user)
-        parent_path = os.path.dirname(path)
-        os.rename(parent_path, parent_path + '.old')
-        try:
-            utils.open_write_close(parent_path, '')
-            self.require_mount_fail(user)
-        finally:
-            # We can't just rely on the rename() to blow away the file -
-            # rename() will refuse to rename directories to non-directory names.
-            self.replace(parent_path + '.old', parent_path)
-
-        # Non-leaf element of system path is non-dir.
-        user = utils.random_username()
-        path = cryptohome.system_path(user)
-        parent_path = os.path.dirname(path)
-        os.rename(parent_path, parent_path + '.old')
-        try:
-            utils.open_write_close(parent_path, '')
-            self.require_mount_fail(user)
-        finally:
-            self.replace(parent_path + '.old', parent_path)
diff --git a/client/site_tests/platform_CryptohomeStress/control b/client/site_tests/platform_CryptohomeStress/control
index 137cff6..64124c3 100644
--- a/client/site_tests/platform_CryptohomeStress/control
+++ b/client/site_tests/platform_CryptohomeStress/control
@@ -5,6 +5,7 @@
 TEST_CATEGORY = "Security"
 TEST_CLASS = "security"
 TEST_TYPE = "client"
+PY_VERSION = 3
 ATTRIBUTES = "suite:bvt-perbuild"
 
 DOC = """
diff --git a/client/site_tests/platform_CryptohomeStress/control.surfing b/client/site_tests/platform_CryptohomeStress/control.surfing
index 16f21ac..41206e6 100644
--- a/client/site_tests/platform_CryptohomeStress/control.surfing
+++ b/client/site_tests/platform_CryptohomeStress/control.surfing
@@ -5,6 +5,7 @@
 TEST_CATEGORY = "Security"
 TEST_CLASS = "security"
 TEST_TYPE = "client"
+PY_VERSION = 3
 ATTRIBUTES = "suite:bvt-perbuild"
 
 DOC = """
diff --git a/client/site_tests/platform_CryptohomeStress/platform_CryptohomeStress.py b/client/site_tests/platform_CryptohomeStress/platform_CryptohomeStress.py
index 378dae7..6b8d2e0 100644
--- a/client/site_tests/platform_CryptohomeStress/platform_CryptohomeStress.py
+++ b/client/site_tests/platform_CryptohomeStress/platform_CryptohomeStress.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -48,4 +49,3 @@
         open(SUSPEND_END, 'w').close()
         command = 'echo %s > /proc/sys/vm/dirty_ratio' % self.d_ratio
         utils.system(command)
-
diff --git a/client/site_tests/platform_CryptohomeTPMReOwn/control b/client/site_tests/platform_CryptohomeTPMReOwn/control
deleted file mode 100644
index 3556d1b..0000000
--- a/client/site_tests/platform_CryptohomeTPMReOwn/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_CryptohomeTPMReOwn"
-PURPOSE = "Verify that cryptohome re-creates the user's vault directory when the TPM is re-owned."
-CRITERIA = "This will run a test of re-owning the TPM."
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functionality"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This test will verify that cryptohome will re-create the user's vault directory
-when the TPM is re-owned.
-"""
-
-job.run_test('platform_CryptohomeTPMReOwn')
diff --git a/client/site_tests/platform_CryptohomeTPMReOwn/platform_CryptohomeTPMReOwn.py b/client/site_tests/platform_CryptohomeTPMReOwn/platform_CryptohomeTPMReOwn.py
deleted file mode 100644
index ffc5235..0000000
--- a/client/site_tests/platform_CryptohomeTPMReOwn/platform_CryptohomeTPMReOwn.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import contextlib, logging, time
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import cryptohome
-
-
-def run_cmd(cmd):
-    return utils.system_output(cmd + ' 2>&1', retain_output=True,
-                               ignore_status=True)
-
-
-def wait_for_tpm_ready():
-    for n in xrange(0, 20):
-        tpm_status = cryptohome.get_tpm_status()
-        if tpm_status['Ready'] == True:
-            return
-        time.sleep(10)
-    raise error.TestError("TPM never became ready")
-
-
-# This context manager ensures we mount a vault and don't forget
-# to unmount it at the end of the test.
-@contextlib.contextmanager
-def vault_mounted(user, password):
-    cryptohome.mount_vault(user, password, create=True)
-    yield
-    try:
-        cryptohome.unmount_vault(user)
-    except:
-        pass
-
-
-def test_file_path(user):
-    return "%s/TESTFILE" % cryptohome.user_path(user)
-
-
-# TODO(ejcaruso): add dump_keyset action to cryptohome utils instead
-# of calling it directly here
-def expect_wrapped_keyset(user):
-    output = run_cmd(
-        "/usr/sbin/cryptohome --action=dump_keyset --user=%s" % user)
-    if output.find("TPM_WRAPPED") < 0:
-        raise error.TestError(
-            "Cryptohome did not create a TPM-wrapped keyset.")
-
-
-class platform_CryptohomeTPMReOwn(test.test):
-    """
-    Test of cryptohome functionality to re-create a user's vault directory if
-    the TPM is cleared and re-owned and the vault keyset is TPM-wrapped.
-    """
-    version = 1
-    preserve_srcdir = True
-
-    def _test_mount_cryptohome(self):
-        cryptohome.remove_vault(self.user)
-        wait_for_tpm_ready()
-        with vault_mounted(self.user, self.password):
-            run_cmd("echo TEST_CONTENT > %s" % test_file_path(self.user))
-        expect_wrapped_keyset(self.user)
-
-
-    def _test_mount_cryptohome_after_reboot(self):
-        wait_for_tpm_ready()
-        with vault_mounted(self.user, self.password):
-            output = run_cmd("cat %s" % test_file_path(self.user))
-        if output.find("TEST_CONTENT") < 0:
-            raise error.TestError(
-                "Cryptohome did not contain original test file")
-
-
-    def _test_mount_cryptohome_check_recreate(self):
-        wait_for_tpm_ready()
-        with vault_mounted(self.user, self.password):
-            output = run_cmd("cat %s" % test_file_path(self.user))
-        if output.find("TEST_CONTENT") >= 0:
-            raise error.TestError(
-                "Cryptohome not re-created, found original test file")
-        expect_wrapped_keyset(self.user)
-
-
-    def run_once(self, subtest='None'):
-        self.user = 'this_is_a_local_test_account@chromium.org'
-        self.password = 'this_is_a_test_password'
-
-        logging.info("Running client subtest %s", subtest)
-        if subtest == 'take_tpm_ownership':
-            cryptohome.take_tpm_ownership()
-        elif subtest == 'mount_cryptohome':
-            self._test_mount_cryptohome()
-        elif subtest == 'mount_cryptohome_after_reboot':
-            self._test_mount_cryptohome_after_reboot()
-        elif subtest == 'mount_cryptohome_check_recreate':
-            self._test_mount_cryptohome_check_recreate()
diff --git a/client/site_tests/platform_CryptohomeTestAuth/control b/client/site_tests/platform_CryptohomeTestAuth/control
deleted file mode 100644
index 020f8c0..0000000
--- a/client/site_tests/platform_CryptohomeTestAuth/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_CryptohomeTestAuth"
-PURPOSE = "Verify that cryptohome can verify credentials."
-CRITERIA = "This test will fail if cryptohome fails to validate credentials."
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This test checks that cryptohome can verify credentials.
-"""
-
-job.run_test('platform_CryptohomeTestAuth')
diff --git a/client/site_tests/platform_CryptohomeTestAuth/platform_CryptohomeTestAuth.py b/client/site_tests/platform_CryptohomeTestAuth/platform_CryptohomeTestAuth.py
deleted file mode 100644
index 36975bb..0000000
--- a/client/site_tests/platform_CryptohomeTestAuth/platform_CryptohomeTestAuth.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import re
-import shutil
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error, utils
-from autotest_lib.client.cros import constants, cryptohome
-
-class platform_CryptohomeTestAuth(test.test):
-    version = 1
-
-
-    def run_once(self):
-        test_user = 'this_is_a_local_test_account@chromium.org'
-        test_password = 'this_is_a_test_password'
-
-        user_hash = cryptohome.get_user_hash(test_user)
-
-
-        # Ensure that the user directory is unmounted and does not exist.
-        cryptohome.unmount_vault(test_user)
-        cryptohome.remove_vault(test_user)
-        if os.path.exists(os.path.join(constants.SHADOW_ROOT, user_hash)):
-            raise error.TestFail('Could not remove the test user.')
-
-        # Mount the test user account, which ensures that the vault is
-        # created, and that the mount succeeds.
-        cryptohome.mount_vault(test_user, test_password, create=True)
-
-        # Test credentials when the user's directory is mounted
-        if not cryptohome.test_auth(test_user, test_password):
-            raise error.TestFail('Valid credentials should authenticate '
-                                 'while mounted.')
-
-        # Make sure that an incorrect password fails
-        if cryptohome.test_auth(test_user, 'badpass'):
-            raise error.TestFail('Invalid credentials should not authenticate '
-                                 'while mounted.')
-
-        # Unmount the directory
-        cryptohome.unmount_vault(test_user)
-        # Ensure that the user directory is not mounted
-        if cryptohome.is_vault_mounted(user=test_user, allow_fail=True):
-            raise error.TestFail('Cryptohome did not unmount the user.')
-
-        # Test valid credentials when the user's directory is not mounted
-        if not cryptohome.test_auth(test_user, test_password):
-            raise error.TestFail('Valid credentials should authenticate '
-                                 'while not mounted.')
-
-        # Test invalid credentials fails while not mounted.
-        if cryptohome.test_auth(test_user, 'badpass'):
-            raise error.TestFail('Invalid credentials should not authenticate '
-                                 'when unmounted.')
-
-
-        # Re-mount existing test user vault, verifying that the mount succeeds.
-        cryptohome.mount_vault(test_user, test_password)
-
-        # Finally, unmount and destroy the vault again.
-        cryptohome.unmount_vault(test_user)
-        cryptohome.remove_vault(test_user)
-        if os.path.exists(os.path.join(constants.SHADOW_ROOT, user_hash)):
-            raise error.TestFail('Could not destroy the vault.')
diff --git a/client/site_tests/platform_CryptohomeTpmLiveTest/OWNERS b/client/site_tests/platform_CryptohomeTpmLiveTest/OWNERS
deleted file mode 100644
index b01b374..0000000
--- a/client/site_tests/platform_CryptohomeTpmLiveTest/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-emaxx@chromium.org
-apronin@chromium.org
diff --git a/client/site_tests/platform_CryptohomeTpmLiveTest/control b/client/site_tests/platform_CryptohomeTpmLiveTest/control
deleted file mode 100644
index 773605b..0000000
--- a/client/site_tests/platform_CryptohomeTpmLiveTest/control
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "emaxx, apronin"
-NAME = "platform_CryptohomeTpmLiveTest"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-JOB_RETRIES = 1
-
-DOC = """
-This test runs cryptohome's TPM live tests.
-"""
-
-job.run_test('platform_CryptohomeTpmLiveTest')
diff --git a/client/site_tests/platform_CryptohomeTpmLiveTest/platform_CryptohomeTpmLiveTest.py b/client/site_tests/platform_CryptohomeTpmLiveTest/platform_CryptohomeTpmLiveTest.py
deleted file mode 100644
index 232e2c5..0000000
--- a/client/site_tests/platform_CryptohomeTpmLiveTest/platform_CryptohomeTpmLiveTest.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import cryptohome
-
-
-class platform_CryptohomeTpmLiveTest(test.test):
-    """Run cryptohome's TPM live tests."""
-    version = 1
-
-    def run_once(self):
-        """Run TPM live tests."""
-        cryptohome.take_tpm_ownership(wait_for_ownership=True)
-
-        tpm_owner_password = cryptohome.get_tpm_password()
-        if not tpm_owner_password:
-            raise error.TestError('TPM owner password is empty after taking '
-                                  'ownership.')
-
-        # Execute the program which runs the actual test cases. When some test
-        # cases fail, the program will return with a non-zero exit code,
-        # resulting in raising the CmdError exception and failing the autotest.
-        utils.system_output('cryptohome-tpm-live-test', retain_output=True,
-                            args=['--owner_password=' + tpm_owner_password])
diff --git a/client/site_tests/platform_DBusMachineIdRotation/control b/client/site_tests/platform_DBusMachineIdRotation/control
deleted file mode 100644
index 71d17a5..0000000
--- a/client/site_tests/platform_DBusMachineIdRotation/control
+++ /dev/null
@@ -1,12 +0,0 @@
-AUTHOR = 'zeuthen'
-NAME = 'platform_DBusMachineIdRotation'
-TIME = 'FAST'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'platform'
-TEST_TYPE = 'client'
-ATTRIBUTES = "suite:experimental"
-DOC = """
-Verifies that /var/lib/dbus/machine-id is properly rotated.
-"""
-
-job.run_test('platform_DBusMachineIdRotation')
diff --git a/client/site_tests/platform_DBusMachineIdRotation/platform_DBusMachineIdRotation.py b/client/site_tests/platform_DBusMachineIdRotation/platform_DBusMachineIdRotation.py
deleted file mode 100755
index cb618b0..0000000
--- a/client/site_tests/platform_DBusMachineIdRotation/platform_DBusMachineIdRotation.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import subprocess
-import tempfile
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import avahi_utils
-
-class platform_DBusMachineIdRotation(test.test):
-    """Verify that /var/lib/dbus/machine-id is properly rotated.
-
-    To avoid interference with existing rotation scripts on the DUT,
-    we actually don't use /var/lib/dbus/machine-id for
-    testing. Instead we allocate a file on test start.
-    """
-    version = 1
-
-    def initialize(self):
-        """Allocates the machine-id file to use and initialize it."""
-        fd, self._machine_id_file = tempfile.mkstemp(prefix='machine-id-rot-')
-        os.write(fd, '0123456789abcdef0123456789abcdef\n')
-        os.close(fd)
-
-    def cleanup(self):
-        """Cleans up the allocated machine-id file."""
-        os.unlink(self._machine_id_file)
-
-    def _get_machine_id(self):
-        """Helper function to read the machine-id file."""
-        with open(self._machine_id_file, 'r') as f:
-            return f.read().strip()
-
-    def _test_forced_rotation(self):
-        """Check that forced regeneration work."""
-        machine_id_before = self._get_machine_id()
-        subprocess.check_call(['cros-machine-id-regen', '-r', 'network',
-                               '-p', self._machine_id_file])
-        machine_id_after = self._get_machine_id()
-        if machine_id_before == machine_id_after:
-            raise error.TestFail('Forced rotation failed.')
-
-    def _test_time_limit(self):
-        """Check that the machine-id is not regenerated unless a given amount
-        of time has passed."""
-        machine_id_before = self._get_machine_id()
-        subprocess.check_call(['cros-machine-id-regen', '-r', 'network',
-                               '-p', self._machine_id_file])
-        machine_id_after = self._get_machine_id()
-        if machine_id_before == machine_id_after:
-            raise error.TestFail('Forced rotation failed.')
-
-        # Now request a very long time limit (1000 seconds) and check
-        # that the machine-id hasn't been regenerated.
-        machine_id_before = self._get_machine_id()
-        subprocess.check_call(['cros-machine-id-regen', '-r', 'periodic',
-                               '-t', '1000', '-p', self._machine_id_file])
-        machine_id_after = self._get_machine_id()
-        if machine_id_before != machine_id_after:
-            raise error.TestFail('Rotated despite timeout not reached.')
-
-        # Sleep ten seconds and request regeneration if ten seconds
-        # have passed. This should always result in regeneration.
-        machine_id_before = self._get_machine_id()
-        time.sleep(10)
-        subprocess.check_call(['cros-machine-id-regen', '-r', 'periodic',
-                               '-t', '10', '-p', self._machine_id_file])
-        machine_id_after = self._get_machine_id()
-        if machine_id_after == machine_id_before:
-            raise error.TestFail('Not rotated despite timeout reached.')
-
-    def _test_avahi_host_name(self):
-        """Check that the Avahi host name is set to the machine-id when
-        cros-machine-id-regen runs."""
-        # Right now this throws if Avahi is running so manually
-        # catch and ignore any error.
-        try:
-            avahi_utils.avahi_start()
-        except:
-            pass
-        subprocess.check_call(['cros-machine-id-regen', '-r', 'network',
-                               '-p', self._machine_id_file])
-        machine_id = self._get_machine_id()
-        host_name = avahi_utils.avahi_get_hostname()
-        if host_name != machine_id:
-            raise error.TestFail('Avahi host name not updated as expected.')
-
-    def run_once(self):
-        """Run tests related to /var/lib/dbus/machine-id rotation."""
-        self._test_forced_rotation()
-        self._test_time_limit()
-        self._test_avahi_host_name()
diff --git a/client/site_tests/platform_DaemonsRespawn/control b/client/site_tests/platform_DaemonsRespawn/control
index 553ea25..ed397d5 100644
--- a/client/site_tests/platform_DaemonsRespawn/control
+++ b/client/site_tests/platform_DaemonsRespawn/control
@@ -5,7 +5,7 @@
 a few key daemons and make sure that they have been restarted properly.
 """
 
-NAME = 'platform_DaemonRespawn'
+NAME = 'platform_DaemonsRespawn'
 PURPOSE = 'Verify that daemons restart when they are killed.'
 CRITERIA = """
 Fails if daemons are not automatically restarted after they are killed.
@@ -13,5 +13,6 @@
 TEST_CLASS = "platform"
 TEST_CATEGORY = 'Functional'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 job.run_test('platform_DaemonsRespawn')
diff --git a/client/site_tests/platform_DaemonsRespawn/platform_DaemonsRespawn.py b/client/site_tests/platform_DaemonsRespawn/platform_DaemonsRespawn.py
index 6ee3ea7..1a1429a 100644
--- a/client/site_tests/platform_DaemonsRespawn/platform_DaemonsRespawn.py
+++ b/client/site_tests/platform_DaemonsRespawn/platform_DaemonsRespawn.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,9 +8,9 @@
 from autotest_lib.client.common_lib import error, utils
 
 class platform_DaemonsRespawn(test.test):
-  version = 1
+    version = 1
 
 
-  def run_once(self):
-    utils.system_output(self.bindir + "/test_respawn.sh",
-                        retain_output=True)
+    def run_once(self):
+        utils.system_output(self.bindir + "/test_respawn.sh",
+                            retain_output=True)
diff --git a/client/site_tests/platform_DebugDaemonDumpDebugLogs/control b/client/site_tests/platform_DebugDaemonDumpDebugLogs/control
deleted file mode 100644
index 740dc80..0000000
--- a/client/site_tests/platform_DebugDaemonDumpDebugLogs/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "platform_DebugDaemonDumpDebugLogs"
-PURPOSE = "Verify that debugd's DumpDebugLogs work"
-
-CRITERIA = """
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-Exercises the debugd DumpDebugLogs API.
-"""
-
-job.run_test('platform_DebugDaemonDumpDebugLogs')
diff --git a/client/site_tests/platform_DebugDaemonDumpDebugLogs/platform_DebugDaemonDumpDebugLogs.py b/client/site_tests/platform_DebugDaemonDumpDebugLogs/platform_DebugDaemonDumpDebugLogs.py
deleted file mode 100644
index 8a4eef7..0000000
--- a/client/site_tests/platform_DebugDaemonDumpDebugLogs/platform_DebugDaemonDumpDebugLogs.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import os
-import shutil
-import tarfile
-import tempfile
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-class platform_DebugDaemonDumpDebugLogs(test.test):
-    version = 1
-
-    def runDump(self, compressed):
-        filename = 'compressed_dump.tgz' if compressed else 'uncompressed_dump.tar'
-        tmp_file = os.path.join(self.tmp_dir, filename)
-        try:
-            fh = os.open(tmp_file, os.O_TRUNC | os.O_CREAT | os.O_WRONLY)
-            self.iface.DumpDebugLogs(compressed, fh, signature="bh")
-        except:
-            raise
-        finally:
-            os.close(fh)
-
-        mode = 'r:gz' if compressed else 'r:'
-        with tarfile.open(tmp_file, mode) as tar_file:
-            if len(tar_file.getmembers()) == 0:
-                raise error.TestFail("%s log file list is empty." %
-                       "compressed" if compressed else "uncompressed")
-
-
-    def run_once(self, *args, **kwargs):
-        bus = dbus.SystemBus()
-        proxy = bus.get_object('org.chromium.debugd', '/org/chromium/debugd')
-        self.iface = dbus.Interface(proxy,
-                                    dbus_interface='org.chromium.debugd')
-        self.tmp_dir = tempfile.mkdtemp()
-        self.runDump(True)
-        self.runDump(False)
-        if os.path.exists(self.tmp_dir):
-            shutil.rmtree(self.tmp_dir)
-
diff --git a/client/site_tests/platform_DebugDaemonGetNetworkStatus/control b/client/site_tests/platform_DebugDaemonGetNetworkStatus/control
deleted file mode 100644
index d5df994..0000000
--- a/client/site_tests/platform_DebugDaemonGetNetworkStatus/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "platform_DebugDaemonGetNetworkStatus"
-PURPOSE = "Verify that debugd's GetNetworkStatus works."
-
-CRITERIA = """
-GetNetworkStatus must return a valid, non-empty json object.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-Exercises the debugd GetNetworkStatus API. This test will only pass when run on
-a target with at least one network device.
-"""
-
-job.run_test('platform_DebugDaemonGetNetworkStatus')
diff --git a/client/site_tests/platform_DebugDaemonGetNetworkStatus/platform_DebugDaemonGetNetworkStatus.py b/client/site_tests/platform_DebugDaemonGetNetworkStatus/platform_DebugDaemonGetNetworkStatus.py
deleted file mode 100644
index e5a7b12..0000000
--- a/client/site_tests/platform_DebugDaemonGetNetworkStatus/platform_DebugDaemonGetNetworkStatus.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import json
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-class platform_DebugDaemonGetNetworkStatus(test.test):
-    version = 1
-
-    def run_once(self, *args, **kwargs):
-        bus = dbus.SystemBus()
-        proxy = bus.get_object('org.chromium.debugd', '/org/chromium/debugd')
-        self.iface = dbus.Interface(proxy,
-                                    dbus_interface='org.chromium.debugd')
-        result = self.iface.GetNetworkStatus()
-        logging.info('Result: %s' % result)
-        networks = json.loads(result)
-        if 'services' not in networks or 'devices' not in networks:
-            raise error.TestFail('No networks found: %s' % result)
diff --git a/client/site_tests/platform_DebugDaemonGetPerfData/control b/client/site_tests/platform_DebugDaemonGetPerfData/control
deleted file mode 100644
index c12995e..0000000
--- a/client/site_tests/platform_DebugDaemonGetPerfData/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "asharif bjanakiraman"
-NAME = "platform_DebugDaemonGetPerfData"
-PURPOSE = "Verify that debugd's GetPerfOutput works."
-
-CRITERIA = """
-GetPerfData must return a valid, non-empty protobuf.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-ATTRIBUTES = "suite:bvt-perbuild"
-
-DOC = """
-Exercises the debugd GetPerfData API.
-"""
-
-job.run_test('platform_DebugDaemonGetPerfData')
diff --git a/client/site_tests/platform_DebugDaemonGetPerfData/platform_DebugDaemonGetPerfData.py b/client/site_tests/platform_DebugDaemonGetPerfData/platform_DebugDaemonGetPerfData.py
deleted file mode 100644
index ab6c506..0000000
--- a/client/site_tests/platform_DebugDaemonGetPerfData/platform_DebugDaemonGetPerfData.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import cStringIO, collections, dbus, gzip, logging, subprocess
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-
-class platform_DebugDaemonGetPerfData(test.test):
-    """
-    This autotest tests the collection of perf data.  It calls perf indirectly
-    through debugd -> quipper -> perf.
-
-    The perf data is collected both when the system is idle and when there is a
-    process running in the background.
-
-    The perf data is collected over various durations.
-    """
-
-    version = 1
-
-    # A list of durations over which to gather perf data using quipper (given in
-    # seconds), plus the number of times to run perf with each duration.
-    # e.g. the entry "1: 50" means to run perf for 1 second 50 times.
-    _profile_duration_and_repetitions = [
-        (1, 3),
-        (5, 1)
-    ]
-
-    # Commands to repeatedly run in the background when collecting perf data.
-    _system_load_commands = {
-        'idle'     : 'sleep 1',
-        'busy'     : 'ls',
-    }
-
-    _dbus_debugd_object = '/org/chromium/debugd'
-    _dbus_debugd_name = 'org.chromium.debugd'
-
-    # For storing the size of returned results.
-    SizeInfo = collections.namedtuple('SizeInfo', ['size', 'size_zipped'])
-
-    def gzip_string(self, string):
-        """
-        Gzip a string.
-
-        @param string: The input string to be gzipped.
-
-        Returns:
-          The gzipped string.
-        """
-        string_file = cStringIO.StringIO()
-        gzip_file = gzip.GzipFile(fileobj=string_file, mode='wb')
-        gzip_file.write(string)
-        gzip_file.close()
-        return string_file.getvalue()
-
-
-    def validate_get_perf_method(self, duration, num_reps, load_type):
-        """
-        Validate a debugd method that returns perf data.
-
-        @param duration: The duration to use for perf data collection.
-        @param num_reps: Number of times to run.
-        @param load_type: A label to use for storing into perf keyvals.
-        """
-        # Dictionary for storing results returned from debugd.
-        # Key:   Name of data type (string)
-        # Value: Sizes of results in bytes (list of SizeInfos)
-        stored_results = collections.defaultdict(list)
-
-        for _ in range(num_reps):
-            perf_command = ['perf', 'record', '-a', '-e', 'cycles',
-                            '-c', '1000003']
-            status, perf_data, perf_stat = self.dbus_iface.GetPerfOutput(
-                duration, perf_command, signature="uas")
-            if status != 0:
-                raise error.TestFail('GetPerfOutput() returned status %d',
-                                     status)
-            if len(perf_data) == 0 and len(perf_stat) == 0:
-                raise error.TestFail('GetPerfOutput() returned no data')
-            if len(perf_data) > 0 and len(perf_stat) > 0:
-                raise error.TestFail('GetPerfOutput() returned both '
-                                     'perf_data and perf_stat')
-
-            result_type = None
-            if perf_data:
-                result = perf_data
-                result_type = "perf_data"
-            else:   # if perf_stat
-                result = perf_stat
-                result_type = "perf_stat"
-
-            logging.info('GetPerfOutput() for %s seconds returned %d '
-                         'bytes of type %s',
-                         duration, len(result), result_type)
-            if len(result) < 10:
-                raise error.TestFail('Perf output too small')
-
-            # Convert |result| from an array of dbus.Bytes to a string.
-            result = ''.join(chr(b) for b in result)
-
-            # If there was an error in collecting a profile with quipper, debugd
-            # will output an error message. Make sure to check for this message.
-            # It is found in PerfTool::GetPerfDataHelper() in
-            # debugd/src/perf_tool.cc.
-            if result.startswith('<process exited with status: '):
-                raise error.TestFail('Quipper failed: %s' % result)
-
-            stored_results[result_type].append(
-                self.SizeInfo(len(result), len(self.gzip_string(result))))
-
-        for result_type, sizes in stored_results.iteritems():
-            key = 'mean_%s_size_%s_%d' % (result_type, load_type, duration)
-            total_size = sum(entry.size for entry in sizes)
-            total_size_zipped = sum(entry.size_zipped for entry in sizes)
-
-            keyvals = {}
-            keyvals[key] = total_size / len(sizes)
-            keyvals[key + '_zipped'] = total_size_zipped / len(sizes)
-            self.write_perf_keyval(keyvals)
-
-
-    def run_once(self, *args, **kwargs):
-        """
-        Primary autotest function.
-        """
-
-        bus = dbus.SystemBus()
-        proxy = bus.get_object(
-            self._dbus_debugd_name, self._dbus_debugd_object, introspect=False)
-        self.dbus_iface = dbus.Interface(proxy,
-                                         dbus_interface=self._dbus_debugd_name)
-
-        # Open /dev/null to redirect unnecessary output.
-        devnull = open('/dev/null', 'w')
-
-        load_items = self._system_load_commands.iteritems()
-        for load_type, load_command in load_items:
-            # Repeatedly run the command for the current load.
-            cmd = 'while true; do %s; done' % load_command
-            process = subprocess.Popen(cmd, stdout=devnull, shell=True)
-
-            for duration, num_reps in self._profile_duration_and_repetitions:
-                # Collect perf data from debugd.
-                self.validate_get_perf_method(duration, num_reps, load_type)
-
-            # Terminate the process and actually wait for it to terminate.
-            process.terminate()
-            while process.poll() == None:
-                pass
-
-        devnull.close()
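
The platform_DebugDaemonGetPerfOutput test removed above exercised debugd's GetPerfOutput D-Bus method while a shell loop generated system load, then recorded the raw and gzip-compressed sizes of the returned profile. The call pattern it relied on can be summarized in the following standalone sketch; the service name, object path, three-value return, and 'uas' signature are taken from the deleted test, while the script wrapper itself is only an illustration and assumes python3-dbus on a ChromeOS device.

# Minimal sketch of the GetPerfOutput call made by the deleted test; not a
# drop-in replacement for the autotest, just the D-Bus interaction.
import gzip
import dbus

DEBUGD_NAME = 'org.chromium.debugd'
DEBUGD_PATH = '/org/chromium/debugd'

def collect_cycles_profile(duration_secs=4):
    """Ask debugd to run perf for |duration_secs| and return the result."""
    bus = dbus.SystemBus()
    proxy = bus.get_object(DEBUGD_NAME, DEBUGD_PATH, introspect=False)
    iface = dbus.Interface(proxy, dbus_interface=DEBUGD_NAME)

    perf_command = ['perf', 'record', '-a', '-e', 'cycles', '-c', '1000003']
    status, perf_data, perf_stat = iface.GetPerfOutput(
            duration_secs, perf_command, signature='uas')
    if status != 0:
        raise RuntimeError('GetPerfOutput failed with status %d' % status)

    # Exactly one of perf_data / perf_stat is expected to be populated.
    result = bytes(perf_data) if len(perf_data) else bytes(perf_stat)
    # quipper reports collection errors as a text message on its output.
    if result.startswith(b'<process exited with status: '):
        raise RuntimeError('quipper failed: %r' % result[:80])
    return result

if __name__ == '__main__':
    blob = collect_cycles_profile()
    print('profile: %d bytes raw, %d bytes gzipped'
          % (len(blob), len(gzip.compress(blob))))
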
diff --git a/client/site_tests/platform_DebugDaemonGetPerfOutputFd/control b/client/site_tests/platform_DebugDaemonGetPerfOutputFd/control
deleted file mode 100644
index 10a5226..0000000
--- a/client/site_tests/platform_DebugDaemonGetPerfOutputFd/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "platform_DebugDaemonGetPerfOutputFd"
-PURPOSE = "Verify that debugd's GetPerfOutputFd and StopPerf works."
-
-CRITERIA = """
-GetPerfOutputFd must return a valid, non-empty protobuf through the pipe.
-StopPerf must stop the perf session.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-ATTRIBUTES = "suite:bvt-perbuild"
-
-DOC = """
-Exercises the debugd GetPerfOutputFd and StopPerf API.
-"""
-
-job.run_test('platform_DebugDaemonGetPerfOutputFd')
diff --git a/client/site_tests/platform_DebugDaemonGetPerfOutputFd/platform_DebugDaemonGetPerfOutputFd.py b/client/site_tests/platform_DebugDaemonGetPerfOutputFd/platform_DebugDaemonGetPerfOutputFd.py
deleted file mode 100644
index 0774f73..0000000
--- a/client/site_tests/platform_DebugDaemonGetPerfOutputFd/platform_DebugDaemonGetPerfOutputFd.py
+++ /dev/null
@@ -1,204 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus, os, time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import debugd_util
-
-class platform_DebugDaemonGetPerfOutputFd(test.test):
-    """
-    This autotest tests the collection of perf data.  It calls perf indirectly
-    through debugd -> quipper -> perf. This also tests stopping the perf
-    session.
-
-    The perf data is read from a pipe that is redirected to stdout of the
-    quipper process.
-    """
-
-    version = 1
-
-    def check_perf_output(self, perf_data):
-        """
-        Utility function to validate the perf data that was previously read
-        from the pipe.
-        """
-        if len(perf_data) < 10:
-            raise error.TestFail('Perf output (%s) too small' % perf_data)
-
-        # Perform basic sanity checks of the perf data: it should contain
-        # [kernel.kallsyms] and /usr/bin/perf
-        if (perf_data.find('[kernel.kallsyms]') == -1 or
-                perf_data.find('/usr/bin/perf') == -1):
-            raise error.TestFail('Quipper failed: %s' % perf_data)
-
-    def call_get_perf_output_fd(self, duration):
-        """
-        Utility function to call DBus method GetPerfOutputFd with the given
-        duration.
-        """
-        pipe_r, pipe_w = os.pipe()
-        perf_command = ['perf', 'record', '-a', '-F', '100']
-
-        session_id = self.dbus_iface.GetPerfOutputFd(
-            duration, perf_command, dbus.types.UnixFd(pipe_w), signature="uash")
-
-        # pipe_w is dup()'d in calling dbus. Close in this process.
-        os.close(pipe_w)
-
-        if session_id == 0:
-            raise error.TestFail('Invalid session ID from GetPerfOutputFd')
-
-        # Don't explicitly os.close(pipe_r) since it will be closed
-        # automatically when the file object returned by os.fdopen() is closed.
-        return session_id, os.fdopen(pipe_r, 'r')
-
-    def call_stop_perf(self, session_id, real_duration):
-        """
-        Utility function to call DBus method StopPerf to collect perf data
-        collected within the given duration.
-        """
-        # Sleep for real_duration seconds and then stop the perf session.
-        time.sleep(real_duration)
-        self.dbus_iface.StopPerf(session_id, signature='t')
-
-    def test_full_duration(self):
-        """
-        Test GetPerfOutpuFd to collect a profile of 2 seconds.
-        """
-
-        session_id, result_file = self.call_get_perf_output_fd(2)
-
-        # This performs synchronous read until perf exits.
-        result = result_file.read()
-
-        self.check_perf_output(result)
-
-    def test_stop_perf(self):
-        """
-        Test StopPerf by calling GetPerfOutputFd to collect a profile of 30
-        seconds. After the perf session is started for 2 seconds, call StopPerf
-        to stop the profiling session. The net result is a profile of 2
-        seconds. Verify StopPerf working by timing the test case: the test case
-        shouldn't run for 30 seconds or longer.
-        """
-        start = time.time()
-
-        # Default duration is 30 sec.
-        session_id, result_file = self.call_get_perf_output_fd(30)
-
-        # Get a profile of 2 seconds by premature stop.
-        self.call_stop_perf(session_id, 2)
-
-        # This performs synchronous read until perf exits.
-        result = result_file.read()
-
-        self.check_perf_output(result)
-
-        end = time.time()
-        if (end - start) >= 30:
-            raise error.TestFail('Unable to stop the perf tool')
-
-    def test_start_after_previous_finished(self):
-        """
-        Test consecutive GetPerfOutputFd calls that there is no undesirable
-        side effect left in the previous profiling session.
-        """
-        self.test_full_duration()
-        self.test_full_duration()
-
-    def test_stop_without_start(self):
-        """
-        Test unmatched StopPerf call by checking the returned DBusException.
-        """
-        dbus_message = None
-        try:
-            self.call_stop_perf(0, 1)
-        except dbus.exceptions.DBusException as dbus_exception:
-            dbus_message = dbus_exception.get_dbus_message()
-
-        if dbus_message is None:
-            raise error.TestFail('DBusException expected')
-        if dbus_message.find('Perf tool not started') == -1:
-            raise error.TestFail('Unexpected DBus message: %s' % dbus_message)
-
-    def test_stop_using_wrong_id(self):
-        """
-        Test calling StopPerf with an invalid session ID by checking the
-        returned DBusException.
-        """
-        start = time.time()
-
-        # Default duration is 30 sec.
-        session_id, result_file = self.call_get_perf_output_fd(30)
-
-        dbus_message = None
-        try:
-            # Use session_id - 1 to trigger the error condition.
-            self.call_stop_perf(session_id - 1, 1)
-        except dbus.exceptions.DBusException as dbus_exception:
-            dbus_message = dbus_exception.get_dbus_message()
-
-        if dbus_message is None:
-            raise error.TestFail('DBusException expected')
-        if dbus_message.find('Invalid profile session id') == -1:
-            raise error.TestFail('Unexpected DBus message: %s' % dbus_message)
-
-        # Get a profile of 1 second by premature stop.
-        self.call_stop_perf(session_id, 1)
-
-        # This performs synchronous read until perf exits.
-        result = result_file.read()
-
-        self.check_perf_output(result)
-
-        end = time.time()
-        if (end - start) >= 30:
-            raise error.TestFail('Unable to stop the perf tool')
-
-    def test_start_2nd_time(self):
-        """
-        Test calling GetPerfOutputFd when an existing profiling session is
-        running: the 2nd call should yield a DBusException without affecting
-        the 1st call.
-        """
-        # Default duration is 30 sec.
-        session_id, result_file = self.call_get_perf_output_fd(30)
-
-        dbus_message = None
-        try:
-            self.call_get_perf_output_fd(60)
-        except dbus.exceptions.DBusException as dbus_exception:
-            dbus_message = dbus_exception.get_dbus_message()
-
-        if dbus_message is None:
-            raise error.TestFail('DBusException expected')
-        if dbus_message.find('Existing perf tool running') == -1:
-            raise error.TestFail('Unexpected DBus message: %s' % dbus_message)
-
-        # Get a profile of 1 second by premature stop.
-        self.call_stop_perf(session_id, 1)
-
-        # This performs synchronous read until perf exits.
-        result = result_file.read()
-
-        self.check_perf_output(result)
-
-    def run_once(self, *args, **kwargs):
-        """
-        Primary autotest function.
-        """
-        # Setup.
-        self.dbus_iface = debugd_util.iface()
-
-        # Test normal cases.
-        self.test_full_duration()
-        self.test_start_after_previous_finished()
-        self.test_stop_perf()
-
-        # Test error cases.
-        self.test_stop_without_start()
-        self.test_stop_using_wrong_id()
-        self.test_start_2nd_time()
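
The platform_DebugDaemonGetPerfOutputFd test deleted above covers the pipe-based variant of the same API: the caller hands debugd the write end of a pipe, quipper's output is streamed into it, and StopPerf can end the session before the requested duration elapses. The session lifecycle that the individual test cases exercise piecemeal looks, in one place, roughly like this sketch (method names and D-Bus signatures are those used by the deleted test; error handling is trimmed for brevity):

# Sketch of the GetPerfOutputFd / StopPerf lifecycle, assuming debugd is
# running on the device and the script runs with sufficient privileges.
import os
import time
import dbus

def profile_with_early_stop(iface, requested_secs=30, actual_secs=2):
    """Start a pipe-backed perf session and stop it after |actual_secs|."""
    pipe_r, pipe_w = os.pipe()
    perf_command = ['perf', 'record', '-a', '-F', '100']

    session_id = iface.GetPerfOutputFd(
            requested_secs, perf_command, dbus.types.UnixFd(pipe_w),
            signature='uash')
    # The write end is dup()'d inside the D-Bus call; close our copy so the
    # reader sees EOF once quipper exits.
    os.close(pipe_w)
    if session_id == 0:
        raise RuntimeError('GetPerfOutputFd returned an invalid session id')

    # Let the profile run briefly, then stop the session early.
    time.sleep(actual_secs)
    iface.StopPerf(session_id, signature='t')

    # Reading until EOF blocks until quipper finishes writing the profile.
    with os.fdopen(pipe_r, 'rb') as result_file:
        return result_file.read()

if __name__ == '__main__':
    bus = dbus.SystemBus()
    proxy = bus.get_object('org.chromium.debugd', '/org/chromium/debugd')
    iface = dbus.Interface(proxy, dbus_interface='org.chromium.debugd')
    print('collected %d bytes of perf data' % len(profile_with_early_stop(iface)))
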
diff --git a/client/site_tests/platform_DebugDaemonGetRoutes/control b/client/site_tests/platform_DebugDaemonGetRoutes/control
deleted file mode 100644
index e5b233e..0000000
--- a/client/site_tests/platform_DebugDaemonGetRoutes/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "platform_DebugDaemonGetRoutes"
-PURPOSE = "Verify that debugd's GetRoutes works."
-
-CRITERIA = """
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-Exercises the debugd GetRoutes API.
-"""
-
-job.run_test('platform_DebugDaemonGetRoutes')
diff --git a/client/site_tests/platform_DebugDaemonGetRoutes/platform_DebugDaemonGetRoutes.py b/client/site_tests/platform_DebugDaemonGetRoutes/platform_DebugDaemonGetRoutes.py
deleted file mode 100644
index ff63a24..0000000
--- a/client/site_tests/platform_DebugDaemonGetRoutes/platform_DebugDaemonGetRoutes.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import logging
-
-from autotest_lib.client.bin import test
-
-class platform_DebugDaemonGetRoutes(test.test):
-    """Checks that the debugd GetRoutes function is working."""
-    version = 1
-
-    def run_once(self, *args, **kwargs):
-        bus = dbus.SystemBus()
-        proxy = bus.get_object('org.chromium.debugd', '/org/chromium/debugd')
-        self.iface = dbus.Interface(proxy,
-                                    dbus_interface='org.chromium.debugd')
-        ip4_routes = self.iface.GetRoutes({}, signature="a{sv}")
-        logging.debug('IP4 Routes: %s', ip4_routes)
-        ip6_routes = self.iface.GetRoutes({'v6': True}, signature="a{sv}")
-        logging.debug('IP6 Routes: %s', ip6_routes)
diff --git a/client/site_tests/platform_DebugDaemonPerfDataInFeedbackLogs/control b/client/site_tests/platform_DebugDaemonPerfDataInFeedbackLogs/control
deleted file mode 100644
index 88ce8cd..0000000
--- a/client/site_tests/platform_DebugDaemonPerfDataInFeedbackLogs/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "chinglin yu"
-NAME = "platform_DebugDaemonPerfDataInFeedbackLogs"
-PURPOSE = "Verify that feedback logs contain perf data"
-
-CRITERIA = """
-GetFeedbackLogs must contain a perf profile.
-"""
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-ATTRIBUTES = "suite:bvt-perbuild"
-
-DOC = """
-Exercises the debugd GetFeedbackLogs API and checks for a valid perf profile.
-"""
-
-job.run_test('platform_DebugDaemonPerfDataInFeedbackLogs')
diff --git a/client/site_tests/platform_DebugDaemonPerfDataInFeedbackLogs/platform_DebugDaemonPerfDataInFeedbackLogs.py b/client/site_tests/platform_DebugDaemonPerfDataInFeedbackLogs/platform_DebugDaemonPerfDataInFeedbackLogs.py
deleted file mode 100644
index 34c2f47..0000000
--- a/client/site_tests/platform_DebugDaemonPerfDataInFeedbackLogs/platform_DebugDaemonPerfDataInFeedbackLogs.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import base64, dbus, json, logging, os
-from subprocess import Popen, PIPE
-from threading import Thread
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import debugd_util
-
-class PipeReader():
-    """
-    The class to read from a pipe. Intended for running off the main thread.
-    """
-    def __init__(self, pipe_r):
-        self.pipe_r = pipe_r
-
-    def read(self):
-        """
-        Drain from self.pipe_r and store the result in self.result. This method
-        runs in a new thread.
-        """
-        # Read feedback logs content (JSON) from pipe_r.
-        self.result = os.fdopen(self.pipe_r, 'r').read()
-
-class platform_DebugDaemonPerfDataInFeedbackLogs(test.test):
-    """
-    This autotest tests perf profile in feedback logs. It calls the debugd
-    method GetBigFeedbackLogs and checks whether 'perf-data' is present in the
-    returned logs. The perf data is base64-encoded lzma-compressed quipper
-    output.
-    """
-
-    version = 1
-
-    def xz_decompress_string(self, compressed_input):
-        """
-        xz-decompresses a string.
-
-        @param compressed_input: The input string to be decompressed.
-
-        Returns:
-          The decompressed string.
-        """
-        process = Popen('/usr/bin/xz -d', stdout=PIPE, stderr=PIPE, stdin=PIPE,
-                        shell=True)
-        out, err = process.communicate(input=compressed_input)
-
-        if len(err) > 0:
-            raise error.TestFail('decompress() failed with %s' % err)
-
-        logging.info('decompress() %d -> %d bytes', len(compressed_input),
-                     len(out))
-        return out
-
-    def validate_perf_data_in_feedback_logs(self):
-        """
-        Validate that feedback logs contain valid perf data.
-        """
-        pipe_r, pipe_w = os.pipe()
-
-        # GetBigFeedbackReport transfers large content through the pipe. We
-        # need to read from the pipe off-thread to prevent a deadlock.
-        pipe_reader = PipeReader(pipe_r)
-        thread = Thread(target = pipe_reader.read)
-        thread.start()
-
-        # Use 180-sec timeout because GetBigFeedbackLogs runs arc-bugreport,
-        # which takes a while to finish.
-        debugd_util.iface().GetBigFeedbackLogs(dbus.types.UnixFd(pipe_w), '',
-                                               signature='hs', timeout=180)
-
-        # pipe_w is dup()'d in calling dbus. Close in this process.
-        os.close(pipe_w)
-        thread.join()
-
-        # Decode into a dictionary.
-        logs = json.loads(pipe_reader.result)
-
-        if len(logs) == 0:
-            raise error.TestFail('GetBigFeedbackLogs() returned no data')
-        logging.info('GetBigFeedbackLogs() returned %d elements.', len(logs))
-
-        perf_data = logs['perf-data']
-
-        if perf_data is None:
-            raise error.TestFail('perf-data not found in feedback logs')
-
-        BLOB_START_TOKEN = '<base64>: '
-        try:
-            blob_start = perf_data.index(BLOB_START_TOKEN)
-        except:
-            raise error.TestFail(("perf-data doesn't include base64 encoded"
-                                  "data"))
-
-        # Skip description text and BLOB_START_TOKEN
-        perf_data = perf_data[blob_start + len(BLOB_START_TOKEN):]
-
-        logging.info('base64 perf data: %d bytes', len(perf_data))
-
-        # This raises TypeError if input is invalid base64-encoded data.
-        compressed_data = base64.b64decode(perf_data)
-
-        protobuff = self.xz_decompress_string(compressed_data)
-        if len(protobuff) < 10:
-            raise error.TestFail('Perf output too small (%d bytes)' %
-                                 len(protobuff))
-
-        if protobuff.startswith('<process exited with status: '):
-            raise error.TestFail('Failed to capture a profile: %s' %
-                                 protobuff)
-
-    def run_once(self, *args, **kwargs):
-        """
-        Primary autotest function.
-        """
-        self.validate_perf_data_in_feedback_logs()
-
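
The feedback-logs test removed above also documents the on-the-wire format of the profile: GetBigFeedbackLogs streams a JSON dictionary through the supplied pipe, and its 'perf-data' entry holds a human-readable description followed by a '<base64>: ' token and base64-encoded, xz-compressed quipper output. The decode path alone, assuming the JSON string has already been read from the pipe, can be sketched as follows (the standard lzma module stands in for the /usr/bin/xz subprocess the test shelled out to):

# Sketch of decoding the 'perf-data' entry from a feedback-logs JSON blob,
# following the format checks in the deleted test. |logs_json| is assumed
# to be the full string read from the GetBigFeedbackLogs pipe.
import base64
import json
import lzma

BLOB_START_TOKEN = '<base64>: '

def extract_perf_protobuf(logs_json):
    """Return the decompressed quipper output embedded in feedback logs."""
    logs = json.loads(logs_json)
    perf_data = logs.get('perf-data')
    if not perf_data:
        raise ValueError('perf-data not found in feedback logs')

    # Skip the description text that precedes the base64 payload.
    blob_start = perf_data.index(BLOB_START_TOKEN)
    payload = perf_data[blob_start + len(BLOB_START_TOKEN):]

    compressed = base64.b64decode(payload)
    # The payload is an .xz container, which lzma decodes directly.
    return lzma.decompress(compressed)
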
diff --git a/client/site_tests/platform_DebugDaemonPing/control b/client/site_tests/platform_DebugDaemonPing/control
deleted file mode 100644
index 7a407bc..0000000
--- a/client/site_tests/platform_DebugDaemonPing/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "platform_DebugDaemonPing"
-PURPOSE = "Verify that debugd's PingStart and PingStop work"
-
-CRITERIA = """
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-Exercises the debugd Ping API.
-"""
-
-job.run_test('platform_DebugDaemonPing')
diff --git a/client/site_tests/platform_DebugDaemonPing/platform_DebugDaemonPing.py b/client/site_tests/platform_DebugDaemonPing/platform_DebugDaemonPing.py
deleted file mode 100644
index 4efba8a..0000000
--- a/client/site_tests/platform_DebugDaemonPing/platform_DebugDaemonPing.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-class platform_DebugDaemonPing(test.test):
-    version = 1
-
-    def run_once(self, *args, **kwargs):
-        bus = dbus.SystemBus()
-        proxy = bus.get_object('org.chromium.debugd', '/org/chromium/debugd')
-        self.iface = dbus.Interface(proxy,
-                                    dbus_interface='org.chromium.debugd')
-        handle = self.iface.PingStart(1, "127.0.0.1", {}, signature="hsa{sv}")
-        self.iface.PingStop(handle)
-        got_exception = False
-        try:
-            self.iface.PingStop(handle)
-        except dbus.DBusException as e:
-            if e.get_dbus_name() == 'org.chromium.debugd.error.NoSuchProcess':
-                got_exception = True
-            else:
-                print "Unexpected exception %s" % e.get_dbus_name()
-        if not got_exception:
-            raise error.TestFail("Didn't get expected exception.")
diff --git a/client/site_tests/platform_DebugDaemonTracePath/control b/client/site_tests/platform_DebugDaemonTracePath/control
deleted file mode 100644
index e66521a..0000000
--- a/client/site_tests/platform_DebugDaemonTracePath/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "platform_DebugDaemonTracePath"
-PURPOSE = "Verify that debugd's TracePathStart and TracePathStop work"
-
-CRITERIA = """
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-Exercises the debugd TracePath API.
-"""
-
-job.run_test('platform_DebugDaemonTracePath')
diff --git a/client/site_tests/platform_DebugDaemonTracePath/platform_DebugDaemonTracePath.py b/client/site_tests/platform_DebugDaemonTracePath/platform_DebugDaemonTracePath.py
deleted file mode 100644
index 81d25f2..0000000
--- a/client/site_tests/platform_DebugDaemonTracePath/platform_DebugDaemonTracePath.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-class platform_DebugDaemonTracePath(test.test):
-    version = 1
-
-    def run_once(self, *args, **kwargs):
-        bus = dbus.SystemBus()
-        proxy = bus.get_object('org.chromium.debugd', '/org/chromium/debugd')
-        self.iface = dbus.Interface(proxy,
-                                    dbus_interface='org.chromium.debugd')
-        handle = self.iface.TracePathStart(
-            1, "127.0.0.1", {}, signature="hsa{sv}")
-        self.iface.TracePathStop(handle)
-        got_exception = False
-        try:
-            self.iface.TracePathStop(handle)
-        except dbus.DBusException as e:
-            if e.get_dbus_name() == 'org.chromium.debugd.error.NoSuchProcess':
-                got_exception = True
-            else:
-                print "Unexpected exception %s" % e.get_dbus_name()
-        if not got_exception:
-            raise error.TestFail("Didn't get expected exception.")
diff --git a/client/site_tests/platform_EncryptedStateful/control b/client/site_tests/platform_EncryptedStateful/control
index fbfd2a8..5076f8b 100644
--- a/client/site_tests/platform_EncryptedStateful/control
+++ b/client/site_tests/platform_EncryptedStateful/control
@@ -3,7 +3,7 @@
 # found in the LICENSE file.
 
 NAME = "platform_EncryptedStateful"
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 PURPOSE = "Verify mount-encrypted operates as expected"
 CRITERIA = """
 Fails if the mount-encrypted helper does not operated as expected
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "security"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
     Ensures the mount-encrypted helper for mounting the encrypted stateful
diff --git a/client/site_tests/platform_EncryptedStateful/platform_EncryptedStateful.py b/client/site_tests/platform_EncryptedStateful/platform_EncryptedStateful.py
index 4cca629..2e33c7f 100644
--- a/client/site_tests/platform_EncryptedStateful/platform_EncryptedStateful.py
+++ b/client/site_tests/platform_EncryptedStateful/platform_EncryptedStateful.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -11,23 +12,26 @@
 #  - test failure when things aren't mounted correctly
 
 class test_checker(object):
+    """ A helper for test result """
+
     def __init__(self):
         logging.info("test_checker.__init__")
-        # Empty failure list means test passes.
+        """ Empty failure list means test passes. """
         self._failures = []
 
     def _passed(self, msg):
-        logging.info('ok: %s' % (msg))
+        logging.info('ok: %s', (msg))
 
     def _failed(self, msg):
-        logging.error('FAIL: %s' % (msg))
+        logging.error('FAIL: %s', (msg))
         self._failures.append(msg)
 
     def _fatal(self, msg):
-        logging.error('FATAL: %s' % (msg))
+        logging.error('FATAL: %s', (msg))
         raise error.TestError(msg)
 
     def check(self, boolean, msg, fatal=False):
+        """ Check the result log error """
         if boolean == True:
             self._passed(msg)
         else:
@@ -38,7 +42,7 @@
                 self._failed(msg)
 
     def test_raise(self):
-        # Raise a failure if anything unexpected was seen.
+        """ Raise a failure if anything unexpected was seen. """
         if len(self._failures):
             raise error.TestFail((", ".join(self._failures)))
 
@@ -46,6 +50,8 @@
 
 
 class EncryptedStateful(object):
+    """ A helper to operate the encrypted stateful. """
+
     def _prepare_simulated_root(self):
         os.makedirs(self.var)
         os.makedirs(self.chronos)
@@ -87,6 +93,7 @@
         self.mounted = not self.simulated
 
     def mount(self, args=""):
+        """ Mount the encstateful partition """
         if self.mounted or not self.simulated:
             return
         # TODO(keescook): figure out what is killing the resizer and
@@ -97,27 +104,30 @@
         # unresized. It would be better to have the resizer running in
         # the background, as it is designed, so we can examine its behavior
         # during testing (e.g. "does the filesystem actually grow?").
-        utils.system("MOUNT_ENCRYPTED_ROOT=%s mount-encrypted %s 2>&1 "
-                     "| tee %s" % (self.root, args, self.mount_log))
+        utils.system("MOUNT_ENCRYPTED_ROOT=%s mount-encrypted --unsafe "
+                     "%s 2>&1 | tee %s" % (self.root, args, self.mount_log))
         self.mounted = True
 
     def umount(self):
+        """ Unmount the encstateful partition """
         if not self.mounted or not self.simulated:
             return
         utils.system("MOUNT_ENCRYPTED_ROOT=%s mount-encrypted umount" %
                          (self.root))
         self.mounted = False
 
-    # Clean up when destroyed.
     def __del__(self):
+        """ Clean up when destroyed. """
         if self.simulated:
             self.umount()
             utils.system("umount -n %s" % (self.stateful))
             shutil.rmtree(self.root)
 
-    # Perform common post-mount size/owner checks on the filesystem and
-    # backing files.
     def check_sizes(self, finalized=True):
+        """
+        Perform common post-mount size/owner checks on the filesystem and
+        backing files.
+        """
         # Do we have the expected backing files?
         chk.check(os.path.exists(self.block), "%s exists" % (self.block))
         if finalized:
@@ -129,7 +139,7 @@
         chk.check(os.path.exists(keyfile), "%s exists" % (keyfile))
         chk.check(not os.path.exists(other), "%s does not exist" % (other))
 
-        # Sanity check the key file stat.
+        # Check the key file stat.
         info = os.stat(keyfile)
         chk.check(stat.S_ISREG(info.st_mode),
                   "%s is regular file" % (keyfile))
@@ -139,7 +149,7 @@
                   "%s is S_IRUSR | S_IWUSR" % (keyfile))
         chk.check(info.st_size == 48, "%s is 48 bytes" % (keyfile))
 
-        # Sanity check the block file stat.
+        # Check the block file stat.
         info = os.stat(self.block)
         chk.check(stat.S_ISREG(info.st_mode),
                   "%s is regular file" % (self.block))
@@ -183,8 +193,9 @@
         start = None
         size = 0
         while True:
-            k = long(utils.system_output("du -sk %s" % (self.block),
-                                         retain_output = True).split()[0])
+            k = int(
+                    utils.system_output("du -sk %s" % (self.block),
+                                        retain_output=True).split()[0])
             if start == None:
                 start = k
             if k == size:
@@ -194,12 +205,14 @@
             time.sleep(10)
             utils.system("sync")
             size = k
-        logging.info("%s stabilized at %dK (was %dK)" %
+        logging.info("%s stabilized at %dK (was %dK)",
                      (self.block, size, start))
 
-    # Check that the backing file reclaims space when filesystem contents
-    # are deleted.
     def check_reclamation(self):
+        """
+        Check that the backing file reclaims space when filesystem contents
+        are deleted.
+        """
         # This test is sensitive to other things happening on the filesystem,
         # so we must wait for background initialization to finish first.
         self._backing_stabilize()
@@ -241,25 +254,33 @@
 
 
 class platform_EncryptedStateful(test.test):
+    """ Test encrypted stateful partition."""
     version = 1
 
-    # With b/80549098, PUNCH_HOLE was disabled for all kernel trees
-    # before v4.4. This means that the reclamation check will only work
-    # with kernels that support PUNCH_HOLE.
     def is_punch_hole_supported(self):
+        """
+        With b/80549098, PUNCH_HOLE was disabled for all kernel trees
+        before v4.4. This means that the reclamation check will only work
+        with kernels that support PUNCH_HOLE.
+        """
         kernel_ver = os.uname()[2]
         if utils.compare_versions(kernel_ver, "4.4") < 0 :
             return False
         return True
 
     def existing_partition(self):
+        """ Do a no-write test of system's existing encrypted partition. """
         # Examine the existing encrypted partition.
         encstate = EncryptedStateful("/")
 
-        # Perform post-mount sanity checks (and handle unfinalized devices).
+        # Perform post-mount confidence check (and handle unfinalized devices).
         encstate.check_sizes(finalized=os.path.exists(encstate.key))
 
     def no_tpm(self):
+        """
+        Do a no-write, no-TPM test with confidence checks. Also do a
+        reclamation check against the encrypted stateful partition.
+        """
         encstate = EncryptedStateful()
 
         # Make sure we haven't run here before.
@@ -280,8 +301,8 @@
             if os.path.exists(off):
                 utils.system("mv %s %s" % (off, tpm))
 
-        # Perform post-mount sanity checks.
-        encstate.check_sizes(finalized=False)
+        # Perform post-mount confidence checks.
+        encstate.check_sizes(finalized=True)
 
         # Check disk reclamation for kernels that support PUNCH_HOLE.
         if self.is_punch_hole_supported():
@@ -291,9 +312,10 @@
         encstate.umount()
 
     def run_once(self):
+        """ Primary autotest function. """
         # Do a no-write test of system's existing encrypted partition.
         self.existing_partition()
 
-        # Do a no-write, no-TPM test with sanity checks. Also do a reclamation
-        # check against the encrypted stateful partition.
+        # Do a no-write, no-TPM test with confidence checks. Also do a
+        # reclamation check against the encrypted stateful partition.
         self.no_tpm()
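
One detail in the platform_EncryptedStateful update above is the backing-file stabilization loop: before checking reclamation, the test repeatedly samples `du -sk` on the backing file and proceeds only once the reported size stops changing (the Python 3 migration swaps long() for int() in exactly that spot). Outside the autotest utils wrappers, the same loop can be sketched with subprocess; the ten-second interval and the explicit sync between samples mirror the test, while the helper name is just for illustration.

# Plain-Python sketch of the backing-file stabilization loop from
# platform_EncryptedStateful, using subprocess instead of autotest's
# utils.system_output (an assumption for a self-contained example).
import subprocess
import time

def wait_for_stable_size(path, interval_secs=10):
    """Poll `du -sk` on |path| until two consecutive samples match."""
    previous = None
    while True:
        output = subprocess.check_output(['du', '-sk', path]).decode()
        kbytes = int(output.split()[0])
        if kbytes == previous:
            return kbytes
        # Give background writes time to land, then flush them to disk so
        # the next sample reflects actual usage.
        time.sleep(interval_secs)
        subprocess.check_call(['sync'])
        previous = kbytes
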
diff --git a/client/site_tests/platform_FileNum/control b/client/site_tests/platform_FileNum/control
index 19befc1..11a604d 100644
--- a/client/site_tests/platform_FileNum/control
+++ b/client/site_tests/platform_FileNum/control
@@ -11,5 +11,6 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 job.run_test('platform_FileNum')
diff --git a/client/site_tests/platform_FileNum/platform_FileNum.py b/client/site_tests/platform_FileNum/platform_FileNum.py
index 97cc7bf..0050c37 100755
--- a/client/site_tests/platform_FileNum/platform_FileNum.py
+++ b/client/site_tests/platform_FileNum/platform_FileNum.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python2
-#
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -46,7 +44,7 @@
 
             os.makedirs(create_dir)
 
-            for i in xrange(count):
+            for i in range(count):
                 f = open(os.path.join(create_dir, '%d.txt' % i), 'w')
                 f.write(self._TEST_TEXT)
                 f.close()
diff --git a/client/site_tests/platform_FileSize/control b/client/site_tests/platform_FileSize/control
index 7b4724d..b363f45 100644
--- a/client/site_tests/platform_FileSize/control
+++ b/client/site_tests/platform_FileSize/control
@@ -11,5 +11,5 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
-
+PY_VERSION = 3
 job.run_test('platform_FileSize')
diff --git a/client/site_tests/platform_FileSize/platform_FileSize.py b/client/site_tests/platform_FileSize/platform_FileSize.py
index bbc20d2..638b6e7 100755
--- a/client/site_tests/platform_FileSize/platform_FileSize.py
+++ b/client/site_tests/platform_FileSize/platform_FileSize.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python2
-#
 # Copyright (c) 2010 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -36,10 +34,9 @@
         """
         TEXT = 'ChromeOS knows how to make your netbook run fast!\n'
         count = size * 20000
-        fh = file(fname, 'w')
-        for i in range(count):
-            fh.write(TEXT)
-        fh.close()
+        with open(fname, 'w') as fh:
+            for i in range(count):
+                fh.write(TEXT)
 
         if os.path.exists(fname):
             fsize = os.path.getsize(fname)
diff --git a/client/site_tests/platform_Fingerprint/OWNERS b/client/site_tests/platform_Fingerprint/OWNERS
deleted file mode 100644
index fa05dac..0000000
--- a/client/site_tests/platform_Fingerprint/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-set noparent
-include /FINGERPRINT_OWNERS
diff --git a/client/site_tests/platform_Fingerprint/control.biod_is_running b/client/site_tests/platform_Fingerprint/control.biod_is_running
deleted file mode 100644
index 3e49dcc..0000000
--- a/client/site_tests/platform_Fingerprint/control.biod_is_running
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_Fingerprint.biod_is_running"
-PURPOSE = "Checks that biod is running."
-CRITERIA = """
-Fails if biod is not running.
-"""
-ATTRIBUTES = "suite:fingerprint"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-DEPENDENCIES = "fingerprint"
-JOB_RETRIES = 0
-
-DOC = """
-Checks that biod is running.
-"""
-
-job.run_test('platform_Fingerprint', to_test='biod_is_running')
diff --git a/client/site_tests/platform_Fingerprint/platform_Fingerprint.py b/client/site_tests/platform_Fingerprint/platform_Fingerprint.py
deleted file mode 100644
index e01dfbb..0000000
--- a/client/site_tests/platform_Fingerprint/platform_Fingerprint.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.cros import upstart
-
-
-class platform_Fingerprint(test.test):
-  """Fingerprint client tests."""
-  version = 1
-
-  @staticmethod
-  def _test_biod_is_running():
-    """Verify biod is running."""
-    upstart.ensure_running("biod")
-
-  def run_once(self, to_test):
-    """Run the test specified by to_test."""
-    method_name = '_test_' + to_test
-    method = getattr(self, method_name)
-    method()
diff --git a/client/site_tests/platform_Firewall/control b/client/site_tests/platform_Firewall/control
deleted file mode 100644
index 942cbe8..0000000
--- a/client/site_tests/platform_Firewall/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "jorgelo, kees"
-DOC = """
-Integration test for the firewall service.
-Tests whether firewall rules can be added using D-Bus.
-Fails if rules are not added after the D-Bus call, or if rules are not removed
-when the lifeline file descriptors are closed.
-"""
-NAME = "platform_Firewall"
-TIME="SHORT"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:bvt-perbuild"
-JOB_RETRIES = 2
-
-job.run_test("platform_Firewall")
diff --git a/client/site_tests/platform_Firewall/platform_Firewall.py b/client/site_tests/platform_Firewall/platform_Firewall.py
deleted file mode 100644
index c490454..0000000
--- a/client/site_tests/platform_Firewall/platform_Firewall.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class platform_Firewall(test.test):
-    """Ensure the firewall service is working correctly."""
-
-    version = 1
-
-    _PORT = 1234
-    _IFACE = "eth0"
-
-    _TCP_RULE = "-A INPUT -p tcp -m tcp --dport %d -j ACCEPT" % _PORT
-    _UDP_RULE = "-A INPUT -p udp -m udp --dport %d -j ACCEPT" % _PORT
-    _IFACE_RULE = "-A INPUT -i %s -p tcp -m tcp --dport %d -j ACCEPT" % (_IFACE,
-                                                                         _PORT)
-
-    _POLL_INTERVAL = 5
-
-    _IPTABLES_DEL_CMD = "%s -D INPUT -p %s -m %s --dport %d -j ACCEPT"
-
-    @staticmethod
-    def _iptables_rules(executable):
-        rule_output = utils.system_output("%s -S" % executable)
-        logging.debug(rule_output)
-        return [line.strip() for line in rule_output.splitlines()]
-
-
-    @staticmethod
-    def _check(expected_rule, actual_rules, error_msg, executable, check):
-        # If check() returns false, fail the test.
-        if not check(expected_rule, actual_rules):
-            raise error.TestFail(error_msg % executable)
-
-
-    @staticmethod
-    def _check_included(expected_rule, actual_rules, error_msg, executable):
-        # Test whether the rule is included, fail if it's not.
-        platform_Firewall._check(
-                expected_rule, actual_rules, error_msg, executable,
-                lambda e, a: e in a)
-
-
-    @staticmethod
-    def _check_not_included(expected_rule, actual_rules, error_msg, executable):
-        # Test whether the rule is not included, fail if it is.
-        platform_Firewall._check(
-                expected_rule, actual_rules, error_msg, executable,
-                lambda e, a: e not in a)
-
-
-    def run_once(self):
-        # Create lifeline file descriptors.
-        self.tcp_r, self.tcp_w = os.pipe()
-        self.udp_r, self.udp_w = os.pipe()
-        self.iface_r, self.iface_w = os.pipe()
-
-        try:
-            bus = dbus.SystemBus()
-            pb_proxy = bus.get_object('org.chromium.PermissionBroker',
-                                      '/org/chromium/PermissionBroker')
-            pb = dbus.Interface(pb_proxy, 'org.chromium.PermissionBroker')
-
-            tcp_lifeline = dbus.types.UnixFd(self.tcp_r)
-            ret = pb.RequestTcpPortAccess(dbus.UInt16(self._PORT), "",
-                                          tcp_lifeline)
-            # |ret| is a dbus.Boolean, but compares as int.
-            if ret == 0:
-                raise error.TestFail("RequestTcpPortAccess returned false.")
-
-            udp_lifeline = dbus.types.UnixFd(self.udp_r)
-            ret = pb.RequestUdpPortAccess(dbus.UInt16(self._PORT), "",
-                                          udp_lifeline)
-            # |ret| is a dbus.Boolean, but compares as int.
-            if ret == 0:
-                raise error.TestFail("RequestUdpPortAccess returned false.")
-
-            iface_lifeline = dbus.types.UnixFd(self.iface_r)
-            ret = pb.RequestTcpPortAccess(dbus.UInt16(self._PORT),
-                                          dbus.String(self._IFACE),
-                                          iface_lifeline)
-            # |ret| is a dbus.Boolean, but compares as int.
-            if ret == 0:
-                raise error.TestFail(
-                        "RequestTcpPortAccess(port, interface) returned false.")
-
-            # Test IPv4 and IPv6.
-            for executable in ["iptables", "ip6tables"]:
-                actual_rules = self._iptables_rules(executable)
-                self._check_included(
-                        self._TCP_RULE, actual_rules,
-                        "RequestTcpPortAccess did not add %s rule.",
-                        executable)
-                self._check_included(
-                        self._UDP_RULE, actual_rules,
-                        "RequestUdpPortAccess did not add %s rule.",
-                        executable)
-                self._check_included(
-                        self._IFACE_RULE, actual_rules,
-                        "RequestTcpPortAccess(port, interface)"
-                        " did not add %s rule.",
-                        executable)
-
-            ret = pb.ReleaseTcpPort(dbus.UInt16(self._PORT), "")
-            # |ret| is a dbus.Boolean, but compares as int.
-            if ret == 0:
-                raise error.TestFail("ReleaseTcpPort returned false.")
-
-            ret = pb.ReleaseUdpPort(dbus.UInt16(self._PORT), "")
-            # |ret| is a dbus.Boolean, but compares as int.
-            if ret == 0:
-                raise error.TestFail("ReleaseUdpPort returned false.")
-
-            # Test IPv4 and IPv6.
-            for executable in ["iptables", "ip6tables"]:
-                rules = self._iptables_rules(executable)
-                self._check_not_included(
-                        self._TCP_RULE, rules,
-                        "ReleaseTcpPortAccess did not remove %s rule.",
-                        executable)
-                self._check_not_included(
-                        self._UDP_RULE, rules,
-                        "ReleaseUdpPortAccess did not remove %s rule.",
-                        executable)
-
-            # permission_broker should plug the firewall hole
-            # when the requesting process exits.
-            # Simulate the process exiting by closing |iface_w|.
-            os.close(self.iface_w)
-
-            # permission_broker checks every |_POLL_INTERVAL| seconds
-            # for processes that have exited.
-            # This is ugly, but it's either this or polling /var/log/messages.
-            time.sleep(self._POLL_INTERVAL + 1)
-            # Test IPv4 and IPv6.
-            for executable in ["iptables", "ip6tables"]:
-                rules = self._iptables_rules(executable)
-                self._check_not_included(
-                        self._IFACE_RULE, rules,
-                        "permission_broker did not remove %s rule.",
-                        executable)
-
-        except dbus.DBusException as e:
-            raise error.TestFail("D-Bus error: " + e.get_dbus_message())
-
-
-    def cleanup(self):
-        # File descriptors could already be closed.
-        try:
-            os.close(self.tcp_w)
-            os.close(self.udp_w)
-            os.close(self.iface_w)
-        except OSError:
-            pass
-
-        # We don't want the cleanup() method to fail, so we ignore exit codes.
-        # This also allows us to clean up iptables rules unconditionally.
-        # The command will fail if the rule has already been deleted,
-        # but it won't fail the test.
-        for executable in ["iptables", "ip6tables"]:
-            cmd = self._IPTABLES_DEL_CMD % (executable, "tcp", "tcp",
-                                            self._PORT)
-            utils.system(cmd, ignore_status=True)
-            cmd = self._IPTABLES_DEL_CMD % (executable, "udp", "udp",
-                                            self._PORT)
-            utils.system(cmd, ignore_status=True)
-            cmd = self._IPTABLES_DEL_CMD % (executable, "tcp", "tcp",
-                                            self._PORT)
-            cmd += " -i %s" % self._IFACE
-            utils.system(cmd, ignore_status=True)
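
The deleted platform_Firewall test is also a compact illustration of permission_broker's port-access API: the caller passes the port, an optional interface name, and the read end of a "lifeline" pipe, and the broker removes the matching iptables/ip6tables ACCEPT rule when that descriptor closes or the port is explicitly released. A trimmed sketch of requesting and verifying a TCP opening, using the D-Bus names and rule string from the deleted test (assumes root on a device with permission_broker running):

# Sketch of requesting TCP port access from permission_broker and checking
# the resulting iptables rule, following the deleted platform_Firewall test.
import os
import subprocess
import dbus

PORT = 1234
TCP_RULE = '-A INPUT -p tcp -m tcp --dport %d -j ACCEPT' % PORT

def request_tcp_port():
    bus = dbus.SystemBus()
    proxy = bus.get_object('org.chromium.PermissionBroker',
                           '/org/chromium/PermissionBroker')
    broker = dbus.Interface(proxy, 'org.chromium.PermissionBroker')

    # The broker watches the read end; the rule is dropped when it closes.
    lifeline_r, lifeline_w = os.pipe()
    ok = broker.RequestTcpPortAccess(dbus.UInt16(PORT), '',
                                     dbus.types.UnixFd(lifeline_r))
    if not ok:
        raise RuntimeError('RequestTcpPortAccess returned false')

    rules = subprocess.check_output(['iptables', '-S']).decode().splitlines()
    if TCP_RULE not in [line.strip() for line in rules]:
        raise RuntimeError('expected iptables rule was not added')
    # Keep the write end open for as long as the port should stay open.
    return lifeline_w

if __name__ == '__main__':
    keep_alive = request_tcp_port()
    print('port %d opened; close fd %d to revoke it' % (PORT, keep_alive))
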
diff --git a/client/site_tests/platform_GesturesRegressionTest/control b/client/site_tests/platform_GesturesRegressionTest/control
index 4f08ae0..4d51d8f 100644
--- a/client/site_tests/platform_GesturesRegressionTest/control
+++ b/client/site_tests/platform_GesturesRegressionTest/control
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Regression"
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This tests the regression of gestures library and stores the results in autotest
diff --git a/client/site_tests/platform_GesturesRegressionTest/platform_GesturesRegressionTest.py b/client/site_tests/platform_GesturesRegressionTest/platform_GesturesRegressionTest.py
index bdf32d4..ed653b5 100644
--- a/client/site_tests/platform_GesturesRegressionTest/platform_GesturesRegressionTest.py
+++ b/client/site_tests/platform_GesturesRegressionTest/platform_GesturesRegressionTest.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/platform_HighResTimers/control b/client/site_tests/platform_HighResTimers/control
index 176f3e4..ad9714f 100644
--- a/client/site_tests/platform_HighResTimers/control
+++ b/client/site_tests/platform_HighResTimers/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_HighResTimers"
 PURPOSE = "Ensure the high resolution timers are working."
 CRITERIA = "Fails if no timers have nanosecond resolution."
@@ -12,6 +12,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 Reads from /proc/timer_list to see if any reported timers have nanosecond
diff --git a/client/site_tests/platform_HighResTimers/platform_HighResTimers.py b/client/site_tests/platform_HighResTimers/platform_HighResTimers.py
index 0f618af..579b6c8 100644
--- a/client/site_tests/platform_HighResTimers/platform_HighResTimers.py
+++ b/client/site_tests/platform_HighResTimers/platform_HighResTimers.py
@@ -7,7 +7,7 @@
 
 import re
 from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error, utils
+from autotest_lib.client.common_lib import error
 
 class platform_HighResTimers(test.test):
     version = 1
@@ -24,5 +24,5 @@
     def run_once(self):
         try:
             self.check_timers()
-        except error.TestError, e:
+        except error.TestError as e:
             raise error.TestFail(e)
diff --git a/client/site_tests/platform_ImageLoader/control b/client/site_tests/platform_ImageLoader/control
index f3bd6b1..ee92ced 100644
--- a/client/site_tests/platform_ImageLoader/control
+++ b/client/site_tests/platform_ImageLoader/control
@@ -8,6 +8,7 @@
 TEST_CATEGORY = "General"
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test runs ImageLoader through its various dbus and command line interfaces.
diff --git a/client/site_tests/platform_ImageLoader/platform_ImageLoader.py b/client/site_tests/platform_ImageLoader/platform_ImageLoader.py
index be56e6a..c015a90 100644
--- a/client/site_tests/platform_ImageLoader/platform_ImageLoader.py
+++ b/client/site_tests/platform_ImageLoader/platform_ImageLoader.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,7 +6,6 @@
 import dbus
 import os
 import shutil
-import subprocess
 import utils
 
 from autotest_lib.client.common_lib import error
diff --git a/client/site_tests/platform_InitLoginPerf/control b/client/site_tests/platform_InitLoginPerf/control
index 2e76a79..5481577 100644
--- a/client/site_tests/platform_InitLoginPerf/control
+++ b/client/site_tests/platform_InitLoginPerf/control
@@ -9,6 +9,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This is the client-side test that goes through initialization and logins and
diff --git a/client/site_tests/platform_InitLoginPerf/platform_InitLoginPerf.py b/client/site_tests/platform_InitLoginPerf/platform_InitLoginPerf.py
index 26baec2..771466e 100644
--- a/client/site_tests/platform_InitLoginPerf/platform_InitLoginPerf.py
+++ b/client/site_tests/platform_InitLoginPerf/platform_InitLoginPerf.py
@@ -1,7 +1,9 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import codecs
 import logging
 import os
 import re
@@ -191,7 +193,10 @@
         """
         # Grep syslog for AttestationReady line
         attestation_line = ''
-        with open('/var/log/messages', 'r') as syslog:
+        with codecs.open('/var/log/messages',
+                         'r',
+                         encoding='utf-8',
+                         errors='surrogateescape') as syslog:
             for ln in syslog:
                 if 'Attestation: Prepared successfully' in ln:
                     attestation_line = ln
diff --git a/client/site_tests/platform_InputBrightness/control b/client/site_tests/platform_InputBrightness/control
index 489a70a..a6dc757 100644
--- a/client/site_tests/platform_InputBrightness/control
+++ b/client/site_tests/platform_InputBrightness/control
@@ -11,6 +11,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:bvt-perbuild"
+PY_VERSION = 3
 
 DOC = """
 Increase and decrease the brightness using keyboard shortcuts.
diff --git a/client/site_tests/platform_InputBrightness/platform_InputBrightness.py b/client/site_tests/platform_InputBrightness/platform_InputBrightness.py
index 649fca8..bc57454 100644
--- a/client/site_tests/platform_InputBrightness/platform_InputBrightness.py
+++ b/client/site_tests/platform_InputBrightness/platform_InputBrightness.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/platform_InputBrowserNav/control b/client/site_tests/platform_InputBrowserNav/control
index f004b13..30eaaeb 100644
--- a/client/site_tests/platform_InputBrowserNav/control
+++ b/client/site_tests/platform_InputBrowserNav/control
@@ -11,6 +11,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
 #ATTRIBUTES = "suite:bvt-perbuild"
+PY_VERSION = 3
 
 DOC = """
 Go forward, backward, and refresh webpage in browser using keyboard shortcut.
diff --git a/client/site_tests/platform_InputBrowserNav/platform_InputBrowserNav.py b/client/site_tests/platform_InputBrowserNav/platform_InputBrowserNav.py
index 89de17f..c6b9193 100644
--- a/client/site_tests/platform_InputBrowserNav/platform_InputBrowserNav.py
+++ b/client/site_tests/platform_InputBrowserNav/platform_InputBrowserNav.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -17,8 +18,8 @@
     """Tests if device suspends using shortcut keys."""
     version = 1
     _WAIT = 15
-    URL_1 = 'https://www.youtube.com'
-    URL_2 = 'https://www.yahoo.com'
+    URL_1 = 'https://www.youtube.com/'
+    URL_2 = 'https://www.yahoo.com/'
 
     def warmup(self):
         """Test setup."""
@@ -37,7 +38,7 @@
         self._player.blocking_playback_of_default_file(
             input_type='keyboard', filename='keyboard_f1')
         time.sleep(self._WAIT)
-        self.verify_url(tab, self.URL_1)
+        self.verify_url(tab.url, self.URL_1)
 
     def test_forward(self, tab):
         """Use keyboard shortcut to test Forward (F2) key.
@@ -48,7 +49,7 @@
         self._player.blocking_playback_of_default_file(
             input_type='keyboard', filename='keyboard_f2')
         time.sleep(self._WAIT)
-        self.verify_url(tab, self.URL_2)
+        self.verify_url(tab.url, self.URL_2)
 
     def test_refresh(self, tab):
         """Use keyboard shortcut to test Refresh (F3) key.
@@ -71,7 +72,7 @@
         time.sleep(self._WAIT)
 
         # Verify we are still on the second url.
-        self.verify_url(tab, self.URL_2)
+        self.verify_url(tab.url, self.URL_2)
         # Check to see not_refresh does not exist (results in exception).
         # If it does, the refresh was not successful.
         try:
@@ -80,7 +81,7 @@
         except exceptions.EvaluateException:
             logging.info("Refresh successful.")
 
-    def verify_url(self, tab, correct_url):
+    def verify_url(self, current_url, correct_url):
         """Verify tab's current url is the url wanted.
 
         @param tab: current tab.
@@ -89,7 +90,6 @@
         @raises: error.TestFail if incorrect url.
 
         """
-        current_url = tab.url.encode('utf8').rstrip('/')
         utils.poll_for_condition(
             lambda: current_url == correct_url,
             exception=error.TestFail('Incorrect navigation: %s'
diff --git a/client/site_tests/platform_InputNewTab/control b/client/site_tests/platform_InputNewTab/control
index c0d8730..b01b6a1 100644
--- a/client/site_tests/platform_InputNewTab/control
+++ b/client/site_tests/platform_InputNewTab/control
@@ -11,6 +11,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:bvt-perbuild"
+PY_VERSION = 3
 
 DOC = """
 Open and close new tabs in browser using keyboard shortcut.
diff --git a/client/site_tests/platform_InputNewTab/platform_InputNewTab.py b/client/site_tests/platform_InputNewTab/platform_InputNewTab.py
index 0541462..d0dc510 100644
--- a/client/site_tests/platform_InputNewTab/platform_InputNewTab.py
+++ b/client/site_tests/platform_InputNewTab/platform_InputNewTab.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/platform_InputScreenshot/control b/client/site_tests/platform_InputScreenshot/control
index 0e95bd4..93c4c37 100644
--- a/client/site_tests/platform_InputScreenshot/control
+++ b/client/site_tests/platform_InputScreenshot/control
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:bvt-perbuild"
+PY_VERSION = 3
 
 DOC = """
 Press Ctrl+F5 to create a screenshot file and verify it is present.
diff --git a/client/site_tests/platform_InputScreenshot/platform_InputScreenshot.py b/client/site_tests/platform_InputScreenshot/platform_InputScreenshot.py
index b11c8d0..0931b64 100644
--- a/client/site_tests/platform_InputScreenshot/platform_InputScreenshot.py
+++ b/client/site_tests/platform_InputScreenshot/platform_InputScreenshot.py
@@ -1,9 +1,10 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import time
-import os.path
+import glob
+import os
 
 from autotest_lib.client.bin import test
 from autotest_lib.client.bin import utils
@@ -15,7 +16,6 @@
 class platform_InputScreenshot(test.test):
     """Tests if key combinations will create a screenshot."""
     version = 1
-    _WAIT = 5
     _TMP = '/tmp'
     _DOWNLOADS = '/home/chronos/user/Downloads'
     _SCREENSHOT = 'Screenshot*'
@@ -50,22 +50,29 @@
         if not os.path.isdir(filepath):
             raise error.TestNAError("%s folder is not found" % filepath)
 
-        if not (utils.system_output('sync; sleep 2; find %s -name "%s"'
-                                    % (filepath, self._SCREENSHOT))):
-            self._ERROR.append('Screenshot was not found under:%s' % filepath)
+        try:
+            paths = utils.poll_for_condition(lambda: glob.glob(
+                    os.path.join(filepath, self._SCREENSHOT)),
+                                             timeout=20,
+                                             sleep_interval=1)
+        except utils.TimeoutError:
+            self._ERROR.append('Screenshot was not found under: %s' % filepath)
+            return
 
-        filesize = utils.system_output('ls -l %s/%s | cut -d" " -f5'
-                                       % (filepath, self._SCREENSHOT))
+        if len(paths) > 1:
+            self._ERROR.append('Found too many screenshots: %s' % paths)
+            return
+
+        filesize = os.stat(paths[0]).st_size
         if filesize < self._MIN_SIZE:
-            self._ERROR.append('Screenshot size:%s at %s is wrong'
-                               % (filesize, filepath))
+            self._ERROR.append('Screenshot size:%d at %s is wrong' %
+                               (filesize, filepath))
 
 
     def create_screenshot(self):
         """Create a screenshot."""
         self.player.blocking_playback_of_default_file(
                input_type='keyboard', filename='keyboard_ctrl+f5')
-        time.sleep(self._WAIT)
 
 
     def run_once(self):
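
The platform_InputScreenshot change above swaps a fixed sleep plus a shell `find` for polling with glob, which removes a flaky delay and avoids parsing `ls` output for the file size. The same wait-for-file idiom, written without the autotest utils.poll_for_condition wrapper (a plain-Python equivalent for illustration only):

# Plain-Python sketch of the "poll until the screenshot appears" idiom the
# updated test uses; a simple loop stands in for utils.poll_for_condition.
import glob
import os
import time

def wait_for_screenshot(directory, pattern='Screenshot*',
                        timeout_secs=20, interval_secs=1):
    """Return matching paths once they appear, or raise on timeout."""
    deadline = time.monotonic() + timeout_secs
    while time.monotonic() < deadline:
        paths = glob.glob(os.path.join(directory, pattern))
        if paths:
            return paths
        time.sleep(interval_secs)
    raise TimeoutError('no %s under %s after %d seconds'
                       % (pattern, directory, timeout_secs))

# Usage mirroring the test's size check:
#   paths = wait_for_screenshot('/home/chronos/user/Downloads')
#   assert os.stat(paths[0]).st_size > 0
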
diff --git a/client/site_tests/platform_InputVolume/control b/client/site_tests/platform_InputVolume/control
index 490c08b..ab18a48 100644
--- a/client/site_tests/platform_InputVolume/control
+++ b/client/site_tests/platform_InputVolume/control
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -11,6 +12,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:bvt-perbuild"
+PY_VERSION = 3
 
 DOC = """
 Increase, decrease, and mute the volume using keyboard shortcuts.
diff --git a/client/site_tests/platform_InputVolume/platform_InputVolume.py b/client/site_tests/platform_InputVolume/platform_InputVolume.py
index 9f431b3..ac8c316 100644
--- a/client/site_tests/platform_InputVolume/platform_InputVolume.py
+++ b/client/site_tests/platform_InputVolume/platform_InputVolume.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/platform_KernelVersion/control b/client/site_tests/platform_KernelVersion/control
index df36657..b730a09 100644
--- a/client/site_tests/platform_KernelVersion/control
+++ b/client/site_tests/platform_KernelVersion/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_KernelVersion"
 PURPOSE = "Ensure the running kernel is supported."
 CRITERIA = "Fails if the running kernel version is older than kernel_version."
diff --git a/client/site_tests/platform_KernelVersion/platform_KernelVersion.py b/client/site_tests/platform_KernelVersion/platform_KernelVersion.py
index 433c3d8..f2ee83a 100644
--- a/client/site_tests/platform_KernelVersion/platform_KernelVersion.py
+++ b/client/site_tests/platform_KernelVersion/platform_KernelVersion.py
@@ -12,6 +12,6 @@
     def run_once(self, kernel_version='3.8'):
         try:
             utils.check_kernel_ver(kernel_version)
-        except error.TestError, e:
+        except error.TestError as e:
             logging.debug(e)
             raise error.TestFail(e)
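
The one-line fix above is the standard Python 2-to-3 exception-handling change: the `except Error, e:` comma form is a SyntaxError on Python 3, while `except Error as e:` parses on both interpreters. A minimal sketch of the pattern (CheckError and the local check_kernel_ver below are hypothetical stand-ins, not the Autotest APIs):

import logging

class CheckError(Exception):
    """Stand-in for error.TestError in this sketch."""

def check_kernel_ver(required):
    # Always fails, just to exercise the handler.
    raise CheckError('running kernel is older than %s' % required)

try:
    check_kernel_ver('3.8')
except CheckError as e:  # 'except CheckError, e:' would not parse on Python 3
    logging.debug(e)
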
diff --git a/client/site_tests/platform_LibCBench/control b/client/site_tests/platform_LibCBench/control
deleted file mode 100755
index 24bfd31..0000000
--- a/client/site_tests/platform_LibCBench/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_LibCBench"
-PURPOSE = "A LibCBenchmark."
-CRITERIA = """
-No specific criteria.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-Runs the libc-bench benchmark originally from http://www.etalabs.net/libc-bench.html
-"""
-
-if args:
-    args = args[0].split(':')
-job.run_test('platform_LibCBench', args=args)
diff --git a/client/site_tests/platform_LibCBench/platform_LibCBench.py b/client/site_tests/platform_LibCBench/platform_LibCBench.py
deleted file mode 100644
index 42e4414..0000000
--- a/client/site_tests/platform_LibCBench/platform_LibCBench.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob, logging, re
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from optparse import OptionParser
-
-class platform_LibCBench(test.test):
-    version = 1
-
-    iteration_output = []
-    GOVERNOR_FILE = '/sys/devices/system/cpu/cpu*/cpufreq/scaling_governor'
-
-    def run_once(self, args=[]):
-        parser = OptionParser()
-        parser.add_option('-i',
-                          '--iterations',
-                          dest='iterations',
-                          default=3,
-                          help='Number of iterations to run.')
-        parser.add_option('--path',
-                          dest='path',
-                          default='/usr/local/libc-bench/libc-bench',
-                          help='Path to the libc-bench binary.')
-
-        options, args = parser.parse_args(args)
-
-        last_governor_modes = []
-        governor_paths = glob.glob(self.GOVERNOR_FILE)
-        for path in governor_paths:
-            mode = utils.system_output('cat %s' % path)
-            last_governor_modes.append(mode)
-            utils.system('sudo bash -c "echo performance > %s"' % path)
-
-        for i in xrange(int(options.iterations)):
-            self.iteration_output.append(utils.system_output(options.path))
-
-        for i in xrange(len(governor_paths)):
-            utils.system('sudo bash -c "echo %s > %s"' %
-                         (last_governor_modes[i], governor_paths[i]))
-
-    def postprocess_iteration(self):
-        results = {}
-
-        current_benchmark = None
-        # Process the output of the benchmarks.
-        # Output for each benchmark looks like the following:
-        # b_<benchmark_1>
-        #   time: ..., x: ..., y: ..., z: ...
-        for output in self.iteration_output:
-            for line in output.split('\n'):
-                if line.startswith('b_'):
-                    current_benchmark = line
-                elif line.strip().startswith('time'):
-                    time = float(line.strip().split(',')[0].split(' ')[1])
-                    assert(current_benchmark is not None)
-                    results.setdefault(current_benchmark, []).append(time)
-
-        perf_results = {}
-        for benchmark in results:
-            average = sum(results[benchmark]) / len(results[benchmark])
-            minimum = min(results[benchmark])
-            maximum = max(results[benchmark])
-            difference = maximum - minimum
-            percent_difference = difference / average * 100
-
-
-            logging.info('%s:\tmin=%s\tmax=%s\tdiff=%s\tavg=%s\tpercent=%s' %
-                         (benchmark, minimum, maximum, difference, average,
-                          percent_difference))
-
-            key_string = re.sub('[^\w]', '_', benchmark)
-            perf_results[key_string] = average
-
-
-        self.write_perf_keyval(perf_results)
diff --git a/client/site_tests/platform_LogDupSuppression/control b/client/site_tests/platform_LogDupSuppression/control
deleted file mode 100644
index 4446751..0000000
--- a/client/site_tests/platform_LogDupSuppression/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.  Use of
-# this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'quiche'
-NAME = 'platform_LogDupSuppression'
-ATTRIBUTES = "suite:experimental"
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Test that we suppress duplicate messages from one process to syslog.
-
-  Directs log messages for the 'spam' process to a new file, then
-  checks that repeated messages from that process are suppressed.
-"""
-
-job.run_test('platform_LogDupSuppression')
diff --git a/client/site_tests/platform_LogDupSuppression/platform_LogDupSuppression.py b/client/site_tests/platform_LogDupSuppression/platform_LogDupSuppression.py
deleted file mode 100644
index 0b48681..0000000
--- a/client/site_tests/platform_LogDupSuppression/platform_LogDupSuppression.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.  Use of
-# this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import syslog
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-class platform_LogDupSuppression(test.test):
-    """Test that we suppress duplicate messages from one process to syslog"""
-    DUP_DETECT_SIG = "spam: last message repeated"
-    NON_SPAM_MSG = 'not spam'
-    NUM_SPAM_MSGS = 10
-    SPAM_LOG_PATH = '/var/log/spam.log'
-    SPAM_MSG = 'SPAM SPAM SPAM'
-    SYSLOG_BIN = 'rsyslogd'
-    SYSLOG_OPTS = '-c4'  # allow version 4 commands
-    SYSLOG_JOB = 'syslog'
-    version = 1
-
-
-    def run_once(self):
-        syslog.openlog('spam')
-        try:
-            utils.run('stop %s' % self.SYSLOG_JOB,
-                      ignore_status=True)  # might not have been running
-            utils.run('truncate -s 0 %s' % self.SPAM_LOG_PATH)
-            utils.run('chown syslog %s' % self.SPAM_LOG_PATH)
-            utils.run('%s %s -f %s/rsyslog.test' %
-                      (self.SYSLOG_BIN, self.SYSLOG_OPTS, self.bindir))
-
-            for i in range(self.NUM_SPAM_MSGS):
-                syslog.syslog(self.SPAM_MSG)
-            syslog.syslog(self.NON_SPAM_MSG)
-
-            cmd_result = utils.run(
-                'grep "%s" %s' % (self.DUP_DETECT_SIG, self.SPAM_LOG_PATH),
-                ignore_status=True)
-            if cmd_result.exit_status:
-                raise error.TestFail(
-                    'duplicate suppression signature not found')
-
-            spam_count = int(
-                utils.run('grep -c "%s" %s' %
-                          (self.SPAM_MSG, self.SPAM_LOG_PATH)).stdout)
-            if spam_count != 1:
-                raise error.TestFail(
-                    'got %s spams, expected exactly one' % spam_count)
-        finally:
-            utils.run('pkill %s' % self.SYSLOG_BIN)
-            utils.run('start %s' % self.SYSLOG_JOB)
diff --git a/client/site_tests/platform_LogDupSuppression/rsyslog.test b/client/site_tests/platform_LogDupSuppression/rsyslog.test
deleted file mode 100644
index a505e9b..0000000
--- a/client/site_tests/platform_LogDupSuppression/rsyslog.test
+++ /dev/null
@@ -1,3 +0,0 @@
-$IncludeConfig /etc/rsyslog.chromeos
-
-if ($programname == 'spam') then -/var/log/spam.log
diff --git a/client/site_tests/platform_LogNonKernelKmsg/control b/client/site_tests/platform_LogNonKernelKmsg/control
deleted file mode 100644
index df100b0..0000000
--- a/client/site_tests/platform_LogNonKernelKmsg/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.  Use of
-# this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'quiche'
-NAME = 'platform_LogNonKernelKmsg'
-ATTRIBUTES = "suite:experimental"
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-  Test that we log non-kernel messages that are written to /dev/kmsg
-
-  Write a message to /dev/kmsg, and check that the message has been
-  written to /var/log/messages. This is important, as init (upstart)
-  writes log messages to /dev/kmsg.
-"""
-
-job.run_test('platform_LogNonKernelKmsg')
diff --git a/client/site_tests/platform_LogNonKernelKmsg/platform_LogNonKernelKmsg.py b/client/site_tests/platform_LogNonKernelKmsg/platform_LogNonKernelKmsg.py
deleted file mode 100644
index e26a77f..0000000
--- a/client/site_tests/platform_LogNonKernelKmsg/platform_LogNonKernelKmsg.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.  Use of
-# this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-class platform_LogNonKernelKmsg(test.test):
-    """Test that we log non-kernel messages via /dev/kmsg"""
-    KLOG_PATH = '/dev/kmsg'
-    SYSLOG_BIN = 'rsyslogd'
-    SYSLOG_JOB = 'syslog'
-    SYSTEM_LOG_PATH = '/var/log/messages'
-    version = 1
-
-
-    def run_once(self):
-        utils.run('truncate -s 0 %s' % self.SYSTEM_LOG_PATH)
-        our_message = 'non-kernel message generated at %d\n' % time.time()
-        with open(self.KLOG_PATH, 'w') as outfile:
-            outfile.write(our_message)
-
-        cmd_result = utils.run(
-            'grep "%s" %s' % (our_message, self.SYSTEM_LOG_PATH),
-            ignore_status=True)
-        if cmd_result.exit_status:
-            raise error.TestFail(
-                'our log message did not appear in %s' % self.SYSTEM_LOG_PATH)
diff --git a/client/site_tests/platform_LogoutPerf/control b/client/site_tests/platform_LogoutPerf/control
index 6d14e81..8779720 100644
--- a/client/site_tests/platform_LogoutPerf/control
+++ b/client/site_tests/platform_LogoutPerf/control
@@ -9,6 +9,7 @@
 TEST_CATEGORY = "General"
 TEST_CLASS = "login"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test measures the time elapsed for signing out from a logged on
diff --git a/client/site_tests/platform_LogoutPerf/platform_LogoutPerf.py b/client/site_tests/platform_LogoutPerf/platform_LogoutPerf.py
index 0fd0d40..877257a 100644
--- a/client/site_tests/platform_LogoutPerf/platform_LogoutPerf.py
+++ b/client/site_tests/platform_LogoutPerf/platform_LogoutPerf.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -40,8 +41,7 @@
             logging.info('timestamp %s is missing', ts_name)
             return 0
         with open(pathname) as statfile:
-            values = map(lambda l: float(l.split()[0]),
-                         statfile.readlines())
+            values = [float(l.split()[0]) for l in statfile.readlines()]
         logging.info('timestamp of %s -> %s ', ts_name, values[-1])
         return values[-1]
 
diff --git a/client/site_tests/platform_LowMemoryTest/alloc.html b/client/site_tests/platform_LowMemoryTest/alloc.html
deleted file mode 100644
index 74e01ab..0000000
--- a/client/site_tests/platform_LowMemoryTest/alloc.html
+++ /dev/null
@@ -1,15 +0,0 @@
-<!DOCTYPE html>
-<meta charset="utf-8">
-<html>
-<head>
-  <title>Alloc memory</title>
-  <meta http-equiv="Cache-Control" content="no-store" />
-  <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
-  <script src="main.js"></script>
-</head>
-<body>
-<div id="display">
-  Allocating memory...
-</div>
-</body>
-</html>
diff --git a/client/site_tests/platform_LowMemoryTest/control.form b/client/site_tests/platform_LowMemoryTest/control.form
deleted file mode 100644
index 933fb50..0000000
--- a/client/site_tests/platform_LowMemoryTest/control.form
+++ /dev/null
@@ -1,15 +0,0 @@
-NAME = "platform_LowMemoryTest.form"
-PURPOSE = "Tab discarding functional tests with forms."
-CRITERIA = """Failed if oom-kill is invoked."""
-ATTRIBUTES = "suite:crosbolt_perf_nightly"
-AUTHOR="Vovo Yang"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-DOC = """
-This is a low memory policy functional test. It creates several web page
-with random javascript data and a form with pending data.
-"""
-
-job.run_test('platform_LowMemoryTest', flavor='form')
diff --git a/client/site_tests/platform_LowMemoryTest/form.html b/client/site_tests/platform_LowMemoryTest/form.html
deleted file mode 100644
index bcd9739..0000000
--- a/client/site_tests/platform_LowMemoryTest/form.html
+++ /dev/null
@@ -1,19 +0,0 @@
-<!DOCTYPE html>
-<meta charset="utf-8">
-<html>
-<head>
-  <title>Alloc memory</title>
-  <meta http-equiv="Cache-Control" content="no-store" />
-  <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
-  <script src="main.js"></script>
-</head>
-<body>
-<div id="display">
-  Allocating memory...
-</div>
-<form action="action.html">
-  Form:<br>
-  <input type="text" name="edit">
-</form>
-</body>
-</html>
diff --git a/client/site_tests/platform_LowMemoryTest/main.js b/client/site_tests/platform_LowMemoryTest/main.js
deleted file mode 100644
index 8ee873b..0000000
--- a/client/site_tests/platform_LowMemoryTest/main.js
+++ /dev/null
@@ -1,34 +0,0 @@
-function alloc(sizeMB, randomRatio) {
-  const FLOAT64_BYTES = 8;
-  const MB = 1024 * 1024;
-  const total_count = sizeMB* MB / FLOAT64_BYTES;
-  const random_count = total_count * randomRatio;
-  // Random array is uncompressable.
-  let random_array = new Float64Array(random_count);
-  for (let i = 0; i < random_array.length; i++) {
-    random_array[i] = Math.random();
-  }
-  // Constant array is compressable.
-  const const_count = total_count * (1 - randomRatio);
-  let const_array = new Float64Array(const_count);
-  for (let i = 0; i < const_array.length; i++) {
-    const_array[i] = 1;
-  }
-  return [random_array, const_array];
-}
-$(document).ready(function() {
-  var url = new URL(window.location.href);
-  var allocMB = parseInt(url.searchParams.get("alloc"));
-  if (isNaN(allocMB))
-    allocMB = 800;
-  var randomRatio = parseFloat(url.searchParams.get("ratio"));
-  if (isNaN(randomRatio))
-    randomRatio = 0.666
-
-  var startTime = new Date();
-  // Assigns the content to docuement to avoid optimization of unused data.
-  document.out = alloc(allocMB, randomRatio);
-  var ellapse = (new Date() - startTime) / 1000;
-  // Shows the loading time for manual test.
-  $("#display").text(`Allocating ${allocMB} MB takes ${ellapse} seconds`);
-});
diff --git a/client/site_tests/platform_LowMemoryTest/platform_LowMemoryTest.py b/client/site_tests/platform_LowMemoryTest/platform_LowMemoryTest.py
deleted file mode 100644
index eebf392..0000000
--- a/client/site_tests/platform_LowMemoryTest/platform_LowMemoryTest.py
+++ /dev/null
@@ -1,202 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import re
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import cros_logging
-from autotest_lib.client.cros.input_playback import input_playback
-
-GB_TO_BYTE = 1024 * 1024 * 1024
-MB_TO_BYTE = 1024 * 1024
-KB_TO_BYTE = 1024
-
-DEFAULT_RANDOM_RATIO = 0.666
-
-class MemoryKillsMonitor:
-    """A util class for reading kill events."""
-
-    _LOG_FILE = '/var/log/chrome/chrome'
-    _PATTERN_DISCARD = re.compile(
-        'tab_manager_delegate_chromeos.*:(\d+) Killed tab')
-    _PATTERN_OOM = re.compile('Tab OOM-Killed Memory details:')
-
-    def __init__(self):
-        self._log_reader = cros_logging.ContinuousLogReader(self._LOG_FILE)
-        self._oom = False
-        self._discarded = False
-
-    @property
-    def oom(self):
-        """Returns true if oom event is recorded."""
-        return self._oom
-
-    @property
-    def discarded(self):
-        """Returns true if tab discard event is recorded."""
-        return self._discarded
-
-    def check_events(self):
-        """Checks the events and updates oom and discarded property."""
-        for line in self._log_reader.read_all_logs():
-            matched = self._PATTERN_DISCARD.search(line)
-            if matched:
-                logging.info('Matched line %s', line)
-                self._discarded = True
-            matched = self._PATTERN_OOM.search(line)
-            if matched:
-                logging.info('Matched line %s', line)
-                self._oom = True
-
-
-def create_pages_and_check_oom(create_page_func, size_mb, bindir):
-    """Common code to create pages and to check OOM.
-
-    Args:
-        create_page_func: function to create page, it takes 3 arguments,
-            cr: chrome wrapper, size_mb: alloc size per page in MB,
-            bindir: path to the test directory.
-        bindir: path to the test directory.
-    Returns:
-        Dictionary of test results.
-    """
-    kills_monitor = MemoryKillsMonitor()
-
-    # The amount of tabs that can trigger OOM consistently if the tabs are not
-    # discarded properly.
-    tab_count = 1 + (utils.memtotal() * KB_TO_BYTE * 4) / (size_mb * MB_TO_BYTE)
-
-    # The tab count at the first tab discard.
-    first_discard = -1
-    # The number of tabs actually created.
-    tabs_created = tab_count
-
-    # Opens a specific amount of tabs, breaks if the OOM killer is invoked.
-    with chrome.Chrome(init_network_controller=True) as cr:
-        cr.browser.platform.SetHTTPServerDirectories(bindir)
-        for i in range(tab_count):
-            create_page_func(cr, size_mb, bindir)
-            time.sleep(3)
-            kills_monitor.check_events()
-            if first_discard == -1 and kills_monitor.discarded:
-                first_discard = i + 1
-            if kills_monitor.oom:
-                tabs_created = i + 1
-                break
-
-    # Test is successful if at least one Chrome tab is killed by tab
-    # discarder and the kernel OOM killer isn't invoked.
-    if kills_monitor.oom:
-        raise error.TestFail('OOM Killer invoked')
-
-    if not kills_monitor.discarded:
-        raise error.TestFail('No tab discarded')
-
-    # TODO: reports the page loading time.
-    return {'NumberOfTabsAtFirstDiscard': first_discard,
-            'NumberOfTabsCreated': tabs_created}
-
-
-def get_alloc_size_per_page():
-    """Returns the default alloc size per page in MB."""
-    ALLOC_MB_PER_PAGE_DEFAULT = 800
-    ALLOC_MB_PER_PAGE_SUB_2GB = 400
-
-    alloc_mb_per_page = ALLOC_MB_PER_PAGE_DEFAULT
-    # Allocate less memory per page for devices with 2GB or less memory.
-    if utils.memtotal() * KB_TO_BYTE < 2 * GB_TO_BYTE:
-        alloc_mb_per_page = ALLOC_MB_PER_PAGE_SUB_2GB
-    return alloc_mb_per_page
-
-
-def create_alloc_page(cr, page_name, size_mb, random_ratio, bindir):
-    """The program in alloc.html allocates a large array with random data.
-
-    Args:
-        cr: chrome wrapper.
-        size_mb: size of the allocated javascript array in the page.
-        random_ratio: the ratio of random data size : all data size
-        bindir: path to the test directory.
-    Returns:
-        The created tab.
-    """
-    url = cr.browser.platform.http_server.UrlOf(
-        os.path.join(bindir, page_name))
-    url += '?alloc=' + str(size_mb)
-    url += '&ratio=' + str(random_ratio)
-    tab = cr.browser.tabs.New()
-    tab.Navigate(url)
-    tab.WaitForDocumentReadyStateToBeComplete()
-    tab.WaitForJavaScriptCondition(
-        "document.hasOwnProperty('out') == true", timeout=60)
-    return tab
-
-
-def random_pages(bindir, random_ratio):
-    """Creates pages with random javascript data and checks OOM.
-
-    Args:
-        bindir: path to the test directory.
-        random_ratio: the ratio of random data size : all data size
-    """
-    def create_random_page(cr, size_mb, bindir):
-        """Creates a page with random javascript data."""
-        create_alloc_page(cr, 'alloc.html', size_mb, random_ratio,
-                          bindir)
-
-    return create_pages_and_check_oom(
-        create_random_page, get_alloc_size_per_page(), bindir)
-
-
-def form_pages(bindir, random_ratio):
-    """Creates pages with pending form data and checks OOM.
-
-    Args:
-        bindir: path to the test directory.
-        random_ratio: the ratio of random data size : all data size
-    """
-    player = input_playback.InputPlayback()
-    player.emulate(input_type='keyboard')
-    player.find_connected_inputs()
-
-    def create_form_page(cr, size_mb, bindir):
-        """Creates a page with pending form data."""
-        tab = create_alloc_page(cr, 'form.html', size_mb, random_ratio,
-                                bindir)
-        # Presses tab to focus on the first interactive element.
-        player.blocking_playback_of_default_file(input_type='keyboard',
-                                                 filename='keyboard_tab')
-        # Fills the form.
-        player.blocking_playback_of_default_file(input_type='keyboard',
-                                                 filename='keyboard_a')
-
-    ret = create_pages_and_check_oom(
-        create_form_page, get_alloc_size_per_page(), bindir)
-    player.close()
-    return ret
-
-
-class platform_LowMemoryTest(test.test):
-    """Memory pressure test."""
-    version = 1
-
-    def run_once(self, flavor='random', random_ratio=DEFAULT_RANDOM_RATIO):
-        """Runs the test once."""
-        if flavor == 'random':
-            perf_results = random_pages(self.bindir, random_ratio)
-        elif flavor == 'form':
-            perf_results = form_pages(self.bindir, random_ratio)
-
-        self.write_perf_keyval(perf_results)
-        for result_key in perf_results:
-            self.output_perf_value(description=result_key,
-                                   value=perf_results[result_key],
-                                   higher_is_better=True)
-
diff --git a/client/site_tests/platform_MemCheck/control b/client/site_tests/platform_MemCheck/control
index 610e5a3..9435898 100644
--- a/client/site_tests/platform_MemCheck/control
+++ b/client/site_tests/platform_MemCheck/control
@@ -1,6 +1,7 @@
 NAME = 'platform_MemCheck'
 PURPOSE = 'Verify memory values look reasonable.'
 CRITERIA = """
+
 This test will fail if unexpected values are found for:
   - Total Memory
   - Free Memory
@@ -14,10 +15,12 @@
 TEST_CLASS = 'platform'
 TEST_TYPE = 'client'
 JOB_RETRIES = 2
-ATTRIBUTES = "suite:bvt-perbuild"
+ATTRIBUTES = "suite:bvt-perbuild, suite:memory_qual2"
 DEPENDENCIES = 'cleanup-reboot'
+PY_VERSION = 3
+
 DOC = """
-Verifies that memory levels look sane.
+Verifies that memory levels look valid.
 
 The resources being verified are:
 
diff --git a/client/site_tests/platform_MemCheck/platform_MemCheck.py b/client/site_tests/platform_MemCheck/platform_MemCheck.py
index 5fbdbc2..b2ce86c 100644
--- a/client/site_tests/platform_MemCheck/platform_MemCheck.py
+++ b/client/site_tests/platform_MemCheck/platform_MemCheck.py
@@ -1,5 +1,4 @@
-#!/usr/bin/python2
-#
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -9,9 +8,11 @@
 
 __author__ = 'kdlucas@chromium.org (Kelly Lucas)'
 
-import logging, re
+import logging
+import re
 
-from autotest_lib.client.bin import utils, test
+from autotest_lib.client.bin import utils
+from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error
 
 
@@ -37,7 +38,6 @@
             memref = 700000
             vmemref = 210000
 
-        speedref = 1333
         os_reserve_min = 600000
         os_reserve_ratio = 0.04
 
@@ -50,6 +50,8 @@
         # read physical HW size from mosys and adjust memref if need
         cmd = 'mosys memory spd print geometry -s size_mb'
         phy_size_run = utils.run(cmd)
+        logging.info('Ran command: `%s`', cmd)
+        logging.info('Output: "%s"', phy_size_run.stdout)
         phy_size = 0
         for line in phy_size_run.stdout.split():
             phy_size += int(line)
@@ -88,43 +90,30 @@
                     errors += 1
                     error_list += [k]
 
-        # read spd timings
-        cmd = 'mosys memory spd print timings -s speeds'
-        # result example
-        # DDR3-800, DDR3-1066, DDR3-1333, DDR3-1600
-        pattern = '[A-Z]*DDR([3-9]|[1-9]\d+)[A-Z]*-(?P<speed>\d+)'
-        timing_run = utils.run(cmd)
+        # Log memory type
+        cmd = 'mosys memory spd print type -s dram | head -1'
+        # Example
+        # 0 | LPDDR4
+        mem_type = utils.run(cmd).stdout.strip()
+        logging.info('Ran command: `%s`', cmd)
+        logging.info('Output: "%s"', mem_type)
 
-        keyval['speedref'] = speedref
-        for dimm, line in enumerate(timing_run.stdout.split('\n')):
-            if not line:
-                continue
-            max_timing = line.split(', ')[-1]
-            keyval['timing_dimm_%d' % dimm] = max_timing
-            m = re.match(pattern, max_timing)
-            if not m:
-                logging.warning('Error parsing timings for dimm #%d (%s)',
-                             dimm, max_timing)
-                errors += 1
-                continue
-            logging.info('dimm #%d timings: %s', dimm, max_timing)
-            max_speed = int(m.group('speed'))
-            keyval['speed_dimm_%d' % dimm] = max_speed
-            if max_speed < speedref:
-                logging.warning('ram speed is %s', max_timing)
-                logging.warning('ram speed should be at least %d', speedref)
-                error_list += ['speed_dimm_%d' % dimm]
-                errors += 1
+        # key name timing_dimm_0 for backward compatibility with older test.
+        keyval['timing_dimm_0'] = mem_type
 
         # Log memory ids
         cmd = 'mosys memory spd print id'
         # result example (1 module of memory per result line)
         # 0 | 1-45: SK Hynix (Hyundai) | 128d057e | HMT425S6CFR6A-PB
         # 1 | 1-45: SK Hynix (Hyundai) | 121d0581 | HMT425S6CFR6A-PB
-        mem_ids = utils.run(cmd).stdout.split('\n')
-        for dimm, line in enumerate(mem_ids):
-            if not line:
-                continue
+        mem_ids = utils.run(cmd)
+        logging.info('Ran command: `%s`', cmd)
+        logging.info('Output: "%s"', mem_ids.stdout)
+
+        mem_ids_list = [line for line in mem_ids.stdout.split('\n') if line]
+        keyval['number_of_channel'] = len(mem_ids_list)
+
+        for dimm, line in enumerate(mem_ids_list):
             keyval['memory_id_dimm_%d' % dimm] = line
 
         if board.startswith('rambi') or board.startswith('expresso'):
@@ -135,4 +124,21 @@
             error_list_str = ', '.join(error_list)
             raise error.TestFail('Found incorrect values: %s' % error_list_str)
 
+        keyval['cpu_name'] = utils.get_cpu_name()
+
+        # Log memory type
+        cmd = 'dmidecode -t memory'
+        mem_dmi = utils.run(cmd)
+        logging.info('Ran command: `%s`', cmd)
+        logging.info('Output: "%s"', mem_dmi.stdout)
+
+        pattern = r'\s*Speed: (?P<speed>\d+) MT/s'
+        for line in mem_dmi.stdout.split('\n'):
+            match = re.match(pattern, line)
+            if match:
+                keyval['speed'] = match.group('speed')
+                break
+        else:
+            keyval['speed'] = 'N/A'
+
         self.write_perf_keyval(keyval)
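
The added speed logging above scans `dmidecode -t memory` output line by line with the pattern r'\s*Speed: (?P<speed>\d+) MT/s' and falls back to 'N/A' when nothing matches. A self-contained sketch of that parse (the sample dmidecode-style text is illustrative, not captured output; only the regex and the fallback come from the test above):

import re

SPEED_RE = re.compile(r'\s*Speed: (?P<speed>\d+) MT/s')

sample = (
    'Memory Device\n'
    '\tSize: 4096 MB\n'
    '\tSpeed: 2400 MT/s\n'
    '\tManufacturer: Example\n'
)

for line in sample.split('\n'):
    match = SPEED_RE.match(line)  # match() anchors at the start of the line, as in the test
    if match:
        speed = match.group('speed')
        break
else:
    speed = 'N/A'  # no Speed line found

print(speed)  # -> 2400
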
diff --git a/client/site_tests/platform_MetricsUploader/control b/client/site_tests/platform_MetricsUploader/control
deleted file mode 100644
index 953b550..0000000
--- a/client/site_tests/platform_MetricsUploader/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "bsimonnet"
-NAME = "platform_MetricsUploader"
-PURPOSE = "Verify that metrics_daemon uploads the metrics."
-
-CRITERIA = """
-MetricsUploader must pick up the metrics from the file and upload them
-periodically.
-"""
-
-ATTRIBUTES = "suite:experimental"
-TIME = "FAST"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-End to end test of the metrics uploader using a fake UMA server.
-"""
-
-job.run_test('platform_MetricsUploader')
diff --git a/client/site_tests/platform_MetricsUploader/platform_MetricsUploader.py b/client/site_tests/platform_MetricsUploader/platform_MetricsUploader.py
deleted file mode 100644
index 274f7f5..0000000
--- a/client/site_tests/platform_MetricsUploader/platform_MetricsUploader.py
+++ /dev/null
@@ -1,238 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import shutil
-import SimpleHTTPServer
-import sys
-import threading
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import autotemp, error, file_utils, utils
-from autotest_lib.client.cros import httpd, service_stopper
-
-
-SERVER_PORT=51793
-SERVER_ADDRESS = "http://localhost:%s/uma/v2" % SERVER_PORT
-
-class FakeHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
-    """
-    Fake Uma handler.
-
-    Answer OK on well formed request and add the data to the server's list of
-    messages.
-    """
-
-    def do_POST(self):
-        """
-        Handle post request to the fake UMA backend.
-
-        Answer 'OK' with a 200 HTTP status code on POST requests to /uma/v2
-        and an empty message with error code 404 otherwise.
-        """
-        if self.path != '/uma/v2':
-            self.send_response(404)
-            self.end_headers()
-            return
-
-        message = self.rfile.read(int(self.headers['Content-Length']))
-        self.server.messages.append(message)
-
-        self.send_response(200)
-        self.send_header('Content-type', 'text/html')
-        self.end_headers()
-        self.wfile.write('OK')
-
-
-class FakeServer(httpd.ThreadedHTTPServer):
-    """
-    Wrapper around ThreadedHTTPServer.
-
-    Provides helpers to start/stop the instance and hold the list of
-    received messages.
-    """
-
-    def __init__(self):
-        httpd.ThreadedHTTPServer.__init__(self, ('', SERVER_PORT), FakeHandler)
-        self.messages = []
-
-
-    def Start(self):
-        """
-        Start the server on a new thread.
-        """
-        self.server_thread = threading.Thread(target=self.serve_forever)
-        self.server_thread.start()
-
-
-    def Stop(self):
-        """
-        Stop the server thread.
-        """
-        self.shutdown()
-        self.socket.close()
-        self.server_thread.join()
-
-
-class platform_MetricsUploader(test.test):
-    """
-    End-to-End test of the metrics uploader
-
-    Test that metrics_daemon is sending the metrics to the Uma server when
-    started with the -uploader flag and that the messages are well formatted.
-    """
-
-    version = 1
-    _CONSENT_FILE = '/home/chronos/Consent To Send Stats'
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make('OUT_DIR=.')
-
-
-    def initialize(self):
-        self._services = service_stopper.ServiceStopper(['metrics_daemon'])
-        self._services.stop_services()
-        self._tempdir = autotemp.tempdir()
-
-
-    def _create_one_sample(self):
-        utils.system_output('truncate --size=0 /run/metrics/uma-events')
-        utils.system_output('metrics_client test 10 0 100 10')
-
-
-    def _test_simple_upload(self):
-        self._create_one_sample()
-
-        self.server = FakeServer()
-        self.server.Start()
-
-        utils.system_output('metrics_daemon -uploader_test '
-                            '-server="%s"' % SERVER_ADDRESS,
-                            timeout=10, retain_output=True)
-
-        self.server.Stop()
-
-        if len(self.server.messages) != 1:
-            raise error.TestFail('no messages received by the server')
-
-
-    def _test_server_unavailable(self):
-        """
-        metrics_daemon should not crash when the server is unavailable.
-        """
-        self._create_one_sample()
-        utils.system_output('metrics_daemon -uploader_test '
-                            '-server="http://localhost:12345"',
-                            retain_output=True)
-
-
-    def _test_check_product_id(self):
-        """
-        metrics_daemon should set the product id when it is specified.
-
-        The product id can be set through the GOOGLE_METRICS_PRODUCT_ID file in
-        /etc/os-release.d/.
-        """
-
-        # The product id must be an integer, declared in the upstream UMA
-        # backend protobuf.
-        EXPECTED_PRODUCT_ID = 3
-
-        sys.path.append(self.srcdir)
-        from chrome_user_metrics_extension_pb2 import ChromeUserMetricsExtension
-
-        self._create_one_sample()
-
-        self.server = FakeServer()
-        self.server.Start()
-        osreleased_path = os.path.join(self._tempdir.name, 'etc',
-                                       'os-release.d')
-        file_utils.make_leaf_dir(osreleased_path)
-        utils.write_one_line(os.path.join(osreleased_path,
-                                          'GOOGLE_METRICS_PRODUCT_ID'),
-                             str(EXPECTED_PRODUCT_ID))
-
-        utils.system_output('metrics_daemon -uploader_test '
-                            '-server="%s" '
-                            '-config_root="%s"' % (SERVER_ADDRESS,
-                                                   self._tempdir.name),
-                            retain_output=True)
-
-        self.server.Stop()
-
-        if len(self.server.messages) != 1:
-            raise error.TestFail('should have received 1 message. Received: '
-                               + str(len(self.server.messages)))
-
-        proto = ChromeUserMetricsExtension.FromString(self.server.messages[0])
-        logging.debug('Proto received is: ' + str(proto))
-        if proto.product != EXPECTED_PRODUCT_ID:
-            raise error.TestFail('Product id should be set to 3. Was: '
-                                 + str(proto.product))
-
-
-    def _test_metrics_disabled(self):
-        """
-        When metrics are disabled, nothing should get uploaded.
-        """
-        self._create_one_sample()
-
-        self.server = FakeServer()
-        self.server.Start()
-
-        utils.system_output('metrics_daemon -uploader_test '
-                            '-server="%s"' % SERVER_ADDRESS,
-                            timeout=10, retain_output=True)
-
-        self.server.Stop()
-
-        if len(self.server.messages) != 0:
-            raise error.TestFail('message received by the server')
-
-
-    def _get_saved_consent_file_path(self):
-        return os.path.join(self.bindir, 'saved_consent')
-
-
-    def run_once(self):
-        """
-        Run the tests.
-        """
-        if os.path.exists(self._CONSENT_FILE):
-          shutil.move(self._CONSENT_FILE, self._get_saved_consent_file_path())
-        # enable metrics reporting
-        utils.open_write_close(self._CONSENT_FILE, 'foo')
-
-        logging.info(('=' * 4) + 'Check that metrics samples can be uploaded '
-                     'with the default configuration')
-        self._test_simple_upload()
-
-        logging.info(('=' * 4) + 'Check that the metrics uploader does not '
-                     'crash when the backend server is unreachable')
-        self._test_server_unavailable()
-
-        logging.info(('=' * 4) + 'Check that the product id can be set '
-                     'through the GOOGLE_METRICS_PRODUCT_ID field in '
-                     '/etc/os-release.d/')
-        self._test_check_product_id()
-
-        os.remove(self._CONSENT_FILE)
-        logging.info(('=' * 4) + 'Check that metrics are not uploaded when '
-                     'metrics are disabled.')
-        self._test_metrics_disabled()
-
-
-    def cleanup(self):
-        self._services.restore_services()
-        self._tempdir.clean()
-
-        # The consent file might or might not exist depending on whether a test
-        # failed or not. Handle both cases.
-        if os.path.exists(self._CONSENT_FILE):
-          os.remove(self._CONSENT_FILE)
-
-        if os.path.exists(self._get_saved_consent_file_path()):
-          shutil.move(self._get_saved_consent_file_path(), self._CONSENT_FILE)
diff --git a/client/site_tests/platform_MetricsUploader/src/Makefile b/client/site_tests/platform_MetricsUploader/src/Makefile
deleted file mode 100644
index abfc11b..0000000
--- a/client/site_tests/platform_MetricsUploader/src/Makefile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-ifndef SYSROOT
-  $(error Define SYSROOT)
-endif
-
-OUT_DIR ?= .
-PROTO_PATH = $(SYSROOT)/usr/include/metrics/proto
-PROTO_DEFS = $(PROTO_PATH)/*.proto
-PROTO_BINDINGS = $(PROTO_DEFS:$PROTO_PATH%.proto=$OUT_DIR%_pb2.py)
-
-all: $(PROTO_BINDINGS)
-
-$(PROTO_BINDINGS): $(PROTO_DEFS)
-	protoc --proto_path=$(PROTO_PATH) --python_out=$(OUT_DIR) $(PROTO_DEFS)
-
-clean:
-	rm -f $(PROTO_BINDINGS)
diff --git a/client/site_tests/platform_MouseScrollTest/control b/client/site_tests/platform_MouseScrollTest/control
deleted file mode 100644
index 78584c4..0000000
--- a/client/site_tests/platform_MouseScrollTest/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "platform_MouseScrollTest"
-PURPOSE = "Scroll up and down with mouse pressure test."
-CRITERIA = """
-
-"""
-AUTHOR="Yi Xu"
-ATTRIBUTES = "suite:crosbolt_perf_nightly"
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "performance"
-TEST_TYPE = "client"
-DOC = """
-Opens some tough compositing websites, verify the fps rate when the page is
-scrolled up and down by mouse.
-"""
-
-job.run_test('platform_MouseScrollTest')
diff --git a/client/site_tests/platform_MouseScrollTest/mouse.prop b/client/site_tests/platform_MouseScrollTest/mouse.prop
deleted file mode 100644
index 5caaf22..0000000
--- a/client/site_tests/platform_MouseScrollTest/mouse.prop
+++ /dev/null
@@ -1,24 +0,0 @@
-N: Logitech-USB-PS/2-Optical-Mouse
-I: 0003 046d c03e 0110
-P: 00 00 00 00 00 00 00 00
-B: 00 17 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 07 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 01 00 00 00 00 00 00 00 00
-B: 02 03 01 00 00 00 00 00 00
-B: 03 00 00 00 00 00 00 00 00
-B: 04 10 00 00 00 00 00 00 00
-B: 05 00 00 00 00 00 00 00 00
-B: 11 00 00 00 00 00 00 00 00
-B: 12 00 00 00 00 00 00 00 00
-B: 15 00 00 00 00 00 00 00 00
-B: 15 00 00 00 00 00 00 00 00
\ No newline at end of file
diff --git a/client/site_tests/platform_MouseScrollTest/mouse_event b/client/site_tests/platform_MouseScrollTest/mouse_event
deleted file mode 100644
index 443a377..0000000
--- a/client/site_tests/platform_MouseScrollTest/mouse_event
+++ /dev/null
@@ -1,1008 +0,0 @@
-E: 1527063326.350417 0002 0000 1
-E: 1527063326.350417 0000 0000 0
-E: 1527063326.766171 0002 0000 1
-E: 1527063326.766171 0000 0000 0
-E: 1527063327.486273 0002 0000 1
-E: 1527063327.486273 0002 0001 -2
-E: 1527063327.486273 0000 0000 0
-E: 1527063327.494239 0002 0000 2
-E: 1527063327.494239 0002 0001 -3
-E: 1527063327.494239 0000 0000 0
-E: 1527063327.502259 0002 0000 1
-E: 1527063327.502259 0002 0001 -2
-E: 1527063327.502259 0000 0000 0
-E: 1527063327.510241 0002 0000 2
-E: 1527063327.510241 0002 0001 -3
-E: 1527063327.510241 0000 0000 0
-E: 1527063327.518256 0002 0000 1
-E: 1527063327.518256 0002 0001 -3
-E: 1527063327.518256 0000 0000 0
-E: 1527063327.526241 0002 0000 -2
-E: 1527063327.526241 0002 0001 -4
-E: 1527063327.526241 0000 0000 0
-E: 1527063327.534261 0002 0000 -4
-E: 1527063327.534261 0002 0001 -4
-E: 1527063327.534261 0000 0000 0
-E: 1527063327.542232 0002 0000 -7
-E: 1527063327.542232 0002 0001 -5
-E: 1527063327.542232 0000 0000 0
-E: 1527063327.550145 0002 0000 -8
-E: 1527063327.550145 0002 0001 -6
-E: 1527063327.550145 0000 0000 0
-E: 1527063327.558141 0002 0000 -9
-E: 1527063327.558141 0002 0001 -7
-E: 1527063327.558141 0000 0000 0
-E: 1527063327.566110 0002 0000 -8
-E: 1527063327.566110 0002 0001 -7
-E: 1527063327.566110 0000 0000 0
-E: 1527063327.574121 0002 0000 -7
-E: 1527063327.574121 0002 0001 -7
-E: 1527063327.574121 0000 0000 0
-E: 1527063327.582230 0002 0000 -9
-E: 1527063327.582230 0002 0001 -8
-E: 1527063327.582230 0000 0000 0
-E: 1527063327.590132 0002 0000 -8
-E: 1527063327.590132 0002 0001 -8
-E: 1527063327.590132 0000 0000 0
-E: 1527063327.598159 0002 0000 -8
-E: 1527063327.598159 0002 0001 -9
-E: 1527063327.598159 0000 0000 0
-E: 1527063327.606157 0002 0000 -6
-E: 1527063327.606157 0002 0001 -8
-E: 1527063327.606157 0000 0000 0
-E: 1527063327.614134 0002 0000 -7
-E: 1527063327.614134 0002 0001 -8
-E: 1527063327.614134 0000 0000 0
-E: 1527063327.622153 0002 0000 -5
-E: 1527063327.622153 0002 0001 -8
-E: 1527063327.622153 0000 0000 0
-E: 1527063327.630127 0002 0000 -11
-E: 1527063327.630127 0002 0001 -11
-E: 1527063327.630127 0000 0000 0
-E: 1527063327.638133 0002 0000 -8
-E: 1527063327.638133 0002 0001 -8
-E: 1527063327.638133 0000 0000 0
-E: 1527063327.646125 0002 0000 -8
-E: 1527063327.646125 0002 0001 -8
-E: 1527063327.646125 0000 0000 0
-E: 1527063327.654239 0002 0000 -10
-E: 1527063327.654239 0002 0001 -11
-E: 1527063327.654239 0000 0000 0
-E: 1527063327.662125 0002 0000 -9
-E: 1527063327.662125 0002 0001 -8
-E: 1527063327.662125 0000 0000 0
-E: 1527063327.670224 0002 0000 -12
-E: 1527063327.670224 0002 0001 -12
-E: 1527063327.670224 0000 0000 0
-E: 1527063327.678125 0002 0000 -9
-E: 1527063327.678125 0002 0001 -9
-E: 1527063327.678125 0000 0000 0
-E: 1527063327.686228 0002 0000 -11
-E: 1527063327.686228 0002 0001 -11
-E: 1527063327.686228 0000 0000 0
-E: 1527063327.694124 0002 0000 -8
-E: 1527063327.694124 0002 0001 -8
-E: 1527063327.694124 0000 0000 0
-E: 1527063327.702258 0002 0000 -10
-E: 1527063327.702258 0002 0001 -10
-E: 1527063327.702258 0000 0000 0
-E: 1527063327.710125 0002 0000 -9
-E: 1527063327.710125 0002 0001 -9
-E: 1527063327.710125 0000 0000 0
-E: 1527063327.718248 0002 0000 -7
-E: 1527063327.718248 0002 0001 -8
-E: 1527063327.718248 0000 0000 0
-E: 1527063327.726126 0002 0000 -10
-E: 1527063327.726126 0002 0001 -9
-E: 1527063327.726126 0000 0000 0
-E: 1527063327.734256 0002 0000 -7
-E: 1527063327.734256 0002 0001 -8
-E: 1527063327.734256 0000 0000 0
-E: 1527063327.742232 0002 0000 -8
-E: 1527063327.742232 0002 0001 -9
-E: 1527063327.742232 0000 0000 0
-E: 1527063327.750238 0002 0000 -5
-E: 1527063327.750238 0002 0001 -7
-E: 1527063327.750238 0000 0000 0
-E: 1527063327.758101 0002 0000 -7
-E: 1527063327.758101 0002 0001 -10
-E: 1527063327.758101 0000 0000 0
-E: 1527063327.766147 0002 0000 -6
-E: 1527063327.766147 0002 0001 -7
-E: 1527063327.766147 0000 0000 0
-E: 1527063327.774127 0002 0000 -7
-E: 1527063327.774127 0002 0001 -9
-E: 1527063327.774127 0000 0000 0
-E: 1527063327.782132 0002 0000 -7
-E: 1527063327.782132 0002 0001 -7
-E: 1527063327.782132 0000 0000 0
-E: 1527063327.790153 0002 0000 -6
-E: 1527063327.790153 0002 0001 -6
-E: 1527063327.790153 0000 0000 0
-E: 1527063327.798095 0002 0000 -9
-E: 1527063327.798095 0002 0001 -9
-E: 1527063327.798095 0000 0000 0
-E: 1527063327.806105 0002 0000 -6
-E: 1527063327.806105 0002 0001 -6
-E: 1527063327.806105 0000 0000 0
-E: 1527063327.814150 0002 0000 -8
-E: 1527063327.814150 0002 0001 -8
-E: 1527063327.814150 0000 0000 0
-E: 1527063327.822093 0002 0000 -7
-E: 1527063327.822093 0002 0001 -7
-E: 1527063327.822093 0000 0000 0
-E: 1527063327.830154 0002 0000 -7
-E: 1527063327.830154 0002 0001 -6
-E: 1527063327.830154 0000 0000 0
-E: 1527063327.838254 0002 0000 -7
-E: 1527063327.838254 0002 0001 -5
-E: 1527063327.838254 0000 0000 0
-E: 1527063327.846126 0002 0000 -8
-E: 1527063327.846126 0002 0001 -5
-E: 1527063327.846126 0000 0000 0
-E: 1527063327.854151 0002 0000 -6
-E: 1527063327.854151 0002 0001 -5
-E: 1527063327.854151 0000 0000 0
-E: 1527063327.862095 0002 0000 -6
-E: 1527063327.862095 0002 0001 -4
-E: 1527063327.862095 0000 0000 0
-E: 1527063327.870107 0002 0000 -7
-E: 1527063327.870107 0002 0001 -6
-E: 1527063327.870107 0000 0000 0
-E: 1527063327.878155 0002 0000 -6
-E: 1527063327.878155 0002 0001 -4
-E: 1527063327.878155 0000 0000 0
-E: 1527063327.886141 0002 0000 -8
-E: 1527063327.886141 0002 0001 -4
-E: 1527063327.886141 0000 0000 0
-E: 1527063327.894110 0002 0000 -5
-E: 1527063327.894110 0002 0001 -2
-E: 1527063327.894110 0000 0000 0
-E: 1527063327.902111 0002 0000 -6
-E: 1527063327.902111 0002 0001 -3
-E: 1527063327.902111 0000 0000 0
-E: 1527063327.910095 0002 0000 -5
-E: 1527063327.910095 0002 0001 -2
-E: 1527063327.910095 0000 0000 0
-E: 1527063327.918122 0002 0000 -5
-E: 1527063327.918122 0002 0001 -2
-E: 1527063327.918122 0000 0000 0
-E: 1527063327.926095 0002 0000 -5
-E: 1527063327.926095 0002 0001 -2
-E: 1527063327.926095 0000 0000 0
-E: 1527063327.934122 0002 0000 -4
-E: 1527063327.934122 0002 0001 -2
-E: 1527063327.934122 0000 0000 0
-E: 1527063327.942102 0002 0000 -4
-E: 1527063327.942102 0002 0001 -2
-E: 1527063327.942102 0000 0000 0
-E: 1527063327.950122 0002 0000 -3
-E: 1527063327.950122 0002 0001 -2
-E: 1527063327.950122 0000 0000 0
-E: 1527063327.958130 0002 0000 -2
-E: 1527063327.958130 0002 0001 -1
-E: 1527063327.958130 0000 0000 0
-E: 1527063327.966129 0002 0000 -2
-E: 1527063327.966129 0002 0001 -2
-E: 1527063327.966129 0000 0000 0
-E: 1527063327.974132 0002 0000 -2
-E: 1527063327.974132 0002 0001 -1
-E: 1527063327.974132 0000 0000 0
-E: 1527063327.982129 0002 0000 -2
-E: 1527063327.982129 0002 0001 -2
-E: 1527063327.982129 0000 0000 0
-E: 1527063327.990131 0002 0000 -3
-E: 1527063327.990131 0002 0001 -2
-E: 1527063327.990131 0000 0000 0
-E: 1527063327.998129 0002 0000 -3
-E: 1527063327.998129 0002 0001 -2
-E: 1527063327.998129 0000 0000 0
-E: 1527063328.006102 0002 0000 -2
-E: 1527063328.006102 0002 0001 -2
-E: 1527063328.006102 0000 0000 0
-E: 1527063328.014128 0002 0000 -3
-E: 1527063328.014128 0002 0001 -2
-E: 1527063328.014128 0000 0000 0
-E: 1527063328.022092 0002 0000 -2
-E: 1527063328.022092 0002 0001 -2
-E: 1527063328.022092 0000 0000 0
-E: 1527063328.030128 0002 0000 -3
-E: 1527063328.030128 0002 0001 -1
-E: 1527063328.030128 0000 0000 0
-E: 1527063328.038146 0002 0000 -1
-E: 1527063328.038146 0002 0001 -1
-E: 1527063328.038146 0000 0000 0
-E: 1527063328.046133 0002 0000 -2
-E: 1527063328.046133 0002 0001 -1
-E: 1527063328.046133 0000 0000 0
-E: 1527063328.054130 0002 0001 -1
-E: 1527063328.054130 0000 0000 0
-E: 1527063328.062129 0002 0000 -1
-E: 1527063328.062129 0002 0001 -1
-E: 1527063328.062129 0000 0000 0
-E: 1527063328.070126 0002 0000 -1
-E: 1527063328.070126 0000 0000 0
-E: 1527063328.078136 0002 0001 -1
-E: 1527063328.078136 0000 0000 0
-E: 1527063328.094122 0002 0001 -1
-E: 1527063328.094122 0000 0000 0
-E: 1527063328.110121 0002 0000 -1
-E: 1527063328.110121 0000 0000 0
-E: 1527063328.278134 0002 0000 1
-E: 1527063328.278134 0000 0000 0
-E: 1527063328.294219 0002 0000 2
-E: 1527063328.294219 0002 0001 1
-E: 1527063328.294219 0000 0000 0
-E: 1527063328.302241 0002 0000 3
-E: 1527063328.302241 0002 0001 2
-E: 1527063328.302241 0000 0000 0
-E: 1527063328.310142 0002 0000 2
-E: 1527063328.310142 0002 0001 2
-E: 1527063328.310142 0000 0000 0
-E: 1527063328.318146 0002 0000 5
-E: 1527063328.318146 0002 0001 3
-E: 1527063328.318146 0000 0000 0
-E: 1527063328.326152 0002 0000 4
-E: 1527063328.326152 0002 0001 3
-E: 1527063328.326152 0000 0000 0
-E: 1527063328.334110 0002 0000 7
-E: 1527063328.334110 0002 0001 6
-E: 1527063328.334110 0000 0000 0
-E: 1527063328.342135 0002 0000 5
-E: 1527063328.342135 0002 0001 4
-E: 1527063328.342135 0000 0000 0
-E: 1527063328.350222 0002 0000 7
-E: 1527063328.350222 0002 0001 5
-E: 1527063328.350222 0000 0000 0
-E: 1527063328.358100 0002 0000 9
-E: 1527063328.358100 0002 0001 6
-E: 1527063328.358100 0000 0000 0
-E: 1527063328.366214 0002 0000 8
-E: 1527063328.366214 0002 0001 5
-E: 1527063328.366214 0000 0000 0
-E: 1527063328.374104 0002 0000 9
-E: 1527063328.374104 0002 0001 7
-E: 1527063328.374104 0000 0000 0
-E: 1527063328.382096 0002 0000 10
-E: 1527063328.382096 0002 0001 6
-E: 1527063328.382096 0000 0000 0
-E: 1527063328.390103 0002 0000 10
-E: 1527063328.390103 0002 0001 6
-E: 1527063328.390103 0000 0000 0
-E: 1527063328.398097 0002 0000 8
-E: 1527063328.398097 0002 0001 5
-E: 1527063328.398097 0000 0000 0
-E: 1527063328.406135 0002 0000 8
-E: 1527063328.406135 0002 0001 4
-E: 1527063328.406135 0000 0000 0
-E: 1527063328.414081 0002 0000 10
-E: 1527063328.414081 0002 0001 6
-E: 1527063328.414081 0000 0000 0
-E: 1527063328.422080 0002 0000 7
-E: 1527063328.422080 0002 0001 5
-E: 1527063328.422080 0000 0000 0
-E: 1527063328.430095 0002 0000 7
-E: 1527063328.430095 0002 0001 4
-E: 1527063328.430095 0000 0000 0
-E: 1527063328.438078 0002 0000 5
-E: 1527063328.438078 0002 0001 4
-E: 1527063328.438078 0000 0000 0
-E: 1527063328.446078 0002 0000 5
-E: 1527063328.446078 0002 0001 4
-E: 1527063328.446078 0000 0000 0
-E: 1527063328.454081 0002 0000 4
-E: 1527063328.454081 0002 0001 2
-E: 1527063328.454081 0000 0000 0
-E: 1527063328.462143 0002 0000 3
-E: 1527063328.462143 0002 0001 3
-E: 1527063328.462143 0000 0000 0
-E: 1527063328.470134 0002 0000 2
-E: 1527063328.470134 0002 0001 1
-E: 1527063328.470134 0000 0000 0
-E: 1527063328.478080 0002 0000 2
-E: 1527063328.478080 0002 0001 2
-E: 1527063328.478080 0000 0000 0
-E: 1527063328.486116 0002 0000 3
-E: 1527063328.486116 0002 0001 2
-E: 1527063328.486116 0000 0000 0
-E: 1527063328.494080 0002 0000 2
-E: 1527063328.494080 0002 0001 2
-E: 1527063328.494080 0000 0000 0
-E: 1527063328.502137 0002 0000 3
-E: 1527063328.502137 0002 0001 1
-E: 1527063328.502137 0000 0000 0
-E: 1527063328.510079 0002 0000 2
-E: 1527063328.510079 0002 0001 2
-E: 1527063328.510079 0000 0000 0
-E: 1527063328.518120 0002 0000 3
-E: 1527063328.518120 0002 0001 3
-E: 1527063328.518120 0000 0000 0
-E: 1527063328.526070 0002 0000 3
-E: 1527063328.526070 0002 0001 1
-E: 1527063328.526070 0000 0000 0
-E: 1527063328.534120 0002 0000 3
-E: 1527063328.534120 0002 0001 3
-E: 1527063328.534120 0000 0000 0
-E: 1527063328.542073 0002 0000 3
-E: 1527063328.542073 0002 0001 2
-E: 1527063328.542073 0000 0000 0
-E: 1527063328.550124 0002 0000 3
-E: 1527063328.550124 0002 0001 3
-E: 1527063328.550124 0000 0000 0
-E: 1527063328.558078 0002 0000 5
-E: 1527063328.558078 0002 0001 3
-E: 1527063328.558078 0000 0000 0
-E: 1527063328.566078 0002 0000 4
-E: 1527063328.566078 0002 0001 2
-E: 1527063328.566078 0000 0000 0
-E: 1527063328.574071 0002 0000 6
-E: 1527063328.574071 0002 0001 3
-E: 1527063328.574071 0000 0000 0
-E: 1527063328.582077 0002 0000 5
-E: 1527063328.582077 0002 0001 2
-E: 1527063328.582077 0000 0000 0
-E: 1527063328.590114 0002 0000 7
-E: 1527063328.590114 0002 0001 4
-E: 1527063328.590114 0000 0000 0
-E: 1527063328.598078 0002 0000 6
-E: 1527063328.598078 0002 0001 2
-E: 1527063328.598078 0000 0000 0
-E: 1527063328.606115 0002 0000 6
-E: 1527063328.606115 0002 0001 3
-E: 1527063328.606115 0000 0000 0
-E: 1527063328.614116 0002 0000 7
-E: 1527063328.614116 0002 0001 3
-E: 1527063328.614116 0000 0000 0
-E: 1527063328.622112 0002 0000 7
-E: 1527063328.622112 0002 0001 3
-E: 1527063328.622112 0000 0000 0
-E: 1527063328.630111 0002 0000 6
-E: 1527063328.630111 0002 0001 2
-E: 1527063328.630111 0000 0000 0
-E: 1527063328.638124 0002 0000 4
-E: 1527063328.638124 0002 0001 2
-E: 1527063328.638124 0000 0000 0
-E: 1527063328.646110 0002 0000 4
-E: 1527063328.646110 0002 0001 1
-E: 1527063328.646110 0000 0000 0
-E: 1527063328.654113 0002 0000 3
-E: 1527063328.654113 0000 0000 0
-E: 1527063328.662117 0002 0000 1
-E: 1527063328.662117 0000 0000 0
-E: 1527063328.670113 0002 0000 1
-E: 1527063328.670113 0000 0000 0
-E: 1527063328.686070 0002 0001 -1
-E: 1527063328.686070 0000 0000 0
-E: 1527063328.694110 0002 0001 -1
-E: 1527063328.694110 0000 0000 0
-E: 1527063328.702124 0002 0000 -1
-E: 1527063328.702124 0002 0001 -1
-E: 1527063328.702124 0000 0000 0
-E: 1527063328.710109 0002 0000 -1
-E: 1527063328.710109 0002 0001 -1
-E: 1527063328.710109 0000 0000 0
-E: 1527063328.718114 0002 0001 -1
-E: 1527063328.718114 0000 0000 0
-E: 1527063328.726103 0002 0001 -1
-E: 1527063328.726103 0000 0000 0
-E: 1527063328.742072 0002 0000 -1
-E: 1527063328.742072 0000 0000 0
-E: 1527063328.750122 0002 0000 -1
-E: 1527063328.750122 0000 0000 0
-E: 1527063328.758110 0002 0000 -1
-E: 1527063328.758110 0000 0000 0
-E: 1527063328.766115 0002 0000 -1
-E: 1527063328.766115 0000 0000 0
-E: 1527063328.774104 0002 0000 -2
-E: 1527063328.774104 0000 0000 0
-E: 1527063328.790113 0002 0000 -1
-E: 1527063328.790113 0000 0000 0
-E: 1527063328.886151 0002 0001 1
-E: 1527063328.886151 0000 0000 0
-E: 1527063328.894088 0002 0000 1
-E: 1527063328.894088 0000 0000 0
-E: 1527063328.918084 0002 0000 1
-E: 1527063328.918084 0000 0000 0
-E: 1527063328.934127 0002 0000 1
-E: 1527063328.934127 0000 0000 0
-E: 1527063328.942074 0002 0001 -1
-E: 1527063328.942074 0000 0000 0
-E: 1527063328.950132 0002 0001 -1
-E: 1527063328.950132 0000 0000 0
-E: 1527063328.958073 0002 0001 -3
-E: 1527063328.958073 0000 0000 0
-E: 1527063328.966134 0002 0000 -1
-E: 1527063328.966134 0002 0001 -3
-E: 1527063328.966134 0000 0000 0
-E: 1527063328.974070 0002 0001 -2
-E: 1527063328.974070 0000 0000 0
-E: 1527063328.982066 0002 0001 -3
-E: 1527063328.982066 0000 0000 0
-E: 1527063328.990136 0002 0000 1
-E: 1527063328.990136 0002 0001 -2
-E: 1527063328.990136 0000 0000 0
-E: 1527063328.998134 0002 0000 1
-E: 1527063328.998134 0002 0001 -1
-E: 1527063328.998134 0000 0000 0
-E: 1527063329.006126 0002 0000 1
-E: 1527063329.006126 0002 0001 -2
-E: 1527063329.006126 0000 0000 0
-E: 1527063329.014134 0002 0001 -1
-E: 1527063329.014134 0000 0000 0
-E: 1527063329.022134 0002 0000 1
-E: 1527063329.022134 0000 0000 0
-E: 1527063329.078126 0002 0000 -1
-E: 1527063329.078126 0000 0000 0
-E: 1527063329.086149 0002 0000 -1
-E: 1527063329.086149 0002 0001 1
-E: 1527063329.086149 0000 0000 0
-E: 1527063329.102126 0002 0000 -1
-E: 1527063329.102126 0000 0000 0
-E: 1527063329.166138 0002 0000 -1
-E: 1527063329.166138 0000 0000 0
-E: 1527063329.214139 0002 0000 -1
-E: 1527063329.214139 0000 0000 0
-E: 1527063329.286115 0002 0000 1
-E: 1527063329.286115 0000 0000 0
-E: 1527063329.294129 0002 0000 1
-E: 1527063329.294129 0000 0000 0
-E: 1527063329.310082 0002 0000 1
-E: 1527063329.310082 0000 0000 0
-E: 1527063329.318117 0002 0000 1
-E: 1527063329.318117 0000 0000 0
-E: 1527063329.326147 0002 0000 1
-E: 1527063329.326147 0000 0000 0
-E: 1527063329.334127 0002 0000 1
-E: 1527063329.334127 0000 0000 0
-E: 1527063329.342104 0002 0000 1
-E: 1527063329.342104 0000 0000 0
-E: 1527063329.350125 0002 0000 1
-E: 1527063329.350125 0000 0000 0
-E: 1527063329.358155 0002 0000 1
-E: 1527063329.358155 0000 0000 0
-E: 1527063329.566131 0002 0000 1
-E: 1527063329.566131 0000 0000 0
-E: 1527063329.574077 0002 0000 1
-E: 1527063329.574077 0000 0000 0
-E: 1527063329.582113 0002 0000 1
-E: 1527063329.582113 0000 0000 0
-E: 1527063329.590121 0002 0000 1
-E: 1527063329.590121 0000 0000 0
-E: 1527063329.622123 0002 0001 -1
-E: 1527063329.622123 0000 0000 0
-E: 1527063329.718118 0002 0008 -1
-E: 1527063329.718118 0000 0000 0
-E: 1527063329.750116 0002 0008 -1
-E: 1527063329.750116 0000 0000 0
-E: 1527063329.766098 0002 0008 -1
-E: 1527063329.766098 0000 0000 0
-E: 1527063329.782088 0002 0008 -1
-E: 1527063329.782088 0000 0000 0
-E: 1527063329.798119 0002 0008 -1
-E: 1527063329.798119 0000 0000 0
-E: 1527063329.814089 0002 0008 -1
-E: 1527063329.814089 0000 0000 0
-E: 1527063329.822101 0002 0008 -1
-E: 1527063329.822101 0000 0000 0
-E: 1527063329.966091 0002 0008 1
-E: 1527063329.966091 0000 0000 0
-E: 1527063329.982104 0002 0008 1
-E: 1527063329.982104 0000 0000 0
-E: 1527063329.990090 0002 0008 1
-E: 1527063329.990090 0000 0000 0
-E: 1527063329.998087 0002 0008 1
-E: 1527063329.998087 0000 0000 0
-E: 1527063330.006088 0002 0008 1
-E: 1527063330.006088 0000 0000 0
-E: 1527063330.022041 0002 0008 1
-E: 1527063330.022041 0000 0000 0
-E: 1527063330.030084 0002 0008 1
-E: 1527063330.030084 0000 0000 0
-E: 1527063330.038084 0002 0008 1
-E: 1527063330.038084 0000 0000 0
-E: 1527063330.046054 0002 0001 -1
-E: 1527063330.046054 0000 0000 0
-E: 1527063330.054162 0002 0001 -2
-E: 1527063330.054162 0002 0008 1
-E: 1527063330.054162 0000 0000 0
-E: 1527063330.062057 0002 0001 -1
-E: 1527063330.062057 0000 0000 0
-E: 1527063330.070055 0002 0001 -1
-E: 1527063330.070055 0000 0000 0
-E: 1527063330.078088 0002 0001 -1
-E: 1527063330.078088 0000 0000 0
-E: 1527063330.086074 0002 0008 1
-E: 1527063330.086074 0000 0000 0
-E: 1527063330.142094 0002 0000 1
-E: 1527063330.142094 0000 0000 0
-E: 1527063330.158098 0002 0008 -1
-E: 1527063330.158098 0000 0000 0
-E: 1527063330.174051 0002 0008 -1
-E: 1527063330.174051 0000 0000 0
-E: 1527063330.190082 0002 0008 -1
-E: 1527063330.190082 0000 0000 0
-E: 1527063330.198082 0002 0008 -1
-E: 1527063330.198082 0000 0000 0
-E: 1527063330.214082 0002 0008 -1
-E: 1527063330.214082 0000 0000 0
-E: 1527063330.222053 0002 0008 -1
-E: 1527063330.222053 0000 0000 0
-E: 1527063330.246082 0002 0008 -1
-E: 1527063330.246082 0000 0000 0
-E: 1527063330.334046 0002 0008 1
-E: 1527063330.334046 0000 0000 0
-E: 1527063330.342052 0002 0008 1
-E: 1527063330.342052 0000 0000 0
-E: 1527063330.358049 0002 0008 1
-E: 1527063330.358049 0000 0000 0
-E: 1527063330.366049 0002 0008 1
-E: 1527063330.366049 0000 0000 0
-E: 1527063330.382048 0002 0008 1
-E: 1527063330.382048 0000 0000 0
-E: 1527063330.390050 0002 0008 1
-E: 1527063330.390050 0000 0000 0
-E: 1527063330.406045 0002 0008 1
-E: 1527063330.406045 0000 0000 0
-E: 1527063330.422045 0002 0008 1
-E: 1527063330.422045 0000 0000 0
-E: 1527063330.446044 0002 0008 1
-E: 1527063330.446044 0000 0000 0
-E: 1527063330.494050 0002 0008 -1
-E: 1527063330.494050 0000 0000 0
-E: 1527063330.518045 0002 0008 -1
-E: 1527063330.518045 0000 0000 0
-E: 1527063330.534107 0002 0008 -1
-E: 1527063330.534107 0000 0000 0
-E: 1527063330.550078 0002 0008 -1
-E: 1527063330.550078 0000 0000 0
-E: 1527063330.558049 0002 0008 -1
-E: 1527063330.558049 0000 0000 0
-E: 1527063330.574084 0002 0008 -1
-E: 1527063330.574084 0000 0000 0
-E: 1527063330.582079 0002 0008 -1
-E: 1527063330.582079 0000 0000 0
-E: 1527063330.598075 0002 0008 -1
-E: 1527063330.598075 0000 0000 0
-E: 1527063330.614075 0002 0008 -1
-E: 1527063330.614075 0000 0000 0
-E: 1527063330.638072 0002 0008 -1
-E: 1527063330.638072 0000 0000 0
-E: 1527063330.742072 0002 0008 1
-E: 1527063330.742072 0000 0000 0
-E: 1527063330.758074 0002 0008 1
-E: 1527063330.758074 0000 0000 0
-E: 1527063330.774035 0002 0008 1
-E: 1527063330.774035 0000 0000 0
-E: 1527063330.782084 0002 0008 1
-E: 1527063330.782084 0000 0000 0
-E: 1527063330.790077 0002 0008 1
-E: 1527063330.790077 0000 0000 0
-E: 1527063330.806068 0002 0008 1
-E: 1527063330.806068 0000 0000 0
-E: 1527063330.814074 0002 0008 1
-E: 1527063330.814074 0000 0000 0
-E: 1527063330.822085 0002 0000 1
-E: 1527063330.822085 0002 0008 1
-E: 1527063330.822085 0000 0000 0
-E: 1527063330.838058 0002 0008 1
-E: 1527063330.838058 0000 0000 0
-E: 1527063330.854031 0002 0008 1
-E: 1527063330.854031 0000 0000 0
-E: 1527063330.926039 0002 0008 -1
-E: 1527063330.926039 0000 0000 0
-E: 1527063330.942039 0002 0008 -1
-E: 1527063330.942039 0000 0000 0
-E: 1527063330.958038 0002 0008 -1
-E: 1527063330.958038 0000 0000 0
-E: 1527063330.966073 0002 0008 -1
-E: 1527063330.966073 0000 0000 0
-E: 1527063330.974072 0002 0008 -1
-E: 1527063330.974072 0000 0000 0
-E: 1527063330.990038 0002 0008 -1
-E: 1527063330.990038 0000 0000 0
-E: 1527063330.998041 0002 0008 -1
-E: 1527063330.998041 0000 0000 0
-E: 1527063331.006042 0002 0008 -1
-E: 1527063331.006042 0000 0000 0
-E: 1527063331.014040 0002 0008 -1
-E: 1527063331.014040 0000 0000 0
-E: 1527063331.022041 0002 0008 -1
-E: 1527063331.022041 0000 0000 0
-E: 1527063331.038051 0002 0008 -1
-E: 1527063331.038051 0000 0000 0
-E: 1527063331.150035 0002 0008 1
-E: 1527063331.150035 0000 0000 0
-E: 1527063331.158085 0002 0008 1
-E: 1527063331.158085 0000 0000 0
-E: 1527063331.174062 0002 0008 1
-E: 1527063331.174062 0000 0000 0
-E: 1527063331.182066 0002 0008 1
-E: 1527063331.182066 0000 0000 0
-E: 1527063331.190030 0002 0008 1
-E: 1527063331.190030 0000 0000 0
-E: 1527063331.198067 0002 0008 1
-E: 1527063331.198067 0000 0000 0
-E: 1527063331.206029 0002 0008 1
-E: 1527063331.206029 0000 0000 0
-E: 1527063331.214070 0002 0008 1
-E: 1527063331.214070 0000 0000 0
-E: 1527063331.222069 0002 0008 1
-E: 1527063331.222069 0000 0000 0
-E: 1527063331.238089 0002 0008 1
-E: 1527063331.238089 0000 0000 0
-E: 1527063331.246069 0002 0008 1
-E: 1527063331.246069 0000 0000 0
-E: 1527063331.254064 0002 0008 1
-E: 1527063331.254064 0000 0000 0
-E: 1527063331.270094 0002 0008 1
-E: 1527063331.270094 0000 0000 0
-E: 1527063331.278030 0002 0001 -1
-E: 1527063331.278030 0000 0000 0
-E: 1527063331.302064 0002 0001 -1
-E: 1527063331.302064 0002 0008 1
-E: 1527063331.302064 0000 0000 0
-E: 1527063331.326064 0002 0008 -1
-E: 1527063331.326064 0000 0000 0
-E: 1527063331.350033 0002 0008 -1
-E: 1527063331.350033 0000 0000 0
-E: 1527063331.374031 0002 0008 -1
-E: 1527063331.374031 0000 0000 0
-E: 1527063331.390034 0002 0008 -1
-E: 1527063331.390034 0000 0000 0
-E: 1527063331.398033 0002 0008 -1
-E: 1527063331.398033 0000 0000 0
-E: 1527063331.414065 0002 0008 -1
-E: 1527063331.414065 0000 0000 0
-E: 1527063331.422066 0002 0008 -1
-E: 1527063331.422066 0000 0000 0
-E: 1527063331.430065 0002 0008 -1
-E: 1527063331.430065 0000 0000 0
-E: 1527063331.438056 0002 0008 -1
-E: 1527063331.438056 0000 0000 0
-E: 1527063331.446080 0002 0008 -1
-E: 1527063331.446080 0000 0000 0
-E: 1527063331.462088 0002 0008 -1
-E: 1527063331.462088 0000 0000 0
-E: 1527063331.470065 0002 0008 -1
-E: 1527063331.470065 0000 0000 0
-E: 1527063331.486091 0002 0008 -1
-E: 1527063331.486091 0000 0000 0
-E: 1527063331.598056 0002 0008 1
-E: 1527063331.598056 0000 0000 0
-E: 1527063331.614092 0002 0008 1
-E: 1527063331.614092 0000 0000 0
-E: 1527063331.622063 0002 0008 1
-E: 1527063331.622063 0000 0000 0
-E: 1527063331.638055 0002 0008 1
-E: 1527063331.638055 0000 0000 0
-E: 1527063331.646063 0002 0008 1
-E: 1527063331.646063 0000 0000 0
-E: 1527063331.654093 0002 0008 1
-E: 1527063331.654093 0000 0000 0
-E: 1527063331.662064 0002 0008 1
-E: 1527063331.662064 0000 0000 0
-E: 1527063331.670093 0002 0008 1
-E: 1527063331.670093 0000 0000 0
-E: 1527063331.678061 0002 0008 1
-E: 1527063331.678061 0000 0000 0
-E: 1527063331.694057 0002 0008 1
-E: 1527063331.694057 0000 0000 0
-E: 1527063331.710026 0002 0008 1
-E: 1527063331.710026 0000 0000 0
-E: 1527063331.726025 0002 0008 1
-E: 1527063331.726025 0000 0000 0
-E: 1527063331.734034 0002 0000 1
-E: 1527063331.734034 0000 0000 0
-E: 1527063331.806018 0002 0008 -1
-E: 1527063331.806018 0000 0000 0
-E: 1527063331.822023 0002 0000 1
-E: 1527063331.822023 0000 0000 0
-E: 1527063331.830030 0002 0008 -1
-E: 1527063331.830030 0000 0000 0
-E: 1527063331.838064 0002 0008 -1
-E: 1527063331.838064 0000 0000 0
-E: 1527063331.846125 0002 0000 1
-E: 1527063331.846125 0000 0000 0
-E: 1527063331.854053 0002 0001 1
-E: 1527063331.854053 0002 0008 -1
-E: 1527063331.854053 0000 0000 0
-E: 1527063331.862027 0002 0008 -1
-E: 1527063331.862027 0000 0000 0
-E: 1527063331.870021 0002 0008 -1
-E: 1527063331.870021 0000 0000 0
-E: 1527063331.878028 0002 0008 -1
-E: 1527063331.878028 0000 0000 0
-E: 1527063331.886022 0002 0008 -1
-E: 1527063331.886022 0000 0000 0
-E: 1527063331.894027 0002 0008 -1
-E: 1527063331.894027 0000 0000 0
-E: 1527063331.910158 0002 0008 -1
-E: 1527063331.910158 0000 0000 0
-E: 1527063331.918093 0002 0008 -1
-E: 1527063331.918093 0000 0000 0
-E: 1527063331.982050 0002 0000 1
-E: 1527063331.982050 0000 0000 0
-E: 1527063332.022032 0002 0001 -1
-E: 1527063332.022032 0000 0000 0
-E: 1527063332.046020 0002 0008 1
-E: 1527063332.046020 0000 0000 0
-E: 1527063332.054018 0002 0008 1
-E: 1527063332.054018 0000 0000 0
-E: 1527063332.070050 0002 0008 1
-E: 1527063332.070050 0000 0000 0
-E: 1527063332.078054 0002 0008 1
-E: 1527063332.078054 0000 0000 0
-E: 1527063332.086077 0002 0008 1
-E: 1527063332.086077 0000 0000 0
-E: 1527063332.094056 0002 0008 1
-E: 1527063332.094056 0000 0000 0
-E: 1527063332.102089 0002 0008 1
-E: 1527063332.102089 0000 0000 0
-E: 1527063332.110054 0002 0008 1
-E: 1527063332.110054 0000 0000 0
-E: 1527063332.118018 0002 0000 1
-E: 1527063332.118018 0002 0008 1
-E: 1527063332.118018 0000 0000 0
-E: 1527063332.126024 0002 0008 1
-E: 1527063332.126024 0000 0000 0
-E: 1527063332.142019 0002 0008 1
-E: 1527063332.142019 0000 0000 0
-E: 1527063332.150022 0002 0008 1
-E: 1527063332.150022 0000 0000 0
-E: 1527063332.174021 0002 0008 1
-E: 1527063332.174021 0000 0000 0
-E: 1527063332.246141 0002 0008 -1
-E: 1527063332.246141 0000 0000 0
-E: 1527063332.262020 0002 0008 -1
-E: 1527063332.262020 0000 0000 0
-E: 1527063332.278017 0002 0008 -1
-E: 1527063332.278017 0000 0000 0
-E: 1527063332.294034 0002 0008 -1
-E: 1527063332.294034 0000 0000 0
-E: 1527063332.302090 0002 0008 -1
-E: 1527063332.302090 0000 0000 0
-E: 1527063332.310024 0002 0008 -1
-E: 1527063332.310024 0000 0000 0
-E: 1527063332.326017 0002 0008 -1
-E: 1527063332.326017 0000 0000 0
-E: 1527063332.334086 0002 0008 -1
-E: 1527063332.334086 0000 0000 0
-E: 1527063332.342041 0002 0008 -1
-E: 1527063332.342041 0000 0000 0
-E: 1527063332.350020 0002 0008 -1
-E: 1527063332.350020 0000 0000 0
-E: 1527063332.358024 0002 0008 -1
-E: 1527063332.358024 0000 0000 0
-E: 1527063332.374019 0002 0008 -1
-E: 1527063332.374019 0000 0000 0
-E: 1527063332.478019 0002 0008 1
-E: 1527063332.478019 0000 0000 0
-E: 1527063332.486018 0002 0008 1
-E: 1527063332.486018 0000 0000 0
-E: 1527063332.502014 0002 0008 1
-E: 1527063332.502014 0000 0000 0
-E: 1527063332.510019 0002 0008 1
-E: 1527063332.510019 0000 0000 0
-E: 1527063332.518017 0002 0008 1
-E: 1527063332.518017 0000 0000 0
-E: 1527063332.526049 0002 0008 1
-E: 1527063332.526049 0000 0000 0
-E: 1527063332.534050 0002 0008 1
-E: 1527063332.534050 0000 0000 0
-E: 1527063332.542051 0002 0008 1
-E: 1527063332.542051 0000 0000 0
-E: 1527063332.558045 0002 0008 1
-E: 1527063332.558045 0000 0000 0
-E: 1527063332.566058 0002 0008 1
-E: 1527063332.566058 0000 0000 0
-E: 1527063332.574051 0002 0008 1
-E: 1527063332.574051 0000 0000 0
-E: 1527063332.590015 0002 0008 1
-E: 1527063332.590015 0000 0000 0
-E: 1527063332.606011 0002 0008 1
-E: 1527063332.606011 0000 0000 0
-E: 1527063332.686067 0002 0008 -1
-E: 1527063332.686067 0000 0000 0
-E: 1527063332.710021 0002 0008 -1
-E: 1527063332.710021 0000 0000 0
-E: 1527063332.718015 0002 0008 -1
-E: 1527063332.718015 0000 0000 0
-E: 1527063332.734064 0002 0008 -1
-E: 1527063332.734064 0000 0000 0
-E: 1527063332.742019 0002 0008 -1
-E: 1527063332.742019 0000 0000 0
-E: 1527063332.750013 0002 0008 -1
-E: 1527063332.750013 0000 0000 0
-E: 1527063332.758018 0002 0008 -1
-E: 1527063332.758018 0000 0000 0
-E: 1527063332.766006 0002 0008 -1
-E: 1527063332.766006 0000 0000 0
-E: 1527063332.774018 0002 0008 -1
-E: 1527063332.774018 0000 0000 0
-E: 1527063332.782015 0002 0008 -1
-E: 1527063332.782015 0000 0000 0
-E: 1527063332.798011 0002 0008 -1
-E: 1527063332.798011 0000 0000 0
-E: 1527063332.806008 0002 0008 -1
-E: 1527063332.806008 0000 0000 0
-E: 1527063332.950007 0002 0008 1
-E: 1527063332.950007 0000 0000 0
-E: 1527063332.958013 0002 0008 1
-E: 1527063332.958013 0000 0000 0
-E: 1527063332.966070 0002 0008 1
-E: 1527063332.966070 0000 0000 0
-E: 1527063332.974013 0002 0008 1
-E: 1527063332.974013 0000 0000 0
-E: 1527063332.982009 0002 0008 1
-E: 1527063332.982009 0000 0000 0
-E: 1527063332.990010 0002 0008 1
-E: 1527063332.990010 0000 0000 0
-E: 1527063332.998010 0002 0008 1
-E: 1527063332.998010 0000 0000 0
-E: 1527063333.006009 0002 0008 1
-E: 1527063333.006009 0000 0000 0
-E: 1527063333.022065 0002 0008 1
-E: 1527063333.022065 0000 0000 0
-E: 1527063333.030021 0002 0008 1
-E: 1527063333.030021 0000 0000 0
-E: 1527063333.038000 0002 0008 1
-E: 1527063333.038000 0000 0000 0
-E: 1527063333.054041 0002 0008 1
-E: 1527063333.054041 0000 0000 0
-E: 1527063333.158036 0002 0008 -1
-E: 1527063333.158036 0000 0000 0
-E: 1527063333.174003 0002 0008 -1
-E: 1527063333.174003 0000 0000 0
-E: 1527063333.190003 0002 0008 -1
-E: 1527063333.190003 0000 0000 0
-E: 1527063333.205996 0002 0008 -1
-E: 1527063333.205996 0000 0000 0
-E: 1527063333.214007 0002 0008 -1
-E: 1527063333.214007 0000 0000 0
-E: 1527063333.222069 0002 0008 -1
-E: 1527063333.222069 0000 0000 0
-E: 1527063333.230008 0002 0008 -1
-E: 1527063333.230008 0000 0000 0
-E: 1527063333.238065 0002 0008 -1
-E: 1527063333.238065 0000 0000 0
-E: 1527063333.246008 0002 0008 -1
-E: 1527063333.246008 0000 0000 0
-E: 1527063333.254072 0002 0008 -1
-E: 1527063333.254072 0000 0000 0
-E: 1527063333.270065 0002 0008 -1
-E: 1527063333.270065 0000 0000 0
-E: 1527063333.278051 0002 0008 -1
-E: 1527063333.278051 0000 0000 0
-E: 1527063333.414061 0002 0008 1
-E: 1527063333.414061 0000 0000 0
-E: 1527063333.422056 0002 0008 1
-E: 1527063333.422056 0000 0000 0
-E: 1527063333.438001 0002 0008 1
-E: 1527063333.438001 0000 0000 0
-E: 1527063333.446036 0002 0008 1
-E: 1527063333.446036 0000 0000 0
-E: 1527063333.454065 0002 0008 1
-E: 1527063333.454065 0000 0000 0
-E: 1527063333.462004 0002 0008 1
-E: 1527063333.462004 0000 0000 0
-E: 1527063333.470063 0002 0008 1
-E: 1527063333.470063 0000 0000 0
-E: 1527063333.478001 0002 0008 1
-E: 1527063333.478001 0000 0000 0
-E: 1527063333.486002 0002 0008 1
-E: 1527063333.486002 0000 0000 0
-E: 1527063333.494002 0002 0008 1
-E: 1527063333.494002 0000 0000 0
-E: 1527063333.502001 0002 0008 1
-E: 1527063333.502001 0000 0000 0
-E: 1527063333.518025 0002 0008 1
-E: 1527063333.518025 0000 0000 0
-E: 1527063333.534027 0002 0008 1
-E: 1527063333.534027 0000 0000 0
-E: 1527063333.605998 0002 0008 -1
-E: 1527063333.605998 0000 0000 0
-E: 1527063333.622009 0002 0008 -1
-E: 1527063333.622009 0000 0000 0
-E: 1527063333.638015 0002 0008 -1
-E: 1527063333.638015 0000 0000 0
-E: 1527063333.646007 0002 0008 -1
-E: 1527063333.646007 0000 0000 0
-E: 1527063333.661997 0002 0008 -1
-E: 1527063333.661997 0000 0000 0
-E: 1527063333.670034 0002 0008 -1
-E: 1527063333.670034 0000 0000 0
-E: 1527063333.678002 0002 0008 -1
-E: 1527063333.678002 0000 0000 0
-E: 1527063333.686066 0002 0008 -1
-E: 1527063333.686066 0000 0000 0
-E: 1527063333.694006 0002 0008 -1
-E: 1527063333.694006 0000 0000 0
-E: 1527063333.702032 0002 0008 -1
-E: 1527063333.702032 0000 0000 0
-E: 1527063333.710001 0002 0008 -1
-E: 1527063333.710001 0000 0000 0
-E: 1527063333.718001 0002 0008 -1
-E: 1527063333.718001 0000 0000 0
-E: 1527063334.054079 0002 0000 2
-E: 1527063334.054079 0002 0001 -1
-E: 1527063334.054079 0000 0000 0
-E: 1527063334.062167 0002 0000 3
-E: 1527063334.062167 0002 0001 -1
-E: 1527063334.062167 0000 0000 0
-E: 1527063334.070023 0002 0000 3
-E: 1527063334.070023 0002 0001 -1
-E: 1527063334.070023 0000 0000 0
-E: 1527063334.078032 0002 0000 3
-E: 1527063334.078032 0002 0001 -2
-E: 1527063334.078032 0000 0000 0
-E: 1527063334.086019 0002 0000 5
-E: 1527063334.086019 0002 0001 -1
-E: 1527063334.086019 0000 0000 0
-E: 1527063334.094057 0002 0000 3
-E: 1527063334.094057 0002 0001 -1
-E: 1527063334.094057 0000 0000 0
-E: 1527063334.102056 0002 0000 5
-E: 1527063334.102056 0002 0001 -3
-E: 1527063334.102056 0000 0000 0
-E: 1527063334.110060 0002 0000 4
-E: 1527063334.110060 0002 0001 -1
-E: 1527063334.110060 0000 0000 0
-E: 1527063334.118065 0002 0000 4
-E: 1527063334.118065 0002 0001 -2
-E: 1527063334.118065 0000 0000 0
-E: 1527063334.126059 0002 0000 3
-E: 1527063334.126059 0002 0001 -1
-E: 1527063334.126059 0000 0000 0
-E: 1527063334.134162 0002 0000 2
-E: 1527063334.134162 0002 0001 -1
-E: 1527063334.134162 0000 0000 0
-E: 1527063334.142063 0002 0000 1
-E: 1527063334.142063 0002 0001 -1
-E: 1527063334.142063 0000 0000 0
-E: 1527063334.214126 0002 0000 -1
-E: 1527063334.214126 0000 0000 0
-E: 1527063334.254106 0002 0000 -1
-E: 1527063334.254106 0002 0001 1
-E: 1527063334.254106 0000 0000 0
-E: 1527063334.278035 0002 0000 -1
-E: 1527063334.278035 0000 0000 0
-E: 1527063334.286150 0002 0000 -1
-E: 1527063334.286150 0002 0001 1
-E: 1527063334.286150 0000 0000 0
-E: 1527063334.302128 0002 0000 -1
-E: 1527063334.302128 0000 0000 0
-E: 1527063334.310022 0002 0000 -1
-E: 1527063334.310022 0002 0001 1
-E: 1527063334.310022 0000 0000 0
-E: 1527063334.325993 0002 0000 -1
-E: 1527063334.325993 0000 0000 0
-E: 1527063334.334014 0002 0000 -1
-E: 1527063334.334014 0000 0000 0
-E: 1527063334.350054 0002 0001 1
-E: 1527063334.350054 0000 0000 0
-E: 1527063334.374116 0002 0000 -1
-E: 1527063334.374116 0000 0000 0
-E: 1527063334.462160 0002 0000 -1
-E: 1527063334.462160 0000 0000 0
-E: 1527063334.502153 0002 0001 1
-E: 1527063334.502153 0000 0000 0
-E: 1527063334.518033 0002 0000 -1
-E: 1527063334.518033 0000 0000 0
-E: 1527063334.534144 0002 0000 -1
-E: 1527063334.534144 0000 0000 0
-E: 1527063334.542027 0002 0000 -1
-E: 1527063334.542027 0000 0000 0
-E: 1527063334.558020 0002 0001 1
-E: 1527063334.558020 0000 0000 0
-E: 1527063334.565999 0002 0000 -1
-E: 1527063334.565999 0000 0000 0
-E: 1527063334.581983 0002 0000 -1
-E: 1527063334.581983 0000 0000 0
-E: 1527063334.614035 0002 0001 1
-E: 1527063334.614035 0000 0000 0
-E: 1527063334.702164 0002 0000 -1
-E: 1527063334.702164 0000 0000 0
-E: 1527063335.069999 0002 0000 -1
-E: 1527063335.069999 0000 0000 0
-E: 1527063336.510005 0002 0000 -1
-E: 1527063336.510005 0000 0000 0
\ No newline at end of file
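
The deleted mouse_event recording above is evemu text output: each "E:" line is "E: <sec.usec> <type> <code> <value>", with type and code printed as hexadecimal. In this recording, type 0002 is EV_REL (relative motion; code 0000 = REL_X, 0001 = REL_Y, 0008 = REL_WHEEL) and the "0000 0000 0" lines are EV_SYN/SYN_REPORT packet delimiters. A minimal decoding sketch, not part of the deleted test:

# Minimal sketch: decode one evemu "E:" line. The constants follow the Linux
# input-event ABI; the helper name is illustrative.
EV_SYN, EV_REL = 0x00, 0x02
REL_X, REL_Y, REL_WHEEL = 0x00, 0x01, 0x08

def parse_evemu_line(line):
    """Return (timestamp, type, code, value) for a line like
    'E: 1527063330.142094 0002 0000 1'."""
    _, timestamp, ev_type, code, value = line.split()
    return float(timestamp), int(ev_type, 16), int(code, 16), int(value)

ts, ev_type, code, value = parse_evemu_line('E: 1527063330.142094 0002 0000 1')
assert ev_type == EV_REL and code == REL_X and value == 1
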
diff --git a/client/site_tests/platform_MouseScrollTest/platform_MouseScrollTest.py b/client/site_tests/platform_MouseScrollTest/platform_MouseScrollTest.py
deleted file mode 100644
index 12b0107..0000000
--- a/client/site_tests/platform_MouseScrollTest/platform_MouseScrollTest.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import functools
-import logging
-import numpy
-import os
-import time
-
-from autotest_lib.client.bin import fps_meter
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import touch_playback_test_base
-
-import py_utils
-
-""" The tracing file that contains the desired mouse scrolling events. """
-_PLAYBACK_FILE = 'mouse_event'
-
-""" Description of the fake mouse we add to the system. """
-_MOUSE_DESCRIPTION = 'mouse.prop'
-
-""" List of URLs that will be used to test users gestures on. """
-_LIST_OF_URLS = ['https://www.youtube.com', 'https://www.cnn.com',
-    'https://slashdot.org/']
-
-""" Separator used in fps_meter for each VSync """
-_SEPARATOR = ' '
-
-class platform_MouseScrollTest(
-    touch_playback_test_base.touch_playback_test_base):
-    """Fast scroll up and down with mouse pressure test."""
-    version = 1
-
-    def _play_events(self, event_filename):
-        """
-        Simulate mouse events for user scrolling.
-
-        @param event_filename: string file name containing the events
-        to play back.
-        """
-        file_path = os.path.join(self.bindir, event_filename)
-        self._blocking_playback(str(file_path), touch_type='mouse')
-
-    def run_once(self):
-        """ Runs the test once. """
-        mouse_file = os.path.join(self.bindir, _MOUSE_DESCRIPTION)
-        self._emulate_mouse(property_file=mouse_file)
-
-        def record_fps_info(fps_data, fps_info):
-            """ record the fps info from |fps_meter| """
-            frame_info, frame_times = fps_info
-            frame_info_str = ''.join(frame_info)
-            fps_count = sum(
-                map(int, frame_info_str.replace(_SEPARATOR, "")))
-            fps_data.append(fps_count)
-
-        fps_data = []
-        fps = fps_meter.FPSMeter(functools.partial(record_fps_info, fps_data))
-        with chrome.Chrome(init_network_controller=True) as cr:
-            for url in _LIST_OF_URLS:
-                tab = cr.browser.tabs.New()
-                tab.Navigate(url)
-                try:
-                    tab.WaitForDocumentReadyStateToBeComplete(timeout=15)
-                except py_utils.TimeoutException:
-                    logging.warning('Timed out loading url: %s', url)
-
-                tab.Activate()
-                cr.browser.platform.SetHTTPServerDirectories(self.bindir)
-                fps.start()
-                self._play_events(_PLAYBACK_FILE)
-                fps.stop()
-                time.sleep(1)
-
-            value = numpy.mean(fps_data)
-
-            self.output_perf_value(description='fps average',
-                                   value=value,
-                                   units='frames per second',
-                                   higher_is_better=True)
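
The record_fps_info() callback above collapses the fps_meter report into a single frame count; a standalone sketch of that reduction, assuming (as the callback implies) that frame_info is a list of strings of per-VSync digits joined by the separator:

# Sketch of the reduction done by record_fps_info() above. It assumes
# fps_meter reports frame_info as a list of digit strings (one digit per
# VSync interval) joined by _SEPARATOR, which is what replace/sum implies.
_SEPARATOR = ' '

def count_frames(frame_info):
    digits = ''.join(frame_info).replace(_SEPARATOR, '')
    return sum(int(d) for d in digits)

# e.g. two reports covering seven VSync intervals, five of which drew a frame:
assert count_frames(['1 0 1 1', '1 1 0']) == 5
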
diff --git a/client/site_tests/platform_Nvram/control b/client/site_tests/platform_Nvram/control
deleted file mode 100644
index c04ecd1..0000000
--- a/client/site_tests/platform_Nvram/control
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-DOC = "Verifies "
-NAME = "platform_Nvram"
-PURPOSE = "Verify /dev/nvram exists and is readable."
-CRITERIA = """
-Fail if /dev/nvram does not exist or is not readable.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-job.run_test('platform_Nvram')
diff --git a/client/site_tests/platform_Nvram/platform_Nvram.py b/client/site_tests/platform_Nvram/platform_Nvram.py
deleted file mode 100755
index d07c19f..0000000
--- a/client/site_tests/platform_Nvram/platform_Nvram.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class platform_Nvram(test.test):
-    """
-    Test /dev/nvram
-    """
-    version = 1
-
-    def run_once(self):
-        nvram_path = '/dev/nvram'
-        if not os.path.exists(nvram_path):
-            raise error.TestFail('%s does not exist.' % nvram_path)
-        with open(nvram_path, 'rb') as nvram_file:
-            if not nvram_file.read(1):
-                raise error.TestFail('cannot read from %s.' % nvram_path)
diff --git a/client/site_tests/platform_OpenSSLActual/control b/client/site_tests/platform_OpenSSLActual/control
deleted file mode 100644
index 3dea460..0000000
--- a/client/site_tests/platform_OpenSSLActual/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME="SHORT"
-AUTHOR = "The Chromium OS Authors (chromium-os-dev@chromium.org)"
-DOC = """
-Tests that OpenSSL (in the default configuration) works.
-"""
-NAME = "platform_OpenSSLActual"
-PURPOSE = "Real-world integration test for openssl"
-CRITERIA = """
-Fail if curl (which uses openssl) fails to actually fetch https://www.google.com
-"""
-TEST_CLASS = "platform"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-
-job.run_test("platform_OpenSSLActual")
diff --git a/client/site_tests/platform_OpenSSLActual/platform_OpenSSLActual.py b/client/site_tests/platform_OpenSSLActual/platform_OpenSSLActual.py
deleted file mode 100644
index e428ad96..0000000
--- a/client/site_tests/platform_OpenSSLActual/platform_OpenSSLActual.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import utils
-
-class platform_OpenSSLActual(test.test):
-    """Verifies that openssl works by fetching https://www.google.com."""
-    version = 1
-
-    def curl(self, rest):
-        base = '/usr/bin/curl -sSIo /dev/null'
-        out = utils.system_output('%s %s' % (base, rest))
-        print(out)
-
-    def run_once(self):
-        self.curl('https://www.google.com')
-        self.curl('--capath /mnt/empty https://www.google.com; [ $? != 0 ]')
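
The second curl() call above is a negative test: appending "; [ $? != 0 ]" makes the shell command as a whole succeed only when curl itself fails (there are no CA certificates under /mnt/empty, so HTTPS verification must fail). A standalone sketch of the same exit-status inversion; the helper name is illustrative, not part of the deleted test:

# Sketch of the "; [ $? != 0 ]" exit-status inversion used above: the shell
# command exits 0 only when the wrapped command fails, so a caller that treats
# non-zero exit as an error accepts the expected failure.
import subprocess

def expect_failure(cmd):
    rc = subprocess.call(cmd + ' ; [ $? != 0 ]', shell=True)
    if rc != 0:
        raise AssertionError('command unexpectedly succeeded: %s' % cmd)

# curl must NOT be able to verify https://www.google.com with an empty CA path.
expect_failure('/usr/bin/curl -sSIo /dev/null --capath /mnt/empty '
               'https://www.google.com')
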
diff --git a/client/site_tests/platform_Perf/control b/client/site_tests/platform_Perf/control
deleted file mode 100644
index 5f236bf..0000000
--- a/client/site_tests/platform_Perf/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'sque dhsharp rapati'
-NAME = 'platform_Perf'
-PURPOSE = 'Verify that the perf tool works properly.'
-
-CRITERIA = """
-Successfully collect a perf data profile and verify that the contents are well
-formed.
-"""
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'platform'
-TEST_TYPE = 'client'
-JOB_RETRIES = 2
-
-ATTRIBUTES = 'suite:bvt-perbuild'
-
-DOC = """
-Runs 'perf record' and 'perf report'.
-"""
-
-job.run_test('platform_Perf')
diff --git a/client/site_tests/platform_Perf/platform_Perf.py b/client/site_tests/platform_Perf/platform_Perf.py
deleted file mode 100644
index ca860d0..0000000
--- a/client/site_tests/platform_Perf/platform_Perf.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import errno, os, re, subprocess
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-
-class platform_Perf(test.test):
-    """
-    Gathers perf data and makes sure it is well-formed.
-    """
-    version = 1
-
-    # Whitelist of DSOs that are expected to appear in perf data from a CrOS
-    # device. The actual name may change so use regex pattern matching. This is
-    # a list of allowed but not required DSOs. With this list, we can filter out
-    # unknown DSOs that might not have a build ID, e.g. JIT code.
-    _KERNEL_NAME_REGEX = re.compile(r'.*kernel\.kallsyms.*')
-    _DSO_WHITELIST_REGEX = [
-      _KERNEL_NAME_REGEX,
-      re.compile(r'bash'),
-      re.compile(r'chrome'),
-      re.compile(r'ld-.*\.so.*'),
-      # For simplicity since the libbase binaries are built together, we assume
-      # that if one of them (libbase-core) was properly built and passes this
-      # test, then the others will pass as well. It's easier than trying to
-      # include all libbase-* while filtering out libbase-XXXXXX.so, which is a
-      # text file that links to the other files.
-      re.compile(r'libbase-core-.*\.so.*'),
-      re.compile(r'libc-.*\.so.*'),
-      re.compile(r'libdbus-.*\.so.*'),
-      re.compile(r'libpthread-.*\.so.*'),
-      re.compile(r'libstdc\+\+.*\.so\..*'),
-    ]
-
-
-    def run_once(self):
-        """
-        Collect a perf data profile and check the detailed perf report.
-        """
-        keyvals = {}
-        num_errors = 0
-
-        try:
-            # Generate a unique temporary file name for the perf data.
-            perf_file_path = os.tempnam()
-
-            # Perf command for recording a profile.
-            perf_record_args = [ 'perf', 'record', '-a', '-o', perf_file_path,
-                                 '--', 'sleep', '2']
-            # Perf command for getting a detailed report.
-            perf_report_args = [ 'perf', 'report', '-D', '-i', perf_file_path ]
-            # Perf command for getting a report grouped by DSO name.
-            perf_report_dso_args = [ 'perf', 'report', '--sort', 'dso', '-i',
-                                     perf_file_path ]
-            # Perf command for getting a list of all build IDs in a data file.
-            perf_buildid_list_args = [ 'perf', 'buildid-list', '-i',
-                                       perf_file_path ]
-
-            try:
-                subprocess.check_output(perf_record_args,
-                                        stderr=subprocess.STDOUT)
-            except subprocess.CalledProcessError as cmd_error:
-                raise error.TestFail("Running command [%s] failed: %s" %
-                                     (' '.join(perf_record_args),
-                                      cmd_error.output))
-
-            # Make sure the file still exists.
-            if not os.path.isfile(perf_file_path):
-                raise error.TestFail('Could not find perf output file: ' +
-                                     perf_file_path)
-
-            # Get detailed perf data view and extract the line containing the
-            # kernel MMAP summary.
-            kernel_mapping = None
-            p = subprocess.Popen(perf_report_args, stdout=subprocess.PIPE)
-            for line in p.stdout:
-                if ('PERF_RECORD_MMAP' in line and
-                    self._KERNEL_NAME_REGEX.match(line)):
-                    kernel_mapping = line
-                    break
-
-            # Read the rest of output to EOF.
-            for _ in p.stdout:
-                pass
-            p.wait()
-
-            # Generate a list of whitelisted DSOs from the perf report.
-            dso_list = []
-            p = subprocess.Popen(perf_report_dso_args, stdout=subprocess.PIPE)
-            for line in p.stdout:
-                # Skip comments.
-                if line.startswith('#'):
-                    continue
-                # The output consists of percentage and DSO name.
-                tokens = line.split()
-                if len(tokens) < 2:
-                    continue
-                # Store the DSO name if it appears in the whitelist.
-                dso_name = tokens[1]
-                for regex in self._DSO_WHITELIST_REGEX:
-                    if regex.match(dso_name):
-                        dso_list += [dso_name]
-
-            p.wait()
-
-            # Generate a mapping of DSOs to their build IDs.
-            dso_to_build_ids = {}
-            p = subprocess.Popen(perf_buildid_list_args, stdout=subprocess.PIPE)
-            for line in p.stdout:
-                # The output consists of build ID and DSO name.
-                tokens = line.split()
-                if len(tokens) < 2:
-                    continue
-                # The build ID list uses the full path of the DSOs, while the
-                # report output uses only the basename. Store the basename to
-                # make lookups easier.
-                dso_to_build_ids[os.path.basename(tokens[1])] = tokens[0]
-
-            p.wait()
-
-
-        finally:
-            # Delete the perf data file.
-            try:
-                os.remove(perf_file_path)
-            except OSError as e:
-                if e.errno != errno.ENOENT: raise
-
-        if kernel_mapping is None:
-            raise error.TestFail('Could not find kernel mapping in perf '
-                                 'report.')
-        # Get the kernel mapping values.
-        kernel_mapping = kernel_mapping.split(':')[2]
-        start, length, pgoff = re.sub(r'[][()@]', ' ',
-                                      kernel_mapping).strip().split()
-
-        # Check that all whitelisted DSOs from the report have build IDs.
-        kernel_name = None
-        kernel_build_id = None
-        for dso in dso_list:
-            if dso not in dso_to_build_ids:
-                raise error.TestFail('Could not find build ID for %s' % dso)
-            if self._KERNEL_NAME_REGEX.match(dso):
-                kernel_name = dso
-                kernel_build_id = dso_to_build_ids[dso]
-
-        # Make sure the kernel build ID was found.
-        if not kernel_build_id:
-            raise error.TestFail('Could not find kernel entry (containing '
-                                 '"%s") in build ID list' %
-                                 self._KERNEL_NAME_REGEX.pattern)
-
-        # Write keyvals.
-        keyvals = {}
-        keyvals['start'] = start
-        keyvals['length'] = length
-        keyvals['pgoff'] = pgoff
-        keyvals['kernel_name'] = kernel_name
-        keyvals['kernel_build_id'] = kernel_build_id
-        self.write_perf_keyval(keyvals)
-
-        # Make sure that the kernel mapping values follow an expected pattern.
-        #
-        # Expect one of two patterns:
-        # (1) start == pgoff, e.g.:
-        #   start=0x80008200
-        #   pgoff=0x80008200
-        #   len  =0xfffffff7ff7dff
-        # (2) start < pgoff < start + len, e.g.:
-        #   start=0x3bc00000
-        #   pgoff=0xffffffffbcc00198
-        #   len  =0xffffffff843fffff
-        start = int(start, 0)
-        length = int(length, 0)
-        pgoff = int(pgoff, 0)
-        if not (start == pgoff or start < pgoff < start + length):
-            raise error.TestFail('Improper kernel mapping values!')
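
The mapping check above can be exercised in isolation. A sketch that runs the same parsing and the same "start == pgoff or start < pgoff < start + length" invariant over a synthetic PERF_RECORD_MMAP line; the numeric values come from pattern (2) in the comment above, while the surrounding line layout is illustrative:

# Sketch of the kernel-mapping invariant checked above, applied to a synthetic
# PERF_RECORD_MMAP line. Numbers are taken from pattern (2) in the comment;
# the rest of the line layout is illustrative.
import re

def kernel_mapping_ok(mmap_line):
    fields = mmap_line.split(':')[2]
    start, length, pgoff = re.sub(r'[][()@]', ' ', fields).strip().split()
    start, length, pgoff = int(start, 0), int(length, 0), int(pgoff, 0)
    return start == pgoff or start < pgoff < start + length

sample = ('0 0 0x0 [0x0]: PERF_RECORD_MMAP 0/0: '
          '[0x3bc00000(0xffffffff843fffff) @ 0xffffffffbcc00198]: '
          'x [kernel.kallsyms]_text')
assert kernel_mapping_ok(sample)
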
diff --git a/client/site_tests/platform_Pkcs11ChangeAuthData/control b/client/site_tests/platform_Pkcs11ChangeAuthData/control
deleted file mode 100644
index 4bfd06f..0000000
--- a/client/site_tests/platform_Pkcs11ChangeAuthData/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Authors"
-NAME = "platform_Pkcs11ChangeAuthData"
-PURPOSE = "Tests changing authorization data for a PKCS #11 token."
-CRITERIA = "Fails if token does not correctly change authorization data."
-ATTRIBUTES = "suite:experimental"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This tests changing authorization data for a PKCS #11 token.
-"""
-
-job.run_test('platform_Pkcs11ChangeAuthData')
diff --git a/client/site_tests/platform_Pkcs11ChangeAuthData/platform_Pkcs11ChangeAuthData.py b/client/site_tests/platform_Pkcs11ChangeAuthData/platform_Pkcs11ChangeAuthData.py
deleted file mode 100644
index 2ea292b..0000000
--- a/client/site_tests/platform_Pkcs11ChangeAuthData/platform_Pkcs11ChangeAuthData.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import pkcs11
-
-class platform_Pkcs11ChangeAuthData(test.test):
-    """Tests changing the authorization data of a PKCS #11 test token."""
-    version = 1
-
-    def run_once(self):
-        pkcs11.setup_p11_test_token(True, 'auth1')
-        pkcs11.load_p11_test_token('auth1')
-        utils.system('p11_replay --inject --replay_wifi')
-        # Change auth data while the token is not loaded.
-        pkcs11.unload_p11_test_token()
-        pkcs11.change_p11_test_token_auth_data('auth1', 'auth2')
-        pkcs11.load_p11_test_token('auth2')
-        result = utils.system('p11_replay --replay_wifi', ignore_status=True)
-        if result != 0:
-            raise error.TestFail('Change authorization data failed (1).')
-        # Change auth data while the token is loaded.
-        pkcs11.change_p11_test_token_auth_data('auth2', 'auth3')
-        pkcs11.unload_p11_test_token()
-        pkcs11.load_p11_test_token('auth3')
-        result = utils.system('p11_replay --replay_wifi', ignore_status=True)
-        if result != 0:
-            raise error.TestFail('Change authorization data failed (2).')
-        # Attempt change with incorrect current auth data.
-        pkcs11.unload_p11_test_token()
-        pkcs11.change_p11_test_token_auth_data('bad_auth', 'auth4')
-        pkcs11.load_p11_test_token('auth3')
-        result = utils.system('p11_replay --replay_wifi', ignore_status=True)
-        if result != 0:
-            raise error.TestFail('Change authorization data failed (3).')
-        # Verify old auth data no longer works after change. This also verifies
-        # recovery from bad auth data - expect a functional, empty token.
-        pkcs11.unload_p11_test_token()
-        pkcs11.change_p11_test_token_auth_data('auth3', 'auth5')
-        pkcs11.load_p11_test_token('auth3')
-        result = utils.system('p11_replay --replay_wifi', ignore_status=True)
-        if result == 0:
-            raise error.TestFail('Bad authorization data allowed (1).')
-        utils.system('p11_replay --inject --replay_wifi')
-        pkcs11.unload_p11_test_token()
-        # Token should have been recreated with 'auth3'.
-        pkcs11.load_p11_test_token('auth3')
-        result = utils.system('p11_replay --replay_wifi', ignore_status=True)
-        if result != 0:
-            raise error.TestFail('Token not valid after recovery.')
-        pkcs11.unload_p11_test_token()
-        # Since token was recovered, previous correct auth should be rejected.
-        pkcs11.load_p11_test_token('auth5')
-        result = utils.system('p11_replay --replay_wifi', ignore_status=True)
-        if result == 0:
-            raise error.TestFail('Bad authorization data allowed (2).')
-        pkcs11.unload_p11_test_token()
-        pkcs11.cleanup_p11_test_token()
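
Each step above repeats the same probe with a different auth value and expectation: load the test token, replay a wifi credential, check the exit status, unload. A sketch of that recurring assertion factored into a helper; the helper itself is illustrative, while the pkcs11 and p11_replay calls are the ones the deleted test already uses:

# Sketch of the recurring load/replay/unload probe used above. The helper is
# illustrative; the pkcs11 and p11_replay calls mirror the deleted test.
from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.cros import pkcs11

def assert_token_state(auth, expect_ok, msg):
    pkcs11.load_p11_test_token(auth)
    ok = utils.system('p11_replay --replay_wifi', ignore_status=True) == 0
    pkcs11.unload_p11_test_token()
    if ok != expect_ok:
        raise error.TestFail(msg)

# e.g. after changing auth1 -> auth2, the new value must work:
#   assert_token_state('auth2', True, 'Change authorization data failed (1).')
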
diff --git a/client/site_tests/platform_Pkcs11Events/control b/client/site_tests/platform_Pkcs11Events/control
deleted file mode 100644
index be6a334..0000000
--- a/client/site_tests/platform_Pkcs11Events/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Authors"
-NAME = "platform_Pkcs11Events"
-PURPOSE = "Tests the response of the PKCS #11 system to login events."
-CRITERIA = "Fails if the PKCS #11 system does not respond correctly to events."
-ATTRIBUTES = "suite:experimental"
-TIME = "SHORT"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This tests the response of the PKCS #11 system to login events.
-"""
-
-job.run_test('platform_Pkcs11Events', num_tokens=2, num_events=20)
diff --git a/client/site_tests/platform_Pkcs11Events/control.regression b/client/site_tests/platform_Pkcs11Events/control.regression
deleted file mode 100644
index 03fe3a3..0000000
--- a/client/site_tests/platform_Pkcs11Events/control.regression
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Authors"
-NAME = "platform_Pkcs11Events"
-PURPOSE = "Tests the response of the PKCS #11 system to login events."
-CRITERIA = "Fails if the PKCS #11 system does not respond correctly to events."
-ATTRIBUTES = "suite:regression"
-TIME = "SHORT"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This tests the response of the PKCS #11 system to login events.
-"""
-
-job.run_test('platform_Pkcs11Events', num_tokens=7, num_events=200)
diff --git a/client/site_tests/platform_Pkcs11Events/platform_Pkcs11Events.py b/client/site_tests/platform_Pkcs11Events/platform_Pkcs11Events.py
deleted file mode 100644
index 0edc708..0000000
--- a/client/site_tests/platform_Pkcs11Events/platform_Pkcs11Events.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import random, shutil
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import pkcs11
-
-class platform_Pkcs11Events(test.test):
-    """Stresses the PKCS #11 system with random login/logout events."""
-    version = 1
-
-    def run_once(self, num_tokens, num_events):
-        # Setup some token directories.
-        token_list = ['/tmp/chaps%d' % x for x in range(num_tokens)]
-        pkcs11.setup_p11_test_token(True)
-        shutil.rmtree('%s/database' % pkcs11.TMP_CHAPS_DIR, ignore_errors=True)
-        for token in token_list:
-            shutil.rmtree(token, ignore_errors=True)
-            pkcs11.copytree_with_ownership(pkcs11.TMP_CHAPS_DIR, token)
-
-        # Setup a key on each token.
-        for token in token_list:
-            utils.system('chaps_client --load --path=%s --auth=%s' %
-                         (token, token))
-            utils.system('p11_replay --inject')
-            utils.system('chaps_client --unload --path=%s' % token)
-
-        # Follow a login by an immediate logout.
-        for token in token_list:
-            utils.system('chaps_client --load --path=%s --auth=%s' %
-                         (token, token))
-        for token in token_list:
-            utils.system('chaps_client --unload --path=%s' % token)
-
-        # Hit the tokens with a bunch of random login / logout events.
-        for i in range(num_events):
-            token = random.choice(token_list)
-            event = random.choice(['login', 'logout'])
-            if event == 'login':
-                utils.system('chaps_client --load --path=%s --auth=%s' %
-                             (token, token))
-                # Note: This won't necessarily test the token we just loaded
-                # but we do know there should be at least one token available.
-                result = utils.system('p11_replay --replay_wifi',
-                                      ignore_status=True)
-                if result != 0:
-                    raise error.TestFail(
-                        'At least one token is not functional.')
-            else:
-                utils.system('chaps_client --unload --path=%s' % token)
-
-        # See if each token is still functional.
-        for token in token_list:
-            utils.system('chaps_client --unload --path=%s' % token)
-        for token in token_list:
-            utils.system('chaps_client --load --path=%s --auth=%s' %
-                         (token, token))
-            result = utils.system('p11_replay --replay_wifi',
-                                  ignore_status=True)
-            if result != 0:
-                raise error.TestFail('Token is not functional: %s' % token)
-            utils.system('chaps_client --unload --path=%s' % token)
-            shutil.rmtree(token, ignore_errors=True)
-
-        pkcs11.cleanup_p11_test_token()
diff --git a/client/site_tests/platform_Pkcs11InitOnLogin/control b/client/site_tests/platform_Pkcs11InitOnLogin/control
deleted file mode 100644
index 2f18ad1..0000000
--- a/client/site_tests/platform_Pkcs11InitOnLogin/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dkrahn"
-NAME = "platform_Pkcs11InitOnLogin"
-PURPOSE = "Tests PKCS#11 initialization on login"
-CRITERIA = "Fails if initialization fails during login."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:bvt-perbuild"
-
-DOC = """
-This tests if initialization of a user PKCS #11 token succeeds during login. It
-also tests whether objects stored in the token persist through to a subsequent
-login.
-"""
-
-job.run_test('platform_Pkcs11InitOnLogin')
diff --git a/client/site_tests/platform_Pkcs11InitOnLogin/platform_Pkcs11InitOnLogin.py b/client/site_tests/platform_Pkcs11InitOnLogin/platform_Pkcs11InitOnLogin.py
deleted file mode 100644
index 1e4464e..0000000
--- a/client/site_tests/platform_Pkcs11InitOnLogin/platform_Pkcs11InitOnLogin.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import pkcs11
-
-class platform_Pkcs11InitOnLogin(test.test):
-    """This test logs in and verifies that the TPM token is working."""
-    version = 1
-
-    def run_once(self):
-        start_time = time.time()
-        with chrome.Chrome() as cr:
-            if not pkcs11.wait_for_pkcs11_token():
-                raise error.TestFail('The PKCS #11 token is not available.')
-            end_time = time.time()
-            self.write_perf_keyval(
-                { 'seconds_pkcs11_onlogin_init': end_time - start_time } )
-            if not pkcs11.verify_pkcs11_initialized():
-                raise error.TestFail('Initialized token failed checks!')
-            if not pkcs11.inject_and_test_key():
-                raise error.TestFail('Failed to inject a key.')
-        # Login again with the same account.
-        with chrome.Chrome(dont_override_profile=True) as cr:
-            if not pkcs11.wait_for_pkcs11_token():
-                raise error.TestFail(
-                    'The PKCS #11 token is no longer available.')
-            if not pkcs11.test_and_cleanup_key():
-                raise error.TestFail('The PKCS #11 key is no longer valid.')
-
diff --git a/client/site_tests/platform_Pkcs11InitUnderErrors/control b/client/site_tests/platform_Pkcs11InitUnderErrors/control
deleted file mode 100644
index ba48f6b..0000000
--- a/client/site_tests/platform_Pkcs11InitUnderErrors/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Authors"
-NAME = "platform_Pkcs11InitUnderErrors"
-PURPOSE = "Tests PKCS#11 initialization under various system states."
-CRITERIA = "Fails if initialization fails under any of the initial states."
-ATTRIBUTES = "suite:regression"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This tests PKCS#11 initialization under various kinds of error conditions -
-missing opencryptoki files, corrupted state information, etc.
-"""
-
-job.run_test('platform_Pkcs11InitUnderErrors')
diff --git a/client/site_tests/platform_Pkcs11InitUnderErrors/platform_Pkcs11InitUnderErrors.py b/client/site_tests/platform_Pkcs11InitUnderErrors/platform_Pkcs11InitUnderErrors.py
deleted file mode 100644
index de19b55..0000000
--- a/client/site_tests/platform_Pkcs11InitUnderErrors/platform_Pkcs11InitUnderErrors.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os, shutil
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import pkcs11
-
-class Pkcs11InitFailure(error.TestError):
-    pass
-
-
-class platform_Pkcs11InitUnderErrors(test.test):
-    """Tests PKCS #11 initialization under various error conditions."""
-    version = 1
-
-    def __chaps_init_iteration(self):
-        # Try initializing and using the temporary chaps test token.
-        pkcs11.load_p11_test_token()
-        if not pkcs11.verify_p11_test_token():
-            return False
-        pkcs11.unload_p11_test_token()
-        pkcs11.restore_p11_test_token()
-        return True
-
-    def __test_chaps_init(self):
-        pkcs11.setup_p11_test_token(True)
-        dbpath = pkcs11.get_p11_test_token_db_path()
-        # Make sure the test token is functional.
-        if not self.__chaps_init_iteration():
-            raise error.TestFail('Token verification failed.')
-        # Erase the chaps database directory.
-        shutil.rmtree(dbpath, ignore_errors=True)
-        if not self.__chaps_init_iteration():
-            raise error.TestFail('Token verification failed after erasing the '
-                                 'database directory.')
-        # Corrupt each file in the chaps database directory.
-        for f in os.listdir(dbpath):
-            utils.system('dd if=/dev/zero of=%s bs=1 count=1000 >/dev/null 2>&1'
-                % os.path.join(dbpath, f))
-        if not self.__chaps_init_iteration():
-            raise error.TestFail('Token verification failed after corrupting '
-                                 'the database.')
-        pkcs11.cleanup_p11_test_token()
-
-    def run_once(self):
-        self.__test_chaps_init()
-        return
-
diff --git a/client/site_tests/platform_Pkcs11LiveTest/platform_Pkcs11LiveTest.py b/client/site_tests/platform_Pkcs11LiveTest/platform_Pkcs11LiveTest.py
deleted file mode 100644
index f75e31a..0000000
--- a/client/site_tests/platform_Pkcs11LiveTest/platform_Pkcs11LiveTest.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-SYSTEM_SBIN = '/usr/sbin'
-LIVE_TEST_LIST = ['chapsd_test', 'tpm_utility_test']
-
-class platform_Pkcs11LiveTest(test.test):
-    """
-    This test runs the Chaps Live tests on a device with a TPM.
-
-    Currently we have two test suites that run:
-    1) chapsd_test
-    2) tpm_utility_test
-    """
-
-    version = 1
-
-    def run_once(self):
-        for live_test in LIVE_TEST_LIST:
-            test_path = os.path.join(SYSTEM_SBIN, live_test)
-            exit_status = utils.system(test_path, ignore_status=True)
-            if exit_status != 0:
-                raise error.TestFail(live_test + " has failures")
diff --git a/client/site_tests/platform_Pkcs11LoadPerf/control b/client/site_tests/platform_Pkcs11LoadPerf/control
deleted file mode 100644
index 3ea0a28..0000000
--- a/client/site_tests/platform_Pkcs11LoadPerf/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dkrahn"
-NAME = "platform_Pkcs11LoadPerf"
-TIME = "SHORT"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This tests the performance of loading a PKCS #11 token.
-"""
-
-job.run_test('platform_Pkcs11LoadPerf',
-             iterations=3,
-             constraints=['cert_ready_ms < 1500', 'key_ready_ms < 3000'])
diff --git a/client/site_tests/platform_Pkcs11LoadPerf/platform_Pkcs11LoadPerf.py b/client/site_tests/platform_Pkcs11LoadPerf/platform_Pkcs11LoadPerf.py
deleted file mode 100644
index 4027b37..0000000
--- a/client/site_tests/platform_Pkcs11LoadPerf/platform_Pkcs11LoadPerf.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import re
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import pkcs11
-
-class platform_Pkcs11LoadPerf(test.test):
-    """This tests the performance of loading a PKCS #11 token."""
-
-    version = 1
-
-    def run_once(self):
-        pkcs11.setup_p11_test_token(True)
-        pkcs11.load_p11_test_token()
-        # Prepare the token with a key.
-        utils.system('p11_replay --inject')
-        pkcs11.unload_p11_test_token()
-        pkcs11.load_p11_test_token()
-        # List the objects and gather timing data.
-        output = utils.system_output('p11_replay --list_objects')
-        # The output will have multiple lines like 'Elapsed: 25ms'. We are
-        # expecting at least three:
-        # 1) How long it took to open a session.
-        # 2) How long it took to list public objects.
-        # 3) How long it took to list private objects.
-        # The following extracts the numeric value from each timing statement.
-        time_list = [int(match.group(1)) for match in
-            re.finditer(r'Elapsed: (\d+)ms', output, flags=re.MULTILINE)]
-        if len(time_list) < 3:
-            raise error.TestFail('Expected output not found.')
-        self.output_perf_value(description='Key_Ready',
-                               value=(time_list[0] + time_list[1] + time_list[2]),
-                               units='ms', higher_is_better=False)
-        self.write_perf_keyval(
-            {'cert_ready_ms': time_list[0] + time_list[1],
-             'key_ready_ms': time_list[0] + time_list[1] + time_list[2]})
-        pkcs11.cleanup_p11_test_token()
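
The timing extraction above can be checked against canned output. A worked example using made-up p11_replay output in the "Elapsed: NNms" format the comment describes (the three durations are illustrative):

# Worked example of the 'Elapsed: NNms' extraction above, run against made-up
# p11_replay --list_objects output.
import re

sample_output = ('Elapsed: 25ms\n'    # open session
                 'Elapsed: 12ms\n'    # list public objects
                 'Elapsed: 180ms\n')  # list private objects
time_list = [int(m.group(1)) for m in
             re.finditer(r'Elapsed: (\d+)ms', sample_output, flags=re.MULTILINE)]
assert time_list == [25, 12, 180]
# cert_ready_ms = 25 + 12 = 37; key_ready_ms = 25 + 12 + 180 = 217
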
diff --git a/client/site_tests/platform_PrintJob/control b/client/site_tests/platform_PrintJob/control
deleted file mode 100644
index 1341adc..0000000
--- a/client/site_tests/platform_PrintJob/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kalin"
-NAME = "platform_PrintJob"
-TIME = "SHORT"
-TEST_TYPE = "client"
-CRITERIA = """
-The test fails if the DUT is:
-- unable to find the printer emulated by chameleon after it is selected in
-  the print dialog
-- unable to receive a notification for a successful print job
-"""
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform, desktopui"
-ATTRIBUTES = "suite:audio_advanced"
-JOB_RETRIES = 1
-DEPENDENCIES = "test_usbaudio, chameleon"
-
-DOC = """
-This test emulates a printer using a chameleon board connected via a USB
-cable, selects that printer in the print dialog, and sends a print job.
-"""
-
-host = next(iter(job.hosts))
-job.run_test('platform_PrintJob', host=host, args=args)
diff --git a/client/site_tests/platform_PrintJob/platform_PrintJob.py b/client/site_tests/platform_PrintJob/platform_PrintJob.py
deleted file mode 100644
index e5368b4..0000000
--- a/client/site_tests/platform_PrintJob/platform_PrintJob.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.chameleon import chameleon
-from autotest_lib.client.common_lib import ui_utils_helpers
-
-_CHECK_TIMEOUT = 20
-_PRINTER_NAME = "HP OfficeJet g55"
-_PRINTING_COMPLETE_NOTIF = "Printing complete"
-_PRINTING_NOTIF = "Printing"
-_STEPS_BETWEEN_CONTROLS = 4
-_USB_PRINTER_CONNECTED_NOTIF = "USB printer connected"
-
-_SHORT_WAIT = 2
-
-class platform_PrintJob(test.test):
-    """
-    E2E test: Chrome is brought up, a local PDF file is opened, the print
-    dialog is opened, the chameleon gadget-driver printer is selected, and a
-    print job is executed. The test verifies that the print job finished
-    successfully.
-    """
-    version = 1
-
-    def cleanup(self):
-        if hasattr(self, 'browser'):
-            self.browser.close()
-        if self.printer_capture_started:
-            self.usb_printer.StopCapturingPrinterData()
-        if self.printer_connected:
-            self.usb_printer.Unplug()
-
-    def check_notification(self, notification):
-        """Polls for a notification whose title contains the given string."""
-
-        def find_notification(title=None):
-            notifications = self.cr.get_visible_notifications()
-            if notifications is None:
-                return False
-            for n in notifications:
-                if title in n['title']:
-                    return True
-            return False
-
-        utils.poll_for_condition(
-                condition=lambda: find_notification(notification),
-                desc='Notification %s NOT found' % notification,
-                timeout=_CHECK_TIMEOUT, sleep_interval=_SHORT_WAIT)
-
-    def navigate_to_pdf(self):
-        """Navigate to the pdf page to print"""
-        self.cr.browser.platform.SetHTTPServerDirectories(self.bindir)
-        tab = self.cr.browser.tabs.New()
-        pdf_path = os.path.join(self.bindir, 'to_print.pdf')
-        tab.Navigate(self.cr.browser.platform.http_server.UrlOf(pdf_path))
-        tab.WaitForDocumentReadyStateToBeInteractiveOrBetter(
-                timeout=_CHECK_TIMEOUT);
-        time.sleep(_SHORT_WAIT)
-
-    def run_once(self, host, args):
-        """Run the test."""
-        # Set these to know if the usb_printer needs to be handled post test.
-        self.printer_capture_started = False
-        self.printer_connected = False
-
-        # Make the chameleon host known to the DUT host; see crbug.com/862646.
-        chameleon_args = 'chameleon_host=' + host.hostname + '-chameleon'
-        args.append(chameleon_args)
-
-        chameleon_board = chameleon.create_chameleon_board(host.hostname, args)
-        chameleon_board.setup_and_reset(self.outputdir)
-        self.usb_printer = chameleon_board.get_usb_printer()
-        self.usb_printer.SetPrinterModel(1008, 17, _PRINTER_NAME)
-
-        with chrome.Chrome(autotest_ext=True,
-                           init_network_controller=True) as self.cr:
-            self.usb_printer.Plug()
-            self.printer_connected = True
-            self.check_notification(_USB_PRINTER_CONNECTED_NOTIF)
-            logging.info('Chameleon printer connected!')
-            self.navigate_to_pdf()
-            time.sleep(_SHORT_WAIT)
-            logging.info('PDF file opened in browser!')
-            self.ui_helper = ui_utils_helpers.UIPrinterHelper(chrome=self.cr)
-            self.ui_helper.print_to_custom_printer("Chameleon " + _PRINTER_NAME)
-            self.usb_printer.StartCapturingPrinterData()
-            self.printer_capture_started = True
-            self.check_notification(_PRINTING_NOTIF)
-            self.check_notification(_PRINTING_COMPLETE_NOTIF)
diff --git a/client/site_tests/platform_PrintJob/to_print.pdf b/client/site_tests/platform_PrintJob/to_print.pdf
deleted file mode 100644
index 44841ab..0000000
--- a/client/site_tests/platform_PrintJob/to_print.pdf
+++ /dev/null
Binary files differ
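The deleted platform_PrintJob test drives all of its verification through notification titles (printer connected, printing, printing complete). A minimal self-contained sketch of that polling pattern, in plain Python rather than the autotest utils.poll_for_condition helper the real test used:

    import time

    def poll_for_title(get_titles, wanted, timeout=20, interval=2):
        """Poll until one of the visible notification titles contains `wanted`."""
        deadline = time.time() + timeout
        while time.time() < deadline:
            if any(wanted in title for title in get_titles()):
                return True
            time.sleep(interval)
        raise TimeoutError('Notification %r NOT found' % wanted)

    # Illustrative fake notification source standing in for the Chrome
    # get_visible_notifications() call used by the real test.
    fake = iter([[], ['USB printer connected'], ['Printing'], ['Printing complete']])
    print(poll_for_title(lambda: next(fake), 'Printing complete', interval=0))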
diff --git a/client/site_tests/platform_PrinterPpds/archiver.py b/client/site_tests/platform_PrinterPpds/archiver.py
index abf6cc2..5aa9756 100644
--- a/client/site_tests/platform_PrinterPpds/archiver.py
+++ b/client/site_tests/platform_PrinterPpds/archiver.py
@@ -96,7 +96,7 @@
                 max_prefixes_per_archive)
         self._filenames_prefixes = dict()
         prefixes.reverse()
-        for ap, fc in sorted(self._archives_names.iteritems()):
+        for ap, fc in sorted(self._archives_names.items()):
             self._archives_names[ap] = [fc, []]
             while fc > 0:
                 self._filenames_prefixes[prefixes.pop()] = [ap, set()]
@@ -110,7 +110,7 @@
         @param prefix: prefix of filename that the new file will be saved with
         @param name: the rest of the filename of the new file; in summary, the
                 resultant filename of the new file will be prefix+name
-        @param content: a content of the file
+        @param content: the content of the file (bytes)
         @param apply_gzip: if true, the added file will be gzipped, the suffix
                 .gz will be added to its resultant filename
 
diff --git a/client/site_tests/platform_PrinterPpds/configurator.py b/client/site_tests/platform_PrinterPpds/configurator.py
index 9387a22..6cf7c3e 100644
--- a/client/site_tests/platform_PrinterPpds/configurator.py
+++ b/client/site_tests/platform_PrinterPpds/configurator.py
@@ -65,8 +65,9 @@
         # a line with 'LogLevel' option
         lines = self._run_as_root(["cat", _CUPS_CONF_FILE]).splitlines()
 
+        line_no = None
         for index, line in enumerate(lines):
-            if line.startswith('LogLevel'):
+            if line.startswith(b'LogLevel'):
                 line_no = index
                 break
         if line_no is None:
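The b'LogLevel' prefix in the configurator change matters because under Python 3 the captured command output is bytes, and bytes.startswith() rejects a str argument; a short sketch of the failure mode and the fix, using a stand-in for the `cat cupsd.conf` output:

    # Stand-in for the bytes output of running `cat` on the CUPS config file.
    conf_output = b'MaxLogSize 0\nLogLevel warn\n'

    line_no = None
    for index, line in enumerate(conf_output.splitlines()):
        # `line` is bytes, so the prefix must be bytes as well;
        # line.startswith('LogLevel') would raise TypeError on Python 3.
        if line.startswith(b'LogLevel'):
            line_no = index
            break
    assert line_no == 1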
diff --git a/client/site_tests/platform_PrinterPpds/control.core_dump b/client/site_tests/platform_PrinterPpds/control.core_dump
index bf6f261..db92670 100644
--- a/client/site_tests/platform_PrinterPpds/control.core_dump
+++ b/client/site_tests/platform_PrinterPpds/control.core_dump
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test runs the following procedure for PPD files from ppds_core directory:
diff --git a/client/site_tests/platform_PrinterPpds/control.core_dump_debug b/client/site_tests/platform_PrinterPpds/control.core_dump_debug
index d3e6e68..8ba421f 100644
--- a/client/site_tests/platform_PrinterPpds/control.core_dump_debug
+++ b/client/site_tests/platform_PrinterPpds/control.core_dump_debug
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test runs the following procedure for PPD files from ppds_core directory:
diff --git a/client/site_tests/platform_PrinterPpds/control.core_test b/client/site_tests/platform_PrinterPpds/control.core_test
index b8994a7..b9d99e6 100644
--- a/client/site_tests/platform_PrinterPpds/control.core_test
+++ b/client/site_tests/platform_PrinterPpds/control.core_test
@@ -16,6 +16,7 @@
 TEST_CLASS = "platform"
 ATTRIBUTES = "suite:bvt-perbuild"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test runs the following procedure for PPD files from ppds_core directory:
diff --git a/client/site_tests/platform_PrinterPpds/control.core_test_debug b/client/site_tests/platform_PrinterPpds/control.core_test_debug
index fda2b4f..a8224fe 100644
--- a/client/site_tests/platform_PrinterPpds/control.core_test_debug
+++ b/client/site_tests/platform_PrinterPpds/control.core_test_debug
@@ -15,6 +15,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test runs the following procedure for PPD files from ppds_core directory:
diff --git a/client/site_tests/platform_PrinterPpds/control.ext_dump b/client/site_tests/platform_PrinterPpds/control.ext_dump
index 32533f0..fb9cb43 100644
--- a/client/site_tests/platform_PrinterPpds/control.ext_dump
+++ b/client/site_tests/platform_PrinterPpds/control.ext_dump
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test runs the following procedure for PPD files from ppds_ext directory:
diff --git a/client/site_tests/platform_PrinterPpds/control.ext_dump_debug b/client/site_tests/platform_PrinterPpds/control.ext_dump_debug
index eb63103..91c9423 100644
--- a/client/site_tests/platform_PrinterPpds/control.ext_dump_debug
+++ b/client/site_tests/platform_PrinterPpds/control.ext_dump_debug
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test runs the following procedure for PPD files from ppds_ext directory:
diff --git a/client/site_tests/platform_PrinterPpds/control.ext_test b/client/site_tests/platform_PrinterPpds/control.ext_test
index 79e6ff0..32a250a 100644
--- a/client/site_tests/platform_PrinterPpds/control.ext_test
+++ b/client/site_tests/platform_PrinterPpds/control.ext_test
@@ -16,6 +16,7 @@
 TEST_CLASS = "platform"
 ATTRIBUTES = "suite:cups_weekly"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test runs the following procedure for PPD files from ppds_ext directory:
diff --git a/client/site_tests/platform_PrinterPpds/control.ext_test_debug b/client/site_tests/platform_PrinterPpds/control.ext_test_debug
index 64f9765..dd3322e 100644
--- a/client/site_tests/platform_PrinterPpds/control.ext_test_debug
+++ b/client/site_tests/platform_PrinterPpds/control.ext_test_debug
@@ -15,6 +15,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test runs the following procedure for PPD files from ppds_ext directory:
diff --git a/client/site_tests/platform_PrinterPpds/digests/annotation_highlight_square_with_ap.pdf.digests b/client/site_tests/platform_PrinterPpds/digests/annotation_highlight_square_with_ap.pdf.digests
index 9dfe2b3..b3eb48f 100644
--- a/client/site_tests/platform_PrinterPpds/digests/annotation_highlight_square_with_ap.pdf.digests
+++ b/client/site_tests/platform_PrinterPpds/digests/annotation_highlight_square_with_ap.pdf.digests
@@ -1,32 +1,8 @@
-brother-20191213-DCP7180DN.ppd.gz	039cde73ca628a83f22a90183bb70979	25774
-brother-20191213-DCP7195DW.ppd.gz	039cde73ca628a83f22a90183bb70979	25774
-brother-20191213-DCPL5500DN.ppd.gz	039cde73ca628a83f22a90183bb70979	25774
-brother-20191213-DCPL6600DW.ppd.gz	039cde73ca628a83f22a90183bb70979	25774
-brother-20191213-HL5595DNH.ppd.gz	039cde73ca628a83f22a90183bb70979	25774
-brother-20200131-DCP7025.ppd.gz	039cde73ca628a83f22a90183bb70979	25754
-brother-20200131-DCP8080DN.ppd.gz	039cde73ca628a83f22a90183bb70979	25774
-brother-20200615-DCP9030CDN.ppd.gz	75232ffdd097feaa4c4987a8dca5c5d2	4219402
-brother-20200615-DCPL8410CDW.ppd.gz	75232ffdd097feaa4c4987a8dca5c5d2	4219402
-brother-20200615-HLL3210CW.ppd.gz	75232ffdd097feaa4c4987a8dca5c5d2	4219402
-brother-20200615-MFC9350CDW.ppd.gz	75232ffdd097feaa4c4987a8dca5c5d2	4219402
-brother-20201006-DCP7080-cups-en.ppd.gz	9de2bf30c53b8e513b6e644e37ee18db	20118
-brother-20201006-DCP7080D-cups-en.ppd.gz	42e94a9f01e13f1054796173a3d38c3c	20118
-brother-20201006-DCP7090-cups-en.ppd.gz	42e94a9f01e13f1054796173a3d38c3c	20118
-brother-20201006-DCP7090DW-cups-en.ppd.gz	47f9da0260dee7b6f7422ad88d98c661	20118
-brother-20201006-HL2290-cups-en.ppd.gz	47f9da0260dee7b6f7422ad88d98c661	20118
-brother-20201006-HL2295D-cups-en.ppd.gz	14c55d5f59a3ac00b58192667dc7b10a	20118
-brother-20201006-MFCL2685DW-cups-en.ppd.gz	290ad53a641e1b2dee3bd7bdf00b2592	20118
-cups-20170101-Generic-PDF_Printer-PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13160
-dymo-20200714-lm280.ppd.gz	6f4b9652036a47137f20eb71408d3a94	2156
-dymo-20200714-lm400.ppd.gz	6f4b9652036a47137f20eb71408d3a94	2156
-dymo-20200714-lm420p.ppd.gz	543303859e81fb96a1965d81035510ba	2158
-dymo-20200714-lm450.ppd.gz	af031493c09f141578ccd858a637b452	2160
-dymo-20200714-lm500ts.ppd.gz	d10f465460f92419ca2ba4eba7055067	3610
-dymo-20200714-lmpc2.ppd.gz	96030bba7a4b6efa39e9a719549869a2	2158
-dymo-20200714-lmpc.ppd.gz	8b04d63ca9b329cae57189749ccaec79	2154
-dymo-20200714-lmpnp.ppd.gz	cded90ff56eb2756fbd4abf7246d4210	2150
-dymo-20200714-lmpnpw.ppd.gz	d8a3dbb9b42b04f7a2e2a9fdfa08f69f	3476
-dymo-20200714-lp350.ppd.gz	8b04d63ca9b329cae57189749ccaec79	2154
+brother-20200615-DCP9030CDN.ppd.gz	211cd12474702d78aaa3ba38aab7f2fa	4219407
+brother-20200615-DCPL8410CDW.ppd.gz	211cd12474702d78aaa3ba38aab7f2fa	4219407
+brother-20200615-HLL3210CW.ppd.gz	211cd12474702d78aaa3ba38aab7f2fa	4219407
+brother-20200615-MFC9350CDW.ppd.gz	211cd12474702d78aaa3ba38aab7f2fa	4219407
+cups-20170101-Generic-PDF_Printer-PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13156
 dymo-20200714-lw300.ppd.gz	4402250764b2bac7f1aa966829408c07	2107
 dymo-20200714-lw315.ppd.gz	4402250764b2bac7f1aa966829408c07	2107
 dymo-20200714-lw320.ppd.gz	08d8ddbcfac33146561595e013b30859	13189
@@ -34,15 +10,12 @@
 dymo-20200714-lw330t.ppd.gz	08d8ddbcfac33146561595e013b30859	13189
 dymo-20200714-lw400.ppd.gz	6d4f510d1db714d0822a0e2c52638022	13191
 dymo-20200714-lw400t.ppd.gz	6d4f510d1db714d0822a0e2c52638022	13191
-dymo-20200714-lw450dl.ppd.gz	6d4f510d1db714d0822a0e2c52638022	13191
-dymo-20200714-lw450dt.ppd.gz	7c4cd5def81f2ce2908823be55a6540a	2160
 dymo-20200714-lw450.ppd.gz	6d4f510d1db714d0822a0e2c52638022	13191
+dymo-20200714-lw450dl.ppd.gz	6d4f510d1db714d0822a0e2c52638022	13191
 dymo-20200714-lw450t.ppd.gz	6d4f510d1db714d0822a0e2c52638022	13191
 dymo-20200714-lw450tt.ppd.gz	47f0fd586ab93f5e44fd0df38785d255	13197
 dymo-20200714-lw4xl.ppd.gz	1b483942f48c7dfc2d81c206ebf4eeea	34195
 dymo-20200714-lwduol.ppd.gz	6d4f510d1db714d0822a0e2c52638022	13191
-dymo-20200714-lwduot2.ppd.gz	7c4cd5def81f2ce2908823be55a6540a	2160
-dymo-20200714-lwduot.ppd.gz	2b74c9dba137b9409b94ba82159d17d3	2156
 dymo-20200714-lwtt.ppd.gz	47f0fd586ab93f5e44fd0df38785d255	13197
 dymo-20200714-se450.ppd.gz	b1baf4b08604e58e153c012b7beecaa4	6500
 epson-20170125-Epson-L380_Series-epson-escpr-en-1.6.10.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
@@ -115,9 +88,9 @@
 epson-20200615-1_6_41-Epson-L805_Series-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
 epson-20200615-1_6_41-Epson-L810_Series-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
 epson-20200615-1_6_41-Epson-M200_Series-epson-escpr-en.ppd.gz	ba3f6e277605e1edc620fe86bbf5be97	88876
-epson-20200615-1_6_41-Epson-ME_200-epson-escpr-en.ppd.gz	db58c154f635543ad583fcae8eda95ae	86976
 epson-20200615-1_6_41-Epson-ME-301_Series-epson-escpr-en.ppd.gz	db58c154f635543ad583fcae8eda95ae	86976
 epson-20200615-1_6_41-Epson-ME-400_Series-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
+epson-20200615-1_6_41-Epson-ME_200-epson-escpr-en.ppd.gz	db58c154f635543ad583fcae8eda95ae	86976
 epson-20200615-1_6_41-Epson-ME_OFFICE_510-epson-escpr-en.ppd.gz	db58c154f635543ad583fcae8eda95ae	86976
 epson-20200615-1_6_41-Epson-ME_OFFICE_530-epson-escpr-en.ppd.gz	db58c154f635543ad583fcae8eda95ae	86976
 epson-20200615-1_6_41-Epson-ME_OFFICE_570-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
@@ -127,11 +100,6 @@
 epson-20200615-1_6_41-Epson-PF-70_Series-epson-escpr-en.ppd.gz	5bd55f263600acfc6e997a25838eaf21	22722
 epson-20200615-1_6_41-Epson-PF-71_Series-epson-escpr-en.ppd.gz	5bd55f263600acfc6e997a25838eaf21	22722
 epson-20200615-1_6_41-Epson-PF-81_Series-epson-escpr-en.ppd.gz	fc495499a504ae19bb4c6b3b3cbddc83	22722
-epson-20200615-1_6_41-Epson-PictureMate_500-epson-escpr-en.ppd.gz	cd84735a3de6546622453a549264bba7	30933
-epson-20200615-1_6_41-Epson-PictureMate_PM_200-epson-escpr-en.ppd.gz	87035f87ba158b4bef862b1029d4efc2	30933
-epson-20200615-1_6_41-Epson-PictureMate_PM_240-epson-escpr-en.ppd.gz	87035f87ba158b4bef862b1029d4efc2	30933
-epson-20200615-1_6_41-Epson-PictureMate_PM_270-epson-escpr-en.ppd.gz	cd84735a3de6546622453a549264bba7	30933
-epson-20200615-1_6_41-Epson-PictureMate_PM_300-epson-escpr-en.ppd.gz	cd84735a3de6546622453a549264bba7	30933
 epson-20200615-1_6_41-Epson-PM-400_Series-epson-escpr-en.ppd.gz	fc495499a504ae19bb4c6b3b3cbddc83	22722
 epson-20200615-1_6_41-Epson-PM-A750-epson-escpr-en.ppd.gz	db58c154f635543ad583fcae8eda95ae	86976
 epson-20200615-1_6_41-Epson-PM-A820-epson-escpr-en.ppd.gz	db58c154f635543ad583fcae8eda95ae	86976
@@ -172,6 +140,11 @@
 epson-20200615-1_6_41-Epson-PX-M860F-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
 epson-20200615-1_6_41-Epson-PX-S05_Series-epson-escpr-en.ppd.gz	2448a4532be837e31a36c09dedfacdf2	88924
 epson-20200615-1_6_41-Epson-PX-S06_Series-epson-escpr-en.ppd.gz	2448a4532be837e31a36c09dedfacdf2	88924
+epson-20200615-1_6_41-Epson-PictureMate_500-epson-escpr-en.ppd.gz	cd84735a3de6546622453a549264bba7	30933
+epson-20200615-1_6_41-Epson-PictureMate_PM_200-epson-escpr-en.ppd.gz	87035f87ba158b4bef862b1029d4efc2	30933
+epson-20200615-1_6_41-Epson-PictureMate_PM_240-epson-escpr-en.ppd.gz	87035f87ba158b4bef862b1029d4efc2	30933
+epson-20200615-1_6_41-Epson-PictureMate_PM_270-epson-escpr-en.ppd.gz	cd84735a3de6546622453a549264bba7	30933
+epson-20200615-1_6_41-Epson-PictureMate_PM_300-epson-escpr-en.ppd.gz	cd84735a3de6546622453a549264bba7	30933
 epson-20200615-1_6_41-Epson-Stylus_CX3700-epson-escpr-en.ppd.gz	db58c154f635543ad583fcae8eda95ae	86976
 epson-20200615-1_6_41-Epson-Stylus_CX3800-epson-escpr-en.ppd.gz	28c5558643ac14c72294bf52c0e5f6d3	83603
 epson-20200615-1_6_41-Epson-Stylus_CX4200-epson-escpr-en.ppd.gz	28c5558643ac14c72294bf52c0e5f6d3	83603
@@ -218,13 +191,13 @@
 epson-20200615-1_6_41-Epson-WF-7610_Series-epson-escpr-en.ppd.gz	8a01c80289c25cb0e1e22af8317cc64c	88876
 epson-20200615-1_6_41-Epson-WF-8010_Series-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
 epson-20200615-1_6_41-Epson-WF-M5190_Series-epson-escpr-en.ppd.gz	ba3f6e277605e1edc620fe86bbf5be97	88876
-epson-20200615-1_6_41-Epson-WorkForce_320-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
-epson-20200615-1_6_41-Epson-WorkForce_600-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
-epson-20200615-1_6_41-Epson-WorkForce_610-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
 epson-20200615-1_6_41-Epson-WP-4010_Series-epson-escpr-en.ppd.gz	aff4d623f68683b37e52b440c29a6870	62369
 epson-20200615-1_6_41-Epson-WP-4011_Series-epson-escpr-en.ppd.gz	5263829323ea08ac0c3fc77fb52b24da	64737
 epson-20200615-1_6_41-Epson-WP-4020_Series-epson-escpr-en.ppd.gz	acdf3b829a15fad5b9b6f0d48e0898ad	66152
 epson-20200615-1_6_41-Epson-WP-M4011_Series-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
+epson-20200615-1_6_41-Epson-WorkForce_320-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
+epson-20200615-1_6_41-Epson-WorkForce_600-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
+epson-20200615-1_6_41-Epson-WorkForce_610-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
 epson-20200615-1_6_41-Epson-XP-201_204_208_Series-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
 epson-20200615-1_6_41-Epson-XP-2100_Series-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
 epson-20200615-1_6_41-Epson-XP-211_214_216_Series-epson-escpr-en.ppd.gz	28c5558643ac14c72294bf52c0e5f6d3	83603
@@ -251,1206 +224,366 @@
 epson-20200615-1_6_41-Epson-XP-950_Series-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
 epson-20200615-1_6_41-Epson-XP-960_Series-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
 epson-20200615-EPSON_EW-052A_Series.ppd.gz	28c5558643ac14c72294bf52c0e5f6d3	83603
-epson-20200615-Epson-LX-10000FK_Series_PS3.ppd.gz	30cca9eceb655fbb535c9d2f72e39795	315611
-epson-20200615-Epson-LX-10000F_PS.ppd.gz	6db634bfac5762c1f297cb38e884264a	315611
-epson-20200615-Epson-LX-10010MF_Series_PS3.ppd.gz	5e7f0accb256ec0aca83e3a0d06cec86	226722
-epson-20200615-Epson-WF-C17590_Series_PS3.ppd.gz	6628b750134b888ba908aaa516e399f2	315611
-epson-20200615-Epson-WF-C20590_PS.ppd.gz	a26d133a3aa8bbcaab750d3a28a8c111	315611
-epson-20200615-Epson-WF-M20590_Series_PS3.ppd.gz	21667001f832fd802a54c31318eaa01c	226722
-foomatic-20170101-Samsung-M332x_382x_402x-Postscript.ppd.gz	53736a0f137ccd74baf2012408b07fa6	226050
-foomatic-20190909-Ricoh-IM_430F-PostscriptMono-Ricoh.ppd.gz	d0ae8af8c09bd7cda2d2c05f6513cae2	227449
-foomatic-20190909-Ricoh-IM_430_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-foomatic-20190909-Ricoh-IM_C4500_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20191029-Apollo-P-1200-pcl3.ppd.gz	a677bcde99d05de988b526ab35bc32a9	66571
-foomatic-20191029-BR5070DN_GPL.ppd.gz	dbf102bd6ef3efbfccea3b445a9fafd8	227148
-foomatic-20191029-Gestetner-Pro_C5200S_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13320
-foomatic-20191029-Lanier-Pro_8200S_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20191029-Lanier-Pro_8210_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20191029-Lanier-Pro_C7200_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13345
-foomatic-20191029-Lanier-Pro_C7200S_Light_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13345
-foomatic-20191029-shar208d.ppd.gz	1d846734d902a107ce5d05096ca2acf5	227659
-foomatic-20191029-shar208s.ppd.gz	459aff242b50b5b96e8a2cc2aae48584	227560
+epson-20210521-1_6_41-Epson-PX-S5040-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
+epson-20210703-Epson-L1210_Series-epson-escpr-en.ppd.gz	bfea9c85d43bf8e2c598c14d64cef9c8	88876
+foomatic-20190909-Ricoh-IM_430_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+foomatic-20190909-Ricoh-IM_C4500_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20191029-Gestetner-Pro_C5200S_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13316
+foomatic-20191029-Lanier-Pro_8200S_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20191029-Lanier-Pro_8210_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20191029-Lanier-Pro_C7200S_Light_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13341
+foomatic-20191029-Lanier-Pro_C7200_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13341
 foomatic-20200219-Anitech-M24-epson.ppd.gz	1c3b9d575b30e2a54b0effc4660b99ee	2109
-foomatic-20200219-Apple-12_640ps-Postscript.ppd.gz	b017d467a2fda2bab4ea6dd3eab93e05	226050
-foomatic-20200219-Apple-LaserWriter_IIg-Postscript.ppd.gz	3426a0d12a7cc564d4bf9a1937d300cf	187437
-foomatic-20200219-Apple-LaserWriter_Select_360-ljet2p.ppd.gz	740b4ba05f00836522620334a1366af7	16781
-foomatic-20200219-Brother-DCP-7010-ljet4.ppd.gz	671d685f1cde2a1b312e6c8de2bc70ac	25606
-foomatic-20200219-Brother-DCP-8020-Postscript-Brother.ppd.gz	e2e91bda3e6cf35549384aa40a5a1769	226881
-foomatic-20200219-Brother-DCP-8025D-Postscript-Brother.ppd.gz	30fed33af6f95e344680408d00b2ec62	226993
-foomatic-20200219-Brother-DCP-8040-Postscript-Brother.ppd.gz	de0e10f0ceb297996047bfabbfc502f0	226881
-foomatic-20200219-Brother-DCP-8045D-Postscript-Brother.ppd.gz	8ba9a2559739895a17914e45f4ebf812	226993
-foomatic-20200219-Brother-DCP-9010CN-Postscript-Brother.ppd.gz	104b0fb12f8e8faac1c16c18b016427d	127977412
-foomatic-20200219-Brother-DCP-9040CN-Postscript-Brother.ppd.gz	388f5ff094f3fe484d788144d2a1f90b	127977436
-foomatic-20200219-Brother-DCP-9045CDN-Postscript-Brother.ppd.gz	96b98ce7aee97f81a74369b30a55bc2e	127977622
 foomatic-20200219-Brother-HJ-400-lq850.ppd.gz	5fcfb6b2be65506c5cbd5a0b5d02b8d2	55329
-foomatic-20200219-Brother-HL-1030-hl1250.ppd.gz	1b0b6a18ee3668ad441aa77d811ac1fb	11017
-foomatic-20200219-Brother-HL-1070-ljet4.ppd.gz	671d685f1cde2a1b312e6c8de2bc70ac	25606
-foomatic-20200219-Brother-HL-10V-ljet3.ppd.gz	80e6198833b8d5262b50a5c15bdd2fd4	11140
-foomatic-20200219-Brother-HL-1230-hl1250.ppd.gz	1b0b6a18ee3668ad441aa77d811ac1fb	11017
-foomatic-20200219-Brother-HL-1240-laserjet.ppd.gz	0a3bce50a60a19ee5749d9c4136f93fb	62985
-foomatic-20200219-Brother-HL-1250-ljet4.ppd.gz	fb086a0b27f0349c5b92f80a04ddc644	25690
-foomatic-20200219-Brother-HL-1270N-ljet4.ppd.gz	fb086a0b27f0349c5b92f80a04ddc644	25690
-foomatic-20200219-Brother-HL-1450-Postscript-Brother.ppd.gz	ef235b047c4ef14bec5df6cfbd26f098	226931
-foomatic-20200219-Brother-HL-1650_70N-Postscript-Brother.ppd.gz	fa7d6499be9551ecc5577fbe7af6d715	227240
-foomatic-20200219-Brother-HL-1650-ljet4.ppd.gz	fb086a0b27f0349c5b92f80a04ddc644	25736
-foomatic-20200219-Brother-HL-1660e-ljet4.ppd.gz	fb086a0b27f0349c5b92f80a04ddc644	25736
-foomatic-20200219-Brother-HL-1850_70N-Postscript-Brother.ppd.gz	95c4fc9aadf849459320b14038ca8d8d	227477
-foomatic-20200219-Brother-HL-1850-ljet4.ppd.gz	fb086a0b27f0349c5b92f80a04ddc644	25736
-foomatic-20200219-Brother-HL-2060-ljet4.ppd.gz	fb086a0b27f0349c5b92f80a04ddc644	25690
-foomatic-20200219-Brother-HL-2240D-hl1250.ppd.gz	ec392c72826454c6e45a7e687da3cc88	25877
-foomatic-20200219-Brother-HL-2240-hl1250.ppd.gz	ec392c72826454c6e45a7e687da3cc88	25831
-foomatic-20200219-Brother-HL-2400CeN-pxlcolor.ppd.gz	c8ca478ff81032962259cf3139263b07	4219348
-foomatic-20200219-Brother-HL-2460N-pxlmono.ppd.gz	31f86c4b6f008fd3ff7d22b7c2b69d82	547082
-foomatic-20200219-Brother-HL-2460-Postscript-Brother.ppd.gz	cf963cf1f2a6b5bbf53012ee9a3cb392	227937
-foomatic-20200219-Brother-HL-2600CN-Postscript-Brother.ppd.gz	8583618a0d2e56491e2bc96e7d9c6eeb	127979370
-foomatic-20200219-Brother-HL-2700CN-Postscript-Brother.ppd.gz	9a8ddb7ca602ede3a2da398124013167	127977959
-foomatic-20200219-Brother-HL-3070CW-Postscript-Brother.ppd.gz	33744355c4843f4ec4e316d37a68e419	127977488
-foomatic-20200219-Brother-HL-3260N-Postscript-Brother.ppd.gz	366bc321805fde033a20d3d94c66fa46	227917
-foomatic-20200219-Brother-HL-3450CN-Postscript-Brother.ppd.gz	d038195473c100143ea1daf36f261c74	127979370
-foomatic-20200219-Brother-HL-4050CDN-Postscript-Brother.ppd.gz	905b9df8c402f4f18c1c312f54582c5f	127977698
-foomatic-20200219-Brother-HL-4Ve-laserjet.ppd.gz	21ef470512223260bbbb3af88f81e138	62901
-foomatic-20200219-Brother-HL-5030-ljet4.ppd.gz	fb086a0b27f0349c5b92f80a04ddc644	25690
-foomatic-20200219-Brother-HL-5050-Postscript-Brother.ppd.gz	28079ee1dcab4601f48571e51582600f	227037
-foomatic-20200219-Brother-HL-5140-ljet4.ppd.gz	671d685f1cde2a1b312e6c8de2bc70ac	25606
-foomatic-20200219-Brother-HL-5150D-Postscript-Brother.ppd.gz	b202b7d34e2a051701594a935c9c4880	227149
-foomatic-20200219-Brother-HL-5240-Postscript-Brother.ppd.gz	d6b09381d05d8aee3162e8cafcf6ea3d	227063
-foomatic-20200219-Brother-HL-5250DN-Postscript-Brother.ppd.gz	a1bb11ec884b456e9ae403ad3d47c3e8	227175
-foomatic-20200219-Brother-HL-5270DN-Postscript-Brother.ppd.gz	0faf5811d477fb5cd55862b131f6e3d0	227599
-foomatic-20200219-Brother-HL-6050D_DN-Postscript-Brother.ppd.gz	38122e42e571ce7e9d8be1d0b8366337	227677
-foomatic-20200219-Brother-HL-6050-Postscript-Brother.ppd.gz	168ec612e37da5eddc4dd7d0589ec0b4	227565
-foomatic-20200219-Brother-HL-7050N-pxlmono.ppd.gz	31f86c4b6f008fd3ff7d22b7c2b69d82	547132
-foomatic-20200219-Brother-HL-7050-Postscript-Brother.ppd.gz	a5d7a9ea16558314fb33f3c2d576bd09	228078
-foomatic-20200219-Brother-HL-8050N-Postscript-Brother.ppd.gz	115dfa8e349cb949ddb2b2451a604c11	227895
-foomatic-20200219-Brother-HL-8-ljetplus.ppd.gz	21ef470512223260bbbb3af88f81e138	62901
+foomatic-20200219-Brother-HL-2400CeN-pxlcolor.ppd.gz	65a4243340efda05e7822131db44326f	4219353
+foomatic-20200219-Brother-HL-2460N-pxlmono.ppd.gz	68f1a3f86af51919ba8c3e07022b99eb	547087
+foomatic-20200219-Brother-HL-7050N-pxlmono.ppd.gz	68f1a3f86af51919ba8c3e07022b99eb	547137
 foomatic-20200219-Brother-MC-3000-epson.ppd.gz	1c3b9d575b30e2a54b0effc4660b99ee	2109
-foomatic-20200219-Brother-MFC-7450-Postscript-Brother.ppd.gz	9578b082b2481f848b76ec77594b2952	227375
-foomatic-20200219-Brother-MFC-8220-Postscript-Brother.ppd.gz	dc2672f1dd2979e64c32be9a8f655e85	226881
-foomatic-20200219-Brother-MFC-8440-Postscript-Brother.ppd.gz	009a922c4281ab8d433360b592530a8f	226881
-foomatic-20200219-Brother-MFC-8640D-Postscript-Brother.ppd.gz	4f29dda150bbb4c6a215f5e845711d46	226993
-foomatic-20200219-Brother-MFC-8670DN-Postscript-Brother.ppd.gz	5cade184a528e1f5423868e27f6d0c26	227523
-foomatic-20200219-Brother-MFC-8820D-Postscript-Brother.ppd.gz	e68abf78d4944e09f19867d5c03ad213	226993
 foomatic-20200219-Brother-MFC-9100c-epsonc.ppd.gz	0a4d9cad64852530b24f222cce311780	3555
-foomatic-20200219-Brother-MFC-9420CN-Postscript-Brother.ppd.gz	271aa5b473c11fb76f2bec9c41adbbec	127977211
-foomatic-20200219-Brother-MFC-9440CN-Postscript-Brother.ppd.gz	0107805f691c4801164c9812bc9e51a5	127977510
-foomatic-20200219-Brother-MFC-P2500-hl1250.ppd.gz	1b0b6a18ee3668ad441aa77d811ac1fb	11017
+foomatic-20200219-CItoh-M8510-m8510.ppd.gz	2a86a0e0c3c0b55e0b384a408a850895	17617
 foomatic-20200219-Canon-BJ-100-bj200.ppd.gz	194ffb69bcfdb14f6f6a80f99468bead	52180
 foomatic-20200219-Canon-BJ-10e-bj10e.ppd.gz	614340ec97519e0b105091d21018c4ae	52177
 foomatic-20200219-Canon-BJ-330-bj200.ppd.gz	194ffb69bcfdb14f6f6a80f99468bead	52180
 foomatic-20200219-Canon-BJC-1000-bjc600.ppd.gz	d0660836e80d9b1d4a28cdfffc03c075	114497
-foomatic-20200219-Canon-BJC-2100-bjc610XY.upp.ppd.gz	f5d130849c36e1f7413ea58f15a839e0	104993
 foomatic-20200219-Canon-BJC-210-bjc600.ppd.gz	d0660836e80d9b1d4a28cdfffc03c075	114497
 foomatic-20200219-Canon-BJC-210SP-bj200.ppd.gz	194ffb69bcfdb14f6f6a80f99468bead	52180
-foomatic-20200219-Canon-BJC-250ex-bjc250gs.ppd.gz	7fcfcea7f115ef9f34f11073be1ea0ca	121324
-foomatic-20200219-Canon-BJC-255SP-bjc250gs.ppd.gz	856697af13ae367f16798d70d0bf1a0d	121235
 foomatic-20200219-Canon-BJC-4550-bjc800.ppd.gz	6f832566eb8eb330f042726acd6023c5	114496
-foomatic-20200219-Canon-BJC-610-bjc610XY.upp.ppd.gz	f5d130849c36e1f7413ea58f15a839e0	104993
 foomatic-20200219-Canon-BJC-800-bjc800.ppd.gz	6f832566eb8eb330f042726acd6023c5	114496
-foomatic-20200219-Canon-BJC-8200-bj8XXYYZ.upp.ppd.gz	5e346b586ca40a8266aa429054361585	231885
-foomatic-20200219-Canon-GP_405-Postscript.ppd.gz	317438485aa662ee18e26087962167b5	226050
-foomatic-20200219-Canon-imageRunner_C2570-Postscript.ppd.gz	b315f350690e9d7469b9f11b891f46a3	314674
-foomatic-20200219-Canon-iPR_C600-Postscript-Canon.ppd.gz	9138f95d4f27909e3d553898fd414255	319688
-foomatic-20200219-Canon-iPR_C650_PPD-Postscript-Canon.ppd.gz	343fad3572ea36c89ff899b1316346db	321315
-foomatic-20200219-Canon-iPR_C700_800-Postscript-Canon.ppd.gz	4905420fcdb9c92dee091de35a212540	321321
-foomatic-20200219-Canon-iPR_C750_850_PPD-Postscript-Canon.ppd.gz	e9d66fcdb933a276e75031b0a4a3a3dc	321329
-foomatic-20200219-Canon-iR-ADV_400_500-Postscript-Canon.ppd.gz	aa2ab1c05f388b30cf8a3812a95b1aeb	228751
-foomatic-20200219-Canon-iR-ADV_4025_4035-Postscript-Canon.ppd.gz	6f3463c86bdb9f8ec2a6c28aa7d3794a	229393
-foomatic-20200219-Canon-iR-ADV_4225_4235-Postscript-Canon.ppd.gz	b809f79a5691c97afc81fa457aa06f48	229746
-foomatic-20200219-Canon-iR-ADV_6055_6065-Postscript-Canon.ppd.gz	54eab159bf29a71c8eff6345a3004054	229626
-foomatic-20200219-Canon-iR-ADV_6255_6265-Postscript-Canon.ppd.gz	5ade838bf2b44dd39d814ef48da4aef1	229967
-foomatic-20200219-Canon-iR-ADV_8085_8095-Postscript-Canon.ppd.gz	e452077c26042f9bfdccd5e269f4ec06	229697
-foomatic-20200219-Canon-iR-ADV_8205-Postscript-Canon.ppd.gz	4138042b20f003258187d5cbb1a51b7b	230246
-foomatic-20200219-Canon-iR-ADV_C2020_2030-Postscript-Canon.ppd.gz	0c8b056d49693575c76314bc550e81c1	318694
-foomatic-20200219-Canon-iR-ADV_C2020i_2030i-Postscript-Canon.ppd.gz	bfecc80b0155330aa18070d8b873a981	318690
-foomatic-20200219-Canon-iR-ADV_C2025-Postscript-Canon.ppd.gz	bfecc80b0155330aa18070d8b873a981	318690
-foomatic-20200219-Canon-iR-ADV_C2220_2230-Postscript-Canon.ppd.gz	83278a3b1f6503d53564dc21cddf4f71	319249
-foomatic-20200219-Canon-iR-ADV_C2225-Postscript-Canon.ppd.gz	5810da758256d3b05cb6ce4342990c49	319249
-foomatic-20200219-Canon-iR-ADV_C250_350-Postscript-Canon.ppd.gz	5cf2c3e8d47301b0bf3c9b9f0a08542a	318431
-foomatic-20200219-Canon-iR-ADV_C3320L-Postscript-Canon.ppd.gz	27187fbe29fb4047bdff9f436428bafa	319133
-foomatic-20200219-Canon-iR-ADV_C3320-Postscript-Canon.ppd.gz	5c0e5aa3926e0aba1ef2644dda89648c	319205
-foomatic-20200219-Canon-iR-ADV_C3325_3330-Postscript-Canon.ppd.gz	7ce4988e2a3464c699621d0a6aed1878	319205
-foomatic-20200219-Canon-iR-ADV_C351-Postscript-Canon.ppd.gz	f83f889f97a6d2f4f881bc92905060e2	318360
-foomatic-20200219-Canon-iR-ADV_C5030_5035-Postscript-Canon.ppd.gz	779ee6fb4cda7b9acb0c9aff4b7827f8	320071
-foomatic-20200219-Canon-iR-ADV_C5045_5051-Postscript-Canon.ppd.gz	e97ea516e1b7ac233fd5fdd3194fa417	320071
-foomatic-20200219-Canon-iR-ADV_C5235_5240-Postscript-Canon.ppd.gz	1a91c140893650dad8cc819aae440c41	320137
-foomatic-20200219-Canon-iR-ADV_C5250_5255-Postscript-Canon.ppd.gz	1a91c140893650dad8cc819aae440c41	320137
-foomatic-20200219-Canon-iR-ADV_C7055_7065-Postscript-Canon.ppd.gz	c78be5e423a0a7e7fc228ec2c11a6a36	319877
-foomatic-20200219-Canon-iR-ADV_C7260_7270-Postscript-Canon.ppd.gz	c004e170590564f3d3b44ba5fb9f6dfe	320218
-foomatic-20200219-Canon-iR-ADV_C7280-Postscript-Canon.ppd.gz	e0b513a508cf59a423fa35fa3ce65dfe	320493
-foomatic-20200219-Canon-iR-ADV_C9060_9070-Postscript-Canon.ppd.gz	49298cd6ab8b9c6c79a297f2f0d07cdd	319944
-foomatic-20200219-Canon-iR-ADV_C9065_9075-Postscript-Canon.ppd.gz	3d463ff89df4cbff672938b43770d614	319948
-foomatic-20200219-Canon-iR-ADV_C9270_9280-Postscript-Canon.ppd.gz	ecdbacca78790b1caca18d0cbad468d3	320497
-foomatic-20200219-Canon-LBP-1000-ljet4.ppd.gz	fb086a0b27f0349c5b92f80a04ddc644	25721
-foomatic-20200219-Canon-LBP-1760-ljet4.ppd.gz	671d685f1cde2a1b312e6c8de2bc70ac	25606
-foomatic-20200219-Canon-LBP-430-ljet4.ppd.gz	e5cbf1b01ed8e6518ae5dc72abdf207a	10792
 foomatic-20200219-Canon-LBP-4U-lbp8.ppd.gz	b8db9e2d57b744f66e385aca3a8f1d76	37364
-foomatic-20200219-Canon-LBP6670-Postscript-Canon.ppd.gz	28e991bff02eff4e32ffed451946fe9b	226579
-foomatic-20200219-Canon-LBP6780_3580-Postscript-Canon.ppd.gz	22d6990ce0cae36241cd3c38b8f36c37	226585
-foomatic-20200219-Canon-LBP710C_PPD-Postscript-Canon.ppd.gz	2325a51cf304d18b917e3cf2f2921ef9	316172
-foomatic-20200219-Canon-LBP712C_PPD-Postscript-Canon.ppd.gz	a68634e5c56b6480789b17a7c0ecff91	316175
-foomatic-20200219-Canon-LBP7660C-Postscript-Canon.ppd.gz	c01bd741ce92cd1b9d1cd7ec5f0adcc7	316179
-foomatic-20200219-Canon-LBP7680C_5280-Postscript-Canon.ppd.gz	c01bd741ce92cd1b9d1cd7ec5f0adcc7	316179
-foomatic-20200219-Canon-LBP7780C_5480-Postscript-Canon.ppd.gz	90122da95871f52e857bef44627d62c8	316172
-foomatic-20200219-Canon-LBP8780-Postscript-Canon.ppd.gz	19b7bb5e68e34ba4e8a286f3c7b33e73	226642
 foomatic-20200219-Canon-LIPS-III-lips3.ppd.gz	a1d413064d1e345beec5374f458a82a9	6568
 foomatic-20200219-Canon-LIPS-IIplus-lips2p.ppd.gz	add9f39e1d634acbb8be9166c2841c9c	4799
-foomatic-20200219-Canon-S500-bj8XXYYZ.upp.ppd.gz	5e346b586ca40a8266aa429054361585	231885
 foomatic-20200219-Citizen-ProJet_IIc-cdj500.ppd.gz	0f840373170dbaa33ae3053e5212d1aa	56044
-foomatic-20200219-CItoh-M8510-m8510.ppd.gz	2a86a0e0c3c0b55e0b384a408a850895	17617
 foomatic-20200219-Compaq-IJ900-lxm5700m.ppd.gz	eccf445e08d5914ee2fd53002cfab044	210869
-foomatic-20200219-DEC-DECWriter_500i-djet500.ppd.gz	bd4661f9ff56b214454f3b767429ddaf	11153
 foomatic-20200219-DEC-LJ250-declj250.ppd.gz	fbc890730d1e31671a49c50080f7fd75	36800
-foomatic-20200219-Dell-M5200-Postscript.ppd.gz	6e005fde245bbbddef54274c590ff03a	226050
+foomatic-20200219-Epson-AL-M2300-eplaser.ppd.gz	94e3190abf489a51034da5a36e052a29	1265047
+foomatic-20200219-Epson-AL-M2310-eplaser.ppd.gz	94e3190abf489a51034da5a36e052a29	1265047
 foomatic-20200219-Epson-ActionPrinter_3250-ap3250.ppd.gz	820c14feaf6c643665951549ea031d73	11806
-foomatic-20200219-Epson-AL-2600-Postscript-Epson.ppd.gz	c6b6781bb4f3fd1467265a0543fc3c4b	215542
-foomatic-20200219-Epson-AL-C1900_PS3-Postscript-Epson.ppd.gz	c47b287bdc2e8f250e3cfd7ae3211d8e	215472
-foomatic-20200219-Epson-AL-C2000_PS3-Postscript-Epson.ppd.gz	1ce9b186c6f8edbdc12dff0d6936ae4c	214750
-foomatic-20200219-Epson-AL-C2600-Postscript-Epson.ppd.gz	5c19909f7e83822da4a45689dbe90bc5	215555
-foomatic-20200219-Epson-AL-C2800-Postscript-Epson.ppd.gz	4670590b5ffabfe5efca24b2379e408a	215333
-foomatic-20200219-Epson-AL-C3800-Postscript-Epson.ppd.gz	4031535f182d8ef960b9ccd4bf283750	215333
-foomatic-20200219-Epson-AL-C4000_PS3-Postscript-Epson.ppd.gz	e4562788a84e8b843aedf7f78c0a6c4e	215575
-foomatic-20200219-Epson-AL-C4100-Postscript-Epson.ppd.gz	a984173f6915a149000108791f38e0c6	215389
-foomatic-20200219-Epson-AL-C4200-Postscript-Epson.ppd.gz	7da9ab4df8f09ef1934df3a82871ed4d	215545
-foomatic-20200219-Epson-AL-C8500-ljet4.ppd.gz	671d685f1cde2a1b312e6c8de2bc70ac	25606
-foomatic-20200219-Epson-AL-C8600_PS3-Postscript-Epson.ppd.gz	49d61b46f5ea39c3db055eb38515f3e7	215607
-foomatic-20200219-Epson-AL-C9100-Postscript-Epson.ppd.gz	271634ecc084fbb393fba3ab5e85c0b3	215808
-foomatic-20200219-Epson-AL-C9200-Postscript-Epson.ppd.gz	a748e4bb2bc1404b80143e9faa46eb8f	215291
-foomatic-20200219-Epson-AL-CX21-Postscript-Epson.ppd.gz	b421f88ac7c60c3ac1c25f3b26d027f8	214681
-foomatic-20200219-Epson-AL-M2000-Postscript-Epson.ppd.gz	70273ad1cbfb3e54c72ac9f27b5a09dd	188803
-foomatic-20200219-Epson-AL-M2300-eplaser.ppd.gz	861df1c88412816cad99b6412503e97a	1207015
-foomatic-20200219-Epson-AL-M2310-eplaser.ppd.gz	861df1c88412816cad99b6412503e97a	1207015
-foomatic-20200219-Epson-AL-M2400-Postscript-Epson.ppd.gz	b78fbb9fc0d3c8e2168282294cf4606e	188815
-foomatic-20200219-Epson-AL-M4000-Postscript-Epson.ppd.gz	fabda4cb2165fcbb4a83d4ca1c344944	227820
-foomatic-20200219-Epson-AL-M8000-Postscript-Epson.ppd.gz	b259df3031282552a47576f187de16f8	227637
-foomatic-20200219-Epson-AL-MX20-Postscript-Epson.ppd.gz	318c1b398a2e83b35560a45eabd10c1f	188673
 foomatic-20200219-Epson-Dot_Matrix-epsonc.ppd.gz	0a4d9cad64852530b24f222cce311780	3555
-foomatic-20200219-Epson-EPL-5800-ljet4.ppd.gz	671d685f1cde2a1b312e6c8de2bc70ac	25606
-foomatic-20200219-Epson-EPL-5900_PS3-Postscript-Epson.ppd.gz	3153da57ac3d63c05462ebe0e2d2a9fb	315545
-foomatic-20200219-Epson-EPL-6100_PS3-Postscript-Epson.ppd.gz	7876ef3d4130a1d28aa9aecbb1e58ea3	315545
-foomatic-20200219-Epson-EPL-6200-Postscript-Epson.ppd.gz	c95c0b73e35365e202aabc77ee533de1	214905
-foomatic-20200219-Epson-EPL-7100-laserjet.ppd.gz	21ef470512223260bbbb3af88f81e138	62901
-foomatic-20200219-Epson-EPL-N2120-ljet4.ppd.gz	671d685f1cde2a1b312e6c8de2bc70ac	25606
-foomatic-20200219-Epson-EPL-N2500_PS3-Postscript-Epson.ppd.gz	e09baa36196ca7d86137f806bc787e01	316095
-foomatic-20200219-Epson-EPL-N2550-Postscript-Epson.ppd.gz	a12042c87672a0264173514e74f62f15	227382
-foomatic-20200219-Epson-EPL-N2700-Postscript-Epson.ppd.gz	cdfa63913df24227a752aa0fa567e886	316406
-foomatic-20200219-Epson-EPL-N3000-Postscript-Epson.ppd.gz	601c0f429e868f4dab4a9e5dc3560b22	316425
-foomatic-20200219-Epson-EPL-N7000-Postscript-Epson.ppd.gz	3ba454469efe7a90f02aedafaf3ab35b	316333
-foomatic-20200219-Epson-LP-1800-eplaser-jp.ppd.gz	91743307c1f49a7ff9fa1a4204fa7537	1207013
-foomatic-20200219-Epson-LP-1900-eplaser-jp.ppd.gz	c795e5ca23aca100ff73b7403bd67530	1207013
-foomatic-20200219-Epson-LP-2200-eplaser-jp.ppd.gz	99198e55671ac3cd1db648a217416e8c	1207013
-foomatic-20200219-Epson-LP-2400-eplaser-jp.ppd.gz	1344b53898fb36d8f7054824e68082bc	1207013
-foomatic-20200219-Epson-LP-2500-eplaser-jp.ppd.gz	af74e2d440fd81f0824a501388a8eed2	1207013
-foomatic-20200219-Epson-LP-3000C-eplaser-jp.ppd.gz	697e6ed53e30fa9583fa174203dd1e8c	97255899
-foomatic-20200219-Epson-LP-7500-eplaser-jp.ppd.gz	fa513a43f3f8245aa4655f7e6e69c76d	1207013
-foomatic-20200219-Epson-LP-7700-eplaser-jp.ppd.gz	4b5f79a54ac74c84fdc4c2178d4c7d56	1207013
-foomatic-20200219-Epson-LP-7900-eplaser-jp.ppd.gz	3c587255767430d7145610345892f605	1207013
-foomatic-20200219-Epson-LP-8000C-eplaser-jp.ppd.gz	8d74521d9971e80b0cd9d40290083c64	97255879
+foomatic-20200219-Epson-LP-1800-eplaser-jp.ppd.gz	aa805214054c42a32bea12c9a234912f	1264395
+foomatic-20200219-Epson-LP-1900-eplaser-jp.ppd.gz	6aca701adb6dbe9b921b0300e765fbb2	1265045
+foomatic-20200219-Epson-LP-2200-eplaser-jp.ppd.gz	5e40396c5b2d5ad3a9485cb2d3709063	1265045
+foomatic-20200219-Epson-LP-2400-eplaser-jp.ppd.gz	e9061539fa41f63c71352f1093f83b89	1265045
+foomatic-20200219-Epson-LP-2500-eplaser-jp.ppd.gz	16a35b09b8885e263e68bb4ff97020cc	1265045
+foomatic-20200219-Epson-LP-3000C-eplaser-jp.ppd.gz	9036dafcf899a20a66e287bcc452813d	97273514
+foomatic-20200219-Epson-LP-7500-eplaser-jp.ppd.gz	75930e9772134a4e165ca70b6cb4906f	1265045
+foomatic-20200219-Epson-LP-7700-eplaser-jp.ppd.gz	2bb92908ee7b00e180a40c6fd0aa6b16	1265045
+foomatic-20200219-Epson-LP-7900-eplaser-jp.ppd.gz	83c716d713ead6e95dcd44b958f31385	1265045
+foomatic-20200219-Epson-LP-8000C-eplaser-jp.ppd.gz	c4e93fe73f28926329ae9d08a48ca47e	97273494
+foomatic-20200219-Epson-LP-8100-eplaser-jp.ppd.gz	865306a081bc3b6f645830297d83a47d	1265045
+foomatic-20200219-Epson-LP-8200C-eplaser-jp.ppd.gz	0120ad9ebd4e9ffc578e8275fb8d0720	97273514
+foomatic-20200219-Epson-LP-8300C-eplaser-jp.ppd.gz	be8b0b8d142c2ca3772cd16eb9d3a1a4	97273514
+foomatic-20200219-Epson-LP-8300F-eplaser-jp.ppd.gz	84f5b1c3830b81e5b628700806d41281	1265047
+foomatic-20200219-Epson-LP-8400F-eplaser-jp.ppd.gz	184e3921ca2efb1341c5fad82f38c371	1265047
+foomatic-20200219-Epson-LP-8500C-eplaser-jp.ppd.gz	56a7883f591ec5e142052ec5ec174554	97273514
+foomatic-20200219-Epson-LP-8600-eplaser-jp.ppd.gz	8cd2e13a2796bec8c77a4f92b31795fe	1265045
+foomatic-20200219-Epson-LP-8600F-eplaser-jp.ppd.gz	e54fd3d4bb540cf3b9cb9deecc9d2920	1265047
+foomatic-20200219-Epson-LP-8700-eplaser-jp.ppd.gz	5b8dcdb340505b1b353e8f326d20cb50	1265045
+foomatic-20200219-Epson-LP-8800C-eplaser-jp.ppd.gz	c49c964090599808095afb7399cd7908	97273514
+foomatic-20200219-Epson-LP-8900-eplaser-jp.ppd.gz	13cdd8aea320e0a79458b4addef67fd5	1265045
+foomatic-20200219-Epson-LP-9000B-eplaser-jp.ppd.gz	ffdd5d872fffa6dfce80c6a30f5088bc	1265047
+foomatic-20200219-Epson-LP-9000C-eplaser-jp.ppd.gz	208696670fe54860bb4074b343e241df	97273514
+foomatic-20200219-Epson-LP-9100-eplaser-jp.ppd.gz	63046d695a1a3bb7cd09e7a9b639208a	1265045
+foomatic-20200219-Epson-LP-9200B-eplaser-jp.ppd.gz	537b46e045f2158a8815e200f19d3f2b	1265047
+foomatic-20200219-Epson-LP-9300-eplaser-jp.ppd.gz	8028644b0392d50b124c5b7d7f37e4a4	1265045
+foomatic-20200219-Epson-LP-9400-eplaser-jp.ppd.gz	1b615f48059e2a81239d9b6a3baa2d89	1265045
+foomatic-20200219-Epson-LP-9500C-eplaser-jp.ppd.gz	9ae23259d8f14c8d49cf1751cdfe777d	97273514
+foomatic-20200219-Epson-LP-9600-eplaser-jp.ppd.gz	fcb039fdbdb02833b53bfc8b44a23505	1264395
+foomatic-20200219-Epson-LP-9600S-eplaser-jp.ppd.gz	f16226536c05f6e379f0cba020da536b	1265047
+foomatic-20200219-Epson-LP-M5000-eplaser-jp.ppd.gz	56a7883f591ec5e142052ec5ec174554	97273514
+foomatic-20200219-Epson-LP-M5300-eplaser-jp.ppd.gz	56a7883f591ec5e142052ec5ec174554	97273514
+foomatic-20200219-Epson-LP-M6000-eplaser-jp.ppd.gz	56a7883f591ec5e142052ec5ec174554	97273514
+foomatic-20200219-Epson-LP-S210-eplaser-jp.ppd.gz	5b8dcdb340505b1b353e8f326d20cb50	1265045
+foomatic-20200219-Epson-LP-S300-eplaser-jp.ppd.gz	5b8dcdb340505b1b353e8f326d20cb50	1265045
+foomatic-20200219-Epson-LP-S3000-eplaser-jp.ppd.gz	5b8dcdb340505b1b353e8f326d20cb50	1265045
+foomatic-20200219-Epson-LP-S310-eplaser-jp.ppd.gz	5b8dcdb340505b1b353e8f326d20cb50	1265045
+foomatic-20200219-Epson-LP-S3200-eplaser-jp.ppd.gz	5b8dcdb340505b1b353e8f326d20cb50	1265045
+foomatic-20200219-Epson-LP-S3500-eplaser-jp.ppd.gz	5b8dcdb340505b1b353e8f326d20cb50	1265045
+foomatic-20200219-Epson-LP-S4000-eplaser-jp.ppd.gz	5b8dcdb340505b1b353e8f326d20cb50	1265045
+foomatic-20200219-Epson-LP-S4200-eplaser-jp.ppd.gz	5b8dcdb340505b1b353e8f326d20cb50	1265045
+foomatic-20200219-Epson-LP-S4500-eplaser-jp.ppd.gz	abfa9574c4b9c6259f8a5283bb8ad645	1265047
+foomatic-20200219-Epson-LP-S6500-eplaser-jp.ppd.gz	e0e4bc504fc2ee35204c93d2705b18e3	97273514
+foomatic-20200219-Epson-LP-S7500-eplaser-jp.ppd.gz	56a7883f591ec5e142052ec5ec174554	97273514
+foomatic-20200219-Epson-LP-S8100-eplaser-jp.ppd.gz	02f33fd487b7e4e62edaa934dc94dd85	97273502
 foomatic-20200219-Epson-LP_8000-lp8000.ppd.gz	9f012ead5c03b6392c591e6f0d7b142c	15425
-foomatic-20200219-Epson-LP-8100-eplaser-jp.ppd.gz	3a3bda045b8b211901c070f3e07e87f7	1207013
-foomatic-20200219-Epson-LP-8200C-eplaser-jp.ppd.gz	2015c6a425c6bb5769a78dcfb1d85810	97255899
-foomatic-20200219-Epson-LP-8300C-eplaser-jp.ppd.gz	523947dfc85e14b8699214338b8c3bce	97255899
-foomatic-20200219-Epson-LP-8300CPD-Postscript-Epson.ppd.gz	0ddad95dad95e5a42dfa609fbb189e54	214776
-foomatic-20200219-Epson-LP-8300F-eplaser-jp.ppd.gz	591deefd1393566c24a7e079e18c443a	1207015
-foomatic-20200219-Epson-LP-8400F-eplaser-jp.ppd.gz	5607bd91bf3143038001f8325c0ace8f	1207015
-foomatic-20200219-Epson-LP-8500C-eplaser-jp.ppd.gz	f2773bbe2030a5226610e09e5900d5ed	97255899
-foomatic-20200219-Epson-LP-8500CPD-Postscript-Epson.ppd.gz	5a6dd1188950145f8b0689fd239b5f61	214755
-foomatic-20200219-Epson-LP-8600-eplaser-jp.ppd.gz	68db4d9a447d949ebed581417264f766	1207013
-foomatic-20200219-Epson-LP-8600F-eplaser-jp.ppd.gz	36bd092735b6a282da3ff2ddb5263f62	1207015
-foomatic-20200219-Epson-LP-8700-eplaser-jp.ppd.gz	85dfcd1633d1fb2ee65ab52ad7044e48	1207013
-foomatic-20200219-Epson-LP-8800C-eplaser-jp.ppd.gz	b9460ccea75a0329987bd791b4a71d7c	97255899
-foomatic-20200219-Epson-LP-8800CPS-Postscript-Epson.ppd.gz	891bc3839fddf11065a885f06c46fd69	215218
-foomatic-20200219-Epson-LP-8900-eplaser-jp.ppd.gz	b639d3687f32ebb1b92b4983ee7460f5	1207013
-foomatic-20200219-Epson-LP-9000B-eplaser-jp.ppd.gz	77b789410548f0809cda72c909d42b06	1207015
-foomatic-20200219-Epson-LP-9000C-eplaser-jp.ppd.gz	e226ff36bad1d37a403c0ec7ed9f1a9b	97255899
-foomatic-20200219-Epson-LP-9100-eplaser-jp.ppd.gz	d9742d9a3d61382ccfc41cc2b21efc4e	1207013
-foomatic-20200219-Epson-LP-9100PS3-Postscript-Epson.ppd.gz	eaf2d826ac08c48698cbb498892e6575	315932
-foomatic-20200219-Epson-LP-9200B-eplaser-jp.ppd.gz	0d709daf5d6413d8e5f98a1ce5c090ea	1207015
-foomatic-20200219-Epson-LP-9200C-Postscript-Epson.ppd.gz	f557ced4e8930c21c04794e2bb4e391b	214959
-foomatic-20200219-Epson-LP-9300-eplaser-jp.ppd.gz	d35ae22fd8f1f7d7086fe52a2f2548aa	1207013
-foomatic-20200219-Epson-LP-9400-eplaser-jp.ppd.gz	6da7d65a0391decedf44a92810cb2a9c	1207013
-foomatic-20200219-Epson-LP-9500C-eplaser-jp.ppd.gz	669c77062a6c50f54cf1ea88af0f9990	97255899
-foomatic-20200219-Epson-LP-9500CPS-Postscript-Epson.ppd.gz	bbd1ed78fe999c558c10f80cffb74cad	215278
-foomatic-20200219-Epson-LP-9600-eplaser-jp.ppd.gz	793f0c7669b7f7d4996e5bb512a367a1	1207013
-foomatic-20200219-Epson-LP-9600S-eplaser-jp.ppd.gz	fd56f68717f6a67fb23f761a9cf04548	1207015
-foomatic-20200219-Epson-LP-9600SPD-Postscript-Epson.ppd.gz	0c1eaabeec5c040e7ec543d433cc5cac	227874
-foomatic-20200219-Epson-LP-9800C-Postscript-Epson.ppd.gz	651946e81588331e1f5170848d47ba3d	215150
-foomatic-20200219-Epson-LP-M5000-eplaser-jp.ppd.gz	f2773bbe2030a5226610e09e5900d5ed	97255899
-foomatic-20200219-Epson-LP-M5300-eplaser-jp.ppd.gz	f2773bbe2030a5226610e09e5900d5ed	97255899
-foomatic-20200219-Epson-LP-M6000-eplaser-jp.ppd.gz	f2773bbe2030a5226610e09e5900d5ed	97255899
-foomatic-20200219-Epson-LP-S210-eplaser-jp.ppd.gz	85dfcd1633d1fb2ee65ab52ad7044e48	1207013
-foomatic-20200219-Epson-LP-S3000-eplaser-jp.ppd.gz	85dfcd1633d1fb2ee65ab52ad7044e48	1207013
-foomatic-20200219-Epson-LP-S300-eplaser-jp.ppd.gz	85dfcd1633d1fb2ee65ab52ad7044e48	1207013
-foomatic-20200219-Epson-LP-S310-eplaser-jp.ppd.gz	85dfcd1633d1fb2ee65ab52ad7044e48	1207013
-foomatic-20200219-Epson-LP-S3200-eplaser-jp.ppd.gz	85dfcd1633d1fb2ee65ab52ad7044e48	1207013
-foomatic-20200219-Epson-LP-S3500-eplaser-jp.ppd.gz	85dfcd1633d1fb2ee65ab52ad7044e48	1207013
-foomatic-20200219-Epson-LP-S4000-eplaser-jp.ppd.gz	85dfcd1633d1fb2ee65ab52ad7044e48	1207013
-foomatic-20200219-Epson-LP-S4200-eplaser-jp.ppd.gz	85dfcd1633d1fb2ee65ab52ad7044e48	1207013
-foomatic-20200219-Epson-LP-S4500-eplaser-jp.ppd.gz	bf66a15e9d1c7a19d7740858d02fecc4	1207015
-foomatic-20200219-Epson-LP-S6500-eplaser-jp.ppd.gz	bb5875cf9bea631d7b173bb7ec0cc6fd	97255899
-foomatic-20200219-Epson-LP-S7500-eplaser-jp.ppd.gz	f2773bbe2030a5226610e09e5900d5ed	97255899
-foomatic-20200219-Epson-LP-S8100-eplaser-jp.ppd.gz	4b8bb9ac13c11c56a822b0e28551a79c	97255887
 foomatic-20200219-Epson-LX-300plus-ibmpro.ppd.gz	78745659a39ee928e055f801868449a0	2582
-foomatic-20200219-Epson-MJ_520C-stcolor.ppd.gz	e519eb7ff1d9d6903d201de5f779bf88	101719
 foomatic-20200219-Epson-Stylus_Color_460-stcX.upp.ppd.gz	9a5a1fe58f6647fffc5cf66660a9bbdc	97510
 foomatic-20200219-Epson-Stylus_Color_660-stc600X.upp.ppd.gz	e8647a6b2878133bb81563999a5c75bb	102463
-foomatic-20200219-Epson-Stylus_Color_777-stcanyX.upp.ppd.gz	c82eb0e7606d6af4b35b8544bc4a9cf8	105568
 foomatic-20200219-Epson-Stylus_Color_850-stc800X.upp.ppd.gz	be7642ab625e195994a7f08ba36968f1	101804
 foomatic-20200219-Epson-Stylus_Color_II-stc2X.upp.ppd.gz	398118a179355d46ebd0b92487328705	97399
-foomatic-20200219-Epson-Stylus_Color-stcolor.ppd.gz	e7db23def2a39ac606d8e79c95586b2a	101719
-foomatic-20200219-Fuji_Xerox-DocuPrint_CM305_df-Postscript.ppd.gz	88e32975ac4e10622c5e703a57334d96	214421
 foomatic-20200219-Generic-ESC_P_Dot_Matrix_Printer-lq850.ppd.gz	5fcfb6b2be65506c5cbd5a0b5d02b8d2	55329
 foomatic-20200219-Generic-IBM-Compatible_Dot_Matrix_Printer-ibmpro.ppd.gz	78745659a39ee928e055f801868449a0	2582
-foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-ljet4.ppd.gz	fb086a0b27f0349c5b92f80a04ddc644	25686
-foomatic-20200219-Generic-PostScript_Printer-Postscript.ppd.gz	26bc089fb96ec1e92e5860ab47b6838a	314890
-foomatic-20200219-Gestetner-10512-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547298
-foomatic-20200219-Gestetner-2212-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547278
-foomatic-20200219-Gestetner-3502-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547297
-foomatic-20200219-Gestetner-3532_4235g-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547270
-foomatic-20200219-Gestetner-6002-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547270
-foomatic-20200219-Gestetner-9002-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547298
-foomatic-20200219-Gestetner-C7010-Postscript-Gestetner.ppd.gz	167fae29e9b36533eda5c246b9e64337	316389
-foomatic-20200219-Gestetner-C7116-Postscript-Gestetner.ppd.gz	b911a45790d1c20e1cb86017abbf56b9	316171
-foomatic-20200219-Gestetner-C7425dn-Postscript-Gestetner.ppd.gz	fae0b93b0553d4c0f8a57cbec63437b1	317645
-foomatic-20200219-Gestetner-C7435n-Postscript-Gestetner.ppd.gz	832dd16fac702bb4c405612f04545281	317895
-foomatic-20200219-Gestetner-C7521n-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13172
-foomatic-20200219-Gestetner-C7526dn-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13182
-foomatic-20200219-Gestetner-C7528n-Postscript-Gestetner.ppd.gz	9d1217e5e28150e1d39a35dabe2a61ea	318298
-foomatic-20200219-Gestetner-C7535n-Postscript-Gestetner.ppd.gz	770d620839c16e68ec7ca4b258e5c3ec	318229
-foomatic-20200219-Gestetner-C7640nD-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13224
-foomatic-20200219-Gestetner-C8140ND-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13248
-foomatic-20200219-Gestetner-CS555-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13214
-foomatic-20200219-Gestetner-DSc1020-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-DSc1030-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20200219-Gestetner-DSc1045-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20200219-Gestetner-DSc1060-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20200219-Gestetner-DSc1120-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13230
-foomatic-20200219-Gestetner-DSc1220ex-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-DSc1220-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-DSc1230ex-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-DSc1230-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-DSc1245ex-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-DSc1245-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-DSc1260ex-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-DSc1260-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-DSc224-Postscript-Gestetner.ppd.gz	5c7c402d525c3e0565e84e749cb89df1	316625
-foomatic-20200219-Gestetner-DSc328-Postscript-Gestetner.ppd.gz	cf663131ddf9583fbf07d084a8c79438	318005
-foomatic-20200219-Gestetner-DSc38-Postscript-Gestetner.ppd.gz	7d5c10219e1edd13691129158a144b1c	316715
-foomatic-20200219-Gestetner-DSc38u-Postscript-Gestetner.ppd.gz	af8614a601e5f37796fa12c1796ee3ee	317949
-foomatic-20200219-Gestetner-DSc424-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13195
-foomatic-20200219-Gestetner-DSc428-pxlcolor-Gestetner.ppd.gz	236d5ea35a1a20ce5f0d50c63cf65bd1	4219569
-foomatic-20200219-Gestetner-DSm1525-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13177
-foomatic-20200219-Gestetner-DSm2525-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13224
-foomatic-20200219-Gestetner-DSm2540-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13224
-foomatic-20200219-Gestetner-DSm_2625-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13224
-foomatic-20200219-Gestetner-DSm_2640-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13224
-foomatic-20200219-Gestetner-DSm_2660-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13224
-foomatic-20200219-Gestetner-DSm415-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547204
-foomatic-20200219-Gestetner-DSm615-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547211
-foomatic-20200219-Gestetner-DSm618d-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547231
-foomatic-20200219-Gestetner-DSm618-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547211
-foomatic-20200219-Gestetner-DSm622-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547251
-foomatic-20200219-Gestetner-DSm651-pxlmono-Gestetner.ppd.gz	91b66baf1f8fcd474663381823e5427c	547288
-foomatic-20200219-Gestetner-DSm725-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13132
-foomatic-20200219-Gestetner-DSm735_735G-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13151
-foomatic-20200219-Gestetner-F9199_9199nf-Postscript-Gestetner.ppd.gz	e7732bb5f7a17833c6e55560effb4ae0	226490
-foomatic-20200219-Gestetner-GS1227-Postscript-Gestetner.ppd.gz	a4e2038d15e453370358376aa4fe1156	230735
-foomatic-20200219-Gestetner-GS3020-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-GS3030-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-GS3160-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Gestetner-GWD2004-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13070
-foomatic-20200219-Gestetner-GWD3006-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13070
-foomatic-20200219-Gestetner-GWD5100-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13070
-foomatic-20200219-Gestetner-MP1100_DSm7110-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13176
-foomatic-20200219-Gestetner-MP1600_DSm716-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13094
-foomatic-20200219-Gestetner-MP_161_DSm416-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13087
-foomatic-20200219-Gestetner-MP_171-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13087
-foomatic-20200219-Gestetner-MP2000_DSm721d-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13114
-foomatic-20200219-Gestetner-MP_2001-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13138
-foomatic-20200219-Gestetner-MP2352_DSm923-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13152
-foomatic-20200219-Gestetner-MP2500_DSm625-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13133
-foomatic-20200219-Gestetner-MP_2501-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13138
-foomatic-20200219-Gestetner-MP_2510_DSm725e-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13132
-foomatic-20200219-Gestetner-MP_2550-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13151
-foomatic-20200219-Gestetner-MP_2851-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13151
-foomatic-20200219-Gestetner-MP_301-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13111
-foomatic-20200219-Gestetner-MP_305plus-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13111
-foomatic-20200219-Gestetner-MP3500_DSm735e-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13151
-foomatic-20200219-Gestetner-MP_4000-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13151
-foomatic-20200219-Gestetner-MP_4001-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13151
-foomatic-20200219-Gestetner-MP_4002-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13177
-foomatic-20200219-Gestetner-MP_401SPF-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-foomatic-20200219-Gestetner-MP_402SPF-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-foomatic-20200219-Gestetner-MP5500_DSm755-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13176
-foomatic-20200219-Gestetner-MP_6001-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13200
-foomatic-20200219-Gestetner-MP_6002-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13225
-foomatic-20200219-Gestetner-MP_6503-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13248
-foomatic-20200219-Gestetner-MPC1500_GS106-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13190
-foomatic-20200219-Gestetner-MP_C2050-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13248
-foomatic-20200219-Gestetner-MP_C2051-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13248
-foomatic-20200219-Gestetner-MPC2500_DSc525-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13224
-foomatic-20200219-Gestetner-MP_C2800-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13248
-foomatic-20200219-Gestetner-MP_C3001-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13248
-foomatic-20200219-Gestetner-MP_C3002-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20200219-Gestetner-MP_C300-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13209
-foomatic-20200219-Gestetner-MP_C300SR-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13229
-foomatic-20200219-Gestetner-MP_C305-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13234
-foomatic-20200219-Gestetner-MP_C306Z-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13234
-foomatic-20200219-Gestetner-MP_C307-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13234
-foomatic-20200219-Gestetner-MPC3500_DSc535-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13224
-foomatic-20200219-Gestetner-MP_C4000-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13248
-foomatic-20200219-Gestetner-MP_C401-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13234
-foomatic-20200219-Gestetner-MP_C401SR-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13277
-foomatic-20200219-Gestetner-MP_C4501-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13248
-foomatic-20200219-Gestetner-MP_C4502-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20200219-Gestetner-MP_C6000-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13224
-foomatic-20200219-Gestetner-MP_C6501-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13272
-foomatic-20200219-Gestetner-MP_C6502-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13297
-foomatic-20200219-Gestetner-MP_C6503-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13320
-foomatic-20200219-Gestetner-MP_CW2200-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13175
-foomatic-20200219-Gestetner-MP_CW2201-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13183
-foomatic-20200219-Gestetner-MP_W6700-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13116
-foomatic-20200219-Gestetner-MP_W7100-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13116
-foomatic-20200219-Gestetner-P7026-Postscript-Gestetner.ppd.gz	f51015db09cb7f5f3a75c38eba8194c8	227116
-foomatic-20200219-Gestetner-P7031n-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13107
-foomatic-20200219-Gestetner-P7032-Postscript-Gestetner.ppd.gz	17b0488e0a5767c3398e6b766fae1703	227287
-foomatic-20200219-Gestetner-P7126-Postscript-Gestetner.ppd.gz	fddcea1e9f7c55a8741078e66fff8d6d	227141
-foomatic-20200219-Gestetner-P7132n-Postscript-Gestetner.ppd.gz	1395064bd58f41f879c15d4a2fd13156	228235
-foomatic-20200219-Gestetner-P7145-Postscript-Gestetner.ppd.gz	18da8153888ad5e05983cfe562bc284a	227242
-foomatic-20200219-Gestetner-P7245-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13151
-foomatic-20200219-Gestetner-P7325-Postscript-Gestetner.ppd.gz	6b9d6b5ebee48f4a2477676b0e2a14ba	228060
-foomatic-20200219-Gestetner-P7431cn-Postscript-Gestetner.ppd.gz	bec05167fbdaacc763406fa28ca9e2bf	317356
-foomatic-20200219-Gestetner-P7527-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13100
-foomatic-20200219-Gestetner-P7535n-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13100
-foomatic-20200219-Gestetner-P7575-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13166
-foomatic-20200219-Gestetner-P7675-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13176
-foomatic-20200219-Gestetner-Pro_1106EX-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13176
-foomatic-20200219-Gestetner-Pro_1107EX-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13200
-foomatic-20200219-Gestetner-SP_4210N-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13107
-foomatic-20200219-Gestetner-SP_6330N-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13109
-foomatic-20200219-Gestetner-SP_8200DN-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13151
-foomatic-20200219-Gestetner-SP_C320DN-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13182
-foomatic-20200219-Gestetner-SP_C420DN-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13182
-foomatic-20200219-Gestetner-SP_C430DN-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13182
-foomatic-20200219-Gestetner-SP_W2470-PDF-Gestetner.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13070
+foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-pxlcolor.ppd.gz	68f1a3f86af51919ba8c3e07022b99eb	547087
+foomatic-20200219-Gestetner-10512-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547303
+foomatic-20200219-Gestetner-2212-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547283
+foomatic-20200219-Gestetner-3502-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547302
+foomatic-20200219-Gestetner-3532_4235g-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547275
+foomatic-20200219-Gestetner-6002-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547275
+foomatic-20200219-Gestetner-9002-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547303
+foomatic-20200219-Gestetner-C7521n-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13168
+foomatic-20200219-Gestetner-C7526dn-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13178
+foomatic-20200219-Gestetner-C7640nD-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+foomatic-20200219-Gestetner-C8140ND-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+foomatic-20200219-Gestetner-CS555-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13210
+foomatic-20200219-Gestetner-DSc1020-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-DSc1030-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20200219-Gestetner-DSc1045-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20200219-Gestetner-DSc1060-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20200219-Gestetner-DSc1120-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13226
+foomatic-20200219-Gestetner-DSc1220-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-DSc1220ex-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-DSc1230-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-DSc1230ex-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-DSc1245-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-DSc1245ex-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-DSc1260-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-DSc1260ex-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-DSc424-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13191
+foomatic-20200219-Gestetner-DSc428-pxlcolor-Gestetner.ppd.gz	72874e70ab0ab50ae38d5a7b7902d0b2	4219574
+foomatic-20200219-Gestetner-DSm1525-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13173
+foomatic-20200219-Gestetner-DSm2525-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+foomatic-20200219-Gestetner-DSm2540-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+foomatic-20200219-Gestetner-DSm415-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547209
+foomatic-20200219-Gestetner-DSm615-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547216
+foomatic-20200219-Gestetner-DSm618-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547216
+foomatic-20200219-Gestetner-DSm618d-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547236
+foomatic-20200219-Gestetner-DSm622-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547256
+foomatic-20200219-Gestetner-DSm651-pxlmono-Gestetner.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547293
+foomatic-20200219-Gestetner-DSm725-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13128
+foomatic-20200219-Gestetner-DSm735_735G-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13147
+foomatic-20200219-Gestetner-DSm_2625-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+foomatic-20200219-Gestetner-DSm_2640-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+foomatic-20200219-Gestetner-DSm_2660-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+foomatic-20200219-Gestetner-GS3020-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-GS3030-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-GS3160-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Gestetner-GWD2004-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13066
+foomatic-20200219-Gestetner-GWD3006-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13066
+foomatic-20200219-Gestetner-GWD5100-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13066
+foomatic-20200219-Gestetner-MP1100_DSm7110-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13172
+foomatic-20200219-Gestetner-MP1600_DSm716-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13090
+foomatic-20200219-Gestetner-MP2000_DSm721d-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13110
+foomatic-20200219-Gestetner-MP2352_DSm923-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13148
+foomatic-20200219-Gestetner-MP2500_DSm625-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13129
+foomatic-20200219-Gestetner-MP3500_DSm735e-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13147
+foomatic-20200219-Gestetner-MP5500_DSm755-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13172
+foomatic-20200219-Gestetner-MPC1500_GS106-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13186
+foomatic-20200219-Gestetner-MPC2500_DSc525-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+foomatic-20200219-Gestetner-MPC3500_DSc535-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+foomatic-20200219-Gestetner-MP_161_DSm416-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13083
+foomatic-20200219-Gestetner-MP_171-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13083
+foomatic-20200219-Gestetner-MP_2001-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13134
+foomatic-20200219-Gestetner-MP_2501-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13134
+foomatic-20200219-Gestetner-MP_2510_DSm725e-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13128
+foomatic-20200219-Gestetner-MP_2550-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13147
+foomatic-20200219-Gestetner-MP_2851-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13147
+foomatic-20200219-Gestetner-MP_301-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13107
+foomatic-20200219-Gestetner-MP_305plus-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13107
+foomatic-20200219-Gestetner-MP_4000-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13147
+foomatic-20200219-Gestetner-MP_4001-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13147
+foomatic-20200219-Gestetner-MP_4002-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13173
+foomatic-20200219-Gestetner-MP_401SPF-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+foomatic-20200219-Gestetner-MP_402SPF-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+foomatic-20200219-Gestetner-MP_6001-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13196
+foomatic-20200219-Gestetner-MP_6002-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13221
+foomatic-20200219-Gestetner-MP_6503-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+foomatic-20200219-Gestetner-MP_C2050-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+foomatic-20200219-Gestetner-MP_C2051-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+foomatic-20200219-Gestetner-MP_C2800-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+foomatic-20200219-Gestetner-MP_C300-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13205
+foomatic-20200219-Gestetner-MP_C3001-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+foomatic-20200219-Gestetner-MP_C3002-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20200219-Gestetner-MP_C300SR-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13225
+foomatic-20200219-Gestetner-MP_C305-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13230
+foomatic-20200219-Gestetner-MP_C306Z-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13230
+foomatic-20200219-Gestetner-MP_C307-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13230
+foomatic-20200219-Gestetner-MP_C4000-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+foomatic-20200219-Gestetner-MP_C401-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13230
+foomatic-20200219-Gestetner-MP_C401SR-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13273
+foomatic-20200219-Gestetner-MP_C4501-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+foomatic-20200219-Gestetner-MP_C4502-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20200219-Gestetner-MP_C6000-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+foomatic-20200219-Gestetner-MP_C6501-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13268
+foomatic-20200219-Gestetner-MP_C6502-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13293
+foomatic-20200219-Gestetner-MP_C6503-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13316
+foomatic-20200219-Gestetner-MP_CW2200-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13171
+foomatic-20200219-Gestetner-MP_CW2201-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13179
+foomatic-20200219-Gestetner-MP_W6700-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13112
+foomatic-20200219-Gestetner-MP_W7100-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13112
+foomatic-20200219-Gestetner-P7031n-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13103
+foomatic-20200219-Gestetner-P7245-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13147
+foomatic-20200219-Gestetner-P7527-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13096
+foomatic-20200219-Gestetner-P7535n-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13096
+foomatic-20200219-Gestetner-P7575-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13162
+foomatic-20200219-Gestetner-P7675-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13172
+foomatic-20200219-Gestetner-Pro_1106EX-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13172
+foomatic-20200219-Gestetner-Pro_1107EX-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13196
+foomatic-20200219-Gestetner-SP_4210N-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13103
+foomatic-20200219-Gestetner-SP_6330N-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13105
+foomatic-20200219-Gestetner-SP_8200DN-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13147
+foomatic-20200219-Gestetner-SP_C320DN-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13178
+foomatic-20200219-Gestetner-SP_C420DN-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13178
+foomatic-20200219-Gestetner-SP_C430DN-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13178
+foomatic-20200219-Gestetner-SP_W2470-PDF-Gestetner.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13066
+foomatic-20200219-HP-DesignJet_100plus-cdnj500.ppd.gz	9b13bfd37be8d38eaa65c258ae105738	7433
+foomatic-20200219-HP-OfficeJet_500-cdj550.ppd.gz	c7cf6a394cdcc75ad119d7f1e1d1ad96	55443
 foomatic-20200219-IBM-3853_JetPrinter-jetp3852.ppd.gz	a953e3b57bc165233516f718fa6edf1e	33354
-foomatic-20200219-IBM-4303_Network_Color_Printer-Postscript.ppd.gz	e73bf139a89fbfb3c236bb8ba08f7c18	314674
 foomatic-20200219-Imagen-ImPress-imagen.ppd.gz	fe6d23717e4015853ef2c24bdb15c792	12205
-foomatic-20200219-InfoPrint-Pro_1107EX-pxlmono-InfoPrint.ppd.gz	91b66baf1f8fcd474663381823e5427c	547298
-foomatic-20200219-Infotec-MP_201-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13087
-foomatic-20200219-Infotec-MP_501-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-foomatic-20200219-Infotec-Pro_8100S-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13248
-foomatic-20200219-Infotec-Pro_8110-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20200219-Infotec-Pro_8300S-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13248
-foomatic-20200219-Infotec-Pro_8310-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20200219-Infotec-Pro_8310S-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20200219-Infotec-Pro_C5100S-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13297
-foomatic-20200219-Infotec-Pro_C7100-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13345
-foomatic-20200219-Infotec-Pro_C7100S-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13345
-foomatic-20200219-Infotec-SP_5300-PDF-Infotec.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-foomatic-20200219-Kodak-IS_70_CPII-Postscript.ppd.gz	556c5cf8e84c4a854cd9e3a2fe842ac4	213195
-foomatic-20200219-KONICA_MINOLTA-bizhub_1050eP-Postscript-KONICA_MINOLTA.ppd.gz	14c0ee15c6a3a5583e8900a190c400a6	231336
-foomatic-20200219-KONICA_MINOLTA-bizhub_500-Postscript-KONICA_MINOLTA.ppd.gz	467ec614e2232a5f5f40f4798414742c	228681
-foomatic-20200219-KONICA_MINOLTA-bizhub_C250P-Postscript-KONICA_MINOLTA.ppd.gz	d91307cad1f772ff271587627d4e9a55	318913
-foomatic-20200219-KONICA_MINOLTA-bizhub_C252P-Postscript-KONICA_MINOLTA.ppd.gz	9054891462e576bfe6753e0bf0ed30ee	318913
-foomatic-20200219-KONICA_MINOLTA-bizhub_C351-Postscript-KONICA_MINOLTA.ppd.gz	26394b8ace08489481d1bbb228e77d1d	318930
-foomatic-20200219-KONICA_MINOLTA-bizhub_C352P-Postscript-KONICA_MINOLTA.ppd.gz	5f2abbdd919f8a1cb778b2f52fd202b3	318908
-foomatic-20200219-KONICA_MINOLTA-bizhub_C450P-Postscript-KONICA_MINOLTA.ppd.gz	6dca71c3db6e3d971062a49b686e9e86	318930
-foomatic-20200219-KONICA_MINOLTA-bizhub_C451-Postscript-KONICA_MINOLTA.ppd.gz	776c83eb7099bab90f06ebb53bb05274	318922
-foomatic-20200219-KONICA_MINOLTA-bizhub_C550-Postscript-KONICA_MINOLTA.ppd.gz	bb5b861f954467e4ee4040d45930e348	318777
-foomatic-20200219-Kyocera-Ci-1100-Postscript-Kyocera.ppd.gz	c2548765b6e6d04bf89268cc28d30787	318106
-foomatic-20200219-Kyocera-CS-1650-Postscript-Kyocera.ppd.gz	f82f33fb48e473e5b809b8cced36ae60	228624
-foomatic-20200219-Kyocera-CS-2050-Postscript-Kyocera.ppd.gz	c35afc152ba5c974e825fccbdd1fa7d8	228554
-foomatic-20200219-Kyocera-FS-1030D-Postscript-Kyocera.ppd.gz	06f0c79e77713332a278469b9dde57b2	228081
-foomatic-20200219-Kyocera-FS-1118MFP-Postscript-Kyocera.ppd.gz	718eed31a9d707ed4d1678e8bcf2a76f	227777
-foomatic-20200219-Kyocera-FS-1200-Postscript-Kyocera.ppd.gz	0edf489547116437d2a42d59e65f78cd	228055
-foomatic-20200219-Kyocera-FS-1700plus-Postscript-Kyocera.ppd.gz	c22550c1168e9893b7ff9471910d5bf2	227843
-foomatic-20200219-Kyocera-FS-1700-Postscript-Kyocera.ppd.gz	7df7ef8119bd1a204657fa8b76792572	227845
-foomatic-20200219-Kyocera-FS-1714M-Postscript-Kyocera.ppd.gz	73f42811a27bd784f22003a3f3580d63	228081
-foomatic-20200219-Kyocera-FS-1800-Postscript-Kyocera.ppd.gz	374fb88f54dccd2ce0d797ea07d2ee54	229150
-foomatic-20200219-Kyocera-FS-1900-Postscript-Kyocera.ppd.gz	64ab398c0c04e2f9469a765f4708dda5	228152
-foomatic-20200219-Kyocera-FS-1920-Postscript-Kyocera.ppd.gz	7114dd4a0ac00795fe07c131946220cb	228165
-foomatic-20200219-Kyocera-FS-2000D-Postscript-Kyocera.ppd.gz	d462aa1e0a218c2d889fe7ae278fc688	228298
-foomatic-20200219-Kyocera-FS-3700plus-Postscript-Kyocera.ppd.gz	c3348f24c1818fdbd4d2ba11856b60b1	227843
-foomatic-20200219-Kyocera-FS-3700-Postscript-Kyocera.ppd.gz	7284e30589b140f270a004ef4fd27d2d	227845
-foomatic-20200219-Kyocera-FS-3750-Postscript-Kyocera.ppd.gz	d700c0052ee22a919108fd145a468a12	228082
-foomatic-20200219-Kyocera-FS-3820N-Postscript-Kyocera.ppd.gz	8aae409527f97cb65fe82d0a45819d0e	228165
-foomatic-20200219-Kyocera-FS-3830N-Postscript-Kyocera.ppd.gz	37186169cc7379660053fc4bafcba94c	228165
-foomatic-20200219-Kyocera-FS-3900DN-Postscript-Kyocera.ppd.gz	8778c834750aee540df0ed5c0463b35e	228229
-foomatic-20200219-Kyocera-FS-4000DN-Postscript-Kyocera.ppd.gz	359240e880e4236ad8720987f09aacda	228230
-foomatic-20200219-Kyocera-FS-5800C-Postscript-Kyocera.ppd.gz	d6d4af8d420f2eb7ec5b018f162fb391	318227
-foomatic-20200219-Kyocera-FS-5900C-Postscript-Kyocera.ppd.gz	69c805103df1baa83f9c66588e80ae10	318106
-foomatic-20200219-Kyocera-FS-600-Postscript-Kyocera.ppd.gz	5a3555e4ecf145ae2ab9815577d5009f	227517
-foomatic-20200219-Kyocera-FS-6020-Postscript-Kyocera.ppd.gz	fdae008d478bdcd19348161c41584716	229087
-foomatic-20200219-Kyocera-FS-6026-Postscript-Kyocera.ppd.gz	257633d5e472d323ecc77441d99bac65	228089
-foomatic-20200219-Kyocera-FS-6300-Postscript-Kyocera.ppd.gz	d8006ef6d8e958f0c000e07480617823	227919
-foomatic-20200219-Kyocera-FS-6500plus-Postscript-Kyocera.ppd.gz	9f1725fb67613aabf3b9dd4dc040c4ae	188529
-foomatic-20200219-Kyocera-FS-6700-Postscript-Kyocera.ppd.gz	c6eaf3280ba5577173ec71cfa548ccdc	228846
-foomatic-20200219-Kyocera-FS-6750-Postscript-Kyocera.ppd.gz	196e46cd2aaf843713935d28412a8d18	229090
-foomatic-20200219-Kyocera-FS-680-Postscript-Kyocera.ppd.gz	643cf3b0c1cfda46e09d170e5ce59e23	227517
-foomatic-20200219-Kyocera-FS-6900-Postscript-Kyocera.ppd.gz	8534e8075210be1e5ed35d9b6749e101	228917
-foomatic-20200219-Kyocera-FS-6950DN-Postscript-Kyocera.ppd.gz	8021002d3320c193f532232c2a6516c0	228276
-foomatic-20200219-Kyocera-FS-7000-Postscript-Kyocera.ppd.gz	d579df69a75ed6449696924dd6c83fec	229423
-foomatic-20200219-Kyocera-FS-7028M-Postscript-Kyocera.ppd.gz	a6c82876a12e2002a290aa1d24e8d7b7	229493
-foomatic-20200219-Kyocera-FS-8000C-Postscript-Kyocera.ppd.gz	3e94be1b0770df41af1719f529249125	318499
-foomatic-20200219-Kyocera-FS-9000-Postscript-Kyocera.ppd.gz	48dd493e3fb2f5ca6a7ec16ebf1a59c9	229494
-foomatic-20200219-Kyocera-FS-9100DN-Postscript-Kyocera.ppd.gz	341a23928b3fc13a4f002e025b3a2605	229578
-foomatic-20200219-Kyocera-FS-920-Postscript-Kyocera.ppd.gz	d729f6cf4118027e4bbb33e0f3cbbec4	227535
-foomatic-20200219-Kyocera-FS-C5015N-Postscript-Kyocera.ppd.gz	2526428c47b0b4ebd2f7f9c126de707c	317267
-foomatic-20200219-Kyocera-FS-C5016N-Postscript-Kyocera.ppd.gz	54f0bed3cfe6dd6f7ce421fa7c3a420c	316904
-foomatic-20200219-Kyocera-FS-C5020N-Postscript-Kyocera.ppd.gz	a74578bcc8dd68abb812d07264f99787	317306
-foomatic-20200219-Kyocera-FS-C5025N-Postscript-Kyocera.ppd.gz	9e6623591e905822ba330b07dcf6dbab	317335
-foomatic-20200219-Kyocera-FS-C5030N-Postscript-Kyocera.ppd.gz	c5e082f2338e00a71eb984c262b26775	317306
-foomatic-20200219-Kyocera-FS-C8008N-Postscript-Kyocera.ppd.gz	de648cde99529d0d0a5a366d3655fcdb	317344
-foomatic-20200219-Kyocera-FS-C8026N-Postscript-Kyocera.ppd.gz	101a6f577d452a0da702ef4dcb728225	317710
-foomatic-20200219-Kyocera-FS-C8100DNplus_KPDL-Postscript-Kyocera.ppd.gz	e12fab90db060675e3df8382b129cd41	318052
-foomatic-20200219-Kyocera-FS-C8100DN-Postscript-Kyocera.ppd.gz	d013fa31a6f6f9ae3e576cb266fbb3cb	318041
-foomatic-20200219-Kyocera-KM-1510-Postscript-Kyocera.ppd.gz	e3bfd79330e9a5f057f132cd5de50476	227890
-foomatic-20200219-Kyocera-KM-1530-Postscript-Kyocera.ppd.gz	6eac308b20bcf74cc43e3b113d61c034	227956
-foomatic-20200219-Kyocera-KM-1810-Postscript-Kyocera.ppd.gz	9d12f8b826b7a278e099449f532a1e76	227890
-foomatic-20200219-Kyocera-KM-1820-Postscript-Kyocera.ppd.gz	6e479bc2013fffb47b50e275d1260955	228028
-foomatic-20200219-Kyocera-KM-2030-Postscript-Kyocera.ppd.gz	5790a7e75b0e41bf62b311bfa5ff781a	228240
-foomatic-20200219-Kyocera-KM-3050-Postscript-Kyocera.ppd.gz	960924839d48c5c3937450d548f146eb	229287
-foomatic-20200219-Kyocera-KM-4230_5230-Postscript-Kyocera.ppd.gz	1eefa64d655f80c688f664eef4b9393c	227779
-foomatic-20200219-Kyocera-KM-4530-Postscript-Kyocera.ppd.gz	9c2ebcdb7a52aba8b0791ce39a00976f	230420
-foomatic-20200219-Kyocera-KM-5530-Postscript-Kyocera.ppd.gz	01f6b881efd94a36b19fd7573e6134be	230486
-foomatic-20200219-Kyocera-KM-6030-Postscript-Kyocera.ppd.gz	50f49ca69915b51d6353e2e6b1820885	229383
-foomatic-20200219-Kyocera-KM-6230-Postscript-Kyocera.ppd.gz	0595c92346c338f9f86d68f3aa406b2a	227823
-foomatic-20200219-Kyocera-KM-6330-Postscript-Kyocera.ppd.gz	9af33e307795dcb9b8bc2cd01f9cd842	230486
-foomatic-20200219-Kyocera-KM-C2520-Postscript-Kyocera.ppd.gz	5bb53c344a7bf2de18f4165a7a18c37e	318112
-foomatic-20200219-Kyocera-KM-C2630-Postscript-Kyocera.ppd.gz	993f4e0fa577c1150bb094128a7ca04b	317710
-foomatic-20200219-Kyocera-KM-C830-Postscript-Kyocera.ppd.gz	63557a6d18f2ffef9477586093979cc4	319338
-foomatic-20200219-Kyocera-KM-C850-Postscript-Kyocera.ppd.gz	a8f9a8240943aa114043dc3317ed8d0a	317949
-foomatic-20200219-Lanier-MP_C501-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Lanier-P_501-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-foomatic-20200219-Lanier-SG3110DN-pxlcolor-Lanier.ppd.gz	500c2a242ebf24822951f11b2d0ac64c	1069999
-foomatic-20200219-Lanier-SG3110SFNw-pxlcolor-Lanier.ppd.gz	500c2a242ebf24822951f11b2d0ac64c	1069999
-foomatic-20200219-Lanier-SP_3400N-Postscript-Lanier.ppd.gz	6b291fe1b772273bd3e20f7f663d5a43	227295
-foomatic-20200219-Lanier-SP_3410DN-Postscript-Lanier.ppd.gz	61305103da93c7a02da1c9c2ab64ccd9	227396
-foomatic-20200219-Lanier-SP_3600DN-Postscript-Lanier.ppd.gz	f9883f127e61ac429536dbe01c7a1951	228477
-foomatic-20200219-Lanier-SP_4310N-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13108
-foomatic-20200219-Lanier-SP_4510DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-foomatic-20200219-Lanier-SP_5200DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13108
-foomatic-20200219-Lanier-SP_5200S-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13108
-foomatic-20200219-Lanier-SP_5210SF-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-foomatic-20200219-Lanier-SP_5210SR-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13128
-foomatic-20200219-Lanier-SP_6430DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13158
-foomatic-20200219-Lanier-SP_8300DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13177
-foomatic-20200219-Lanier-SP_8400DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13224
-foomatic-20200219-Lanier-SP_C221N-Postscript-Lanier.ppd.gz	6cbad4388fd668d9576260147087c098	317078
-foomatic-20200219-Lanier-SP_C222DN-Postscript-Lanier.ppd.gz	98ef4148f24490833392aff83bf1b57a	317179
-foomatic-20200219-Lanier-SP_C222SF-Postscript-Lanier.ppd.gz	39c85117e29364ed7f7964efc28b996d	317431
-foomatic-20200219-Lanier-SP_C232SF-Postscript-Lanier.ppd.gz	caeb839375748fa19a2b2f5110e66038	317430
-foomatic-20200219-Lanier-SP_C311N-Postscript-Lanier.ppd.gz	e845e9aaa1c8516de740bed53ed81bc4	317078
-foomatic-20200219-Lanier-SP_C312DN-Postscript-Lanier.ppd.gz	00117c953d66e7d20a8806a4c5e72db7	317180
-foomatic-20200219-Lanier-SP_C340DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13208
-foomatic-20200219-Lanier-SP_C342DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13207
-foomatic-20200219-Lanier-SP_C352DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13207
-foomatic-20200219-Lanier-SP_C360DNw-Postscript-Lanier.ppd.gz	82c5bcea04b3dd3ea8ebbe386b2b3baf	319380
-foomatic-20200219-Lanier-SP_C360SFNw-Postscript-Lanier.ppd.gz	c3e01595c1fc90e3dc31a3bd22045e39	320084
-foomatic-20200219-Lanier-SP_C435DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13207
-foomatic-20200219-Lanier-SP_C730DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13207
-foomatic-20200219-Lanier-SP_C830DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-foomatic-20200219-Lanier-SP_C840DN-PDF-Lanier.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-foomatic-20200219-Lexmark-1020_Business-pcl3.ppd.gz	60bec2a476b5a05dec2489d146c688e0	16830
-foomatic-20200219-Lexmark-4039_10plus-Postscript.ppd.gz	57b744f02de0ef89202742c606b56995	226266
-foomatic-20200219-Lexmark-C2132-Postscript-Lexmark.ppd.gz	12bcd0ae4f4c2c193b52c171311fda54	702110
-foomatic-20200219-Lexmark-C500n-Postscript.ppd.gz	8831d3a0e99ed9d815493a8f61023e8b	314890
-foomatic-20200219-Lexmark-C510b-Postscript.ppd.gz	1d440d0552ea639efacc33baca91389f	226266
-foomatic-20200219-Lexmark-C510-Postscript-Lexmark.ppd.gz	13ee6a757a50951da7701b7bc9725ede	215204
-foomatic-20200219-Lexmark-C520-Postscript-Lexmark.ppd.gz	9643daaa38a4cc2b278ae42cedb7530a	701728
-foomatic-20200219-Lexmark-C522-Postscript-Lexmark.ppd.gz	24ece6074e449ccf254fbfd079ec6640	701870
-foomatic-20200219-Lexmark-C524-Postscript-Lexmark.ppd.gz	806bf2e3485004526fef573e94ab8ec1	702056
-foomatic-20200219-Lexmark-C540-Postscript-Lexmark.ppd.gz	35545425d8af0ed866074e26c820b4c7	701860
-foomatic-20200219-Lexmark-C543-Postscript-Lexmark.ppd.gz	3fc10624f58e3382242cd49d39c225dd	701857
-foomatic-20200219-Lexmark-C544-Postscript-Lexmark.ppd.gz	d38dabb4318a85ba467358b1b4ec9af9	702004
-foomatic-20200219-Lexmark-C546-Postscript-Lexmark.ppd.gz	ba7ef2e3828c2a212ac3e6351ab757a7	702004
-foomatic-20200219-Lexmark-C720n-pxlcolor.ppd.gz	c8ca478ff81032962259cf3139263b07	4219348
-foomatic-20200219-Lexmark-C734-Postscript-Lexmark.ppd.gz	a27152761ac584bf9505ea1a11c1da6a	701970
-foomatic-20200219-Lexmark-C736-Postscript-Lexmark.ppd.gz	a27152761ac584bf9505ea1a11c1da6a	701970
-foomatic-20200219-Lexmark-C750-Postscript-Lexmark.ppd.gz	4ee0a07949c55d4856927cd798ac979d	214950
-foomatic-20200219-Lexmark-C752-Postscript-Lexmark.ppd.gz	6983d7d8d31c3293dd008bb9c54677f4	215727
-foomatic-20200219-Lexmark-C780-Postscript-Lexmark.ppd.gz	b8a635474ae63ea3327bf2ed4dedd3f6	702590
-foomatic-20200219-Lexmark-C782-Postscript-Lexmark.ppd.gz	861e5e2822020ec75fafb100b6f5b6ab	702967
-foomatic-20200219-Lexmark-C910-Postscript-Lexmark.ppd.gz	77c084462f3d808acb90421eb35213c2	214875
-foomatic-20200219-Lexmark-C912-Postscript-Lexmark.ppd.gz	9690bf76174e9cb3a996d95c38012c09	214875
-foomatic-20200219-Lexmark-C930-Postscript-Lexmark.ppd.gz	aaabd2a449be48a322c82495b6ae71d7	318007
-foomatic-20200219-Lexmark-C935-Postscript-Lexmark.ppd.gz	aaabd2a449be48a322c82495b6ae71d7	318007
-foomatic-20200219-Lexmark-E238-pxlmono.ppd.gz	31f86c4b6f008fd3ff7d22b7c2b69d82	547082
-foomatic-20200219-Lexmark-E260dn-Postscript-Lexmark.ppd.gz	bbd704f5f9f98c31fee8386e4594b029	316071
-foomatic-20200219-Lexmark-E350d-Postscript-Lexmark.ppd.gz	772f4a43760208b31ea0cd4699f044d9	227142
-foomatic-20200219-Lexmark-E360dn-Postscript-Lexmark.ppd.gz	3fc6a33653b6a1e68f39cc4a88d131db	316143
-foomatic-20200219-Lexmark-EG460dn-Postscript-Lexmark.ppd.gz	9b89606caa11a9543cba84ccbad701a8	316143
-foomatic-20200219-Lexmark-Optra_Color_1200-Postscript.ppd.gz	8204390b36f8a8115193befac67ca31c	314890
-foomatic-20200219-Lexmark-T650-Postscript-Lexmark.ppd.gz	bc92f637a4cdee57687ccdf16cd8c165	316611
-foomatic-20200219-Lexmark-T656-Postscript-Lexmark.ppd.gz	01b9509f2023c7c137f96af3415ed269	316535
-foomatic-20200219-Lexmark-W850-Postscript-Lexmark.ppd.gz	69a2dda542253cf3999b3098ae0fc4fd	316522
-foomatic-20200219-Lexmark-X203n-Postscript-Lexmark.ppd.gz	7e699038daf6acfcc0433eac418390f2	315358
-foomatic-20200219-Lexmark-X264dn-Postscript-Lexmark.ppd.gz	9749c2d391022a52328556f4aae784b6	315917
-foomatic-20200219-Lexmark-X363dn-Postscript-Lexmark.ppd.gz	a6b6002e0d0e98f0cacdc51a1a2b61b5	315917
-foomatic-20200219-Lexmark-X463de-Postscript-Lexmark.ppd.gz	d7ba5f37aacc909c5985398b61c4e13e	315915
-foomatic-20200219-Lexmark-X543-Postscript-Lexmark.ppd.gz	805d04879efb976f9910c0740a0364d8	701857
-foomatic-20200219-Lexmark-X544-Postscript-Lexmark.ppd.gz	2cb0914f2b7cebf61f8e658a23ea6fad	702004
-foomatic-20200219-Lexmark-X546-Postscript-Lexmark.ppd.gz	0f300db1fb93e0f6c0363b9a2648a089	702004
-foomatic-20200219-Lexmark-X734de-Postscript-Lexmark.ppd.gz	cd5b14c7dc42ad7f0c08852ec62e6bb5	701900
-foomatic-20200219-Lexmark-X860de-Postscript-Lexmark.ppd.gz	545bbfeb6a3628a2d835aaa4f0564506	316600
-foomatic-20200219-Lexmark-X940e-Postscript-Lexmark.ppd.gz	28dae34d83dcfa0cc382ec761d7f8c36	318007
-foomatic-20200219-Minolta-magicolor_3100-Postscript.ppd.gz	b8cf3d0d02dd189a2d96c5e7fe69e2f8	314674
-foomatic-20200219-Minolta-PagePro_8L-ljet2p.ppd.gz	bd390582ef0a7b857357df08f6da7120	37090
-foomatic-20200219-NEC-P2X-necp2xX.upp.ppd.gz	9a38f13e0c9cd622441aeb83f0bb2702	55773
+foomatic-20200219-InfoPrint-Pro_1107EX-pxlmono-InfoPrint.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547303
+foomatic-20200219-Infotec-MP_201-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13083
+foomatic-20200219-Infotec-MP_501-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+foomatic-20200219-Infotec-Pro_8100S-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+foomatic-20200219-Infotec-Pro_8110-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20200219-Infotec-Pro_8300S-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+foomatic-20200219-Infotec-Pro_8310-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20200219-Infotec-Pro_8310S-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20200219-Infotec-Pro_C5100S-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13293
+foomatic-20200219-Infotec-Pro_C7100-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13341
+foomatic-20200219-Infotec-Pro_C7100S-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13341
+foomatic-20200219-Infotec-SP_5300-PDF-Infotec.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+foomatic-20200219-Lanier-MP_C501-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Lanier-P_501-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+foomatic-20200219-Lanier-SG3110DN-pxlcolor-Lanier.ppd.gz	11453589b3e3b0d08a79c35679d68435	1070004
+foomatic-20200219-Lanier-SG3110SFNw-pxlcolor-Lanier.ppd.gz	11453589b3e3b0d08a79c35679d68435	1070004
+foomatic-20200219-Lanier-SP_4310N-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13104
+foomatic-20200219-Lanier-SP_4510DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+foomatic-20200219-Lanier-SP_5200DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13104
+foomatic-20200219-Lanier-SP_5200S-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13104
+foomatic-20200219-Lanier-SP_5210SF-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+foomatic-20200219-Lanier-SP_5210SR-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13124
+foomatic-20200219-Lanier-SP_6430DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13154
+foomatic-20200219-Lanier-SP_8300DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13173
+foomatic-20200219-Lanier-SP_8400DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+foomatic-20200219-Lanier-SP_C340DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13204
+foomatic-20200219-Lanier-SP_C342DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13203
+foomatic-20200219-Lanier-SP_C352DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13203
+foomatic-20200219-Lanier-SP_C435DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13203
+foomatic-20200219-Lanier-SP_C730DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13203
+foomatic-20200219-Lanier-SP_C830DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+foomatic-20200219-Lanier-SP_C840DN-PDF-Lanier.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+foomatic-20200219-Lexmark-C720n-pxlcolor.ppd.gz	65a4243340efda05e7822131db44326f	4219353
+foomatic-20200219-Lexmark-E238-pxlmono.ppd.gz	68f1a3f86af51919ba8c3e07022b99eb	547087
 foomatic-20200219-NEC-PICTY180-picty180.ppd.gz	4bac86664cd5240dae29298e1dc7947d	48071
-foomatic-20200219-NEC-Pinwriter_P20-necp6.ppd.gz	53e6bfc9886476f787b905504b51200f	55329
 foomatic-20200219-NEC-PinWriter_P6-necp6.ppd.gz	53e6bfc9886476f787b905504b51200f	55329
-foomatic-20200219-NEC-SuperScript_650C-pcl3.ppd.gz	60bec2a476b5a05dec2489d146c688e0	16830
-foomatic-20200219-NRG-SP_3500N-Postscript-NRG.ppd.gz	8df787dae4d2d96e2725a1f2ba3c87eb	227411
-foomatic-20200219-NRG-SP_3510DN-Postscript-NRG.ppd.gz	a20b4e297e8153f491e3ca98049d1b74	227512
-foomatic-20200219-NRG-SP_C242DN-Postscript-NRG.ppd.gz	662951dd07316046353de093039641ed	317971
-foomatic-20200219-Oce-3145PS-Postscript2-Oce.ppd.gz	68e0ef655f12cee183ede7c54e3aa18b	226352
-foomatic-20200219-Oce-8445PS-Postscript2-Oce.ppd.gz	6d22ec649d0e1ee2a25c23f64036368f	226324
-foomatic-20200219-Oce-9050-oce9050.ppd.gz	4a4eb5b3bd73b975a62db814405f6db2	11728
-foomatic-20200219-Oce-9230-Postscript2-Oce.ppd.gz	d14d62738f9aeefb360ccdbc6eca59e8	226323
-foomatic-20200219-Oce-9260-Postscript2-Oce.ppd.gz	f8cb6df3fd92b75ba89994ac541a3eb3	226323
-foomatic-20200219-Oce-PPC3073PS-Postscript-Oce.ppd.gz	61dea7402470468cff46221c300e9a33	226403
-foomatic-20200219-Oce-PPC3074PS-Postscript-Oce.ppd.gz	61dea7402470468cff46221c300e9a33	226403
-foomatic-20200219-Oce-PPC5115PS-Postscript-Oce.ppd.gz	4a12001007598c15b51c21fee8a43e13	226521
-foomatic-20200219-Oce-VarioPrint_2045PS-Postscript-Oce.ppd.gz	a21c62c1e5b2edd9414580f1702928ba	226389
-foomatic-20200219-Oce-VarioPrint_2090PS-Postscript-Oce.ppd.gz	7ad5239f67bbf9a104d3039e3467085b	315336
-foomatic-20200219-Oce-VarioPrint_2100PS-Postscript-Oce.ppd.gz	e3a9a7ae591cee3149bddc169f319703	315838
-foomatic-20200219-Oce-VarioPrint_2105PS-Postscript-Oce.ppd.gz	1a25a8ac69b02f63810b44e106a0a59b	226412
-foomatic-20200219-Oki-14i-Postscript-Oki.ppd.gz	2c3e971a0c7125269d161b1befcb5b29	227324
-foomatic-20200219-Oki-B4350-Postscript-Oki.ppd.gz	78684972f0212395993dc1849f216787	228243
-foomatic-20200219-Oki-C5400-Postscript-Oki.ppd.gz	6ef0e3ff0c484cbfeb4098063fc328a8	319423
-foomatic-20200219-Oki-C5700-Postscript-Oki.ppd.gz	e5a774f7173a91848cfc7988ddc90bcd	219257
-foomatic-20200219-Oki-C5900-Postscript-Oki.ppd.gz	341403bd99e81744c32d458c87df319a	219256
-foomatic-20200219-Oki-C6100-Postscript-Oki.ppd.gz	6e4e671144cab6037e5812c509176207	219260
-foomatic-20200219-Oki-C8800-Postscript-Oki.ppd.gz	e1efc58e68858e5438dcbbfbc21968e2	219182
-foomatic-20200219-Oki-C9600-Postscript-Oki.ppd.gz	efbc3ffb950f1f7424ea65c4485ee15b	219168
-foomatic-20200219-Oki-Microline_182-oki182.ppd.gz	797544af5574b76491fba7b5234d04b0	10306
+foomatic-20200219-NEC-Pinwriter_P20-necp6.ppd.gz	53e6bfc9886476f787b905504b51200f	55329
 foomatic-20200219-Oki-ML_320-okiibm.ppd.gz	13a6f50c719398dade53d64e1bb5c979	6235
-foomatic-20200219-Oki-Okijet_2500-cdj550.ppd.gz	c7cf6a394cdcc75ad119d7f1e1d1ad96	55443
-foomatic-20200219-Oki-OL410e-ljet4.ppd.gz	e5cbf1b01ed8e6518ae5dc72abdf207a	10792
-foomatic-20200219-Olivetti-JP350S-laserjet.ppd.gz	21ef470512223260bbbb3af88f81e138	62901
-foomatic-20200219-Olivetti-JP450-djet500.ppd.gz	bd4661f9ff56b214454f3b767429ddaf	11153
+foomatic-20200219-Oki-Microline_182-oki182.ppd.gz	797544af5574b76491fba7b5234d04b0	10306
 foomatic-20200219-Panasonic-KX-P1150-eps9high.ppd.gz	29b0c1b33a24cf4bb73263bde705c9d3	7745
 foomatic-20200219-Ricoh-4081-r4081.ppd.gz	4097fbf6adc6bc9af81056fde458fb2d	212172
-foomatic-20200219-Ricoh-ColorLaser_AP828-Postscript-Ricoh.ppd.gz	4213f30dc17f052e3a0ea45360f50290	316413
-foomatic-20200219-Ricoh-DDP_70-Postscript-Ricoh.ppd.gz	912a05c0bd1d9fa56ebcb45cee044376	227953
-foomatic-20200219-Ricoh-DDP_92-Postscript-Ricoh.ppd.gz	90d5e99e1e70d5ec296e4013c78a9fc5	227952
-foomatic-20200219-Ricoh-EMP_156-Postscript-Ricoh.ppd.gz	0adeb352dcf7bcfdbf77d6999d6d5807	227178
-foomatic-20200219-Ricoh-GX_3050N-pxlcolor-Ricoh.ppd.gz	500c2a242ebf24822951f11b2d0ac64c	1069974
-foomatic-20200219-Ricoh-GX_3050SFN-pxlcolor-Ricoh.ppd.gz	500c2a242ebf24822951f11b2d0ac64c	1069974
-foomatic-20200219-Ricoh-GX_E3350N-pxlcolor-Ricoh.ppd.gz	500c2a242ebf24822951f11b2d0ac64c	1069974
-foomatic-20200219-Ricoh-GX_E5550N-pxlcolor-Ricoh.ppd.gz	500c2a242ebf24822951f11b2d0ac64c	1069999
-foomatic-20200219-Ricoh-SP_2300L-pcl5-Ricoh.ppd.gz	82bef2987c720ba9a931ca2c627f49e6	571835
-foomatic-20200219-Ricoh-SP_330DN-Postscript-Ricoh.ppd.gz	99c45e56dd1ebdef49ecafb434147713	227764
-foomatic-20200219-Ricoh-SP_330SFN-Postscript-Ricoh.ppd.gz	1cb77d7884e4107d1611eaa40cb9b0f0	227764
-foomatic-20200219-Ricoh-SP_3700-Postscript-Ricoh.ppd.gz	011d34095137c630ea6eb6fd16649edf	227764
-foomatic-20200219-Ricoh-SP_3700SF-Postscript-Ricoh.ppd.gz	24c6b6c6387cfcad7fc76a33cb52f89a	227764
-foomatic-20200219-Ricoh-SP_400DN-Postscript-Ricoh.ppd.gz	2ef32603d067730d039076b5a436e70c	228736
-foomatic-20200219-Ricoh-SP_450DN-PDF-Ricoh.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-foomatic-20200219-Ricoh-SP_C250DN-Postscript-Ricoh.ppd.gz	d08533556b31c9f23617ee33ede11bfc	317971
-foomatic-20200219-Ricoh-SP_C250SF-Postscript-Ricoh.ppd.gz	8c50dcb92f1240fad2e9f99db0024d23	317971
-foomatic-20200219-Ricoh-SP_C261DNw-Postscript-Ricoh.ppd.gz	35228113858a1db5c0dd0450e97a2f63	317971
-foomatic-20200219-Samsung-C140x-Postscript-Samsung.ppd.gz	7deca5b20e5b0021bef125ad0d8afc2e	315231
-foomatic-20200219-Samsung-C2620-Postscript-Samsung.ppd.gz	8e5c7162fdd05dd88f7f09831f946832	315555
-foomatic-20200219-Samsung-C2670-Postscript-Samsung.ppd.gz	a3697279cf698f6aa73739cee731d102	315730
-foomatic-20200219-Samsung-C460-Postscript-Samsung.ppd.gz	6c1067959d52160357cd3e21e9ae3f6f	315170
-foomatic-20200219-Samsung-C4820-Postscript-Samsung.ppd.gz	2088794b1c5c80951c1854ebe29c0ac1	315656
-foomatic-20200219-Samsung-C48x-Postscript-Samsung.ppd.gz	2017b9be9a126162074f901f0f657590	315201
-foomatic-20200219-Samsung-CLP-350-Postscript-Samsung.ppd.gz	b65027dc9ead6980ebc6504abc9e10fa	314687
-foomatic-20200219-Samsung-CLP-410-Postscript-Samsung.ppd.gz	5f4c6b9ecf1034ff3af2202c56031ab8	315200
-foomatic-20200219-Samsung-CLP-610-pxlcolor.ppd.gz	c8ca478ff81032962259cf3139263b07	4219348
-foomatic-20200219-Samsung-CLP-660-Postscript-Samsung.ppd.gz	8ad720441e572d9941e862b7db617a24	315138
-foomatic-20200219-Samsung-CLP-670-Postscript-Samsung.ppd.gz	417fbb2faa358bdc0aa952c472f68457	315320
-foomatic-20200219-Samsung-CLP-680-Postscript-Samsung.ppd.gz	f51b727362810dfee1e757d2b658824b	315449
-foomatic-20200219-Samsung-CLP-770-Postscript-Samsung.ppd.gz	162c795f4c660ca51470f34098520f29	315351
-foomatic-20200219-Samsung-CLP-775-Postscript-Samsung.ppd.gz	0a640299100a5b6066300d2bd78c4437	315366
-foomatic-20200219-Samsung-CLX-3300-Postscript-Samsung.ppd.gz	751975f7cb084bedd77004dbe1e6a666	315170
-foomatic-20200219-Samsung-CLX-6200-Postscript-Samsung.ppd.gz	b9cb616e3bc0e3d2f92811ba0647edba	315051
-foomatic-20200219-Samsung-CLX-6220-Postscript-Samsung.ppd.gz	c7867e80e97d5c6d53b55bb9a59ecb65	315271
-foomatic-20200219-Samsung-CLX-6250-Postscript-Samsung.ppd.gz	13a29a967b479c41dece3de5cd3123b6	315285
-foomatic-20200219-Samsung-CLX-8380-Postscript-Samsung.ppd.gz	4052d1d27cdca1086144c9fb4bcbd2ad	315376
-foomatic-20200219-Samsung-CLX-8385-Postscript-Samsung.ppd.gz	3a62213d5dba5c8ce18b2247e1ad9302	315409
-foomatic-20200219-Samsung-CLX-8640_8650-Postscript-Samsung.ppd.gz	802436a8e165eb27b866f4a700b3d976	315725
-foomatic-20200219-Samsung-CLX-9250_9350-Postscript-Samsung.ppd.gz	61268744f6d39fa68544551df35070a3	315237
-foomatic-20200219-Samsung-CLX-9252_9352-Postscript-Samsung.ppd.gz	b8259968992870c4be1ad81ab85b07ec	315829
-foomatic-20200219-Samsung-CLX-92x1_93x1-Postscript-Samsung.ppd.gz	66661f9eda43fa81c092587d92199a0d	315661
-foomatic-20200219-Samsung-K3250-Postscript-Samsung.ppd.gz	ace29ce3ac553672952d1a3fae29f38b	226895
-foomatic-20200219-Samsung-K401-Postscript-Samsung.ppd.gz	aa7f7462c7087085aa2d01238789058b	226826
-foomatic-20200219-Samsung-K703-Postscript-Samsung.ppd.gz	f2b113919f410ffed7f294adea1fded1	226926
-foomatic-20200219-Samsung-K7600-Postscript-Samsung.ppd.gz	a72829328682a4bf1090b96b5618fac7	226926
-foomatic-20200219-Samsung-M337x_387x_407x-Postscript-Samsung.ppd.gz	743b25e821f036267f1bd4477384afc5	226496
-foomatic-20200219-Samsung-M403x-Postscript-Samsung.ppd.gz	0eca68229825cb434c71680abefc2271	226473
-foomatic-20200219-Samsung-M408x-Postscript-Samsung.ppd.gz	105d0479bd590a863f16c128835b1caf	226533
-foomatic-20200219-Samsung-M4370_5370-Postscript-Samsung.ppd.gz	031a2e6969c5354446160236f5b7180f	226831
-foomatic-20200219-Samsung-M453x-Postscript-Samsung.ppd.gz	62522f194dd1201cda292c24919b1655	226724
-foomatic-20200219-Samsung-M458x-Postscript-Samsung.ppd.gz	aa810e61c2f478047f55d07c7c0ed69b	226649
-foomatic-20200219-Samsung-M5270-Postscript-Samsung.ppd.gz	4bac40b2a509605ee4a68717f247a641	226831
-foomatic-20200219-Samsung-ML-1650-ljet4.ppd.gz	fb086a0b27f0349c5b92f80a04ddc644	25725
-foomatic-20200219-Samsung-ML-2150-Postscript-Samsung.ppd.gz	801ad212d76e2a3909c0d0f73aba9558	226099
-foomatic-20200219-Samsung-ML-2550-Postscript-Samsung.ppd.gz	9ab1c4f22055e8fea63730fe7c2e00da	226099
-foomatic-20200219-Samsung-ML-2570-Postscript-Samsung.ppd.gz	47d24f788a7d73cfff6b769680b40647	225891
-foomatic-20200219-Samsung-ML-2850-Postscript-Samsung.ppd.gz	f98517411a5d112fe8c56ab665789d74	226129
-foomatic-20200219-Samsung-ML-2855-Postscript-Samsung.ppd.gz	a9ddf23c7fd0e1f0a34845f9ab7243f0	226316
-foomatic-20200219-Samsung-ML-3470-Postscript-Samsung.ppd.gz	cd66ad79915d51b2263e49cf0fecd23d	226186
-foomatic-20200219-Samsung-ML-371x-Postscript-Samsung.ppd.gz	ac0ce9a129f148ae395ece3ec18ec693	226422
-foomatic-20200219-Samsung-ML-4050-Postscript-Samsung.ppd.gz	9323b34924824b4ac75eb3948c53a761	226110
-foomatic-20200219-Samsung-ML-4055-Postscript-Samsung.ppd.gz	87f64bb4ccb5fb952f08d9eeb87a4b47	226110
-foomatic-20200219-Samsung-ML-451x_501x-Postscript-Samsung.ppd.gz	37ac14346b9882ca3702c232f825366f	226423
-foomatic-20200219-Samsung-ML-4550-Postscript-Samsung.ppd.gz	47d174b9c692d00685e47176cf8848d1	225991
-foomatic-20200219-Samsung-ML-4555-Postscript-Samsung.ppd.gz	9c21973b07254376f91ae681dc415def	225991
-foomatic-20200219-Samsung-ML-551x_651x-Postscript-Samsung.ppd.gz	8b68a032c9e250c5600e8965011c6c58	226432
-foomatic-20200219-Samsung-ML-8850_8950-Postscript-Samsung.ppd.gz	22944de840a83ee5f39608853c0ae48b	226397
-foomatic-20200219-Samsung-ML-8x00-Postscript-Samsung.ppd.gz	fe6a056755f2df306e7dbc2f18c4bc21	225840
-foomatic-20200219-Samsung-SCX-483x_5x3x-Postscript-Samsung.ppd.gz	bde358dbdb08ade50dd2126e44b4f6e4	226459
-foomatic-20200219-Samsung-SCX-4x28-Postscript-Samsung.ppd.gz	d20ab29692b8270aba6e5f542c3041c8	226150
-foomatic-20200219-Samsung-SCX-5635-Postscript-Samsung.ppd.gz	89d49798140c7efe6f54d3c857e493c5	226361
-foomatic-20200219-Samsung-SCX-5835_5935-Postscript-Samsung.ppd.gz	362b927ce26d97793a67b3d84790e578	226437
-foomatic-20200219-Samsung-SCX-6545-Postscript-Samsung.ppd.gz	7c73fab36a88d18888a8f440ad202174	226608
-foomatic-20200219-Samsung-SCX-6545X-Postscript-Samsung.ppd.gz	d115ad9e84a7002faac460880ca0ab9e	226696
-foomatic-20200219-Samsung-SCX-681x-Postscript-Samsung.ppd.gz	94de75db1d65556e9edfd7620accf63b	226479
-foomatic-20200219-Samsung-SCX-6x20-Postscript-Samsung.ppd.gz	ecba2f6eb9f478af5dcdb3831814883a	226075
-foomatic-20200219-Samsung-SCX-6x22-Postscript-Samsung.ppd.gz	db66840970021fd5ce0c1f2dd1c83386	225991
-foomatic-20200219-Samsung-SCX-6x45-Postscript-Samsung.ppd.gz	5187f9c6750a5e274bda697e7c8c9f85	226434
-foomatic-20200219-Samsung-SCX-6x55-Postscript-Samsung.ppd.gz	51c0ee47b5e8d8551518f18a5ea34d27	226608
-foomatic-20200219-Samsung-SCX-8030_8040-Postscript-Samsung.ppd.gz	e2787abcca77286aa1cd25ae0441207b	226436
-foomatic-20200219-Samsung-SCX-8123_8128-Postscript-Samsung.ppd.gz	c9e92709c790f5c09b1d01078f04cd18	226640
-foomatic-20200219-Samsung-SCX-8230_8240-Postscript-Samsung.ppd.gz	4181ba4bd143fe7c4adc66cec534749a	226809
-foomatic-20200219-Sharp-AJ-1800-pcl3.ppd.gz	b68a2cdeded698d61d8a7c6ded88ae23	232712
-foomatic-20200219-Sharp-AR-155FG_PS-Postscript-Sharp.ppd.gz	304dd9ba5160cf0a6c92335e7a85e820	227889
-foomatic-20200219-Sharp-AR-160M_PS-Postscript-Sharp.ppd.gz	b0b9b0ffb7d7de271a9c75dc1f289252	227790
-foomatic-20200219-Sharp-AR-163FG_PS-Postscript-Sharp.ppd.gz	f0c37093c7dc82d85524a6e8d62d9575	227977
-foomatic-20200219-Sharp-AR-163G_PS-Postscript-Sharp.ppd.gz	32660d8be31da14f494101ae20a7c000	227980
-foomatic-20200219-Sharp-AR-168D-Postscript-Sharp.ppd.gz	e2c04bed3b3509b6c9abbf5a41899756	227893
-foomatic-20200219-Sharp-AR-168S-Postscript-Sharp.ppd.gz	777ff5652277bc4d207b13f69663e4bb	227794
-foomatic-20200219-Sharp-AR-200M_PS-Postscript-Sharp.ppd.gz	8482d50bd34034e1ee40817084aedb24	227790
-foomatic-20200219-Sharp-AR-205FG_PS-Postscript-Sharp.ppd.gz	c0f6cb32f26dc7d678f4f921e7968b56	228076
-foomatic-20200219-Sharp-AR-205G_PS-Postscript-Sharp.ppd.gz	51a3c3526153e8e3ff0b178e11e9b7d3	228079
-foomatic-20200219-Sharp-AR-266FP_PS-Postscript-Sharp.ppd.gz	6e91d2001351700183e7123ff07e3083	229242
-foomatic-20200219-Sharp-AR-311FP_PS-Postscript-Sharp.ppd.gz	9fe7360d7b35edffd9fc50f077b90182	229741
-foomatic-20200219-Sharp-AR-5220-Postscript-Sharp.ppd.gz	da514461a37d55d71caca124b949a79a	227794
-foomatic-20200219-Sharp-AR-555M_PS-Postscript-Sharp.ppd.gz	f03b0e21f4023d6b06c95f2dede9dbd6	229366
-foomatic-20200219-Sharp-AR-705M_PS-Postscript-Sharp.ppd.gz	891f3a3f99cbd5e619956e7d8d18e77f	229099
-foomatic-20200219-Sharp-AR-B07-Postscript-Sharp.ppd.gz	656e16c0b45d72d08ab7390103990436	227655
-foomatic-20200219-Sharp-AR-BC260-Postscript-Sharp.ppd.gz	991e020ccdecc36ccc67abe8e4f2f1e0	321304
-foomatic-20200219-Sharp-AR-C170FP_PS-Postscript-Sharp.ppd.gz	0395fa5ef4c1fff69b04682068914671	321300
-foomatic-20200219-Sharp-AR-C260P-Postscript-Sharp.ppd.gz	22e856ec5f1fbfd32e1d1a7dee7c9fa9	321304
-foomatic-20200219-Sharp-AR-M161_PS-Postscript-Sharp.ppd.gz	f735a42514712c885602275fa3e775e9	227984
-foomatic-20200219-Sharp-AR-M165_PS-Postscript-Sharp.ppd.gz	c631be4d3a5b8314709205dc3fb4fc48	228083
-foomatic-20200219-Sharp-AR-M205_PS-Postscript-Sharp.ppd.gz	dcd532965c2a8ff4eaeaa5c4441b9404	227893
-foomatic-20200219-Sharp-AR-M206_PS-Postscript-Sharp.ppd.gz	1c9d3121d160ea99d48e9f24be5603af	228083
-foomatic-20200219-Sharp-AR-M236_PS-Postscript-Sharp.ppd.gz	4cd12b66fe7545125dc0e9f841b3716a	229246
-foomatic-20200219-Sharp-AR-M351N-Postscript-Sharp.ppd.gz	21358e78e4b3b09fc4c52faa21eb9a7a	229745
-foomatic-20200219-Sharp-AR-M550N-Postscript-Sharp.ppd.gz	777589e7e1e8f1339551114f9e1eba0d	229370
-foomatic-20200219-Sharp-AR-M700N-Postscript-Sharp.ppd.gz	01bbc01c4e5ec2e0e81eaf9b9c62686f	229103
-foomatic-20200219-Sharp-AR-N182FG-Postscript-Sharp.ppd.gz	468b37cf989a07a844c9919cd808e93c	228176
-foomatic-20200219-Sharp-AR-N182G-Postscript-Sharp.ppd.gz	fcc7fc279bb5aceae8231962c5a11d82	228179
-foomatic-20200219-Sharp-MX-2314NR-Postscript-Sharp.ppd.gz	e54f4474ef4da410bf630b565984c1d5	322297
-foomatic-20200219-Sharp-MX-2614NR-Postscript-Sharp.ppd.gz	4b38ab23f8bbace51b9fb913168a8d08	322223
-foomatic-20200219-Sharp-MX-M1100-Postscript-Sharp.ppd.gz	5fb6c3bde9577229d02dda95a8542b4a	230660
-foomatic-20200219-Sharp-MX-M182D-Postscript-Sharp.ppd.gz	b3ab5d154fbea65b3db1914e75f32a3d	228183
-foomatic-20200219-Sharp-MX-M182-Postscript-Sharp.ppd.gz	e4621cea8cc84cc7d5fc8d604e191847	228084
-foomatic-20200219-Sharp-MX-M202D-Postscript-Sharp.ppd.gz	076479c3bbabc946202327222406c991	228183
-foomatic-20200219-Sharp-MX-M260FP-Postscript-Sharp.ppd.gz	a5ad2baf2fe131a60fa18937d6b9de7b	229176
-foomatic-20200219-Sharp-MX-M260-Postscript-Sharp.ppd.gz	ca67dd3601d9d9d5c50d793d4b43dd26	229180
-foomatic-20200219-Sharp-MX-M264NR-Postscript-Sharp.ppd.gz	d33d8218924e36ca70d1ba2cf73e351e	229960
-foomatic-20200219-Sharp-MX-M860-Postscript-Sharp.ppd.gz	97fd4298363a59c47ef1cf1443a99c38	230587
+foomatic-20200219-Ricoh-GX_3050N-pxlcolor-Ricoh.ppd.gz	11453589b3e3b0d08a79c35679d68435	1069979
+foomatic-20200219-Ricoh-GX_3050SFN-pxlcolor-Ricoh.ppd.gz	11453589b3e3b0d08a79c35679d68435	1069979
+foomatic-20200219-Ricoh-GX_E3350N-pxlcolor-Ricoh.ppd.gz	11453589b3e3b0d08a79c35679d68435	1069979
+foomatic-20200219-Ricoh-GX_E5550N-pxlcolor-Ricoh.ppd.gz	11453589b3e3b0d08a79c35679d68435	1070004
+foomatic-20200219-Ricoh-SP_450DN-PDF-Ricoh.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+foomatic-20200219-Samsung-CLP-610-pxlcolor.ppd.gz	65a4243340efda05e7822131db44326f	4219353
+foomatic-20200219-Sharp-AJ-1800-pcl3.ppd.gz	1c0aa99dd979d22c315eea51a874cd57	18493
 foomatic-20200219-Star-NX-1001-eps9mid.ppd.gz	d42d0fe84e18912316db82fbefd5268c	7745
 foomatic-20200219-Star-StarJet_48-sj48.ppd.gz	476eb23c4e68a207a7d2347afe8a229b	51810
 foomatic-20200219-Tektronix-4693d-t4693dX.ppd.gz	230e388823735da68f9b2df16fc41b31	1871123
 foomatic-20200219-Tektronix-4695-tek4696.ppd.gz	a6b0c25352428eb01ec8a60fc1967b2b	23835
 foomatic-20200219-Tektronix-4696-tek4696.ppd.gz	a6b0c25352428eb01ec8a60fc1967b2b	23835
-foomatic-20200219-Tektronix-Phaser_350-Postscript.ppd.gz	f5ee07a80b8be451391cb2dc741d31e5	213429
-foomatic-20200219-Toshiba-e-Studio_205-Postscript-Toshiba.ppd.gz	ddf1f70c4793583f0ce4a8b676f7acdc	138141
-foomatic-20200219-Toshiba-e-Studio_282-Postscript-Toshiba.ppd.gz	6e59bbe5aa0da8522377d08b48e79975	139543
-foomatic-20200219-Toshiba-e-Studio_3510c-Postscript-Toshiba.ppd.gz	235c7184f6d37f3e603a5ef7d024039c	228398
-foomatic-20200219-Toshiba-e-Studio_451c-Postscript-Toshiba.ppd.gz	e86e895a3a2abdcd664c9c074337c21d	231080
-foomatic-20200219-Toshiba-e-Studio_452-Postscript-Toshiba.ppd.gz	1eafa03661f9002e438285466161985f	139471
-foomatic-20200219-Toshiba-e-Studio_850-Postscript-Toshiba.ppd.gz	09aee060d229452527c994745ad50bef	139205
-foomatic-20200219-Toshiba-GL-1010-Postscript-Toshiba.ppd.gz	c00edba94ed42e18c72f48f02dbd2b7c	136878
-foomatic-20200219-Toshiba-GL-1020-Postscript-Toshiba.ppd.gz	098753ceb23f14c686bf1fbc4a4bdfa7	136918
 foomatic-20200219-Xerox-DocuPrint_XJ8C-lxm5700m.ppd.gz	eccf445e08d5914ee2fd53002cfab044	210869
-foomatic-20200219-Xerox-Phaser_3160N-pxlcolor.ppd.gz	c8ca478ff81032962259cf3139263b07	4219348
-fuji_xerox-20200402-fuji-xerox-20200402-fx-apeosportv-c3375.ppd.gz	6ff264e582973a3d75b57a9b49d5996c	315963
-hp-20171121-hplip-3.17.10-hp-color_laserjet-ps.ppd.gz	34bc66287c4cbc3de3cdb218021f6ce4	214077
-hp-20171121-hplip-3.17.10-hp-deskjet_f4210_series.ppd.gz	a8a0bfd451fe546c1510b4884bba75af	18238
-hp-20171121-hplip-3.17.10-hp-laserjet_4-ps.ppd.gz	8976fee99238096f756b9b446eb0ad30	226447
-hp-20171121-hplip-3.17.10-hp-laserjet_4si-ps.ppd.gz	7b7164d96d2a6429a48cd0ba6e802c76	226500
-hp-20171121-hplip-3.17.10-hp-laserjet_4v-ps.ppd.gz	2eab5a3b48bdf83521e6612ab3f3a228	227099
-hp-20171121-hplip-3.17.10-hp-laserjet_6p-ps.ppd.gz	393fa28d69999e2efba9d43b56e7f3fe	226972
+foomatic-20200219-Xerox-Phaser_3160N-pxlcolor.ppd.gz	65a4243340efda05e7822131db44326f	4219353
 hp-20171121-hplip-3.17.10-hp-laserjet_p1505n-pcl3.ppd.gz	5693d7cd934636dd157e8768a32e96fb	32207
-hp-20171121-hplip-3.17.10-hp-laserjet_p2055_series-ps.ppd.gz	0bd0dda341802bd67789e7d54d821557	226669
-hp-20171121-hplip-3.17.10-hp-laserjet_p4010_series-ps.ppd.gz	958575783ab2e2ec28bc0caf33c274fd	228128
-hp-20190111-hplip-3.18.12-hp-designjet_z6200_42in_photo-ps.ppd.gz	d91116a41ed94097d408c3d04c0e2f59	215814
-hp-20190111-hplip-3.18.12-hp-designjet_z6200_60in_photo-ps.ppd.gz	5f16599ff63ee8af5b5d8c293b74ea79	215814
-hp-20190111-hplip-3.18.12-hp-designjet_z6810ps_42in-ps.ppd.gz	20f9d8fd6b00e9e6b71e24d006e45ac7	317506
-hp-20190111-hplip-3.18.12-hp-PCL3-Class1B.ppd.gz	39ef1e071debc0465ea026425d7152ba	18190
-hp-20190918-hplip-3.19.6-hp-Ampere.ppd.gz	f2ef1b2131fc0572b7a10ed14ab9178d	18191
-hp-20190918-hplip-3.19.6-hp-CLE17.ppd.gz	58c8ca7d9b4122611e3551f80ebe1341	18224
-hp-20190918-hplip-3.19.6-hp-cm8060_mfp_with_edgeline-ps.ppd.gz	0159c5e590b5acb168ea98975b319682	318295
-hp-20190918-hplip-3.19.6-hp-color_designjet_xl_3600-ps.ppd.gz	801a51d07b7c23c55fe43dbda93fd4df	317235
-hp-20190918-hplip-3.19.6-hp-color_laserjet_pro_mfp_m277-ps.ppd.gz	3064ecf8b6607882232b3ff23b78b98d	316676
-hp-20190918-hplip-3.19.6-hp-Copperhead12.ppd.gz	a9ac7c212e43a07df2afd6af2d78923f	18237
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH15.ppd.gz	ef629017f8482a75b7ca808a38f386d3	18225
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH17.ppd.gz	ef629017f8482a75b7ca808a38f386d3	18225
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH.ppd.gz	678e96704e5dfb7e9a361faeee832c03	18225
-hp-20190918-hplip-3.19.6-hp-Copperhead.ppd.gz	a9ac7c212e43a07df2afd6af2d78923f	18237
-hp-20190918-hplip-3.19.6-hp-CopperheadXLP.ppd.gz	421650f2ed28f1966c50b24b5fd30e38	18225
-hp-20190918-hplip-3.19.6-hp-Corbett.ppd.gz	f2ef1b2131fc0572b7a10ed14ab9178d	18191
-hp-20190918-hplip-3.19.6-hp-designjet_t2600dr-ps.ppd.gz	3e39d3630c21e6fbf277f7f8b4465da5	317147
-hp-20190918-hplip-3.19.6-hp-DJ55xx.ppd.gz	49c36973ecf8c5b9382e1d05fb31e5b5	18141
-hp-20190918-hplip-3.19.6-hp-DJ9xxVIP.ppd.gz	056171eab957d464e6da585388f2eec0	18140
-hp-20190918-hplip-3.19.6-hp-Gemstone.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18225
-hp-20190918-hplip-3.19.6-hp-Kapan.ppd.gz	dfee67dc212959138d0672be29bfbd05	18200
-hp-20190918-hplip-3.19.6-hp-laserjet_200_color_m251-ps.ppd.gz	6492f0ee552cc9772c0abe9531c0c933	316713
-hp-20190918-hplip-3.19.6-hp-laserjet_m1522_mfp-ps.ppd.gz	267cdeac567cddd074d39b6a6cddf958	226874
-hp-20190918-hplip-3.19.6-hp-laserjet_m2727_mfp_series-ps.ppd.gz	db3b3e64413c100d968b537ae8b27cef	227213
+hp-20190111-hplip-3.18.12-hp-PCL3-Class1B.ppd.gz	39ef1e071debc0465ea026425d7152ba	18188
+hp-20190918-hplip-3.19.6-hp-Ampere.ppd.gz	f2ef1b2131fc0572b7a10ed14ab9178d	18188
+hp-20190918-hplip-3.19.6-hp-CLE.ppd.gz	c3ec7dcdb0c7be64173663824b5489d1	18224
+hp-20190918-hplip-3.19.6-hp-CLE17.ppd.gz	58c8ca7d9b4122611e3551f80ebe1341	18222
+hp-20190918-hplip-3.19.6-hp-Copperhead.ppd.gz	a9ac7c212e43a07df2afd6af2d78923f	18235
+hp-20190918-hplip-3.19.6-hp-Copperhead12.ppd.gz	a9ac7c212e43a07df2afd6af2d78923f	18235
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH.ppd.gz	678e96704e5dfb7e9a361faeee832c03	18222
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH15.ppd.gz	ef629017f8482a75b7ca808a38f386d3	18223
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH17.ppd.gz	ef629017f8482a75b7ca808a38f386d3	18223
+hp-20190918-hplip-3.19.6-hp-CopperheadXLP.ppd.gz	421650f2ed28f1966c50b24b5fd30e38	18223
+hp-20190918-hplip-3.19.6-hp-Corbett.ppd.gz	f2ef1b2131fc0572b7a10ed14ab9178d	18189
+hp-20190918-hplip-3.19.6-hp-DJ9xxVIP.ppd.gz	056171eab957d464e6da585388f2eec0	18138
+hp-20190918-hplip-3.19.6-hp-Gemstone.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18223
+hp-20190918-hplip-3.19.6-hp-Kapan.ppd.gz	dfee67dc212959138d0672be29bfbd05	18199
 hp-20190918-hplip-3.19.6-hp-LJ-Class1.ppd.gz	e46585b6a8771037631218de9e1d1661	44994
 hp-20190918-hplip-3.19.6-hp-LJ-Class2.ppd.gz	bd8ebd3fb5973ca3c4cf4f07f2d6235f	904021
-hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz	3d9654d50b1e2b4ef182885451c7d713	165161
+hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz	bcd7a9e7d83edc65a1d87509eb69ded1	165687
 hp-20190918-hplip-3.19.6-hp-LJ-Class6.ppd.gz	5693d7cd934636dd157e8768a32e96fb	32207
-hp-20190918-hplip-3.19.6-hp-Mimas15.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18225
-hp-20190918-hplip-3.19.6-hp-Mimas17.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18225
-hp-20190918-hplip-3.19.6-hp-Mimas.ppd.gz	0518d62d4497ae16f27046b796431a26	18237
-hp-20190918-hplip-3.19.6-hp-MimasTDR.ppd.gz	1651551c1a3a0fd57c987f516fb52ee9	12054
-hp-20190918-hplip-3.19.6-hp-OJ7000.ppd.gz	f700db107ce46b344e1e036368c74235	18237
-hp-20190918-hplip-3.19.6-hp-OJProKx50.ppd.gz	a8a0bfd451fe546c1510b4884bba75af	18238
-hp-20190918-hplip-3.19.6-hp-postscript-inkjet.ppd.gz	1b9f46ed0a9738fa2b4c7e1ebb82a74e	316611
-hp-20190918-hplip-3.19.6-hp-postscript-laserjet.ppd.gz	f61cb19fa8c4146a4f26f19d980fea95	316875
-hp-20190918-hplip-3.19.6-hp-postscript-laserjet-pro.ppd.gz	6eee69a20643f0d2188ef2ac5b8723a9	316899
-hp-20190918-hplip-3.19.6-hp-PSP100.ppd.gz	0db48178558665d86ab3245220b9afc4	18241
-hp-20190918-hplip-3.19.6-hp-PSP470.ppd.gz	159afaf3e88e84166742400c87d41b6b	37283
-hp-20190918-hplip-3.19.6-hp-Pyramid15.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18225
-hp-20190918-hplip-3.19.6-hp-PyramidPlus.ppd.gz	1651551c1a3a0fd57c987f516fb52ee9	12054
-hp-20190918-hplip-3.19.6-hp-Pyramid.ppd.gz	9785e610a6ae577b529e1e3e0e4ace37	12098
-hp-20190918-hplip-3.19.6-hp-PyramidRefresh15.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18225
-hp-20190918-hplip-3.19.6-hp-PyramidRefresh17.ppd.gz	58c8ca7d9b4122611e3551f80ebe1341	18223
-hp-20190918-hplip-3.19.6-hp-Python10.ppd.gz	4a0e241b0f8219f323be07133de6c56f	18238
-hp-20190918-hplip-3.19.6-hp-Python11.ppd.gz	f700db107ce46b344e1e036368c74235	18238
-hp-20190918-hplip-3.19.6-hp-Python.ppd.gz	f700db107ce46b344e1e036368c74235	18238
-hp-20190918-hplip-3.19.6-hp-Saipan15B.ppd.gz	01919e1a8cc7e5732ced0976128964ab	18191
-hp-20190918-hplip-3.19.6-hp-Saipan.ppd.gz	01919e1a8cc7e5732ced0976128964ab	18191
-hp-20190918-hplip-3.19.6-hp-SPDOfficejetProAsize.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18225
-hp-20190918-hplip-3.19.6-hp-SPDOfficejetProBsize.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18225
-hp-20190918-hplip-3.19.6-hp-Stabler.ppd.gz	0518d62d4497ae16f27046b796431a26	18237
-hp-20190918-hplip-3.19.6-hp-StingrayOJ.ppd.gz	03fd7d0679955d604ca845ccddf3bdb4	18141
-hp-20190918-hplip-3.19.6-hp-ViperMinusVIP.ppd.gz	aaa7d48665d43147f9c369761310c26a	19165
-hp-20190918-hplip-3.19.6-hp-ViperPlusVIP.ppd.gz	0518d62d4497ae16f27046b796431a26	18237
-hplip-20200303-hplip-3.19.12-hp-color_designjet_xl_3600-ps.ppd.gz	ce8d9a612710cea9cce23af031c63fdd	317100
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2550_series-ps.ppd.gz	88665c5b5117765328ac4fee5dca5248	316554
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2605-ps.ppd.gz	c302b73ae8fc5bdf2c3aa8905e7b4ccd	316848
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2700-ps.ppd.gz	1f6fb3962a7f52737062b3e12e45992b	316612
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2800-ps.ppd.gz	4df81a1a945325e0b6f38b94c85d6c6a	316490
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_cm1015-ps.ppd.gz	1c3ceeb9e9e2654107f342aa7c8a06fb	316565
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_e85055-ps.ppd.gz	153bff7ebc51b385269d2a3116c949c6	317672
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_flowmfp_m776-ps.ppd.gz	13690f0164adbac067273370bcf714d3	318045
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_m856-ps.ppd.gz	c04d9278c66c2382c32de2f4756eb4f0	317976
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_mfp_m776-ps.ppd.gz	1a393baded20675f4f5514bc0d28925a	318273
-hplip-20200303-hplip-3.19.12-hp-designjet_4000ps.ppd.gz	6e1b367dc9493cda3c5dc3937d374a62	215603
-hplip-20200303-hplip-3.19.12-hp-designjet_4500mfp.ppd.gz	ff6dd524948915beb9811ce6fd5fbb46	215610
-hplip-20200303-hplip-3.19.12-hp-designjet_d5800-ps.ppd.gz	4bd2d9ad76191f149e631fce3ee18160	316875
-hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_24in-ps.ppd.gz	bc40a80bf19b72fa113d1cffa0b83c28	215609
-hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_44in-ps.ppd.gz	2aa80369959869a00e2b5a91c1e24ebc	215609
-hplip-20200303-hplip-3.19.12-hp-designjet_t1200_postscript-ps.ppd.gz	564f29270e3bf9411969061a607c0c5d	215620
-hplip-20200303-hplip-3.19.12-hp-designjet_t1300_postscript-ps.ppd.gz	7a6077fa279bce726e80810166c87b12	215743
-hplip-20200303-hplip-3.19.12-hp-designjet_t1500-postscript.ppd.gz	d25db500f7ebf733132a491dd55fe68d	216016
-hplip-20200303-hplip-3.19.12-hp-designjet_t1530-postscript.ppd.gz	14eb4603cd000a7b9811e494a1eac885	216054
-hplip-20200303-hplip-3.19.12-hp-designjet_t1600dr-ps.ppd.gz	08a5c2ea7ec119c46b25adacd9f9ea3a	317012
-hplip-20200303-hplip-3.19.12-hp-designjet_t1600_printer-ps.ppd.gz	377488c80a238cef41f53769cb4281d4	317012
-hplip-20200303-hplip-3.19.12-hp-designjet_t1700dr_postscript-ps.ppd.gz	3c29d45a43069380bb71feb43b40ce30	317103
-hplip-20200303-hplip-3.19.12-hp-designjet_t1700_postscript-ps.ppd.gz	4cd62360e7c7330b9949e404c0d6e660	317092
-hplip-20200303-hplip-3.19.12-hp-designjet_t1708dr_postscript-ps.ppd.gz	eb1b7670ffedd700a9ded94f8bda4357	317415
-hplip-20200303-hplip-3.19.12-hp-designjet_t1708_postscript-ps.ppd.gz	7c5426a283e9ceff77cfe855ffdf893a	317404
-hplip-20200303-hplip-3.19.12-hp-designjet_t2300_postscript-ps.ppd.gz	a4e314aa6a1439b2a579a28a892a6a6d	215743
-hplip-20200303-hplip-3.19.12-hp-designjet_t2500-postscript.ppd.gz	1a4c782aa5a3b090afcd5528c6152c40	215952
-hplip-20200303-hplip-3.19.12-hp-designjet_t2600dr-ps.ppd.gz	4fd077e95d7ade3ec79216c3c32a8ee0	317012
-hplip-20200303-hplip-3.19.12-hp-designjet_t3500-ps.ppd.gz	8356ae8927c1f9f6a216cd5a408dbb6e	316997
-hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps_monochrome-ps.ppd.gz	a6f29bf0a5b19262364811a69f75733d	189152
-hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps-ps.ppd.gz	d5f0de2762b6b21705c35c80aa6ea76c	216024
-hplip-20200303-hplip-3.19.12-hp-designjet_t7200-ps.ppd.gz	7b12785f84ea108ee048907974497a86	317147
-hplip-20200303-hplip-3.19.12-hp-designjet_t770_postscript-ps.ppd.gz	4117fde8d247aa3830066ed02e1dffbb	215465
-hplip-20200303-hplip-3.19.12-hp-designjet_t770ps_24in-ps.ppd.gz	8a573b075185141ec95c1b4b8d6ba9d3	215465
-hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_24in-ps.ppd.gz	37837ce9b774b95afb0f04fe50a07b99	215588
-hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_44in-ps.ppd.gz	6759ec5a4d3970b1c06dea0bbe22f08f	215588
-hplip-20200303-hplip-3.19.12-hp-designjet_t920-postscript.ppd.gz	349816bba7eaa57381a06caad6da7ab7	215857
-hplip-20200303-hplip-3.19.12-hp-designjet_t930-postscript.ppd.gz	43b8b079998134c0166034d2aebd16fc	215857
-hplip-20200303-hplip-3.19.12-hp-designjet_z5200_postscript-ps.ppd.gz	1226f07b1d73140de4d5aacdd6e35509	318370
-hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_42in_photo-ps.ppd.gz	d4dd5eff08e3feb78767bbbfb621de00	215826
-hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_60in_photo-ps.ppd.gz	61a64da3ed7c8f98c580ea5c6feb6a0d	215826
-hplip-20200303-hplip-3.19.12-hp-designjet_z6600-postscript.ppd.gz	06ce89bc95a5cc6713fcdc81dcbc2743	317109
-hplip-20200303-hplip-3.19.12-hp-designjet_z6610ps_60in-ps.ppd.gz	0fed61299f735dd975b6bfdc0a82e33c	317358
-hplip-20200303-hplip-3.19.12-hp-designjet_z6800_photo-postscript.ppd.gz	0cb3cc4e16f04bc17d05b5195a11d365	317109
-hplip-20200303-hplip-3.19.12-hp-designjet_z6810ps_60in-ps.ppd.gz	3adecc51291acdfa97cc950d23adad1d	317506
-hplip-20200303-hplip-3.19.12-hp-deskjet_3420.ppd.gz	f9a15b99e6b2779d83b2012c68c52074	2105370
-hplip-20200303-hplip-3.19.12-hp-deskjet_3700_series.ppd.gz	c3ec7dcdb0c7be64173663824b5489d1	18227
-hplip-20200303-hplip-3.19.12-hp-deskjet_950c.ppd.gz	454204fdd51f9911425afddcfc2dba18	162578
-hplip-20200303-hplip-3.19.12-hp-deskjet_d1600_series.ppd.gz	c98ab70588b9661509e80cd16df6543c	4095964
-hplip-20200303-hplip-3.19.12-hp-deskjet_d2600_series.ppd.gz	48e0036418de34aa387ea8d6647aa856	4095964
-hplip-20200303-hplip-3.19.12-hp-deskjet_d4100_series.ppd.gz	20e023513963f39c41f49e9cd0fcde0a	3962228
-hplip-20200303-hplip-3.19.12-hp-deskjet_f300_series.ppd.gz	f9a15b99e6b2779d83b2012c68c52074	2105370
-hplip-20200303-hplip-3.19.12-hp-DJGenericVIP.ppd.gz	53c39ae28d7602a589eebca68f4db1e0	18240
-hplip-20200303-hplip-3.19.12-hp-laserjet_100_color_mfp_m175-ps.ppd.gz	73f23dd7a966046537fddcbb60d8487a	316630
-hplip-20200303-hplip-3.19.12-hp-officejet_4300_series.ppd.gz	f9a15b99e6b2779d83b2012c68c52074	2105370
-hplip-20200303-hplip-3.19.12-hp-pagewide_p55250-ps.ppd.gz	dcbd792cc066d3d9de28f962cdabad9d	316354
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_3900ps_mfp-ps.ppd.gz	b6e55ef866a0368dece1e111abfb3d4e	316581
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4000ps-ps.ppd.gz	0c17289e5bd3e4545a8627ea1a6fbcfd	317174
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4100ps-ps.ppd.gz	26ed966ec40f79087aad5d542a9ec641	316723
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps_blueprinter-ps.ppd.gz	6f7c016271d25ff7bf9d4ce420724b07	317042
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps-ps.ppd.gz	0197ae25de87f5e24739bc8e3c572b2e	317174
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp_blueprinter-ps.ppd.gz	5fd3e4632621e9cef480e5178168af3d	317042
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp-ps.ppd.gz	79b5bd2925cd51746107919f75778230	317174
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps-ps.ppd.gz	79b5bd2925cd51746107919f75778230	317174
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps_blueprinter-ps.ppd.gz	3b5363a92ac2fa9722796136de6f0d81	317042
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps-ps.ppd.gz	6cd85208a2a3299212ac6aa57f773f49	317174
-hplip-20200303-hplip-3.19.12-hp-photosmart_a530_series.ppd.gz	f846df49b4b5fea20ed6deb23e16172b	193299
-konica_minolta-20200331-konica-minolta-20200331-konica-minolta-226i.ppd.gz	8a436ea98fbe7a0c124263b709f24a54	225933
-konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c226.ppd.gz	88f13050343210a4a118cc4e8a857ed8	314627
-kyocera-20180809-Kyocera_TASKalfa_3051ci.ppd.gz	287024fb9c4d71c952b58eadceb753f5	318898
-kyocera-20190328-Kyocera_CS_2551ci.ppd.gz	c94efd7ec3e897d62ca8e57b42642eee	214545
-kyocera-20190328-Kyocera_CS_2552ci.ppd.gz	5d8f8fa5f818e501eda42ecae7b6cfdd	214545
-kyocera-20190328-Kyocera_CS_3010i.ppd.gz	3b9e3d8d4c77793dced659e2330a4d2e	188431
-kyocera-20190328-Kyocera_CS_3011i.ppd.gz	12b6849b48c97717e0a6c3e97576e6c2	188431
-kyocera-20190328-Kyocera_CS_306ci.ppd.gz	44fc8e251aff8085fd2185c26eab6673	214545
-kyocera-20190328-Kyocera_CS_307ci.ppd.gz	af0ade6a516e74d1dc69d496bac60150	214545
-kyocera-20190328-Kyocera_CS_3212i.ppd.gz	3f1fc4ae29f8e886125d4112029626b5	188431
-kyocera-20190328-Kyocera_CS_4002i.ppd.gz	8ae3e4ae6de6746f8b653c147f54da64	188431
-kyocera-20190328-Kyocera_CS_7002i.ppd.gz	8ae3e4ae6de6746f8b653c147f54da64	188431
-kyocera-20190328-Kyocera_CS_7052ci.ppd.gz	1d89bfb500ceda6b259ac9da033fb9ec	214545
-kyocera-20190328-Kyocera_CS_9002i.ppd.gz	6dd1ac471dedb6fd75d44b8e585b68f4	188431
-kyocera-20190328-Kyocera_ECOSYS_M2030dn.ppd.gz	54e031cb92bb7e1b2e873c1c8706c055	188431
-kyocera-20190328-Kyocera_ECOSYS_M2035dn.ppd.gz	2bbc11b5bc9f8b169465b554fdfe3314	188431
-kyocera-20190328-Kyocera_ECOSYS_M2040dn.ppd.gz	eb37feddf1d0717f66dec2a7e8b7a4d7	188431
-kyocera-20190328-Kyocera_ECOSYS_M2235dn.ppd.gz	0525026e91ce5e248bd6541825ff9c9f	188431
-kyocera-20190328-Kyocera_ECOSYS_M3040dn.ppd.gz	24834b14180e2855cbc7596e36425030	188431
-kyocera-20190328-Kyocera_ECOSYS_M3145dn.ppd.gz	24834b14180e2855cbc7596e36425030	188431
-kyocera-20190328-Kyocera_ECOSYS_M4028idn.ppd.gz	f88995f71c8763457b54b298ac2e4b95	188431
-kyocera-20190328-Kyocera_ECOSYS_M4125idn.ppd.gz	859ad409d30daf371a85e4d91c428d00	188431
-kyocera-20190328-Kyocera_ECOSYS_M5021cdn.ppd.gz	473024906cc2055dc74285f841110cfc	214545
-kyocera-20190328-Kyocera_ECOSYS_M5521cdn.ppd.gz	cdd41f09351c12377a7dec3eda586094	214545
-kyocera-20190328-Kyocera_ECOSYS_M6026cdn.ppd.gz	45f289d5ce76b0c19606972e47384c43	214545
-kyocera-20190328-Kyocera_ECOSYS_M6230cidn.ppd.gz	7c2ea902085aa17a257b668c2ac4470c	214545
-kyocera-20190328-Kyocera_ECOSYS_M8024cidn.ppd.gz	4756a76345461ca53540dbc712faa317	214545
-kyocera-20190328-Kyocera_ECOSYS_M8124cidn.ppd.gz	4756a76345461ca53540dbc712faa317	214545
-kyocera-20190328-Kyocera_ECOSYS_P2035d.ppd.gz	01115639b8d58c1822217303a672f2bc	188431
-kyocera-20190328-Kyocera_ECOSYS_P3045dn.ppd.gz	01115639b8d58c1822217303a672f2bc	188431
-kyocera-20190328-Kyocera_ECOSYS_P4035dn.ppd.gz	cd80c73c157468721a016f3b6ad7a92f	188431
-kyocera-20190328-Kyocera_ECOSYS_P6026cdn.ppd.gz	c290914848e7287f0783b7d61f59ecce	214545
-kyocera-20190328-Kyocera_ECOSYS_P8060cdn.ppd.gz	d46c1a01156a9135786aa6f6ec125600	214545
-kyocera-20190328-Kyocera_FS-5040DN.ppd.gz	60ee2ca12382a0275ffd40793990fc84	188431
-kyocera-20190328-Kyocera_TASKalfa_4020i.ppd.gz	cc30575ab523ccea04b6429efa38236d	188431
-kyocera-20190328-Kyocera_TASKalfa_406ci.ppd.gz	d25205185fa00d5e6633dcf13f53fcf4	214545
-kyocera-20190328-Kyocera_TASKalfa_4500i.ppd.gz	cd115cf273d1b9fd966a874293ae479e	188431
-kyocera-20200211-Kyocera_TASKalfa_7003i.ppd.gz	cd115cf273d1b9fd966a874293ae479e	188431
-kyocera-20200416-Kyocera_CS_205c.ppd.gz	38d1968e62cf77b88a34dbce6fcd4b8b	214545
-kyocera-20200416-Kyocera_CS_250ci.ppd.gz	bbe5851d4cab014bd9216611c59dc6db	214545
-kyocera-20200416-Kyocera_CS_2550ci.ppd.gz	c1ed8474e2ee0a43773e7483ca6a4659	214545
-kyocera-20200416-Kyocera_CS_2553ci.ppd.gz	3122d76c69795e0558b27c7bf425c4c9	214545
-kyocera-20200416-Kyocera_CS_2554ci.ppd.gz	2dce886dd165992503613fe01aa34f39	214736
-kyocera-20200416-Kyocera_CS_255.ppd.gz	c580f76c8c6d318d7b19cda6dee9d777	188431
-kyocera-20200416-Kyocera_CS_300i.ppd.gz	1f2468929e80e931170c1c3dd83e8835	188431
-kyocera-20200416-Kyocera_CS_3050ci.ppd.gz	86c19f6faa8cd6cde7c490a199356044	214545
-kyocera-20200416-Kyocera_CS_308ci.ppd.gz	0d35cf1e13bfc34f90b4b83b606bee12	214545
-kyocera-20200416-Kyocera_CS_3500i.ppd.gz	28d07e97b78e0fdd6b0e261baf309ab1	188431
-kyocera-20200416-Kyocera_CS_4003i.ppd.gz	28d07e97b78e0fdd6b0e261baf309ab1	188431
-kyocera-20200416-Kyocera_CS_6500i.ppd.gz	6c68f84561aecaa4abbf212c25cbe977	188431
-kyocera-20200416-Kyocera_CS_6550ci.ppd.gz	372cbf0aa410ab10703dfc5276e41ad8	214545
-kyocera-20200416-Kyocera_CS_7003i.ppd.gz	96bdca2fedf1758f0cdfe7fa7c93aacf	188431
-kyocera-20200416-Kyocera_CS_7353ci.ppd.gz	d620f5f7440d76cbc0811cbba9f3236f	214545
-kyocera-20200416-Kyocera_CS_9003i.ppd.gz	3660c9c8b740255ec6d71a51f415adff	188431
-kyocera-20200416-Kyocera_ECOSYS_M3860idnf.ppd.gz	ff5fc8960cfe23c9d8b57dbe13e69f0c	188431
-kyocera-20200416-Kyocera_ECOSYS_M3860idn.ppd.gz	ff5fc8960cfe23c9d8b57dbe13e69f0c	188431
-kyocera-20200416-Kyocera_ECOSYS_P3260dn.ppd.gz	2d5ba5950f7032676b88a7f19cea029a	188431
-kyocera-20200416-Kyocera_ECOSYS_P4135dn.ppd.gz	88d3ea22303f9e7496fa99ae800ec73c	188431
-kyocera-20200416-Kyocera_ECOSYS_P5018cdn.ppd.gz	454fc25f628ebb30a244a471dd658e79	214545
-kyocera-20200416-Kyocera_FS-1028MFP.ppd.gz	d70cce71e3447ffc0af5223e934db5ce	188431
-kyocera-20200416-Kyocera_FS-1030MFP.ppd.gz	ab9b9f6ab3035526e63fba3f1bc78504	188431
-kyocera-20200416-Kyocera_FS-1035MFP.ppd.gz	ab9b9f6ab3035526e63fba3f1bc78504	188431
-kyocera-20200416-Kyocera_FS-1120D.ppd.gz	f3ce9f50879a8f908cc1f81f12e3d484	188431
-kyocera-20200416-Kyocera_FS-2020D.ppd.gz	e935984b361f26bd54a4607d74d7cc62	188431
-kyocera-20200416-Kyocera_FS-2100D.ppd.gz	fd43604894d3187674ade84c870df234	188431
-kyocera-20200416-Kyocera_FS-3540MFP.ppd.gz	89b509aadcdadbf2fc80b5bd99382922	188431
-kyocera-20200416-Kyocera_FS-3920DN.ppd.gz	13807c28bc1c5a06b641faece2382a43	188431
-kyocera-20200416-Kyocera_FS-4100DN.ppd.gz	13807c28bc1c5a06b641faece2382a43	188431
-kyocera-20200416-Kyocera_FS-6970DN.ppd.gz	303c2d576f63c7fdf2d311e410f213aa	188431
-kyocera-20200416-Kyocera_FS-9130DN.ppd.gz	903a742d0c930be9cb4d4fe4ba630d69	188431
-kyocera-20200416-Kyocera_FS-C2026MFP.ppd.gz	f0488524c1314aa735ee11bb8963e23a	214545
-kyocera-20200416-Kyocera_FS-C2026MFP+.ppd.gz	f39ee8dae92becc2a69f958f3ae79d9b	214545
-kyocera-20200416-Kyocera_FS-C5250DN.ppd.gz	c456fd89f2b069eaec5139f802760e01	214545
-kyocera-20200416-Kyocera_FS-C8600DN.ppd.gz	fd36e2db2ff37a52c2d3cd42c85de3a6	214545
-kyocera-20200416-Kyocera_TASKalfa_3060ci.ppd.gz	911e38fd26c464807baba3cc1ab22688	214545
-kyocera-20200716-Kyocera_ECOSYS_M2540dwJ.ppd.gz	eeed5fa51ad6f53b06ef0c79d39012c8	188427
-kyocera-20200716-Kyocera_ECOSYS_M3645idnJ.ppd.gz	7c2f97b49f3fd23ef09f7079b2922494	188309
-kyocera-20200716-Kyocera_ECOSYS_M4226idn.ppd.gz	08ff06d155f32f5ad031942fb1892381	188431
-kyocera-20200716-Kyocera_ECOSYS_M6635cidnJ.ppd.gz	805bcfdd1d180b067f345ef3e0f8d1a2	214541
-kyocera-20200716-Kyocera_ECOSYS_M8224cidn.ppd.gz	4a282d21657eb69037a743919f4ddc6f	214545
-kyocera-20200716-Kyocera_ECOSYS_P3060dnJ.ppd.gz	2c844d93eba9bc22faf014c43c16d87e	188427
-kyocera-20200716-Kyocera_ECOSYS_P3145dnJ.ppd.gz	5929b25c9fc645baddb73ab661d6802d	188427
-kyocera-20200716-Kyocera_ECOSYS_P3145dn.ppd.gz	e087b11bd57884768328738c5a45f2a8	188431
-kyocera-20200716-Kyocera_ECOSYS_P4040dnJ.ppd.gz	71d9fddb8bc112208636c6b96685c7d5	188427
-kyocera-20200716-Kyocera_ECOSYS_P4060dnJ.ppd.gz	71d9fddb8bc112208636c6b96685c7d5	188427
-kyocera-20200716-Kyocera_ECOSYS_P4140dnJ.ppd.gz	02b584b6322678feadf7e9dc8707cba0	188427
-kyocera-20200716-Kyocera_ECOSYS_P8060cdnJ.ppd.gz	e7bd3ee4f87616b289f96ec3c3b2b4d5	214541
-kyocera-20200716-Kyocera_TASKalfa_2460ciJ.ppd.gz	77460e5f6f51b45203b276569b28e124	214541
-kyocera-20200716-Kyocera_TASKalfa_2510iJ.ppd.gz	1a2f05a8ad9d73eed4aaa90450efbfe4	188427
-kyocera-20200716-Kyocera_TASKalfa_2553ciJ.ppd.gz	b7de5ba713d5e98acbd45546febc21be	214541
-kyocera-20200716-Kyocera_TASKalfa_2553ci.ppd.gz	429c8c57c3b9300e721f30ca996738f9	214545
-kyocera-20200716-Kyocera_TASKalfa_308ci.ppd.gz	4cdddf87be2bbabd23bf209d38304eb4	214545
-kyocera-20200716-Kyocera_TASKalfa_3212iJ.ppd.gz	08f7ef7772277c7c82f7b99924c75edf	188309
-kyocera-20200716-Kyocera_TASKalfa_352ci.ppd.gz	4cdddf87be2bbabd23bf209d38304eb4	214545
-kyocera-20200716-Kyocera_TASKalfa_358ciJ.ppd.gz	6a1f271b68597d6bb2320c5739b68e00	214541
-kyocera-20200716-Kyocera_TASKalfa_4003i.ppd.gz	290dd3526abeb564e75e17b50c70a9b3	188431
-kyocera-20200716-Kyocera_TASKalfa_4012iJ.ppd.gz	9fbc53aa15ea462f2b0c70b8cb2781ba	188309
-kyocera-20200716-Kyocera_TASKalfa_5003iJ.ppd.gz	c2047f95631a834428ce430d3ecc6435	188427
-kyocera-20200716-Kyocera_TASKalfa_7003iJ.ppd.gz	9152ed0748d904d9c644a949b23891e3	188427
-kyocera-20200716-Kyocera_TASKalfa_7353ciJ.ppd.gz	32523d5c24865acce77f43cde1d3357d	214541
-kyocera-20200716-TA_P-4531_MFP.ppd.gz	bfa94b0f067c57197322567f07203d72	188431
-lanier-20190916-Lanier-IM_550_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-lanier-20190916-Lanier-IM_600SR_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13205
-lanier-20190916-Lanier-P_800_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13135
-lanier-20190916-Lanier-P_C600_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13207
-lexmark-20200918-Lexmark_X651de.ppd.gz	d6c67858bde25dd9b9183340d0c45752	316228
-lexmark-20200918-Lexmark_X658de.ppd.gz	c1a0a8d5b7b34ab7c2e8d224997c9e20	316611
-lexmark-20201101-Lexmark_6500e_Series.ppd.gz	85e922bc93e1963cbb7e05e5d7c25087	316762
-lexmark-20201101-Lexmark_B2300_Series.ppd.gz	cd7396bacadd9f8e761403f073763cc5	189260
-lexmark-20201101-Lexmark_C2200_Series.ppd.gz	669bcb3086926129469cbabfdb652507	216298
-lexmark-20201101-Lexmark_C2300_Series.ppd.gz	e3e9c539200e6367adfee8a29b9039ae	216075
-lexmark-20201101-Lexmark_C2400_Series.ppd.gz	c376325bb8baf21e1e951f2c91bb1607	216228
-lexmark-20201101-Lexmark_C3400_Series.ppd.gz	1de85a7649d159664cf8b3898a1832f8	215854
-lexmark-20201101-Lexmark_C740_Series.ppd.gz	329bb7b436b43606e2956942521e793b	702005
-lexmark-20201101-Lexmark_C790_Series.ppd.gz	2bb41646ad016e811d8c0367352dc3a1	702473
-lexmark-20201101-Lexmark_C9200_Series.ppd.gz	e35de70ad9f0e854674b7ad778d2c7f8	217114
-lexmark-20201101-Lexmark_C920_Series.ppd.gz	191278579d0d30bfd4478688f4c2c7d6	317013
-lexmark-20201101-Lexmark_C950_Series.ppd.gz	83315941f02ad3a7a8441fbb2b5b007f	317553
-lexmark-20201101-Lexmark_CS310_Series.ppd.gz	4628a4b6f9805c9f624b9d4306e187e9	215994
-lexmark-20201101-Lexmark_CS410_Series.ppd.gz	441dc1a9c8f882831462b4d8a93fca6b	216135
-lexmark-20201101-Lexmark_CS510_Series.ppd.gz	17539e46155c56d45c5aa9971af624ab	702062
-lexmark-20201101-Lexmark_CS720_Series.ppd.gz	787bc00bca78ddda29c97646d9d30a4f	216219
-lexmark-20201101-Lexmark_CS820_Series.ppd.gz	52bd36bb84a944c011add5e46d04d782	216819
-lexmark-20201101-Lexmark_CS920_Series.ppd.gz	1e6f719ae6ee348f4ffc603d515c4c81	217114
-lexmark-20201101-Lexmark_CX310_Series.ppd.gz	fe64146d59f1bd62aea25823155770a1	702062
-lexmark-20201101-Lexmark_CX410_Series.ppd.gz	216b92ab9b43b297eeabc11f2a672d56	216470
-lexmark-20201101-Lexmark_CX420_Series.ppd.gz	78d407cd7a3221b57281272041959629	216228
-lexmark-20201101-Lexmark_CX430_Series.ppd.gz	e92dcff2fdec9675f8fd3991f66dd36a	215854
-lexmark-20201101-Lexmark_CX510_Series.ppd.gz	289fa6577372d5eed5e526553d1a9b04	358264
-lexmark-20201101-Lexmark_CX625_Series.ppd.gz	855c06f2e30a46484565dd9d1fb8438a	216298
-lexmark-20201101-Lexmark_CX725_Series.ppd.gz	2c7787458f92df0a82d66f24aaac340c	216219
-lexmark-20201101-Lexmark_CX820_Series.ppd.gz	287cef2ba3e943f3b238a077d1a826aa	216819
-lexmark-20201101-Lexmark_CX825_Series.ppd.gz	3ade3050e96b7c16588c28843f8fdf50	217174
-lexmark-20201101-Lexmark_CX920_Series.ppd.gz	718b073b63dfe083ac79b66815ad8503	217114
-lexmark-20201101-Lexmark_M1100_Series.ppd.gz	9eaf11de83f1bfec03d29b789492fcf8	227734
-lexmark-20201101-Lexmark_M3100_Series.ppd.gz	4e429f9ef8786de81601652340088e5e	227713
-lexmark-20201101-Lexmark_M5100_Series.ppd.gz	166141bbf22eeebc75f39b613d58fd6f	228121
-lexmark-20201101-Lexmark_MB2300_Series.ppd.gz	728c7d7961eca973da2974724b6d389b	189260
-lexmark-20201101-Lexmark_MC2300_Series.ppd.gz	46298c62851a2179b7b59cab2d2da8c6	216075
-lexmark-20201101-Lexmark_MS310_Series.ppd.gz	5371bd32a5f4f2dc5d958ede9af47639	189525
-lexmark-20201101-Lexmark_MS410_Series.ppd.gz	104d74a2ec20741ef0ced1fe57c41155	227694
-lexmark-20201101-Lexmark_MS510_Series.ppd.gz	9b5cfb2fb2cd56ffc67298dc6bd82ca9	316270
-lexmark-20201101-Lexmark_MS610_Series.ppd.gz	6b9c2431a0eefbf11a270712a38a4430	316270
-lexmark-20201101-Lexmark_MS620_Series.ppd.gz	75ce4429bee81495ebc1b73c408642d9	189400
-lexmark-20201101-Lexmark_MS710_Series.ppd.gz	df4599a328256ba50c3a5165b6ff62f3	316651
-lexmark-20201101-Lexmark_MS725_Series.ppd.gz	0959f5df2e5bb25e0dd77a4102fe8e84	190807
-lexmark-20201101-Lexmark_MS810_Series.ppd.gz	4749d4d54a608e3091b0ca675b82baaf	316651
-lexmark-20201101-Lexmark_MS820_Series.ppd.gz	9473ba41cf840c558c1948ee7205431e	190807
-lexmark-20201101-Lexmark_MX310_Series.ppd.gz	ea26f9e49ea8c66e0b9df8919b51ff3b	189526
-lexmark-20201101-Lexmark_MX410_Series.ppd.gz	9ebb1f8d5c7de09c002e44b42c7179be	316270
-lexmark-20201101-Lexmark_MX510_Series.ppd.gz	df6a28226f078b5796432fe969200fbd	316270
-lexmark-20201101-Lexmark_MX520_Series.ppd.gz	988d2741b099007667edae32a310890c	189400
-lexmark-20201101-Lexmark_MX610_Series.ppd.gz	39ef87d9ecf9c167a302195d5e5dc6ad	316428
-lexmark-20201101-Lexmark_MX620_Series.ppd.gz	c9bf96c626556b7a46a943ba6ce7d29c	189546
-lexmark-20201101-Lexmark_MX6500e_Series.ppd.gz	7e1996f7c67309931c12c74480bf9a45	228076
-lexmark-20201101-Lexmark_MX710_Series.ppd.gz	d73aeb971b421428e7fabb76e014500b	316255
-lexmark-20201101-Lexmark_MX720_Series.ppd.gz	f720609633bfb27adbb619a0148aed47	189540
-lexmark-20201101-Lexmark_MX725_Series.ppd.gz	c8b19af901c5ac0d96a867ece261e4c9	189540
-lexmark-20201101-Lexmark_MX810_Series.ppd.gz	976f4530fa14e714aa370a1da0424dca	316713
-lexmark-20201101-Lexmark_MX820_Series.ppd.gz	bafda12d9168a79c6b85251c3b5fdc37	190252
-lexmark-20201101-Lexmark_MX910_Series.ppd.gz	ec23a553075e5c6cb206141bac41bffd	228536
-lexmark-20201101-Lexmark_X548_Series.ppd.gz	3c295a3030aa8b78b5124d03956c4f28	702079
-lexmark-20201101-Lexmark_X740_Series.ppd.gz	25f20db1b9f6734e2aa2d445af264476	702058
-lexmark-20201101-Lexmark_X790_Series.ppd.gz	08cff39b8922a487edfe0493f23ef926	702526
-lexmark-20201101-Lexmark_X920_Series.ppd.gz	92984c6d58fa6def1d7b6c758f9c3aab	317066
-lexmark-20201101-Lexmark_X950_Series.ppd.gz	9a6f79ca6ea0582895574dab7ab31b58	317606
-lexmark-20201101-Lexmark_XC2100_Series.ppd.gz	ac5b0db40dd7d4ebb398a57093fbaff1	702111
-lexmark-20201101-Lexmark_XC9200_Series.ppd.gz	f4938ec407e63c69ac5e7510b3fc2c94	217114
-lexmark-20201101-Lexmark_XM1100_Series.ppd.gz	9dafefb2868bbd54792deeb28b383df4	227710
-lexmark-20201101-Lexmark_XM3100_Series.ppd.gz	a12ea9af9df94fa8ba9e4bf15236843f	227868
-lexmark-20201101-Lexmark_XM5100_Series.ppd.gz	f6deb621c80690bde415a05648ac9029	227710
-lexmark-20201101-Lexmark_XM7100_Series.ppd.gz	a583bd6627cb3079aa2e393bdd755534	228206
-oki-20200129-oki-c542-ps.ppd.gz	81d71a303ef2c46f1279192de093d32c	321612
-oki-20200329-ES8434-PS.ppd.gz	a05dc6449529905be64e4c872973508a	321688
-oki-20200329-OKB432_a.ppd.gz	ae43d8d60ba1c45024a03d2de62e30b2	230653
-oki-20200329-OKB512_a.ppd.gz	e373ef3275e86faccd137bcffb626304	230653
-oki-20200329-OKB841_a110.ppd.gz	354b28b0c509822111c5d36425133f1b	229044
-oki-20200329-OKI-C332-PS.ppd.gz	15589bd878a8fb1418be87f7295af27d	321612
-oki-20200329-OKI-C612-PS.ppd.gz	e2b4a2a4ffd5d3d81b481c280b465f1b	321751
-oki-20200329-OKI-C712-PS.ppd.gz	200e127f2af6ebc938469410c4d6d838	321832
-oki-20200329-OKI-C833-PS.ppd.gz	57e0609b1e16f63b6c7d1ed3a4177330	321751
-oki-20200329-OKI-C843-PS.ppd.gz	35effad71eaa1c674a4e75934855dc35	321751
-oki-20200329-OKI-C844-PS.ppd.gz	c36d3314c6e4b3363bc96666e4c73151	321688
-oki-20201022-ES6450_PS.ppd.gz	73bb6dc231b3640340fef66dd5ab5b45	321612
-oki-20201022-OKI_MC843_PS.ppd.gz	feaa4cd4600d79deaef4f3411ad51e0b	321531
-oki-20201022-OKI_MC853_PS.ppd.gz	5aeadea5b81995fcb673475023d3c01a	321611
-oki-20201022-OKI_MC883_PS.ppd.gz	5554841265b6f66db431d4ed824c7d3f	321611
-ricoh-20190916-Ricoh-IPSiO_SP_3400L_PXL.ppd.gz	91b66baf1f8fcd474663381823e5427c	547178
-ricoh-20190916-Ricoh-IPSiO_SP_3510SF_PXL.ppd.gz	91b66baf1f8fcd474663381823e5427c	547178
-ricoh-20190916-Ricoh-M_C250FWB_PS.ppd.gz	e3f43f7d9d60cd6289e9de906f0881e4	317968
-ricoh-20190916-Ricoh-MP_C306Z_JPN_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13180
-ricoh-20190916-Ricoh-P_C301SF_PS.ppd.gz	5012f0cfa2390136d3990e6afaf833be	317968
-ricoh-20190916-Ricoh-SP_212Nw_PXL.ppd.gz	bfd546de35444db8d94de689337b2b5e	547178
-ricoh-20190916-Ricoh-SP_2200L_PXL.ppd.gz	91b66baf1f8fcd474663381823e5427c	547178
-ricoh-20190916-Ricoh-SP_320DN_PXL.ppd.gz	91b66baf1f8fcd474663381823e5427c	547178
-ricoh-20191121-Infotec-Pro_8200S_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-ricoh-20191121-Infotec-Pro_8210_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13273
-ricoh-20191121-Infotec-Pro_C5200S_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13320
-ricoh-20191121-Infotec-Pro_C7200_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13369
-ricoh-20191121-Infotec-Pro_C7200S_Light_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13369
-ricoh-20191218-SP_C420e_JPN-PostscriptColor-Ricoh.ppd.gz	aa2352550fe92e1eef5144957368be21	316142
-ricoh-20200221-Gestetner-IM_C300_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13234
-ricoh-20200221-Lanier-IM_C400SR_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13250
-ricoh-20200221-Ricoh-SP_C342M_JPN_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13182
-ricoh-20200527-Gestetner-GS3021_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13230
-ricoh-20200527-Infotec-Pro_C5300S_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13396
-ricoh-20200527-Lanier-IM_C6500_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13320
-ricoh-20200527-Ricoh-P_6000_JPN.ppd.gz	6ae65c90f7fabde3226c7b786aecc579	227449
-ricoh-20200821-Infotec-Pro_C5300SL_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13396
-ricoh-20200821-Lanier-IM_C530FB_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13207
-ricoh-20200821-Lanier-IM_C530F_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13277
-ricoh-20200821-Ricoh-IM_C2509J_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-ricoh-20200821-Ricoh-IM_C3509J_PDF.ppd.gz	555a4970ff7cfa739ef43c9b1975df63	13296
-ricoh-20200821-Ricoh-IM_C6500_JPN.ppd.gz	c4dfef8d0ab5650817f3bf9861f2254f	316142
-ricoh-20200930-Ricoh_Generic_PS_Printer.ppd.gz	0c4cdf9642fa2ed37d831cf89f2884fa	318819
-sharp-20180409-Sharp-AR-M452U-ps.ppd.gz	00ec0a296a52c129f53d9592b9f7ab4c	229459
-sharp-20180409-Sharp-MX-2640NR-ps.ppd.gz	883793cbc2b40239f4fbde595986b352	322423
-sharp-20180409-Sharp-MX-M283N-ps.ppd.gz	761374b54ae164d80fab7f43bb2250be	229533
-sharp-20180409-Sharp-MX-M363F-ps-jp.ppd.gz	85e074c7c640420c6dcef050e2b3ba7a	229452
-sharp-20180409-Sharp-MX-M623N-ps.ppd.gz	7ed00ce70c7c2ac2faf774071136ca01	229381
-sharp-20180409-Sharp-MX-M623-ps-jp.ppd.gz	36dcfd7cc0cf6cd625bcf54d7dbeead9	229377
-sharp-20190711-Sharp-MX-6240N-ps.ppd.gz	8f368e5df89d826674a51f95f834edfe	325556
-sharp-20190711-Sharp-MX-6500N-ps.ppd.gz	842ff2aa6a496719337b69f1aaff3462	326293
-sharp-20190711-Sharp-MX-6540FN-ps-jp.ppd.gz	accd5818da05659c7b7475b912fa8387	325552
-sharp-20190711-Sharp-MX-C250-ps.ppd.gz	2d41be365b63a139706b415254858f07	321283
-sharp-20190711-Sharp-MX-C301-ps.ppd.gz	852174ad3775f398b093a67f13f96de2	321488
-sharp-20190711-Sharp-MX-M1054-ps.ppd.gz	8216315886dbf05d700dc9904c7b4781	233681
-sharp-20190711-Sharp-MX-M1055-ps.ppd.gz	f43afc9645a62f241b27bbefb8dada6d	233681
-sharp-20190711-Sharp-MX-M654FN-ps-jp.ppd.gz	19b5ad79bac07a472bd4c56d18a3551f	233539
-sharp-20190711-Sharp-MX-M654N-ps.ppd.gz	23aad9f8e98a527d0ede2b3da56d1d47	233543
-sharp-20190711-Sharp-MX-M904-ps.ppd.gz	72507b0a45b3e94e413a108e4deede52	233681
-sharp-20191219-Sharp-AR-6020D-ps.ppd.gz	c0529af39db5da36033d2968709c2a3e	227991
-sharp-20191219-Sharp-AR-6020-ps.ppd.gz	3dda14d5a3b4c1883783c6f91ab2d148	227892
-sharp-20191219-Sharp-AR-6026N-ps.ppd.gz	a1cf955e31a3b9b6abd876d4ff26df04	227991
-sharp-20191219-Sharp-AR-G200-ps-jp.ppd.gz	f7335591d10dc31a7f671d07ce50b3bb	227888
-sharp-20191219-Sharp-BP-10C20-ps.ppd.gz	f1f3fc4d91e3a658efcd304b2f51e88a	322626
-sharp-20191219-Sharp-DX-2000U-ps.ppd.gz	c10adf10e706a27c544e2bd7e9b3e3e0	321653
-sharp-20191219-Sharp-DX-20C20-ps-jp.ppd.gz	352f835eed262b124f716bed3abe459a	322622
-sharp-20191219-Sharp-DX-2500N-ps.ppd.gz	35bc2180e870945478346247f48ce2be	322221
-sharp-20191219-Sharp-MX-1800N-ps.ppd.gz	d793c85950ab6a685548cdaea56dbde9	321070
-sharp-20191219-Sharp-MX-1810U-ps.ppd.gz	4ebdb32505c0759fcb5d0a76d8501b48	322099
-sharp-20191219-Sharp-MX-2300FG-ps-jp.ppd.gz	79cd3d0f615ce49ac39d241a285db81c	321142
-sharp-20191219-Sharp-MX-2300G-ps.ppd.gz	2023cc29e1446ffded9d85ce49c33e8c	321146
-sharp-20191219-Sharp-MX-2301N-ps.ppd.gz	58c0b488b6c8778ee2b544971fa91132	321586
-sharp-20191219-Sharp-MX-2310F-ps-jp.ppd.gz	17cc31e84e86f6e983b015f642743f92	322095
-sharp-20191219-Sharp-MX-2514FN-ps-jp.ppd.gz	e64d9e7490c1e00946dac6078560ceb4	322219
-sharp-20191219-Sharp-MX-2600FG-ps-jp.ppd.gz	4773014634dfcb4ad2337a2f3733254e	321728
-sharp-20191219-Sharp-MX-2600G-ps.ppd.gz	8561398ad5b562223d299870bd660514	321732
-sharp-20191219-Sharp-MX-2610FN-ps-jp.ppd.gz	7c959106fab13edd1e9e1d5ac6d9873b	322241
-sharp-20191219-Sharp-MX-2610N-ps.ppd.gz	dc574351738bcd1858d940b2058a2ae0	322245
-sharp-20191219-Sharp-MX-2614N-ps.ppd.gz	e61286f2078adbe04d39b6b8615b37c2	322648
-sharp-20191219-Sharp-MX-2631-ps-jp.ppd.gz	ad25a16c7a071795413eed67cbb817c0	325712
-sharp-20191219-Sharp-MX-2640FN-ps-jp.ppd.gz	54701d130b6972a390d4d8739d19d80c	322419
-sharp-20191219-Sharp-MX-2651-ps.ppd.gz	6def78cf3821298419115d7675e003ea	325723
-sharp-20191219-Sharp-MX-2661-ps-jp.ppd.gz	95924e9af5c6abb05e204940a7334847	325712
-sharp-20191219-Sharp-MX-3061-ps.ppd.gz	73d9e6a92f7f9d93991dacf21850abbd	325723
-sharp-20191219-Sharp-MX-3600FN-ps-jp.ppd.gz	a184b7bb4b6d242c1b03dcf30e63016a	321728
-sharp-20191219-Sharp-MX-4100N-ps.ppd.gz	fc8f8c7748ac3ea37cbf0888bb5c87fe	321732
-sharp-20191219-Sharp-MX-4110FN-ps-jp.ppd.gz	45069cf8878c161f662902bde1dbb395	322241
-sharp-20191219-Sharp-MX-4110N-ps.ppd.gz	712d406be908a12be1faf8957383461f	322245
-sharp-20191219-Sharp-MX-4140FN-ps-jp.ppd.gz	938abacc2241b8869db68242bb483b73	322419
-sharp-20191219-Sharp-MX-4140N-ps.ppd.gz	76ff77980a4342fe29e4a248a35a8daf	322423
-sharp-20191219-Sharp-MX-5500N-ps.ppd.gz	651a8e46b819bd946b3d05e40c7023f0	320994
-sharp-20191219-Sharp-MX-C303-ps.ppd.gz	23985162b7d7d73aa5ef54c84338de7f	324482
-sharp-20191219-Sharp-MX-C305W-ps-jp.ppd.gz	40fb723cb17cb92d011fd6610e1c2b92	324478
-sharp-20191219-Sharp-MX-M264FP-ps-jp.ppd.gz	1bc28afa2cb749414b55066cf7cb9161	229956
-sharp-20191219-Sharp-MX-M264NV-ps.ppd.gz	d01c390b02918b64786b1f8f289c81f7	229960
-sharp-20191219-Sharp-MX-M265N-ps.ppd.gz	b1e3aeee4ae030d460aada01f0d66697	230056
-sharp-20191219-Sharp-MX-M266FP-ps-jp.ppd.gz	e1e03e8a781ee2ab00b3d4f721f27ea2	229975
-sharp-20191219-Sharp-MX-M266N-ps.ppd.gz	ca6db0dbc3840b4f338ca0f1617770a4	230055
-sharp-20191219-Sharp-MX-M316G-ps-jp.ppd.gz	7037219886caa73ca92e3029f557d012	229975
-sharp-20191219-Sharp-MX-M364N-ps.ppd.gz	6d58ffe94db9a392d45b24e68cc76476	230658
-sharp-20191219-Sharp-MX-M365FN-ps-jp.ppd.gz	ce229514b0575785dca96a973fa60cc4	230654
-sharp-20191219-Sharp-MX-M365N-ps.ppd.gz	034d1048b023b0d89f6edd4595d00fca	230658
-sharp-20191219-Sharp-MX-M464FN-ps-jp.ppd.gz	8f5c35b3549483002584b4ea9c8b0193	230654
-sharp-20191230-Sharp-AR-B350W-ps-jp.ppd.gz	0b5de732db6d1e0998e9b57d30854b39	229712
-sharp-20191230-Sharp-AR-B351-ps.ppd.gz	184ffc7f4fa653481d7a0f9236c11641	229716
-sharp-20191230-Sharp-DX-C310-ps.ppd.gz	797fbc5a2843d18474003404fd207231	321413
-sharp-20191230-Sharp-MX-2630FN-ps-jp.ppd.gz	16b5e45fa4df52f5a544e33129d0f302	325122
-sharp-20191230-Sharp-MX-2630N-ps.ppd.gz	ec11b8405fafd67779d720fc5215b09a	325133
-sharp-20191230-Sharp-MX-2650FN-ps-jp.ppd.gz	b5d3cc6bd91518d1431e3b2a83e91351	325122
-sharp-20191230-Sharp-MX-3060N-ps.ppd.gz	1c1b825e5f1f47ad82cb97e9030ec02d	325133
-sharp-20191230-Sharp-MX-6580N-ps.ppd.gz	7629b3d379db3027423fe14218df7c37	327150
-sharp-20191230-Sharp-MX-7090N-ps.ppd.gz	5fb712b465721bd238fe330eed3e3a09	327815
-sharp-20191230-Sharp-MX-B355W-ps.ppd.gz	0a03526dac330b8bfea24627d1ebd6ec	233088
-sharp-20191230-Sharp-MX-B356W-ps.ppd.gz	4ba03ba8a88a3cfe6f30a2ae8c7be081	233219
-sharp-20191230-Sharp-MX-B380P-ps.ppd.gz	114e9e0c8a62691820fe74f8d6b0d516	229139
-sharp-20191230-Sharp-MX-M2630-ps.ppd.gz	1eead810a3e82c95e603f318cf30a36c	233737
-sharp-20191230-Sharp-MX-M2651-ps.ppd.gz	f380fffcca7fb5f4f8632abd44c2f56f	233760
-sharp-20191230-Sharp-MX-M3070-ps.ppd.gz	cb868b064311a780c5d45aee7d0245b3	233661
-sharp-20191230-Sharp-MX-M3071-ps.ppd.gz	61c76341f3a2a66de5bc04b9e54c497c	233684
-sharp-20191230-Sharp-MX-M3531-ps-jp.ppd.gz	3df5a4f1f6f78a9f1a33bf9872ffdf4d	233673
-sharp-20191230-Sharp-MX-M6570-ps.ppd.gz	d51664d46a490cb734da50a6bcebbb7d	234898
-sharp-20191230-Sharp-MX-M905-ps.ppd.gz	0573eb409bd218666bf81da07189f4ed	234536
-star-20171009-starcupsdrv-3.6.0-hsp7000r.ppd.gz	2aac5cf72ecac95f8f857b6bdcfb6332	23323
-star-20171009-starcupsdrv-3.6.0-hsp7000s.ppd.gz	30a4c96be8ad34c568d89dbcff5c742f	9021
-star-20171009-starcupsdrv-3.6.0-hsp7000v.ppd.gz	e8ef4a9054a70100556dcce64c7eb3bd	17659
-star-20171009-starcupsdrv-3.6.0-sp512.ppd.gz	c9302918ce7ad89142274bc829407dbc	6884
-star-20171009-starcupsdrv-3.6.0-sp542.ppd.gz	0aea599f8b91d68e68ce16c6dc6fcf3a	6884
+hp-20190918-hplip-3.19.6-hp-Mimas.ppd.gz	0518d62d4497ae16f27046b796431a26	18235
+hp-20190918-hplip-3.19.6-hp-Mimas15.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18223
+hp-20190918-hplip-3.19.6-hp-Mimas17.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18223
+hp-20190918-hplip-3.19.6-hp-MimasTDR.ppd.gz	1651551c1a3a0fd57c987f516fb52ee9	12052
+hp-20190918-hplip-3.19.6-hp-Pyramid.ppd.gz	9785e610a6ae577b529e1e3e0e4ace37	12096
+hp-20190918-hplip-3.19.6-hp-Pyramid15.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18223
+hp-20190918-hplip-3.19.6-hp-PyramidPlus.ppd.gz	1651551c1a3a0fd57c987f516fb52ee9	12052
+hp-20190918-hplip-3.19.6-hp-PyramidRefresh15.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18222
+hp-20190918-hplip-3.19.6-hp-PyramidRefresh17.ppd.gz	58c8ca7d9b4122611e3551f80ebe1341	18222
+hp-20190918-hplip-3.19.6-hp-SPDOfficejetProAsize.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18223
+hp-20190918-hplip-3.19.6-hp-SPDOfficejetProBsize.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18223
+hp-20190918-hplip-3.19.6-hp-Saipan.ppd.gz	01919e1a8cc7e5732ced0976128964ab	18189
+hp-20190918-hplip-3.19.6-hp-Saipan15B.ppd.gz	01919e1a8cc7e5732ced0976128964ab	18189
+hp-20190918-hplip-3.19.6-hp-Stabler.ppd.gz	0518d62d4497ae16f27046b796431a26	18235
+hp-20190918-hplip-3.19.6-hp-ViperMinusVIP.ppd.gz	aaa7d48665d43147f9c369761310c26a	19163
+hp-20190918-hplip-3.19.6-hp-ViperPlusVIP.ppd.gz	0518d62d4497ae16f27046b796431a26	18235
+hplip-20201209-hplip-3.20.11-hp-CLE17.ppd.gz	58c8ca7d9b4122611e3551f80ebe1341	18222
+hplip-20201209-hplip-3.20.11-hp-Mimas17.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18222
+hplip-20201209-hplip-3.20.11-hp-PyramidRefresh17.ppd.gz	58c8ca7d9b4122611e3551f80ebe1341	18222
+hplip-20201209-hplip-3.20.11-hp-SPDOfficejetProBsize.ppd.gz	8e770df1d37ded0c37bad08ee7a710b9	18223
+hplip-20201209-hplip-3.20.11-hp-deskjet_3700_series.ppd.gz	c3ec7dcdb0c7be64173663824b5489d1	18225
+hplip-20201209-hplip-3.20.11-hp-deskjet_d1600_series.ppd.gz	c98ab70588b9661509e80cd16df6543c	4095964
+hplip-20201209-hplip-3.20.11-hp-photosmart_8700_series.ppd.gz	cf73d8326943d508d77e4c6f34e5c609	18236
+hplip-20201209-hplip-3.20.11-hp-photosmart_a530_series.ppd.gz	f846df49b4b5fea20ed6deb23e16172b	193300
+lanier-20190916-Lanier-IM_550_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+lanier-20190916-Lanier-IM_600SR_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13201
+lanier-20190916-Lanier-P_800_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13131
+lanier-20190916-Lanier-P_C600_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13203
+ricoh-20190916-Ricoh-IPSiO_SP_3400L_PXL.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547183
+ricoh-20190916-Ricoh-IPSiO_SP_3510SF_PXL.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547183
+ricoh-20190916-Ricoh-MP_C306Z_JPN_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13176
+ricoh-20190916-Ricoh-SP_212Nw_PXL.ppd.gz	7c6c3947e37c123913d1c272244d5712	547183
+ricoh-20190916-Ricoh-SP_2200L_PXL.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547183
+ricoh-20190916-Ricoh-SP_320DN_PXL.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547183
+ricoh-20191121-Infotec-Pro_8200S_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+ricoh-20191121-Infotec-Pro_8210_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13269
+ricoh-20191121-Infotec-Pro_C5200S_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13316
+ricoh-20191121-Infotec-Pro_C7200S_Light_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13365
+ricoh-20191121-Infotec-Pro_C7200_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13365
+ricoh-20200221-Gestetner-IM_C300_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13230
+ricoh-20200221-Lanier-IM_C400SR_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13246
+ricoh-20200221-Ricoh-SP_C342M_JPN_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13178
+ricoh-20200527-Gestetner-GS3021_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13226
+ricoh-20200527-Infotec-Pro_C5300S_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13392
+ricoh-20200527-Lanier-IM_C6500_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13316
+ricoh-20200821-Infotec-Pro_C5300SL_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13392
+ricoh-20200821-Lanier-IM_C530FB_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13203
+ricoh-20200821-Lanier-IM_C530F_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13273
+ricoh-20200821-Ricoh-IM_C2509J_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+ricoh-20200821-Ricoh-IM_C3509J_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13292
+ricoh-20210222-Gestetner-G3020c_PXL.ppd.gz	f76fd04f89024325aba24e06bc1f45ed	4219532
+ricoh-20210222-Lanier-IM_7000_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13244
+ricoh-20210601-Gestetner-GS3025m_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+ricoh-20210601-Gestetner-GS3040m_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+ricoh-20210601-Gestetner-GS3060m_PDF.ppd.gz	6929104cf3a70f5701f3e22e0d44a9b0	13220
+ricoh-20210601-Ricoh-M_C2000_PXL.ppd.gz	f76fd04f89024325aba24e06bc1f45ed	4219532
+ricoh-20210601-Ricoh-SP_2300L_PXL.ppd.gz	722e7156a44e42e18d18c8b42d23c73f	547165
+star-20171009-starcupsdrv-3.6.0-hsp7000r.ppd.gz	b95ed8efd53704699cfe16e6c6bbadb4	23451
+star-20171009-starcupsdrv-3.6.0-hsp7000s.ppd.gz	23daf0808d631e52a0f79168426e34ec	9149
+star-20171009-starcupsdrv-3.6.0-hsp7000v.ppd.gz	b30797d79d61bfa85f2e9260f7948071	17787
+star-20171009-starcupsdrv-3.6.0-sp512.ppd.gz	dd3a5a6745e3deeed6eb8dc977a12061	7012
+star-20171009-starcupsdrv-3.6.0-sp542.ppd.gz	b0228f9f90f9ce80abbcbc4870f02a0b	7012
 star-20171009-starcupsdrv-3.6.0-tsp1000.ppd.gz	033e025bd58a5df4193d77dffcd3e494	25672
 star-20171009-starcupsdrv-3.6.0-tsp828l.ppd.gz	c62fd6223287d43a28be7c7b3ecf3445	27910
-star-20171009-starcupsdrv-3.6.0-tup542.ppd.gz	d18d589f68cde7611a5ceb77d39b096c	26253
-star-20171009-starcupsdrv-3.6.0-tup592.ppd.gz	2065959866afc63edb0bc88128a2723f	26282
-star-20171009-starcupsdrv-3.6.0-tup942.ppd.gz	858842fadb2f7dc6984e76c926e39101	27912
-star-20171009-starcupsdrv-3.6.0-tup992.ppd.gz	45760ce93113729f27ff1486c9b6e1c2	27919
-star-20191209-fvp10.ppd.gz	edce9c39a79ad4e3ffa4753c62262bcd	23333
-star-20191209-sp712.ppd.gz	f8f658fb6a69790ee14b616f1fa2884e	6903
-star-20191209-sp717.ppd.gz	f8f658fb6a69790ee14b616f1fa2884e	6903
-star-20191209-sp742.ppd.gz	e26eb498c5cdc8de27799feac4f78c49	6903
-star-20191209-sp747.ppd.gz	e26eb498c5cdc8de27799feac4f78c49	6903
+star-20171009-starcupsdrv-3.6.0-tup542.ppd.gz	c67a9e90d65c2c831b686c6060eeec1d	26381
+star-20171009-starcupsdrv-3.6.0-tup592.ppd.gz	ed8b0a08a088f0386c21e33477fbdc8c	26410
+star-20171009-starcupsdrv-3.6.0-tup942.ppd.gz	0832be5d1746b87af5c9f6a8335c2a11	28040
+star-20171009-starcupsdrv-3.6.0-tup992.ppd.gz	3931ef5704617f8705c87d9b3a1b6181	28047
+star-20191209-fvp10.ppd.gz	4889dc9b4f86617cfa091148aae9e58d	23461
+star-20191209-mcp20.ppd.gz	9cb66b4b250ebd01a5ba24345f29ae64	17989
+star-20191209-mcp21.ppd.gz	9cb66b4b250ebd01a5ba24345f29ae64	17989
+star-20191209-mcp30.ppd.gz	6391d23b80ab814eebaabad53eecda71	25146
+star-20191209-mcp31.ppd.gz	c9145a34da25ab1616a9836b4947dc97	25150
+star-20191209-pop10.ppd.gz	6588f93a85a8bff5609db57cfdbd5922	17993
+star-20191209-sp712.ppd.gz	6a7ff90c218d0b1e038f281194e96dc2	7031
+star-20191209-sp717.ppd.gz	6a7ff90c218d0b1e038f281194e96dc2	7031
+star-20191209-sp742.ppd.gz	9791c5bfa0752c1ffa3b34cba68b1e46	7031
+star-20191209-sp747.ppd.gz	9791c5bfa0752c1ffa3b34cba68b1e46	7031
 star-20191209-tsp113.ppd.gz	dbe0cef47a47c6a948e9c7b1bba406b8	23308
-star-20191209-tsp143gt.ppd.gz	c628d62f722d5ff8cf43c9af31107443	23309
 star-20191209-tsp143.ppd.gz	c628d62f722d5ff8cf43c9af31107443	23309
-star-20191209-tsp654.ppd.gz	a535d0d1bf9b566fbdd290332074097d	23331
-star-20191209-tsp700II.ppd.gz	a00b509ee2b223783570511b8ed51784	23327
-star-20191209-tsp800II.ppd.gz	ddbb5ee36151d3f6efa4bca30cdd16e1	27934
-xerox-20190225-xr6605dn.ppd.gz	b6253ee6b80ca899a07c9045538e31cf	315069
-xerox-20190225-xr8580dn.ppd.gz	c90c438f126c1bdef6ba6ef57fb6d728	330949
-xerox-20190225-xrx3655s.ppd.gz	55c6fc14b5de1d89fa042dc1db7e0a2e	226849
-xerox-20190225-xrx4622.ppd.gz	2152a3b95954f7d3b801ced676d910da	232750
-xerox-20190225-xrx5330.ppd.gz	e03141f5e1f471d495986d81db306fe1	227752
-xerox-20190225-xrx5875.ppd.gz	57115775e38dbbc67b37f3f5d32ee313	227177
-xerox-20190225-xrx7830.ppd.gz	ebbfd80c2964bd168e60f32d8671b5df	315798
-xerox-20190225-xrx7970.ppd.gz	ebbfd80c2964bd168e60f32d8671b5df	315798
-xerox-20190225-xrx8580n.ppd.gz	d3f42cfec5d6a4679021ecc8ae47571f	330947
-xerox-20190225-xrxd95cp.ppd.gz	becef960ef22ebdbba03e370982058fd	227898
-xerox-20190711-xrwc3335.ppd.gz	f11618e2cbb98df1e9a9c6568d926bd8	226881
-xerox-20190711-xrx6510.ppd.gz	83dacd652dff94ef813f5c99d17e1b53	315172
-xerox-20190820-xrxosd.ppd.gz	d0794808859a8fa8da75ba8bcc370383	213468
-xerox-20191030-Xerox_Phaser_7800DN.ppd.gz	86245d933d811bb431af5e41be2a4bca	324577
-xerox-20191030-Xerox_Phaser_7800DX.ppd.gz	1da5b07b7a00d842a8b0864675bd5ef7	324583
-xerox-20191030-Xerox_Phaser_7800GX.ppd.gz	7936a1236fde1f1aa999809ae00d0c69	324577
-xerox-20191030-Xerox_VersaLink_C500.ppd.gz	c66d2e2be622a28cf4b69f9737e852da	315279
-xerox-20191030-Xerox_VersaLink_C505.ppd.gz	2942bce3573a3b8919f0f21d0363a046	315203
-xerox-20191030-Xerox_VersaLink_C600.ppd.gz	dec1cd2299cccfeeadbe9f71dce1cf33	315356
-xerox-20191030-Xerox_VersaLink_C605.ppd.gz	e5b35f7df7053fd8bb3849299b4b30f1	315280
-xerox-20191030-xrxB400.ppd.ppd.gz	c353106563fe3aa1cc0b7c4447703fda	226548
-xerox-20191030-xrxB405.ppd.ppd.gz	9aa1e1b7debd031923b618c7691277d1	226548
-xerox-20191030-xrxb600.ppd.gz	34ef84669e7372fdc521ee2e5ba3d6cd	226582
-xerox-20191030-xrxb615.ppd.gz	5df8d8865a76cc47e21cf59638c1a19c	226582
-xerox-20191030-xrxB7025.ppd.gz	42f58cb2d053fc2b399a44162338db43	226916
-xerox-20191030-xrxB7030.ppd.gz	42f58cb2d053fc2b399a44162338db43	226916
-xerox-20191030-xrxB7035.ppd.gz	42f58cb2d053fc2b399a44162338db43	226916
-xerox-20191030-xrxB8045.ppd.gz	2b5ae85772c3fc0d17781f9504ed3496	227177
-xerox-20191030-xrxC400.ppd.gz	7ffb14a2c66af9900b382b61f17e562d	315199
-xerox-20191030-xrxC405.ppd.gz	7ffb14a2c66af9900b382b61f17e562d	315199
-xerox-20191030-xrxC7000.ppd.gz	b78277ed2599de7eae11b84064ad0ba8	315440
-xerox-20191030-xrxC7030.ppd.gz	390198cc8283aef7465c814d6f45c84b	315540
-xerox-20191030-xrxC8000.ppd.gz	0335791047b2d17f7f7f5d9dae808268	315616
-xerox-20191030-xrxC8030.ppd.gz	d79d0917e5408e25cb8c7b36a347b86e	315798
-xerox-20200129-xrxC9065.ppd.gz	d62fb5a418c0d6802371aeadeea9c055	315548
-xerox-20200226-xrxB9100.ppd.gz	0930643404b4b2faecc13173e83c785b	226927
-xerox-20201014-xrxC8000W.ppd.gz	3f6c0d5fe1e3039496fb85c702247a03	315540
+star-20191209-tsp143gt.ppd.gz	c628d62f722d5ff8cf43c9af31107443	23309
+star-20191209-tsp654.ppd.gz	9ac3a45aa6f7d644d0958add7c53a990	23459
+star-20191209-tsp700II.ppd.gz	c89c5b7351c482bbface30ae0dfbbd2a	23455
+star-20191209-tsp800II.ppd.gz	755d15fe94be59fd0a0f54f7ef27138c	28062
+zebra-20210504-SP-005645A.ppd.gz	2d2903debee268e872aac84b6a37aef3	51514
diff --git a/client/site_tests/platform_PrinterPpds/digests/denylist.txt b/client/site_tests/platform_PrinterPpds/digests/denylist.txt
index a6a3662..e69de29 100644
--- a/client/site_tests/platform_PrinterPpds/digests/denylist.txt
+++ b/client/site_tests/platform_PrinterPpds/digests/denylist.txt
@@ -1,808 +0,0 @@
-brother-20201006-DCP7080-cups-en.ppd.gz
-brother-20201006-DCP7080D-cups-en.ppd.gz
-brother-20201006-DCP7090-cups-en.ppd.gz
-brother-20201006-DCP7090DW-cups-en.ppd.gz
-brother-20201006-HL2290-cups-en.ppd.gz
-brother-20201006-HL2295D-cups-en.ppd.gz
-brother-20201006-MFCL2685DW-cups-en.ppd.gz
-epson-20200615-Epson-LX-10000FK_Series_PS3.ppd.gz
-epson-20200615-Epson-LX-10000F_PS.ppd.gz
-epson-20200615-Epson-LX-10010MF_Series_PS3.ppd.gz
-epson-20200615-Epson-WF-C17590_Series_PS3.ppd.gz
-epson-20200615-Epson-WF-C20590_PS.ppd.gz
-epson-20200615-Epson-WF-M20590_Series_PS3.ppd.gz
-foomatic-20170101-Samsung-M332x_382x_402x-Postscript.ppd.gz
-foomatic-20190909-Ricoh-IM_430F-PostscriptMono-Ricoh.ppd.gz
-foomatic-20191029-BR5070DN_GPL.ppd.gz
-foomatic-20191029-shar208d.ppd.gz
-foomatic-20191029-shar208s.ppd.gz
-foomatic-20200219-Apple-12_640ps-Postscript.ppd.gz
-foomatic-20200219-Apple-LaserWriter_IIg-Postscript.ppd.gz
-foomatic-20200219-Brother-DCP-8020-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-DCP-8025D-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-DCP-8040-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-DCP-8045D-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-DCP-9010CN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-DCP-9040CN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-DCP-9045CDN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-1450-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-1650_70N-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-1850_70N-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-2460-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-2600CN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-2700CN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-3070CW-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-3260N-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-3450CN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-4050CDN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-5050-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-5150D-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-5240-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-5250DN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-5270DN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-6050D_DN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-6050-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-7050-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-8050N-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-MFC-7450-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-MFC-8220-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-MFC-8440-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-MFC-8640D-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-MFC-8670DN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-MFC-8820D-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-MFC-9420CN-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-MFC-9440CN-Postscript-Brother.ppd.gz
-foomatic-20200219-Canon-BJC-250ex-bjc250gs.ppd.gz
-foomatic-20200219-Canon-BJC-255SP-bjc250gs.ppd.gz
-foomatic-20200219-Canon-GP_405-Postscript.ppd.gz
-foomatic-20200219-Canon-imageRunner_C2570-Postscript.ppd.gz
-foomatic-20200219-Canon-iPR_C600-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iPR_C650_PPD-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iPR_C700_800-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iPR_C750_850_PPD-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_400_500-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_4025_4035-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_4225_4235-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_6055_6065-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_6255_6265-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_8085_8095-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_8205-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C2020_2030-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C2020i_2030i-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C2025-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C2220_2230-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C2225-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C250_350-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C3320L-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C3320-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C3325_3330-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C351-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C5030_5035-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C5045_5051-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C5235_5240-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C5250_5255-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C7055_7065-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C7260_7270-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C7280-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C9060_9070-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C9065_9075-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-iR-ADV_C9270_9280-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-LBP6670-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-LBP6780_3580-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-LBP710C_PPD-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-LBP712C_PPD-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-LBP7660C-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-LBP7680C_5280-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-LBP7780C_5480-Postscript-Canon.ppd.gz
-foomatic-20200219-Canon-LBP8780-Postscript-Canon.ppd.gz
-foomatic-20200219-Dell-M5200-Postscript.ppd.gz
-foomatic-20200219-Epson-AL-2600-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C1900_PS3-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C2000_PS3-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C2600-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C2800-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C3800-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C4000_PS3-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C4100-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C4200-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C8600_PS3-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C9100-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-C9200-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-CX21-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-M2000-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-M2400-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-M4000-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-M8000-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-AL-MX20-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-EPL-5900_PS3-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-EPL-6100_PS3-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-EPL-6200-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-EPL-N2500_PS3-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-EPL-N2550-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-EPL-N2700-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-EPL-N3000-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-EPL-N7000-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-LP-8300CPD-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-LP-8500CPD-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-LP-8800CPS-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-LP-9100PS3-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-LP-9200C-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-LP-9500CPS-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-LP-9600SPD-Postscript-Epson.ppd.gz
-foomatic-20200219-Epson-LP-9800C-Postscript-Epson.ppd.gz
-foomatic-20200219-Fuji_Xerox-DocuPrint_CM305_df-Postscript.ppd.gz
-foomatic-20200219-Generic-PostScript_Printer-Postscript.ppd.gz
-foomatic-20200219-Gestetner-C7010-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-C7116-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-C7425dn-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-C7435n-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-C7528n-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-C7535n-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-DSc224-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-DSc328-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-DSc38-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-DSc38u-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-F9199_9199nf-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-GS1227-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-P7026-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-P7032-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-P7126-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-P7132n-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-P7145-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-P7325-Postscript-Gestetner.ppd.gz
-foomatic-20200219-Gestetner-P7431cn-Postscript-Gestetner.ppd.gz
-foomatic-20200219-IBM-4303_Network_Color_Printer-Postscript.ppd.gz
-foomatic-20200219-Kodak-IS_70_CPII-Postscript.ppd.gz
-foomatic-20200219-KONICA_MINOLTA-bizhub_1050eP-Postscript-KONICA_MINOLTA.ppd.gz
-foomatic-20200219-KONICA_MINOLTA-bizhub_500-Postscript-KONICA_MINOLTA.ppd.gz
-foomatic-20200219-KONICA_MINOLTA-bizhub_C250P-Postscript-KONICA_MINOLTA.ppd.gz
-foomatic-20200219-KONICA_MINOLTA-bizhub_C252P-Postscript-KONICA_MINOLTA.ppd.gz
-foomatic-20200219-KONICA_MINOLTA-bizhub_C351-Postscript-KONICA_MINOLTA.ppd.gz
-foomatic-20200219-KONICA_MINOLTA-bizhub_C352P-Postscript-KONICA_MINOLTA.ppd.gz
-foomatic-20200219-KONICA_MINOLTA-bizhub_C450P-Postscript-KONICA_MINOLTA.ppd.gz
-foomatic-20200219-KONICA_MINOLTA-bizhub_C451-Postscript-KONICA_MINOLTA.ppd.gz
-foomatic-20200219-KONICA_MINOLTA-bizhub_C550-Postscript-KONICA_MINOLTA.ppd.gz
-foomatic-20200219-Kyocera-Ci-1100-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-CS-1650-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-CS-2050-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-1030D-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-1118MFP-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-1200-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-1700plus-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-1700-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-1714M-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-1800-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-1900-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-1920-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-2000D-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-3700plus-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-3700-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-3750-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-3820N-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-3830N-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-3900DN-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-4000DN-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-5800C-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-5900C-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-600-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-6020-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-6026-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-6300-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-6500plus-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-6700-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-6750-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-680-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-6900-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-6950DN-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-7000-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-7028M-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-8000C-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-9000-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-9100DN-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-920-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-C5015N-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-C5016N-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-C5020N-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-C5025N-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-C5030N-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-C8008N-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-C8026N-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-C8100DNplus_KPDL-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-FS-C8100DN-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-1510-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-1530-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-1810-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-1820-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-2030-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-3050-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-4230_5230-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-4530-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-5530-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-6030-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-6230-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-6330-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-C2520-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-C2630-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-C830-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Kyocera-KM-C850-Postscript-Kyocera.ppd.gz
-foomatic-20200219-Lanier-SP_3400N-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lanier-SP_3410DN-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lanier-SP_3600DN-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lanier-SP_C221N-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lanier-SP_C222DN-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lanier-SP_C222SF-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lanier-SP_C232SF-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lanier-SP_C311N-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lanier-SP_C312DN-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lanier-SP_C360DNw-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lanier-SP_C360SFNw-Postscript-Lanier.ppd.gz
-foomatic-20200219-Lexmark-4039_10plus-Postscript.ppd.gz
-foomatic-20200219-Lexmark-C2132-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C500n-Postscript.ppd.gz
-foomatic-20200219-Lexmark-C510b-Postscript.ppd.gz
-foomatic-20200219-Lexmark-C510-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C520-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C522-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C524-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C540-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C543-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C544-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C546-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C734-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C736-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C750-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C752-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C780-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C782-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C910-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C912-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C930-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C935-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-E260dn-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-E350d-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-E360dn-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-EG460dn-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-Optra_Color_1200-Postscript.ppd.gz
-foomatic-20200219-Lexmark-T650-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-T656-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-W850-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-X203n-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-X264dn-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-X363dn-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-X463de-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-X543-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-X544-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-X546-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-X734de-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-X860de-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-X940e-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Minolta-magicolor_3100-Postscript.ppd.gz
-foomatic-20200219-NEC-P2X-necp2xX.upp.ppd.gz
-foomatic-20200219-NRG-SP_3500N-Postscript-NRG.ppd.gz
-foomatic-20200219-NRG-SP_3510DN-Postscript-NRG.ppd.gz
-foomatic-20200219-NRG-SP_C242DN-Postscript-NRG.ppd.gz
-foomatic-20200219-Oce-3145PS-Postscript2-Oce.ppd.gz
-foomatic-20200219-Oce-8445PS-Postscript2-Oce.ppd.gz
-foomatic-20200219-Oce-9230-Postscript2-Oce.ppd.gz
-foomatic-20200219-Oce-9260-Postscript2-Oce.ppd.gz
-foomatic-20200219-Oce-PPC3073PS-Postscript-Oce.ppd.gz
-foomatic-20200219-Oce-PPC3074PS-Postscript-Oce.ppd.gz
-foomatic-20200219-Oce-PPC5115PS-Postscript-Oce.ppd.gz
-foomatic-20200219-Oce-VarioPrint_2045PS-Postscript-Oce.ppd.gz
-foomatic-20200219-Oce-VarioPrint_2090PS-Postscript-Oce.ppd.gz
-foomatic-20200219-Oce-VarioPrint_2100PS-Postscript-Oce.ppd.gz
-foomatic-20200219-Oce-VarioPrint_2105PS-Postscript-Oce.ppd.gz
-foomatic-20200219-Oki-14i-Postscript-Oki.ppd.gz
-foomatic-20200219-Oki-B4350-Postscript-Oki.ppd.gz
-foomatic-20200219-Oki-C5400-Postscript-Oki.ppd.gz
-foomatic-20200219-Oki-C5700-Postscript-Oki.ppd.gz
-foomatic-20200219-Oki-C5900-Postscript-Oki.ppd.gz
-foomatic-20200219-Oki-C6100-Postscript-Oki.ppd.gz
-foomatic-20200219-Oki-C8800-Postscript-Oki.ppd.gz
-foomatic-20200219-Oki-C9600-Postscript-Oki.ppd.gz
-foomatic-20200219-Ricoh-ColorLaser_AP828-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-DDP_70-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-DDP_92-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-EMP_156-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-SP_330DN-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-SP_330SFN-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-SP_3700-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-SP_3700SF-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-SP_400DN-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-SP_C250DN-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-SP_C250SF-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-SP_C261DNw-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Samsung-C140x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-C2620-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-C2670-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-C460-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-C4820-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-C48x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLP-350-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLP-410-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLP-660-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLP-670-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLP-680-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLP-770-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLP-775-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLX-3300-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLX-6200-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLX-6220-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLX-6250-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLX-8380-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLX-8385-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLX-8640_8650-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLX-9250_9350-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLX-9252_9352-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-CLX-92x1_93x1-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-K3250-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-K401-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-K703-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-K7600-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-M337x_387x_407x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-M403x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-M408x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-M4370_5370-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-M453x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-M458x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-M5270-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-2150-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-2550-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-2570-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-2850-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-2855-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-3470-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-371x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-4050-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-4055-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-451x_501x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-4550-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-4555-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-551x_651x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-8850_8950-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-ML-8x00-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-483x_5x3x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-4x28-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-5635-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-5835_5935-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-6545-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-6545X-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-681x-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-6x20-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-6x22-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-6x45-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-6x55-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-8030_8040-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-8123_8128-Postscript-Samsung.ppd.gz
-foomatic-20200219-Samsung-SCX-8230_8240-Postscript-Samsung.ppd.gz
-foomatic-20200219-Sharp-AR-155FG_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-160M_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-163FG_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-163G_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-168D-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-168S-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-200M_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-205FG_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-205G_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-266FP_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-311FP_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-5220-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-555M_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-705M_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-B07-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-BC260-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-C170FP_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-C260P-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-M161_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-M165_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-M205_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-M206_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-M236_PS-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-M351N-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-M550N-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-M700N-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-N182FG-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-AR-N182G-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-MX-2314NR-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-MX-2614NR-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-MX-M1100-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-MX-M182D-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-MX-M182-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-MX-M202D-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-MX-M260FP-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-MX-M260-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-MX-M264NR-Postscript-Sharp.ppd.gz
-foomatic-20200219-Sharp-MX-M860-Postscript-Sharp.ppd.gz
-foomatic-20200219-Tektronix-Phaser_350-Postscript.ppd.gz
-foomatic-20200219-Toshiba-e-Studio_205-Postscript-Toshiba.ppd.gz
-foomatic-20200219-Toshiba-e-Studio_282-Postscript-Toshiba.ppd.gz
-foomatic-20200219-Toshiba-e-Studio_3510c-Postscript-Toshiba.ppd.gz
-foomatic-20200219-Toshiba-e-Studio_451c-Postscript-Toshiba.ppd.gz
-foomatic-20200219-Toshiba-e-Studio_452-Postscript-Toshiba.ppd.gz
-foomatic-20200219-Toshiba-e-Studio_850-Postscript-Toshiba.ppd.gz
-foomatic-20200219-Toshiba-GL-1010-Postscript-Toshiba.ppd.gz
-foomatic-20200219-Toshiba-GL-1020-Postscript-Toshiba.ppd.gz
-fuji_xerox-20200402-fuji-xerox-20200402-fx-apeosportv-c3375.ppd.gz
-hp-20171121-hplip-3.17.10-hp-color_laserjet-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_4-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_4si-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_4v-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_6p-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_p2055_series-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_p4010_series-ps.ppd.gz
-hp-20190111-hplip-3.18.12-hp-designjet_z6200_42in_photo-ps.ppd.gz
-hp-20190111-hplip-3.18.12-hp-designjet_z6200_60in_photo-ps.ppd.gz
-hp-20190111-hplip-3.18.12-hp-designjet_z6810ps_42in-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-cm8060_mfp_with_edgeline-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-color_designjet_xl_3600-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-color_laserjet_pro_mfp_m277-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-designjet_t2600dr-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-laserjet_200_color_m251-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-laserjet_m1522_mfp-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-laserjet_m2727_mfp_series-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz
-hp-20190918-hplip-3.19.6-hp-postscript-inkjet.ppd.gz
-hp-20190918-hplip-3.19.6-hp-postscript-laserjet.ppd.gz
-hp-20190918-hplip-3.19.6-hp-postscript-laserjet-pro.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_designjet_xl_3600-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2550_series-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2605-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2700-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2800-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_cm1015-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_e85055-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_flowmfp_m776-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_m856-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_mfp_m776-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_4000ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_4500mfp.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_d5800-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_24in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_44in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1200_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1300_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1500-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1530-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1600dr-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1600_printer-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1700dr_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1700_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1708dr_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1708_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t2300_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t2500-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t2600dr-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t3500-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps_monochrome-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t7200-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t770_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t770ps_24in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_24in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_44in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t920-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t930-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z5200_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_42in_photo-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_60in_photo-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6600-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6610ps_60in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6800_photo-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6810ps_60in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-laserjet_100_color_mfp_m175-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_p55250-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_3900ps_mfp-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4000ps-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4100ps-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps_blueprinter-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp_blueprinter-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps_blueprinter-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps-ps.ppd.gz
-konica_minolta-20200331-konica-minolta-20200331-konica-minolta-226i.ppd.gz
-konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c226.ppd.gz
-kyocera-20180809-Kyocera_TASKalfa_3051ci.ppd.gz
-kyocera-20190328-Kyocera_CS_2551ci.ppd.gz
-kyocera-20190328-Kyocera_CS_2552ci.ppd.gz
-kyocera-20190328-Kyocera_CS_3010i.ppd.gz
-kyocera-20190328-Kyocera_CS_3011i.ppd.gz
-kyocera-20190328-Kyocera_CS_306ci.ppd.gz
-kyocera-20190328-Kyocera_CS_307ci.ppd.gz
-kyocera-20190328-Kyocera_CS_3212i.ppd.gz
-kyocera-20190328-Kyocera_CS_4002i.ppd.gz
-kyocera-20190328-Kyocera_CS_7002i.ppd.gz
-kyocera-20190328-Kyocera_CS_7052ci.ppd.gz
-kyocera-20190328-Kyocera_CS_9002i.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M2030dn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M2035dn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M2040dn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M2235dn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M3040dn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M3145dn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M4028idn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M4125idn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M5021cdn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M5521cdn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M6026cdn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M6230cidn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M8024cidn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_M8124cidn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_P2035d.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_P3045dn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_P4035dn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_P6026cdn.ppd.gz
-kyocera-20190328-Kyocera_ECOSYS_P8060cdn.ppd.gz
-kyocera-20190328-Kyocera_FS-5040DN.ppd.gz
-kyocera-20190328-Kyocera_TASKalfa_4020i.ppd.gz
-kyocera-20190328-Kyocera_TASKalfa_406ci.ppd.gz
-kyocera-20190328-Kyocera_TASKalfa_4500i.ppd.gz
-kyocera-20200211-Kyocera_TASKalfa_7003i.ppd.gz
-kyocera-20200416-Kyocera_CS_205c.ppd.gz
-kyocera-20200416-Kyocera_CS_250ci.ppd.gz
-kyocera-20200416-Kyocera_CS_2550ci.ppd.gz
-kyocera-20200416-Kyocera_CS_2553ci.ppd.gz
-kyocera-20200416-Kyocera_CS_2554ci.ppd.gz
-kyocera-20200416-Kyocera_CS_255.ppd.gz
-kyocera-20200416-Kyocera_CS_300i.ppd.gz
-kyocera-20200416-Kyocera_CS_3050ci.ppd.gz
-kyocera-20200416-Kyocera_CS_308ci.ppd.gz
-kyocera-20200416-Kyocera_CS_3500i.ppd.gz
-kyocera-20200416-Kyocera_CS_4003i.ppd.gz
-kyocera-20200416-Kyocera_CS_6500i.ppd.gz
-kyocera-20200416-Kyocera_CS_6550ci.ppd.gz
-kyocera-20200416-Kyocera_CS_7003i.ppd.gz
-kyocera-20200416-Kyocera_CS_7353ci.ppd.gz
-kyocera-20200416-Kyocera_CS_9003i.ppd.gz
-kyocera-20200416-Kyocera_ECOSYS_M3860idnf.ppd.gz
-kyocera-20200416-Kyocera_ECOSYS_M3860idn.ppd.gz
-kyocera-20200416-Kyocera_ECOSYS_P3260dn.ppd.gz
-kyocera-20200416-Kyocera_ECOSYS_P4135dn.ppd.gz
-kyocera-20200416-Kyocera_ECOSYS_P5018cdn.ppd.gz
-kyocera-20200416-Kyocera_FS-1028MFP.ppd.gz
-kyocera-20200416-Kyocera_FS-1030MFP.ppd.gz
-kyocera-20200416-Kyocera_FS-1035MFP.ppd.gz
-kyocera-20200416-Kyocera_FS-1120D.ppd.gz
-kyocera-20200416-Kyocera_FS-2020D.ppd.gz
-kyocera-20200416-Kyocera_FS-2100D.ppd.gz
-kyocera-20200416-Kyocera_FS-3540MFP.ppd.gz
-kyocera-20200416-Kyocera_FS-3920DN.ppd.gz
-kyocera-20200416-Kyocera_FS-4100DN.ppd.gz
-kyocera-20200416-Kyocera_FS-6970DN.ppd.gz
-kyocera-20200416-Kyocera_FS-9130DN.ppd.gz
-kyocera-20200416-Kyocera_FS-C2026MFP.ppd.gz
-kyocera-20200416-Kyocera_FS-C2026MFP+.ppd.gz
-kyocera-20200416-Kyocera_FS-C5250DN.ppd.gz
-kyocera-20200416-Kyocera_FS-C8600DN.ppd.gz
-kyocera-20200416-Kyocera_TASKalfa_3060ci.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_M2540dwJ.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_M3645idnJ.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_M4226idn.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_M6635cidnJ.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_M8224cidn.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_P3060dnJ.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_P3145dnJ.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_P3145dn.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_P4040dnJ.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_P4060dnJ.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_P4140dnJ.ppd.gz
-kyocera-20200716-Kyocera_ECOSYS_P8060cdnJ.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_2460ciJ.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_2510iJ.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_2553ciJ.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_2553ci.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_308ci.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_3212iJ.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_352ci.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_358ciJ.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_4003i.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_4012iJ.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_5003iJ.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_7003iJ.ppd.gz
-kyocera-20200716-Kyocera_TASKalfa_7353ciJ.ppd.gz
-kyocera-20200716-TA_P-4531_MFP.ppd.gz
-lexmark-20200918-Lexmark_X651de.ppd.gz
-lexmark-20200918-Lexmark_X658de.ppd.gz
-lexmark-20201101-Lexmark_6500e_Series.ppd.gz
-lexmark-20201101-Lexmark_B2300_Series.ppd.gz
-lexmark-20201101-Lexmark_C2200_Series.ppd.gz
-lexmark-20201101-Lexmark_C2300_Series.ppd.gz
-lexmark-20201101-Lexmark_C2400_Series.ppd.gz
-lexmark-20201101-Lexmark_C3400_Series.ppd.gz
-lexmark-20201101-Lexmark_C740_Series.ppd.gz
-lexmark-20201101-Lexmark_C790_Series.ppd.gz
-lexmark-20201101-Lexmark_C9200_Series.ppd.gz
-lexmark-20201101-Lexmark_C920_Series.ppd.gz
-lexmark-20201101-Lexmark_C950_Series.ppd.gz
-lexmark-20201101-Lexmark_CS310_Series.ppd.gz
-lexmark-20201101-Lexmark_CS410_Series.ppd.gz
-lexmark-20201101-Lexmark_CS510_Series.ppd.gz
-lexmark-20201101-Lexmark_CS720_Series.ppd.gz
-lexmark-20201101-Lexmark_CS820_Series.ppd.gz
-lexmark-20201101-Lexmark_CS920_Series.ppd.gz
-lexmark-20201101-Lexmark_CX310_Series.ppd.gz
-lexmark-20201101-Lexmark_CX410_Series.ppd.gz
-lexmark-20201101-Lexmark_CX420_Series.ppd.gz
-lexmark-20201101-Lexmark_CX430_Series.ppd.gz
-lexmark-20201101-Lexmark_CX510_Series.ppd.gz
-lexmark-20201101-Lexmark_CX625_Series.ppd.gz
-lexmark-20201101-Lexmark_CX725_Series.ppd.gz
-lexmark-20201101-Lexmark_CX820_Series.ppd.gz
-lexmark-20201101-Lexmark_CX825_Series.ppd.gz
-lexmark-20201101-Lexmark_CX920_Series.ppd.gz
-lexmark-20201101-Lexmark_M1100_Series.ppd.gz
-lexmark-20201101-Lexmark_M3100_Series.ppd.gz
-lexmark-20201101-Lexmark_M5100_Series.ppd.gz
-lexmark-20201101-Lexmark_MB2300_Series.ppd.gz
-lexmark-20201101-Lexmark_MC2300_Series.ppd.gz
-lexmark-20201101-Lexmark_MS310_Series.ppd.gz
-lexmark-20201101-Lexmark_MS410_Series.ppd.gz
-lexmark-20201101-Lexmark_MS510_Series.ppd.gz
-lexmark-20201101-Lexmark_MS610_Series.ppd.gz
-lexmark-20201101-Lexmark_MS620_Series.ppd.gz
-lexmark-20201101-Lexmark_MS710_Series.ppd.gz
-lexmark-20201101-Lexmark_MS725_Series.ppd.gz
-lexmark-20201101-Lexmark_MS810_Series.ppd.gz
-lexmark-20201101-Lexmark_MS820_Series.ppd.gz
-lexmark-20201101-Lexmark_MX310_Series.ppd.gz
-lexmark-20201101-Lexmark_MX410_Series.ppd.gz
-lexmark-20201101-Lexmark_MX510_Series.ppd.gz
-lexmark-20201101-Lexmark_MX520_Series.ppd.gz
-lexmark-20201101-Lexmark_MX610_Series.ppd.gz
-lexmark-20201101-Lexmark_MX620_Series.ppd.gz
-lexmark-20201101-Lexmark_MX6500e_Series.ppd.gz
-lexmark-20201101-Lexmark_MX710_Series.ppd.gz
-lexmark-20201101-Lexmark_MX720_Series.ppd.gz
-lexmark-20201101-Lexmark_MX725_Series.ppd.gz
-lexmark-20201101-Lexmark_MX810_Series.ppd.gz
-lexmark-20201101-Lexmark_MX820_Series.ppd.gz
-lexmark-20201101-Lexmark_MX910_Series.ppd.gz
-lexmark-20201101-Lexmark_X548_Series.ppd.gz
-lexmark-20201101-Lexmark_X740_Series.ppd.gz
-lexmark-20201101-Lexmark_X790_Series.ppd.gz
-lexmark-20201101-Lexmark_X920_Series.ppd.gz
-lexmark-20201101-Lexmark_X950_Series.ppd.gz
-lexmark-20201101-Lexmark_XC2100_Series.ppd.gz
-lexmark-20201101-Lexmark_XC9200_Series.ppd.gz
-lexmark-20201101-Lexmark_XM1100_Series.ppd.gz
-lexmark-20201101-Lexmark_XM3100_Series.ppd.gz
-lexmark-20201101-Lexmark_XM5100_Series.ppd.gz
-lexmark-20201101-Lexmark_XM7100_Series.ppd.gz
-oki-20200129-oki-c542-ps.ppd.gz
-oki-20200329-ES8434-PS.ppd.gz
-oki-20200329-OKB432_a.ppd.gz
-oki-20200329-OKB512_a.ppd.gz
-oki-20200329-OKB841_a110.ppd.gz
-oki-20200329-OKI-C332-PS.ppd.gz
-oki-20200329-OKI-C612-PS.ppd.gz
-oki-20200329-OKI-C712-PS.ppd.gz
-oki-20200329-OKI-C833-PS.ppd.gz
-oki-20200329-OKI-C843-PS.ppd.gz
-oki-20200329-OKI-C844-PS.ppd.gz
-oki-20201022-ES6450_PS.ppd.gz
-oki-20201022-OKI_MC843_PS.ppd.gz
-oki-20201022-OKI_MC853_PS.ppd.gz
-oki-20201022-OKI_MC883_PS.ppd.gz
-ricoh-20190916-Ricoh-M_C250FWB_PS.ppd.gz
-ricoh-20190916-Ricoh-P_C301SF_PS.ppd.gz
-ricoh-20191218-SP_C420e_JPN-PostscriptColor-Ricoh.ppd.gz
-ricoh-20200527-Ricoh-P_6000_JPN.ppd.gz
-ricoh-20200821-Ricoh-IM_C6500_JPN.ppd.gz
-ricoh-20200930-Ricoh_Generic_PS_Printer.ppd.gz
-sharp-20180409-Sharp-AR-M452U-ps.ppd.gz
-sharp-20180409-Sharp-MX-2640NR-ps.ppd.gz
-sharp-20180409-Sharp-MX-M283N-ps.ppd.gz
-sharp-20180409-Sharp-MX-M363F-ps-jp.ppd.gz
-sharp-20180409-Sharp-MX-M623N-ps.ppd.gz
-sharp-20180409-Sharp-MX-M623-ps-jp.ppd.gz
-sharp-20190711-Sharp-MX-6240N-ps.ppd.gz
-sharp-20190711-Sharp-MX-6500N-ps.ppd.gz
-sharp-20190711-Sharp-MX-6540FN-ps-jp.ppd.gz
-sharp-20190711-Sharp-MX-C250-ps.ppd.gz
-sharp-20190711-Sharp-MX-C301-ps.ppd.gz
-sharp-20190711-Sharp-MX-M1054-ps.ppd.gz
-sharp-20190711-Sharp-MX-M1055-ps.ppd.gz
-sharp-20190711-Sharp-MX-M654FN-ps-jp.ppd.gz
-sharp-20190711-Sharp-MX-M654N-ps.ppd.gz
-sharp-20190711-Sharp-MX-M904-ps.ppd.gz
-sharp-20191219-Sharp-AR-6020D-ps.ppd.gz
-sharp-20191219-Sharp-AR-6020-ps.ppd.gz
-sharp-20191219-Sharp-AR-6026N-ps.ppd.gz
-sharp-20191219-Sharp-AR-G200-ps-jp.ppd.gz
-sharp-20191219-Sharp-BP-10C20-ps.ppd.gz
-sharp-20191219-Sharp-DX-2000U-ps.ppd.gz
-sharp-20191219-Sharp-DX-20C20-ps-jp.ppd.gz
-sharp-20191219-Sharp-DX-2500N-ps.ppd.gz
-sharp-20191219-Sharp-MX-1800N-ps.ppd.gz
-sharp-20191219-Sharp-MX-1810U-ps.ppd.gz
-sharp-20191219-Sharp-MX-2300FG-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-2300G-ps.ppd.gz
-sharp-20191219-Sharp-MX-2301N-ps.ppd.gz
-sharp-20191219-Sharp-MX-2310F-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-2514FN-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-2600FG-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-2600G-ps.ppd.gz
-sharp-20191219-Sharp-MX-2610FN-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-2610N-ps.ppd.gz
-sharp-20191219-Sharp-MX-2614N-ps.ppd.gz
-sharp-20191219-Sharp-MX-2631-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-2640FN-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-2651-ps.ppd.gz
-sharp-20191219-Sharp-MX-2661-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-3061-ps.ppd.gz
-sharp-20191219-Sharp-MX-3600FN-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-4100N-ps.ppd.gz
-sharp-20191219-Sharp-MX-4110FN-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-4110N-ps.ppd.gz
-sharp-20191219-Sharp-MX-4140FN-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-4140N-ps.ppd.gz
-sharp-20191219-Sharp-MX-5500N-ps.ppd.gz
-sharp-20191219-Sharp-MX-C303-ps.ppd.gz
-sharp-20191219-Sharp-MX-C305W-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-M264FP-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-M264NV-ps.ppd.gz
-sharp-20191219-Sharp-MX-M265N-ps.ppd.gz
-sharp-20191219-Sharp-MX-M266FP-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-M266N-ps.ppd.gz
-sharp-20191219-Sharp-MX-M316G-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-M364N-ps.ppd.gz
-sharp-20191219-Sharp-MX-M365FN-ps-jp.ppd.gz
-sharp-20191219-Sharp-MX-M365N-ps.ppd.gz
-sharp-20191219-Sharp-MX-M464FN-ps-jp.ppd.gz
-sharp-20191230-Sharp-AR-B350W-ps-jp.ppd.gz
-sharp-20191230-Sharp-AR-B351-ps.ppd.gz
-sharp-20191230-Sharp-DX-C310-ps.ppd.gz
-sharp-20191230-Sharp-MX-2630FN-ps-jp.ppd.gz
-sharp-20191230-Sharp-MX-2630N-ps.ppd.gz
-sharp-20191230-Sharp-MX-2650FN-ps-jp.ppd.gz
-sharp-20191230-Sharp-MX-3060N-ps.ppd.gz
-sharp-20191230-Sharp-MX-6580N-ps.ppd.gz
-sharp-20191230-Sharp-MX-7090N-ps.ppd.gz
-sharp-20191230-Sharp-MX-B355W-ps.ppd.gz
-sharp-20191230-Sharp-MX-B356W-ps.ppd.gz
-sharp-20191230-Sharp-MX-B380P-ps.ppd.gz
-sharp-20191230-Sharp-MX-M2630-ps.ppd.gz
-sharp-20191230-Sharp-MX-M2651-ps.ppd.gz
-sharp-20191230-Sharp-MX-M3070-ps.ppd.gz
-sharp-20191230-Sharp-MX-M3071-ps.ppd.gz
-sharp-20191230-Sharp-MX-M3531-ps-jp.ppd.gz
-sharp-20191230-Sharp-MX-M6570-ps.ppd.gz
-sharp-20191230-Sharp-MX-M905-ps.ppd.gz
-xerox-20190225-xr6605dn.ppd.gz
-xerox-20190225-xr8580dn.ppd.gz
-xerox-20190225-xrx3655s.ppd.gz
-xerox-20190225-xrx4622.ppd.gz
-xerox-20190225-xrx5330.ppd.gz
-xerox-20190225-xrx5875.ppd.gz
-xerox-20190225-xrx7830.ppd.gz
-xerox-20190225-xrx7970.ppd.gz
-xerox-20190225-xrx8580n.ppd.gz
-xerox-20190225-xrxd95cp.ppd.gz
-xerox-20190711-xrwc3335.ppd.gz
-xerox-20190711-xrx6510.ppd.gz
-xerox-20190820-xrxosd.ppd.gz
-xerox-20191030-Xerox_Phaser_7800DN.ppd.gz
-xerox-20191030-Xerox_Phaser_7800DX.ppd.gz
-xerox-20191030-Xerox_Phaser_7800GX.ppd.gz
-xerox-20191030-Xerox_VersaLink_C500.ppd.gz
-xerox-20191030-Xerox_VersaLink_C505.ppd.gz
-xerox-20191030-Xerox_VersaLink_C600.ppd.gz
-xerox-20191030-Xerox_VersaLink_C605.ppd.gz
-xerox-20191030-xrxB400.ppd.ppd.gz
-xerox-20191030-xrxB405.ppd.ppd.gz
-xerox-20191030-xrxb600.ppd.gz
-xerox-20191030-xrxb615.ppd.gz
-xerox-20191030-xrxB7025.ppd.gz
-xerox-20191030-xrxB7030.ppd.gz
-xerox-20191030-xrxB7035.ppd.gz
-xerox-20191030-xrxB8045.ppd.gz
-xerox-20191030-xrxC400.ppd.gz
-xerox-20191030-xrxC405.ppd.gz
-xerox-20191030-xrxC7000.ppd.gz
-xerox-20191030-xrxC7030.ppd.gz
-xerox-20191030-xrxC8000.ppd.gz
-xerox-20191030-xrxC8030.ppd.gz
-xerox-20200129-xrxC9065.ppd.gz
-xerox-20200226-xrxB9100.ppd.gz
-xerox-20201014-xrxC8000W.ppd.gz
diff --git a/client/site_tests/platform_PrinterPpds/digests/embedded_images.pdf.digests b/client/site_tests/platform_PrinterPpds/digests/embedded_images.pdf.digests
index 36103b2..53d891c 100644
--- a/client/site_tests/platform_PrinterPpds/digests/embedded_images.pdf.digests
+++ b/client/site_tests/platform_PrinterPpds/digests/embedded_images.pdf.digests
@@ -1,32 +1,8 @@
-brother-20191213-DCP7180DN.ppd.gz	83154bd70a7e0da1da927a3c29634ddf	51041
-brother-20191213-DCP7195DW.ppd.gz	83154bd70a7e0da1da927a3c29634ddf	51041
-brother-20191213-DCPL5500DN.ppd.gz	83154bd70a7e0da1da927a3c29634ddf	51041
-brother-20191213-DCPL6600DW.ppd.gz	83154bd70a7e0da1da927a3c29634ddf	51041
-brother-20191213-HL5595DNH.ppd.gz	83154bd70a7e0da1da927a3c29634ddf	51041
-brother-20200131-DCP7025.ppd.gz	83154bd70a7e0da1da927a3c29634ddf	51021
-brother-20200131-DCP8080DN.ppd.gz	83154bd70a7e0da1da927a3c29634ddf	51041
-brother-20200615-DCP9030CDN.ppd.gz	13365a1789c4a6ad7f3527264b6629a4	144681
-brother-20200615-DCPL8410CDW.ppd.gz	13365a1789c4a6ad7f3527264b6629a4	144681
-brother-20200615-HLL3210CW.ppd.gz	13365a1789c4a6ad7f3527264b6629a4	144681
-brother-20200615-MFC9350CDW.ppd.gz	13365a1789c4a6ad7f3527264b6629a4	144681
-brother-20201006-DCP7080-cups-en.ppd.gz	03ac8e27d2cc41109b1a714834a7b7b5	49346
-brother-20201006-DCP7080D-cups-en.ppd.gz	dba588d07f12ceb95eef42ef21da43b4	49346
-brother-20201006-DCP7090-cups-en.ppd.gz	dba588d07f12ceb95eef42ef21da43b4	49346
-brother-20201006-DCP7090DW-cups-en.ppd.gz	39ac2cafc1c499f2763e19ffb16583a2	49346
-brother-20201006-HL2290-cups-en.ppd.gz	39ac2cafc1c499f2763e19ffb16583a2	49346
-brother-20201006-HL2295D-cups-en.ppd.gz	095f20627abe2a99cb4f6049bc63c203	49346
-brother-20201006-MFCL2685DW-cups-en.ppd.gz	c4df3eb0430845054d9f17f94d0002dd	49346
+brother-20200615-DCP9030CDN.ppd.gz	b08845fa29b497d2f5c00b44eed2b91f	144686
+brother-20200615-DCPL8410CDW.ppd.gz	b08845fa29b497d2f5c00b44eed2b91f	144686
+brother-20200615-HLL3210CW.ppd.gz	b08845fa29b497d2f5c00b44eed2b91f	144686
+brother-20200615-MFC9350CDW.ppd.gz	b08845fa29b497d2f5c00b44eed2b91f	144686
 cups-20170101-Generic-PDF_Printer-PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33482
-dymo-20200714-lm280.ppd.gz	6f4b9652036a47137f20eb71408d3a94	2156
-dymo-20200714-lm400.ppd.gz	6f4b9652036a47137f20eb71408d3a94	2156
-dymo-20200714-lm420p.ppd.gz	543303859e81fb96a1965d81035510ba	2158
-dymo-20200714-lm450.ppd.gz	af031493c09f141578ccd858a637b452	2160
-dymo-20200714-lm500ts.ppd.gz	d10f465460f92419ca2ba4eba7055067	3610
-dymo-20200714-lmpc2.ppd.gz	96030bba7a4b6efa39e9a719549869a2	2158
-dymo-20200714-lmpc.ppd.gz	8b04d63ca9b329cae57189749ccaec79	2154
-dymo-20200714-lmpnp.ppd.gz	cded90ff56eb2756fbd4abf7246d4210	2150
-dymo-20200714-lmpnpw.ppd.gz	d8a3dbb9b42b04f7a2e2a9fdfa08f69f	3476
-dymo-20200714-lp350.ppd.gz	8b04d63ca9b329cae57189749ccaec79	2154
 dymo-20200714-lw300.ppd.gz	db2f63775c103515692c3224a0c71e65	5162
 dymo-20200714-lw315.ppd.gz	db2f63775c103515692c3224a0c71e65	5162
 dymo-20200714-lw320.ppd.gz	f320923e7cea453904a8f8b957a9a2a0	7250
@@ -34,15 +10,12 @@
 dymo-20200714-lw330t.ppd.gz	f320923e7cea453904a8f8b957a9a2a0	7250
 dymo-20200714-lw400.ppd.gz	9b38392854b3e4081a3da0c3ea512756	7252
 dymo-20200714-lw400t.ppd.gz	9b38392854b3e4081a3da0c3ea512756	7252
-dymo-20200714-lw450dl.ppd.gz	9b38392854b3e4081a3da0c3ea512756	7252
-dymo-20200714-lw450dt.ppd.gz	7c4cd5def81f2ce2908823be55a6540a	2160
 dymo-20200714-lw450.ppd.gz	9b38392854b3e4081a3da0c3ea512756	7252
+dymo-20200714-lw450dl.ppd.gz	9b38392854b3e4081a3da0c3ea512756	7252
 dymo-20200714-lw450t.ppd.gz	9b38392854b3e4081a3da0c3ea512756	7252
 dymo-20200714-lw450tt.ppd.gz	afb984c987bc5dae270787924aa1b103	7258
 dymo-20200714-lw4xl.ppd.gz	89bda9320dd0b485eb0ccb8fa506e922	20417
 dymo-20200714-lwduol.ppd.gz	9b38392854b3e4081a3da0c3ea512756	7252
-dymo-20200714-lwduot2.ppd.gz	7c4cd5def81f2ce2908823be55a6540a	2160
-dymo-20200714-lwduot.ppd.gz	2b74c9dba137b9409b94ba82159d17d3	2156
 dymo-20200714-lwtt.ppd.gz	afb984c987bc5dae270787924aa1b103	7258
 dymo-20200714-se450.ppd.gz	e88028a98b5c22a45c282dc4132413ac	3767
 epson-20170125-Epson-L380_Series-epson-escpr-en-1.6.10.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
@@ -115,9 +88,9 @@
 epson-20200615-1_6_41-Epson-L805_Series-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 epson-20200615-1_6_41-Epson-L810_Series-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 epson-20200615-1_6_41-Epson-M200_Series-epson-escpr-en.ppd.gz	bda91475e9758d8ea7d340d7b2c16895	358658
-epson-20200615-1_6_41-Epson-ME_200-epson-escpr-en.ppd.gz	11e992e11b3e2bdb5b45cb37aa1926ae	359043
 epson-20200615-1_6_41-Epson-ME-301_Series-epson-escpr-en.ppd.gz	11e992e11b3e2bdb5b45cb37aa1926ae	359043
 epson-20200615-1_6_41-Epson-ME-400_Series-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
+epson-20200615-1_6_41-Epson-ME_200-epson-escpr-en.ppd.gz	11e992e11b3e2bdb5b45cb37aa1926ae	359043
 epson-20200615-1_6_41-Epson-ME_OFFICE_510-epson-escpr-en.ppd.gz	11e992e11b3e2bdb5b45cb37aa1926ae	359043
 epson-20200615-1_6_41-Epson-ME_OFFICE_530-epson-escpr-en.ppd.gz	11e992e11b3e2bdb5b45cb37aa1926ae	359043
 epson-20200615-1_6_41-Epson-ME_OFFICE_570-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
@@ -127,11 +100,6 @@
 epson-20200615-1_6_41-Epson-PF-70_Series-epson-escpr-en.ppd.gz	e39efdfab490b032379978901334db90	108512
 epson-20200615-1_6_41-Epson-PF-71_Series-epson-escpr-en.ppd.gz	e39efdfab490b032379978901334db90	108512
 epson-20200615-1_6_41-Epson-PF-81_Series-epson-escpr-en.ppd.gz	f3d7c622860135b332472645e868853c	108512
-epson-20200615-1_6_41-Epson-PictureMate_500-epson-escpr-en.ppd.gz	a73bc13933393af3920f544c749808db	142610
-epson-20200615-1_6_41-Epson-PictureMate_PM_200-epson-escpr-en.ppd.gz	8d80952341d941434a0fb2487348eacc	142610
-epson-20200615-1_6_41-Epson-PictureMate_PM_240-epson-escpr-en.ppd.gz	8d80952341d941434a0fb2487348eacc	142610
-epson-20200615-1_6_41-Epson-PictureMate_PM_270-epson-escpr-en.ppd.gz	a73bc13933393af3920f544c749808db	142610
-epson-20200615-1_6_41-Epson-PictureMate_PM_300-epson-escpr-en.ppd.gz	a73bc13933393af3920f544c749808db	142610
 epson-20200615-1_6_41-Epson-PM-400_Series-epson-escpr-en.ppd.gz	f3d7c622860135b332472645e868853c	108512
 epson-20200615-1_6_41-Epson-PM-A750-epson-escpr-en.ppd.gz	11e992e11b3e2bdb5b45cb37aa1926ae	359043
 epson-20200615-1_6_41-Epson-PM-A820-epson-escpr-en.ppd.gz	11e992e11b3e2bdb5b45cb37aa1926ae	359043
@@ -172,6 +140,11 @@
 epson-20200615-1_6_41-Epson-PX-M860F-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 epson-20200615-1_6_41-Epson-PX-S05_Series-epson-escpr-en.ppd.gz	6e6c2ee469fddd3e41cc0e23123755a1	366968
 epson-20200615-1_6_41-Epson-PX-S06_Series-epson-escpr-en.ppd.gz	6e6c2ee469fddd3e41cc0e23123755a1	366968
+epson-20200615-1_6_41-Epson-PictureMate_500-epson-escpr-en.ppd.gz	a73bc13933393af3920f544c749808db	142610
+epson-20200615-1_6_41-Epson-PictureMate_PM_200-epson-escpr-en.ppd.gz	8d80952341d941434a0fb2487348eacc	142610
+epson-20200615-1_6_41-Epson-PictureMate_PM_240-epson-escpr-en.ppd.gz	8d80952341d941434a0fb2487348eacc	142610
+epson-20200615-1_6_41-Epson-PictureMate_PM_270-epson-escpr-en.ppd.gz	a73bc13933393af3920f544c749808db	142610
+epson-20200615-1_6_41-Epson-PictureMate_PM_300-epson-escpr-en.ppd.gz	a73bc13933393af3920f544c749808db	142610
 epson-20200615-1_6_41-Epson-Stylus_CX3700-epson-escpr-en.ppd.gz	11e992e11b3e2bdb5b45cb37aa1926ae	359043
 epson-20200615-1_6_41-Epson-Stylus_CX3800-epson-escpr-en.ppd.gz	5cf62fbb568d73b61951a383bcbc9a34	346097
 epson-20200615-1_6_41-Epson-Stylus_CX4200-epson-escpr-en.ppd.gz	5cf62fbb568d73b61951a383bcbc9a34	346097
@@ -218,13 +191,13 @@
 epson-20200615-1_6_41-Epson-WF-7610_Series-epson-escpr-en.ppd.gz	14c5b3d0fd89107ee6e163e13dca8abb	365528
 epson-20200615-1_6_41-Epson-WF-8010_Series-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 epson-20200615-1_6_41-Epson-WF-M5190_Series-epson-escpr-en.ppd.gz	bda91475e9758d8ea7d340d7b2c16895	358658
-epson-20200615-1_6_41-Epson-WorkForce_320-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
-epson-20200615-1_6_41-Epson-WorkForce_600-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
-epson-20200615-1_6_41-Epson-WorkForce_610-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 epson-20200615-1_6_41-Epson-WP-4010_Series-epson-escpr-en.ppd.gz	bcdf2048d21ef76670ed594ef1604170	281582
 epson-20200615-1_6_41-Epson-WP-4011_Series-epson-escpr-en.ppd.gz	583de1866584cf6e76454556311e6c5a	289152
 epson-20200615-1_6_41-Epson-WP-4020_Series-epson-escpr-en.ppd.gz	47030833c15048e10a19f9957a60c461	295943
 epson-20200615-1_6_41-Epson-WP-M4011_Series-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
+epson-20200615-1_6_41-Epson-WorkForce_320-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
+epson-20200615-1_6_41-Epson-WorkForce_600-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
+epson-20200615-1_6_41-Epson-WorkForce_610-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 epson-20200615-1_6_41-Epson-XP-201_204_208_Series-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 epson-20200615-1_6_41-Epson-XP-2100_Series-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 epson-20200615-1_6_41-Epson-XP-211_214_216_Series-epson-escpr-en.ppd.gz	5cf62fbb568d73b61951a383bcbc9a34	346097
@@ -251,270 +224,103 @@
 epson-20200615-1_6_41-Epson-XP-950_Series-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 epson-20200615-1_6_41-Epson-XP-960_Series-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 epson-20200615-EPSON_EW-052A_Series.ppd.gz	5cf62fbb568d73b61951a383bcbc9a34	346097
-epson-20200615-Epson-LX-10000FK_Series_PS3.ppd.gz	a3a4b86560af0cd1f3640c8c5af8bcd5	334294
-epson-20200615-Epson-LX-10000F_PS.ppd.gz	7d644058446d0866ab4c24d257b94920	334294
-epson-20200615-Epson-LX-10010MF_Series_PS3.ppd.gz	ac425ea30a89eb73a7f07a27275f2773	288768
-epson-20200615-Epson-WF-C17590_Series_PS3.ppd.gz	35b3015b9817f082965a99d4b6ec4b5c	334294
-epson-20200615-Epson-WF-C20590_PS.ppd.gz	35b3015b9817f082965a99d4b6ec4b5c	334294
-epson-20200615-Epson-WF-M20590_Series_PS3.ppd.gz	177f43a30682494d5b30d51ab0429ebb	288768
-foomatic-20170101-Samsung-M332x_382x_402x-Postscript.ppd.gz	f12d5b3a9470e9c732383dd849d14f60	288096
-foomatic-20190909-Ricoh-IM_430F-PostscriptMono-Ricoh.ppd.gz	b42b215aab8608d086185b53d5aa40cb	289495
+epson-20210521-1_6_41-Epson-PX-S5040-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
+epson-20210703-Epson-L1210_Series-epson-escpr-en.ppd.gz	d6acec8c7aaaa4fb4bfbd998f790f780	365528
 foomatic-20190909-Ricoh-IM_430_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33458
 foomatic-20190909-Ricoh-IM_C4500_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
-foomatic-20191029-Apollo-P-1200-pcl3.ppd.gz	079d4813b81b905a226cc0193fc11650	74141
-foomatic-20191029-BR5070DN_GPL.ppd.gz	344bbdffb0e7b0bf8fbcdb507e716048	289171
 foomatic-20191029-Gestetner-Pro_C5200S_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33642
 foomatic-20191029-Lanier-Pro_8200S_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33596
 foomatic-20191029-Lanier-Pro_8210_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33596
-foomatic-20191029-Lanier-Pro_C7200_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33668
 foomatic-20191029-Lanier-Pro_C7200S_Light_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33668
-foomatic-20191029-shar208d.ppd.gz	b1fdac279d7dae4a8f1ed18be61ff893	289705
-foomatic-20191029-shar208s.ppd.gz	2c4429571245c205c64be71da1bac42f	289606
+foomatic-20191029-Lanier-Pro_C7200_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33668
 foomatic-20200219-Anitech-M24-epson.ppd.gz	549ff7ffdb7e93080da50335e44c08f1	2830
-foomatic-20200219-Apple-12_640ps-Postscript.ppd.gz	e63f3d3cefb0c4b56cea7ad3d0c6f189	288096
-foomatic-20200219-Apple-LaserWriter_IIg-Postscript.ppd.gz	168d4c3a4618af39a1216d855d1cc2dd	288239
-foomatic-20200219-Apple-LaserWriter_Select_360-ljet2p.ppd.gz	f7239c9e170d10f2f89ea61f670bd8be	33929
-foomatic-20200219-Brother-DCP-7010-ljet4.ppd.gz	0e475b0c02c70744d761622ecd089ff3	50873
-foomatic-20200219-Brother-DCP-8020-Postscript-Brother.ppd.gz	02f70e4fdf05e0be9da3b238066a7322	288904
-foomatic-20200219-Brother-DCP-8025D-Postscript-Brother.ppd.gz	a120f4fc421adf6322bee578bf0e0b2f	289016
-foomatic-20200219-Brother-DCP-8040-Postscript-Brother.ppd.gz	61ccee571ad7a3799624f4c8923add7f	288904
-foomatic-20200219-Brother-DCP-8045D-Postscript-Brother.ppd.gz	12ebc5dd38cb8081a4f2dd7e9b3dbbc3	289016
-foomatic-20200219-Brother-DCP-9010CN-Postscript-Brother.ppd.gz	ae593e7b6f6135b9055b4591fca4635d	475834
-foomatic-20200219-Brother-DCP-9040CN-Postscript-Brother.ppd.gz	f450fb9e95e34b8953d8e43f26378682	475858
-foomatic-20200219-Brother-DCP-9045CDN-Postscript-Brother.ppd.gz	05f33ac19aeaf8c469122cd6bb460a65	476044
 foomatic-20200219-Brother-HJ-400-lq850.ppd.gz	612b7f37268bce6a320da8372e050ff7	58605
-foomatic-20200219-Brother-HL-1030-hl1250.ppd.gz	da0da22e9aeadbbc91a4280488b25917	17383
-foomatic-20200219-Brother-HL-1070-ljet4.ppd.gz	0e475b0c02c70744d761622ecd089ff3	50873
-foomatic-20200219-Brother-HL-10V-ljet3.ppd.gz	10e730ed71d6ca79acf7b5c6dadd85e4	17067
-foomatic-20200219-Brother-HL-1230-hl1250.ppd.gz	da0da22e9aeadbbc91a4280488b25917	17383
-foomatic-20200219-Brother-HL-1240-laserjet.ppd.gz	507b9f541a287105da56acf8c2500dd7	90540
-foomatic-20200219-Brother-HL-1250-ljet4.ppd.gz	0ad2b865d397648e39fb1c0ba77cb455	50957
-foomatic-20200219-Brother-HL-1270N-ljet4.ppd.gz	0ad2b865d397648e39fb1c0ba77cb455	50957
-foomatic-20200219-Brother-HL-1450-Postscript-Brother.ppd.gz	cb2b6b1d3b53a721c32a27c267647ce6	288954
-foomatic-20200219-Brother-HL-1650_70N-Postscript-Brother.ppd.gz	5e8c76f70cb61e7329d0d2de4feea73c	289263
-foomatic-20200219-Brother-HL-1650-ljet4.ppd.gz	0ad2b865d397648e39fb1c0ba77cb455	51003
-foomatic-20200219-Brother-HL-1660e-ljet4.ppd.gz	0ad2b865d397648e39fb1c0ba77cb455	51003
-foomatic-20200219-Brother-HL-1850_70N-Postscript-Brother.ppd.gz	e32287d1f4899bcf7db181d5433bcd02	289500
-foomatic-20200219-Brother-HL-1850-ljet4.ppd.gz	0ad2b865d397648e39fb1c0ba77cb455	51003
-foomatic-20200219-Brother-HL-2060-ljet4.ppd.gz	0ad2b865d397648e39fb1c0ba77cb455	50957
-foomatic-20200219-Brother-HL-2240D-hl1250.ppd.gz	a8d19510cf18979dc063417c3bd02916	51144
-foomatic-20200219-Brother-HL-2240-hl1250.ppd.gz	a8d19510cf18979dc063417c3bd02916	51098
-foomatic-20200219-Brother-HL-2400CeN-pxlcolor.ppd.gz	03e1e86b1a8a9930fdb05a4e28cd56bd	144627
-foomatic-20200219-Brother-HL-2460N-pxlmono.ppd.gz	aa58c9f5adee4733e7705b8983202a36	79848
-foomatic-20200219-Brother-HL-2460-Postscript-Brother.ppd.gz	662ba22771c815ba01caa5aa1801df41	289960
-foomatic-20200219-Brother-HL-2600CN-Postscript-Brother.ppd.gz	b8461fb34a3f6c5843de62ed93e86159	477792
-foomatic-20200219-Brother-HL-2700CN-Postscript-Brother.ppd.gz	55f45b2cb3b7cfe435cf4d437f1d897e	476381
-foomatic-20200219-Brother-HL-3070CW-Postscript-Brother.ppd.gz	bd17c37b80f521f82c313f8d8f1a3802	475910
-foomatic-20200219-Brother-HL-3260N-Postscript-Brother.ppd.gz	5904d9023cd0767f9905adce1ae0aea4	289940
-foomatic-20200219-Brother-HL-3450CN-Postscript-Brother.ppd.gz	10ffc6c28f5756e1952f185f0d2c17f2	477792
-foomatic-20200219-Brother-HL-4050CDN-Postscript-Brother.ppd.gz	e6515c55fc1129b5116e7cab6524993f	476120
-foomatic-20200219-Brother-HL-4Ve-laserjet.ppd.gz	8cc98d56bc44cac5c411d43d71e75259	90456
-foomatic-20200219-Brother-HL-5030-ljet4.ppd.gz	0ad2b865d397648e39fb1c0ba77cb455	50957
-foomatic-20200219-Brother-HL-5050-Postscript-Brother.ppd.gz	601c6d8f7127456bd6fef239c216debd	289060
-foomatic-20200219-Brother-HL-5140-ljet4.ppd.gz	0e475b0c02c70744d761622ecd089ff3	50873
-foomatic-20200219-Brother-HL-5150D-Postscript-Brother.ppd.gz	d1414b8129753e9336c494e05589eb39	289172
-foomatic-20200219-Brother-HL-5240-Postscript-Brother.ppd.gz	e7fbeda5589583c3736bf707abc8e547	289086
-foomatic-20200219-Brother-HL-5250DN-Postscript-Brother.ppd.gz	9ab8014c47d76384a771e4aa202e8fdd	289198
-foomatic-20200219-Brother-HL-5270DN-Postscript-Brother.ppd.gz	c843480cf0b3290ba4588aee07bc0440	289622
-foomatic-20200219-Brother-HL-6050D_DN-Postscript-Brother.ppd.gz	9e6a91bd01aaee1510f411a5be35e772	289700
-foomatic-20200219-Brother-HL-6050-Postscript-Brother.ppd.gz	38a53486538348f9e196398625439572	289588
-foomatic-20200219-Brother-HL-7050N-pxlmono.ppd.gz	aa58c9f5adee4733e7705b8983202a36	79898
-foomatic-20200219-Brother-HL-7050-Postscript-Brother.ppd.gz	ae74da0a180474a7d459587274b50acb	290101
-foomatic-20200219-Brother-HL-8050N-Postscript-Brother.ppd.gz	ec17683f2078209aed3272cd030ee921	289918
-foomatic-20200219-Brother-HL-8-ljetplus.ppd.gz	8cc98d56bc44cac5c411d43d71e75259	90456
+foomatic-20200219-Brother-HL-2400CeN-pxlcolor.ppd.gz	841f926bcf703837272e12aeb2e4bed5	144632
+foomatic-20200219-Brother-HL-2460N-pxlmono.ppd.gz	aa85461a590a8edde3bb2e6300015d51	79853
+foomatic-20200219-Brother-HL-7050N-pxlmono.ppd.gz	aa85461a590a8edde3bb2e6300015d51	79903
 foomatic-20200219-Brother-MC-3000-epson.ppd.gz	549ff7ffdb7e93080da50335e44c08f1	2830
-foomatic-20200219-Brother-MFC-7450-Postscript-Brother.ppd.gz	ab41be1093c7e704a2c74c9086cc3202	289398
-foomatic-20200219-Brother-MFC-8220-Postscript-Brother.ppd.gz	322776363e659ee6e0800fa7b5eeba38	288904
-foomatic-20200219-Brother-MFC-8440-Postscript-Brother.ppd.gz	70a45c34fafcc7577c92895bd2abc307	288904
-foomatic-20200219-Brother-MFC-8640D-Postscript-Brother.ppd.gz	829992d6aa01b9afc92187c8369408a0	289016
-foomatic-20200219-Brother-MFC-8670DN-Postscript-Brother.ppd.gz	578bb21befedef753a7bb02c76b3914a	289546
-foomatic-20200219-Brother-MFC-8820D-Postscript-Brother.ppd.gz	032c4584e965d3726ebffc39b73fb4c0	289016
 foomatic-20200219-Brother-MFC-9100c-epsonc.ppd.gz	ab78e0bb8d605c9f633e4e0fe42c80c5	2965
-foomatic-20200219-Brother-MFC-9420CN-Postscript-Brother.ppd.gz	410904893af70ff94303606b4717d313	475633
-foomatic-20200219-Brother-MFC-9440CN-Postscript-Brother.ppd.gz	ec6accd6179c4303fa5db533e1a4def5	475932
-foomatic-20200219-Brother-MFC-P2500-hl1250.ppd.gz	da0da22e9aeadbbc91a4280488b25917	17383
+foomatic-20200219-CItoh-M8510-m8510.ppd.gz	d363b89cf6828c36ed84c57d99355168	29927
 foomatic-20200219-Canon-BJ-100-bj200.ppd.gz	9d0d82376eaaf79674801dc3ebabc479	52765
 foomatic-20200219-Canon-BJ-10e-bj10e.ppd.gz	01bdecd5833fc0cea9552dd1fd84d6d4	53063
 foomatic-20200219-Canon-BJ-330-bj200.ppd.gz	9d0d82376eaaf79674801dc3ebabc479	52765
 foomatic-20200219-Canon-BJC-1000-bjc600.ppd.gz	b19f932855d2f5aac4d7a8a974cac32b	112481
-foomatic-20200219-Canon-BJC-2100-bjc610XY.upp.ppd.gz	d5fd77d9dbdffd01cc01fadd87d91eb2	97952
 foomatic-20200219-Canon-BJC-210-bjc600.ppd.gz	b19f932855d2f5aac4d7a8a974cac32b	112481
 foomatic-20200219-Canon-BJC-210SP-bj200.ppd.gz	9d0d82376eaaf79674801dc3ebabc479	52765
-foomatic-20200219-Canon-BJC-250ex-bjc250gs.ppd.gz	6ae8ff3927a8cd818d3f91b209ac1f11	149841
-foomatic-20200219-Canon-BJC-255SP-bjc250gs.ppd.gz	e93ab0f8ed80186a4fe56366d831fd3d	150129
 foomatic-20200219-Canon-BJC-4550-bjc800.ppd.gz	aa1ef06dc5709224509c500ba5799339	112480
-foomatic-20200219-Canon-BJC-610-bjc610XY.upp.ppd.gz	d5fd77d9dbdffd01cc01fadd87d91eb2	97952
 foomatic-20200219-Canon-BJC-800-bjc800.ppd.gz	aa1ef06dc5709224509c500ba5799339	112480
-foomatic-20200219-Canon-BJC-8200-bj8XXYYZ.upp.ppd.gz	9b2c56c9a3ffae12ec0364fffc657ddf	166083
-foomatic-20200219-Canon-GP_405-Postscript.ppd.gz	0468263e8a260960692ef13732f3b1d4	288096
-foomatic-20200219-Canon-imageRunner_C2570-Postscript.ppd.gz	b3d73d377ab5ee4d7816674839be9983	333357
-foomatic-20200219-Canon-iPR_C600-Postscript-Canon.ppd.gz	08af4039dc4c3bbb9b3b7d643e900234	338371
-foomatic-20200219-Canon-iPR_C650_PPD-Postscript-Canon.ppd.gz	cd9cacd94801dfafdc8d5078dae4dda6	339998
-foomatic-20200219-Canon-iPR_C700_800-Postscript-Canon.ppd.gz	ee94ed0029aa76f9c5013d63cca18976	340004
-foomatic-20200219-Canon-iPR_C750_850_PPD-Postscript-Canon.ppd.gz	8dcda70cd71dee5a48b9e3fcccbb1065	340012
-foomatic-20200219-Canon-iR-ADV_400_500-Postscript-Canon.ppd.gz	bcca93d24a057f8fac7eb58693631cf1	290797
-foomatic-20200219-Canon-iR-ADV_4025_4035-Postscript-Canon.ppd.gz	ef08655a61443f51bccbfa3d7588b059	291439
-foomatic-20200219-Canon-iR-ADV_4225_4235-Postscript-Canon.ppd.gz	5d47ac246eacc1adb0213d8009cbf7ab	291792
-foomatic-20200219-Canon-iR-ADV_6055_6065-Postscript-Canon.ppd.gz	bb817dd1e909229ace448f1c578e867f	291672
-foomatic-20200219-Canon-iR-ADV_6255_6265-Postscript-Canon.ppd.gz	c44ca41fa9ef9746d261b084cdb5718b	292013
-foomatic-20200219-Canon-iR-ADV_8085_8095-Postscript-Canon.ppd.gz	c523af319e98a824faeea0e180f7e978	291743
-foomatic-20200219-Canon-iR-ADV_8205-Postscript-Canon.ppd.gz	d3be36821645e192216677431b720653	292292
-foomatic-20200219-Canon-iR-ADV_C2020_2030-Postscript-Canon.ppd.gz	877c8454db961c73d7cf564c2218f583	337377
-foomatic-20200219-Canon-iR-ADV_C2020i_2030i-Postscript-Canon.ppd.gz	b9cb0c693796a6a12aec37ff0ccc82df	337373
-foomatic-20200219-Canon-iR-ADV_C2025-Postscript-Canon.ppd.gz	b9cb0c693796a6a12aec37ff0ccc82df	337373
-foomatic-20200219-Canon-iR-ADV_C2220_2230-Postscript-Canon.ppd.gz	e49f661d6244ac0e8ed2d3933db35d8d	337932
-foomatic-20200219-Canon-iR-ADV_C2225-Postscript-Canon.ppd.gz	0e7f27d75194cb4168cca91177c31268	337932
-foomatic-20200219-Canon-iR-ADV_C250_350-Postscript-Canon.ppd.gz	5cfd34de12a4c66cef4fac077379ec06	337114
-foomatic-20200219-Canon-iR-ADV_C3320L-Postscript-Canon.ppd.gz	b843247a703baa02c5bb4756f60c2a68	337816
-foomatic-20200219-Canon-iR-ADV_C3320-Postscript-Canon.ppd.gz	df414f0b1917bb286685e21f4a631f9f	337888
-foomatic-20200219-Canon-iR-ADV_C3325_3330-Postscript-Canon.ppd.gz	192c1c52ea128272a1b33b6e090cdf92	337888
-foomatic-20200219-Canon-iR-ADV_C351-Postscript-Canon.ppd.gz	812f45472a469cd7ff8c80a5b2bb2344	337043
-foomatic-20200219-Canon-iR-ADV_C5030_5035-Postscript-Canon.ppd.gz	bd1b0b273b6169bfe26cf3dc5e64db34	338754
-foomatic-20200219-Canon-iR-ADV_C5045_5051-Postscript-Canon.ppd.gz	7069e277e3a281597ce3d23851f5e0eb	338754
-foomatic-20200219-Canon-iR-ADV_C5235_5240-Postscript-Canon.ppd.gz	d9c6a63478c078ae2064080d077e8b88	338820
-foomatic-20200219-Canon-iR-ADV_C5250_5255-Postscript-Canon.ppd.gz	d9c6a63478c078ae2064080d077e8b88	338820
-foomatic-20200219-Canon-iR-ADV_C7055_7065-Postscript-Canon.ppd.gz	7922dcc4c838811660b67514079ea4b1	338560
-foomatic-20200219-Canon-iR-ADV_C7260_7270-Postscript-Canon.ppd.gz	51ce953beedb1083c67bc0526110301d	338901
-foomatic-20200219-Canon-iR-ADV_C7280-Postscript-Canon.ppd.gz	b2d0c754fa0a821a15d62f467f70a756	339176
-foomatic-20200219-Canon-iR-ADV_C9060_9070-Postscript-Canon.ppd.gz	9b9016789c2217e27ea2f297346916c7	338627
-foomatic-20200219-Canon-iR-ADV_C9065_9075-Postscript-Canon.ppd.gz	bb89e856614f2cf4d38b2959ee3e8811	338631
-foomatic-20200219-Canon-iR-ADV_C9270_9280-Postscript-Canon.ppd.gz	a87df291657c84dbba48c5d5beb6fd9c	339180
-foomatic-20200219-Canon-LBP-1000-ljet4.ppd.gz	0ad2b865d397648e39fb1c0ba77cb455	50988
-foomatic-20200219-Canon-LBP-1760-ljet4.ppd.gz	0e475b0c02c70744d761622ecd089ff3	50873
-foomatic-20200219-Canon-LBP-430-ljet4.ppd.gz	1c9f47ba243d4494624b982749ad8ba2	17158
 foomatic-20200219-Canon-LBP-4U-lbp8.ppd.gz	da04ba317a36d96d3937d6b8ff0c380d	43257
-foomatic-20200219-Canon-LBP6670-Postscript-Canon.ppd.gz	f54b576fd54b8bfb11fdde45e53d8975	288625
-foomatic-20200219-Canon-LBP6780_3580-Postscript-Canon.ppd.gz	e02cc2a06aa1a39cb423810d9009b94d	288631
-foomatic-20200219-Canon-LBP710C_PPD-Postscript-Canon.ppd.gz	ca4be37aa3aaef11a0affb2e42aa3c85	334855
-foomatic-20200219-Canon-LBP712C_PPD-Postscript-Canon.ppd.gz	62efee4b185c03cbe8b6f2eb3cb02fca	334858
-foomatic-20200219-Canon-LBP7660C-Postscript-Canon.ppd.gz	8a73eaf7e39200b352858a03b8adad7a	334862
-foomatic-20200219-Canon-LBP7680C_5280-Postscript-Canon.ppd.gz	7e3a68a37a59b21b78667f0ccf11a299	334862
-foomatic-20200219-Canon-LBP7780C_5480-Postscript-Canon.ppd.gz	dd0722972e366cf996e1857727c8a908	334855
-foomatic-20200219-Canon-LBP8780-Postscript-Canon.ppd.gz	d86adcef71394fe28bb6e008f2cf5636	288688
 foomatic-20200219-Canon-LIPS-III-lips3.ppd.gz	6130d5cb66d80afd9749a7de834f8f71	24402
 foomatic-20200219-Canon-LIPS-IIplus-lips2p.ppd.gz	7a6087c407215fe58a1868e78fec2abe	16623
-foomatic-20200219-Canon-S500-bj8XXYYZ.upp.ppd.gz	9b2c56c9a3ffae12ec0364fffc657ddf	166083
 foomatic-20200219-Citizen-ProJet_IIc-cdj500.ppd.gz	522b949faf6c52ee92979cc67813a0f7	52113
-foomatic-20200219-CItoh-M8510-m8510.ppd.gz	d363b89cf6828c36ed84c57d99355168	29927
 foomatic-20200219-Compaq-IJ900-lxm5700m.ppd.gz	e326ce8ef6f2f49e29a0705490765480	201023
-foomatic-20200219-DEC-DECWriter_500i-djet500.ppd.gz	4948bea0de077922c81c04c66ae45276	17145
 foomatic-20200219-DEC-LJ250-declj250.ppd.gz	462af91ee1308fc637f4516b9012a907	49404
-foomatic-20200219-Dell-M5200-Postscript.ppd.gz	116a4aaa32d44886926905abc6907094	288096
+foomatic-20200219-Epson-AL-M2300-eplaser.ppd.gz	5a1fcbea252f3cf33271f491074e4194	2630318
+foomatic-20200219-Epson-AL-M2310-eplaser.ppd.gz	5a1fcbea252f3cf33271f491074e4194	2630318
 foomatic-20200219-Epson-ActionPrinter_3250-ap3250.ppd.gz	040efac0dde19f78b520b8eedeaaf15e	32721
-foomatic-20200219-Epson-AL-2600-Postscript-Epson.ppd.gz	0fc05576083a3426d034bc51f1f795c1	335613
-foomatic-20200219-Epson-AL-C1900_PS3-Postscript-Epson.ppd.gz	2f9b73d286afd0c6ce937403eb499b78	335543
-foomatic-20200219-Epson-AL-C2000_PS3-Postscript-Epson.ppd.gz	4169c1878f135a525860ba74c91ea246	334821
-foomatic-20200219-Epson-AL-C2600-Postscript-Epson.ppd.gz	6866c2efd7331df0cbcabbe886519a02	335626
-foomatic-20200219-Epson-AL-C2800-Postscript-Epson.ppd.gz	f214966c09606dcb605cd9ceac0eb2a9	335404
-foomatic-20200219-Epson-AL-C3800-Postscript-Epson.ppd.gz	164d0caa0787bed5dcd8a61e85c15d76	335404
-foomatic-20200219-Epson-AL-C4000_PS3-Postscript-Epson.ppd.gz	f4215fa377cf4ddccf07b361145a68aa	335646
-foomatic-20200219-Epson-AL-C4100-Postscript-Epson.ppd.gz	bae12d54db51e8e95ed1f0f584d80521	335460
-foomatic-20200219-Epson-AL-C4200-Postscript-Epson.ppd.gz	9f86120acb774b7a6badead248009d44	335616
-foomatic-20200219-Epson-AL-C8500-ljet4.ppd.gz	0e475b0c02c70744d761622ecd089ff3	50873
-foomatic-20200219-Epson-AL-C8600_PS3-Postscript-Epson.ppd.gz	e48e34ef651e164e960fbb596e05dbe9	335678
-foomatic-20200219-Epson-AL-C9100-Postscript-Epson.ppd.gz	30bae1dfaae5f3b1955d68cfb1daf644	335879
-foomatic-20200219-Epson-AL-C9200-Postscript-Epson.ppd.gz	43668a296cf9b0ee9dc9b63ac8c69f35	335362
-foomatic-20200219-Epson-AL-CX21-Postscript-Epson.ppd.gz	f787db8dfb25d266ac3d8cac47cb0b5c	334752
-foomatic-20200219-Epson-AL-M2000-Postscript-Epson.ppd.gz	85ace2ae679d05a9cd1fcd41aaa8f986	289605
-foomatic-20200219-Epson-AL-M2300-eplaser.ppd.gz	d70e11456e17a1b72935804b231ca0dd	2572286
-foomatic-20200219-Epson-AL-M2310-eplaser.ppd.gz	d70e11456e17a1b72935804b231ca0dd	2572286
-foomatic-20200219-Epson-AL-M2400-Postscript-Epson.ppd.gz	3aed6c7ebe82ef929ee1afd0edbae10e	289617
-foomatic-20200219-Epson-AL-M4000-Postscript-Epson.ppd.gz	bea967738ec9d7b74f58e82cd17e7013	289866
-foomatic-20200219-Epson-AL-M8000-Postscript-Epson.ppd.gz	3151c9ee976cb717dd28594a02304210	289683
-foomatic-20200219-Epson-AL-MX20-Postscript-Epson.ppd.gz	2917c883a360edadbf78f93a26ad1cb5	289475
 foomatic-20200219-Epson-Dot_Matrix-epsonc.ppd.gz	ab78e0bb8d605c9f633e4e0fe42c80c5	2965
-foomatic-20200219-Epson-EPL-5800-ljet4.ppd.gz	0e475b0c02c70744d761622ecd089ff3	50873
-foomatic-20200219-Epson-EPL-5900_PS3-Postscript-Epson.ppd.gz	9db0dfc83ec7a309692cff64c0f9270b	334228
-foomatic-20200219-Epson-EPL-6100_PS3-Postscript-Epson.ppd.gz	752e2310250996e342806d12591c4cf9	334228
-foomatic-20200219-Epson-EPL-6200-Postscript-Epson.ppd.gz	800333f55623c8a06ca07e9297b709f8	334976
-foomatic-20200219-Epson-EPL-7100-laserjet.ppd.gz	8cc98d56bc44cac5c411d43d71e75259	90456
-foomatic-20200219-Epson-EPL-N2120-ljet4.ppd.gz	0e475b0c02c70744d761622ecd089ff3	50873
-foomatic-20200219-Epson-EPL-N2500_PS3-Postscript-Epson.ppd.gz	5582bbe35ea7fffd15870ef0b8effb4f	334778
-foomatic-20200219-Epson-EPL-N2550-Postscript-Epson.ppd.gz	b5678550a814b1065e9f2c120e762312	289428
-foomatic-20200219-Epson-EPL-N2700-Postscript-Epson.ppd.gz	500edceaab73b423ed0f752b58533d6d	335089
-foomatic-20200219-Epson-EPL-N3000-Postscript-Epson.ppd.gz	b23f5c430e07f64f711685cdd3d0f927	335108
-foomatic-20200219-Epson-EPL-N7000-Postscript-Epson.ppd.gz	8fd2508ed924f27afb8b76d669dfff46	335016
-foomatic-20200219-Epson-LP-1800-eplaser-jp.ppd.gz	8cbff1ce764e4951720e73ba17ce3368	2571894
-foomatic-20200219-Epson-LP-1900-eplaser-jp.ppd.gz	d43da8d0dfe474442086add88f222e00	2572284
-foomatic-20200219-Epson-LP-2200-eplaser-jp.ppd.gz	3058902d998659833477787651dc48f0	2572284
-foomatic-20200219-Epson-LP-2400-eplaser-jp.ppd.gz	b403b7b5eac6456d5cfd5ba77cfbb3e4	2572284
-foomatic-20200219-Epson-LP-2500-eplaser-jp.ppd.gz	f7e077554dcf35fc52ef328bcd1df857	2572284
-foomatic-20200219-Epson-LP-3000C-eplaser-jp.ppd.gz	60a215da754ded7ff6d1d3040435a474	3056665
-foomatic-20200219-Epson-LP-7500-eplaser-jp.ppd.gz	e70098735613b42e399e860006cc7d0f	2572284
-foomatic-20200219-Epson-LP-7700-eplaser-jp.ppd.gz	5911ccbad4be670e8c82919a4bf4817a	2572284
-foomatic-20200219-Epson-LP-7900-eplaser-jp.ppd.gz	2bf4cecccf75ad876bc97ce0317797de	2572284
-foomatic-20200219-Epson-LP-8000C-eplaser-jp.ppd.gz	bad917d5ce7046837728e7f8c1c07989	3056645
+foomatic-20200219-Epson-LP-1800-eplaser-jp.ppd.gz	498a184cb7fd39d06099230cd18ba108	2629276
+foomatic-20200219-Epson-LP-1900-eplaser-jp.ppd.gz	f60805cac69192f813d9737f2469afd7	2630316
+foomatic-20200219-Epson-LP-2200-eplaser-jp.ppd.gz	ee0f024a3086f8e6cb131d06cc1a6dba	2630316
+foomatic-20200219-Epson-LP-2400-eplaser-jp.ppd.gz	1aff4221d252a1ed567f2061da9b6413	2630316
+foomatic-20200219-Epson-LP-2500-eplaser-jp.ppd.gz	c9047dde6e1e213b8c1aaf0bc887095f	2630316
+foomatic-20200219-Epson-LP-3000C-eplaser-jp.ppd.gz	2273d7eb799e835854be4f6d719415a2	3074280
+foomatic-20200219-Epson-LP-7500-eplaser-jp.ppd.gz	6241cb9f30c43b0358382a89b92d8d8d	2630316
+foomatic-20200219-Epson-LP-7700-eplaser-jp.ppd.gz	e62429f43f623b2f58efc0f4738abb10	2630316
+foomatic-20200219-Epson-LP-7900-eplaser-jp.ppd.gz	fc06ff5332c6d9b536058ade5f2bbb61	2630316
+foomatic-20200219-Epson-LP-8000C-eplaser-jp.ppd.gz	02acad74eddaa4845820e73e85d3600b	3074260
+foomatic-20200219-Epson-LP-8100-eplaser-jp.ppd.gz	3ef28bbaa3ccc6c45d7c2978e9fee7c9	2630316
+foomatic-20200219-Epson-LP-8200C-eplaser-jp.ppd.gz	c60eff42102a4c87c8cc74e99ef69704	3074280
+foomatic-20200219-Epson-LP-8300C-eplaser-jp.ppd.gz	d1ec85b38bc2e4edd3ce33ffcab55cf8	3074280
+foomatic-20200219-Epson-LP-8300F-eplaser-jp.ppd.gz	3a77f9dc403bc88124bd9c4386c85fac	2630318
+foomatic-20200219-Epson-LP-8400F-eplaser-jp.ppd.gz	826413a0ed6db2e1fcfadeac9c9184f5	2630318
+foomatic-20200219-Epson-LP-8500C-eplaser-jp.ppd.gz	9828d4446ffe6ecfe55c032f806ec47f	3074280
+foomatic-20200219-Epson-LP-8600-eplaser-jp.ppd.gz	51d8f7adca1ea9384f3f0ec42c20d451	2630316
+foomatic-20200219-Epson-LP-8600F-eplaser-jp.ppd.gz	6022fda847f76e929ca706b48dd3a6e1	2630318
+foomatic-20200219-Epson-LP-8700-eplaser-jp.ppd.gz	4a313517c4bb965471b91e3fece00911	2630316
+foomatic-20200219-Epson-LP-8800C-eplaser-jp.ppd.gz	086981f17d2ff3cdb5a31b2a6abd6216	3074280
+foomatic-20200219-Epson-LP-8900-eplaser-jp.ppd.gz	91bbd22fa8a78feabed7451bc8f05096	2630316
+foomatic-20200219-Epson-LP-9000B-eplaser-jp.ppd.gz	ec71ea2c10ab9e2fe96af0f6c719058c	2630318
+foomatic-20200219-Epson-LP-9000C-eplaser-jp.ppd.gz	80eb08fd2f81b64ff2ff692a5de5e819	3074280
+foomatic-20200219-Epson-LP-9100-eplaser-jp.ppd.gz	9c770821e6fff7502e94b5eef96a344e	2630316
+foomatic-20200219-Epson-LP-9200B-eplaser-jp.ppd.gz	0053b9f04680469eb06b4288f46cac2e	2630318
+foomatic-20200219-Epson-LP-9300-eplaser-jp.ppd.gz	37c0d04700165b1130cb97c63a8c7e58	2630316
+foomatic-20200219-Epson-LP-9400-eplaser-jp.ppd.gz	2872c91408e35b0bda4ded754d7ef299	2630316
+foomatic-20200219-Epson-LP-9500C-eplaser-jp.ppd.gz	82870419b6bdde4185dd8da896bc70d9	3074280
+foomatic-20200219-Epson-LP-9600-eplaser-jp.ppd.gz	05981c832a1449f6f329f8ee6db76bd1	2629276
+foomatic-20200219-Epson-LP-9600S-eplaser-jp.ppd.gz	37d51ace747009eced70e343e769b64c	2630318
+foomatic-20200219-Epson-LP-M5000-eplaser-jp.ppd.gz	9828d4446ffe6ecfe55c032f806ec47f	3074280
+foomatic-20200219-Epson-LP-M5300-eplaser-jp.ppd.gz	9828d4446ffe6ecfe55c032f806ec47f	3074280
+foomatic-20200219-Epson-LP-M6000-eplaser-jp.ppd.gz	9828d4446ffe6ecfe55c032f806ec47f	3074280
+foomatic-20200219-Epson-LP-S210-eplaser-jp.ppd.gz	4a313517c4bb965471b91e3fece00911	2630316
+foomatic-20200219-Epson-LP-S300-eplaser-jp.ppd.gz	4a313517c4bb965471b91e3fece00911	2630316
+foomatic-20200219-Epson-LP-S3000-eplaser-jp.ppd.gz	4a313517c4bb965471b91e3fece00911	2630316
+foomatic-20200219-Epson-LP-S310-eplaser-jp.ppd.gz	4a313517c4bb965471b91e3fece00911	2630316
+foomatic-20200219-Epson-LP-S3200-eplaser-jp.ppd.gz	4a313517c4bb965471b91e3fece00911	2630316
+foomatic-20200219-Epson-LP-S3500-eplaser-jp.ppd.gz	4a313517c4bb965471b91e3fece00911	2630316
+foomatic-20200219-Epson-LP-S4000-eplaser-jp.ppd.gz	4a313517c4bb965471b91e3fece00911	2630316
+foomatic-20200219-Epson-LP-S4200-eplaser-jp.ppd.gz	4a313517c4bb965471b91e3fece00911	2630316
+foomatic-20200219-Epson-LP-S4500-eplaser-jp.ppd.gz	56bd22215c791467935b82e6564d208e	2630318
+foomatic-20200219-Epson-LP-S6500-eplaser-jp.ppd.gz	c5f5e9e026d6764e84f1f9254c24e990	3074280
+foomatic-20200219-Epson-LP-S7500-eplaser-jp.ppd.gz	9828d4446ffe6ecfe55c032f806ec47f	3074280
+foomatic-20200219-Epson-LP-S8100-eplaser-jp.ppd.gz	7e3a5c7cee4a840ce13ba2323431fea0	3074268
 foomatic-20200219-Epson-LP_8000-lp8000.ppd.gz	a3a5ec5cb104c64a160efde0098ab25f	32131
-foomatic-20200219-Epson-LP-8100-eplaser-jp.ppd.gz	bfa1be859b69c96ced99d58db8466f49	2572284
-foomatic-20200219-Epson-LP-8200C-eplaser-jp.ppd.gz	45aa07a16da04ef2dac4e4b5e4c5bd4f	3056665
-foomatic-20200219-Epson-LP-8300C-eplaser-jp.ppd.gz	65d3cf2624669eaa7ffd4c13bb486a75	3056665
-foomatic-20200219-Epson-LP-8300CPD-Postscript-Epson.ppd.gz	96c92775b408b3b74aea82bf8f765b7a	334847
-foomatic-20200219-Epson-LP-8300F-eplaser-jp.ppd.gz	94df5734d2f4c1fadd1c5d84f2c32769	2572286
-foomatic-20200219-Epson-LP-8400F-eplaser-jp.ppd.gz	6876bc323634b50cc1d9af33a71e622b	2572286
-foomatic-20200219-Epson-LP-8500C-eplaser-jp.ppd.gz	a8a279b93485e7c8712ff270d6d019f3	3056665
-foomatic-20200219-Epson-LP-8500CPD-Postscript-Epson.ppd.gz	559ca705ebfbf18e81c995f7cf130eed	334826
-foomatic-20200219-Epson-LP-8600-eplaser-jp.ppd.gz	5ef1e27e157bbc00d4e4945dc4ffa353	2572284
-foomatic-20200219-Epson-LP-8600F-eplaser-jp.ppd.gz	976c478421029bd648bdfe72cbcf1e88	2572286
-foomatic-20200219-Epson-LP-8700-eplaser-jp.ppd.gz	11af52bd999e3abba1a0770f1e129022	2572284
-foomatic-20200219-Epson-LP-8800C-eplaser-jp.ppd.gz	ad47ce76e80a2efd7f73fb1a5cd20266	3056665
-foomatic-20200219-Epson-LP-8800CPS-Postscript-Epson.ppd.gz	7d212ccabfadfaadf2fbdb3e602bbbde	335289
-foomatic-20200219-Epson-LP-8900-eplaser-jp.ppd.gz	d1311a6d09e0d430948ba78a4cb80335	2572284
-foomatic-20200219-Epson-LP-9000B-eplaser-jp.ppd.gz	9cf40224ddc00785cc4f0fb0a4a1d0c4	2572286
-foomatic-20200219-Epson-LP-9000C-eplaser-jp.ppd.gz	e8c99c4c3a9d30a8341e14e9e30163b2	3056665
-foomatic-20200219-Epson-LP-9100-eplaser-jp.ppd.gz	72e1548afb786c08df3922be89eb3b82	2572284
-foomatic-20200219-Epson-LP-9100PS3-Postscript-Epson.ppd.gz	7a20d766c408ab37708dd8a0454edc28	334615
-foomatic-20200219-Epson-LP-9200B-eplaser-jp.ppd.gz	4b6da6270fef42d40a94c6ecb5072cf2	2572286
-foomatic-20200219-Epson-LP-9200C-Postscript-Epson.ppd.gz	28fc58ea36b451ed807f1e205966544a	335030
-foomatic-20200219-Epson-LP-9300-eplaser-jp.ppd.gz	871c578fb8f4830d4741715e5b4e2d05	2572284
-foomatic-20200219-Epson-LP-9400-eplaser-jp.ppd.gz	4554b6e0f939d6ca3545e13bfe3cd85d	2572284
-foomatic-20200219-Epson-LP-9500C-eplaser-jp.ppd.gz	c5814311d31ea27820333df3ea93a52f	3056665
-foomatic-20200219-Epson-LP-9500CPS-Postscript-Epson.ppd.gz	be722444d6c34bfdc1c853faec0dc20b	335349
-foomatic-20200219-Epson-LP-9600-eplaser-jp.ppd.gz	280f47314647285cac2628888b267689	2571894
-foomatic-20200219-Epson-LP-9600S-eplaser-jp.ppd.gz	3e6f2a5a4f23e5a551efc0c7ddb13f0b	2572286
-foomatic-20200219-Epson-LP-9600SPD-Postscript-Epson.ppd.gz	1a41479bdf2e33709e5a10e2399d1c8c	289920
-foomatic-20200219-Epson-LP-9800C-Postscript-Epson.ppd.gz	06e1036f7a920d053132698c38a9810b	335221
-foomatic-20200219-Epson-LP-M5000-eplaser-jp.ppd.gz	a8a279b93485e7c8712ff270d6d019f3	3056665
-foomatic-20200219-Epson-LP-M5300-eplaser-jp.ppd.gz	a8a279b93485e7c8712ff270d6d019f3	3056665
-foomatic-20200219-Epson-LP-M6000-eplaser-jp.ppd.gz	a8a279b93485e7c8712ff270d6d019f3	3056665
-foomatic-20200219-Epson-LP-S210-eplaser-jp.ppd.gz	11af52bd999e3abba1a0770f1e129022	2572284
-foomatic-20200219-Epson-LP-S3000-eplaser-jp.ppd.gz	11af52bd999e3abba1a0770f1e129022	2572284
-foomatic-20200219-Epson-LP-S300-eplaser-jp.ppd.gz	11af52bd999e3abba1a0770f1e129022	2572284
-foomatic-20200219-Epson-LP-S310-eplaser-jp.ppd.gz	11af52bd999e3abba1a0770f1e129022	2572284
-foomatic-20200219-Epson-LP-S3200-eplaser-jp.ppd.gz	11af52bd999e3abba1a0770f1e129022	2572284
-foomatic-20200219-Epson-LP-S3500-eplaser-jp.ppd.gz	11af52bd999e3abba1a0770f1e129022	2572284
-foomatic-20200219-Epson-LP-S4000-eplaser-jp.ppd.gz	11af52bd999e3abba1a0770f1e129022	2572284
-foomatic-20200219-Epson-LP-S4200-eplaser-jp.ppd.gz	11af52bd999e3abba1a0770f1e129022	2572284
-foomatic-20200219-Epson-LP-S4500-eplaser-jp.ppd.gz	b88c89f28bb45dd61bc49fc3f25f49a2	2572286
-foomatic-20200219-Epson-LP-S6500-eplaser-jp.ppd.gz	85065da144fde7b82752923210ad451e	3056665
-foomatic-20200219-Epson-LP-S7500-eplaser-jp.ppd.gz	a8a279b93485e7c8712ff270d6d019f3	3056665
-foomatic-20200219-Epson-LP-S8100-eplaser-jp.ppd.gz	3f44067416ebbcd82989df229d8f9eb6	3056653
 foomatic-20200219-Epson-LX-300plus-ibmpro.ppd.gz	3d857a98886341cea95c25f21dea50c1	4676
-foomatic-20200219-Epson-MJ_520C-stcolor.ppd.gz	88ae75e8003ee80b4e649e63e1f8d0fe	111491
 foomatic-20200219-Epson-Stylus_Color_460-stcX.upp.ppd.gz	55005626a2ee713f99ae93563f341e72	93441
 foomatic-20200219-Epson-Stylus_Color_660-stc600X.upp.ppd.gz	9eb3c2ea0641222b4d933f199f1be3a7	97123
-foomatic-20200219-Epson-Stylus_Color_777-stcanyX.upp.ppd.gz	df6778eaf42a47386d1e3315f312d369	115782
 foomatic-20200219-Epson-Stylus_Color_850-stc800X.upp.ppd.gz	762fe97d9fc9482afb1212588df88093	96782
 foomatic-20200219-Epson-Stylus_Color_II-stc2X.upp.ppd.gz	e1ebb4e8470468c3337c3f737deb05b7	93354
-foomatic-20200219-Epson-Stylus_Color-stcolor.ppd.gz	0f7ca6ac11a7456945fee93fb56bf155	111491
-foomatic-20200219-Fuji_Xerox-DocuPrint_CM305_df-Postscript.ppd.gz	d39b00a843c7942a4d26df3741af7f09	334492
 foomatic-20200219-Generic-ESC_P_Dot_Matrix_Printer-lq850.ppd.gz	612b7f37268bce6a320da8372e050ff7	58605
 foomatic-20200219-Generic-IBM-Compatible_Dot_Matrix_Printer-ibmpro.ppd.gz	3d857a98886341cea95c25f21dea50c1	4676
-foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-ljet4.ppd.gz	0ad2b865d397648e39fb1c0ba77cb455	50953
-foomatic-20200219-Generic-PostScript_Printer-Postscript.ppd.gz	86046aff16dc2b40e71b28a9948fc567	333573
-foomatic-20200219-Gestetner-10512-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48576
-foomatic-20200219-Gestetner-2212-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48556
-foomatic-20200219-Gestetner-3502-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48575
-foomatic-20200219-Gestetner-3532_4235g-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48548
-foomatic-20200219-Gestetner-6002-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48548
-foomatic-20200219-Gestetner-9002-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48576
-foomatic-20200219-Gestetner-C7010-Postscript-Gestetner.ppd.gz	fb350e386d87be14efbe810098d3b225	335072
-foomatic-20200219-Gestetner-C7116-Postscript-Gestetner.ppd.gz	02cfaca350b32484f8ad7482a0e905cc	334854
-foomatic-20200219-Gestetner-C7425dn-Postscript-Gestetner.ppd.gz	c2ff022264194fa7b4f8cfc6b86472b1	336328
-foomatic-20200219-Gestetner-C7435n-Postscript-Gestetner.ppd.gz	b9bdff0c1b8bd649a9e7383d19f59d8c	336578
+foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-pxlcolor.ppd.gz	aa85461a590a8edde3bb2e6300015d51	79853
+foomatic-20200219-Gestetner-10512-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48581
+foomatic-20200219-Gestetner-2212-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48561
+foomatic-20200219-Gestetner-3502-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48580
+foomatic-20200219-Gestetner-3532_4235g-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48553
+foomatic-20200219-Gestetner-6002-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48553
+foomatic-20200219-Gestetner-9002-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48581
 foomatic-20200219-Gestetner-C7521n-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33495
 foomatic-20200219-Gestetner-C7526dn-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33505
-foomatic-20200219-Gestetner-C7528n-Postscript-Gestetner.ppd.gz	4932bf23beeb759f46c1fc66d0be7f9e	336981
-foomatic-20200219-Gestetner-C7535n-Postscript-Gestetner.ppd.gz	d79fdc9e71f82a8c21f45d0420be933d	336912
 foomatic-20200219-Gestetner-C7640nD-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
 foomatic-20200219-Gestetner-C8140ND-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33571
 foomatic-20200219-Gestetner-CS555-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33537
@@ -523,36 +329,30 @@
 foomatic-20200219-Gestetner-DSc1045-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33596
 foomatic-20200219-Gestetner-DSc1060-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33596
 foomatic-20200219-Gestetner-DSc1120-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33553
-foomatic-20200219-Gestetner-DSc1220ex-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
 foomatic-20200219-Gestetner-DSc1220-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
-foomatic-20200219-Gestetner-DSc1230ex-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
+foomatic-20200219-Gestetner-DSc1220ex-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
 foomatic-20200219-Gestetner-DSc1230-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
-foomatic-20200219-Gestetner-DSc1245ex-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
+foomatic-20200219-Gestetner-DSc1230ex-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
 foomatic-20200219-Gestetner-DSc1245-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
-foomatic-20200219-Gestetner-DSc1260ex-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
+foomatic-20200219-Gestetner-DSc1245ex-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
 foomatic-20200219-Gestetner-DSc1260-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
-foomatic-20200219-Gestetner-DSc224-Postscript-Gestetner.ppd.gz	af42a2c1c46be5d501dbaf69d98bca2b	335308
-foomatic-20200219-Gestetner-DSc328-Postscript-Gestetner.ppd.gz	9f060d2491bf063390142e4354b4c069	336688
-foomatic-20200219-Gestetner-DSc38-Postscript-Gestetner.ppd.gz	3c02723ff4f9dbf0b6f1a94e651d99fe	335398
-foomatic-20200219-Gestetner-DSc38u-Postscript-Gestetner.ppd.gz	6a94b87a7eaf44304451806f4858976b	336632
+foomatic-20200219-Gestetner-DSc1260ex-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
 foomatic-20200219-Gestetner-DSc424-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33518
-foomatic-20200219-Gestetner-DSc428-pxlcolor-Gestetner.ppd.gz	beaf2bc2b31dc99c7b3b70a1ca0456a4	144768
+foomatic-20200219-Gestetner-DSc428-pxlcolor-Gestetner.ppd.gz	54b1baa523e9cb33868b7b8395a4e6e0	144773
 foomatic-20200219-Gestetner-DSm1525-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33500
 foomatic-20200219-Gestetner-DSm2525-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
 foomatic-20200219-Gestetner-DSm2540-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
+foomatic-20200219-Gestetner-DSm415-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48487
+foomatic-20200219-Gestetner-DSm615-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48494
+foomatic-20200219-Gestetner-DSm618-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48494
+foomatic-20200219-Gestetner-DSm618d-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48514
+foomatic-20200219-Gestetner-DSm622-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48534
+foomatic-20200219-Gestetner-DSm651-pxlmono-Gestetner.ppd.gz	a13b115e8059860aae61e83b97898344	48571
+foomatic-20200219-Gestetner-DSm725-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33455
+foomatic-20200219-Gestetner-DSm735_735G-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33474
 foomatic-20200219-Gestetner-DSm_2625-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
 foomatic-20200219-Gestetner-DSm_2640-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
 foomatic-20200219-Gestetner-DSm_2660-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
-foomatic-20200219-Gestetner-DSm415-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48482
-foomatic-20200219-Gestetner-DSm615-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48489
-foomatic-20200219-Gestetner-DSm618d-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48509
-foomatic-20200219-Gestetner-DSm618-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48489
-foomatic-20200219-Gestetner-DSm622-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48529
-foomatic-20200219-Gestetner-DSm651-pxlmono-Gestetner.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48566
-foomatic-20200219-Gestetner-DSm725-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33455
-foomatic-20200219-Gestetner-DSm735_735G-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33474
-foomatic-20200219-Gestetner-F9199_9199nf-Postscript-Gestetner.ppd.gz	b7c4f771308e80e006ecbcdf667e54b4	288513
-foomatic-20200219-Gestetner-GS1227-Postscript-Gestetner.ppd.gz	cf060b111bae78ca3d7adadc0082365e	292761
 foomatic-20200219-Gestetner-GS3020-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
 foomatic-20200219-Gestetner-GS3030-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
 foomatic-20200219-Gestetner-GS3160-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
@@ -561,41 +361,41 @@
 foomatic-20200219-Gestetner-GWD5100-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33393
 foomatic-20200219-Gestetner-MP1100_DSm7110-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33499
 foomatic-20200219-Gestetner-MP1600_DSm716-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33417
-foomatic-20200219-Gestetner-MP_161_DSm416-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33410
-foomatic-20200219-Gestetner-MP_171-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33410
 foomatic-20200219-Gestetner-MP2000_DSm721d-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33437
-foomatic-20200219-Gestetner-MP_2001-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33461
 foomatic-20200219-Gestetner-MP2352_DSm923-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33475
 foomatic-20200219-Gestetner-MP2500_DSm625-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33456
+foomatic-20200219-Gestetner-MP3500_DSm735e-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33474
+foomatic-20200219-Gestetner-MP5500_DSm755-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33499
+foomatic-20200219-Gestetner-MPC1500_GS106-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33513
+foomatic-20200219-Gestetner-MPC2500_DSc525-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
+foomatic-20200219-Gestetner-MPC3500_DSc535-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
+foomatic-20200219-Gestetner-MP_161_DSm416-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33410
+foomatic-20200219-Gestetner-MP_171-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33410
+foomatic-20200219-Gestetner-MP_2001-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33461
 foomatic-20200219-Gestetner-MP_2501-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33461
 foomatic-20200219-Gestetner-MP_2510_DSm725e-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33455
 foomatic-20200219-Gestetner-MP_2550-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33474
 foomatic-20200219-Gestetner-MP_2851-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33474
 foomatic-20200219-Gestetner-MP_301-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33434
 foomatic-20200219-Gestetner-MP_305plus-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33434
-foomatic-20200219-Gestetner-MP3500_DSm735e-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33474
 foomatic-20200219-Gestetner-MP_4000-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33474
 foomatic-20200219-Gestetner-MP_4001-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33474
 foomatic-20200219-Gestetner-MP_4002-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33500
 foomatic-20200219-Gestetner-MP_401SPF-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33458
 foomatic-20200219-Gestetner-MP_402SPF-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33458
-foomatic-20200219-Gestetner-MP5500_DSm755-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33499
 foomatic-20200219-Gestetner-MP_6001-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33523
 foomatic-20200219-Gestetner-MP_6002-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33548
 foomatic-20200219-Gestetner-MP_6503-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33571
-foomatic-20200219-Gestetner-MPC1500_GS106-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33513
 foomatic-20200219-Gestetner-MP_C2050-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33571
 foomatic-20200219-Gestetner-MP_C2051-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33571
-foomatic-20200219-Gestetner-MPC2500_DSc525-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
 foomatic-20200219-Gestetner-MP_C2800-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33571
+foomatic-20200219-Gestetner-MP_C300-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33532
 foomatic-20200219-Gestetner-MP_C3001-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33571
 foomatic-20200219-Gestetner-MP_C3002-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33596
-foomatic-20200219-Gestetner-MP_C300-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33532
 foomatic-20200219-Gestetner-MP_C300SR-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33552
 foomatic-20200219-Gestetner-MP_C305-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33557
 foomatic-20200219-Gestetner-MP_C306Z-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33557
 foomatic-20200219-Gestetner-MP_C307-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33557
-foomatic-20200219-Gestetner-MPC3500_DSc535-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
 foomatic-20200219-Gestetner-MP_C4000-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33571
 foomatic-20200219-Gestetner-MP_C401-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33557
 foomatic-20200219-Gestetner-MP_C401SR-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33600
@@ -609,15 +409,8 @@
 foomatic-20200219-Gestetner-MP_CW2201-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33506
 foomatic-20200219-Gestetner-MP_W6700-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33439
 foomatic-20200219-Gestetner-MP_W7100-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33439
-foomatic-20200219-Gestetner-P7026-Postscript-Gestetner.ppd.gz	767c85144cb132e02cb0e7a658671aa9	289162
 foomatic-20200219-Gestetner-P7031n-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33430
-foomatic-20200219-Gestetner-P7032-Postscript-Gestetner.ppd.gz	694adabd7b28af3f89ccecd9837c736f	289310
-foomatic-20200219-Gestetner-P7126-Postscript-Gestetner.ppd.gz	16edd17fdd9190b9e0b7c82178d36574	289187
-foomatic-20200219-Gestetner-P7132n-Postscript-Gestetner.ppd.gz	5fe63dd4fbe3cf07edd96a1fd3e31668	290281
-foomatic-20200219-Gestetner-P7145-Postscript-Gestetner.ppd.gz	3fab159617df9ba383f7925fc5e86132	289288
 foomatic-20200219-Gestetner-P7245-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33474
-foomatic-20200219-Gestetner-P7325-Postscript-Gestetner.ppd.gz	05cd20daeba27660902cfc19d8a11a52	290106
-foomatic-20200219-Gestetner-P7431cn-Postscript-Gestetner.ppd.gz	b886bd86079c8e31796b4f6ba02a30b1	336039
 foomatic-20200219-Gestetner-P7527-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33423
 foomatic-20200219-Gestetner-P7535n-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33423
 foomatic-20200219-Gestetner-P7575-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33489
@@ -631,10 +424,11 @@
 foomatic-20200219-Gestetner-SP_C420DN-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33505
 foomatic-20200219-Gestetner-SP_C430DN-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33505
 foomatic-20200219-Gestetner-SP_W2470-PDF-Gestetner.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33393
+foomatic-20200219-HP-DesignJet_100plus-cdnj500.ppd.gz	ab7dc637ee8683233b016525388dbac3	100905
+foomatic-20200219-HP-OfficeJet_500-cdj550.ppd.gz	a73be5cb6b248f70b08ea493eda4a7f9	53817
 foomatic-20200219-IBM-3853_JetPrinter-jetp3852.ppd.gz	29170d588588d226f0b8ab326a29b085	29176
-foomatic-20200219-IBM-4303_Network_Color_Printer-Postscript.ppd.gz	74b3c35b2a6152d1da1235c9a6e8fce5	333357
 foomatic-20200219-Imagen-ImPress-imagen.ppd.gz	84f1a47e75004bf1bab197c8b3773971	17407
-foomatic-20200219-InfoPrint-Pro_1107EX-pxlmono-InfoPrint.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48576
+foomatic-20200219-InfoPrint-Pro_1107EX-pxlmono-InfoPrint.ppd.gz	a13b115e8059860aae61e83b97898344	48581
 foomatic-20200219-Infotec-MP_201-PDF-Infotec.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33410
 foomatic-20200219-Infotec-MP_501-PDF-Infotec.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33458
 foomatic-20200219-Infotec-Pro_8100S-PDF-Infotec.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33571
@@ -646,86 +440,10 @@
 foomatic-20200219-Infotec-Pro_C7100-PDF-Infotec.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33668
 foomatic-20200219-Infotec-Pro_C7100S-PDF-Infotec.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33668
 foomatic-20200219-Infotec-SP_5300-PDF-Infotec.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33458
-foomatic-20200219-Kodak-IS_70_CPII-Postscript.ppd.gz	2f5b31c230c1bfabdbc05c7b3ed61355	333266
-foomatic-20200219-KONICA_MINOLTA-bizhub_1050eP-Postscript-KONICA_MINOLTA.ppd.gz	e23904c0a495c3ea475db320811723fd	293359
-foomatic-20200219-KONICA_MINOLTA-bizhub_500-Postscript-KONICA_MINOLTA.ppd.gz	f47a61c0475dd5e7d7d54313ea0efadd	290727
-foomatic-20200219-KONICA_MINOLTA-bizhub_C250P-Postscript-KONICA_MINOLTA.ppd.gz	ee3535c74ae5acdc71674c4b81482382	337596
-foomatic-20200219-KONICA_MINOLTA-bizhub_C252P-Postscript-KONICA_MINOLTA.ppd.gz	1d588c5b431194e7705ad85a19d70986	337596
-foomatic-20200219-KONICA_MINOLTA-bizhub_C351-Postscript-KONICA_MINOLTA.ppd.gz	51f2a7fa9a0cec9592d7da762658b90c	337613
-foomatic-20200219-KONICA_MINOLTA-bizhub_C352P-Postscript-KONICA_MINOLTA.ppd.gz	f93c587e4053004f4d265853659cae89	337591
-foomatic-20200219-KONICA_MINOLTA-bizhub_C450P-Postscript-KONICA_MINOLTA.ppd.gz	159f2b36533e9dd82ec92ed85898fdc9	337613
-foomatic-20200219-KONICA_MINOLTA-bizhub_C451-Postscript-KONICA_MINOLTA.ppd.gz	d2a639384a90217e442ca7d9cdbff5d8	337605
-foomatic-20200219-KONICA_MINOLTA-bizhub_C550-Postscript-KONICA_MINOLTA.ppd.gz	4bac086fb01cf23b4e052dbd950e2267	337460
-foomatic-20200219-Kyocera-Ci-1100-Postscript-Kyocera.ppd.gz	039197282b8183cabe0eb11523b4f7e2	336766
-foomatic-20200219-Kyocera-CS-1650-Postscript-Kyocera.ppd.gz	55026fbd55897821eea11d399c1f1e67	290647
-foomatic-20200219-Kyocera-CS-2050-Postscript-Kyocera.ppd.gz	01b57d0a0a1c0870e7fa0dceec548258	290577
-foomatic-20200219-Kyocera-FS-1030D-Postscript-Kyocera.ppd.gz	1c0c421b2a42a22353b3c0d45524c391	290104
-foomatic-20200219-Kyocera-FS-1118MFP-Postscript-Kyocera.ppd.gz	34abc8193cddf6d7aed0c0bf68edfe18	289800
-foomatic-20200219-Kyocera-FS-1200-Postscript-Kyocera.ppd.gz	e25e2769e9fdc1405e6897f424e44a53	290078
-foomatic-20200219-Kyocera-FS-1700plus-Postscript-Kyocera.ppd.gz	804a482468b226cdd092cb77f657dbac	289866
-foomatic-20200219-Kyocera-FS-1700-Postscript-Kyocera.ppd.gz	382e1c0e31851903e0b0c5c36823f2a9	289868
-foomatic-20200219-Kyocera-FS-1714M-Postscript-Kyocera.ppd.gz	702be46b2bbb57656212d2b645676583	290104
-foomatic-20200219-Kyocera-FS-1800-Postscript-Kyocera.ppd.gz	293f537ceb4a30fd6006c7c11c6b7435	291173
-foomatic-20200219-Kyocera-FS-1900-Postscript-Kyocera.ppd.gz	e7e7d071a7609170ee744623d18f8530	290175
-foomatic-20200219-Kyocera-FS-1920-Postscript-Kyocera.ppd.gz	5722ea050f1722aabf58a314dabdca40	290188
-foomatic-20200219-Kyocera-FS-2000D-Postscript-Kyocera.ppd.gz	f57594e48a333884ad6cd02e87e56b1d	290321
-foomatic-20200219-Kyocera-FS-3700plus-Postscript-Kyocera.ppd.gz	630a047738b19e3e110a1f67a40b042a	289866
-foomatic-20200219-Kyocera-FS-3700-Postscript-Kyocera.ppd.gz	0145c990c54444cf5f3d2ceeff3bc646	289868
-foomatic-20200219-Kyocera-FS-3750-Postscript-Kyocera.ppd.gz	38b90d7242bc32e028b128ffa1d92778	290105
-foomatic-20200219-Kyocera-FS-3820N-Postscript-Kyocera.ppd.gz	9123f31976f95e629fd5a3ff5ac04ae3	290188
-foomatic-20200219-Kyocera-FS-3830N-Postscript-Kyocera.ppd.gz	3e3584d84af384b44a28a23aea75fe07	290188
-foomatic-20200219-Kyocera-FS-3900DN-Postscript-Kyocera.ppd.gz	9498a47a1c9b7cb0c9e0c8b7a3326aa7	290252
-foomatic-20200219-Kyocera-FS-4000DN-Postscript-Kyocera.ppd.gz	880d1f93abd216060c373312ed2ee895	290253
-foomatic-20200219-Kyocera-FS-5800C-Postscript-Kyocera.ppd.gz	fd469caf1e6b45843ad21a248329d646	336887
-foomatic-20200219-Kyocera-FS-5900C-Postscript-Kyocera.ppd.gz	3cc93954895fe2f49f4c2e64d2f6624e	336766
-foomatic-20200219-Kyocera-FS-600-Postscript-Kyocera.ppd.gz	6436384a426f784139f17496c48840ee	289540
-foomatic-20200219-Kyocera-FS-6020-Postscript-Kyocera.ppd.gz	1852caa74d5adad992d1e34a91358a67	291110
-foomatic-20200219-Kyocera-FS-6026-Postscript-Kyocera.ppd.gz	41881421844a0d78ae8e811646c13e1d	290112
-foomatic-20200219-Kyocera-FS-6300-Postscript-Kyocera.ppd.gz	b3c2090fc788ab42c7df60cd0203b9fd	289942
-foomatic-20200219-Kyocera-FS-6500plus-Postscript-Kyocera.ppd.gz	2a96515be5e849965cae9dc0c80233ef	289308
-foomatic-20200219-Kyocera-FS-6700-Postscript-Kyocera.ppd.gz	2f17732fab0354ea9d476d6fd8e09af8	290869
-foomatic-20200219-Kyocera-FS-6750-Postscript-Kyocera.ppd.gz	af098f76a3ff7c84170ec0a91ca4c7cd	291113
-foomatic-20200219-Kyocera-FS-680-Postscript-Kyocera.ppd.gz	9b12c9cd60126eb996071b2c83bdba55	289540
-foomatic-20200219-Kyocera-FS-6900-Postscript-Kyocera.ppd.gz	b865881ef6764e05d8287f75366f3498	290940
-foomatic-20200219-Kyocera-FS-6950DN-Postscript-Kyocera.ppd.gz	9ef8ea81ddaab8d8474d6f767ae44a8a	290299
-foomatic-20200219-Kyocera-FS-7000-Postscript-Kyocera.ppd.gz	b9bd1748f55664acd6b5d88fc68d4bcf	291446
-foomatic-20200219-Kyocera-FS-7028M-Postscript-Kyocera.ppd.gz	fe6f97e24b8a14dca33154e2933f0bdd	291516
-foomatic-20200219-Kyocera-FS-8000C-Postscript-Kyocera.ppd.gz	4e382c395e46288ef20e5e2d2a55e107	337159
-foomatic-20200219-Kyocera-FS-9000-Postscript-Kyocera.ppd.gz	bb1cd1e329d8d15a26e598cea96d23db	291517
-foomatic-20200219-Kyocera-FS-9100DN-Postscript-Kyocera.ppd.gz	5749a67d1a4372375246d60b73be20f8	291624
-foomatic-20200219-Kyocera-FS-920-Postscript-Kyocera.ppd.gz	e5f1be1b1fe2a86f836ec9547eed2ba5	289581
-foomatic-20200219-Kyocera-FS-C5015N-Postscript-Kyocera.ppd.gz	b59ba924c83d94f135730e94c7be92ec	335927
-foomatic-20200219-Kyocera-FS-C5016N-Postscript-Kyocera.ppd.gz	c6dbc7099296c21e4bb51108dfe8295f	335564
-foomatic-20200219-Kyocera-FS-C5020N-Postscript-Kyocera.ppd.gz	3db5220fb8f0b4b10a7c341c75d8871a	335966
-foomatic-20200219-Kyocera-FS-C5025N-Postscript-Kyocera.ppd.gz	08e0a53e94dbb9ace34e66e7eafe88d1	335995
-foomatic-20200219-Kyocera-FS-C5030N-Postscript-Kyocera.ppd.gz	bdba3e7028b8186b8c613751fe43e9a3	335966
-foomatic-20200219-Kyocera-FS-C8008N-Postscript-Kyocera.ppd.gz	78829de0c47fcae25bc7c274e5bd1fb0	336004
-foomatic-20200219-Kyocera-FS-C8026N-Postscript-Kyocera.ppd.gz	580a0fb03f39b583ac99019f1d77c1ac	336370
-foomatic-20200219-Kyocera-FS-C8100DNplus_KPDL-Postscript-Kyocera.ppd.gz	b0182ec3e5adb48f1c7b8a2cce7b7977	336712
-foomatic-20200219-Kyocera-FS-C8100DN-Postscript-Kyocera.ppd.gz	0993cd38ec6f042eb4b1c2e4a196b1b8	336701
-foomatic-20200219-Kyocera-KM-1510-Postscript-Kyocera.ppd.gz	9cba0fadc77beb570fe48630d2a4faad	289913
-foomatic-20200219-Kyocera-KM-1530-Postscript-Kyocera.ppd.gz	21617f6b38d95f84437bb4e8ef56c663	289979
-foomatic-20200219-Kyocera-KM-1810-Postscript-Kyocera.ppd.gz	7586e373466a01f4854be84e0ea955b3	289913
-foomatic-20200219-Kyocera-KM-1820-Postscript-Kyocera.ppd.gz	633a9fe770dbd9e41391171a5cfa6bc2	290051
-foomatic-20200219-Kyocera-KM-2030-Postscript-Kyocera.ppd.gz	266ea35b87d09ac8a4358946f2177dbd	290263
-foomatic-20200219-Kyocera-KM-3050-Postscript-Kyocera.ppd.gz	dd5b53e3850f8fd6af05eb46cb86e825	291310
-foomatic-20200219-Kyocera-KM-4230_5230-Postscript-Kyocera.ppd.gz	dca4b71577c57a98da434f7447cd5a03	289802
-foomatic-20200219-Kyocera-KM-4530-Postscript-Kyocera.ppd.gz	e3723ac5e8413b817241147305100e05	292443
-foomatic-20200219-Kyocera-KM-5530-Postscript-Kyocera.ppd.gz	71280416e15f18d54582a67344fa24b5	292509
-foomatic-20200219-Kyocera-KM-6030-Postscript-Kyocera.ppd.gz	69a1640c1cb216c70d1c635311acf2b9	291406
-foomatic-20200219-Kyocera-KM-6230-Postscript-Kyocera.ppd.gz	d1eefd239bf142f895595a7dbca39a48	289846
-foomatic-20200219-Kyocera-KM-6330-Postscript-Kyocera.ppd.gz	d208e1e071dae1dfcf47c6aa9c6755ae	292509
-foomatic-20200219-Kyocera-KM-C2520-Postscript-Kyocera.ppd.gz	f7e699ab8b8173f3e4c0e743916f32ae	336772
-foomatic-20200219-Kyocera-KM-C2630-Postscript-Kyocera.ppd.gz	155661ba56c688869cb4eeaf9d0312bd	336370
-foomatic-20200219-Kyocera-KM-C830-Postscript-Kyocera.ppd.gz	4e3bf862baeb7293643cf47a3b05c47c	337998
-foomatic-20200219-Kyocera-KM-C850-Postscript-Kyocera.ppd.gz	008f9aabf4c9336af7eb64891c76cd9c	336609
 foomatic-20200219-Lanier-MP_C501-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
 foomatic-20200219-Lanier-P_501-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33458
-foomatic-20200219-Lanier-SG3110DN-pxlcolor-Lanier.ppd.gz	c63c87522c6c43e103f8fa663192b214	122793
-foomatic-20200219-Lanier-SG3110SFNw-pxlcolor-Lanier.ppd.gz	c63c87522c6c43e103f8fa663192b214	122793
-foomatic-20200219-Lanier-SP_3400N-Postscript-Lanier.ppd.gz	17b91eebdf950cc36b0136086a3935d2	289341
-foomatic-20200219-Lanier-SP_3410DN-Postscript-Lanier.ppd.gz	57766158c7448a8336951e0421397a11	289442
-foomatic-20200219-Lanier-SP_3600DN-Postscript-Lanier.ppd.gz	1be42364d4060d3ae2581bb01d5e0d5a	290503
+foomatic-20200219-Lanier-SG3110DN-pxlcolor-Lanier.ppd.gz	5d86428064f4e66fab8a04d43f32f4c5	122798
+foomatic-20200219-Lanier-SG3110SFNw-pxlcolor-Lanier.ppd.gz	5d86428064f4e66fab8a04d43f32f4c5	122798
 foomatic-20200219-Lanier-SP_4310N-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33431
 foomatic-20200219-Lanier-SP_4510DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33458
 foomatic-20200219-Lanier-SP_5200DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33431
@@ -735,722 +453,137 @@
 foomatic-20200219-Lanier-SP_6430DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33481
 foomatic-20200219-Lanier-SP_8300DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33500
 foomatic-20200219-Lanier-SP_8400DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33547
-foomatic-20200219-Lanier-SP_C221N-Postscript-Lanier.ppd.gz	00fe886e066753cfe1bd10810b92d9aa	335761
-foomatic-20200219-Lanier-SP_C222DN-Postscript-Lanier.ppd.gz	df720185c873a86a6090c21671c020e4	335862
-foomatic-20200219-Lanier-SP_C222SF-Postscript-Lanier.ppd.gz	8946cf3c77fcdcbaef993db35a777d6c	336114
-foomatic-20200219-Lanier-SP_C232SF-Postscript-Lanier.ppd.gz	38972998420fe9be4de4b17396263bbf	336113
-foomatic-20200219-Lanier-SP_C311N-Postscript-Lanier.ppd.gz	415dda488bba0d5482ce3d98e297bb23	335761
-foomatic-20200219-Lanier-SP_C312DN-Postscript-Lanier.ppd.gz	0cc2b48fadf0504a36e9c5f3337ab674	335863
 foomatic-20200219-Lanier-SP_C340DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33531
 foomatic-20200219-Lanier-SP_C342DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33530
 foomatic-20200219-Lanier-SP_C352DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33530
-foomatic-20200219-Lanier-SP_C360DNw-Postscript-Lanier.ppd.gz	3f1aa687f9135062a2b6550150182eb7	338043
-foomatic-20200219-Lanier-SP_C360SFNw-Postscript-Lanier.ppd.gz	a133196e875238f59ec1683fd7bdae33	338747
 foomatic-20200219-Lanier-SP_C435DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33530
 foomatic-20200219-Lanier-SP_C730DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33530
 foomatic-20200219-Lanier-SP_C830DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33596
 foomatic-20200219-Lanier-SP_C840DN-PDF-Lanier.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33619
-foomatic-20200219-Lexmark-1020_Business-pcl3.ppd.gz	7afaa0cc2e06d03f32df02ca5bc583b9	69191
-foomatic-20200219-Lexmark-4039_10plus-Postscript.ppd.gz	365c44e5ca6628ad727378878fb36fb4	288312
-foomatic-20200219-Lexmark-C2132-Postscript-Lexmark.ppd.gz	a0c2b868718079737e0a018aef9bda04	335785
-foomatic-20200219-Lexmark-C500n-Postscript.ppd.gz	5ee65426a1dd4085bef21e0cadca78cb	333573
-foomatic-20200219-Lexmark-C510b-Postscript.ppd.gz	7eef9b983c07db44ef7be3bbace54bb7	288312
-foomatic-20200219-Lexmark-C510-Postscript-Lexmark.ppd.gz	17b00e906731a65598ff6b139f86fb94	335275
-foomatic-20200219-Lexmark-C520-Postscript-Lexmark.ppd.gz	67d1db35bb700c18f4d49cc9dc4728d1	335426
-foomatic-20200219-Lexmark-C522-Postscript-Lexmark.ppd.gz	514664ca7b825dfd5caa0744b86f17de	335568
-foomatic-20200219-Lexmark-C524-Postscript-Lexmark.ppd.gz	65160dba3e3889aa97fd48049e477e48	335754
-foomatic-20200219-Lexmark-C540-Postscript-Lexmark.ppd.gz	0c616b70d669bd933bdc16474d059390	335558
-foomatic-20200219-Lexmark-C543-Postscript-Lexmark.ppd.gz	42d2fc5e81c70a505372691d75107963	335555
-foomatic-20200219-Lexmark-C544-Postscript-Lexmark.ppd.gz	d15a62b7f4d076cea3dac547e676400a	335702
-foomatic-20200219-Lexmark-C546-Postscript-Lexmark.ppd.gz	e27807a15ae741ca74d3e82052b1c078	335702
-foomatic-20200219-Lexmark-C720n-pxlcolor.ppd.gz	03e1e86b1a8a9930fdb05a4e28cd56bd	144627
-foomatic-20200219-Lexmark-C734-Postscript-Lexmark.ppd.gz	ba34cf4c8d1f37c5fc67c66f240c1a8f	335668
-foomatic-20200219-Lexmark-C736-Postscript-Lexmark.ppd.gz	7662c057fa9ef1f6214e77f0185a61a6	335668
-foomatic-20200219-Lexmark-C750-Postscript-Lexmark.ppd.gz	8fb50307eb09308aaf6607eae7f9d2b7	335021
-foomatic-20200219-Lexmark-C752-Postscript-Lexmark.ppd.gz	630cc6c8fb3d32d0191570daa9920a42	335798
-foomatic-20200219-Lexmark-C780-Postscript-Lexmark.ppd.gz	c4e7a40066727668036c26ba933fb865	336288
-foomatic-20200219-Lexmark-C782-Postscript-Lexmark.ppd.gz	1c3e3102afcbd4253f37ceb73ad9c64d	336665
-foomatic-20200219-Lexmark-C910-Postscript-Lexmark.ppd.gz	f6041638e02e61d6f37ad72e96a6905f	334946
-foomatic-20200219-Lexmark-C912-Postscript-Lexmark.ppd.gz	eee138bcfc52b95bd552737521ce386c	334946
-foomatic-20200219-Lexmark-C930-Postscript-Lexmark.ppd.gz	25067f618f404bd25a128b69aa234556	336690
-foomatic-20200219-Lexmark-C935-Postscript-Lexmark.ppd.gz	365f0746927ed2ee0cb4086fafb896d5	336690
-foomatic-20200219-Lexmark-E238-pxlmono.ppd.gz	aa58c9f5adee4733e7705b8983202a36	79848
-foomatic-20200219-Lexmark-E260dn-Postscript-Lexmark.ppd.gz	2dd56975d8b84f485166cb5fa872c42f	334754
-foomatic-20200219-Lexmark-E350d-Postscript-Lexmark.ppd.gz	f7196c1019eab44aed65f13fd8356380	289188
-foomatic-20200219-Lexmark-E360dn-Postscript-Lexmark.ppd.gz	c34d5276e16388d4c319eb93f5dd8b11	334826
-foomatic-20200219-Lexmark-EG460dn-Postscript-Lexmark.ppd.gz	00a59793b81b705188055d54d5f449de	334826
-foomatic-20200219-Lexmark-Optra_Color_1200-Postscript.ppd.gz	d72f2227a4c3fe6bf0c312015a49e467	333573
-foomatic-20200219-Lexmark-T650-Postscript-Lexmark.ppd.gz	7b46a8fd1374d8ea7c21c8d0af167add	335294
-foomatic-20200219-Lexmark-T656-Postscript-Lexmark.ppd.gz	66eed4785b1bbea4ea4df56349b84eae	335218
-foomatic-20200219-Lexmark-W850-Postscript-Lexmark.ppd.gz	a8fd797609b6fb4303c46c6e8545e706	335205
-foomatic-20200219-Lexmark-X203n-Postscript-Lexmark.ppd.gz	9520745e31bfe160fc9972c364ede062	334041
-foomatic-20200219-Lexmark-X264dn-Postscript-Lexmark.ppd.gz	9bf6965fbc16894465c03d7a52c0216f	334600
-foomatic-20200219-Lexmark-X363dn-Postscript-Lexmark.ppd.gz	aa5280a71d1fe7d002bbcd6d7dfa77e8	334600
-foomatic-20200219-Lexmark-X463de-Postscript-Lexmark.ppd.gz	1b91cd8acd221d798e8efec2819a418f	334598
-foomatic-20200219-Lexmark-X543-Postscript-Lexmark.ppd.gz	08d4f6686f5611eda43cb464535fff13	335555
-foomatic-20200219-Lexmark-X544-Postscript-Lexmark.ppd.gz	8f68c1a67443bd632e7b6c806956217f	335702
-foomatic-20200219-Lexmark-X546-Postscript-Lexmark.ppd.gz	1992fa05579833059adb5247b96a8b7d	335702
-foomatic-20200219-Lexmark-X734de-Postscript-Lexmark.ppd.gz	948fdb7dd1a98a36ad0dfa6989bc1201	335598
-foomatic-20200219-Lexmark-X860de-Postscript-Lexmark.ppd.gz	b7038260896e06e62008af06db19d626	335283
-foomatic-20200219-Lexmark-X940e-Postscript-Lexmark.ppd.gz	5eb2c052845fff339b0c54bce9303e9e	336690
-foomatic-20200219-Minolta-magicolor_3100-Postscript.ppd.gz	47c740ae62767ef358eb431ed29d158e	333357
-foomatic-20200219-Minolta-PagePro_8L-ljet2p.ppd.gz	fde3c7e3f90f7b94a44b8c1977d174f6	106692
-foomatic-20200219-NEC-P2X-necp2xX.upp.ppd.gz	d63f9d97c5b3018ff0759ae1a11dd0e3	136155
+foomatic-20200219-Lexmark-C720n-pxlcolor.ppd.gz	841f926bcf703837272e12aeb2e4bed5	144632
+foomatic-20200219-Lexmark-E238-pxlmono.ppd.gz	aa85461a590a8edde3bb2e6300015d51	79853
 foomatic-20200219-NEC-PICTY180-picty180.ppd.gz	701d6194b98c15676dcf15885176e72c	43669
-foomatic-20200219-NEC-Pinwriter_P20-necp6.ppd.gz	72fa6ea2a854a975d8a4b84ae13c82cc	58605
 foomatic-20200219-NEC-PinWriter_P6-necp6.ppd.gz	72fa6ea2a854a975d8a4b84ae13c82cc	58605
-foomatic-20200219-NEC-SuperScript_650C-pcl3.ppd.gz	7afaa0cc2e06d03f32df02ca5bc583b9	69191
-foomatic-20200219-NRG-SP_3500N-Postscript-NRG.ppd.gz	b6435a70515a28ac1ef9bdb730d626d0	289457
-foomatic-20200219-NRG-SP_3510DN-Postscript-NRG.ppd.gz	c8ed32e07d01a07db9b85046372a3d82	289558
-foomatic-20200219-NRG-SP_C242DN-Postscript-NRG.ppd.gz	6654ec22ff15af092b2263fe6a9486df	336654
-foomatic-20200219-Oce-3145PS-Postscript2-Oce.ppd.gz	ef9548876987e4a51e30fc1cde7de437	288398
-foomatic-20200219-Oce-8445PS-Postscript2-Oce.ppd.gz	b44cb34d01429d95b4cd9d2ec3535fd2	288370
-foomatic-20200219-Oce-9050-oce9050.ppd.gz	037521d171d0e95d522d20038d0aa816	18469
-foomatic-20200219-Oce-9230-Postscript2-Oce.ppd.gz	4d80e659f74ff94c80aac700113e2722	288369
-foomatic-20200219-Oce-9260-Postscript2-Oce.ppd.gz	907357c7613a2b39d685d8047698d730	288369
-foomatic-20200219-Oce-PPC3073PS-Postscript-Oce.ppd.gz	872d6ce5b9844140451ffb1c12697aa9	288449
-foomatic-20200219-Oce-PPC3074PS-Postscript-Oce.ppd.gz	872d6ce5b9844140451ffb1c12697aa9	288449
-foomatic-20200219-Oce-PPC5115PS-Postscript-Oce.ppd.gz	bde30dc2bd5a64c44a8548ffb51e0c85	288567
-foomatic-20200219-Oce-VarioPrint_2045PS-Postscript-Oce.ppd.gz	681e56481bbf4f49d0de418772b9d380	288435
-foomatic-20200219-Oce-VarioPrint_2090PS-Postscript-Oce.ppd.gz	30dda5c29442c276d54c69ad03906315	334019
-foomatic-20200219-Oce-VarioPrint_2100PS-Postscript-Oce.ppd.gz	b6f7a93865fc33517663c6df7d93f8a7	334521
-foomatic-20200219-Oce-VarioPrint_2105PS-Postscript-Oce.ppd.gz	43a37a2c739b078b16a2617889669e48	288458
-foomatic-20200219-Oki-14i-Postscript-Oki.ppd.gz	fec4c440856768116a5026c3b6abfd32	289370
-foomatic-20200219-Oki-B4350-Postscript-Oki.ppd.gz	88a5ad14f1977f18c52f8cfad186e498	290266
-foomatic-20200219-Oki-C5400-Postscript-Oki.ppd.gz	b8de5c04fc938266ca557bcdc3ab68a5	338083
-foomatic-20200219-Oki-C5700-Postscript-Oki.ppd.gz	948645a238925bfb326908e9eaf0662d	339305
-foomatic-20200219-Oki-C5900-Postscript-Oki.ppd.gz	8640378d45abe217b50797bb33bab700	339304
-foomatic-20200219-Oki-C6100-Postscript-Oki.ppd.gz	0bc2da589078b75c4d31cbfca0f0ac35	339308
-foomatic-20200219-Oki-C8800-Postscript-Oki.ppd.gz	d5df71195e68bb5e26e1d5c00e873a06	339230
-foomatic-20200219-Oki-C9600-Postscript-Oki.ppd.gz	56c477eb2ff505323df78c16f679d84a	339239
-foomatic-20200219-Oki-Microline_182-oki182.ppd.gz	72136b4a6538ef16a333de42f894c16b	23158
+foomatic-20200219-NEC-Pinwriter_P20-necp6.ppd.gz	72fa6ea2a854a975d8a4b84ae13c82cc	58605
 foomatic-20200219-Oki-ML_320-okiibm.ppd.gz	08413db1fd51369c2994e547eba091b0	10399
-foomatic-20200219-Oki-Okijet_2500-cdj550.ppd.gz	a73be5cb6b248f70b08ea493eda4a7f9	53817
-foomatic-20200219-Oki-OL410e-ljet4.ppd.gz	1c9f47ba243d4494624b982749ad8ba2	17158
-foomatic-20200219-Olivetti-JP350S-laserjet.ppd.gz	8cc98d56bc44cac5c411d43d71e75259	90456
-foomatic-20200219-Olivetti-JP450-djet500.ppd.gz	4948bea0de077922c81c04c66ae45276	17145
+foomatic-20200219-Oki-Microline_182-oki182.ppd.gz	72136b4a6538ef16a333de42f894c16b	23158
 foomatic-20200219-Panasonic-KX-P1150-eps9high.ppd.gz	fc27aa053ba65c3d90c3159cd0d5073a	10266
 foomatic-20200219-Ricoh-4081-r4081.ppd.gz	be4af06638bf3a8a1b5a06ea28a10a7b	216957
-foomatic-20200219-Ricoh-ColorLaser_AP828-Postscript-Ricoh.ppd.gz	4043bc77def30fe7c1dbc07eb89de868	335096
-foomatic-20200219-Ricoh-DDP_70-Postscript-Ricoh.ppd.gz	39640ef27431f625184e652d3c817156	289999
-foomatic-20200219-Ricoh-DDP_92-Postscript-Ricoh.ppd.gz	f193c4095187147ca73e6ce32278fa3d	289998
-foomatic-20200219-Ricoh-EMP_156-Postscript-Ricoh.ppd.gz	51741bb4372728e6caf5cbc2c5b125e1	289224
-foomatic-20200219-Ricoh-GX_3050N-pxlcolor-Ricoh.ppd.gz	c63c87522c6c43e103f8fa663192b214	122768
-foomatic-20200219-Ricoh-GX_3050SFN-pxlcolor-Ricoh.ppd.gz	c63c87522c6c43e103f8fa663192b214	122768
-foomatic-20200219-Ricoh-GX_E3350N-pxlcolor-Ricoh.ppd.gz	c63c87522c6c43e103f8fa663192b214	122768
-foomatic-20200219-Ricoh-GX_E5550N-pxlcolor-Ricoh.ppd.gz	c63c87522c6c43e103f8fa663192b214	122793
-foomatic-20200219-Ricoh-SP_2300L-pcl5-Ricoh.ppd.gz	0de3c5ca2bb01a6ecbd63d689a51d87b	524079
-foomatic-20200219-Ricoh-SP_330DN-Postscript-Ricoh.ppd.gz	284c89e39139298a8ff4c4373ca3a46a	289810
-foomatic-20200219-Ricoh-SP_330SFN-Postscript-Ricoh.ppd.gz	a18df1e75681fea4c719c67af612e1ed	289810
-foomatic-20200219-Ricoh-SP_3700-Postscript-Ricoh.ppd.gz	8b4bb011a6e5d8947f6e10762f60890d	289810
-foomatic-20200219-Ricoh-SP_3700SF-Postscript-Ricoh.ppd.gz	64b9f82e4919b68e0dca53299131645b	289810
-foomatic-20200219-Ricoh-SP_400DN-Postscript-Ricoh.ppd.gz	b7905112c5cbd7c43962679c9e261bbe	290762
+foomatic-20200219-Ricoh-GX_3050N-pxlcolor-Ricoh.ppd.gz	5d86428064f4e66fab8a04d43f32f4c5	122773
+foomatic-20200219-Ricoh-GX_3050SFN-pxlcolor-Ricoh.ppd.gz	5d86428064f4e66fab8a04d43f32f4c5	122773
+foomatic-20200219-Ricoh-GX_E3350N-pxlcolor-Ricoh.ppd.gz	5d86428064f4e66fab8a04d43f32f4c5	122773
+foomatic-20200219-Ricoh-GX_E5550N-pxlcolor-Ricoh.ppd.gz	5d86428064f4e66fab8a04d43f32f4c5	122798
 foomatic-20200219-Ricoh-SP_450DN-PDF-Ricoh.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33458
-foomatic-20200219-Ricoh-SP_C250DN-Postscript-Ricoh.ppd.gz	b6758c6dfcb23a3b21f254d30e3a9375	336654
-foomatic-20200219-Ricoh-SP_C250SF-Postscript-Ricoh.ppd.gz	20d2484c38402587113fe48fed8f50f3	336654
-foomatic-20200219-Ricoh-SP_C261DNw-Postscript-Ricoh.ppd.gz	654781edd34fe0c04ce580cf522c4613	336654
-foomatic-20200219-Samsung-C140x-Postscript-Samsung.ppd.gz	c08afbfadc48acbbeddc665ceba2f1c2	333891
-foomatic-20200219-Samsung-C2620-Postscript-Samsung.ppd.gz	579d2c8105f73b845d201dda98784953	334215
-foomatic-20200219-Samsung-C2670-Postscript-Samsung.ppd.gz	f788cbd49c5e1f9240c9d2bf6543646a	334390
-foomatic-20200219-Samsung-C460-Postscript-Samsung.ppd.gz	7ceec7e8ff5b7a36823a02f17b8ce1e6	333830
-foomatic-20200219-Samsung-C4820-Postscript-Samsung.ppd.gz	42543334b9742e2ea77e31f44223d61d	334316
-foomatic-20200219-Samsung-C48x-Postscript-Samsung.ppd.gz	c86931d81535107e706d575687366e80	333861
-foomatic-20200219-Samsung-CLP-350-Postscript-Samsung.ppd.gz	3884ac5502d78bf7f005721c63f9dbe1	333347
-foomatic-20200219-Samsung-CLP-410-Postscript-Samsung.ppd.gz	7f735030e32c34dc2bbc5449e90d4b4e	333860
-foomatic-20200219-Samsung-CLP-610-pxlcolor.ppd.gz	03e1e86b1a8a9930fdb05a4e28cd56bd	144627
-foomatic-20200219-Samsung-CLP-660-Postscript-Samsung.ppd.gz	10d5d77e76e827bbc61d0fbc18a063cc	333798
-foomatic-20200219-Samsung-CLP-670-Postscript-Samsung.ppd.gz	cb9f4152c2572340c73bb96454374a56	333980
-foomatic-20200219-Samsung-CLP-680-Postscript-Samsung.ppd.gz	3d75bc01d4956d9f5fd9e875ccb4b406	334109
-foomatic-20200219-Samsung-CLP-770-Postscript-Samsung.ppd.gz	05a6f3f9630c1ce2412ecfa7186ead9f	334011
-foomatic-20200219-Samsung-CLP-775-Postscript-Samsung.ppd.gz	0a95915c5a50484c2f6e66f24ab8659d	334026
-foomatic-20200219-Samsung-CLX-3300-Postscript-Samsung.ppd.gz	b15bc3c584831d4f1f2bce6236ed4b02	333830
-foomatic-20200219-Samsung-CLX-6200-Postscript-Samsung.ppd.gz	644a85157a8c9580d014b349ae8e2723	333711
-foomatic-20200219-Samsung-CLX-6220-Postscript-Samsung.ppd.gz	477477a19c698c79309ecdfff8cfd557	333931
-foomatic-20200219-Samsung-CLX-6250-Postscript-Samsung.ppd.gz	efb96913f39b777dbccdd04a1e0aa286	333945
-foomatic-20200219-Samsung-CLX-8380-Postscript-Samsung.ppd.gz	c9bdaea18fb1189b7cf77356de06c5b6	334036
-foomatic-20200219-Samsung-CLX-8385-Postscript-Samsung.ppd.gz	6ee027613c93ffd7f6b9f3e1c34ca1e4	334069
-foomatic-20200219-Samsung-CLX-8640_8650-Postscript-Samsung.ppd.gz	994358c729f1296d0d26f926fe676ae9	334385
-foomatic-20200219-Samsung-CLX-9250_9350-Postscript-Samsung.ppd.gz	e2d2f3af710c4467bee9ec606698a66c	333897
-foomatic-20200219-Samsung-CLX-9252_9352-Postscript-Samsung.ppd.gz	5006744e2efb933f2c335f0e342fbf4a	334489
-foomatic-20200219-Samsung-CLX-92x1_93x1-Postscript-Samsung.ppd.gz	0e378927d854ce5fe75825f19c97b83b	334321
-foomatic-20200219-Samsung-K3250-Postscript-Samsung.ppd.gz	2f07fc353403f6da3d4d4709c6477e4d	288918
-foomatic-20200219-Samsung-K401-Postscript-Samsung.ppd.gz	2341fddce87bf3ebf8ce5f0642e121b2	288849
-foomatic-20200219-Samsung-K703-Postscript-Samsung.ppd.gz	47c2559051c4aa6fe1ad6e7f64d78d86	288949
-foomatic-20200219-Samsung-K7600-Postscript-Samsung.ppd.gz	d835893e4bde3ee05a0169db5f9fdeaf	288949
-foomatic-20200219-Samsung-M337x_387x_407x-Postscript-Samsung.ppd.gz	42e18b15eaba2ffd2bc1bfb78aa359f6	288519
-foomatic-20200219-Samsung-M403x-Postscript-Samsung.ppd.gz	b2b10a619828584a59b725a6bf5b7a67	288496
-foomatic-20200219-Samsung-M408x-Postscript-Samsung.ppd.gz	526fd2c2abd19962a5a69686f9b5d194	288556
-foomatic-20200219-Samsung-M4370_5370-Postscript-Samsung.ppd.gz	ae66e99a78d5fdff56ec6e8fcc962267	288854
-foomatic-20200219-Samsung-M453x-Postscript-Samsung.ppd.gz	16dafb85ea7b8bece32c6b04bd84932d	288747
-foomatic-20200219-Samsung-M458x-Postscript-Samsung.ppd.gz	af4d62fba98662d72c78eac9212927ed	288672
-foomatic-20200219-Samsung-M5270-Postscript-Samsung.ppd.gz	aa28489bdd701e36ad56f806a3b7ab64	288854
-foomatic-20200219-Samsung-ML-1650-ljet4.ppd.gz	0ad2b865d397648e39fb1c0ba77cb455	50992
-foomatic-20200219-Samsung-ML-2150-Postscript-Samsung.ppd.gz	7872e5a3919af724a5f8d8ac4809948f	288122
-foomatic-20200219-Samsung-ML-2550-Postscript-Samsung.ppd.gz	7a4309a5e65d22294f92a2b0a586ca76	288122
-foomatic-20200219-Samsung-ML-2570-Postscript-Samsung.ppd.gz	8f6cfaa0596bfbfb9bacf4d0dc0d28b8	287914
-foomatic-20200219-Samsung-ML-2850-Postscript-Samsung.ppd.gz	974a7af5c7556fa0a6a911fc386cb9c7	288152
-foomatic-20200219-Samsung-ML-2855-Postscript-Samsung.ppd.gz	6972ea71e32214525109dd88209daa90	288339
-foomatic-20200219-Samsung-ML-3470-Postscript-Samsung.ppd.gz	6d3a985bb635e662eb3b7d1e12f2b371	288209
-foomatic-20200219-Samsung-ML-371x-Postscript-Samsung.ppd.gz	1f2028c9c26d2c4c5bc039543998d433	288445
-foomatic-20200219-Samsung-ML-4050-Postscript-Samsung.ppd.gz	2ddbb7137f81b438975d85c903a087bc	288133
-foomatic-20200219-Samsung-ML-4055-Postscript-Samsung.ppd.gz	4401fb516acead00ad3394c360008a74	288133
-foomatic-20200219-Samsung-ML-451x_501x-Postscript-Samsung.ppd.gz	baf2a44e52d81a7abd1024fbb5e7d387	288446
-foomatic-20200219-Samsung-ML-4550-Postscript-Samsung.ppd.gz	1abd2921ee548fa36f2784376342feac	288014
-foomatic-20200219-Samsung-ML-4555-Postscript-Samsung.ppd.gz	1abd2921ee548fa36f2784376342feac	288014
-foomatic-20200219-Samsung-ML-551x_651x-Postscript-Samsung.ppd.gz	14e5ed9c3be3980684dc5880a399c3cd	288455
-foomatic-20200219-Samsung-ML-8850_8950-Postscript-Samsung.ppd.gz	d9f6f7385ffd519dab22885b87d02c56	288420
-foomatic-20200219-Samsung-ML-8x00-Postscript-Samsung.ppd.gz	558b86f659a3bb45c9fcc7546b7cab74	287863
-foomatic-20200219-Samsung-SCX-483x_5x3x-Postscript-Samsung.ppd.gz	5607b44fbe6fe9eb75d9969db97fb165	288482
-foomatic-20200219-Samsung-SCX-4x28-Postscript-Samsung.ppd.gz	7bfba145c3d0827841036667e57c7484	288173
-foomatic-20200219-Samsung-SCX-5635-Postscript-Samsung.ppd.gz	1be3ecbf2704d8cbc602cb83b14eb6c3	288384
-foomatic-20200219-Samsung-SCX-5835_5935-Postscript-Samsung.ppd.gz	15fbb14ec46291b8806a82f433ded3f2	288460
-foomatic-20200219-Samsung-SCX-6545-Postscript-Samsung.ppd.gz	33839eef498c1845f2b2286cfb1e02e6	288631
-foomatic-20200219-Samsung-SCX-6545X-Postscript-Samsung.ppd.gz	cfffe97d525249a55d9175fe0405f510	288719
-foomatic-20200219-Samsung-SCX-681x-Postscript-Samsung.ppd.gz	7bd9fa412114a1e6e8d580fd258246bd	288502
-foomatic-20200219-Samsung-SCX-6x20-Postscript-Samsung.ppd.gz	c6825c24b4f9b44ace559bcd3b098fbd	288098
-foomatic-20200219-Samsung-SCX-6x22-Postscript-Samsung.ppd.gz	f2ef0037be8b8a154834840fcb280157	288014
-foomatic-20200219-Samsung-SCX-6x45-Postscript-Samsung.ppd.gz	8f29623cbd2132488d4f04fc17ce37d9	288457
-foomatic-20200219-Samsung-SCX-6x55-Postscript-Samsung.ppd.gz	d33617be0a6a7d6b9b547a1fdaeddc87	288631
-foomatic-20200219-Samsung-SCX-8030_8040-Postscript-Samsung.ppd.gz	b3755871aeb3977ecaf2e6d9a0f10ea4	288459
-foomatic-20200219-Samsung-SCX-8123_8128-Postscript-Samsung.ppd.gz	4e6bd0c6712b532d15b3f29e55960969	288663
-foomatic-20200219-Samsung-SCX-8230_8240-Postscript-Samsung.ppd.gz	475b3e1e279f554b9b4a71bd8ee67589	288832
-foomatic-20200219-Sharp-AJ-1800-pcl3.ppd.gz	e22685fde29e79bcb33cec61dd9c3a4c	220876
-foomatic-20200219-Sharp-AR-155FG_PS-Postscript-Sharp.ppd.gz	6d39f3a2cad3964c5793543c2f8ee215	289912
-foomatic-20200219-Sharp-AR-160M_PS-Postscript-Sharp.ppd.gz	72bc381a18f11fee8938f51574ffbd1a	289813
-foomatic-20200219-Sharp-AR-163FG_PS-Postscript-Sharp.ppd.gz	e7ab549745fd7335a589f38314f137c7	290000
-foomatic-20200219-Sharp-AR-163G_PS-Postscript-Sharp.ppd.gz	9d4d26882d5188e9c4dd428b5dc33485	290003
-foomatic-20200219-Sharp-AR-168D-Postscript-Sharp.ppd.gz	7429eb7dfce180e10978829e51778d4f	289916
-foomatic-20200219-Sharp-AR-168S-Postscript-Sharp.ppd.gz	dd888ac2b5e38108a6bdf0143bc84db7	289817
-foomatic-20200219-Sharp-AR-200M_PS-Postscript-Sharp.ppd.gz	70de57ca6dd201a0fd19b52a795e70e2	289813
-foomatic-20200219-Sharp-AR-205FG_PS-Postscript-Sharp.ppd.gz	17a41e67300b92d668e2ab0707135467	290099
-foomatic-20200219-Sharp-AR-205G_PS-Postscript-Sharp.ppd.gz	795afc187d82f370659e4c82c76f0631	290102
-foomatic-20200219-Sharp-AR-266FP_PS-Postscript-Sharp.ppd.gz	6aac239c2cb56726a7e2758554d5f32f	291265
-foomatic-20200219-Sharp-AR-311FP_PS-Postscript-Sharp.ppd.gz	0310ff62fbaa6a8e25042a979380b6ff	291764
-foomatic-20200219-Sharp-AR-5220-Postscript-Sharp.ppd.gz	30a1e534adfc120bb58acfc2be415bf4	289817
-foomatic-20200219-Sharp-AR-555M_PS-Postscript-Sharp.ppd.gz	813f5f6dc5a7aa2690956e98219f2e1d	291389
-foomatic-20200219-Sharp-AR-705M_PS-Postscript-Sharp.ppd.gz	fc799e73235084585cb7c91924ef984e	291122
-foomatic-20200219-Sharp-AR-B07-Postscript-Sharp.ppd.gz	8960b7486d0e3cb190a495fd9aed9035	289701
-foomatic-20200219-Sharp-AR-BC260-Postscript-Sharp.ppd.gz	709881068bf90e54c5ddf09470862603	339964
-foomatic-20200219-Sharp-AR-C170FP_PS-Postscript-Sharp.ppd.gz	9d37077cd49b389c25ff5c9f24e987cf	339960
-foomatic-20200219-Sharp-AR-C260P-Postscript-Sharp.ppd.gz	709881068bf90e54c5ddf09470862603	339964
-foomatic-20200219-Sharp-AR-M161_PS-Postscript-Sharp.ppd.gz	05b73c399fe2458389d143fa8f655744	290007
-foomatic-20200219-Sharp-AR-M165_PS-Postscript-Sharp.ppd.gz	775d0fde0ae1574d83658bc71426ec25	290106
-foomatic-20200219-Sharp-AR-M205_PS-Postscript-Sharp.ppd.gz	f16cd1488dba0dd1647f9cac523d4144	289916
-foomatic-20200219-Sharp-AR-M206_PS-Postscript-Sharp.ppd.gz	9fdf5b9cbad2586a060cce6c06dea92c	290106
-foomatic-20200219-Sharp-AR-M236_PS-Postscript-Sharp.ppd.gz	01071ee0affc84d5d4f1d3fe93aa6ef1	291269
-foomatic-20200219-Sharp-AR-M351N-Postscript-Sharp.ppd.gz	ba2e51fbdf7662f78524612b36f670d7	291768
-foomatic-20200219-Sharp-AR-M550N-Postscript-Sharp.ppd.gz	3a7cfb23efc3f4236547efa634db0701	291393
-foomatic-20200219-Sharp-AR-M700N-Postscript-Sharp.ppd.gz	b31f67eebff460af7f9a4d47763d375b	291126
-foomatic-20200219-Sharp-AR-N182FG-Postscript-Sharp.ppd.gz	5a6af35f25f3d7e6bfef3ae7e39a382d	290199
-foomatic-20200219-Sharp-AR-N182G-Postscript-Sharp.ppd.gz	e0bd6a9385f72e1a235b22b312763c03	290202
-foomatic-20200219-Sharp-MX-2314NR-Postscript-Sharp.ppd.gz	306d61b02e3dd26cb0b24aa1528b159e	340957
-foomatic-20200219-Sharp-MX-2614NR-Postscript-Sharp.ppd.gz	7945e25d9cb256aaf40124cf983fd1fc	340883
-foomatic-20200219-Sharp-MX-M1100-Postscript-Sharp.ppd.gz	162ec2b6a5944eac510874c4a223b26a	292683
-foomatic-20200219-Sharp-MX-M182D-Postscript-Sharp.ppd.gz	23f7fed7df03a9523b6296517ce54d1a	290206
-foomatic-20200219-Sharp-MX-M182-Postscript-Sharp.ppd.gz	c17e4dab5f8a11f1cf866cd012cb256c	290107
-foomatic-20200219-Sharp-MX-M202D-Postscript-Sharp.ppd.gz	92d06db3c6e501e1d3f307d09bcccec9	290206
-foomatic-20200219-Sharp-MX-M260FP-Postscript-Sharp.ppd.gz	c66052661137400524045265ee6f7273	291199
-foomatic-20200219-Sharp-MX-M260-Postscript-Sharp.ppd.gz	9583bf20d04d988212452e302759403f	291203
-foomatic-20200219-Sharp-MX-M264NR-Postscript-Sharp.ppd.gz	03d8c3eb81a9b58e830c3008f2cbf427	291983
-foomatic-20200219-Sharp-MX-M860-Postscript-Sharp.ppd.gz	615f84d770b22bda562387359e17351a	292610
+foomatic-20200219-Samsung-CLP-610-pxlcolor.ppd.gz	841f926bcf703837272e12aeb2e4bed5	144632
+foomatic-20200219-Sharp-AJ-1800-pcl3.ppd.gz	5db1f7a16ede4b062037e7fdf64195ad	66986
 foomatic-20200219-Star-NX-1001-eps9mid.ppd.gz	05d044bc2fc5896ad3f936ebacbc8cfb	10272
 foomatic-20200219-Star-StarJet_48-sj48.ppd.gz	96868a362e6c8ee34840a7707ed350b3	51362
 foomatic-20200219-Tektronix-4693d-t4693dX.ppd.gz	72424c8fe1a73b2325dfc1d5e9a33b55	1871123
 foomatic-20200219-Tektronix-4695-tek4696.ppd.gz	bd6570ca9f9b9c4a862d7c159a1455a7	34901
 foomatic-20200219-Tektronix-4696-tek4696.ppd.gz	bd6570ca9f9b9c4a862d7c159a1455a7	34901
-foomatic-20200219-Tektronix-Phaser_350-Postscript.ppd.gz	8cdc60ab70bbc969d7dacb9d8000951a	333500
-foomatic-20200219-Toshiba-e-Studio_205-Postscript-Toshiba.ppd.gz	ea81e238fba6a53de825aeed8b035b67	136988
-foomatic-20200219-Toshiba-e-Studio_282-Postscript-Toshiba.ppd.gz	0966a9dc0dd4e7c441cc749f5a12136a	138390
-foomatic-20200219-Toshiba-e-Studio_3510c-Postscript-Toshiba.ppd.gz	72bfbab9bb03a59ca1655f20c9b77228	183884
-foomatic-20200219-Toshiba-e-Studio_451c-Postscript-Toshiba.ppd.gz	ecab419cd832c7fe64f91aea63e2e5fc	186566
-foomatic-20200219-Toshiba-e-Studio_452-Postscript-Toshiba.ppd.gz	25c4e6fd98e6cffa131cecb2a960934a	138318
-foomatic-20200219-Toshiba-e-Studio_850-Postscript-Toshiba.ppd.gz	208668cc0152dde8a323849dba489451	138052
-foomatic-20200219-Toshiba-GL-1010-Postscript-Toshiba.ppd.gz	d66816498195c7a3dca886eb87c6c91b	135725
-foomatic-20200219-Toshiba-GL-1020-Postscript-Toshiba.ppd.gz	b99319a77d294bafb19d82c23449d5be	135765
 foomatic-20200219-Xerox-DocuPrint_XJ8C-lxm5700m.ppd.gz	e326ce8ef6f2f49e29a0705490765480	201023
-foomatic-20200219-Xerox-Phaser_3160N-pxlcolor.ppd.gz	03e1e86b1a8a9930fdb05a4e28cd56bd	144627
-fuji_xerox-20200402-fuji-xerox-20200402-fx-apeosportv-c3375.ppd.gz	0b056b64bd3b3aebc2311b41703947fc	334646
-hp-20171121-hplip-3.17.10-hp-color_laserjet-ps.ppd.gz	a8220b1ab205533b64e4e0adad148874	334148
-hp-20171121-hplip-3.17.10-hp-deskjet_f4210_series.ppd.gz	6e09668662f1cea8a00d3d5129165841	151178
-hp-20171121-hplip-3.17.10-hp-laserjet_4-ps.ppd.gz	d446b58f90f5beb9cd98370ef7e702cb	288493
-hp-20171121-hplip-3.17.10-hp-laserjet_4si-ps.ppd.gz	eb7e1fcf3795a77b062e713612db13fa	288546
-hp-20171121-hplip-3.17.10-hp-laserjet_4v-ps.ppd.gz	bcd4024e5ba00c542a942ecc4ca1481c	289122
-hp-20171121-hplip-3.17.10-hp-laserjet_6p-ps.ppd.gz	e9b0f81ffdac879465d8d60bfadd471c	289018
+foomatic-20200219-Xerox-Phaser_3160N-pxlcolor.ppd.gz	841f926bcf703837272e12aeb2e4bed5	144632
 hp-20171121-hplip-3.17.10-hp-laserjet_p1505n-pcl3.ppd.gz	966c7772fa252eeca38be2e7cd408dcc	87823
-hp-20171121-hplip-3.17.10-hp-laserjet_p2055_series-ps.ppd.gz	7c17ecc5b51b96374fbd838e7d3e0442	288692
-hp-20171121-hplip-3.17.10-hp-laserjet_p4010_series-ps.ppd.gz	65d420e977675d081f50803e9f7f4b09	290174
-hp-20190111-hplip-3.18.12-hp-designjet_z6200_42in_photo-ps.ppd.gz	3b076e1221ab853052231d4089c7fb1f	335885
-hp-20190111-hplip-3.18.12-hp-designjet_z6200_60in_photo-ps.ppd.gz	00351aa3812850192b008ff5864fa525	335885
-hp-20190111-hplip-3.18.12-hp-designjet_z6810ps_42in-ps.ppd.gz	6dc78439509aca27d457c35abece199e	336189
-hp-20190111-hplip-3.18.12-hp-PCL3-Class1B.ppd.gz	886ed995791deb24ab9038e36045e685	150636
-hp-20190918-hplip-3.19.6-hp-Ampere.ppd.gz	e3a6fd2fc7b965c00ccfb23c205b8e34	150636
-hp-20190918-hplip-3.19.6-hp-CLE17.ppd.gz	05d5ab47da934e542706f6dacee7af99	150635
-hp-20190918-hplip-3.19.6-hp-cm8060_mfp_with_edgeline-ps.ppd.gz	e901ea54efdfeab167df79c38c8fa181	336958
-hp-20190918-hplip-3.19.6-hp-color_designjet_xl_3600-ps.ppd.gz	16eb89e78bbbb1e8b78d7ec440927368	335918
-hp-20190918-hplip-3.19.6-hp-color_laserjet_pro_mfp_m277-ps.ppd.gz	1bd3ad28e3795e2c29a3569344716dfd	335339
-hp-20190918-hplip-3.19.6-hp-Copperhead12.ppd.gz	e496710dcbe1cf24cca1094b402a415f	151177
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH15.ppd.gz	5e25eafc77201c9388ceaa3e07dcc46b	150636
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH17.ppd.gz	5e25eafc77201c9388ceaa3e07dcc46b	150636
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH.ppd.gz	61e495d76c8afba2a3fb1e9cd01c5f34	150636
-hp-20190918-hplip-3.19.6-hp-Copperhead.ppd.gz	e496710dcbe1cf24cca1094b402a415f	151177
-hp-20190918-hplip-3.19.6-hp-CopperheadXLP.ppd.gz	86f6d46d81f7e9e44ac1917ede222afc	150636
-hp-20190918-hplip-3.19.6-hp-Corbett.ppd.gz	e3a6fd2fc7b965c00ccfb23c205b8e34	150637
-hp-20190918-hplip-3.19.6-hp-designjet_t2600dr-ps.ppd.gz	53b4e5d3365ffb703e22cb2e45258660	335830
-hp-20190918-hplip-3.19.6-hp-DJ55xx.ppd.gz	438eecdf2c627ea605fdef7f13646dcb	150447
-hp-20190918-hplip-3.19.6-hp-DJ9xxVIP.ppd.gz	9c9625f9819f529a863b2442a11099a4	150446
-hp-20190918-hplip-3.19.6-hp-Gemstone.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150636
-hp-20190918-hplip-3.19.6-hp-Kapan.ppd.gz	c5e0370899766647fd06124f7d3ee206	150647
-hp-20190918-hplip-3.19.6-hp-laserjet_200_color_m251-ps.ppd.gz	5643c6e3fa124b3c19f0e83cb50f7f34	335353
-hp-20190918-hplip-3.19.6-hp-laserjet_m1522_mfp-ps.ppd.gz	dbf68976a60bddbce064cae038ea10d3	288900
-hp-20190918-hplip-3.19.6-hp-laserjet_m2727_mfp_series-ps.ppd.gz	c5d8e76af9e3bae82c3fa894fe6bbe3e	289239
+hp-20190111-hplip-3.18.12-hp-PCL3-Class1B.ppd.gz	886ed995791deb24ab9038e36045e685	150634
+hp-20190918-hplip-3.19.6-hp-Ampere.ppd.gz	e3a6fd2fc7b965c00ccfb23c205b8e34	150635
+hp-20190918-hplip-3.19.6-hp-CLE.ppd.gz	448bba9706e3e844b2fe5db656f5dd7d	150635
+hp-20190918-hplip-3.19.6-hp-CLE17.ppd.gz	05d5ab47da934e542706f6dacee7af99	150632
+hp-20190918-hplip-3.19.6-hp-Copperhead.ppd.gz	e496710dcbe1cf24cca1094b402a415f	151175
+hp-20190918-hplip-3.19.6-hp-Copperhead12.ppd.gz	e496710dcbe1cf24cca1094b402a415f	151175
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH.ppd.gz	61e495d76c8afba2a3fb1e9cd01c5f34	150634
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH15.ppd.gz	5e25eafc77201c9388ceaa3e07dcc46b	150634
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH17.ppd.gz	5e25eafc77201c9388ceaa3e07dcc46b	150634
+hp-20190918-hplip-3.19.6-hp-CopperheadXLP.ppd.gz	86f6d46d81f7e9e44ac1917ede222afc	150634
+hp-20190918-hplip-3.19.6-hp-Corbett.ppd.gz	e3a6fd2fc7b965c00ccfb23c205b8e34	150635
+hp-20190918-hplip-3.19.6-hp-DJ9xxVIP.ppd.gz	9c9625f9819f529a863b2442a11099a4	150444
+hp-20190918-hplip-3.19.6-hp-Gemstone.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150634
+hp-20190918-hplip-3.19.6-hp-Kapan.ppd.gz	c5e0370899766647fd06124f7d3ee206	150645
 hp-20190918-hplip-3.19.6-hp-LJ-Class1.ppd.gz	49973e5f420715719f17455433b989a1	69913
 hp-20190918-hplip-3.19.6-hp-LJ-Class2.ppd.gz	280cbe27f9ddf9d14a291fcecc845ef3	975890
-hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz	a328af2fb56dcf386cb79a64427beac8	554090
+hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz	bd7d5acbdab739b2b88763586ac55709	537872
 hp-20190918-hplip-3.19.6-hp-LJ-Class6.ppd.gz	966c7772fa252eeca38be2e7cd408dcc	87823
-hp-20190918-hplip-3.19.6-hp-Mimas15.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150636
-hp-20190918-hplip-3.19.6-hp-Mimas17.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150636
-hp-20190918-hplip-3.19.6-hp-Mimas.ppd.gz	fae5d108cfc59b0ef6f684478ce1a6b7	151177
-hp-20190918-hplip-3.19.6-hp-MimasTDR.ppd.gz	a744dc673ec1d67a1a422f7ba3cd18a8	109442
-hp-20190918-hplip-3.19.6-hp-OJ7000.ppd.gz	ac7f4dcd6ef29bb9c56264dbf2588519	151178
-hp-20190918-hplip-3.19.6-hp-OJProKx50.ppd.gz	6e09668662f1cea8a00d3d5129165841	151178
-hp-20190918-hplip-3.19.6-hp-postscript-inkjet.ppd.gz	31a6944249e7e51af93fcc1a727c13f0	335274
-hp-20190918-hplip-3.19.6-hp-postscript-laserjet.ppd.gz	d332703dd1cdf33334ce43a833fece09	335538
-hp-20190918-hplip-3.19.6-hp-postscript-laserjet-pro.ppd.gz	fb925c249afec6d15454e81623e95097	335562
-hp-20190918-hplip-3.19.6-hp-PSP100.ppd.gz	447242fa3dd0300a02dc4fdd92a8f14d	151181
-hp-20190918-hplip-3.19.6-hp-PSP470.ppd.gz	729810610340260abea207329e5c5bdb	263703
-hp-20190918-hplip-3.19.6-hp-Pyramid15.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150636
-hp-20190918-hplip-3.19.6-hp-PyramidPlus.ppd.gz	a744dc673ec1d67a1a422f7ba3cd18a8	109442
-hp-20190918-hplip-3.19.6-hp-Pyramid.ppd.gz	0736fbff8a8e4bb34e54b6f7e599f4e4	108772
-hp-20190918-hplip-3.19.6-hp-PyramidRefresh15.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150636
-hp-20190918-hplip-3.19.6-hp-PyramidRefresh17.ppd.gz	05d5ab47da934e542706f6dacee7af99	150635
-hp-20190918-hplip-3.19.6-hp-Python10.ppd.gz	29fc9cd34ae64294d92de8a57c1089bb	151178
-hp-20190918-hplip-3.19.6-hp-Python11.ppd.gz	ac7f4dcd6ef29bb9c56264dbf2588519	151178
-hp-20190918-hplip-3.19.6-hp-Python.ppd.gz	ac7f4dcd6ef29bb9c56264dbf2588519	151178
-hp-20190918-hplip-3.19.6-hp-Saipan15B.ppd.gz	843f704eb090fefdcba8f26019ae0eca	150637
-hp-20190918-hplip-3.19.6-hp-Saipan.ppd.gz	843f704eb090fefdcba8f26019ae0eca	150637
-hp-20190918-hplip-3.19.6-hp-SPDOfficejetProAsize.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150636
-hp-20190918-hplip-3.19.6-hp-SPDOfficejetProBsize.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150636
-hp-20190918-hplip-3.19.6-hp-Stabler.ppd.gz	fae5d108cfc59b0ef6f684478ce1a6b7	151177
-hp-20190918-hplip-3.19.6-hp-StingrayOJ.ppd.gz	ba8545b223b87286cebd49d5b4489e3b	150447
-hp-20190918-hplip-3.19.6-hp-ViperMinusVIP.ppd.gz	9535159e3a6d2a3f49cf07f7ee01ed43	153893
-hp-20190918-hplip-3.19.6-hp-ViperPlusVIP.ppd.gz	fae5d108cfc59b0ef6f684478ce1a6b7	151177
-hplip-20200303-hplip-3.19.12-hp-color_designjet_xl_3600-ps.ppd.gz	ddc6b08926ab6f89d2266aaca21147c3	335783
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2550_series-ps.ppd.gz	feba9ea91f6c6c06e8f5665c85659df3	335237
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2605-ps.ppd.gz	847af0ad26a7474e1382fde30c1c2089	335531
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2700-ps.ppd.gz	f0db433112a70a90c1fe8ee0e0cffab4	335295
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2800-ps.ppd.gz	16612d5465623bb61dab3d72c8271476	335173
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_cm1015-ps.ppd.gz	aecc407659a6b8010c120a51b15e6a79	335248
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_e85055-ps.ppd.gz	a51a0645f2bcd3858099ac262d53f5ab	336335
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_flowmfp_m776-ps.ppd.gz	f861d77212fc68afd7cdd7dc47f415b9	336708
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_m856-ps.ppd.gz	fd54b7a710fdd264e2c72f6e4459581d	336639
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_mfp_m776-ps.ppd.gz	0a6d0697f7a94e1e1f47d1099c0cec09	336936
-hplip-20200303-hplip-3.19.12-hp-designjet_4000ps.ppd.gz	931e730b07d5fc8d0bf675d677b0ef46	335674
-hplip-20200303-hplip-3.19.12-hp-designjet_4500mfp.ppd.gz	f80522a0f9d864265d32e2f0d39d7ee0	335681
-hplip-20200303-hplip-3.19.12-hp-designjet_d5800-ps.ppd.gz	25d9a7eb20a142d30c2907af6037bc09	335558
-hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_24in-ps.ppd.gz	5e44cb16329ec25b9c93221a8764c12b	335680
-hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_44in-ps.ppd.gz	5f8eacb00782226a90a499656413a2d2	335680
-hplip-20200303-hplip-3.19.12-hp-designjet_t1200_postscript-ps.ppd.gz	8f83d2610d56ec141ae6488dcb6be6f9	335691
-hplip-20200303-hplip-3.19.12-hp-designjet_t1300_postscript-ps.ppd.gz	387fc3641d7a20fad56f1f67f059b11f	335814
-hplip-20200303-hplip-3.19.12-hp-designjet_t1500-postscript.ppd.gz	ee57dfc8dd8f8d432bc2ad822d22d62f	336087
-hplip-20200303-hplip-3.19.12-hp-designjet_t1530-postscript.ppd.gz	47306471a0e3592528c5a67efa5e27f1	336125
-hplip-20200303-hplip-3.19.12-hp-designjet_t1600dr-ps.ppd.gz	84c4f417a1bbf889457c0d114c513404	335695
-hplip-20200303-hplip-3.19.12-hp-designjet_t1600_printer-ps.ppd.gz	ae77b10a26a8e162e12ebe2b353141be	335695
-hplip-20200303-hplip-3.19.12-hp-designjet_t1700dr_postscript-ps.ppd.gz	efc477f3957cd84359f6c451230a0f8b	335786
-hplip-20200303-hplip-3.19.12-hp-designjet_t1700_postscript-ps.ppd.gz	a3c1c4a720ccf1005d6d8f9b6b196a66	335775
-hplip-20200303-hplip-3.19.12-hp-designjet_t1708dr_postscript-ps.ppd.gz	3e55931170b4dc955206f9a33b4e0cd4	336073
-hplip-20200303-hplip-3.19.12-hp-designjet_t1708_postscript-ps.ppd.gz	adcda046d090fe854040e62f687e89ff	336064
-hplip-20200303-hplip-3.19.12-hp-designjet_t2300_postscript-ps.ppd.gz	8db40d7addf018886136d70e4ed97875	335814
-hplip-20200303-hplip-3.19.12-hp-designjet_t2500-postscript.ppd.gz	853b42168433a014b62c90a2e1e160d2	336023
-hplip-20200303-hplip-3.19.12-hp-designjet_t2600dr-ps.ppd.gz	2221a219b5f7ada22aeb4b3895ac6219	335695
-hplip-20200303-hplip-3.19.12-hp-designjet_t3500-ps.ppd.gz	57d8cc681ca9f003e7e490877c220e64	335680
-hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps_monochrome-ps.ppd.gz	1e1394baf5af039c402c2d439c776253	289954
-hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps-ps.ppd.gz	e777f9b44f280b2f565466b6e7531778	336095
-hplip-20200303-hplip-3.19.12-hp-designjet_t7200-ps.ppd.gz	523b008d634d32e9a14d281bf2048974	335830
-hplip-20200303-hplip-3.19.12-hp-designjet_t770_postscript-ps.ppd.gz	7d1aae052c4b337991410669a5b9be61	335536
-hplip-20200303-hplip-3.19.12-hp-designjet_t770ps_24in-ps.ppd.gz	90a57dad57f25bdf55445ac2ab8b87f2	335536
-hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_24in-ps.ppd.gz	6ab2fe22fd3e385d91110574705d5cc9	335659
-hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_44in-ps.ppd.gz	f5440bcf4a3cbfea1b5981e79c144ecc	335659
-hplip-20200303-hplip-3.19.12-hp-designjet_t920-postscript.ppd.gz	8af5fcc67719e474329d290e934f9588	335928
-hplip-20200303-hplip-3.19.12-hp-designjet_t930-postscript.ppd.gz	9db6d0d9aef652eb42abbe2a06578eea	335928
-hplip-20200303-hplip-3.19.12-hp-designjet_z5200_postscript-ps.ppd.gz	85102a9646c7c9d3103963463ee6f5fb	337033
-hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_42in_photo-ps.ppd.gz	b6849be5030a29e7093c93a8d2a1d5ce	335897
-hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_60in_photo-ps.ppd.gz	b43b0ade9737eff3113f3c438b39e7e3	335897
-hplip-20200303-hplip-3.19.12-hp-designjet_z6600-postscript.ppd.gz	cb6e651dae206c8910c48b2840c834ab	335792
-hplip-20200303-hplip-3.19.12-hp-designjet_z6610ps_60in-ps.ppd.gz	61563ce329840f6d102116b0902117d0	336041
-hplip-20200303-hplip-3.19.12-hp-designjet_z6800_photo-postscript.ppd.gz	4a09de3000a48bebf51c3c275e254c69	335792
-hplip-20200303-hplip-3.19.12-hp-designjet_z6810ps_60in-ps.ppd.gz	4c648c03fe7cb4aa0ae7d2abcc381e32	336189
-hplip-20200303-hplip-3.19.12-hp-deskjet_3420.ppd.gz	eca0a38f04b91db0d9a58a86fdf16de6	5392105
-hplip-20200303-hplip-3.19.12-hp-deskjet_3700_series.ppd.gz	448bba9706e3e844b2fe5db656f5dd7d	150638
-hplip-20200303-hplip-3.19.12-hp-deskjet_950c.ppd.gz	ce752c8ba4ec4e4a17db936eec7980f8	234383
-hplip-20200303-hplip-3.19.12-hp-deskjet_d1600_series.ppd.gz	a1d541296bcfd8651d5f7f766b228230	7687460
-hplip-20200303-hplip-3.19.12-hp-deskjet_d2600_series.ppd.gz	d8a5d1828e944a3fa016c5f1c102a319	8028340
-hplip-20200303-hplip-3.19.12-hp-deskjet_d4100_series.ppd.gz	89b03f358f082b1f0a56b4facb729c55	8131916
-hplip-20200303-hplip-3.19.12-hp-deskjet_f300_series.ppd.gz	eca0a38f04b91db0d9a58a86fdf16de6	5392105
-hplip-20200303-hplip-3.19.12-hp-DJGenericVIP.ppd.gz	a36f55f7b28e60e34610ecb6adeff845	151180
-hplip-20200303-hplip-3.19.12-hp-laserjet_100_color_mfp_m175-ps.ppd.gz	e4c05fe7f493b65cdfcef4e5c02510c0	335270
-hplip-20200303-hplip-3.19.12-hp-officejet_4300_series.ppd.gz	eca0a38f04b91db0d9a58a86fdf16de6	5392105
-hplip-20200303-hplip-3.19.12-hp-pagewide_p55250-ps.ppd.gz	9b70f3bbbae7710a4a517f67b1ea4a59	335017
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_3900ps_mfp-ps.ppd.gz	3d629b8c235136fd63cfe20661c1ff74	335264
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4000ps-ps.ppd.gz	c6bf34b275057b99542dd39ce87905a8	335834
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4100ps-ps.ppd.gz	abd3901a614e6e57e26ab9a536bcdc2e	335406
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps_blueprinter-ps.ppd.gz	3c3e1515a830c41892cc052321a95a07	335702
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps-ps.ppd.gz	22d3043c6bb97954cc532837c33ed7b1	335834
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp_blueprinter-ps.ppd.gz	f891b4c7c7ccf6e462e0138f5e931be6	335702
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp-ps.ppd.gz	4e907a9284cd0be686add9e23e27bc7a	335834
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps-ps.ppd.gz	2301eb56ecab78e3181f99a065d5b5ee	335834
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps_blueprinter-ps.ppd.gz	f0bfab5d9c04e6d7082be7a5005df88d	335702
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps-ps.ppd.gz	d111fe70255623befa2581add9ad31f5	335834
-hplip-20200303-hplip-3.19.12-hp-photosmart_a530_series.ppd.gz	a59494820dbf825195716601c00dc2ba	378412
-konica_minolta-20200331-konica-minolta-20200331-konica-minolta-226i.ppd.gz	25bba93d6b1f9daaeaaba2fc0698c1ad	287979
-konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c226.ppd.gz	965fa75fe09ff60388019366b7bb38db	333310
-kyocera-20180809-Kyocera_TASKalfa_3051ci.ppd.gz	73b357a77b8f2c69850b9dd8a332e315	337558
-kyocera-20190328-Kyocera_CS_2551ci.ppd.gz	308bb52da0f0b103bba920c87b10a454	334593
-kyocera-20190328-Kyocera_CS_2552ci.ppd.gz	308bb52da0f0b103bba920c87b10a454	334593
-kyocera-20190328-Kyocera_CS_3010i.ppd.gz	f6c9994e2fd052210b389aec788a4659	289210
-kyocera-20190328-Kyocera_CS_3011i.ppd.gz	d9702f93d8c42c52d55d301050d5a6fe	289210
-kyocera-20190328-Kyocera_CS_306ci.ppd.gz	2435043280b0f4ad0f840f4c771fa4ce	334593
-kyocera-20190328-Kyocera_CS_307ci.ppd.gz	3e1f06313ec740ecc988a6c78cce96af	334593
-kyocera-20190328-Kyocera_CS_3212i.ppd.gz	27c1c6773a2dda41e24f2f6d7e3cd8f3	289210
-kyocera-20190328-Kyocera_CS_4002i.ppd.gz	02b5066a9920ad7fac80fdd4ab2adfa3	289210
-kyocera-20190328-Kyocera_CS_7002i.ppd.gz	02b5066a9920ad7fac80fdd4ab2adfa3	289210
-kyocera-20190328-Kyocera_CS_7052ci.ppd.gz	c6f36179f2edb20c6096a1e57666357e	334593
-kyocera-20190328-Kyocera_CS_9002i.ppd.gz	d236bf130b0bfc7298b233d7abdc3687	289210
-kyocera-20190328-Kyocera_ECOSYS_M2030dn.ppd.gz	d06e7821723409fe1110248190a50016	289210
-kyocera-20190328-Kyocera_ECOSYS_M2035dn.ppd.gz	9a42362458ca3caa36e7833ddbf37552	289210
-kyocera-20190328-Kyocera_ECOSYS_M2040dn.ppd.gz	791470071aa0d3572b601a7d1125d24c	289210
-kyocera-20190328-Kyocera_ECOSYS_M2235dn.ppd.gz	a20853748026037dec87aa3b5784fdad	289210
-kyocera-20190328-Kyocera_ECOSYS_M3040dn.ppd.gz	d35bf8f0859f21f9ec62eab3e0bd4f35	289210
-kyocera-20190328-Kyocera_ECOSYS_M3145dn.ppd.gz	d35bf8f0859f21f9ec62eab3e0bd4f35	289210
-kyocera-20190328-Kyocera_ECOSYS_M4028idn.ppd.gz	f145ba6c203cdb7687b1827ab1877ca6	289210
-kyocera-20190328-Kyocera_ECOSYS_M4125idn.ppd.gz	4017dd12516b77b1da36f5d89ff5e316	289210
-kyocera-20190328-Kyocera_ECOSYS_M5021cdn.ppd.gz	a78828b3ec3859781d561c4605ddd2e3	334593
-kyocera-20190328-Kyocera_ECOSYS_M5521cdn.ppd.gz	6b26c507b0a3d3705aabbda4e34833ec	334593
-kyocera-20190328-Kyocera_ECOSYS_M6026cdn.ppd.gz	89ad10891707d2c5fd49d21d32855455	334593
-kyocera-20190328-Kyocera_ECOSYS_M6230cidn.ppd.gz	e8ddaf1e99a5700c82386d34c60a4dba	334593
-kyocera-20190328-Kyocera_ECOSYS_M8024cidn.ppd.gz	c8cf78d7926c437c5d03a6226ec8b5e1	334593
-kyocera-20190328-Kyocera_ECOSYS_M8124cidn.ppd.gz	c8cf78d7926c437c5d03a6226ec8b5e1	334593
-kyocera-20190328-Kyocera_ECOSYS_P2035d.ppd.gz	f8f59921974eaff8f75ccc5664a2d053	289210
-kyocera-20190328-Kyocera_ECOSYS_P3045dn.ppd.gz	b8587dfb6adfa57cb144f17d0bdd61a4	289210
-kyocera-20190328-Kyocera_ECOSYS_P4035dn.ppd.gz	487abeecba6d64787f82d1843dd54943	289210
-kyocera-20190328-Kyocera_ECOSYS_P6026cdn.ppd.gz	ef8cd85fe8884b459619b6c532377d63	334593
-kyocera-20190328-Kyocera_ECOSYS_P8060cdn.ppd.gz	e0aa4fec5d08f7832868803a49b5c5ab	334593
-kyocera-20190328-Kyocera_FS-5040DN.ppd.gz	05cca8637c402ac89fe0efd72d0bfd17	289210
-kyocera-20190328-Kyocera_TASKalfa_4020i.ppd.gz	cfeb79b8fbac3546520ec16b2c5e49e9	289210
-kyocera-20190328-Kyocera_TASKalfa_406ci.ppd.gz	0bd114528f0414ad525ce7174beb5959	334593
-kyocera-20190328-Kyocera_TASKalfa_4500i.ppd.gz	789a96368b9dbe8664676be47485b38d	289210
-kyocera-20200211-Kyocera_TASKalfa_7003i.ppd.gz	2d3f95781b1067a487cda8880032b478	289210
-kyocera-20200416-Kyocera_CS_205c.ppd.gz	06dbfeb1c2d78b05c546e50da3b7a2d7	334593
-kyocera-20200416-Kyocera_CS_250ci.ppd.gz	ebd24155a45e72a3c16635141ef2de51	334593
-kyocera-20200416-Kyocera_CS_2550ci.ppd.gz	c4509dba570541881303ab776b4ba9ce	334593
-kyocera-20200416-Kyocera_CS_2553ci.ppd.gz	987695f68bd3801640f82137ad52d80e	334593
-kyocera-20200416-Kyocera_CS_2554ci.ppd.gz	b7895bea59e2a147e7cf557fd79f31a8	334784
-kyocera-20200416-Kyocera_CS_255.ppd.gz	1398a60793714c85ed926089cb94fb34	289210
-kyocera-20200416-Kyocera_CS_300i.ppd.gz	d9a4a6701f5f26d2d04a6da1b88a4c38	289210
-kyocera-20200416-Kyocera_CS_3050ci.ppd.gz	46a18a0a4c03ed4a378110771fbc6feb	334593
-kyocera-20200416-Kyocera_CS_308ci.ppd.gz	481e4bf103cd66c6945609b086f2bd7b	334593
-kyocera-20200416-Kyocera_CS_3500i.ppd.gz	6dc7d36b52fe41ba00ad4f167aa4a578	289210
-kyocera-20200416-Kyocera_CS_4003i.ppd.gz	2a68ccd847cfda3fbe0c1ea380481443	289210
-kyocera-20200416-Kyocera_CS_6500i.ppd.gz	176e1dc1520ca04c969cf6b1a9075753	289210
-kyocera-20200416-Kyocera_CS_6550ci.ppd.gz	1d9ec74bddb43e07b940184cc6af649e	334593
-kyocera-20200416-Kyocera_CS_7003i.ppd.gz	f4885aededc77d94410bc8c17fc52331	289210
-kyocera-20200416-Kyocera_CS_7353ci.ppd.gz	df1991d62c1a75eef4b5ab38fd5d31e4	334593
-kyocera-20200416-Kyocera_CS_9003i.ppd.gz	feedb3ca4871e0890d4437c8fa85dda5	289210
-kyocera-20200416-Kyocera_ECOSYS_M3860idnf.ppd.gz	c072d75158b8d10c1a4614d775e00128	289210
-kyocera-20200416-Kyocera_ECOSYS_M3860idn.ppd.gz	2cd8b6f5b2bb97badf958efebf68c804	289210
-kyocera-20200416-Kyocera_ECOSYS_P3260dn.ppd.gz	c072d75158b8d10c1a4614d775e00128	289210
-kyocera-20200416-Kyocera_ECOSYS_P4135dn.ppd.gz	3947feae8b1c4d2623a9862e38e2e889	289210
-kyocera-20200416-Kyocera_ECOSYS_P5018cdn.ppd.gz	23384fb9e29a084bff2181f1c800234f	334593
-kyocera-20200416-Kyocera_FS-1028MFP.ppd.gz	228bd454dc9fccffb29803c13e171c7b	289210
-kyocera-20200416-Kyocera_FS-1030MFP.ppd.gz	5387c3272e15d9a24c64c4d207776223	289210
-kyocera-20200416-Kyocera_FS-1035MFP.ppd.gz	5387c3272e15d9a24c64c4d207776223	289210
-kyocera-20200416-Kyocera_FS-1120D.ppd.gz	99c78da71616c9f0078a7a38bfd9d464	289210
-kyocera-20200416-Kyocera_FS-2020D.ppd.gz	99c78da71616c9f0078a7a38bfd9d464	289210
-kyocera-20200416-Kyocera_FS-2100D.ppd.gz	f4924d6b0deea97cafb4b5762592a796	289210
-kyocera-20200416-Kyocera_FS-3540MFP.ppd.gz	3e3f54844ce50328730aa039ea205270	289210
-kyocera-20200416-Kyocera_FS-3920DN.ppd.gz	b243074eda69a3c9a3e27e920e4f170b	289210
-kyocera-20200416-Kyocera_FS-4100DN.ppd.gz	b243074eda69a3c9a3e27e920e4f170b	289210
-kyocera-20200416-Kyocera_FS-6970DN.ppd.gz	887d754951955e60ae2d640481cd589a	289210
-kyocera-20200416-Kyocera_FS-9130DN.ppd.gz	006cb38303174904cbb2abf7fe0abcb1	289210
-kyocera-20200416-Kyocera_FS-C2026MFP.ppd.gz	0abd208311508ec3ecba34e61cb478d9	334593
-kyocera-20200416-Kyocera_FS-C2026MFP+.ppd.gz	69a72d275a5ddcad65068e9b44efa6a1	334593
-kyocera-20200416-Kyocera_FS-C5250DN.ppd.gz	83aa3eb87cd86e9e705f520ced64775b	334593
-kyocera-20200416-Kyocera_FS-C8600DN.ppd.gz	aa29edd068c9f830a37f3c072fddf160	334593
-kyocera-20200416-Kyocera_TASKalfa_3060ci.ppd.gz	d0fc4bd75a53cc472d67b753edd2394d	334593
-kyocera-20200716-Kyocera_ECOSYS_M2540dwJ.ppd.gz	179c07826e2d02342df884a965e25e2c	289206
-kyocera-20200716-Kyocera_ECOSYS_M3645idnJ.ppd.gz	e2a963b35ebb5dc8c0b93d10b6e09805	289088
-kyocera-20200716-Kyocera_ECOSYS_M4226idn.ppd.gz	6395b71191e7387337b3b43b5fc55dcd	289210
-kyocera-20200716-Kyocera_ECOSYS_M6635cidnJ.ppd.gz	d93b1bb8e307e50adae949fe43c31529	334589
-kyocera-20200716-Kyocera_ECOSYS_M8224cidn.ppd.gz	f0f8ee3ce24d4aca7a4c97ce56a8639c	334593
-kyocera-20200716-Kyocera_ECOSYS_P3060dnJ.ppd.gz	af11008c4ed85d189162ec6a677b9d33	289206
-kyocera-20200716-Kyocera_ECOSYS_P3145dnJ.ppd.gz	88c109d74089800885d1336670099e50	289206
-kyocera-20200716-Kyocera_ECOSYS_P3145dn.ppd.gz	627778a5c52b5440a0908470732946b5	289210
-kyocera-20200716-Kyocera_ECOSYS_P4040dnJ.ppd.gz	630ca4f1958412ee6066bc6fe675280b	289206
-kyocera-20200716-Kyocera_ECOSYS_P4060dnJ.ppd.gz	630ca4f1958412ee6066bc6fe675280b	289206
-kyocera-20200716-Kyocera_ECOSYS_P4140dnJ.ppd.gz	2ebc0e475e264670f651819b186d5c20	289206
-kyocera-20200716-Kyocera_ECOSYS_P8060cdnJ.ppd.gz	3d8b72f0e19cea02c725ff2cbdcfd931	334589
-kyocera-20200716-Kyocera_TASKalfa_2460ciJ.ppd.gz	efc968dfa9237ee9c1ac10ac08afcffc	334589
-kyocera-20200716-Kyocera_TASKalfa_2510iJ.ppd.gz	665053017435ebd180472cbb8e20f7d2	289206
-kyocera-20200716-Kyocera_TASKalfa_2553ciJ.ppd.gz	781644040ed03aac88a80d3ac0c39ee8	334589
-kyocera-20200716-Kyocera_TASKalfa_2553ci.ppd.gz	ee09fef2631b7c01cb6e20f3b63ddafc	334593
-kyocera-20200716-Kyocera_TASKalfa_308ci.ppd.gz	12ba29e561b591aa6f59d914d0e032ee	334593
-kyocera-20200716-Kyocera_TASKalfa_3212iJ.ppd.gz	6b2ee05a9aebfdeb03fd3c7e88cb1364	289087
-kyocera-20200716-Kyocera_TASKalfa_352ci.ppd.gz	12ba29e561b591aa6f59d914d0e032ee	334592
-kyocera-20200716-Kyocera_TASKalfa_358ciJ.ppd.gz	a3e09d75b6160b5ff881cd01f63533ac	334588
-kyocera-20200716-Kyocera_TASKalfa_4003i.ppd.gz	a6c4f0399038c21c28ee4cdb71da333f	289209
-kyocera-20200716-Kyocera_TASKalfa_4012iJ.ppd.gz	c8ab26a20d3c850f8629a5cb8d27f7ab	289087
-kyocera-20200716-Kyocera_TASKalfa_5003iJ.ppd.gz	46bbb7dcac398c3f5726c2e86f416168	289205
-kyocera-20200716-Kyocera_TASKalfa_7003iJ.ppd.gz	a0cc168e9cae56957104163e84873bf4	289205
-kyocera-20200716-Kyocera_TASKalfa_7353ciJ.ppd.gz	efa5c3df0b91c69a97d1dba679c9a96c	334588
-kyocera-20200716-TA_P-4531_MFP.ppd.gz	e3cf71a7f61161cec3e054dadfac38d7	289209
-lanier-20190916-Lanier-IM_550_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33457
-lanier-20190916-Lanier-IM_600SR_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33527
-lanier-20190916-Lanier-P_800_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33457
-lanier-20190916-Lanier-P_C600_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33529
-lexmark-20200918-Lexmark_X651de.ppd.gz	36730492df43d77b4a646f8585bbf138	334911
-lexmark-20200918-Lexmark_X658de.ppd.gz	5d981b83f11e1ebc3dc6753805968541	335294
-lexmark-20201101-Lexmark_6500e_Series.ppd.gz	f4dad8f2cd473169c328c43a71a4d5ba	335421
-lexmark-20201101-Lexmark_B2300_Series.ppd.gz	d03b5593148e534f352fafa261f603b8	290038
-lexmark-20201101-Lexmark_C2200_Series.ppd.gz	24279b08c5d272d20f4f883715b39aa3	336345
-lexmark-20201101-Lexmark_C2300_Series.ppd.gz	8cb05da0f2c74610c0a3715e39381e75	336122
-lexmark-20201101-Lexmark_C2400_Series.ppd.gz	014f1fee3ae6617b6daa5617ae74212e	336275
-lexmark-20201101-Lexmark_C3400_Series.ppd.gz	8cedd3e20598155d7c79fee6c856fc6b	335901
-lexmark-20201101-Lexmark_C740_Series.ppd.gz	44cd1981b2f98aaede260ba1ffa3f798	335679
-lexmark-20201101-Lexmark_C790_Series.ppd.gz	f254dd425be4b1b568c1e51f293d75bc	336147
-lexmark-20201101-Lexmark_C9200_Series.ppd.gz	b54f9dbe9e2ffb0071f24ac80c7315ab	337161
-lexmark-20201101-Lexmark_C920_Series.ppd.gz	bf812639e9e83aaa0eae6e4afafbd2cd	335672
-lexmark-20201101-Lexmark_C950_Series.ppd.gz	d08fc551239e834cab763fbaa8197f6b	336212
-lexmark-20201101-Lexmark_CS310_Series.ppd.gz	32fad0d149b8d29a1055458f40370ae1	336041
-lexmark-20201101-Lexmark_CS410_Series.ppd.gz	188cd809019ada99f9b47d5e7701c62b	336182
-lexmark-20201101-Lexmark_CS510_Series.ppd.gz	d738a88507298e3cc9812e9152d59687	335736
-lexmark-20201101-Lexmark_CS720_Series.ppd.gz	c21724ff76227fdda8323dc16e75be7e	336266
-lexmark-20201101-Lexmark_CS820_Series.ppd.gz	8314d7d2fab650a173cd7cfec41de981	336866
-lexmark-20201101-Lexmark_CS920_Series.ppd.gz	63beb4d71cdf6a99e989bd83c36baa7a	337161
-lexmark-20201101-Lexmark_CX310_Series.ppd.gz	4339c7bc8649e3c65579c68ea687d956	335736
-lexmark-20201101-Lexmark_CX410_Series.ppd.gz	960947b4d3c4ef46c0c1fa7619935c23	336517
-lexmark-20201101-Lexmark_CX420_Series.ppd.gz	54bde2711c5daf7d23ab4e2b01b793ff	336275
-lexmark-20201101-Lexmark_CX430_Series.ppd.gz	8bb6d7488df3913aa23dc183ddfab44d	335901
-lexmark-20201101-Lexmark_CX510_Series.ppd.gz	a44e010f32b733be4b170dc8767bdee0	290475
-lexmark-20201101-Lexmark_CX625_Series.ppd.gz	ae46ba57ff56c6d602a5dd4906709231	336345
-lexmark-20201101-Lexmark_CX725_Series.ppd.gz	c6c124f26930e04ab62e00aac9cc614a	336266
-lexmark-20201101-Lexmark_CX820_Series.ppd.gz	7661d967b5bebbe3b8515c53e08a55e0	336866
-lexmark-20201101-Lexmark_CX825_Series.ppd.gz	a40ab8d480fb72bb3349d77e75593748	337221
-lexmark-20201101-Lexmark_CX920_Series.ppd.gz	08c43b4a2b914ec95ef55729a16f8b7b	337160
-lexmark-20201101-Lexmark_M1100_Series.ppd.gz	d839b0b7522abce42e03a5f450724b41	289756
-lexmark-20201101-Lexmark_M3100_Series.ppd.gz	58913846f15216ce557589ef1278cb22	289735
-lexmark-20201101-Lexmark_M5100_Series.ppd.gz	fcaa62c58cfbdfee8f8716c1e1751197	290143
-lexmark-20201101-Lexmark_MB2300_Series.ppd.gz	3a73b199c2b01965a26978cb7c4ecc4d	290038
-lexmark-20201101-Lexmark_MC2300_Series.ppd.gz	cf78b600d4decea3923c99dab57b6eba	336122
-lexmark-20201101-Lexmark_MS310_Series.ppd.gz	16f748fadcbfcab17acd84fe7774635b	290303
-lexmark-20201101-Lexmark_MS410_Series.ppd.gz	ba15bc0bcfa9a503972b1d9f70ea71cf	289716
-lexmark-20201101-Lexmark_MS510_Series.ppd.gz	5f0c3bb0ec5b08cce5960dac3e50aaad	334929
-lexmark-20201101-Lexmark_MS610_Series.ppd.gz	571079a3ae6b744cdc370ca7effd092f	334929
-lexmark-20201101-Lexmark_MS620_Series.ppd.gz	f9ee5fe91f646486bfcf9e341b5140fd	290178
-lexmark-20201101-Lexmark_MS710_Series.ppd.gz	667ba96f758204483c077c995edc90a7	335310
-lexmark-20201101-Lexmark_MS725_Series.ppd.gz	1c1d72f16e8dcad3e002d216b1d9332a	291585
-lexmark-20201101-Lexmark_MS810_Series.ppd.gz	0d92d5907b06814b424b874b9baa07ce	335310
-lexmark-20201101-Lexmark_MS820_Series.ppd.gz	db4dce3356a1b312941684acce1d5295	291585
-lexmark-20201101-Lexmark_MX310_Series.ppd.gz	3aea6800e26749fa3f61c17af997b5b1	290304
-lexmark-20201101-Lexmark_MX410_Series.ppd.gz	39591f7cf637942601e98bd3d2909f17	334929
-lexmark-20201101-Lexmark_MX510_Series.ppd.gz	294e5e7c9f978262d0d15212b269e8fa	334929
-lexmark-20201101-Lexmark_MX520_Series.ppd.gz	dcd5b24d569cf4d64c0933e79f0eeb3d	290178
-lexmark-20201101-Lexmark_MX610_Series.ppd.gz	d22c58bf03d6af3e1f2c973711b4fed2	335087
-lexmark-20201101-Lexmark_MX620_Series.ppd.gz	2ae9ca64ce748f03051ff96637004971	290324
-lexmark-20201101-Lexmark_MX6500e_Series.ppd.gz	330569527454fdf42fab573a8311906f	290098
-lexmark-20201101-Lexmark_MX710_Series.ppd.gz	4f153ddc973829952b538793352ead00	334914
-lexmark-20201101-Lexmark_MX720_Series.ppd.gz	0ba304cc713867d1a29dda65811eca5b	290318
-lexmark-20201101-Lexmark_MX725_Series.ppd.gz	7fdd6a65b5c37385ab00db0271832d36	290318
-lexmark-20201101-Lexmark_MX810_Series.ppd.gz	8e5f398ecd7bc0ae32b15d0cbd3a6565	335372
-lexmark-20201101-Lexmark_MX820_Series.ppd.gz	9e2edbe8d4f40a2005a27bd65a2f00ac	291030
-lexmark-20201101-Lexmark_MX910_Series.ppd.gz	7c8d4016ea8a9bfd980739928ce3dcc6	290558
-lexmark-20201101-Lexmark_X548_Series.ppd.gz	176c2aa0a58572da7cf5cb1ddb194ec5	335753
-lexmark-20201101-Lexmark_X740_Series.ppd.gz	a04b81cfa91476e3efef862b17d5a72f	335732
-lexmark-20201101-Lexmark_X790_Series.ppd.gz	9b37525ecf585e07cf28c8c7e4eaa8cb	336200
-lexmark-20201101-Lexmark_X920_Series.ppd.gz	d4bf4e61ff15ac6949afab0b23670b03	335725
-lexmark-20201101-Lexmark_X950_Series.ppd.gz	077d06d740bcdae7fd7970ef740f547b	336265
-lexmark-20201101-Lexmark_XC2100_Series.ppd.gz	eff7ca51adce1ad2770b9aa9d9a21be5	335785
-lexmark-20201101-Lexmark_XC9200_Series.ppd.gz	90d589c830c0b1215b14db884b164820	337161
-lexmark-20201101-Lexmark_XM1100_Series.ppd.gz	7b60d6e1b788df19533e5e0d67e7c918	289732
-lexmark-20201101-Lexmark_XM3100_Series.ppd.gz	b7324b6b929ed9db9696fdcf118768e3	289890
-lexmark-20201101-Lexmark_XM5100_Series.ppd.gz	56da75fd205e2181b0d1ab5f8868238e	289732
-lexmark-20201101-Lexmark_XM7100_Series.ppd.gz	70b0f9d87f29f3ba74bf02df11130843	290228
-oki-20200129-oki-c542-ps.ppd.gz	bce122e5026197a953d62f0a90c313ed	340295
-oki-20200329-ES8434-PS.ppd.gz	ee4ca9aaf1f46e9cddd782f7ada5eaec	340371
-oki-20200329-OKB432_a.ppd.gz	550edaa62eb45b825886c8ce638fcb8c	292699
-oki-20200329-OKB512_a.ppd.gz	27902e3c710c891b183a5ca4cb32666d	292699
-oki-20200329-OKB841_a110.ppd.gz	de8d046f390efcf3fedce5061ab7eff6	291090
-oki-20200329-OKI-C332-PS.ppd.gz	66d3e4440cc52f5db4f1ddfa63ea5af2	340295
-oki-20200329-OKI-C612-PS.ppd.gz	11c6f0c8ed50f9960a1a65ee941e05fe	340434
-oki-20200329-OKI-C712-PS.ppd.gz	03a7afd67e4a19ebee137842daac4142	340515
-oki-20200329-OKI-C833-PS.ppd.gz	2463a6ad57838cf72bd391b3fcb82367	340434
-oki-20200329-OKI-C843-PS.ppd.gz	5b40df630de262548388765d34854438	340434
-oki-20200329-OKI-C844-PS.ppd.gz	b109b1e496af6723bf8382be1ad3faf2	340371
-oki-20201022-ES6450_PS.ppd.gz	6455a14aa69c21a1a75abbe159c189c9	340295
-oki-20201022-OKI_MC843_PS.ppd.gz	f9be054e72cc80ec22ff7a7eb719aba7	340214
-oki-20201022-OKI_MC853_PS.ppd.gz	51ebd63657beb7e3664aa44a69257311	340294
-oki-20201022-OKI_MC883_PS.ppd.gz	dc278203d4293a795f831faa65701d1d	340294
-ricoh-20190916-Ricoh-IPSiO_SP_3400L_PXL.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48456
-ricoh-20190916-Ricoh-IPSiO_SP_3510SF_PXL.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48456
-ricoh-20190916-Ricoh-M_C250FWB_PS.ppd.gz	f3acc74bcfdcc6bc995ba244c8f5191d	336651
+hp-20190918-hplip-3.19.6-hp-Mimas.ppd.gz	fae5d108cfc59b0ef6f684478ce1a6b7	151175
+hp-20190918-hplip-3.19.6-hp-Mimas15.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150634
+hp-20190918-hplip-3.19.6-hp-Mimas17.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150634
+hp-20190918-hplip-3.19.6-hp-MimasTDR.ppd.gz	a744dc673ec1d67a1a422f7ba3cd18a8	109440
+hp-20190918-hplip-3.19.6-hp-Pyramid.ppd.gz	0736fbff8a8e4bb34e54b6f7e599f4e4	108770
+hp-20190918-hplip-3.19.6-hp-Pyramid15.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150634
+hp-20190918-hplip-3.19.6-hp-PyramidPlus.ppd.gz	a744dc673ec1d67a1a422f7ba3cd18a8	109440
+hp-20190918-hplip-3.19.6-hp-PyramidRefresh15.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150634
+hp-20190918-hplip-3.19.6-hp-PyramidRefresh17.ppd.gz	05d5ab47da934e542706f6dacee7af99	150632
+hp-20190918-hplip-3.19.6-hp-SPDOfficejetProAsize.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150633
+hp-20190918-hplip-3.19.6-hp-SPDOfficejetProBsize.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150634
+hp-20190918-hplip-3.19.6-hp-Saipan.ppd.gz	843f704eb090fefdcba8f26019ae0eca	150635
+hp-20190918-hplip-3.19.6-hp-Saipan15B.ppd.gz	843f704eb090fefdcba8f26019ae0eca	150635
+hp-20190918-hplip-3.19.6-hp-Stabler.ppd.gz	fae5d108cfc59b0ef6f684478ce1a6b7	151175
+hp-20190918-hplip-3.19.6-hp-ViperMinusVIP.ppd.gz	9535159e3a6d2a3f49cf07f7ee01ed43	153891
+hp-20190918-hplip-3.19.6-hp-ViperPlusVIP.ppd.gz	fae5d108cfc59b0ef6f684478ce1a6b7	151175
+hplip-20201209-hplip-3.20.11-hp-CLE17.ppd.gz	05d5ab47da934e542706f6dacee7af99	150633
+hplip-20201209-hplip-3.20.11-hp-Mimas17.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150634
+hplip-20201209-hplip-3.20.11-hp-PyramidRefresh17.ppd.gz	05d5ab47da934e542706f6dacee7af99	150632
+hplip-20201209-hplip-3.20.11-hp-SPDOfficejetProBsize.ppd.gz	fb3f816cecbd8986df9e4ec70ac1ed9c	150634
+hplip-20201209-hplip-3.20.11-hp-deskjet_3700_series.ppd.gz	448bba9706e3e844b2fe5db656f5dd7d	150636
+hplip-20201209-hplip-3.20.11-hp-deskjet_d1600_series.ppd.gz	a1d541296bcfd8651d5f7f766b228230	7687460
+hplip-20201209-hplip-3.20.11-hp-photosmart_8700_series.ppd.gz	22e2aeb7d9ba64932f8fe9991073e62e	151176
+hplip-20201209-hplip-3.20.11-hp-photosmart_a530_series.ppd.gz	a59494820dbf825195716601c00dc2ba	378413
+lanier-20190916-Lanier-IM_550_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33458
+lanier-20190916-Lanier-IM_600SR_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33528
+lanier-20190916-Lanier-P_800_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33458
+lanier-20190916-Lanier-P_C600_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33530
+ricoh-20190916-Ricoh-IPSiO_SP_3400L_PXL.ppd.gz	a13b115e8059860aae61e83b97898344	48461
+ricoh-20190916-Ricoh-IPSiO_SP_3510SF_PXL.ppd.gz	a13b115e8059860aae61e83b97898344	48461
 ricoh-20190916-Ricoh-MP_C306Z_JPN_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33502
-ricoh-20190916-Ricoh-P_C301SF_PS.ppd.gz	8e73d39b2c35e633052f34010e34f532	336651
-ricoh-20190916-Ricoh-SP_212Nw_PXL.ppd.gz	9d28233eac79f38fc93c127cf635da4c	48456
-ricoh-20190916-Ricoh-SP_2200L_PXL.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48456
-ricoh-20190916-Ricoh-SP_320DN_PXL.ppd.gz	3ef3a04b5b30aacc99ec78b0b4eb7187	48456
+ricoh-20190916-Ricoh-SP_212Nw_PXL.ppd.gz	88c218b8d5c23cc075df916473010948	48461
+ricoh-20190916-Ricoh-SP_2200L_PXL.ppd.gz	a13b115e8059860aae61e83b97898344	48461
+ricoh-20190916-Ricoh-SP_320DN_PXL.ppd.gz	a13b115e8059860aae61e83b97898344	48461
 ricoh-20191121-Infotec-Pro_8200S_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33595
 ricoh-20191121-Infotec-Pro_8210_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33595
 ricoh-20191121-Infotec-Pro_C5200S_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33642
-ricoh-20191121-Infotec-Pro_C7200_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33691
 ricoh-20191121-Infotec-Pro_C7200S_Light_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33691
-ricoh-20191218-SP_C420e_JPN-PostscriptColor-Ricoh.ppd.gz	73602019327081d50b88b9f3bc0f1c76	334825
+ricoh-20191121-Infotec-Pro_C7200_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33691
 ricoh-20200221-Gestetner-IM_C300_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33556
 ricoh-20200221-Lanier-IM_C400SR_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33572
 ricoh-20200221-Ricoh-SP_C342M_JPN_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33504
 ricoh-20200527-Gestetner-GS3021_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33552
 ricoh-20200527-Infotec-Pro_C5300S_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33718
 ricoh-20200527-Lanier-IM_C6500_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33642
-ricoh-20200527-Ricoh-P_6000_JPN.ppd.gz	026b1a4638ae4d4fc9c1a077561596f9	289495
 ricoh-20200821-Infotec-Pro_C5300SL_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33718
 ricoh-20200821-Lanier-IM_C530FB_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33529
 ricoh-20200821-Lanier-IM_C530F_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33599
 ricoh-20200821-Ricoh-IM_C2509J_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33618
 ricoh-20200821-Ricoh-IM_C3509J_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33618
-ricoh-20200821-Ricoh-IM_C6500_JPN.ppd.gz	66f49b93ab0501070332d9aa5b1af4ad	334825
-ricoh-20200930-Ricoh_Generic_PS_Printer.ppd.gz	86a7465a75c2378bf69dbb57a1f3e2fe	337462
-sharp-20180409-Sharp-AR-M452U-ps.ppd.gz	f178ccc5ac7ae05fd2c95e73a575db34	291481
-sharp-20180409-Sharp-MX-2640NR-ps.ppd.gz	3530b489efc79832c6ad3e98c5840920	341082
-sharp-20180409-Sharp-MX-M283N-ps.ppd.gz	e0b540db250d6ea8c82cbf3285366d74	291555
-sharp-20180409-Sharp-MX-M363F-ps-jp.ppd.gz	b3777f1d4ca6b68e582aebff37fd3b33	291474
-sharp-20180409-Sharp-MX-M623N-ps.ppd.gz	1f2396cd49fe83d0632d4d5d523b157c	291403
-sharp-20180409-Sharp-MX-M623-ps-jp.ppd.gz	47db17bcfcc9a0f77220c49314228a6f	291399
-sharp-20190711-Sharp-MX-6240N-ps.ppd.gz	f2eb1a6b56b48c0477109b1842c1c276	344215
-sharp-20190711-Sharp-MX-6500N-ps.ppd.gz	fe6c9ef731b8899533bf0bb60207d620	344952
-sharp-20190711-Sharp-MX-6540FN-ps-jp.ppd.gz	76e62567fa2f121f3a0682d6ce0f7a8b	344211
-sharp-20190711-Sharp-MX-C250-ps.ppd.gz	51bbb7bc395499bd25c5433dfbad8816	339942
-sharp-20190711-Sharp-MX-C301-ps.ppd.gz	c8c7e2a8ef78232fa01e7e25696b40d7	340147
-sharp-20190711-Sharp-MX-M1054-ps.ppd.gz	54cc74361856eea1bdeab61b810b5043	295703
-sharp-20190711-Sharp-MX-M1055-ps.ppd.gz	c8b2b869f397240d82dd677d76baedb8	295703
-sharp-20190711-Sharp-MX-M654FN-ps-jp.ppd.gz	1e40b27a2792c22d9e2ef287eb841196	295561
-sharp-20190711-Sharp-MX-M654N-ps.ppd.gz	8e4a0a660870f764b1a225e702ecf6ad	295565
-sharp-20190711-Sharp-MX-M904-ps.ppd.gz	efdafe3c50703ecc9e8122c8ff377dfe	295703
-sharp-20191219-Sharp-AR-6020D-ps.ppd.gz	9e2fd782241225ceeea1010c02cf5bee	290013
-sharp-20191219-Sharp-AR-6020-ps.ppd.gz	40c69bd7f177b557aad36c08f5fabb87	289914
-sharp-20191219-Sharp-AR-6026N-ps.ppd.gz	2f47588aabc8d6dc00498c753632a670	290013
-sharp-20191219-Sharp-AR-G200-ps-jp.ppd.gz	96263f3f8caa03b019f0db9570880774	289910
-sharp-20191219-Sharp-BP-10C20-ps.ppd.gz	02263d53630bb507d93aa90ccfe5a57a	341285
-sharp-20191219-Sharp-DX-2000U-ps.ppd.gz	5ec6a012452967234c05f14cd742ec9b	340312
-sharp-20191219-Sharp-DX-20C20-ps-jp.ppd.gz	75298ff6bb4ef1dd50cf4cb07835ebf4	341281
-sharp-20191219-Sharp-DX-2500N-ps.ppd.gz	57ac2e9bcf7fe9c276b15ef727e0ba92	340880
-sharp-20191219-Sharp-MX-1800N-ps.ppd.gz	1c9e56a8f8ee245ac1393cccfe70b61b	339729
-sharp-20191219-Sharp-MX-1810U-ps.ppd.gz	8dcbcef0a7262ca4ecbe607faa445679	340758
-sharp-20191219-Sharp-MX-2300FG-ps-jp.ppd.gz	c20f36f5c1e91fbbdfd953e65a979f2a	339801
-sharp-20191219-Sharp-MX-2300G-ps.ppd.gz	2debcb311bfc37199b402da1b454d5f8	339805
-sharp-20191219-Sharp-MX-2301N-ps.ppd.gz	91cb7b6a2f03208ca5f729e84ec06c82	340245
-sharp-20191219-Sharp-MX-2310F-ps-jp.ppd.gz	c321d0713ab9a6a5e021c90d40f9f7e7	340754
-sharp-20191219-Sharp-MX-2514FN-ps-jp.ppd.gz	be126d4b3ef9494ca020875735b8cebc	340878
-sharp-20191219-Sharp-MX-2600FG-ps-jp.ppd.gz	e5ae1f9d0120777f2284399ad96a486c	340387
-sharp-20191219-Sharp-MX-2600G-ps.ppd.gz	628e63c490388cb9ad1f7e9c480d7dc6	340391
-sharp-20191219-Sharp-MX-2610FN-ps-jp.ppd.gz	ae83577cd3a1a5d47268cd420fd05bdc	340900
-sharp-20191219-Sharp-MX-2610N-ps.ppd.gz	90df50e693aedd35e906e56c0bcfa64d	340904
-sharp-20191219-Sharp-MX-2614N-ps.ppd.gz	e829bf5cb49b21c6bb3a99ac260104e0	341307
-sharp-20191219-Sharp-MX-2631-ps-jp.ppd.gz	dffef0af8f30b6127d57dfe48fdb8ebb	344371
-sharp-20191219-Sharp-MX-2640FN-ps-jp.ppd.gz	ca7a5e2524125b06c41c9f18a2eee7bf	341078
-sharp-20191219-Sharp-MX-2651-ps.ppd.gz	b410e1898d0d1ec12ea43ca73aaaa3ae	344382
-sharp-20191219-Sharp-MX-2661-ps-jp.ppd.gz	2cb22ca22c64a9b24e45c5b7d44d79ed	344371
-sharp-20191219-Sharp-MX-3061-ps.ppd.gz	e511622e0ef6697ae43b39fe7aef645e	344382
-sharp-20191219-Sharp-MX-3600FN-ps-jp.ppd.gz	11cf2a45b6fd827869a9c10aa340edbf	340387
-sharp-20191219-Sharp-MX-4100N-ps.ppd.gz	97e20bae8eb4d2f5c98c01796e175c86	340391
-sharp-20191219-Sharp-MX-4110FN-ps-jp.ppd.gz	11399459a92ba83f194af515075681a7	340900
-sharp-20191219-Sharp-MX-4110N-ps.ppd.gz	fd2f8bded623a96e7b964546a6b0683f	340904
-sharp-20191219-Sharp-MX-4140FN-ps-jp.ppd.gz	77c872dfac36c9143c55e7f8b244ef14	341078
-sharp-20191219-Sharp-MX-4140N-ps.ppd.gz	953b6083c91c5e92020f700c644cf3a1	341082
-sharp-20191219-Sharp-MX-5500N-ps.ppd.gz	e4e17fef9117318f78b69aa21c1406e7	339653
-sharp-20191219-Sharp-MX-C303-ps.ppd.gz	ef3859ce649bf5094343c2f96c7870bd	343141
-sharp-20191219-Sharp-MX-C305W-ps-jp.ppd.gz	1c1e0024fc3b59f1678c1b1df44fb65c	343137
-sharp-20191219-Sharp-MX-M264FP-ps-jp.ppd.gz	3cdb15a52d01d55ff0e5b61cc142c79f	291978
-sharp-20191219-Sharp-MX-M264NV-ps.ppd.gz	7d185c019a530238e5d648da5f4db754	291982
-sharp-20191219-Sharp-MX-M265N-ps.ppd.gz	4e52060aa0691d819264a0207d8dca64	292078
-sharp-20191219-Sharp-MX-M266FP-ps-jp.ppd.gz	6424159902d2319e80bf3c7e0efad17f	291997
-sharp-20191219-Sharp-MX-M266N-ps.ppd.gz	78a3efe58c7f69f698455f76ae59e975	292077
-sharp-20191219-Sharp-MX-M316G-ps-jp.ppd.gz	d4cf7bb7d8fbdf1810260935f93733c0	291997
-sharp-20191219-Sharp-MX-M364N-ps.ppd.gz	4255dfd2c78e477f7d53e0c3c41ab95b	292680
-sharp-20191219-Sharp-MX-M365FN-ps-jp.ppd.gz	11ccffbcc94d72054e6ff80123e4c86b	292676
-sharp-20191219-Sharp-MX-M365N-ps.ppd.gz	2a3d5dcb86060d71f41a9dd6a0732e0e	292680
-sharp-20191219-Sharp-MX-M464FN-ps-jp.ppd.gz	9319f21ece3110ecda3cd60c328f4b5a	292676
-sharp-20191230-Sharp-AR-B350W-ps-jp.ppd.gz	9b6fc47d36c8cb97995fe52fd56fd7e6	291734
-sharp-20191230-Sharp-AR-B351-ps.ppd.gz	2816dc3f6979c316e8c1a0f5941e5c5d	291738
-sharp-20191230-Sharp-DX-C310-ps.ppd.gz	9d9679cea0fdb3c98758d9549c1893fc	340072
-sharp-20191230-Sharp-MX-2630FN-ps-jp.ppd.gz	23fef27e6b62d2d27002a348ee7f9403	343781
-sharp-20191230-Sharp-MX-2630N-ps.ppd.gz	50514e22ad4e935d2f1f77395a28ae72	343792
-sharp-20191230-Sharp-MX-2650FN-ps-jp.ppd.gz	9e9ffc9cfae2f95f54e9ffda6feb5c7a	343781
-sharp-20191230-Sharp-MX-3060N-ps.ppd.gz	ca4ad020160eab1cc13a1191d3dc72e2	343792
-sharp-20191230-Sharp-MX-6580N-ps.ppd.gz	0460eccf82fff5192c080eb0fe89a3ca	345809
-sharp-20191230-Sharp-MX-7090N-ps.ppd.gz	254f18dc82341e85c3d36de72eddb185	346474
-sharp-20191230-Sharp-MX-B355W-ps.ppd.gz	7662dcfc03d3f93263c7113ddc4da46e	295110
-sharp-20191230-Sharp-MX-B356W-ps.ppd.gz	eec677695595c04cabf1e11b57c6c7b4	295241
-sharp-20191230-Sharp-MX-B380P-ps.ppd.gz	84b9bc9b01169a2220b977ccbda7634c	291161
-sharp-20191230-Sharp-MX-M2630-ps.ppd.gz	6eb9b585be031c1fdd64b437b55369ec	295759
-sharp-20191230-Sharp-MX-M2651-ps.ppd.gz	d3b2b28bebde4ee7e3c09968513f5c3a	295782
-sharp-20191230-Sharp-MX-M3070-ps.ppd.gz	053d2d7f6060aad21367759a29f91c26	295683
-sharp-20191230-Sharp-MX-M3071-ps.ppd.gz	e83b3ad48e8ecf63d695a1ebf68fb52c	295706
-sharp-20191230-Sharp-MX-M3531-ps-jp.ppd.gz	5f79be561ff0b73cbbaf90d76e37b585	295695
-sharp-20191230-Sharp-MX-M6570-ps.ppd.gz	e44ccb2021eb42378ee9e9cc70e19f29	296920
-sharp-20191230-Sharp-MX-M905-ps.ppd.gz	ee0b58d23948083172583ab808a934a9	296558
-star-20171009-starcupsdrv-3.6.0-hsp7000r.ppd.gz	ae5f62ab22a11bbd5df7d207e0cf571f	8759
-star-20171009-starcupsdrv-3.6.0-hsp7000s.ppd.gz	e77dfd54128ff4a7db77a4c7fd1dced6	8477
-star-20171009-starcupsdrv-3.6.0-hsp7000v.ppd.gz	10457e156ce5bc1ac2e8270851c81482	22011
-star-20171009-starcupsdrv-3.6.0-sp512.ppd.gz	445086d94f085523c3ec705a094683d9	6463
-star-20171009-starcupsdrv-3.6.0-sp542.ppd.gz	d7350c2aa449afb0516f1a01d44add97	6463
+ricoh-20210222-Gestetner-G3020c_PXL.ppd.gz	5b00c8e2bf05dc0aff80236e7635d416	144731
+ricoh-20210222-Lanier-IM_7000_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33570
+ricoh-20210601-Gestetner-GS3025m_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33546
+ricoh-20210601-Gestetner-GS3040m_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33546
+ricoh-20210601-Gestetner-GS3060m_PDF.ppd.gz	b754ab320abe1bbdb7217ba0663899b5	33546
+ricoh-20210601-Ricoh-M_C2000_PXL.ppd.gz	5b00c8e2bf05dc0aff80236e7635d416	144731
+ricoh-20210601-Ricoh-SP_2300L_PXL.ppd.gz	a13b115e8059860aae61e83b97898344	48443
+star-20171009-starcupsdrv-3.6.0-hsp7000r.ppd.gz	58756e1033a89a99976dc2b61caa86eb	8887
+star-20171009-starcupsdrv-3.6.0-hsp7000s.ppd.gz	c1357547511e07c4f568e467d38b5454	8605
+star-20171009-starcupsdrv-3.6.0-hsp7000v.ppd.gz	ee95a4397185f0d55588cd6d0d5f9106	22139
+star-20171009-starcupsdrv-3.6.0-sp512.ppd.gz	414708f620f7887a070998b7e19efd78	6591
+star-20171009-starcupsdrv-3.6.0-sp542.ppd.gz	155c64c62422a599b16ab76e91d1678c	6591
 star-20171009-starcupsdrv-3.6.0-tsp1000.ppd.gz	08c2ffce299e15bfd033474c8dac78a7	13825
 star-20171009-starcupsdrv-3.6.0-tsp828l.ppd.gz	43799e3c157bda057a86a8a46fbc06c1	21995
-star-20171009-starcupsdrv-3.6.0-tup542.ppd.gz	743503d4eee025effbc915af089db8c1	14441
-star-20171009-starcupsdrv-3.6.0-tup592.ppd.gz	9131ce80c26fbf361f2e201b73d0bce3	14470
-star-20171009-starcupsdrv-3.6.0-tup942.ppd.gz	4625ed1c5853dba7872e08eb5f7c68e3	21997
-star-20171009-starcupsdrv-3.6.0-tup992.ppd.gz	40d14e366455669497aa40390ada0991	22004
-star-20191209-fvp10.ppd.gz	09e6cbe4e532732715059ea7222a466b	8769
-star-20191209-sp712.ppd.gz	4a1f9f9b98adcc2340fbc4784d623825	6482
-star-20191209-sp717.ppd.gz	4a1f9f9b98adcc2340fbc4784d623825	6482
-star-20191209-sp742.ppd.gz	cbc7546a4d119b28da0ee89ce3840776	6482
-star-20191209-sp747.ppd.gz	cbc7546a4d119b28da0ee89ce3840776	6482
+star-20171009-starcupsdrv-3.6.0-tup542.ppd.gz	e8fe78382e3c894a3e6bfd49c2361789	14569
+star-20171009-starcupsdrv-3.6.0-tup592.ppd.gz	e7dea026a7d31ae592bb940b0e140852	14598
+star-20171009-starcupsdrv-3.6.0-tup942.ppd.gz	67cba4ee9dbece7061051974dc5f6d7b	22125
+star-20171009-starcupsdrv-3.6.0-tup992.ppd.gz	8f98a280ee0e68a6b9090857cfcf9714	22132
+star-20191209-fvp10.ppd.gz	2729d3a4340ac5b7485c46eca2cf5079	8897
+star-20191209-mcp20.ppd.gz	92cde9d6458a655e1486ec1bf176bc93	9551
+star-20191209-mcp21.ppd.gz	92cde9d6458a655e1486ec1bf176bc93	9551
+star-20191209-mcp30.ppd.gz	8f7208154876b79dc37f34a1bc3a9238	9771
+star-20191209-mcp31.ppd.gz	05eea3500236b2a02b7d98b6783da99b	9775
+star-20191209-pop10.ppd.gz	e43571152c76832ecd123543d992364b	9555
+star-20191209-sp712.ppd.gz	313e9a9276b8c1150091b79114359f32	6610
+star-20191209-sp717.ppd.gz	313e9a9276b8c1150091b79114359f32	6610
+star-20191209-sp742.ppd.gz	be7832e9086f0abadb5a157666d6e1af	6610
+star-20191209-sp747.ppd.gz	be7832e9086f0abadb5a157666d6e1af	6610
 star-20191209-tsp113.ppd.gz	649b77a85fec17704b9bd6e883bc9062	8744
-star-20191209-tsp143gt.ppd.gz	a8ace80a2140f740ff19d12f3dc16d95	8745
 star-20191209-tsp143.ppd.gz	a8ace80a2140f740ff19d12f3dc16d95	8745
-star-20191209-tsp654.ppd.gz	b6033504c6d37c2bd027f24239fc10fa	8767
-star-20191209-tsp700II.ppd.gz	e6be128881e12a237b6c247644246aa9	8763
-star-20191209-tsp800II.ppd.gz	289f92018d0d49d35f4e7ae65c01a2ed	22019
-xerox-20190225-xr6605dn.ppd.gz	57e9aaf3ff74c77f52862065364e7aef	333728
-xerox-20190225-xr8580dn.ppd.gz	7f6c579a0825dd29a481d133d59e9e53	349632
-xerox-20190225-xrx3655s.ppd.gz	13b75becadf9828ccbade9137adf1268	288871
-xerox-20190225-xrx4622.ppd.gz	a943b568eeddb946751f9ddb0a33d514	294796
-xerox-20190225-xrx5330.ppd.gz	a360737eb26d1cfe12c773a40309fc09	289774
-xerox-20190225-xrx5875.ppd.gz	7769420ad0991d42df8cad3a60a2291d	289199
-xerox-20190225-xrx7830.ppd.gz	6bd6627707168028d3f1d3d4a68ac800	334457
-xerox-20190225-xrx7970.ppd.gz	79e4c86b84e9d0b5b4d9e55783a661bc	334457
-xerox-20190225-xrx8580n.ppd.gz	c6d56697a8f28f088d8de177c548cab9	349630
-xerox-20190225-xrxd95cp.ppd.gz	a73b1834d654b7377088672e308b7d96	289920
-xerox-20190711-xrwc3335.ppd.gz	d6572ad74de8271b04e430758b254518	288903
-xerox-20190711-xrx6510.ppd.gz	f0f844366b35eb47bb90685331be0a17	333831
-xerox-20190820-xrxosd.ppd.gz	74318f684a8aa8af759800a5a226a0c3	333539
-xerox-20191030-Xerox_Phaser_7800DN.ppd.gz	e1ac11868c0f60267818a28b9a287788	343260
-xerox-20191030-Xerox_Phaser_7800DX.ppd.gz	fcc3f965b49dd7b5d9e4b64117d6a05d	343266
-xerox-20191030-Xerox_Phaser_7800GX.ppd.gz	86fb5a01e53439e95d306db4144aeccb	343260
-xerox-20191030-Xerox_VersaLink_C500.ppd.gz	da60726fe1fbb76d3f2657ec456c6ed7	333938
-xerox-20191030-Xerox_VersaLink_C505.ppd.gz	a2b087f7e8e4b83c447d3e60f82fcf44	333862
-xerox-20191030-Xerox_VersaLink_C600.ppd.gz	7018b713c2779878fd2b845a8c04f42c	334015
-xerox-20191030-Xerox_VersaLink_C605.ppd.gz	876eeab7696247a9b7ffc75306da6f33	333939
-xerox-20191030-xrxB400.ppd.ppd.gz	9a6a62f0e9b0dad0a990ca19d7e0ca54	288570
-xerox-20191030-xrxB405.ppd.ppd.gz	6607c3a567e558724da78f91c2a147ca	288570
-xerox-20191030-xrxb600.ppd.gz	1f744412732b94d88935f342bc34e7c3	288604
-xerox-20191030-xrxb615.ppd.gz	1f744412732b94d88935f342bc34e7c3	288604
-xerox-20191030-xrxB7025.ppd.gz	b042ee2bf983f2f21c3de7731ffa1c36	288938
-xerox-20191030-xrxB7030.ppd.gz	7aff6b8bd1e63a38af13b3cb889f0d02	288938
-xerox-20191030-xrxB7035.ppd.gz	7aff6b8bd1e63a38af13b3cb889f0d02	288938
-xerox-20191030-xrxB8045.ppd.gz	08d0e50d60cefa0ead2c8637961a3de2	289199
-xerox-20191030-xrxC400.ppd.gz	aef45a147191c29103b08212c3ead0e3	333858
-xerox-20191030-xrxC405.ppd.gz	aef45a147191c29103b08212c3ead0e3	333858
-xerox-20191030-xrxC7000.ppd.gz	55b6aa5ef564c6e8fde78176ae0f3d39	334099
-xerox-20191030-xrxC7030.ppd.gz	b28b2fdfc9833d58400edd957bbcb46d	334199
-xerox-20191030-xrxC8000.ppd.gz	c205ae221859bc2d0c082012dca30422	334275
-xerox-20191030-xrxC8030.ppd.gz	109903cc93d25bff1ceb89509faa6527	334457
-xerox-20200129-xrxC9065.ppd.gz	1425b0b3a46153352326a15df35d2f03	334207
-xerox-20200226-xrxB9100.ppd.gz	3821db8bec77fe4fbff40c263559a717	288949
-xerox-20201014-xrxC8000W.ppd.gz	0568276db6f9a7fd9c072ca5220d5238	334199
+star-20191209-tsp143gt.ppd.gz	a8ace80a2140f740ff19d12f3dc16d95	8745
+star-20191209-tsp654.ppd.gz	55cc4086b8ad0919866780dba393459f	8895
+star-20191209-tsp700II.ppd.gz	c63ce27445f9d872f3e14faf359c6ca3	8891
+star-20191209-tsp800II.ppd.gz	8e9990d719178e456a466a32211b63db	22147
+zebra-20210504-SP-005645A.ppd.gz	ebe16982144138840b4091762c316fe5	39347
diff --git a/client/site_tests/platform_PrinterPpds/digests/split_streams.pdf.digests b/client/site_tests/platform_PrinterPpds/digests/split_streams.pdf.digests
index 9f0cbf4..1ee55b1 100644
--- a/client/site_tests/platform_PrinterPpds/digests/split_streams.pdf.digests
+++ b/client/site_tests/platform_PrinterPpds/digests/split_streams.pdf.digests
@@ -1,32 +1,8 @@
-brother-20191213-DCP7180DN.ppd.gz	f4c86eb0342f01f6d0a9c56464d94b24	42292
-brother-20191213-DCP7195DW.ppd.gz	f4c86eb0342f01f6d0a9c56464d94b24	42292
-brother-20191213-DCPL5500DN.ppd.gz	f4c86eb0342f01f6d0a9c56464d94b24	42292
-brother-20191213-DCPL6600DW.ppd.gz	f4c86eb0342f01f6d0a9c56464d94b24	42292
-brother-20191213-HL5595DNH.ppd.gz	f4c86eb0342f01f6d0a9c56464d94b24	42292
-brother-20200131-DCP7025.ppd.gz	f4c86eb0342f01f6d0a9c56464d94b24	42272
-brother-20200131-DCP8080DN.ppd.gz	f4c86eb0342f01f6d0a9c56464d94b24	42292
-brother-20200615-DCP9030CDN.ppd.gz	ec36e10c011a8545ead41f0da44f5cce	679564
-brother-20200615-DCPL8410CDW.ppd.gz	ec36e10c011a8545ead41f0da44f5cce	679564
-brother-20200615-HLL3210CW.ppd.gz	ec36e10c011a8545ead41f0da44f5cce	679564
-brother-20200615-MFC9350CDW.ppd.gz	ec36e10c011a8545ead41f0da44f5cce	679564
-brother-20201006-DCP7080-cups-en.ppd.gz	4e708f8d2b96acd220fc651d21204959	41640
-brother-20201006-DCP7080D-cups-en.ppd.gz	4495f25aca826e66dd05b1df325911a1	41640
-brother-20201006-DCP7090-cups-en.ppd.gz	4495f25aca826e66dd05b1df325911a1	41640
-brother-20201006-DCP7090DW-cups-en.ppd.gz	6f01a83a684fe841cb62047f5a4b0526	41640
-brother-20201006-HL2290-cups-en.ppd.gz	6f01a83a684fe841cb62047f5a4b0526	41640
-brother-20201006-HL2295D-cups-en.ppd.gz	47b994b5525ffa4566bd27cdc9a18bc3	41640
-brother-20201006-MFCL2685DW-cups-en.ppd.gz	7dcb1040ac65da1bf2d092ceac60b15b	41640
+brother-20200615-DCP9030CDN.ppd.gz	d75691f2bbce8bda2d037b4a6f69efaa	679569
+brother-20200615-DCPL8410CDW.ppd.gz	d75691f2bbce8bda2d037b4a6f69efaa	679569
+brother-20200615-HLL3210CW.ppd.gz	d75691f2bbce8bda2d037b4a6f69efaa	679569
+brother-20200615-MFC9350CDW.ppd.gz	d75691f2bbce8bda2d037b4a6f69efaa	679569
 cups-20170101-Generic-PDF_Printer-PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2780
-dymo-20200714-lm280.ppd.gz	4b34c05894c2eaf37cea838864f64824	1151
-dymo-20200714-lm400.ppd.gz	4b34c05894c2eaf37cea838864f64824	1151
-dymo-20200714-lm420p.ppd.gz	d6e3b0ad61642278f8e3375a44cf9eae	1697
-dymo-20200714-lm450.ppd.gz	18d163ca9a128210f975d066d3bb2e71	1699
-dymo-20200714-lm500ts.ppd.gz	57768960860d77312de8f86321a960e3	4890
-dymo-20200714-lmpc2.ppd.gz	b45499e4dfa0ec21c314d9c47468e689	1697
-dymo-20200714-lmpc.ppd.gz	872655af2b0b44c6389e8a76c91fdca0	1149
-dymo-20200714-lmpnp.ppd.gz	7e113440c46a7f88ac5cd157ec7e2a0c	794
-dymo-20200714-lmpnpw.ppd.gz	59dd04073f30986b3880a698a4a23a3e	1202
-dymo-20200714-lp350.ppd.gz	872655af2b0b44c6389e8a76c91fdca0	1149
 dymo-20200714-lw300.ppd.gz	9914e5abc675577223a126e6ee8e55bd	15933
 dymo-20200714-lw315.ppd.gz	9914e5abc675577223a126e6ee8e55bd	15933
 dymo-20200714-lw320.ppd.gz	9740dcd4e1d942028cece540011b58b6	29521
@@ -34,15 +10,12 @@
 dymo-20200714-lw330t.ppd.gz	9740dcd4e1d942028cece540011b58b6	29521
 dymo-20200714-lw400.ppd.gz	fea7c2d5f1ec8cb5e708b6a2e3517bc5	29523
 dymo-20200714-lw400t.ppd.gz	fea7c2d5f1ec8cb5e708b6a2e3517bc5	29523
-dymo-20200714-lw450dl.ppd.gz	fea7c2d5f1ec8cb5e708b6a2e3517bc5	29523
-dymo-20200714-lw450dt.ppd.gz	711db28bf0b1f321d7f17e0232f3932a	1699
 dymo-20200714-lw450.ppd.gz	fea7c2d5f1ec8cb5e708b6a2e3517bc5	29523
+dymo-20200714-lw450dl.ppd.gz	fea7c2d5f1ec8cb5e708b6a2e3517bc5	29523
 dymo-20200714-lw450t.ppd.gz	fea7c2d5f1ec8cb5e708b6a2e3517bc5	29523
 dymo-20200714-lw450tt.ppd.gz	3192245f4bad44e7c2b2b6ab90617e7d	29529
 dymo-20200714-lw4xl.ppd.gz	15e12adbd68d6cc3d2f93ad7f17f0764	31766
 dymo-20200714-lwduol.ppd.gz	fea7c2d5f1ec8cb5e708b6a2e3517bc5	29523
-dymo-20200714-lwduot2.ppd.gz	711db28bf0b1f321d7f17e0232f3932a	1699
-dymo-20200714-lwduot.ppd.gz	7f222308b2d4e8daee28ff2dc7833bbf	1151
 dymo-20200714-lwtt.ppd.gz	3192245f4bad44e7c2b2b6ab90617e7d	29529
 dymo-20200714-se450.ppd.gz	501a6efb788873efc350e4ee323b63d3	14454
 epson-20170125-Epson-L380_Series-epson-escpr-en-1.6.10.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
@@ -115,9 +88,9 @@
 epson-20200615-1_6_41-Epson-L805_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-L810_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-M200_Series-epson-escpr-en.ppd.gz	01b87b624ec9bf403e99ef79a926e971	501341
-epson-20200615-1_6_41-Epson-ME_200-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-ME-301_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-ME-400_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
+epson-20200615-1_6_41-Epson-ME_200-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-ME_OFFICE_510-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-ME_OFFICE_530-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-ME_OFFICE_570-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
@@ -127,11 +100,6 @@
 epson-20200615-1_6_41-Epson-PF-70_Series-epson-escpr-en.ppd.gz	81250e920d19bddb96d95ad783b33828	142505
 epson-20200615-1_6_41-Epson-PF-71_Series-epson-escpr-en.ppd.gz	81250e920d19bddb96d95ad783b33828	142505
 epson-20200615-1_6_41-Epson-PF-81_Series-epson-escpr-en.ppd.gz	4f3297808b530b9197d6dca8c2a2e912	142505
-epson-20200615-1_6_41-Epson-PictureMate_500-epson-escpr-en.ppd.gz	7aa213754a5a7ae82f20003c4289fd67	180184
-epson-20200615-1_6_41-Epson-PictureMate_PM_200-epson-escpr-en.ppd.gz	7a3a97a816ec10991e975f81d7a79e6b	180184
-epson-20200615-1_6_41-Epson-PictureMate_PM_240-epson-escpr-en.ppd.gz	7a3a97a816ec10991e975f81d7a79e6b	180184
-epson-20200615-1_6_41-Epson-PictureMate_PM_270-epson-escpr-en.ppd.gz	7aa213754a5a7ae82f20003c4289fd67	180184
-epson-20200615-1_6_41-Epson-PictureMate_PM_300-epson-escpr-en.ppd.gz	7aa213754a5a7ae82f20003c4289fd67	180184
 epson-20200615-1_6_41-Epson-PM-400_Series-epson-escpr-en.ppd.gz	4f3297808b530b9197d6dca8c2a2e912	142505
 epson-20200615-1_6_41-Epson-PM-A750-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-PM-A820-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
@@ -172,6 +140,11 @@
 epson-20200615-1_6_41-Epson-PX-M860F-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-PX-S05_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-PX-S06_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
+epson-20200615-1_6_41-Epson-PictureMate_500-epson-escpr-en.ppd.gz	7aa213754a5a7ae82f20003c4289fd67	180184
+epson-20200615-1_6_41-Epson-PictureMate_PM_200-epson-escpr-en.ppd.gz	7a3a97a816ec10991e975f81d7a79e6b	180184
+epson-20200615-1_6_41-Epson-PictureMate_PM_240-epson-escpr-en.ppd.gz	7a3a97a816ec10991e975f81d7a79e6b	180184
+epson-20200615-1_6_41-Epson-PictureMate_PM_270-epson-escpr-en.ppd.gz	7aa213754a5a7ae82f20003c4289fd67	180184
+epson-20200615-1_6_41-Epson-PictureMate_PM_300-epson-escpr-en.ppd.gz	7aa213754a5a7ae82f20003c4289fd67	180184
 epson-20200615-1_6_41-Epson-Stylus_CX3700-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-Stylus_CX3800-epson-escpr-en.ppd.gz	4f90dc4f62a12296962332fa7d530230	478248
 epson-20200615-1_6_41-Epson-Stylus_CX4200-epson-escpr-en.ppd.gz	4f90dc4f62a12296962332fa7d530230	478248
@@ -218,13 +191,13 @@
 epson-20200615-1_6_41-Epson-WF-7610_Series-epson-escpr-en.ppd.gz	ac2801f22b48ae03d1fef5fe582856b2	501341
 epson-20200615-1_6_41-Epson-WF-8010_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-WF-M5190_Series-epson-escpr-en.ppd.gz	01b87b624ec9bf403e99ef79a926e971	501341
-epson-20200615-1_6_41-Epson-WorkForce_320-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
-epson-20200615-1_6_41-Epson-WorkForce_600-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
-epson-20200615-1_6_41-Epson-WorkForce_610-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-WP-4010_Series-epson-escpr-en.ppd.gz	1fecdca1a3f20e49eb65c333e0838578	360938
 epson-20200615-1_6_41-Epson-WP-4011_Series-epson-escpr-en.ppd.gz	6418137bfc761ea8c79e302156c37791	378880
 epson-20200615-1_6_41-Epson-WP-4020_Series-epson-escpr-en.ppd.gz	6418137bfc761ea8c79e302156c37791	378880
 epson-20200615-1_6_41-Epson-WP-M4011_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
+epson-20200615-1_6_41-Epson-WorkForce_320-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
+epson-20200615-1_6_41-Epson-WorkForce_600-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
+epson-20200615-1_6_41-Epson-WorkForce_610-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-XP-201_204_208_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-XP-2100_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-XP-211_214_216_Series-epson-escpr-en.ppd.gz	4f90dc4f62a12296962332fa7d530230	478248
@@ -251,270 +224,103 @@
 epson-20200615-1_6_41-Epson-XP-950_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-1_6_41-Epson-XP-960_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 epson-20200615-EPSON_EW-052A_Series.ppd.gz	4f90dc4f62a12296962332fa7d530230	478248
-epson-20200615-Epson-LX-10000FK_Series_PS3.ppd.gz	81617f7ef8eea6d7f9a2a5e6d3692faf	239912
-epson-20200615-Epson-LX-10000F_PS.ppd.gz	572be862c8117ba0808babb0778ac4d4	239912
-epson-20200615-Epson-LX-10010MF_Series_PS3.ppd.gz	3eb002b7438bea7731fc741b3967ec74	212924
-epson-20200615-Epson-WF-C17590_Series_PS3.ppd.gz	626c024034c82f2617be8862600afab5	239912
-epson-20200615-Epson-WF-C20590_PS.ppd.gz	3bc86ddabba87c2b05633fdd49b7c4d5	239912
-epson-20200615-Epson-WF-M20590_Series_PS3.ppd.gz	fed678a41dce9e80ef4f6aa1a86cef3e	212924
-foomatic-20170101-Samsung-M332x_382x_402x-Postscript.ppd.gz	41ed70558d46dfe8f3b55f1a59ba0507	212252
-foomatic-20190909-Ricoh-IM_430F-PostscriptMono-Ricoh.ppd.gz	15948f607781434daad2cfad57292490	213651
+epson-20210521-1_6_41-Epson-PX-S5040-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
+epson-20210703-Epson-L1210_Series-epson-escpr-en.ppd.gz	9da96b2142e2e429d4a45ef00535c2bb	501341
 foomatic-20190909-Ricoh-IM_430_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2756
 foomatic-20190909-Ricoh-IM_C4500_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
-foomatic-20191029-Apollo-P-1200-pcl3.ppd.gz	ab4a8b331de68cff90acec50cd913723	50798
-foomatic-20191029-BR5070DN_GPL.ppd.gz	301ae4197903de98dbf2bdb62c897507	213323
 foomatic-20191029-Gestetner-Pro_C5200S_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2940
 foomatic-20191029-Lanier-Pro_8200S_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2894
 foomatic-20191029-Lanier-Pro_8210_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2894
-foomatic-20191029-Lanier-Pro_C7200_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2966
 foomatic-20191029-Lanier-Pro_C7200S_Light_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2966
-foomatic-20191029-shar208d.ppd.gz	12405ad1658579344654739f4f17f073	213861
-foomatic-20191029-shar208s.ppd.gz	fe5eb9a8dcc39ff46abf473300f5bb53	213762
+foomatic-20191029-Lanier-Pro_C7200_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2966
 foomatic-20200219-Anitech-M24-epson.ppd.gz	c82f267a84d8af1b42f9c2dd0edd7561	981
-foomatic-20200219-Apple-12_640ps-Postscript.ppd.gz	67887374953e82b4572c222c27a65709	212252
-foomatic-20200219-Apple-LaserWriter_IIg-Postscript.ppd.gz	ea69f0255977cbd5b94b1d6499105b5a	186803
-foomatic-20200219-Apple-LaserWriter_Select_360-ljet2p.ppd.gz	a1f917c7d6b0b1f8467bcbde85636e3d	28787
-foomatic-20200219-Brother-DCP-7010-ljet4.ppd.gz	a4010572b3e6ad7dcb9c817449d7fee9	42124
-foomatic-20200219-Brother-DCP-8020-Postscript-Brother.ppd.gz	12d78f525b46c044ee8f32cce653100d	213056
-foomatic-20200219-Brother-DCP-8025D-Postscript-Brother.ppd.gz	66ae6e934531207753fd98cfdd4a1333	213168
-foomatic-20200219-Brother-DCP-8040-Postscript-Brother.ppd.gz	fecdd23dc4384f293fa18157e1fb7354	213056
-foomatic-20200219-Brother-DCP-8045D-Postscript-Brother.ppd.gz	b7c9cac5b951748fb557f00aa4855796	213168
-foomatic-20200219-Brother-DCP-9010CN-Postscript-Brother.ppd.gz	009b022cae858115ad6bc1dc7ca71d11	10729822
-foomatic-20200219-Brother-DCP-9040CN-Postscript-Brother.ppd.gz	d162467ebfb471d137cf4b662113796a	10729846
-foomatic-20200219-Brother-DCP-9045CDN-Postscript-Brother.ppd.gz	56cb14a14584d9371aa50c86179bc38f	10730032
-foomatic-20200219-Brother-HJ-400-lq850.ppd.gz	ae98ea2fa55c9cfd5295fd2ae42c5e84	38020
-foomatic-20200219-Brother-HL-1030-hl1250.ppd.gz	1e72f9f80ff71a942557439a08500201	15327
-foomatic-20200219-Brother-HL-1070-ljet4.ppd.gz	a4010572b3e6ad7dcb9c817449d7fee9	42124
-foomatic-20200219-Brother-HL-10V-ljet3.ppd.gz	b537f15d8120d294b184ab09cc073000	14468
-foomatic-20200219-Brother-HL-1230-hl1250.ppd.gz	1e72f9f80ff71a942557439a08500201	15327
-foomatic-20200219-Brother-HL-1240-laserjet.ppd.gz	2f0322f1fb292235dc37b9219488dca0	48492
-foomatic-20200219-Brother-HL-1250-ljet4.ppd.gz	f6e5a9b3f4ccee676c38c6e0e3a541b4	42208
-foomatic-20200219-Brother-HL-1270N-ljet4.ppd.gz	f6e5a9b3f4ccee676c38c6e0e3a541b4	42208
-foomatic-20200219-Brother-HL-1450-Postscript-Brother.ppd.gz	d87093c6e06c751e444a899a6f7065ba	213106
-foomatic-20200219-Brother-HL-1650_70N-Postscript-Brother.ppd.gz	e4b126a5167983b07e24fff6393da528	213415
-foomatic-20200219-Brother-HL-1650-ljet4.ppd.gz	f6e5a9b3f4ccee676c38c6e0e3a541b4	42254
-foomatic-20200219-Brother-HL-1660e-ljet4.ppd.gz	f6e5a9b3f4ccee676c38c6e0e3a541b4	42254
-foomatic-20200219-Brother-HL-1850_70N-Postscript-Brother.ppd.gz	afed8b1f4f2a962fd21bc5a11d9950b1	213652
-foomatic-20200219-Brother-HL-1850-ljet4.ppd.gz	f6e5a9b3f4ccee676c38c6e0e3a541b4	42254
-foomatic-20200219-Brother-HL-2060-ljet4.ppd.gz	f6e5a9b3f4ccee676c38c6e0e3a541b4	42208
-foomatic-20200219-Brother-HL-2240D-hl1250.ppd.gz	6ed92a182eaa9e8da5dbbdaa08859b8e	42414
-foomatic-20200219-Brother-HL-2240-hl1250.ppd.gz	6ed92a182eaa9e8da5dbbdaa08859b8e	42368
-foomatic-20200219-Brother-HL-2400CeN-pxlcolor.ppd.gz	f806e59260ec92ab078d06457825b9c6	679510
-foomatic-20200219-Brother-HL-2460N-pxlmono.ppd.gz	a6771d9200d30ea72bfaf1e1553882e1	79767
-foomatic-20200219-Brother-HL-2460-Postscript-Brother.ppd.gz	4460a933aa1cd34abaa3dd951915b5a1	214112
-foomatic-20200219-Brother-HL-2600CN-Postscript-Brother.ppd.gz	a0d54b28fcf99df9158da0f7e33e01ad	10731780
-foomatic-20200219-Brother-HL-2700CN-Postscript-Brother.ppd.gz	83a30fcb8c350043a988f86e1243f96f	10730369
-foomatic-20200219-Brother-HL-3070CW-Postscript-Brother.ppd.gz	f2ee256b5e96d498a6a531f6d5653206	10729898
-foomatic-20200219-Brother-HL-3260N-Postscript-Brother.ppd.gz	bd93452bce7c130420c0720d1cbd2107	214092
-foomatic-20200219-Brother-HL-3450CN-Postscript-Brother.ppd.gz	dbb64f96fd605967b4fb36e762b5d96c	10731780
-foomatic-20200219-Brother-HL-4050CDN-Postscript-Brother.ppd.gz	e653a290127b53838a830f26d25853c6	10730108
-foomatic-20200219-Brother-HL-4Ve-laserjet.ppd.gz	332abc3788d34ae2f95670c4015577e6	48408
-foomatic-20200219-Brother-HL-5030-ljet4.ppd.gz	f6e5a9b3f4ccee676c38c6e0e3a541b4	42208
-foomatic-20200219-Brother-HL-5050-Postscript-Brother.ppd.gz	5be4aa41fa48b812a2b403b8f9628627	213212
-foomatic-20200219-Brother-HL-5140-ljet4.ppd.gz	a4010572b3e6ad7dcb9c817449d7fee9	42124
-foomatic-20200219-Brother-HL-5150D-Postscript-Brother.ppd.gz	f52c8ffd6aecbd7b5c728c37c8016d3d	213324
-foomatic-20200219-Brother-HL-5240-Postscript-Brother.ppd.gz	d7148fa2d07153f2c4c86b1a565b3fe3	213238
-foomatic-20200219-Brother-HL-5250DN-Postscript-Brother.ppd.gz	600e49a22f5d0a7ff43e2f6806237173	213350
-foomatic-20200219-Brother-HL-5270DN-Postscript-Brother.ppd.gz	3afee844748f20cdfa7384d8a794833f	213774
-foomatic-20200219-Brother-HL-6050D_DN-Postscript-Brother.ppd.gz	6c472cfe8ad7e2a96ac5ad57c8dbfd66	213852
-foomatic-20200219-Brother-HL-6050-Postscript-Brother.ppd.gz	33af6a0821c71360bb315986ea8085f6	213740
-foomatic-20200219-Brother-HL-7050N-pxlmono.ppd.gz	a6771d9200d30ea72bfaf1e1553882e1	79817
-foomatic-20200219-Brother-HL-7050-Postscript-Brother.ppd.gz	215c66264a81519bb3b3b6c1ee83bb62	214253
-foomatic-20200219-Brother-HL-8050N-Postscript-Brother.ppd.gz	8a6e6d98e14a6437cca55536e08c497f	214070
-foomatic-20200219-Brother-HL-8-ljetplus.ppd.gz	332abc3788d34ae2f95670c4015577e6	48408
+foomatic-20200219-Brother-HJ-400-lq850.ppd.gz	e87eb44a726c4ea5b5de4c7e405788e0	38065
+foomatic-20200219-Brother-HL-2400CeN-pxlcolor.ppd.gz	b306abb3233e84618474ccee568bd0c3	679515
+foomatic-20200219-Brother-HL-2460N-pxlmono.ppd.gz	da8a63669af5504e85ee4a4e4e167702	79772
+foomatic-20200219-Brother-HL-7050N-pxlmono.ppd.gz	da8a63669af5504e85ee4a4e4e167702	79822
 foomatic-20200219-Brother-MC-3000-epson.ppd.gz	c82f267a84d8af1b42f9c2dd0edd7561	981
-foomatic-20200219-Brother-MFC-7450-Postscript-Brother.ppd.gz	1a3b65a4710926c7868c1b68f1db64f0	213550
-foomatic-20200219-Brother-MFC-8220-Postscript-Brother.ppd.gz	acbafd49f8cdf7ae70a95725028d069d	213056
-foomatic-20200219-Brother-MFC-8440-Postscript-Brother.ppd.gz	1401591a865c33beb7845b54630b213a	213056
-foomatic-20200219-Brother-MFC-8640D-Postscript-Brother.ppd.gz	25bc2b4d84ff87eb8e16499c9ae31667	213168
-foomatic-20200219-Brother-MFC-8670DN-Postscript-Brother.ppd.gz	7fded50049c6ff4a3754ce7530a34c43	213698
-foomatic-20200219-Brother-MFC-8820D-Postscript-Brother.ppd.gz	973745319018552f9beccd877911b29c	213168
 foomatic-20200219-Brother-MFC-9100c-epsonc.ppd.gz	9c5c79061c314efa74b103e6a3bd58d3	2807
-foomatic-20200219-Brother-MFC-9420CN-Postscript-Brother.ppd.gz	b34041128236e9f23d1ef83778820f40	10729621
-foomatic-20200219-Brother-MFC-9440CN-Postscript-Brother.ppd.gz	8d987da3f9da9c40d8dcc1442cd090b6	10729920
-foomatic-20200219-Brother-MFC-P2500-hl1250.ppd.gz	1e72f9f80ff71a942557439a08500201	15327
-foomatic-20200219-Canon-BJ-100-bj200.ppd.gz	9aa15c81a1a38dae60caf6849ecb5dbe	30537
-foomatic-20200219-Canon-BJ-10e-bj10e.ppd.gz	bca1043a02a9dbc0e213649914399596	28751
-foomatic-20200219-Canon-BJ-330-bj200.ppd.gz	9aa15c81a1a38dae60caf6849ecb5dbe	30537
-foomatic-20200219-Canon-BJC-1000-bjc600.ppd.gz	957db51dd8b109096599382869417e56	80446
-foomatic-20200219-Canon-BJC-2100-bjc610XY.upp.ppd.gz	463a17a0e3672a9002185221500a1b13	83370
-foomatic-20200219-Canon-BJC-210-bjc600.ppd.gz	957db51dd8b109096599382869417e56	80446
-foomatic-20200219-Canon-BJC-210SP-bj200.ppd.gz	9aa15c81a1a38dae60caf6849ecb5dbe	30537
-foomatic-20200219-Canon-BJC-250ex-bjc250gs.ppd.gz	4405fb1348dd9636423c0cd0d054cc11	94598
-foomatic-20200219-Canon-BJC-255SP-bjc250gs.ppd.gz	9f450f08438db89de0e2a9c812ab36c5	94670
-foomatic-20200219-Canon-BJC-4550-bjc800.ppd.gz	4cb509567d93a59d830161d70aa7b9fa	80445
-foomatic-20200219-Canon-BJC-610-bjc610XY.upp.ppd.gz	463a17a0e3672a9002185221500a1b13	83370
-foomatic-20200219-Canon-BJC-800-bjc800.ppd.gz	4cb509567d93a59d830161d70aa7b9fa	80445
-foomatic-20200219-Canon-BJC-8200-bj8XXYYZ.upp.ppd.gz	92c6cc2a9c5246863955b7ec610a3b14	151005
-foomatic-20200219-Canon-GP_405-Postscript.ppd.gz	757075bc510a6e4009eec5536bb090e9	212252
-foomatic-20200219-Canon-imageRunner_C2570-Postscript.ppd.gz	8399c58902ed391d59cad51291348771	238975
-foomatic-20200219-Canon-iPR_C600-Postscript-Canon.ppd.gz	1d4a3e23b57f890912e7d3dbc24bfe9b	243989
-foomatic-20200219-Canon-iPR_C650_PPD-Postscript-Canon.ppd.gz	e0b67c1601a1f91d7eec477d040c50c8	245616
-foomatic-20200219-Canon-iPR_C700_800-Postscript-Canon.ppd.gz	6901cf310b10dc8b8db3e438533b234c	245622
-foomatic-20200219-Canon-iPR_C750_850_PPD-Postscript-Canon.ppd.gz	ecdf54f2b9bcc80457bab23fbe9366ed	245630
-foomatic-20200219-Canon-iR-ADV_400_500-Postscript-Canon.ppd.gz	b4b97750f55ea1719f6cb207efebe1a0	214953
-foomatic-20200219-Canon-iR-ADV_4025_4035-Postscript-Canon.ppd.gz	720e7b179173a76ddda0bf57103acc46	215595
-foomatic-20200219-Canon-iR-ADV_4225_4235-Postscript-Canon.ppd.gz	bdeb7fb997325cc4861ef91b11892824	215948
-foomatic-20200219-Canon-iR-ADV_6055_6065-Postscript-Canon.ppd.gz	1e251f20e2fc3e894b11a62de24869e6	215828
-foomatic-20200219-Canon-iR-ADV_6255_6265-Postscript-Canon.ppd.gz	445a1e89da1151e534eb3c4ee30cd959	216169
-foomatic-20200219-Canon-iR-ADV_8085_8095-Postscript-Canon.ppd.gz	7ed15d4325bb8299771c3b8e65c40121	215899
-foomatic-20200219-Canon-iR-ADV_8205-Postscript-Canon.ppd.gz	605ee2b598d736bf1216e810cd98a509	216448
-foomatic-20200219-Canon-iR-ADV_C2020_2030-Postscript-Canon.ppd.gz	f95707e06705a0921c63d7700b752b75	242995
-foomatic-20200219-Canon-iR-ADV_C2020i_2030i-Postscript-Canon.ppd.gz	5afdc44e00ea0affbdcd81415c3c3af6	242991
-foomatic-20200219-Canon-iR-ADV_C2025-Postscript-Canon.ppd.gz	5afdc44e00ea0affbdcd81415c3c3af6	242991
-foomatic-20200219-Canon-iR-ADV_C2220_2230-Postscript-Canon.ppd.gz	300f94107c9567e22716bc062768b15b	243550
-foomatic-20200219-Canon-iR-ADV_C2225-Postscript-Canon.ppd.gz	e0a994a15d20ed7aeba679135f9496ae	243550
-foomatic-20200219-Canon-iR-ADV_C250_350-Postscript-Canon.ppd.gz	976e08d41b54e7c83f6dea2cdcfa8339	242732
-foomatic-20200219-Canon-iR-ADV_C3320L-Postscript-Canon.ppd.gz	38768ef62eb90e659793a2344cec83cc	243434
-foomatic-20200219-Canon-iR-ADV_C3320-Postscript-Canon.ppd.gz	ebd8fc4ed303b37197c3efbf1bcb3fd4	243506
-foomatic-20200219-Canon-iR-ADV_C3325_3330-Postscript-Canon.ppd.gz	ca8b933ad4027fc14c96b9c4ea9272ec	243506
-foomatic-20200219-Canon-iR-ADV_C351-Postscript-Canon.ppd.gz	92ffb8923986fca213684ffe46e7d942	242661
-foomatic-20200219-Canon-iR-ADV_C5030_5035-Postscript-Canon.ppd.gz	d5b5b5fd9e84e8fc2aafa50eca262551	244372
-foomatic-20200219-Canon-iR-ADV_C5045_5051-Postscript-Canon.ppd.gz	7c302972999ea04f067f423318621dc7	244372
-foomatic-20200219-Canon-iR-ADV_C5235_5240-Postscript-Canon.ppd.gz	6e5b11c0c3dacc50520ead565938c2ac	244438
-foomatic-20200219-Canon-iR-ADV_C5250_5255-Postscript-Canon.ppd.gz	6e5b11c0c3dacc50520ead565938c2ac	244438
-foomatic-20200219-Canon-iR-ADV_C7055_7065-Postscript-Canon.ppd.gz	d16ea478a24a6137bf920e9e25d2a312	244178
-foomatic-20200219-Canon-iR-ADV_C7260_7270-Postscript-Canon.ppd.gz	ec806c707a42164db0659b105799e83c	244519
-foomatic-20200219-Canon-iR-ADV_C7280-Postscript-Canon.ppd.gz	a76aea03b0122e9aa557fa9f37caa3a6	244794
-foomatic-20200219-Canon-iR-ADV_C9060_9070-Postscript-Canon.ppd.gz	45b4c625d66650de7a75cc91528ffd05	244245
-foomatic-20200219-Canon-iR-ADV_C9065_9075-Postscript-Canon.ppd.gz	5293a60c5697457f1f8461c3ddb869e8	244249
-foomatic-20200219-Canon-iR-ADV_C9270_9280-Postscript-Canon.ppd.gz	c76db7f9c3050417aeec60bd6656f8c1	244798
-foomatic-20200219-Canon-LBP-1000-ljet4.ppd.gz	f6e5a9b3f4ccee676c38c6e0e3a541b4	42239
-foomatic-20200219-Canon-LBP-1760-ljet4.ppd.gz	a4010572b3e6ad7dcb9c817449d7fee9	42124
-foomatic-20200219-Canon-LBP-430-ljet4.ppd.gz	4def7a570be40572772e3f57657ea7ae	15091
-foomatic-20200219-Canon-LBP-4U-lbp8.ppd.gz	5ee0b466344cf3d5b94ff8d184ac0abf	39182
-foomatic-20200219-Canon-LBP6670-Postscript-Canon.ppd.gz	3829e82225ecdf78e2eddbf29a3d6f5f	212781
-foomatic-20200219-Canon-LBP6780_3580-Postscript-Canon.ppd.gz	0359450c73fcdb12dae15ecf0fda49e0	212787
-foomatic-20200219-Canon-LBP710C_PPD-Postscript-Canon.ppd.gz	e61a82c511c8ce3ac352a1d50e5cc642	240473
-foomatic-20200219-Canon-LBP712C_PPD-Postscript-Canon.ppd.gz	3a7969cdc9974745b000c63bc5e24884	240476
-foomatic-20200219-Canon-LBP7660C-Postscript-Canon.ppd.gz	2724e3f0b73b347fecf8e7394a2988c0	240480
-foomatic-20200219-Canon-LBP7680C_5280-Postscript-Canon.ppd.gz	2724e3f0b73b347fecf8e7394a2988c0	240480
-foomatic-20200219-Canon-LBP7780C_5480-Postscript-Canon.ppd.gz	cc0d72480bcc715bf9848983fe8fe188	240473
-foomatic-20200219-Canon-LBP8780-Postscript-Canon.ppd.gz	ed2c626c549bed998067696fe0edbec8	212844
-foomatic-20200219-Canon-LIPS-III-lips3.ppd.gz	2c00cf31c6a6980af62e665b51805d92	18485
-foomatic-20200219-Canon-LIPS-IIplus-lips2p.ppd.gz	6fc1190e741e131f955a00d8cadaeffb	12724
-foomatic-20200219-Canon-S500-bj8XXYYZ.upp.ppd.gz	92c6cc2a9c5246863955b7ec610a3b14	151005
-foomatic-20200219-Citizen-ProJet_IIc-cdj500.ppd.gz	e44529d1020f0ac4d78f944e667101ab	37589
 foomatic-20200219-CItoh-M8510-m8510.ppd.gz	941764441d6cda079565f233fb536e12	14033
-foomatic-20200219-Compaq-IJ900-lxm5700m.ppd.gz	a4e34c3997af0ab4a88fd1013b965206	99537
-foomatic-20200219-DEC-DECWriter_500i-djet500.ppd.gz	5e404c72704e88d8c252176d67f16da4	15276
-foomatic-20200219-DEC-LJ250-declj250.ppd.gz	9e78fe394a071879c3cbe5749d1f030a	36623
-foomatic-20200219-Dell-M5200-Postscript.ppd.gz	0e63c9f881466ebd86535d522c8ed6ce	212252
-foomatic-20200219-Epson-ActionPrinter_3250-ap3250.ppd.gz	26031faf80d41ce67ef52c888865fea1	17833
-foomatic-20200219-Epson-AL-2600-Postscript-Epson.ppd.gz	5c618fa8aa51fe88be1dec44df9045b7	199256
-foomatic-20200219-Epson-AL-C1900_PS3-Postscript-Epson.ppd.gz	4a544231e307dffa73c2b5f76c96c8fe	199186
-foomatic-20200219-Epson-AL-C2000_PS3-Postscript-Epson.ppd.gz	b66c12f25eea5836b5f756b17c960a37	198464
-foomatic-20200219-Epson-AL-C2600-Postscript-Epson.ppd.gz	f097b114a9b61c378ed14f644d4adb7c	199269
-foomatic-20200219-Epson-AL-C2800-Postscript-Epson.ppd.gz	56e06e4b4b078ff6423e36ced9bd8b56	199047
-foomatic-20200219-Epson-AL-C3800-Postscript-Epson.ppd.gz	54910277cac81270b7f730c7f9b7c0f4	199047
-foomatic-20200219-Epson-AL-C4000_PS3-Postscript-Epson.ppd.gz	f64a3ed3011e8d6641b2c14ac232acf7	199289
-foomatic-20200219-Epson-AL-C4100-Postscript-Epson.ppd.gz	a39c40be4c1c38166895b58393c625b5	199103
-foomatic-20200219-Epson-AL-C4200-Postscript-Epson.ppd.gz	e824e0c7e35c57f104ee6db2f3f239fc	199259
-foomatic-20200219-Epson-AL-C8500-ljet4.ppd.gz	a4010572b3e6ad7dcb9c817449d7fee9	42124
-foomatic-20200219-Epson-AL-C8600_PS3-Postscript-Epson.ppd.gz	f22d400780334d6028c06cbe8650f0b5	199321
-foomatic-20200219-Epson-AL-C9100-Postscript-Epson.ppd.gz	20f8270778c6d7bb9c1ee7251fc2ddec	199522
-foomatic-20200219-Epson-AL-C9200-Postscript-Epson.ppd.gz	0d0d894202e2be64afc9626390ec9a8a	199005
-foomatic-20200219-Epson-AL-CX21-Postscript-Epson.ppd.gz	7d7f86c032515b901952ed494cb2af56	198395
-foomatic-20200219-Epson-AL-M2000-Postscript-Epson.ppd.gz	b90073a0892d29e89bd2264403cfd1e9	188169
-foomatic-20200219-Epson-AL-M2300-eplaser.ppd.gz	0f78881885cb17e6bd8e7eadea0fedeb	1678897
-foomatic-20200219-Epson-AL-M2310-eplaser.ppd.gz	0f78881885cb17e6bd8e7eadea0fedeb	1678897
-foomatic-20200219-Epson-AL-M2400-Postscript-Epson.ppd.gz	0bb4b3a8d289a283893356065922ddc3	188181
-foomatic-20200219-Epson-AL-M4000-Postscript-Epson.ppd.gz	a4f8ee2cbf193787d6ceaa9bd10a72ba	214022
-foomatic-20200219-Epson-AL-M8000-Postscript-Epson.ppd.gz	82bb29c145c0f9d57b1117171604e9bd	213839
-foomatic-20200219-Epson-AL-MX20-Postscript-Epson.ppd.gz	a58e8851eb305365a31cd2aeb7b868b6	188039
+foomatic-20200219-Canon-BJ-100-bj200.ppd.gz	f4845551e13dc7d7c2791e7d717f371e	30651
+foomatic-20200219-Canon-BJ-10e-bj10e.ppd.gz	ca9cc051e46814708abffedda30e312a	28849
+foomatic-20200219-Canon-BJ-330-bj200.ppd.gz	f4845551e13dc7d7c2791e7d717f371e	30651
+foomatic-20200219-Canon-BJC-1000-bjc600.ppd.gz	957db51dd8b109096599382869417e56	80446
+foomatic-20200219-Canon-BJC-210-bjc600.ppd.gz	957db51dd8b109096599382869417e56	80446
+foomatic-20200219-Canon-BJC-210SP-bj200.ppd.gz	f4845551e13dc7d7c2791e7d717f371e	30651
+foomatic-20200219-Canon-BJC-4550-bjc800.ppd.gz	4cb509567d93a59d830161d70aa7b9fa	80445
+foomatic-20200219-Canon-BJC-800-bjc800.ppd.gz	4cb509567d93a59d830161d70aa7b9fa	80445
+foomatic-20200219-Canon-LBP-4U-lbp8.ppd.gz	e570119398c9b3962c312920180a7bf5	39270
+foomatic-20200219-Canon-LIPS-III-lips3.ppd.gz	2242fab8cf7fba9df093bc6dda9ee639	18604
+foomatic-20200219-Canon-LIPS-IIplus-lips2p.ppd.gz	a03e4d1ba6aee056ff65e1c8566588e0	12813
+foomatic-20200219-Citizen-ProJet_IIc-cdj500.ppd.gz	37ab516ad22e67b76652c8ef6c7680c4	38144
+foomatic-20200219-Compaq-IJ900-lxm5700m.ppd.gz	e71ba580e989c72e9d526fcfaa1dc643	100265
+foomatic-20200219-DEC-LJ250-declj250.ppd.gz	888e7a55fa91ad303fe15387da331b32	36975
+foomatic-20200219-Epson-AL-M2300-eplaser.ppd.gz	742c62f021fee9190caa6a4e21d5c7ad	1679431
+foomatic-20200219-Epson-AL-M2310-eplaser.ppd.gz	742c62f021fee9190caa6a4e21d5c7ad	1679431
+foomatic-20200219-Epson-ActionPrinter_3250-ap3250.ppd.gz	c8e74bdae5fca22a9563e872bf77e043	18019
 foomatic-20200219-Epson-Dot_Matrix-epsonc.ppd.gz	9c5c79061c314efa74b103e6a3bd58d3	2807
-foomatic-20200219-Epson-EPL-5800-ljet4.ppd.gz	a4010572b3e6ad7dcb9c817449d7fee9	42124
-foomatic-20200219-Epson-EPL-5900_PS3-Postscript-Epson.ppd.gz	c1d9366cba7314ea31c965b1db017ec9	239846
-foomatic-20200219-Epson-EPL-6100_PS3-Postscript-Epson.ppd.gz	c1d9366cba7314ea31c965b1db017ec9	239846
-foomatic-20200219-Epson-EPL-6200-Postscript-Epson.ppd.gz	1ac7d1905d2d6872c65263ab529cb1c5	198619
-foomatic-20200219-Epson-EPL-7100-laserjet.ppd.gz	332abc3788d34ae2f95670c4015577e6	48408
-foomatic-20200219-Epson-EPL-N2120-ljet4.ppd.gz	a4010572b3e6ad7dcb9c817449d7fee9	42124
-foomatic-20200219-Epson-EPL-N2500_PS3-Postscript-Epson.ppd.gz	e9eff6e72643f783f3965d2d21bebdbd	240396
-foomatic-20200219-Epson-EPL-N2550-Postscript-Epson.ppd.gz	6a49782297c4e56667e95e6086b395de	213584
-foomatic-20200219-Epson-EPL-N2700-Postscript-Epson.ppd.gz	d1341466bef92ca3f187e4946b6cbb07	240707
-foomatic-20200219-Epson-EPL-N3000-Postscript-Epson.ppd.gz	00f4f644a7fd464628ae55ed59374469	240726
-foomatic-20200219-Epson-EPL-N7000-Postscript-Epson.ppd.gz	b6172c8dca6e5d4d3b6008a79f41b70f	240634
-foomatic-20200219-Epson-LP-1800-eplaser-jp.ppd.gz	cca87398b021056a4970921ddba3a2e3	1678895
-foomatic-20200219-Epson-LP-1900-eplaser-jp.ppd.gz	3336127b7a1d4fd1ec4c1fb8fadf149a	1678895
-foomatic-20200219-Epson-LP-2200-eplaser-jp.ppd.gz	b90665eada754ff9b0a3758112e65179	1678895
-foomatic-20200219-Epson-LP-2400-eplaser-jp.ppd.gz	e91a07baed0e0ff579e7f60a5a010ad6	1678895
-foomatic-20200219-Epson-LP-2500-eplaser-jp.ppd.gz	4e1c8d3bdab296fa7376636c7d246d0f	1678895
-foomatic-20200219-Epson-LP-3000C-eplaser-jp.ppd.gz	459e9a2ed5957884ba7b6eb367ca430b	7286467
-foomatic-20200219-Epson-LP-7500-eplaser-jp.ppd.gz	3944c31e14e4f9e0fd2c8d1f93bc4145	1678895
-foomatic-20200219-Epson-LP-7700-eplaser-jp.ppd.gz	4b8f6c719b4091c0c7dfa057250d79c0	1678895
-foomatic-20200219-Epson-LP-7900-eplaser-jp.ppd.gz	b9a22650b1ab13926c914aa24b016d87	1678895
-foomatic-20200219-Epson-LP-8000C-eplaser-jp.ppd.gz	66402f3c6ef03aaa336bacacd41eb50b	7286447
-foomatic-20200219-Epson-LP_8000-lp8000.ppd.gz	4d14051d86c35b76292fb025416216bb	24924
-foomatic-20200219-Epson-LP-8100-eplaser-jp.ppd.gz	f1e44dbeb806ebccc653bab1249e8173	1678895
-foomatic-20200219-Epson-LP-8200C-eplaser-jp.ppd.gz	7cb252c9a324f8f07668ce26b1cb2134	7286467
-foomatic-20200219-Epson-LP-8300C-eplaser-jp.ppd.gz	ea835e4b20a8d083b1205e07c9f383e7	7286467
-foomatic-20200219-Epson-LP-8300CPD-Postscript-Epson.ppd.gz	cd43c30a59555db5d0cfaad0f1afabbb	198490
-foomatic-20200219-Epson-LP-8300F-eplaser-jp.ppd.gz	bb7d9f7a2a35f1404f48f5c1232cbf75	1678897
-foomatic-20200219-Epson-LP-8400F-eplaser-jp.ppd.gz	f28f4714465388a6dbc2e06084e992ca	1678897
-foomatic-20200219-Epson-LP-8500C-eplaser-jp.ppd.gz	a5a7058dcff60a1e3532ef1b749f93d3	7286467
-foomatic-20200219-Epson-LP-8500CPD-Postscript-Epson.ppd.gz	0328a27928d5a452c0f0d38ee2070960	198469
-foomatic-20200219-Epson-LP-8600-eplaser-jp.ppd.gz	999a9545548ef1e82c5531b956b76c71	1678895
-foomatic-20200219-Epson-LP-8600F-eplaser-jp.ppd.gz	613e151ce5152d3924b31274946b34d1	1678897
-foomatic-20200219-Epson-LP-8700-eplaser-jp.ppd.gz	2f85be7ef7106aeaf64c73bbdbf26e51	1678895
-foomatic-20200219-Epson-LP-8800C-eplaser-jp.ppd.gz	96561d2a574f07a836cab8462ae920b7	7286467
-foomatic-20200219-Epson-LP-8800CPS-Postscript-Epson.ppd.gz	3c6d4b599abb80ae89e7144fb91803f2	198932
-foomatic-20200219-Epson-LP-8900-eplaser-jp.ppd.gz	45c12fa7d8b5130fbe7d7805934bf9a8	1678895
-foomatic-20200219-Epson-LP-9000B-eplaser-jp.ppd.gz	2f2bbac523c2628128182b17f4dde105	1678897
-foomatic-20200219-Epson-LP-9000C-eplaser-jp.ppd.gz	ac0630beb816d13dcca52bce23a4bc59	7286467
-foomatic-20200219-Epson-LP-9100-eplaser-jp.ppd.gz	855883596cc92849129f4f79c775bca1	1678895
-foomatic-20200219-Epson-LP-9100PS3-Postscript-Epson.ppd.gz	d1d7cc3a832fecfe52674d558a0876b2	240233
-foomatic-20200219-Epson-LP-9200B-eplaser-jp.ppd.gz	f8dca0f6c49edf8f641d0addf0039495	1678897
-foomatic-20200219-Epson-LP-9200C-Postscript-Epson.ppd.gz	82a6d81df160072efc69447117341e73	198673
-foomatic-20200219-Epson-LP-9300-eplaser-jp.ppd.gz	67e40531347f5883c530cd1e207a77df	1678895
-foomatic-20200219-Epson-LP-9400-eplaser-jp.ppd.gz	72089691029d2ac3e1d133f62c4418cf	1678895
-foomatic-20200219-Epson-LP-9500C-eplaser-jp.ppd.gz	8908a4bdc44b479e16a7179c68edc723	7286467
-foomatic-20200219-Epson-LP-9500CPS-Postscript-Epson.ppd.gz	b18264bda7cc48cf9dcce8e0e3e2e87b	198992
-foomatic-20200219-Epson-LP-9600-eplaser-jp.ppd.gz	f074ce00655b5929113d6241542c7310	1678895
-foomatic-20200219-Epson-LP-9600S-eplaser-jp.ppd.gz	355a95f88467cbd63e503c5c48d89075	1678897
-foomatic-20200219-Epson-LP-9600SPD-Postscript-Epson.ppd.gz	a783b980e3291b371f2e9f6c2914f7a0	214076
-foomatic-20200219-Epson-LP-9800C-Postscript-Epson.ppd.gz	03dc4b194c57ba29ae157d46254050de	198864
-foomatic-20200219-Epson-LP-M5000-eplaser-jp.ppd.gz	a5a7058dcff60a1e3532ef1b749f93d3	7286467
-foomatic-20200219-Epson-LP-M5300-eplaser-jp.ppd.gz	a5a7058dcff60a1e3532ef1b749f93d3	7286467
-foomatic-20200219-Epson-LP-M6000-eplaser-jp.ppd.gz	a5a7058dcff60a1e3532ef1b749f93d3	7286467
-foomatic-20200219-Epson-LP-S210-eplaser-jp.ppd.gz	2f85be7ef7106aeaf64c73bbdbf26e51	1678895
-foomatic-20200219-Epson-LP-S3000-eplaser-jp.ppd.gz	2f85be7ef7106aeaf64c73bbdbf26e51	1678895
-foomatic-20200219-Epson-LP-S300-eplaser-jp.ppd.gz	2f85be7ef7106aeaf64c73bbdbf26e51	1678895
-foomatic-20200219-Epson-LP-S310-eplaser-jp.ppd.gz	2f85be7ef7106aeaf64c73bbdbf26e51	1678895
-foomatic-20200219-Epson-LP-S3200-eplaser-jp.ppd.gz	2f85be7ef7106aeaf64c73bbdbf26e51	1678895
-foomatic-20200219-Epson-LP-S3500-eplaser-jp.ppd.gz	2f85be7ef7106aeaf64c73bbdbf26e51	1678895
-foomatic-20200219-Epson-LP-S4000-eplaser-jp.ppd.gz	2f85be7ef7106aeaf64c73bbdbf26e51	1678895
-foomatic-20200219-Epson-LP-S4200-eplaser-jp.ppd.gz	2f85be7ef7106aeaf64c73bbdbf26e51	1678895
-foomatic-20200219-Epson-LP-S4500-eplaser-jp.ppd.gz	39ae638ef52cfc168ef792cc1394ae2e	1678897
-foomatic-20200219-Epson-LP-S6500-eplaser-jp.ppd.gz	f2a0a77cf6f35848ec1e5b524e1b9dc0	7286467
-foomatic-20200219-Epson-LP-S7500-eplaser-jp.ppd.gz	a5a7058dcff60a1e3532ef1b749f93d3	7286467
-foomatic-20200219-Epson-LP-S8100-eplaser-jp.ppd.gz	e90951a40e45f155c4281c7e9b3e301c	7286455
+foomatic-20200219-Epson-LP-1800-eplaser-jp.ppd.gz	31903cc3ee70faae2e1b6e1a853e943f	1679429
+foomatic-20200219-Epson-LP-1900-eplaser-jp.ppd.gz	d9d2741f2a6b5bd198c5dec3b38e61cf	1679429
+foomatic-20200219-Epson-LP-2200-eplaser-jp.ppd.gz	a1d3c31416cac984b2c488cc73f1c920	1679429
+foomatic-20200219-Epson-LP-2400-eplaser-jp.ppd.gz	f8ab37bcef412d8c09dbae33e48d3697	1679429
+foomatic-20200219-Epson-LP-2500-eplaser-jp.ppd.gz	fae2add2457233951e962795a440c4d5	1679429
+foomatic-20200219-Epson-LP-3000C-eplaser-jp.ppd.gz	87d94976beff9798e6a5a48b98ec7221	7287056
+foomatic-20200219-Epson-LP-7500-eplaser-jp.ppd.gz	eb01beef8cd61d0c01f05738d51ae157	1679429
+foomatic-20200219-Epson-LP-7700-eplaser-jp.ppd.gz	ab2c26e49ebec01f6cabde13293f023d	1679429
+foomatic-20200219-Epson-LP-7900-eplaser-jp.ppd.gz	196a99a24fef6471f47bd6d805e4373a	1679429
+foomatic-20200219-Epson-LP-8000C-eplaser-jp.ppd.gz	bc77dee87d8431b5a8a916fbaf90ca01	7287036
+foomatic-20200219-Epson-LP-8100-eplaser-jp.ppd.gz	823b2a71f487b7e3edc6346ba082cc52	1679429
+foomatic-20200219-Epson-LP-8200C-eplaser-jp.ppd.gz	099a4c31e08d17a12396ecc4e86ff6cf	7287056
+foomatic-20200219-Epson-LP-8300C-eplaser-jp.ppd.gz	2d5dced182a7c5d93895d1a320ef96c8	7287056
+foomatic-20200219-Epson-LP-8300F-eplaser-jp.ppd.gz	252e39ce2bd0ba444b4d00c3f74c00e9	1679431
+foomatic-20200219-Epson-LP-8400F-eplaser-jp.ppd.gz	3554929f19722a47a9a029dbf0760a9a	1679431
+foomatic-20200219-Epson-LP-8500C-eplaser-jp.ppd.gz	ce26e03366e9e1d8c6286192cbba1063	7287056
+foomatic-20200219-Epson-LP-8600-eplaser-jp.ppd.gz	b7cbf51d9d408cb9e6a2f90fb76967b1	1679429
+foomatic-20200219-Epson-LP-8600F-eplaser-jp.ppd.gz	4ad77f1a657856777f9690a39e7d7cbc	1679431
+foomatic-20200219-Epson-LP-8700-eplaser-jp.ppd.gz	6a3fe72bdc4814bf8b8fbc46204b9254	1679429
+foomatic-20200219-Epson-LP-8800C-eplaser-jp.ppd.gz	b67f2d52917c8614807fab9af039505a	7287056
+foomatic-20200219-Epson-LP-8900-eplaser-jp.ppd.gz	f4330a9a306830b51f193e9ebbedb2e4	1679429
+foomatic-20200219-Epson-LP-9000B-eplaser-jp.ppd.gz	e6b92cc8240b3511204a471bc860ff63	1679431
+foomatic-20200219-Epson-LP-9000C-eplaser-jp.ppd.gz	a5e91b17fa12d1093bc663ab21f0123b	7287056
+foomatic-20200219-Epson-LP-9100-eplaser-jp.ppd.gz	2aeb53aef0c486402495b46c5f4bba39	1679429
+foomatic-20200219-Epson-LP-9200B-eplaser-jp.ppd.gz	e193e92fd6d7b396896cc611e2b9e23d	1679431
+foomatic-20200219-Epson-LP-9300-eplaser-jp.ppd.gz	269cd405c7c46708c9784d03f9898ee9	1679429
+foomatic-20200219-Epson-LP-9400-eplaser-jp.ppd.gz	c04916a2ea12a16c7703c4864ef95b8b	1679429
+foomatic-20200219-Epson-LP-9500C-eplaser-jp.ppd.gz	6cce075555911b46c51a85c6acf97d2e	7287056
+foomatic-20200219-Epson-LP-9600-eplaser-jp.ppd.gz	130d0c85d975af0234f441d7217583a9	1679429
+foomatic-20200219-Epson-LP-9600S-eplaser-jp.ppd.gz	425ae76461ddb020aaff78dc6019f547	1679431
+foomatic-20200219-Epson-LP-M5000-eplaser-jp.ppd.gz	ce26e03366e9e1d8c6286192cbba1063	7287056
+foomatic-20200219-Epson-LP-M5300-eplaser-jp.ppd.gz	ce26e03366e9e1d8c6286192cbba1063	7287056
+foomatic-20200219-Epson-LP-M6000-eplaser-jp.ppd.gz	ce26e03366e9e1d8c6286192cbba1063	7287056
+foomatic-20200219-Epson-LP-S210-eplaser-jp.ppd.gz	6a3fe72bdc4814bf8b8fbc46204b9254	1679429
+foomatic-20200219-Epson-LP-S300-eplaser-jp.ppd.gz	6a3fe72bdc4814bf8b8fbc46204b9254	1679429
+foomatic-20200219-Epson-LP-S3000-eplaser-jp.ppd.gz	6a3fe72bdc4814bf8b8fbc46204b9254	1679429
+foomatic-20200219-Epson-LP-S310-eplaser-jp.ppd.gz	6a3fe72bdc4814bf8b8fbc46204b9254	1679429
+foomatic-20200219-Epson-LP-S3200-eplaser-jp.ppd.gz	6a3fe72bdc4814bf8b8fbc46204b9254	1679429
+foomatic-20200219-Epson-LP-S3500-eplaser-jp.ppd.gz	6a3fe72bdc4814bf8b8fbc46204b9254	1679429
+foomatic-20200219-Epson-LP-S4000-eplaser-jp.ppd.gz	6a3fe72bdc4814bf8b8fbc46204b9254	1679429
+foomatic-20200219-Epson-LP-S4200-eplaser-jp.ppd.gz	6a3fe72bdc4814bf8b8fbc46204b9254	1679429
+foomatic-20200219-Epson-LP-S4500-eplaser-jp.ppd.gz	56ee1b1a579442a5266518aa9556be22	1679431
+foomatic-20200219-Epson-LP-S6500-eplaser-jp.ppd.gz	1444b16a54af8a71014cf16251682c08	7287056
+foomatic-20200219-Epson-LP-S7500-eplaser-jp.ppd.gz	ce26e03366e9e1d8c6286192cbba1063	7287056
+foomatic-20200219-Epson-LP-S8100-eplaser-jp.ppd.gz	ec425371fa588f2a084a5b1cdf904e8b	7287044
+foomatic-20200219-Epson-LP_8000-lp8000.ppd.gz	7b5b485c7c61c650c022ecf6bea1cfd2	25019
 foomatic-20200219-Epson-LX-300plus-ibmpro.ppd.gz	6b661238af63eb0c627d181a564dc9be	1861
-foomatic-20200219-Epson-MJ_520C-stcolor.ppd.gz	cdbd29ecd3755bb7eee3fd0e94e72326	86298
 foomatic-20200219-Epson-Stylus_Color_460-stcX.upp.ppd.gz	f2fe0c799d4120e5e0b0190fc5419db2	65316
 foomatic-20200219-Epson-Stylus_Color_660-stc600X.upp.ppd.gz	344f979da83e5700162996c7e6d5e09b	60817
-foomatic-20200219-Epson-Stylus_Color_777-stcanyX.upp.ppd.gz	d475e4821a36f7631085c2ea2d97c376	92280
 foomatic-20200219-Epson-Stylus_Color_850-stc800X.upp.ppd.gz	76cc87cb18780eaa6e4f3d60ae6e65d6	60118
 foomatic-20200219-Epson-Stylus_Color_II-stc2X.upp.ppd.gz	72340b7ce9658c55861954dec88eb376	64744
-foomatic-20200219-Epson-Stylus_Color-stcolor.ppd.gz	35808c9b93f9a4455d50ed2316bcbe6b	86298
-foomatic-20200219-Fuji_Xerox-DocuPrint_CM305_df-Postscript.ppd.gz	89d9fdf67511a23f4396dba6fc11100d	198135
-foomatic-20200219-Generic-ESC_P_Dot_Matrix_Printer-lq850.ppd.gz	ae98ea2fa55c9cfd5295fd2ae42c5e84	38020
+foomatic-20200219-Generic-ESC_P_Dot_Matrix_Printer-lq850.ppd.gz	e87eb44a726c4ea5b5de4c7e405788e0	38065
 foomatic-20200219-Generic-IBM-Compatible_Dot_Matrix_Printer-ibmpro.ppd.gz	6b661238af63eb0c627d181a564dc9be	1861
-foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-ljet4.ppd.gz	f6e5a9b3f4ccee676c38c6e0e3a541b4	42204
-foomatic-20200219-Generic-PostScript_Printer-Postscript.ppd.gz	0ea96f88932bad6cec39b1ce9f89c044	239191
-foomatic-20200219-Gestetner-10512-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79986
-foomatic-20200219-Gestetner-2212-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79966
-foomatic-20200219-Gestetner-3502-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79985
-foomatic-20200219-Gestetner-3532_4235g-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79958
-foomatic-20200219-Gestetner-6002-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79958
-foomatic-20200219-Gestetner-9002-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79986
-foomatic-20200219-Gestetner-C7010-Postscript-Gestetner.ppd.gz	1205349253fd3efd393373145a3fa7b7	240690
-foomatic-20200219-Gestetner-C7116-Postscript-Gestetner.ppd.gz	4cf1f50a4f876a4f60d96e8ad7b8c723	240472
-foomatic-20200219-Gestetner-C7425dn-Postscript-Gestetner.ppd.gz	6cf5371874301a06db2f35b68f2e3436	241946
-foomatic-20200219-Gestetner-C7435n-Postscript-Gestetner.ppd.gz	2f55730b9ae45d81261635d58a360294	242196
+foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-pxlcolor.ppd.gz	da8a63669af5504e85ee4a4e4e167702	79772
+foomatic-20200219-Gestetner-10512-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79991
+foomatic-20200219-Gestetner-2212-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79971
+foomatic-20200219-Gestetner-3502-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79990
+foomatic-20200219-Gestetner-3532_4235g-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79963
+foomatic-20200219-Gestetner-6002-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79963
+foomatic-20200219-Gestetner-9002-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79991
 foomatic-20200219-Gestetner-C7521n-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2793
 foomatic-20200219-Gestetner-C7526dn-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2803
-foomatic-20200219-Gestetner-C7528n-Postscript-Gestetner.ppd.gz	7c77d51b13e1fd130c1b0350db9f1033	242599
-foomatic-20200219-Gestetner-C7535n-Postscript-Gestetner.ppd.gz	24667d85dfb4ea7a074c158801bbd13f	242530
 foomatic-20200219-Gestetner-C7640nD-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
 foomatic-20200219-Gestetner-C8140ND-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2869
 foomatic-20200219-Gestetner-CS555-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2835
@@ -523,36 +329,30 @@
 foomatic-20200219-Gestetner-DSc1045-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2894
 foomatic-20200219-Gestetner-DSc1060-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2894
 foomatic-20200219-Gestetner-DSc1120-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2851
-foomatic-20200219-Gestetner-DSc1220ex-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
 foomatic-20200219-Gestetner-DSc1220-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
-foomatic-20200219-Gestetner-DSc1230ex-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
+foomatic-20200219-Gestetner-DSc1220ex-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
 foomatic-20200219-Gestetner-DSc1230-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
-foomatic-20200219-Gestetner-DSc1245ex-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
+foomatic-20200219-Gestetner-DSc1230ex-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
 foomatic-20200219-Gestetner-DSc1245-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
-foomatic-20200219-Gestetner-DSc1260ex-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
+foomatic-20200219-Gestetner-DSc1245ex-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
 foomatic-20200219-Gestetner-DSc1260-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
-foomatic-20200219-Gestetner-DSc224-Postscript-Gestetner.ppd.gz	ebd8813f7457faf53ce9301fb56ea580	240926
-foomatic-20200219-Gestetner-DSc328-Postscript-Gestetner.ppd.gz	e52642ae389b0d03130e1b576c7399d9	242306
-foomatic-20200219-Gestetner-DSc38-Postscript-Gestetner.ppd.gz	925b8628c16d28c89c49b6ef604608fc	241016
-foomatic-20200219-Gestetner-DSc38u-Postscript-Gestetner.ppd.gz	c447339bad99c5c10fe720fa133d76e1	242250
+foomatic-20200219-Gestetner-DSc1260ex-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
 foomatic-20200219-Gestetner-DSc424-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2816
-foomatic-20200219-Gestetner-DSc428-pxlcolor-Gestetner.ppd.gz	e369cc5b824b398966eea56015bacbf4	679710
+foomatic-20200219-Gestetner-DSc428-pxlcolor-Gestetner.ppd.gz	09d00a9e1015fed4489377db680b0c25	679715
 foomatic-20200219-Gestetner-DSm1525-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2798
 foomatic-20200219-Gestetner-DSm2525-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
 foomatic-20200219-Gestetner-DSm2540-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
+foomatic-20200219-Gestetner-DSm415-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79897
+foomatic-20200219-Gestetner-DSm615-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79904
+foomatic-20200219-Gestetner-DSm618-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79904
+foomatic-20200219-Gestetner-DSm618d-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79924
+foomatic-20200219-Gestetner-DSm622-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79944
+foomatic-20200219-Gestetner-DSm651-pxlmono-Gestetner.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79981
+foomatic-20200219-Gestetner-DSm725-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2753
+foomatic-20200219-Gestetner-DSm735_735G-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2772
 foomatic-20200219-Gestetner-DSm_2625-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
 foomatic-20200219-Gestetner-DSm_2640-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
 foomatic-20200219-Gestetner-DSm_2660-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
-foomatic-20200219-Gestetner-DSm415-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79892
-foomatic-20200219-Gestetner-DSm615-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79899
-foomatic-20200219-Gestetner-DSm618d-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79919
-foomatic-20200219-Gestetner-DSm618-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79899
-foomatic-20200219-Gestetner-DSm622-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79939
-foomatic-20200219-Gestetner-DSm651-pxlmono-Gestetner.ppd.gz	aa68534d59f2156b897e0c665897cc44	79976
-foomatic-20200219-Gestetner-DSm725-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2753
-foomatic-20200219-Gestetner-DSm735_735G-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2772
-foomatic-20200219-Gestetner-F9199_9199nf-Postscript-Gestetner.ppd.gz	30ebedde5e82876d02cd22c6e82ffda7	212665
-foomatic-20200219-Gestetner-GS1227-Postscript-Gestetner.ppd.gz	1ef4daa3fc8ed9c1394a364622b9828c	216915
 foomatic-20200219-Gestetner-GS3020-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
 foomatic-20200219-Gestetner-GS3030-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
 foomatic-20200219-Gestetner-GS3160-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
@@ -561,41 +361,41 @@
 foomatic-20200219-Gestetner-GWD5100-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2691
 foomatic-20200219-Gestetner-MP1100_DSm7110-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2797
 foomatic-20200219-Gestetner-MP1600_DSm716-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2715
-foomatic-20200219-Gestetner-MP_161_DSm416-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2708
-foomatic-20200219-Gestetner-MP_171-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2708
 foomatic-20200219-Gestetner-MP2000_DSm721d-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2735
-foomatic-20200219-Gestetner-MP_2001-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2759
 foomatic-20200219-Gestetner-MP2352_DSm923-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2773
 foomatic-20200219-Gestetner-MP2500_DSm625-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2754
+foomatic-20200219-Gestetner-MP3500_DSm735e-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2772
+foomatic-20200219-Gestetner-MP5500_DSm755-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2797
+foomatic-20200219-Gestetner-MPC1500_GS106-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2811
+foomatic-20200219-Gestetner-MPC2500_DSc525-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
+foomatic-20200219-Gestetner-MPC3500_DSc535-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
+foomatic-20200219-Gestetner-MP_161_DSm416-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2708
+foomatic-20200219-Gestetner-MP_171-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2708
+foomatic-20200219-Gestetner-MP_2001-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2759
 foomatic-20200219-Gestetner-MP_2501-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2759
 foomatic-20200219-Gestetner-MP_2510_DSm725e-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2753
 foomatic-20200219-Gestetner-MP_2550-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2772
 foomatic-20200219-Gestetner-MP_2851-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2772
 foomatic-20200219-Gestetner-MP_301-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2732
 foomatic-20200219-Gestetner-MP_305plus-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2732
-foomatic-20200219-Gestetner-MP3500_DSm735e-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2772
 foomatic-20200219-Gestetner-MP_4000-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2772
 foomatic-20200219-Gestetner-MP_4001-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2772
 foomatic-20200219-Gestetner-MP_4002-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2798
 foomatic-20200219-Gestetner-MP_401SPF-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2756
 foomatic-20200219-Gestetner-MP_402SPF-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2756
-foomatic-20200219-Gestetner-MP5500_DSm755-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2797
 foomatic-20200219-Gestetner-MP_6001-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2821
 foomatic-20200219-Gestetner-MP_6002-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2846
 foomatic-20200219-Gestetner-MP_6503-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2869
-foomatic-20200219-Gestetner-MPC1500_GS106-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2811
 foomatic-20200219-Gestetner-MP_C2050-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2869
 foomatic-20200219-Gestetner-MP_C2051-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2869
-foomatic-20200219-Gestetner-MPC2500_DSc525-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
 foomatic-20200219-Gestetner-MP_C2800-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2869
+foomatic-20200219-Gestetner-MP_C300-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2830
 foomatic-20200219-Gestetner-MP_C3001-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2869
 foomatic-20200219-Gestetner-MP_C3002-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2894
-foomatic-20200219-Gestetner-MP_C300-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2830
 foomatic-20200219-Gestetner-MP_C300SR-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2850
 foomatic-20200219-Gestetner-MP_C305-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2855
 foomatic-20200219-Gestetner-MP_C306Z-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2855
 foomatic-20200219-Gestetner-MP_C307-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2855
-foomatic-20200219-Gestetner-MPC3500_DSc535-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
 foomatic-20200219-Gestetner-MP_C4000-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2869
 foomatic-20200219-Gestetner-MP_C401-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2855
 foomatic-20200219-Gestetner-MP_C401SR-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2898
@@ -609,15 +409,8 @@
 foomatic-20200219-Gestetner-MP_CW2201-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2804
 foomatic-20200219-Gestetner-MP_W6700-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2737
 foomatic-20200219-Gestetner-MP_W7100-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2737
-foomatic-20200219-Gestetner-P7026-Postscript-Gestetner.ppd.gz	49eb38c649694adcbbf2873a277d9034	213318
 foomatic-20200219-Gestetner-P7031n-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2728
-foomatic-20200219-Gestetner-P7032-Postscript-Gestetner.ppd.gz	2043e90f0e58de8b0a656fe1f9cd6b09	213462
-foomatic-20200219-Gestetner-P7126-Postscript-Gestetner.ppd.gz	3384c9ca5991da158d54943ee1dc76bb	213343
-foomatic-20200219-Gestetner-P7132n-Postscript-Gestetner.ppd.gz	580cd1ce63bee1dd0df270bb11286322	214437
-foomatic-20200219-Gestetner-P7145-Postscript-Gestetner.ppd.gz	0523d4d34a32b40611d96e498f18d9cb	213444
 foomatic-20200219-Gestetner-P7245-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2772
-foomatic-20200219-Gestetner-P7325-Postscript-Gestetner.ppd.gz	88bf8a1e1a3388fde95c59cfcc24dc12	214262
-foomatic-20200219-Gestetner-P7431cn-Postscript-Gestetner.ppd.gz	c273b2df2cd266feb150fe778a38caa4	241657
 foomatic-20200219-Gestetner-P7527-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2721
 foomatic-20200219-Gestetner-P7535n-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2721
 foomatic-20200219-Gestetner-P7575-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2787
@@ -631,10 +424,11 @@
 foomatic-20200219-Gestetner-SP_C420DN-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2803
 foomatic-20200219-Gestetner-SP_C430DN-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2803
 foomatic-20200219-Gestetner-SP_W2470-PDF-Gestetner.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2691
+foomatic-20200219-HP-DesignJet_100plus-cdnj500.ppd.gz	28af171ecd9738e5333123f5d765bf05	34792
+foomatic-20200219-HP-OfficeJet_500-cdj550.ppd.gz	f5b9a16ecdf2d2eb8cfc53779a88baf9	45974
 foomatic-20200219-IBM-3853_JetPrinter-jetp3852.ppd.gz	ec08c599756064f59cee26794b5ad009	16241
-foomatic-20200219-IBM-4303_Network_Color_Printer-Postscript.ppd.gz	4866bd80bb16c5e788b0cbceba8805d3	238975
-foomatic-20200219-Imagen-ImPress-imagen.ppd.gz	a2e6340ed5179a244b5431e0cbf48d30	9537
-foomatic-20200219-InfoPrint-Pro_1107EX-pxlmono-InfoPrint.ppd.gz	aa68534d59f2156b897e0c665897cc44	79986
+foomatic-20200219-Imagen-ImPress-imagen.ppd.gz	426d33132e6a864d1c1f238011c8093f	9537
+foomatic-20200219-InfoPrint-Pro_1107EX-pxlmono-InfoPrint.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79991
 foomatic-20200219-Infotec-MP_201-PDF-Infotec.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2708
 foomatic-20200219-Infotec-MP_501-PDF-Infotec.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2756
 foomatic-20200219-Infotec-Pro_8100S-PDF-Infotec.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2869
@@ -646,86 +440,10 @@
 foomatic-20200219-Infotec-Pro_C7100-PDF-Infotec.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2966
 foomatic-20200219-Infotec-Pro_C7100S-PDF-Infotec.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2966
 foomatic-20200219-Infotec-SP_5300-PDF-Infotec.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2756
-foomatic-20200219-Kodak-IS_70_CPII-Postscript.ppd.gz	315b8e2bd5ed027c11aa8dccbfc122a4	196909
-foomatic-20200219-KONICA_MINOLTA-bizhub_1050eP-Postscript-KONICA_MINOLTA.ppd.gz	ed152bdcfcc530663cb1ae8c6714a1e2	217511
-foomatic-20200219-KONICA_MINOLTA-bizhub_500-Postscript-KONICA_MINOLTA.ppd.gz	6b640d71ae9d7fb2a3c6dd124842acd2	214883
-foomatic-20200219-KONICA_MINOLTA-bizhub_C250P-Postscript-KONICA_MINOLTA.ppd.gz	92f4d9022f8971958044b30a099e2e75	243214
-foomatic-20200219-KONICA_MINOLTA-bizhub_C252P-Postscript-KONICA_MINOLTA.ppd.gz	4964d406a5a0ac5c6d35d41c1a8a2dfc	243214
-foomatic-20200219-KONICA_MINOLTA-bizhub_C351-Postscript-KONICA_MINOLTA.ppd.gz	4be37c37bfe56e2c91a6d0407c6762d1	243231
-foomatic-20200219-KONICA_MINOLTA-bizhub_C352P-Postscript-KONICA_MINOLTA.ppd.gz	733d583b8b944b7c7e76d353df889431	243209
-foomatic-20200219-KONICA_MINOLTA-bizhub_C450P-Postscript-KONICA_MINOLTA.ppd.gz	4adc0991031b310883542d0523219958	243231
-foomatic-20200219-KONICA_MINOLTA-bizhub_C451-Postscript-KONICA_MINOLTA.ppd.gz	22ea4fa335126665940f5bf3abd9dcf4	243223
-foomatic-20200219-KONICA_MINOLTA-bizhub_C550-Postscript-KONICA_MINOLTA.ppd.gz	bb21b43d1501c4262e8fc5cde4f3bde2	243078
-foomatic-20200219-Kyocera-Ci-1100-Postscript-Kyocera.ppd.gz	1ceeb406a019f9b86482175b50c179f5	242380
-foomatic-20200219-Kyocera-CS-1650-Postscript-Kyocera.ppd.gz	65a47735118ef86d698f052957aed39f	214799
-foomatic-20200219-Kyocera-CS-2050-Postscript-Kyocera.ppd.gz	2ab6caf4fb8eaf5c28ce93707f84ac2a	214729
-foomatic-20200219-Kyocera-FS-1030D-Postscript-Kyocera.ppd.gz	cf746ac292a5710a4f37eb7739fbe4c6	214256
-foomatic-20200219-Kyocera-FS-1118MFP-Postscript-Kyocera.ppd.gz	152fe30a6e26af90237365c2ea1620e8	213952
-foomatic-20200219-Kyocera-FS-1200-Postscript-Kyocera.ppd.gz	049cdbee50f586e716c92c3ad6ced1f7	214230
-foomatic-20200219-Kyocera-FS-1700plus-Postscript-Kyocera.ppd.gz	5b928eec9d17ef87146a22fd13c424d5	214018
-foomatic-20200219-Kyocera-FS-1700-Postscript-Kyocera.ppd.gz	d33b05b297a911ec1b1a19092be8847e	214020
-foomatic-20200219-Kyocera-FS-1714M-Postscript-Kyocera.ppd.gz	a7e8e1ff5a6a60c9163d2677470db291	214256
-foomatic-20200219-Kyocera-FS-1800-Postscript-Kyocera.ppd.gz	d9448575a06569da35c7b0a1510546b1	215325
-foomatic-20200219-Kyocera-FS-1900-Postscript-Kyocera.ppd.gz	4e8c9e09ea2dd8b6481afa1da1163c91	214327
-foomatic-20200219-Kyocera-FS-1920-Postscript-Kyocera.ppd.gz	04b2e8dfa6a7e746c7540e13eaabdbaa	214340
-foomatic-20200219-Kyocera-FS-2000D-Postscript-Kyocera.ppd.gz	e863a45a205ceaac237af27e0f4e885b	214473
-foomatic-20200219-Kyocera-FS-3700plus-Postscript-Kyocera.ppd.gz	a8b645cbe1ba506ab15609acf404de1c	214018
-foomatic-20200219-Kyocera-FS-3700-Postscript-Kyocera.ppd.gz	ee580faf7a6a4750b0d2a8103f198428	214020
-foomatic-20200219-Kyocera-FS-3750-Postscript-Kyocera.ppd.gz	1ea5644225101b028b1dd34fea551c3e	214257
-foomatic-20200219-Kyocera-FS-3820N-Postscript-Kyocera.ppd.gz	e46064cd78a2d64d0df6256064285039	214340
-foomatic-20200219-Kyocera-FS-3830N-Postscript-Kyocera.ppd.gz	124d4f4bb543c693aab1f09ff997523e	214340
-foomatic-20200219-Kyocera-FS-3900DN-Postscript-Kyocera.ppd.gz	f54dc5688564ddd3f75ed05ef81c7670	214404
-foomatic-20200219-Kyocera-FS-4000DN-Postscript-Kyocera.ppd.gz	7f546cdbe60d4a6b62d46812969ee4a4	214405
-foomatic-20200219-Kyocera-FS-5800C-Postscript-Kyocera.ppd.gz	ecd77afd4eb9bb517794f85da1f25396	242501
-foomatic-20200219-Kyocera-FS-5900C-Postscript-Kyocera.ppd.gz	4ee3e21fbeda4f11a4bc8d12807b2129	242380
-foomatic-20200219-Kyocera-FS-600-Postscript-Kyocera.ppd.gz	145ea8272b1d6c222c84288707c25ba6	213692
-foomatic-20200219-Kyocera-FS-6020-Postscript-Kyocera.ppd.gz	6703e6da310752f92dc445f4201d8fdc	215262
-foomatic-20200219-Kyocera-FS-6026-Postscript-Kyocera.ppd.gz	3350d58681ffc65493426ece01505d4f	214264
-foomatic-20200219-Kyocera-FS-6300-Postscript-Kyocera.ppd.gz	3e83b734536cd3af498bfe5d1765ff52	214094
-foomatic-20200219-Kyocera-FS-6500plus-Postscript-Kyocera.ppd.gz	834a098fe38daac76417e5489faacb0f	187868
-foomatic-20200219-Kyocera-FS-6700-Postscript-Kyocera.ppd.gz	d096034d7b47d155612b251bc2a9f8c6	215021
-foomatic-20200219-Kyocera-FS-6750-Postscript-Kyocera.ppd.gz	ae706e9078bc11ab75144eab54b78313	215265
-foomatic-20200219-Kyocera-FS-680-Postscript-Kyocera.ppd.gz	fa84c78dfa5b6d6341c5b9a01f3f806d	213692
-foomatic-20200219-Kyocera-FS-6900-Postscript-Kyocera.ppd.gz	b91fbbddf07378e60c313a0e2a05be73	215092
-foomatic-20200219-Kyocera-FS-6950DN-Postscript-Kyocera.ppd.gz	71def362a5e1bfed8662eb1cf6970c99	214451
-foomatic-20200219-Kyocera-FS-7000-Postscript-Kyocera.ppd.gz	891fae000b94c054a2edfd1a8882cdc6	215598
-foomatic-20200219-Kyocera-FS-7028M-Postscript-Kyocera.ppd.gz	9b916ba013c3a16e98195a2655edac6d	215668
-foomatic-20200219-Kyocera-FS-8000C-Postscript-Kyocera.ppd.gz	389631d508d15893352d5dcacd53bc04	242773
-foomatic-20200219-Kyocera-FS-9000-Postscript-Kyocera.ppd.gz	c6ddf335f3a099209ea97a45543f1228	215669
-foomatic-20200219-Kyocera-FS-9100DN-Postscript-Kyocera.ppd.gz	139079f4fcdad5205d702cae4707a91e	215780
-foomatic-20200219-Kyocera-FS-920-Postscript-Kyocera.ppd.gz	9af68f834a74ab9e89bf4b73b615a813	213737
-foomatic-20200219-Kyocera-FS-C5015N-Postscript-Kyocera.ppd.gz	10a50c50eeec7643b5e5d676a1648892	241541
-foomatic-20200219-Kyocera-FS-C5016N-Postscript-Kyocera.ppd.gz	a23a65f65301a61c38976bf7b2bdda64	241178
-foomatic-20200219-Kyocera-FS-C5020N-Postscript-Kyocera.ppd.gz	669d0423525edad3bde5f13328632520	241580
-foomatic-20200219-Kyocera-FS-C5025N-Postscript-Kyocera.ppd.gz	b7cc97a86f20c9d8c01099ae07a9821f	241609
-foomatic-20200219-Kyocera-FS-C5030N-Postscript-Kyocera.ppd.gz	17ed4ba9dc5743f496ba59db37e1d3cc	241580
-foomatic-20200219-Kyocera-FS-C8008N-Postscript-Kyocera.ppd.gz	7eda1b30d58fbbe5b226cebf96685c64	241618
-foomatic-20200219-Kyocera-FS-C8026N-Postscript-Kyocera.ppd.gz	c15fba5ab222fa82f09452f8733d359d	241984
-foomatic-20200219-Kyocera-FS-C8100DNplus_KPDL-Postscript-Kyocera.ppd.gz	8abefba708da4e70f61e9b37e929b471	242326
-foomatic-20200219-Kyocera-FS-C8100DN-Postscript-Kyocera.ppd.gz	5ab6c1c72b2b459471501c5bff577ec1	242315
-foomatic-20200219-Kyocera-KM-1510-Postscript-Kyocera.ppd.gz	06fa87b48633f3bc70399e63cb45e44d	214065
-foomatic-20200219-Kyocera-KM-1530-Postscript-Kyocera.ppd.gz	cc6bc033aa97fa7c703f6731ebaf03a4	214131
-foomatic-20200219-Kyocera-KM-1810-Postscript-Kyocera.ppd.gz	64e0fa6052728a76fc373894a766619f	214065
-foomatic-20200219-Kyocera-KM-1820-Postscript-Kyocera.ppd.gz	69d3b61eb2c68f090a60fe256637ac25	214203
-foomatic-20200219-Kyocera-KM-2030-Postscript-Kyocera.ppd.gz	7952c10e442648f7ac393ffb7f6d8a7c	214415
-foomatic-20200219-Kyocera-KM-3050-Postscript-Kyocera.ppd.gz	d43f76bd6e49f4ee9f9644c86f9d3bbc	215462
-foomatic-20200219-Kyocera-KM-4230_5230-Postscript-Kyocera.ppd.gz	65839f2c154920739cca1002d5446294	213954
-foomatic-20200219-Kyocera-KM-4530-Postscript-Kyocera.ppd.gz	da97c951311c6de37278a0912e5966e8	216595
-foomatic-20200219-Kyocera-KM-5530-Postscript-Kyocera.ppd.gz	e6074e76b2c63901d24dccce4ca0e1eb	216661
-foomatic-20200219-Kyocera-KM-6030-Postscript-Kyocera.ppd.gz	2399a89de7a623891bc1c295f65b76e3	215558
-foomatic-20200219-Kyocera-KM-6230-Postscript-Kyocera.ppd.gz	7f594bcbf2fabd0cfbcc0020b685dde4	213998
-foomatic-20200219-Kyocera-KM-6330-Postscript-Kyocera.ppd.gz	0587423d898331422bf116b10c9f76f2	216661
-foomatic-20200219-Kyocera-KM-C2520-Postscript-Kyocera.ppd.gz	3e6c522720f506c614e90fb1b2a7d458	242386
-foomatic-20200219-Kyocera-KM-C2630-Postscript-Kyocera.ppd.gz	1cac3b867fea5e1704b293d11ded7361	241984
-foomatic-20200219-Kyocera-KM-C830-Postscript-Kyocera.ppd.gz	854f0c81a2c9a26560271d536ec52942	243612
-foomatic-20200219-Kyocera-KM-C850-Postscript-Kyocera.ppd.gz	a367c2adb0eb20de0a337c0212573abf	242223
 foomatic-20200219-Lanier-MP_C501-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
 foomatic-20200219-Lanier-P_501-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2756
-foomatic-20200219-Lanier-SG3110DN-pxlcolor-Lanier.ppd.gz	2e32a01e82df2fbba2801922566941ed	177373
-foomatic-20200219-Lanier-SG3110SFNw-pxlcolor-Lanier.ppd.gz	2e32a01e82df2fbba2801922566941ed	177373
-foomatic-20200219-Lanier-SP_3400N-Postscript-Lanier.ppd.gz	bb0ec6374d66ef80c6e6e4925b119b04	213497
-foomatic-20200219-Lanier-SP_3410DN-Postscript-Lanier.ppd.gz	5a45ddc7653295e6a3cefa959f8135c5	213598
-foomatic-20200219-Lanier-SP_3600DN-Postscript-Lanier.ppd.gz	a3530bae9658cb41e8db9b68f591687f	214657
+foomatic-20200219-Lanier-SG3110DN-pxlcolor-Lanier.ppd.gz	83af6d9ecbea7000242d5479e59f9736	177378
+foomatic-20200219-Lanier-SG3110SFNw-pxlcolor-Lanier.ppd.gz	83af6d9ecbea7000242d5479e59f9736	177378
 foomatic-20200219-Lanier-SP_4310N-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2729
 foomatic-20200219-Lanier-SP_4510DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2756
 foomatic-20200219-Lanier-SP_5200DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2729
@@ -735,722 +453,137 @@
 foomatic-20200219-Lanier-SP_6430DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2779
 foomatic-20200219-Lanier-SP_8300DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2798
 foomatic-20200219-Lanier-SP_8400DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2845
-foomatic-20200219-Lanier-SP_C221N-Postscript-Lanier.ppd.gz	90c56cd7e2d78a889404675d3f70b4a7	241379
-foomatic-20200219-Lanier-SP_C222DN-Postscript-Lanier.ppd.gz	1b4e3bdf9c27f4a747537585e41807e9	241480
-foomatic-20200219-Lanier-SP_C222SF-Postscript-Lanier.ppd.gz	e8395f6c9fca52ce36e1d0696e2bb420	241732
-foomatic-20200219-Lanier-SP_C232SF-Postscript-Lanier.ppd.gz	46fe73b43cf356a751e8e0615c248be1	241731
-foomatic-20200219-Lanier-SP_C311N-Postscript-Lanier.ppd.gz	46afadf03ceaff4016871103c47c4100	241379
-foomatic-20200219-Lanier-SP_C312DN-Postscript-Lanier.ppd.gz	754734a75f106385d8b6b67507de02fe	241481
 foomatic-20200219-Lanier-SP_C340DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2829
 foomatic-20200219-Lanier-SP_C342DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2828
 foomatic-20200219-Lanier-SP_C352DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2828
-foomatic-20200219-Lanier-SP_C360DNw-Postscript-Lanier.ppd.gz	c200f6457867c8fd1e13552146e1e6f3	243659
-foomatic-20200219-Lanier-SP_C360SFNw-Postscript-Lanier.ppd.gz	50914d551a5cf07cab9f5d6e1b2b3935	244363
 foomatic-20200219-Lanier-SP_C435DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2828
 foomatic-20200219-Lanier-SP_C730DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2828
 foomatic-20200219-Lanier-SP_C830DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2894
 foomatic-20200219-Lanier-SP_C840DN-PDF-Lanier.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2917
-foomatic-20200219-Lexmark-1020_Business-pcl3.ppd.gz	4fb6b1e69a3da5f6ba7bcced09856ccf	27082
-foomatic-20200219-Lexmark-4039_10plus-Postscript.ppd.gz	d0b919e90409b4bca1484f490bc2e408	212468
-foomatic-20200219-Lexmark-C2132-Postscript-Lexmark.ppd.gz	65393076295dc3d091b82c9f6801cefd	359425
-foomatic-20200219-Lexmark-C500n-Postscript.ppd.gz	abb7eb9cc99f3ec0e354251dc9800e06	239191
-foomatic-20200219-Lexmark-C510b-Postscript.ppd.gz	822ee460cdff29fd5ec427b8d714c224	212468
-foomatic-20200219-Lexmark-C510-Postscript-Lexmark.ppd.gz	45a0bd259fa90dcc43bf87271382041b	198918
-foomatic-20200219-Lexmark-C520-Postscript-Lexmark.ppd.gz	989aa9821886d411e555c0bb9c78d97a	359070
-foomatic-20200219-Lexmark-C522-Postscript-Lexmark.ppd.gz	4b8ab1cc5719203ba1d358ca83422bb3	359212
-foomatic-20200219-Lexmark-C524-Postscript-Lexmark.ppd.gz	85eee6831bb4e4bf88befab5218037b4	359398
-foomatic-20200219-Lexmark-C540-Postscript-Lexmark.ppd.gz	cc1d9b5b206f70e50e646c668f243afa	359202
-foomatic-20200219-Lexmark-C543-Postscript-Lexmark.ppd.gz	67ebc6480fa38b7acc5dc11d96a0c0c6	359199
-foomatic-20200219-Lexmark-C544-Postscript-Lexmark.ppd.gz	4e3e5fdb0e28714a827c23ca66e1afb4	359346
-foomatic-20200219-Lexmark-C546-Postscript-Lexmark.ppd.gz	6f97adad660d73c6b67a2ce3824e43f7	359346
-foomatic-20200219-Lexmark-C720n-pxlcolor.ppd.gz	f806e59260ec92ab078d06457825b9c6	679510
-foomatic-20200219-Lexmark-C734-Postscript-Lexmark.ppd.gz	d12198172bf96277f6b77624d5286368	359312
-foomatic-20200219-Lexmark-C736-Postscript-Lexmark.ppd.gz	d12198172bf96277f6b77624d5286368	359312
-foomatic-20200219-Lexmark-C750-Postscript-Lexmark.ppd.gz	79eb815e66608c61ea743dc25303b839	198664
-foomatic-20200219-Lexmark-C752-Postscript-Lexmark.ppd.gz	aa68d3a8aac8cf571988c1ce4b98742c	199441
-foomatic-20200219-Lexmark-C780-Postscript-Lexmark.ppd.gz	54544f60e5bf5a3f191b8bd3540c0dc2	359932
-foomatic-20200219-Lexmark-C782-Postscript-Lexmark.ppd.gz	5a49f60f62d84837950c3ca4abbe5890	360309
-foomatic-20200219-Lexmark-C910-Postscript-Lexmark.ppd.gz	fbf5e1b7a52f662188f4840b6a5cd9dd	198589
-foomatic-20200219-Lexmark-C912-Postscript-Lexmark.ppd.gz	81dcff283bfb974477b8fc791f55ba5f	198589
-foomatic-20200219-Lexmark-C930-Postscript-Lexmark.ppd.gz	10f6769ca3aaca7c8975edff81466f0f	242308
-foomatic-20200219-Lexmark-C935-Postscript-Lexmark.ppd.gz	10f6769ca3aaca7c8975edff81466f0f	242308
-foomatic-20200219-Lexmark-E238-pxlmono.ppd.gz	a6771d9200d30ea72bfaf1e1553882e1	79767
-foomatic-20200219-Lexmark-E260dn-Postscript-Lexmark.ppd.gz	ddc49e6d207d199c16583e3036810c9d	240372
-foomatic-20200219-Lexmark-E350d-Postscript-Lexmark.ppd.gz	d610ef584f4842978b069a2fa4b962cc	213344
-foomatic-20200219-Lexmark-E360dn-Postscript-Lexmark.ppd.gz	b6f022a0d008976a3f0b55fa76d14791	240444
-foomatic-20200219-Lexmark-EG460dn-Postscript-Lexmark.ppd.gz	b6f022a0d008976a3f0b55fa76d14791	240444
-foomatic-20200219-Lexmark-Optra_Color_1200-Postscript.ppd.gz	6645a83a462f783ec1b7809e877cae20	239191
-foomatic-20200219-Lexmark-T650-Postscript-Lexmark.ppd.gz	e4069a84e785b22bf4f8a114f47d2afa	240912
-foomatic-20200219-Lexmark-T656-Postscript-Lexmark.ppd.gz	429a9d0f336be9aa56937fb29579a3d7	240836
-foomatic-20200219-Lexmark-W850-Postscript-Lexmark.ppd.gz	007ab0b03728f3b14cd635f529c5db39	240823
-foomatic-20200219-Lexmark-X203n-Postscript-Lexmark.ppd.gz	863b194551c2346266140756711d6b69	239659
-foomatic-20200219-Lexmark-X264dn-Postscript-Lexmark.ppd.gz	754ad38ab76004998307a46137e39139	240218
-foomatic-20200219-Lexmark-X363dn-Postscript-Lexmark.ppd.gz	3922ab2bf14f9101483d507b42703668	240218
-foomatic-20200219-Lexmark-X463de-Postscript-Lexmark.ppd.gz	779beab1a1d88a5b27e0ea33286eb62f	240216
-foomatic-20200219-Lexmark-X543-Postscript-Lexmark.ppd.gz	954c8cf6ad35544fa56ab29487c88af9	359199
-foomatic-20200219-Lexmark-X544-Postscript-Lexmark.ppd.gz	5135c80176ab40a9457fb18ea6a083da	359346
-foomatic-20200219-Lexmark-X546-Postscript-Lexmark.ppd.gz	cd3b868b472631e848a626cbb3f41bed	359346
-foomatic-20200219-Lexmark-X734de-Postscript-Lexmark.ppd.gz	660912557717a201fe5664820b9ba283	359242
-foomatic-20200219-Lexmark-X860de-Postscript-Lexmark.ppd.gz	5c4264d8c2cea23a601006a2a309638a	240901
-foomatic-20200219-Lexmark-X940e-Postscript-Lexmark.ppd.gz	a8281efe8b3f276cfa605e0b189d894c	242308
-foomatic-20200219-Minolta-magicolor_3100-Postscript.ppd.gz	2f3b86bcb0ffb92c501b762bab4b05f0	238975
-foomatic-20200219-Minolta-PagePro_8L-ljet2p.ppd.gz	40c3cf8956d43a9308fb2a7d5ffcc84f	80915
-foomatic-20200219-NEC-P2X-necp2xX.upp.ppd.gz	6cee723564297646eef0ccdb2b39f399	34475
+foomatic-20200219-Lexmark-C720n-pxlcolor.ppd.gz	b306abb3233e84618474ccee568bd0c3	679515
+foomatic-20200219-Lexmark-E238-pxlmono.ppd.gz	da8a63669af5504e85ee4a4e4e167702	79772
 foomatic-20200219-NEC-PICTY180-picty180.ppd.gz	946428ac119ac86d98954b1b0a69656d	51863
-foomatic-20200219-NEC-Pinwriter_P20-necp6.ppd.gz	2faf7d32d9d71c998285f7f2acb19eaa	38020
-foomatic-20200219-NEC-PinWriter_P6-necp6.ppd.gz	2faf7d32d9d71c998285f7f2acb19eaa	38020
-foomatic-20200219-NEC-SuperScript_650C-pcl3.ppd.gz	4fb6b1e69a3da5f6ba7bcced09856ccf	27082
-foomatic-20200219-NRG-SP_3500N-Postscript-NRG.ppd.gz	b42af37dd9091725d77a50fffcb0a687	213613
-foomatic-20200219-NRG-SP_3510DN-Postscript-NRG.ppd.gz	8d3f57f06f20bc78473dea89ff5d6515	213714
-foomatic-20200219-NRG-SP_C242DN-Postscript-NRG.ppd.gz	649fe9569b3a4a485fc061b8355326a4	242272
-foomatic-20200219-Oce-3145PS-Postscript2-Oce.ppd.gz	2f7a985ca5e4e4d137e45d381953cfa5	212554
-foomatic-20200219-Oce-8445PS-Postscript2-Oce.ppd.gz	ad422e1e71b7189c708c81f2f2027cdb	212526
-foomatic-20200219-Oce-9050-oce9050.ppd.gz	0cf01daa5196f45f78f7b8f28f8bdd0e	15916
-foomatic-20200219-Oce-9230-Postscript2-Oce.ppd.gz	713db7a86cb8c7aa18cf414ad72cf51d	212525
-foomatic-20200219-Oce-9260-Postscript2-Oce.ppd.gz	b742c49cfdeef18dc21db48db5b6c356	212525
-foomatic-20200219-Oce-PPC3073PS-Postscript-Oce.ppd.gz	c2b84cc0f4a228a780a59880bc5c0224	212605
-foomatic-20200219-Oce-PPC3074PS-Postscript-Oce.ppd.gz	c2b84cc0f4a228a780a59880bc5c0224	212605
-foomatic-20200219-Oce-PPC5115PS-Postscript-Oce.ppd.gz	1a488a267b3a7ec826f11bb29754296f	212723
-foomatic-20200219-Oce-VarioPrint_2045PS-Postscript-Oce.ppd.gz	464df60cc7b10971150ec46362e6fe69	212591
-foomatic-20200219-Oce-VarioPrint_2090PS-Postscript-Oce.ppd.gz	b2f24723a23ce399893da55be4469766	239637
-foomatic-20200219-Oce-VarioPrint_2100PS-Postscript-Oce.ppd.gz	3e1b7cebbe3f228c53dc4e7f38d9f8fb	240139
-foomatic-20200219-Oce-VarioPrint_2105PS-Postscript-Oce.ppd.gz	e2c09354c40976721de12a6fda453a1d	212614
-foomatic-20200219-Oki-14i-Postscript-Oki.ppd.gz	b8e78c214b6f12e63fd744f4e94967f0	213526
-foomatic-20200219-Oki-B4350-Postscript-Oki.ppd.gz	cc0828443f17ac95dcebd6f10cf31b43	214418
-foomatic-20200219-Oki-C5400-Postscript-Oki.ppd.gz	edef7179cf657851a82f03ca482bedd0	243697
-foomatic-20200219-Oki-C5700-Postscript-Oki.ppd.gz	5295a26bc1ae90e2b799e15a9df02f06	202944
-foomatic-20200219-Oki-C5900-Postscript-Oki.ppd.gz	9701324dedb32fdc67bc0612e70babb5	202943
-foomatic-20200219-Oki-C6100-Postscript-Oki.ppd.gz	d331806fee463fa7b3eb3a14e51f50ba	202947
-foomatic-20200219-Oki-C8800-Postscript-Oki.ppd.gz	cf6870d0b93c8fd0b52dfa8520de4481	202869
-foomatic-20200219-Oki-C9600-Postscript-Oki.ppd.gz	44bf11fdff8c131c6734b8c73138b148	202882
-foomatic-20200219-Oki-Microline_182-oki182.ppd.gz	24ddf20666fe33d93fa62a31fea74c3f	10825
+foomatic-20200219-NEC-PinWriter_P6-necp6.ppd.gz	b279f0a88d4a4923af08fd9f78e57bf4	38065
+foomatic-20200219-NEC-Pinwriter_P20-necp6.ppd.gz	b279f0a88d4a4923af08fd9f78e57bf4	38065
 foomatic-20200219-Oki-ML_320-okiibm.ppd.gz	3780eadf80a455eede5e44785addf587	4721
-foomatic-20200219-Oki-Okijet_2500-cdj550.ppd.gz	347bf504f93a83308f1bd98658b355cf	45038
-foomatic-20200219-Oki-OL410e-ljet4.ppd.gz	4def7a570be40572772e3f57657ea7ae	15091
-foomatic-20200219-Olivetti-JP350S-laserjet.ppd.gz	332abc3788d34ae2f95670c4015577e6	48408
-foomatic-20200219-Olivetti-JP450-djet500.ppd.gz	5e404c72704e88d8c252176d67f16da4	15276
+foomatic-20200219-Oki-Microline_182-oki182.ppd.gz	24ddf20666fe33d93fa62a31fea74c3f	10825
 foomatic-20200219-Panasonic-KX-P1150-eps9high.ppd.gz	8a16b1daa41ba6b232d22ecf168f6956	5499
-foomatic-20200219-Ricoh-4081-r4081.ppd.gz	1626146969446c0c37da1dcaa06188de	67234
-foomatic-20200219-Ricoh-ColorLaser_AP828-Postscript-Ricoh.ppd.gz	6a54eca4258947daeabe69dca2829dc3	240714
-foomatic-20200219-Ricoh-DDP_70-Postscript-Ricoh.ppd.gz	c2659797d14bf156e40504c7be2e1fed	214155
-foomatic-20200219-Ricoh-DDP_92-Postscript-Ricoh.ppd.gz	85738a673bbff9607e449bb428315655	214154
-foomatic-20200219-Ricoh-EMP_156-Postscript-Ricoh.ppd.gz	126c7718b8226bf7dd394e10a0a9641a	213380
-foomatic-20200219-Ricoh-GX_3050N-pxlcolor-Ricoh.ppd.gz	2e32a01e82df2fbba2801922566941ed	177348
-foomatic-20200219-Ricoh-GX_3050SFN-pxlcolor-Ricoh.ppd.gz	2e32a01e82df2fbba2801922566941ed	177348
-foomatic-20200219-Ricoh-GX_E3350N-pxlcolor-Ricoh.ppd.gz	2e32a01e82df2fbba2801922566941ed	177348
-foomatic-20200219-Ricoh-GX_E5550N-pxlcolor-Ricoh.ppd.gz	2e32a01e82df2fbba2801922566941ed	177373
-foomatic-20200219-Ricoh-SP_2300L-pcl5-Ricoh.ppd.gz	381a3732f2271fd45a1def9e735e8f8d	296237
-foomatic-20200219-Ricoh-SP_330DN-Postscript-Ricoh.ppd.gz	3906c991d625d145480bec539b26dddf	213966
-foomatic-20200219-Ricoh-SP_330SFN-Postscript-Ricoh.ppd.gz	95af821dc53383c3c64f6bde4e0c5e47	213966
-foomatic-20200219-Ricoh-SP_3700-Postscript-Ricoh.ppd.gz	16d744ce42bc3830697be7c070b51ec6	213966
-foomatic-20200219-Ricoh-SP_3700SF-Postscript-Ricoh.ppd.gz	a51dbe69296040db008a2fa352a22c86	213966
-foomatic-20200219-Ricoh-SP_400DN-Postscript-Ricoh.ppd.gz	6a3176137af769bc17d2ae9f9eb3d074	214916
+foomatic-20200219-Ricoh-4081-r4081.ppd.gz	ad0462cea0830315ae6492146ab96d40	67234
+foomatic-20200219-Ricoh-GX_3050N-pxlcolor-Ricoh.ppd.gz	83af6d9ecbea7000242d5479e59f9736	177353
+foomatic-20200219-Ricoh-GX_3050SFN-pxlcolor-Ricoh.ppd.gz	83af6d9ecbea7000242d5479e59f9736	177353
+foomatic-20200219-Ricoh-GX_E3350N-pxlcolor-Ricoh.ppd.gz	83af6d9ecbea7000242d5479e59f9736	177353
+foomatic-20200219-Ricoh-GX_E5550N-pxlcolor-Ricoh.ppd.gz	83af6d9ecbea7000242d5479e59f9736	177378
 foomatic-20200219-Ricoh-SP_450DN-PDF-Ricoh.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2756
-foomatic-20200219-Ricoh-SP_C250DN-Postscript-Ricoh.ppd.gz	eb34a8d8b2443b519709fa2c87525ff2	242272
-foomatic-20200219-Ricoh-SP_C250SF-Postscript-Ricoh.ppd.gz	a49229be6326321e1ea7e5fb5e56897b	242272
-foomatic-20200219-Ricoh-SP_C261DNw-Postscript-Ricoh.ppd.gz	5ee4a5ea67eb7db0ec4c764c67dca717	242272
-foomatic-20200219-Samsung-C140x-Postscript-Samsung.ppd.gz	4c93bdc2c6d71da122fe63865f592c43	239505
-foomatic-20200219-Samsung-C2620-Postscript-Samsung.ppd.gz	55ab4be2bdb5b5ad551ef164854f3f05	239829
-foomatic-20200219-Samsung-C2670-Postscript-Samsung.ppd.gz	48b0e2e28409b4d95641764c51b769f8	240004
-foomatic-20200219-Samsung-C460-Postscript-Samsung.ppd.gz	a3fff3ae20a66b8e36e2dda5d12848dd	239444
-foomatic-20200219-Samsung-C4820-Postscript-Samsung.ppd.gz	c33d4c4a4aa75609d326987fc22682f7	239930
-foomatic-20200219-Samsung-C48x-Postscript-Samsung.ppd.gz	8177bae198680c9a5ac4ac0b3f98b57a	239475
-foomatic-20200219-Samsung-CLP-350-Postscript-Samsung.ppd.gz	eea74104b9e6e478dc4f980e958bcf4b	238961
-foomatic-20200219-Samsung-CLP-410-Postscript-Samsung.ppd.gz	4f84e70fc0fcf536c8cc1e473e766ae6	239474
-foomatic-20200219-Samsung-CLP-610-pxlcolor.ppd.gz	f806e59260ec92ab078d06457825b9c6	679510
-foomatic-20200219-Samsung-CLP-660-Postscript-Samsung.ppd.gz	603a2e6c9f2182b3603acde15ce6f4f2	239412
-foomatic-20200219-Samsung-CLP-670-Postscript-Samsung.ppd.gz	902f13920f9742d9aaee44c3012e73a1	239594
-foomatic-20200219-Samsung-CLP-680-Postscript-Samsung.ppd.gz	e4ffb76b24c6bd2ca54f7dc84066384e	239723
-foomatic-20200219-Samsung-CLP-770-Postscript-Samsung.ppd.gz	96c93c5e7ffabad1912ba9b5156c5644	239625
-foomatic-20200219-Samsung-CLP-775-Postscript-Samsung.ppd.gz	d7ba76a46779a7957c79297d5e3eb1c9	239640
-foomatic-20200219-Samsung-CLX-3300-Postscript-Samsung.ppd.gz	96071b8ff17457f309609916b00e0816	239444
-foomatic-20200219-Samsung-CLX-6200-Postscript-Samsung.ppd.gz	b0090d8ba9e4eabfbb8a93ef798be514	239325
-foomatic-20200219-Samsung-CLX-6220-Postscript-Samsung.ppd.gz	e997b30b7115896865b01dea1851bf12	239545
-foomatic-20200219-Samsung-CLX-6250-Postscript-Samsung.ppd.gz	5a221b6f7ec394c011806e0bb00b8ce6	239559
-foomatic-20200219-Samsung-CLX-8380-Postscript-Samsung.ppd.gz	7301da1ff3c5b670c9591984576c8e77	239650
-foomatic-20200219-Samsung-CLX-8385-Postscript-Samsung.ppd.gz	cf8647b03433bfaeca4211f0329a15d3	239683
-foomatic-20200219-Samsung-CLX-8640_8650-Postscript-Samsung.ppd.gz	deae92d5d31c514dcb8c1ab13514d2e8	239999
-foomatic-20200219-Samsung-CLX-9250_9350-Postscript-Samsung.ppd.gz	7d54f264468dfcad9d1c453e731562de	239511
-foomatic-20200219-Samsung-CLX-9252_9352-Postscript-Samsung.ppd.gz	b5f5e04182d0c23ac8db5ecf6cbee10e	240103
-foomatic-20200219-Samsung-CLX-92x1_93x1-Postscript-Samsung.ppd.gz	6769b43d1d7170a4299f70608b49eec7	239935
-foomatic-20200219-Samsung-K3250-Postscript-Samsung.ppd.gz	efd54f099b98cae190093072491281a5	213070
-foomatic-20200219-Samsung-K401-Postscript-Samsung.ppd.gz	599921695dc358978c6d9648bb511dcc	213001
-foomatic-20200219-Samsung-K703-Postscript-Samsung.ppd.gz	c26199946e58d6829db869a44f82bf94	213101
-foomatic-20200219-Samsung-K7600-Postscript-Samsung.ppd.gz	96d571a674dddaa8fcd72220333b1c86	213101
-foomatic-20200219-Samsung-M337x_387x_407x-Postscript-Samsung.ppd.gz	0d404ff8a463632388310a6428cdaa64	212671
-foomatic-20200219-Samsung-M403x-Postscript-Samsung.ppd.gz	d2563ea9d61d70ee903b39aded3444e8	212648
-foomatic-20200219-Samsung-M408x-Postscript-Samsung.ppd.gz	a05873d9771fafab4a42830e8e0e2996	212708
-foomatic-20200219-Samsung-M4370_5370-Postscript-Samsung.ppd.gz	0f674fca60a568bd989fd0c583857822	213006
-foomatic-20200219-Samsung-M453x-Postscript-Samsung.ppd.gz	377e9814056a05f107902f681ceec581	212899
-foomatic-20200219-Samsung-M458x-Postscript-Samsung.ppd.gz	4748c2d95fe4ebe29f4f18c7a342b06b	212824
-foomatic-20200219-Samsung-M5270-Postscript-Samsung.ppd.gz	6ef41c75a476dc9977fe22fd34d338b6	213006
-foomatic-20200219-Samsung-ML-1650-ljet4.ppd.gz	f6e5a9b3f4ccee676c38c6e0e3a541b4	42243
-foomatic-20200219-Samsung-ML-2150-Postscript-Samsung.ppd.gz	711f7c075777e5ac795beaf678f05104	212274
-foomatic-20200219-Samsung-ML-2550-Postscript-Samsung.ppd.gz	8633d0845932d3c919a700979f8d5598	212274
-foomatic-20200219-Samsung-ML-2570-Postscript-Samsung.ppd.gz	ad79717950f51012ca37124da53f9fb6	212066
-foomatic-20200219-Samsung-ML-2850-Postscript-Samsung.ppd.gz	5acebbd6a6ab1d860a205712d1fd05da	212304
-foomatic-20200219-Samsung-ML-2855-Postscript-Samsung.ppd.gz	ceeb1554eb84efc4d0ebfbc87e9a7fcb	212491
-foomatic-20200219-Samsung-ML-3470-Postscript-Samsung.ppd.gz	e75f750a43777aa2161d4b3162f9a279	212361
-foomatic-20200219-Samsung-ML-371x-Postscript-Samsung.ppd.gz	394870bca0d0cd72ffbd34f053b5d0a0	212597
-foomatic-20200219-Samsung-ML-4050-Postscript-Samsung.ppd.gz	dd4e272fdb88165d45fb51972ede7b84	212285
-foomatic-20200219-Samsung-ML-4055-Postscript-Samsung.ppd.gz	7e5bc9f777fb3fc0180dd1045817f76b	212285
-foomatic-20200219-Samsung-ML-451x_501x-Postscript-Samsung.ppd.gz	1bb188cc97a34635b0a61c709d690f79	212598
-foomatic-20200219-Samsung-ML-4550-Postscript-Samsung.ppd.gz	f7c48e97a8a032e4fa8ebe22d3c6e5a8	212166
-foomatic-20200219-Samsung-ML-4555-Postscript-Samsung.ppd.gz	5fb77840dd33537a91bf81b34c121d83	212166
-foomatic-20200219-Samsung-ML-551x_651x-Postscript-Samsung.ppd.gz	329967b24856cde996f96b7d023c8fea	212607
-foomatic-20200219-Samsung-ML-8850_8950-Postscript-Samsung.ppd.gz	b4cddea192cf0a60025ed1309f4e790f	212572
-foomatic-20200219-Samsung-ML-8x00-Postscript-Samsung.ppd.gz	eb657115aa3e4770c03710e2f710d7f7	212015
-foomatic-20200219-Samsung-SCX-483x_5x3x-Postscript-Samsung.ppd.gz	6154e692a0781cce85a50ad946949b98	212634
-foomatic-20200219-Samsung-SCX-4x28-Postscript-Samsung.ppd.gz	20d9ed469d120bfe8a7c12611dc99f49	212325
-foomatic-20200219-Samsung-SCX-5635-Postscript-Samsung.ppd.gz	d470efb874bcda55950b3888fd131691	212536
-foomatic-20200219-Samsung-SCX-5835_5935-Postscript-Samsung.ppd.gz	2fc5ee8f408743cb0b496152a8c4fae6	212612
-foomatic-20200219-Samsung-SCX-6545-Postscript-Samsung.ppd.gz	c10e4b175f0e207e72ceee15fd034153	212783
-foomatic-20200219-Samsung-SCX-6545X-Postscript-Samsung.ppd.gz	57f20357afd2db1312feef1eccb288ec	212871
-foomatic-20200219-Samsung-SCX-681x-Postscript-Samsung.ppd.gz	4de07dfda5382c4edc65d419e18f56da	212654
-foomatic-20200219-Samsung-SCX-6x20-Postscript-Samsung.ppd.gz	6afa7e7e385dced741eb83e12a195e76	212250
-foomatic-20200219-Samsung-SCX-6x22-Postscript-Samsung.ppd.gz	dc7125f3c51669f872b0b5d3c630cefa	212166
-foomatic-20200219-Samsung-SCX-6x45-Postscript-Samsung.ppd.gz	46568b73c9733f2963b52eb6f8ba779f	212609
-foomatic-20200219-Samsung-SCX-6x55-Postscript-Samsung.ppd.gz	bb66255fdd1ff310853a1b292f54bf00	212783
-foomatic-20200219-Samsung-SCX-8030_8040-Postscript-Samsung.ppd.gz	111ad89e1b92b5548ba77a4e1198db5a	212611
-foomatic-20200219-Samsung-SCX-8123_8128-Postscript-Samsung.ppd.gz	2d25ad10bddf7c5b817e336f982e9437	212815
-foomatic-20200219-Samsung-SCX-8230_8240-Postscript-Samsung.ppd.gz	d4bf073b576a00f127f76bfa867ec3ad	212984
-foomatic-20200219-Sharp-AJ-1800-pcl3.ppd.gz	02b55132a6a7bce16ca416f42d5ffbdb	162187
-foomatic-20200219-Sharp-AR-155FG_PS-Postscript-Sharp.ppd.gz	a2d8fbbf732bc5dbcbb5ffb80fd3ddd7	214064
-foomatic-20200219-Sharp-AR-160M_PS-Postscript-Sharp.ppd.gz	ee81f78176cb8f6eb023eca543cfb933	213965
-foomatic-20200219-Sharp-AR-163FG_PS-Postscript-Sharp.ppd.gz	fde5658e92383e3b1a2b3aefae4f5f46	214152
-foomatic-20200219-Sharp-AR-163G_PS-Postscript-Sharp.ppd.gz	d19e468de8f0c22a6e878195d6dc6745	214155
-foomatic-20200219-Sharp-AR-168D-Postscript-Sharp.ppd.gz	5dab7762aa6a6cf6400a3b27c1688d48	214068
-foomatic-20200219-Sharp-AR-168S-Postscript-Sharp.ppd.gz	ac5dba65a4488de640091c0f38704e7d	213969
-foomatic-20200219-Sharp-AR-200M_PS-Postscript-Sharp.ppd.gz	4e4e6f5d06370ba81f9221e50dc22e2c	213965
-foomatic-20200219-Sharp-AR-205FG_PS-Postscript-Sharp.ppd.gz	429a3b0d711852b3e264b482730eaa0c	214251
-foomatic-20200219-Sharp-AR-205G_PS-Postscript-Sharp.ppd.gz	5b02e25581f246bf11d4da5876f9860e	214254
-foomatic-20200219-Sharp-AR-266FP_PS-Postscript-Sharp.ppd.gz	b3f8b3272a9458eb0b5e740839da1e05	215417
-foomatic-20200219-Sharp-AR-311FP_PS-Postscript-Sharp.ppd.gz	fd4d571d94649f755e1a93f3767fe061	215916
-foomatic-20200219-Sharp-AR-5220-Postscript-Sharp.ppd.gz	dc6738e6e8fc740ed1ada99b337de0c7	213969
-foomatic-20200219-Sharp-AR-555M_PS-Postscript-Sharp.ppd.gz	00f9384245ddba14116d56b12bea506a	215541
-foomatic-20200219-Sharp-AR-705M_PS-Postscript-Sharp.ppd.gz	1ea2a8a03fcea41e2c3da3893fcba931	215274
-foomatic-20200219-Sharp-AR-B07-Postscript-Sharp.ppd.gz	8b009c6b4ef08941ba29ccf0733e0a8a	213857
-foomatic-20200219-Sharp-AR-BC260-Postscript-Sharp.ppd.gz	46537ae86213db0213cd0cace3bd73dc	245578
-foomatic-20200219-Sharp-AR-C170FP_PS-Postscript-Sharp.ppd.gz	a4c29d45ea1a3850371f75b5c6fc8fad	245574
-foomatic-20200219-Sharp-AR-C260P-Postscript-Sharp.ppd.gz	c10ae4e0bba100ba0892a99b53bee2f0	245578
-foomatic-20200219-Sharp-AR-M161_PS-Postscript-Sharp.ppd.gz	3dd7e859974efab94beab6b1fa12715f	214159
-foomatic-20200219-Sharp-AR-M165_PS-Postscript-Sharp.ppd.gz	470e53939df8c88e390d1d3b9bca1fec	214258
-foomatic-20200219-Sharp-AR-M205_PS-Postscript-Sharp.ppd.gz	839f2c8ec5ea82f8332b06613a751308	214068
-foomatic-20200219-Sharp-AR-M206_PS-Postscript-Sharp.ppd.gz	05e9b1ea5ec4d44403e8ed61b3cc3354	214258
-foomatic-20200219-Sharp-AR-M236_PS-Postscript-Sharp.ppd.gz	3c1feb0f31dfb94f787cce3a87f9e3a7	215421
-foomatic-20200219-Sharp-AR-M351N-Postscript-Sharp.ppd.gz	8bae576ef9b3bf53c88446cf680fd260	215920
-foomatic-20200219-Sharp-AR-M550N-Postscript-Sharp.ppd.gz	4299284471af8d151563f02fdcb52565	215545
-foomatic-20200219-Sharp-AR-M700N-Postscript-Sharp.ppd.gz	2f6b12b6a862275f793fef88bc7ce2b8	215278
-foomatic-20200219-Sharp-AR-N182FG-Postscript-Sharp.ppd.gz	662d331c5d9d91d4b10f8ee655a9b444	214351
-foomatic-20200219-Sharp-AR-N182G-Postscript-Sharp.ppd.gz	ac7329ff462bcab6c74a6d1b251478a3	214354
-foomatic-20200219-Sharp-MX-2314NR-Postscript-Sharp.ppd.gz	6c2a21fff38275bec2697b4676fa79f8	246571
-foomatic-20200219-Sharp-MX-2614NR-Postscript-Sharp.ppd.gz	5fcc68e2350abab30140e4d070f49c12	246497
-foomatic-20200219-Sharp-MX-M1100-Postscript-Sharp.ppd.gz	ba068c3c22740919ca0351e390ee494f	216835
-foomatic-20200219-Sharp-MX-M182D-Postscript-Sharp.ppd.gz	cf80b254973b38155d9c3184fbba2449	214358
-foomatic-20200219-Sharp-MX-M182-Postscript-Sharp.ppd.gz	3980bd13d9a4a86330d3b3e7b7a7ec51	214259
-foomatic-20200219-Sharp-MX-M202D-Postscript-Sharp.ppd.gz	34b988442b25337cd3ffbc4a0c192dce	214358
-foomatic-20200219-Sharp-MX-M260FP-Postscript-Sharp.ppd.gz	8aec4fba91956d7e377e6d2e0dc05b3f	215351
-foomatic-20200219-Sharp-MX-M260-Postscript-Sharp.ppd.gz	2726fa1bb7a2334d27dd825ab7309bf4	215355
-foomatic-20200219-Sharp-MX-M264NR-Postscript-Sharp.ppd.gz	925dbb595d9d29cdf246d1aacfaf192d	216135
-foomatic-20200219-Sharp-MX-M860-Postscript-Sharp.ppd.gz	5d54df7ac5bcab438028a36f3e9a1141	216762
+foomatic-20200219-Samsung-CLP-610-pxlcolor.ppd.gz	b306abb3233e84618474ccee568bd0c3	679515
+foomatic-20200219-Sharp-AJ-1800-pcl3.ppd.gz	79b7b6418135fd0cb8e26209eebdb317	33423
 foomatic-20200219-Star-NX-1001-eps9mid.ppd.gz	6c32c5579702c088466f6e68241bdec5	5502
-foomatic-20200219-Star-StarJet_48-sj48.ppd.gz	afb91f8aab0c5db3e1f55ec0470a1370	31109
+foomatic-20200219-Star-StarJet_48-sj48.ppd.gz	1d1fa215e030e59ce5762b5fdcfc435c	31172
 foomatic-20200219-Tektronix-4693d-t4693dX.ppd.gz	60c739246ef3679b3be458228be2f748	154869
 foomatic-20200219-Tektronix-4695-tek4696.ppd.gz	378849ee74e9df0419d7cfe66092c7d6	21105
 foomatic-20200219-Tektronix-4696-tek4696.ppd.gz	378849ee74e9df0419d7cfe66092c7d6	21105
-foomatic-20200219-Tektronix-Phaser_350-Postscript.ppd.gz	5869d299ae46c38f591968cc74cf5f18	197143
-foomatic-20200219-Toshiba-e-Studio_205-Postscript-Toshiba.ppd.gz	c62040ff72c33144724aa3a2d5dd9467	124343
-foomatic-20200219-Toshiba-e-Studio_282-Postscript-Toshiba.ppd.gz	cb1f93cb78df7da7a20cd31465d86fa3	125745
-foomatic-20200219-Toshiba-e-Studio_3510c-Postscript-Toshiba.ppd.gz	e709843014346fd3f0f60909da5dacb9	152699
-foomatic-20200219-Toshiba-e-Studio_451c-Postscript-Toshiba.ppd.gz	03397aa3133f8ae60e776b0543043b87	155381
-foomatic-20200219-Toshiba-e-Studio_452-Postscript-Toshiba.ppd.gz	f4efdef7a55e561959623c668a6b652a	125673
-foomatic-20200219-Toshiba-e-Studio_850-Postscript-Toshiba.ppd.gz	c66fff98cfd15a9e32b9fdac7b21bd1b	125407
-foomatic-20200219-Toshiba-GL-1010-Postscript-Toshiba.ppd.gz	76e304011e34eec8f92809fc6b3217c1	123080
-foomatic-20200219-Toshiba-GL-1020-Postscript-Toshiba.ppd.gz	4ac9551438c37e805ef7e06e34f269a5	123120
-foomatic-20200219-Xerox-DocuPrint_XJ8C-lxm5700m.ppd.gz	a4e34c3997af0ab4a88fd1013b965206	99537
-foomatic-20200219-Xerox-Phaser_3160N-pxlcolor.ppd.gz	f806e59260ec92ab078d06457825b9c6	679510
-fuji_xerox-20200402-fuji-xerox-20200402-fx-apeosportv-c3375.ppd.gz	eec6dac9ce178bd49b614d4c13ec34a3	240264
-hp-20171121-hplip-3.17.10-hp-color_laserjet-ps.ppd.gz	41ca4d7955a1f3d2de9d2869d4622197	197791
-hp-20171121-hplip-3.17.10-hp-deskjet_f4210_series.ppd.gz	1c519dfb9d382d59e9fc00094e9f29a9	60680
-hp-20171121-hplip-3.17.10-hp-laserjet_4-ps.ppd.gz	373874dd5c8d1de7e7a6347346e59981	212649
-hp-20171121-hplip-3.17.10-hp-laserjet_4si-ps.ppd.gz	f89f970bf172a38fd3e46f240184aee4	212702
-hp-20171121-hplip-3.17.10-hp-laserjet_4v-ps.ppd.gz	5c130fe203738f867605ebcb3bff7ced	213274
-hp-20171121-hplip-3.17.10-hp-laserjet_6p-ps.ppd.gz	3929e8b16ec2bff828cd6957b14db1cc	213174
-hp-20171121-hplip-3.17.10-hp-laserjet_p1505n-pcl3.ppd.gz	643f51e7bfac3b39b896e75a067ad686	55038
-hp-20171121-hplip-3.17.10-hp-laserjet_p2055_series-ps.ppd.gz	c2bc1ab18488aaa2272a702d6f2310ea	212844
-hp-20171121-hplip-3.17.10-hp-laserjet_p4010_series-ps.ppd.gz	0bdca36c031ca13413f826b2101aa194	214330
-hp-20190111-hplip-3.18.12-hp-designjet_z6200_42in_photo-ps.ppd.gz	3a280950ef71414b0d67e8d705dde0ba	199528
-hp-20190111-hplip-3.18.12-hp-designjet_z6200_60in_photo-ps.ppd.gz	852fcb884e90db06d2ad376649c24856	199528
-hp-20190111-hplip-3.18.12-hp-designjet_z6810ps_42in-ps.ppd.gz	6ee27e7c30442d3213e6ee6bcdf64ea1	241807
-hp-20190111-hplip-3.18.12-hp-PCL3-Class1B.ppd.gz	e702d88e3441c4795b51f6b8aa560063	69009
-hp-20190918-hplip-3.19.6-hp-Ampere.ppd.gz	f69666f1e2b6a51d1b3221fd509c8f2a	69010
-hp-20190918-hplip-3.19.6-hp-CLE17.ppd.gz	dc41b12dd95541460398b1883271a03e	72022
-hp-20190918-hplip-3.19.6-hp-cm8060_mfp_with_edgeline-ps.ppd.gz	bd22a0f03d1fb228e5c056197e1ba4ce	242574
-hp-20190918-hplip-3.19.6-hp-color_designjet_xl_3600-ps.ppd.gz	f25b74becac3a02ee344e09050a420f6	241536
-hp-20190918-hplip-3.19.6-hp-color_laserjet_pro_mfp_m277-ps.ppd.gz	a8f1ed315f6371171cd215e4cfec3761	240955
-hp-20190918-hplip-3.19.6-hp-Copperhead12.ppd.gz	7c67b4790ec048c7258e6fabc6ab0600	69017
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH15.ppd.gz	213f444ac8ed89f2a9255848a1d42f62	69414
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH17.ppd.gz	213f444ac8ed89f2a9255848a1d42f62	69415
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH.ppd.gz	f9887e3c76a5d689826c6f99d12956f3	69414
-hp-20190918-hplip-3.19.6-hp-Copperhead.ppd.gz	7c67b4790ec048c7258e6fabc6ab0600	69017
-hp-20190918-hplip-3.19.6-hp-CopperheadXLP.ppd.gz	6a9c5bafc68057392c8efb807e5c8be4	72032
-hp-20190918-hplip-3.19.6-hp-Corbett.ppd.gz	f69666f1e2b6a51d1b3221fd509c8f2a	69010
-hp-20190918-hplip-3.19.6-hp-designjet_t2600dr-ps.ppd.gz	5cad0c64815ce42b6e54d009debdc246	241448
-hp-20190918-hplip-3.19.6-hp-DJ55xx.ppd.gz	2d763c85487163f7712b4bad230516c7	61726
-hp-20190918-hplip-3.19.6-hp-DJ9xxVIP.ppd.gz	1134bca5011d025f41b40bc819fd83a4	61725
-hp-20190918-hplip-3.19.6-hp-Gemstone.ppd.gz	db5a2bb12613cbf12ea75b9512afaca1	72022
-hp-20190918-hplip-3.19.6-hp-Kapan.ppd.gz	87183604c598ac5bf10850ce40aa7696	69020
-hp-20190918-hplip-3.19.6-hp-laserjet_200_color_m251-ps.ppd.gz	4a4233cd05e2e1b5f242645443f4e331	240965
-hp-20190918-hplip-3.19.6-hp-laserjet_m1522_mfp-ps.ppd.gz	01fe5bc66e333aac09412c1c992e634b	213054
-hp-20190918-hplip-3.19.6-hp-laserjet_m2727_mfp_series-ps.ppd.gz	c05deeacf9bc514716b312b30d828a09	213393
-hp-20190918-hplip-3.19.6-hp-LJ-Class1.ppd.gz	8eb924200444d1ef2f93a2e6e3fde47f	46462
-hp-20190918-hplip-3.19.6-hp-LJ-Class2.ppd.gz	ce16520362c2d3eb0576d489396392ba	190207
-hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz	690763dad601d98ccf8ca053c45603a7	118262
-hp-20190918-hplip-3.19.6-hp-LJ-Class6.ppd.gz	f5005426ab827b1857d1a454630e6f9a	59224
-hp-20190918-hplip-3.19.6-hp-Mimas15.ppd.gz	db5a2bb12613cbf12ea75b9512afaca1	72022
-hp-20190918-hplip-3.19.6-hp-Mimas17.ppd.gz	db5a2bb12613cbf12ea75b9512afaca1	72022
-hp-20190918-hplip-3.19.6-hp-Mimas.ppd.gz	e11105a15b3b62d37338390dcc14d26a	69017
-hp-20190918-hplip-3.19.6-hp-MimasTDR.ppd.gz	ac37833cf3ca8a51801220f49b7fc971	45958
-hp-20190918-hplip-3.19.6-hp-OJ7000.ppd.gz	edb400d791b1732f741ab4ce42f0c56a	69018
-hp-20190918-hplip-3.19.6-hp-OJProKx50.ppd.gz	2d763c85487163f7712b4bad230516c7	61726
-hp-20190918-hplip-3.19.6-hp-postscript-inkjet.ppd.gz	0015108b373865356147431d04387d06	240890
-hp-20190918-hplip-3.19.6-hp-postscript-laserjet.ppd.gz	3d31044590c72843617f071280e241d0	241154
-hp-20190918-hplip-3.19.6-hp-postscript-laserjet-pro.ppd.gz	7cf4bb64f6e8fc528b50d519798dc269	241178
-hp-20190918-hplip-3.19.6-hp-PSP100.ppd.gz	1245948f2d298121448808fd8d15d38e	61728
-hp-20190918-hplip-3.19.6-hp-PSP470.ppd.gz	fe84ee6798a8e9484f61b4cfeb3c60d1	147576
-hp-20190918-hplip-3.19.6-hp-Pyramid15.ppd.gz	db5a2bb12613cbf12ea75b9512afaca1	72022
-hp-20190918-hplip-3.19.6-hp-PyramidPlus.ppd.gz	ac37833cf3ca8a51801220f49b7fc971	45958
-hp-20190918-hplip-3.19.6-hp-Pyramid.ppd.gz	512bc2e60f90fde3978271be5870935f	45739
-hp-20190918-hplip-3.19.6-hp-PyramidRefresh15.ppd.gz	4f83312b1393b65fceb8fc8f106c2d2d	68164
-hp-20190918-hplip-3.19.6-hp-PyramidRefresh17.ppd.gz	dc41b12dd95541460398b1883271a03e	72022
-hp-20190918-hplip-3.19.6-hp-Python10.ppd.gz	227d96dedf4685774d785f16fc2638f1	69017
-hp-20190918-hplip-3.19.6-hp-Python11.ppd.gz	edb400d791b1732f741ab4ce42f0c56a	69018
-hp-20190918-hplip-3.19.6-hp-Python.ppd.gz	edb400d791b1732f741ab4ce42f0c56a	69018
-hp-20190918-hplip-3.19.6-hp-Saipan15B.ppd.gz	9b538488d8cdbc26c0fbe7aca6464665	72032
-hp-20190918-hplip-3.19.6-hp-Saipan.ppd.gz	4d362af48deccd5b500c7e2316fb9a0e	69010
-hp-20190918-hplip-3.19.6-hp-SPDOfficejetProAsize.ppd.gz	db5a2bb12613cbf12ea75b9512afaca1	72022
-hp-20190918-hplip-3.19.6-hp-SPDOfficejetProBsize.ppd.gz	db5a2bb12613cbf12ea75b9512afaca1	72022
-hp-20190918-hplip-3.19.6-hp-Stabler.ppd.gz	e11105a15b3b62d37338390dcc14d26a	69017
-hp-20190918-hplip-3.19.6-hp-StingrayOJ.ppd.gz	e430016285e2d90897b94ba1fd77c457	66965
-hp-20190918-hplip-3.19.6-hp-ViperMinusVIP.ppd.gz	7df7364622485985c0cbf938c1124a17	68781
-hp-20190918-hplip-3.19.6-hp-ViperPlusVIP.ppd.gz	e11105a15b3b62d37338390dcc14d26a	69017
-hplip-20200303-hplip-3.19.12-hp-color_designjet_xl_3600-ps.ppd.gz	6582492048d083442b5fabc9beec5ae6	241401
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2550_series-ps.ppd.gz	1cc89d8dccf0264d2c0b6357a9ec08bf	240855
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2605-ps.ppd.gz	4925bbec1c90f7c04f25d9a3a216856f	241149
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2700-ps.ppd.gz	7237921f66fdf802e9fb5450894af62a	240913
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2800-ps.ppd.gz	9a27877d68650dbcf5cdb2999b064f67	240791
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_cm1015-ps.ppd.gz	379b372da5ec60cfdd8f56a9c1cd949a	240866
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_e85055-ps.ppd.gz	a69f2f3d3facd48790192eae91d3db44	241951
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_flowmfp_m776-ps.ppd.gz	51b00a3fcc0e8e3790333c3524dc3f74	242324
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_m856-ps.ppd.gz	4e4bcffcefb8bad8f1bd53447c1a914e	242255
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_mfp_m776-ps.ppd.gz	210ddfa988525a056bc1976161651bb5	242552
-hplip-20200303-hplip-3.19.12-hp-designjet_4000ps.ppd.gz	e0aa448d3f75bbb0c877e11ccdc387f9	199317
-hplip-20200303-hplip-3.19.12-hp-designjet_4500mfp.ppd.gz	1beb7866416ab24bae9fecb1d747d03e	199324
-hplip-20200303-hplip-3.19.12-hp-designjet_d5800-ps.ppd.gz	91e4b049e982bfc9a76f97a683867a1f	241176
-hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_24in-ps.ppd.gz	fa127295bd22a8ccae785538a4709ec9	199323
-hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_44in-ps.ppd.gz	065b7d0d502c1a89df786caf1cecd315	199323
-hplip-20200303-hplip-3.19.12-hp-designjet_t1200_postscript-ps.ppd.gz	b37c0fe9693390163a5db70cb2c60f1b	199334
-hplip-20200303-hplip-3.19.12-hp-designjet_t1300_postscript-ps.ppd.gz	9e9c6c51c5cc755805a615afdf2239d1	199457
-hplip-20200303-hplip-3.19.12-hp-designjet_t1500-postscript.ppd.gz	5467d65fceb25351afb132e322643cd5	199730
-hplip-20200303-hplip-3.19.12-hp-designjet_t1530-postscript.ppd.gz	7d223bc95d07780956359041a9c89ae1	199768
-hplip-20200303-hplip-3.19.12-hp-designjet_t1600dr-ps.ppd.gz	5570e85758c033676bf1bb49eb6bfbb0	241313
-hplip-20200303-hplip-3.19.12-hp-designjet_t1600_printer-ps.ppd.gz	c5ee3c1e72d6722b3e9834e2c60ce8f1	241313
-hplip-20200303-hplip-3.19.12-hp-designjet_t1700dr_postscript-ps.ppd.gz	5604d001d5dc771463a4a0836cb39273	241404
-hplip-20200303-hplip-3.19.12-hp-designjet_t1700_postscript-ps.ppd.gz	010e09798efabfcc3f464ff4208fd5d7	241393
-hplip-20200303-hplip-3.19.12-hp-designjet_t1708dr_postscript-ps.ppd.gz	d022b8f92249349daab800ca023f3421	241687
-hplip-20200303-hplip-3.19.12-hp-designjet_t1708_postscript-ps.ppd.gz	ccb8eaf0c9a48cb23b1488e79644f32c	241678
-hplip-20200303-hplip-3.19.12-hp-designjet_t2300_postscript-ps.ppd.gz	ef8d166fc52f8d571be1a579b7e15258	199457
-hplip-20200303-hplip-3.19.12-hp-designjet_t2500-postscript.ppd.gz	98dd849ec6a95893d266e793e8fbf540	199666
-hplip-20200303-hplip-3.19.12-hp-designjet_t2600dr-ps.ppd.gz	f9f9c6d3d5c8cc56bbb7992ea70f96a9	241313
-hplip-20200303-hplip-3.19.12-hp-designjet_t3500-ps.ppd.gz	3430f129b897739f3822e023d822d54e	241298
-hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps_monochrome-ps.ppd.gz	b4ae06e4d535334fccdd7947b675cba5	188518
-hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps-ps.ppd.gz	09c6d48950d57a535867083be68186ce	199738
-hplip-20200303-hplip-3.19.12-hp-designjet_t7200-ps.ppd.gz	013207c3c8f4330b7dbbd6697d747b34	241448
-hplip-20200303-hplip-3.19.12-hp-designjet_t770_postscript-ps.ppd.gz	3cc1e9c980119c6d9d71a667ab497940	199179
-hplip-20200303-hplip-3.19.12-hp-designjet_t770ps_24in-ps.ppd.gz	e63563a67569d8c7fb5df273c031fb4e	199179
-hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_24in-ps.ppd.gz	d040678de9b47f2d1fee0b0381d866bb	199302
-hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_44in-ps.ppd.gz	02b0539d1359fb8472aafc1e330a5384	199302
-hplip-20200303-hplip-3.19.12-hp-designjet_t920-postscript.ppd.gz	51337bf8b8f8160b2edae44284742627	199571
-hplip-20200303-hplip-3.19.12-hp-designjet_t930-postscript.ppd.gz	3a7b97344e9ebeaf3a61c298ed4f2900	199571
-hplip-20200303-hplip-3.19.12-hp-designjet_z5200_postscript-ps.ppd.gz	e4d76e160b1beb17bb704fd64ff2e78b	242649
-hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_42in_photo-ps.ppd.gz	821948dfce39d8754a046387caa08447	199540
-hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_60in_photo-ps.ppd.gz	173dbc89df0a2ff82c6590ef429cf0e0	199540
-hplip-20200303-hplip-3.19.12-hp-designjet_z6600-postscript.ppd.gz	19845db0466cb162619594ca4bb3814b	241410
-hplip-20200303-hplip-3.19.12-hp-designjet_z6610ps_60in-ps.ppd.gz	d39f5c8fb123609a853c34d4c7d0c758	241659
-hplip-20200303-hplip-3.19.12-hp-designjet_z6800_photo-postscript.ppd.gz	f1efc6d7f4a2e7f5d0958c50d90d51d7	241410
-hplip-20200303-hplip-3.19.12-hp-designjet_z6810ps_60in-ps.ppd.gz	8db8f0676df04a9e5b2f9f919bf43367	241807
-hplip-20200303-hplip-3.19.12-hp-deskjet_3420.ppd.gz	a97a623d0bb2b6318477e33b9492bd2e	1272786
-hplip-20200303-hplip-3.19.12-hp-deskjet_3700_series.ppd.gz	dc41b12dd95541460398b1883271a03e	72023
-hplip-20200303-hplip-3.19.12-hp-deskjet_950c.ppd.gz	bc9d338682d081892de3479187ca7638	104810
-hplip-20200303-hplip-3.19.12-hp-deskjet_d1600_series.ppd.gz	d65408cc3bf03a6e78b839e42e537365	2533900
-hplip-20200303-hplip-3.19.12-hp-deskjet_d2600_series.ppd.gz	01b9cd0be0953d18271e7907ec039135	2702260
-hplip-20200303-hplip-3.19.12-hp-deskjet_d4100_series.ppd.gz	eaf0299a682275cf3a5ac52547fd329c	2362612
-hplip-20200303-hplip-3.19.12-hp-deskjet_f300_series.ppd.gz	08eb08b2ef79befa7dbdd42323ed0e02	1389746
-hplip-20200303-hplip-3.19.12-hp-DJGenericVIP.ppd.gz	c433c0c0faa591e82b5ab4992e895bf7	69020
-hplip-20200303-hplip-3.19.12-hp-laserjet_100_color_mfp_m175-ps.ppd.gz	6160c11ccfec849c80ff0fa992349e8c	240882
-hplip-20200303-hplip-3.19.12-hp-officejet_4300_series.ppd.gz	a97a623d0bb2b6318477e33b9492bd2e	1272786
-hplip-20200303-hplip-3.19.12-hp-pagewide_p55250-ps.ppd.gz	be8b5008ed6d11ef79bfe1c0c2191e44	240633
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_3900ps_mfp-ps.ppd.gz	5eaac365603e571d7284ee70d6ed5db9	240882
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4000ps-ps.ppd.gz	e254e1c46b9061d8685af8d70c65f04b	241448
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4100ps-ps.ppd.gz	ffcc863b346819b222b70a48730b6e8a	241024
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps_blueprinter-ps.ppd.gz	754c436048193113e9d00d467d1f3d18	241316
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps-ps.ppd.gz	eb6cd87e5ca2ab6e6b4e198b5e680335	241448
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp_blueprinter-ps.ppd.gz	de34e1df77a048c9ea6d9767ccbd3feb	241316
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp-ps.ppd.gz	d13da658f4dbfa4d377817b4e542e780	241448
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps-ps.ppd.gz	878fb218f9656488479668b9089aa3f7	241448
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps_blueprinter-ps.ppd.gz	a2efeebe65722cd4763c5d3a392796f8	241316
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps-ps.ppd.gz	b95e6e3df1391ba0407db1b053e2dffa	241448
-hplip-20200303-hplip-3.19.12-hp-photosmart_a530_series.ppd.gz	c0b30155a3d836346950cefe8f8fe997	206359
-konica_minolta-20200331-konica-minolta-20200331-konica-minolta-226i.ppd.gz	fa5f77f471537c4acfb033f6d5e1717b	212135
-konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c226.ppd.gz	7bcd4161dc58b23d1a6422be5e953e67	238928
-kyocera-20180809-Kyocera_TASKalfa_3051ci.ppd.gz	6419d60d7a7798d0cde3c1df40b145f4	243172
-kyocera-20190328-Kyocera_CS_2551ci.ppd.gz	cda98d66033d8e6907292ab7e324324e	198232
-kyocera-20190328-Kyocera_CS_2552ci.ppd.gz	d567e710cb921bdbfe450b87443d8542	198232
-kyocera-20190328-Kyocera_CS_3010i.ppd.gz	6b28a0b507a02c6b2df86ce602daef97	187770
-kyocera-20190328-Kyocera_CS_3011i.ppd.gz	4b634322b19153c59cf82ee7d9273b5d	187770
-kyocera-20190328-Kyocera_CS_306ci.ppd.gz	e5902c4a4ae691b6d78ea4c273eb5f34	198232
-kyocera-20190328-Kyocera_CS_307ci.ppd.gz	0ba070ae097ce025fae42180bdcf782c	198232
-kyocera-20190328-Kyocera_CS_3212i.ppd.gz	fa2cceca8cdbf3e2a9bee7aac010f8ae	187770
-kyocera-20190328-Kyocera_CS_4002i.ppd.gz	496e9c9810fd729e50d38d0a4e5180b3	187770
-kyocera-20190328-Kyocera_CS_7002i.ppd.gz	496e9c9810fd729e50d38d0a4e5180b3	187770
-kyocera-20190328-Kyocera_CS_7052ci.ppd.gz	e10b8b7d6282c92ca17e7a7889f2023e	198232
-kyocera-20190328-Kyocera_CS_9002i.ppd.gz	8897903dc7cd2bb4e002f8c96216372d	187770
-kyocera-20190328-Kyocera_ECOSYS_M2030dn.ppd.gz	8d59e2a825f6e70a3b7a77396b343fa3	187770
-kyocera-20190328-Kyocera_ECOSYS_M2035dn.ppd.gz	93a68abbfc47d6b88dd8c0ba6db10094	187770
-kyocera-20190328-Kyocera_ECOSYS_M2040dn.ppd.gz	7d2f37611f8a980744a9e37dfb29e471	187770
-kyocera-20190328-Kyocera_ECOSYS_M2235dn.ppd.gz	51e4d220fa27e7946fffcbbf2a1d2e11	187770
-kyocera-20190328-Kyocera_ECOSYS_M3040dn.ppd.gz	db4f9322f23a8026dda84e246aecb337	187770
-kyocera-20190328-Kyocera_ECOSYS_M3145dn.ppd.gz	db4f9322f23a8026dda84e246aecb337	187770
-kyocera-20190328-Kyocera_ECOSYS_M4028idn.ppd.gz	2db4d3b6fcaada9f9fcee614e1dd743e	187770
-kyocera-20190328-Kyocera_ECOSYS_M4125idn.ppd.gz	fb08a314095d0bb8d220cf5bffced1a7	187770
-kyocera-20190328-Kyocera_ECOSYS_M5021cdn.ppd.gz	009782cf373fb4fe1daca42dee1195f7	198232
-kyocera-20190328-Kyocera_ECOSYS_M5521cdn.ppd.gz	5ace7b68e7f651392b23d1136a713170	198232
-kyocera-20190328-Kyocera_ECOSYS_M6026cdn.ppd.gz	4c3f9d2c606ff013eac482e73cb2db72	198232
-kyocera-20190328-Kyocera_ECOSYS_M6230cidn.ppd.gz	1414718e201d2a116471ee5b31bb2bf8	198232
-kyocera-20190328-Kyocera_ECOSYS_M8024cidn.ppd.gz	4207a841bb84a950d8a6fab2d5251261	198232
-kyocera-20190328-Kyocera_ECOSYS_M8124cidn.ppd.gz	4207a841bb84a950d8a6fab2d5251261	198232
-kyocera-20190328-Kyocera_ECOSYS_P2035d.ppd.gz	4aa81f3a68709036d11afcfccc5f7ccf	187770
-kyocera-20190328-Kyocera_ECOSYS_P3045dn.ppd.gz	4aa81f3a68709036d11afcfccc5f7ccf	187770
-kyocera-20190328-Kyocera_ECOSYS_P4035dn.ppd.gz	8815d321a23b5f807386a97548d4eee4	187770
-kyocera-20190328-Kyocera_ECOSYS_P6026cdn.ppd.gz	2c728f61bb9f95dd346754098ecefd13	198232
-kyocera-20190328-Kyocera_ECOSYS_P8060cdn.ppd.gz	b037ec0b3d03bf2e63e777b5b3a49420	198232
-kyocera-20190328-Kyocera_FS-5040DN.ppd.gz	53e75995e520fb8110dc86509ed3ddde	187770
-kyocera-20190328-Kyocera_TASKalfa_4020i.ppd.gz	0238ca8dd5364ca2c2bb1c6a93db91ac	187770
-kyocera-20190328-Kyocera_TASKalfa_406ci.ppd.gz	dc41fc84559cd81a4dcee868cdc3a435	198232
-kyocera-20190328-Kyocera_TASKalfa_4500i.ppd.gz	1a6f3833c758e8645e87b546401f30ad	187770
-kyocera-20200211-Kyocera_TASKalfa_7003i.ppd.gz	1a6f3833c758e8645e87b546401f30ad	187770
-kyocera-20200416-Kyocera_CS_205c.ppd.gz	80b6071b6dcc77a592ba40471548fae9	198232
-kyocera-20200416-Kyocera_CS_250ci.ppd.gz	57a3f5468d512540b0c85468f894fe4b	198232
-kyocera-20200416-Kyocera_CS_2550ci.ppd.gz	69451f0a5d9ec1dc1b1e5c07e056f00a	198232
-kyocera-20200416-Kyocera_CS_2553ci.ppd.gz	b980f27d7bf404d1b6675dc551782e4b	198232
-kyocera-20200416-Kyocera_CS_2554ci.ppd.gz	62d33ec78d1331feec997ca723e88594	198423
-kyocera-20200416-Kyocera_CS_255.ppd.gz	3227e395fe4a87b28312ea35a47879de	187770
-kyocera-20200416-Kyocera_CS_300i.ppd.gz	f027340e1b6e978e578281c1a0f3babe	187770
-kyocera-20200416-Kyocera_CS_3050ci.ppd.gz	d86b4cbd93b3d82b0131736e3f4feaa5	198232
-kyocera-20200416-Kyocera_CS_308ci.ppd.gz	1e224fc82cbb21a36b422fa6ba269a88	198232
-kyocera-20200416-Kyocera_CS_3500i.ppd.gz	69573e48816180739696d3a36ed7ad6d	187770
-kyocera-20200416-Kyocera_CS_4003i.ppd.gz	c6f54990da0b1a7584c21165d911ba2f	187770
-kyocera-20200416-Kyocera_CS_6500i.ppd.gz	ac0f6a5a7fed43ac154187f6069bf1b1	187770
-kyocera-20200416-Kyocera_CS_6550ci.ppd.gz	054bf8ff6d96dbad9ccfcb82edd052e6	198232
-kyocera-20200416-Kyocera_CS_7003i.ppd.gz	749d9758e3b6a6f28e7a26f4c84c4f4b	187770
-kyocera-20200416-Kyocera_CS_7353ci.ppd.gz	a9d88116ea8ef894af0afc700a8a074c	198232
-kyocera-20200416-Kyocera_CS_9003i.ppd.gz	00c547cfaad4650a22add31a21981d3a	187770
-kyocera-20200416-Kyocera_ECOSYS_M3860idnf.ppd.gz	ff375a3c03b23a378831ef6982800fee	187770
-kyocera-20200416-Kyocera_ECOSYS_M3860idn.ppd.gz	ff375a3c03b23a378831ef6982800fee	187770
-kyocera-20200416-Kyocera_ECOSYS_P3260dn.ppd.gz	d77c15e314c30e1c5194f5854ded1940	187770
-kyocera-20200416-Kyocera_ECOSYS_P4135dn.ppd.gz	bf17130af266bbe6a9fc4344c45fa72f	187770
-kyocera-20200416-Kyocera_ECOSYS_P5018cdn.ppd.gz	0daf546c39b298036481e367dfa61aff	198232
-kyocera-20200416-Kyocera_FS-1028MFP.ppd.gz	d6f0b89e5a1461b1f57076c3a9638ffd	187770
-kyocera-20200416-Kyocera_FS-1030MFP.ppd.gz	b6c10b4961913fa57e107a1e8233309e	187770
-kyocera-20200416-Kyocera_FS-1035MFP.ppd.gz	20f1c23e6842cfb2ff081d6dcf283305	187770
-kyocera-20200416-Kyocera_FS-1120D.ppd.gz	8812ab4e1707b9f7ea803bfaa8fbe569	187770
-kyocera-20200416-Kyocera_FS-2020D.ppd.gz	e685347cb0aafabe3939e3b844ce91ef	187770
-kyocera-20200416-Kyocera_FS-2100D.ppd.gz	35520fed7e84eecd3d8ac0ec9ed1b311	187770
-kyocera-20200416-Kyocera_FS-3540MFP.ppd.gz	a2cb69509e39d29a4303a94dac48ef0e	187770
-kyocera-20200416-Kyocera_FS-3920DN.ppd.gz	f7c7fcc23d2279daffe0bd275232ce70	187770
-kyocera-20200416-Kyocera_FS-4100DN.ppd.gz	f7c7fcc23d2279daffe0bd275232ce70	187770
-kyocera-20200416-Kyocera_FS-6970DN.ppd.gz	559066b846bbac261cd251e243cc564e	187770
-kyocera-20200416-Kyocera_FS-9130DN.ppd.gz	6566a222ecd56874874d3a9a393fa4b6	187770
-kyocera-20200416-Kyocera_FS-C2026MFP.ppd.gz	0761862a9b0348858db32c4962a85902	198232
-kyocera-20200416-Kyocera_FS-C2026MFP+.ppd.gz	fe8b84a6f4c766e59f52eb599874af54	198232
-kyocera-20200416-Kyocera_FS-C5250DN.ppd.gz	1025070edd688e0d61cc882b40f4be61	198232
-kyocera-20200416-Kyocera_FS-C8600DN.ppd.gz	a4c3665fd6c068a2d2980c6e838d45c3	198232
-kyocera-20200416-Kyocera_TASKalfa_3060ci.ppd.gz	87b5273d13bdec15a69f09d71850476b	198232
-kyocera-20200716-Kyocera_ECOSYS_M2540dwJ.ppd.gz	f609d7667b286c345b821bdaa99aecbf	187766
-kyocera-20200716-Kyocera_ECOSYS_M3645idnJ.ppd.gz	a7650386b136f8fcf687f57a58670d13	187648
-kyocera-20200716-Kyocera_ECOSYS_M4226idn.ppd.gz	f667dcb101223d229ffcd3d19ea16a3d	187770
-kyocera-20200716-Kyocera_ECOSYS_M6635cidnJ.ppd.gz	63c979e5688aeea92c50b3e4274db007	198228
-kyocera-20200716-Kyocera_ECOSYS_M8224cidn.ppd.gz	74dac47a9ee4b8f6db539c19cdad6354	198232
-kyocera-20200716-Kyocera_ECOSYS_P3060dnJ.ppd.gz	f4dbf8c02005d9d10b4ccac7dd124819	187766
-kyocera-20200716-Kyocera_ECOSYS_P3145dnJ.ppd.gz	271376dc845241969428ccd3c7e854f6	187766
-kyocera-20200716-Kyocera_ECOSYS_P3145dn.ppd.gz	0b60ddf07b65963ba6e3472fc5ee9007	187770
-kyocera-20200716-Kyocera_ECOSYS_P4040dnJ.ppd.gz	838cb81c3df38cbabb666e847795f9cf	187766
-kyocera-20200716-Kyocera_ECOSYS_P4060dnJ.ppd.gz	838cb81c3df38cbabb666e847795f9cf	187766
-kyocera-20200716-Kyocera_ECOSYS_P4140dnJ.ppd.gz	838cb81c3df38cbabb666e847795f9cf	187766
-kyocera-20200716-Kyocera_ECOSYS_P8060cdnJ.ppd.gz	24ccb755024a77a3ad1c684cdedd8dd5	198228
-kyocera-20200716-Kyocera_TASKalfa_2460ciJ.ppd.gz	3f6cf653a3d64fff276861d458179f3d	198228
-kyocera-20200716-Kyocera_TASKalfa_2510iJ.ppd.gz	9f10b87723acab57c94c5cc88f068cd7	187766
-kyocera-20200716-Kyocera_TASKalfa_2553ciJ.ppd.gz	66c600ff0671ce8ea7cb61a319d3445b	198228
-kyocera-20200716-Kyocera_TASKalfa_2553ci.ppd.gz	ebb1b3ddafd9bb20604526a83928405b	198232
-kyocera-20200716-Kyocera_TASKalfa_308ci.ppd.gz	9a78d58380b51e8c67122bdbae62fa19	198232
-kyocera-20200716-Kyocera_TASKalfa_3212iJ.ppd.gz	5f8d74e52e8bb59e5948c4563a409bbf	187648
-kyocera-20200716-Kyocera_TASKalfa_352ci.ppd.gz	9a78d58380b51e8c67122bdbae62fa19	198232
-kyocera-20200716-Kyocera_TASKalfa_358ciJ.ppd.gz	780ecbff1c91f3ca8e7a84930c4e0718	198228
-kyocera-20200716-Kyocera_TASKalfa_4003i.ppd.gz	f71d37bc57d823da6bd4260700d8fcc8	187770
-kyocera-20200716-Kyocera_TASKalfa_4012iJ.ppd.gz	e521b2ed138d3a1b726f3d8ebcbe669c	187647
-kyocera-20200716-Kyocera_TASKalfa_5003iJ.ppd.gz	3039c7ca415b11ecc669bd92075f9564	187765
-kyocera-20200716-Kyocera_TASKalfa_7003iJ.ppd.gz	6869715f631ef438daaae4402a4d1080	187765
-kyocera-20200716-Kyocera_TASKalfa_7353ciJ.ppd.gz	0a350bfadc13849fb8e3248888101544	198227
-kyocera-20200716-TA_P-4531_MFP.ppd.gz	5c75828e2199567c537d3c2df38387f8	187769
-lanier-20190916-Lanier-IM_550_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2755
-lanier-20190916-Lanier-IM_600SR_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2825
-lanier-20190916-Lanier-P_800_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2755
-lanier-20190916-Lanier-P_C600_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2827
-lexmark-20200918-Lexmark_X651de.ppd.gz	61f146c2e529170b181aa09d1ae640ba	240529
-lexmark-20200918-Lexmark_X658de.ppd.gz	e7d24d778413e13cf91b2b0ce3e65310	240912
-lexmark-20201101-Lexmark_6500e_Series.ppd.gz	c2c4f3417537f75bf67d5de859c1a945	241035
-lexmark-20201101-Lexmark_B2300_Series.ppd.gz	b9874b2cbff009b743e89e84e87d8d1a	188598
-lexmark-20201101-Lexmark_C2200_Series.ppd.gz	ebb5f539f79a5d7d06a840b3e62a97b7	199984
-lexmark-20201101-Lexmark_C2300_Series.ppd.gz	8bf456b6a7842f3cb2f33734b5d8bf65	199761
-lexmark-20201101-Lexmark_C2400_Series.ppd.gz	f5b9670586e72f8140aec1b7d6757126	199914
-lexmark-20201101-Lexmark_C3400_Series.ppd.gz	9222fcb71ef6a1735904e44cadbf83ca	199540
-lexmark-20201101-Lexmark_C740_Series.ppd.gz	f51971aeefd7f6664c4b64cb7015e3e0	359319
-lexmark-20201101-Lexmark_C790_Series.ppd.gz	88fdb6bcf4fad1300516831830f0010c	359787
-lexmark-20201101-Lexmark_C9200_Series.ppd.gz	ee4d0e239dda2e910901abd8fffb9bd2	200800
-lexmark-20201101-Lexmark_C920_Series.ppd.gz	6f2221e5cfddd93f3cf9cccfcb8ea668	241286
-lexmark-20201101-Lexmark_C950_Series.ppd.gz	728dfbc2356208cb3c11aec793f7ee95	241826
-lexmark-20201101-Lexmark_CS310_Series.ppd.gz	1470d3017d87ed9eb2579edbe1c26f2b	199680
-lexmark-20201101-Lexmark_CS410_Series.ppd.gz	5bb44bdb9d23500f51eb369a56a34ff5	199821
-lexmark-20201101-Lexmark_CS510_Series.ppd.gz	d02a9da2811f05a7179b26ca1650df00	359376
-lexmark-20201101-Lexmark_CS720_Series.ppd.gz	6b72341a0f18836dd251d718c9486394	199905
-lexmark-20201101-Lexmark_CS820_Series.ppd.gz	670c83e4549dce706b1b3eb6043fb1e8	200505
-lexmark-20201101-Lexmark_CS920_Series.ppd.gz	8f059e1ee358fa172f197fb8f185fed8	200800
-lexmark-20201101-Lexmark_CX310_Series.ppd.gz	131eb6d16e64e75a8f21f88abfde5ff1	359376
-lexmark-20201101-Lexmark_CX410_Series.ppd.gz	077abc7d521668b5b0c63b66eada2aaf	200156
-lexmark-20201101-Lexmark_CX420_Series.ppd.gz	dea9b12cf154aec1c1458685b8dd1578	199914
-lexmark-20201101-Lexmark_CX430_Series.ppd.gz	b506cf0311d413979d529bd43b1138bb	199540
-lexmark-20201101-Lexmark_CX510_Series.ppd.gz	831b59a653ba85077a03552152f34b6e	273873
-lexmark-20201101-Lexmark_CX625_Series.ppd.gz	6c69c0aaabed3af2bba5b37465c8849f	199984
-lexmark-20201101-Lexmark_CX725_Series.ppd.gz	f6351829563cfea9ceaa6c06d20df236	199905
-lexmark-20201101-Lexmark_CX820_Series.ppd.gz	81fda7b706a6eff37948e1040cb127d0	200505
-lexmark-20201101-Lexmark_CX825_Series.ppd.gz	69e343b34fa3bbe0b7b7a3f1544ae76b	200860
-lexmark-20201101-Lexmark_CX920_Series.ppd.gz	e24fa38e53926c99af80462e2ebd65c4	200799
-lexmark-20201101-Lexmark_M1100_Series.ppd.gz	b2787dd33554d1f823563890bb0534be	213908
-lexmark-20201101-Lexmark_M3100_Series.ppd.gz	3330fe7467dc553ec0c61acb180c4311	213887
-lexmark-20201101-Lexmark_M5100_Series.ppd.gz	fafe3a96ad5f6039b1ef3798a9ddf358	214295
-lexmark-20201101-Lexmark_MB2300_Series.ppd.gz	1230461c9f2ad0823b860be9bb721c7d	188598
-lexmark-20201101-Lexmark_MC2300_Series.ppd.gz	85b08c30131b230371f7d350a0644a0e	199761
-lexmark-20201101-Lexmark_MS310_Series.ppd.gz	bef19bbea014fbcf19ce7584e240352e	188863
-lexmark-20201101-Lexmark_MS410_Series.ppd.gz	200e055d7c2c73fd116e8dc2081e6ba1	213868
-lexmark-20201101-Lexmark_MS510_Series.ppd.gz	42d84fbebf947f258feca3f24f728dd6	240543
-lexmark-20201101-Lexmark_MS610_Series.ppd.gz	b35a6f5b8ecb8d166dfebc40a3cf88c3	240543
-lexmark-20201101-Lexmark_MS620_Series.ppd.gz	a90c1a24e8c7ce0e2cc57b353e26ee1e	188738
-lexmark-20201101-Lexmark_MS710_Series.ppd.gz	baae40695b16f2739b26e2c3a9ca4b9f	240924
-lexmark-20201101-Lexmark_MS725_Series.ppd.gz	31afe12036ed05c8846c732171cd7068	190145
-lexmark-20201101-Lexmark_MS810_Series.ppd.gz	7881d8209b61ca18cc5d57910d5120f7	240924
-lexmark-20201101-Lexmark_MS820_Series.ppd.gz	46b42efd09bdb61877b1d19be8c4c547	190145
-lexmark-20201101-Lexmark_MX310_Series.ppd.gz	2ba525e0a84838264ba5d96c0f644c6b	188864
-lexmark-20201101-Lexmark_MX410_Series.ppd.gz	e0d12f47c0ecb85cc31d6c0706ea6097	240543
-lexmark-20201101-Lexmark_MX510_Series.ppd.gz	3b37e0cb0a337527783f9aec57ca6f49	240543
-lexmark-20201101-Lexmark_MX520_Series.ppd.gz	f5a773cc8adf6b54a89646e6f92b1de0	188738
-lexmark-20201101-Lexmark_MX610_Series.ppd.gz	cec71b7f07af9e9447dd3db51f5ef09e	240701
-lexmark-20201101-Lexmark_MX620_Series.ppd.gz	0597d0eb1bd7253ad7286f9c4ae6b156	188884
-lexmark-20201101-Lexmark_MX6500e_Series.ppd.gz	de05962016c44bb58281ad72030ed7d2	214250
-lexmark-20201101-Lexmark_MX710_Series.ppd.gz	6ec3cd891606e956d3f5a61296abad66	240528
-lexmark-20201101-Lexmark_MX720_Series.ppd.gz	ca4ea7e91024304b0cf228ccf6883a6d	188878
-lexmark-20201101-Lexmark_MX725_Series.ppd.gz	8588a4cc7a3c263158faece1d06a81eb	188878
-lexmark-20201101-Lexmark_MX810_Series.ppd.gz	6a698ca77cb314c9c67c0e146cc9e96f	240986
-lexmark-20201101-Lexmark_MX820_Series.ppd.gz	7985024dce98cf99724992fbe4fad64d	189590
-lexmark-20201101-Lexmark_MX910_Series.ppd.gz	1ef510d54084ac756a02215a46e645c8	214710
-lexmark-20201101-Lexmark_X548_Series.ppd.gz	0d0c870813b9d19524c65e31dd5d35c4	359393
-lexmark-20201101-Lexmark_X740_Series.ppd.gz	87c327a70b6d1c1ace79be58b7f54b46	359372
-lexmark-20201101-Lexmark_X790_Series.ppd.gz	f13db56e8b2d8b7967f1976ec0913fa7	359840
-lexmark-20201101-Lexmark_X920_Series.ppd.gz	b67ea20aeb4c7f030b1295927b091e7d	241339
-lexmark-20201101-Lexmark_X950_Series.ppd.gz	c9b599858191e6ae41b0dceb690e468e	241879
-lexmark-20201101-Lexmark_XC2100_Series.ppd.gz	d177be511f8cd571e561116d6128518e	359425
-lexmark-20201101-Lexmark_XC9200_Series.ppd.gz	a48fd432878decc320e6477195d29ea9	200800
-lexmark-20201101-Lexmark_XM1100_Series.ppd.gz	cb8aae9b3af1d658b0f0239f59d313a5	213884
-lexmark-20201101-Lexmark_XM3100_Series.ppd.gz	acdb5d918ecc8d0ad1a1c983723444ec	214042
-lexmark-20201101-Lexmark_XM5100_Series.ppd.gz	c397a3b42c1ad9cecd794da241935798	213884
-lexmark-20201101-Lexmark_XM7100_Series.ppd.gz	b8ae626102093df9f3e8e5274b53e88c	214380
-oki-20200129-oki-c542-ps.ppd.gz	789c6b120fc77f88894a61c8667a38b9	245913
-oki-20200329-ES8434-PS.ppd.gz	d59426ecf19241dff4de40ddee792f8a	245989
-oki-20200329-OKB432_a.ppd.gz	4f47375a1889135d9d821f34742b152d	216855
-oki-20200329-OKB512_a.ppd.gz	c01c8847ac561f604e5ca10c5a808e6a	216855
-oki-20200329-OKB841_a110.ppd.gz	9962fc07457c66b26887fb85d29e1dd9	215246
-oki-20200329-OKI-C332-PS.ppd.gz	63aff8db087fa29d3c44ee6e2419a849	245913
-oki-20200329-OKI-C612-PS.ppd.gz	ebbe71e7371e5ad7d57500a637f3d224	246052
-oki-20200329-OKI-C712-PS.ppd.gz	82cda096513add112b54d9322aee1b94	246133
-oki-20200329-OKI-C833-PS.ppd.gz	7f8c69aab9430d0e933cfa1e5ce4ab9b	246052
-oki-20200329-OKI-C843-PS.ppd.gz	904b0232c482abdfc2f7671cb2aaf894	246052
-oki-20200329-OKI-C844-PS.ppd.gz	dc9e0f672c3b2c09870eeec7ef44507b	245989
-oki-20201022-ES6450_PS.ppd.gz	efa67a83fb599f0c8e092518daeb873b	245913
-oki-20201022-OKI_MC843_PS.ppd.gz	0d038d44991b57cec440b8b3131fc94e	245832
-oki-20201022-OKI_MC853_PS.ppd.gz	dc20c789c55be5df8bf26c141774e8c8	245912
-oki-20201022-OKI_MC883_PS.ppd.gz	972eb24001a67a88540e77cab591f145	245912
-ricoh-20190916-Ricoh-IPSiO_SP_3400L_PXL.ppd.gz	aa68534d59f2156b897e0c665897cc44	79866
-ricoh-20190916-Ricoh-IPSiO_SP_3510SF_PXL.ppd.gz	aa68534d59f2156b897e0c665897cc44	79866
-ricoh-20190916-Ricoh-M_C250FWB_PS.ppd.gz	0cc53deff352573945ccbaa0ea877f6d	242269
+foomatic-20200219-Xerox-DocuPrint_XJ8C-lxm5700m.ppd.gz	e71ba580e989c72e9d526fcfaa1dc643	100265
+foomatic-20200219-Xerox-Phaser_3160N-pxlcolor.ppd.gz	b306abb3233e84618474ccee568bd0c3	679515
+hp-20171121-hplip-3.17.10-hp-laserjet_p1505n-pcl3.ppd.gz	84e39b285752ac0389176bab7788687f	55969
+hp-20190111-hplip-3.18.12-hp-PCL3-Class1B.ppd.gz	3337a1cf1935f7f9a4b02da2d6955799	69029
+hp-20190918-hplip-3.19.6-hp-Ampere.ppd.gz	be8a987bdd823a9426bbf2af96a4758f	69030
+hp-20190918-hplip-3.19.6-hp-CLE.ppd.gz	cca9fc316668fef12fc7aef89e9ed54b	72041
+hp-20190918-hplip-3.19.6-hp-CLE17.ppd.gz	cca9fc316668fef12fc7aef89e9ed54b	72042
+hp-20190918-hplip-3.19.6-hp-Copperhead.ppd.gz	4590cf7753dba5b27e64ec74f81ebd41	69037
+hp-20190918-hplip-3.19.6-hp-Copperhead12.ppd.gz	4590cf7753dba5b27e64ec74f81ebd41	69037
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH.ppd.gz	723a36917d14ff2f55a64a82e10a764f	69435
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH15.ppd.gz	adcda0b423faeef5f33d930b486f92a6	69435
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH17.ppd.gz	adcda0b423faeef5f33d930b486f92a6	69435
+hp-20190918-hplip-3.19.6-hp-CopperheadXLP.ppd.gz	f792e7731dd9dc1237c85c5a3db4bbab	72052
+hp-20190918-hplip-3.19.6-hp-Corbett.ppd.gz	be8a987bdd823a9426bbf2af96a4758f	69030
+hp-20190918-hplip-3.19.6-hp-DJ9xxVIP.ppd.gz	f6392d5e18f6b2907d520abdd2164358	61745
+hp-20190918-hplip-3.19.6-hp-Gemstone.ppd.gz	8c8ac7bd4c34c5cb119e735c34c4ea26	72042
+hp-20190918-hplip-3.19.6-hp-Kapan.ppd.gz	51c2abbb68379edb52415aa1d8a61c15	69040
+hp-20190918-hplip-3.19.6-hp-LJ-Class1.ppd.gz	9d3eb3426640be5cb2dcbee9bc504edf	47002
+hp-20190918-hplip-3.19.6-hp-LJ-Class2.ppd.gz	fd06f9d727475586ac359f8c557bea4d	190207
+hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz	d8977dff6d1f6c339cd6a307844a189a	118259
+hp-20190918-hplip-3.19.6-hp-LJ-Class6.ppd.gz	f77b7f729d000d4c0eb88ec547a89a94	60220
+hp-20190918-hplip-3.19.6-hp-Mimas.ppd.gz	6c1e90e9fb0e3bb843e835cb57e768c1	69037
+hp-20190918-hplip-3.19.6-hp-Mimas15.ppd.gz	8c8ac7bd4c34c5cb119e735c34c4ea26	72042
+hp-20190918-hplip-3.19.6-hp-Mimas17.ppd.gz	8c8ac7bd4c34c5cb119e735c34c4ea26	72042
+hp-20190918-hplip-3.19.6-hp-MimasTDR.ppd.gz	1a73021ad1bdb92da007f25e01e62d58	45978
+hp-20190918-hplip-3.19.6-hp-Pyramid.ppd.gz	67eeab507931190bcc387eaebce912fd	45758
+hp-20190918-hplip-3.19.6-hp-Pyramid15.ppd.gz	8c8ac7bd4c34c5cb119e735c34c4ea26	72042
+hp-20190918-hplip-3.19.6-hp-PyramidPlus.ppd.gz	1a73021ad1bdb92da007f25e01e62d58	45978
+hp-20190918-hplip-3.19.6-hp-PyramidRefresh15.ppd.gz	271e6dea227fc6d944f139333c4412b1	68184
+hp-20190918-hplip-3.19.6-hp-PyramidRefresh17.ppd.gz	cca9fc316668fef12fc7aef89e9ed54b	72042
+hp-20190918-hplip-3.19.6-hp-SPDOfficejetProAsize.ppd.gz	8c8ac7bd4c34c5cb119e735c34c4ea26	72042
+hp-20190918-hplip-3.19.6-hp-SPDOfficejetProBsize.ppd.gz	8c8ac7bd4c34c5cb119e735c34c4ea26	72041
+hp-20190918-hplip-3.19.6-hp-Saipan.ppd.gz	cdd13cca2c7a38dbc869a0f945e079a9	69030
+hp-20190918-hplip-3.19.6-hp-Saipan15B.ppd.gz	c1d80048e0d96be47b1c86e0d80d291b	72052
+hp-20190918-hplip-3.19.6-hp-Stabler.ppd.gz	6c1e90e9fb0e3bb843e835cb57e768c1	69037
+hp-20190918-hplip-3.19.6-hp-ViperMinusVIP.ppd.gz	3383b415dec9d25ec09bfab86e9171cc	68801
+hp-20190918-hplip-3.19.6-hp-ViperPlusVIP.ppd.gz	6c1e90e9fb0e3bb843e835cb57e768c1	69036
+hplip-20201209-hplip-3.20.11-hp-CLE17.ppd.gz	cca9fc316668fef12fc7aef89e9ed54b	72042
+hplip-20201209-hplip-3.20.11-hp-Mimas17.ppd.gz	8c8ac7bd4c34c5cb119e735c34c4ea26	72042
+hplip-20201209-hplip-3.20.11-hp-PyramidRefresh17.ppd.gz	cca9fc316668fef12fc7aef89e9ed54b	72042
+hplip-20201209-hplip-3.20.11-hp-SPDOfficejetProBsize.ppd.gz	8c8ac7bd4c34c5cb119e735c34c4ea26	72042
+hplip-20201209-hplip-3.20.11-hp-deskjet_3700_series.ppd.gz	cca9fc316668fef12fc7aef89e9ed54b	72043
+hplip-20201209-hplip-3.20.11-hp-deskjet_d1600_series.ppd.gz	d65408cc3bf03a6e78b839e42e537365	2533900
+hplip-20201209-hplip-3.20.11-hp-photosmart_8700_series.ppd.gz	6002a66e572631a9ad998b044288fcaf	60701
+hplip-20201209-hplip-3.20.11-hp-photosmart_a530_series.ppd.gz	c0b30155a3d836346950cefe8f8fe997	206360
+lanier-20190916-Lanier-IM_550_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2756
+lanier-20190916-Lanier-IM_600SR_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2826
+lanier-20190916-Lanier-P_800_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2756
+lanier-20190916-Lanier-P_C600_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2828
+ricoh-20190916-Ricoh-IPSiO_SP_3400L_PXL.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79871
+ricoh-20190916-Ricoh-IPSiO_SP_3510SF_PXL.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79871
 ricoh-20190916-Ricoh-MP_C306Z_JPN_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2800
-ricoh-20190916-Ricoh-P_C301SF_PS.ppd.gz	972e759571f0c0655bd4fde8c6611450	242269
-ricoh-20190916-Ricoh-SP_212Nw_PXL.ppd.gz	aa68534d59f2156b897e0c665897cc44	79866
-ricoh-20190916-Ricoh-SP_2200L_PXL.ppd.gz	aa68534d59f2156b897e0c665897cc44	79866
-ricoh-20190916-Ricoh-SP_320DN_PXL.ppd.gz	aa68534d59f2156b897e0c665897cc44	79866
+ricoh-20190916-Ricoh-SP_212Nw_PXL.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79871
+ricoh-20190916-Ricoh-SP_2200L_PXL.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79871
+ricoh-20190916-Ricoh-SP_320DN_PXL.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79871
 ricoh-20191121-Infotec-Pro_8200S_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2893
 ricoh-20191121-Infotec-Pro_8210_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2893
 ricoh-20191121-Infotec-Pro_C5200S_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2940
-ricoh-20191121-Infotec-Pro_C7200_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2989
 ricoh-20191121-Infotec-Pro_C7200S_Light_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2989
-ricoh-20191218-SP_C420e_JPN-PostscriptColor-Ricoh.ppd.gz	245ee4a58e1df732b42fab1b093efbe1	240443
+ricoh-20191121-Infotec-Pro_C7200_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2989
 ricoh-20200221-Gestetner-IM_C300_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2854
 ricoh-20200221-Lanier-IM_C400SR_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2870
 ricoh-20200221-Ricoh-SP_C342M_JPN_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2802
 ricoh-20200527-Gestetner-GS3021_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2850
 ricoh-20200527-Infotec-Pro_C5300S_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	3016
 ricoh-20200527-Lanier-IM_C6500_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2940
-ricoh-20200527-Ricoh-P_6000_JPN.ppd.gz	907b38fd141a38f4dfda9f8e4a36f523	213651
 ricoh-20200821-Infotec-Pro_C5300SL_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	3016
 ricoh-20200821-Lanier-IM_C530FB_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2827
 ricoh-20200821-Lanier-IM_C530F_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2897
 ricoh-20200821-Ricoh-IM_C2509J_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2916
 ricoh-20200821-Ricoh-IM_C3509J_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2916
-ricoh-20200821-Ricoh-IM_C6500_JPN.ppd.gz	7d14cc94cda756fdb495593e76db60e1	240443
-ricoh-20200930-Ricoh_Generic_PS_Printer.ppd.gz	cc3e61ecdb3e7b7e0a0bcd21cf46306d	243076
-sharp-20180409-Sharp-AR-M452U-ps.ppd.gz	eb06938edabbf45225bf4dd39f3a7615	215633
-sharp-20180409-Sharp-MX-2640NR-ps.ppd.gz	bbd185cd41e7da8355fcaa2ea29a217f	246696
-sharp-20180409-Sharp-MX-M283N-ps.ppd.gz	889b2e624d770b0fe6a98c8d2d164be8	215707
-sharp-20180409-Sharp-MX-M363F-ps-jp.ppd.gz	6589cc0a6ced6440830f6f508c234cb2	215626
-sharp-20180409-Sharp-MX-M623N-ps.ppd.gz	9569d13f08963d2e9e0ab929ea1b464f	215555
-sharp-20180409-Sharp-MX-M623-ps-jp.ppd.gz	bfacde8c1f152a43e748f793ab60f4e0	215551
-sharp-20190711-Sharp-MX-6240N-ps.ppd.gz	56ce50a67c37fcad3f8db6fc32632f08	249829
-sharp-20190711-Sharp-MX-6500N-ps.ppd.gz	a6a517a1c74997cd58a1701a57006511	250566
-sharp-20190711-Sharp-MX-6540FN-ps-jp.ppd.gz	60bdcfb0027633ef488426aec31933a3	249825
-sharp-20190711-Sharp-MX-C250-ps.ppd.gz	28c591db4190e41d6af92754a7edadf7	245556
-sharp-20190711-Sharp-MX-C301-ps.ppd.gz	101d8525c27b90702c3034798dce4811	245761
-sharp-20190711-Sharp-MX-M1054-ps.ppd.gz	a3383c434b5583720754f6664617c44c	219855
-sharp-20190711-Sharp-MX-M1055-ps.ppd.gz	929046600e68312c04719a70b80eeb1a	219855
-sharp-20190711-Sharp-MX-M654FN-ps-jp.ppd.gz	b0e7d4d7accad24f0ede20060f19a24e	219713
-sharp-20190711-Sharp-MX-M654N-ps.ppd.gz	9b56e628d43215a9d2ab2eb63510a258	219717
-sharp-20190711-Sharp-MX-M904-ps.ppd.gz	72f9d7acfa5ccdafdf6ed52d05f7d664	219855
-sharp-20191219-Sharp-AR-6020D-ps.ppd.gz	f720ea405403503a288a17d21e675924	214165
-sharp-20191219-Sharp-AR-6020-ps.ppd.gz	3c36e41c8cc1ee399af5718d9008c612	214066
-sharp-20191219-Sharp-AR-6026N-ps.ppd.gz	514a66c8fb6b7b84f74a512e90939082	214165
-sharp-20191219-Sharp-AR-G200-ps-jp.ppd.gz	a63133127c0cdba8d0d14415da54b57c	214062
-sharp-20191219-Sharp-BP-10C20-ps.ppd.gz	ded3715f6d4ea7a7e23fcee80f4e4dda	246899
-sharp-20191219-Sharp-DX-2000U-ps.ppd.gz	8c109e971a6a757fc891caf07ed77615	245926
-sharp-20191219-Sharp-DX-20C20-ps-jp.ppd.gz	ceda24f91256d90f28ffac7dabc2107e	246895
-sharp-20191219-Sharp-DX-2500N-ps.ppd.gz	68486dcbf9a011aed08672e7fba57201	246494
-sharp-20191219-Sharp-MX-1800N-ps.ppd.gz	e30345ea9e38c93397f9a5b10d2019ff	245343
-sharp-20191219-Sharp-MX-1810U-ps.ppd.gz	e64468f68e6c117dd7ca5da58dff5769	246372
-sharp-20191219-Sharp-MX-2300FG-ps-jp.ppd.gz	0bf1a9106e997c36721442fa8514c813	245415
-sharp-20191219-Sharp-MX-2300G-ps.ppd.gz	c74f44788c522f29f60942848ea764b1	245419
-sharp-20191219-Sharp-MX-2301N-ps.ppd.gz	f90a88ad577bbe45ac468e85d64bc15b	245859
-sharp-20191219-Sharp-MX-2310F-ps-jp.ppd.gz	72034672ea181fcae2c05173ecb0c682	246368
-sharp-20191219-Sharp-MX-2514FN-ps-jp.ppd.gz	652938007b2e8b2f0b9cea581e4ca111	246492
-sharp-20191219-Sharp-MX-2600FG-ps-jp.ppd.gz	7986576044dcb2c619cdf5a8eacf5923	246001
-sharp-20191219-Sharp-MX-2600G-ps.ppd.gz	4439f87c632323fff071cdcc8850f0d5	246005
-sharp-20191219-Sharp-MX-2610FN-ps-jp.ppd.gz	8ec9ec064bd64c98958c572705a11d21	246514
-sharp-20191219-Sharp-MX-2610N-ps.ppd.gz	f8e6a144d081b68900e9d727520f839b	246518
-sharp-20191219-Sharp-MX-2614N-ps.ppd.gz	89f30d5477b6d8b90205913a2954ecae	246921
-sharp-20191219-Sharp-MX-2631-ps-jp.ppd.gz	6ba5571f13b05cfc76e29b317012fc52	249985
-sharp-20191219-Sharp-MX-2640FN-ps-jp.ppd.gz	17e221e9b06b64776ff9e21b6317bb59	246692
-sharp-20191219-Sharp-MX-2651-ps.ppd.gz	7baa454b40e6caa4fd56c79e2cd2f08a	249996
-sharp-20191219-Sharp-MX-2661-ps-jp.ppd.gz	cb4fcbaba967d0f87126b861b98de5e4	249985
-sharp-20191219-Sharp-MX-3061-ps.ppd.gz	a05316a75e7ac44fe2b5d4ae3237c1af	249996
-sharp-20191219-Sharp-MX-3600FN-ps-jp.ppd.gz	ab738b3b4931afde17bb8ef953af9541	246001
-sharp-20191219-Sharp-MX-4100N-ps.ppd.gz	16442535a8138b88fe903feeed8ad10e	246005
-sharp-20191219-Sharp-MX-4110FN-ps-jp.ppd.gz	50e5eba98a2216c2975245f093f98584	246514
-sharp-20191219-Sharp-MX-4110N-ps.ppd.gz	aa5674549ac8a0c26a912a3472832617	246518
-sharp-20191219-Sharp-MX-4140FN-ps-jp.ppd.gz	bdfaaa0f61371b8b3c558e6452b11308	246692
-sharp-20191219-Sharp-MX-4140N-ps.ppd.gz	d3bcd85a775f6cfe50fce51e2ef0d75f	246696
-sharp-20191219-Sharp-MX-5500N-ps.ppd.gz	ed975bdaa5151684e13675bed7c70b75	245267
-sharp-20191219-Sharp-MX-C303-ps.ppd.gz	d221a324251a64e950179df7bfb4d9a2	248755
-sharp-20191219-Sharp-MX-C305W-ps-jp.ppd.gz	b0000816c43a3829b65248722b9b4c4c	248751
-sharp-20191219-Sharp-MX-M264FP-ps-jp.ppd.gz	421bdaf668a742baf595de84fc615c81	216130
-sharp-20191219-Sharp-MX-M264NV-ps.ppd.gz	453b0898966be117545600de9aa0db62	216134
-sharp-20191219-Sharp-MX-M265N-ps.ppd.gz	5f7aa9cc616453a74c310ab43f45f3eb	216230
-sharp-20191219-Sharp-MX-M266FP-ps-jp.ppd.gz	b799ae97b835bd50016cfe06d5993f68	216149
-sharp-20191219-Sharp-MX-M266N-ps.ppd.gz	adb8fda577ea2c93d494fa4fbbf2807f	216229
-sharp-20191219-Sharp-MX-M316G-ps-jp.ppd.gz	d5cc50922a48dad106e23e8aa5c81f44	216149
-sharp-20191219-Sharp-MX-M364N-ps.ppd.gz	bab4a27644e58b807ec9f0a1573dcbc2	216832
-sharp-20191219-Sharp-MX-M365FN-ps-jp.ppd.gz	cac542967aef6662c60fdfa604b552c4	216828
-sharp-20191219-Sharp-MX-M365N-ps.ppd.gz	c3d75d72fce1694a98695b54add9af73	216832
-sharp-20191219-Sharp-MX-M464FN-ps-jp.ppd.gz	49c545cf7388623cffdd8dc192f2bf66	216828
-sharp-20191230-Sharp-AR-B350W-ps-jp.ppd.gz	ed143d7e0e57e67c93bbac609dff8c5e	215886
-sharp-20191230-Sharp-AR-B351-ps.ppd.gz	3b96a28fd46bffa30a3efe5504cda93e	215890
-sharp-20191230-Sharp-DX-C310-ps.ppd.gz	91a1a35a96c5e6495f37499f946207eb	245686
-sharp-20191230-Sharp-MX-2630FN-ps-jp.ppd.gz	599dcaa5449d55518d107e82f25735a2	249395
-sharp-20191230-Sharp-MX-2630N-ps.ppd.gz	73532cdd3afba184689cf5e854a113cb	249406
-sharp-20191230-Sharp-MX-2650FN-ps-jp.ppd.gz	b79836e94d33266c72f2a11d5be66347	249395
-sharp-20191230-Sharp-MX-3060N-ps.ppd.gz	4c3439a4e3ee86c1c5a0208fa2e783e8	249406
-sharp-20191230-Sharp-MX-6580N-ps.ppd.gz	4d54aef65b0a8ea8868cffdfaf243c7b	251423
-sharp-20191230-Sharp-MX-7090N-ps.ppd.gz	b43d9b6c70f80a14e0211970a3c0414a	252088
-sharp-20191230-Sharp-MX-B355W-ps.ppd.gz	23163023ffe7e2192cd878f4672f6054	219262
-sharp-20191230-Sharp-MX-B356W-ps.ppd.gz	47933e21491db67c3a1af0df5582ade3	219393
-sharp-20191230-Sharp-MX-B380P-ps.ppd.gz	93cce4cb30a40296a52d1bc08aee6417	215313
-sharp-20191230-Sharp-MX-M2630-ps.ppd.gz	999fd87335f906a22b4888c14d0a1a86	219911
-sharp-20191230-Sharp-MX-M2651-ps.ppd.gz	fee63cdaabf8d2cc35c8436c7b3aa586	219934
-sharp-20191230-Sharp-MX-M3070-ps.ppd.gz	879bb73fb424cd9d8d33e613c44a2e29	219835
-sharp-20191230-Sharp-MX-M3071-ps.ppd.gz	3a35c43f5a761ae59174221e8b1df379	219858
-sharp-20191230-Sharp-MX-M3531-ps-jp.ppd.gz	426872e62291dc286187273a04f82bbe	219847
-sharp-20191230-Sharp-MX-M6570-ps.ppd.gz	0ffafaa1f5db09c77708cb244a321b49	221072
-sharp-20191230-Sharp-MX-M905-ps.ppd.gz	417e72bb9c43484c7d5e7e23589ee7e9	220710
-star-20171009-starcupsdrv-3.6.0-hsp7000r.ppd.gz	90d3536a6170117dc034f65727215218	24234
-star-20171009-starcupsdrv-3.6.0-hsp7000s.ppd.gz	2f460806a14ed0291dd23904a34ffc98	11519
-star-20171009-starcupsdrv-3.6.0-hsp7000v.ppd.gz	80117743883c489448b55e4c1ee69301	12607
-star-20171009-starcupsdrv-3.6.0-sp512.ppd.gz	088d272786695afe136b52d29df4a9ba	8980
-star-20171009-starcupsdrv-3.6.0-sp542.ppd.gz	6bb0e97d8be7f1ec6daa1e7b75247dba	8980
-star-20171009-starcupsdrv-3.6.0-tsp1000.ppd.gz	dd544c126b6847c84569d7330aeb1582	24215
-star-20171009-starcupsdrv-3.6.0-tsp828l.ppd.gz	a2ba7a96730eb625ffc2c3fa4bc77007	24210
-star-20171009-starcupsdrv-3.6.0-tup542.ppd.gz	b9788d8f8c0e769c02bf8850089f243c	24215
-star-20171009-starcupsdrv-3.6.0-tup592.ppd.gz	7f0abb14456d7c752820b4e6d4f4ed58	24244
-star-20171009-starcupsdrv-3.6.0-tup942.ppd.gz	469b9beb2df364fd3b7bc7a1e9bd0ef3	24212
-star-20171009-starcupsdrv-3.6.0-tup992.ppd.gz	6af9e072de0324077fbc0445470e9375	24219
-star-20191209-fvp10.ppd.gz	08a4ee9c1df5c0c13219898fe578826f	24244
-star-20191209-sp712.ppd.gz	571a855dd8b116615ef17eebbc40a683	8999
-star-20191209-sp717.ppd.gz	571a855dd8b116615ef17eebbc40a683	8999
-star-20191209-sp742.ppd.gz	d417b28a8f9eade9d307e8a3ef854012	8999
-star-20191209-sp747.ppd.gz	d417b28a8f9eade9d307e8a3ef854012	8999
-star-20191209-tsp113.ppd.gz	c5f179726b84b7b688ea3fe749c59024	24219
-star-20191209-tsp143gt.ppd.gz	ad066c1686148a28648929485e1519d6	24220
-star-20191209-tsp143.ppd.gz	ad066c1686148a28648929485e1519d6	24220
-star-20191209-tsp654.ppd.gz	3f847c3f8b550a497cc59bed35e89235	24242
-star-20191209-tsp700II.ppd.gz	02355b92b84261faf982c238a5b5c22c	24238
-star-20191209-tsp800II.ppd.gz	3f639fb56769f12cc20408b1e31814c9	24234
-xerox-20190225-xr6605dn.ppd.gz	1c6a783066ca72863c79dc7c707be679	239342
-xerox-20190225-xr8580dn.ppd.gz	bdd1884d365eeeac0aeafefcbadc7c60	255250
-xerox-20190225-xrx3655s.ppd.gz	410b048aafaf00b1c2bd5d9a428c673e	213023
-xerox-20190225-xrx4622.ppd.gz	04788a6220b4176c6cca3cbe4dc935c6	218952
-xerox-20190225-xrx5330.ppd.gz	9536187046c5494d7a58204df1399f5d	213926
-xerox-20190225-xrx5875.ppd.gz	d7cf8d24be826aada7b02ae20ac61710	213351
-xerox-20190225-xrx7830.ppd.gz	02baa01c606ca93724d6feedc6292fba	240071
-xerox-20190225-xrx7970.ppd.gz	02baa01c606ca93724d6feedc6292fba	240071
-xerox-20190225-xrx8580n.ppd.gz	775ab5aab8831c9cc74a65493d787493	255248
-xerox-20190225-xrxd95cp.ppd.gz	2ec27940a6f17a127c07620489372156	214072
-xerox-20190711-xrwc3335.ppd.gz	e7814d9ac9cf5a47e5fae117a9ceab17	213055
-xerox-20190711-xrx6510.ppd.gz	0f5d134c209bed25a9d32ea84ad00696	239445
-xerox-20190820-xrxosd.ppd.gz	fe00660b3196891ec98256ef8da1ffd4	197182
-xerox-20191030-Xerox_Phaser_7800DN.ppd.gz	80d7988e9a3bbebd19a222bd825c2b5d	248878
-xerox-20191030-Xerox_Phaser_7800DX.ppd.gz	75155009a2d72683e459543219076376	248884
-xerox-20191030-Xerox_Phaser_7800GX.ppd.gz	1af9b7900be893594f40b33da1d7b5cf	248878
-xerox-20191030-Xerox_VersaLink_C500.ppd.gz	a4b3c12e48730405680ea5fde5413a22	239552
-xerox-20191030-Xerox_VersaLink_C505.ppd.gz	f06bec617936c120f8b16edd2e0b7ec1	239476
-xerox-20191030-Xerox_VersaLink_C600.ppd.gz	c3aff17a4ebfe38272145ef4602ef0fc	239629
-xerox-20191030-Xerox_VersaLink_C605.ppd.gz	523bb5c807669582bc0e9fdc2f646a94	239553
-xerox-20191030-xrxB400.ppd.ppd.gz	43969d2287d6612f01d677295cb4e35a	212722
-xerox-20191030-xrxB405.ppd.ppd.gz	5999b3e234b79ff80776eaac8901ff5b	212722
-xerox-20191030-xrxb600.ppd.gz	fb6efc5de8949cd4a5a687b59da68200	212756
-xerox-20191030-xrxb615.ppd.gz	e10492a55524b2dff22c0888f272a690	212756
-xerox-20191030-xrxB7025.ppd.gz	9cc27e250cdc257a8d6cf257e4360cdb	213090
-xerox-20191030-xrxB7030.ppd.gz	9cc27e250cdc257a8d6cf257e4360cdb	213090
-xerox-20191030-xrxB7035.ppd.gz	9cc27e250cdc257a8d6cf257e4360cdb	213090
-xerox-20191030-xrxB8045.ppd.gz	fb9946250467da0a28fb3b3c15461a5b	213351
-xerox-20191030-xrxC400.ppd.gz	4ac8dc801afd2f673b7ce9d6bf3d1e59	239472
-xerox-20191030-xrxC405.ppd.gz	4ac8dc801afd2f673b7ce9d6bf3d1e59	239472
-xerox-20191030-xrxC7000.ppd.gz	a0877a97129c8abbbf13538ac3f815fc	239713
-xerox-20191030-xrxC7030.ppd.gz	5c920021ef688048cecf4fddcd8244fe	239813
-xerox-20191030-xrxC8000.ppd.gz	71e36c26b40517688eeed0c72bc9e235	239889
-xerox-20191030-xrxC8030.ppd.gz	fc47570cfa77b06f279e525b892f1586	240071
-xerox-20200129-xrxC9065.ppd.gz	8539917efa4faf8f62978f6d2f0edda2	239821
-xerox-20200226-xrxB9100.ppd.gz	08bd80edf964d65e2a85869dfb11c8a2	213101
-xerox-20201014-xrxC8000W.ppd.gz	b8a55288fece7f6e4ee0b98ea7784251	239813
+ricoh-20210222-Gestetner-G3020c_PXL.ppd.gz	aac67d6b84abdaf0d93cd18f69bf6b0c	679673
+ricoh-20210222-Lanier-IM_7000_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2868
+ricoh-20210601-Gestetner-GS3025m_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2844
+ricoh-20210601-Gestetner-GS3040m_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2844
+ricoh-20210601-Gestetner-GS3060m_PDF.ppd.gz	01cb55c1a74a40739f0b2cdb2319cb1f	2844
+ricoh-20210601-Ricoh-M_C2000_PXL.ppd.gz	aac67d6b84abdaf0d93cd18f69bf6b0c	679673
+ricoh-20210601-Ricoh-SP_2300L_PXL.ppd.gz	39b28d9f8af78fe5b7f79fde90db0133	79853
+star-20171009-starcupsdrv-3.6.0-hsp7000r.ppd.gz	7d803645cdf4c19b5b42d3b8e6ce6f46	24377
+star-20171009-starcupsdrv-3.6.0-hsp7000s.ppd.gz	3989c9ae12c477f0d1d2c88dce87dede	11647
+star-20171009-starcupsdrv-3.6.0-hsp7000v.ppd.gz	c0a3042e9e5386964329a6729952c353	12735
+star-20171009-starcupsdrv-3.6.0-sp512.ppd.gz	2d0cdde650d4eb077c1caf8936009980	9108
+star-20171009-starcupsdrv-3.6.0-sp542.ppd.gz	e5cbc82b83d778cf53830dcfa60de40f	9108
+star-20171009-starcupsdrv-3.6.0-tsp1000.ppd.gz	924fb85273c311949f2a2265cb0609e9	24230
+star-20171009-starcupsdrv-3.6.0-tsp828l.ppd.gz	643f8b972fbcd8ed567c83b46982d6df	24225
+star-20171009-starcupsdrv-3.6.0-tup542.ppd.gz	d6ea9b141f616b4f6074a3a28bccc4e9	24358
+star-20171009-starcupsdrv-3.6.0-tup592.ppd.gz	74f9a8cc33ad367532af32cf1caa954a	24387
+star-20171009-starcupsdrv-3.6.0-tup942.ppd.gz	33a9fbf9d562fe599d76485456e6364a	24355
+star-20171009-starcupsdrv-3.6.0-tup992.ppd.gz	545d7bb320fbf707c84870d50f0ca672	24362
+star-20191209-fvp10.ppd.gz	2d727bf845523285659b904387384953	24387
+star-20191209-mcp20.ppd.gz	2b01896f503a4edd69f5a4ccb0ccf5c2	22659
+star-20191209-mcp21.ppd.gz	2b01896f503a4edd69f5a4ccb0ccf5c2	22659
+star-20191209-mcp30.ppd.gz	d6ff1dcd725dc84f941afe33a52ce230	26991
+star-20191209-mcp31.ppd.gz	5c51d46eb042bd59cc99c0259a63dc70	26995
+star-20191209-pop10.ppd.gz	c20c9730616f24c5dc388003c69f9388	22663
+star-20191209-sp712.ppd.gz	6f2ad14d5a40f857bf2bc116adb56fbe	9127
+star-20191209-sp717.ppd.gz	6f2ad14d5a40f857bf2bc116adb56fbe	9127
+star-20191209-sp742.ppd.gz	0f5c501f86f8ec6b15c70944c2fc6faf	9127
+star-20191209-sp747.ppd.gz	0f5c501f86f8ec6b15c70944c2fc6faf	9127
+star-20191209-tsp113.ppd.gz	0a2467af833b5aed66be3939396e5996	24234
+star-20191209-tsp143.ppd.gz	6229b951c1824d7df0a438574dc194c9	24235
+star-20191209-tsp143gt.ppd.gz	6229b951c1824d7df0a438574dc194c9	24235
+star-20191209-tsp654.ppd.gz	295acd6bff1ab12359889b91a0ed485e	24385
+star-20191209-tsp700II.ppd.gz	892c872970b87ced563bfdaa50823411	24381
+star-20191209-tsp800II.ppd.gz	b75c26e764a241fc716aeb83ca6e04ab	24377
+zebra-20210504-SP-005645A.ppd.gz	d4fc162771ad2721bdab8c8a89d0b4fb	23670
diff --git a/client/site_tests/platform_PrinterPpds/do_not_test.txt b/client/site_tests/platform_PrinterPpds/do_not_test.txt
index 28f673a..469a553 100644
--- a/client/site_tests/platform_PrinterPpds/do_not_test.txt
+++ b/client/site_tests/platform_PrinterPpds/do_not_test.txt
@@ -1,3 +1,17 @@
+dymo-20200714-lm280.ppd.gz
+dymo-20200714-lm400.ppd.gz
+dymo-20200714-lm420p.ppd.gz
+dymo-20200714-lm450.ppd.gz
+dymo-20200714-lm500ts.ppd.gz
+dymo-20200714-lmpc.ppd.gz
+dymo-20200714-lmpc2.ppd.gz
+dymo-20200714-lmpnp.ppd.gz
+dymo-20200714-lmpnpw.ppd.gz
+dymo-20200714-lp350.ppd.gz
+dymo-20200714-lw450dt.ppd.gz
+dymo-20200714-lwduot.ppd.gz
+dymo-20200714-lwduot2.ppd.gz
+foomatic-20191029-Apollo-P-1200-pcl3.ppd.gz
 foomatic-20191029-Samsung_ML-2525W_Series_PXL.ppd.gz
 foomatic-20200219-Apple-Color_StyleWriter_1500-lpstyl.ppd.gz
 foomatic-20200219-Apple-ImageWriter_II-iwhi.ppd.gz
@@ -14,6 +28,7 @@
 foomatic-20200219-Compaq-IJ1200-drv_z42.ppd.gz
 foomatic-20200219-Compaq-IJ750-lxm3200X.ppd.gz
 foomatic-20200219-Dell-3010cn-pxldpl.ppd.gz
+foomatic-20200219-Epson-LP-9600SPD-Postscript-Epson.ppd.gz
 foomatic-20200219-Epson-PM_760C-PM760pX.upp.ppd.gz
 foomatic-20200219-Epson-PM_820C-PM820pX.upp.ppd.gz
 foomatic-20200219-Epson-SQ_1170-stcolor.ppd.gz
@@ -22,6 +37,25 @@
 foomatic-20200219-Epson-Stylus_Photo_720-Stp720pX.upp.ppd.gz
 foomatic-20200219-Epson-Stylus_Photo_870-Stp870pX.upp.ppd.gz
 foomatic-20200219-Generic-PCL_3_Printer-pcl3.ppd.gz
+foomatic-20200219-HP-DesignJet_1050C-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_1055CM-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_2500CP-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_3500CP-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_5000PS-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_5500ps-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_800PS-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DeskJet_1000C-pnm2ppa.ppd.gz
+foomatic-20200219-HP-DeskJet_200-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_310-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_320-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_340C-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_540C-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_560C-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_712C-pnm2ppa.ppd.gz
+foomatic-20200219-HP-DeskJet_722C-pnm2ppa.ppd.gz
+foomatic-20200219-HP-DeskJet_820C-pnm2ppa.ppd.gz
+foomatic-20200219-HP-DeskJet_Plus-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_Portable-pcl3.ppd.gz
 foomatic-20200219-Imagistics-im8530-Postscript-Oce.ppd.gz
 foomatic-20200219-KONICA_MINOLTA-bizhub_750-Postscript-KONICA_MINOLTA.ppd.gz
 foomatic-20200219-Kyocera-CS-1815-Postscript-Kyocera.ppd.gz
@@ -59,23 +93,15 @@
 foomatic-20200219-Samsung-X7600-Postscript-Samsung.ppd.gz
 foomatic-20200219-SiPix-Pocket_Printer_A6-sipixa6.upp.ppd.gz
 hp-20190918-hplip-3.19.6-hp-P15_CISS.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z2600_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z5400-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z5600_postscript-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_Z6_24in-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_Z6_44in-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_Z6dr_44in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_Z9_24in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_Z9_44in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_Z9dr_44in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-PCL3-Class3A.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-PCL3-Class3B.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-PCL3-Class3.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-PCL4-Class1.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z2600_postscript-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z5400-postscript.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z5600_postscript-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_Z9_24in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_Z9_44in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_Z9dr_44in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-P15_CISS.ppd.gz
 kyocera-20190830-Kyocera_Generic_Monochrome.ppd.gz
-star-20191209-mcp20.ppd.gz
-star-20191209-mcp21.ppd.gz
-star-20191209-mcp30.ppd.gz
-star-20191209-mcp31.ppd.gz
-star-20191209-pop10.ppd.gz
 star-20191209-tsp651.ppd.gz
diff --git a/client/site_tests/platform_PrinterPpds/fake_printer.py b/client/site_tests/platform_PrinterPpds/fake_printer.py
index 0603e74..bcf64ff 100644
--- a/client/site_tests/platform_PrinterPpds/fake_printer.py
+++ b/client/site_tests/platform_PrinterPpds/fake_printer.py
@@ -4,7 +4,10 @@
 
 import socket
 import threading
-import Queue
+
+# Importing from six to maintain compatibility with Python 2. Safe to
+# `import queue` once Tauto transitions fully to Python 3.
+from six.moves import queue
 
 _BUF_SIZE = 4096
 
@@ -40,7 +43,7 @@
         # It is set when printer is stopped because of some internal error
         self._error_message = None
         # An internal queue with printed documents
-        self._documents = Queue.Queue()
+        self._documents = queue.Queue()
         # Create a TCP/IP socket
         self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         try:
@@ -90,7 +93,7 @@
         """
         try:
             return self._documents.get(block=True, timeout=timeout)
-        except Queue.Empty:
+        except queue.Empty:
             # Builds a message for the exception
             message = 'Timeout occured when waiting for the document. '
             if self._stopped:
@@ -126,7 +129,7 @@
                     return None
 
         # Reads document
-        document = ''
+        document = bytearray()
         while True:
             try:
                 data = connection.recv(_BUF_SIZE)
@@ -135,7 +138,7 @@
                     # we got the whole document - exit the loop
                     break
                 # save chunk of the document and return to the loop
-                document += data
+                document.extend(data)
             except socket.timeout:
                 # exit if the printer was stopped, else return to the loop
                 if self._stopped:
@@ -144,7 +147,7 @@
 
         # Closes connection & returns document
         connection.close()
-        return document
+        return bytes(document)
 
 
     def _thread_read_docs(self):
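
The fake_printer.py hunks above replace the Python 2 `Queue` module with the `six.moves` shim and change the received document from a `str` accumulator to a `bytearray` that is frozen to `bytes`. A minimal standalone sketch of both patterns, assuming only that `six` is installed; the document contents and variable names below are illustrative, not part of the patch:

    # Queue import that resolves to Queue on Python 2 and queue on Python 3.
    from six.moves import queue

    docs = queue.Queue()
    docs.put(b'%!PS-Adobe-3.0\nshowpage\n')   # a fake "printed" document
    try:
        doc = docs.get(block=True, timeout=1)
    except queue.Empty:
        doc = None

    # Accumulate incoming chunks in a mutable bytearray, then freeze to bytes,
    # mirroring the str -> bytearray change in the receive loop above.
    buf = bytearray()
    for chunk in (b'%!PS-Adobe-3.0\n', b'showpage\n'):  # stand-ins for recv()
        buf.extend(chunk)
    assert bytes(buf) == doc
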
diff --git a/client/site_tests/platform_PrinterPpds/helpers.py b/client/site_tests/platform_PrinterPpds/helpers.py
index 687b9e0..953ed86 100644
--- a/client/site_tests/platform_PrinterPpds/helpers.py
+++ b/client/site_tests/platform_PrinterPpds/helpers.py
@@ -3,7 +3,7 @@
 # found in the LICENSE file.
 
 import json
-import md5
+import hashlib
 import os
 import requests
 
@@ -27,7 +27,7 @@
     """
     lines = []
     while document.startswith(prefix, position):
-        position_next_line = document.find('\n', position + len(prefix))
+        position_next_line = document.find(b'\n', position + len(prefix))
         if position_next_line < 0:
             break
         position_next_line += 1  # to eat '\n' character
@@ -60,13 +60,13 @@
         out += doc[position:(position_pjl+len(PJL_MARKER))]
         position = position_pjl + len(PJL_MARKER)
         # parse header and filter problematic lines
-        lines, position = _read_lines_with_prefix(doc, position, '@PJL')
+        lines, position = _read_lines_with_prefix(doc, position, b'@PJL')
         for line in lines:
-            if not (line.startswith('@PJL SET ') or
-                    line.startswith('@PJL COMMENT') or
-                    line.startswith('@PJL DMINFO') or
-                    line.startswith('@PJL JOB NAME') or
-                    line.startswith('@PJL JOBNAME')):
+            if not (line.startswith(b'@PJL SET ')
+                    or line.startswith(b'@PJL COMMENT')
+                    or line.startswith(b'@PJL DMINFO')
+                    or line.startswith(b'@PJL JOB NAME')
+                    or line.startswith(b'@PJL JOBNAME')):
                 out += line
         # try to find next PJL header
         position_pjl = doc.find(PJL_MARKER, position, position + MARGIN)
@@ -91,28 +91,30 @@
             accordingly.
 
     """
-    PS_MARKER = '%!PS-Adobe'
+    PS_MARKER = b'%!PS-Adobe'
     MARGIN = 2048  # max distance to the header
     position_ps = doc.find(PS_MARKER, position, position + MARGIN)
     while position_ps >= 0:
         # add everything till the end of the first line in the header
-        position_next_line = doc.find('\n', position_ps + len(PS_MARKER))
+        position_next_line = doc.find(b'\n', position_ps + len(PS_MARKER))
         if position_next_line < 0:
             break  # no more '\n', we finish the parsing here
         position_next_line += 1 # to eat \n character
         out += doc[position:position_next_line]
         # parse the rest of the header and filter problematic lines
-        lines, position = _read_lines_with_prefix(doc, position_next_line, '%')
+        lines, position = _read_lines_with_prefix(doc, position_next_line,
+                                                  b'%')
         for line in lines:
-            if not (line.startswith('%%Title:') or line.startswith('%%For:')):
+            if not (line.startswith(b'%%Title:')
+                    or line.startswith(b'%%For:')):
                 out += line
         # search for lines with '{setuserinfo}' or '/JobInfo <<'
         position_ps = doc.find(PS_MARKER, position, position + MARGIN)
-        position_ui = doc.find('{setuserinfo}', position, position + MARGIN)
-        position_ji = doc.find('/JobInfo <<', position, position + MARGIN)
+        position_ui = doc.find(b'{setuserinfo}', position, position + MARGIN)
+        position_ji = doc.find(b'/JobInfo <<', position, position + MARGIN)
         # if '/JobInfo <<' was found, move the offset to the end of the section
         if position_ji >= 0:
-            position_ji = doc.find('>>', position_ji)
+            position_ji = doc.find(b'>>', position_ji)
         # if the beginning of the next header was found, make sure that
         # detected sections do not belong to the next header
         if position_ps >= 0:
@@ -124,20 +126,19 @@
         position_end = max(position_ji, position_ui)
         if position_end >= 0:
             # find the first '\n' after the farthest section
-            position_end = doc.find('\n', position_end)
+            position_end = doc.find(b'\n', position_end)
             if position_end < 0:
                 break  # no more '\n', we finish the parsing here
             # split into lines everything from here to the end of the section
-            lines = doc[position:position_end].split('\n')
+            lines = doc[position:position_end].split(b'\n')
             position = position_end + 1  # +1 is needed to eat the last \n
             # filter problematic lines
             for line in lines:
-                if not (line.find('{setuserinfo}') >= 0 or
-                        line.find('/UserID') >= 0 or
-                        line.find('/Time') >= 0 or
-                        line.find('/HostLoginName') >= 0 or
-                        line.find('/HostName') >= 0):
-                    out += line + '\n'
+                if not (line.find(b'{setuserinfo}') >= 0 or
+                        line.find(b'/UserID') >= 0 or line.find(b'/Time') >= 0
+                        or line.find(b'/HostLoginName') >= 0
+                        or line.find(b'/HostName') >= 0):
+                    out += line + b'\n'
             # go to the next iteration, position_ps is already set
     return position, out
 
@@ -164,9 +165,11 @@
     # remove both JOB IDs and exit
     nd = len(doc)
     if nd > LIDIL_JOBID_1_OFF + LIDIL_JOBID_2_OFF + 2*JOBID_SIZE:
-        doc = ''.join([ doc[:(LIDIL_JOBID_1_OFF)],
-                doc[(LIDIL_JOBID_1_OFF+JOBID_SIZE):(nd-LIDIL_JOBID_2_OFF)],
-                doc[(nd-LIDIL_JOBID_2_OFF+JOBID_SIZE):] ])
+        doc = b''.join([
+                doc[:(LIDIL_JOBID_1_OFF)],
+                doc[(LIDIL_JOBID_1_OFF + JOBID_SIZE):(nd - LIDIL_JOBID_2_OFF)],
+                doc[(nd - LIDIL_JOBID_2_OFF + JOBID_SIZE):]
+        ])
     return doc
 
 
@@ -189,10 +192,10 @@
         return None
     # copy the document to output; filter lines parsed from the EJL header
     out = EJL_MARKER
-    lines, position = _read_lines_with_prefix(doc, len(EJL_MARKER), '@EJL')
+    lines, position = _read_lines_with_prefix(doc, len(EJL_MARKER), b'@EJL')
     for line in lines:
-        if not (line.startswith('@EJL JI ID=') or
-                line.startswith('@EJL JI USER=')):
+        if not (line.startswith(b'@EJL JI ID=')
+                or line.startswith(b'@EJL JI USER=')):
             out += line
     # add the rest of the document and exit
     out += doc[position:]
@@ -206,9 +209,9 @@
     That includes, but is not limited to: user name, host name, job id, date,
     time.
 
-    @param doc: a raw document sent directly to printer to be printed
+    @param doc: a raw document sent directly to the printer (bytes)
 
-    @returns a copy of doc with removed fragments that can vary between
+    @returns a copy of doc (bytes) with removed fragments that can vary between
         printing jobs. The returned output is supposed to be identical for the
         same input content send to the pipeline for the same PPD file.
 
@@ -223,7 +226,7 @@
 
     # Try to parse and process PJL and PS headers.
     position = 0
-    out = ''
+    out = b''
     position, out = _process_PJL_headers(doc, position, out)
     position, out = _process_PS_Adobe_headers(doc, position, out)
 
@@ -234,10 +237,10 @@
         position = position_tail
 
     # Try to find 'trailer << '.
-    position_trailer = doc.find('trailer << ', position)
+    position_trailer = doc.find(b'trailer << ', position)
     if position_trailer >= 0:
         # If found, prune the line with it.
-        position_end = doc.find('\n', position_trailer)
+        position_end = doc.find(b'\n', position_trailer)
         if position_end >= 0:
             out += doc[position:position_trailer]
             position = position_end + 1  # +1 to ommit '\n' from the trailer
@@ -252,7 +255,7 @@
     """
     Calculates digests for given document.
 
-    @param doc: document's content
+    @param doc: document's content (bytes)
 
     @returns calculated digests as a string of hexadecimals
 
@@ -261,7 +264,7 @@
     out = _normalize_document(doc)
 
     # Calculates hash
-    return md5.new(out).hexdigest()
+    return hashlib.md5(out).hexdigest()
 
 
 def parse_digests_file(path_digests, denylist):
@@ -280,7 +283,7 @@
     sizes = dict()
     denylist = set(denylist)
     if os.path.isfile(path_digests):
-        with open(path_digests, 'rb') as file_digests:
+        with open(path_digests, 'r') as file_digests:
             lines = file_digests.read().splitlines()
             for line in lines:
                 cols = line.split()
@@ -315,7 +318,7 @@
         digests_content += '\n'
 
     with open(path_digests, 'wb') as file_digests:
-        file_digests.write(digests_content)
+        file_digests.write(digests_content.encode("utf-8"))
 
 
 def load_lines_from_file(path):
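
The helpers.py hunks above move digest calculation from the removed `md5` module to `hashlib`, operate on `bytes` throughout, and keep the digests file as whitespace-separated name/digest/size lines, which `parse_digests_file` splits column by column. A small sketch under those assumptions; the sample line is copied from the listing earlier in this diff, and the variable names are illustrative:

    import hashlib

    # md5 over the (already normalized) document bytes, as in the hashlib change.
    doc = b'%!PS-Adobe-3.0\nshowpage\n'
    digest = hashlib.md5(doc).hexdigest()

    # One line of the digests file: "<ppd name>\t<md5>\t<size>".
    line = 'zebra-20210504-SP-005645A.ppd.gz\td4fc162771ad2721bdab8c8a89d0b4fb\t23670'
    name, expected_digest, size = line.split()
    print(name, expected_digest, int(size))
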
diff --git a/client/site_tests/platform_PrinterPpds/large_clusters.txt b/client/site_tests/platform_PrinterPpds/large_clusters.txt
index 2ea347a..75757d8 100644
--- a/client/site_tests/platform_PrinterPpds/large_clusters.txt
+++ b/client/site_tests/platform_PrinterPpds/large_clusters.txt
@@ -1,22 +1,22 @@
-brother-20191213-DCPL6600DW.ppd.gz	brother-20191213-HLL6300DW.ppd.gz	brother-20191213-HLL6400DW.ppd.gz	brother-20191213-HLL6402DW.ppd.gz	brother-20191213-MFCL6800DW.ppd.gz	brother-20191213-MFCL6900DW.ppd.gz	brother-20191213-MFCL6902DW.ppd.gz	brother-20191213-HL5595DNH.ppd.gz	brother-20191213-HLL6200DW.ppd.gz	brother-20191213-HLL6202DW.ppd.gz	brother-20191213-HLL6250DN.ppd.gz	brother-20191213-HLL6250DW.ppd.gz	brother-20191213-DCPL5500DN.ppd.gz	brother-20191213-DCPL5502DN.ppd.gz	brother-20191213-DCPL5600DN.ppd.gz	brother-20191213-DCPL5602DN.ppd.gz	brother-20191213-DCPL5650DN.ppd.gz	brother-20191213-DCPL5652DN.ppd.gz	brother-20191213-HL5580D.ppd.gz	brother-20191213-HL5585D.ppd.gz	brother-20191213-HL5590DN.ppd.gz	brother-20191213-HLL5000D.ppd.gz	brother-20191213-HLL5100DN.ppd.gz	brother-20191213-HLL5102DW.ppd.gz	brother-20191213-HLL5200DW.ppd.gz	brother-20191213-HLL5202DW.ppd.gz	brother-20191213-MFC8530DN.ppd.gz	brother-20191213-MFC8535DN.ppd.gz	brother-20191213-MFC8540DN.ppd.gz	brother-20191213-MFCL5700DN.ppd.gz	brother-20191213-MFCL5700DW.ppd.gz	brother-20191213-MFCL5702DW.ppd.gz	brother-20191213-MFCL5750DW.ppd.gz	brother-20191213-MFCL5755DW.ppd.gz	brother-20191213-MFCL5800DW.ppd.gz	brother-20191213-MFCL5802DW.ppd.gz	brother-20191213-MFCL5850DW.ppd.gz	brother-20191213-MFCL5900DW.ppd.gz	brother-20191213-MFCL5902DW.ppd.gz	brother-20191213-MFCL6700DW.ppd.gz	brother-20191213-MFCL6702DW.ppd.gz	brother-20191213-MFCL6750DW.ppd.gz	brother-20200131-HL5340D.ppd.gz	brother-20200131-HL5350DN.ppd.gz	brother-20200131-HL5370DW.ppd.gz	brother-20200131-HL5380DN.ppd.gz	brother-20191213-DCP7195DW.ppd.gz	brother-20191213-HLL2385DW.ppd.gz	brother-20191213-MFC7895DW.ppd.gz	brother-20191213-MFCL2770DW.ppd.gz	brother-20200131-DCP8080DN.ppd.gz	brother-20200131-DCP8085DN.ppd.gz	brother-20200131-DCP8110D.ppd.gz	brother-20200131-DCP8110DN.ppd.gz	brother-20200131-DCP8112DN.ppd.gz	brother-20200131-DCP8150DN.ppd.gz	brother-20200131-DCP8152DN.ppd.gz	brother-20200131-DCP8155DN.ppd.gz	brother-20200131-DCP8157DN.ppd.gz	brother-20200131-DCP8250DN.ppd.gz	brother-20200131-HL5440D.ppd.gz	brother-20200131-HL5450DN.ppd.gz	brother-20200131-HL5470DW.ppd.gz	brother-20200131-HL6180DW.ppd.gz	brother-20200131-MFC8380DN.ppd.gz	brother-20200131-MFC8480DN.ppd.gz	brother-20200131-MFC8510D.ppd.gz	brother-20200131-MFC8510DN.ppd.gz	brother-20200131-MFC8512DN.ppd.gz	brother-20200131-MFC8515DN.ppd.gz	brother-20200131-MFC8520DN.ppd.gz	brother-20200131-MFC8680DN.ppd.gz	brother-20200131-MFC8690DW.ppd.gz	brother-20200131-MFC8710DN.ppd.gz	brother-20200131-MFC8710DW.ppd.gz	brother-20200131-MFC8712DN.ppd.gz	brother-20200131-MFC8712DW.ppd.gz	brother-20200131-MFC8810DW.ppd.gz	brother-20200131-MFC8880DN.ppd.gz	brother-20200131-MFC8890DW.ppd.gz	brother-20200131-MFC8910DW.ppd.gz	brother-20200131-MFC8912DW.ppd.gz	brother-20200131-MFC8950DW.ppd.gz	brother-20200131-MFC8952DW.ppd.gz	brother-20191213-DCP7180DN.ppd.gz	brother-20191213-DCP7189DW.ppd.gz	brother-20191213-DCP7190DN.ppd.gz	brother-20191213-DCPB7500D.ppd.gz	brother-20191213-DCPB7520DW.ppd.gz	brother-20191213-DCPB7530DN.ppd.gz	brother-20191213-DCPB7535DW.ppd.gz	brother-20191213-DCPL2535DW.ppd.gz	brother-20191213-DCPL2540DN.ppd.gz	brother-20191213-DCPL2540DW.ppd.gz	brother-20191213-DCPL2550DN.ppd.gz	brother-20191213-DCPL2550DW.ppd.gz	brother-20191213-DCPL2551DN.ppd.gz	brother-20191213-DCPL2560DW.ppd.gz	brother-20191213-HL2560DN.ppd.gz	brother-20191213-HL2569DW.ppd.gz	brother-20191213-HL2590DN.ppd.gz	brother-20191213-HLB2050DN.ppd.gz	brother-20191213-HLB2080DW.ppd.gz	brother-20191213-HLL2360D.ppd.gz	
brother-20191213-HLL2370DN.ppd.gz	brother-20191213-HLL2370DW.ppd.gz	brother-20191213-HLL2371DN.ppd.gz	brother-20191213-HLL2375DW.ppd.gz	brother-20191213-HLL2380DW.ppd.gz	brother-20191213-HLL2395DW.ppd.gz	brother-20191213-MFC7880DN.ppd.gz	brother-20191213-MFC7889DW.ppd.gz	brother-20191213-MFC7890DN.ppd.gz	brother-20191213-MFCB7715DW.ppd.gz	brother-20191213-MFCB7720DN.ppd.gz	brother-20191213-MFCL2715DW.ppd.gz	brother-20191213-MFCL2720DN.ppd.gz	brother-20191213-MFCL2720DW.ppd.gz	brother-20191213-MFCL2730DN.ppd.gz	brother-20191213-MFCL2730DW.ppd.gz	brother-20191213-MFCL2740DW.ppd.gz	brother-20191213-MFCL2750DW.ppd.gz	brother-20200131-DCP8070D.ppd.gz	brother-20200131-HL2595DW.ppd.gz	brother-20200131-MFC8370DN.ppd.gz	brother-20200131-DCP7070DW.ppd.gz	brother-20200131-FAX7860DW.ppd.gz	brother-20200131-HL2250DN.ppd.gz	brother-20200131-HL2270DW.ppd.gz	brother-20200131-HL2280DW.ppd.gz	brother-20200131-HLL2360DN.ppd.gz	brother-20200131-HLL2360DW.ppd.gz	brother-20200131-HLL2365DW.ppd.gz	brother-20200131-HLL2372DN.ppd.gz	brother-20200131-MFC7470D.ppd.gz	brother-20200131-MFC7860DN.ppd.gz	brother-20200131-MFC7860DW.ppd.gz	brother-20200131-MFCL2705DW.ppd.gz	brother-20200131-DCP7025.ppd.gz	brother-20200131-DCP7045N.ppd.gz	brother-20200131-HL2070N.ppd.gz	brother-20200131-HL2075N.ppd.gz	brother-20200131-HL2150N.ppd.gz	brother-20200131-HL2170W.ppd.gz	brother-20200131-MFC7225N.ppd.gz	brother-20200131-MFC7820N.ppd.gz	brother-20200131-MFC7840N.ppd.gz	brother-20200131-MFC7840W.ppd.gz
 brother-20200615-DCPL8410CDW.ppd.gz	brother-20200615-HLL8260CDN.ppd.gz	brother-20200615-HLL8260CDW.ppd.gz	brother-20200615-HLL8360CDW.ppd.gz	brother-20200615-HLL9310CDW.ppd.gz	brother-20200615-MFCL8610CDW.ppd.gz	brother-20200615-MFCL8690CDW.ppd.gz	brother-20200615-MFCL8900CDW.ppd.gz	brother-20200615-MFCL9570CDW.ppd.gz	brother-20200615-DCP9030CDN.ppd.gz	brother-20200615-DCPL3510CDW.ppd.gz	brother-20200615-DCPL3550CDW.ppd.gz	brother-20200615-DCPL3551CDW.ppd.gz	brother-20200615-HL3160CDW.ppd.gz	brother-20200615-HL3190CDW.ppd.gz	brother-20200615-HLL3230CDN.ppd.gz	brother-20200615-HLL3230CDW.ppd.gz	brother-20200615-HLL3270CDW.ppd.gz	brother-20200615-HLL3290CDW.ppd.gz	brother-20200615-MFC9150CDN.ppd.gz	brother-20200615-MFC9350CDW.ppd.gz	brother-20200615-MFCL3730CDN.ppd.gz	brother-20200615-MFCL3735CDN.ppd.gz	brother-20200615-MFCL3745CDW.ppd.gz	brother-20200615-MFCL3750CDW.ppd.gz	brother-20200615-MFCL3770CDW.ppd.gz	brother-20200615-HLL3210CW.ppd.gz	brother-20200615-MFCL3710CW.ppd.gz
 brother-20201006-HL2295D-cups-en.ppd.gz	brother-20201006-HLB2000D-cups-en.ppd.gz	brother-20201006-HLL2310D-cups-en.ppd.gz	brother-20201006-HLL2325DW-cups-en.ppd.gz	brother-20201006-HLL2330D-cups-en.ppd.gz	brother-20201006-HLL2335D-cups-en.ppd.gz	brother-20201006-HLL2350DW-cups-en.ppd.gz	brother-20201006-HLL2357DW-cups-en.ppd.gz	brother-20201006-DCP7090DW-cups-en.ppd.gz	brother-20201006-DCP7095D-cups-en.ppd.gz	brother-20201006-DCP7190DW-cups-en.ppd.gz	brother-20201006-DCPL2510D-cups-en.ppd.gz	brother-20201006-DCPL2530DW-cups-en.ppd.gz	brother-20201006-DCPL2537DW-cups-en.ppd.gz	brother-20201006-FAXL2710DN-cups-en.ppd.gz	brother-20201006-HLL2390DW-cups-en.ppd.gz	brother-20201006-MFC7490D-cups-en.ppd.gz	brother-20201006-MFCB7700D-cups-en.ppd.gz	brother-20201006-MFCL2690DW-cups-en.ppd.gz	brother-20201006-MFCL2710DN-cups-en.ppd.gz	brother-20201006-MFCL2710DW-cups-en.ppd.gz	brother-20201006-MFCL2713DW-cups-en.ppd.gz	brother-20201006-MFCL2717DW-cups-en.ppd.gz	brother-20201006-DCP7080D-cups-en.ppd.gz	brother-20201006-DCPL2500D-cups-en.ppd.gz	brother-20201006-DCPL2520D-cups-en.ppd.gz	brother-20201006-DCPL2520DW-cups-en.ppd.gz	brother-20201006-FAXL2700DN-cups-en.ppd.gz	brother-20201006-HL2260D-cups-en.ppd.gz	brother-20201006-HLL2300D-cups-en.ppd.gz	brother-20201006-HLL2315DW-cups-en.ppd.gz	brother-20201006-HLL2320D-cups-en.ppd.gz	brother-20201006-HLL2340D-cups-en.ppd.gz	brother-20201006-MFC7480D-cups-en.ppd.gz	brother-20201006-MFCL2700D-cups-en.ppd.gz	brother-20201006-MFCL2700DN-cups-en.ppd.gz	brother-20201006-MFCL2700DW-cups-en.ppd.gz	brother-20201006-MFCL2685DW-cups-en.ppd.gz	brother-20201006-MFCL2707DW-cups-en.ppd.gz	brother-20201006-HL2290-cups-en.ppd.gz	brother-20201006-DCP7080-cups-en.ppd.gz	brother-20201006-HL2260-cups-en.ppd.gz	brother-20201006-HLL2300-cups-en.ppd.gz	brother-20201006-HLL2305-cups-en.ppd.gz	brother-20201006-MFC7380-cups-en.ppd.gz	brother-20201006-MFCL2680W-cups-en.ppd.gz	brother-20201006-DCP7090-cups-en.ppd.gz	brother-20201006-MFC7390-cups-en.ppd.gz
+brother-20210413-MFC5705DW.ppd.gz	brother-20191213-DCPL6600DW.ppd.gz	brother-20191213-HLL6300DW.ppd.gz	brother-20191213-HLL6400DW.ppd.gz	brother-20191213-HLL6402DW.ppd.gz	brother-20191213-MFCL6800DW.ppd.gz	brother-20191213-MFCL6900DW.ppd.gz	brother-20191213-MFCL6902DW.ppd.gz	brother-20191213-HL5595DNH.ppd.gz	brother-20191213-HLL6200DW.ppd.gz	brother-20191213-HLL6202DW.ppd.gz	brother-20191213-HLL6250DN.ppd.gz	brother-20191213-HLL6250DW.ppd.gz	brother-20191213-DCPL5500DN.ppd.gz	brother-20191213-DCPL5502DN.ppd.gz	brother-20191213-DCPL5600DN.ppd.gz	brother-20191213-DCPL5602DN.ppd.gz	brother-20191213-DCPL5650DN.ppd.gz	brother-20191213-DCPL5652DN.ppd.gz	brother-20191213-HL5580D.ppd.gz	brother-20191213-HL5585D.ppd.gz	brother-20191213-HL5590DN.ppd.gz	brother-20191213-HLL5000D.ppd.gz	brother-20191213-HLL5100DN.ppd.gz	brother-20191213-HLL5102DW.ppd.gz	brother-20191213-HLL5200DW.ppd.gz	brother-20191213-HLL5202DW.ppd.gz	brother-20191213-MFC8530DN.ppd.gz	brother-20191213-MFC8535DN.ppd.gz	brother-20191213-MFC8540DN.ppd.gz	brother-20191213-MFCL5700DN.ppd.gz	brother-20191213-MFCL5700DW.ppd.gz	brother-20191213-MFCL5702DW.ppd.gz	brother-20191213-MFCL5750DW.ppd.gz	brother-20191213-MFCL5755DW.ppd.gz	brother-20191213-MFCL5800DW.ppd.gz	brother-20191213-MFCL5802DW.ppd.gz	brother-20191213-MFCL5850DW.ppd.gz	brother-20191213-MFCL5900DW.ppd.gz	brother-20191213-MFCL5902DW.ppd.gz	brother-20191213-MFCL6700DW.ppd.gz	brother-20191213-MFCL6702DW.ppd.gz	brother-20191213-MFCL6750DW.ppd.gz	brother-20200131-HL5340D.ppd.gz	brother-20200131-HL5350DN.ppd.gz	brother-20200131-HL5370DW.ppd.gz	brother-20200131-HL5380DN.ppd.gz	brother-20191213-DCP7195DW.ppd.gz	brother-20191213-HLL2385DW.ppd.gz	brother-20191213-MFC7895DW.ppd.gz	brother-20191213-MFCL2770DW.ppd.gz	brother-20200131-DCP8080DN.ppd.gz	brother-20200131-DCP8085DN.ppd.gz	brother-20200131-DCP8110D.ppd.gz	brother-20200131-DCP8110DN.ppd.gz	brother-20200131-DCP8112DN.ppd.gz	brother-20200131-DCP8150DN.ppd.gz	brother-20200131-DCP8152DN.ppd.gz	brother-20200131-DCP8155DN.ppd.gz	brother-20200131-DCP8157DN.ppd.gz	brother-20200131-DCP8250DN.ppd.gz	brother-20200131-HL5440D.ppd.gz	brother-20200131-HL5450DN.ppd.gz	brother-20200131-HL5470DW.ppd.gz	brother-20200131-HL6180DW.ppd.gz	brother-20200131-MFC8380DN.ppd.gz	brother-20200131-MFC8480DN.ppd.gz	brother-20200131-MFC8510D.ppd.gz	brother-20200131-MFC8510DN.ppd.gz	brother-20200131-MFC8512DN.ppd.gz	brother-20200131-MFC8515DN.ppd.gz	brother-20200131-MFC8520DN.ppd.gz	brother-20200131-MFC8680DN.ppd.gz	brother-20200131-MFC8690DW.ppd.gz	brother-20200131-MFC8710DN.ppd.gz	brother-20200131-MFC8710DW.ppd.gz	brother-20200131-MFC8712DN.ppd.gz	brother-20200131-MFC8712DW.ppd.gz	brother-20200131-MFC8810DW.ppd.gz	brother-20200131-MFC8880DN.ppd.gz	brother-20200131-MFC8890DW.ppd.gz	brother-20200131-MFC8910DW.ppd.gz	brother-20200131-MFC8912DW.ppd.gz	brother-20200131-MFC8950DW.ppd.gz	brother-20200131-MFC8952DW.ppd.gz	brother-20191213-DCP7180DN.ppd.gz	brother-20191213-DCP7189DW.ppd.gz	brother-20191213-DCP7190DN.ppd.gz	brother-20191213-DCPB7500D.ppd.gz	brother-20191213-DCPB7520DW.ppd.gz	brother-20191213-DCPB7530DN.ppd.gz	brother-20191213-DCPB7535DW.ppd.gz	brother-20191213-DCPL2535DW.ppd.gz	brother-20191213-DCPL2540DN.ppd.gz	brother-20191213-DCPL2540DW.ppd.gz	brother-20191213-DCPL2550DN.ppd.gz	brother-20191213-DCPL2550DW.ppd.gz	brother-20191213-DCPL2551DN.ppd.gz	brother-20191213-DCPL2560DW.ppd.gz	brother-20191213-HL2560DN.ppd.gz	brother-20191213-HL2569DW.ppd.gz	brother-20191213-HL2590DN.ppd.gz	brother-20191213-HLB2050DN.ppd.gz	brother-20191213-HLB2080DW.ppd.gz	
brother-20191213-HLL2360D.ppd.gz	brother-20191213-HLL2370DN.ppd.gz	brother-20191213-HLL2370DW.ppd.gz	brother-20191213-HLL2371DN.ppd.gz	brother-20191213-HLL2375DW.ppd.gz	brother-20191213-HLL2380DW.ppd.gz	brother-20191213-HLL2395DW.ppd.gz	brother-20191213-MFC7880DN.ppd.gz	brother-20191213-MFC7889DW.ppd.gz	brother-20191213-MFC7890DN.ppd.gz	brother-20191213-MFCB7715DW.ppd.gz	brother-20191213-MFCB7720DN.ppd.gz	brother-20191213-MFCL2715DW.ppd.gz	brother-20191213-MFCL2720DN.ppd.gz	brother-20191213-MFCL2720DW.ppd.gz	brother-20191213-MFCL2730DN.ppd.gz	brother-20191213-MFCL2730DW.ppd.gz	brother-20191213-MFCL2740DW.ppd.gz	brother-20191213-MFCL2750DW.ppd.gz	brother-20200131-DCP8070D.ppd.gz	brother-20200131-HL2595DW.ppd.gz	brother-20200131-MFC8370DN.ppd.gz	brother-20210413-MFCB7710DN.ppd.gz	brother-20200131-DCP7070DW.ppd.gz	brother-20200131-FAX7860DW.ppd.gz	brother-20200131-HL2250DN.ppd.gz	brother-20200131-HL2270DW.ppd.gz	brother-20200131-HL2280DW.ppd.gz	brother-20200131-HLL2360DN.ppd.gz	brother-20200131-HLL2360DW.ppd.gz	brother-20200131-HLL2365DW.ppd.gz	brother-20200131-HLL2372DN.ppd.gz	brother-20200131-MFC7470D.ppd.gz	brother-20200131-MFC7860DN.ppd.gz	brother-20200131-MFC7860DW.ppd.gz	brother-20200131-MFCL2705DW.ppd.gz	fujifilm-20210604-DOCUPRINTP385DW.ppd.gz	konica_minolta-20210401-konica-minolta-20210401-bizhub5000i.ppd.gz	brother-20200131-DCP7025.ppd.gz	brother-20200131-DCP7045N.ppd.gz	brother-20200131-HL2070N.ppd.gz	brother-20200131-HL2075N.ppd.gz	brother-20200131-HL2150N.ppd.gz	brother-20200131-HL2170W.ppd.gz	brother-20200131-MFC7225N.ppd.gz	brother-20200131-MFC7820N.ppd.gz	brother-20200131-MFC7840N.ppd.gz	brother-20200131-MFC7840W.ppd.gz	fujifilm-20210604-DOCUPRINTM375DF.ppd.gz	fujifilm-20210604-DOCUPRINTM375Z.ppd.gz	fujifilm-20210604-DOCUPRINTM385Z.ppd.gz	fujifilm-20210604-DOCUPRINTP360DW.ppd.gz	fujifilm-20210604-DOCUPRINTP375D.ppd.gz	fujifilm-20210604-DOCUPRINTP375DW.ppd.gz	fujifilm-20210604-MULTIWRITER5350.ppd.gz	konica_minolta-20210401-konica-minolta-20210401-bizhub4000i.ppd.gz	konica_minolta-20210401-konica-minolta-20210401-bizhub4020i.ppd.gz	konica_minolta-20210401-konica-minolta-20210401-bizhub5020i.ppd.gz	fujifilm-20210604-DOCUPRINTM285Z.ppd.gz	fujifilm-20210604-DOCUPRINTP285DW.ppd.gz	fujifilm-20210604-DOCUPRINTM235DW.ppd.gz	fujifilm-20210604-DOCUPRINTM235Z.ppd.gz	fujifilm-20210604-DOCUPRINTM275Z.ppd.gz	fujifilm-20210604-DOCUPRINTP235D.ppd.gz	fujifilm-20210604-DOCUPRINTP275DW.ppd.gz
 cups-20170101-Generic-PDF_Printer-PDF.ppd.gz
 dymo-20200714-lm400.ppd.gz	dymo-20200714-lw450dt.ppd.gz	dymo-20200714-lwduot.ppd.gz	dymo-20200714-lm450.ppd.gz	dymo-20200714-lmpc.ppd.gz	dymo-20200714-lwduot2.ppd.gz	dymo-20200714-lm500ts.ppd.gz	dymo-20200714-lmpc2.ppd.gz	dymo-20200714-lmpnpw.ppd.gz	dymo-20200714-lp350.ppd.gz	dymo-20200714-lm420p.ppd.gz	dymo-20200714-lm280.ppd.gz	dymo-20200714-lmpnp.ppd.gz
 dymo-20200714-lw450tt.ppd.gz	dymo-20200714-lwtt.ppd.gz	dymo-20200714-lw4xl.ppd.gz	dymo-20200714-lw450.ppd.gz	dymo-20200714-lw450dl.ppd.gz	dymo-20200714-lw450t.ppd.gz	dymo-20200714-lw400.ppd.gz	dymo-20200714-se450.ppd.gz	dymo-20200714-lw400t.ppd.gz	dymo-20200714-lwduol.ppd.gz	dymo-20200714-lw330t.ppd.gz	dymo-20200714-lw320.ppd.gz	dymo-20200714-lw330.ppd.gz	dymo-20200714-lw300.ppd.gz	dymo-20200714-lw310.ppd.gz	dymo-20200714-lw315.ppd.gz
-epson-20200615-1_6_41-Epson-EP-10VA_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EW-M970A3T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-979A3_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-7700_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L7160_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EW-M571T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-978A3_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-811A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-810A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-30VA_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-7750_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L7180_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EW-M770T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2830_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2850_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-4100_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-808A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-900_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-960_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S06_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EW-M5071FT_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L5190_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-6090_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-6590_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-7100_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-640_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-830_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2750_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L4160_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-630_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-711A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-710A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-540_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L1455_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-709A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-049A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-2100_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M860F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S860-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-530_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2700_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L4150_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-M570T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-6530_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2650_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2660_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2750_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2760_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-708A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-16500_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-255_257_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-352_355_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-452_455_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L805_Series-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-ET-2710_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2720_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-4700_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3150_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3160_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-220_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-235_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-240_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-243_245_247_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-320_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-330_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-332_335_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-340_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-342_343_345_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-420_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-430_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-432_435_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-440_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-442_445_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-048A_Series-epson-escpr-en.ppd.gz	epson-20170125-Epson-L382_Series-epson-escpr-en-1.6.10.ppd.gz	epson-20200615-1_6_41-Epson-L3050_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3060_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3070_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L386_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L486_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PF-71_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PF-81_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L605_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-4550_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L655_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4011_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4015_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4095_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4521_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4525_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4595_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EW-M660FT_Series-epson-escpr-en.ppd.gz	epson-20170125-Epson-L380_Series-epson-escpr-en-1.6.10.ppd.gz	epson-20200615-1_6_41-Epson-ET-2600_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2610_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2650_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L385_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L405_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L485_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-400_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2500_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2550_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-4500_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L375_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L395_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L396_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L475_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L495_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L575_Series-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-ET-1110_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L1110_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3100_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3110_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M160T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S160T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-805A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-905A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-905F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-775A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-750_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A890-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M5041F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S5040-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7110_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7620_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A950-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-977A3_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M5040F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-205_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-605F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-675F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7610_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3010_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3520_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3530_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3540_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3640_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-850_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-976A3_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-8010_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-8090_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-8510_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-8590_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-R8590_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M7050FP-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M7050FX-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M7050_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S7050PS-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S7050X-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S7050_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M741F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3620_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-807A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-907F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A970-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-T990-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-306_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-806A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M740F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S740-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A820-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A840-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A840S-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-D870-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-804A-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-EP-904A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-904F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-906F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-720_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-760_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-820_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-860_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A920-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A940-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-T960-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-55_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-620_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-4630_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-4640_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-5110_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-5190_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-5620_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-5690_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-R4640_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-R5190_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-R5690_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-950_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-777A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-705A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M840F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M840FX-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S840-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S840X-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-610_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-710_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-810_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-702A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-M5190_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-M5690_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-703A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-704A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-774A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-802A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-803A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-902A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-903A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-903F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M350F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S350-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-435A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-505F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-535F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-706A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-776A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-520_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-D800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-1600F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-1700F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-600_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-700_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-800_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2510_Series-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-WF-2520_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2530_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2540_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-205_207_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-302_303_305_306_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-402_403_405_406_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-046A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-436A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-707A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-510_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-901A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-901F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-212_213_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-215_217_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-310_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-312_313_315_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-410_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-412_413_415_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M650A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M650F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L810_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L850_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2630_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-225_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-322_323_325_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-422_423_425_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-047A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-437A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX640-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX650-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-G5300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S05_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-5600-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-5V-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-7V-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-211_214_216_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R1900-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R2880-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R2000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R3000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME-400_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-200_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-300_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-400_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PF-70_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-100_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_730-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_837-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX730-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-801A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-503A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-504A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-601F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-602F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-603F-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-PX-673F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-4004-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX730-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX830-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_1430-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_1430-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_1500-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R350-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R340-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L364_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L565_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-G4500-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_1400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_1410-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_710-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_810-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX580-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX595-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_630-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_720-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_830-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L455_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX680-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-502A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R380-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_650FN-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-501A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-A740-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-FA700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX310FN-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX660-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX700W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX720WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX810FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX820FWD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX700W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX710W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX720WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX810FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-TX720_Artisan720-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-TX820_Artisan830-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX650-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R360-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R390-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX560-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX585-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX590-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX610-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX650-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-405A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7510_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7511_Series-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-WF-7515_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7520_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7521_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7525_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX685-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX690-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-G850-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R260-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R265-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R270-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX800FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX800FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_600-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_610-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-350-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-360_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-600-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-810-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-820-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-830_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_245-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX530-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX620-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_435-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_545-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_630-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_645-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_840-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_845-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME-301_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME-303_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-202_203_206_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-201_204_208_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-370_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-840_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-330-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-330S-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-340-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-520-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-530-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-720-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX510-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-404A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-434A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-500-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_320-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_520-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_700FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_900WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_940FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_960FWD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX635-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX305_Plus-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX525WD-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-Stylus_Office_BX535WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX600FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX610FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX620FWD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX630FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX635FWD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX925-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX935-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX600FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX610FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX620FWD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX510W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX525WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX535WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX600FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX610FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX620FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX550W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX560WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_620-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-B700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-B750F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-850_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX8300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX9300F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX8400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX9400F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX710W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX410-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX410-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX8400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX9400Fax-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX410-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_310-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_500-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4020_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4022_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4090_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4092_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4530_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4532_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4540_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4590_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4592_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-A720-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX5900-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R240-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R250-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-300-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-Stylus_CX4900-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_620F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_Office_600F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX300F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX305-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX320FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX300F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX320F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX510FN-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX515FN-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX525FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-TX320_WorkForce320-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4011_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4015_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4025_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4091_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4095_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4511_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4515_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4521_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4525_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4531_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4535_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4545_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4595_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4010_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4023_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4520_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4533_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_510-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_520-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_560W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX210-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX218-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX420W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX210-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX220-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX420W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-TX220_NX220-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-TX420_NX420-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_570-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX330-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX430-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX4200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX4800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX5000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX520-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX530-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX6000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX7700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX7800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A750-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-D600-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-A650-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-Stylus_CX4200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX4800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX5000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-150-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_530-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-NX430_TX435-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX210-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX420-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX230-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX430-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX440-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX235-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX430-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_240-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_250-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_280-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_210-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-A640-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX7300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX7400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX7400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX200-epson-escpr-en.ppd.gz	epson-20200615-EPSON_EW-052A_Series.ppd.gz	epson-20200615-EPSON_L110_Series.ppd.gz	epson-20200615-EPSON_L120_Series.ppd.gz	epson-20200615-EPSON_L130_Series.ppd.gz	epson-20200615-EPSON_L210_Series.ppd.gz	epson-20200615-EPSON_L220_Series.ppd.gz	epson-20200615-EPSON_L310_Series.ppd.gz	epson-20200615-EPSON_L350_Series.ppd.gz	epson-20200615-EPSON_L355_Series.ppd.gz	epson-20200615-EPSON_L360_Series.ppd.gz	epson-20200615-EPSON_L555_Series.ppd.gz	epson-20200615-EPSON_M1120_Series.ppd.gz	epson-20200615-EPSON_PX-S170T_Series.ppd.gz	epson-20200615-EPSON_USB1.1_MFP_Full-Speed.ppd.gz	epson-20200615-EPSON_USB2.0_MFP_Hi-Speed.ppd.gz	epson-20200615-EPSON_USB2.0_Printer_Hi-speed.ppd.gz	epson-20200615-EPSON_USB_MFP.ppd.gz	epson-20200615-EPSON_USB_Printer.ppd.gz	epson-20200615-Epson-generic-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_270-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-A620-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX2800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX2900-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX3700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX3800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX3900-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX3800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX4000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_310-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-M200_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-M205_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_500-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_Deluxe-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_Express-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-PictureMate_PM_215-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_225-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_235-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_260-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_290-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX230-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX230-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-M1560_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-K200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-K300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-NX230_TX230-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX235-epson-escpr-en.ppd.gz
-foomatic-20191029-Gestetner-Pro_C5200S_PDF.ppd.gz	foomatic-20191029-Gestetner-Pro_C5210S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C5200S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C5210S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C5200S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C5210S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C5200S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C5210S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C5200S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C5210S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C5200S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C5210S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C7200S_Light_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C7200S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C7210S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C7200S_Light_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C7200S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C7210S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C7200_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C7210_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C7200_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C7210_PDF.ppd.gz	ricoh-20200527-Infotec-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Infotec-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-Lanier-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Lanier-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-NRG-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-NRG-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-Ricoh-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Ricoh-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-Savin-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Savin-Pro_C5310S_PDF.ppd.gz	ricoh-20200821-Infotec-Pro_C5300SL_PDF.ppd.gz	ricoh-20200821-NRG-Pro_C5300SL_PDF.ppd.gz	ricoh-20200821-Ricoh-Pro_C5300SL_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_C6502-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C8002-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6502-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C8002-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6502-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C8002-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6502-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C8002-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C6502-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C8002-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C6502-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C8002-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_C7100S-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_C7110S-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_C7100S-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_C7110S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_C7100S-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_C7110S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_C7100S-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_C7110S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_C7100S-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_C7110S-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_C7100-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_C7110-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_C7100-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_C7110-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_C7100-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_C7110-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_C7100-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_C7110-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_C7100-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_C7110-PDF-Savin.ppd.gz	ricoh-20191121-Infotec-Pro_8200S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_8210S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_8220S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_8200S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_8210S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_8220S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_8200S_PDF.ppd.gz	
foomatic-20191029-Lanier-Pro_8210S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_8220S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_8200S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_8210S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_8220S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_8200S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_8210S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_8220S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_8210_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_8220_PDF.ppd.gz	foomatic-20191029-NRG-Pro_8210_PDF.ppd.gz	foomatic-20191029-NRG-Pro_8220_PDF.ppd.gz	foomatic-20191029-Savin-Pro_8210_PDF.ppd.gz	foomatic-20191029-Savin-Pro_8220_PDF.ppd.gz	foomatic-20200219-Infotec-Pro_8110-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_8120-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_8110-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_8120-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_8110-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_8120-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_8110-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_8120-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_8110-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_8120-PDF-Savin.ppd.gz	ricoh-20191121-Infotec-Pro_8210_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_8220_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_8210_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_8220_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_C6503-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C8003-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6503-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C8003-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6503-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C8003-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6503-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C8003-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C6503-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C8003-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C6503-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C8003-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm2540-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm2550-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm2560-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_4054-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5054-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_6054-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_4054-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5054-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_6054-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_4054-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5054-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_6054-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_4054-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5054-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_6054-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_4054-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_5054-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_6054-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_4054-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_5054-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_6054-PDF-Savin.ppd.gz	foomatic-20191029-Lanier-Pro_C7200S_Light_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C7200S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C7210S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C7200S_Light_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C7200S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C7210S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C7200S_Light_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C7200S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C7210S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C7200_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C7210_PDF.ppd.gz	
foomatic-20191029-NRG-Pro_C7200_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C7210_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C7200_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C7210_PDF.ppd.gz	foomatic-20200219-Infotec-Pro_8100S-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_8110S-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_8120S-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_8100S-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_8110S-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_8120S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_8100S-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_8110S-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_8120S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_8100S-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_8110S-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_8120S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_8100S-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_8110S-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_8120S-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_8310S-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_8320S-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_8310S-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_8320S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_8310S-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_8320S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_8310S-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_8320S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_8310S-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_8320S-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_8310-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_8320-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_8310-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_8320-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_8310-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_8320-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_8310-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_8320-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_8310-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_8320-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_6002-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_7502-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_9002-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_6002-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_7502-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_9002-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_6002-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_7502-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_9002-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_6002-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_7502-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_9002-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_6002-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_7502-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_9002-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_6002-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_7502-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_9002-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1245ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C4504ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4504ex-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5504ex-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C4504ex-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5504ex-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4504ex-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5504ex-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C4504ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C5504ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C4504ex-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C5504ex-PDF-Savin.ppd.gz	
foomatic-20200219-Gestetner-DSc1245-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C4504-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4504-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5504-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C4504-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4504-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5504-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C4504-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C5504-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C4504-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1260ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C6004ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6004ex-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6004ex-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6004ex-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C6004ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C6004ex-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1260-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C6004-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6004-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6004-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6004-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C6004-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C6004-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_C840DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C842DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C840DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C842DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C840DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C842DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C840DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_C842DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1045-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C4503-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C5503-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4503-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5503-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C4503-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5503-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4503-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5503-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C4503-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C5503-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C4503-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C5503-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_6503-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_7503-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_9003-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_6503-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_7503-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_9003-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_6503-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_7503-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_9003-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_6503-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_7503-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_9003-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_6503-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_7503-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_9003-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_6503-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_7503-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_9003-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1060-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C6003-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6003-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6003-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C830DN-PDF-Lanier.ppd.gz	
foomatic-20200219-Lanier-SP_C831DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6003-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C830DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C831DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C830DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C831DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C6003-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C6003-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_C830DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_C831DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-C8140ND-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-C8150ND-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP540C_SPC820DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP550C_SPC821DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C820DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C821DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C820DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C821DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP340D-PDF-Savin.ppd.gz	foomatic-20200219-Savin-CLP350D-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm_2640-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm_2650-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_4055-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5055-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_4055-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5055-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_4055-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5055-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_4055-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5055-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_4055-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_5055-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_4055-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_5055-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C3001-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3501-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C3001-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3501-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3001_LD630C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3501_LD635C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C3001-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3501-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3501-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C9130-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9135-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm_2660-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_6055-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_6055-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_6055-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_6055-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_6055-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_6055-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C3002-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3502-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C3002-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3502-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3002-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3502-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C3002-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3502-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3002-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3502-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C3002-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C3502-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C4501-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C4501A-PDF-Gestetner.ppd.gz	
foomatic-20200219-Gestetner-MP_C5501-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C5501A-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4501-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C4501A-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5501-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5501A-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MPC4501A_LD645CA-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MPC5501A_LD655CA-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C4501_LD645C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5501_LD655C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4501-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C4501A-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5501-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5501A-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4501-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4501A-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C5501-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C5501A-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C9145-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9145A-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9155-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9155A-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C4502-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C4502A-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C5502-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C5502A-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4502-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C4502A-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5502-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5502A-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C4502-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C4502A-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5502-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5502A-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4502-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C4502A-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5502-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5502A-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4502-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4502A-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C5502-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C5502A-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C4502-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C4502A-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C5502-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C5502A-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1230ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3004ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3504ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C3004ex-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3504ex-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3004ex-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3504ex-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C3004ex-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3504ex-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C3004ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C3504ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C3004ex-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C3504ex-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1230-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3004-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3504-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C3004-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3504-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3004-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3504-PDF-Lanier.ppd.gz	
foomatic-20200219-NRG-MP_C3004-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3504-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C3004-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C3504-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C3004-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C3504-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C2800-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3300-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2800-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3300-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2800_LD528C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3300_LD533C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2800-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3300-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2800-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3300-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C2828-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C3333-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C4000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C5000-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5000-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C4000_LD540C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5000_LD550C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C5000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C4040-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C5050-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm2525-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm2530-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm2535-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_2554-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3054-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3554-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2554-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3054-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3554-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2554-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3054-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3554-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2554-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3054-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3554-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_2554-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_2554J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3054-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3054J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3554-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3554J-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_2554-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3054-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3554-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_C5100S-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_C5110S-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_C5100S-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_C5110S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_C5100S-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_C5110S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_C5100S-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_C5110S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_C5100S-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_C5110S-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C6501-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C7501-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6501-PDF-Infotec.ppd.gz	
foomatic-20200219-Infotec-MP_C7501-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6501_LD365C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C7501_LD375C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6501-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C7501-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C6501-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C7501-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C9065-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9075-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-Pro_1107EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-Pro_1357EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-Pro_907EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-Pro_1107EX-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_1357EX-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_907EX-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_1107EX-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_1357EX-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_907EX-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_1107EX-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_1357EX-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_907EX-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_1107EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_1357EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_907EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_1107EX-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_1357EX-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_907EX-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1030-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3003-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3503-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C3003-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3503-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3003-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3503-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C3003-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3503-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C3003-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C3503-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C3003-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C3503-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_4002-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5002-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_4002-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5002-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_4002-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5002-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_4002-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5002-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_4002-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_5002-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_4002-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_5002-PDF-Savin.ppd.gz	ricoh-20200527-Lanier-IM_C6500_PDF.ppd.gz	ricoh-20200527-Lanier-IM_C8000_PDF.ppd.gz	ricoh-20200527-NRG-IM_C6500_PDF.ppd.gz	ricoh-20200527-NRG-IM_C8000_PDF.ppd.gz	ricoh-20200527-Ricoh-IM_C6500_PDF.ppd.gz	ricoh-20200527-Ricoh-IM_C8000_PDF.ppd.gz	ricoh-20200527-Savin-IM_C6500_PDF.ppd.gz	ricoh-20200527-Savin-IM_C8000_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_6001-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_7001-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_8001-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_9001-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_6001-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_7001-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_8001-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_9001-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_6001_LD360-PDF-Lanier.ppd.gz	
foomatic-20200219-Lanier-MP_7001_LD370-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_8001_LD380-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_9001_LD390-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_6001-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_7001-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_8001-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_9001-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_6001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_7001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_8001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_9001-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-9060-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9070-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9080-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9090-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP2352_DSm923-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP2852_DSm928-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP3352_DSm933-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2352-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_2852-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3352-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2352-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_2852-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3352-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2352-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_2852-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3352-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2352-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2852-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3352-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_2352-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_2852-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3352-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1220ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc1225ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2004ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2504ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2004ex-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2504ex-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2004ex-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C2504ex-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2004ex-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2504ex-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C2004ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2094exJ-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2504ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2594exJ-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C2004ex-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C2504ex-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm_2625-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm_2630-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm_2635-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_2555-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3055-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3555-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2555-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3055-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3555-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2555-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3055-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3555-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2555-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3055-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3555-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_2555-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_2595J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3055-PDF-Ricoh.ppd.gz	
foomatic-20200219-Ricoh-MP_3095J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3555-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3595J-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_2555-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3055-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3555-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1220-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc1225-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2004-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2504-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2004-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2504-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2004-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C2504-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2004-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2504-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C2004-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2094J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2504-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2594J-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C2004-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C2504-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C6000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C7500-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C7500-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6000_LD260c-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C7500_LD275c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C7500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C6000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C7500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C6055-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C7570-PDF-Savin.ppd.gz	foomatic-20190909-Ricoh-IM_C4500_PDF.ppd.gz	foomatic-20200219-Gestetner-GS3045-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C4500-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_C4500-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_C4500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-IM_C5500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_C5500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_C4500-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GS3160-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C6000-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_C6000-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_C6000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_C6000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_C6000-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_2550-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_2550B-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_2851-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3350-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3350B-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3351-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2550-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_2550B-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_2851-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3350-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3350B-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3351-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2550B_LD425B-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_2550_LD425-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_2851_LD528-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3350B_LD433B-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3350_LD433-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3351_LD533-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2550-PDF-NRG.ppd.gz	
foomatic-20200219-NRG-MP_2550B-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_2851-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3350-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3350B-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3351-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2550-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2550B-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2851-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3350-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3350B-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3351-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-9025-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9025b-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9033-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9033b-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9228-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9233-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GS3030-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C3000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C3500-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_C3000-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-IM_C3500-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_C3000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-IM_C3500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_C3000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-IM_C3500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_C3000-PDF-Savin.ppd.gz	foomatic-20200219-Savin-IM_C3500-PDF-Savin.ppd.gz	ricoh-20200821-Ricoh-IM_C3509J_PDF.ppd.gz	foomatic-20200219-Lanier-SP_8400DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_8400DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_8400DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_8400DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm1525-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm1530-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm1533-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_2553-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3053-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3353-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2553-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3053-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3353-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2553-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3053-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3353-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2553-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3053-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3353-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_2553-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3053-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3353-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_2553-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3053-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3353-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_8200DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP150dn_SP8200DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_8200DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_8200DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP150DN-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_8300S-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_8300S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_8300S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_8300S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_8300S-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GS3020-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-GS3025-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C2000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C2500-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_C2000-PDF-Lanier.ppd.gz	
foomatic-20200219-Lanier-IM_C2500-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_C2000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-IM_C2500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_C2000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-IM_C2500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_C2000-PDF-Savin.ppd.gz	foomatic-20200219-Savin-IM_C2500-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1020-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc1025-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2003-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2503-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2003-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2003Z-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2503-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2503Z-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2003-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C2503-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2003-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2003Z-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2503-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2503Z-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C2003-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2003J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2003Z-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2503-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2503J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2503Z-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C2003-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C2503-PDF-Savin.ppd.gz	ricoh-20200821-Ricoh-IM_C2509J_PDF.ppd.gz	foomatic-20200219-Lanier-SP_8300DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_8300DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_8300DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_8300DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_4000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_4000B-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_4001-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5000B-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5001-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_4000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_4000B-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_4001-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5000B-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5001-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_4000B_LD040B-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_4000_LD040-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_4001_LD140-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5000B_LD050B-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5000_LD050-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5001_LD150-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_4000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_4000B-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_4001-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5000B-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5001-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_4000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_4000B-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_4001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_5000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_5000B-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_5001-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-9040-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9040b-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9050-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9050b-PDF-Savin.ppd.gz	
foomatic-20200219-Savin-9240-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9250-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-CS555-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc460-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_4560-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_5560-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LC155-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD160c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-CS555-PDF-NRG.ppd.gz	foomatic-20200219-NRG-DSc460-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_3260C-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_Color5560-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C6045-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SDC555-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C2051-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2551-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2051-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2551-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD_620C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD_625C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2051-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2551-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2051-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2551-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C9120-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9125-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C2050-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2550-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2050-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2550-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2050_LD520C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C2550_LD525C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2050-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2550-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2050-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2550-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C9020-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9025-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MPC2500_DSc525-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MPC3000_DSc530-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_2525-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_3030-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2500_LD425c-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3000_LD430c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C2525-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C3030-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-C7640nD-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IPC_4040DN-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LP440c_SP_C811DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C811DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C811DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP240D-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-MP_C501-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C501-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C501-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C501-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP5500_DSm755-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP6500_DSm765-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP7500_DSm775-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_6000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_7000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_8000-PDF-Gestetner.ppd.gz	
foomatic-20200219-Infotec-IS_2255-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2265-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2275-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_6000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_7000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_8000-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_5500_LD255-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_6000_LD260-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_6500_LD265-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_7000_LD270-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_7500_LD275-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_8000_LD280-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_5500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_6000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_6500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_7000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_7500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_8000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_5500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_6000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_6500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_7000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_7500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_8000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8055-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8060-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8065-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8070-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8075-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8080-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MPC3500_DSc535-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MPC4500_DSc545-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_3535-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_4045-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3500_LD435c-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C4500_LD445c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C3500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C4500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C3535-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C4540-PDF-Savin.ppd.gz	ricoh-20190916-Ricoh-MP_C306Z_JPN_PDF.ppd.gz	foomatic-20200219-Gestetner-Pro_1106EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-Pro_1356EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-Pro_906EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-Pro_1106EX-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_1356EX-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_906EX-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_1106EX-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_1356EX-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_906EX-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_1106EX-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_1356EX-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_906EX-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_1106EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_1356EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_906EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_1106EX-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_1356EX-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_906EX-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP3500_DSm735e-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP4500_DSm745e-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2435-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2445-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_3500_LD335-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_4500_LD345-PDF-Lanier.ppd.gz	
foomatic-20200219-NRG-MP_3500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_4500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_4500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8035e-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8045e-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc424-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc432-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_1024c-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_1032c-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD124c-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD132c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-DSc424-PDF-NRG.ppd.gz	foomatic-20200219-NRG-DSc432-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_3224C-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_3232C-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C2410-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C3210e-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_2510_DSm725e-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3010_DSm730e-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2425-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2430-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2510_LD325-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3010_LD330-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2510-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3010-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2510-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3010-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8025e-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8030e-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm725-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm730-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2225-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2230-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD225-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD230-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-DSm725-PDF-NRG.ppd.gz	foomatic-20200219-NRG-DSm730-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_3025-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_3030-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8025-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8030-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm735_735G-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm745_745G-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2235-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2245-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD235-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD245-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-DSm735-PDF-NRG.ppd.gz	foomatic-20200219-NRG-DSm745-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_3035-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_3045-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8035_8035g-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8045_8045g-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP1100_DSm7110-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP1350_DSm7135-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP9000_DSm790-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_3090-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_3110-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_3135-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_1100_LD1100-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_1350_LD1135-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_9000_LD190-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_1100-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_1350-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_9000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_1100-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_1350-PDF-Ricoh.ppd.gz	
foomatic-20200219-Ricoh-Aficio_MP_9000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8090-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8110-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8135-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C307-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C407-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C307-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C407-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C307-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C407-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C307-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C407-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C307-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C407-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C307-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C407-PDF-Savin.ppd.gz	ricoh-20200221-Ricoh-SP_C342M_JPN_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_C306Z-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C406Z-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C306Z-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C406Z-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C306Z-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C406Z-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C306Z-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C406Z-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C306Z-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C406Z-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C306Z-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C406Z-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_305plus-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_305plus-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_305plus-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_305plus-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_305plus-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_305plus-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C305-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C305-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C305-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C305-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C305-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C305-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-C7521n-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP222cn_LP221c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-C7521n-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL3500N-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP22-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1120-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2011-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C401SR-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2011-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C401SR-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C401SR-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2011-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C401SR-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C2011-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C401SR-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C401SR-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_2501-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2501-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2501-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2501-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_2501-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_2501-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_C730DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C730DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C730DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C730DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C300SR-PDF-Gestetner.ppd.gz	
foomatic-20200219-Gestetner-MP_C400SR-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C300SR-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C400SR-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD_130CSR-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD_140CSR-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C300SR-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C400SR-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C300SR-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C400SR-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C300SR-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C400SR-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C_230SR-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C_240SR-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-P7575-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP175_LP175hdn-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-P7575-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP900-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP75n-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-P7675-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-SP_9100_LP275-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_9100DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_9100DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP175n-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-P7245-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP145n_SP_8100DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_8100DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_8100DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP145-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_C342DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C342DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C342DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C342DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_301-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_301-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_301-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_301-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_301-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_301-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_2001-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2001-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2001-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2001-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_2001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_2001J-PDF-Ricoh.ppd.gz	foomatic-20200219-Lanier-SP_C352DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C352DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C352DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C352DN-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_C340DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C340DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C340DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C340DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C401-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C401-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C401-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C401-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C401-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C401-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP2500_DSm625-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2325-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP2500_LD125-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-7025-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C300-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C400-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C300-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C400-PDF-Infotec.ppd.gz	
foomatic-20200219-Lanier-LD_130C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD_140C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C300-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C400-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C300-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C400-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C300-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C400-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C_230-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C_240-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MPC1500_GS106-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_615G-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MPC1500_LD215c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C1500sp-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C1500_615C-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SGC_1506-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_6430DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_6430DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_6430DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_6430DN-PDF-Savin.ppd.gz	foomatic-20190909-Ricoh-IM_430_PDF.ppd.gz	foomatic-20200219-Gestetner-IM_350-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_430-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_350-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-IM_430-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_350-PDF-NRG.ppd.gz	foomatic-20200219-NRG-IM_430-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_350-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_350-PDF-Savin.ppd.gz	foomatic-20200219-Savin-IM_430-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_C320DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-SP_C320DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C320DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C320DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C320DN-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-MP_201-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_501-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_601-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_201_LD_220-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_501-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_601-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_201-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_501-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_601-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_201-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_501-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_601-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-920-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_501-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_601-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-SP_5300-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-SP_5310-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-SP_5300-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_5310-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_5300-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_5310-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_5300-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_5310-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_5300-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_5310-PDF-Savin.ppd.gz	lanier-20190916-Lanier-IM_600SR_PDF.ppd.gz	nrg-20190916-NRG-IM_600SR_PDF.ppd.gz	ricoh-20190916-Ricoh-IM_600SR_PDF.ppd.gz	savin-20190916-Savin-IM_600SR_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_171-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_171-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_171_LD_117-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_171-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_171-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-917-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP2000_DSm721d-PDF-Gestetner.ppd.gz	
foomatic-20200219-Infotec-IS_2320-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2000_LD320d-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-9021d-PDF-Savin.ppd.gz	ricoh-20200221-Lanier-IM_C400SR_PDF.ppd.gz	ricoh-20200221-NRG-IM_C400SR_PDF.ppd.gz	ricoh-20200221-Ricoh-IM_C400SR_PDF.ppd.gz	ricoh-20200221-Savin-IM_C400SR_PDF.ppd.gz	ricoh-20200821-Lanier-IM_C530F_PDF.ppd.gz	ricoh-20200821-NRG-IM_C530F_PDF.ppd.gz	ricoh-20200821-Ricoh-IM_C530F_PDF.ppd.gz	ricoh-20200821-Savin-IM_C530F_PDF.ppd.gz	foomatic-20200219-Lanier-SP_5210SR-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_5210SR-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_5210SR-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_5210SR-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_402SPF-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7535n-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_402SPF-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LP135n-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_402SPF-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4310N-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_402SPF-PDF-NRG.ppd.gz	foomatic-20200219-NRG-P7535n-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_4310N-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP610N-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_4310N-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_402SPF-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP35n-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_402SPF-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_4310N-PDF-Savin.ppd.gz	ricoh-20200221-Gestetner-IM_C300_PDF.ppd.gz	ricoh-20200221-Gestetner-IM_C400_PDF.ppd.gz	ricoh-20200221-Lanier-IM_C300_PDF.ppd.gz	ricoh-20200221-Lanier-IM_C400_PDF.ppd.gz	ricoh-20200221-NRG-IM_C300_PDF.ppd.gz	ricoh-20200221-NRG-IM_C400_PDF.ppd.gz	ricoh-20200221-Ricoh-IM_C300_PDF.ppd.gz	ricoh-20200221-Ricoh-IM_C400_PDF.ppd.gz	ricoh-20200221-Savin-IM_C300_PDF.ppd.gz	ricoh-20200221-Savin-IM_C400_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_161_DSm416-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2416-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_161_LD016-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_161-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_161-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-816-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_C435DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C440DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C440DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C435DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C440DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C435DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_C440DN-PDF-Savin.ppd.gz	lanier-20190916-Lanier-IM_550_PDF.ppd.gz	lanier-20190916-Lanier-IM_600_PDF.ppd.gz	nrg-20190916-NRG-IM_550_PDF.ppd.gz	nrg-20190916-NRG-IM_600_PDF.ppd.gz	ricoh-20190916-Ricoh-IM_550_PDF.ppd.gz	ricoh-20190916-Ricoh-IM_600_PDF.ppd.gz	savin-20190916-Savin-IM_550_PDF.ppd.gz	savin-20190916-Savin-IM_600_PDF.ppd.gz	lanier-20190916-Lanier-P_800_PDF.ppd.gz	lanier-20190916-Lanier-P_801_PDF.ppd.gz	nrg-20190916-NRG-P_800_PDF.ppd.gz	nrg-20190916-NRG-P_801_PDF.ppd.gz	ricoh-20190916-Ricoh-P_800_PDF.ppd.gz	ricoh-20190916-Ricoh-P_801_PDF.ppd.gz	savin-20190916-Savin-P_800_PDF.ppd.gz	savin-20190916-Savin-P_801_PDF.ppd.gz	foomatic-20200219-Lanier-SP_5200DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_5210DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_5200DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_5210DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_5200DN-PDF-Ricoh.ppd.gz	
foomatic-20200219-Ricoh-Aficio_SP_5210DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_5200DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_5210DN-PDF-Savin.ppd.gz	foomatic-20200219-Ricoh-SP_450DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Gestetner-MP_401SPF-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-SP_C430DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-SP_C431DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP137CN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP142CN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_401SPF-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4510SF-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C430DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C431DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_4510SF-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C430DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C431DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C430DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C431DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_401SPF-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_4510SF-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP37DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-CLP42DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_401SPF-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_4510SF-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_4510DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4520DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_4510DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_4510DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_4520DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_4510DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_4520DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-P7031n-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7031nL-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7035n-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP131n-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP131nL-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP136n-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4100N-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4100NL-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4110N-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_4100N-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_4100NL-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_4110N-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_4100N-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_4100NL-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_4110N-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP31n-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MLP31nL-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MLP36n-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-P7527-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7527n-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP127n_LP128n-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP128-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_5210SF-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-P7527-PDF-NRG.ppd.gz	foomatic-20200219-NRG-P7527n-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_5210SF-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP410-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP410N-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_5210SF-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP28-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MLP28n-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_5210SF-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_C420DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP331cn_SP_C420-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C420DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C420DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP131DN-PDF-Savin.ppd.gz	
foomatic-20200219-Lanier-SP_5200S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_5200S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_5200S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_5200S-PDF-Savin.ppd.gz	lanier-20190916-Lanier-P_C600_PDF.ppd.gz	ricoh-20190916-Ricoh-P_C600_PDF.ppd.gz	savin-20190916-Savin-P_C600_PDF.ppd.gz	ricoh-20200527-Gestetner-GS3021_PDF.ppd.gz	ricoh-20200527-Ricoh-M_C2001_PDF.ppd.gz	foomatic-20200219-Gestetner-C7526dn-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-C7531dn-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IPC_2525DN-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IPC_3030DN-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LP226c_SP_C410-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP231c_SP_C411-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C400DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C410DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C411DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C400DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C410DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C411DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP27DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-CLP31DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_C400DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_6330N-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP235N-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_6330N-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_6330N-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_6330N-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP235n-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-P_501-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-P_502-PDF-Lanier.ppd.gz	foomatic-20200219-Ricoh-P_501-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-P_502-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-P_501-PDF-Savin.ppd.gz	foomatic-20200219-Savin-P_502-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GWD2004-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-GWD2006-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LW324-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LW326-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_W2400-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_W3600-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W2400-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W3600-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-2404WD-PDF-Savin.ppd.gz	foomatic-20200219-Savin-2406WD-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GWD3006-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LW426-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_W2401-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_W3601-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W2401-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W3601-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-3406WD-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_W2470-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP124w-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_W2470-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_W2470-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-2404WDP-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_4210N-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP37N-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4210N-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_4210N-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_4210N-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP37N-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP1600_DSm716-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2316-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_1600_LD316-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_1600-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_1600-PDF-Ricoh.ppd.gz	
foomatic-20200219-Savin-9016-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_CW2201-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-MP_CW2201-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_CW2201-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_CW2201-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_CW2201-PDF-Savin.ppd.gz	ricoh-20200821-Lanier-IM_C530FB_PDF.ppd.gz	ricoh-20200821-NRG-IM_C530FB_PDF.ppd.gz	ricoh-20200821-Ricoh-IM_C530FB_PDF.ppd.gz	ricoh-20200821-Savin-IM_C530FB_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_W7100-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_W8140-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-MP_W7100-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_W8140-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_W7100-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_W8140-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_W7100-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_W8140-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_W7100-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_W8140-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_CW2200-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-MP_CW2200-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_CW2200-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_CW2200-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_CW2200-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_W6700-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-MP_W6700-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_W6700-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_W6700-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_W6700-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GWD5100-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-GWD7140-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LW5100-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LW7140-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_W5100-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_W7140-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W5100-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W7140-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-5100WD-PDF-Savin.ppd.gz	foomatic-20200219-Savin-7140WD-PDF-Savin.ppd.gz
+epson-20200615-1_6_41-Epson-EP-10VA_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EW-M970A3T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-979A3_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-7700_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L7160_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EW-M571T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-978A3_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-811A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-810A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-30VA_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-7750_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L7180_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EW-M770T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2830_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2850_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-4100_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-808A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-900_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-960_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S06_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EW-M5071FT_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L5190_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-6090_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-6590_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-7100_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-640_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-830_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2750_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L4160_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-630_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-711A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-710A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-540_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L1455_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-709A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-049A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-2100_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M860F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S860-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-530_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2700_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L4150_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-M570T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-6530_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2650_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2660_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2750_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2760_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-708A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-16500_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-255_257_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-352_355_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-452_455_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L805_Series-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-ET-2710_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2720_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-4700_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3150_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3160_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-220_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-235_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-240_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-243_245_247_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-320_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-330_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-332_335_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-340_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-342_343_345_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-420_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-430_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-432_435_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-440_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-442_445_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-048A_Series-epson-escpr-en.ppd.gz	epson-20170125-Epson-L382_Series-epson-escpr-en-1.6.10.ppd.gz	epson-20200615-1_6_41-Epson-L3050_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3060_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3070_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L386_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L486_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PF-71_Series-epson-escpr-en.ppd.gz	epson-20210703-Epson-L1210_Series-epson-escpr-en.ppd.gz	epson-20210703-Epson-L3210_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PF-81_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L605_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-4550_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L655_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4011_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4015_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4095_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4521_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4525_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-M4595_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EW-M660FT_Series-epson-escpr-en.ppd.gz	epson-20170125-Epson-L380_Series-epson-escpr-en-1.6.10.ppd.gz	epson-20200615-1_6_41-Epson-ET-2600_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2610_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2650_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L385_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L405_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L485_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-400_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2500_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-2550_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-4500_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L375_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L395_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L396_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L475_Series-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-L495_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L575_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ET-1110_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L1110_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3100_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L3110_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M160T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S160T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-805A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-905A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-905F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-775A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-750_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A890-epson-escpr-en.ppd.gz	epson-20210521-1_6_41-Epson-PX-S5040-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M5041F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7110_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7620_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A950-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-977A3_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M5040F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-205_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-605F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-675F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7610_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3010_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3520_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3530_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3540_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3640_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-850_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-976A3_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-8010_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-8090_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-8510_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-8590_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-R8590_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M7050FP-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M7050FX-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M7050_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S7050PS-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S7050X-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S7050_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M741F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-3620_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-807A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-907F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A970-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-T990-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-306_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-806A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M740F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S740-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A820-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A840-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A840S-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-PM-D870-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-804A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-904A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-904F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-906F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-720_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-760_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-820_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-860_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A920-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-A940-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-T960-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-55_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-620_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-4630_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-4640_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-5110_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-5190_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-5620_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-5690_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-R4640_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-R5190_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-R5690_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-950_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-777A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-705A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M840F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M840FX-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S840-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S840X-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-610_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-710_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-810_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-702A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-M5190_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-M5690_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-703A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-704A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-774A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-802A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-803A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-902A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-903A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-903F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M350F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S350-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-435A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-505F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-535F_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-706A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-776A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-520_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-D800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-1600F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-1700F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-600_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-700_Series-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-XP-800_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2510_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2520_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2530_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2540_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-205_207_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-302_303_305_306_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-402_403_405_406_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-046A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-436A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-707A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-510_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-901A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-901F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-212_213_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-215_217_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-310_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-312_313_315_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-410_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-412_413_415_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M650A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M650F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L810_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L850_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-2630_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-225_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-322_323_325_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-422_423_425_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-047A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-437A_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX640-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX650-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-G5300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S05_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-5600-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-5V-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-7V-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-211_214_216_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R1900-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R2880-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R2000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R3000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME-400_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-200_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-300_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-400_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PF-70_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-100_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_730-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_837-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX730-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-801A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-503A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-504A-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-PX-601F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-602F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-603F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-673F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-EP-4004-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX730-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX830-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_1430-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_1430-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_1500-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R350-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R340-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L364_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L565_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-G4500-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_1400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_1410-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_710-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_810-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX580-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX595-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_630-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_720-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_830-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-L455_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX680-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-502A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R380-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_650FN-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-501A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-A740-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-FA700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX310FN-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX660-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX700W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX720WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX810FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX820FWD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX700W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX710W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX720WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX810FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-TX720_Artisan720-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-TX820_Artisan830-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX650-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R360-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R390-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX560-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX585-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX590-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX610-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX650-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-405A_Series-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-WF-7510_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7511_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7515_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7520_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7521_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-7525_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX685-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX690-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-G850-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R260-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R265-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R270-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX800FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_TX800FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_600-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_610-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-350-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-360_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-600-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-810-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-820-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-830_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_245-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX530-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX620-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_435-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_545-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_630-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_645-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_840-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_845-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME-301_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME-303_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-202_203_206_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-XP-201_204_208_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-370_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-840_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-330-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-330S-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-340-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-520-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-530-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-720-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX510-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-404A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-434A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-500-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_320-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_520-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_700FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_900WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_940FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_960FWD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX635-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-Stylus_Office_BX305_Plus-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX525WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX535WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX600FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX610FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX620FWD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX630FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX635FWD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX925-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX935-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX600FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX610FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX620FWD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX510W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX525WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX535WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX600FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX610FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX620FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX550W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX560WD-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_620-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-B700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-B750F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-850_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX8300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX9300F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX8400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX9400F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_PX710W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX410-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX410-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX8400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX9400Fax-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX410-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_310-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WorkForce_500-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4020_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4022_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4090_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4092_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4530_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4532_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4540_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4590_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4592_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Artisan_700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-A720-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX5900-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-Stylus_Photo_R240-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_R250-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX4900-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_620F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_Office_600F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX300F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX305-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_BX320FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX300F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX320F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX510FN-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX515FN-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Office_TX525FW-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-TX320_WorkForce320-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4011_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4015_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4025_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4091_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4095_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4511_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4515_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4521_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4525_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4531_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4535_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4545_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4595_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4010_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4023_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4520_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WP-4533_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_510-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_520-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_560W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX210-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX218-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX420W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX210-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX220-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX420W-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-TX220_NX220-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-TX420_NX420-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_570-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX330-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX430-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX4200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX4800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX5000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX520-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_Photo_RX530-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX6000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX7700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX7800-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-PM-A750-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PM-D600-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-A650-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX4200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX4800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX5000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-150-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-E-200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_OFFICE_530-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-NX430_TX435-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX210-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX420-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX230-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX430-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX440-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX235-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX430-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_240-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_250-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_280-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_210-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-A640-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX7300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX7400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX7400-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX200-epson-escpr-en.ppd.gz	epson-20200615-EPSON_EW-052A_Series.ppd.gz	epson-20200615-EPSON_L110_Series.ppd.gz	epson-20200615-EPSON_L120_Series.ppd.gz	epson-20200615-EPSON_L130_Series.ppd.gz	epson-20200615-EPSON_L210_Series.ppd.gz	epson-20200615-EPSON_L220_Series.ppd.gz	epson-20200615-EPSON_L310_Series.ppd.gz	epson-20200615-EPSON_L350_Series.ppd.gz	epson-20200615-EPSON_L355_Series.ppd.gz	epson-20200615-EPSON_L360_Series.ppd.gz	epson-20200615-EPSON_L555_Series.ppd.gz	epson-20200615-EPSON_M1120_Series.ppd.gz	epson-20200615-EPSON_PX-S170T_Series.ppd.gz	epson-20200615-EPSON_USB1.1_MFP_Full-Speed.ppd.gz	epson-20200615-EPSON_USB2.0_MFP_Hi-Speed.ppd.gz	epson-20200615-EPSON_USB2.0_Printer_Hi-speed.ppd.gz	epson-20200615-EPSON_USB_MFP.ppd.gz	epson-20200615-EPSON_USB_Printer.ppd.gz	epson-20200615-Epson-generic-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_270-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-ME_200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-A620-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX2800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX2900-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX3700-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX3800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_CX3900-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX3800-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_DX4000-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_310-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-M200_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-M205_Series-epson-escpr-en.ppd.gz	
epson-20200615-1_6_41-Epson-PictureMate_500-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_Deluxe-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_Express-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_215-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_225-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_235-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_260-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PictureMate_PM_290-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_NX230-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_TX230-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-WF-M1560_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-K200-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-K300-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-NX230_TX230-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-Stylus_SX235-epson-escpr-en.ppd.gz
+foomatic-20191029-Gestetner-Pro_C5200S_PDF.ppd.gz	foomatic-20191029-Gestetner-Pro_C5210S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C5200S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C5210S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C5200S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C5210S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C5200S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C5210S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C5200S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C5210S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C5200S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C5210S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C7200S_Light_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C7200S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C7210S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C7200S_Light_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C7200S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C7210S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C7200_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_C7210_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C7200_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_C7210_PDF.ppd.gz	ricoh-20200527-Infotec-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Infotec-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-Lanier-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Lanier-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-NRG-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-NRG-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-Ricoh-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Ricoh-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-Savin-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Savin-Pro_C5310S_PDF.ppd.gz	ricoh-20200821-Infotec-Pro_C5300SL_PDF.ppd.gz	ricoh-20200821-NRG-Pro_C5300SL_PDF.ppd.gz	ricoh-20200821-Ricoh-Pro_C5300SL_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_C6502-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C8002-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6502-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C8002-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6502-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C8002-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6502-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C8002-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C6502-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C8002-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C6502-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C8002-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_C7100S-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_C7110S-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_C7100S-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_C7110S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_C7100S-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_C7110S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_C7100S-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_C7110S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_C7100S-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_C7110S-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_C7100-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_C7110-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_C7100-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_C7110-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_C7100-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_C7110-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_C7100-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_C7110-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_C7100-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_C7110-PDF-Savin.ppd.gz	ricoh-20191121-Infotec-Pro_8200S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_8210S_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_8220S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_8200S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_8210S_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_8220S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_8200S_PDF.ppd.gz	
foomatic-20191029-Lanier-Pro_8210S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_8220S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_8200S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_8210S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_8220S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_8200S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_8210S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_8220S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_8210_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_8220_PDF.ppd.gz	foomatic-20191029-NRG-Pro_8210_PDF.ppd.gz	foomatic-20191029-NRG-Pro_8220_PDF.ppd.gz	foomatic-20191029-Savin-Pro_8210_PDF.ppd.gz	foomatic-20191029-Savin-Pro_8220_PDF.ppd.gz	foomatic-20200219-Infotec-Pro_8110-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_8120-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_8110-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_8120-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_8110-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_8120-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_8110-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_8120-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_8110-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_8120-PDF-Savin.ppd.gz	ricoh-20191121-Infotec-Pro_8210_PDF.ppd.gz	ricoh-20191121-Infotec-Pro_8220_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_8210_PDF.ppd.gz	ricoh-20191121-Ricoh-Pro_8220_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_C6503-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C8003-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6503-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C8003-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6503-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C8003-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6503-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C8003-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C6503-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C8003-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C6503-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C8003-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm2540-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm2550-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm2560-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_4054-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5054-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_6054-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_4054-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5054-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_6054-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_4054-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5054-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_6054-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_4054-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5054-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_6054-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_4054-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_5054-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_6054-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_4054-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_5054-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_6054-PDF-Savin.ppd.gz	foomatic-20191029-Lanier-Pro_C7200S_Light_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C7200S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C7210S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C7200S_Light_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C7200S_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C7210S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C7200S_Light_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C7200S_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C7210S_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C7200_PDF.ppd.gz	foomatic-20191029-Lanier-Pro_C7210_PDF.ppd.gz	
foomatic-20191029-NRG-Pro_C7200_PDF.ppd.gz	foomatic-20191029-NRG-Pro_C7210_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C7200_PDF.ppd.gz	foomatic-20191029-Savin-Pro_C7210_PDF.ppd.gz	foomatic-20200219-Infotec-Pro_8100S-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_8110S-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_8120S-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_8100S-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_8110S-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_8120S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_8100S-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_8110S-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_8120S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_8100S-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_8110S-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_8120S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_8100S-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_8110S-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_8120S-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_8310S-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_8320S-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_8310S-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_8320S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_8310S-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_8320S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_8310S-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_8320S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_8310S-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_8320S-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_8310-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_8320-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_8310-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_8320-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_8310-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_8320-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_8310-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_8320-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_8310-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_8320-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_6002-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_7502-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_9002-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_6002-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_7502-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_9002-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_6002-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_7502-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_9002-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_6002-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_7502-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_9002-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_6002-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_7502-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_9002-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_6002-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_7502-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_9002-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1245ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C4504ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4504ex-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5504ex-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C4504ex-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5504ex-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4504ex-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5504ex-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C4504ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C5504ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C4504ex-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C5504ex-PDF-Savin.ppd.gz	
foomatic-20200219-Gestetner-DSc1245-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C4504-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4504-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5504-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C4504-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4504-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5504-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C4504-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C5504-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C4504-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1260ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C6004ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6004ex-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6004ex-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6004ex-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C6004ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C6004ex-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1260-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C6004-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6004-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6004-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6004-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C6004-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C6004-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_C840DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C842DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C840DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C842DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C840DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C842DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C840DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_C842DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1045-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C4503-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C5503-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4503-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5503-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C4503-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5503-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4503-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5503-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C4503-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C5503-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C4503-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C5503-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_6503-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_7503-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_9003-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_6503-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_7503-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_9003-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_6503-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_7503-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_9003-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_6503-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_7503-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_9003-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_6503-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_7503-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_9003-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_6503-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_7503-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_9003-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1060-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C6003-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6003-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6003-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C830DN-PDF-Lanier.ppd.gz	
foomatic-20200219-Lanier-SP_C831DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6003-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C830DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C831DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C830DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C831DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C6003-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C6003-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_C830DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_C831DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-C8140ND-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-C8150ND-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP540C_SPC820DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP550C_SPC821DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C820DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C821DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C820DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C821DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP340D-PDF-Savin.ppd.gz	foomatic-20200219-Savin-CLP350D-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm_2640-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm_2650-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_4055-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5055-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_4055-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5055-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_4055-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5055-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_4055-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5055-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_4055-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_5055-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_4055-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_5055-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C3001-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3501-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C3001-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3501-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3001_LD630C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3501_LD635C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C3001-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3501-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3501-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C9130-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9135-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm_2660-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_6055-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_6055-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_6055-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_6055-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_6055-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_6055-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C3002-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3502-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C3002-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3502-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3002-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3502-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C3002-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3502-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3002-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3502-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C3002-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C3502-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C4501-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C4501A-PDF-Gestetner.ppd.gz	
foomatic-20200219-Gestetner-MP_C5501-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C5501A-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4501-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C4501A-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5501-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5501A-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MPC4501A_LD645CA-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MPC5501A_LD655CA-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C4501_LD645C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5501_LD655C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4501-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C4501A-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5501-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5501A-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4501-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4501A-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C5501-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C5501A-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C9145-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9145A-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9155-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9155A-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C4502-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C4502A-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C5502-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C5502A-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4502-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C4502A-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5502-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5502A-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C4502-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C4502A-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5502-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5502A-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4502-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C4502A-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5502-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5502A-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4502-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4502A-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C5502-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C5502A-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C4502-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C4502A-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C5502-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C5502A-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1230ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3004ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3504ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C3004ex-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3504ex-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3004ex-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3504ex-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C3004ex-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3504ex-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C3004ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C3504ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C3004ex-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C3504ex-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1230-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3004-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3504-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C3004-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3504-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3004-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3504-PDF-Lanier.ppd.gz	
foomatic-20200219-NRG-MP_C3004-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3504-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C3004-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C3504-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C3004-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C3504-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C2800-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3300-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2800-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3300-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2800_LD528C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3300_LD533C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2800-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3300-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2800-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3300-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C2828-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C3333-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C4000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C5000-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C4000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C5000-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C4000_LD540C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C5000_LD550C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C4000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C5000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C5000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C4040-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C5050-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm2525-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm2530-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm2535-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_2554-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3054-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3554-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2554-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3054-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3554-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2554-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3054-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3554-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2554-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3054-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3554-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_2554-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_2554J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3054-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3054J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3554-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3554J-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_2554-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3054-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3554-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-Pro_C5100S-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_C5110S-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_C5100S-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_C5110S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_C5100S-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_C5110S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_C5100S-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_C5110S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_C5100S-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_C5110S-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C6501-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C7501-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6501-PDF-Infotec.ppd.gz	
foomatic-20200219-Infotec-MP_C7501-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6501_LD365C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C7501_LD375C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6501-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C7501-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C6501-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C7501-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C9065-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9075-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-Pro_1107EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-Pro_1357EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-Pro_907EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-Pro_1107EX-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_1357EX-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_907EX-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_1107EX-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_1357EX-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_907EX-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_1107EX-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_1357EX-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_907EX-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_1107EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_1357EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_907EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_1107EX-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_1357EX-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_907EX-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1030-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3003-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C3503-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C3003-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C3503-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3003-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3503-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C3003-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3503-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C3003-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C3503-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C3003-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C3503-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_4002-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5002-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_4002-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5002-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_4002-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5002-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_4002-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5002-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_4002-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_5002-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_4002-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_5002-PDF-Savin.ppd.gz	ricoh-20200527-Lanier-IM_C6500_PDF.ppd.gz	ricoh-20200527-Lanier-IM_C8000_PDF.ppd.gz	ricoh-20200527-NRG-IM_C6500_PDF.ppd.gz	ricoh-20200527-NRG-IM_C8000_PDF.ppd.gz	ricoh-20200527-Ricoh-IM_C6500_PDF.ppd.gz	ricoh-20200527-Ricoh-IM_C8000_PDF.ppd.gz	ricoh-20200527-Savin-IM_C6500_PDF.ppd.gz	ricoh-20200527-Savin-IM_C8000_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_6001-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_7001-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_8001-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_9001-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_6001-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_7001-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_8001-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_9001-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_6001_LD360-PDF-Lanier.ppd.gz	
foomatic-20200219-Lanier-MP_7001_LD370-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_8001_LD380-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_9001_LD390-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_6001-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_7001-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_8001-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_9001-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_6001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_7001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_8001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_9001-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-9060-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9070-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9080-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9090-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP2352_DSm923-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP2852_DSm928-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP3352_DSm933-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2352-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_2852-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3352-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2352-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_2852-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3352-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2352-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_2852-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3352-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2352-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2852-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3352-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_2352-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_2852-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3352-PDF-Savin.ppd.gz	ricoh-20210222-Lanier-IM_7000_PDF.ppd.gz	ricoh-20210222-Lanier-IM_8000_PDF.ppd.gz	ricoh-20210222-Lanier-IM_9000_PDF.ppd.gz	ricoh-20210222-NRG-IM_7000_PDF.ppd.gz	ricoh-20210222-NRG-IM_8000_PDF.ppd.gz	ricoh-20210222-NRG-IM_9000_PDF.ppd.gz	ricoh-20210222-Ricoh-IM_7000_PDF.ppd.gz	ricoh-20210222-Ricoh-IM_8000_PDF.ppd.gz	ricoh-20210222-Ricoh-IM_9000_PDF.ppd.gz	ricoh-20210222-Savin-IM_7000_PDF.ppd.gz	ricoh-20210222-Savin-IM_8000_PDF.ppd.gz	ricoh-20210222-Savin-IM_9000_PDF.ppd.gz	foomatic-20200219-Gestetner-DSc1220ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc1225ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2004ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2504ex-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2004ex-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2504ex-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2004ex-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C2504ex-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2004ex-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2504ex-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C2004ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2094exJ-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2504ex-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2594exJ-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C2004ex-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C2504ex-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm_2625-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm_2630-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm_2635-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_2555-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3055-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3555-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2555-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3055-PDF-Infotec.ppd.gz	
foomatic-20200219-Infotec-MP_3555-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2555-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3055-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3555-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2555-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3055-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3555-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_2555-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_2595J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3055-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3095J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3555-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3595J-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_2555-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3055-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3555-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1220-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc1225-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2004-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2504-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2004-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2504-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2004-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C2504-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2004-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2504-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C2004-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2094J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2504-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2594J-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C2004-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C2504-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C6000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C7500-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C6000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C7500-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C6000_LD260c-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C7500_LD275c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C6000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C7500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C6000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C7500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C6055-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C7570-PDF-Savin.ppd.gz	foomatic-20190909-Ricoh-IM_C4500_PDF.ppd.gz	foomatic-20200219-Gestetner-GS3045-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C4500-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_C4500-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_C4500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-IM_C5500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_C5500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_C4500-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GS3160-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C6000-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_C6000-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_C6000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_C6000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_C6000-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_2550-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_2550B-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_2851-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3350-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3350B-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3351-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2550-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_2550B-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_2851-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3350-PDF-Infotec.ppd.gz	
foomatic-20200219-Infotec-MP_3350B-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3351-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2550B_LD425B-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_2550_LD425-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_2851_LD528-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3350B_LD433B-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3350_LD433-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3351_LD533-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2550-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_2550B-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_2851-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3350-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3350B-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3351-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2550-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2550B-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2851-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3350-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3350B-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3351-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-9025-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9025b-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9033-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9033b-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9228-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9233-PDF-Savin.ppd.gz	ricoh-20210601-Gestetner-GS3040m_PDF.ppd.gz	ricoh-20210601-Gestetner-GS3050m_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_4000_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_5000_PDF.ppd.gz	ricoh-20210601-Lanier-IM_4000_PDF.ppd.gz	ricoh-20210601-Lanier-IM_5000_PDF.ppd.gz	ricoh-20210601-NRG-IM_4000_PDF.ppd.gz	ricoh-20210601-NRG-IM_5000_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_4000_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_5000_PDF.ppd.gz	ricoh-20210601-Savin-IM_4000_PDF.ppd.gz	ricoh-20210601-Savin-IM_5000_PDF.ppd.gz	foomatic-20200219-Gestetner-GS3030-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C3000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C3500-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_C3000-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-IM_C3500-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_C3000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-IM_C3500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_C3000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-IM_C3500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_C3000-PDF-Savin.ppd.gz	foomatic-20200219-Savin-IM_C3500-PDF-Savin.ppd.gz	ricoh-20200821-Ricoh-IM_C3509J_PDF.ppd.gz	foomatic-20200219-Lanier-SP_8400DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_8400DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_8400DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_8400DN-PDF-Savin.ppd.gz	ricoh-20210601-Gestetner-GS3060m_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_6000_PDF.ppd.gz	ricoh-20210601-Lanier-IM_6000_PDF.ppd.gz	ricoh-20210601-NRG-IM_6000_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_6000_PDF.ppd.gz	ricoh-20210601-Savin-IM_6000_PDF.ppd.gz	foomatic-20200219-Gestetner-DSm1525-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm1530-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm1533-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_2553-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3053-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3353-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2553-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3053-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_3353-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2553-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3053-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3353-PDF-Lanier.ppd.gz	
foomatic-20200219-NRG-MP_2553-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3053-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3353-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_2553-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3053-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_3353-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_2553-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3053-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_3353-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_8200DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP150dn_SP8200DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_8200DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_8200DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP150DN-PDF-Savin.ppd.gz	ricoh-20210601-Gestetner-GS3025m_PDF.ppd.gz	ricoh-20210601-Gestetner-GS3030m_PDF.ppd.gz	ricoh-20210601-Gestetner-GS3035m_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_2500_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_3000_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_3500_PDF.ppd.gz	ricoh-20210601-Lanier-IM_2500_PDF.ppd.gz	ricoh-20210601-Lanier-IM_3000_PDF.ppd.gz	ricoh-20210601-Lanier-IM_3500_PDF.ppd.gz	ricoh-20210601-NRG-IM_2500_PDF.ppd.gz	ricoh-20210601-NRG-IM_3000_PDF.ppd.gz	ricoh-20210601-NRG-IM_3500_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_2500_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_2509J_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_3000_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_3009J_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_3500_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_3509J_PDF.ppd.gz	ricoh-20210601-Savin-IM_2500_PDF.ppd.gz	ricoh-20210601-Savin-IM_3000_PDF.ppd.gz	ricoh-20210601-Savin-IM_3500_PDF.ppd.gz	foomatic-20200219-Infotec-Pro_8300S-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_8300S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_8300S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_8300S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_8300S-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GS3020-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-GS3025-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C2000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_C2500-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_C2000-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-IM_C2500-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_C2000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-IM_C2500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_C2000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-IM_C2500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_C2000-PDF-Savin.ppd.gz	foomatic-20200219-Savin-IM_C2500-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1020-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc1025-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2003-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2503-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2003-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2003Z-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2503-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2503Z-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2003-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C2503-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2003-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2003Z-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2503-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2503Z-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C2003-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2003J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2003Z-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2503-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2503J-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C2503Z-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C2003-PDF-Savin.ppd.gz	
foomatic-20200219-Savin-MP_C2503-PDF-Savin.ppd.gz	ricoh-20200821-Ricoh-IM_C2509J_PDF.ppd.gz	foomatic-20200219-Lanier-SP_8300DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_8300DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_8300DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_8300DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_4000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_4000B-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_4001-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5000B-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_5001-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_4000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_4000B-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_4001-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5000B-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_5001-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_4000B_LD040B-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_4000_LD040-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_4001_LD140-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5000B_LD050B-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5000_LD050-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_5001_LD150-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_4000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_4000B-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_4001-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5000B-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_5001-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_4000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_4000B-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_4001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_5000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_5000B-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_5001-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-9040-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9040b-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9050-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9050b-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9240-PDF-Savin.ppd.gz	foomatic-20200219-Savin-9250-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-CS555-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc460-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_4560-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_5560-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LC155-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD160c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-CS555-PDF-NRG.ppd.gz	foomatic-20200219-NRG-DSc460-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_3260C-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_Color5560-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C6045-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SDC555-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C2051-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2551-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2051-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2551-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD_620C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD_625C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2051-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2551-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2051-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2551-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C9120-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9125-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C2050-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2550-PDF-Gestetner.ppd.gz	
foomatic-20200219-Infotec-MP_C2050-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C2550-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2050_LD520C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C2550_LD525C-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2050-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C2550-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2050-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2550-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C9020-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C9025-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MPC2500_DSc525-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MPC3000_DSc530-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_2525-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_3030-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C2500_LD425c-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C3000_LD430c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C3000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C2500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C2525-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C3030-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-C7640nD-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IPC_4040DN-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LP440c_SP_C811DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C811DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C811DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP240D-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-MP_C501-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C501-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C501-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C501-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP5500_DSm755-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP6500_DSm765-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP7500_DSm775-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_6000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_7000-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_8000-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2255-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2265-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2275-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_6000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_7000-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_8000-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_5500_LD255-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_6000_LD260-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_6500_LD265-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_7000_LD270-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_7500_LD275-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_8000_LD280-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_5500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_6000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_6500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_7000-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_7500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_8000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_5500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_6000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_6500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_7000-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_7500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_8000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8055-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8060-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8065-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8070-PDF-Savin.ppd.gz	
foomatic-20200219-Savin-8075-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8080-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MPC3500_DSc535-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MPC4500_DSc545-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_3535-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_4045-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C3500_LD435c-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C4500_LD445c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C3500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C4500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C3500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C4500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C3535-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C4540-PDF-Savin.ppd.gz	ricoh-20190916-Ricoh-MP_C306Z_JPN_PDF.ppd.gz	foomatic-20200219-Gestetner-Pro_1106EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-Pro_1356EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-Pro_906EX-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-Pro_1106EX-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_1356EX-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-Pro_906EX-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-Pro_1106EX-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_1356EX-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-Pro_906EX-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-Pro_1106EX-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_1356EX-PDF-NRG.ppd.gz	foomatic-20200219-NRG-Pro_906EX-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Pro_1106EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_1356EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Pro_906EX-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-Pro_1106EX-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_1356EX-PDF-Savin.ppd.gz	foomatic-20200219-Savin-Pro_906EX-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP3500_DSm735e-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP4500_DSm745e-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2435-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2445-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_3500_LD335-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_4500_LD345-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_3500-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_4500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_3500-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_4500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8035e-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8045e-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc424-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc432-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_1024c-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_1032c-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD124c-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD132c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-DSc424-PDF-NRG.ppd.gz	foomatic-20200219-NRG-DSc432-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_3224C-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_3232C-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C2410-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C3210e-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_2510_DSm725e-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_3010_DSm730e-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2425-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2430-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2510_LD325-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_3010_LD330-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2510-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_3010-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2510-PDF-Ricoh.ppd.gz	
foomatic-20200219-Ricoh-Aficio_MP_3010-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8025e-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8030e-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm725-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm730-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2225-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2230-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD225-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD230-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-DSm725-PDF-NRG.ppd.gz	foomatic-20200219-NRG-DSm730-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_3025-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_3030-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8025-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8030-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm735_735G-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm745_745G-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2235-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2245-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD235-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD245-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-DSm735-PDF-NRG.ppd.gz	foomatic-20200219-NRG-DSm745-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_3035-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_3045-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8035_8035g-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8045_8045g-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP1100_DSm7110-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP1350_DSm7135-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP9000_DSm790-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_3090-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_3110-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_3135-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_1100_LD1100-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_1350_LD1135-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_9000_LD190-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_1100-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_1350-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_9000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_1100-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_1350-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_9000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-8090-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8110-PDF-Savin.ppd.gz	foomatic-20200219-Savin-8135-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C307-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C407-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C307-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C407-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C307-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C407-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C307-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C407-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C307-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C407-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C307-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C407-PDF-Savin.ppd.gz	ricoh-20200221-Ricoh-SP_C342M_JPN_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_C306Z-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C406Z-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C306Z-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C406Z-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C306Z-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C406Z-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C306Z-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C406Z-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C306Z-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C406Z-PDF-Ricoh.ppd.gz	
foomatic-20200219-Savin-MP_C306Z-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_C406Z-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_305plus-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_305plus-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_305plus-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_305plus-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_305plus-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_305plus-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C305-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C305-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C305-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C305-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C305-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C305-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-C7521n-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP222cn_LP221c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-C7521n-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL3500N-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP22-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc1120-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C2011-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C401SR-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C2011-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C401SR-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C401SR-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C2011-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C401SR-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C2011-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_C401SR-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C401SR-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_2501-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2501-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2501-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2501-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_2501-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_2501-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_C730DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C730DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C730DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C730DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C300SR-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C400SR-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C300SR-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C400SR-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD_130CSR-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD_140CSR-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C300SR-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C400SR-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C300SR-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C400SR-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C300SR-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C400SR-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C_230SR-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C_240SR-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-P7575-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP175_LP175hdn-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-P7575-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP900-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP75n-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-P7675-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-SP_9100_LP275-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_9100DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_9100DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP175n-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-P7245-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP145n_SP_8100DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_8100DN-PDF-NRG.ppd.gz	
foomatic-20200219-Ricoh-Aficio_SP_8100DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP145-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_C342DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C342DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C342DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C342DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_301-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_301-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_301-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_301-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_301-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_301-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_2001-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_2001-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2001-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2001-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_2001-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_2001J-PDF-Ricoh.ppd.gz	foomatic-20200219-Lanier-SP_C352DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C352DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C352DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C352DN-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_C340DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C340DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C340DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C340DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C401-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C401-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_C401-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C401-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C401-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_C401-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP2500_DSm625-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2325-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP2500_LD125-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2500-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2500-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-7025-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_C300-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_C400-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_C300-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_C400-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LD_130C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LD_140C-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C300-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_C400-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C300-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_C400-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C300-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_C400-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-C_230-PDF-Savin.ppd.gz	foomatic-20200219-Savin-C_240-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MPC1500_GS106-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_615G-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MPC1500_LD215c-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_C1500sp-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_C1500_615C-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SGC_1506-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_6430DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_6430DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_6430DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_6430DN-PDF-Savin.ppd.gz	foomatic-20190909-Ricoh-IM_430_PDF.ppd.gz	foomatic-20200219-Gestetner-IM_350-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_430-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_350-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-IM_430-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_350-PDF-NRG.ppd.gz	foomatic-20200219-NRG-IM_430-PDF-NRG.ppd.gz	
foomatic-20200219-Ricoh-IM_350-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_350-PDF-Savin.ppd.gz	foomatic-20200219-Savin-IM_430-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_C320DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-SP_C320DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C320DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C320DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C320DN-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-MP_201-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_501-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-MP_601-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_201_LD_220-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_501-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_601-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_201-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_501-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_601-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_201-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_501-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_601-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-920-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_501-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_601-PDF-Savin.ppd.gz	foomatic-20200219-Infotec-SP_5300-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-SP_5310-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-SP_5300-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_5310-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_5300-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_5310-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_5300-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_5310-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_5300-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_5310-PDF-Savin.ppd.gz	lanier-20190916-Lanier-IM_600SR_PDF.ppd.gz	nrg-20190916-NRG-IM_600SR_PDF.ppd.gz	ricoh-20190916-Ricoh-IM_600SR_PDF.ppd.gz	savin-20190916-Savin-IM_600SR_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_171-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_171-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_171_LD_117-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_171-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_171-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-917-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP2000_DSm721d-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2320-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_2000_LD320d-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_2000-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_2000-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-9021d-PDF-Savin.ppd.gz	ricoh-20200221-Lanier-IM_C400SR_PDF.ppd.gz	ricoh-20200221-NRG-IM_C400SR_PDF.ppd.gz	ricoh-20200221-Ricoh-IM_C400SR_PDF.ppd.gz	ricoh-20200221-Savin-IM_C400SR_PDF.ppd.gz	ricoh-20200821-Lanier-IM_C530F_PDF.ppd.gz	ricoh-20200821-NRG-IM_C530F_PDF.ppd.gz	ricoh-20200821-Ricoh-IM_C530F_PDF.ppd.gz	ricoh-20200821-Savin-IM_C530F_PDF.ppd.gz	foomatic-20200219-Lanier-SP_5210SR-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_5210SR-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_5210SR-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_5210SR-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_402SPF-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7535n-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-MP_402SPF-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LP135n-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_402SPF-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4310N-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_402SPF-PDF-NRG.ppd.gz	foomatic-20200219-NRG-P7535n-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_4310N-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP610N-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_4310N-PDF-Ricoh.ppd.gz	
foomatic-20200219-Ricoh-MP_402SPF-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP35n-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_402SPF-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_4310N-PDF-Savin.ppd.gz	ricoh-20200221-Gestetner-IM_C300_PDF.ppd.gz	ricoh-20200221-Gestetner-IM_C400_PDF.ppd.gz	ricoh-20200221-Lanier-IM_C300_PDF.ppd.gz	ricoh-20200221-Lanier-IM_C400_PDF.ppd.gz	ricoh-20200221-NRG-IM_C300_PDF.ppd.gz	ricoh-20200221-NRG-IM_C400_PDF.ppd.gz	ricoh-20200221-Ricoh-IM_C300_PDF.ppd.gz	ricoh-20200221-Ricoh-IM_C400_PDF.ppd.gz	ricoh-20200221-Savin-IM_C300_PDF.ppd.gz	ricoh-20200221-Savin-IM_C400_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_161_DSm416-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2416-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_161_LD016-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_161-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_161-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-816-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_C435DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C440DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C440DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C435DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C440DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C435DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_C440DN-PDF-Savin.ppd.gz	lanier-20190916-Lanier-IM_550_PDF.ppd.gz	lanier-20190916-Lanier-IM_600_PDF.ppd.gz	nrg-20190916-NRG-IM_550_PDF.ppd.gz	nrg-20190916-NRG-IM_600_PDF.ppd.gz	ricoh-20190916-Ricoh-IM_550_PDF.ppd.gz	ricoh-20190916-Ricoh-IM_600_PDF.ppd.gz	savin-20190916-Savin-IM_550_PDF.ppd.gz	savin-20190916-Savin-IM_600_PDF.ppd.gz	lanier-20190916-Lanier-P_800_PDF.ppd.gz	lanier-20190916-Lanier-P_801_PDF.ppd.gz	nrg-20190916-NRG-P_800_PDF.ppd.gz	nrg-20190916-NRG-P_801_PDF.ppd.gz	ricoh-20190916-Ricoh-P_800_PDF.ppd.gz	ricoh-20190916-Ricoh-P_801_PDF.ppd.gz	savin-20190916-Savin-P_800_PDF.ppd.gz	savin-20190916-Savin-P_801_PDF.ppd.gz	foomatic-20200219-Lanier-SP_5200DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_5210DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_5200DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_5210DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_5200DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_5210DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_5200DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_5210DN-PDF-Savin.ppd.gz	foomatic-20200219-Ricoh-SP_450DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Gestetner-MP_401SPF-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-SP_C430DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-SP_C431DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP137CN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP142CN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_401SPF-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4510SF-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C430DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C431DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_4510SF-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C430DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C431DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C430DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C431DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_401SPF-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_4510SF-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP37DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-CLP42DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_401SPF-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_4510SF-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_4510DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4520DN-PDF-Lanier.ppd.gz	
foomatic-20200219-NRG-SP_4510DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_4510DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_4520DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_4510DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_4520DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-P7031n-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7031nL-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7035n-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP131n-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP131nL-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP136n-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4100N-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4100NL-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4110N-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_4100N-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_4100NL-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_4110N-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_4100N-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_4100NL-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_4110N-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP31n-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MLP31nL-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MLP36n-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-P7527-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7527n-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP127n_LP128n-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP128-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_5210SF-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-P7527-PDF-NRG.ppd.gz	foomatic-20200219-NRG-P7527n-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_5210SF-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP410-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP410N-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_5210SF-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP28-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MLP28n-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_5210SF-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_C420DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP331cn_SP_C420-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C420DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C420DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP131DN-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-SP_5200S-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_5200S-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_5200S-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_5200S-PDF-Savin.ppd.gz	lanier-20190916-Lanier-P_C600_PDF.ppd.gz	ricoh-20190916-Ricoh-P_C600_PDF.ppd.gz	savin-20190916-Savin-P_C600_PDF.ppd.gz	ricoh-20200527-Gestetner-GS3021_PDF.ppd.gz	ricoh-20200527-Ricoh-M_C2001_PDF.ppd.gz	foomatic-20200219-Gestetner-C7526dn-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-C7531dn-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IPC_2525DN-PDF-Infotec.ppd.gz	foomatic-20200219-Infotec-IPC_3030DN-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-LP226c_SP_C410-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP231c_SP_C411-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C400DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C410DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C411DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C400DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C410DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C411DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP27DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-CLP31DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-SP_C400DN-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_6330N-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP235N-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_6330N-PDF-Lanier.ppd.gz	
foomatic-20200219-NRG-SP_6330N-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_6330N-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP235n-PDF-Savin.ppd.gz	foomatic-20200219-Lanier-P_501-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-P_502-PDF-Lanier.ppd.gz	foomatic-20200219-Ricoh-P_501-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-P_502-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-P_501-PDF-Savin.ppd.gz	foomatic-20200219-Savin-P_502-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GWD2004-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-GWD2006-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LW324-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LW326-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_W2400-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_W3600-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W2400-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W3600-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-2404WD-PDF-Savin.ppd.gz	foomatic-20200219-Savin-2406WD-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GWD3006-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LW426-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_W2401-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_W3601-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W2401-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W3601-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-3406WD-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_W2470-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP124w-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_W2470-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_W2470-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-2404WDP-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-SP_4210N-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP37N-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_4210N-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_4210N-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_4210N-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP37N-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP1600_DSm716-PDF-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2316-PDF-Infotec.ppd.gz	foomatic-20200219-Lanier-MP_1600_LD316-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_1600-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_1600-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-9016-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_CW2201-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-MP_CW2201-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_CW2201-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_CW2201-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_CW2201-PDF-Savin.ppd.gz	ricoh-20200821-Lanier-IM_C530FB_PDF.ppd.gz	ricoh-20200821-NRG-IM_C530FB_PDF.ppd.gz	ricoh-20200821-Ricoh-IM_C530FB_PDF.ppd.gz	ricoh-20200821-Savin-IM_C530FB_PDF.ppd.gz	foomatic-20200219-Gestetner-MP_W7100-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-MP_W8140-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-MP_W7100-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-MP_W8140-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_W7100-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_W8140-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_W7100-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-MP_W8140-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_W7100-PDF-Savin.ppd.gz	foomatic-20200219-Savin-MP_W8140-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_CW2200-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-MP_CW2200-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_CW2200-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_CW2200-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_CW2200-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-MP_W6700-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-MP_W6700-PDF-Lanier.ppd.gz	
foomatic-20200219-NRG-MP_W6700-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-MP_W6700-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-MP_W6700-PDF-Savin.ppd.gz	foomatic-20200219-Gestetner-GWD5100-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-GWD7140-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LW5100-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LW7140-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-MP_W5100-PDF-NRG.ppd.gz	foomatic-20200219-NRG-MP_W7140-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W5100-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_MP_W7140-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-5100WD-PDF-Savin.ppd.gz	foomatic-20200219-Savin-7140WD-PDF-Savin.ppd.gz
 foomatic-20191029-Samsung_ML-2525W_Series_PXL.ppd.gz
 foomatic-20200219-Anitech-M24-epson.ppd.gz	foomatic-20200219-Brother-MC-3000-epson.ppd.gz	foomatic-20200219-Epson-L-1000-epson.ppd.gz	foomatic-20200219-Epson-LQ-500-epson.ppd.gz	foomatic-20200219-Epson-LQ-570plus-epson.ppd.gz	foomatic-20200219-Epson-LX-1050-epson.ppd.gz	foomatic-20200219-Oki-ML_380-epson.ppd.gz	foomatic-20200219-Panasonic-KX-P1123-epson.ppd.gz	foomatic-20200219-Panasonic-KX-P1124-epson.ppd.gz	foomatic-20200219-Panasonic-KX-P1624-epson.ppd.gz	foomatic-20200219-Panasonic-KX-P2023-epson.ppd.gz	foomatic-20200219-Panasonic-KX-P2123-epson.ppd.gz	foomatic-20200219-Star-LC_90-epson.ppd.gz	foomatic-20200219-Star-NL-10-epson.ppd.gz
 foomatic-20200219-Apple-Color_StyleWriter_1500-lpstyl.ppd.gz	foomatic-20200219-Apple-Color_StyleWriter_2200-lpstyl.ppd.gz	foomatic-20200219-Apple-Color_StyleWriter_2400-lpstyl.ppd.gz	foomatic-20200219-Apple-Color_StyleWriter_2500-lpstyl.ppd.gz	foomatic-20200219-Apple-StyleWriter_1200-lpstyl.ppd.gz	foomatic-20200219-Apple-StyleWriter_I-lpstyl.ppd.gz	foomatic-20200219-Apple-StyleWriter_II-lpstyl.ppd.gz
 foomatic-20200219-Apple-ImageWriter_LQ-iwhi.ppd.gz	foomatic-20200219-Apple-ImageWriter-iwhi.ppd.gz	foomatic-20200219-Apple-ImageWriter_II-iwhi.ppd.gz
 foomatic-20200219-Brother-HJ-400-lq850.ppd.gz	foomatic-20200219-Canon-BJ-300-lq850.ppd.gz	foomatic-20200219-Epson-LQ-850-lq850.ppd.gz	foomatic-20200219-Generic-ESC_P_Dot_Matrix_Printer-lq850.ppd.gz
 foomatic-20200219-Brother-HL-1020-hl7x0.ppd.gz	foomatic-20200219-Brother-HL-720-hl7x0.ppd.gz	foomatic-20200219-Brother-HL-730-hl7x0.ppd.gz	foomatic-20200219-Brother-HL-820-hl7x0.ppd.gz	foomatic-20200219-Brother-MFC-9050-hl7x0.ppd.gz
-foomatic-20200219-Brother-HL-10V-ljet3.ppd.gz	foomatic-20200219-Canon-LBP-4sx-ljet3.ppd.gz	foomatic-20200219-DEC-1800-ljet3.ppd.gz	foomatic-20200219-Epson-ActionLaser_1100-ljet3.ppd.gz	foomatic-20200219-Epson-EPL-5200-ljet3.ppd.gz	foomatic-20200219-Epson-EPL-5200plus-ljet3.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_8000-ljet3.ppd.gz	foomatic-20200219-Generic-PCL_5_Printer-ljet3.ppd.gz	foomatic-20200219-Tally-MT908-ljet3.ppd.gz
-foomatic-20200219-Brother-HL-1240-laserjet.ppd.gz	foomatic-20200219-Epson-EPL-7100-laserjet.ppd.gz	foomatic-20200219-Brother-HL-4Ve-laserjet.ppd.gz	foomatic-20200219-Brother-MFC-8300-laserjet.ppd.gz	foomatic-20200219-Brother-MFC-8600-laserjet.ppd.gz	foomatic-20200219-Citizen-ProJet_II-laserjet.ppd.gz	foomatic-20200219-Epson-ActionLaser_II-laserjet.ppd.gz	foomatic-20200219-Generic-PCL_4_Printer-laserjet.ppd.gz	foomatic-20200219-IBM-4019-laserjet.ppd.gz	foomatic-20200219-IBM-4029_030_LaserPrinter_10-laserjet.ppd.gz	foomatic-20200219-IBM-4312-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1200-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1200S-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1800-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2200-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2200S-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3300-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-5000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-800-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-800T-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-820-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1550-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1550plus-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3400-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3400plus-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-400-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-5500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-850-laserjet.ppd.gz	foomatic-20200219-Kyocera-P-2000-laserjet.ppd.gz	foomatic-20200219-Kyocera-P-2002-laserjet.ppd.gz	foomatic-20200219-Oki-OL400-laserjet.ppd.gz	foomatic-20200219-Oki-OL610e_S-laserjet.ppd.gz	foomatic-20200219-Oki-OL800-laserjet.ppd.gz	foomatic-20200219-Olivetti-JP350S-laserjet.ppd.gz	foomatic-20200219-Olivetti-PG_306-laserjet.ppd.gz	foomatic-20200219-PCPI-1030-laserjet.ppd.gz	foomatic-20200219-Panasonic-KX-P6150-laserjet.ppd.gz	foomatic-20200219-Seiko-SpeedJET_200-laserjet.ppd.gz	foomatic-20200219-Star-LaserPrinter_8-laserjet.ppd.gz
+foomatic-20200219-Brother-HL-10V-ljet3.ppd.gz	foomatic-20200219-Canon-LBP-4sx-ljet3.ppd.gz	foomatic-20200219-DEC-1800-ljet3.ppd.gz	foomatic-20200219-Epson-ActionLaser_1100-ljet3.ppd.gz	foomatic-20200219-Epson-EPL-5200-ljet3.ppd.gz	foomatic-20200219-Epson-EPL-5200plus-ljet3.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_8000-ljet3.ppd.gz	foomatic-20200219-Generic-PCL_5_Printer-ljet3.ppd.gz	foomatic-20200219-HP-LaserJet_3-ljet3.ppd.gz	foomatic-20200219-Tally-MT908-ljet3.ppd.gz
+foomatic-20200219-Brother-HL-1240-laserjet.ppd.gz	foomatic-20200219-Epson-EPL-7100-laserjet.ppd.gz	foomatic-20200219-Brother-HL-4Ve-laserjet.ppd.gz	foomatic-20200219-Brother-MFC-8300-laserjet.ppd.gz	foomatic-20200219-Brother-MFC-8600-laserjet.ppd.gz	foomatic-20200219-Citizen-ProJet_II-laserjet.ppd.gz	foomatic-20200219-Epson-ActionLaser_II-laserjet.ppd.gz	foomatic-20200219-Generic-PCL_4_Printer-laserjet.ppd.gz	foomatic-20200219-HP-LaserJet-laserjet.ppd.gz	foomatic-20200219-HP-LaserJet_2-laserjet.ppd.gz	foomatic-20200219-IBM-4019-laserjet.ppd.gz	foomatic-20200219-IBM-4029_030_LaserPrinter_10-laserjet.ppd.gz	foomatic-20200219-IBM-4312-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1200-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1200S-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1800-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2200-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2200S-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3300-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-5000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-800-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-800T-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-820-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1550-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1550plus-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3400-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3400plus-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-400-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-5500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-850-laserjet.ppd.gz	foomatic-20200219-Kyocera-P-2000-laserjet.ppd.gz	foomatic-20200219-Kyocera-P-2002-laserjet.ppd.gz	foomatic-20200219-Oki-OL400-laserjet.ppd.gz	foomatic-20200219-Oki-OL610e_S-laserjet.ppd.gz	foomatic-20200219-Oki-OL800-laserjet.ppd.gz	foomatic-20200219-Olivetti-JP350S-laserjet.ppd.gz	foomatic-20200219-Olivetti-PG_306-laserjet.ppd.gz	foomatic-20200219-PCPI-1030-laserjet.ppd.gz	foomatic-20200219-Panasonic-KX-P6150-laserjet.ppd.gz	foomatic-20200219-Seiko-SpeedJET_200-laserjet.ppd.gz	foomatic-20200219-Star-LaserPrinter_8-laserjet.ppd.gz
 foomatic-20200219-Brother-HL-2240D-hl1250.ppd.gz	foomatic-20200219-Brother-HL-2240-hl1250.ppd.gz	foomatic-20200219-Brother-MFC-P2500-hl1250.ppd.gz	foomatic-20200219-Brother-HL-1230-hl1250.ppd.gz	foomatic-20200219-Brother-HL-1030-hl1250.ppd.gz
 foomatic-20200219-Brother-HL-7050N-pxlmono.ppd.gz	foomatic-20200219-Lexmark-E238-pxlmono.ppd.gz	foomatic-20200219-Lexmark-E450dn-pxlmono.ppd.gz	foomatic-20200219-Lexmark-X342n-pxlmono.ppd.gz	foomatic-20200219-Lexmark-X642e-pxlmono.ppd.gz	foomatic-20200219-Oce-fx2080-pxlmono.ppd.gz	foomatic-20200219-Oce-sx1480-pxlmono.ppd.gz	foomatic-20200219-Oki-B2400-pxlmono.ppd.gz	foomatic-20200219-Oki-B4200-pxlmono.ppd.gz	foomatic-20200219-Oki-B4250-pxlmono.ppd.gz	foomatic-20200219-Oki-B4400-pxlmono.ppd.gz	foomatic-20200219-Oki-B4600-pxlmono.ppd.gz	foomatic-20200219-Oki-B4600_PS-pxlmono.ppd.gz	foomatic-20200219-Oki-B6300-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-2330-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-4510-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-4520-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-8016P-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-8020E-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-8045-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-8060-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-C213-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-C262-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-C264-pxlmono.ppd.gz	foomatic-20200219-Panasonic-DP-C354-pxlmono.ppd.gz	foomatic-20200219-Samsung-K2200-pxlmono.ppd.gz	foomatic-20200219-Samsung-M262x_282x-pxlmono.ppd.gz	foomatic-20200219-Samsung-M267x_287x-pxlmono.ppd.gz	foomatic-20200219-Samsung-M283x-pxlmono.ppd.gz	foomatic-20200219-Samsung-M288x-pxlmono.ppd.gz	foomatic-20200219-Samsung-M301x-pxlmono.ppd.gz	foomatic-20200219-Samsung-M306x-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-2571N-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-2580-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-2850D-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-2851ND-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-2950-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-3051N-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-3051ND-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-3470D-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-3471ND-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-4050N-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-4551N-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-4551ND-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-6060-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-6060N-pxlmono.ppd.gz	foomatic-20200219-Samsung-ML-6060S-pxlmono.ppd.gz	foomatic-20200219-Samsung-SCX-470x-pxlmono.ppd.gz	foomatic-20200219-Samsung-SCX-472x-pxlmono.ppd.gz	foomatic-20200219-Samsung-SCX-4x24-pxlmono.ppd.gz	foomatic-20200219-Samsung-SCX-4x25-pxlmono.ppd.gz	foomatic-20200219-Samsung-SCX-4x26-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_1101-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_1351-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_162-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_162d-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_167-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_203l-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_233-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_237-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_283-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_353-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_450s-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_453-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_500p-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_500s-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_523-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_523t-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_603-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_603t-pxlmono.ppd.gz	
foomatic-20200219-Toshiba-e-Studio_723-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_723t-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_853-pxlmono.ppd.gz	foomatic-20200219-Toshiba-e-Studio_901-pxlmono.ppd.gz	foomatic-20200219-Brother-HL-2460N-pxlmono.ppd.gz	foomatic-20200219-Canon-LBP-3460-pxlmono.ppd.gz	foomatic-20200219-Canon-LBP-5970-pxlmono.ppd.gz	foomatic-20200219-Canon-LBP-5975-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_1023-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_1023N-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_1023iF-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_2016-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_2016i-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_2018-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_2020-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_2020i-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_2022-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_2230-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_2270-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_3025-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_3225-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_7086-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_7095-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_7105-pxlmono.ppd.gz	foomatic-20200219-Canon-imageRunner_8070-pxlmono.ppd.gz	foomatic-20200219-Xerox-Phaser_3150-pxlmono.ppd.gz	foomatic-20200219-Xerox-Phaser_3500-pxlmono.ppd.gz	foomatic-20200219-Xerox-WorkCentre_PE120-pxlmono.ppd.gz
-foomatic-20200219-Brother-HL-8-ljetplus.ppd.gz	foomatic-20200219-Lexmark-Winwriter_400-ljetplus.ppd.gz	foomatic-20200219-Panasonic-KX-P4450-ljetplus.ppd.gz
+foomatic-20200219-Brother-HL-8-ljetplus.ppd.gz	foomatic-20200219-HP-LaserJet_Plus-ljetplus.ppd.gz	foomatic-20200219-Lexmark-Winwriter_400-ljetplus.ppd.gz	foomatic-20200219-Panasonic-KX-P4450-ljetplus.ppd.gz
 foomatic-20200219-Brother-MFC-9100c-epsonc.ppd.gz	foomatic-20200219-Brother-MFC_7150C-epsonc.ppd.gz	foomatic-20200219-Epson-Dot_Matrix-epsonc.ppd.gz	foomatic-20200219-Epson-LQ-24-epsonc.ppd.gz	foomatic-20200219-Epson-LQ-2550-epsonc.ppd.gz	foomatic-20200219-Fujitsu-1200-epsonc.ppd.gz	foomatic-20200219-Fujitsu-2400-epsonc.ppd.gz	foomatic-20200219-Fujitsu-3400-epsonc.ppd.gz	foomatic-20200219-Panasonic-KX-P2135-epsonc.ppd.gz	foomatic-20200219-Star-LC24-200-epsonc.ppd.gz
 foomatic-20200219-CItoh-M8510-m8510.ppd.gz
 foomatic-20200219-Canon-BJ-10e-bj10e.ppd.gz	foomatic-20200219-Canon-BJ-20-bj10e.ppd.gz	foomatic-20200219-Canon-BJ-5-bj10e.ppd.gz
@@ -28,14 +28,13 @@
 foomatic-20200219-Canon-BJC-610-bjc610XY.upp.ppd.gz	foomatic-20200219-Canon-BJC-620-bjc610XY.upp.ppd.gz	foomatic-20200219-Canon-BJC-6200-bjc610XY.upp.ppd.gz	foomatic-20200219-Canon-MultiPASS_C3000-bjc610XY.upp.ppd.gz	foomatic-20200219-Canon-BJC-2100-bjc610XY.upp.ppd.gz	foomatic-20200219-Canon-BJC-250-bjc610XY.upp.ppd.gz	foomatic-20200219-Canon-BJC-600-bjc610XY.upp.ppd.gz
 foomatic-20200219-Canon-BJC-8200-bj8XXYYZ.upp.ppd.gz	foomatic-20200219-Canon-S330_Photo-bj8XXYYZ.upp.ppd.gz	foomatic-20200219-Canon-S500-bj8XXYYZ.upp.ppd.gz	foomatic-20200219-Canon-S600-bj8XXYYZ.upp.ppd.gz	foomatic-20200219-Canon-S630-bj8XXYYZ.upp.ppd.gz
 foomatic-20200219-Canon-BJC-880J-bjc880j.ppd.gz
-foomatic-20200219-Canon-LBP-1000-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1650-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1670N-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1850-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1870N-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1660e-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1650-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1651N-ljet4.ppd.gz	foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1270N-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2060-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2140-ljet4.ppd.gz	foomatic-20200219-Brother-HL-5030-ljet4.ppd.gz	foomatic-20200219-Brother-HL-5040-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1250-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1430-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1440-ljet4.ppd.gz	foomatic-20200219-Epson-AL-C8500-ljet4.ppd.gz	foomatic-20200219-Epson-AL-C8500PS-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1070-ljet4.ppd.gz	foomatic-20200219-Canon-GP_335-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-1760-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-3360-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_2200-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_2800-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_3300-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_330s-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_3570-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_4570-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_5000-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_5570-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_6000-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_6570-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_8500-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_C5870U-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_C6870U-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-5800PS-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N1600-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N1600PS-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2050PS-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2050PSplus-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2750-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2750PS-ljet4.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_20W-ljet4.ppd.gz	foomatic-20200219-Generic-PCL_5c_Printer-ljet4.ppd.gz	foomatic-20200219-Generic-PCL_5e_Printer-ljet4.ppd.gz	foomatic-20200219-IBM-Infoprint_12-ljet4.ppd.gz	foomatic-20200219-Kyocera-FS-1600-ljet4.ppd.gz	foomatic-20200219-Kyocera-FS-1600plus-ljet4.ppd.gz	foomatic-20200219-Kyocera-FS-3600-ljet4.ppd.gz	foomatic-20200219-Kyocera-FS-3600plus-ljet4.ppd.gz	foomatic-20200219-Kyocera-KM-4230-ljet4.ppd.gz	foomatic-20200219-Kyocera-KM-5230-ljet4.ppd.gz	foomatic-20200219-Lexmark-E120-ljet4.ppd.gz	foomatic-20200219-Lexmark-E120n-ljet4.ppd.gz	foomatic-20200219-Lexmark-E230-ljet4.ppd.gz	foomatic-20200219-Lexmark-E320-ljet4.ppd.gz	foomatic-20200219-Lexmark-Optra_E321-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_1100-ljet4.ppd.gz	foomatic-20200219-NEC-SuperScript_1400-ljet4.ppd.gz	foomatic-20200219-Ricoh-Aficio_220-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1250-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1450-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1450PS-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1750-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2150PS-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2151N-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2151NPS-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2152W-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2152WPS-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2250-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2551N-ljet4.ppd.gz	
foomatic-20200219-Samsung-ML-2552W-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7000-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7000N-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7000P-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7050-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7300-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7300N-ljet4.ppd.gz	foomatic-20200219-Sharp-ARP350-ljet4.ppd.gz	foomatic-20200219-Sharp-ARP450-ljet4.ppd.gz	foomatic-20200219-Tektronix-Phaser_750DP-ljet4.ppd.gz	foomatic-20200219-Tektronix-Phaser_750DX-ljet4.ppd.gz	foomatic-20200219-Tektronix-Phaser_750N-ljet4.ppd.gz	foomatic-20200219-Tektronix-Phaser_750P-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_2135-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4400B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4400DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4400DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4400N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4500B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4500DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4500DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4500N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4510B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4510DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4510DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4510N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_5500B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_5500DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_5500DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_5500DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_5500N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6130N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6180DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6180MFP-D-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6200B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6200DP-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6200DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6200N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6250B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6250DP-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6250DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6250DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6250N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6300DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6300N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6350DP-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6350DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6350DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6360DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6360DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7300B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7300DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7300DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7300DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7300N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7400DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7400DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7400DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7400DXF-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7400N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7700DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7700DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7700GX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7750B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7750DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7750DXF-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7750GX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7760DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7760DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7760GX-ljet4.ppd.gz	
foomatic-20200219-Xerox-Phaser_8400B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8400BD-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8400DP-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8400DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8400N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8500DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8500N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8550DP-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8550DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8550DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8560DN-ljet4.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7345-ljet4.ppd.gz	foomatic-20200219-Brother-DCP-7010-ljet4.ppd.gz	foomatic-20200219-Brother-DCP-7020-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1050-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1060-ljet4.ppd.gz	foomatic-20200219-Brother-HL-10h-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1260-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2035-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2040-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2135-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2142-ljet4.ppd.gz	foomatic-20200219-Brother-HL-5140-ljet4.ppd.gz	foomatic-20200219-Brother-HL-660-ljet4.ppd.gz	foomatic-20200219-Brother-HL-760-ljet4.ppd.gz	foomatic-20200219-Brother-HL-960-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-1260-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-430-ljet4.ppd.gz	foomatic-20200219-DEC-LN17-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-5700-ljet4.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_10V-ljet4.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_16DV-ljet4.ppd.gz	foomatic-20200219-IBM-Page_Printer_3112-ljet4.ppd.gz	foomatic-20200219-Infotec-4651_MF-ljet4.ppd.gz	foomatic-20200219-Kyocera-FS-6500-ljet4.ppd.gz	foomatic-20200219-Lexmark-Optra_E-ljet4.ppd.gz	foomatic-20200219-Lexmark-Optra_Eplus-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_6-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_6ex-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_8-ljet4.ppd.gz	foomatic-20200219-NEC-SuperScript_1800-ljet4.ppd.gz	foomatic-20200219-NEC-SuperScript_660i-ljet4.ppd.gz	foomatic-20200219-Oki-B410-ljet4.ppd.gz	foomatic-20200219-Oki-OL810ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_10e-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_10ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_14ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_6ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_8p-ljet4.ppd.gz	foomatic-20200219-Ricoh-Aficio_700-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-4600-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-5000a-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-6000-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-6100-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-85-ljet4.ppd.gz	foomatic-20200219-Samsung-QL-5100A-ljet4.ppd.gz	foomatic-20200219-Samsung-QL-6050-ljet4.ppd.gz	foomatic-20200219-Sharp-AR-161-ljet4.ppd.gz	foomatic-20200219-Xerox-Able_1406-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_4508-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4512-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4512PS-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P1202-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P8e-ljet4.ppd.gz	foomatic-20200219-Xerox-Document_Centre_400-ljet4.ppd.gz	foomatic-20200219-Oki-OL410e-ljet4.ppd.gz	foomatic-20200219-Oki-Super_6e-ljet4.ppd.gz	foomatic-20200219-Ricoh-Aficio_401-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2120-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-5800-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2050-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2050plus-ljet4.ppd.gz
 foomatic-20200219-Canon-LBP-470-lips4.ppd.gz	foomatic-20200219-Canon-LIPS-IV-lips4.ppd.gz	foomatic-20200219-Canon-LIPS-IVv-lips4.ppd.gz
 foomatic-20200219-Canon-LBP-4U-lbp8.ppd.gz	foomatic-20200219-Canon-LBP-4plus-lbp8.ppd.gz	foomatic-20200219-Canon-LBP-8A1-lbp8.ppd.gz
 foomatic-20200219-Canon-LIPS-III-lips3.ppd.gz
 foomatic-20200219-Canon-LIPS-IIplus-lips2p.ppd.gz
 foomatic-20200219-Canon-i450-bjc800.ppd.gz	foomatic-20200219-Canon-BJC-7000-bjc800.ppd.gz	foomatic-20200219-Canon-BJC-7004-bjc800.ppd.gz	foomatic-20200219-Canon-BJC-7100-bjc800.ppd.gz	foomatic-20200219-Canon-BJC-800-bjc800.ppd.gz	foomatic-20200219-Canon-BJC-4550-bjc800.ppd.gz
-foomatic-20200219-Canon-iPR_C650_PPD-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iPR_C750_850_PPD-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iPR_C700_800-Postscript-Canon.ppd.gz	xerox-20190225-xr8580dn.ppd.gz	xerox-20190225-xrx8580n.ppd.gz	foomatic-20200219-Canon-iR-ADV_C7280-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C9270_9280-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_8205-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_8285_8295-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C7260_7270-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iPR_C600-Postscript-Canon.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_p4010_series-ps.ppd.gz	foomatic-20200219-Samsung-X703-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-X7600-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-K703-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-K7600-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-X940e-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X945e-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C935-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C930-Postscript-Lexmark.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_1050eP-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Canon-iR-ADV_C5030_5035-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C5045_5051-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C5235_5240-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C5250_5255-Postscript-Canon.ppd.gz	foomatic-20200219-Lexmark-C782-Postscript-Lexmark.ppd.gz	foomatic-20200219-Canon-iR-ADV_6255_6265-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_6275-Postscript-Canon.ppd.gz	foomatic-20200219-Lexmark-C2132-Postscript-Lexmark.ppd.gz	foomatic-20200219-Canon-iR-ADV_C2220_2230-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C2225-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_4225_4235-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_4245_4251-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C3320L-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C3320-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C3325_3330-Postscript-Canon.ppd.gz	foomatic-20200219-Lexmark-C780-Postscript-Lexmark.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2605-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2605dn-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2605dtn-ps.ppd.gz	foomatic-20200219-Lexmark-C752-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C524-Postscript-Lexmark.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2700-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2700n-ps.ppd.gz	foomatic-20200219-Lexmark-C522-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C736-Postscript-Lexmark.ppd.gz	foomatic-20200219-Samsung-CLX-9252_9352-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-982x-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-C734-Postscript-Lexmark.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2550_series-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_cm1015-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_cm1017-ps.ppd.gz	foomatic-20200219-Lexmark-C520-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C546-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X546-Postscript-Lexmark.ppd.gz	kyocera-20180809-Kyocera_TASKalfa_3051ci.ppd.gz	foomatic-20200219-Lexmark-X544-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C544-Postscript-Lexmark.ppd.gz	
foomatic-20200219-Samsung-SCX-8230_8240-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-SCX-882x-Postscript-Samsung.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_p2055_series-ps.ppd.gz	foomatic-20200219-Samsung-CLX-9250_9350-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-X734de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X736de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X738de-Postscript-Lexmark.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C451-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Lexmark-X543-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C540-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C543-Postscript-Lexmark.ppd.gz	foomatic-20200219-Kyocera-CS-C2525E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-CS-C3225E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-CS-C3232E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-CS-C4035E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C2525E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C3225E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C3232E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C4035E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C2520-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C3225-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C3232-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-C8100DNplus_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-C8100DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Lexmark-X860de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X862de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X864de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C750-Postscript-Lexmark.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1500-postscript.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C550-Postscript-KONICA_MINOLTA.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t920-postscript.ppd.gz	lexmark-20201101-Lexmark_MS410_Series.ppd.gz	foomatic-20200219-Samsung-SCX-8030_8040-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-9120DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-9520DN-Postscript-Kyocera.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t930-postscript.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C250P-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C252P-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Samsung-X401-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-X4300-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-KM-3035-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-4035-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-5035-Postscript-Kyocera.ppd.gz	foomatic-20200219-Lexmark-W850-Postscript-Lexmark.ppd.gz	foomatic-20200219-Kyocera-KM-2530-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-3530-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-4030-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-6030-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-8030-Postscript-Kyocera.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t2500-postscript.ppd.gz	foomatic-20200219-Samsung-K401-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-K4350-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-C510-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C910-Postscript-Lexmark.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C352P-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Lexmark-T650-Postscript-Lexmark.ppd.gz	
foomatic-20200219-Lexmark-T652-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-T654-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-TG654-Postscript-Lexmark.ppd.gz	foomatic-20200219-Kyocera-KM-3050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-4050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-5050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Lexmark-C912-Postscript-Lexmark.ppd.gz	lexmark-20200918-Lexmark_X658de.ppd.gz	foomatic-20200219-Lexmark-T656-Postscript-Lexmark.ppd.gz	foomatic-20200219-Oki-C9300-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C9500-Postscript-Oki.ppd.gz	sharp-20191230-Sharp-MX-7090N-ps.ppd.gz	sharp-20191230-Sharp-MX-8090N-ps.ppd.gz	foomatic-20200219-Samsung-X3220-Postscript-Samsung.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_750-Postscript-KONICA_MINOLTA.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_4v-ps.ppd.gz	foomatic-20200219-Samsung-K3250-Postscript-Samsung.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_500-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Lexmark-EG460dn-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-E360dn-Postscript-Lexmark.ppd.gz	foomatic-20200219-Kyocera-FS-C8026N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Lexmark-E260dn-Postscript-Lexmark.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C450P-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Kyocera-KM-C2630-Postscript-Kyocera.ppd.gz	sharp-20191219-Sharp-MX-3061-ps.ppd.gz	sharp-20191219-Sharp-MX-3071-ps.ppd.gz	sharp-20191219-Sharp-MX-3561-ps.ppd.gz	sharp-20191219-Sharp-MX-3571-ps.ppd.gz	sharp-20191219-Sharp-MX-4061-ps.ppd.gz	sharp-20191219-Sharp-MX-4071-ps.ppd.gz	sharp-20191219-Sharp-MX-5051-ps.ppd.gz	sharp-20191219-Sharp-MX-5071-ps.ppd.gz	sharp-20191219-Sharp-MX-6051-ps.ppd.gz	sharp-20191219-Sharp-MX-6071-ps.ppd.gz	lexmark-20200918-Lexmark_X651de.ppd.gz	lexmark-20200918-Lexmark_X652de.ppd.gz	lexmark-20200918-Lexmark_X654de.ppd.gz	lexmark-20200918-Lexmark_X656de.ppd.gz	sharp-20191219-Sharp-MX-2651-ps.ppd.gz	sharp-20191219-Sharp-MX-3051-ps.ppd.gz	sharp-20191219-Sharp-MX-3551-ps.ppd.gz	sharp-20191219-Sharp-MX-4051-ps.ppd.gz	oki-20200329-ES8434-PS.ppd.gz	oki-20200329-OKI-C834-PS.ppd.gz	oki-20200329-OKI-C844-PS.ppd.gz	foomatic-20200219-Samsung-CLX-92x1_93x1-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-981x-Postscript-Samsung.ppd.gz	oki-20201022-OKI_MC853_PS.ppd.gz	oki-20201022-OKI_MC873_PS.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C351-Postscript-KONICA_MINOLTA.ppd.gz	oki-20201022-OKI_MC883_PS.ppd.gz	epson-20200615-Epson-WF-M20590_Series_PS3.ppd.gz	sharp-20191230-Sharp-MX-6580N-ps.ppd.gz	sharp-20191230-Sharp-MX-7580N-ps.ppd.gz	foomatic-20200219-Samsung-CLX-8640_8650-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-C268x-Postscript-Samsung.ppd.gz	epson-20200615-Epson-WF-C20590_PS.ppd.gz	foomatic-20200219-Samsung-C2670-Postscript-Samsung.ppd.gz	oki-20201022-OKI_MC843_PS.ppd.gz	oki-20201022-OKI_MC863_PS.ppd.gz	sharp-20191219-Sharp-MX-2661-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3161-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3661-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4151-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4171-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5151-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5171-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-6151-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-6171-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2631-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3631-ps-jp.ppd.gz	foomatic-20200219-Samsung-C2620-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLP-680-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-6260-Postscript-Samsung.ppd.gz	
epson-20200615-Epson-WF-C17590_Series_PS3.ppd.gz	oki-20200329-OKI-C833-PS.ppd.gz	oki-20200329-OKI-C843-PS.ppd.gz	sharp-20190711-Sharp-MX-6500N-ps.ppd.gz	sharp-20190711-Sharp-MX-7500N-ps.ppd.gz	foomatic-20200219-Kyocera-KM-6330-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-7530-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-SCX-8123_8128-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-SCX-881x-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-X363dn-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X364dn-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X364dw-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X463de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X464de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X466de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X466dte-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X466dwe-Postscript-Lexmark.ppd.gz	xerox-20191030-xrxC8030.ppd.gz	xerox-20191030-xrxC8035.ppd.gz	xerox-20191030-xrxC8045.ppd.gz	xerox-20191030-xrxC8055.ppd.gz	xerox-20191030-xrxC8070.ppd.gz	foomatic-20200219-Samsung-M4370_5370-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-X264dn-Postscript-Lexmark.ppd.gz	xerox-20200129-xrxC9065.ppd.gz	xerox-20200129-xrxC9070.ppd.gz	foomatic-20200219-Oki-C9600-Postscript-Oki.ppd.gz	sharp-20191230-Sharp-MX-M2651-ps.ppd.gz	sharp-20191230-Sharp-MX-M3051-ps.ppd.gz	sharp-20191230-Sharp-MX-M3551-ps.ppd.gz	sharp-20191230-Sharp-MX-M4051-ps.ppd.gz	sharp-20191230-Sharp-MX-M5051-ps.ppd.gz	sharp-20191230-Sharp-MX-M6051-ps.ppd.gz	sharp-20191230-Sharp-MX-M3071-ps.ppd.gz	sharp-20191230-Sharp-MX-M3571-ps.ppd.gz	sharp-20191230-Sharp-MX-M4071-ps.ppd.gz	sharp-20191230-Sharp-MX-M5071-ps.ppd.gz	sharp-20191230-Sharp-MX-M6071-ps.ppd.gz	sharp-20191230-Sharp-MX-3060N-ps.ppd.gz	sharp-20191230-Sharp-MX-3060V-ps.ppd.gz	sharp-20191230-Sharp-MX-3070N-ps.ppd.gz	sharp-20191230-Sharp-MX-3070V-ps.ppd.gz	sharp-20191230-Sharp-MX-3560N-ps.ppd.gz	sharp-20191230-Sharp-MX-3560V-ps.ppd.gz	sharp-20191230-Sharp-MX-3570N-ps.ppd.gz	sharp-20191230-Sharp-MX-3570V-ps.ppd.gz	sharp-20191230-Sharp-MX-4060N-ps.ppd.gz	sharp-20191230-Sharp-MX-4060V-ps.ppd.gz	sharp-20191230-Sharp-MX-4070N-ps.ppd.gz	sharp-20191230-Sharp-MX-4070V-ps.ppd.gz	sharp-20191230-Sharp-MX-5050N-ps.ppd.gz	sharp-20191230-Sharp-MX-5050V-ps.ppd.gz	sharp-20191230-Sharp-MX-5070N-ps.ppd.gz	sharp-20191230-Sharp-MX-5070V-ps.ppd.gz	sharp-20191230-Sharp-MX-6050N-ps.ppd.gz	sharp-20191230-Sharp-MX-6050V-ps.ppd.gz	sharp-20191230-Sharp-MX-6070N-ps.ppd.gz	sharp-20191230-Sharp-MX-6070V-ps.ppd.gz	sharp-20191230-Sharp-MX-2630N-ps.ppd.gz	sharp-20191230-Sharp-MX-3050N-ps.ppd.gz	sharp-20191230-Sharp-MX-3050V-ps.ppd.gz	sharp-20191230-Sharp-MX-3550N-ps.ppd.gz	sharp-20191230-Sharp-MX-3550V-ps.ppd.gz	sharp-20191230-Sharp-MX-4050N-ps.ppd.gz	sharp-20191230-Sharp-MX-4050V-ps.ppd.gz	xerox-20190225-xrx7970.ppd.gz	foomatic-20200219-Samsung-CLP-670-Postscript-Samsung.ppd.gz	foomatic-20200219-Canon-iR-ADV_C351-Postscript-Canon.ppd.gz	foomatic-20200219-Samsung-C4820-Postscript-Samsung.ppd.gz	oki-20200329-OKB512_a.ppd.gz	oki-20200329-OKM562_a.ppd.gz	foomatic-20200219-Lexmark-E350d-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-E352dn-Postscript-Lexmark.ppd.gz	oki-20200329-OKB432_a.ppd.gz	oki-20200329-OKM492_a.ppd.gz	foomatic-20200219-Kyocera-KM-C850-Postscript-Kyocera.ppd.gz	oki-20200329-OKI-C712-PS.ppd.gz	sharp-20190711-Sharp-MX-6240N-ps.ppd.gz	sharp-20190711-Sharp-MX-7040N-ps.ppd.gz	oki-20201022-ES6450_PS.ppd.gz	oki-20201022-OKI_C650_PS.ppd.gz	xerox-20190225-xrx7830.ppd.gz	
xerox-20190225-xrx7835.ppd.gz	xerox-20190225-xrx7845.ppd.gz	xerox-20190225-xrx7855.ppd.gz	foomatic-20200219-Kyocera-KM-C830-Postscript-Kyocera.ppd.gz	foomatic-20200219-Canon-iR-ADV_C250_350-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C9060_9070-Postscript-Canon.ppd.gz	foomatic-20200219-Samsung-M5270-Postscript-Samsung.ppd.gz	oki-20200329-OKI-C612-PS.ppd.gz	sharp-20190711-Sharp-MX-6540FN-ps-jp.ppd.gz	foomatic-20200219-Oki-C7100-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C7300-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C7500-Postscript-Oki.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_6p-ps.ppd.gz	sharp-20191230-Sharp-MX-2650FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-2650FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-3150FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-3150FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-3650FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-3650FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-4150FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-4150FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-4170FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-4170FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-5150FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-5150FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-5170FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-5170FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-6150FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-6150FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-6170FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-6170FV-ps-jp.ppd.gz	foomatic-20200219-Kyocera-KM-5530-Postscript-Kyocera.ppd.gz	sharp-20191230-Sharp-MX-2630FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-3630FN-ps-jp.ppd.gz	foomatic-20200219-Kyocera-KM-4530-Postscript-Kyocera.ppd.gz	sharp-20191230-Sharp-MX-M3531-ps-jp.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3510c-Postscript-Toshiba.ppd.gz	foomatic-20200219-Samsung-M453x-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-6220-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-6250-Postscript-Samsung.ppd.gz	foomatic-20200219-Canon-iR-ADV_C9065_9075-Postscript-Canon.ppd.gz	sharp-20191230-Sharp-MX-M2630-ps.ppd.gz	sharp-20191230-Sharp-MX-M3050-ps.ppd.gz	sharp-20191230-Sharp-MX-M3550-ps.ppd.gz	sharp-20191230-Sharp-MX-M4050-ps.ppd.gz	sharp-20191230-Sharp-MX-M5050-ps.ppd.gz	sharp-20191230-Sharp-MX-M6050-ps.ppd.gz	sharp-20191230-Sharp-MX-M3070-ps.ppd.gz	sharp-20191230-Sharp-MX-M3570-ps.ppd.gz	sharp-20191230-Sharp-MX-M4070-ps.ppd.gz	sharp-20191230-Sharp-MX-M5070-ps.ppd.gz	sharp-20191230-Sharp-MX-M6070-ps.ppd.gz	foomatic-20200219-Samsung-M458x-Postscript-Samsung.ppd.gz	oki-20200129-oki-c542-ps.ppd.gz	oki-20200329-OKI-C532-PS.ppd.gz	oki-20200329-OKI-MC563-PS.ppd.gz	oki-20200329-OKI-MC573-PS.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1200_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t2300_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1300_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_44in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1120ps_44in-ps.ppd.gz	foomatic-20200219-Canon-iR-ADV_C7055_7065-Postscript-Canon.ppd.gz	foomatic-20200219-Oki-C5300-Postscript-Oki.ppd.gz	epson-20200615-Epson-LX-10010MF_Series_PS3.ppd.gz	foomatic-20200219-Samsung-CLX-8385-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-8385X-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-8540-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-C8008N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-ML-551x_651x-Postscript-Samsung.ppd.gz	sharp-20190711-Sharp-MX-M904-ps.ppd.gz	sharp-20191230-Sharp-MX-M6570-ps.ppd.gz	sharp-20191230-Sharp-MX-M7570-ps.ppd.gz	
sharp-20191230-Sharp-MX-M905-ps.ppd.gz	foomatic-20200219-Sharp-AR-311FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-311N_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-311S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-351FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-351N_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-351S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-451FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-451N_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-451S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M351N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M351U-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M355N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M355U-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M451N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M451U-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M455N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M455U-Postscript-Sharp.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_4si-ps.ppd.gz	oki-20200329-OKI-C332-PS.ppd.gz	oki-20200329-OKI-MC363-PS.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2800-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2820-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2830-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2840-ps.ppd.gz	hp-20171121-hplip-3.17.10-hp-color_laserjet-ps.ppd.gz	sharp-20190711-Sharp-MX-M1055-ps.ppd.gz	sharp-20190711-Sharp-MX-M1205-ps.ppd.gz	sharp-20190711-Sharp-MX-M1054-ps.ppd.gz	sharp-20190711-Sharp-MX-M1204-ps.ppd.gz	foomatic-20200219-Samsung-CLX-8380-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-M337x_387x_407x-Postscript-Samsung.ppd.gz	foomatic-20200219-Oki-C5400-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C5450-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C5700-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C5900-Postscript-Oki.ppd.gz	foomatic-20200219-Samsung-ML-451x_501x-Postscript-Samsung.ppd.gz	foomatic-20200219-Oki-C6100-Postscript-Oki.ppd.gz	foomatic-20200219-Samsung-SCX-483x_5x3x-Postscript-Samsung.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps-ps.ppd.gz	xerox-20200226-xrxB9100.ppd.gz	xerox-20200226-xrxB9110.ppd.gz	xerox-20200226-xrxB9125.ppd.gz	xerox-20200226-xrxB9136.ppd.gz	foomatic-20200219-Kyocera-FS-C5030N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Canon-iR-ADV_C2020i_2030i-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C2025-Postscript-Canon.ppd.gz	foomatic-20200219-Kyocera-FS-C5020N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-8000C-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-LP-9600SPD-Postscript-Epson.ppd.gz	sharp-20191219-Sharp-MX-C303-ps.ppd.gz	sharp-20191219-Sharp-MX-C303W-ps.ppd.gz	sharp-20191219-Sharp-MX-C304-ps.ppd.gz	sharp-20191219-Sharp-MX-C304W-ps.ppd.gz	sharp-20191219-Sharp-MX-C305W-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-C306W-ps-jp.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_44in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t795ps_44in-ps.ppd.gz	epson-20200615-Epson-LX-10000F_PS.ppd.gz	epson-20200615-Epson-LX-7000F_PS.ppd.gz	foomatic-20200219-Samsung-CLP-775-Postscript-Samsung.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t770_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_24in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1120ps_24in-ps.ppd.gz	foomatic-20200219-Epson-EPL-N2700-Postscript-Epson.ppd.gz	xerox-20191030-xrxC7030.ppd.gz	sharp-20191219-Sharp-MX-2614N-ps.ppd.gz	sharp-20191219-Sharp-MX-2615N-ps.ppd.gz	
sharp-20191219-Sharp-MX-3114N-ps.ppd.gz	sharp-20191219-Sharp-MX-3115N-ps.ppd.gz	epson-20200615-Epson-LX-10000FK_Series_PS3.ppd.gz	foomatic-20200219-Canon-iR-ADV_C2020_2030-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_400_500-Postscript-Canon.ppd.gz	foomatic-20200219-Kyocera-FS-C5025N-Postscript-Kyocera.ppd.gz	sharp-20190711-Sharp-MX-M654N-ps.ppd.gz	sharp-20190711-Sharp-MX-M754N-ps.ppd.gz	foomatic-20200219-Samsung-SCX-6545X-Postscript-Samsung.ppd.gz	foomatic-20200219-Epson-AL-C9100-Postscript-Epson.ppd.gz	foomatic-20200219-Kyocera-FS-9100DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-9500DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-C140x-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-C145x-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-C1810-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-C1860-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLP-410-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-4190-Postscript-Samsung.ppd.gz	foomatic-20200219-Sharp-MX-M1100-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M850-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M950-Postscript-Sharp.ppd.gz	oki-20200329-OKB841_a110.ppd.gz	hp-20190111-hplip-3.18.12-hp-designjet_z6200_42in_photo-ps.ppd.gz	hp-20190111-hplip-3.18.12-hp-designjet_z6200_60in_photo-ps.ppd.gz	sharp-20191219-Sharp-MX-4140FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4140N-ps.ppd.gz	sharp-20191219-Sharp-MX-4141FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4141N-ps.ppd.gz	sharp-20191219-Sharp-MX-5140FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5140N-ps.ppd.gz	sharp-20191219-Sharp-MX-5141FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5141N-ps.ppd.gz	xerox-20190225-xrx5875.ppd.gz	sharp-20191219-Sharp-MX-4110FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4110N-ps.ppd.gz	sharp-20191219-Sharp-MX-4111FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4111N-ps.ppd.gz	sharp-20191219-Sharp-MX-4112N-ps.ppd.gz	sharp-20191219-Sharp-MX-5110FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5110N-ps.ppd.gz	sharp-20191219-Sharp-MX-5111FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5111N-ps.ppd.gz	sharp-20191219-Sharp-MX-5112N-ps.ppd.gz	xerox-20190225-xrx5330.ppd.gz	foomatic-20200219-Samsung-CLP-770-Postscript-Samsung.ppd.gz	sharp-20180409-Sharp-MX-2640NR-ps.ppd.gz	sharp-20180409-Sharp-MX-3140NR-ps.ppd.gz	sharp-20180409-Sharp-MX-3640NR-ps.ppd.gz	sharp-20191219-Sharp-MX-2640FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2640N-ps.ppd.gz	sharp-20191219-Sharp-MX-3140FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3140N-ps.ppd.gz	sharp-20191219-Sharp-MX-3640FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3640N-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_4000ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_4020ps-ps.ppd.gz	sharp-20191219-Sharp-MX-2610FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2610N-ps.ppd.gz	sharp-20191219-Sharp-MX-3110FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3110N-ps.ppd.gz	sharp-20191219-Sharp-MX-3610FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3610N-ps.ppd.gz	sharp-20190711-Sharp-MX-M654FN-ps-jp.ppd.gz	sharp-20190711-Sharp-MX-M754FN-ps-jp.ppd.gz	foomatic-20200219-Kyocera-FS-7028M-Postscript-Kyocera.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_42in_photo-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_60in_photo-ps.ppd.gz	foomatic-20200219-Oki-C9200-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C9400-Postscript-Oki.ppd.gz	foomatic-20200219-Samsung-SCX-6545-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-7000-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-ML-8850_8950-Postscript-Samsung.ppd.gz	
foomatic-20200219-Samsung-SCX-6x55-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-SCX-6x55X-Postscript-Samsung.ppd.gz	xerox-20191030-xrxC8000.ppd.gz	xerox-20191030-xrxC9000.ppd.gz	sharp-20191219-Sharp-MX-3600FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4100FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4100N-ps.ppd.gz	sharp-20191219-Sharp-MX-4101FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4101N-ps.ppd.gz	sharp-20191219-Sharp-MX-5000FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5000N-ps.ppd.gz	sharp-20191219-Sharp-MX-5001FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5001N-ps.ppd.gz	foomatic-20200219-Epson-LP-8800CPS-Postscript-Epson.ppd.gz	foomatic-20200219-Oce-VarioPrint_2105PS-Postscript-Oce.ppd.gz	foomatic-20200219-Kyocera-FS-9000-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-SCX-681x-Postscript-Samsung.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_4500mfp.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_4500ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_4520mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_4520ps-ps.ppd.gz	foomatic-20200219-Ricoh-DDP_92-Postscript-Ricoh.ppd.gz	sharp-20191219-Sharp-MX-2600FG-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2600FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2600G-ps.ppd.gz	sharp-20191219-Sharp-MX-2600N-ps.ppd.gz	sharp-20191219-Sharp-MX-3100FG-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3100FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3100G-ps.ppd.gz	sharp-20191219-Sharp-MX-3100N-ps.ppd.gz	foomatic-20200219-Epson-LP-9500CPS-Postscript-Epson.ppd.gz	foomatic-20200219-Lexmark-X203n-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X204n-Postscript-Lexmark.ppd.gz	xerox-20191030-xrxB8045.ppd.gz	xerox-20191030-xrxB8055.ppd.gz	xerox-20191030-xrxB8065.ppd.gz	xerox-20191030-xrxB8075.ppd.gz	xerox-20191030-xrxB8090.ppd.gz	foomatic-20200219-Epson-LP-9800C-Postscript-Epson.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps_monochrome-ps.ppd.gz	foomatic-20200219-Samsung-M408x-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-C5015N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-AL-M8000-Postscript-Epson.ppd.gz	xerox-20201014-xrxC8000W.ppd.gz	foomatic-20200219-Ricoh-DDP_70-Postscript-Ricoh.ppd.gz	foomatic-20200219-Sharp-MX-2314NR-Postscript-Sharp.ppd.gz	sharp-20191219-Sharp-MX-2314N-ps.ppd.gz	foomatic-20200219-Canon-iR-ADV_8085_8095-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_8105-Postscript-Canon.ppd.gz	foomatic-20200219-Kyocera-CS-1650-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-1650-Postscript-Kyocera.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_24in-ps.ppd.gz	foomatic-20200219-Sharp-MX-2614NR-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-3114NR-Postscript-Sharp.ppd.gz	sharp-20191219-Sharp-MX-2514FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2517FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2616N-ps.ppd.gz	sharp-20191219-Sharp-MX-3114FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3116N-ps.ppd.gz	sharp-20191219-Sharp-MX-3117FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3614FN-ps-jp.ppd.gz	xerox-20191030-xrxB7025.ppd.gz	xerox-20191030-xrxB7030.ppd.gz	xerox-20191030-xrxB7035.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t770ps_24in-ps.ppd.gz	foomatic-20200219-Epson-AL-C8600_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Kyocera-CS-2050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-CS-2550-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-2050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-2550-Postscript-Kyocera.ppd.gz	sharp-20191219-Sharp-MX-1810U-ps.ppd.gz	sharp-20191219-Sharp-MX-2010U-ps.ppd.gz	
sharp-20191219-Sharp-MX-2310F-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2310U-ps.ppd.gz	sharp-20191219-Sharp-MX-3111F-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3111U-ps.ppd.gz	sharp-20191219-Sharp-MX-3611F-ps-jp.ppd.gz	sharp-20191219-Sharp-DX-2500N-ps.ppd.gz	foomatic-20200219-Kyocera-KM-6230-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-LP-9200C-Postscript-Epson.ppd.gz	foomatic-20200219-Toshiba-e-Studio_451c-Postscript-Toshiba.ppd.gz	foomatic-20200219-Epson-AL-C9200-Postscript-Epson.ppd.gz	sharp-20191219-Sharp-BP-10C20-ps.ppd.gz	sharp-20191219-Sharp-BP-20C20-ps.ppd.gz	sharp-20191219-Sharp-BP-20C25-ps.ppd.gz	sharp-20191219-Sharp-DX-20C20-ps-jp.ppd.gz	foomatic-20200219-Samsung-SCX-4x28-Postscript-Samsung.ppd.gz	foomatic-20200219-Sharp-MX-M860-Postscript-Sharp.ppd.gz	sharp-20191219-Sharp-MX-2300FG-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2300G-ps.ppd.gz	sharp-20191219-Sharp-MX-2300N-ps.ppd.gz	sharp-20191219-Sharp-MX-2700FG-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2700G-ps.ppd.gz	sharp-20191219-Sharp-MX-2700N-ps.ppd.gz	sharp-20191219-Sharp-MX-3500FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3500N-ps.ppd.gz	sharp-20191219-Sharp-MX-3501FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3501N-ps.ppd.gz	sharp-20191219-Sharp-MX-4500FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4500N-ps.ppd.gz	sharp-20191219-Sharp-MX-4501FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4501N-ps.ppd.gz	foomatic-20200219-Canon-iR-ADV_6055_6065-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_6075-Postscript-Canon.ppd.gz	foomatic-20200219-Sharp-AR-266FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-266S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M236_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M237_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M276_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M277_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Canon-iR-ADV_4025_4035-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_4045_4051-Postscript-Canon.ppd.gz	sharp-20191219-Sharp-MX-5500N-ps.ppd.gz	sharp-20191219-Sharp-MX-6200N-ps.ppd.gz	sharp-20191219-Sharp-MX-6201N-ps.ppd.gz	sharp-20191219-Sharp-MX-7000N-ps.ppd.gz	sharp-20191219-Sharp-MX-7001N-ps.ppd.gz	foomatic-20200219-Epson-LP-9100PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Kyocera-KM-4230_5230-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-M403x-Postscript-Samsung.ppd.gz	sharp-20191230-Sharp-MX-B356W-ps.ppd.gz	sharp-20191230-Sharp-MX-B376W-ps.ppd.gz	sharp-20191230-Sharp-MX-B456W-ps.ppd.gz	sharp-20191230-Sharp-MX-B476W-ps.ppd.gz	foomatic-20200219-Epson-LP-8300CPD-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-LP-8500CPD-Postscript-Epson.ppd.gz	sharp-20191219-Sharp-MX-2301N-ps.ppd.gz	foomatic-20200219-Toshiba-e-Studio_282-Postscript-Toshiba.ppd.gz	sharp-20191219-Sharp-MX-M365FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M365N-ps.ppd.gz	sharp-20191219-Sharp-MX-M465FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M465N-ps.ppd.gz	sharp-20191219-Sharp-MX-M565FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M565N-ps.ppd.gz	sharp-20191219-Sharp-MX-M364N-ps.ppd.gz	sharp-20191219-Sharp-MX-M464FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M464N-ps.ppd.gz	sharp-20191219-Sharp-MX-M564FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M564N-ps.ppd.gz	sharp-20191219-Sharp-DX-2000U-ps.ppd.gz	foomatic-20200219-Kyocera-FS-6020-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-SCX-5835_5935-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-SCX-5835_5935X-Postscript-Samsung.ppd.gz	xerox-20191030-xrxC7000.ppd.gz	xerox-20190225-xrxd95cp.ppd.gz	foomatic-20200219-Samsung-ML-371x-Postscript-Samsung.ppd.gz	
foomatic-20200219-Samsung-ML-375x-Postscript-Samsung.ppd.gz	foomatic-20200219-Sharp-AR-BC260-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-BC320-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C170FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C170M-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C172FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C172M-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C260-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C260F-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C260FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C260M-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C260S-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C261F-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C261FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C261M-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C261S-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C262FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C262M-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C262S-Postscript-Sharp.ppd.gz	foomatic-20200219-Oki-C8800-Postscript-Oki.ppd.gz	foomatic-20200219-Samsung-SCX-5635-Postscript-Samsung.ppd.gz	foomatic-20200219-Imagistics-im8530-Postscript-Oce.ppd.gz	foomatic-20200219-Sharp-AR-C260P-Postscript-Sharp.ppd.gz	foomatic-20200219-Oki-C7200-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C7400-Postscript-Oki.ppd.gz	foomatic-20200219-Toshiba-e-Studio_452-Postscript-Toshiba.ppd.gz	foomatic-20200219-Epson-AL-C4000_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Canon-LBP712C_PPD-Postscript-Canon.ppd.gz	sharp-20191219-Sharp-MX-1800N-ps.ppd.gz	foomatic-20200219-Epson-AL-C4100-Postscript-Epson.ppd.gz	sharp-20180409-Sharp-MX-M283N-ps.ppd.gz	sharp-20180409-Sharp-MX-M363N-ps.ppd.gz	sharp-20180409-Sharp-MX-M453N-ps.ppd.gz	sharp-20180409-Sharp-MX-M503N-ps.ppd.gz	sharp-20191219-Sharp-MX-M265N-ps.ppd.gz	sharp-20191219-Sharp-MX-M265NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M265U-ps.ppd.gz	sharp-20191219-Sharp-MX-M265UV-ps.ppd.gz	sharp-20191219-Sharp-MX-M266N-ps.ppd.gz	sharp-20191219-Sharp-MX-M266NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M315N-ps.ppd.gz	sharp-20191219-Sharp-MX-M315NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M315U-ps.ppd.gz	sharp-20191219-Sharp-MX-M315UV-ps.ppd.gz	sharp-20191219-Sharp-MX-M316N-ps.ppd.gz	sharp-20191219-Sharp-MX-M316NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M356N-ps.ppd.gz	sharp-20191219-Sharp-MX-M356NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M356U-ps.ppd.gz	sharp-20191219-Sharp-MX-M356UV-ps.ppd.gz	xerox-20190711-xrwc3335.ppd.gz	xerox-20190711-xrwc3345.ppd.gz	xerox-20190711-xrx3330.ppd.gz	sharp-20180409-Sharp-AR-M452U-ps.ppd.gz	sharp-20180409-Sharp-MX-M282N-ps.ppd.gz	sharp-20180409-Sharp-MX-M362N-ps.ppd.gz	sharp-20180409-Sharp-MX-M363U-ps.ppd.gz	sharp-20180409-Sharp-MX-M452N-ps.ppd.gz	sharp-20180409-Sharp-MX-M453U-ps.ppd.gz	sharp-20180409-Sharp-MX-M502N-ps.ppd.gz	sharp-20180409-Sharp-MX-M503U-ps.ppd.gz	sharp-20180409-Sharp-MX-M363F-ps-jp.ppd.gz	sharp-20180409-Sharp-MX-M423F-ps-jp.ppd.gz	sharp-20180409-Sharp-MX-M503F-ps-jp.ppd.gz	foomatic-20200219-Kyocera-FS-1800-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-1800plus-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-3800-Postscript-Kyocera.ppd.gz	foomatic-20200219-Sharp-MX-M260-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M260FP-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M260N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M310-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M310FP-Postscript-Sharp.ppd.gz	
foomatic-20200219-Sharp-MX-M310N-Postscript-Sharp.ppd.gz	foomatic-20200219-Kyocera-FS-3820N-Postscript-Kyocera.ppd.gz	sharp-20191230-Sharp-MX-B355W-ps.ppd.gz	sharp-20191230-Sharp-MX-B455W-ps.ppd.gz	foomatic-20200219-Epson-EPL-N7000-Postscript-Epson.ppd.gz	sharp-20191219-Sharp-MX-M266FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M266FV-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M316FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M316FV-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M356FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M356FV-ps-jp.ppd.gz	foomatic-20200219-Kyocera-FS-3900DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-4000DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-C5016N-Postscript-Kyocera.ppd.gz	sharp-20191219-Sharp-MX-M316G-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M316GV-ps-jp.ppd.gz	foomatic-20200219-Samsung-CLP-660-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-6200-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-6240-Postscript-Samsung.ppd.gz	xerox-20191030-Xerox_Phaser_7800DN.ppd.gz	xerox-20191030-Xerox_Phaser_7800DX.ppd.gz	xerox-20191030-Xerox_Phaser_7800GX.ppd.gz	foomatic-20200219-Epson-EPL-N2500_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Toshiba-e-Studio_850-Postscript-Toshiba.ppd.gz	foomatic-20200219-Canon-LBP710C_PPD-Postscript-Canon.ppd.gz	foomatic-20200219-Kyocera-FS-3830N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-6026-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-6950DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-1920-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-AL-C1900_PS3-Postscript-Epson.ppd.gz	xerox-20190225-xrx3655s.ppd.gz	sharp-20191230-Sharp-DX-C310-ps.ppd.gz	sharp-20191230-Sharp-DX-C311-ps.ppd.gz	sharp-20191230-Sharp-DX-C380-ps.ppd.gz	sharp-20191230-Sharp-DX-C381-ps.ppd.gz	sharp-20191230-Sharp-DX-C400-ps.ppd.gz	sharp-20191230-Sharp-DX-C401-ps.ppd.gz	sharp-20191230-Sharp-MX-C310-ps.ppd.gz	sharp-20191230-Sharp-MX-C311-ps.ppd.gz	sharp-20191230-Sharp-MX-C380-ps.ppd.gz	sharp-20191230-Sharp-MX-C380P-ps.ppd.gz	sharp-20191230-Sharp-MX-C381-ps.ppd.gz	sharp-20191230-Sharp-MX-C400-ps.ppd.gz	sharp-20191230-Sharp-MX-C400P-ps.ppd.gz	sharp-20191230-Sharp-MX-C401-ps.ppd.gz	foomatic-20200219-Samsung-ML-2855-Postscript-Samsung.ppd.gz	foomatic-20200219-Toshiba-GL-1020-Postscript-Toshiba.ppd.gz	foomatic-20200219-Toshiba-GL-1010-Postscript-Toshiba.ppd.gz	foomatic-20200219-Samsung-ML-3470-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-3475-Postscript-Samsung.ppd.gz	sharp-20191219-Sharp-MX-M264NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M314NV-ps.ppd.gz	foomatic-20200219-Kyocera-FS-1900-Postscript-Kyocera.ppd.gz	foomatic-20200219-Sharp-MX-M264NR-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M314NR-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M354NR-Postscript-Sharp.ppd.gz	sharp-20191219-Sharp-MX-M264FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M264N-ps.ppd.gz	sharp-20191219-Sharp-MX-M264U-ps.ppd.gz	sharp-20191219-Sharp-MX-M314FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M314N-ps.ppd.gz	sharp-20191219-Sharp-MX-M314U-ps.ppd.gz	sharp-20191219-Sharp-MX-M354FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M354N-ps.ppd.gz	sharp-20191219-Sharp-MX-M354U-ps.ppd.gz	foomatic-20200219-Epson-AL-MX20-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-AL-MX21-Postscript-Epson.ppd.gz	foomatic-20200219-Canon-LBP7780C_5480-Postscript-Canon.ppd.gz	foomatic-20200219-Epson-AL-C2000_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Samsung-CLP-350-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-6900-Postscript-Kyocera.ppd.gz	
foomatic-20200219-Gestetner-P7032-Postscript-Gestetner.ppd.gz	foomatic-20200219-Lanier-2132-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-P7032-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP3200-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-SLP32-Postscript-Savin.ppd.gz	foomatic-20200219-Oki-B4300-Postscript-Oki.ppd.gz	sharp-20180409-Sharp-MX-M623-ps-jp.ppd.gz	sharp-20180409-Sharp-MX-M623N-ps.ppd.gz	sharp-20180409-Sharp-MX-M623U-ps.ppd.gz	sharp-20180409-Sharp-MX-M753-ps-jp.ppd.gz	sharp-20180409-Sharp-MX-M753N-ps.ppd.gz	sharp-20180409-Sharp-MX-M753U-ps.ppd.gz	foomatic-20200219-Kyocera-FS-2000D-Postscript-Kyocera.ppd.gz	foomatic-20200219-Oki-B4350-Postscript-Oki.ppd.gz	foomatic-20200219-Kyocera-FS-3750-Postscript-Kyocera.ppd.gz	sharp-20190711-Sharp-MX-C250-ps.ppd.gz	sharp-20190711-Sharp-MX-C250E-ps.ppd.gz	sharp-20190711-Sharp-MX-C250F-ps.ppd.gz	sharp-20190711-Sharp-MX-C250FE-ps.ppd.gz	sharp-20190711-Sharp-MX-C250FR-ps.ppd.gz	sharp-20190711-Sharp-MX-C300-ps.ppd.gz	sharp-20190711-Sharp-MX-C300E-ps.ppd.gz	sharp-20190711-Sharp-MX-C300F-ps.ppd.gz	sharp-20190711-Sharp-MX-C300P-ps.ppd.gz	sharp-20190711-Sharp-MX-C300PE-ps.ppd.gz	sharp-20190711-Sharp-MX-C300PL-ps.ppd.gz	sharp-20190711-Sharp-MX-C300W-ps.ppd.gz	sharp-20190711-Sharp-MX-C300WE-ps.ppd.gz	sharp-20190711-Sharp-MX-C300WR-ps.ppd.gz	sharp-20190711-Sharp-MX-C301-ps.ppd.gz	sharp-20190711-Sharp-MX-C301F-ps.ppd.gz	sharp-20190711-Sharp-MX-C301W-ps.ppd.gz	foomatic-20200219-Kyocera-FS-6750-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-ML-2850-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-2853-Postscript-Samsung.ppd.gz	foomatic-20200219-Brother-HL-3260N-Postscript-Brother.ppd.gz	foomatic-20200219-Sharp-AR-555M_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-555S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-625M_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-625S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M550N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M550U-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M620N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M620U-Postscript-Sharp.ppd.gz	foomatic-20200219-Kyocera-FS-6700-Postscript-Kyocera.ppd.gz	foomatic-20200219-Sharp-AR-705M_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-705S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M700N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M700U-Postscript-Sharp.ppd.gz	foomatic-20200219-Canon-LBP8780-Postscript-Canon.ppd.gz	foomatic-20200219-Epson-AL-M2400-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-AL-M2410-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-EPL-5900_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-EPL-6100_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Kyocera-FS-1714M-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-HL-3450CN-Postscript-Brother.ppd.gz	xerox-20191030-Xerox_VersaLink_C600.ppd.gz	foomatic-20200219-Kyocera-KM-2030-Postscript-Kyocera.ppd.gz	xerox-20191030-Xerox_VersaLink_C500.ppd.gz	foomatic-20200219-Canon-LBP7680C_5280-Postscript-Canon.ppd.gz	xerox-20191030-Xerox_VersaLink_C605.ppd.gz	xerox-20190225-xrx4622.ppd.gz	foomatic-20200219-Epson-AL-M2000-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-AL-M2010-Postscript-Epson.ppd.gz	foomatic-20200219-Brother-HL-2600CN-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-FS-1200-Postscript-Kyocera.ppd.gz	foomatic-20200219-Oce-9260-Postscript2-Oce.ppd.gz	xerox-20191030-Xerox_VersaLink_C505.ppd.gz	xerox-20190711-xrx6510.ppd.gz	xerox-20190711-xrx6515.ppd.gz	xerox-20191030-xrxC400.ppd.gz	
xerox-20191030-xrxC405.ppd.gz	foomatic-20200219-Epson-AL-C2600-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-EPL-N2550-Postscript-Epson.ppd.gz	foomatic-20200219-Kyocera-FS-1700-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-C48x-Postscript-Samsung.ppd.gz	foomatic-20200219-Epson-AL-2600-Postscript-Epson.ppd.gz	xerox-20191030-xrxB405.ppd.ppd.gz	xerox-20191030-xrxb600.ppd.gz	xerox-20191030-xrxb605.ppd.gz	xerox-20191030-xrxb610.ppd.gz	xerox-20191030-xrxb615.ppd.gz	foomatic-20200219-Samsung-C460-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-C470-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-3300-Postscript-Samsung.ppd.gz	xerox-20191030-xrxB400.ppd.ppd.gz	foomatic-20200219-Kyocera-FS-3700-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-1700plus-Postscript-Kyocera.ppd.gz	foomatic-20200219-Toshiba-e-Studio_205-Postscript-Toshiba.ppd.gz	foomatic-20200219-Epson-AL-C2800-Postscript-Epson.ppd.gz	sharp-20191230-Sharp-AR-B350W-ps-jp.ppd.gz	sharp-20191230-Sharp-AR-B351-ps.ppd.gz	sharp-20191230-Sharp-AR-B351F-ps.ppd.gz	sharp-20191230-Sharp-AR-B351W-ps.ppd.gz	sharp-20191230-Sharp-AR-B352P-ps.ppd.gz	sharp-20191230-Sharp-AR-B353P-ps.ppd.gz	sharp-20191230-Sharp-AR-B451-ps.ppd.gz	sharp-20191230-Sharp-AR-B451F-ps.ppd.gz	sharp-20191230-Sharp-AR-B451W-ps.ppd.gz	sharp-20191230-Sharp-AR-B452P-ps.ppd.gz	sharp-20191230-Sharp-AR-B453P-ps.ppd.gz	sharp-20191230-Sharp-MX-B350-ps.ppd.gz	sharp-20191230-Sharp-MX-B350F-ps.ppd.gz	sharp-20191230-Sharp-MX-B350P-ps.ppd.gz	sharp-20191230-Sharp-MX-B350W-ps.ppd.gz	sharp-20191230-Sharp-MX-B351P-ps.ppd.gz	sharp-20191230-Sharp-MX-B450-ps.ppd.gz	sharp-20191230-Sharp-MX-B450F-ps.ppd.gz	sharp-20191230-Sharp-MX-B450P-ps.ppd.gz	sharp-20191230-Sharp-MX-B450W-ps.ppd.gz	sharp-20191230-Sharp-MX-B451P-ps.ppd.gz	foomatic-20200219-Canon-LBP6780_3580-Postscript-Canon.ppd.gz	foomatic-20200219-Epson-AL-C4200-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-AL-C3800-Postscript-Epson.ppd.gz	foomatic-20200219-Canon-LBP7660C-Postscript-Canon.ppd.gz	sharp-20191219-Sharp-AR-6020D-ps.ppd.gz	sharp-20191219-Sharp-AR-6020N-ps.ppd.gz	sharp-20191219-Sharp-AR-6020NR-ps.ppd.gz	sharp-20191219-Sharp-AR-6023D-ps.ppd.gz	sharp-20191219-Sharp-AR-6023N-ps.ppd.gz	sharp-20191219-Sharp-AR-6023NR-ps.ppd.gz	foomatic-20200219-Kyocera-FS-6300-Postscript-Kyocera.ppd.gz	foomatic-20200219-Oce-VarioPrint_2045PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2050PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2055PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2060PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2065PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2070PS-Postscript-Oce.ppd.gz	foomatic-20200219-Kyocera-FS-3700plus-Postscript-Kyocera.ppd.gz	sharp-20191219-Sharp-AR-6026N-ps.ppd.gz	sharp-20191219-Sharp-AR-6026NR-ps.ppd.gz	sharp-20191219-Sharp-AR-6031N-ps.ppd.gz	sharp-20191219-Sharp-AR-6031NR-ps.ppd.gz	sharp-20191219-Sharp-AR-6120N-ps.ppd.gz	sharp-20191219-Sharp-AR-6131N-ps.ppd.gz	foomatic-20200219-Epson-EPL-6200-Postscript-Epson.ppd.gz	sharp-20191230-Sharp-MX-B380P-ps.ppd.gz	sharp-20191230-Sharp-MX-B381-ps.ppd.gz	sharp-20191230-Sharp-MX-B382-ps.ppd.gz	sharp-20191230-Sharp-MX-B382P-ps.ppd.gz	sharp-20191230-Sharp-MX-B382SC-ps.ppd.gz	sharp-20191230-Sharp-MX-B400P-ps.ppd.gz	sharp-20191230-Sharp-MX-B401-ps.ppd.gz	sharp-20191230-Sharp-MX-B402-ps.ppd.gz	sharp-20191230-Sharp-MX-B402SC-ps.ppd.gz	foomatic-20200219-Kyocera-Ci-1100-Postscript-Kyocera.ppd.gz	foomatic-20200219-Oce-9230-Postscript2-Oce.ppd.gz	foomatic-20200219-Oce-9245-Postscript2-Oce.ppd.gz	
foomatic-20200219-Kyocera-FS-5800C-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-5900C-Postscript-Kyocera.ppd.gz	foomatic-20200219-Sharp-AR-N182FG-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-N182G-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M182D-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M202D-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M232D-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M165_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-205FG_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-205G_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M206_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M207_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Oce-VarioPrint_2100PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2110PS-Postscript-Oce.ppd.gz	foomatic-20200219-Brother-HL-2460-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-7050-Postscript-Brother.ppd.gz	foomatic-20200219-Sharp-AR-M205_PS-Postscript-Sharp.ppd.gz	sharp-20191219-Sharp-AR-6020-ps.ppd.gz	sharp-20191219-Sharp-AR-6023-ps.ppd.gz	sharp-20191219-Sharp-AR-G200-ps-jp.ppd.gz	foomatic-20200219-Gestetner-F9199_9199nf-Postscript-Gestetner.ppd.gz	foomatic-20200219-Lanier-LF510_515e-Postscript-Lanier.ppd.gz	foomatic-20200219-Oce-3145PS-Postscript2-Oce.ppd.gz	foomatic-20200219-Oce-3155PS-Postscript2-Oce.ppd.gz	foomatic-20200219-Oce-3165PS-Postscript2-Oce.ppd.gz	foomatic-20200219-Ricoh-FAX5510L_5510NF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-FAX3799_3799nf-Postscript-Savin.ppd.gz	foomatic-20200219-Kyocera-FS-6500plus-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-1820-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-SCX-6x45-Postscript-Samsung.ppd.gz	xerox-20190225-xr6605dn.ppd.gz	foomatic-20200219-Kyocera-FS-1030D-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-EPL-N3000-Postscript-Epson.ppd.gz	foomatic-20200219-Sharp-AR-163G_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M161_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M162_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M182-Postscript-Sharp.ppd.gz	foomatic-20200219-Kyocera-FS-1020D-Postscript-Kyocera.ppd.gz	foomatic-20200219-Sharp-AR-163FG_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Kyocera-KM-1530-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-AL-CX21-Postscript-Epson.ppd.gz	foomatic-20200219-Sharp-AR-200M_PS-Postscript-Sharp.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c226.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c227.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c258.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c266.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c287.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c308.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c3351.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c368.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c3851.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c3851fs.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c458.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c558.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c658.ppd.gz	
konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c659.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c759.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c250i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c300i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c3300i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c3320i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c3350i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c360i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c4000i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c4050i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c450i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c550i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c650i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-workplace-hub.ppd.gz	foomatic-20200219-Brother-HL-4050CDN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-4070CDW-Postscript-Brother.ppd.gz	foomatic-20200219-Ricoh-EMP_156-Postscript-Ricoh.ppd.gz	foomatic-20200219-Epson-AL-M4000-Postscript-Epson.ppd.gz	foomatic-20200219-Brother-DCP-9045CDN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-9450CDN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC9840CDW-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-FS-1118MFP-Postscript-Kyocera.ppd.gz	foomatic-20200219-Oki-14i-Postscript-Oki.ppd.gz	foomatic-20200219-Brother-MFC-9440CN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-8050N-Postscript-Brother.ppd.gz	foomatic-20200219-Sharp-AR-160M_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-5220-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M160_PS-Postscript-Sharp.ppd.gz	foomatic-20191029-BR5070DN_GPL.ppd.gz	foomatic-20200219-Brother-HL-2700CN-Postscript-Brother.ppd.gz	foomatic-20200219-Sharp-AR-155FG_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-168D-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M155_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Brother-DCP-9040CN-Postscript-Brother.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-226i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-246i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-266i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-306i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-227.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-287.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-308.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-308e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-367.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-368.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-368e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-4052.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-458.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-458e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-4752.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-558.ppd.gz	
konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-558e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-658e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-758-jp-eu.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-808-us.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-958.ppd.gz	foomatic-20191029-shar208d.ppd.gz	foomatic-20191029-shmb201d.ppd.gz	foomatic-20200219-Sharp-AR-B07-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-NB2A-Postscript-Sharp.ppd.gz	foomatic-20200219-Canon-LBP6670-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-LBP6680_3480-Postscript-Canon.ppd.gz	foomatic-20200219-Oce-8445PS-Postscript2-Oce.ppd.gz	foomatic-20200219-Oce-8465PS-Postscript2-Oce.ppd.gz	foomatic-20200219-Brother-HL-3070CW-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-6050D_DN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-DCP-9010CN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-9010CN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-9120CN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-9320CW-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-6050-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-KM-1815-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-HL-5050-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-5070N-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-CS-1815-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-HL-5270DN-Postscript-Brother.ppd.gz	xerox-20190820-xrxosd.ppd.gz	foomatic-20200219-Brother-MFC-8670DN-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-FS-920-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-ML-4050-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-4055-Postscript-Samsung.ppd.gz	foomatic-20200219-Brother-HL-1850_70N-Postscript-Brother.ppd.gz	hp-20190918-hplip-3.19.6-hp-color_designjet_xl_3600-ps.ppd.gz	foomatic-20200219-Sharp-AR-168S-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M150_PS-Postscript-Sharp.ppd.gz	foomatic-20191029-shar208s.ppd.gz	foomatic-20191029-sharm200.ppd.gz	foomatic-20200219-Sharp-MX-NB11-Postscript-Sharp.ppd.gz	foomatic-20200219-Oce-VarioPrint_2090PS-Postscript-Oce.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_designjet_xl_3600-ps.ppd.gz	foomatic-20200219-Kyocera-FS-1018MFP-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-MFC-8820D-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-DCP-8025D-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-9420CN-Postscript-Brother.ppd.gz	foomatic-20200219-Oce-PPC3074PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC3094PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC3114PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC3073PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC3093PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC3113PS-Postscript-Oce.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1530-postscript.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t2530-postscript.ppd.gz	hp-20190918-hplip-3.19.6-hp-designjet_t2600dr-ps.ppd.gz	foomatic-20200219-Samsung-ML-4550-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-4555-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-1010-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-1050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-MFC-7450-Postscript-Brother.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1600dr-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t2600dr-ps.ppd.gz	
hplip-20200303-hplip-3.19.12-hp-designjet_t1600_printer-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t2600-ps.ppd.gz	foomatic-20200219-Brother-HL-5250DN-Postscript-Brother.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4100ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4100ps_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4600ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4600ps_mfp-ps.ppd.gz	foomatic-20200219-Brother-HL-5150D-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-5170DN-Postscript-Brother.ppd.gz	foomatic-20200219-Samsung-SCX-6x22-Postscript-Samsung.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_3900ps_mfp-ps.ppd.gz	foomatic-20200219-Brother-HL-5240-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-FS-1000-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-1510-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-1810-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-ML-2150-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-2550-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-8x00-Postscript-Samsung.ppd.gz	foomatic-20200219-Brother-HL-1650_70N-Postscript-Brother.ppd.gz	foomatic-20200219-Samsung-SCX-6x20-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-600-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-680-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-800-Postscript-Kyocera.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2553ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_3253ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_4053ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_5053ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_6053ciJ.ppd.gz	foomatic-20200219-Kyocera-FS-1000plus-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-DCP-8020-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8420-Postscript-Brother.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_7353ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_8353ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_5003iJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_6003iJ.ppd.gz	kyocera-20200416-Kyocera_CS_2554ci.ppd.gz	kyocera-20200416-Kyocera_CS_3554ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_2554ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3554ci.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P8060cdnJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2552ciJ.ppd.gz	foomatic-20200219-Oce-PPC5115PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC5160PS-Postscript-Oce.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_7003iJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_9003iJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P4060dnJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2460ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2470ciJ.ppd.gz	kyocera-20190328-Kyocera_CS_2552ci.ppd.gz	kyocera-20190328-Kyocera_CS_3252ci.ppd.gz	kyocera-20190328-Kyocera_CS_3552ci.ppd.gz	kyocera-20190328-Kyocera_CS_4052ci.ppd.gz	kyocera-20190328-Kyocera_CS_5052ci.ppd.gz	kyocera-20190328-Kyocera_CS_6052ci.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P8060cdn.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_2552ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3252ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3552ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_4052ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_5052ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_6052ci.ppd.gz	kyocera-20200416-Kyocera_CS_2553ci.ppd.gz	kyocera-20200416-Kyocera_CS_3253ci.ppd.gz	kyocera-20200416-Kyocera_CS_3553ci.ppd.gz	kyocera-20200416-Kyocera_CS_4053ci.ppd.gz	kyocera-20200416-Kyocera_CS_5053ci.ppd.gz	kyocera-20200416-Kyocera_CS_6053ci.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P4040dnJ.ppd.gz	
foomatic-20200219-Brother-HL-1450-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-1470N-Postscript-Brother.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2553ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_3253ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_3553ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_4053ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_5053ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_6053ci.ppd.gz	foomatic-20200219-Brother-DCP-8045D-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8640D-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8840D-Postscript-Brother.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P4140dnJ.ppd.gz	kyocera-20200416-Kyocera_CS_3050ci.ppd.gz	kyocera-20200416-Kyocera_CS_3051ci.ppd.gz	kyocera-20200416-Kyocera_CS_3550ci.ppd.gz	kyocera-20200416-Kyocera_CS_3551ci.ppd.gz	kyocera-20200416-Kyocera_CS_4550ci.ppd.gz	kyocera-20200416-Kyocera_CS_4551ci.ppd.gz	kyocera-20200416-Kyocera_CS_5550ci.ppd.gz	kyocera-20200416-Kyocera_CS_5551ci.ppd.gz	kyocera-20200416-Kyocera_FS-C8600DN.ppd.gz	kyocera-20200416-Kyocera_FS-C8650DN.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3050ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3551ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_4550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_4551ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_5550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_5551ci.ppd.gz	kyocera-20190328-Kyocera_CS_7052ci.ppd.gz	kyocera-20190328-Kyocera_CS_8052ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_7052ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_8052ci.ppd.gz	kyocera-20200416-Kyocera_CS_6550ci.ppd.gz	kyocera-20200416-Kyocera_CS_6551ci.ppd.gz	kyocera-20200416-Kyocera_CS_7353ci.ppd.gz	kyocera-20200416-Kyocera_CS_7550ci.ppd.gz	kyocera-20200416-Kyocera_CS_7551ci.ppd.gz	kyocera-20200416-Kyocera_CS_8353ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_6550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_6551ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_7353ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_7550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_7551ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_8353ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2510iJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2520iJ.ppd.gz	kyocera-20190328-Kyocera_CS_2551ci.ppd.gz	kyocera-20190328-Kyocera_CS_4002i.ppd.gz	kyocera-20190328-Kyocera_CS_5002i.ppd.gz	kyocera-20190328-Kyocera_CS_6002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_2551ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_4002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_5002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_6002i.ppd.gz	kyocera-20200416-Kyocera_CS_250ci.ppd.gz	kyocera-20200416-Kyocera_CS_300ci.ppd.gz	kyocera-20200416-Kyocera_CS_4003i.ppd.gz	kyocera-20200416-Kyocera_CS_400ci.ppd.gz	kyocera-20200416-Kyocera_CS_5003i.ppd.gz	kyocera-20200416-Kyocera_CS_500ci.ppd.gz	kyocera-20200416-Kyocera_CS_552ci.ppd.gz	kyocera-20200416-Kyocera_CS_6003i.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P4060dn.ppd.gz	kyocera-20200416-Kyocera_FS-C8500DN.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_250ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_300ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_400ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_500ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_552ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_3212iJ.ppd.gz	foomatic-20200219-Brother-DCP-8040-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8220-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8440-Postscript-Brother.ppd.gz	kyocera-20200416-Kyocera_CS_2550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_2550ci.ppd.gz	
kyocera-20200716-Kyocera_TASKalfa_4003i.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_4012iJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_5003i.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_6003i.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M8024cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M8124cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M8130cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P4035dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P4040dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P4045dn.ppd.gz	kyocera-20200416-Kyocera_CS_205c.ppd.gz	kyocera-20200416-Kyocera_CS_255c.ppd.gz	kyocera-20200416-Kyocera_FS-C8020MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C8025MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C8520MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C8525MFP.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_205c.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_255c.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3060ci.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M8224cidn.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M8228cidn.ppd.gz	kyocera-20200416-Kyocera_CS_3500i.ppd.gz	kyocera-20200416-Kyocera_CS_3501i.ppd.gz	kyocera-20200416-Kyocera_CS_4500i.ppd.gz	kyocera-20200416-Kyocera_CS_4501i.ppd.gz	kyocera-20200416-Kyocera_CS_5500i.ppd.gz	kyocera-20200416-Kyocera_CS_5501i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3500i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3501i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_4501i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_5500i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_5501i.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_358ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_408ciJ.ppd.gz	foomatic-20200219-Samsung-ML-2570-Postscript-Samsung.ppd.gz	kyocera-20190328-Kyocera_CS_7002i.ppd.gz	kyocera-20190328-Kyocera_CS_8002i.ppd.gz	kyocera-20190328-Kyocera_CS_9002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_7002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_8002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_9002i.ppd.gz	kyocera-20200211-Kyocera_TASKalfa_7003i.ppd.gz	kyocera-20200416-Kyocera_CS_6500i.ppd.gz	kyocera-20200416-Kyocera_CS_6501i.ppd.gz	kyocera-20200416-Kyocera_CS_7003i.ppd.gz	kyocera-20200416-Kyocera_CS_8000i.ppd.gz	kyocera-20200416-Kyocera_CS_8001i.ppd.gz	kyocera-20200416-Kyocera_CS_8003i.ppd.gz	kyocera-20200416-Kyocera_CS_9003i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_6500i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_6501i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_8000i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_8001i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_8003i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_9003i.ppd.gz	kyocera-20190328-Kyocera_CS_3011i.ppd.gz	kyocera-20190328-Kyocera_CS_3212i.ppd.gz	kyocera-20190328-Kyocera_CS_3511i.ppd.gz	kyocera-20190328-Kyocera_CS_4012i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3011i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3212i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3511i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_4012i.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P4135dn.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P4140dn.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P4145dn.ppd.gz	kyocera-20200416-Kyocera_FS-6970DN.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M6635cidnJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P6230cdnJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P7240cdnJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_352ciJ.ppd.gz	kyocera-20190328-Kyocera_CS_3010i.ppd.gz	kyocera-20190328-Kyocera_CS_3510i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3010i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3510i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_4500i.ppd.gz	kyocera-20200416-Kyocera_CS_300i.ppd.gz	kyocera-20200416-Kyocera_CS_420i.ppd.gz	
kyocera-20200416-Kyocera_CS_520i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_300i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_420i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_520i.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M4028idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M4125idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M4132idn.ppd.gz	kyocera-20200416-Kyocera_CS_255.ppd.gz	kyocera-20200416-Kyocera_CS_305.ppd.gz	kyocera-20200416-Kyocera_FS-6025MFP.ppd.gz	kyocera-20200416-Kyocera_FS-6030MFP.ppd.gz	kyocera-20200416-Kyocera_FS-6525MFP.ppd.gz	kyocera-20200416-Kyocera_FS-6530MFP.ppd.gz	kyocera-20200416-Kyocera_FS-9130DN.ppd.gz	kyocera-20200416-Kyocera_FS-9530DN.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_255.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_305.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M4226idn.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M4230idn.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_4020i.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3060dnJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3160dnJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3145dnJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M2540dwJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M2640idwJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P2040dwJ.ppd.gz	kyocera-20190328-Kyocera_CS_306ci.ppd.gz	kyocera-20190328-Kyocera_CS_307ci.ppd.gz	kyocera-20190328-Kyocera_CS_356ci.ppd.gz	kyocera-20190328-Kyocera_CS_406ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_306ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_307ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_356ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_406ci.ppd.gz	kyocera-20190830-Kyocera_Generic_Color.ppd.gz	kyocera-20200416-Kyocera_CS_308ci.ppd.gz	kyocera-20200416-Kyocera_CS_358ci.ppd.gz	kyocera-20200416-Kyocera_CS_408ci.ppd.gz	kyocera-20200416-Kyocera_CS_508ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_358ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_408ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_508ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_308ci.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6026cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6030cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P7035cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5021cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5520cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5520cdw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5521cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5521cdw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5525cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5526cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5526cdw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5020cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5020cdw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5021cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5021cdw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5025cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5026cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5026cdw.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P5018cdn.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M3645idnJ.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6026cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6026cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6030cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6035cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6230cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6235cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6526cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6526cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6530cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6535cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6630cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6635cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6021cdn.ppd.gz	
kyocera-20190328-Kyocera_ECOSYS_P6035cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6130cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6230cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6235cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P7040cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P7240cdn.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_265ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_266ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_350ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_351ci.ppd.gz	kyocera-20200416-Kyocera_FS-C2026MFP+.ppd.gz	kyocera-20200416-Kyocera_FS-C2126MFP+.ppd.gz	kyocera-20200416-Kyocera_FS-C2526MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C2626MFP.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_352ci.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3040dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3040idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3540dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3540idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3550idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3560idn.ppd.gz	kyocera-20200416-Kyocera_FS-4100DN.ppd.gz	kyocera-20200416-Kyocera_FS-4200DN.ppd.gz	kyocera-20200416-Kyocera_FS-4300DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5250DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5300DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5350DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5400DN.ppd.gz	kyocera-20190328-Kyocera_FS-5040DN.ppd.gz	kyocera-20200416-Kyocera_FS-2100D.ppd.gz	kyocera-20200416-Kyocera_FS-2100DN.ppd.gz	kyocera-20200416-Kyocera_FS-3040MFP+.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3145dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3145idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3645dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3645idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3655idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3660idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P3045dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P3050dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P3055dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P3060dn.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_M3860idn.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_M3860idnf.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P3260dn.ppd.gz	kyocera-20200416-Kyocera_FS-C2026MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C2126MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C5100DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5150DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5200DN.ppd.gz	kyocera-20200416-Kyocera_FS-3540MFP.ppd.gz	kyocera-20200416-Kyocera_FS-3640MFP.ppd.gz	kyocera-20200416-Kyocera_FS-3920DN.ppd.gz	kyocera-20200416-Kyocera_FS-4020DN.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3145dn.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3150dn.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3155dn.ppd.gz	kyocera-20200716-TA_P-4531_MFP.ppd.gz	kyocera-20200716-TA_P-4531i_MFP.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2035dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2535dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2135dn.ppd.gz	kyocera-20200416-Kyocera_FS-1035MFP.ppd.gz	kyocera-20200416-Kyocera_FS-1135MFP.ppd.gz	kyocera-20200416-Kyocera_FS-2020D.ppd.gz	kyocera-20200416-Kyocera_FS-3040MFP.ppd.gz	kyocera-20200416-Kyocera_FS-3140MFP.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2030dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2040dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2135dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2235dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2530dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2540dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2540dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2635dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2635dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2640idw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2735dn.ppd.gz	
kyocera-20190328-Kyocera_ECOSYS_M2735dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2835dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2040dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2040dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2230dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2235dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2235dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2335d.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2335dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2335dw.ppd.gz	kyocera-20200416-Kyocera_FS-1030MFP.ppd.gz	kyocera-20200416-Kyocera_FS-1130MFP.ppd.gz	kyocera-20200416-Kyocera_FS-1028MFP.ppd.gz	kyocera-20200416-Kyocera_FS-1128MFP.ppd.gz	kyocera-20200416-Kyocera_FS-1320D.ppd.gz	kyocera-20200416-Kyocera_FS-1350DN.ppd.gz	kyocera-20200416-Kyocera_FS-1370DN.ppd.gz	kyocera-20200416-Kyocera_KM-2810.ppd.gz	kyocera-20200416-Kyocera_KM-2820.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2035d.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2135d.ppd.gz	kyocera-20190830-Kyocera_Generic_Monochrome.ppd.gz	kyocera-20200416-Kyocera_FS-1120D.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_4-ps.ppd.gz
-foomatic-20200219-Citizen-ProJet_IIc-cdj500.ppd.gz	foomatic-20200219-DEC-DECwriter_520ic-cdj500.ppd.gz	foomatic-20200219-Olivetti-JP470-cdj500.ppd.gz
+foomatic-20200219-Canon-iPR_C650_PPD-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iPR_C750_850_PPD-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iPR_C700_800-Postscript-Canon.ppd.gz	xerox-20190225-xr8580dn.ppd.gz	xerox-20190225-xrx8580n.ppd.gz	foomatic-20200219-Canon-iR-ADV_C7280-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C9270_9280-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_8205-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_8285_8295-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C7260_7270-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iPR_C600-Postscript-Canon.ppd.gz	foomatic-20200219-Samsung-X703-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-X7600-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-K703-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-K7600-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-X940e-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X945e-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C935-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C930-Postscript-Lexmark.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_1050eP-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Canon-iR-ADV_C5030_5035-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C5045_5051-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C5235_5240-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C5250_5255-Postscript-Canon.ppd.gz	foomatic-20200219-Lexmark-C782-Postscript-Lexmark.ppd.gz	foomatic-20200219-Canon-iR-ADV_6255_6265-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_6275-Postscript-Canon.ppd.gz	foomatic-20200219-Lexmark-C2132-Postscript-Lexmark.ppd.gz	foomatic-20200219-Canon-iR-ADV_C2220_2230-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C2225-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_4225_4235-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_4245_4251-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C3320L-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C3320-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C3325_3330-Postscript-Canon.ppd.gz	foomatic-20200219-Lexmark-C780-Postscript-Lexmark.ppd.gz	hplip-20201209-hplip-3.20.11-hp-color_laserjet_2605dtn-ps.ppd.gz	foomatic-20200219-Lexmark-C752-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C524-Postscript-Lexmark.ppd.gz	hplip-20201209-hplip-3.20.11-hp-color_laserjet_2700n-ps.ppd.gz	foomatic-20200219-Lexmark-C522-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C736-Postscript-Lexmark.ppd.gz	foomatic-20200219-Samsung-CLX-9252_9352-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-982x-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-C734-Postscript-Lexmark.ppd.gz	hplip-20201209-hplip-3.20.11-hp-color_laserjet_2550_series-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-color_laserjet_cm1017-ps.ppd.gz	foomatic-20200219-Lexmark-C520-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C546-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X546-Postscript-Lexmark.ppd.gz	kyocera-20180809-Kyocera_TASKalfa_3051ci.ppd.gz	foomatic-20200219-Lexmark-X544-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C544-Postscript-Lexmark.ppd.gz	foomatic-20200219-Samsung-SCX-8230_8240-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-SCX-882x-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-9250_9350-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-X734de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X736de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X738de-Postscript-Lexmark.ppd.gz	
foomatic-20200219-KONICA_MINOLTA-bizhub_C451-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Lexmark-X543-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C540-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C543-Postscript-Lexmark.ppd.gz	foomatic-20200219-Kyocera-CS-C2525E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-CS-C3225E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-CS-C3232E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-CS-C4035E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C2525E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C3225E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C3232E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C4035E_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C2520-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C3225-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-C3232-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-C8100DNplus_KPDL-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-C8100DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Lexmark-X860de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X862de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X864de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C750-Postscript-Lexmark.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t1500-postscript.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C550-Postscript-KONICA_MINOLTA.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t920-postscript.ppd.gz	lexmark-20201101-Lexmark_MS410_Series.ppd.gz	foomatic-20200219-Samsung-SCX-8030_8040-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-9120DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-9520DN-Postscript-Kyocera.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t930-postscript.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C250P-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C252P-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Samsung-X401-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-X4300-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-KM-3035-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-4035-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-5035-Postscript-Kyocera.ppd.gz	foomatic-20200219-Lexmark-W850-Postscript-Lexmark.ppd.gz	foomatic-20200219-Kyocera-KM-2530-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-3530-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-4030-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-6030-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-8030-Postscript-Kyocera.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t2500-postscript.ppd.gz	foomatic-20200219-Samsung-K401-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-K4350-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-C510-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-C910-Postscript-Lexmark.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C352P-Postscript-KONICA_MINOLTA.ppd.gz	sharp-20210601-Sharp-MX-7081-ps.ppd.gz	sharp-20210601-Sharp-MX-8081-ps.ppd.gz	foomatic-20200219-Lexmark-T650-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-T652-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-T654-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-TG654-Postscript-Lexmark.ppd.gz	foomatic-20200219-Kyocera-KM-3050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-4050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-5050-Postscript-Kyocera.ppd.gz	
foomatic-20200219-Lexmark-C912-Postscript-Lexmark.ppd.gz	lexmark-20200918-Lexmark_X658de.ppd.gz	foomatic-20200219-Lexmark-T656-Postscript-Lexmark.ppd.gz	foomatic-20200219-Oki-C9300-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C9500-Postscript-Oki.ppd.gz	sharp-20191230-Sharp-MX-7090N-ps.ppd.gz	sharp-20191230-Sharp-MX-8090N-ps.ppd.gz	foomatic-20200219-Samsung-X3220-Postscript-Samsung.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_750-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Samsung-K3250-Postscript-Samsung.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_500-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Lexmark-EG460dn-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-E360dn-Postscript-Lexmark.ppd.gz	foomatic-20200219-Kyocera-FS-C8026N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Lexmark-E260dn-Postscript-Lexmark.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C450P-Postscript-KONICA_MINOLTA.ppd.gz	foomatic-20200219-Kyocera-KM-C2630-Postscript-Kyocera.ppd.gz	sharp-20191219-Sharp-MX-3061-ps.ppd.gz	sharp-20191219-Sharp-MX-3071-ps.ppd.gz	sharp-20191219-Sharp-MX-3561-ps.ppd.gz	sharp-20191219-Sharp-MX-3571-ps.ppd.gz	sharp-20191219-Sharp-MX-4061-ps.ppd.gz	sharp-20191219-Sharp-MX-4071-ps.ppd.gz	sharp-20191219-Sharp-MX-5051-ps.ppd.gz	sharp-20191219-Sharp-MX-5071-ps.ppd.gz	sharp-20191219-Sharp-MX-6051-ps.ppd.gz	sharp-20191219-Sharp-MX-6071-ps.ppd.gz	lexmark-20200918-Lexmark_X651de.ppd.gz	lexmark-20200918-Lexmark_X652de.ppd.gz	lexmark-20200918-Lexmark_X654de.ppd.gz	lexmark-20200918-Lexmark_X656de.ppd.gz	sharp-20191219-Sharp-MX-2651-ps.ppd.gz	sharp-20191219-Sharp-MX-3051-ps.ppd.gz	sharp-20191219-Sharp-MX-3551-ps.ppd.gz	sharp-20191219-Sharp-MX-4051-ps.ppd.gz	oki-20210628-ES8434_PS.ppd.gz	oki-20210628-OKI_C834_PS.ppd.gz	oki-20210628-OKI_C844_PS.ppd.gz	foomatic-20200219-Samsung-CLX-92x1_93x1-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-981x-Postscript-Samsung.ppd.gz	oki-20210628-OKI_MC853_PS.ppd.gz	oki-20210628-OKI_MC873_PS.ppd.gz	oki-20210628-OKI_MC883_PS.ppd.gz	foomatic-20200219-KONICA_MINOLTA-bizhub_C351-Postscript-KONICA_MINOLTA.ppd.gz	epson-20200615-Epson-WF-M20590_Series_PS3.ppd.gz	sharp-20191230-Sharp-MX-6580N-ps.ppd.gz	sharp-20191230-Sharp-MX-7580N-ps.ppd.gz	foomatic-20200219-Samsung-CLX-8640_8650-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-C268x-Postscript-Samsung.ppd.gz	epson-20200615-Epson-WF-C20590_PS.ppd.gz	foomatic-20200219-Samsung-C2670-Postscript-Samsung.ppd.gz	oki-20210628-OKI_MC843_PS.ppd.gz	oki-20210628-OKI_MC863_PS.ppd.gz	sharp-20191219-Sharp-MX-2661-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3161-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3661-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4151-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4171-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5151-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5171-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-6151-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-6171-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2631-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3631-ps-jp.ppd.gz	foomatic-20200219-Samsung-C2620-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLP-680-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-6260-Postscript-Samsung.ppd.gz	epson-20200615-Epson-WF-C17590_Series_PS3.ppd.gz	oki-20200329-OKI-C833-PS.ppd.gz	oki-20200329-OKI-C843-PS.ppd.gz	sharp-20190711-Sharp-MX-6500N-ps.ppd.gz	sharp-20190711-Sharp-MX-7500N-ps.ppd.gz	foomatic-20200219-Kyocera-KM-6330-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-7530-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-SCX-8123_8128-Postscript-Samsung.ppd.gz	
foomatic-20200219-Samsung-SCX-881x-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-X363dn-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X364dn-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X364dw-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X463de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X464de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X466de-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X466dte-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X466dwe-Postscript-Lexmark.ppd.gz	xerox-20191030-xrxC8030.ppd.gz	xerox-20191030-xrxC8035.ppd.gz	xerox-20191030-xrxC8045.ppd.gz	xerox-20191030-xrxC8055.ppd.gz	xerox-20191030-xrxC8070.ppd.gz	xerox-20210715-xrxEC8036.ppd.gz	xerox-20210715-xrxEC8056.ppd.gz	foomatic-20200219-Samsung-M4370_5370-Postscript-Samsung.ppd.gz	foomatic-20200219-Lexmark-X264dn-Postscript-Lexmark.ppd.gz	xerox-20200129-xrxC9065.ppd.gz	xerox-20200129-xrxC9070.ppd.gz	foomatic-20200219-Oki-C9600-Postscript-Oki.ppd.gz	sharp-20191230-Sharp-MX-M2651-ps.ppd.gz	sharp-20191230-Sharp-MX-M3051-ps.ppd.gz	sharp-20191230-Sharp-MX-M3551-ps.ppd.gz	sharp-20191230-Sharp-MX-M4051-ps.ppd.gz	sharp-20191230-Sharp-MX-M5051-ps.ppd.gz	sharp-20191230-Sharp-MX-M6051-ps.ppd.gz	sharp-20191230-Sharp-MX-M3071-ps.ppd.gz	sharp-20191230-Sharp-MX-M3571-ps.ppd.gz	sharp-20191230-Sharp-MX-M4071-ps.ppd.gz	sharp-20191230-Sharp-MX-M5071-ps.ppd.gz	sharp-20191230-Sharp-MX-M6071-ps.ppd.gz	sharp-20191230-Sharp-MX-3060N-ps.ppd.gz	sharp-20191230-Sharp-MX-3060V-ps.ppd.gz	sharp-20191230-Sharp-MX-3070N-ps.ppd.gz	sharp-20191230-Sharp-MX-3070V-ps.ppd.gz	sharp-20191230-Sharp-MX-3560N-ps.ppd.gz	sharp-20191230-Sharp-MX-3560V-ps.ppd.gz	sharp-20191230-Sharp-MX-3570N-ps.ppd.gz	sharp-20191230-Sharp-MX-3570V-ps.ppd.gz	sharp-20191230-Sharp-MX-4060N-ps.ppd.gz	sharp-20191230-Sharp-MX-4060V-ps.ppd.gz	sharp-20191230-Sharp-MX-4070N-ps.ppd.gz	sharp-20191230-Sharp-MX-4070V-ps.ppd.gz	sharp-20191230-Sharp-MX-5050N-ps.ppd.gz	sharp-20191230-Sharp-MX-5050V-ps.ppd.gz	sharp-20191230-Sharp-MX-5070N-ps.ppd.gz	sharp-20191230-Sharp-MX-5070V-ps.ppd.gz	sharp-20191230-Sharp-MX-6050N-ps.ppd.gz	sharp-20191230-Sharp-MX-6050V-ps.ppd.gz	sharp-20191230-Sharp-MX-6070N-ps.ppd.gz	sharp-20191230-Sharp-MX-6070V-ps.ppd.gz	sharp-20191230-Sharp-MX-2630N-ps.ppd.gz	sharp-20191230-Sharp-MX-3050N-ps.ppd.gz	sharp-20191230-Sharp-MX-3050V-ps.ppd.gz	sharp-20191230-Sharp-MX-3550N-ps.ppd.gz	sharp-20191230-Sharp-MX-3550V-ps.ppd.gz	sharp-20191230-Sharp-MX-4050N-ps.ppd.gz	sharp-20191230-Sharp-MX-4050V-ps.ppd.gz	xerox-20190225-xrx7970.ppd.gz	foomatic-20200219-Samsung-CLP-670-Postscript-Samsung.ppd.gz	foomatic-20200219-Canon-iR-ADV_C351-Postscript-Canon.ppd.gz	foomatic-20200219-Samsung-C4820-Postscript-Samsung.ppd.gz	oki-20200329-OKB512_a.ppd.gz	oki-20200329-OKM562_a.ppd.gz	foomatic-20200219-Lexmark-E350d-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-E352dn-Postscript-Lexmark.ppd.gz	oki-20200329-OKB432_a.ppd.gz	oki-20200329-OKM492_a.ppd.gz	foomatic-20200219-Kyocera-KM-C850-Postscript-Kyocera.ppd.gz	oki-20200329-OKI-C712-PS.ppd.gz	sharp-20190711-Sharp-MX-6240N-ps.ppd.gz	sharp-20190711-Sharp-MX-7040N-ps.ppd.gz	oki-20201022-ES6450_PS.ppd.gz	oki-20201022-OKI_C650_PS.ppd.gz	xerox-20190225-xrx7830.ppd.gz	xerox-20190225-xrx7835.ppd.gz	xerox-20190225-xrx7845.ppd.gz	xerox-20190225-xrx7855.ppd.gz	foomatic-20200219-Kyocera-KM-C830-Postscript-Kyocera.ppd.gz	foomatic-20200219-Canon-iR-ADV_C250_350-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C9060_9070-Postscript-Canon.ppd.gz	foomatic-20200219-Samsung-M5270-Postscript-Samsung.ppd.gz	
oki-20200329-OKI-C612-PS.ppd.gz	sharp-20190711-Sharp-MX-6540FN-ps-jp.ppd.gz	foomatic-20200219-Oki-C7100-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C7300-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C7500-Postscript-Oki.ppd.gz	sharp-20191230-Sharp-MX-2650FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-2650FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-3150FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-3150FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-3650FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-3650FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-4150FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-4150FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-4170FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-4170FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-5150FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-5150FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-5170FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-5170FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-6150FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-6150FV-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-6170FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-6170FV-ps-jp.ppd.gz	foomatic-20200219-Kyocera-KM-5530-Postscript-Kyocera.ppd.gz	sharp-20191230-Sharp-MX-2630FN-ps-jp.ppd.gz	sharp-20191230-Sharp-MX-3630FN-ps-jp.ppd.gz	foomatic-20200219-Kyocera-KM-4530-Postscript-Kyocera.ppd.gz	sharp-20191230-Sharp-MX-M3531-ps-jp.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3510c-Postscript-Toshiba.ppd.gz	foomatic-20200219-Samsung-M453x-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-6220-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-6250-Postscript-Samsung.ppd.gz	foomatic-20200219-Canon-iR-ADV_C9065_9075-Postscript-Canon.ppd.gz	sharp-20191230-Sharp-MX-M2630-ps.ppd.gz	sharp-20191230-Sharp-MX-M3050-ps.ppd.gz	sharp-20191230-Sharp-MX-M3550-ps.ppd.gz	sharp-20191230-Sharp-MX-M4050-ps.ppd.gz	sharp-20191230-Sharp-MX-M5050-ps.ppd.gz	sharp-20191230-Sharp-MX-M6050-ps.ppd.gz	sharp-20191230-Sharp-MX-M3070-ps.ppd.gz	sharp-20191230-Sharp-MX-M3570-ps.ppd.gz	sharp-20191230-Sharp-MX-M4070-ps.ppd.gz	sharp-20191230-Sharp-MX-M5070-ps.ppd.gz	sharp-20191230-Sharp-MX-M6070-ps.ppd.gz	foomatic-20200219-Samsung-M458x-Postscript-Samsung.ppd.gz	oki-20200129-oki-c542-ps.ppd.gz	oki-20200329-OKI-C532-PS.ppd.gz	oki-20200329-OKI-MC563-PS.ppd.gz	oki-20200329-OKI-MC573-PS.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t1200_postscript-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t2300_postscript-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t1300_postscript-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t1100ps_44in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t1120ps_44in-ps.ppd.gz	foomatic-20200219-Canon-iR-ADV_C7055_7065-Postscript-Canon.ppd.gz	foomatic-20200219-Oki-C5300-Postscript-Oki.ppd.gz	epson-20200615-Epson-LX-10010MF_Series_PS3.ppd.gz	foomatic-20200219-Samsung-CLX-8385-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-8385X-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-8540-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-C8008N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-ML-551x_651x-Postscript-Samsung.ppd.gz	sharp-20190711-Sharp-MX-M904-ps.ppd.gz	sharp-20191230-Sharp-MX-M6570-ps.ppd.gz	sharp-20191230-Sharp-MX-M7570-ps.ppd.gz	sharp-20191230-Sharp-MX-M905-ps.ppd.gz	foomatic-20200219-Sharp-AR-311FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-311N_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-311S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-351FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-351N_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-351S_PS-Postscript-Sharp.ppd.gz	
foomatic-20200219-Sharp-AR-451FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-451N_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-451S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M351N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M351U-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M355N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M355U-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M451N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M451U-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M455N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M455U-Postscript-Sharp.ppd.gz	oki-20200329-OKI-C332-PS.ppd.gz	oki-20200329-OKI-MC363-PS.ppd.gz	hplip-20201209-hplip-3.20.11-hp-color_laserjet_2840-ps.ppd.gz	hp-20171121-hplip-3.17.10-hp-color_laserjet-ps.ppd.gz	sharp-20190711-Sharp-MX-M1055-ps.ppd.gz	sharp-20190711-Sharp-MX-M1205-ps.ppd.gz	sharp-20190711-Sharp-MX-M1054-ps.ppd.gz	sharp-20190711-Sharp-MX-M1204-ps.ppd.gz	foomatic-20200219-Samsung-CLX-8380-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-M337x_387x_407x-Postscript-Samsung.ppd.gz	foomatic-20200219-Oki-C5400-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C5450-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C5700-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C5900-Postscript-Oki.ppd.gz	foomatic-20200219-Samsung-ML-451x_501x-Postscript-Samsung.ppd.gz	foomatic-20200219-Oki-C6100-Postscript-Oki.ppd.gz	foomatic-20200219-Samsung-SCX-483x_5x3x-Postscript-Samsung.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t7100ps-ps.ppd.gz	xerox-20200226-xrxB9100.ppd.gz	xerox-20200226-xrxB9110.ppd.gz	xerox-20200226-xrxB9125.ppd.gz	xerox-20200226-xrxB9136.ppd.gz	foomatic-20200219-Kyocera-FS-C5030N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Canon-iR-ADV_C2020i_2030i-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_C2025-Postscript-Canon.ppd.gz	foomatic-20200219-Kyocera-FS-C5020N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-8000C-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-LP-9600SPD-Postscript-Epson.ppd.gz	sharp-20210601-Sharp-BP-30C25-ps.ppd.gz	sharp-20191219-Sharp-MX-C303-ps.ppd.gz	sharp-20191219-Sharp-MX-C303W-ps.ppd.gz	sharp-20191219-Sharp-MX-C304-ps.ppd.gz	sharp-20191219-Sharp-MX-C304W-ps.ppd.gz	sharp-20191219-Sharp-MX-C305W-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-C306W-ps-jp.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t790ps_44in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t795ps_44in-ps.ppd.gz	epson-20200615-Epson-LX-10000F_PS.ppd.gz	epson-20200615-Epson-LX-7000F_PS.ppd.gz	foomatic-20200219-Samsung-CLP-775-Postscript-Samsung.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t770_postscript-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t1100ps_24in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t1120ps_24in-ps.ppd.gz	foomatic-20200219-Epson-EPL-N2700-Postscript-Epson.ppd.gz	xerox-20191030-xrxC7030.ppd.gz	sharp-20191219-Sharp-MX-2614N-ps.ppd.gz	sharp-20191219-Sharp-MX-2615N-ps.ppd.gz	sharp-20191219-Sharp-MX-3114N-ps.ppd.gz	sharp-20191219-Sharp-MX-3115N-ps.ppd.gz	epson-20200615-Epson-LX-10000FK_Series_PS3.ppd.gz	foomatic-20200219-Canon-iR-ADV_C2020_2030-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_400_500-Postscript-Canon.ppd.gz	foomatic-20200219-Kyocera-FS-C5025N-Postscript-Kyocera.ppd.gz	sharp-20190711-Sharp-MX-M654N-ps.ppd.gz	sharp-20190711-Sharp-MX-M754N-ps.ppd.gz	foomatic-20200219-Samsung-SCX-6545X-Postscript-Samsung.ppd.gz	foomatic-20200219-Epson-AL-C9100-Postscript-Epson.ppd.gz	foomatic-20200219-Kyocera-FS-9100DN-Postscript-Kyocera.ppd.gz	
foomatic-20200219-Kyocera-FS-9500DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-C140x-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-C145x-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-C1810-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-C1860-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLP-410-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-4190-Postscript-Samsung.ppd.gz	foomatic-20200219-Sharp-MX-M1100-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M850-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M950-Postscript-Sharp.ppd.gz	oki-20200329-OKB841_a110.ppd.gz	hp-20190111-hplip-3.18.12-hp-designjet_z6200_42in_photo-ps.ppd.gz	hp-20190111-hplip-3.18.12-hp-designjet_z6200_60in_photo-ps.ppd.gz	sharp-20191219-Sharp-MX-4140FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4140N-ps.ppd.gz	sharp-20191219-Sharp-MX-4141FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4141N-ps.ppd.gz	sharp-20191219-Sharp-MX-5140FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5140N-ps.ppd.gz	sharp-20191219-Sharp-MX-5141FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5141N-ps.ppd.gz	xerox-20190225-xrx5875.ppd.gz	sharp-20191219-Sharp-MX-4110FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4110N-ps.ppd.gz	sharp-20191219-Sharp-MX-4111FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4111N-ps.ppd.gz	sharp-20191219-Sharp-MX-4112N-ps.ppd.gz	sharp-20191219-Sharp-MX-5110FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5110N-ps.ppd.gz	sharp-20191219-Sharp-MX-5111FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5111N-ps.ppd.gz	sharp-20191219-Sharp-MX-5112N-ps.ppd.gz	xerox-20190225-xrx5330.ppd.gz	foomatic-20200219-Samsung-CLP-770-Postscript-Samsung.ppd.gz	sharp-20180409-Sharp-MX-2640NR-ps.ppd.gz	sharp-20180409-Sharp-MX-3140NR-ps.ppd.gz	sharp-20180409-Sharp-MX-3640NR-ps.ppd.gz	sharp-20191219-Sharp-MX-2640FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2640N-ps.ppd.gz	sharp-20191219-Sharp-MX-3140FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3140N-ps.ppd.gz	sharp-20191219-Sharp-MX-3640FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3640N-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_4000ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_4020ps-ps.ppd.gz	sharp-20191219-Sharp-MX-2610FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2610N-ps.ppd.gz	sharp-20191219-Sharp-MX-3110FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3110N-ps.ppd.gz	sharp-20191219-Sharp-MX-3610FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3610N-ps.ppd.gz	sharp-20190711-Sharp-MX-M654FN-ps-jp.ppd.gz	sharp-20190711-Sharp-MX-M754FN-ps-jp.ppd.gz	foomatic-20200219-Kyocera-FS-7028M-Postscript-Kyocera.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_z6100ps_42in_photo-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_z6100ps_60in_photo-ps.ppd.gz	foomatic-20200219-Oki-C9200-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C9400-Postscript-Oki.ppd.gz	foomatic-20200219-Samsung-SCX-6545-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-7000-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-ML-8850_8950-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-SCX-6x55-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-SCX-6x55X-Postscript-Samsung.ppd.gz	xerox-20191030-xrxC8000.ppd.gz	xerox-20191030-xrxC9000.ppd.gz	sharp-20191219-Sharp-MX-3600FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4100FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4100N-ps.ppd.gz	sharp-20191219-Sharp-MX-4101FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4101N-ps.ppd.gz	sharp-20191219-Sharp-MX-5000FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5000N-ps.ppd.gz	sharp-20191219-Sharp-MX-5001FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-5001N-ps.ppd.gz	
foomatic-20200219-Epson-LP-8800CPS-Postscript-Epson.ppd.gz	foomatic-20200219-Oce-VarioPrint_2105PS-Postscript-Oce.ppd.gz	foomatic-20200219-Kyocera-FS-9000-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-SCX-681x-Postscript-Samsung.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_4500mfp.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_4500ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_4520mfp-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_4520ps-ps.ppd.gz	foomatic-20200219-Ricoh-DDP_92-Postscript-Ricoh.ppd.gz	sharp-20191219-Sharp-MX-2600FG-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2600FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2600G-ps.ppd.gz	sharp-20191219-Sharp-MX-2600N-ps.ppd.gz	sharp-20191219-Sharp-MX-3100FG-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3100FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3100G-ps.ppd.gz	sharp-20191219-Sharp-MX-3100N-ps.ppd.gz	foomatic-20200219-Epson-LP-9500CPS-Postscript-Epson.ppd.gz	foomatic-20200219-Lexmark-X203n-Postscript-Lexmark.ppd.gz	foomatic-20200219-Lexmark-X204n-Postscript-Lexmark.ppd.gz	xerox-20191030-xrxB8045.ppd.gz	xerox-20191030-xrxB8055.ppd.gz	xerox-20191030-xrxB8065.ppd.gz	xerox-20191030-xrxB8075.ppd.gz	xerox-20191030-xrxB8090.ppd.gz	foomatic-20200219-Epson-LP-9800C-Postscript-Epson.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t7100ps_monochrome-ps.ppd.gz	foomatic-20200219-Samsung-M408x-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-C5015N-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-AL-M8000-Postscript-Epson.ppd.gz	xerox-20201014-xrxC8000W.ppd.gz	foomatic-20200219-Ricoh-DDP_70-Postscript-Ricoh.ppd.gz	foomatic-20200219-Sharp-MX-2314NR-Postscript-Sharp.ppd.gz	sharp-20191219-Sharp-MX-2314N-ps.ppd.gz	foomatic-20200219-Canon-iR-ADV_8085_8095-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_8105-Postscript-Canon.ppd.gz	foomatic-20200219-Kyocera-CS-1650-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-1650-Postscript-Kyocera.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t790ps_24in-ps.ppd.gz	foomatic-20200219-Sharp-MX-2614NR-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-3114NR-Postscript-Sharp.ppd.gz	sharp-20191219-Sharp-MX-2514FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2517FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2616N-ps.ppd.gz	sharp-20191219-Sharp-MX-3114FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3116N-ps.ppd.gz	sharp-20191219-Sharp-MX-3117FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3614FN-ps-jp.ppd.gz	xerox-20191030-xrxB7025.ppd.gz	xerox-20191030-xrxB7030.ppd.gz	xerox-20191030-xrxB7035.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t770ps_24in-ps.ppd.gz	foomatic-20200219-Epson-AL-C8600_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Kyocera-CS-2050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-CS-2550-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-2050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-2550-Postscript-Kyocera.ppd.gz	sharp-20191219-Sharp-MX-1810U-ps.ppd.gz	sharp-20191219-Sharp-MX-2010U-ps.ppd.gz	sharp-20191219-Sharp-MX-2310F-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2310U-ps.ppd.gz	sharp-20191219-Sharp-MX-3111F-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3111U-ps.ppd.gz	sharp-20191219-Sharp-MX-3611F-ps-jp.ppd.gz	sharp-20191219-Sharp-DX-2500N-ps.ppd.gz	foomatic-20200219-Kyocera-KM-6230-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-LP-9200C-Postscript-Epson.ppd.gz	foomatic-20200219-Toshiba-e-Studio_451c-Postscript-Toshiba.ppd.gz	foomatic-20200219-Epson-AL-C9200-Postscript-Epson.ppd.gz	sharp-20191219-Sharp-BP-10C20-ps.ppd.gz	sharp-20191219-Sharp-BP-20C20-ps.ppd.gz	
sharp-20191219-Sharp-BP-20C25-ps.ppd.gz	sharp-20191219-Sharp-DX-20C20-ps-jp.ppd.gz	foomatic-20200219-Samsung-SCX-4x28-Postscript-Samsung.ppd.gz	foomatic-20200219-Sharp-MX-M860-Postscript-Sharp.ppd.gz	sharp-20191219-Sharp-MX-2300FG-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2300G-ps.ppd.gz	sharp-20191219-Sharp-MX-2300N-ps.ppd.gz	sharp-20191219-Sharp-MX-2700FG-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-2700G-ps.ppd.gz	sharp-20191219-Sharp-MX-2700N-ps.ppd.gz	sharp-20191219-Sharp-MX-3500FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3500N-ps.ppd.gz	sharp-20191219-Sharp-MX-3501FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-3501N-ps.ppd.gz	sharp-20191219-Sharp-MX-4500FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4500N-ps.ppd.gz	sharp-20191219-Sharp-MX-4501FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-4501N-ps.ppd.gz	foomatic-20200219-Canon-iR-ADV_6055_6065-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_6075-Postscript-Canon.ppd.gz	foomatic-20200219-Sharp-AR-266FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-266S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M236_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M237_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M276_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M277_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Canon-iR-ADV_4025_4035-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-iR-ADV_4045_4051-Postscript-Canon.ppd.gz	sharp-20191219-Sharp-MX-5500N-ps.ppd.gz	sharp-20191219-Sharp-MX-6200N-ps.ppd.gz	sharp-20191219-Sharp-MX-6201N-ps.ppd.gz	sharp-20191219-Sharp-MX-7000N-ps.ppd.gz	sharp-20191219-Sharp-MX-7001N-ps.ppd.gz	foomatic-20200219-Epson-LP-9100PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Kyocera-KM-4230_5230-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-M403x-Postscript-Samsung.ppd.gz	sharp-20191230-Sharp-MX-B356W-ps.ppd.gz	sharp-20191230-Sharp-MX-B376W-ps.ppd.gz	sharp-20191230-Sharp-MX-B456W-ps.ppd.gz	sharp-20191230-Sharp-MX-B476W-ps.ppd.gz	foomatic-20200219-Epson-LP-8300CPD-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-LP-8500CPD-Postscript-Epson.ppd.gz	sharp-20191219-Sharp-MX-2301N-ps.ppd.gz	foomatic-20200219-Toshiba-e-Studio_282-Postscript-Toshiba.ppd.gz	sharp-20191219-Sharp-MX-M365FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M365N-ps.ppd.gz	sharp-20191219-Sharp-MX-M465FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M465N-ps.ppd.gz	sharp-20191219-Sharp-MX-M565FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M565N-ps.ppd.gz	sharp-20191219-Sharp-MX-M364N-ps.ppd.gz	sharp-20191219-Sharp-MX-M464FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M464N-ps.ppd.gz	sharp-20191219-Sharp-MX-M564FN-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M564N-ps.ppd.gz	sharp-20191219-Sharp-DX-2000U-ps.ppd.gz	foomatic-20200219-Kyocera-FS-6020-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-SCX-5835_5935-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-SCX-5835_5935X-Postscript-Samsung.ppd.gz	xerox-20191030-xrxC7000.ppd.gz	xerox-20190225-xrxd95cp.ppd.gz	foomatic-20200219-Samsung-ML-371x-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-375x-Postscript-Samsung.ppd.gz	foomatic-20200219-Sharp-AR-BC260-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-BC320-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C170FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C170M-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C172FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C172M-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C260-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C260F-Postscript-Sharp.ppd.gz	
foomatic-20200219-Sharp-AR-C260FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C260M-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C260S-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C261F-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C261FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C261M-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C261S-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C262FP_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C262M-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-C262S-Postscript-Sharp.ppd.gz	foomatic-20200219-Oki-C8800-Postscript-Oki.ppd.gz	foomatic-20200219-Samsung-SCX-5635-Postscript-Samsung.ppd.gz	foomatic-20200219-Imagistics-im8530-Postscript-Oce.ppd.gz	foomatic-20200219-Sharp-AR-C260P-Postscript-Sharp.ppd.gz	foomatic-20200219-Oki-C7200-Postscript-Oki.ppd.gz	foomatic-20200219-Oki-C7400-Postscript-Oki.ppd.gz	foomatic-20200219-HP-DesignJet_5000PS-Postscript-HP.ppd.gz	foomatic-20200219-Toshiba-e-Studio_452-Postscript-Toshiba.ppd.gz	foomatic-20200219-Epson-AL-C4000_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Canon-LBP712C_PPD-Postscript-Canon.ppd.gz	sharp-20191219-Sharp-MX-1800N-ps.ppd.gz	foomatic-20200219-Epson-AL-C4100-Postscript-Epson.ppd.gz	sharp-20180409-Sharp-MX-M283N-ps.ppd.gz	sharp-20180409-Sharp-MX-M363N-ps.ppd.gz	sharp-20180409-Sharp-MX-M453N-ps.ppd.gz	sharp-20180409-Sharp-MX-M503N-ps.ppd.gz	sharp-20191219-Sharp-MX-M265N-ps.ppd.gz	sharp-20191219-Sharp-MX-M265NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M265U-ps.ppd.gz	sharp-20191219-Sharp-MX-M265UV-ps.ppd.gz	sharp-20191219-Sharp-MX-M266N-ps.ppd.gz	sharp-20191219-Sharp-MX-M266NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M315N-ps.ppd.gz	sharp-20191219-Sharp-MX-M315NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M315U-ps.ppd.gz	sharp-20191219-Sharp-MX-M315UV-ps.ppd.gz	sharp-20191219-Sharp-MX-M316N-ps.ppd.gz	sharp-20191219-Sharp-MX-M316NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M356N-ps.ppd.gz	sharp-20191219-Sharp-MX-M356NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M356U-ps.ppd.gz	sharp-20191219-Sharp-MX-M356UV-ps.ppd.gz	xerox-20190711-xrwc3335.ppd.gz	xerox-20190711-xrwc3345.ppd.gz	xerox-20190711-xrx3330.ppd.gz	sharp-20180409-Sharp-AR-M452U-ps.ppd.gz	sharp-20180409-Sharp-MX-M282N-ps.ppd.gz	sharp-20180409-Sharp-MX-M362N-ps.ppd.gz	sharp-20180409-Sharp-MX-M363U-ps.ppd.gz	sharp-20180409-Sharp-MX-M452N-ps.ppd.gz	sharp-20180409-Sharp-MX-M453U-ps.ppd.gz	sharp-20180409-Sharp-MX-M502N-ps.ppd.gz	sharp-20180409-Sharp-MX-M503U-ps.ppd.gz	sharp-20180409-Sharp-MX-M363F-ps-jp.ppd.gz	sharp-20180409-Sharp-MX-M423F-ps-jp.ppd.gz	sharp-20180409-Sharp-MX-M503F-ps-jp.ppd.gz	foomatic-20200219-Kyocera-FS-1800-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-1800plus-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-3800-Postscript-Kyocera.ppd.gz	foomatic-20200219-Sharp-MX-M260-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M260FP-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M260N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M310-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M310FP-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M310N-Postscript-Sharp.ppd.gz	foomatic-20200219-Kyocera-FS-3820N-Postscript-Kyocera.ppd.gz	sharp-20191230-Sharp-MX-B355W-ps.ppd.gz	sharp-20191230-Sharp-MX-B455W-ps.ppd.gz	foomatic-20200219-Epson-EPL-N7000-Postscript-Epson.ppd.gz	sharp-20191219-Sharp-MX-M266FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M266FV-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M316FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M316FV-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M356FP-ps-jp.ppd.gz	
sharp-20191219-Sharp-MX-M356FV-ps-jp.ppd.gz	foomatic-20200219-Kyocera-FS-3900DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-4000DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-C5016N-Postscript-Kyocera.ppd.gz	sharp-20191219-Sharp-MX-M316G-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M316GV-ps-jp.ppd.gz	foomatic-20200219-Samsung-CLP-660-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-6200-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-6240-Postscript-Samsung.ppd.gz	xerox-20191030-Xerox_Phaser_7800DN.ppd.gz	xerox-20191030-Xerox_Phaser_7800DX.ppd.gz	xerox-20191030-Xerox_Phaser_7800GX.ppd.gz	foomatic-20200219-Epson-EPL-N2500_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Toshiba-e-Studio_850-Postscript-Toshiba.ppd.gz	foomatic-20200219-Canon-LBP710C_PPD-Postscript-Canon.ppd.gz	foomatic-20200219-HP-DesignJet_800PS-Postscript-HP.ppd.gz	foomatic-20200219-Kyocera-FS-3830N-Postscript-Kyocera.ppd.gz	foomatic-20200219-HP-DesignJet_5500ps-Postscript-HP.ppd.gz	foomatic-20200219-Kyocera-FS-6026-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-6950DN-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-1920-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-AL-C1900_PS3-Postscript-Epson.ppd.gz	xerox-20190225-xrx3655s.ppd.gz	sharp-20191230-Sharp-DX-C310-ps.ppd.gz	sharp-20191230-Sharp-DX-C311-ps.ppd.gz	sharp-20191230-Sharp-DX-C380-ps.ppd.gz	sharp-20191230-Sharp-DX-C381-ps.ppd.gz	sharp-20191230-Sharp-DX-C400-ps.ppd.gz	sharp-20191230-Sharp-DX-C401-ps.ppd.gz	sharp-20191230-Sharp-MX-C310-ps.ppd.gz	sharp-20191230-Sharp-MX-C311-ps.ppd.gz	sharp-20191230-Sharp-MX-C380-ps.ppd.gz	sharp-20191230-Sharp-MX-C380P-ps.ppd.gz	sharp-20191230-Sharp-MX-C381-ps.ppd.gz	sharp-20191230-Sharp-MX-C400-ps.ppd.gz	sharp-20191230-Sharp-MX-C400P-ps.ppd.gz	sharp-20191230-Sharp-MX-C401-ps.ppd.gz	foomatic-20200219-Samsung-ML-2855-Postscript-Samsung.ppd.gz	foomatic-20200219-Toshiba-GL-1020-Postscript-Toshiba.ppd.gz	foomatic-20200219-Toshiba-GL-1010-Postscript-Toshiba.ppd.gz	foomatic-20200219-Samsung-ML-3470-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-3475-Postscript-Samsung.ppd.gz	sharp-20191219-Sharp-MX-M264NV-ps.ppd.gz	sharp-20191219-Sharp-MX-M314NV-ps.ppd.gz	sharp-20210601-Sharp-BP-30M28-ps.ppd.gz	sharp-20210601-Sharp-BP-30M31-ps.ppd.gz	sharp-20210601-Sharp-BP-30M35-ps.ppd.gz	foomatic-20200219-Kyocera-FS-1900-Postscript-Kyocera.ppd.gz	foomatic-20200219-Sharp-MX-M264NR-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M314NR-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M354NR-Postscript-Sharp.ppd.gz	sharp-20191219-Sharp-MX-M264FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M264N-ps.ppd.gz	sharp-20191219-Sharp-MX-M264U-ps.ppd.gz	sharp-20191219-Sharp-MX-M314FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M314N-ps.ppd.gz	sharp-20191219-Sharp-MX-M314U-ps.ppd.gz	sharp-20191219-Sharp-MX-M354FP-ps-jp.ppd.gz	sharp-20191219-Sharp-MX-M354N-ps.ppd.gz	sharp-20191219-Sharp-MX-M354U-ps.ppd.gz	foomatic-20200219-Epson-AL-MX20-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-AL-MX21-Postscript-Epson.ppd.gz	foomatic-20200219-Canon-LBP7780C_5480-Postscript-Canon.ppd.gz	foomatic-20200219-HP-DesignJet_1055CM-Postscript-HP.ppd.gz	foomatic-20200219-HP-DesignJet_1050C-Postscript-HP.ppd.gz	foomatic-20200219-Epson-AL-C2000_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Samsung-CLP-350-Postscript-Samsung.ppd.gz	foomatic-20200219-Kyocera-FS-6900-Postscript-Kyocera.ppd.gz	foomatic-20200219-HP-DesignJet_3500CP-Postscript-HP.ppd.gz	foomatic-20200219-Gestetner-P7032-Postscript-Gestetner.ppd.gz	
foomatic-20200219-Lanier-2132-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-P7032-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP3200-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-SLP32-Postscript-Savin.ppd.gz	foomatic-20200219-Oki-B4300-Postscript-Oki.ppd.gz	sharp-20180409-Sharp-MX-M623-ps-jp.ppd.gz	sharp-20180409-Sharp-MX-M623N-ps.ppd.gz	sharp-20180409-Sharp-MX-M623U-ps.ppd.gz	sharp-20180409-Sharp-MX-M753-ps-jp.ppd.gz	sharp-20180409-Sharp-MX-M753N-ps.ppd.gz	sharp-20180409-Sharp-MX-M753U-ps.ppd.gz	foomatic-20200219-Kyocera-FS-2000D-Postscript-Kyocera.ppd.gz	foomatic-20200219-Oki-B4350-Postscript-Oki.ppd.gz	foomatic-20200219-Kyocera-FS-3750-Postscript-Kyocera.ppd.gz	foomatic-20200219-HP-DesignJet_2500CP-Postscript-HP.ppd.gz	sharp-20190711-Sharp-MX-C250-ps.ppd.gz	sharp-20190711-Sharp-MX-C250E-ps.ppd.gz	sharp-20190711-Sharp-MX-C250F-ps.ppd.gz	sharp-20190711-Sharp-MX-C250FE-ps.ppd.gz	sharp-20190711-Sharp-MX-C250FR-ps.ppd.gz	sharp-20190711-Sharp-MX-C300-ps.ppd.gz	sharp-20190711-Sharp-MX-C300E-ps.ppd.gz	sharp-20190711-Sharp-MX-C300F-ps.ppd.gz	sharp-20190711-Sharp-MX-C300P-ps.ppd.gz	sharp-20190711-Sharp-MX-C300PE-ps.ppd.gz	sharp-20190711-Sharp-MX-C300PL-ps.ppd.gz	sharp-20190711-Sharp-MX-C300W-ps.ppd.gz	sharp-20190711-Sharp-MX-C300WE-ps.ppd.gz	sharp-20190711-Sharp-MX-C300WR-ps.ppd.gz	sharp-20190711-Sharp-MX-C301-ps.ppd.gz	sharp-20190711-Sharp-MX-C301F-ps.ppd.gz	sharp-20190711-Sharp-MX-C301W-ps.ppd.gz	foomatic-20200219-Kyocera-FS-6750-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-ML-2850-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-2853-Postscript-Samsung.ppd.gz	foomatic-20200219-Brother-HL-3260N-Postscript-Brother.ppd.gz	foomatic-20200219-Sharp-AR-555M_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-555S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-625M_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-625S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M550N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M550U-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M620N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M620U-Postscript-Sharp.ppd.gz	foomatic-20200219-Kyocera-FS-6700-Postscript-Kyocera.ppd.gz	xerox-20210715-xrxB310.ppd.gz	foomatic-20200219-Sharp-AR-705M_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-705S_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M700N-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M700U-Postscript-Sharp.ppd.gz	foomatic-20200219-Canon-LBP8780-Postscript-Canon.ppd.gz	foomatic-20200219-Epson-AL-M2400-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-AL-M2410-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-EPL-5900_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-EPL-6100_PS3-Postscript-Epson.ppd.gz	foomatic-20200219-Kyocera-FS-1714M-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-HL-3450CN-Postscript-Brother.ppd.gz	xerox-20210715-xrxC230.ppd.gz	xerox-20210715-xrxC235.ppd.gz	xerox-20191030-Xerox_VersaLink_C600.ppd.gz	foomatic-20200219-Kyocera-KM-2030-Postscript-Kyocera.ppd.gz	xerox-20191030-Xerox_VersaLink_C500.ppd.gz	foomatic-20200219-Canon-LBP7680C_5280-Postscript-Canon.ppd.gz	xerox-20191030-Xerox_VersaLink_C605.ppd.gz	xerox-20190225-xrx4622.ppd.gz	foomatic-20200219-Epson-AL-M2000-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-AL-M2010-Postscript-Epson.ppd.gz	foomatic-20200219-Brother-HL-2600CN-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-FS-1200-Postscript-Kyocera.ppd.gz	foomatic-20200219-Oce-9260-Postscript2-Oce.ppd.gz	xerox-20191030-Xerox_VersaLink_C505.ppd.gz	
xerox-20190711-xrx6510.ppd.gz	xerox-20190711-xrx6515.ppd.gz	xerox-20191030-xrxC400.ppd.gz	xerox-20191030-xrxC405.ppd.gz	foomatic-20200219-Epson-AL-C2600-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-EPL-N2550-Postscript-Epson.ppd.gz	foomatic-20200219-Kyocera-FS-1700-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-C48x-Postscript-Samsung.ppd.gz	foomatic-20200219-Epson-AL-2600-Postscript-Epson.ppd.gz	xerox-20191030-xrxB405.ppd.ppd.gz	xerox-20191030-xrxb600.ppd.gz	xerox-20191030-xrxb605.ppd.gz	xerox-20191030-xrxb610.ppd.gz	xerox-20191030-xrxb615.ppd.gz	foomatic-20200219-Samsung-C460-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-C470-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-CLX-3300-Postscript-Samsung.ppd.gz	xerox-20191030-xrxB400.ppd.ppd.gz	sharp-20210601-Sharp-BP-20M22-ps.ppd.gz	sharp-20210601-Sharp-BP-20M24-ps.ppd.gz	foomatic-20200219-Kyocera-FS-3700-Postscript-Kyocera.ppd.gz	sharp-20210601-Sharp-BP-20M28-ps.ppd.gz	sharp-20210601-Sharp-BP-20M31-ps.ppd.gz	sharp-20210601-Sharp-BP-21M22-ps.ppd.gz	foomatic-20200219-Kyocera-FS-1700plus-Postscript-Kyocera.ppd.gz	foomatic-20200219-Toshiba-e-Studio_205-Postscript-Toshiba.ppd.gz	foomatic-20200219-Epson-AL-C2800-Postscript-Epson.ppd.gz	sharp-20191230-Sharp-AR-B350W-ps-jp.ppd.gz	sharp-20191230-Sharp-AR-B351-ps.ppd.gz	sharp-20191230-Sharp-AR-B351F-ps.ppd.gz	sharp-20191230-Sharp-AR-B351W-ps.ppd.gz	sharp-20191230-Sharp-AR-B352P-ps.ppd.gz	sharp-20191230-Sharp-AR-B353P-ps.ppd.gz	sharp-20191230-Sharp-AR-B451-ps.ppd.gz	sharp-20191230-Sharp-AR-B451F-ps.ppd.gz	sharp-20191230-Sharp-AR-B451W-ps.ppd.gz	sharp-20191230-Sharp-AR-B452P-ps.ppd.gz	sharp-20191230-Sharp-AR-B453P-ps.ppd.gz	sharp-20191230-Sharp-MX-B350-ps.ppd.gz	sharp-20191230-Sharp-MX-B350F-ps.ppd.gz	sharp-20191230-Sharp-MX-B350P-ps.ppd.gz	sharp-20191230-Sharp-MX-B350W-ps.ppd.gz	sharp-20191230-Sharp-MX-B351P-ps.ppd.gz	sharp-20191230-Sharp-MX-B450-ps.ppd.gz	sharp-20191230-Sharp-MX-B450F-ps.ppd.gz	sharp-20191230-Sharp-MX-B450P-ps.ppd.gz	sharp-20191230-Sharp-MX-B450W-ps.ppd.gz	sharp-20191230-Sharp-MX-B451P-ps.ppd.gz	foomatic-20200219-Canon-LBP6780_3580-Postscript-Canon.ppd.gz	foomatic-20200219-Epson-AL-C4200-Postscript-Epson.ppd.gz	foomatic-20200219-Epson-AL-C3800-Postscript-Epson.ppd.gz	foomatic-20200219-Canon-LBP7660C-Postscript-Canon.ppd.gz	sharp-20191219-Sharp-AR-6020D-ps.ppd.gz	sharp-20191219-Sharp-AR-6020N-ps.ppd.gz	sharp-20191219-Sharp-AR-6020NR-ps.ppd.gz	sharp-20191219-Sharp-AR-6023D-ps.ppd.gz	sharp-20191219-Sharp-AR-6023N-ps.ppd.gz	sharp-20191219-Sharp-AR-6023NR-ps.ppd.gz	foomatic-20200219-Kyocera-FS-6300-Postscript-Kyocera.ppd.gz	foomatic-20200219-Oce-VarioPrint_2045PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2050PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2055PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2060PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2065PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2070PS-Postscript-Oce.ppd.gz	foomatic-20200219-Kyocera-FS-3700plus-Postscript-Kyocera.ppd.gz	sharp-20191219-Sharp-AR-6026N-ps.ppd.gz	sharp-20191219-Sharp-AR-6026NR-ps.ppd.gz	sharp-20191219-Sharp-AR-6031N-ps.ppd.gz	sharp-20191219-Sharp-AR-6031NR-ps.ppd.gz	sharp-20191219-Sharp-AR-6120N-ps.ppd.gz	sharp-20191219-Sharp-AR-6131N-ps.ppd.gz	xerox-20210715-xrxB225.ppd.gz	xerox-20210715-xrxB230.ppd.gz	xerox-20210715-xrxB235.ppd.gz	foomatic-20200219-Epson-EPL-6200-Postscript-Epson.ppd.gz	sharp-20191230-Sharp-MX-B380P-ps.ppd.gz	sharp-20191230-Sharp-MX-B381-ps.ppd.gz	sharp-20191230-Sharp-MX-B382-ps.ppd.gz	
sharp-20191230-Sharp-MX-B382P-ps.ppd.gz	sharp-20191230-Sharp-MX-B382SC-ps.ppd.gz	sharp-20191230-Sharp-MX-B400P-ps.ppd.gz	sharp-20191230-Sharp-MX-B401-ps.ppd.gz	sharp-20191230-Sharp-MX-B402-ps.ppd.gz	sharp-20191230-Sharp-MX-B402SC-ps.ppd.gz	foomatic-20200219-Kyocera-Ci-1100-Postscript-Kyocera.ppd.gz	foomatic-20200219-Oce-9230-Postscript2-Oce.ppd.gz	foomatic-20200219-Oce-9245-Postscript2-Oce.ppd.gz	foomatic-20200219-Kyocera-FS-5800C-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-5900C-Postscript-Kyocera.ppd.gz	foomatic-20200219-Sharp-AR-N182FG-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-N182G-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M182D-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M202D-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M232D-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M165_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-205FG_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-205G_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M206_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M207_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Oce-VarioPrint_2100PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-VarioPrint_2110PS-Postscript-Oce.ppd.gz	foomatic-20200219-Brother-HL-2460-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-7050-Postscript-Brother.ppd.gz	foomatic-20200219-Sharp-AR-M205_PS-Postscript-Sharp.ppd.gz	sharp-20191219-Sharp-AR-6020-ps.ppd.gz	sharp-20191219-Sharp-AR-6023-ps.ppd.gz	sharp-20191219-Sharp-AR-G200-ps-jp.ppd.gz	foomatic-20200219-Gestetner-F9199_9199nf-Postscript-Gestetner.ppd.gz	foomatic-20200219-Lanier-LF510_515e-Postscript-Lanier.ppd.gz	foomatic-20200219-Oce-3145PS-Postscript2-Oce.ppd.gz	foomatic-20200219-Oce-3155PS-Postscript2-Oce.ppd.gz	foomatic-20200219-Oce-3165PS-Postscript2-Oce.ppd.gz	foomatic-20200219-Ricoh-FAX5510L_5510NF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-FAX3799_3799nf-Postscript-Savin.ppd.gz	foomatic-20200219-Kyocera-FS-6500plus-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-1820-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-SCX-6x45-Postscript-Samsung.ppd.gz	xerox-20190225-xr6605dn.ppd.gz	foomatic-20200219-Kyocera-FS-1030D-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-EPL-N3000-Postscript-Epson.ppd.gz	foomatic-20200219-Sharp-AR-163G_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M161_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M162_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-MX-M182-Postscript-Sharp.ppd.gz	foomatic-20200219-Kyocera-FS-1020D-Postscript-Kyocera.ppd.gz	foomatic-20200219-Sharp-AR-163FG_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Kyocera-KM-1530-Postscript-Kyocera.ppd.gz	foomatic-20200219-Epson-AL-CX21-Postscript-Epson.ppd.gz	foomatic-20200219-Sharp-AR-200M_PS-Postscript-Sharp.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c226.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c227.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c258.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c266.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c287.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c308.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c3351.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c368.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c3851.ppd.gz	
konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c3851fs.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c458.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c558.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c658.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c659.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c759.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c250i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c300i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c3300i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c3320i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c3350i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c360i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c4000i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c4050i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c450i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c550i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c650i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-workplace-hub.ppd.gz	foomatic-20200219-Brother-HL-4050CDN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-4070CDW-Postscript-Brother.ppd.gz	foomatic-20200219-Ricoh-EMP_156-Postscript-Ricoh.ppd.gz	foomatic-20200219-Epson-AL-M4000-Postscript-Epson.ppd.gz	foomatic-20200219-Brother-DCP-9045CDN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-9450CDN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC9840CDW-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-FS-1118MFP-Postscript-Kyocera.ppd.gz	foomatic-20200219-Oki-14i-Postscript-Oki.ppd.gz	foomatic-20200219-Brother-MFC-9440CN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-8050N-Postscript-Brother.ppd.gz	foomatic-20200219-Sharp-AR-160M_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-5220-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M160_PS-Postscript-Sharp.ppd.gz	foomatic-20191029-BR5070DN_GPL.ppd.gz	foomatic-20200219-Brother-HL-2700CN-Postscript-Brother.ppd.gz	foomatic-20200219-Sharp-AR-155FG_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-168D-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M155_PS-Postscript-Sharp.ppd.gz	foomatic-20200219-Brother-DCP-9040CN-Postscript-Brother.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-226i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-246i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-266i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-306i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-227.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-287.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-308.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-308e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-367.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-368.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-368e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-4052.ppd.gz	
konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-458.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-458e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-4752.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-558.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-558e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-658e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-758-jp-eu.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-808-us.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-958.ppd.gz	foomatic-20191029-shar208d.ppd.gz	foomatic-20191029-shmb201d.ppd.gz	foomatic-20200219-Sharp-AR-B07-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-NB2A-Postscript-Sharp.ppd.gz	foomatic-20200219-Canon-LBP6670-Postscript-Canon.ppd.gz	foomatic-20200219-Canon-LBP6680_3480-Postscript-Canon.ppd.gz	foomatic-20200219-Oce-8445PS-Postscript2-Oce.ppd.gz	foomatic-20200219-Oce-8465PS-Postscript2-Oce.ppd.gz	foomatic-20200219-Brother-HL-3070CW-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-6050D_DN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-DCP-9010CN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-9010CN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-9120CN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-9320CW-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-6050-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-KM-1815-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-HL-5050-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-5070N-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-CS-1815-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-HL-5270DN-Postscript-Brother.ppd.gz	xerox-20190820-xrxosd.ppd.gz	foomatic-20200219-Brother-MFC-8670DN-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-FS-920-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-ML-4050-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-4055-Postscript-Samsung.ppd.gz	foomatic-20200219-Brother-HL-1850_70N-Postscript-Brother.ppd.gz	hp-20190918-hplip-3.19.6-hp-color_designjet_xl_3600-ps.ppd.gz	foomatic-20200219-Sharp-AR-168S-Postscript-Sharp.ppd.gz	foomatic-20200219-Sharp-AR-M150_PS-Postscript-Sharp.ppd.gz	foomatic-20191029-shar208s.ppd.gz	foomatic-20191029-sharm200.ppd.gz	foomatic-20200219-Sharp-MX-NB11-Postscript-Sharp.ppd.gz	foomatic-20200219-Oce-VarioPrint_2090PS-Postscript-Oce.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_designjet_xl_3600-ps.ppd.gz	foomatic-20200219-Kyocera-FS-1018MFP-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-MFC-8820D-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-DCP-8025D-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-9420CN-Postscript-Brother.ppd.gz	foomatic-20200219-Oce-PPC3074PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC3094PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC3114PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC3073PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC3093PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC3113PS-Postscript-Oce.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1530-postscript.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t2530-postscript.ppd.gz	hp-20190918-hplip-3.19.6-hp-designjet_t2600dr-ps.ppd.gz	foomatic-20200219-Samsung-ML-4550-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-4555-Postscript-Samsung.ppd.gz	
foomatic-20200219-Kyocera-FS-1010-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-1050-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-MFC-7450-Postscript-Brother.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1600dr-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t2600dr-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1600_printer-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t2600-ps.ppd.gz	foomatic-20200219-Brother-HL-5250DN-Postscript-Brother.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4100ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4100ps_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4600ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4600ps_mfp-ps.ppd.gz	foomatic-20200219-Brother-HL-5150D-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-5170DN-Postscript-Brother.ppd.gz	foomatic-20200219-Samsung-SCX-6x22-Postscript-Samsung.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_3900ps_mfp-ps.ppd.gz	foomatic-20200219-Brother-HL-5240-Postscript-Brother.ppd.gz	foomatic-20200219-Kyocera-FS-1000-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-1510-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-KM-1810-Postscript-Kyocera.ppd.gz	foomatic-20200219-Samsung-ML-2150-Postscript-Samsung.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_2554ci_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_3554ci_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_4054ci_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_5054ci_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_6054ci_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_7054ci_J.ppd.gz	foomatic-20200219-Samsung-ML-2550-Postscript-Samsung.ppd.gz	foomatic-20200219-Samsung-ML-8x00-Postscript-Samsung.ppd.gz	foomatic-20200219-Brother-HL-1650_70N-Postscript-Brother.ppd.gz	foomatic-20200219-Samsung-SCX-6x20-Postscript-Samsung.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_5004i_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_6004i_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_7004i_J.ppd.gz	foomatic-20200219-Kyocera-FS-600-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-680-Postscript-Kyocera.ppd.gz	foomatic-20200219-Kyocera-FS-800-Postscript-Kyocera.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2553ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_3253ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_4053ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_5053ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_6053ciJ.ppd.gz	foomatic-20200219-Kyocera-FS-1000plus-Postscript-Kyocera.ppd.gz	foomatic-20200219-Brother-DCP-8020-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8420-Postscript-Brother.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_7353ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_8353ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_5003iJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_6003iJ.ppd.gz	kyocera-20200416-Kyocera_CS_2554ci.ppd.gz	kyocera-20200416-Kyocera_CS_3554ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_2554ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3554ci.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P8060cdnJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2552ciJ.ppd.gz	foomatic-20200219-Oce-PPC5115PS-Postscript-Oce.ppd.gz	foomatic-20200219-Oce-PPC5160PS-Postscript-Oce.ppd.gz	kyocera-20210630-Kyocera_CS_4054ci.ppd.gz	kyocera-20210630-Kyocera_CS_5054ci.ppd.gz	kyocera-20210630-Kyocera_CS_6054ci.ppd.gz	kyocera-20210630-Kyocera_CS_7054ci.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_4054ci.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_5054ci.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_6054ci.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_7054ci.ppd.gz	
kyocera-20210630-TA_2508ci.ppd.gz	kyocera-20210630-TA_3508ci.ppd.gz	kyocera-20210630-TA_4008ci.ppd.gz	kyocera-20210630-TA_5008ci.ppd.gz	kyocera-20210630-TA_6008ci.ppd.gz	kyocera-20210630-TA_7008ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_7003iJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_9003iJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P4060dnJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2460ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2470ciJ.ppd.gz	kyocera-20210630-Kyocera_CS_4004i.ppd.gz	kyocera-20210630-Kyocera_CS_5004i.ppd.gz	kyocera-20210630-Kyocera_CS_6004i.ppd.gz	kyocera-20210630-Kyocera_CS_7004i.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_4004i.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_5004i.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_6004i.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_7004i.ppd.gz	kyocera-20210630-TA_5058i.ppd.gz	kyocera-20210630-TA_6058i.ppd.gz	kyocera-20210630-TA_7058i.ppd.gz	kyocera-20190328-Kyocera_CS_2552ci.ppd.gz	kyocera-20190328-Kyocera_CS_3252ci.ppd.gz	kyocera-20190328-Kyocera_CS_3552ci.ppd.gz	kyocera-20190328-Kyocera_CS_4052ci.ppd.gz	kyocera-20190328-Kyocera_CS_5052ci.ppd.gz	kyocera-20190328-Kyocera_CS_6052ci.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P8060cdn.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_2552ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3252ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3552ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_4052ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_5052ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_6052ci.ppd.gz	kyocera-20200416-Kyocera_CS_2553ci.ppd.gz	kyocera-20200416-Kyocera_CS_3253ci.ppd.gz	kyocera-20200416-Kyocera_CS_3553ci.ppd.gz	kyocera-20200416-Kyocera_CS_4053ci.ppd.gz	kyocera-20200416-Kyocera_CS_5053ci.ppd.gz	kyocera-20200416-Kyocera_CS_6053ci.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P4040dnJ.ppd.gz	foomatic-20200219-Brother-HL-1450-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-1470N-Postscript-Brother.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2553ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_3253ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_3553ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_4053ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_5053ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_6053ci.ppd.gz	kyocera-20210301-TA_2507ci.ppd.gz	kyocera-20210301-TA_3207ci.ppd.gz	kyocera-20210301-TA_4007ci.ppd.gz	kyocera-20210301-TA_5007ci.ppd.gz	kyocera-20210301-TA_6007ci.ppd.gz	kyocera-20210309-TA_2507ci.ppd.gz	kyocera-20210309-TA_3207ci.ppd.gz	kyocera-20210309-TA_4007ci.ppd.gz	kyocera-20210309-TA_5007ci.ppd.gz	kyocera-20210309-TA_6007ci.ppd.gz	foomatic-20200219-Brother-DCP-8045D-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8640D-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8840D-Postscript-Brother.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P4140dnJ.ppd.gz	kyocera-20200416-Kyocera_CS_3050ci.ppd.gz	kyocera-20200416-Kyocera_CS_3051ci.ppd.gz	kyocera-20200416-Kyocera_CS_3550ci.ppd.gz	kyocera-20200416-Kyocera_CS_3551ci.ppd.gz	kyocera-20200416-Kyocera_CS_4550ci.ppd.gz	kyocera-20200416-Kyocera_CS_4551ci.ppd.gz	kyocera-20200416-Kyocera_CS_5550ci.ppd.gz	kyocera-20200416-Kyocera_CS_5551ci.ppd.gz	kyocera-20200416-Kyocera_FS-C8600DN.ppd.gz	kyocera-20200416-Kyocera_FS-C8650DN.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3050ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3551ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_4550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_4551ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_5550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_5551ci.ppd.gz	
kyocera-20190328-Kyocera_CS_7052ci.ppd.gz	kyocera-20190328-Kyocera_CS_8052ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_7052ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_8052ci.ppd.gz	kyocera-20200416-Kyocera_CS_6550ci.ppd.gz	kyocera-20200416-Kyocera_CS_6551ci.ppd.gz	kyocera-20200416-Kyocera_CS_7353ci.ppd.gz	kyocera-20200416-Kyocera_CS_7550ci.ppd.gz	kyocera-20200416-Kyocera_CS_7551ci.ppd.gz	kyocera-20200416-Kyocera_CS_8353ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_6550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_6551ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_7353ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_7550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_7551ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_8353ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2510iJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_2520iJ.ppd.gz	kyocera-20210301-TA_7307ci.ppd.gz	kyocera-20210301-TA_8307ci.ppd.gz	kyocera-20210309-TA_7307ci.ppd.gz	kyocera-20210309-TA_8307ci.ppd.gz	kyocera-20190328-Kyocera_CS_2551ci.ppd.gz	kyocera-20190328-Kyocera_CS_4002i.ppd.gz	kyocera-20190328-Kyocera_CS_5002i.ppd.gz	kyocera-20190328-Kyocera_CS_6002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_2551ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_4002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_5002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_6002i.ppd.gz	kyocera-20200416-Kyocera_CS_250ci.ppd.gz	kyocera-20200416-Kyocera_CS_300ci.ppd.gz	kyocera-20200416-Kyocera_CS_4003i.ppd.gz	kyocera-20200416-Kyocera_CS_400ci.ppd.gz	kyocera-20200416-Kyocera_CS_5003i.ppd.gz	kyocera-20200416-Kyocera_CS_500ci.ppd.gz	kyocera-20200416-Kyocera_CS_552ci.ppd.gz	kyocera-20200416-Kyocera_CS_6003i.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P4060dn.ppd.gz	kyocera-20200416-Kyocera_FS-C8500DN.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_250ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_300ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_400ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_500ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_552ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_3212iJ.ppd.gz	foomatic-20200219-Brother-DCP-8040-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8220-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8440-Postscript-Brother.ppd.gz	kyocera-20200416-Kyocera_CS_2550ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_2550ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_4003i.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_4012iJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_5003i.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_6003i.ppd.gz	kyocera-20210301-TA_5057i.ppd.gz	kyocera-20210301-TA_6057i.ppd.gz	kyocera-20210301-TA_P-6040DN.ppd.gz	kyocera-20210309-TA_5057i.ppd.gz	kyocera-20210309-TA_6057i.ppd.gz	kyocera-20210309-TA_P-6040DN.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M8024cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M8124cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M8130cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P4035dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P4040dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P4045dn.ppd.gz	kyocera-20200416-Kyocera_CS_205c.ppd.gz	kyocera-20200416-Kyocera_CS_255c.ppd.gz	kyocera-20200416-Kyocera_FS-C8020MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C8025MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C8520MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C8525MFP.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_205c.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_255c.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3060ci.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M8224cidn.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M8228cidn.ppd.gz	kyocera-20210301-TA_P-C2480i_MFP.ppd.gz	kyocera-20210309-TA_P-C2480i_MFP.ppd.gz	
kyocera-20200416-Kyocera_CS_3500i.ppd.gz	kyocera-20200416-Kyocera_CS_3501i.ppd.gz	kyocera-20200416-Kyocera_CS_4500i.ppd.gz	kyocera-20200416-Kyocera_CS_4501i.ppd.gz	kyocera-20200416-Kyocera_CS_5500i.ppd.gz	kyocera-20200416-Kyocera_CS_5501i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3500i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_3501i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_4501i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_5500i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_5501i.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_358ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_408ciJ.ppd.gz	foomatic-20200219-Samsung-ML-2570-Postscript-Samsung.ppd.gz	kyocera-20190328-Kyocera_CS_7002i.ppd.gz	kyocera-20190328-Kyocera_CS_8002i.ppd.gz	kyocera-20190328-Kyocera_CS_9002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_7002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_8002i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_9002i.ppd.gz	kyocera-20200211-Kyocera_TASKalfa_7003i.ppd.gz	kyocera-20200416-Kyocera_CS_6500i.ppd.gz	kyocera-20200416-Kyocera_CS_6501i.ppd.gz	kyocera-20200416-Kyocera_CS_7003i.ppd.gz	kyocera-20200416-Kyocera_CS_8000i.ppd.gz	kyocera-20200416-Kyocera_CS_8001i.ppd.gz	kyocera-20200416-Kyocera_CS_8003i.ppd.gz	kyocera-20200416-Kyocera_CS_9003i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_6500i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_6501i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_8000i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_8001i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_8003i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_9003i.ppd.gz	kyocera-20190328-Kyocera_CS_3011i.ppd.gz	kyocera-20190328-Kyocera_CS_3212i.ppd.gz	kyocera-20190328-Kyocera_CS_3511i.ppd.gz	kyocera-20190328-Kyocera_CS_4012i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3011i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3212i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3511i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_4012i.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P4135dn.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P4140dn.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P4145dn.ppd.gz	kyocera-20210301-TA_7057i.ppd.gz	kyocera-20210301-TA_8057i.ppd.gz	kyocera-20210309-TA_7057i.ppd.gz	kyocera-20210309-TA_8057i.ppd.gz	kyocera-20200416-Kyocera_FS-6970DN.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M6635cidnJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P6230cdnJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P7240cdnJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_352ciJ.ppd.gz	kyocera-20210301-TA_3262i.ppd.gz	kyocera-20210301-TA_4062i.ppd.gz	kyocera-20210309-TA_3262i.ppd.gz	kyocera-20210309-TA_4062i.ppd.gz	kyocera-20190328-Kyocera_CS_3010i.ppd.gz	kyocera-20190328-Kyocera_CS_3510i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3010i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_3510i.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_4500i.ppd.gz	kyocera-20200416-Kyocera_CS_300i.ppd.gz	kyocera-20200416-Kyocera_CS_420i.ppd.gz	kyocera-20200416-Kyocera_CS_520i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_300i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_420i.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_520i.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M4028idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M4125idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M4132idn.ppd.gz	kyocera-20200416-Kyocera_CS_255.ppd.gz	kyocera-20200416-Kyocera_CS_305.ppd.gz	kyocera-20200416-Kyocera_FS-6025MFP.ppd.gz	kyocera-20200416-Kyocera_FS-6030MFP.ppd.gz	kyocera-20200416-Kyocera_FS-6525MFP.ppd.gz	kyocera-20200416-Kyocera_FS-6530MFP.ppd.gz	kyocera-20200416-Kyocera_FS-9130DN.ppd.gz	kyocera-20200416-Kyocera_FS-9530DN.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_255.ppd.gz	
kyocera-20200416-Kyocera_TASKalfa_305.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M4226idn.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M4230idn.ppd.gz	kyocera-20210301-TA_P-2540i_MFP.ppd.gz	kyocera-20210309-TA_P-2540i_MFP.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_4020i.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3060dnJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3160dnJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3145dnJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M2540dwJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M2640idwJ.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P2040dwJ.ppd.gz	kyocera-20190328-Kyocera_CS_306ci.ppd.gz	kyocera-20190328-Kyocera_CS_307ci.ppd.gz	kyocera-20190328-Kyocera_CS_356ci.ppd.gz	kyocera-20190328-Kyocera_CS_406ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_306ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_307ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_356ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_406ci.ppd.gz	kyocera-20190830-Kyocera_Generic_Color.ppd.gz	kyocera-20200416-Kyocera_CS_308ci.ppd.gz	kyocera-20200416-Kyocera_CS_358ci.ppd.gz	kyocera-20200416-Kyocera_CS_408ci.ppd.gz	kyocera-20200416-Kyocera_CS_508ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_358ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_408ci.ppd.gz	kyocera-20200416-Kyocera_TASKalfa_508ci.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_308ci.ppd.gz	kyocera-20210301-TA_302ci.ppd.gz	kyocera-20210301-TA_352ci.ppd.gz	kyocera-20210301-TA_402ci.ppd.gz	kyocera-20210301-TA_502ci.ppd.gz	kyocera-20210309-TA_302ci.ppd.gz	kyocera-20210309-TA_352ci.ppd.gz	kyocera-20210309-TA_402ci.ppd.gz	kyocera-20210309-TA_502ci.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6026cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6030cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P7035cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5021cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5520cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5520cdw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5521cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5521cdw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5525cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5526cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M5526cdw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5020cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5020cdw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5021cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5021cdw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5025cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5026cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P5026cdw.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P5018cdn.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_M3645idnJ.ppd.gz	kyocera-20210301-TA_P-C2155w_MFP.ppd.gz	kyocera-20210301-TA_P-C2650DW.ppd.gz	kyocera-20210301-TA_P-C2655w_MFP.ppd.gz	kyocera-20210309-TA_P-C2155w_MFP.ppd.gz	kyocera-20210309-TA_P-C2650DW.ppd.gz	kyocera-20210309-TA_P-C2655w_MFP.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6026cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6026cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6030cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6035cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6230cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6235cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6526cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6526cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6530cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6535cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6630cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M6635cidn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6021cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6035cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6130cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P6230cdn.ppd.gz	
kyocera-20190328-Kyocera_ECOSYS_P6235cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P7040cdn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P7240cdn.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_265ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_266ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_350ci.ppd.gz	kyocera-20190328-Kyocera_TASKalfa_351ci.ppd.gz	kyocera-20200416-Kyocera_FS-C2026MFP+.ppd.gz	kyocera-20200416-Kyocera_FS-C2126MFP+.ppd.gz	kyocera-20200416-Kyocera_FS-C2526MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C2626MFP.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_352ci.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3040dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3040idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3540dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3540idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3550idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3560idn.ppd.gz	kyocera-20200416-Kyocera_FS-4100DN.ppd.gz	kyocera-20200416-Kyocera_FS-4200DN.ppd.gz	kyocera-20200416-Kyocera_FS-4300DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5250DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5300DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5350DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5400DN.ppd.gz	kyocera-20210301-TA_357ci.ppd.gz	kyocera-20210301-TA_P-C3062DN.ppd.gz	kyocera-20210301-TA_P-C3062i_MFP.ppd.gz	kyocera-20210301-TA_P-C3066i_MFP.ppd.gz	kyocera-20210301-TA_P-C3562DN.ppd.gz	kyocera-20210301-TA_P-C3562i_MFP.ppd.gz	kyocera-20210301-TA_P-C3566i_MFP.ppd.gz	kyocera-20210301-TA_P-C4072DN.ppd.gz	kyocera-20210309-TA_357ci.ppd.gz	kyocera-20210309-TA_P-C3062DN.ppd.gz	kyocera-20210309-TA_P-C3062i_MFP.ppd.gz	kyocera-20210309-TA_P-C3066i_MFP.ppd.gz	kyocera-20210309-TA_P-C3562DN.ppd.gz	kyocera-20210309-TA_P-C3562i_MFP.ppd.gz	kyocera-20210309-TA_P-C3566i_MFP.ppd.gz	kyocera-20210309-TA_P-C4072DN.ppd.gz	kyocera-20190328-Kyocera_FS-5040DN.ppd.gz	kyocera-20200416-Kyocera_FS-2100D.ppd.gz	kyocera-20200416-Kyocera_FS-2100DN.ppd.gz	kyocera-20200416-Kyocera_FS-3040MFP+.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3145dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3145idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3645dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3645idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3655idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M3660idn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P3045dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P3050dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P3055dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P3060dn.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_M3860idn.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_M3860idnf.ppd.gz	kyocera-20200416-Kyocera_ECOSYS_P3260dn.ppd.gz	kyocera-20200416-Kyocera_FS-C2026MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C2126MFP.ppd.gz	kyocera-20200416-Kyocera_FS-C5100DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5150DN.ppd.gz	kyocera-20200416-Kyocera_FS-C5200DN.ppd.gz	kyocera-20200416-Kyocera_FS-3540MFP.ppd.gz	kyocera-20200416-Kyocera_FS-3640MFP.ppd.gz	kyocera-20200416-Kyocera_FS-3920DN.ppd.gz	kyocera-20200416-Kyocera_FS-4020DN.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3145dn.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3150dn.ppd.gz	kyocera-20200716-Kyocera_ECOSYS_P3155dn.ppd.gz	kyocera-20210301-TA_P-4531_MFP.ppd.gz	kyocera-20210301-TA_P-4531i_MFP.ppd.gz	kyocera-20210301-TA_P-4532DN.ppd.gz	kyocera-20210301-TA_P-4536_MFP.ppd.gz	kyocera-20210301-TA_P-4536i_MFP.ppd.gz	kyocera-20210301-TA_P-5032DN.ppd.gz	kyocera-20210301-TA_P-5532DN.ppd.gz	kyocera-20210301-TA_P-5536i_MFP.ppd.gz	kyocera-20210301-TA_P-6033DN.ppd.gz	kyocera-20210301-TA_P-6038i_MFP.ppd.gz	kyocera-20210301-TA_P-6038if_MFP.ppd.gz	kyocera-20210309-TA_P-4531_MFP.ppd.gz	
kyocera-20210309-TA_P-4531i_MFP.ppd.gz	kyocera-20210309-TA_P-4532DN.ppd.gz	kyocera-20210309-TA_P-4536_MFP.ppd.gz	kyocera-20210309-TA_P-4536i_MFP.ppd.gz	kyocera-20210309-TA_P-5032DN.ppd.gz	kyocera-20210309-TA_P-5532DN.ppd.gz	kyocera-20210309-TA_P-5536i_MFP.ppd.gz	kyocera-20210309-TA_P-6033DN.ppd.gz	kyocera-20210309-TA_P-6038i_MFP.ppd.gz	kyocera-20210309-TA_P-6038if_MFP.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2035dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2535dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2135dn.ppd.gz	kyocera-20200416-Kyocera_FS-1035MFP.ppd.gz	kyocera-20200416-Kyocera_FS-1135MFP.ppd.gz	kyocera-20200416-Kyocera_FS-2020D.ppd.gz	kyocera-20200416-Kyocera_FS-3040MFP.ppd.gz	kyocera-20200416-Kyocera_FS-3140MFP.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2030dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2040dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2135dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2235dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2530dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2540dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2540dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2635dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2635dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2640idw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2735dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2735dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_M2835dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2040dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2040dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2230dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2235dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2235dw.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2335d.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2335dn.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2335dw.ppd.gz	kyocera-20200416-Kyocera_FS-1030MFP.ppd.gz	kyocera-20200416-Kyocera_FS-1130MFP.ppd.gz	kyocera-20200416-Kyocera_FS-1028MFP.ppd.gz	kyocera-20200416-Kyocera_FS-1128MFP.ppd.gz	kyocera-20200416-Kyocera_FS-1320D.ppd.gz	kyocera-20200416-Kyocera_FS-1350DN.ppd.gz	kyocera-20200416-Kyocera_FS-1370DN.ppd.gz	kyocera-20200416-Kyocera_KM-2810.ppd.gz	kyocera-20200416-Kyocera_KM-2820.ppd.gz	kyocera-20210301-TA_P-3521_MFP.ppd.gz	kyocera-20210301-TA_P-3522DW.ppd.gz	kyocera-20210301-TA_P-3527w_MFP.ppd.gz	kyocera-20210301-TA_P-4020DN.ppd.gz	kyocera-20210301-TA_P-4020DW.ppd.gz	kyocera-20210301-TA_P-4020_MFP.ppd.gz	kyocera-20210301-TA_P-4025w_MFP.ppd.gz	kyocera-20210301-TA_P-4026iw_MFP.ppd.gz	kyocera-20210309-TA_P-3521_MFP.ppd.gz	kyocera-20210309-TA_P-3522DW.ppd.gz	kyocera-20210309-TA_P-3527w_MFP.ppd.gz	kyocera-20210309-TA_P-4020DN.ppd.gz	kyocera-20210309-TA_P-4020DW.ppd.gz	kyocera-20210309-TA_P-4020_MFP.ppd.gz	kyocera-20210309-TA_P-4025w_MFP.ppd.gz	kyocera-20210309-TA_P-4026iw_MFP.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2035d.ppd.gz	kyocera-20190328-Kyocera_ECOSYS_P2135d.ppd.gz	kyocera-20190830-Kyocera_Generic_Monochrome.ppd.gz	kyocera-20200416-Kyocera_FS-1120D.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_4-ps.ppd.gz
+foomatic-20200219-Citizen-ProJet_IIc-cdj500.ppd.gz	foomatic-20200219-DEC-DECwriter_520ic-cdj500.ppd.gz	foomatic-20200219-HP-DeskJet_420C-cdj500.ppd.gz	foomatic-20200219-Olivetti-JP470-cdj500.ppd.gz
 foomatic-20200219-Compaq-IJ1200-drv_z42.ppd.gz	foomatic-20200219-Lexmark-X73-drv_z42.ppd.gz	foomatic-20200219-Lexmark-Z42-drv_z42.ppd.gz	foomatic-20200219-Lexmark-Z43-drv_z42.ppd.gz
 foomatic-20200219-Compaq-IJ750-lxm3200X.ppd.gz	foomatic-20200219-Lexmark-3200-lxm3200X.ppd.gz	foomatic-20200219-Lexmark-Z31-lxm3200X.ppd.gz
 foomatic-20200219-Compaq-IJ900-lxm5700m.ppd.gz	foomatic-20200219-Lexmark-5700-lxm5700m.ppd.gz	foomatic-20200219-Xerox-DocuPrint_XJ8C-lxm5700m.ppd.gz	foomatic-20200219-Xerox-WorkCentre_470cx-lxm5700m.ppd.gz
@@ -59,9 +58,18 @@
 foomatic-20200219-Epson-Stylus_Color_II-stc2X.upp.ppd.gz
 foomatic-20200219-Epson-Stylus_Photo_720-Stp720pX.upp.ppd.gz
 foomatic-20200219-Epson-Stylus_Photo_870-Stp870pX.upp.ppd.gz
-foomatic-20200219-Generic-PCL_3_Printer-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_650C-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_750C-pcl3.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C11-pcl3.ppd.gz	foomatic-20191029-Apollo-P-1200-pcl3.ppd.gz	foomatic-20191029-Apollo-P-1220_Barbie-pcl3.ppd.gz	foomatic-20191029-Apollo-P-1250-pcl3.ppd.gz	foomatic-20200219-Lexmark-1020_Business-pcl3.ppd.gz	foomatic-20200219-Lexmark-3000-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_100C-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_150C-pcl3.ppd.gz	foomatic-20200219-Samsung-SI-630A-pcl3.ppd.gz	foomatic-20200219-Sharp-AJ-1800-pcl3.ppd.gz	foomatic-20200219-Sharp-AJ-1805-pcl3.ppd.gz	foomatic-20200219-Sharp-AJ-2000-pcl3.ppd.gz	foomatic-20200219-Sharp-AJ-2005-pcl3.ppd.gz	foomatic-20200219-Xerox-DocuPrint_M750-pcl3.ppd.gz	foomatic-20200219-Xerox-DocuPrint_M760-pcl3.ppd.gz
-foomatic-20200219-Generic-PostScript_Printer-Postscript.ppd.gz	foomatic-20200219-Lexmark-C500n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C510b-Postscript.ppd.gz	foomatic-20200219-Lexmark-C530dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C532dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C532n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C752b-Postscript.ppd.gz	foomatic-20200219-Lexmark-C770dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C770n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C772dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C772n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C920n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935hdn-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_C710-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Rplus-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_S_1250-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_S_1855-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Se_3455-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T610-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T612-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T614-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T616-Postscript.ppd.gz	foomatic-20200219-Lexmark-T430-Postscript.ppd.gz	foomatic-20200219-Lexmark-T640-Postscript.ppd.gz	foomatic-20200219-Lexmark-T642-Postscript.ppd.gz	foomatic-20200219-Lexmark-T644-Postscript.ppd.gz	foomatic-20200219-Lexmark-W812-Postscript.ppd.gz	foomatic-20200219-Lexmark-W840-Postscript.ppd.gz	foomatic-20200219-Lexmark-X560n-Postscript.ppd.gz	foomatic-20200219-Lexmark-4039_10plus-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Color_1200-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Color_1275-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Color_40-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Color_45-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_E310-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_E312-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Ep-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_K_1220-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_M410-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_M412-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_W810-Postscript.ppd.gz	foomatic-20200219-Fuji_Xerox-DocuPrint_CM305_df-Postscript.ppd.gz	foomatic-20170101-Samsung-M332x_382x_402x-Postscript.ppd.gz	foomatic-20200219-Canon-GP_405-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C2570-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C2570i-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C2620N-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3100-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3170-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3170i-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3200-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C5800-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C6800-Postscript.ppd.gz	foomatic-20200219-Dell-M5200-Postscript.ppd.gz	
foomatic-20200219-Dell-S2500-Postscript.ppd.gz	foomatic-20200219-Genicom-LN45-Postscript.ppd.gz	foomatic-20200219-Genicom-ML450-Postscript.ppd.gz	foomatic-20200219-Genicom-microLaser_12-Postscript.ppd.gz	foomatic-20200219-Genicom-microLaser_210-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1226tn-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1422-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1532_Express-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1540_MFP-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1560_MFP-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1570_MFP-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1572_MFP-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1580_MFP-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1585n-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1650_MFP-Postscript.ppd.gz	foomatic-20200219-Minolta-PagePro_1250E-Postscript.ppd.gz	foomatic-20200219-Minolta-magicolor_3100-Postscript.ppd.gz	foomatic-20200219-NEC-SuperScript_4600N-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3073PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3074PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3093PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3094PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3113PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3114PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC5115PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC5160PS-Postscript.ppd.gz	foomatic-20200219-Oce-cm2510-Postscript.ppd.gz	foomatic-20200219-Oce-cm2520-Postscript.ppd.gz	foomatic-20200219-Oce-cm3521-Postscript.ppd.gz	foomatic-20200219-Oce-cm4010-Postscript.ppd.gz	foomatic-20200219-Oce-cm4520-Postscript.ppd.gz	foomatic-20200219-Oce-cm4521-Postscript.ppd.gz	foomatic-20200219-Oce-cm5520-Postscript.ppd.gz	foomatic-20200219-Oce-cm6520-Postscript.ppd.gz	foomatic-20200219-Oce-fx3000-Postscript.ppd.gz	foomatic-20200219-Oce-im2330-Postscript.ppd.gz	foomatic-20200219-Oce-im2830-Postscript.ppd.gz	foomatic-20200219-Oce-im3512-Postscript.ppd.gz	foomatic-20200219-Oce-im3530-Postscript.ppd.gz	foomatic-20200219-Oce-im4512-Postscript.ppd.gz	foomatic-20200219-Oce-im4530-Postscript.ppd.gz	foomatic-20200219-Oce-im5530-Postscript.ppd.gz	foomatic-20200219-Oce-im6020-Postscript.ppd.gz	foomatic-20200219-Oce-im6030-Postscript.ppd.gz	foomatic-20200219-Oce-im7230-Postscript.ppd.gz	foomatic-20200219-Oce-im7520-Postscript.ppd.gz	foomatic-20200219-Oce-im8530-Postscript.ppd.gz	foomatic-20200219-Oce-im9220-Postscript.ppd.gz	foomatic-20200219-Oce-sx2100-Postscript.ppd.gz	foomatic-20200219-Oki-B2540_MFP-Postscript.ppd.gz	foomatic-20200219-Oki-B4545_MFP-Postscript.ppd.gz	foomatic-20200219-Oki-B6200-Postscript.ppd.gz	foomatic-20200219-Oki-B6250-Postscript.ppd.gz	foomatic-20200219-Oki-B6500-Postscript.ppd.gz	foomatic-20200219-Oki-C5540_MFP-Postscript.ppd.gz	foomatic-20200219-Oki-C5550_MFP-Postscript.ppd.gz	foomatic-20200219-Oki-C5750-Postscript.ppd.gz	foomatic-20200219-Oki-C6050-Postscript.ppd.gz	foomatic-20200219-Oki-C6150-Postscript.ppd.gz	foomatic-20200219-Oki-C710-Postscript.ppd.gz	foomatic-20200219-Oki-C9800_MFP-Postscript.ppd.gz	foomatic-20200219-Panasonic-KX-P8420-Postscript.ppd.gz	foomatic-20200219-Panasonic-KX-P8475-Postscript.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP2000-Postscript.ppd.gz	foomatic-20200219-Sharp-AR-B07-Postscript.ppd.gz	foomatic-20200219-Sharp-AR-M256-Postscript.ppd.gz	foomatic-20200219-Sharp-AR-NB2A-Postscript.ppd.gz	foomatic-20200219-Sharp-MX-NB11-Postscript.ppd.gz	foomatic-20200219-Tally-LN45-Postscript.ppd.gz	foomatic-20200219-Tally-ML450-Postscript.ppd.gz	
foomatic-20200219-Tektronix-Phaser_780-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_850-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_850DP-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_850DX-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_850N-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_PowerPro_12-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_Pro_600-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_Pro_8-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_Pro_E-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_XL_Basic-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_XL_PS17-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_XL_PS35-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_XL_Turbo-Postscript.ppd.gz	foomatic-20200219-Toshiba-DP2570-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_165-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_200L-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_202L-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_205-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_230-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_232-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_2500c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_280-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_281c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_282-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_350-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3500c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3510c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3511-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_351c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_352-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_450-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_4511-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_451c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_452-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_520-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_600-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_720-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_850-Postscript.ppd.gz	foomatic-20200219-Xante-Accel-a-Writer_4G-Postscript.ppd.gz	foomatic-20200219-Xante-FilmMaker_4-Postscript.ppd.gz	foomatic-20200219-Xante-PlateMaker_4-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuColor_2006-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N17-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N2125-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N24-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N3225-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N40-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4025-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4525-Postscript.ppd.gz	foomatic-20200219-Xerox-FaxCenter_2121-Postscript.ppd.gz	foomatic-20200219-Xerox-FaxCentre_2218-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_1235-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_3200MFP-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_3400-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_3450-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_4510-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_5500-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6100-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6120-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6130-Postscript.ppd.gz	
foomatic-20200219-Xerox-Phaser_6180-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6360-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_7400-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_7760-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_790-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8200B-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8200DP-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8200DX-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8200N-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8560-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_860B-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_860DP-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_860DX-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_860N-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8860-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_24-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_4118-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7228-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7232-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7242-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7328-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7335-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_C2424-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_M118-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_M20-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_M24-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_Pro_128-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_Pro_133-Postscript.ppd.gz	foomatic-20200219-Apple-12_640ps-Postscript.ppd.gz	foomatic-20200219-Apple-LaserWriter_16_600-Postscript.ppd.gz	foomatic-20200219-Apple-LaserWriter_4_600-Postscript.ppd.gz	foomatic-20200219-Apple-LaserWriter_Pro_630-Postscript.ppd.gz	foomatic-20200219-Epson-EPL-5700PS-Postscript.ppd.gz	foomatic-20200219-Heidelberg-Digimaster_9110-Postscript.ppd.gz	foomatic-20200219-Hitachi-DDP_70_with_MicroPress-Postscript.ppd.gz	foomatic-20200219-IBM-4029_10P-Postscript.ppd.gz	foomatic-20200219-IBM-4303_Network_Color_Printer-Postscript.ppd.gz	foomatic-20200219-Kodak-DigiSource_9110-Postscript.ppd.gz	foomatic-20200219-Oki-OL610e_PS-Postscript.ppd.gz	foomatic-20200219-Oki-OL810e_PS-Postscript.ppd.gz	foomatic-20200219-Oki-Okipage_12i-Postscript.ppd.gz	foomatic-20200219-Oki-Okipage_20DXn-Postscript.ppd.gz	foomatic-20200219-Oki-Okipage_8c-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C20-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C55-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N32-Postscript.ppd.gz	foomatic-20200219-Apple-LaserWriter_IIg-Postscript.ppd.gz	foomatic-20200219-DEC-LN07-Postscript.ppd.gz	foomatic-20200219-NEC-SilentWriter_LC_890-Postscript.ppd.gz	foomatic-20200219-NEC-Silentwriter2_S60P-Postscript.ppd.gz	foomatic-20200219-NEC-Silentwriter2_model_290-Postscript.ppd.gz	foomatic-20200219-NEC-Silentwriter_95f-Postscript.ppd.gz	foomatic-20200219-Oki-OL830Plus-Postscript.ppd.gz	foomatic-20200219-Panasonic-KX-P5400-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_350-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_IISX-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_PX-Postscript.ppd.gz	foomatic-20200219-Kodak-IS_70_CPII-Postscript.ppd.gz	foomatic-20200219-Ricoh-Aficio_5206-Postscript.ppd.gz
+foomatic-20200219-Generic-PCL_3_Printer-pcl3.ppd.gz	foomatic-20200219-HP-DeskJet_340C-pcl3.ppd.gz	foomatic-20200219-HP-DeskJet_320-pcl3.ppd.gz	foomatic-20200219-HP-DeskJet_540C-pcl3.ppd.gz	foomatic-20200219-HP-DeskJet_560C-pcl3.ppd.gz	foomatic-20200219-HP-DeskJet_310-pcl3.ppd.gz	foomatic-20200219-HP-DesignJet_ColorPro_CAD-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_650C-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_750C-pcl3.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C11-pcl3.ppd.gz	foomatic-20191029-Apollo-P-1200-pcl3.ppd.gz	foomatic-20191029-Apollo-P-1220_Barbie-pcl3.ppd.gz	foomatic-20191029-Apollo-P-1250-pcl3.ppd.gz	foomatic-20200219-Lexmark-1020_Business-pcl3.ppd.gz	foomatic-20200219-Lexmark-3000-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_100C-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_150C-pcl3.ppd.gz	foomatic-20200219-Samsung-SI-630A-pcl3.ppd.gz	foomatic-20200219-HP-DeskJet_Plus-pcl3.ppd.gz	foomatic-20200219-HP-DeskJet_Portable-pcl3.ppd.gz	foomatic-20200219-HP-DeskJet_200-pcl3.ppd.gz	foomatic-20200219-Sharp-AJ-1800-pcl3.ppd.gz	foomatic-20200219-Sharp-AJ-1805-pcl3.ppd.gz	foomatic-20200219-Sharp-AJ-2000-pcl3.ppd.gz	foomatic-20200219-Sharp-AJ-2005-pcl3.ppd.gz	foomatic-20200219-Xerox-DocuPrint_M750-pcl3.ppd.gz	foomatic-20200219-Xerox-DocuPrint_M760-pcl3.ppd.gz
+foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-pxlcolor.ppd.gz	foomatic-20200219-Lexmark-C720n-pxlcolor.ppd.gz	foomatic-20200219-Oki-C6000-pxlcolor.ppd.gz	foomatic-20200219-Oki-C7350-pxlcolor.ppd.gz	foomatic-20200219-Oki-C7550-pxlcolor.ppd.gz	foomatic-20200219-Oki-C9650-pxlcolor.ppd.gz	foomatic-20200219-Oki-C9800-pxlcolor.ppd.gz	foomatic-20200219-Panasonic-DP-180-pxlcolor.ppd.gz	foomatic-20200219-Panasonic-DP-C265-pxlcolor.ppd.gz	foomatic-20200219-Panasonic-DP-C305-pxlcolor.ppd.gz	foomatic-20200219-Panasonic-DP-C322-pxlcolor.ppd.gz	foomatic-20200219-Panasonic-DP-C405-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-550-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-550N-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-610-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-620-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-650-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-650N-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-660N-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-660ND-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_220cp-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_2830c-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3530c-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_4500c-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_4520c-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_5500c-pxlcolor.ppd.gz	foomatic-20200219-Brother-HL-2400CeN-pxlcolor.ppd.gz	foomatic-20200219-Brother-HL-3400CN-pxlcolor.ppd.gz	foomatic-20200219-Brother-HL-4000CN-pxlcolor.ppd.gz	foomatic-20200219-Brother-HL-4040CN-pxlcolor.ppd.gz	foomatic-20200219-Canon-imageRunner_C2550-pxlcolor.ppd.gz	foomatic-20200219-Canon-imageRunner_C5058-pxlcolor.ppd.gz	foomatic-20200219-Canon-imageRunner_C5068-pxlcolor.ppd.gz	foomatic-20200219-Canon-imageRunner_C5185-pxlcolor.ppd.gz	foomatic-20200219-Canon-imageRunner_C5185i-pxlcolor.ppd.gz	foomatic-20200219-Dell-3000cn-pxlcolor.ppd.gz	foomatic-20200219-Dell-3100cn-pxlcolor.ppd.gz	foomatic-20200219-Xerox-Phaser_3160N-pxlcolor.ppd.gz
 foomatic-20200219-Gestetner-C7528n-Postscript-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP332c-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-C7528n-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL7200-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP128-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-C7535n-Postscript-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP335c-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-C7535hdn-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL7300-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP135-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-C7435n-Postscript-Gestetner.ppd.gz	foomatic-20200219-Infotec-IPC_3535-Postscript-Infotec.ppd.gz	foomatic-20200219-Lanier-LP235c-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-C7435n-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL7100-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP35-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc38u-Postscript-Gestetner.ppd.gz	foomatic-20200219-Infotec-IPC2838-Postscript-Infotec.ppd.gz	foomatic-20200219-Lanier-LP138c-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-DSc38u-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL7000-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP28-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc328-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc332-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc338-Postscript-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_2028-Postscript-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_2432-Postscript-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_2838-Postscript-Infotec.ppd.gz	foomatic-20200219-Lanier-LD228c-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-LD232c-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-LD238c-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-DSc328-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-DSc332-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-DSc338-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_2228C-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2232C-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2238C-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-C2820-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-C3224-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-C3828-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc38-Postscript-Gestetner.ppd.gz	foomatic-20200219-Infotec-IP_280-Postscript-Infotec.ppd.gz	foomatic-20200219-Lanier-2138-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-DSc38-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP3800C-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-SLP38c-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-P7145-Postscript-Gestetner.ppd.gz	foomatic-20200219-Lanier-2145-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-P7145-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP4510-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP45-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-DSc224-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc232-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc524-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc532-Postscript-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC1032-Postscript-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC824-Postscript-Infotec.ppd.gz	foomatic-20200219-Lanier-LD024c-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-LD032c-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-DSc224-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-DSc232-Postscript-NRG.ppd.gz	
foomatic-20200219-Ricoh-Aficio_1224C-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_1232C-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-C2408-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-C2524-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-C2532-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-C3210-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-C7010-Postscript-Gestetner.ppd.gz	foomatic-20200219-Infotec-IPC1036-Postscript-Infotec.ppd.gz	foomatic-20200219-Lanier-LP_036c-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-C7010-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL5000-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP1036-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-P7132n-Postscript-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP032-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-P7132N-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP600N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP32-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-C7116-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-C7416-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-C7417-Postscript-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP116c-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-LP122c_LP121c-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-LP_020c-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-C7116-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-C7416-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-CC7417-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL2000-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL3000-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL3000e-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL3100-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP1620-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-CLP17-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-CLP18-Postscript-Savin.ppd.gz	foomatic-20200219-Ricoh-ColorLaser_AP828-Postscript-Ricoh.ppd.gz	foomatic-20200219-Gestetner-C7425dn-Postscript-Gestetner.ppd.gz	foomatic-20200219-Infotec-IPC_2525-Postscript-Infotec.ppd.gz	foomatic-20200219-Infotec-IPC_2525e-Postscript-Infotec.ppd.gz	foomatic-20200219-Lanier-LP125cx_LP126cn-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-C7425dn-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-C7425hdn-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL4000DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL4000HDN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP26DN-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-P7026-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7026n-Postscript-Gestetner.ppd.gz	foomatic-20200219-NRG-P7026-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-P7026N-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP2600-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP2600N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-LASER_AP2600-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-LASER_AP2600N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-SLP26-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-SLP26n-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-P7126-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7126n-Postscript-Gestetner.ppd.gz	foomatic-20200219-NRG-P7126-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-P7126N-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP2610-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP2610N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-LASER_AP2610-Postscript-Ricoh.ppd.gz	
foomatic-20200219-Ricoh-LASER_AP2610N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP26-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-MLP26n-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-P7325-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-P7325n-Postscript-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP025N_LP026N-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-LP025_LP026-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-P7325-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-P7325N-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP400-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP400N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-MLP25-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-MLP25n-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-P7431cn-Postscript-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP031c-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-P7431cn-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_CL1000N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP831-Postscript-Savin.ppd.gz
+foomatic-20200219-HP-2563-lp2563.ppd.gz
+foomatic-20200219-HP-DesignJet_100plus-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_110-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_120-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_130-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_30-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_5000-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_5500-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_70-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_90-cdnj500.ppd.gz
+foomatic-20200219-HP-DesignJet_230-dnj650c.ppd.gz	foomatic-20200219-HP-DesignJet_500-dnj650c.ppd.gz	foomatic-20200219-HP-DesignJet_500ps-dnj650c.ppd.gz	foomatic-20200219-HP-DesignJet_800-dnj650c.ppd.gz	foomatic-20200219-HP-DesignJet_350C-dnj650c.ppd.gz	foomatic-20200219-HP-DesignJet_650C-dnj650c.ppd.gz
+foomatic-20200219-HP-DesignJet_750-dnj750X.upp.ppd.gz	foomatic-20200219-HP-DesignJet_750C_Plus-dnj750X.upp.ppd.gz
+foomatic-20200219-HP-DeskJet_1000C-pnm2ppa.ppd.gz	foomatic-20200219-HP-DeskJet_712C-pnm2ppa.ppd.gz	foomatic-20200219-HP-DeskJet_722C-pnm2ppa.ppd.gz	foomatic-20200219-HP-DeskJet_820C-pnm2ppa.ppd.gz
+foomatic-20200219-HP-LaserJet_5-ljet4.ppd.gz	foomatic-20200219-HP-LaserJet_6-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-1000-ljet4.ppd.gz	foomatic-20200219-HP-LaserJet_4P-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1650-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1670N-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1850-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1870N-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1660e-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1650-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1651N-ljet4.ppd.gz	foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1270N-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2060-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2140-ljet4.ppd.gz	foomatic-20200219-Brother-HL-5030-ljet4.ppd.gz	foomatic-20200219-Brother-HL-5040-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1250-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1430-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1440-ljet4.ppd.gz	foomatic-20200219-Epson-AL-C8500-ljet4.ppd.gz	foomatic-20200219-Epson-AL-C8500PS-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1070-ljet4.ppd.gz	foomatic-20200219-Canon-GP_335-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-1760-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-3360-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_2200-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_2800-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_3300-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_330s-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_3570-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_4570-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_5000-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_5570-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_6000-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_6570-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_8500-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_C5870U-ljet4.ppd.gz	foomatic-20200219-Canon-imageRunner_C6870U-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-5800PS-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N1600-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N1600PS-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2050PS-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2050PSplus-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2750-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2750PS-ljet4.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_20W-ljet4.ppd.gz	foomatic-20200219-Generic-PCL_5c_Printer-ljet4.ppd.gz	foomatic-20200219-Generic-PCL_5e_Printer-ljet4.ppd.gz	foomatic-20200219-IBM-Infoprint_12-ljet4.ppd.gz	foomatic-20200219-Kyocera-FS-1600-ljet4.ppd.gz	foomatic-20200219-Kyocera-FS-1600plus-ljet4.ppd.gz	foomatic-20200219-Kyocera-FS-3600-ljet4.ppd.gz	foomatic-20200219-Kyocera-FS-3600plus-ljet4.ppd.gz	foomatic-20200219-Kyocera-KM-4230-ljet4.ppd.gz	foomatic-20200219-Kyocera-KM-5230-ljet4.ppd.gz	foomatic-20200219-Lexmark-E120-ljet4.ppd.gz	foomatic-20200219-Lexmark-E120n-ljet4.ppd.gz	foomatic-20200219-Lexmark-E230-ljet4.ppd.gz	foomatic-20200219-Lexmark-E320-ljet4.ppd.gz	foomatic-20200219-Lexmark-Optra_E321-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_1100-ljet4.ppd.gz	foomatic-20200219-NEC-SuperScript_1400-ljet4.ppd.gz	foomatic-20200219-Ricoh-Aficio_220-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1250-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1450-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1450PS-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-1750-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2150PS-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2151N-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2151NPS-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2152W-ljet4.ppd.gz	
foomatic-20200219-Samsung-ML-2152WPS-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2250-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2551N-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-2552W-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7000-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7000N-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7000P-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7050-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7300-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-7300N-ljet4.ppd.gz	foomatic-20200219-Sharp-ARP350-ljet4.ppd.gz	foomatic-20200219-Sharp-ARP450-ljet4.ppd.gz	foomatic-20200219-Tektronix-Phaser_750DP-ljet4.ppd.gz	foomatic-20200219-Tektronix-Phaser_750DX-ljet4.ppd.gz	foomatic-20200219-Tektronix-Phaser_750N-ljet4.ppd.gz	foomatic-20200219-Tektronix-Phaser_750P-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_2135-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4400B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4400DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4400DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4400N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4500B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4500DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4500DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4500N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4510B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4510DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4510DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_4510N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_5500B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_5500DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_5500DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_5500DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_5500N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6130N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6180DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6180MFP-D-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6200B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6200DP-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6200DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6200N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6250B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6250DP-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6250DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6250DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6250N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6300DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6300N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6350DP-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6350DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6350DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6360DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_6360DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7300B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7300DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7300DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7300DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7300N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7400DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7400DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7400DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7400DXF-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7400N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7700DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7700DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7700GX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7750B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7750DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7750DXF-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7750GX-ljet4.ppd.gz	
foomatic-20200219-Xerox-Phaser_7760DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7760DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_7760GX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8400B-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8400BD-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8400DP-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8400DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8400N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8500DN-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8500N-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8550DP-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8550DT-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8550DX-ljet4.ppd.gz	foomatic-20200219-Xerox-Phaser_8560DN-ljet4.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7345-ljet4.ppd.gz	foomatic-20200219-Brother-DCP-7010-ljet4.ppd.gz	foomatic-20200219-Brother-DCP-7020-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1050-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1060-ljet4.ppd.gz	foomatic-20200219-Brother-HL-10h-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1260-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2035-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2040-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2135-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2142-ljet4.ppd.gz	foomatic-20200219-Brother-HL-5140-ljet4.ppd.gz	foomatic-20200219-Brother-HL-660-ljet4.ppd.gz	foomatic-20200219-Brother-HL-760-ljet4.ppd.gz	foomatic-20200219-Brother-HL-960-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-1260-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-430-ljet4.ppd.gz	foomatic-20200219-DEC-LN17-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-5700-ljet4.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_10V-ljet4.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_16DV-ljet4.ppd.gz	foomatic-20200219-HP-Color_LaserJet_5000-ljet4.ppd.gz	foomatic-20200219-IBM-Page_Printer_3112-ljet4.ppd.gz	foomatic-20200219-Infotec-4651_MF-ljet4.ppd.gz	foomatic-20200219-Kyocera-FS-6500-ljet4.ppd.gz	foomatic-20200219-Lexmark-Optra_E-ljet4.ppd.gz	foomatic-20200219-Lexmark-Optra_Eplus-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_6-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_6ex-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_8-ljet4.ppd.gz	foomatic-20200219-NEC-SuperScript_1800-ljet4.ppd.gz	foomatic-20200219-NEC-SuperScript_660i-ljet4.ppd.gz	foomatic-20200219-Oki-B410-ljet4.ppd.gz	foomatic-20200219-Oki-OL810ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_10e-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_10ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_14ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_6ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_8p-ljet4.ppd.gz	foomatic-20200219-Ricoh-Aficio_700-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-4600-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-5000a-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-6000-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-6100-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-85-ljet4.ppd.gz	foomatic-20200219-Samsung-QL-5100A-ljet4.ppd.gz	foomatic-20200219-Samsung-QL-6050-ljet4.ppd.gz	foomatic-20200219-Sharp-AR-161-ljet4.ppd.gz	foomatic-20200219-Xerox-Able_1406-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_4508-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4512-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4512PS-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P1202-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P8e-ljet4.ppd.gz	foomatic-20200219-Xerox-Document_Centre_400-ljet4.ppd.gz	foomatic-20200219-HP-LaserJet_3P_w_PCL5-ljet4.ppd.gz	foomatic-20200219-Oki-OL410e-ljet4.ppd.gz	foomatic-20200219-Oki-Super_6e-ljet4.ppd.gz	
foomatic-20200219-Ricoh-Aficio_401-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2120-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-5800-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2050-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-N2050plus-ljet4.ppd.gz
+foomatic-20200219-HP-LaserJet_5M-Postscript.ppd.gz	foomatic-20200219-Generic-PostScript_Printer-Postscript.ppd.gz	foomatic-20200219-HP-Business_Inkjet_9100-Postscript.ppd.gz	foomatic-20200219-Lexmark-C500n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C510b-Postscript.ppd.gz	foomatic-20200219-Lexmark-C530dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C532dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C532n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C752b-Postscript.ppd.gz	foomatic-20200219-Lexmark-C770dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C770n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C772dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C772n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C920n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935hdn-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_C710-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Rplus-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_S_1250-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_S_1855-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Se_3455-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T610-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T612-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T614-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T616-Postscript.ppd.gz	foomatic-20200219-Lexmark-T430-Postscript.ppd.gz	foomatic-20200219-Lexmark-T640-Postscript.ppd.gz	foomatic-20200219-Lexmark-T642-Postscript.ppd.gz	foomatic-20200219-Lexmark-T644-Postscript.ppd.gz	foomatic-20200219-Lexmark-W812-Postscript.ppd.gz	foomatic-20200219-Lexmark-W840-Postscript.ppd.gz	foomatic-20200219-Lexmark-X560n-Postscript.ppd.gz	foomatic-20200219-Lexmark-4039_10plus-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Color_1200-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Color_1275-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Color_40-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Color_45-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_E310-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_E312-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Ep-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_K_1220-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_M410-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_M412-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_W810-Postscript.ppd.gz	foomatic-20200219-Fuji_Xerox-DocuPrint_CM305_df-Postscript.ppd.gz	foomatic-20170101-Samsung-M332x_382x_402x-Postscript.ppd.gz	foomatic-20200219-Canon-GP_405-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C2570-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C2570i-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C2620N-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3100-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3170-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3170i-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3200-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C5800-Postscript.ppd.gz	
foomatic-20200219-Canon-imageRunner_C6800-Postscript.ppd.gz	foomatic-20200219-Dell-M5200-Postscript.ppd.gz	foomatic-20200219-Dell-S2500-Postscript.ppd.gz	foomatic-20200219-Genicom-LN45-Postscript.ppd.gz	foomatic-20200219-Genicom-ML450-Postscript.ppd.gz	foomatic-20200219-Genicom-microLaser_12-Postscript.ppd.gz	foomatic-20200219-Genicom-microLaser_210-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1226tn-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1422-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1532_Express-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1540_MFP-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1560_MFP-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1570_MFP-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1572_MFP-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1580_MFP-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1585n-Postscript.ppd.gz	foomatic-20200219-IBM-Infoprint_1650_MFP-Postscript.ppd.gz	foomatic-20200219-Minolta-PagePro_1250E-Postscript.ppd.gz	foomatic-20200219-Minolta-magicolor_3100-Postscript.ppd.gz	foomatic-20200219-NEC-SuperScript_4600N-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3073PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3074PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3093PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3094PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3113PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC3114PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC5115PS-Postscript.ppd.gz	foomatic-20200219-Oce-PPC5160PS-Postscript.ppd.gz	foomatic-20200219-Oce-cm2510-Postscript.ppd.gz	foomatic-20200219-Oce-cm2520-Postscript.ppd.gz	foomatic-20200219-Oce-cm3521-Postscript.ppd.gz	foomatic-20200219-Oce-cm4010-Postscript.ppd.gz	foomatic-20200219-Oce-cm4520-Postscript.ppd.gz	foomatic-20200219-Oce-cm4521-Postscript.ppd.gz	foomatic-20200219-Oce-cm5520-Postscript.ppd.gz	foomatic-20200219-Oce-cm6520-Postscript.ppd.gz	foomatic-20200219-Oce-fx3000-Postscript.ppd.gz	foomatic-20200219-Oce-im2330-Postscript.ppd.gz	foomatic-20200219-Oce-im2830-Postscript.ppd.gz	foomatic-20200219-Oce-im3512-Postscript.ppd.gz	foomatic-20200219-Oce-im3530-Postscript.ppd.gz	foomatic-20200219-Oce-im4512-Postscript.ppd.gz	foomatic-20200219-Oce-im4530-Postscript.ppd.gz	foomatic-20200219-Oce-im5530-Postscript.ppd.gz	foomatic-20200219-Oce-im6020-Postscript.ppd.gz	foomatic-20200219-Oce-im6030-Postscript.ppd.gz	foomatic-20200219-Oce-im7230-Postscript.ppd.gz	foomatic-20200219-Oce-im7520-Postscript.ppd.gz	foomatic-20200219-Oce-im8530-Postscript.ppd.gz	foomatic-20200219-Oce-im9220-Postscript.ppd.gz	foomatic-20200219-Oce-sx2100-Postscript.ppd.gz	foomatic-20200219-Oki-B2540_MFP-Postscript.ppd.gz	foomatic-20200219-Oki-B4545_MFP-Postscript.ppd.gz	foomatic-20200219-Oki-B6200-Postscript.ppd.gz	foomatic-20200219-Oki-B6250-Postscript.ppd.gz	foomatic-20200219-Oki-B6500-Postscript.ppd.gz	foomatic-20200219-Oki-C5540_MFP-Postscript.ppd.gz	foomatic-20200219-Oki-C5550_MFP-Postscript.ppd.gz	foomatic-20200219-Oki-C5750-Postscript.ppd.gz	foomatic-20200219-Oki-C6050-Postscript.ppd.gz	foomatic-20200219-Oki-C6150-Postscript.ppd.gz	foomatic-20200219-Oki-C710-Postscript.ppd.gz	foomatic-20200219-Oki-C9800_MFP-Postscript.ppd.gz	foomatic-20200219-Panasonic-KX-P8420-Postscript.ppd.gz	foomatic-20200219-Panasonic-KX-P8475-Postscript.ppd.gz	foomatic-20200219-Ricoh-Aficio_AP2000-Postscript.ppd.gz	foomatic-20200219-Sharp-AR-B07-Postscript.ppd.gz	foomatic-20200219-Sharp-AR-M256-Postscript.ppd.gz	foomatic-20200219-Sharp-AR-NB2A-Postscript.ppd.gz	foomatic-20200219-Sharp-MX-NB11-Postscript.ppd.gz	
foomatic-20200219-Tally-LN45-Postscript.ppd.gz	foomatic-20200219-Tally-ML450-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_780-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_850-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_850DP-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_850DX-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_850N-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_PowerPro_12-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_Pro_600-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_Pro_8-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_Pro_E-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_XL_Basic-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_XL_PS17-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_XL_PS35-Postscript.ppd.gz	foomatic-20200219-Texas_Instruments-microLaser_XL_Turbo-Postscript.ppd.gz	foomatic-20200219-Toshiba-DP2570-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_165-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_200L-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_202L-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_205-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_230-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_232-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_2500c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_280-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_281c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_282-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_350-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3500c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3510c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3511-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_351c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_352-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_450-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_4511-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_451c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_452-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_520-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_600-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_720-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_850-Postscript.ppd.gz	foomatic-20200219-Xante-Accel-a-Writer_4G-Postscript.ppd.gz	foomatic-20200219-Xante-FilmMaker_4-Postscript.ppd.gz	foomatic-20200219-Xante-PlateMaker_4-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuColor_2006-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N17-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N2125-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N24-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N3225-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N40-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4025-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4525-Postscript.ppd.gz	foomatic-20200219-Xerox-FaxCenter_2121-Postscript.ppd.gz	foomatic-20200219-Xerox-FaxCentre_2218-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_1235-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_3200MFP-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_3400-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_3450-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_4510-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_5500-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6100-Postscript.ppd.gz	
foomatic-20200219-Xerox-Phaser_6120-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6130-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6180-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6360-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_7400-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_7760-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_790-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8200B-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8200DP-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8200DX-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8200N-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8560-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_860B-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_860DP-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_860DX-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_860N-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8860-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_24-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_4118-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7228-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7232-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7242-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7328-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7335-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_C2424-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_M118-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_M20-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_M24-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_Pro_128-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_Pro_133-Postscript.ppd.gz	foomatic-20200219-Apple-12_640ps-Postscript.ppd.gz	foomatic-20200219-Apple-LaserWriter_16_600-Postscript.ppd.gz	foomatic-20200219-Apple-LaserWriter_4_600-Postscript.ppd.gz	foomatic-20200219-Apple-LaserWriter_Pro_630-Postscript.ppd.gz	foomatic-20200219-Epson-EPL-5700PS-Postscript.ppd.gz	foomatic-20200219-Heidelberg-Digimaster_9110-Postscript.ppd.gz	foomatic-20200219-Hitachi-DDP_70_with_MicroPress-Postscript.ppd.gz	foomatic-20200219-IBM-4029_10P-Postscript.ppd.gz	foomatic-20200219-IBM-4303_Network_Color_Printer-Postscript.ppd.gz	foomatic-20200219-Kodak-DigiSource_9110-Postscript.ppd.gz	foomatic-20200219-Oki-OL610e_PS-Postscript.ppd.gz	foomatic-20200219-Oki-OL810e_PS-Postscript.ppd.gz	foomatic-20200219-Oki-Okipage_12i-Postscript.ppd.gz	foomatic-20200219-Oki-Okipage_20DXn-Postscript.ppd.gz	foomatic-20200219-Oki-Okipage_8c-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C20-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C55-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N32-Postscript.ppd.gz	foomatic-20200219-Apple-LaserWriter_IIg-Postscript.ppd.gz	foomatic-20200219-DEC-LN07-Postscript.ppd.gz	foomatic-20200219-NEC-SilentWriter_LC_890-Postscript.ppd.gz	foomatic-20200219-NEC-Silentwriter2_S60P-Postscript.ppd.gz	foomatic-20200219-NEC-Silentwriter2_model_290-Postscript.ppd.gz	foomatic-20200219-NEC-Silentwriter_95f-Postscript.ppd.gz	foomatic-20200219-Oki-OL830Plus-Postscript.ppd.gz	foomatic-20200219-Panasonic-KX-P5400-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_350-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_IISX-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_PX-Postscript.ppd.gz	foomatic-20200219-Kodak-IS_70_CPII-Postscript.ppd.gz	foomatic-20200219-Ricoh-Aficio_5206-Postscript.ppd.gz
+foomatic-20200219-HP-OfficeJet_500-cdj550.ppd.gz	foomatic-20200219-Oki-Okijet_2500-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C6-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C8-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_XJ6C-cdj550.ppd.gz	foomatic-20200219-Xerox-Document_Homecentre-cdj550.ppd.gz	foomatic-20200219-Xerox-WorkCentre_450cp-cdj550.ppd.gz
+foomatic-20200219-HP-PaintJet_XL300-pjxl300.ppd.gz
 foomatic-20200219-IBM-3853_JetPrinter-jetp3852.ppd.gz
 foomatic-20200219-Imagen-ImPress-imagen.ppd.gz
 foomatic-20200219-InfoPrint-Pro_1107EX-pxlmono-InfoPrint.ppd.gz	foomatic-20200219-InfoPrint-Pro_1357EX-pxlmono-InfoPrint.ppd.gz	foomatic-20200219-InfoPrint-Pro_907EX-pxlmono-InfoPrint.ppd.gz	foomatic-20200219-Gestetner-DSc428-pxlcolor-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc435-pxlcolor-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSc445-pxlcolor-Gestetner.ppd.gz	foomatic-20200219-Infotec-ISC_2428-pxlcolor-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_2835-pxlcolor-Infotec.ppd.gz	foomatic-20200219-Infotec-ISC_3545-pxlcolor-Infotec.ppd.gz	foomatic-20200219-Lanier-LD328c-pxlcolor-Lanier.ppd.gz	foomatic-20200219-Lanier-LD335c-pxlcolor-Lanier.ppd.gz	foomatic-20200219-Lanier-LD345c-pxlcolor-Lanier.ppd.gz	foomatic-20200219-NRG-DSc428-pxlcolor-NRG.ppd.gz	foomatic-20200219-NRG-DSc435-pxlcolor-NRG.ppd.gz	foomatic-20200219-NRG-DSc445-pxlcolor-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_3228C-pxlcolor-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_3235C-pxlcolor-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_3245C-pxlcolor-Ricoh.ppd.gz	foomatic-20200219-Savin-C2824-pxlcolor-Savin.ppd.gz	foomatic-20200219-Savin-C3528-pxlcolor-Savin.ppd.gz	foomatic-20200219-Savin-C4535-pxlcolor-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm651-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm660-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm675-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2151-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2160-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2175-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD151-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD160-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD175-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-DSm651-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-DSm660-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-DSm675-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_2051-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2060-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2075-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-4051-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-4060-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-4075-pxlmono-Savin.ppd.gz	foomatic-20200219-Gestetner-9002-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS2090-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD090-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-9005_9008_9002-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_2090-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-4090-pxlmono-Savin.ppd.gz	foomatic-20200219-Gestetner-6002-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-7502-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2060-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2075-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD060-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD075-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-6002_6005_6008-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-7502_7505_7508-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_1060-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_1075-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-2560-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-2575-pxlmono-Savin.ppd.gz	foomatic-20200219-Gestetner-10512-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS2105-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD0105-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-10515_10518_10512-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_2105-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-40105-pxlmono-Savin.ppd.gz	
foomatic-20200219-Gestetner-DSm622-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm627-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2122-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2127-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2132-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD122-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD127-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD132-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-DSm622-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-DSm627-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-DSm632-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_2022-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2027-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2032-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-4022-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-4027-pxlmono-Savin.ppd.gz	foomatic-20200219-Gestetner-2212-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-2712-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-3212-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS2022-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS2027-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS2032-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-5622-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-5627-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-5632-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-2205_2238_2212-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-2705_2738_2712-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-3205_3238_3212-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_1022-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_1027-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_1032-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-2522-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-2527-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-2532-pxlmono-Savin.ppd.gz	foomatic-20200219-Gestetner-3502-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-4502-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-4353_MF-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-4452_MF-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-5635-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-5645-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-3525_3508_3502-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-4525_4508_4502-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_1035-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_1045-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-2535_2235-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-2545_2245-pxlmono-Savin.ppd.gz	foomatic-20200219-Gestetner-3532_4235g-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-4532_4245g-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm635_635G-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm645_645G-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS2035-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS2045-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2135-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2145-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD035-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD045-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD135-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD145-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-3545_3518_3532-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-4545_4518_4532-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-DSm635-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-DSm645-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_2035-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2035e-pxlmono-Ricoh.ppd.gz	
foomatic-20200219-Ricoh-Aficio_2045-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2045e-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-4035_4135g-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-4035e_4135eG-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-4045_4145g-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-4045e_4145eG-pxlmono-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm618d-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm620d-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2018D-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2220D-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD118d-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD120d-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-DSm618d-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-DSm620d-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_2018D-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2020D-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-4018d-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-8020d-pxlmono-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm618-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm620-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2018-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2220-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD118-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD120-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-DSm618-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-DSm620-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_2018-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2020-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-4018-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-8020-pxlmono-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm415-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2215-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD015-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-DSm415-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_1515-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-3515-pxlmono-Savin.ppd.gz	foomatic-20200219-Gestetner-DSm615-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-DSm616-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS_2015-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS_2216-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD115-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-LD116-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-DSm615-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-DSm616-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_2015-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_2016-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-4015-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-8016-pxlmono-Savin.ppd.gz
@@ -69,10 +77,9 @@
 foomatic-20200219-Lanier-SG3110SFNw-pxlcolor-Lanier.ppd.gz	foomatic-20200219-NRG-SG3110SFNw-pxlcolor-NRG.ppd.gz	foomatic-20200219-Ricoh-AficioSG3110SFNw-pxlcolor-Ricoh.ppd.gz	foomatic-20200219-Savin-SG3110SFNw-pxlcolor-Savin.ppd.gz
 foomatic-20200219-Lanier-SP_3600DN-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_3600SF-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_3610SF-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-SP_3600DN-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_3600SF-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_3610SF-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_3600DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_3600SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_3610SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_3600DN-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-SP_3600SF-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-SP_3610SF-Postscript-Savin.ppd.gz
 foomatic-20200219-Lanier-SP_C360SFNw-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C361SFNw-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C360SFNw-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_C360SNw-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_C361SFNw-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-SP_C360SFNw-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C360SNw-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C361SFNw-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C360SFNw-Postscript-Savin.ppd.gz	foomatic-20200219-Savin-SP_C361SFNw-Postscript-Savin.ppd.gz	foomatic-20200219-Gestetner-GS1227-Postscript-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_2702-Postscript-Gestetner.ppd.gz	foomatic-20200219-NRG-IM_2702-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_2702-Postscript-Ricoh.ppd.gz	foomatic-20200219-Lanier-SP_C360DNw-Postscript-Lanier.ppd.gz	foomatic-20200219-Ricoh-SP_C360DNw-Postscript-Ricoh.ppd.gz	foomatic-20200219-Savin-SP_C360DNw-Postscript-Savin.ppd.gz	foomatic-20200219-Ricoh-SP_400DN-Postscript-Ricoh.ppd.gz
-foomatic-20200219-Lexmark-C720n-pxlcolor.ppd.gz	foomatic-20200219-Oki-C6000-pxlcolor.ppd.gz	foomatic-20200219-Oki-C7350-pxlcolor.ppd.gz	foomatic-20200219-Oki-C7550-pxlcolor.ppd.gz	foomatic-20200219-Oki-C9650-pxlcolor.ppd.gz	foomatic-20200219-Oki-C9800-pxlcolor.ppd.gz	foomatic-20200219-Panasonic-DP-180-pxlcolor.ppd.gz	foomatic-20200219-Panasonic-DP-C265-pxlcolor.ppd.gz	foomatic-20200219-Panasonic-DP-C305-pxlcolor.ppd.gz	foomatic-20200219-Panasonic-DP-C322-pxlcolor.ppd.gz	foomatic-20200219-Panasonic-DP-C405-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-550-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-550N-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-610-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-620-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-650-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-650N-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-660N-pxlcolor.ppd.gz	foomatic-20200219-Samsung-CLP-660ND-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_220cp-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_2830c-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3530c-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_4500c-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_4520c-pxlcolor.ppd.gz	foomatic-20200219-Toshiba-e-Studio_5500c-pxlcolor.ppd.gz	foomatic-20200219-Brother-HL-2400CeN-pxlcolor.ppd.gz	foomatic-20200219-Brother-HL-3400CN-pxlcolor.ppd.gz	foomatic-20200219-Brother-HL-4000CN-pxlcolor.ppd.gz	foomatic-20200219-Brother-HL-4040CN-pxlcolor.ppd.gz	foomatic-20200219-Canon-imageRunner_C2550-pxlcolor.ppd.gz	foomatic-20200219-Canon-imageRunner_C5058-pxlcolor.ppd.gz	foomatic-20200219-Canon-imageRunner_C5068-pxlcolor.ppd.gz	foomatic-20200219-Canon-imageRunner_C5185-pxlcolor.ppd.gz	foomatic-20200219-Canon-imageRunner_C5185i-pxlcolor.ppd.gz	foomatic-20200219-Dell-3000cn-pxlcolor.ppd.gz	foomatic-20200219-Dell-3100cn-pxlcolor.ppd.gz	foomatic-20200219-Xerox-Phaser_3160N-pxlcolor.ppd.gz
 foomatic-20200219-Lexmark-X125-drv_x125.ppd.gz
 foomatic-20200219-Lexmark-Z11-cZ11somsom.ppd.gz
-foomatic-20200219-Minolta-PagePro_8L-ljet2p.ppd.gz	foomatic-20200219-Apple-LaserWriter_Select_360-ljet2p.ppd.gz	foomatic-20200219-Brother-DCP-1200-ljet2p.ppd.gz	foomatic-20200219-Brother-HL-1040-ljet2p.ppd.gz	foomatic-20200219-Brother-HL-630-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-6550MC-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-9500-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-9600-ljet2p.ppd.gz	foomatic-20200219-Lexmark-Valuewriter_300-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_1260-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_860-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_870-ljet2p.ppd.gz	foomatic-20200219-Oki-OL400e-ljet2p.ppd.gz	foomatic-20200219-Oki-OL400ex-ljet2p.ppd.gz	foomatic-20200219-Oki-OL600e-ljet2p.ppd.gz	foomatic-20200219-Panasonic-KX-P4410-ljet2p.ppd.gz	foomatic-20200219-Panasonic-KX-P6500-ljet2p.ppd.gz	foomatic-20200219-Raven-LP-410-ljet2p.ppd.gz	foomatic-20200219-Star-LS-04-ljet2p.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P12-ljet2p.ppd.gz
+foomatic-20200219-Minolta-PagePro_8L-ljet2p.ppd.gz	foomatic-20200219-Apple-LaserWriter_Select_360-ljet2p.ppd.gz	foomatic-20200219-Brother-DCP-1200-ljet2p.ppd.gz	foomatic-20200219-Brother-HL-1040-ljet2p.ppd.gz	foomatic-20200219-Brother-HL-630-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-6550MC-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-9500-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-9600-ljet2p.ppd.gz	foomatic-20200219-HP-LaserJet_2D-ljet2p.ppd.gz	foomatic-20200219-HP-LaserJet_2P-ljet2p.ppd.gz	foomatic-20200219-HP-LaserJet_2P_Plus-ljet2p.ppd.gz	foomatic-20200219-Lexmark-Valuewriter_300-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_1260-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_860-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_870-ljet2p.ppd.gz	foomatic-20200219-Oki-OL400e-ljet2p.ppd.gz	foomatic-20200219-Oki-OL400ex-ljet2p.ppd.gz	foomatic-20200219-Oki-OL600e-ljet2p.ppd.gz	foomatic-20200219-Panasonic-KX-P4410-ljet2p.ppd.gz	foomatic-20200219-Panasonic-KX-P6500-ljet2p.ppd.gz	foomatic-20200219-Raven-LP-410-ljet2p.ppd.gz	foomatic-20200219-Star-LS-04-ljet2p.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P12-ljet2p.ppd.gz
 foomatic-20200219-NEC-P2X-necp2xX.upp.ppd.gz
 foomatic-20200219-NEC-PICTY180-picty180.ppd.gz
 foomatic-20200219-NEC-PinWriter_P6-necp6.ppd.gz	foomatic-20200219-NEC-PinWriter_P60-necp6.ppd.gz	foomatic-20200219-NEC-PinWriter_P6_plus-necp6.ppd.gz	foomatic-20200219-NEC-PinWriter_P7-necp6.ppd.gz	foomatic-20200219-NEC-PinWriter_P70-necp6.ppd.gz	foomatic-20200219-NEC-PinWriter_P7_plus-necp6.ppd.gz	foomatic-20200219-NEC-Pinwriter_P20-necp6.ppd.gz	foomatic-20200219-Star-LC24-100-necp6.ppd.gz
@@ -80,27 +87,27 @@
 foomatic-20200219-Oki-ML_320-okiibm.ppd.gz	foomatic-20200219-Oki-ML_321-okiibm.ppd.gz	foomatic-20200219-Oki-Microline_IBM_compatible_9_pin-okiibm.ppd.gz
 foomatic-20200219-Oki-Microline_182-oki182.ppd.gz	foomatic-20200219-Oki-Microline_192plus-oki182.ppd.gz
 foomatic-20200219-Oki-OL400w-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_4w-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_4wplus-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_6w-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_8w-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_8w_Lite-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_8z-oki4drv.ppd.gz
-foomatic-20200219-Oki-Okijet_2500-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C6-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C8-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_XJ6C-cdj550.ppd.gz	foomatic-20200219-Xerox-Document_Homecentre-cdj550.ppd.gz	foomatic-20200219-Xerox-WorkCentre_450cp-cdj550.ppd.gz
 foomatic-20200219-Panasonic-KX-P1150-eps9high.ppd.gz
 foomatic-20200219-Pentax-PocketJet_200-pentaxpj.ppd.gz	foomatic-20200219-Pentax-PocketJet_II-pentaxpj.ppd.gz
 foomatic-20200219-Ricoh-4081-r4081.ppd.gz	foomatic-20200219-Ricoh-4801-r4081.ppd.gz	foomatic-20200219-Ricoh-6000-r4081.ppd.gz
 foomatic-20200219-Ricoh-GX_E5550N-pxlcolor-Ricoh.ppd.gz	foomatic-20200219-Ricoh-GX_3050SFN-pxlcolor-Ricoh.ppd.gz	foomatic-20200219-Ricoh-GX_5050N-pxlcolor-Ricoh.ppd.gz	foomatic-20200219-Ricoh-GX_3050N-pxlcolor-Ricoh.ppd.gz	foomatic-20200219-Ricoh-GX_E3350N-pxlcolor-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-SP_2300L-pcl5-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_2300SFL-pcl5-Ricoh.ppd.gz
 foomatic-20200219-SiPix-Pocket_Printer_A6-sipixa6.upp.ppd.gz
 foomatic-20200219-Star-NX-1001-eps9mid.ppd.gz
 foomatic-20200219-Star-StarJet_48-sj48.ppd.gz
 foomatic-20200219-Tektronix-4693d-t4693dX.ppd.gz
 foomatic-20200219-Tektronix-4695-tek4696.ppd.gz	foomatic-20200219-Tektronix-4696-tek4696.ppd.gz	foomatic-20200219-Tektronix-4697-tek4696.ppd.gz
 fuji_xerox-20200402-fuji-xerox-20200402-fx-apeosportv-c3375.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-apeosportv-c4475.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docucentreiv-c2263.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docucentreiv-c2275.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docucentreiv-c3370.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docucentreiv-c3375.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docucolor-1450-ga.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docuprint-205.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docuprint-c3050.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docuprint-c3450-d.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docuprint-c4000-d.ppd.gz
-hp-20190918-hplip-3.19.6-hp-CopperheadXLP.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CopperheadXLP.ppd.gz	hp-20190918-hplip-3.19.6-hp-SPDOfficejetProBsize.ppd.gz	hplip-20200303-hplip-3.19.12-hp-SPDOfficejetProBsize.ppd.gz	hp-20190918-hplip-3.19.6-hp-CopperheadIPH15.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CopperheadIPH15.ppd.gz	hp-20190918-hplip-3.19.6-hp-CopperheadIPH.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CopperheadIPH.ppd.gz	hp-20190918-hplip-3.19.6-hp-Mimas15.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Mimas15.ppd.gz	hp-20190918-hplip-3.19.6-hp-MimasTDR.ppd.gz	hplip-20200303-hplip-3.19.12-hp-MimasTDR.ppd.gz	hp-20190918-hplip-3.19.6-hp-Mimas17.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Mimas17.ppd.gz	hp-20190918-hplip-3.19.6-hp-Mimas.ppd.gz	hp-20190918-hplip-3.19.6-hp-Saipan.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Mimas.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Saipan.ppd.gz	hp-20190918-hplip-3.19.6-hp-Copperhead12.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Copperhead12.ppd.gz	hp-20190918-hplip-3.19.6-hp-P15_CISS.ppd.gz	hp-20190918-hplip-3.19.6-hp-PyramidRefresh15.ppd.gz	hplip-20200303-hplip-3.19.12-hp-P15_CISS.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PyramidRefresh15.ppd.gz	hp-20190918-hplip-3.19.6-hp-CopperheadIPH17.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CopperheadIPH17.ppd.gz	hp-20190918-hplip-3.19.6-hp-Python11.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Python11.ppd.gz	hp-20190918-hplip-3.19.6-hp-SPDOfficejetProAsize.ppd.gz	hplip-20200303-hplip-3.19.12-hp-SPDOfficejetProAsize.ppd.gz	hp-20190918-hplip-3.19.6-hp-Python10.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Python10.ppd.gz	hp-20190918-hplip-3.19.6-hp-Corbett.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Corbett.ppd.gz	hp-20190918-hplip-3.19.6-hp-Pyramid15.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Pyramid15.ppd.gz	hp-20190918-hplip-3.19.6-hp-Python.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Python.ppd.gz	hp-20171121-hplip-3.17.10-hp-deskjet_f4210_series.ppd.gz	hp-20171121-hplip-3.17.10-hp-deskjet_f4213_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-Gemstone.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Gemstone.ppd.gz	hp-20190918-hplip-3.19.6-hp-OJProKx50.ppd.gz	hplip-20200303-hplip-3.19.12-hp-OJProKx50.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d2600_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_ink_advant_k109a-z.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4000_k210.ppd.gz	hp-20190918-hplip-3.19.6-hp-OJ7000.ppd.gz	hplip-20200303-hplip-3.19.12-hp-OJ7000.ppd.gz	hp-20190918-hplip-3.19.6-hp-Saipan15B.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Saipan15B.ppd.gz	hp-20190918-hplip-3.19.6-hp-Ampere.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Ampere.ppd.gz	hp-20190918-hplip-3.19.6-hp-DJ55xx.ppd.gz	hplip-20200303-hplip-3.19.12-hp-DJ55xx.ppd.gz	hp-20190918-hplip-3.19.6-hp-ViperPlusVIP.ppd.gz	hplip-20200303-hplip-3.19.12-hp-ViperPlusVIP.ppd.gz	hp-20190918-hplip-3.19.6-hp-StingrayOJ.ppd.gz	hplip-20200303-hplip-3.19.12-hp-StingrayOJ.ppd.gz	hp-20190918-hplip-3.19.6-hp-DJ9xxVIP.ppd.gz	hplip-20200303-hplip-3.19.12-hp-DJ9xxVIP.ppd.gz	hp-20190918-hplip-3.19.6-hp-Copperhead.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Copperhead.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_950c.ppd.gz	hp-20190918-hplip-3.19.6-hp-PyramidPlus.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PyramidPlus.ppd.gz	hp-20190918-hplip-3.19.6-hp-Stabler.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Stabler.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d4100_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PCL3-Class3.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d4200_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d4300_series.ppd.gz	
hplip-20200303-hplip-3.19.12-hp-deskjet_f300_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_f4100_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_5600_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1400_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1310_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4200_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4300_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_j5500_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3650.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3840.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d2400_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_5500_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_j3600_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1300_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d2300_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_f2200_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4255.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3600.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3700_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1200_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-CLE17.ppd.gz	hp-20190918-hplip-3.19.6-hp-Kapan.ppd.gz	hp-20190918-hplip-3.19.6-hp-PyramidRefresh17.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CLE17.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Kapan.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PyramidRefresh17.ppd.gz	hplip-20200303-hplip-3.19.12-hp-910.ppd.gz	hplip-20200303-hplip-3.19.12-hp-915.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_f2100_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_j3500_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1358_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d1400_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3740.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d1500_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-CLE.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CLE.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3420.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3500.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3910.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d1300_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3425.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3450.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3320.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3550.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4100_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1100_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3940.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3325.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3900.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3920.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4105.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4115_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1000_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-LJ-Class6.ppd.gz	hplip-20200303-hplip-3.19.12-hp-LJ-Class6.ppd.gz	hp-20190918-hplip-3.19.6-hp-Pyramid.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Pyramid.ppd.gz	hp-20190111-hplip-3.18.12-hp-PCL3-Class1B.ppd.gz	hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz	hplip-20200303-hplip-3.19.12-hp-LJ-Class3.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_p1505n-pcl3.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_p2014-pcl3.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_p2014n-pcl3.ppd.gz	hp-20190918-hplip-3.19.6-hp-LJ-Class2.ppd.gz	hplip-20200303-hplip-3.19.12-hp-LJ-Class2.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d1600_series.ppd.gz	
hplip-20200303-hplip-3.19.12-hp-PCL3-Class3A.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PCL3-Class3B.ppd.gz	hp-20190918-hplip-3.19.6-hp-ViperMinusVIP.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PCL4-Class1.ppd.gz	hplip-20200303-hplip-3.19.12-hp-ViperMinusVIP.ppd.gz	hplip-20201028-hplip-3.19.12-hp-PCL4-Class1.ppd.gz	hp-20190918-hplip-3.19.6-hp-LJ-Class1.ppd.gz	hplip-20200303-hplip-3.19.12-hp-LJ-Class1.ppd.gz	hp-20190918-hplip-3.19.6-hp-PSP470.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PSP470.ppd.gz	hplip-20200303-hplip-3.19.12-hp-DJGenericVIP.ppd.gz	hp-20190918-hplip-3.19.6-hp-PSP100.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PSP100.ppd.gz	hplip-20200303-hplip-3.19.12-hp-photosmart_a530_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-photosmart_a630_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-photosmart_a640_series.ppd.gz
-hp-20190918-hplip-3.19.6-hp-cm8060_mfp_with_edgeline-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-cm8050_mfp_with_edgeline-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-cm8060_mfp_with_edgeline-ps.ppd.gz	hp-20190918-hplip-3.19.6-hp-laserjet_m2727_mfp_series-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-laserjet_m2727_mfp_series-ps.ppd.gz	hp-20190918-hplip-3.19.6-hp-laserjet_m1522_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-laserjet_m1522_mfp-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1708dr_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1708_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_Z9dr_44in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1700dr_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1700_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_Z6dr_44in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_z5600_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_Z9_44in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_z6810ps_60in-ps.ppd.gz	hp-20190111-hplip-3.18.12-hp-designjet_z6810ps_42in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_Z6_44in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_z6610ps_60in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t3500-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_Z9_24in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_z2600_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_Z6_24in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t7200-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_6000ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_6000ps_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4000ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4000ps_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4500ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4500ps_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_d5800-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps_blueprinter-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp_blueprinter-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps_blueprinter-ps.ppd.gz	lexmark-20201101-Lexmark_CS310_Series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_z6800_photo-postscript.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_z6600-postscript.ppd.gz	lexmark-20201101-Lexmark_MS310_Series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_z5400-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z5200_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_mfp_m776-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_m856-ps.ppd.gz	hp-20190918-hplip-3.19.6-hp-postscript-laserjet-pro.ppd.gz	hplip-20200303-hplip-3.19.12-hp-postscript-laserjet-pro.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_flowmfp_m776-ps.ppd.gz	hp-20190918-hplip-3.19.6-hp-postscript-laserjet.ppd.gz	hplip-20200303-hplip-3.19.12-hp-postscript-laserjet.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_e85055-ps.ppd.gz	hp-20190918-hplip-3.19.6-hp-postscript-inkjet.ppd.gz	hplip-20200303-hplip-3.19.12-hp-postscript-inkjet.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_p55250-ps.ppd.gz	hp-20190918-hplip-3.19.6-hp-color_laserjet_pro_mfp_m277-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_pro_mfp_m277-ps.ppd.gz	hp-20190918-hplip-3.19.6-hp-laserjet_200_color_m251-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-laserjet_200_color_m251-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-laserjet_100_color_mfp_m175-ps.ppd.gz
-lexmark-20201101-Lexmark_CX920_Series.ppd.gz	lexmark-20201101-Lexmark_XC9200_Series.ppd.gz	lexmark-20201101-Lexmark_CS920_Series.ppd.gz	lexmark-20201101-Lexmark_C9200_Series.ppd.gz	lexmark-20201101-Lexmark_X950_Series.ppd.gz	lexmark-20201101-Lexmark_C950_Series.ppd.gz	lexmark-20201101-Lexmark_CX825_Series.ppd.gz	lexmark-20201101-Lexmark_CX860_Series.ppd.gz	lexmark-20201101-Lexmark_XC8100_Series.ppd.gz	lexmark-20201101-Lexmark_X790_Series.ppd.gz	lexmark-20201101-Lexmark_CX820_Series.ppd.gz	lexmark-20201101-Lexmark_XC6100_Series.ppd.gz	lexmark-20201101-Lexmark_CS820_Series.ppd.gz	lexmark-20201101-Lexmark_C6100_Series.ppd.gz	lexmark-20201101-Lexmark_MX910_Series.ppd.gz	lexmark-20201101-Lexmark_XM9100_Series.ppd.gz	lexmark-20201101-Lexmark_MS910_Series.ppd.gz	lexmark-20201101-Lexmark_X920_Series.ppd.gz	lexmark-20201101-Lexmark_MS820_Series.ppd.gz	lexmark-20201101-Lexmark_M5200_Series.ppd.gz	lexmark-20201101-Lexmark_B2860_Series.ppd.gz	lexmark-20201101-Lexmark_X740_Series.ppd.gz	lexmark-20201101-Lexmark_C740_Series.ppd.gz	lexmark-20201101-Lexmark_CX410_Series.ppd.gz	lexmark-20201101-Lexmark_CX625_Series.ppd.gz	lexmark-20201101-Lexmark_CX620_Series.ppd.gz	lexmark-20201101-Lexmark_XC2200_Series.ppd.gz	lexmark-20201101-Lexmark_CX520_Series.ppd.gz	lexmark-20201101-Lexmark_MC2500_Series.ppd.gz	lexmark-20201101-Lexmark_MC2600_Series.ppd.gz	lexmark-20201101-Lexmark_XC4200_Series.ppd.gz	lexmark-20201101-Lexmark_CS410_Series.ppd.gz	lexmark-20201101-Lexmark_CX510_Series.ppd.gz	lexmark-20201101-Lexmark_C2200_Series.ppd.gz	lexmark-20201101-Lexmark_C2500_Series.ppd.gz	lexmark-20201101-Lexmark_CS520_Series.ppd.gz	lexmark-20201101-Lexmark_CS620_Series.ppd.gz	lexmark-20201101-Lexmark_XC2100_Series.ppd.gz	lexmark-20201101-Lexmark_X548_Series.ppd.gz	lexmark-20201101-Lexmark_CS510_Series.ppd.gz	lexmark-20201101-Lexmark_CX420_Series.ppd.gz	lexmark-20201101-Lexmark_MC2400_Series.ppd.gz	lexmark-20201101-Lexmark_C2400_Series.ppd.gz	lexmark-20201101-Lexmark_CS420_Series.ppd.gz	lexmark-20201101-Lexmark_MX820_Series.ppd.gz	lexmark-20201101-Lexmark_CX725_Series.ppd.gz	lexmark-20201101-Lexmark_XC4100_Series.ppd.gz	lexmark-20201101-Lexmark_CX310_Series.ppd.gz	lexmark-20201101-Lexmark_XM7300_Series.ppd.gz	lexmark-20201101-Lexmark_6500e_Series.ppd.gz	lexmark-20201101-Lexmark_CS720_Series.ppd.gz	lexmark-20201101-Lexmark_CS725_Series.ppd.gz	lexmark-20201101-Lexmark_C4100_Series.ppd.gz	lexmark-20201101-Lexmark_MC2300_Series.ppd.gz	lexmark-20201101-Lexmark_MX6500e_Series.ppd.gz	lexmark-20201101-Lexmark_C2300_Series.ppd.gz	lexmark-20201101-Lexmark_MS810_Series.ppd.gz	lexmark-20201101-Lexmark_MS710_Series.ppd.gz	lexmark-20201101-Lexmark_M5100_Series.ppd.gz	lexmark-20201101-Lexmark_MX810_Series.ppd.gz	lexmark-20201101-Lexmark_XM7100_Series.ppd.gz	lexmark-20201101-Lexmark_CX430_Series.ppd.gz	lexmark-20201101-Lexmark_MC3400_Series.ppd.gz	lexmark-20201101-Lexmark_C3400_Series.ppd.gz	lexmark-20201101-Lexmark_CS430_Series.ppd.gz	lexmark-20201101-Lexmark_MS725_Series.ppd.gz	lexmark-20201101-Lexmark_C790_Series.ppd.gz	lexmark-20201101-Lexmark_MX620_Series.ppd.gz	lexmark-20201101-Lexmark_MB2600_Series.ppd.gz	lexmark-20201101-Lexmark_XM3200_Series.ppd.gz	lexmark-20201101-Lexmark_MX520_Series.ppd.gz	lexmark-20201101-Lexmark_XM1200_Series.ppd.gz	lexmark-20201101-Lexmark_MB2500_Series.ppd.gz	lexmark-20201101-Lexmark_MS620_Series.ppd.gz	lexmark-20201101-Lexmark_M1200_Series.ppd.gz	lexmark-20201101-Lexmark_B2500_Series.ppd.gz	lexmark-20201101-Lexmark_B2600_Series.ppd.gz	lexmark-20201101-Lexmark_M3200_Series.ppd.gz	
lexmark-20201101-Lexmark_MS520_Series.ppd.gz	lexmark-20201101-Lexmark_MX610_Series.ppd.gz	lexmark-20201101-Lexmark_XM3100_Series.ppd.gz	lexmark-20201101-Lexmark_C920_Series.ppd.gz	lexmark-20201101-Lexmark_MB2300_Series.ppd.gz	lexmark-20201101-Lexmark_MB2400_Series.ppd.gz	lexmark-20201101-Lexmark_MX320_Series.ppd.gz	lexmark-20201101-Lexmark_MX420_Series.ppd.gz	lexmark-20201101-Lexmark_MX720_Series.ppd.gz	lexmark-20201101-Lexmark_XM5300_Series.ppd.gz	lexmark-20201101-Lexmark_MB2700_Series.ppd.gz	lexmark-20201101-Lexmark_B2300_Series.ppd.gz	lexmark-20201101-Lexmark_B2400_Series.ppd.gz	lexmark-20201101-Lexmark_MS420_Series.ppd.gz	lexmark-20201101-Lexmark_MS320_Series.ppd.gz	lexmark-20201101-Lexmark_MX510_Series.ppd.gz	lexmark-20201101-Lexmark_XM1100_Series.ppd.gz	lexmark-20201101-Lexmark_MX310_Series.ppd.gz	lexmark-20201101-Lexmark_MS610_Series.ppd.gz	lexmark-20201101-Lexmark_M3100_Series.ppd.gz	lexmark-20201101-Lexmark_MX710_Series.ppd.gz	lexmark-20201101-Lexmark_XM5100_Series.ppd.gz	lexmark-20201101-Lexmark_MS510_Series.ppd.gz	lexmark-20201101-Lexmark_M1100_Series.ppd.gz	lexmark-20201101-Lexmark_MX410_Series.ppd.gz	lexmark-20201101-Lexmark_MX725_Series.ppd.gz
+hp-20190918-hplip-3.19.6-hp-CopperheadXLP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CopperheadXLP.ppd.gz	hp-20190918-hplip-3.19.6-hp-SPDOfficejetProBsize.ppd.gz	hplip-20201209-hplip-3.20.11-hp-SPDOfficejetProBsize.ppd.gz	hp-20190918-hplip-3.19.6-hp-CopperheadIPH15.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CopperheadIPH15.ppd.gz	hp-20190918-hplip-3.19.6-hp-CopperheadIPH.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CopperheadIPH.ppd.gz	hp-20190918-hplip-3.19.6-hp-Mimas15.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Mimas15.ppd.gz	hp-20190918-hplip-3.19.6-hp-MimasTDR.ppd.gz	hplip-20201209-hplip-3.20.11-hp-MimasTDR.ppd.gz	hp-20190918-hplip-3.19.6-hp-Mimas17.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Mimas17.ppd.gz	hp-20190918-hplip-3.19.6-hp-Mimas.ppd.gz	hp-20190918-hplip-3.19.6-hp-Saipan.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Mimas.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Saipan.ppd.gz	hplip-20210520-hplip-3.21.4-hp-envy_6400_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-Copperhead12.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Copperhead12.ppd.gz	hp-20190918-hplip-3.19.6-hp-P15_CISS.ppd.gz	hp-20190918-hplip-3.19.6-hp-PyramidRefresh15.ppd.gz	hplip-20201209-hplip-3.20.11-hp-PyramidRefresh15.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_9800.ppd.gz	hp-20190918-hplip-3.19.6-hp-CopperheadIPH17.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CopperheadIPH17.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_9600.ppd.gz	hp-20190918-hplip-3.19.6-hp-Python11.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Python11.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_k7100.ppd.gz	hp-20190918-hplip-3.19.6-hp-SPDOfficejetProAsize.ppd.gz	hplip-20201209-hplip-3.20.11-hp-SPDOfficejetProAsize.ppd.gz	hp-20190918-hplip-3.19.6-hp-Python10.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Python10.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_8700_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-Corbett.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Corbett.ppd.gz	hplip-20201209-hplip-3.20.11-hp-P15_CISS.ppd.gz	hp-20190918-hplip-3.19.6-hp-Pyramid15.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Pyramid15.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_f4200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c3100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c6200_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-Python.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Python.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j5700_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_6300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_6500.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c5100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c5200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2350_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_6200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j4500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_2570_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c6100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_6800.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j6400_series.ppd.gz	
hplip-20201209-hplip-3.20.11-hp-photosmart_7700_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c7100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_6980_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_7200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_2600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_3200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_8000_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5700.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d2500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7800_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_8200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_8400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4340_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d5100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_6600.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_3300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_8100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4380_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c7200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c8100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d7100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d7200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d7300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5650.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5900_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_6100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_7400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7900_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c5500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d5060_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d5300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_6940_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_7300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_2700_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_3100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7550.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d5400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d6100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d7400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2210_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5850.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j4680_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-business_inkjet_1000.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5552.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5600.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5652.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5800.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d730.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_f735.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_ink_advant_k209a-z.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_6150_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j4660_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7345.ppd.gz	
hplip-20201209-hplip-3.20.11-hp-psc_2200_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-Gemstone.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Gemstone.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_pro_k8600.ppd.gz	hp-20190918-hplip-3.19.6-hp-OJProKx50.ppd.gz	hplip-20201209-hplip-3.20.11-hp-OJProKx50.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d2600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_ink_advant_k109a-z.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4000_k210.ppd.gz	hp-20190918-hplip-3.19.6-hp-OJ7000.ppd.gz	hplip-20201209-hplip-3.20.11-hp-OJ7000.ppd.gz	hp-20190918-hplip-3.19.6-hp-Saipan15B.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Saipan15B.ppd.gz	hp-20190918-hplip-3.19.6-hp-Ampere.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Ampere.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_1220c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_1280.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_9300.ppd.gz	hplip-20201209-hplip-3.20.11-hp-2500c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_1120.ppd.gz	hp-20190918-hplip-3.19.6-hp-DJ55xx.ppd.gz	hplip-20201209-hplip-3.20.11-hp-DJ55xx.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_1125.ppd.gz	hplip-20201209-hplip-3.20.11-hp-business_inkjet_1200.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_pro_k5400.ppd.gz	hp-20190918-hplip-3.19.6-hp-ViperPlusVIP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-ViperPlusVIP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-business_inkjet_1100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_pro_k5300.ppd.gz	hp-20190918-hplip-3.19.6-hp-StingrayOJ.ppd.gz	hplip-20201209-hplip-3.20.11-hp-StingrayOJ.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_5100_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-DJ9xxVIP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-DJ9xxVIP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_920c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_940c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3820.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_970c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_975c.ppd.gz	hp-20190918-hplip-3.19.6-hp-Copperhead.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Copperhead.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3816.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_930c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_955c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_v40.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_p1000.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_p1100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3810.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3819.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3822.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3870.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_916c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_932c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_933c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_934c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_935c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_948c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_950c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_952c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_957c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_959c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_g55.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_g55xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_g85.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_g85xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_g95.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_k60.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_k60xi.ppd.gz	
hplip-20201209-hplip-3.20.11-hp-officejet_k80.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_k80xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_v30.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_v40xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_v45.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_720.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_750.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_750xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_760.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_780.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_780xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_900_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_920.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_950.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_950vr.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_950xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-2000c.ppd.gz	hp-20190918-hplip-3.19.6-hp-PyramidPlus.ppd.gz	hplip-20201209-hplip-3.20.11-hp-PyramidPlus.ppd.gz	hp-20190918-hplip-3.19.6-hp-Stabler.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Stabler.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d4100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d4200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d4300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_f300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_f4100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_5600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1310_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j5500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3650.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3840.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d2400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_5500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j3600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d2300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_f2200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4255.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3600.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1200_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-CLE17.ppd.gz	hp-20190918-hplip-3.19.6-hp-Kapan.ppd.gz	hp-20190918-hplip-3.19.6-hp-PyramidRefresh17.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Kapan.ppd.gz	hplip-20201209-hplip-3.20.11-hp-910.ppd.gz	hplip-20201209-hplip-3.20.11-hp-915.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_f2100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j3500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1358_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3700_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d1400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3740.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d1500_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-CLE.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CLE17.ppd.gz	hplip-20201209-hplip-3.20.11-hp-PyramidRefresh17.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3420.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3500.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3910.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d1300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3425.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3450.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3320.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3550.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4100_series.ppd.gz	
hplip-20201209-hplip-3.20.11-hp-psc_1100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CLE.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3940.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3325.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3900.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3920.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4105.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4115_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-LJ-Class6.ppd.gz	hplip-20201209-hplip-3.20.11-hp-LJ-Class6.ppd.gz	hp-20190918-hplip-3.19.6-hp-Pyramid.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Pyramid.ppd.gz	hp-20190111-hplip-3.18.12-hp-PCL3-Class1B.ppd.gz	hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz	hplip-20201209-hplip-3.20.11-hp-LJ-Class3.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_p1505n-pcl3.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_p2014-pcl3.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_p2014n-pcl3.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_640c.ppd.gz	hp-20190918-hplip-3.19.6-hp-LJ-Class2.ppd.gz	hplip-20201209-hplip-3.20.11-hp-LJ-Class2.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_t_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_692.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_695.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_895c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d1600_series.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-2100.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-2150.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-2200.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-2500.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-2600.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-2650.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-p2000-u.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-p2250.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_612c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_648c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_693.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_697.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_843c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_700.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_710.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_720.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_725.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_500.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_812c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_815c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_816c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_830c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_832c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_841c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_842c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_880c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_882c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r40xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r45.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r60.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r65.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r80.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r80xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_500.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_656c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_630c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_632c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_pro_1170c_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_505j.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_510.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_520.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_600.ppd.gz	
hplip-20201209-hplip-3.20.11-hp-deskjet_660.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_lx.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_350.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_630.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_1100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_400.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_670.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_670c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_670tv.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_672c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_680.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_682.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_825c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_845c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_890c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_520.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_570.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_580.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_590.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_610.ppd.gz	hplip-20201209-hplip-3.20.11-hp-printer_scanner_copier_300.ppd.gz	hp-20190918-hplip-3.19.6-hp-ViperMinusVIP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-ViperMinusVIP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_500c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_540.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_550c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_310.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_320.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_870c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_855c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_pro_1150c.ppd.gz	hp-20190918-hplip-3.19.6-hp-LJ-Class1.ppd.gz	hplip-20201209-hplip-3.20.11-hp-LJ-Class1.ppd.gz	hplip-20210520-hplip-3.21.4-hp-PCLM-MONO.ppd.gz	hp-20190918-hplip-3.19.6-hp-PSP470.ppd.gz	hplip-20201209-hplip-3.20.11-hp-PSP470.ppd.gz	hplip-20201217-hplip-3.20.11-hp-PCLM_COLOR.ppd.gz	hplip-20201217-hplip-3.20.11-hp-PCLM_MONO.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_420_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_330_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_370_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_320_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a430_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a520_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a310_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_380_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a440_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a510_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_350.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a320_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_400l.ppd.gz	hp-20190918-hplip-3.19.6-hp-PSP100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-PSP100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a530_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a630_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a640_series.ppd.gz
+hplip-20200303-hplip-3.19.12-hp-designjet_t1708dr_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1708_postscript-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_Z9dr_44in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1700dr_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1700_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_Z6dr_44in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_z5600_postscript-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_Z9_44in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_z6810ps_60in-ps.ppd.gz	hp-20190111-hplip-3.18.12-hp-designjet_z6810ps_42in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_Z6_44in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_z6610ps_60in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t3500-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_Z9_24in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_z2600_postscript-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_Z6_24in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t7200-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_8000ps-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_5100ps-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_5100ps_mfp-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_6000ps-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_6000ps_mfp-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_5000ps-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_5000ps_mfp-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_4000ps-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_4000ps_mfp-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_4500ps-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_4500ps_mfp-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_d5800-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps_blueprinter-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp_blueprinter-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps_blueprinter-ps.ppd.gz	lexmark-20201101-Lexmark_CS310_Series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_z6800_photo-postscript.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_z6600-postscript.ppd.gz	lexmark-20201101-Lexmark_MS310_Series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_z5400-postscript.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-cm8060_mfp_with_edgeline-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-laserjet_m2727_mfp_series-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-laserjet_m1522_mfp-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z5200_postscript-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-color_laserjet_mfp_m776-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-color_laserjet_m856-ps.ppd.gz	hp-20190918-hplip-3.19.6-hp-postscript-laserjet-pro.ppd.gz	hplip-20210520-hplip-3.21.4-hp-postscript-laserjet-pro.ppd.gz	hplip-20201209-hplip-3.20.11-hp-color_laserjet_flowmfp_m776-ps.ppd.gz	hp-20190918-hplip-3.19.6-hp-postscript-laserjet.ppd.gz	hplip-20210520-hplip-3.21.4-hp-postscript-laserjet.ppd.gz	hplip-20201209-hplip-3.20.11-hp-color_laserjet_e85055-ps.ppd.gz	hp-20190918-hplip-3.19.6-hp-postscript-inkjet.ppd.gz	hplip-20201209-hplip-3.20.11-hp-postscript-inkjet.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_p55250-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-color_laserjet_pro_mfp_m277-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-laserjet_100_color_mfp_m175-ps.ppd.gz
+lexmark-20201101-Lexmark_CX920_Series.ppd.gz	lexmark-20201101-Lexmark_XC9200_Series.ppd.gz	lexmark-20201101-Lexmark_CS920_Series.ppd.gz	lexmark-20201101-Lexmark_C9200_Series.ppd.gz	lexmark-20201101-Lexmark_X950_Series.ppd.gz	lexmark-20201101-Lexmark_C950_Series.ppd.gz	sharp-20210601-SHARP_MX-C557F.ppd.gz	sharp-20210601-SHARP_MX-C607F.ppd.gz	lexmark-20201101-Lexmark_CX825_Series.ppd.gz	lexmark-20201101-Lexmark_CX860_Series.ppd.gz	lexmark-20201101-Lexmark_XC8100_Series.ppd.gz	lexmark-20201101-Lexmark_X790_Series.ppd.gz	lexmark-20201101-Lexmark_CX820_Series.ppd.gz	lexmark-20201101-Lexmark_XC6100_Series.ppd.gz	sharp-20210601-SHARP_MX-C607P.ppd.gz	lexmark-20201101-Lexmark_CS820_Series.ppd.gz	lexmark-20201101-Lexmark_C6100_Series.ppd.gz	lexmark-20201101-Lexmark_MX910_Series.ppd.gz	lexmark-20201101-Lexmark_XM9100_Series.ppd.gz	lexmark-20201101-Lexmark_MS910_Series.ppd.gz	sharp-20210601-SHARP_MX-B557P.ppd.gz	sharp-20210601-SHARP_MX-B707P.ppd.gz	lexmark-20201101-Lexmark_X920_Series.ppd.gz	sharp-20210601-SHARP_MX-C357F.ppd.gz	lexmark-20201101-Lexmark_MS820_Series.ppd.gz	lexmark-20201101-Lexmark_M5200_Series.ppd.gz	lexmark-20201101-Lexmark_B2860_Series.ppd.gz	sharp-20210601-SHARP_MX-C407P.ppd.gz	lexmark-20201101-Lexmark_X740_Series.ppd.gz	sharp-20210601-SHARP_MX-C407F.ppd.gz	sharp-20210601-SHARP_MX-C507F.ppd.gz	lexmark-20201101-Lexmark_C740_Series.ppd.gz	sharp-20210601-SHARP_MX-B557F.ppd.gz	sharp-20210601-SHARP_MX-B707F.ppd.gz	sharp-20210601-SHARP_MX-C507P.ppd.gz	lexmark-20201101-Lexmark_CX410_Series.ppd.gz	lexmark-20201101-Lexmark_CX625_Series.ppd.gz	lexmark-20201101-Lexmark_CX620_Series.ppd.gz	lexmark-20201101-Lexmark_XC2200_Series.ppd.gz	lexmark-20201101-Lexmark_CX520_Series.ppd.gz	lexmark-20201101-Lexmark_MC2500_Series.ppd.gz	lexmark-20201101-Lexmark_MC2600_Series.ppd.gz	lexmark-20201101-Lexmark_XC4200_Series.ppd.gz	lexmark-20201101-Lexmark_CS410_Series.ppd.gz	lexmark-20201101-Lexmark_CX510_Series.ppd.gz	lexmark-20201101-Lexmark_C2200_Series.ppd.gz	lexmark-20201101-Lexmark_C2500_Series.ppd.gz	lexmark-20201101-Lexmark_CS520_Series.ppd.gz	lexmark-20201101-Lexmark_CS620_Series.ppd.gz	lexmark-20201101-Lexmark_XC2100_Series.ppd.gz	lexmark-20201101-Lexmark_X548_Series.ppd.gz	lexmark-20201101-Lexmark_CS510_Series.ppd.gz	lexmark-20201101-Lexmark_CX420_Series.ppd.gz	lexmark-20201101-Lexmark_MC2400_Series.ppd.gz	lexmark-20201101-Lexmark_C2400_Series.ppd.gz	lexmark-20201101-Lexmark_CS420_Series.ppd.gz	lexmark-20201101-Lexmark_MX820_Series.ppd.gz	lexmark-20201101-Lexmark_CX725_Series.ppd.gz	lexmark-20201101-Lexmark_XC4100_Series.ppd.gz	lexmark-20201101-Lexmark_CX310_Series.ppd.gz	lexmark-20201101-Lexmark_XM7300_Series.ppd.gz	lexmark-20201101-Lexmark_6500e_Series.ppd.gz	lexmark-20201101-Lexmark_CS720_Series.ppd.gz	lexmark-20201101-Lexmark_CS725_Series.ppd.gz	lexmark-20201101-Lexmark_C4100_Series.ppd.gz	lexmark-20201101-Lexmark_MC2300_Series.ppd.gz	lexmark-20201101-Lexmark_MX6500e_Series.ppd.gz	lexmark-20201101-Lexmark_C2300_Series.ppd.gz	lexmark-20201101-Lexmark_MS810_Series.ppd.gz	lexmark-20201101-Lexmark_MS710_Series.ppd.gz	lexmark-20201101-Lexmark_M5100_Series.ppd.gz	lexmark-20201101-Lexmark_MX810_Series.ppd.gz	lexmark-20201101-Lexmark_XM7100_Series.ppd.gz	lexmark-20201101-Lexmark_CX430_Series.ppd.gz	lexmark-20201101-Lexmark_MC3400_Series.ppd.gz	lexmark-20201101-Lexmark_C3400_Series.ppd.gz	lexmark-20201101-Lexmark_CS430_Series.ppd.gz	lexmark-20201101-Lexmark_MS725_Series.ppd.gz	lexmark-20201101-Lexmark_C790_Series.ppd.gz	sharp-20210601-SHARP_MX-B467F.ppd.gz	lexmark-20201101-Lexmark_MX620_Series.ppd.gz	
lexmark-20201101-Lexmark_MB2600_Series.ppd.gz	lexmark-20201101-Lexmark_XM3200_Series.ppd.gz	lexmark-20201101-Lexmark_MX520_Series.ppd.gz	lexmark-20201101-Lexmark_XM1200_Series.ppd.gz	lexmark-20201101-Lexmark_MB2500_Series.ppd.gz	sharp-20210601-SHARP_MX-B467P.ppd.gz	lexmark-20201101-Lexmark_MS620_Series.ppd.gz	lexmark-20201101-Lexmark_M1200_Series.ppd.gz	lexmark-20201101-Lexmark_B2500_Series.ppd.gz	lexmark-20201101-Lexmark_B2600_Series.ppd.gz	lexmark-20201101-Lexmark_M3200_Series.ppd.gz	lexmark-20201101-Lexmark_MS520_Series.ppd.gz	lexmark-20201101-Lexmark_MX610_Series.ppd.gz	lexmark-20201101-Lexmark_XM3100_Series.ppd.gz	lexmark-20201101-Lexmark_C920_Series.ppd.gz	lexmark-20201101-Lexmark_MB2300_Series.ppd.gz	lexmark-20201101-Lexmark_MB2400_Series.ppd.gz	lexmark-20201101-Lexmark_MX320_Series.ppd.gz	lexmark-20201101-Lexmark_MX420_Series.ppd.gz	lexmark-20201101-Lexmark_MX720_Series.ppd.gz	lexmark-20201101-Lexmark_XM5300_Series.ppd.gz	lexmark-20201101-Lexmark_MB2700_Series.ppd.gz	lexmark-20201101-Lexmark_B2300_Series.ppd.gz	lexmark-20201101-Lexmark_B2400_Series.ppd.gz	lexmark-20201101-Lexmark_MS420_Series.ppd.gz	lexmark-20201101-Lexmark_MS320_Series.ppd.gz	lexmark-20201101-Lexmark_MX510_Series.ppd.gz	lexmark-20201101-Lexmark_XM1100_Series.ppd.gz	lexmark-20201101-Lexmark_MX310_Series.ppd.gz	lexmark-20201101-Lexmark_MS610_Series.ppd.gz	lexmark-20201101-Lexmark_M3100_Series.ppd.gz	lexmark-20201101-Lexmark_MX710_Series.ppd.gz	lexmark-20201101-Lexmark_XM5100_Series.ppd.gz	lexmark-20201101-Lexmark_MS510_Series.ppd.gz	lexmark-20201101-Lexmark_M1100_Series.ppd.gz	lexmark-20201101-Lexmark_MX410_Series.ppd.gz	lexmark-20201101-Lexmark_MX725_Series.ppd.gz
 ricoh-20190916-Ricoh-IPSiO_SP_3510SF_PXL.ppd.gz	ricoh-20190916-Ricoh-IPSiO_SP_3510_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_320DN_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_320SFN_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_320SN_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_325DNw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_325SFNw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_325SNw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_377DNwX_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_377SFNwX_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_377SNwX_PXL.ppd.gz	ricoh-20190916-Ricoh-IPSiO_SP_3400L_PXL.ppd.gz	ricoh-20190916-Ricoh-IPSiO_SP_3410SF_PXL.ppd.gz	ricoh-20190916-Ricoh-IPSiO_SP_3410_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_2200L_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_2200SFL_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_212Nw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_212SFNw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_212SFw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_212SNw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_212SUw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_212w_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_213Nw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_213SFNw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_213SFw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_213SNw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_213SUw_PXL.ppd.gz	ricoh-20190916-Ricoh-SP_213w_PXL.ppd.gz
 ricoh-20190916-Ricoh-P_C301SF_PS.ppd.gz	ricoh-20190916-Ricoh-P_C301_PS.ppd.gz	foomatic-20200219-Ricoh-SP_C261DNw-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C261SFNw-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C262DNw-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C262SFNw-Postscript-Ricoh.ppd.gz	ricoh-20190916-Ricoh-M_C250FWB_PS.ppd.gz	ricoh-20190916-Ricoh-M_C250FW_PS.ppd.gz	ricoh-20190916-Ricoh-P_C300W_PS.ppd.gz	ricoh-20190916-Ricoh-P_C301W_PS.ppd.gz	foomatic-20200219-Ricoh-SP_C250DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C250SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C252DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_C252SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-NRG-SP_C242DN-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_C242SF-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C242DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C242SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Lanier-SP_C222SF-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C232SF-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C222SF-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_C232SF-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C222SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C232SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-NRG-SP_3510DN-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_3510SF-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3510DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3510SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Lanier-SP_C312DN-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C312DN-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C312DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_330DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_330SFN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_330SN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_3710DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_3710SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_3700-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_3700SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Lanier-SP_C222DN-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C232DN-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C222DN-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_C232DN-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C222DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C232DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Lanier-SP_3410DN-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_3410SF-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-SP_3410DN-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_3410SF-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_300DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3410DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3410SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-NRG-SP_3500N-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_3500SF-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3500N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3500SF-Postscript-Ricoh.ppd.gz	foomatic-20200219-Lanier-SP_C311N-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C311N-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C311N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Lanier-SP_C221N-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C231N-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C221N-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_C231N-Postscript-NRG.ppd.gz	
foomatic-20200219-Ricoh-Aficio_SP_C221N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C231N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Lanier-SP_3400N-Postscript-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_3400SF-Postscript-Lanier.ppd.gz	foomatic-20200219-NRG-SP_3400N-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_3400SF-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3400N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3400SF-Postscript-Ricoh.ppd.gz
-ricoh-20191218-SP_C420e_JPN-PostscriptColor-Ricoh.ppd.gz	ricoh-20200403-Ricoh-IM_C2000_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C2500_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C3000_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C300_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C3500_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C4500A_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C4500_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C5500A_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C5500_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C6000_JPN.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C320.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C411-ME.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C411.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C420-ME.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C420.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C711.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C721.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C721M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C730.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C730M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C731.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C731M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C810.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C810M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C811.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C811M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C820.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C820M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C821.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C821M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C830.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C830M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C831.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C831M.ppd.gz	ricoh-20200403-Ricoh-MP_C1803_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C2503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C2504_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C3003_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C3004_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C305_SP_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C306_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C307_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C3503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C3504_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C4503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C4504_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C5503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C5504_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C6003_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C6004_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C6502_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C6503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C8002_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C8003_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C5100S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C5110S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C5200S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C5210S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C7100S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C7110S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C7110_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C7200S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C7210S_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C341_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C342_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C420e_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C420e_ME_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C420e_M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C750M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C750_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C751M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C751_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C840M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C840M_a1_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C840_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C840a1_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C841M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C841M_a1_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C841_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C841a1_JPN.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C1800.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C2200.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C2201.ppd.gz	
ricoh-20200403-Ricoh-imagio_MP_C2500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C2800.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C2801.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C2802.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3300.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3301.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3302.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3500it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4000it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4001A.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4001it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4002A.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4500it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5000it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5001A.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5001A_it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5001it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5002A.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C6000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C6001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C7500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C7501.ppd.gz	ricoh-20200821-Ricoh-IM_C6500_JPN.ppd.gz	ricoh-20200821-Ricoh-IM_C8000_JPN.ppd.gz	ricoh-20200821-Ricoh-Pro_C5300S_JPN.ppd.gz	ricoh-20200821-Ricoh-Pro_C5310S_JPN.ppd.gz	foomatic-20190909-Ricoh-IM_430F-PostscriptMono-Ricoh.ppd.gz	ricoh-20200403-Ricoh-IM_430F.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_4210.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_4300.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_4310.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6110.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6120.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6210.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6220.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6310.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6320.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6330.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8100.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8100M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8200.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8200M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8300.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8300M.ppd.gz	ricoh-20200403-Ricoh-IP_500SF.ppd.gz	ricoh-20200403-Ricoh-MP_1301_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_1601_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_2553_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_2554_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_2555_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_305plus_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_305plus_SPF_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_3353_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_3554_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_3555_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_401SPF_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_4054_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_4055_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_5054_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_5055_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_6054_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_6055_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_6503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_7503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_9003_JPN.ppd.gz	ricoh-20200403-Ricoh-P_500_JPN.ppd.gz	ricoh-20200403-Ricoh-P_501_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_1107EXP.ppd.gz	ricoh-20200403-Ricoh-Pro_1357EXP.ppd.gz	ricoh-20200403-Ricoh-Pro_6100HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_6100_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8100S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8110HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8110S_JPN.ppd.gz	
ricoh-20200403-Ricoh-Pro_8110Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8120HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8120S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8120Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8200S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8210HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8210S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8210Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8220HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8220S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8220Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8300S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8310HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8310S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8310Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8320HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8320S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8320Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_907EXP.ppd.gz	ricoh-20200403-Ricoh-SP_4500_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_4510SF_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_4510_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6410_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6420M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6420_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6430M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6430_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6440M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6440_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6450_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400M_a1_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400a1_JPN.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_1100.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_1350.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_2550.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_2552.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_3350.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_3352.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_4000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_4002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_5000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_5002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6001GP.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6002GP.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7500T.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7501.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7502.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_8000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_8000T.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9001T.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9002T.ppd.gz	ricoh-20200403-Ricoh-imagio_Neo_452.ppd.gz	ricoh-20200403-Ricoh-imagio_Neo_453.ppd.gz	ricoh-20200527-Ricoh-P_6000_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6010M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6010_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6020M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6020_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6030M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6030_JPN.ppd.gz
+ricoh-20191218-SP_C420e_JPN-PostscriptColor-Ricoh.ppd.gz	ricoh-20200403-Ricoh-IM_C2000_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C2500_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C3000_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C300_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C3500_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C4500A_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C4500_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C5500A_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C5500_JPN.ppd.gz	ricoh-20200403-Ricoh-IM_C6000_JPN.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C320.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C411-ME.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C411.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C420-ME.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C420.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C711.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C721.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C721M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C730.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C730M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C731.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C731M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C810.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C810M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C811.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C811M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C820.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C820M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C821.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C821M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C830.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C830M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C831.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_C831M.ppd.gz	ricoh-20200403-Ricoh-MP_C1803_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C2503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C2504_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C3003_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C3004_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C305_SP_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C306_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C307_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C3503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C3504_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C4503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C4504_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C5503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C5504_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C6003_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C6004_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C6502_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C6503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C8002_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_C8003_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C5100S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C5110S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C5200S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C5210S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C7100S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C7110S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C7110_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C7200S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_C7210S_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C341_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C342_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C420e_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C420e_ME_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C420e_M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C750M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C750_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C751M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C751_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C840M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C840M_a1_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C840_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C840a1_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C841M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C841M_a1_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C841_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_C841a1_JPN.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C1800.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C2200.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C2201.ppd.gz	
ricoh-20200403-Ricoh-imagio_MP_C2500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C2800.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C2801.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C2802.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3300.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3301.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3302.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C3500it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4000it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4001A.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4001it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4002A.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C4500it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5000it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5001A.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5001A_it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5001it.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C5002A.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C6000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C6001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C7500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_C7501.ppd.gz	ricoh-20200821-Ricoh-IM_C6500_JPN.ppd.gz	ricoh-20200821-Ricoh-IM_C8000_JPN.ppd.gz	ricoh-20200821-Ricoh-Pro_C5300S_JPN.ppd.gz	ricoh-20200821-Ricoh-Pro_C5310S_JPN.ppd.gz	foomatic-20190909-Ricoh-IM_430F-PostscriptMono-Ricoh.ppd.gz	ricoh-20200403-Ricoh-IM_430F.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_4210.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_4300.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_4310.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6110.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6120.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6210.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6220.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6310.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6320.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6330.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8100.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8100M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8200.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8200M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8300.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8300M.ppd.gz	ricoh-20200403-Ricoh-IP_500SF.ppd.gz	ricoh-20200403-Ricoh-MP_1301_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_1601_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_2553_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_2554_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_2555_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_305plus_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_305plus_SPF_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_3353_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_3554_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_3555_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_401SPF_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_4054_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_4055_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_5054_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_5055_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_6054_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_6055_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_6503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_7503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_9003_JPN.ppd.gz	ricoh-20200403-Ricoh-P_500_JPN.ppd.gz	ricoh-20200403-Ricoh-P_501_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_1107EXP.ppd.gz	ricoh-20200403-Ricoh-Pro_1357EXP.ppd.gz	ricoh-20200403-Ricoh-Pro_6100HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_6100_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8100S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8110HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8110S_JPN.ppd.gz	
ricoh-20200403-Ricoh-Pro_8110Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8120HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8120S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8120Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8200S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8210HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8210S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8210Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8220HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8220S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8220Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8300S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8310HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8310S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8310Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8320HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8320S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8320Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_907EXP.ppd.gz	ricoh-20200403-Ricoh-SP_4500_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_4510SF_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_4510_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6410_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6420M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6420_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6430M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6430_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6440M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6440_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6450_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400M_a1_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400a1_JPN.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_1100.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_1350.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_2550.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_2552.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_3350.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_3352.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_4000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_4002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_5000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_5002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6001GP.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6002GP.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7500T.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7501.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7502.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_8000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_8000T.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9001T.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9002T.ppd.gz	ricoh-20200403-Ricoh-imagio_Neo_452.ppd.gz	ricoh-20200403-Ricoh-imagio_Neo_453.ppd.gz	ricoh-20200527-Ricoh-P_6000_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6010M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6010_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6020M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6020_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6030M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6030_JPN.ppd.gz	ricoh-20210222-Ricoh-IM_7000_JPN.ppd.gz	ricoh-20210222-Ricoh-IM_8000_JPN.ppd.gz	ricoh-20210222-Ricoh-IM_9000T_JPN.ppd.gz	ricoh-20210222-Ricoh-IM_9000_JPN.ppd.gz	ricoh-20210824-Ricoh-IM_2500_JPN.ppd.gz	ricoh-20210824-Ricoh-IM_3500_JPN.ppd.gz	ricoh-20210824-Ricoh-IM_4000_JPN.ppd.gz	ricoh-20210824-Ricoh-IM_5000_JPN.ppd.gz	ricoh-20210824-Ricoh-IM_6000_JPN.ppd.gz
 ricoh-20200930-Ricoh_Generic_PS_Printer.ppd.gz
+ricoh-20210601-Ricoh-M_C2000_PXL.ppd.gz	ricoh-20210222-Gestetner-G3020c_PXL.ppd.gz
+ricoh-20210601-Ricoh-SP_2300L_PXL.ppd.gz	ricoh-20210601-Ricoh-SP_2300SFL_PXL.ppd.gz
 star-20171009-starcupsdrv-3.6.0-hsp7000r.ppd.gz
 star-20171009-starcupsdrv-3.6.0-hsp7000s.ppd.gz
 star-20171009-starcupsdrv-3.6.0-hsp7000v.ppd.gz
@@ -128,3 +135,4 @@
 star-20191209-tsp654.ppd.gz
 star-20191209-tsp700II.ppd.gz
 star-20191209-tsp800II.ppd.gz
+zebra-20210504-SP-005645A.ppd.gz
diff --git a/client/site_tests/platform_PrinterPpds/log_reader.py b/client/site_tests/platform_PrinterPpds/log_reader.py
index 3a9da82..f1a73f9 100644
--- a/client/site_tests/platform_PrinterPpds/log_reader.py
+++ b/client/site_tests/platform_PrinterPpds/log_reader.py
@@ -40,14 +40,18 @@
 
         @param lines_count: a number of lines to read
 
-        @returns a list of lines
+        @returns a list of lines (as strings)
 
         """
         assert lines_count > 0
         argv = ['tail', '-n', '%d' % (lines_count+1), _PATH_LOG_FILE]
         p1 = subprocess.Popen(argv, stdout=subprocess.PIPE)
         out,err = p1.communicate()
-        lines = out.split('\n')
+
+        # It is possible for invalid UTF-8 to appear in the system log
+        # (e.g. null bytes on unclean poweroff), but this doesn't
+        # concern us, so we elect to ignore it.
+        lines = out.decode(errors="ignore").split('\n')
         lines.pop()
         if len(lines) > lines_count:
             if len(lines) == 0:
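For reference, a minimal Python 3 sketch of the decode step introduced above (the log path is illustrative only, standing in for _PATH_LOG_FILE): under Python 3, Popen with stdout=PIPE yields bytes, and stray invalid UTF-8 in the log would otherwise raise UnicodeDecodeError.

    import subprocess

    # Illustrative path; the test reads from _PATH_LOG_FILE instead.
    argv = ['tail', '-n', '5', '/var/log/messages']
    out, _ = subprocess.Popen(argv, stdout=subprocess.PIPE).communicate()

    # out is bytes under Python 3; decode defensively so malformed bytes
    # (e.g. NUL runs after an unclean poweroff) do not raise an exception.
    lines = out.decode(errors="ignore").split('\n')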
@@ -241,7 +245,7 @@
             filters[-1] = subprocess.check_output(find_cmd).rstrip()
         # build and return the script
         script = '#!/bin/bash\nset -e\nset -o pipefail\n'
-        for name, value in envp.iteritems():
+        for name, value in envp.items():
             script += ('export %s=%s\n' % (name, value))
         for ind, filt in enumerate(filters):
             if ind > 0:
diff --git a/client/site_tests/platform_PrinterPpds/multithreaded_processor.py b/client/site_tests/platform_PrinterPpds/multithreaded_processor.py
index 8e82d3a..518f374 100644
--- a/client/site_tests/platform_PrinterPpds/multithreaded_processor.py
+++ b/client/site_tests/platform_PrinterPpds/multithreaded_processor.py
@@ -2,7 +2,11 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import threading
+from six.moves import range
 
 
 class MultithreadedProcessor():
@@ -54,7 +58,7 @@
         @throws Exception if at least one of the tasks threw any Exception.
 
         """
-        self._tasks_ids = range(number_of_tasks) # list of tasks ids to process
+        self._tasks_ids = list(range(number_of_tasks))  # tasks ids to process
         self._outputs = [None]*number_of_tasks
         self._error = None
 
diff --git a/client/site_tests/platform_PrinterPpds/platform_PrinterPpds.py b/client/site_tests/platform_PrinterPpds/platform_PrinterPpds.py
index eca6b2f..ba3d2d6 100644
--- a/client/site_tests/platform_PrinterPpds/platform_PrinterPpds.py
+++ b/client/site_tests/platform_PrinterPpds/platform_PrinterPpds.py
@@ -2,6 +2,10 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import dbus
 import gzip
 import logging
@@ -21,6 +25,7 @@
 import fake_printer
 import log_reader
 import multithreaded_processor
+from six.moves import range
 
 # Timeout for printing documents in seconds
 _FAKE_PRINTER_TIMEOUT = 200
@@ -368,10 +373,10 @@
                             self._archivers[doc_name].save_file(
                                     ppd_name, '.out', doc, apply_gzip=True)
                             self._archivers[doc_name].save_file(
-                                    ppd_name, '.log', logs)
+                                    ppd_name, '.log', logs.encode())
                             if pipeline is not None:
                                 self._archivers[doc_name].save_file(
-                                        ppd_name, '.sh', pipeline)
+                                        ppd_name, '.sh', pipeline.encode())
                             # Set new digest
                             self._new_digests[doc_name][ppd_name] = digest
                             self._new_sizes[doc_name][ppd_name] = len(doc)
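A hedged sketch of the .encode() calls added above, using a hypothetical stand-in for the archiver's save_file(): under Python 3 the logs and pipeline script are str, while the archiver is presumed to write raw bytes (as it does for the gzipped document output).

    def save_bytes(path, data):
        # Hypothetical stand-in for the archiver's save_file(): writes raw bytes.
        with open(path, 'wb') as f:
            f.write(data)

    logs = "cups: job completed\n"                  # str under Python 3
    save_bytes('/tmp/example.log', logs.encode())   # encode to UTF-8 bytes first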
diff --git a/client/site_tests/platform_PrinterPpds/ppds_core.tar.xz b/client/site_tests/platform_PrinterPpds/ppds_core.tar.xz
index 41aa4fa..8bb817c 100644
--- a/client/site_tests/platform_PrinterPpds/ppds_core.tar.xz
+++ b/client/site_tests/platform_PrinterPpds/ppds_core.tar.xz
Binary files differ
diff --git a/client/site_tests/platform_PrinterPpds/ppds_ext.tar.xz b/client/site_tests/platform_PrinterPpds/ppds_ext.tar.xz
index e3083c9..e950f20 100644
--- a/client/site_tests/platform_PrinterPpds/ppds_ext.tar.xz
+++ b/client/site_tests/platform_PrinterPpds/ppds_ext.tar.xz
Binary files differ
diff --git a/client/site_tests/platform_PrinterPpds/readme.txt b/client/site_tests/platform_PrinterPpds/readme.txt
index d4bc3fb..cca824b 100644
--- a/client/site_tests/platform_PrinterPpds/readme.txt
+++ b/client/site_tests/platform_PrinterPpds/readme.txt
@@ -115,6 +115,9 @@
     go run ppdTool.go download
 3. Calculate new clusters:
     go run ppdTool.go compare
+   If it fails with the error "too many open files", increase the soft limit
+   on the number of open files with ulimit, e.g.:
+    ulimit -Sn 10123
 4. Compress new directories with PPD files:
     tar cJf ppds_core.tar.xz ppds_core
     tar cJf ppds_ext.tar.xz ppds_ext
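The ulimit hint above can also be applied from Python with the standard resource module; a sketch equivalent in effect to `ulimit -Sn` for the current process and its children (the 10123 value is only illustrative):

    import resource

    # Raise the soft limit on open file descriptors, capped at the hard limit.
    soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    resource.setrlimit(resource.RLIMIT_NOFILE, (min(10123, hard), hard))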
diff --git a/client/site_tests/platform_PrinterPpds/small_clusters.txt b/client/site_tests/platform_PrinterPpds/small_clusters.txt
index 859808b..f158bd1 100644
--- a/client/site_tests/platform_PrinterPpds/small_clusters.txt
+++ b/client/site_tests/platform_PrinterPpds/small_clusters.txt
@@ -1,4 +1,4 @@
-brother-20191213-DCP7180DN.ppd.gz	brother-20191213-DCP7189DW.ppd.gz	brother-20191213-DCP7190DN.ppd.gz	brother-20191213-DCPB7500D.ppd.gz	brother-20191213-DCPB7520DW.ppd.gz	brother-20191213-DCPB7530DN.ppd.gz	brother-20191213-DCPB7535DW.ppd.gz	brother-20191213-DCPL2535DW.ppd.gz	brother-20191213-DCPL2540DN.ppd.gz	brother-20191213-DCPL2540DW.ppd.gz	brother-20191213-DCPL2550DN.ppd.gz	brother-20191213-DCPL2550DW.ppd.gz	brother-20191213-DCPL2551DN.ppd.gz	brother-20191213-DCPL2560DW.ppd.gz	brother-20191213-HL2560DN.ppd.gz	brother-20191213-HL2569DW.ppd.gz	brother-20191213-HL2590DN.ppd.gz	brother-20191213-HLB2050DN.ppd.gz	brother-20191213-HLB2080DW.ppd.gz	brother-20191213-HLL2360D.ppd.gz	brother-20191213-HLL2370DN.ppd.gz	brother-20191213-HLL2370DW.ppd.gz	brother-20191213-HLL2371DN.ppd.gz	brother-20191213-HLL2375DW.ppd.gz	brother-20191213-HLL2380DW.ppd.gz	brother-20191213-HLL2395DW.ppd.gz	brother-20191213-MFC7880DN.ppd.gz	brother-20191213-MFC7889DW.ppd.gz	brother-20191213-MFC7890DN.ppd.gz	brother-20191213-MFCB7715DW.ppd.gz	brother-20191213-MFCB7720DN.ppd.gz	brother-20191213-MFCL2715DW.ppd.gz	brother-20191213-MFCL2720DN.ppd.gz	brother-20191213-MFCL2720DW.ppd.gz	brother-20191213-MFCL2730DN.ppd.gz	brother-20191213-MFCL2730DW.ppd.gz	brother-20191213-MFCL2740DW.ppd.gz	brother-20191213-MFCL2750DW.ppd.gz	brother-20200131-DCP7070DW.ppd.gz	brother-20200131-FAX7860DW.ppd.gz	brother-20200131-HL2250DN.ppd.gz	brother-20200131-HL2270DW.ppd.gz	brother-20200131-HL2280DW.ppd.gz	brother-20200131-HLL2360DN.ppd.gz	brother-20200131-HLL2360DW.ppd.gz	brother-20200131-HLL2365DW.ppd.gz	brother-20200131-HLL2372DN.ppd.gz	brother-20200131-MFC7470D.ppd.gz	brother-20200131-MFC7860DN.ppd.gz	brother-20200131-MFC7860DW.ppd.gz	brother-20200131-MFCL2705DW.ppd.gz
+brother-20191213-DCP7180DN.ppd.gz	brother-20191213-DCP7189DW.ppd.gz	brother-20191213-DCP7190DN.ppd.gz	brother-20191213-DCPB7500D.ppd.gz	brother-20191213-DCPB7520DW.ppd.gz	brother-20191213-DCPB7530DN.ppd.gz	brother-20191213-DCPB7535DW.ppd.gz	brother-20191213-DCPL2535DW.ppd.gz	brother-20191213-DCPL2540DN.ppd.gz	brother-20191213-DCPL2540DW.ppd.gz	brother-20191213-DCPL2550DN.ppd.gz	brother-20191213-DCPL2550DW.ppd.gz	brother-20191213-DCPL2551DN.ppd.gz	brother-20191213-DCPL2560DW.ppd.gz	brother-20191213-HL2560DN.ppd.gz	brother-20191213-HL2569DW.ppd.gz	brother-20191213-HL2590DN.ppd.gz	brother-20191213-HLB2050DN.ppd.gz	brother-20191213-HLB2080DW.ppd.gz	brother-20191213-HLL2360D.ppd.gz	brother-20191213-HLL2370DN.ppd.gz	brother-20191213-HLL2370DW.ppd.gz	brother-20191213-HLL2371DN.ppd.gz	brother-20191213-HLL2375DW.ppd.gz	brother-20191213-HLL2380DW.ppd.gz	brother-20191213-HLL2395DW.ppd.gz	brother-20191213-MFC7880DN.ppd.gz	brother-20191213-MFC7889DW.ppd.gz	brother-20191213-MFC7890DN.ppd.gz	brother-20191213-MFCB7715DW.ppd.gz	brother-20191213-MFCB7720DN.ppd.gz	brother-20191213-MFCL2715DW.ppd.gz	brother-20191213-MFCL2720DN.ppd.gz	brother-20191213-MFCL2720DW.ppd.gz	brother-20191213-MFCL2730DN.ppd.gz	brother-20191213-MFCL2730DW.ppd.gz	brother-20191213-MFCL2740DW.ppd.gz	brother-20191213-MFCL2750DW.ppd.gz	brother-20210413-MFCB7710DN.ppd.gz	brother-20200131-DCP7070DW.ppd.gz	brother-20200131-FAX7860DW.ppd.gz	brother-20200131-HL2250DN.ppd.gz	brother-20200131-HL2270DW.ppd.gz	brother-20200131-HL2280DW.ppd.gz	brother-20200131-HLL2360DN.ppd.gz	brother-20200131-HLL2360DW.ppd.gz	brother-20200131-HLL2365DW.ppd.gz	brother-20200131-HLL2372DN.ppd.gz	brother-20200131-MFC7470D.ppd.gz	brother-20200131-MFC7860DN.ppd.gz	brother-20200131-MFC7860DW.ppd.gz	brother-20200131-MFCL2705DW.ppd.gz
 brother-20191213-DCP7195DW.ppd.gz	brother-20191213-HLL2385DW.ppd.gz	brother-20191213-MFC7895DW.ppd.gz	brother-20191213-MFCL2770DW.ppd.gz	brother-20200131-DCP8070D.ppd.gz	brother-20200131-HL2595DW.ppd.gz	brother-20200131-MFC8370DN.ppd.gz
 brother-20191213-DCPL5500DN.ppd.gz	brother-20191213-DCPL5502DN.ppd.gz	brother-20191213-DCPL5600DN.ppd.gz	brother-20191213-DCPL5602DN.ppd.gz	brother-20191213-DCPL5650DN.ppd.gz	brother-20191213-DCPL5652DN.ppd.gz	brother-20191213-HL5580D.ppd.gz	brother-20191213-HL5585D.ppd.gz	brother-20191213-HL5590DN.ppd.gz	brother-20191213-HLL5000D.ppd.gz	brother-20191213-HLL5100DN.ppd.gz	brother-20191213-HLL5102DW.ppd.gz	brother-20191213-HLL5200DW.ppd.gz	brother-20191213-HLL5202DW.ppd.gz	brother-20191213-MFC8530DN.ppd.gz	brother-20191213-MFC8535DN.ppd.gz	brother-20191213-MFC8540DN.ppd.gz	brother-20191213-MFCL5700DN.ppd.gz	brother-20191213-MFCL5700DW.ppd.gz	brother-20191213-MFCL5702DW.ppd.gz	brother-20191213-MFCL5750DW.ppd.gz	brother-20191213-MFCL5755DW.ppd.gz	brother-20191213-MFCL5800DW.ppd.gz	brother-20191213-MFCL5802DW.ppd.gz	brother-20191213-MFCL5850DW.ppd.gz	brother-20191213-MFCL5900DW.ppd.gz	brother-20191213-MFCL5902DW.ppd.gz	brother-20191213-MFCL6700DW.ppd.gz	brother-20191213-MFCL6702DW.ppd.gz	brother-20191213-MFCL6750DW.ppd.gz	brother-20200131-HL5340D.ppd.gz	brother-20200131-HL5350DN.ppd.gz	brother-20200131-HL5370DW.ppd.gz	brother-20200131-HL5380DN.ppd.gz
 brother-20191213-DCPL6600DW.ppd.gz	brother-20191213-HLL6300DW.ppd.gz	brother-20191213-HLL6400DW.ppd.gz	brother-20191213-HLL6402DW.ppd.gz	brother-20191213-MFCL6800DW.ppd.gz	brother-20191213-MFCL6900DW.ppd.gz	brother-20191213-MFCL6902DW.ppd.gz
@@ -16,6 +16,7 @@
 brother-20201006-HL2290-cups-en.ppd.gz
 brother-20201006-HL2295D-cups-en.ppd.gz	brother-20201006-HLB2000D-cups-en.ppd.gz	brother-20201006-HLL2310D-cups-en.ppd.gz	brother-20201006-HLL2325DW-cups-en.ppd.gz	brother-20201006-HLL2330D-cups-en.ppd.gz	brother-20201006-HLL2335D-cups-en.ppd.gz	brother-20201006-HLL2350DW-cups-en.ppd.gz	brother-20201006-HLL2357DW-cups-en.ppd.gz
 brother-20201006-MFCL2685DW-cups-en.ppd.gz	brother-20201006-MFCL2707DW-cups-en.ppd.gz
+brother-20210413-MFC5705DW.ppd.gz
 cups-20170101-Generic-PDF_Printer-PDF.ppd.gz
 dymo-20200714-lm280.ppd.gz
 dymo-20200714-lm400.ppd.gz
@@ -158,7 +159,7 @@
 epson-20200615-1_6_41-Epson-PX-M160T_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S160T_Series-epson-escpr-en.ppd.gz
 epson-20200615-1_6_41-Epson-PX-M350F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S350-epson-escpr-en.ppd.gz
 epson-20200615-1_6_41-Epson-PX-M5040F-epson-escpr-en.ppd.gz
-epson-20200615-1_6_41-Epson-PX-M5041F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S5040-epson-escpr-en.ppd.gz
+epson-20200615-1_6_41-Epson-PX-M5041F-epson-escpr-en.ppd.gz
 epson-20200615-1_6_41-Epson-PX-M650A-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M650F-epson-escpr-en.ppd.gz
 epson-20200615-1_6_41-Epson-PX-M7050FP-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M7050FX-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-M7050_Series-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S7050PS-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S7050X-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S7050_Series-epson-escpr-en.ppd.gz
 epson-20200615-1_6_41-Epson-PX-M740F-epson-escpr-en.ppd.gz	epson-20200615-1_6_41-Epson-PX-S740-epson-escpr-en.ppd.gz
@@ -257,6 +258,8 @@
 epson-20200615-Epson-WF-C17590_Series_PS3.ppd.gz
 epson-20200615-Epson-WF-C20590_PS.ppd.gz
 epson-20200615-Epson-WF-M20590_Series_PS3.ppd.gz
+epson-20210521-1_6_41-Epson-PX-S5040-epson-escpr-en.ppd.gz
+epson-20210703-Epson-L1210_Series-epson-escpr-en.ppd.gz	epson-20210703-Epson-L3210_Series-epson-escpr-en.ppd.gz
 foomatic-20170101-Samsung-M332x_382x_402x-Postscript.ppd.gz
 foomatic-20190909-Ricoh-IM_430F-PostscriptMono-Ricoh.ppd.gz	ricoh-20200403-Ricoh-IM_430F.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_4210.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_4300.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_4310.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6110.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6120.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6210.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6220.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6310.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6320.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_6330.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8100.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8100M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8200.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8200M.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8300.ppd.gz	ricoh-20200403-Ricoh-IPSiO_SP_8300M.ppd.gz	ricoh-20200403-Ricoh-IP_500SF.ppd.gz	ricoh-20200403-Ricoh-MP_1301_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_1601_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_2553_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_2554_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_2555_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_305plus_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_305plus_SPF_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_3353_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_3554_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_3555_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_401SPF_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_4054_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_4055_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_5054_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_5055_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_6054_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_6055_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_6503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_7503_JPN.ppd.gz	ricoh-20200403-Ricoh-MP_9003_JPN.ppd.gz	ricoh-20200403-Ricoh-P_500_JPN.ppd.gz	ricoh-20200403-Ricoh-P_501_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_1107EXP.ppd.gz	ricoh-20200403-Ricoh-Pro_1357EXP.ppd.gz	ricoh-20200403-Ricoh-Pro_6100HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_6100_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8100S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8110HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8110S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8110Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8120HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8120S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8120Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8200S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8210HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8210S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8210Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8220HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8220S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8220Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8300S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8310HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8310S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8310Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8320HT_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8320S_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_8320Y_JPN.ppd.gz	ricoh-20200403-Ricoh-Pro_907EXP.ppd.gz	ricoh-20200403-Ricoh-SP_4500_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_4510SF_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_4510_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6410_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6420M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6420_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6430M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6430_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6440M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6440_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_6450_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400M_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400M_a1_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400_JPN.ppd.gz	ricoh-20200403-Ricoh-SP_8400a1_JPN.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_1100.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_1350.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_2550.ppd.gz	
ricoh-20200403-Ricoh-imagio_MP_2552.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_3350.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_3352.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_4000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_4002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_5000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_5002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6001GP.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_6002GP.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7500.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7500T.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7501.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_7502.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_8000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_8000T.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9000.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9001.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9001T.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9002.ppd.gz	ricoh-20200403-Ricoh-imagio_MP_9002T.ppd.gz	ricoh-20200403-Ricoh-imagio_Neo_452.ppd.gz	ricoh-20200403-Ricoh-imagio_Neo_453.ppd.gz
 foomatic-20190909-Ricoh-IM_430_PDF.ppd.gz	foomatic-20200219-Gestetner-IM_350-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-IM_430-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-IM_350-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-IM_430-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-IM_350-PDF-NRG.ppd.gz	foomatic-20200219-NRG-IM_430-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-IM_350-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-IM_350-PDF-Savin.ppd.gz	foomatic-20200219-Savin-IM_430-PDF-Savin.ppd.gz
@@ -278,9 +281,9 @@
 foomatic-20200219-Apple-ImageWriter_II-iwhi.ppd.gz
 foomatic-20200219-Apple-ImageWriter_LQ-iwhi.ppd.gz
 foomatic-20200219-Apple-LaserWriter_IIg-Postscript.ppd.gz	foomatic-20200219-DEC-LN07-Postscript.ppd.gz	foomatic-20200219-NEC-SilentWriter_LC_890-Postscript.ppd.gz	foomatic-20200219-NEC-Silentwriter2_S60P-Postscript.ppd.gz	foomatic-20200219-NEC-Silentwriter2_model_290-Postscript.ppd.gz	foomatic-20200219-NEC-Silentwriter_95f-Postscript.ppd.gz	foomatic-20200219-Oki-OL830Plus-Postscript.ppd.gz	foomatic-20200219-Panasonic-KX-P5400-Postscript.ppd.gz
-foomatic-20200219-Apple-LaserWriter_Select_360-ljet2p.ppd.gz	foomatic-20200219-Brother-DCP-1200-ljet2p.ppd.gz	foomatic-20200219-Brother-HL-1040-ljet2p.ppd.gz	foomatic-20200219-Brother-HL-630-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-6550MC-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-9500-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-9600-ljet2p.ppd.gz	foomatic-20200219-Lexmark-Valuewriter_300-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_1260-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_860-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_870-ljet2p.ppd.gz	foomatic-20200219-Oki-OL400e-ljet2p.ppd.gz	foomatic-20200219-Oki-OL400ex-ljet2p.ppd.gz	foomatic-20200219-Oki-OL600e-ljet2p.ppd.gz	foomatic-20200219-Panasonic-KX-P4410-ljet2p.ppd.gz	foomatic-20200219-Panasonic-KX-P6500-ljet2p.ppd.gz	foomatic-20200219-Raven-LP-410-ljet2p.ppd.gz	foomatic-20200219-Star-LS-04-ljet2p.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P12-ljet2p.ppd.gz
+foomatic-20200219-Apple-LaserWriter_Select_360-ljet2p.ppd.gz	foomatic-20200219-Brother-DCP-1200-ljet2p.ppd.gz	foomatic-20200219-Brother-HL-1040-ljet2p.ppd.gz	foomatic-20200219-Brother-HL-630-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-6550MC-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-9500-ljet2p.ppd.gz	foomatic-20200219-Brother-MFC-9600-ljet2p.ppd.gz	foomatic-20200219-HP-LaserJet_2D-ljet2p.ppd.gz	foomatic-20200219-HP-LaserJet_2P-ljet2p.ppd.gz	foomatic-20200219-HP-LaserJet_2P_Plus-ljet2p.ppd.gz	foomatic-20200219-Lexmark-Valuewriter_300-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_1260-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_860-ljet2p.ppd.gz	foomatic-20200219-NEC-SuperScript_870-ljet2p.ppd.gz	foomatic-20200219-Oki-OL400e-ljet2p.ppd.gz	foomatic-20200219-Oki-OL400ex-ljet2p.ppd.gz	foomatic-20200219-Oki-OL600e-ljet2p.ppd.gz	foomatic-20200219-Panasonic-KX-P4410-ljet2p.ppd.gz	foomatic-20200219-Panasonic-KX-P6500-ljet2p.ppd.gz	foomatic-20200219-Raven-LP-410-ljet2p.ppd.gz	foomatic-20200219-Star-LS-04-ljet2p.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P12-ljet2p.ppd.gz
 foomatic-20200219-Apple-StyleWriter_1200-lpstyl.ppd.gz	foomatic-20200219-Apple-StyleWriter_I-lpstyl.ppd.gz	foomatic-20200219-Apple-StyleWriter_II-lpstyl.ppd.gz
-foomatic-20200219-Brother-DCP-7010-ljet4.ppd.gz	foomatic-20200219-Brother-DCP-7020-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1050-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1060-ljet4.ppd.gz	foomatic-20200219-Brother-HL-10h-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1260-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2035-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2040-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2135-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2142-ljet4.ppd.gz	foomatic-20200219-Brother-HL-660-ljet4.ppd.gz	foomatic-20200219-Brother-HL-760-ljet4.ppd.gz	foomatic-20200219-Brother-HL-960-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-1260-ljet4.ppd.gz	foomatic-20200219-DEC-LN17-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-5700-ljet4.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_10V-ljet4.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_16DV-ljet4.ppd.gz	foomatic-20200219-IBM-Page_Printer_3112-ljet4.ppd.gz	foomatic-20200219-Infotec-4651_MF-ljet4.ppd.gz	foomatic-20200219-Lexmark-Optra_E-ljet4.ppd.gz	foomatic-20200219-Lexmark-Optra_Eplus-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_6-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_6ex-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_8-ljet4.ppd.gz	foomatic-20200219-NEC-SuperScript_1800-ljet4.ppd.gz	foomatic-20200219-NEC-SuperScript_660i-ljet4.ppd.gz	foomatic-20200219-Oki-B410-ljet4.ppd.gz	foomatic-20200219-Oki-OL810ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_10e-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_10ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_14ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_6ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_8p-ljet4.ppd.gz	foomatic-20200219-Ricoh-Aficio_700-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-4600-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-5000a-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-6000-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-6100-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-85-ljet4.ppd.gz	foomatic-20200219-Samsung-QL-5100A-ljet4.ppd.gz	foomatic-20200219-Samsung-QL-6050-ljet4.ppd.gz	foomatic-20200219-Sharp-AR-161-ljet4.ppd.gz	foomatic-20200219-Xerox-Able_1406-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_4508-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4512-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4512PS-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P1202-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P8e-ljet4.ppd.gz	foomatic-20200219-Xerox-Document_Centre_400-ljet4.ppd.gz
+foomatic-20200219-Brother-DCP-7010-ljet4.ppd.gz	foomatic-20200219-Brother-DCP-7020-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1050-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1060-ljet4.ppd.gz	foomatic-20200219-Brother-HL-10h-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1260-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2035-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2040-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2135-ljet4.ppd.gz	foomatic-20200219-Brother-HL-2142-ljet4.ppd.gz	foomatic-20200219-Brother-HL-660-ljet4.ppd.gz	foomatic-20200219-Brother-HL-760-ljet4.ppd.gz	foomatic-20200219-Brother-HL-960-ljet4.ppd.gz	foomatic-20200219-Canon-LBP-1260-ljet4.ppd.gz	foomatic-20200219-DEC-LN17-ljet4.ppd.gz	foomatic-20200219-Epson-EPL-5700-ljet4.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_10V-ljet4.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_16DV-ljet4.ppd.gz	foomatic-20200219-HP-Color_LaserJet_5000-ljet4.ppd.gz	foomatic-20200219-IBM-Page_Printer_3112-ljet4.ppd.gz	foomatic-20200219-Infotec-4651_MF-ljet4.ppd.gz	foomatic-20200219-Lexmark-Optra_E-ljet4.ppd.gz	foomatic-20200219-Lexmark-Optra_Eplus-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_6-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_6ex-ljet4.ppd.gz	foomatic-20200219-Minolta-PagePro_8-ljet4.ppd.gz	foomatic-20200219-NEC-SuperScript_1800-ljet4.ppd.gz	foomatic-20200219-NEC-SuperScript_660i-ljet4.ppd.gz	foomatic-20200219-Oki-B410-ljet4.ppd.gz	foomatic-20200219-Oki-OL810ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_10e-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_10ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_14ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_6ex-ljet4.ppd.gz	foomatic-20200219-Oki-Okipage_8p-ljet4.ppd.gz	foomatic-20200219-Ricoh-Aficio_700-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-4600-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-5000a-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-6000-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-6100-ljet4.ppd.gz	foomatic-20200219-Samsung-ML-85-ljet4.ppd.gz	foomatic-20200219-Samsung-QL-5100A-ljet4.ppd.gz	foomatic-20200219-Samsung-QL-6050-ljet4.ppd.gz	foomatic-20200219-Sharp-AR-161-ljet4.ppd.gz	foomatic-20200219-Xerox-Able_1406-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_4508-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4512-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_N4512PS-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P1202-ljet4.ppd.gz	foomatic-20200219-Xerox-DocuPrint_P8e-ljet4.ppd.gz	foomatic-20200219-Xerox-Document_Centre_400-ljet4.ppd.gz
 foomatic-20200219-Brother-DCP-8020-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-MFC-8420-Postscript-Brother.ppd.gz
 foomatic-20200219-Brother-DCP-8025D-Postscript-Brother.ppd.gz
 foomatic-20200219-Brother-DCP-8040-Postscript-Brother.ppd.gz
@@ -292,7 +295,7 @@
 foomatic-20200219-Brother-HL-1020-hl7x0.ppd.gz	foomatic-20200219-Brother-HL-720-hl7x0.ppd.gz	foomatic-20200219-Brother-HL-730-hl7x0.ppd.gz	foomatic-20200219-Brother-HL-820-hl7x0.ppd.gz	foomatic-20200219-Brother-MFC-9050-hl7x0.ppd.gz
 foomatic-20200219-Brother-HL-1030-hl1250.ppd.gz
 foomatic-20200219-Brother-HL-1070-ljet4.ppd.gz	foomatic-20200219-Canon-GP_335-ljet4.ppd.gz
-foomatic-20200219-Brother-HL-10V-ljet3.ppd.gz	foomatic-20200219-Canon-LBP-4sx-ljet3.ppd.gz	foomatic-20200219-DEC-1800-ljet3.ppd.gz	foomatic-20200219-Epson-ActionLaser_1100-ljet3.ppd.gz	foomatic-20200219-Epson-EPL-5200-ljet3.ppd.gz	foomatic-20200219-Epson-EPL-5200plus-ljet3.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_8000-ljet3.ppd.gz	foomatic-20200219-Generic-PCL_5_Printer-ljet3.ppd.gz	foomatic-20200219-Tally-MT908-ljet3.ppd.gz
+foomatic-20200219-Brother-HL-10V-ljet3.ppd.gz	foomatic-20200219-Canon-LBP-4sx-ljet3.ppd.gz	foomatic-20200219-DEC-1800-ljet3.ppd.gz	foomatic-20200219-Epson-ActionLaser_1100-ljet3.ppd.gz	foomatic-20200219-Epson-EPL-5200-ljet3.ppd.gz	foomatic-20200219-Epson-EPL-5200plus-ljet3.ppd.gz	foomatic-20200219-Fujitsu-PrintPartner_8000-ljet3.ppd.gz	foomatic-20200219-Generic-PCL_5_Printer-ljet3.ppd.gz	foomatic-20200219-HP-LaserJet_3-ljet3.ppd.gz	foomatic-20200219-Tally-MT908-ljet3.ppd.gz
 foomatic-20200219-Brother-HL-1230-hl1250.ppd.gz
 foomatic-20200219-Brother-HL-1240-laserjet.ppd.gz
 foomatic-20200219-Brother-HL-1250-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1430-ljet4.ppd.gz	foomatic-20200219-Brother-HL-1440-ljet4.ppd.gz
@@ -315,7 +318,7 @@
 foomatic-20200219-Brother-HL-3260N-Postscript-Brother.ppd.gz
 foomatic-20200219-Brother-HL-3450CN-Postscript-Brother.ppd.gz
 foomatic-20200219-Brother-HL-4050CDN-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-4070CDW-Postscript-Brother.ppd.gz
-foomatic-20200219-Brother-HL-4Ve-laserjet.ppd.gz	foomatic-20200219-Brother-MFC-8300-laserjet.ppd.gz	foomatic-20200219-Brother-MFC-8600-laserjet.ppd.gz	foomatic-20200219-Citizen-ProJet_II-laserjet.ppd.gz	foomatic-20200219-Epson-ActionLaser_II-laserjet.ppd.gz	foomatic-20200219-Generic-PCL_4_Printer-laserjet.ppd.gz	foomatic-20200219-IBM-4019-laserjet.ppd.gz	foomatic-20200219-IBM-4029_030_LaserPrinter_10-laserjet.ppd.gz	foomatic-20200219-IBM-4312-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1200-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1200S-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1800-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2200-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2200S-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3300-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-5000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-800-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-800T-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-820-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1550-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1550plus-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3400-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3400plus-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-400-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-5500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-850-laserjet.ppd.gz	foomatic-20200219-Kyocera-P-2000-laserjet.ppd.gz	foomatic-20200219-Kyocera-P-2002-laserjet.ppd.gz	foomatic-20200219-Oki-OL400-laserjet.ppd.gz	foomatic-20200219-Oki-OL610e_S-laserjet.ppd.gz	foomatic-20200219-Oki-OL800-laserjet.ppd.gz	foomatic-20200219-Olivetti-PG_306-laserjet.ppd.gz	foomatic-20200219-PCPI-1030-laserjet.ppd.gz	foomatic-20200219-Panasonic-KX-P6150-laserjet.ppd.gz	foomatic-20200219-Seiko-SpeedJET_200-laserjet.ppd.gz	foomatic-20200219-Star-LaserPrinter_8-laserjet.ppd.gz
+foomatic-20200219-Brother-HL-4Ve-laserjet.ppd.gz	foomatic-20200219-Brother-MFC-8300-laserjet.ppd.gz	foomatic-20200219-Brother-MFC-8600-laserjet.ppd.gz	foomatic-20200219-Citizen-ProJet_II-laserjet.ppd.gz	foomatic-20200219-Epson-ActionLaser_II-laserjet.ppd.gz	foomatic-20200219-Generic-PCL_4_Printer-laserjet.ppd.gz	foomatic-20200219-HP-LaserJet-laserjet.ppd.gz	foomatic-20200219-HP-LaserJet_2-laserjet.ppd.gz	foomatic-20200219-IBM-4019-laserjet.ppd.gz	foomatic-20200219-IBM-4029_030_LaserPrinter_10-laserjet.ppd.gz	foomatic-20200219-IBM-4312-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1200-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1200S-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-1800-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2200-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-2200S-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3010-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-3300-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-5000-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-800-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-800T-laserjet.ppd.gz	foomatic-20200219-Kyocera-F-820-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1550-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-1550plus-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3400-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3400plus-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-3500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-400-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-5500-laserjet.ppd.gz	foomatic-20200219-Kyocera-FS-850-laserjet.ppd.gz	foomatic-20200219-Kyocera-P-2000-laserjet.ppd.gz	foomatic-20200219-Kyocera-P-2002-laserjet.ppd.gz	foomatic-20200219-Oki-OL400-laserjet.ppd.gz	foomatic-20200219-Oki-OL610e_S-laserjet.ppd.gz	foomatic-20200219-Oki-OL800-laserjet.ppd.gz	foomatic-20200219-Olivetti-PG_306-laserjet.ppd.gz	foomatic-20200219-PCPI-1030-laserjet.ppd.gz	foomatic-20200219-Panasonic-KX-P6150-laserjet.ppd.gz	foomatic-20200219-Seiko-SpeedJET_200-laserjet.ppd.gz	foomatic-20200219-Star-LaserPrinter_8-laserjet.ppd.gz
 foomatic-20200219-Brother-HL-5030-ljet4.ppd.gz	foomatic-20200219-Brother-HL-5040-ljet4.ppd.gz
 foomatic-20200219-Brother-HL-5050-Postscript-Brother.ppd.gz	foomatic-20200219-Brother-HL-5070N-Postscript-Brother.ppd.gz
 foomatic-20200219-Brother-HL-5140-ljet4.ppd.gz
@@ -327,7 +330,7 @@
 foomatic-20200219-Brother-HL-6050D_DN-Postscript-Brother.ppd.gz
 foomatic-20200219-Brother-HL-7050-Postscript-Brother.ppd.gz
 foomatic-20200219-Brother-HL-7050N-pxlmono.ppd.gz
-foomatic-20200219-Brother-HL-8-ljetplus.ppd.gz	foomatic-20200219-Lexmark-Winwriter_400-ljetplus.ppd.gz	foomatic-20200219-Panasonic-KX-P4450-ljetplus.ppd.gz
+foomatic-20200219-Brother-HL-8-ljetplus.ppd.gz	foomatic-20200219-HP-LaserJet_Plus-ljetplus.ppd.gz	foomatic-20200219-Lexmark-Winwriter_400-ljetplus.ppd.gz	foomatic-20200219-Panasonic-KX-P4450-ljetplus.ppd.gz
 foomatic-20200219-Brother-HL-8050N-Postscript-Brother.ppd.gz
 foomatic-20200219-Brother-MC-3000-epson.ppd.gz	foomatic-20200219-Panasonic-KX-P2123-epson.ppd.gz
 foomatic-20200219-Brother-MFC-7450-Postscript-Brother.ppd.gz
@@ -408,7 +411,7 @@
 foomatic-20200219-Canon-iR-ADV_C9065_9075-Postscript-Canon.ppd.gz
 foomatic-20200219-Canon-iR-ADV_C9270_9280-Postscript-Canon.ppd.gz
 foomatic-20200219-Canon-imageRunner_C2570-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C2570i-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C2620N-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3100-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3170-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3170i-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C3200-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C5800-Postscript.ppd.gz	foomatic-20200219-Canon-imageRunner_C6800-Postscript.ppd.gz	foomatic-20200219-Genicom-microLaser_210-Postscript.ppd.gz	foomatic-20200219-NEC-SuperScript_4600N-Postscript.ppd.gz	foomatic-20200219-Oce-cm2510-Postscript.ppd.gz	foomatic-20200219-Oce-cm3521-Postscript.ppd.gz	foomatic-20200219-Oce-cm4010-Postscript.ppd.gz	foomatic-20200219-Oce-cm4520-Postscript.ppd.gz	foomatic-20200219-Oce-cm4521-Postscript.ppd.gz	foomatic-20200219-Oce-cm5520-Postscript.ppd.gz	foomatic-20200219-Oce-cm6520-Postscript.ppd.gz	foomatic-20200219-Oki-C5540_MFP-Postscript.ppd.gz	foomatic-20200219-Oki-C5550_MFP-Postscript.ppd.gz	foomatic-20200219-Oki-C5750-Postscript.ppd.gz	foomatic-20200219-Oki-C6050-Postscript.ppd.gz	foomatic-20200219-Oki-C6150-Postscript.ppd.gz	foomatic-20200219-Oki-C710-Postscript.ppd.gz	foomatic-20200219-Oki-C9800_MFP-Postscript.ppd.gz	foomatic-20200219-Panasonic-KX-P8420-Postscript.ppd.gz	foomatic-20200219-Panasonic-KX-P8475-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_780-Postscript.ppd.gz	foomatic-20200219-Tektronix-Phaser_850-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_2500c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_281c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3500c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3510c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_3511-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_351c-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_4511-Postscript.ppd.gz	foomatic-20200219-Toshiba-e-Studio_451c-Postscript.ppd.gz	foomatic-20200219-Xante-FilmMaker_4-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuColor_2006-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6120-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6130-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6180-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_6360-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_7400-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_7760-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8560-Postscript.ppd.gz	foomatic-20200219-Xerox-Phaser_8860-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_24-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7228-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_7232-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_C2424-Postscript.ppd.gz	foomatic-20200219-Xerox-WorkCentre_M24-Postscript.ppd.gz
-foomatic-20200219-Citizen-ProJet_IIc-cdj500.ppd.gz	foomatic-20200219-DEC-DECwriter_520ic-cdj500.ppd.gz	foomatic-20200219-Olivetti-JP470-cdj500.ppd.gz
+foomatic-20200219-Citizen-ProJet_IIc-cdj500.ppd.gz	foomatic-20200219-DEC-DECwriter_520ic-cdj500.ppd.gz	foomatic-20200219-HP-DeskJet_420C-cdj500.ppd.gz	foomatic-20200219-Olivetti-JP470-cdj500.ppd.gz
 foomatic-20200219-Compaq-IJ1200-drv_z42.ppd.gz	foomatic-20200219-Lexmark-X73-drv_z42.ppd.gz	foomatic-20200219-Lexmark-Z42-drv_z42.ppd.gz	foomatic-20200219-Lexmark-Z43-drv_z42.ppd.gz
 foomatic-20200219-Compaq-IJ750-lxm3200X.ppd.gz	foomatic-20200219-Lexmark-3200-lxm3200X.ppd.gz	foomatic-20200219-Lexmark-Z31-lxm3200X.ppd.gz
 foomatic-20200219-Compaq-IJ900-lxm5700m.ppd.gz	foomatic-20200219-Lexmark-5700-lxm5700m.ppd.gz	foomatic-20200219-Xerox-WorkCentre_470cx-lxm5700m.ppd.gz
@@ -524,6 +527,7 @@
 foomatic-20200219-Generic-IBM-Compatible_Dot_Matrix_Printer-ibmpro.ppd.gz
 foomatic-20200219-Generic-PCL_3_Printer-pcl3.ppd.gz
 foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-ljet4.ppd.gz
+foomatic-20200219-Generic-PCL_6_PCL_XL_Printer-pxlcolor.ppd.gz
 foomatic-20200219-Generic-PostScript_Printer-Postscript.ppd.gz
 foomatic-20200219-Gestetner-10512-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS2105-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-LD0105-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-10515_10518_10512-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_2105-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-40105-pxlmono-Savin.ppd.gz
 foomatic-20200219-Gestetner-2212-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-2712-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Gestetner-3212-pxlmono-Gestetner.ppd.gz	foomatic-20200219-Infotec-IS2022-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS2027-pxlmono-Infotec.ppd.gz	foomatic-20200219-Infotec-IS2032-pxlmono-Infotec.ppd.gz	foomatic-20200219-Lanier-5622-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-5627-pxlmono-Lanier.ppd.gz	foomatic-20200219-Lanier-5632-pxlmono-Lanier.ppd.gz	foomatic-20200219-NRG-2205_2238_2212-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-2705_2738_2712-pxlmono-NRG.ppd.gz	foomatic-20200219-NRG-3205_3238_3212-pxlmono-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_1022-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_1027-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_1032-pxlmono-Ricoh.ppd.gz	foomatic-20200219-Savin-2522-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-2527-pxlmono-Savin.ppd.gz	foomatic-20200219-Savin-2532-pxlmono-Savin.ppd.gz
@@ -655,6 +659,40 @@
 foomatic-20200219-Gestetner-SP_C420DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP331cn_SP_C420-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C420DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C420DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP131DN-PDF-Savin.ppd.gz
 foomatic-20200219-Gestetner-SP_C430DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Gestetner-SP_C431DN-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP137CN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-LP142CN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C430DN-PDF-Lanier.ppd.gz	foomatic-20200219-Lanier-SP_C431DN-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_C430DN-PDF-NRG.ppd.gz	foomatic-20200219-NRG-SP_C431DN-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C430DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C431DN-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-CLP37DN-PDF-Savin.ppd.gz	foomatic-20200219-Savin-CLP42DN-PDF-Savin.ppd.gz
 foomatic-20200219-Gestetner-SP_W2470-PDF-Gestetner.ppd.gz	foomatic-20200219-Lanier-LP124w-PDF-Lanier.ppd.gz	foomatic-20200219-NRG-SP_W2470-PDF-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_W2470-PDF-Ricoh.ppd.gz	foomatic-20200219-Savin-2404WDP-PDF-Savin.ppd.gz
+foomatic-20200219-HP-2563-lp2563.ppd.gz
+foomatic-20200219-HP-Business_Inkjet_9100-Postscript.ppd.gz	foomatic-20200219-Lexmark-C500n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C530dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C532dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C532n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C770dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C770n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C772dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C772n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C920n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935hdn-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_C710-Postscript.ppd.gz	foomatic-20200219-Lexmark-X560n-Postscript.ppd.gz
+foomatic-20200219-HP-DesignJet_100plus-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_110-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_120-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_130-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_30-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_5000-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_5500-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_70-cdnj500.ppd.gz	foomatic-20200219-HP-DesignJet_90-cdnj500.ppd.gz
+foomatic-20200219-HP-DesignJet_1050C-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_1055CM-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_230-dnj650c.ppd.gz
+foomatic-20200219-HP-DesignJet_2500CP-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_3500CP-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_350C-dnj650c.ppd.gz	foomatic-20200219-HP-DesignJet_650C-dnj650c.ppd.gz
+foomatic-20200219-HP-DesignJet_500-dnj650c.ppd.gz	foomatic-20200219-HP-DesignJet_800-dnj650c.ppd.gz
+foomatic-20200219-HP-DesignJet_5000PS-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_500ps-dnj650c.ppd.gz
+foomatic-20200219-HP-DesignJet_5500ps-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_750-dnj750X.upp.ppd.gz	foomatic-20200219-HP-DesignJet_750C_Plus-dnj750X.upp.ppd.gz
+foomatic-20200219-HP-DesignJet_800PS-Postscript-HP.ppd.gz
+foomatic-20200219-HP-DesignJet_ColorPro_CAD-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_650C-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_750C-pcl3.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C11-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_1000C-pnm2ppa.ppd.gz
+foomatic-20200219-HP-DeskJet_200-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_310-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_320-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_340C-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_540C-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_560C-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_712C-pnm2ppa.ppd.gz
+foomatic-20200219-HP-DeskJet_722C-pnm2ppa.ppd.gz
+foomatic-20200219-HP-DeskJet_820C-pnm2ppa.ppd.gz
+foomatic-20200219-HP-DeskJet_Plus-pcl3.ppd.gz
+foomatic-20200219-HP-DeskJet_Portable-pcl3.ppd.gz
+foomatic-20200219-HP-LaserJet_3P_w_PCL5-ljet4.ppd.gz
+foomatic-20200219-HP-LaserJet_4P-ljet4.ppd.gz
+foomatic-20200219-HP-LaserJet_5-ljet4.ppd.gz	foomatic-20200219-HP-LaserJet_6-ljet4.ppd.gz
+foomatic-20200219-HP-LaserJet_5M-Postscript.ppd.gz
+foomatic-20200219-HP-OfficeJet_500-cdj550.ppd.gz	foomatic-20200219-Oki-Okijet_2500-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C6-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C8-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_XJ6C-cdj550.ppd.gz	foomatic-20200219-Xerox-Document_Homecentre-cdj550.ppd.gz	foomatic-20200219-Xerox-WorkCentre_450cp-cdj550.ppd.gz
+foomatic-20200219-HP-PaintJet_XL300-pjxl300.ppd.gz
 foomatic-20200219-IBM-3853_JetPrinter-jetp3852.ppd.gz
 foomatic-20200219-IBM-4303_Network_Color_Printer-Postscript.ppd.gz	foomatic-20200219-Oki-Okipage_8c-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C20-Postscript.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C55-Postscript.ppd.gz
 foomatic-20200219-Imagen-ImPress-imagen.ppd.gz
@@ -791,7 +829,6 @@
 foomatic-20200219-Lexmark-1020_Business-pcl3.ppd.gz	foomatic-20200219-Lexmark-3000-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_100C-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_150C-pcl3.ppd.gz	foomatic-20200219-Samsung-SI-630A-pcl3.ppd.gz
 foomatic-20200219-Lexmark-4039_10plus-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_E310-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_E312-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Ep-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_K_1220-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_M410-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_M412-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_W810-Postscript.ppd.gz
 foomatic-20200219-Lexmark-C2132-Postscript-Lexmark.ppd.gz
-foomatic-20200219-Lexmark-C500n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C530dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C532dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C532n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C534n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C770dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C770n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C772dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C772n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C780n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C782n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C920n-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935dn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935dtn-Postscript.ppd.gz	foomatic-20200219-Lexmark-C935hdn-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_C710-Postscript.ppd.gz	foomatic-20200219-Lexmark-X560n-Postscript.ppd.gz
 foomatic-20200219-Lexmark-C510-Postscript-Lexmark.ppd.gz
 foomatic-20200219-Lexmark-C510b-Postscript.ppd.gz	foomatic-20200219-Lexmark-C752b-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Rplus-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_S_1250-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_S_1855-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_Se_3455-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T610-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T612-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T614-Postscript.ppd.gz	foomatic-20200219-Lexmark-Optra_T616-Postscript.ppd.gz	foomatic-20200219-Lexmark-T430-Postscript.ppd.gz	foomatic-20200219-Lexmark-T640-Postscript.ppd.gz	foomatic-20200219-Lexmark-T642-Postscript.ppd.gz	foomatic-20200219-Lexmark-T644-Postscript.ppd.gz	foomatic-20200219-Lexmark-W812-Postscript.ppd.gz	foomatic-20200219-Lexmark-W840-Postscript.ppd.gz
 foomatic-20200219-Lexmark-C520-Postscript-Lexmark.ppd.gz
@@ -839,7 +876,6 @@
 foomatic-20200219-NEC-PICTY180-picty180.ppd.gz
 foomatic-20200219-NEC-PinWriter_P6-necp6.ppd.gz	foomatic-20200219-NEC-PinWriter_P60-necp6.ppd.gz	foomatic-20200219-NEC-PinWriter_P6_plus-necp6.ppd.gz	foomatic-20200219-NEC-PinWriter_P7-necp6.ppd.gz	foomatic-20200219-NEC-PinWriter_P70-necp6.ppd.gz	foomatic-20200219-NEC-PinWriter_P7_plus-necp6.ppd.gz
 foomatic-20200219-NEC-Pinwriter_P20-necp6.ppd.gz	foomatic-20200219-Star-LC24-100-necp6.ppd.gz
-foomatic-20200219-NEC-SuperScript_650C-pcl3.ppd.gz	foomatic-20200219-NEC-SuperScript_750C-pcl3.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C11-pcl3.ppd.gz
 foomatic-20200219-NRG-SP_3500N-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_3500SF-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3500N-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3500SF-Postscript-Ricoh.ppd.gz
 foomatic-20200219-NRG-SP_3510DN-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_3510SF-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3510DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_3510SF-Postscript-Ricoh.ppd.gz
 foomatic-20200219-NRG-SP_C242DN-Postscript-NRG.ppd.gz	foomatic-20200219-NRG-SP_C242SF-Postscript-NRG.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C242DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-Aficio_SP_C242SF-Postscript-Ricoh.ppd.gz
@@ -878,7 +914,6 @@
 foomatic-20200219-Oki-Microline_182-oki182.ppd.gz	foomatic-20200219-Oki-Microline_192plus-oki182.ppd.gz
 foomatic-20200219-Oki-OL400w-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_4w-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_4wplus-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_6w-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_8w-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_8w_Lite-oki4drv.ppd.gz	foomatic-20200219-Oki-Okipage_8z-oki4drv.ppd.gz
 foomatic-20200219-Oki-OL410e-ljet4.ppd.gz	foomatic-20200219-Oki-Super_6e-ljet4.ppd.gz	foomatic-20200219-Ricoh-Aficio_401-ljet4.ppd.gz
-foomatic-20200219-Oki-Okijet_2500-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C6-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_C8-cdj550.ppd.gz	foomatic-20200219-Xerox-DocuPrint_XJ6C-cdj550.ppd.gz	foomatic-20200219-Xerox-Document_Homecentre-cdj550.ppd.gz	foomatic-20200219-Xerox-WorkCentre_450cp-cdj550.ppd.gz
 foomatic-20200219-Olivetti-JP350S-laserjet.ppd.gz
 foomatic-20200219-Olivetti-JP450-djet500.ppd.gz
 foomatic-20200219-Panasonic-KX-P1150-eps9high.ppd.gz
@@ -893,7 +928,6 @@
 foomatic-20200219-Ricoh-GX_3050SFN-pxlcolor-Ricoh.ppd.gz	foomatic-20200219-Ricoh-GX_5050N-pxlcolor-Ricoh.ppd.gz
 foomatic-20200219-Ricoh-GX_E3350N-pxlcolor-Ricoh.ppd.gz
 foomatic-20200219-Ricoh-GX_E5550N-pxlcolor-Ricoh.ppd.gz
-foomatic-20200219-Ricoh-SP_2300L-pcl5-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_2300SFL-pcl5-Ricoh.ppd.gz
 foomatic-20200219-Ricoh-SP_330DN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_3710DN-Postscript-Ricoh.ppd.gz
 foomatic-20200219-Ricoh-SP_330SFN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_330SN-Postscript-Ricoh.ppd.gz	foomatic-20200219-Ricoh-SP_3710SF-Postscript-Ricoh.ppd.gz
 foomatic-20200219-Ricoh-SP_3700-Postscript-Ricoh.ppd.gz
@@ -1030,100 +1064,68 @@
 foomatic-20200219-Xerox-DocuPrint_XJ8C-lxm5700m.ppd.gz
 foomatic-20200219-Xerox-Phaser_3160N-pxlcolor.ppd.gz
 fuji_xerox-20200402-fuji-xerox-20200402-fx-apeosportv-c3375.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-apeosportv-c4475.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docucentreiv-c2263.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docucentreiv-c2275.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docucentreiv-c3370.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docucentreiv-c3375.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docucolor-1450-ga.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docuprint-205.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docuprint-c3050.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docuprint-c3450-d.ppd.gz	fuji_xerox-20200402-fuji-xerox-20200402-fx-docuprint-c4000-d.ppd.gz
+fujifilm-20210604-DOCUPRINTM235DW.ppd.gz	fujifilm-20210604-DOCUPRINTM235Z.ppd.gz	fujifilm-20210604-DOCUPRINTM275Z.ppd.gz	fujifilm-20210604-DOCUPRINTP235D.ppd.gz	fujifilm-20210604-DOCUPRINTP275DW.ppd.gz
+fujifilm-20210604-DOCUPRINTM285Z.ppd.gz	fujifilm-20210604-DOCUPRINTP285DW.ppd.gz
+fujifilm-20210604-DOCUPRINTM375DF.ppd.gz	fujifilm-20210604-DOCUPRINTM375Z.ppd.gz	fujifilm-20210604-DOCUPRINTM385Z.ppd.gz	fujifilm-20210604-DOCUPRINTP360DW.ppd.gz	fujifilm-20210604-DOCUPRINTP375D.ppd.gz	fujifilm-20210604-DOCUPRINTP375DW.ppd.gz	fujifilm-20210604-MULTIWRITER5350.ppd.gz	konica_minolta-20210401-konica-minolta-20210401-bizhub4000i.ppd.gz	konica_minolta-20210401-konica-minolta-20210401-bizhub4020i.ppd.gz	konica_minolta-20210401-konica-minolta-20210401-bizhub5020i.ppd.gz
+fujifilm-20210604-DOCUPRINTP385DW.ppd.gz	konica_minolta-20210401-konica-minolta-20210401-bizhub5000i.ppd.gz
 hp-20171121-hplip-3.17.10-hp-color_laserjet-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-deskjet_f4210_series.ppd.gz	hp-20171121-hplip-3.17.10-hp-deskjet_f4213_series.ppd.gz
 hp-20171121-hplip-3.17.10-hp-laserjet_4-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_4si-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_4v-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_6p-ps.ppd.gz
 hp-20171121-hplip-3.17.10-hp-laserjet_p1505n-pcl3.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_p2014-pcl3.ppd.gz	hp-20171121-hplip-3.17.10-hp-laserjet_p2014n-pcl3.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_p2055_series-ps.ppd.gz
-hp-20171121-hplip-3.17.10-hp-laserjet_p4010_series-ps.ppd.gz
 hp-20190111-hplip-3.18.12-hp-PCL3-Class1B.ppd.gz
 hp-20190111-hplip-3.18.12-hp-designjet_z6200_42in_photo-ps.ppd.gz
 hp-20190111-hplip-3.18.12-hp-designjet_z6200_60in_photo-ps.ppd.gz
 hp-20190111-hplip-3.18.12-hp-designjet_z6810ps_42in-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Ampere.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Ampere.ppd.gz
-hp-20190918-hplip-3.19.6-hp-CLE17.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CLE17.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Copperhead.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Copperhead.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Copperhead12.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Copperhead12.ppd.gz
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CopperheadIPH.ppd.gz
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH15.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CopperheadIPH15.ppd.gz
-hp-20190918-hplip-3.19.6-hp-CopperheadIPH17.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CopperheadIPH17.ppd.gz
-hp-20190918-hplip-3.19.6-hp-CopperheadXLP.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CopperheadXLP.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Corbett.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Corbett.ppd.gz
-hp-20190918-hplip-3.19.6-hp-DJ55xx.ppd.gz	hplip-20200303-hplip-3.19.12-hp-DJ55xx.ppd.gz
-hp-20190918-hplip-3.19.6-hp-DJ9xxVIP.ppd.gz	hplip-20200303-hplip-3.19.12-hp-DJ9xxVIP.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Gemstone.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Gemstone.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Kapan.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Kapan.ppd.gz
-hp-20190918-hplip-3.19.6-hp-LJ-Class1.ppd.gz	hplip-20200303-hplip-3.19.12-hp-LJ-Class1.ppd.gz
-hp-20190918-hplip-3.19.6-hp-LJ-Class2.ppd.gz	hplip-20200303-hplip-3.19.12-hp-LJ-Class2.ppd.gz
-hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz	hplip-20200303-hplip-3.19.12-hp-LJ-Class3.ppd.gz
-hp-20190918-hplip-3.19.6-hp-LJ-Class6.ppd.gz	hplip-20200303-hplip-3.19.12-hp-LJ-Class6.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Mimas.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Mimas.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Mimas15.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Mimas15.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Mimas17.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Mimas17.ppd.gz
-hp-20190918-hplip-3.19.6-hp-MimasTDR.ppd.gz	hplip-20200303-hplip-3.19.12-hp-MimasTDR.ppd.gz
-hp-20190918-hplip-3.19.6-hp-OJ7000.ppd.gz	hplip-20200303-hplip-3.19.12-hp-OJ7000.ppd.gz
-hp-20190918-hplip-3.19.6-hp-OJProKx50.ppd.gz	hplip-20200303-hplip-3.19.12-hp-OJProKx50.ppd.gz
-hp-20190918-hplip-3.19.6-hp-P15_CISS.ppd.gz	hplip-20200303-hplip-3.19.12-hp-P15_CISS.ppd.gz
-hp-20190918-hplip-3.19.6-hp-PSP100.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PSP100.ppd.gz
-hp-20190918-hplip-3.19.6-hp-PSP470.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PSP470.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Pyramid.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Pyramid.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Pyramid15.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Pyramid15.ppd.gz
-hp-20190918-hplip-3.19.6-hp-PyramidPlus.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PyramidPlus.ppd.gz
-hp-20190918-hplip-3.19.6-hp-PyramidRefresh15.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PyramidRefresh15.ppd.gz
-hp-20190918-hplip-3.19.6-hp-PyramidRefresh17.ppd.gz	hplip-20200303-hplip-3.19.12-hp-PyramidRefresh17.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Python.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Python.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Python10.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Python10.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Python11.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Python11.ppd.gz
-hp-20190918-hplip-3.19.6-hp-SPDOfficejetProAsize.ppd.gz	hplip-20200303-hplip-3.19.12-hp-SPDOfficejetProAsize.ppd.gz
-hp-20190918-hplip-3.19.6-hp-SPDOfficejetProBsize.ppd.gz	hplip-20200303-hplip-3.19.12-hp-SPDOfficejetProBsize.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Saipan.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Saipan.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Saipan15B.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Saipan15B.ppd.gz
-hp-20190918-hplip-3.19.6-hp-Stabler.ppd.gz	hplip-20200303-hplip-3.19.12-hp-Stabler.ppd.gz
-hp-20190918-hplip-3.19.6-hp-StingrayOJ.ppd.gz	hplip-20200303-hplip-3.19.12-hp-StingrayOJ.ppd.gz
-hp-20190918-hplip-3.19.6-hp-ViperMinusVIP.ppd.gz	hplip-20200303-hplip-3.19.12-hp-ViperMinusVIP.ppd.gz
-hp-20190918-hplip-3.19.6-hp-ViperPlusVIP.ppd.gz	hplip-20200303-hplip-3.19.12-hp-ViperPlusVIP.ppd.gz
-hp-20190918-hplip-3.19.6-hp-cm8060_mfp_with_edgeline-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-cm8050_mfp_with_edgeline-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-cm8060_mfp_with_edgeline-ps.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Ampere.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Ampere.ppd.gz
+hp-20190918-hplip-3.19.6-hp-CLE.ppd.gz
+hp-20190918-hplip-3.19.6-hp-CLE17.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Copperhead.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Copperhead.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Copperhead12.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Copperhead12.ppd.gz
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CopperheadIPH.ppd.gz
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH15.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CopperheadIPH15.ppd.gz
+hp-20190918-hplip-3.19.6-hp-CopperheadIPH17.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CopperheadIPH17.ppd.gz
+hp-20190918-hplip-3.19.6-hp-CopperheadXLP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CopperheadXLP.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Corbett.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Corbett.ppd.gz
+hp-20190918-hplip-3.19.6-hp-DJ55xx.ppd.gz	hplip-20201209-hplip-3.20.11-hp-DJ55xx.ppd.gz
+hp-20190918-hplip-3.19.6-hp-DJ9xxVIP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-DJ9xxVIP.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Gemstone.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Gemstone.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Kapan.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Kapan.ppd.gz
+hp-20190918-hplip-3.19.6-hp-LJ-Class1.ppd.gz	hplip-20201209-hplip-3.20.11-hp-LJ-Class1.ppd.gz
+hp-20190918-hplip-3.19.6-hp-LJ-Class2.ppd.gz	hplip-20201209-hplip-3.20.11-hp-LJ-Class2.ppd.gz
+hp-20190918-hplip-3.19.6-hp-LJ-Class3.ppd.gz	hplip-20201209-hplip-3.20.11-hp-LJ-Class3.ppd.gz
+hp-20190918-hplip-3.19.6-hp-LJ-Class6.ppd.gz	hplip-20201209-hplip-3.20.11-hp-LJ-Class6.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Mimas.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Mimas.ppd.gz	hplip-20210520-hplip-3.21.4-hp-envy_6400_series.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Mimas15.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Mimas15.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Mimas17.ppd.gz
+hp-20190918-hplip-3.19.6-hp-MimasTDR.ppd.gz	hplip-20201209-hplip-3.20.11-hp-MimasTDR.ppd.gz
+hp-20190918-hplip-3.19.6-hp-OJ7000.ppd.gz	hplip-20201209-hplip-3.20.11-hp-OJ7000.ppd.gz
+hp-20190918-hplip-3.19.6-hp-P15_CISS.ppd.gz
+hp-20190918-hplip-3.19.6-hp-PSP100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-PSP100.ppd.gz
+hp-20190918-hplip-3.19.6-hp-PSP470.ppd.gz	hplip-20201209-hplip-3.20.11-hp-PSP470.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Pyramid.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Pyramid.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Pyramid15.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Pyramid15.ppd.gz
+hp-20190918-hplip-3.19.6-hp-PyramidPlus.ppd.gz	hplip-20201209-hplip-3.20.11-hp-PyramidPlus.ppd.gz
+hp-20190918-hplip-3.19.6-hp-PyramidRefresh15.ppd.gz	hplip-20201209-hplip-3.20.11-hp-PyramidRefresh15.ppd.gz
+hp-20190918-hplip-3.19.6-hp-PyramidRefresh17.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Python.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Python.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Python10.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Python10.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Python11.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Python11.ppd.gz
+hp-20190918-hplip-3.19.6-hp-SPDOfficejetProAsize.ppd.gz	hplip-20201209-hplip-3.20.11-hp-SPDOfficejetProAsize.ppd.gz
+hp-20190918-hplip-3.19.6-hp-SPDOfficejetProBsize.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Saipan.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Saipan.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Saipan15B.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Saipan15B.ppd.gz
+hp-20190918-hplip-3.19.6-hp-Stabler.ppd.gz	hplip-20201209-hplip-3.20.11-hp-Stabler.ppd.gz
+hp-20190918-hplip-3.19.6-hp-StingrayOJ.ppd.gz	hplip-20201209-hplip-3.20.11-hp-StingrayOJ.ppd.gz
+hp-20190918-hplip-3.19.6-hp-ViperMinusVIP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-ViperMinusVIP.ppd.gz
+hp-20190918-hplip-3.19.6-hp-ViperPlusVIP.ppd.gz	hplip-20201209-hplip-3.20.11-hp-ViperPlusVIP.ppd.gz
 hp-20190918-hplip-3.19.6-hp-color_designjet_xl_3600-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-color_laserjet_pro_mfp_m277-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_pro_mfp_m277-ps.ppd.gz
 hp-20190918-hplip-3.19.6-hp-designjet_t2600dr-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-laserjet_200_color_m251-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-laserjet_200_color_m251-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-laserjet_m1522_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-laserjet_m1522_mfp-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-laserjet_m2727_mfp_series-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-laserjet_m2727_mfp_series-ps.ppd.gz
-hp-20190918-hplip-3.19.6-hp-postscript-inkjet.ppd.gz	hplip-20200303-hplip-3.19.12-hp-postscript-inkjet.ppd.gz
-hp-20190918-hplip-3.19.6-hp-postscript-laserjet-pro.ppd.gz	hplip-20200303-hplip-3.19.12-hp-postscript-laserjet-pro.ppd.gz
-hp-20190918-hplip-3.19.6-hp-postscript-laserjet.ppd.gz	hplip-20200303-hplip-3.19.12-hp-postscript-laserjet.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-DJGenericVIP.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-PCL3-Class3.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-PCL3-Class3A.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-PCL3-Class3B.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-PCL4-Class1.ppd.gz	hplip-20201028-hplip-3.19.12-hp-PCL4-Class1.ppd.gz
+hp-20190918-hplip-3.19.6-hp-postscript-inkjet.ppd.gz	hplip-20201209-hplip-3.20.11-hp-postscript-inkjet.ppd.gz
+hp-20190918-hplip-3.19.6-hp-postscript-laserjet-pro.ppd.gz
+hp-20190918-hplip-3.19.6-hp-postscript-laserjet.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-color_designjet_xl_3600-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2550_series-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2605-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2605dn-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2605dtn-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2700-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2700n-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_2800-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2820-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2830-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_2840-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_cm1015-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-color_laserjet_cm1017-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_e85055-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_flowmfp_m776-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_m856-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-color_laserjet_mfp_m776-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_4000ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_4020ps-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_4500mfp.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_4500ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_4520mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_4520ps-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_Z6_24in-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_Z6_44in-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_Z6dr_44in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_Z9_24in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_Z9_44in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_Z9dr_44in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_d5800-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_24in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1120ps_24in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1100ps_44in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t1120ps_44in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1200_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1300_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t1500-postscript.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_t1530-postscript.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t2530-postscript.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_t1600_printer-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t2600-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_t1600dr-ps.ppd.gz
@@ -1131,50 +1133,109 @@
 hplip-20200303-hplip-3.19.12-hp-designjet_t1700dr_postscript-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_t1708_postscript-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_t1708dr_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t2300_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t2500-postscript.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_t2600dr-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_t3500-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t7100ps_monochrome-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t7200-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t770_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t770ps_24in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_24in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_t790ps_44in-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-designjet_t795ps_44in-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_t920-postscript.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-designjet_t930-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z2600_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z5200_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z5400-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z5600_postscript-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_42in_photo-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6100ps_60in_photo-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6600-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6610ps_60in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6800_photo-postscript.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-designjet_z6810ps_60in-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-deskjet_3420.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3425.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3450.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-deskjet_3700_series.ppd.gz	hp-20190918-hplip-3.19.6-hp-CLE.ppd.gz	hplip-20200303-hplip-3.19.12-hp-CLE.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-deskjet_950c.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-deskjet_d1600_series.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-deskjet_d2600_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_ink_advant_k109a-z.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4000_k210.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-deskjet_d4100_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d4200_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d4300_series.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-deskjet_f300_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_f4100_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_5600_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1400_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1310_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4200_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_j5500_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3650.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3840.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d2400_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_5500_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_j3600_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1300_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d2300_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4255.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3600.ppd.gz	hplip-20200303-hplip-3.19.12-hp-910.ppd.gz	hplip-20200303-hplip-3.19.12-hp-915.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_j3500_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1358_series.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-laserjet_100_color_mfp_m175-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-officejet_4300_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_f2200_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1200_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_f2100_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d1400_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3740.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d1500_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3500.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3910.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_d1300_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3320.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3550.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4100_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1100_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3940.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3325.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3900.ppd.gz	hplip-20200303-hplip-3.19.12-hp-deskjet_3920.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4105.ppd.gz	hplip-20200303-hplip-3.19.12-hp-officejet_4115_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-psc_1000_series.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_p55250-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-pagewide_xl_3900ps_mfp-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4000ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4000ps_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4500ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4500ps_mfp-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4100ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4100ps_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4600ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_4600ps_mfp-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps_mfp-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5000ps_blueprinter-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_6000ps_mfp-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp-ps.ppd.gz	hplip-20200303-hplip-3.19.12-hp-pagewide_xl_6000ps-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-pagewide_xl_5100ps_mfp_blueprinter-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps-ps.ppd.gz
 hplip-20200303-hplip-3.19.12-hp-pagewide_xl_8000ps_blueprinter-ps.ppd.gz
-hplip-20200303-hplip-3.19.12-hp-photosmart_a530_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-photosmart_a630_series.ppd.gz	hplip-20200303-hplip-3.19.12-hp-photosmart_a640_series.ppd.gz
+hplip-20201209-hplip-3.20.11-apollo-2100.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-2150.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-p2000-u.ppd.gz
+hplip-20201209-hplip-3.20.11-apollo-2200.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-p2250.ppd.gz
+hplip-20201209-hplip-3.20.11-apollo-2500.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-2600.ppd.gz	hplip-20201209-hplip-3.20.11-apollo-2650.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-2000c.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-2500c.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-CLE17.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-Mimas17.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-P15_CISS.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-PyramidRefresh17.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-SPDOfficejetProBsize.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-business_inkjet_1200.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_pro_k5400.ppd.gz	hplip-20201209-hplip-3.20.11-hp-business_inkjet_1100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_pro_k5300.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-cm8060_mfp_with_edgeline-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-color_laserjet_2550_series-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-color_laserjet_2605dtn-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-color_laserjet_2700n-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-color_laserjet_2840-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-color_laserjet_cm1017-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-color_laserjet_e85055-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-color_laserjet_flowmfp_m776-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-color_laserjet_m856-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-color_laserjet_mfp_m776-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-color_laserjet_pro_mfp_m277-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_4000ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_4020ps-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_4500mfp.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_4500ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_4520mfp-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_4520ps-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_Z9_24in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_Z9_44in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_Z9dr_44in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_d5800-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t1100ps_24in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t1120ps_24in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t1100ps_44in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t1120ps_44in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t1200_postscript-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t1300_postscript-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t1500-postscript.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t2300_postscript-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t2500-postscript.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t7100ps-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t7100ps_monochrome-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t7200-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t770_postscript-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t770ps_24in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t790ps_24in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_t790ps_44in-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-designjet_t795ps_44in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z2600_postscript-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z5200_postscript-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z5400-postscript.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z5600_postscript-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z6100ps_42in_photo-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z6100ps_60in_photo-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z6600-postscript.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z6610ps_60in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z6800_photo-postscript.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-designjet_z6810ps_60in-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_1120.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_1125.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_1220c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_1280.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_9300.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_3420.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3425.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3450.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_350.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_400l.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_3700_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-CLE.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_400.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_500c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_540.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_550c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_310.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_320.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_500.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_505j.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_510.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_520.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_lx.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_350.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_600.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_640c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_692.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_695.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_612c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_648c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_693.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_697.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_700.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_710.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_720.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_725.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_656c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_630c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_632c.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_660.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_630.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_1100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_670.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_670c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_670tv.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_672c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_680.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_682.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_520.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_570.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_580.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_590.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_series_610.ppd.gz	hplip-20201209-hplip-3.20.11-hp-printer_scanner_copier_300.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_825c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_845c.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_870c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_855c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_pro_1150c.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_9800.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_9600.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_k7100.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_d1600_series.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_d2600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_ink_advant_k109a-z.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4000_k210.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_d4100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d4200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d4300_series.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_f300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_f4100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_5600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1310_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j5500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3650.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3840.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d2400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_5500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j3600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d2300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4255.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3600.ppd.gz	hplip-20201209-hplip-3.20.11-hp-910.ppd.gz	hplip-20201209-hplip-3.20.11-hp-915.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j3500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1358_series.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-deskjet_f4200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c3100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c6200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j5700_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_6300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_6500.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c5100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c5200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2350_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_6200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j4500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_2570_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c6100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_6800.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j6400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7700_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c7100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_6980_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_7200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_2600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_3200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_8000_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5700.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d2500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7800_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_8200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_8400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4340_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d5100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_6600.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_3300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_8100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c4380_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c7200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d7100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d7200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d7300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5650.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5900_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_6100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_7400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7900_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_c5500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d5060_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d5300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_6940_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_7300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_2700_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_3100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7550.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d5400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d6100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_d7400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2210_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5850.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j4680_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7600_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-business_inkjet_1000.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5552.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5600.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5652.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_5800.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d730.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_f735.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_ink_advant_k209a-z.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_6150_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_j4660_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_7345.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_2200_series.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-laserjet_100_color_mfp_m175-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-laserjet_m1522_mfp-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-laserjet_m2727_mfp_series-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-officejet_4300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_f2200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1200_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_f2100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d1400_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3740.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d1500_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3500.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3910.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_d1300_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3320.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3550.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_1100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3940.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3325.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3900.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3920.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4105.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_4115_series.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-officejet_5100_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_920c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_940c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3820.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_970c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_975c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3816.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_930c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_955c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_v40.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_p1000.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_p1100.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3810.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3819.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3822.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_3870.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_916c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_932c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_933c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_934c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_935c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_948c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_950c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_952c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_957c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_959c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_g55.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_g55xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_g85.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_g85xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_g95.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_k60.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_k60xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_k80.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_k80xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_v30.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_v40xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_v45.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_720.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_750.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_750xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_760.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_780.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_780xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_900_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_920.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_950.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_950vr.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_950xi.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-officejet_pro_1170c_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_890c.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-officejet_pro_k8600.ppd.gz	hp-20190918-hplip-3.19.6-hp-OJProKx50.ppd.gz	hplip-20201209-hplip-3.20.11-hp-OJProKx50.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-officejet_t_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_895c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_843c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-psc_500.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_812c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_815c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_816c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_830c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_832c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_841c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_842c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_880c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-deskjet_882c.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r40xi.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r45.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r60.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r65.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r80.ppd.gz	hplip-20201209-hplip-3.20.11-hp-officejet_r80xi.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-pagewide_p55250-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-pagewide_xl_4000ps-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_4000ps_mfp-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_4500ps-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_4500ps_mfp-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-pagewide_xl_5000ps-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_5000ps_mfp-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-pagewide_xl_5100ps-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_6000ps_mfp-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-pagewide_xl_5100ps_mfp-ps.ppd.gz	hplip-20201209-hplip-3.20.11-hp-pagewide_xl_6000ps-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-pagewide_xl_8000ps-ps.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-photosmart_420_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_330_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_370_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_320_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a430_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a520_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a310_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_380_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a440_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a510_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a320_series.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-photosmart_8700_series.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-photosmart_a530_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a630_series.ppd.gz	hplip-20201209-hplip-3.20.11-hp-photosmart_a640_series.ppd.gz
+hplip-20201209-hplip-3.20.11-hp-photosmart_c8100_series.ppd.gz
+hplip-20201217-hplip-3.20.11-hp-PCLM_COLOR.ppd.gz
+hplip-20201217-hplip-3.20.11-hp-PCLM_MONO.ppd.gz
+hplip-20210520-hplip-3.21.4-hp-PCLM-MONO.ppd.gz
+hplip-20210520-hplip-3.21.4-hp-postscript-laserjet-pro.ppd.gz
+hplip-20210520-hplip-3.21.4-hp-postscript-laserjet.ppd.gz
 konica_minolta-20200331-konica-minolta-20200331-konica-minolta-226i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-246i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-266i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-306i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-227.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-287.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-308.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-308e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-367.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-368.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-368e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-4052.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-458.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-458e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-4752.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-558.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-558e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-658e.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-758-jp-eu.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-808-us.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-958.ppd.gz
 konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c226.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c227.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c258.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c266.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c287.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c308.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c3351.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c368.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c3851.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c3851fs.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c458.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c558.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c658.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c659.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-bizhub-c759.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c250i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c300i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c3300i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c3320i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c3350i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c360i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c4000i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c4050i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c450i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c550i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-c650i.ppd.gz	konica_minolta-20200331-konica-minolta-20200331-konica-minolta-workplace-hub.ppd.gz
 kyocera-20180809-Kyocera_TASKalfa_3051ci.ppd.gz
@@ -1276,7 +1337,25 @@
 kyocera-20200716-Kyocera_TASKalfa_5003iJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_6003iJ.ppd.gz
 kyocera-20200716-Kyocera_TASKalfa_7003iJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_9003iJ.ppd.gz
 kyocera-20200716-Kyocera_TASKalfa_7353ciJ.ppd.gz	kyocera-20200716-Kyocera_TASKalfa_8353ciJ.ppd.gz
-kyocera-20200716-TA_P-4531_MFP.ppd.gz	kyocera-20200716-TA_P-4531i_MFP.ppd.gz
+kyocera-20210301-TA_2507ci.ppd.gz	kyocera-20210301-TA_3207ci.ppd.gz	kyocera-20210301-TA_4007ci.ppd.gz	kyocera-20210301-TA_5007ci.ppd.gz	kyocera-20210301-TA_6007ci.ppd.gz	kyocera-20210309-TA_2507ci.ppd.gz	kyocera-20210309-TA_3207ci.ppd.gz	kyocera-20210309-TA_4007ci.ppd.gz	kyocera-20210309-TA_5007ci.ppd.gz	kyocera-20210309-TA_6007ci.ppd.gz
+kyocera-20210301-TA_302ci.ppd.gz	kyocera-20210301-TA_352ci.ppd.gz	kyocera-20210301-TA_402ci.ppd.gz	kyocera-20210301-TA_502ci.ppd.gz	kyocera-20210309-TA_302ci.ppd.gz	kyocera-20210309-TA_352ci.ppd.gz	kyocera-20210309-TA_402ci.ppd.gz	kyocera-20210309-TA_502ci.ppd.gz
+kyocera-20210301-TA_3262i.ppd.gz	kyocera-20210301-TA_4062i.ppd.gz	kyocera-20210309-TA_3262i.ppd.gz	kyocera-20210309-TA_4062i.ppd.gz
+kyocera-20210301-TA_357ci.ppd.gz	kyocera-20210301-TA_P-C3062DN.ppd.gz	kyocera-20210301-TA_P-C3062i_MFP.ppd.gz	kyocera-20210301-TA_P-C3066i_MFP.ppd.gz	kyocera-20210301-TA_P-C3562DN.ppd.gz	kyocera-20210301-TA_P-C3562i_MFP.ppd.gz	kyocera-20210301-TA_P-C3566i_MFP.ppd.gz	kyocera-20210301-TA_P-C4072DN.ppd.gz	kyocera-20210309-TA_357ci.ppd.gz	kyocera-20210309-TA_P-C3062DN.ppd.gz	kyocera-20210309-TA_P-C3062i_MFP.ppd.gz	kyocera-20210309-TA_P-C3066i_MFP.ppd.gz	kyocera-20210309-TA_P-C3562DN.ppd.gz	kyocera-20210309-TA_P-C3562i_MFP.ppd.gz	kyocera-20210309-TA_P-C3566i_MFP.ppd.gz	kyocera-20210309-TA_P-C4072DN.ppd.gz
+kyocera-20210301-TA_5057i.ppd.gz	kyocera-20210301-TA_6057i.ppd.gz	kyocera-20210301-TA_P-6040DN.ppd.gz	kyocera-20210309-TA_5057i.ppd.gz	kyocera-20210309-TA_6057i.ppd.gz	kyocera-20210309-TA_P-6040DN.ppd.gz
+kyocera-20210301-TA_7057i.ppd.gz	kyocera-20210301-TA_8057i.ppd.gz	kyocera-20210309-TA_7057i.ppd.gz	kyocera-20210309-TA_8057i.ppd.gz
+kyocera-20210301-TA_7307ci.ppd.gz	kyocera-20210301-TA_8307ci.ppd.gz	kyocera-20210309-TA_7307ci.ppd.gz	kyocera-20210309-TA_8307ci.ppd.gz
+kyocera-20210301-TA_P-2540i_MFP.ppd.gz	kyocera-20210309-TA_P-2540i_MFP.ppd.gz
+kyocera-20210301-TA_P-3521_MFP.ppd.gz	kyocera-20210301-TA_P-3522DW.ppd.gz	kyocera-20210301-TA_P-3527w_MFP.ppd.gz	kyocera-20210301-TA_P-4020DN.ppd.gz	kyocera-20210301-TA_P-4020DW.ppd.gz	kyocera-20210301-TA_P-4020_MFP.ppd.gz	kyocera-20210301-TA_P-4025w_MFP.ppd.gz	kyocera-20210301-TA_P-4026iw_MFP.ppd.gz	kyocera-20210309-TA_P-3521_MFP.ppd.gz	kyocera-20210309-TA_P-3522DW.ppd.gz	kyocera-20210309-TA_P-3527w_MFP.ppd.gz	kyocera-20210309-TA_P-4020DN.ppd.gz	kyocera-20210309-TA_P-4020DW.ppd.gz	kyocera-20210309-TA_P-4020_MFP.ppd.gz	kyocera-20210309-TA_P-4025w_MFP.ppd.gz	kyocera-20210309-TA_P-4026iw_MFP.ppd.gz
+kyocera-20210301-TA_P-4531_MFP.ppd.gz	kyocera-20210301-TA_P-4531i_MFP.ppd.gz	kyocera-20210301-TA_P-4536_MFP.ppd.gz	kyocera-20210301-TA_P-4536i_MFP.ppd.gz	kyocera-20210301-TA_P-5536i_MFP.ppd.gz	kyocera-20210301-TA_P-6038i_MFP.ppd.gz	kyocera-20210301-TA_P-6038if_MFP.ppd.gz	kyocera-20210309-TA_P-4531_MFP.ppd.gz	kyocera-20210309-TA_P-4531i_MFP.ppd.gz	kyocera-20210309-TA_P-4536_MFP.ppd.gz	kyocera-20210309-TA_P-4536i_MFP.ppd.gz	kyocera-20210309-TA_P-5536i_MFP.ppd.gz	kyocera-20210309-TA_P-6038i_MFP.ppd.gz	kyocera-20210309-TA_P-6038if_MFP.ppd.gz
+kyocera-20210301-TA_P-4532DN.ppd.gz	kyocera-20210301-TA_P-5032DN.ppd.gz	kyocera-20210301-TA_P-5532DN.ppd.gz	kyocera-20210301-TA_P-6033DN.ppd.gz	kyocera-20210309-TA_P-4532DN.ppd.gz	kyocera-20210309-TA_P-5032DN.ppd.gz	kyocera-20210309-TA_P-5532DN.ppd.gz	kyocera-20210309-TA_P-6033DN.ppd.gz
+kyocera-20210301-TA_P-C2155w_MFP.ppd.gz	kyocera-20210301-TA_P-C2650DW.ppd.gz	kyocera-20210301-TA_P-C2655w_MFP.ppd.gz	kyocera-20210309-TA_P-C2155w_MFP.ppd.gz	kyocera-20210309-TA_P-C2650DW.ppd.gz	kyocera-20210309-TA_P-C2655w_MFP.ppd.gz
+kyocera-20210301-TA_P-C2480i_MFP.ppd.gz	kyocera-20210309-TA_P-C2480i_MFP.ppd.gz
+kyocera-20210630-Kyocera_CS_4004i.ppd.gz	kyocera-20210630-Kyocera_CS_5004i.ppd.gz	kyocera-20210630-Kyocera_CS_6004i.ppd.gz	kyocera-20210630-Kyocera_CS_7004i.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_4004i.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_5004i.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_6004i.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_7004i.ppd.gz
+kyocera-20210630-Kyocera_CS_4054ci.ppd.gz	kyocera-20210630-Kyocera_CS_5054ci.ppd.gz	kyocera-20210630-Kyocera_CS_6054ci.ppd.gz	kyocera-20210630-Kyocera_CS_7054ci.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_4054ci.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_5054ci.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_6054ci.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_7054ci.ppd.gz
+kyocera-20210630-Kyocera_TASKalfa_2554ci_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_3554ci_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_4054ci_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_5054ci_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_6054ci_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_7054ci_J.ppd.gz
+kyocera-20210630-Kyocera_TASKalfa_5004i_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_6004i_J.ppd.gz	kyocera-20210630-Kyocera_TASKalfa_7004i_J.ppd.gz
+kyocera-20210630-TA_2508ci.ppd.gz	kyocera-20210630-TA_3508ci.ppd.gz	kyocera-20210630-TA_4008ci.ppd.gz	kyocera-20210630-TA_5008ci.ppd.gz	kyocera-20210630-TA_6008ci.ppd.gz	kyocera-20210630-TA_7008ci.ppd.gz
+kyocera-20210630-TA_5058i.ppd.gz	kyocera-20210630-TA_6058i.ppd.gz	kyocera-20210630-TA_7058i.ppd.gz
 lanier-20190916-Lanier-IM_550_PDF.ppd.gz	lanier-20190916-Lanier-IM_600_PDF.ppd.gz	nrg-20190916-NRG-IM_550_PDF.ppd.gz	nrg-20190916-NRG-IM_600_PDF.ppd.gz	ricoh-20190916-Ricoh-IM_550_PDF.ppd.gz	ricoh-20190916-Ricoh-IM_600_PDF.ppd.gz	savin-20190916-Savin-IM_550_PDF.ppd.gz	savin-20190916-Savin-IM_600_PDF.ppd.gz
 lanier-20190916-Lanier-IM_600SR_PDF.ppd.gz	nrg-20190916-NRG-IM_600SR_PDF.ppd.gz	ricoh-20190916-Ricoh-IM_600SR_PDF.ppd.gz	savin-20190916-Savin-IM_600SR_PDF.ppd.gz
 lanier-20190916-Lanier-P_800_PDF.ppd.gz	lanier-20190916-Lanier-P_801_PDF.ppd.gz	nrg-20190916-NRG-P_800_PDF.ppd.gz	nrg-20190916-NRG-P_801_PDF.ppd.gz	ricoh-20190916-Ricoh-P_800_PDF.ppd.gz	ricoh-20190916-Ricoh-P_801_PDF.ppd.gz	savin-20190916-Savin-P_800_PDF.ppd.gz	savin-20190916-Savin-P_801_PDF.ppd.gz
@@ -1349,7 +1428,6 @@
 lexmark-20201101-Lexmark_XM5100_Series.ppd.gz
 lexmark-20201101-Lexmark_XM7100_Series.ppd.gz
 oki-20200129-oki-c542-ps.ppd.gz	oki-20200329-OKI-C532-PS.ppd.gz	oki-20200329-OKI-MC563-PS.ppd.gz	oki-20200329-OKI-MC573-PS.ppd.gz
-oki-20200329-ES8434-PS.ppd.gz	oki-20200329-OKI-C834-PS.ppd.gz
 oki-20200329-OKB432_a.ppd.gz	oki-20200329-OKM492_a.ppd.gz
 oki-20200329-OKB512_a.ppd.gz	oki-20200329-OKM562_a.ppd.gz
 oki-20200329-OKB841_a110.ppd.gz
@@ -1358,11 +1436,12 @@
 oki-20200329-OKI-C712-PS.ppd.gz
 oki-20200329-OKI-C833-PS.ppd.gz
 oki-20200329-OKI-C843-PS.ppd.gz
-oki-20200329-OKI-C844-PS.ppd.gz
 oki-20201022-ES6450_PS.ppd.gz	oki-20201022-OKI_C650_PS.ppd.gz
-oki-20201022-OKI_MC843_PS.ppd.gz	oki-20201022-OKI_MC863_PS.ppd.gz
-oki-20201022-OKI_MC853_PS.ppd.gz	oki-20201022-OKI_MC873_PS.ppd.gz
-oki-20201022-OKI_MC883_PS.ppd.gz
+oki-20210628-ES8434_PS.ppd.gz	oki-20210628-OKI_C834_PS.ppd.gz
+oki-20210628-OKI_C844_PS.ppd.gz
+oki-20210628-OKI_MC843_PS.ppd.gz	oki-20210628-OKI_MC863_PS.ppd.gz
+oki-20210628-OKI_MC853_PS.ppd.gz	oki-20210628-OKI_MC873_PS.ppd.gz
+oki-20210628-OKI_MC883_PS.ppd.gz
 ricoh-20190916-Ricoh-IPSiO_SP_3400L_PXL.ppd.gz	ricoh-20190916-Ricoh-IPSiO_SP_3410SF_PXL.ppd.gz	ricoh-20190916-Ricoh-IPSiO_SP_3410_PXL.ppd.gz
 ricoh-20190916-Ricoh-IPSiO_SP_3510SF_PXL.ppd.gz	ricoh-20190916-Ricoh-IPSiO_SP_3510_PXL.ppd.gz
 ricoh-20190916-Ricoh-MP_C306Z_JPN_PDF.ppd.gz
@@ -1383,7 +1462,7 @@
 ricoh-20200527-Gestetner-GS3021_PDF.ppd.gz	ricoh-20200527-Ricoh-M_C2001_PDF.ppd.gz
 ricoh-20200527-Infotec-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Infotec-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-Lanier-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Lanier-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-NRG-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-NRG-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-Ricoh-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Ricoh-Pro_C5310S_PDF.ppd.gz	ricoh-20200527-Savin-Pro_C5300S_PDF.ppd.gz	ricoh-20200527-Savin-Pro_C5310S_PDF.ppd.gz
 ricoh-20200527-Lanier-IM_C6500_PDF.ppd.gz	ricoh-20200527-Lanier-IM_C8000_PDF.ppd.gz	ricoh-20200527-NRG-IM_C6500_PDF.ppd.gz	ricoh-20200527-NRG-IM_C8000_PDF.ppd.gz	ricoh-20200527-Ricoh-IM_C6500_PDF.ppd.gz	ricoh-20200527-Ricoh-IM_C8000_PDF.ppd.gz	ricoh-20200527-Savin-IM_C6500_PDF.ppd.gz	ricoh-20200527-Savin-IM_C8000_PDF.ppd.gz
-ricoh-20200527-Ricoh-P_6000_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6010M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6010_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6020M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6020_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6030M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6030_JPN.ppd.gz
+ricoh-20200527-Ricoh-P_6000_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6010M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6010_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6020M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6020_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6030M_JPN.ppd.gz	ricoh-20200527-Ricoh-P_6030_JPN.ppd.gz	ricoh-20210222-Ricoh-IM_7000_JPN.ppd.gz	ricoh-20210222-Ricoh-IM_8000_JPN.ppd.gz	ricoh-20210222-Ricoh-IM_9000T_JPN.ppd.gz	ricoh-20210222-Ricoh-IM_9000_JPN.ppd.gz	ricoh-20210824-Ricoh-IM_2500_JPN.ppd.gz	ricoh-20210824-Ricoh-IM_3500_JPN.ppd.gz	ricoh-20210824-Ricoh-IM_4000_JPN.ppd.gz	ricoh-20210824-Ricoh-IM_5000_JPN.ppd.gz	ricoh-20210824-Ricoh-IM_6000_JPN.ppd.gz
 ricoh-20200821-Infotec-Pro_C5300SL_PDF.ppd.gz	ricoh-20200821-NRG-Pro_C5300SL_PDF.ppd.gz	ricoh-20200821-Ricoh-Pro_C5300SL_PDF.ppd.gz
 ricoh-20200821-Lanier-IM_C530FB_PDF.ppd.gz	ricoh-20200821-NRG-IM_C530FB_PDF.ppd.gz	ricoh-20200821-Ricoh-IM_C530FB_PDF.ppd.gz	ricoh-20200821-Savin-IM_C530FB_PDF.ppd.gz
 ricoh-20200821-Lanier-IM_C530F_PDF.ppd.gz	ricoh-20200821-NRG-IM_C530F_PDF.ppd.gz	ricoh-20200821-Ricoh-IM_C530F_PDF.ppd.gz	ricoh-20200821-Savin-IM_C530F_PDF.ppd.gz
@@ -1391,6 +1470,13 @@
 ricoh-20200821-Ricoh-IM_C3509J_PDF.ppd.gz
 ricoh-20200821-Ricoh-IM_C6500_JPN.ppd.gz	ricoh-20200821-Ricoh-IM_C8000_JPN.ppd.gz	ricoh-20200821-Ricoh-Pro_C5300S_JPN.ppd.gz	ricoh-20200821-Ricoh-Pro_C5310S_JPN.ppd.gz
 ricoh-20200930-Ricoh_Generic_PS_Printer.ppd.gz
+ricoh-20210222-Gestetner-G3020c_PXL.ppd.gz
+ricoh-20210222-Lanier-IM_7000_PDF.ppd.gz	ricoh-20210222-Lanier-IM_8000_PDF.ppd.gz	ricoh-20210222-Lanier-IM_9000_PDF.ppd.gz	ricoh-20210222-NRG-IM_7000_PDF.ppd.gz	ricoh-20210222-NRG-IM_8000_PDF.ppd.gz	ricoh-20210222-NRG-IM_9000_PDF.ppd.gz	ricoh-20210222-Ricoh-IM_7000_PDF.ppd.gz	ricoh-20210222-Ricoh-IM_8000_PDF.ppd.gz	ricoh-20210222-Ricoh-IM_9000_PDF.ppd.gz	ricoh-20210222-Savin-IM_7000_PDF.ppd.gz	ricoh-20210222-Savin-IM_8000_PDF.ppd.gz	ricoh-20210222-Savin-IM_9000_PDF.ppd.gz
+ricoh-20210601-Gestetner-GS3025m_PDF.ppd.gz	ricoh-20210601-Gestetner-GS3030m_PDF.ppd.gz	ricoh-20210601-Gestetner-GS3035m_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_2500_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_3000_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_3500_PDF.ppd.gz	ricoh-20210601-Lanier-IM_2500_PDF.ppd.gz	ricoh-20210601-Lanier-IM_3000_PDF.ppd.gz	ricoh-20210601-Lanier-IM_3500_PDF.ppd.gz	ricoh-20210601-NRG-IM_2500_PDF.ppd.gz	ricoh-20210601-NRG-IM_3000_PDF.ppd.gz	ricoh-20210601-NRG-IM_3500_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_2500_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_2509J_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_3000_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_3009J_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_3500_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_3509J_PDF.ppd.gz	ricoh-20210601-Savin-IM_2500_PDF.ppd.gz	ricoh-20210601-Savin-IM_3000_PDF.ppd.gz	ricoh-20210601-Savin-IM_3500_PDF.ppd.gz
+ricoh-20210601-Gestetner-GS3040m_PDF.ppd.gz	ricoh-20210601-Gestetner-GS3050m_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_4000_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_5000_PDF.ppd.gz	ricoh-20210601-Lanier-IM_4000_PDF.ppd.gz	ricoh-20210601-Lanier-IM_5000_PDF.ppd.gz	ricoh-20210601-NRG-IM_4000_PDF.ppd.gz	ricoh-20210601-NRG-IM_5000_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_4000_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_5000_PDF.ppd.gz	ricoh-20210601-Savin-IM_4000_PDF.ppd.gz	ricoh-20210601-Savin-IM_5000_PDF.ppd.gz
+ricoh-20210601-Gestetner-GS3060m_PDF.ppd.gz	ricoh-20210601-Gestetner-IM_6000_PDF.ppd.gz	ricoh-20210601-Lanier-IM_6000_PDF.ppd.gz	ricoh-20210601-NRG-IM_6000_PDF.ppd.gz	ricoh-20210601-Ricoh-IM_6000_PDF.ppd.gz	ricoh-20210601-Savin-IM_6000_PDF.ppd.gz
+ricoh-20210601-Ricoh-M_C2000_PXL.ppd.gz
+ricoh-20210601-Ricoh-SP_2300L_PXL.ppd.gz	ricoh-20210601-Ricoh-SP_2300SFL_PXL.ppd.gz
 sharp-20180409-Sharp-AR-M452U-ps.ppd.gz	sharp-20180409-Sharp-MX-M282N-ps.ppd.gz	sharp-20180409-Sharp-MX-M362N-ps.ppd.gz	sharp-20180409-Sharp-MX-M363U-ps.ppd.gz	sharp-20180409-Sharp-MX-M452N-ps.ppd.gz	sharp-20180409-Sharp-MX-M453U-ps.ppd.gz	sharp-20180409-Sharp-MX-M502N-ps.ppd.gz	sharp-20180409-Sharp-MX-M503U-ps.ppd.gz
 sharp-20180409-Sharp-MX-2640NR-ps.ppd.gz	sharp-20180409-Sharp-MX-3140NR-ps.ppd.gz	sharp-20180409-Sharp-MX-3640NR-ps.ppd.gz	sharp-20191219-Sharp-MX-2640N-ps.ppd.gz	sharp-20191219-Sharp-MX-3140N-ps.ppd.gz	sharp-20191219-Sharp-MX-3640N-ps.ppd.gz
 sharp-20180409-Sharp-MX-M283N-ps.ppd.gz	sharp-20180409-Sharp-MX-M363N-ps.ppd.gz	sharp-20180409-Sharp-MX-M453N-ps.ppd.gz	sharp-20180409-Sharp-MX-M503N-ps.ppd.gz
@@ -1470,6 +1556,21 @@
 sharp-20191230-Sharp-MX-M3531-ps-jp.ppd.gz
 sharp-20191230-Sharp-MX-M6570-ps.ppd.gz	sharp-20191230-Sharp-MX-M7570-ps.ppd.gz
 sharp-20191230-Sharp-MX-M905-ps.ppd.gz
+sharp-20210601-SHARP_MX-B467F.ppd.gz
+sharp-20210601-SHARP_MX-B467P.ppd.gz
+sharp-20210601-SHARP_MX-B557F.ppd.gz	sharp-20210601-SHARP_MX-B707F.ppd.gz
+sharp-20210601-SHARP_MX-B557P.ppd.gz	sharp-20210601-SHARP_MX-B707P.ppd.gz
+sharp-20210601-SHARP_MX-C357F.ppd.gz
+sharp-20210601-SHARP_MX-C407F.ppd.gz	sharp-20210601-SHARP_MX-C507F.ppd.gz
+sharp-20210601-SHARP_MX-C407P.ppd.gz
+sharp-20210601-SHARP_MX-C507P.ppd.gz
+sharp-20210601-SHARP_MX-C557F.ppd.gz	sharp-20210601-SHARP_MX-C607F.ppd.gz
+sharp-20210601-SHARP_MX-C607P.ppd.gz
+sharp-20210601-Sharp-BP-20M22-ps.ppd.gz	sharp-20210601-Sharp-BP-20M24-ps.ppd.gz
+sharp-20210601-Sharp-BP-20M28-ps.ppd.gz	sharp-20210601-Sharp-BP-20M31-ps.ppd.gz	sharp-20210601-Sharp-BP-21M22-ps.ppd.gz
+sharp-20210601-Sharp-BP-30C25-ps.ppd.gz
+sharp-20210601-Sharp-BP-30M28-ps.ppd.gz	sharp-20210601-Sharp-BP-30M31-ps.ppd.gz	sharp-20210601-Sharp-BP-30M35-ps.ppd.gz
+sharp-20210601-Sharp-MX-7081-ps.ppd.gz	sharp-20210601-Sharp-MX-8081-ps.ppd.gz
 star-20171009-starcupsdrv-3.6.0-hsp7000r.ppd.gz
 star-20171009-starcupsdrv-3.6.0-hsp7000s.ppd.gz
 star-20171009-starcupsdrv-3.6.0-hsp7000v.ppd.gz
@@ -1535,3 +1636,8 @@
 xerox-20200129-xrxC9065.ppd.gz	xerox-20200129-xrxC9070.ppd.gz
 xerox-20200226-xrxB9100.ppd.gz	xerox-20200226-xrxB9110.ppd.gz	xerox-20200226-xrxB9125.ppd.gz	xerox-20200226-xrxB9136.ppd.gz
 xerox-20201014-xrxC8000W.ppd.gz
+xerox-20210715-xrxB225.ppd.gz	xerox-20210715-xrxB230.ppd.gz	xerox-20210715-xrxB235.ppd.gz
+xerox-20210715-xrxB310.ppd.gz
+xerox-20210715-xrxC230.ppd.gz	xerox-20210715-xrxC235.ppd.gz
+xerox-20210715-xrxEC8036.ppd.gz	xerox-20210715-xrxEC8056.ppd.gz
+zebra-20210504-SP-005645A.ppd.gz
diff --git a/client/site_tests/platform_PrinterPpds/test_fake_printer.py b/client/site_tests/platform_PrinterPpds/test_fake_printer.py
index 0647a8e..2d23b15 100755
--- a/client/site_tests/platform_PrinterPpds/test_fake_printer.py
+++ b/client/site_tests/platform_PrinterPpds/test_fake_printer.py
@@ -1,4 +1,3 @@
-#!/usr/bin/python2
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -22,7 +21,7 @@
         pass
 
     # Another test - let's try to send something
-    message = 'lkds;fsdjfsdjflsdjfsd;lfsad;adfsfa324dsfcxvdsvdf'
+    message = b'lkds;fsdjfsdjflsdjfsd;lfsad;adfsfa324dsfcxvdsvdf'
     port = 12345
     with FakePrinter(port) as printer:
         # Opens a socket and sends the message
diff --git a/client/site_tests/platform_PrinterPpds/test_multithreaded_processor.py b/client/site_tests/platform_PrinterPpds/test_multithreaded_processor.py
index 90205fd..3731086 100755
--- a/client/site_tests/platform_PrinterPpds/test_multithreaded_processor.py
+++ b/client/site_tests/platform_PrinterPpds/test_multithreaded_processor.py
@@ -1,9 +1,12 @@
-#!/usr/bin/python2
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 from multithreaded_processor import MultithreadedProcessor
+from six.moves import range
 
 
 class EvenNumbersGenerator():
@@ -26,7 +29,7 @@
 
         """
         # prepares input data
-        self._data = range(count)
+        self._data = list(range(count))
         # runs the calculations and returns an output
         return self._processor.run(self._thread, count)
 
diff --git a/client/site_tests/platform_Quipper/control b/client/site_tests/platform_Quipper/control
deleted file mode 100644
index 7fbb66d..0000000
--- a/client/site_tests/platform_Quipper/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "sque dhsharp rapati"
-NAME = "platform_Quipper"
-PURPOSE = "Test CWP's quipper tool."
-
-CRITERIA = """
-Successfully collect a perf data profile through quipper and make sure there is
-a non-empty protobuf output.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-ATTRIBUTES = "suite:experimental"
-
-DOC = """
-Verify that CWP's quipper tool can successfully call perf to collect a profile,
-process it, and serialize it to a protobuf.
-"""
-
-job.run_test('platform_Quipper')
diff --git a/client/site_tests/platform_Quipper/platform_Quipper.py b/client/site_tests/platform_Quipper/platform_Quipper.py
deleted file mode 100644
index 5db8198..0000000
--- a/client/site_tests/platform_Quipper/platform_Quipper.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import subprocess
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-
-class platform_Quipper(test.test):
-    """
-    Collects perf data and convert it to a protobuf. Verifies that quipper
-    completes successfully and that the output is nonzero.
-    """
-    version = 1
-
-    def _get_quipper_command(self, duration, perf_options):
-        return ('quipper', str(duration), 'perf', 'record', '-a') + \
-               perf_options
-
-
-    def _get_perf_command(self, duration, perf_options):
-        return ('perf', 'record', '-a') + perf_options + \
-               ('--', 'sleep', str(duration))
-
-
-    def run_once(self):
-        """
-        See test description.
-        """
-
-        duration = 2
-
-        # These are the various perf command options to add to
-        # |quipper_command_base|, for a wide range of commands to test.
-        quipper_command_options = (
-            # Basic cycle-based profile.
-            ('-e', 'cycles'),
-            # Set a custom sampling frequency.
-            ('-e', 'cycles', '-F', '3011'),
-            # Set a custom sampling period.
-            ('-e', 'cycles', '-c', '2000003'),
-            # Test various events.
-            ('-e', 'cycles,instructions,branch-misses,cache-misses'),
-            # Test callgraph.
-            ('-e', 'cycles', '-g'),
-            # Test callgraph and raw data.
-            ('-e', 'cycles', '-g', '-R'),
-            # Test LBR.
-            ('-e', 'cycles', '-b'),
-            # Test LBR, callgraph, and raw data.
-            ('-e', 'cycles', '-b', '-g', '-R'),
-        )
-
-        keyvals = {}
-        # Run quipper with each of the options.
-        for options in quipper_command_options:
-            result = ""
-
-            # Try running the associated perf command first.
-            perf_command = self._get_perf_command(duration, options)
-
-            # Generate a full quipper command by joining the base command
-            # and various perf options.
-            quipper_command = self._get_quipper_command(duration, options)
-            quipper_command_string = ' '.join(quipper_command)
-
-            try:
-                result = subprocess.check_output(perf_command)
-            except subprocess.CalledProcessError:
-                # If the perf command fails, don't test quipper. But record that
-                # it was skipped.
-                keyvals['command'] = '(' + quipper_command_string + ')'
-                keyvals['result_length'] = '(skipped)'
-                self.write_perf_keyval(keyvals)
-                continue
-
-            try:
-                result = subprocess.check_output(quipper_command,
-                                                 stderr=subprocess.STDOUT)
-            except subprocess.CalledProcessError:
-                raise error.TestFail('Error running command: ' +
-                                     quipper_command_string)
-
-            # Write keyvals.
-            keyvals['command'] = quipper_command_string;
-            keyvals['result_length'] = len(result)
-            self.write_perf_keyval(keyvals)
-
-            # Verify the output size.
-            if len(result) == 0:
-                raise error.TestFail('Got no result data from quipper.')
-
diff --git a/client/site_tests/platform_Rootdev/control b/client/site_tests/platform_Rootdev/control
deleted file mode 100644
index 6a6a1f7..0000000
--- a/client/site_tests/platform_Rootdev/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Kobic@codeaurora.org (Kobi Cohen-Arazi)"
-NAME = "platform_Rootdev"
-PURPOSE = "Test rootdev correctness"
-CRITERIA = """
-Will check it with options to rootdev e.g. -d
-it should be /dev/sd{a,b} (-d) or /dev/nvme0n1 or mmcblk0
-"""
-ATTRIBUTES = "suite:regression"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-Definitions of the results:
-
-it should be /dev/sda (-d) or /dev/nvme0n1 or mmcblk0
-
-"""
-
-job.run_test('platform_Rootdev')
diff --git a/client/site_tests/platform_Rootdev/platform_Rootdev.py b/client/site_tests/platform_Rootdev/platform_Rootdev.py
deleted file mode 100644
index 412d1ee..0000000
--- a/client/site_tests/platform_Rootdev/platform_Rootdev.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-__author__ = 'kobic@codeaurora.org (Kobi Cohen-Arazi)'
-
-import logging
-import re
-import utils
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-class platform_Rootdev(test.test):
-    version = 1
-
-    def test_no_partition(self, inputDev):
-        """Tests the device is having partition
-
-        @inputDev: rootdev -s -d output.
-        """
-        m = re.match(r'/dev/(sd[a-z]|mmcblk[0-9]+|nvme[0-9]n[0-9]+)$', inputDev)
-        if not m:
-            raise error.TestFail(
-                    "Rootdev test_no_partition failed != "
-                    "/dev/(sd[a-z]|mmcblk[0-9]+|nvme[0-9]n[0-9]+)$")
-
-
-    def run_once(self):
-        # test return values
-        result = utils.system("rootdev -s")
-        logging.debug("Rootdev test res: %d", result)
-        if (result != 0):
-            raise error.TestFail("Rootdev failed")
-        result = utils.system("rootdev -s -d")
-        logging.debug("Rootdev test -d switch res: %d", result)
-        if (result != 0):
-            raise error.TestFail("Rootdev failed -s -d")
-
-        # test with -d Results should be without the partition device number
-        text = utils.system_output("rootdev -s -d 2>&1")
-        text = text.strip()
-        logging.debug("Rootdev -s -d txt is *%s*", text)
-        self.test_no_partition(text)
-
-
diff --git a/client/site_tests/platform_ScrollTest/control b/client/site_tests/platform_ScrollTest/control
deleted file mode 100644
index 0d084fe..0000000
--- a/client/site_tests/platform_ScrollTest/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "platform_ScrollTest"
-PURPOSE = "Scroll up and down pressure test."
-CRITERIA = """
-
-"""
-AUTHOR="Yi Xu"
-ATTRIBUTES = "suite:crosbolt_perf_nightly"
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "performance"
-TEST_TYPE = "client"
-DOC = """
-Opens some tough compositing websites, verify the fps rate when the page is
-scrolled up and down.
-"""
-
-job.run_test('platform_ScrollTest')
diff --git a/client/site_tests/platform_ScrollTest/platform_ScrollTest.py b/client/site_tests/platform_ScrollTest/platform_ScrollTest.py
deleted file mode 100644
index 864de58..0000000
--- a/client/site_tests/platform_ScrollTest/platform_ScrollTest.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import functools
-import logging
-import numpy
-import time
-
-from autotest_lib.client.bin import fps_meter
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import touch_playback_test_base
-from telemetry.internal.actions import scroll
-
-import py_utils
-
-""" List of URLs that will be used to test users gestures on. """
-_LIST_OF_URLS = ["https://www.youtube.com", "https://www.cnn.com",
-    "https://slashdot.org/"]
-
-""" Scroll bar's moving speed. """
-_SCROLL_SPEED = 1500
-
-""" The total distance that the scroll bar moved. """
-_SCROLL_DISTANCE = 3000
-
-""" Separator used in fps_meter for each VSync. """
-_SEPARATOR = " "
-
-class platform_ScrollTest(touch_playback_test_base.touch_playback_test_base):
-    """Scroll up and down pressure test."""
-    version = 1
-
-    def run_once(self):
-        """Runs the test once."""
-        perf_results = {}
-
-        def record_fps_info(fps_data, fps_info):
-            ''' record the fps info from |fps_meter| '''
-            frame_info, frame_times = fps_info
-            frame_info_str = ''.join(frame_info)
-            fps_count = sum(
-                map(int, frame_info_str.replace(_SEPARATOR, "")))
-            fps_data.append(fps_count)
-
-        fps_data = []
-        fps = fps_meter.FPSMeter(functools.partial(record_fps_info, fps_data))
-        with chrome.Chrome(init_network_controller=True) as cr:
-            for url in _LIST_OF_URLS:
-                tab = cr.browser.tabs.New()
-                tab.Navigate(url)
-                try:
-                    tab.WaitForDocumentReadyStateToBeComplete(timeout=15)
-                except py_utils.TimeoutException:
-                    logging.warning('Time out during loading url ' + url)
-
-                for x in range(0, 3):
-                    page_scroll = scroll.ScrollAction(
-                        speed_in_pixels_per_second=_SCROLL_SPEED,
-                        distance=_SCROLL_DISTANCE)
-                    cr.browser.platform.SetHTTPServerDirectories(self.bindir)
-                    page_scroll.WillRunAction(tab)
-                    fps.start()
-                    page_scroll.RunAction(tab)
-                    fps.stop()
-                    page_scroll = scroll.ScrollAction(
-                        direction="up",
-                        speed_in_pixels_per_second=_SCROLL_SPEED,
-                        distance=_SCROLL_DISTANCE)
-                    page_scroll.WillRunAction(tab)
-                    fps.start()
-                    page_scroll.RunAction(tab)
-                    fps.stop()
-                time.sleep(1)
-        value = getattr(numpy, "mean")(fps_data)
-
-        self.output_perf_value(description="fps average",
-                                value=value,
-                               units='frame per second',
-                               higher_is_better=True)
diff --git a/client/site_tests/platform_SecureEraseFile/control b/client/site_tests/platform_SecureEraseFile/control
deleted file mode 100644
index 21e461b..0000000
--- a/client/site_tests/platform_SecureEraseFile/control
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "teravest"
-NAME = "platform_SecureEraseFile"
-ATTRIBUTES = "suite:experimental"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This test checks that secure_erase_file erases a file on eMMC storage.
-"""
-
-job.run_test('platform_SecureEraseFile')
diff --git a/client/site_tests/platform_SecureEraseFile/platform_SecureEraseFile.py b/client/site_tests/platform_SecureEraseFile/platform_SecureEraseFile.py
deleted file mode 100644
index 6dd3a43..0000000
--- a/client/site_tests/platform_SecureEraseFile/platform_SecureEraseFile.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import logging
-import os
-import tempfile
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-PARTITION_TEST_PATH = 'platform_SecureEraseFile_test_file'
-TEST_PATH = '/mnt/stateful_partition/' + PARTITION_TEST_PATH
-BINARY = '/usr/bin/secure_erase_file'
-DEVNAME_PREFIX = 'DEVNAME='
-
-class platform_SecureEraseFile(test.test):
-    """Validate secure_erase_file tool behavior.
-
-    We can't verify from this test that data has been destroyed from the
-    underlying physical device, but we can confirm that it's not reachable from
-    userspace.
-    """
-    version = 1
-
-    def __write_test_file(self, path, blocksize, count):
-        cmd = '/bin/dd if=/dev/urandom of=%s bs=%s count=%d' % (
-                path, blocksize, count)
-        utils.run(cmd)
-        if not os.path.exists(path):
-            raise error.TestError('Failed to generate test file')
-
-
-    def __get_partition(self, path):
-        info = os.lstat(path)
-        major = os.major(info.st_dev)
-        minor = os.minor(info.st_dev)
-        uevent_path = '/sys/dev/block/%d:%d/uevent' % (major, minor)
-        with open(uevent_path, 'r') as uevent_file:
-            for l in uevent_file.readlines():
-                if l.startswith(DEVNAME_PREFIX):
-                    return '/dev/' + l[len(DEVNAME_PREFIX):].strip()
-        raise error.TestError('Unable to find partition for path: ' + path)
-
-
-    def __get_extents(self, path, partition):
-        extents = []
-        cmd = 'debugfs -R "extents %s" %s' % (path, partition)
-        result = utils.run(cmd)
-        for line in result.stdout.splitlines():
-            # Discard header line.
-            if line.startswith('Level'):
-                continue
-            fields = line.split()
-
-            # Ignore non-leaf extents
-            if fields[0].strip('/') != fields[1]:
-                continue
-            extents.append({'offset': fields[7], 'length': fields[10]})
-
-        return extents
-
-
-    def __verify_cleared(self, partition, extents):
-        out_path = tempfile.mktemp()
-        for e in extents:
-            cmd = 'dd if=%s bs=4K skip=%s count=%s of=%s' % (
-                   partition, e['offset'], e['length'], out_path)
-            utils.run(cmd)
-            with open(out_path, 'r') as out_file:
-                d = out_file.read()
-                for i, byte in enumerate(d):
-                    if ord(byte) != 0x00 and ord(byte) != 0xFF:
-                        logging.info('extent[%d] = %s', i, hex(ord(byte)))
-                        raise error.TestError('Bad byte found')
-
-
-    def __test_and_verify_cleared(self, blocksize, count):
-        self.__write_test_file(TEST_PATH, blocksize, count)
-        utils.run('sync')
-
-        logging.info('original file contents: ')
-        res = utils.run('xxd %s' % TEST_PATH)
-        logging.info(res.stdout)
-
-        partition = self.__get_partition(TEST_PATH)
-        extents = self.__get_extents(PARTITION_TEST_PATH, partition)
-        if len(extents) == 0:
-            raise error.TestError('No extents found for ' + TEST_PATH)
-
-        utils.run('%s %s' % (BINARY, TEST_PATH))
-
-        # secure_erase_file confirms that the file has been erased and that its
-        # contents are not accessible. If that is not the case, it will return
-        # with a non-zero exit code.
-        if os.path.exists(TEST_PATH):
-            raise error.TestError('Secure Erase failed to unlink file.')
-
-        self.__verify_cleared(partition, extents)
-
-
-    def run_once(self):
-        # Secure erase is only supported on eMMC today; pass if
-        # no device is present.
-        if len(glob.glob('/dev/mmcblk*')) == 0:
-            raise error.TestNAError('Skipping test; no eMMC device found.')
-
-        self.__test_and_verify_cleared('64K', 2)
-        self.__test_and_verify_cleared('1M', 16)
-
-    def after_run_once(self):
-        if os.path.exists(TEST_PATH):
-            os.unlink(TEST_PATH)
-
diff --git a/client/site_tests/platform_SessionManagerBlockDevmodeSetting/control b/client/site_tests/platform_SessionManagerBlockDevmodeSetting/control
deleted file mode 100644
index e40db1d..0000000
--- a/client/site_tests/platform_SessionManagerBlockDevmodeSetting/control
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'mnissler, pastarmovj'
-NAME = 'platform_SessionManagerBlockDevmodeSetting'
-ATTRIBUTES = "suite:experimental"
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-This test checks whether session_manager correctly updates the block_devmode
-crossystem flag according to the value stored in device settings.
-"""
-
-job.run_test('platform_SessionManagerBlockDevmodeSetting')
diff --git a/client/site_tests/platform_SessionManagerBlockDevmodeSetting/owner.key b/client/site_tests/platform_SessionManagerBlockDevmodeSetting/owner.key
deleted file mode 100644
index fc253ab..0000000
--- a/client/site_tests/platform_SessionManagerBlockDevmodeSetting/owner.key
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/platform_SessionManagerBlockDevmodeSetting/platform_SessionManagerBlockDevmodeSetting.py b/client/site_tests/platform_SessionManagerBlockDevmodeSetting/platform_SessionManagerBlockDevmodeSetting.py
deleted file mode 100644
index fa456f8..0000000
--- a/client/site_tests/platform_SessionManagerBlockDevmodeSetting/platform_SessionManagerBlockDevmodeSetting.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import gobject, os, shutil
-from dbus.mainloop.glib import DBusGMainLoop
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome, session_manager
-from autotest_lib.client.cros import constants, cros_ui, ownership
-
-
-def set_block_devmode(value):
-    try:
-        utils.system('crossystem block_devmode=%d' % (1 if value else 0))
-    except error.CmdError, e:
-        raise error.TestError('Failed to run crossystem: %s' % e)
-
-
-def get_block_devmode():
-    try:
-        return utils.system_output('crossystem block_devmode') == '1'
-    except error.CmdError, e:
-        raise error.TestError('Failed to run crossystem: %s' % e)
-
-
-class platform_SessionManagerBlockDevmodeSetting(test.test):
-    """Verifies that session_manager updates the block_devmode flag to be in
-    sync with the corresponding device setting."""
-    version = 1
-
-    def initialize(self):
-        super(platform_SessionManagerBlockDevmodeSetting, self).initialize()
-        ownership.restart_ui_to_clear_ownership_files()
-        self._bus_loop = DBusGMainLoop(set_as_default=True)
-
-
-    def run_once(self):
-        try:
-            if utils.system_output('crossystem mainfw_type') == 'nonchrome':
-                raise error.TestNAError(
-                    'State key generation only works on Chrome OS hardware')
-        except error.CmdError, e:
-            raise error.TestError('Failed to run crossystem: %s' % e)
-
-        # Make sure that the flag sticks when there is no owner.
-        set_block_devmode(True)
-        cros_ui.restart()
-        cros_ui.stop()
-        if not get_block_devmode():
-            raise error.TestFail("Flag got reset for non-owned device.")
-
-        # Test whether the flag gets reset when taking ownership.
-        listener = session_manager.OwnershipSignalListener(gobject.MainLoop())
-        listener.listen_for_new_key_and_policy()
-        with chrome.Chrome() as cr:
-            listener.wait_for_signals(desc='Ownership files written to disk.')
-            if get_block_devmode():
-                raise error.TestFail(
-                    "Flag not clear after ownership got established.")
-
-        # Put a new owner key and policy blob in place, the latter of which
-        # specifies block_devmode=true.
-        cros_ui.stop(allow_fail=True)
-        shutil.copyfile(
-            os.path.join(self.bindir, 'owner.key'), constants.OWNER_KEY_FILE)
-        shutil.copyfile(
-            os.path.join(self.bindir, 'policy_block_devmode_enabled'),
-            constants.SIGNED_POLICY_FILE)
-        cros_ui.start()
-        if not get_block_devmode():
-            raise error.TestFail(
-                "Flag not set after starting with policy enabled.")
-
-        # Send a new policy blob to session_manager that disables block_devmode.
-        listener.listen_for_new_policy()
-        with open(os.path.join(self.bindir,
-                               'policy_block_devmode_disabled')) as f:
-            session_manager_proxy = session_manager.connect(self._bus_loop)
-            session_manager_proxy.StorePolicyEx(
-                session_manager.make_device_policy_descriptor(), f.read())
-        listener.wait_for_signals(desc='Policy updated.')
-
-        if get_block_devmode():
-            raise error.TestFail(
-                "Flag set after updating policy to clear flag.")
diff --git a/client/site_tests/platform_SessionManagerBlockDevmodeSetting/policy_block_devmode_disabled b/client/site_tests/platform_SessionManagerBlockDevmodeSetting/policy_block_devmode_disabled
deleted file mode 100644
index 61e20d0..0000000
--- a/client/site_tests/platform_SessionManagerBlockDevmodeSetting/policy_block_devmode_disabled
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/platform_SessionManagerBlockDevmodeSetting/policy_block_devmode_enabled b/client/site_tests/platform_SessionManagerBlockDevmodeSetting/policy_block_devmode_enabled
deleted file mode 100644
index 3407de7..0000000
--- a/client/site_tests/platform_SessionManagerBlockDevmodeSetting/policy_block_devmode_enabled
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/platform_SessionManagerStateKeyGeneration/control b/client/site_tests/platform_SessionManagerStateKeyGeneration/control
deleted file mode 100644
index e746728..0000000
--- a/client/site_tests/platform_SessionManagerStateKeyGeneration/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'mnissler, pastarmovj'
-NAME = 'platform_SessionManagerStateKeyGeneration'
-ATTRIBUTES = "suite:experimental"
-TIME = 'SHORT'
-TEST_TYPE = 'client'
-
-DOC = """
-This test invokes session_manager's GetSeverBackedStateKeys DBus method and
-checks whether the returned state keys look sane. This ensures that any
-hardware identifiers required to generate state keys (such as machine and disk
-serial numbers) can be extracted successfully from the device.
-"""
-
-job.run_test('platform_SessionManagerStateKeyGeneration')
diff --git a/client/site_tests/platform_SessionManagerStateKeyGeneration/platform_SessionManagerStateKeyGeneration.py b/client/site_tests/platform_SessionManagerStateKeyGeneration/platform_SessionManagerStateKeyGeneration.py
deleted file mode 100644
index a568ddb..0000000
--- a/client/site_tests/platform_SessionManagerStateKeyGeneration/platform_SessionManagerStateKeyGeneration.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from dbus.mainloop.glib import DBusGMainLoop
-
-from autotest_lib.client.common_lib.cros import session_manager
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.cros import cros_ui
-
-class platform_SessionManagerStateKeyGeneration(test.test):
-    '''Verifies that session_manager's GetServerBackedStateKeys DBus method
-    returns valid state keys.'''
-    version = 1
-
-    def initialize(self):
-        super(platform_SessionManagerStateKeyGeneration, self).initialize()
-        cros_ui.stop(allow_fail=True)
-        cros_ui.start()
-        self._bus_loop = DBusGMainLoop(set_as_default=True)
-
-    def run_once(self):
-        try:
-            if utils.system_output('crossystem mainfw_type') == 'nonchrome':
-                raise error.TestNAError(
-                    'State key generation only works on Chrome OS hardware')
-        except error.CmdError, e:
-            raise error.TestError('Failed to run crossystem: %s' % e)
-
-        # Retrieve state keys.
-        session_manager_proxy = session_manager.connect(self._bus_loop)
-        state_keys = session_manager_proxy.GetServerBackedStateKeys(
-            byte_arrays=True)
-
-        # Sanity-check the state keys.
-        if len(state_keys) < 3:
-            raise error.TestFail("Not enough state keys")
-        if len(state_keys) != len(set(state_keys)):
-            raise error.TestFail("Duplicate state keys")
-        for state_key in state_keys:
-            if len(state_key) != 32:
-                raise error.TestFail("Bad state key size")
diff --git a/client/site_tests/platform_TLSDate/control b/client/site_tests/platform_TLSDate/control
deleted file mode 100644
index 23f80a0..0000000
--- a/client/site_tests/platform_TLSDate/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME="SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-Tests features of the tlsdate{,d} tools.
-"""
-NAME = "platform_TLSDate"
-PURPOSE = "Regression and integration tests of tlsdate{,d}."
-CRITERIA = """
-Fail if any of the tlsdate{,d} features fail.
-"""
-ATTRIBUTES = "suite:bvt-perbuild"
-TEST_CLASS = "platform"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-
-job.run_test("platform_TLSDate")
diff --git a/client/site_tests/platform_TLSDate/platform_TLSDate.py b/client/site_tests/platform_TLSDate/platform_TLSDate.py
deleted file mode 100644
index feb534a..0000000
--- a/client/site_tests/platform_TLSDate/platform_TLSDate.py
+++ /dev/null
@@ -1,144 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import pwd
-import subprocess
-import tempfile
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-class TLSDate:
-    """
-    A single tlsdate invocation. Takes care of setting up a temporary cachedir
-    for it, along with collecting output from both it and its helper processes.
-    """
-    def __init__(self, test_obj):
-        self._proc = None
-        self._testdir = tempfile.mkdtemp(suffix='tlsdate')
-        self._cachedir = self._testdir + '/cache'
-        self._outfile = self._testdir + '/out'
-        self._subprog = '?'
-        self._test_obj = test_obj
-        self._output = None
-        self._tlsdate_uid = pwd.getpwnam('tlsdate').pw_uid
-        os.mkdir(self._cachedir)
-        # Let the tlsdate user (tlsdate) write.
-        os.chown(self._testdir, self._tlsdate_uid, -1)
-        # Allow support shell library to be sourced.
-        os.chown(self._test_obj.srcdir + '/time.sh', self._tlsdate_uid, -1)
-
-
-    def start(self, subprog):
-        logging.info('running with %s/%s', self._test_obj.srcdir, subprog)
-        self._subprog = subprog
-        # Make sure the tlsdate user can access the files
-        fake_tlsdate = self._test_obj.srcdir + '/' + subprog
-        os.chown(fake_tlsdate, self._tlsdate_uid, -1)
-        args = ['/usr/bin/tlsdated', '-p',
-                '-f', self._test_obj.srcdir + '/test.conf',
-                '-c', self._cachedir,
-                '-v',
-                fake_tlsdate,
-                self._outfile]
-        self._proc = subprocess.Popen(args, stdin=subprocess.PIPE,
-                                      stderr=subprocess.PIPE)
-
-
-    def route_up(self):
-        self._proc.stdin.write('n')
-        self._proc.stdin.flush()
-
-
-    def kill(self):
-        self._proc.terminate()
-
-
-    def output(self):
-        if not self._output:
-            self._output = self._proc.communicate()[1].split('\n')
-        return self._output
-
-
-    def in_output(self, string):
-        for x in self.output():
-            if string in x:
-                return True
-        return False
-
-
-    def subproc_output(self):
-        with open(self._outfile) as f:
-            return [x.rstrip() for x in f.readlines()]
-
-
-    def ok(self):
-        return 'ok' in self.subproc_output()
-
-
-class platform_TLSDate(test.test):
-    version = 1
-
-    def require_ok(self, t):
-        if not t.ok():
-            raise error.TestFail('Expected success, got:' +
-                                 ';'.join(t.subproc_output()))
-
-
-    def require_output(self, t, string):
-        if not t.in_output(string):
-            raise error.TestFail('Needed "%s" but got "%s"' % (string,
-                                 ';'.join(t.output())))
-
-
-    def require_not_output(self, t, string):
-        if t.in_output(string):
-            raise error.TestFail('Needed no "%s" but got "%s"' % (string,
-                                 ';'.join(t.output())))
-
-
-    def test_delay_subproc(self):
-        """
-        Tests that a subprocess that delays for one second is waited on
-        successfully the second time.
-        """
-        t = TLSDate(self)
-        t.start('delay_subproc')
-        self.require_output(t, 'attempt 1 backoff')
-        self.require_output(t, 'time set from the network')
-        self.require_ok(t)
-
-
-    def test_hang_subproc(self):
-        """
-        Tests that a subprocess that delays for too long is considered hung and
-        killed.
-        """
-        t = TLSDate(self)
-        t.start('hang_subproc')
-        self.require_output(t, 'attempt 1 backoff')
-        self.require_output(t, 'tlsdate timed out')
-        self.require_ok(t)
-
-
-    def test_fail_routes(self):
-        """
-        Tests that if the initial tlsdate call fails, we wait for a route to
-        appear, then rerun tlsdate.
-        """
-        t = TLSDate(self)
-        t.start('fail_routes')
-        t.route_up()
-        self.require_output(t, 'status:2')
-        self.require_output(t, 'stdin')
-        self.require_output(t, 'time set from the network')
-        self.require_ok(t)
-
-
-    def run_once(self):
-        self.test_delay_subproc()
-        self.test_hang_subproc()
-        self.test_fail_routes()
diff --git a/client/site_tests/platform_TLSDate/src/delay_subproc b/client/site_tests/platform_TLSDate/src/delay_subproc
deleted file mode 100755
index d843e2f..0000000
--- a/client/site_tests/platform_TLSDate/src/delay_subproc
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-. $(dirname $0)/time.sh
-
-if grep -q '^ok$' "$1" 2>/dev/null; then
-	# Take down tlsdated
-	kill -TERM $PPID
-	exit 1
-fi
-
-echo 'ok' > "$1"
-sleep 1
-print_time $(date +%s)
-exit 0
diff --git a/client/site_tests/platform_TLSDate/src/fail_routes b/client/site_tests/platform_TLSDate/src/fail_routes
deleted file mode 100755
index 8f02bb1..0000000
--- a/client/site_tests/platform_TLSDate/src/fail_routes
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-. $(dirname $0)/time.sh
-
-if grep -q '^ok$' "$1" 2>/dev/null; then
-	# Take down tlsdated
-	kill -TERM $PPID
-	exit 1
-fi
-
-if ! grep -q '^first-fail$' "$1" 2>/dev/null; then
-	echo 'first-fail' > "$1"
-	exit 2
-fi
-
-echo 'ok' >> "$1"
-print_time $(date +%s)
-exit 0
diff --git a/client/site_tests/platform_TLSDate/src/hang_subproc b/client/site_tests/platform_TLSDate/src/hang_subproc
deleted file mode 100755
index 78a560e..0000000
--- a/client/site_tests/platform_TLSDate/src/hang_subproc
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-if grep -q '^ok$' "$1" 2>/dev/null; then
-	# Take down tlsdated
-	kill -TERM $PPID
-	exit 1
-fi
-
-echo 'ok' > "$1"
-sleep 10
-exit 1
diff --git a/client/site_tests/platform_TLSDate/src/test.conf b/client/site_tests/platform_TLSDate/src/test.conf
deleted file mode 100644
index 8b96fef..0000000
--- a/client/site_tests/platform_TLSDate/src/test.conf
+++ /dev/null
@@ -1,16 +0,0 @@
-subprocess-wait-between-tries 2
-subprocess-tries 2
-wait-between-tries 2
-tries 2
-should-sync-hwclock no
-should-netlink no
-dry-run yes
-verbose yes
-steady-state-interval 1
-min-steady-state-interval 1
-# base-path supplied on command line
-source
-        host 127.0.0.1
-        port 1010
-        proxy none
-end
diff --git a/client/site_tests/platform_TLSDate/src/time.sh b/client/site_tests/platform_TLSDate/src/time.sh
deleted file mode 100644
index 9d198e2..0000000
--- a/client/site_tests/platform_TLSDate/src/time.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-print_time() {
-  unix_time_from_date="$1"
-  awk "BEGIN { printf \""$(printf "%x" $1 | \
-       sed -e 's/\(..\)\(..\)\(..\)\(..\)/\\x\4\\x\3\\x\2\\x\1/g')"\" }"
-}
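The deleted time.sh helper emits a Unix timestamp as four raw little-endian bytes, which is what the fake tlsdate subprocesses hand back to tlsdated through the output file. For readers puzzling over the awk/sed byte-swapping trick, here is a hedged Python 3 equivalent using struct; it is an illustration only, not part of the deleted test, and it assumes the timestamp fits in 32 bits just as the four-byte sed expression did.

# Illustration only: the little-endian encoding the deleted time.sh produced
# with awk/sed, expressed with Python's struct module.
import struct
import time

def print_time(unix_time: int) -> bytes:
    # "<I" = little-endian unsigned 32-bit, matching the byte-reversing sed expression.
    return struct.pack("<I", unix_time)

if __name__ == "__main__":
    print(print_time(int(time.time())).hex())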
diff --git a/client/site_tests/platform_TLSDateActual/control b/client/site_tests/platform_TLSDateActual/control
deleted file mode 100644
index 2bc8dde..0000000
--- a/client/site_tests/platform_TLSDateActual/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME="SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-Tests that tlsdate actually can connect to our time host, fetch and set time.
-"""
-NAME = "platform_TLSDateActual"
-PURPOSE = "Real-world integration test for tlsdate"
-CRITERIA = """
-Fail if tlsdate fails to actually work.
-"""
-ATTRIBUTES = "suite:bvt-perbuild"
-TEST_CLASS = "platform"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-
-job.run_test("platform_TLSDateActual")
diff --git a/client/site_tests/platform_TLSDateActual/platform_TLSDateActual.py b/client/site_tests/platform_TLSDateActual/platform_TLSDateActual.py
deleted file mode 100644
index 5f7b52b..0000000
--- a/client/site_tests/platform_TLSDateActual/platform_TLSDateActual.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors and the python-socks5 authors.
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 3,
-# as published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import subprocess
-import test
-
-# Taken and hacked from https://code.google.com/p/python-socks5/
-
-import socket
-from threading import Thread
-
-from autotest_lib.client.common_lib import error
-
-SOCKTIMEOUT=5
-RESENDTIMEOUT=300
-
-class Forwarder(Thread):
-    def __init__(self,src,dest):
-        Thread.__init__(self)
-        self.src=src
-        self.dest=dest
-
-
-    def __str__(self):
-        return '<Forwarder from %s to %s>' % (self.src, self.dest)
-
-
-    def run(self):
-        print '%s: starting' % self
-        try:
-            self.forward()
-        except socket.error as e:
-            print '%s: exception %s' % (self, e)
-            self.src.close()
-            self.dest.close()
-        finally:
-            print '%s: exiting' % self
-
-
-    def forward(self):
-        BUFSIZE = 1024
-        data = self.src.recv(BUFSIZE)
-        while data:
-            self.dest.sendall(data)
-            data = self.src.recv(BUFSIZE)
-        self.src.close()
-        self.dest.close()
-        print '%s: client quit normally' % self
-
-
-class ProxyForwarder(Forwarder):
-    def __init__(self, src, dest_addr):
-        Forwarder.__init__(self, src, None)
-        self.dest_addr = dest_addr
-        self.src = src
-        self.dest = None
-
-
-    def __str__(self):
-        return '<ProxyForwarder between %s and %s (%s:%d)' % (
-            self.src, self.dest, self.dest_addr[0], self.dest_addr[1])
-
-
-    def forward(self):
-        self.dest = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
-        self.dest.connect(self.dest_addr)
-        self.src.settimeout(RESENDTIMEOUT)
-        self.dest.settimeout(RESENDTIMEOUT)
-        Forwarder(self.src,self.dest).start()
-        Forwarder(self.dest,self.src).start()
-
-
-def recvbytes(sock, n):
-    bs = sock.recv(n)
-    return [ ord(x) for x in bs ]
-
-
-def recvshort(sock):
-    x = recvbytes(sock, 2)
-    return x[0] * 256 + x[1]
-
-
-def create_server(ip,port):
-    SOCKS5_VER = "\x05"
-    AUTH_NONE = "\x00"
-
-    ATYP_DOMAIN = 0x03
-
-    CMD_CONNECT = 0x01
-
-    ERR_SUCCESS = "\x00"
-    ERR_UNSUPP = "\x07"
-
-    transformer = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    transformer.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-    transformer.bind((ip, port))
-    transformer.listen(1000)
-
-    network_port = chr(port >> 8) + chr(port & 0xff)
-    # Turn the textual IP address we were supplied with into a
-    # network-byte-order IP address for SOCKS5 wire protocol
-    network_ip = "".join(chr(int(i)) for i in ip.split("."))
-    while True:
-        sock = transformer.accept()[0]
-        sock.settimeout(SOCKTIMEOUT)
-        print "Got one client connection"
-        (_, nmethods) = recvbytes(sock, 2)
-        _ = recvbytes(sock, nmethods)
-        sock.sendall(SOCKS5_VER + AUTH_NONE)
-        (_, cmd, _, atyp) = recvbytes(sock, 4)
-        dst_addr = None
-        dst_port = None
-        if atyp == ATYP_DOMAIN:
-            addr_len = recvbytes(sock, 1)[0]
-            dst_addr = "".join([unichr(x) for x in recvbytes(sock, addr_len)])
-            dst_port = recvshort(sock)
-        else:
-            socket.sendall(SOCKS5_VER + ERR_UNSUPP + network_ip + network_port)
-        print "Proxying to %s:%d" %(dst_addr,dst_port)
-
-        if cmd == CMD_CONNECT:
-            sock.sendall(SOCKS5_VER + ERR_SUCCESS + "\x00" + "\x01" +
-                         network_ip + network_port)
-            print "Starting forwarding thread"
-            ProxyForwarder(sock, (dst_addr, dst_port)).start()
-        else:
-            sock.sendall(SOCKS5_VER + ERR_UNSUPP + network_ip + network_port)
-            sock.close()
-
-
-class ServingThread(Thread):
-    def __init__(self, ip, port):
-        Thread.__init__(self)
-        self.ip = ip
-        self.port = port
-
-
-    def run(self):
-        create_server(self.ip, self.port)
-
-
-class platform_TLSDateActual(test.test):
-    version = 1
-
-
-    def tlsdate(self, host, proxy):
-        args = ['/usr/bin/tlsdate', '-v', '-l', '-H', host]
-        if proxy:
-            args += ['-x', proxy]
-        p = subprocess.Popen(args, stderr=subprocess.PIPE)
-        out = p.communicate()[1]
-        print out
-        return p.returncode
-
-
-    def run_once(self):
-        t = ServingThread("127.0.0.1", 8083)
-        t.start()
-        r = self.tlsdate('clients3.google.com', None)
-        if r != 0:
-            raise error.TestFail('tlsdate with no proxy to good host failed: %d' % r)
-        r = self.tlsdate('clients3.google.com', 'socks5://127.0.0.1:8083')
-        if r != 0:
-            raise error.TestFail('tlsdate with proxy to good host failed: %d' % r)
-        r = self.tlsdate('invalid-host.example.com', None)
-        if r == 0:
-            raise error.TestFail('tlsdate with no proxy to bad host succeeded')
-        r = self.tlsdate('invalid-host.example.com', 'socks5://127.0.0.1:8083')
-        if r == 0:
-            raise error.TestFail('tlsdate with proxy to bad host succeeded')
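The deleted platform_TLSDateActual test drove /usr/bin/tlsdate directly and only inspected its exit status. Below is a minimal standalone sketch of that invocation pattern; the flags (-v, -l, -H, -x) are taken from the deleted tlsdate() helper, while the host and proxy values are placeholders for whatever endpoint is available.

# Sketch of the tlsdate invocation used by the deleted test; flags come from its
# tlsdate() helper. Host and proxy values here are placeholders.
import subprocess

def run_tlsdate(host, proxy=None):
    args = ['/usr/bin/tlsdate', '-v', '-l', '-H', host]
    if proxy:
        args += ['-x', proxy]          # e.g. 'socks5://127.0.0.1:8083'
    proc = subprocess.run(args, stderr=subprocess.PIPE, text=True)
    return proc.returncode, proc.stderr

if __name__ == '__main__':
    rc, log = run_tlsdate('clients3.google.com')
    print('exit status:', rc)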
diff --git a/client/site_tests/platform_TPMEvict/control b/client/site_tests/platform_TPMEvict/control
deleted file mode 100644
index fc50b3f..0000000
--- a/client/site_tests/platform_TPMEvict/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chromium OS Authors"
-NAME = "platform_TPMEvict"
-PURPOSE = "Tests the TPM under low-resource conditions."
-CRITERIA = "Fails if any operations fail."
-ATTRIBUTES = "suite:regression"
-TIME = "SHORT"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-DOC = """
-This test verifies that PKCS #11 services remain functional when the TPM is
-operating under low-resource conditions. Specifically, more keys are used than
-are able to fit in TPM memory which requires that previously loaded keys be
-evicted. The test exercises the eviction code path as well as the reload code
-path (when a previously evicted key is used again).
-"""
-
-job.run_test('platform_TPMEvict')
diff --git a/client/site_tests/platform_TPMEvict/platform_TPMEvict.py b/client/site_tests/platform_TPMEvict/platform_TPMEvict.py
deleted file mode 100644
index 0acf2ed..0000000
--- a/client/site_tests/platform_TPMEvict/platform_TPMEvict.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.cros import pkcs11
-
-
-class platform_TPMEvict(test.test):
-    version = 1
-
-    def run_once(self):
-        pkcs11.setup_p11_test_token(True)
-        pkcs11.load_p11_test_token()
-        for i in range(30):
-            utils.system('p11_replay --inject --replay_wifi')
-        for i in range(30):
-            utils.system('p11_replay --inject')
-        utils.system('p11_replay --replay_wifi')
-        utils.system('p11_replay --cleanup')
-        pkcs11.cleanup_p11_test_token()
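The eviction scenario in the deleted TPMEvict test boils down to "load more keys than the TPM can hold, then touch an earlier one again." A rough standalone sketch of that loop follows, assuming p11_replay is on PATH and a PKCS #11 test token is already initialized and loaded (the deleted test used the autotest pkcs11 helpers for that part).

# Rough sketch of the eviction exercise from the deleted test.
import subprocess

def replay(*flags):
    subprocess.run(['p11_replay', *flags], check=True)

def exercise_eviction(iterations=30):
    for _ in range(iterations):
        replay('--inject', '--replay_wifi')   # load keys and use them immediately
    for _ in range(iterations):
        replay('--inject')                    # keep loading until earlier keys get evicted
    replay('--replay_wifi')                   # force a previously evicted key to be reloaded
    replay('--cleanup')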
diff --git a/client/site_tests/platform_TabletMode/control b/client/site_tests/platform_TabletMode/control
deleted file mode 100644
index e83270b..0000000
--- a/client/site_tests/platform_TabletMode/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "rjahagir"
-NAME = "platform_TabletMode"
-PURPOSE = """Verify that laptop can transition successfully into tablet mode."""
-CRITERIA = """
-Test fails if any of the following happen:
-- Unable to get a valid screenshot.
-- Landscape, portrait, laptop screenshots are not sufficiently different.
-- Laptop at the start and laptop at the end are not the same.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:tablet_mode"
-
-DOC = """
-Takes screenshots when in laptop mode and tablet mode (landscape and portrait).
-The test will fail if the laptop screenshots are not similar enough or if the
-tablet screenshots are not different enough from the laptop screenshots.
-"""
-job.run_test("platform_TabletMode")
diff --git a/client/site_tests/platform_TabletMode/platform_TabletMode.py b/client/site_tests/platform_TabletMode/platform_TabletMode.py
deleted file mode 100644
index b782ab4..0000000
--- a/client/site_tests/platform_TabletMode/platform_TabletMode.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.graphics import graphics_utils
-from autotest_lib.client.cros.image_comparison import pdiff_image_comparer
-
-def get_percent_difference(file1, file2):
-    """
-    Performs pixel comparison of two files, given by their paths |file1|
-    and |file2| using terminal tool 'perceptualdiff' and returns percentage
-    difference of the total file size.
-
-    @param file1: path to image
-    @param file2: path to secondary image
-    @return: percentage difference of total file size.
-    @raise ValueError: if image dimensions are not the same
-    @raise OSError: if file does not exist or cannot be opened.
-
-    """
-    # Using pdiff image comparer to compare the two images. This class
-    # invokes the terminal tool perceptualdiff.
-    pdi = pdiff_image_comparer.PdiffImageComparer()
-    diff_bytes = pdi.compare(file1, file2)[0]
-    return round(100. * diff_bytes / os.path.getsize(file1))
-
-
-class platform_TabletMode(test.test):
-    """
-    Verify that tablet mode toggles appropriately.
-    """
-    version = 1
-    _WAIT = 5
-    _SHORT_WAIT = 1
-    SPOOF_CMD = 'ectool motionsense spoof '
-    # Disable spoof mode and return into laptop state.
-    RESET_SENSOR_0 = '-- 0 0'
-    RESET_SENSOR_1 = '-- 1 0'
-    # Spoof sensor 1 to force laptop into landscape tablet mode.
-    LANDSCAPE_SENSOR_1 = '-- 1 1 32 -16256 -224'
-    # Spoof sensor 0 and sensor 1 to force laptop into portrait tablet mode.
-    PORTRAIT_SENSOR_0 = '-- 0 1 -7760 -864 -14112'
-    PORTRAIT_SENSOR_1 = '-- 1 1 -7936 848 14480'
-    ERRORS = []
-
-    def _revert_laptop(self):
-        """Resets sensors to revert back to laptop mode."""
-        utils.system(self.SPOOF_CMD + self.RESET_SENSOR_0)
-        time.sleep(self._SHORT_WAIT)
-        utils.system(self.SPOOF_CMD + self.RESET_SENSOR_1)
-        time.sleep(self._WAIT)
-
-    def _spoof_tablet_landscape(self):
-        """Spoofs sensors to change into tablet landscape mode."""
-        utils.system(self.SPOOF_CMD + self.LANDSCAPE_SENSOR_1)
-        time.sleep(self._WAIT)
-
-    def _spoof_tablet_portrait(self):
-        """Spoofs sensors to change into tablet portrait mode."""
-        utils.system(self.SPOOF_CMD + self.PORTRAIT_SENSOR_0)
-        time.sleep(self._SHORT_WAIT)
-        utils.system(self.SPOOF_CMD + self.PORTRAIT_SENSOR_1)
-        time.sleep(self._WAIT)
-
-    def _take_screenshot(self, suffix):
-        """
-        Captures a screenshot of the current VT screen in PNG format.
-
-        @param suffix: suffix appended to the screenshot file name.
-
-        @returns the path of the screenshot file.
-
-        """
-        return graphics_utils.take_screenshot(self.resultsdir,
-                                              suffix + '_tablet_mode')
-
-    def _verify_difference(self, screenshot1, screenshot2,
-                           difference_percent_threshold=5):
-        """
-        Make sure screenshots are sufficiently different.
-
-        @param screenshot1: path to screenshot.
-        @param screenshot2: path to screenshot.
-        @param difference_percent_threshold: threshold for difference.
-
-        @returns number of errors found (0 or 1).
-
-        """
-        filename1 = screenshot1.split('/')[-1]
-        filename2 = screenshot2.split('/')[-1]
-        diff = get_percent_difference(screenshot1, screenshot2)
-        logging.info("Screenshot 1 and 2 diff: %s" % diff)
-        if not diff >= difference_percent_threshold:
-            error = ('Screenshots differ by %d %%: %s vs %s'
-                     % (diff, filename1, filename2))
-            self.ERRORS.append(error)
-
-    def _verify_similarity(self, screenshot1, screenshot2,
-                           similarity_percent_threshold=5):
-        """
-        Make sure screenshots are the same or similar.
-
-        @param screenshot1: path to screenshot.
-        @param screenshot2: path to screenshot.
-        @param similarity_percent_threshold: threshold for similarity.
-
-        @returns number of errors found (0 or 1).
-
-        """
-        filename1 = screenshot1.split('/')[-1]
-        filename2 = screenshot2.split('/')[-1]
-        diff = get_percent_difference(screenshot1, screenshot2)
-        logging.info("Screenshot 1 and 2 similarity diff: %s" % diff)
-        if not diff <= similarity_percent_threshold:
-            error = ('Screenshots differ by %d %%: %s vs %s'
-                     % (diff, filename1, filename2))
-            self.ERRORS.append(error)
-
-    def run_once(self):
-        """
-        Run tablet mode test to spoof various tablet modes and ensure
-        device changes accordingly.
-        """
-
-        # Ensure we start in laptop mode.
-        self._revert_laptop()
-
-        logging.info("Take screenshot for initial laptop mode.")
-        laptop_start = self._take_screenshot('laptop_start')
-
-        logging.info("Entering landscape mode.")
-        self._spoof_tablet_landscape()
-        landscape = self._take_screenshot('landscape')
-
-        self._revert_laptop()
-
-        logging.info("Entering portrait mode.")
-        self._spoof_tablet_portrait()
-        portrait = self._take_screenshot('portrait')
-
-        self._revert_laptop()
-        laptop_end = self._take_screenshot('laptop_end')
-
-        # Compare screenshots and determine the number of errors.
-        self._verify_similarity(laptop_start, laptop_end)
-        self._verify_difference(laptop_start, landscape)
-        self._verify_difference(landscape, portrait)
-        self._verify_difference(portrait, laptop_end)
-
-        if self.ERRORS:
-            raise error.TestFail('; '.join(set(self.ERRORS)))
-
-    def cleanup(self):
-        self._revert_laptop()
diff --git a/client/site_tests/platform_TempFS/control b/client/site_tests/platform_TempFS/control
deleted file mode 100644
index 9f207ee..0000000
--- a/client/site_tests/platform_TempFS/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kdlucas@chromium.org (Kelly Lucas)"
-DOC = "Verifies temp filesystems have enough available space."
-NAME = "platform_TempFS"
-PURPOSE = "Verify Temp file systems are set up correctly."
-CRITERIA = """
-Fail if any of the following file systems has less free space than our threshold:
-  - /dev
-  - /tmp
-  - /dev/shm
-  - /var/tmp
-  - /run
-  - /run/lock
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-job.run_test('platform_TempFS')
diff --git a/client/site_tests/platform_TempFS/platform_TempFS.py b/client/site_tests/platform_TempFS/platform_TempFS.py
deleted file mode 100755
index 5787d24..0000000
--- a/client/site_tests/platform_TempFS/platform_TempFS.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-__author__ = 'kdlucas@chromium.org (Kelly Lucas)'
-
-import logging, os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class platform_TempFS(test.test):
-    """
-    Test temp file systems.
-    """
-    version = 1
-
-    def run_once(self):
-        errors = 0
-        # The minimum available space we expect on temp filesystems.
-        # TempFS allows 1/2 of Total Memory for each temp fs. Our threshold
-        # allows for 50% usage of space allocated before this test is run.
-
-        threshold = utils.memtotal()/4
-        tempdirs = ['/dev', '/tmp', '/dev/shm', '/var/tmp', '/run',
-                    '/run/lock']
-
-        for dir in tempdirs:
-            if os.path.isdir(dir):
-                # utils.freespace is in bytes, so convert to kb.
-                avail = utils.freespace(dir)/1024
-                if avail < threshold:
-                    logging.error('Not enough available space on %s', dir)
-                    logging.error('%d bytes is minimum, found %d bytes',
-                                  (threshold, avail))
-                    errors += 1
-            else:
-                logging.error('%s does not exist!' % dir)
-                errors += 1
-
-        if errors:
-            raise error.TestFail('There were %d temp directory errors' % errors)
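The deleted TempFS check amounts to "available space on each tmpfs must stay above MemTotal/4 KB" (tmpfs defaults to half of RAM per mount, and the test tolerated 50% usage). A hedged, stdlib-only sketch of the same check, without the autotest utils helpers:

# Stdlib-only sketch of the deleted TempFS check: available space per tmpfs mount
# must exceed MemTotal/4 (in KB). The threshold mirrors the deleted test.
import os

def memtotal_kb():
    with open('/proc/meminfo') as f:
        for line in f:
            if line.startswith('MemTotal:'):
                return int(line.split()[1])
    raise RuntimeError('MemTotal not found')

def check_tempfs(dirs=('/dev', '/tmp', '/dev/shm', '/var/tmp', '/run', '/run/lock')):
    threshold_kb = memtotal_kb() // 4
    failures = []
    for d in dirs:
        if not os.path.isdir(d):
            failures.append('%s does not exist' % d)
            continue
        st = os.statvfs(d)
        avail_kb = st.f_bavail * st.f_frsize // 1024
        if avail_kb < threshold_kb:
            failures.append('%s: %d KB available, need %d KB' % (d, avail_kb, threshold_kb))
    return failures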
diff --git a/client/site_tests/platform_ToolchainTests/control b/client/site_tests/platform_ToolchainTests/control
index d59d3ff..ef1880b 100644
--- a/client/site_tests/platform_ToolchainTests/control
+++ b/client/site_tests/platform_ToolchainTests/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_ToolchainTests"
 
 PURPOSE = """
@@ -19,6 +19,7 @@
 TEST_TYPE = "client"
 TIME="SHORT"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 Verify the code generated by toolchain on DUTs.
diff --git a/client/site_tests/platform_ToolchainTests/platform_ToolchainTests.py b/client/site_tests/platform_ToolchainTests/platform_ToolchainTests.py
index 4b1b115..5f60261 100644
--- a/client/site_tests/platform_ToolchainTests/platform_ToolchainTests.py
+++ b/client/site_tests/platform_ToolchainTests/platform_ToolchainTests.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,6 +7,7 @@
 
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.crash import crash_test
 
 
 class platform_ToolchainTests(test.test):
@@ -19,7 +21,9 @@
         Run `toolchain-tests`, check the exit status, and print the output.
         """
 
-        result = utils.run('toolchain-tests', ignore_status=True)
+        # Run this, but ignore the crashes it generates
+        with crash_test.FilterOut('fortify-runtime-tests'):
+            result = utils.run('toolchain-tests', ignore_status=True)
 
         if result.exit_status != 0:
             logging.error(result.stdout)
diff --git a/client/site_tests/platform_ToolchainTests/src/Makefile b/client/site_tests/platform_ToolchainTests/src/Makefile
index e9a8fef..bd7e67d 100644
--- a/client/site_tests/platform_ToolchainTests/src/Makefile
+++ b/client/site_tests/platform_ToolchainTests/src/Makefile
@@ -21,13 +21,11 @@
 # convenience. Testing diagnostics takes <1sec, and these tests silently fell
 # off the radar before (crbug.com/1159199).
 clang-fortify-tests-1.o: $(fortify-test-src)
-	# FIXME(crbug.com/1159199): Reenable this once the glibc fix is live.
-	# $(fortify-cxx) $(fortify-diag-flags) -D_FORTIFY_SOURCE=1
+	$(fortify-cxx) $(fortify-diag-flags) -D_FORTIFY_SOURCE=1
 	$(fortify-cxx) $(fortify-runtime-flags) -c -D_FORTIFY_SOURCE=1 -o $@
 
 clang-fortify-tests-2.o: $(fortify-test-src)
-	# FIXME(crbug.com/1159199): Reenable this once the glibc fix is live.
-	# $(fortify-cxx) $(fortify-diag-flags) -D_FORTIFY_SOURCE=2
+	$(fortify-cxx) $(fortify-diag-flags) -D_FORTIFY_SOURCE=2
 	$(fortify-cxx) $(fortify-runtime-flags) -c -D_FORTIFY_SOURCE=2 -o $@
 
 clean:
diff --git a/client/site_tests/platform_ToolchainTests/src/README.md b/client/site_tests/platform_ToolchainTests/src/README.md
index 77bc061..eabf9c3 100644
--- a/client/site_tests/platform_ToolchainTests/src/README.md
+++ b/client/site_tests/platform_ToolchainTests/src/README.md
@@ -3,4 +3,4 @@
 ## Summary
 
 toolchain-tests contains compilation and runtime tests for the toolchain on
-Chrome OS.
+ChromeOS.
diff --git a/client/site_tests/platform_ToolchainTests/src/clang-fortify-tests.cpp b/client/site_tests/platform_ToolchainTests/src/clang-fortify-tests.cpp
index 214e3ac..4420d13 100644
--- a/client/site_tests/platform_ToolchainTests/src/clang-fortify-tests.cpp
+++ b/client/site_tests/platform_ToolchainTests/src/clang-fortify-tests.cpp
@@ -8,7 +8,7 @@
 
 // Clang compile-time and run-time tests for glibc FORTIFY.
 //
-// This file is compiled in two configurations ways to give us a sane set of
+// This file is compiled in two configurations to give us a valid set of
 // tests for clang's FORTIFY implementation.
 //
 // One configuration uses clang's diagnostic consumer
diff --git a/client/site_tests/platform_TraceClockMonotonic/control b/client/site_tests/platform_TraceClockMonotonic/control
deleted file mode 100644
index 48f5ce7..0000000
--- a/client/site_tests/platform_TraceClockMonotonic/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "sonnyrao"
-DOC = """
-Checks for "mono" option in trace_clock and runs a binary that creates
-three entries using the trace_marker file to write directly into the trace
-and then processes the trace to make sure things are in the proper order.
-"""
-NAME = "platform_TraceClockMonotonic"
-PURPOSE = """
-Ensure the kernel properly supports monotonic timestamps for
-ftrace events.
-"""
-CRITERIA = """
-Fails if kernel doesn't support tracing or monotonic clock for ftrace events
-or the timestamps aren't in the proper order
-"""
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "FAST"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-
-job.run_test('platform_TraceClockMonotonic')
diff --git a/client/site_tests/platform_TraceClockMonotonic/platform_TraceClockMonotonic.py b/client/site_tests/platform_TraceClockMonotonic/platform_TraceClockMonotonic.py
deleted file mode 100644
index 7424922..0000000
--- a/client/site_tests/platform_TraceClockMonotonic/platform_TraceClockMonotonic.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class platform_TraceClockMonotonic(test.test):
-    """
-    This verifies that the kernel supports monotonic clock timestamps for
-    ftrace events.  This is the same clock that Chrome will use for
-    timestamping its trace events.
-    """
-    version = 1
-
-    executable = 'ftrace-clock-monotonic'
-
-    TRACE_PATH = '/sys/kernel/debug/tracing/'
-    TRACE_CLOCK = TRACE_PATH + 'trace_clock'
-    TRACE_FILE = TRACE_PATH + 'trace'
-    TRACE_ENABLE = TRACE_PATH + 'tracing_on'
-
-    def _setup_trace(self):
-        """
-        Verify that the system supports the monotonic trace clock and set up
-        the trace system to use it, and clean up any old stuff in the trace
-        and enable it.
-        """
-        with open(self.TRACE_CLOCK, 'r+') as clock:
-            content = clock.read()
-            if not 'mono' in content:
-                raise error.TestFail('Kernel does not support monotonic clock')
-
-            # Set up to use the monotonic clock
-            clock.write('mono')
-
-        # clear out the trace
-        with open(self.TRACE_FILE, 'w') as trace:
-            trace.write('')
-
-        # enable tracing
-        with open(self.TRACE_ENABLE, 'w') as enable:
-            enable.write('1')
-
-    def setup(self):
-        """Cleans and makes ftrace-clock-monotonic.c.
-
-        Prepares environment for tests by removing directory we will extract
-        to (if it exists), extracting tarball of tests, and making them.
-        """
-        os.chdir(self.srcdir)
-        utils.make('clean')
-        utils.make()
-
-    def process_trace(self):
-        """Opens the trace file and processes it.
-
-        Looks for the 3 markers that are written out by the binary.  The binary
-        gets a clock timestamp and then writes it out into the trace three times.
-        This looks at each entry and the content of the entry, and verifies that
-        they are all in chronological order.
-        Example trace file without the header:
-           <...>-16484 [003] ...1 509651.512676: tracing_mark_write: start: 509651.512651785
-           <...>-16484 [003] ...1 509651.512680: tracing_mark_write: middle: 509651.512678312
-           <...>-16484 [003] ...1 509651.512682: tracing_mark_write: end: 509651.512680934
-        """
-        with open(self.TRACE_FILE, 'r') as trace:
-            prev_timestamp = 0
-            for line in trace:
-                if 'tracing_mark_write' not in line:
-                    continue
-
-                columns = line.split()
-                entry_timestamp = float(columns[3].replace(':',''))
-                sample_timestamp = float(columns[6])
-                if sample_timestamp > entry_timestamp:
-                    raise error.TestFail('sample timestamp after trace marker entry')
-
-                if sample_timestamp < prev_timestamp:
-                    raise error.TestFail('sample timestamp before previous timestamp')
-                prev_timestamp = entry_timestamp
-
-            if prev_timestamp == 0:
-                raise error.TestFail('no valid timestamps seen in trace file')
-
-    def run_once(self):
-        self._setup_trace()
-        binpath = os.path.join(self.srcdir, self.executable)
-        utils.system_output(binpath, retain_output = True)
-        self.process_trace()
diff --git a/client/site_tests/platform_TraceClockMonotonic/src/Makefile b/client/site_tests/platform_TraceClockMonotonic/src/Makefile
deleted file mode 100644
index 0aa4451..0000000
--- a/client/site_tests/platform_TraceClockMonotonic/src/Makefile
+++ /dev/null
@@ -1,11 +0,0 @@
-EXEC=ftrace-clock-monotonic
-
-all: $(EXEC)
-
-clean:
-	rm -f $(EXEC)
-
-$(EXEC): ftrace-clock-monotonic.c
-	$(CC) $^ -o $@ $(CFLAGS) $(CPPFLAGS) $(LDFLAGS)
-
-.PHONY: clean
diff --git a/client/site_tests/platform_TraceClockMonotonic/src/ftrace-clock-monotonic.c b/client/site_tests/platform_TraceClockMonotonic/src/ftrace-clock-monotonic.c
deleted file mode 100644
index 9a0061c..0000000
--- a/client/site_tests/platform_TraceClockMonotonic/src/ftrace-clock-monotonic.c
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <unistd.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <errno.h>
-#include <time.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-
-
-static int write_marker(int fd, char *name)
-{
-  char buf[1024];
-  struct timespec ts;
-  int size, ret;
-  unsigned long usec;
-
-  ret = clock_gettime(CLOCK_MONOTONIC, &ts);
-  if (ret < 0) {
-    perror("clock_gettime");
-    return 1;
-  }
-
-  // normalize nanoseconds down to microseconds
-  // to make it easier to compare to the entry
-  // timestamps
-  usec = ts.tv_nsec / 1000;
-  size = snprintf(buf, 1024, "%s: %lu.%06lu\n",
-		  name, ts.tv_sec, usec);
-  ret = write(fd, buf, size);
-  if (ret < size) {
-    perror("write");
-    return 1;
-  }
-
-  return 0;
-}
-#define TRACE_PATH "/sys/kernel/debug/tracing/"
-
-int main(int argc, char* argv[]) {
-  int ret, fd;
-
-  fd = open(TRACE_PATH "trace_marker", O_WRONLY);
-  if (fd < 0) {
-    perror("open");
-    return 1;
-  }
-  ret = write_marker(fd, "start");
-  if (ret)
-    goto out;
-
-  ret = write_marker(fd, "middle");
-  if (ret)
-    goto out;
-
-  ret = write_marker(fd, "end");
-
- out:
-  close(fd);
-  return ret;
-}
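The whole TraceClockMonotonic flow is: switch trace_clock to "mono", write a CLOCK_MONOTONIC sample into trace_marker, then check that the payload timestamp never exceeds the entry timestamp ftrace recorded. Here is a compact Python 3 sketch of the write-and-verify idea; the paths and column positions match the deleted test, and it assumes root access with debugfs mounted.

# Compact sketch of the deleted TraceClockMonotonic idea, in Python 3.
# Requires root and a mounted /sys/kernel/debug/tracing.
import time

TRACE = '/sys/kernel/debug/tracing/'

def write_marker(name):
    now = time.clock_gettime(time.CLOCK_MONOTONIC)
    with open(TRACE + 'trace_marker', 'w') as marker:
        marker.write('%s: %.6f\n' % (name, now))

def check_ordering():
    with open(TRACE + 'trace') as trace:
        for line in trace:
            if 'tracing_mark_write' not in line:
                continue
            cols = line.split()
            entry_ts = float(cols[3].rstrip(':'))   # timestamp ftrace stamped on the entry
            sample_ts = float(cols[6])              # timestamp we wrote into the payload
            assert sample_ts <= entry_ts, 'sample timestamp after its trace entry'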
diff --git a/client/site_tests/platform_UdevVars/control b/client/site_tests/platform_UdevVars/control
deleted file mode 100644
index 3de4fca..0000000
--- a/client/site_tests/platform_UdevVars/control
+++ /dev/null
@@ -1,13 +0,0 @@
-AUTHOR = "Chrome OS Team"
-NAME = "platform_UdevVars"
-TIME = "FAST"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "client"
-# TODO(chromium:399007) Move to bvt once it passes on all boards.
-ATTRIBUTES = "suite:experimental"
-DOC = """
-Verifies that ChromeOS-specific udev variables are set correctly by udev rules.
-"""
-
-job.run_test('platform_UdevVars')
diff --git a/client/site_tests/platform_UdevVars/platform_UdevVars.py b/client/site_tests/platform_UdevVars/platform_UdevVars.py
deleted file mode 100755
index 6aee381..0000000
--- a/client/site_tests/platform_UdevVars/platform_UdevVars.py
+++ /dev/null
@@ -1,144 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import pyudev
-import re
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from collections import defaultdict
-from operator import attrgetter
-
-def natural_key(string_):
-    """
-    Derive key for natural sorting.
-    @param string_: String to derive sort key for.
-    From http://stackoverflow.com/questions/34518/natural-sorting-algorithm.
-    """
-    return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string_)]
-
-
-class platform_UdevVars(test.test):
-    """Verify ChromeOS-specific udev variables."""
-    version = 1
-
-
-    def _input_devices(self):
-        """Obtain a list of all /dev/input/event* udev devices."""
-        devices = self.udev.list_devices(subsystem='input')
-        # only consider the event devices
-        devices = filter(attrgetter('device_node'), devices)
-        devices = sorted(devices, key=lambda device: natural_key(device.device_node))
-        return devices
-
-
-    def _get_roles(self):
-        """Get information on input devices and roles from udev."""
-        self.devices_with_role = defaultdict(list)
-
-        logging.debug('Input devices:')
-        for device in self._input_devices():
-            name = device.parent.attributes.get('name', '')
-            logging.debug('  %s [%s]', device.device_node, name)
-            role = device.get('POWERD_ROLE', None)
-            if role:
-                logging.debug('    POWERD_ROLE=%s', role)
-                self.devices_with_role[role].append(device)
-
-
-    def _dump_roles(self):
-        """Log devices grouped by role for easier debugging."""
-        logging.info('Roles:')
-        for role in sorted(self.devices_with_role.keys()):
-            for device in self.devices_with_role[role]:
-                path = device.device_node
-                name = device.parent.attributes.get('name', '')
-                logging.info('  %-21s %s [%s]', role + ':', path, name)
-
-
-    def _dump_udev_attrs(self):
-        """Log udev attributes for selected devices to the debug directory."""
-        for device in self._input_devices():
-            devname = os.path.basename(device.device_node)
-
-            outfile = os.path.join(self.debugdir, "udevattrs.%s" % devname)
-            utils.system('udevadm info --attribute-walk --path=%s > %s' % (
-                    device.sys_path, outfile))
-
-            outfile = os.path.join(self.debugdir, "udevprops.%s" % devname)
-            utils.system('udevadm info --query=property --path=%s > %s' % (
-                    device.sys_path, outfile))
-
-
-    def _verify_roles(self):
-        """Verify that POWERD_ROLE was set on devices as expected."""
-
-        # TODO(chromium:410968): Consider moving this to USE flags instead of
-        # listing devices here.
-        boards_with_touchscreen = ['link', 'samus']
-        boards_maybe_touchscreen = ['rambi', 'peppy', 'glimmer', 'clapper',
-                                    'nyan_big', 'nyan_blaze', 'expresso']
-        boards_chromebox = ['beltino', 'guado', 'mccloud', 'panther', 'rikku', 
-                            'stumpy', 'tidus', 'tricky', 'zako']
-        boards_aio = ['nyan_kitty', 'tiny', 'anglar', 'monroe']
-
-        expect_keyboard = None
-        expect_touchpad = None
-        expect_touchscreen = None
-
-        board = utils.get_board()
-        if board in boards_chromebox or board in boards_aio:
-            expect_keyboard = [0]
-            expect_touchpad = [0]
-        else:
-            expect_keyboard = [1]
-            expect_touchpad = [1]
-
-        if board in boards_with_touchscreen:
-            expect_touchscreen = [1]
-        elif board in boards_maybe_touchscreen:
-            expect_touchscreen = [0, 1]
-        else:
-            expect_touchscreen = [0]
-
-        expected_num_per_role = [
-                ('internal_keyboard', expect_keyboard),
-                ('internal_touchpad', expect_touchpad),
-                ('internal_touchscreen', expect_touchscreen),
-            ]
-
-        for role, expected_num in expected_num_per_role:
-            num = len(self.devices_with_role[role])
-            if num not in expected_num:
-                self.errors += 1
-                logging.error('POWERD_ROLE=%s is present %d times, expected '
-                              'one of %s', role, num, repr(expected_num))
-
-        if len(self.devices_with_role['external_input']) != 0:
-            logging.warn('%d external input devices detected',
-                         len(self.devices_with_role['external_input']))
-
-
-    def initialize(self):
-        self.udev = pyudev.Context()
-
-
-    def run_once(self):
-        """
-        Check that udev variables are assigned correctly by udev rules. In
-        particular, verifies that powerd tags are set correctly.
-        """
-        logging.debug('Board: %s', utils.get_board())
-        self._get_roles()
-        self._dump_roles()
-        self._dump_udev_attrs()
-
-        self.errors = 0
-        self._verify_roles()
-
-        if self.errors != 0:
-            raise error.TestFail('Verification of udev variables failed; see '
-                                 'logs for details')
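For anyone reproducing the UdevVars check by hand, its core is a pyudev walk over the /dev/input/event* nodes, grouping them by the POWERD_ROLE property that the udev rules are supposed to set. A minimal sketch follows; it requires the pyudev package, and the property and attribute names are the ones used by the deleted test.

# Minimal sketch of the POWERD_ROLE inventory from the deleted UdevVars test.
from collections import defaultdict
import pyudev

def devices_by_role():
    ctx = pyudev.Context()
    roles = defaultdict(list)
    for device in ctx.list_devices(subsystem='input'):
        if not device.device_node:          # only keep /dev/input/event* nodes
            continue
        role = device.get('POWERD_ROLE')
        if role:
            roles[role].append(device.device_node)
    return roles

if __name__ == '__main__':
    for role, nodes in sorted(devices_by_role().items()):
        print(role, nodes)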
diff --git a/client/site_tests/policy_AccessibilityTest/control b/client/site_tests/policy_AccessibilityTest/control
deleted file mode 100644
index 634c8fa..0000000
--- a/client/site_tests/policy_AccessibilityTest/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Base test of all the policy_Accessibility* Tests.
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_AccessibilityTest', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_AccessibilityTest/control.HighContrastFalse b/client/site_tests/policy_AccessibilityTest/control.HighContrastFalse
deleted file mode 100644
index 97995e7..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.HighContrastFalse
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.HighContrastFalse'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'HighContrastEnabled' policy is False, the high contrast
-accessibility setting within ChromeOS is disabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='HighContrastEnabled',
-             case=False)
diff --git a/client/site_tests/policy_AccessibilityTest/control.HighContrastNotSet b/client/site_tests/policy_AccessibilityTest/control.HighContrastNotSet
deleted file mode 100644
index 6e50db4..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.HighContrastNotSet
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.HighContrastNotSet'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'HighContrastEnabled' policy is not set (None), the high
-contrast accessibility setting within ChromeOS is disabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='HighContrastEnabled',
-             case=None)
diff --git a/client/site_tests/policy_AccessibilityTest/control.HighContrastTrue b/client/site_tests/policy_AccessibilityTest/control.HighContrastTrue
deleted file mode 100644
index 1c6ce4d..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.HighContrastTrue
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.HighContrastTrue'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'HighContrastEnabled' policy is True, the high contrast
-accessibility setting within ChromeOS is enabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='HighContrastEnabled',
-             case=True)
diff --git a/client/site_tests/policy_AccessibilityTest/control.LargeCursorFalse b/client/site_tests/policy_AccessibilityTest/control.LargeCursorFalse
deleted file mode 100644
index 729e401..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.LargeCursorFalse
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.LargeCursorFalse'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'LargeCursorEnabled' policy is False, the large cursor
-accessibility setting within ChromeOS is disabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='LargeCursorEnabled',
-             case=False)
diff --git a/client/site_tests/policy_AccessibilityTest/control.LargeCursorNotSet b/client/site_tests/policy_AccessibilityTest/control.LargeCursorNotSet
deleted file mode 100644
index 01d44cd..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.LargeCursorNotSet
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.LargeCursorNotSet'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'LargeCursorEnabled' policy is not set (None), the large cursor
-accessibility setting within ChromeOS is disabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='LargeCursorEnabled',
-             case=None)
diff --git a/client/site_tests/policy_AccessibilityTest/control.LargeCursorTrue b/client/site_tests/policy_AccessibilityTest/control.LargeCursorTrue
deleted file mode 100644
index c2b682f..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.LargeCursorTrue
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.LargeCursorTrue'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'LargeCursorEnabled' policy is 1 (on), the large cursor
-accessibility setting within ChromeOS is enabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='LargeCursorEnabled',
-             case=True)
diff --git a/client/site_tests/policy_AccessibilityTest/control.ScreenMagnifierTypeFalse b/client/site_tests/policy_AccessibilityTest/control.ScreenMagnifierTypeFalse
deleted file mode 100644
index 1e3d532..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.ScreenMagnifierTypeFalse
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.ScreenMagnifierTypeFalse'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'ScreenMagnifierType' policy is 0 (off), the screen magnifier
-accessibility setting within ChromeOS is disabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='ScreenMagnifierType',
-             case=0)
diff --git a/client/site_tests/policy_AccessibilityTest/control.ScreenMagnifierTypeNotSet b/client/site_tests/policy_AccessibilityTest/control.ScreenMagnifierTypeNotSet
deleted file mode 100644
index e54ba72..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.ScreenMagnifierTypeNotSet
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.ScreenMagnifierTypeNotSet'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'ScreenMagnifierType' policy is not set (None), the screen
-magnifier accessibility setting within ChromeOS is disabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='ScreenMagnifierType',
-             case=None)
diff --git a/client/site_tests/policy_AccessibilityTest/control.ScreenMagnifierTypeTrue b/client/site_tests/policy_AccessibilityTest/control.ScreenMagnifierTypeTrue
deleted file mode 100644
index 52b9729..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.ScreenMagnifierTypeTrue
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.ScreenMagnifierTypeTrue'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'ScreenMagnifierType' policy is set to 1 (on), the screen
-magnifier accessibility setting within ChromeOS is enabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='ScreenMagnifierType',
-             case=1)
diff --git a/client/site_tests/policy_AccessibilityTest/control.VirtualKeyboardEnabledFalse b/client/site_tests/policy_AccessibilityTest/control.VirtualKeyboardEnabledFalse
deleted file mode 100644
index 2124fb9..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.VirtualKeyboardEnabledFalse
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.VirtualKeyboardEnabledFalse'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'VirtualKeyboardEnabled' policy is False, the virtual keyboard
-accessibility setting within ChromeOS is disabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='VirtualKeyboardEnabled',
-             case=False)
diff --git a/client/site_tests/policy_AccessibilityTest/control.VirtualKeyboardEnabledNotSet b/client/site_tests/policy_AccessibilityTest/control.VirtualKeyboardEnabledNotSet
deleted file mode 100644
index 28e2fe9..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.VirtualKeyboardEnabledNotSet
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.VirtualKeyboardEnabledNotSet'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'VirtualKeyboardEnabled' policy is not set (None), the virtual
-keyboard accessibility setting within ChromeOS is disabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='VirtualKeyboardEnabled',
-             case=None)
diff --git a/client/site_tests/policy_AccessibilityTest/control.VirtualKeyboardEnabledTrue b/client/site_tests/policy_AccessibilityTest/control.VirtualKeyboardEnabledTrue
deleted file mode 100644
index d1c8775..0000000
--- a/client/site_tests/policy_AccessibilityTest/control.VirtualKeyboardEnabledTrue
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_AccessibilityTest.VirtualKeyboardEnabledTrue'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'VirtualKeyboardEnabled' policy is True, the virtual keyboard
-accessibility setting within ChromeOS is enabled.
-
-'''
-
-job.run_test('policy_AccessibilityTest',
-             policy='VirtualKeyboardEnabled',
-             case=True)
diff --git a/client/site_tests/policy_AccessibilityTest/policy_AccessibilityTest.py b/client/site_tests/policy_AccessibilityTest/policy_AccessibilityTest.py
deleted file mode 100644
index 35e1772..0000000
--- a/client/site_tests/policy_AccessibilityTest/policy_AccessibilityTest.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.a11y import a11y_test_base
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_AccessibilityTest(
-        enterprise_policy_base.EnterprisePolicyTest,
-        a11y_test_base.a11y_test_base):
-    """
-    Test effect of the following accessibility policies on Chrome OS:
-    HighContrastEnabled, LargeCursorEnabled, VirtualKeyboardEnabled, and
-    ScreenMagnifierType.
-
-    This test will set the policy and value, then call the Accessibility API
-    to see if the feature is enabled or not.
-
-    """
-    version = 1
-
-    _LOOKUP = {'HighContrastEnabled': 'highContrast',
-               'LargeCursorEnabled': 'largeCursor',
-               'VirtualKeyboardEnabled': 'virtualKeyboard',
-               'ScreenMagnifierType': 'screenMagnifier'}
-
-    def _check_settings(self, policy, case):
-        """Call the accessibility API extension and check the policy was set
-        correctly.
-
-        @param policy: Name of the policy set.
-        @param case: Value of the set policy.
-
-        """
-        value_str = 'true' if case else 'false'
-        feature = self._LOOKUP[policy]
-
-        cmd = ('window.__result = null;\n'
-               'chrome.accessibilityFeatures.%s.get({}, function(d) {'
-               'window.__result = d[\'value\']; });' % (feature))
-        self._extension.ExecuteJavaScript(cmd)
-        poll_cmd = 'window.__result == %s;' % value_str
-        pol_status = self._extension.EvaluateJavaScript(poll_cmd)
-
-        if not pol_status:
-            raise error.TestError('{} setting incorrect'.format(policy))
-
-    def run_once(self, policy, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param policy: Name of the policy to set.
-        @param case: Value of the policy to set.
-
-        """
-
-        # Get the accessibility API extension path from the ally_test_base
-        extension_path = self._get_extension_path()
-
-        self.setup_case(user_policies={policy: case},
-                        extension_paths=[extension_path])
-
-        self._extension = self.cr.get_extension(extension_path)
-        self._check_settings(policy, case)
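The verification pattern in the deleted accessibility test is "set the policy, then ask the chrome.accessibilityFeatures API whether the matching feature ended up enabled." The sketch below shows how the policy-to-feature mapping and the injected JavaScript fit together; the `extension` argument stands in for the Telemetry extension handle the real test obtained via setup_case(), so this is an outline of the check, not a drop-in replacement.

# Sketch of the policy-to-API check from the deleted test. `extension` stands in
# for the Telemetry extension handle the real test got from setup_case().
FEATURE_FOR_POLICY = {
    'HighContrastEnabled': 'highContrast',
    'LargeCursorEnabled': 'largeCursor',
    'VirtualKeyboardEnabled': 'virtualKeyboard',
    'ScreenMagnifierType': 'screenMagnifier',
}

def check_policy(extension, policy, case):
    feature = FEATURE_FOR_POLICY[policy]
    extension.ExecuteJavaScript(
        'window.__result = null; '
        'chrome.accessibilityFeatures.%s.get({}, '
        'function(d) { window.__result = d["value"]; });' % feature)
    expected = 'true' if case else 'false'
    return extension.EvaluateJavaScript('window.__result == %s;' % expected)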
diff --git a/client/site_tests/policy_ArcAudioCaptureAllowed/control b/client/site_tests/policy_ArcAudioCaptureAllowed/control
deleted file mode 100644
index 575da2a..0000000
--- a/client/site_tests/policy_ArcAudioCaptureAllowed/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ArcAudioCaptureAllowed'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc"
-
-DOC = '''
-Verify that the AudioCaptureAllowed ChromeOS Policy propagates to the ARC
-clouddpc setting.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ArcAudioCaptureAllowed', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_ArcAudioCaptureAllowed/control.allowed b/client/site_tests/policy_ArcAudioCaptureAllowed/control.allowed
deleted file mode 100644
index 0531e51..0000000
--- a/client/site_tests/policy_ArcAudioCaptureAllowed/control.allowed
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ArcAudioCaptureAllowed.allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc"
-
-DOC = '''
-Verify when the 'ArcAudioCaptureAllowed' policy is set to True (allow) the ARC
-microphone will be able to record.
-
-'''
-
-job.run_test('policy_ArcAudioCaptureAllowed', case=True)
diff --git a/client/site_tests/policy_ArcAudioCaptureAllowed/control.not_allowed b/client/site_tests/policy_ArcAudioCaptureAllowed/control.not_allowed
deleted file mode 100644
index 466a631..0000000
--- a/client/site_tests/policy_ArcAudioCaptureAllowed/control.not_allowed
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ArcAudioCaptureAllowed.not_allowed'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc"
-
-DOC = '''
-Verify when the 'ArcAudioCaptureAllowed' policy is set to False the ARC
-microphone will be muted.
-
-'''
-
-job.run_test('policy_ArcAudioCaptureAllowed', case=False)
diff --git a/client/site_tests/policy_ArcAudioCaptureAllowed/control.not_set b/client/site_tests/policy_ArcAudioCaptureAllowed/control.not_set
deleted file mode 100644
index 7b9e283..0000000
--- a/client/site_tests/policy_ArcAudioCaptureAllowed/control.not_set
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ArcAudioCaptureAllowed.not_set'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc"
-
-DOC = '''
-Verify when the 'ArcAudioCaptureAllowed' policy is set to None the ARC
-microphone will be able to record.
-
-'''
-
-job.run_test('policy_ArcAudioCaptureAllowed', case=None)
diff --git a/client/site_tests/policy_ArcAudioCaptureAllowed/policy_ArcAudioCaptureAllowed.py b/client/site_tests/policy_ArcAudioCaptureAllowed/policy_ArcAudioCaptureAllowed.py
deleted file mode 100644
index 3f8b29f..0000000
--- a/client/site_tests/policy_ArcAudioCaptureAllowed/policy_ArcAudioCaptureAllowed.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib import error
-
-from autotest_lib.client.common_lib.cros import arc
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_ArcAudioCaptureAllowed(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of the ArcAudioCaptureAllowed ChromeOS policy on ARC.
-
-    This test will launch the ARC container via the ArcEnabled policy, then
-    will verify the status of the mic using dumpsys. If mic can't be unmuted
-    then the policy has been set to False. If mic can be unmuted then it's
-    set to True or None.
-
-    """
-    version = 1
-
-    def _test_microphone_status(self, case):
-        microphone_status = arc.adb_shell("dumpsys | grep microphone")
-
-        if case or case is None:
-            if "no_unmute_microphone" in microphone_status:
-                raise error.TestFail(
-                    "Microphone is muted and it shouldn't be.")
-        else:
-            if "no_unmute_microphone" not in microphone_status:
-                raise error.TestFail(
-                    "Micprophone isn't muted and it should be.")
-
-    def policy_creator(self, case):
-        pol = {'ArcEnabled': True, 'AudioCaptureAllowed': case}
-        return pol
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        policies = self.policy_creator(case)
-
-        self.setup_case(user_policies=policies,
-                        arc_mode='enabled',
-                        use_clouddpc_test=False)
-
-        self._test_microphone_status(case)
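
The tri-state handling above (True and None both mean the microphone must stay recordable) is the part most easily inverted. A minimal sketch of that decision, assuming the input string comes from `adb shell dumpsys | grep microphone`; the sample outputs are illustrative only.

```python
def microphone_state_ok(dumpsys_output, case):
    """Return True when the mic state matches the AudioCaptureAllowed case.

    case may be True, False, or None (unset); True and None both mean the
    microphone must be recordable, False means it must stay muted.
    """
    muted = 'no_unmute_microphone' in dumpsys_output
    if case is False:
        return muted
    return not muted


# Illustrative outputs only, not captured from a real device.
assert microphone_state_ok('mic_mute=false', None)
assert microphone_state_ok('restriction: no_unmute_microphone', False)
assert not microphone_state_ok('restriction: no_unmute_microphone', True)
```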
diff --git a/client/site_tests/policy_ArcBackupRestoreServiceEnabled/control b/client/site_tests/policy_ArcBackupRestoreServiceEnabled/control
deleted file mode 100644
index bb59a13..0000000
--- a/client/site_tests/policy_ArcBackupRestoreServiceEnabled/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ArcBackupRestoreServiceEnabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc, internal_display"
-
-DOC = '''
-Verify that the ArcBackupRestoreServiceEnabled ChromeOS Policy propagates
-to the ARC clouddpc setting.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ArcBackupRestoreServiceEnabled', **args_dict)
diff --git a/client/site_tests/policy_ArcBackupRestoreServiceEnabled/control.disabled b/client/site_tests/policy_ArcBackupRestoreServiceEnabled/control.disabled
deleted file mode 100644
index c4bf91f..0000000
--- a/client/site_tests/policy_ArcBackupRestoreServiceEnabled/control.disabled
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ArcBackupRestoreServiceEnabled.disabled'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc, internal_display"
-
-DOC = '''
-Verify that disabling the ArcBackupRestoreServiceEnabled policy also disables
-Android backup and recovery within the ARC container.
-
-'''
-
-job.run_test('policy_ArcBackupRestoreServiceEnabled', case=0)
diff --git a/client/site_tests/policy_ArcBackupRestoreServiceEnabled/control.enabled b/client/site_tests/policy_ArcBackupRestoreServiceEnabled/control.enabled
deleted file mode 100644
index 2c1759f..0000000
--- a/client/site_tests/policy_ArcBackupRestoreServiceEnabled/control.enabled
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ArcBackupRestoreServiceEnabled.enabled'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc, internal_display"
-
-DOC = '''
-Verify that enabling the ArcBackupRestoreServiceEnabled policy also enables
-Android backup and recovery within the ARC container.
-
-'''
-
-job.run_test('policy_ArcBackupRestoreServiceEnabled', case=2)
diff --git a/client/site_tests/policy_ArcBackupRestoreServiceEnabled/policy_ArcBackupRestoreServiceEnabled.py b/client/site_tests/policy_ArcBackupRestoreServiceEnabled/policy_ArcBackupRestoreServiceEnabled.py
deleted file mode 100644
index 96e387d..0000000
--- a/client/site_tests/policy_ArcBackupRestoreServiceEnabled/policy_ArcBackupRestoreServiceEnabled.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-from autotest_lib.client.common_lib.cros import arc
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_ArcBackupRestoreServiceEnabled(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of policy_ArcBackupRestoreServiceEnabled policy on the
-    ARC++ container within ChromeOS.
-
-    """
-    version = 1
-
-    def verify_policy(self, case):
-        """
-        Verify the policy was properly set
-
-        @param case: integer, value of the policy setting
-
-        """
-        if case:
-            e_msg = 'Backup manager is disabled and should be enabled.'
-        else:
-            e_msg = 'Backup manager is enabled and should be disabled.'
-
-        # Give the ARC container time to set up and configure its policy.
-        utils.poll_for_condition(
-            lambda: self.check_bmgr(case),
-            exception=error.TestFail(e_msg),
-            timeout=45,
-            sleep_interval=5,
-            desc='Checking bmgr status')
-
-    def check_bmgr(self, case):
-        """
-        Check if Android backup and recovery is accessible.
-
-        @param case: integer, value of the policy setting
-
-        @Returns True if accessible and set correctly, False otherwise.
-
-        """
-        b_and_r_status = arc.adb_shell('bmgr enabled')
-
-        if case:
-            if "Backup Manager currently enabled" not in b_and_r_status:
-                return False
-
-        else:
-            if "Backup Manager currently disabled" not in b_and_r_status:
-                return False
-
-        return True
-
-    def run_once(self, case):
-        """
-        @param case: integer, value of the policy setting
-
-        """
-        pol = {'ArcEnabled': True,
-               'ArcBackupRestoreServiceEnabled': case}
-        self.setup_case(user_policies=pol,
-                        arc_mode='enabled',
-                        use_clouddpc_test=False)
-        self.verify_policy(case)
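
The polling pattern above retries the `bmgr enabled` check while the ARC container applies the policy. A standalone sketch of the same idea using only the standard library; `run_adb` in the usage comment is a hypothetical shell helper, not the autotest API.

```python
import time


def poll(condition, timeout=45, interval=5):
    """Stdlib stand-in for utils.poll_for_condition: retry until true or timeout."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if condition():
            return True
        time.sleep(interval)
    return False


def bmgr_matches_policy(bmgr_output, enabled):
    """Map 'bmgr enabled' output onto the expected policy state."""
    if enabled:
        return 'Backup Manager currently enabled' in bmgr_output
    return 'Backup Manager currently disabled' in bmgr_output


# Usage (run_adb is a hypothetical shell helper, not the autotest API):
#   ok = poll(lambda: bmgr_matches_policy(run_adb('bmgr enabled'), enabled=True))
```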
diff --git a/client/site_tests/policy_ArcDisableScreenshots/control b/client/site_tests/policy_ArcDisableScreenshots/control
deleted file mode 100644
index 4b218c3..0000000
--- a/client/site_tests/policy_ArcDisableScreenshots/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_ArcDisableScreenshots'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc, internal_display"
-
-DOC = '''
-Verify that the DisableScreenshots ChromeOS Policy propagates to the ARC
-clouddpc setting.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ArcDisableScreenshots', **args_dict)
diff --git a/client/site_tests/policy_ArcDisableScreenshots/control.allow b/client/site_tests/policy_ArcDisableScreenshots/control.allow
deleted file mode 100644
index 3c8dba9..0000000
--- a/client/site_tests/policy_ArcDisableScreenshots/control.allow
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_ArcDisableScreenshots.allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc, internal_display"
-
-DOC = '''
-Verify that when the 'DisableScreenshots' policy is set to False (allow),
-screenshots can also be taken within the ARC container.
-
-'''
-
-job.run_test('policy_ArcDisableScreenshots',
-             case=False)
diff --git a/client/site_tests/policy_ArcDisableScreenshots/control.disable b/client/site_tests/policy_ArcDisableScreenshots/control.disable
deleted file mode 100644
index 5de90a9..0000000
--- a/client/site_tests/policy_ArcDisableScreenshots/control.disable
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_ArcDisableScreenshots.disable'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc, internal_display"
-
-DOC = '''
-Verify that when the 'DisableScreenshots' policy is set to True (disable),
-screenshots are also blocked within the ARC container.
-
-'''
-
-job.run_test('policy_ArcDisableScreenshots',
-             case=True)
diff --git a/client/site_tests/policy_ArcDisableScreenshots/control.not_set b/client/site_tests/policy_ArcDisableScreenshots/control.not_set
deleted file mode 100644
index c4243fb..0000000
--- a/client/site_tests/policy_ArcDisableScreenshots/control.not_set
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_ArcDisableScreenshots.not_set'
-# Disable this test until it can be fixed: http://b/170958240
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc, internal_display"
-
-DOC = '''
-Verify that when the 'DisableScreenshots' policy is not set, screenshots can
-still be taken within the ARC container.
-
-'''
-
-job.run_test('policy_ArcDisableScreenshots',
-             case=None)
- 
\ No newline at end of file
diff --git a/client/site_tests/policy_ArcDisableScreenshots/policy_ArcDisableScreenshots.py b/client/site_tests/policy_ArcDisableScreenshots/policy_ArcDisableScreenshots.py
deleted file mode 100644
index df1654a..0000000
--- a/client/site_tests/policy_ArcDisableScreenshots/policy_ArcDisableScreenshots.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-from autotest_lib.client.common_lib.cros import arc
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_ArcDisableScreenshots(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of policy_ArcDisableScreenshots policy on the ARC++ container
-    within ChromeOS.
-
-    """
-    version = 1
-
-    POLICY_NAME = 'ArcPolicy'
-
-    def verify_policy(self, case):
-        """
-        Verify the policy was properly set
-
-        @param case: bool, value of the policy setting
-
-        """
-        if case:
-            e_msg = 'ARC++ Screenshot Taken when it should not have been'
-        else:
-            e_msg = 'ARC++ Screenshot was blocked when it should not have been'
-
-        # Give the ARC container time to set up and configure its policy.
-        utils.poll_for_condition(
-            lambda: self.check_screenshot(case),
-            exception=error.TestFail(e_msg),
-            timeout=30,
-            sleep_interval=1,
-            desc='Checking for screenshot file size')
-
-    def check_screenshot(self, case):
-        """
-        Take a screenshot and check its size to see if the policy was set
-        correctly.
-
-        @param case: bool, value of the policy setting
-
-        @Returns True if the screenshot setting was correct, False otherwise.
-
-        """
-        # Remove any lingering possible screenshots
-        arc.adb_shell('rm -f /sdcard/test.png', ignore_status=True)
-
-        # Take a screenshot, then check its size
-        arc.adb_shell('screencap > /sdcard/test.png', ignore_status=True)
-        screenshotsize = arc.adb_shell('du -s /sdcard/test.png',
-                                       ignore_status=True).split()[0]
-
-        # Some devices' screenshots may contain metadata of up to 8b.
-        if case and int(screenshotsize) > 8:
-            return False
-        # No screenshot should be under 100b
-        elif not case and int(screenshotsize) < 100:
-            return False
-
-        return True
-
-    def run_once(self, case):
-        """
-        @param case: bool, value of the policy setting
-
-        """
-        pol = {'ArcEnabled': True,
-               'DisableScreenshots': case}
-        self.setup_case(user_policies=pol,
-                        arc_mode='enabled',
-                        use_clouddpc_test=False)
-
-        self.verify_policy(case)
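
The size heuristic above is the whole check: a blocked capture leaves at most a few units of metadata, while a real capture is comfortably larger. A minimal sketch of that rule, assuming sizes come from `du -s` on the capture path and keeping the test's 8/100 thresholds.

```python
def screenshot_matches_policy(size, screenshots_disabled):
    """Return True when the captured file size agrees with DisableScreenshots."""
    if screenshots_disabled:
        return size <= 8     # capture blocked: at most metadata remains
    return size >= 100       # capture allowed: a real image was written


assert screenshot_matches_policy(0, screenshots_disabled=True)
assert screenshot_matches_policy(512, screenshots_disabled=False)
assert not screenshot_matches_policy(512, screenshots_disabled=True)
```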
diff --git a/client/site_tests/policy_ArcExternalStorageDisabled/control b/client/site_tests/policy_ArcExternalStorageDisabled/control
deleted file mode 100644
index 24cbc73..0000000
--- a/client/site_tests/policy_ArcExternalStorageDisabled/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_ArcExternalStorageDisabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of ExternalStorageDisabled policy on ARC behavior.
-
-This test verifies the effect of the ExternalStorageDisabled user policy on
-ARC client behavior. It exercises all valid policy values across three
-test cases: 'True_Block', 'NotSet_Allow', and 'False_Allow'.
-
-This test requires some sort of external storage to be connected to the DUT to
-pass. If running on a lab machine, trigger this through the
-policy_ExternalStorageDisabledServer server test.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ArcExternalStorageDisabled', **args_dict)
diff --git a/client/site_tests/policy_ArcExternalStorageDisabled/policy_ArcExternalStorageDisabled.py b/client/site_tests/policy_ArcExternalStorageDisabled/policy_ArcExternalStorageDisabled.py
deleted file mode 100644
index d9dc129..0000000
--- a/client/site_tests/policy_ArcExternalStorageDisabled/policy_ArcExternalStorageDisabled.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import arc
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_ArcExternalStorageDisabled(
-        enterprise_policy_base.EnterprisePolicyTest):
-    version = 1
-
-    POLICY_NAME = 'ExternalStorageDisabled'
-    TEST_CASES = {
-        'True_Block': True,
-        'False_Allow': False,
-        'NotSet_Allow': None
-    }
-
-    def _test_arc_external_storage(self, policy_value):
-        """
-        Verify the behavior of the ExternalStorageDisabled policy on ARC.
-
-        Check the /storage directory and verify that it is empty if the
-        policy disables access to external storage, or not empty if external
-        storage is allowed.
-
-        @param policy_value: policy value for this case.
-
-        @raises error.TestFail: If the contents of the /media/removable
-            directory do not match the policy behavior.
-
-        """
-
-        arc_dirs = set(arc.adb_shell('ls /storage').split())
-
-        base_dirs = set(['emulated', 'self', 'MyFiles'])
-
-        usb_parts = arc_dirs - base_dirs
-        if policy_value:
-            if usb_parts:
-                raise error.TestFail('External storage was disabled but '
-                                     'external storage detected')
-        elif not usb_parts:
-            raise error.TestFail('External storage enabled but external '
-                                 'storage not found')
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        pol = {'ArcEnabled': True,
-               'ExternalStorageDisabled': case_value}
-        self.setup_case(user_policies=pol,
-                        arc_mode='enabled',
-                        use_clouddpc_test=False)
-        self._test_arc_external_storage(case_value)
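
The storage check above is plain set arithmetic over `ls /storage` inside ARC. A minimal sketch, assuming the same base entries the test hard-codes; the USB partition name in the example is made up.

```python
BASE_DIRS = {'emulated', 'self', 'MyFiles'}   # always present under /storage


def external_storage_ok(ls_storage_output, storage_disabled):
    """Return True when /storage contents agree with ExternalStorageDisabled."""
    extra = set(ls_storage_output.split()) - BASE_DIRS
    if storage_disabled:
        return not extra      # policy True: no external mounts may appear
    return bool(extra)        # policy False/unset: the USB partition must show up


assert external_storage_ok('emulated self MyFiles', storage_disabled=True)
assert external_storage_ok('emulated self MyFiles 0000-ABCD',
                           storage_disabled=False)
```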
diff --git a/client/site_tests/policy_ArcVideoCaptureAllowed/control b/client/site_tests/policy_ArcVideoCaptureAllowed/control
deleted file mode 100644
index 99c7238..0000000
--- a/client/site_tests/policy_ArcVideoCaptureAllowed/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_ArcVideoCaptureAllowed'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc"
-
-DOC = '''
-Verify that the ArcVideoCaptureAllowed ChromeOS Policy propagates to the ARC
-clouddpc setting."
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ArcVideoCaptureAllowed', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_ArcVideoCaptureAllowed/control.allowed b/client/site_tests/policy_ArcVideoCaptureAllowed/control.allowed
deleted file mode 100644
index 68b2b7d..0000000
--- a/client/site_tests/policy_ArcVideoCaptureAllowed/control.allowed
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_ArcVideoCaptureAllowed.allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc"
-
-DOC = '''
-Verify when the 'ArcVideoCaptureAllowed' policy is set to True (allow) the ARC
-Camera within the ARC container can be launched.
-
-'''
-
-job.run_test('policy_ArcVideoCaptureAllowed',
-             case=True)
\ No newline at end of file
diff --git a/client/site_tests/policy_ArcVideoCaptureAllowed/control.disable b/client/site_tests/policy_ArcVideoCaptureAllowed/control.disable
deleted file mode 100644
index 217cd20..0000000
--- a/client/site_tests/policy_ArcVideoCaptureAllowed/control.disable
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_ArcVideoCaptureAllowed.disable'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc"
-
-DOC = '''
-Verify when the 'ArcVideoCaptureAllowed' policy is set to False (disable) the ARC
-Camera within the ARC container cannot be launched.
-
-'''
-
-job.run_test('policy_ArcVideoCaptureAllowed',
-             case=False)
\ No newline at end of file
diff --git a/client/site_tests/policy_ArcVideoCaptureAllowed/control.not_set b/client/site_tests/policy_ArcVideoCaptureAllowed/control.not_set
deleted file mode 100644
index 6d2ab61..0000000
--- a/client/site_tests/policy_ArcVideoCaptureAllowed/control.not_set
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_ArcVideoCaptureAllowed.not_set'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc"
-
-DOC = '''
-Verify when the 'ArcVideoCaptureAllowed' policy is not set (None) the ARC
-Camera within the ARC container can be launched.
-
-'''
-
-job.run_test('policy_ArcVideoCaptureAllowed',
-             case=None)
\ No newline at end of file
diff --git a/client/site_tests/policy_ArcVideoCaptureAllowed/policy_ArcVideoCaptureAllowed.py b/client/site_tests/policy_ArcVideoCaptureAllowed/policy_ArcVideoCaptureAllowed.py
deleted file mode 100644
index 1020d97..0000000
--- a/client/site_tests/policy_ArcVideoCaptureAllowed/policy_ArcVideoCaptureAllowed.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import time
-
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.input_playback import keyboard
-
-
-class policy_ArcVideoCaptureAllowed(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of the ArcVideoCaptureAllowed ChromeOS policy on ARC.
-
-    This test will launch the ARC container via the ArcEnabled policy, then
-    will check the behavior of the passthrough policy VideoCaptureAllowed.
-
-    When the policy is set to False, Video Capture is not allowed. To test
-    this, we will attempt to launch the ARC Camera and check the UI to see
-    if the Camera was launched or not.
-
-    """
-    version = 1
-
-    def _launch_Arc_Cam(self):
-        """Grant the Camera location permission, and launch the Camera app."""
-        self.ui.click_and_wait_for_item_with_retries(
-            'Launcher',
-            '/Search your device, apps/',
-            isRegex_wait=True)
-        self.ui.doDefault_on_obj('/Search your device, apps/', isRegex=True)
-        for button in 'cam':
-            time.sleep(0.1)
-            self.keyboard.press_key(button)
-        self.ui.wait_for_ui_obj('/Camera/', isRegex=True)
-        self.ui.doDefault_on_obj('/Camera/', isRegex=True)
-
-    def _test_Arc_cam_status(self, expected):
-        """
-        Test if the Arc Camera has been opened, or not.
-
-        @param expected: bool, value of the VideoCaptureAllowed policy.
-
-        """
-        # Check via the UI whether the Camera actually opened.
-
-        if expected is False:
-            self.ui.did_obj_not_load('/Switch to take photo/',
-                                     isRegex=True,
-                                     timeout=10)
-        else:
-            self.ui.wait_for_ui_obj('/Switch to take photo/',
-                                    isRegex=True,
-                                    timeout=10)
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        pol = {'ArcEnabled': True,
-               'VideoCaptureAllowed': case}
-
-        self.setup_case(user_policies=pol,
-                        arc_mode='enabled',
-                        use_clouddpc_test=False)
-
-        # Set up keyboard and UI automation before launching the Camera.
-        self.keyboard = keyboard.Keyboard()
-        self.ui.start_ui_root(self.cr)
-
-        self._launch_Arc_Cam()
-        self._test_Arc_cam_status(case)
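
The expectation above is tri-state: only an explicit False should keep the ARC Camera from opening. A one-line sketch of that mapping.

```python
def camera_should_open(case):
    """Only an explicit False may keep the ARC Camera from launching."""
    return case is not False


assert camera_should_open(True)
assert camera_should_open(None)       # unset behaves like "allowed"
assert not camera_should_open(False)
```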
diff --git a/client/site_tests/policy_AudioOutputAllowed/control b/client/site_tests/policy_AudioOutputAllowed/control
deleted file mode 100644
index 3c1e040..0000000
--- a/client/site_tests/policy_AudioOutputAllowed/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_AudioOutputAllowed'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-CRITERIA = 'Test will fail if audio mute state does not match the policy'
-
-DOC = '''
-Verify effect of AudioOutputAllowed policy on Chrome OS behavior.
-
-This test verifies the effect of the AudioOutputAllowed user policy on
-Chrome OS client behavior through the 3.5mm output. It exercises all valid
-policy values across three test cases: 'True_Allow', 'False_Block', and
-'NotSet_Allow'.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_AudioOutputAllowed', **args_dict)
diff --git a/client/site_tests/policy_AudioOutputAllowed/control.false_block b/client/site_tests/policy_AudioOutputAllowed/control.false_block
deleted file mode 100644
index be45dd3..0000000
--- a/client/site_tests/policy_AudioOutputAllowed/control.false_block
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_AudioOutputAllowed.False_Block'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy, suite:bvt-perbuild'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-This test verifies the effect of the AudioOutputAllowed user policy on
-Chrome OS client behavior through the 3.5mm output. It verifies Chrome OS
-behavior is correct when policy is set to 'False'.
-
-'''
-
-job.run_test('policy_AudioOutputAllowed', case='False_Block')
diff --git a/client/site_tests/policy_AudioOutputAllowed/control.notSet_allow b/client/site_tests/policy_AudioOutputAllowed/control.notSet_allow
deleted file mode 100644
index c815695..0000000
--- a/client/site_tests/policy_AudioOutputAllowed/control.notSet_allow
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_AudioOutputAllowed.NotSet_Allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy, suite:bvt-perbuild'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-This test verifies the effect of the AudioOutputAllowed user policy on
-Chrome OS client behavior through the 3.5mm output. It verifies Chrome OS
-behavior is correct when policy is unset.
-
-'''
-
-job.run_test('policy_AudioOutputAllowed', case='NotSet_Allow')
diff --git a/client/site_tests/policy_AudioOutputAllowed/control.true_allow b/client/site_tests/policy_AudioOutputAllowed/control.true_allow
deleted file mode 100644
index 01e2967..0000000
--- a/client/site_tests/policy_AudioOutputAllowed/control.true_allow
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_AudioOutputAllowed.True_Allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy, suite:bvt-perbuild'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-This test verifies the effect of the AudioOutputAllowed user policy on
-Chrome OS client behavior through the 3.5mm output. It verifies Chrome OS
-behavior is correct when policy is set to 'True'.
-
-'''
-
-job.run_test('policy_AudioOutputAllowed', case='True_Allow')
diff --git a/client/site_tests/policy_AudioOutputAllowed/policy_AudioOutputAllowed.py b/client/site_tests/policy_AudioOutputAllowed/policy_AudioOutputAllowed.py
deleted file mode 100644
index 1166718..0000000
--- a/client/site_tests/policy_AudioOutputAllowed/policy_AudioOutputAllowed.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_AudioOutputAllowed(
-        enterprise_policy_base.EnterprisePolicyTest):
-    version = 1
-    POLICY_NAME = 'AudioOutputAllowed'
-    TEST_CASES = {
-        'NotSet_Allow': None,
-        'True_Allow': True,
-        'False_Block': False
-    }
-    NOT_MUTED = '/Volume is on/'
-    MUTED = '/Volume is muted/'
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        self.setup_case(user_policies={self.POLICY_NAME: case_value})
-        self.ui.start_ui_root(self.cr)
-        self.ui.doDefault_on_obj("/Status tray/", isRegex=True)
-        if case_value is False:
-            self.ui.wait_for_ui_obj(name=self.MUTED, isRegex=True)
-            self.ui.did_obj_not_load(name=self.NOT_MUTED, isRegex=True)
-        else:
-            self.ui.wait_for_ui_obj(name=self.NOT_MUTED, isRegex=True)
-            self.ui.did_obj_not_load(name=self.MUTED, isRegex=True)
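
The assertion above reduces to picking which status-tray volume node must be present for a given case value. A minimal sketch using the same regex names as the MUTED/NOT_MUTED constants.

```python
def expected_volume_node(case_value):
    """Return the status-tray node that must be present for this case value."""
    return '/Volume is muted/' if case_value is False else '/Volume is on/'


assert expected_volume_node(False) == '/Volume is muted/'
assert expected_volume_node(None) == '/Volume is on/'
assert expected_volume_node(True) == '/Volume is on/'
```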
diff --git a/client/site_tests/policy_AutotestSanity/control b/client/site_tests/policy_AutotestSanity/control
deleted file mode 100644
index b7da73a..0000000
--- a/client/site_tests/policy_AutotestSanity/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_AutotestSanity'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy, suite:bvt-perbuild, suite:smoke'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-# TODO(rzakarian): create enterprise dependency or figure out if 'arc' is good
-# enough.
-DEPENDENCIES = 'arc'
-
-DOC = '''
-Super small autotest to be put on CQ.
-
-The purpose of this autotest is to verify that all the basics of the
-Enterprise autotest work. Policy is set and applied.
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_AutotestSanity', **args_dict)
diff --git a/client/site_tests/policy_AutotestSanity/policy_AutotestSanity.py b/client/site_tests/policy_AutotestSanity/policy_AutotestSanity.py
deleted file mode 100644
index 4151a7a..0000000
--- a/client/site_tests/policy_AutotestSanity/policy_AutotestSanity.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_AutotestSanity(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Super small autotest to be put on CQ.
-
-    The purpose of this autotest is to verify that all the basics of the
-    Enterprise autotest work. Policy is set and applied.
-
-    Test will verify these areas work:
-    getAllPolicies API
-    test_policyserver & protos
-    Chrome login
-    """
-    version = 1
-
-    POLICY_NAME = 'AllowDinosaurEasterEgg'
-
-
-    def run_once(self):
-        """
-        Setup and run the test configured for the specified test case.
-
-        """
-        self.setup_case(user_policies={self.POLICY_NAME: True})
diff --git a/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control b/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control
deleted file mode 100644
index ae1603b..0000000
--- a/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_ChromeOsLockOnIdleSuspend'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of ChromeOsLockOnIdleSuspend policy on client behavior.
-
-This test verifies the effect of the ChromeOsLockOnIdleSuspend user policy on
-Chrome OS client behavior and appearance. It exercises all valid policy values
-with three test cases: True_Lock, False_Unlock, and NotSet_Unlock.
-
-The test shall pass if the 'Require password to wake from sleep' check box:
-1) is checked when ChromeOsLockOnIdleSuspend policy is set true,
-2) is unchecked when set false,
-3) is editable when ChromeOsLockOnIdleSuspend is not set,
-4) is uneditable when set True or False;
-and if the device, after it goes to sleep:
-1) displays the lock screen when ChromeOsLockOnIdleSuspend is set True,
-2) does not display the lock screen when set False or Not set.
-The test shall fail if any of the above behaviors are not enforced.
-
-This control file allows CloudDPC E2E tests to run any test case defined in
-this test via command-line.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ChromeOsLockOnIdleSuspend', **args_dict)
diff --git a/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control.false_unlock b/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control.false_unlock
deleted file mode 100644
index f800057..0000000
--- a/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control.false_unlock
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_ChromeOsLockOnIdleSuspend.false_unlock'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of ChromeOsLockOnIdleSuspend policy.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-ChromeOsLockOnIdleSuspend user policy is set False.
-
-The test case shall pass if the 'Require password to wake from sleep' check
-box is unchecked and uneditable, and the Screen Lock is not displayed when
-device goes to sleep. It shall fail if this behavior is not enforced.
-
-'''
-
-job.run_test('policy_ChromeOsLockOnIdleSuspend', case='False_Unlock')
diff --git a/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control.notset_unlock b/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control.notset_unlock
deleted file mode 100644
index a5e5f04..0000000
--- a/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control.notset_unlock
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_ChromeOsLockOnIdleSuspend.notset_unlock'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of ChromeOsLockOnIdleSuspend policy.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-ChromeOsLockOnIdleSuspend user policy is Not set.
-
-The test case shall pass if the 'Require password to wake from sleep' check
-box is user editable, and the Screen Lock is not displayed when the device
-goes to sleep. It shall fail if this behavior is not enforced.
-
-'''
-
-job.run_test('policy_ChromeOsLockOnIdleSuspend', case='NotSet_Unlock')
diff --git a/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control.true_lock b/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control.true_lock
deleted file mode 100644
index a660b9a..0000000
--- a/client/site_tests/policy_ChromeOsLockOnIdleSuspend/control.true_lock
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_ChromeOsLockOnIdleSuspend.true_lock'
-# TODO(krishnargv): Uncomment ATTRIBUTES line to add case to suite:ent-nightly
-# when crbug.com/666430 is fixed.
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of ChromeOsLockOnIdleSuspend policy.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-ChromeOsLockOnIdleSuspend user policy is set True.
-
-The test case shall pass if the 'Require password to wake from sleep' check
-box is checked and uneditable, and the Screen Lock is displayed when the
-device goes to sleep. It shall fail if this behavior is not enforced.
-
-'''
-
-job.run_test('policy_ChromeOsLockOnIdleSuspend', case='True_Lock')
diff --git a/client/site_tests/policy_ChromeOsLockOnIdleSuspend/policy_ChromeOsLockOnIdleSuspend.py b/client/site_tests/policy_ChromeOsLockOnIdleSuspend/policy_ChromeOsLockOnIdleSuspend.py
deleted file mode 100644
index 9a7e6a3..0000000
--- a/client/site_tests/policy_ChromeOsLockOnIdleSuspend/policy_ChromeOsLockOnIdleSuspend.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.power import power_status
-
-
-class policy_ChromeOsLockOnIdleSuspend(
-            enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of ChromeOsLockOnIdleSuspend policy on Chrome OS behavior.
-
-    This test verifies the behavior and appearance of the 'Require password
-    to wake from sleep' check box setting in the Security: Idle Settings
-    section of the chrome://settings page for all valid values of the user
-    policy ChromeOsLockOnIdleSuspend: True, False, and Not set. The
-    corresponding test cases are: True_Lock, False_Unlock, NotSet_Unlock.
-
-    Note: True_Lock case is not run as part of the regression suite due to
-    bug crbug.com/666430. See control.true_lock for details.
-
-    """
-    version = 1
-
-    POLICY_NAME = 'ChromeOsLockOnIdleSuspend'
-    TEST_CASES = {
-        'True_Lock': True,
-        'False_Unlock': False,
-        'NotSet_Unlock': None
-    }
-    IDLE_ACTION_DELAY = 5
-    POWER_MANAGEMENT_IDLE_SETTINGS = {
-        'AC': {
-            'Delays': {
-                'ScreenDim': 2000,
-                'ScreenOff': 3000,
-                'IdleWarning': 4000,
-                'Idle': (IDLE_ACTION_DELAY * 1000)
-            },
-            'IdleAction': 'Suspend'
-        },
-        'Battery': {
-            'Delays': {
-                'ScreenDim': 2000,
-                'ScreenOff': 3000,
-                'IdleWarning': 4000,
-                'Idle': (IDLE_ACTION_DELAY * 1000)
-            },
-            'IdleAction': 'Suspend'
-        }
-    }
-    PERCENT_CHARGE_MIN = 10
-    STARTUP_URLS = ['chrome://policy', 'chrome://settings']
-    SUPPORTING_POLICIES = {
-        'AllowScreenLock': True,
-        'LidCloseAction': 0,
-        'PowerManagementIdleSettings': POWER_MANAGEMENT_IDLE_SETTINGS,
-        'RestoreOnStartup': 4,
-        'RestoreOnStartupURLs': STARTUP_URLS
-    }
-
-
-    def initialize(self, **kwargs):
-        """Set up local variables and ensure sufficient battery charge."""
-        self._power_status = power_status.get_status()
-        if not self._power_status.on_ac():
-            # Ensure that the battery has sufficient minimum charge.
-            self._power_status.assert_battery_state(self.PERCENT_CHARGE_MIN)
-
-        logging.info('Device power type is "%s"', self._power_type)
-        super(policy_ChromeOsLockOnIdleSuspend, self).initialize(**kwargs)
-
-
-    @property
-    def _power_type(self):
-        """Return type of power used by DUT: AC or Battery."""
-        return 'AC' if self._power_status.on_ac() else 'Battery'
-
-
-    def _is_screen_locked(self):
-        """Return true if login status indicates that screen is locked."""
-        def _get_screen_locked():
-            """Return isScreenLocked property, if defined."""
-            login_status = self.cr.login_status
-            if (isinstance(login_status, dict) and
-                'isScreenLocked' in login_status):
-                return self.cr.login_status['isScreenLocked']
-            else:
-                logging.debug('login_status: %s', login_status)
-                return None
-
-        return utils.wait_for_value(_get_screen_locked, expected_value=True,
-                                    timeout_sec=self.IDLE_ACTION_DELAY)
-
-
-    def _test_require_password_to_wake(self, policy_value):
-        """
-        Verify CrOS enforces ChromeOsLockOnIdleSuspend policy value.
-
-        @param policy_value: policy value for this case.
-        @raises: TestFail if behavior is incorrect.
-
-        """
-        screen_is_locked = self._is_screen_locked()
-        if screen_is_locked is None:
-            raise error.TestError('Could not determine screen state!')
-
-        # Screen shall be locked if the policy is True, else unlocked.
-        if policy_value:
-            if not screen_is_locked:
-                raise error.TestFail('Screen should be locked.')
-        else:
-            if screen_is_locked:
-                raise error.TestFail('Screen should be unlocked.')
-
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        self.SUPPORTING_POLICIES[self.POLICY_NAME] = case_value
-        self.setup_case(user_policies=self.SUPPORTING_POLICIES)
-        self._test_require_password_to_wake(case_value)
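
The lock check above is a poll on `login_status['isScreenLocked']` bounded by the idle-action delay. A standard-library sketch of that wait; `get_login_status` is an assumed callable returning the status dict, not the autotest API.

```python
import time


def wait_for_screen_lock(get_login_status, expected=True, timeout_sec=5):
    """Poll login status until isScreenLocked equals `expected` or time runs out.

    Returns the last observed value (None if the field was never reported).
    """
    value = None
    deadline = time.time() + timeout_sec
    while time.time() < deadline:
        status = get_login_status()
        if isinstance(status, dict) and 'isScreenLocked' in status:
            value = status['isScreenLocked']
            if value == expected:
                return value
        time.sleep(0.5)
    return value
```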
diff --git a/client/site_tests/policy_CookiesAllowedForUrls/control b/client/site_tests/policy_CookiesAllowedForUrls/control
deleted file mode 100644
index b2473e9..0000000
--- a/client/site_tests/policy_CookiesAllowedForUrls/control
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_CookiesAllowedForUrls'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesAllowedForUrls policy.
-
-This test verifies the effect of the CookiesAllowedForUrls user policy on
-Chrome OS client behavior when the DefaultCookiesSetting user policy is set
-to 2. It exercises a range of policy values using four unique test cases:
-NotSet_Block, SingleUrl_Allow, MultipleUrls_Allow, and MultipleUrls_Block. See
-the test file for a full description of what each test case does.
-
-A test case shall pass iff the browser allows cookies on a test page with a
-URL that matches one or more of the URL patterns listed in
-CookiesAllowedForUrls. It shall fail if the browser:
-- Blocks cookies on a test page with a URL that matches one or more of the
-  URL patterns listed in the CookiesAllowedForUrls policy value.
-- Allows cookies on a test page with a URL that does not match any of the
-  URL patterns listed in the CookiesAllowedForUrls policy value.
-
-This control file allows CloudDPC E2E tests to run any test case defined in
-this test via command-line.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_CookiesAllowedForUrls', **args_dict)
diff --git a/client/site_tests/policy_CookiesAllowedForUrls/control.multipleurls_allow b/client/site_tests/policy_CookiesAllowedForUrls/control.multipleurls_allow
deleted file mode 100644
index 36af5f4..0000000
--- a/client/site_tests/policy_CookiesAllowedForUrls/control.multipleurls_allow
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesAllowedForUrls.multipleurls_allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesAllowedForUrls policy.
-
-This test case verifies the behavior of Chrome OS when CookiesAllowedForUrls
-user policy is set to multiple URL patterns, and DefaultCookiesSetting policy
-is set to 2 (do not allow by default).
-
-The test case shall pass iff the browser allows cookies for any test page with
-a URL that matches any of the URL patterns specified in CookiesAllowedForUrls,
-and blocks cookies on any page with a URL that does not match. It shall fail
-if the browser does not enforce this behavior.
-
-'''
-
-job.run_test('policy_CookiesAllowedForUrls', case='MultipleUrls_Allow')
diff --git a/client/site_tests/policy_CookiesAllowedForUrls/control.multipleurls_block b/client/site_tests/policy_CookiesAllowedForUrls/control.multipleurls_block
deleted file mode 100644
index 847fcdd..0000000
--- a/client/site_tests/policy_CookiesAllowedForUrls/control.multipleurls_block
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesAllowedForUrls.multipleurls_block'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesAllowedForUrls policy.
-
-This test case verifies the behavior of Chrome OS when CookiesAllowedForUrls
-user policy is set to multiple URL patterns, and DefaultCookiesSetting policy
-is set to 2 (do not allow by default).
-
-The test case shall pass iff the browser allows cookies for any test page with
-a URL that matches any of the URL patterns specified in CookiesAllowedForUrls,
-and blocks cookies on any page with a URL that does not match. It shall fail
-if the browser does not enforce this behavior.
-
-'''
-
-job.run_test('policy_CookiesAllowedForUrls', case='MultipleUrls_Block')
diff --git a/client/site_tests/policy_CookiesAllowedForUrls/control.notset_block b/client/site_tests/policy_CookiesAllowedForUrls/control.notset_block
deleted file mode 100644
index 60237b4..0000000
--- a/client/site_tests/policy_CookiesAllowedForUrls/control.notset_block
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesAllowedForUrls.notset_block'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesAllowedForUrls policy.
-
-This test case verifies the behavior of Chrome OS when CookiesAllowedForUrls
-user policy is not set to any URL pattern, and DefaultCookiesSetting policy
-is set to 2 (do not allow by default).
-
-The test case shall pass iff the browser blocks cookies for every test page.
-It shall fail if the browser allows cookies on any test page.
-
-'''
-
-job.run_test('policy_CookiesAllowedForUrls', case='NotSet_Block')
diff --git a/client/site_tests/policy_CookiesAllowedForUrls/control.singleurl_allow b/client/site_tests/policy_CookiesAllowedForUrls/control.singleurl_allow
deleted file mode 100644
index cd6dad7..0000000
--- a/client/site_tests/policy_CookiesAllowedForUrls/control.singleurl_allow
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesAllowedForUrls.singleurl_allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesAllowedForUrls policy.
-
-This test case verifies the behavior of Chrome OS when CookiesAllowedForUrls
-user policy is set to a single URL pattern, and DefaultCookiesSetting policy
-is set to 2 (do not allow by default).
-
-The test case shall pass iff the browser allows cookies for any test page with
-a URL that matches any of the URL patterns specified in CookiesAllowedForUrls,
-and blocks cookies on any page with a URL that does not match. It shall fail
-if the browser does not enforce this behavior.
-
-'''
-
-job.run_test('policy_CookiesAllowedForUrls', case='SingleUrl_Allow')
diff --git a/client/site_tests/policy_CookiesAllowedForUrls/policy_CookiesAllowedForUrls.py b/client/site_tests/policy_CookiesAllowedForUrls/policy_CookiesAllowedForUrls.py
deleted file mode 100644
index 76b5525..0000000
--- a/client/site_tests/policy_CookiesAllowedForUrls/policy_CookiesAllowedForUrls.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_CookiesAllowedForUrls(enterprise_policy_base.EnterprisePolicyTest):
-    """Test effect of the CookiesAllowedForUrls policy on Chrome OS behavior.
-
-    This test implicitly verifies one value of the DefaultCookiesSetting
-    policy as well. When the DefaultCookiesSetting policy value is set to 2,
-    cookies for all URLs shall not be stored (i.e., shall be blocked), except
-    for the URL patterns specified by the CookiesAllowedForUrls policy.
-
-    The test verifies Chrome OS behavior for different values of the
-    CookiesAllowedForUrls policy, i.e., for the policy value set to Not Set,
-    set to a single url/host pattern, or when the policy is set to multiple
-    url/host patterns. It also verifies that cookies are blocked for urls that
-    are not part of the policy value.
-
-    The corresponding four test cases are NotSet_Block, SingleUrl_Allow,
-    MultipleUrls_Allow, and MultipleUrls_Block.
-    """
-    version = 1
-
-    def initialize(self, **kwargs):
-        """Initialize this test."""
-        self._initialize_test_constants()
-        super(policy_CookiesAllowedForUrls, self).initialize(**kwargs)
-        self.start_webserver()
-
-
-    def _initialize_test_constants(self):
-        """Initialize test-specific constants, some from class constants."""
-        self.POLICY_NAME = 'CookiesAllowedForUrls'
-        self.COOKIE_NAME = 'cookie1'
-        self.TEST_FILE = 'cookie_status.html'
-        self.TEST_URL = '%s/%s' % (self.WEB_HOST, self.TEST_FILE)
-        self.COOKIE_ALLOWED_SINGLE_FILE = [self.WEB_HOST]
-        self.COOKIE_ALLOWED_MULTIPLE_FILES = ['http://google.com',
-                                              self.WEB_HOST,
-                                              'http://doesnotmatter.com']
-        self.COOKIE_BLOCKED_MULTIPLE_FILES = ['https://testingwebsite.html',
-                                              'https://somewebsite.com',
-                                              'http://doesnotmatter.com']
-        self.TEST_CASES = {
-            'NotSet_Block': None,
-            'SingleUrl_Allow': self.COOKIE_ALLOWED_SINGLE_FILE,
-            'MultipleUrls_Allow': self.COOKIE_ALLOWED_MULTIPLE_FILES,
-            'MultipleUrls_Block': self.COOKIE_BLOCKED_MULTIPLE_FILES
-        }
-        self.SUPPORTING_POLICIES = {'DefaultCookiesSetting': 2}
-
-
-    def _is_cookie_blocked(self, url):
-        """Return True if cookie is blocked for the URL else return False.
-
-        @param url: Url of the page which is loaded to check whether it's
-                    cookie is blocked or stored.
-        """
-        tab = self.navigate_to_url(url)
-        return tab.GetCookieByName(self.COOKIE_NAME) is None
-
-
-    def _test_cookies_allowed_for_urls(self, policy_value):
-        """Verify CrOS enforces CookiesAllowedForUrls policy value.
-
-        When the CookiesAllowedForUrls policy is set to one or more urls/hosts,
-        check that cookies are not blocked for the urls/urlpatterns listed in
-        the policy value. When set to None, check that cookies are blocked for
-        all URLs.
-
-        @param policy_value: policy value for this case.
-        @raises: TestFail if the observed cookie behavior does not match the
-                 policy value.
-        """
-        cookie_is_blocked = self._is_cookie_blocked(self.TEST_URL)
-
-        if policy_value and self.WEB_HOST in policy_value:
-            if cookie_is_blocked:
-                raise error.TestFail('Cookies should be allowed.')
-        else:
-            if not cookie_is_blocked:
-                raise error.TestFail('Cookies should be blocked.')
-
-
-    def run_once(self, case):
-        """Setup and run the test configured for the specified test case.
-
-        Set the expected |policy_value| and |policies_dict| data defined for
-        the specified test |case|, and run the test.
-
-        @param case: Name of the test case to run.
-        """
-        case_value = self.TEST_CASES[case]
-        self.SUPPORTING_POLICIES[self.POLICY_NAME] = case_value
-        self.setup_case(user_policies=self.SUPPORTING_POLICIES)
-        self._test_cookies_allowed_for_urls(case_value)
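
For reference, the pass/fail rule applied in _test_cookies_allowed_for_urls() reduces to a small standalone sketch. This is illustrative Python, not autotest code; the WEB_HOST value below is a hypothetical stand-in for the class constant.

# Standalone sketch of the CookiesAllowedForUrls verdict: cookies must be
# stored only when the visited host appears in the policy value.

def cookie_allowed(policy_value, web_host):
    """Return True when the policy allow-list contains the visited host."""
    return bool(policy_value) and web_host in policy_value

# Hypothetical host, mirroring the WEB_HOST constant in the deleted test.
WEB_HOST = 'http://localhost:8080'

assert cookie_allowed([WEB_HOST], WEB_HOST)                        # SingleUrl_Allow
assert cookie_allowed(['http://google.com', WEB_HOST], WEB_HOST)   # MultipleUrls_Allow
assert not cookie_allowed(None, WEB_HOST)                          # NotSet_Block
assert not cookie_allowed(['https://somewebsite.com'], WEB_HOST)   # MultipleUrls_Block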
diff --git a/client/site_tests/policy_CookiesBlockedForUrls/control b/client/site_tests/policy_CookiesBlockedForUrls/control
deleted file mode 100644
index 1621a48..0000000
--- a/client/site_tests/policy_CookiesBlockedForUrls/control
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_CookiesBlockedForUrls'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesBlockedForUrls policy on client behavior.
-
-This test verifies the effect of the CookiesBlockedForUrls user policy on
-Chrome OS client behavior when the DefaultCookiesSetting user policy is set
-to 1. It exercises a range of policy values using four unique test cases:
-NotSet_CookiesAllowed, SingleUrl_CookiesBlocked, MultipleUrls_CookiesBlocked,
-and MultipleUrls_CookiesAllowed. See the test file for a full description of
-what each test case does.
-
-A test case shall pass iff the browser blocks cookies on a test page with an
-URL that matches one or more of the URL patterns listed in
-CookiesBlockedForUrls. It shall fail if the browser:
-- Allows cookies on a test page with an URL that matches one or more of the
-  URL patterns listed in the CookiesBlockedForUrls policy value.
-- Blocks cookies on a test page with an URL that does not match any of the
-  URL patterns listed in the CookiesBlockedForUrls policy value.
-
-This control file allows CloudDPC E2E tests to run any test case defined in
-this test via command-line.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_CookiesBlockedForUrls', **args_dict)
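
For reference, the args_dict plumbing used by these control files amounts to parsing 'key=value' strings into keyword arguments for job.run_test(). The sketch below is a rough, standalone approximation, not the actual autotest utils.args_to_dict implementation, and the case name in the example is only illustrative.

# Rough standalone sketch of how 'key=value' command-line args become the
# **args_dict passed to job.run_test().

def args_to_dict(args):
    """Parse a list of 'key=value' strings into a keyword dict."""
    return dict(arg.split('=', 1) for arg in args if '=' in arg)

# e.g. passing "case=SingleUrl_Block" on the command line ends up as:
assert args_to_dict(['case=SingleUrl_Block']) == {'case': 'SingleUrl_Block'}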
diff --git a/client/site_tests/policy_CookiesBlockedForUrls/control.multipleurls_allow b/client/site_tests/policy_CookiesBlockedForUrls/control.multipleurls_allow
deleted file mode 100644
index 13a683a..0000000
--- a/client/site_tests/policy_CookiesBlockedForUrls/control.multipleurls_allow
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesBlockedForUrls.multipleurls_allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesBlockedForUrls policy.
-
-This test case verifies the behavior of Chrome OS when CookiesBlockedForUrls
-user policy is set to multiple URL patterns, and DefaultCookiesSetting policy
-is set to 1 (allow by default).
-
-The test case shall pass iff the browser blocks cookies for any test page with
-an URL that matches any of the URL patterns specified in CookiesBlockedForUrls,
-and allows cookies on any page with an URL that does not match. It shall fail
-if the browser does not enforce this behavior.
-
-'''
-
-job.run_test('policy_CookiesBlockedForUrls', case='MultipleUrls_Allow')
diff --git a/client/site_tests/policy_CookiesBlockedForUrls/control.multipleurls_block b/client/site_tests/policy_CookiesBlockedForUrls/control.multipleurls_block
deleted file mode 100644
index a9bf998..0000000
--- a/client/site_tests/policy_CookiesBlockedForUrls/control.multipleurls_block
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesBlockedForUrls.multipleurls_block'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesBlockedForUrls policy.
-
-This test case verifies the behavior of Chrome OS when CookiesBlockedForUrls
-user policy is set to multiple URL patterns, and DefaultCookiesSetting policy
-is set to 1 (allow by default).
-
-The test case shall pass iff the browser blocks cookies for any test page with
-an URL that matches any of the URL patterns specified in CookiesBlockedForUrls,
-and allows cookies on any page with an URL that does not match. It shall fail
-if the browser does not enforce this behavior.
-
-'''
-
-job.run_test('policy_CookiesBlockedForUrls', case='MultipleUrls_Block')
diff --git a/client/site_tests/policy_CookiesBlockedForUrls/control.notset_allow b/client/site_tests/policy_CookiesBlockedForUrls/control.notset_allow
deleted file mode 100644
index e5c5796..0000000
--- a/client/site_tests/policy_CookiesBlockedForUrls/control.notset_allow
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesBlockedForUrls.notset_allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesBlockedForUrls policy.
-
-This test case verifies the behavior of Chrome OS when CookiesBlockedForUrls
-user policy is not set to any URL pattern, and DefaultCookiesSetting policy
-is set to 1 (allow by default).
-
-The test case shall pass iff the browser allows cookies for every test page.
-It shall fail if the browser blocks cookies on any test page.
-
-'''
-
-job.run_test('policy_CookiesBlockedForUrls', case='NotSet_Allow')
diff --git a/client/site_tests/policy_CookiesBlockedForUrls/control.singleurl_block b/client/site_tests/policy_CookiesBlockedForUrls/control.singleurl_block
deleted file mode 100644
index 4dd369e..0000000
--- a/client/site_tests/policy_CookiesBlockedForUrls/control.singleurl_block
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesBlockedForUrls.singleurl_block'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesBlockedForUrls policy.
-
-This test case verifies the behavior of Chrome OS when CookiesBlockedForUrls
-user policy is set to a single URL pattern, and DefaultCookiesSetting policy
-is set to 1 (allow by default).
-
-The test case shall pass iff the browser blocks cookies for any test page with
-an URL that matches any of the URL patterns specified in CookiesBlockedForUrls,
-and allows cookies on any page with an URL that does not match. It shall fail
-if the browser does not enforce this behavior.
-
-'''
-
-job.run_test('policy_CookiesBlockedForUrls', case='SingleUrl_Block')
diff --git a/client/site_tests/policy_CookiesBlockedForUrls/policy_CookiesBlockedForUrls.py b/client/site_tests/policy_CookiesBlockedForUrls/policy_CookiesBlockedForUrls.py
deleted file mode 100644
index 38b6421..0000000
--- a/client/site_tests/policy_CookiesBlockedForUrls/policy_CookiesBlockedForUrls.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_CookiesBlockedForUrls(enterprise_policy_base.EnterprisePolicyTest):
-    """Test effect of the CookiesBlockedForUrls policy on Chrome OS behavior.
-
-    This test implicitly verifies one value of the DefaultCookiesSetting
-    policy as well. When DefaultCookiesSetting is set to 1, cookies for all
-    URLs shall be stored (i.e., shall not be blocked), except for the URL
-    patterns specified by the CookiesBlockedForUrls policy value.
-
-    The test verifies ChromeOS behaviour for different values of the
-    CookiesBlockedForUrls policy, i.e., for the policy value set to Not Set,
-    set to a single url/host pattern, or when the policy is set to multiple
-    url/host patterns. It also verifies that cookies are allowed for urls that
-    are not part of the policy value.
-
-    The corresponding four test cases are NotSet_Allow, SingleUrl_Block,
-    MultipleUrls_Block, and MultipleUrls_Allow.
-
-    """
-    version = 1
-
-    def initialize(self, **kwargs):
-        """Initialize this test."""
-        self._initialize_test_constants()
-        super(policy_CookiesBlockedForUrls, self).initialize(**kwargs)
-        self.start_webserver()
-
-
-    def _initialize_test_constants(self):
-        """Initialize test-specific constants, some from class constants."""
-        self.POLICY_NAME = 'CookiesBlockedForUrls'
-        self.COOKIE_NAME = 'cookie1'
-        self.TEST_FILE = 'cookie_status.html'
-        self.TEST_URL = '%s/%s' % (self.WEB_HOST, self.TEST_FILE)
-        self.COOKIE_BLOCKED_SINGLE_FILE = [self.WEB_HOST]
-        self.COOKIE_BLOCKED_MULTIPLE_FILES = ['http://google.com',
-                                              self.WEB_HOST,
-                                              'http://doesnotmatter.com']
-        self.COOKIE_ALLOWED_MULTIPLE_FILES = ['https://testingwebsite.html',
-                                              'https://somewebsite.com',
-                                              'http://doesnotmatter.com']
-        self.TEST_CASES = {
-            'NotSet_Allow': None,
-            'SingleUrl_Block': self.COOKIE_BLOCKED_SINGLE_FILE,
-            'MultipleUrls_Block': self.COOKIE_BLOCKED_MULTIPLE_FILES,
-            'MultipleUrls_Allow': self.COOKIE_ALLOWED_MULTIPLE_FILES
-        }
-        self.SUPPORTING_POLICIES = {'DefaultCookiesSetting': 1}
-
-
-    def _is_cookie_blocked(self, url):
-        """Return True if cookie is blocked for the URL else return False.
-
-        @param url: Url of the page which is loaded to check whether it's
-                    cookie is blocked or stored.
-        """
-        tab = self.navigate_to_url(url)
-        return tab.GetCookieByName(self.COOKIE_NAME) is None
-
-
-    def _test_cookies_blocked_for_urls(self, policy_value):
-        """Verify CrOS enforces CookiesBlockedForUrls policy value.
-
-        When the CookiesBlockedForUrls policy is set to one or more urls/hosts,
-        check that cookies are blocked for the urls/urlpatterns listed in
-        the policy value. When set to None, check that cookies are allowed for
-        all URLs.
-
-        @param policy_value: policy value expected.
-
-        @raises: TestFail if the observed cookie behavior does not match the
-                 policy value.
-        """
-        cookie_is_blocked = self._is_cookie_blocked(self.TEST_URL)
-
-        if policy_value and self.WEB_HOST in policy_value:
-            if not cookie_is_blocked:
-                raise error.TestFail('Cookies should be blocked.')
-        else:
-            if cookie_is_blocked:
-                raise error.TestFail('Cookies should be allowed.')
-
-
-    def run_once(self, case):
-        """Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-        """
-        case_value = self.TEST_CASES[case]
-        self.SUPPORTING_POLICIES[self.POLICY_NAME] = case_value
-        self.setup_case(user_policies=self.SUPPORTING_POLICIES)
-        self._test_cookies_blocked_for_urls(case_value)
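
For reference, the policy payload this deleted test handed to setup_case() for the MultipleUrls_Block case looks roughly like the standalone sketch below; the host values are illustrative stand-ins for the class constants, not real test infrastructure.

# Standalone sketch of the user_policies dict built for MultipleUrls_Block.

WEB_HOST = 'http://localhost:8080'   # hypothetical stand-in for self.WEB_HOST

user_policies = {
    'DefaultCookiesSetting': 1,      # allow cookies everywhere by default
    'CookiesBlockedForUrls': ['http://google.com',
                              WEB_HOST,
                              'http://doesnotmatter.com'],
}

# With this payload the test expects the cookie on WEB_HOST to be blocked,
# while cookies on hosts not listed in the policy are stored.
print(user_policies)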
diff --git a/client/site_tests/policy_CookiesSessionOnlyForUrls/control b/client/site_tests/policy_CookiesSessionOnlyForUrls/control
deleted file mode 100644
index e532cce..0000000
--- a/client/site_tests/policy_CookiesSessionOnlyForUrls/control
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesSessionOnlyForUrls'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesSessionOnlyForUrls policy.
-
-This test verifies the effect of the CookiesSessionOnlyForUrls user policy on
-Chrome OS client behavior, when DefaultCookiesSetting=2 (ie, block cookies by
-default). It exercises a set of valid policy values using three unique test
-cases: UrlIsIn_Allow, UrlNotIn_Block, and NotSet_Block. See the test file for
-a full description of what each test case does.
-
-The test case shall pass iff the browser allows cookie storage for a visited
-page whose URL matches any of the patterns in CookiesSessionOnlyForUrls,
-and blocks cookie storage if the policy does not contain a matching
-pattern. It shall fail if the browser does not enforce this behavior.
-
-This control file allows CloudDPC E2E tests to run any test case defined in
-this test via command-line.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_CookiesSessionOnlyForUrls', **args_dict)
diff --git a/client/site_tests/policy_CookiesSessionOnlyForUrls/control.notset_block b/client/site_tests/policy_CookiesSessionOnlyForUrls/control.notset_block
deleted file mode 100644
index 92cd0a5..0000000
--- a/client/site_tests/policy_CookiesSessionOnlyForUrls/control.notset_block
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesSessionOnlyForUrls.notset_block'
-# TODO(krishnargv): enable this test once crbug.com/712713 is fixed.
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesSessionOnlyForUrls policy.
-
-This test case verifies the behavior and appearance of Chrome OS when user
-policy CookiesSessionOnlyForUrls is not set, and DefaultCookiesSetting=2
-(ie, block cookies by default).
-
-The test case shall pass if the browser blocks cookie storage for the test
-page. It shall fail if the browser allows cookie storage for the test page.
-
-'''
-
-job.run_test('policy_CookiesSessionOnlyForUrls', case='NotSet_Block')
diff --git a/client/site_tests/policy_CookiesSessionOnlyForUrls/control.urlisin_allow b/client/site_tests/policy_CookiesSessionOnlyForUrls/control.urlisin_allow
deleted file mode 100644
index 18ab9a5..0000000
--- a/client/site_tests/policy_CookiesSessionOnlyForUrls/control.urlisin_allow
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesSessionOnlyForUrls.urlisin_allow'
-# TODO(krishnargv): enable this test once crbug.com/712713 is fixed.
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesSessionOnlyForUrls policy.
-
-This test case verifies the behavior and appearance of Chrome OS when the user
-policy CookiesSessionOnlyForUrls contains a pattern that matches the URL of a
-visited page, and DefaultCookiesSetting=2 (ie, block cookies by default).
-
-The test case shall pass if the browser allows cookie storage for the test
-page, because the page URL matches one of the URL patterns specified in
-CookiesSessionOnlyForUrls. It shall fail if the browser blocks cookie storage
-for the test page.
-
-'''
-
-job.run_test('policy_CookiesSessionOnlyForUrls', case='UrlIsIn_Allow')
diff --git a/client/site_tests/policy_CookiesSessionOnlyForUrls/control.urlnotin_block b/client/site_tests/policy_CookiesSessionOnlyForUrls/control.urlnotin_block
deleted file mode 100644
index f04849a..0000000
--- a/client/site_tests/policy_CookiesSessionOnlyForUrls/control.urlnotin_block
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_CookiesSessionOnlyForUrls.urlnotin_block'
-# TODO(krishnargv): enable this test once crbug.com/712713 is fixed.
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of CookiesSessionOnlyForUrls policy.
-
-This test case verifies the behavior and appearance of Chrome OS when the user
-policy CookiesSessionOnlyForUrls does not contain a pattern that matches the
-URL of a visited page, and DefaultCookiesSetting=2 (ie, block cookies by
-default).
-
-The test case shall pass if the browser blocks cookie storage for the test
-page, because the page URL does not match any of the URL patterns specified in
-CookiesSessionOnlyForUrls. It shall fail if the browser allows cookie storage
-for the test page.
-
-'''
-
-job.run_test('policy_CookiesSessionOnlyForUrls', case='UrlNotIn_Block')
diff --git a/client/site_tests/policy_CookiesSessionOnlyForUrls/policy_CookiesSessionOnlyForUrls.py b/client/site_tests/policy_CookiesSessionOnlyForUrls/policy_CookiesSessionOnlyForUrls.py
deleted file mode 100644
index 03a9b15..0000000
--- a/client/site_tests/policy_CookiesSessionOnlyForUrls/policy_CookiesSessionOnlyForUrls.py
+++ /dev/null
@@ -1,163 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_CookiesSessionOnlyForUrls(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of CookiesSessionOnlyForUrls policy on Chrome OS.
-
-    The test verifies ChromeOS behaviour and appearance for a set of valid
-    values of the CookiesSessionOnlyForUrls user policy, when user policy
-    DefaultCookiesSetting=2 (block cookies for all URLs). Generally, cookies
-    shall not be stored for any visited page, except for those whose domain
-    matches an URL pattern specified in CookiesSessionOnlyForUrls. Also,
-    these URL patterns shall have their behavior set to 'Clear on exit',
-    indicating that they are marked for deletion when Chrome exits.
-
-    If the policy value is not set, then no visited page is allowed to store
-    cookies. In the same way, if the URL of the visited page is not listed in
-    the policy, then the visited page is not allowed to store cookies. If the
-    URL of the visited page is listed in the policy, then the page is allowed
-    to store cookies for the current session only. The corresponding test
-    cases are NotSet_Block, UrlNotIn_Block, and UrlIsIn_Allow.
-
-    Note that this test does not verify that cookies set to 'Clear on exit'
-    are actually deleted when the session ends. That functionality is tested
-    by the Chrome team.
-
-    """
-    version = 1
-
-    def initialize(self, **kwargs):
-        """Initialize this test."""
-        self._initialize_test_constants()
-        super(policy_CookiesSessionOnlyForUrls, self).initialize(**kwargs)
-        self.start_webserver()
-
-
-    def _initialize_test_constants(self):
-        """Initialize test-specific constants, some from class constants."""
-        self.POLICY_NAME = 'CookiesSessionOnlyForUrls'
-        self.COOKIE_NAME = 'cookie1'
-        self.TEST_FILE = 'cookie_status.html'
-        self.TEST_URL = '%s/%s' % (self.WEB_HOST, self.TEST_FILE)
-        self.COOKIE_EXCEPTIONS_PAGE = (
-            'chrome://settings-frame/contentExceptions#cookies')
-        self.COOKIE_ALLOWED_MULTIPLE_URLS = ['https://testingwebsite.html',
-                                             self.WEB_HOST,
-                                             'http://doesnotmatter.com']
-        self.COOKIE_BLOCKED_MULTIPLE_URLS = ['https://testingwebsite.html',
-                                             'https://somewebsite.com',
-                                             'http://doesnotmatter.com']
-        self.TEST_CASES = {
-            'UrlIsIn_Allow': self.COOKIE_ALLOWED_MULTIPLE_URLS,
-            'UrlNotIn_Block': self.COOKIE_BLOCKED_MULTIPLE_URLS,
-            'NotSet_Block': None
-        }
-        self.SUPPORTING_POLICIES = {'DefaultCookiesSetting': 2}
-
-
-    def _is_cookie_blocked(self, url):
-        """
-        Return True if cookie is blocked for the URL, else return False.
-
-        @param url: URL of the page to load.
-
-        """
-        tab = self.navigate_to_url(url)
-        cookie_value = tab.GetCookieByName(self.COOKIE_NAME)
-        tab.Close()
-        return cookie_value is None
-
-
-    def _is_cookie_clear_on_exit(self, url):
-        """
-        Return True if cookie for |url| has behavior set to 'Clear on exit'.
-
-        @param url: string url pattern for cookie exception.
-        @returns: True if cookie behavior is set to 'Clear on exit'.
-        """
-        js_cmd = ('''
-          var exception_area=document.getElementById('content-settings-exceptions-area');
-          var contents=exception_area.getElementsByClassName('content-area')[0];
-          var contents_children = contents.children;
-          var cookie_idx = -1;
-          var cookie_behavior = '';
-          for (var i=0; i<contents_children.length; i++) {
-            var content = contents_children[i];
-            var type = content.getAttribute('contenttype');
-            if (type == 'cookies') {
-              var cookie_items = content.getElementsByClassName('deletable-item');
-              for (var j=0; j<cookie_items.length; j++) {
-                var cookie_item = cookie_items[j];
-                var cookie_pattern = cookie_item.getElementsByClassName('exception-pattern')[0];
-                var pattern = cookie_pattern.innerText.trim();
-                var cookie_setting = cookie_item.getElementsByClassName('exception-setting')[0];
-                var setting = cookie_setting.innerText.trim();
-                if (pattern == '%s') {
-                  cookie_idx = j;
-                  cookie_behavior = setting;
-                  break;
-                }
-              }
-              break;
-            }
-            if (cookie_idx >= 0) { break; }
-          }
-          cookie_behavior;
-        ''' % url)
-        tab = self.navigate_to_url(self.COOKIE_EXCEPTIONS_PAGE)
-        cookie_behavior = self.get_elements_from_page(tab, js_cmd)
-        tab.Close()
-        return cookie_behavior == 'Clear on exit'
-
-
-    def _test_cookies_allowed_for_urls(self, policy_value):
-        """
-        Verify CrOS enforces CookiesSessionOnlyForUrls policy value.
-
-        When CookiesSessionOnlyForUrls policy is set to a list of one or more
-        URLs, verify that cookies are allowed for a page that matches a
-        URL pattern in the list, but are blocked for a page whose URL pattern
-        is not in the list. When set to None, verify that cookies are
-        blocked for all URLs.
-
-        @param policy_value: policy value expected.
-
-        @raises: TestFail if the observed cookie behavior does not match the
-                 policy value.
-
-        """
-        cookie_is_blocked = self._is_cookie_blocked(self.TEST_URL)
-        if policy_value and self.WEB_HOST in policy_value:
-            if cookie_is_blocked:
-                raise error.TestFail('Cookie should be allowed.')
-        else:
-            if not cookie_is_blocked:
-                raise error.TestFail('Cookie should be blocked.')
-
-        cookie_is_clear_on_exit = self._is_cookie_clear_on_exit(self.WEB_HOST)
-        if policy_value and self.WEB_HOST in policy_value:
-            if not cookie_is_clear_on_exit:
-                raise error.TestFail('Cookie should be Clear on exit.')
-        else:
-            if cookie_is_clear_on_exit:
-                raise error.TestFail('Cookie should not be Clear on exit.')
-
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        self.SUPPORTING_POLICIES[self.POLICY_NAME] = case_value
-        self.setup_case(user_policies=self.SUPPORTING_POLICIES)
-        self._test_cookies_allowed_for_urls(case_value)
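
For reference, the 'Clear on exit' lookup that the JavaScript in _is_cookie_clear_on_exit() performs against the cookie-exceptions page can be expressed over a plain list of (pattern, setting) pairs. The sketch below is standalone and illustrative; the exception data is hypothetical.

# Standalone sketch of the 'Clear on exit' lookup over (pattern, setting) pairs.

def is_clear_on_exit(exceptions, url_pattern):
    """Return True if url_pattern is listed with the 'Clear on exit' setting."""
    for pattern, setting in exceptions:
        if pattern == url_pattern:
            return setting == 'Clear on exit'
    return False

exceptions = [('http://localhost:8080', 'Clear on exit'),
              ('https://somewebsite.com', 'Block')]

assert is_clear_on_exit(exceptions, 'http://localhost:8080')
assert not is_clear_on_exit(exceptions, 'https://somewebsite.com')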
diff --git a/client/site_tests/policy_DefaultSearchProvider/control b/client/site_tests/policy_DefaultSearchProvider/control
deleted file mode 100644
index 869117d..0000000
--- a/client/site_tests/policy_DefaultSearchProvider/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DefaultSearchProvider'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of the DefaultSearchProviderSearchURL and
-DefaultSearchProviderKeyword policy. When the DefaultSearchProviderSearchURL
-policy is set, the specified search url will be used when a value is entered
-in the omnibox. When the DefaultSearchProviderKeyword is set, the value will
-trigger the shortcut used in the omnibox to trigger the search for this
-provider.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-job.run_test('policy_DefaultSearchProvider', **args_dict)
diff --git a/client/site_tests/policy_DefaultSearchProvider/control.Keyword b/client/site_tests/policy_DefaultSearchProvider/control.Keyword
deleted file mode 100644
index ce66c25..0000000
--- a/client/site_tests/policy_DefaultSearchProvider/control.Keyword
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DefaultSearchProvider.Keyword'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify the 'DefaultSearchProviderKeyword' policy will set the shortcut used
-in the omnibox to trigger the search for this provider inside the Chrome
-browser.
-
-'''
-
-job.run_test('policy_DefaultSearchProvider',
-             case='Keyword')
diff --git a/client/site_tests/policy_DefaultSearchProvider/control.SearchURL b/client/site_tests/policy_DefaultSearchProvider/control.SearchURL
deleted file mode 100644
index 70c49f7..0000000
--- a/client/site_tests/policy_DefaultSearchProvider/control.SearchURL
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DefaultSearchProvider.SearchURL'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify the 'policy_DefaultSearchProviderSearchURL' policy will specify the
-URL of the search engine used when doing a default search inside the Chrome
-browser.
-
-'''
-
-job.run_test('policy_DefaultSearchProvider',
-             case='SearchURL')
diff --git a/client/site_tests/policy_DefaultSearchProvider/policy_DefaultSearchProvider.py b/client/site_tests/policy_DefaultSearchProvider/policy_DefaultSearchProvider.py
deleted file mode 100644
index 307fbf5..0000000
--- a/client/site_tests/policy_DefaultSearchProvider/policy_DefaultSearchProvider.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.input_playback import keyboard
-
-
-class policy_DefaultSearchProvider(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Verify effects of the DefaultSearchProviderSearchURL and
-    DefaultSearchProviderKeyword policy. When the
-    DefaultSearchProviderSearchURL policy is set, the specified search url
-    will be used when a value is entered in the omnibox. When the
-    DefaultSearchProviderKeyword is set, the value will trigger the shortcut
-    used in the omnibox to trigger the search for this provider.
-
-    """
-    version = 1
-
-    def _search_check(self, case):
-        """
-        Open a new tab, use the omnibox as a search box, and check the URL.
-
-        @param case: Value of the test being run.
-
-        """
-        self.ui.start_ui_root(self.cr)
-        self.keyboard = keyboard.Keyboard()
-        self.ui.doDefault_on_obj(name='Address and search bar')
-        # The keys to be pressed for the test
-        if case == 'Keyword':
-            buttons = ['d', 'a', 'd', 'tab', 's', 's', 'enter']
-            expected = '{}{}'.format(self.BASE_URL, 'ss')
-        else:
-            buttons = ['f', 's', 'w', 'enter']
-            expected = '{}{}'.format(self.BASE_URL, 'fsw')
-
-        # Enter the buttons
-        for button in buttons:
-            self.keyboard.press_key(button)
-
-        tabFound = False
-        startTime = time.time()
-        while time.time() - startTime < 1:
-            tabs = set([tab.GetUrl() for tab in self.cr.browser.tabs])
-            if expected in tabs:
-                tabFound = True
-                break
-
-        if not tabFound:
-            raise error.TestFail(
-                'Search not formatted correctly. Expected {} got {}'
-                .format(expected, tabs))
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        self.BASE_URL = 'https://fakeurl/search?q='
-        POLICIES = {'DefaultSearchProviderEnabled': True,
-                    'DefaultSearchProviderSearchURL':
-                       '%s{searchTerms}' % (self.BASE_URL)}
-        if case == 'Keyword':
-            POLICIES['DefaultSearchProviderKeyword'] = 'dad'
-        self.setup_case(user_policies=POLICIES)
-        self._search_check(case)
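
For reference, the expected omnibox result URL in this deleted test comes from substituting the typed query into the DefaultSearchProviderSearchURL template. The sketch below is standalone Python using the same fake base URL the test used; it is illustrative, not autotest code.

# Standalone sketch of deriving the expected search URL from the template.

BASE_URL = 'https://fakeurl/search?q='
SEARCH_URL_TEMPLATE = '%s{searchTerms}' % BASE_URL

def expected_search_url(query):
    """Substitute the typed query into the provider's URL template."""
    return SEARCH_URL_TEMPLATE.replace('{searchTerms}', query)

# 'SearchURL' case: typing "fsw" in the omnibox should land on:
assert expected_search_url('fsw') == 'https://fakeurl/search?q=fsw'
# 'Keyword' case: after the "dad<tab>" keyword, the query "ss" is searched:
assert expected_search_url('ss') == 'https://fakeurl/search?q=ss'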
diff --git a/client/site_tests/policy_DefaultSearchProviderEnabled/control b/client/site_tests/policy_DefaultSearchProviderEnabled/control
deleted file mode 100644
index fb8f03a..0000000
--- a/client/site_tests/policy_DefaultSearchProviderEnabled/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DefaultSearchProviderEnabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of the DefaultSearchProviderEnabled policy.
-If the policy is set to True/Not Set then typing search queries in the
-omnibox will result in searching on google.com.
-If the policy is set to False then typing search queries in the
-omnibox will not result in searching on google.com.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-job.run_test('policy_DefaultSearchProviderEnabled', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_DefaultSearchProviderEnabled/control.False b/client/site_tests/policy_DefaultSearchProviderEnabled/control.False
deleted file mode 100644
index f1d1903..0000000
--- a/client/site_tests/policy_DefaultSearchProviderEnabled/control.False
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DefaultSearchProviderEnabled.False'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of DefaultSearchProviderEnabled policy.
-If the policy is set to False then typing search queries in the
-omnibox will not result in searching on google.com.
-
-'''
-
-job.run_test('policy_DefaultSearchProviderEnabled', case=False)
\ No newline at end of file
diff --git a/client/site_tests/policy_DefaultSearchProviderEnabled/control.None b/client/site_tests/policy_DefaultSearchProviderEnabled/control.None
deleted file mode 100644
index 57dc156..0000000
--- a/client/site_tests/policy_DefaultSearchProviderEnabled/control.None
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DefaultSearchProviderEnabled.None'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of DefaultSearchProviderEnabled policy.
-If the policy is set to None then typing search queries in the
-omnibox will result in searching on google.com.
-
-'''
-
-job.run_test('policy_DefaultSearchProviderEnabled', case=None)
\ No newline at end of file
diff --git a/client/site_tests/policy_DefaultSearchProviderEnabled/control.True b/client/site_tests/policy_DefaultSearchProviderEnabled/control.True
deleted file mode 100644
index 77454c0..0000000
--- a/client/site_tests/policy_DefaultSearchProviderEnabled/control.True
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DefaultSearchProviderEnabled.True'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of DefaultSearchProviderEnabled policy.
-If the policy is set to True then typing search queries in the
-omnibox will result in searching on google.com.
-
-'''
-
-job.run_test('policy_DefaultSearchProviderEnabled', case=True)
\ No newline at end of file
diff --git a/client/site_tests/policy_DefaultSearchProviderEnabled/policy_DefaultSearchProviderEnabled.py b/client/site_tests/policy_DefaultSearchProviderEnabled/policy_DefaultSearchProviderEnabled.py
deleted file mode 100644
index 27cefb3..0000000
--- a/client/site_tests/policy_DefaultSearchProviderEnabled/policy_DefaultSearchProviderEnabled.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.input_playback import keyboard
-
-DEFAULT_SEARCH_ENGINE_URL = 'google.com'
-
-
-class policy_DefaultSearchProviderEnabled(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Tests the DefaultSearchProviderEnabled policy in Chrome OS.
-    If the policy is set to True/Not Set then typing search queries in the
-    omnibox will result in searching on google.com.
-    If the policy is set to False then typing search queries in the
-    omnibox will not result in searching on google.com.
-
-    """
-    version = 1
-
-    def _default_search_provider_enabled(self, case):
-        """
-        Open a new tab and try using the omnibox as a search box.
-
-        @param case: policy value.
-
-        """
-
-        self.keyboard = keyboard.Keyboard()
-
-        # Open new tab.
-        self.keyboard.press_key('ctrl+t')
-
-        time.sleep(1)
-        # Input random characters into the omnibox and hit Enter. This will
-        # either perform the search or not.
-        self.keyboard.press_key('f')
-        self.keyboard.press_key('s')
-        self.keyboard.press_key('w')
-        self.keyboard.press_key('enter')
-
-        current_url = self.cr.browser.tabs[1].GetUrl()
-
-        if case is False:
-            if DEFAULT_SEARCH_ENGINE_URL in current_url:
-                raise error.TestFail(
-                    'Search engine is on in the omnibox and it should not be')
-
-        else:
-            if DEFAULT_SEARCH_ENGINE_URL not in current_url:
-                raise error.TestFail(
-                    'Search engine is off in the omnibox and it should be on')
-
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        POLICIES = {'DefaultSearchProviderEnabled': case}
-        self.setup_case(user_policies=POLICIES)
-        self._default_search_provider_enabled(case)
\ No newline at end of file
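
For reference, the verdict applied in _default_search_provider_enabled() reduces to the standalone sketch below: with the policy set to False the omnibox must not search on google.com, otherwise (True or unset) it must. The URLs are illustrative.

# Standalone sketch of the DefaultSearchProviderEnabled verdict.

DEFAULT_SEARCH_ENGINE_URL = 'google.com'

def search_behaves_correctly(case, current_url):
    """Return True if the landed URL matches what the policy value allows."""
    searched = DEFAULT_SEARCH_ENGINE_URL in current_url
    return not searched if case is False else searched

assert search_behaves_correctly(True, 'https://www.google.com/search?q=fsw')
assert search_behaves_correctly(None, 'https://www.google.com/search?q=fsw')
assert search_behaves_correctly(False, 'chrome://newtab/')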
diff --git a/client/site_tests/policy_DeveloperToolsAvailability/control b/client/site_tests/policy_DeveloperToolsAvailability/control
deleted file mode 100644
index dcdd483..0000000
--- a/client/site_tests/policy_DeveloperToolsAvailability/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'carvalheira'
-NAME = 'policy_DeveloperToolsAvailability'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of the DeveloperToolsAvailability policy.
-It verifies the Disabled, Available and DisabledOnExtensions states.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-job.run_test('policy_DeveloperToolsAvailability', **args_dict)
diff --git a/client/site_tests/policy_DeveloperToolsAvailability/control.Available b/client/site_tests/policy_DeveloperToolsAvailability/control.Available
deleted file mode 100644
index ebb5b20..0000000
--- a/client/site_tests/policy_DeveloperToolsAvailability/control.Available
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'carvalheira'
-NAME = 'policy_DeveloperToolsAvailability.Available'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of DeveloperToolsAvailability policy.
-If the policy is set to 1, then user should be able to open
-the Developer Tools console. If that's not the case, the test fails.
-
-'''
-
-job.run_test('policy_DeveloperToolsAvailability', case='Available')
diff --git a/client/site_tests/policy_DeveloperToolsAvailability/control.Disabled b/client/site_tests/policy_DeveloperToolsAvailability/control.Disabled
deleted file mode 100644
index 11dc326..0000000
--- a/client/site_tests/policy_DeveloperToolsAvailability/control.Disabled
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'carvalheira'
-NAME = 'policy_DeveloperToolsAvailability.Disabled'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of DeveloperToolsAvailability policy.
-If the policy is set to 2, then user should not be able to open
-the Developer Tools console. If that happens, the test fails.
-
-'''
-
-job.run_test('policy_DeveloperToolsAvailability', case='Disabled')
diff --git a/client/site_tests/policy_DeveloperToolsAvailability/control.DisabledOnExtensions b/client/site_tests/policy_DeveloperToolsAvailability/control.DisabledOnExtensions
deleted file mode 100644
index 9fa58a6..0000000
--- a/client/site_tests/policy_DeveloperToolsAvailability/control.DisabledOnExtensions
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'carvalheira'
-NAME = 'policy_DeveloperToolsAvailability.DisabledOnExtensions'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of DeveloperToolsAvailability policy.
-If the policy is set to 0, the Developer Tools are disabled on extensions
-installed by enterprise policy, and allowed in other contexts, then user should
-be able to open the Developer Tools console.
-
-'''
-
-job.run_test('policy_DeveloperToolsAvailability', case='DisabledOnExtensions')
diff --git a/client/site_tests/policy_DeveloperToolsAvailability/control.NotSet b/client/site_tests/policy_DeveloperToolsAvailability/control.NotSet
deleted file mode 100644
index dbd467f..0000000
--- a/client/site_tests/policy_DeveloperToolsAvailability/control.NotSet
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'carvalheira'
-NAME = 'policy_DeveloperToolsAvailability.NotSet'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of DeveloperToolsAvailability policy.
-If the policy is not set to any value, the user should be able to open
-the Developer Tools console. If that's not the case, the test fails.
-
-'''
-
-job.run_test('policy_DeveloperToolsAvailability', case='NotSet')
diff --git a/client/site_tests/policy_DeveloperToolsAvailability/policy_DeveloperToolsAvailability.py b/client/site_tests/policy_DeveloperToolsAvailability/policy_DeveloperToolsAvailability.py
deleted file mode 100644
index 964bda2..0000000
--- a/client/site_tests/policy_DeveloperToolsAvailability/policy_DeveloperToolsAvailability.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.input_playback import keyboard
-from telemetry.core import exceptions
-
-class policy_DeveloperToolsAvailability(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Tests the DeveloperToolsAvailable policy in Chrome OS.
-    If the policy is set to Available then the user will be able to open
-    a new Developer Tools console. If the policy is Disabled then the user
-    should not be able to open a new Developer Tools console. Forced is
-    not being tested.
-
-    """
-
-    version = 1
-
-
-    def initialize(self, **kwargs):
-        super(policy_DeveloperToolsAvailability, self).initialize(**kwargs)
-        self.keyboard = keyboard.Keyboard()
-        self.POLICY_NAME = 'DeveloperToolsAvailability'
-        self.POLICIES = {}
-        self.TEST_CASES = {
-            'NotSet': None,
-            'DisabledOnExtensions': 0,
-            'Available': 1,
-            'Disabled': 2}
-
-
-    def _check_developer_tools_availability(self, case):
-        """
-        Opens a new chrome://user-actions page and then tries to open the
-        Developer Tools console. To see whether the window actually opened,
-        the test checks what was recorded in user actions.
-
-        @param case: policy description.
-
-        """
-        page_scrape_cmd = (
-            'document.getElementById("user-actions-table").innerText;')
-        user_actions_tab = self.navigate_to_url('chrome://user-actions')
-
-        # The below shortcuts can be used to open Developer Tools, though in
-        # different tabs. The first one opens the Elements tab, the next two
-        # open the last used tab, and the final one opens the Console tab.
-
-        keys = ['ctrl+shift+c', 'ctrl+shift+i', 'f12', 'ctrl+shift+j']
-
-        for key in keys:
-            self.keyboard.press_key(key)
-
-        recorded_user_actions = (
-            user_actions_tab.EvaluateJavaScript(page_scrape_cmd))
-
-        if (case == 'Available' or case == 'DisabledOnExtensions'
-                or case == 'NotSet'):
-            if ('DevTools_ToggleWindow' not in recorded_user_actions and
-                'DevTools_ToggleConsole' not in recorded_user_actions):
-                    raise error.TestFail("Developer Tools didn't open, but"
-                        " should be allowed.")
-        elif case == 'Disabled':
-            if 'DevTools_ToggleWindow' in recorded_user_actions:
-                raise error.TestFail("Developer Tools opened and should "
-                    "have been disabled.")
-
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        self.POLICIES[self.POLICY_NAME] = case_value
-
-        try:
-            self.setup_case(user_policies=self.POLICIES)
-            self._check_developer_tools_availability(case)
-        except exceptions.TimeoutException:
-            if case != 'Disabled':
-                raise error.TestFail("Unexpected Timeout Exception")
-
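
For reference, the interpretation of the recorded chrome://user-actions text in _check_developer_tools_availability() can be captured in the standalone sketch below; the action strings mirror the ones the deleted test looked for, and the sample inputs are illustrative.

# Standalone sketch of the pass rule per DeveloperToolsAvailability case.

def devtools_opened(recorded_user_actions):
    """Return True if any DevTools toggle action was recorded."""
    return ('DevTools_ToggleWindow' in recorded_user_actions or
            'DevTools_ToggleConsole' in recorded_user_actions)

def case_passes(case, recorded_user_actions):
    """Apply the same verdict as _check_developer_tools_availability()."""
    if case == 'Disabled':
        return 'DevTools_ToggleWindow' not in recorded_user_actions
    # 'Available', 'DisabledOnExtensions' and 'NotSet' expect DevTools to open.
    return devtools_opened(recorded_user_actions)

assert case_passes('Available', 'DevTools_ToggleWindow\nDevTools_ToggleConsole')
assert case_passes('Disabled', 'Tab_NewTab')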
diff --git a/client/site_tests/policy_DeviceAllowBluetooth/control b/client/site_tests/policy_DeviceAllowBluetooth/control
deleted file mode 100644
index 81cbe1b..0000000
--- a/client/site_tests/policy_DeviceAllowBluetooth/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceAllowBluetooth'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies whether the Bluetooth option is available in the status tray or not.
-This client test is kicked off via the policy_DeviceServer server test only.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_DeviceAllowBluetooth', **args_dict)
diff --git a/client/site_tests/policy_DeviceAllowBluetooth/policy_DeviceAllowBluetooth.py b/client/site_tests/policy_DeviceAllowBluetooth/policy_DeviceAllowBluetooth.py
deleted file mode 100644
index bc94dd2..0000000
--- a/client/site_tests/policy_DeviceAllowBluetooth/policy_DeviceAllowBluetooth.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_DeviceAllowBluetooth(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test for the DeviceAllowBluetooth policy.
-
-    If the policy is set to True/None then bluetooth button is available in
-    status tray menu. If the policy is set to False then bluetooth button
-    is not available.
-
-    """
-    version = 1
-    _POLICY = 'DeviceAllowBluetooth'
-
-    def _is_bluetooth_button_present(self, ext):
-        bt_present = ext.EvaluateJavaScript("""
-        var root;
-        chrome.automation.getDesktop(r => root = r);
-        bt = root.find({attributes: {role: "button", name: /Bluetooth/}});
-        bt;
-        """)
-        if bt_present is None:
-            return False
-        return True
-
-    def bluetooth_check(self, case):
-        # Click the status tray button in bottom right.
-        ext = self.cr.autotest_ext
-        ext.ExecuteJavaScript("""
-        chrome.automation.getDesktop(root => {
-            var button_to_click = root.find(
-                {attributes: {
-                    role: "button", name: /Status tray/}}).doDefault();
-        });
-        """)
-        time.sleep(1)
-
-        bluetooth_button = self._is_bluetooth_button_present(ext)
-
-        if case is False:
-            if bluetooth_button:
-                raise error.TestFail(
-                    'Bluetooth option is available and it should not be')
-        else:
-            if not bluetooth_button:
-                raise error.TestFail(
-                    'Bluetooth option should be available but it is not.')
-
-    def run_once(self, case):
-        """
-        Entry point of this test.
-
-        @param case: True, False, or None for the value of the policy.
-
-        """
-        self.setup_case(
-                device_policies={self._POLICY: case},
-                enroll=True)
-        self.bluetooth_check(case)
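
For reference, the verdict in bluetooth_check() reduces to the standalone sketch below: the status-tray Bluetooth button must be present unless the policy is explicitly False. This is illustrative Python, not autotest code.

# Standalone sketch of the DeviceAllowBluetooth verdict.

def bluetooth_button_expected(case):
    """True or None mean the status-tray Bluetooth button should be shown."""
    return case is not False

assert bluetooth_button_expected(True)
assert bluetooth_button_expected(None)
assert not bluetooth_button_expected(False)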
diff --git a/client/site_tests/policy_DeviceAutoUpdateDisabled/control b/client/site_tests/policy_DeviceAutoUpdateDisabled/control
deleted file mode 100644
index 7122655..0000000
--- a/client/site_tests/policy_DeviceAutoUpdateDisabled/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'kathrelkeld'
-NAME = 'policy_DeviceAutoUpdateDisabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Called through the policy_AUServer test only. Verifies whether the device
-can or cannot auto-update with the DeviceAutoUpdateDisabled policy set.
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_DeviceAutoUpdateDisabled', **args_dict)
diff --git a/client/site_tests/policy_DeviceAutoUpdateDisabled/policy_DeviceAutoUpdateDisabled.py b/client/site_tests/policy_DeviceAutoUpdateDisabled/policy_DeviceAutoUpdateDisabled.py
deleted file mode 100644
index d6e7b60..0000000
--- a/client/site_tests/policy_DeviceAutoUpdateDisabled/policy_DeviceAutoUpdateDisabled.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import math
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.cros.update_engine import nebraska_wrapper
-from autotest_lib.client.cros.update_engine import update_engine_test
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_DeviceAutoUpdateDisabled(
-        enterprise_policy_base.EnterprisePolicyTest,
-        update_engine_test.UpdateEngineTest):
-    """Test for the DeviceAutoUpdateDisabled policy."""
-    version = 1
-    _POLICY = 'DeviceAutoUpdateDisabled'
-
-
-    def _test_update_disabled(self, update_url, should_update):
-        """
-        Main test function.
-
-        Try to update and poll for start (or lack of start) to the update.
-        Check whether an update request was sent.
-
-        @param update_url: The URL to get an update from.
-        @param should_update: True or False whether the device should update.
-
-        """
-        # Log time is only accurate to the second. Assume no update request
-        # has occurred since the current whole second started.
-        start_time = math.floor(time.time())
-        logging.info('Update test start time: %s', start_time)
-
-        try:
-            self._check_for_update(update_url, interactive=False)
-
-            utils.poll_for_condition(
-                    self._is_update_started,
-                    timeout=60,
-                    exception=error.TestFail('Update did not start!'))
-        except error.TestFail as e:
-            if should_update:
-                raise e
-        else:
-            if not should_update:
-                raise error.TestFail('Update started when it should not have!')
-
-        update_time = self._get_time_of_last_update_request()
-        logging.info('Last update time: %s', update_time)
-
-        if should_update and (not update_time or update_time < start_time):
-            raise error.TestFail('No update request was sent!')
-        if not should_update and update_time and update_time >= start_time:
-            raise error.TestFail('Update request was sent!')
-
-
-    def run_once(self, case, image_url, enroll=True):
-        """
-        Entry point of this test.
-
-        @param case: True, False, or None for the value of the update policy.
-        @param image_url: Url of update image (this build).
-
-        """
-        # Because we are using multiple inheritance and EnterprisePolicyTest is
-        # earlier in the Python MRO, its initialize() will get called,
-        # but not the UpdateEngineTest's initialize(). So we need to call it
-        # manually.
-        update_engine_test.UpdateEngineTest.initialize(self)
-
-        self.setup_case(device_policies={self._POLICY: case}, enroll=enroll)
-
-        with nebraska_wrapper.NebraskaWrapper(
-            log_dir=self.resultsdir, payload_url=image_url,
-            target_version='999999.9.9') as nebraska:
-
-            self._create_custom_lsb_release(nebraska.get_update_url(),
-                                            build='1.1.1')
-
-            # When policy is False or not set, user should update.
-            self._test_update_disabled(nebraska.get_update_url(),
-                                       should_update=case is not True)
-
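
For reference, the timestamp check in _test_update_disabled() reduces to the standalone sketch below: an update request only counts if it was logged at or after the whole-second start time. The sketch is illustrative and independent of update_engine.

# Standalone sketch of the "was an update request sent?" check.

import math
import time

def update_request_sent(start_time, last_request_time):
    """Return True if an update request was logged after the test started."""
    return last_request_time is not None and last_request_time >= start_time

start = math.floor(time.time())
assert not update_request_sent(start, None)          # no request logged
assert not update_request_sent(start, start - 5)     # stale request
assert update_request_sent(start, start + 1)         # fresh request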
diff --git a/client/site_tests/policy_DeviceBootOnAcEnabled/control b/client/site_tests/policy_DeviceBootOnAcEnabled/control
deleted file mode 100644
index 5495c43..0000000
--- a/client/site_tests/policy_DeviceBootOnAcEnabled/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceBootOnAcEnabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of DeviceBootOnAcEnabled policy.
-
-This test is kicked off via policy_DeviceBootOnAcEnabled server test.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_DeviceBootOnAcEnabled', **args_dict)
diff --git a/client/site_tests/policy_DeviceBootOnAcEnabled/policy_DeviceBootOnAcEnabled.py b/client/site_tests/policy_DeviceBootOnAcEnabled/policy_DeviceBootOnAcEnabled.py
deleted file mode 100644
index 92ac46a..0000000
--- a/client/site_tests/policy_DeviceBootOnAcEnabled/policy_DeviceBootOnAcEnabled.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_DeviceBootOnAcEnabled(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """Test for setting the DeviceBootOnAcEnabled policy."""
-    version = 1
-    _POLICY = 'DeviceBootOnAcEnabled'
-
-
-    def run_once(self, case):
-        """
-        Entry point of this test.
-
-        @param case: True, False, or None for the value of the policy.
-
-        """
-        self.setup_case(device_policies={self._POLICY: case}, enroll=True)
diff --git a/client/site_tests/policy_DeviceCharging/control b/client/site_tests/policy_DeviceCharging/control
deleted file mode 100644
index b662982..0000000
--- a/client/site_tests/policy_DeviceCharging/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'ncrews'
-NAME = 'policy_DeviceCharging'
-TIME = 'LONG'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies that the DeviceBatteryChargeMode policy works.
-
-This test is kicked off via policy_DeviceChargingServer server test. It requires
-a Servo v4 and Servo Micro attached to the DUT. Also, it requires that the
-battery is not full, and that the battery is above |MIN_BATTERY_LEVEL|, so that
-the policies can get fully tested. The server test should take care of this
-setup.
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_DeviceCharging', **args_dict)
diff --git a/client/site_tests/policy_DeviceCharging/policy_DeviceCharging.py b/client/site_tests/policy_DeviceCharging/policy_DeviceCharging.py
deleted file mode 100644
index 205d8c6..0000000
--- a/client/site_tests/policy_DeviceCharging/policy_DeviceCharging.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros.enterprise import charging_policy_tests
-
-
-class policy_DeviceCharging(charging_policy_tests.ChargingPolicyTest):
-    """
-    Client test for device policies that change charging behavior.
-
-    Everything is taken care of in the superclass.
-    """
-    version = 1
diff --git a/client/site_tests/policy_DeviceDockMacAddressSource/contol b/client/site_tests/policy_DeviceDockMacAddressSource/contol
deleted file mode 100644
index 1f6a3dc..0000000
--- a/client/site_tests/policy_DeviceDockMacAddressSource/contol
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceDockMacAddressSource'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of DeviceDockMacAddressSource policy.
-
-This test is kicked off via policy_DeviceDockMacAddressSource server test.
-
-This test checks which MAC address will be used when a
-dock is connected to the device.
-
-1 = Device's designated dock MAC address
-2 = Device's built-in NIC MAC address
-3 = Dock's built-in NIC MAC address
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_DeviceDockMacAddressSource', **args_dict)
diff --git a/client/site_tests/policy_DeviceDockMacAddressSource/policy_DeviceDockMacAddressSource.py b/client/site_tests/policy_DeviceDockMacAddressSource/policy_DeviceDockMacAddressSource.py
deleted file mode 100644
index fe2ba0e..0000000
--- a/client/site_tests/policy_DeviceDockMacAddressSource/policy_DeviceDockMacAddressSource.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_DeviceDockMacAddressSource(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """Test for setting the DeviceDockMacAddressSource policy.
-
-    This test checks which MAC address will be used when a
-    dock is connected to the device.
-
-    """
-    version = 1
-    _POLICY = 'DeviceDockMacAddressSource'
-
-
-    def _get_device_name(self):
-        """Figure out which ethernet port is the dut.
-
-        Since dut is the one plugged into the internet it's the one
-        that has 'BROADCAST' and 'state UP' in 'ip link'.
-        Example: "2: eth1: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500"
-        " qdisc mq state UP".
-        """
-        active_ethernet = utils.run("ip link | grep 'BROADCAST.*state UP'")
-        device_name = active_ethernet.stdout.split(":")
-        device_name = device_name[1].lstrip()
-        return device_name
-
-
-    def _get_dock_mac(self, device_name):
-        """Determine the dock's mac address.
-
-        This is done via looking for an "eth" interface in /sys/class/net,
-        that is NOT the interface currently in use by the device. E.g. if
-        the "device_name" is "eth0", we are looking for an interface that
-        has the name "eth" but not "eth0" (such as eth1").
-        """
-        dock_ethernet = utils.run(
-            "ls /sys/class/net/ | grep -v {} | grep 'eth'".format(
-                device_name))
-        dock_ethernet = dock_ethernet.stdout.rstrip()
-        dock_mac = utils.run(
-            'cat /sys/class/net/{}/address'.format(dock_ethernet))
-        dock_mac = dock_mac.stdout.lower().rstrip()
-        return dock_mac
-
-
-    def _get_dut_mac(self, device_name):
-        """Grab duts's mac address."""
-        dut_mac = utils.run(
-            'cat /sys/class/net/{}/address'.format(device_name))
-        dut_mac = dut_mac.stdout.lower().rstrip()
-        return dut_mac
-
-
-    def _get_designated_mac(self):
-        """Device's designated dock MAC address."""
-        desig_mac = utils.run('vpd -g dock_mac')
-        desig_mac = desig_mac.stdout.lower().rstrip()
-        return desig_mac
-
-
-    def run_once(self, case, enroll=True, check_mac=False):
-        """
-        Entry point of this test.
-
-        @param case: Name of the test case: 'designated_mac', 'device_mac',
-            or 'dock_mac'.
-        @param enroll: Whether to enroll the device and apply the policy.
-        @param check_mac: Whether to verify the resulting MAC addresses.
-
-        """
-
-        TEST_CASES = {
-            'designated_mac': 1,
-            'device_mac': 2,
-            'dock_mac': 3
-        }
-
-        if enroll:
-            case_value = TEST_CASES[case]
-            self.setup_case(
-                device_policies={self._POLICY: case_value}, enroll=enroll)
-
-        if check_mac:
-            device_name = self._get_device_name()
-            dock_mac = self._get_dock_mac(device_name)
-            dut_mac = self._get_dut_mac(device_name)
-            desig_mac = self._get_designated_mac()
-
-            if case == 'designated_mac':
-                if dock_mac != desig_mac:
-                    raise error.TestFail(
-                        'Dock MAC address should match the designated MAC '
-                        'address and it does not.')
-            elif case == 'device_mac':
-                if dut_mac != dock_mac:
-                    raise error.TestFail(
-                        'Dock MAC address should match the device MAC '
-                        'address and it does not.')
-            else:
-                if dock_mac == dut_mac or dock_mac == desig_mac:
-                    raise error.TestFail(
-                        'Dock MAC should not match any other MAC addresses '
-                        'but it does.')
diff --git a/client/site_tests/policy_DeviceEphemeralUsersEnabled/control b/client/site_tests/policy_DeviceEphemeralUsersEnabled/control
deleted file mode 100644
index 0fe97fc..0000000
--- a/client/site_tests/policy_DeviceEphemeralUsersEnabled/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceEphemeralUsersEnabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Called through the policy_DeviceEphemeralUsersEnabled test only. Verifies
-whether the ephemeral_users_enabled policy is set on the device or not.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_DeviceEphemeralUsersEnabled', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_DeviceEphemeralUsersEnabled/policy_DeviceEphemeralUsersEnabled.py b/client/site_tests/policy_DeviceEphemeralUsersEnabled/policy_DeviceEphemeralUsersEnabled.py
deleted file mode 100644
index 46700c7..0000000
--- a/client/site_tests/policy_DeviceEphemeralUsersEnabled/policy_DeviceEphemeralUsersEnabled.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import retry
-
-from autotest_lib.client.cros import cryptohome
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-from py_utils import TimeoutException
-
-class policy_DeviceEphemeralUsersEnabled(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """Test for the DeviceEphemeralUsersEnabled policy."""
-    version = 1
-    _POLICY = 'DeviceEphemeralUsersEnabled'
-
-    def verify_permanent_vault(self, case):
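-        """Verify the cryptohome vault state matches the policy setting.
-
-        @param case: True, False, or None for the value of the policy.
-
-        """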
-        if case and cryptohome.is_permanent_vault_mounted(
-                user=enterprise_policy_base.USERNAME):
-            raise error.TestFail(
-                    'User should not be permanently vaulted in '
-                    'Ephemeral mode.')
-
-        if not case:
-            cryptohome.is_permanent_vault_mounted(
-                user=enterprise_policy_base.USERNAME, allow_fail=True)
-
-    # Prevents client tests that are kicked off via server tests from flaking.
-    @retry.retry(TimeoutException, timeout_min=5, delay_sec=10)
-    def _run_setup_case(self, case):
-        self.setup_case(device_policies={self._POLICY: case}, enroll=True)
-
-    def run_once(self, case):
-        """
-        Entry point of this test.
-
-        @param case: True, False, or None for the value of the policy.
-
-        """
-        self._run_setup_case(case)
-        self.verify_permanent_vault(case)
\ No newline at end of file
diff --git a/client/site_tests/policy_DeviceScheduledCharging/control b/client/site_tests/policy_DeviceScheduledCharging/control
deleted file mode 100644
index 06eb219..0000000
--- a/client/site_tests/policy_DeviceScheduledCharging/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'ncrews'
-NAME = 'policy_DeviceScheduledCharging'
-TIME = 'LONG'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies that the DeviceAdvancedBatteryChargeMode and DevicePowerPeakShift
-polices work.
-
-This test is kicked off via policy_DeviceChargingServer server test. It requires
-a Servo v4 and Servo Micro attached to the DUT. Also, it requires that the
-battery is not full, and that the battery is above |MIN_BATTERY_LEVEL|, so that
-the policies can get fully tested. The server test should take care of this
-setup.
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_DeviceScheduledCharging', **args_dict)
diff --git a/client/site_tests/policy_DeviceScheduledCharging/policy_DeviceScheduledCharging.py b/client/site_tests/policy_DeviceScheduledCharging/policy_DeviceScheduledCharging.py
deleted file mode 100644
index 810de77..0000000
--- a/client/site_tests/policy_DeviceScheduledCharging/policy_DeviceScheduledCharging.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.cros import upstart
-from autotest_lib.client.cros.enterprise import charging_policy_tests
-
-
-class policy_DeviceScheduledCharging(charging_policy_tests.ChargingPolicyTest):
-    """
-    Variant of ChargingPolicyTest for schedule-based charging policies. As of
-    this writing, these features are only present on the Wilco platform.
-
-    This variation of ChargingPolicyTest only has to do a bit of warmup and
-    cleanup before and after each call to run_once(). Users should assume that
-    the EC thinks the time is |MOCK_TIME|.
-    """
-    version = 1
-
-    SYNC_EC_RTC_UPSTART_JOB = 'wilco_sync_ec_rtc'
-    # Noon on a Monday.
-    MOCK_TIME = '1/1/01 12:00:00'
-
-    def warmup(self):
-        """
-        For the first step in the test we set the EC's RTC to a consistent,
-        mock time. The EC, or Embedded Controller, is a microcontroller
-        separate from the main system-on-a-chip. The EC controls charge
-        scheduling, among other things. Setting the EC's RTC allows us to
-        use a hardcoded list of schedules as our test cases. We also need to
-        disable the upstart job that keeps the EC's RTC in sync with the
-        local time of the DUT.
-        """
-        super(policy_DeviceScheduledCharging, self).warmup()
-        upstart.stop_job(self.SYNC_EC_RTC_UPSTART_JOB)
-        utils.set_hwclock(
-                time=self.MOCK_TIME,
-                utc=False,
-                rtc='/dev/rtc1',
-                noadjfile=True)
-
-    def cleanup(self):
-        """
-        Get the DUT back to a clean state after messing with it in warmup().
-        """
-        utils.set_hwclock(
-                time='system', utc=False, rtc='/dev/rtc1', noadjfile=True)
-        upstart.restart_job(self.SYNC_EC_RTC_UPSTART_JOB)
-        super(policy_DeviceScheduledCharging, self).cleanup()
diff --git a/client/site_tests/policy_DisableScreenshots/Screenshooter/background.js b/client/site_tests/policy_DisableScreenshots/Screenshooter/background.js
deleted file mode 100644
index c177fed..0000000
--- a/client/site_tests/policy_DisableScreenshots/Screenshooter/background.js
+++ /dev/null
@@ -1,29 +0,0 @@
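-// Set the active tab's document title to the given string.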
-function setTitle(title) {
-    chrome.tabs.executeScript({
-        code: 'document.title = "' + title + '"'
-    });
-}
-
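-// On the extension's keyboard command, ask the content script for the
-// capture method named in the page title, invoke that capture API, and
-// write its return value back into the document title for the test to read.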
-chrome.commands.onCommand.addListener((command) => {
-    if (command === 'activeTab') {
-        chrome.tabs.query({active: true, currentWindow: true}, (tabs) => {
-            chrome.tabs.sendMessage(tabs[0].id, {text: 'title'}, (method) => {
-                if (method === 'captureVisibleTab') {
-                    chrome.tabs.captureVisibleTab((img) => {
-                        setTitle(img);
-                    });
-                } else if (method === 'tabCapture') {
-                    chrome.tabCapture.capture({video: true}, (stream) => {
-                        setTitle(stream);
-                    });
-                } else if (method === 'desktopCapture') {
-                    chrome.desktopCapture.chooseDesktopMedia(
-                        ['screen', 'window', 'tab'], (streamId) => {
-                            setTitle(streamId);
-                        }
-                    );
-                }
-            });
-        });
-    }
-});
diff --git a/client/site_tests/policy_DisableScreenshots/Screenshooter/content.js b/client/site_tests/policy_DisableScreenshots/Screenshooter/content.js
deleted file mode 100644
index d5966ce..0000000
--- a/client/site_tests/policy_DisableScreenshots/Screenshooter/content.js
+++ /dev/null
@@ -1,5 +0,0 @@
-chrome.runtime.onMessage.addListener((msg, sender, sendResponse) => {
-    if (msg.text === 'title') {
-        sendResponse(document.title);
-    }
-});
diff --git a/client/site_tests/policy_DisableScreenshots/Screenshooter/manifest.json b/client/site_tests/policy_DisableScreenshots/Screenshooter/manifest.json
deleted file mode 100644
index d47ce33..0000000
--- a/client/site_tests/policy_DisableScreenshots/Screenshooter/manifest.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
-  "manifest_version": 2,
-
-  "name": "Screenshooter",
-  "description": "Take screenshots with ALL the APIs",
-  "version": "1.0",
-
-  "background": {
-      "scripts": ["background.js"],
-      "persistent": false
-  },
-
-  "content_scripts": [
-      {
-          "matches": ["<all_urls>"],
-          "js": ["content.js"]
-      }
-  ],
-
-  "commands": {
-      "activeTab": {
-          "suggested_key": {
-              "default": "Ctrl+Shift+Y"
-          },
-          "description": "Enable activeTab permission for tabCapture"
-      }
-  },
-
-  "permissions": [
-      "desktopCapture",
-      "tabCapture",
-      "<all_urls>"
-  ]
-}
diff --git a/client/site_tests/policy_DisableScreenshots/control b/client/site_tests/policy_DisableScreenshots/control
deleted file mode 100644
index 0833f43..0000000
--- a/client/site_tests/policy_DisableScreenshots/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_DisableScreenshots'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of DisableScreenshots policy on Chrome OS behavior.
-
-This test verifies the effect of the DisableScreenshots user policy on
-Chrome OS client behavior. It exercises all valid policy values across three
-test cases: 'DisableScreenshot_Block', 'False_Allow', and 'NotSet_Allow'.
-
-This default control file allows CloudDPC E2E tests to run any test case
-defined in this test via command-line.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_DisableScreenshots', **args_dict)
diff --git a/client/site_tests/policy_DisableScreenshots/control.disablescreenshot_block b/client/site_tests/policy_DisableScreenshots/control.disablescreenshot_block
deleted file mode 100644
index eea4691..0000000
--- a/client/site_tests/policy_DisableScreenshots/control.disablescreenshot_block
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_DisableScreenshots.DisableScreenshot_Block'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of DisableScreenshots policy on Chrome OS behavior
-
-This test case verifies the behavior and appearance of Chrome OS when the
-DisableScreenshots user policy is set to True.
-
-'''
-
-job.run_test('policy_DisableScreenshots', case='DisableScreenshot_Block')
diff --git a/client/site_tests/policy_DisableScreenshots/control.false_allow b/client/site_tests/policy_DisableScreenshots/control.false_allow
deleted file mode 100644
index db999d8..0000000
--- a/client/site_tests/policy_DisableScreenshots/control.false_allow
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_DisableScreenshots.False_Allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of DisableScreenshots policy on Chrome OS behavior
-
-This test case verifies the behavior and appearance of Chrome OS when the
-DisableScreenshots user policy is set to False.
-
-'''
-
-job.run_test('policy_DisableScreenshots', case='False_Allow')
diff --git a/client/site_tests/policy_DisableScreenshots/control.notSet_allow b/client/site_tests/policy_DisableScreenshots/control.notSet_allow
deleted file mode 100644
index 9bf9c61..0000000
--- a/client/site_tests/policy_DisableScreenshots/control.notSet_allow
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_DisableScreenshots.NotSet_Allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of DisableScreenshots policy on Chrome OS behavior
-
-This test case verifies the behavior and appearance of Chrome OS when the
-DisableScreenshots user policy is not set.
-
-'''
-
-job.run_test('policy_DisableScreenshots', case='NotSet_Allow')
diff --git a/client/site_tests/policy_DisableScreenshots/keyboard_ctrl+shift+y b/client/site_tests/policy_DisableScreenshots/keyboard_ctrl+shift+y
deleted file mode 100644
index f122fce..0000000
--- a/client/site_tests/policy_DisableScreenshots/keyboard_ctrl+shift+y
+++ /dev/null
@@ -1,24 +0,0 @@
-E: 1519424992.660594 0004 0004 28
-E: 1519424992.660594 0001 001c 0
-E: 1519424992.660594 0000 0000 0
-E: 1519424993.111768 0004 0004 29
-E: 1519424993.111768 0001 001d 1
-E: 1519424993.111768 0000 0000 0
-E: 1519424993.362599 0004 0004 29
-E: 1519424993.362599 0001 001d 2
-E: 1519424993.362599 0000 0000 0
-E: 1519424993.389394 0004 0004 42
-E: 1519424993.389394 0001 002a 1
-E: 1519424993.389394 0000 0000 0
-E: 1519424993.635911 0004 0004 21
-E: 1519424993.635911 0001 0015 1
-E: 1519424993.635911 0000 0000 0
-E: 1519424993.722326 0004 0004 21
-E: 1519424993.722326 0001 0015 0
-E: 1519424993.722326 0000 0000 0
-E: 1519424993.867139 0004 0004 42
-E: 1519424993.867139 0001 002a 0
-E: 1519424993.867139 0000 0000 0
-E: 1519424993.888691 0004 0004 29
-E: 1519424993.888691 0001 001d 0
-E: 1519424993.888691 0000 0000 0
diff --git a/client/site_tests/policy_DisableScreenshots/policy_DisableScreenshots.py b/client/site_tests/policy_DisableScreenshots/policy_DisableScreenshots.py
deleted file mode 100644
index 6fd0906..0000000
--- a/client/site_tests/policy_DisableScreenshots/policy_DisableScreenshots.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import logging
-import os
-import utils
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.input_playback import input_playback
-
-POLL_TIMEOUT = 5
-POLL_FREQUENCY = 0.5
-
-
-class policy_DisableScreenshots(
-        enterprise_policy_base.EnterprisePolicyTest):
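-    """Test effect of the DisableScreenshots policy on Chrome OS behavior."""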
-    version = 1
-
-    def initialize(self, **kwargs):
-        """Emulate a keyboard in order to play back the screenshot shortcut."""
-        self._initialize_test_constants()
-        super(policy_DisableScreenshots, self).initialize(**kwargs)
-        self.player = input_playback.InputPlayback()
-        self.player.emulate(input_type='keyboard')
-        self.player.find_connected_inputs()
-
-
-    def _initialize_test_constants(self):
-        """Initialize test-specific constants, some from class constants."""
-        self.POLICY_NAME = 'DisableScreenshots'
-        self._DOWNLOADS = '/home/chronos/user/Downloads/'
-        self._SCREENSHOT_PATTERN = 'Screenshot*'
-        self._SCREENSHOT_FILENAME = self._DOWNLOADS + self._SCREENSHOT_PATTERN
-
-        self.TEST_CASES = {
-            'DisableScreenshot_Block': True,
-            'False_Allow': False,
-            'NotSet_Allow': None
-        }
-
-        # Possible API methods to capture the screen
-        self.CAPTURE_CMDS = [
-                'captureVisibleTab',
-                # TODO(timkovich): https://crbug.com/839630
-                # 'tabCapture',
-                # TODO(timkovich): https://crbug.com/817497
-                # 'desktopCapture'
-        ]
-
-
-    def _screenshot_file_exists(self):
-        """
-        Check if a screenshot file was created by the keyboard shortcut.
-
-        @returns boolean indicating whether a screenshot file was saved.
-
-        """
-        try:
-            utils.poll_for_condition(
-                    lambda: len(glob.glob(self._SCREENSHOT_FILENAME)) > 0,
-                    timeout=POLL_TIMEOUT,
-                    sleep_interval=POLL_FREQUENCY)
-        except utils.TimeoutError:
-            logging.info('Screenshot file not found.')
-            return False
-
-        logging.info('Screenshot file found.')
-        return True
-
-
-    def _delete_screenshot_files(self):
-        """Delete existing screenshot files, if any."""
-        for filename in glob.glob(self._SCREENSHOT_FILENAME):
-            os.remove(filename)
-
-
-    def cleanup(self):
-        """Cleanup files created in this test, if any and close the player."""
-        self._delete_screenshot_files()
-        self.player.close()
-        super(policy_DisableScreenshots, self).cleanup()
-
-
-    def _test_screenshot_shortcut(self, policy_value):
-        """
-        Verify DisableScreenshots is enforced for the screenshot shortcut.
-
-        When the DisableScreenshots policy value is undefined or False,
-        screenshots shall be captured via the keyboard shortcut Ctrl + F5.
-        When the DisableScreenshots policy is set to True, screenshots shall
-        not be captured.
-
-        @param policy_value: policy value for this case.
-
-        """
-        logging.info('Deleting preexisting Screenshot files.')
-        self._delete_screenshot_files()
-
-        # Keyboard shortcut for screenshots
-        self.player.blocking_playback_of_default_file(
-                input_type='keyboard', filename='keyboard_ctrl+f5')
-
-        screenshot_file_captured = self._screenshot_file_exists()
-        if policy_value:
-            if screenshot_file_captured:
-                raise error.TestFail('Screenshot should not be captured')
-        elif not screenshot_file_captured:
-            raise error.TestFail('Screenshot should be captured')
-
-
-    def _test_screenshot_apis(self, policy_value):
-        """
-        Verify the DisableScreenshots policy blocks screen capture API calls.
-
-        Attempts to capture the screen using each of the screen capture
-        APIs. Captures should not happen when
-        policy_value is True and should happen in the other cases.
-
-        @param policy_value: policy value for this case
-
-        @raises error.TestFail: In the case where the capture behavior
-            does not match the policy value
-
-        """
-        tab = self.navigate_to_url('https://google.com')
-
-        current_dir = os.path.dirname(os.path.realpath(__file__))
-
-        for method in self.CAPTURE_CMDS:
-            # Set the document.title to the test name
-            tab.ExecuteJavaScript('document.title = "%s"' % method)
-
-            # Call the extension's shortcut to trigger the API call
-            self.player.blocking_playback(
-                    input_type='keyboard',
-                    filepath=os.path.join(current_dir, 'keyboard_ctrl+shift+y'))
-
-            # desktopCapture opens a prompt window that needs to be OKed
-            if method == 'desktopCapture':
-                self.player.blocking_playback_of_default_file(
-                        input_type='keyboard', filename='keyboard_enter')
-
-            # The document.title is used to pass information to and from
-            # the DOM and the extension. The return value of the screenshot
-            # API call is set to the document.title.
-            try:
-                utils.poll_for_condition(
-                        lambda: tab.EvaluateJavaScript(
-                            'document.title != "%s"' % method
-                        ),
-                        timeout=POLL_TIMEOUT)
-                capture = tab.EvaluateJavaScript('document.title')
-            except utils.TimeoutError:
-                capture = None
-
-            if capture == 'undefined':
-                capture = None
-
-            if policy_value:
-                if capture is not None:
-                    raise error.TestFail('Screen should not be captured. '
-                                         'method = %s, capture = %s'
-                                         % (method, capture))
-            elif capture is None:
-                raise error.TestFail('Screen should be captured. '
-                                     'method = %s, capture = %s'
-                                     % (method, capture))
-
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        self._extension_path = os.path.join(os.path.dirname(__file__),
-                                            'Screenshooter')
-
-        self.setup_case(user_policies={self.POLICY_NAME: case_value},
-                        extension_paths=[self._extension_path])
-
-        self._test_screenshot_shortcut(case_value)
-        self._test_screenshot_apis(case_value)
diff --git a/client/site_tests/policy_DownloadDirectory/control b/client/site_tests/policy_DownloadDirectory/control
deleted file mode 100644
index 8276715..0000000
--- a/client/site_tests/policy_DownloadDirectory/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DownloadDirectory'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_DownloadDirectory policy.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_DownloadDirectory', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_DownloadDirectory/control.GDrive b/client/site_tests/policy_DownloadDirectory/control.GDrive
deleted file mode 100644
index b5cc2c1..0000000
--- a/client/site_tests/policy_DownloadDirectory/control.GDrive
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DownloadDirectory.GDrive'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that when the 'DownloadDirectory' policy is set to Google Drive, files
-will download to the drive by default.
-
-'''
-
-job.run_test('policy_DownloadDirectory',
-             case=True)
diff --git a/client/site_tests/policy_DownloadDirectory/control.default b/client/site_tests/policy_DownloadDirectory/control.default
deleted file mode 100644
index 85d28ac..0000000
--- a/client/site_tests/policy_DownloadDirectory/control.default
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DownloadDirectory.default'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that when the 'DownloadDirectory' policy is not set, files
-will download to the Downloads folder by default.
-
-'''
-
-job.run_test('policy_DownloadDirectory',
-             case=None)
diff --git a/client/site_tests/policy_DownloadDirectory/policy_DownloadDirectory.py b/client/site_tests/policy_DownloadDirectory/policy_DownloadDirectory.py
deleted file mode 100644
index f1eed6c..0000000
--- a/client/site_tests/policy_DownloadDirectory/policy_DownloadDirectory.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import os
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-from telemetry.core import exceptions
-
-
-class policy_DownloadDirectory(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of DownloadDirectory policy on Chrome OS.
-
-    This policy can only be set to default or Google Drive. Test each case,
-    and verify the test download file goes to the proper location.
-
-    """
-    version = 1
-
-    POLICY_NAME = 'DownloadDirectory'
-    _DOWNLOAD_BASE = ('http://commondatastorage.googleapis.com/'
-                      'chromiumos-test-assets-public/audio_power/')
-    DOWNLOAD_DIR = '/home/chronos/user/Downloads/download'
-    POLICY_SETTING = {'PromptForDownloadLocation': False}
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        if case:
-            self.POLICY_SETTING[self.POLICY_NAME] = '${google_drive}'
-
-        self.setup_case(user_policies=self.POLICY_SETTING,
-                        real_gaia=True)
-
-        self.GDRIVE_DIR = self._get_Gdrive_path()
-        self._wait_for_mount_ready()
-        self._clear_test_locations()
-        self._the_test(case)
-        self._clear_test_locations()
-
-    def _get_Gdrive_path(self):
-        """Returns the path for the Google Drive Mountpoint."""
-        self.GDRIVE_BASE = self._get_mount(False)
-        return '{}/root/download'.format(self.GDRIVE_BASE)
-
-    def _download_test_file(self):
-        """Loads to the test URL which automatically downloads the test file."""
-        try:
-            self.navigate_to_url(self._DOWNLOAD_BASE)
-        # This page is empty with just a test download, so it will time out.
-        # TODO: crbug:1058141, investigate the telemetry crash caused here.
-        except (exceptions.TimeoutException, exceptions.InitializationError):
-            pass
-
-
-    def _the_test(self, case):
-        """
-        Download the test file and verify it is in the proper directory,
-        not in the incorrect one.
-
-        @param case: If the download location was set to Gdrive or not.
-
-        """
-        self._download_test_file()
-
-        if case:
-            if (not os.path.isfile(self.GDRIVE_DIR) or
-                    os.path.isfile(self.DOWNLOAD_DIR)):
-                raise error.TestError(
-                    'Download file not in Gdrive Dir, OR found in downloads.')
-        else:
-            if (not os.path.isfile(self.DOWNLOAD_DIR) or
-                    os.path.isfile(self.GDRIVE_DIR)):
-                raise error.TestError(
-                    'Download file in Gdrive Dir, OR not found in downloads.')
-
-    def _clear_test_locations(self):
-        """Deletes the 'download' file in both test locations."""
-        for test_dir in [self.DOWNLOAD_DIR, self.GDRIVE_DIR]:
-            try:
-                # Adding the * in case the file was downloaded multiple times.
-                utils.system_output('rm {}*'.format(test_dir))
-            # If there is no file present, rm fails with CmdError. That's OK!
-            except error.CmdError:
-                pass
-
-    def _wait_for_mount_ready(self):
-        """Wait for the mount to be ready."""
-        def _mount_ready():
-            try:
-                utils.system_output('ls {}/root/'.format(self.GDRIVE_BASE))
-                return True
-            except error.CmdError:
-                return False
-
-        utils.poll_for_condition(
-            lambda: (_mount_ready()),
-            exception=error.TestFail('Google Drive mount never became ready'),
-            timeout=15,
-            sleep_interval=1,
-            desc='Polling for mounts to be ready.')
-
-    def _get_mount(self, case):
-        """Get the Google Drive mount path."""
-        e_msg = 'Should have found mountpoint but did not!'
-        # It may take some time until drivefs is started, so poll for the
-        # mountpoint until timeout.
-        utils.poll_for_condition(
-            lambda: (self._find_drivefs_mount() is not None),
-            exception=error.TestFail(e_msg),
-            timeout=10,
-            sleep_interval=1,
-            desc='Polling for Google Drive to load.')
-
-        mountpoint = self._find_drivefs_mount()
-        return mountpoint
-
-    def _find_drivefs_mount(self):
-        """Return the mount point of the drive if found, else return None."""
-        for mount in utils.mounts():
-            if mount['type'] == 'fuse.drivefs':
-                return mount['dest']
-        return None
diff --git a/client/site_tests/policy_DriveDisabled/control b/client/site_tests/policy_DriveDisabled/control
deleted file mode 100644
index f5a4dbe..0000000
--- a/client/site_tests/policy_DriveDisabled/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DriveDisabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_DriveDisabled policy.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_DriveDisabled', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_DriveDisabled/control.disable b/client/site_tests/policy_DriveDisabled/control.disable
deleted file mode 100644
index 930812d..0000000
--- a/client/site_tests/policy_DriveDisabled/control.disable
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DriveDisabled.disable'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that the 'policy_DriveDisabled' policy will disable Google Drive access
-within Chrome OS.
-
-'''
-
-job.run_test('policy_DriveDisabled',
-             case='disable')
diff --git a/client/site_tests/policy_DriveDisabled/control.enable b/client/site_tests/policy_DriveDisabled/control.enable
deleted file mode 100644
index e3fa67c..0000000
--- a/client/site_tests/policy_DriveDisabled/control.enable
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DriveDisabled.enable'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that the 'policy_DriveDisabled' policy will enable Google Drive access
-within Chrome OS.
-
-'''
-
-job.run_test('policy_DriveDisabled',
-             case='enable')
diff --git a/client/site_tests/policy_DriveDisabled/control.not_set b/client/site_tests/policy_DriveDisabled/control.not_set
deleted file mode 100644
index 82ced05..0000000
--- a/client/site_tests/policy_DriveDisabled/control.not_set
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DriveDisabled.not_set'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that when the 'policy_DriveDisabled' policy is not set, Google Drive
-access within Chrome OS is allowed.
-
-'''
-
-job.run_test('policy_DriveDisabled',
-             case='not_set')
diff --git a/client/site_tests/policy_DriveDisabled/policy_DriveDisabled.py b/client/site_tests/policy_DriveDisabled/policy_DriveDisabled.py
deleted file mode 100644
index aa45c61..0000000
--- a/client/site_tests/policy_DriveDisabled/policy_DriveDisabled.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_DriveDisabled(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of policy_DriveDisabled policy on Chrome OS.
-
-    This test will set the policy, then check if the google drive is mounted
-    or not.
-
-    """
-    version = 1
-
-    POLICY_NAME = 'DriveDisabled'
-    case_value_lookup = {'enable': False,
-                         'disable': True,
-                         'not_set': None}
-
-    def run_once(self, case=None):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case = self.case_value_lookup[case]
-
-        self.setup_case(user_policies={self.POLICY_NAME: case},
-                        extra_chrome_flags=['--enable-features=DriveFS'],
-                        real_gaia=True)
-
-        self.check_mount(case)
-
-    def check_mount(self, case):
-        """
-        Poll for the drive setting. If the case is True (i.e. disabled), wait
-        another few seconds to ensure the drive doesn't start with a delay.
-
-        @param case: Value of the DriveDisabled setting.
-
-        """
-        if case:
-            e_msg = 'Should not have found mountpoint but did!'
-        else:
-            e_msg = 'Should have found mountpoint but did not!'
-        # It may take some time until drivefs is started, so poll for the
-        # mountpoint until timeout.
-        utils.poll_for_condition(
-            lambda: self.is_drive_properly_set(case),
-            exception=error.TestFail(e_msg),
-            timeout=10,
-            sleep_interval=1,
-            desc='Polling for the Google Drive mount state.')
-
-        # Because this is a negative case and the poll would likely return
-        # True immediately, we should wait the maximum duration and do a
-        # final check for the mount.
-        if case:
-            time.sleep(10)
-
-        mountpoint = self._find_drivefs_mount()
-
-        if case and mountpoint:
-            raise error.TestFail(e_msg)
-        if not case and not mountpoint:
-            raise error.TestFail(e_msg)
-
-    def is_drive_properly_set(self, case):
-        """
-        Check if the drive mount status matches the policy setting.
-
-        @param case: Value of the DriveDisabled setting.
-
-        """
-        if case:
-            if not self._find_drivefs_mount():
-                return True
-        else:
-            if self._find_drivefs_mount():
-                return True
-        return False
-
-    def _find_drivefs_mount(self):
-        """Return the mount point of the drive if found, else return None."""
-        for mount in utils.mounts():
-            if mount['type'] == 'fuse.drivefs':
-                return mount['dest']
-        return None
diff --git a/client/site_tests/policy_EnrollmentRetainment/control b/client/site_tests/policy_EnrollmentRetainment/control
deleted file mode 100644
index 2c40a30..0000000
--- a/client/site_tests/policy_EnrollmentRetainment/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_EnrollmentRetainment'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Called through the stress_EnrollmentRetainnment server test only. This test
-continuously restarts the DUT, logs in with a user set up in cpanel, and
-verifies that a device policy for that user is set.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_EnrollmentRetainment', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_EnrollmentRetainment/policy_EnrollmentRetainment.py b/client/site_tests/policy_EnrollmentRetainment/policy_EnrollmentRetainment.py
deleted file mode 100644
index 38e397c..0000000
--- a/client/site_tests/policy_EnrollmentRetainment/policy_EnrollmentRetainment.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-USERNAME = 'stressenroll@managedchrome.com'
-PASSWORD = 'test0000'
-
-
-class policy_EnrollmentRetainment(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """Stress tests the enrollment by continiously restarting."""
-    version = 1
-
-
-    def initialize(self, **kwargs):
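-        """Initialize the enterprise policy test with the prod environment."""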
-        self._initialize_enterprise_policy_test(
-            set_auto_logout=False,
-            env='prod',
-            username=USERNAME,
-            password=PASSWORD,
-            **kwargs)
-
-
-    def run_once(self):
-        """Entry point of this test."""
-
-        with chrome.Chrome(
-            clear_enterprise_policy=False,
-            expect_policy_fetch=True,
-            disable_gaia_services=False,
-            gaia_login=True,
-            username=USERNAME,
-            password=PASSWORD) as self.cr:
-            # Policy that is set on the cpanel side and is off by default.
-            self.verify_policy_value('DeviceAllowBluetooth', False)
diff --git a/client/site_tests/policy_EnterpriseForceInstallCustom/control b/client/site_tests/policy_EnterpriseForceInstallCustom/control
deleted file mode 100644
index 107f474..0000000
--- a/client/site_tests/policy_EnterpriseForceInstallCustom/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_EnterpriseForceInstallCustom'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of ArcPolicy EnterpriseForceInstall policy.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_EnterpriseForceInstallCustom', **args_dict)
diff --git a/client/site_tests/policy_EnterpriseForceInstallCustom/control.run b/client/site_tests/policy_EnterpriseForceInstallCustom/control.run
deleted file mode 100644
index 2243d78..0000000
--- a/client/site_tests/policy_EnterpriseForceInstallCustom/control.run
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_EnterpriseForceInstallCustom.run'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-DEPENDENCIES = "arc"
-
-DOC = '''
-Verify that the ARC EnterpriseForceInstall subpolicy determines whether an
-application is installed or not.
-
-'''
-
-job.run_test('policy_EnterpriseForceInstallCustom')
\ No newline at end of file
diff --git a/client/site_tests/policy_EnterpriseForceInstallCustom/policy_EnterpriseForceInstallCustom.py b/client/site_tests/policy_EnterpriseForceInstallCustom/policy_EnterpriseForceInstallCustom.py
deleted file mode 100644
index a5ab83f..0000000
--- a/client/site_tests/policy_EnterpriseForceInstallCustom/policy_EnterpriseForceInstallCustom.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-from autotest_lib.client.common_lib.cros import arc
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_EnterpriseForceInstallCustom(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of the ArcPolicy EnterpriseForceInstall subpolicy on Chrome OS.
-
-    """
-    version = 1
-
-    POLICY_NAME = 'ArcPolicy'
-    PACKAGE1 = "com.coffeebeanventures.easyvoicerecorder"
-
-    def _get_actual_policy_apps(self):
-        """
-        Return the apps listed in the ArcPolicy value with the value of
-        "FORCE_INSTALLED" or "BLOCKED".
-
-        @raises error.TestError if no apps are found.
-
-        @returns: App & policies that are set to force_install, or blocked.
-
-        """
-        policy_value = self._get_policy_value_from_new_tab(self.POLICY_NAME)
-        if not policy_value:
-            raise error.TestError('No value for ArcPolicy found!')
-
-        check_apps = []
-        checklist = set(['FORCE_INSTALLED', 'BLOCKED'])
-        app_settings = 'applications'
-        if app_settings not in policy_value:
-            raise error.TestError('ArcPolicy has no application settings!')
-
-        for app in policy_value[app_settings]:
-            if app['installType'] in checklist:
-                check_apps.append(app)
-        return check_apps
-
-    def _verify_force_apps_list(self):
-        """
-        Verify that the expected force-installed apps match the policy value.
-
-        """
-        controlled_apps = self._get_actual_policy_apps()
-        for pkg in controlled_apps:
-            self._verify_package_status(pkg['packageName'], pkg['installType'])
-
-    def _verify_package_status(self, pkg, installType):
-        """
-        Test that the package is installed/not installed as expected
-
-        @param pkg: Name of the package to check.
-        @param installType: Policy Setting of the app.
-
-        @raises error.TestError if any expected apps are not found.
-
-        """
-        type_lookup = {'FORCE_INSTALLED': True,
-                       'BLOCKED': False}
-        status = type_lookup[installType]
-
-        utils.poll_for_condition(
-            lambda: self._check_pkg(pkg, status),
-            exception=error.TestFail(
-                'Package {} not in expected state!'.format(pkg)),
-            timeout=240,
-            sleep_interval=1,
-            desc='Polling for package to reach the expected install state')
-
-    def _check_pkg(self, pkg, status):
-        """
-        Checks to see if the package state is proper.
-
-        @param pkg: Name of the package to check.
-        @param status: If the package should be installed or not.
-
-        @returns: True if the status is correct, else False.
-
-        """
-        if status:
-            return arc._is_in_installed_packages_list(pkg)
-        else:
-            return not arc._is_in_installed_packages_list(pkg)
-
-    def run_once(self):
-        """
-        Verify that the FORCE_INSTALLED and BLOCKED app settings work properly.
-
-        The test will iterate twice. The first run force-installs the app and
-        verifies it is loaded. The second blocks the app and verifies that
-        the app is uninstalled.
-
-        """
-        cases = ['FORCE_INSTALLED', 'BLOCKED']
-        for case in cases:
-
-            pol = self.policy_creator(case)
-            self.setup_case(user_policies=pol,
-                            arc_mode='enabled',
-                            use_clouddpc_test=False)
-
-            self._verify_force_apps_list()
-
-    def policy_creator(self, case):
-        """
-        Generates the policy value.
-
-        @param case: Whether the app should be installed or removed.
-
-        @returns: Policy value.
-        """
-        pol = {'ArcEnabled': True,
-               'ArcPolicy':
-                   {"installUnknownSourcesDisabled": False,
-                    "playDeviceLocalPolicyEnabled": True,
-                    "availableAppSetPolicy": "WHITELIST",
-                    "applications":
-                        [{"packageName": self.PACKAGE1,
-                          "defaultPermissionPolicy": "GRANT",
-                          "installType": case}]
-                    }
-               }
-        return pol
diff --git a/client/site_tests/policy_ExtensionPolicy/control b/client/site_tests/policy_ExtensionPolicy/control
deleted file mode 100644
index c363c79..0000000
--- a/client/site_tests/policy_ExtensionPolicy/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_ExtensionPolicy'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-policy_ExtensionPolicy verifies that changes to extension policies can be set
-and are reflected correctly in chrome://policy. Fields marked sensitive should
-have their value obfuscated.
-
-This test will only work when using the fake DM server.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ExtensionPolicy', **args_dict)
-
diff --git a/client/site_tests/policy_ExtensionPolicy/policy_ExtensionPolicy.py b/client/site_tests/policy_ExtensionPolicy/policy_ExtensionPolicy.py
deleted file mode 100644
index 6cb0fd1..0000000
--- a/client/site_tests/policy_ExtensionPolicy/policy_ExtensionPolicy.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import hashlib
-import logging
-import os
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_ExtensionPolicy(enterprise_policy_base.EnterprisePolicyTest):
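-    """Test that extension policies are set and shown in chrome://policy."""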
-    version = 1
-
-
-    def initialize(self, **kwargs):
-        """
-        Start webserver and set the extension policy file's path and checksum.
-
-        """
-        super(policy_ExtensionPolicy, self).initialize(**kwargs)
-        self.start_webserver()
-
-        # Location of the extension policy on the server.
-        POLICY_FILE = 'extension_policy.json'
-        policy_path = os.path.join(self.enterprise_dir, POLICY_FILE)
-        self.EXTENSION_POLICY_URL = '%s/%s' % (self.WEB_HOST, POLICY_FILE)
-        self.CHECKSUM = self.sha256sum(policy_path)
-
-
-    def sha256sum(self, filepath):
-        """
-        Generate the SHA256 checksum of |filepath|.
-
-        @param filepath: Path to file.
-
-        @returns: SHA256 checksum as a hex string.
-
-        """
-        with open(filepath, 'rb') as f:
-            return hashlib.sha256(f.read()).hexdigest()
-
-
-    def run_once(self):
-        """
-        Setup and run the test configured for the specified test case.
-
-        """
-        extension_path = os.path.join(os.path.dirname(__file__),
-                                      'policy_test_extension')
-
-        self.setup_case(extension_paths=[extension_path])
-
-        # The extension ID is required for setting the extension policy. But
-        # the extension ID is assigned randomly, so we need to force install
-        # the policy test extension first and then read its ID.
-        extension_id = self.cr.get_extension(extension_path).extension_id
-        extension_policies = {
-            extension_id: {
-                'download_url': self.EXTENSION_POLICY_URL,
-                'secure_hash': self.CHECKSUM
-            }
-        }
-
-        if self.dms_is_fake:
-            # Update the server policies with the extension policies.
-            self.add_policies(extension=extension_policies)
-
-        # Ensure fields marked sensitive are censored in the policy tab.
-        sensitive_fields = ['SensitiveStringPolicy', 'SensitiveDictPolicy']
-        self.verify_extension_stats(extension_policies,
-                                    sensitive_fields=sensitive_fields)
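For reference, the checksum-plus-policy handoff that policy_ExtensionPolicy relied on reduces to a few lines of plain Python; this is only an illustrative sketch, and the file name and extension ID below are placeholders rather than values from the test:

import hashlib

def sha256sum(filepath):
    """Return the SHA-256 checksum of |filepath| as a hex string."""
    with open(filepath, 'rb') as f:
        return hashlib.sha256(f.read()).hexdigest()

if __name__ == '__main__':
    # Placeholder values for illustration only.
    policy_file = 'extension_policy.json'
    extension_id = 'aaaabbbbccccddddeeeeffffgggghhhh'
    extension_policies = {
        extension_id: {
            'download_url': 'http://127.0.0.1:8080/%s' % policy_file,
            'secure_hash': sha256sum(policy_file),
        }
    }
    print(extension_policies)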
diff --git a/client/site_tests/policy_ExtensionPolicy/policy_test_extension/background.js b/client/site_tests/policy_ExtensionPolicy/policy_test_extension/background.js
deleted file mode 100644
index e69de29..0000000
--- a/client/site_tests/policy_ExtensionPolicy/policy_test_extension/background.js
+++ /dev/null
diff --git a/client/site_tests/policy_ExtensionPolicy/policy_test_extension/manifest.json b/client/site_tests/policy_ExtensionPolicy/policy_test_extension/manifest.json
deleted file mode 100644
index f9685dd..0000000
--- a/client/site_tests/policy_ExtensionPolicy/policy_test_extension/manifest.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
-  "manifest_version": 2,
-
-  "name": "Policy Extension",
-  "description": "Dummy extension for testing extension policies",
-  "version": "1.0",
-
-  "background": {
-      "scripts": ["background.js"],
-      "persistent": false
-  },
-
-  "storage": {
-      "managed_schema": "schema.json"
-  },
-
-  "permissions": [
-      "storage"
-  ]
-}
diff --git a/client/site_tests/policy_ExtensionPolicy/policy_test_extension/schema.json b/client/site_tests/policy_ExtensionPolicy/policy_test_extension/schema.json
deleted file mode 100644
index 096356c..0000000
--- a/client/site_tests/policy_ExtensionPolicy/policy_test_extension/schema.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
-  "type": "object",
-  "properties": {
-    "VisibleStringPolicy": {
-      "type": "string"
-    },
-    "SensitiveStringPolicy": {
-      "type": "string",
-      "sensitiveValue": true
-    },
-    "VisibleDictPolicy": {
-      "type": "object",
-      "properties": {
-        "some_bool": { "type": "boolean" },
-        "some_string": { "type": "string" }
-      }
-    },
-    "SensitiveDictPolicy": {
-      "type": "object",
-      "properties": {
-        "some_bool": { "type": "boolean" },
-        "some_string": { "type": "string" }
-      },
-      "sensitiveValue": true
-    }
-  }
-}
diff --git a/client/site_tests/policy_ExternalStorageDisabled/control b/client/site_tests/policy_ExternalStorageDisabled/control
deleted file mode 100644
index e96239a..0000000
--- a/client/site_tests/policy_ExternalStorageDisabled/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_ExternalStorageDisabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of ExternalStorageDisabled policy on Chrome OS behavior.
-
-This test verifies the effect of the ExternalStorageDisabled user policy on
-Chrome OS client behavior. It exercises all valid policy values across three
-test cases: 'True_Block', 'NotSet_Allow', and 'False_Allow'.
-
-This test requires external storage (such as a USB drive) to be connected to
-the DUT to pass. If running on a lab machine, trigger this through the
-policy_ExternalStorageDisabledServer server test.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ExternalStorageDisabled', **args_dict)
diff --git a/client/site_tests/policy_ExternalStorageDisabled/policy_ExternalStorageDisabled.py b/client/site_tests/policy_ExternalStorageDisabled/policy_ExternalStorageDisabled.py
deleted file mode 100644
index 2ef7633..0000000
--- a/client/site_tests/policy_ExternalStorageDisabled/policy_ExternalStorageDisabled.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_ExternalStorageDisabled(
-        enterprise_policy_base.EnterprisePolicyTest):
-    version = 1
-
-    POLICY_NAME = 'ExternalStorageDisabled'
-    TEST_CASES = {
-        'True_Block': True,
-        'False_Allow': False,
-        'NotSet_Allow': None
-    }
-
-    def _test_external_storage(self, policy_value):
-        """
-        Verify the behavior of the ExternalStorageDisabled policy.
-
-        Check the /media/removable directory and verify that it is empty if the
-        policy disables access to external storage, or not empty if external
-        storage is allowed.
-
-        @param policy_value: policy value for this case.
-
-        @raises error.TestFail: If the contents of the /media/removable
-            directory do not match the policy behavior.
-
-        """
-        removable_dir = os.listdir(os.path.join(os.sep, 'media', 'removable'))
-
-        if policy_value:
-            if removable_dir:
-                raise error.TestFail('External storage was disabled but '
-                                     'external storage detected')
-        elif not removable_dir:
-            raise error.TestFail('External storage enabled but external '
-                                 'storage not found')
-
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        self.setup_case(user_policies={self.POLICY_NAME: case_value})
-        self._test_external_storage(case_value)
diff --git a/client/site_tests/policy_ExternalStorageReadOnly/control b/client/site_tests/policy_ExternalStorageReadOnly/control
deleted file mode 100644
index 85a4e03..0000000
--- a/client/site_tests/policy_ExternalStorageReadOnly/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_ExternalStorageReadOnly'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of ExternalStorageReadOnly policy on Chrome OS behavior.
-
-This test verifies the effect of the ExternalStorageReadOnly user policy on
-Chrome OS client behavior. It exercises all valid policy values across three
-test cases: 'True_Block', 'NotSet_Allow', and 'False_Allow'.
-
-This test requires external storage (such as a USB drive) to be connected to
-the DUT to pass. If running on a lab machine, trigger this through the
-policy_ExternalStorageReadOnlyServer server test.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ExternalStorageReadOnly', **args_dict)
diff --git a/client/site_tests/policy_ExternalStorageReadOnly/policy_ExternalStorageReadOnly.py b/client/site_tests/policy_ExternalStorageReadOnly/policy_ExternalStorageReadOnly.py
deleted file mode 100644
index e508442..0000000
--- a/client/site_tests/policy_ExternalStorageReadOnly/policy_ExternalStorageReadOnly.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_ExternalStorageReadOnly(
-        enterprise_policy_base.EnterprisePolicyTest):
-    version = 1
-
-    POLICY_NAME = 'ExternalStorageReadOnly'
-    TEST_CASES = {
-        'True_Block': True,
-        'False_Allow': False,
-        'NotSet_Allow': None
-    }
-
-    TEST_DIR = os.path.join(os.sep, 'media', 'removable', 'STATE')
-    TEST_FILE = os.path.join(TEST_DIR, 'test')
-
-    def cleanup(self):
-        """Delete the test file, if it was created."""
-        try:
-            os.remove(self.TEST_FILE)
-        except OSError:
-            # The remove call fails if the file isn't created, but that's ok.
-            pass
-
-        super(policy_ExternalStorageReadOnly, self).cleanup()
-
-    def _test_external_storage(self, policy_value):
-        """
-        Verify the behavior of the ExternalStorageReadOnly policy.
-
-        Attempt to create TEST_FILE on the external storage. This should fail
-        if the policy is set to True and succeed otherwise.
-
-        @param policy_value: policy value for this case.
-
-        @raises error.TestFail: If the permissions of the /media/removable
-            directory do not match the policy behavior.
-
-        """
-        # Attempt to modify the external storage by creating a file.
-        if not os.path.isdir(self.TEST_DIR):
-            raise error.TestWarn('USB Missing. Exiting')
-        if os.path.isfile(self.TEST_FILE):
-            raise error.TestWarn('Test file existed prior to test.')
-        utils.run('touch %s' % self.TEST_FILE, ignore_status=True)
-        if policy_value and os.path.isfile(self.TEST_FILE):
-            raise error.TestFail('External storage set to read-only but '
-                                 'was able to write to storage.')
-        elif not policy_value and not os.path.isfile(self.TEST_FILE):
-            raise error.TestFail('External storage not read-only but '
-                                 'unable to write to storage.')
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        self.setup_case(user_policies={self.POLICY_NAME: case_value})
-        self._test_external_storage(case_value)
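The write probe at the heart of the deleted ReadOnly test can be sketched on its own; only the mount point below comes from the test, the rest is assumed:

import os
import subprocess

# Mount point taken from the deleted test; 'STATE' is the volume label it used.
TEST_FILE = os.path.join(os.sep, 'media', 'removable', 'STATE', 'test')

# Try to create the file, then check whether the write actually landed.
# With ExternalStorageReadOnly set to True the mount is read-only, so the
# touch fails and the file never appears.
subprocess.call(['touch', TEST_FILE])
print('write succeeded:', os.path.isfile(TEST_FILE))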
diff --git a/client/site_tests/policy_ForceYouTubeRestrict/control b/client/site_tests/policy_ForceYouTubeRestrict/control
deleted file mode 100644
index 129dd46..0000000
--- a/client/site_tests/policy_ForceYouTubeRestrict/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ForceYouTubeRestrict'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of ForceYouTubeRestrict policy.
-
-This test verifies four states of this policy: Strict, Moderate,
-Disabled and Not Set.
-
-If the policy is set to Strict then the user will not be able to view any
-restricted videos on YouTube. If the policy is set to Moderate then the
-user will not be able to watch some restricted videos. If the policy is
-Disabled or Not Set the user will be able to watch everything. In both cases,
-Strict and Moderate, the user will not be able to toggle restricted settings
-on the website.
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ForceYouTubeRestrict', **args_dict)
diff --git a/client/site_tests/policy_ForceYouTubeRestrict/control.Disabled b/client/site_tests/policy_ForceYouTubeRestrict/control.Disabled
deleted file mode 100644
index 0431b7a..0000000
--- a/client/site_tests/policy_ForceYouTubeRestrict/control.Disabled
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ForceYouTubeRestrict.Disabled'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of ForceYouTubeRestrict policy.
-
-If the policy is set to Disabled the user can view any videos and
-toggle restricted mode.
-
-'''
-
-job.run_test('policy_ForceYouTubeRestrict', case='Disabled')
\ No newline at end of file
diff --git a/client/site_tests/policy_ForceYouTubeRestrict/control.Moderate b/client/site_tests/policy_ForceYouTubeRestrict/control.Moderate
deleted file mode 100644
index 4540ab7..0000000
--- a/client/site_tests/policy_ForceYouTubeRestrict/control.Moderate
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ForceYouTubeRestrict.Moderate'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of ForceYouTubeRestrict policy.
-
-If the policy is set to Moderate then the user will not be able to view some
-restricted videos on YouTube.
-
-'''
-
-job.run_test('policy_ForceYouTubeRestrict', case='Moderate')
\ No newline at end of file
diff --git a/client/site_tests/policy_ForceYouTubeRestrict/control.NotSet b/client/site_tests/policy_ForceYouTubeRestrict/control.NotSet
deleted file mode 100644
index 164dacf..0000000
--- a/client/site_tests/policy_ForceYouTubeRestrict/control.NotSet
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ForceYouTubeRestrict.NotSet'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of ForceYouTubeRestrict policy.
-
-If the policy is Not Set the user can view any videos and
-toggle restricted mode.
-
-'''
-
-job.run_test('policy_ForceYouTubeRestrict', case='NotSet')
\ No newline at end of file
diff --git a/client/site_tests/policy_ForceYouTubeRestrict/control.Strict b/client/site_tests/policy_ForceYouTubeRestrict/control.Strict
deleted file mode 100644
index fb2b9f8..0000000
--- a/client/site_tests/policy_ForceYouTubeRestrict/control.Strict
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_ForceYouTubeRestrict.Strict'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies effects of ForceYouTubeRestrict policy.
-
-If the policy is set to Strict then the user will not be able to view any
-restricted videos on YouTube.
-
-'''
-
-job.run_test('policy_ForceYouTubeRestrict', case='Strict')
\ No newline at end of file
diff --git a/client/site_tests/policy_ForceYouTubeRestrict/policy_ForceYouTubeRestrict.py b/client/site_tests/policy_ForceYouTubeRestrict/policy_ForceYouTubeRestrict.py
deleted file mode 100644
index 71e1abd..0000000
--- a/client/site_tests/policy_ForceYouTubeRestrict/policy_ForceYouTubeRestrict.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-class policy_ForceYouTubeRestrict(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Tests the ForceYouTubeRestrict policy in Chrome OS.
-
-    If the policy is set to strict then the user will not be able to view any
-    restricted videos on YouTube. If the policy is set to moderate then the
-    user will not be able to watch some restricted videos. In both cases
-    the user will not be able to toggle restricted settings on the website.
-
-    Note: This test doesn't cover the ARC++ app.
-
-    """
-    version = 1
-
-    POLICY_NAME = 'ForceYouTubeRestrict'
-    TEST_CASES = {
-        'Strict': 2,
-        'Moderate': 1,
-        'Disabled': 0,
-        'NotSet': None}
-
-    def _search_for_adult_content(self):
-        SEARCH_QUERY = 'https://www.youtube.com/results?search_query=adult'
-        BURGER_MENU = (
-                "document.querySelector('* /deep/ #masthead-container /deep/"
-                " #end /deep/ ytd-topbar-menu-button-renderer:last-of-type').innerHTML;"
-        )
-        self.search_tab = self.navigate_to_url(SEARCH_QUERY)
-        utils.poll_for_condition(lambda: self.check_page_readiness(
-                self.search_tab, BURGER_MENU),
-                                 exception=error.TestFail(
-                                         'Page is not ready.'),
-                                 timeout=5,
-                                 sleep_interval=1)
-
-    def _open_restricted_menu(self):
-        BURGER_MENU_CLICK = (
-                "document.querySelector('* /deep/ #masthead-container /de"
-                "ep/ #end /deep/ ytd-topbar-menu-button-renderer:last-of-type').click();"
-        )
-        RESTRICTED_MENU_CLICK = """
-buttons=document.querySelectorAll('a#endpoint.yt-simple-endpoint.style-scope.ytd-compact-link-renderer');
-for (let i = 0; i < buttons.length; i++) {
-  button=buttons[i];
-  if (button.innerText.startsWith("Restricted Mode:")) {
-     button.click();
-     break;
-  }
-}
-"""
-        self.search_tab.EvaluateJavaScript(BURGER_MENU_CLICK)
-        time.sleep(1)
-        self.search_tab.EvaluateJavaScript(RESTRICTED_MENU_CLICK)
-        time.sleep(1)
-
-    def _restricted_mode_by_policy(self):
-        RESTRICTED_MODE_SELECTOR = "document.querySelector('ytd-text-header-renderer.style-scope.ytd-section-list-renderer').innerText"
-        return self.search_tab.EvaluateJavaScript(RESTRICTED_MODE_SELECTOR)
-
-    def _restricted_mode_by_policy_strict(self):
-        return "Restricted Mode is enabled by your network administrator" in \
-           self._restricted_mode_by_policy()
-
-    def _restricted_mode_by_policy_moderate(self):
-        return self._restricted_mode_by_policy_strict()
-
-    def _get_content(self, restriction_type_url):
-        """
-        Checks the contents of the watch page.
-
-        @param restriction_type: URL with either strict or moderate content.
-
-        @returns text content of the element with video status.
-
-        """
-        VERIFY_VIDEO_NOT_LOADED_CMD = ("document.getElementById"
-                                       "('error-screen').innerText;")
-        active_tab = self.navigate_to_url(restriction_type_url)
-        utils.poll_for_condition(
-            lambda: self.check_page_readiness(
-                active_tab, VERIFY_VIDEO_NOT_LOADED_CMD),
-            exception=error.TestFail('Page is not ready.'),
-            timeout=5,
-            sleep_interval=1)
-        return active_tab.EvaluateJavaScript(VERIFY_VIDEO_NOT_LOADED_CMD)
-
-    def _strict_content(self):
-        RESTRICTED_ONLY_ON_STRICT = 'https://www.youtube.com/watch?v=Fmwfmee2ZTE'
-        return self._get_content(RESTRICTED_ONLY_ON_STRICT)
-
-    def _moderated_content(self):
-        RESTRICTED_ON_MODERATE = 'https://www.youtube.com/watch?v=yR79oLrI1g4'
-        return self._get_content(RESTRICTED_ON_MODERATE)
-
-    def _check_restricted_mode(self, case):
-        """
-        Checks restricted settings by verifying that user is unable to play
-        certain videos as well as toggle restricted settings.
-
-        @param case: policy value expected.
-
-        """
-        # Navigates to the search page to search for adult content
-        self._search_for_adult_content()
-        # We could check for the status shown in restricted menu but
-        # unfortunately this is broken for the policy and therefore
-        # doesn't add value to the test
-        #self._open_restricted_menu()
-
-        if case == 'Strict':
-            if 'restricted' in self._strict_content() \
-               and self._restricted_mode_by_policy_strict():
-                return True
-            raise error.TestFail(
-                    "Restricted mode is not on, user can view restricted video."
-            )
-        elif case == 'Moderate':
-            if 'restricted' in self._moderated_content() \
-               and self._restricted_mode_by_policy_moderate():
-                return True
-            raise error.TestFail(
-                    "Restricted mode is not on, user can view restricted video."
-            )
-        else:
-            return True
-
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        self.POLICIES = {self.POLICY_NAME: self.TEST_CASES[case]}
-        self.setup_case(user_policies=self.POLICIES)
-        self._check_restricted_mode(case)
diff --git a/client/site_tests/policy_ForceYouTubeSafetyMode/control b/client/site_tests/policy_ForceYouTubeSafetyMode/control
deleted file mode 100644
index df652ca..0000000
--- a/client/site_tests/policy_ForceYouTubeSafetyMode/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_ForceYouTubeSafetyMode'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of ForceYouTubeSafetyMode policy on client behavior.
-
-This test verifies the effect of the ForceYouTubeSafetyMode user policy on
-Chrome OS client behavior. The test exercises all valid policy values across
-three test cases: NotSet_SafetyInactive, False_SafetyInactive, and
-True_SafetyActive.
-
-A test case shall pass when the YouTube.com page indicates that 'Restricted
-Mode' is On (or Off) when the policy is set true (or is set false or not set).
-A test case shall fail if the above behavior is not enforced.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ForceYouTubeSafetyMode', **args_dict)
diff --git a/client/site_tests/policy_ForceYouTubeSafetyMode/control.false_safetyinactive b/client/site_tests/policy_ForceYouTubeSafetyMode/control.false_safetyinactive
deleted file mode 100644
index bc228cf..0000000
--- a/client/site_tests/policy_ForceYouTubeSafetyMode/control.false_safetyinactive
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_ForceYouTubeSafetyMode.false_safetyinactive'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of ForceYouTubeSafetyMode policy on client behavior & appearance.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-ForceYouTubeSafetyMode user policy is set false.
-
-The test case shall pass when the YouTube.com page indicates that 'Restricted
-Mode' is Off. It shall fail if this behavior is not enforced.
-
-'''
-
-job.run_test('policy_ForceYouTubeSafetyMode', case='False_SafetyInactive')
diff --git a/client/site_tests/policy_ForceYouTubeSafetyMode/control.notset_safetyinactive b/client/site_tests/policy_ForceYouTubeSafetyMode/control.notset_safetyinactive
deleted file mode 100644
index 25fb216..0000000
--- a/client/site_tests/policy_ForceYouTubeSafetyMode/control.notset_safetyinactive
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_ForceYouTubeSafetyMode.notset_safetyinactive'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of ForceYouTubeSafetyMode policy on client behavior & appearance.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-ForceYouTubeSafetyMode user policy is not set.
-
-The test case shall pass when the YouTube.com page indicates that 'Restricted
-Mode' is Off. It shall fail if this behavior is not enforced.
-
-'''
-
-job.run_test('policy_ForceYouTubeSafetyMode', case='NotSet_SafetyInactive')
diff --git a/client/site_tests/policy_ForceYouTubeSafetyMode/control.true_safetyactive b/client/site_tests/policy_ForceYouTubeSafetyMode/control.true_safetyactive
deleted file mode 100644
index c75b8c1..0000000
--- a/client/site_tests/policy_ForceYouTubeSafetyMode/control.true_safetyactive
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_ForceYouTubeSafetyMode.true_safetyactive'
-# TODO(krishnargv): enable this test once crbug.com/712383 is fixed.
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of ForceYouTubeSafetyMode policy on client behavior & appearance.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-ForceYouTubeSafetyMode user policy is set true.
-
-The test case shall pass when the YouTube.com page indicates that 'Restricted
-Mode' is On. It shall fail if this behavior is not enforced.
-
-'''
-
-job.run_test('policy_ForceYouTubeSafetyMode', case='True_SafetyActive')
diff --git a/client/site_tests/policy_ForceYouTubeSafetyMode/policy_ForceYouTubeSafetyMode.py b/client/site_tests/policy_ForceYouTubeSafetyMode/policy_ForceYouTubeSafetyMode.py
deleted file mode 100644
index fe96428..0000000
--- a/client/site_tests/policy_ForceYouTubeSafetyMode/policy_ForceYouTubeSafetyMode.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_ForceYouTubeSafetyMode(
-    enterprise_policy_base.EnterprisePolicyTest):
-    """Test effect of ForceYouTubeSafetyMode policy on Chrome OS behavior.
-
-    This test verifies that the ForceYouTubeSafetyMode user policy controls
-    whether Chrome OS forces YouTube to use Safety Mode. When the policy is
-    set true, Chrome shall add the 'YouTube-Safety-Mode: Active' header to
-    any YouTube URL request. When the policy is set false or is not set,
-    Chrome shall not add the header. The presence of the header causes YouTube
-    to activate Restricted Mode. The absence of the header causes YouTube to
-    use the mode last set by the user (as stored in a cookie), or default to
-    inactive if the user has not set the mode.
-
-    The test covers all valid policy values across three test cases:
-    NotSet_SafetyInactive, False_SafetyInactive, True_SafetyActive.
-
-    A test case passes when https://www.youtube.com page indicates that
-    'Restricted Mode' is On (or Off) when the policy is set true (or set false
-    or not set). A test case shall fail if the above behavior is not enforced.
-
-    """
-    version = 1
-
-    POLICY_NAME = 'ForceYouTubeSafetyMode'
-    TEST_CASES = {
-        'True_SafetyActive': True,
-        'False_SafetyInactive': False,
-        'NotSet_SafetyInactive': None
-    }
-    SUPPORTING_POLICIES = {
-        'DefaultSearchProviderEnabled': None
-    }
-    YOUTUBE_SEARCH_URL = 'https://www.youtube.com/results?search_query=kittens'
-
-    def _test_force_youtube_safety(self, policy_value):
-        """Verify CrOS enforces ForceYouTubeSafetyMode policy.
-
-        @param policy_value: policy value for this case.
-
-        """
-        is_safety_mode_active = self._is_restricted_mode_active()
-        if policy_value == True:
-            if not is_safety_mode_active:
-                raise error.TestFail('Restricted Mode should be active.')
-        else:
-            if is_safety_mode_active:
-                raise error.TestFail('Restricted Mode should not be active.')
-
-    def _is_restricted_mode_active(self):
-        """Check whether the safety-mode-message is displayed.
-
-        When Restricted Mode is enabled on www.youtube.com, a warning message
-        is displayed at the top of the screen saying that some results have
-        been removed. The message is in <p class="safety-mode-message">.
-
-        @returns: True if the safety-mode-message is displayed.
-
-        """
-        tab = self.navigate_to_url(self.YOUTUBE_SEARCH_URL)
-        is_restricted_mode_active = tab.EvaluateJavaScript(
-            'document.getElementsByClassName("safety-mode-message").length')
-        logging.info('restricted mode active: %s', is_restricted_mode_active)
-        tab.Close()
-        return is_restricted_mode_active
-
-    def run_once(self, case):
-        """Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        self.SUPPORTING_POLICIES[self.POLICY_NAME] = case_value
-        self.setup_case(user_policies=self.SUPPORTING_POLICIES)
-        self._test_force_youtube_safety(case_value)
diff --git a/client/site_tests/policy_GlobalNetworkSettings/control b/client/site_tests/policy_GlobalNetworkSettings/control
deleted file mode 100644
index 0a85d4f..0000000
--- a/client/site_tests/policy_GlobalNetworkSettings/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_GlobalNetworkSettings'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-This test should be run through the 'policy_GlobalNetworkSettingsServer' test.
-
-'policy_GlobalNetworkSettings' sets GlobalNetworkConfiguration policy
-and attempts to connect to policy and user defined networks. Fails if DUT
-behavior does not match expected policy behavior.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_GlobalNetworkSettings', **args_dict)
-
diff --git a/client/site_tests/policy_GlobalNetworkSettings/policy_GlobalNetworkSettings.py b/client/site_tests/policy_GlobalNetworkSettings/policy_GlobalNetworkSettings.py
deleted file mode 100644
index ebdb1f2..0000000
--- a/client/site_tests/policy_GlobalNetworkSettings/policy_GlobalNetworkSettings.py
+++ /dev/null
@@ -1,226 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import pickle
-import re
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.enterprise import enterprise_network_api
-
-
-class policy_GlobalNetworkSettings(
-        enterprise_policy_base.EnterprisePolicyTest):
-    version = 1
-
-
-    def cleanup(self):
-        """Re-enable ethernet after the test is completed."""
-        if hasattr(self, 'net_api'):
-            self.net_api.chrome_net_context.enable_network_device('Ethernet')
-        super(policy_GlobalNetworkSettings, self).cleanup()
-
-
-    def _test_only_connect_if_available(self, policy_error, user_error):
-        """
-        Verify the AllowOnlyPolicyNetworksToConnectIfAvailable policy.
-
-        If both networks are available, only the policy network should
-        connect. If the policy network is unavailable, then the user network
-        may connect. Ensure the errors are caused by the policy blocking or
-        allowing the correct connections.
-
-        @param policy_error: Error (if any) raised from connecting to policy
-                network.
-        @param user_error: Error (if any) raised from connecting to user
-                network.
-
-        @raises error.TestFail: If errors do not match policy behavior.
-
-        """
-        OUT_OF_RANGE_ERROR = 'The SSID: .* is not in WiFi range of the DUT'
-        BLOCKED_ERROR = ('Could not connect to .* network. '
-                         'Error returned by chrome.networkingPrivate.'
-                         'startConnect API: blocked-by-policy')
-
-        if policy_error and user_error:
-            raise error.TestFail(
-                    'Unable to connect to user or policy network: %s'
-                    % policy_error)
-        elif not policy_error and user_error:
-            if not re.match(BLOCKED_ERROR, str(user_error)):
-                raise error.TestFail('Network API received unrecognized '
-                                     'error connecting to the user '
-                                     'network: %s' % user_error)
-        elif policy_error and not user_error:
-            if not re.match(OUT_OF_RANGE_ERROR, str(policy_error)):
-                raise error.TestFail('Network API received unrecognized '
-                                     'error connecting to the policy '
-                                     'network: %s' % policy_error)
-        elif not (policy_error or user_error):
-            raise error.TestFail('User network connected despite policy '
-                                 'network being available')
-
-
-    def _test_wifi_disabled_policy(self, policy_error, user_error):
-        """
-        Verify the DisableNetworkTypes policy on WiFi.
-
-        Both networks should show the 'No wifi networks found' error.
-
-        @param policy_error: Error (if any) raised from connecting to policy
-                network.
-        @param user_error: Error (if any) raised from connecting to user
-                network.
-
-        @raises error.TestFail: If errors do not match policy behavior.
-
-        """
-        for err in [policy_error, user_error]:
-            if err is None:
-                raise error.TestFail('DUT was able to connect to WiFi, but '
-                                     'WiFi should be blocked.')
-            elif str(err) != 'No wifi networks found.':
-                raise error.TestFail('Network API received unrecognized '
-                                     'error connecting to network: %s'
-                                     % err)
-
-
-    def _test_autoconnect_policy(self, ssid, policy_network, user_network):
-        """
-        Verify the AllowOnlyPolicyNetworksToAutoconnect policy.
-
-        Disconnect from the network. The policy network should reconnect,
-        and the user network should not.
-
-        @param ssid: SSID of connected network.
-        @param policy_network: Network policy defined network.
-        @param user_network: User defined network.
-
-        @raises error.TestFail: If errors do not match policy behavior.
-
-        """
-        self.net_api.disconnect_from_network(ssid)
-
-        if self.net_api.is_network_connected(ssid):
-            if ssid == user_network.ssid:
-                raise error.TestFail(
-                        'User network autoconnected despite '
-                        'AllowOnlyPolicyNetworksToAutoconnect=True')
-        elif ssid == policy_network.ssid:
-            raise error.TestFail(
-                    'Policy network did not autoconnect, despite '
-                    'AllowOnlyPolicyNetworksToAutoconnect=True')
-
-
-    def _test_allow_only_policy(self, policy_error, user_error):
-        """
-        Verify the AllowOnlyPolicyNetworksToConnect policy.
-
-        policy_error should be None, user_error should not.
-
-        @param policy_error: Error (if any) raised from connecting to policy
-                network.
-        @param user_error: Error (if any) raised from connecting to user
-                network.
-
-        @raises error.TestFail: If errors do not match policy behavior.
-
-        """
-        if policy_error:
-            raise error.TestFail('DUT should have connected to policy '
-                                 'network, but did not: %s' % policy_error)
-        if not user_error:
-            raise error.TestFail('DUT was able to connect to user '
-                                 'network, but should have been blocked.')
-
-
-    def test_global_settings(self, gnc_settings, policy_network, user_network):
-        """
-        Attempt to connect to the policy network, then the user_network.
-
-        Ensure connection behavior matches GlobalNetworkConfiguration.
-
-        @param gnc_settings: GlobalNetworkConfiguration dictionary value.
-        @param policy_network: NetworkConfig object representing the policy
-                network configuration.
-        @param user_network: NetworkConfig object representing the user
-                network configuration.
-
-        @raise error.TestFail: DUT behavior does not match policy settings.
-
-        """
-        # Store connection errors to check later.
-        network_errors = {}
-        for ssid in [policy_network.ssid, user_network.ssid]:
-            network_errors[ssid] = None
-            try:
-                self.net_api.connect_to_network(ssid)
-            except error.TestFail as e:
-                network_errors[ssid] = e
-                continue
-
-            if gnc_settings.get('AllowOnlyPolicyNetworksToAutoconnect'):
-                self._test_autoconnect_policy(ssid,
-                                              policy_network,
-                                              user_network)
-                continue
-
-            if not self.net_api.is_network_connected(ssid):
-                raise error.TestFail(
-                        'Did not connect to network (%s)' % ssid)
-
-        policy_error = network_errors[policy_network.ssid]
-        user_error = network_errors[user_network.ssid]
-
-        if gnc_settings.get('AllowOnlyPolicyNetworksToConnectIfAvailable'):
-            self._test_only_connect_if_available(policy_error, user_error)
-
-        elif 'WiFi' in gnc_settings.get('DisableNetworkTypes', {}):
-            self._test_wifi_disabled_policy(policy_error, user_error)
-
-        elif gnc_settings.get('AllowOnlyPolicyNetworksToConnect'):
-            self._test_allow_only_policy(policy_error, user_error)
-
-
-    def run_once(self, gnc_settings=None, policy_network_pickle=None,
-                 user_network_pickle=None):
-        """
-        Setup and run the test configured for the specified test case.
-
-        policy_network is in the network policy, and user_network is not.
-        The GlobalNetworkConfiguration settings modify how the DUT is able to
-        connect to both of these networks.
-
-        @param gnc_settings: GlobalNetworkConfiguration dictionary value.
-        @param policy_network_pickle: Pickled NetworkConfig object to set as
-                the policy network configuration.
-        @param user_network_pickle: Pickled NetworkConfig object to set as the
-                user network configuration.
-
-        @raises error.TestFail: If DUT's actions do not match policy settings.
-
-        """
-        if policy_network_pickle is None or user_network_pickle is None:
-            raise error.TestError('Networks cannot be None')
-
-        policy_network = pickle.loads(policy_network_pickle)
-        user_network = pickle.loads(user_network_pickle)
-
-        self.setup_case(
-            user_policies={'OpenNetworkConfiguration': policy_network.policy()},
-            device_policies={'DeviceOpenNetworkConfiguration': {
-                             'GlobalNetworkConfiguration': gnc_settings}},
-            extension_paths=[
-                enterprise_network_api.NETWORK_TEST_EXTENSION_PATH
-            ],
-            enroll=True
-        )
-
-        self.net_api = enterprise_network_api.\
-                ChromeEnterpriseNetworkContext(self.cr)
-        # Disable ethernet so device will default to WiFi
-        self.net_api.disable_network_device('Ethernet')
-
-        self.test_global_settings(gnc_settings, policy_network, user_network)
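The pickle handoff described in run_once above (the server side pickles the network configs and the client test unpickles them) can be illustrated with a stand-in class; FakeNetworkConfig and the policy() payload shown here are assumptions for the sketch, not the real NetworkConfig API:

import pickle

class FakeNetworkConfig(object):
    """Stand-in for the NetworkConfig object named in the docstrings above."""

    def __init__(self, ssid):
        self.ssid = ssid

    def policy(self):
        # Shape chosen for illustration; the real policy() payload differs.
        return {'WiFi': {'SSID': self.ssid, 'Security': 'None'}}

# Server side: serialize the config before handing it to the client test.
policy_network_pickle = pickle.dumps(FakeNetworkConfig('PolicyNet'))

# Client side (run_once): restore the object and use its attributes.
policy_network = pickle.loads(policy_network_pickle)
print(policy_network.ssid, policy_network.policy())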
diff --git a/client/site_tests/policy_KeyPermissions/control b/client/site_tests/policy_KeyPermissions/control
deleted file mode 100644
index 525f1c4..0000000
--- a/client/site_tests/policy_KeyPermissions/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_KeyPermissions'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_KeyPermissions policy.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_KeyPermissions', **args_dict)
diff --git a/client/site_tests/policy_KeyPermissions/control.allow b/client/site_tests/policy_KeyPermissions/control.allow
deleted file mode 100644
index 98c8ec5..0000000
--- a/client/site_tests/policy_KeyPermissions/control.allow
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_KeyPermissions.allow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that when allowCorporateKeyUsage (within the KeyPermissions
-policy) is True, the configured certificate can be used.
-
-'''
-
-job.run_test('policy_KeyPermissions',
-             case=True)
diff --git a/client/site_tests/policy_KeyPermissions/control.disallow b/client/site_tests/policy_KeyPermissions/control.disallow
deleted file mode 100644
index 73715f2..0000000
--- a/client/site_tests/policy_KeyPermissions/control.disallow
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_KeyPermissions.disallow'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that when allowCorporateKeyUsage (within the KeyPermissions
-policy) is False, the configured certificate cannot be used.
-'''
-
-job.run_test('policy_KeyPermissions',
-             case=False)
diff --git a/client/site_tests/policy_KeyPermissions/control.not_set b/client/site_tests/policy_KeyPermissions/control.not_set
deleted file mode 100644
index 4f9e650..0000000
--- a/client/site_tests/policy_KeyPermissions/control.not_set
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_KeyPermissions.not_set'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that when allowCorporateKeyUsage (within the KeyPermissions
-policy) is None (not set), the configured certificate cannot be used.
-
-'''
-
-job.run_test('policy_KeyPermissions',
-             case=None)
diff --git a/client/site_tests/policy_KeyPermissions/policy_KeyPermissions.py b/client/site_tests/policy_KeyPermissions/policy_KeyPermissions.py
deleted file mode 100644
index ccc89d7..0000000
--- a/client/site_tests/policy_KeyPermissions/policy_KeyPermissions.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import utils
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_KeyPermissions(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of policy_KeyPermissions/allowCorporateKeyUsage policy.
-
-    This test utilizes the certs API extension to prep the certificate and,
-    once the cert is 'selected', checks whether the Certificate information
-    GUI object appears.
-
-    The wait_for_ui_obj and did_obj_not_load functions within self.ui will
-    raise a TestError if:
-        The object did not load in the wait_for_ui_obj() call.
-        The object did load in the did_obj_not_load() call.
-
-    """
-    version = 1
-    QUERY = "document.querySelector('#{}').{}"
-
-    def click_button(self, button_id):
-        """Click an element given its ID."""
-        cmd = self.QUERY.format(button_id, 'click()')
-        self.tab.ExecuteJavaScript(cmd)
-
-    def field_value(self, obj_id):
-        """Return the value of a text field."""
-        cmd = self.QUERY.format(obj_id, 'value')
-        return self.tab.EvaluateJavaScript(cmd)
-
-    def wait_for_extension(self):
-        """Wait for the extension to install so we can open it."""
-        def load_page():
-            self.tab = self.navigate_to_url(self.EXTENSION_PAGE, self.tab)
-            return self.tab.EvaluateJavaScript(
-                "document.querySelector('#cert-enrollment') !== null")
-
-        utils.poll_for_condition(
-            load_page,
-            timeout=15,
-            sleep_interval=1,
-            desc='Timed out waiting for extension to install.')
-
-    def test_platform_keys(self, case):
-        """
-        Test the chrome.enterprise.platformKeys API.
-
-        The following API methods are tested:
-            - getToken
-            - getCertificates
-            - importCertificate
-            - removeCertificate
-
-        """
-        try:
-            self._click_generate()
-        except error.TestFail:
-            # On specific devices the very first time the extension page is
-            # loaded, it will not load the certs. This is a small workaround to
-            # fix that.
-            self.navigate_to_url('https://google.com', self.tab)
-            self._click_generate()
-
-        # Click all the buttons needed to get the cert ready.
-        self.click_button('sign')
-        self.click_button('create-cert')
-        self.click_button('import-cert')
-        self.click_button('list-certs')
-        self.click_button('select-certs')
-
-        # Test if the cert was allowed.
-        if case:
-            self.ui.wait_for_ui_obj('Certificate information')
-        else:
-            self.ui.did_obj_not_load('Certificate information')
-
-    def _click_generate(self):
-        self.wait_for_extension()
-
-        self.click_button('generate')
-        error_id = 'generate-error'
-
-        utils.poll_for_condition(
-            lambda: 'OK' in self.field_value(error_id),
-            timeout=45,
-            exception=error.TestFail(
-                'API error: %s' % self.field_value(error_id)))
-
-    def run_once(self, case=None):
-        """Setup and run the test configured for the specified test case."""
-
-        EXTENSION_ID = 'hoppbgdeajkagempifacalpdapphfoai'
-        self.EXTENSION_PAGE = ('chrome-extension://%s/main.html'
-                               % EXTENSION_ID)
-        self.tab = None
-
-        self.setup_case(
-            disable_default_apps=False,
-            user_policies={
-                'ExtensionInstallForcelist': [EXTENSION_ID],
-                'DeveloperToolsAvailability': 1,
-                'KeyPermissions':
-                    {'hoppbgdeajkagempifacalpdapphfoai':
-                        {'allowCorporateKeyUsage': case}}})
-        self.ui.start_ui_root(self.cr)
-        self.test_platform_keys(case)
diff --git a/client/site_tests/policy_KioskModeEnabled/control b/client/site_tests/policy_KioskModeEnabled/control
deleted file mode 100644
index 021f92f..0000000
--- a/client/site_tests/policy_KioskModeEnabled/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_KioskModeEnabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Called through the matching server-side test only. Verifies
-that the DUT entered kiosk mode.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_KioskModeEnabled', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_KioskModeEnabled/policy_KioskModeEnabled.py b/client/site_tests/policy_KioskModeEnabled/policy_KioskModeEnabled.py
deleted file mode 100644
index f9376fe..0000000
--- a/client/site_tests/policy_KioskModeEnabled/policy_KioskModeEnabled.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.common_lib import utils
-
-
-class policy_KioskModeEnabled(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """Test for verifying that the DUT entered kiosk mode."""
-    version = 1
-
-    def run_once(self):
-        """Entry point of this test."""
-
-        # ID of the kiosk app to start.
-        kId = 'afhcomalholahplbjhnmahkoekoijban'
-
-        self.DEVICE_POLICIES = {
-            'DeviceLocalAccounts': [
-                {'account_id': kId, 'kiosk_app': {'app_id': kId}, 'type': 1}],
-            'DeviceLocalAccountAutoLoginId': kId
-        }
-
-        self.setup_case(
-            device_policies=self.DEVICE_POLICIES,
-            enroll=True,
-            kiosk_mode=True,
-            auto_login=False)
-        self.ui.start_ui_root(self.cr)
-        self.ui.wait_for_ui_obj(name='/Kiosk/', isRegex=True, timeout=30)
diff --git a/client/site_tests/policy_NativePrintersBulkAccessMode/control.allowall b/client/site_tests/policy_NativePrintersBulkAccessMode/control.allowall
deleted file mode 100644
index 0c06806..0000000
--- a/client/site_tests/policy_NativePrintersBulkAccessMode/control.allowall
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'kathrelkeld, pawliczek, Chrome OS Team'
-NAME = 'policy_NativePrintersBulkAccessMode.allowall'
-ATTRIBUTES = 'suite:bvt-perbuild, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of NativePrintersBulkAccessMode policy on which printers are
-displayed for use on a managed account.
-
-'''
-
-job.run_test('policy_NativePrintersBulkAccessMode', case=(None, 'allowall'))
diff --git a/client/site_tests/policy_NativePrintersBulkAccessMode/control.blacklist b/client/site_tests/policy_NativePrintersBulkAccessMode/control.blacklist
deleted file mode 100644
index 87b3f06..0000000
--- a/client/site_tests/policy_NativePrintersBulkAccessMode/control.blacklist
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'kathrelkeld, pawliczek, Chrome OS Team'
-NAME = 'policy_NativePrintersBulkAccessMode.blacklist'
-ATTRIBUTES = 'suite:bvt-perbuild, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of NativePrintersBulkAccessMode policy on which
-printers are displayed for use on a managed account.
-
-'''
-
-job.run_test('policy_NativePrintersBulkAccessMode', case=(None, 'blacklist'))
diff --git a/client/site_tests/policy_NativePrintersBulkAccessMode/control.none b/client/site_tests/policy_NativePrintersBulkAccessMode/control.none
deleted file mode 100644
index 1cf6edf..0000000
--- a/client/site_tests/policy_NativePrintersBulkAccessMode/control.none
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'kathrelkeld, pawliczek, Chrome OS Team'
-NAME = 'policy_NativePrintersBulkAccessMode.none'
-ATTRIBUTES = 'suite:bvt-perbuild, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of NativePrintersBulkAccessMode policy on which printers are
-displayed for use on a managed account.
-
-'''
-
-job.run_test('policy_NativePrintersBulkAccessMode', case=(None,None))
diff --git a/client/site_tests/policy_NativePrintersBulkAccessMode/control.whitelist b/client/site_tests/policy_NativePrintersBulkAccessMode/control.whitelist
deleted file mode 100644
index 5f87f11..0000000
--- a/client/site_tests/policy_NativePrintersBulkAccessMode/control.whitelist
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'kathrelkeld, pawliczek, Chrome OS Team'
-NAME = 'policy_NativePrintersBulkAccessMode.whitelist'
-ATTRIBUTES = 'suite:bvt-perbuild, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of NativePrintersBulkAccessMode policy on which
-printers are displayed for use on a managed account.
-
-'''
-
-job.run_test('policy_NativePrintersBulkAccessMode', case=(None,'whitelist'))
diff --git a/client/site_tests/policy_NativePrintersBulkAccessMode/policy_NativePrintersBulkAccessMode.py b/client/site_tests/policy_NativePrintersBulkAccessMode/policy_NativePrintersBulkAccessMode.py
deleted file mode 100644
index 665ba8f..0000000
--- a/client/site_tests/policy_NativePrintersBulkAccessMode/policy_NativePrintersBulkAccessMode.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_NativePrintersBulkAccessMode(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """Verify behavior of NativePrinters user policy."""
-    version = 1
-
-    def initialize(self, **kwargs):
-        """Initialize."""
-        self._initialize_test_constants()
-        self._initialize_enterprise_policy_test()
-
-
-    def _initialize_test_constants(self):
-        """Construct policy values as needed."""
-        PRINTERS_URL = ('https://storage.googleapis.com/chromiumos-test-assets'
-                        '-public/enterprise/printers.json')
-        PRINTERS_HASH = ('7a052c5e4f23c159668148df2a3c202bed4d65749cab5ecd0f'
-                         'a7db211c12a3b8') #sha256
-        PRINTERS2_URL = ('https://storage.googleapis.com/chromiumos-test-assets'
-                         '-public/enterprise/printers2.json')
-        PRINTERS2_HASH = ('0d7344c989893cb97484d6111bf497a999333c177e1c991c06'
-                          'db664c58e4b81a') #sha256
-        # These strings are printer ids, defined in PRINTERS_URL.
-        self.DEFINED_IDS = set(['wl', 'bl', 'other', 'both'])
-        # These strings are printer ids, defined in PRINTERS2_URL.
-        self.DEFINED_IDS2 = set(['wl2', 'bl2', 'other2', 'both2'])
-        # Whitelist and blacklist, common for both sets of printers
-        self.WHITELIST = ['both', 'both2', 'wl', 'wl2', 'otherwl']
-        self.BLACKLIST = ['both', 'both2', 'bl', 'bl2', 'otherbl']
-
-        self.user_policies = {
-                'NativePrintersBulkConfiguration': {'url': PRINTERS_URL,
-                                                    'hash': PRINTERS_HASH},
-                'NativePrintersBulkWhitelist': self.WHITELIST,
-                'NativePrintersBulkBlacklist': self.BLACKLIST}
-
-        self.device_policies = {
-                'DevicePrinters': {
-                        'url': PRINTERS2_URL,
-                        'hash': PRINTERS2_HASH
-                },
-                'DevicePrintersAllowlist': self.WHITELIST,
-                'DevicePrintersBlocklist': self.BLACKLIST
-        }
-
-        self.USER_ACCESS_MODE_NAME = 'NativePrintersBulkAccessMode'
-        self.DEVICE_ACCESS_MODE_NAME = 'DevicePrintersAccessMode'
-        self.ACCESS_MODE_VALUES = {'allowall': 2,
-                                   'whitelist': 1,
-                                   'blacklist': 0}
-
-
-    def _get_printer_ids(self):
-        """
-        Use autotest_private to read the ids of listed printers.
-
-        @returns: a set of ids of printers that would be seen by a user under
-                  Print Destinations.
-
-        """
-        self.cr.autotest_ext.ExecuteJavaScript(
-                'window.__printers = null; '
-                'chrome.autotestPrivate.getPrinterList(function(printers) {'
-                '    window.__printers = printers;'
-                '});')
-        self.cr.autotest_ext.WaitForJavaScriptCondition(
-                'window.__printers !== null')
-        printers = self.cr.autotest_ext.EvaluateJavaScript(
-                'window.__printers')
-        logging.info('Printers found: %s', printers)
-
-        if not isinstance(printers, list):
-            raise error.TestFail('Received response is not a list!')
-
-        ID_KEY = u'printerId'
-        ids = set()
-        for printer in printers:
-            if ID_KEY in printer:
-                ids.add(printer[ID_KEY].encode('ascii'))
-            else:
-                raise error.TestFail('Missing %s field!' % ID_KEY)
-        logging.info('Found ids: %s', ids)
-
-        if len(ids) < len(printers):
-            raise error.TestFail('Received response contains duplicates!')
-
-        return ids
-
-
-    def _resultant_set_of_ids(self, access_mode, set_of_all_ids):
-        """
-        Calculate the resultant set of printer identifiers for the given
-        access_mode.
-
-        @param access_mode: one of strings: 'allowall', 'whitelist',
-            'blacklist' or None (policy not set).
-        @param set_of_all_ids: set of all printer identifiers.
-
-        @returns: a set of ids of printers.
-
-        """
-        if access_mode is None:
-            return set()
-        if access_mode == 'blacklist':
-            return set_of_all_ids - set(self.BLACKLIST)
-        if access_mode == 'whitelist':
-            return set_of_all_ids & set(self.WHITELIST)
-        if access_mode == 'allowall':
-            return set_of_all_ids
-        raise Exception("Incorrect value of access mode")
-
-
-    def _test_bulk_native_printers(self, case_device, case_user):
-        """
-        Verify that the set of visible printers matches the given access modes.
-
-        @param case_device: access mode set in device policies (one of strings:
-                'allowall', 'whitelist', 'blacklist') or None (policy not set).
-        @param case_user: access mode set in user policies (one of strings:
-                'allowall', 'whitelist', 'blacklist') or None (policy not set).
-
-        """
-        printers = self._get_printer_ids()
-        expected = self._resultant_set_of_ids(case_device, self.DEFINED_IDS2)
-        expected |= self._resultant_set_of_ids(case_user, self.DEFINED_IDS)
-        if printers != expected:
-            raise error.TestFail('Did not see expected printer output! '
-                                 'Expected %s, found %s.' % (expected,
-                                                             printers))
-
-
-    def run_once(self, case):
-        """
-        Entry point of this test.
-
-        @param case: a tuple of two access modes, the first one is for device
-                policies, the second one is for user policies. Each access mode
-                equals one of the strings: 'allowall', 'whitelist', 'blacklist'
-                or is set to None (which means 'policy not set').
-
-        """
-        case_device, case_user = case
-
-        if case_device is None:
-            self.device_policies = {}
-            enroll = False
-        else:
-            assert(case_device in self.ACCESS_MODE_VALUES)
-            self.device_policies[self.DEVICE_ACCESS_MODE_NAME] =\
-                    self.ACCESS_MODE_VALUES[case_device]
-            enroll = True
-
-        if case_user is None:
-            self.user_policies = {}
-        else:
-            assert(case_user in self.ACCESS_MODE_VALUES)
-            self.user_policies[self.USER_ACCESS_MODE_NAME] =\
-                    self.ACCESS_MODE_VALUES[case_user]
-
-        self.setup_case(device_policies=self.device_policies,
-                user_policies=self.user_policies, enroll=enroll)
-        self._test_bulk_native_printers(case_device, case_user)
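
A minimal standalone sketch of the access-mode arithmetic implemented by
_resultant_set_of_ids() above; the helper and its argument names are
illustrative and are not code from the deleted test.

    def expected_printer_ids(access_mode, all_ids, allowlist, blocklist):
        """Return the printer ids a user should see for a given access mode."""
        if access_mode is None:           # policy not set: no bulk printers shown
            return set()
        if access_mode == 'blacklist':    # everything except blocked printers
            return set(all_ids) - set(blocklist)
        if access_mode == 'whitelist':    # only explicitly allowed printers
            return set(all_ids) & set(allowlist)
        if access_mode == 'allowall':     # every configured printer
            return set(all_ids)
        raise ValueError('Unknown access mode: %r' % access_mode)

    # With the user printer ids and lists defined above, 'whitelist' keeps
    # {'wl', 'both'} while 'blacklist' drops {'bl', 'both'}:
    assert expected_printer_ids('whitelist', {'wl', 'bl', 'other', 'both'},
                                ['both', 'both2', 'wl', 'wl2', 'otherwl'],
                                ['both', 'both2', 'bl', 'bl2', 'otherbl']) == {'wl', 'both'}
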
diff --git a/client/site_tests/policy_NativePrintersBulkAccessMode/src/Makefile b/client/site_tests/policy_NativePrintersBulkAccessMode/src/Makefile
deleted file mode 100644
index 6f9e008..0000000
--- a/client/site_tests/policy_NativePrintersBulkAccessMode/src/Makefile
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-ifndef SYSROOT
-  $(error Define SYSROOT)
-endif
-
-OUT_DIR ?= .
-PROTO_PATH = $(SYSROOT)/usr/share/protofiles
-PROTO_DEFS = $(PROTO_PATH)/chrome_device_policy.proto \
-	$(PROTO_PATH)/device_management_backend.proto \
-	$(PROTO_PATH)/chrome_extension_policy.proto \
-	$(PROTO_PATH)/cloud_policy.proto
-PROTO_BINDINGS = $(OUT_DIR)/chrome_device_policy_pb2.py \
-	$(OUT_DIR)/device_management_backend_pb2.py \
-	$(OUT_DIR)/chrome_extension_policy_pb2.py \
-	$(OUT_DIR)/cloud_policy_pb2.py
-
-all: $(PROTO_BINDINGS)
-
-$(PROTO_BINDINGS): $(PROTO_DEFS)
-	protoc --proto_path=$(PROTO_PATH) --python_out=$(OUT_DIR) $(PROTO_DEFS)
-
-clean:
-	rm -f $(PROTO_BINDINGS)
-
-
diff --git a/client/site_tests/policy_PinnedLauncherApps/control b/client/site_tests/policy_PinnedLauncherApps/control
deleted file mode 100644
index ec00e4e..0000000
--- a/client/site_tests/policy_PinnedLauncherApps/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PinnedLauncherApps'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_PinnedLauncherApps policy.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_PinnedLauncherApps', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_PinnedLauncherApps/control.pinned b/client/site_tests/policy_PinnedLauncherApps/control.pinned
deleted file mode 100644
index 0f98527..0000000
--- a/client/site_tests/policy_PinnedLauncherApps/control.pinned
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PinnedLauncherApps.pinned'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify the PinnedLauncherApps policy will pin apps to the launch bar, as well
-as lock them. Additionally verify when the policy is removed the user can
-remove the app from the launch bar.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_PinnedLauncherApps', **args_dict)
diff --git a/client/site_tests/policy_PinnedLauncherApps/policy_PinnedLauncherApps.py b/client/site_tests/policy_PinnedLauncherApps/policy_PinnedLauncherApps.py
deleted file mode 100644
index 6fd1a6a..0000000
--- a/client/site_tests/policy_PinnedLauncherApps/policy_PinnedLauncherApps.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_PinnedLauncherApps(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test the PinnedLauncherApps policy by pinning the default Google Photos
-    application.
-
-    This test will:
-        Set the application to be pinned via the user policy.
-        Verify the application is on the launch bar.
-        Verify the application cannot be removed from the launch bar.
-        Remove the application from the PinnedLauncherApps policy.
-        Verify the application can be removed from the launch bar.
-
-    """
-    version = 1
-    PINNED_TEXT = '/Remove from Chrome/'
-    EXT_NAME = 'Google Photos'
-
-    def _remove_pinned_apps_policy(self):
-        """Reset the policy, thus removing any pinned apps."""
-        self.update_policies()
-
-    def _remove_pinned_app(self):
-        """Remove the pinned app after the test is done."""
-        self.ui.doCommand_on_obj(self.EXT_NAME, cmd="showContextMenu()")
-        self.ui.wait_for_ui_obj('Unpin')
-        self.ui.doDefault_on_obj('Unpin')
-
-        self.ui.wait_for_ui_obj(self.EXT_NAME, remove=True)
-
-    def _check_launcher(self):
-        """Run the launcher test."""
-        self.ui.wait_for_ui_obj(self.EXT_NAME, timeout=30)
-        self.ui.doCommand_on_obj(self.EXT_NAME, cmd="showContextMenu()")
-        self.ui.wait_for_ui_obj(self.PINNED_TEXT, isRegex=True)
-        if not self.ui.did_obj_not_load('Unpin'):
-            self._remove_pinned_app()
-            raise error.TestError(
-                'App can be removed when pinned by policy!')
-
-        self._remove_pinned_apps_policy()
-        self._remove_pinned_app()
-
-        if self.ui.item_present(self.EXT_NAME):
-            raise error.TestError('Could not remove pinned app')
-
-    def run_once(self):
-        """
-        Set up the PinnedLauncherApps policy and run the launcher checks.
-
-        """
-        pol = {'PinnedLauncherApps': ['hcglmfcclpfgljeaiahehebeoaiicbko']}
-        self.setup_case(user_policies=pol, real_gaia=True)
-        self.ui.start_ui_root(self.cr)
-        self._check_launcher()
diff --git a/client/site_tests/policy_PlatformKeys/control b/client/site_tests/policy_PlatformKeys/control
deleted file mode 100644
index ae113c1..0000000
--- a/client/site_tests/policy_PlatformKeys/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_PlatformKeys'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-policy_PlatformKeys tests the chrome.enterprise.platformKeys API. It uses the
-"platformKeys Test Extension" to access the API.
-
-It tests the methods:
-- getToken
-- getCertificates
-- importCertificate
-- removeCertificate
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_PlatformKeys', **args_dict)
-
diff --git a/client/site_tests/policy_PlatformKeys/policy_PlatformKeys.py b/client/site_tests/policy_PlatformKeys/policy_PlatformKeys.py
deleted file mode 100644
index e502bb8..0000000
--- a/client/site_tests/policy_PlatformKeys/policy_PlatformKeys.py
+++ /dev/null
@@ -1,144 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import re
-import utils
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_PlatformKeys(enterprise_policy_base.EnterprisePolicyTest):
-    version = 1
-
-
-    def initialize(self, **kwargs):
-        """Set some global variables."""
-        super(policy_PlatformKeys, self).initialize(**kwargs)
-        # PlatformKeys extension ID.
-        self.EXTENSION_ID = 'hoppbgdeajkagempifacalpdapphfoai'
-        self.EXTENSION_PAGE = ('chrome-extension://%s/main.html'
-                               % self.EXTENSION_ID)
-        self.tab = None
-
-
-    def click_button(self, id):
-        """Click an element given its ID."""
-        self.tab.ExecuteJavaScript(
-                "document.querySelector('#%s').click()" % id)
-
-
-    def field_value(self, id):
-        """Return the value of a text field."""
-        return self.tab.EvaluateJavaScript(
-            "document.querySelector('#%s').value" % id)
-
-
-    def call_api(self, button_id, field_id=None):
-        """
-        Call the API by clicking a button and checking its output fields.
-
-        @param button_id: ID of the button element.
-        @param field_id: Text field output is printed to (if any).
-
-        @returns: Output of the call, if any.
-        @raises error.TestFail: If the API call fails.
-
-        """
-        error_id = button_id + '-error'
-        self.click_button(button_id)
-
-        # Wait for the API to return 'OK' and raise an error if it doesn't.
-        utils.poll_for_condition(
-                lambda: 'OK' in self.field_value(error_id),
-                timeout=15,
-                exception=error.TestFail(
-                    'API error: %s' % self.field_value(error_id)))
-
-        if field_id:
-            field = self.field_value(field_id)
-            return field
-
-
-    def create_certificate(self):
-        """Return a certificate using the generated public key."""
-        cert = self.call_api('create-cert', 'certificate')
-        return cert.rstrip()
-
-
-    def list_certificates(self):
-        """Fetch all certificates and parse them into a list."""
-        raw_certs = self.call_api('list-certs', 'certificates')
-
-        if raw_certs:
-            pattern = re.compile('-----BEGIN CERTIFICATE-----.*?'
-                                 '-----END CERTIFICATE-----', flags=re.DOTALL)
-            certs = re.findall(pattern, raw_certs)
-        else:
-            certs = []
-
-        return certs
-
-
-    def wait_for_extension(self):
-        """Wait for the extension to install so we can open it."""
-        def load_page():
-            self.tab = self.navigate_to_url(self.EXTENSION_PAGE, self.tab)
-            return self.tab.EvaluateJavaScript(
-                      "document.querySelector('#cert-enrollment') !== null")
-
-        utils.poll_for_condition(
-            load_page,
-            timeout=15,
-            sleep_interval=1,
-            desc='Timed out waiting for extension to install.')
-
-
-    def test_platform_keys(self):
-        """
-        Test the chrome.enterprise.platformKeys API.
-
-        The following API methods are tested:
-            - getToken
-            - getCertificates
-            - importCertificate
-            - removeCertificate
-
-        """
-        self.wait_for_extension()
-
-        if self.list_certificates():
-            raise error.TestFail('Certificates list should be empty at start.')
-
-        public_key = self.call_api('generate', 'public-key')
-
-        certificate = self.create_certificate()
-        self.call_api('import-cert')
-
-        installed_certs = self.list_certificates()
-        if len(installed_certs) != 1:
-            raise error.TestFail('There should be 1 certificate instead of %s.'
-                                 % len(installed_certs))
-
-        if installed_certs[0] != certificate:
-            raise error.TestFail('Installed certificate does not match '
-                                 'expected certificate. %s != %s' %
-                                 (installed_certs[0], certificate))
-
-        self.call_api('remove-cert')
-
-        if self.list_certificates():
-            raise error.TestFail('All certificates should have been removed '
-                                 'at the end of the test.')
-
-
-    def run_once(self):
-        """Setup and run the test configured for the specified test case."""
-        self.setup_case(user_policies={
-            'ExtensionInstallForcelist': [self.EXTENSION_ID],
-            'DeveloperToolsAvailability': 1
-        })
-
-        self.test_platform_keys()
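
The one non-obvious parsing step in the test above is splitting the
extension's concatenated certificate output into individual PEM blocks, as
list_certificates() does. A minimal sketch of that regex technique, with
hypothetical names not taken from the original file:

    import re

    PEM_RE = re.compile('-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----',
                        flags=re.DOTALL)

    def split_pem_certificates(raw):
        """Return each PEM certificate block found in raw text (possibly none)."""
        return PEM_RE.findall(raw) if raw else []

    # Two dummy blocks split into a list of two strings:
    blob = ('-----BEGIN CERTIFICATE-----\nAAA\n-----END CERTIFICATE-----\n'
            '-----BEGIN CERTIFICATE-----\nBBB\n-----END CERTIFICATE-----\n')
    assert len(split_pem_certificates(blob)) == 2
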
diff --git a/client/site_tests/policy_PolicyRefreshRate/control b/client/site_tests/policy_PolicyRefreshRate/control
deleted file mode 100644
index e49df71..0000000
--- a/client/site_tests/policy_PolicyRefreshRate/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PolicyRefreshRate'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_PolicyRefreshRate policy.
-The control files will set the min, max, and out-of-bounds values.
-The test will then check the policy page and verify the refresh interval
-shown at the top of the page matches the policy setting.
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_PolicyRefreshRate', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_PolicyRefreshRate/control.above_max b/client/site_tests/policy_PolicyRefreshRate/control.above_max
deleted file mode 100644
index 7b9823a..0000000
--- a/client/site_tests/policy_PolicyRefreshRate/control.above_max
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PolicyRefreshRate.above_max'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'PolicyRefreshRate' is set above the maximum allowed value,
-the value is set to the maximum value instead.
-
-'''
-
-job.run_test('policy_PolicyRefreshRate', case=186400000)
diff --git a/client/site_tests/policy_PolicyRefreshRate/control.below_min b/client/site_tests/policy_PolicyRefreshRate/control.below_min
deleted file mode 100644
index 3d72429..0000000
--- a/client/site_tests/policy_PolicyRefreshRate/control.below_min
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PolicyRefreshRate.below_min'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'PolicyRefreshRate' is set below the minimum allowed value,
-the value is set to the minimum value instead.
-
-'''
-
-job.run_test('policy_PolicyRefreshRate', case=100)
diff --git a/client/site_tests/policy_PolicyRefreshRate/control.max b/client/site_tests/policy_PolicyRefreshRate/control.max
deleted file mode 100644
index 8b52850..0000000
--- a/client/site_tests/policy_PolicyRefreshRate/control.max
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PolicyRefreshRate.max'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'PolicyRefreshRate' is set to the maximum allowed value,
-the value is set properly.
-
-'''
-
-job.run_test('policy_PolicyRefreshRate', case=86400000)
diff --git a/client/site_tests/policy_PolicyRefreshRate/control.min b/client/site_tests/policy_PolicyRefreshRate/control.min
deleted file mode 100644
index ff5e6e3..0000000
--- a/client/site_tests/policy_PolicyRefreshRate/control.min
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PolicyRefreshRate.min'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify when the 'PolicyRefreshRate' is set to the minimum allowed value,
-the value is set properly.
-
-'''
-
-job.run_test('policy_PolicyRefreshRate', case=1800000)
diff --git a/client/site_tests/policy_PolicyRefreshRate/policy_PolicyRefreshRate.py b/client/site_tests/policy_PolicyRefreshRate/policy_PolicyRefreshRate.py
deleted file mode 100644
index 7426b46..0000000
--- a/client/site_tests/policy_PolicyRefreshRate/policy_PolicyRefreshRate.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_PolicyRefreshRate(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of policy_PolicyRefreshRate policy on Chrome OS.
-
-    """
-    version = 1
-
-    def check_refresh_rate(self, case):
-        """
-        Check the policy refresh rate from chrome://policy and verify that
-        the text returned aligns with the configured policy.
-
-        @param case: Name of the test case to run.
-
-        """
-        tab = self.navigate_to_url('chrome://policy')
-
-        js_text_query = "document.querySelector('{}').innerText"
-        refresh_interval_js = '#status-box-container div.refresh-interval'
-
-        # Grab the policy refresh as shown at the top of the page, not from
-        # the policy table.
-        refresh_interval = tab.EvaluateJavaScript(
-                               js_text_query.format(refresh_interval_js))
-        if case <= 1800000:
-            expected_refresh = ' 30 mins'
-        elif case >= 86400000:
-            expected_refresh = ' 1 day'
-        else:
-            raise error.TestError('Unsupported PolicyRefreshRate case: {}'
-                                  .format(case))
-
-        if refresh_interval != expected_refresh:
-            raise error.TestFail('Policy refresh incorrect. Got {} expected {}'
-                                 .format(refresh_interval, expected_refresh))
-
-    def run_once(self, case):
-        """
-        @param case: Name of the test case to run.
-
-        """
-        self.setup_case(user_policies={'PolicyRefreshRate': case})
-        self.check_refresh_rate(case)
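
The four control files above exercise the clamping that check_refresh_rate()
asserts: values at or below 30 minutes are reported as ' 30 mins' and values
at or above 24 hours as ' 1 day'. A minimal sketch of that expectation,
assuming only those clamp limits; the helper name and constants are
illustrative:

    MIN_REFRESH_MS = 30 * 60 * 1000        # 1,800,000 ms, shown as ' 30 mins'
    MAX_REFRESH_MS = 24 * 60 * 60 * 1000   # 86,400,000 ms, shown as ' 1 day'

    def clamped_refresh_ms(requested_ms):
        """Return the refresh interval the browser is expected to apply."""
        return max(MIN_REFRESH_MS, min(MAX_REFRESH_MS, requested_ms))

    assert clamped_refresh_ms(100) == MIN_REFRESH_MS          # control.below_min
    assert clamped_refresh_ms(1800000) == MIN_REFRESH_MS      # control.min
    assert clamped_refresh_ms(86400000) == MAX_REFRESH_MS     # control.max
    assert clamped_refresh_ms(186400000) == MAX_REFRESH_MS    # control.above_max
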
diff --git a/client/site_tests/policy_PowerManagementIdleSettings/control b/client/site_tests/policy_PowerManagementIdleSettings/control
deleted file mode 100644
index d22ce61..0000000
--- a/client/site_tests/policy_PowerManagementIdleSettings/control
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_PowerManagementIdleSettings'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of PowerManagementIdleSettings policy on client behavior.
-
-This test verifies the effect of the PowerManagementIdleSettings user policy
-on Chrome OS client behavior. It exercises a range of valid values for the
-IdleAction property using three test cases: NotSet_Sleep, DoNothing_Continue,
-and Logout_EndSession.
-
-The test shall pass if the device correctly changes the screen brightness
-(dim and off) after the specified delay times, and performs the specified
-IdleAction after the Idle time has elapsed. The test shall fail if any of the
-above behaviors are not enforced.
-
-This control file allows CloudDPC E2E tests to run any test case defined in
-this test via command-line.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_PowerManagementIdleSettings', **args_dict)
diff --git a/client/site_tests/policy_PowerManagementIdleSettings/control.donothing_continue b/client/site_tests/policy_PowerManagementIdleSettings/control.donothing_continue
deleted file mode 100644
index 44fa8cf..0000000
--- a/client/site_tests/policy_PowerManagementIdleSettings/control.donothing_continue
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_PowerManagementIdleSettings.donothing_continue'
-#TODO (kathrelkeld): Re-enable this test once issue 840558 is resolved.
-#ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of PowerManagementIdleSettings policy on client behavior.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-PowerManagementIdleSettings user policy IdleAction is set to 'DoNothing'.
-
-The test shall pass if the user session continues (i.e., is not logged out,
-and does not go to sleep) after the Delay:Idle time expires. It shall fail if
-this behavior is not enforced.
-
-'''
-
-job.run_test('policy_PowerManagementIdleSettings', case='DoNothing_Continue')
diff --git a/client/site_tests/policy_PowerManagementIdleSettings/control.logout_endsession b/client/site_tests/policy_PowerManagementIdleSettings/control.logout_endsession
deleted file mode 100644
index d49abc9..0000000
--- a/client/site_tests/policy_PowerManagementIdleSettings/control.logout_endsession
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_PowerManagementIdleSettings.logout_endsession'
-#TODO (kathrelkeld): Re-enable this test once issue 840558 is resolved.
-#ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of PowerManagementIdleSettings policy on client behavior.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-PowerManagementIdleSettings user policy IdleAction is set to 'Logout'.
-
-The test shall pass if the user session ends (i.e., is logged out) after the
-Delay:Idle time expires. It shall fail if this behavior is not enforced.
-
-'''
-
-job.run_test('policy_PowerManagementIdleSettings', case='Logout_EndSession')
diff --git a/client/site_tests/policy_PowerManagementIdleSettings/control.notset_sleep b/client/site_tests/policy_PowerManagementIdleSettings/control.notset_sleep
deleted file mode 100644
index 8dce149..0000000
--- a/client/site_tests/policy_PowerManagementIdleSettings/control.notset_sleep
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_PowerManagementIdleSettings.notset_sleep'
-#TODO (kathrelkeld): Re-enable this test once issue 840558 is resolved.
-#ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of PowerManagementIdleSettings policy on client behavior.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-PowerManagementIdleSettings user policy IdleAction is not set.
-
-The test shall pass if the user session sleeps (i.e., screen blanks, but user
-is not logged out) after the Delay:Idle time expires. It shall fail if this
-behavior is not enforced.
-
-'''
-
-job.run_test('policy_PowerManagementIdleSettings', case='NotSet_Sleep')
diff --git a/client/site_tests/policy_PowerManagementIdleSettings/policy_PowerManagementIdleSettings.py b/client/site_tests/policy_PowerManagementIdleSettings/policy_PowerManagementIdleSettings.py
deleted file mode 100644
index fa2bf21..0000000
--- a/client/site_tests/policy_PowerManagementIdleSettings/policy_PowerManagementIdleSettings.py
+++ /dev/null
@@ -1,288 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import cryptohome
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.graphics import graphics_utils
-from autotest_lib.client.cros.power import power_status, power_utils
-
-
-class policy_PowerManagementIdleSettings(
-          enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of PowerManagementIdleSettings policy on Chrome OS behavior.
-
-    This test verifies the effect of the PowerManagementIdleSettings user
-    policy on specific Chrome OS client behaviors. It tests three IdleAction
-    configurations: 'DoNothing', 'Logout', and not set, with three test
-    cases: DoNothing_Continue, NotSet_Sleep, and Logout_EndSession. It also
-    verifies that the screen dims after ScreenDim delay, and then turns off
-    after ScreenOff delay (both delays in milliseconds).
-
-    Note: Valid IdleAction values are 'DoNothing', 'Suspend', 'Logout', and
-    'Shutdown'. This test exercises only the DoNothing and Logout actions.
-    Suspend is tested by enterprise_PowerManager.py. Shutdown can be tested
-    only using a Server-side AutoTest.
-
-    Chrome reports user activity to the power manager at most every 5 seconds.
-    To accommodate potential delays, the test pads the idle-action delay with
-    a 5 second activity report interval.
-
-    Several supporting policies are necessary to facilitate testing, or to
-    make testing more reliable. These policies are listed below with a brief
-    description of the set value.
-    - WaitForInitialUserActivity=False so idle timer starts immediately after
-      session starts.
-    - UserActivityScreenDimDelayScale=100 to prevent increased delays when
-      user activity occurs after screen dim.
-    - ChromeOsLockOnIdleSuspend=False to prevent screen lock upon suspend.
-    - AllowScreenLock=False to prevent manual screen lock. Will not affect
-      this test, but is the safest setting.
-    - AllowScreenWakeLocks=False to ignore 'keep awake' requests. Since wake
-      locks are not requested during this test, ignoring them is unnecessary.
-      But for safety we ignore them when testing suspend.
-    - LidCloseAction=3 to invoke no action upon (accidental) lid closure.
-    - RestoreOnStartup* policies are set to display the settings and policy
-      pages. This is useful when debugging failures.
-
-    """
-    version = 1
-
-    def initialize(self, **kwargs):
-        """Set up local variables and ensure device is on AC power."""
-        self._initialize_test_constants()
-        self._power_status = power_status.get_status()
-        if not self._power_status.on_ac():
-            raise error.TestNAError('Test must be run with DUT on AC power.')
-        self._backlight = power_utils.Backlight()
-        super(policy_PowerManagementIdleSettings, self).initialize(**kwargs)
-
-    def _initialize_test_constants(self):
-        self.POLICY_NAME = 'PowerManagementIdleSettings'
-        self.SCREEN_SETTLE_TIME = 0.3
-        self.SCREEN_DIM_DELAY = 4
-        self.IDLE_WARNING_DELAY = 6
-        self.SCREEN_OFF_DELAY = 8
-        self.IDLE_ACTION_DELAY = 10
-        self.ACTIVITY_REPORT_INTERVAL = 5
-        self.IDLE_ACTION_NOTSET = {
-            'AC': {
-                'Delays': {
-                    'ScreenDim': (self.SCREEN_DIM_DELAY * 1000),
-                    'IdleWarning': (self.IDLE_WARNING_DELAY * 1000),
-                    'ScreenOff': (self.SCREEN_OFF_DELAY * 1000),
-                    'Idle': (self.IDLE_ACTION_DELAY * 1000)
-                }
-            }
-        }
-        self.IDLE_ACTION_DONOTHING = {
-            'AC': {
-                'Delays': {
-                    'ScreenDim': (self.SCREEN_DIM_DELAY * 1000),
-                    'IdleWarning': (self.IDLE_WARNING_DELAY * 1000),
-                    'ScreenOff': (self.SCREEN_OFF_DELAY * 1000),
-                    'Idle': (self.IDLE_ACTION_DELAY * 1000)
-                },
-                'IdleAction': 'DoNothing'
-            }
-        }
-        self.IDLE_ACTION_LOGOUT = {
-            'AC': {
-                'Delays': {
-                    'ScreenDim': (self.SCREEN_DIM_DELAY * 1000),
-                    'IdleWarning': (self.IDLE_WARNING_DELAY * 1000),
-                    'ScreenOff': (self.SCREEN_OFF_DELAY * 1000),
-                    'Idle': (self.IDLE_ACTION_DELAY * 1000)
-                },
-                'IdleAction': 'Logout'
-            }
-        }
-        self.TEST_CASES = {
-            'NotSet_Sleep': self.IDLE_ACTION_NOTSET,
-            'DoNothing_Continue': self.IDLE_ACTION_DONOTHING,
-            'Logout_EndSession': self.IDLE_ACTION_LOGOUT
-        }
-        self.STARTUP_URLS = ['chrome://settings', 'chrome://policy']
-        self.SUPPORTING_POLICIES = {
-            'WaitForInitialUserActivity': True,
-            'UserActivityScreenDimDelayScale': 100,
-            'ChromeOsLockOnIdleSuspend': False,
-            'AllowScreenLock': False,
-            'AllowScreenWakeLocks': False,
-            'LidCloseAction': 3,
-            'RestoreOnStartup': 4,
-            'RestoreOnStartupURLs': self.STARTUP_URLS
-        }
-
-
-    def elapsed_time(self, start_time):
-        """Get time elapsed since |start_time|.
-
-        @param start_time: clock time from which elapsed time is measured.
-        @returns time elapsed since the start time.
-        """
-        return time.time() - start_time
-
-
-    def _simulate_user_activity(self):
-        """Inject user activity via D-bus to restart idle timer.
-
-        Note that if the screen has gone black, these user activities will
-        wake up the display again. However, they will not wake up a screen
-        that has merely been dimmed.
-
-        """
-        graphics_utils.click_mouse()  # Note: Duration is 0.4 seconds.
-        graphics_utils.press_keys(['KEY_LEFTCTRL'])
-
-
-    def _wait_for_login_status(self, attribute, value, timeout):
-        """Return when attribute has value, or its current value on timeout.
-
-        Login_status is a dictionary of attributes that describe the login
-        status of the current session. It contains values for the following
-        attributes: isLoggedIn, isRegularUser, isOwner, isKiosk, isGuest,
-        isScreenLocked, userImage, email, and displayEmail.
-
-        @param attribute: String attribute key to be measured.
-        @param value: Boolean attribute value expected.
-        @param timeout: integer seconds till timeout.
-        @returns dict of login status.
-
-        """
-        attribute_value = utils.wait_for_value(
-            lambda: self.cr.login_status[attribute],
-            expected_value=value,
-            timeout_sec=timeout)
-        return attribute_value
-
-
-    def _poll_until_user_is_logged_out(self, timeout):
-        """Return True when user logs out, False when user remains logged in.
-
-        @returns boolean of user logged out status.
-
-        """
-        my_result = utils.poll_for_condition(
-            lambda: not cryptohome.is_vault_mounted(user=self.username,
-                                                    allow_fail=True),
-            exception=None,
-            timeout=timeout,
-            sleep_interval=2,
-            desc='Polling for user to be logged out.')
-        return my_result
-
-
-    def _set_brightness_to_maximum(self):
-        """Set screen to maximum brightness."""
-        max_level = self._backlight.get_max_level()
-        self._backlight.set_level(max_level)
-
-
-    def _wait_for_brightness_change(self, timeout):
-        """Return screen brightness on update, or current value on timeout.
-
-        @returns float of screen brightness percentage.
-
-        """
-        initial_brightness = self._backlight.get_percent()
-        current_brightness = utils.wait_for_value_changed(
-            lambda: self._backlight.get_percent(),
-            old_value=initial_brightness,
-            timeout_sec=timeout)
-        if current_brightness != initial_brightness:
-            time.sleep(self.SCREEN_SETTLE_TIME)
-            current_brightness = self._backlight.get_percent()
-        return current_brightness
-
-
-    def _test_idle_action(self, policy_value):
-        """
-        Verify CrOS enforces PowerManagementIdleSettings policy value.
-
-        @param policy_value: policy value for this case.
-        @raises: TestFail if idle actions are not performed after their
-                 specified delays.
-
-        """
-        logging.info('Running _test_idle_action(%s)', policy_value)
-
-        # Wait until UI settles down with user logged in.
-        user_is_logged_in = self._wait_for_login_status(
-            'isLoggedIn', True, self.IDLE_ACTION_DELAY)
-        if not user_is_logged_in:
-            raise error.TestFail('User must be logged in at start.')
-
-        # Set screen to maximum brightness.
-        self._set_brightness_to_maximum()
-        max_brightness = self._backlight.get_percent()
-        logging.info('Brightness maximized to: %.2f', max_brightness)
-
-        # Induce user activity to start idle timer.
-        self._simulate_user_activity()
-        start_time = time.time()
-
-        # Verify screen is dimmed after expected delay.
-        seconds_to_dim = (
-            self.SCREEN_DIM_DELAY - self.elapsed_time(start_time))
-        dim_brightness = self._wait_for_brightness_change(seconds_to_dim)
-        dim_elapsed_time = self.elapsed_time(start_time)
-        logging.info('  Brightness dimmed to: %.2f, ', dim_brightness)
-        logging.info('  after %s seconds.', dim_elapsed_time)
-        if not (dim_brightness < max_brightness and dim_brightness > 0.0):
-            raise error.TestFail('Screen did not dim on delay.')
-
-        # Verify screen is turned off after expected delay.
-        seconds_to_off = (
-            self.SCREEN_OFF_DELAY - self.elapsed_time(start_time))
-        off_brightness = self._wait_for_brightness_change(seconds_to_off)
-        off_elapsed_time = self.elapsed_time(start_time)
-        logging.info('  Brightness off to: %.2f, ', off_brightness)
-        logging.info('  after %s seconds.', off_elapsed_time)
-        if not off_brightness < dim_brightness:
-            raise error.TestFail('Screen did not turn off on delay.')
-
-        # Verify user is still logged in before IdleAction.
-        user_is_logged_in = self.cr.login_status['isLoggedIn']
-        if not user_is_logged_in:
-            raise error.TestFail('User must be logged in before idle action.')
-
-        # Get user logged in state after IdleAction.
-        seconds_to_action = (
-            self.IDLE_ACTION_DELAY + self.ACTIVITY_REPORT_INTERVAL
-            - self.elapsed_time(start_time))
-        try:
-            user_is_logged_in = not self._poll_until_user_is_logged_out(
-                seconds_to_action)
-        except utils.TimeoutError:
-            pass
-        action_elapsed_time = self.elapsed_time(start_time)
-        logging.info('  User logged out: %r, ', not user_is_logged_in)
-        logging.info('  after %s seconds.', action_elapsed_time)
-
-        # Verify user status against expected result, based on case.
-        if self.case == 'NotSet_Sleep' or self.case == 'DoNothing_Continue':
-            if not user_is_logged_in:
-                raise error.TestFail('User should be logged in.')
-        elif self.case == 'Logout_EndSession':
-            if user_is_logged_in:
-                raise error.TestFail('User should be logged out.')
-
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        self.SUPPORTING_POLICIES[self.POLICY_NAME] = case_value
-        self.setup_case(user_policies=self.SUPPORTING_POLICIES)
-        self._test_idle_action(case_value)
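
The IDLE_ACTION_* constants above all follow one shape: per-power-source
delays in milliseconds plus an optional IdleAction. A minimal sketch of
building such a value from the second-based delays the test uses; the helper
name is hypothetical, not from the original file.

    def idle_settings(dim_s, warn_s, off_s, idle_s, idle_action=None):
        """Return an AC-power policy dict with delays converted to milliseconds."""
        ac = {
            'Delays': {
                'ScreenDim': dim_s * 1000,
                'IdleWarning': warn_s * 1000,
                'ScreenOff': off_s * 1000,
                'Idle': idle_s * 1000,
            }
        }
        if idle_action is not None:   # omit the key to exercise the "not set" case
            ac['IdleAction'] = idle_action
        return {'AC': ac}

    # Equivalent to IDLE_ACTION_DONOTHING above (4/6/8/10-second delays):
    assert idle_settings(4, 6, 8, 10, 'DoNothing') == {
        'AC': {'Delays': {'ScreenDim': 4000, 'IdleWarning': 6000,
                          'ScreenOff': 8000, 'Idle': 10000},
               'IdleAction': 'DoNothing'}}
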
diff --git a/client/site_tests/policy_PrintingEnabled/control b/client/site_tests/policy_PrintingEnabled/control
deleted file mode 100644
index 588bcd5..0000000
--- a/client/site_tests/policy_PrintingEnabled/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PrintingEnabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_PrintingEnabled policy.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_PrintingEnabled', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_PrintingEnabled/control.disabled b/client/site_tests/policy_PrintingEnabled/control.disabled
deleted file mode 100644
index b1265ee..0000000
--- a/client/site_tests/policy_PrintingEnabled/control.disabled
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PrintingEnabled.disabled'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that when the 'PrintingEnabled' policy is disabled (False), it disables
-printing inside the Chrome browser.
-
-'''
-
-job.run_test('policy_PrintingEnabled', case=False)
diff --git a/client/site_tests/policy_PrintingEnabled/control.enabled b/client/site_tests/policy_PrintingEnabled/control.enabled
deleted file mode 100644
index 221d5b1..0000000
--- a/client/site_tests/policy_PrintingEnabled/control.enabled
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PrintingEnabled.enabled'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that when the 'PrintingEnabled' policy is enabled (True), it enables
-printing inside the Chrome browser.
-
-'''
-
-job.run_test('policy_PrintingEnabled', case=True)
diff --git a/client/site_tests/policy_PrintingEnabled/control.not_set b/client/site_tests/policy_PrintingEnabled/control.not_set
deleted file mode 100644
index 24bd76f..0000000
--- a/client/site_tests/policy_PrintingEnabled/control.not_set
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_PrintingEnabled.not_set'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that when the 'PrintingEnabled' policy is not set (None), it allows
-printing inside the Chrome browser.
-
-'''
-
-job.run_test('policy_PrintingEnabled', case=None)
diff --git a/client/site_tests/policy_PrintingEnabled/policy_PrintingEnabled.py b/client/site_tests/policy_PrintingEnabled/policy_PrintingEnabled.py
deleted file mode 100644
index 3e41af8..0000000
--- a/client/site_tests/policy_PrintingEnabled/policy_PrintingEnabled.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_PrintingEnabled(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """Test effect of PrintingEnabled policy on Chrome OS."""
-    version = 1
-
-    POLICY_NAME = 'PrintingEnabled'
-
-    def _print_check(self, case):
-        """
-        Click the dropdown menu in Chrome and check whether the print option
-        is greyed out.
-
-        @param case: bool or None, the setting of the PrintingEnabled Policy
-
-        """
-        self.ui.doDefault_on_obj('Chrome')
-        self.ui.wait_for_ui_obj('/Print/', role='menuItem', isRegex=True)
-        print_disabled = self.ui.is_obj_restricted('/Print/',
-                                                   role='menuItem',
-                                                   isRegex=True)
-        if case is not False and print_disabled:
-            raise error.TestError('Printing not enabled when it should be')
-        elif case is False and not print_disabled:
-            raise error.TestError('Printing enabled when it should not be')
-
-    def run_once(self, case):
-        """
-        Entry point of the test.
-
-        @param case: Name of the test case to run.
-
-        """
-        self.setup_case(user_policies={'PrintingEnabled': case},
-                        disable_default_apps=False)
-
-        self.ui.start_ui_root(self.cr)
-        self._print_check(case)
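
The expectation encoded in _print_check() above is a simple tri-state rule:
only an explicit False disables printing, while True and "not set" (None)
both leave it enabled. A minimal sketch of that rule; the helper name is
illustrative, not from the original file.

    def printing_expected_enabled(policy_value):
        """Return whether printing should be available for a PrintingEnabled value."""
        return policy_value is not False

    assert printing_expected_enabled(True)       # control.enabled
    assert printing_expected_enabled(None)       # control.not_set
    assert not printing_expected_enabled(False)  # control.disabled
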
diff --git a/client/site_tests/policy_ProxySettings/control b/client/site_tests/policy_ProxySettings/control
deleted file mode 100644
index adee94f..0000000
--- a/client/site_tests/policy_ProxySettings/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_ProxySettings'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_ProxySettings policy on client behavior.
-
-This test verifies the effect of the ProxySettings user policy on Chrome OS
-client behavior. It also tests setting proxies through the
-OpenNetworkConfiguration policy.
-
-In general, the test case shall pass if Chrome OS uses the correct proxy,
-as specified by the policy value. It fails if the wrong proxy is used.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ProxySettings', **args_dict)
diff --git a/client/site_tests/policy_ProxySettings/control.directproxy_usenoproxy b/client/site_tests/policy_ProxySettings/control.directproxy_usenoproxy
deleted file mode 100644
index c9eebfc..0000000
--- a/client/site_tests/policy_ProxySettings/control.directproxy_usenoproxy
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_ProxySettings.directproxy_usenoproxy'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_ProxySettings policy on client behavior.
-
-This test verifies the effect of the ProxySettings user policy on Chrome OS
-client behavior.
-
-The test case shall pass if Chrome OS does not use a proxy. It fails if a
-proxy is used.
-
-'''
-
-job.run_test('policy_ProxySettings', case='DirectProxy_UseNoProxy')
diff --git a/client/site_tests/policy_ProxySettings/control.directproxy_usenoproxy_onc b/client/site_tests/policy_ProxySettings/control.directproxy_usenoproxy_onc
deleted file mode 100644
index 5cda2b0..0000000
--- a/client/site_tests/policy_ProxySettings/control.directproxy_usenoproxy_onc
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_ProxySettings.directproxy_usenoproxy_onc'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_ProxySettings policy on client behavior.
-
-This test verifies the effect of the ONC proxy user policy on Chrome OS
-client behavior.
-
-The test case shall pass if Chrome OS does not use a proxy. It fails if a
-proxy is used.
-
-'''
-
-job.run_test('policy_ProxySettings', case='DirectProxy_UseNoProxy_ONC')
diff --git a/client/site_tests/policy_ProxySettings/control.fixedproxy_usefixedproxy b/client/site_tests/policy_ProxySettings/control.fixedproxy_usefixedproxy
deleted file mode 100644
index 0b60c79..0000000
--- a/client/site_tests/policy_ProxySettings/control.fixedproxy_usefixedproxy
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_ProxySettings.fixedproxy_usefixedproxy'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_ProxySettings policy on client behavior.
-
-This test verifies the effect of the ProxySettings user policy on Chrome OS
-client behavior.
-
-In general, the test case shall pass if Chrome OS uses the fixed proxy
-as specified by the policy. It fails if the wrong proxy is used.
-
-'''
-
-job.run_test('policy_ProxySettings', case='FixedProxy_UseFixedProxy')
diff --git a/client/site_tests/policy_ProxySettings/control.fixedproxy_usefixedproxy_onc b/client/site_tests/policy_ProxySettings/control.fixedproxy_usefixedproxy_onc
deleted file mode 100644
index 0e3c3dd..0000000
--- a/client/site_tests/policy_ProxySettings/control.fixedproxy_usefixedproxy_onc
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_ProxySettings.fixedproxy_usefixedproxy_onc'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_ProxySettings policy on client behavior.
-
-This test verifies the effect of the ONC proxy user policy on Chrome OS
-client behavior.
-
-In general, the test case shall pass if Chrome OS uses the fixed proxy
-as specified by the policy. It fails if the wrong proxy is used.
-
-'''
-
-job.run_test('policy_ProxySettings', case='FixedProxy_UseFixedProxy_ONC')
diff --git a/client/site_tests/policy_ProxySettings/control.notset_usenoproxy b/client/site_tests/policy_ProxySettings/control.notset_usenoproxy
deleted file mode 100644
index 0bd83ea..0000000
--- a/client/site_tests/policy_ProxySettings/control.notset_usenoproxy
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_ProxySettings.notset_usenoproxy'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_ProxySettings policy on client behavior.
-
-This test verifies the effect of the ProxySettings user policy on Chrome OS
-client behavior.
-
-The test case shall pass if Chrome OS does not use a proxy. It fails if a
-proxy is used.
-
-'''
-
-job.run_test('policy_ProxySettings', case='NotSet_UseNoProxy')
diff --git a/client/site_tests/policy_ProxySettings/control.pacproxy_usepacfile b/client/site_tests/policy_ProxySettings/control.pacproxy_usepacfile
deleted file mode 100644
index f0e7e8b..0000000
--- a/client/site_tests/policy_ProxySettings/control.pacproxy_usepacfile
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krishnargv'
-NAME = 'policy_ProxySettings.pacproxy_usepacfile'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_ProxySettings policy on client behavior.
-
-This test verifies the effect of the ProxySettings user policy on Chrome OS
-client behavior.
-
-The test case shall pass if Chrome OS uses the specified PAC file to determine
-the proxy. It fails if the PAC file is not used.
-
-'''
-
-job.run_test('policy_ProxySettings', case='PacProxy_UsePacFile')
diff --git a/client/site_tests/policy_ProxySettings/control.pacproxy_usepacfile_onc b/client/site_tests/policy_ProxySettings/control.pacproxy_usepacfile_onc
deleted file mode 100644
index dbbe666..0000000
--- a/client/site_tests/policy_ProxySettings/control.pacproxy_usepacfile_onc
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_ProxySettings.pacproxy_usepacfile_onc'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of policy_ProxySettings policy on client behavior.
-
-This test verifies the effect of the ONC proxy user policy on Chrome OS
-client behavior.
-
-The test case shall pass if Chrome OS uses the specified PAC file to determine
-the proxy. It fails if the PAC file is not used.
-
-'''
-
-job.run_test('policy_ProxySettings', case='PacProxy_UsePacFile_ONC')
diff --git a/client/site_tests/policy_ProxySettings/policy_ProxySettings.py b/client/site_tests/policy_ProxySettings/policy_ProxySettings.py
deleted file mode 100644
index 7295722..0000000
--- a/client/site_tests/policy_ProxySettings/policy_ProxySettings.py
+++ /dev/null
@@ -1,248 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import sys
-import threading
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.enterprise.network_config import ProxyConfig
-from SocketServer import ThreadingTCPServer, StreamRequestHandler
-from telemetry.core import exceptions as telemetry_exceptions
-
-
-class ProxyHandler(StreamRequestHandler):
-    """Provide request handler for the Threaded Proxy Listener."""
-    def handle(self):
-        """
-        Get URL of request from first line.
-
-        Read the first line of the request, up to 40 characters, and look
-        for the URL of the request. If found, save it to the URL list.
-
-        Note: All requests are sent an HTTP 504 error.
-
-        """
-        # Capture URL in first 40 chars of request.
-        data = self.rfile.readline(40).strip()
-        logging.debug('ProxyHandler::handle(): <%s>', data)
-        self.server.store_requests_received(data)
-        self.wfile.write('HTTP/1.1 504 Gateway Timeout\r\n'
-                         'Connection: close\r\n\r\n')
-
-
-class ThreadedProxyServer(ThreadingTCPServer):
-    """
-    Provide a Threaded Proxy Server to service and save requests.
-
-    Define a Threaded Proxy Server which services requests, and allows the
-    handler to save all requests.
-
-    """
-    def __init__(self, server_address, HandlerClass):
-        """
-        Constructor.
-
-        @param server_address: tuple of server IP and port to listen on.
-        @param HandlerClass: the RequestHandler class to instantiate per req.
-
-        """
-        self.requests_received = []
-        ThreadingTCPServer.allow_reuse_address = True
-        ThreadingTCPServer.__init__(self, server_address, HandlerClass)
-
-    def store_requests_received(self, request):
-        """
-        Add received request to list.
-
-        @param request: request received by the proxy server.
-
-        """
-        self.requests_received.append(request)
-
-
-class ProxyListener(object):
-    """
-    Provide a Proxy Listener to detect connect requests.
-
-    Define a proxy listener to detect when a CONNECT request is seen at the
-    given |server_address|, and record all requests received. Requests
-    received are exposed to the caller.
-
-    """
-    def __init__(self, server_address):
-        """
-        Constructor.
-
-        @param server_address: tuple of server IP and port to listen on.
-
-        """
-        self._server = ThreadedProxyServer(server_address, ProxyHandler)
-        self._thread = threading.Thread(target=self._server.serve_forever)
-
-    def run(self):
-        """Start the server by activating its thread."""
-        self._thread.start()
-
-    def stop(self):
-        """Stop the server and its threads."""
-        self._server.server_close()
-        self._thread.join()
-
-    def get_requests_received(self):
-        """Get list of received requests."""
-        return self._server.requests_received
-
-    def reset_requests_received(self):
-        """Clear list of received requests."""
-        self._server.requests_received = []
-
-
-class policy_ProxySettings(enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of ProxySettings policy on Chrome OS behavior.
-
-    This test verifies the behavior of Chrome OS for specific configurations
-    of the ProxySettings user policy: None (undefined), ProxyMode=direct,
-    ProxyMode=fixed_servers, ProxyMode=pac_script. None means that the policy
-    value is not set. This induces the default behavior, equivalent to what is
-    seen by an un-managed user.
-
-    When ProxySettings is None (undefined) or ProxyMode=direct, then no proxy
-    server should be used. When ProxyMode=fixed_servers or pac_script, then
-    the proxy server address specified by the ProxyServer or ProxyPacUrl
-    entry should be used.
-
-    """
-    version = 1
-
-    def initialize(self, **kwargs):
-        """Initialize this test."""
-        self._initialize_test_constants()
-        super(policy_ProxySettings, self).initialize(**kwargs)
-        self._proxy_server = ProxyListener(('', self.PROXY_PORT))
-        self._proxy_server.run()
-        self.start_webserver()
-
-
-    def _initialize_test_constants(self):
-        """Initialize test-specific constants, some from class constants."""
-        self.POLICY_NAME = 'ProxySettings'
-        self.PROXY_PORT = 3128
-        self.PAC_FILE = 'proxy_test.pac'
-        self.PAC_URL = '%s/%s' % (self.WEB_HOST, self.PAC_FILE)
-        self.BYPASS_URLS = ['www.google.com', 'www.googleapis.com']
-        self.FIXED_PROXY = {
-            'ProxyBypassList': ','.join(self.BYPASS_URLS),
-            'ProxyMode': 'fixed_servers',
-            'ProxyServer': 'localhost:%s' % self.PROXY_PORT
-        }
-        self.PAC_PROXY = {
-            'ProxyMode': 'pac_script',
-            'ProxyPacUrl': self.PAC_URL
-        }
-        self.DIRECT_PROXY = {
-            'ProxyMode': 'direct'
-        }
-        self.TEST_URL = 'http://www.cnn.com/'
-        self.TEST_CASES = {
-            'FixedProxy_UseFixedProxy': self.FIXED_PROXY,
-            'PacProxy_UsePacFile': self.PAC_PROXY,
-            'DirectProxy_UseNoProxy': self.DIRECT_PROXY,
-            'NotSet_UseNoProxy': None,
-        }
-        self.PROXY_CONFIGS = {
-            'DirectProxy_UseNoProxy_ONC': ProxyConfig(type='Direct'),
-            'PacProxy_UsePacFile_ONC': ProxyConfig(type='PAC',
-                                           pac_url=self.PAC_URL),
-            'FixedProxy_UseFixedProxy_ONC': ProxyConfig(type='Manual',
-                                                host='localhost',
-                                                port=self.PROXY_PORT,
-                                                exclude_urls=self.BYPASS_URLS)
-        }
-
-
-    def cleanup(self):
-        """Stop proxy server and cleanup."""
-        self._proxy_server.stop()
-        super(policy_ProxySettings, self).cleanup()
-
-
-    def navigate_to_url_with_retry(self, url, total_tries=1):
-        """
-        Navigate to url, retrying up to total_tries times if it fails to load.
-
-        @param url: string of the url to load.
-        @param total_tries: number of attempts to load the page.
-
-        @raises: error.TestError if page load times out.
-
-        """
-        for i in xrange(total_tries):
-            try:
-                self.navigate_to_url(url)
-            except telemetry_exceptions.TimeoutError as e:
-                if i == total_tries - 1:
-                    logging.error('Timeout error: %s [%s].', str(e),
-                                  sys.exc_info())
-                    raise error.TestError('Could not load %s after '
-                                          '%s tries.' % (url, total_tries))
-                else:
-                    logging.debug('Retrying page load of %s.', url)
-                    logging.debug('Timeout error: %s.', str(e))
-            else:
-                break
-
-
-    def _test_proxy_configuration(self, mode):
-        """
-        Verify CrOS enforces the specified ProxySettings configuration.
-
-        @param mode: Type of proxy.
-
-        @raises error.TestFail if behavior does not match expected.
-
-        """
-        self._proxy_server.reset_requests_received()
-        self.navigate_to_url_with_retry(url=self.TEST_URL, total_tries=2)
-        proxied_requests = self._proxy_server.get_requests_received()
-
-        matching_requests = [request for request in proxied_requests
-                             if self.TEST_URL in request]
-        logging.info('matching_requests: %s', matching_requests)
-
-        if mode is None or mode == 'direct':
-            if matching_requests:
-                raise error.TestFail('Requests should not have been sent '
-                                     'through the proxy server.')
-        elif mode == 'fixed_servers' or mode == 'pac_script':
-            if not matching_requests:
-                raise error.TestFail('Requests should have been sent '
-                                     'through the proxy server.')
-        else:
-            raise error.TestFail('Unrecognized Mode %s' % mode)
-
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        Sets up a proxy using either the ProxyMode policy or ONC policy.
-
-        @param case: Name of the test case to run: see TEST_CASES.
-
-        """
-        if case.endswith('_ONC'):
-            proxy = self.PROXY_CONFIGS[case]
-            self.setup_case(user_policies={
-                'OpenNetworkConfiguration': proxy.policy()
-            })
-            mode = proxy.mode()
-        else:
-            case_value = self.TEST_CASES[case]
-            self.setup_case(user_policies={self.POLICY_NAME: case_value})
-            mode = case_value['ProxyMode'] if case_value else None
-
-        self._test_proxy_configuration(mode)
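
For reference, a minimal Python 3 sketch of the threaded proxy-listener
pattern the removed policy_ProxySettings test relied on: a ThreadingTCPServer
records the request line of every connection and answers with HTTP 504. Only
the standard library is used here; the autotest plumbing is not reproduced,
and the port number simply mirrors the test's default.

# Illustrative sketch only -- not part of this patch.
import socketserver
import threading

class _Handler(socketserver.StreamRequestHandler):
    def handle(self):
        # Capture the first 40 bytes of the request line, then reply 504.
        line = self.rfile.readline(40).strip()
        self.server.requests.append(line)
        self.wfile.write(b'HTTP/1.1 504 Gateway Timeout\r\n'
                         b'Connection: close\r\n\r\n')

class _RecordingProxy(socketserver.ThreadingTCPServer):
    allow_reuse_address = True

    def __init__(self, address):
        self.requests = []
        super().__init__(address, _Handler)

proxy = _RecordingProxy(('', 3128))
worker = threading.Thread(target=proxy.serve_forever, daemon=True)
worker.start()
# ... point a client at localhost:3128, then inspect proxy.requests ...
proxy.shutdown()
proxy.server_close()
worker.join()
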
diff --git a/client/site_tests/policy_ReportUploadFrequency/control b/client/site_tests/policy_ReportUploadFrequency/control
deleted file mode 100644
index 54bf5cd..0000000
--- a/client/site_tests/policy_ReportUploadFrequency/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'carvalheira'
-NAME = 'policy_ReportUploadFrequency'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify that the ReportUploadFrequency policy works correctly.
-It verifies the minimum report interval, which is 60s.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_ReportUploadFrequency', **args_dict)
diff --git a/client/site_tests/policy_ReportUploadFrequency/policy_ReportUploadFrequency.py b/client/site_tests/policy_ReportUploadFrequency/policy_ReportUploadFrequency.py
deleted file mode 100644
index 20d604f..0000000
--- a/client/site_tests/policy_ReportUploadFrequency/policy_ReportUploadFrequency.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-from telemetry.core import exceptions
-
-
-class policy_ReportUploadFrequency(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Tests the ReportUploadFrequency policy in Chrome OS.
-
-    """
-
-    version = 1
-
-
-    def initialize(self, **kwargs):
-        super(policy_ReportUploadFrequency, self).initialize(**kwargs)
-
-        self.POLICY_NAME = 'ReportUploadFrequency'
-        self.POLICIES = {}
-        self.TEST_CASES = {
-            '60s': 60000,
-        }
-
-
-    def _check_report_upload_frequency(self):
-        """
-        Grep syslog for "Starting status upload: have_device_status = 1" line
-
-        @raises error.TestFail: if no status upload is seen within 60s.
-
-        """
-
-        def is_log_present():
-            """
-            Checks to see if logs have been written.
-
-            @returns True if written, False if not.
-
-            """
-            try:
-                if 'Starting status upload: has_device_status = 1' in open(
-                    '/var/log/messages').read():
-                        return True
-            except (IOError, exceptions.EvaluateException):
-                return False
-
-        utils.poll_for_condition(
-            lambda: is_log_present(),
-            exception=error.TestFail('No status upload sent.'),
-            timeout=60,
-            sleep_interval=5,
-            desc='Polling for logs to be written.')
-
-
-    def run_once(self, case):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param case: Name of the test case to run.
-
-        """
-
-        case_value = self.TEST_CASES[case]
-        self.POLICIES[self.POLICY_NAME] = case_value
-
-        self.setup_case(device_policies=self.POLICIES, enroll=True)
-        self._check_report_upload_frequency()
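
For reference, a plain-Python sketch of the log-polling check the removed
policy_ReportUploadFrequency test performed. autotest's
utils.poll_for_condition is replaced by a simple loop; the file path and
marker string come from the test, while the helper name is illustrative.

# Illustrative sketch only -- not part of this patch.
import time

def wait_for_status_upload(path='/var/log/messages',
                           marker='Starting status upload: '
                                  'has_device_status = 1',
                           timeout=60, interval=5):
    """Return once |marker| appears in |path|; raise TimeoutError otherwise."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with open(path) as log:
                if marker in log.read():
                    return
        except OSError:
            pass  # The log file may not be readable yet; keep polling.
        time.sleep(interval)
    raise TimeoutError('No status upload seen within %ds' % timeout)
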
diff --git a/client/site_tests/policy_RestoreOnStartupURLs/control b/client/site_tests/policy_RestoreOnStartupURLs/control
deleted file mode 100644
index eb873c1..0000000
--- a/client/site_tests/policy_RestoreOnStartupURLs/control
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_RestoreOnStartupURLs'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of RestoreOnStartupURLs policy on client behavior.
-
-This test verifies the effect of the RestoreOnStartupURLs user policy on
-Chrome OS client behavior and appearance. It exercises a range of valid
-policy values across three test cases.
-
-A test case shall pass when one browser tab is opened for each of, and only,
-the URLs specified in the policy value, in the order specified (left to right).
-A test case shall fail if the above behavior is not enforced.
-
-See accompanying README file for usage instructions and examples.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_RestoreOnStartupURLs', **args_dict)
-
diff --git a/client/site_tests/policy_RestoreOnStartupURLs/control.multipleurls_3tabs b/client/site_tests/policy_RestoreOnStartupURLs/control.multipleurls_3tabs
deleted file mode 100644
index 080d8d0..0000000
--- a/client/site_tests/policy_RestoreOnStartupURLs/control.multipleurls_3tabs
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_RestoreOnStartupURLs.multipleurls_3tabs'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of RestoreOnStartupURLs policy on client behavior & appearance.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-RestoreOnStartupURLs user policy specifies multiple URLs.
-
-The test case shall pass when exactly one browser tab is opened for each of
-the URLs specified in the policy value, in the order specified (left to
-right). It shall fail if the above behavior is not enforced.
-
-'''
-
-job.run_test('policy_RestoreOnStartupURLs', case='MultipleUrls_3Tabs')
diff --git a/client/site_tests/policy_RestoreOnStartupURLs/control.notset_notabs b/client/site_tests/policy_RestoreOnStartupURLs/control.notset_notabs
deleted file mode 100644
index 30a5865..0000000
--- a/client/site_tests/policy_RestoreOnStartupURLs/control.notset_notabs
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_RestoreOnStartupURLs.notset_notabs'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of RestoreOnStartupURLs policy on client behavior & appearance.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-RestoreOnStartupURLs user policy is not set.
-
-The test case shall pass when no browser tabs are opened upon startup. It
-shall fail if the above behavior is not enforced.
-
-'''
-
-job.run_test('policy_RestoreOnStartupURLs', case='NotSet_NoTabs')
diff --git a/client/site_tests/policy_RestoreOnStartupURLs/control.singleurl_1tab b/client/site_tests/policy_RestoreOnStartupURLs/control.singleurl_1tab
deleted file mode 100644
index 36466e5..0000000
--- a/client/site_tests/policy_RestoreOnStartupURLs/control.singleurl_1tab
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'scunningham'
-NAME = 'policy_RestoreOnStartupURLs.singleurl_1tab'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effects of RestoreOnStartupURLs policy on client behavior & appearance.
-
-This test case verifies the behavior and appearance of Chrome OS when the
-RestoreOnStartupURLs user policy specifies a single URL.
-
-The test case shall pass when a single browser tab is opened for the URL
-specified in the policy value. It shall fail if the above behavior is not
-enforced.
-
-'''
-
-job.run_test('policy_RestoreOnStartupURLs', case='SingleUrl_1Tab')
diff --git a/client/site_tests/policy_RestoreOnStartupURLs/policy_RestoreOnStartupURLs.py b/client/site_tests/policy_RestoreOnStartupURLs/policy_RestoreOnStartupURLs.py
deleted file mode 100644
index 3fc24fb..0000000
--- a/client/site_tests/policy_RestoreOnStartupURLs/policy_RestoreOnStartupURLs.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_RestoreOnStartupURLs(enterprise_policy_base.EnterprisePolicyTest):
-    """Test effect of RestoreOnStartupURLs policy on Chrome OS behavior.
-
-    This test verifies the behavior of Chrome OS for a range of valid values
-    in the RestoreOnStartupURLs user policy. It also exercises the dependent
-    user policy RestoreOnStartup, which must be set to 4 to utilize the
-    specified startup URLs, and to None when no URLs are specified.
-
-    The combination of policy values are covered by three test cases named:
-    NotSet_NoTabs, SingleUrl_1Tab, and MultipleUrls_3Tabs.
-    - Case NotSet_NoTabs opens no tabs. This is the default behavior for
-      un-managed user and guest user sessions.
-    - Case SingleUrl_1Tab opens a single tab to chrome://settings.
-    - Case MultipleUrls_3Tabs opens 3 tabs, in order, to the following pages:
-      'chrome://policy', 'chrome://settings', and 'chrome://histograms'
-
-    """
-    version = 1
-
-    POLICY_NAME = 'RestoreOnStartupURLs'
-    URLS1_DATA = ['chrome://settings']
-    URLS3_DATA = ['chrome://policy', 'chrome://settings',
-                  'chrome://histograms']
-    NEWTAB_URLS = ['chrome://newtab',
-                   'https://www.google.com/_/chrome/newtab?espv=2&ie=UTF-8']
-
-    TEST_CASES = {
-        'NotSet_NoTabs': None,
-        'SingleUrl_1Tab': URLS1_DATA,
-        'MultipleUrls_3Tabs': URLS3_DATA
-    }
-
-    def _test_startup_urls(self, policy_value):
-        """Verify CrOS enforces RestoreOnStartupURLs policy value.
-
-        When RestoreOnStartupURLs policy is set to one or more URLs, check
-        that a tab is opened to each URL. When set to None, check that no tab
-        is opened.
-
-        @param policy_value: policy value expected.
-
-        """
-        # Get list of open tab urls from browser; Convert unicode to text;
-        # Strip any trailing '/' character reported by devtools.
-        tab_urls = [tab.url.encode('utf8').rstrip('/')
-                    for tab in self.cr.browser.tabs]
-
-        # Telemetry always opens a 'newtab' tab if no startup tabs are opened.
-        if policy_value is None:
-            if len(tab_urls) != 1 or tab_urls[0] not in self.NEWTAB_URLS:
-                raise error.TestFail('Unexpected tabs: %s (expected: NEWTAB)' %
-                                     tab_urls)
-
-        # Compare open tabs with expected tabs by |policy_value|.
-        elif set(tab_urls) != set(policy_value):
-            raise error.TestFail('Unexpected tabs: %s (expected: %s)' %
-                                 (tab_urls, policy_value))
-
-
-    def run_once(self, case):
-        """Setup and run the test configured for the specified test case.
-
-        Set the |case_value| and |user_policies| data based on the test
-        |case|. Set RestoreOnStartup=4 when one or more startup URLs are
-        given. Otherwise, set it to None.
-
-        @param case: Name of the test case to run.
-
-        """
-        case_value = self.TEST_CASES[case]
-        if case_value == None:
-            user_policies = {'RestoreOnStartup': None}
-        else:
-            user_policies = {'RestoreOnStartup': 4}
-        user_policies[self.POLICY_NAME] = case_value
-        self.setup_case(user_policies=user_policies)
-        self._test_startup_urls(case_value)
diff --git a/client/site_tests/policy_SystemTimezone/control b/client/site_tests/policy_SystemTimezone/control
deleted file mode 100644
index 4917678..0000000
--- a/client/site_tests/policy_SystemTimezone/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_SystemTimezone'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-This test verifies the effect of the SystemTimezone device policy on Chrome OS
-client behavior. This policy does not take effect until the user logs out.
-
-A test case shall pass if:
- - The proper timezone is set
- - The timezone reported by the DATE function is correct
- - When the timezone is not set, it is user adjustable
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_SystemTimezone', **args_dict)
diff --git a/client/site_tests/policy_SystemTimezone/policy_SystemTimezone.py b/client/site_tests/policy_SystemTimezone/policy_SystemTimezone.py
deleted file mode 100644
index 43cc663..0000000
--- a/client/site_tests/policy_SystemTimezone/policy_SystemTimezone.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-import logging
-import time
-
-
-class policy_SystemTimezone(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test effect of SystemTimezone policy on Chrome OS behavior.
-
-    This will test that both the timezone can be set by the policy, and that
-    when the policy is not set a user can change the settings.
-
-    """
-    version = 1
-    POLICY_NAME = 'SystemTimezone'
-
-    def _navigate_chromeos_settings_to_timezone(self):
-        """
-        Navigate the ChromeOS Settings app (new as of R79) to the 'Time zone'
-        page.
-
-        """
-        self.ui.click_and_wait_for_item_with_retries('/tray/', 'Settings', True)
-        self.ui.click_and_wait_for_item_with_retries('Settings', 'Advanced')
-        self.ui.click_and_wait_for_item_with_retries('Advanced',
-                                                     'Date and time')
-        self.ui.click_and_wait_for_item_with_retries('Date and time',
-                                                     'Time zone')
-        self.ui.click_and_wait_for_item_with_retries('Time zone',
-                                                     'Choose from list',
-                                                     click_role='link')
-
-    def _is_timezone_selectable(self):
-        """
-        Check if the timezone is selectable via the UI. If the timezone
-        dropdown is greyed out, then it is not selectable.
-
-        @returns: True if dropdown is usable, False if not.
-
-        """
-        self._navigate_chromeos_settings_to_timezone()
-        self.ui.doDefault_on_obj('Choose from list')
-
-        # Give the dropdown a second to load (there is no object to wait on).
-        time.sleep(2)
-        return not self.ui.is_obj_restricted('Time zone', role='popUpButton')
-
-    def _set_timezone(self):
-        """Sets the timezone to the first option in the list."""
-        self.ui.doDefault_on_obj('/(UTC-10:00)/',
-                                 isRegex=True,
-                                 role='menuListOption')
-
-    def _test_timezone(self, expected):
-        """
-        Verify the Timezone set on the device.
-
-        This is done by running the UNIX date command (%z) and verifying the
-        timezone matches the expected result.
-
-        """
-        def check_timezone(expected):
-            result = utils.system_output('date +%z')
-            logging.info('date result {}'.format(result))
-            return result == expected
-
-        utils.poll_for_condition(
-            lambda: check_timezone(expected),
-            exception=error.TestFail('Time zone was not set! Expected {}'
-                                     .format(expected)),
-            timeout=30,
-            sleep_interval=1,
-            desc='Polling for timezone change')
-
-    def set_timezones(self):
-        """
-        Set a non-default time zone via policy and verify it takes effect.
-
-        The expected UTC offset (+0545) is checked so that the reported
-        timezone is seen to actually change, rather than matching the
-        default by luck.
-
-        """
-        case = {'policy': 'Asia/Kathmandu', 'expected': '+0545'}
-
-        policy_value = case['policy']
-        expected = case['expected']
-        policies = {self.POLICY_NAME: policy_value}
-        self.setup_case(device_policies=policies, enroll=True)
-        self.ui.start_ui_root(self.cr)
-        # Logout so the policy can take effect
-        if self._is_timezone_selectable():
-            raise error.TestError(
-                'Timezone is selectable when the policy is set')
-        self.log_out_via_keyboard()
-        self._test_timezone(expected)
-
-
-    def set_empty_timezone(self):
-        """
-        Set and verify the timezone when the policy is empty.
-
-        This will be done by adjusting the setting on the ://settings page,
-        and verifying the date reported. Additionally log out, then verify the
-        timezone matches as well.
-
-        """
-
-        policies = {self.POLICY_NAME: ''}
-        self.setup_case(device_policies=policies, enroll=True)
-        self.ui.start_ui_root(self.cr)
-
-        # Check if the timezone is changeable in the settings.
-        if not self._is_timezone_selectable():
-            raise error.TestError('User cannot change timezone')
-        self._set_timezone()
-
-        self._test_timezone('-1000')
-
-        self.log_out_via_keyboard()
-        self._test_timezone('-1000')
-
-    def run_once(self, case):
-        """
-        Run the proper test based on the selected case.
-
-        @param case: bool or None, value of the test case to run.
-
-        """
-        if case:
-            self.set_timezones()
-        else:
-            self.set_empty_timezone()
diff --git a/client/site_tests/policy_UIUtilsSmokeTest/control b/client/site_tests/policy_UIUtilsSmokeTest/control
deleted file mode 100644
index 6d97e4f..0000000
--- a/client/site_tests/policy_UIUtilsSmokeTest/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_UIUtilsSmokeTest'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-A quick test to ensure the ui_utils are not broken.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_UIUtilsSmokeTest', **args_dict)
\ No newline at end of file
diff --git a/client/site_tests/policy_UIUtilsSmokeTest/policy_UIUtilsSmokeTest.py b/client/site_tests/policy_UIUtilsSmokeTest/policy_UIUtilsSmokeTest.py
deleted file mode 100644
index 522cb5a..0000000
--- a/client/site_tests/policy_UIUtilsSmokeTest/policy_UIUtilsSmokeTest.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_UIUtilsSmokeTest(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Simple test to check that the ui_utils are mostly working. Good to run this
-    if major changes are made to the utils file.
-
-    When new features are added, attempt to add them to this test.
-
-    """
-    version = 1
-
-    def _smoke_test(self):
-        """The test."""
-        self.ui.start_ui_root(self.cr)
-
-        # Checks if both this and the list_screen_items functions are working.
-        if not self.ui.get_name_role_list():
-            raise error.TestError('No items returned from entire screen')
-
-        self.ui.doDefault_on_obj(name='Launcher', role='button')
-
-        # Check the doCommand, wait_for_ui_obj and item_present
-        self.ui.doCommand_on_obj(name='Launcher',
-                                 role='button',
-                                 cmd='showContextMenu()')
-        self.ui.wait_for_ui_obj(name='/Autohide/',
-                                isRegex=True,
-                                role='menuItem')
-
-        if self.ui.is_obj_restricted(name='Launcher', role='button',):
-            raise error.TestError('Launcher should not be restricted')
-        if len(self.ui.list_screen_items()) == 0:
-            raise error.TestError("list_screen_items returned no items")
-        self.ui.click_and_wait_for_item_with_retries('/tray/', 'Settings', True)
-
-    def run_once(self):
-        """Run the test."""
-        self.setup_case()
-        self._smoke_test()
diff --git a/client/site_tests/policy_VirtualMachinesAllowed/control b/client/site_tests/policy_VirtualMachinesAllowed/control
deleted file mode 100644
index a39359c..0000000
--- a/client/site_tests/policy_VirtualMachinesAllowed/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_VirtualMachinesAllowed'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies whether virtual machines are allowed or not. This test is kicked
-off via the policy_DeviceServer server test.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_VirtualMachinesAllowed', **args_dict)
diff --git a/client/site_tests/policy_VirtualMachinesAllowed/policy_VirtualMachinesAllowed.py b/client/site_tests/policy_VirtualMachinesAllowed/policy_VirtualMachinesAllowed.py
deleted file mode 100644
index 6ed0296..0000000
--- a/client/site_tests/policy_VirtualMachinesAllowed/policy_VirtualMachinesAllowed.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import retry
-
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-from py_utils import TimeoutException
-
-
-class policy_VirtualMachinesAllowed(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test for the VirtualMachinesAllowed policy.
-
-    If the policy is set to True then installing linux is allowed.
-    If the policy is set to False/None then installing linux is not allowed.
-
-    """
-    version = 1
-    _POLICY = 'VirtualMachinesAllowed'
-
-    @retry.retry(TimeoutException, timeout_min=5, delay_sec=10)
-    def _run_setup_case(self, case):
-        self.setup_case(device_policies={self._POLICY: case}, enroll=True)
-
-    def run_once(self, case):
-        """
-        Entry point of this test.
-
-        @param case: True, False, or None for the value of the policy.
-
-        """
-        self._run_setup_case(case)
-        self.ui.start_ui_root(self.cr)
-
-        self.cr.autotest_ext.ExecuteJavaScript('''
-            chrome.autotestPrivate.runCrostiniInstaller(function() {})
-        ''')
-
-        if case:
-            self.ui.wait_for_ui_obj(name='/Linux/', isRegex=True)
-        else:
-            if not self.ui.did_obj_not_load(name='/Linux/', isRegex=True):
-                raise error.TestFail(
-                    'Linux is installing and it should not be.')
diff --git a/client/site_tests/policy_WiFiAutoconnect/control b/client/site_tests/policy_WiFiAutoconnect/control
deleted file mode 100644
index a6c0926..0000000
--- a/client/site_tests/policy_WiFiAutoconnect/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_WiFiAutoconnect'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of policy_WiFiAutoconnect policy on Chrome OS behavior.
-
-Sets network configuration policy and then verifies that device autoconnects
-(or doesn't) based on policy settings.
-
-Trigger this through the 'policy_WiFiAutoconnectServer' test.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_WiFiAutoconnect', **args_dict)
-
diff --git a/client/site_tests/policy_WiFiAutoconnect/policy_WiFiAutoconnect.py b/client/site_tests/policy_WiFiAutoconnect/policy_WiFiAutoconnect.py
deleted file mode 100644
index 0b2db47..0000000
--- a/client/site_tests/policy_WiFiAutoconnect/policy_WiFiAutoconnect.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.enterprise import enterprise_network_api
-from autotest_lib.client.cros.enterprise import network_config
-
-
-class policy_WiFiAutoconnect(
-        enterprise_policy_base.EnterprisePolicyTest):
-    version = 1
-
-
-    def cleanup(self):
-        """Re-enable ethernet after the test is completed."""
-        if hasattr(self, 'net_api'):
-            self.net_api.chrome_net_context.enable_network_device('Ethernet')
-        super(policy_WiFiAutoconnect, self).cleanup()
-
-
-    def test_wifi_autoconnect(self, ssid, autoconnect):
-        """
-        Verifies the behavior of the autoconnect portion of network policy.
-
-        @param ssid: Service set identifier for wireless local area network.
-        @param autoconnect: Whether policy autoconnects to network.
-
-        @raises error.TestFail: When device's behavior does not match policy.
-
-        """
-        if not autoconnect:
-            if self.net_api.is_network_connected(ssid):
-                raise error.TestFail('Device autoconnected to %s, but '
-                                     'autoconnect = False.'
-                                     % ssid)
-            self.net_api.connect_to_network(ssid)
-
-        if not self.net_api.is_network_connected(ssid):
-            raise error.TestFail('Did not connect to network (%s)' % ssid)
-
-
-    def run_once(self, autoconnect=False, ssid=''):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param ssid: Service set identifier for wireless local area network.
-        @param autoconnect: Value of "AutoConnect" setting. Options are True,
-                            False, or None
-
-        """
-        network = network_config.NetworkConfig(ssid,
-                                               autoconnect=autoconnect)
-
-        self.setup_case(
-            user_policies={
-                'OpenNetworkConfiguration': network.policy()
-            },
-            extension_paths=[
-                enterprise_network_api.NETWORK_TEST_EXTENSION_PATH
-            ]
-        )
-
-        self.net_api = enterprise_network_api.\
-                ChromeEnterpriseNetworkContext(self.cr)
-
-        network_available = self.net_api.is_network_in_range(
-                network.ssid,
-                wait_time=self.net_api.SHORT_TIMEOUT)
-        if not network_available:
-            raise error.TestError('SSID %s not available within %s seconds'
-                                  % (network.ssid, self.net_api.SHORT_TIMEOUT))
-
-        # Disable ethernet so device will default to WiFi
-        self.net_api.disable_network_device('Ethernet')
-
-        self.test_wifi_autoconnect(ssid, autoconnect)
diff --git a/client/site_tests/policy_WiFiPrecedence/control b/client/site_tests/policy_WiFiPrecedence/control
deleted file mode 100644
index 874faf0..0000000
--- a/client/site_tests/policy_WiFiPrecedence/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_WiFiPrecedence'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verify effect of policy_WiFiPrecedence policy on Chrome OS behavior.
-
-The DUT is given 2 network configurations and connects to the one with the
-higher precedence.
-
-Trigger this through the 'policy_WiFiPrecedenceServer' test.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_WiFiPrecedence', **args_dict)
-
diff --git a/client/site_tests/policy_WiFiPrecedence/policy_WiFiPrecedence.py b/client/site_tests/policy_WiFiPrecedence/policy_WiFiPrecedence.py
deleted file mode 100644
index 62aa46a..0000000
--- a/client/site_tests/policy_WiFiPrecedence/policy_WiFiPrecedence.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import pickle
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.enterprise import enterprise_network_api
-
-
-class policy_WiFiPrecedence(enterprise_policy_base.EnterprisePolicyTest):
-    version = 1
-
-
-    def cleanup(self):
-        """Re-enable ethernet after the test is completed."""
-        if hasattr(self, 'net_api'):
-            self.net_api.chrome_net_context.enable_network_device('Ethernet')
-        super(policy_WiFiPrecedence, self).cleanup()
-
-
-    def test_precedence(self, network1, network2, precedence, test):
-        """
-        Ensure DUT connects to network with higher precedence.
-
-        DUT is given 2 network configs and must connect to the one specified
-        by |precedence|.
-
-        @param network1: A NetworkConfig object representing a network.
-        @param network2: A NetworkConfig object representing a network.
-        @param precedence: The int 1 or 2 that indicates
-            which network should autoconnect.
-        @param test: Name of the test being run.
-
-        @raises error.TestFail: If DUT does not connect to the |precedence|
-            network.
-
-        """
-        if test == 'managed_vs_unmanaged':
-            # Connect and disconnect from the unmanaged network so the network
-            # is a "remembered" network on the DUT.
-            self.net_api.connect_to_network(network2.ssid)
-            self.net_api.disconnect_from_network(network2.ssid)
-
-        # If the networks are the same, ignore the precedence checks.
-        if network1.ssid != network2.ssid:
-            if (self.net_api.is_network_connected(network1.ssid) and
-                    precedence == 2):
-                raise error.TestFail(
-                        'DUT autoconnected to network1, but '
-                        'should have preferred network2.')
-            elif (self.net_api.is_network_connected(network2.ssid) and
-                  precedence == 1):
-                raise error.TestFail(
-                        'DUT autoconnected to network2, but '
-                        'should have preferred network1.')
-
-        if (not self.net_api.is_network_connected(network1.ssid) and
-              not self.net_api.is_network_connected(network2.ssid)):
-            raise error.TestFail('DUT did not connect to a network.')
-
-
-    def run_once(self, network1_pickle=None, network2_pickle=None,
-                 precedence=None, test=None):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param network1_pickle: A pickled version of a NetworkConfig
-            object representing network1.
-        @param network2_pickle: A pickled version of a NetworkConfig
-            object representing network2.
-        @param precedence: The int 1 or 2 that indicates which network
-            should autoconnect.
-        @param test: Name of the test being run.
-
-        @raises error.TestFail: If DUT does not connect to the |precedence|
-            network.
-
-        """
-        if network1_pickle is None or network2_pickle is None:
-            raise error.TestError('network1 and network2 cannot be None.')
-
-        network1 = pickle.loads(network1_pickle)
-        network2 = pickle.loads(network2_pickle)
-
-        network_policy = network1.policy()
-
-        device_policy = {}
-        if test == 'device_vs_user':
-            device_policy['device_policies'] = {
-                'DeviceOpenNetworkConfiguration': network2.policy()}
-        elif test != 'managed_vs_unmanaged':
-            # Concatenate the network policies.
-            network_policy['NetworkConfigurations'].append(
-                    network2.policy()['NetworkConfigurations'][0])
-
-        self.setup_case(
-            user_policies={
-                'OpenNetworkConfiguration': network_policy
-            },
-            extension_paths=[
-                enterprise_network_api.NETWORK_TEST_EXTENSION_PATH
-            ],
-            enroll=bool(device_policy),
-            **device_policy
-        )
-
-        self.net_api = enterprise_network_api.\
-            ChromeEnterpriseNetworkContext(self.cr)
-        # Disable ethernet so device will default to WiFi.
-        self.net_api.disable_network_device('Ethernet')
-
-        self.test_precedence(network1, network2, precedence, test)
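
For reference, a small sketch of how the removed policy_WiFiPrecedence test
combined two ONC payloads into a single user policy and moved NetworkConfig
data between the server and client tests as pickles. The ONC dictionaries
below are hypothetical stand-ins for what NetworkConfig.policy() returns;
only the 'NetworkConfigurations' list structure is assumed.

# Illustrative sketch only -- not part of this patch.
import pickle

def merge_onc(policy1, policy2):
    """Append policy2's first network configuration onto a copy of policy1."""
    merged = dict(policy1)
    merged['NetworkConfigurations'] = list(policy1['NetworkConfigurations'])
    merged['NetworkConfigurations'].append(
            policy2['NetworkConfigurations'][0])
    return merged

onc1 = {'NetworkConfigurations': [{'GUID': 'net1', 'Type': 'WiFi'}]}
onc2 = {'NetworkConfigurations': [{'GUID': 'net2', 'Type': 'WiFi'}]}

# The server-side test ships each config to the client as a pickle.
payload = pickle.dumps(onc1)
assert pickle.loads(payload) == onc1

print(merge_onc(onc1, onc2))
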
diff --git a/client/site_tests/policy_WiFiTypes/control b/client/site_tests/policy_WiFiTypes/control
deleted file mode 100644
index 488358a..0000000
--- a/client/site_tests/policy_WiFiTypes/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-NAME = 'policy_WiFiTypes'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-This test should be run through the 'policy_WiFiTypesServer' test.
-
-'policy_WiFiTypes' sets the OpenNetworkConfiguration policy and attempts to
-connect to the given AP. Fails if the DUT does not connect. Tests with both
-autoconnect on and off.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_WiFiTypes', **args_dict)
-
diff --git a/client/site_tests/policy_WiFiTypes/policy_WiFiTypes.py b/client/site_tests/policy_WiFiTypes/policy_WiFiTypes.py
deleted file mode 100644
index 08957c6..0000000
--- a/client/site_tests/policy_WiFiTypes/policy_WiFiTypes.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import pickle
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-from autotest_lib.client.cros.enterprise import enterprise_network_api
-
-
-class policy_WiFiTypes(enterprise_policy_base.EnterprisePolicyTest):
-    version = 1
-
-
-    def cleanup(self):
-        """Re-enable ethernet after the test is completed."""
-        if hasattr(self, 'net_api'):
-            self.net_api.chrome_net_context.enable_network_device('Ethernet')
-        super(policy_WiFiTypes, self).cleanup()
-
-
-    def run_once(self, network_pickle):
-        """
-        Setup and run the test configured for the specified test case.
-
-        @param network_pickle: A pickled NetworkConfig object of the network
-            to connect to.
-
-        """
-        network = pickle.loads(network_pickle)
-
-        # Test with both autoconnect on and off.
-        for autoconnect in [False, True]:
-            network.autoconnect = autoconnect
-
-            self.setup_case(
-                user_policies={'OpenNetworkConfiguration': network.policy()},
-                extension_paths=[
-                    enterprise_network_api.NETWORK_TEST_EXTENSION_PATH
-                ],
-            )
-
-            self.net_api = enterprise_network_api.\
-                ChromeEnterpriseNetworkContext(self.cr)
-            # Disable ethernet so device will default to WiFi
-            self.net_api.disable_network_device('Ethernet')
-
-            if not autoconnect:
-                self.net_api.connect_to_network(network.ssid)
-
-            if not self.net_api.is_network_connected(network.ssid):
-                raise error.TestFail(
-                        'No connection to network (%s) when autoconnect is %s.'
-                        % (network.ssid, autoconnect))
diff --git a/client/site_tests/policy_WilcoOnNonWilcoDevice/control b/client/site_tests/policy_WilcoOnNonWilcoDevice/control
deleted file mode 100644
index 109e4a0..0000000
--- a/client/site_tests/policy_WilcoOnNonWilcoDevice/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_WilcoOnNonWilcoDevice'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'client'
-
-DOC = '''
-Verifies that non-Wilco devices are not crashed by Wilco policies.
-This test is kicked off via the policy_WilcoServerOnNonWilcoDevice server test.
-
-'''
-
-args_dict = utils.args_to_dict(args)
-
-job.run_test('policy_WilcoOnNonWilcoDevice', **args_dict)
diff --git a/client/site_tests/policy_WilcoOnNonWilcoDevice/policy_WilcoOnNonWilcoDevice.py b/client/site_tests/policy_WilcoOnNonWilcoDevice/policy_WilcoOnNonWilcoDevice.py
deleted file mode 100644
index d5679ea..0000000
--- a/client/site_tests/policy_WilcoOnNonWilcoDevice/policy_WilcoOnNonWilcoDevice.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.cros.enterprise import enterprise_policy_base
-
-
-class policy_WilcoOnNonWilcoDevice(
-        enterprise_policy_base.EnterprisePolicyTest):
-    """
-    Test for looping through Wilco policies on a non-Wilco device.
-
-    Setting Wilco policies on a non-Wilco device should not cause a crash.
-
-    """
-    version = 1
-
-    def _run_setup_case(self, tests):
-        self.setup_case(
-            device_policies={
-                tests[0]['Policy_Name']: tests[0]['Policy_Value']},
-            enroll=True,
-            extra_chrome_flags=['--user-always-affiliated'])
-
-    def run_once(self, tests):
-        """
-        Entry point of this test.
-
-        @param tests: list of dicts with 'Policy_Name' and 'Policy_Value' keys.
-
-        """
-        self._run_setup_case(tests)
-        tests.pop(0)
-        for test in tests:
-            self.update_policies(
-                device_policies={test['Policy_Name']: test['Policy_Value']})
-            self.verify_policy_value(test['Policy_Name'], test['Policy_Value'])
diff --git a/client/site_tests/policy_WilcoUSBPowershare/control b/client/site_tests/policy_WilcoUSBPowershare/control
index a4a1107..10e5c52 100644
--- a/client/site_tests/policy_WilcoUSBPowershare/control
+++ b/client/site_tests/policy_WilcoUSBPowershare/control
@@ -8,6 +8,7 @@
 TEST_CATEGORY = 'General'
 TEST_CLASS = 'enterprise'
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = '''
 Verify effects of DeviceUsbPowerShareEnabled policy.
diff --git a/client/site_tests/power_AudioDetector/control b/client/site_tests/power_AudioDetector/control
index 308f218..fe9dd47 100644
--- a/client/site_tests/power_AudioDetector/control
+++ b/client/site_tests/power_AudioDetector/control
@@ -16,6 +16,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:bvt-perbuild"
+PY_VERSION = 3
 
 DOC = """
 This test verifies that the system will not suspend while audio is playing.
diff --git a/client/site_tests/power_AudioDetector/power_AudioDetector.py b/client/site_tests/power_AudioDetector/power_AudioDetector.py
index f4b79e7..2b98ffe 100644
--- a/client/site_tests/power_AudioDetector/power_AudioDetector.py
+++ b/client/site_tests/power_AudioDetector/power_AudioDetector.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/power_BacklightControl/control b/client/site_tests/power_BacklightControl/control
deleted file mode 100644
index a47f5dd..0000000
--- a/client/site_tests/power_BacklightControl/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "sque"
-NAME = "power_BacklightControl"
-PURPOSE = "Verify the backlight controller functions properly."
-CRITERIA = """
-Fail if any of the following occur:
-  - The backlight cannot be turned all the way to max using powerd.
-  - The backlight cannot be turned all the way down to minimum and zero using
-    powerd.
-  - The screen cannot be turned on/off by setting backlight to 0 and nonzero,
-    respectively, using powerd.
-  - There are too many steps (> 16) between min and max brightness.
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """
-This test verifies that the backlight controller can adjust brightness up and
-down over the full range, as well as turn screen on/off.
-"""
-
-job.run_test("power_BacklightControl")
diff --git a/client/site_tests/power_BacklightControl/power_BacklightControl.py b/client/site_tests/power_BacklightControl/power_BacklightControl.py
deleted file mode 100644
index 000ddc8..0000000
--- a/client/site_tests/power_BacklightControl/power_BacklightControl.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, time
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.graphics import graphics_utils
-from autotest_lib.client.cros.power import power_status, power_utils
-
-
-def get_num_outputs_on():
-    """
-    Retrieves the number of connected outputs that are on.
-    @return: integer value of number of connected outputs that are on.
-    """
-
-    return graphics_utils.get_num_outputs_on()
-
-class power_BacklightControl(test.test):
-    version = 1
-    # Minimum number of steps expected between min and max brightness levels.
-    _min_num_steps = 4
-    # Minimum required percentage change in energy rate between transitions
-    # (max -> min, min-> off)
-    _energy_rate_change_threshold_percent = 5
-
-
-    def initialize(self):
-        """Perform necessary initialization prior to test run.
-
-        Private Attributes:
-          _backlight: power_utils.Backlight object
-        """
-        super(power_BacklightControl, self).initialize()
-        self._backlight = None
-
-
-    def run_once(self):
-        # Require that this test be run on battery with at least 5% charge
-        status = power_status.get_status()
-        status.assert_battery_state(5)
-
-        prefs = { 'has_ambient_light_sensor' : 0,
-                  'ignore_external_policy'   : 1,
-                  'plugged_dim_ms'           : 7200000,
-                  'plugged_off_ms'           : 9000000,
-                  'plugged_suspend_ms'       : 18000000,
-                  'unplugged_dim_ms'         : 7200000,
-                  'unplugged_off_ms'         : 9000000,
-                  'unplugged_suspend_ms'     : 18000000 }
-        self._pref_change = power_utils.PowerPrefChanger(prefs)
-
-        keyvals = {}
-        num_errors = 0
-
-        # These are the expected ratios of energy rate between max, min, and off
-        # (zero) brightness levels.  e.g. when changing from max to min, the
-        # energy rate must become <= (max_energy_rate * max_to_min_factor).
-        max_to_min_factor = \
-            1.0 - self._energy_rate_change_threshold_percent / 100.0
-        min_to_off_factor = \
-            1.0 - self._energy_rate_change_threshold_percent / 100.0
-        off_to_max_factor = 1.0 / (max_to_min_factor * min_to_off_factor)
-
-        # Determine the number of outputs that are on.
-        starting_num_outputs_on = get_num_outputs_on()
-        if starting_num_outputs_on == 0:
-            raise error.TestFail('At least one display output must be on.')
-        keyvals['starting_num_outputs_on'] = starting_num_outputs_on
-
-        self._backlight = power_utils.Backlight()
-        keyvals['max_brightness'] = self._backlight.get_max_level()
-        if keyvals['max_brightness'] <= self._min_num_steps:
-            raise error.TestFail('Must have at least %d backlight levels' %
-                                 (self._min_num_steps + 1))
-
-        keyvals['initial_brightness'] = self._backlight.get_level()
-
-        self._wait_for_stable_energy_rate()
-        keyvals['initial_power_w'] = self._get_current_energy_rate()
-
-        self._backlight_controller = power_utils.BacklightController()
-        self._backlight_controller.set_brightness_to_max()
-
-        current_brightness = \
-            utils.wait_for_value(self._backlight.get_level,
-                                 max_threshold=keyvals['max_brightness'])
-        if current_brightness != keyvals['max_brightness']:
-            num_errors += 1
-            logging.error(('Failed to increase brightness to max, ' + \
-                           'brightness is %d.') % current_brightness)
-        else:
-            self._wait_for_stable_energy_rate()
-            keyvals['max_brightness_power_w'] = self._get_current_energy_rate()
-
-        # Set brightness to minimum without going to zero.
-        # Note that we don't know what the minimum brightness is, so just set
-        # min_threshold=0 to use the timeout to wait for the brightness to
-        # settle.
-        self._backlight_controller.set_brightness_to_min()
-        current_brightness = utils.wait_for_value(
-            self._backlight.get_level,
-            min_threshold=(keyvals['max_brightness'] / 2 - 1))
-        if current_brightness >= keyvals['max_brightness'] / 2 or \
-           current_brightness == 0:
-            num_errors += 1
-            logging.error('Brightness is not at minimum non-zero level: %d' %
-                          current_brightness)
-        else:
-            self._wait_for_stable_energy_rate()
-            keyvals['min_brightness_power_w'] = self._get_current_energy_rate()
-
-        # Turn off the screen by decreasing brightness one more time with
-        # allow_off=True.
-        self._backlight_controller.decrease_brightness(True)
-        current_brightness = utils.wait_for_value(
-            self._backlight.get_level, min_threshold=0)
-        if current_brightness != 0:
-            num_errors += 1
-            logging.error('Brightness is %d, expecting 0.' % current_brightness)
-
-        # Wait for screen to turn off.
-        num_outputs_on = utils.wait_for_value(
-            get_num_outputs_on, min_threshold=(starting_num_outputs_on - 1))
-        keyvals['outputs_on_after_screen_off'] = num_outputs_on
-        if num_outputs_on >= starting_num_outputs_on:
-            num_errors += 1
-            logging.error('At least one display must have been turned off. ' + \
-                          'Number of displays on: %s' % num_outputs_on)
-        else:
-            self._wait_for_stable_energy_rate()
-            keyvals['screen_off_power_w'] = self._get_current_energy_rate()
-
-        # Set brightness to max.
-        self._backlight_controller.set_brightness_to_max()
-        current_brightness = utils.wait_for_value(
-            self._backlight.get_level, max_threshold=keyvals['max_brightness'])
-        if current_brightness != keyvals['max_brightness']:
-            num_errors += 1
-            logging.error(('Failed to increase brightness to max, ' + \
-                           'brightness is %d.') % current_brightness)
-
-        # Verify that the same number of outputs are on as before.
-        num_outputs_on = get_num_outputs_on()
-        keyvals['outputs_on_at_end'] = num_outputs_on
-        if num_outputs_on != starting_num_outputs_on:
-            num_errors += 1
-            logging.error(('Number of displays turned on should be same as ' + \
-                           'at start.  Number of displays on: %s') %
-                          num_outputs_on)
-
-        self._wait_for_stable_energy_rate()
-        keyvals['final_power_w'] = self._get_current_energy_rate()
-
-        # Energy rate must have changed significantly between transitions.
-        if 'max_brightness_power_w' in keyvals and \
-           'min_brightness_power_w' in keyvals and \
-           keyvals['min_brightness_power_w'] >= \
-               keyvals['max_brightness_power_w'] * max_to_min_factor:
-            num_errors += 1
-            logging.error('Power draw did not decrease enough when ' + \
-                          'brightness was decreased from max to min.')
-
-        if 'screen_off_power_w' in keyvals and \
-           'min_brightness_power_w' in keyvals and \
-           keyvals['screen_off_power_w'] >= \
-               keyvals['min_brightness_power_w'] * min_to_off_factor:
-            num_errors += 1
-            logging.error('Power draw did not decrease enough when screen ' + \
-                          'was turned off.')
-
-        if num_outputs_on == starting_num_outputs_on and \
-           'screen_off_power_w' in keyvals and \
-           keyvals['final_power_w'] <= \
-               keyvals['screen_off_power_w'] * off_to_max_factor:
-            num_errors += 1
-            logging.error('Power draw did not increase enough after ' + \
-                          'turning screen on.')
-
-        self.write_perf_keyval(keyvals)
-
-        if num_errors > 0:
-            raise error.TestFail('Test failed with %d errors' % num_errors)
-
-
-    def cleanup(self):
-        if self._backlight:
-            self._backlight.restore()
-        super(power_BacklightControl, self).cleanup()
-
-
-    def _get_current_energy_rate(self):
-        return power_status.get_status().battery.energy_rate
-
-
-    def _wait_for_stable_energy_rate(self,
-                                     max_variation_percent=5,
-                                     sample_delay_sec=1,
-                                     window_size=10,
-                                     timeout_sec=30):
-        """
-        Waits for the energy rate to stablize.  Stability criterion:
-            The last |window_size| samples of energy rate do not deviate from
-            their mean by more than |max_variation_percent|.
-
-        Arguments:
-            max_variation_percent   Percentage of allowed deviation from mean
-                                    energy rate to still be considered stable.
-            sample_delay_sec        Time to wait between each reading of the
-                                    energy rate.
-            window_size             Number of energy rate samples required to
-                                    measure stability.  If there are more
-                                    samples than this amount, use only the last
-                                    |window_size| values.
-            timeout_sec             If stability has not been attained after
-                                    this long, stop waiting.
-
-        Return value:
-            True if energy rate stabilized before timeout.
-            False if timed out waiting for energy rate to stabilize.
-        """
-        start_time = time.time()
-        samples = []
-        max_variation_factor = max_variation_percent / 100.0
-        while time.time() - start_time < timeout_sec:
-            current_rate = self._get_current_energy_rate()
-
-            # Remove the oldest value if the list of energy rate samples is at
-            # the maximum limit |window_size|, before appending a new value.
-            if len(samples) >= window_size:
-                samples = samples[1:]
-            samples.append(current_rate)
-
-            mean = sum(samples) / len(samples)
-            if len(samples) >= window_size and \
-               max(samples) <= mean * (1 + max_variation_factor) and \
-               min(samples) >= mean * (1 - max_variation_factor):
-                return True
-
-            time.sleep(sample_delay_sec)
-
-        return False
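
The stability criterion described in the docstring above (the last |window_size| samples staying within |max_variation_percent| of their mean) reduces to a simple sliding-window check. A minimal standalone sketch of just that check, with illustrative names that are not part of the deleted test:

    def window_is_stable(samples, max_variation_percent=5, window_size=10):
        """Return True once the last |window_size| samples stay within
        +/- |max_variation_percent| of their mean."""
        if len(samples) < window_size:
            return False
        window = samples[-window_size:]
        mean = sum(window) / float(len(window))
        factor = max_variation_percent / 100.0
        return (max(window) <= mean * (1 + factor) and
                min(window) >= mean * (1 - factor))

    # Ten readings within ~2% of 5.0 W count as stable.
    print(window_is_stable([5.0, 5.1, 4.9, 5.0, 5.05, 4.95, 5.0, 5.1, 4.9, 5.0]))
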
diff --git a/client/site_tests/power_BatteryCharge/control b/client/site_tests/power_BatteryCharge/control
index 10e3b96..9e579ab 100644
--- a/client/site_tests/power_BatteryCharge/control
+++ b/client/site_tests/power_BatteryCharge/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_BatteryCharge"
 PURPOSE = "Measure the time required to charge the battery."
 CRITERIA = "This test is a benchmark."
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Device should be plugged into an AC outlet.
diff --git a/client/site_tests/power_BatteryCharge/control.args b/client/site_tests/power_BatteryCharge/control.args
index 7cd1431..47b4faf 100644
--- a/client/site_tests/power_BatteryCharge/control.args
+++ b/client/site_tests/power_BatteryCharge/control.args
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_BatteryCharge.args"
 PURPOSE = "Measure the time required to charge the battery."
 CRITERIA = "This test is a benchmark."
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Device should be plugged into an AC outlet.
diff --git a/client/site_tests/power_BatteryCharge/power_BatteryCharge.py b/client/site_tests/power_BatteryCharge/power_BatteryCharge.py
index 80d8e0c..c19dece 100755
--- a/client/site_tests/power_BatteryCharge/power_BatteryCharge.py
+++ b/client/site_tests/power_BatteryCharge/power_BatteryCharge.py
@@ -1,19 +1,23 @@
-#!/usr/bin/python2
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging, time
-from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros import service_stopper
-from autotest_lib.client.cros.power import power_status, power_utils
+from autotest_lib.client.cros.power import power_status
+from autotest_lib.client.cros.power import power_test
+from autotest_lib.client.cros.power import power_utils
 
-class power_BatteryCharge(test.test):
+
+class power_BatteryCharge(power_test.power_Test):
     """class power_BatteryCharge."""
     version = 1
 
-    def initialize(self):
+    def initialize(self, pdash_note=''):
+        """Perform necessary initialization prior to test run."""
+
         if not power_utils.has_battery():
             raise error.TestNAError('DUT has no battery. Test Skipped')
 
@@ -23,11 +27,14 @@
             raise error.TestNAError(
                   'This test needs to be run with the AC power online')
 
+        super(power_BatteryCharge, self).initialize(seconds_period=20,
+                                                    pdash_note=pdash_note,
+                                                    force_discharge=False)
+
         self._services = service_stopper.ServiceStopper(
             service_stopper.ServiceStopper.POWER_DRAW_SERVICES + ['ui'])
         self._services.stop_services()
 
-
     def run_once(self, max_run_time=180, percent_charge_to_add=1,
                  percent_initial_charge_max=None,
                  percent_target_charge=None,
@@ -86,6 +93,7 @@
         logging.info('initial_charge: %f', self.initial_charge)
         logging.info('target_charge: %f', target_charge)
 
+        self.start_measurements()
         while self.remaining_time and current_charge < target_charge:
             if time_to_sleep > self.remaining_time:
                 time_to_sleep = self.remaining_time
@@ -106,18 +114,26 @@
                 logging.info('Battery full, aborting!')
                 break
             elif self.status.battery.status == 'Discharging':
-                raise error.TestError('This test needs to be run with the '
-                    'battery charging on AC.')
-
+                # TestError might be raised if |use_design_charge_capacity|
+                # is True when testing with an older battery.
+                if current_charge > self.charge_capacity * 0.97:
+                    logging.info('Battery full (Discharge on AC), aborting!')
+                else:
+                    raise error.TestError('This test needs to be run with the '
+                                          'battery charging on AC.')
+        self._end_time = time.time()
 
     def postprocess_iteration(self):
+        """"Collect and log keyvals."""
         keyvals = {}
         keyvals['ah_charge_full'] = self.charge_full
         keyvals['ah_charge_full_design'] = self.charge_full_design
         keyvals['ah_charge_capacity'] = self.charge_capacity
         keyvals['ah_initial_charge'] = self.initial_charge
         keyvals['ah_final_charge'] = self.status.battery.charge_now
-        keyvals['s_time_taken'] = self.max_run_time - self.remaining_time
+        s_time_taken = self.max_run_time - self.remaining_time
+        min_time_taken = s_time_taken / 60.
+        keyvals['s_time_taken'] = s_time_taken
         keyvals['percent_initial_charge'] = self.initial_charge * 100 / \
                                             keyvals['ah_charge_capacity']
         keyvals['percent_final_charge'] = keyvals['ah_final_charge'] * 100 / \
@@ -135,10 +151,25 @@
                 (keyvals['ah_final_charge'] - self.initial_charge) / \
                 hrs_charging
 
-        self.write_perf_keyval(keyvals)
+        self.keyvals.update(keyvals)
 
+        self._keyvallogger.add_item('time_to_charge_min', min_time_taken,
+                                    'point', 'perf')
+        self._keyvallogger.add_item('initial_charge_ah', self.initial_charge,
+                                    'point', 'perf')
+        self._keyvallogger.add_item('final_charge_ah',
+                                    self.status.battery.charge_now, 'point',
+                                    'perf')
+        self._keyvallogger.add_item('charge_full_ah', self.charge_full,
+                                    'point', 'perf')
+        self._keyvallogger.add_item('charge_full_design_ah',
+                                    self.charge_full_design, 'point', 'perf')
+        self._keyvallogger.set_end(self._end_time)
+
+        super(power_BatteryCharge, self).postprocess_iteration()
 
     def cleanup(self):
+        """Restore stop services and backlight level."""
         if hasattr(self, '_services') and self._services:
             self._services.restore_services()
         if hasattr(self, '_backlight') and self._backlight:
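
The keyval arithmetic that the reworked postprocess_iteration() performs is straightforward unit conversion; a small sketch with made-up readings (the numbers and variable names here are illustrative only):

    s_time_taken = 5400.0      # seconds spent charging (hypothetical)
    initial_charge_ah = 3.0    # Ah at start of the run
    final_charge_ah = 4.5      # Ah at end of the run
    charge_capacity_ah = 6.0   # Ah capacity used for the percentage keyvals

    min_time_taken = s_time_taken / 60.     # 90.0 -> time_to_charge_min
    hrs_charging = s_time_taken / 3600.     # 1.5 hours
    percent_initial = initial_charge_ah * 100 / charge_capacity_ah  # 50.0
    percent_final = final_charge_ah * 100 / charge_capacity_ah      # 75.0
    ah_charging_rate = (final_charge_ah - initial_charge_ah) / hrs_charging  # 1.0 Ah/h

    print(min_time_taken, percent_initial, percent_final, ah_charging_rate)
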
diff --git a/client/site_tests/power_BatteryDrain/control b/client/site_tests/power_BatteryDrain/control
index d52f5ee..3a2f974 100644
--- a/client/site_tests/power_BatteryDrain/control
+++ b/client/site_tests/power_BatteryDrain/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_BatteryDrain"
 PURPOSE = "Drain the battery quickly, as a utility for other tests."
 CRITERIA = """
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test drains the battery quickly, as a utility for other tests. It requires
diff --git a/client/site_tests/power_BatteryDrain/power_BatteryDrain.py b/client/site_tests/power_BatteryDrain/power_BatteryDrain.py
index 234ffc9..177080b 100644
--- a/client/site_tests/power_BatteryDrain/power_BatteryDrain.py
+++ b/client/site_tests/power_BatteryDrain/power_BatteryDrain.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -18,13 +19,15 @@
     backlight = None
     keyboard_backlight = None
 
+    tick_count = 0
+
     url = 'https://crospower.page.link/power_BatteryDrain'
 
     def cleanup(self):
         '''Cleanup for a test run'''
         if self._force_discharge:
             if not power_utils.charge_control_by_ectool(True):
-                logging.warn('Can not restore from force discharge.')
+                logging.warning('Can not restore from force discharge.')
         if self.backlight:
             self.backlight.restore()
         if self.keyboard_backlight:
@@ -81,6 +84,10 @@
                 status.refresh()
                 if not force_discharge and status.on_ac():
                     raise ac_error
+                self.tick_count += 1
+                if self.tick_count % 60 == 0:
+                    logging.info('Battery charge percent: {}'.format(
+                            status.percent_display_charge()))
                 return status.percent_display_charge() <= drain_to_percent
 
             err = error.TestFail(
diff --git a/client/site_tests/power_CPUFreq/control b/client/site_tests/power_CPUFreq/control
index 1af31f8..23e99d9 100644
--- a/client/site_tests/power_CPUFreq/control
+++ b/client/site_tests/power_CPUFreq/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_CPUFreq"
 PURPOSE = "Verify that supported CPU frequencies can be set."
 CRITERIA = """
@@ -16,6 +16,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will read the supporting frequencies from the file
diff --git a/client/site_tests/power_CPUFreq/power_CPUFreq.py b/client/site_tests/power_CPUFreq/power_CPUFreq.py
index 13ccc5b..7be17f1 100644
--- a/client/site_tests/power_CPUFreq/power_CPUFreq.py
+++ b/client/site_tests/power_CPUFreq/power_CPUFreq.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -27,6 +28,7 @@
         self._cpus = [cpufreq(dirname) for dirname in dirs]
         for cpu in self._cpus:
             cpu.save_state()
+            cpu.disable_constraints()
 
         # Store the setting if the system has CPUQuiet feature
         if os.path.exists(SYSFS_CPUQUIET_ENABLE):
@@ -88,6 +90,12 @@
             raise error.TestFail('Not enough frequencies supported!')
 
         for cpu in cpus:
+            this_frequencies = cpu.get_available_frequencies()
+            if set(available_frequencies) != set(this_frequencies):
+                raise error.TestError(
+                    "Can't fallback to parallel test: %s / %s differ: %r / %r" %
+                    (cpu, cpu0, available_frequencies, this_frequencies))
+
             if 'userspace' not in cpu.get_available_governors():
                 raise error.TestError('userspace governor not supported')
 
@@ -108,7 +116,7 @@
         try:
             freq = cpu.get_current_frequency()
         except IOError:
-            logging.warn('Frequency getting failed.  Retrying once.')
+            logging.warning('Frequency getting failed.  Retrying once.')
             time.sleep(.1)
             freq = cpu.get_current_frequency()
 
@@ -150,12 +158,15 @@
         if self.get_driver() == 'acpi-cpufreq':
             self.enable_boost()
 
+    def __str__(self):
+        return os.path.basename(os.path.dirname(self.__base_path))
+
     def __write_file(self, file_name, data):
         path = os.path.join(self.__base_path, file_name)
         try:
             utils.open_write_close(path, data)
         except IOError as e:
-            logging.warn('write of %s failed: %s', path, str(e))
+            logging.warning('write of %s failed: %s', path, str(e))
 
     def __read_file(self, file_name):
         path = os.path.join(self.__base_path, file_name)
@@ -182,6 +193,11 @@
             logging.info(fname + ': ' + data)
             self.__write_file(fname, data)
 
+    def disable_constraints(self):
+        logging.info('disabling min/max constraints:')
+        self.__write_file('scaling_min_freq', str(self.get_min_frequency()))
+        self.__write_file('scaling_max_freq', str(self.get_max_frequency()))
+
     def get_available_governors(self):
         governors = self.__read_file('scaling_available_governors')
         logging.info('available governors: %s', governors)
@@ -214,19 +230,19 @@
         logging.info('current frequency: %s', freq)
         return freq
 
+    def get_min_frequency(self):
+        freq = int(self.__read_file('cpuinfo_min_freq'))
+        logging.info('min frequency: %s', freq)
+        return freq
+
+    def get_max_frequency(self):
+        freq = int(self.__read_file('cpuinfo_max_freq'))
+        logging.info('max frequency: %s', freq)
+        return freq
+
     def set_frequency(self, frequency):
         logging.info('setting frequency to %d', frequency)
-        if frequency >= self.get_current_frequency():
-            file_list = [
-                'scaling_max_freq', 'scaling_min_freq', 'scaling_setspeed'
-            ]
-        else:
-            file_list = [
-                'scaling_min_freq', 'scaling_max_freq', 'scaling_setspeed'
-            ]
-
-        for fname in file_list:
-            self.__write_file(fname, str(frequency))
+        self.__write_file('scaling_setspeed', str(frequency))
 
     def disable_boost(self):
         """Disable boost.
diff --git a/client/site_tests/power_CPUIdle/control b/client/site_tests/power_CPUIdle/control
index 0cadb9b..663324e 100644
--- a/client/site_tests/power_CPUIdle/control
+++ b/client/site_tests/power_CPUIdle/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_CPUIdle"
 PURPOSE = "Ensure the processor drops into idle state when it is idle."
 CRITERIA = "Fails if the cpu did not have any idle cycles during this test."
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will read values from /sys/devices/system/cpu/cpu*/cpuidle to
diff --git a/client/site_tests/power_CPUIdle/power_CPUIdle.py b/client/site_tests/power_CPUIdle/power_CPUIdle.py
index 581ee3b..4c6e7f0 100644
--- a/client/site_tests/power_CPUIdle/power_CPUIdle.py
+++ b/client/site_tests/power_CPUIdle/power_CPUIdle.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -17,31 +18,31 @@
         all_cpus = cpus()
 
         idle_time_at_start, active_time_at_start = all_cpus.idle_time()
-        logging.info('idle_time_at_start: %d' % idle_time_at_start)
-        logging.info('active_time_at_start: %d' % active_time_at_start)
+        logging.info('idle_time_at_start: %d', idle_time_at_start)
+        logging.info('active_time_at_start: %d', active_time_at_start)
 
         # sleep for some time to allow the CPUs to drop into idle states
         time.sleep(sleep_time)
 
         idle_time_at_end, active_time_at_end = all_cpus.idle_time()
-        logging.info('idle_time_at_end: %d' % idle_time_at_end)
-        logging.info('active_time_at_end: %d' % idle_time_at_end)
+        logging.info('idle_time_at_end: %d', idle_time_at_end)
+        logging.info('active_time_at_end: %d', active_time_at_end)
 
         idle_time_delta_ms = (idle_time_at_end - idle_time_at_start) / 1000
-        logging.info('idle_time_delta_ms: %d' % idle_time_delta_ms)
+        logging.info('idle_time_delta_ms: %d', idle_time_delta_ms)
 
         active_time_delta_ms = (active_time_at_end - active_time_at_start) \
                                / 1000
-        logging.info('active_time_delta_ms: %d' % active_time_delta_ms)
+        logging.info('active_time_delta_ms: %d', active_time_delta_ms)
 
         total_time_delta_ms = active_time_delta_ms + idle_time_delta_ms
-        logging.info('total_time_delta_ms: %d' % total_time_delta_ms)
+        logging.info('total_time_delta_ms: %d', total_time_delta_ms)
 
         percent_active_time = active_time_delta_ms * 100.0 / total_time_delta_ms
-        logging.info('percent active time : %.2f' % percent_active_time)
+        logging.info('percent active time : %.2f', percent_active_time)
 
         percent_idle_time = idle_time_delta_ms * 100.0 / total_time_delta_ms
-        logging.info('percent idle time : %.2f' % percent_idle_time)
+        logging.info('percent idle time : %.2f', percent_idle_time)
 
         keyvals = {}
         keyvals['ms_active_time_delta'] = active_time_delta_ms
@@ -131,7 +132,7 @@
         time = 0
         if self.__is_idle_state():
             time = int(self.__read_file('time'))
-        logging.info('idle_time(%s): %d' % (self.__name, time))
+        logging.info('idle_time(%s): %d', self.__name, time)
         return time
 
 
@@ -139,5 +140,5 @@
         time = 0
         if not self.__is_idle_state():
             time = int(self.__read_file('time'))
-        logging.info('active_time(%s): %d' % (self.__name, time))
+        logging.info('active_time(%s): %d', self.__name, time)
         return time
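
The percent-idle figure the test logs comes from sampling the per-state residency counters twice and comparing the deltas. A simplified standalone sketch against the stock cpuidle sysfs layout (it folds everything except POLL/C0 into "idle", which is roughly what the helpers above do; names are illustrative):

    import glob
    import time

    def idle_usec(cpu='cpu0'):
        """Sum residency (usec) of every non-POLL cpuidle state for one CPU."""
        total = 0
        for state in glob.glob('/sys/devices/system/cpu/%s/cpuidle/state*' % cpu):
            with open(state + '/name') as f:
                name = f.read().strip()
            if name in ('POLL', 'C0'):
                continue
            with open(state + '/time') as f:
                total += int(f.read())
        return total

    sleep_secs = 5
    before = idle_usec()
    time.sleep(sleep_secs)
    idle_delta_ms = (idle_usec() - before) / 1000.0
    percent_idle = idle_delta_ms * 100.0 / (sleep_secs * 1000.0)
    print('cpu0 idle for %.2f%% of the interval' % percent_idle)
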
diff --git a/client/site_tests/power_CheckAC/control b/client/site_tests/power_CheckAC/control
index 98f358a..b5cbce7 100644
--- a/client/site_tests/power_CheckAC/control
+++ b/client/site_tests/power_CheckAC/control
@@ -9,6 +9,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test checks the status of the power supply to see that it is on.
diff --git a/client/site_tests/power_CheckAC/power_CheckAC.py b/client/site_tests/power_CheckAC/power_CheckAC.py
index cb62349..84c00b3 100755
--- a/client/site_tests/power_CheckAC/power_CheckAC.py
+++ b/client/site_tests/power_CheckAC/power_CheckAC.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/power_Consumption/control b/client/site_tests/power_Consumption/control
deleted file mode 100644
index 276201f..0000000
--- a/client/site_tests/power_Consumption/control
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "power_Consumption"
-PURPOSE = "Measure power draw when system is under different kinds of load."
-CRITERIA = "This test is a benchmark."
-TIME = "MEDIUM"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """This test runs a series of different tasks like media playback, flash
-animation, large file download etc. It measures and reports power
-consumptions during each of those tasks.
-
-Args:
-    short: Boolean, if True, run a shorter version of the test with fewer
-        measurements. Designed to run in under 5 minutes so it can be used in
-        the per-build test suite.
-    test_groups: list of sub-test groups to run. Those refer to _run_group_X()
-        methods. None - to use defaults hard-coded in the test.
-    reps: a multiplier used for running longer tests. With reps=N each sub-test
-        will run roughly N times longer. This is good for averaging out more
-        of the noise and therefore getting better accuracy.
-"""
-
-UI_TESTS = ['backlight', 'download', 'webpages', 'video', 'speedometer']
-NONUI_TESTS = ['backchannel', 'sound', 'lowlevel']
-DEFAULT_TESTS = UI_TESTS + NONUI_TESTS
-INTERVAL_SECS = 5
-
-job.profilers.add('vmstat', INTERVAL_SECS)
-job.run_test('power_Consumption',
-                short=False,
-                test_groups=DEFAULT_TESTS,
-                reps=1)
-job.profilers.delete('vmstat')
diff --git a/client/site_tests/power_Consumption/control.fast b/client/site_tests/power_Consumption/control.fast
deleted file mode 100644
index ba5fb55..0000000
--- a/client/site_tests/power_Consumption/control.fast
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "power_Consumption.fast"
-PURPOSE = "Measure power draw when system is under different kinds of load."
-CRITERIA = "This test is a benchmark."
-ATTRIBUTES = "suite:power_build"
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """This test runs a series of different tasks like media playback, flash
-animation, large file download etc. It measures and reports power
-consumptions during each of those tasks.
-
-Args:
-    short: Boolean, if True, run a shorter version of the test with fewer
-        measurements. Designed to run in under 5 minutes so it can be used in
-        the per-build test suite.
-    test_groups: list of sub-test groups to run. Those refer to _run_group_X()
-        methods. None - to use defaults hard-coded in the test.
-    reps: a multiplier used for running longer tests. With reps=N each sub-test
-        will run roughly N times longer. This is good for averaging out more
-        of the noise and therefore getting better accuracy.
-    ac_ok: Boolean, if True, allowed to run with power supply attached.
-"""
-
-INTERVAL_SECS = 5
-
-job.profilers.add('vmstat', INTERVAL_SECS)
-job.run_test('power_Consumption', short=True, ac_ok=True, reps=1)
-job.profilers.delete('vmstat')
diff --git a/client/site_tests/power_Consumption/echo.html b/client/site_tests/power_Consumption/echo.html
deleted file mode 100644
index 4cf439a..0000000
--- a/client/site_tests/power_Consumption/echo.html
+++ /dev/null
@@ -1,14 +0,0 @@
-<html>
-  <head>
-    <title>Test page</title>
-  </head>
-  <body>
-  <div style="text-align: center;margin:10em;">
-    <h3 id="the_string"> Nothing to echo </h3>
-  </div>
-  <script>
-    var h = document.getElementById('the_string');
-    h.innerText = decodeURIComponent(document.location.search.substring(1));
-  </script>
-  </body>
-</html>
diff --git a/client/site_tests/power_Consumption/power_Consumption.py b/client/site_tests/power_Consumption/power_Consumption.py
deleted file mode 100644
index 9249a88..0000000
--- a/client/site_tests/power_Consumption/power_Consumption.py
+++ /dev/null
@@ -1,553 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-import urllib
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros import backchannel
-from autotest_lib.client.cros import httpd
-from autotest_lib.client.cros import service_stopper
-from autotest_lib.client.cros.graphics import graphics_utils
-from autotest_lib.client.cros.networking import wifi_proxy
-from autotest_lib.client.cros.power import power_rapl, power_status, power_utils
-
-
-class power_Consumption(test.test):
-    """Measure power consumption for different types of loads.
-
-    This test runs a series of different tasks like media playback, flash
-    animation, large file download etc. It measures and reports power
-    consumptions during each of those tasks.
-    """
-
-    version = 2
-
-
-    def initialize(self, ac_ok=False):
-        """Initialize test.
-
-        Args:
-            ac_ok: boolean to allow running on AC
-        """
-        # Objects that need to be taken care of in cleanup() are initialized
-        # here to None. Otherwise we run the risk of AttributeError raised in
-        # cleanup() masking a real error that caused the test to fail during
-        # initialize() before those variables were assigned.
-        self._backlight = None
-        self._tmp_keyvals = {}
-
-        self._services = service_stopper.ServiceStopper(
-            service_stopper.ServiceStopper.POWER_DRAW_SERVICES)
-        self._services.stop_services()
-
-
-        # Time to exclude from calculation after firing a task [seconds]
-        self._stabilization_seconds = 5
-        self._power_status = power_status.get_status()
-        self._tmp_keyvals['b_on_ac'] = self._power_status.on_ac()
-
-        if not ac_ok:
-            # Verify that we are running on battery and the battery is
-            # sufficiently charged
-            self._power_status.assert_battery_state(30)
-
-        # Local data and web server settings. Tarballs with traditional names
-        # like *.tgz don't get copied to the image by ebuilds (see
-        # AUTOTEST_FILE_MASK in autotest-chrome ebuild).
-        self._static_sub_dir = 'static_sites'
-        utils.extract_tarball_to_dir(
-                'static_sites.tgz.keep',
-                os.path.join(self.bindir, self._static_sub_dir))
-        self._media_dir = '/home/chronos/user/Downloads/'
-        self._httpd_port = 8000
-        self._url_base = 'http://localhost:%s/' % self._httpd_port
-        self._test_server = httpd.HTTPListener(self._httpd_port,
-                                               docroot=self.bindir)
-
-        # initialize various interesting power related stats
-        self._statomatic = power_status.StatoMatic()
-        self._test_server.run()
-
-
-        logging.info('initialize() finished')
-
-
-    def _download_test_data(self):
-        """Download audio and video files.
-
-        This is also used as payload for download test.
-
-        Note, can reach payload via browser at
-          https://console.developers.google.com/storage/chromeos-test-public/big_buck_bunny
-        Start with README
-        """
-
-        repo = 'http://commondatastorage.googleapis.com/chromeos-test-public/'
-        file_list = [repo + 'big_buck_bunny/big_buck_bunny_trailer_400p.mp4', ]
-        if not self.short:
-            file_list += [
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_400p.ogg',
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_400p.vp8.webm',
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_400p.vp9.webm',
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_720p.mp4',
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_720p.ogg',
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_720p.vp8.webm',
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_720p.vp9.webm',
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_1080p.mp4',
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_1080p.ogg',
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_1080p.vp8.webm',
-                repo + 'big_buck_bunny/big_buck_bunny_trailer_1080p.vp9.webm',
-                repo + 'wikimedia/Greensleeves.ogg',
-                ]
-
-        for url in file_list:
-            logging.info('Downloading %s', url)
-            utils.unmap_url('', url, self._media_dir)
-
-
-    def _toggle_fullscreen(self):
-        """Toggle full screen mode."""
-        # Note: full screen mode toggled with F11 is different from clicking the
-        # full screen icon on video player controls. This needs improvement.
-        # Bug: http://crbug.com/248939
-        graphics_utils.screen_toggle_fullscreen()
-
-
-    # Below are a series of generic sub-test runners. They run a given task
-    # and record the task name and start-end timestamps for future computation
-    # of power consumption during the task.
-    def _run_func(self, name, func, repeat=1, save_checkpoint=True):
-        """Run a given python function as a sub-test."""
-        start_time = time.time() + self._stabilization_seconds
-        for _ in xrange(repeat):
-            ret = func()
-        if save_checkpoint:
-            self._plog.checkpoint(name, start_time)
-        return ret
-
-
-    def _run_sleep(self, name, seconds=60):
-        """Just sleep and record it as a named sub-test"""
-        start_time = time.time() + self._stabilization_seconds
-        time.sleep(seconds)
-        self._plog.checkpoint(name, start_time)
-
-
-    def _run_cmd(self, name, cmd, repeat=1):
-        """Run command in a shell as a sub-test"""
-        start_time = time.time() + self._stabilization_seconds
-        for _ in xrange(repeat):
-            logging.info('Executing command: %s', cmd)
-            exit_status = utils.system(cmd, ignore_status=True)
-            if exit_status != 0:
-                logging.error('run_cmd: the following command terminated with'
-                                'a non zero exit status: %s', cmd)
-        self._plog.checkpoint(name, start_time)
-        return exit_status
-
-
-    def _run_until(self, name, predicate, timeout=60):
-        """Probe the |predicate| function  and wait until it returns true.
-        Record the waiting time as a sub-test
-        """
-        start_time = time.time() + self._stabilization_seconds
-        utils.poll_for_condition(predicate, timeout=timeout)
-        self._plog.checkpoint(name, start_time)
-
-
-    def _run_url(self, name, url, duration):
-        """Navigate to URL, sleep for some time and record it as a sub-test."""
-        logging.info('Navigating to %s', url)
-        self._tab.Activate()
-        self._tab.Navigate(url)
-        self._run_sleep(name, duration)
-        tab_title = self._tab.EvaluateJavaScript('document.title')
-        logging.info('Sub-test name: %s Tab title: %s.', name, tab_title)
-
-
-    def _run_url_bg(self, name, url, duration):
-        """Run a web site in background tab.
-
-        Navigate to the given URL, open an empty tab to put the one with the
-        URL in background, then sleep and record it as a sub-test.
-
-        Args:
-            name: sub-test name.
-            url: url to open in background tab.
-            duration: number of seconds to sleep while taking measurements.
-        """
-        bg_tab = self._tab
-        bg_tab.Navigate(url)
-        # Let it load and settle
-        time.sleep(self._stabilization_seconds / 2.)
-        tab_title = bg_tab.EvaluateJavaScript('document.title')
-        logging.info('App name: %s Tab title: %s.', name, tab_title)
-        # Open a new empty tab to cover the one with test payload.
-        fg_tab = self._browser.tabs.New()
-        fg_tab.Activate()
-        self._run_sleep(name, duration)
-        fg_tab.Close()
-        bg_tab.Activate()
-
-
-    def _run_group_download(self):
-        """Download over ethernet. Using video test data as payload."""
-
-        # For short run, the payload is too small to take measurement
-        self._run_func('download_eth',
-                       self._download_test_data ,
-                       repeat=self._repeats,
-                       save_checkpoint=not(self.short))
-
-
-    def _run_group_webpages(self):
-        """Runs a series of web pages as sub-tests."""
-        data_url = self._url_base + self._static_sub_dir + '/'
-
-        # URLs to be only tested in foreground tab.
-        # Can't use about:blank here - crbug.com/248945
-        # but chrome://version is just as good for our needs.
-        urls = [('ChromeVer', 'chrome://version/')]
-        # URLs to be tested in both, background and foreground modes.
-        bg_urls = []
-
-        more_urls = [('BallsDHTML',
-                      data_url + 'balls/DHTMLBalls/dhtml.htm'),
-                     ('BallsFlex',
-                      data_url + 'balls/FlexBalls/flexballs.html'),
-                    ]
-
-        if self.short:
-            urls += more_urls
-        else:
-            bg_urls += more_urls
-            bg_urls += [('Parapluesch',
-                         'http://www.parapluesch.de/whiskystore/test.htm'),
-                         ('PosterCircle',
-                          'http://www.webkit.org'
-                          '/blog-files/3d-transforms/poster-circle.html'), ]
-
-        for name, url in urls + bg_urls:
-            self._run_url(name, url, duration=self._duration_secs)
-
-        for name, url in bg_urls:
-            self._run_url_bg('bg_' + name, url, duration=self._duration_secs)
-
-
-    def _run_group_speedometer(self):
-        """Run the Speedometer benchmark suite as a sub-test.
-
-        Fire it up and wait until it displays "Score".
-        """
-
-        # TODO: check in a local copy of the test if we can get permission if
-        # the network causes problems.
-        url = 'http://browserbench.org/Speedometer/'
-        start_js = 'startTest()'
-        score_js = "document.getElementById('result-number').innerText"
-        tab = self._tab
-
-        def speedometer_func():
-            """To be passed as the callable to self._run_func()"""
-            tab.Navigate(url)
-            tab.WaitForDocumentReadyStateToBeComplete()
-            tab.EvaluateJavaScript(start_js)
-            # Speedometer test should be done in less than 15 minutes (actual
-            # runs are closer to 5).
-            is_done = lambda: tab.EvaluateJavaScript(score_js) != ""
-            time.sleep(self._stabilization_seconds)
-            utils.poll_for_condition(is_done, timeout=900,
-                                     desc='Speedometer score found')
-
-        self._run_func('Speedometer', speedometer_func, repeat=self._repeats)
-
-        # Write speedometer score from the last run to log
-        score = tab.EvaluateJavaScript(score_js)
-        logging.info('Speedometer Score: %s', score)
-
-
-    def _run_group_video(self):
-        """Run video and audio playback in the browser."""
-
-        # Note: for perf keyvals, key names are defined as VARCHAR(30) in the
-        # results DB. Chars above 30 are truncated when saved to DB.
-        urls = [('vid400p_h264', 'big_buck_bunny_trailer_400p.mp4'), ]
-        fullscreen_urls = []
-        bg_urls = []
-
-        if not self.short:
-            urls += [
-                ('vid400p_ogg', 'big_buck_bunny_trailer_400p.ogg'),
-                ('vid400p_vp8', 'big_buck_bunny_trailer_400p.vp8.webm'),
-                ('vid400p_vp9', 'big_buck_bunny_trailer_400p.vp9.webm'),
-                ('vid720_h264', 'big_buck_bunny_trailer_720p.mp4'),
-                ('vid720_ogg', 'big_buck_bunny_trailer_720p.ogg'),
-                ('vid720_vp8', 'big_buck_bunny_trailer_720p.vp8.webm'),
-                ('vid720_vp9', 'big_buck_bunny_trailer_720p.vp9.webm'),
-                ('vid1080_h264', 'big_buck_bunny_trailer_1080p.mp4'),
-                ('vid1080_ogg', 'big_buck_bunny_trailer_1080p.ogg'),
-                ('vid1080_vp8', 'big_buck_bunny_trailer_1080p.vp8.webm'),
-                ('vid1080_vp9', 'big_buck_bunny_trailer_1080p.vp9.webm'),
-                ('audio', 'Greensleeves.ogg'),
-                ]
-
-            fullscreen_urls += [
-                ('vid720_h264_fs', 'big_buck_bunny_trailer_720p.mp4'),
-                ('vid720_vp8_fs', 'big_buck_bunny_trailer_720p.vp8.webm'),
-                ('vid720_vp9_fs', 'big_buck_bunny_trailer_720p.vp9.webm'),
-                ('vid1080_h264_fs', 'big_buck_bunny_trailer_1080p.mp4'),
-                ('vid1080_vp8_fs', 'big_buck_bunny_trailer_1080p.vp8.webm'),
-                ('vid1080_vp9_fs', 'big_buck_bunny_trailer_1080p.vp9.webm'),
-                ]
-
-            bg_urls += [
-                ('bg_vid400p', 'big_buck_bunny_trailer_400p.vp8.webm'),
-                ]
-
-        # The video files are run from a file:// url. In order to work properly
-        # from an http:// url, some careful web server configuration is needed
-        def full_url(filename):
-            """Create a file:// url for the media file and verify it exists.
-
-            @param filename: string
-            """
-            p = os.path.join(self._media_dir, filename)
-            if not os.path.isfile(p):
-                raise error.TestError('Media file %s is missing.', p)
-            return 'file://' + p
-
-        js_loop_enable = """ve = document.getElementsByTagName('video')[0];
-                         ve.loop = true;
-                         ve.play();
-                         """
-
-        for name, url in urls:
-            logging.info('Playing video %s', url)
-            self._tab.Navigate(full_url(url))
-            self._tab.ExecuteJavaScript(js_loop_enable)
-            self._run_sleep(name, self._duration_secs)
-
-        for name, url in fullscreen_urls:
-            self._toggle_fullscreen()
-            self._tab.Navigate(full_url(url))
-            self._tab.ExecuteJavaScript(js_loop_enable)
-            self._run_sleep(name, self._duration_secs)
-            self._toggle_fullscreen()
-
-        for name, url in bg_urls:
-            logging.info('Playing video in background tab %s', url)
-            self._tab.Navigate(full_url(url))
-            self._tab.ExecuteJavaScript(js_loop_enable)
-            fg_tab = self._browser.tabs.New()
-            self._run_sleep(name, self._duration_secs)
-            fg_tab.Close()
-            self._tab.Activate()
-
-
-    def _run_group_sound(self):
-        """Run non-UI sound test using 'speaker-test'."""
-        # For some reason speaker-test won't work on CrOS without a reasonable
-        # buffer size specified with -b.
-        # http://crbug.com/248955
-        cmd = 'speaker-test -l %s -t sine -c 2 -b 16384' % (self._repeats * 6)
-        self._run_cmd('speaker_test', cmd)
-
-
-    def _run_group_lowlevel(self):
-        """Low level system stuff"""
-        mb = min(1024, 32 * self._repeats)
-        self._run_cmd('memtester', '/usr/local/sbin/memtester %s 1' % mb)
-
-        # one rep of dd takes about 15 seconds
-        root_dev = utils.get_root_partition()
-        cmd = 'dd if=%s of=/dev/null' % root_dev
-        self._run_cmd('dd', cmd, repeat=2 * self._repeats)
-
-
-    def _run_group_backchannel(self):
-        """WiFi sub-tests."""
-
-        shill = wifi_proxy.WifiProxy()
-        for _ in xrange(3):
-            succeeded, _, _, _, _ = shill.connect_to_wifi_network(
-                    ssid='GoogleGuest',
-                    security='none',
-                    security_parameters={},
-                    save_credentials=False)
-            if succeeded:
-                break
-
-        if not succeeded:
-            logging.error("Could not connect to WiFi")
-            return
-
-        logging.info('Starting Backchannel')
-        with backchannel.Backchannel():
-            # Wifi needs some time to recover after backchanel is activated
-            # TODO (kamrik) remove this sleep, once backchannel handles this
-            time.sleep(15)
-
-            cmd = 'ping -c %s www.google.com' % (self._duration_secs)
-            self._run_cmd('ping_wifi', cmd)
-
-            # This URL must be visible from WiFi network used for test
-            big_file_url = ('http://googleappengine.googlecode.com'
-                            '/files/GoogleAppEngine-1.6.2.msi')
-            cmd = 'curl %s > /dev/null' % big_file_url
-            self._run_cmd('download_wifi', cmd, repeat=self._repeats)
-
-
-    def _run_group_backlight(self):
-        """Vary backlight brightness and record power at each setting."""
-        for i in [100, 50, 0]:
-            self._backlight.set_percent(i)
-            start_time = time.time() + self._stabilization_seconds
-            time.sleep(30 * self._repeats)
-            self._plog.checkpoint('backlight_%03d' % i, start_time)
-        self._backlight.set_default()
-
-
-    def _web_echo(self, msg):
-        """ Displays a message in the browser."""
-        url = self._url_base + 'echo.html?'
-        url += urllib.quote(msg)
-        self._tab.Navigate(url)
-
-
-    def _run_test_groups(self, groups):
-        """ Run all the test groups.
-
-        Args:
-            groups: list of sub-test groups to run. Each sub-test group refers
-                to a _run_group_...() function.
-        """
-
-        for group in groups:
-            logging.info('Running group %s', group)
-            # The _web_echo here is important for some tests (esp. non UI)
-            # it gets the previous web page replaced with an almost empty one.
-            self._tab.Activate()
-            self._web_echo('Running test %s' % group)
-            test_func = getattr(self, '_run_group_%s' % group)
-            test_func()
-
-
-    def run_once(self, short=False, test_groups=None, reps=1):
-        # Some sub-tests have duration specified directly, _base_secs * reps
-        # is used in this case. Others complete whenever the underlying task
-        # completes, those are manually tuned to be roughly around
-        # reps * 30 seconds. Don't change _base_secs unless you also
-        # change the manual tuning in sub-tests
-        self._base_secs = 30
-        self._repeats = reps
-        self._duration_secs = self._base_secs * reps
-
-        # Lists of default tests to run
-        UI_TESTS = ['backlight', 'download', 'webpages', 'video', 'speedometer']
-        NONUI_TESTS = ['backchannel', 'sound', 'lowlevel']
-        DEFAULT_TESTS = UI_TESTS + NONUI_TESTS
-        DEFAULT_SHORT_TESTS = ['download', 'webpages', 'video']
-
-        self.short = short
-        if test_groups is None:
-            if self.short:
-                test_groups = DEFAULT_SHORT_TESTS
-            else:
-                test_groups = DEFAULT_TESTS
-        logging.info('Test groups to run: %s', ', '.join(test_groups))
-
-        self._backlight = power_utils.Backlight()
-        self._backlight.set_default()
-
-        measure = []
-        if not self._power_status.on_ac():
-            measure += \
-                [power_status.SystemPower(self._power_status.battery_path)]
-        if power_utils.has_powercap_support():
-            measure += power_rapl.create_powercap()
-        elif power_utils.has_rapl_support():
-            measure += power_rapl.create_rapl()
-        self._plog = power_status.PowerLogger(measure)
-        self._plog.start()
-
-        # Log in.
-        with chrome.Chrome() as cr:
-            self._browser = cr.browser
-            graphics_utils.screen_disable_energy_saving()
-            # Most of the tests will be running in this tab.
-            self._tab = cr.browser.tabs[0]
-
-            # Verify that we have a functioning browser and local web server.
-            self._tab.Activate()
-            self._web_echo("Sanity_test")
-            self._tab.WaitForDocumentReadyStateToBeComplete()
-
-            # Video test must have the data from download test
-            if ('video' in test_groups):
-                iv = test_groups.index('video')
-                if 'download' not in test_groups[:iv]:
-                    msg = '"download" test must run before "video".'
-                    raise error.TestError(msg)
-
-            # Run all the test groups
-            self._run_test_groups(test_groups)
-
-        # Wrap up
-        keyvals = self._plog.calc()
-        keyvals.update(self._tmp_keyvals)
-        keyvals.update(self._statomatic.publish())
-
-        # check AC status is still the same as init
-        self._power_status.refresh()
-        on_ac = self._power_status.on_ac()
-        if keyvals['b_on_ac'] != on_ac:
-            raise error.TestError('on AC changed between start & stop of test')
-
-        if not on_ac:
-            whrs = self._power_status.battery.energy_full_design
-            logging.info("energy_full_design = %0.3f Wh", whrs)
-
-            # Calculate expected battery life time with ChromeVer power draw
-            idle_name = 'ChromeVer_system_pwr_avg'
-            if idle_name in keyvals:
-                hours_life = whrs / keyvals[idle_name]
-                keyvals['hours_battery_ChromeVer'] = hours_life
-
-            # Calculate a weighted power draw and battery life time. The weights
-            # are intended to represent "typical" usage. Some video, some Flash
-            # ... and most of the time idle. see,
-            # http://www.chromium.org/chromium-os/testing/power-testing
-            weights = {'vid400p_h264_system_pwr_avg':0.1,
-                       'BallsFlex_system_pwr_avg':0.1,
-                       'BallsDHTML_system_pwr_avg':0.3,
-                      }
-            weights[idle_name] = 1 - sum(weights.values())
-
-            if set(weights).issubset(set(keyvals)):
-                p = sum(w * keyvals[k] for (k, w) in weights.items())
-                keyvals['w_Weighted_system_pwr_avg'] = p
-                keyvals['hours_battery_Weighted'] = whrs / p
-
-        self.write_perf_keyval(keyvals)
-        self._plog.save_results(self.resultsdir)
-
-
-    def cleanup(self):
-        # cleanup() is run by common_lib/test.py
-        try:
-            self._test_server.stop()
-        except AttributeError:
-            logging.debug('test_server could not be stopped in cleanup')
-
-        if self._backlight:
-            self._backlight.restore()
-        if self._services:
-            self._services.restore_services()
-
-        super(power_Consumption, self).cleanup()
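
For reference, the weighted battery-life estimate in the deleted run_once() is plain arithmetic over the measured averages; a worked sketch with hypothetical power numbers (the weights are the ones hard-coded in the removed code):

    whrs = 45.0  # hypothetical energy_full_design, in Wh

    power_w = {  # hypothetical per-checkpoint averages, in watts
        'vid400p_h264_system_pwr_avg': 7.0,
        'BallsFlex_system_pwr_avg': 8.0,
        'BallsDHTML_system_pwr_avg': 6.0,
        'ChromeVer_system_pwr_avg': 4.0,  # near-idle baseline
    }

    weights = {'vid400p_h264_system_pwr_avg': 0.1,
               'BallsFlex_system_pwr_avg': 0.1,
               'BallsDHTML_system_pwr_avg': 0.3}
    weights['ChromeVer_system_pwr_avg'] = 1 - sum(weights.values())  # 0.5

    weighted_w = sum(w * power_w[k] for k, w in weights.items())
    # 0.1*7 + 0.1*8 + 0.3*6 + 0.5*4 = 5.3 W
    print('weighted draw %.1f W, ~%.1f h battery life' % (weighted_w, whrs / weighted_w))
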
diff --git a/client/site_tests/power_Consumption/static_sites.tgz.keep b/client/site_tests/power_Consumption/static_sites.tgz.keep
deleted file mode 100644
index 4da28de..0000000
--- a/client/site_tests/power_Consumption/static_sites.tgz.keep
+++ /dev/null
Binary files differ
diff --git a/client/site_tests/power_Display/control b/client/site_tests/power_Display/control
index 9317154..b7572a8 100644
--- a/client/site_tests/power_Display/control
+++ b/client/site_tests/power_Display/control
@@ -1,12 +1,14 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Display"
 PURPOSE = "Measure display power usage."
+
 CRITERIA = "This test is a benchmark."
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:power_daily, suite:power_sanity, suite:power_monitoring"
+ATTRIBUTES = "suite:power_daily, suite:power_check, suite:power_monitoring"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while displaying different static
diff --git a/client/site_tests/power_Display/control.brightness b/client/site_tests/power_Display/control.brightness
index e4e20c9..ac095f0 100644
--- a/client/site_tests/power_Display/control.brightness
+++ b/client/site_tests/power_Display/control.brightness
@@ -1,15 +1,17 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Display.brightness"
 PURPOSE = "Measure display power usage at each brightness."
+
 CRITERIA = "This test is a benchmark."
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while displaying different static
-pages with each of Chrome OS's 16 brightness levels.
+pages with each of ChromeOS's 16 brightness levels.
 """
 
 BRIGHTNESS_PAGES = ['white', 'black', 'checker1']
diff --git a/client/site_tests/power_Display/control.cabc b/client/site_tests/power_Display/control.cabc
index 2e39fdd..1558403 100644
--- a/client/site_tests/power_Display/control.cabc
+++ b/client/site_tests/power_Display/control.cabc
@@ -1,11 +1,13 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Display.cabc"
 PURPOSE = "Measure display power usage."
+
 CRITERIA = "This test is a benchmark."
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while displaying different static
diff --git a/client/site_tests/power_Display/control.cabc_max_brightness b/client/site_tests/power_Display/control.cabc_max_brightness
index 2f7d112..487c450 100644
--- a/client/site_tests/power_Display/control.cabc_max_brightness
+++ b/client/site_tests/power_Display/control.cabc_max_brightness
@@ -1,11 +1,13 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Display.cabc_max_brightness"
 PURPOSE = "Measure display power usage."
+
 CRITERIA = "This test is a benchmark."
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while displaying different static
diff --git a/client/site_tests/power_Display/control.fast b/client/site_tests/power_Display/control.fast
index 2b62ac3..adca0e6 100644
--- a/client/site_tests/power_Display/control.fast
+++ b/client/site_tests/power_Display/control.fast
@@ -1,11 +1,13 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Display.fast"
 PURPOSE = "Measure display power usage."
+
 CRITERIA = "This test is a benchmark."
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test makes sure that power_Display works.
diff --git a/client/site_tests/power_Display/control.max_brightness b/client/site_tests/power_Display/control.max_brightness
index 2254c57..7c18a21 100644
--- a/client/site_tests/power_Display/control.max_brightness
+++ b/client/site_tests/power_Display/control.max_brightness
@@ -1,12 +1,14 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Display.max_brightness"
 PURPOSE = "Measure display power usage."
+
 CRITERIA = "This test is a benchmark."
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:power_sanity"
+ATTRIBUTES = "suite:power_check"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while displaying different static
diff --git a/client/site_tests/power_Display/power_Display.py b/client/site_tests/power_Display/power_Display.py
index 711e81c..f702b1b 100644
--- a/client/site_tests/power_Display/power_Display.py
+++ b/client/site_tests/power_Display/power_Display.py
@@ -1,8 +1,10 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 import logging
 import os
+import shutil
 import time
 
 from autotest_lib.client.common_lib import error
@@ -14,6 +16,7 @@
     """class for power_Display test.
     """
     version = 1
+    tmp_path = '/tmp'
 
     # TODO(tbroch) find more patterns that typical display vendors use to show
     # average and worstcase display power.
@@ -26,15 +29,26 @@
         @param secs_per_page: time in seconds to display page and measure power.
         @param brightness: flag for brightness setting to use for testing.
                            possible value are 'max' (100%) and 'all' (all manual
-                           brightness steps in Chrome OS)
+                           brightness steps in ChromeOS)
         """
         if pages is None:
             pages = self.PAGES
 
-        with chrome.Chrome(init_network_controller=True) as self.cr:
-            http_path = os.path.join(self.job.testdir, 'power_Display', 'html')
-            self.cr.browser.platform.SetHTTPServerDirectories(http_path)
-            tab = self.cr.browser.tabs.New()
+        # https://crbug.com/1288417
+        # Copy files to tmpdir to avoid the need to set up a local http server.
+        file_path = os.path.join(self.bindir, 'html')
+        dest_path = os.path.join(self.tmp_path, 'html')
+        shutil.copytree(file_path, dest_path)
+        http_path = 'file://' + dest_path
+
+        # --disable-sync disables test account info sync, e.g. Wi-Fi credentials,
+        # so that each test run does not remember info from the last test run.
+        extra_browser_args = ['--disable-sync']
+        # b/228256145 to avoid powerd restart
+        extra_browser_args.append('--disable-features=FirmwareUpdaterApp')
+        with chrome.Chrome(init_network_controller=True,
+                           extra_browser_args=extra_browser_args) as self.cr:
+            tab = self.cr.browser.tabs[0]
             tab.Activate()
 
             # Just measure power in full-screen.
@@ -43,6 +57,9 @@
                 with keyboard.Keyboard() as keys:
                     keys.press_key('f4')
 
+            # Stop services again as Chrome might have restarted them.
+            self._services.stop_services()
+
             if brightness not in ['', 'all', 'max']:
                 raise error.TestFail(
                         'Invalid brightness flag: %s' % (brightness))
@@ -68,7 +85,7 @@
             for name in pages:
                 url = os.path.join(http_path, name + '.html')
                 logging.info('Navigating to url: %s', url)
-                tab.Navigate(self.cr.browser.platform.http_server.UrlOf(url))
+                tab.Navigate(url)
                 tab.WaitForDocumentReadyStateToBeComplete()
 
                 for nonlinear, linear in brightnesses:
@@ -80,3 +97,5 @@
                     self.loop_sleep(loop, secs_per_page)
                     self.checkpoint_measurements(tagname, loop_start)
                     loop += 1
+
+        shutil.rmtree(dest_path)
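For reference, a minimal sketch of the file:// asset pattern introduced above, with the cleanup moved into a finally block so the copied directory is removed even when the test raises. The helper names and paths are illustrative, not part of the patch.

import os
import shutil

def stage_assets(src_dir, tmp_root='/tmp'):
    """Copy static test pages to a temp dir and return (dest, file:// base URL)."""
    dest = os.path.join(tmp_root, os.path.basename(src_dir))
    shutil.copytree(src_dir, dest)  # raises if dest already exists
    return dest, 'file://' + dest

# Hypothetical usage mirroring the patched run_once():
#   dest, base_url = stage_assets(os.path.join(self.bindir, 'html'))
#   try:
#       tab.Navigate(os.path.join(base_url, 'checker.html'))
#       ...
#   finally:
#       shutil.rmtree(dest, ignore_errors=True)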
diff --git a/client/site_tests/power_Draw/control b/client/site_tests/power_Draw/control
index 6df6e79..aa0ad75 100644
--- a/client/site_tests/power_Draw/control
+++ b/client/site_tests/power_Draw/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Draw"
 PURPOSE = "Measure how much power is drawn over a given amount of time."
 CRITERIA = "This test is a benchmark."
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will run for 200 seconds.
diff --git a/client/site_tests/power_Draw/control.aquarium b/client/site_tests/power_Draw/control.aquarium
index 483e286..ef17840 100644
--- a/client/site_tests/power_Draw/control.aquarium
+++ b/client/site_tests/power_Draw/control.aquarium
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Draw.Aquarium"
 PURPOSE = "Stress system similar to Factory Run-in."
 CRITERIA = "This test is a benchmark."
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will run for 3600 seconds by default.
diff --git a/client/site_tests/power_Draw/power_Draw.py b/client/site_tests/power_Draw/power_Draw.py
index 485220f..1de5b6b 100644
--- a/client/site_tests/power_Draw/power_Draw.py
+++ b/client/site_tests/power_Draw/power_Draw.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/power_Dummy/control b/client/site_tests/power_Dummy/control
index 47d46b5..b59da29 100644
--- a/client/site_tests/power_Dummy/control
+++ b/client/site_tests/power_Dummy/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Dummy"
 PURPOSE = "Dummy client test for testing power autotest infrastructures."
 CRITERIA = "This test is a benchmark."
@@ -11,6 +11,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = ""
+PY_VERSION = 3
 
 DOC = """
 Dummy client test for testing power telemetry wrapper tests, measurement
diff --git a/client/site_tests/power_Dummy/power_Dummy.py b/client/site_tests/power_Dummy/power_Dummy.py
index d624e49..7112a61 100644
--- a/client/site_tests/power_Dummy/power_Dummy.py
+++ b/client/site_tests/power_Dummy/power_Dummy.py
@@ -1,10 +1,10 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import time
 
-from autotest_lib.client.cros.power import power_dashboard
 from autotest_lib.client.cros.power import power_test
 
 class power_Dummy(power_test.power_Test):
@@ -32,10 +32,9 @@
         start_ts = time.time()
         self.start_measurements()
         for i in range(self.loop):
-          tstart = time.time()
-          time.sleep(self.loop_time)
-          self.checkpoint_measurements('section%s' % i, tstart)
+            tstart = time.time()
+            time.sleep(self.loop_time)
+            self.checkpoint_measurements('section%s' % i, tstart)
 
-        logger = power_dashboard.KeyvalLogger(start_ts, time.time())
-        logger.add_item('system', self.dummy_result, 'watt', 'power')
-        self._meas_logs.append(logger)
\ No newline at end of file
+        self._keyvallogger.add_item('system', self.dummy_result, 'point',
+                                    'perf')
diff --git a/client/site_tests/power_FlashVideoSuspend/control.html5 b/client/site_tests/power_FlashVideoSuspend/control.html5
deleted file mode 100755
index 52bb3c5..0000000
--- a/client/site_tests/power_FlashVideoSuspend/control.html5
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "power_FlashVideoSuspend"
-#ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-# This test has no actual dependency on servo.  However, a common
-# failure mode when there's a bug is that the DUT goes offline, and
-# must be power cycled.  Servo can do that automatically, so we
-# restrict testing to devices that have a servo, to avoid needing
-# manual repair.
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-Suspends the system with a Youtube video playing in HTML5 mode.
-
-Checks that video is playing when DUT is resumed.
-"""
-
-video_url = "http://www.youtube.com/embed/Lv-sY_z8MNs?enablejsapi=1"
-job.add_sysinfo_logfile('/sys/kernel/debug/suspend_stats', on_every_test=True)
-job.run_test('power_FlashVideoSuspend', video_url = video_url)
diff --git a/client/site_tests/power_FlashVideoSuspend/power_FlashVideoSuspend.py b/client/site_tests/power_FlashVideoSuspend/power_FlashVideoSuspend.py
deleted file mode 100755
index 83d89f2..0000000
--- a/client/site_tests/power_FlashVideoSuspend/power_FlashVideoSuspend.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.power import sys_power
-
-
-class power_FlashVideoSuspend(test.test):
-    """Suspend the system with a video playing."""
-    version = 2
-
-    def run_once(self, video_url=None):
-        utils.verify_flash_installed()
-        with chrome.Chrome(init_network_controller=True) as cr:
-            cr.browser.platform.SetHTTPServerDirectories(self.bindir)
-            tab = cr.browser.tabs[0]
-            tab.Navigate(cr.browser.platform.http_server.UrlOf(
-                os.path.join(self.bindir, 'youtube.html')))
-            self.suspend_with_youtube(cr.browser.tabs[0], video_url)
-
-
-    def check_video_is_playing(self, tab):
-        """
-        Checks if video is playing or not.
-
-        @param tab: Object to the browser tab
-        """
-        def get_current_time():
-            """Get current time from the javascript."""
-            return tab.EvaluateJavaScript('player.getCurrentTime()')
-
-        old_time = get_current_time()
-        utils.poll_for_condition(
-            condition=lambda: get_current_time() > old_time,
-            exception=error.TestError('Player is stuck until timeout.'))
-
-
-    def suspend_with_youtube(self, tab, video_url):
-        """
-        Suspends kernel while video is running in browser.
-
-        @param tab: Object to the browser tab
-        @param video_url: Object to video url
-        """
-        tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
-        logging.info('video url is %s', video_url)
-        tab.EvaluateJavaScript('play("%s")' % video_url)
-        tab.WaitForJavaScriptCondition('typeof player != "undefined"',
-                                       timeout=10)
-
-        self.check_video_is_playing(tab)
-
-        time.sleep(2)
-        try:
-            sys_power.do_suspend(10)
-        except Exception as e:
-            logging.error(e)
-            raise error.TestFail('====Kernel suspend failed====')
-        time.sleep(2)
-
-        self.check_video_is_playing(tab)
diff --git a/client/site_tests/power_FlashVideoSuspend/youtube.html b/client/site_tests/power_FlashVideoSuspend/youtube.html
deleted file mode 100755
index 1bb2924..0000000
--- a/client/site_tests/power_FlashVideoSuspend/youtube.html
+++ /dev/null
@@ -1,29 +0,0 @@
-<html>
-  <body>
-    <iframe id="player_frame" type="text/html" width="640" height="390"
-        src=""
-        frameborder="0"></iframe>
-    <br>
-    <script>
-      function play(video_url) {
-        document.getElementById('player_frame').src = video_url;
-     }
-      var tag = document.createElement('script');
-      tag.src = "http://www.youtube.com/iframe_api";
-      var firstScriptTag = document.getElementsByTagName('script')[0];
-      firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
-      var player;
-      function onYouTubeIframeAPIReady() {
-        new YT.Player('player_frame', {
-          events: {
-            'onReady': onPlayerReady,
-          }
-        });
-      }
-      function onPlayerReady(event) {
-        player = event.target;
-        player.playVideo();
-      }
-    </script>
-  </body>
-</html>
diff --git a/client/site_tests/power_HotCPUSuspend/control b/client/site_tests/power_HotCPUSuspend/control
index 8995112..c661c1f 100644
--- a/client/site_tests/power_HotCPUSuspend/control
+++ b/client/site_tests/power_HotCPUSuspend/control
@@ -2,13 +2,14 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_HotCPUSuspend"
 ATTRIBUTES = "suite:kernel_daily_regression"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Suspend the system with lots of CPU activity.
diff --git a/client/site_tests/power_HotCPUSuspend/power_HotCPUSuspend.py b/client/site_tests/power_HotCPUSuspend/power_HotCPUSuspend.py
index bc8d13b..b7970d7 100644
--- a/client/site_tests/power_HotCPUSuspend/power_HotCPUSuspend.py
+++ b/client/site_tests/power_HotCPUSuspend/power_HotCPUSuspend.py
@@ -1,8 +1,9 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import hashlib, logging, multiprocessing, os, re, time
+import hashlib, logging, multiprocessing, os, time
 from autotest_lib.client.bin import test, utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.power import sys_power
@@ -18,7 +19,7 @@
 SYSFS_CPUQUIET_ENABLE = '/sys/devices/system/cpu/cpuquiet/tegra_cpuquiet/enable'
 
 def cpu_stress():
-    sha512_hash = open('/dev/urandom', 'r').read(64)
+    sha512_hash = open('/dev/urandom', 'rb').read(64)
     while True:
         sha512_hash = hashlib.sha512(sha512_hash).digest()
 
@@ -76,7 +77,7 @@
         try:
             # fill all CPUs with a spinning task
             logging.info('starting %d workers', workers)
-            results = [pool.apply_async(cpu_stress) for _ in xrange(workers)]
+            results = [pool.apply_async(cpu_stress) for _ in range(workers)]
 
             # wait for things to settle
             logging.info('spinning for %d seconds', SUSPEND_BURN_SECONDS)
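As context for the 'rb' change above: on Python 3, hashlib only accepts bytes, so the seed must be read in binary mode. A small stand-alone illustration, not part of the test:

import hashlib

with open('/dev/urandom', 'rb') as f:   # binary mode yields bytes on Python 3
    seed = f.read(64)
digest = hashlib.sha512(seed).digest()  # works on both Python 2 and 3
# Text-mode reads return str on Python 3 (and may fail to decode raw bytes);
# hashlib.sha512() rejects str there, so binary mode is required.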
diff --git a/client/site_tests/power_Idle/control b/client/site_tests/power_Idle/control
index 015c65b..7e23dd1 100644
--- a/client/site_tests/power_Idle/control
+++ b/client/site_tests/power_Idle/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Idle"
 PURPOSE = "Measure power usage when system is idle."
 CRITERIA = "This test is a benchmark."
@@ -10,8 +10,9 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
-ATTRIBUTES = ("suite:bvt-perbuild, suite:power_idle, suite:power_sanity, "
+ATTRIBUTES = ("suite:bvt-perbuild, suite:power_idle, suite:power_check, "
               "suite:power_monitoring")
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while the system is idling.
diff --git a/client/site_tests/power_Idle/control.1sec b/client/site_tests/power_Idle/control.1sec
index 055f7f5..4b1eeb2 100644
--- a/client/site_tests/power_Idle/control.1sec
+++ b/client/site_tests/power_Idle/control.1sec
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Idle.1sec"
 PURPOSE = "Measure power usage when system is idle."
 CRITERIA = "This test is a benchmark."
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while the system is idling.
diff --git a/client/site_tests/power_Idle/control.default b/client/site_tests/power_Idle/control.default
index d6784da..4fce89e 100644
--- a/client/site_tests/power_Idle/control.default
+++ b/client/site_tests/power_Idle/control.default
@@ -11,6 +11,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:crosbolt_perf_perbuild"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while the system is idling.
@@ -21,4 +22,4 @@
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
 job.run_test('power_Idle', pdash_note=pdash_note, tag=NAME.split('.')[1],
-             default_only=True, force_discharge=True)
+             default_only=True, force_discharge='optional')
diff --git a/client/site_tests/power_Idle/control.default20min b/client/site_tests/power_Idle/control.default20min
new file mode 100644
index 0000000..8ce70c8
--- /dev/null
+++ b/client/site_tests/power_Idle/control.default20min
@@ -0,0 +1,25 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "puthik"
+NAME = "power_Idle.default20min"
+PURPOSE = "Measure power usage when system is idle."
+CRITERIA = "This test is a benchmark."
+TIME = "LONG"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test records power related statistics while the system is idling.
+
+This version tests only the default state of the DUT for 20 minutes to make
+it easier for HW Eng to manually measure power with other hardware.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_Idle', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             default_only=True, force_discharge='optional', idle_secs=1200)
diff --git a/client/site_tests/power_Idle/control.default20min_noarc b/client/site_tests/power_Idle/control.default20min_noarc
new file mode 100644
index 0000000..ea7eaff
--- /dev/null
+++ b/client/site_tests/power_Idle/control.default20min_noarc
@@ -0,0 +1,26 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "puthik"
+NAME = "power_Idle.default20min_noarc"
+PURPOSE = "Measure power usage when system is idle."
+CRITERIA = "This test is a benchmark."
+TIME = "LONG"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test records power related statistics while the system is idling.
+
+This version tests only the default state of the DUT for 20 minutes to make
+it easier for HW Eng to manually measure power with other hardware.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_Idle', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             default_only=True, force_discharge='optional', idle_secs=1200,
+             run_arc=False)
diff --git a/client/site_tests/power_Idle/control.default_noarc b/client/site_tests/power_Idle/control.default_noarc
new file mode 100644
index 0000000..5bcf32c
--- /dev/null
+++ b/client/site_tests/power_Idle/control.default_noarc
@@ -0,0 +1,25 @@
+# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "puthik"
+NAME = "power_Idle.default_noarc"
+PURPOSE = "Measure power usage when system is idle."
+CRITERIA = "This test is a benchmark."
+TIME = "SHORT"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:crosbolt_perf_perbuild"
+PY_VERSION = 3
+
+DOC = """
+This test records power related statistics while the system is idling.
+
+This version tests only the default state of the DUT for 2 minutes.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_Idle', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             default_only=True, force_discharge='optional', run_arc=False)
diff --git a/client/site_tests/power_Idle/control.fast b/client/site_tests/power_Idle/control.fast
index 4429ba2..13c588b 100644
--- a/client/site_tests/power_Idle/control.fast
+++ b/client/site_tests/power_Idle/control.fast
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Idle.fast"
 PURPOSE = "Measure power usage when system is idle."
 CRITERIA = "This test is a benchmark."
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while the system is idling.
diff --git a/client/site_tests/power_Idle/control.perf b/client/site_tests/power_Idle/control.perf
index 98e72ca..d3e3c43 100644
--- a/client/site_tests/power_Idle/control.perf
+++ b/client/site_tests/power_Idle/control.perf
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Idle.perf"
 PURPOSE = "Measure power usage when system is idle with perf enabled."
 CRITERIA = "This test is a benchmark."
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while the system is idling.
diff --git a/client/site_tests/power_Idle/power_Idle.py b/client/site_tests/power_Idle/power_Idle.py
index 58ee992..5f56190 100755
--- a/client/site_tests/power_Idle/power_Idle.py
+++ b/client/site_tests/power_Idle/power_Idle.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -32,10 +33,11 @@
     first_test_warmup_secs = 60
 
     def initialize(self, pdash_note='', seconds_period=10.,
-                   force_discharge=False):
+                   force_discharge=False, run_arc=True):
         super(power_Idle, self).initialize(seconds_period=seconds_period,
                                            pdash_note=pdash_note,
-                                           force_discharge=force_discharge)
+                                           force_discharge=force_discharge,
+                                           run_arc=run_arc)
 
     def run_once(self, warmup_secs=20, idle_secs=120, default_only=False):
         """Collect power stats for idle tests."""
@@ -56,7 +58,13 @@
         bt_device = bluetooth_device_xmlrpc_server \
             .BluetoothDeviceXmlRpcDelegate()
 
-        with chrome.Chrome() as self.cr:
+        # --disable-sync disables test account info sync (e.g. Wi-Fi credentials)
+        # so that each test run does not remember info from the last test run.
+        extra_browser_args = ['--disable-sync']
+        # b/228256145 to avoid powerd restart
+        extra_browser_args.append('--disable-features=FirmwareUpdaterApp')
+        with chrome.Chrome(extra_browser_args=extra_browser_args,
+                           arc_mode=self._arc_mode) as self.cr:
             self.is_first_test = True
 
             # Measure power in full-screen blank tab
@@ -67,6 +75,9 @@
                 with keyboard.Keyboard() as keys:
                     keys.press_key('f4')
 
+            # Stop services again as Chrome might have restarted them.
+            self._services.stop_services()
+
             if default_only:
                 self.start_measurements()
                 measure_it(warmup_secs, idle_secs, 'all-default')
diff --git a/client/site_tests/power_IdleSuspend/control b/client/site_tests/power_IdleSuspend/control
index 53e9302..2bcb973 100644
--- a/client/site_tests/power_IdleSuspend/control
+++ b/client/site_tests/power_IdleSuspend/control
@@ -9,6 +9,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Tests that powerd tries to suspend when the system is idle.
diff --git a/client/site_tests/power_IdleSuspend/power_IdleSuspend.py b/client/site_tests/power_IdleSuspend/power_IdleSuspend.py
index f0b9994..a1c7b5b 100644
--- a/client/site_tests/power_IdleSuspend/power_IdleSuspend.py
+++ b/client/site_tests/power_IdleSuspend/power_IdleSuspend.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -44,7 +45,7 @@
         logging.info('using temporary directory %s', self.tempdir)
 
         # override power manager settings
-        for key, val in POWER_MANAGER_SETTINGS.iteritems():
+        for key, val in list(POWER_MANAGER_SETTINGS.items()):
             logging.info('overriding %s to %s', key, val)
             tmp_path = '%s/%s' % (self.tempdir, key)
             mount_path = '/usr/share/power_manager/%s' % key
@@ -70,6 +71,8 @@
     def run_once(self):
         with chrome.Chrome():
             # stop power manager before reconfiguring
+            # TODO: Consider checking to see if powerd is running.
+            #       If it isn't, the test currently fails here.
             logging.info('stopping powerd')
             utils.run('stop powerd')
 
diff --git a/client/site_tests/power_KernelSuspend/control b/client/site_tests/power_KernelSuspend/control
index 1abcb28..70ebd44 100644
--- a/client/site_tests/power_KernelSuspend/control
+++ b/client/site_tests/power_KernelSuspend/control
@@ -4,13 +4,14 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_KernelSuspend"
 ATTRIBUTES = "suite:jailed_build"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Simple system suspend. This is the most basic suspend test.
diff --git a/client/site_tests/power_KernelSuspend/power_KernelSuspend.py b/client/site_tests/power_KernelSuspend/power_KernelSuspend.py
index de75707..a7bd96b 100644
--- a/client/site_tests/power_KernelSuspend/power_KernelSuspend.py
+++ b/client/site_tests/power_KernelSuspend/power_KernelSuspend.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/power_LoadTest/README.md b/client/site_tests/power_LoadTest/README.md
index e0e5ee6..6b1859a 100644
--- a/client/site_tests/power_LoadTest/README.md
+++ b/client/site_tests/power_LoadTest/README.md
@@ -6,7 +6,7 @@
 Modern mobile computers are sold with an advertised battery life, ranging from a
 few to tens of hours. Nonetheless, when these devices are used on a daily basis,
 many users report that their actual battery life doesn’t match up with the
-advertised numbers. For Chrome OS devices, we wanted to try and report battery
+advertised numbers. For ChromeOS devices, we wanted to try and report battery
 life that is as close as possible to what an average user experiences. Thus
 `power_LoadTest` was created to emulate average user behavior and measure the
 resultant battery life. This test is as an [open source][1] Chrome extension
@@ -61,7 +61,7 @@
 
 ### Via cros_sdk & autotest
 
-If you are interested in running `power_LoadTest` on a Chrome OS system, you
+If you are interested in running `power_LoadTest` on a ChromeOS system, you
 will need a Chromium OS test image that can be built by following [Build your
 own Chromium image][3] instruction with `./build_image --board=${BOARD} test`
 command. After the test image is built, you can follow the [Installing Chromium
@@ -184,21 +184,21 @@
 ##  Conclusion
 
 While the initial version of `power_LoadTest` seems to emulate well what users
-experience every day on Chrome OS devices, this test will be constantly
-improved. As we learn more about how users use Chrome OS devices and how
+experience every day on ChromeOS devices, this test will be constantly
+improved. As we learn more about how users use ChromeOS devices and how
 experienced battery life differs from tested battery life, we will use this
 data to refine the test, potentially changing the load mix or the parameters
 of the test. Our goal is to ensure that when you purchase a device, you know -
 with reasonable certainty - how long that device will last in your daily use.
 
-[1]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/master/client/site_tests/power_LoadTest/
-[2]: https://chromium.googlesource.com/chromiumos/platform2/+/master/power_manager/tools/backlight_tool.cc#154
-[3]: http://www.chromium.org/chromium-os/developer-guide#TOC-Build-a-disk-image-for-your-board
-[4]: http://www.chromium.org/chromium-os/developer-guide#TOC-Installing-Chromium-OS-on-your-Device
-[5]: http://www.chromium.org/chromium-os/testing/autotest-user-doc#TOC-Running-tests
-[6]: https://chromium.googlesource.com/chromiumos/docs/+/master/developer_guide.md#enter-the-chroot
-[7]: https://www.chromium.org/chromium-os/how-tos-and-troubleshooting/debugging-features
-[8]: https://developer.chrome.com/extensions/getstarted
-[9]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+archive/master/client/site_tests/power_LoadTest.tar.gz
-[10]: https://chromium.googlesource.com/chromiumos/platform2/+/master/power_manager/docs/keyboard_backlight.md
-[11]: http://go/cros-plt-doc
+\[1\]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/main/client/site_tests/power_LoadTest/<br>
+\[2\]: https://chromium.googlesource.com/chromiumos/platform2/+/main/power_manager/tools/backlight_tool.cc#154<br>
+\[3\]: http://www.chromium.org/chromium-os/developer-guide#TOC-Build-a-disk-image-for-your-board<br>
+\[4\]: http://www.chromium.org/chromium-os/developer-guide#TOC-Installing-Chromium-OS-on-your-Device<br>
+\[5\]: http://www.chromium.org/chromium-os/testing/autotest-user-doc#TOC-Running-tests<br>
+\[6\]: https://chromium.googlesource.com/chromiumos/docs/+/main/developer_guide.md#enter-the-chroot<br>
+\[7\]: https://www.chromium.org/chromium-os/how-tos-and-troubleshooting/debugging-features<br>
+\[8\]: https://developer.chrome.com/extensions/getstarted<br>
+\[9\]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+archive/main/client/site_tests/power_LoadTest.tar.gz<br>
+\[10\]: https://chromium.googlesource.com/chromiumos/platform2/+/main/power_manager/docs/keyboard_backlight.md<br>
+\[11\]: http://go/cros-plt-doc<br>
diff --git a/client/site_tests/power_LoadTest/control b/client/site_tests/power_LoadTest/control
index bb66fb1..2c011a3 100755
--- a/client/site_tests/power_LoadTest/control
+++ b/client/site_tests/power_LoadTest/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
@@ -11,6 +11,8 @@
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:power_loadtest"
+EXTENDED_TIMEOUT = 58500  # 16 Hours + 900 second guard.
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
diff --git a/client/site_tests/power_LoadTest/control.1hour b/client/site_tests/power_LoadTest/control.1hour
index 6c0de67..6372fc6 100644
--- a/client/site_tests/power_LoadTest/control.1hour
+++ b/client/site_tests/power_LoadTest/control.1hour
@@ -2,15 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.1hour"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:power_loadtest_1hour, suite:power_sanity, suite:power_monitoring"
+EXTENDED_TIMEOUT = 4500
+ATTRIBUTES = "suite:power_loadtest_1hour, suite:power_check, suite:power_monitoring"
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
diff --git a/client/site_tests/power_LoadTest/control.1hour_cellular b/client/site_tests/power_LoadTest/control.1hour_cellular
index b110da3..5236a34 100644
--- a/client/site_tests/power_LoadTest/control.1hour_cellular
+++ b/client/site_tests/power_LoadTest/control.1hour_cellular
@@ -2,14 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.1hour_cellular"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 4500
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
diff --git a/client/site_tests/power_LoadTest/control.FDO_eth_1hr b/client/site_tests/power_LoadTest/control.FDO_eth_1hr
new file mode 100755
index 0000000..a2e56aa
--- /dev/null
+++ b/client/site_tests/power_LoadTest/control.FDO_eth_1hr
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "power_LoadTest.FDO_eth_1hr"
+ATTRIBUTES = "suite:power_daily"
+PURPOSE = "Measure power draw when system is under load."
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 4500
+PY_VERSION = 3
+
+DOC = """
+This test runs a load test consisting of cycling through web pages, playing
+videos, etc. and measures battery power draw. The duration of this test is
+determined by loop_time * loop_cnt.
+
+
+This version of the test allows:
+  - AC is connected.
+  - Ethernet is connected.
+
+"FDO" is short for "force discharge optional." Test will use EC command to
+force DUT to discharge. If it fails, then use AC as the power source.
+"""
+
+# TODO (bleung): Find a way to do automatic Facebook login for test account.
+
+loop_time = 3600
+loop_count = 1
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_LoadTest', loop_time=loop_time, loop_count=loop_count,
+             test_low_batt_p=5, ac_ok=True, force_discharge='optional',
+             check_network=False, tag=NAME.split('.')[1],
+             pdash_note=pdash_note)
diff --git a/client/site_tests/power_LoadTest/control.FDO_eth_fast b/client/site_tests/power_LoadTest/control.FDO_eth_fast
new file mode 100644
index 0000000..968060d
--- /dev/null
+++ b/client/site_tests/power_LoadTest/control.FDO_eth_fast
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "power_LoadTest.FDO_eth_fast"
+PURPOSE = "Test health of power load test functionality."
+CRITERIA = "This test is a benchmark."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test runs a load test consisting of cycling through web pages, playing
+videos, etc. and measures battery power draw. The duration of this test is
+determined by loop_time * loop_cnt.
+
+
+This version of the test allows:
+  - AC is connected.
+  - Ethernet is connected.
+
+This control file is simply meant to simulate the various aspects of the test
+to validate that the test mechanics are in good health (login, external
+website access).
+
+It ignores whether wired access (check_network=False) or AC is connected
+(ac_ok=True).
+
+For the reasons above and the reduced runtime (3 min), it will NOT produce
+valid power consumption results for the 60/20/10/10 load and therefore should
+NOT be used for any battery life estimations.
+
+"FDO" is short for "force discharge optional." Test will use EC command to
+force DUT to discharge. If it fails, then use AC as the power source.
+"""
+
+# TODO (bleung): Find a way to do automatic Facebook login for test account.
+
+loop_time = 180
+loop_count = 1
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_LoadTest', loop_time=loop_time, loop_count=loop_count,
+             test_low_batt_p=5, ac_ok=True, force_discharge='optional',
+             check_network=False, tag=NAME.split('.')[1],
+             pdash_note=pdash_note)
diff --git a/client/site_tests/power_LoadTest/control.WIRED_10min_acok b/client/site_tests/power_LoadTest/control.WIRED_10min_acok
index ee5c414..77a0746 100644
--- a/client/site_tests/power_LoadTest/control.WIRED_10min_acok
+++ b/client/site_tests/power_LoadTest/control.WIRED_10min_acok
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.WIRED_10min_acok"
 ATTRIBUTES = "suite:power_daily"
 PURPOSE = "Measure power draw when system is under load."
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
diff --git a/client/site_tests/power_LoadTest/control.WIRED_1hr_acok b/client/site_tests/power_LoadTest/control.WIRED_1hr_acok
index 270c18d..5347059 100644
--- a/client/site_tests/power_LoadTest/control.WIRED_1hr_acok
+++ b/client/site_tests/power_LoadTest/control.WIRED_1hr_acok
@@ -2,15 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.WIRED_1hr_acok"
-ATTRIBUTES = "suite:power_daily, suite:debug_kernel_testing"
+ATTRIBUTES = "suite:debug_kernel_testing"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 4500
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
diff --git a/client/site_tests/power_LoadTest/control.docs_1hour b/client/site_tests/power_LoadTest/control.docs_1hour
index a08a2ee..857129e 100644
--- a/client/site_tests/power_LoadTest/control.docs_1hour
+++ b/client/site_tests/power_LoadTest/control.docs_1hour
@@ -2,14 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.docs_1hour"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 4500
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
@@ -47,31 +49,38 @@
 # tabs: list of urls for the window type
 # urls: list of urls to cycle through for the cycle type
 
-tasks='[{' + \
-'    type: \'window\',' + \
-'    name: \'background\',' + \
-'    start: 0,' + \
-'    duration: minutes(60),' + \
-'    focus: false,' + \
-'    tabs: [' + \
-'     \'http://www.google.com\',' + \
-'     \'http://news.google.com\',' + \
-'     \'http://www.reddit.com\',' + \
-'     \'http://clothing.shop.ebay.com/Womens-Shoes-/63889/i.html\',' + \
-'     \'http://www.facebook.com\'' + \
-'    ]}, {' + \
-'    type: \'cycle\',' + \
-'    name: \'docs\',' + \
-'    start: seconds(1),' + \
-'    duration: minutes(60),' + \
-'    delay: seconds(60),' + \
-'    timeout: seconds(10),' + \
-'    focus: true,' + \
-'    urls: [' + \
-'     ViewGDoc + \'1ywpQGu18T9e2lB_QVMlihDqiF0V5hsYkhlXCfu9B8jY\',' + \
-'     ViewGDoc + \'12qBD7L6n9hLW1OFgLgpurx7WSgDM3l01dU6YYU-xdXU\'' + \
-']}]'
 
+tasks = """
+[
+  {
+    type: 'window',
+    name: 'background',
+    start: 0,
+    duration: minutes(60),
+    focus: false,
+    tabs: [
+      'https://www.google.com/search?q=google',
+      'https://news.google.com',
+      'https://www.reddit.com',
+      'https://www.amazon.com',
+      'https://www.instagram.com/instagram',
+    ],
+  },
+  {
+    type: 'cycle',
+    name: 'docs',
+    start: seconds(1),
+    duration: minutes(60),
+    delay: seconds(60),
+    timeout: seconds(10),
+    focus: true,
+    urls: [
+      ViewGDoc + '1ywpQGu18T9e2lB_QVMlihDqiF0V5hsYkhlXCfu9B8jY',
+      ViewGDoc + '12qBD7L6n9hLW1OFgLgpurx7WSgDM3l01dU6YYU-xdXU',
+    ],
+  },
+]
+"""
 
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
diff --git a/client/site_tests/power_LoadTest/control.email_1hour b/client/site_tests/power_LoadTest/control.email_1hour
index 8c9fef5..b9db054 100644
--- a/client/site_tests/power_LoadTest/control.email_1hour
+++ b/client/site_tests/power_LoadTest/control.email_1hour
@@ -2,14 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.email_1hour"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 4500
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
@@ -47,43 +49,51 @@
 # tabs: list of urls for the window type
 # urls: list of urls to cycle through for the cycle type
 
-tasks='[{' + \
-'    type: \'window\',' + \
-'    name: \'background\',' + \
-'    start: 0,' + \
-'    duration: minutes(60),' + \
-'    focus: false,' + \
-'    tabs: [' + \
-'     \'http://www.google.com\',' + \
-'     \'http://news.google.com\',' + \
-'     \'http://www.reddit.com\',' + \
-'     \'http://clothing.shop.ebay.com/Womens-Shoes-/63889/i.html\',' + \
-'     \'http://www.facebook.com\'' + \
-'    ]}, {' + \
-'    type: \'cycle\',' + \
-'    name: \'email\',' + \
-'    start: seconds(1),' + \
-'    duration: minutes(60),' + \
-'    delay: minutes(5),' + \
-'    timeout: seconds(10),' + \
-'    focus: true,' + \
-'    urls: [' + \
-'     \'http://gmail.com\',' + \
-'     \'http://mail.google.com\'' + \
-'    ]}, {' + \
-'    type: \'cycle\',' + \
-'    name: \'audio\',' + \
-'    start: 0,' + \
-'    duration: minutes(60),' + \
-'    delay: minutes(12),' + \
-'    timeout: seconds(10),' + \
-'    focus: false,' + \
-'    urls: [' + \
-'     \'http://www.bbc.co.uk/worldservice/audioconsole/?stream=live\',' + \
-'     \'http://www.npr.org/templates/player/mediaPlayer.html?action=3&t=live1\',' + \
-'     \'http://www.cbc.ca/radio2/channels/popup.html?stream=classical\'' + \
-']}]'
-
+tasks = """
+[
+  {
+    type: 'window',
+    name: 'background',
+    start: 0,
+    duration: minutes(60),
+    focus: false,
+    tabs: [
+      'https://www.google.com/search?q=google',
+      'https://news.google.com',
+      'https://www.reddit.com',
+      'https://www.amazon.com',
+      'https://www.instagram.com/instagram',
+    ],
+  },
+  {
+    type: 'cycle',
+    name: 'email',
+    start: seconds(1),
+    duration: minutes(60),
+    delay: minutes(5),
+    timeout: seconds(10),
+    focus: true,
+    urls: [
+      'http://gmail.com',
+      'http://mail.google.com',
+    ],
+  },
+  {
+    type: 'cycle',
+    name: 'audio',
+    start: 0,
+    duration: minutes(60),
+    delay: minutes(12),
+    timeout: seconds(10),
+    focus: false,
+    urls: [
+      'http://www.bbc.co.uk/worldservice/audioconsole/?stream=live',
+      'http://www.npr.org/templates/player/mediaPlayer.html?action=3&t=live1',
+      'http://www.cbc.ca/radio2/channels/popup.html?stream=classical',
+    ],
+  },
+]
+"""
 
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
diff --git a/client/site_tests/power_LoadTest/control.eth_1hour b/client/site_tests/power_LoadTest/control.eth_1hour
index 76b8e7d..ec3d088 100644
--- a/client/site_tests/power_LoadTest/control.eth_1hour
+++ b/client/site_tests/power_LoadTest/control.eth_1hour
@@ -2,15 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.eth_1hour"
 ATTRIBUTES = ""
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 4500
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
diff --git a/client/site_tests/power_LoadTest/control.fast b/client/site_tests/power_LoadTest/control.fast
index 753998c..4826751 100644
--- a/client/site_tests/power_LoadTest/control.fast
+++ b/client/site_tests/power_LoadTest/control.fast
@@ -2,15 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.fast"
 PURPOSE = "Test health of power load test functionality."
 CRITERIA = "This test is a benchmark."
-TIME = "SHORT"
+TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:power_build, suite:power_loadtest_fast"
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
diff --git a/client/site_tests/power_LoadTest/control.fast_gaia b/client/site_tests/power_LoadTest/control.fast_gaia
index dcfcc76..81dd703 100644
--- a/client/site_tests/power_LoadTest/control.fast_gaia
+++ b/client/site_tests/power_LoadTest/control.fast_gaia
@@ -2,15 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.fast_gaia"
 PURPOSE = "Test health of power load test functionality."
 CRITERIA = "This test is a benchmark."
-TIME = "SHORT"
+TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:power_build"
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
diff --git a/client/site_tests/power_LoadTest/control.force_discharge b/client/site_tests/power_LoadTest/control.force_discharge
index 21b8c54..ae2e08c 100755
--- a/client/site_tests/power_LoadTest/control.force_discharge
+++ b/client/site_tests/power_LoadTest/control.force_discharge
@@ -2,14 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.force_discharge"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 58500  # 16 Hours + 900 second guard.
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
@@ -32,6 +34,5 @@
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
 job.run_test('power_LoadTest', loop_time=loop_time, loop_count=loop_count,
-             test_low_batt_p=5, gaia_login=False, ac_ok=True,
-             force_discharge=True, tag=NAME.split('.')[1],
-             pdash_note=pdash_note)
+             test_low_batt_p=5, ac_ok=True, force_discharge=True,
+             tag=NAME.split('.')[1], pdash_note=pdash_note)
diff --git a/client/site_tests/power_LoadTest/control.force_discharge_eth_1hr b/client/site_tests/power_LoadTest/control.force_discharge_eth_1hr
new file mode 100755
index 0000000..4b28c9b
--- /dev/null
+++ b/client/site_tests/power_LoadTest/control.force_discharge_eth_1hr
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "power_LoadTest.force_discharge_eth_1hr"
+PURPOSE = "Measure power draw when system is under load."
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 4500
+PY_VERSION = 3
+
+DOC = """
+This test runs a load test consisting of cycling through web pages, playing
+videos, etc. and measures battery power draw. The duration of this test is
+determined by loop_time * loop_cnt.
+
+
+This version of the test allows:
+  - AC is connected. The test forces the DUT to discharge via an EC command.
+  - Ethernet is connected.
+"""
+
+# TODO (bleung): Find a way to do automatic Facebook login for test account.
+
+loop_time = 3600
+loop_count = 1
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_LoadTest', loop_time=loop_time, loop_count=loop_count,
+             test_low_batt_p=5, ac_ok=True, force_discharge=True,
+             check_network=False, tag=NAME.split('.')[1],
+             pdash_note=pdash_note)
diff --git a/client/site_tests/power_LoadTest/control.single_page b/client/site_tests/power_LoadTest/control.single_page
index 09bdd6b..85ac313 100644
--- a/client/site_tests/power_LoadTest/control.single_page
+++ b/client/site_tests/power_LoadTest/control.single_page
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.single_page"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test measures the power of a single webpage including stats such as
@@ -50,19 +51,21 @@
 # The duration and delay are scaled by 1/12 due to the loop time being 300
 # instead of 3600 seconds. This is why duration is minutes(60) instead of
 # minutes(5) and delay is minutes(12) instead of minutes(1).
-tasks='[{' + \
-'    type: \'cycle\',' + \
-'    name: \'web\',' + \
-'    start: seconds(1),' + \
-'    duration: minutes(60),' + \
-'    delay: minutes(12),' + \
-'    timeout: seconds(10),' + \
-'    focus: true,' + \
-'    urls: [' + \
-'\'' + \
-args[0] + \
-'\'' + \
-']}]'
+#
+tasks = """
+[
+  {
+    type: 'cycle',
+    name: 'web',
+    start: seconds(1),
+    duration: minutes(60),
+    delay: minutes(12),
+    timeout: seconds(10),
+    focus: true,
+    urls: ['%s'],
+  },
+]
+""" % (args[0])
 
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
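The single_page control above interpolates the first control-file arg into the triple-quoted tasks block with old-style %-formatting. A minimal illustration with a hypothetical args value; note that a single quote in the URL would break the quoting inside the tasks block on the extension side.

args = ['https://example.com/page?a=1']  # stand-in for the control-file args
tasks = """
[
  { type: 'cycle', name: 'web', urls: ['%s'] },
]
""" % (args[0])
assert args[0] in tasks  # the URL is substituted verbatim into the block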
diff --git a/client/site_tests/power_LoadTest/control.video_1hour b/client/site_tests/power_LoadTest/control.video_1hour
index 7c32536..8099ecd4 100644
--- a/client/site_tests/power_LoadTest/control.video_1hour
+++ b/client/site_tests/power_LoadTest/control.video_1hour
@@ -2,14 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.video_1hour"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 4500
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
@@ -47,29 +49,34 @@
 # tabs: list of urls for the window type
 # urls: list of urls to cycle through for the cycle type
 
-tasks='[{' + \
-'    type: \'window\',' + \
-'    name: \'background\',' + \
-'    start: 0,' + \
-'    duration: 3600000,' + \
-'    focus: false,' + \
-'    tabs: [' + \
-'     \'http://www.google.com\',' + \
-'     \'http://news.google.com\',' + \
-'     \'http://www.reddit.com\',' + \
-'     \'http://clothing.shop.ebay.com/Womens-Shoes-/63889/i.html\',' + \
-'     \'http://www.facebook.com\'' + \
-'    ]}, {' + \
-'    type: \'window\',' + \
-'    name: \'video\',' + \
-'    start: 1000,' + \
-'    duration: 3600000,' + \
-'    focus: true,' + \
-'    tabs: [' + \
-'     \'http://www.youtube.com/embed/YE7VzlLtp-4?autoplay=1&loop=1&' + \
-'playlist=YE7VzlLtp-4\'' + \
-'    ]}]'
-
+tasks = """
+[
+  {
+    type: 'window',
+    name: 'background',
+    start: 0,
+    duration: minutes(60),
+    focus: false,
+    tabs: [
+      'https://www.google.com/search?q=google',
+      'https://news.google.com',
+      'https://www.reddit.com',
+      'https://www.amazon.com',
+      'https://www.instagram.com/instagram',
+    ],
+  },
+  {
+    type: 'window',
+    name: 'video',
+    start: seconds(1),
+    duration: minutes(60),
+    focus: true,
+    tabs: [
+      'http://www.youtube.com/embed/YE7VzlLtp-4?autoplay=1&loop=1&playlist=YE7VzlLtp-4',
+    ],
+  },
+]
+"""
 
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
diff --git a/client/site_tests/power_LoadTest/control.web_1hour b/client/site_tests/power_LoadTest/control.web_1hour
index 035a11d..4d47d25 100644
--- a/client/site_tests/power_LoadTest/control.web_1hour
+++ b/client/site_tests/power_LoadTest/control.web_1hour
@@ -2,14 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_LoadTest.web_1hour"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 4500
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
@@ -47,28 +49,34 @@
 # tabs: list of urls for the window type
 # urls: list of urls to cycle through for the cycle type
 
-tasks='[{' + \
-'    type: \'window\',' + \
-'    name: \'background\',' + \
-'    start: 0,' + \
-'    duration: minutes(60),' + \
-'    focus: false,' + \
-'    tabs: [' + \
-'     \'http://www.google.com\',' + \
-'     \'http://news.google.com\',' + \
-'     \'http://www.reddit.com\',' + \
-'     \'http://clothing.shop.ebay.com/Womens-Shoes-/63889/i.html\',' + \
-'     \'http://www.facebook.com\'' + \
-'    ]}, {' + \
-'    type: \'cycle\',' + \
-'    name: \'web\',' + \
-'    start: seconds(1),' + \
-'    duration: minutes(60),' + \
-'    delay: seconds(60),' + \
-'    timeout: seconds(10),' + \
-'    focus: true,' + \
-'    urls: URLS, }]'
-
+tasks = """
+[
+  {
+    type: 'window',
+    name: 'background',
+    start: 0,
+    duration: minutes(60),
+    focus: false,
+    tabs: [
+      'https://www.google.com/search?q=google',
+      'https://news.google.com',
+      'https://www.reddit.com',
+      'https://www.amazon.com',
+      'https://www.instagram.com/instagram',
+    ],
+  },
+  {
+    type: 'cycle',
+    name: 'web',
+    start: seconds(1),
+    duration: minutes(60),
+    delay: seconds(60),
+    timeout: seconds(10),
+    focus: true,
+    urls: URLS,
+  },
+]
+"""
 
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
diff --git a/client/site_tests/power_LoadTest/extension/urls.js b/client/site_tests/power_LoadTest/extension/urls.js
index 1fcb643..b8af5e1 100755
--- a/client/site_tests/power_LoadTest/extension/urls.js
+++ b/client/site_tests/power_LoadTest/extension/urls.js
@@ -7,7 +7,7 @@
 
 var ViewGDoc = ('https://docs.google.com/document/d/');
 
-var BBC_AUDIO_URL = 'https://www.bbc.co.uk/radio/player/bbc_world_service';
+var RADIO_AUDIO_URL = 'https://storage.googleapis.com/chromiumos-test-assets-public/power_LoadTest/long_rain.mp3';
 
 var PLAY_MUSIC_URL = 'https://play.google.com/music/listen?u=0#/wst/st/a2be2d85-0ac9-3a7a-b038-e221bb63ef71';
 
@@ -29,7 +29,8 @@
      'https://news.google.com',
      'https://www.reddit.com',
      'https://www.amazon.com',
-     'https://www.facebook.com/facebook'
+     // b/215156393: Facebook now uses extra CPU power; switched to Instagram.
+     'https://www.instagram.com/instagram'
     ]
   },
   {
@@ -67,10 +68,7 @@
     delay: minutes(12),
     timeout: seconds(30),
     focus: false,
-    // Google Play Music requires MP3 decoder for playing music.
-    // Fall back to BBC if the browser does not have MP3 decoder bundle.
-    urls: isMP3DecoderPresent() ? [BBC_AUDIO_URL, BBC_AUDIO_URL] :
-                                  [BBC_AUDIO_URL, BBC_AUDIO_URL]
+    urls: [RADIO_AUDIO_URL, RADIO_AUDIO_URL],
   },
   {
     // After 48 minutes, play with Google Docs for 6 minutes
diff --git a/client/site_tests/power_LoadTest/power_LoadTest.py b/client/site_tests/power_LoadTest/power_LoadTest.py
index 5ba2b76..671625a 100755
--- a/client/site_tests/power_LoadTest/power_LoadTest.py
+++ b/client/site_tests/power_LoadTest/power_LoadTest.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -21,7 +22,6 @@
 from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
 from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
 from autotest_lib.client.cros import backchannel
-from autotest_lib.client.cros import ec
 from autotest_lib.client.cros import httpd
 from autotest_lib.client.cros import memory_bandwidth_logger
 from autotest_lib.client.cros import service_stopper
@@ -32,6 +32,7 @@
 from autotest_lib.client.cros.power import power_dashboard
 from autotest_lib.client.cros.power import power_status
 from autotest_lib.client.cros.power import power_utils
+from autotest_lib.client.cros.power import force_discharge_utils
 from telemetry.core import exceptions
 
 params_dict = {
@@ -48,16 +49,33 @@
     """test class"""
     version = 2
 
-    def initialize(self, percent_initial_charge_min=None,
-                 check_network=True, loop_time=3600, loop_count=1,
-                 should_scroll='true', should_scroll_up='true',
-                 scroll_loop='false', scroll_interval_ms='10000',
-                 scroll_by_pixels='600', test_low_batt_p=3,
-                 verbose=True, force_wifi=False, wifi_ap='', wifi_sec='none',
-                 wifi_pw='', wifi_timeout=60, use_cellular_network=False,
-                 tasks='', volume_level=10, mic_gain=10, low_batt_margin_p=2,
-                 ac_ok=False, log_mem_bandwidth=False, gaia_login=None,
-                 force_discharge=False, pdash_note=''):
+    def initialize(self,
+                   percent_initial_charge_min=None,
+                   check_network=True,
+                   loop_time=3600,
+                   loop_count=1,
+                   should_scroll='true',
+                   should_scroll_up='true',
+                   scroll_loop='false',
+                   scroll_interval_ms='10000',
+                   scroll_by_pixels='600',
+                   test_low_batt_p=3,
+                   verbose=True,
+                   force_wifi=False,
+                   wifi_ap='',
+                   wifi_sec='none',
+                   wifi_pw='',
+                   wifi_timeout=60,
+                   use_cellular_network=False,
+                   tasks='',
+                   volume_level=10,
+                   mic_gain=10,
+                   low_batt_margin_p=2,
+                   ac_ok=False,
+                   log_mem_bandwidth=False,
+                   gaia_login=None,
+                   force_discharge='false',
+                   pdash_note=''):
         """
         percent_initial_charge_min: min battery charge at start of test
         check_network: check that Ethernet interface is not running
@@ -84,8 +102,12 @@
         log_mem_bandwidth: boolean to log memory bandwidth during the test
         gaia_login: whether real GAIA login should be attempted.  If 'None'
             (default) then boolean is determined from URL.
-        force_discharge: boolean of whether to tell ec to discharge battery even
-            when the charger is plugged in.
+        force_discharge: string controlling whether to tell the EC to discharge
+            the battery even when the charger is plugged in. 'false' means do
+            not force discharge; 'true' means force discharge and raise an
+            error if it fails; 'optional' means force discharge when possible
+            but do not raise an error if it fails, which is friendlier to
+            devices without a battery.
         pdash_note: note of the current run to send to power dashboard.
         """
         self._backlight = None
@@ -109,7 +131,7 @@
         self._force_wifi = force_wifi
         self._use_cellular_network = use_cellular_network
         self._testServer = None
-        self._tasks = tasks.replace(' ','')
+        self._tasks = tasks
         self._backchannel = None
         self._shill_proxy = None
         self._volume_level = volume_level
@@ -118,20 +140,13 @@
         self._log_mem_bandwidth = log_mem_bandwidth
         self._wait_time = 60
         self._stats = collections.defaultdict(list)
-        self._force_discharge = force_discharge
         self._pdash_note = pdash_note
 
         self._power_status = power_status.get_status()
 
-        if force_discharge:
-            if not self._power_status.battery:
-                raise error.TestNAError('DUT does not have battery. '
-                                        'Could not force discharge.')
-            if not ec.has_cros_ec():
-                raise error.TestNAError('DUT does not have CrOS EC. '
-                                        'Could not force discharge.')
-            if not power_utils.charge_control_by_ectool(False):
-                raise error.TestError('Could not run battery force discharge.')
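+        # Hand the force_discharge setting ('false'/'true'/'optional') to the
+        # shared helper; when discharge is actually forced, AC presence is fine.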
+        self._force_discharge_success = force_discharge_utils.process(
+                force_discharge, self._power_status)
+        if self._force_discharge_success:
             self._ac_ok = True
 
         if not self._power_status.battery:
@@ -142,8 +157,10 @@
                 rsp = "Skipping test for device without battery and powercap."
                 raise error.TestNAError(rsp)
 
-        self._tmp_keyvals['b_on_ac'] = (not force_discharge and
-                                        self._power_status.on_ac())
+        self._tmp_keyvals['b_on_ac'] = int(not self._force_discharge_success
+                                           and self._power_status.on_ac())
+        self._tmp_keyvals['force_discharge'] = int(
+                self._force_discharge_success)
 
         self._gaia_login = gaia_login
         if gaia_login is None:
@@ -181,7 +198,7 @@
 
             self._shill_proxy = wifi_proxy.WifiProxy()
             self._shill_proxy.remove_all_wifi_entries()
-            for i in xrange(1,4):
+            for i in range(1, 4):
                 raw_output = self._shill_proxy.connect_to_wifi_network(
                         wifi_config.ssid,
                         wifi_config.security,
@@ -199,7 +216,7 @@
                         from_dbus_proxy_output(raw_output)
                 if result.success:
                     break
-                logging.warn('wifi connect: disc:%d assoc:%d config:%d fail:%s',
+                logging.warning('wifi connect: disc:%d assoc:%d config:%d fail:%s',
                              result.discovery_time, result.association_time,
                              result.configuration_time, result.failure_reason)
             else:
@@ -228,13 +245,13 @@
             self._shill_proxy.wait_for_cellular_service_object()
 
         # record the max backlight level
-        self._backlight = power_utils.Backlight()
+        self._backlight = power_utils.Backlight(
+                force_battery=self._force_discharge_success)
         self._tmp_keyvals['level_backlight_max'] = \
             self._backlight.get_max_level()
 
         self._services = service_stopper.ServiceStopper(
             service_stopper.ServiceStopper.POWER_DRAW_SERVICES)
-        self._services.stop_services()
 
         self._detachable_handler = power_utils.BaseActivitySimulator()
 
@@ -272,7 +289,9 @@
             self._wh_energy_start = self._power_status.battery.energy
 
         self.task_monitor_file = open(os.path.join(self.resultsdir,
-                                      'task-monitor.json'), 'wt')
+                                                   'task-monitor.json'),
+                                      mode='wt',
+                                      **power_utils.encoding_kwargs())
 
 
     def run_once(self):
@@ -310,23 +329,31 @@
         if utils.is_arc_available():
             arc_mode = arc_common.ARC_MODE_ENABLED
 
+        # --disable-sync disables test account info sync, e.g. Wi-Fi credentials,
+        # so that each test run does not remember info from the last test run.
+        extra_browser_args = ['--disable-sync']
+        # b/228256145: disable FirmwareUpdaterApp to avoid a powerd restart.
+        extra_browser_args.append('--disable-features=FirmwareUpdaterApp')
         try:
-            self._browser = chrome.Chrome(extension_paths=[ext_path],
-                                          gaia_login=self._gaia_login,
-                                          username=self._username,
-                                          password=self._password,
-                                          arc_mode=arc_mode)
+            self._browser = chrome.Chrome(
+                    extension_paths=[ext_path],
+                    extra_browser_args=extra_browser_args,
+                    gaia_login=self._gaia_login,
+                    username=self._username,
+                    password=self._password,
+                    arc_mode=arc_mode)
         except exceptions.LoginException:
             # already failed guest login
             if not self._gaia_login:
                 raise
             self._gaia_login = False
-            logging.warn("Unable to use GAIA acct %s.  Using GUEST instead.\n",
+            logging.warning("Unable to use GAIA acct %s.  Using GUEST instead.\n",
                          self._username)
             self._browser = chrome.Chrome(extension_paths=[ext_path],
                                           gaia_login=self._gaia_login)
         if not self._gaia_login:
             self._tmp_keyvals['username'] = 'GUEST'
+        self._tmp_keyvals['gaia_login'] = int(self._gaia_login)
 
         extension = self._browser.get_extension(ext_path)
         for k in params_dict:
@@ -334,6 +361,10 @@
                 extension.ExecuteJavaScript('var %s = %s;' %
                                             (k, getattr(self, params_dict[k])))
 
+        # Stop the services after the browser is set up. This ensures that
+        # restarting the UI does not restart services, e.g. powerd, underneath us.
+        self._services.stop_services()
+
         # This opens a trap start page to capture tabs opened for first login.
         # It will be closed when startTest is run.
         extension.ExecuteJavaScript('chrome.windows.create(null, null);')
@@ -420,6 +451,8 @@
         psr.refresh()
         self._tmp_keyvals['minutes_battery_life_tested'] = (t1 - t0) / 60
         self._tmp_keyvals.update(psr.get_keyvals())
+        self._start_time = t0
+        self._end_time = t1
 
 
     def postprocess_iteration(self):
@@ -436,12 +469,12 @@
 
 
         def _log_per_loop_stats():
-            samples_per_loop = self._loop_time / self._wait_time + 1
+            samples_per_loop = int(self._loop_time / self._wait_time) + 1
             for kname in self._stats:
                 start_idx = 0
                 loop = 1
-                for end_idx in xrange(samples_per_loop, len(self._stats[kname]),
-                                      samples_per_loop):
+                for end_idx in range(samples_per_loop, len(self._stats[kname]),
+                                     samples_per_loop):
                     _log_stats("%s loop %d" % (kname, loop),
                                self._stats[kname][start_idx:end_idx])
                     loop += 1
@@ -500,11 +533,15 @@
         keyvals['wh_energy_powerlogger'] = \
                              self._energy_use_from_powerlogger(keyvals)
 
-        if not self._power_status.on_ac() and keyvals['ah_charge_used'] > 0:
+        if (self._force_discharge_success or not self._power_status.on_ac()
+            ) and keyvals['ah_charge_used'] > 0:
             # For full runs, we should use charge to scale for battery life,
             # since the voltage swing is accounted for.
             # For short runs, energy will be a better estimate.
-            if self._loop_count > 1:
+            # TODO(b/188082306): some devices do not provide
+            # 'wh_energy_powerlogger', so use charge in that case to scale for
+            # battery life.
+            if self._loop_count > 1 or keyvals['wh_energy_powerlogger'] <= 0:
                 estimated_reps = (keyvals['ah_charge_full_design'] /
                                   keyvals['ah_charge_used'])
             else:
@@ -540,16 +577,46 @@
                             keyvals)
         # Avoid polluting the keyvals with non-core domains.
         core_keyvals = power_utils.get_core_keyvals(keyvals)
-        if not self._gaia_login:
-            core_keyvals = {'INVALID_%s' % str(k): v for k, v in
-                            core_keyvals.iteritems()}
-        else:
-            for key, value in core_keyvals.iteritems():
-                if re.match(r'percent_[cg]pu(idle|pkg).*_R?C0(_C1)?_time', key):
-                    self.output_perf_value(description=key,
-                                           value=value,
-                                           units='percent',
-                                           higher_is_better=False)
+        for key, value in core_keyvals.items():
+            if re.match(r'percent_[cg]pu(idle|pkg).*_R?C0(_C1)?_time', key):
+                self.output_perf_value(description=key,
+                                       value=value,
+                                       units='percent',
+                                       higher_is_better=False)
+
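+        # Collect scalar run metrics in a KeyvalLogger so they are written out
+        # and uploaded along with the other measurement logs.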
+        logger = power_dashboard.KeyvalLogger(self._start_time, self._end_time)
+        for key in [
+                'b_on_ac', 'force_discharge', 'gaia_login',
+                'percent_usb_suspended_time'
+        ]:
+            logger.add_item(key, keyvals[key], 'point', 'perf')
+
+        # Add audio/docs/email/web failed-load details to the dashboard and keyvals.
+        for task in ('audio', 'docs', 'email', 'web'):
+            key = 'ext_%s_failed_loads' % task
+            if key not in keyvals:
+                continue
+            vals = (int(x) for x in keyvals[key].split('_'))
+            for index, val in enumerate(vals):
+                log_name = 'loop%02d_%s_failed_load' % (index, task)
+                logger.add_item(log_name, val, 'point', 'perf')
+                core_keyvals[log_name] = val
+
+        # Add ext_ms_page_load_time_mean to power dashboard
+        if 'ext_ms_page_load_time_mean' in keyvals:
+            vals = (float(x)
+                    for x in keyvals['ext_ms_page_load_time_mean'].split('_'))
+            for index, val in enumerate(vals):
+                log_name = 'loop%02d_ms_page_load_time' % index
+                logger.add_item(log_name, val, 'point', 'perf')
+
+        # Add battery life and power to power dashboard
+        for key in ('minutes_battery_life_tested', 'minutes_battery_life',
+                    'w_energy_rate'):
+            if key in keyvals:
+                logger.add_item(key, keyvals[key], 'point', 'perf')
+
+        self._meas_logs.append(logger)
 
         self.write_perf_keyval(core_keyvals)
         for log in self._meas_logs:
@@ -566,10 +633,10 @@
                 self.tagged_testname, self.resultsdir, note=self._pdash_note)
             dashboard.upload()
 
+        power_dashboard.generate_parallax_report(self.outputdir)
 
     def cleanup(self):
-        if self._force_discharge:
-            power_utils.charge_control_by_ectool(True)
+        force_discharge_utils.restore(self._force_discharge_success)
         if self._backlight:
             self._backlight.restore()
         if self._services:
@@ -579,6 +646,7 @@
 
         if self.task_monitor_file:
             self.task_monitor_file.close()
+            self._generate_task_monitor_html()
 
         if self._shill_proxy:
             if self._force_wifi:
@@ -768,7 +836,7 @@
         elif has_light_sensor:
             level_to_set = (40 * default_level) / 100
         elif has_hover:
-            logging.warn('Device has hover but no light sensor')
+            logging.warning('Device has hover but no light sensor')
 
         logging.info('Setting keyboard backlight to %d', level_to_set)
         self._keyboard_backlight.set_level(level_to_set)
@@ -798,7 +866,7 @@
                 if start_extension >= start:
                     start = start_extension
                     break
-                logging.warn('Timestamp from extension (%.2f) is earlier than'
+                logging.warning('Timestamp from extension (%.2f) is earlier than '
                              'timestamp from autotest (%.2f).',
                              start_extension, start)
 
@@ -829,10 +897,85 @@
                 self.task_monitor_file.write(",\n")
                 # we don't want to add url information to our keyvals.
                 # httpd adds them automatically so we remove them again
-                del handler.server._form_entries[idx]
+                if idx in handler.server._form_entries:
+                    del handler.server._form_entries[idx]
         handler.send_response(200)
 
 
+    def _generate_task_monitor_html(self):
+        json_decoder = json.JSONDecoder()
+        # Regex to reduce a URL to its site name, captured as the 'site' group.
+        pattern = re.compile(r'.*https?://(www[.])?(?P<site>[^.]*[.][^/]*)')
+        data = []
+        min_ts = None
+        process_dict = {}
+        process_id = 1
+        with open(os.path.join(self.resultsdir, 'task-monitor.json'), 'r',
+                  **power_utils.encoding_kwargs()) as f:
+            json_strs = f.read().splitlines()
+            for json_str in json_strs[1:]:
+                if len(json_str) < 10:
+                    continue
+                entry_dict, _ = json_decoder.raw_decode(json_str, 0)
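+                # Make timestamps relative to the first sample; the division
+                # assumes the JSON timestamps are in milliseconds.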
+                if not min_ts:
+                    min_ts = entry_dict['timestamp']
+                ts = (entry_dict['timestamp'] - min_ts) / 1000
+
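+                # Map each process to a readable key (tab site, service worker
+                # site, or task title) and record its CPU usage.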
+                items = {}
+                for p in entry_dict['processes']:
+                    if 'cpu' not in p:
+                        continue
+                    tab = p['tasks'][0]
+                    key = tab['title']
+                    if 'tabId' in tab:
+                        tabInfo = [
+                                t for t in entry_dict['tabInfo']
+                                if t['tabId'] == tab['tabId']
+                        ]
+                        if len(tabInfo) > 0 and 'url' in tabInfo[0]:
+                            url = tabInfo[0]['url']
+                            key = 'Tab: ' + pattern.search(url).group('site')
+
+                    if key.startswith('Service Worker'):
+                        key = 'Service Worker: ' + \
+                            pattern.search(key).group('site')
+
+                    items[key] = p['cpu']
+                    if key not in process_dict:
+                        process_dict[key] = process_id
+                        process_id += 1
+
+                data.append((ts, items))
+
+        cols = ['timestamp'] + list(process_dict.keys())
+        rows = [cols]
+
+        # This data is logged every second, but the resulting graph would be
+        # too dense, so we average the data over an |avg_window|-second window.
+        avg_window = 3
+        if len(data) > 1000:
+            avg_window = 20
+
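+        # Build one averaged row per |avg_window| samples: start a row at the
+        # window boundary, accumulate cpu/avg_window per process, then emit it.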
+        for index, (ts, items) in enumerate(data):
+            if index % avg_window == 0:
+                row = [0] * len(cols)
+                row[0] = ts
+            for name, cpu in items.items():
+                row[process_dict[name]] += cpu / avg_window
+            if index % avg_window == avg_window - 1:
+                rows.append(row)
+
+        row_indent = ' ' * 12
+        data_str = ',\n'.join([row_indent + json.dumps(row) for row in rows])
+
+        out_str = power_dashboard._HTML_CHART_STR.format(
+                data=data_str, unit='percent', type='process cpu usage')
+
+        with open(os.path.join(self.resultsdir, 'task-monitor.html'),
+                  'w') as f:
+            f.write(out_str)
+
+
 def alphanum_key(s):
     """ Turn a string into a list of string and numeric chunks. This enables a
         sort function to use this list as a key to sort alphanumeric strings
@@ -861,12 +1004,13 @@
     """
 
     if form:
-        for field in sorted(form.keys(), key=alphanum_key):
+        for field in sorted(list(form.keys()), key=alphanum_key):
             logging.debug("[extension] @ %s %s", _loop_prefix(loop_number),
             form[field].value)
             # we don't want to add url information to our keyvals.
             # httpd adds them automatically so we remove them again
-            del handler.server._form_entries[field]
+            if field in handler.server._form_entries:
+                del handler.server._form_entries[field]
 
 
 def _extension_page_time_info_handler(handler, form, loop_number,
@@ -906,7 +1050,8 @@
 
         # we don't want to add url information to our keyvals.
         # httpd adds them automatically so we remove them again
-        del handler.server._form_entries[field]
+        if field in handler.server._form_entries:
+            del handler.server._form_entries[field]
 
     page_base = _loop_keyname(loop_number, 'web_page_')
     for page in page_timestamps:
@@ -960,7 +1105,7 @@
         keyval_data = json.loads(form[field].value)
 
         # Print each key:value pair and associate it with the data
-        for key, value in keyval_data.iteritems():
+        for key, value in keyval_data.items():
             logging.debug("[extension] @ %s key: %s val: %s",
                 _loop_prefix(loop_number), key, value)
             # Add the key:values to the _tmp_keyvals set
@@ -968,7 +1113,8 @@
 
         # we don't want to add url information to our keyvals.
         # httpd adds them automatically so we remove them again
-        del handler.server._form_entries[field]
+        if field in handler.server._form_entries:
+            del handler.server._form_entries[field]
 
 
 def _loop_prefix(loop):
diff --git a/client/site_tests/power_LowMemorySuspend/control b/client/site_tests/power_LowMemorySuspend/control
index fcecca2..063fc5f 100644
--- a/client/site_tests/power_LowMemorySuspend/control
+++ b/client/site_tests/power_LowMemorySuspend/control
@@ -11,6 +11,8 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 4500
+PY_VERSION = 3
 DOC = """
 This is a suspending stress test. It suspends the system many times
 when memory is low. It simulates the memory fragmentation by creating
diff --git a/client/site_tests/power_LowMemorySuspend/power_LowMemorySuspend.py b/client/site_tests/power_LowMemorySuspend/power_LowMemorySuspend.py
index 7e777be..fcb1f87 100644
--- a/client/site_tests/power_LowMemorySuspend/power_LowMemorySuspend.py
+++ b/client/site_tests/power_LowMemorySuspend/power_LowMemorySuspend.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -9,7 +10,7 @@
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.common_lib.cros import arc_util
+from autotest_lib.client.common_lib.cros import power_load_util
 from autotest_lib.client.cros.power import sys_power
 
 
@@ -115,7 +116,8 @@
     def run_once(self, switches_per_suspend=15, total_suspend_duration=2400,
                  suspend_seconds=10, additional_sleep=10):
         """Runs the test once."""
-        username, password = arc_util.get_test_account_info()
+        username = power_load_util.get_username()
+        password = power_load_util.get_password()
         with chrome.Chrome(gaia_login=True, username=username,
                            password=password) as cr:
             tabs = self.create_tabs(cr)
diff --git a/client/site_tests/power_MeetClient/control.manual b/client/site_tests/power_MeetClient/control.manual
deleted file mode 100644
index f6f03a5..0000000
--- a/client/site_tests/power_MeetClient/control.manual
+++ /dev/null
@@ -1,24 +0,0 @@
-AUTHOR = "puthik"
-NAME = "power_MeetClient.manual"
-PURPOSE = "Measures Meet performance."
-CRITERIA = "This test is a benchmark."
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """
-This test uses given meet_code, username, password to test Meet perfomance.
-
-Example call with test_that
-
-test_that ${IP} power_MeetClient.manual --args "meet_code=${CODE} \
-username=${USERNAME} password=${PASSWORD}"
-"""
-
-args_dict = utils.args_to_dict(args)
-meet_code = args_dict['meet_code']
-username = args_dict['username']
-password = args_dict['password']
-job.run_test('power_MeetClient', meet_code=meet_code, pdash_note=meet_code,
-              username=username, password=password, tag=NAME.split('.')[1])
diff --git a/client/site_tests/power_MeetClient/power_MeetClient.py b/client/site_tests/power_MeetClient/power_MeetClient.py
deleted file mode 100644
index 8cc08a2..0000000
--- a/client/site_tests/power_MeetClient/power_MeetClient.py
+++ /dev/null
@@ -1,283 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import collections
-import enum
-import json
-import os
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.common_lib.cros import power_load_util
-from autotest_lib.client.cros.input_playback import keyboard
-from autotest_lib.client.cros.power import power_dashboard
-from autotest_lib.client.cros.power import power_status
-from autotest_lib.client.cros.power import power_test
-
-
-class power_MeetClient(power_test.power_Test):
-    """class for power_MeetClient test.
-
-    This test should be call from power_MeetCall server test only.
-    """
-    version = 1
-
-    video_url = 'http://meet.google.com'
-    doc_url = 'http://doc.new'
-
-    def initialize(self,
-                   seconds_period=5.,
-                   pdash_note='',
-                   force_discharge=False):
-        """initialize method."""
-        super(power_MeetClient, self).initialize(
-                seconds_period=seconds_period,
-                pdash_note=pdash_note,
-                force_discharge=force_discharge)
-
-    def run_once(self,
-                 meet_code,
-                 duration=180,
-                 layout='Tiled',
-                 username=None,
-                 password=None):
-        """run_once method.
-
-        @param meet_code: Meet code generated in power_MeetCall.
-        @param duration: duration in seconds.
-        @param layout: string of meet layout to use.
-        @param username: Google account to use.
-        @param password: password for Google account.
-        """
-        if not username and not password:
-            username = power_load_util.get_meet_username()
-            password = power_load_util.get_meet_password()
-        if not username or not password:
-            raise error.TestFail('Need to supply both username and password.')
-        extra_browser_args = self.get_extra_browser_args_for_camera_test()
-        with keyboard.Keyboard() as keys,\
-             chrome.Chrome(init_network_controller=True,
-                           gaia_login=True,
-                           username=username,
-                           password=password,
-                           extra_browser_args=extra_browser_args,
-                           autotest_ext=True) as cr:
-
-            # Move existing window to left half and open video page
-            tab = cr.browser.tabs[0]
-            tab.Activate()
-
-            # Run in full-screen.
-            fullscreen = tab.EvaluateJavaScript('document.webkitIsFullScreen')
-            if not fullscreen:
-                keys.press_key('f4')
-
-            url = self.video_url + '/' + meet_code
-            logging.info('Navigating left window to %s', url)
-            tab.Navigate(url)
-
-            # Workaround when camera isn't init for some unknown reason.
-            time.sleep(10)
-            tab.EvaluateJavaScript('location.reload()')
-
-            tab.WaitForDocumentReadyStateToBeComplete()
-            logging.info(meet_code)
-            self.keyvals['meet_code'] = meet_code
-
-            def wait_until(cond, error_msg):
-                """Helper for javascript polling wait."""
-                for _ in range(60):
-                    time.sleep(1)
-                    if tab.EvaluateJavaScript(cond):
-                        return
-                raise error.TestFail(error_msg)
-
-            wait_until('window.hasOwnProperty("hrTelemetryApi")',
-                       'Meet API does not existed.')
-            wait_until('hrTelemetryApi.isInMeeting()',
-                       'Can not join meeting.')
-            wait_until('hrTelemetryApi.getParticipantCount() > 1',
-                       'Meeting has no other participant.')
-
-            # Make sure camera and mic are on.
-            tab.EvaluateJavaScript('hrTelemetryApi.setCameraMuted(false)')
-            tab.EvaluateJavaScript('hrTelemetryApi.setMicMuted(false)')
-
-            if layout == 'Tiled':
-                tab.EvaluateJavaScript('hrTelemetryApi.setTiledLayout()')
-            elif layout == 'Auto':
-                tab.EvaluateJavaScript('hrTelemetryApi.setAutoLayout()')
-            elif layout == 'Sidebar':
-                tab.EvaluateJavaScript('hrTelemetryApi.setSidebarLayout()')
-            elif layout == 'Spotlight':
-                tab.EvaluateJavaScript('hrTelemetryApi.setSpotlightLayout()')
-            else:
-                raise error.TestError('Unknown layout %s' % layout)
-
-            self.keyvals['layout'] = layout
-
-            self.start_measurements()
-            time.sleep(duration)
-            end_time = self._start_time + duration
-
-            # Collect stat
-            if not tab.EvaluateJavaScript('window.hasOwnProperty("realtime")'):
-                logging.info('Account %s is not in allowlist for MediaInfoAPI',
-                             username)
-                return
-
-            meet_data = tab.EvaluateJavaScript(
-                'realtime.media.getMediaInfoDataPoints()')
-
-            power_dashboard.get_dashboard_factory().registerDataType(
-                MeetStatLogger, MeetStatDashboard)
-
-            self._meas_logs.append(
-                    MeetStatLogger(self._start_time, end_time, meet_data))
-
-
-class MeetStatLogger(power_status.MeasurementLogger):
-    """Class for logging meet data point to power dashboard.
-
-    Format of meet_data http://google3/logs/proto/buzz/callstats.proto
-    """
-
-    def __init__(self, start_ts, end_ts, meet_data):
-        # Do not call parent constructor to avoid making a new thread.
-        self.times = [start_ts]
-
-        # Meet epoch timestamp uses millisec unit.
-        self.meet_data = [data_point for data_point in meet_data
-            if start_ts * 1000 <= data_point['timestamp'] <= end_ts * 1000]
-
-    def calc(self, mtype=None):
-        return {}
-
-    def save_results(self, resultsdir, fname_prefix=None):
-        # Save raw dict from meet to file. Ignore fname_prefix.
-        with open(os.path.join(resultsdir, 'meet_powerlog.json'), 'w') as f:
-            json.dump(self.meet_data , f, indent=4, separators=(',', ': '),
-                      ensure_ascii=False)
-
-
-class MeetStatDashboard(power_dashboard.MeasurementLoggerDashboard):
-    """Dashboard class for MeetStatLogger class."""
-
-    # Direction and type numbers map to constants in the proto
-    class Direction(enum.IntEnum):
-        """Possible directions for media entries of a data point."""
-        SENDER = 0
-        RECEIVER = 1
-
-    class MediaType(enum.IntEnum):
-        """Possible media types for media entries of a data point."""
-        VIDEO = 2
-
-    # Important metrics to collect.
-    MEET_KEYS = [
-        'encodeUsagePercent',
-        'fps',
-        'height',
-        'width',
-    ]
-
-    def _get_ssrc_dict(self, meet_data):
-        """ Extract http://what/ssrc for all video stream and map to string.
-
-        The format of the string would be sender_# / receiver_# where # denotes
-        index for the video counting from 0.
-
-        Returns:
-            dict from ssrc to video stream string.
-        """
-        ret = {}
-        count = [0, 0]
-
-        # We only care about video streams.
-        for media in meet_data[-1]['media']:
-            if media['mediatype'] != self.MediaType.VIDEO:
-                continue
-            if (media['direction'] != self.Direction.SENDER and
-                media['direction'] != self.Direction.RECEIVER):
-                continue
-            name = [media['directionStr'], str(count[media['direction']])]
-            if media['direction'] == self.Direction.SENDER:
-                name.append(media['sendercodecname'])
-            else:
-                name.append(media['receiverCodecName'])
-            count[media['direction']] += 1
-            ret[media['ssrc']] = '_'.join(name)
-
-        return ret
-
-    def _get_meet_unit(self, key):
-        """Return unit from name of the key."""
-        if key.endswith('fps'):
-            return 'fps'
-        if key.endswith('Percent'):
-            return 'percent'
-        if key.endswith('width') or key.endswith('height') :
-            return 'point'
-        raise error.TestError('Unexpected key: %s' % key)
-
-    def _get_meet_type(self, key):
-        """Return type from name of the key."""
-        if key.endswith('fps'):
-            return 'meet_fps'
-        if key.endswith('Percent'):
-            return 'meet_encoder_load'
-        if key.endswith('width'):
-            return 'meet_width'
-        if key.endswith('height'):
-            return 'meet_height'
-        raise error.TestError('Unexpected key: %s' % key)
-
-    def _convert(self):
-        """Convert meet raw dict to data to power dict."""
-
-        meet_data = self._logger.meet_data
-        ssrc_dict = self._get_ssrc_dict(meet_data)
-
-        # Dict from timestamp to dict of meet_key to value
-        parse_dict = collections.defaultdict(
-                     lambda: collections.defaultdict(int))
-
-        key_set = set()
-        testname='power_MeetCall'
-
-        for data_point in meet_data:
-            timestamp = data_point['timestamp']
-            for media in data_point['media']:
-                ssrc = media.get('ssrc', 0)
-                if ssrc not in ssrc_dict:
-                    continue
-                name = ssrc_dict[media['ssrc']]
-                for meet_key in self.MEET_KEYS:
-                    if meet_key not in media:
-                        continue
-                    key = '%s_%s' % (name, meet_key)
-                    key_set.add(key)
-                    parse_dict[timestamp][key] = media[meet_key]
-
-        timestamps = sorted(parse_dict.keys())
-        sample_count = len(timestamps)
-
-        powerlog_data = collections.defaultdict(list)
-        for ts in sorted(parse_dict.keys()):
-            for key in key_set:
-                powerlog_data[key].append(parse_dict[ts][key])
-
-        powerlog_dict =  {
-            'sample_count': sample_count,
-            'sample_duration': 1,
-            'average': {k: 1.0 * sum(v) / sample_count
-                        for k, v in powerlog_data.iteritems()},
-            'data': powerlog_data,
-            'unit': {k: self._get_meet_unit(k) for k in key_set},
-            'type': {k: self._get_meet_type(k) for k in key_set},
-            'checkpoint': [[testname]] * sample_count,
-        }
-
-        return powerlog_dict
diff --git a/client/site_tests/power_MemorySuspend/control b/client/site_tests/power_MemorySuspend/control
deleted file mode 100644
index 0d83d3f..0000000
--- a/client/site_tests/power_MemorySuspend/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "jwerner, chromeos-power"
-NAME = "power_MemorySuspend"
-ATTRIBUTES = "suite:jailed_build"
-TIME = "FAST"
-TEST_CATEGORY = "Regression"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """
-Memory suspend test: uses the memory_suspend_test binary to fill all
-available memory with 0x55 and 0xAA patterns before suspend and verify
-them afterwards. This is intended to catch memory initializaton regressions
-on resume and other forms of silent memory corruption in the suspend path.
-"""
-
-job.add_sysinfo_logfile('/sys/kernel/debug/suspend_stats', on_every_test=True)
-job.run_test('power_MemorySuspend')
diff --git a/client/site_tests/power_MemorySuspend/control.memory_qual b/client/site_tests/power_MemorySuspend/control.memory_qual
deleted file mode 100644
index f455743..0000000
--- a/client/site_tests/power_MemorySuspend/control.memory_qual
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "puthik"
-NAME = "power_MemorySuspend.memory_qual"
-TIME = "LENGTHY"
-TEST_CATEGORY = "Regression"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """
-Memory suspend test: uses the memory_suspend_test binary to fill all
-available memory with 0x55 and 0xAA patterns before suspend and verify
-them afterwards. This is intended to catch memory initializaton regressions
-on resume and other forms of silent memory corruption in the suspend path.
-The memory_qual version test 10000 loops of the normal test.
-"""
-
-job.add_sysinfo_logfile('/sys/kernel/debug/suspend_stats', on_every_test=True)
-job.run_test('power_MemorySuspend', tag='memory_qual', num_suspends=10000)
diff --git a/client/site_tests/power_MemorySuspend/power_MemorySuspend.py b/client/site_tests/power_MemorySuspend/power_MemorySuspend.py
deleted file mode 100644
index 2d64257..0000000
--- a/client/site_tests/power_MemorySuspend/power_MemorySuspend.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.power import sys_power
-
-class power_MemorySuspend(test.test):
-    """Suspend the system via memory_suspend_test."""
-
-    version = 1
-
-    def initialize(self):
-        utils.system('stop ui', ignore_status=True)
-
-
-    def run_once(self, num_suspends=1, max_spurious_wakeup_ratio=0.01):
-        spurious_wakeup_count = 0
-        max_spurious_wakeup = num_suspends * max_spurious_wakeup_ratio
-
-        for _ in range(num_suspends):
-            try:
-                sys_power.memory_suspend(10)
-            except sys_power.SpuriousWakeupError:
-                spurious_wakeup_count += 1
-                if spurious_wakeup_count > max_spurious_wakeup:
-                    raise error.TestFail('Too many SpuriousWakeupError.')
-
-        if spurious_wakeup_count > 0:
-            logging.info("Have %d SpuriousWakeupError", spurious_wakeup_count)
-
-        keyval = { 'numSpuriousWakeupError' : spurious_wakeup_count }
-        self.write_perf_keyval(keyval)
-
-    def cleanup(self):
-        utils.system('start ui')
diff --git a/client/site_tests/power_NoConsoleSuspend/control b/client/site_tests/power_NoConsoleSuspend/control
deleted file mode 100644
index cc305df..0000000
--- a/client/site_tests/power_NoConsoleSuspend/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "power_NoConsoleSuspend"
-ATTRIBUTES = "suite:kernel_daily_regression"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """
-Sets the no_console_suspend kernel option via sysfs, then suspends the system.
-"""
-
-job.add_sysinfo_logfile('/sys/kernel/debug/suspend_stats', on_every_test=True)
-job.run_test('power_NoConsoleSuspend')
diff --git a/client/site_tests/power_NoConsoleSuspend/power_NoConsoleSuspend.py b/client/site_tests/power_NoConsoleSuspend/power_NoConsoleSuspend.py
deleted file mode 100644
index d70a5c9..0000000
--- a/client/site_tests/power_NoConsoleSuspend/power_NoConsoleSuspend.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, errno, shutil, os
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import rtc
-from autotest_lib.client.cros.power import sys_power
-
-SYSFS_CONSOLE_SUSPEND = '/sys/module/printk/parameters/console_suspend'
-
-class power_NoConsoleSuspend(test.test):
-    """Test suspend/resume with no_console_suspend option set."""
-
-    version = 1
-
-    def initialize(self):
-        # Save & disable console_suspend module param
-        self.old_console_suspend = utils.read_file(SYSFS_CONSOLE_SUSPEND)
-        utils.write_one_line(SYSFS_CONSOLE_SUSPEND, 'N')
-
-    def run_once(self):
-        sys_power.kernel_suspend(10)
-
-    def cleanup(self):
-        # Restore old console_suspend module param
-        logging.info('restoring value for console_suspend: %s',
-                     self.old_console_suspend)
-        utils.open_write_close(SYSFS_CONSOLE_SUSPEND, self.old_console_suspend)
diff --git a/client/site_tests/power_ProbeDriver/control.probe_ac b/client/site_tests/power_ProbeDriver/control.probe_ac
index ae8ed44..2b7889b 100644
--- a/client/site_tests/power_ProbeDriver/control.probe_ac
+++ b/client/site_tests/power_ProbeDriver/control.probe_ac
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_ProbeDriver.probe_ac"
 ATTRIBUTES = "suite:power_build"
 PURPOSE = "Confirm that AC driver is loaded and functioning."
@@ -21,6 +21,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This is a sample test that probes sysfs and makes sure that AC driver
diff --git a/client/site_tests/power_ProbeDriver/control.probe_bat b/client/site_tests/power_ProbeDriver/control.probe_bat
index 4348a9e..28998be 100644
--- a/client/site_tests/power_ProbeDriver/control.probe_bat
+++ b/client/site_tests/power_ProbeDriver/control.probe_bat
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_ProbeDriver.probe_bat"
 PURPOSE = "Confirm that battery driver is loaded and functioning."
 CRITERIA = """\
@@ -20,6 +20,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This is a sample test that probes sysfs and makes sure that batteries driver
diff --git a/client/site_tests/power_ProbeDriver/power_ProbeDriver.py b/client/site_tests/power_ProbeDriver/power_ProbeDriver.py
index f2d954c..c81bfb6 100644
--- a/client/site_tests/power_ProbeDriver/power_ProbeDriver.py
+++ b/client/site_tests/power_ProbeDriver/power_ProbeDriver.py
@@ -1,13 +1,12 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import glob
 import logging
-import os
 
 from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error, utils
+from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.power import power_status, power_utils
 
 class power_ProbeDriver(test.test):
diff --git a/client/site_tests/power_Resume/control b/client/site_tests/power_Resume/control
index 80a7e14..9d0d00e 100644
--- a/client/site_tests/power_Resume/control
+++ b/client/site_tests/power_Resume/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Resume"
 PURPOSE = "Measure the amount of time it takes to resume from suspend."
 CRITERIA = "This test is a benchmark."
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Logging"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will search /var/log/messages for pertinent strings to determine if
diff --git a/client/site_tests/power_Resume/control.freeze b/client/site_tests/power_Resume/control.freeze
new file mode 100644
index 0000000..4c738f0
--- /dev/null
+++ b/client/site_tests/power_Resume/control.freeze
@@ -0,0 +1,20 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+AUTHOR = 'ChromeOS Team'
+NAME = 'power_Resume.freeze'
+PURPOSE = 'Measure the time it takes to resume from suspend type freeze.'
+TIME = 'SHORT'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+DOC = """
+This test will search /var/log/messages for pertinent strings to determine if
+the cpu is sleeping. It will wait for a number of seconds before suspending to
+ram. It will then calculate how many seconds the system was suspended, and
+how many seconds it took to resume. As a precaution it will ensure your
+network interface is UP after it has resumed.
+"""
+
+job.add_sysinfo_command('cbmem -c', logfile='bios_log', on_every_test=True)
+job.add_sysinfo_command('cbmem -t', logfile='bios_times', on_every_test=True)
+job.run_test('power_Resume', tag=NAME.split('.')[1], suspend_state='freeze')
diff --git a/client/site_tests/power_Resume/control.iterations b/client/site_tests/power_Resume/control.iterations
index 23be866..5fe9b7b 100644
--- a/client/site_tests/power_Resume/control.iterations
+++ b/client/site_tests/power_Resume/control.iterations
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Resume.iterations"
 PURPOSE = "Measure the amount of time it takes to resume from suspend."
 CRITERIA = "This test is a benchmark."
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Logging"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will search /var/log/messages for pertinent strings to determine if
diff --git a/client/site_tests/power_Resume/control.mem b/client/site_tests/power_Resume/control.mem
new file mode 100644
index 0000000..cbbcef6
--- /dev/null
+++ b/client/site_tests/power_Resume/control.mem
@@ -0,0 +1,20 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+AUTHOR = 'ChromeOS Team'
+NAME = 'power_Resume.mem'
+PURPOSE = 'Measure the time it takes to resume from suspend type mem.'
+TIME = 'SHORT'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+DOC = """
+This test will search /var/log/messages for pertinent strings to determine if
+the cpu is sleeping. It will wait for a number of seconds before suspending to
+ram. It will then calculate how many seconds the system was suspended, and
+how many seconds it took to resume. As a precaution it will ensure your
+network interface is UP after it has resumed.
+"""
+
+job.add_sysinfo_command('cbmem -c', logfile='bios_log', on_every_test=True)
+job.add_sysinfo_command('cbmem -t', logfile='bios_times', on_every_test=True)
+job.run_test('power_Resume', tag=NAME.split('.')[1], suspend_state='mem')
diff --git a/client/site_tests/power_Resume/control.resume_constraint b/client/site_tests/power_Resume/control.resume_constraint
index 109bb72..93af426 100644
--- a/client/site_tests/power_Resume/control.resume_constraint
+++ b/client/site_tests/power_Resume/control.resume_constraint
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Resume.resume_constraint"
 PURPOSE = "Measure the amount of time it takes to resume from suspend."
 CRITERIA = "This test is a benchmark."
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Logging"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will search /var/log/messages for pertinent strings to determine if
diff --git a/client/site_tests/power_Resume/power_Resume.py b/client/site_tests/power_Resume/power_Resume.py
index bbf1312..115ebee 100644
--- a/client/site_tests/power_Resume/power_Resume.py
+++ b/client/site_tests/power_Resume/power_Resume.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/power_Speedometer2/control b/client/site_tests/power_Speedometer2/control
index 0421876..3bf10f0 100644
--- a/client/site_tests/power_Speedometer2/control
+++ b/client/site_tests/power_Speedometer2/control
@@ -11,6 +11,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:power_daily"
+PY_VERSION = 3
 
 DOC = """
 Run Speedometer2 test and collect power data. Test should be ~3 minutes.
diff --git a/client/site_tests/power_Speedometer2/power_Speedometer2.py b/client/site_tests/power_Speedometer2/power_Speedometer2.py
index d1f6629..40ae3ed 100644
--- a/client/site_tests/power_Speedometer2/power_Speedometer2.py
+++ b/client/site_tests/power_Speedometer2/power_Speedometer2.py
@@ -1,19 +1,17 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
-import re
 import time
 
 from autotest_lib.client.common_lib.cros import chrome
 from autotest_lib.client.cros.input_playback import keyboard
-from autotest_lib.client.cros.power import power_dashboard
 from autotest_lib.client.cros.power import power_test
 
-URL = 'https://browserbench.org/Speedometer2.0/'
+URL = 'http://crospower.page.link/power_Speedometer2'
 RESULT = 'result'
-CONFIDENCE = 'confidence'
 
 class power_Speedometer2(power_test.power_Test):
     """class for running Speedometer2 test in Chrome.
@@ -33,8 +31,14 @@
 
         @param url: url of Speedometer2 test page.
         """
-        with chrome.Chrome(init_network_controller=True) as self.cr:
-            tab = self.cr.browser.tabs.New()
+        # --disable-sync disables test account info sync, e.g. Wi-Fi credentials,
+        # so that each test run does not remember info from the last test run.
+        extra_browser_args = ['--disable-sync']
+        # b/228256145: disable FirmwareUpdaterApp to avoid a powerd restart.
+        extra_browser_args.append('--disable-features=FirmwareUpdaterApp')
+        with chrome.Chrome(extra_browser_args=extra_browser_args,
+                           init_network_controller=True) as self.cr:
+            tab = self.cr.browser.tabs[0]
             tab.Activate()
 
             # Run in full-screen.
@@ -43,6 +47,9 @@
                 with keyboard.Keyboard() as keys:
                     keys.press_key('f4')
 
+            # Stop services again as Chrome might have restarted them.
+            self._services.stop_services()
+
             logging.info('Navigating to url: %s', url)
             tab.Navigate(url)
             tab.WaitForDocumentReadyStateToBeComplete()
@@ -61,22 +68,13 @@
                         RESULT)
             end_time = time.time()
             result = float(result)
-            confidence = tab.EvaluateJavaScript(
-                    'document.getElementById("%s-number").innerHTML' % \
-                    CONFIDENCE)
-            match = re.search(r"((\d+(\.\d+)?)|(\.\d+))", confidence)
-            confidence = float(match.group(0))
 
-            keyvals = {RESULT: result, CONFIDENCE: confidence}
+            keyvals = {RESULT: result}
             for key, val in keyvals.items():
                 logging.info('Speedometer2 %s: %s', key, val)
             self.keyvals.update(keyvals)
             self.output_perf_value(description=RESULT, value=result,
                                    higher_is_better=True)
-            self.output_perf_value(description=CONFIDENCE, value=confidence,
-                                   higher_is_better=False)
 
-            logger = power_dashboard.KeyvalLogger(self._start_time, end_time)
-            logger.add_item(RESULT, result, 'point', 'perf')
-            logger.add_item(CONFIDENCE, confidence, 'point', 'perf')
-            self._meas_logs.append(logger)
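+            # Record the score on the test-level keyval logger (presumably set
+            # up by the power_Test base class) instead of a local KeyvalLogger.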
+            self._keyvallogger.add_item(RESULT, result, 'point', 'perf')
+            self._keyvallogger.set_end(end_time)
diff --git a/client/site_tests/power_Standby/control b/client/site_tests/power_Standby/control
index c99c6cb..ab6e73b 100644
--- a/client/site_tests/power_Standby/control
+++ b/client/site_tests/power_Standby/control
@@ -1,14 +1,16 @@
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
+
 import re
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Standby"
 TIME = "LONG"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test measures the power draw during standby (S3).
diff --git a/client/site_tests/power_Standby/control.1hour b/client/site_tests/power_Standby/control.1hour
index f00d6a9..6c65e69 100644
--- a/client/site_tests/power_Standby/control.1hour
+++ b/client/site_tests/power_Standby/control.1hour
@@ -2,12 +2,13 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Standby.1hour"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test measures the power draw during suspend.
diff --git a/client/site_tests/power_Standby/control.36sec b/client/site_tests/power_Standby/control.36sec
index 25f7304..9bf871b 100644
--- a/client/site_tests/power_Standby/control.36sec
+++ b/client/site_tests/power_Standby/control.36sec
@@ -2,12 +2,13 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Standby.36sec"
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test make sure that power_Standby test is working.
diff --git a/client/site_tests/power_Standby/control.6min b/client/site_tests/power_Standby/control.6min
index ae1a825..b2aaa6e 100644
--- a/client/site_tests/power_Standby/control.6min
+++ b/client/site_tests/power_Standby/control.6min
@@ -2,12 +2,13 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Standby.6min"
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test measures the power draw during suspend.
diff --git a/client/site_tests/power_Standby/control.fast b/client/site_tests/power_Standby/control.fast
index 817b405..21b668f 100644
--- a/client/site_tests/power_Standby/control.fast
+++ b/client/site_tests/power_Standby/control.fast
@@ -2,13 +2,14 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Standby.fast"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:power_sanity, suite:power_monitoring"
+ATTRIBUTES = "suite:power_check, suite:power_monitoring"
+PY_VERSION = 3
 
 DOC = """
 This test measures the power draw during suspend.
diff --git a/client/site_tests/power_Standby/control.fast_force_discharge b/client/site_tests/power_Standby/control.fast_force_discharge
index 81d9af9..b80597f 100644
--- a/client/site_tests/power_Standby/control.fast_force_discharge
+++ b/client/site_tests/power_Standby/control.fast_force_discharge
@@ -2,12 +2,14 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Standby.fast_force_discharge"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+ATTRIBUTES = "suite:power_daily"
+PY_VERSION = 3
 
 DOC = """
 This test measures the power draw during suspend.
@@ -22,5 +24,5 @@
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
 job.run_test('power_Standby', sample_hours=0.334, test_hours=0.334,
-             ac_ok=True, force_discharge=True, tag=NAME.split('.')[1],
+             ac_ok=True, force_discharge='true', tag=NAME.split('.')[1],
              pdash_note=pdash_note)
diff --git a/client/site_tests/power_Standby/control.fast_suspend_to_idle b/client/site_tests/power_Standby/control.fast_suspend_to_idle
index d7a7671..931194f 100644
--- a/client/site_tests/power_Standby/control.fast_suspend_to_idle
+++ b/client/site_tests/power_Standby/control.fast_suspend_to_idle
@@ -2,12 +2,13 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Standby.fast_suspend_to_idle"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test measures the power draw during suspend.
diff --git a/client/site_tests/power_Standby/control.fast_suspend_to_ram b/client/site_tests/power_Standby/control.fast_suspend_to_ram
index dfad08b..282e0f2 100644
--- a/client/site_tests/power_Standby/control.fast_suspend_to_ram
+++ b/client/site_tests/power_Standby/control.fast_suspend_to_ram
@@ -2,12 +2,13 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_Standby.fast_suspend_to_ram"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test measures the power draw during suspend.
diff --git a/client/site_tests/power_Standby/power_Standby.py b/client/site_tests/power_Standby/power_Standby.py
index 89b09ae..d174c47 100644
--- a/client/site_tests/power_Standby/power_Standby.py
+++ b/client/site_tests/power_Standby/power_Standby.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,10 +8,11 @@
 from autotest_lib.client.bin import test
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros import rtc
+from autotest_lib.client.cros.power import force_discharge_utils
 from autotest_lib.client.cros.power import power_dashboard
 from autotest_lib.client.cros.power import power_status
-from autotest_lib.client.cros.power import power_telemetry_utils
 from autotest_lib.client.cros.power import power_suspend
+from autotest_lib.client.cros.power import power_telemetry_utils
 from autotest_lib.client.cros.power import power_utils
 
 
@@ -21,14 +23,18 @@
     _min_sample_hours = 0.1
 
     def initialize(self, pdash_note=''):
-        """Reset force discharge state."""
-        self._force_discharge_enabled = False
+        """Initialize."""
         self._pdash_note = pdash_note
         self._checkpoint_logger = power_status.CheckpointLogger()
 
-    def run_once(self, test_hours=None, sample_hours=None,
-                 max_milliwatts_standby=500, ac_ok=False,
-                 force_discharge=False, suspend_state='', bypass_check=False):
+    def run_once(self,
+                 test_hours=None,
+                 sample_hours=None,
+                 max_milliwatts_standby=500,
+                 ac_ok=False,
+                 force_discharge='false',
+                 suspend_state='',
+                 bypass_check=False):
         """Put DUT to suspend state for |sample_hours| and measure power."""
         if not power_utils.has_battery():
             raise error.TestNAError('Skipping test because DUT has no battery.')
@@ -46,18 +52,14 @@
 
         power_stats = power_status.get_status()
 
-        if not force_discharge and not ac_ok and power_stats.on_ac():
+        self._force_discharge_success = force_discharge_utils.process(
+                force_discharge, power_stats)
+        if self._force_discharge_success:
+            ac_ok = True
+
+        if force_discharge == 'false' and not ac_ok and power_stats.on_ac():
             raise error.TestError('On AC, please unplug power supply.')
 
-        if force_discharge:
-            if not power_stats.on_ac():
-                raise error.TestError('Not on AC, please plug in power supply '
-                                      'to attempt force discharge.')
-            if not power_utils.charge_control_by_ectool(False):
-                raise error.TestError('Unable to force discharge.')
-
-            self._force_discharge_enabled = True
-
         charge_start = power_stats.battery.charge_now
         voltage_start = power_stats.battery.voltage_now
 
@@ -120,7 +122,7 @@
             if not bypass_check:
                 raise error.TestError('Charge used is suspect.')
             # The standby time is too short, make it 0.001 to avoid divide by 0.
-            logging.warn('Total Charge used was 0')
+            logging.warning('Total Charge used was 0')
             total_charge_used = 0.001
 
         voltage_end = power_stats.battery.voltage_now
@@ -132,7 +134,7 @@
         results['ah_charge_start'] = charge_start
         results['ah_charge_now'] = charge_end
         results['ah_charge_used'] = total_charge_used
-        results['force_discharge'] = self._force_discharge_enabled
+        results['force_discharge'] = self._force_discharge_success
         results['hours_standby_time'] = standby_hours
         results['hours_standby_time_tested'] = elapsed_hours
         results['v_voltage_start'] = voltage_start
@@ -160,5 +162,4 @@
 
     def cleanup(self):
         """Clean up force discharge."""
-        if self._force_discharge_enabled:
-            power_utils.charge_control_by_ectool(True)
+        force_discharge_utils.restore(self._force_discharge_success)
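
The power_Standby change above replaces the test's hand-rolled ectool force-discharge handling with the shared force_discharge_utils helpers, and force_discharge becomes a string setting ('false' by default, 'true' in control.fast_force_discharge). Below is a minimal sketch of that pattern: the process()/restore() calls mirror what this diff shows, while the power_Example class and its parameters are hypothetical illustration rather than autotest code.

# Illustrative sketch only: the force_discharge_utils calls mirror the diff
# above; the power_Example class and its parameters are hypothetical.
from autotest_lib.client.bin import test
from autotest_lib.client.common_lib import error
from autotest_lib.client.cros.power import force_discharge_utils
from autotest_lib.client.cros.power import power_status


class power_Example(test.test):
    """Hypothetical client test using the shared force-discharge helpers."""
    version = 1

    def run_once(self, force_discharge='false', ac_ok=False):
        power_stats = power_status.get_status()
        # process() interprets the string setting and reports whether force
        # discharge was actually engaged.
        self._force_discharge_success = force_discharge_utils.process(
                force_discharge, power_stats)
        if self._force_discharge_success:
            ac_ok = True
        if force_discharge == 'false' and not ac_ok and power_stats.on_ac():
            raise error.TestError('On AC, please unplug power supply.')

    def cleanup(self):
        # Hand the flag returned by process() back to restore(), as the
        # updated cleanup() above does unconditionally.
        force_discharge_utils.restore(self._force_discharge_success)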
diff --git a/client/site_tests/power_StatsCPUFreq/control b/client/site_tests/power_StatsCPUFreq/control
index 55bc56e..71681bd 100644
--- a/client/site_tests/power_StatsCPUFreq/control
+++ b/client/site_tests/power_StatsCPUFreq/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_StatsCPUFreq"
 ATTRIBUTES = "suite:power_build"
 PURPOSE = "Measure time spent at each CPU Frequency when system is idle."
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Performance"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test measures percentage time spent at each CPU frequency level when
diff --git a/client/site_tests/power_StatsCPUFreq/power_StatsCPUFreq.py b/client/site_tests/power_StatsCPUFreq/power_StatsCPUFreq.py
index 666ac05..c8f0fd9 100755
--- a/client/site_tests/power_StatsCPUFreq/power_StatsCPUFreq.py
+++ b/client/site_tests/power_StatsCPUFreq/power_StatsCPUFreq.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,9 +9,9 @@
 
 
 class power_StatsCPUFreq(test.test):
+    """ Gather CPU frequency statistics """
     version = 1
 
-
     def run_once(self, test_time=60):
         cpufreq_stats = power_status.CPUFreqStats()
 
@@ -27,4 +28,3 @@
         current_stats = cpufreq_stats.refresh()
         logging.info('CPUFreq stats in the last %d seconds :\n %s',
                      test_time, current_stats)
-
diff --git a/client/site_tests/power_StatsUSB/control b/client/site_tests/power_StatsUSB/control
deleted file mode 100644
index 5192119..0000000
--- a/client/site_tests/power_StatsUSB/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "power_StatsUSB"
-ATTRIBUTES = "suite:power_build"
-PURPOSE = "Measure time USB devices spend in active state."
-CRITERIA = "This test is a benchmark."
-TIME = "SHORT"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """
-This test measures percent time USB devices spend in active state.
-"""
-
-job.run_test('power_StatsUSB')
diff --git a/client/site_tests/power_StatsUSB/power_StatsUSB.py b/client/site_tests/power_StatsUSB/power_StatsUSB.py
deleted file mode 100755
index 681f9a0..0000000
--- a/client/site_tests/power_StatsUSB/power_StatsUSB.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, time
-from autotest_lib.client.bin import test
-from autotest_lib.client.cros.power import power_status
-
-
-class power_StatsUSB(test.test):
-    version = 1
-
-
-    def run_once(self, test_time=60):
-        usb = power_status.USBSuspendStats()
-
-        # get USB percent active since boot
-        usb.incremental = False
-        stats = usb.refresh()
-        logging.info('USB active time since boot: %.2f%%', stats['active'])
-
-        # sleep for some time
-        time.sleep(test_time)
-
-        # get USB percent active during the test time
-        usb.incremental = True
-        stats = usb.refresh()
-        logging.info('USB active time in the last %d seconds: %.2f%%',
-                     test_time, stats['active'])
diff --git a/client/site_tests/power_SuspendStress/control.4hours b/client/site_tests/power_SuspendStress/control.4hours
index 39d72de..011e939 100644
--- a/client/site_tests/power_SuspendStress/control.4hours
+++ b/client/site_tests/power_SuspendStress/control.4hours
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_SuspendStress.4hours"
 ATTRIBUTES = "suite:jailed_build"
 PURPOSE = "Run repeated iterations of suspend/resume to find rare errors."
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Runs four hours of bare suspend/resume cycles without additional workload.
diff --git a/client/site_tests/power_SuspendStress/control.audio b/client/site_tests/power_SuspendStress/control.audio
index 5a8027b..c16def0 100644
--- a/client/site_tests/power_SuspendStress/control.audio
+++ b/client/site_tests/power_SuspendStress/control.audio
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Runs eight minutes of suspend/resume cycles in the background of an audio tone.
diff --git a/client/site_tests/power_SuspendStress/control.bare b/client/site_tests/power_SuspendStress/control.bare
index d882490..7ab0ced 100644
--- a/client/site_tests/power_SuspendStress/control.bare
+++ b/client/site_tests/power_SuspendStress/control.bare
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Runs eight minutes of bare suspend/resume cycles without additional workload.
diff --git a/client/site_tests/power_SuspendStress/control.bareDaily b/client/site_tests/power_SuspendStress/control.bareDaily
index 62c97dd..cafddc8 100644
--- a/client/site_tests/power_SuspendStress/control.bareDaily
+++ b/client/site_tests/power_SuspendStress/control.bareDaily
@@ -10,6 +10,8 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 7500
+PY_VERSION = 3
 
 DOC = """
 Runs 250 iterations (~30mins) of bare suspend/resume cycles without additional
diff --git a/client/site_tests/power_SuspendStress/control.bareFSI b/client/site_tests/power_SuspendStress/control.bareFSI
index 39ad6fe..7291807 100644
--- a/client/site_tests/power_SuspendStress/control.bareFSI
+++ b/client/site_tests/power_SuspendStress/control.bareFSI
@@ -9,6 +9,8 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 80000 # 20 hours + guard
+PY_VERSION = 3
 
 DOC = """
 Runs iterations of bare suspend/resume cycles without additional workload.
diff --git a/client/site_tests/power_SuspendStress/control.disk b/client/site_tests/power_SuspendStress/control.disk
index 29cc16f..1b53ce4 100644
--- a/client/site_tests/power_SuspendStress/control.disk
+++ b/client/site_tests/power_SuspendStress/control.disk
@@ -2,6 +2,11 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate the test to Python 3.
+# If the test is not migrated by 1/14/22, it will be deleted.
+
 AUTHOR = "jwerner, chromeos-power"
 NAME = "power_SuspendStress.disk"
 ATTRIBUTES = "suite:jailed_build"
@@ -10,6 +15,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Runs eight minutes of suspend/resume cycles in the background of a file I/O bench.
diff --git a/client/site_tests/power_SuspendStress/control.faft b/client/site_tests/power_SuspendStress/control.faft
index d7c5016..2512c2e 100644
--- a/client/site_tests/power_SuspendStress/control.faft
+++ b/client/site_tests/power_SuspendStress/control.faft
@@ -9,6 +9,7 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Runs eight minutes of bare suspend/resume cycles without additional workload.
diff --git a/client/site_tests/power_SuspendStress/control.idle b/client/site_tests/power_SuspendStress/control.idle
index 2ee9c59..fe625fc 100644
--- a/client/site_tests/power_SuspendStress/control.idle
+++ b/client/site_tests/power_SuspendStress/control.idle
@@ -2,6 +2,11 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate the test to Python 3.
+# If the test is not migrated by 1/14/22, it will be deleted.
+
 AUTHOR = "jwerner, chromeos-power"
 NAME = "power_SuspendStress.idle"
 ATTRIBUTES = "suite:jailed_build"
@@ -10,6 +15,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Runs eight minutes of suspend/resume tests where suspend is triggered by idling.
@@ -17,4 +23,4 @@
 
 job.parallel([lambda: job.run_test('power_SuspendStress', tag='idle', idle=True,
                                   duration=480, init_delay=60, min_suspend=11)],
-             [lambda: job.run_test('dummy_IdleSuspend')])
+             [lambda: job.run_test('stub_IdleSuspend')])
diff --git a/client/site_tests/power_SuspendStress/control.stress b/client/site_tests/power_SuspendStress/control.stress
index 443b3df..c211eb8 100644
--- a/client/site_tests/power_SuspendStress/control.stress
+++ b/client/site_tests/power_SuspendStress/control.stress
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Runs multiple minutes of bare suspend/resume cycles without additional workload.
diff --git a/client/site_tests/power_SuspendStress/power_SuspendStress.py b/client/site_tests/power_SuspendStress/power_SuspendStress.py
index 4fde100..da4e3dc 100644
--- a/client/site_tests/power_SuspendStress/power_SuspendStress.py
+++ b/client/site_tests/power_SuspendStress/power_SuspendStress.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -23,7 +24,7 @@
 
         @param duration: total run time of the test
         @param idle: use sys_power.idle_suspend method.
-                (use with dummy_IdleSuspend)
+                (use with stub_IdleSuspend)
         @param init_delay: wait this many seconds before starting the test to
                 give parallel tests time to get started
         @param min_suspend: suspend durations will be chosen randomly out of
diff --git a/client/site_tests/power_SuspendToIdle/control b/client/site_tests/power_SuspendToIdle/control
index 79da97f..a28c103 100644
--- a/client/site_tests/power_SuspendToIdle/control
+++ b/client/site_tests/power_SuspendToIdle/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_SuspendToIdle"
 PURPOSE = "Verify the suspend to idle is working."
 TIME = "SHORT"
@@ -10,13 +10,14 @@
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:power_daily"
+PY_VERSION = 3
 
 DOC = """
 Test functionality of Suspend to Idle.
 
 Raise TestNAError if default config is not suspend to idle.
 
-This test uses the following sanity check to verify that S0ix is working.
+This test uses the following confidence check to verify that S0ix is working.
 1. DMC Firmware is loaded successfully.
 2. PCH IP blocks are powergating correctly.
 3. DMC firmware entry DC6 state in S0ix.
diff --git a/client/site_tests/power_SuspendToIdle/control.force b/client/site_tests/power_SuspendToIdle/control.force
index f1b78ff..33f52e6 100644
--- a/client/site_tests/power_SuspendToIdle/control.force
+++ b/client/site_tests/power_SuspendToIdle/control.force
@@ -2,20 +2,21 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_SuspendToIdle.force"
 PURPOSE = "Verify the suspend to idle is working."
 TIME = "SHORT"
 TEST_CATEGORY = "Logging"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Test functionality of Suspend to Idle.
 
 Force the system to use suspend to idle even if it is not the default config.
 
-This test uses the following sanity check to verify that S0ix is working.
+This test uses the following confidence check to verify that S0ix is working.
 1. DMC Firmware is loaded successfully.
 2. PCH IP blocks are powergating correctly.
 3. DMC firmware entry DC6 state in S0ix.
diff --git a/client/site_tests/power_SuspendToIdle/power_SuspendToIdle.py b/client/site_tests/power_SuspendToIdle/power_SuspendToIdle.py
index 6e320cc..456d8ed 100644
--- a/client/site_tests/power_SuspendToIdle/power_SuspendToIdle.py
+++ b/client/site_tests/power_SuspendToIdle/power_SuspendToIdle.py
@@ -1,8 +1,10 @@
+# Lint as: python2, python3
 # Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
+import time
 
 from contextlib import contextmanager
 
@@ -17,6 +19,8 @@
 class power_SuspendToIdle(test.test):
     """class for power_SuspendToIdle test."""
     version = 1
+    _pch_powergating_max_retry = 5
+    _pch_powergating_retry_delay_secs = 1
 
     @contextmanager
     def _log_error_message(self):
@@ -55,9 +59,20 @@
 
         with self._log_error_message():
             pch_powergating_stats = power_status.PCHPowergatingStats()
-            pch_powergating_stats.read_pch_powergating_info()
-            on_pch = pch_powergating_stats.check_s0ix_requirement()
-            if on_pch:
+
+            on_pch = None
+            # Allow up to |_pch_powergating_max_retry| tries of the PCH
+            # powergating check because it is done in S0 idle rather than
+            # S0ix, and background processes may make the result flaky.
+            for try_count in range(1, self._pch_powergating_max_retry + 1):
+                pch_powergating_stats.read_pch_powergating_info()
+                on_pch = pch_powergating_stats.check_s0ix_requirement()
+                if not on_pch:
+                    break
+                logging.info('PCH powergating check #%d failed: %s', try_count,
+                             ', '.join(on_pch))
+                time.sleep(self._pch_powergating_retry_delay_secs)
+            else:
                 raise error.TestFail('PCH powergating check failed: ',
                                      ', '.join(on_pch))
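
The retry added above leans on Python's for/else: the else suite runs only when the loop finishes without hitting break, which is exactly the "every attempt failed" case. A generic, hypothetical sketch of the same pattern follows; retry_check and its arguments are illustrative, not autotest API.

import logging
import time


def retry_check(check, max_retry=5, delay_secs=1):
    """Hypothetical retry helper showing the for/else pattern used above.

    |check| returns an empty/falsy value on success, or a list of failure
    strings.
    """
    for try_count in range(1, max_retry + 1):
        failures = check()
        if not failures:
            break  # Success: the for/else suite below is skipped.
        logging.info('check #%d failed: %s', try_count, ', '.join(failures))
        time.sleep(delay_secs)
    else:
        # Reached only when no iteration hit `break`, i.e. every try failed.
        raise RuntimeError('check failed after %d tries' % max_retry)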
 
diff --git a/client/site_tests/power_SuspendType/control b/client/site_tests/power_SuspendType/control
new file mode 100644
index 0000000..c7999ed
--- /dev/null
+++ b/client/site_tests/power_SuspendType/control
@@ -0,0 +1,17 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "bbrotherton,chromeos-pvs-dev"
+NAME = "power_SuspendType"
+PURPOSE = "This test is used to verify requirement boot-perf-0003-v01."
+ATTRIBUTES = ""
+TIME = "FAST"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test verifies that either the mem or freeze suspend mode is supported.
+"""
+
+job.run_test('power_SuspendType')
diff --git a/client/site_tests/power_SuspendType/control.freeze b/client/site_tests/power_SuspendType/control.freeze
new file mode 100644
index 0000000..2ba8449
--- /dev/null
+++ b/client/site_tests/power_SuspendType/control.freeze
@@ -0,0 +1,16 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+AUTHOR = 'bbrotherton,chromeos-pvs-dev'
+NAME = 'power_SuspendType.freeze'
+PURPOSE = 'This test is used to verify the client supports type freeze'
+ATTRIBUTES = ''
+TIME = 'FAST'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+DOC = """
+Run this test to report whether the DUT supports suspend type freeze: if so,
+it will return a PASS result; if not, it will return a FAIL result.
+"""
+
+job.run_test('power_SuspendType', tag=NAME.split('.')[1], desired_suspend_type='freeze')
diff --git a/client/site_tests/power_SuspendType/control.mem b/client/site_tests/power_SuspendType/control.mem
new file mode 100644
index 0000000..12631fe
--- /dev/null
+++ b/client/site_tests/power_SuspendType/control.mem
@@ -0,0 +1,16 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+AUTHOR = 'bbrotherton,chromeos-pvs-dev'
+NAME = 'power_SuspendType.mem'
+PURPOSE = 'This test is used to verify the client supports type mem'
+ATTRIBUTES = ''
+TIME = 'FAST'
+TEST_TYPE = 'client'
+PY_VERSION = 3
+DOC = """
+Run this test to report whether the DUT supports suspend type mem: if so,
+it will return a PASS result; if not, it will return a FAIL result.
+"""
+
+job.run_test('power_SuspendType', tag=NAME.split('.')[1], desired_suspend_type='mem')
\ No newline at end of file
diff --git a/client/site_tests/power_SuspendType/power_SuspendType.py b/client/site_tests/power_SuspendType/power_SuspendType.py
new file mode 100644
index 0000000..6935050
--- /dev/null
+++ b/client/site_tests/power_SuspendType/power_SuspendType.py
@@ -0,0 +1,32 @@
+# Lint as: python2, python3
+# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.power import power_utils
+
+
+class power_SuspendType(test.test):
+    """class for power_SuspendType test."""
+    version = 1
+
+    def run_once(self, desired_suspend_type=None):
+        """
+        @param desired_suspend_type: check that the device supports a specific
+                state ("mem" or "freeze"). If None, just check that one of the
+                two is returned as the default suspend type.
+        """
+        suspend_state = power_utils.get_sleep_state()
+
+        if desired_suspend_type is None:
+            if suspend_state != 'mem' and suspend_state != 'freeze':
+                raise error.TestFail(
+                        'Did not find valid suspend state, want: freeze or mem, got: '
+                        + suspend_state)
+        else:
+            if suspend_state != desired_suspend_type:
+                raise error.TestFail('System does not support suspend type ' +
+                                     desired_suspend_type)
+        return
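
power_SuspendType relies on power_utils.get_sleep_state() to report the default suspend type. For reference, on Linux the kernel advertises the supported suspend types in /sys/power/state (for example 'freeze mem disk'). The snippet below is a hypothetical illustration of reading that file directly; it is not necessarily how the autotest helper is implemented.

# Hypothetical illustration: read the kernel's list of supported suspend
# types straight from sysfs. Not the power_utils implementation.
def read_supported_suspend_types(path='/sys/power/state'):
    with open(path) as f:
        return f.read().split()


# Example: check whether suspend-to-idle ('freeze') is available.
# 'freeze' in read_supported_suspend_types()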
diff --git a/client/site_tests/power_Thermal/control b/client/site_tests/power_Thermal/control
deleted file mode 100644
index f0b079b..0000000
--- a/client/site_tests/power_Thermal/control
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "power_Thermal"
-PURPOSE = "Verify thermal feedback operation."
-CRITERIA = "Fails if CPU is not slowed down on crossing temperature theshold."
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """This is a test which changes the temperature theshold and expects
-the system to slow down the CPU clock in response."""
-
-job.run_test('power_Thermal')
diff --git a/client/site_tests/power_Thermal/power_Thermal.py b/client/site_tests/power_Thermal/power_Thermal.py
deleted file mode 100644
index 0199672..0000000
--- a/client/site_tests/power_Thermal/power_Thermal.py
+++ /dev/null
@@ -1,370 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob, logging, os, tempfile, threading, time
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error, utils
-
-class PlatformDescriptor(object):
-    '''
-    An object to keep platform specific information.
-
-    @num_cores - number of CPU cores in this platform
-    @max_cpu_freq - maximum frequency the CPU can be running at
-    @min_cpu_freq - minimal frequency the CPU can be running at
-    '''
-
-    def __init__(self, num_cores, max_cpu_freq, min_cpu_freq):
-        self.num_cores = num_cores
-        self.max_cpu_freq = max_cpu_freq
-        self.min_cpu_freq = min_cpu_freq
-
-
-# Base name of the sysfs file where CPU temperature is reported. The file is
-# exported by the temperature monitor driver and is located in the appropriate
-# device's subtree. We use the file name to locate the subtree, only one file
-# with this name is expected to exist in /sys. The ext_ prefix indicates that
-# this is a reading off a sensor located next to the CPU. This facility could
-# be not available on some platforms, the test would need to be updated to
-# accommodate those.
-#
-# The `standard' temperature reading available through
-# /sys/class/hwmon/hwmon0/device/temperature does not represent the actual CPU
-# temperature and when the CPU load changes, the 'standard' temperature
-# reading changes much slower and not to such a large extent than the value in
-# */ext_temperature.
-EXT_TEMP_SENSOR_FILE = 'ext_temperature'
-
-# Base name of the file where the throttling temperature is set (if CPU temp
-# exceeds this value, clock throttling starts).
-THROTTLE_EXT_LIMIT_FILE = 'throttle_ext_limit'
-
-# Root directory for all sysfs information about the CPU(s).
-CPU_INFO_ROOT = '/sys/devices/system/cpu'
-
-# Template to get access to the directory/file containing current per core
-# information.
-PER_CORE_FREQ_TEMPLATE = CPU_INFO_ROOT + '/cpu%d/cpufreq/%s'
-
-# Base name for the temporary files used by this test.
-TMP_FILE_TEMPLATE = '/tmp/thermal_'
-
-# Temperature difference expected to be caused by increased CPU activity.
-DELTA = 3.0
-
-# Name of the file controlling core's clocking discipline.
-GOVERNOR = 'scaling_governor'
-
-# Name of the file providing space separated list of available clocking
-# disciplines.
-AVAILABLE_GOVERNORS = 'scaling_available_governors'
-
-def clean_up(obj):
-    '''
-    A function to register with the autotest engine to ensure proper cleanup.
-
-    It will be called after the test has run, either completing successfully
-    or throwing an exception.
-    '''
-
-    obj.cleanup()
-
-
-class power_Thermal(test.test):
-    version = 1
-
-
-    def _cpu_heater(self):
-        '''
-        A function to execute some code to heat up the target.
-
-        This function is run on a separate thread, all it does - opens a file
-        for writing, writes it with 100K characters, closes and removes the
-        file, it is running in a tight loop until the stop_all_workers flag
-        turns True.
-
-        Multiple threads are spawn to cause maximum CPU activity.
-        '''
-
-        (handle, fname) = tempfile.mkstemp(
-            prefix=os.path.basename(TMP_FILE_TEMPLATE),
-            dir=os.path.dirname(TMP_FILE_TEMPLATE))
-        os.close(handle)
-        os.remove(fname)
-        while not self.stop_all_workers:
-            f = open(fname, 'w')
-            f.write('x' * 100000)
-            f.close()
-            os.remove(fname)
-
-
-    def _add_heater_thread(self):
-        '''Add a thread to run another instance of _cpu_heater().'''
-
-        thread_count = len(self.worker_threads)
-        logging.info('adding thread number %d' % thread_count)
-        new_thread = threading.Thread(target=self._cpu_heater)
-        self.worker_threads.append(new_thread)
-        new_thread.daemon = True
-        new_thread.start()
-
-
-    def _throttle_count(self):
-        '''
-        Return current throttling status of all cores.
-
-        The return integer value is the sum of all cores' throttling status.
-        When the sum is equal the core number - all cores are throttling.
-        '''
-
-        count = 0
-        for cpu in range(self.pl_desc.num_cores):
-            count += int(utils.read_file(
-                    PER_CORE_FREQ_TEMPLATE % (cpu, 'throttle')))
-        return count
-
-
-    def _cpu_freq(self, cpu):
-        '''Return current clock frequency of a CPU, integer in Kilohertz.'''
-
-        return int(utils.read_file(
-                PER_CORE_FREQ_TEMPLATE % (cpu, 'cpuinfo_cur_freq')))
-
-
-    def _cpu_temp(self):
-        '''Return current CPU temperature, a float value.'''
-
-        return float(utils.read_file(
-                os.path.join(self.temperature_data_path, EXT_TEMP_SENSOR_FILE)))
-
-
-    def _throttle_limit(self):
-        '''
-        Return current CPU throttling temperature threshold.
-
-        If CPU temperature exceeds this value, clock throttling is activated,
-        causing CPU slowdown.
-
-        Returns the limit as a float value.
-        '''
-
-        return float(utils.read_file(
-                os.path.join(self.temperature_data_path,
-                             THROTTLE_EXT_LIMIT_FILE)))
-
-
-    def _set_throttle_limit(self, new_limit):
-        '''
-        Set current CPU throttling temperature threshold.
-
-        The passed in float value is rounded to the nearest integer.
-        '''
-
-        utils.open_write_close(
-            os.path.join(
-                self.temperature_data_path, THROTTLE_EXT_LIMIT_FILE),
-            '%d' % int(round(new_limit)))
-
-
-    def _check_freq(self):
-        '''Verify that all CPU clocks are in range for this target.'''
-
-        for cpu in range(self.pl_desc.num_cores):
-            freq = self._cpu_freq(cpu)
-            if self.pl_desc.min_cpu_freq <= freq <= self.pl_desc.max_cpu_freq:
-                return
-            raise error.TestError('Wrong cpu %d frequency reading %d' % (
-                    cpu, freq))
-
-
-    def _get_cpu_freq_raised(self):
-        '''
-        Bring all cores clock to max frequency.
-
-        This function uses the scaling_governor mechanism to force the cores
-        to run at maximum frequency, writing the string 'performance' into
-        each core's governor file.
-
-        The current value (if not 'performance') is preserved to be restored
-        in the end of the test.
-
-        Returns a dictionary where keys are the core numbers and values are
-        the preserved governor setting.
-
-        raises TestError in case 'performance' setting is not allowed on any
-               of the cores, or the clock frequency does not reach max on any
-               of the cores in 1 second.
-        '''
-
-        rv = {}
-        for cpu in range(self.pl_desc.num_cores):
-            target = 'performance'
-            gov_file = PER_CORE_FREQ_TEMPLATE % (cpu, GOVERNOR)
-            current_gov = utils.read_file(gov_file).strip()
-            available_govs = utils.read_file(PER_CORE_FREQ_TEMPLATE % (
-                    cpu, AVAILABLE_GOVERNORS)).split()
-
-            if current_gov != target:
-                if not target in available_govs:
-                    raise error.TestError('core %d does not allow setting %s'
-                                          % (cpu, target))
-                logging.info('changing core %d governor from %s to %s' % (
-                        cpu, current_gov, target))
-                utils.open_write_close(gov_file, target)
-                rv[cpu] = current_gov
-
-        for _ in range(2):  # Wait for no more than 1 second
-            for cpu in range(self.pl_desc.num_cores):
-                if self._cpu_freq(cpu) != self.pl_desc.max_cpu_freq:
-                    break
-            else:
-                return rv
-
-        freqs = []
-        for cpu in range(self.pl_desc.num_cores):
-            freqs.append('%d' % self._cpu_freq(cpu))
-        raise error.TestError('failed to speed up some CPU clocks: %s' %
-                              ', '.join(freqs))
-
-
-    def _get_cpu_temp_raised(self):
-        '''
-        Start more threads to increase CPU temperature.
-
-        This function starts 10 threads and waits till either of the two
-        events happen:
-
-        - the throttling is activated (the threshold is expected to be set at
-          DELTA/2 above the temperature when the test started). This is
-          considered a success, the function returns.
-
-        - the temperature raises DELTA degrees above the original temperature
-          but throttling does not start. This is considered an overheating
-          failure, a test error is raised.
-
-        If the temperature does not reach the DELTA and throttling does not
-        start in 30 seconds - a test error is also raised in this case.
-        '''
-
-        base_temp = self._cpu_temp()
-        # Start 10 more cpu heater threads
-        for _ in range(10):
-            self._add_heater_thread()
-
-        # Wait 30 seconds for the temp to raise DELTA degrees or throttling to
-        # start
-        for count in range(30):
-            new_temp = self._cpu_temp()
-            if new_temp - base_temp >= DELTA:
-                raise error.TestError(
-                    'Reached temperature of %2.1fC in %d'
-                    ' seconds, no throttling.'
-                    % count)
-            if self._throttle_count() == self.pl_desc.num_cores:
-                logging.info('full throttle after %d seconds' % count)
-                return
-            time.sleep(1)
-        raise error.TestError(
-            'failed to raise CPU temperature from %s (reached %s), '
-            '%d cores throttled' % (
-                str(base_temp), str(new_temp), self._throttle_count()))
-
-    def _get_platform_descriptor(self):
-        '''Fill out the platform descriptor to be used by the test.'''
-
-        present = utils.read_file(os.path.join(CPU_INFO_ROOT, 'present'))
-        if present.count('-') != 1:
-            raise error.TestError(
-                "can't determine number of cores from %s" % present)
-        (min_core, max_core) = tuple(int(x) for x in present.split('-'))
-        min_freq = int(utils.read_file(
-            PER_CORE_FREQ_TEMPLATE % (0, 'cpuinfo_min_freq')))
-        max_freq = int(utils.read_file(
-            PER_CORE_FREQ_TEMPLATE % (0, 'cpuinfo_max_freq')))
-
-        return PlatformDescriptor(max_core - min_core + 1, max_freq, min_freq)
-
-
-    def _prepare_test(self):
-        '''Prepare test: check initial conditions and set variables.'''
-
-        ext_temp_path = utils.system_output(
-            'find /sys -name %s' % EXT_TEMP_SENSOR_FILE).splitlines()
-        if len(ext_temp_path) != 1:
-            raise error.TestError('found %d sensor files' % len(ext_temp_path))
-
-        self.temperature_data_path = os.path.dirname(ext_temp_path[0])
-
-        self.stop_all_workers = False
-
-        self.pl_desc = self._get_platform_descriptor()
-
-        # Verify CPU frequency is in range.
-        self._check_freq()
-
-        # Make sure we are not yet throttling.
-        if self._throttle_count():
-            raise error.TestError('Throttling active before test started')
-
-        # Remember throttling level setting before test started.
-        self.preserved_throttle_limit = self._throttle_limit()
-
-        if self.preserved_throttle_limit - self._cpu_temp() < 4 * DELTA:
-            raise error.TestError('Target is too hot: %s C' % str(
-                    self._cpu_temp()))
-
-        # list to keep track of threads started to heat up CPU.
-        self.worker_threads = []
-
-        # Dictionary of saved cores' scaling governor settings.
-        self.saved_governors = {}
-
-        self.register_after_iteration_hook(clean_up)
-
-
-    def run_once(self):
-        self._prepare_test()
-        logging.info('starting temperature is %s' % str(self._cpu_temp()))
-        logging.info('starting frequency is %s' % str(self._cpu_freq(0)))
-
-        self.saved_governors = self._get_cpu_freq_raised()
-        self._set_throttle_limit(self._cpu_temp() + DELTA/2)
-        self._get_cpu_temp_raised()
-        self._set_throttle_limit(self.preserved_throttle_limit)
-
-        # Half a second after restoring the throttling limit is plenty for
-        # throttling to stop.
-        time.sleep(.5)
-        if self._throttle_count():
-            raise error.TestError('Throttling did not stop')
-
-        logging.info('ending temperature is %s' % str(self._cpu_temp()))
-        logging.info('ending frequency is %s' % str(self._cpu_freq(0)))
-
-
-    def cleanup(self):
-        self.stop_all_workers = True
-        self._set_throttle_limit(self.preserved_throttle_limit)
-        logging.info('stopping %d thread(s)' % len(self.worker_threads))
-        runaway_threads = 0
-        while self.worker_threads:
-            t = self.worker_threads.pop()
-            t.join(.5)
-            if t.isAlive():
-                runaway_threads += 1
-        if runaway_threads:
-            for f in glob.glob('%s*' % TMP_FILE_TEMPLATE):
-                logging.info('removing %s' % f)
-                os.remove(f)
-            raise error.TestError(
-                'Failed to join %d worker thread(s)' % runaway_threads)
-
-        if not self.saved_governors:
-            return
-
-        for (cpu, gov) in self.saved_governors.iteritems():
-            gov_file = PER_CORE_FREQ_TEMPLATE % (cpu, GOVERNOR)
-            logging.info('restoring core %d governor to %s' % (cpu, gov))
-            utils.open_write_close(gov_file, gov)
-        self.saved_governors = {}
diff --git a/client/site_tests/power_ThermalLoad/control b/client/site_tests/power_ThermalLoad/control
index edb9640..f1c65c5 100644
--- a/client/site_tests/power_ThermalLoad/control
+++ b/client/site_tests/power_ThermalLoad/control
@@ -1,11 +1,13 @@
 AUTHOR = "puthik"
 NAME = "power_ThermalLoad"
 PURPOSE = "Run thermal workload and log temperature."
+
 CRITERIA = "This test is a benchmark."
 TIME = "LONG"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test records power and temperature related statistics while open WebGL
diff --git a/client/site_tests/power_ThermalLoad/control.fast b/client/site_tests/power_ThermalLoad/control.fast
index ee4a807..654b03b 100644
--- a/client/site_tests/power_ThermalLoad/control.fast
+++ b/client/site_tests/power_ThermalLoad/control.fast
@@ -1,11 +1,13 @@
 AUTHOR = "puthik"
 NAME = "power_ThermalLoad.fast"
 PURPOSE = "Run thermal workload and log temperature."
+
 CRITERIA = "This test is a benchmark."
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test makes sure that power_ThermalLoad is working fine.
diff --git a/client/site_tests/power_ThermalLoad/control.option b/client/site_tests/power_ThermalLoad/control.option
index e39e3ea..4f1feb9 100644
--- a/client/site_tests/power_ThermalLoad/control.option
+++ b/client/site_tests/power_ThermalLoad/control.option
@@ -1,11 +1,13 @@
 AUTHOR = "puthik"
 NAME = "power_ThermalLoad.option"
 PURPOSE = "Run thermal workload and log temperature."
+
 CRITERIA = "This test is a benchmark."
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test is power_ThermalLoad that can use custom args.
diff --git a/client/site_tests/power_ThermalLoad/power_ThermalLoad.py b/client/site_tests/power_ThermalLoad/power_ThermalLoad.py
index 9d3f703..8099a0b 100644
--- a/client/site_tests/power_ThermalLoad/power_ThermalLoad.py
+++ b/client/site_tests/power_ThermalLoad/power_ThermalLoad.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,23 +11,54 @@
 from autotest_lib.client.cros.power import power_status
 from autotest_lib.client.cros.power import power_test
 
+FISHES_COUNT = {
+        1: 'setSetting0',
+        100: 'setSetting1',
+        500: 'setSetting2',
+        1000: 'setSetting3',
+        3000: 'setSetting4',
+        5000: 'setSetting5',
+        10000: 'setSetting6',
+        15000: 'setSetting7',
+        20000: 'setSetting8',
+        25000: 'setSetting9',
+        30000: 'setSetting10',
+}
+
 
 class power_ThermalLoad(power_test.power_Test):
     """class for power_ThermalLoad test.
     """
     version = 2
 
-    FISHTANK_URL = 'http://storage.googleapis.com/chrome-power/aquarium/aquarium/aquarium.html'
+    FISHTANK_URL = 'http://crospower.page.link/power_ThermalLoad'
     HOUR = 60 * 60
 
-    def run_once(self, test_url=FISHTANK_URL, duration=2.5*HOUR, numFish=3000):
+    def select_fishes(self, tab, fish_settings):
+        """Simple wrapper to select the required fish count
+
+        @param tab: An Autotest Chrome tab instance.
+        @param fish_settings: WebGL fish count setting
+        """
+        tab.ExecuteJavaScript('%s.click();' % fish_settings)
+
+    def run_once(self,
+                 test_url=FISHTANK_URL,
+                 duration=2.5 * HOUR,
+                 numFish=3000):
         """run_once method.
 
         @param test_url: url of webgl heavy page.
         @param duration: time in seconds to display url and measure power.
         @param numFish: number of fish to pass to WebGL Aquarium.
         """
-        with chrome.Chrome(init_network_controller=True) as self.cr:
+        # --disable-sync disables test account info sync, e.g. Wi-Fi credentials,
+        # so that each test run does not remember info from the last test run.
+        extra_browser_args = ['--disable-sync']
+        # b/228256145 to avoid powerd restart
+        extra_browser_args.append('--disable-features=FirmwareUpdaterApp')
+        with chrome.Chrome(extra_browser_args=extra_browser_args,
+                           init_network_controller=True) as self.cr:
             tab = self.cr.browser.tabs.New()
             tab.Activate()
 
@@ -36,12 +68,16 @@
                 with keyboard.Keyboard() as keys:
                     keys.press_key('f4')
 
+            # Stop services again as Chrome might have restarted them.
+            self._services.stop_services()
+
             self.backlight.set_percent(100)
 
-            url = test_url + "?numFish=" + str(numFish)
-            logging.info('Navigating to url: %s', url)
-            tab.Navigate(url)
+            logging.info('Navigating to url: %s', test_url)
+            tab.Navigate(test_url)
             tab.WaitForDocumentReadyStateToBeComplete()
+            logging.info("Selecting %d Fishes", numFish)
+            self.select_fishes(tab, FISHES_COUNT[numFish])
 
             self._flog = FishTankFpsLogger(tab,
                     seconds_period=self._seconds_period,
@@ -73,15 +109,28 @@
         super(FishTankFpsLogger, self).__init__([], seconds_period,
                                                     checkpoint_logger)
         self._tab = tab
-        self._lastFrameCount = 0
-        fishCount = self._tab.EvaluateJavaScript('fishCount')
+        (frameCount, frameTime) = self._tab.EvaluateJavaScript(
+                '[frameCount, Date.now()/1000]')
+        fishCount = self.get_fish_count(tab)
         self.domains = ['avg_fps_%04d_fishes' % fishCount]
-        self.refresh()
+        self._lastFrameCount = frameCount
+        self._lastFrameTime = frameTime
+
+    def get_fish_count(self, tab):
+        style_string = 'color: red;'
+        for count, setting in FISHES_COUNT.items():
+            style = tab.EvaluateJavaScript('%s.getAttribute("style")' %
+                                           setting)
+            if style == style_string:
+                return count
 
     def refresh(self):
-        frameCount = self._tab.EvaluateJavaScript('frameCount')
-        fps = (frameCount - self._lastFrameCount) / self.seconds_period
+        (frameCount, frameTime
+         ) = self._tab.EvaluateJavaScript('[frameCount, Date.now()/1000]')
+        period = frameTime - self._lastFrameTime
+        fps = (frameCount - self._lastFrameCount) / period
         self._lastFrameCount = frameCount
+        self._lastFrameTime = frameTime
         return [fps]
 
     def save_results(self, resultsdir, fname_prefix=None):
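
The FishTankFpsLogger change above samples Date.now() along with frameCount, so FPS is computed over the measured elapsed time rather than the nominal seconds_period. A small, self-contained sketch of that calculation follows; the function name and values are illustrative.

def fps_from_samples(prev_count, prev_time, count, now):
    """Frames per second between two (frame count, timestamp) samples."""
    period = now - prev_time
    return (count - prev_count) / period


# Example: 300 frames rendered between t=10.0s and t=20.0s gives 30 fps.
assert fps_from_samples(0, 10.0, 300, 20.0) == 30.0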
diff --git a/client/site_tests/power_UiResume/control b/client/site_tests/power_UiResume/control
index fa9fe04..c86fd44 100644
--- a/client/site_tests/power_UiResume/control
+++ b/client/site_tests/power_UiResume/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_UiResume"
 PURPOSE = "Measure the amount of time it takes to resume from suspend."
 CRITERIA = "This test is a benchmark."
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Logging"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will search /var/log/messages for pertinent strings to determine if
diff --git a/client/site_tests/power_UiResume/control.no_arc b/client/site_tests/power_UiResume/control.no_arc
index d6c13e2..48bf18d 100644
--- a/client/site_tests/power_UiResume/control.no_arc
+++ b/client/site_tests/power_UiResume/control.no_arc
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_UiResume.no_arc"
 PURPOSE = "Measure the amount of time it takes to resume from suspend."
 CRITERIA = "This test is a benchmark."
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Logging"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will search /var/log/messages for pertinent strings to determine if
diff --git a/client/site_tests/power_UiResume/control.resume_constraint b/client/site_tests/power_UiResume/control.resume_constraint
index dca65ac..96501b9 100644
--- a/client/site_tests/power_UiResume/control.resume_constraint
+++ b/client/site_tests/power_UiResume/control.resume_constraint
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_UiResume.resume_constraint"
 PURPOSE = "Measure the amount of time it takes to resume from suspend."
 CRITERIA = "This test is a benchmark."
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Logging"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will search /var/log/messages for pertinent strings to determine if
diff --git a/client/site_tests/power_UiResume/power_UiResume.py b/client/site_tests/power_UiResume/power_UiResume.py
index 291fc09..c11b0fa 100644
--- a/client/site_tests/power_UiResume/power_UiResume.py
+++ b/client/site_tests/power_UiResume/power_UiResume.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -28,11 +29,19 @@
         in Chrome browser.
 
         """
-        self._enable_arc = utils.is_arc_available() and not no_arc
+        # --disable-sync disables test account info sync, e.g. Wi-Fi credentials,
+        # so that each test run does not remember info from the last test run.
+        extra_browser_args = ['--disable-sync']
+
+        # TODO(b/191251229): Only enable ARC if ARC is available and it is not
+        # running ARCVM.
+        self._enable_arc = (utils.is_arc_available() and not utils.is_arcvm()
+                            and not no_arc)
         if self._enable_arc:
-            super(power_UiResume, self).initialize()
+            super(power_UiResume,
+                  self).initialize(extra_browser_args=extra_browser_args)
         else:
-            self._chrome = chrome.Chrome()
+            self._chrome = chrome.Chrome(extra_browser_args=extra_browser_args)
 
 
     def run_once(self, max_devs_returned=10, seconds=0,
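
Both browser start-up paths in power_UiResume now receive the same extra_browser_args list. The sketch below shows that shape: the chrome.Chrome keyword mirrors the diff and the import path is the one commonly used by autotest client tests, while start_browser and its arc_initialize callback are hypothetical stand-ins for the ARC path.

from autotest_lib.client.common_lib.cros import chrome


def start_browser(enable_arc, arc_initialize):
    """Hypothetical helper: build the flags once, pass them to either path."""
    # --disable-sync keeps per-run state (e.g. Wi-Fi credentials) from
    # leaking between test runs, as noted in the diff above.
    extra_browser_args = ['--disable-sync']
    if enable_arc:
        return arc_initialize(extra_browser_args=extra_browser_args)
    return chrome.Chrome(extra_browser_args=extra_browser_args)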
diff --git a/client/site_tests/power_VideoCall/control b/client/site_tests/power_VideoCall/control
index 86793ce..c740ed0 100644
--- a/client/site_tests/power_VideoCall/control
+++ b/client/site_tests/power_VideoCall/control
@@ -2,10 +2,11 @@
 NAME = "power_VideoCall"
 PURPOSE = "Simulates video call multitasking."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test simulates video call multitasking and records power-related statistics.
@@ -20,4 +21,5 @@
 
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
-job.run_test('power_VideoCall', pdash_note=pdash_note)
+job.run_test('power_VideoCall', pdash_note=pdash_note,
+             min_run_time_percent=75)
diff --git a/client/site_tests/power_VideoCall/control.16_h264 b/client/site_tests/power_VideoCall/control.16_h264
new file mode 100644
index 0000000..ca01f98
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.16_h264
@@ -0,0 +1,27 @@
+AUTHOR = "puthik"
+NAME = "power_VideoCall.16_h264"
+PURPOSE = "Simulates video call."
+CRITERIA = "This test is a benchmark."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test simulates a video call and records power-related statistics.
+
+Workload
+- Camera preview in 270p using VP8 codec.
+- 16 270p 24fps H264 video playback.
+
+The run time is 3 minutes.
+"""
+
+video_url = 'http://crospower.page.link/power_VideoCall_' + NAME.split('.')[1]
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoCall', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             video_url=video_url, num_video=17, duration=180, seconds_period=5,
+             multitask=False)
diff --git a/client/site_tests/power_VideoCall/control.16_vp8 b/client/site_tests/power_VideoCall/control.16_vp8
new file mode 100644
index 0000000..225a25d
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.16_vp8
@@ -0,0 +1,27 @@
+AUTHOR = "puthik"
+NAME = "power_VideoCall.16_vp8"
+PURPOSE = "Simulates video call."
+CRITERIA = "This test is a benchmark."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test simulates a video call and records power-related statistics.
+
+Workload
+- Camera preview in 270p using VP8 codec.
+- 16 270p 24fps VP8 video playback.
+
+The run time is 3 minutes.
+"""
+
+video_url = 'http://crospower.page.link/power_VideoCall_' + NAME.split('.')[1]
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoCall', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             video_url=video_url, num_video=17, duration=180, seconds_period=5,
+             multitask=False)
diff --git a/client/site_tests/power_VideoCall/control.16_vp8_multitask b/client/site_tests/power_VideoCall/control.16_vp8_multitask
new file mode 100644
index 0000000..f3acec0
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.16_vp8_multitask
@@ -0,0 +1,27 @@
+AUTHOR = "puthik"
+NAME = "power_VideoCall.16_vp8_multitask"
+PURPOSE = "Simulates video call multitasking."
+CRITERIA = "This test is a benchmark."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test simulates video call multitasking and records power-related statistics.
+
+Workload
+- Camera preview in 270p using VP8 codec.
+- 16 270p 24fps VP8 video playback.
+
+The run time is 3 minutes.
+"""
+
+video_url = 'http://crospower.page.link/power_VideoCall_16_vp8'
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoCall', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             video_url=video_url, num_video=17, duration=180, seconds_period=5,
+             multitask=True)
diff --git a/client/site_tests/power_VideoCall/control.16_vp8_multitask_2hr b/client/site_tests/power_VideoCall/control.16_vp8_multitask_2hr
new file mode 100644
index 0000000..82054a7
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.16_vp8_multitask_2hr
@@ -0,0 +1,28 @@
+AUTHOR = "puthik"
+NAME = "power_VideoCall.16_vp8_multitask_2hr"
+PURPOSE = "Simulates video call multitasking."
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+EXTENDED_TIMEOUT = 7800
+PY_VERSION = 3
+
+DOC = """
+This test simulates video call multitasking and records power-related statistics.
+
+Workload
+- Camera preview in 270p using VP8 codec.
+- 16 270p 24fps VP8 video playback.
+
+The run time is 2 hours.
+"""
+
+video_url = 'http://crospower.page.link/power_VideoCall_16_vp8'
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoCall', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             video_url=video_url, num_video=17, duration=7200,
+             seconds_period=20, multitask=True)
diff --git a/client/site_tests/power_VideoCall/control.16_vp9 b/client/site_tests/power_VideoCall/control.16_vp9
new file mode 100644
index 0000000..c0ce1bd
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.16_vp9
@@ -0,0 +1,27 @@
+AUTHOR = "puthik"
+NAME = "power_VideoCall.16_vp9"
+PURPOSE = "Simulates video call."
+CRITERIA = "This test is a benchmark."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test simulates a video call and records power-related statistics.
+
+Workload
+- Camera preview in 270p using VP8 codec.
+- 16 270p 24fps VP9 video playback.
+
+The run time is 3 minutes.
+"""
+
+video_url = 'http://crospower.page.link/power_VideoCall_' + NAME.split('.')[1]
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoCall', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             video_url=video_url, num_video=17, duration=180, seconds_period=5,
+             multitask=False)
diff --git a/client/site_tests/power_VideoCall/control.25min b/client/site_tests/power_VideoCall/control.25min
index 52adba7..6a9e427 100644
--- a/client/site_tests/power_VideoCall/control.25min
+++ b/client/site_tests/power_VideoCall/control.25min
@@ -2,11 +2,12 @@
 NAME = "power_VideoCall.25min"
 PURPOSE = "Simulates video call multitasking."
 CRITERIA = "This test is a benchmark."
-TIME = "LONG"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:power_monitoring"
+PY_VERSION = 3
 
 DOC = """
 This test simulates video call multitasking and records power-related statistics.
diff --git a/client/site_tests/power_VideoCall/control.49_h264 b/client/site_tests/power_VideoCall/control.49_h264
new file mode 100644
index 0000000..82ac3a0
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.49_h264
@@ -0,0 +1,27 @@
+AUTHOR = "puthik"
+NAME = "power_VideoCall.49_h264"
+PURPOSE = "Simulates video call."
+CRITERIA = "This test is a benchmark."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test simulates a video call and records power-related statistics.
+
+Workload
+- Camera preview in 135p using VP8 codec.
+- 49 135p 15fps H264 video playback.
+
+The run time is 3 minutes.
+"""
+
+video_url = 'http://crospower.page.link/power_VideoCall_' + NAME.split('.')[1]
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoCall', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             video_url=video_url, num_video=50, duration=180, seconds_period=5,
+             multitask=False)
diff --git a/client/site_tests/power_VideoCall/control.49_vp8 b/client/site_tests/power_VideoCall/control.49_vp8
new file mode 100644
index 0000000..b21b75c
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.49_vp8
@@ -0,0 +1,27 @@
+AUTHOR = "puthik"
+NAME = "power_VideoCall.49_vp8"
+PURPOSE = "Simulates video call."
+CRITERIA = "This test is a benchmark."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test simulates a video call and records power-related statistics.
+
+Workload
+- Camera preview in 135p using VP8 codec.
+- 49 135p 15fps VP8 video playback.
+
+The run time is 3 minutes.
+"""
+
+video_url = 'http://crospower.page.link/power_VideoCall_' + NAME.split('.')[1]
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoCall', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             video_url=video_url, num_video=50, duration=180, seconds_period=5,
+             multitask=False)
diff --git a/client/site_tests/power_VideoCall/control.49_vp9 b/client/site_tests/power_VideoCall/control.49_vp9
new file mode 100644
index 0000000..3e658ee
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.49_vp9
@@ -0,0 +1,27 @@
+AUTHOR = "puthik"
+NAME = "power_VideoCall.49_vp9"
+PURPOSE = "Simulates video call."
+CRITERIA = "This test is a benchmark."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test simulates a video call and records power related statistics.
+
+Workload
+- Camera preview in 135p using VP8 codec.
+- 49 135p 24fps VP9 video playback.
+
+The run time is 3 minutes.
+"""
+
+video_url = 'http://crospower.page.link/power_VideoCall_' + NAME.split('.')[1]
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoCall', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             video_url=video_url, num_video=50, duration=180, seconds_period=5,
+             multitask=False)
diff --git a/client/site_tests/power_VideoCall/control.FDO_25min b/client/site_tests/power_VideoCall/control.FDO_25min
new file mode 100644
index 0000000..dada390
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.FDO_25min
@@ -0,0 +1,30 @@
+AUTHOR = "puthik"
+NAME = "power_VideoCall.FDO_25min"
+PURPOSE = "Simulates video call multitasking."
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:power_daily"
+PY_VERSION = 3
+
+DOC = """
+This test simulates video call multitasking and records power related
+statistics while optionally forcing discharge.
+
+Workload
+- Camera preview in 720p using VP9 codec.
+- Four 720p 30fps video playback with two VP8 videos and two VP9 videos.
+- Google Doc with emulated typing.
+
+This version runs for 25 minutes, as that is a common duration for a video call.
+
+"FDO" is short for "force discharge optional." The test uses an EC command to
+force the DUT to discharge; if that fails, it uses AC as the power source.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoCall', pdash_note=pdash_note, duration=1500,
+             tag=NAME.split('.')[1], force_discharge='optional')
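As a rough sketch of the "force discharge optional" behavior described above
(not the actual autotest plumbing; ec_force_discharge below is a hypothetical
stand-in for the EC command the test issues):

    def ec_force_discharge():
        """Hypothetical stand-in for the EC force-discharge command."""
        raise RuntimeError('EC force discharge not supported on this board')

    def pick_power_source(force_discharge):
        """Apply the force-discharge policy and report the resulting source."""
        if force_discharge in ('optional', True):
            try:
                ec_force_discharge()
                return 'battery (forced discharge)'
            except RuntimeError:
                if force_discharge == 'optional':
                    return 'AC (force discharge unavailable)'
                raise  # a mandatory force discharge is not allowed to fail
        return 'AC'

    print(pick_power_source('optional'))  # AC (force discharge unavailable)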
diff --git a/client/site_tests/power_VideoCall/control.FDO_fast b/client/site_tests/power_VideoCall/control.FDO_fast
new file mode 100644
index 0000000..1ece550
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.FDO_fast
@@ -0,0 +1,30 @@
+AUTHOR = "puthik"
+NAME = "power_VideoCall.FDO_fast"
+PURPOSE = "Make sure power_VideoCall works fine with optional force discharge."
+CRITERIA = "This test is a benchmark."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test simulates video call multitasking and records power related
+statistics while optionally forcing discharge.
+
+Workload
+- Camera preview in 720p using VP9 codec.
+- Four 720p 30fps video playback with two VP8 videos and two VP9 videos.
+- Google Doc with emulated typing.
+
+The fast version only runs for 3 minutes to make sure the test works as expected.
+
+"FDO" is short for "force discharge optional." The test uses an EC command to
+force the DUT to discharge; if that fails, it uses AC as the power source.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoCall', pdash_note=pdash_note, duration=180,
+             tag=NAME.split('.')[1], seconds_period=5,
+             force_discharge='optional')
diff --git a/client/site_tests/power_VideoCall/control.fast b/client/site_tests/power_VideoCall/control.fast
index 77a398f..c32c8fd 100644
--- a/client/site_tests/power_VideoCall/control.fast
+++ b/client/site_tests/power_VideoCall/control.fast
@@ -2,10 +2,11 @@
 NAME = "power_VideoCall.fast"
 PURPOSE = "Make sure power_VideoCall works fine."
 CRITERIA = "This test is a benchmark."
-TIME = "SHORT"
+TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test simulates video call multitasking and record power related statistic.
diff --git a/client/site_tests/power_VideoCall/control.fast_crosbolt b/client/site_tests/power_VideoCall/control.fast_crosbolt
index df746b9..11d8825 100644
--- a/client/site_tests/power_VideoCall/control.fast_crosbolt
+++ b/client/site_tests/power_VideoCall/control.fast_crosbolt
@@ -2,11 +2,12 @@
 NAME = "power_VideoCall.fast_crosbolt"
 PURPOSE = "Make sure power_VideoCall works fine."
 CRITERIA = "This test is a benchmark."
-TIME = "SHORT"
+TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:crosbolt_perf_perbuild"
+PY_VERSION = 3
 
 DOC = """
 This test simulates video call multitasking and record power related statistic.
@@ -27,7 +28,8 @@
 job.profilers.add('cros_perf', interval=20)
 
 job.run_test('power_VideoCall', pdash_note=pdash_note, duration=180,
-             tag=NAME.split('.')[1], seconds_period=5, force_discharge=True)
+             tag=NAME.split('.')[1], seconds_period=5,
+             force_discharge='optional')
 
 job.profilers.delete('cros_perf')
 
diff --git a/client/site_tests/power_VideoCall/control.option b/client/site_tests/power_VideoCall/control.option
new file mode 100644
index 0000000..2afaadb
--- /dev/null
+++ b/client/site_tests/power_VideoCall/control.option
@@ -0,0 +1,45 @@
+AUTHOR = "jpmurphy"
+NAME = "power_VideoCall.option"
+PURPOSE = "Simulates video call multitasking."
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test simulates video call multitasking and records power related
+statistics.
+
+This test accepts command-line arguments that override the default arguments
+to run_once.
+
+Default Workload
+- Camera preview in 720p using VP9 codec.
+- Four 720p 30fps video playback with two VP8 videos and two VP9 videos.
+- Google Doc with emulated typing.
+
+The default run time is 2 hours to allow the DUT to heat up to steady state.
+"""
+
+HOURS = 60 * 60
+
+args_dict = utils.args_to_dict(args)
+
+duration = int(args_dict.get('duration', 2 * HOURS))
+preset = str(args_dict.get('preset', ''))
+video_url = str(args_dict.get('video_url', ''))
+num_video = int(args_dict.get('num_video', 5))
+multitask = args_dict.get('multitask')
+# Treat 'false' or '0' (any case) as disabling multitask; anything else,
+# including no argument, enables it. Avoids calling int() on non-numeric text.
+if multitask and str(multitask).lower() in ('false', '0'):
+    multitask = False
+else:
+    multitask = True
+min_run_time_percent = int(args_dict.get('min_run_time_percent', 100))
+
+pdash_note = args_dict.get('pdash_note', '')
+
+job.run_test('power_VideoCall', pdash_note=pdash_note, tag=NAME.split('.')[1],
+             duration=duration, preset=preset, video_url=video_url,
+             num_video=num_video, multitask=multitask,
+             min_run_time_percent=min_run_time_percent)
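For reference, a minimal sketch of the key=value parsing that produces
args_dict above and of the boolean coercion applied to multitask; the real
utils.args_to_dict in autotest may differ in its details:

    def parse_args(arg_strings):
        """Turn entries like 'duration=600' into a plain string dict."""
        parsed = {}
        for entry in arg_strings:
            if '=' in entry:
                key, value = entry.split('=', 1)
                parsed[key.strip()] = value.strip()
        return parsed

    example = parse_args(['duration=600', 'multitask=false', 'num_video=10'])
    duration = int(example.get('duration', 2 * 60 * 60))
    multitask = str(example.get('multitask', 'true')).lower() not in ('false', '0')
    print(duration, multitask)  # 600 False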
diff --git a/client/site_tests/power_VideoCall/power_VideoCall.py b/client/site_tests/power_VideoCall/power_VideoCall.py
index 11ac268..5827bf6 100644
--- a/client/site_tests/power_VideoCall/power_VideoCall.py
+++ b/client/site_tests/power_VideoCall/power_VideoCall.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,7 +8,6 @@
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.common_lib.cros import power_load_util
 from autotest_lib.client.cros.input_playback import keyboard
 from autotest_lib.client.cros.power import power_status
 from autotest_lib.client.cros.power import power_test
@@ -16,8 +16,8 @@
     """class for power_VideoCall test."""
     version = 1
 
-    video_url = 'http://crospower.page.link/power_VideoCall'
-    doc_url = 'http://doc.new'
+    video_url = 'https://storage.googleapis.com/chromiumos-test-assets-public/power_VideoCall/power_VideoCall.html'
+    doc_url = 'http://crospower.page.link/power_VideoCall_doc'
 
     def initialize(self, seconds_period=20., pdash_note='',
                    force_discharge=False):
@@ -25,10 +25,15 @@
         super(power_VideoCall, self).initialize(seconds_period=seconds_period,
                                                 pdash_note=pdash_note,
                                                 force_discharge=force_discharge)
-        self._username = power_load_util.get_username()
-        self._password = power_load_util.get_password()
 
-    def run_once(self, duration=7200, preset=''):
+
+    def run_once(self,
+                 duration=7200,
+                 preset='',
+                 video_url='',
+                 num_video=5,
+                 multitask=True,
+                 min_run_time_percent=100):
         """run_once method.
 
         @param duration: time in seconds to display url and measure power.
@@ -38,49 +43,58 @@
                        'medium' : 720p24_vp8,
                        'low' :    360p24_vp8
                        If not supplied, preset will be determined automatically.
+        @param video_url: url of video call simulator.
+        @param num_video: number of videos, including the camera preview.
+        @param multitask: boolean indicating whether to multitask with a
+                          Google Doc while on the call.
+        @param min_run_time_percent: int between 0 and 100;
+                                     run time must be longer than
+                                     min_run_time_percent / 100.0 * duration.
         """
 
-        if not preset:
+        if not preset and not video_url:
             preset = self._get_camera_preset()
+        if not video_url:
+            video_url = self.video_url
+
+        # Append the preset as a query parameter to select the camera preset.
+        if preset:
+            video_url = '%s?preset=%s' % (video_url, preset)
 
         extra_browser_args = self.get_extra_browser_args_for_camera_test()
         with keyboard.Keyboard() as keys,\
              chrome.Chrome(init_network_controller=True,
-                           gaia_login=True,
-                           username=self._username,
-                           password=self._password,
+                           gaia_login=False,
                            extra_browser_args=extra_browser_args,
                            autotest_ext=True) as cr:
 
             # Move existing window to left half and open video page
             tab_left = cr.browser.tabs[0]
             tab_left.Activate()
-            keys.press_key('alt+[')
-            logging.info('Navigating left window to %s', self.video_url)
-            tab_left.Navigate(self.video_url)
+            if multitask:
+                keys.press_key('alt+[')
+            elif not tab_left.EvaluateJavaScript(
+                    'document.webkitIsFullScreen'):
+                # Run in fullscreen when not multitask.
+                keys.press_key('f4')
+
+            logging.info('Navigating left window to %s', video_url)
+            tab_left.Navigate(video_url)
             tab_left.WaitForDocumentReadyStateToBeComplete()
-
-            # We need to make sure that default camera preset was init properly
-            # before changing preset or else MediaRecorder won't get torn down
-            # properly. So capture the init time with the default preset and
-            # then switch to appropriate preset later.
             video_init_time = power_status.VideoFpsLogger.time_until_ready(
-                              tab_left, num_video=5)
+                    tab_left, num_video=num_video)
             self.keyvals['video_init_time'] = video_init_time
-            tab_left.EvaluateJavaScript('setPreset("%s")' % preset)
 
-            # Wait for camera to init for the new preset.
-            power_status.VideoFpsLogger.time_until_ready(tab_left, num_video=5)
-
-            # Open Google Doc on right half
-            logging.info('Navigating right window to %s', self.doc_url)
-            cmd = 'chrome.windows.create({ url : "%s" });' % self.doc_url
-            cr.autotest_ext.EvaluateJavaScript(cmd)
-            tab_right = cr.browser.tabs[-1]
-            tab_right.Activate()
-            keys.press_key('alt+]')
-            tab_right.WaitForDocumentReadyStateToBeComplete()
-            time.sleep(5)
+            tab_right = None
+            if multitask:
+                # Open Google Doc on right half
+                logging.info('Navigating right window to %s', self.doc_url)
+                cmd = 'chrome.windows.create({ url : "%s" });' % self.doc_url
+                cr.autotest_ext.EvaluateJavaScript(cmd)
+                tab_right = cr.browser.tabs[-1]
+                tab_right.Activate()
+                keys.press_key('alt+]')
+                tab_right.WaitForDocumentReadyStateToBeComplete()
+                time.sleep(5)
 
             self._vlog = power_status.VideoFpsLogger(tab_left,
                 seconds_period=self._seconds_period,
@@ -89,23 +103,51 @@
 
             # Start typing number block
             self.start_measurements()
+            # TODO(b/226960942): Revert crrev.com/c/3556798 once the root
+            # cause of the test failing before 2 hrs is found.
+            min_run_time = min_run_time_percent / 100.0 * duration
+            type_count = 0
             while time.time() - self._start_time < duration:
-                keys.press_key('number_block')
+                if multitask:
+                    keys.press_key('number_block')
+                    type_count += 1
+                    if type_count == 10:
+                        keys.press_key('ctrl+a_backspace')
+                        type_count = 0
+                else:
+                    time.sleep(60)
+
+                if not tab_left.IsAlive():
+                    msg = 'Video tab crashed'
+                    logging.error(msg)
+                    if time.time() - self._start_time < min_run_time:
+                        self._failure_messages.append(msg)
+                    break
+
+                if tab_right and not tab_right.IsAlive():
+                    msg = 'Doc tab crashed'
+                    logging.error(msg)
+                    if time.time() - self._start_time < min_run_time:
+                        self._failure_messages.append(msg)
+                    break
+
                 self.status.refresh()
                 if self.status.is_low_battery():
                     logging.info(
                         'Low battery, stop test early after %.0f minutes',
                         (time.time() - self._start_time) / 60)
                     break
-            self.collect_keypress_latency(cr)
+
+            if multitask:
+                self.collect_keypress_latency(cr)
 
     def _get_camera_preset(self):
         """Return camera preset appropriate to hw spec.
 
         Preset will be determined using this logic.
-        - Newer Intel Core U-series CPU with fan -> 'high'
-        - AMD Ryzen CPU with fan -> 'high'
+        - Newer Intel Core U/P-series CPU with fan -> 'high'
         - Above without fan -> 'medium'
+        - AMD Ryzen CPU -> 'medium'
         - High performance ARM -> 'medium'
         - Other Intel Core CPU -> 'medium'
         - AMD APU -> 'low'
@@ -115,11 +157,12 @@
         """
         HIGH_IF_HAS_FAN_REGEX = r'''
             Intel[ ]Core[ ]i[357]-[6-9][0-9]{3}U|     # Intel Core i7-8650U
-            Intel[ ]Core[ ]i[357]-1[0-9]{4}U|         # Intel Core i7-10510U
-            AMD[ ]Ryzen[ ][357][ ][3-9][0-9]{3}C|     # AMD Ryzen 7 3700C
-            Genuine[ ]Intel[ ]0000                    # Unrelease CPU
+            Intel[ ]Core[ ]i[357]-1[0-9]{3,4}[UPHG]|  # 10510U, 1135G7, 1250P
+            Genuine[ ]Intel[ ]0000|                   # Unreleased Intel CPU
+            AMD[ ]Eng[ ]Sample                        # Unreleased AMD CPU
         '''
         MEDIUM_REGEX = r'''
+            AMD[ ]Ryzen[ ][357][ ][3-9][0-9]{3}|      # AMD Ryzen 7 3700
             Intel[ ]Core[ ][im][357]-[0-9]{4,5}[UY]|  # Intel Core i5-8200Y
             Intel[ ]Core[ ][im][357]-[67]Y[0-9]{2}|   # Intel Core m7-6Y75
             Intel[ ]Pentium[ ][0-9]{4,5}[UY]|         # Intel Pentium 6405U
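The preset regexes above are written for re.VERBOSE matching against the CPU
model string. A simplified sketch of how they would be applied (only two
alternatives reproduced; how the CPU name is obtained is omitted here):

    import re

    HIGH_IF_HAS_FAN_REGEX = r'''
        Intel[ ]Core[ ]i[357]-[6-9][0-9]{3}U|     # e.g. Intel Core i7-8650U
        Intel[ ]Core[ ]i[357]-1[0-9]{3,4}[UPHG]   # e.g. 10510U, 1135G7, 1250P
    '''

    def preset_for(cpu_name, has_fan):
        """Pick a camera preset from the CPU model string (simplified)."""
        if re.search(HIGH_IF_HAS_FAN_REGEX, cpu_name, re.VERBOSE):
            return 'high' if has_fan else 'medium'
        return 'medium'  # the real test also maps weaker SoCs to 'low'

    print(preset_for('Intel Core i7-1250P', has_fan=True))    # high
    print(preset_for('Intel Core i5-1135G7', has_fan=False))  # medium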
diff --git a/client/site_tests/power_VideoDRMPlayback/control b/client/site_tests/power_VideoDRMPlayback/control
deleted file mode 100644
index f6d2deb..0000000
--- a/client/site_tests/power_VideoDRMPlayback/control
+++ /dev/null
@@ -1,15 +0,0 @@
-AUTHOR = "puthik"
-NAME = "power_VideoDRMPlayback"
-PURPOSE = "Measure encrypted video playback power usage."
-CRITERIA = "This test is a benchmark."
-TIME = "MEDIUM"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-DOC = """
-This test records power related statistics while playing encrypted video file
-in various format.
-"""
-args_dict = utils.args_to_dict(args)
-pdash_note = args_dict.get('pdash_note', '')
-job.run_test('power_VideoDRMPlayback', pdash_note=pdash_note)
diff --git a/client/site_tests/power_VideoDRMPlayback/power_VideoDRMPlayback.py b/client/site_tests/power_VideoDRMPlayback/power_VideoDRMPlayback.py
deleted file mode 100644
index b8854c0..0000000
--- a/client/site_tests/power_VideoDRMPlayback/power_VideoDRMPlayback.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.cros.power import power_videotest
-
-import py_utils
-
-class power_VideoDRMPlayback(power_videotest.power_VideoTest):
-    """class for power_VideoDRMPlayback test."""
-    version = 1
-
-    _BASE_URL='https://ats.sandbox.google.com/videostack/media_test_page.html?file='
-
-    # list of video name and url.
-    _VIDEOS = [
-        ('h264_720_30fps_cenc',
-         _BASE_URL + 'EME_720p30fpsH264_foodmarket_sync_L3_video_clear_audio.mp4.mpd'
-        ),
-        ('h264_1080_30fps_cenc',
-         _BASE_URL + 'EME_1080p30fpsH264_foodmarket_sync_L3_video_clear_audio.mp4.mpd'
-        ),
-        ('vp9_720_30fps_cenc',
-         _BASE_URL + 'EME_720p30fpsVP9_foodmarket_sync_L3_video_clear_audio.webm.mpd'
-        ),
-        ('vp9_1080_30fps_cenc',
-         _BASE_URL + 'EME_1080p30fpsVP9_foodmarket_sync_L3_video_clear_audio.webm.mpd'
-        ),
-        ('av1_720_30fps_cenc',
-         _BASE_URL + 'EME_720p30fpsAV1_foodmarket_sync_L3_video_clear_audio.mp4.mpd'
-        ),
-        ('av1_1080_30fps_cenc',
-         _BASE_URL + 'EME_1080p30fpsAV1_foodmarket_sync_L3_video_clear_audio.mp4.mpd'
-        ),
-        ('h264_720_30fps_cbcs',
-         _BASE_URL + 'EME_720p30fpsH264_foodmarket_sync_cbcs_video_clear_audio.mp4.mpd'
-        ),
-        ('h264_1080_30fps_cbcs',
-         _BASE_URL + 'EME_1080p30fpsH264_foodmarket_sync_cbcs_video_clear_audio.mp4.mpd'
-        ),
-        ('av1_720_30fps_cbcs',
-         _BASE_URL + 'EME_720p30fpsAV1_foodmarket_sync_cbcs_video_clear_audio.mp4.mpd'
-        ),
-        ('av1_1080_30fps_cbcs',
-         _BASE_URL + 'EME_1080p30fpsAV1_foodmarket_sync_cbcs_video_clear_audio.mp4.mpd'
-        ),
-    ]
-
-    # Time in seconds to measure power per video file.
-    _MEASUREMENT_DURATION = 120
-
-    def _prepare_video(self, cr, url):
-        """Prepare browser session before playing video.
-
-        @param cr: Autotest Chrome instance.
-        @param url: url of video file to play.
-        """
-        tab = cr.browser.tabs[0]
-        tab.Navigate(url)
-        tab.WaitForDocumentReadyStateToBeComplete()
-
-    def _start_video(self, cr, url):
-        """Start playing video.
-
-        @param cr: Autotest Chrome instance.
-        @param url: url of video file to play.
-        """
-        tab = cr.browser.tabs[0]
-
-        # Chrome prevents making an element fullscreen if the request doesn't
-        # initiated by user gesture. https://CrOSPower.page.link/noFullScreen
-        # Fake the user gesture by evaluate javascript from URL bar.
-        try:
-            tab.Navigate("javascript:TestFrameworkApp.FullScreen()", timeout=0)
-            tab.WaitForDocumentReadyStateToBeComplete()
-        except py_utils.TimeoutException:
-            # tab.Navigate always raise TimeoutException because we used it to
-            # execute javascript and didn't navigate to anywhere.
-            pass
-
-        tab.EvaluateJavaScript("TestFrameworkApp.getInstance().startTest()")
-
-    def _teardown_video(self, cr, url):
-        """Teardown browser session after playing video.
-
-        @param cr: Autotest Chrome instance.
-        @param url: url of video file to play.
-        """
-        pass
-
-    def run_once(self, videos=None, secs_per_video=_MEASUREMENT_DURATION,
-                 use_hw_decode=True):
-        """run_once method.
-
-        @param videos: list of tuple of tagname and video url to test.
-        @param secs_per_video: time in seconds to play video and measure power.
-        @param use_hw_decode: if False, disable hw video decoding.
-        """
-        if not videos:
-            videos = self._VIDEOS
-
-        super(power_VideoDRMPlayback, self).run_once(
-            videos, secs_per_video, use_hw_decode)
-
diff --git a/client/site_tests/power_VideoDetector/control b/client/site_tests/power_VideoDetector/control
index 4ad545f..7dc3cc7 100644
--- a/client/site_tests/power_VideoDetector/control
+++ b/client/site_tests/power_VideoDetector/control
@@ -12,8 +12,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
-ATTRIBUTES = "suite:kernel_daily_regression"
+ATTRIBUTES = "suite:power_build"
 
 DOC = """
 This test verifies that the backlight does not get dimmed during video playback.
diff --git a/client/site_tests/power_VideoDetector/fade.html b/client/site_tests/power_VideoDetector/fade.html
index ed38dc6..490887b 100644
--- a/client/site_tests/power_VideoDetector/fade.html
+++ b/client/site_tests/power_VideoDetector/fade.html
@@ -1,21 +1,8 @@
 <!DOCTYPE html>
 <html>
-<head>
-  <style>
-    body {
-      -webkit-animation-name: throb;
-      -webkit-animation-iteration-count: infinite;
-      -webkit-animation-duration: 2s;
-      -webkit-animation-timing-function: ease-in-out;
-    }
-
-    @-webkit-keyframes throb {
-      0% { background-color: #fff; }
-      50% { background-color: #000; }
-      100% { background-color: #fff; }
-    }
-  </style>
-</head>
-<body>
+<body bgcolor="black">
+  <video autoplay loop muted controls width="100%" height="100%">
+    <source type="video/webm" src="data:video/webm;base64,GkXfo59ChoEBQveBAULygQRC84EIQoKEd2VibUKHgQJChYECGFOAZwEAAAAAAAW+EU2bdLpNu4tTq4QVSalmU6yBoU27i1OrhBZUrmtTrIHYTbuMU6uEElTDZ1OsggEtTbuMU6uEHFO7a1OsggWo7AEAAAAAAABZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAVSalmsirXsYMPQkBNgI1MYXZmNTguNzYuMTAwV0GNTGF2ZjU4Ljc2LjEwMESJiECfQAAAAAAAFlSua9CuAQAAAAAAAEfXgQFzxYi5YicG489gb5yBACK1nIN1bmSGhVZfVlA5g4EBI+ODhAP5QKrgAQAAAAAAABSwgaC6gVqagQJVsIhVsYEAVbmBAhJUw2dAnnNzAQAAAAAAACdjwIBnyAEAAAAAAAAaRaOHRU5DT0RFUkSHjUxhdmY1OC43Ni4xMDBzcwEAAAAAAABjY8CLY8WIuWInBuPPYG9nyAEAAAAAAAAmRaOHRU5DT0RFUkSHmUxhdmM1OC4xMzQuMTAwIGxpYnZweC12cDlnyKJFo4hEVVJBVElPTkSHlDAwOjAwOjAyLjAwMDAwMDAwMAAAH0O2dUPR54EAo62BAACAokmDQuAJ8AWWADgkHBhKAACAYfYwaB17QABnGoeV+rsWV16IqZRS54Cjn4EAQwCmAECS8AFAAAAMYAAAAQVdAAERX3hy1RZ/7oCjn4EAhQCmAECS8AE7gAAMYAAAAQF8AAEQI9VyeGLPWwCjnoEAyACmAECS8AFAAAAMYAAAANX6AAQJ1Yj9xnOzcKOegQELAKYAQJLwATUAAAxgAAAA05YABAbFsSozStUAo5+BAU0ApgBAkvABQAAADGAAAABvWwABw/8OEbtAFRwAo56BAZAApgBAkvABO4AADGAAAABUEAADZL7JfDWEyoCjn4EB0wCmAECS8AFAAAAMYAAAAFDoAAGwQYbJKLP/dACjnoECFQCmAECS8AEogAAMYAAAAK44AAet4yYlFn/ugKOegQJYAKYAQJLwAUAAAAxgAAAAmAAAB3YWtzsEtjMAo56BApsApgDAkvABKIAADGAAAHMWxoPWxhCbmfRam4CjnoEC3QCmAECS8AFAAAAMYAAAAPDwAAIV8iERlT1FQKOfgQMgAKYAQJLwATUAAAxgAAABFf0AAItIOJ4TGaVqgKOfgQNjAKYAQJLwAUAAAAxgAAABDj4AARQv7EfflTZwAKOggQOlAKYAQJLwATuAAAxgAAABGhiAAEX174XUkRa0VgCjn4ED6ACmAECS8AFAAAAMYAAAAQ4+AAEUL+xH35U2cACjoIEEKwCmAECS8AEogAAMYAAAARz5QAARi96JXvL8VA+go56BBG0ApgBAkvABQAAADGAAAADw8AACFfIhEZU9RUCjlYEEsACmAECSnABO4AADIAAAZdLSKKOegQTzAKYAQJKcAFAAAANgAAAAmAAAB3YWtzsEtjMAo52BBTUApgDAkvABKIAADGAAAHMOixtH7JK+fOUKAKOfgQV4AKYAQJKcAFAAAANgAAAAUOgAAbBBhskos/90AKOegQW7AKYAQJKcAE7gAANgAAAAVBAAA2S+yXw1hMqAo5+BBf0ApgBAkpwAUAAAA2AAAABvWwABw/8OEbtAFRwAo5+BBkAApgBAkpwASiAAA2AAAAD5PAACGyga2fRVofAAo56BBoMApgBAkpwAUAAAA2AAAADV+gAECdWI/cZzs3Cjn4EGxQCmAECSnABO4AADYAAAAQF8AAEQI9VyeGLPWwCjn4EHCACmAECSnABQAAADYAAAAQVdAAERX3hy1RZ/7oCjloEHSwCmAECSnABNQAADIAAAa/9+CACjn4EHjQCmAECSnABQAAADYAAAARY4gACLUYifuPxUD6AcU7trkbuPs4EAt4r3gQHxggHR8IED">
+  </video>
 </body>
 </html>
diff --git a/client/site_tests/power_VideoDetector/power_VideoDetector.py b/client/site_tests/power_VideoDetector/power_VideoDetector.py
index 3dc6e5b..dc728dc 100644
--- a/client/site_tests/power_VideoDetector/power_VideoDetector.py
+++ b/client/site_tests/power_VideoDetector/power_VideoDetector.py
@@ -1,8 +1,10 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import os
+import shutil
 import time
 
 from autotest_lib.client.bin import test, utils
@@ -17,6 +19,7 @@
     """
 
     version = 1
+    tmp_path = '/tmp'
 
     def run_once(self, run_time_sec=60):
         """
@@ -25,6 +28,14 @@
         if run_time_sec < 30:
             raise error.TestError('Must run for at least 30 seconds')
 
+
+        # https://crbug.com/1288417, b/215442780
+        # Copy the file to tmpdir to avoid setting up a local HTTP server.
+        file_path = os.path.join(self.bindir, 'fade.html')
+        self.dest_path = os.path.join(self.tmp_path, 'fade.html')
+        shutil.copy(file_path, self.dest_path)
+        http_path = 'file://' + self.dest_path
+
         with chrome.Chrome(init_network_controller=True) as cr:
             # Start powerd if not started.  Set timeouts for quick idle events.
             run_time_ms = run_time_sec * 1000
@@ -49,12 +60,11 @@
                 utils.wait_for_value(backlight.get_max_level)
 
             # Open a tab to play video.
-            cr.browser.platform.SetHTTPServerDirectories(self.bindir)
             tab = cr.browser.tabs[0]
-            tab.Navigate(cr.browser.platform.http_server.UrlOf(
-                os.path.join(self.bindir, 'fade.html')))
+            tab.Navigate(http_path)
             tab.WaitForDocumentReadyStateToBeComplete()
 
+
             # Sleep until the runtime is up.
             time.sleep(run_time_sec)
 
@@ -79,4 +89,6 @@
         """
         Cleanup powerd after test.
         """
+        if hasattr(self, 'dest_path'):
+            os.remove(self.dest_path)
         upstart.restart_job('powerd')
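The serve-from-/tmp pattern used above (and again in power_VideoSuspend below),
reduced to a self-contained sketch; the source path in the comment is
illustrative only:

    import os
    import shutil
    import tempfile

    def stage_page(src_path):
        """Copy a test page to a temp dir and return (file:// URL, dest path)."""
        dest = os.path.join(tempfile.gettempdir(), os.path.basename(src_path))
        shutil.copy(src_path, dest)
        return 'file://' + dest, dest

    # url, dest = stage_page('/path/to/fade.html')  # navigate the browser to
    # url during the test, then os.remove(dest) in cleanup().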
diff --git a/client/site_tests/power_VideoEncode/control b/client/site_tests/power_VideoEncode/control
index 4104e3a..917a3e4 100644
--- a/client/site_tests/power_VideoEncode/control
+++ b/client/site_tests/power_VideoEncode/control
@@ -1,11 +1,13 @@
 AUTHOR = "puthik"
 NAME = "power_VideoEncode"
 PURPOSE = "Measure video encode power usage."
+
 CRITERIA = "This test is a benchmark."
 TIME = "LONG"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while encode video file in various
diff --git a/client/site_tests/power_VideoEncode/control.FDO b/client/site_tests/power_VideoEncode/control.FDO
new file mode 100644
index 0000000..1efbd4b
--- /dev/null
+++ b/client/site_tests/power_VideoEncode/control.FDO
@@ -0,0 +1,24 @@
+AUTHOR = "puthik"
+NAME = "power_VideoEncode.FDO"
+PURPOSE = "Measure video encode power usage with optional force discharge."
+
+CRITERIA = "This test is a benchmark."
+TIME = "LONG"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:power_daily"
+PY_VERSION = 3
+
+DOC = """
+This test records power related statistics while encoding video in various
+formats, optionally while forcing discharge.
+
+"FDO" is short for "force discharge optional." The test uses an EC command to
+force the DUT to discharge; if that fails, it uses AC as the power source.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoEncode', pdash_note=pdash_note, seconds_period=5,
+             force_discharge='optional')
diff --git a/client/site_tests/power_VideoEncode/control.FDO_fast b/client/site_tests/power_VideoEncode/control.FDO_fast
new file mode 100644
index 0000000..564b3dd
--- /dev/null
+++ b/client/site_tests/power_VideoEncode/control.FDO_fast
@@ -0,0 +1,30 @@
+AUTHOR = "puthik"
+NAME = "power_VideoEncode.FDO_fast"
+PURPOSE = "Make sure video encode works fine with optional force discharge."
+
+CRITERIA = "This test is a benchmark."
+TIME = "FAST"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test makes sure that power_VideoEncode works correctly while optionally
+forcing discharge.
+
+"FDO" is short for "force discharge optional." The test uses an EC command to
+force the DUT to discharge; if that fails, it uses AC as the power source.
+"""
+
+formats = [
+    ('h264', 'hd', 24),
+    ('vp9', 'hvga', 24),
+    ('vp8', 'qhvga', 15),
+]
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoEncode', tag=NAME.split('.')[1], seconds_per_test=30,
+             seconds_period=5, format=formats, pdash_note=pdash_note,
+             force_discharge='optional')
diff --git a/client/site_tests/power_VideoEncode/control.fast b/client/site_tests/power_VideoEncode/control.fast
index a65c25c..9f2d605 100644
--- a/client/site_tests/power_VideoEncode/control.fast
+++ b/client/site_tests/power_VideoEncode/control.fast
@@ -1,11 +1,13 @@
 AUTHOR = "puthik"
 NAME = "power_VideoEncode.fast"
 PURPOSE = "Measure video encode power usage."
+
 CRITERIA = "This test is a benchmark."
 TIME = "FAST"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test makes sure that power_VideoEncode is working fine.
diff --git a/client/site_tests/power_VideoEncode/control.vp9_1hr b/client/site_tests/power_VideoEncode/control.vp9_1hr
new file mode 100644
index 0000000..a9bd4a7
--- /dev/null
+++ b/client/site_tests/power_VideoEncode/control.vp9_1hr
@@ -0,0 +1,23 @@
+AUTHOR = "mqg"
+NAME = "power_VideoEncode.vp9_1hr"
+PURPOSE = "Stress test video encode and measure statistics."
+
+CRITERIA = "This test is a benchmark."
+TIME = "LONG"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test records power related statistics while encoding video in VP9
+for 1 hour.
+"""
+
+HOURS = 60 * 60
+formats = [('vp9', 'fhd', 24)]
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoEncode', pdash_note=pdash_note,
+             seconds_per_test=HOURS, format=formats)
diff --git a/client/site_tests/power_VideoEncode/power_VideoEncode.py b/client/site_tests/power_VideoEncode/power_VideoEncode.py
index b694b5c..bed2a7c 100644
--- a/client/site_tests/power_VideoEncode/power_VideoEncode.py
+++ b/client/site_tests/power_VideoEncode/power_VideoEncode.py
@@ -1,4 +1,5 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Lint as: python2, python3
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 import logging
@@ -48,10 +49,12 @@
                        Format is tuple of codec, resolution and framerate.
         """
         extra_browser_args = self.get_extra_browser_args_for_camera_test()
+        # b/228256145: disable FirmwareUpdaterApp to avoid a powerd restart.
+        extra_browser_args.append('--disable-features=FirmwareUpdaterApp')
         with chrome.Chrome(init_network_controller=True,
                            extra_browser_args=extra_browser_args) as cr:
 
-            tab = cr.browser.tabs.New()
+            tab = cr.browser.tabs[0]
             tab.Activate()
 
             # Just measure power in full-screen.
@@ -60,6 +63,9 @@
                 with keyboard.Keyboard() as keys:
                     keys.press_key('f4')
 
+            # Stop services again as Chrome might have restarted them.
+            self._services.stop_services()
+
             url = self.video_url
             tab.Navigate(url)
             tab.WaitForDocumentReadyStateToBeComplete()
diff --git a/client/site_tests/power_VideoPlayback/control b/client/site_tests/power_VideoPlayback/control
index 9a092b9..2fb1c14 100644
--- a/client/site_tests/power_VideoPlayback/control
+++ b/client/site_tests/power_VideoPlayback/control
@@ -1,12 +1,14 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_VideoPlayback"
 PURPOSE = "Measure video playback power usage."
+
 CRITERIA = "This test is a benchmark."
-TIME = "MEDIUM"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:power_sanity, suite:power_daily, suite:power_monitoring"
+ATTRIBUTES = "suite:power_check, suite:power_monitoring"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while play back video file in various
diff --git a/client/site_tests/power_VideoPlayback/control.FDO b/client/site_tests/power_VideoPlayback/control.FDO
new file mode 100644
index 0000000..a29f9bb
--- /dev/null
+++ b/client/site_tests/power_VideoPlayback/control.FDO
@@ -0,0 +1,24 @@
+AUTHOR = "ChromeOS Team"
+NAME = "power_VideoPlayback.FDO"
+PURPOSE = "Measure video playback power usage."
+
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:power_daily"
+PY_VERSION = 3
+
+DOC = """
+This test records power related statistics while playing back video files in
+various formats.
+
+"FDO" is short for "force discharge optional." The test uses an EC command to
+force the DUT to discharge; if that fails, it uses AC as the power source.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoPlayback', pdash_note=pdash_note,
+             force_discharge='optional')
diff --git a/client/site_tests/power_VideoPlayback/control.FDO_fast b/client/site_tests/power_VideoPlayback/control.FDO_fast
new file mode 100644
index 0000000..0cbc38e
--- /dev/null
+++ b/client/site_tests/power_VideoPlayback/control.FDO_fast
@@ -0,0 +1,23 @@
+AUTHOR = "ChromeOS Team"
+NAME = "power_VideoPlayback.FDO_fast"
+PURPOSE = "Measure video playback power usage."
+
+CRITERIA = "This test is a benchmark."
+TIME = "FAST"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test makes sure that power_VideoPlayback is working fine.
+
+"FDO" is short for "force discharge optional." The test uses an EC command to
+force the DUT to discharge; if that fails, it uses AC as the power source.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoPlayback', tag=NAME.split('.')[1], secs_per_video=10,
+             pdash_note=pdash_note, seconds_period=1, fast=True,
+             force_discharge='optional')
diff --git a/client/site_tests/power_VideoPlayback/control.arc b/client/site_tests/power_VideoPlayback/control.arc
new file mode 100644
index 0000000..37dfc6e
--- /dev/null
+++ b/client/site_tests/power_VideoPlayback/control.arc
@@ -0,0 +1,29 @@
+AUTHOR = "Chrome OS Team"
+NAME = "power_VideoPlayback.arc"
+PURPOSE = "Measure video playback power usage."
+
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+ATTRIBUTES = ""
+DOC = """
+Test video decode for
+- h264 1080p 30fps
+- vp9 1080p 30fps
+for 10 min each with ARC enabled.
+
+This test is named simply `arc` to distinguish ARC vs. no ARC on the DUT. The
+details of video encoding and length aren't relevant, other than that `arc`
+and `noarc` should be identical.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoPlayback', tag=NAME.split('.')[1],
+             videos=[('h264_1080_30fps', ''), ('vp9_1080_30fps','')],
+             secs_per_video=600, pdash_note=pdash_note, seconds_period=20,
+             run_arc=True)
diff --git a/client/site_tests/power_VideoPlayback/control.fast b/client/site_tests/power_VideoPlayback/control.fast
index 81aaec4..2408d93 100644
--- a/client/site_tests/power_VideoPlayback/control.fast
+++ b/client/site_tests/power_VideoPlayback/control.fast
@@ -1,11 +1,13 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_VideoPlayback.fast"
 PURPOSE = "Measure video playback power usage."
+
 CRITERIA = "This test is a benchmark."
 TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test makes sure that power_VideoPlayback working fine.
diff --git a/client/site_tests/power_VideoPlayback/control.h264_1080_30fps b/client/site_tests/power_VideoPlayback/control.h264_1080_30fps
new file mode 100644
index 0000000..b6d8ca0
--- /dev/null
+++ b/client/site_tests/power_VideoPlayback/control.h264_1080_30fps
@@ -0,0 +1,21 @@
+AUTHOR = "ChromeOS Team"
+NAME = "power_VideoPlayback.h264_1080_30fps"
+PURPOSE = "Measure video playback power usage."
+
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+ATTRIBUTES = ""
+DOC = """
+Test video decode for h264 1080p 30fps for 60min
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoPlayback', tag=NAME.split('.')[1],
+             videos=[('h264_1080_30fps', '')], secs_per_video=3600,
+             pdash_note=pdash_note, seconds_period=10)
diff --git a/client/site_tests/power_VideoPlayback/control.noarc b/client/site_tests/power_VideoPlayback/control.noarc
new file mode 100644
index 0000000..e618e39
--- /dev/null
+++ b/client/site_tests/power_VideoPlayback/control.noarc
@@ -0,0 +1,29 @@
+AUTHOR = "Chrome OS Team"
+NAME = "power_VideoPlayback.noarc"
+PURPOSE = "Measure video playback power usage."
+
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+ATTRIBUTES = ""
+DOC = """
+Test video decode for
+- h264 1080p 30fps
+- vp9 1080p 30fps
+for 10 min each with ARC disabled.
+
+This test is named simply `noarc` to distinguish ARC vs. no ARC on the DUT. The
+details of video encoding and length aren't relevant, other than that `arc`
+and `noarc` should be identical.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoPlayback', tag=NAME.split('.')[1],
+             videos=[('h264_1080_30fps', ''), ('vp9_1080_30fps','')],
+             secs_per_video=600, pdash_note=pdash_note, seconds_period=20,
+             run_arc=False)
diff --git a/client/site_tests/power_VideoPlayback/control.powerqual b/client/site_tests/power_VideoPlayback/control.powerqual
new file mode 100644
index 0000000..5d06ce7
--- /dev/null
+++ b/client/site_tests/power_VideoPlayback/control.powerqual
@@ -0,0 +1,24 @@
+AUTHOR = "ChromeOS Team"
+NAME = "power_VideoPlayback.powerqual"
+PURPOSE = "Measure video playback power usage."
+
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+ATTRIBUTES = ""
+PY_VERSION = 3
+
+DOC = """
+Test video decode
+- 1 hour of h264 1080p 30fps
+- 1 hour of vp9 1080p 30fps
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoPlayback', tag=NAME.split('.')[1],
+             videos=[('h264_1080_30fps', ''), ('vp9_1080_30fps', '')],
+             secs_per_video=3600, force_discharge='optional',
+             pdash_note=pdash_note)
diff --git a/client/site_tests/power_VideoPlayback/control.sw_decoder b/client/site_tests/power_VideoPlayback/control.sw_decoder
index 3c80464..796fe5c 100644
--- a/client/site_tests/power_VideoPlayback/control.sw_decoder
+++ b/client/site_tests/power_VideoPlayback/control.sw_decoder
@@ -1,12 +1,14 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_VideoPlayback.sw_decoder"
 PURPOSE = "Measure video playback power usage."
+
 CRITERIA = "This test is a benchmark."
-TIME = "MEDIUM"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:power_sanity, suite:power_daily, suite:power_monitoring"
+ATTRIBUTES = "suite:power_check, suite:power_monitoring"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while play back video file in various
diff --git a/client/site_tests/power_VideoPlayback/control.sw_decoder_FDO b/client/site_tests/power_VideoPlayback/control.sw_decoder_FDO
new file mode 100644
index 0000000..94e0b3e
--- /dev/null
+++ b/client/site_tests/power_VideoPlayback/control.sw_decoder_FDO
@@ -0,0 +1,27 @@
+AUTHOR = "ChromeOS Team"
+NAME = "power_VideoPlayback.sw_decoder_FDO"
+PURPOSE = "Measure video playback power usage."
+
+CRITERIA = "This test is a benchmark."
+TIME = "LENGTHY"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "client"
+ATTRIBUTES = "suite:power_daily"
+PY_VERSION = 3
+
+DOC = """
+This test records power related statistics while playing back video files in
+various formats.
+
+This version of the test forces Chrome to use software decoder.
+
+"FDO" is short for "force discharge optional." The test uses an EC command to
+force the DUT to discharge; if that fails, it uses AC as the power source.
+"""
+
+args_dict = utils.args_to_dict(args)
+pdash_note = args_dict.get('pdash_note', '')
+job.run_test('power_VideoPlayback', use_hw_decode=False,
+             tag=NAME.split('.')[1], pdash_note=pdash_note,
+             force_discharge='optional')
diff --git a/client/site_tests/power_VideoPlayback/control.sw_decoder_fast b/client/site_tests/power_VideoPlayback/control.sw_decoder_fast
index 903ea80..785d713 100644
--- a/client/site_tests/power_VideoPlayback/control.sw_decoder_fast
+++ b/client/site_tests/power_VideoPlayback/control.sw_decoder_fast
@@ -1,11 +1,13 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_VideoPlayback.sw_decoder_fast"
 PURPOSE = "Measure video playback power usage."
+
 CRITERIA = "This test is a benchmark."
 TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test makes sure that power_VideoPlayback working fine.
diff --git a/client/site_tests/power_VideoPlayback/control.vp9_1080_30fps b/client/site_tests/power_VideoPlayback/control.vp9_1080_30fps
index e5d9bd9..ff4a89b 100644
--- a/client/site_tests/power_VideoPlayback/control.vp9_1080_30fps
+++ b/client/site_tests/power_VideoPlayback/control.vp9_1080_30fps
@@ -1,18 +1,21 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_VideoPlayback.vp9_1080_30fps"
 PURPOSE = "Measure video playback power usage."
+
 CRITERIA = "This test is a benchmark."
-TIME = "SHORT"
+TIME = "LENGTHY"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = ""
+PY_VERSION = 3
+
 DOC = """
-Test video decode for vp9 1080p 30fps for 6min
+Test video decode for vp9 1080p 30fps for 60min
 """
 
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
 job.run_test('power_VideoPlayback', tag=NAME.split('.')[1],
-             videos=[('vp9_1080_30fps', '')], secs_per_video=360,
+             videos=[('vp9_1080_30fps', '')], secs_per_video=3600,
              pdash_note=pdash_note, seconds_period=10)
diff --git a/client/site_tests/power_VideoPlayback/power_VideoPlayback.py b/client/site_tests/power_VideoPlayback/power_VideoPlayback.py
index f5e444e..11d588f 100644
--- a/client/site_tests/power_VideoPlayback/power_VideoPlayback.py
+++ b/client/site_tests/power_VideoPlayback/power_VideoPlayback.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -115,7 +116,6 @@
         local_path = os.path.join(self._RAMDISK, os.path.basename(url))
         logging.info('Downloading %s to %s', url, local_path)
         file_utils.download_file(url, local_path)
-        self.cr.browser.platform.SetHTTPServerDirectories(self._RAMDISK)
 
     def _start_video(self, cr, url):
         """Start playing video.
@@ -125,7 +125,11 @@
         """
         local_path = os.path.join(self._RAMDISK, os.path.basename(url))
         tab = cr.browser.tabs[0]
-        tab.Navigate(cr.browser.platform.http_server.UrlOf(local_path))
+        # Ensure the tab is activated because Chrome sometimes starts focused
+        # on another "What's new" tab.
+        tab.Activate()
+
+        tab.Navigate('file://' + local_path)
         tab.WaitForDocumentReadyStateToBeComplete()
         tab.EvaluateJavaScript(
             "document.getElementsByTagName('video')[0].loop=true")
@@ -136,7 +140,6 @@
         @param cr: Autotest Chrome instance.
         @param url: url of video file to play.
         """
-        self.cr.browser.platform.StopAllLocalServers()
         local_path = os.path.join(self._RAMDISK, os.path.basename(url))
         os.remove(local_path)
 
diff --git a/client/site_tests/power_VideoSuspend/control b/client/site_tests/power_VideoSuspend/control
index b33622b..96bee12 100644
--- a/client/site_tests/power_VideoSuspend/control
+++ b/client/site_tests/power_VideoSuspend/control
@@ -2,13 +2,14 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_VideoSuspend"
 ATTRIBUTES = "suite:kernel_daily_regression, suite:video"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Suspends the system with a video playing.
diff --git a/client/site_tests/power_VideoSuspend/control.multiformat b/client/site_tests/power_VideoSuspend/control.multiformat
index fb4efd5..78f8f44 100644
--- a/client/site_tests/power_VideoSuspend/control.multiformat
+++ b/client/site_tests/power_VideoSuspend/control.multiformat
@@ -2,7 +2,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to: go/tauto-py3-migration
+# To re-enable migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = "ChromeOS Team"
 NAME = "power_VideoSuspend.multiformat"
 #TODO: move back to suite:kernel_daily_regression when test stable.
 ATTRIBUTES = "suite:experimental"
@@ -10,6 +15,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Suspends the system with a video playing.
diff --git a/client/site_tests/power_VideoSuspend/power_VideoSuspend.py b/client/site_tests/power_VideoSuspend/power_VideoSuspend.py
index 692d845..05aa764 100644
--- a/client/site_tests/power_VideoSuspend/power_VideoSuspend.py
+++ b/client/site_tests/power_VideoSuspend/power_VideoSuspend.py
@@ -1,9 +1,11 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
 import os
+import shutil
 import time
 
 from autotest_lib.client.bin import test, utils
@@ -14,16 +16,22 @@
 class power_VideoSuspend(test.test):
     """Suspend the system with a video playing."""
     version = 1
+    tmp_path = '/tmp'
 
     def run_once(self, video_urls=None):
+        # https://crbug.com/1288417, b/215442780
+        # Copy the file to tmpdir to avoid setting up a local HTTP server.
+        file_path = os.path.join(self.bindir, 'play.html')
+        self.dest_path = os.path.join(self.tmp_path, 'play.html')
+        shutil.copy(file_path, self.dest_path)
+        http_path = 'file://' + self.dest_path
+
         if video_urls is None:
             raise error.TestError('no videos to play')
 
         with chrome.Chrome(init_network_controller=True) as cr:
-            cr.browser.platform.SetHTTPServerDirectories(self.bindir)
             tab = cr.browser.tabs[0]
-            tab.Navigate(cr.browser.platform.http_server.UrlOf(
-                os.path.join(self.bindir, 'play.html')))
+            tab.Navigate(http_path)
             tab.WaitForDocumentReadyStateToBeComplete()
 
             for url in video_urls:
@@ -51,3 +59,10 @@
         time.sleep(2)
 
         self._check_video_is_playing(tab)
+
+    def cleanup(self):
+        """
+        Cleanup video file.
+        """
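+        # dest_path may not be set if run_once failed before copying the file.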
+        if hasattr(self, 'dest_path'):
+            os.remove(self.dest_path)
diff --git a/client/site_tests/power_WaitForCoolDown/control b/client/site_tests/power_WaitForCoolDown/control
index a7436a3..65ffb6c 100644
--- a/client/site_tests/power_WaitForCoolDown/control
+++ b/client/site_tests/power_WaitForCoolDown/control
@@ -9,6 +9,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Transition test to wait for DUT to cool down after thermal test while collecting
@@ -18,4 +19,4 @@
 args_dict = utils.args_to_dict(args)
 pdash_note = args_dict.get('pdash_note', '')
 job.run_test('power_WaitForCoolDown', pdash_note=pdash_note,
-             force_discharge=True)
+             force_discharge='optional')
diff --git a/client/site_tests/power_WaitForCoolDown/power_WaitForCoolDown.py b/client/site_tests/power_WaitForCoolDown/power_WaitForCoolDown.py
index 25e774a..5e9fa78 100755
--- a/client/site_tests/power_WaitForCoolDown/power_WaitForCoolDown.py
+++ b/client/site_tests/power_WaitForCoolDown/power_WaitForCoolDown.py
@@ -1,4 +1,5 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Lint as: python2, python3
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -48,7 +49,7 @@
             self.loop_sleep(i, loop_secs)
 
         max_temp = max(self._tlog.refresh())
-        logging.warn(
+        logging.warning(
             'Fail to cool down after %d seconds, temp: %.1fC, target: %dC',
             num_loop * loop_secs, max_temp, target_temp)
 
diff --git a/client/site_tests/power_WakeupRTC/control b/client/site_tests/power_WakeupRTC/control
deleted file mode 100644
index ee731c3..0000000
--- a/client/site_tests/power_WakeupRTC/control
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "power_WakeupRTC"
-ATTRIBUTES = "suite:kernel_per-build_regression"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """
-Tests that RTC devices generate wakeup events.
-"""
-
-job.run_test('power_WakeupRTC')
diff --git a/client/site_tests/power_WakeupRTC/power_WakeupRTC.py b/client/site_tests/power_WakeupRTC/power_WakeupRTC.py
deleted file mode 100644
index 90ff9f2..0000000
--- a/client/site_tests/power_WakeupRTC/power_WakeupRTC.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import rtc
-from autotest_lib.client.cros.power import sys_power
-
-def read_rtc_wakeup(rtc_device):
-    """
-    Read the wakeup setting for for the RTC device.
-    """
-    sysfs_path = '/sys/class/rtc/%s/device/power/wakeup' % rtc_device
-    if os.path.isfile(sysfs_path):
-        return file(sysfs_path).read().strip()
-
-
-def read_rtc_wakeup_active_count(rtc_device):
-    """
-    Read the current wakeup active count for the RTC device.
-    """
-    path = '/sys/class/rtc/%s/device/power/wakeup_active_count' % rtc_device
-    return int(file(path).read())
-
-
-def fire_wakealarm(rtc_device):
-    """
-    Schedule a wakealarm and wait for it to fire.
-    """
-    rtc.set_wake_alarm('+1', rtc_device)
-    time.sleep(2)
-
-
-class power_WakeupRTC(test.test):
-    """Test RTC wake events."""
-
-    version = 1
-
-    def run_once(self):
-        """
-        Tests that RTC devices generate wakeup events.
-        We require /dev/rtc0 to work since there are many things which rely
-        on the rtc0 wakeup alarm. For all the other RTCs, only test those
-        that have wakeup alarm capabilities.
-        """
-        default_rtc = "/dev/rtc0"
-        if not os.path.exists(default_rtc):
-            raise error.TestFail('RTC device %s does not exist' % default_rtc)
-        default_rtc_device = os.path.basename(default_rtc)
-        if read_rtc_wakeup(default_rtc_device) != 'enabled':
-            raise error.TestFail('RTC wakeup is not enabled: %s' % default_rtc_device)
-        for rtc_device in rtc.get_rtc_devices():
-            if read_rtc_wakeup(rtc_device) != 'enabled':
-                logging.info('RTC wakeup is not enabled: %s' % rtc_device)
-            else:
-                logging.info('RTC wakeup is enabled for: %s' % rtc_device)
-                self.run_once_rtc(rtc_device)
-
-    def run_once_rtc(self, rtc_device):
-        """Tests that a RTC device generate wakeup events.
-
-        @param rtc_device: RTC device to be tested.
-        """
-        logging.info('testing rtc device %s', rtc_device)
-
-        # Test that RTC can generate wake events
-        old_sys_wakeup_count = sys_power.read_wakeup_count()
-        old_rtc_wakeup_active_count = read_rtc_wakeup_active_count(rtc_device)
-        fire_wakealarm(rtc_device)
-        new_sys_wakeup_count = sys_power.read_wakeup_count()
-        new_rtc_wakeup_active_count = read_rtc_wakeup_active_count(rtc_device)
-        if new_rtc_wakeup_active_count == old_rtc_wakeup_active_count:
-            raise error.TestFail(
-                    'RTC alarm should increase RTC wakeup_active_count: %s'
-                    % rtc_device)
-        if new_sys_wakeup_count == old_sys_wakeup_count:
-            raise error.TestFail(
-                    'RTC alarm should increase system wakeup_count: %s'
-                    % rtc_device)
diff --git a/client/site_tests/power_WebGL/control b/client/site_tests/power_WebGL/control
deleted file mode 100644
index 850acf9..0000000
--- a/client/site_tests/power_WebGL/control
+++ /dev/null
@@ -1,17 +0,0 @@
-AUTHOR = "Chrome OS Team"
-NAME = "power_WebGL"
-PURPOSE = "Measure WebGL power usage."
-CRITERIA = "This test is a benchmark."
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-ATTRIBUTES = "suite:power_sanity, suite:power_daily, suite:power_monitoring"
-
-DOC = """
-This test records power related statistics while open WebGL heavy page.
-"""
-
-args_dict = utils.args_to_dict(args)
-pdash_note = args_dict.get('pdash_note', '')
-job.run_test('power_WebGL', pdash_note=pdash_note)
diff --git a/client/site_tests/power_WebGL/control.fast b/client/site_tests/power_WebGL/control.fast
deleted file mode 100644
index 4c155c2..0000000
--- a/client/site_tests/power_WebGL/control.fast
+++ /dev/null
@@ -1,17 +0,0 @@
-AUTHOR = "Chrome OS Team"
-NAME = "power_WebGL.fast"
-PURPOSE = "Measure WebGL power usage."
-CRITERIA = "This test is a benchmark."
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "client"
-
-DOC = """
-This test makes sure that power_WebGL works.
-"""
-
-args_dict = utils.args_to_dict(args)
-pdash_note = args_dict.get('pdash_note', '')
-job.run_test('power_WebGL', duration=10, tag=NAME.split('.')[1],
-              pdash_note=pdash_note, seconds_period=1)
diff --git a/client/site_tests/power_WebGL/power_WebGL.py b/client/site_tests/power_WebGL/power_WebGL.py
deleted file mode 100644
index 8d946c1..0000000
--- a/client/site_tests/power_WebGL/power_WebGL.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import logging
-import time
-
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.cros.input_playback import keyboard
-from autotest_lib.client.cros.power import power_test
-
-class power_WebGL(power_test.power_Test):
-    """class for power_WebGL test.
-    """
-    version = 1
-
-    # Google Earth permalink for Googleplex
-    URL = 'https://earth.app.goo.gl/Tj5Wj'
-
-    def run_once(self, url=URL, duration=180):
-        """run_once method.
-
-        @param url: url of webgl heavy page.
-        @param duration: time in seconds to display url and measure power.
-        """
-        with chrome.Chrome(init_network_controller=True) as self.cr:
-            tab = self.cr.browser.tabs.New()
-            tab.Activate()
-
-            # Just measure power in full-screen.
-            fullscreen = tab.EvaluateJavaScript('document.webkitIsFullScreen')
-            if not fullscreen:
-                with keyboard.Keyboard() as keys:
-                    keys.press_key('f4')
-
-            logging.info('Navigating to url: %s', url)
-            tab.Navigate(url)
-            tab.WaitForDocumentReadyStateToBeComplete()
-
-            self.start_measurements()
-            time.sleep(duration)
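
The deleted power_WebGL test follows a common client power-test shape: force the page full-screen, navigate, then sample power at a fixed period for a fixed duration. Stripped of the autotest power_test base class, the sampling loop reduces to a few lines; the battery sysfs path below is only an illustrative probe and is not part of the deleted test:

import time

def sample_for(duration_secs, probe, period_secs=1.0):
    """Call probe() every period_secs for duration_secs; return the samples."""
    samples = []
    end = time.time() + duration_secs
    while time.time() < end:
        samples.append(probe())
        time.sleep(period_secs)
    return samples

def read_battery_power_uw(path='/sys/class/power_supply/BAT0/power_now'):
    """Example probe: instantaneous battery draw in microwatts.

    The path is an assumption; not every device exposes this node.
    """
    with open(path) as f:
        return int(f.read().strip())

# usage sketch: samples = sample_for(180, read_battery_power_uw)
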
diff --git a/client/site_tests/power_WifiIdle/control b/client/site_tests/power_WifiIdle/control
index 4e66ecf..e34b302 100644
--- a/client/site_tests/power_WifiIdle/control
+++ b/client/site_tests/power_WifiIdle/control
@@ -1,12 +1,14 @@
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_WifiIdle"
 PURPOSE = "Measure power usage with wifi on or off."
+
 CRITERIA = "This test is a benchmark."
 TIME = "SHORT"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
-ATTRIBUTES = "suite:power_daily"
+#ATTRIBUTES = "suite:power_daily"
+PY_VERSION = 3
 
 DOC = """
 This test records power related statistics while staying idle, and
diff --git a/client/site_tests/power_WifiIdle/power_WifiIdle.py b/client/site_tests/power_WifiIdle/power_WifiIdle.py
index c2b491e..2ab900a 100644
--- a/client/site_tests/power_WifiIdle/power_WifiIdle.py
+++ b/client/site_tests/power_WifiIdle/power_WifiIdle.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/security_CpuVulnerabilities/control b/client/site_tests/security_CpuVulnerabilities/control
deleted file mode 100644
index e157f0a..0000000
--- a/client/site_tests/security_CpuVulnerabilities/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "security_CpuVulnerabilities"
-PURPOSE = "Ensure Chrome OS contains mitigations against CPU vulnerabilities."
-CRITERIA = """
-Passes when the kernel reports the appropriate mitigations against CPU
-vulnerabilities.
-"""
-TIME = "SHORT"
-ATTRIBUTES = "suite:bvt-perbuild"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "security"
-TEST_TYPE = "client"
-
-DOC = """
-This test ensures that the kernel contains appropriate mitigations against CPU
-vulnerabilities by checking what the kernel reports in
-'/sys/devices/system/cpu/vulnerabilities'.
-"""
-
-job.run_test('security_CpuVulnerabilities')
diff --git a/client/site_tests/security_CpuVulnerabilities/security_CpuVulnerabilities.py b/client/site_tests/security_CpuVulnerabilities/security_CpuVulnerabilities.py
deleted file mode 100644
index c628981..0000000
--- a/client/site_tests/security_CpuVulnerabilities/security_CpuVulnerabilities.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class security_CpuVulnerabilities(test.test):
-    """
-    This test ensures that the kernel contains appropriate mitigations against
-    CPU vulnerabilities by checking what the kernel reports in
-    '/sys/devices/system/cpu/vulnerabilities'.
-    """
-    version = 1
-
-    SYSTEM_CPU_VULNERABILITIES = '/sys/devices/system/cpu/vulnerabilities'
-
-    TESTS = {
-        'amd': {
-            'meltdown': ('0', set()),
-            'spectre_v1': ('0', set(['__user pointer sanitization'])),
-            'spectre_v2': ('0', set(['Full AMD retpoline'])),
-        },
-        'arm': {},
-        'i386': {},
-        'x86_64': {
-            'meltdown': ('0', set(['PTI'])),
-            'spectre_v1': ('4.4', set(['__user pointer sanitization'])),
-            'spectre_v2': ('0', set(['Full generic retpoline'])),
-        },
-    }
-
-
-    def run_once(self):
-        """Runs the test."""
-        arch = utils.get_cpu_arch()
-        if arch == 'x86_64':
-            arch = utils.get_cpu_soc_family()
-        curr_kernel = utils.get_kernel_version()
-
-        logging.debug('CPU arch is "%s"', arch)
-        logging.debug('Kernel version is "%s"', curr_kernel)
-
-        if arch not in self.TESTS:
-            raise error.TestNAError('"%s" arch not in test baseline' % arch)
-
-        # Kernels <= 3.14 don't have this directory and are expected to abort
-        # with TestNA.
-        if not os.path.exists(self.SYSTEM_CPU_VULNERABILITIES):
-            raise error.TestNAError('"%s" directory not present, not testing' %
-                                    self.SYSTEM_CPU_VULNERABILITIES)
-
-        failures = []
-        for filename, expected in self.TESTS[arch].items():
-            file = os.path.join(self.SYSTEM_CPU_VULNERABILITIES, filename)
-            if not os.path.exists(file):
-                raise error.TestError('"%s" file does not exist, cannot test' %
-                                      file)
-
-            min_kernel = expected[0]
-            if utils.compare_versions(curr_kernel, min_kernel) == -1:
-                # The kernel on the DUT is older than the version where
-                # the mitigation was introduced.
-                info_message = 'DUT kernel version "%s"' % curr_kernel
-                info_message += ' is older than "%s"' % min_kernel
-                info_message += ', skipping "%s" test' % filename
-                logging.info(info_message)
-                continue
-
-            # E.g.:
-            # Not affected
-            #   $ cat /sys/devices/system/cpu/vulnerabilities/meltdown
-            #   Not affected
-            #
-            # One mitigation
-            #   $ cat /sys/devices/system/cpu/vulnerabilities/meltdown
-            #   Mitigation: PTI
-            #
-            # Several mitigations
-            #   $ cat /sys/devices/system/cpu/vulnerabilities/spectre_v2
-            #   Mitigation: Full generic retpoline, IBPB, IBRS_FW
-            with open(file) as f:
-                lines = f.readlines()
-                if len(lines) > 1:
-                    logging.warning('"%s" has more than one line', file)
-
-                actual = lines[0].strip()
-                logging.debug('"%s" -> "%s"', file, actual)
-
-                expected_mitigations = expected[1]
-                if not expected_mitigations:
-                    if actual != 'Not affected':
-                        failures.append((file, actual, expected_mitigations))
-                else:
-                    # CPU is affected.
-                    if 'Mitigation' not in actual:
-                        failures.append((file, actual, expected_mitigations))
-                    else:
-                        mit_list = actual.split(':', 1)[1].split(',')
-                        actual_mitigations = set(t.strip() for t in mit_list)
-                        # Test set inclusion.
-                        if actual_mitigations < expected_mitigations:
-                            failures.append((file, actual_mitigations,
-                                             expected_mitigations))
-
-        if failures:
-            for failure in failures:
-                logging.error('"%s" was "%s", expected "%s"', *failure)
-            raise error.TestFail('CPU vulnerabilities not mitigated properly')
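
The core of the deleted security_CpuVulnerabilities test is parsing lines such as 'Mitigation: Full generic retpoline, IBPB, IBRS_FW' and checking that an expected set of mitigation names is present. That step can be isolated as a small pure function; note that the check below requires expected to be a subset of actual, a slightly stricter form than the original's strict-subset comparison:

def parse_mitigations(line):
    """Turn a sysfs vulnerability line into a set of mitigation names.

    'Not affected'        -> empty set
    'Mitigation: PTI'     -> {'PTI'}
    'Mitigation: Full generic retpoline, IBPB, IBRS_FW'
                          -> {'Full generic retpoline', 'IBPB', 'IBRS_FW'}
    Anything else (e.g. 'Vulnerable') returns None so the caller can flag it.
    """
    line = line.strip()
    if line == 'Not affected':
        return set()
    if not line.startswith('Mitigation'):
        return None
    return set(part.strip() for part in line.split(':', 1)[1].split(','))

def is_mitigated(line, expected):
    """True if every expected mitigation name appears in the sysfs line."""
    actual = parse_mitigations(line)
    return actual is not None and expected <= actual

# usage sketch:
# is_mitigated('Mitigation: Full generic retpoline, IBPB',
#              {'Full generic retpoline'})
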
diff --git a/client/site_tests/security_Libcontainer/control b/client/site_tests/security_Libcontainer/control
deleted file mode 100644
index 55f846b..0000000
--- a/client/site_tests/security_Libcontainer/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME="SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-Tests features of libcontainer.
-"""
-NAME = "security_Libcontainer"
-PURPOSE = "Unit tests of libcontainer that must run outside of the chroot."
-CRITERIA = "Fail if libcontainer cannot correctly create containers."
-ATTRIBUTES = ""
-TEST_CLASS = "security"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-job.run_test("security_Libcontainer")
diff --git a/client/site_tests/security_Libcontainer/security_Libcontainer.py b/client/site_tests/security_Libcontainer/security_Libcontainer.py
deleted file mode 100644
index 11e5905..0000000
--- a/client/site_tests/security_Libcontainer/security_Libcontainer.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test, utils
-
-import os
-
-class security_Libcontainer(test.test):
-    """Runs libcontainer unit tests in the device.
-
-    This is useful since some features (like user namespacing) can only really
-    be tested outside of a chroot environment.
-    """
-    version = 1
-    executable = 'libcontainer_target_test'
-
-
-    def setup(self):
-        """Builds the binary for the device."""
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def run_once(self):
-        """Runs the test on the device."""
-        binpath = os.path.join(self.srcdir, self.executable)
-        utils.system_output(binpath, retain_output=True)
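
The deleted security_Libcontainer wrapper only builds the gtest binary on the device (utils.make) and runs it, failing if the binary exits non-zero. The same build-then-run pattern with plain subprocess looks like this (src_dir is assumed to contain the Makefile removed just below in this diff):

import os
import subprocess

def build_and_run(src_dir, binary='libcontainer_target_test'):
    """Run `make` in src_dir, then execute the resulting test binary.

    Raises subprocess.CalledProcessError if either step fails, which is how
    a wrapper like the deleted test would surface a failure.
    """
    subprocess.check_call(['make'], cwd=src_dir)
    binpath = os.path.join(src_dir, binary)
    output = subprocess.check_output([binpath], stderr=subprocess.STDOUT)
    return output.decode(errors='replace')
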
diff --git a/client/site_tests/security_Libcontainer/src/Makefile b/client/site_tests/security_Libcontainer/src/Makefile
deleted file mode 100644
index 9b9d5c3..0000000
--- a/client/site_tests/security_Libcontainer/src/Makefile
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-PKG_CONFIG ?= pkg-config
-BASE_VER = $(shell cat $(SYSROOT)/usr/share/libchrome/BASE_VER)
-DEP_LIBS = libchrome libcontainer libminijail
-CXXFLAGS += $(shell $(PKG_CONFIG) --cflags $(DEP_LIBS))
-CXXFLAGS += -std=gnu++14 -Werror -Wall
-LDFLAGS += $(shell $(PKG_CONFIG) --libs $(DEP_LIBS))
-LDFLAGS += -lgtest
-
-TARGET_UNITTEST = libcontainer_target_test
-
-all: $(TARGET_UNITTEST)
-
-$(TARGET_UNITTEST): libcontainer_target_unittest.cc
-	$(CXX) $(CPPFLAGS) $(CXXFLAGS) -g -o $@ $^ $(LDFLAGS)
-
-.PHONY: clean
-clean:
-	$(RM) $(TARGET_UNITTEST)
diff --git a/client/site_tests/security_Libcontainer/src/libcontainer_target_unittest.cc b/client/site_tests/security_Libcontainer/src/libcontainer_target_unittest.cc
deleted file mode 100644
index 6afe45b..0000000
--- a/client/site_tests/security_Libcontainer/src/libcontainer_target_unittest.cc
+++ /dev/null
@@ -1,235 +0,0 @@
-// Copyright 2016 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <fcntl.h>
-#include <sys/mount.h>
-#include <sys/types.h>
-#include <unistd.h>
-
-#include <base/at_exit.h>
-#include <base/files/file_util.h>
-#include <base/files/scoped_file.h>
-#include <base/files/scoped_temp_dir.h>
-#include <base/macros.h>
-#include <base/stl_util.h>
-#include <base/strings/string_number_conversions.h>
-#include <base/strings/stringprintf.h>
-#include <gtest/gtest.h>
-#include <libcontainer.h>
-#include <libminijail.h>
-
-namespace libcontainer {
-
-namespace {
-
-// A small RAII class that redirects stdout while it's alive. It also gets the
-// first 4k of the output.
-class ScopedCaptureStdout {
- public:
-  ScopedCaptureStdout() {
-    original_stdout_fd_.reset(dup(STDOUT_FILENO));
-    CHECK(original_stdout_fd_.is_valid());
-    int pipe_fds[2];
-    CHECK(pipe2(pipe_fds, O_NONBLOCK) != -1);
-    read_fd_.reset(pipe_fds[0]);
-    CHECK(dup2(pipe_fds[1], STDOUT_FILENO) != -1);
-    CHECK(close(pipe_fds[1]) != -1);
-  }
-
-  ~ScopedCaptureStdout() {
-    CHECK(dup2(original_stdout_fd_.get(), STDOUT_FILENO) != -1);
-  }
-
-  std::string GetContents() {
-    char buffer[4096];
-    ssize_t read_bytes = read(read_fd_.get(), buffer, sizeof(buffer) - 1);
-    CHECK(read_bytes >= 0);
-    buffer[read_bytes] = '\0';
-    return std::string(buffer, read_bytes);
-  }
-
- private:
-  base::ScopedFD read_fd_;
-  base::ScopedFD original_stdout_fd_;
-
-  DISALLOW_COPY_AND_ASSIGN(ScopedCaptureStdout);
-};
-
-}  // namespace
-
-class LibcontainerTargetTest : public ::testing::Test {
- public:
-  LibcontainerTargetTest() = default;
-  ~LibcontainerTargetTest() override = default;
-
-  void SetUp() override {
-    ASSERT_TRUE(temp_dir_.CreateUniqueTempDir());
-
-    base::FilePath rootfs;
-    ASSERT_TRUE(base::CreateTemporaryDirInDir(
-        temp_dir_.GetPath(), FILE_PATH_LITERAL("rootfs"), &rootfs));
-
-    config_ = container_config_create();
-    ASSERT_NE(nullptr, config_);
-
-    ASSERT_EQ(0, container_config_uid_map(config_, "0 0 429496729"));
-    ASSERT_EQ(0, container_config_gid_map(config_, "0 0 429496729"));
-    ASSERT_EQ(0, container_config_rootfs(config_, "/"));
-    ASSERT_EQ(0, container_config_set_cgroup_parent(
-                     config_, "chronos_containers", 1000, 1000));
-
-    container_ = container_new("containerUT", rootfs.value().c_str());
-    ASSERT_NE(nullptr, container_);
-  }
-
-  void TearDown() override {
-    container_destroy(container_);
-    container_ = nullptr;
-    container_config_destroy(config_);
-    config_ = nullptr;
-    ASSERT_TRUE(temp_dir_.Delete());
-  }
-
-  struct container* container() {
-    return container_;
-  }
-  struct container_config* config() {
-    return config_;
-  }
-
- private:
-  base::ScopedTempDir temp_dir_;
-  struct container* container_ = nullptr;
-  struct container_config* config_ = nullptr;
-
-  DISALLOW_COPY_AND_ASSIGN(LibcontainerTargetTest);
-};
-
-TEST_F(LibcontainerTargetTest, AddHookRedirectTest) {
-  // Preserve stdout/stderr to get the output from the container.
-  int stdio_fds[] = {STDOUT_FILENO, STDERR_FILENO};
-  ASSERT_EQ(0, container_config_inherit_fds(config(), stdio_fds,
-                                            base::size(stdio_fds)));
-
-  static const char* kPreChrootArgv[] = {
-      "/bin/cat",
-  };
-  int stdin_fd;
-  ASSERT_EQ(0, container_config_add_hook(
-                   config(), MINIJAIL_HOOK_EVENT_PRE_CHROOT, kPreChrootArgv[0],
-                   kPreChrootArgv, base::size(kPreChrootArgv), &stdin_fd,
-                   nullptr, nullptr));
-  EXPECT_EQ(1, write(stdin_fd, "1", 1));
-  close(stdin_fd);
-
-  static const char* kProgramArgv[] = {
-      "/bin/echo",
-      "-n",
-      "2",
-  };
-  ASSERT_EQ(0, container_config_program_argv(config(), kProgramArgv,
-                                             base::size(kProgramArgv)));
-
-  std::string output;
-  {
-    ScopedCaptureStdout capture_stdout;
-    EXPECT_EQ(0, container_start(container(), config()));
-    EXPECT_EQ(0, container_wait(container()));
-    output = capture_stdout.GetContents();
-  }
-  EXPECT_EQ("12", output);
-}
-
-TEST_F(LibcontainerTargetTest, AddHookOrderTest) {
-  // Preserve stdout/stderr to get the output from the container.
-  int stdio_fds[] = {STDOUT_FILENO, STDERR_FILENO};
-  ASSERT_EQ(0, container_config_inherit_fds(config(), stdio_fds,
-                                            base::size(stdio_fds)));
-
-  static const char* kProgramArgv[] = {
-      "/bin/echo",
-      "-n",
-      "3",
-  };
-  ASSERT_EQ(0, container_config_program_argv(config(), kProgramArgv,
-                                             base::size(kProgramArgv)));
-
-  // Hooks are run in the following order: pre-chroot, pre-dropcaps, pre-execve
-  static const char* kPreExecveArgv[] = {
-      "/bin/echo",
-      "-n",
-      "2",
-  };
-  ASSERT_EQ(0, container_config_add_hook(
-                   config(), MINIJAIL_HOOK_EVENT_PRE_EXECVE, kPreExecveArgv[0],
-                   kPreExecveArgv, base::size(kPreExecveArgv), nullptr, nullptr,
-                   nullptr));
-
-  static const char* kPreChrootArgv[] = {
-      "/bin/echo",
-      "-n",
-      "1",
-  };
-  ASSERT_EQ(0, container_config_add_hook(
-                   config(), MINIJAIL_HOOK_EVENT_PRE_CHROOT, kPreChrootArgv[0],
-                   kPreChrootArgv, base::size(kPreChrootArgv), nullptr, nullptr,
-                   nullptr));
-
-  std::string output;
-  {
-    ScopedCaptureStdout capture_stdout;
-    EXPECT_EQ(0, container_start(container(), config()));
-    EXPECT_EQ(0, container_wait(container()));
-    output = capture_stdout.GetContents();
-  }
-  EXPECT_EQ("123", output);
-}
-
-TEST_F(LibcontainerTargetTest, AddHookPidArgument) {
-  // Preserve stdout/stderr to get the output from the container.
-  int stdio_fds[] = {STDOUT_FILENO, STDERR_FILENO};
-  ASSERT_EQ(0, container_config_inherit_fds(config(), stdio_fds,
-                                            base::size(stdio_fds)));
-
-  static const char* kProgramArgv[] = {
-      "/bin/true",
-  };
-  ASSERT_EQ(0, container_config_program_argv(config(), kProgramArgv,
-                                             base::size(kProgramArgv)));
-
-  static const char* kPreExecveArgv[] = {
-      "/bin/echo",
-      "-n",
-      "$PID",
-  };
-  ASSERT_EQ(0, container_config_add_hook(
-                   config(), MINIJAIL_HOOK_EVENT_PRE_EXECVE, kPreExecveArgv[0],
-                   kPreExecveArgv, base::size(kPreExecveArgv), nullptr, nullptr,
-                   nullptr));
-
-  std::string output;
-  int pid;
-  {
-    ScopedCaptureStdout capture_stdout;
-    EXPECT_EQ(0, container_start(container(), config()));
-    pid = container_pid(container());
-    EXPECT_EQ(0, container_wait(container()));
-    output = capture_stdout.GetContents();
-  }
-  EXPECT_EQ(base::NumberToString(pid), output);
-}
-
-}  // namespace libcontainer
-
-// Avoid including syslog.h, since it collides with some of the logging
-// constants in libchrome.
-#define SYSLOG_LOG_INFO 6
-
-int main(int argc, char** argv) {
-  base::AtExitManager exit_manager;
-  testing::InitGoogleTest(&argc, argv);
-  testing::GTEST_FLAG(throw_on_failure) = true;
-  minijail_log_to_fd(STDERR_FILENO, SYSLOG_LOG_INFO);
-  return RUN_ALL_TESTS();
-}
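
The ScopedCaptureStdout helper in the deleted C++ test temporarily points fd 1 at a pipe so the test can assert on what the container hooks printed. For reference, the same dup/dup2 trick in Python, with the non-blocking read and 4 KiB cap mirroring the C++ class (an illustrative sketch, not part of the deleted test):

import contextlib
import os

@contextlib.contextmanager
def capture_stdout_fd(max_bytes=4096):
    """Temporarily redirect fd 1 into a pipe and yield a reader callable."""
    saved = os.dup(1)                    # keep the real stdout
    read_fd, write_fd = os.pipe()
    os.set_blocking(read_fd, False)      # don't hang if nothing was written
    os.dup2(write_fd, 1)
    os.close(write_fd)
    try:
        yield lambda: _drain(read_fd, max_bytes)
    finally:
        os.dup2(saved, 1)                # restore stdout
        os.close(saved)
        os.close(read_fd)

def _drain(fd, max_bytes):
    """Read whatever was captured; empty pipe just yields an empty string."""
    try:
        return os.read(fd, max_bytes).decode(errors='replace')
    except BlockingIOError:
        return ''

# usage sketch (read before leaving the with block):
# with capture_stdout_fd() as get_output:
#     os.write(1, b'12')
#     captured = get_output()
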
diff --git a/client/site_tests/security_NosymfollowMountOption/control.baseline b/client/site_tests/security_NosymfollowMountOption/control.baseline
index 377d375..bd19913 100644
--- a/client/site_tests/security_NosymfollowMountOption/control.baseline
+++ b/client/site_tests/security_NosymfollowMountOption/control.baseline
@@ -31,5 +31,6 @@
 TEST_CATEGORY = "Functional"
 TEST_TYPE = "client"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 job.run_test("security_NosymfollowMountOption", test_selinux_interaction=False)
diff --git a/client/site_tests/security_NosymfollowMountOption/control.test_selinux_interaction b/client/site_tests/security_NosymfollowMountOption/control.test_selinux_interaction
index 9eb1db0..fecbea0 100644
--- a/client/site_tests/security_NosymfollowMountOption/control.test_selinux_interaction
+++ b/client/site_tests/security_NosymfollowMountOption/control.test_selinux_interaction
@@ -23,5 +23,6 @@
 TEST_CATEGORY = "Functional"
 TEST_TYPE = "client"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 job.run_test("security_NosymfollowMountOption", test_selinux_interaction=True)
diff --git a/client/site_tests/security_NosymfollowMountOption/security_NosymfollowMountOption.py b/client/site_tests/security_NosymfollowMountOption/security_NosymfollowMountOption.py
index 79f1210..f1b6cdb 100644
--- a/client/site_tests/security_NosymfollowMountOption/security_NosymfollowMountOption.py
+++ b/client/site_tests/security_NosymfollowMountOption/security_NosymfollowMountOption.py
@@ -53,7 +53,7 @@
         """
         try:
             subprocess.check_output(["/bin/umount", MOUNT_PATH])
-        except subprocess.CalledProcessError, e:
+        except subprocess.CalledProcessError as e:
             self._fail("umount call failed")
 
     def mount_and_test_with_string(self, mount_options, restrict_symlinks):
@@ -91,7 +91,7 @@
                 if arg == "nosymfollow":
                     continue
                 else:
-                    if output.find(arg) == -1:
+                    if output.find(str.encode(arg)) == -1:
                         self._fail("filesystem missing '%s' arg" % arg)
                         return
 
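
The hunk above is a Python 3 fix: subprocess.check_output() returns bytes, so searching it for a str mount option needs an explicit encode (or, equivalently, a decode of the output). A minimal illustration of both spellings, with 'nosymfollow' standing in for any mount option:

import subprocess

def has_mount_option(output_bytes, option):
    """Check a bytes blob (e.g. from subprocess.check_output) for a str option."""
    # Fix used in the diff: encode the needle to bytes before searching.
    found_as_bytes = output_bytes.find(option.encode()) != -1
    # Equivalent alternative: decode the haystack once and search as str.
    found_as_str = option in output_bytes.decode(errors='replace')
    return found_as_bytes and found_as_str

# usage sketch:
# out = subprocess.check_output(['mount'])
# has_mount_option(out, 'nosymfollow')
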
diff --git a/client/site_tests/security_OpenSSLRegressions/__init__.py b/client/site_tests/security_OpenSSLRegressions/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/site_tests/security_OpenSSLRegressions/__init__.py
+++ /dev/null
diff --git a/client/site_tests/security_OpenSSLRegressions/control b/client/site_tests/security_OpenSSLRegressions/control
deleted file mode 100644
index 1edd237..0000000
--- a/client/site_tests/security_OpenSSLRegressions/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME='SHORT'
-AUTHOR = 'The Chromium OS Authors'
-DOC = """
-Ensures that local patches aren't dropped.
-"""
-NAME = 'security_OpenSSLRegressions'
-PURPOSE = """
-Ensure that local patches aren't dropped.
-"""
-CRITERIA = """
-Tests that local patches are not dropped when updating. See the source for which
-patches.
-"""
-TEST_CLASS = 'security'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-JOB_RETRIES = 2
-ATTRIBUTES = ""
-
-job.run_test('security_OpenSSLRegressions', opts=args)
diff --git a/client/site_tests/security_OpenSSLRegressions/security_OpenSSLRegressions.py b/client/site_tests/security_OpenSSLRegressions/security_OpenSSLRegressions.py
deleted file mode 100644
index a42490c..0000000
--- a/client/site_tests/security_OpenSSLRegressions/security_OpenSSLRegressions.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-OPENSSL = '/usr/bin/openssl'
-VERIFY = OPENSSL + ' verify'
-
-class security_OpenSSLRegressions(test.test):
-    version = 1
-
-    def verify(self):
-        r = os.system('%s %s' % (VERIFY, self.cert))
-        return r
-
-    def run_once(self, opts=None):
-        self.cert = '%s/cert.pem' % self.srcdir
-
-        # Checking for openssl-0.9.8r-verify-retcode.patch (see
-        # chromiumos-overlay:2ea51e44669062977689ff09a43ac8438f55673f).
-        if self.verify() == 0:
-            raise error.TestFail('Verify returned zero on error.')
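
The deleted security_OpenSSLRegressions test guarded a local patch by asserting that openssl verify exits non-zero when verification fails, rather than reporting the error only in its text output. A standalone sketch of that check (cert_path is whichever certificate is expected to fail verification, e.g. the cert.pem removed just below):

import subprocess

def verify_returns_error(cert_path):
    """True if `openssl verify` exits non-zero for a cert that must not verify."""
    result = subprocess.run(['/usr/bin/openssl', 'verify', cert_path],
                            capture_output=True, text=True)
    return result.returncode != 0
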
diff --git a/client/site_tests/security_OpenSSLRegressions/src/cert.pem b/client/site_tests/security_OpenSSLRegressions/src/cert.pem
deleted file mode 100644
index 0a5df13..0000000
--- a/client/site_tests/security_OpenSSLRegressions/src/cert.pem
+++ /dev/null
@@ -1,63 +0,0 @@
-Certificate:
-    Data:
-        Version: 3 (0x2)
-        Serial Number:
-            ac:c0:60:98:d5:b4:b5:19
-        Signature Algorithm: sha1WithRSAEncryption
-        Issuer: C=US, ST=California, O=Google, OU=Chrome OS, CN=OpenSSL Test CA/emailAddress=security@chromium.org
-        Validity
-            Not Before: May 11 17:25:36 2012 GMT
-            Not After : Jan  8 17:25:36 2112 GMT
-        Subject: C=US, ST=California, L=Mountain View, O=Google, OU=Chrome OS, CN=OpenSSL Test Certificate/emailAddress=security@chromium.org
-        Subject Public Key Info:
-            Public Key Algorithm: rsaEncryption
-            RSA Public Key: (1024 bit)
-                Modulus (1024 bit):
-                    00:c4:e5:64:01:4e:00:8d:40:e4:7b:b3:05:08:ce:
-                    15:93:5e:04:e8:d2:84:d1:fe:a9:ab:9b:44:92:57:
-                    8b:7d:12:23:55:19:33:a4:33:cb:4d:01:0a:64:91:
-                    ab:0d:4f:90:6a:e6:ea:01:51:2b:2e:10:3d:1e:86:
-                    a3:0d:3d:b0:94:d0:36:36:52:36:b4:7a:d3:05:20:
-                    83:70:ff:ef:ff:45:de:91:0b:73:0a:da:d2:93:50:
-                    b5:10:70:8c:dd:21:3e:8c:23:16:d1:65:e9:d7:6d:
-                    3d:04:53:5f:19:0d:90:f2:9c:ba:d4:1d:a9:ff:ea:
-                    50:71:57:d2:44:33:fc:d1:59
-                Exponent: 65537 (0x10001)
-        X509v3 extensions:
-            X509v3 Basic Constraints: 
-                CA:FALSE
-            Netscape Comment: 
-                OpenSSL Generated Certificate
-            X509v3 Subject Key Identifier: 
-                FF:B1:9E:B2:ED:62:40:F3:58:6B:2D:9C:05:FA:EE:86:91:39:26:32
-            X509v3 Authority Key Identifier: 
-                keyid:EC:CB:48:85:6F:F6:87:56:D9:69:3A:7E:F6:34:AF:AB:77:40:02:1B
-
-    Signature Algorithm: sha1WithRSAEncryption
-        06:0a:af:c7:66:21:37:7c:a9:83:1a:68:e6:d1:a6:83:df:b5:
-        94:94:bf:27:25:d7:7d:26:5f:51:b0:4e:46:5b:d9:b4:67:ea:
-        e0:86:74:38:2d:db:31:97:23:30:de:2c:86:f3:de:3b:9c:d8:
-        3e:fd:7f:18:a1:71:1e:3b:9d:4d:c8:77:e0:da:90:23:4c:6d:
-        c2:9f:56:5d:a6:d5:0d:f6:a2:af:55:06:5c:ba:1b:bc:3d:0c:
-        ee:db:99:64:46:5a:6d:9e:09:81:f4:51:ff:5d:b9:52:85:ce:
-        2e:44:29:01:26:79:c8:5e:38:3b:29:03:67:d0:cb:11:f2:a5:
-        f2:21
------BEGIN CERTIFICATE-----
-MIIDLDCCApWgAwIBAgIJAKzAYJjVtLUZMA0GCSqGSIb3DQEBBQUAMIGHMQswCQYD
-VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEPMA0GA1UEChMGR29vZ2xlMRIw
-EAYDVQQLEwlDaHJvbWUgT1MxGDAWBgNVBAMTD09wZW5TU0wgVGVzdCBDQTEkMCIG
-CSqGSIb3DQEJARYVc2VjdXJpdHlAY2hyb21pdW0ub3JnMCAXDTEyMDUxMTE3MjUz
-NloYDzIxMTIwMTA4MTcyNTM2WjCBqDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCkNh
-bGlmb3JuaWExFjAUBgNVBAcTDU1vdW50YWluIFZpZXcxDzANBgNVBAoTBkdvb2ds
-ZTESMBAGA1UECxMJQ2hyb21lIE9TMSEwHwYDVQQDExhPcGVuU1NMIFRlc3QgQ2Vy
-dGlmaWNhdGUxJDAiBgkqhkiG9w0BCQEWFXNlY3VyaXR5QGNocm9taXVtLm9yZzCB
-nzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAxOVkAU4AjUDke7MFCM4Vk14E6NKE
-0f6pq5tEkleLfRIjVRkzpDPLTQEKZJGrDU+QaubqAVErLhA9HoajDT2wlNA2NlI2
-tHrTBSCDcP/v/0XekQtzCtrSk1C1EHCM3SE+jCMW0WXp1209BFNfGQ2Q8py61B2p
-/+pQcVfSRDP80VkCAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYd
-T3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFP+xnrLtYkDz
-WGstnAX67oaROSYyMB8GA1UdIwQYMBaAFOzLSIVv9odW2Wk6fvY0r6t3QAIbMA0G
-CSqGSIb3DQEBBQUAA4GBAAYKr8dmITd8qYMaaObRpoPftZSUvycl130mX1GwTkZb
-2bRn6uCGdDgt2zGXIzDeLIbz3juc2D79fxihcR47nU3Id+DakCNMbcKfVl2m1Q32
-oq9VBly6G7w9DO7bmWRGWm2eCYH0Uf9duVKFzi5EKQEmecheODspA2fQyxHypfIh
------END CERTIFICATE-----
diff --git a/client/site_tests/security_ProcessManagementPolicy/control b/client/site_tests/security_ProcessManagementPolicy/control
deleted file mode 100644
index cd24072..0000000
--- a/client/site_tests/security_ProcessManagementPolicy/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME="SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-Linux provides no way to give a process the CAP_SETUID runtime capability
-without indescriminately allowing that process to change UID to any user on the
-system, including the root user. This is an obstacle to sandboxing system
-services in ChromeOS that spawn programs which setuid() to a different user.
-To solve this problem, we have added functionality to the ChromiumOS LSM which
-allows for configuring per-UID policies in ChromeOS that restrict which UIDs
-can be switched to by processes spawned under the restricted UID.
-"""
-NAME = "security_ProcessManagementPolicy"
-PURPOSE = """
-Prevent compromised non-root processes from being able to escalate
-privileges to root through a simple setuid() call.
-"""
-CRITERIA = """
-This autotest ensures that restricted users can only setuid() to UIDs approved
-by the security policy installed on the system.
-"""
-ATTRIBUTES = "suite:bvt-perbuild"
-TEST_CLASS = "security"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-job.run_test("security_ProcessManagementPolicy")
diff --git a/client/site_tests/security_ProcessManagementPolicy/security_ProcessManagementPolicy.py b/client/site_tests/security_ProcessManagementPolicy/security_ProcessManagementPolicy.py
deleted file mode 100644
index 3ed78b2..0000000
--- a/client/site_tests/security_ProcessManagementPolicy/security_ProcessManagementPolicy.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import subprocess
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class security_ProcessManagementPolicy(test.test):
-    """
-    Forks processes as non-root users and ensures the processes can change UID
-    to a user that is explicitly allowed in the system-wide whitelist, but no
-    other user.
-    """
-    version = 1
-
-    _WHITELIST_DICT = {
-        "cros-disks": set(("chronos", "fuse-exfat", "fuse-sshfs", "nobody",
-                           "ntfs-3g", "fuse-rar2fs", "fuse-smbfs", "fuse-zip")),
-        "shill": set(("dhcp", "ipsec", "openvpn", "syslog", "nobody")),
-    }
-
-    def __init__(self, *args, **kwargs):
-        version = utils.get_kernel_version()
-        if version == "3.8.11":
-            raise error.TestNAError('Test is n/a for kernels older than 3.10')
-        super(security_ProcessManagementPolicy,
-            self).__init__(*args, **kwargs)
-        self._failure = False
-
-    def cleanup(self):
-        """
-        Clean up the test environment.
-        """
-        super(security_ProcessManagementPolicy, self).cleanup()
-
-    def _fail(self, msg):
-        """
-        Log failure message and record failure.
-
-        @param msg: String to log.
-
-        """
-        logging.error(msg)
-        self._failure = True
-
-    def _test_setuid(self, parent, child, give_cap_setuid, expect_success):
-        if give_cap_setuid:
-            caps = "0xc0"
-        else:
-            caps = "0x0"
-        try:
-            subprocess.check_output(["/sbin/minijail0",
-                                            "-u",
-                                            parent,
-                                            "-g",
-                                            parent,
-                                            "-c",
-                                            caps,
-                                            "--",
-                                            "/sbin/capsh",
-                                            "--user=" + child,
-                                            "--",
-                                            "-c",
-                                            "/usr/bin/whoami"])
-
-        except subprocess.CalledProcessError, e:
-            if expect_success:
-                logging.error(" " + parent + " not able to setuid to " + child)
-                self._failure = True
-            return
-        if not expect_success:
-            logging.error(" " + parent + " able to setuid to " + child)
-            self._failure = True
-
-    def run_once(self):
-        """
-        Runs the test, spawning processes as users and checking setuid()
-        behavior.
-        """
-        for parent in self._WHITELIST_DICT:
-            for child in self._WHITELIST_DICT[parent]:
-                # Expect the setuid() call to be permitted
-                self._test_setuid(parent, child, True, True)
-                # Expect the setuid() call to be denied
-                self._test_setuid(parent, child, False, False)
-
-
-        # Make sure 'cros-disks' can't setuid() to 'root'
-        self._test_setuid("cros-disks", "root", True, False)
-        # Make sure 'shill' can't setuid() to 'chronos'
-        self._test_setuid("shill", "chronos", True, False)
-        # Make sure 'openvpn' can't setuid() to 'root'
-        self._test_setuid("openvpn", "root", True, False)
-        # Make sure 'ipsec' can't setuid() to 'root'
-        self._test_setuid("ipsec", "root", True, False)
-
-        # Make the test fail if any unexpected behaviour got detected. Note
-        # that the error log output that will be included in the failure
-        # message mentions the failed location to aid debugging.
-        if self._failure:
-            raise error.TestFail('Unexpected setuid() behavior')
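
The deleted security_ProcessManagementPolicy test probes the per-UID setuid policy by running capsh --user=<child> under minijail0 -u <parent> and treating a non-zero exit as "setuid denied". The probe itself is small; minijail0 and capsh are the same tools invoked above, and 0xc0 is CAP_SETUID|CAP_SETGID (bits 7 and 6):

import subprocess

def can_setuid(parent_user, child_user, with_cap_setuid=True):
    """Return True if a process running as parent_user can setuid to child_user.

    Mirrors the deleted probe: run capsh under minijail0 as parent_user and
    see whether switching to child_user succeeds (exit status 0).
    """
    caps = '0xc0' if with_cap_setuid else '0x0'
    cmd = ['/sbin/minijail0', '-u', parent_user, '-g', parent_user,
           '-c', caps, '--',
           '/sbin/capsh', '--user=' + child_user, '--', '-c',
           '/usr/bin/whoami']
    return subprocess.call(cmd, stdout=subprocess.DEVNULL,
                           stderr=subprocess.DEVNULL) == 0

# usage sketch (expected results on a device with the policy installed):
# can_setuid('cros-disks', 'nobody')  -> True
# can_setuid('cros-disks', 'root')    -> False
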
diff --git a/client/site_tests/security_RendererSandbox/control b/client/site_tests/security_RendererSandbox/control
deleted file mode 100644
index 71fed18..0000000
--- a/client/site_tests/security_RendererSandbox/control
+++ /dev/null
@@ -1,19 +0,0 @@
-AUTHOR = "Chromium OS Team"
-NAME = "security_RendererSandbox"
-PURPOSE = "Basic check to ensure renderer is sandboxed/jailed in Chromium OS."
-CRITERIA = """
-This test will fail if (a) no renderer is found or (b) the requested URL is not 
-sandboxed/jailed.
-"""
-TIME = "SHORT"
-ATTRIBUTES = "suite:security"
-TEST_CATEGORY = "Security"
-TEST_CLASS = "security"
-TEST_TYPE = "client"
-
-DOC = """
-This test simply checks if a page is loaded within the sandboxed/jailed 
-environment inside Chromium OS.
-"""
-
-job.run_test("security_RendererSandbox")
diff --git a/client/site_tests/security_RendererSandbox/security_RendererSandbox.py b/client/site_tests/security_RendererSandbox/security_RendererSandbox.py
deleted file mode 100644
index 66e1abd..0000000
--- a/client/site_tests/security_RendererSandbox/security_RendererSandbox.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import os
-import subprocess
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class security_RendererSandbox(test.test):
-    version = 1
-    renderer_pid = -1
-
-
-    def _get_renderer_pid(self):
-        """Query pgrep for the pid of the renderer. Since this function is
-        passed as an argument to |utils.poll_for_condition()|, the return values
-        are set to True/False depending on whether a pid has been found."""
-
-        pgrep = subprocess.Popen(['pgrep', '-f', '-l', 'type=renderer'],
-                                 stdout=subprocess.PIPE)
-        procs = pgrep.communicate()[0].splitlines()
-        pids = []
-        # The fix for http://code.google.com/p/chromium/issues/detail?id=129884
-        # adds '--ignored= --type=renderer' to the GPU process cmdline.
-        # This makes 'pgrep' above return the pid of the GPU process,
-        # which is not setuid sandboxed, as the pid of a renderer,
-        # breaking the test.
-        # Work around by removing processes with '--ignored= --type=renderer'
-        # flags.
-        for proc in procs:
-            if '--ignored= --type=renderer' not in proc:
-                pids.append(proc.split()[0])
-
-        if pids:
-            self.renderer_pid = pids[0]
-            return True
-        else:
-            return False
-
-
-    def _check_for_suid_sandbox(self, renderer_pid):
-        """For the setuid sandbox, make sure there is no content in the CWD
-        directory."""
-
-        cwd_contents = os.listdir('/proc/%s/cwd' % self.renderer_pid)
-        if len(cwd_contents) > 0:
-            raise error.TestFail('Contents present in the CWD directory')
-
-
-    def run_once(self, time_to_wait=20):
-        """Wait until the page is loaded and poll for the renderer pid.
-        If renderer pid is found, it is stored in |self.renderer_pid|."""
-
-        utils.poll_for_condition(
-            self._get_renderer_pid,
-            error.TestFail('Timed out waiting to obtain pid of renderer'),
-            time_to_wait)
-
-        # Check if renderer is sandboxed.
-        self._check_for_suid_sandbox(self.renderer_pid)
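
The deleted security_RendererSandbox test first polls pgrep for a renderer pid, filtering out the GPU process that also matches 'type=renderer' (see the comment above), then checks that /proc/<pid>/cwd is empty under the setuid sandbox. The pid-selection step as a standalone function, assuming pgrep -f -l prints the full command line as the deleted test relies on:

import subprocess

def find_renderer_pids():
    """Return pids of Chrome renderer processes, skipping the GPU process.

    The GPU process also carries '--ignored= --type=renderer' on its command
    line, so it must be filtered out.
    """
    try:
        out = subprocess.check_output(
                ['pgrep', '-f', '-l', 'type=renderer']).decode()
    except subprocess.CalledProcessError:   # pgrep exits 1 when nothing matches
        return []
    pids = []
    for line in out.splitlines():
        if '--ignored= --type=renderer' not in line:
            pids.append(int(line.split()[0]))
    return pids
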
diff --git a/client/site_tests/security_RootfsOwners/control b/client/site_tests/security_RootfsOwners/control
deleted file mode 100644
index 1bf6013..0000000
--- a/client/site_tests/security_RootfsOwners/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME="SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-Ensures there are no files owned by chronos/chronos-access on the rootfs.
-"""
-NAME = "security_RootfsOwners"
-PURPOSE = """
-Ensures there are no files owned by chronos/chronos-access on the rootfs.
-"""
-CRITERIA = """
-Fail if there are any files owned by chronos/chronos-access on the rootfs.
-"""
-ATTRIBUTES = "suite:bvt-perbuild"
-TEST_CLASS = "security"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-
-job.run_test("security_RootfsOwners")
diff --git a/client/site_tests/security_RootfsOwners/security_RootfsOwners.py b/client/site_tests/security_RootfsOwners/security_RootfsOwners.py
deleted file mode 100644
index 1940287..0000000
--- a/client/site_tests/security_RootfsOwners/security_RootfsOwners.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class security_RootfsOwners(test.test):
-    """Ensures there are no files owned by chronos/chronos-access on the rootfs.
-    """
-    version = 1
-
-    def run_once(self):
-        """
-        Do a find on the system for rootfs files owned by chronos
-        or chronos-access. Fail if there are any.
-        """
-        cmd = 'find / -xdev -user chronos -print -o -user chronos-access -print'
-        cmd_output = utils.system_output(cmd, ignore_status=True)
-
-        if (cmd_output != ''):
-            logging.error('chronos-/chronos-access-owned files:')
-            logging.error(cmd_output)
-            raise error.TestFail(
-                'Rootfs contains files owned by chronos or chronos-access, '
-                'see error log')
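
The deleted security_RootfsOwners check is a single find over the root filesystem; -xdev keeps it from crossing into stateful or other mounts. A standalone sketch that returns the offending paths instead of logging them:

import subprocess

def chronos_owned_rootfs_files():
    """Return rootfs paths owned by chronos or chronos-access (ideally none)."""
    cmd = ['find', '/', '-xdev',
           '-user', 'chronos', '-print',
           '-o', '-user', 'chronos-access', '-print']
    # Ignore the exit status, as the original did, since find may report
    # permission errors while still printing matches.
    proc = subprocess.run(cmd, capture_output=True, text=True)
    return [p for p in proc.stdout.splitlines() if p]

# usage sketch: assert not chronos_owned_rootfs_files()
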
diff --git a/client/site_tests/security_SandboxLinuxUnittests/control b/client/site_tests/security_SandboxLinuxUnittests/control
deleted file mode 100644
index f98bd6f..0000000
--- a/client/site_tests/security_SandboxLinuxUnittests/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "The Chromium OS Authors"
-NAME = "security_SandboxLinuxUnittests"
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "security"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-DOC = """
-Runs sandbox_linux_unittests.
-"""
-
-job.run_test('security_SandboxLinuxUnittests')
diff --git a/client/site_tests/security_SandboxLinuxUnittests/security_SandboxLinuxUnittests.py b/client/site_tests/security_SandboxLinuxUnittests/security_SandboxLinuxUnittests.py
deleted file mode 100644
index bfd5620..0000000
--- a/client/site_tests/security_SandboxLinuxUnittests/security_SandboxLinuxUnittests.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import logging
-import os
-
-from autotest_lib.client.cros import chrome_binary_test
-
-
-class security_SandboxLinuxUnittests(chrome_binary_test.ChromeBinaryTest):
-    """Runs sandbox_linux_unittests."""
-
-    version = 1
-    BINARY = 'sandbox_linux_unittests'
-    CRASH_DIR = '/var/spool/crash'
-
-
-    def run_once(self):
-        self.run_chrome_test_binary(self.BINARY)
-        crash_pattern = os.path.join(self.CRASH_DIR, self.BINARY + '*')
-        for filename in glob.glob(crash_pattern):
-            try:
-                os.remove(filename)
-            except OSError as ose:
-                logging.warning('Could not remove crash dump: %s', ose)
diff --git a/client/site_tests/security_SeccompSyscallFilters/control b/client/site_tests/security_SeccompSyscallFilters/control
deleted file mode 100644
index bd99dcf..0000000
--- a/client/site_tests/security_SeccompSyscallFilters/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "security_SeccompSyscallFilters"
-TIME = "SHORT"
-AUTHOR = "chromeos-security@google.com"
-DOC = """
-Runs tests to verify that seccomp calls make correct changes with
-different settings
-"""
-PURPOSE = "To verify that seccomp is correctly changing permissions"
-CRITERIA = "Permissions are changed to expected values after seccomp calls"
-ATTRIBUTES = "suite:security"
-TEST_CLASS = "security"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-
-job.run_test('security_SeccompSyscallFilters')
diff --git a/client/site_tests/security_SeccompSyscallFilters/security_SeccompSyscallFilters.py b/client/site_tests/security_SeccompSyscallFilters/security_SeccompSyscallFilters.py
deleted file mode 100644
index 6b30d87..0000000
--- a/client/site_tests/security_SeccompSyscallFilters/security_SeccompSyscallFilters.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-
-import os
-
-"""A test verifying that seccomp calls change permissions correctly.
-
-Compiles tests written in C and then runs them.  Fails if C tests fail.
-"""
-
-class security_SeccompSyscallFilters(test.test):
-    version = 1
-    executable = 'seccomp_bpf_tests'
-
-    def setup(self):
-        """Cleans and makes seccomp_bpf_tests.c.
-
-        Prepares environment for tests by removing directory we will extract
-        to (if it exists), extracting tarball of tests, and making them.
-        """
-        os.chdir(self.srcdir)
-        utils.make()
-
-    def run_once(self):
-        """Main function.
-
-        Runs the compiled tests, logs output.  Fails if the call to run
-        tests fails (meaning that a test failed). Runs both as root
-        and non-root.
-        """
-        binpath = os.path.join(self.srcdir, self.executable)
-        utils.system_output(binpath, retain_output = True)
-        utils.system_output("su chronos -c %s" % binpath, retain_output = True)
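
The deleted wrapper runs the compiled seccomp binary twice, once as root and once as chronos, because some of the C tests behave differently with and without privileges (for example, installing a filter without no_new_privs). A standalone sketch of that double run:

import subprocess

def run_as_root_and_chronos(binpath):
    """Run a test binary as root and again as chronos, returning both outputs.

    Either call raises subprocess.CalledProcessError on a non-zero exit,
    which is how the deleted wrapper surfaced a failing test.
    """
    as_root = subprocess.check_output([binpath], stderr=subprocess.STDOUT)
    as_chronos = subprocess.check_output(['su', 'chronos', '-c', binpath],
                                         stderr=subprocess.STDOUT)
    return (as_root.decode(errors='replace'),
            as_chronos.decode(errors='replace'))
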
diff --git a/client/site_tests/security_SeccompSyscallFilters/src/Makefile b/client/site_tests/security_SeccompSyscallFilters/src/Makefile
deleted file mode 100644
index 4f66a75..0000000
--- a/client/site_tests/security_SeccompSyscallFilters/src/Makefile
+++ /dev/null
@@ -1,17 +0,0 @@
-EXEC=seccomp_bpf_tests
-
-all: $(EXEC)
-
-clean:
-	rm -f $(EXEC)
-
-seccomp_bpf_tests: seccomp_bpf_tests.c test_harness.h
-	$(CC) seccomp_bpf_tests.c -o seccomp_bpf_tests $(CFLAGS) $(CPPFLAGS) $(LDFLAGS) -pthread
-
-resumption: resumption.c test_harness.h
-	$(CC) $^ -o $@ $(CFLAGS) $(CPPFLAGS) $(LDFLAGS) -ggdb3
-
-sigsegv: sigsegv.c test_harness.h
-	$(CC) $^ -o $@ $(CFLAGS) $(CPPFLAGS) $(LDFLAGS) -ggdb3
-
-.PHONY: clean
diff --git a/client/site_tests/security_SeccompSyscallFilters/src/resumption.c b/client/site_tests/security_SeccompSyscallFilters/src/resumption.c
deleted file mode 100644
index 3641bd3..0000000
--- a/client/site_tests/security_SeccompSyscallFilters/src/resumption.c
+++ /dev/null
@@ -1,216 +0,0 @@
-/* seccomp_bpf_tests.c
- * Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- *
- * Test code for seccomp bpf.
- */
-
-#include <asm/siginfo.h>
-#define __have_siginfo_t 1
-#define __have_sigval_t 1
-#define __have_sigevent_t 1
-
-#include <linux/filter.h>
-#include <linux/prctl.h>
-#include <linux/seccomp.h>
-#include <stddef.h>
-#include <stdbool.h>
-#include <string.h>
-#include <syscall.h>
-#define __USE_GNU 1
-#include <sys/ucontext.h>
-#include <sys/mman.h>
-
-#include "test_harness.h"
-
-#ifndef PR_SET_NO_NEW_PRIVS
-#define PR_SET_NO_NEW_PRIVS 38
-#define PR_GET_NO_NEW_PRIVS 39
-#endif
-
-#if defined(__i386__)
-#define REG_IP	REG_EIP
-#define REG_SP	REG_ESP
-#define REG_RESULT	REG_EAX
-#define REG_SYSCALL	REG_EAX
-#define REG_ARG0	REG_EBX
-#define REG_ARG1	REG_ECX
-#define REG_ARG2	REG_EDX
-#define REG_ARG3	REG_ESI
-#define REG_ARG4	REG_EDI
-#define REG_ARG5	REG_EBP
-#elif defined(__x86_64__)
-#define REG_IP	REG_RIP
-#define REG_SP	REG_RSP
-#define REG_RESULT	REG_RAX
-#define REG_SYSCALL	REG_RAX
-#define REG_ARG0	REG_RDI
-#define REG_ARG1	REG_RSI
-#define REG_ARG2	REG_RDX
-#define REG_ARG3	REG_R10
-#define REG_ARG4	REG_R8
-#define REG_ARG5	REG_R9
-#endif
-
-FIXTURE_DATA(TRAP) {
-	struct sock_fprog prog;
-};
-
-/* XXX: will need one per arch, etc.
- *      thankfully _arch can tell us the calling convention!
- */
-extern void *thunk_ip;	/* label for the instruction _after_ syscall */
-static void syscall_thunk(void)
-{
-	asm("syscall; thunk_ip:");
-}
-
-static time_t vsyscall_time(time_t *p)
-{
-	register time_t t asm ("rax");
-	register time_t *p1 asm ("rdi") = p;
-	__asm__("call 0xffffffffff600400 \n");
-	return t;
-}
-
-
-#if 0
-/* For instance, we could jump here instead. */
-static void compat_thunk(void)
-{
-	asm("int 0x80");
-}
-#endif
-
-FIXTURE_SETUP(TRAP) {
-	/* instruction after the syscall. Will be arch specific, of course. */
-	unsigned long thunk_addr = (unsigned long)&thunk_ip;
-	TH_LOG("Thunk: 0x%lX\n", thunk_addr);
-	{
-		struct sock_filter filter[] = {
-			BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-				offsetof(struct seccomp_data, nr)),
-			/* Whitelist anything you might need in the sigaction */
-#ifdef __NR_sigreturn
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_sigreturn, 3, 0),
-#endif
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_exit, 2, 0),
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_rt_sigreturn, 1, 0),
-			/* Allow __NR_write so easy logging. */
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_write, 0, 1),
-			BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-			/* Check if we're within the thunk. */
-			BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-				offsetof(struct seccomp_data, instruction_pointer)),
-			/* XXX: make this 32-bit friendly. */
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, ((__u32*)&thunk_addr)[0], 0, 3),
-			BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-				offsetof(struct seccomp_data, instruction_pointer)+sizeof(int)),
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, ((__u32*)&thunk_addr)[1], 0, 1),
-			BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-			BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_TRAP),
-		};
-		memset(&self->prog, 0, sizeof(self->prog));
-		self->prog.filter = malloc(sizeof(filter));
-		ASSERT_NE(NULL, self->prog.filter);
-		memcpy(self->prog.filter, filter, sizeof(filter));
-		self->prog.len = (unsigned short)(sizeof(filter)/sizeof(filter[0]));
-	}
-}
-
-FIXTURE_TEARDOWN(TRAP) {
-	if (self->prog.filter)
-		free(self->prog.filter);
-};
-
-struct arch_sigsys {
-		void *_call_addr; /* calling user insn */
-		int _syscall;	/* triggering system call number */
-		unsigned int _arch;	/* AUDIT_ARCH_* of syscall */
-};
-
-static void TRAP_action(int nr, siginfo_t *info, void *void_context)
-{
-	ucontext_t *ctx = (ucontext_t *)void_context;
-	char buf[256];
-	int len;
-	int do_ret = 1;
-	struct arch_sigsys *sys = (struct arch_sigsys *)
-#ifdef si_syscall
-		&(info->si_call_addr);
-#else
-		&(info->si_pid);
-#endif
-
-	if (info->si_code != SYS_SECCOMP)
-		return;
-	if (!ctx)
-		return;
-	len = snprintf(buf, sizeof(buf),
-			"@0x%lX:%X:%d:0x%lX:0x%lX:0x%lX:0x%lX:0x%lX:0x%lX\n",
-			(unsigned long)sys->_call_addr,
-			sys->_arch,
-			sys->_syscall,
-			ctx->uc_mcontext.gregs[REG_ARG0],
-			ctx->uc_mcontext.gregs[REG_ARG1],
-			ctx->uc_mcontext.gregs[REG_ARG2],
-			ctx->uc_mcontext.gregs[REG_ARG3],
-			ctx->uc_mcontext.gregs[REG_ARG4],
-			ctx->uc_mcontext.gregs[REG_ARG5]);
-	/* Send the soft-fail to our "listener" */
-	syscall(__NR_write, STDOUT_FILENO, buf, len);
-	if (ctx->uc_mcontext.gregs[REG_IP] >= 0xffffffffff600000ULL &&
-	    ctx->uc_mcontext.gregs[REG_IP] < 0xffffffffff601000ULL)
-		do_ret = 0;
-	if (do_ret) {
-		/* push [REG_IP] */
-		ctx->uc_mcontext.gregs[REG_SP] -= sizeof(unsigned long);
-		*((unsigned long *)ctx->uc_mcontext.gregs[REG_SP]) =
-		    ctx->uc_mcontext.gregs[REG_IP];
-	}
-	/* jmp syscall_thunk */
-	ctx->uc_mcontext.gregs[REG_IP] = (unsigned long)syscall_thunk;
-	return;
-}
-
-TEST_F(TRAP, handler) {
-	int ret;
-	struct sigaction act;
-	pid_t pid;
-	sigset_t mask;
-	memset(&act, 0, sizeof(act));
-	sigemptyset(&mask);
-	sigaddset(&mask, SIGSYS);
-
-	act.sa_sigaction = &TRAP_action;
-	act.sa_flags = SA_SIGINFO;
-	ret = sigaction(SIGSYS, &act, NULL);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("sigaction failed");
-	}
-	ret = sigprocmask(SIG_UNBLOCK, &mask, NULL);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("sigprocmask failed");
-	}
-
-	/* Get the pid to compare against. */
-	pid = getpid();
-
-	ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->prog);
-	ASSERT_EQ(0, ret);
-
-	/* Call anything! */
-	ret = syscall(__NR_getpid);
-	ASSERT_EQ(pid, ret);
-	ret = syscall(__NR_close, 0);
-	ASSERT_EQ(0, ret);
-	ret = syscall(__NR_close, 0);
-	ASSERT_EQ(-1, ret);
-	printf("The time is %ld\n", vsyscall_time(NULL));
-	ASSERT_LT(0, vsyscall_time(NULL));
-}
-
-TEST_HARNESS_MAIN
diff --git a/client/site_tests/security_SeccompSyscallFilters/src/seccomp_bpf_tests.c b/client/site_tests/security_SeccompSyscallFilters/src/seccomp_bpf_tests.c
deleted file mode 100644
index 821bb3e..0000000
--- a/client/site_tests/security_SeccompSyscallFilters/src/seccomp_bpf_tests.c
+++ /dev/null
@@ -1,1344 +0,0 @@
-/* seccomp_bpf_tests.c
- * Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- *
- * Test code for seccomp bpf.
- */
-
-#include <errno.h>
-#include <linux/filter.h>
-#include <linux/prctl.h>
-#include <linux/ptrace.h>
-#include <linux/seccomp.h>
-#include <pthread.h>
-#include <semaphore.h>
-#include <signal.h>
-#include <stddef.h>
-#include <stdbool.h>
-#include <string.h>
-#include <syscall.h>
-
-#define _GNU_SOURCE
-#include <unistd.h>
-#include <sys/syscall.h>
-
-#include "test_harness.h"
-
-#ifndef PR_SET_PTRACER
-# define PR_SET_PTRACER 0x59616d61
-#endif
-
-#ifndef PR_SET_NO_NEW_PRIVS
-#define PR_SET_NO_NEW_PRIVS 38
-#define PR_GET_NO_NEW_PRIVS 39
-#endif
-
-#ifndef PR_SECCOMP_EXT
-#define PR_SECCOMP_EXT 43
-#endif
-
-#ifndef SECCOMP_EXT_ACT
-#define SECCOMP_EXT_ACT 1
-#endif
-
-#ifndef SECCOMP_EXT_ACT_TSYNC
-#define SECCOMP_EXT_ACT_TSYNC 1
-#endif
-
-#ifndef SECCOMP_MODE_STRICT
-#define SECCOMP_MODE_STRICT 1
-#endif
-
-#ifndef SECCOMP_MODE_FILTER
-#define SECCOMP_MODE_FILTER 2
-#endif
-
-#ifndef SECCOMP_RET_KILL
-#define SECCOMP_RET_KILL        0x00000000U // kill the task immediately
-#define SECCOMP_RET_TRAP        0x00030000U // disallow and force a SIGSYS
-#define SECCOMP_RET_ERRNO       0x00050000U // returns an errno
-#define SECCOMP_RET_TRACE       0x7ff00000U // pass to a tracer or disallow
-#define SECCOMP_RET_ALLOW       0x7fff0000U // allow
-
-/* Masks for the return value sections. */
-#define SECCOMP_RET_ACTION      0x7fff0000U
-#define SECCOMP_RET_DATA        0x0000ffffU
-
-struct seccomp_data {
-	int nr;
-	__u32 arch;
-	__u64 instruction_pointer;
-	__u64 args[6];
-};
-#endif
-
-#define syscall_arg(_n) (offsetof(struct seccomp_data, args[_n]))
-
-#define SIBLING_EXIT_UNKILLED	0xbadbeef
-#define SIBLING_EXIT_FAILURE	0xbadface
-
-TEST(mode_strict_support) {
-	long ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_STRICT, NULL, NULL, NULL);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Kernel does not support CONFIG_SECCOMP");
-	}
-	syscall(__NR_exit, 1);
-}
-
-TEST_SIGNAL(mode_strict_cannot_call_prctl, SIGKILL) {
-	long ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_STRICT, NULL, NULL, NULL);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Kernel does not support CONFIG_SECCOMP");
-	}
-	syscall(__NR_prctl, PR_SET_SECCOMP, SECCOMP_MODE_FILTER, NULL, NULL, NULL);
-	EXPECT_FALSE(true) {
-		TH_LOG("Unreachable!");
-	}
-}
-
-/* Note! This doesn't test no new privs behavior */
-TEST(no_new_privs_support) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	EXPECT_EQ(0, ret) {
-		TH_LOG("Kernel does not support PR_SET_NO_NEW_PRIVS!");
-	}
-}
-
-/* Tests kernel support by checking for a copy_from_user() fault on * NULL. */
-TEST(mode_filter_support) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, NULL, 0, 0);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Kernel does not support PR_SET_NO_NEW_PRIVS!");
-	}
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, NULL, NULL, NULL);
-	EXPECT_EQ(-1, ret);
-	EXPECT_EQ(EFAULT, errno) {
-		TH_LOG("Kernel does not support CONFIG_SECCOMP_FILTER!");
-	}
-}
-
-TEST(mode_filter_without_nnp) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-	long ret = prctl(PR_GET_NO_NEW_PRIVS, 0, NULL, 0, 0);
-	ASSERT_LE(0, ret) {
-		TH_LOG("Expected 0 or unsupported for NO_NEW_PRIVS");
-	}
-	errno = 0;
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog, 0, 0);
-	/* Succeeds with CAP_SYS_ADMIN, fails without */
-	/* TODO(wad) check caps not euid */
-	if (geteuid()) {
-		EXPECT_EQ(-1, ret);
-		EXPECT_EQ(EACCES, errno);
-	} else {
-		EXPECT_EQ(0, ret);
-	}
-}
-
-TEST(mode_filter_cannot_move_to_strict) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_STRICT, NULL, 0, 0);
-	EXPECT_EQ(-1, ret);
-	EXPECT_EQ(EINVAL, errno);
-}
-
-
-TEST(ALLOW_all) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	ASSERT_EQ(0, ret);
-}
-
-TEST(empty_prog) {
-	struct sock_filter filter[] = {
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	EXPECT_EQ(-1, ret);
-	EXPECT_EQ(EINVAL, errno);
-}
-
-TEST_SIGNAL(unknown_ret_is_kill_inside, SIGSYS) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_RET+BPF_K, 0x10000000U),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	ASSERT_EQ(0, ret);
-	EXPECT_EQ(0, syscall(__NR_getpid)) {
-		TH_LOG("getpid() shouldn't ever return");
-	}
-}
-
-/* return code >= 0x80000000 is unused. */
-TEST_SIGNAL(unknown_ret_is_kill_above_allow, SIGSYS) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_RET+BPF_K, 0x90000000U),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	ASSERT_EQ(0, ret);
-	EXPECT_EQ(0, syscall(__NR_getpid)) {
-		TH_LOG("getpid() shouldn't ever return");
-	}
-}
-
-TEST_SIGNAL(KILL_all, SIGSYS) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_KILL),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	ASSERT_EQ(0, ret);
-}
-
-TEST_SIGNAL(KILL_one, SIGSYS) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_getpid, 0, 1),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_KILL),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	ASSERT_EQ(0, ret);
-
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	/* getpid() should never return. */
-	EXPECT_EQ(0, syscall(__NR_getpid));
-}
-
-TEST_SIGNAL(KILL_one_arg_one, SIGSYS) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_getpid, 1, 0),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-		/* Only bother with the lower 32 bits for now. */
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS, syscall_arg(0)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, 0x0C0FFEE, 0, 1),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_KILL),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	pid_t pid = getpid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	ASSERT_EQ(0, ret);
-
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	EXPECT_EQ(pid, syscall(__NR_getpid));
-	/* getpid() should never return. */
-	EXPECT_EQ(0, syscall(__NR_getpid, 0x0C0FFEE));
-}
-
-TEST_SIGNAL(KILL_one_arg_six, SIGSYS) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_getpid, 1, 0),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-		/* Only bother with the lower 32 bits for now. */
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS, syscall_arg(5)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, 0x0C0FFEE, 0, 1),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_KILL),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	pid_t pid = getpid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	ASSERT_EQ(0, ret);
-
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	EXPECT_EQ(pid, syscall(__NR_getpid));
-	/* getpid() should never return. */
-	EXPECT_EQ(0, syscall(__NR_getpid, 1, 2, 3, 4, 5, 0x0C0FFEE));
-}
-
-/* TODO(wad) add 64-bit versus 32-bit arg tests. */
-
-TEST(arg_out_of_range) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS, syscall_arg(6)),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	EXPECT_EQ(-1, ret);
-	EXPECT_EQ(EINVAL, errno);
-}
-
-TEST(ERRNO_one) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_read, 0, 1),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ERRNO | E2BIG),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	ASSERT_EQ(0, ret);
-
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	EXPECT_EQ(-1, read(0, NULL, 0));
-	EXPECT_EQ(E2BIG, errno);
-}
-
-TEST(ERRNO_one_ok) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_read, 0, 1),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ERRNO | 0),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &prog);
-	ASSERT_EQ(0, ret);
-
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	/* "errno" of 0 is ok. */
-	EXPECT_EQ(0, read(0, NULL, 0));
-}
-
-FIXTURE_DATA(TRAP) {
-	struct sock_fprog prog;
-};
-
-FIXTURE_SETUP(TRAP) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_getpid, 0, 1),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_TRAP),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	memset(&self->prog, 0, sizeof(self->prog));
-	self->prog.filter = malloc(sizeof(filter));
-	ASSERT_NE(NULL, self->prog.filter);
-	memcpy(self->prog.filter, filter, sizeof(filter));
-	self->prog.len = (unsigned short)(sizeof(filter)/sizeof(filter[0]));
-}
-
-FIXTURE_TEARDOWN(TRAP) {
-	if (self->prog.filter)
-		free(self->prog.filter);
-};
-
-TEST_F_SIGNAL(TRAP, dfl, SIGSYS) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->prog);
-	ASSERT_EQ(0, ret);
-	syscall(__NR_getpid);
-}
-
-/* Ensure that SIGSYS overrides SIG_IGN */
-TEST_F_SIGNAL(TRAP, ign, SIGSYS) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	signal(SIGSYS, SIG_IGN);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->prog);
-	ASSERT_EQ(0, ret);
-	syscall(__NR_getpid);
-}
-
-static siginfo_t TRAP_info;
-static volatile int TRAP_nr;
-static void TRAP_action(int nr, siginfo_t *info, void *void_context)
-{
-	memcpy(&TRAP_info, info, sizeof(TRAP_info));
-	TRAP_nr = nr;
-	return;
-}
-
-TEST_F(TRAP, handler) {
-	int ret, test;
-	struct sigaction act;
-	sigset_t mask;
-	memset(&act, 0, sizeof(act));
-	sigemptyset(&mask);
-	sigaddset(&mask, SIGSYS);
-
-	act.sa_sigaction = &TRAP_action;
-	act.sa_flags = SA_SIGINFO;
-	ret = sigaction(SIGSYS, &act, NULL);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("sigaction failed");
-	}
-	ret = sigprocmask(SIG_UNBLOCK, &mask, NULL);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("sigprocmask failed");
-	}
-
-	ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->prog);
-	ASSERT_EQ(0, ret);
-	TRAP_nr = 0;
-	memset(&TRAP_info, 0, sizeof(TRAP_info));
-	/* Expect the registers to be rolled back. (nr = error) may vary
-	 * based on arch. */
-	ret = syscall(__NR_getpid);
-	/* Silence gcc warning about volatile. */
-	test = TRAP_nr;
-	EXPECT_EQ(SIGSYS, test);
-	struct local_sigsys {
-			void *_call_addr; /* calling user insn */
-			int _syscall;	/* triggering system call number */
-			unsigned int _arch;	/* AUDIT_ARCH_* of syscall */
-	} *sigsys = (struct local_sigsys *)
-#ifdef si_syscall
-		&(TRAP_info.si_call_addr);
-#else
-		&TRAP_info.si_pid;
-#endif
-	EXPECT_EQ(__NR_getpid, sigsys->_syscall);
-	/* Make sure arch is non-zero. */
-	EXPECT_NE(0, sigsys->_arch);
-	EXPECT_NE(0, (unsigned long)sigsys->_call_addr);
-}
-
-FIXTURE_DATA(precedence) {
-	struct sock_fprog allow;
-	struct sock_fprog trace;
-	struct sock_fprog error;
-	struct sock_fprog trap;
-	struct sock_fprog kill;
-};
-
-FIXTURE_SETUP(precedence) {
-	struct sock_filter allow_insns[] = {
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_filter trace_insns[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_getpid, 1, 0),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_TRACE),
-	};
-	struct sock_filter error_insns[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_getpid, 1, 0),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ERRNO),
-	};
-	struct sock_filter trap_insns[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_getpid, 1, 0),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_TRAP),
-	};
-	struct sock_filter kill_insns[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_getpid, 1, 0),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_KILL),
-	};
-	memset(self, 0, sizeof(*self));
-#define FILTER_ALLOC(_x) \
-	self->_x.filter = malloc(sizeof(_x##_insns)); \
-	ASSERT_NE(NULL, self->_x.filter); \
-	memcpy(self->_x.filter, &_x##_insns, sizeof(_x##_insns)); \
-	self->_x.len = (unsigned short)(sizeof(_x##_insns)/sizeof(_x##_insns[0]))
-	FILTER_ALLOC(allow);
-	FILTER_ALLOC(trace);
-	FILTER_ALLOC(error);
-	FILTER_ALLOC(trap);
-	FILTER_ALLOC(kill);
-}
-
-FIXTURE_TEARDOWN(precedence) {
-#define FILTER_FREE(_x) if (self->_x.filter) free(self->_x.filter)
-	FILTER_FREE(allow);
-	FILTER_FREE(trace);
-	FILTER_FREE(error);
-	FILTER_FREE(trap);
-	FILTER_FREE(kill);
-}
-
-TEST_F(precedence, allow_ok) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	pid_t res = 0;
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->allow);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trace);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->error);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trap);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->kill);
-	ASSERT_EQ(0, ret);
-	/* Should work just fine. */
-	res = syscall(__NR_getppid);
-	EXPECT_EQ(parent, res);
-}
-
-TEST_F_SIGNAL(precedence, kill_is_highest, SIGSYS) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	pid_t res = 0;
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->allow);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trace);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->error);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trap);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->kill);
-	ASSERT_EQ(0, ret);
-	/* Should work just fine. */
-	res = syscall(__NR_getppid);
-	EXPECT_EQ(parent, res);
-	/* getpid() should never return. */
-	res = syscall(__NR_getpid);
-	EXPECT_EQ(0, res);
-}
-
-TEST_F_SIGNAL(precedence, kill_is_highest_in_any_order, SIGSYS) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->allow);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->kill);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->error);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trace);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trap);
-	ASSERT_EQ(0, ret);
-	/* Should work just fine. */
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	/* getpid() should never return. */
-	EXPECT_EQ(0, syscall(__NR_getpid));
-}
-
-TEST_F_SIGNAL(precedence, trap_is_second, SIGSYS) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->allow);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trace);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->error);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trap);
-	ASSERT_EQ(0, ret);
-	/* Should work just fine. */
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	/* getpid() should never return. */
-	EXPECT_EQ(0, syscall(__NR_getpid));
-}
-
-TEST_F_SIGNAL(precedence, trap_is_second_in_any_order, SIGSYS) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->allow);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trap);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trace);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->error);
-	ASSERT_EQ(0, ret);
-	/* Should work just fine. */
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	/* getpid() should never return. */
-	EXPECT_EQ(0, syscall(__NR_getpid));
-}
-
-TEST_F(precedence, errno_is_third) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->allow);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trace);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->error);
-	ASSERT_EQ(0, ret);
-	/* Should work just fine. */
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	EXPECT_EQ(0, syscall(__NR_getpid));
-}
-
-TEST_F(precedence, errno_is_third_in_any_order) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->error);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trace);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->allow);
-	ASSERT_EQ(0, ret);
-	/* Should work just fine. */
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	EXPECT_EQ(0, syscall(__NR_getpid));
-}
-
-TEST_F(precedence, trace_is_fourth) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->allow);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trace);
-	ASSERT_EQ(0, ret);
-	/* Should work just fine. */
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	/* No ptracer */
-	EXPECT_EQ(-1, syscall(__NR_getpid));
-}
-
-TEST_F(precedence, trace_is_fourth_in_any_order) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	pid_t parent = getppid();
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->trace);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->allow);
-	ASSERT_EQ(0, ret);
-	/* Should work just fine. */
-	EXPECT_EQ(parent, syscall(__NR_getppid));
-	/* No ptracer */
-	EXPECT_EQ(-1, syscall(__NR_getpid));
-}
-
-#ifndef PTRACE_O_TRACESECCOMP
-#define PTRACE_O_TRACESECCOMP	0x00000080
-#endif
-
-/* Catch the Ubuntu 12.04 value error. */
-#if PTRACE_EVENT_SECCOMP != 7
-#undef PTRACE_EVENT_SECCOMP
-#endif
-
-#ifndef PTRACE_EVENT_SECCOMP
-#define PTRACE_EVENT_SECCOMP 7
-#endif
-
-#define IS_SECCOMP_EVENT(status) ((status >> 16) == PTRACE_EVENT_SECCOMP)
-bool tracer_running;
-void tracer_stop(int sig)
-{
-	tracer_running = false;
-}
-void tracer(struct __test_metadata *_metadata, pid_t tracee,
-	    unsigned long poke_addr, int fd) {
-	int ret = -1;
-	struct sigaction action = {
-		.sa_handler = tracer_stop,
-	};
-
-	/* Allow external shutdown. */
-	tracer_running = true;
-	ASSERT_EQ(0, sigaction(SIGUSR1, &action, NULL));
-
-	errno = 0;
-	while (ret == -1 && errno != EINVAL) {
-		ret = ptrace(PTRACE_ATTACH, tracee, NULL, 0);
-	}
-	ASSERT_EQ(0, ret) {
-		kill(tracee, SIGKILL);
-	}
-	/* Wait for attach stop */
-	wait(NULL);
-
-	ret = ptrace(PTRACE_SETOPTIONS, tracee, NULL, PTRACE_O_TRACESECCOMP);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Failed to set PTRACE_O_TRACESECCOMP");
-		kill(tracee, SIGKILL);
-	}
-	ptrace(PTRACE_CONT, tracee, NULL, 0);
-
-	/* Unblock the tracee */
-	ASSERT_EQ(1, write(fd, "A", 1));
-	ASSERT_EQ(0, close(fd));
-
-	/* Run until we're shut down. */
-	while (tracer_running) {
-		int status;
-		unsigned long msg;
-		if (wait(&status) != tracee)
-			continue;
-		if (WIFSIGNALED(status) || WIFEXITED(status))
-			/* Child is dead. Time to go. */
-			return;
-
-		/* Make sure this is a seccomp event. */
-		EXPECT_EQ(true, IS_SECCOMP_EVENT(status));
-
-		ret = ptrace(PTRACE_GETEVENTMSG, tracee, NULL, &msg);
-		EXPECT_EQ(0, ret);
-		/* If this fails, don't try to recover. */
-		ASSERT_EQ(0x1001, msg) {
-			kill(tracee, SIGKILL);
-		}
-		/*
-		 * Poke in the message.
-		 * Registers are not touched to try to keep this relatively arch
-		 * agnostic.
-		 */
-		ret = ptrace(PTRACE_POKEDATA, tracee, poke_addr, 0x1001);
-		EXPECT_EQ(0, ret);
-		ret = ptrace(PTRACE_CONT, tracee, NULL, NULL);
-		EXPECT_EQ(0, ret);
-	}
-	/* Directly report the status of our test harness results. */
-	syscall(__NR_exit, _metadata->passed ? EXIT_SUCCESS : EXIT_FAILURE);
-}
-
-FIXTURE_DATA(TRACE) {
-	struct sock_fprog prog;
-	pid_t tracer;
-	long poked;
-};
-
-void cont_handler(int num) {
-}
-
-FIXTURE_SETUP(TRACE) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_read, 0, 1),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_TRACE | 0x1001),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	int pipefd[2];
-	char sync;
-	pid_t tracer_pid;
-	pid_t tracee = getpid();
-	unsigned long poke_addr = (unsigned long)&self->poked;
-	self->poked = 0;
-	memset(&self->prog, 0, sizeof(self->prog));
-	self->prog.filter = malloc(sizeof(filter));
-	ASSERT_NE(NULL, self->prog.filter);
-	memcpy(self->prog.filter, filter, sizeof(filter));
-	self->prog.len = (unsigned short)(sizeof(filter)/sizeof(filter[0]));
-
-	/* Setup a pipe for clean synchronization. */
-	ASSERT_EQ(0, pipe(pipefd));
-
-	/* Fork a child which we'll promote to tracer */
-	tracer_pid = fork();
-	ASSERT_LE(0, tracer_pid);
-	signal(SIGALRM, cont_handler);
-	if (tracer_pid == 0) {
-		close(pipefd[0]);
-		tracer(_metadata, tracee, poke_addr, pipefd[1]);
-		syscall(__NR_exit, 0);
-	}
-	close(pipefd[1]);
-	self->tracer = tracer_pid;
-	prctl(PR_SET_PTRACER, self->tracer, 0, 0, 0);
-	long ret = read(pipefd[0], &sync, 1);
-	close(pipefd[0]);
-}
-
-FIXTURE_TEARDOWN(TRACE) {
-	if (self->tracer) {
-		int status;
-		/*
-		 * Extract the exit code from the other process and
-		 * adopt it for ourselves in case its asserts failed.
-		 */
-		ASSERT_EQ(0, kill(self->tracer, SIGUSR1));
-		ASSERT_EQ(self->tracer, waitpid(self->tracer, &status, 0));
-		if (WEXITSTATUS(status))
-			_metadata->passed = 0;
-	}
-	if (self->prog.filter)
-		free(self->prog.filter);
-};
-
-TEST_F(TRACE, read_has_side_effects) {
-	ssize_t ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->prog, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	EXPECT_EQ(0, self->poked);
-	ret = read(-1, NULL, 0);
-	EXPECT_EQ(-1, ret);
-	EXPECT_EQ(0x1001, self->poked);
-}
-
-TEST_F(TRACE, getpid_runs_normally) {
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->prog, 0, 0);
-	ASSERT_EQ(0, ret);
-
-	EXPECT_EQ(0, self->poked);
-	EXPECT_NE(0, syscall(__NR_getpid));
-	EXPECT_EQ(0, self->poked);
-}
-
-#ifndef __NR_seccomp
-# if defined(__i386__)
-#  define __NR_seccomp 354
-# elif defined(__x86_64__)
-#  define __NR_seccomp 317
-# else
-#  define __NR_seccomp 0xffff
-# endif
-#endif
-
-#ifndef SECCOMP_SET_MODE_STRICT
-#define SECCOMP_SET_MODE_STRICT 0
-#endif
-
-#ifndef SECCOMP_SET_MODE_FILTER
-#define SECCOMP_SET_MODE_FILTER 1
-#endif
-
-#ifndef SECCOMP_FLAG_FILTER_TSYNC
-#define SECCOMP_FLAG_FILTER_TSYNC 1
-#endif
-
-#ifndef seccomp
-int seccomp(unsigned int op, unsigned int flags, struct sock_fprog *filter)
-{
-	errno = 0;
-	return syscall(__NR_seccomp, op, flags, filter);
-}
-#endif
-
-/* The following tests are commented out because they test
- * features that are currently unsupported.
- */
-#if 0
-
-TEST(seccomp_syscall) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Kernel does not support PR_SET_NO_NEW_PRIVS!");
-	}
-
-	/* Reject insane operation. */
-	ret = seccomp(-1, 0, &prog);
-	EXPECT_EQ(EINVAL, errno) {
-		TH_LOG("Did not reject crazy op value!");
-	}
-
-	/* Reject strict with flags or pointer. */
-	ret = seccomp(SECCOMP_SET_MODE_STRICT, -1, NULL);
-	EXPECT_EQ(EINVAL, errno) {
-		TH_LOG("Did not reject mode strict with flags!");
-	}
-	ret = seccomp(SECCOMP_SET_MODE_STRICT, 0, &prog);
-	EXPECT_EQ(EINVAL, errno) {
-		TH_LOG("Did not reject mode strict with uargs!");
-	}
-
-	/* Reject insane args for filter. */
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, -1, &prog);
-	EXPECT_EQ(EINVAL, errno) {
-		TH_LOG("Did not reject crazy filter flags!");
-	}
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, 0, NULL);
-	EXPECT_EQ(EFAULT, errno) {
-		TH_LOG("Did not reject NULL filter!");
-	}
-
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, 0, &prog);
-	EXPECT_EQ(0, errno) {
-		TH_LOG("Kernel does not support SECCOMP_SET_MODE_FILTER: %s",
-			strerror(errno));
-	}
-}
-
-TEST(seccomp_syscall_mode_lock) {
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-	long ret = prctl(PR_SET_NO_NEW_PRIVS, 1, NULL, 0, 0);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Kernel does not support PR_SET_NO_NEW_PRIVS!");
-	}
-
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, 0, &prog);
-	EXPECT_EQ(0, ret) {
-		TH_LOG("Could not install filter!");
-	}
-
-	/* Make sure neither entry point will switch to strict. */
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_STRICT, 0, 0, 0);
-	EXPECT_EQ(EINVAL, errno) {
-		TH_LOG("Switched to mode strict!");
-	}
-
-	ret = seccomp(SECCOMP_SET_MODE_STRICT, 0, NULL);
-	EXPECT_EQ(EINVAL, errno) {
-		TH_LOG("Switched to mode strict!");
-	}
-}
-
-#define TSYNC_SIBLINGS 2
-struct tsync_sibling {
-	pthread_t tid;
-	pid_t system_tid;
-	sem_t *started;
-	pthread_cond_t *cond;
-	pthread_mutex_t *mutex;
-	int diverge;
-	int num_waits;
-	struct sock_fprog *prog;
-	struct __test_metadata *metadata;
-};
-
-FIXTURE_DATA(TSYNC) {
-	struct sock_fprog root_prog, apply_prog;
-	struct tsync_sibling sibling[TSYNC_SIBLINGS];
-	sem_t started;
-	pthread_cond_t cond;
-	pthread_mutex_t mutex;
-	int sibling_count;
-};
-
-FIXTURE_SETUP(TSYNC) {
-	struct sock_filter root_filter[] = {
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_filter apply_filter[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_read, 0, 1),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_KILL),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	memset(&self->root_prog, 0, sizeof(self->root_prog));
-	memset(&self->apply_prog, 0, sizeof(self->apply_prog));
-	memset(&self->sibling, 0, sizeof(self->sibling));
-	self->root_prog.filter = malloc(sizeof(root_filter));
-	ASSERT_NE(NULL, self->root_prog.filter);
-	memcpy(self->root_prog.filter, &root_filter, sizeof(root_filter));
-	self->root_prog.len = (unsigned short)(sizeof(root_filter)/sizeof(root_filter[0]));
-
-	self->apply_prog.filter = malloc(sizeof(apply_filter));
-	ASSERT_NE(NULL, self->apply_prog.filter);
-	memcpy(self->apply_prog.filter, &apply_filter, sizeof(apply_filter));
-	self->apply_prog.len = (unsigned short)(sizeof(apply_filter)/sizeof(apply_filter[0]));
-
-	self->sibling_count = 0;
-	pthread_mutex_init(&self->mutex, NULL);
-	pthread_cond_init(&self->cond, NULL);
-	sem_init(&self->started, 0, 0);
-	self->sibling[0].tid = 0;
-	self->sibling[0].cond = &self->cond;
-	self->sibling[0].started = &self->started;
-	self->sibling[0].mutex = &self->mutex;
-	self->sibling[0].diverge = 0;
-	self->sibling[0].num_waits = 1;
-	self->sibling[0].prog = &self->root_prog;
-	self->sibling[0].metadata = _metadata;
-	self->sibling[1].tid = 0;
-	self->sibling[1].cond = &self->cond;
-	self->sibling[1].started = &self->started;
-	self->sibling[1].mutex = &self->mutex;
-	self->sibling[1].diverge = 0;
-	self->sibling[1].prog = &self->root_prog;
-	self->sibling[1].num_waits = 1;
-	self->sibling[1].metadata = _metadata;
-
-	ASSERT_EQ(0, prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0)) {
-		TH_LOG("Kernel does not support PR_SET_NO_NEW_PRIVS!");
-	}
-}
-
-FIXTURE_TEARDOWN(TSYNC) {
-	int sib = 0;
-	if (self->root_prog.filter)
-		free(self->root_prog.filter);
-	if (self->apply_prog.filter)
-		free(self->apply_prog.filter);
-
-	for ( ; sib < self->sibling_count; ++sib) {
-		struct tsync_sibling *s = &self->sibling[sib];
-		void *status;
-		if (!s->tid)
-			continue;
-		if (pthread_kill(s->tid, 0)) {
-			pthread_cancel(s->tid);
-			pthread_join(s->tid, &status);
-		}
-	}
-	pthread_mutex_destroy(&self->mutex);
-	pthread_cond_destroy(&self->cond);
-	sem_destroy(&self->started);
-};
-
-void *tsync_sibling(void *data)
-{
-	long ret = 0;
-	struct tsync_sibling *me = data;
-	struct __test_metadata *_metadata = me->metadata; /* enable TH_LOG */
-	me->system_tid = syscall(__NR_gettid);
-
-	pthread_mutex_lock(me->mutex);
-	if (me->diverge) {
-		/* Just re-apply the root prog to fork the tree */
-		ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER,
-				me->prog, 0, 0);
-	}
-	sem_post(me->started);
-	/* Return outside of started so parent notices failures. */
-	if (ret) {
-		pthread_mutex_unlock(me->mutex);
-		return (void *)SIBLING_EXIT_FAILURE;
-	}
-	do {
-		pthread_cond_wait(me->cond, me->mutex);
-		me->num_waits = me->num_waits - 1;
-	}
-	while (me->num_waits);
-	pthread_mutex_unlock(me->mutex);
-	ret = read(0, NULL, 0);
-	return (void *)SIBLING_EXIT_UNKILLED;
-}
-
-void tsync_start_sibling(struct tsync_sibling *sibling)
-{
-	pthread_create(&sibling->tid, NULL, tsync_sibling, (void *)sibling);
-}
-
-TEST_F(TSYNC, siblings_fail_prctl) {
-	long ret, sib;
-	void *status;
-	struct sock_filter filter[] = {
-		BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-			offsetof(struct seccomp_data, nr)),
-		BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_prctl, 0, 1),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ERRNO | EINVAL),
-		BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-	};
-	struct sock_fprog prog = {
-		.len = (unsigned short)(sizeof(filter)/sizeof(filter[0])),
-		.filter = filter,
-	};
-
-	/* Check prctl failure detection by requesting sib 0 diverge. */
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, 0, &prog);
-
-	self->sibling[0].diverge = 1;
-	tsync_start_sibling(&self->sibling[0]);
-	tsync_start_sibling(&self->sibling[1]);
-
-	while (self->sibling_count < TSYNC_SIBLINGS) {
-		sem_wait(&self->started);
-		self->sibling_count++;
-	}
-
-	/* Signal the threads to clean up. */
-	pthread_mutex_lock(&self->mutex);
-	ASSERT_EQ(0, pthread_cond_broadcast(&self->cond)) {
-		TH_LOG("cond broadcast non-zero");
-	}
-	pthread_mutex_unlock(&self->mutex);
-
-	/* Ensure diverging sibling failed to call prctl. */
-	pthread_join(self->sibling[0].tid, &status);
-	EXPECT_EQ(SIBLING_EXIT_FAILURE, (long)status);
-	pthread_join(self->sibling[1].tid, &status);
-	EXPECT_EQ(SIBLING_EXIT_UNKILLED, (long)status);
-}
-
-TEST_F(TSYNC, two_siblings_with_ancestor) {
-	long ret = seccomp(SECCOMP_SET_MODE_FILTER, 0, &self->root_prog);
-	void *status;
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Kernel does not support SECCOMP_SET_MODE_FILTER!");
-	}
-	tsync_start_sibling(&self->sibling[0]);
-	tsync_start_sibling(&self->sibling[1]);
-
-	while (self->sibling_count < TSYNC_SIBLINGS) {
-		sem_wait(&self->started);
-		self->sibling_count++;
-	}
-
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, SECCOMP_FLAG_FILTER_TSYNC,
-		      &self->apply_prog);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Could not install filter on all threads!");
-	}
-	/* Tell the siblings to test the policy */
-	pthread_mutex_lock(&self->mutex);
-	ASSERT_EQ(0, pthread_cond_broadcast(&self->cond)) {
-		TH_LOG("cond broadcast non-zero");
-	}
-	pthread_mutex_unlock(&self->mutex);
-	/* Ensure they are both killed and don't exit cleanly. */
-	pthread_join(self->sibling[0].tid, &status);
-	EXPECT_EQ(0x0, (long)status);
-	pthread_join(self->sibling[1].tid, &status);
-	EXPECT_EQ(0x0, (long)status);
-}
-
-TEST_F(TSYNC, two_siblings_with_one_divergence) {
-	long ret = seccomp(SECCOMP_SET_MODE_FILTER, 0, &self->root_prog);
-	void *status;
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Kernel does not support SECCOMP_SET_MODE_FILTER!");
-	}
-	self->sibling[0].diverge = 1;
-	tsync_start_sibling(&self->sibling[0]);
-	tsync_start_sibling(&self->sibling[1]);
-
-	while (self->sibling_count < TSYNC_SIBLINGS) {
-		sem_wait(&self->started);
-		self->sibling_count++;
-	}
-
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, SECCOMP_FLAG_FILTER_TSYNC,
-		      &self->apply_prog);
-	ASSERT_EQ(self->sibling[0].system_tid, ret) {
-		TH_LOG("Did not fail on diverged sibling.");
-	}
-
-	/* Wake the threads */
-	pthread_mutex_lock(&self->mutex);
-	ASSERT_EQ(0, pthread_cond_broadcast(&self->cond)) {
-		TH_LOG("cond broadcast non-zero");
-	}
-	pthread_mutex_unlock(&self->mutex);
-
-	/* Ensure they are both unkilled. */
-	pthread_join(self->sibling[0].tid, &status);
-	EXPECT_EQ(SIBLING_EXIT_UNKILLED, (long)status);
-	pthread_join(self->sibling[1].tid, &status);
-	EXPECT_EQ(SIBLING_EXIT_UNKILLED, (long)status);
-}
-
-TEST_F(TSYNC, two_siblings_not_under_filter) {
-	long ret, sib;
-	void *status;
-	/*
-	 * Sibling 0 will have its own seccomp policy
-	 * and Sibling 1 will not be under seccomp at
-	 * all. Sibling 1 will enter seccomp and 0
-	 * will cause failure.
-	 */
-	self->sibling[0].diverge = 1;
-	tsync_start_sibling(&self->sibling[0]);
-	tsync_start_sibling(&self->sibling[1]);
-
-	while (self->sibling_count < TSYNC_SIBLINGS) {
-		sem_wait(&self->started);
-		self->sibling_count++;
-	}
-
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, 0, &self->root_prog);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Kernel does not support SECCOMP_SET_MODE_FILTER!");
-	}
-
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, SECCOMP_FLAG_FILTER_TSYNC,
-		      &self->apply_prog);
-	ASSERT_EQ(ret, self->sibling[0].system_tid) {
-		TH_LOG("Did not fail on diverged sibling.");
-	}
-	sib = 1;
-	if (ret == self->sibling[0].system_tid)
-		sib = 0;
-
-	pthread_mutex_lock(&self->mutex);
-
-	/* Increment the other sibling's num_waits so we can clean up
-	 * the one we just saw.
-	 */
-	self->sibling[!sib].num_waits += 1;
-
-	/* Signal the thread to clean up. */
-	ASSERT_EQ(0, pthread_cond_broadcast(&self->cond)) {
-		TH_LOG("cond broadcast non-zero");
-	}
-	pthread_mutex_unlock(&self->mutex);
-	pthread_join(self->sibling[sib].tid, &status);
-	EXPECT_EQ(SIBLING_EXIT_UNKILLED, (long)status);
-	/* Poll for actual task death. pthread_join doesn't guarantee it. */
-	while (!kill(self->sibling[sib].system_tid, 0)) sleep(0.1);
-	/* Switch to the remaining sibling */
-	sib = !sib;
-
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, SECCOMP_FLAG_FILTER_TSYNC,
-		      &self->apply_prog);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("Expected the remaining sibling to sync");
-	};
-
-	pthread_mutex_lock(&self->mutex);
-
-	/* If the remaining sibling didn't have a chance to wake up during
-	 * the first broadcast, manually reduce the num_waits now.
-	 */
-	if (self->sibling[sib].num_waits > 1)
-		self->sibling[sib].num_waits = 1;
-	ASSERT_EQ(0, pthread_cond_broadcast(&self->cond)) {
-		TH_LOG("cond broadcast non-zero");
-	}
-	pthread_mutex_unlock(&self->mutex);
-	pthread_join(self->sibling[sib].tid, &status);
-	EXPECT_EQ(0, (long)status);
-	/* Poll for actual task death. pthread_join doesn't guarantee it. */
-	while (!kill(self->sibling[sib].system_tid, 0)) sleep(0.1);
-
-	ret = seccomp(SECCOMP_SET_MODE_FILTER, SECCOMP_FLAG_FILTER_TSYNC,
-		      &self->apply_prog);
-	ASSERT_EQ(0, ret);  /* just us chickens */
-}
-
-#endif
-
-/*
- * TODO:
- * - add microbenchmarks
- * - expand NNP testing
- * - better arch-specific TRACE and TRAP handlers.
- * - endianness checking when appropriate
- * - 64-bit arg prodding
- * - arch value testing (x86 modes especially)
- * - ...
- */
-
-TEST_HARNESS_MAIN
diff --git a/client/site_tests/security_SeccompSyscallFilters/src/sigsegv.c b/client/site_tests/security_SeccompSyscallFilters/src/sigsegv.c
deleted file mode 100644
index ec6edbc..0000000
--- a/client/site_tests/security_SeccompSyscallFilters/src/sigsegv.c
+++ /dev/null
@@ -1,182 +0,0 @@
-/* sigsegv.c
- * Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- *
- * Forces a denied system call to trigger a SIGSEGV at the instruction after
- * the call using a SIGSYS handler.. This can be useful when debugging
- * frameworks have trouble tracing through the SIGSYS handler.
- * Proof of concept using amd64 registers and 'syscall'.
- */
-
-#include <asm/siginfo.h>
-#define __have_siginfo_t 1
-#define __have_sigval_t 1
-#define __have_sigevent_t 1
-
-#include <linux/filter.h>
-#include <linux/prctl.h>
-#include <linux/seccomp.h>
-#include <limits.h>
-#include <stddef.h>
-#include <stdbool.h>
-#include <string.h>
-#include <syscall.h>
-#define __USE_GNU 1
-#include <sys/ucontext.h>
-#include <sys/mman.h>
-
-#include "test_harness.h"
-
-#ifndef PR_SET_NO_NEW_PRIVS
-#define PR_SET_NO_NEW_PRIVS 38
-#define PR_GET_NO_NEW_PRIVS 39
-#endif
-
-#if defined(__i386__)
-#define REG_IP	REG_EIP
-#define REG_SP	REG_ESP
-#define REG_RESULT	REG_EAX
-#define REG_SYSCALL	REG_EAX
-#define REG_ARG0	REG_EBX
-#define REG_ARG1	REG_ECX
-#define REG_ARG2	REG_EDX
-#define REG_ARG3	REG_ESI
-#define REG_ARG4	REG_EDI
-#define REG_ARG5	REG_EBP
-#elif defined(__x86_64__)
-#define REG_IP	REG_RIP
-#define REG_SP	REG_RSP
-#define REG_RESULT	REG_RAX
-#define REG_SYSCALL	REG_RAX
-#define REG_ARG0	REG_RDI
-#define REG_ARG1	REG_RSI
-#define REG_ARG2	REG_RDX
-#define REG_ARG3	REG_R10
-#define REG_ARG4	REG_R8
-#define REG_ARG5	REG_R9
-#endif
-
-FIXTURE_DATA(TRAP) {
-	struct sock_fprog prog;
-};
-
-FIXTURE_SETUP(TRAP) {
-	/* instruction after the syscall. Will be arch specific, of course. */
-	{
-		struct sock_filter filter[] = {
-			BPF_STMT(BPF_LD+BPF_W+BPF_ABS,
-				offsetof(struct seccomp_data, nr)),
-			/* Whitelist anything you might need in the sigaction */
-#ifdef __NR_sigreturn
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_sigreturn, 4, 0),
-#endif
-			/* TODO: only allow PROT_NONE */
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_mprotect, 3, 0),
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_exit, 2, 0),
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_rt_sigreturn, 1, 0),
-			/* Allow __NR_write for easy logging. */
-			BPF_JUMP(BPF_JMP+BPF_JEQ+BPF_K, __NR_write, 0, 1),
-			BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_ALLOW),
-			BPF_STMT(BPF_RET+BPF_K, SECCOMP_RET_TRAP),
-		};
-		memset(&self->prog, 0, sizeof(self->prog));
-		self->prog.filter = malloc(sizeof(filter));
-		ASSERT_NE(NULL, self->prog.filter);
-		memcpy(self->prog.filter, filter, sizeof(filter));
-		self->prog.len = (unsigned short)(sizeof(filter)/sizeof(filter[0]));
-	}
-}
-
-FIXTURE_TEARDOWN(TRAP) {
-	if (self->prog.filter)
-		free(self->prog.filter);
-};
-
-struct arch_sigsys {
-		void *_call_addr; /* calling user insn */
-		int _syscall;	/* triggering system call number */
-		unsigned int _arch;	/* AUDIT_ARCH_* of syscall */
-};
-
-#define _ALIGN(x,sz) (((x + ((sz)-1)) & ~((sz)-1)) - (sz))
-#define ALIGN(x,sz) ((typeof(x))_ALIGN((unsigned long)(x),(unsigned long)(sz)))
-static long local_mprotect(void *target, unsigned long sz)
-{
-	register unsigned long res asm ("rax") = __NR_mprotect;
-	register void *addr asm ("rdi") = ALIGN(target, sz);
-	register long len asm ("rsi") = sz;
-	register long num asm ("rdx") = PROT_NONE;
-	__asm__("syscall\n");
-	return res;
-}
-
-static void TRAP_action(int nr, siginfo_t *info, void *void_context)
-{
-	ucontext_t *ctx = (ucontext_t *)void_context;
-	char buf[256];
-	int len;
-	int do_ret = 1;
-	struct arch_sigsys *sys = (struct arch_sigsys *)
-#ifdef si_syscall
-		&(info->si_call_addr);
-#else
-		&(info->si_pid);
-#endif
-
-	if (info->si_code != SYS_SECCOMP)
-		return;
-	if (!ctx)
-		return;
-	len = snprintf(buf, sizeof(buf),
-			"@0x%lX:%X:%d:0x%lX:0x%lX:0x%lX:0x%lX:0x%lX:0x%lX [0x%lX]\n",
-			(unsigned long)sys->_call_addr,
-			sys->_arch,
-			sys->_syscall,
-			ctx->uc_mcontext.gregs[REG_ARG0],
-			ctx->uc_mcontext.gregs[REG_ARG1],
-			ctx->uc_mcontext.gregs[REG_ARG2],
-			ctx->uc_mcontext.gregs[REG_ARG3],
-			ctx->uc_mcontext.gregs[REG_ARG4],
-			ctx->uc_mcontext.gregs[REG_ARG5],
-			ALIGN(ctx->uc_mcontext.gregs[REG_IP], 4096));
-	/* Emit some useful logs or whatever. */
-	syscall(__NR_write, STDOUT_FILENO, buf, len);
-	/* Make the calling page non-exec */
-	/* Be careful how this is called, since it may make the syscall() instructions non-exec. */
-	local_mprotect((void *)ctx->uc_mcontext.gregs[REG_IP], sysconf(_SC_PAGE_SIZE));
-}
-
-TEST_F_SIGNAL(TRAP, sigsegv, SIGSEGV) {
-	int ret;
-	struct sigaction act;
-	pid_t pid;
-	sigset_t mask;
-	memset(&act, 0, sizeof(act));
-	sigemptyset(&mask);
-	sigaddset(&mask, SIGSYS);
-
-	act.sa_sigaction = &TRAP_action;
-	act.sa_flags = SA_SIGINFO;
-	ret = sigaction(SIGSYS, &act, NULL);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("sigaction failed");
-	}
-	ret = sigprocmask(SIG_UNBLOCK, &mask, NULL);
-	ASSERT_EQ(0, ret) {
-		TH_LOG("sigprocmask failed");
-	}
-
-	/* Get the pid to compare against. */
-	pid = getpid();
-
-	ret = prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);
-	ASSERT_EQ(0, ret);
-	ret = prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, &self->prog);
-	ASSERT_EQ(0, ret);
-
-	/* Call anything! */
-	ret = syscall(__NR_getpid);
-}
-
-TEST_HARNESS_MAIN
diff --git a/client/site_tests/security_SeccompSyscallFilters/src/test_harness.h b/client/site_tests/security_SeccompSyscallFilters/src/test_harness.h
deleted file mode 100644
index e765945..0000000
--- a/client/site_tests/security_SeccompSyscallFilters/src/test_harness.h
+++ /dev/null
@@ -1,489 +0,0 @@
-/* Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- *
- * test_harness.h: simple C unit test helper.
- *
- * Usage:
- *   #include "test_harness.h"
- *   TEST(standalone_test) {
- *     do_some_stuff;
- *     EXPECT_GT(10, stuff) {
- *        stuff_state_t state;
- *        enumerate_stuff_state(&state);
- *        TH_LOG("expectation failed with state: %s", state.msg);
- *     }
- *     more_stuff;
- *     ASSERT_NE(some_stuff, NULL) TH_LOG("how did it happen?!");
- *     last_stuff;
- *     EXPECT_EQ(0, last_stuff);
- *   }
- *
- *   FIXTURE(my_fixture) {
- *     mytype_t *data;
- *     int awesomeness_level;
- *   };
- *   FIXTURE_SETUP(my_fixture) {
- *     self->data = mytype_new();
- *     ASSERT_NE(NULL, self->data);
- *   }
- *   FIXTURE_TEARDOWN(my_fixture) {
- *     mytype_free(self->data);
- *   }
- *   TEST_F(my_fixture, data_is_good) {
- *     EXPECT_EQ(1, is_my_data_good(self->data));
- *   }
- *
- *   TEST_HARNESS_MAIN
- *
- * API inspired by code.google.com/p/googletest
- */
-#ifndef TEST_HARNESS_H_
-#define TEST_HARNESS_H_
-
-#define _GNU_SOURCE
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <sys/types.h>
-#include <sys/wait.h>
-#include <unistd.h>
-
-/* All exported functionality should be declared through this macro. */
-#define TEST_API(x) _##x
-
-/*
- * Exported APIs
- */
-
-/* TEST(name) { implementation }
- * Defines a test by name.
- * Names must be unique and tests must not be run in parallel.  The
- * implementation containing block is a function and scoping should be treated
- * as such.  Returning early may be performed with a bare "return;" statement.
- *
- * EXPECT_* and ASSERT_* are valid in a TEST() { } context.
- */
-#define TEST TEST_API(TEST)
-
-/* TEST_SIGNAL(name, signal) { implementation }
- * Defines a test by name and the expected term signal.
- * Names must be unique and tests must not be run in parallel.  The
- * implementation containing block is a function and scoping should be treated
- * as such.  Returning early may be performed with a bare "return;" statement.
- *
- * EXPECT_* and ASSERT_* are valid in a TEST() { } context.
- */
-#define TEST_SIGNAL TEST_API(TEST_SIGNAL)
-
-/* FIXTURE(datatype name) {
- *   type property1;
- *   ...
- * };
- * Defines the data provided to TEST_F()-defined tests as |self|.  It should be
- * populated and cleaned up using FIXTURE_SETUP and FIXTURE_TEARDOWN.
- */
-#define FIXTURE TEST_API(FIXTURE)
-
-/* FIXTURE_DATA(datatype name)
- * This call may be used when the type of the fixture data
- * is needed.  In general, this should not be needed unless
- * the |self| is being passed to a helper directly.
- */
-#define FIXTURE_DATA TEST_API(FIXTURE_DATA)
-
-/* FIXTURE_SETUP(fixture name) { implementation }
- * Populates the required "setup" function for a fixture.  An instance of the
- * datatype defined with _FIXTURE_DATA will be exposed as |self| for the
- * implementation.
- *
- * ASSERT_* are valid for use in this context and will preempt the execution
- * of any dependent fixture tests.
- *
- * A bare "return;" statement may be used to return early.
- */
-#define FIXTURE_SETUP TEST_API(FIXTURE_SETUP)
-
-/* FIXTURE_TEARDOWN(fixture name) { implementation }
- * Populates the required "teardown" function for a fixture.  An instance of the
- * datatype defined with _FIXTURE_DATA will be exposed as |self| for the
- * implementation to clean up.
- *
- * A bare "return;" statement may be used to return early.
- */
-#define FIXTURE_TEARDOWN TEST_API(FIXTURE_TEARDOWN)
-
-/* TEST_F(fixture, name) { implementation }
- * Defines a test that depends on a fixture (e.g., is part of a test case).
- * Very similar to TEST() except that |self| is the setup instance of fixture's
- * datatype exposed for use by the implementation.
- */
-#define TEST_F TEST_API(TEST_F)
-
-#define TEST_F_SIGNAL TEST_API(TEST_F_SIGNAL)
-
-/* Use once to append a main() to the test file. E.g.,
- *   TEST_HARNESS_MAIN
- */
-#define TEST_HARNESS_MAIN TEST_API(TEST_HARNESS_MAIN)
-
-/*
- * Operators for use in TEST and TEST_F.
- * ASSERT_* calls will stop test execution immediately.
- * EXPECT_* calls will emit a failure warning, note it, and continue.
- */
-
-/* ASSERT_EQ(expected, measured): expected == measured */
-#define ASSERT_EQ TEST_API(ASSERT_EQ)
-/* ASSERT_NE(expected, measured): expected != measured */
-#define ASSERT_NE TEST_API(ASSERT_NE)
-/* ASSERT_LT(expected, measured): expected < measured */
-#define ASSERT_LT TEST_API(ASSERT_LT)
-/* ASSERT_LE(expected, measured): expected <= measured */
-#define ASSERT_LE TEST_API(ASSERT_LE)
-/* ASSERT_GT(expected, measured): expected > measured */
-#define ASSERT_GT TEST_API(ASSERT_GT)
-/* ASSERT_GE(expected, measured): expected >= measured */
-#define ASSERT_GE TEST_API(ASSERT_GE)
-/* ASSERT_NULL(measured): NULL == measured */
-#define ASSERT_NULL TEST_API(ASSERT_NULL)
-/* ASSERT_TRUE(measured): measured != 0 */
-#define ASSERT_TRUE TEST_API(ASSERT_TRUE)
-/* ASSERT_FALSE(measured): measured == 0 */
-#define ASSERT_FALSE TEST_API(ASSERT_FALSE)
-/* ASSERT_STREQ(expected, measured): !strcmp(expected, measured) */
-#define ASSERT_STREQ TEST_API(ASSERT_STREQ)
-/* ASSERT_STRNE(expected, measured): strcmp(expected, measured) */
-#define ASSERT_STRNE TEST_API(ASSERT_STRNE)
-/* EXPECT_EQ(expected, measured): expected == measured */
-#define EXPECT_EQ TEST_API(EXPECT_EQ)
-/* EXPECT_NE(expected, measured): expected != measured */
-#define EXPECT_NE TEST_API(EXPECT_NE)
-/* EXPECT_LT(expected, measured): expected < measured */
-#define EXPECT_LT TEST_API(EXPECT_LT)
-/* EXPECT_LE(expected, measured): expected <= measured */
-#define EXPECT_LE TEST_API(EXPECT_LE)
-/* EXPECT_GT(expected, measured): expected > measured */
-#define EXPECT_GT TEST_API(EXPECT_GT)
-/* EXPECT_GE(expected, measured): expected >= measured */
-#define EXPECT_GE TEST_API(EXPECT_GE)
-/* EXPECT_NULL(measured): NULL == measured */
-#define EXPECT_NULL TEST_API(EXPECT_NULL)
-/* EXPECT_TRUE(measured): 0 != measured */
-#define EXPECT_TRUE TEST_API(EXPECT_TRUE)
-/* EXPECT_FALSE(measured): 0 == measured */
-#define EXPECT_FALSE TEST_API(EXPECT_FALSE)
-/* EXPECT_STREQ(expected, measured): !strcmp(expected, measured) */
-#define EXPECT_STREQ TEST_API(EXPECT_STREQ)
-/* EXPECT_STRNE(expected, measured): strcmp(expected, measured) */
-#define EXPECT_STRNE TEST_API(EXPECT_STRNE)
-
-/* TH_LOG(format, ...)
- * Optional debug logging function available for use in tests.
- * Logging may be enabled or disabled by defining TH_LOG_ENABLED.
- * E.g., #define TH_LOG_ENABLED 1
- * If no definition is provided, logging is enabled by default.
- */
-#define TH_LOG  TEST_API(TH_LOG)
-
-/*
- * Internal implementation.
- *
- */
-
-/* Utilities exposed to the test definitions */
-#ifndef TH_LOG_STREAM
-#  define TH_LOG_STREAM stderr
-#endif
-
-#ifndef TH_LOG_ENABLED
-#  define TH_LOG_ENABLED 1
-#endif
-
-#define _TH_LOG(fmt, ...) do { \
-  if (TH_LOG_ENABLED) \
-    __TH_LOG(fmt, ##__VA_ARGS__); \
-} while (0)
-
-/* Unconditional logger for internal use. */
-#define __TH_LOG(fmt, ...) \
-    fprintf(TH_LOG_STREAM, "%s:%d:%s:" fmt "\n", \
-            __FILE__, __LINE__, _metadata->name, ##__VA_ARGS__)
-
-/* Defines the test function and creates the registration stub. */
-#define _TEST(test_name) __TEST_IMPL(test_name, -1)
-
-#define _TEST_SIGNAL(test_name, signal) __TEST_IMPL(test_name, signal)
-
-#define __TEST_IMPL(test_name, _signal) \
-  static void test_name(struct __test_metadata *_metadata); \
-  static struct __test_metadata _##test_name##_object = \
-    { name: "global." #test_name, fn: &test_name, termsig: _signal }; \
-  static void __attribute__((constructor)) _register_##test_name(void) { \
-    __register_test(&_##test_name##_object); \
-  } \
-  static void test_name( \
-    struct __test_metadata __attribute__((unused)) *_metadata)
-
-/* Wraps the struct name so we have one less argument to pass around. */
-#define _FIXTURE_DATA(fixture_name) struct _test_data_##fixture_name
-
-/* Called once per fixture to setup the data and register. */
-#define _FIXTURE(fixture_name) \
-  static void __attribute__((constructor)) \
-      _register_##fixture_name##_data(void) { \
-    __fixture_count++; \
-  } \
-  _FIXTURE_DATA(fixture_name)
-
-/* Prepares the setup function for the fixture.  |_metadata| is included
- * so that ASSERT_* work as a convenience.
- */
-#define _FIXTURE_SETUP(fixture_name) \
-  void fixture_name##_setup( \
-    struct __test_metadata __attribute__((unused)) *_metadata, \
-    _FIXTURE_DATA(fixture_name) __attribute__((unused)) *self)
-#define _FIXTURE_TEARDOWN(fixture_name) \
-  void fixture_name##_teardown( \
-    struct __test_metadata __attribute__((unused)) *_metadata, \
-    _FIXTURE_DATA(fixture_name) __attribute__((unused)) *self)
-
-/* Emits test registration and helpers for fixture-based test
- * cases.
- * TODO(wad) register fixtures on dedicated test lists.
- */
-#define _TEST_F(fixture_name, test_name) \
-  __TEST_F_IMPL(fixture_name, test_name, -1)
-
-#define _TEST_F_SIGNAL(fixture_name, test_name, signal) \
-  __TEST_F_IMPL(fixture_name, test_name, signal)
-
-#define __TEST_F_IMPL(fixture_name, test_name, signal) \
-  static void fixture_name##_##test_name( \
-    struct __test_metadata *_metadata, \
-    _FIXTURE_DATA(fixture_name) *self); \
-  static inline void wrapper_##fixture_name##_##test_name( \
-    struct __test_metadata *_metadata) { \
-    /* fixture data is allocated, setup, and torn down per call. */ \
-    _FIXTURE_DATA(fixture_name) self; \
-    memset(&self, 0, sizeof(_FIXTURE_DATA(fixture_name))); \
-    fixture_name##_setup(_metadata, &self); \
-    /* Let setup failure terminate early. */ \
-    if (!_metadata->passed) return; \
-    fixture_name##_##test_name(_metadata, &self); \
-    fixture_name##_teardown(_metadata, &self); \
-  } \
-  static struct __test_metadata _##fixture_name##_##test_name##_object = { \
-    name: #fixture_name "." #test_name, \
-    fn: &wrapper_##fixture_name##_##test_name, \
-    termsig: signal, \
-   }; \
-  static void __attribute__((constructor)) \
-      _register_##fixture_name##_##test_name(void) { \
-    __register_test(&_##fixture_name##_##test_name##_object); \
-  } \
-  static void fixture_name##_##test_name( \
-    struct __test_metadata __attribute__((unused)) *_metadata, \
-    _FIXTURE_DATA(fixture_name) __attribute__((unused)) *self)
-
-/* Exports a simple wrapper to run the test harness. */
-#define _TEST_HARNESS_MAIN \
-  int main(int argc, char **argv) { return test_harness_run(argc, argv); }
-
-#define _ASSERT_EQ(_expected, _seen) \
-  __EXPECT(_expected, _seen, ==, 1)
-#define _ASSERT_NE(_expected, _seen) \
-  __EXPECT(_expected, _seen, !=, 1)
-#define _ASSERT_LT(_expected, _seen) \
-  __EXPECT(_expected, _seen, <, 1)
-#define _ASSERT_LE(_expected, _seen) \
-  __EXPECT(_expected, _seen, <=, 1)
-#define _ASSERT_GT(_expected, _seen) \
-  __EXPECT(_expected, _seen, >, 1)
-#define _ASSERT_GE(_expected, _seen) \
-  __EXPECT(_expected, _seen, >=, 1)
-#define _ASSERT_NULL(_seen) \
-  __EXPECT(NULL, _seen, ==, 1)
-
-#define _ASSERT_TRUE(_seen) \
-  _ASSERT_NE(0, _seen)
-#define _ASSERT_FALSE(_seen) \
-  _ASSERT_EQ(0, _seen)
-#define _ASSERT_STREQ(_expected, _seen) \
-  __EXPECT_STR(_expected, _seen, ==, 1)
-#define _ASSERT_STRNE(_expected, _seen) \
-  __EXPECT_STR(_expected, _seen, !=, 1)
-
-#define _EXPECT_EQ(_expected, _seen) \
-  __EXPECT(_expected, _seen, ==, 0)
-#define _EXPECT_NE(_expected, _seen) \
-  __EXPECT(_expected, _seen, !=, 0)
-#define _EXPECT_LT(_expected, _seen) \
-  __EXPECT(_expected, _seen, <, 0)
-#define _EXPECT_LE(_expected, _seen) \
-  __EXPECT(_expected, _seen, <=, 0)
-#define _EXPECT_GT(_expected, _seen) \
-  __EXPECT(_expected, _seen, >, 0)
-#define _EXPECT_GE(_expected, _seen) \
-  __EXPECT(_expected, _seen, >=, 0)
-
-#define _EXPECT_NULL(_seen) \
-  __EXPECT(NULL, _seen, ==, 0)
-#define _EXPECT_TRUE(_seen) \
-  _EXPECT_NE(0, _seen)
-#define _EXPECT_FALSE(_seen) \
-  _EXPECT_EQ(0, _seen)
-
-#define _EXPECT_STREQ(_expected, _seen) \
-  __EXPECT_STR(_expected, _seen, ==, 0)
-#define _EXPECT_STRNE(_expected, _seen) \
-  __EXPECT_STR(_expected, _seen, !=, 0)
-
-/* Support an optional handler after an ASSERT_* or EXPECT_*.  The approach is
- * not thread-safe, but it should be fine in most sane test scenarios.
- *
- * Using __bail(), which optionally abort()s, is the easiest way to early
- * return while still providing an optional block to the API consumer.
- */
-#define OPTIONAL_HANDLER(_assert) \
-  for (; _metadata->trigger;  _metadata->trigger = __bail(_assert))
-
-#define __EXPECT(_expected, _seen, _t, _assert) do { \
-  /* Avoid multiple evaluation of the cases */ \
-  __typeof__(_expected) __exp = (_expected); \
-  __typeof__(_seen) __seen = (_seen); \
-  if (!(__exp _t __seen)) { \
-    unsigned long long __exp_print = 0; \
-    unsigned long long __seen_print = 0; \
-    /* Avoid casting complaints the scariest way we can. */ \
-    memcpy(&__exp_print, &__exp, sizeof(__exp)); \
-    memcpy(&__seen_print, &__seen, sizeof(__seen)); \
-    __TH_LOG("Expected %s (%llu) %s %s (%llu)", \
-            #_expected, __exp_print, #_t, \
-            #_seen, __seen_print); \
-    _metadata->passed = 0; \
-    /* Ensure the optional handler is triggered */ \
-    _metadata->trigger = 1; \
-  } \
-} while (0); OPTIONAL_HANDLER(_assert)
-
-#define __EXPECT_STR(_expected, _seen, _t, _assert) do { \
-  const char *__exp = (_expected); \
-  const char *__seen = (_seen); \
-  if (!(strcmp(__exp, __seen) _t 0))  { \
-    __TH_LOG("Expected '%s' %s '%s'.", __exp, #_t, __seen); \
-    _metadata->passed = 0; \
-    _metadata->trigger = 1; \
-  } \
-} while (0); OPTIONAL_HANDLER(_assert)
-
-/* Contains all the information for test execution and status checking. */
-struct __test_metadata {
-  const char *name;
-  void (*fn)(struct __test_metadata *);
-  int termsig;
-  int passed;
-  int trigger; /* extra handler after the evaluation */
-  struct __test_metadata *prev, *next;
-};
-
-/* Storage for the (global) tests to be run. */
-static struct __test_metadata *__test_list = NULL;
-static unsigned int __test_count = 0;
-static unsigned int __fixture_count = 0;
-
-/*
- * Since constructors are called in reverse order, reverse the test
- * list so tests are run in source declaration order.
- * https://gcc.gnu.org/onlinedocs/gccint/Initialization.html
- */
-static inline void __register_test(struct __test_metadata *t) {
-  __test_count++;
-  /* Circular linked list where only prev is circular. */
-  if (__test_list == NULL) {
-    __test_list = t;
-    t->next = NULL;
-    t->prev = t;
-    return;
-  }
-  t->next = __test_list;
-  t->next->prev = t;
-  t->prev = t;
-  __test_list = t;
-}
-
-static inline int __bail(int for_realz) {
-  if (for_realz)
-    abort();
-  return 0;
-}
-
-static int test_harness_run(int __attribute__((unused)) argc,
-                            char __attribute__((unused)) **argv) {
-  struct __test_metadata *t;
-  int ret = 0;
-  unsigned int count = 0;
-  unsigned int pass_count = 0;
-
-  /* TODO(wad) add optional arguments similar to gtest. */
-  printf("[==========] Running %u tests from %u test cases.\n",
-          __test_count, __fixture_count + 1);
-  for (t = __test_list; t; t = t->next) {
-    pid_t child_pid;
-    int status;
-    count++;
-    t->passed = 1;
-    t->trigger = 0;
-    printf("[ RUN      ] %s\n", t->name);
-    child_pid = fork();
-    if (child_pid < 0) {
-      printf("ERROR SPAWNING TEST CHILD\n");
-      t->passed = 0;
-    } else if (child_pid == 0) {
-      t->fn(t);
-      _exit(t->passed);
-    } else {
-      /* TODO(wad) add timeout support. */
-      waitpid(child_pid, &status, 0);
-      if (WIFEXITED(status)) {
-        t->passed = t->termsig == -1 ? WEXITSTATUS(status) : 0;
-        if (t->termsig != -1) {
-         fprintf(TH_LOG_STREAM,
-                  "%s: Test exited normally instead of by signal (code: %d)\n",
-                 t->name,
-                 WEXITSTATUS(status));
-        }
-      } else if (WIFSIGNALED(status)) {
-        t->passed = 0;
-        if (WTERMSIG(status) == SIGABRT) {
-          fprintf(TH_LOG_STREAM,
-                  "%s: Test terminated by assertion\n",
-                 t->name);
-        } else if (WTERMSIG(status) == t->termsig) {
-          t->passed = 1;
-        } else {
-          fprintf(TH_LOG_STREAM,
-                  "%s: Test terminated unexpectedly by signal %d\n",
-                 t->name,
-                 WTERMSIG(status));
-        }
-      } else {
-          fprintf(TH_LOG_STREAM,
-                  "%s: Test ended in some other way [%u]\n",
-                 t->name,
-                 status);
-      }
-    }
-    printf("[     %4s ] %s\n", (t->passed ? "OK" : "FAIL"), t->name);
-    if (t->passed)
-      pass_count++;
-    else
-      ret = 1;
-  }
-  /* TODO(wad) organize by fixtures since ordering is not guaranteed now. */
-  printf("[==========] %u / %u tests passed.\n", pass_count, count);
-  printf("[  %s  ]\n", (ret ? "FAILED" : "PASSED"));
-  return ret;
-}
-
-#endif  /* TEST_HARNESS_H_ */
diff --git a/client/site_tests/security_SessionManagerDbusEndpoints/control b/client/site_tests/security_SessionManagerDbusEndpoints/control
deleted file mode 100644
index 92ae831..0000000
--- a/client/site_tests/security_SessionManagerDbusEndpoints/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME="SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = """
-Tests that SessionManager D-Bus endpoints cannot be abused.
-"""
-NAME = "security_SessionManagerDbusEndpoints"
-PURPOSE = """
-Validates that EnableChromeTesting is root only, and that RestartJob will not
-launch arbitrary processes.
-"""
-CRITERIA = """
-Fails if EnableChromeTesting can be called as user 'chronos', or if RestartJob
-allows executing an arbitrary binary.
-"""
-ATTRIBUTES = "suite:bvt-perbuild"
-TEST_CATEGORY = "Security"
-TEST_CLASS = "security"
-TEST_TYPE = "client"
-JOB_RETRIES = 2
-
-job.run_test("security_SessionManagerDbusEndpoints")
diff --git a/client/site_tests/security_SessionManagerDbusEndpoints/security_SessionManagerDbusEndpoints.py b/client/site_tests/security_SessionManagerDbusEndpoints/security_SessionManagerDbusEndpoints.py
deleted file mode 100644
index 6ff844a..0000000
--- a/client/site_tests/security_SessionManagerDbusEndpoints/security_SessionManagerDbusEndpoints.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-import logging
-import os.path
-import pwd
-import socket
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import constants, login
-
-
-class security_SessionManagerDbusEndpoints(test.test):
-    """Verifies SessionManager DBus endpoints are not exposed.
-    """
-    version = 1
-
-    _FLAGFILE = '/tmp/security_SessionManagerDbusEndpoints_regression'
-
-
-    def _set_user_environment(self, username):
-        for name in ('LOGNAME', 'USER', 'LNAME', 'USERNAME'):
-            if name in os.environ:
-                os.environ[name] = username
-
-
-    def _set_user(self, username):
-        user_info = pwd.getpwnam(username)
-        os.setegid(user_info[3])
-        os.seteuid(user_info[2])
-        self._set_user_environment(username)
-
-
-    def _reset_user(self):
-        uid = os.getuid()
-        username = pwd.getpwuid(uid)[0]
-        os.seteuid(uid)
-        os.setegid(os.getgid())
-        self._set_user_environment(username)
-
-
-    def _ps(self, proc=constants.BROWSER):
-        """Grab the oldest pid for process |proc|."""
-        pscmd = 'ps -C %s -o pid --no-header | head -1' % proc
-        return utils.system_output(pscmd)
-
-
-    def run_once(self):
-        """Main test code."""
-        login.wait_for_browser()
-        passed_enable_chrome_testing = self.test_enable_chrome_testing()
-        passed_restart_job = self.test_restart_job()
-
-        if not passed_enable_chrome_testing or not passed_restart_job:
-            raise error.TestFail('SessionManager DBus endpoints can be abused, '
-                                 'see error log')
-
-
-    def test_restart_job(self):
-        """Test SessionManager.RestartJob."""
-        bus = dbus.SystemBus()
-        proxy = bus.get_object('org.chromium.SessionManager',
-                               '/org/chromium/SessionManager')
-        session_manager = dbus.Interface(proxy,
-                                         'org.chromium.SessionManagerInterface')
-
-        # Craft a malicious replacement for the target process.
-        cmd = ['touch', self._FLAGFILE]
-
-        # Try to get our malicious replacement to run via RestartJob.
-        try:
-            remote, local = socket.socketpair(socket.AF_UNIX)
-            logging.info('Calling RestartJob(<socket>, %r)', cmd)
-            session_manager.RestartJob(dbus.types.UnixFd(remote), cmd)
-            # Fails if the RestartJob call doesn't generate an error.
-            logging.error(
-                'RestartJob did not fail when passed an arbitrary command')
-            return False
-        except dbus.DBusException as e:
-            logging.info(e.get_dbus_message())
-            pass
-        except OSError as e:
-            raise error.TestError('Could not create sockets for creds: %s', e)
-        finally:
-            try:
-                local.close()
-            except OSError:
-                pass
-
-        if os.path.exists(self._FLAGFILE):
-            logging.error('RestartJob ran an arbitrary command')
-            return False
-
-        return True
-
-
-    def test_enable_chrome_testing(self):
-        """Test SessionManager.EnableChromeTesting."""
-        self._set_user('chronos')
-
-        bus = dbus.SystemBus()
-        proxy = bus.get_object('org.chromium.SessionManager',
-                               '/org/chromium/SessionManager')
-        session_manager = dbus.Interface(proxy,
-                                         'org.chromium.SessionManagerInterface')
-
-        chrome_pid = self._ps()
-
-        # Try DBus call and make sure it fails.
-        try:
-            # DBus cannot infer the type of an empty Python list.
-            # Pass an empty dbus.Array with the correct signature, taken from
-            # platform2/login_manager/dbus_bindings/org.chromium.SessionManagerInterface.xml.
-            empty_string_array = dbus.Array(signature="as")
-            path = session_manager.EnableChromeTesting(True, empty_string_array,
-                                                       empty_string_array)
-        except dbus.exceptions.DBusException as dbe:
-            logging.info(dbe)
-        else:
-            logging.error('EnableChromeTesting '
-                          'succeeded when it should have failed')
-            return False
-
-        # Make sure Chrome didn't restart.
-        if chrome_pid != self._ps():
-            logging.error('Chrome restarted during test.')
-            return False
-
-        self._reset_user()
-        return True
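
For context, the removed test's core idea -- call a privileged SessionManager D-Bus method from an unprivileged context and treat a rejection (dbus.DBusException) as the passing outcome -- can be sketched as below. The helper name call_should_be_rejected is hypothetical; the service, object path, and interface names are the ones used in the file above.

    import dbus

    def call_should_be_rejected(method_name, *args):
        """Return True if the SessionManager method raises, as the test expects."""
        bus = dbus.SystemBus()
        proxy = bus.get_object('org.chromium.SessionManager',
                               '/org/chromium/SessionManager')
        iface = dbus.Interface(proxy, 'org.chromium.SessionManagerInterface')
        try:
            getattr(iface, method_name)(*args)
        except dbus.DBusException:
            return True   # Rejection is the expected (passing) outcome.
        return False      # The privileged call unexpectedly succeeded.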
diff --git a/client/site_tests/security_SysVIPC/control b/client/site_tests/security_SysVIPC/control
deleted file mode 100644
index 57d1586..0000000
--- a/client/site_tests/security_SysVIPC/control
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TIME = "SHORT"
-AUTHOR = "The Chromium OS Authors"
-DOC = "Security tripwire for SysV IPC attack surfaces"
-NAME = "security_SysVIPC"
-PURPOSE = "Detect emergence of new attack surfaces in SysV IPC"
-CRITERIA = "Fails if shm, mqueues, or semaphores do not match expectations."
-ATTRIBUTES = "suite:security"
-TEST_CLASS = "security"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-
-job.run_test("security_SysVIPC")
diff --git a/client/site_tests/security_SysVIPC/security_SysVIPC.py b/client/site_tests/security_SysVIPC/security_SysVIPC.py
deleted file mode 100644
index 446e60e..0000000
--- a/client/site_tests/security_SysVIPC/security_SysVIPC.py
+++ /dev/null
@@ -1,110 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import re
-
-from collections import namedtuple
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-ShmRecord = namedtuple('ShmRecord', ['owner', 'perms', 'attached'])
-SemaphoreRecord = namedtuple('SemaphoreRecord', ['owner', 'perms'])
-
-class security_SysVIPC(test.test):
-    """Detect emergence of new attack surfaces in SysV IPC."""
-    version = 1
-    expected_shm = set([ShmRecord(owner='cras', perms='640',
-                                  attached=('/usr/bin/cras',))])
-    expected_sem = set([SemaphoreRecord(owner='root', perms='600')])
-
-    def dump_ipcs_to_results(self):
-        """Writes a copy of the 'ipcs' output to the autotest results dir."""
-        utils.system_output('ipcs > "%s/ipcs-output.txt"' % self.resultsdir)
-
-
-    def find_attached(self, shmid):
-        """Find programs attached to a given shared memory segment.
-
-        Returns full paths to each program identified.
-
-        Args:
-          @param shmid: the id as shown in ipcs and related utilities.
-        """
-        # This finds /proc/*/exe entries where maps shows they have
-        # attached to the specified shm segment.
-        cmd = 'grep "%s */SYSV" /proc/*/maps | sed "s/maps.*/exe/g"' % shmid
-        # Then we just need to readlink each of the links. Even though
-        # we ultimately convert to a sorted tuple, we use a set to avoid
-        # accumulating duplicates as we go along.
-        exes = set()
-        for link in utils.system_output(cmd).splitlines():
-            exes.add(os.readlink(link))
-        return tuple(sorted(exes))
-
-
-    def observe_shm(self):
-        """Return a set of ShmRecords representing current system shm usage."""
-        seen = set()
-        cmd = 'ipcs -m | grep ^0'
-        for line in utils.system_output(cmd, ignore_status=True).splitlines():
-            fields = re.split('\s+', line)
-            shmid = fields[1]
-            owner = fields[2]
-            perms = fields[3]
-            attached = self.find_attached(shmid)
-            seen.add(ShmRecord(owner=owner, perms=perms, attached=attached))
-        return seen
-
-
-    def observe_sems(self):
-        """Return a set of SemaphoreRecords representing current usage."""
-        seen = set()
-        cmd = 'ipcs -s | grep ^0'
-        for line in utils.system_output(cmd, ignore_status=True).splitlines():
-            fields = re.split('\s+', line)
-            seen.add(SemaphoreRecord(owner=fields[2], perms=fields[3]))
-        return seen
-
-
-    def run_once(self):
-        """Main entry point to run the security_SysVIPC autotest."""
-        test_fail = False
-        self.dump_ipcs_to_results()
-        # Check Shared Memory.
-        observed_shm = self.observe_shm()
-        missing = self.expected_shm.difference(observed_shm)
-        extra = observed_shm.difference(self.expected_shm)
-        if missing:
-            logging.error('Expected shm(s) not found:')
-            logging.error(missing)
-        if extra:
-            test_fail = True
-            logging.error('Unexpected shm(s) found:')
-            logging.error(extra)
-
-        # Check Semaphores.
-        observed_sem = self.observe_sems()
-        missing = self.expected_sem.difference(observed_sem)
-        extra = observed_sem.difference(self.expected_sem)
-        if missing:
-            logging.error('Expected semaphore(s) not found:')
-            logging.error(missing)
-        if extra:
-            test_fail = True
-            logging.error('Unexpected semaphore(s) found:')
-            logging.error(extra)
-
-        # Also check Message Queues. Since we currently expect
-        # none, we can avoid over-engineering this check.
-        queues = utils.system_output('ipcs -q | grep ^0', ignore_status=True)
-        if queues:
-            test_fail = True
-            logging.error('Unexpected message queues found:')
-            logging.error(queues)
-
-        if test_fail:
-            raise error.TestFail('SysV IPCs did not match expectations')
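
The removed test's `ipcs` handling amounts to splitting the data rows into (owner, perms, ...) tuples and comparing them with an allowlist. A standalone sketch of that parsing step, using subprocess instead of autotest's utils.system_output and assuming the usual util-linux `ipcs -m` column order:

    import re
    import subprocess
    from collections import namedtuple

    ShmRecord = namedtuple('ShmRecord', ['shmid', 'owner', 'perms'])

    def observe_shm():
        """Parse `ipcs -m` data rows (they start with a 0x... key) into records."""
        out = subprocess.run(['ipcs', '-m'], capture_output=True, text=True).stdout
        seen = set()
        for line in out.splitlines():
            if not line.startswith('0'):
                continue
            fields = re.split(r'\s+', line.strip())
            # Assumed column order (typical util-linux ipcs): key, id, owner, perms, ...
            seen.add(ShmRecord(shmid=fields[1], owner=fields[2], perms=fields[3]))
        return seen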
diff --git a/client/site_tests/security_x86Registers/control b/client/site_tests/security_x86Registers/control
deleted file mode 100644
index 498135f..0000000
--- a/client/site_tests/security_x86Registers/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "security_x86Registers"
-PURPOSE = "Verify that CPU registers on x86 are set correctly."
-CRITERIA = """
-This test will fail if any of the security-related CPU registers aren't set
-to expected values.
-"""
-TIME = "SHORT"
-ATTRIBUTES = "suite:security"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "security"
-TEST_TYPE = "client"
-
-DOC = """
-This test reads CPU MSRs to determine whether they have been set to the
-expected values from a security standpoint. It also goes through a
-suspend/resume cycle to re-verify the values once resumed.
-"""
-
-job.run_test('security_x86Registers')
diff --git a/client/site_tests/security_x86Registers/security_x86Registers.py b/client/site_tests/security_x86Registers/security_x86Registers.py
deleted file mode 100755
index 094efb4..0000000
--- a/client/site_tests/security_x86Registers/security_x86Registers.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, time
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.power import power_utils
-from autotest_lib.client.cros.power import sys_power
-
-MSR_POSITIVE = {
-    # IA32_FEATURE_CONTROL[2:0]
-    #   0 - Lock bit (1 = locked)
-    #   1 - Enable VMX in SMX operation
-    #   2 - Enable VMX outside SMX operation
-    'Atom': {
-        # Some CPUs reporting as "Atom" have VMX enabled.
-        },
-    'Core M': {
-        # Some CPUs reporting as "Core M" have VMX enabled.
-        },
-    'Core': {
-        # Some CPUs reporting as "Core" have VMX enabled.
-        },
-    'AMD': {
-        # VM_CR MSR (C001_0114h) with SVMDIS Bit 4
-        # can be used to lock writes to EFER.SVME.
-        #   0 - writes to EFER.SVME are not blocked
-        #   1 - writes to EFER treat EFER.SVME as MBZ
-        '0xc0010114':  [('4', 0)],
-        },
-    }
-
-MSR_NEGATIVE = {
-    'Atom': {
-        # No board has all bits set so this should fail.
-        '0x3a':  [('2:0', 7)],
-        },
-    'Core M': {
-        # No board has all bits set so this should fail.
-        '0x3a':  [('2:0', 7)],
-        },
-    'Core': {
-        # No board has all bits set so this should fail.
-        '0x3a':  [('2:0', 7)],
-        },
-    'AMD': {
-        # Inverted from positive case: none of these bits should be set.
-        '0xc0010114':  [('4', 1)],
-        },
-    }
-
-RCBA_POSITIVE = {
-    'Atom': {
-        # GCS.BILD is not set on H2C UEFI Firmware. :(
-        # https://code.google.com/p/chromium/issues/detail?id=269633
-        '0x3410': [('0', 0)],
-        },
-    'Core M': {
-        # GCS (General Control and Status) register, BILD (BIOS Interface
-        # Lock-Down) bit should be set.
-        '0x3410': [('0', 1)],
-        },
-    'Core': {
-        # GCS (General Control and Status) register, BILD (BIOS Interface
-        # Lock-Down) bit should be set.
-        '0x3410': [('0', 1)],
-        },
-    'AMD': {
-        # Skipping this test as there is no register to change
-        # reset vector on Stoney. NA for Stoney.
-        },
-    }
-
-RCBA_NEGATIVE = {
-    'Atom': {
-        # GCS register, BILD bit inverted from positive test.
-        '0x3410': [('0', 1)],
-        },
-    'Core M': {
-        # GCS register, BILD bit inverted from positive test.
-        '0x3410': [('0', 0)],
-        },
-    'Core': {
-        # GCS register, BILD bit inverted from positive test.
-        '0x3410': [('0', 0)],
-        },
-    'AMD': {
-        },
-    }
-
-class security_x86Registers(test.test):
-    """
-    Checks various CPU and firmware registers for security-sensitive safe
-    settings.
-    """
-    version = 1
-
-    def _check_negative_positive(self, name, func, match_neg, match_pos):
-        errors = 0
-
-        # Catch missing test conditions.
-        if len(match_neg) == 0:
-            logging.debug('No inverted %s tests defined!', name)
-        if len(match_pos) == 0:
-            logging.debug('No positive %s tests defined!', name)
-        if len(match_neg) == 0 or len(match_pos) == 0:
-            return errors
-
-        # Negative tests; make sure infrastructure is working.
-        logging.debug("=== BEGIN [expecting %s FAILs] ===", name)
-        if func(match_neg) == 0:
-            logging.error('BAD: inverted %s tests did not fail!', name)
-            errors += 1
-        logging.debug("=== END [expecting %s FAILs] ===", name)
-
-        # Positive tests; make sure values are for real.
-        logging.debug("=== BEGIN [expecting %s oks] ===", name)
-        errors += func(match_pos)
-        logging.debug("=== END [expecting %s oks] ===", name)
-
-        logging.debug("%s errors found: %d", name, errors)
-        return errors
-
-    def _check_msr(self):
-        return self._check_negative_positive('MSR',
-                                             self._registers.verify_msr,
-                                             MSR_NEGATIVE[self._cpu_type],
-                                             MSR_POSITIVE[self._cpu_type])
-
-    def _check_bios(self):
-        return self._check_negative_positive('BIOS',
-                                             self._registers.verify_rcba,
-                                             RCBA_NEGATIVE[self._cpu_type],
-                                             RCBA_POSITIVE[self._cpu_type])
-
-    def _check_all(self):
-        errors = 0
-        errors += self._check_msr()
-        errors += self._check_bios()
-        return errors
-
-    def run_once(self):
-        errors = 0
-
-        cpu_arch = power_utils.get_x86_cpu_arch()
-        if not cpu_arch:
-            cpu_arch = utils.get_cpu_arch()
-            if cpu_arch == "arm":
-                logging.info('OK: skipping x86-only test on %s.', cpu_arch)
-                return
-
-            logging.warning('Unknown CPU with arch "%s".', cpu_arch)
-            return
-
-        if cpu_arch in {"Stoney", "Ryzen"}:
-            self._cpu_type = 'AMD'
-        elif cpu_arch == 'Atom':
-            self._cpu_type = 'Atom'
-        elif cpu_arch == 'Core M':
-            self._cpu_type = 'Core M'
-        else:
-            self._cpu_type = 'Core'
-
-        self._registers = power_utils.Registers()
-
-        # Check running machine.
-        errors += self._check_all()
-
-        # Pause briefly to make sure the RTC is ready for suspend/resume.
-        time.sleep(3)
-        # Suspend the system to RAM and return after 10 seconds.
-        sys_power.do_suspend(10)
-
-        # Check resumed machine.
-        errors += self._check_all()
-
-        if errors > 0:
-            raise error.TestFail('x86 register mismatch detected')
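
The MSR/RCBA tables above pair bit-range specs such as ('2:0', 7) with expected values. A hypothetical helper (not power_utils.Registers, whose real implementation lives elsewhere in autotest) showing how such a spec can be checked against a raw register value:

    def bits(value, spec):
        """Extract the bit field named by spec, e.g. '4' or '2:0'."""
        if ':' in spec:
            hi, lo = (int(part) for part in spec.split(':'))
        else:
            hi = lo = int(spec)
        mask = (1 << (hi - lo + 1)) - 1
        return (value >> lo) & mask

    def count_mismatches(value, expectations):
        """Count (spec, expected) pairs whose field in value differs from expected."""
        return sum(1 for spec, expected in expectations
                   if bits(value, spec) != expected)

    # IA32_FEATURE_CONTROL (0x3a) with lock + both VMX-enable bits set:
    assert bits(0b111, '2:0') == 7
    assert count_mismatches(0b111, [('2:0', 7)]) == 0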
diff --git a/client/site_tests/stub_IdleSuspend/stub_IdleSuspend.py b/client/site_tests/stub_IdleSuspend/stub_IdleSuspend.py
new file mode 100644
index 0000000..60d55ec
--- /dev/null
+++ b/client/site_tests/stub_IdleSuspend/stub_IdleSuspend.py
@@ -0,0 +1,56 @@
+# Lint as: python2, python3
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os, time
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import chrome
+from autotest_lib.client.cros.power import power_suspend, power_utils
+
+
+class stub_IdleSuspend(test.test):
+    """
+    This is not a complete test. It is a stub test that must be run in parallel
+    with power_SuspendStress(method='idle') to control powerd idle values and
+    perform a login.
+    """
+    version = 1
+
+    _IDLE_TIMINGS = {
+            'disable_idle_suspend': 0,
+            'ignore_external_policy': 1,
+            'unplugged_dim_ms': 8000,
+            'unplugged_off_ms': 12000,
+            'unplugged_suspend_ms': 8000,
+            'plugged_dim_ms': 8000,
+            'plugged_off_ms': 12000,
+            'plugged_suspend_ms': 8000,
+    }
+
+    # Don't wait longer than this to start; if power_SuspendStress died
+    # before creating the HWCLOCK_FILE, we might otherwise wait forever.
+    _TEST_START_TIMEOUT = 70
+
+    def run_once(self):
+        with chrome.Chrome():
+            # Just idle while power_SuspendStress does all the work. Existence
+            # of the HWCLOCK_FILE tells us when it starts and when it's done.
+            for _ in range(self._TEST_START_TIMEOUT):
+                time.sleep(1)
+                if os.path.exists(power_suspend.Suspender.HWCLOCK_FILE):
+                    break
+            else:
+                raise error.TestError("Parallel test didn't create Suspender.")
+
+            # These must not be enabled too soon, or the system might suspend
+            # before a wakeup is scheduled. They must not be disabled too late
+            # either, or we might suspend again after the parallel test is done.
+            power_prefs = power_utils.PowerPrefChanger(self._IDLE_TIMINGS)
+
+            while os.path.exists(power_suspend.Suspender.HWCLOCK_FILE):
+                time.sleep(1)
+
+            power_prefs.finalize()
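
The flag-file handshake above (wait up to _TEST_START_TIMEOUT seconds for HWCLOCK_FILE to appear, then block until it disappears) can be expressed as a small standard-library-only helper; the name wait_for_flag_file and the TimeoutError are illustrative assumptions.

    import os
    import time

    def wait_for_flag_file(path, appear_timeout_s=70, poll_s=1):
        """Wait for path to appear (with a timeout), then block until it is gone."""
        deadline = time.time() + appear_timeout_s
        while time.time() < deadline:
            if os.path.exists(path):
                break
            time.sleep(poll_s)
        else:
            raise TimeoutError('%s never appeared' % path)
        while os.path.exists(path):
            time.sleep(poll_s)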
diff --git a/client/site_tests/stub_Pass/control b/client/site_tests/stub_Pass/control
new file mode 100644
index 0000000..fef509d
--- /dev/null
+++ b/client/site_tests/stub_Pass/control
@@ -0,0 +1,25 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "stub_Pass"
+PURPOSE = "Demonstrate success methods of autotests."
+CRITERIA = "This test will always succeed."
+ATTRIBUTES = (
+        "suite:stub, suite:stubclientretries, suite:push_to_prod,"
+        " suite:skylab_staging_test, suite:something_else,"
+        " suite:dev_drone_image_test, suite:infra_qual"
+)
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "stub"
+TEST_TYPE = "client"
+MAX_RESULT_SIZE_KB = 5000
+PY_VERSION = 3
+
+DOC = """
+This is a helper test that will succeed.
+"""
+
+job.run_test('stub_Pass')
diff --git a/client/site_tests/stub_Pass/control.actionable b/client/site_tests/stub_Pass/control.actionable
new file mode 100644
index 0000000..f63ea3c
--- /dev/null
+++ b/client/site_tests/stub_Pass/control.actionable
@@ -0,0 +1,24 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "stub_Pass.actionable"
+PURPOSE = "Demonstrate success methods of autotests."
+CRITERIA = "This test will always succeed."
+ATTRIBUTES = (
+        "suite:stub, suite:stubclientretries, suite:push_to_prod,"
+        " suite:skylab_staging_test, suite:something_else"
+)
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "stub"
+TEST_TYPE = "client"
+DEPENDENCIES = "cleanup-reboot"
+PY_VERSION = 3
+
+DOC = """
+This is a helper test that will succeed.
+"""
+
+job.run_test('stub_Pass', tag='actionable')
diff --git a/client/site_tests/stub_Pass/control.bluetooth b/client/site_tests/stub_Pass/control.bluetooth
new file mode 100755
index 0000000..1b4d162
--- /dev/null
+++ b/client/site_tests/stub_Pass/control.bluetooth
@@ -0,0 +1,22 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "stub_Pass.bluetooth"
+PURPOSE = "Demonstrate DEPENDENCIES in autotests."
+CRITERIA = "This test will always succeed."
+DEPENDENCIES = "bluetooth"
+ATTRIBUTES = "suite:stub, suite:push_to_prod, suite:skylab_staging_test"
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "stub"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This is a helper test that can only run on bluetooth devices,
+and should succeed trivially.
+"""
+
+job.run_test('stub_Pass', tag='bluetooth')
diff --git a/client/site_tests/stub_Pass/control.experimental b/client/site_tests/stub_Pass/control.experimental
new file mode 100644
index 0000000..855d458
--- /dev/null
+++ b/client/site_tests/stub_Pass/control.experimental
@@ -0,0 +1,22 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "stub_Pass.experimental"
+PURPOSE = "Demonstrate success methods of autotests."
+CRITERIA = "This test will always succeed."
+ATTRIBUTES = "suite:stub, suite:stubclientretries, suite:something_else"
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "stub"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This is a helper test that will succeed.
+"""
+
+job.run_test('stub_Pass', tag='experimental')
+
+
diff --git a/client/site_tests/stub_Pass/control.wifichaos b/client/site_tests/stub_Pass/control.wifichaos
new file mode 100644
index 0000000..1bd8291
--- /dev/null
+++ b/client/site_tests/stub_Pass/control.wifichaos
@@ -0,0 +1,24 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "stub_Pass.wifichaos"
+PURPOSE = "To re-image chaos_dut machines nightly."
+CRITERIA = "This test will always succeed."
+ATTRIBUTES = "suite:wifichaos"
+DEPENDENCIES = "chaos_dut"
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "stub"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This is a placeholder test for allowing the scheduler to install tests on the
+chaos_dut machines
+"""
+
+job.run_test('stub_Pass', tag='wifichaos')
+
+
diff --git a/client/site_tests/stub_Pass/def.star b/client/site_tests/stub_Pass/def.star
new file mode 100644
index 0000000..4ba4e1b
--- /dev/null
+++ b/client/site_tests/stub_Pass/def.star
@@ -0,0 +1,53 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+load("//metadata/test_common.star", "test_common")
+
+DOC = """
+This is a helper test that will succeed. Used to verify various
+autotest scheduling features, including pass results, dependencies, etc.
+"""
+
+TESTS = [
+    test_common.define_client_test(
+        test_name = "stub_Pass",
+        purpose = "Demonstrate success methods of autotests.",
+        doc = DOC,
+        owner_emails = ["email_addr@chromium.org"],
+        owner_groups = ["team-mdb-group"],
+        suites = ["stub", "stubclientretries", "push_to_prod",
+            "skylab_staging_test", "something_else"],
+        #TODO: max_result_size_kb = 5000
+    ),
+
+    test_common.define_client_test(
+        test_name = "stub_Pass.actionable",
+        purpose = "Demonstrate success methods of autotests",
+        doc = DOC,
+        owner_emails = ["email_addr@chromium.org"],
+        suites = ["stub", "stubclientretries", "push_to_prod",
+            "skylab_staging_test", "something_else"],
+        #TODO: common_deps = ["cleanup-reboot"],
+        named_args = {"tag": "actionable"},
+    ),
+
+    test_common.define_client_test(
+        test_name = "stub_Pass.bluetooth",
+        purpose = "Demonstrate DEPENDENCIES in autotests.",
+        doc = DOC,
+        owner_emails = ["email_addr@chromium.org"],
+        suites = ["stub", "push_to_prod", "skylab_staging_test"],
+        common_deps = ["bluetooth"],
+        named_args = {"tag": "bluetooth"},
+    ),
+
+    test_common.define_client_test(
+        test_name = "stub_Pass.experimental",
+        purpose = "Demonstrate success methods of autotests.",
+        doc = DOC,
+        owner_emails = ["email_addr@chromium.org"],
+        suites = ["stub", "stubclientretries", "something_else"],
+        named_args = {"tag": "experimental"},
+    ),
+]
diff --git a/client/site_tests/stub_Pass/stub_Pass.py b/client/site_tests/stub_Pass/stub_Pass.py
new file mode 100644
index 0000000..3bd130b
--- /dev/null
+++ b/client/site_tests/stub_Pass/stub_Pass.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.bin import test
+from autotest_lib.client.common_lib import error
+
+
+class stub_Pass(test.test):
+    version = 1
+
+    def run_once(self):
+        return
diff --git a/client/site_tests/stub_Pass/wifichaos.star b/client/site_tests/stub_Pass/wifichaos.star
new file mode 100644
index 0000000..f73b1fe
--- /dev/null
+++ b/client/site_tests/stub_Pass/wifichaos.star
@@ -0,0 +1,22 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+load("//metadata/test_common.star", "test_common")
+
+DOC = """
+This is a placeholder test for allowing the scheduler to install tests on the
+chaos_dut machines.
+"""
+
+TESTS = [
+    test_common.define_client_test(
+        test_name = "stub_Pass.wifichaos",
+        purpose = "To re-image chaos_dut machines nightly.",
+        doc = DOC,
+        # TODO: find real owners for this test.
+        owner_emails = ["email_addr@chromium.org"],
+        suites = ["wifichaos"],
+        # TODO: common_deps = ["chaos_dut"],
+    ),
+]
diff --git a/client/site_tests/suite_HWConfig/control b/client/site_tests/suite_HWConfig/control
deleted file mode 100644
index e84e24c..0000000
--- a/client/site_tests/suite_HWConfig/control
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "suite_HWConfig"
-PURPOSE = "Execute automated hardware configuration checks."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "suite"
-TEST_TYPE = "client"
-
-DOC = """
-This test suite runs automated hardware configuration checks. The purpose of
-the suite is to sanity test all hardware components in less than 5 minutes.
-"""
-
-# Firmware
-job.run_test('firmware_RomSize',
-             constraints=['kb_system_rom_size >= 4096',
-                          'kb_ec_rom_size >= 128'])
-
-# RTC, system
-job.run_test('platform_HighResTimers')
-job.run_test('power_Resume')
-
-# RAM
-job.run_test('hardware_MemoryTotalSize')
-
-# CPU
-job.run_test('power_CPUFreq')
-job.run_test('power_CPUIdle')
-
-# Display
-job.run_test('hardware_Backlight')
-
-# SSD
-job.run_test('hardware_DiskSize',
-             constraints=['gb_main_disk_size >= 8'])
-job.run_test('hardware_SsdDetection')
-
-# CPU, RAM, SSD
-job.run_test('hardware_SAT', seconds=20)
-
-# Network
-job.run_test('network_WiFiCaps')
-
-# GPU
-job.run_test('graphics_GLBench')
diff --git a/client/site_tests/suite_HWQual/README.txt b/client/site_tests/suite_HWQual/README.txt
index 9be6de6..894c70d 100644
--- a/client/site_tests/suite_HWQual/README.txt
+++ b/client/site_tests/suite_HWQual/README.txt
@@ -3,7 +3,7 @@
 found in the LICENSE file.
 
 
-This document describes the steps to go through in order to run Chrome OS
+This document describes the steps to go through in order to run ChromeOS
 hardware qualification on a device under test.
 
 ================================================================================
@@ -28,7 +28,7 @@
 
 
 - Create an installation directory on the Autotest server for the
-  Chrome OS hardware qualification package. The rest of the
+  ChromeOS hardware qualification package. The rest of the
   instructions assume that you're installing the package in the
   current user home directory ($HOME/).
 
@@ -43,7 +43,7 @@
   $ cd $HOME/ && tar xjf chromeos-hwqual-TAG.tar.bz2
 
 
-- Install the Chrome OS test image on the DUT. The USB test image is
+- Install the ChromeOS test image on the DUT. The USB test image is
   available in:
 
   $HOME/chromeos-hwqual-TAG/chromeos-hwqual-usb.img
@@ -64,8 +64,8 @@
 
   - Plug the USB device into the DUT and boot from it.
 
-  - Log in to Chrome OS.  Start the Chrome OS shell by pressing Ctrl-Alt-T.
-    Install Chrome OS on the DUT:
+  - Log in to ChromeOS.  Start the ChromeOS shell by pressing Ctrl-Alt-T.
+    Install ChromeOS on the DUT:
 
     crosh> install
 
@@ -163,7 +163,7 @@
   $ ./server/autoserv -r results.audio_ext -m <DUT_IP> \
                   -c client/site_tests/suite_HWQual/control.audio
 
-- Run the Keyboard test : 
+- Run the Keyboard test :
   (Wait several seconds after running the test. Then strike the "Search" key,
    e.g. the key above Left Shift and below Tab)
 
diff --git a/client/site_tests/suite_HWQual/control.audio b/client/site_tests/suite_HWQual/control.audio
index ee4cdbb..ac0db1f 100644
--- a/client/site_tests/suite_HWQual/control.audio
+++ b/client/site_tests/suite_HWQual/control.audio
@@ -12,6 +12,7 @@
 TEST_CATEGORY = 'PLAYBACKCAPTURE'
 TEST_CLASS = "audio"
 TEST_TYPE = 'client'
+PY_VERSION = 3
 
 DOC = """
 This is a semi-automated test that exercises the audio devices on the system.
diff --git a/client/site_tests/suite_HWQual/control.auto b/client/site_tests/suite_HWQual/control.auto
index 96725cc..7bd7712 100644
--- a/client/site_tests/suite_HWQual/control.auto
+++ b/client/site_tests/suite_HWQual/control.auto
@@ -2,12 +2,13 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "HWQualAuto"
 TIME = "LONG"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "suite"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test suite runs fully automated client-side hardware qualification tests.
diff --git a/client/site_tests/suite_HWQual/control.battery_charge_time b/client/site_tests/suite_HWQual/control.battery_charge_time
index c88db6d..b788bd0 100644
--- a/client/site_tests/suite_HWQual/control.battery_charge_time
+++ b/client/site_tests/suite_HWQual/control.battery_charge_time
@@ -2,12 +2,13 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "HWQualBatteryChargeTime"
 TIME = "LONG"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "suite"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test measures the battery charging time and enforces constraints. Before
diff --git a/client/site_tests/suite_HWQual/control.battery_load b/client/site_tests/suite_HWQual/control.battery_load
index 24048cb..2ff5e84 100644
--- a/client/site_tests/suite_HWQual/control.battery_load
+++ b/client/site_tests/suite_HWQual/control.battery_load
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "suite_HWQual.power_LoadTest"
 PURPOSE = "Measure power draw when system is under load."
 CRITERIA = "This test is a benchmark."
@@ -11,6 +11,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "client"
 ATTRIBUTES = "suite:power_loadtest"
+PY_VERSION = 3
 
 DOC = """
 This test runs a load test consisting of cycling though web pages, playing
diff --git a/client/site_tests/suite_HWQual/control.keyboard b/client/site_tests/suite_HWQual/control.keyboard
index 9e436b9..04b2d4f 100644
--- a/client/site_tests/suite_HWQual/control.keyboard
+++ b/client/site_tests/suite_HWQual/control.keyboard
@@ -6,10 +6,12 @@
 TEST_CLASS = 'Hardware'
 TEST_CATEGORY = 'Functional'
 TEST_TYPE = 'Client'
+PY_VERSION = 3
+
 DOC = """
 This test uses a modified version of evtest to probe for and test the keyboard.
 The test finds the keyboard event in /dev/input/, and queries to ensure that
-the driver presents all of the expected Chrome OS keyboard keys.
+the driver presents all of the expected ChromeOS keyboard keys.
 """
 
 job.run_test('hardware_Keyboard')
diff --git a/client/site_tests/suite_HWQual/control.max_power_draw b/client/site_tests/suite_HWQual/control.max_power_draw
index 902a0a3..d830f7a 100644
--- a/client/site_tests/suite_HWQual/control.max_power_draw
+++ b/client/site_tests/suite_HWQual/control.max_power_draw
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "suite_HWQual.power_Draw"
 PURPOSE = "Measure how much power is drawn over a given amount of time."
 CRITERIA = "This test is a benchmark."
@@ -10,6 +10,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test will run for 200 seconds.
diff --git a/client/site_tests/suite_HWQual/control.probe_ac b/client/site_tests/suite_HWQual/control.probe_ac
index b75dd2b..ce3cd65 100644
--- a/client/site_tests/suite_HWQual/control.probe_ac
+++ b/client/site_tests/suite_HWQual/control.probe_ac
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "suite_HWQual.probe_ac"
 ATTRIBUTES = "suite:power_build"
 PURPOSE = "Confirm that AC driver is loaded and functioning."
@@ -21,6 +21,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This is a sample test that probes sysfs and makes sure that AC driver
diff --git a/client/site_tests/suite_HWQual/control.probe_bat b/client/site_tests/suite_HWQual/control.probe_bat
index 7fc0eba..671a04f 100644
--- a/client/site_tests/suite_HWQual/control.probe_bat
+++ b/client/site_tests/suite_HWQual/control.probe_bat
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "suite_HWQual.probe_bat"
 PURPOSE = "Confirm that battery driver is loaded and functioning."
 CRITERIA = """\
@@ -20,6 +20,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "power"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This is a sample test that probes sysfs and makes sure that batteries driver
diff --git a/client/site_tests/suite_TPM/control b/client/site_tests/suite_TPM/control
index 06093f3..293a98d 100644
--- a/client/site_tests/suite_TPM/control
+++ b/client/site_tests/suite_TPM/control
@@ -2,17 +2,19 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
-NAME = "TPMSmogcheck"
+AUTHOR = "ChromeOS Team"
+NAME = "suite_TPM.TPMSmogcheck"
 PURPOSE = "Execute automated TPM functionality checks."
 TIME = "SHORT"  # <= 15 minutes in execution
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "suite"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 This test suite runs automated TPM Smogcheck tests. The purpose of
-the suite is to sanity test basic TPM functionality in less than 10 minutes.
+the suite is to confidence-test basic TPM functionality in less than 10
+minutes.
 """
 
 # TPM_TakeOwnership
diff --git a/client/site_tests/telemetry_AFDOGenerateClient/OWNERS b/client/site_tests/telemetry_AFDOGenerateClient/OWNERS
deleted file mode 100644
index 22122bd..0000000
--- a/client/site_tests/telemetry_AFDOGenerateClient/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-include chromiumos/third_party/toolchain-utils:OWNERS.toolchain
diff --git a/client/site_tests/telemetry_AFDOGenerateClient/control b/client/site_tests/telemetry_AFDOGenerateClient/control
deleted file mode 100644
index 12a34fb..0000000
--- a/client/site_tests/telemetry_AFDOGenerateClient/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "llozano@chromium.org cmtice@chromium.org bjanakiraman@chromium.org"
-NAME = "telemetry_AFDOGenerateClient"
-PURPOSE = "Navigate a set of pages so that a Chrome profile can be captured"
-ATTRIBUTES = "suite:AFDO_page_replay"
-MAX_RESULT_SIZE_KB = 512000
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "Chrome AFDO"
-TEST_TYPE = "client"
-JOB_RETRIES = 1
-
-DOC = """
-Run a pre-defined set of pages for the DUT for Chrome profile collection.
-
-The purpose of this test is to exercise chrome with a meaningful set
-of pages while a profile of Chrome is captured. It also aims at using
-the minimum set of functionality from Telemetry since Telemetry is not
-very stable on ChromeOS at this point.
-
-This test is designed to be called from the telemetry_AFDOGenerate
-server test. The server test will start the "perf" profiling tool on
-the DUT before starting this test. It will also capture the chrome
-profile and upload it to Google Storage to be used for an optimized
-build of Chrome.
-"""
-
-job.run_test('telemetry_AFDOGenerateClient')
diff --git a/client/site_tests/telemetry_AFDOGenerateClient/telemetry_AFDOGenerateClient.py b/client/site_tests/telemetry_AFDOGenerateClient/telemetry_AFDOGenerateClient.py
deleted file mode 100644
index 5916fe3..0000000
--- a/client/site_tests/telemetry_AFDOGenerateClient/telemetry_AFDOGenerateClient.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Run a pre-defined set of pages on the DUT for Chrome profile collection.
-
-The purpose of this test is to exercise chrome with a meaningful set
-of pages while a profile of Chrome is captured. It also aims at using
-the minimum set of functionality from Telemetry since Telemetry is not
-very stable on ChromeOS at this point.
-
-This test is designed to be called from the telemetry_AFDOGenerate
-server test. The server test will start the "perf" profiling tool on
-the DUT before starting this test. It will also capture the chrome
-profile and upload it to Google Storage to be used for an optimized
-build of Chrome.
-"""
-
-import logging
-import os
-import sys
-import time
-import traceback
-
-from autotest_lib.client.common_lib.cros import chrome
-from autotest_lib.client.bin import test
-from autotest_lib.client.cros import httpd
-
-# List of page cycler pages to use for Chrome profiling
-PAGE_CYCLER_BENCHMARKS = [
-        'alexa_us',
-        'bloat',
-        'dhtml',
-        'dom',
-        'intl1',
-        'intl2',
-        'morejs',
-        'morejsnp',
-        'moz',
-        'moz2' ]
-
-HTTP_PORT = 8000
-FILE_URL_PREFIX = 'http://localhost:%d/test_src/' % HTTP_PORT
-
-class telemetry_AFDOGenerateClient(test.test):
-    """
-    Run a set of pre-defined set of pages to exercise Chrome so that
-    we can capture a Chrome profile.
-    """
-    version = 1
-
-
-    def initialize(self):
-        """Setup required DEPS and start the http listener."""
-        dep = 'page_cycler_dep'
-        dep_dir = os.path.join(self.autodir, 'deps', dep)
-        self.job.install_pkg(dep, 'dep', dep_dir)
-        self.listener = httpd.HTTPListener(HTTP_PORT, docroot=dep_dir)
-        self.listener.run()
-
-
-    def cleanup(self):
-        """Stop the active http listener."""
-        self.listener.stop()
-
-
-    def run_once(self):
-        """Display predetermined set of pages so that we can profile Chrome."""
-        with chrome.Chrome() as cr:
-            for benchmark in PAGE_CYCLER_BENCHMARKS:
-                self._try_page_cycler(cr, benchmark)
-
-    def _try_page_cycler(self, cr, benchmark):
-        """Try executing a page cycler and recover if browser dies.
-
-        Navigates to the specified page_cycler, checks if the browser
-        died while executing it and waits until browser recovers.
-
-        @param cr: instance of chrome.Chrome class to control chrome.
-        @param benchmark: page_cycler page to display.
-        """
-        if cr.did_browser_crash(
-                lambda: self._navigate_page_cycler(cr, benchmark)):
-            logging.info('Browser died while navigating %s', benchmark)
-            logging.info('Trying to continue...')
-            cr.wait_for_browser_to_come_up()
-
-
-    def _navigate_page_cycler(self, cr, benchmark):
-        """Navigate to a specific page_cycler page.
-
-        Navigates to the specified page_cycler and waits for the value
-        of the __pc_done cookie to indicate it is done.
-
-        @param cr: instance of chrome.Chrome class to control chrome.
-        @param benchmark: page_cycler page to display.
-        """
-        PC_START_PAGE = 'data/page_cycler/%s/start.html?auto=1'
-        PC_DONE_EXP = 'window.document.cookie.indexOf("__pc_done=1") >= 0'
-        tab = cr.browser.tabs.New()
-        try:
-            tab.Activate()
-            logging.info('Navigating to page cycler %s', benchmark)
-            start_time = time.time()
-            benchmark_start_page = PC_START_PAGE % benchmark
-            tab.Navigate(FILE_URL_PREFIX + benchmark_start_page)
-            tab.WaitForDocumentReadyStateToBeComplete(timeout=180)
-            tab.WaitForJavaScriptCondition(PC_DONE_EXP, timeout=600)
-            tab.Close()
-            end_time = time.time()
-            logging.info('Completed page cycler %s in %f seconds',
-                         benchmark, end_time - start_time)
-        except Exception as unk_exc:
-            end_time = time.time()
-            logging.info('After navigating %s for %f seconds got exception %s',
-                         benchmark, end_time - start_time, str(unk_exc))
-            traceback.print_exc(file=sys.stdout)
-            raise
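
Condensed, the removed client boils down to a navigate-and-wait loop over Telemetry tab calls (Navigate, WaitForDocumentReadyStateToBeComplete, WaitForJavaScriptCondition), all of which appear in the file above. The wrapper function cycle_pages below is an illustrative sketch and still assumes an autotest/Telemetry environment on the DUT.

    from autotest_lib.client.common_lib.cros import chrome

    def cycle_pages(urls, done_condition, ready_timeout=180, done_timeout=600):
        """Open each URL in a fresh tab and wait for the page's done condition."""
        with chrome.Chrome() as cr:
            for url in urls:
                tab = cr.browser.tabs.New()
                tab.Activate()
                tab.Navigate(url)
                tab.WaitForDocumentReadyStateToBeComplete(timeout=ready_timeout)
                tab.WaitForJavaScriptCondition(done_condition, timeout=done_timeout)
                tab.Close()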
diff --git a/client/site_tests/telemetry_Check/control b/client/site_tests/telemetry_Check/control
new file mode 100644
index 0000000..e2836eb
--- /dev/null
+++ b/client/site_tests/telemetry_Check/control
@@ -0,0 +1,27 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "telemetry_Check"
+PURPOSE = "Run telemetry_check.py."
+CRITERIA = """
+Test will fail if telemetry_check fails.
+"""
+# crbug.com/1188852: Test fails on novato consistently.
+ATTRIBUTES = "suite:bvt-perbuild"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "login"
+TEST_TYPE = "client"
+PY_VERSION = 3
+
+DOC = """
+This test runs telemetry_check.py.
+"""
+
+job.run_test('telemetry_Check',
+             count=1,
+             run_cryptohome=True,
+             run_incognito=False,  # crbug.com/970065.
+             run_screenlock=True)
diff --git a/client/site_tests/telemetry_Check/telemetry_Check.py b/client/site_tests/telemetry_Check/telemetry_Check.py
new file mode 100644
index 0000000..19cb12d
--- /dev/null
+++ b/client/site_tests/telemetry_Check/telemetry_Check.py
@@ -0,0 +1,16 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.bin import test, telemetry_check
+
+
+class telemetry_Check(test.test):
+    """Run telemetry_check."""
+    version = 1
+
+    def run_once(self, count, run_cryptohome, run_incognito, run_screenlock):
+        telemetry_check.TelemetryCheck(count=count,
+                                       run_cryptohome=run_cryptohome,
+                                       run_incognito=run_incognito,
+                                       run_screenlock=run_screenlock).Run()
diff --git a/client/site_tests/telemetry_Sanity/control b/client/site_tests/telemetry_Sanity/control
deleted file mode 100644
index fd8de39..0000000
--- a/client/site_tests/telemetry_Sanity/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "telemetry_Sanity"
-PURPOSE = "Run telemetry_sanity.py."
-CRITERIA = """
-Test will fail if telemetry_sanity fails.
-"""
-# crbug.com/930157: This test is flaky and blocks CQ, disable it until fixed.
-ATTRIBUTES = "suite:bvt-perbuild, suite:smoke"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "login"
-TEST_TYPE = "client"
-
-DOC = """
-This test runs telemetry_sanity.py
-"""
-
-job.run_test('telemetry_Sanity',
-             count=1,
-             run_cryptohome=True,
-             run_incognito=False,  # crbug.com/970065.
-             run_screenlock=True)
diff --git a/client/site_tests/telemetry_Sanity/telemetry_Sanity.py b/client/site_tests/telemetry_Sanity/telemetry_Sanity.py
deleted file mode 100644
index 5f9c7c4..0000000
--- a/client/site_tests/telemetry_Sanity/telemetry_Sanity.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test, telemetry_sanity
-
-
-class telemetry_Sanity(test.test):
-    """Run telemetry_sanity."""
-    version = 1
-
-    def run_once(self, count, run_cryptohome, run_incognito, run_screenlock):
-        telemetry_sanity.TelemetrySanity(
-            count=count,
-            run_cryptohome=run_cryptohome,
-            run_incognito=run_incognito,
-            run_screenlock=run_screenlock).Run()
diff --git a/client/site_tests/telemetry_UnitTests/control.guest b/client/site_tests/telemetry_UnitTests/control.guest
deleted file mode 100644
index 797f039..0000000
--- a/client/site_tests/telemetry_UnitTests/control.guest
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith@chromium.org"
-NAME = "telemetry_UnitTests_guest"
-PURPOSE = "Run the Telemetry unit tests as guest (incognito)."
-ATTRIBUTES = "suite:telemetry_unit"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "desktopui"
-TEST_TYPE = "client"
-
-DOC = """
-Verifies that all Telemetry unit tests pass when run as guest (incognito).
-"""
-
-job.run_test('telemetry_UnitTests', browser_type='system-guest', tag='guest',
-             unit_tests=['BrowserTest'], perf_tests=[])
diff --git a/client/site_tests/telemetry_UnitTests/control.perf b/client/site_tests/telemetry_UnitTests/control.perf
deleted file mode 100644
index 07ad642..0000000
--- a/client/site_tests/telemetry_UnitTests/control.perf
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith@chromium.org"
-NAME = "telemetry_UnitTests_perf"
-PURPOSE = "Run the Telemetry perf unit tests as a logged-in user."
-ATTRIBUTES = "suite:telemetry_unit"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "desktopui"
-TEST_TYPE = "client"
-
-DOC = """
-Verifies that all Telemetry perf unit tests pass when run as a logged-in user.
-"""
-
-job.run_test('telemetry_UnitTests', browser_type='system', tag='perf',
-             unit_tests=[], perf_tests=[''])
diff --git a/client/site_tests/telemetry_UnitTests/control.user b/client/site_tests/telemetry_UnitTests/control.user
deleted file mode 100644
index aaeb514..0000000
--- a/client/site_tests/telemetry_UnitTests/control.user
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith@chromium.org"
-NAME = "telemetry_UnitTests"
-PURPOSE = "Run the Telemetry unit tests as a logged-in user."
-ATTRIBUTES = "suite:telemetry_unit"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "desktopui"
-TEST_TYPE = "client"
-
-DOC = """
-Verifies that all Telemetry unit tests pass when run as a logged-in user.
-"""
-
-job.run_test('telemetry_UnitTests', browser_type='system', tag='user',
-             unit_tests=[''], perf_tests=[])
diff --git a/client/site_tests/telemetry_UnitTests/telemetry_UnitTests.py b/client/site_tests/telemetry_UnitTests/telemetry_UnitTests.py
deleted file mode 100644
index 1abe6fb..0000000
--- a/client/site_tests/telemetry_UnitTests/telemetry_UnitTests.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from telemetry.testing import run_chromeos_tests
-
-
-class telemetry_UnitTests(test.test):
-    """This is a client side wrapper for the Telemetry unit tests."""
-    version = 1
-
-
-    def run_once(self, browser_type, unit_tests, perf_tests):
-        """Runs telemetry/perf unit tests.
-
-        @param browser_type: The string type of browser to use, e.g., 'system'.
-        @param unit_tests: list of unit tests to run, [''] is all tests,
-                           [] is no tests.
-        @param perf_tests: list of perf unit tests to run, [''] is all tests,
-                           [] is no tests.
-        """
-        tests_to_run = []
-        if unit_tests:
-            tests_to_run.append((
-                    '/usr/local/telemetry/src/third_party/catapult/telemetry',
-                    unit_tests))
-        if perf_tests:
-            tests_to_run.append(('/usr/local/telemetry/src/tools/perf',
-                                 perf_tests))
-        error_str = run_chromeos_tests.RunChromeOSTests(browser_type,
-                                                        tests_to_run)
-        if error_str:
-            raise error.TestFail(error_str)
diff --git a/client/site_tests/touch_GestureNav/control b/client/site_tests/touch_GestureNav/control
index 006fdfa..1639440 100644
--- a/client/site_tests/touch_GestureNav/control
+++ b/client/site_tests/touch_GestureNav/control
@@ -9,12 +9,13 @@
 This test will fail if, after playback of two fingers for back/forward
 long swipes, the active tab does not navigate as expected.
 """
-ATTRIBUTES = "suite:touch"
+ATTRIBUTES = "suite:touch_replay"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchpad"
+PY_VERSION = 3
 
 DOC = """
 Plays back touchpad gestures (if present) and checks for back/fwd navigation.
diff --git a/client/site_tests/touch_GestureNav/touch_GestureNav.py b/client/site_tests/touch_GestureNav/touch_GestureNav.py
index 191f15d..bd6f9bc 100644
--- a/client/site_tests/touch_GestureNav/touch_GestureNav.py
+++ b/client/site_tests/touch_GestureNav/touch_GestureNav.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -51,7 +52,7 @@
     def run_once(self):
         """Entry point of this test."""
         if not self._is_testable():
-            return
+            raise error.TestNAError('Missing input data for this board name.')
 
         # Log in and start test.
         with chrome.Chrome(autotest_ext=True,
diff --git a/client/site_tests/touch_HasInput/control.stylus b/client/site_tests/touch_HasInput/control.stylus
index 77b0dfd..9214f0b 100644
--- a/client/site_tests/touch_HasInput/control.stylus
+++ b/client/site_tests/touch_HasInput/control.stylus
@@ -14,6 +14,7 @@
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "stylus"
+PY_VERSION = 3
 
 DOC = """
 If there is no /dev/input/event* that is a stylus, test will fail.  Test
diff --git a/client/site_tests/touch_HasInput/control.touchpad b/client/site_tests/touch_HasInput/control.touchpad
index c3cdc72..a69dbdc 100644
--- a/client/site_tests/touch_HasInput/control.touchpad
+++ b/client/site_tests/touch_HasInput/control.touchpad
@@ -14,6 +14,7 @@
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchpad"
+PY_VERSION = 3
 
 DOC = """
 If there is no /dev/input/event* that is a touchpad, test will fail.  Test
diff --git a/client/site_tests/touch_HasInput/control.touchscreen b/client/site_tests/touch_HasInput/control.touchscreen
index 779629b..c4ba2de 100644
--- a/client/site_tests/touch_HasInput/control.touchscreen
+++ b/client/site_tests/touch_HasInput/control.touchscreen
@@ -14,6 +14,7 @@
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchscreen"
+PY_VERSION = 3
 
 DOC = """
 If there is no /dev/input/event* that is a touchscreen, test will fail.  Test
diff --git a/client/site_tests/touch_HasInput/touch_HasInput.py b/client/site_tests/touch_HasInput/touch_HasInput.py
index daeb0b3..cba75d1 100644
--- a/client/site_tests/touch_HasInput/touch_HasInput.py
+++ b/client/site_tests/touch_HasInput/touch_HasInput.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/touch_MouseScroll/control b/client/site_tests/touch_MouseScroll/control
index c5f5f2f..8f9341c 100644
--- a/client/site_tests/touch_MouseScroll/control
+++ b/client/site_tests/touch_MouseScroll/control
@@ -14,6 +14,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
+PY_VERSION = 3
 
 DOC = """
 Uses a mouse description file to emulate a USB mouse.  Plays back a single
diff --git a/client/site_tests/touch_MouseScroll/touch_MouseScroll.py b/client/site_tests/touch_MouseScroll/touch_MouseScroll.py
index 7d1283d..df99662 100644
--- a/client/site_tests/touch_MouseScroll/touch_MouseScroll.py
+++ b/client/site_tests/touch_MouseScroll/touch_MouseScroll.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/touch_ScrollDirection/control b/client/site_tests/touch_ScrollDirection/control
index 245004d..2727ae5 100644
--- a/client/site_tests/touch_ScrollDirection/control
+++ b/client/site_tests/touch_ScrollDirection/control
@@ -9,12 +9,13 @@
 This test will fail if, after playback of touch events, no scrolling is detected
 or if scrolling is in the wrong direction.
 """
-ATTRIBUTES = "suite:touch"
+ATTRIBUTES = "suite:touch_replay"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchpad"
+PY_VERSION = 3
 
 DOC = """
 Detects presence of touchscreen to determine whether Australian scrolling
diff --git a/client/site_tests/touch_ScrollDirection/touch_ScrollDirection.py b/client/site_tests/touch_ScrollDirection/touch_ScrollDirection.py
index 69708ed..db92bc4 100644
--- a/client/site_tests/touch_ScrollDirection/touch_ScrollDirection.py
+++ b/client/site_tests/touch_ScrollDirection/touch_ScrollDirection.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -82,7 +83,7 @@
     def run_once(self):
         """Entry point of this test."""
         if not self._is_testable():
-            return
+            raise error.TestNAError('Missing input data for this board name.')
 
         # Log in and start test.
         with chrome.Chrome(autotest_ext=True,
diff --git a/client/site_tests/touch_StylusTaps/control b/client/site_tests/touch_StylusTaps/control
index 16e541a..7957f4a 100644
--- a/client/site_tests/touch_StylusTaps/control
+++ b/client/site_tests/touch_StylusTaps/control
@@ -9,12 +9,13 @@
 This test will fail if, after playback of touch events, taps do not appear
 on test page.
 """
-ATTRIBUTES = "suite:touch"
+ATTRIBUTES = "suite:touch_replay"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "stylus"
+PY_VERSION = 3
 
 DOC = """
 Uses kernel playback and a javascript page to play and listen for stylus taps.
diff --git a/client/site_tests/touch_StylusTaps/touch_StylusTaps.py b/client/site_tests/touch_StylusTaps/touch_StylusTaps.py
index 3b351ff..035f9ac 100644
--- a/client/site_tests/touch_StylusTaps/touch_StylusTaps.py
+++ b/client/site_tests/touch_StylusTaps/touch_StylusTaps.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -39,7 +40,7 @@
         self._filepaths = self._find_test_files('stylus', [self._CLICK_NAME])
         if not self._filepaths:
             logging.info('Missing gesture files, Aborting test.')
-            return
+            raise error.TestNAError('Missing input data for this board name.')
 
         # Log in and start test.
         with chrome.Chrome(init_network_controller=True) as cr:
diff --git a/client/site_tests/touch_TabSwitch/control b/client/site_tests/touch_TabSwitch/control
index a9fb19e..cc57da3 100644
--- a/client/site_tests/touch_TabSwitch/control
+++ b/client/site_tests/touch_TabSwitch/control
@@ -9,12 +9,13 @@
 This test will fail if, after playback of three finger left/right swipes,
 the active tab does not change as expected.
 """
-ATTRIBUTES = "suite:touch"
+ATTRIBUTES = "suite:touch_replay"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchpad"
+PY_VERSION = 3
 
 DOC = """
 Plays back touchpad gestures (if present) and checks for tab switching.
diff --git a/client/site_tests/touch_TabSwitch/touch_TabSwitch.py b/client/site_tests/touch_TabSwitch/touch_TabSwitch.py
index 8a3c2bf..08da75f 100644
--- a/client/site_tests/touch_TabSwitch/touch_TabSwitch.py
+++ b/client/site_tests/touch_TabSwitch/touch_TabSwitch.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -87,7 +88,7 @@
     def run_once(self):
         """Entry point of this test."""
         if not self._is_testable():
-            return
+            raise error.TestNAError('Missing input data for this board name.')
 
         # Log in and start test.
         with chrome.Chrome(autotest_ext=True,
diff --git a/client/site_tests/touch_TapSettings/control b/client/site_tests/touch_TapSettings/control
index 8098e0c..fc16029 100644
--- a/client/site_tests/touch_TapSettings/control
+++ b/client/site_tests/touch_TapSettings/control
@@ -9,12 +9,13 @@
 This test will fail if, after playback of touch events, tap-to-click or tap
 dragging does not work when supposed to or works when not supposed to.
 """
-ATTRIBUTES = "suite:touch"
+ATTRIBUTES = "suite:touch_replay"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchpad"
+PY_VERSION = 3
 
 DOC = """
 Uses javascript page to listen for mouse clicks and drags.  If tap-to-click is
diff --git a/client/site_tests/touch_TapSettings/touch_TapSettings.py b/client/site_tests/touch_TapSettings/touch_TapSettings.py
index 50de568..4bf3cb1 100644
--- a/client/site_tests/touch_TapSettings/touch_TapSettings.py
+++ b/client/site_tests/touch_TapSettings/touch_TapSettings.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -89,7 +90,7 @@
     def run_once(self):
         """Entry point of this test."""
         if not self._is_testable():
-            return
+            raise error.TestNAError('Missing input data for this board name.')
 
         # Log in and start test.
         with chrome.Chrome(autotest_ext=True,
diff --git a/client/site_tests/touch_TouchscreenScroll/control b/client/site_tests/touch_TouchscreenScroll/control
index ef2f82b..4c92c66 100644
--- a/client/site_tests/touch_TouchscreenScroll/control
+++ b/client/site_tests/touch_TouchscreenScroll/control
@@ -9,12 +9,13 @@
 This test will fail if, after playback of touchscreen events, no scrolling is
 detected or if scrolling is in the wrong direction.
 """
-ATTRIBUTES = "suite:touch"
+ATTRIBUTES = "suite:touch_replay"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchscreen"
+PY_VERSION = 3
 
 DOC = """
 Plays back touchscreen movements and checks for scroll.
diff --git a/client/site_tests/touch_TouchscreenScroll/touch_TouchscreenScroll.py b/client/site_tests/touch_TouchscreenScroll/touch_TouchscreenScroll.py
index c835301..118c543 100644
--- a/client/site_tests/touch_TouchscreenScroll/touch_TouchscreenScroll.py
+++ b/client/site_tests/touch_TouchscreenScroll/touch_TouchscreenScroll.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -75,7 +76,7 @@
     def run_once(self):
         """Entry point of this test."""
         if not self._is_testable():
-            return
+            raise error.TestNAError('Missing input data for this board name.')
 
         # Log in and start test.
         with chrome.Chrome(autotest_ext=True,
diff --git a/client/site_tests/touch_TouchscreenTaps/control b/client/site_tests/touch_TouchscreenTaps/control
index 0819392..a717ef7 100644
--- a/client/site_tests/touch_TouchscreenTaps/control
+++ b/client/site_tests/touch_TouchscreenTaps/control
@@ -9,12 +9,13 @@
 This test will fail if, after playback of touch events, taps do not appear
 on test page.
 """
-ATTRIBUTES = "suite:touch"
+ATTRIBUTES = "suite:touch_replay"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchscreen"
+PY_VERSION = 3
 
 DOC = """
 Uses javascript page to listen for touchscreen taps.  If all goes well, the
diff --git a/client/site_tests/touch_TouchscreenTaps/touch_TouchscreenTaps.py b/client/site_tests/touch_TouchscreenTaps/touch_TouchscreenTaps.py
index 523d502..f79d0e9 100644
--- a/client/site_tests/touch_TouchscreenTaps/touch_TouchscreenTaps.py
+++ b/client/site_tests/touch_TouchscreenTaps/touch_TouchscreenTaps.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -56,7 +57,7 @@
     def run_once(self):
         """Entry point of this test."""
         if not self._is_testable():
-            return
+            raise error.TestNAError('Missing input data for this board name.')
 
         # Log in and start test.
         with chrome.Chrome(init_network_controller=True) as cr:
diff --git a/client/site_tests/touch_TouchscreenZoom/control b/client/site_tests/touch_TouchscreenZoom/control
index 44c92f5..e835ff3 100644
--- a/client/site_tests/touch_TouchscreenZoom/control
+++ b/client/site_tests/touch_TouchscreenZoom/control
@@ -9,12 +9,13 @@
 This test will fail if, after playback of touch events, no zoom is detected
 or if zoom behaves irregularly.
 """
-ATTRIBUTES = "suite:touch"
+ATTRIBUTES = "suite:touch_replay"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchscreen"
+PY_VERSION = 3
 
 DOC = """
 Plays back touchscreen movements to zoom a page.  Checks the page width to
diff --git a/client/site_tests/touch_TouchscreenZoom/touch_TouchscreenZoom.py b/client/site_tests/touch_TouchscreenZoom/touch_TouchscreenZoom.py
index 14b359b..0ab3678 100644
--- a/client/site_tests/touch_TouchscreenZoom/touch_TouchscreenZoom.py
+++ b/client/site_tests/touch_TouchscreenZoom/touch_TouchscreenZoom.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -65,7 +66,7 @@
     def run_once(self):
         """Entry point of this test."""
         if not self._is_testable():
-            return
+            raise error.TestNAError('Missing input data for this board name.')
 
         # Log in and start test.
         with chrome.Chrome(init_network_controller=True) as cr:
diff --git a/client/site_tests/touch_UpdateErrors/control.stylus b/client/site_tests/touch_UpdateErrors/control.stylus
deleted file mode 100644
index 64139f7..0000000
--- a/client/site_tests/touch_UpdateErrors/control.stylus
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kathrelkeld"
-NAME = "touch_UpdateErrors.stylus"
-PURPOSE = "Check whether startup stylus firmware update had errors."
-CRITERIA = """
-This test will fail if stylus device did not check for updates or if there
-were errors.
-"""
-ATTRIBUTES = "suite:touch"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "touch"
-TEST_TYPE = "client"
-DEPENDENCIES = "stylus, phase:PVT"
-
-DOC = """
-Search the touch update log entries for errors or failures.
-"""
-
-job.run_test('touch_UpdateErrors', input_type='stylus')
diff --git a/client/site_tests/touch_UpdateErrors/control.touchpad b/client/site_tests/touch_UpdateErrors/control.touchpad
deleted file mode 100644
index f7c31ed..0000000
--- a/client/site_tests/touch_UpdateErrors/control.touchpad
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kathrelkeld"
-NAME = "touch_UpdateErrors.touchpad"
-PURPOSE = "Check whether startup touchpad firmware update had errors."
-CRITERIA = """
-This test will fail if touchpad device did not check for updates or if there
-were errors.
-"""
-ATTRIBUTES = "suite:touch"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "touch"
-TEST_TYPE = "client"
-DEPENDENCIES = "touchpad, phase:PVT"
-
-DOC = """
-Search the touch update log entries for errors or failures.
-"""
-
-job.run_test('touch_UpdateErrors', input_type='touchpad')
diff --git a/client/site_tests/touch_UpdateErrors/control.touchscreen b/client/site_tests/touch_UpdateErrors/control.touchscreen
deleted file mode 100644
index 7586019..0000000
--- a/client/site_tests/touch_UpdateErrors/control.touchscreen
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kathrelkeld"
-NAME = "touch_UpdateErrors.touchscreen"
-PURPOSE = "Check whether startup touch firmware update had errors."
-CRITERIA = """
-This test will fail if the touchscreen did not check for updates or if there
-were errors.
-"""
-ATTRIBUTES = "suite:touch"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "touch"
-TEST_TYPE = "client"
-DEPENDENCIES = "touchscreen, phase:PVT"
-
-DOC = """
-Search the touch update log entries for errors or failures.
-"""
-
-job.run_test('touch_UpdateErrors', input_type='touchscreen')
diff --git a/client/site_tests/touch_UpdateErrors/touch_UpdateErrors.py b/client/site_tests/touch_UpdateErrors/touch_UpdateErrors.py
deleted file mode 100644
index 892a157..0000000
--- a/client/site_tests/touch_UpdateErrors/touch_UpdateErrors.py
+++ /dev/null
@@ -1,144 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import touch_playback_test_base
-
-
-class touch_UpdateErrors(touch_playback_test_base.touch_playback_test_base):
-    """Check that touch update is tried and that there are no update errors."""
-    version = 1
-
-    # Older devices with Synaptics touchpads do not report firmware updates.
-    _INVALID_BOARDS = ['x86-alex', 'x86-alex_he', 'x86-zgb', 'x86-zgb_he',
-                       'x86-mario', 'stout']
-
-    # Devices which have errors in older builds but not newer ones.
-    _IGNORE_OLDER_LOGS = ['expresso', 'enguarde', 'cyan', 'wizpig']
-
-    # Devices which have errors in the first build after update.
-    _IGNORE_AFTER_UPDATE_LOGS = ['link']
-
-    def _find_logs_start_line(self):
-        """Find where in /var/log/messages this build's logs start.
-
-        Prevent bugs such as crosbug.com/p/31012, where unfixable errors from
-        FSI builds remain in the logs.
-
-        For devices where this applies, split the logs by Linux version.  Since
-        this line can repeat, find the last chunk of logs where the version is
-        all the same - all for the build under test.
-
-        @returns: string of the line number to start looking at logs
-
-        """
-        if not (self._platform in self._IGNORE_OLDER_LOGS or
-                self._platform in self._IGNORE_AFTER_UPDATE_LOGS):
-            return '0'
-
-        log_cmd = 'grep -ni "Linux version " /var/log/messages'
-
-        version_entries = utils.run(log_cmd).stdout.strip().split('\n')
-
-        # Separate the line number and the version date (i.e. remove timestamp).
-        lines, dates = [], []
-        for entry in version_entries:
-            lines.append(entry[:entry.find(':')])
-            dates.append(entry[entry.find('Linux version '):])
-        latest = dates[-1]
-        start_line = lines[-1]
-        start_line_index = -1
-
-        # Find where logs from this build start by checking backwards for the
-        # first change in build.  Some of these dates may be duplicated.
-        for i in xrange(len(lines)-1, -1, -1):
-            if dates[i] != latest:
-                break
-            start_line = lines[i]
-            start_line_index = i
-
-        if start_line_index == 0:
-            return '0'
-
-        logging.info('Logs include entries from an older build; skipping '
-                     'them, as hardcoded for this platform.')
-
-        # Ignore the first build after update if required.
-        if self._platform in self._IGNORE_AFTER_UPDATE_LOGS:
-            start_line_index += 1
-            if start_line_index >= len(lines):
-                raise error.TestError(
-                        'Insufficient logs: aborting test to avoid a known '
-                        'issue!  Please reboot and try again.')
-            start_line = lines[start_line_index]
-
-        return start_line
-
-    def _check_updates(self, input_type):
-        """Fail the test if device has problems with touch firmware update.
-
-        @param input_type: string of input type, e.g. 'touchpad'
-
-        @raises: TestFail if no update attempt occurs or if there is an error.
-
-        """
-        hw_id = self.player.devices[input_type].hw_id
-        if not hw_id:
-            raise error.TestError('%s has no valid hw_id!' % input_type)
-
-        updater_name = 'touch-firmware-update'
-        start_line = self._find_logs_start_line()
-        # Null characters sometimes slip into /var/log/messages, causing grep to
-        # treat it as a binary file (and output "binary file matches" rather
-        # than the matching text). --text forces grep to treat it as text file.
-        log_cmd = (r'tail -n +%s /var/log/messages '
-                   r"| grep --text -i '\(%s\|chromeos-touch-update\)'") % (
-                           start_line, updater_name)
-
-        pass_terms = [
-                '%s.*%s' % (updater_name, hw_id),
-                r'chromeos-touch-update\[[[:digit:]]\+\]: Running updater for '
-                r'.* ([[:xdigit:]]\+:%s)$' % hw_id
-        ]
-
-        fail_terms = ['error[^s]', 'err[^a-z]']
-        ignore_terms = ['touchview','autotest']
-
-        # Remove lines that match ignore_terms.
-        for term in ignore_terms:
-            log_cmd += ' | grep -v -i %s' % term
-
-        # Check for key terms in touch logs.
-        found_pass_term = False
-        for term in pass_terms + fail_terms:
-            search_cmd = "%s | grep -i '%s'" % (log_cmd, term)
-            log_entries = utils.run(search_cmd, ignore_status=True).stdout
-            if term in fail_terms and len(log_entries) > 0:
-                error_msg = log_entries.split('\n')[0]
-                error_msg = error_msg[error_msg.find(term)+len(term):].strip()
-                raise error.TestFail(error_msg)
-            if term in pass_terms and len(log_entries) > 0:
-                logging.info('Matched "%s" on these pass terms: "%s"', term,
-                             log_entries)
-                found_pass_term = True
-
-        if not found_pass_term:
-            logging.info('Did not find any pass terms! (looked for "%s")',
-                         '", "'.join(pass_terms))
-            raise error.TestFail('Touch firmware did not attempt update.')
-
-    def run_once(self, input_type='touchpad'):
-        """Entry point of this test."""
-        if not self.player.has(input_type):
-            raise error.TestError('No %s found on this device!' % input_type)
-
-        # Skip run on invalid touch inputs.
-        if self._platform in self._INVALID_BOARDS:
-            logging.info('This touchpad is not supported for this test.')
-            return
-
-        self._check_updates(input_type)
diff --git a/client/site_tests/touch_WakeupSource/control.touchpad b/client/site_tests/touch_WakeupSource/control.touchpad
index b0631f5..e6fa2ff 100644
--- a/client/site_tests/touch_WakeupSource/control.touchpad
+++ b/client/site_tests/touch_WakeupSource/control.touchpad
@@ -15,6 +15,7 @@
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchpad"
+PY_VERSION = 3
 
 DOC = """
 For this device's touchpad, verify whether that device is
diff --git a/client/site_tests/touch_WakeupSource/control.touchscreen b/client/site_tests/touch_WakeupSource/control.touchscreen
index d78edb9..c594e6f 100644
--- a/client/site_tests/touch_WakeupSource/control.touchscreen
+++ b/client/site_tests/touch_WakeupSource/control.touchscreen
@@ -15,6 +15,7 @@
 TEST_CLASS = "touch"
 TEST_TYPE = "client"
 DEPENDENCIES = "touchscreen"
+PY_VERSION = 3
 
 DOC = """
 For this device's touchscreen, verify whether that device is
diff --git a/client/site_tests/touch_WakeupSource/touch_WakeupSource.py b/client/site_tests/touch_WakeupSource/touch_WakeupSource.py
index 2a8eb3f..0295212 100644
--- a/client/site_tests/touch_WakeupSource/touch_WakeupSource.py
+++ b/client/site_tests/touch_WakeupSource/touch_WakeupSource.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/client/site_tests/usbpd_DisplayPortSink/control b/client/site_tests/usbpd_DisplayPortSink/control
deleted file mode 100644
index 0b291c5..0000000
--- a/client/site_tests/usbpd_DisplayPortSink/control
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-NAME = 'usbpd_DisplayPortSink'
-AUTHOR = 'The Chromium OS Authors'
-PURPOSE = 'Test USB-PD DisplayPort sink'
-DOC = """
-Integration test for USB-PD DisplayPort sink.
-"""
-ATTRIBUTES = 'suite:experimental'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'usbpd'
-TEST_TYPE = 'client'
-
-job.run_test('usbpd_DisplayPortSink', enter_reps=10)
diff --git a/client/site_tests/usbpd_DisplayPortSink/usbpd_DisplayPortSink.py b/client/site_tests/usbpd_DisplayPortSink/usbpd_DisplayPortSink.py
deleted file mode 100644
index bb64f02..0000000
--- a/client/site_tests/usbpd_DisplayPortSink/usbpd_DisplayPortSink.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros import ec as cros_ec
-
-
-class usbpd_DisplayPortSink(test.test):
-    """Integration test for USB-PD DisplayPort sink."""
-
-    version = 1
-    DP_SVID = '0xff01'
-
-    def _is_displayport(self, port):
-        return port.is_amode_supported(self.DP_SVID)
-
-    def _set_displayport(self, port, opos, enter):
-        return port.set_amode(self.DP_SVID, opos, enter)
-
-    def run_once(self, enter_reps=1):
-        usbpd = cros_ec.EC_USBPD()
-        logging.info("device has %d USB-PD ports", len(usbpd.ports))
-
-        for i,port in enumerate(usbpd.ports):
-            if not port.is_dfp():
-                continue
-
-            logging.info("Port %d is dfp", i)
-
-            if not self._is_displayport(port):
-                continue
-
-            logging.info("Port %d supports dp", i)
-
-            for _ in xrange(enter_reps):
-                if not self._set_displayport(port, 1, False):
-                    raise error.TestError("Failed to exit DP mode")
-
-                if not self._set_displayport(port, 1, True):
-                    raise error.TestError("Failed to enter DP mode")
diff --git a/client/site_tests/usbpd_GFU/control b/client/site_tests/usbpd_GFU/control
deleted file mode 100644
index e9923d7..0000000
--- a/client/site_tests/usbpd_GFU/control
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-NAME = 'usbpd_GFU'
-AUTHOR = 'The Chromium OS Authors'
-PURPOSE = 'Test USB-PD Google Firmware Update (GFU)'
-DOC = """
-Integration test for USB-PD Google Firmware Update (GFU).
-"""
-TIME='MEDIUM'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'usbpd'
-TEST_TYPE = 'client'
-
-job.run_test('usbpd_GFU', ro_reps=1)
diff --git a/client/site_tests/usbpd_GFU/usbpd_GFU.py b/client/site_tests/usbpd_GFU/usbpd_GFU.py
deleted file mode 100644
index 7a9f80d..0000000
--- a/client/site_tests/usbpd_GFU/usbpd_GFU.py
+++ /dev/null
@@ -1,262 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import glob
-import logging
-import re
-import time
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error, utils
-from autotest_lib.client.cros import ec as cros_ec, cros_logging
-
-
-class usbpd_GFU(test.test):
-    """Integration test for USB-PD Google Firmware Update (GFU).
-
-    Test should:
-    - interrogate what firmware's are available for each device and for each:
-      1. Use ectool's flashpd to write RW with that to mimic old hw
-         - Validate that kernel driver successfully updates to latest RW.
-      2. Erase RW and see update as well.
-
-    TODO:
-      3. Check that update is checked after S2R.
-    """
-
-    version = 1
-
-    FW_PATH = '/lib/firmware/cros-pd'
-    # <device>_v<major>.<minor>.<build>-<commit SHA>
-    FW_NAME_RE = r'%s/(\w+)_v(\d+)\.(\d+)\.(\d+)-([0-9a-f]+).*' % (FW_PATH)
-    GOOGLE_VID = '0x18d1'
-    MAX_UPDATE_SECS = 80
-    FW_UP_DNAME = 'cros_ec_pd_update'
-    # TODO(tbroch) This will be change once cros_ec_pd_update is abstracted from
-    # ACPI driver.  Will need to fix this once it happens.
-    FW_UP_DISABLE_PATH = '/sys/devices/LNXSYSTM:00/device:00/PNP0A08:00/device:1e/PNP0C09:00/GOOG0003:00/disable'
-
-    # TODO(tbroch) find better way to build this or we'll have to edit test for
-    # each new PD peripheral.
-    DEV_MAJOR = dict(zinger=1, minimuffin=2, dingdong=3, hoho=4)
-
-    def _index_firmware_avail(self):
-        """Index the various USB-PD firmwares in the rootfs.
-
-        TODO(crosbug.com/434522) This method will need reworked after we've come
-        up with a better method for firmware release.
-
-        @returns: dictionary of firmwares (key == name, value == list of
-          firmware paths)
-        """
-        fw_dict = collections.defaultdict(list)
-        for fw in glob.glob('%s/*_v[1-9].*.bin' % (self.FW_PATH)):
-            mat = re.match(self.FW_NAME_RE, fw)
-            if not mat:
-                continue
-
-            name = mat.group(1)
-            fw_dict[name].append(fw)
-
-        return fw_dict
-
-    def _is_gfu(self, port):
-        """Is it in GFU?
-
-        @param port: EC_USBPD object for port.
-
-        @returns: True if GFU enterd, False otherwise.
-        """
-        return port.is_amode_supported(self.GOOGLE_VID)
-
-    def _is_in_rw(self, port):
-        """Is PD device in RW firmware?
-
-        @param port: EC_USBPD object for port.
-
-        @returns: True if in RW, False otherwise.
-        """
-        flash_info = port.get_flash_info()
-        logging.debug('flash_info = %s', flash_info)
-        return flash_info['image_status'] == 'RW'
-
-    def _set_kernel_fw_update(self, disable=0):
-        """Disable the FW update driver.
-
-        @param disable: 1 for disable, 0 for enable.
-        """
-        utils.write_one_line(self.FW_UP_DISABLE_PATH, disable)
-        if not disable:
-            # Allow kernel driver time quiesce
-            time.sleep(2)
-
-    def _modify_rw(self, port, rw=None, tries=3):
-        """Modify RW of USB-PD device in <port>.
-
-        @param port: EC_USBPD object for port.
-        @param rw: Path to RW FW to write using ectool.  If None then uses
-          /dev/null to invalidate the RW.
-        @param tries: Number of tries to update RW via flashpd
-
-        @returns: True if success, False otherwise.
-        """
-        timeout = self.MAX_UPDATE_SECS
-
-        if not rw:
-            rw = '/dev/null'
-            tries = 1
-
-        self._set_kernel_fw_update(disable=1)
-
-        while (tries):
-            try:
-                # Note in flashpd <dev_major> <port> <file> the dev_major is
-                # unnecessary in all cases so its just been set to 0
-                port.ec_command('flashpd 0 %d %s' % (port.index, rw),
-                                ignore_status=True, timeout=timeout)
-
-            except error.CmdTimeoutError:
-                # TODO(tbroch) could remove try/except if ec_command used run
-                # instead of system_output + ignore_timeout=True
-                tries -= 1
-                continue
-
-            if rw != '/dev/null' and not self._is_in_rw(port):
-                logging.warn('Port%d: not in RW after flashpd ... retrying',
-                             port.index)
-                tries -= 1
-            else:
-                break
-
-        self._set_kernel_fw_update()
-
-        msg = self._reader.get_last_msg([r'%s.*is in RO' % port.index,
-                                         self.FW_UP_DNAME],
-                                        retries=5, sleep_seconds=2)
-        if not msg:
-            logging.warn('Port%d: Driver does NOT see device in RO',
-                         port.index)
-            return False
-        logging.info('Port%d: Driver sees device in RO', port.index)
-        return True
-
-    def _test_update(self, port, rw=None, tries=3):
-        """Test RW update.
-
-        Method tests the kernel's RW update process by first modifying the
-        existing RW (either invalidating or rolling it back) via ectool.  It
-        then querys the syslog to validate kernel sees the need for update and
-        is successful.
-
-        @param port: EC_USBPD object for port.
-        @param rw: path to RW firmware to write via ectool to test upgrade.
-        @param tries: integer number of attempts to write RW.  Necessary as
-          update is not robust (design decision).
-        """
-        if not tries:
-            raise error.TestError('Retries must be > 0')
-
-        if not self._is_in_rw(port):
-            raise error.TestError('Port%d: Device is not in RW' % port.index)
-
-        fw_up_re = r'%s.*Port%d FW update completed' % (self.FW_UP_DNAME,
-                                                        port.index)
-
-        while tries:
-            self._reader.set_start_by_current()
-            rsp = self._modify_rw(port, rw)
-
-            if not rsp:
-                rsp_str = 'Port%d: RW modified with RW=%s failed' % \
-                          (port.index, rw)
-                if tries:
-                    logging.warn('%s ... retrying.', rsp_str)
-                    tries -= 1
-                else:
-                    raise error.TestError(rsp_str)
-
-            self._reader.set_start_by_current()
-            msg = self._reader.get_last_msg([fw_up_re],
-                                            retries=(self.MAX_UPDATE_SECS / 2),
-                                            sleep_seconds=2)
-
-            if not msg:
-                rsp_str = 'Port%d: driver did NOT update FW' % port.index
-                if tries:
-                    logging.warn('%s ... retrying.', rsp_str)
-                    tries -= 1
-                    continue
-                else:
-                    raise error.TestError(rsp_str)
-
-            logging.info('Port%d: Driver completed RW update', port.index)
-
-            # Allow adequate reboot time after RW write completes and device is
-            # rebooted.
-            time.sleep(3)
-
-            if not self._is_in_rw(port):
-                rsp_str = 'Port%d: Device is not in RW' % port.index
-                if tries:
-                    logging.warn('%s ... retrying.', rsp_str)
-                    tries -= 1
-                    continue
-                else:
-                    raise error.TestError(rsp_str)
-
-            break # success #
-
-    def _test_rw_rollback(self, port, fw_dict):
-        """Test rolling back RW firmware.
-
-        @param port: EC_USBPD object for port.
-        @param fw_dict: dictionary of firmwares.
-        """
-        self._set_kernel_fw_update()
-
-        # test old RW update
-        flash_info = port.get_flash_info()
-        for dev_name in fw_dict.keys():
-            if flash_info['dev_major'] == self.DEV_MAJOR[dev_name]:
-                for old_rw in sorted(fw_dict[dev_name], reverse=True)[1:]:
-                    logging.info('Port%d: Rollback test %s to %s',
-                                 port.index, dev_name, old_rw)
-                    self._test_update(port, rw=old_rw)
-                break
-
-    def _test_ro_only(self, port, ro_reps):
-        """Test FW update on device with RO only.
-
-        @param port: EC_USBPD object for port.
-        @param ro_reps: Number of times to repeat test.
-        """
-        # test update in RO ro_reps times
-        for i in xrange(ro_reps):
-            logging.info('RO Loop%d', i)
-            self._test_update(port)
-
-    def run_once(self, ro_reps=1):
-
-        fw_dict = self._index_firmware_avail()
-
-        self._usbpd = cros_ec.EC_USBPD()
-        self._reader = cros_logging.LogReader()
-
-        for port in self._usbpd.ports:
-            if not port.is_dfp():
-                continue
-
-            logging.info('Port%d: is a DFP', port.index)
-
-            if not self._is_gfu(port):
-                continue
-
-            logging.info('Port%d: supports GFU', port.index)
-
-            self._test_rw_rollback(port, fw_dict)
-            self._test_ro_only(port, ro_reps)
-
-    def cleanup(self):
-        self._set_kernel_fw_update()
diff --git a/client/site_tests/video_AVAnalysis/control.vp9.720 b/client/site_tests/video_AVAnalysis/control.vp9.720
index 6d7e247..ac5f36b 100644
--- a/client/site_tests/video_AVAnalysis/control.vp9.720
+++ b/client/site_tests/video_AVAnalysis/control.vp9.720
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+PY_VERSION = 3
 AUTHOR = "vsuley@google.com"
 NAME = "video_AVAnalysis.vp9.720"
 PURPOSE = "Play bar-coded video on ChromeOS DUT so it can be recorded for analysis."
diff --git a/client/site_tests/video_AVAnalysis/video_AVAnalysis.py b/client/site_tests/video_AVAnalysis/video_AVAnalysis.py
index 883d3bd..871b039 100644
--- a/client/site_tests/video_AVAnalysis/video_AVAnalysis.py
+++ b/client/site_tests/video_AVAnalysis/video_AVAnalysis.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.import json
diff --git a/client/site_tests/video_WebRtcMainFeedSwitching/control b/client/site_tests/video_WebRtcMainFeedSwitching/control
deleted file mode 100644
index fc3ded8..0000000
--- a/client/site_tests/video_WebRtcMainFeedSwitching/control
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "video_WebRtcMainFeedSwitching"
-PURPOSE = "Stress WebRTC and Chrome by frequent switches of the source video"
-CRITERIA = "Fails if Chrome crashes"
-ATTRIBUTES = "suite:hotrod"
-TIME = "SHORT"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-    "components": ["OS>Kernel>Video"],
-}
-JOB_RETRIES = 2
-
-DOC = """
-This test starts 5 high resolution loopback peer connections. Four of the video
-elements are styled to be small, thumbnail sized and one is in its native size.
-This is intended to simulate a video conference with one main feed and several
-smaller feeds.
-
-The test then frequently swaps the sourceObject of the main feed with a
-randomly chosen smaller feed.
-
-The tests use a fake media stream - not a real camera.
-"""
-
-job.run_test("video_WebRtcMainFeedSwitching")
-
diff --git a/client/site_tests/video_WebRtcMainFeedSwitching/control.perf b/client/site_tests/video_WebRtcMainFeedSwitching/control.perf
deleted file mode 100644
index 1e0430b..0000000
--- a/client/site_tests/video_WebRtcMainFeedSwitching/control.perf
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "video_WebRtcMainFeedSwitching.perf"
-PURPOSE = """Measure performance when srcObject of video tags are frequently
-swapped"""
-CRITERIA = "Fails if Chrome crashes"
-ATTRIBUTES = "suite:hotrod"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-    "components": ["OS>Kernel>Video"],
-}
-JOB_RETRIES = 2
-
-DOC = """
-This test starts 5 high resolution (1280x720) loopback peer connections. Four
-of the video elements are styled to be small, thumbnail sized (182x136) and one
-is in its native size.  This is intended to simulate a video conference with
-one main feed and several smaller feeds.
-
-The test then frequently swaps the sourceObject of the main feed with a
-randomly chosen smaller feed.
-
-The tests use a fake media stream - not a real camera.
-
-The test collects system metrics during the run. See go/cfm-perf-metrics
-for details about the metrics.
-"""
-
-job.run_test(
-        "video_WebRtcMainFeedSwitching", mode = "performance", tag = "perf")
-
diff --git a/client/site_tests/video_WebRtcMainFeedSwitching/main-feed-switching.js b/client/site_tests/video_WebRtcMainFeedSwitching/main-feed-switching.js
deleted file mode 100644
index 8d6f7bf..0000000
--- a/client/site_tests/video_WebRtcMainFeedSwitching/main-feed-switching.js
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Copyright 2017 The Chromium Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-/*jshint esversion: 6 */
-
-'use strict';
-
-const $ = document.getElementById.bind(document);
-
-const MAIN_FEED_RESOLUTION = {w:1280, h:720};
-
-// This resolution is what we typically get on Hangouts Meet.
-const SMALL_FEED_RESOLUTION = {w:182, h:136};
-
-// This test frequently reports weird resolutions although the visuals look OK.
-// Hence, require lots of consecutive bad resolutions before failure.
-// TODO(kerl): Effectively disabled now, investigate why we get so many bad
-// resolution reports.
-const NUM_BAD_RESOLUTIONS_FOR_FAILURE = Number.MAX_SAFE_INTEGER;
-
-class TestRunner {
-  constructor(numConnections, runtimeSeconds, iterationDelayMillis) {
-    this.runtimeSeconds = runtimeSeconds;
-    this.iterationDelayMillis = iterationDelayMillis;
-    this.videoElements = [];
-    this.mainFeed = null;
-    this.peerConnections = [];
-    this.numConnections = numConnections;
-    this.iteration = 0;
-    this.startTime = 0;  // initialized to dummy value
-    this.status = this.getStatusInternal_();
-  }
-
-  runTest() {
-    for (let i = 0; i < this.numConnections; i++) {
-      const videoElement = document.createElement('video');
-      videoElement.autoplay = true;
-      $('body').appendChild(videoElement);
-      if (!this.mainFeed) {
-        // The first created is the main feed.
-        setSize(videoElement, MAIN_FEED_RESOLUTION);
-        this.mainFeed = videoElement;
-      } else {
-        setSize(videoElement, SMALL_FEED_RESOLUTION);
-        this.videoElements.push(videoElement);
-      }
-      this.peerConnections.push(new PeerConnection(
-          videoElement, [MAIN_FEED_RESOLUTION], cpuOveruseDetection));
-    }
-    const promises = this.peerConnections.map((conn) => conn.start());
-    Promise.all(promises)
-        .then(() => {
-          this.startTime = Date.now();
-          this.switchFeedLoop();
-        })
-        .catch((e) => {throw e});
-  }
-
-  switchFeedLoop() {
-    this.iteration++;
-    this.status = this.getStatusInternal_();
-    $('status').textContent = this.status;
-    if (this.status != 'ok-done') {
-      const switchWith = Math.floor(Math.random() * this.videoElements.length);
-      const newMainSrc = this.videoElements[switchWith].srcObject;
-      this.videoElements[switchWith].srcObject = this.mainFeed.srcObject;
-      this.mainFeed.srcObject = newMainSrc;
-      setTimeout(
-          () => this.switchFeedLoop(), this.iterationDelayMillis);
-    }
-  }
-
-  getStatus() {
-    return this.status;
-  }
-
-  getStatusInternal_() {
-    if (this.iteration == 0) {
-      return 'not-started';
-    }
-    try {
-      this.peerConnections.forEach(
-          (conn) => conn.verifyState(NUM_BAD_RESOLUTIONS_FOR_FAILURE));
-    } catch (e) {
-      return `failure: ${e.message}`;
-    }
-    const timeSpent = Date.now() - this.startTime;
-    if (timeSpent >= this.runtimeSeconds * 1000) {
-      return 'ok-done';
-    } else {
-      return `running, iteration: ${this.iteration}`;
-    }
-  }
-}
-
-function setSize(element, size) {
-  element.setAttribute('style', `width:${size.w}px;height:${size.h}px`);
-}
-
-// Declare testRunner so that the Python code can access it to query status.
-// Also allows us to access it easily in dev tools for debugging.
-let testRunner;
-// Set from the Python test runner
-let cpuOveruseDetection = null;
-
-function startTest(
-    runtimeSeconds, numPeerConnections, iterationDelayMillis) {
-  testRunner = new TestRunner(
-      numPeerConnections, runtimeSeconds, iterationDelayMillis);
-  testRunner.runTest();
-}
-
-function getStatus() {
-  return testRunner ? testRunner.getStatus() : 'not-initialized';
-}
-
diff --git a/client/site_tests/video_WebRtcMainFeedSwitching/video_WebRtcMainFeedSwitching.py b/client/site_tests/video_WebRtcMainFeedSwitching/video_WebRtcMainFeedSwitching.py
deleted file mode 100644
index a3a39b1..0000000
--- a/client/site_tests/video_WebRtcMainFeedSwitching/video_WebRtcMainFeedSwitching.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import test_webrtc_peer_connection
-
-
-class video_WebRtcMainFeedSwitching(test.test):
-    """
-    Tests a simulated Video Call with one main and several small feeds.
-
-    Simulates speaker switching by swapping the source video object between the
-    main feed and one randomly chosen small feed.
-    """
-    version = 1
-
-    def run_once(self, mode = 'functional'):
-        """
-        Runs the test.
-
-        @param mode: 'functional' or 'performance' depending on desired mode.
-        """
-        kwargs = {
-                'own_script': 'main-feed-switching.js',
-                'common_script': 'loopback-peerconnection.js',
-                'bindir': self.bindir,
-                'tmpdir': self.tmpdir,
-                'debugdir': self.debugdir,
-                'num_peer_connections': 5,
-                'iteration_delay_millis': 50
-        }
-
-        if mode == 'functional':
-            test = test_webrtc_peer_connection.WebRtcPeerConnectionTest(
-                    title = 'Main Feed Switching',
-                    **kwargs)
-            test.run_test()
-        elif mode == 'performance':
-            test = test_webrtc_peer_connection\
-                    .WebRtcPeerConnectionPerformanceTest(
-                            title = 'Main Feed Switching Performance Test',
-                            **kwargs)
-            test.run_test()
-            test.collector.write_metrics(self.output_perf_value)
-        else:
-            raise error.TestError('mode must be "functional" or "performance"')
-
diff --git a/client/site_tests/video_WebRtcResolutionSwitching/control b/client/site_tests/video_WebRtcResolutionSwitching/control
deleted file mode 100644
index 6a439ac..0000000
--- a/client/site_tests/video_WebRtcResolutionSwitching/control
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "video_WebRtcResolutionSwitching"
-PURPOSE = "Stress WebRTC by frequently switching between different resolutions"
-CRITERIA = "Fails if any video element gets a lower resolution than requested"
-ATTRIBUTES = "suite:hotrod"
-TIME = "SHORT"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-    "components": ["OS>Kernel>Video"],
-}
-JOB_RETRIES = 2
-
-DOC = """
-This test starts 5 loopback peer connections, each with 4 streams with
-different resolutions. The test then switches randomly which of the streams are
-active for each connection, effectively flipping between different resolutions.
-
-If any of the video elements gets a resolution that is lower than the lowest we
-request, the test fails.
-
-The tests use a fake media stream - not a real camera.
-
-This test acts as a stress test and ensures that we do not encounter issues
-such as https://crbug.com/758850 again.
-"""
-
-job.run_test("video_WebRtcResolutionSwitching")
-
diff --git a/client/site_tests/video_WebRtcResolutionSwitching/control.perf b/client/site_tests/video_WebRtcResolutionSwitching/control.perf
deleted file mode 100644
index 087c6fd..0000000
--- a/client/site_tests/video_WebRtcResolutionSwitching/control.perf
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "video_WebRtcResolutionSwitching.perf"
-PURPOSE = "Test WebRTC by frequently switching between different resolutions"
-CRITERIA = "Fails if any video element gets a lower resolution than requested"
-ATTRIBUTES = "suite:hotrod"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-    "components": ["OS>Kernel>Video"],
-}
-JOB_RETRIES = 2
-
-DOC = """
-This test starts 5 loopback peer connections, each with 4 streams with
-different resolutions. The test then switches randomly which of the streams are
-active for each connection, effectively flipping between different resolutions.
-
-The tests use a fake media stream - not a real camera.
-
-The test collects system metrics during the run. See go/cfm-perf-metrics
-for details about the metrics.
-"""
-
-job.run_test(
-        "video_WebRtcResolutionSwitching", mode = "performance", tag = "perf")
-
diff --git a/client/site_tests/video_WebRtcResolutionSwitching/resolution-switching.js b/client/site_tests/video_WebRtcResolutionSwitching/resolution-switching.js
deleted file mode 100644
index a91f088..0000000
--- a/client/site_tests/video_WebRtcResolutionSwitching/resolution-switching.js
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright 2017 The Chromium Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-/*jshint esversion: 6 */
-
-'use strict';
-
-const $ = document.getElementById.bind(document);
-
-function logError(err) {
-  console.error(err);
-}
-
-// Available resolutions to switch between. These are 4:3 resolutions chosen
-// since they have significant distance between them and are quite common. E.g.
-// they can be selected for youtube videos. We also avoid higher resolutions
-// since they consume a lot of resources.
-const RESOLUTIONS = [
-  {w:320, h:240},
-  {w:480, h:360},
-  {w:640, h:480},
-  {w:1280, h:720},
-];
-
-class TestRunner {
-  constructor(runtimeSeconds, switchResolutionDelayMillis) {
-    this.runtimeSeconds = runtimeSeconds;
-    this.switchResolutionDelayMillis = switchResolutionDelayMillis;
-    this.videoElements = [];
-    this.peerConnections = [];
-    this.numConnections = 0;
-    this.iteration = 0;
-    this.startTime = 0;  // initialized to dummy value
-    this.status = this.getStatusInternal_();
-  }
-
-  addPeerConnection() {
-    const videoElement = document.createElement('video');
-    videoElement.autoplay = true;
-    $('body').appendChild(videoElement);
-    this.videoElements.push(videoElement);
-    this.peerConnections.push(
-        new PeerConnection(videoElement, RESOLUTIONS, cpuOveruseDetection));
-  }
-
-  runTest() {
-    const promises = this.peerConnections.map((conn) => conn.start());
-    Promise.all(promises)
-        .then(() => {
-          this.startTime = Date.now();
-          this.switchResolutionLoop();
-        })
-        .catch((e) => {throw e});
-  }
-
-  switchResolutionLoop() {
-    this.iteration++;
-    this.status = this.getStatusInternal_();
-    $('status').textContent = this.status;
-    if (this.status != 'ok-done') {
-      Promise.all(this.peerConnections.map((pc) => pc.switchToRandomStream()))
-          .then(
-              () => setTimeout(
-                  () => this.switchResolutionLoop(),
-                  this.switchResolutionDelayMillis));
-    }
-  }
-
-  getStatus() {
-    return this.status;
-  }
-
-  getStatusInternal_() {
-    if (this.iteration == 0) {
-      return 'not-started';
-    }
-    try {
-      this.peerConnections.forEach((conn) => conn.verifyState());
-    } catch (e) {
-      return `failure: ${e.message}`;
-    }
-    const timeSpent = Date.now() - this.startTime;
-    if (timeSpent >= this.runtimeSeconds * 1000) {
-      return 'ok-done';
-    }
-    return `running, iteration: ${this.iteration}`;
-  }
-}
-
-// Declare testRunner so that the Python code can access it to query status.
-// Also allows us to access it easily in dev tools for debugging.
-let testRunner;
-// Set from the Python test runner
-let cpuOveruseDetection = null;
-
-function startTest(
-    runtimeSeconds, numPeerConnections, switchResolutionDelayMillis) {
-  testRunner = new TestRunner(runtimeSeconds, switchResolutionDelayMillis);
-  for (let i = 0; i < numPeerConnections; i++) {
-    testRunner.addPeerConnection();
-  }
-  testRunner.runTest();
-}
-
-function getStatus() {
-  return testRunner ? testRunner.getStatus() : 'not-initialized';
-}
-
diff --git a/client/site_tests/video_WebRtcResolutionSwitching/video_WebRtcResolutionSwitching.py b/client/site_tests/video_WebRtcResolutionSwitching/video_WebRtcResolutionSwitching.py
deleted file mode 100644
index 481bd88..0000000
--- a/client/site_tests/video_WebRtcResolutionSwitching/video_WebRtcResolutionSwitching.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import test_webrtc_peer_connection
-
-EXTRA_BROWSER_ARGS = ['--use-fake-ui-for-media-stream',
-                      '--use-fake-device-for-media-stream']
-
-class video_WebRtcResolutionSwitching(test.test):
-    """Tests multiple peerconnections that randomly change resolution."""
-    version = 1
-
-    def run_once(self, mode = 'functional'):
-        """
-        Runs the test.
-
-        @param mode: 'functional' or 'performance' depending on desired mode.
-        """
-        kwargs = {
-                'own_script': 'resolution-switching.js',
-                'common_script': 'loopback-peerconnection.js',
-                'bindir': self.bindir,
-                'tmpdir': self.tmpdir,
-                'debugdir': self.debugdir,
-                'num_peer_connections': 5,
-                'iteration_delay_millis': 300
-        }
-
-        if mode == 'functional':
-            test = test_webrtc_peer_connection.WebRtcPeerConnectionTest(
-                    title = 'Resolution Switching',
-                    **kwargs)
-            test.run_test()
-        elif mode == 'performance':
-            test = test_webrtc_peer_connection\
-                    .WebRtcPeerConnectionPerformanceTest(
-                            title = 'Resolution Switching Performance Test',
-                            **kwargs)
-            test.run_test()
-            test.collector.write_metrics(self.output_perf_value)
-        else:
-            raise error.TestError('mode must be "functional" or "performance"')
-
diff --git a/client/site_tests/vpd_ReadWrite/control b/client/site_tests/vpd_ReadWrite/control
deleted file mode 100644
index 2bcd891..0000000
--- a/client/site_tests/vpd_ReadWrite/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dhaddock@"
-NAME = "vpd_ReadWrite"
-PURPOSE = "Verify reading and writing to VPD"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "rlz"
-TEST_TYPE = "client"
-
-DOC = """
-This test checks that repeatedly writing to and reading from vpd works reliably.
-"""
-
-job.run_test('vpd_ReadWrite', repetitions=100)
-
-
diff --git a/client/site_tests/vpd_ReadWrite/vpd_ReadWrite.py b/client/site_tests/vpd_ReadWrite/vpd_ReadWrite.py
deleted file mode 100644
index 3d38a8b..0000000
--- a/client/site_tests/vpd_ReadWrite/vpd_ReadWrite.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-
-
-class vpd_ReadWrite(test.test):
-    """Tests reading from and writing to vpd."""
-    version = 1
-
-
-    def _write_to_vpd(self, vpd_field, value):
-        """
-        Writes a value to a vpd field.
-
-        @param vpd_field: The vpd field name
-        @param value: The value to write to the field.
-
-        @returns True if the write was successful, else False
-
-        """
-        try:
-            result = utils.run('vpd -i RW_VPD -s %s=%d' % (vpd_field, value))
-            logging.debug(result)
-            return True
-        except error.CmdError as err:
-            logging.info('Failed to write %d to %s vpd field: %s', value,
-                         vpd_field, err)
-            return False
-
-
-    def _read_from_vpd(self, vpd_field):
-        """
-        Reads a value from a vpd field.
-
-        @param vpd_field: The vpd field name to read from.
-
-        @returns The value of the vpd field specified or None if it failed.
-
-        """
-        try:
-            result = utils.run('vpd -i RW_VPD -g %s' % vpd_field)
-            logging.debug(result)
-            return int(result.stdout)
-        except error.CmdError as err:
-            logging.info('Failed to read %s vpd field: %s', vpd_field, err)
-            return None
-
-
-    def _execute_read_write_cycle(self, repetitions, vpd_field):
-        write_failures = 0
-        read_failures = 0
-
-        for value in range(repetitions):
-            if not self._write_to_vpd(vpd_field, value):
-                write_failures += 1
-                continue
-
-            value_from_vpd = self._read_from_vpd(vpd_field)
-
-            if value_from_vpd is None:
-                read_failures += 1
-            elif value_from_vpd != value:
-                write_failures += 1
-                logging.info('No error when writing to vpd but reading showed '
-                             'a different value than we expected. Expected: '
-                             '%d, Actual: %d', value, value_from_vpd)
-
-        if write_failures > 0 and read_failures > 0:
-            raise error.TestFail('There were %d/%d write failures and %d/%d '
-                                 'read failures.' % (write_failures,
-                                                     repetitions,
-                                                     read_failures,
-                                                     repetitions))
-        elif write_failures > 0:
-            raise error.TestFail('There were %d/%d write failures' % (
-                write_failures, repetitions))
-        elif read_failures > 0:
-            raise error.TestFail('There were %d/%d read failures' % (
-                read_failures, repetitions))
-
-
-    def run_once(self, repetitions):
-        """
-        Entry point to the test.
-
-        @param repetitions: The number of times to cycle through the test.
-
-        """
-        self._execute_read_write_cycle(repetitions, 'should_send_rlz_ping')
-        self._execute_read_write_cycle(repetitions,
-                                       'first_active_omaha_ping_sent')
-        logging.info('There were no read or write failures. Test successful')
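
The vpd_ReadWrite test removed above drove the vpd command line tool directly, writing a field with "vpd -i RW_VPD -s <field>=<value>" and reading it back with "vpd -i RW_VPD -g <field>", then counting read and write failures over repeated cycles. A minimal standalone sketch of that cycle, assuming only that the same vpd tool is present on the device (the subprocess helpers below are illustrative, not autotest code):

    # Standalone sketch of the removed read/write cycle; 'should_send_rlz_ping'
    # is one of the fields the original test exercised.
    import subprocess

    def write_vpd(field, value):
        """Write an integer value to an RW_VPD field; True on success."""
        cmd = ['vpd', '-i', 'RW_VPD', '-s', '%s=%d' % (field, value)]
        return subprocess.run(cmd).returncode == 0

    def read_vpd(field):
        """Read an RW_VPD field back as an int, or None on failure."""
        proc = subprocess.run(['vpd', '-i', 'RW_VPD', '-g', field],
                              capture_output=True, text=True)
        return int(proc.stdout) if proc.returncode == 0 else None

    def count_failures(field, repetitions):
        failures = 0
        for value in range(repetitions):
            if not write_vpd(field, value) or read_vpd(field) != value:
                failures += 1
        return failures
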
diff --git a/client/site_tests/webrtc_PausePlayPeerConnections/control.audio b/client/site_tests/webrtc_PausePlayPeerConnections/control.audio
deleted file mode 100644
index 4ad1113..0000000
--- a/client/site_tests/webrtc_PausePlayPeerConnections/control.audio
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "malmnas@google.com, chromeos-meetings@google.com"
-NAME = "webrtc_PausePlayPeerConnections.audio"
-PURPOSE = "Ensures frequent pause and plays of peer connection streams work"
-CRITERIA = "Fails if the tab freezes during the test"
-ATTRIBUTES = "suite:hotrod, suite:bluestreak-pre-cq"
-TIME = "SHORT"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "audio"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-    "components": ["OS>Kernel>Audio"],
-}
-JOB_RETRIES = 2
-
-DOC = """
-This test starts 10 peer connections with audio streams.
-The test randomly pauses and plays the streams.
-"""
-
-job.run_test("webrtc_PausePlayPeerConnections", element_type='audio', tag='audio')
-
diff --git a/client/site_tests/webrtc_PausePlayPeerConnections/control.audio_perf b/client/site_tests/webrtc_PausePlayPeerConnections/control.audio_perf
deleted file mode 100644
index 8b7a095..0000000
--- a/client/site_tests/webrtc_PausePlayPeerConnections/control.audio_perf
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "malmnas@google.com, chromeos-meetings@google.com"
-NAME = "webrtc_PausePlayPeerConnections.audio_perf"
-PURPOSE = "Performance test of frequent pause and plays of peer connections"
-CRITERIA = "Fails if the tab freezes during the test"
-ATTRIBUTES = "suite:hotrod"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "audio"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-    "components": ["OS>Kernel>Audio"],
-}
-JOB_RETRIES = 2
-
-DOC = """
-This test starts 10 peer connections with audio streams.
-The test randomly pauses and plays the streams.
-
-The test collects system metrics during the run. See go/cfm-perf-metrics
-for details about the metrics.
-"""
-
-job.run_test(
-        "webrtc_PausePlayPeerConnections",
-        mode = 'performance',
-        element_type = 'audio',
-        tag = 'audio_perf')
-
diff --git a/client/site_tests/webrtc_PausePlayPeerConnections/control.video b/client/site_tests/webrtc_PausePlayPeerConnections/control.video
deleted file mode 100644
index 80b31bd..0000000
--- a/client/site_tests/webrtc_PausePlayPeerConnections/control.video
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "webrtc_PausePlayPeerConnections.video"
-PURPOSE = "Ensures frequent pause and plays of peer connection streams work"
-CRITERIA = "Fails if the tab freezes during the test"
-ATTRIBUTES = "suite:hotrod, suite:bluestreak-pre-cq"
-TIME = "SHORT"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-    "components": ["OS>Kernel>Video"],
-}
-JOB_RETRIES = 2
-
-DOC = """
-This test starts 10 peer connections with low resolution video
-streams. The test randomly pauses and plays the streams.
-
-This is a regression test for bug 718369.
-"""
-
-job.run_test("webrtc_PausePlayPeerConnections", element_type='video', tag='video')
-
diff --git a/client/site_tests/webrtc_PausePlayPeerConnections/control.video_perf b/client/site_tests/webrtc_PausePlayPeerConnections/control.video_perf
deleted file mode 100644
index 4357d14..0000000
--- a/client/site_tests/webrtc_PausePlayPeerConnections/control.video_perf
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "webrtc_PausePlayPeerConnections.video_perf"
-PURPOSE = "Performance test of frequent pause and plays of peer connections"
-CRITERIA = "Fails if the tab freezes during the test"
-ATTRIBUTES = "suite:hotrod"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "video"
-TEST_TYPE = "client"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-    "components": ["OS>Kernel>Video"],
-}
-JOB_RETRIES = 2
-
-DOC = """
-This test starts 10 peer connections with low resolution video
-streams. The test randomly pauses and plays the streams.
-
-The test collects system metrics during the run. See go/cfm-perf-metrics
-for details about the metrics.
-"""
-
-job.run_test(
-        "webrtc_PausePlayPeerConnections",
-        mode = 'performance',
-        element_type = 'video',
-        tag = 'video_perf')
-
diff --git a/client/site_tests/webrtc_PausePlayPeerConnections/pause-play.js b/client/site_tests/webrtc_PausePlayPeerConnections/pause-play.js
deleted file mode 100644
index ce650e2..0000000
--- a/client/site_tests/webrtc_PausePlayPeerConnections/pause-play.js
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright 2017 The Chromium Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-/*jshint esversion: 6 */
-
-'use strict';
-
-const $ = document.getElementById.bind(document);
-
-class TestRunner {
-  constructor(runtimeSeconds, pausePlayIterationDelayMillis) {
-    this.runtimeSeconds = runtimeSeconds;
-    this.pausePlayIterationDelayMillis = pausePlayIterationDelayMillis;
-    this.elements = [];
-    this.peerConnections = [];
-    this.iteration = 0;
-    this.startTime;
-  }
-
-  addPeerConnection(elementType) {
-    const element = document.createElement(elementType);
-    element.autoplay = false;
-    $('body').appendChild(element);
-    let resolution;
-    if (elementType === 'video') {
-      resolution = {w: 300, h: 225};
-    } else if (elementType === 'audio') {
-      resolution = {w: -1, h: -1};  // -1 is interpreted as disabled
-    } else {
-      throw new Error('elementType must be one of "audio" or "video"');
-    }
-    this.elements.push(element);
-    this.peerConnections.push(
-        new PeerConnection(element, [resolution], cpuOveruseDetection));
-  }
-
-  runTest() {
-    let promises = this.peerConnections.map((conn) => conn.start());
-    Promise.all(promises)
-        .then(() => {
-          this.startTime = Date.now();
-          this.pauseAndPlayLoop();
-        })
-        .catch((e) => {throw e});
-  }
-
-  pauseAndPlayLoop() {
-    this.iteration++;
-    this.elements.forEach((feed) => {
-      if (Math.random() >= 0.5) {
-        feed.play();
-      } else {
-        feed.pause();
-      }
-    });
-    const status = this.getStatus();
-    $('status').textContent = status;
-    if (status != 'ok-done') {
-      setTimeout(
-          () => {this.pauseAndPlayLoop()}, this.pausePlayIterationDelayMillis);
-    } else {  // We're done. Pause all feeds.
-      this.elements.forEach((feed) => {
-        feed.pause();
-      });
-    }
-  }
-
-  getStatus() {
-    if (this.iteration == 0) {
-      return 'not-started';
-    }
-    try {
-      this.peerConnections.forEach((conn) => conn.verifyState());
-    } catch (e) {
-      return `failure: ${e.message}`;
-    }
-    const timeSpent = Date.now() - this.startTime;
-    if (timeSpent >= this.runtimeSeconds * 1000) {
-      return 'ok-done';
-    } else {
-      return `running, iteration: ${this.iteration}`;
-    }
-  }
-}
-
-// Declare testRunner so that the Python code can access it to query status.
-// Also allows us to access it easily in dev tools for debugging.
-let testRunner;
-// Set from the Python test runner
-let cpuOveruseDetection = null;
-let elementType;
-
-function startTest(
-    runtimeSeconds, numPeerConnections, pausePlayIterationDelayMillis) {
-  testRunner = new TestRunner(
-      runtimeSeconds, pausePlayIterationDelayMillis);
-  for (let i = 0; i < numPeerConnections; i++) {
-    testRunner.addPeerConnection(elementType);
-  }
-  testRunner.runTest();
-}
-
-function getStatus() {
-  return testRunner ? testRunner.getStatus() : 'not-initialized';
-}
-
diff --git a/client/site_tests/webrtc_PausePlayPeerConnections/webrtc_PausePlayPeerConnections.py b/client/site_tests/webrtc_PausePlayPeerConnections/webrtc_PausePlayPeerConnections.py
deleted file mode 100644
index a53e6b7..0000000
--- a/client/site_tests/webrtc_PausePlayPeerConnections/webrtc_PausePlayPeerConnections.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import test_webrtc_peer_connection
-
-
-class webrtc_PausePlayPeerConnections(test.test):
-    """Tests many peerconnections randomly paused and played."""
-    version = 1
-
-    def run_once(self, mode = 'functional', element_type='video'):
-        """
-        Runs the test.
-
-        @param mode: 'functional' or 'performance' depending on desired mode.
-        @param element_type: the element type to use for feeds, video or audio.
-        """
-        kwargs = {
-            'own_script': 'pause-play.js',
-            'common_script': 'loopback-peerconnection.js',
-            'bindir': self.bindir,
-            'tmpdir': self.tmpdir,
-            'debugdir': self.debugdir,
-            'num_peer_connections': 10,
-            'iteration_delay_millis': 20,
-            'before_start_hook': lambda tab: tab.EvaluateJavaScript(
-                    "elementType = '{}'".format(element_type))
-        }
-
-        if mode == 'functional':
-            test = test_webrtc_peer_connection.WebRtcPeerConnectionTest(
-                    title = 'Pause Play Peerconnections',
-                    **kwargs)
-            test.run_test()
-        elif mode == 'performance':
-            test = test_webrtc_peer_connection\
-                    .WebRtcPeerConnectionPerformanceTest(
-                            title = 'Pause Play Peerconnections '
-                                    + 'Performance test',
-                            **kwargs)
-            test.run_test()
-            test.collector.write_metrics(self.output_perf_value)
-        else:
-            raise error.TestError('mode must be "functional" or "performance"')
-
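
The webrtc_PausePlayPeerConnections client test removed above delegates most of the work to the shared WebRtcPeerConnectionTest harness; its own contributions are pause-play.js plus a before_start_hook that seeds the JavaScript global elementType before the script runs. A rough sketch of that Python-to-JavaScript handoff, and of how a caller could poll the getStatus() helper that pause-play.js exposes (here tab stands in for the Telemetry tab object the harness passes to the hook; the polling loop is an illustration, not the harness's actual implementation):

    import time

    def make_before_start_hook(element_type):
        """Build a hook that seeds the JS global read by pause-play.js."""
        def hook(tab):
            tab.EvaluateJavaScript("elementType = '{}'".format(element_type))
        return hook

    def wait_for_js_test(tab, timeout_seconds=120):
        """Poll getStatus() until the JS side reports ok-done or a failure."""
        deadline = time.time() + timeout_seconds
        status = 'not-initialized'
        while time.time() < deadline:
            status = tab.EvaluateJavaScript('getStatus()')
            if status == 'ok-done':
                return
            if status.startswith('failure:'):
                raise RuntimeError(status)
            time.sleep(1)
        raise RuntimeError('timed out, last status: %s' % status)
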
diff --git a/client/site_tests/webservd_BasicDBusAPI/control b/client/site_tests/webservd_BasicDBusAPI/control
deleted file mode 100644
index 909c8fd..0000000
--- a/client/site_tests/webservd_BasicDBusAPI/control
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, avakulenko'
-NAME = 'webservd_BasicDBusAPI'
-TIME = 'FAST'
-TEST_TYPE = 'client'
-
-DOC = """
-Check that basic DBus API calls to the webservd return expected responses.
-
-"""
-
-job.run_test('webservd_BasicDBusAPI')
diff --git a/client/site_tests/webservd_BasicDBusAPI/webservd_BasicDBusAPI.py b/client/site_tests/webservd_BasicDBusAPI/webservd_BasicDBusAPI.py
deleted file mode 100644
index bdb151f..0000000
--- a/client/site_tests/webservd_BasicDBusAPI/webservd_BasicDBusAPI.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import dbus
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-
-SERVICE_NAME = 'org.chromium.WebServer'
-MANAGER_INTERFACE = 'org.chromium.WebServer.Manager'
-MANAGER_OBJECT_PATH = '/org/chromium/WebServer/Manager'
-
-EXPECTED_PING_RESPONSE = 'Web Server is running'
-
-class webservd_BasicDBusAPI(test.test):
-    """Check that basic webservd daemon DBus APIs are functional."""
-    version = 1
-
-    def run_once(self):
-        """Test entry point."""
-        bus = dbus.SystemBus()
-        manager_proxy = dbus.Interface(
-                bus.get_object(SERVICE_NAME, MANAGER_OBJECT_PATH),
-                dbus_interface=MANAGER_INTERFACE)
-        ping_response = manager_proxy.Ping()
-        if EXPECTED_PING_RESPONSE != ping_response:
-            raise error.TestFail(
-                    'Expected Manager.Ping to return %s but got %s instead.' %
-                    (EXPECTED_PING_RESPONSE, ping_response))
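
Outside the autotest harness, the same webservd health check can be reproduced interactively with python-dbus on a device where the daemon is running; this is just the body of the deleted test without the test-class wrapper (a sketch, not a supported tool):

    import dbus

    bus = dbus.SystemBus()
    manager = dbus.Interface(
            bus.get_object('org.chromium.WebServer',
                           '/org/chromium/WebServer/Manager'),
            dbus_interface='org.chromium.WebServer.Manager')
    response = manager.Ping()
    assert response == 'Web Server is running', 'unexpected reply: %s' % response
    print('webservd responded: %s' % response)
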
diff --git a/client/site_tests/webstore_InstallItem/webstore_InstallItem.py b/client/site_tests/webstore_InstallItem/webstore_InstallItem.py
deleted file mode 100644
index c59242e..0000000
--- a/client/site_tests/webstore_InstallItem/webstore_InstallItem.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros.webstore_test import ItemType
-from autotest_lib.client.cros.webstore_test import webstore_test
-
-class webstore_InstallItem(webstore_test):
-    """
-    Installs an item and tests that it installed correctly.
-
-    This is used by several tests, which pass the parameters item_id,
-    item_type, and install_type to the test. If it's an app, this
-    class verifies that the app can launch.
-    """
-    version = 1
-
-    def run(self, item_id, item_type, install_type):
-        self.install_item(item_id, item_type, install_type)
-        if item_type != ItemType.extension and item_type != ItemType.theme:
-            self.launch_app(item_id)
diff --git a/client/site_tests/webstore_SanityTest/webstore_SanityTest.py b/client/site_tests/webstore_SanityTest/webstore_SanityTest.py
deleted file mode 100644
index bacc345..0000000
--- a/client/site_tests/webstore_SanityTest/webstore_SanityTest.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.cros.webstore_test import webstore_test
-
-class webstore_SanityTest(webstore_test):
-    """
-    Verifies that the CWS landing page works properly.
-    """
-
-    version = 1
-
-    def section_header(self, name):
-        """
-        Returns the XPath of the section header for the given section.
-
-        @param name The name of the section
-        """
-        return '//div[contains(@class, "wall-structured-section-header")]' + \
-                '/div[text() = "%s"]' % name
-
-    sections = ['Featured', 'More recommendations']
-    wall_tile = '//div[contains(@class, "webstore-test-wall-tile")]'
-    marquee = '//div[contains(@class, "webstore-test-wall-marquee-slideshow")]'
-
-    def run(self):
-        self.driver.get(self.webstore_url)
-
-        for section in self.sections:
-            self.driver.find_element_by_xpath(self.section_header(section))
-        self.driver.find_element_by_xpath(self.wall_tile)
-        self.driver.find_element_by_xpath(self.marquee)
diff --git a/client/tests/OWNERS b/client/tests/OWNERS
new file mode 100644
index 0000000..f9bd0f4
--- /dev/null
+++ b/client/tests/OWNERS
@@ -0,0 +1,3 @@
+include /INFRA_OWNERS
+include /ENGPROD_OWNERS
+*
diff --git a/client/tests/aborttest/aborttest.py b/client/tests/aborttest/aborttest.py
deleted file mode 100644
index 6e8dadd..0000000
--- a/client/tests/aborttest/aborttest.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import test
-
-class aborttest(test.test):
-    version = 1
-
-    def execute(self):
-        raise error.JobError('Arrrrrrrrggggh. You are DOOOMED')
diff --git a/client/tests/aborttest/control b/client/tests/aborttest/control
deleted file mode 100644
index fea1d3c..0000000
--- a/client/tests/aborttest/control
+++ /dev/null
@@ -1,12 +0,0 @@
-AUTHOR = "Martin Bligh <mbligh@google.com>"
-NAME = "Abort test"
-TEST_TYPE = "client"
-TEST_CLASS = "General"
-TEST_CATEGORY = "Functional"
-TIME = "SHORT"
-DOC = """\
-Raise a JobError to simulate a test aborting.  This is for testing Autotest
-itself.
-"""
-
-job.run_test('aborttest')
diff --git a/client/tests/aio_dio_bugs/aio_dio_bugs.py b/client/tests/aio_dio_bugs/aio_dio_bugs.py
deleted file mode 100644
index 4324111..0000000
--- a/client/tests/aio_dio_bugs/aio_dio_bugs.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-# tests is a simple array of "cmd" "arguments"
-tests = [["aio-dio-invalidate-failure", "poo"],
-         ["aio-dio-subblock-eof-read", "eoftest"],
-         ["aio-free-ring-with-bogus-nr-pages", ""],
-         ["aio-io-setup-with-nonwritable-context-pointer", ""],
-         ["aio-dio-extend-stat", "file"],
-        ]
-name = 0
-arglist = 1
-
-class aio_dio_bugs(test.test):
-    version = 5
-    preserve_srcdir = True
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.job.setup_dep(['libaio'])
-        ldflags = '-L ' + self.autodir + '/deps/libaio/lib'
-        cflags = '-I ' + self.autodir + '/deps/libaio/include'
-        self.gcc_flags = ldflags + ' ' + cflags
-
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make('"CFLAGS=' + self.gcc_flags + '"')
-
-
-    def execute(self, args = ''):
-        os.chdir(self.tmpdir)
-        libs = self.autodir + '/deps/libaio/lib/'
-        ld_path = utils.prepend_path(libs,
-                              utils.environ('LD_LIBRARY_PATH'))
-        var_ld_path = 'LD_LIBRARY_PATH=' + ld_path
-        for test in tests:
-            cmd = self.srcdir + '/' + test[name] + ' ' + args + ' ' \
-                                                               + test[arglist]
-            utils.system(var_ld_path + ' ' + cmd)
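
The deleted aio_dio_bugs wrapper builds the C programs against the bundled libaio dep and then runs each binary with the dep's lib directory prepended to LD_LIBRARY_PATH. The same pattern, stripped of the autotest utils helpers, looks roughly like this (the paths are placeholders for wherever libaio and the test binaries were built):

    import os
    import subprocess

    def run_with_libaio(binary, args, libaio_lib_dir):
        """Run one of the aio_dio_bugs binaries against a private libaio build."""
        env = dict(os.environ)
        env['LD_LIBRARY_PATH'] = libaio_lib_dir + os.pathsep + env.get(
                'LD_LIBRARY_PATH', '')
        subprocess.check_call([binary] + list(args), env=env)

    run_with_libaio('./aio-dio-extend-stat', ['file'], '/path/to/deps/libaio/lib')
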
diff --git a/client/tests/aio_dio_bugs/control b/client/tests/aio_dio_bugs/control
deleted file mode 100644
index 57d26d4..0000000
--- a/client/tests/aio_dio_bugs/control
+++ /dev/null
@@ -1,19 +0,0 @@
-NAME = "aio dio bugs"
-AUTHOR = "Rafal Wijata <wijata@nec-labs.com>"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-TIME = "MEDIUM"
-DOC = """\
-This was originally submitted to
- http://bugzilla.kernel.org/show_bug.cgi?id=6831 by 
-Rafal Wijata <wijata@nec-labs.com>.  It caught a race in dio aio completion
-that would call aio_complete() before the dio callers would update i_size.
-A stat after io_getevents() would not see the new file size.
-
-The bug was fixed in the fs/direct-io.c completion reworking that appeared
-in 2.6.20.  This test should fail on 2.6.19.
-"""
-
-
-job.run_test('aio_dio_bugs')
diff --git a/client/tests/aio_dio_bugs/src/Makefile b/client/tests/aio_dio_bugs/src/Makefile
deleted file mode 100644
index 7d65c48..0000000
--- a/client/tests/aio_dio_bugs/src/Makefile
+++ /dev/null
@@ -1,24 +0,0 @@
-CC=gcc
-LDFLAGS=-laio
-CFLAGS=-W -Wall
-
-TESTS=aio-dio-invalidate-failure aio-dio-subblock-eof-read \
-      aio-free-ring-with-bogus-nr-pages \
-      aio-io-setup-with-nonwritable-context-pointer aio-dio-extend-stat
-
-all: $(TESTS)
-
-aio-dio-invalidate-failure: aio-dio-invalidate-failure.c
-	$(CC) $(CFLAGS) $(LDFLAGS) -o $@ $^
-
-aio-dio-subblock-eof-read: aio-dio-subblock-eof-read.c
-	$(CC) $(CFLAGS) $(LDFLAGS) -o $@ $^
-
-aio-free-ring-with-bogus-nr-pages: aio-free-ring-with-bogus-nr-pages.c
-	$(CC) $(CFLAGS) $(LDFLAGS) -o $@ $^
-
-aio-io-setup-with-nonwritable-context-pointer: aio-io-setup-with-nonwritable-context-pointer.c
-	$(CC) $(CFLAGS) $(LDFLAGS) -o $@ $^
-
-aio-dio-extend-stat: aio-dio-extend-stat.c
-	$(CC) $(CFLAGS) $(LDFLAGS) -lpthread -o $@ $^
diff --git a/client/tests/aio_dio_bugs/src/aio-dio-extend-stat.c b/client/tests/aio_dio_bugs/src/aio-dio-extend-stat.c
deleted file mode 100644
index bdc8299..0000000
--- a/client/tests/aio_dio_bugs/src/aio-dio-extend-stat.c
+++ /dev/null
@@ -1,163 +0,0 @@
-#define __USE_GNU
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <libaio.h>
-#include <malloc.h>
-#include <fcntl.h>
-#include <pthread.h>
-#include <errno.h>
-
-#ifndef O_DIRECT
-#define O_DIRECT         040000 /* direct disk access hint */
-#endif
-
-
-/*
- * This was originally submitted to
- * http://bugzilla.kernel.org/show_bug.cgi?id=6831 by 
- * Rafal Wijata <wijata@nec-labs.com>.  It caught a race in dio aio completion
- * that would call aio_complete() before the dio callers would update i_size.
- * A stat after io_getevents() would not see the new file size.
- *
- * The bug was fixed in the fs/direct-io.c completion reworking that appeared
- * in 2.6.20.  This test should fail on 2.6.19.
- */
-
-#define BUFSIZE 1024
-
-static unsigned char buf[BUFSIZE] __attribute((aligned (512)));
-
-/* 
- * this was arbitrarily chosen to take about two seconds on a dual athlon in a
- * debugging kernel.. it trips up long before that.
- */
-#define MAX_AIO_EVENTS 4000
-
-#define fail(fmt , args...) do {\
-	printf(fmt , ##args);	\
-	exit(1);		\
-} while (0)
-
-void fun_write1(void* ptr);
-void fun_writeN(void* ptr);
-void fun_read(void* ptr);
-
-int  handle = 0;
-io_context_t ctxp;
-struct iocb *iocbs[MAX_AIO_EVENTS];
-struct io_event ioevents[MAX_AIO_EVENTS];
-
-volatile int submittedSize = 0; //synchronization
-
-int main(int argc, char **argv)
-{
-	pthread_t thread_read; 
-	pthread_t thread_write;
-	int i;
-	int ret;
-
-	if (argc != 2)
-		fail("only arg should be file name\n");
-
-	for (i = 0; i < BUFSIZE; ++i)
-		buf[i] = 'A' + (char)(i % ('Z'-'A'+1));
-
-	buf[BUFSIZE-1] = '\n';
-
-	handle = open(argv[1], O_CREAT | O_TRUNC | O_DIRECT | O_RDWR, 0600); 
-	if (handle == -1) 
-		fail("failed to open test file %s, errno: %d\n",
-			argv[1], errno);
-
-	memset(&ctxp, 0, sizeof(ctxp));
-	ret = io_setup(MAX_AIO_EVENTS, &ctxp);
-	if (ret)
-		fail("io_setup returned %d\n", ret);
-
-	for (i = 0; i < MAX_AIO_EVENTS; ++i) {
-
-		iocbs[i] = calloc(1, sizeof(struct iocb));
-		if (iocbs[i] == NULL)
-			fail("failed to allocate an iocb\n");
-	
-/*		iocbs[i]->data = i; */
-		iocbs[i]->aio_fildes = handle;
-		iocbs[i]->aio_lio_opcode = IO_CMD_PWRITE;
-		iocbs[i]->aio_reqprio = 0;
-		iocbs[i]->u.c.buf = buf;
-		iocbs[i]->u.c.nbytes = BUFSIZE;
-		iocbs[i]->u.c.offset = BUFSIZE*i;
-	}
-
-	pthread_create(&thread_read, NULL, (void*)&fun_read, NULL);
-	pthread_create(&thread_write, NULL, (void*)&fun_writeN, NULL);
-
-	pthread_join(thread_read, NULL);
-	pthread_join(thread_write, NULL);
-
-	io_destroy(ctxp);
-	close(handle);
-
-	printf("%u iterations of racing extensions and collection passed\n",
-		MAX_AIO_EVENTS);
-
-	return 0;
-}
-
-void fun_read(void *ptr)
-{
-	long n = MAX_AIO_EVENTS;
-	struct stat filestat;
-	long long exSize;
-	long i;
-	long r;
-
-	while (n > 0) {
-		r = io_getevents(ctxp, 1, MAX_AIO_EVENTS, ioevents, NULL);
-		if (r < 0) 
-			fail("io_getevents returned %ld\n", r);
-
-		n -= r;
-		for (i = 0; i < r; ++i) {
-			if (ioevents[i].obj->u.c.nbytes != BUFSIZE)
-				fail("error in block: expacted %d bytes, "
-				     "receiced %ld\n", BUFSIZE,
-				     ioevents[i].obj->u.c.nbytes);
-
-			exSize = ioevents[i].obj->u.c.offset +
-				 ioevents[i].obj->u.c.nbytes;
-			fstat(handle, &filestat);
-			if (filestat.st_size < exSize)
-				fail("write of %lu bytes @%llu finished, "
-				     "expected filesize at least %llu, but "
-				     "got %ld\n", ioevents[i].obj->u.c.nbytes,
-				     ioevents[i].obj->u.c.offset, exSize,
-				     filestat.st_size);
-		}
-	}
-}
-
-void fun_writeN(void *ptr)
-{
-	int i;
-	int ret;
-
-	for(i = 0; i < MAX_AIO_EVENTS; ++i) {
-		ret = io_submit(ctxp, 1, &(iocbs[i]));
-		if (ret != 1)
-			fail("io_subit returned %d instead of 1\n", ret);
-	}
-}
-
-void fun_write1(void *ptr)
-{
-	int ret;
-    
-	ret = io_submit(ctxp, MAX_AIO_EVENTS, iocbs);
-	if (ret !=  MAX_AIO_EVENTS)
-		fail("io_subit returned %d instead of %u\n", ret,
-		     MAX_AIO_EVENTS);
-}
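
The header comment of the deleted aio-dio-extend-stat.c spells out the invariant the program stresses: once io_getevents() reports an appending O_DIRECT write as complete, a stat of the file must already reflect the extended size. Restated as a small Python check for readability (illustrative only; the race itself can only be provoked from C with libaio, as the program above does):

    import os

    def check_size_covers_write(fd, offset, nbytes):
        """After a write to [offset, offset + nbytes) is reported complete,
        fstat() must show a file size covering the written range."""
        expected_end = offset + nbytes
        actual = os.fstat(fd).st_size
        if actual < expected_end:
            raise AssertionError(
                    'write of %d bytes @%d finished, expected size >= %d, got %d'
                    % (nbytes, offset, expected_end, actual))
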
diff --git a/client/tests/aio_dio_bugs/src/aio-dio-invalidate-failure.c b/client/tests/aio_dio_bugs/src/aio-dio-invalidate-failure.c
deleted file mode 100644
index 7cc4a4b..0000000
--- a/client/tests/aio_dio_bugs/src/aio-dio-invalidate-failure.c
+++ /dev/null
@@ -1,155 +0,0 @@
-#define _XOPEN_SOURCE 500 /* pwrite */
-#include <unistd.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <libaio.h>
-#include <errno.h>
-#include <time.h>
-#include <sys/types.h>
-#include <sys/wait.h>
-
-/*
- * DIO invalidates the read cache after it writes.  At one point it tried to
- * return EIO if this failed.  When called from AIO, though, this EIO return
- * would clobber EIOCBQUEUED and cause fs/aio.c and fs/direct-io.c to complete
- * an iocb twice.  This typically references freed memory from an interrupt
- * handler and oopses.
- *
- * This test hits the race after at most two minutes on a single spindle.  It
- * spins performing large dio writes.  It also spins racing buffered writes.
- * It assumes it's on ext3 using ordered writes.  The ordered write bhs can be
- * pinned by jbd as a transaction commits.  If invalidate_inode_pages2_range()
- * hits pages backed by those buffers ->releasepage will fail and it'll try to
- * return -EIO.
- */
-#ifndef O_DIRECT
-#define O_DIRECT         040000 /* direct disk access hint */
-#endif
-
-#define GINORMOUS (32 * 1024 * 1024)
-
-
-/* This test never survived to 180 seconds on a single spindle */
-#define SECONDS 200
-
-static unsigned char buf[GINORMOUS] __attribute((aligned (512)));
-
-#define fail(fmt , args...) do {\
-	printf(fmt , ##args);	\
-	exit(1);		\
-} while (0)
-
-void spin_dio(int fd)
-{
-	io_context_t ctx;
-	struct iocb iocb;
-	struct iocb *iocbs[1] = { &iocb };
-	struct io_event event;
-	int ret;
-
-        io_prep_pwrite(&iocb, fd, buf, GINORMOUS, 0);
-
-	ret = io_queue_init(1, &ctx);
-	if (ret)
-		fail("io_queue_init returned %d", ret);
-
-	while (1) {
-		ret = io_submit(ctx, 1, iocbs);
-		if (ret != 1)
-			fail("io_submit returned %d instead of 1", ret);
-
-		ret = io_getevents(ctx, 1, 1, &event, NULL);
-		if (ret != 1)
-			fail("io_getevents returned %d instead of 1", ret);
-
-		if (event.res == -EIO) {
-			printf("invalidation returned -EIO, OK\n");
-			exit(0);
-		}
-
-		if (event.res != GINORMOUS)
-			fail("event res %ld\n", event.res);
-	}
-}
-
-void spin_buffered(int fd)
-{
-	int ret;
-
-	while (1) {
-		ret = pwrite(fd, buf, GINORMOUS, 0);
-		if (ret != GINORMOUS)
-			fail("buffered write returned %d", ret);
-	}
-}
-
-static void alarm_handler(int signum)
-{
-}
-
-int main(int argc, char **argv)
-{
-	pid_t buffered_pid;
-	pid_t dio_pid;
-	pid_t pid;
-	int fd;
-	int fd2;
-	int status;
-
-	if (argc != 2)
-		fail("only arg should be file name");
-
-	fd = open(argv[1], O_DIRECT|O_CREAT|O_RDWR, 0644);
-	if (fd < 0)
-		fail("open dio failed: %d\n", errno);
-
-	fd2 = open(argv[1], O_RDWR, 0644);
-	if (fd < 0)
-		fail("open failed: %d\n", errno);
-
-	buffered_pid = fork();
-	if (buffered_pid < 0)
-		fail("fork failed: %d\n", errno);
-
-	if (buffered_pid == 0) {
-		spin_buffered(fd2);
-		exit(0);
-	}
-
-	dio_pid = fork();
-	if (dio_pid < 0) {
-		kill(buffered_pid, SIGKILL);
-		fail("fork failed: %d\n", errno);
-	}
-
-	if (dio_pid == 0) {
-		spin_dio(fd);
-		exit(0);
-	}
-
-	signal(SIGALRM, alarm_handler);
-	alarm(SECONDS);
-
-	pid = wait(&status);
-	if (pid < 0 && errno == EINTR) {
-		/* if we timed out then we're done */
-		kill(buffered_pid, SIGKILL);
-		kill(dio_pid, SIGKILL);
-		printf("ran for %d seconds without error, passing\n", SECONDS);
-		exit(0);
-	}
-
-	if (pid == dio_pid)
-		kill(buffered_pid, SIGKILL);
-	else
-		kill(dio_pid, SIGKILL);
-
-	/* 
-	 * pass on the child's pass/fail return code or fail if the child 
-	 * didn't exit cleanly.
-	 */
-	exit(WIFEXITED(status) ? WEXITSTATUS(status) : 1);
-}
diff --git a/client/tests/aio_dio_bugs/src/aio-dio-subblock-eof-read.c b/client/tests/aio_dio_bugs/src/aio-dio-subblock-eof-read.c
deleted file mode 100644
index f4eea24..0000000
--- a/client/tests/aio_dio_bugs/src/aio-dio-subblock-eof-read.c
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- *  Code taken from an example posted to linux-aio at kvack.org
- *  Original Author: Drangon Zhou
- *  Munged by Jeff Moyer to get it to build and to incorporate it into
- *  the autotest framework.
- *
- *  Description:  This source code implements a test to ensure that an AIO
- *  read of the last block in a file opened with O_DIRECT returns the proper
- *  amount of data.  In the past, there was a bug that resulted in a return
- *  value of the requested block size, when in fact there was only a fraction
- *  of that data available.  Thus, if the last data block contained 300 bytes
- *  worth of data, and the user issued a 4k read, we want to ensure that
- *  the return value is 300, not 4k.
- */
-
-#define _GNU_SOURCE
-#include <stdio.h>
-#include <stdlib.h>
-#include <libaio.h>
-#include <fcntl.h>
-#include <unistd.h>
-#include <errno.h>
-
-/* Create a file of a size that is not a multiple of block size */
-#define FILE_SIZE	300
-
-#define fail(fmt , args...) 	\
-do {				\
-	printf(fmt , ##args);	\
-	exit(1);		\
-} while (0)
-
-static unsigned char buffer[4096] __attribute((aligned (512)));
-
-int
-main(int argc, char **argv)
-{
-	int ret;
-	int fd;
-	const char *filename;
-	struct iocb myiocb;
-	struct iocb *cb = &myiocb;
-	io_context_t ioctx;
-	struct io_event ie;
-    
-	if (argc != 2)
-		fail("only arg should be file name");
-
-	filename = argv[1];
-	fd = open(filename, O_CREAT|O_RDWR|O_DIRECT, 0600);
-	if (fd < 0)
-		fail("open returned error %d\n", errno);
-
-	ret = ftruncate(fd, FILE_SIZE);
-	if (ret < 0)
-		fail("truncate returned error %d\n", errno);
-
-	/* <1> use normal disk read, this should be ok */
-	ret = read(fd, buffer, 4096);
-	if (ret != FILE_SIZE)
-		fail("buffered read returned %d, should be 300\n", ret);
-
-	/* <2> use AIO disk read, it sees error. */
-	memset(&myiocb, 0, sizeof(myiocb));
-	cb->data = 0;
-	cb->key = 0;
-	cb->aio_lio_opcode = IO_CMD_PREAD;
-	cb->aio_reqprio = 0; 
-	cb->aio_fildes = fd; 
-	cb->u.c.buf = buffer;
-	cb->u.c.nbytes = 4096;
-	cb->u.c.offset = 0;
-    
-	ret = io_queue_init(1, &ioctx);
-	if (ret != 0)
-		fail("io_queue_init returned error %d\n", ret);
-
-	ret = io_submit(ioctx, 1, &cb);
-	if (ret != 1)
-		fail("io_submit returned error %d\n", ret);
-
-	ret = io_getevents(ioctx, 1, 1, &ie, NULL);
-	if (ret != 1)
-		fail("io_getevents returned %d\n", ret);
-
-	/*
-	 *  If all goes well, we should see 300 bytes read.  If things
-	 *  are broken, we may very well see a result of 4k.
-	 */
-	if (ie.res != FILE_SIZE)
-		fail("AIO read of last block in file returned %d bytes, "
-		     "expected %d\n", ret, FILE_SIZE);
-
-	printf("AIO read of last block in file succeeded.\n");
-	return 0;
-}
diff --git a/client/tests/aio_dio_bugs/src/aio-free-ring-with-bogus-nr-pages.c b/client/tests/aio_dio_bugs/src/aio-free-ring-with-bogus-nr-pages.c
deleted file mode 100644
index 8e9d80b..0000000
--- a/client/tests/aio_dio_bugs/src/aio-free-ring-with-bogus-nr-pages.c
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- *  Code taken from an example posted to Red Hat bugzilla #220971
- *
- *  Original Author: Kostantin Khorenko from OpenVZ/Virtuozzo
- *  Munged by Jeff Moyer to incorporate it into the autotest framework.
- *
- *  Description: "aio_setup_ring() function initializes info->nr_pages
- *    variable incorrectly, then this variable can be used in error path
- *    to free the allocated resources. By this way an unprivileged user
- *    can crash the node."
- *
- *  At the beginning of aio_setup_ring, info->nr_pages is initialized
- *  to the requested number of pages.  However, it is supposed to
- *  indicate how many pages are mapped in info->ring_pages.  Thus, if
- *  the call to do_mmap fails:
- *
- *	info->mmap_base = do_mmap(NULL, 0, info->mmap_size, 
- *				  PROT_READ|PROT_WRITE, MAP_ANON|MAP_PRIVATE,
- *				  0);
- *	if (IS_ERR((void *)info->mmap_base)) {
- *		up_write(&ctx->mm->mmap_sem);
- *		printk("mmap err: %ld\n", -info->mmap_base);
- *		info->mmap_size = 0;
- *		aio_free_ring(ctx);    <---------
- *		return -EAGAIN;
- *	}
- *
- *  we end up calling aio_free_ring with a bogus array and cause an oops.
- *
- *  This is a destructive test.
- */
-#include <stdio.h>
-#include <unistd.h>
-#include <sys/mman.h>
-#include <errno.h>
-#include <libgen.h>
-#include <libaio.h>
-
-int main(int __attribute__((unused)) argc, char **argv)
-{
-	long res;
-	io_context_t ctx = (void*) 0;
-	void* map;
-
-	while (1) {
-		map = mmap(NULL, 100, PROT_READ, MAP_ANONYMOUS|MAP_PRIVATE,
-			   0, 0);
-		if (map == MAP_FAILED)
-			break;
-		map = mmap(NULL, 100, PROT_WRITE, MAP_ANONYMOUS|MAP_PRIVATE,
-			   0, 0);
-		if (map == MAP_FAILED)
-			break;
-	}
-
-	res = io_setup(10000, &ctx);
-	if (res != -ENOMEM) {
-		printf("%s: Error: io_setup returned %ld, expected -ENOMEM\n",
-		       basename(argv[0]), res);
-		return 1;
-	} else
-		printf("%s: Success!\n", basename(argv[0]));
-	return 0;
-}
diff --git a/client/tests/aio_dio_bugs/src/aio-io-setup-with-nonwritable-context-pointer.c b/client/tests/aio_dio_bugs/src/aio-io-setup-with-nonwritable-context-pointer.c
deleted file mode 100644
index c0ba09f..0000000
--- a/client/tests/aio_dio_bugs/src/aio-io-setup-with-nonwritable-context-pointer.c
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- *  Author:  Jeff Moyer
- *
- *  Description: Pass a non-writable context pointer to io_setup to see if
- *  the kernel deals with it correctly.  In the past, the reference counting
- *  in this particular error path was off and this operation would cause an
- *  oops.
- *
- *  This is a destructive test.
- */
-#include <stdio.h>
-#include <stdlib.h>
-#include <sys/mman.h>
-#include <libgen.h>
-#include <libaio.h>
-
-int
-main(int __attribute__((unused)) argc, char **argv)
-{
-	void *addr;
-
-	addr = mmap(NULL, 4096, PROT_READ, MAP_SHARED|MAP_ANONYMOUS, 0, 0);
-	if (!addr) {
-		perror("mmap");
-		exit(1);
-	}
-	io_setup(1, addr /* un-writable pointer */);
-
-	printf("%s: Success!\n", basename(argv[0]));
-	return 0;
-}
diff --git a/client/tests/aiostress/aio-stress.c b/client/tests/aiostress/aio-stress.c
deleted file mode 100644
index 91af264..0000000
--- a/client/tests/aiostress/aio-stress.c
+++ /dev/null
@@ -1,1514 +0,0 @@
-/*
- * Copyright (c) 2004 SuSE, Inc.  All Rights Reserved.
- * 
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of version 2 of the GNU General Public License as
- * published by the Free Software Foundation.
- * 
- * This program is distributed in the hope that it would be useful, but
- * WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- * 
- * Further, this software is distributed without any warranty that it is
- * free of the rightful claim of any third person regarding infringement
- * or the like.  Any license provided herein, whether implied or
- * otherwise, applies only to this software file.  Patent licenses, if
- * any, provided herein do not apply to combinations of this program with
- * other software, or any other product whatsoever.
- * 
- * You should have received a copy of the GNU General Public License along
- * with this program; if not, write the Free Software Foundation, Inc., 59
- * Temple Place - Suite 330, Boston MA 02111-1307, USA.
- * 
- * Contact information: Silicon Graphics, Inc., 1600 Amphitheatre Pkwy,
- * Mountain View, CA  94043, or:
- * 
- *
- * aio-stress
- *
- * will open or create each file on the command line, and start a series
- * of aio to it.  
- *
- * aio is done in a rotating loop.  first file1 gets 8 requests, then
- * file2, then file3 etc.  As each file finishes writing, it is switched
- * to reads
- *
- * io buffers are aligned in case you want to do raw io
- *
- * compile with gcc -Wall -laio -lpthread -o aio-stress aio-stress.c
- *
- * run aio-stress -h to see the options
- *
- * Please mail Chris Mason (mason@suse.com) with bug reports or patches
- */
-#define _FILE_OFFSET_BITS 64
-#define PROG_VERSION "0.21"
-#define NEW_GETEVENTS
-
-#include <stdio.h>
-#include <errno.h>
-#include <assert.h>
-#include <stdlib.h>
-
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <unistd.h>
-#include <sys/time.h>
-#include <libaio.h>
-#include <sys/ipc.h>
-#include <sys/shm.h>
-#include <sys/mman.h>
-#include <string.h>
-#include <pthread.h>
-
-#define IO_FREE 0
-#define IO_PENDING 1
-#define RUN_FOREVER -1
-
-#ifndef O_DIRECT
-#define O_DIRECT         040000 /* direct disk access hint */
-#endif
-
-enum {
-    WRITE,
-    READ,
-    RWRITE,
-    RREAD,
-    LAST_STAGE,
-};
-
-#define USE_MALLOC 0
-#define USE_SHM 1
-#define USE_SHMFS 2
-
-/* 
- * various globals, these are effectively read only by the time the threads
- * are started
- */
-long stages = 0;
-unsigned long page_size_mask;
-int o_direct = 0;
-int o_sync = 0;
-int latency_stats = 0;
-int completion_latency_stats = 0;
-int io_iter = 8;
-int iterations = RUN_FOREVER;
-int max_io_submit = 0;
-long rec_len = 64 * 1024;
-int depth = 64;
-int num_threads = 1;
-int num_contexts = 1;
-off_t context_offset = 2 * 1024 * 1024;
-int fsync_stages = 1;
-int use_shm = 0;
-int shm_id;
-char *unaligned_buffer = NULL;
-char *aligned_buffer = NULL;
-int padded_reclen = 0;
-int stonewall = 1;
-int verify = 0;
-char *verify_buf = NULL;
-int unlink_files = 0;
-
-struct io_unit;
-struct thread_info;
-
-/* pthread mutexes and other globals for keeping the threads in sync */
-pthread_cond_t stage_cond = PTHREAD_COND_INITIALIZER;
-pthread_mutex_t stage_mutex = PTHREAD_MUTEX_INITIALIZER;
-int threads_ending = 0;
-int threads_starting = 0;
-struct timeval global_stage_start_time;
-struct thread_info *global_thread_info;
-
-/* 
- * latencies during io_submit are measured, these are the 
- * granularities for deviations 
- */
-#define DEVIATIONS 6
-int deviations[DEVIATIONS] = { 100, 250, 500, 1000, 5000, 10000 };
-struct io_latency {
-    double max;
-    double min;
-    double total_io;
-    double total_lat;
-    double deviations[DEVIATIONS]; 
-};
-
-/* container for a series of operations to a file */
-struct io_oper {
-    /* already open file descriptor, valid for whatever operation you want */
-    int fd;
-
-    /* starting byte of the operation */
-    off_t start;
-
-    /* ending byte of the operation */
-    off_t end;
-
-    /* size of the read/write buffer */
-    int reclen;
-
-    /* max number of pending requests before a wait is triggered */
-    int depth;
-
-    /* current number of pending requests */
-    int num_pending;
-
-    /* last error, zero if there were none */
-    int last_err;
-
-    /* total number of errors hit. */
-    int num_err;
-
-    /* read,write, random, etc */
-    int rw;
-
-    /* number of ios that will get sent to aio */
-    int total_ios;
-
-    /* number of ios we've already sent */
-    int started_ios;
-
-    /* last offset used in an io operation */
-    off_t last_offset;
-
-    /* stonewalled = 1 when we got cut off before submitting all our ios */
-    int stonewalled;
-
-    /* list management */
-    struct io_oper *next;
-    struct io_oper *prev;
-
-    struct timeval start_time;
-
-    char *file_name;
-};
-
-/* a single io, and all the tracking needed for it */
-struct io_unit {
-    /* note, iocb must go first! */
-    struct iocb iocb;
-
-    /* pointer to parent io operation struct */
-    struct io_oper *io_oper;
-
-    /* aligned buffer */
-    char *buf;
-
-    /* size of the aligned buffer (record size) */
-    int buf_size;
-
-    /* state of this io unit (free, pending, done) */
-    int busy;
-
-    /* result of last operation */
-    long res;
-
-    struct io_unit *next;
-
-    struct timeval io_start_time;		/* time of io_submit */
-};
-
-struct thread_info {
-    io_context_t io_ctx;
-    pthread_t tid;
-
-    /* allocated array of io_unit structs */
-    struct io_unit *ios;
-
-    /* list of io units available for io */
-    struct io_unit *free_ious;
-
-    /* number of io units in the ios array */
-    int num_global_ios;
-
-    /* number of io units in flight */
-    int num_global_pending;
-
-    /* preallocated array of iocb pointers, only used in run_active */
-    struct iocb **iocbs;
-
-    /* preallocated array of events */
-    struct io_event *events;
-
-    /* size of the events array */
-    int num_global_events;
-
-    /* latency stats for io_submit */
-    struct io_latency io_submit_latency;
-
-    /* list of operations still in progress, and of those finished */
-    struct io_oper *active_opers;
-    struct io_oper *finished_opers;
-
-    /* number of files this thread is doing io on */
-    int num_files;
-
-    /* how much io this thread did in the last stage */
-    double stage_mb_trans;
-
-    /* latency completion stats i/o time from io_submit until io_getevents */
-    struct io_latency io_completion_latency;
-};
-
-/*
- * return seconds between start_tv and stop_tv in double precision
- */
-static double time_since(struct timeval *start_tv, struct timeval *stop_tv)
-{
-    double sec, usec;
-    double ret;
-    sec = stop_tv->tv_sec - start_tv->tv_sec;
-    usec = stop_tv->tv_usec - start_tv->tv_usec;
-    if (sec > 0 && usec < 0) {
-        sec--;
-	usec += 1000000;
-    } 
-    ret = sec + usec / (double)1000000;
-    if (ret < 0)
-        ret = 0;
-    return ret;
-}
-
-/*
- * return seconds between start_tv and now in double precision
- */
-static double time_since_now(struct timeval *start_tv)
-{
-    struct timeval stop_time;
-    gettimeofday(&stop_time, NULL);
-    return time_since(start_tv, &stop_time);
-}
-
-/*
- * Add latency info to latency struct 
- */
-static void calc_latency(struct timeval *start_tv, struct timeval *stop_tv,
-			struct io_latency *lat)
-{
-    double delta;
-    int i;
-    delta = time_since(start_tv, stop_tv);
-    delta = delta * 1000;
-
-    if (delta > lat->max)
-    	lat->max = delta;
-    if (!lat->min || delta < lat->min)
-    	lat->min = delta;
-    lat->total_io++;
-    lat->total_lat += delta;
-    for (i = 0 ; i < DEVIATIONS ; i++) {
-        if (delta < deviations[i]) {
-	    lat->deviations[i]++;
-	    break;
-	}
-    }
-}
-
-static void oper_list_add(struct io_oper *oper, struct io_oper **list)
-{
-    if (!*list) {
-        *list = oper;
-	oper->prev = oper->next = oper;
-	return;
-    }
-    oper->prev = (*list)->prev;
-    oper->next = *list;
-    (*list)->prev->next = oper;
-    (*list)->prev = oper;
-    return;
-}
-
-static void oper_list_del(struct io_oper *oper, struct io_oper **list)
-{
-    if ((*list)->next == (*list)->prev && *list == (*list)->next) {
-        *list = NULL;
-	return;
-    }
-    oper->prev->next = oper->next;
-    oper->next->prev = oper->prev;
-    if (*list == oper)
-        *list = oper->next;
-}
-
-/* worker func to check error fields in the io unit */
-static int check_finished_io(struct io_unit *io) {
-    int i;
-    if (io->res != io->buf_size) {
-
-  		 struct stat s;
-  		 fstat(io->io_oper->fd, &s);
-  
-  		 /*
-  		  * If file size is large enough for the read, then this short
-  		  * read is an error.
-  		  */
-  		 if ((io->io_oper->rw == READ || io->io_oper->rw == RREAD) &&
-  		     s.st_size > (io->iocb.u.c.offset + io->res)) {
-  
-  		 		 fprintf(stderr, "io err %lu (%s) op %d, off %Lu size %d\n",
-  		 		 		 io->res, strerror(-io->res), io->iocb.aio_lio_opcode,
-  		 		 		 io->iocb.u.c.offset, io->buf_size);
-  		 		 io->io_oper->last_err = io->res;
-  		 		 io->io_oper->num_err++;
-  		 		 return -1;
-  		 }
-    }
-    if (verify && io->io_oper->rw == READ) {
-        if (memcmp(io->buf, verify_buf, io->io_oper->reclen)) {
-	    fprintf(stderr, "verify error, file %s offset %Lu contents (offset:bad:good):\n", 
-	            io->io_oper->file_name, io->iocb.u.c.offset);
-	    
-	    for (i = 0 ; i < io->io_oper->reclen ; i++) {
-	        if (io->buf[i] != verify_buf[i]) {
-		    fprintf(stderr, "%d:%c:%c ", i, io->buf[i], verify_buf[i]);
-		}
-	    }
-	    fprintf(stderr, "\n");
-	}
-
-    }
-    return 0;
-}
-
-/* worker func to check the busy bits and get an io unit ready for use */
-static int grab_iou(struct io_unit *io, struct io_oper *oper) {
-    if (io->busy == IO_PENDING)
-        return -1;
-
-    io->busy = IO_PENDING;
-    io->res = 0;
-    io->io_oper = oper;
-    return 0;
-}
-
-char *stage_name(int rw) {
-    switch(rw) {
-    case WRITE:
-        return "write";
-    case READ:
-        return "read";
-    case RWRITE:
-        return "random write";
-    case RREAD:
-        return "random read";
-    }
-    return "unknown";
-}
-
-static inline double oper_mb_trans(struct io_oper *oper) {
-    return ((double)oper->started_ios * (double)oper->reclen) /
-                (double)(1024 * 1024);
-}
-
-static void print_time(struct io_oper *oper) {
-    double runtime;
-    double tput;
-    double mb;
-
-    runtime = time_since_now(&oper->start_time); 
-    mb = oper_mb_trans(oper);
-    tput = mb / runtime;
-    fprintf(stderr, "%s on %s (%.2f MB/s) %.2f MB in %.2fs\n", 
-	    stage_name(oper->rw), oper->file_name, tput, mb, runtime);
-}
-
-static void print_lat(char *str, struct io_latency *lat) {
-    double avg = lat->total_lat / lat->total_io;
-    int i;
-    double total_counted = 0;
-    fprintf(stderr, "%s min %.2f avg %.2f max %.2f\n\t", 
-            str, lat->min, avg, lat->max);
-
-    for (i = 0 ; i < DEVIATIONS ; i++) {
-	fprintf(stderr, " %.0f < %d", lat->deviations[i], deviations[i]);
-	total_counted += lat->deviations[i];
-    }
-    if (total_counted && lat->total_io - total_counted)
-        fprintf(stderr, " < %.0f", lat->total_io - total_counted);
-    fprintf(stderr, "\n");
-    memset(lat, 0, sizeof(*lat));
-}
-
-static void print_latency(struct thread_info *t)
-{
-    struct io_latency *lat = &t->io_submit_latency;
-    print_lat("latency", lat);
-}
-
-static void print_completion_latency(struct thread_info *t)
-{
-    struct io_latency *lat = &t->io_completion_latency;
-    print_lat("completion latency", lat);
-}
-
-/*
- * updates the fields in the io operation struct that belongs to this
- * io unit, and make the io unit reusable again
- */
-void finish_io(struct thread_info *t, struct io_unit *io, long result,
-		struct timeval *tv_now) {
-    struct io_oper *oper = io->io_oper;
-
-    calc_latency(&io->io_start_time, tv_now, &t->io_completion_latency);
-    io->res = result;
-    io->busy = IO_FREE;
-    io->next = t->free_ious;
-    t->free_ious = io;
-    oper->num_pending--;
-    t->num_global_pending--;
-    check_finished_io(io);
-    if (oper->num_pending == 0 && 
-       (oper->started_ios == oper->total_ios || oper->stonewalled)) 
-    {
-        print_time(oper);
-    } 
-}
-
-int read_some_events(struct thread_info *t) {
-    struct io_unit *event_io;
-    struct io_event *event;
-    int nr;
-    int i; 
-    int min_nr = io_iter;
-    struct timeval stop_time;
-
-    if (t->num_global_pending < io_iter)
-        min_nr = t->num_global_pending;
-
-#ifdef NEW_GETEVENTS
-    nr = io_getevents(t->io_ctx, min_nr, t->num_global_events, t->events,NULL);
-#else
-    nr = io_getevents(t->io_ctx, t->num_global_events, t->events, NULL);
-#endif
-    if (nr <= 0)
-        return nr;
-
-    gettimeofday(&stop_time, NULL);
-    for (i = 0 ; i < nr ; i++) {
-	event = t->events + i;
-	event_io = (struct io_unit *)((unsigned long)event->obj); 
-	finish_io(t, event_io, event->res, &stop_time);
-    }
-    return nr;
-}
-
-/* 
- * finds a free io unit, waiting for pending requests if required.  returns
- * null if none could be found
- */
-static struct io_unit *find_iou(struct thread_info *t, struct io_oper *oper)
-{
-    struct io_unit *event_io;
-    int nr;
-
-retry:
-    if (t->free_ious) {
-        event_io = t->free_ious;
-	t->free_ious = t->free_ious->next;
-	if (grab_iou(event_io, oper)) {
-	    fprintf(stderr, "io unit on free list but not free\n");
-	    abort();
-	}
-	return event_io;
-    }
-    nr = read_some_events(t);
-    if (nr > 0)
-    	goto retry;
-    else
-    	fprintf(stderr, "no free ious after read_some_events\n");
-    return NULL;
-}
-
-/*
- * wait for all pending requests for this io operation to finish
- */
-static int io_oper_wait(struct thread_info *t, struct io_oper *oper) {
-    struct io_event event;
-    struct io_unit *event_io;
-
-    if (oper == NULL) {
-        return 0;
-    }
-
-    if (oper->num_pending == 0)
-        goto done;
-
-    /* this func is not speed sensitive, no need to go wild reading
-     * more than one event at a time
-     */
-#ifdef NEW_GETEVENTS
-    while(io_getevents(t->io_ctx, 1, 1, &event, NULL) > 0) {
-#else
-    while(io_getevents(t->io_ctx, 1, &event, NULL) > 0) {
-#endif
-	struct timeval tv_now;
-        event_io = (struct io_unit *)((unsigned long)event.obj); 
-
-	gettimeofday(&tv_now, NULL);
-	finish_io(t, event_io, event.res, &tv_now);
-
-	if (oper->num_pending == 0)
-	    break;
-    }
-done:
-    if (oper->num_err) {
-        fprintf(stderr, "%u errors on oper, last %u\n", 
-	        oper->num_err, oper->last_err);
-    }
-    return 0;
-}
-
-off_t random_byte_offset(struct io_oper *oper) {
-    off_t num;
-    off_t rand_byte = oper->start;
-    off_t range;
-    off_t offset = 1;
-
-    range = (oper->end - oper->start) / (1024 * 1024);
-    if ((page_size_mask+1) > (1024 * 1024))
-        offset = (page_size_mask+1) / (1024 * 1024);
-    if (range < offset)
-        range = 0;
-    else
-        range -= offset;
-
-    /* find a random mb offset */
-    num = 1 + (int)((double)range * rand() / (RAND_MAX + 1.0 ));
-    rand_byte += num * 1024 * 1024;
-    
-    /* find a random byte offset */
-    num = 1 + (int)((double)(1024 * 1024) * rand() / (RAND_MAX + 1.0));
-
-    /* page align */
-    num = (num + page_size_mask) & ~page_size_mask;
-    rand_byte += num;
-
-    if (rand_byte + oper->reclen > oper->end) {
-	rand_byte -= oper->reclen;
-    }
-    return rand_byte;
-}
-
-/* 
- * build an aio iocb for an operation, based on oper->rw and the
- * last offset used.  This finds the struct io_unit that will be attached
- * to the iocb, and things are ready for submission to aio after this
- * is called.
- *
- * returns null on error
- */
-static struct io_unit *build_iocb(struct thread_info *t, struct io_oper *oper)
-{
-    struct io_unit *io;
-    off_t rand_byte;
-
-    io = find_iou(t, oper);
-    if (!io) {
-        fprintf(stderr, "unable to find io unit\n");
-	return NULL;
-    }
-
-    switch(oper->rw) {
-    case WRITE:
-        io_prep_pwrite(&io->iocb,oper->fd, io->buf, oper->reclen, 
-	               oper->last_offset);
-	oper->last_offset += oper->reclen;
-	break;
-    case READ:
-        io_prep_pread(&io->iocb,oper->fd, io->buf, oper->reclen, 
-	              oper->last_offset);
-	oper->last_offset += oper->reclen;
-	break;
-    case RREAD:
-	rand_byte = random_byte_offset(oper);
-	oper->last_offset = rand_byte;
-        io_prep_pread(&io->iocb,oper->fd, io->buf, oper->reclen, 
-	              rand_byte);
-        break;
-    case RWRITE:
-	rand_byte = random_byte_offset(oper);
-	oper->last_offset = rand_byte;
-        io_prep_pwrite(&io->iocb,oper->fd, io->buf, oper->reclen, 
-	              rand_byte);
-        
-        break;
-    }
-
-    return io;
-}
-
-/* 
- * wait for any pending requests, and then free all ram associated with
- * an operation.  returns the last error the operation hit (zero means none)
- */
-static int
-finish_oper(struct thread_info *t, struct io_oper *oper)
-{
-    unsigned long last_err;
-
-    io_oper_wait(t, oper);
-    last_err = oper->last_err;
-    if (oper->num_pending > 0) {
-        fprintf(stderr, "oper num_pending is %d\n", oper->num_pending);
-    }
-    close(oper->fd);
-    free(oper);
-    return last_err;
-}
-
-/* 
- * allocates an io operation and fills in all the fields.  returns
- * null on error
- */
-static struct io_oper * 
-create_oper(int fd, int rw, off_t start, off_t end, int reclen, int depth,
-            int iter, char *file_name)
-{
-    struct io_oper *oper;
-
-    oper = malloc (sizeof(*oper));
-    if (!oper) {
-	fprintf(stderr, "unable to allocate io oper\n");
-	return NULL;
-    }
-    memset(oper, 0, sizeof(*oper));
-
-    oper->depth = depth;
-    oper->start = start;
-    oper->end = end;
-    oper->last_offset = oper->start;
-    oper->fd = fd;
-    oper->reclen = reclen;
-    oper->rw = rw;
-    oper->total_ios = (oper->end - oper->start) / oper->reclen;
-    oper->file_name = file_name;
-
-    return oper;
-}
-
-/*
- * does setup on num_ios worth of iocbs, but does not actually
- * start any io
- */
-int build_oper(struct thread_info *t, struct io_oper *oper, int num_ios, 
-               struct iocb **my_iocbs) 
-{
-    int i;
-    struct io_unit *io;
-
-    if (oper->started_ios == 0)
-	gettimeofday(&oper->start_time, NULL);
-
-    if (num_ios == 0)
-        num_ios = oper->total_ios;
-
-    if ((oper->started_ios + num_ios) > oper->total_ios)
-        num_ios = oper->total_ios - oper->started_ios;   
-
-    for( i = 0 ; i < num_ios ; i++) {
-	io = build_iocb(t, oper);
-	if (!io) {
-	    return -1;    
-	}
-	my_iocbs[i] = &io->iocb;
-    }
-    return num_ios;
-}
-
-/*
- * runs through the iocbs in the array provided and updates
- * counters in the associated oper struct
- */
-static void update_iou_counters(struct iocb **my_iocbs, int nr,
-	struct timeval *tv_now) 
-{
-    struct io_unit *io;
-    int i;
-    for (i = 0 ; i < nr ; i++) {
-	io = (struct io_unit *)(my_iocbs[i]);
-	io->io_oper->num_pending++;
-	io->io_oper->started_ios++;
-	io->io_start_time = *tv_now;	/* set time of io_submit */
-    }
-}
-
-/* starts some io for a given file, returns zero if all went well */
-int run_built(struct thread_info *t, int num_ios, struct iocb **my_iocbs) 
-{
-    int ret;
-    struct timeval start_time;
-    struct timeval stop_time;
-
-resubmit:
-    gettimeofday(&start_time, NULL);
-    ret = io_submit(t->io_ctx, num_ios, my_iocbs);
-    gettimeofday(&stop_time, NULL);
-    calc_latency(&start_time, &stop_time, &t->io_submit_latency);
-
-    if (ret != num_ios) {
-	/* some ios got through */
-	if (ret > 0) {
-	    update_iou_counters(my_iocbs, ret, &stop_time);
-	    my_iocbs += ret;
-	    t->num_global_pending += ret;
-	    num_ios -= ret;
-	}
-	/* 
-	 * we've used all the requests allocated in aio_init, wait and
-	 * retry
-	 */
-	if (ret > 0 || ret == -EAGAIN) {
-	    int old_ret = ret;
-	    if ((ret = read_some_events(t)) > 0) {
-		goto resubmit;
-	    } else {
-	    	fprintf(stderr, "ret was %d and now is %d\n", old_ret, ret);
-		abort();
-	    }
-	}
-
-	fprintf(stderr, "ret %d (%s) on io_submit\n", ret, strerror(-ret));
-	return -1;
-    }
-    update_iou_counters(my_iocbs, ret, &stop_time);
-    t->num_global_pending += ret;
-    return 0;
-}
-
-/* 
- * changes oper->rw to the next in a command sequence, or returns zero
- * to say this operation is really, completely done for
- */
-static int restart_oper(struct io_oper *oper) {
-    int new_rw  = 0;
-    if (oper->last_err)
-        return 0;
-
-    /* this switch falls through */
-    switch(oper->rw) {
-    case WRITE:
-	if (stages & (1 << READ))
-	    new_rw = READ;
-    case READ:
-	if (!new_rw && stages & (1 << RWRITE))
-	    new_rw = RWRITE;
-    case RWRITE:
-	if (!new_rw && stages & (1 << RREAD))
-	    new_rw = RREAD;
-    }
-
-    if (new_rw) {
-	oper->started_ios = 0;
-	oper->last_offset = oper->start;
-	oper->stonewalled = 0;
-
-	/* 
-	 * we're restarting an operation with pending requests, so the
-	 * timing info won't be printed by finish_io.  Printing it here
-	 */
-	if (oper->num_pending)
-	    print_time(oper);
-
-	oper->rw = new_rw;
-	return 1;
-    } 
-    return 0;
-}
-
-static int oper_runnable(struct io_oper *oper) {
-    struct stat buf;
-    int ret;
-
-    /* the first context is always runnable; if started_ios > 0, there is
-     * no need to redo the calculations
-     */
-    if (oper->started_ios || oper->start == 0)
-        return 1;
-    /* only the sequential phases force delays in starting */
-    if (oper->rw >= RWRITE)
-        return 1;
-    ret = fstat(oper->fd, &buf);
-    if (ret < 0) {
-        perror("fstat");
-	exit(1);
-    }
-    if (S_ISREG(buf.st_mode) && buf.st_size < oper->start)
-        return 0;
-    return 1;
-}
-
-/*
- * runs through all the io operations on the active list, and starts
- * a chunk of io on each.  If any io operations are completely finished,
- * it either switches them to the next stage or puts them on the 
- * finished list.
- *
- * this function stops after max_io_submit iocbs are sent down the 
- * pipe, even if it has not yet touched all the operations on the 
- * active list.  Any operations that have finished are moved onto
- * the finished_opers list.
- */
-static int run_active_list(struct thread_info *t,
-			 int io_iter,
-			 int max_io_submit)
-{
-    struct io_oper *oper;
-    struct io_oper *built_opers = NULL;
-    struct iocb **my_iocbs = t->iocbs;
-    int ret = 0;
-    int num_built = 0;
-
-    oper = t->active_opers;
-    while(oper) {
-	if (!oper_runnable(oper)) {
-	    oper = oper->next;
-	    if (oper == t->active_opers)
-	        break;
-	    continue;
-	}
-	ret = build_oper(t, oper, io_iter, my_iocbs);
-	if (ret >= 0) {
-	    my_iocbs += ret;
-	    num_built += ret;
-	    oper_list_del(oper, &t->active_opers);
-	    oper_list_add(oper, &built_opers);
-	    oper = t->active_opers;
-	    if (num_built + io_iter > max_io_submit)
-	        break;
-	} else
-	    break;
-    }
-    if (num_built) {
-	ret = run_built(t, num_built, t->iocbs);
-	if (ret < 0) {
-	    fprintf(stderr, "error %d on run_built\n", ret);
-	    exit(1);
-	}
-	while(built_opers) {
-	    oper = built_opers;
-	    oper_list_del(oper, &built_opers);
-	    oper_list_add(oper, &t->active_opers);
-	    if (oper->started_ios == oper->total_ios) {
-		oper_list_del(oper, &t->active_opers);
-		oper_list_add(oper, &t->finished_opers);
-	    }
-	}
-    }
-    return 0;
-}
-
-void drop_shm() {
-    int ret;
-    struct shmid_ds ds;
-    if (use_shm != USE_SHM)
-        return;
-
-    ret = shmctl(shm_id, IPC_RMID, &ds);
-    if (ret) {
-        perror("shmctl IPC_RMID");
-    }
-}
-
-void aio_setup(io_context_t *io_ctx, int n)
-{
-    int res = io_queue_init(n, io_ctx);
-    if (res != 0) {
-	fprintf(stderr, "io_queue_setup(%d) returned %d (%s)\n",
-		n, res, strerror(-res));
-	exit(3);
-    }
-}
-
-/*
- * allocate io operation and event arrays for a given thread
- */
-int setup_ious(struct thread_info *t, 
-              int num_files, int depth, 
-	      int reclen, int max_io_submit) {
-    int i;
-    size_t bytes = num_files * depth * sizeof(*t->ios);
-
-    t->ios = malloc(bytes);
-    if (!t->ios) {
-	fprintf(stderr, "unable to allocate io units\n");
-	return -1;
-    }
-    memset(t->ios, 0, bytes);
-
-    for (i = 0 ; i < depth * num_files; i++) {
-	t->ios[i].buf = aligned_buffer;
-	aligned_buffer += padded_reclen;
-	t->ios[i].buf_size = reclen;
-	if (verify)
-	    memset(t->ios[i].buf, 'b', reclen);
-	else
-	    memset(t->ios[i].buf, 0, reclen);
-	t->ios[i].next = t->free_ious;
-	t->free_ious = t->ios + i;
-    }
-    if (verify) {
-        verify_buf = aligned_buffer;
-        memset(verify_buf, 'b', reclen);
-    }
-
-    t->iocbs = malloc(sizeof(struct iocb *) * max_io_submit);
-    if (!t->iocbs) {
-        fprintf(stderr, "unable to allocate iocbs\n");
-	goto free_buffers;
-    }
-
-    memset(t->iocbs, 0, max_io_submit * sizeof(struct iocb *));
-
-    t->events = malloc(sizeof(struct io_event) * depth * num_files);
-    if (!t->events) {
-        fprintf(stderr, "unable to allocate ram for events\n");
-	goto free_buffers;
-    }
-    memset(t->events, 0, num_files * sizeof(struct io_event)*depth);
-
-    t->num_global_ios = num_files * depth;
-    t->num_global_events = t->num_global_ios;
-    return 0;
-
-free_buffers:
-    if (t->ios)
-        free(t->ios);
-    if (t->iocbs)
-        free(t->iocbs);  
-    if (t->events)
-        free(t->events);
-    return -1;
-}
-
-/*
- * The buffers used for file data are allocated as a single big
- * malloc, and then each thread and operation takes a piece and uses
- * that for file data.  This lets us do a large shm or bigpages alloc
- * without trying to find a special place in each thread to map the
- * buffers to.
- */
-int setup_shared_mem(int num_threads, int num_files, int depth, 
-                     int reclen, int max_io_submit) 
-{
-    char *p = NULL;
-    size_t total_ram;
-    
-    padded_reclen = (reclen + page_size_mask) / (page_size_mask+1);
-    padded_reclen = padded_reclen * (page_size_mask+1);
-    total_ram = num_files * depth * padded_reclen + num_threads;
-    if (verify)
-    	total_ram += padded_reclen;
-
-    if (use_shm == USE_MALLOC) {
-	p = malloc(total_ram + page_size_mask);
-    } else if (use_shm == USE_SHM) {
-        shm_id = shmget(IPC_PRIVATE, total_ram, IPC_CREAT | 0700);
-	if (shm_id < 0) {
-	    perror("shmget");
-	    drop_shm();
-	    goto free_buffers;
-	}
-	p = shmat(shm_id, (char *)0x50000000, 0);
-        if ((long)p == -1) {
-	    perror("shmat");
-	    goto free_buffers;
-	}
-	/* won't really be dropped until we shmdt */
-	drop_shm();
-    } else if (use_shm == USE_SHMFS) {
-        char mmap_name[16]; /* /dev/shm/ + null + XXXXXX */    
-	int fd;
-
-	strcpy(mmap_name, "/dev/shm/XXXXXX");
-	fd = mkstemp(mmap_name);
-        if (fd < 0) {
-	    perror("mkstemp");
-	    goto free_buffers;
-	}
-	unlink(mmap_name);
-	ftruncate(fd, total_ram);
-	shm_id = fd;
-	p = mmap((char *)0x50000000, total_ram,
-	         PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
-
-        if (p == MAP_FAILED) {
-	    perror("mmap");
-	    goto free_buffers;
-	}
-    }
-    if (!p) {
-        fprintf(stderr, "unable to allocate buffers\n");
-	goto free_buffers;
-    }
-    unaligned_buffer = p;
-    p = (char*)((intptr_t) (p + page_size_mask) & ~page_size_mask);
-    aligned_buffer = p;
-    return 0;
-
-free_buffers:
-    drop_shm();
-    if (unaligned_buffer)
-        free(unaligned_buffer);
-    return -1;
-}
-
-/*
- * runs through all the thread_info structs and calculates a combined
- * throughput
- */
-void global_thread_throughput(struct thread_info *t, char *this_stage) {
-    int i;
-    double runtime = time_since_now(&global_stage_start_time);
-    double total_mb = 0;
-    double min_trans = 0;
-
-    for (i = 0 ; i < num_threads ; i++) {
-        total_mb += global_thread_info[i].stage_mb_trans;
-	if (!min_trans || global_thread_info[i].stage_mb_trans < min_trans)
-	    min_trans = global_thread_info[i].stage_mb_trans;
-    }
-    if (total_mb) {
-	fprintf(stderr, "%s throughput (%.2f MB/s) ", this_stage,
-	        total_mb / runtime);
-	fprintf(stderr, "%.2f MB in %.2fs", total_mb, runtime);
-        if (stonewall)
-	    fprintf(stderr, " min transfer %.2fMB", min_trans);
-        fprintf(stderr, "\n");
-    }
-}
-
-
-/* this is the meat of the state machine.  There is a list of
- * active operations structs, and as each one finishes the required
- * io it is moved to a list of finished operations.  Once they have
- * all finished whatever stage they were in, they are given the chance
- * to restart and pick a different stage (read/write/random read etc)
- *
- * various timings are printed in between the stages, along with
- * thread synchronization if there is more than one thread.
- */
-int worker(struct thread_info *t)
-{
-    struct io_oper *oper;
-    char *this_stage = NULL;
-    struct timeval stage_time;
-    int status = 0;
-    int iteration = 0;
-    int cnt;
-
-    aio_setup(&t->io_ctx, 512);
-
-restart:
-    if (num_threads > 1) {
-        pthread_mutex_lock(&stage_mutex);
-	threads_starting++;
-	if (threads_starting == num_threads) {
-	    threads_ending = 0;
-	    gettimeofday(&global_stage_start_time, NULL);
-	    pthread_cond_broadcast(&stage_cond);
-	}
-	while (threads_starting != num_threads)
-	    pthread_cond_wait(&stage_cond, &stage_mutex);
-        pthread_mutex_unlock(&stage_mutex);
-    }
-    if (t->active_opers) {
-        this_stage = stage_name(t->active_opers->rw);
-	gettimeofday(&stage_time, NULL);
-	t->stage_mb_trans = 0;
-    }
-
-    cnt = 0;
-    /* first we send everything through aio */
-    while(t->active_opers && (cnt < iterations || iterations == RUN_FOREVER)) {
-	if (stonewall && threads_ending) {
-	    oper = t->active_opers;
-	    oper->stonewalled = 1;
-	    oper_list_del(oper, &t->active_opers);
-	    oper_list_add(oper, &t->finished_opers);
-	} else {
-	    run_active_list(t, io_iter,  max_io_submit);
-        }
-	cnt++;
-    }
-    if (latency_stats)
-        print_latency(t);
-
-    if (completion_latency_stats)
-	print_completion_latency(t);
-
-    /* then we wait for all the operations to finish */
-    oper = t->finished_opers;
-    do {
-	if (!oper)
-		break;
-	io_oper_wait(t, oper);
-	oper = oper->next;
-    } while(oper != t->finished_opers);
-
-    /* then we do an fsync to get the timing for any future operations
-     * right, and check to see if any of these need to get restarted
-     */
-    oper = t->finished_opers;
-    while(oper) {
-	if (fsync_stages)
-            fsync(oper->fd);
-	t->stage_mb_trans += oper_mb_trans(oper);
-	if (restart_oper(oper)) {
-	    oper_list_del(oper, &t->finished_opers);
-	    oper_list_add(oper, &t->active_opers);
-	    oper = t->finished_opers;
-	    continue;
-	}
-	oper = oper->next;
-	if (oper == t->finished_opers)
-	    break;
-    } 
-
-    if (t->stage_mb_trans && t->num_files > 0) {
-        double seconds = time_since_now(&stage_time);
-	fprintf(stderr, "thread %d %s totals (%.2f MB/s) %.2f MB in %.2fs\n", 
-	        t - global_thread_info, this_stage, t->stage_mb_trans/seconds, 
-		t->stage_mb_trans, seconds);
-    }
-
-    if (num_threads > 1) {
-	pthread_mutex_lock(&stage_mutex);
-	threads_ending++;
-	if (threads_ending == num_threads) {
-	    threads_starting = 0;
-	    pthread_cond_broadcast(&stage_cond);
-	    global_thread_throughput(t, this_stage);
-	}
-	while(threads_ending != num_threads)
-	    pthread_cond_wait(&stage_cond, &stage_mutex);
-	pthread_mutex_unlock(&stage_mutex);
-    }
-    
-    /* someone got restarted, go back to the beginning */
-    if (t->active_opers && (cnt < iterations || iterations == RUN_FOREVER)) {
-	iteration++;
-        goto restart;
-    }
-
-    /* finally, free all the ram */
-    while(t->finished_opers) {
-	oper = t->finished_opers;
-	oper_list_del(oper, &t->finished_opers);
-	status = finish_oper(t, oper);
-    }
-
-    if (t->num_global_pending) {
-        fprintf(stderr, "global num pending is %d\n", t->num_global_pending);
-    }
-    io_queue_release(t->io_ctx);
-    
-    return status;
-}
-
-typedef void * (*start_routine)(void *);
-int run_workers(struct thread_info *t, int num_threads)
-{
-    int ret;
-    int thread_ret;
-    int i;
-
-    for(i = 0 ; i < num_threads ; i++) {
-        ret = pthread_create(&t[i].tid, NULL, (start_routine)worker, t + i);
-	if (ret) {
-	    perror("pthread_create");
-	    exit(1);
-	}
-    }
-    for(i = 0 ; i < num_threads ; i++) {
-        ret = pthread_join(t[i].tid, (void *)&thread_ret);
-        if (ret) {
-	    perror("pthread_join");
-	    exit(1);
-	}
-    }
-    return 0;
-}
-
-off_t parse_size(char *size_arg, off_t mult) {
-    char c;
-    int num;
-    off_t ret;
-    c = size_arg[strlen(size_arg) - 1];
-    if (c > '9') {
-        size_arg[strlen(size_arg) - 1] = '\0';
-    }
-    num = atoi(size_arg);
-    switch(c) {
-    case 'g':
-    case 'G':
-        mult = 1024 * 1024 * 1024;
-	break;
-    case 'm':
-    case 'M':
-        mult = 1024 * 1024;
-	break;
-    case 'k':
-    case 'K':
-        mult = 1024;
-	break;
-    case 'b':
-    case 'B':
-        mult = 1;
-	break;
-    }
-    ret = mult * num;
-    return ret;
-}
-
-void print_usage(void) {
-    printf("usage: aio-stress [-s size] [-r size] [-a size] [-d num] [-b num]\n");
-    printf("                  [-i num] [-t num] [-c num] [-C size] [-nxhOS ]\n");
-    printf("                  file1 [file2 ...]\n");
-    printf("\t-a size in KB at which to align buffers\n");
-    printf("\t-b max number of iocbs to give io_submit at once\n");
-    printf("\t-c number of io contexts per file\n");
-    printf("\t-C offset between contexts, default 2MB\n");
-    printf("\t-s size in MB of the test file(s), default 1024MB\n");
-    printf("\t-r record size in KB used for each io, default 64KB\n");
-    printf("\t-d number of pending aio requests for each file, default 64\n");
-    printf("\t-i number of ios per file sent before switching\n\t   to the next file, default 8\n");
-    printf("\t-I total number of async IOs the program will run, default is to run until Ctrl-C\n");
-    printf("\t-O Use O_DIRECT (not available in 2.4 kernels),\n");
-    printf("\t-S Use O_SYNC for writes\n");
-    printf("\t-o add an operation to the list: write=0, read=1,\n"); 
-    printf("\t   random write=2, random read=3.\n");
-    printf("\t   repeat -o to specify multiple ops: -o 0 -o 1 etc.\n");
-    printf("\t-m shm use ipc shared memory for io buffers instead of malloc\n");
-    printf("\t-m shmfs mmap a file in /dev/shm for io buffers\n");
-    printf("\t-n no fsyncs between write stage and read stage\n");
-    printf("\t-l print io_submit latencies after each stage\n");
-    printf("\t-L print io completion latencies after each stage\n");
-    printf("\t-t number of threads to run\n");
-    printf("\t-u unlink files after completion\n");
-    printf("\t-v verification of bytes written\n");
-    printf("\t-x turn off thread stonewalling\n");
-    printf("\t-h this message\n");
-    printf("\n\t   the size options (-a, -s and -r) allow modifiers: -s 400{k,m,g}\n");
-    printf("\t   translates to 400KB, 400MB or 400GB\n");
-    printf("version %s\n", PROG_VERSION);
-}
-
-int main(int ac, char **av) 
-{
-    int rwfd;
-    int i;
-    int j;
-    int c;
-
-    off_t file_size = 1 * 1024 * 1024 * 1024;
-    int first_stage = WRITE;
-    struct io_oper *oper;
-    int status = 0;
-    int num_files = 0;
-    int open_fds = 0;
-    struct thread_info *t;
-
-    page_size_mask = getpagesize() - 1;
-
-    while(1) {
-	c = getopt(ac, av, "a:b:c:C:m:s:r:d:i:I:o:t:lLnhOSxvu");
-	if  (c < 0)
-	    break;
-
-        switch(c) {
-	case 'a':
-	    page_size_mask = parse_size(optarg, 1024);
-	    page_size_mask--;
-	    break;
-	case 'c':
-	    num_contexts = atoi(optarg);
-	    break;
-	case 'C':
-	    context_offset = parse_size(optarg, 1024 * 1024);
-	case 'b':
-	    max_io_submit = atoi(optarg);
-	    break;
-	case 's':
-	    file_size = parse_size(optarg, 1024 * 1024);
-	    break;
-	case 'd':
-	    depth = atoi(optarg);
-	    break;
-	case 'r':
-	    rec_len = parse_size(optarg, 1024);
-	    break;
-	case 'i':
-	    io_iter = atoi(optarg);
-	    break;
-        case 'I':
-          iterations = atoi(optarg);
-        break;
-	case 'n':
-	    fsync_stages = 0;
-	    break;
-	case 'l':
-	    latency_stats = 1;
-	    break;
-	case 'L':
-	    completion_latency_stats = 1;
-	    break;
-	case 'm':
-	    if (!strcmp(optarg, "shm")) {
-		fprintf(stderr, "using ipc shm\n");
-	        use_shm = USE_SHM;
-	    } else if (!strcmp(optarg, "shmfs")) {
-	        fprintf(stderr, "using /dev/shm for buffers\n");
-		use_shm = USE_SHMFS;
-	    }
-	    break;
-	case 'o': 
-	    i = atoi(optarg);
-	    stages |= 1 << i;
-	    fprintf(stderr, "adding stage %s\n", stage_name(i));
-	    break;
-	case 'O':
-	    o_direct = O_DIRECT;
-	    break;
-	case 'S':
-	    o_sync = O_SYNC;
-	    break;
-	case 't':
-	    num_threads = atoi(optarg);
-	    break;
-	case 'x':
-	    stonewall = 0;
-	    break;
-	case 'u':
-	    unlink_files = 1;
-	    break;
-	case 'v':
-	    verify = 1;
-	    break;
-	case 'h':
-	default:
-	    print_usage();
-	    exit(1);
-	}
-    }
-
-    /* 
-     * make sure we don't try to submit more ios than we have allocated
-     * memory for
-     */
-    if (depth < io_iter) {
-	io_iter = depth;
-        fprintf(stderr, "dropping io_iter to %d\n", io_iter);
-    }
-
-    if (optind >= ac) {
-	print_usage();
-	exit(1);
-    }
-
-    num_files = ac - optind;
-
-    if (num_threads > (num_files * num_contexts)) {
-        num_threads = num_files * num_contexts;
-	fprintf(stderr, "dropping thread count to the number of contexts %d\n", 
-	        num_threads);
-    }
-
-    t = malloc(num_threads * sizeof(*t));
-    if (!t) {
-        perror("malloc");
-	exit(1);
-    }
-    global_thread_info = t;
-
-    /* by default, allow a huge number of iocbs to be sent towards
-     * io_submit
-     */
-    if (!max_io_submit)
-        max_io_submit = num_files * io_iter * num_contexts;
-
-    /*
-     * make sure we don't try to submit more ios than max_io_submit allows 
-     */
-    if (max_io_submit < io_iter) {
-        io_iter = max_io_submit;
-	fprintf(stderr, "dropping io_iter to %d\n", io_iter);
-    }
-
-    if (!stages) {
-        stages = (1 << WRITE) | (1 << READ) | (1 << RREAD) | (1 << RWRITE);
-    } else {
-        for (i = 0 ; i < LAST_STAGE; i++) {
-	    if (stages & (1 << i)) {
-	        first_stage = i;
-		fprintf(stderr, "starting with %s\n", stage_name(i));
-		break;
-	    }
-	}
-    }
-
-    if (file_size < num_contexts * context_offset) {
-        fprintf(stderr, "file size %Lu too small for %d contexts\n", 
-	        file_size, num_contexts);
-	exit(1);
-    }
-
-    fprintf(stderr, "file size %LuMB, record size %luKB, depth %d, ios per iteration %d\n", file_size / (1024 * 1024), rec_len / 1024, depth, io_iter);
-    fprintf(stderr, "max io_submit %d, buffer alignment set to %luKB\n", 
-            max_io_submit, (page_size_mask + 1)/1024);
-    fprintf(stderr, "threads %d files %d contexts %d context offset %LuMB verification %s\n", 
-            num_threads, num_files, num_contexts, 
-	    context_offset / (1024 * 1024), verify ? "on" : "off");
-    /* open all the files and do any required setup for them */
-    for (i = optind ; i < ac ; i++) {
-	int thread_index;
-	for (j = 0 ; j < num_contexts ; j++) {
-	    thread_index = open_fds % num_threads;
-	    open_fds++;
-
-	    rwfd = open(av[i], O_CREAT | O_RDWR | o_direct | o_sync, 0600);
-	    assert(rwfd != -1);
-
-	    oper = create_oper(rwfd, first_stage, j * context_offset, 
-	                       file_size - j * context_offset, rec_len, 
-			       depth, io_iter, av[i]);
-	    if (!oper) {
-		fprintf(stderr, "error in create_oper\n");
-		exit(-1);
-	    }
-	    oper_list_add(oper, &t[thread_index].active_opers);
-	    t[thread_index].num_files++;
-	}
-    }
-    if (setup_shared_mem(num_threads, num_files * num_contexts, 
-                         depth, rec_len, max_io_submit))
-    {
-        exit(1);
-    }
-    for (i = 0 ; i < num_threads ; i++) {
-	if (setup_ious(&t[i], t[i].num_files, depth, rec_len, max_io_submit))
-		exit(1);
-    }
-    if (num_threads > 1){
-        printf("Running multi thread version num_threads:%d\n", num_threads);
-        run_workers(t, num_threads);
-    } else {
-        printf("Running single thread version \n");
-	status = worker(t);
-    }
-    if (unlink_files) {
-	for (i = optind ; i < ac ; i++) {
-	    printf("Cleaning up file %s \n", av[i]);
-	    unlink(av[i]);
-	}
-    }
-
-    if (status) {
-	exit(1);
-    }
-    return status;
-}
-
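The size options described in print_usage() above go through parse_size(), which maps a trailing k/m/g/b onto a byte multiplier and otherwise falls back to the per-option default (1024 for -a and -r, 1024*1024 for -s and -C). A minimal Python sketch of that suffix handling, for illustration only (the helper name and the asserted values are not part of the removed source):

    def parse_size(size_arg, default_mult):
        # Trailing k/m/g/b picks the multiplier; otherwise default_mult applies.
        multipliers = {'k': 1024, 'm': 1024 ** 2, 'g': 1024 ** 3, 'b': 1}
        suffix = size_arg[-1].lower()
        if suffix in multipliers:
            return int(size_arg[:-1]) * multipliers[suffix]
        return int(size_arg) * default_mult

    # "-s 400m" and "-r 64" (record size defaults to KB) become byte counts:
    assert parse_size("400m", 1024 * 1024) == 400 * 1024 * 1024
    assert parse_size("64", 1024) == 64 * 1024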
diff --git a/client/tests/aiostress/aiostress.py b/client/tests/aiostress/aiostress.py
deleted file mode 100644
index a4af09f..0000000
--- a/client/tests/aiostress/aiostress.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# This requires aio headers to build.
-# Should work automagically out of deps now.
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class aiostress(test.test):
-    version = 3
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.job.setup_dep(['libaio'])
-        ldflags = '-L ' + self.autodir + '/deps/libaio/lib'
-        cflags = '-I ' + self.autodir + '/deps/libaio/include'
-        self.gcc_flags = ldflags + ' ' + cflags
-
-
-    # ftp://ftp.suse.com/pub/people/mason/utils/aio-stress.c
-    def setup(self, tarball = None):
-        os.mkdir(self.srcdir)
-        os.chdir(self.srcdir)
-        utils.system('cp ' + self.bindir+'/aio-stress.c .')
-        os.chdir(self.srcdir)
-        self.gcc_flags += ' -Wall -lpthread -laio'
-        cmd = 'gcc ' + self.gcc_flags + ' aio-stress.c -o aio-stress'
-        utils.system(cmd)
-
-
-    def run_once(self, args = ''):
-        os.chdir(self.tmpdir)
-        libs = self.autodir+'/deps/libaio/lib/'
-        ld_path = utils.prepend_path(libs,
-                                      utils.environ('LD_LIBRARY_PATH'))
-        var_ld_path = 'LD_LIBRARY_PATH=' + ld_path
-        cmd = self.srcdir + '/aio-stress ' + args + ' poo'
-
-        stderr = os.path.join(self.debugdir, 'stderr')
-        utils.system('%s %s 2> %s' % (var_ld_path, cmd, stderr))
-        report = open(stderr)
-        self.format_results(report)
-
-
-    def format_results(self, report):
-        for line in report:
-            if 'threads' in line:
-                if 'files' in line:
-                    if 'contexts' in line:
-                        break
-
-        keyval = {}
-        for line in report:
-            line = line.split(')')[0]
-            key, value = line.split('(')
-            key = key.strip().replace(' ', '_')
-            value = value.split()[0]
-            keyval[key] = value
-
-        self.write_perf_keyval(keyval)
-
-"""
-file size 1024MB, record size 64KB, depth 64, ios per iteration 8
-max io_submit 8, buffer alignment set to 4KB
-threads 1 files 1 contexts 1 context offset 2MB verification off
-write on poo (245.77 MB/s) 1024.00 MB in 4.17s
-thread 0 write totals (55.86 MB/s) 1024.00 MB in 18.33s
-read on poo (1311.54 MB/s) 1024.00 MB in 0.78s
-thread 0 read totals (1307.66 MB/s) 1024.00 MB in 0.78s
-random write on poo (895.47 MB/s) 1024.00 MB in 1.14s
-thread 0 random write totals (18.42 MB/s) 1024.00 MB in 55.59s
-random read on poo (1502.89 MB/s) 1024.00 MB in 0.68s
-thread 0 random read totals (1474.36 MB/s) 1024.00 MB in 0.69s
-"""
diff --git a/client/tests/aiostress/control b/client/tests/aiostress/control
deleted file mode 100644
index 16e059f..0000000
--- a/client/tests/aiostress/control
+++ /dev/null
@@ -1,24 +0,0 @@
-AUTHOR = "Masoud S <masouds@google.com>"
-NAME = "aio stress"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "Kernel"
-TIME = "SHORT"
-TEST_TYPE = "client"
-DOC = """\
-aio-stress
-
-will open or create each file on the command line and start a series
-of aio requests against it.
-
-aio is done in a rotating loop: first file1 gets 8 requests, then
-file2, then file3 etc.  As each file finishes writing, it is switched
-to reads.
-
-io buffers are aligned in case you want to do raw io.
-
-This test takes less than a minute. It ends up writing and reading less
-than a few megabytes. It is a sequential workload. This test stresses the
-aio interface, not the disk or the kernel.
-"""
-
-job.run_test('aiostress')
diff --git a/client/tests/barriertest/barriertest.py b/client/tests/barriertest/barriertest.py
deleted file mode 100644
index 6b72f31..0000000
--- a/client/tests/barriertest/barriertest.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# This is used directly by server/tests/barriertest/control.srv
-
-import logging, time
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import barrier, error
-
-
-class barriertest(test.test):
-    version = 2
-
-
-    def run_once(self, our_addr, hostnames, main, timeout=120):
-        # A reusable local server as we're using multiple barriers in one test.
-        server = barrier.listen_server()
-
-        # Basic barrier rendezvous test.
-        self.job.barrier(our_addr, 'First', timeout=timeout,
-                         listen_server=server).rendezvous(*hostnames)
-        logging.info('1. rendezvous "First" complete.')
-        time.sleep(2)
-
-        # A rendezvous_servers using a different main than the default.
-        self.job.barrier(our_addr, 'Second', timeout=timeout,
-                         listen_server=server
-                         ).rendezvous_servers(hostnames[-1], *hostnames[:-1])
-        logging.info('2. rendezvous_servers "Second" complete.')
-        time.sleep(2)
-
-        # A regular rendezvous, this time testing the abort functionality.
-        try:
-            self.job.barrier(our_addr, 'WillAbort', timeout=timeout,
-                             listen_server=server
-                             ).rendezvous(abort=True, *hostnames)
-        except error.BarrierAbortError:
-            pass
-        except error.BarrierError, e:
-            # We did get an error from the barrier, but was it acceptable or
-            # not?  Site code may not be able to indicate an explicit abort.
-            self.job.record('WARN', None, 'barriertest',
-                            'BarrierError %s instead of BarrierAbortError.' % e)
-        else:
-            raise error.TestFail('Explicit barrier rendezvous abort failed.')
-        logging.info('3. rendezvous(abort=True) complete.')
-        time.sleep(2)
-
-        # Now attempt a rendezvous_servers that also includes the server.
-        self.job.barrier(our_addr, 'FinalSync', timeout=timeout,
-                         listen_server=server
-                         ).rendezvous_servers(main, *hostnames)
-        logging.info('4. rendezvous_servers "FinalSync" complete.')
-        time.sleep(2)
-
-        # rendezvous_servers, aborted from the main.
-        try:
-            self.job.barrier(our_addr, 'WillAbortServers', timeout=timeout,
-                             listen_server=server
-                             ).rendezvous_servers(main, *hostnames)
-        except error.BarrierAbortError:
-            pass
-        except error.BarrierError, e:
-            # We did get an error from the barrier, but was it acceptable or
-            # not?  Site code may not be able to indicate an explicit abort.
-            self.job.record('WARN', None, 'barriertest',
-                            'BarrierError %s instead of BarrierAbortError.' % e)
-        else:
-            raise error.TestFail('Explicit barrier rendezvous abort failed.')
-        logging.info('5. rendezvous_servers(abort=True) complete.')
-        time.sleep(2)
-
-        server.close()
diff --git a/client/tests/barriertest/control b/client/tests/barriertest/control
deleted file mode 100644
index 68b4edc..0000000
--- a/client/tests/barriertest/control
+++ /dev/null
@@ -1,12 +0,0 @@
-AUTHOR = "Colby Ranger <cranger@google.com>"
-TIME = "MEDIUM"
-NAME = "Barrier Test"
-TEST_TYPE = "client"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Stress"
-DOC = """\
-This tests barriers which can be used for multiple threads/processes/hosts
-to coordinate within a test.  This is for testing Autotest itself.
-"""
-
-job.run_test('barriertest')
diff --git a/client/tests/bash_shared_mapping/bash_shared_mapping.py b/client/tests/bash_shared_mapping/bash_shared_mapping.py
deleted file mode 100644
index 72031fa..0000000
--- a/client/tests/bash_shared_mapping/bash_shared_mapping.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import signal, os
-from autotest_lib.client.bin import utils, test
-
-class bash_shared_mapping(test.test):
-    version = 3
-
-    # http://www.zip.com.au/~akpm/linux/patches/stuff/ext3-tools.tar.gz
-    def setup(self, tarball = 'ext3-tools.tar.gz'):
-        self.tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(self.tarball, self.srcdir)
-
-        os.chdir(self.srcdir)
-        utils.system('patch -p1 < ../makefile.patch')
-        utils.make('bash-shared-mapping usemem')
-
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def execute(self, testdir = None, iterations = 10000):
-        if not testdir:
-            testdir = self.tmpdir
-        os.chdir(testdir)
-        file = os.path.join(testdir, 'foo')
-        # Want to use 3/4 of all memory for each of
-        # bash-shared-mapping and usemem
-        kilobytes = (3 * utils.memtotal()) / 4
-
-        # Want two usemem -m megabytes in parallel in background.
-        pid = [None, None]
-        usemem = os.path.join(self.srcdir, 'usemem')
-        args = ('usemem', '-N', '-m', '%d' % (kilobytes / 1024))
-        # print_to_tty ('2 x ' + ' '.join(args))
-        for i in (0,1):
-            pid[i] = os.spawnv(os.P_NOWAIT, usemem, args)
-
-        cmd = "%s/bash-shared-mapping %s %d -t %d -n %d" % \
-                        (self.srcdir, file, kilobytes,
-                         utils.count_cpus(), iterations)
-        os.system(cmd)
-
-        for i in (0, 1):
-            os.kill(pid[i], signal.SIGKILL)
diff --git a/client/tests/bash_shared_mapping/control b/client/tests/bash_shared_mapping/control
deleted file mode 100644
index f08e1f7..0000000
--- a/client/tests/bash_shared_mapping/control
+++ /dev/null
@@ -1,11 +0,0 @@
-AUTHOR = "Martin Bligh <mbligh@google.com>"
-NAME = "bash shared mapping"
-TIME = "SHORT"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-DOC = """\
-Runs bash-shared-mapping from the ext3-tools tarball against a file in the
-test directory while two usemem processes apply memory pressure.
-"""
-
-job.run_test('bash_shared_mapping')
diff --git a/client/tests/bash_shared_mapping/ext3-tools.tar.gz b/client/tests/bash_shared_mapping/ext3-tools.tar.gz
deleted file mode 100644
index db48be5..0000000
--- a/client/tests/bash_shared_mapping/ext3-tools.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/bash_shared_mapping/makefile.patch b/client/tests/bash_shared_mapping/makefile.patch
deleted file mode 100644
index 9b985f1..0000000
--- a/client/tests/bash_shared_mapping/makefile.patch
+++ /dev/null
@@ -1,8 +0,0 @@
---- ext3-tools/Makefile.orig	2011-02-07 15:52:18.000000000 -0800
-+++ ext3-tools/Makefile	2011-02-07 15:52:23.000000000 -0800
-@@ -1,5 +1,5 @@
- 
--CC	=	gcc
-+CC	?=	gcc
- CFLAGS	+= 	-O -Wall -g -DAIO
- CXXFLAGS+=	-O -Wall -g
diff --git a/client/tests/blktests/blktests.py b/client/tests/blktests/blktests.py
deleted file mode 100644
index 86a06ae..0000000
--- a/client/tests/blktests/blktests.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os, re, logging
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import test, utils
-
-class blktests(test.test):
-    """
-    Runs the blktests suite.
-    """
-
-    version = 1
-
-    BLKTESTS_PATH = '/mnt/stateful_partition/unencrypted/cache'
-    BLKTESTS_TEST_DIR = "/usr/local/blktests"
-    CONFIG_FILE = '/usr/local/blktests/config'
-
-    FAILED_RE = re.compile(r'.*\[failed\].*', re.DOTALL)
-    DEVICE_RE = re.compile(r'/dev/(sd[a-z]|mmcblk[0-9]+|nvme[0-9]+)p?[0-9]*')
-
-    devs=[]
-    loop_devs=[]
-    files=[]
-    exclude=[]
-
-    def setup_configs(self, devices):
-        """
-        Set up the block devices to test.
-        @param devices: The desired block devices to test (BLK: real block
-               device, LOOP_FILE: loop device over file, or LOOP_BLK:
-               loop device over real block device).
-        """
-
-        for dev in devices:
-            if dev == 'BLK':
-                dev_name = utils.get_free_root_partition()
-                self.devs.append(dev_name)
-                # block/013 tries to reread the partition table of the device
-                # This won't work when run on a block device partition, so we
-                # will exclude the test.
-                self.exclude.append("block/013")
-            elif dev == 'LOOP_FILE':
-                file_name = 'blktests_test'
-                file_loc = os.path.join(self.BLKTESTS_PATH, file_name)
-                utils.system('fallocate -l 10M %s' % file_loc)
-                loop_dev = utils.system_output('losetup -f -P --show %s'
-                                               % file_loc)
-                self.devs.append(loop_dev)
-                self.loop_devs.append(loop_dev)
-                self.files.append(file_loc)
-            elif dev == 'LOOP_BLK':
-                blk_dev = utils.get_free_root_partition()
-                loop_dev = utils.system_output('losetup -f -P --show %s'
-                                               % blk_dev)
-                self.devs.append(loop_dev)
-                self.loop_devs.append(loop_dev)
-            elif self.DEVICE_RE.match(dev):
-                if dev == utils.get_root_partition():
-                    raise error.TestError("Can't run the test on the root "
-                                          "partition.")
-                elif dev == utils.get_kernel_partition():
-                    raise error.TestError("Can't run the test on the kernel "
-                                          "partition.")
-                elif dev == utils.concat_partition(utils.get_root_device(), 1):
-                    raise error.TestError("Can't run the test on the stateful "
-                                          "partition.")
-                self.devs.append(dev)
-            else:
-                raise error.TestError("Invalid device specified")
-        test_devs = ' '.join(self.devs)
-        exclusions = ""
-        if self.exclude:
-            exclusions = "EXCLUDE=(%s)" % ' '.join(self.exclude)
-        config = "TEST_DEVS=(%s) %s" % (test_devs, exclusions)
-        logging.debug("Test config: %s", config)
-        configFile = open(self.CONFIG_FILE, 'w')
-        configFile.write(config)
-        configFile.close()
-
-    def cleanup(self):
-        """
-        Clean up the environment by removing any created files and loop devs.
-        """
-        for dev in self.loop_devs:
-            utils.system('losetup -d %s' % dev)
-        for f in self.files:
-            utils.system('rm %s' % f, ignore_status=True)
-        if os.path.isfile(self.CONFIG_FILE):
-            os.remove(self.CONFIG_FILE)
-
-    def run_once(self, devices=['LOOP_FILE']):
-        """
-        Set up the config file and run blktests.
-
-        @param devices: The desired block devices to test (BLK: real block
-               device, LOOP_FILE: loop device over file, or LOOP_BLK:
-               loop device over real block device).
-        """
-        os.chdir(self.BLKTESTS_TEST_DIR)
-        self.setup_configs(devices)
-        output = utils.system_output('bash ./check',
-                                     ignore_status=True, retain_output=True)
-        if self.FAILED_RE.match(output):
-            raise error.TestError('Test error, check debug logs for complete '
-                                  'test output')
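setup_configs() above boils down to writing one shell-style line into /usr/local/blktests/config. A short sketch of that line for the default devices=['LOOP_FILE'] run; the /dev/loop0 path stands in for whatever 'losetup -f -P --show' actually printed:

    devs = ['/dev/loop0']      # placeholder for the loop device losetup returned
    exclude = []               # block/013 is only excluded for real 'BLK' devices
    exclusions = "EXCLUDE=(%s)" % ' '.join(exclude) if exclude else ""
    config = "TEST_DEVS=(%s) %s" % (' '.join(devs), exclusions)
    print(config)              # "TEST_DEVS=(/dev/loop0) "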
diff --git a/client/tests/blktests/control b/client/tests/blktests/control
deleted file mode 100644
index c1194d2..0000000
--- a/client/tests/blktests/control
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'blktestsSuiteLoopOverFile'
-AUTHOR = 'The Chromium OS Authors'
-TIME = 'MEDIUM'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-DOC = """
-This is a wrapper for running blktests inside autotest.
-"""
-
-job.run_test('blktests', timeout=200)
\ No newline at end of file
diff --git a/client/tests/blktests/control.all b/client/tests/blktests/control.all
deleted file mode 100644
index b983c32..0000000
--- a/client/tests/blktests/control.all
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'blktestsSuiteAll'
-AUTHOR = 'The Chromium OS Authors'
-TIME = 'MEDIUM'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-DOC = """
-This is a wrapper for running blktests inside autotest.
-"""
-
-job.run_test('blktests', timeout=600, devices=['LOOP_BLK', 'LOOP_FILE', 'BLK'])
\ No newline at end of file
diff --git a/client/tests/blktests/control.blk b/client/tests/blktests/control.blk
deleted file mode 100644
index 4160074..0000000
--- a/client/tests/blktests/control.blk
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'blktestsSuiteRealBlk'
-AUTHOR = 'The Chromium OS Authors'
-TIME = 'MEDIUM'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-DOC = """
-This is a wrapper for running blktests inside autotest.
-"""
-
-job.run_test('blktests', timeout=600, devices=['BLK'])
diff --git a/client/tests/blktests/control.loopBlk b/client/tests/blktests/control.loopBlk
deleted file mode 100644
index e2a4149..0000000
--- a/client/tests/blktests/control.loopBlk
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'blktestsSuiteLoopOverBlk'
-AUTHOR = 'The Chromium OS Authors'
-TIME = 'MEDIUM'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-DOC = """
-This is a wrapper for running blktests inside autotest.
-"""
-
-job.run_test('blktests', timeout=300, devices=['LOOP_BLK'])
diff --git a/client/tests/cerberus/ctcs2.tar.bz2 b/client/tests/cerberus/ctcs2.tar.bz2
deleted file mode 100644
index a865ea4..0000000
--- a/client/tests/cerberus/ctcs2.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/cgroup/cgroup.py b/client/tests/cgroup/cgroup.py
deleted file mode 100644
index 959ce22..0000000
--- a/client/tests/cgroup/cgroup.py
+++ /dev/null
@@ -1,421 +0,0 @@
-import os, logging
-import time
-from tempfile import NamedTemporaryFile
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-from cgroup_common import Cgroup as CG
-from cgroup_common import CgroupModules
-
-class cgroup(test.test):
-    """
-    Tests the cgroup functionalities. It works by creating a process (which is
-    also a python application) that will try to use CPU and memory. We will
-    then verify whether the cgroups rules are obeyed.
-    """
-    version = 1
-    _client = ""
-    modules = CgroupModules()
-
-    def run_once(self):
-        """
-            Try to access different resources which are restricted by cgroup.
-        """
-        logging.info('Starting cgroup testing')
-
-        err = ""
-        # Run available tests
-        for i in ['memory', 'cpuset']:
-            logging.info("---< 'test_%s' START >---", i)
-            try:
-                if not self.modules.get_pwd(i):
-                    raise error.TestFail("module not available/mounted")
-                t_function = getattr(self, "test_%s" % i)
-                t_function()
-                logging.info("---< 'test_%s' PASSED >---", i)
-            except AttributeError:
-                err += "%s, " % i
-                logging.error("test_%s: Test doesn't exist", i)
-                logging.info("---< 'test_%s' FAILED >---", i)
-            except Exception, inst:
-                err += "%s, " % i
-                logging.error("test_%s: %s", i, inst)
-                logging.info("---< 'test_%s' FAILED >---", i)
-
-        if err:
-            logging.error('Some subtests failed (%s)' % err[:-2])
-            raise error.TestFail('Some subtests failed (%s)' % err[:-2])
-
-
-    def setup(self):
-        """
-        Setup
-        """
-        logging.debug('Setting up cgroups modules')
-
-        self._client = os.path.join(self.bindir, "cgroup_client.py")
-
-        _modules = ['cpuset', 'ns', 'cpu', 'cpuacct', 'memory', 'devices',
-                    'freezer', 'net_cls', 'blkio']
-        if (self.modules.init(_modules) <= 0):
-            raise error.TestFail('Can\'t mount any cgroup modules')
-
-
-    def cleanup(self):
-        """
-        Unmount all cgroups and remove directories
-        """
-        logging.info('Cleanup')
-        self.modules.cleanup()
-
-
-    #############################
-    # TESTS
-    #############################
-    def test_memory(self):
-        """
-        Memory test
-        """
-        def cleanup(supress=False):
-            # cleanup
-            logging.debug("test_memory: Cleanup")
-            err = ""
-            if item.rm_cgroup(pwd):
-                err += "\nCan't remove cgroup directory"
-
-            utils.system("swapon -a")
-
-            if err:
-                if supress:
-                    logging.warning("Some parts of cleanup failed%s" % err)
-                else:
-                    raise error.TestFail("Some parts of cleanup failed%s" % err)
-
-        # Preparation
-        item = CG('memory', self._client)
-        if item.initialize(self.modules):
-            raise error.TestFail("cgroup init failed")
-
-        if item.smoke_test():
-            raise error.TestFail("smoke_test failed")
-
-        pwd = item.mk_cgroup()
-        if pwd == None:
-            raise error.TestFail("Can't create cgroup")
-
-        logging.debug("test_memory: Memory filling test")
-
-        f = open('/proc/meminfo','r')
-        mem = f.readline()
-        while not mem.startswith("MemFree"):
-            mem = f.readline()
-        # Use the free memory, but at most 1G
-        mem = min(int(mem.split()[1])/1024, 1024)
-        mem = max(mem, 100) # at least 100M
-        memsw_limit_bytes = item.get_property("memory.memsw.limit_in_bytes",
-                                              supress=True)
-        if memsw_limit_bytes is not None:
-            memsw = True
-            # Clear swap
-            utils.system("swapoff -a")
-            utils.system("swapon -a")
-            f.seek(0)
-            swap = f.readline()
-            while not swap.startswith("SwapTotal"):
-                swap = f.readline()
-            swap = int(swap.split()[1])/1024
-            if swap < mem / 2:
-                logging.error("Not enough swap memory to test 'memsw'")
-                memsw = False
-        else:
-            # Doesn't support swap + memory limitation, disable swap
-            logging.info("System does not support 'memsw'")
-            utils.system("swapoff -a")
-            memsw = False
-        outf = NamedTemporaryFile('w+', prefix="cgroup_client-",
-                                  dir="/tmp")
-        logging.debug("test_memory: Initialization passed")
-
-        ################################################
-        # Fill the memory without cgroup limitation
-        # Should pass
-        ################################################
-        logging.debug("test_memory: Memfill WO cgroup")
-        ps = item.test("memfill %d %s" % (mem, outf.name))
-        ps.stdin.write('\n')
-        i = 0
-        while ps.poll() == None:
-            if i > 60:
-                break
-            i += 1
-            time.sleep(1)
-        if i > 60:
-            ps.terminate()
-            raise error.TestFail("Memory filling failed (WO cgroup)")
-        outf.seek(0)
-        outf.flush()
-        out = outf.readlines()
-        if (len(out) < 2) or (ps.poll() != 0):
-            raise error.TestFail("Process failed (WO cgroup); output:\n%s"
-                                 "\nReturn: %d" % (out, ps.poll()))
-        if not out[-1].startswith("PASS"):
-            raise error.TestFail("Unsuccessful memory filling "
-                                 "(WO cgroup)")
-        logging.debug("test_memory: Memfill WO cgroup passed")
-
-        ################################################
-        # Fill the memory with 1/2 memory limit
-        # memsw: should swap out part of the process and pass
-        # WO memsw: should fail (SIGKILL)
-        ################################################
-        logging.debug("test_memory: Memfill mem only limit")
-        ps = item.test("memfill %d %s" % (mem, outf.name))
-        if item.set_cgroup(ps.pid, pwd):
-            raise error.TestFail("Could not set cgroup")
-        if item.set_prop("memory.limit_in_bytes", ("%dM" % (mem/2)), pwd):
-            raise error.TestFail("Could not set mem limit (mem)")
-        ps.stdin.write('\n')
-        i = 0
-        while ps.poll() == None:
-            if i > 120:
-                break
-            i += 1
-            time.sleep(1)
-        if i > 120:
-            ps.terminate()
-            raise error.TestFail("Memory filling failed (mem)")
-        outf.seek(0)
-        outf.flush()
-        out = outf.readlines()
-        if (len(out) < 2):
-            raise error.TestFail("Process failed (mem); output:\n%s"
-                          "\nReturn: %d" % (out, ps.poll()))
-        if memsw:
-            if not out[-1].startswith("PASS"):
-                logging.error("test_memory: cgroup_client.py returned %d; "
-                              "output:\n%s", ps.poll(), out)
-                raise error.TestFail("Unsuccessful memory filling (mem)")
-        else:
-            if out[-1].startswith("PASS"):
-                raise error.TestFail("Unexpected memory filling (mem)")
-            else:
-                filled = int(out[-2].split()[1][:-1])
-                if mem/2 > 1.5 * filled:
-                    logging.error("test_memory: Limit = %dM, Filled = %dM (+ "
-                                  "python overhead up to 1/3 (mem))", mem/2,
-                                  filled)
-                else:
-                    logging.debug("test_memory: Limit = %dM, Filled = %dM (+ "
-                                  "python overhead up to 1/3 (mem))", mem/2,
-                                  filled)
-        logging.debug("test_memory: Memfill mem only cgroup passed")
-
-        ################################################
-        # Fill the memory with 1/2 memory+swap limit
-        # Should fail
-        # (memory.limit_in_bytes have to be set prior to this test)
-        ################################################
-        if memsw:
-            logging.debug("test_memory: Memfill mem + swap limit")
-            ps = item.test("memfill %d %s" % (mem, outf.name))
-            if item.set_cgroup(ps.pid, pwd):
-                raise error.TestFail("Could not set cgroup (memsw)")
-            if item.set_prop("memory.memsw.limit_in_bytes", "%dM"%(mem/2), pwd):
-                raise error.TestFail("Could not set mem limit (memsw)")
-            ps.stdin.write('\n')
-            i = 0
-            while ps.poll() == None:
-                if i > 120:
-                    break
-                i += 1
-                time.sleep(1)
-            if i > 120:
-                ps.terminate()
-                raise error.TestFail("Memory filling failed (mem)")
-            outf.seek(0)
-            outf.flush()
-            out = outf.readlines()
-            if (len(out) < 2):
-                raise error.TestFail("Process failed (memsw); output:\n%s"
-                                     "\nReturn: %d" % (out, ps.poll()))
-            if out[-1].startswith("PASS"):
-                raise error.TestFail("Unexpected memory filling (memsw)",
-                              mem)
-            else:
-                filled = int(out[-2].split()[1][:-1])
-                if mem / 2 > 1.5 * filled:
-                    logging.error("test_memory: Limit = %dM, Filled = %dM (+ "
-                                  "python overhead up to 1/3 (memsw))", mem/2,
-                                  filled)
-                else:
-                    logging.debug("test_memory: Limit = %dM, Filled = %dM (+ "
-                                  "python overhead up to 1/3 (memsw))", mem/2,
-                                  filled)
-            logging.debug("test_memory: Memfill mem + swap cgroup passed")
-
-        ################################################
-        # CLEANUP
-        ################################################
-        cleanup()
-
-
-
-    def test_cpuset(self):
-        """
-        Cpuset test
-        1) Initiate CPU load on CPU0, then spread onto CPU* - CPU0
-        """
-        class per_cpu_load:
-            """
-            Handles the per_cpu_load stats
-            self.values [cpus, cpu0, cpu1, ...]
-            """
-            def __init__(self):
-                """
-                Init
-                """
-                self.values = []
-                self.f = open('/proc/stat', 'r')
-                line = self.f.readline()
-                while line:
-                    if line.startswith('cpu'):
-                        self.values.append(int(line.split()[1]))
-                    else:
-                        break
-                    line = self.f.readline()
-
-            def reload(self):
-                """
-                Reload current values
-                """
-                self.values = self.get()
-
-            def get(self):
-                """
-                Get the current values
-                @return vals: array of current values [cpus, cpu0, cpu1..]
-                """
-                self.f.seek(0)
-                self.f.flush()
-                vals = []
-                for i in range(len(self.values)):
-                    vals.append(int(self.f.readline().split()[1]))
-                return vals
-
-            def tick(self):
-                """
-                Reload values and returns the load between the last tick/reload
-                @return vals: array of load between ticks/reloads
-                              values [cpus, cpu0, cpu1..]
-                """
-                vals = self.get()
-                ret = []
-                for i in range(len(self.values)):
-                    ret.append(vals[i] - self.values[i])
-                self.values = vals
-                return ret
-
-        def cleanup(supress=False):
-            # cleanup
-            logging.debug("test_cpuset: Cleanup")
-            err = ""
-            try:
-                for task in tasks:
-                    for i in range(10):
-                        task.terminate()
-                        if task.poll() != None:
-                            break
-                        time.sleep(1)
-                    if i >= 9:
-                        logging.error("test_cpuset: Subprocess didn't finish")
-            except Exception, inst:
-                err += "\nCan't terminate tasks: %s" % inst
-            if item.rm_cgroup(pwd):
-                err += "\nCan't remove cgroup directory"
-            if err:
-                if supress:
-                    logging.warning("Some parts of cleanup failed%s" % err)
-                else:
-                    raise error.TestFail("Some parts of cleanup failed%s" % err)
-
-        # Preparation
-        item = CG('cpuset', self._client)
-        if item.initialize(self.modules):
-            raise error.TestFail("cgroup init failed")
-
-        # FIXME: new cpuset cgroup doesn't have any mems and cpus assigned
-        # thus smoke_test won't work
-        #if item.smoke_test():
-        #    raise error.TestFail("smoke_test failed")
-
-        try:
-            # Available cpus: cpuset.cpus = "0-$CPUS\n"
-            no_cpus = int(item.get_prop("cpuset.cpus").split('-')[1]) + 1
-        except:
-            raise error.TestFail("Failed to get no_cpus or no_cpus = 1")
-
-        pwd = item.mk_cgroup()
-        if pwd == None:
-            raise error.TestFail("Can't create cgroup")
-        # FIXME: new cpuset cgroup doesn't have any mems and cpus assigned
-        try:
-            tmp = item.get_prop("cpuset.cpus")
-            item.set_property("cpuset.cpus", tmp, pwd)
-            tmp = item.get_prop("cpuset.mems")
-            item.set_property("cpuset.mems", tmp, pwd)
-        except:
-            cleanup(True)
-            raise error.TestFail("Failed to set cpus and mems of "
-                                 "a new cgroup")
-
-        ################################################
-        # Cpu allocation test
-        # Use cpu0 and verify, than all cpu* - cpu0 and verify
-        ################################################
-        logging.debug("test_cpuset: Cpu allocation test")
-
-        tasks = []
-        # Run no_cpus + 1 jobs
-        for i in range(no_cpus + 1):
-            tasks.append(item.test("cpu"))
-            if item.set_cgroup(tasks[i].pid, pwd):
-                cleanup(True)
-                raise error.TestFail("Failed to set cgroup")
-            tasks[i].stdin.write('\n')
-        stats = per_cpu_load()
-        # Use only the first CPU
-        item.set_property("cpuset.cpus", 0, pwd)
-        stats.reload()
-        time.sleep(10)
-        # [0] = all cpus
-        s1 = stats.tick()[1:]
-        s2 = s1[1:]
-        s1 = s1[0]
-        for _s in s2:
-            if s1 < _s:
-                cleanup(True)
-                raise error.TestFail("Unused processor had higher utilization\n"
-                                     "used cpu: %s, remaining cpus: %s"
-                                     % (s1, s2))
-
-        if no_cpus == 2:
-            item.set_property("cpuset.cpus", "1", pwd)
-        else:
-            item.set_property("cpuset.cpus", "1-%d"%(no_cpus-1), pwd)
-        stats.reload()
-        time.sleep(10)
-        s1 = stats.tick()[1:]
-        s2 = s1[0]
-        s1 = s1[1:]
-        for _s in s1:
-            if s2 > _s:
-                cleanup(True)
-                raise error.TestFail("Unused processor had higher utilization\n"
-                                     "used cpus: %s, remaining cpu: %s"
-                                     % (s1, s2))
-        logging.debug("test_cpuset: Cpu allocation test passed")
-
-        ################################################
-        # CLEANUP
-        ################################################
-        cleanup()
diff --git a/client/tests/cgroup/cgroup_client.py b/client/tests/cgroup/cgroup_client.py
deleted file mode 100755
index d3e5be4..0000000
--- a/client/tests/cgroup/cgroup_client.py
+++ /dev/null
@@ -1,131 +0,0 @@
-#!/usr/bin/python2
-# -*- coding: utf-8 -*-
-"""
-Interactive python script for testing cgroups. It will try to use system
-resources such as cpu, memory and device IO. The other cgroups test
-instrumentation will inspect whether the linux box behaved as it should.
-
-@copyright: 2011 Red Hat Inc.
-@author: Lukas Doktor <ldoktor@redhat.com>
-"""
-import array, sys, time, math, os
-from tempfile import mktemp
-
-def test_smoke(args):
-    """
-    Waits for a newline on stdin, then exits.
-    """
-    print "TEST: smoke"
-    print "TEST: wait for input"
-    raw_input()
-    print "PASS: smoke"
-
-
-def test_memfill(args):
-    """
-    Waits for a newline on stdin, then fills memory up to the given size (MB).
-    """
-    size = 1024
-    f = sys.stdout
-    if args:
-        size = int(args[0])
-        if len(args) > 1:
-            f = open(args[1], 'w', 0)
-    print "TEST: memfill (%dM)" % size
-    print "Redirecting to: %s" % f.name
-    f.write("TEST: memfill (%dM)\n" % size)
-    f.write("TEST: wait for input\n")
-    raw_input()
-    mem = array.array('B')
-    buf = ""
-    for i in range(1024 * 1024):
-        buf += '\x00'
-    for i in range(size):
-        mem.fromstring(buf)
-        f.write("TEST: %dM\n" % i)
-        try:
-            f.flush()
-            os.fsync(f)
-        except:
-            pass
-    f.write("PASS: memfill (%dM)\n" % size)
-
-
-def test_cpu(args):
-    """
-    Stress the CPU.
-    """
-    print "TEST: cpu"
-    print "TEST: wait for input"
-    raw_input()
-    while True:
-        for i in range (1000, 10000):
-            math.factorial(i)
-
-
-def test_devices(args):
-    if args:
-        if args[0] == "write":
-            test_devices_write()
-        else:
-            test_devices_read()
-    else:
-        test_devices_read()
-
-
-def test_devices_read():
-    """
-    Infinite read loop from /dev/zero.
-    """
-    print "TEST: devices read"
-    print "TEST: wait for input"
-    raw_input()
-
-    dev = open("/dev/zero", 'r')
-    while True:
-        print "TEST: tick"
-        dev.flush()
-        dev.read(1024*1024)
-        time.sleep(1)
-
-
-def test_devices_write():
-    """
-    Infinite write loop into the /dev/null device.
-    """
-    print "TEST: devices write"
-    print "TEST: wait for input"
-    raw_input()
-
-    dev = open("/dev/null", 'w')
-    buf = ""
-    for _ in range(1024*1024):
-        buf += '\x00'
-    while True:
-        print "TEST: tick"
-        dev.write(buf)
-        dev.flush()
-        time.sleep(1)
-
-
-def main():
-    """
-    Main (infinite) loop.
-    """
-    if len(sys.argv) < 2:
-        print "FAIL: Incorrect usage (%s)" % sys.argv
-        return -1
-    args = sys.argv[2:]
-    if sys.argv[1] == "smoke":
-        test_smoke(args)
-    elif sys.argv[1] == "memfill":
-        test_memfill(args)
-    elif sys.argv[1] == "cpu":
-        test_cpu(args)
-    elif sys.argv[1] == "devices":
-        test_devices(args)
-    else:
-        print "FAIL: No test specified (%s)" % sys.argv
-
-if __name__ == "__main__":
-    main()
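
The repeated "wait for input" / raw_input() pattern above exists so the controlling test can classify the child into a cgroup before it starts consuming resources. A minimal sketch of that handshake from the parent side (the script path is illustrative):

    import subprocess

    # The workload blocks in raw_input() until we write a newline to stdin.
    child = subprocess.Popen(['./cgroup_client.py', 'cpu'],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)

    # ... move child.pid into the cgroup under test here ...

    child.stdin.write(b'\n')   # release the workload
    child.stdin.flush()
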
diff --git a/client/tests/cgroup/cgroup_common.py b/client/tests/cgroup/cgroup_common.py
deleted file mode 100755
index 9dcfdc0..0000000
--- a/client/tests/cgroup/cgroup_common.py
+++ /dev/null
@@ -1,379 +0,0 @@
-#!/usr/bin/python2
-# -*- coding: utf-8 -*-
-"""
-Helpers for cgroup testing.
-
-@copyright: 2011 Red Hat Inc.
-@author: Lukas Doktor <ldoktor@redhat.com>
-"""
-import os, logging, subprocess, time, shutil
-from tempfile import mkdtemp
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-
-
-class Cgroup(object):
-    """
-    Cgroup handling class.
-    """
-    def __init__(self, module, _client):
-        """
-        Constructor
-        @param module: Name of the cgroup module
-        @param _client: Test script pwd + name
-        """
-        self.module = module
-        self._client = _client
-        self.root = None
-
-
-    def initialize(self, modules):
-        """
-        Initializes object for use.
-
-        @param modules: Array of all available cgroup modules.
-        @return: 0 when PASSED.
-        """
-        self.root = modules.get_pwd(self.module)
-        if self.root:
-            return 0
-        else:
-            logging.error("cg.initialize(): Module %s not found", self.module)
-            return -1
-        return 0
-
-
-    def mk_cgroup(self, root=None):
-        """
-        Creates new temporary cgroup
-        @param root: where to create this cgroup (default: self.root)
-        @return: 0 when PASSED
-        """
-        try:
-            if root:
-                pwd = mkdtemp(prefix='cgroup-', dir=root) + '/'
-            else:
-                pwd = mkdtemp(prefix='cgroup-', dir=self.root) + '/'
-        except Exception, inst:
-            logging.error("cg.mk_cgroup(): %s" , inst)
-            return None
-        return pwd
-
-
-    def rm_cgroup(self, pwd, supress=False):
-        """
-        Removes cgroup.
-
-        @param pwd: cgroup directory.
-        @param supress: suppress the output.
-        @return: 0 when PASSED
-        """
-        try:
-            os.rmdir(pwd)
-        except Exception, inst:
-            if not supress:
-                logging.error("cg.rm_cgroup(): %s" , inst)
-            return -1
-        return 0
-
-
-    def test(self, cmd):
-        """
-        Executes cgroup_client.py with cmd parameter.
-
-        @param cmd: command to be executed
-        @return: subprocess.Popen() process
-        """
-        logging.debug("cg.test(): executing paralel process '%s'", cmd)
-        cmd = self._client + ' ' + cmd
-        process = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
-                                   stdout=subprocess.PIPE,
-                                   stderr=subprocess.PIPE, close_fds=True)
-        return process
-
-
-    def is_cgroup(self, pid, pwd):
-        """
-        Checks if the 'pid' process is in 'pwd' cgroup
-        @param pid: pid of the process
-        @param pwd: cgroup directory
-        @return: 0 when is 'pwd' member
-        """
-        if open(pwd + '/tasks').readlines().count("%d\n" % pid) > 0:
-            return 0
-        else:
-            return -1
-
-
-    def is_root_cgroup(self, pid):
-        """
-        Checks if the 'pid' process is in root cgroup (WO cgroup)
-        @param pid: pid of the process
-        @return: 0 when is 'root' member
-        """
-        return self.is_cgroup(pid, self.root)
-
-
-    def set_cgroup(self, pid, pwd):
-        """
-        Sets cgroup membership
-        @param pid: pid of the process
-        @param pwd: cgroup directory
-        @return: 0 when PASSED
-        """
-        try:
-            open(pwd+'/tasks', 'w').write(str(pid))
-        except Exception, inst:
-            logging.error("cg.set_cgroup(): %s" , inst)
-            return -1
-        if self.is_cgroup(pid, pwd):
-            logging.error("cg.set_cgroup(): Setting %d pid into %s cgroup "
-                          "failed", pid, pwd)
-            return -1
-        else:
-            return 0
-
-    def set_root_cgroup(self, pid):
-        """
-        Resets the cgroup membership (sets to root)
-        @param pid: pid of the process
-        @return: 0 when PASSED
-        """
-        return self.set_cgroup(pid, self.root)
-
-
-    def get_prop(self, prop, pwd=None, supress=False):
-        """
-        Gets one line of the property value
-        @param prop: property name (file)
-        @param pwd: cgroup directory
-        @param supress: suppress the output
-        @return: String value or None when FAILED
-        """
-        tmp = self.get_property(prop, pwd, supress)
-        if tmp:
-            if tmp[0][-1] == '\n':
-                tmp[0] = tmp[0][:-1]
-            return tmp[0]
-        else:
-            return None
-
-
-    def get_property(self, prop, pwd=None, supress=False):
-        """
-        Gets the property value
-        @param prop: property name (file)
-        @param pwd: cgroup directory
-        @param supress: suppress the output
-        @return: [] values or None when FAILED
-        """
-        if pwd == None:
-            pwd = self.root
-        try:
-            ret = open(pwd+prop, 'r').readlines()
-        except Exception, inst:
-            ret = None
-            if not supress:
-                logging.error("cg.get_property(): %s" , inst)
-        return ret
-
-
-    def set_prop(self, prop, value, pwd=None, check=True):
-        """
-        Sets the one-line property value concerning the K,M,G postfix
-        @param prop: property name (file)
-        @param value: desired value
-        @param pwd: cgroup directory
-        @param check: check the value after setup
-        @return: 0 when PASSED
-        """
-        _value = value
-        try:
-            value = str(value)
-            if value[-1] == '\n':
-                value = value[:-1]
-            if value[-1] == 'K':
-                value = int(value[:-1]) * 1024
-            elif value[-1] == 'M':
-                value = int(value[:-1]) * 1048576
-            elif value[-1] == 'G':
-                value = int(value[:-1]) * 1073741824
-        except:
-            logging.error("cg.set_prop() fallback into cg.set_property.")
-            value = _value
-        return self.set_property(prop, value, pwd, check)
-
-
-    def set_property(self, prop, value, pwd=None, check=True):
-        """
-        Sets the property value
-        @param prop: property name (file)
-        @param value: desired value
-        @param pwd: cgroup directory
-        @param check: check the value after setup
-        @return: 0 when PASSED
-        """
-        value = str(value)
-        if pwd == None:
-            pwd = self.root
-        try:
-            open(pwd+prop, 'w').write(value)
-        except Exception, inst:
-            logging.error("cg.set_property(): %s" , inst)
-            return -1
-        if check:
-            # Get the first line - '\n'
-            _value = self.get_property(prop, pwd)[0][:-1]
-            if value != _value:
-                logging.error("cg.set_property(): Setting failed: desired = %s,"
-                              " real value = %s", value, _value)
-                return -1
-        return 0
-
-
-    def smoke_test(self):
-        """
-        Smoke test
-        Module independent basic tests
-        """
-        part = 0
-        pwd = self.mk_cgroup()
-        if pwd == None:
-            logging.error("cg.smoke_test[%d]: Can't create cgroup", part)
-            return -1
-
-        part += 1
-        ps = self.test("smoke")
-        if ps == None:
-            logging.error("cg.smoke_test[%d]: Couldn't create process", part)
-            return -1
-
-        part += 1
-        if (ps.poll() != None):
-            logging.error("cg.smoke_test[%d]: Process died unexpectidly", part)
-            return -1
-
-        # New process should be a root member
-        part += 1
-        if self.is_root_cgroup(ps.pid):
-            logging.error("cg.smoke_test[%d]: Process is not a root member",
-                          part)
-            return -1
-
-        # Change the cgroup
-        part += 1
-        if self.set_cgroup(ps.pid, pwd):
-            logging.error("cg.smoke_test[%d]: Could not set cgroup", part)
-            return -1
-
-        # Try to remove used cgroup
-        part += 1
-        if self.rm_cgroup(pwd, supress=True) == 0:
-            logging.error("cg.smoke_test[%d]: Unexpected successful deletion of"
-                          " the used cgroup", part)
-            return -1
-
-        # Return the process into the root cgroup
-        part += 1
-        if self.set_root_cgroup(ps.pid):
-            logging.error("cg.smoke_test[%d]: Could not return the root cgroup "
-                          "membership", part)
-            return -1
-
-        # It should be safe to remove the cgroup now
-        part += 1
-        if self.rm_cgroup(pwd):
-            logging.error("cg.smoke_test[%d]: Can't remove cgroup directory",
-                          part)
-            return -1
-
-        # Finish the process
-        part += 1
-        ps.stdin.write('\n')
-        time.sleep(2)
-        if (ps.poll() == None):
-            logging.error("cg.smoke_test[%d]: Process is not finished", part)
-            return -1
-
-        return 0
-
-
-class CgroupModules(object):
-    """
-    Handles the list of different cgroup filesystems.
-    """
-    def __init__(self):
-        self.modules = []
-        self.modules.append([])
-        self.modules.append([])
-        self.modules.append([])
-        self.mountdir = mkdtemp(prefix='cgroup-') + '/'
-
-
-    def init(self, _modules):
-        """
-        Checks which modules are already mounted and, if necessary, mounts
-            the remaining ones into a temporary mountdir.
-        @param _modules: Desired modules.
-        @return: Number of initialized modules.
-        """
-        logging.debug("Desired cgroup modules: %s", _modules)
-        mounts = []
-        fp = open('/proc/mounts', 'r')
-        line = fp.readline().split()
-        while line:
-            if line[2] == 'cgroup':
-                mounts.append(line)
-            line = fp.readline().split()
-        fp.close()
-
-        for module in _modules:
-            # Is it already mounted?
-            i = False
-            for mount in mounts:
-                if mount[3].find(module) != -1:
-                    self.modules[0].append(module)
-                    self.modules[1].append(mount[1] + '/')
-                    self.modules[2].append(False)
-                    i = True
-                    break
-            if not i:
-                # Not yet mounted
-                os.mkdir(self.mountdir + module)
-                cmd = ('mount -t cgroup -o %s %s %s' %
-                       (module, module, self.mountdir + module))
-                try:
-                    utils.run(cmd)
-                    self.modules[0].append(module)
-                    self.modules[1].append(self.mountdir + module)
-                    self.modules[2].append(True)
-                except error.CmdError:
-                    logging.info("Cgroup module '%s' not available", module)
-
-        logging.debug("Initialized cgroup modules: %s", self.modules[0])
-        return len(self.modules[0])
-
-
-    def cleanup(self):
-        """
-        Unmount all cgroups and remove the mountdir.
-        """
-        for i in range(len(self.modules[0])):
-            if self.modules[2][i]:
-                utils.system('umount %s -l' % self.modules[1][i],
-                             ignore_status=True)
-        shutil.rmtree(self.mountdir)
-
-
-    def get_pwd(self, module):
-        """
-        Returns the mount directory of 'module'
-        @param module: desired module (memory, ...)
-        @return: mount directory of 'module' or None
-        """
-        try:
-            i = self.modules[0].index(module)
-        except Exception, inst:
-            logging.error("module %s not found: %s", module, inst)
-            return None
-        return self.modules[1][i]
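
For reference, the core operations of the Cgroup class above (create a group, classify a PID, set and read back a property) map onto the cgroup v2 unified hierarchy that replaced the per-controller mounts handled by CgroupModules. A hedged sketch, assuming v2 is mounted at /sys/fs/cgroup and with error handling omitted:

    import os

    CGROUP_ROOT = '/sys/fs/cgroup'          # assumes the v2 unified hierarchy

    def mk_cgroup(name):
        # Like Cgroup.mk_cgroup(): a cgroup is just a directory.
        path = os.path.join(CGROUP_ROOT, name)
        os.mkdir(path)
        return path

    def set_cgroup(pid, path):
        # v2 uses cgroup.procs instead of the v1 'tasks' file.
        with open(os.path.join(path, 'cgroup.procs'), 'w') as procs:
            procs.write(str(pid))

    def set_property(path, prop, value):
        # Same write-then-read-back check as Cgroup.set_property().
        with open(os.path.join(path, prop), 'w') as f:
            f.write(str(value))
        with open(os.path.join(path, prop)) as f:
            return f.read().strip()

Controllers such as cpuset must first be enabled in the parent's cgroup.subtree_control before their files (e.g. cpuset.cpus) appear in the child group.
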
diff --git a/client/tests/cgroup/control b/client/tests/cgroup/control
deleted file mode 100644
index 86aec06..0000000
--- a/client/tests/cgroup/control
+++ /dev/null
@@ -1,12 +0,0 @@
-AUTHOR = "Lukas Doktor <ldoktor@redhat.com>"
-NAME = "Cgroup"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-
-DOC = """
-This test checks basic functionality of cgroups
-"""
-
-job.run_test('cgroup')
diff --git a/client/tests/connectathon/connectathon.py b/client/tests/connectathon/connectathon.py
deleted file mode 100644
index a0ef86f..0000000
--- a/client/tests/connectathon/connectathon.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import os, shutil, glob, logging
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class connectathon(test.test):
-    """
-    Connectathon is an NFS test suite that can run on
-    both BSD and System V based systems. The tests.init file
-    has to be modified based on the OS on which this test is run.
-
-    The tar file in this directory ships an init file that works for
-    the Linux platform.
-
-    @see www.connectathon.org
-    @author Poornima.Nayak (Poornima.Nayak@in.ibm.com)(original code)
-    """
-    version = 1
-    def initialize(self):
-        """
-        Sets the overall failure counter for the test.
-        """
-        self.nfail = 0
-
-
-    def setup(self, tarball='connectathon.tar.bz2'):
-        connectathon_tarball = utils.unmap_url(self.bindir, tarball,
-                                               self.tmpdir)
-        utils.extract_tarball_to_dir(connectathon_tarball, self.srcdir)
-
-        os.chdir(self.srcdir)
-        utils.system('make clean')
-        utils.system('make')
-
-
-    def run_once(self, testdir=None, args='', cthon_iterations=1):
-        """
-        Runs the test, with the appropriate control file.
-        """
-        os.chdir(self.srcdir)
-
-        if testdir is None:
-            testdir = self.tmpdir
-
-        self.results_path = os.path.join(self.resultsdir,
-                                         'raw_output_%s' % self.iteration)
-
-        try:
-            if not args:
-                # run basic test
-                args = "-b -t"
-
-            self.results = utils.system_output('./runtests -N %s %s %s' %
-                                              (cthon_iterations, args, testdir))
-            utils.open_write_close(self.results_path, self.results)
-
-        except error.CmdError, e:
-            self.nfail += 1
-            logging.error("Test failed: %s", e)
-
-
-    def postprocess(self):
-        """
-        Raises on failure.
-        """
-        if self.nfail != 0:
-            raise error.TestFail('Connectathon test suite failed.')
diff --git a/client/tests/connectathon/connectathon.tar.bz2 b/client/tests/connectathon/connectathon.tar.bz2
deleted file mode 100644
index f67d8c2..0000000
--- a/client/tests/connectathon/connectathon.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/connectathon/control b/client/tests/connectathon/control
deleted file mode 100644
index 92fa518..0000000
--- a/client/tests/connectathon/control
+++ /dev/null
@@ -1,31 +0,0 @@
-AUTHOR = "Poornima Nayak <mpnayak@linux.vnet.ibm.com>"
-NAME = "connectathon"
-TEST_CATEGORY = "NFS FVT"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-TIME = 'MEDIUM'
-DOC = '''
-Test exercising NFS-mounted paths.
-
-More information about connectathon can be found at
-http://www.connectathon.org/nfstests.html
-'''
-
-import datetime, os
-now = datetime.datetime.now()
-#iter_range = [10, 100, 1000, 10000]
-#tests = ['-b -t', '-g -t', '-g -f', '-s -t', '-s -f', '-l -f', '-l -t']
-iter_range = [1]
-tests = ['-s -t']
-tag_ver = 0
-for test in tests:
-    for j in iter_range:
-        # In this example, /mnt/test is an NFS-mounted location set up beforehand.
-        dir = os.path.join('/mnt', 'test', now.strftime("%Y-%m-%d%H:%M"))
-        if not os.path.isdir(dir):
-            os.makedirs(dir)
-        job.run_test(url='connectathon', testdir=dir, args=test,
-                     cthon_iterations=j,
-                     tag=("itera-%s-test-%s" % (j, tag_ver)))
-        tag_ver = tag_ver + 1
-
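
The control file above assumes /mnt/test is already an NFS mount. A minimal sketch of the kind of guard a control file could add before job.run_test(); the path is the example one from the comment above, and the prefix match is deliberately crude:

    def is_nfs_mount(path):
        # /proc/mounts lines: <device> <mountpoint> <fstype> <options> ...
        with open('/proc/mounts') as mounts:
            for line in mounts:
                device, mountpoint, fstype = line.split()[:3]
                if fstype.startswith('nfs') and path.startswith(mountpoint):
                    return True
        return False

    assert is_nfs_mount('/mnt/test'), '/mnt/test is not NFS-mounted'
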
diff --git a/client/tests/cpu_hotplug/control b/client/tests/cpu_hotplug/control
deleted file mode 100644
index 85d17c5..0000000
--- a/client/tests/cpu_hotplug/control
+++ /dev/null
@@ -1,9 +0,0 @@
-AUTHOR = 'Martin Bligh <mbligh@google.com>'
-DOC = 'lhcs_regression: Regression test for CPU hotplug'
-NAME = 'cpu_hotplug'
-TIME = 'MEDIUM' ## ?
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-
-job.run_test('cpu_hotplug')
diff --git a/client/tests/cpu_hotplug/cpu_hotplug.py b/client/tests/cpu_hotplug/cpu_hotplug.py
deleted file mode 100644
index 612d701..0000000
--- a/client/tests/cpu_hotplug/cpu_hotplug.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import time, os
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class cpu_hotplug(test.test):
-    version = 2
-
-    # http://developer.osdl.org/dev/hotplug/tests/lhcs_regression-1.6.tgz
-    def setup(self, tarball = 'lhcs_regression-1.6.tgz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-
-
-    def initialize(self):
-        # Check if the kernel supports cpu hotplug
-        if utils.running_config():
-            utils.check_for_kernel_feature('HOTPLUG_CPU')
-
-        # Check cpu nums, if equals 1, quit.
-        if utils.count_cpus() == 1:
-            e_msg = 'Single CPU online detected, test not supported.'
-            raise error.TestNAError(e_msg)
-
-        # Have a simple and quick check first, FIX me please.
-        utils.system('dmesg -c > /dev/null')
-        for cpu in utils.cpu_online_map():
-            if os.path.isfile('/sys/devices/system/cpu/cpu%s/online' % cpu):
-                utils.system('echo 0 > /sys/devices/system/cpu/cpu%s/online' % cpu, 1)
-                utils.system('dmesg -c')
-                time.sleep(3)
-                utils.system('echo 1 > /sys/devices/system/cpu/cpu%s/online' % cpu, 1)
-                utils.system('dmesg -c')
-                time.sleep(3)
-
-
-    def run_once(self):
-        # Run the CPU hotplug regression suite.
-        os.chdir(self.srcdir)
-        utils.system('./runtests.sh')
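
The initialize() step above toggles every hot-pluggable CPU offline and back online through sysfs before handing off to the LHCS suite. A minimal sketch of that toggle without shelling out to echo/dmesg (error handling omitted):

    import os
    import time

    def set_cpu_online(cpu, online):
        # cpu0 usually has no 'online' file because it cannot be hot-removed.
        path = '/sys/devices/system/cpu/cpu%d/online' % cpu
        if not os.path.isfile(path):
            return
        with open(path, 'w') as f:
            f.write('1' if online else '0')

    def bounce_cpu(cpu, settle_seconds=3):
        set_cpu_online(cpu, False)
        time.sleep(settle_seconds)
        set_cpu_online(cpu, True)
        time.sleep(settle_seconds)
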
diff --git a/client/tests/cpu_hotplug/lhcs_regression-1.6.tgz b/client/tests/cpu_hotplug/lhcs_regression-1.6.tgz
deleted file mode 100644
index f093c48..0000000
--- a/client/tests/cpu_hotplug/lhcs_regression-1.6.tgz
+++ /dev/null
Binary files differ
diff --git a/client/tests/crashme/control b/client/tests/crashme/control
deleted file mode 100644
index c66cc08..0000000
--- a/client/tests/crashme/control
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME='crashme'
-AUTHOR='George J. Carrette'
-ATTRIBUTES = "suite:kernel_daily_regression"
-TEST_CATEGORY='Stress'
-TEST_CLASS='Kernel'
-TEST_TYPE='client'
-TIME='MEDIUM'
-
-DOC='''
-Runs the crashme suite located at:
-
-http://people.delphiforums.com/gjc/crashme.html
-
-All the files purporting to be source, available for download on this
-site, appear to be corrupted.
-
-I got the sources here:
-
-https://launchpad.net/ubuntu/natty/+source/crashme/2.4-9
-
-crashme_2.4.orig.tar.gz
-crashme_2.4-9.diff.gz
-
-The 'makefile' must be modified to replace 'cc' with '$(CC)' so that
-32-bit executables will be produced.
-
-'''
-
-job.run_test('crashme')
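
The makefile tweak described above (replacing the hard-coded 'cc' with '$(CC)') is a one-line substitution. A hedged sketch of doing it programmatically instead of by hand; the whole-word regex is naive and assumes no other token in the makefile is literally 'cc':

    import re

    def use_cc_variable(makefile_path):
        # Swap the hard-coded compiler for the overridable $(CC) variable so
        # a 32-bit (or cross) compiler can be injected via the environment.
        with open(makefile_path) as f:
            text = f.read()
        with open(makefile_path, 'w') as f:
            f.write(re.sub(r'\bcc\b', '$(CC)', text))
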
diff --git a/client/tests/crashme/crashme.py b/client/tests/crashme/crashme.py
deleted file mode 100644
index 708c964..0000000
--- a/client/tests/crashme/crashme.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class crashme(test.test):
-    """
-    Runs the crashme random code test suite.
-
-    crashme [+]<nbytes>[.inc] <srand> <ntrys> [nsub] [verbose]
-
-      [NBYTES]
-         The [NBYTES] should be an integer, specifying the size of
-         the random data string in bytes. If given negative then the
-         bytes are printed instead of being executed. If given with
-         an explicit plus sign then the storage for the bytes is
-         freshly malloc'ed each time. This can have an effect on
-         machines with separate I and D cache mechanisms. The
-         argument can also have a dot in it, X.Y, in which case Y is
-         an increment for a pointer into the random data. The buffer
-         is recalculated only when the pointer gets near the end of
-         the data.
-
-      [SRAND]
-         The [SRAND] is an input seed to the random number generator,
-         passed to srand.
-
-      [NTRIES]
-         The [NTRIES] is how many times to loop before exiting
-         normally from the program.
-
-      [NSUB]
-         The [NSUB] is optional, the number of vfork subprocesses
-         running all at once. If negative run one after another. If
-         given as a time hrs:mns:scs (hours, minutes, seconds) then
-         one sub-process will be run to completion, followed by
-         another, until the time limit has been reached. If this
-         argument is given as the empty string or . then it is
-         ignored.
-
-         When in sequential-subprocess mode there is a 30 second time
-         limit on each subprocess. This is to allow the
-         instruction-set-space random walk to continue when a
-         process bashes itself into an infinite loop. For example,
-         the ntrys can be bashed to a very large number with nbytes
-         bashed to zero. (10 second limit on Windows NT).
-
-         The SRAND argument is incremented by one for each subprocess.
-
-      [VERBOSE]
-         The [VERBOSE] arg is optional. 0 is the least verbose, 5 the
-         most.
-"""
-    version = 2
-
-    def initialize(self):
-        self.job.require_gcc()
-
-    def setup(self, tarball = 'crashme_2.4.orig.tar.bz2'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        utils.system('patch -p 1 <../crashme_2.4-9.diff')
-        utils.make()
-
-    def run_once(self, args_list=''):
-        if args_list:
-            args = args_list
-        else:
-            args = ''
-
-        crashme_path = os.path.join(self.srcdir, 'crashme')
-        utils.system("%s %s" % (crashme_path, args))
diff --git a/client/tests/crashme/crashme_2.4-9.diff b/client/tests/crashme/crashme_2.4-9.diff
deleted file mode 100644
index c109a7c..0000000
--- a/client/tests/crashme/crashme_2.4-9.diff
+++ /dev/null
@@ -1,440 +0,0 @@
---- crashme-2.4.orig/crashme.1
-+++ crashme-2.4/crashme.1
-@@ -1,4 +1,4 @@
--.TH CRASHME 1C LOCAL 
-+.TH CRASHME 1 LOCAL 
- .SH NAME
- crashme \- test operating environment software robustness
- .SH SYNOPSIS
---- crashme-2.4.orig/debian/copyright
-+++ crashme-2.4/debian/copyright
-@@ -0,0 +1,25 @@
-+This package was created by Jay Kominek <jkominek@debian.org> on Sunday
-+the 7th of June, 1998.
-+
-+The copyright for crashme is as follows, taken verbatim from crashme.c:
-+
-+ *
-+ *             COPYRIGHT (c) 1990-1994 BY        *
-+ *  GEORGE J. CARRETTE, CONCORD, MASSACHUSETTS.  *
-+ *             ALL RIGHTS RESERVED               *
-+
-+Permission to use, copy, modify, distribute and sell this software
-+and its documentation for any purpose and without fee is hereby
-+granted, provided that the above copyright notice appear in all copies
-+and that both that copyright notice and this permission notice appear
-+in supporting documentation, and that the name of the author
-+not be used in advertising or publicity pertaining to distribution
-+of the software without specific, written prior permission.
-+
-+THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
-+ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL
-+HE BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
-+ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
-+ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-+SOFTWARE.
---- crashme-2.4.orig/debian/README.debian
-+++ crashme-2.4/debian/README.debian
-@@ -0,0 +1,23 @@
-+                    _                    
-+  ___ _ __ __ _ ___| |__  _ __ ___   ___ 
-+ / __| '__/ _` / __| '_ \| '_ ` _ \ / _ \
-+| (__| | | (_| \__ \ | | | | | | | |  __/
-+ \___|_|  \__,_|___/_| |_|_| |_| |_|\___|
-+                  ... for Debian!
-+
-+ So, you want to crash your computer, but pulling the processor out is
-+getting boring? crashme is for you! crashme works by generating strings of
-+random bytes, and then (here is the fun part) trying to execute the
-+bytes!
-+
-+*** WARNING ***
-+ If you run crashme, your system could very well crash. YOU COULD LOSE
-+YOUR ALL OF YOUR DATA!!! I (Jay Kominek), the author of crashme, Debian,
-+SPI and its officers take no responsibility if you lose data by running
-+crashme. Note: crashme is installed into /usr/bin, world executable.
-+Joe Random User could very well execute it and crash your system!
-+
-+
-+  --- Jay Kominek
-+       jkominek@debian.org
-+       jfk@acm.org
---- crashme-2.4.orig/debian/compat
-+++ crashme-2.4/debian/compat
-@@ -0,0 +1 @@
-+4
---- crashme-2.4.orig/debian/rules
-+++ crashme-2.4/debian/rules
-@@ -0,0 +1,46 @@
-+#!/usr/bin/make -f
-+# Originally generated by debmake, and then converted to use debhelper
-+
-+# Uncomment this to turn on verbose mode.
-+#export DH_VERBOSE=1
-+
-+CFLAGS = -Wall -g
-+ifneq (,$(findstring noopt,$(DEB_BUILD_OPTIONS)))
-+	CFLAGS += -O0
-+else
-+	CFLAGS += -O2
-+endif
-+
-+build: crashme
-+
-+clean:
-+	dh_testdir
-+	dh_testroot
-+
-+	rm -f crashme
-+
-+	dh_clean
-+
-+binary-indep:
-+# Nothing to do
-+
-+binary-arch:	build
-+	dh_testdir
-+	dh_testroot
-+	dh_clean
-+	dh_installdirs
-+	dh_install crashme usr/bin
-+	dh_installdocs crashme.html
-+	dh_installman crashme.1
-+	dh_installchangelogs
-+	dh_strip
-+	dh_compress
-+	dh_fixperms
-+	dh_installdeb
-+	dh_shlibdeps
-+	dh_gencontrol
-+	dh_md5sums
-+	dh_builddeb
-+
-+binary: binary-indep binary-arch
-+.PHONY: build clean binary-indep binary-arch binary
---- crashme-2.4.orig/debian/control
-+++ crashme-2.4/debian/control
-@@ -0,0 +1,17 @@
-+Source: crashme
-+Section: devel
-+Priority: optional
-+Maintainer: Aurélien GÉRÔME <ag@roxor.cx>
-+Standards-Version: 3.7.2
-+Build-Depends: debhelper (>= 4)
-+
-+Package: crashme
-+Architecture: any
-+Depends: ${shlibs:Depends}
-+Description: Stress tests operating system stability
-+ crashme generates strings of random bytes and then attempts to execute
-+ them. Used to test kernel stability.
-+ .
-+ **WARNING** While Linux has been known to survive days and weeks of crashme,
-+ IT IS NOT GUARANTEED THAT YOUR SYSTEM WILL SURVIVE! DO NOT USE THIS PROGRAM
-+ UNLESS YOU REALLY WANT TO CRASH YOUR COMPUTER
---- crashme-2.4.orig/debian/changelog
-+++ crashme-2.4/debian/changelog
-@@ -0,0 +1,75 @@
-+crashme (2.4-9) unstable; urgency=low
-+
-+  * Adopt the package (Closes: #353388).
-+  * Conform to policy with CFLAGS set to "-O2 -g -Wall".
-+  * Fix resulting gcc warnings from turning on "-Wall".
-+
-+ -- Aurélien GÉRÔME <ag@roxor.cx>  Mon, 31 Jul 2006 14:41:43 +0200
-+
-+crashme (2.4-8) unstable; urgency=low
-+
-+  * QA upload.
-+  * debian/postinst: Remove; /usr/doc already handled by the old prerm.
-+  * debian/rules:
-+    - Add support for DEB_BUILD_OPTIONS=noopt.
-+    - Use dh_install.
-+
-+ -- Matej Vela <vela@debian.org>  Sun, 30 Jul 2006 22:14:56 +0200
-+
-+crashme (2.4-7) unstable; urgency=low
-+
-+  * QA upload.
-+  * s/PAQUETE/crashme/g in postinst. Sorry 
-+
-+ -- Amaya Rodrigo Sastre <amaya@debian.org>  Wed, 12 Jul 2006 21:44:04 +0200
-+
-+crashme (2.4-6) unstable; urgency=low
-+
-+  * QA upload.
-+  * This package is orphaned as of #353388, so setting the Maintainer field
-+    accordingly to QA.
-+  * Fix typo in package description (Closes: #363215).
-+  * Get rid of the /usr/doc link (Closes: #359371).
-+  * Stop echoing warnings and stuff on console in postinst. Debconf should be
-+    used instead. Somebody please fix this.
-+  * Bumped Standards version to 3.7.2. No changes needed.
-+  * Fixed manual section 
-+
-+ -- Amaya Rodrigo Sastre <amaya@debian.org>  Wed, 12 Jul 2006 18:47:31 +0200
-+
-+crashme (2.4-5) unstable; urgency=low
-+
-+  * New maintainer (Closes: #81889)
-+  * Bump up Standards-Version
-+  * Add Build-Depends (Closes: #70344)
-+  * Remove pddet from the package since it has no manpage and does not seem to
-+    be useful, even for the build process.
-+
-+ -- Aaron Lehmann <aaronl@vitelus.com>  Wed, 10 Jan 2001 19:29:36 -0800
-+
-+crashme (2.4-4) unstable; urgency=low
-+
-+  * Replaced execl call with execlp. (Fixes bug #37304)
-+  * Removed access to an obsolete structure member. (Fixes bug #37446)
-+
-+ -- Jay Kominek <jay.kominek@colorado.edu>  Tue, 25 May 1999 09:32:17 -0600
-+
-+crashme (2.4-3) unstable; urgency=low
-+
-+  * Switched binary-arch and binary-indep to what they should be.
-+    (Fixes bug #25452)
-+
-+ -- Jay Kominek <jkominek@debian.org>  Thu,  6 Aug 1998 03:34:49 -0600
-+
-+crashme (2.4-2) unstable; urgency=low
-+
-+  * Changed extended package description to format sanely in dselect.
-+    (Fixes bug #23666)
-+
-+ -- Jay Kominek <jkominek@debian.org>  Thu, 18 Jun 1998 02:57:30 -0400
-+
-+crashme (2.4-1) unstable; urgency=low
-+
-+  * Initial Release.
-+
-+ -- Jay Kominek <jkominek@debian.org>  Sat,  6 Jun 1998 19:12:44 -0400
---- crashme-2.4.orig/crashme.c
-+++ crashme-2.4/crashme.c
-@@ -151,6 +151,8 @@
- #include <unistd.h>
- #endif
- 
-+#include <sys/wait.h>
-+
- typedef void (*BADBOY)();
- 
- BADBOY badboy;
-@@ -260,10 +262,7 @@
- #else
-  struct sigaction act;
-  act.sa_handler = func;
-- act.sa_mask = 0;
--#ifdef linux
-- act.sa_restorer = 0;
--#endif /* linux */
-+ bzero(&act.sa_mask,sizeof(sigset_t));
-  act.sa_flags = SA_NOMASK;
- #ifdef SA_RESTART
-  act.sa_flags |= SA_RESTART;
-@@ -272,7 +271,7 @@
- #endif /* SA_ONESHOT */
- }
-  
--set_up_signals()
-+void set_up_signals()
- {my_signal(SIGILL,again_handler);
- #ifdef SIGTRAP
-  my_signal(SIGTRAP,again_handler);
-@@ -295,7 +294,7 @@
- 
- #endif
- 
--compute_badboy_1(n)
-+void compute_badboy_1(n)
-      long n;
- {long j;
-  if (malloc_flag == 1)
-@@ -347,7 +346,7 @@
- #endif
-   return((BADBOY)dat);}
- 
--compute_badboy()
-+void compute_badboy()
- {long n;
-  n = (nbytes < 0) ? - nbytes : nbytes;
-  if (incptr == 0)
-@@ -369,7 +368,7 @@
-                          the_data,(nbytes < 0) ? - nbytes : nbytes);
- */
- 
--try_one_crash()
-+void try_one_crash()
- {if (nbytes > 0)
-    (*badboy)();
-  else if (nbytes == 0)
-@@ -377,7 +376,7 @@
- 
- char *subprocess_ind = "subprocess";
-  
--main(argc,argv)
-+int main(argc,argv)
-      int argc; char **argv;
- {long nsubs,hrs,mns,scs,tflag,j,m;
-  note_buffer = (char *) malloc(512);
-@@ -411,21 +410,23 @@
-     note(1);
-     record_note();
-     if (strchr(argv[4],':'))
--      {sscanf(argv[4],"%d:%d:%d",&hrs,&mns,&scs);
-+      {sscanf(argv[4],"%ld:%ld:%ld",&hrs,&mns,&scs);
-        tflag = 1;
-        nsubs = (((hrs * 60) + mns) * 60) + scs;
--       sprintf(notes,"Subprocess run for %d seconds (%d %02d:%02d:%02d)",
-+       sprintf(notes,"Subprocess run for %ld seconds (%ld %02ld:%02ld:%02ld)",
- 	       nsubs, hrs / 24, hrs % 24,mns,scs);}
-     else
-       {tflag = 0;
-        nsubs = atol(argv[4]);
--       sprintf(notes,"Creating %d crashme subprocesses",nsubs);}
-+       sprintf(notes,"Creating %ld crashme subprocesses",nsubs);}
-     note(1);
-     vfork_main(tflag,nsubs,argv[0],argv[1],atol(argv[2]),argv[3]);}
-  else
-    {sprintf(notes,
- 	    "crashme [+]<nbytes>[.inc] <srand> <ntrys> [nsub] [verbose]");
--    note(0);}}
-+    note(0);}
-+ return 0;
-+}
- 
- void copyright_note(n)
-      long n;
-@@ -440,19 +441,19 @@
- {char *ptr;
-  copyright_note(3);
-  nbytes = atol(argv[1]);
-- if (ptr = strchr(argv[1],'.'))
-+ if ((ptr = strchr(argv[1],'.')))
-    incptr = atol(&ptr[1]);
-  if (argv[1][0] == '+') malloc_flag = 1;
-  nseed = atol(argv[2]);
-  ntrys = atol(argv[3]);
-- sprintf(notes,"crashme %s%ld.%d %ld %ld",
-+ sprintf(notes,"crashme %s%ld.%ld %ld %ld",
- 	 (malloc_flag == 0) ? "" : "+",nbytes,incptr,nseed,ntrys);
-  note(3);
-  record_note();
-  if (malloc_flag == 0)
-    {the_data = bad_malloc((nbytes < 0) ? -nbytes : nbytes);
-     badboy = castaway(the_data);
--    sprintf(notes,"Badboy at %d. 0x%X",badboy,badboy);
-+    sprintf(notes,"Badboy at %d. 0x%X",(int) badboy,(unsigned int) badboy);
-     note(3);}
-  srand(nseed);
- #ifdef WIN32
-@@ -481,9 +482,9 @@
-  for(i=0;i<ntrys;++i)
-    {compute_badboy();
-     if (offset)
--      sprintf(notes,"try %d, offset %d",i,offset);
-+      sprintf(notes,"try %d, offset %ld",i,offset);
-     else if (malloc_flag == 1)
--      sprintf(notes,"try %d, Badboy at %d. 0x%X",i,badboy,badboy);
-+      sprintf(notes,"try %d, Badboy at %d. 0x%X",i,(int) badboy,(unsigned int) badboy);
-     else
-       sprintf(notes,"try %d",i);
-     note(5);
-@@ -514,7 +515,7 @@
- 
- struct status_list *slist = NULL;
- 
--record_status(n)
-+int record_status(n)
-      long n;
- {struct status_list *l;
-  for(l=slist;l != NULL; l = l->next)
-@@ -527,13 +528,13 @@
-  slist = l;
-  return(1);}
- 
--summarize_status()
-+void summarize_status()
- {struct status_list *l;
-  sprintf(notes,"exit status ... number of cases");
-  note(2);
-  for(l=slist;l != NULL; l = l->next)
-    {sprintf(notes,"exit status ... number of cases");
--    sprintf(notes,"%11d ... %5d",l->status,l->count);
-+    sprintf(notes,"%11ld ... %5ld",l->status,l->count);
-     note(2);}}
- 
- #ifndef WIN32
-@@ -552,7 +553,7 @@
-  if (monitor_active)
-    {++monitor_count;
-     if (monitor_count >= monitor_limit)
--      {sprintf(notes,"time limit reached on pid %d 0x%X. using kill.",
-+      {sprintf(notes,"time limit reached on pid %ld 0x%lX. using kill.",
- 	       monitor_pid,monitor_pid);
-        note(3);
-        status = kill(monitor_pid,SIGKILL);
-@@ -581,24 +582,24 @@
-    {my_signal(SIGALRM,monitor_fcn);
-     alarm(monitor_period);}
-  time(&before_time);
-- sprintf(arg5,"%d",verbose_level);
-+ sprintf(arg5,"%ld",verbose_level);
-  for(j=0;j<n;++j)
--   {sprintf(arg2,"%d",sr+j);
--    sprintf(arg4,"%d",j+1);
-+   {sprintf(arg2,"%ld",sr+j);
-+    sprintf(arg4,"%ld",j+1);
- #ifdef VMS
-     status = vfork();
- #else
-     status = fork();
- #endif
-     if (status == 0)
--      {status = execl(cmd,cmd,nb,arg2,nt,arg4,arg5,subprocess_ind,0);
-+      {status = execlp(cmd,cmd,nb,arg2,nt,arg4,arg5,subprocess_ind,NULL);
-        if (status == -1)
- 	 {perror(cmd);
- 	  exit(1);}}
-     else if (status < 0)
-       perror(cmd);
-     else
--      {sprintf(notes,"pid = %d 0x%X (subprocess %d)",status,status,j+1);
-+      {sprintf(notes,"pid = %d 0x%X (subprocess %ld)",status,status,j+1);
-        note(3);
-        if (seq == 1)
- 	 {monitor_pid = status;
-@@ -606,19 +607,19 @@
- 	  monitor_active = 1;
- 	  while((pid = wait(&status)) > 0)
- 	    {monitor_active = 0;
--	     sprintf(notes,"pid %d 0x%X exited with status %d",pid,pid,status);
-+	     sprintf(notes,"pid %ld 0x%lX exited with status %d",pid,pid,status);
- 	     note(3);
- 	     record_status(status);}}
-        if (tflag == 1)
- 	 {time(&after_time);
- 	  total_time = after_time - before_time;
- 	  if (total_time >= nsubs)
--	    {sprintf(notes,"Time limit reached after run %d",j+1);
-+	    {sprintf(notes,"Time limit reached after run %ld",j+1);
- 	     note(2);
- 	     break;}}}}
-  if (seq == 0)
-    while((pid = wait(&status)) > 0)
--     {sprintf(notes,"pid %d 0x%X exited with status %d",pid,pid,status);
-+     {sprintf(notes,"pid %ld 0x%lX exited with status %d",pid,pid,status);
-       note(3);
-       record_status(status);}
-  time(&after_time);
-@@ -632,7 +633,7 @@
-  hrs = hrs % 24;
-  open_record();
-  sprintf(notes,
--	 "Test complete, total real time: %d seconds (%d %02d:%02d:%02d)",
-+	 "Test complete, total real time: %ld seconds (%ld %02ld:%02ld:%02ld)",
- 	 total_time,dys,hrs,mns,scs);
-  note(1);
-  summarize_status();
diff --git a/client/tests/crashme/crashme_2.4.orig.tar.bz2 b/client/tests/crashme/crashme_2.4.orig.tar.bz2
deleted file mode 100644
index 11edc9a..0000000
--- a/client/tests/crashme/crashme_2.4.orig.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/ctcs/control b/client/tests/ctcs/control
deleted file mode 100644
index c105344..0000000
--- a/client/tests/ctcs/control
+++ /dev/null
@@ -1,19 +0,0 @@
-AUTHOR = """
-Manas Kumar Nayak (maknayak@in.ibm.com) (original code)
-Lucas Meneghel Rodrigues (lucasmr@br.ibm.com) (rewrite)
-Cao, Chen <kcao@redhat.com> (use ctcs2 and port it to 64)
-Lucas Meneghel Rodrigues (lmr@redhat.com) (use ctcs new source repo)
-"""
-NAME = "CTCS"
-TEST_TYPE = "CLIENT"
-TEST_CLASS = "HARDWARE"
-TEST_CATEGORY = "BENCHMARK"
-TIME = "MEDIUM"
-DOC = """
-Executes CTCS for the specified period of time. You can also provide a Cerberus
-test control file of your own through the parameter tcf_contents.
-
-see https://github.com/autotest/ctcs
-"""
-
-job.run_test(url='ctcs', length='1h', tc_opt='-k -C -a')
diff --git a/client/tests/ctcs/ctcs.py b/client/tests/ctcs/ctcs.py
deleted file mode 100644
index 273c157..0000000
--- a/client/tests/ctcs/ctcs.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import os, shutil, glob, logging
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class ctcs(test.test):
-    """
-    This autotest module runs CTCS (Cerberus Test Control System), which is now
-    maintained at a new location, since both CTCS and CTCS2 on SourceForge
-    were abandoned.
-
-    The original test suite (Cerberus Test Control System) was developed for
-    the now-extinct VA Linux manufacturing system. It has several hardware
-    and software stress tests that can be run in parallel. It has a control
-    file system that allows testers to specify the sorts of tests that they
-    want to see executed. It is an excellent stress test for hardware and the
-    kernel.
-
-    @author Manas Kumar Nayak (maknayak@in.ibm.com) (original code)
-    @author Lucas Meneghel Rodrigues (lucasmr@br.ibm.com) (rewrite - ctcs)
-    @author Cao, Chen (kcao@redhat.com) (use ctcs2 and port it to 64)
-    @author Lucas Meneghel Rodrigues (lmr@redhat.com) (use ctcs new source repo)
-    @see: https://github.com/autotest/ctcs
-    """
-    version = 3
-
-    def initialize(self):
-        """
-        Sets the overall failure counter for the test.
-        """
-        self.nfail = 0
-
-
-    def setup(self, tarball='ctcs.tar.bz2', length='4h', tc_opt='-k',
-              tcf_contents=None):
-        """
-        Builds the test suite, and sets up the control file that is going to
-        be processed by the ctcs engine.
-        @param tarball: CTCS tarball
-        @param length: The amount of time we'll run the test suite
-        @param tcf_contents: If the user wants to specify the contents of
-                the CTCS control file, they can do so through this parameter.
-                If this parameter is provided, length is ignored.
-        """
-        ctcs_tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(ctcs_tarball, self.srcdir)
-
-        os.chdir(self.srcdir)
-        utils.make()
-
-        # Here we define the cerberus suite control file that will be used.
-        # It will be kept on the debug directory for further analysis.
-        self.tcf_path = os.path.join(self.debugdir, 'autotest.tcf')
-
-        if not tcf_contents:
-            logging.info('Generating CTCS control file')
-            # Note about the control file generation command - we are creating
-            # a control file with the default tests, except for the kernel
-            # compilation test (flag -k).
-            g_cmd = ('./newburn-generator %s %s> %s' %
-                     (tc_opt, length, self.tcf_path))
-            utils.system(g_cmd)
-        else:
-            logging.debug('TCF file contents supplied, ignoring test length'
-                          ' altogether')
-            tcf = open(self.tcf_path, 'w')
-            tcf.write(tcf_contents)
-
-        logging.debug('Contents of the control file that will be passed to '
-                      'CTCS:')
-        tcf = open(self.tcf_path, 'r')
-        buf = tcf.read()
-        logging.debug(buf)
-
-
-    def run_once(self):
-        """
-        Runs the test, with the appropriate control file.
-        """
-        os.chdir(self.srcdir)
-        try:
-            utils.system('./run %s' % self.tcf_path)
-        except:
-            self.nfail += 1
-        log_base_path = os.path.join(self.srcdir, 'log')
-        log_dir = glob.glob(os.path.join(log_base_path,
-                                         'autotest.tcf.log.*'))[0]
-        logging.debug('Copying %s log directory to results dir', log_dir)
-        dst = os.path.join(self.resultsdir, os.path.basename(log_dir))
-        shutil.move(log_dir, dst)
-
-
-    def cleanup(self):
-        """
-        Cleans up source directory and raises on failure.
-        """
-        if os.path.isdir(self.srcdir):
-            shutil.rmtree(self.srcdir)
-        if self.nfail != 0:
-            raise error.TestFail('CTCS execution failed')
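
As a concrete use of the tcf_contents escape hatch described in setup() above, a control file could hand CTCS a pre-written Cerberus control file instead of generating one; the path below is hypothetical:

    # When tcf_contents is supplied, the 'length' argument is ignored.
    with open('/usr/local/share/my-burnin.tcf') as tcf:
        contents = tcf.read()
    job.run_test(url='ctcs', tcf_contents=contents, tag='custom-tcf')
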
diff --git a/client/tests/ctcs/ctcs.tar.bz2 b/client/tests/ctcs/ctcs.tar.bz2
deleted file mode 100644
index 316b37d..0000000
--- a/client/tests/ctcs/ctcs.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/cyclictest/README b/client/tests/cyclictest/README
deleted file mode 100644
index 0db88f4..0000000
--- a/client/tests/cyclictest/README
+++ /dev/null
@@ -1,32 +0,0 @@
-cyclictest -t 5 -p 80 -n -q -l 10
-
-runs a test with 5 threads, stops after 10 loops and outputs:
-
-T: 0 ( 2215) P:80 I:    1000 C:      10 Min:      31 Act:      33 Avg:      33 Max:      43
-T: 1 ( 2216) P:79 I:    1500 C:      10 Min:      22 Act:      22 Avg:      36 Max:      61
-T: 2 ( 2217) P:78 I:    2000 C:      10 Min:      27 Act:      33 Avg:      36 Max:      50
-T: 3 ( 2218) P:77 I:    2500 C:      10 Min:      23 Act:      37 Avg:      38 Max:      59
-T: 4 ( 2219) P:76 I:    3000 C:      10 Min:      26 Act:      48 Avg:      36 Max:      48
-
-All numbers are in microseconds. You get the minimum, maximum and average latency for each thread.
-
-I use this for automated regression testing. 
-
-The -v option outputs:
-
-       0:       0:       0
-       0:       1:      45
-       0:       2:      41
-       0:       3:      31
-       0:       4:      31
-       0:       5:      34
-       1:       0:       0
-       1:       1:      29
-       1:       2:      33
-       1:       3:      33
-...
-where the first column is the thread, the second column is the loop
-counter and the third is the latency value for this step. You can use
-this for your own statistics or for latency distribution plots.
-
-	tglx
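
The n:c:v verbose format described above is easy to post-process. A minimal sketch that folds captured -v output into per-thread (min, avg, max) latency, assuming the output has already been read into a string:

    def summarize(verbose_output):
        # Lines look like "   0:       5:      34" -> thread 0, loop 5, 34 us.
        per_thread = {}
        for line in verbose_output.splitlines():
            parts = line.split(':')
            if len(parts) != 3:
                continue
            try:
                thread, _, latency = (int(p) for p in parts)
            except ValueError:
                continue
            per_thread.setdefault(thread, []).append(latency)
        return {t: (min(v), sum(v) / float(len(v)), max(v))
                for t, v in per_thread.items()}
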
diff --git a/client/tests/cyclictest/control b/client/tests/cyclictest/control
deleted file mode 100644
index 5b79b03..0000000
--- a/client/tests/cyclictest/control
+++ /dev/null
@@ -1,12 +0,0 @@
-AUTHOR = 'Martin Bligh <mbligh@google.com>'
-DOC = '''
-description rt test utils
-URL http://www.kernel.org/pub/scm/linux/kernel/git/tglx/rt-tests.git
-'''
-NAME = 'cyclic_test'
-TIME = 'MEDIUM' ## ?
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-
-job.run_test('cyclictest')
diff --git a/client/tests/cyclictest/cyclictest.py b/client/tests/cyclictest/cyclictest.py
deleted file mode 100644
index 8e1b40d..0000000
--- a/client/tests/cyclictest/cyclictest.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import os
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import utils
-
-
-class cyclictest(test.test):
-    version = 2
-    preserve_srcdir = True
-
-    # git://git.kernel.org/pub/scm/linux/kernel/git/tglx/rt-tests.git
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def execute(self, args = '-t 10 -l 100000'):
-        utils.system(self.srcdir + '/cyclictest ' + args)
diff --git a/client/tests/cyclictest/help b/client/tests/cyclictest/help
deleted file mode 100644
index 0e38ce2..0000000
--- a/client/tests/cyclictest/help
+++ /dev/null
@@ -1,114 +0,0 @@
-Cyclictest is a program to test the performance of high-resolution timers on Linux systems.
-
-Installation
-
-Get the latest source tarball, untar into a directory of your choice and run make in the source directory.
-
-You can run the resulting binary from there or install it.
-
-Run it
-
-Make sure to be root or use sudo to run cyclictest.
-
-Without parameters cyclictest creates one thread with a 1ms interval timer.
-
-cyclictest -h provides help text for the various options
-	-b USEC	--breaktrace=USEC	send break trace command when latency > USEC
-	-c CLOCK	--clock=CLOCK	select clock
-
-		0 = CLOCK_MONOTONIC (default)
-		1 = CLOCK_REALTIME
-
-	-d DIST	--distance=DIST	distance of thread intervals in us default=500
-	-i INTV	--interval=INTV	base interval of thread in us default=1000
-	-l LOOPS	--loops=LOOPS	number of loops: default=0(endless)
-	-n	--nanosleep	use clock_nanosleep
-	-p PRIO	--prio=PRIO	priority of highest prio thread
-	-q	--quiet	print only a summary on exit
-	-r	--relative	use relative timer instead of absolute
-	-s	--system	use sys_nanosleep and sys_setitimer
-	-t NUM	--threads=NUM	number of threads: default=1
-	-v	--verbose	output values on stdout for statistics
-
-format: n:c:v n=tasknum c=count v=value in us
-
-The -b option is a debugging option to control the latency tracer in the realtime
-preemption patch. It is useful for tracking down unexpectedly large latencies on a system.
-This option only works with:
-
-    * CONFIG_PREEMPT_RT=y
-    * CONFIG_LATENCY_TIMING=y
-    * CONFIG_LATENCY_TRACE=y 
-
-kernel configuration options enabled. The USEC parameter to the -b option defines
-a maximum latency value, which is compared against the actual latencies of the test.
-Once the measured latency is higher than the given maximum, the kernel tracer and
-cyclictest are stopped. The trace can be read from /proc/latency_trace:
-
-	mybox# cat /proc/latency_trace >trace.log
-
-Please be aware that the tracer adds significant overhead to the kernel, so the
-latencies will be much higher than on a kernel with latency tracing disabled.
-
-	Using -c CLOCK selects the clock, which is used
-
-    * 0 selects CLOCK_MONOTONIC, which is the monotonic increasing system time. 
-	This is the default selection
-    * 1 selects CLOCK_REALTIME, which is the time of day time. 
-
-CLOCK_REALTIME can be set by settimeofday, while CLOCK_MONOTONIC can not be 
-modified by the user.
-
-This option has no influence when the -s option is given.
-
--d DIST set the distance of thread intervals in microseconds (default is 500us)
-
-When cyclictest is called with the -t option and more than one thread is created, this distance value is added to the interval of the threads.
-
-Interval(thread N) = Interval(thread N-1) + DIST
-
--i INTV set the base interval of the thread(s) in microseconds (default is 1000us)
-
-This sets the interval of the first thread. See also -d.
-
--l LOOPS set the number of loops (default = 0(endless))
-
-This option is useful for automated tests with a given number of test cycles. cyclictest is stopped once the number of timer intervals has been reached.
-
--n use clock_nanosleep instead of posix interval timers
-
-Setting this option runs the tests with clock_nanosleep instead of posix interval timers.
-
--p PRIO set the priority of the first thread
-
-The given priority is set to the first test thread. Each further thread gets a lower priority:
-
-Priority(Thread N) = Priority(Thread N-1) - 1
-
--q run the tests quiet and print only a summary on exit
-
-Useful for automated tests, where only the summary output needs to be captured
-
--r use relative timers instead of absolute
-
-The default behaviour of the tests is to use absolute timers. This option is there for completeness and should not be used for reproducible tests.
-
--s use sys_nanosleep and sys_setitimer instead of posix timers
-
-Note that -s can only be used with one thread, because itimers are per-process and not per-thread. -s in combination with -n uses the nanosleep syscall and is not restricted to one thread.
-
--t NUM set the number of test threads (default is 1)
-
-Create NUM test threads. See -d, -i and -p for further information.
-
--v output values on stdout for statistics
-
-This option is used to gather statistical information about the latency distribution. The output is sent to stdout. The output format is
-
-n:c:v
-
-where n=task number c=count v=latency value in us
-
-Use this option in combination with -l
-
-The OSADL Realtime LiveCD project (http://www.osadl.org/projects-live-cd.0.html) provides a script to plot the latency distribution. 
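
Putting the interval and priority rules above together, the per-thread parameters for the README example of this test (cyclictest -t 5 -p 80, default -i 1000 -d 500) work out as follows; a worked sketch, not cyclictest's own code:

    def thread_params(threads=5, prio=80, interval_us=1000, distance_us=500):
        # Interval(N) = Interval(N-1) + DIST; Priority(N) = Priority(N-1) - 1
        return [{'thread': n,
                 'priority': prio - n,
                 'interval_us': interval_us + n * distance_us}
                for n in range(threads)]

    for p in thread_params():
        print(p)   # thread 0: prio 80 / 1000 us ... thread 4: prio 76 / 3000 us
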
diff --git a/client/tests/cyclictest/src/Makefile b/client/tests/cyclictest/src/Makefile
deleted file mode 100644
index 6360c92..0000000
--- a/client/tests/cyclictest/src/Makefile
+++ /dev/null
@@ -1,11 +0,0 @@
-
-TARGET=cyclictest
-FLAGS= -Wall -Wno-nonnull -O2
-LIBS = -lpthread -lrt
-
-all: cyclictest.c
-	$(CROSS_COMPILE)gcc $(FLAGS) $^ -o $(TARGET) $(LIBS)
-
-clean:
-	rm -f $(TARGET) *.o .depend *.*~
-
diff --git a/client/tests/cyclictest/src/cyclictest.c b/client/tests/cyclictest/src/cyclictest.c
deleted file mode 100644
index cf1f08b..0000000
--- a/client/tests/cyclictest/src/cyclictest.c
+++ /dev/null
@@ -1,631 +0,0 @@
-/*
- * High resolution timer test software
- *
- * (C) 2005-2007 Thomas Gleixner <tglx@linutronix.de>
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License Version
- * 2 as published by the Free Software Foundation.
- *
- */
-
-#define VERSION_STRING "V 0.15"
-
-#include <fcntl.h>
-#include <getopt.h>
-#include <pthread.h>
-#include <signal.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <string.h>
-#include <time.h>
-#include <unistd.h>
-
-#include <linux/unistd.h>
-
-#include <sys/prctl.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <sys/time.h>
-
-#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
-
-/* Ugly, but .... */
-#define gettid() syscall(__NR_gettid)
-#define sigev_notify_thread_id _sigev_un._tid
-
-extern int clock_nanosleep(clockid_t __clock_id, int __flags,
-			   __const struct timespec *__req,
-			   struct timespec *__rem);
-
-#define USEC_PER_SEC		1000000
-#define NSEC_PER_SEC		1000000000
-
-#define MODE_CYCLIC		0
-#define MODE_CLOCK_NANOSLEEP	1
-#define MODE_SYS_ITIMER		2
-#define MODE_SYS_NANOSLEEP	3
-#define MODE_SYS_OFFSET		2
-
-#define TIMER_RELTIME		0
-
-/* Must be power of 2 ! */
-#define VALBUF_SIZE		16384
-
-#define KVARS			32
-#define KVARNAMELEN		32
-
-/* Struct to transfer parameters to the thread */
-struct thread_param {
-	int prio;
-	int mode;
-	int timermode;
-	int signal;
-	int clock;
-	unsigned long max_cycles;
-	struct thread_stat *stats;
-	int bufmsk;
-	unsigned long interval;
-};
-
-/* Struct for statistics */
-struct thread_stat {
-	unsigned long cycles;
-	unsigned long cyclesread;
-	long min;
-	long max;
-	long act;
-	double avg;
-	long *values;
-	pthread_t thread;
-	int threadstarted;
-	int tid;
-};
-
-static int shutdown;
-static int tracelimit = 0;
-static int ftrace = 0;
-static int oldtrace = 0;
-
-/* Backup of kernel variables that we modify */
-static struct kvars {
-	char name[KVARNAMELEN];
-	int value;
-} kv[KVARS];
-
-static char *procfileprefix = "/proc/sys/kernel/";
-
-static int kernvar(int mode, char *name, int *value)
-{
-	int retval = 1;
-	int procfilepath;
-	char procfilename[128];
-
-	strncpy(procfilename, procfileprefix, sizeof(procfilename));
-	strncat(procfilename, name,
-		sizeof(procfilename) - sizeof(procfileprefix));
-	procfilepath = open(procfilename, mode);
-	if (procfilepath >= 0) {
-		char buffer[32];
-
-		if (mode == O_RDONLY) {
-			if (read(procfilepath, buffer, sizeof(buffer)) > 0) {
-				char *endptr;
-				*value = strtol(buffer, &endptr, 0);
-				if (endptr != buffer)
-					retval = 0;
-			}
-		} else if (mode == O_WRONLY) {
-			snprintf(buffer, sizeof(buffer), "%d\n", *value);
-			if (write(procfilepath, buffer, strlen(buffer))
-			    == strlen(buffer))
-				retval = 0;
-		}
-		close(procfilepath);
-	}
-	return retval;
-}
-
-static void setkernvar(char *name, int value)
-{
-	int i;
-	int oldvalue;
-
-	if (kernvar(O_RDONLY, name, &oldvalue))
-		fprintf(stderr, "could not retrieve %s\n", name);
-	else {
-		for (i = 0; i < KVARS; i++) {
-			if (!strcmp(kv[i].name, name))
-				break;
-			if (kv[i].name[0] == '\0') {
-				strncpy(kv[i].name, name, sizeof(kv[i].name));
-				kv[i].value = oldvalue;
-				break;
-			}
-		}
-		if (i == KVARS)
-			fprintf(stderr, "could not backup %s (%d)\n", name,
-				oldvalue);
-	}
-	if (kernvar(O_WRONLY, name, &value))
-		fprintf(stderr, "could not set %s to %d\n", name, value);
-}
-
-static void restorekernvars(void)
-{
-	int i;
-
-	for (i = 0; i < KVARS; i++) {
-		if (kv[i].name[0] != '\0') {
-			if (kernvar(O_WRONLY, kv[i].name, &kv[i].value))
-				fprintf(stderr, "could not restore %s to %d\n",
-					kv[i].name, kv[i].value);
-		}
-	}
-}
-
-static inline void tsnorm(struct timespec *ts)
-{
-	while (ts->tv_nsec >= NSEC_PER_SEC) {
-		ts->tv_nsec -= NSEC_PER_SEC;
-		ts->tv_sec++;
-	}
-}
-
-static inline long calcdiff(struct timespec t1, struct timespec t2)
-{
-	long diff;
-	diff = USEC_PER_SEC * ((int) t1.tv_sec - (int) t2.tv_sec);
-	diff += ((int) t1.tv_nsec - (int) t2.tv_nsec) / 1000;
-	return diff;
-}
-
-/*
- * timer thread
- *
- * Modes:
- * - clock_nanosleep based
- * - cyclic timer based
- *
- * Clock:
- * - CLOCK_MONOTONIC
- * - CLOCK_REALTIME
- * - CLOCK_MONOTONIC_HR
- * - CLOCK_REALTIME_HR
- *
- */
-void *timerthread(void *param)
-{
-	struct thread_param *par = param;
-	struct sched_param schedp;
-	struct sigevent sigev;
-	sigset_t sigset;
-	timer_t timer;
-	struct timespec now, next, interval;
-	struct itimerval itimer;
-	struct itimerspec tspec;
-	struct thread_stat *stat = par->stats;
-	int policy = par->prio ? SCHED_FIFO : SCHED_OTHER;
-	int stopped = 0;
-
-	interval.tv_sec = par->interval / USEC_PER_SEC;
-	interval.tv_nsec = (par->interval % USEC_PER_SEC) * 1000;
-
-	if (tracelimit) {
-		setkernvar("trace_all_cpus", 1);
-		setkernvar("trace_freerunning", 1);
-		setkernvar("trace_print_on_crash", 0);
-		setkernvar("trace_user_triggered", 1);
-		setkernvar("trace_user_trigger_irq", -1);
-		setkernvar("trace_verbose", 0);
-		setkernvar("preempt_thresh", 0);
-		setkernvar("wakeup_timing", 0);
-		setkernvar("preempt_max_latency", 0);
-		if (ftrace)
-			setkernvar("mcount_enabled", 1);
-		setkernvar("trace_enabled", 1);
-	}
-
-	stat->tid = gettid();
-
-	sigemptyset(&sigset);
-	sigaddset(&sigset, par->signal);
-	sigprocmask(SIG_BLOCK, &sigset, NULL);
-
-	if (par->mode == MODE_CYCLIC) {
-		sigev.sigev_notify = SIGEV_THREAD_ID | SIGEV_SIGNAL;
-		sigev.sigev_signo = par->signal;
-		sigev.sigev_notify_thread_id = stat->tid;
-		timer_create(par->clock, &sigev, &timer);
-		tspec.it_interval = interval;
-	}
-
-	memset(&schedp, 0, sizeof(schedp));
-	schedp.sched_priority = par->prio;
-	sched_setscheduler(0, policy, &schedp);
-
-	/* Get current time */
-	clock_gettime(par->clock, &now);
-	next = now;
-	next.tv_sec++;
-
-	if (par->mode == MODE_CYCLIC) {
-		if (par->timermode == TIMER_ABSTIME)
-			tspec.it_value = next;
-		else {
-			tspec.it_value.tv_nsec = 0;
-			tspec.it_value.tv_sec = 1;
-		}
-		timer_settime(timer, par->timermode, &tspec, NULL);
-	}
-
-	if (par->mode == MODE_SYS_ITIMER) {
-		itimer.it_value.tv_sec = 1;
-		itimer.it_value.tv_usec = 0;
-		itimer.it_interval.tv_sec = interval.tv_sec;
-		itimer.it_interval.tv_usec = interval.tv_nsec / 1000;
-		setitimer (ITIMER_REAL,  &itimer, NULL);
-	}
-
-	stat->threadstarted++;
-
-	if (tracelimit) {
-		if (oldtrace)
-			gettimeofday(0,(struct timezone *)1);
-		else
-			prctl(0, 1);
-	}
-	while (!shutdown) {
-
-		long diff;
-		int sigs;
-
-		/* Wait for next period */
-		switch (par->mode) {
-		case MODE_CYCLIC:
-		case MODE_SYS_ITIMER:
-			if (sigwait(&sigset, &sigs) < 0)
-				goto out;
-			break;
-
-		case MODE_CLOCK_NANOSLEEP:
-			if (par->timermode == TIMER_ABSTIME)
-				clock_nanosleep(par->clock, TIMER_ABSTIME,
-						&next, NULL);
-			else {
-				clock_gettime(par->clock, &now);
-				clock_nanosleep(par->clock, TIMER_RELTIME,
-						&interval, NULL);
-				next.tv_sec = now.tv_sec + interval.tv_sec;
-				next.tv_nsec = now.tv_nsec + interval.tv_nsec;
-				tsnorm(&next);
-			}
-			break;
-
-		case MODE_SYS_NANOSLEEP:
-			clock_gettime(par->clock, &now);
-			nanosleep(&interval, NULL);
-			next.tv_sec = now.tv_sec + interval.tv_sec;
-			next.tv_nsec = now.tv_nsec + interval.tv_nsec;
-			tsnorm(&next);
-			break;
-		}
-		clock_gettime(par->clock, &now);
-
-		diff = calcdiff(now, next);
-		if (diff < stat->min)
-			stat->min = diff;
-		if (diff > stat->max)
-			stat->max = diff;
-		stat->avg += (double) diff;
-
-		if (!stopped && tracelimit && (diff > tracelimit)) {
-			stopped++;
-			if (oldtrace)
-				gettimeofday(0,0);
-			else
-				prctl(0, 0);
-			shutdown++;
-		}
-		stat->act = diff;
-		stat->cycles++;
-
-		if (par->bufmsk)
-			stat->values[stat->cycles & par->bufmsk] = diff;
-
-		next.tv_sec += interval.tv_sec;
-		next.tv_nsec += interval.tv_nsec;
-		tsnorm(&next);
-
-		if (par->max_cycles && par->max_cycles == stat->cycles)
-			break;
-	}
-
-out:
-	if (par->mode == MODE_CYCLIC)
-		timer_delete(timer);
-
-	if (par->mode == MODE_SYS_ITIMER) {
-		itimer.it_value.tv_sec = 0;
-		itimer.it_value.tv_usec = 0;
-		itimer.it_interval.tv_sec = 0;
-		itimer.it_interval.tv_usec = 0;
-		setitimer (ITIMER_REAL,  &itimer, NULL);
-	}
-
-	/* switch to normal */
-	schedp.sched_priority = 0;
-	sched_setscheduler(0, SCHED_OTHER, &schedp);
-
-	stat->threadstarted = -1;
-
-	return NULL;
-}
-
-
-/* Print usage information */
-static void display_help(void)
-{
-	printf("cyclictest %s\n", VERSION_STRING);
-	printf("Usage:\n"
-	       "cyclictest <options>\n\n"
-	       "-b USEC  --breaktrace=USEC send break trace command when latency > USEC\n"
-	       "-c CLOCK --clock=CLOCK     select clock\n"
-	       "                           0 = CLOCK_MONOTONIC (default)\n"
-	       "                           1 = CLOCK_REALTIME\n"
-	       "-d DIST  --distance=DIST   distance of thread intervals in us default=500\n"
-	       "-f                         function trace (when -b is active)\n"
-	       "-i INTV  --interval=INTV   base interval of thread in us default=1000\n"
-	       "-l LOOPS --loops=LOOPS     number of loops: default=0(endless)\n"
-	       "-n       --nanosleep       use clock_nanosleep\n"
-	       "-p PRIO  --prio=PRIO       priority of highest prio thread\n"
-	       "-q       --quiet           print only a summary on exit\n"
-	       "-r       --relative        use relative timer instead of absolute\n"
-	       "-s       --system          use sys_nanosleep and sys_setitimer\n"
-	       "-t NUM   --threads=NUM     number of threads: default=1\n"
-	       "-v       --verbose         output values on stdout for statistics\n"
-	       "                           format: n:c:v n=tasknum c=count v=value in us\n");
-	exit(0);
-}
-
-static int use_nanosleep;
-static int timermode  = TIMER_ABSTIME;
-static int use_system;
-static int priority;
-static int num_threads = 1;
-static int max_cycles;
-static int clocksel = 0;
-static int verbose;
-static int quiet;
-static int interval = 1000;
-static int distance = 500;
-
-static int clocksources[] = {
-	CLOCK_MONOTONIC,
-	CLOCK_REALTIME,
-};
-
-/* Process commandline options */
-static void process_options (int argc, char *argv[])
-{
-	int error = 0;
-	for (;;) {
-		int option_index = 0;
-		/** Options for getopt */
-		static struct option long_options[] = {
-			{"breaktrace", required_argument, NULL, 'b'},
-			{"clock", required_argument, NULL, 'c'},
-			{"distance", required_argument, NULL, 'd'},
-			{"ftrace", no_argument, NULL, 'f'},
-			{"interval", required_argument, NULL, 'i'},
-			{"loops", required_argument, NULL, 'l'},
-			{"nanosleep", no_argument, NULL, 'n'},
-			{"priority", required_argument, NULL, 'p'},
-			{"quiet", no_argument, NULL, 'q'},
-			{"relative", no_argument, NULL, 'r'},
-			{"system", no_argument, NULL, 's'},
-			{"threads", required_argument, NULL, 't'},
-			{"verbose", no_argument, NULL, 'v'},
-			{"help", no_argument, NULL, '?'},
-			{NULL, 0, NULL, 0}
-		};
-		int c = getopt_long (argc, argv, "b:c:d:fi:l:np:qrst:v",
-			long_options, &option_index);
-		if (c == -1)
-			break;
-		switch (c) {
-		case 'b': tracelimit = atoi(optarg); break;
-		case 'c': clocksel = atoi(optarg); break;
-		case 'd': distance = atoi(optarg); break;
-		case 'f': ftrace = 1; break;
-		case 'i': interval = atoi(optarg); break;
-		case 'l': max_cycles = atoi(optarg); break;
-		case 'n': use_nanosleep = MODE_CLOCK_NANOSLEEP; break;
-		case 'p': priority = atoi(optarg); break;
-		case 'q': quiet = 1; break;
-		case 'r': timermode = TIMER_RELTIME; break;
-		case 's': use_system = MODE_SYS_OFFSET; break;
-		case 't': num_threads = atoi(optarg); break;
-		case 'v': verbose = 1; break;
-		case '?': error = 1; break;
-		}
-	}
-
-	if (clocksel < 0 || clocksel >= ARRAY_SIZE(clocksources))
-		error = 1;
-
-	if (priority < 0 || priority > 99)
-		error = 1;
-
-	if (num_threads < 1)
-		error = 1;
-
-	if (error)
-		display_help ();
-}
-
-static void check_kernel(void)
-{
-	size_t len;
-	char ver[256];
-	int fd, maj, min, sub;
-
-	fd = open("/proc/version", O_RDONLY, 0666);
-	len = read(fd, ver, 255);
-	close(fd);
-	ver[len-1] = 0x0;
-	sscanf(ver, "Linux version %d.%d.%d", &maj, &min, &sub);
-	if (maj == 2 && min == 6 && sub < 18)
-		oldtrace = 1;
-}
-
-static int check_timer(void)
-{
-	struct timespec ts;
-
-	if (clock_getres(CLOCK_MONOTONIC, &ts))
-		return 1;
-
-	return (ts.tv_sec != 0 || ts.tv_nsec != 1);
-}
-
-static void sighand(int sig)
-{
-	shutdown = 1;
-}
-
-static void print_stat(struct thread_param *par, int index, int verbose)
-{
-	struct thread_stat *stat = par->stats;
-
-	if (!verbose) {
-		if (quiet != 1) {
-			printf("T:%2d (%5d) P:%2d I:%ld C:%7lu "
-			       "Min:%7ld Act:%5ld Avg:%5ld Max:%8ld\n",
-			       index, stat->tid, par->prio, par->interval,
-			       stat->cycles, stat->min, stat->act,
-			       stat->cycles ?
-			       (long)(stat->avg/stat->cycles) : 0, stat->max);
-		}
-	} else {
-		while (stat->cycles != stat->cyclesread) {
-			long diff = stat->values[stat->cyclesread & par->bufmsk];
-			printf("%8d:%8lu:%8ld\n", index, stat->cyclesread, diff);
-			stat->cyclesread++;
-		}
-	}
-}
-
-int main(int argc, char **argv)
-{
-	sigset_t sigset;
-	int signum = SIGALRM;
-	int mode;
-	struct thread_param *par;
-	struct thread_stat *stat;
-	int i, ret = -1;
-
-	if (geteuid()) {
-		fprintf(stderr, "cyclictest: need to run as root!\n");
-		exit(-1);
-	}
-
-	process_options(argc, argv);
-
-	check_kernel();
-
-	if (check_timer())
-		fprintf(stderr, "WARNING: High resolution timers not available\n");
-
-	mode = use_nanosleep + use_system;
-
-	sigemptyset(&sigset);
-	sigaddset(&sigset, signum);
-	sigprocmask (SIG_BLOCK, &sigset, NULL);
-
-	signal(SIGINT, sighand);
-	signal(SIGTERM, sighand);
-
-	par = calloc(num_threads, sizeof(struct thread_param));
-	if (!par)
-		goto out;
-	stat = calloc(num_threads, sizeof(struct thread_stat));
-	if (!stat)
-		goto outpar;
-
-	for (i = 0; i < num_threads; i++) {
-		if (verbose) {
-			stat[i].values = calloc(VALBUF_SIZE, sizeof(long));
-			if (!stat[i].values)
-				goto outall;
-			par[i].bufmsk = VALBUF_SIZE - 1;
-		}
-
-		par[i].prio = priority;
-		if (priority)
-			priority--;
-		par[i].clock = clocksources[clocksel];
-		par[i].mode = mode;
-		par[i].timermode = timermode;
-		par[i].signal = signum;
-		par[i].interval = interval;
-		interval += distance;
-		par[i].max_cycles = max_cycles;
-		par[i].stats = &stat[i];
-		stat[i].min = 1000000;
-		stat[i].max = -1000000;
-		stat[i].avg = 0.0;
-		pthread_create(&stat[i].thread, NULL, timerthread, &par[i]);
-		stat[i].threadstarted = 1;
-	}
-
-	while (!shutdown) {
-		char lavg[256];
-		int fd, len, allstopped = 0;
-
-		if (!verbose && !quiet) {
-			fd = open("/proc/loadavg", O_RDONLY, 0666);
-			len = read(fd, &lavg, 255);
-			close(fd);
-			lavg[len-1] = 0x0;
-			printf("%s          \n\n", lavg);
-		}
-
-		for (i = 0; i < num_threads; i++) {
-
-			print_stat(&par[i], i, verbose);
-			if(max_cycles && stat[i].cycles >= max_cycles)
-				allstopped++;
-		}
-		usleep(10000);
-		if (shutdown || allstopped)
-			break;
-		if (!verbose && !quiet)
-			printf("\033[%dA", num_threads + 2);
-	}
-	ret = 0;
- outall:
-	shutdown = 1;
-	usleep(50000);
-	if (quiet)
-		quiet = 2;
-	for (i = 0; i < num_threads; i++) {
-		if (stat[i].threadstarted > 0)
-			pthread_kill(stat[i].thread, SIGTERM);
-		if (stat[i].threadstarted) {
-			pthread_join(stat[i].thread, NULL);
-			if (quiet)
-				print_stat(&par[i], i, 0);
-		}
-		if (stat[i].values)
-			free(stat[i].values);
-	}
-	free(stat);
- outpar:
-	free(par);
- out:
-	/* Be a nice program, cleanup */
-	restorekernvars();
-
-	exit(ret);
-}
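
The deleted timerthread() above is the heart of cyclictest: arm a periodic timer or sleep until an absolute deadline, then compare the actual wake-up time against that deadline and track min/avg/max latency. A rough Python sketch of the same measurement loop (illustration only; it has none of the RT scheduling, signal handling, or tracing hooks of the C tool):

import time

NSEC_PER_SEC = 1_000_000_000

def measure_jitter(interval_us=1000, loops=1000):
    """Sleep toward absolute deadlines and report wake-up latency in microseconds."""
    interval_ns = interval_us * 1000
    # First deadline one second from now, mirroring next.tv_sec++ in the C code.
    next_ns = time.clock_gettime_ns(time.CLOCK_MONOTONIC) + NSEC_PER_SEC
    mn, mx, total = float('inf'), float('-inf'), 0.0
    for _ in range(loops):
        now_ns = time.clock_gettime_ns(time.CLOCK_MONOTONIC)
        if next_ns > now_ns:
            # Relative sleep as an approximation of clock_nanosleep(TIMER_ABSTIME).
            time.sleep((next_ns - now_ns) / NSEC_PER_SEC)
        diff_us = (time.clock_gettime_ns(time.CLOCK_MONOTONIC) - next_ns) / 1000
        mn, mx, total = min(mn, diff_us), max(mx, diff_us), total + diff_us
        next_ns += interval_ns
    return mn, total / loops, mx

if __name__ == '__main__':
    print('Min/Avg/Max latency (us): %.1f / %.1f / %.1f' % measure_jitter())
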
diff --git a/client/tests/dbench/control b/client/tests/dbench/control
deleted file mode 100644
index 86cf024..0000000
--- a/client/tests/dbench/control
+++ /dev/null
@@ -1,21 +0,0 @@
-TIME="SHORT"
-AUTHOR = "Martin Bligh <mbligh@google.com>"
-DOC = """
-dbench is one of our standard kernel stress tests.  It produces filesystem
-load like netbench originally did, but involves no network system calls.
-Its results include throughput rates, which can be used for performance
-analysis.
-
-More information on dbench can be found here:
-http://samba.org/ftp/tridge/dbench/README
-
-Its configuration currently needs updating: it is a great test for the
-higher-level I/O layers but barely touches the disk right now.
-"""
-NAME = 'dbench'
-ATTRIBUTES = "suite:kernel_daily_benchmarks"
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-
-job.run_test('dbench')
diff --git a/client/tests/dbench/dbench-3.04.tar.gz b/client/tests/dbench/dbench-3.04.tar.gz
deleted file mode 100644
index c0bb2e2..0000000
--- a/client/tests/dbench/dbench-3.04.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/dbench/dbench.py b/client/tests/dbench/dbench.py
deleted file mode 100644
index 1591bfe..0000000
--- a/client/tests/dbench/dbench.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import os, re
-
-from autotest_lib.client.bin import utils, test
-
-class dbench(test.test):
-    version = 3
-
-    # http://samba.org/ftp/tridge/dbench/dbench-3.04.tar.gz
-    def setup(self, tarball='dbench-3.04.tar.gz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-
-        utils.system('patch -p1 < ../dbench_startup.patch')
-        utils.system('patch -p1 < ../dbench_ldflags.patch')
-        utils.configure()
-        utils.make()
-
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.results = []
-        self.dbench = os.path.join(self.srcdir, 'dbench')
-
-
-    def run_once(self, dir='.', nprocs=None, seconds=600, args=''):
-        if not nprocs:
-            nprocs = self.job.cpu_count()
-        loadfile = os.path.join(self.srcdir, 'client.txt')
-        cmd = '%s %s %s -D %s -c %s -t %d' % (self.dbench, nprocs, args,
-                                              dir, loadfile, seconds)
-        self.results = utils.system_output(cmd, retain_output=True)
-
-
-    def postprocess_iteration(self):
-        pattern = re.compile(r"Throughput (.*?) MB/sec (.*?) procs")
-        (throughput, procs) = pattern.findall(self.results)[0]
-        self.write_perf_keyval({'throughput':throughput, 'procs':procs})
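
For reference, postprocess_iteration() above scrapes dbench's final status line with a regular expression. The same parse in isolation, against a made-up sample line (the numbers are hypothetical, not from a real run):

import re

sample = "Throughput 214.528 MB/sec 8 procs"   # hypothetical dbench output line

pattern = re.compile(r"Throughput (.*?) MB/sec (.*?) procs")
throughput, procs = pattern.findall(sample)[0]
print({'throughput': throughput, 'procs': procs})
# -> {'throughput': '214.528', 'procs': '8'}
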
diff --git a/client/tests/dbench/dbench_ldflags.patch b/client/tests/dbench/dbench_ldflags.patch
deleted file mode 100644
index aacf01d..0000000
--- a/client/tests/dbench/dbench_ldflags.patch
+++ /dev/null
@@ -1,27 +0,0 @@
---- old/Makefile.in	2016-06-09 16:03:03.113454462 -0700
-+++ new/Makefile.in	2016-06-09 16:03:38.713684070 -0700
-@@ -13,6 +13,7 @@
- 
- CC=@CC@
- CFLAGS=@CFLAGS@ -I. -DVERSION=\"$(VERSION)\" -DDATADIR=\"$(datadir)\"
-+LDFLAGS=@LDFLAGS@
- EXEEXT=@EXEEXT@
- 
- DB_OBJS = fileio.o util.o dbench.o child.o system.o snprintf.o
-@@ -22,13 +23,13 @@
- all: dbench tbench tbench_srv
- 
- dbench: $(DB_OBJS)
--	$(CC) -lpthread -o $@ $(DB_OBJS) $(LIBS)
-+	$(CC) -lpthread -o $@ $(DB_OBJS) $(LIBS) $(LDFLAGS)
- 
- tbench: $(TB_OBJS)
--	$(CC) -lpthread -o $@ $(TB_OBJS) $(LIBS)
-+	$(CC) -lpthread -o $@ $(TB_OBJS) $(LIBS) $(LDFLAGS)
- 
- tbench_srv: $(SRV_OBJS)
--	$(CC) -o $@ $(SRV_OBJS) $(LIBS)
-+	$(CC) -o $@ $(SRV_OBJS) $(LIBS) $(LDFLAGS)
- 
- # Careful here: don't install client.txt over itself.
- install: all
diff --git a/client/tests/dbench/dbench_startup.patch b/client/tests/dbench/dbench_startup.patch
deleted file mode 100644
index b23e67c..0000000
--- a/client/tests/dbench/dbench_startup.patch
+++ /dev/null
@@ -1,98 +0,0 @@
-diff -u old/ new/        
---- old/Makefile.in	2008-09-18 14:43:55.000000000 -0700
-+++ new/Makefile.in	2008-09-18 14:42:53.000000000 -0700
-@@ -22,10 +22,10 @@
- all: dbench tbench tbench_srv
- 
- dbench: $(DB_OBJS)
--	$(CC) -o $@ $(DB_OBJS) $(LIBS)
-+	$(CC) -lpthread -o $@ $(DB_OBJS) $(LIBS)
- 
- tbench: $(TB_OBJS)
--	$(CC) -o $@ $(TB_OBJS) $(LIBS)
-+	$(CC) -lpthread -o $@ $(TB_OBJS) $(LIBS)
- 
- tbench_srv: $(SRV_OBJS)
- 	$(CC) -o $@ $(SRV_OBJS) $(LIBS)
-diff -u old/ new/        
---- old/dbench.c	2008-09-18 14:43:49.000000000 -0700
-+++ new/dbench.c	2008-09-18 14:42:46.000000000 -0700
-@@ -130,6 +130,8 @@
- 	int synccount;
- 	struct timeval tv;
- 	FILE *load;
-+	int shmid;
-+	sem_t *sema;
- 
- 	load = open_loadfile();
- 	if (load == NULL) {
-@@ -162,12 +164,24 @@
- 		children[i].directory = directory;
- 	}
- 
-+	shmid = shmget(IPC_PRIVATE, sizeof(*sema), IPC_CREAT | 0666);
-+	if (shmid < 0) {
-+		perror("could not create shared memory segment");
-+		exit(1);
-+	}
-+	sema = shmat(shmid, NULL, 0);
-+
-+	if (sem_init(sema, 1, 0) < 0) {
-+		perror("semaphore initilization failed");
-+		exit(1);
-+	}
-+
- 	for (i=0;i<nprocs;i++) {
- 		if (fork() == 0) {
- 			setlinebuf(stdout);
- 			nb_setup(&children[i]);
- 			children[i].status = getpid();
--			pause();
-+			sem_wait(sema);
- 			fn(&children[i], loadfile);
- 			_exit(0);
- 		}
-@@ -185,12 +199,14 @@
- 
- 	if (synccount != nprocs) {
- 		printf("FAILED TO START %d CLIENTS (started %d)\n", nprocs, synccount);
-+		shmdt(sema);
- 		return;
- 	}
- 
- 	printf("%d clients started\n", nprocs);
- 
--	kill(0, SIGCONT);
-+	for (i=0;i<nprocs;i++)
-+		sem_post(sema);
- 
- 	tv_start = timeval_current();
- 
-@@ -202,6 +218,7 @@
- 		if (WEXITSTATUS(status) != 0) {
- 			printf("Child failed with status %d\n",
- 			       WEXITSTATUS(status));
-+			shmdt(sema);
- 			exit(1);
- 		}
- 		i++;
-@@ -210,6 +227,8 @@
- 	alarm(0);
- 	sig_alarm(SIGALRM);
- 
-+	shmdt(sema);
-+
- 	printf("\n");
- }
- 
-diff -u old/ new/        
---- old/dbench.h	2008-09-18 14:43:48.000000000 -0700
-+++ new/dbench.h	2008-09-18 14:42:48.000000000 -0700
-@@ -35,6 +35,7 @@
- #include <sys/ipc.h>
- #include <sys/shm.h>
- #include <sys/mman.h>
-+#include <semaphore.h>
- 
- #ifdef HAVE_SYS_VFS_H
- #include <sys/vfs.h>
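
The startup patch above replaces dbench's original pause()/SIGCONT start signal with a process-shared semaphore in a SysV shared-memory segment, so every child is released by exactly one sem_post(). The same start-gate pattern expressed with Python's multiprocessing primitives (a sketch of the idea, not a translation of the patch):

import multiprocessing as mp
import time

def worker(idx, gate):
    gate.acquire()            # block until the parent posts, like sem_wait() in the patch
    print('child %d started at %.3f' % (idx, time.monotonic()))

if __name__ == '__main__':
    nprocs = 4
    gate = mp.Semaphore(0)    # starts at 0, so all children wait at the gate
    procs = [mp.Process(target=worker, args=(i, gate)) for i in range(nprocs)]
    for p in procs:
        p.start()
    time.sleep(0.1)           # stand-in for dbench's "wait until all children set up" loop
    for _ in range(nprocs):
        gate.release()        # one post per child, like the sem_post() loop in the patch
    for p in procs:
        p.join()
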
diff --git a/client/tests/dbt2/control b/client/tests/dbt2/control
deleted file mode 100644
index 67269e8..0000000
--- a/client/tests/dbt2/control
+++ /dev/null
@@ -1,14 +0,0 @@
-AUTHOR = "markwkm <markwkm@us...>"
-NAME = "dbt2"
-TEST_CLASS = "kernel"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "client"
-DOC = """
-Dbt-2 is a fair-use implementation of the TPC-C benchmark.  The test is
-currently hardcoded to use PostgreSQL but the kit also supports MySQL.
-"""
-TIME="SHORT"
-
-job.run_test('dbt2', db_type='pgsql', args='-w 1 -c 20 -d 1800 -s 100 -n -z "autotest pgsql"', tag='pgsql')
-job.run_test('dbt2', db_type='pgpool', args='-w 1 -c 20 -d 1800 -s 100 -n -z "autotest pgpool"', tag='pgpool')
-job.run_test('dbt2', db_type='mysql', args='-w 1 -c 20 -d 1800 -s 100 -n -z "autotest mysql"', tag='mysql')
diff --git a/client/tests/dbt2/dbt2-0.39.tar.bz2 b/client/tests/dbt2/dbt2-0.39.tar.bz2
deleted file mode 100644
index 620f7eb..0000000
--- a/client/tests/dbt2/dbt2-0.39.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/dbt2/dbt2.py b/client/tests/dbt2/dbt2.py
deleted file mode 100644
index 5a74262..0000000
--- a/client/tests/dbt2/dbt2.py
+++ /dev/null
@@ -1,77 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-# Dbt-2 is a fair-use implementation of the TPC-C benchmark.  The test is
-# currently hardcoded to use PostgreSQL but the kit also supports MySQL.
-
-class dbt2(test.test):
-    version = 2
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://osdn.dl.sourceforge.net/sourceforge/osdldbt/dbt2-0.39.tar.gz
-    def setup(self, tarball = 'dbt2-0.39.tar.bz2'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        self.job.setup_dep(['pgsql', 'pgpool', 'mysql'])
-
-        #
-        # Extract one copy of the kit for MySQL.
-        #
-        utils.system('cp -pR ' + self.srcdir + ' ' + self.srcdir + '.mysql')
-        os.chdir(self.srcdir + '.mysql')
-        utils.configure('--with-mysql=%s/deps/mysql/mysql' % self.autodir)
-        utils.make()
-
-        #
-        # Extract one copy of the kit for PostgreSQL.
-        #
-        utils.system('cp -pR ' + self.srcdir + ' ' + self.srcdir + '.pgsql')
-        os.chdir(self.srcdir + '.pgsql')
-        utils.configure('--with-postgresql=%s/deps/pgsql/pgsql' % self.autodir)
-        utils.make()
-
-        # Create symlinks from dbt-2's preferred results directories
-        # to autotest's results directory (self.resultsdir).
-        utils.system('ln -s %s %s' %
-                     (self.resultsdir, self.srcdir + '.mysql/scripts/output'))
-        utils.system('ln -s %s %s' %
-                     (self.resultsdir, self.srcdir + '.pgsql/scripts/output'))
-
-
-    def execute(self, db_type, args = ''):
-        logfile = self.resultsdir + '/dbt2.log'
-
-        if (db_type == "mysql"):
-            self.execute_mysql(args)
-        elif (db_type == "pgpool"):
-            self.execute_pgpool(args)
-        elif (db_type == "pgsql"):
-            self.execute_pgsql(args)
-
-
-    def execute_mysql(self, args = ''):
-        args = args
-        utils.system(self.srcdir + '.mysql/scripts/mysql/build_db.sh -g -w 1')
-        utils.system(self.srcdir + '.mysql/scripts/run_workload.sh ' + args)
-
-
-    def execute_pgpool(self, args = ''):
-        utils.system('%s/deps/pgpool/pgpool/bin/pgpool -f %s/../pgpool.conf' \
-                        % (self.autodir, self.srcdir))
-        self.execute_pgsql(args)
-        utils.system('%s/deps/pgpool/pgpool/bin/pgpool stop' % self.autodir)
-
-
-    def execute_pgsql(self, args = ''):
-        utils.system(self.srcdir + '.pgsql/scripts/pgsql/build_db.sh -g -w 1')
-        utils.system(self.srcdir + '.pgsql/scripts/run_workload.sh ' + args)
-        #
-        # Clean up by dropping the database after the test.
-        #
-        utils.system(self.srcdir + '.pgsql/scripts/pgsql/start_db.sh')
-        utils.system(self.srcdir + '.pgsql/scripts/pgsql/drop_db.sh')
-        utils.system(self.srcdir + '.pgsql/scripts/pgsql/stop_db.sh')
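
One observation on execute_pgpool() above: pgpool is started and stopped around the PostgreSQL workload, so an exception in between would leave the daemon running. A context-manager wrapper (hypothetical helper, not part of the deleted test) would make the start/stop pairing exception-safe:

import contextlib
import subprocess

@contextlib.contextmanager
def pgpool_running(pgpool_bin, conf_path):
    """Keep pgpool running for the duration of the block, stopping it even on error."""
    subprocess.check_call([pgpool_bin, '-f', conf_path])
    try:
        yield
    finally:
        subprocess.check_call([pgpool_bin, 'stop'])

# Usage sketch (paths are placeholders):
# with pgpool_running('/usr/bin/pgpool', 'pgpool.conf'):
#     run_pgsql_workload()
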
diff --git a/client/tests/dbt2/pgpool.conf b/client/tests/dbt2/pgpool.conf
deleted file mode 100644
index dd417ce..0000000
--- a/client/tests/dbt2/pgpool.conf
+++ /dev/null
@@ -1,135 +0,0 @@
-#
-# pgpool configuration file sample
-# $Header: /cvsroot/pgpool/pgpool-II/pgpool.conf.sample,v 1.1.1.1 2006/09/08 03:36:04 t-ishii Exp $
-
-# Host name or IP address to listen on: '*' for all, '' for no TCP/IP
-# connections
-listen_addresses = 'localhost'
-
-# Port number for pgpool
-port = 9999
-
-# Port number for pgpool communication manager
-pcp_port = 9898
-
-# Unix domain socket path.  (The Debian package defaults to
-# /run/postgresql.)
-socket_dir = '/tmp'
-
-# Unix domain socket path for pgpool communication manager.
-# (Debian package default to /run/postgresql)
-pcp_socket_dir = '/tmp'
-
-# Unix domain socket path for the backend. (The Debian package defaults to /run/postgresql.)
-backend_socket_dir = '/tmp'
-
-# pgpool communication manager timeout. 0 means no timeout, but that is strongly discouraged!
-pcp_timeout = 10
-
-# number of pre-forked child process
-num_init_children = 32
-
-# Number of connection pools allowed for a child process
-max_pool = 4
-
-# If idle for this many seconds, child exits.  0 means no timeout.
-child_life_time = 300
-
-# If idle for this many seconds, connection to PostgreSQL closes.
-# 0 means no timeout.
-connection_life_time = 0
-
-# If child_max_connections connections were received, child exits.
-# 0 means no exit.
-child_max_connections = 0
-
-# Logging directory
-logdir = '/tmp'
-
-# Replication mode
-#replication_mode = false
-
-# Set this to true if you want to avoid deadlock situations when
-# replication is enabled.  There will, however, be a noticable performance
-# degration.  A workaround is to set this to false and insert a /*STRICT*/
-# comment at the beginning of the SQL command.
-#replication_strict = true
-
-# When replication_strict is set to false, there will be a chance for
-# deadlocks.  Set this to nonzero (in milliseconds) to detect this
-# situation and resolve the deadlock by aborting current session.
-#replication_timeout = 5000
-
-# Load balancing mode, i.e., all SELECTs except in a transaction block
-# are load balanced.  This is ignored if replication_mode is false.
-#load_balance_mode = false
-
-# If there is a data mismatch between the master and the secondary,
-# start degeneration to stop replication mode.
-#replication_stop_on_mismatch = false
-
-# Semicolon separated list of queries to be issued at the end of a session
-reset_query_list = 'ABORT; RESET ALL; SET SESSION AUTHORIZATION DEFAULT'
-
-# If true print time stamp on each log line.
-print_timestamp = true
-
-# If true, operate in master/slave mode.
-#master_slave_mode = false
-
-# If true, cache connection pool.
-connection_cache = true
-
-# Health check timeout.  0 means no timeout.
-health_check_timeout = 20
-
-# Health check period.  0 means no health check.
-health_check_period = 0
-
-# Health check user
-health_check_user = 'nobody'
-
-# If true, automatically lock table with INSERT statements to keep SERIAL
-# data consistency.  An /*INSERT LOCK*/ comment has the same effect.  A
-# /NO INSERT LOCK*/ comment disables the effect.
-insert_lock = false
-
-# If true, ignore leading white spaces of each query while pgpool judges
-# whether the query is a SELECT so that it can be load balanced.  This
-# is useful for certain APIs such as DBI/DBD, which are known to add an
-# extra leading white space.
-ignore_leading_white_space = false
-
-# If true, print all statements to the log.  Like the log_statement option
-# to PostgreSQL, this allows for observing queries without engaging in full
-# debugging.
-log_statement = true
-
-# if non 0, run in parallel query mode
-#parallel_mode = false
-
-# if non 0, use query cache
-#enable_query_cache = false
-
-#set pgpool2 hostname 
-#pgpool2_hostname = ''
-
-# system DB info
-#system_db_hostname = 'localhost'
-#system_db_port = 5432
-#system_db_dbname = 'pgpool'
-#system_db_schema = 'pgpool_catalog'
-#system_db_user = 'pgpool'
-#system_db_password = ''
-
-# backend_hostname, backend_port, backend_weight
-# here are examples
-backend_hostname0 = 'localhost'
-backend_port0 = 5432
-backend_weight0 = 1
-#backend_hostname0 = 'host1'
-#backend_port0 = 5432
-#backend_weight0 = 1
-#backend_hostname1 = 'host2'
-#backend_port1 = 5433
-#backend_weight1 = 1
diff --git a/client/tests/ddtest/control b/client/tests/ddtest/control
deleted file mode 100644
index 43b11a0..0000000
--- a/client/tests/ddtest/control
+++ /dev/null
@@ -1,14 +0,0 @@
-AUTHOR = 'Ricky Benitez (rickyb@google.com)'
-NAME = 'ddtest'
-DOC = '''
-Spawn multiple threads to write sequentially to the same disk and time
-the entire operation. Intended to detect basic performance regressions
-in disk writes.
-'''
-ATTRIBUTES = "suite:kernel_daily_benchmarks"
-TIME = 'SHORT'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-
-job.run_test('ddtest')
diff --git a/client/tests/ddtest/ddtest.py b/client/tests/ddtest/ddtest.py
deleted file mode 100755
index 41a48e2..0000000
--- a/client/tests/ddtest/ddtest.py
+++ /dev/null
@@ -1,42 +0,0 @@
-
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os, shutil, re
-from autotest_lib.client.bin import utils, test
-
-class ddtest(test.test):
-    version = 2
-
-
-    def setup(self, tarball='ddtest.tar.gz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        utils.system('make build')
-
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.results = []
-        self.job.drop_caches_between_iterations = True
-
-
-    def run_once(self, dir=None, blocksize=1024, blocknum=262144, threads=20):
-        if not dir:
-            dir = os.path.join(self.srcdir, 'rdir')
-            shutil.rmtree(dir, True)
-            os.mkdir(dir)
-        args = '-D ' + dir
-        args += ' -b %d' % blocksize
-        args += ' -n %d' % blocknum
-        args += ' -t %d' % threads
-        self.results.append(utils.system_output(os.path.join(self.srcdir,
-                            'ddtest') + ' ' + args))
-
-
-    def postprocess(self):
-        pattern = re.compile(r"throughput is (.*?) MB/sec")
-        for throughput in pattern.findall("\n".join(self.results)):
-            self.write_perf_keyval({'throughput':throughput})
diff --git a/client/tests/ddtest/ddtest.tar.gz b/client/tests/ddtest/ddtest.tar.gz
deleted file mode 100644
index 4504879..0000000
--- a/client/tests/ddtest/ddtest.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/ebizzy/control b/client/tests/ebizzy/control
deleted file mode 100644
index 4b71dce..0000000
--- a/client/tests/ebizzy/control
+++ /dev/null
@@ -1,11 +0,0 @@
-NAME = "ebizzy"
-AUTHOR = "Sudhir Kumar <skumar@linux.vnet.ibm.com>"
-TIME = "MEDIUM"
-TEST_CATEGORY = "FUNCTIONAL"
-TEST_CLASS = "SYSTEM STRESS"
-TEST_TYPE = "CLIENT"
-DOC = """
-http://sourceforge.net/project/platformdownload.php?group_id=202378&sel_platform=3809
-"""
-
-job.run_test('ebizzy', args = '-vv')
diff --git a/client/tests/ebizzy/ebizzy-0.3.tar.gz b/client/tests/ebizzy/ebizzy-0.3.tar.gz
deleted file mode 100644
index d1bd9ed..0000000
--- a/client/tests/ebizzy/ebizzy-0.3.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/ebizzy/ebizzy.py b/client/tests/ebizzy/ebizzy.py
deleted file mode 100644
index 50d6473..0000000
--- a/client/tests/ebizzy/ebizzy.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-from autotest_lib.client.bin import utils, test
-from autotest_lib.client.common_lib import error
-
-class ebizzy(test.test):
-    version = 3
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://sourceforge.net/project/downloading.php?group_id=202378&filename=ebizzy-0.3.tar.gz
-    def setup(self, tarball='ebizzy-0.3.tar.gz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-
-        utils.system('[ -x configure ] && ./configure')
-        utils.make()
-
-
-    # Note: by default we always use mmap()
-    def run_once(self, args='', num_chunks=1000, chunk_size=512000,
-                 seconds=100, num_threads=100):
-
-        # TODO: Write small functions which will choose many of the above
-        # variables dynamically by looking at the guest's total resources.
-        logfile = os.path.join(self.resultsdir, 'ebizzy.log')
-        args2 = '-m -n %s -P -R -s %s -S %s -t %s' % (num_chunks, chunk_size,
-                                                      seconds, num_threads)
-        args = args + ' ' + args2
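
Note that the deleted run_once() above assembles the ebizzy argument string but never executes the binary, so the test effectively did nothing. The missing step would presumably have been a single call along these lines (a sketch reusing the utils helpers this file already imports; not present in the deleted code):

# Inside run_once(), after args has been assembled (hypothetical addition):
cmd = os.path.join(self.srcdir, 'ebizzy') + ' ' + args
self.results = utils.system_output(cmd, retain_output=True)
utils.open_write_close(logfile, self.results)
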
diff --git a/client/tests/error_cleanup/control b/client/tests/error_cleanup/control
deleted file mode 100644
index 9edbd16..0000000
--- a/client/tests/error_cleanup/control
+++ /dev/null
@@ -1,9 +0,0 @@
-AUTHOR = "Gregory Smith <gps@google.com>"
-NAME = "error test for cleanup phase exception"
-TEST_TYPE = "client"
-TEST_CLASS = "General"
-TEST_CATEGORY = "Functional"
-TIME = "SHORT"
-DOC = """Raise an exception during cleanup().  This tests Autotest itself."""
-
-job.run_test('error_cleanup')
diff --git a/client/tests/error_cleanup/error_cleanup.py b/client/tests/error_cleanup/error_cleanup.py
deleted file mode 100644
index 9258522..0000000
--- a/client/tests/error_cleanup/error_cleanup.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from autotest_lib.client.bin import test
-
-class error_cleanup(test.test):
-    version = 1
-
-
-    def execute(self):
-        pass
-
-
-    def cleanup(self):
-        raise NameError("test a bug in cleanup()")
diff --git a/client/tests/error_initialize/control b/client/tests/error_initialize/control
deleted file mode 100644
index 0de7af1..0000000
--- a/client/tests/error_initialize/control
+++ /dev/null
@@ -1,9 +0,0 @@
-AUTHOR = "Gregory Smith <gps@google.com>"
-NAME = "error test for initialize phase exception"
-TEST_TYPE = "client"
-TEST_CLASS = "General"
-TEST_CATEGORY = "Functional"
-TIME = "SHORT"
-DOC = """Raise an exception during initialize().  This tests Autotest itself."""
-
-job.run_test('error_initialize')
diff --git a/client/tests/error_initialize/error_initialize.py b/client/tests/error_initialize/error_initialize.py
deleted file mode 100644
index adb05da..0000000
--- a/client/tests/error_initialize/error_initialize.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from autotest_lib.client.bin import test
-
-class error_initialize(test.test):
-    version = 1
-
-
-    def initialize(self):
-        raise NameError("test a bug in initialize()")
-
-
-    def execute(self):
-        pass
diff --git a/client/tests/error_setup/control b/client/tests/error_setup/control
deleted file mode 100644
index c1c662e..0000000
--- a/client/tests/error_setup/control
+++ /dev/null
@@ -1,9 +0,0 @@
-AUTHOR = "Gregory Smith <gps@google.com>"
-NAME = "error test for setup phase exception"
-TEST_TYPE = "client"
-TEST_CLASS = "General"
-TEST_CATEGORY = "Functional"
-TIME = "SHORT"
-DOC = """Raise an exception during setup().  This tests Autotest itself."""
-
-job.run_test('error_setup')
diff --git a/client/tests/error_setup/error_setup.py b/client/tests/error_setup/error_setup.py
deleted file mode 100644
index 1a3be4a..0000000
--- a/client/tests/error_setup/error_setup.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from autotest_lib.client.bin import test
-
-class error_setup(test.test):
-    version = 1
-
-
-    def setup(self):
-        raise ValueError("test a bug in setup()")
-
-    def execute(self):
-        pass
diff --git a/client/tests/error_skip_step/control b/client/tests/error_skip_step/control
deleted file mode 100644
index 043b9be..0000000
--- a/client/tests/error_skip_step/control
+++ /dev/null
@@ -1,34 +0,0 @@
-AUTHOR = 'Gregory P. Smith <gps@google.com>'
-NAME = 'error test, make sure TestNAError skips steps in step engine'
-TEST_TYPE = 'client'
-TEST_CLASS = 'General'
-TEST_CATEGORY = 'Functional'
-TIME = 'SHORT'
-DOC = """Raise TestNAError during step1.  step0, 2 and 3 should run."""
-
-
-def step_init():
-    job.next_step(step_test)
-
-
-def step_test():
-    job.next_step('step0')
-    job.next_step('step1')
-    job.next_step('step2')
-
-
-def step0():
-    print 'step0 is the coolest!'
-
-
-def step1():
-    raise error.TestNAError('This part can not run here.  meep meep.')
-
-
-def step2():
-    print 'screw you step0, I am the coolest. -step2'
-    job.next_step('step3')
-
-
-def step3():
-    print 'at least I can run.  unlike step1.'
diff --git a/client/tests/error_test_bug/control b/client/tests/error_test_bug/control
deleted file mode 100644
index eb0217f..0000000
--- a/client/tests/error_test_bug/control
+++ /dev/null
@@ -1,9 +0,0 @@
-AUTHOR = "Gregory Smith <gps@google.com>"
-NAME = "error test of a buggy test class"
-TEST_TYPE = "client"
-TEST_CLASS = "General"
-TEST_CATEGORY = "Functional"
-TIME = "SHORT"
-DOC = """Simulate a buggy test.  This is for testing Autotest itself."""
-
-job.run_test('error_test_bug')
diff --git a/client/tests/error_test_bug/error_test_bug.py b/client/tests/error_test_bug/error_test_bug.py
deleted file mode 100644
index ccb901c..0000000
--- a/client/tests/error_test_bug/error_test_bug.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import test
-
-class error_test_bug(test.test):
-    version = 1
-
-
-    def execute(self):
-        raise RuntimeError("Woof Woof, Timmy's trapped in the well!")
diff --git a/client/tests/error_test_error/control b/client/tests/error_test_error/control
deleted file mode 100644
index 5e86178..0000000
--- a/client/tests/error_test_error/control
+++ /dev/null
@@ -1,9 +0,0 @@
-AUTHOR = "Gregory Smith <gps@google.com>"
-NAME = "error test for ERROR"
-TEST_TYPE = "client"
-TEST_CLASS = "General"
-TEST_CATEGORY = "Functional"
-TIME = "SHORT"
-DOC = """Raise a TestError.  This is for testing Autotest itself."""
-
-job.run_test('error_test_error')
diff --git a/client/tests/error_test_error/error_test_error.py b/client/tests/error_test_error/error_test_error.py
deleted file mode 100644
index 6b9b928..0000000
--- a/client/tests/error_test_error/error_test_error.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import test
-
-class error_test_error(test.test):
-    version = 1
-
-
-    def execute(self):
-        raise error.TestError("This test always causes an error.")
diff --git a/client/tests/error_test_fail/control b/client/tests/error_test_fail/control
deleted file mode 100644
index 959cbc8..0000000
--- a/client/tests/error_test_fail/control
+++ /dev/null
@@ -1,9 +0,0 @@
-AUTHOR = "Gregory Smith <gps@google.com>"
-NAME = "error test for FAIL"
-TEST_TYPE = "client"
-TEST_CLASS = "General"
-TEST_CATEGORY = "Functional"
-TIME = "SHORT"
-DOC = """Raise a TestFail.  This is for testing Autotest itself."""
-
-job.run_test('error_test_fail')
diff --git a/client/tests/error_test_fail/error_test_fail.py b/client/tests/error_test_fail/error_test_fail.py
deleted file mode 100644
index 7595004..0000000
--- a/client/tests/error_test_fail/error_test_fail.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import test
-
-class error_test_fail(test.test):
-    version = 1
-
-
-    def execute(self):
-        raise error.TestFail("This test always fails.")
diff --git a/client/tests/error_test_na/control b/client/tests/error_test_na/control
deleted file mode 100644
index 1704af7..0000000
--- a/client/tests/error_test_na/control
+++ /dev/null
@@ -1,9 +0,0 @@
-AUTHOR = "Gregory Smith <gps@google.com>"
-NAME = "error test for TEST_NA"
-TEST_TYPE = "client"
-TEST_CLASS = "General"
-TEST_CATEGORY = "Functional"
-TIME = "SHORT"
-DOC = """Raise a TestNAError.  This is for testing Autotest itself."""
-
-job.run_test('error_test_na')
diff --git a/client/tests/error_test_na/control2 b/client/tests/error_test_na/control2
deleted file mode 100644
index 4567628..0000000
--- a/client/tests/error_test_na/control2
+++ /dev/null
@@ -1,10 +0,0 @@
-AUTHOR = "Gregory Smith <gps@google.com>"
-NAME = "error test for top level TestNAError == TEST_NA"
-TEST_TYPE = "client"
-TEST_CLASS = "General"
-TEST_CATEGORY = "Functional"
-TIME = "SHORT"
-DOC = """Raise a TestNAError directly; for testing Autotest itself."""
-
-
-raise error.TestNAError('top level of this control file says N/A')
diff --git a/client/tests/error_test_na/error_test_na.py b/client/tests/error_test_na/error_test_na.py
deleted file mode 100644
index 1897a74..0000000
--- a/client/tests/error_test_na/error_test_na.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import test
-
-class error_test_na(test.test):
-    version = 1
-
-
-    def execute(self):
-        raise error.TestNAError("This test can't run on this host.")
diff --git a/client/tests/ffsb/README b/client/tests/ffsb/README
deleted file mode 100644
index d2516ef..0000000
--- a/client/tests/ffsb/README
+++ /dev/null
@@ -1,44 +0,0 @@
-INTRODUCTION:
-=============
-This patch integrates the Flexible Filesystem Benchmark (FFSB) with
-autotest. This integration enables high-performance I/O load testing in the
-KVM virtual machine environment.
-
-FFSB is a filesystem performance measurement tool.  It is a multi-threaded
-application (using pthreads), written entirely in C with cross-platform
-portability in mind.  It differs from other filesystem benchmarks in that
-the user may supply a profile to create custom workloads, while most other
-filesystem benchmarks use a fixed set of randomly generated workloads.
-
-More information about FFSB can be found in the README in the FFSB
-source directory, which can be accessed from this link:
-[http://sourceforge.net/projects/ffsb/]
-
-With this integration, it is now possible to test a variety of filesystems on
-the KVM guest for:
-
-(1) directed I/O with sequential/random read/write.
-(2) buffered I/O with sequential/random read/write.
-(3) use varying block alignment boundaries to measure filesystem behaviour.
-(4) use multithreaded workloads to stress the filesystem.
-(5) Exert a weighted combination of I/O workloads to analyze the I/O
-    performance for a specific scenario.
-(6) Age the filesystem according to a specified workload up to a specified limit.
-
-    Since the only interface used for the integration is an FFSB configuration
-file, Autotest will be able to run a variety of I/O tests on the guest as FFSB
-improves, with little or no code change in Autotest itself.
-
-USE:
-====
-To use FFSB for filesystem testing, two configuration files
-need to be modified:
-(1) tests.cfg - the usual file, to activate the ffsb tests through KVM.
-(2) profile.cfg - where the workloads are specified.
-
-TODO:
-====
-* Add validation of the maximum number of threads according to the number of
-  vcpus exported by QEMU-KVM
-* Test Autotest/ffsb
-* Test FFSB itself.
diff --git a/client/tests/ffsb/control b/client/tests/ffsb/control
deleted file mode 100644
index c7e92eb..0000000
--- a/client/tests/ffsb/control
+++ /dev/null
@@ -1,16 +0,0 @@
-AUTHOR = "Onkar N Mahajan <onkar.n.mahajan@linux.vnet.ibm.com>"
-NAME = "Flexible Filesystem Benchmark (FFSB)"
-TEST_CATEGORY = "Filesystem Benchmark"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-TIME = 'MEDIUM'
-DOC = """
-The Flexible Filesystem Benchmark (FFSB) is a cross-platform
-filesystem performance measurement tool. It uses customizable
-profiles to measure the performance of different workloads, and it supports
-multiple groups of threads across multiple filesystems.
-
-For more info, see http://sourceforge.net/projects/ffsb/
-"""
-
-job.run_test('ffsb')
diff --git a/client/tests/ffsb/ffsb-6.0-rc2.tar.bz2 b/client/tests/ffsb/ffsb-6.0-rc2.tar.bz2
deleted file mode 100644
index 96c2f18..0000000
--- a/client/tests/ffsb/ffsb-6.0-rc2.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/ffsb/ffsb.py b/client/tests/ffsb/ffsb.py
deleted file mode 100644
index 90e35ec..0000000
--- a/client/tests/ffsb/ffsb.py
+++ /dev/null
@@ -1,197 +0,0 @@
-import os, string, logging, re, random, shutil
-from autotest_lib.client.bin import test, os_dep, utils
-from autotest_lib.client.common_lib import error
-
-
-def find_mnt_pt(path):
-    """
-    Find on which mount point a given path is mounted.
-
-    @param path: Path we want to figure its mount point.
-    """
-    pth = os.path.abspath(path)
-    while not os.path.ismount(pth):
-        pth = os.path.dirname(pth)
-    return pth
-
-
-class ffsb(test.test):
-    """
-    This class wraps FFSB (Flexible File System Benchmark) execution
-    under autotest.
-
-    @author Onkar N Mahajan (onkar.n.mahajan@linux.vnet.ibm.com)
-    """
-    version = 1
-    params = {}
-    tempdirs = []
-    bytes = {'K':1024 , 'k':1024,
-             'M':1048576, 'm':1048576,
-             'G':1073741824, 'g':1073741824,
-             'T':1099511627776 , 't':1099511627776}
-
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.results = []
-        self.nfail = 0
-
-
-    def set_ffsb_params(self, usrfl):
-        """
-        This function checks for the user-supplied FFSB profile file
-        and validates it against the available resources on the
-        guest. Currently only disk space validation is supported;
-        adjusting the number of threads according to the vcpus
-        exported by qemu-kvm still needs to be added.
-
-        @param usrfl: Path to the user profile file.
-        """
-        d = {}
-        fr = open(usrfl,'r')
-        for line in fr.read().split('\n'):
-            p = re.compile(r'\s*\t*\[{1}filesystem(\d+)\]{1}')
-            m = p.match(line)
-            if m:
-                fsno = int(line[m.start(1):m.end(1)])
-                d[fsno] = []
-            p = re.compile(r'(\s*\t*location)\=(.*)')
-            m = p.match(line)
-            if m:
-                path = line[m.start(2):m.end(2)]
-                mntpt = find_mnt_pt(path)
-                f = os.statvfs(mntpt)
-                avl_dsk_spc = f.f_bfree * f.f_bsize
-                avl_dsk_spc *= 0.95
-                d[fsno].append(mntpt)
-                d[fsno].append(int(avl_dsk_spc))
-            p = re.compile(r'(\s*\t*num_files)\=(\d+)')
-
-            m = p.match(line)
-            if m:
-                usrnumfl = int(line[m.start(2):m.end(2)])
-                d[fsno].append(usrnumfl)
-            p = re.compile(r'(\s*\t*max_filesize)\=(\d+[kKMmGgTt]?)')
-            m = p.match(line)
-            if m:
-                usrmaxflsz = line[m.start(2):m.end(2)]
-                usrmaxflsz = int(usrmaxflsz[0:-1]) * self.bytes[usrmaxflsz[-1]]
-                d[fsno].append(usrmaxflsz)
-        for k in d.keys():
-            while d[k][2]*d[k][3] >= d[k][1]:
-                d[k][2] -= 1
-            if d[k][2] == 0:
-                d[k][2] = 1
-                d[k][3] = d[k][1]
-            # If the ffsb mount point is on the same file system
-            # then use the available disk space after the previous
-            # tests
-            for k1 in d.keys():
-                if d[k1][0] == d[k][0]:
-                    d[k1][1] -= (d[k][2]*d[k][3])
-        fr.close()
-        return d
-
-
-    def dup_ffsb_profilefl(self):
-        """
-        Validates the paths from the FFSB configuration file and the
-        disk space available for the test, warns the user, and
-        changes the file sizes and/or number of files to be used for
-        generating the workload according to the available disk space
-        on the guest.
-        """
-        self.usrfl = '%s/%s' % (os.path.split(self.srcdir)[0],'profile.cfg')
-        self.sysfl = '%s/%s' % (self.srcdir,'profile.cfg')
-
-        params = self.set_ffsb_params(self.usrfl)
-
-        fsno = 0
-        fr = open(self.usrfl,'r')
-        fw = open(self.sysfl,'w')
-        for line in fr.read().split('\n'):
-            p = re.compile(r'\s*\t*\[{1}filesystem(\d+)\]{1}')
-            m = p.match(line)
-            if m:
-                fsno = int(line[m.start(1):m.end(1)])
-            p = re.compile(r'(\s*\t*location)\=(.*)')
-            m = p.match(line)
-            if m:
-                while True:
-                    dirnm = ''.join(random.choice(string.letters) for i in xrange(9))
-                    if line[m.end(2) - 1] == '/':
-                        newline = '%s%s' % (line[0:m.end(2)], dirnm)
-                        ffsbdir = '%s%s' % (line[m.start(2):m.end(2)], dirnm)
-                    else:
-                        newline = '%s/%s' % (line[0:m.end(2)], dirnm)
-                        ffsbdir = '%s/%s' % (line[m.start(2):m.end(2)], dirnm)
-                    self.tempdirs.append(ffsbdir)
-                    if os.path.exists(ffsbdir):
-                        continue
-                    else:
-                        os.makedirs(ffsbdir)
-                        break
-                fw.write(newline+'\n')
-                continue
-            p = re.compile(r'(\s*\t*num_files)\=(.*)')
-            m = p.match(line)
-            if m:
-                newline = '%s=%s' % (line[0:m.end(1)], str(params[fsno][2]))
-                fw.write(newline+'\n')
-                continue
-            p = re.compile(r'(\s*\t*max_filesize)\=(\d+[kKMmGgTt]?)')
-            m = p.match(line)
-            if m:
-                newline = '%s%s' % (line[0:m.start(2)], str(params[fsno][3]))
-                fw.write(newline+'\n')
-                continue
-            fw.write(line+'\n')
-        fr.close()
-        fw.close()
-
-
-    def setup(self, tarball='ffsb-6.0-rc2.tar.bz2'):
-        """
-        Uncompresses the FFSB tarball and compiles it.
-
-        @param tarball: FFSB tarball. Could be either a path relative to
-                self.srcdir or a URL.
-        """
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        os_dep.command('gcc')
-        utils.configure()
-        utils.make()
-
-
-    def run_once(self):
-        """
-        Runs a single iteration of the FFSB.
-        """
-        self.dup_ffsb_profilefl()
-        # Run FFSB using abspath
-        cmd = '%s/ffsb %s/profile.cfg' % (self.srcdir, self.srcdir)
-        logging.info("FFSB command: %s", cmd)
-        self.results_path = os.path.join(self.resultsdir,
-                                         'raw_output_%s' % self.iteration)
-        try:
-            self.results = utils.system_output(cmd, retain_output=True)
-            logging.info(self.results)
-            utils.open_write_close(self.results_path, self.results)
-        except error.CmdError, e:
-            self.nfail += 1
-            logging.error('Failed to execute FFSB : %s', e)
-
-
-    def postprocess(self):
-        """
-        Do test postprocessing. Fail the test or clean up results.
-        """
-        if self.nfail != 0:
-            raise error.TestError('FFSB test failed.')
-        else:
-            logging.info('FFSB test passed')
-            logging.info('Cleaning up test data...')
-            for l in self.tempdirs:
-                shutil.rmtree(l)
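
set_ffsb_params() above caps num_files * max_filesize for each filesystem at 95% of the free space that os.statvfs() reports for its mount point, shrinking num_files until the workload fits. The core of that check, isolated (a sketch; the 0.95 margin matches the deleted code):

import os

def fits_in_free_space(mount_point, num_files, max_filesize_bytes, margin=0.95):
    """Return True if the requested file set fits inside `margin` of the free space."""
    st = os.statvfs(mount_point)
    available = st.f_bfree * st.f_bsize * margin
    return num_files * max_filesize_bytes < available

# Example: 1000 files of 2 MiB each on the root filesystem.
print(fits_in_free_space('/', 1000, 2 * 1024 * 1024))
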
diff --git a/client/tests/ffsb/profile.cfg.sample b/client/tests/ffsb/profile.cfg.sample
deleted file mode 100644
index e68fead..0000000
--- a/client/tests/ffsb/profile.cfg.sample
+++ /dev/null
@@ -1,25 +0,0 @@
-# Large file random writes.
-# 1024 files, 100MB per file.
-
-time=300  # 5 min
-alignio=1
-
-[filesystem0]
-#   For KVM Autotest , this will by-default
-#   be / , unless and until the user is absolutely
-#   sure what is is upto.
-    location=/
-    num_files=2
-    min_filesize=1G
-    max_filesize=2G
-[end0]
-
-[threadgroup0]
-    num_threads=4
-
-    read_random=1
-    read_weight=1
-
-    read_size=5242880  # 5 MB
-    read_blocksize=4096
-[end0]
diff --git a/client/tests/flail/control b/client/tests/flail/control
deleted file mode 100644
index 1bec6b5..0000000
--- a/client/tests/flail/control
+++ /dev/null
@@ -1,16 +0,0 @@
-AUTHOR = "Pradeep Kumar Surisetty <psuriset@linux.vnet.ibm.com>"
-NAME = "flail"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-TIME = 'MEDIUM'
-
-DOC='''
-flail is a system call fuzzing tool. This test simply runs flail.
-Fuzzing is slang for fault injection. It runs all system calls
-for that kernel version with random args.
-The goal is to find bugs in software without reading code or
-designing detailed test cases.
-'''
-
-job.run_test('flail')
diff --git a/client/tests/flail/flail-0.2.0.tar.gz b/client/tests/flail/flail-0.2.0.tar.gz
deleted file mode 100644
index a95c5a4..0000000
--- a/client/tests/flail/flail-0.2.0.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/flail/flail.py b/client/tests/flail/flail.py
deleted file mode 100644
index 5b32fe6..0000000
--- a/client/tests/flail/flail.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class flail(test.test):
-    """
-    This autotest module runs the flail system call fuzzer.
-
-    Fuzzing is slang for fault injection. It runs all system calls for that
-    kernel version with random args. The goal is to find bugs in software
-    without reading code or designing detailed test cases.
-
-    @author: Pradeep K Surisetty (psuriset@linux.vnet.ibm.com)
-    @see: http://www.risesecurity.org/ (Website of Ramon Valle, flail's creator)
-    """
-    version = 1
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def setup(self, tarball = 'flail-0.2.0.tar.gz'):
-        """
-        Compiles flail with the appropriate parameters.
-
-        @param tarball: Path or URL for the flail tarball.
-        """
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def run_once(self, fstype = 'iso9660'):
-        """
-        Runs flail with the appropriate parameters.
-
-        @param fstype: Filesystem type you wish to run flail on.
-        """
-        args = fstype + ' 1'
-        flail_cmd = os.path.join(self.srcdir, 'flail %s' % args)
-        utils.system(flail_cmd)
diff --git a/client/tests/fs_mark/control b/client/tests/fs_mark/control
deleted file mode 100644
index 5af48bf..0000000
--- a/client/tests/fs_mark/control
+++ /dev/null
@@ -1,17 +0,0 @@
-AUTHOR = "walkinair@cn.ibm.com"
-NAME = "fs_mark"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-TIME = 'MEDIUM'
-DOC='''
-Detect barrier issues in file systems.
-
-If a file system's write barrier does not protect synchronous writes,
-the number of synchronous (single-threaded) operations/sec will exceed the
-calculated number of synchronous operations/sec of the underlying drive.
-
-Details of fs_mark can be found at:
-http://devresources.linux-foundation.org/dev/doubt/fs_mark/index.html
-'''
-job.run_test('fs_mark', dir='/mnt')
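
The barrier heuristic in the DOC string above comes down to simple arithmetic: a rotating disk can complete roughly one genuinely flushed synchronous write per revolution, so a sustained single-threaded rate above the spindle speed means the barrier is not reaching the platter. A worked example with assumed numbers (7200 RPM drive; the measured figure is hypothetical):

# Assumed drive: 7200 RPM spindle (not taken from the deleted test).
rpm = 7200
max_sync_ops_per_sec = rpm / 60.0        # at best, one flushed write per revolution
measured_ops_per_sec = 450.0             # hypothetical fs_mark single-threaded result

print('physical ceiling: %.0f ops/sec' % max_sync_ops_per_sec)   # 120 ops/sec
if measured_ops_per_sec > max_sync_ops_per_sec:
    print('measured rate exceeds the ceiling -> write barrier likely not honored')
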
diff --git a/client/tests/fs_mark/fs_mark-3.2.tgz b/client/tests/fs_mark/fs_mark-3.2.tgz
deleted file mode 100644
index aea8a94..0000000
--- a/client/tests/fs_mark/fs_mark-3.2.tgz
+++ /dev/null
Binary files differ
diff --git a/client/tests/fs_mark/fs_mark.py b/client/tests/fs_mark/fs_mark.py
deleted file mode 100644
index 6bebd13..0000000
--- a/client/tests/fs_mark/fs_mark.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class fs_mark(test.test):
-    version = 1
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://developer.osdl.org/dev/doubt/fs_mark/archive/fs_mark-3.2.tgz
-    def setup(self, tarball = 'fs_mark-3.2.tgz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-
-        utils.make()
-
-
-    def run_once(self, dir, args = None):
-        if not args:
-            # Just provide a sample run parameters
-            args = '-s 10240 -n 1000'
-        os.chdir(self.srcdir)
-        utils.system('./fs_mark -d %s %s' %(dir, args))
diff --git a/client/tests/fsfuzzer/control b/client/tests/fsfuzzer/control
deleted file mode 100644
index 230db89..0000000
--- a/client/tests/fsfuzzer/control
+++ /dev/null
@@ -1,20 +0,0 @@
-AUTHOR = "Martin Bligh <mbligh@google.com>"
-NAME = "fsfuzzer"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-TIME = 'MEDIUM'
-
-DOC='''
-fsfuzzer is a file system fuzzer tool. This test simply runs fsfuzzer.
-
-Fuzzing is slang for fault injection via random inputs. The goal is to
-find bugs in software without reading code or designing detailed test
-cases. Here fsfuzzer will inject random errors into the mounted file
-systems. Evidently it has found many errors in many systems.
-
-WARNING: Currently this test may not work, and it may break subsequent
-other test runs.
-'''
-
-job.run_test('fsfuzzer')
diff --git a/client/tests/fsfuzzer/fsfuzzer-0.6.tar.gz b/client/tests/fsfuzzer/fsfuzzer-0.6.tar.gz
deleted file mode 100644
index 0de3c10..0000000
--- a/client/tests/fsfuzzer/fsfuzzer-0.6.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/fsfuzzer/fsfuzzer.py b/client/tests/fsfuzzer/fsfuzzer.py
deleted file mode 100644
index b0097b92..0000000
--- a/client/tests/fsfuzzer/fsfuzzer.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class fsfuzzer(test.test):
-    version = 1
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://people.redhat.com/sgrubb/files/fsfuzzer-0.6.tar.gz
-    def setup(self, tarball = 'fsfuzzer-0.6.tar.gz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        utils.system('patch -p1 < ../makefile.patch')
-        utils.make()
-
-
-    def run_once(self, fstype = 'iso9660'):
-        args = fstype + ' 1'
-        utils.system(self.srcdir + '/run_test ' + args)
diff --git a/client/tests/fsfuzzer/makefile.patch b/client/tests/fsfuzzer/makefile.patch
deleted file mode 100644
index 7b8ddd3..0000000
--- a/client/tests/fsfuzzer/makefile.patch
+++ /dev/null
@@ -1,11 +0,0 @@
---- fsfuzzer-0.6/Makefile.orig	2011-02-07 19:48:48.000000000 -0800
-+++ fsfuzzer-0.6/Makefile	2011-02-07 19:48:57.000000000 -0800
-@@ -15,7 +15,7 @@
- CFLAGS=-g -W -Wall -Wundef
- LIBS=
- all:
--	gcc $(CFLAGS) mangle.c -o mangle $(LIBS)
-+	$(CC) $(CFLAGS) mangle.c -o mangle $(LIBS)
- 
- clean:
- 	rm -f mangle *.o
diff --git a/client/tests/fsstress/control b/client/tests/fsstress/control
deleted file mode 100644
index 20b9dad..0000000
--- a/client/tests/fsstress/control
+++ /dev/null
@@ -1,16 +0,0 @@
-AUTHOR = "Ricardo Salveti de Araujo <rsalveti@linux.vnet.ibm.com"
-NAME = "fsstress"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-TIME = 'MEDIUM'
-DOC='''
-A benchmark that tries to capture both transactional DB workloads and
-random web server ones. It writes about 24 MB/s but issues many small writes
-per second. It does much of this randomly but lets you go back and replay the
-randomness.
-
-More information about fsstress can be found at
-http://www.cs.duke.edu/ari/fstress/
-'''
-job.run_test('fsstress')
diff --git a/client/tests/fsstress/ext3-tools.tar.gz b/client/tests/fsstress/ext3-tools.tar.gz
deleted file mode 100644
index 7a53d2e..0000000
--- a/client/tests/fsstress/ext3-tools.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/fsstress/fsstress-ltp.patch b/client/tests/fsstress/fsstress-ltp.patch
deleted file mode 100644
index 98714f0..0000000
--- a/client/tests/fsstress/fsstress-ltp.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-diff -Naur ext3-tools/fsstress.c ext3-tools.new/fsstress.c
---- ext3-tools/fsstress.c	2004-05-17 04:49:53.000000000 -0300
-+++ ext3-tools.new/fsstress.c	2007-07-16 14:26:20.000000000 -0300
-@@ -487,7 +487,7 @@
- #endif
- 	if (cleanup == 0)
- 	{
--	  sprintf(cmd,"rm -rf %s",dirname);
-+	  sprintf(cmd,"rm -rf %s/*",dirname);
- 	  system(cmd);
- 	}	
-         loopcntr++;
-@@ -1453,7 +1453,7 @@
- 	if (!get_fname(FT_ANYm, r, &f, NULL, NULL, &v))
- 		append_pathname(&f, ".");
- 	total = 0;
--	bzero(&cursor, sizeof(cursor));
-+	memset(&cursor, 0x00, sizeof(cursor));
- 	do {
- 		e = attr_list_path(&f, buf, sizeof(buf), ATTR_DONTFOLLOW,
- 			&cursor);
-@@ -1471,7 +1471,7 @@
- 		return;
- 	}
- 	which = (int)(random() % total);
--	bzero(&cursor, sizeof(cursor));
-+	memset(&cursor, 0x00, sizeof(cursor));
- 	ent = 0;
- 	aname = NULL;
- 	do {
diff --git a/client/tests/fsstress/fsstress.py b/client/tests/fsstress/fsstress.py
deleted file mode 100644
index 2d48946..0000000
--- a/client/tests/fsstress/fsstress.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class fsstress(test.test):
-    version = 1
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://www.zip.com.au/~akpm/linux/patches/stuff/ext3-tools.tar.gz
-    def setup(self, tarball = 'ext3-tools.tar.gz'):
-        self.tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(self.tarball, self.srcdir)
-
-        os.chdir(self.srcdir)
-        utils.system('patch -p1 < ../fsstress-ltp.patch')
-        utils.system('patch -p1 < ../makefile.patch')
-        utils.make('fsstress')
-
-
-    def run_once(self, testdir = None, extra_args = '', nproc = '1000', nops = '1000'):
-        if not testdir:
-            testdir = self.tmpdir
-
-        args = '-d %s -p %s -n %s %s' % (testdir, nproc, nops, extra_args)
-        cmd = self.srcdir + '/fsstress ' + args
-        utils.system(cmd)
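
For reference, a control file could have overridden the run_once() defaults above; the scratch directory here is a hypothetical example, and the keyword names map onto fsstress's -d/-p/-n options exactly as run_once() builds them.

    job.run_test('fsstress',
                 testdir='/mnt/scratch',   # hypothetical scratch directory (-d)
                 nproc='500',              # processes to spawn (-p)
                 nops='2000')              # operations per process (-n)
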
diff --git a/client/tests/fsstress/makefile.patch b/client/tests/fsstress/makefile.patch
deleted file mode 100644
index e0be0ec..0000000
--- a/client/tests/fsstress/makefile.patch
+++ /dev/null
@@ -1,9 +0,0 @@
---- ext3-tools/Makefile.orig	2011-02-07 15:52:18.000000000 -0800
-+++ ext3-tools/Makefile	2011-02-07 15:52:23.000000000 -0800
-@@ -1,5 +1,5 @@
- 
--CC	=	gcc
-+CC	?=	gcc
- CFLAGS	+= 	-O -Wall -g -DAIO
- CXXFLAGS+=	-O -Wall -g
- 
diff --git a/client/tests/fsx/0001-Minor-fixes-to-PAGE_SIZE-handling.patch b/client/tests/fsx/0001-Minor-fixes-to-PAGE_SIZE-handling.patch
deleted file mode 100644
index 58d4c2c..0000000
--- a/client/tests/fsx/0001-Minor-fixes-to-PAGE_SIZE-handling.patch
+++ /dev/null
@@ -1,69 +0,0 @@
-From 34e3152b64368d4a3672084ebae565d55f320f5d Mon Sep 17 00:00:00 2001
-From: Lucas Meneghel Rodrigues <lmr@redhat.com>
-Date: Tue, 18 May 2010 10:40:15 -0300
-Subject: [PATCH 1/2] Minor fixes to PAGE_SIZE handling
-
-Signed-off-by: Suzuki <suzuki@in.ibm.com>
----
- fsx-linux.c |   10 +++++++---
- 1 files changed, 7 insertions(+), 3 deletions(-)
-
-diff --git a/fsx-linux.c b/fsx-linux.c
-index a1642d2..33eb770 100644
---- a/fsx-linux.c
-+++ b/fsx-linux.c
-@@ -10,6 +10,8 @@
-  *
-  *	Small changes to work under Linux -- davej@suse.de
-  *
-+ *	Minor fixes to PAGE_SIZE handling -- Suzuki <suzuki@in.ibm.com>.
-+ *
-  */
- 
- #undef _XOPEN_SOURCE
-@@ -74,7 +76,7 @@ int			logcount = 0;	/* total ops */
- #define OP_SKIPPED	7
- 
- #ifndef PAGE_SIZE
--#define PAGE_SIZE       4096
-+#define PAGE_SIZE       pagesize
- #endif
- #define PAGE_MASK       (PAGE_SIZE - 1)
- 
-@@ -129,6 +131,7 @@ int aio_rw(int rw, int fd, char *buf, unsigned len, unsigned offset);
- FILE *	fsxlogf = NULL;
- int badoff = -1;
- int closeopen = 0;
-+int pagesize = 0;
- 
- static void *round_up(void *ptr, unsigned long align, unsigned long offset)
- {
-@@ -493,7 +496,7 @@ domapread(unsigned offset, unsigned size)
- 		    offset, offset + size - 1, size);
- 
- 	pg_offset = offset & PAGE_MASK;
--	map_size  = pg_offset + size;
-+	map_size  = (pg_offset + size + PAGE_MASK) & ~PAGE_MASK;
- 
- #ifdef linux
- 	if ((p = (char *)mmap(0, map_size, PROT_READ, MAP_SHARED, fd,
-@@ -638,7 +641,7 @@ domapwrite(unsigned offset, unsigned size)
- 		}
- 	}
- 	pg_offset = offset & PAGE_MASK;
--	map_size  = pg_offset + size;
-+	map_size  = (pg_offset + size + PAGE_MASK) & ~PAGE_MASK;
- 
- 	if ((p = (char *)mmap(0, map_size, PROT_READ | PROT_WRITE,
- 			      MAP_FILE | MAP_SHARED, fd,
-@@ -1106,6 +1109,7 @@ main(int argc, char **argv)
- 	if (argc != 1)
- 		usage();
- 	fname = argv[0];
-+	pagesize = getpagesize();
- 
- 	signal(SIGHUP,	cleanup);
- 	signal(SIGINT,	cleanup);
--- 
-1.7.0.1
-
diff --git a/client/tests/fsx/0002-Enable-cross-compiling-for-fsx.patch b/client/tests/fsx/0002-Enable-cross-compiling-for-fsx.patch
deleted file mode 100644
index 5988c3c..0000000
--- a/client/tests/fsx/0002-Enable-cross-compiling-for-fsx.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-From ee9798e2d40e56427e99f40640b6158926ec2a99 Mon Sep 17 00:00:00 2001
-From: Lucas Meneghel Rodrigues <lmr@redhat.com>
-Date: Tue, 18 May 2010 10:41:01 -0300
-Subject: [PATCH 2/2] Enable cross compiling for fsx
-
-Signed-off-by: Eric Li <ericli@google.com>
----
- Makefile |    2 +-
- 1 files changed, 1 insertions(+), 1 deletions(-)
-
-diff --git a/Makefile b/Makefile
-index 337c023..0ebfec2 100644
---- a/Makefile
-+++ b/Makefile
-@@ -1,5 +1,5 @@
- 
--CC	=	gcc
-+CC	?=	gcc
- CFLAGS	+= 	-O -Wall -g -DAIO
- CXXFLAGS+=	-O -Wall -g
- 
--- 
-1.7.0.1
-
diff --git a/client/tests/fsx/0003-Fix-Link-Options.patch b/client/tests/fsx/0003-Fix-Link-Options.patch
deleted file mode 100644
index bfbba58..0000000
--- a/client/tests/fsx/0003-Fix-Link-Options.patch
+++ /dev/null
@@ -1,23 +0,0 @@
---- a/Makefile	2014-01-29 16:25:19.476592865 -0800
-+++ b/Makefile	2014-01-29 16:27:18.937995212 -0800
-@@ -98,16 +98,16 @@
- fsstress.c:		global.h xfscompat.h
- 
- aio-test:		aio-test.o
--	$(CC) $(LDFLAGS) -laio aio-test.o -o aio-test
-+	$(CC) ${CFLAGS} $(LDFLAGS) aio-test.o -o aio-test -laio
- 
- aio-stress:		aio-stress.o
--	$(CC) $(LDFLAGS) -laio -lpthread aio-stress.o -o aio-stress
-+	$(CC) ${CFLAGS} $(LDFLAGS) aio-stress.o -o aio-stress -laio -lpthread
- 
- fsx-linux:		fsx-linux.o
--	$(CC) $(LDFLAGS) -laio fsx-linux.o -o fsx-linux
-+	$(CC) ${CFLAGS} $(LDFLAGS) fsx-linux.o -o fsx-linux -laio
- 
- rawread:		rawread.o
--	$(CC) $(LDFLAGS) -laio -lpthread rawread.o -o rawread
-+	$(CC) ${CFLAGS} $(LDFLAGS) rawread.o -o rawread -laio -lpthread
- 
- #pollbench:		pollbench.o
- #	$(CXX) $(LDFLAGS) -lpthread polbench.o -o pollbench
diff --git a/client/tests/fsx/control b/client/tests/fsx/control
deleted file mode 100644
index 8a37900..0000000
--- a/client/tests/fsx/control
+++ /dev/null
@@ -1,10 +0,0 @@
-AUTHOR = "Randy.Dunlap <rdunlap@xenotime.net>"
-NAME = "fsx"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-TIME = 'MEDIUM'
-DOC='''
-Fsx is a file system regression test.
-'''
-job.run_test('fsx')
diff --git a/client/tests/fsx/ext3-tools.tar.gz b/client/tests/fsx/ext3-tools.tar.gz
deleted file mode 100644
index db48be5..0000000
--- a/client/tests/fsx/ext3-tools.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/fsx/fsx.py b/client/tests/fsx/fsx.py
deleted file mode 100644
index 18f3720..0000000
--- a/client/tests/fsx/fsx.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# This requires aio headers to build.
-# Should work automagically out of deps now.
-
-# NOTE - this should also have the ability to mount a filesystem,
-# run the tests, unmount it, then fsck the filesystem
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class fsx(test.test):
-    """Test to run fsx-linux."""
-    version = 3
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://www.zip.com.au/~akpm/linux/patches/stuff/ext3-tools.tar.gz
-    def setup(self, tarball = 'ext3-tools.tar.gz'):
-        self.tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(self.tarball, self.srcdir)
-
-        os.chdir(self.srcdir)
-        for p in ['0001-Minor-fixes-to-PAGE_SIZE-handling.patch',
-                  '0002-Enable-cross-compiling-for-fsx.patch',
-                  '0003-Fix-Link-Options.patch']:
-            utils.system('patch -p1 < ../%s' % p)
-        utils.system('make fsx-linux')
-
-
-    def run_once(self, dir=None, repeat=100000):
-        args = '-N %s' % repeat
-        if not dir:
-            dir = self.tmpdir
-        os.chdir(dir)
-        utils.system(' '.join([os.path.join(self.srcdir, 'fsx-linux'),
-                               args, 'poo']))
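
A control file could likewise have tuned the fsx wrapper above; the directory is a hypothetical example, and repeat maps onto fsx's -N option as shown in run_once().

    job.run_test('fsx', dir='/mnt/scratch', repeat=50000)   # 50000 fsx operations
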
diff --git a/client/tests/hackbench/control b/client/tests/hackbench/control
deleted file mode 100644
index a57efb2..0000000
--- a/client/tests/hackbench/control
+++ /dev/null
@@ -1,16 +0,0 @@
-AUTHOR = "ncrao@google.com (Nikhil Rao)"
-NAME = "Hackbench"
-ATTRIBUTES = "suite:kernel_per-build_benchmarks"
-TIME = "SHORT"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Benchmark"
-TEST_TYPE = "client"
-
-DOC = """
-Hackbench is a benchmark for measuring the performance, overhead and
-scalability of the Linux scheduler.
-
-hackbench.c copied from:
-http://people.redhat.com/~mingo/cfs-scheduler/tools/hackbench.c
-"""
-job.run_test('hackbench')
diff --git a/client/tests/hackbench/hackbench.py b/client/tests/hackbench/hackbench.py
deleted file mode 100644
index 15e93d7..0000000
--- a/client/tests/hackbench/hackbench.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class hackbench(test.test):
-    """
-    This module will run the hackbench benchmark. Hackbench is a benchmark for
-    measuring the performance, overhead and scalability of the Linux scheduler.
-    The C program was picked from Ingo Molnar's page.
-
-    @author: Nikhil Rao (ncrao@google.com)
-    @see: http://people.redhat.com/~mingo/cfs-scheduler/tools/hackbench.c
-    """
-    version = 1
-    preserve_srcdir = True
-
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        if 'CC' in os.environ:
-            cc = '$CC'
-        else:
-            cc = 'cc'
-        utils.system('%s -lpthread hackbench.c -o hackbench' % cc)
-
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.results = None
-
-
-    def run_once(self, num_groups=90):
-        """
-        Run hackbench, store the output in raw output files per iteration and
-        also in the results list attribute.
-
-        @param num_groups: Number of children processes hackbench will spawn.
-        """
-        hackbench_bin = os.path.join(self.srcdir, 'hackbench')
-        cmd = '%s %s' % (hackbench_bin, num_groups)
-        raw_output = utils.system_output(cmd, retain_output=True)
-        self.results = raw_output
-
-        path = os.path.join(self.resultsdir, 'raw_output_%s' % self.iteration)
-        utils.open_write_close(path, raw_output)
-
-
-    def postprocess_iteration(self):
-        """
-        Pick up the results attribute and write it in the performance keyval.
-        """
-        lines = self.results.split('\n')
-        for line in lines:
-            if line.startswith('Time:'):
-                time_val = line.split()[1]
-                self.write_perf_keyval({'time': time_val})
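
For reference, the only knob run_once() above exposes is the group count; a control file could have overridden the default of 90, for example:

    job.run_test('hackbench', num_groups=50)   # 50 groups of 20 senders and 20 receivers
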
diff --git a/client/tests/hackbench/src/hackbench.c b/client/tests/hackbench/src/hackbench.c
deleted file mode 100644
index 4376719..0000000
--- a/client/tests/hackbench/src/hackbench.c
+++ /dev/null
@@ -1,384 +0,0 @@
-
-/*
- * This is the latest version of hackbench.c, that tests scheduler and
- * unix-socket (or pipe) performance.
- *
- * Usage: hackbench [-pipe] <num groups> [process|thread] [loops]
- *
- * Build it with:
- *   gcc -g -Wall -O2 -o hackbench hackbench.c -lpthread
- */
-#if 0
-
-Date: Fri, 04 Jan 2008 14:06:26 +0800
-From: "Zhang, Yanmin" <yanmin_zhang@linux.intel.com>
-To: LKML <linux-kernel@vger.kernel.org>
-Subject: Improve hackbench
-Cc: Ingo Molnar <mingo@elte.hu>, Arjan van de Ven <arjan@infradead.org>
-
-hackbench tests the Linux scheduler. The original program is at
-http://devresources.linux-foundation.org/craiger/hackbench/src/hackbench.c
-Based on this multi-process version, a nice person created a multi-thread
-version. Pls. see
-http://www.bullopensource.org/posix/pi-futex/hackbench_pth.c
-
-When I integrated them into my automation testing system, I found
-a couple of issues and did some improvements.
-
-1) Merge hackbench: I integrated hackbench_pth.c into hackbench and added a
-new parameter which can be used to choose process mode or thread mode. The
-default mode is process.
-
-2) It runs too fast and ends in a couple of seconds. Sometimes it's too hard to debug
-the issues. On my ia64 Montecito machines, the result looks weird when comparing
-process mode and thread mode.
-I want a stable result and hope the testing could run for a stable longer time, so I
-might use performance tools to debug issues.
-I added another new parameter,`loops`, which can be used to change variable loops,
-so more messages will be passed from writers to receivers. Parameter 'loops' is equal to
-100 by default.
-
-For example on my 8-core x86_64:
-[ymzhang@lkp-st01-x8664 hackbench]$ uname -a
-Linux lkp-st01-x8664 2.6.24-rc6 #1 SMP Fri Dec 21 08:32:31 CST 2007 x86_64 x86_64 x86_64 GNU/Linux
-[ymzhang@lkp-st01-x8664 hackbench]$ ./hackbench
-Usage: hackbench [-pipe] <num groups> [process|thread] [loops]
-[ymzhang@lkp-st01-x8664 hackbench]$ ./hackbench 150 process 1000
-Time: 151.533
-[ymzhang@lkp-st01-x8664 hackbench]$ ./hackbench 150 thread 1000
-Time: 153.666
-
-
-With the same new parameters, I did captured the SLUB issue discussed on LKML recently.
-
-3) hackbench_pth.c will fail on ia64 machine because pthread_attr_setstacksize always
-fails if the stack size is less than 196*1024. I moved this statement within a __ia64__ check.
-
-
-This new program could be compiled with command line:
-#gcc -g -Wall  -o hackbench hackbench.c -lpthread
-
-
-Thank Ingo for his great comments!
-
--yanmin
-
----
-
-* Nathan Lynch <ntl@pobox.com> wrote:
-
-> Here's a fixlet for the hackbench program found at
->
-> http://people.redhat.com/mingo/cfs-scheduler/tools/hackbench.c
->
-> When redirecting hackbench output I am seeing multiple copies of the
-> "Running with %d*40 (== %d) tasks" line.  Need to flush the buffered
-> output before forking.
-
-#endif
-
-/* Test groups of 20 processes spraying to 20 receivers */
-#include <pthread.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <errno.h>
-#include <unistd.h>
-#include <sys/types.h>
-#include <sys/socket.h>
-#include <sys/wait.h>
-#include <sys/time.h>
-#include <sys/poll.h>
-#include <limits.h>
-
-#define DATASIZE 100
-static unsigned int loops = 100;
-/*
- * 0 means thread mode and others mean process (default)
- */
-static unsigned int process_mode = 1;
-
-static int use_pipes = 0;
-
-struct sender_context {
-	unsigned int num_fds;
-	int ready_out;
-	int wakefd;
-	int out_fds[0];
-};
-
-struct receiver_context {
-	unsigned int num_packets;
-	int in_fds[2];
-	int ready_out;
-	int wakefd;
-};
-
-
-static void barf(const char *msg)
-{
-	fprintf(stderr, "%s (error: %s)\n", msg, strerror(errno));
-	exit(1);
-}
-
-static void print_usage_exit()
-{
-	printf("Usage: hackbench [-pipe] <num groups> [process|thread] [loops]\n");
-	exit(1);
-}
-
-static void fdpair(int fds[2])
-{
-	if (use_pipes) {
-		if (pipe(fds) == 0)
-			return;
-	} else {
-		if (socketpair(AF_UNIX, SOCK_STREAM, 0, fds) == 0)
-			return;
-	}
-	barf("Creating fdpair");
-}
-
-/* Block until we're ready to go */
-static void ready(int ready_out, int wakefd)
-{
-	char dummy;
-	struct pollfd pollfd = { .fd = wakefd, .events = POLLIN };
-
-	/* Tell them we're ready. */
-	if (write(ready_out, &dummy, 1) != 1)
-		barf("CLIENT: ready write");
-
-	/* Wait for "GO" signal */
-	if (poll(&pollfd, 1, -1) != 1)
-		barf("poll");
-}
-
-/* Sender sprays loops messages down each file descriptor */
-static void *sender(struct sender_context *ctx)
-{
-	char data[DATASIZE];
-	unsigned int i, j;
-
-	ready(ctx->ready_out, ctx->wakefd);
-
-	/* Now pump to every receiver. */
-	for (i = 0; i < loops; i++) {
-		for (j = 0; j < ctx->num_fds; j++) {
-			int ret, done = 0;
-
-again:
-			ret = write(ctx->out_fds[j], data + done, sizeof(data)-done);
-			if (ret < 0)
-				barf("SENDER: write");
-			done += ret;
-			if (done < sizeof(data))
-				goto again;
-		}
-	}
-
-	return NULL;
-}
-
-
-/* One receiver per fd */
-static void *receiver(struct receiver_context* ctx)
-{
-	unsigned int i;
-
-	if (process_mode)
-		close(ctx->in_fds[1]);
-
-	/* Wait for start... */
-	ready(ctx->ready_out, ctx->wakefd);
-
-	/* Receive them all */
-	for (i = 0; i < ctx->num_packets; i++) {
-		char data[DATASIZE];
-		int ret, done = 0;
-
-again:
-		ret = read(ctx->in_fds[0], data + done, DATASIZE - done);
-		if (ret < 0)
-			barf("SERVER: read");
-		done += ret;
-		if (done < DATASIZE)
-			goto again;
-	}
-
-	return NULL;
-}
-
-pthread_t create_worker(void *ctx, void *(*func)(void *))
-{
-	pthread_attr_t attr;
-	pthread_t childid;
-	int err;
-
-	if (process_mode) {
-		/* process mode */
-		/* Fork the receiver. */
-		switch (fork()) {
-			case -1: barf("fork()");
-			case 0:
-				(*func) (ctx);
-				exit(0);
-		}
-
-		return (pthread_t) 0;
-	}
-
-	if (pthread_attr_init(&attr) != 0)
-		barf("pthread_attr_init:");
-
-#ifndef __ia64__
-	if (pthread_attr_setstacksize(&attr, PTHREAD_STACK_MIN) != 0)
-		barf("pthread_attr_setstacksize");
-#endif
-
-	if ((err=pthread_create(&childid, &attr, func, ctx)) != 0) {
-		fprintf(stderr, "pthread_create failed: %s (%d)\n", strerror(err), err);
-		exit(-1);
-	}
-	return (childid);
-}
-
-void reap_worker(pthread_t id)
-{
-	int status;
-
-	if (process_mode) {
-		/* process mode */
-		wait(&status);
-		if (!WIFEXITED(status))
-			exit(1);
-	} else {
-		void *status;
-
-		pthread_join(id, &status);
-	}
-}
-
-/* One group of senders and receivers */
-static unsigned int group(pthread_t *pth,
-		unsigned int num_fds,
-		int ready_out,
-		int wakefd)
-{
-	unsigned int i;
-	struct sender_context* snd_ctx = malloc (sizeof(struct sender_context)
-			+num_fds*sizeof(int));
-
-	for (i = 0; i < num_fds; i++) {
-		int fds[2];
-		struct receiver_context* ctx = malloc (sizeof(*ctx));
-
-		if (!ctx)
-			barf("malloc()");
-
-
-		/* Create the pipe between client and server */
-		fdpair(fds);
-
-		ctx->num_packets = num_fds*loops;
-		ctx->in_fds[0] = fds[0];
-		ctx->in_fds[1] = fds[1];
-		ctx->ready_out = ready_out;
-		ctx->wakefd = wakefd;
-
-		pth[i] = create_worker(ctx, (void *)(void *)receiver);
-
-		snd_ctx->out_fds[i] = fds[1];
-		if (process_mode)
-			close(fds[0]);
-	}
-
-	/* Now we have all the fds, fork the senders */
-	for (i = 0; i < num_fds; i++) {
-		snd_ctx->ready_out = ready_out;
-		snd_ctx->wakefd = wakefd;
-		snd_ctx->num_fds = num_fds;
-
-		pth[num_fds+i] = create_worker(snd_ctx, (void *)(void *)sender);
-	}
-
-	/* Close the fds we have left */
-	if (process_mode)
-		for (i = 0; i < num_fds; i++)
-			close(snd_ctx->out_fds[i]);
-
-	/* Return number of children to reap */
-	return num_fds * 2;
-}
-
-int main(int argc, char *argv[])
-{
-	unsigned int i, num_groups = 10, total_children;
-	struct timeval start, stop, diff;
-	unsigned int num_fds = 20;
-	int readyfds[2], wakefds[2];
-	char dummy;
-	pthread_t *pth_tab;
-
-	if (argv[1] && strcmp(argv[1], "-pipe") == 0) {
-		use_pipes = 1;
-		argc--;
-		argv++;
-	}
-
-	if (argc >= 2 && (num_groups = atoi(argv[1])) == 0)
-		print_usage_exit();
-
-	printf("Running with %d*40 (== %d) tasks.\n",
-		num_groups, num_groups*40);
-
-	fflush(NULL);
-
-	if (argc > 2) {
-		if ( !strcmp(argv[2], "process") )
-			process_mode = 1;
-		else if ( !strcmp(argv[2], "thread") )
-			process_mode = 0;
-		else
-			print_usage_exit();
-	}
-
-	if (argc > 3)
-		loops = atoi(argv[3]);
-
-	pth_tab = malloc(num_fds * 2 * num_groups * sizeof(pthread_t));
-
-	if (!pth_tab)
-		barf("main:malloc()");
-
-	fdpair(readyfds);
-	fdpair(wakefds);
-
-	total_children = 0;
-	for (i = 0; i < num_groups; i++)
-		total_children += group(pth_tab+total_children, num_fds, readyfds[1], wakefds[0]);
-
-	/* Wait for everyone to be ready */
-	for (i = 0; i < total_children; i++)
-		if (read(readyfds[0], &dummy, 1) != 1)
-			barf("Reading for readyfds");
-
-	gettimeofday(&start, NULL);
-
-	/* Kick them off */
-	if (write(wakefds[1], &dummy, 1) != 1)
-		barf("Writing to start them");
-
-	/* Reap them all */
-	for (i = 0; i < total_children; i++)
-		reap_worker(pth_tab[i]);
-
-	gettimeofday(&stop, NULL);
-
-	/* Print time... */
-	timersub(&stop, &start, &diff);
-	printf("Time: %lu.%03lu\n", diff.tv_sec, diff.tv_usec/1000);
-	exit(0);
-}
-
-
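
Outside of autotest, the binary above can also be driven directly, following its usage line and the "Time: ..." line printed by main(); a minimal sketch, assuming the binary has been built locally at the path shown.

    import re
    import subprocess

    def run_hackbench(binary='./hackbench', groups=10, mode='process', loops=100):
        # Usage: hackbench [-pipe] <num groups> [process|thread] [loops]
        out = subprocess.run([binary, str(groups), mode, str(loops)],
                             capture_output=True, text=True, check=True).stdout
        match = re.search(r'^Time:\s*([\d.]+)', out, re.MULTILINE)
        return float(match.group(1)) if match else None

    # print(run_hackbench(groups=50, loops=1000))
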
diff --git a/client/tests/hwclock/control b/client/tests/hwclock/control
deleted file mode 100644
index bf3e9d3..0000000
--- a/client/tests/hwclock/control
+++ /dev/null
@@ -1,12 +0,0 @@
-AUTHOR = "Martin J. Bligh <mbligh@mbligh.org>"
-NAME = "Hwclock"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-
-DOC = """
-This test checks that we can set and read the hwclock successfully
-"""
-
-job.run_test('hwclock')
diff --git a/client/tests/hwclock/hwclock.py b/client/tests/hwclock/hwclock.py
deleted file mode 100644
index 3ee3975..0000000
--- a/client/tests/hwclock/hwclock.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-import re, logging
-
-class hwclock(test.test):
-    version = 1
-
-    def run_once(self):
-        """
-        Set hwclock back to a date in 1980 and verify if the changes took
-        effect in the system.
-        """
-        logging.info('Setting hwclock to 2/2/80 03:04:00')
-        utils.set_hwclock(time='2/2/80 03:04:00')
-        date = utils.system_output('LC_ALL=C /sbin/hwclock')
-        if not re.match('Sat *Feb *2 *03:04:.. 1980', date):
-            raise error.TestFail("Failed to set hwclock back to the eighties. "
-                                 "Output of hwclock is '%s'" % date)
-
-
-    def cleanup(self):
-        """
-        Restore hardware clock to current system time.
-        """
-        logging.info('Restoring the hardware clock')
-        utils.set_hwclock(time='system', utc=True, noadjfile=True)
diff --git a/client/tests/interbench/control b/client/tests/interbench/control
deleted file mode 100644
index 5500f05..0000000
--- a/client/tests/interbench/control
+++ /dev/null
@@ -1,12 +0,0 @@
-AUTHOR = "Brandon Philips <brandon@ifup.org>"
-NAME = "interbench"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-TIME = 'MEDIUM'
-DOC='''
-Information about 'interbench' can be found at:
-http://members.optusnet.com.au/ckolivas/interbench/
-This test simply runs interbench.
-'''
-job.run_test('interbench')
diff --git a/client/tests/interbench/interbench-0.30.tar.bz2 b/client/tests/interbench/interbench-0.30.tar.bz2
deleted file mode 100644
index 275d99b..0000000
--- a/client/tests/interbench/interbench-0.30.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/interbench/interbench.py b/client/tests/interbench/interbench.py
deleted file mode 100644
index ffee90b..0000000
--- a/client/tests/interbench/interbench.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class interbench(test.test):
-    version = 1
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://www.kernel.org/pub/linux/kernel/people/ck/apps/interbench/interbench-0.30.tar.bz2
-    def setup(self, tarball = 'interbench-0.30.tar.bz2'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        utils.system('patch -p1 < ../makefile.patch')
-        utils.make()
-
-
-    def run_once(self, args = ''):
-        os.chdir(self.tmpdir)
-        args += " -c"
-        utils.system("%s/interbench -m 'run #%s' %s" % (self.srcdir,
-                                                        self.iteration, args))
diff --git a/client/tests/interbench/makefile.patch b/client/tests/interbench/makefile.patch
deleted file mode 100644
index 99ed3c2..0000000
--- a/client/tests/interbench/makefile.patch
+++ /dev/null
@@ -1,8 +0,0 @@
---- interbench-0.30/Makefile.orig	2011-02-07 19:56:51.000000000 -0800
-+++ interbench-0.30/Makefile	2011-02-07 19:57:15.000000000 -0800
-@@ -1,4 +1,4 @@
--CC=gcc
-+CC ?= gcc
- CFLAGS=-W -Wall -g -O2 -s -pipe
- LDFLAGS=-lrt -lm
- 
diff --git a/client/tests/iosched_bugs/control b/client/tests/iosched_bugs/control
deleted file mode 100644
index 41615b7..0000000
--- a/client/tests/iosched_bugs/control
+++ /dev/null
@@ -1,16 +0,0 @@
-NAME = "iosched bugs"
-AUTHOR = "Divyesh Shah (dpshah@google.com)"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-TIME = "SHORT"
-DOC = """\
-This is a functional test for the bug in the AS I/O scheduler where
-reads or writes can be starved when switching batches while a request from the
-previous batch is still in flight. Without the bugfix (220.4 and earlier
-kernels) this test case should see writes starved forever; with the
-bugfix it should see the writer making forward progress.
-"""
-
-
-job.run_test('iosched_bugs')
diff --git a/client/tests/iosched_bugs/iosched_bugs.py b/client/tests/iosched_bugs/iosched_bugs.py
deleted file mode 100644
index f919fcc..0000000
--- a/client/tests/iosched_bugs/iosched_bugs.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import os, time
-import subprocess
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import utils, error
-
-
-class iosched_bugs(test.test):
-    version = 1
-    preserve_srcdir = True
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def execute(self):
-        os.chdir(self.tmpdir)
-        (p1, _) = utils.run_bg('dd if=/dev/hda3 of=/dev/null')
-        time.sleep(60)
-        blah = os.path.join(self.tmpdir, 'blah')
-        dirty_bin = os.path.join(self.srcdir, 'dirty')
-        dirty_op = os.path.join(self.tmpdir, 'dirty')
-        utils.system('echo AA > ' + blah)
-        p2 = subprocess.Popen(dirty_bin + ' ' + blah + ' 1 > ' + dirty_op,
-                              shell=True)
-        time.sleep(600)
-        if p2.poll() is None:
-            utils.nuke_subprocess(p1)
-            utils.nuke_subprocess(p2)
-            raise error.TestFail('Writes made no progress')
-# Commenting out use of utils.run as there is a timeout bug
-#
-#       try:
-#           utils.run(dirty_bin + ' ' + blah + '1 > ' + dirty_op, 900, False,
-#                     None, None)
-#       except:
-#           utils.nuke_subprocess(p1)
-#           raise error.TestFail('Writes made no progress')
-        utils.nuke_subprocess(p1)
diff --git a/client/tests/iosched_bugs/src/Makefile b/client/tests/iosched_bugs/src/Makefile
deleted file mode 100644
index da205b0..0000000
--- a/client/tests/iosched_bugs/src/Makefile
+++ /dev/null
@@ -1,8 +0,0 @@
-CC=gcc
-
-TESTS=dirty
-
-all: $(TESTS)
-
-dirty: dirty.c
-	$(CC) -o $@ $^
diff --git a/client/tests/iosched_bugs/src/dirty.c b/client/tests/iosched_bugs/src/dirty.c
deleted file mode 100644
index 0ed32b6..0000000
--- a/client/tests/iosched_bugs/src/dirty.c
+++ /dev/null
@@ -1,60 +0,0 @@
-// Author: Suleiman Souhlal (suleiman@google.com)
-
-#include <stdio.h>
-#include <err.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <sys/mman.h>
-#include <fcntl.h>
-
-#define O_NOATIME     01000000 
-
-inline uint64_t
-rdtsc(void)
-{
-	int64_t tsc;
-
-	__asm __volatile("rdtsc" : "=A" (tsc));
-	return (tsc);
-}
-
-int
-main(int argc, char **argv)
-{
-	struct stat st;
-	uint64_t e, s, t;
-	char *p, q;
-	long i;
-	int fd;
-
-	if (argc < 2) {
-		printf("Usage: %s <file>\n", argv[0]);
-		return (1);
-	}
-
-	if ((fd = open(argv[1], O_RDWR | O_NOATIME)) < 0)
-		err(1, "open");
-
-	if (fstat(fd, &st) < 0)
-		err(1, "fstat");
-
-	p = mmap(NULL, st.st_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
-
-	t = 0;
-	for (i = 0; i < 1000; i++) {
-		*p = 0;
-		msync(p, 4096, MS_SYNC);
-		s = rdtsc();
-		*p = 0;
-		__asm __volatile(""::: "memory");
-		e = rdtsc();
-		if (argc > 2)
-			printf("%d: %lld cycles %jd %jd\n", i, e - s, (intmax_t)s, (intmax_t)e);
-		t += e - s;
-	}
-
-	printf("average time: %lld cycles\n", t / 1000);
-
-	return (0);
-}
diff --git a/client/tests/iozone/__init__.py b/client/tests/iozone/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/tests/iozone/__init__.py
+++ /dev/null
diff --git a/client/tests/iozone/clang_fortify.patch b/client/tests/iozone/clang_fortify.patch
deleted file mode 100644
index 3457321..0000000
--- a/client/tests/iozone/clang_fortify.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-This patch deletes redeclarations of standard library functions. This is needed
-for clang-style FORTIFY, as it will emit errors when it sees these
-redeclarations.
-
-diff --git iozone3_347/src/current/fileop.c iozone3_347/src/current/fileop.c
-index 57ddf68..779f24c 100644
---- iozone3_347/src/current/fileop.c
-+++ iozone3_347/src/current/fileop.c
-@@ -116,7 +116,6 @@ void file_unlink(int);
- void file_read(int);
- void splash(void);
- void usage(void);
--void bzero();
- void clear_stats();
- int validate(char *, int , char );
- 
-diff --git iozone3_347/src/current/iozone.c iozone3_347/src/current/iozone.c
-index 1b291b8..6c7d3ac 100644
---- iozone3_347/src/current/iozone.c
-+++ iozone3_347/src/current/iozone.c
-@@ -988,7 +988,6 @@ int sp_start_master_listen();
- #ifdef HAVE_ANSIC_C
- #if defined (HAVE_PREAD) && defined(_LARGEFILE64_SOURCE)
- ssize_t pwrite64(); 
--ssize_t pread64(); 
- #endif
- #if !defined(linux)
- char *getenv();
diff --git a/client/tests/iozone/common.py b/client/tests/iozone/common.py
deleted file mode 100644
index ce78b85..0000000
--- a/client/tests/iozone/common.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import os, sys
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-client_dir = os.path.abspath(os.path.join(dirname, "..", ".."))
-sys.path.insert(0, client_dir)
-import setup_modules
-sys.path.pop(0)
-setup_modules.setup(base_path=client_dir,
-                    root_module_name="autotest_lib.client")
diff --git a/client/tests/iozone/control b/client/tests/iozone/control
deleted file mode 100644
index 9244693..0000000
--- a/client/tests/iozone/control
+++ /dev/null
@@ -1,19 +0,0 @@
-AUTHOR = "Ying Tao <yingtao@cn.ibm.com>"
-TIME = "MEDIUM"
-NAME = "IOzone"
-ATTRIBUTES = "suite:kernel_daily_benchmarks"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Benchmark"
-
-DOC = """
-Iozone is useful for performing a broad filesystem analysis of a vendor's
-computer platform. The benchmark tests file I/O performance for the following
-operations:
-      Read, write, re-read, re-write, read backwards, read strided, fread,
-      fwrite, random read, pread, mmap, aio_read, aio_write
-
-For more information see http://www.iozone.org
-"""
-
-job.run_test('iozone')
diff --git a/client/tests/iozone/iozone.py b/client/tests/iozone/iozone.py
deleted file mode 100644
index d04062b..0000000
--- a/client/tests/iozone/iozone.py
+++ /dev/null
@@ -1,171 +0,0 @@
-import os, re
-from autotest_lib.client.bin import test, utils
-import postprocessing
-
-
-class iozone(test.test):
-    """
-    This autotest module runs the IOzone filesystem benchmark. The benchmark
-    generates and measures a variety of file operations. Iozone has been ported
-    to many machines and runs under many operating systems.
-
-    Iozone is useful for performing a broad filesystem analysis of a vendor's
-    computer platform. The benchmark tests file I/O performance for the
-    following operations:
-
-    Read, write, re-read, re-write, read backwards, read strided, fread, fwrite,
-    random read, pread, mmap, aio_read, aio_write
-
-    @author: Ying Tao (yingtao@cn.ibm.com)
-    @see: http://www.iozone.org
-    """
-    version = 3
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def setup(self, tarball='iozone3_347.tar'):
-        """
-        Builds the given version of IOzone from a tarball.
-        @param tarball: Tarball with IOzone
-        @see: http://www.iozone.org/src/current/iozone3_347.tar
-        """
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(os.path.join(self.srcdir, 'src/current'))
-        utils.system('patch -p3 < ../../../makefile.patch')
-        utils.system('patch -p3 < ../../../clang_fortify.patch')
-
-        ctarget = os.getenv('CTARGET_default')
-
-        if (ctarget == 'armv7a-cros-linux-gnueabihf'):
-            utils.make('linux-arm')
-        elif (ctarget == 'i686-pc-linux-gnu'):
-            utils.make('linux')
-        elif (ctarget == 'x86_64-cros-linux-gnu'):
-            utils.make('linux-AMD64')
-        else:
-            utils.make('linux')
-
-    def run_once(self, dir=None, args=None):
-        """
-        Runs IOzone with appropriate parameters, record raw results in a per
-        iteration raw output file as well as in the results attribute
-
-        @param dir: IOzone file generation dir.
-        @param args: Arguments to the iozone program.
-        """
-        if not dir:
-            dir = self.tmpdir
-        os.chdir(dir)
-        if not args:
-            args = '-a'
-
-        cmd = os.path.join(self.srcdir, 'src', 'current', 'iozone')
-        self.results = utils.system_output('%s %s' % (cmd, args))
-        self.auto_mode = ("-a" in args)
-
-        self.results_path = os.path.join(self.resultsdir,
-                                         'raw_output_%s' % self.iteration)
-        self.analysisdir = os.path.join(self.resultsdir,
-                                        'analysis_%s' % self.iteration)
-
-        utils.open_write_close(self.results_path, self.results)
-
-
-    def __get_section_name(self, desc):
-        return desc.strip().replace(' ', '_')
-
-
-    def generate_keyval(self):
-        """
-        Generates a keylist.
-        """
-        keylist = {}
-
-        if self.auto_mode:
-            labels = ('write', 'rewrite', 'read', 'reread', 'randread',
-                      'randwrite', 'bkwdread', 'recordrewrite',
-                      'strideread', 'fwrite', 'frewrite', 'fread', 'freread')
-            for line in self.results.splitlines():
-                fields = line.split()
-                if len(fields) != 15:
-                    continue
-                try:
-                    fields = tuple([int(i) for i in fields])
-                except ValueError:
-                    continue
-                for l, v in zip(labels, fields[2:]):
-                    key_name = "%d-%d-%s" % (fields[0], fields[1], l)
-                    keylist[key_name] = v
-        else:
-            child_regexp  = re.compile('Children see throughput for[\s]+'
-                            '([\d]+)\s+([-\w]+[-\w\s]*)\=[\s]+([\d\.]*) KB/sec')
-            parent_regexp = re.compile('Parent sees throughput for[\s]+'
-                            '([\d]+)\s+([-\w]+[-\w\s]*)\=[\s]+([\d\.]*) KB/sec')
-
-            KBsec_regexp  = re.compile('\=[\s]+([\d\.]*) KB/sec')
-            KBval_regexp  = re.compile('\=[\s]+([\d\.]*) KB')
-
-            section = None
-            w_count = 0
-
-            for line in self.results.splitlines():
-                line = line.strip()
-
-                # Check for the beginning of a new result section
-                match = child_regexp.search(line)
-                if match:
-                    # Extract the section name and the worker count
-                    w_count = int(match.group(1))
-                    section = self.__get_section_name(match.group(2))
-
-                    # Output the appropriate keyval pair
-                    key_name = '%s-%d-kids' % (section, w_count)
-                    keylist[key_name] = match.group(3)
-                    continue
-
-                # Check for any other interesting lines
-                if '=' in line:
-                    # Is it something we recognize? First check for parent.
-                    match = parent_regexp.search(line)
-                    if match:
-                        # The section name and the worker count better match
-                        p_count = int(match.group(1))
-                        p_secnt = self.__get_section_name(match.group(2))
-                        if p_secnt != section or p_count != w_count:
-                            continue
-
-                        # Set the base name for the keyval
-                        basekey = 'parent'
-                    else:
-                        # Check for the various 'throughput' values
-                        if line[3:26] == ' throughput per thread ':
-                            basekey = line[0:3]
-                            match_x = KBsec_regexp
-                        else:
-                            # The only other thing we expect is 'Min xfer'
-                            if not line.startswith('Min xfer '):
-                                continue
-                            basekey = 'MinXfer'
-                            match_x = KBval_regexp
-
-                        match = match_x.search(line)
-                        if match:
-                            result = match.group(1)
-                            key_name = "%s-%d-%s" % (section, w_count, basekey)
-                            keylist[key_name] = result
-
-        self.write_perf_keyval(keylist)
-
-
-    def postprocess_iteration(self):
-        self.generate_keyval()
-        if self.auto_mode:
-            a = postprocessing.IOzoneAnalyzer(list_files=[self.results_path],
-                                              output_dir=self.analysisdir)
-            a.analyze()
-            p = postprocessing.IOzonePlotter(results_file=self.results_path,
-                                             output_dir=self.analysisdir)
-            p.plot_all()
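
For reference, run_once() above takes a working directory and raw IOzone arguments; '-a' (full automatic mode) is the module's default, and the scratch directory is a hypothetical example.

    job.run_test('iozone')                                  # auto mode, args='-a'
    job.run_test('iozone', dir='/mnt/scratch', args='-a')   # hypothetical scratch dir
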
diff --git a/client/tests/iozone/iozone3_347.tar b/client/tests/iozone/iozone3_347.tar
deleted file mode 100644
index 308b689..0000000
--- a/client/tests/iozone/iozone3_347.tar
+++ /dev/null
Binary files differ
diff --git a/client/tests/iozone/makefile.patch b/client/tests/iozone/makefile.patch
deleted file mode 100644
index 1edbd20..0000000
--- a/client/tests/iozone/makefile.patch
+++ /dev/null
@@ -1,15 +0,0 @@
---- iozone3_347/src/current/makefile.orig	2011-02-07 20:00:02.000000000 -0800
-+++ iozone3_347/src/current/makefile	2011-02-07 20:01:03.000000000 -0800
-@@ -9,9 +9,9 @@
- #		convex, FreeBSD, OpenBSD, OSFV3, OSFV4, OSFV5, SCO
- #		SCO_Unixware_gcc,NetBSD,TRU64, Mac OS X
- 
--CC	= cc
--C89	= c89
--GCC	= gcc
-+CC	?= cc
-+C89	?= c89
-+GCC	?= gcc
- CCS	= /usr/ccs/bin/cc
- NACC	= /opt/ansic/bin/cc
- CFLAGS	=
diff --git a/client/tests/iozone/postprocessing.py b/client/tests/iozone/postprocessing.py
deleted file mode 100755
index 17c7766..0000000
--- a/client/tests/iozone/postprocessing.py
+++ /dev/null
@@ -1,487 +0,0 @@
-#!/usr/bin/python2
-"""
-Postprocessing module for IOzone. It is capable of picking results from an
-IOzone run, calculating the geometric mean of all throughput results for
-a given file size or record size, and then generating a series of 2D and 3D
-graphs. The graph generation functionality depends on gnuplot, and if it
-is not present, functionality degrades gracefully.
-
-@copyright: Red Hat 2010
-"""
-import os, sys, optparse, logging, math, time
-import common
-from autotest_lib.client.common_lib import logging_config, logging_manager
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import utils, os_dep
-
-
-_LABELS = ['file_size', 'record_size', 'write', 'rewrite', 'read', 'reread',
-           'randread', 'randwrite', 'bkwdread', 'recordrewrite', 'strideread',
-           'fwrite', 'frewrite', 'fread', 'freread']
-
-
-def unique(list):
-    """
-    Return a list of the elements in list, but without duplicates.
-
-    @param list: List with values.
-    @return: List with non duplicate elements.
-    """
-    n = len(list)
-    if n == 0:
-        return []
-    u = {}
-    try:
-        for x in list:
-            u[x] = 1
-    except TypeError:
-        return None
-    else:
-        return u.keys()
-
-
-def geometric_mean(values):
-    """
-    Evaluates the geometric mean for a list of numeric values.
-
-    @param values: List with values.
-    @return: Single value representing the geometric mean for the list values.
-    @see: http://en.wikipedia.org/wiki/Geometric_mean
-    """
-    try:
-        values = [int(value) for value in values]
-    except ValueError:
-        return None
-    product = 1
-    n = len(values)
-    if n == 0:
-        return None
-    return math.exp(sum([math.log(x) for x in values])/n)
-
-
-def compare_matrices(matrix1, matrix2, treshold=0.05):
-    """
-    Compare 2 matrices nxm and return a matrix nxm with comparison data
-
-    @param matrix1: Reference Matrix with numeric data
-    @param matrix2: Matrix that will be compared
-    @param treshold: Any difference bigger than this percent treshold will be
-            reported.
-    """
-    improvements = 0
-    regressions = 0
-    same = 0
-    comparison_matrix = []
-
-    new_matrix = []
-    for line1, line2 in zip(matrix1, matrix2):
-        new_line = []
-        for element1, element2 in zip(line1, line2):
-            ratio = float(element2) / float(element1)
-            if ratio < (1 - treshold):
-                regressions += 1
-                new_line.append((100 * ratio - 1) - 100)
-            elif ratio > (1 + treshold):
-                improvements += 1
-                new_line.append("+" + str((100 * ratio - 1) - 100))
-            else:
-                same += 1
-                if line1.index(element1) == 0:
-                    new_line.append(element1)
-                else:
-                    new_line.append(".")
-        new_matrix.append(new_line)
-
-    total = improvements + regressions + same
-
-    return (new_matrix, improvements, regressions, total)
-
-
-class IOzoneAnalyzer(object):
-    """
-    Analyze an unprocessed IOzone file, and generate the following types of
-    report:
-
-    * Summary of throughput for all file and record sizes combined
-    * Summary of throughput for all file sizes
-    * Summary of throughput for all record sizes
-
-    If more than one file is provided to the analyzer object, a comparison
-    between the two runs is made, searching for regressions in performance.
-    """
-    def __init__(self, list_files, output_dir):
-        self.list_files = list_files
-        if not os.path.isdir(output_dir):
-            os.makedirs(output_dir)
-        self.output_dir = output_dir
-        logging.info("Results will be stored in %s", output_dir)
-
-
-    def average_performance(self, results, size=None):
-        """
-        Flattens a list containing performance results.
-
-        @param results: List of n lists containing data from performance runs.
-        @param size: Numerical value of a size (say, file_size) that was used
-                to filter the original results list.
-        @return: List with 1 list containing average data from the performance
-                run.
-        """
-        average_line = []
-        if size is not None:
-            average_line.append(size)
-        for i in range(2, 15):
-            average = geometric_mean([line[i] for line in results]) / 1024.0
-            average = int(average)
-            average_line.append(average)
-        return average_line
-
-
-    def process_results(self, results, label=None):
-        """
-        Process a list of IOzone results according to label.
-
-        @param label: IOzone column label that we'll use to filter and compute
-                geometric mean results, in practical terms either 'file_size'
-                or 'record_size'.
-        @param results: A list of n x m columns with original iozone results.
-        @return: A list of n-? x (m-1) columns with geometric averages for
-                values of each label (ex, average for all file_sizes).
-        """
-        performance = []
-        if label is not None:
-            index = _LABELS.index(label)
-            sizes = unique([line[index] for line in results])
-            sizes.sort()
-            for size in sizes:
-                r_results = [line for line in results if line[index] == size]
-                performance.append(self.average_performance(r_results, size))
-        else:
-            performance.append(self.average_performance(results))
-
-        return performance
-
-
-    def parse_file(self, file):
-        """
-        Parse an IOzone results file.
-
-        @param file: File object that will be parsed.
-        @return: Matrix containing IOzone results extracted from the file.
-        """
-        lines = []
-        for line in file.readlines():
-            fields = line.split()
-            if len(fields) != 15:
-                continue
-            try:
-                lines.append([int(i) for i in fields])
-            except ValueError:
-                continue
-        return lines
-
-
-    def report(self, overall_results, record_size_results, file_size_results):
-        """
-        Generates analysis data for IOZone run.
-
-        Generates a report to both logs (where it goes with nice headers) and
-        output files for further processing (graph generation).
-
-        @param overall_results: 1x15 Matrix containing IOzone results for all
-                file sizes
-        @param record_size_results: nx15 Matrix containing IOzone results for
-                each record size tested.
-        @param file_size_results: nx15 Matrix containing file size results
-                for each file size tested.
-        """
-        # Here we'll use the logging system to put the output of our analysis
-        # to files
-        logger = logging.getLogger()
-        formatter = logging.Formatter("")
-
-        logging.info("")
-        logging.info("TABLE:  SUMMARY of ALL FILE and RECORD SIZES                        Results in MB/sec")
-        logging.info("")
-        logging.info("FILE & RECORD  INIT    RE              RE    RANDOM  RANDOM  BACKWD   RECRE  STRIDE    F       FRE     F       FRE")
-        logging.info("SIZES (KB)     WRITE   WRITE   READ    READ    READ   WRITE    READ   WRITE    READ    WRITE   WRITE   READ    READ")
-        logging.info("-------------------------------------------------------------------------------------------------------------------")
-        for result_line in overall_results:
-            logging.info("ALL            %-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s" % tuple(result_line))
-        logging.info("")
-
-        logging.info("DRILLED DATA:")
-
-        logging.info("")
-        logging.info("TABLE:  RECORD Size against all FILE Sizes                          Results in MB/sec")
-        logging.info("")
-        logging.info("RECORD    INIT    RE              RE    RANDOM  RANDOM  BACKWD   RECRE  STRIDE    F       FRE     F       FRE ")
-        logging.info("SIZE (KB) WRITE   WRITE   READ    READ    READ   WRITE    READ   WRITE    READ    WRITE   WRITE   READ    READ")
-        logging.info("--------------------------------------------------------------------------------------------------------------")
-
-        foutput_path = os.path.join(self.output_dir, '2d-datasource-file')
-        if os.path.isfile(foutput_path):
-            os.unlink(foutput_path)
-        foutput = logging.FileHandler(foutput_path)
-        foutput.setFormatter(formatter)
-        logger.addHandler(foutput)
-        for result_line in record_size_results:
-            logging.info("%-10s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s" % tuple(result_line))
-        logger.removeHandler(foutput)
-
-        logging.info("")
-
-        logging.info("")
-        logging.info("TABLE:  FILE Size against all RECORD Sizes                          Results in MB/sec")
-        logging.info("")
-        logging.info("RECORD    INIT    RE              RE    RANDOM  RANDOM  BACKWD   RECRE  STRIDE    F       FRE     F       FRE ")
-        logging.info("SIZE (KB) WRITE   WRITE   READ    READ    READ   WRITE    READ   WRITE    READ    WRITE   WRITE   READ    READ")
-        logging.info("--------------------------------------------------------------------------------------------------------------")
-
-        routput_path = os.path.join(self.output_dir, '2d-datasource-record')
-        if os.path.isfile(routput_path):
-            os.unlink(routput_path)
-        routput = logging.FileHandler(routput_path)
-        routput.setFormatter(formatter)
-        logger.addHandler(routput)
-        for result_line in file_size_results:
-            logging.info("%-10s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s%-8s" % tuple(result_line))
-        logger.removeHandler(routput)
-
-        logging.info("")
-
-
-    def report_comparison(self, record, file):
-        """
-        Generates comparison data for 2 IOZone runs.
-
-        It compares 2 sets of nxm results and outputs a table with differences.
-        If a difference higher or smaller than 5% is found, a warning is
-        triggered.
-
-        @param record: Tuple with 4 elements containing results for record size.
-        @param file: Tuple with 4 elements containing results for file size.
-        """
-        (record_size, record_improvements, record_regressions,
-         record_total) = record
-        (file_size, file_improvements, file_regressions,
-         file_total) = file
-        logging.info("ANALYSIS of DRILLED DATA:")
-
-        logging.info("")
-        logging.info("TABLE:  RECsize Difference between runs                            Results are % DIFF")
-        logging.info("")
-        logging.info("RECORD    INIT    RE              RE    RANDOM  RANDOM  BACKWD   RECRE  STRIDE    F       FRE     F       FRE ")
-        logging.info("SIZE (KB) WRITE   WRITE   READ    READ    READ   WRITE    READ   WRITE    READ    WRITE   WRITE   READ    READ")
-        logging.info("--------------------------------------------------------------------------------------------------------------")
-        for result_line in record_size:
-            logging.info("%-10s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s" % tuple(result_line))
-        logging.info("REGRESSIONS: %d (%.2f%%)    Improvements: %d (%.2f%%)",
-                     record_regressions,
-                     (100 * record_regressions/float(record_total)),
-                     record_improvements,
-                     (100 * record_improvements/float(record_total)))
-        logging.info("")
-
-        logging.info("")
-        logging.info("TABLE:  FILEsize Difference between runs                           Results are % DIFF")
-        logging.info("")
-        logging.info("RECORD    INIT    RE              RE    RANDOM  RANDOM  BACKWD   RECRE  STRIDE    F       FRE     F       FRE ")
-        logging.info("SIZE (KB) WRITE   WRITE   READ    READ    READ   WRITE    READ   WRITE    READ    WRITE   WRITE   READ    READ")
-        logging.info("--------------------------------------------------------------------------------------------------------------")
-        for result_line in file_size:
-            logging.info("%-10s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s%-8.6s" % tuple(result_line))
-        logging.info("REGRESSIONS: %d (%.2f%%)    Improvements: %d (%.2f%%)",
-                     file_regressions,
-                     (100 * file_regressions/float(file_total)),
-                     file_improvements,
-                     (100 * file_improvements/float(file_total)))
-        logging.info("")
-
-
-    def analyze(self):
-        """
-        Analyzes and eventually compares sets of IOzone data.
-        """
-        overall = []
-        record_size = []
-        file_size = []
-        for path in self.list_files:
-            file = open(path, 'r')
-            logging.info('FILE: %s', path)
-
-            results = self.parse_file(file)
-
-            overall_results = self.process_results(results)
-            record_size_results = self.process_results(results, 'record_size')
-            file_size_results = self.process_results(results, 'file_size')
-            self.report(overall_results, record_size_results, file_size_results)
-
-            if len(self.list_files) == 2:
-                overall.append(overall_results)
-                record_size.append(record_size_results)
-                file_size.append(file_size_results)
-
-        if len(self.list_files) == 2:
-            record_comparison = compare_matrices(*record_size)
-            file_comparison = compare_matrices(*file_size)
-            self.report_comparison(record_comparison, file_comparison)
-
-
-class IOzonePlotter(object):
-    """
-    Plots graphs based on the results of an IOzone run.
-
-    Plots graphs based on the results of an IOzone run. Uses gnuplot to
-    generate the graphs.
-    """
-    def __init__(self, results_file, output_dir):
-        self.active = True
-        try:
-            self.gnuplot = os_dep.command("gnuplot")
-        except:
-            logging.error("Command gnuplot not found, disabling graph "
-                          "generation")
-            self.active = False
-
-        if not os.path.isdir(output_dir):
-            os.makedirs(output_dir)
-        self.output_dir = output_dir
-
-        if not os.path.isfile(results_file):
-            logging.error("Invalid file %s provided, disabling graph "
-                          "generation", results_file)
-            self.active = False
-            self.results_file = None
-        else:
-            self.results_file = results_file
-            self.generate_data_source()
-
-
-    def generate_data_source(self):
-        """
-        Creates data file without headers for gnuplot consumption.
-        """
-        results_file = open(self.results_file, 'r')
-        self.datasource = os.path.join(self.output_dir, '3d-datasource')
-        datasource = open(self.datasource, 'w')
-        for line in results_file.readlines():
-            fields = line.split()
-            if len(fields) != 15:
-                continue
-            try:
-                values = [int(i) for i in fields]
-                datasource.write(line)
-            except ValueError:
-                continue
-        datasource.close()
-
-
-    def plot_2d_graphs(self):
-        """
-        For each one of the throughput parameters, generate a set of gnuplot
-        commands that plot throughput against file size as a 2D graph.
-        """
-        datasource_2d = os.path.join(self.output_dir, '2d-datasource-file')
-        for index, label in zip(range(2, 15), _LABELS[2:]):
-            commands_path = os.path.join(self.output_dir, '2d-%s.do' % label)
-            commands = ""
-            commands += "set title 'Iozone performance: %s'\n" % label
-            commands += "set logscale x\n"
-            commands += "set xlabel 'File size (KB)'\n"
-            commands += "set ylabel 'Throughput (MB/s)'\n"
-            commands += "set terminal png small size 450 350\n"
-            commands += "set output '%s'\n" % os.path.join(self.output_dir,
-                                                           '2d-%s.png' % label)
-            commands += ("plot '%s' using 1:%s title '%s' with lines \n" %
-                         (datasource_2d, index, label))
-            commands_file = open(commands_path, 'w')
-            commands_file.write(commands)
-            commands_file.close()
-            try:
-                utils.system("%s %s" % (self.gnuplot, commands_path))
-            except error.CmdError:
-                logging.error("Problem plotting from commands file %s",
-                              commands_path)
-
-
-    def plot_3d_graphs(self):
-        """
-        For each one of the throughput parameters, generate a set of gnuplot
-        commands that will create a parametric surface with file size vs.
-        record size vs. throughput.
-        """
-        for index, label in zip(range(1, 14), _LABELS[2:]):
-            commands_path = os.path.join(self.output_dir, '%s.do' % label)
-            commands = ""
-            commands += "set title 'Iozone performance: %s'\n" % label
-            commands += "set grid lt 2 lw 1\n"
-            commands += "set surface\n"
-            commands += "set parametric\n"
-            commands += "set xtics\n"
-            commands += "set ytics\n"
-            commands += "set logscale x 2\n"
-            commands += "set logscale y 2\n"
-            commands += "set logscale z\n"
-            commands += "set xrange [2.**5:2.**24]\n"
-            commands += "set xlabel 'File size (KB)'\n"
-            commands += "set ylabel 'Record size (KB)'\n"
-            commands += "set zlabel 'Througput (KB/s)'\n"
-            commands += "set data style lines\n"
-            commands += "set dgrid3d 80,80, 3\n"
-            commands += "set terminal png small size 900 700\n"
-            commands += "set output '%s'\n" % os.path.join(self.output_dir,
-                                                           '%s.png' % label)
-            commands += ("splot '%s' using 1:2:%s title '%s'\n" %
-                         (self.datasource, index, label))
-            commands_file = open(commands_path, 'w')
-            commands_file.write(commands)
-            commands_file.close()
-            try:
-                utils.system("%s %s" % (self.gnuplot, commands_path))
-            except error.CmdError:
-                logging.error("Problem plotting from commands file %s",
-                              commands_path)
-
-
-    def plot_all(self):
-        """
-        Plot all graphs that are to be plotted, provided that we have gnuplot.
-        """
-        if self.active:
-            self.plot_2d_graphs()
-            self.plot_3d_graphs()
-
-
-class AnalyzerLoggingConfig(logging_config.LoggingConfig):
-    def configure_logging(self, results_dir=None, verbose=False):
-        super(AnalyzerLoggingConfig, self).configure_logging(use_console=True,
-                                                        verbose=verbose)
-
-
-if __name__ == "__main__":
-    parser = optparse.OptionParser("usage: %prog [options] [filenames]")
-    options, args = parser.parse_args()
-
-    logging_manager.configure_logging(AnalyzerLoggingConfig())
-
-    if args:
-        filenames = args
-    else:
-        parser.print_help()
-        sys.exit(1)
-
-    if len(args) > 2:
-        parser.print_help()
-        sys.exit(1)
-
-    o = os.path.join(os.getcwd(),
-                     "iozone-graphs-%s" % time.strftime('%Y-%m-%d-%H.%M.%S'))
-    if not os.path.isdir(o):
-        os.makedirs(o)
-
-    a = IOzoneAnalyzer(list_files=filenames, output_dir=o)
-    a.analyze()
-    p = IOzonePlotter(results_file=filenames[0], output_dir=o)
-    p.plot_all()
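
For reference, the deleted plotter's pattern -- write a gnuplot command file, then shell out to gnuplot -- reduces to a short standalone sketch. This is illustrative only: it assumes a gnuplot binary on PATH and a whitespace-separated datasource whose first column is the file size, and it reuses the same terminal/plot settings as the deleted 2d-*.do files.

    import os
    import subprocess
    import tempfile

    def plot_throughput(datasource, column, label, out_png):
        # Build a gnuplot command file equivalent to the deleted 2d-*.do files
        # and run it; gnuplot must be installed and on PATH.
        script = "\n".join([
            "set title 'Iozone performance: %s'" % label,
            "set logscale x",
            "set xlabel 'File size (KB)'",
            "set ylabel 'Throughput (MB/s)'",
            "set terminal png small size 450 350",
            "set output '%s'" % out_png,
            "plot '%s' using 1:%d title '%s' with lines" % (datasource, column, label),
        ])
        with tempfile.NamedTemporaryFile('w', suffix='.do', delete=False) as f:
            f.write(script + "\n")
            commands_path = f.name
        try:
            subprocess.check_call(["gnuplot", commands_path])
        finally:
            os.unlink(commands_path)

Calling plot_throughput('2d-datasource-file', 3, 'write', '2d-write.png') would reproduce one of the deleted outputs, give or take the exact column-to-label mapping.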
diff --git a/client/tests/ipv6connect/control b/client/tests/ipv6connect/control
deleted file mode 100644
index ff337ce..0000000
--- a/client/tests/ipv6connect/control
+++ /dev/null
@@ -1,12 +0,0 @@
-NAME = "IPv6 Connect"
-AUTHOR = "David Luyer <luyer@google.com>"
-TIME = "MEDIUM"
-TEST_CATEGORY = "FUNCTIONAL"
-TEST_CLASS = "KERNEL"
-TEST_TYPE = "CLIENT"
-DOC = """
-Create many TCP connections to dual-stack sockets on IPv6 loopback, connecting
-to the IPv4, IPv6-mapped-IPv4 and IPv6 loopback address (different code paths).
-"""
-
-job.run_test('ipv6connect')
diff --git a/client/tests/ipv6connect/ipv6connect.py b/client/tests/ipv6connect/ipv6connect.py
deleted file mode 100644
index 5260ba2..0000000
--- a/client/tests/ipv6connect/ipv6connect.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import os, re, sys
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import utils
-
-
-class ipv6connect(test.test):
-    version = 1
-
-    preserve_srcdir = True
-
-    def setup(self, src='ipv6connect.c'):
-        os.chdir(self.srcdir)
-        utils.system('gcc ipv6connect.c -o ipv6connect -lpthread -static -s')
-
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.results = []
-
-
-    def run_once(self, dir=None, nprocs=None, args=''):
-        (lower, upper) = utils.get_ip_local_port_range()
-        utils.set_ip_local_port_range(4096, 65535)
-        try:
-            result = utils.run(os.path.join(self.srcdir, 'ipv6connect'),
-                               None, False,
-                               stdout_tee=sys.stdout, stderr_tee=sys.stderr)
-        finally:
-            utils.set_ip_local_port_range(lower, upper)
-        self.results.append(result.stderr)
-
-
-    def postprocess(self):
-        pattern = re.compile(r'\nTotal time = ([0-9.]+)s\n')
-        for duration in pattern.findall('\n'.join(self.results)):
-            self.write_perf_keyval({'time': duration})
diff --git a/client/tests/ipv6connect/src/ipv6connect.c b/client/tests/ipv6connect/src/ipv6connect.c
deleted file mode 100644
index 34bd2d8..0000000
--- a/client/tests/ipv6connect/src/ipv6connect.c
+++ /dev/null
@@ -1,474 +0,0 @@
-// Copyright 2008 Google Inc.  Released under the GPL v2.
-//
-// This test performs numerous connects (with auto-binding), to a server
-// listening on all local addresses using an IPv6 socket, by connecting to
-// 127.0.0.1, ::ffff:127.0.0.1 and ::1.
-//
-// The code is really three tests:
-//
-//   - RunWithOneServer, using CreateServer and ConnectAndAccept,
-//     uses one server socket and repeatedly connects to it.
-//
-//   - RunWithOneShotServers, using CreateServerConnectAndAccept,
-//     creates servers, connects to them and then discards them.
-//
-//   - RunMultiThreaded, using ThreadedCreateServerConnectAndAccept,
-//     ThreadedStartServer and ThreadedGetServerFD, is equivalent to
-//     RunWithOneShotServers but uses multiple threads, one for the
-//     server and one for the client.
-//
-// Each of these tests triggers error conditions on different kernels
-// to a different extent.
-
-#include <arpa/inet.h>
-#include <netinet/in.h>
-#include <pthread.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <sys/socket.h>
-#include <sys/time.h>
-#include <time.h>
-#include <unistd.h>
-
-// Which loopback address to connect to.
-enum LoopbackAddr { V4_LOOPBACK, V6_LOOPBACK, V6_MAPPED_V4_LOOPBACK };
-
-// Connect to a listening TCP socket, and accept the connection.
-static void ConnectAndAccept(enum LoopbackAddr addr, int server_fd, int port) {
-  struct sockaddr_in6 sa;
-  socklen_t addr_len;
-  int client_fd, accepted_fd;
-
-  if (addr == V6_LOOPBACK || addr == V6_MAPPED_V4_LOOPBACK) {
-    char buf[INET6_ADDRSTRLEN];
-
-    memset(&sa, 0, sizeof(sa));
-    if ((client_fd = socket(AF_INET6, SOCK_STREAM, IPPROTO_TCP)) == -1) {
-      perror("socket");
-      exit(1);
-    }
-    if (addr == V6_LOOPBACK) {
-      inet_pton(AF_INET6, "::1", &sa.sin6_addr);
-    } else if (addr == V6_MAPPED_V4_LOOPBACK) {
-      inet_pton(AF_INET6, "::ffff:127.0.0.1", &sa.sin6_addr);
-    }
-    if (!inet_ntop(AF_INET6, &sa.sin6_addr, buf, INET6_ADDRSTRLEN)) {
-      perror("inet_ntop");
-      exit(1);
-    }
-    addr_len = sizeof(sa);
-    sa.sin6_family = AF_INET6;
-    sa.sin6_port = port;
-    if (connect(client_fd, (struct sockaddr*)(&sa),
-                sizeof(struct sockaddr_in6)) == -1) {
-      perror("connect");
-      exit(1);
-    }
-    write(2, (addr == V6_LOOPBACK) ? "+" : "-", 1);
-  } else {
-    struct sockaddr_in sa4;
-
-    if ((client_fd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP)) == -1) {
-      perror("socket");
-      exit(1);
-    }
-    memset(&sa4, 0, sizeof(sa4));
-    sa4.sin_family = AF_INET;
-    inet_pton(AF_INET, "127.0.0.1", &sa4.sin_addr);
-    sa4.sin_port = port;
-    if (connect(client_fd, (struct sockaddr*)(&sa4),
-                sizeof(struct sockaddr_in)) == -1) {
-      perror("connect");
-      exit(1);
-    }
-    write(2, ".", 1);
-  }
-  addr_len = sizeof(sa);
-  if ((accepted_fd = accept(server_fd,
-                            (struct sockaddr*)(&sa), &addr_len)) == -1) {
-    perror("accept");
-    exit(1);
-  }
-  close(client_fd);
-  close(accepted_fd);
-}
-
-// Create a listening TCP socket.
-static void CreateServer(int* server_fd, int* port) {
-  struct sockaddr_in6 sa;
-  socklen_t addr_len;
-
-  memset(&sa, 0, sizeof(sa));
-  if ((*server_fd = socket(AF_INET6, SOCK_STREAM, IPPROTO_TCP)) == -1) {
-    perror("socket");
-    exit(1);
-  }
-  addr_len = sizeof(sa);
-  sa.sin6_family = AF_INET6;
-  sa.sin6_addr = in6addr_any;
-  sa.sin6_port = 0;
-  if (bind(*server_fd, (struct sockaddr*)(&sa), sizeof(sa)) == -1) {
-    perror("bind");
-    exit(1);
-  }
-  if (getsockname(*server_fd, (struct sockaddr*)(&sa), &addr_len) == -1) {
-    perror("getsockname");
-    exit(1);
-  }
-  if (listen(*server_fd, 10) == -1) {
-    perror("listen");
-    exit(1);
-  }
-  *port = sa.sin6_port;
-}
-
-// Create a socket, connect to it, accept, and discard both.
-static void CreateServerConnectAndAccept(enum LoopbackAddr addr) {
-  struct sockaddr_in6 sa;
-  socklen_t addr_len;
-  int server_fd, client_fd, accepted_fd, connect_rc;
-
-  if ((server_fd = socket(AF_INET6, SOCK_STREAM, IPPROTO_TCP)) == -1) {
-    perror("socket");
-    exit(1);
-  }
-  addr_len = sizeof(sa);
-  memset(&sa, 0, sizeof(sa));
-  sa.sin6_family = AF_INET6;
-  sa.sin6_addr = in6addr_any;
-  sa.sin6_port = 0;
-  if (bind(server_fd, (struct sockaddr*)(&sa), sizeof(sa)) == -1) {
-    perror("bind");
-    exit(1);
-  }
-  if (getsockname(server_fd, (struct sockaddr*)(&sa), &addr_len) == -1) {
-    perror("getsockname");
-    exit(1);
-  }
-  if (listen(server_fd, 10) == -1) {
-    perror("listen");
-    exit(1);
-  }
-  if (addr == V6_LOOPBACK || addr == V6_MAPPED_V4_LOOPBACK) {
-    char buf[INET6_ADDRSTRLEN];
-
-    if ((client_fd = socket(AF_INET6, SOCK_STREAM, IPPROTO_TCP)) == -1) {
-      perror("socket");
-      exit(1);
-    }
-    if (addr == V6_LOOPBACK) {
-      inet_pton(AF_INET6, "::1", &sa.sin6_addr);
-    } else if (addr == V6_MAPPED_V4_LOOPBACK) {
-      inet_pton(AF_INET6, "::ffff:127.0.0.1", &sa.sin6_addr);
-    }
-    if (!inet_ntop(AF_INET6, &sa.sin6_addr, buf, INET6_ADDRSTRLEN)) {
-      perror("inet_ntop");
-      exit(1);
-    }
-    connect_rc = connect(client_fd, (struct sockaddr*)(&sa),
-                         sizeof(struct sockaddr_in6));
-    write(2, (addr == V6_MAPPED_V4_LOOPBACK) ? "-" : "+", 1);
-  } else {
-    struct sockaddr_in sa4;
-
-    if ((client_fd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP)) == -1) {
-      perror("socket");
-      exit(1);
-    }
-    memset(&sa4, 0, sizeof(sa4));
-    sa4.sin_family = AF_INET;
-    inet_pton(AF_INET, "127.0.0.1", &sa4.sin_addr);
-    sa4.sin_port = sa.sin6_port;
-    connect_rc = connect(client_fd, (struct sockaddr*)(&sa4),
-                         sizeof(struct sockaddr_in));
-    write(2, ".", 1);
-  }
-  if (connect_rc == -1) {
-    perror("connect");
-    exit(1);
-  }
-  addr_len = sizeof(sa);
-  if ((accepted_fd = accept(server_fd,
-                            (struct sockaddr*)(&sa), &addr_len)) == -1) {
-    perror("accept");
-    exit(1);
-  }
-  close(accepted_fd);
-  close(client_fd);
-  close(server_fd);
-}
-
-// Globals for threaded version.
-static volatile int threaded_listening = 0;
-static int threaded_server_fd;
-static pthread_mutex_t threaded_mutex = PTHREAD_MUTEX_INITIALIZER;
-static pthread_cond_t threaded_cond = PTHREAD_COND_INITIALIZER;
-
-// Block until the server is listening, then return the server socket fd.
-static int ThreadedGetServerFD() {
-  pthread_mutex_lock(&threaded_mutex);
-  while (!threaded_listening) {
-    pthread_cond_wait(&threaded_cond, &threaded_mutex);
-  }
-  pthread_mutex_unlock(&threaded_mutex);
-  return threaded_server_fd;
-}
-
-// Start a server which accepts one connection.
-static void* ThreadedStartServer(void* unused) {
-  struct sockaddr_in6 sa;
-  socklen_t addr_len = sizeof(sa);
-  int accept_fd;
-
-  if ((threaded_server_fd = socket(AF_INET6, SOCK_STREAM, IPPROTO_TCP)) == -1) {
-    perror("socket");
-    exit(1);
-  }
-
-  // Any IP, unused port.
-  memset(&sa, 0, sizeof(sa));
-  sa.sin6_family = AF_INET6;
-  sa.sin6_addr = in6addr_any;
-  sa.sin6_port = 0;
-
-  // Bind.
-  if (bind(threaded_server_fd, (struct sockaddr*)(&sa), sizeof(sa)) == -1) {
-    perror("bind");
-    exit(1);
-  }
-
-  // Listen.
-  if (listen(threaded_server_fd, 10) == -1) {
-    perror("listen");
-    exit(1);
-  }
-  pthread_mutex_lock(&threaded_mutex);
-  threaded_listening = 1;
-  pthread_cond_signal(&threaded_cond);
-  pthread_mutex_unlock(&threaded_mutex);
-
-  // Try to accept.
-  if ((accept_fd = accept(threaded_server_fd, (struct sockaddr*)(&sa),
-                          &addr_len)) == -1) {
-    perror("accept");
-    exit(1);
-  }
-
-  // All done.
-  close(threaded_server_fd);
-  close(accept_fd);
-  threaded_listening = 0;
-  return NULL;
-}
-
-// Start a server thread, then connect to it via TCP.
-static void ThreadedCreateServerConnectAndAccept(enum LoopbackAddr addr) {
-  pthread_t pthread;
-  int server_fd, client_fd;
-  struct sockaddr_in6 sa;
-  socklen_t addr_len = sizeof(sa);
-
-  pthread_create(&pthread, NULL, ThreadedStartServer, NULL);
-
-  // Get the server address information -- this call will block until
-  // the server is listening.
-  server_fd = ThreadedGetServerFD();
-  memset(&sa, 0, sizeof(sa));
-  if (getsockname(server_fd, (struct sockaddr*)(&sa), &addr_len) == -1) {
-    perror("getsockname");
-    exit(1);
-  }
-
-  if (addr == V6_LOOPBACK || addr == V6_MAPPED_V4_LOOPBACK) {
-    char buf[INET6_ADDRSTRLEN];
-
-    if ((client_fd = socket(AF_INET6, SOCK_STREAM, IPPROTO_TCP)) == -1) {
-      perror("socket");
-      exit(1);
-    }
-
-    // Check that we are listening on ::
-    if (!inet_ntop(AF_INET6, &sa.sin6_addr, buf, INET6_ADDRSTRLEN)) {
-      fprintf(stderr, "inet_ntop failed\n");
-      exit(1);
-    }
-    if (strlen(buf) != 2) {
-      fprintf(stderr, "Expected to listen on ::, instead listening on %s", buf);
-      exit(1);
-    }
-
-    if (addr == V6_LOOPBACK) {
-      inet_pton(AF_INET6, "::1", &sa.sin6_addr);
-    } else if (addr == V6_MAPPED_V4_LOOPBACK) {
-      inet_pton(AF_INET6, "::ffff:127.0.0.1", &sa.sin6_addr);
-    }
-    if (connect(client_fd, (struct sockaddr*)(&sa),
-                sizeof(struct sockaddr_in6)) == -1) {
-      perror("connect");
-      exit(1);
-    }
-  } else {
-    struct sockaddr_in sa4;
-
-    if ((client_fd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP)) == -1) {
-      perror("socket");
-      exit(1);
-    }
-
-    memset(&sa4, 0, sizeof(sa4));
-    sa4.sin_family = AF_INET;
-    inet_aton("127.0.0.1", &sa4.sin_addr);
-    sa4.sin_port = sa.sin6_port;
-
-    if (connect(client_fd, (struct sockaddr*)(&sa4),
-                sizeof(struct sockaddr_in)) == -1) {
-      perror("connect");
-      exit(1);
-    }
-  }
-
-  // Update progress.
-  switch (addr) {
-    case V4_LOOPBACK:
-      write(2, ".", 1);
-      break;
-    case V6_MAPPED_V4_LOOPBACK:
-      write(2, "-", 1);
-      break;
-    case V6_LOOPBACK:
-      write(2, "+", 1);
-      break;
-  }
-
-  // Close our connection and wait for the server thread to shut down.
-  close(client_fd);
-  pthread_join(pthread, NULL);
-}
-
-static void RunWithOneServer(int outer, int inner) {
-  int i, j, server_fd, port;
-  fprintf(stderr, "Starting test with one server port for all connects\n");
-  for (i = 0; i < outer; ++i) {
-    CreateServer(&server_fd, &port);
-    for (j = 0; j < inner; ++j) {
-      ConnectAndAccept(V4_LOOPBACK, server_fd, port);
-    }
-    write(2, "\n", 1);
-    for (j = 0; j < inner; ++j) {
-      ConnectAndAccept(V6_MAPPED_V4_LOOPBACK, server_fd, port);
-    }
-    write(2, "\n", 1);
-    for (j = 0; j < inner; ++j) {
-      ConnectAndAccept(V6_LOOPBACK, server_fd, port);
-    }
-    write(2, "\n", 1);
-    close(server_fd);
-  }
-}
-
-static void RunWithOneShotServers(int outer, int inner) {
-  int i, j;
-  fprintf(stderr, "Starting test with one server port per connect\n");
-  for (i = 0; i < outer; ++i) {
-    for (j = 0; j < inner; ++j) {
-      CreateServerConnectAndAccept(V4_LOOPBACK);
-    }
-    write(2, "\n", 1);
-    for (j = 0; j < inner; ++j) {
-      CreateServerConnectAndAccept(V6_MAPPED_V4_LOOPBACK);
-    }
-    write(2, "\n", 1);
-    for (j = 0; j < inner; ++j) {
-      CreateServerConnectAndAccept(V6_LOOPBACK);
-    }
-    write(2, "\n", 1);
-  }
-}
-
-static void RunMultiThreaded(int outer, int inner) {
-  int i, j;
-  fprintf(stderr, "Starting multi-threaded test\n");
-  for (i = 0; i < outer; ++i) {
-    for (j = 0; j < inner; ++j) {
-      ThreadedCreateServerConnectAndAccept(V4_LOOPBACK);
-    }
-    write(2, "\n", 1);
-    for (j = 0; j < inner; ++j) {
-      ThreadedCreateServerConnectAndAccept(V6_MAPPED_V4_LOOPBACK);
-    }
-    write(2, "\n", 1);
-    for (j = 0; j < inner; ++j) {
-      ThreadedCreateServerConnectAndAccept(V6_LOOPBACK);
-    }
-    write(2, "\n", 1);
-  }
-}
-
-static const char* usage =
-    "Usage: %s [types [outer [inner]]]\n"
-    "Arguments:\n"
-    "\ttypes: String consisting of [OMT], for the test types to run\n"
-    "\t       O: One server, multiple connects\n"
-    "\t       M: One server per connect (multiple server ports)\n"
-    "\t       T: Multi-threaded version of \'M\'\n"
-    "\touter: Number of passes through the outer loops, default 10\n"
-    "\tinner: Number of passes through the inner loops, default 75\n";
-
-static void Usage(char *argv0) {
-  fprintf(stderr, usage, argv0);
-  exit(2);
-}
-
-int main(int argc, char** argv) {
-  char *types = "OMT";
-  int i, inner = 75, outer = 10, timediff;
-  struct timeval tv0, tv1;
-
-  // Parse the options.
-  if (argc == 4) {
-    inner = atoi(argv[3]);
-    if (inner <= 0) {
-      Usage(argv[0]);
-    }
-    argc--;
-  }
-  if (argc == 3) {
-    outer = atoi(argv[2]);
-    if (outer <= 0) {
-      Usage(argv[0]);
-    }
-    argc--;
-  }
-  if (argc == 2) {
-    types = argv[1];
-    if (strspn(types, "OMT") != strlen(types)) {
-      Usage(argv[0]);
-    }
-    argc--;
-  }
-  if (argc != 1) {
-    Usage(argv[0]);
-  }
-
-  // Run the tests.
-  gettimeofday(&tv0, NULL);
-  for (i = 0; i < strlen(types); ++i) {
-    switch (types[i]) {
-      case 'O':
-        RunWithOneServer(outer, inner);
-        break;
-      case 'M':
-        RunWithOneShotServers(outer, inner);
-        break;
-      case 'T':
-        RunMultiThreaded(outer, inner);
-        break;
-    }
-  }
-  gettimeofday(&tv1, NULL);
-  timediff = (tv1.tv_sec - tv0.tv_sec) * 1000000 + tv1.tv_usec - tv0.tv_usec;
-  fprintf(stderr, "Total time = %d.%06ds\n", timediff / 1000000,
-          timediff % 1000000);
-  exit(0);
-}
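
The header comment of the deleted C source describes the three loopback paths being exercised against a single AF_INET6 listener: 127.0.0.1, ::ffff:127.0.0.1 and ::1. A minimal Python sketch of that dual-stack round trip, assuming a Linux host where IPV6_V6ONLY can be cleared (the default):

    import socket

    def dual_stack_roundtrip():
        # One AF_INET6 listener on ::, exercised over the same three loopback
        # paths as the C test: plain IPv4, IPv4 mapped into IPv6, native IPv6.
        srv = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
        srv.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
        srv.bind(("::", 0))
        srv.listen(10)
        port = srv.getsockname()[1]
        for family, addr in ((socket.AF_INET, "127.0.0.1"),
                             (socket.AF_INET6, "::ffff:127.0.0.1"),
                             (socket.AF_INET6, "::1")):
            cli = socket.socket(family, socket.SOCK_STREAM)
            cli.connect((addr, port))
            conn, _peer = srv.accept()
            conn.close()
            cli.close()
        srv.close()

Unlike the deleted test it makes no attempt to stress auto-binding or run the loop thousands of times; it only shows the three code paths.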
diff --git a/client/tests/isic/build-fixes.patch b/client/tests/isic/build-fixes.patch
deleted file mode 100644
index 8a3f4ed..0000000
--- a/client/tests/isic/build-fixes.patch
+++ /dev/null
@@ -1,26 +0,0 @@
---- src/Makefile.in2	2004-11-06 21:39:55.000000000 +0100
-+++ src/Makefile.in	2007-03-31 16:36:06.000000000 +0200
-@@ -5,9 +5,9 @@
- PREFIX ?= /usr/local
- 
- CC	= @CC@
--DEFS	= @DEFS@ `libnet-config --defines`
--CFLAGS	= @CFLAGS@ `libnet-config --cflags`
--LIBS	= @LIBS@ `libnet-config --libs`
-+DEFS	= @DEFS@ `../../../deps/libnet/src/libnet-config --defines`
-+CFLAGS	= @CFLAGS@ `../../../deps/libnet/src/libnet-config --cflags`
-+LIBS	= @LIBS@ `../../../deps/libnet/src/libnet-config --libs`
- LDFLAGS	= @LDFLAGS@
- 
- INSTALL	= @INSTALL@
---- src/configure2	2007-03-31 16:40:35.000000000 +0200
-+++ src/configure	2007-03-31 16:37:37.000000000 +0200
-@@ -828,7 +828,7 @@ test -z "$INSTALL_SCRIPT" && INSTALL_SCR
- test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
- 
- 
--PREFIX=/usr/local
-+#PREFIX=/usr/local
- 
- ac_safe=`echo "$PREFIX/lib/libnet.a" | sed 'y%./+-%__p_%'`
- echo $ac_n "checking for $PREFIX/lib/libnet.a""... $ac_c" 1>&6
diff --git a/client/tests/isic/control b/client/tests/isic/control
deleted file mode 100644
index 7ab5e38..0000000
--- a/client/tests/isic/control
+++ /dev/null
@@ -1,13 +0,0 @@
-AUTHOR = "Michal Piotrowski <michal.k.k.piotrowski@gmail.com>"
-TIME = "MEDIUM"
-NAME = "ISIC"
-TEST_TYPE = "client"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-
-DOC = """
-Runs the ISIC (IP Stack Integrity Checker) test. For more information see
-http://www.packetfactory.net/Projects/ISIC/
-"""
-
-job.run_test('isic')
diff --git a/client/tests/isic/help b/client/tests/isic/help
deleted file mode 100644
index b573128..0000000
--- a/client/tests/isic/help
+++ /dev/null
@@ -1,43 +0,0 @@
-ISIC -- IP Stack Integrity Checker
-
-Description:
-ISIC is a suite of utilities to exercise the stability of an IP stack and its
-component stacks (TCP, UDP, ICMP et al.). It generates piles of pseudo-random
-packets of the target protocol. The packets can be given tendencies to conform
-to; e.g. 50% of the generated packets can have IP options, 25% of the packets
-can be IP fragments, and so on. The percentages are arbitrary and most of the
-packet fields have a configurable tendency.
-
-The packets are then sent against the target machine to either penetrate its
-firewall rules or find bugs in the IP stack.
-
-ISIC also contains a utility to generate raw ether frames to examine hardware
-implementations.
-
-Other Uses:
-Other novel uses people have found for ISIC include IDS testing, stack
-fingerprinting, breaking sniffers and barraging the IRC kiddie.
-
-
-Warning:
-ISIC may break shit, melt your network, knock out your
-firewall, or singe the fur off your cat
-
-
-usage: isic [-v] [-D] -s <source ip> -d <destination ip>
-       [-p <pkts to generate>] [-k <skip packets>] [-x <send packet X times>]
-        [-r <random seed>] [-m <max kB/s to generate>]
-        Percentage Opts: [-F frags] [-V <Bad IP Version>]
-                         [-I <Random IP Header length>]
-notes:
-        [-D] causes packet info to be printed out -- DEBUGGING
-
-       ex: -s a.b.c.d   -d a.b.c.d -F100
-        100% of the packets will be ^^^^ fragments
-       ex: -s a.b.c.d   -d a.b.c.d -p 100 -r 103334
-       ex: -s rand   -d rand -r 23342
-              ^^^^ causes random source addr
-       ex: -s rand   -d rand -k 10000 -p 10001 -r 666
-               Will only send the 10001 packet with random seed 666
-               this is especially useful if you suspect that packet is
-               causing a problem with the target stack.
diff --git a/client/tests/isic/isic-0.06.tar.bz2 b/client/tests/isic/isic-0.06.tar.bz2
deleted file mode 100644
index c35e130..0000000
--- a/client/tests/isic/isic-0.06.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/isic/isic-0.06.tgz b/client/tests/isic/isic-0.06.tgz
deleted file mode 100644
index 6a27b1c..0000000
--- a/client/tests/isic/isic-0.06.tgz
+++ /dev/null
Binary files differ
diff --git a/client/tests/isic/isic-gcc41-fix.patch b/client/tests/isic/isic-gcc41-fix.patch
deleted file mode 100644
index aea3390..0000000
--- a/client/tests/isic/isic-gcc41-fix.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-diff -uprN isic-old/icmpsic.c isic-new/icmpsic.c
---- isic-old/icmpsic.c	2004-11-06 21:11:11.000000000 +0100
-+++ isic-new/icmpsic.c	2006-05-02 16:43:42.000000000 +0200
-@@ -265,7 +265,8 @@ main(int argc, char **argv)
- 
- 		payload = (short int *)((u_char *) icmp + 4);
- 		for(cx = 0; cx <= (payload_s >> 1); cx+=1)
--				(u_short) payload[cx] = rand() & 0xffff;
-+//				(u_short) payload[cx] = rand() & 0xffff;
-+				payload[cx] = rand() & 0xffff;
- 
- 
- 		if ( rand() <= (RAND_MAX * ICMPCksm) )
-diff -uprN isic-old/isic.c isic-new/isic.c
---- isic-old/isic.c	2004-11-06 21:11:14.000000000 +0100
-+++ isic-new/isic.c	2006-05-02 16:39:51.000000000 +0200
-@@ -229,8 +229,11 @@ main(int argc, char **argv)
- 		
- 		payload = (short int *)(buf + IP_H);
- 		for(cx = 0; cx <= (payload_s >> 1); cx+=1)
--				(u_int16_t) payload[cx] = rand() & 0xffff;
--		(u_int16_t) payload[payload_s] = rand() & 0xffff;
-+/*				(u_int16_t) payload[cx] = rand() & 0xffff;
-+		(u_int16_t) payload[payload_s] = rand() & 0xffff;*/
-+				payload[cx] = rand() & 0xffff;
-+		payload[payload_s] = rand() & 0xffff;
-+		
- 		
- 		if ( printout ) {
- 			printf("%s ->",
-diff -uprN isic-old/tcpsic.c isic-new/tcpsic.c
---- isic-old/tcpsic.c	2004-11-06 21:11:16.000000000 +0100
-+++ isic-new/tcpsic.c	2006-05-02 16:41:31.000000000 +0200
-@@ -317,7 +317,8 @@ main(int argc, char **argv)
- 
- 		payload = (short int *)((u_char *) tcp + 20);
- 		for(cx = 0; cx <= (payload_s >> 1); cx+=1)
--				(u_int16_t) payload[cx] = rand() & 0xffff;
-+//				(u_int16_t) payload[cx] = rand() & 0xffff;
-+				payload[cx] = rand() & 0xffff;
- 
- 		if ( rand() <= (RAND_MAX * TCPCksm) )
- 			libnet_do_checksum(l, (u_int8_t *)buf, IPPROTO_TCP, (tcp->th_off << 2)
-diff -uprN isic-old/udpsic.c isic-new/udpsic.c
---- isic-old/udpsic.c	2004-11-06 21:11:20.000000000 +0100
-+++ isic-new/udpsic.c	2006-05-02 16:42:55.000000000 +0200
-@@ -292,7 +292,8 @@ main(int argc, char **argv)
- 
- 		payload = (short int *)((u_char *) udp + UDP_H);
- 		for(cx = 0; cx <= (payload_s >> 1); cx+=1)
--				(u_int16_t) payload[cx] = rand() & 0xffff;
-+//				(u_int16_t) payload[cx] = rand() & 0xffff;
-+				payload[cx] = rand() & 0xffff;
- 
- 		if ( printout ) {
- 			printf("%s,%i ->",
diff --git a/client/tests/isic/isic.py b/client/tests/isic/isic.py
deleted file mode 100644
index 07a86f1..0000000
--- a/client/tests/isic/isic.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class isic(test.test):
-    version = 2
-
-    # http://www.packetfactory.net/Projects/ISIC/isic-0.06.tgz
-    # + http://www.stardust.webpages.pl/files/crap/isic-gcc41-fix.patch
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.job.setup_dep(['libnet'])
-
-
-    def setup(self, tarball = 'isic-0.06.tar.bz2'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-
-        utils.system('patch -p1 < ../build-fixes.patch')
-        utils.system('PREFIX=%s/deps/libnet/libnet/ ./configure' % self.autodir)
-        utils.system('make')
-
-    def execute(self, args = '-s rand -d 127.0.0.1 -p 10000000'):
-        utils.system(self.srcdir + '/isic ' + args)
diff --git a/client/tests/kernel_sysrq_info/control b/client/tests/kernel_sysrq_info/control
deleted file mode 100644
index afa3c0f..0000000
--- a/client/tests/kernel_sysrq_info/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "cernekee"
-NAME = "kernel_sysrq_info"
-PURPOSE = "Verify the Magic SysRq show-* commands."
-ATTRIBUTES = "suite:kernel_daily_regression"
-TIME = "FAST"
-TEST_TYPE = "client"
-DOC = """
-Verifies select /proc/sysrq-trigger commands.
-
-Fail if any "show-*" SysRq command doesn't match the regexp in the
-test case, or if it crashes the system:
-  - show-backtrace-all-active-cpus(L)
-  - show-memory-usage(M)
-  - show-registers(P)
-  - show-all-timers(Q)
-  - show-task-states(T)
-  - show-blocked-tasks(W)
-
-These SysRq commands are used to gather postmortem data from systems
-deployed in the field, so it is critical that they work correctly (and
-don't crash the system).
-"""
-
-job.run_test('kernel_sysrq_info')
diff --git a/client/tests/kernel_sysrq_info/kernel_sysrq_info.py b/client/tests/kernel_sysrq_info/kernel_sysrq_info.py
deleted file mode 100755
index 4b88a6e..0000000
--- a/client/tests/kernel_sysrq_info/kernel_sysrq_info.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import re
-
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-
-class kernel_sysrq_info(test.test):
-    """
-    Verify the Magic SysRq show-* commands
-    (i.e. don't verify reBoot, Crash, kill-all-tasks, etc.)
-    """
-    version = 1
-
-    def sysrq_trigger(self, key):
-        """
-        Trigger one SysRq command, and return the kernel log output
-        @param key:     lowercase SysRq keystroke (e.g. 'm')
-        @return         dmesg log from running the command
-        """
-        os.system("dmesg --clear")
-        with open("/proc/sysrq-trigger", "w") as f:
-            f.write(key + "\n")
-        with os.popen("dmesg --raw") as f:
-            return f.read()
-
-    def run_once(self):
-        test_cases = {'l': 'all active CPUs',
-                      'm': '[0-9]+ pages.*RAM',
-                      'p': 'Show Regs',
-                      'q': 'Tick Device:',
-                      't': 'init.*\s1\s',
-                      'w': 'pid father'
-                     }
-
-        for key in test_cases:
-            s = self.sysrq_trigger(key)
-            if re.search(test_cases[key], s) == None:
-                raise error.TestFail('Unexpected output from SysRq key %s' %
-                                     key)
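
A rough standalone equivalent of the deleted sysrq_trigger() helper, using subprocess instead of os.system/os.popen. It needs root and a kernel built with CONFIG_MAGIC_SYSRQ; the dmesg flags are the same ones the deleted test relied on.

    import subprocess

    def sysrq_dmesg(key):
        # Clear the ring buffer, fire one SysRq command, return what it logged.
        subprocess.check_call(["dmesg", "--clear"])
        with open("/proc/sysrq-trigger", "w") as f:
            f.write(key + "\n")
        return subprocess.check_output(["dmesg", "--raw"]).decode(errors="replace")

Feeding the result to the same per-key regexps as the deleted run_once() reproduces the check.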
diff --git a/client/tests/libhugetlbfs/control b/client/tests/libhugetlbfs/control
deleted file mode 100644
index fae51ce..0000000
--- a/client/tests/libhugetlbfs/control
+++ /dev/null
@@ -1,13 +0,0 @@
-AUTHOR = 'aganti@google.com (Ashwin Ganti)'
-TIME = 'MEDIUM'
-NAME = 'libhugetlbfs test'
-TEST_TYPE = 'client'
-TEST_CLASS = 'Kernel'
-TEST_CATEGORY = 'Functional'
-
-DOC = '''
-Tests basic huge pages functionality when using libhugetlbfs. For more info
-about libhugetlbfs see http://libhugetlbfs.ozlabs.org/
-'''
-
-job.run_test('libhugetlbfs', dir='/mnt')
diff --git a/client/tests/libhugetlbfs/elflink.patch b/client/tests/libhugetlbfs/elflink.patch
deleted file mode 100644
index 5707766..0000000
--- a/client/tests/libhugetlbfs/elflink.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-diff --git a/elflink.c b/elflink.c
-index c39c04e..92729cc 100644
---- a/elflink.c
-+++ b/elflink.c
-@@ -37,6 +37,7 @@
- #include <limits.h>
- #include <elf.h>
- #include <dlfcn.h>
-+#include <sys/stat.h>
- 
- #include "version.h"
- #include "hugetlbfs.h"
diff --git a/client/tests/libhugetlbfs/libhugetlbfs-2.0.tar.gz b/client/tests/libhugetlbfs/libhugetlbfs-2.0.tar.gz
deleted file mode 100644
index e9d528c..0000000
--- a/client/tests/libhugetlbfs/libhugetlbfs-2.0.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/libhugetlbfs/libhugetlbfs.py b/client/tests/libhugetlbfs/libhugetlbfs.py
deleted file mode 100644
index 373a5ba..0000000
--- a/client/tests/libhugetlbfs/libhugetlbfs.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import re, os
-from autotest_lib.client.bin import utils, test
-from autotest_lib.client.common_lib import error
-
-class libhugetlbfs(test.test):
-    version = 6
-
-    def initialize(self, dir = None, pages_requested = 20):
-        self.dir = None
-
-        self.job.require_gcc()
-
-        utils.check_kernel_ver("2.6.16")
-
-        # Check huge page number
-        pages_available = 0
-        if os.path.exists('/proc/sys/vm/nr_hugepages'):
-            utils.write_one_line('/proc/sys/vm/nr_hugepages',
-                                          str(pages_requested))
-            nr_hugepages = utils.read_one_line('/proc/sys/vm/nr_hugepages')
-            pages_available = int(nr_hugepages)
-        else:
-            raise error.TestNAError('Kernel does not support hugepages')
-
-        if pages_available < pages_requested:
-            raise error.TestError('%d huge pages available, < %d pages requested' % (pages_available, pages_requested))
-
-        # Check if hugetlbfs has been mounted
-        if not utils.file_contains_pattern('/proc/mounts', 'hugetlbfs'):
-            if not dir:
-                dir = os.path.join(self.tmpdir, 'hugetlbfs')
-                os.makedirs(dir)
-            utils.system('mount -t hugetlbfs none %s' % dir)
-            self.dir = dir
-
-
-    # http://libhugetlbfs.ozlabs.org/releases/libhugetlbfs-2.0.tar.gz
-    def setup(self, tarball = 'libhugetlbfs-2.0.tar.gz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-
-        utils.system('patch -p1 < ../elflink.patch')
-        # make might fail if there are no proper headers for the 32 bit
-        # version, in that case try only for the 64 bit version
-        try:
-            utils.make()
-        except:
-            utils.make('OBJDIRS=obj64')
-
-
-    def run_once(self):
-        os.chdir(self.srcdir)
-        # make check might fail for 32 bit if the 32 bit compile earlier
-        # had failed. See if it passes for 64 bit in that case.
-        try:
-            utils.make('check')
-        except:
-            utils.make('check OBJDIRS=obj64')
-
-
-    def cleanup(self):
-        if self.dir:
-            utils.system('umount %s' % self.dir)
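
The deleted initialize() boils down to: request a hugepage pool via /proc/sys/vm/nr_hugepages, confirm the kernel granted enough pages, and mount hugetlbfs if it is not already mounted. A hedged sketch of the first two steps without the autotest utils (requires root; the error messages are illustrative):

    def reserve_hugepages(pages_requested=20):
        # Request a hugepage pool and check what the kernel actually granted;
        # mirrors the checks the deleted initialize() did through autotest utils.
        path = "/proc/sys/vm/nr_hugepages"
        try:
            with open(path, "w") as f:
                f.write("%d\n" % pages_requested)
            with open(path) as f:
                granted = int(f.read())
        except IOError:
            raise RuntimeError("kernel does not support hugepages")
        if granted < pages_requested:
            raise RuntimeError("%d huge pages available, %d requested"
                               % (granted, pages_requested))
        return granted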
diff --git a/client/tests/linus_stress/control b/client/tests/linus_stress/control
deleted file mode 100644
index 3f9d22d..0000000
--- a/client/tests/linus_stress/control
+++ /dev/null
@@ -1,12 +0,0 @@
-AUTHOR = 'mbligh@google.com (Martin Bligh)'
-TIME = 'MEDIUM'
-NAME = 'Linus Stress'
-TEST_TYPE = 'client'
-TEST_CLASS = 'Kernel'
-TEST_CATEGORY = 'Stress'
-
-DOC = '''
-Runs the standard linus_stress test.
-'''
-
-job.run_test('linus_stress')
diff --git a/client/tests/linus_stress/linus_stress.c b/client/tests/linus_stress/linus_stress.c
deleted file mode 100644
index be727b2..0000000
--- a/client/tests/linus_stress/linus_stress.c
+++ /dev/null
@@ -1,118 +0,0 @@
-#include <sys/mman.h>
-#include <sys/fcntl.h>
-#include <unistd.h>
-#include <stdlib.h>
-#include <string.h>
-#include <stdio.h>
-#include <time.h>
-
-#define TARGETSIZE (100 << 20)
-#define CHUNKSIZE (1460)
-#define NRCHUNKS (TARGETSIZE / CHUNKSIZE)
-#define SIZE (NRCHUNKS * CHUNKSIZE)
-
-static void fillmem(void *start, int nr)
-{
-	memset(start, nr, CHUNKSIZE);
-}
-
-#define page_offset(buf, off) (0xfff & ((unsigned)(unsigned long)(buf)+(off)))
-
-static int chunkorder[NRCHUNKS];
-
-static int order(int nr)
-{
-	int i;
-	if (nr < 0 || nr >= NRCHUNKS)
-		return -1;
-	for (i = 0; i < NRCHUNKS; i++)
-		if (chunkorder[i] == nr)
-			return i;
-	return -2;
-}
-
-static void checkmem(void *buf, int nr)
-{
-	unsigned int start = ~0u, end = 0;
-	unsigned char c = nr, *p = buf, differs = 0;
-	int i;
-	for (i = 0; i < CHUNKSIZE; i++) {
-		unsigned char got = *p++;
-		if (got != c) {
-			if (i < start)
-				start = i;
-			if (i > end)
-				end = i;
-			differs = got;
-		}
-	}
-	if (start < end) {
-		printf("Chunk %d corrupted (%u-%u)  (%u-%u)            \n", nr, start, end,
-			page_offset(buf, start), page_offset(buf, end));
-		printf("Expected %u, got %u\n", c, differs);
-		printf("Written as (%d)%d(%d)\n", order(nr-1), order(nr), order(nr+1));
-	}
-}
-
-static char *remap(int fd, char *mapping)
-{
-	if (mapping) {
-		munmap(mapping, SIZE);
-		posix_fadvise(fd, 0, SIZE, POSIX_FADV_DONTNEED);
-	}
-	return mmap(NULL, SIZE, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
-}
-
-int main(int argc, char **argv)
-{
-	char *mapping;
-	int fd, i;
-
-	/*
-	 * Make some random ordering of writing the chunks to the
-	 * memory map..
-	 *
-	 * Start with fully ordered..
-	 */
-	for (i = 0; i < NRCHUNKS; i++)
-		chunkorder[i] = i;
-
-	/* ..and then mix it up randomly */
-	srandom(time(NULL));
-	for (i = 0; i < NRCHUNKS; i++) {
-		int index = (unsigned int) random() % NRCHUNKS;
-		int nr = chunkorder[index];
-		chunkorder[index] = chunkorder[i];
-		chunkorder[i] = nr;
-	}
-
-	fd = open("mapfile", O_RDWR | O_TRUNC | O_CREAT, 0666);
-	if (fd < 0)
-		return -1;
-	if (ftruncate(fd, SIZE) < 0)
-		return -1;
-	mapping = remap(fd, NULL);
-	if (-1 == (int)(long)mapping)
-		return -1;
-
-	for (i = 0; i < NRCHUNKS; i++) {
-		int chunk = chunkorder[i];
-		printf("Writing chunk %d/%d (%d%%)     \r", i, NRCHUNKS, 100*i/NRCHUNKS);
-		fillmem(mapping + chunk * CHUNKSIZE, chunk);
-	}
-	printf("\n");
-
-	/* Unmap, drop, and remap.. */
-	mapping = remap(fd, mapping);
-
-	/* .. and check */
-	for (i = 0; i < NRCHUNKS; i++) {
-		int chunk = i;
-		printf("Checking chunk %d/%d (%d%%)     \r", i, NRCHUNKS, 100*i/NRCHUNKS);
-		checkmem(mapping + chunk * CHUNKSIZE, chunk);
-	}
-	printf("\n");
-	
-	return 0;
-}
-
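
The corruption check above is simply "stamp every chunk with its index, then read the stamps back after the unmap/remap". A small Python illustration of the same marking/verification idea against an anonymous mapping, offered purely as a sketch (it skips the file backing and fadvise step that make the C test interesting):

    import mmap
    import struct

    def pattern_check(num_pages=256, page_size=4096):
        # Stamp the first word of every page of an anonymous mapping with the
        # page index, then read the stamps back and flag mismatches.
        m = mmap.mmap(-1, num_pages * page_size)
        for i in range(num_pages):
            m[i * page_size:i * page_size + 4] = struct.pack("<I", i)
        for i in range(num_pages):
            (value,) = struct.unpack("<I", m[i * page_size:i * page_size + 4])
            if value != i:
                raise AssertionError("page %d corrupted (got %d)" % (i, value))
        m.close()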
diff --git a/client/tests/linus_stress/linus_stress.py b/client/tests/linus_stress/linus_stress.py
deleted file mode 100644
index 1e808b0..0000000
--- a/client/tests/linus_stress/linus_stress.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class linus_stress(test.test):
-    version = 1
-
-    def setup(self):
-        os.mkdir(self.srcdir)
-        os.chdir(self.bindir)
-        utils.system('cp linus_stress.c src/')
-        os.chdir(self.srcdir)
-        utils.system(utils.get_cc() + ' linus_stress.c -D_POSIX_C_SOURCE=200112 -o linus_stress')
-
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def run_the_test(self, iterations):
-        utils.write_one_line('/proc/sys/vm/dirty_ratio', '4')
-        utils.write_one_line('/proc/sys/vm/dirty_background_ratio', '2')
-
-        cmd = os.path.join(self.srcdir, 'linus_stress')
-        args = "%d" % (utils.memtotal() / 32)
-
-        profilers = self.job.profilers
-        if profilers.present():
-            profilers.start(self)
-
-        for i in range(iterations):
-            utils.system(cmd + ' ' + args)
-
-        if profilers.present():
-            profilers.stop(self)
-            profilers.report(self)
-
-
-    def execute(self, iterations = 1):
-        dirty_ratio = utils.read_one_line('/proc/sys/vm/dirty_ratio')
-        dirty_background_ratio = utils.read_one_line('/proc/sys/vm/dirty_background_ratio')
-        try:
-            self.run_the_test(iterations)
-        finally:
-            utils.write_one_line('/proc/sys/vm/dirty_ratio', dirty_ratio)
-            utils.write_one_line('/proc/sys/vm/dirty_background_ratio', dirty_background_ratio)
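
The deleted execute() saves /proc/sys/vm/dirty_ratio and dirty_background_ratio, runs the workload, and restores them in a finally block. The same save/override/restore dance can be packaged as a context manager; a sketch assuming root and whatever /proc/sys/vm knob the caller names:

    import contextlib

    @contextlib.contextmanager
    def vm_sysctl(name, value):
        # Temporarily override a /proc/sys/vm knob and always restore it,
        # the same save/try/finally pattern the deleted execute() used.
        path = "/proc/sys/vm/" + name
        with open(path) as f:
            saved = f.read().strip()
        with open(path, "w") as f:
            f.write(str(value))
        try:
            yield saved
        finally:
            with open(path, "w") as f:
                f.write(saved)

Usage would look like: with vm_sysctl('dirty_ratio', 4): ..., nested with a second call for dirty_background_ratio.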
diff --git a/client/tests/memory_api/control b/client/tests/memory_api/control
deleted file mode 100644
index 73bae5b..0000000
--- a/client/tests/memory_api/control
+++ /dev/null
@@ -1,13 +0,0 @@
-TIME="SHORT"
-AUTHOR = "Ranjit Manomohan"
-DOC = """
-The memory_api test runs through a series of memory allocation calls whose
-results can then be used to analyze the kernel's mappings of the corresponding
-memory areas.
-"""
-NAME = 'memory_api'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-
-
-job.run_test('memory_api')
diff --git a/client/tests/memory_api/memory_api.c b/client/tests/memory_api/memory_api.c
deleted file mode 100644
index d6ae65a..0000000
--- a/client/tests/memory_api/memory_api.c
+++ /dev/null
@@ -1,155 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <sys/mman.h>
-#include <unistd.h>
-#include <fcntl.h>
-
-/* This file includes a simple set of memory allocation calls that
- * a user space program can use to allocate/free or move memory mappings.
- * The intent of this program is to make it easier to verify if the kernel
- * internal mappings are correct.
- */
-
-#define PAGE_SHIFT 12
-
-#define ROUND_PAGES(memsize) ((memsize >> (PAGE_SHIFT)) << PAGE_SHIFT)
-
-/* approximately half of memsize, page aligned */
-#define HALF_MEM(memsize) ((memsize >> (PAGE_SHIFT))<<(PAGE_SHIFT - 1))
-
-inline void waitnext() {
-	fflush(NULL);
-	getchar();
-}
-
-int main(int argc, char *argv[]) {
-	unsigned int memsize;
-	char *mem;
-	int i, numpages, fd;
-
-	if (argc != 2) {
-		printf("Usage: %s <memory_size>\n", argv[0]);
-		exit(EXIT_FAILURE);
-	}
-
-	memsize = strtoul(argv[1], NULL, 10);
-
-	memsize = ROUND_PAGES(memsize);
-
-	/* We should be limited to < 4G so any size other than 0 is ok */
-	if (memsize == 0) {
-		printf("Invalid memsize\n");
-		exit(EXIT_FAILURE);
-	}
-
-
-	numpages = memsize >> PAGE_SHIFT;
-
-	mlockall(MCL_FUTURE);
-
-	mem = sbrk(memsize);
-
-	if (mem == (void*) -1) {
-		perror("Failed to allocate memory using sbrk\n");
-		exit(EXIT_FAILURE);
-	}
-
-	printf("Successfully allocated sbrk memory %d bytes @%p\n",
-				memsize,  mem);
-
-	waitnext();
-
-	sbrk(-(memsize));
-
-	mem =  mmap(0, memsize, PROT_READ | PROT_WRITE,
-			MAP_PRIVATE| MAP_ANONYMOUS,
-			-1, 0);
-
-	if (mem == (void*) -1) {
-		perror("Failed to allocate anon private memory using mmap\n");
-		exit(EXIT_FAILURE);
-	}
-
-	printf("Successfully allocated anon mmap memory %d bytes @%p\n",
-				memsize,  mem);
-
-	waitnext();
-
-	if (-1 == mprotect(mem, HALF_MEM(memsize), PROT_READ)) {
-		perror("Failed to W protect memory using mprotect\n");
-		exit(EXIT_FAILURE);
-	}
-
-	printf("Successfully write protected %d bytes @%p\n",
-			HALF_MEM(memsize), mem);
-
-	waitnext();
-
-	if (-1 == mprotect(mem, HALF_MEM(memsize),
-					 PROT_READ | PROT_WRITE)) {
-		perror("Failed to RW protect memory using mprotect\n");
-		exit(EXIT_FAILURE);
-	}
-
-	printf("Successfully cleared write protected %d bytes @%p\n",
-			memsize, mem);
-	waitnext();
-
-	/* Mark all pages with a specific pattern */
-	for (i = 0; i < numpages; i++) {
-		int *ptr = (int *)(mem + i*4096);
-		*ptr = i;
-	}
-
-	mem = mremap(mem , memsize,
-				memsize + HALF_MEM(memsize),
-				1 /* MREMAP_MAYMOVE */);
-
-	if (mem == MAP_FAILED) {
-		perror("Failed to remap expand anon private memory\n");
-		exit(EXIT_FAILURE);
-	}
-
-	printf("Successfully remapped %d bytes @%p\n",
-			memsize + HALF_MEM(memsize), mem);
-
-	waitnext();
-
-	/* Verify that each page still carries the pattern written before the remap */
-	for (i = 0; i < numpages; i++) {
-		int value = *(int*)(mem + i*4096);
-		if (value != i) {
-			printf("remap error expected %d got %d\n",
-					i, value);
-			exit(EXIT_FAILURE);
-		}
-	}
-
-	if (munmap(mem, memsize + HALF_MEM(memsize))) {
-		perror("Could not unmap and free memory\n");
-		exit(EXIT_FAILURE);
-	}
-
-
-	fd = open("/dev/zero", O_RDONLY);
-
-	mem =  mmap(0, memsize, PROT_READ | PROT_WRITE,
-			MAP_PRIVATE,
-			fd, 0);
-
-	if (mem == (void*) -1) {
-		perror("Failed to allocate file backed memory using mmap\n");
-		exit(EXIT_FAILURE);
-	}
-
-	printf("Successfully allocated file backed mmap memory %d bytes @%p\n",
-					 memsize, mem);
-	waitnext();
-
-	if (munmap(mem, memsize)) {
-		perror("Could not unmap and free file backed memory\n");
-		exit(EXIT_FAILURE);
-	}
-
-	exit(EXIT_SUCCESS);
-}
diff --git a/client/tests/memory_api/memory_api.py b/client/tests/memory_api/memory_api.py
deleted file mode 100644
index ac9cc37..0000000
--- a/client/tests/memory_api/memory_api.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import os, subprocess, re, commands, logging
-from autotest_lib.client.bin import utils, test
-from autotest_lib.client.common_lib import error
-
-class memory_api(test.test):
-    version = 1
-
-    def setup(self):
-        os.mkdir(self.tmpdir)
-        utils.system("%s %s -o %s" %
-                      (utils.get_cc(),
-                       os.path.join(self.bindir, "memory_api.c"),
-                       os.path.join(self.tmpdir, "memory_api")))
-        utils.system("%s %s -o %s" %
-                      (utils.get_cc(),
-                       os.path.join(self.bindir, "mremaps.c"),
-                       os.path.join(self.tmpdir, "mremaps")))
-
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def run_once(self, memsize = "1000000000", args=''):
-
-        vma_re = re.compile("([0-9,a-f]+)-([0-9,a-f]+)")
-        memory_re = re.compile("(\d+) bytes @(0x[0-9,a-f]+)")
-
-        vma_max_shift = 0
-        if os.access("/proc/sys/vm/vma_max_shift", os.R_OK):
-            vma_max_shift = int(
-                      open("/proc/sys/vm/vma_max_shift").read().rstrip())
-        p1 = subprocess.Popen('%s/memory_api ' % self.tmpdir  + memsize,
-                              shell=True, stdin=subprocess.PIPE,
-                              stdout=subprocess.PIPE)
-        while p1.poll() is None:
-            output = p1.stdout.readline().rstrip()
-            m = memory_re.search(output)
-            mem_start = 0
-            mem_len = 0
-            if m:
-                mem_start = int(m.group(2), 16)
-                mem_len = int(m.group(1))
-            else:
-                continue
-            map_output = open("/proc/%s/maps_backing" % p1.pid).readlines()
-            vma_count = 0
-            vma_start = 0
-            vma_len = 0
-            expected_vma_count = 1
-            for line in map_output:
-                m = vma_re.search(line)
-                if m:
-                    vma_start = int("0x%s" % m.group(1),16)
-                    vma_end = int("0x%s" % m.group(2),16)
-                    if ((vma_start >= mem_start) and
-                        (vma_start < (mem_start + mem_len))):
-                        vma_count+=1
-
-            if (('file' not in output) and (vma_max_shift != 0)):
-                expected_vma_count = mem_len >> vma_max_shift
-                if (mem_len % (1 << vma_max_shift)):
-                    expected_vma_count += 1
-            if expected_vma_count != vma_count:
-                raise error.TestFail("VmaCountMismatch")
-            logging.info("%s %s %d %d", hex(mem_start), hex(mem_len), vma_count,
-                         expected_vma_count)
-            if p1.poll() is None:
-                p1.stdin.write("\n")
-                p1.stdin.flush()
-
-        if p1.poll() != 0:
-            raise error.TestFail("Unexpected application abort")
-
-        utils.system('%s/mremaps ' % self.tmpdir  + '100000000')
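
The deleted run_once() counts VMAs by parsing /proc/<pid>/maps_backing, a file only present on the patched kernels this test targeted. On a stock kernel the same address-range bookkeeping can be done against /proc/<pid>/maps; a sketch whose regex mirrors the deleted vma_re:

    import re

    _VMA_RE = re.compile(r'^([0-9a-f]+)-([0-9a-f]+)\s')

    def count_vmas(pid, mem_start, mem_len):
        # Count the VMAs in /proc/<pid>/maps whose start address falls inside
        # [mem_start, mem_start + mem_len).
        count = 0
        with open('/proc/%d/maps' % pid) as f:
            for line in f:
                m = _VMA_RE.match(line)
                if not m:
                    continue
                vma_start = int(m.group(1), 16)
                if mem_start <= vma_start < mem_start + mem_len:
                    count += 1
        return count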
diff --git a/client/tests/memory_api/mremaps.c b/client/tests/memory_api/mremaps.c
deleted file mode 100644
index 012a83e..0000000
--- a/client/tests/memory_api/mremaps.c
+++ /dev/null
@@ -1,74 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <sys/mman.h>
-#include <unistd.h>
-#include <fcntl.h>
-
-/* This program allocates memory with multiple calls to remap. This
- * can be used to verify if the remap api is working correctly. */
-
-#define PAGE_SHIFT 12
-
-#define ROUND_PAGES(memsize) ((memsize >> (PAGE_SHIFT)) << PAGE_SHIFT)
-
-int main(int argc, char *argv[]) {
-	unsigned int memsize;
-	char *mem;
-	int i, numpages, fd;
-
-	if (argc != 2) {
-		printf("Usage: %s <memory_size>\n", argv[0]);
-		exit(EXIT_FAILURE);
-	}
-
-	memsize = strtoul(argv[1], NULL, 10);
-
-	memsize = ROUND_PAGES(memsize);
-
-	/* We should be limited to < 4G so any size other than 0 is ok */
-	if (memsize == 0) {
-		printf("Invalid memsize\n");
-		exit(EXIT_FAILURE);
-	}
-
-	numpages = memsize >> PAGE_SHIFT;
-
-	mem =  mmap(0, memsize, PROT_READ | PROT_WRITE,
-			MAP_PRIVATE | MAP_ANONYMOUS,
-			-1, 0);
-
-	if (mem == (void*) -1) {
-		perror("Failed to allocate anon private memory using mmap\n");
-		exit(EXIT_FAILURE);
-	}
-
-	for (i = 2; i <= 16; i <<= 1) {
-		mem = mremap(mem , memsize * (i >> 1),
-					memsize * i,
-					1 /* MREMAP_MAYMOVE */);
-
-		if (mem == MAP_FAILED) {
-			perror("Failed to remap expand anon private memory\n");
-			exit(EXIT_FAILURE);
-		}
-
-		printf("Successfully remapped %d bytes @%p\n",
-				memsize * i, mem);
-	}
-
-	if (munmap(mem, memsize * 16)) {
-		perror("Could not unmap and free memory\n");
-		exit(EXIT_FAILURE);
-	}
-
-	mem =  mmap(0, memsize, PROT_READ | PROT_WRITE,
-			MAP_PRIVATE | MAP_ANONYMOUS,
-			-1, 0);
-
-	if (mem == (void*) -1) {
-		perror("Failed to allocate anon private memory using mmap\n");
-		exit(EXIT_FAILURE);
-	}
-
-	exit(EXIT_SUCCESS);
-}
diff --git a/client/tests/monotonic_time/control b/client/tests/monotonic_time/control
deleted file mode 100644
index 4dbfec4..0000000
--- a/client/tests/monotonic_time/control
+++ /dev/null
@@ -1,37 +0,0 @@
-NAME = 'monotonic_time'
-AUTHOR = 'Michael Davidson <md@google.com>'
-TIME = 'MEDIUM'
-TEST_CLASS = 'Kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-DOC = """
-monotonic_time checks various time interfaces:
-  gettimeofday()
-  clock_gettime(CLOCK_MONOTONIC)
-  TSC
-for monotonicity.
-
-Based on time-warp-test.c by Ingo Molnar.
-"""
-
-#
-# Test gettimeofday(), TSC, and clock_gettime(CLOCK_MONOTONIC)
-#
-# Tests run for 'duration' seconds and check that the selected
-# time interface does not go backwards by more than 'threshold'.
-#
-# Note that the threshold value has the same resolution as the
-# clock source:
-#   gettimeofday()                 - microseconds
-#   clock_gettime(CLOCK_MONOTONIC) - nanoseconds
-#   TSC                            - CPU clock cycles
-#
-#
-job.run_test('monotonic_time', tag='gtod',  test_type='gtod',
-             duration=300, threshold=0)
-
-job.run_test('monotonic_time', tag='clock', test_type='clock',
-             duration=300, threshold=0)
-
-job.run_test('monotonic_time', tag='tsc',   test_type='tsc',
-             duration=300, threshold=0)
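
The control file above describes the check: poll a clock source for a fixed duration and fail if it ever steps backwards by more than a threshold. A single-threaded Python sketch of the CLOCK_MONOTONIC case follows; unlike the deleted C test it cannot pin readers to different CPUs, so it will not catch cross-CPU time warps.

    import time

    def max_backwards_step(duration=300.0):
        # Poll CLOCK_MONOTONIC for `duration` seconds and return the largest
        # backwards step seen; a healthy system returns 0.0.
        start = time.monotonic()
        last = start
        worst = 0.0
        while time.monotonic() - start < duration:
            now = time.monotonic()
            if now < last:
                worst = max(worst, last - now)
            last = now
        return worst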
diff --git a/client/tests/monotonic_time/monotonic_time.py b/client/tests/monotonic_time/monotonic_time.py
deleted file mode 100644
index ce49110..0000000
--- a/client/tests/monotonic_time/monotonic_time.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import os, re, logging
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class monotonic_time(test.test):
-    version = 1
-
-    preserve_srcdir = True
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def run_once(self, test_type = None, duration = 300, threshold = None):
-        if not test_type:
-            raise error.TestError('missing test type')
-
-        cmd = self.srcdir + '/time_test'
-        cmd += ' --duration ' + str(duration)
-        if threshold:
-            cmd += ' --threshold ' + str(threshold)
-        cmd += ' ' + test_type
-
-        self.results = utils.run(cmd, ignore_status=True)
-        logging.info('Time test command exit status: %s',
-                     self.results.exit_status)
-        if self.results.exit_status != 0:
-            for line in self.results.stdout.splitlines():
-                if line.startswith('ERROR:'):
-                    raise error.TestError(line)
-                if line.startswith('FAIL:'):
-                    raise error.TestFail(line)
-            raise error.TestError('unknown test failure')
diff --git a/client/tests/monotonic_time/src/Makefile b/client/tests/monotonic_time/src/Makefile
deleted file mode 100644
index 2121ec4..0000000
--- a/client/tests/monotonic_time/src/Makefile
+++ /dev/null
@@ -1,23 +0,0 @@
-CC=	cc
-
-CFLAGS=	-O -std=gnu99 -Wall
-LIBS=	-lpthread -lrt
-
-PROG=	time_test
-
-SRCS=	time_test.c cpuset.c threads.c logging.c
-HDRS=	spinlock.h cpuset.h threads.h logging.h
-OBJS=	$(SRCS:.c=.o)
-
-all:	$(PROG)
-
-$(PROG):	$(OBJS)
-	$(CC) $(LDFLAGS) -o $(PROG) $(OBJS) $(LIBS)
-
-$(OBJS):	$(HDRS)
-
-clean:
-	-rm -f $(OBJS)
-
-clobber:	clean
-	-rm -f $(PROG)
diff --git a/client/tests/monotonic_time/src/cpuset.c b/client/tests/monotonic_time/src/cpuset.c
deleted file mode 100644
index 490cf1b..0000000
--- a/client/tests/monotonic_time/src/cpuset.c
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright 2008 Google Inc. All Rights Reserved.
- * Author: md@google.com (Michael Davidson)
- */
-#define _GNU_SOURCE	/* for cpu_set macros */
-
-#include <sched.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include "cpuset.h"
-#include "logging.h"
-
-/*
- * Return the number of cpus in a cpu_set
- */
-int count_cpus(const cpu_set_t *cpus)
-{
-	int	count	= 0;
-	int	cpu;
-
-	for (cpu = 0; cpu < CPU_SETSIZE; cpu++)
-		if (CPU_ISSET(cpu, cpus))
-			++count;
-
-	return count;
-}
-
-/*
- * Parse a string containing a comma separated list of ranges
- * of cpu numbers such as: "0,2,4-7" into a cpu_set_t.
- */
-int parse_cpu_set(const char *s, cpu_set_t *cpus)
-{
-	CPU_ZERO(cpus);
-
-	while (*s) {
-		char	*next;
-		int	cpu;
-		int	start, end;
-
-		start = end = (int)strtol(s, &next, 0);
-		if (s == next)
-			break;
-		s = next;
-
-		if (*s == '-') {
-			++s;
-			end = (int)strtol(s, &next, 0);
-			if (s == next)
-				break;
-			s = next;
-		}
-
-		if (*s == ',')
-			++s;
-
-		if (start < 0 || start >= CPU_SETSIZE) {
-			ERROR(0, "bad cpu number '%d' in cpu set", start);
-			return 1;
-		}
-
-		if (end < 0 || end >= CPU_SETSIZE) {
-			ERROR(0, "bad cpu number '%d' in cpu set", end);
-			return 1;
-		}
-
-		if (end < start) {
-			ERROR(0, "bad range '%d-%d' in cpu set", start, end);
-			return 1;
-		}
-
-		for (cpu = start; cpu <= end; ++cpu)
-			CPU_SET(cpu, cpus);
-
-	}
-
-	if (*s) {
-		ERROR(0, "unexpected character '%c' in cpu set", *s);
-		return 1;
-	}
-
-	return 0;
-}
-
-
-static int show_range(char *buf, size_t len, const char *prefix,
-			int start, int end)
-{
-	int	n;
-
-	if (start == end)
-		n = snprintf(buf, len, "%s%d", prefix, start);
-	else
-		n = snprintf(buf, len, "%s%d-%d", prefix, start, end);
-
-	if (n < len)
-		return n;
-
-	return -1;
-}
-
-/*
- * Turn a cpu_set_t into a human readable string containing a
- * comma separated list of ranges of cpu numbers.
- *
- * Returns the number of bytes written to the buffer,
- * not including the terminating '\0' character,
- * or -1 if there was not enough space in the buffer.
- */
-int show_cpu_set(char *buf, size_t len, const cpu_set_t *cpus)
-{
-	char	*bufp	= buf;
-	int	start	= -1;
-	int	end	= -1;
-	char	*sep	= "";
-	int	cpu;
-
-	for (cpu = 0; cpu < CPU_SETSIZE; cpu++) {
-		if (CPU_ISSET(cpu, cpus)) {
-			if (start < 0)
-				start = cpu;
-			end = cpu;
-		} else if (start >= 0) {
-			int	n;
-			if ((n = show_range(bufp, len, sep, start, end)) < 0)
-				return -1;
-			len -= n;
-			bufp += n;
-			sep = ",";
-			start = end = -1;
-		}
-	}
-
-	if (start >= 0) {
-		int	n;
-		if ((n = show_range(bufp, len, sep, start, end)) < 0)
-			return -1;
-		bufp += n;
-	}
-
-	return bufp - buf;
-}
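
parse_cpu_set() above accepts comma-separated ranges such as "0,2,4-7". The same format parsed in a few lines of Python, for illustration only (no validation against CPU_SETSIZE):

    def parse_cpu_list(spec):
        # Parse a comma-separated list of CPU ranges ("0,2,4-7") into a set of
        # ints -- the format the deleted parse_cpu_set() accepts.
        cpus = set()
        for chunk in spec.split(','):
            if not chunk:
                continue
            if '-' in chunk:
                lo, hi = chunk.split('-', 1)
                cpus.update(range(int(lo), int(hi) + 1))
            else:
                cpus.add(int(chunk))
        return cpus

parse_cpu_list('0,2,4-7') returns {0, 2, 4, 5, 6, 7}, matching what the C parser would set in the cpu_set_t.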
diff --git a/client/tests/monotonic_time/src/cpuset.h b/client/tests/monotonic_time/src/cpuset.h
deleted file mode 100644
index bd48eca..0000000
--- a/client/tests/monotonic_time/src/cpuset.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright 2008 Google Inc. All Rights Reserved.
- * Author: md@google.com (Michael Davidson)
- */
-
-#ifndef CPUSET_H_
-#define CPUSET_H_
-
-#define _GNU_SOURCE	/* for cpu_set macros */
-
-#include <sched.h>
-
-int count_cpus(const cpu_set_t *cpus);
-int parse_cpu_set(const char *s, cpu_set_t *cpus);
-int show_cpu_set(char *buf, size_t len, const cpu_set_t *cpus);
-
-#endif	/* CPUSET_H_ */
diff --git a/client/tests/monotonic_time/src/logging.c b/client/tests/monotonic_time/src/logging.c
deleted file mode 100644
index 2199647..0000000
--- a/client/tests/monotonic_time/src/logging.c
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright 2008 Google Inc. All Rights Reserved.
- *
- * Author: md@google.com (Michael Davidson)
- */
-
-#include <stdarg.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include "logging.h"
-
-
-static FILE		*log_fp		= NULL;
-static const char	*program	= "";
-static int		debug		= 0;
-
-
-void set_log_file(FILE *fp)
-{
-	log_fp = fp;
-}
-
-void set_program_name(const char *name)
-{
-	program = name;
-}
-
-void set_debug_level(int level)
-{
-	debug = level;
-}
-
-void msg(enum msg_type msg_type, int data, const char *fmt, ...)
-{
-	va_list		ap;
-	int		err	= 0;
-	const char	*type 	= NULL;
-
-	/*
-	 * default is to log to stdout
-	 */ 	
-	if (!log_fp)
-		log_fp = stdout;
-
-	switch (msg_type) {
-		case MSG_DEBUG:
-			if (data > debug)
-				return;
-			type = "DEBUG";
-			break;
-		case MSG_INFO:
-			type = "INFO";
-			break;
-		case MSG_WARN:
-			type = "WARN";
-			break;
-		case MSG_ERROR:
-			type = "ERROR";
-			err = data;
-			break;
-		case MSG_FATAL:
-			type = "FATAL";
-			err = data;
-			break;
-	}
-
-	va_start(ap, fmt);
-
-	if (type)
-		fprintf(log_fp, "%s: ", type);
-
-	if (program)
-		fprintf(log_fp, "%s: ", program);
-
-	vfprintf(log_fp, fmt, ap);
-
-	if (err) {
-		fprintf(log_fp, ": %s\n", strerror(err));
-	} else {
-		fputc('\n', log_fp);
-	}
-
-	va_end(ap);
-
-	if (msg_type == MSG_FATAL)
-		exit(EXIT_FAILURE);
-}
diff --git a/client/tests/monotonic_time/src/logging.h b/client/tests/monotonic_time/src/logging.h
deleted file mode 100644
index 413aae9..0000000
--- a/client/tests/monotonic_time/src/logging.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2008 Google Inc. All Rights Reserved.
- *
- * Author: md@google.com (Michael Davidson)
- */
-
-#ifndef LOGGING_H_
-#define LOGGING_H_
-
-enum msg_type {
-	MSG_DEBUG,
-	MSG_INFO,
-	MSG_WARN,
-	MSG_ERROR,
-	MSG_FATAL,
-};
-
-void msg(enum msg_type, int data, const char *fmt, ...);
-
-#define	DEBUG(level, fmt, args...)	msg(MSG_DEBUG, level, fmt, ##args)
-#define	INFO(fmt, args...)		msg(MSG_INFO, 0, fmt, ##args)
-#define	WARN(err, fmt, args...)		msg(MSG_WARN, err, fmt, ##args)
-#define	ERROR(err, fmt, args...)	msg(MSG_ERROR, err, fmt, ##args)
-#define	FATAL(err, fmt, args...)	msg(MSG_FATAL, err, fmt, ##args)
-
-extern void set_program_name(const char *name);
-extern void set_debug_level(int level);
-extern void set_log_file(FILE *fp);
-
-#endif /* LOGGING_H_ */
diff --git a/client/tests/monotonic_time/src/spinlock.h b/client/tests/monotonic_time/src/spinlock.h
deleted file mode 100644
index b70116a..0000000
--- a/client/tests/monotonic_time/src/spinlock.h
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2008 Google Inc. All Rights Reserved.
- * Author: md@google.com (Michael Davidson)
- *
- * Based on time-warp-test.c, which is:
- * Copyright (C) 2005, Ingo Molnar
- */
-
-#ifndef SPINLOCK_H_
-#define	SPINLOCK_H_
-
-typedef unsigned long spinlock_t;
-
-static inline void spin_lock(spinlock_t *lock)
-{
-	__asm__ __volatile__(
-		"1: rep; nop\n"
-		" lock; btsl $0,%0\n"
-		"jc 1b\n"
-			     : "=g"(*lock) : : "memory");
-}
-
-static inline void spin_unlock(spinlock_t *lock)
-{
-	__asm__ __volatile__("movl $0,%0; rep; nop" : "=g"(*lock) :: "memory");
-}
-
-#endif	/* SPINLOCK_H_ */
diff --git a/client/tests/monotonic_time/src/threads.c b/client/tests/monotonic_time/src/threads.c
deleted file mode 100644
index 80630db..0000000
--- a/client/tests/monotonic_time/src/threads.c
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright 2008 Google Inc. All Rights Reserved.
- * Author: md@google.com (Michael Davidson)
- */
-#define _GNU_SOURCE
-
-#include <stdio.h>
-#include <string.h>
-#include <errno.h>
-#include <sched.h>
-#include <pthread.h>
-
-#include "logging.h"
-#include "threads.h"
-
-#define MAX_CPUS	CPU_SETSIZE
-#define	MAX_THREADS	MAX_CPUS
-
-typedef struct thread {
-	pthread_t	thread;
-	cpu_set_t	cpus;
-	thread_func_t	func;
-	void		*arg;
-} thread_t;
-
-static thread_t	threads[MAX_THREADS];
-static int	num_threads;
-
-
-/*
- * Helper function to run a thread on a specific set of CPUs.
- */
-static void *run_thread(void *arg)
-{
-	thread_t	*thread = arg;
-	void		*result;
-
-	if (sched_setaffinity(0, sizeof thread->cpus, &thread->cpus) < 0)
-		WARN(errno, "sched_setaffinity() failed");
-
-	result = thread->func(thread->arg);
-
-	return result;
-}
-
-
-/*
- * Create a set of threads each of which is bound to one of
- * the CPUs specified by cpus.
- * Returns the number of threads created.
- */
-int create_per_cpu_threads(cpu_set_t *cpus, thread_func_t func, void *arg)
-{
-	int	cpu;
-
-	for (cpu = 0; cpu < MAX_CPUS; cpu++) {
-		int		err;
-		thread_t	*thread;
-		if (!CPU_ISSET(cpu, cpus))
-			continue;
-		if (num_threads >= MAX_THREADS)
-			break;
-
-		thread		= &threads[num_threads++];
-		thread->func	= func;
-		thread->arg	= arg;
-		CPU_ZERO(&thread->cpus);
-		CPU_SET(cpu, &thread->cpus);
-
-		err = pthread_create(&thread->thread, NULL, run_thread, thread);
-		if (err) {
-			WARN(err, "pthread_create() failed");
-			--num_threads;
-			break;
-		}
-	}
-
-	return num_threads;
-}
-
-
-/*
- * Create nthreads threads.
- * Returns the number of threads created.
- */
-int create_threads(int nthreads, thread_func_t func, void *arg)
-{
-	if (nthreads > MAX_THREADS)
-		nthreads = MAX_THREADS;
-
-	while (--nthreads >= 0) {
-		int		err;
-		thread_t	*thread;
-
-		thread		= &threads[num_threads++];
-		thread->func	= func;
-		thread->arg	= arg;
-		CPU_ZERO(&thread->cpus);
-
-		err = pthread_create(&thread->thread, NULL, func, arg);
-		if (err) {
-			WARN(err, "pthread_create() failed");
-			--num_threads;
-			break;
-		}
-	}
-
-	return num_threads;
-}
-
-
-/*
- * Join with the set of previously created threads.
- */
-void join_threads(void)
-{
-	while (num_threads > 0)
-		pthread_join(threads[--num_threads].thread, NULL);
-}
-
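
The threads.c helper removed above pins one worker thread to each CPU by calling sched_setaffinity() from a small wrapper before invoking the real thread function. A minimal sketch of the same idea in Python is shown below; it is only an illustration, not the deleted code, and it relies on the Linux-only os.sched_setaffinity()/os.sched_getaffinity() calls (pid 0 means the calling thread).

    import os
    import threading

    def create_per_cpu_threads(func, *args):
        """Start one thread per CPU in the current affinity mask, each pinned
        to its CPU before running func (mirrors the removed threads.c helper)."""
        def pinned(cpu):
            os.sched_setaffinity(0, {cpu})   # pid 0 = the calling thread
            func(*args)

        workers = [threading.Thread(target=pinned, args=(cpu,))
                   for cpu in sorted(os.sched_getaffinity(0))]
        for t in workers:
            t.start()
        return workers

    # Usage sketch: run a no-op on every CPU, then join.
    if __name__ == "__main__":
        threads = create_per_cpu_threads(lambda: None)
        for t in threads:
            t.join()
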
diff --git a/client/tests/monotonic_time/src/threads.h b/client/tests/monotonic_time/src/threads.h
deleted file mode 100644
index b0f12b8..0000000
--- a/client/tests/monotonic_time/src/threads.h
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Copyright 2008 Google Inc. All Rights Reserved.
- * Author: md@google.com (Michael Davidson)
- */
-
-#ifndef THREADS_H_
-#define THREADS_H_
-
-typedef void  *(*thread_func_t)(void *);
-
-int create_threads(int num_threads, thread_func_t func, void *arg);
-int create_per_cpu_threads(cpu_set_t *cpus, thread_func_t func, void *arg);
-void join_threads(void);
-
-#endif /* THREADS_H_ */
diff --git a/client/tests/monotonic_time/src/time_test.c b/client/tests/monotonic_time/src/time_test.c
deleted file mode 100644
index e1ad6bc..0000000
--- a/client/tests/monotonic_time/src/time_test.c
+++ /dev/null
@@ -1,388 +0,0 @@
-/*
- * Copyright 2008 Google Inc. All Rights Reserved.
- * Author: md@google.com (Michael Davidson)
- *
- * Based on time-warp-test.c, which is:
- * Copyright (C) 2005, Ingo Molnar
- */
-#define _GNU_SOURCE
-
-#include <errno.h>
-#include <pthread.h>
-#include <getopt.h>
-#include <sched.h>
-#include <signal.h>
-#include <stdarg.h>
-#include <stdint.h>
-#include <inttypes.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <sys/time.h>
-#include <time.h>
-
-#include "cpuset.h"
-#include "spinlock.h"
-#include "threads.h"
-#include "logging.h"
-
-
-char	*program	= "";
-long	duration	= 0;
-long	threshold	= 0;
-int	verbose		= 0;
-
-const char optstring[] = "c:d:ht:v";
-
-struct option options[] = {
-	{ "cpus",	required_argument,	0, 	'c'	},
-	{ "duration",	required_argument,	0,	'd'	},
-	{ "help",	no_argument,		0, 	'h'	},
-	{ "threshold",	required_argument,	0, 	't'	},
-	{ "verbose",	no_argument,		0, 	'v'	},
-	{ 0,	0,	0,	0 }
-};
-
-
-void usage(void)
-{
-	printf("usage: %s [-hv] [-c <cpu_set>] [-d duration] [-t threshold] "
-		"tsc|gtod|clock", program);
-}
-
-
-const char help_text[] =
-"check time sources for monotonicity across multiple CPUs\n"
-"  -c,--cpus        set of cpus to test (default: all)\n"
-"  -d,--duration    test duration in seconds (default: infinite)\n"
-"  -t,--threshold   error threshold (default: 0)\n"
-"  -v,--verbose     verbose output\n"
-"  tsc              test the TSC\n"
-"  gtod             test gettimeofday()\n"
-"  clock            test CLOCK_MONOTONIC\n";
-
-
-void help(void)
-{
-	usage();
-	printf("%s", help_text);
-}
-
-
-/*
- * get the TSC as 64 bit value with CPU clock frequency resolution
- */
-#if defined(__x86_64__)
-static inline uint64_t rdtsc(void)
-{
-	uint32_t	tsc_lo, tsc_hi;
-	__asm__ __volatile__("rdtsc" : "=a" (tsc_lo), "=d" (tsc_hi));
-	return ((uint64_t)tsc_hi << 32) | tsc_lo;
-}
-#elif defined(__i386__)
-static inline uint64_t rdtsc(void)
-{
-	uint64_t	tsc;
-	__asm__ __volatile__("rdtsc" : "=A" (tsc));
-	return tsc;
-}
-#else
-#error "rdtsc() not implemented for this architecture"
-#endif
-
-
-static inline uint64_t rdtsc_mfence(void)
-{
-	__asm__ __volatile__("mfence" ::: "memory");
-	return rdtsc();
-}
-
-
-static inline uint64_t rdtsc_lfence(void)
-{
-	__asm__ __volatile__("lfence" ::: "memory");
-	return rdtsc();
-}
-
-
-/*
- * get result from gettimeofday() as a 64 bit value
- * with microsecond resolution
- */
-static inline uint64_t rdgtod(void)
-{
-	struct timeval tv;
-
-	gettimeofday(&tv, NULL);
-	return (uint64_t)tv.tv_sec * 1000000 + tv.tv_usec;
-}
-
-
-/*
- * get result from clock_gettime(CLOCK_MONOTONIC) as a 64 bit value
- * with nanosecond resolution
- */
-static inline uint64_t rdclock(void)
-{
-	struct timespec ts;
-
-	clock_gettime(CLOCK_MONOTONIC, &ts);
-	return (uint64_t)ts.tv_sec * 1000000000 + ts.tv_nsec;
-}
-
-
-/*
- * test data
- */
-typedef struct test_info {
-	const char	*name;		/* test name			*/
-	void		(*func)(struct test_info *);	/* the test	*/
-	spinlock_t	lock;
-	uint64_t	last;		/* last time value		*/
-	long		loops;		/* # of test loop iterations	*/
-	long		warps;		/* # of backward time jumps	*/
-	int64_t		worst;		/* worst backward time jump	*/
-	uint64_t	start;		/* test start time		*/
-	int		done;		/* flag to stop test		*/
-} test_info_t;
-
-
-void show_warps(struct test_info *test)
-{
-	INFO("new %s-warp maximum: %9"PRId64, test->name, test->worst);
-}
-
-
-#define	DEFINE_TEST(_name)				\
-							\
-void _name##_test(struct test_info *test)		\
-{							\
-	uint64_t t0, t1;				\
-	int64_t delta;					\
-							\
-	spin_lock(&test->lock);				\
-	t1 = rd##_name();				\
-	t0 = test->last;				\
-	test->last = rd##_name();			\
-	test->loops++;					\
-	spin_unlock(&test->lock);			\
-							\
-	delta = t1 - t0;				\
-	if (delta < 0 && delta < -threshold) {		\
-		spin_lock(&test->lock);			\
-		++test->warps;				\
-		if (delta < test->worst) {		\
-			test->worst = delta;		\
-			show_warps(test);		\
-		}					\
-		spin_unlock(&test->lock);		\
-	}						\
-	if (!((unsigned long)t0 & 31))			\
-		asm volatile ("rep; nop");		\
-}							\
-							\
-struct test_info _name##_test_info = {			\
-	.name = #_name,					\
-	.func = _name##_test,				\
-}
-
-DEFINE_TEST(tsc);
-DEFINE_TEST(tsc_lfence);
-DEFINE_TEST(tsc_mfence);
-DEFINE_TEST(gtod);
-DEFINE_TEST(clock);
-
-struct test_info *tests[] = {
-	&tsc_test_info,
-	&tsc_lfence_test_info,
-	&tsc_mfence_test_info,
-	&gtod_test_info,
-	&clock_test_info,
-	NULL
-};
-
-
-void show_progress(struct test_info *test)
-{
-	static int	count;
-	const char	progress[] = "\\|/-";
-	uint64_t	elapsed = rdgtod() - test->start;
-
-        printf(" | %.2f us, %s-warps:%ld %c\r",
-                        (double)elapsed/(double)test->loops,
-			test->name,
-                        test->warps,
-			progress[++count & 3]);
-	fflush(stdout);
-}
-
-
-void *test_loop(void *arg)
-{
-	struct test_info *test = arg;
-	
-	while (! test->done)
-		(*test->func)(test);
-
-	return NULL;
-}
-
-
-int run_test(cpu_set_t *cpus, long duration, struct test_info *test)
-{
-	int		errs;
-	int		ncpus;
-	int		nthreads;
-	struct timespec ts		= { .tv_sec = 0, .tv_nsec = 200000000 };
-	struct timespec	*timeout	= (verbose || duration) ? &ts : NULL;
-	sigset_t	signals;
-
-	/*
-	 * Make sure that SIG_INT is blocked so we can
-	 * wait for it in the main test loop below.
-	 */
-	sigemptyset(&signals);
-	sigaddset(&signals, SIGINT);
-	sigprocmask(SIG_BLOCK, &signals, NULL);
-
-	/*
-	 * test start time
-	 */
-	test->start = rdgtod();
-
-	/*
- 	 * create the threads
- 	 */
-	ncpus = count_cpus(cpus);
-	nthreads = create_per_cpu_threads(cpus, test_loop, test);
-	if (nthreads != ncpus) {
-		ERROR(0, "failed to create threads: expected %d, got %d",
-			ncpus, nthreads);
-		if (nthreads) {
-			test->done = 1;
-			join_threads();
-		}
-		return 1;
-	}
-
-	if (duration) {
-		INFO("running %s test on %d cpus for %ld seconds",
-			 test->name, ncpus, duration);
-	} else {
-		INFO("running %s test on %d cpus", test->name, ncpus);
-	}
-
-	/*
- 	 * wait for a signal
- 	 */
-	while (sigtimedwait(&signals, NULL, timeout) < 0) {
-		if (duration  && rdgtod() > test->start + duration * 1000000)
-			break;
-
-		if (verbose)
-			show_progress(test);
-	}
-
-	/*
-	 * tell the test threads that we are done and wait for them to exit
-	 */
-	test->done = 1;
-
-	join_threads();
-
-	errs = (test->warps != 0);
-
-	if (!errs)
-		printf("PASS:\n");
-	else
-		printf("FAIL: %s-worst-warp=%"PRId64"\n",
-			test->name, test->worst);
-	
-	return errs;
-}
-
-
-int
-main(int argc, char *argv[])
-{
-	int		c;
-	cpu_set_t	cpus;
-	int		errs;
-	int		i;
-	test_info_t	*test;
-	const char	*testname;
-	extern int	opterr;
-	extern int	optind;
-	extern char	*optarg;
-
-	if ((program = strrchr(argv[0], '/')) != NULL)
-		++program;
-	else
-		program = argv[0];
-	set_program_name(program);
-
-	/*
-	 * default to checking all cpus
-	 */
-	for (c = 0; c < CPU_SETSIZE; c++) {
-		CPU_SET(c, &cpus);
-	}
-
-	opterr = 0;
-	errs = 0;
-	while ((c = getopt_long(argc, argv, optstring, options, NULL)) != EOF) {
-		switch (c) {
-			case 'c':
-				if (parse_cpu_set(optarg, &cpus) != 0)
-					++errs;
-				break;
-			case 'd':
-				duration = strtol(optarg, NULL, 0);
-				break;
-			case 'h':
-				help();
-				exit(0);
-			case 't':
-				threshold = strtol(optarg, NULL, 0);
-				break;
-			case 'v':
-				++verbose;
-				break;
-			default:
-				ERROR(0, "unknown option '%c'", c);
-				++errs;
-				break;
-		}
-	}
-
-	if (errs || optind != argc-1) {
-		usage();
-		exit(1);
-	}
-
-	testname = argv[optind];
-	for (i = 0; (test = tests[i]) != NULL; i++) {
-		if (strcmp(testname, test->name) == 0)
-			break;
-	}
-
-	if (!test) {
-		ERROR(0, "unknown test '%s'\n", testname);
-		usage();
-		exit(1);
-	}
-
-	/*
-	 * limit the set of CPUs to the ones that are currently available
-	 * (Note that on some kernel versions sched_setaffinity() will fail
-	 * if you specify CPUs that are not currently online so we ignore
-	 * the return value and hope for the best)
-	 */
-	sched_setaffinity(0, sizeof cpus, &cpus);
-	if (sched_getaffinity(0, sizeof cpus, &cpus) < 0) {
-		ERROR(errno, "sched_getaffinity() failed");
-		exit(1);
-	}
-
-	return run_test(&cpus, duration, test);
-}
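
The time_test.c removed above detects non-monotonic behaviour by having one thread per CPU repeatedly read a clock under a spinlock, compare the new reading against the last shared reading, and count any backward jump beyond the threshold as a "warp". The sketch below illustrates only that algorithm in Python; it is not the deleted test. It reads time.monotonic_ns() instead of the TSC/gettimeofday/CLOCK_MONOTONIC variants, uses unpinned threads, and the thread count and duration are arbitrary defaults.

    import threading
    import time

    def warp_check(nthreads=4, duration_s=2.0, threshold_ns=0):
        """Count backward jumps ('warps') observed by concurrent clock readers."""
        lock = threading.Lock()
        state = {"last": time.monotonic_ns(), "warps": 0, "worst": 0}
        deadline = time.monotonic() + duration_s

        def reader():
            while time.monotonic() < deadline:
                with lock:
                    now = time.monotonic_ns()
                    delta = now - state["last"]   # negative => time went backwards
                    state["last"] = now
                    if delta < -threshold_ns:
                        state["warps"] += 1
                        state["worst"] = min(state["worst"], delta)

        workers = [threading.Thread(target=reader) for _ in range(nthreads)]
        for t in workers:
            t.start()
        for t in workers:
            t.join()
        return state["warps"], state["worst"]

    if __name__ == "__main__":
        warps, worst = warp_check()
        print("PASS" if warps == 0 else "FAIL: worst warp %d ns" % worst)
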
diff --git a/client/tests/npb/NPB3.3.tar.gz b/client/tests/npb/NPB3.3.tar.gz
deleted file mode 100644
index 8b42952..0000000
--- a/client/tests/npb/NPB3.3.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/npb/control b/client/tests/npb/control
deleted file mode 100644
index bebd0e9..0000000
--- a/client/tests/npb/control
+++ /dev/null
@@ -1,28 +0,0 @@
-NAME = "NAS Parallel Benchmarks"
-AUTHOR = "Cao, Chen <kcao@redhat.com>"
-TEST_TYPE = "CLIENT"
-TEST_CLASS = "HARDWARE"
-TEST_CATEGORY = "BENCHMARK"
-TIME = "MEDIUM"
-DOC = """\
-Using NPB, OpenMP implementation.
-
-See http://www.nas.nasa.gov/Software/NPB/
-"""
-
-# Supported tests (benchmarks):
-#   bt.A bt.B bt.C bt.D bt.E bt.S bt.W
-#   cg.A cg.B cg.C cg.S cg.W
-#   dc.A dc.B dc.S dc.W
-#   ep.A ep.B ep.C ep.D ep.E ep.S ep.W
-#   ft.A ft.B ft.S ft.W
-#   is.A is.B is.C is.S is.W
-#   lu.A lu.B lu.C lu.S lu.W
-#   mg.A mg.B mg.S mg.W
-#   sp.A sp.B sp.C sp.D sp.E sp.S sp.W
-#   ua.A ua.B ua.C ua.S ua.W
-#
-# Please refer to npb.py for more information about
-# the arguments.
-job.run_test(url='npb', tests='ep.A ep.B bt.S bt.W')
-
diff --git a/client/tests/npb/enable-all-tests.patch b/client/tests/npb/enable-all-tests.patch
deleted file mode 100644
index f08a9d3..0000000
--- a/client/tests/npb/enable-all-tests.patch
+++ /dev/null
@@ -1,233 +0,0 @@
-diff --git a/NPB3.3-OMP/config/make.def b/NPB3.3-OMP/config/make.def
-new file mode 100644
-index 0000000..afffe7d
---- /dev/null
-+++ b/NPB3.3-OMP/config/make.def
-@@ -0,0 +1,161 @@
-+#---------------------------------------------------------------------------
-+#
-+#                SITE- AND/OR PLATFORM-SPECIFIC DEFINITIONS. 
-+#
-+#---------------------------------------------------------------------------
-+
-+#---------------------------------------------------------------------------
-+# Items in this file will need to be changed for each platform.
-+#---------------------------------------------------------------------------
-+
-+#---------------------------------------------------------------------------
-+# Parallel Fortran:
-+#
-+# For CG, EP, FT, MG, LU, SP, BT and UA, which are in Fortran, the following 
-+# must be defined:
-+#
-+# F77        - Fortran compiler
-+# FFLAGS     - Fortran compilation arguments
-+# F_INC      - any -I arguments required for compiling Fortran 
-+# FLINK      - Fortran linker
-+# FLINKFLAGS - Fortran linker arguments
-+# F_LIB      - any -L and -l arguments required for linking Fortran 
-+# 
-+# compilations are done with $(F77) $(F_INC) $(FFLAGS) or
-+#                            $(F77) $(FFLAGS)
-+# linking is done with       $(FLINK) $(F_LIB) $(FLINKFLAGS)
-+#---------------------------------------------------------------------------
-+
-+#---------------------------------------------------------------------------
-+# This is the fortran compiler used for Fortran programs
-+#---------------------------------------------------------------------------
-+F77 = gfortran
-+# This links fortran programs; usually the same as ${F77}
-+FLINK	= $(F77)
-+
-+#---------------------------------------------------------------------------
-+# These macros are passed to the linker 
-+#---------------------------------------------------------------------------
-+F_LIB  =
-+
-+#---------------------------------------------------------------------------
-+# These macros are passed to the compiler 
-+#---------------------------------------------------------------------------
-+F_INC =
-+
-+#---------------------------------------------------------------------------
-+# Global *compile time* flags for Fortran programs
-+#---------------------------------------------------------------------------
-+FFLAGS	= -O -fopenmp 
-+
-+#---------------------------------------------------------------------------
-+# Global *link time* flags. Flags for increasing maximum executable 
-+# size usually go here. 
-+#---------------------------------------------------------------------------
-+FLINKFLAGS = -O -fopenmp 
-+
-+
-+#---------------------------------------------------------------------------
-+# Parallel C:
-+#
-+# For IS and DC, which are in C, the following must be defined:
-+#
-+# CC         - C compiler 
-+# CFLAGS     - C compilation arguments
-+# C_INC      - any -I arguments required for compiling C 
-+# CLINK      - C linker
-+# CLINKFLAGS - C linker flags
-+# C_LIB      - any -L and -l arguments required for linking C 
-+#
-+# compilations are done with $(CC) $(C_INC) $(CFLAGS) or
-+#                            $(CC) $(CFLAGS)
-+# linking is done with       $(CLINK) $(C_LIB) $(CLINKFLAGS)
-+#---------------------------------------------------------------------------
-+
-+#---------------------------------------------------------------------------
-+# This is the C compiler used for C programs
-+#---------------------------------------------------------------------------
-+CC = cc
-+# This links C programs; usually the same as ${CC}
-+CLINK	= $(CC)
-+
-+#---------------------------------------------------------------------------
-+# These macros are passed to the linker 
-+#---------------------------------------------------------------------------
-+C_LIB  = -lm
-+
-+#---------------------------------------------------------------------------
-+# These macros are passed to the compiler 
-+#---------------------------------------------------------------------------
-+C_INC =
-+
-+#---------------------------------------------------------------------------
-+# Global *compile time* flags for C programs
-+# DC inspects the following flags (preceded by "-D"):
-+#
-+# IN_CORE - computes all views and checksums in main memory (if there is 
-+# enough memory)
-+#
-+# VIEW_FILE_OUTPUT - forces DC to write the generated views to disk
-+#
-+# OPTIMIZATION - turns on some nonstandard DC optimizations
-+#
-+# _FILE_OFFSET_BITS=64 
-+# _LARGEFILE64_SOURCE - are standard compiler flags which allow to work with 
-+# files larger than 2GB.
-+#---------------------------------------------------------------------------
-+CFLAGS	= -O
-+
-+#---------------------------------------------------------------------------
-+# Global *link time* flags. Flags for increasing maximum executable 
-+# size usually go here. 
-+#---------------------------------------------------------------------------
-+CLINKFLAGS = -O
-+
-+
-+#---------------------------------------------------------------------------
-+# Utilities C:
-+#
-+# This is the C compiler used to compile C utilities.  Flags required by 
-+# this compiler go here also; typically there are few flags required; hence 
-+# there are no separate macros provided for such flags.
-+#---------------------------------------------------------------------------
-+UCC	= cc
-+
-+
-+#---------------------------------------------------------------------------
-+# Destination of executables, relative to subdirs of the main directory. . 
-+#---------------------------------------------------------------------------
-+BINDIR	= ../bin
-+
-+
-+#---------------------------------------------------------------------------
-+# The variable RAND controls which random number generator 
-+# is used. It is described in detail in README.install. 
-+# Use "randi8" unless there is a reason to use another one. 
-+# Other allowed values are "randi8_safe", "randdp" and "randdpvec"
-+#---------------------------------------------------------------------------
-+RAND   = randi8
-+# The following is highly reliable but may be slow:
-+# RAND   = randdp
-+
-+
-+#---------------------------------------------------------------------------
-+# The variable WTIME is the name of the wtime source code module in the
-+# common directory.  
-+# For most machines,       use wtime.c
-+# For SGI power challenge: use wtime_sgi64.c
-+#---------------------------------------------------------------------------
-+WTIME  = wtime.c
-+
-+
-+#---------------------------------------------------------------------------
-+# Enable if either Cray (not Cray-X1) or IBM: 
-+# (no such flag for most machines: see common/wtime.h)
-+# This is used by the C compiler to pass the machine name to common/wtime.h,
-+# where the C/Fortran binding interface format is determined
-+#---------------------------------------------------------------------------
-+# MACHINE	=	-DCRAY
-+# MACHINE	=	-DIBM
-+
-+
-diff --git a/NPB3.3-OMP/config/suite.def b/NPB3.3-OMP/config/suite.def
-new file mode 100644
-index 0000000..7342195
---- /dev/null
-+++ b/NPB3.3-OMP/config/suite.def
-@@ -0,0 +1,60 @@
-+# config/suite.def
-+# This file is used to build several benchmarks with a single command. 
-+# Typing "make suite" in the main directory will build all the benchmarks
-+# specified in this file. 
-+# Each line of this file contains a benchmark name and the class.
-+# The name is one of "cg", "is", "dc", "ep", mg", "ft", "sp",
-+#  "bt", "lu", and "ua". 
-+# The class is one of "S", "W", "A" through "E" 
-+# (except that no classes C,D,E for DC and no class E for IS and UA).
-+# No blank lines. 
-+# The following example builds sample sizes of all benchmarks. 
-+ft	A
-+ft	B
-+ft	S
-+ft	W
-+mg	A
-+mg	B
-+mg	S
-+mg	W
-+sp	A
-+sp	B
-+sp	C
-+sp	S
-+sp	W
-+lu	A
-+lu	B
-+lu	C
-+lu	S
-+lu	W
-+bt	A
-+bt	B
-+bt	C
-+bt	S
-+bt	W
-+is	A
-+is	B
-+is	C
-+is	S
-+is	W
-+ep	A
-+ep	B
-+ep	C
-+ep	D
-+ep	E
-+ep	S
-+ep	W
-+cg	A
-+cg	B
-+cg	C
-+cg	S
-+cg	W
-+ua	A
-+ua	B
-+ua	C
-+ua	S
-+ua	W
-+dc	A
-+dc	B
-+dc	S
-+dc	W
diff --git a/client/tests/npb/npb.py b/client/tests/npb/npb.py
deleted file mode 100644
index 09b787e..0000000
--- a/client/tests/npb/npb.py
+++ /dev/null
@@ -1,145 +0,0 @@
-import os, shutil, logging, re
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class npb(test.test):
-    """
-    This module runs the NAS Parallel Benchmarks on the client machine
-
-    @note: Since we use gfortran to compile these benchmarks, this test might
-            not be able to run on older operating systems.
-    @see: http://www.nas.nasa.gov/Resources/Software/npb.html
-    """
-    version = 1
-    def initialize(self, tests=''):
-        # Initialize failure counter
-        self.n_fail = 0
-        # Get the parameters for run_once()
-        self.tests = tests
-        # Ratio is the reciprocal of the number of CPUs on the system (1/n_cpus).
-        self.ratio = 1.0 / utils.count_cpus()
-        logging.debug('Ratio (1/n_cpus) found for this system: %s' % self.ratio)
-
-
-    def setup(self, tarball='NPB3.3.tar.gz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        # Prepare the makefile and benchmarks to generate.
-        utils.system('patch -p1 < ../enable-all-tests.patch')
-        utils.system('cd NPB3.3-OMP && make suite')
-
-
-    def run_once(self):
-        """
-        Run each benchmark twice, with different number of threads.
-
-        A sanity check is made on each benchmark executed: the ratio
-        between the single-thread and full-thread run times,
-        time_ratio = time_one_thrd / time_full_thrds
-
-        has to fall inside an envelope:
-        upper_bound = full_thrds * (1 + (1/n_cpus))
-        lower_bound = full_thrds * (1 - (1/n_cpus))
-
-        Otherwise, we raise an exception (this test might be running under a
-        virtual machine, and a sanity check failure might point to bugs in the
-        SMP implementation).
-        """
-        os.chdir(self.srcdir)
-
-        # get the tests to run
-        test_list = self.tests.split()
-
-        if len(test_list) == 0:
-            raise error.TestError('No tests (benchmarks) provided. Exit.')
-
-        for itest in test_list:
-            itest_cmd = os.path.join('NPB3.3-OMP/bin/', itest)
-            try:
-                itest = utils.run(itest_cmd)
-            except:
-                logging.error('NPB benchmark %s has failed. Output: %s',
-                              itest_cmd, itest.stdout)
-                self.n_fail += 1
-            logging.debug(itest.stdout)
-
-            # Get the number of threads that the test ran
-            # (which is supposed to be equal to the number of system cores)
-            m = re.search('Total threads\s*=\s*(.*)\n', itest.stdout)
-
-            # Gather benchmark results
-            ts = re.search('Time in seconds\s*=\s*(.*)\n', itest.stdout)
-            mt = re.search('Mop/s total\s*=\s*(.*)\n', itest.stdout)
-            mp = re.search('Mop/s/thread\s*=\s*(.*)\n', itest.stdout)
-
-            time_seconds = float(ts.groups()[0])
-            mops_total = float(mt.groups()[0])
-            mops_per_thread = float(mp.groups()[0])
-
-            logging.info('Test: %s', itest_cmd)
-            logging.info('Time (s): %s', time_seconds)
-            logging.info('Total operations executed (mops/s): %s', mops_total)
-            logging.info('Total operations per thread (mops/s/thread): %s',
-                          mops_per_thread)
-
-            self.write_test_keyval({'test': itest_cmd})
-            self.write_test_keyval({'time_seconds': time_seconds})
-            self.write_test_keyval({'mops_total': mops_total})
-            self.write_test_keyval({'mops_per_thread': mops_per_thread})
-
-            # A little extra sanity check comes in handy
-            if int(m.groups()[0]) != utils.count_cpus():
-                raise error.TestError("NPB test suite evaluated the number "
-                                      "of threads incorrectly: System appears "
-                                      "to have %s cores, but %s threads were "
-                                      "executed." % (utils.count_cpus(), m.groups()[0]))
-
-            # We will use this integer with float point vars later.
-            full_thrds = float(m.groups()[0])
-
-            # get duration for full_threads running.
-            m = re.search('Time in seconds\s*=\s*(.*)\n', itest.stdout)
-            time_full_thrds = float(m.groups()[0])
-
-            # repeat the execution with single thread.
-            itest_single_cmd = ''.join(['OMP_NUM_THREADS=1 ', itest_cmd])
-            try:
-                itest_single = utils.run(itest_single_cmd)
-            except:
-                logging.error('NPB benchmark single thread %s has failed. '
-                              'Output: %s',
-                              itest_single_cmd,
-                              itest_single.stdout)
-                self.n_fail += 1
-
-            m = re.search('Time in seconds\s*=\s*(.*)\n', itest_single.stdout)
-            time_one_thrd = float(m.groups()[0])
-
-            # check durations
-            ratio = self.ratio
-            time_ratio = float(time_one_thrd / time_full_thrds)
-            upper_bound = full_thrds * (1 + ratio)
-            lower_bound = full_thrds * (1 - ratio)
-            logging.debug('Time ratio for %s: %s', itest_cmd, time_ratio)
-            logging.debug('Upper bound: %s', upper_bound)
-            logging.debug('Lower bound: %s', lower_bound)
-
-            violates_upper_bound = time_ratio > upper_bound
-            violates_lower_bound = time_ratio < lower_bound
-            if violates_upper_bound or violates_lower_bound:
-                logging.error('NPB benchmark %s failed sanity check '
-                              '- time ratio outside bounds' % itest_cmd)
-                self.n_fail += 1
-            else:
-                logging.debug('NPB benchmark %s sanity check PASS' % itest_cmd)
-
-
-    def cleanup(self):
-        """
-        Raise TestError if failures were detected during test execution.
-        """
-        if self.n_fail != 0:
-            raise error.TestError('NPB test failed.')
-        else:
-            logging.info('NPB test passed.')
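
The sanity check described in the npb.py docstring above reduces to comparing the single-thread/full-thread time ratio against an envelope around the thread count. The standalone sketch below is only an illustration of that bound calculation; full_thrds is taken to equal the CPU count, as the removed test assumed.

    def npb_ratio_ok(time_one_thread, time_all_threads, n_cpus):
        """Return True if the serial/parallel time ratio falls inside the
        envelope full_thrds * (1 +/- 1/n_cpus) used by the removed npb test."""
        time_ratio = time_one_thread / time_all_threads
        upper_bound = n_cpus * (1 + 1.0 / n_cpus)
        lower_bound = n_cpus * (1 - 1.0 / n_cpus)
        return lower_bound <= time_ratio <= upper_bound

    # On a 4-CPU machine, near-linear scaling gives a ratio close to 4:
    print(npb_ratio_ok(40.0, 10.5, 4))   # True  (ratio ~3.8, envelope is [3, 5])
    print(npb_ratio_ok(40.0, 30.0, 4))   # False (ratio ~1.3, scaling looks broken)
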
diff --git a/client/tests/parallel_dd/control b/client/tests/parallel_dd/control
deleted file mode 100644
index 0c8dce7..0000000
--- a/client/tests/parallel_dd/control
+++ /dev/null
@@ -1,16 +0,0 @@
-NAME = "Parallel DD"
-AUTHOR = "Martin Bligh <mbligh@google.com>"
-TIME = "MEDIUM"
-TEST_CATEGORY = "PERFORMANCE"
-TEST_CLASS = "HARDWARE"
-TEST_TYPE = "CLIENT"
-DOC = """
-Measures the performance of writing and reading multiple streams of files on
-the file system.
-"""
-
-# YOU NEED TO SPECIFY A FILESYSTEM 
-# fs = job.filesystem('/dev/sda3', job.tmpdir)
-job.run_test('parallel_dd', fs=fs, fstype='ext2', iterations=5, megabytes=1000, streams=2)
-
-
diff --git a/client/tests/parallel_dd/parallel_dd.py b/client/tests/parallel_dd/parallel_dd.py
deleted file mode 100644
index 02774f7..0000000
--- a/client/tests/parallel_dd/parallel_dd.py
+++ /dev/null
@@ -1,142 +0,0 @@
-import os, re, time, subprocess, sys, logging
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class parallel_dd(test.test):
-    version = 2
-
-    def initialize(self, fs, fstype = 'ext2', megabytes = 1000, streams = 2,
-                   seq_read = True):
-        self.megabytes = megabytes
-        self.blocks = megabytes * 256
-        self.blocks_per_file = self.blocks / streams
-        self.fs = fs
-        self.fstype = fstype
-        self.streams = streams
-        self.seq_read = seq_read
-
-        self.old_fstype = self._device_to_fstype('/etc/mtab')
-        if not self.old_fstype:
-            self.old_fstype = self._device_to_fstype('/etc/fstab')
-        if not self.old_fstype:
-            self.old_fstype = self.fstype
-
-        logging.info('Dumping %d megabytes across %d streams', megabytes,
-                     streams)
-
-
-    def raw_write(self):
-        logging.info("Timing raw write of %d megabytes" % self.megabytes)
-        sys.stdout.flush()
-        dd = 'dd if=/dev/zero of=%s bs=4k count=%d' % (self.fs.device,
-                                                       self.blocks)
-        utils.system(dd + ' > /dev/null')
-
-
-    def raw_read(self):
-        logging.info("Timing raw read of %d megabytes", self.megabytes)
-        sys.stdout.flush()
-        dd = 'dd if=%s of=/dev/null bs=4k count=%d' % (self.fs.device,
-                                                       self.blocks)
-        utils.system(dd + ' > /dev/null')
-
-
-    def fs_write(self):
-        p = []
-        # Write out 'streams' files in parallel background tasks
-        for i in range(self.streams):
-            file = os.path.join(self.job.tmpdir, 'poo%d' % (i+1))
-            dd = 'dd if=/dev/zero of=%s bs=4k count=%d' % \
-                                    (file, self.blocks_per_file)
-            p.append(subprocess.Popen(dd + ' > /dev/null', shell=True))
-        logging.info("Waiting for %d streams", self.streams)
-        # Wait for everyone to complete
-        for i in range(self.streams):
-            logging.info("Waiting for %d", p[i].pid)
-            sys.stdout.flush()
-            os.waitpid(p[i].pid, 0)
-        sys.stdout.flush()
-        sys.stderr.flush()
-
-
-    def fs_read(self):
-        p = []
-        # Read in 'streams' files in parallel background tasks
-        for i in range(self.streams):
-            file = os.path.join(self.job.tmpdir, 'poo%d' % (i+1))
-            dd = 'dd if=%s of=/dev/null bs=4k count=%d' % \
-                                    (file, self.blocks_per_file)
-            if self.seq_read:
-                utils.system(dd + ' > /dev/null')
-            else:
-                p.append(subprocess.Popen(dd + ' > /dev/null', shell=True))
-        if self.seq_read:
-            return
-        logging.info("Waiting for %d streams", self.streams)
-        # Wait for everyone to complete
-        for i in range(self.streams):
-            logging.info("Waiting for %d", p[i].pid)
-            sys.stdout.flush()
-            os.waitpid(p[i].pid, 0)
-
-
-    def _device_to_fstype(self, file):
-        device = self.fs.device
-        try:
-            line = utils.system_output('egrep ^%s %s' % (device, file))
-            logging.debug(line)
-            fstype = line.split()[2]
-            logging.debug('Found %s is type %s from %s', device, fstype, file)
-            return fstype
-        except error.CmdError, e:
-            logging.error('No %s found in %s', device, file)
-            return None
-
-
-    def run_once(self):
-        try:
-            self.fs.unmount()
-        except error.CmdError, e:
-            pass
-
-        logging.info('------------- Timing raw operations ------------------')
-        start = time.time()
-        self.raw_write()
-        self.raw_write_rate = self.megabytes / (time.time() - start)
-
-        start = time.time()
-        self.raw_read()
-        self.raw_read_rate = self.megabytes / (time.time() - start)
-
-        # Set up the filesystem
-        self.fs.mkfs(self.fstype)
-        self.fs.mount(None)
-
-        logging.info('------------- Timing fs operations ------------------')
-        start = time.time()
-        self.fs_write()
-        self.fs_write_rate = self.megabytes / (time.time() - start)
-        self.fs.unmount()
-
-        self.fs.mount(None)
-        start = time.time()
-        self.fs_read()
-        self.fs_read_rate = self.megabytes / (time.time() - start)
-
-        self.write_perf_keyval({
-            'raw_write' : self.raw_write_rate,
-            'raw_read'  : self.raw_read_rate,
-            'fs_write'  : self.fs_write_rate,
-            'fs_read'   : self.fs_read_rate })
-
-
-    def cleanup(self):
-        try:
-            self.fs.unmount()
-        except error.CmdError, e:
-            pass
-        logging.debug('\nFormatting %s back to type %s\n', self.fs,
-                      self.old_fstype)
-        self.fs.mkfs(self.old_fstype)
-        self.fs.mount(None)
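
parallel_dd, removed above, derives its throughput figures by timing dd streams and dividing the total megabytes by the elapsed time. The sketch below covers only the filesystem-write half of that measurement, writes ordinary files in a temporary directory rather than a raw device, and uses illustrative defaults for size, stream count and block size; it is an approximation, not the deleted test.

    import os
    import shutil
    import subprocess
    import tempfile
    import time

    def parallel_write_rate(megabytes=64, streams=2, blocksize=4096):
        """Time `streams` parallel dd writes of /dev/zero data and return MB/s."""
        blocks_per_file = (megabytes * 1024 * 1024) // (blocksize * streams)
        tmpdir = tempfile.mkdtemp()
        try:
            start = time.time()
            procs = []
            for i in range(streams):
                path = os.path.join(tmpdir, "stream%d" % i)
                cmd = ["dd", "if=/dev/zero", "of=%s" % path,
                       "bs=%d" % blocksize, "count=%d" % blocks_per_file]
                procs.append(subprocess.Popen(cmd, stderr=subprocess.DEVNULL))
            for p in procs:
                p.wait()
            elapsed = time.time() - start
        finally:
            shutil.rmtree(tmpdir)
        return megabytes / elapsed

    if __name__ == "__main__":
        print("fs_write: %.1f MB/s" % parallel_write_rate())
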
diff --git a/client/tests/perfmon/control b/client/tests/perfmon/control
deleted file mode 100644
index d3f5190..0000000
--- a/client/tests/perfmon/control
+++ /dev/null
@@ -1,16 +0,0 @@
-TIME="SHORT"
-AUTHOR = "Stephane Eranian <eranian@google.com>"
-DOC = """
-This is a simple series of tests for the perfmon2 API, which
-provides access to the hardware performance counters of modern
-processors.
-
-Information about perfmon2 at:
-http://perfmon2.sf.net
-"""
-NAME = 'perfmon'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-
-job.run_test('perfmon')
diff --git a/client/tests/perfmon/perfmon-tests-0.3.tar.gz b/client/tests/perfmon/perfmon-tests-0.3.tar.gz
deleted file mode 100644
index 5bfc934..0000000
--- a/client/tests/perfmon/perfmon-tests-0.3.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/perfmon/perfmon.py b/client/tests/perfmon/perfmon.py
deleted file mode 100644
index 207d68a..0000000
--- a/client/tests/perfmon/perfmon.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import os, re
-from autotest_lib.client.bin import utils, test
-from autotest_lib.client.common_lib import error
-
-# test requires at least 2.6.26, will skip otherwise (check is internal)
-class perfmon(test.test):
-    version = 16
-
-    def setup(self, tarball = 'perfmon-tests-0.3.tar.gz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.results = []
-
-
-    def run_once(self, dir = None, nprocs = None, args = ''):
-        cmd = self.srcdir + '/tests/pfm_tests' + args
-        # self.results.append(utils.system_output(cmd, retain_output=True))
-        if 'FAIL' in utils.system_output(cmd, retain_output=True):
-            raise error.TestError('some perfmon tests failed')
diff --git a/client/tests/pi_tests/control b/client/tests/pi_tests/control
deleted file mode 100644
index 4214b43..0000000
--- a/client/tests/pi_tests/control
+++ /dev/null
@@ -1,23 +0,0 @@
-NAME = "Priority inversion tests"
-AUTHOR = "Michal Piotrowski <michal.k.k.piotrowski@gmail.com>"
-TIME = "SHORT"
-TEST_CATEGORY = "FUNCTIONAL"
-TEST_CLASS = "KERNEL"
-TEST_TYPE = "CLIENT"
-DOC = """
-The basic premise here is to set up a deadlock scenario and confirm that PI
-mutexes resolve the situation. Three worker threads will be created from the
-main thread: low, medium and high priority threads that use SCHED_FIFO as
-their scheduling policy. The low priority thread claims a mutex and then
-starts "working". The medium priority thread starts and preempts the low
-priority thread. Then the high priority thread runs and attempts to claim
-the mutex owned by the low priority thread. Without priority inheritance,
-this will deadlock the program. With priority inheritance, the low priority
-thread receives a priority boost, finishes its "work" and releases the mutex,
-which allows the high priority thread to run and finish and then the medium
-priority thread finishes.
-
-That's the theory, anyway...
-"""
-
-job.run_test('pi_tests')
diff --git a/client/tests/pi_tests/pi_tests.py b/client/tests/pi_tests/pi_tests.py
deleted file mode 100644
index a68581a..0000000
--- a/client/tests/pi_tests/pi_tests.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class pi_tests(test.test):
-    version = 1
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://www.stardust.webpages.pl/files/patches/autotest/pi_tests.tar.bz2
-    def setup(self, tarball = 'pi_tests.tar.bz2'):
-        utils.check_glibc_ver('2.5')
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def execute(self, args = '1 300'):
-        os.chdir(self.srcdir)
-        utils.system('./start.sh ' + args)
diff --git a/client/tests/pi_tests/pi_tests.tar.bz2 b/client/tests/pi_tests/pi_tests.tar.bz2
deleted file mode 100644
index 93a40e5..0000000
--- a/client/tests/pi_tests/pi_tests.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/pktgen/control b/client/tests/pktgen/control
deleted file mode 100644
index 61777b7..0000000
--- a/client/tests/pktgen/control
+++ /dev/null
@@ -1,35 +0,0 @@
-NAME = "Pktgen test"
-AUTHOR = "Martin Bligh <mbligh@google.com>"
-TIME = "MEDIUM"
-TEST_CATEGORY = "FUNCTIONAL"
-TEST_CLASS = "HARDWARE"
-TEST_TYPE = "CLIENT"
-DOC = """
-pktgen is a high-performance testing tool included in the Linux kernel. Running
-in the kernel is currently the best way to test the TX path of a device driver
-and NIC. pktgen can also be used to generate ordinary packets to test other
-network devices, and it is especially useful for testing routers or bridges
-that use the Linux network stack. Because pktgen is "in-kernel", it can
-generate very high packet rates, letting a few systems saturate network
-devices such as routers or bridges.
-"""
-
-interface = 'eth0'
-count, num_iterations = 50000, 1
-
-# Parse comma-separated args.
-for arg in args:
-    for item in arg.split(','):
-        key, val = item.split('=')
-        if key == 'interface':
-            interface = val
-        if key == 'count':
-            count = int(val)
-        if key == 'num_iterations':
-            num_iterations = int(val)
-
-job.run_test('pktgen', eth=interface, count=count, clone_skb=0,
-             tag='clone_skb_off', num_iterations=num_iterations)
-job.run_test('pktgen', eth=interface, count=count, clone_skb=1,
-             tag='clone_skb_on', num_iterations=num_iterations)
-
diff --git a/client/tests/pktgen/pktgen.py b/client/tests/pktgen/pktgen.py
deleted file mode 100644
index 6472107..0000000
--- a/client/tests/pktgen/pktgen.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import os, logging
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class pktgen(test.test):
-    version = 1
-
-    def execute(self, eth='eth0', count=50000, clone_skb=1,
-                dst_ip='192.168.210.210', dst_mac='01:02:03:04:05:07',
-                num_iterations=1):
-        if not os.path.exists('/proc/net/pktgen'):
-            utils.system('/sbin/modprobe pktgen')
-        if not os.path.exists('/proc/net/pktgen'):
-            raise error.TestError('pktgen not loaded')
-
-        for i in xrange(num_iterations):
-            logging.info('Adding %s (iteration %d)' % (eth, i))
-            self.pgdev = '/proc/net/pktgen/kpktgend_0'
-
-            self.pgset('rem_device_all')
-            self.pgset('add_device ' + eth)
-            self.pgset('max_before_softirq 10000')
-
-            # Configure the individual devices
-            logging.info('Configuring %s (iteration %d)' % (eth, i))
-
-            self.ethdev='/proc/net/pktgen/' + eth
-            self.pgdev=self.ethdev
-
-            if clone_skb:
-                self.pgset('clone_skb %d' % (count))
-            self.pgset('min_pkt_size 60')
-            self.pgset('max_pkt_size 60')
-            self.pgset('dst ' + dst_ip)
-            self.pgset('dst_mac ' + dst_mac)
-            self.pgset('count %d' % (count))
-
-            # Time to run
-            self.pgdev='/proc/net/pktgen/pgctrl'
-            self.pgset('start')
-
-            output = os.path.join(self.resultsdir, eth)
-            logging.info('Completed %s (iteration %d)' % (eth, i))
-            utils.system('cp %s %s_%05d' % (self.ethdev, output, i))
-
-    def pgset(self, command):
-        file = open(self.pgdev, 'w')
-        file.write(command + '\n')
-        file.close()
-
-        if not utils.grep('Result: OK', self.pgdev):
-            if not utils.grep('Result: NA', self.pgdev):
-                utils.system('cat ' + self.pgdev)
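
The pktgen wrapper removed above drives the kernel's packet generator entirely through writes to control files under /proc/net/pktgen, then reads the same file back and looks for "Result: OK". Below is a self-contained sketch of that write-and-verify helper; it needs root and a prior `modprobe pktgen`, and the device names in the commented usage are placeholders taken from the removed test's defaults.

    def pgset(pgdev, command):
        """Write one pktgen command to a /proc/net/pktgen control file and check
        that the kernel reports 'Result: OK' (or 'Result: NA') when read back."""
        with open(pgdev, "w") as f:
            f.write(command + "\n")
        with open(pgdev) as f:
            status = f.read()
        if "Result: OK" not in status and "Result: NA" not in status:
            raise RuntimeError("pktgen rejected %r on %s:\n%s"
                               % (command, pgdev, status))

    # Hypothetical usage, mirroring the sequence the removed test issued:
    # pgset("/proc/net/pktgen/kpktgend_0", "rem_device_all")
    # pgset("/proc/net/pktgen/kpktgend_0", "add_device eth0")
    # pgset("/proc/net/pktgen/eth0", "count 50000")
    # pgset("/proc/net/pktgen/eth0", "dst 192.168.210.210")
    # pgset("/proc/net/pktgen/pgctrl", "start")
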
diff --git a/client/tests/pktgen/pktgen_paper.pdf b/client/tests/pktgen/pktgen_paper.pdf
deleted file mode 100644
index 8507097..0000000
--- a/client/tests/pktgen/pktgen_paper.pdf
+++ /dev/null
Binary files differ
diff --git a/client/tests/posixtest/control b/client/tests/posixtest/control
deleted file mode 100644
index b36df95..0000000
--- a/client/tests/posixtest/control
+++ /dev/null
@@ -1,13 +0,0 @@
-AUTHOR = '''mohd.omar@in.ibm.com (Mohammed Omar)'''
-
-NAME = "Posix test"
-
-TIME = "MEDIUM"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "Client"
-
-DOC = "Runs the Posix test suite from http://posixtest.sourceforge.net"
-
-
-job.run_test('posixtest')
diff --git a/client/tests/posixtest/posix-linux.patch b/client/tests/posixtest/posix-linux.patch
deleted file mode 100644
index f73913c..0000000
--- a/client/tests/posixtest/posix-linux.patch
+++ /dev/null
@@ -1,10 +0,0 @@
-diff --git a/LDFLAGS b/LDFLAGS
-index aa701d9..557a15d 100644
---- a/LDFLAGS
-+++ b/LDFLAGS
-@@ -13,4 +13,4 @@
- # For use with Linux, you may try the following flags to
- # allow for the NPTL-specific compilation (used in some test cases)
- # Note: this sometimes require the package nptl-devel.*.rpm to be installed.
--#-I /usr/include/nptl -L /usr/lib/nptl -D_XOPEN_SOURCE=600 -lpthread -lrt -lm
-+-I /usr/include/nptl -L /usr/lib/nptl -D_XOPEN_SOURCE=600 -lpthread -lrt -lm
diff --git a/client/tests/posixtest/posixtest.py b/client/tests/posixtest/posixtest.py
deleted file mode 100644
index c8e3e19..0000000
--- a/client/tests/posixtest/posixtest.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# POSIX test suite wrapper class. More information about the suite can be found
-# at http://posixtest.sourceforge.net/
-import os
-from autotest_lib.client.bin import test, utils
-
-
-__author__ = '''mohd.omar@in.ibm.com (Mohammed Omar)'''
-
-class posixtest(test.test):
-    version = 1
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://ufpr.dl.sourceforge.net/sourceforge/posixtest/posixtestsuite-1.5.2.tar.gz
-    def setup(self, tarball = 'posixtestsuite-1.5.2.tar.gz'):
-        self.posix_tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(self.posix_tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        # Applying a small patch that introduces some linux specific
-        # linking options
-        utils.system('patch -p1 < ../posix-linux.patch')
-        utils.make()
-
-
-    def execute(self):
-        os.chdir(self.srcdir)
-        utils.system('./run_tests THR')
diff --git a/client/tests/posixtest/posixtestsuite-1.5.2.tar.gz b/client/tests/posixtest/posixtestsuite-1.5.2.tar.gz
deleted file mode 100644
index d3a11d5..0000000
--- a/client/tests/posixtest/posixtestsuite-1.5.2.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/profiler_test/control b/client/tests/profiler_test/control
deleted file mode 100644
index 8729498..0000000
--- a/client/tests/profiler_test/control
+++ /dev/null
@@ -1,12 +0,0 @@
-NAME = "Profiler Test"
-AUTHOR = "Mihai Rusu <dizzy@google.com>"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "General"
-TEST_TYPE = "Client"
-DOC = """
-Tests a profiler (adds it, starts it, waits a couple of seconds, then stops
-it, generates the profiler report and removes it).
-"""
-
-job.run_test('profiler_test', profiler='oprofile')
diff --git a/client/tests/profiler_test/profiler_test.py b/client/tests/profiler_test/profiler_test.py
deleted file mode 100644
index 97754fe..0000000
--- a/client/tests/profiler_test/profiler_test.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import time
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import test, utils
-
-
-class profiler_test(test.test):
-    version = 2
-
-
-    def initialize(self, profiler=None, profiler_args=(), profiler_dargs=None):
-        """
-        Initialize this test with the profiler name, args and dargs.
-
-        @param profiler: Profiler name.
-        @param profiler_args: Profiler non-keyword arguments.
-        @param profiler_dargs: Profiler keyword arguments.
-        """
-        if not profiler:
-            raise error.TestError('No profiler specified.')
-        self._profiler = profiler
-        self._profiler_args = profiler_args
-        self._profiler_dargs = profiler_dargs or {}
-
-
-    def execute(self, seconds=5):
-        """
-        Add and start the profiler, sleep some seconds, stop and delete it.
-
-        We override "execute" and not "run_once" because we need to control
-        profilers here and in "run_once" it would be too late for that.
-
-        @param seconds: Number of seconds to sleep while the profiler is
-                running.
-        """
-        profilers = self.job.profilers
-        profilers.add(self._profiler, *self._profiler_args,
-                      **self._profiler_dargs)
-        profilers.start(self)
-
-        time.sleep(seconds)
-
-        profilers.stop(self)
-        profilers.report(self)
-        # TODO: check for profiler result files?
-        profilers.delete(self._profiler)
diff --git a/client/tests/qemu_iotests/control b/client/tests/qemu_iotests/control
deleted file mode 100644
index 789296a..0000000
--- a/client/tests/qemu_iotests/control
+++ /dev/null
@@ -1,34 +0,0 @@
-NAME = "qemu-iotests"
-AUTHOR = "Yolkfull Chow <yzhou@redhat.com>"
-TIME = "MEDIUM"
-TEST_CATEGORY = "kvm"
-TEST_CLASS = "KERNEL"
-TEST_TYPE = "CLIENT"
-DOC = """
-This is the QEMU I/O test suite autotest module
-
-* Intro
-
-This package contains a simple test suite for the I/O layer of qemu.
-It does not require a guest, only the qemu, qemu-img and qemu-io
-binaries.  This limits it to exercising the low-level I/O path, not
-actual block drivers like ide, scsi or virtio.
-
-* Usage
-
-Just run ./check to run all tests for the raw image format, or ./check
--qcow2 to test the qcow2 image format.  The output of ./check -h explains
-additional options to test further image formats or I/O methods.
-
-* Feedback and patches
-
-Please send improvements to the upstream test suite, general feedback or just
-reports of failing test cases to qemu-devel@savannah.nongnu.org.
-"""
-
-image_types = ['raw', 'cow', 'qcow', 'qcow2', 'vpc', 'vmdk']
-
-for image_type in image_types:
-    option_flag = '-' + image_type
-    job.run_test('qemu_iotests', qemu_path='', options=option_flag,
-                 tag=image_type)
diff --git a/client/tests/qemu_iotests/qemu-iotests.tar.bz2 b/client/tests/qemu_iotests/qemu-iotests.tar.bz2
deleted file mode 100644
index 9730569..0000000
--- a/client/tests/qemu_iotests/qemu-iotests.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/qemu_iotests/qemu_iotests.py b/client/tests/qemu_iotests/qemu_iotests.py
deleted file mode 100644
index 1a036b9..0000000
--- a/client/tests/qemu_iotests/qemu_iotests.py
+++ /dev/null
@@ -1,99 +0,0 @@
-import os, re, logging
-from autotest_lib.client.bin import test, utils, os_dep
-from autotest_lib.client.common_lib import error
-
-
-class qemu_iotests(test.test):
-    """
-    This autotest module runs the qemu_iotests testsuite.
-
-    @copyright: Red Hat 2009
-    @author: Yolkfull Chow (yzhou@redhat.com)
-    @see: http://www.kernel.org/pub/scm/linux/kernel/git/hch/qemu-iotests.git
-    """
-    version = 2
-    def initialize(self, qemu_path=''):
-        if qemu_path:
-            # Prepending the path at the beginning of $PATH will make the
-            # version found on qemu_path be preferred over other ones.
-            os.environ['PATH'] =  qemu_path + ":" + os.environ['PATH']
-        try:
-            self.qemu_img_path = os_dep.command('qemu-img')
-            self.qemu_io_path = os_dep.command('qemu-io')
-        except ValueError, e:
-            raise error.TestNAError('Commands qemu-img or qemu-io missing')
-        self.job.require_gcc()
-
-
-    def setup(self, tarball='qemu-iotests.tar.bz2'):
-        """
-        Uncompresses the tarball and cleans any leftover output files.
-
-        @param tarball: Relative path to the testsuite tarball.
-        """
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        utils.make('clean')
-
-
-    def run_once(self, options='', testlist=''):
-        """
-        Passes the appropriate parameters to the testsuite.
-
-        # Usage: $0 [options] [testlist]
-        # check options
-        #     -raw                test raw (default)
-        #     -cow                test cow
-        #     -qcow               test qcow
-        #     -qcow2              test qcow2
-        #     -vpc                test vpc
-        #     -vmdk               test vmdk
-        #     -xdiff              graphical mode diff
-        #     -nocache            use O_DIRECT on backing file
-        #     -misalign           misalign memory allocations
-        #     -n                  show me, do not run tests
-        #     -T                  output timestamps
-        #     -r                  randomize test order
-        #
-        # testlist options
-        #     -g group[,group...] include tests from these groups
-        #     -x group[,group...] exclude tests from these groups
-        #     NNN                 include test NNN
-        #     NNN-NNN             include test range (eg. 012-021)
-
-        @param qemu_path: Optional qemu install path.
-        @param options: Options accepted by the testsuite.
-        @param testlist: List of tests that will be executed (by default, all
-                testcases will be executed).
-        """
-        os.chdir(self.srcdir)
-        test_dir = os.path.join(self.srcdir, "scratch")
-        if not os.path.exists(test_dir):
-            os.mkdir(test_dir)
-        cmd = "./check"
-        if options:
-            cmd += " " + options
-        if testlist:
-            cmd += " " + testlist
-
-        try:
-            try:
-                result = utils.system(cmd)
-            except error.CmdError, e:
-                failed_cases = re.findall("Failures: (\d+)", str(e))
-                for num in failed_cases:
-                    failed_name = num + ".out.bad"
-                    src = os.path.join(self.srcdir, failed_name)
-                    dest = os.path.join(self.resultsdir, failed_name)
-                    utils.get_file(src, dest)
-                if failed_cases:
-                    e_msg = ("Qemu-iotests failed. Failed cases: %s" %
-                             failed_cases)
-                else:
-                    e_msg = "Qemu-iotests failed"
-                raise error.TestFail(e_msg)
-        finally:
-            src = os.path.join(self.srcdir, "check.log")
-            dest = os.path.join(self.resultsdir, "check.log")
-            utils.get_file(src, dest)
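
The qemu_iotests wrapper removed above simply shells out to the suite's ./check script and, on failure, copies the NNN.out.bad diffs and check.log back into the results directory. A rough sketch of that flow with the modern subprocess API follows; the directory paths are placeholders, and parsing the "Failures: ..." line is an assumption about the suite's usual summary output.

    import re
    import shutil
    import subprocess

    def run_qemu_iotests(srcdir, resultsdir, options="-raw"):
        """Run ./check in srcdir and copy failure artifacts into resultsdir."""
        proc = subprocess.run(["./check"] + options.split(),
                              cwd=srcdir, capture_output=True, text=True)
        # The summary line looks like "Failures: 001 042"; keep each test number.
        failed = []
        for line in proc.stdout.splitlines():
            m = re.match(r"Failures:\s*(.*)", line)
            if m:
                failed = m.group(1).split()
        for num in failed:
            shutil.copy("%s/%s.out.bad" % (srcdir, num), resultsdir)
        shutil.copy("%s/check.log" % srcdir, resultsdir)
        if proc.returncode != 0:
            raise RuntimeError("qemu-iotests failed, cases: %s"
                               % (failed or "unknown"))
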
diff --git a/client/tests/reaim/control b/client/tests/reaim/control
deleted file mode 100644
index b58bab6..0000000
--- a/client/tests/reaim/control
+++ /dev/null
@@ -1,11 +0,0 @@
-AUTHOR = "Martin Bligh <mbligh@google.com>"
-NAME = "AIM 7"
-TIME = "MEDIUM"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Benchmark"
-TEST_TYPE = "Client"
-
-DOC = """Run the Open Source AIM 7 benchmark from
-http://sourceforge.net/project/showfiles.php?group_id=71019"""
-
-job.run_test('reaim')
diff --git a/client/tests/reaim/osdl-aim-7.0.1.13.tar.gz b/client/tests/reaim/osdl-aim-7.0.1.13.tar.gz
deleted file mode 100644
index aeea545..0000000
--- a/client/tests/reaim/osdl-aim-7.0.1.13.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/reaim/reaim.config b/client/tests/reaim/reaim.config
deleted file mode 100644
index 2268f45..0000000
--- a/client/tests/reaim/reaim.config
+++ /dev/null
@@ -1,30 +0,0 @@
-# Sample configuration file for the reaim workload
-# cliff white, OSDL 4/2003
-#
-# This is a comment (duh)
-# all variables are named in UPPER CASE, unless you want
-# to write a better option parser. Send me a patch
-# I'm keeping this, but not used yet
-#
-# The file and poolsize values can be specified here, or
-# in the workfile. Values in the workfile will over write
-# these values
-FILESIZE 10k
-POOLSIZE 1m
-# 
-# A list of disk directories for the exerciser
-# DISKDIR /tmp/diskdir
-# To control number of users
-# STARTUSERS 2
-# ENDUSERS 3
-# and to control the count
-# INCREMENT 2
-# Number of jobs per child
-# JOBS 20
-# All switch options will use '1' for on, anything else for off
-# Extra output
-# VERBOSE 1
-# Switch for the crossover
-# CROSSOVER 1
-# Switch for STP-style results file
-# BRIEF 1
diff --git a/client/tests/reaim/reaim.py b/client/tests/reaim/reaim.py
deleted file mode 100644
index 83f53d9..0000000
--- a/client/tests/reaim/reaim.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# Needs autoconf & automake & libtool to be installed. Ewwwwwwwwwwwwwwwwwwwwww
-import re, os
-from autotest_lib.client.bin import test, utils, os_dep
-
-
-class reaim(test.test):
-    version = 1
-
-    # http://prdownloads.sourceforge.net/re-aim-7/osdl-aim-7.0.1.13.tar.gz
-    def setup(self, tarball = 'osdl-aim-7.0.1.13.tar.gz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-
-        self.job.setup_dep(['libaio'])
-        libs = '-L' + self.autodir + '/deps/libaio/lib -laio'
-        cflags = '-I ' + self.autodir + '/deps/libaio/include'
-        var_libs = 'LIBS="' + libs + '"'
-        var_cflags  = 'CFLAGS="' + cflags + '"'
-        self.make_flags = var_libs + ' ' + var_cflags
-
-        os_dep.commands('autoconf', 'automake', 'libtoolize')
-        os.chdir(self.srcdir)
-        utils.system('./bootstrap')
-        utils.system('./configure')
-        # we can't use patch here, as the Makefile is autogenerated
-        # so we can't tell exactly what it looks like.
-        # Perform some foul in-place sed hackery instead.
-        for file in ('Makefile', 'src/Makefile'):
-            utils.system('sed -i "s/^CFLAGS =/CFLAGS +=/" ' + file)
-            utils.system('sed -i "s/^LIBS =/LIBS +=/" ' + file)
-        utils.system(self.make_flags + ' make')
-        os.rename('src/reaim', 'reaim')
-
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.ldlib = 'LD_LIBRARY_PATH=%s/deps/libaio/lib'%(self.autodir)
-
-
-    def execute(self, iterations = 1, workfile = 'workfile.short',
-                    start = 1, end = 10, increment = 2,
-                    extra_args = '', tmpdir = None):
-        if not tmpdir:
-            tmpdir = self.tmpdir
-
-        # -f workfile
-        # -s <number of users to start with>
-        # -e <number of users to end with>
-        # -i <number of users to increment>
-        workfile = os.path.join('data', workfile)
-        args = "-f %s -s %d -e %d -i %d" % (workfile, start, end, increment)
-        config = os.path.join(self.srcdir, 'reaim.config')
-        utils.system('cp -f %s/reaim.config %s' % (self.bindir, config))
-        args += ' -c ./reaim.config'
-        open(config, 'a+').write("DISKDIR %s\n" % tmpdir)
-        os.chdir(self.srcdir)
-        cmd = self.ldlib + ' ./reaim ' + args + ' ' + extra_args
-
-        results = []
-
-        profilers = self.job.profilers
-        if not profilers.only():
-            for i in range(iterations):
-                results.append(utils.system_output(cmd, retain_output=True))
-
-        # Do a profiling run if necessary
-        if profilers.present():
-            profilers.start(self)
-            results.append(utils.system_output(cmd, retain_output=True))
-            profilers.stop(self)
-            profilers.report(self)
-
-        self.__format_results("\n".join(results))
-
-
-    def __format_results(self, results):
-        out = open(self.resultsdir + '/keyval', 'w')
-        for line in results.split('\n'):
-            m = re.match('Max Jobs per Minute (\d+)', line)
-            if m:
-                max_jobs_per_min = m.group(1)
-            if re.match(r"^[0-9\. ]+$", line):
-                fields = line.split()
-        out.write("""\
-max_jobs_per_min=%s
-num_forked=%s
-parent_time=%s
-child_systime=%s
-child_utime=%s
-jobs_min=%s
-jobs_min_child=%s
-std_dev_time=%s
-std_dev_pct=%s
-jti=%s
-""" % tuple([max_jobs_per_min] + fields))
-        out.close()
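
A minimal standalone sketch of the parsing that __format_results() performs, run against a hypothetical fragment of reaim output (the summary line and the nine-field data row are invented for illustration; real reaim output may differ):

    import re

    # Hypothetical reaim output; real output may differ.
    sample_lines = [
        "Max Jobs per Minute 5500",
        "100 12.3 4.5 6.7 5500 55.0 0.12 1.1 98.7",  # nine numeric fields
    ]

    max_jobs_per_min = None
    fields = []
    for line in sample_lines:
        m = re.match(r'Max Jobs per Minute (\d+)', line)
        if m:
            max_jobs_per_min = m.group(1)
        if re.match(r"^[0-9\. ]+$", line):
            fields = line.split()

    # __format_results() above writes max_jobs_per_min plus the nine fields
    # (num_forked, parent_time, ..., jti) as key=value lines in <resultsdir>/keyval.
    print(max_jobs_per_min, fields)
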
diff --git a/client/tests/real_time_tests/control b/client/tests/real_time_tests/control
deleted file mode 100644
index 5f97156..0000000
--- a/client/tests/real_time_tests/control
+++ /dev/null
@@ -1,13 +0,0 @@
-AUTHOR = "Chirag <chirag@linux.vnet.ibm.com>"
-
-NAME = "Kernel Realtime Tests"
-
-TIME = "MEDIUM"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "Client"
-
-DOC = """Runs the Kernel Realtime tests from
-http://git.kernel.org/?p=linux/kernel/git/galak/ltp.git;a=tree;f=testcases/realtime"""
-
-job.run_test('real_time_tests')
diff --git a/client/tests/real_time_tests/path-fix.patch b/client/tests/real_time_tests/path-fix.patch
deleted file mode 100644
index 41b3f76..0000000
--- a/client/tests/real_time_tests/path-fix.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-diff -Nurp realtime/scripts/setenv.sh src/scripts/setenv.sh
---- realtime/scripts/setenv.sh	2008-06-10 11:48:10.000000000 +0530
-+++ src/scripts/setenv.sh	2008-06-23 13:49:59.000000000 +0530
-@@ -6,7 +6,7 @@
- #
- #export TESTS_DIR=$(readlink -f $SCRIPTS_DIR/..)
-
--TESTSUITE_NAME=testcases/realtime
-+TESTSUITE_NAME=src
- if [ -z "$PARENT" ]; then
-     PARENT=${PWD%/$TESTSUITE_NAME*}
- fi
diff --git a/client/tests/real_time_tests/real_time_tests.py b/client/tests/real_time_tests/real_time_tests.py
deleted file mode 100644
index dd915f6..0000000
--- a/client/tests/real_time_tests/real_time_tests.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-class real_time_tests(test.test):
-    version = 1
-    preserve_srcdir = True
-
-# http://git.kernel.org/?p=linux/kernel/git/galak/ltp.git;a=tree;f=testcases/realtime
-    def setup(self, tarball = 'realtime-latest-git-snapshot.tar.bz2'):
-        utils.check_glibc_ver('2.5')
-        self.tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(self.tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        utils.system('patch -p1 < ../path-fix.patch')
-
-    def execute(self, args = '-l 10'):
-        os.chdir(self.srcdir)
-        utils.system('./run.sh -t func ' + args)
diff --git a/client/tests/real_time_tests/realtime-latest-git-snapshot.tar.bz2 b/client/tests/real_time_tests/realtime-latest-git-snapshot.tar.bz2
deleted file mode 100644
index 3e23fe8..0000000
--- a/client/tests/real_time_tests/realtime-latest-git-snapshot.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/rmaptest/control b/client/tests/rmaptest/control
deleted file mode 100644
index a64614c..0000000
--- a/client/tests/rmaptest/control
+++ /dev/null
@@ -1,14 +0,0 @@
-AUTHOR = "mbligh@google.com"
-
-NAME = "Rmap test"
-
-TIME = "MEDIUM"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Fuctional"
-TEST_TYPE = "Client"
-
-DOC = """
-Create lots of VMAs mapped by lots of tasks, to tickle objrmap and the
-virtual scan.
-"""
-job.run_test('rmaptest')
diff --git a/client/tests/rmaptest/rmaptest.py b/client/tests/rmaptest/rmaptest.py
deleted file mode 100644
index 96baaf0..0000000
--- a/client/tests/rmaptest/rmaptest.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-# tests is a simple array of "cmd" "arguments"
-tests = [["rmaptest", "-h -i100 -n100 -s100 -t100 -V10 -v file1.dat"],
-         ["rmaptest", "-l -i100 -n100 -s100 -t100 -V10 -v file2.dat"],
-         ["rmaptest", "-r -i100 -n100 -s100 -t100 -V10 -v file3.dat"],
-        ]
-name = 0
-arglist = 1
-
-class rmaptest(test.test):
-    version = 1
-    preserve_srcdir = True
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.system(utils.get_cc() + ' -Wall -o rmaptest rmap-test.c')
-
-
-    def execute(self, args = ''):
-        os.chdir(self.tmpdir)
-        for test in tests:
-            cmd = '%s/%s %s %s' % (self.srcdir, test[name], args, test[arglist])
-            utils.system(cmd)
diff --git a/client/tests/rmaptest/src/rmap-test.c b/client/tests/rmaptest/src/rmap-test.c
deleted file mode 100644
index 77594d2..0000000
--- a/client/tests/rmaptest/src/rmap-test.c
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- * Create lots of VMAs mapped by lots of tasks, to tickle objrmap and the
- * virtual scan.
- */
-
-#include <stdio.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <stdlib.h>
-#include <string.h>
-#include <errno.h>
-#include <time.h>
-#include <sys/mman.h>
-#include <sys/signal.h>
-#include <sys/stat.h>
-#include <sys/wait.h>
-
-char *progname;
-char *filename;
-void *mapped_mem;
-
-int niters;
-int ntasks = 100;
-int nvmas = 100;
-int vmasize = 1024*1024;
-int vmas_to_do = -1;
-int pagesize;
-int fd;
-char **vma_addresses;
-volatile int *nr_children_running;
-int verbose;
-
-enum access_pattern {
-	ap_random,
-	ap_linear,
-	ap_half
-} access_pattern = ap_linear;
-
-void open_file()
-{
-	fd = open(filename, O_RDWR|O_TRUNC|O_CREAT, 0666);
-	if (fd < 0) {
-		fprintf(stderr, "%s: Cannot open `%s': %s\n",
-			progname, filename, strerror(errno));
-		exit(1);
-	}
-}
-
-void usage(void)
-{
-	fprintf(stderr, "Usage: %s [-hlrvV] [-iN] [-nN] [-sN] [-tN] filename\n",
-				progname);
-	fprintf(stderr, "     -h:          Pattern: half of memory is busy\n");
-	fprintf(stderr, "     -l:          Pattern: linear\n");
-	fprintf(stderr, "     -r:          Pattern: random\n");
-	fprintf(stderr, "     -iN:         Number of iterations\n");
-	fprintf(stderr, "     -nN:         Number of VMAs\n");
-	fprintf(stderr, "     -sN:         VMA size (pages)\n");
-	fprintf(stderr, "     -tN:         Run N tasks\n");
-	fprintf(stderr, "     -VN:         Number of VMAs to process\n");
-	fprintf(stderr, "     -v:          Verbose\n");
-	exit(1);
-}
-
-void touch_pages(int nr_vmas)
-{
-	int i;
-
-	for (i = 0; i < nr_vmas; i++) {
-		char *p = vma_addresses[i];
-		int page;
-
-		for (page = 0; page < vmasize; page++)
-			p[page * pagesize]++;
-	}
-}
-
-void msync_file(int nr_vmas)
-{
-	int i;
-
-	for (i = 0; i < nr_vmas; i++) {
-		char *p = vma_addresses[i];
-
-		msync(p, vmasize * pagesize, MS_ASYNC);
-	}
-}
-
-void touch_random_pages(void)
-{
-	int vma;
-	int page;
-
-	srand(getpid() * time(0));
-
-	for (vma = 0; vma < vmas_to_do; vma++) {
-		for (page = 0; page < vmasize; page++) {
-			int rand_vma;
-			int rand_page;
-			char *p;
-
-			rand_vma = rand() % nvmas;
-			rand_page = rand() % vmasize;
-			p = vma_addresses[rand_vma] + rand_page * pagesize;
-			(*p)++;
-		}
-		if (verbose > 1)
-			printf("vma %d/%d done\n", vma, nvmas);
-	}
-}
-
-void child(int childno)
-{
-	int iter;
-
-	sleep(1);
-	if (access_pattern == ap_half && childno == 0) {
-		while (*nr_children_running > 1) {
-			touch_pages(nvmas / 2);
-		}
-		return;
-	}
-
-	for (iter = 0; iter < niters; iter++) {
-		if (access_pattern == ap_random) {
-			touch_random_pages();
-		} else if (access_pattern == ap_linear) {
-			touch_pages(nvmas);
-		} else if (access_pattern == ap_half) {
-			touch_pages(nvmas);
-		}
-		if (verbose > 0)
-			printf("%d/%d\n", iter, niters);
-	}
-}
-
-int main(int argc, char *argv[])
-{
-	int c;
-	int i;
-	loff_t offset;
-	loff_t file_size;
-	int childno;
-
-	progname = argv[0];
-
-	while ((c = getopt(argc, argv, "vrlhi:n:s:t:V:")) != -1) {
-		switch (c) {
-		case 'h':
-			access_pattern = ap_half;
-			break;
-		case 'l':
-			access_pattern = ap_linear;
-			break;
-		case 'r':
-			access_pattern = ap_random;
-			break;
-		case 'i':
-			niters = strtol(optarg, NULL, 10);
-			break;
-		case 'n':
-			nvmas = strtol(optarg, NULL, 10);
-			break;
-		case 's':
-			vmasize = strtol(optarg, NULL, 10);
-			break;
-		case 't':
-			ntasks = strtol(optarg, NULL, 10);
-			break;
-		case 'V':
-			vmas_to_do = strtol(optarg, NULL, 10);
-			break;
-		case 'v':
-			verbose++;
-			break;
-		}
-	}
-
-	if (optind == argc)
-		usage();
-	filename = argv[optind++];
-	if (optind != argc)
-		usage();
-
-	if (vmas_to_do == -1)
-		vmas_to_do = nvmas;
-
-	pagesize = getpagesize();
-	open_file();
-
-	file_size = nvmas;
-	file_size *= vmasize;
-	file_size += nvmas - 1;
-	file_size *= pagesize;
-
-	printf("Total file size: %lldk, Total memory: %lldk\n",
-		file_size / 1024,
-		((long long)nvmas * vmasize * pagesize) / 1024);
-
-	if (ftruncate(fd, file_size) < 0) {
-		perror("ftruncate");
-		exit(1);
-	}
-
-	vma_addresses = malloc(nvmas * sizeof(*vma_addresses));
-	nr_children_running = (int *)mmap(0, sizeof(*nr_children_running),
-				PROT_READ|PROT_WRITE,
-				MAP_SHARED|MAP_ANONYMOUS,
-				-1,
-				0);
-	if (nr_children_running == MAP_FAILED) {
-		perror("mmap1");
-		exit(1);
-	}
-
-	offset = 0;
-
-	for (i = 0; i < nvmas; i++) {
-		char *p;
-
-		p = mmap(0, vmasize * pagesize, PROT_READ|PROT_WRITE,
-				MAP_SHARED, fd, offset);
-		if (p == MAP_FAILED) {
-			perror("mmap");
-			exit(1);
-		}
-		vma_addresses[i] = p;
-		offset += vmasize * pagesize + pagesize;
-	}
-
-	touch_pages(nvmas);
-	msync_file(nvmas);
-	*nr_children_running = ntasks;
-
-	for (childno = 0; childno < ntasks; childno++) {
-		if (fork() == 0) {
-			child(childno);
-			exit(0);
-		}
-	}
-
-	signal(SIGINT, SIG_IGN);
-
-	for (i = 0; i < ntasks; i++) {
-		pid_t pid;
-		int status;
-		
-		/* Catch each child error status and report. */
-		pid = wait3(&status, 0, 0);
-		if (pid < 0)	/* No more children? */
-			break;
-		(*nr_children_running)--;
-	}
-	exit(0);
-}
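
To make the sizing logic in main() concrete, here is the same arithmetic as a standalone sketch for the arguments the control file passes (-n100 -s100), assuming a 4 KiB page size; the numbers are purely illustrative.

    # Parameters matching the control file's rmaptest invocation (assumed 4 KiB pages).
    nvmas = 100      # -n100: number of VMAs
    vmasize = 100    # -s100: VMA size in pages
    pagesize = 4096

    # Mirrors main(): each VMA covers vmasize pages of the file, and consecutive
    # mappings are separated by one extra page of file offset.
    file_size = (nvmas * vmasize + (nvmas - 1)) * pagesize
    total_memory = nvmas * vmasize * pagesize

    print("Total file size: %dk, Total memory: %dk"
          % (file_size // 1024, total_memory // 1024))   # ~40396k and 40000k
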
diff --git a/client/tests/rtc/control b/client/tests/rtc/control
deleted file mode 100644
index f0f64c9..0000000
--- a/client/tests/rtc/control
+++ /dev/null
@@ -1,15 +0,0 @@
-TIME="SHORT"
-AUTHOR = "Jason Wang <jasowag@redhat.com>"
-DOC = """
-rtc is a simple test of the realtime clock driver, taken from
-Documentation/rtc.txt. It performs functional tests of interrupt, alarm and
-requested frequency.
-
-Please refer to the kernel documentation for details.
-"""
-NAME = 'rtc'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-
-job.run_test('rtc')
diff --git a/client/tests/rtc/rtc.py b/client/tests/rtc/rtc.py
deleted file mode 100644
index 5345db1..0000000
--- a/client/tests/rtc/rtc.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class rtc(test.test):
-    version = 1
-    preserve_srcdir = True
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make('clobber')
-        utils.make()
-
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def run_once(self, def_rtc="/dev/rtc0", maxfreq=64):
-        if not os.path.exists(def_rtc):
-            raise error.TestNAError("RTC device %s does not exist" % def_rtc)
-        os.chdir(self.srcdir)
-        utils.system('./rtctest %s %s' % (def_rtc, maxfreq))
diff --git a/client/tests/rtc/src/Makefile b/client/tests/rtc/src/Makefile
deleted file mode 100644
index f99dc60..0000000
--- a/client/tests/rtc/src/Makefile
+++ /dev/null
@@ -1,19 +0,0 @@
-CC=		cc
-CFLAGS=	        -O -Wall -Wstrict-prototypes
-
-PROGS=		rtctest
-
-SRCS=		rtctest.c
-OBJS=		${SRCS:.c=.o}
-
-
-all:		$(PROGS)
-
-rtctest:	$(OBJS)
-		$(CC) $(LDFLAGS) -o rtctest $(OBJS)
-
-clean:
-		-rm -f $(OBJS)
-
-clobber:	clean
-		-rm -f $(PROGS)
diff --git a/client/tests/rtc/src/rtctest.c b/client/tests/rtc/src/rtctest.c
deleted file mode 100644
index 5939c77..0000000
--- a/client/tests/rtc/src/rtctest.c
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
- *      Real Time Clock Driver Test/Example Program
- *
- *      Compile with:
- *      gcc -s -Wall -Wstrict-prototypes rtctest.c -o rtctest
- *
- *      Copyright (C) 1996, Paul Gortmaker.
- *      Copyright (C) 2010, Jason Wang <jasowang@redhat.com>
- *
- *      Released under the GNU General Public License, version 2,
- *      included herein by reference.
- *
- */
-
-#include <stdio.h>
-#include <linux/rtc.h>
-#include <sys/ioctl.h>
-#include <sys/time.h>
-#include <sys/types.h>
-#include <fcntl.h>
-#include <unistd.h>
-#include <stdlib.h>
-#include <errno.h>
-
-
-/*
- * This expects the new RTC class driver framework, working with
- * clocks that will often not be clones of what the PC-AT had.
- * Use the command line to specify another RTC if you need one.
- */
-static const char default_rtc[] = "/dev/rtc0";
-static int maxfreq = 64;
-
-int main(int argc, char **argv)
-{
-	int i, fd, retval, irqcount = 0;
-	unsigned long tmp, data;
-	struct rtc_time rtc_tm;
-	const char *rtc = default_rtc;
-
-	switch (argc) {
-	case 3:
-		maxfreq = atoi(argv[2]);
-	case 2:
-		rtc = argv[1];
-		/* FALLTHROUGH */
-	case 1:
-		break;
-	default:
-		fprintf(stderr, "usage:  rtctest [rtcdev] [maxfreq]\n");
-		return 1;
-	}
-
-	fd = open(rtc, O_RDONLY);
-
-	if (fd ==  -1) {
-		perror(rtc);
-		exit(errno);
-	}
-
-	fprintf(stderr, "\n\t\t\tRTC Driver Test Example.\n\n");
-
-	/* Turn on update interrupts (one per second) */
-	retval = ioctl(fd, RTC_UIE_ON, 0);
-	if (retval == -1) {
-		if (errno == ENOTTY) {
-			fprintf(stderr,
-				"\n...Update IRQs not supported.\n");
-			goto test_READ;
-		}
-		perror("RTC_UIE_ON ioctl");
-		exit(errno);
-	}
-
-	fprintf(stderr, "Counting 5 update (1/sec) interrupts from reading %s:",
-			rtc);
-	fflush(stderr);
-	for (i=1; i<6; i++) {
-		/* This read will block */
-		retval = read(fd, &data, sizeof(unsigned long));
-		if (retval == -1) {
-			perror("read");
-			exit(errno);
-		}
-		fprintf(stderr, " %d", i);
-		fflush(stderr);
-		irqcount++;
-	}
-
-	fprintf(stderr, "\nAgain, from using select(2) on /dev/rtc:");
-	fflush(stderr);
-	for (i=1; i<6; i++) {
-		struct timeval tv = {5, 0};     /* 5 second timeout on select */
-		fd_set readfds;
-
-		FD_ZERO(&readfds);
-		FD_SET(fd, &readfds);
-		/* The select will wait until an RTC interrupt happens. */
-		retval = select(fd+1, &readfds, NULL, NULL, &tv);
-		if (retval == -1) {
-				perror("select");
-				exit(errno);
-		}
-		/* This read won't block unlike the select-less case above. */
-		retval = read(fd, &data, sizeof(unsigned long));
-		if (retval == -1) {
-				perror("read");
-				exit(errno);
-		}
-		fprintf(stderr, " %d", i);
-		fflush(stderr);
-		irqcount++;
-	}
-
-	/* Turn off update interrupts */
-	retval = ioctl(fd, RTC_UIE_OFF, 0);
-	if (retval == -1) {
-		perror("RTC_UIE_OFF ioctl");
-		exit(errno);
-	}
-
-test_READ:
-	/* Read the RTC time/date */
-	retval = ioctl(fd, RTC_RD_TIME, &rtc_tm);
-	if (retval == -1) {
-		perror("RTC_RD_TIME ioctl");
-		exit(errno);
-	}
-
-	fprintf(stderr, "\n\nCurrent RTC date/time is %d-%d-%d, %02d:%02d:%02d.\n",
-		rtc_tm.tm_mday, rtc_tm.tm_mon + 1, rtc_tm.tm_year + 1900,
-		rtc_tm.tm_hour, rtc_tm.tm_min, rtc_tm.tm_sec);
-
-	/* Set the alarm to 5 sec in the future, and check for rollover */
-	rtc_tm.tm_sec += 5;
-	if (rtc_tm.tm_sec >= 60) {
-		rtc_tm.tm_sec %= 60;
-		rtc_tm.tm_min++;
-	}
-	if (rtc_tm.tm_min == 60) {
-		rtc_tm.tm_min = 0;
-		rtc_tm.tm_hour++;
-	}
-	if (rtc_tm.tm_hour == 24)
-		rtc_tm.tm_hour = 0;
-
-	retval = ioctl(fd, RTC_ALM_SET, &rtc_tm);
-	if (retval == -1) {
-		if (errno == ENOTTY) {
-			fprintf(stderr,
-				"\n...Alarm IRQs not supported.\n");
-			goto test_PIE;
-		}
-		perror("RTC_ALM_SET ioctl");
-		exit(errno);
-	}
-
-	/* Read the current alarm settings */
-	retval = ioctl(fd, RTC_ALM_READ, &rtc_tm);
-	if (retval == -1) {
-		perror("RTC_ALM_READ ioctl");
-		exit(errno);
-	}
-
-	fprintf(stderr, "Alarm time now set to %02d:%02d:%02d.\n",
-		rtc_tm.tm_hour, rtc_tm.tm_min, rtc_tm.tm_sec);
-
-	/* Enable alarm interrupts */
-	retval = ioctl(fd, RTC_AIE_ON, 0);
-	if (retval == -1) {
-		perror("RTC_AIE_ON ioctl");
-		exit(errno);
-	}
-
-	fprintf(stderr, "Waiting 5 seconds for alarm...");
-	fflush(stderr);
-	/* This blocks until the alarm ring causes an interrupt */
-	retval = read(fd, &data, sizeof(unsigned long));
-	if (retval == -1) {
-		perror("read");
-		exit(errno);
-	}
-	irqcount++;
-	fprintf(stderr, " okay. Alarm rang.\n");
-
-	/* Disable alarm interrupts */
-	retval = ioctl(fd, RTC_AIE_OFF, 0);
-	if (retval == -1) {
-		perror("RTC_AIE_OFF ioctl");
-		exit(errno);
-	}
-
-test_PIE:
-	/* Read periodic IRQ rate */
-	retval = ioctl(fd, RTC_IRQP_READ, &tmp);
-	if (retval == -1) {
-		/* not all RTCs support periodic IRQs */
-		if (errno == ENOTTY) {
-			fprintf(stderr, "\nNo periodic IRQ support\n");
-			goto done;
-		}
-		perror("RTC_IRQP_READ ioctl");
-		exit(errno);
-	}
-	fprintf(stderr, "\nPeriodic IRQ rate is %ldHz.\n", tmp);
-
-	fprintf(stderr, "Counting 20 interrupts at:");
-	fflush(stderr);
-
-	/* The frequencies 128Hz, 256Hz, ... 8192Hz are only allowed for root. */
-	for (tmp=2; tmp<=maxfreq; tmp*=2) {
-
-		retval = ioctl(fd, RTC_IRQP_SET, tmp);
-		if (retval == -1) {
-			/* not all RTCs can change their periodic IRQ rate */
-			if (errno == ENOTTY) {
-				fprintf(stderr,
-					"\n...Periodic IRQ rate is fixed\n");
-				goto done;
-			}
-			perror("RTC_IRQP_SET ioctl");
-			exit(errno);
-		}
-
-		fprintf(stderr, "\n%ldHz:\t", tmp);
-		fflush(stderr);
-
-		/* Enable periodic interrupts */
-		retval = ioctl(fd, RTC_PIE_ON, 0);
-		if (retval == -1) {
-			perror("RTC_PIE_ON ioctl");
-			exit(errno);
-		}
-
-		for (i=1; i<21; i++) {
-			/* This blocks */
-			retval = read(fd, &data, sizeof(unsigned long));
-			if (retval == -1) {
-				perror("read");
-				exit(errno);
-			}
-			fprintf(stderr, " %d",i);
-			fflush(stderr);
-			irqcount++;
-		}
-
-		/* Disable periodic interrupts */
-		retval = ioctl(fd, RTC_PIE_OFF, 0);
-		if (retval == -1) {
-			perror("RTC_PIE_OFF ioctl");
-			exit(errno);
-		}
-	}
-
-done:
-	fprintf(stderr, "\n\n\t\t\t *** Test complete ***\n");
-
-	close(fd);
-
-	return 0;
-}
diff --git a/client/tests/rttester/control b/client/tests/rttester/control
deleted file mode 100644
index 6529e33..0000000
--- a/client/tests/rttester/control
+++ /dev/null
@@ -1,11 +0,0 @@
-AUTHOR = "Michal Piotrowski <michal.k.k.piotrowski@gmail.com>"
-
-NAME = "Real Time Test Cases"
-
-TIME = "MEDIUM"
-TEST_CLASS = "Kernel"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "Client"
-
-DOC = "Runs some real time tests from rttester"
-job.run_test('rttester')
diff --git a/client/tests/rttester/rttester.py b/client/tests/rttester/rttester.py
deleted file mode 100644
index ca815ea..0000000
--- a/client/tests/rttester/rttester.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils, os_dep
-
-
-class rttester(test.test):
-    version = 1
-
-    # http://www.stardust.webpages.pl/files/patches/autotest/rttester.tar.bz2
-
-    def setup(self, tarball = 'rttester.tar.bz2'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-
-    def execute(self):
-        os.chdir(self.srcdir)
-        utils.system(self.srcdir + '/check-all.sh')
diff --git a/client/tests/rttester/rttester.tar.bz2 b/client/tests/rttester/rttester.tar.bz2
deleted file mode 100644
index 7c97fba..0000000
--- a/client/tests/rttester/rttester.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/selftest/control b/client/tests/selftest/control
deleted file mode 100644
index f066f74..0000000
--- a/client/tests/selftest/control
+++ /dev/null
@@ -1,10 +0,0 @@
-AUTHOR = "Andy Whitcroft <apw@shadowen.org>"
-TIME = "MEDIUM"
-NAME = "Self Test"
-TEST_TYPE = "client"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Software"
-DOC = """\
-Run a selftest on the autotest installed branch
-"""
-job.run_test('selftest', cmd=1)
diff --git a/client/tests/selftest/selftest.py b/client/tests/selftest/selftest.py
deleted file mode 100644
index 7770181..0000000
--- a/client/tests/selftest/selftest.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import os, sys, logging
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import error
-
-
-class selftest(test.test):
-    version = 1
-
-    def setup(self):
-        name = self.job.resultdir + '/sequence'
-        if (not os.path.exists(name)):
-            fd = file(name, 'w')
-            fd.write('0')
-            fd.close()
-
-    def __mark(self, checkpoint):
-        name = self.job.resultdir + '/sequence'
-        fd = file(name, 'r')
-        current = int(fd.readline())
-        fd.close()
-
-        current += 1
-        fd = file(name + '.new', 'w')
-        fd.write('%d' % current)
-        fd.close()
-
-        os.rename(name + '.new', name)
-
-        logging.debug("checkpoint %d %d", current, checkpoint)
-
-        if (current != checkpoint):
-            raise error.JobError("selftest: sequence was " +
-                    "%d when %d expected" % (current, checkpoint))
-
-    def __throw(self):
-        __does_not_exist = __does_not_exist_either
-
-    def __print(self, msg):
-        sys.stdout.write(msg)
-
-    def __warn(self, msg):
-        sys.stderr.write(msg)
-
-    def execute(self, cmd, *args):
-        if cmd == 'mark':
-            self.__mark(*args)
-        elif cmd == 'throw':
-            self.__throw(*args)
-        elif cmd == 'print':
-            self.__print(*args)
-        elif cmd == 'warn':
-            self.__warn(*args)
diff --git a/client/tests/signaltest/Makefile b/client/tests/signaltest/Makefile
deleted file mode 100644
index 6431b25..0000000
--- a/client/tests/signaltest/Makefile
+++ /dev/null
@@ -1,12 +0,0 @@
-
-CC ?= $(CROSS_COMPILE)gcc
-TARGET=signaltest
-FLAGS= -Wall -O2 
-LIBS = -lpthread -lrt
-
-all: signaltest.c
-	$(CC) $(FLAGS) $^ -o $(TARGET) $(LIBS)
-
-clean:
-	rm -f $(TARGET) *.o .depend *.*~
-
diff --git a/client/tests/signaltest/control b/client/tests/signaltest/control
deleted file mode 100644
index 13ebb4d..0000000
--- a/client/tests/signaltest/control
+++ /dev/null
@@ -1,10 +0,0 @@
-NAME='Signal Test'
-AUTHOR='Michal Piotrowski <michal.k.k.piotrowski@gmail.com>'
-TIME='SHORT'
-TEST_TYPE='client'
-TEST_CLASS='Kernel'
-TEST_CATEGORY='Functional'
-DOC='''\
-Test signal passing to processes
-'''
-job.run_test('signaltest')
diff --git a/client/tests/signaltest/signaltest.py b/client/tests/signaltest/signaltest.py
deleted file mode 100644
index d8d047a..0000000
--- a/client/tests/signaltest/signaltest.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import os
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import utils
-
-
-class signaltest(test.test):
-    version = 1
-    preserve_srcdir = True
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # git://git.kernel.org/pub/scm/linux/kernel/git/tglx/rt-tests.git
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def execute(self, args = '-t 10 -l 100000'):
-        utils.system(self.srcdir + '/signaltest ' + args)
diff --git a/client/tests/signaltest/src/Makefile b/client/tests/signaltest/src/Makefile
deleted file mode 100644
index da81b33..0000000
--- a/client/tests/signaltest/src/Makefile
+++ /dev/null
@@ -1,10 +0,0 @@
-CC ?= $(CROSS_COMPILE)gcc
-TARGET = signaltest
-FLAGS = -Wall -O2
-LIBS = -lpthread -lrt
-
-all: signaltest.c
-	$(CC) $(FLAGS) $^ -o $(TARGET) $(LIBS)
-
-clean:
-	rm -f $(TARGET) *.o .depend *.*~
diff --git a/client/tests/signaltest/src/signaltest.c b/client/tests/signaltest/src/signaltest.c
deleted file mode 100644
index 87dc50a..0000000
--- a/client/tests/signaltest/src/signaltest.c
+++ /dev/null
@@ -1,429 +0,0 @@
-/*
- * RT signal roundtrip test software
- *
- * (C) 2007 Thomas Gleixner <tglx@linutronix.de>
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License Version
- * 2 as published by the Free Software Foundation;
- *
- */
-
-#define VERSION_STRING "V 0.3"
-
-#include <fcntl.h>
-#include <getopt.h>
-#include <pthread.h>
-#include <signal.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <string.h>
-#include <time.h>
-#include <unistd.h>
-
-#include <linux/unistd.h>
-
-#include <sys/prctl.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <sys/time.h>
-
-#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
-
-/* Ugly, but .... */
-#define gettid() syscall(__NR_gettid)
-
-#define USEC_PER_SEC		1000000
-#define NSEC_PER_SEC		1000000000
-
-/* Must be power of 2 ! */
-#define VALBUF_SIZE		16384
-
-/* Struct to transfer parameters to the thread */
-struct thread_param {
-	int id;
-	int prio;
-	int signal;
-	unsigned long max_cycles;
-	struct thread_stat *stats;
-	int bufmsk;
-};
-
-/* Struct for statistics */
-struct thread_stat {
-	unsigned long cycles;
-	unsigned long cyclesread;
-	long min;
-	long max;
-	long act;
-	double avg;
-	long *values;
-	pthread_t thread;
-	pthread_t tothread;
-	int threadstarted;
-	int tid;
-};
-
-static int shutdown;
-static int tracelimit = 0;
-static int ftrace = 0;
-static int oldtrace = 0;
-
-static inline void tsnorm(struct timespec *ts)
-{
-	while (ts->tv_nsec >= NSEC_PER_SEC) {
-		ts->tv_nsec -= NSEC_PER_SEC;
-		ts->tv_sec++;
-	}
-}
-
-static inline long calcdiff(struct timespec t1, struct timespec t2)
-{
-	long diff;
-	diff = USEC_PER_SEC * ((int) t1.tv_sec - (int) t2.tv_sec);
-	diff += ((int) t1.tv_nsec - (int) t2.tv_nsec) / 1000;
-	return diff;
-}
-
-/*
- * signal thread
- *
- */
-void *signalthread(void *param)
-{
-	struct thread_param *par = param;
-	struct sched_param schedp;
-	sigset_t sigset;
-	struct timespec before, after;
-	struct thread_stat *stat = par->stats;
-	int policy = par->prio ? SCHED_FIFO : SCHED_OTHER;
-	int stopped = 0;
-	int first = 1;
-
-	if (tracelimit) {
-		system("echo 1 > /proc/sys/kernel/trace_all_cpus");
-		system("echo 1 > /proc/sys/kernel/trace_freerunning");
-		system("echo 0 > /proc/sys/kernel/trace_print_at_crash");
-		system("echo 1 > /proc/sys/kernel/trace_user_triggered");
-		system("echo -1 > /proc/sys/kernel/trace_user_trigger_irq");
-		system("echo 0 > /proc/sys/kernel/trace_verbose");
-		system("echo 0 > /proc/sys/kernel/preempt_thresh");
-		system("echo 0 > /proc/sys/kernel/wakeup_timing");
-		system("echo 0 > /proc/sys/kernel/preempt_max_latency");
-		if (ftrace)
-			system("echo 1 > /proc/sys/kernel/mcount_enabled");
-
-		system("echo 1 > /proc/sys/kernel/trace_enabled");
-	}
-
-	stat->tid = gettid();
-
-	sigemptyset(&sigset);
-	sigaddset(&sigset, par->signal);
-	sigprocmask(SIG_BLOCK, &sigset, NULL);
-
-	memset(&schedp, 0, sizeof(schedp));
-	schedp.sched_priority = par->prio;
-	sched_setscheduler(0, policy, &schedp);
-
-	stat->threadstarted++;
-
-	if (tracelimit) {
-		if (oldtrace)
-			gettimeofday(0,(struct timezone *)1);
-		else
-			prctl(0, 1);
-	}
-
-	clock_gettime(CLOCK_MONOTONIC, &before);
-
-	while (!shutdown) {
-		struct timespec now;
-		long diff;
-		int sigs;
-
-		if (sigwait(&sigset, &sigs) < 0)
-			goto out;
-
-		clock_gettime(CLOCK_MONOTONIC, &after);
-
-		/*
-		 * If it is the first thread, sleep after every 16
-		 * round trips.
-		 */
-		if (!par->id && !(stat->cycles & 0x0F))
-			usleep(10000);
-
-		/* Get current time */
-		clock_gettime(CLOCK_MONOTONIC, &now);
-		pthread_kill(stat->tothread, SIGUSR1);
-
-		/* Skip the first cycle */
-		if (first) {
-			first = 0;
-			before = now;
-			continue;
-		}
-
-		diff = calcdiff(after, before);
-		before = now;
-		if (diff < stat->min)
-			stat->min = diff;
-		if (diff > stat->max)
-			stat->max = diff;
-		stat->avg += (double) diff;
-
-		if (!stopped && tracelimit && (diff > tracelimit)) {
-			stopped++;
-			if (oldtrace)
-				gettimeofday(0,0);
-			else
-				prctl(0, 0);
-			shutdown++;
-		}
-		stat->act = diff;
-		stat->cycles++;
-
-		if (par->bufmsk)
-			stat->values[stat->cycles & par->bufmsk] = diff;
-
-		if (par->max_cycles && par->max_cycles == stat->cycles)
-			break;
-	}
-
-out:
-	/* switch to normal */
-	schedp.sched_priority = 0;
-	sched_setscheduler(0, SCHED_OTHER, &schedp);
-
-	stat->threadstarted = -1;
-
-	return NULL;
-}
-
-
-/* Print usage information */
-static void display_help(void)
-{
-	printf("signaltest %s\n", VERSION_STRING);
-	printf("Usage:\n"
-	       "signaltest <options>\n\n"
-	       "-b USEC  --breaktrace=USEC send break trace command when latency > USEC\n"
-	       "-f                         function trace (when -b is active)\n"
-	       "-l LOOPS --loops=LOOPS     number of loops: default=0(endless)\n"
-	       "-p PRIO  --prio=PRIO       priority of highest prio thread\n"
-	       "-q       --quiet           print only a summary on exit\n"
-	       "-t NUM   --threads=NUM     number of threads: default=2\n"
-	       "-v       --verbose         output values on stdout for statistics\n"
-	       "                           format: n:c:v n=tasknum c=count v=value in us\n");
-	exit(0);
-}
-
-static int priority;
-static int num_threads = 2;
-static int max_cycles;
-static int verbose;
-static int quiet;
-
-/* Process commandline options */
-static void process_options (int argc, char *argv[])
-{
-	int error = 0;
-	for (;;) {
-		int option_index = 0;
-		/** Options for getopt */
-		static struct option long_options[] = {
-			{"breaktrace", required_argument, NULL, 'b'},
-			{"ftrace", no_argument, NULL, 'f'},
-			{"loops", required_argument, NULL, 'l'},
-			{"priority", required_argument, NULL, 'p'},
-			{"quiet", no_argument, NULL, 'q'},
-			{"threads", required_argument, NULL, 't'},
-			{"verbose", no_argument, NULL, 'v'},
-			{"help", no_argument, NULL, '?'},
-			{NULL, 0, NULL, 0}
-		};
-		int c = getopt_long (argc, argv, "b:fl:p:qt:v",
-			long_options, &option_index);
-		if (c == -1)
-			break;
-		switch (c) {
-		case 'b': tracelimit = atoi(optarg); break;
-		case 'l': max_cycles = atoi(optarg); break;
-		case 'p': priority = atoi(optarg); break;
-		case 'q': quiet = 1; break;
-		case 't': num_threads = atoi(optarg); break;
-		case 'v': verbose = 1; break;
-		case '?': error = 1; break;
-		}
-	}
-
-	if (priority < 0 || priority > 99)
-		error = 1;
-
-	if (num_threads < 2)
-		error = 1;
-
-	if (error)
-		display_help ();
-}
-
-static void check_kernel(void)
-{
-	size_t len;
-	char ver[256];
-	int fd, maj, min, sub;
-
-	fd = open("/proc/version", O_RDONLY, 0666);
-	len = read(fd, ver, 255);
-	close(fd);
-	ver[len-1] = 0x0;
-	sscanf(ver, "Linux version %d.%d.%d", &maj, &min, &sub);
-	if (maj == 2 && min == 6 && sub < 18)
-		oldtrace = 1;
-}
-
-static void sighand(int sig)
-{
-	shutdown = 1;
-}
-
-static void print_stat(struct thread_param *par, int index, int verbose)
-{
-	struct thread_stat *stat = par->stats;
-
-	if (!verbose) {
-		if (quiet != 1) {
-			printf("T:%2d (%5d) P:%2d C:%7lu "
-			       "Min:%7ld Act:%5ld Avg:%5ld Max:%8ld\n",
-			       index, stat->tid, par->prio,
-			       stat->cycles, stat->min, stat->act,
-			       stat->cycles ?
-			       (long)(stat->avg/stat->cycles) : 0, stat->max);
-		}
-	} else {
-		while (stat->cycles != stat->cyclesread) {
-			long diff = stat->values[stat->cyclesread & par->bufmsk];
-			printf("%8d:%8lu:%8ld\n", index, stat->cyclesread, diff);
-			stat->cyclesread++;
-		}
-	}
-}
-
-int main(int argc, char **argv)
-{
-	sigset_t sigset;
-	int signum = SIGUSR1;
-	struct thread_param *par;
-	struct thread_stat *stat;
-	int i, ret = -1;
-
-	if (geteuid()) {
-		printf("need to run as root!\n");
-		exit(-1);
-	}
-
-	process_options(argc, argv);
-
-	check_kernel();
-
-	sigemptyset(&sigset);
-	sigaddset(&sigset, signum);
-	sigprocmask (SIG_BLOCK, &sigset, NULL);
-
-	signal(SIGINT, sighand);
-	signal(SIGTERM, sighand);
-
-	par = calloc(num_threads, sizeof(struct thread_param));
-	if (!par)
-		goto out;
-	stat = calloc(num_threads, sizeof(struct thread_stat));
-	if (!stat)
-		goto outpar;
-
-	for (i = 0; i < num_threads; i++) {
-		if (verbose) {
-			stat[i].values = calloc(VALBUF_SIZE, sizeof(long));
-			if (!stat[i].values)
-				goto outall;
-			par[i].bufmsk = VALBUF_SIZE - 1;
-		}
-
-		par[i].id = i;
-		par[i].prio = priority;
-#if 0
-		if (priority)
-			priority--;
-#endif
-		par[i].signal = signum;
-		par[i].max_cycles = max_cycles;
-		par[i].stats = &stat[i];
-		stat[i].min = 1000000;
-		stat[i].max = -1000000;
-		stat[i].avg = 0.0;
-		stat[i].threadstarted = 1;
-		pthread_create(&stat[i].thread, NULL, signalthread, &par[i]);
-	}
-
-	while (!shutdown) {
-		int allstarted = 1;
-
-		for (i = 0; i < num_threads; i++) {
-			if (stat[i].threadstarted != 2)
-				allstarted = 0;
-		}
-		if (!allstarted)
-			continue;
-
-		for (i = 0; i < num_threads - 1; i++)
-			stat[i].tothread = stat[i+1].thread;
-		stat[i].tothread = stat[0].thread;
-		break;
-	}
-	pthread_kill(stat[0].thread, signum);
-
-	while (!shutdown) {
-		char lavg[256];
-		int fd, len, allstopped = 0;
-
-		if (!verbose && !quiet) {
-			fd = open("/proc/loadavg", O_RDONLY, 0666);
-			len = read(fd, &lavg, 255);
-			close(fd);
-			lavg[len-1] = 0x0;
-			printf("%s          \n\n", lavg);
-		}
-
-		print_stat(&par[0], 0, verbose);
-		if(max_cycles && stat[0].cycles >= max_cycles)
-			allstopped++;
-
-		usleep(10000);
-		if (shutdown || allstopped)
-			break;
-		if (!verbose && !quiet)
-			printf("\033[%dA", 3);
-	}
-	ret = 0;
- outall:
-	shutdown = 1;
-	usleep(50000);
-	if (quiet)
-		quiet = 2;
-	for (i = 0; i < num_threads; i++) {
-		if (stat[i].threadstarted > 0)
-			pthread_kill(stat[i].thread, SIGTERM);
-		if (stat[i].threadstarted) {
-			pthread_join(stat[i].thread, NULL);
-			if (quiet)
-				print_stat(&par[i], i, 0);
-		}
-		if (stat[i].values)
-			free(stat[i].values);
-	}
-	free(stat);
- outpar:
-	free(par);
- out:
-	exit(ret);
-}
diff --git a/client/tests/spew/control b/client/tests/spew/control
deleted file mode 100644
index f7452e2..0000000
--- a/client/tests/spew/control
+++ /dev/null
@@ -1,15 +0,0 @@
-NAME='Spew'
-AUTHOR='Martin Bligh <mbligh@google.com>'
-TEST_TYPE='client'
-TEST_CATEGORY='Benchmark'
-TEST_CLASS='Hardware'
-TIME='MEDIUM'
-DOC='''\
-The spew package is used to test I/O performance and to generate load
-on character devices, block devices, and file systems.  It is similar
-to the lmdd program found in the lmbench test-suite
-(http://www.bitmover.com/lmbench). It is a bit easier to use than lmdd
-and has some added functionality.  For example, it can test both
-random and sequential I/O.
-'''
-job.run_test('spew')
diff --git a/client/tests/spew/spew-1.0.5.tgz b/client/tests/spew/spew-1.0.5.tgz
deleted file mode 100644
index c226af0..0000000
--- a/client/tests/spew/spew-1.0.5.tgz
+++ /dev/null
Binary files differ
diff --git a/client/tests/spew/spew.py b/client/tests/spew/spew.py
deleted file mode 100644
index 0c04bb0..0000000
--- a/client/tests/spew/spew.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class spew(test.test):
-    version = 1
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # ftp://ftp.berlios.de/pub/spew/1.0.5/spew-1.0.5.tgz
-    def setup(self, tarball = 'spew-1.0.5.tgz'):
-        self.tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(self.tarball, self.srcdir)
-
-        os.chdir(self.srcdir)
-        utils.configure()
-        utils.make()
-
-
-    def run_once(self, testdir = None, filesize='100M', type='write',
-                 pattern='random'):
-        cmd = os.path.join(self.srcdir, 'src/spew')
-        if not testdir:
-            testdir = self.tmpdir
-        tmpfile = os.path.join(testdir, 'spew-test.%d' % os.getpid())
-        results = os.path.join(self.resultsdir, 'stdout.%d' % self.iteration)
-        args = '--%s -p %s -b 2k -B 2M %s %s' % \
-                        (type, pattern, filesize, tmpfile)
-        cmd += ' ' + args
-
-        open(self.resultsdir + '/command', 'w').write(cmd + '\n')
-        self.job.logging.redirect(results)
-        try:
-            utils.system(cmd)
-        finally:
-            self.job.logging.restore()
diff --git a/client/tests/stress/control b/client/tests/stress/control
deleted file mode 100644
index d8bbea6..0000000
--- a/client/tests/stress/control
+++ /dev/null
@@ -1,41 +0,0 @@
-NAME='Stress'
-AUTHOR='Yi Yang <yang.y.yi@gmail.com>'
-TEST_TYPE='client'
-TIME='MEDIUM'
-TEST_CATEGORY='Functional'
-TEST_CLASS='Software'
-DOC='''\
-stress is not a benchmark, but is rather a tool designed to put given subsystems
-under a specified load. Instances in which this is useful include those in
-which a system administrator wishes to perform tuning activities, a kernel or
-libc programmer wishes to evaluate denial of service possibilities, etc.
-
-Stress command line options:
-
-     -?, --help         show this help statement
-         --version      show version statement
-     -v, --verbose      be verbose
-     -q, --quiet        be quiet
-     -n, --dry-run      show what would have been done
-     -t, --timeout N    timeout after N seconds
-         --backoff N    wait factor of N microseconds before work starts
-     -c, --cpu N        spawn N workers spinning on sqrt()
-     -i, --io N         spawn N workers spinning on sync()
-     -m, --vm N         spawn N workers spinning on malloc()/free()
-         --vm-bytes B   malloc B bytes per vm worker (default is 256MB)
-         --vm-stride B  touch a byte every B bytes (default is 4096)
-         --vm-hang N    sleep N secs before free (default is none, 0 is inf)
-         --vm-keep      redirty memory instead of freeing and reallocating
-     -d, --hdd N        spawn N workers spinning on write()/unlink()
-         --hdd-bytes B  write B bytes per hdd worker (default is 1GB)
-         --hdd-noclean  do not unlink files created by hdd workers
-    Example: %s --cpu 8 --io 4 --vm 2 --vm-bytes 128M --timeout 10s
-    Note: Numbers may be suffixed with s,m,h,d,y (time) or B,K,M,G (size).
-
-Autotest module options:
-    args = Arguments passed to the stress test. If omitted, a heuristic
-           will be used to calculate sensible defaults
-    stress_length = Length of time for which stress will run, in seconds.
-                    The default is 60s.
-'''
-job.run_test('stress')
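
A hypothetical alternative control invocation that passes explicit options rather than relying on the heuristic defaults computed in stress.py; when args is supplied, run_once() skips its heuristic and the run time is governed by --timeout (stress_length is then effectively unused). The option values below are illustrative.

    # Hypothetical control variant with explicit stress options.
    job.run_test('stress',
                 args='--cpu 4 --io 2 --vm 2 --vm-bytes 256M --timeout 60s --verbose')
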
diff --git a/client/tests/stress/stress-1.0.4.tar.gz b/client/tests/stress/stress-1.0.4.tar.gz
deleted file mode 100644
index 900286e..0000000
--- a/client/tests/stress/stress-1.0.4.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/stress/stress.py b/client/tests/stress/stress.py
deleted file mode 100644
index 9d17a36..0000000
--- a/client/tests/stress/stress.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class stress(test.test):
-    """
-    Calls stress, a simple program which aims to impose certain types of
-    computing stress on the target machine.
-    @author: Yi Yang (yang.y.yi@gmail.com)
-
-    For a quick overview of the options supported by the stress program, see
-    the options summary in the stress example control file.
-    """
-    version = 2
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://weather.ou.edu/~apw/projects/stress/stress-1.0.4.tar.gz
-    def setup(self, tarball = 'stress-1.0.4.tar.gz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-
-        utils.configure()
-        utils.make()
-
-
-    def run_once(self, args = '', stress_length=60):
-        if not args:
-            # We will use 2 workers of each type for each CPU detected
-            threads = 2 * utils.count_cpus()
-
-            # Sometimes the default memory used by each memory worker (256 M)
-            # might make our machine go OOM and then funny things might start to
-            # happen. Let's avoid that.
-            mb = utils.freememtotal() + utils.read_from_meminfo('SwapFree') / 2
-            memory_per_thread = (mb * 1024) / threads
-
-            # Even though unlikely, it's good to prevent from allocating more
-            # disk than this machine actually has on its autotest directory
-            # (limit the amount of disk used to max of 90 % of free space)
-            free_disk = utils.freespace(self.srcdir)
-            file_size_per_thread = 1024 ** 2
-            if (0.9 * free_disk) < file_size_per_thread * threads:
-                file_size_per_thread = (0.9 * free_disk) / threads
-
-            # Number of CPU workers spinning on sqrt()
-            args = '--cpu %d ' % threads
-            # Number of IO workers spinning on sync()
-            args += '--io %d ' % threads
-            # Number of Memory workers spinning on malloc()/free()
-            args += '--vm %d ' % threads
-            # Amount of memory used per each worker
-            args += '--vm-bytes %d ' % memory_per_thread
-            # Number of HD workers spinning on write()/unlink()
-            args += '--hdd %d ' % threads
-            # Size of the files created by each worker in bytes
-            args += '--hdd-bytes %d ' % file_size_per_thread
-            # Time for which the stress test will run
-            args += '--timeout %d ' % stress_length
-            # Verbose flag
-            args += '--verbose'
-
-        utils.system(self.srcdir + '/src/stress ' + args)
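
To illustrate the default-argument heuristic in run_once() above, here is the same arithmetic as a standalone sketch for a hypothetical machine; the meminfo figures appear to be in KiB (hence the * 1024 conversion), and all inputs below are invented for illustration.

    # Hypothetical inputs mirroring the values run_once() gathers from utils.
    cpus = 4                              # utils.count_cpus()
    free_mem_kib = 2 * 1024 * 1024        # freememtotal() + SwapFree/2, ~2 GiB in KiB
    free_disk_bytes = 50 * 1024 ** 3      # utils.freespace(srcdir), ~50 GiB

    threads = 2 * cpus                    # 8 workers of each type
    memory_per_thread = (free_mem_kib * 1024) // threads   # ~256 MiB per --vm worker

    file_size_per_thread = 1024 ** 2      # 1 MiB per --hdd worker
    if 0.9 * free_disk_bytes < file_size_per_thread * threads:
        file_size_per_thread = (0.9 * free_disk_bytes) / threads

    args = ('--cpu %d --io %d --vm %d --vm-bytes %d --hdd %d --hdd-bytes %d '
            '--timeout %d --verbose'
            % (threads, threads, threads, memory_per_thread,
               threads, file_size_per_thread, 60))
    print(args)
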
diff --git a/client/tests/synctest/control b/client/tests/synctest/control
deleted file mode 100644
index 4d8742f..0000000
--- a/client/tests/synctest/control
+++ /dev/null
@@ -1,15 +0,0 @@
-NAME='Sync Test'
-AUTHOR='Amrita Nayal <amritan@google.com>'
-TIME='SHORT'
-TEST_TYPE='client'
-TEST_CLASS='Kernel'
-TEST_CATEGORY='Functional'
-DOC='''\
-Test interrupting the sync system call.
-The child process creates enough dirty data and issues fsync.
-Meanwhile, the parent process issues kill.
-On success, the child is killed immediately while the data sync is in progress.
-IPC occurs through a semaphore and shared memory.
-
-'''
-job.run_test('synctest' ,len='100', loop='10')
diff --git a/client/tests/synctest/src/Makefile b/client/tests/synctest/src/Makefile
deleted file mode 100644
index d3d2544..0000000
--- a/client/tests/synctest/src/Makefile
+++ /dev/null
@@ -1,10 +0,0 @@
-
-TARGET=synctest
-FLAGS= -Wall -O2
-
-all: synctest.c
-	$(CROSS_COMPILE)gcc $(FLAGS) $^ -o $(TARGET)
-
-clean:
-	rm -f $(TARGET) *.o .depend *.*~
-
diff --git a/client/tests/synctest/src/synctest.c b/client/tests/synctest/src/synctest.c
deleted file mode 100644
index 973ab67..0000000
--- a/client/tests/synctest/src/synctest.c
+++ /dev/null
@@ -1,158 +0,0 @@
-#include <stdlib.h>
-#include <stdio.h>
-#include <sys/types.h>
-#include <sys/ipc.h>
-#include <sys/sem.h>
-#include <signal.h>
-#include <assert.h>
-#include <fcntl.h>
-#include <string.h>
-#include <unistd.h>
-#include <wait.h>
-#include <sys/shm.h>
-
-/*
- * Creates dirty data and issues sync at the end.
- * The child creates enough dirty data and issues fsync. The parent synchronizes
- * with the child and, as soon as fsync is issued, dispatches KILL.
- * If KILL was unsuccessful, a flag in shared memory is set.
- * The parent verifies this flag to determine the test result.
- */
-
-union semun {
-	int val;
-	struct semid_ds *buf;
-	unsigned short  *array;
-	struct seminfo  *__buf;
-};
-
-int main(int argc, char ** argv)
-{
-	int shm_id;
-	char* shm_addr, *data_array;
-	struct shmid_ds shm_desc;
-	union semun data;
-	struct sembuf op;
-	int sem_id;
-
-	int status, pid, fd, len, loop;
-	int count = 0, ret = 1, data_size;
-	int *post_sync;
-
-	if (argc != 3) {
-		printf("Usage : synctest <len> <loop> \n");
-		exit(1);
-	}
-	
-	len = atoi(argv[1]);
-	loop = atoi(argv[2]);
-	
-	data_size = len * 1024 * 1024;
-
-	/* allocate a shared memory segment with size of 10 bytes. */
-	shm_id = shmget(IPC_PRIVATE, 10, IPC_CREAT | IPC_EXCL | 0600);
-	if (shm_id == -1) {
-		perror("main : shmget \n");
-		exit(1);
-	}
-
-	/* attach the shared memory segment to our process's address space. */
-	shm_addr = shmat(shm_id, NULL, 0);
-	if (!shm_addr) { /* operation failed. */
-		perror("main : shmat \n");
-		goto early_out;
-	}
-
-	post_sync = (int*) shm_addr;
-	*post_sync = 0;
-
-	fd = open("testfile", O_RDWR|O_CREAT|O_APPEND|O_NONBLOCK);
-	if (!fd) {
-		perror("main : Failed to create data file \n");
-		goto out;
-	}
-	
-	data_array = (char *)malloc(data_size * sizeof(char));
-	if (!data_array) {
-		perror("main : Not enough memory \n");
-		goto out;
-	}
-	
-	op.sem_num = 0;
-	sem_id = semget(IPC_PRIVATE, 1, IPC_CREAT);
-
-	if (sem_id < 0){
-		perror("main : semget \n");
-		goto out;
-	}
-
-	data.val = 0;
-	semctl(sem_id, 0, SETVAL, data);
-
-	pid = fork();
-	if (pid < 0)
-	{
-		perror("main : fork failed \n");
-		goto out;
-	}
-	if (!pid)
-	{
-		/* child process */
-		while (count++ < loop) {
-			write(fd, data_array, data_size * (sizeof(char)));
-		}
-
-		printf("CHLD : start sync \n");
-		/* increment sema */
-		op.sem_op = 1;
-		semop(sem_id, &op, 1);
-
-		/* wait for parent */
-		op.sem_op = 0;
-		semop(sem_id, &op, 1);
-		fsync(fd);
-		*post_sync = 1;
-		return 0 ;
-	} else {
-		/* parent process */
-		/* waiting for child to increment sema */
-		op.sem_op = -1;
-		semop(sem_id, &op, 1);
-		/* some sleep so fsync gets started before we kill*/
-		sleep(1);
-		
-		ret = kill(pid, SIGKILL);
-		if (ret) {
-			perror("main : kill failed \n");
-			goto out;
-		}
-		
-		printf("PAR : waiting\n");
-		wait(&status);
-	}
-
-	ret = *post_sync;
-
-	if (!ret)
-		printf("PASS : sync interrupted \n");
-	else
-		printf("FAIL : sync not interrupted \n");
-
-out:
-	/* detach the shared memory segment from our process's address space. */
-	if (shmdt(shm_addr) == -1) {
-		perror("main : shmdt");
-	}
-
-	close(fd);
-	system("rm -f testfile \n");
-
-early_out:
-
-	/* de-allocate the shared memory segment. */
-	if (shmctl(shm_id, IPC_RMID, &shm_desc) == -1) {
-		perror("main : shmctl");
-	}
-
-	return ret;
-}
diff --git a/client/tests/synctest/synctest.py b/client/tests/synctest/synctest.py
deleted file mode 100644
index 37d5143..0000000
--- a/client/tests/synctest/synctest.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import os
-from autotest_lib.client.bin import test
-from autotest_lib.client.common_lib import utils
-
-
-class synctest(test.test):
-    version = 1
-    preserve_srcdir = True
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def run_once(self, len, loop, testdir=None):
-        args = len + ' ' + loop
-        output = os.path.join(self.srcdir, 'synctest ')
-        if testdir:
-            os.chdir(testdir)
-        utils.system(output + args)
diff --git a/client/tests/sysbench/control b/client/tests/sysbench/control
deleted file mode 100644
index 05eb3de..0000000
--- a/client/tests/sysbench/control
+++ /dev/null
@@ -1,25 +0,0 @@
-NAME = 'System Evaluation Benchmark'
-AUTHOR = 'Anton Blanchard <anton@samba.org>'
-TIME = 'MEDIUM'
-TEST_CLASS = 'IO'
-TEST_CATEGORY = 'Benchmark'
-TEST_TYPE = 'client'
-
-DOC = """
-The idea is to quickly get an impression of system performance for MySQL
-usage without setting up a complex benchmark and even without installing MySQL.
-In some cases this is very helpful. This is also the reason for keeping
-everything in a simple file that does not depend on any external libraries.
-"""
-
-build = 1
-for threads in range(1, count_cpus()+1):
-	job.run_test('sysbench', db_type='pgsql', build=build, \
-		num_threads=threads, read_only=1, tag='pgsql.' + str(threads))
-	build = 0
-
-build = 1
-for threads in range(1, count_cpus()+1):
-	job.run_test('sysbench', db_type='mysql', build=build, \
-		num_threads=threads, read_only=1, tag='mysql.' + str(threads))
-	build = 0
diff --git a/client/tests/sysbench/sysbench-0.4.8.tar.bz2 b/client/tests/sysbench/sysbench-0.4.8.tar.bz2
deleted file mode 100644
index 6dd8410..0000000
--- a/client/tests/sysbench/sysbench-0.4.8.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/sysbench/sysbench.py b/client/tests/sysbench/sysbench.py
deleted file mode 100644
index 8a5400c..0000000
--- a/client/tests/sysbench/sysbench.py
+++ /dev/null
@@ -1,167 +0,0 @@
-import os, time, re, pwd
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class sysbench(test.test):
-    version = 1
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.results = []
-
-    # http://osdn.dl.sourceforge.net/sourceforge/sysbench/sysbench-0.4.8.tar.gz
-    def setup(self, tarball = 'sysbench-0.4.8.tar.bz2'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        self.job.setup_dep(['pgsql', 'mysql'])
-
-        os.chdir(self.srcdir)
-
-        pgsql_dir = os.path.join(self.autodir, 'deps/pgsql/pgsql')
-        mysql_dir = os.path.join(self.autodir, 'deps/mysql/mysql')
-
-        # configure wants to get at pg_config, so add its path
-        utils.system(
-            'PATH=%s/bin:$PATH ./configure --with-mysql=%s --with-pgsql'
-            % (pgsql_dir, mysql_dir))
-        utils.make('-j %d' % utils.count_cpus())
-
-
-    def run_once(self, db_type = 'pgsql', build = 1, \
-                    num_threads = utils.count_cpus(), max_time = 60, \
-                    read_only = 0, args = ''):
-        plib = os.path.join(self.autodir, 'deps/pgsql/pgsql/lib')
-        mlib = os.path.join(self.autodir, 'deps/mysql/mysql/lib/mysql')
-        ld_path = utils.prepend_path(plib,
-            utils.environ('LD_LIBRARY_PATH'))
-        ld_path = utils.prepend_path(mlib, ld_path)
-        os.environ['LD_LIBRARY_PATH'] = ld_path
-
-        # The databases don't want to run as root so run them as nobody
-        self.dbuser = 'nobody'
-        self.dbuid = pwd.getpwnam(self.dbuser)[2]
-        self.sudo = 'sudo -u ' + self.dbuser + ' '
-
-        # Check for nobody user
-        try:
-            utils.system(self.sudo + '/bin/true')
-        except:
-            raise error.TestError('Unable to run as nobody')
-
-        if (db_type == 'pgsql'):
-            self.execute_pgsql(build, num_threads, max_time, read_only, args)
-        elif (db_type == 'mysql'):
-            self.execute_mysql(build, num_threads, max_time, read_only, args)
-
-
-    def execute_pgsql(self, build, num_threads, max_time, read_only, args):
-        bin = os.path.join(self.autodir, 'deps/pgsql/pgsql/bin')
-        data = os.path.join(self.autodir, 'deps/pgsql/pgsql/data')
-        log = os.path.join(self.debugdir, 'pgsql.log')
-
-        if build == 1:
-            utils.system('rm -rf ' + data)
-            os.mkdir(data)
-            os.chown(data, self.dbuid, 0)
-            utils.system(self.sudo + bin + '/initdb -D ' + data)
-
-        # Database must be able to write its output into debugdir
-        os.chown(self.debugdir, self.dbuid, 0)
-        utils.system(self.sudo + bin + '/pg_ctl -D %s -l %s start' %(data, log))
-
-        # Wait for database to start
-        time.sleep(5)
-
-        try:
-            base_cmd = self.srcdir + '/sysbench/sysbench --test=oltp ' \
-                       '--db-driver=pgsql --pgsql-user=' + self.dbuser
-
-            if build == 1:
-                utils.system(self.sudo + bin + '/createdb sbtest')
-                cmd = base_cmd +' prepare'
-                utils.system(cmd)
-
-            cmd = base_cmd + \
-                    ' --num-threads=' + str(num_threads) + \
-                    ' --max-time=' + str(max_time) + \
-                    ' --max-requests=0'
-
-            if read_only:
-                cmd = cmd + ' --oltp-read-only=on'
-
-            self.results.append(utils.system_output(cmd + ' run',
-                                                    retain_output=True))
-
-        except:
-            utils.system(self.sudo + bin + '/pg_ctl -D ' + data + ' stop')
-            raise
-
-        utils.system(self.sudo + bin + '/pg_ctl -D ' + data + ' stop')
-
-
-    def execute_mysql(self, build, num_threads, max_time, read_only, args):
-        bin = os.path.join(self.autodir, 'deps/mysql/mysql/bin')
-        data = os.path.join(self.autodir, 'deps/mysql/mysql/var')
-        log = os.path.join(self.debugdir, 'mysql.log')
-
-        if build == 1:
-            utils.system('rm -rf ' + data)
-            os.mkdir(data)
-            os.chown(data, self.dbuid, 0)
-            utils.system(bin + '/mysql_install_db --user=' + self.dbuser)
-
-        utils.system(bin + '/mysqld_safe --log-error=' + log + \
-                ' --user=' + self.dbuser + ' &')
-
-        # Wait for database to start
-        time.sleep(5)
-
-        try:
-            base_cmd = self.srcdir + '/sysbench/sysbench --test=oltp ' \
-                                     '--db-driver=mysql --mysql-user=root'
-
-            if build == 1:
-                utils.system('echo "create database sbtest" | ' + \
-                        bin + '/mysql -u root')
-                cmd = base_cmd +' prepare'
-                utils.system(cmd)
-
-            cmd = base_cmd + \
-                    ' --num-threads=' + str(num_threads) + \
-                    ' --max-time=' + str(max_time) + \
-                    ' --max-requests=0'
-
-            if read_only:
-                cmd = cmd + ' --oltp-read-only=on'
-
-            self.results.append(utils.system_output(cmd + ' run',
-                                                    retain_output=True))
-
-        except:
-            utils.system(bin + '/mysqladmin shutdown')
-            raise
-
-        utils.system(bin + '/mysqladmin shutdown')
-
-
-    def postprocess(self):
-        self.__format_results("\n".join(self.results))
-
-    def __format_results(self, results):
-        threads = 0
-        tps = 0
-
-        out = open(self.resultsdir + '/keyval', 'w')
-        for line in results.split('\n'):
-            threads_re = re.search('Number of threads: (\d+)', line)
-            if threads_re:
-                threads = threads_re.group(1)
-
-            tps_re = re.search('transactions:\s+\d+\s+\((\S+) per sec.\)', line)
-            if tps_re:
-                tps = tps_re.group(1)
-                break
-
-        out.write('threads=%s\ntps=%s' % (threads, tps))
-        out.close()
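
A standalone sketch of the __format_results() parsing above, applied to a hypothetical fragment of sysbench output (the exact output layout is an assumption; only the two matched lines matter here):

    import re

    # Hypothetical sysbench output fragment; real output may differ slightly.
    sample_lines = [
        "Number of threads: 4",
        "    transactions:                        12345  (205.75 per sec.)",
    ]

    threads, tps = 0, 0
    for line in sample_lines:
        m = re.search(r'Number of threads: (\d+)', line)
        if m:
            threads = m.group(1)
        m = re.search(r'transactions:\s+\d+\s+\((\S+) per sec.\)', line)
        if m:
            tps = m.group(1)

    # __format_results() writes these two values to <resultsdir>/keyval.
    print("threads=%s\ntps=%s" % (threads, tps))
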
diff --git a/client/tests/tbench/control b/client/tests/tbench/control
deleted file mode 100644
index c2aa96b..0000000
--- a/client/tests/tbench/control
+++ /dev/null
@@ -1,18 +0,0 @@
-NAME = 'TBench'
-AUTHOR = 'mbligh@google.com (Martin Bligh)'
-TIME = 'MEDIUM'
-TEST_CLASS = 'IO'
-TEST_CATEGORY = 'Benchmark'
-TEST_TYPE = 'client'
-
-DOC = """
-tbench produces only the TCP and process load. It does the same socket
-calls that smbd would do under a netbench load. It does no filesystem
-calls. The idea behind tbench is to eliminate smbd from the netbench
-test, as though the smbd code could be made infinitely fast. The
-throughput results of tbench tell us how fast a netbench run could go
-if we eliminated all filesystem IO and SMB packet processing.  tbench
-is built as part of the dbench package.
-"""
-
-job.run_test('tbench')
diff --git a/client/tests/tbench/dbench-3.04.tar.gz b/client/tests/tbench/dbench-3.04.tar.gz
deleted file mode 100644
index c0bb2e2..0000000
--- a/client/tests/tbench/dbench-3.04.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/tests/tbench/tbench.py b/client/tests/tbench/tbench.py
deleted file mode 100644
index 79466a3..0000000
--- a/client/tests/tbench/tbench.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import time, os, signal, re
-from autotest_lib.client.bin import test, utils
-
-
-class tbench(test.test):
-    version = 2
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # http://samba.org/ftp/tridge/dbench/dbench-3.04.tar.gz
-    def setup(self, tarball = 'dbench-3.04.tar.gz'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-
-        utils.configure()
-        utils.make()
-
-
-    def run_once(self, nprocs = None, args = ''):
-        # only supports combined server+client model at the moment
-        # should support separate I suppose, but nobody uses it
-        if not nprocs:
-            nprocs = self.job.cpu_count()
-        args = args + ' %s' % nprocs
-
-        pid = os.fork()
-        if pid:                         # parent
-            time.sleep(1)
-            client = self.srcdir + '/client.txt'
-            args = '-c ' + client + ' ' + '%s' % args
-            cmd = os.path.join(self.srcdir, "tbench") + " " + args
-            # Standard output is verbose and merely makes our debug logs huge
-            # so we don't retain it.  It gets parsed for the results.
-            self.results = utils.run(cmd, stderr_tee=utils.TEE_TO_LOGS).stdout
-            os.kill(pid, signal.SIGTERM)    # clean up the server
-        else:                           # child
-            server = self.srcdir + '/tbench_srv'
-            os.execlp(server, server)
-
-
-    def postprocess_iteration(self):
-        pattern = re.compile(r"Throughput (.*?) MB/sec (.*?) procs")
-        (throughput, procs) = pattern.findall(self.results)[0]
-        self.write_perf_keyval({'throughput':throughput, 'procs':procs})
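
The postprocess step above boils down to one regular expression over the combined tbench output. A minimal standalone sketch of that parse in Python; the sample summary line is invented for illustration and may not match real tbench output exactly:

    import re

    # Invented tbench summary line; real output formatting may differ slightly.
    sample = "Throughput 215.273 MB/sec 4 procs"

    pattern = re.compile(r"Throughput (.*?) MB/sec (.*?) procs")
    throughput, procs = pattern.findall(sample)[0]
    print({'throughput': throughput, 'procs': procs})  # {'throughput': '215.273', 'procs': '4'}
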
diff --git a/client/tests/tiobench/control b/client/tests/tiobench/control
deleted file mode 100644
index dd40a26..0000000
--- a/client/tests/tiobench/control
+++ /dev/null
@@ -1,15 +0,0 @@
-NAME = 'Threaded IO Bench'
-AUTHOR = 'walkinair@cn.ibm.com'
-TIME = 'MEDIUM'
-TEST_CLASS = 'IO'
-TEST_CATEGORY = 'Benchmark'
-TEST_TYPE = 'client'
-
-DOC = """
-Performs threaded I/O benchmarks.
-"""
-
-job.run_test('tiobench',
-             args='--block=4096 --block=8192 --threads=10 --size=1024',
-             iterations=2,
-             dir='/mnt')
diff --git a/client/tests/tiobench/makefile.patch b/client/tests/tiobench/makefile.patch
deleted file mode 100644
index 3957581..0000000
--- a/client/tests/tiobench/makefile.patch
+++ /dev/null
@@ -1,19 +0,0 @@
---- tiobench-0.3.3/Makefile.orig	2011-02-07 20:32:53.000000000 -0800
-+++ tiobench-0.3.3/Makefile	2011-02-07 20:33:02.000000000 -0800
-@@ -1,6 +1,6 @@
- # Makefile for tiotest
- 
--CC=gcc
-+CC?=gcc
- #CFLAGS=-O3 -fomit-frame-pointer -Wall
- CFLAGS=-O2 -Wall
- 
-@@ -14,7 +14,7 @@ CFLAGS=-O2 -Wall
- 
- #DEFINES=
- 
--LINK=gcc
-+LINK=$(CC)
- EXE=tiotest
- PROJECT=tiobench
- # do it once instead of each time referenced
diff --git a/client/tests/tiobench/tiobench-0.3.3.tar.bz2 b/client/tests/tiobench/tiobench-0.3.3.tar.bz2
deleted file mode 100644
index 39721ba..0000000
--- a/client/tests/tiobench/tiobench-0.3.3.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/tiobench/tiobench.py b/client/tests/tiobench/tiobench.py
deleted file mode 100644
index f249509..0000000
--- a/client/tests/tiobench/tiobench.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import os, logging
-from autotest_lib.client.bin import test, utils
-
-
-class tiobench(test.test):
-    version = 1
-
-    # http://prdownloads.sourceforge.net/tiobench/tiobench-0.3.3.tar.gz
-    def setup(self, tarball = 'tiobench-0.3.3.tar.bz2'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-        utils.system('patch -p1 < ../makefile.patch')
-        utils.system('make')
-
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def run_once(self, dir = None, args = None):
-        if not dir:
-            self.dir = self.tmpdir
-        else:
-            self.dir = dir
-        if not args:
-            self.args = '--block=4096 --block=8192 --threads=10 --size=1024 --numruns=2'
-        else:
-            self.args = args
-
-        os.chdir(self.srcdir)
-        results = utils.system_output('./tiobench.pl --dir %s %s' %
-                                      (self.dir, self.args))
-
-        logging.info(results)
-        results_path = os.path.join(self.resultsdir,
-                                    'raw_output_%s' % self.iteration)
-
-        utils.open_write_close(results_path, results)
diff --git a/client/tests/tracing_microbenchmark/base_tracer.py b/client/tests/tracing_microbenchmark/base_tracer.py
deleted file mode 100644
index e36eca4..0000000
--- a/client/tests/tracing_microbenchmark/base_tracer.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import os
-from autotest_lib.client.bin import utils
-
-
-class Tracer(object):
-    """
-    Common interface for tracing.
-    """
-
-    tracing_dir = None
-
-    def trace_config(self, path, value):
-        """
-        Write value to a tracing config file under self.tracing_dir.
-        """
-        path = os.path.join(self.tracing_dir, path)
-        utils.open_write_close(path, value)
-
-    def warmup(self, buffer_size_kb):
-        pass
-    def cleanup(self):
-        pass
-    def start_tracing(self):
-        pass
-    def stop_tracing(self):
-        pass
-    def gather_stats(self, results):
-        pass
-    def reset_tracing(self):
-        pass
diff --git a/client/tests/tracing_microbenchmark/control b/client/tests/tracing_microbenchmark/control
deleted file mode 100644
index ec7017b..0000000
--- a/client/tests/tracing_microbenchmark/control
+++ /dev/null
@@ -1,28 +0,0 @@
-AUTHOR = "David Sharp <dhsharp@google.com>"
-NAME = "Tracing microbenchmark"
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "Kernel"
-TEST_TYPE = "client"
-
-DOC = """
-A simple benchmark of kernel tracers such as ftrace. Enables tracepoints in
-sys_getuid and makes 100,000 calls to getuid with tracing on and off to measure
-the overhead of enabling tracing. The intent for this benchmark is to not
-overflow the ring buffer, so the buffer is generously sized.
-
-
-tracer:  tracepoint enabled
-------
-off:     n/a
-ftrace:  syscalls:sys_enter_getuid
-
-Args:
-  tracer: see table above.
-  buffer_size_kb: Set the tracing ring buffer to this size (per-cpu).
-  calls: Set the number of calls to make to getuid.
-"""
-
-
-job.run_test('tracing_microbenchmark', tracer='off', tag='off', iterations=10)
-job.run_test('tracing_microbenchmark', tracer='ftrace', tag='ftrace', iterations=10)
diff --git a/client/tests/tracing_microbenchmark/src/Makefile b/client/tests/tracing_microbenchmark/src/Makefile
deleted file mode 100644
index ac2af8a..0000000
--- a/client/tests/tracing_microbenchmark/src/Makefile
+++ /dev/null
@@ -1,8 +0,0 @@
-CC = $(CROSS_COMPILE)gcc
-LDLIBS = -lrt
-
-getuid_microbench: getuid_microbench.o
-
-.PHONY: clean
-clean:
-	rm *.o getuid_microbench
diff --git a/client/tests/tracing_microbenchmark/src/getuid_microbench.c b/client/tests/tracing_microbenchmark/src/getuid_microbench.c
deleted file mode 100644
index fd540cb..0000000
--- a/client/tests/tracing_microbenchmark/src/getuid_microbench.c
+++ /dev/null
@@ -1,63 +0,0 @@
-#define _GNU_SOURCE
-#include <sys/syscall.h>
-#include <sys/types.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <errno.h>
-#include <unistd.h>
-#include <time.h>
-
-void ts_subtract(struct timespec *result,
-                 const struct timespec *time1, const struct timespec *time2) {
-  *result = *time1;
-  result->tv_sec -= time2->tv_sec ;
-  if (result->tv_nsec < time2->tv_nsec) {
-    /* borrow a second */
-    result->tv_nsec += 1000000000L;
-    result->tv_sec--;
-  }
-  result->tv_nsec -= time2->tv_nsec;
-}
-
-void usage(const char *cmd) {
-    fprintf(stderr, "usage: %s <iterations>\n", cmd);
-}
-
-int main (int argc, char *argv[]) {
-  struct timespec start_time, end_time, elapsed_time;
-  uid_t uid;
-  long iterations, i;
-  double per_call;
-
-  if (argc != 2) {
-    usage(argv[0]);
-    return 1;
-  }
-
-  iterations = atol(argv[1]);
-  if (iterations < 0) {
-    usage(argv[0]);
-    return 1;
-  }
-
-  if (clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &start_time)) {
-    perror("clock_gettime");
-    return errno;
-  }
-
-  for (i = iterations; i; i--)
-    uid = syscall(SYS_getuid);
-
-  if (clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &end_time)) {
-    perror("clock_gettime");
-    return errno;
-  }
-
-  ts_subtract(&elapsed_time, &end_time, &start_time);
-  per_call = (elapsed_time.tv_sec * 1000000000.0L + elapsed_time.tv_nsec) /
-      (double)iterations;
-  printf("%ld calls in %ld.%09ld s (%lf ns/call)\n", iterations,
-         elapsed_time.tv_sec, elapsed_time.tv_nsec, per_call);
-
-  return 0;
-}
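
A rough Python analogue of the same measurement, using only the standard library; this is a sketch for orientation, not part of the removed sources, and its numbers include interpreter call overhead that the C version avoids:

    import os
    import sys
    import time

    def per_call_ns(iterations=100000):
        """Time repeated os.getuid() calls and return the mean cost per call in ns."""
        # process_time_ns() is CPU time, similar in spirit to CLOCK_PROCESS_CPUTIME_ID.
        start = time.process_time_ns()
        for _ in range(iterations):
            os.getuid()
        return (time.process_time_ns() - start) / iterations

    if __name__ == '__main__':
        n = int(sys.argv[1]) if len(sys.argv) > 1 else 100000
        print('%d calls, %.1f ns/call (includes Python overhead)' % (n, per_call_ns(n)))
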
diff --git a/client/tests/tracing_microbenchmark/tracers.py b/client/tests/tracing_microbenchmark/tracers.py
deleted file mode 100644
index 08ca7e3..0000000
--- a/client/tests/tracing_microbenchmark/tracers.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import os
-from autotest_lib.client.bin import utils
-
-import base_tracer
-try:
-    from site_tracers import *
-except ImportError:
-    pass
-
-
-off = base_tracer.Tracer
-
-
-class ftrace(base_tracer.Tracer):
-
-    mountpoint = '/sys/kernel/debug'
-    tracing_dir = os.path.join(mountpoint, 'tracing')
-
-    def warmup(self, buffer_size_kb):
-        if not os.path.exists(self.tracing_dir):
-            utils.system('mount -t debugfs debugfs %s' % self.mountpoint)
-
-        # ensure clean state:
-        self.trace_config('tracing_enabled', '0')
-        self.trace_config('current_tracer', 'nop')
-        self.trace_config('events/enable', '0')
-        self.trace_config('trace', '')
-        # set ring buffer size:
-        self.trace_config('buffer_size_kb', str(buffer_size_kb))
-        # enable tracepoints:
-        self.trace_config('events/syscalls/sys_enter_getuid/enable', '1')
-
-    def cleanup(self):
-        # reset ring buffer size:
-        self.trace_config('buffer_size_kb', '1408')
-        # disable tracepoints:
-        self.trace_config('events/enable', '0')
-
-    def start_tracing(self):
-        self.trace_config('tracing_enabled', '1')
-
-    def stop_tracing(self):
-        self.trace_config('tracing_enabled', '0')
-
-    def reset_tracing(self):
-        self.trace_config('trace', '')
-
-    def gather_stats(self, results):
-        per_cpu = os.path.join(self.tracing_dir, 'per_cpu')
-        for cpu in os.listdir(per_cpu):
-            cpu_stats = os.path.join(per_cpu, cpu, 'stats')
-            for line in utils.read_file(cpu_stats).splitlines():
-                key, val = line.split(': ')
-                key = key.replace(' ', '_')
-                val = int(val)
-                cpu_key = '%s_%s' % (cpu, key)
-                total_key = 'total_' + key
-                results[cpu_key] = val
-                results[total_key] = (results.get(total_key, 0) +
-                                      results[cpu_key])
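
gather_stats() above folds each per-CPU stats file into per-CPU keyvals plus running totals. A self-contained sketch of that aggregation over made-up stats text; the 'key: value' layout is assumed to match what ftrace emits:

    # Hypothetical contents of per_cpu/cpu0/stats and per_cpu/cpu1/stats.
    stats = {
        'cpu0': 'entries: 120\noverrun: 0\ncommit overrun: 0',
        'cpu1': 'entries: 80\noverrun: 0\ncommit overrun: 0',
    }

    results = {}
    for cpu, text in stats.items():
        for line in text.splitlines():
            key, val = line.split(': ')
            key = key.replace(' ', '_')
            results['%s_%s' % (cpu, key)] = int(val)
            total_key = 'total_' + key
            results[total_key] = results.get(total_key, 0) + int(val)

    print(results['total_entries'])  # 200
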
diff --git a/client/tests/tracing_microbenchmark/tracing_microbenchmark.py b/client/tests/tracing_microbenchmark/tracing_microbenchmark.py
deleted file mode 100644
index 2d7af6d..0000000
--- a/client/tests/tracing_microbenchmark/tracing_microbenchmark.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import os
-import re
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-
-import tracers
-import base_tracer
-
-class tracing_microbenchmark(test.test):
-    version = 1
-    preserve_srcdir = True
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.system('make CROSS_COMPILE=""')
-
-    def initialize(self, tracer='ftrace', calls=100000, **kwargs):
-        self.job.require_gcc()
-        tracer_class = getattr(tracers, tracer)
-        if not issubclass(tracer_class, base_tracer.Tracer):
-            raise TypeError
-        self.tracer = tracer_class()
-
-        getuid_microbench = os.path.join(self.srcdir, 'getuid_microbench')
-        self.cmd = '%s %d' % (getuid_microbench, calls)
-
-    def warmup(self, buffer_size_kb=8000, **kwargs):
-        self.tracer.warmup(buffer_size_kb)
-
-    def cleanup(self):
-        self.tracer.cleanup()
-
-    def run_once(self, **kwargs):
-        self.results = {}
-
-        self.tracer.start_tracing()
-        self.cmd_result = utils.run(self.cmd)
-        self.tracer.stop_tracing()
-
-        self.tracer.gather_stats(self.results)
-        self.tracer.reset_tracing()
-
-    def postprocess_iteration(self):
-        result_re = re.compile(r'(?P<calls>\d+) calls '
-                               r'in (?P<time>\d+\.\d+) s '
-                               '\((?P<ns_per_call>\d+\.\d+) ns/call\)')
-        match = result_re.match(self.cmd_result.stdout)
-        self.results.update(match.groupdict())
-
-        self.write_perf_keyval(self.results)
diff --git a/client/tests/tsc/control b/client/tests/tsc/control
deleted file mode 100644
index 0c1c65a..0000000
--- a/client/tests/tsc/control
+++ /dev/null
@@ -1,13 +0,0 @@
-NAME = 'Check TSC'
-AUTHOR = 'Michael Davidson <md@google.com>'
-TIME = 'MEDIUM'
-TEST_CLASS = 'Kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-DOC = """
-checktsc is a user space program that checks TSC synchronization
-between pairs of CPUs on an SMP system using a technique borrowed
-from the Linux 2.6.18 kernel.
-"""
-
-job.run_test('tsc')
diff --git a/client/tests/tsc/src/Makefile b/client/tests/tsc/src/Makefile
deleted file mode 100644
index c8843ba..0000000
--- a/client/tests/tsc/src/Makefile
+++ /dev/null
@@ -1,20 +0,0 @@
-CC=		cc
-CFLAGS=		-O
-LIBS=		-lpthread
-
-PROGS=		checktsc
-
-SRCS=		checktsc.c
-OBJS=		${SRCS:.c=.o}
-
-
-all:		$(PROGS)
-
-checktsc:	$(OBJS)
-		$(CC) $(LDFLAGS) -o checktsc $(OBJS) $(LIBS)
-
-clean:
-		-rm -f $(OBJS)
-
-clobber:	clean
-		-rm -f $(PROGS)
diff --git a/client/tests/tsc/src/README b/client/tests/tsc/src/README
deleted file mode 100644
index d6c3fbb..0000000
--- a/client/tests/tsc/src/README
+++ /dev/null
@@ -1,19 +0,0 @@
-checktsc is a user space program that checks TSC synchronization
-between pairs of CPUs on an SMP system using a technique borrowed
-from the Linux 2.6.18 kernel.
-
-The test passes if all TSCs are within +/- "threshold" clock cycles
-of each other. The default value of "threshold" is 500 clock cycles
-and can be changed using the --threshold option.
-
-Default behaviour is to check all of the CPUs on a system and to
-report the observed difference in TSC values between each pair of CPUs.
-The --cpus option can be used to specify a subset of the CPUs to test.
-
-When run with the --silent option the test produces no output (other
-than for catastrophic errors) and success or failure is indicated by
-a 0 or 1 exit status from the program.
-
-Author: md@google.com
-License: GPL
-
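
The --cpus option described above accepts a comma-separated list of ranges such as "0,2,4-7". A compact Python sketch of that parsing, for orientation only; parse_cpu_set() in checktsc.c below is the implementation the test actually used:

    def parse_cpu_set(spec):
        """Expand a string like '0,2,4-7' into a sorted list of CPU numbers."""
        cpus = set()
        for part in spec.split(','):
            if '-' in part:
                start, end = (int(x) for x in part.split('-', 1))
            else:
                start = end = int(part)
            if start < 0 or end < start:
                raise ValueError('bad cpu range %r' % part)
            cpus.update(range(start, end + 1))
        return sorted(cpus)

    print(parse_cpu_set('0,2,4-7'))  # [0, 2, 4, 5, 6, 7]
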
diff --git a/client/tests/tsc/src/checktsc.c b/client/tests/tsc/src/checktsc.c
deleted file mode 100644
index f7fc879..0000000
--- a/client/tests/tsc/src/checktsc.c
+++ /dev/null
@@ -1,385 +0,0 @@
-#define _GNU_SOURCE
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <stdint.h>
-#include <stdarg.h>
-#include <string.h>
-#include <getopt.h>
-#include <pthread.h>
-#include <errno.h>
-#include "sched.h"
-
-
-#define MAX_CPUS		32
-#define	DEFAULT_THRESHOLD	500	/* default maximum TSC skew	*/
-
-
-char	*program;
-long	threshold	= DEFAULT_THRESHOLD;
-int	silent		= 0;
-int	verbose		= 0;
-
-
-struct option options[] = {
-	{ "cpus",	required_argument,	0, 	'c'	},
-	{ "help",	no_argument,		0, 	'h'	},
-	{ "silent",	no_argument,		0, 	's'	},
-	{ "threshold",	required_argument,	0, 	't'	},
-	{ "verbose",	no_argument,		0, 	'v'	},
-	{ 0,	0,	0,	0 }
-};
-
-
-void usage(void)
-{
-	printf("usage: %s [-hsv] [-c <cpu_set>] [-t threshold]\n", program);
-}
-
-
-void help(void)
-{
-	usage();
-	printf("check TSC synchronization between CPUs\n");
-	printf("  -c,--cpus        set of cpus to test (default: all)\n");
-	printf("  -h,--help        show this message\n");
-	printf("  -s,--silent      no output if test is successful\n");
-	printf("  -t,--threshold   TSC skew threshold (default: %d cycles)\n",
-		DEFAULT_THRESHOLD);
-	printf("  -v,--verbose     verbose output\n");
-}
-
-
-void error(int err, const char *fmt, ...)
-{
-	va_list	ap;
-
-	fprintf(stderr, "%s: ", program);
-	va_start(ap, fmt);
-	vfprintf(stderr, fmt, ap);
-	va_end(ap);
-
-	if (err)
-		fprintf(stderr, ": %s\n", strerror(err));
-	putc('\n', stderr);
-}
-
-
-/*
- * parse a string containing a comma separated list of ranges
- * of cpu numbers such as: "0,2,4-7" into a cpu_set_t
- */
-int parse_cpu_set(const char *s, cpu_set_t *cpus)
-{
-	CPU_ZERO(cpus);
-
-	while (*s) {
-		char	*next;
-		int	cpu;
-		int	start, end;
-
-		start = end = (int)strtol(s, &next, 0);
-		if (s == next)
-			break;
-		s = next;
-
-		if (*s == '-') {
-			++s;
-			end = (int)strtol(s, &next, 0);
-			if (s == next)
-				break;
-			s = next;
-		}
-
-		if (*s == ',')
-			++s;
-
-		if (start < 0 || start >= CPU_SETSIZE) {
-			error(0, "bad cpu number '%d' in cpu set", start);
-			return 1;
-		}
-
-		if (end < 0 || end >= CPU_SETSIZE) {
-			error(0, "bad cpu number '%d' in cpu set", end);
-			return 1;
-		}
-
-		if (end < start) {
-			error(0, "bad cpu range '%d-%d' in cpu set",
-				start, end);
-			return 1;
-		}
-
-		for (cpu = start; cpu <= end; ++cpu)
-			CPU_SET(cpu, cpus);
-
-	}
-
-	if (*s) {
-		error(0, "unexpected character '%c' in cpu set", *s);
-		return 1;
-	}
-
-	return 0;
-}
-
-
-#define	CACHE_LINE_SIZE	256
-typedef union state {
-	int	state;
-	char	pad[CACHE_LINE_SIZE];
-} state_t;
-
-#define barrier()	__asm__ __volatile__("" : : : "memory")
-
-static void inline set_state(state_t *s, int v)
-{
-	s->state = v;
-}
-
-static void inline wait_for_state(state_t *s, int v)
-{
-	while (s->state != v)
-		barrier();
-}
-
-#if defined(__x86_64__)
-static inline uint64_t rdtsc(void)
-{
-	uint32_t	tsc_lo, tsc_hi;
-
-	__asm__ __volatile__("rdtsc" : "=a" (tsc_lo), "=d" (tsc_hi));
-
-	return ((uint64_t)tsc_hi << 32) | tsc_lo;
-}
-#else
-static inline uint64_t rdtsc(void)
-{
-	uint64_t	tsc;
-
-	__asm__ __volatile__("rdtsc" : "=A" (tsc));
-
-	return tsc;
-}
-#endif
-
-#define	READY	1
-#define	DONE	2
-#define	ERROR	3
-
-state_t		master;
-state_t		slave;
-
-int64_t		slave_tsc;
-int		slave_cpu;
-
-
-int set_cpu_affinity(int cpu)
-{
-	cpu_set_t cpus;
-
-	CPU_ZERO(&cpus);
-	CPU_SET(cpu, &cpus);
-	if (sched_setaffinity(0, sizeof cpus, &cpus) < 0) {
-		error(errno, "sched_setaffinity() failed for CPU %d", cpu);
-		return -1;
-	}
-	return 0;
-}
-
-#define NUM_ITERS	10
-
-int64_t
-tsc_delta(int cpu_a, int cpu_b)
-{
-	uint64_t	best_t0	= 0;
-	uint64_t	best_t1	= ~0ULL;
-	uint64_t	best_tm	= 0;
-	int64_t		delta;
-	uint64_t	t0, t1, tm;
-	int		i;
-
-	if (verbose)
-		printf("CPU %d - CPU %d\n", cpu_a, cpu_b);
-
-	if (set_cpu_affinity(cpu_a) < 0)
-		return -1;
-
-	slave_cpu = cpu_b;
-
-	for (i = 0; i < NUM_ITERS; i++) {
-
-		set_state(&master, READY);
-
-		wait_for_state(&slave, READY);
-
-		t0 = rdtsc();
-		set_state(&master, DONE);
-		wait_for_state(&slave, DONE);
-		t1 = rdtsc();
-
-		if ((t1 - t0) < (best_t1 - best_t0)) {
-			best_t0 = t0;
-			best_t1 = t1;
-			best_tm = slave_tsc;
-		}
-		if (verbose)
-			printf("loop %2d: roundtrip = %5Ld\n", i, t1 - t0);
-	}
-
-	delta = (best_t0/2 + best_t1/2 + (best_t0 & best_t1 & 1)) - best_tm; 
-
-	if (!silent)
-		printf("CPU %d - CPU %d = % 5Ld\n", cpu_a, cpu_b, delta);
-
-	return delta;
-}
-
-
-void *
-slave_thread(void *arg)
-{
-	int	current_cpu = -1;
-
-	for(;;) {
-
-		wait_for_state(&master, READY);
-
-		if (slave_cpu < 0) {
-			return NULL;
-		}
-
-		if (slave_cpu != current_cpu) {
-
-			if (set_cpu_affinity(slave_cpu) < 0) {
-				set_state(&slave, ERROR);
-				return NULL;
-			}
-
-			current_cpu = slave_cpu;
-		}
-
-		set_state(&slave, READY);
-
-		wait_for_state(&master, DONE);
-
-		slave_tsc = rdtsc();
-
-		set_state(&slave, DONE);
-	}
-	return NULL;
-}
-
-
-int
-check_tsc(cpu_set_t *cpus)
-{
-	int		cpu_a, cpu_b;
-	int64_t		delta;
-	int		err	= 0;
-	pthread_t	thread;
-
-	if ((err = pthread_create(&thread, NULL, slave_thread, NULL))) {
-		error(err, "pthread_create_failed");
-		return -1;
-	}
-	
-
-	for (cpu_a = 0; cpu_a < MAX_CPUS; cpu_a++) {
-		if (!CPU_ISSET(cpu_a, cpus))
-			continue;
-
-		for (cpu_b = 0; cpu_b < MAX_CPUS; cpu_b++) {
-			if (!CPU_ISSET(cpu_b, cpus) || cpu_a == cpu_b)
-				continue;
-
-			delta = tsc_delta(cpu_a, cpu_b);
-
-			if (llabs(delta) > threshold) {
-				++err;
-			}
-		}
-	}
-
-	/*
-	 * tell the slave thread to exit
-	 */
-	slave_cpu = -1;
-	set_state(&master, READY);
-
-	pthread_join(thread, NULL);
-
-	return err;
-}
-
-
-int
-main(int argc, char *argv[])
-{
-	int		c;
-	cpu_set_t	cpus;
-	int		errs	= 0;
-	extern int	optind;
-	extern char	*optarg;
-
-	if ((program = strrchr(argv[0], '/')) != NULL)
-		++program;
-	else
-		program = argv[0];
-
-	/*
-	 * default to checking all cpus
-	 */
-	for (c = 0; c < MAX_CPUS; c++) {
-		CPU_SET(c, &cpus);
-	}
-
-	while ((c = getopt_long(argc, argv, "c:hst:v", options, NULL)) != EOF) {
-		switch (c) {
-			case 'c':
-				if (parse_cpu_set(optarg, &cpus) != 0)
-					++errs;
-				break;
-			case 'h':
-				help();
-				exit(0);
-			case 's':
-				++silent;
-				break;
-			case 't':
-				threshold = strtol(optarg, NULL, 0);
-				break;
-			case 'v':
-				++verbose;
-				break;
-			default:
-				++errs;
-				break;
-		}
-	}
-
-	if (errs || optind < argc) {
-		usage();
-		exit(1);
-	}
-
-	/*
-	 * limit the set of CPUs to the ones that are currently available
-	 * (Note that on some kernel versions sched_setaffinity() will fail
-	 * if you specify CPUs that are not currently online so we ignore
-	 * the return value and hope for the best)
-	 */
-	sched_setaffinity(0, sizeof cpus, &cpus);
-	if (sched_getaffinity(0, sizeof cpus, &cpus) < 0) {
-		error(errno, "sched_getaffinity() failed");
-		exit(1);
-	}
-
-	errs = check_tsc(&cpus);
-
-	if (!silent) {
-		printf("%s\n", errs ? "FAIL" : "PASS");
-	}
-
-	return errs ? EXIT_FAILURE : EXIT_SUCCESS;
-}
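
The core of tsc_delta() above: for each handshake the master reads its TSC before (t0) and after (t1) the slave samples its own TSC (tm), keeps the iteration with the smallest round trip, and estimates the skew as the midpoint of t0..t1 minus tm. A small Python sketch of just that arithmetic, on invented raw readings:

    def tsc_skew(samples):
        """samples: iterable of (t0, t1, tm) triples where t0/t1 bracket the remote read tm."""
        # Keep the sample with the tightest round trip; Python ints don't overflow,
        # so a plain midpoint replaces the halved-sum trick used in the C code.
        t0, t1, tm = min(samples, key=lambda s: s[1] - s[0])
        return (t0 + t1) // 2 - tm

    # Hypothetical raw TSC readings from three handshake iterations.
    print(tsc_skew([(1000, 1600, 1290), (2000, 2400, 2230), (3000, 3900, 3460)]))  # -30
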
diff --git a/client/tests/tsc/src/sched.h b/client/tests/tsc/src/sched.h
deleted file mode 100644
index fee0ad2..0000000
--- a/client/tests/tsc/src/sched.h
+++ /dev/null
@@ -1,47 +0,0 @@
-#include <sched.h>
-/*
- * if we have an ancient sched.h we need to provide
- * definitions for cpu_set_t and associated macros
- */
-#if !defined __cpu_set_t_defined
-# define __cpu_set_t_defined
-/* Size definition for CPU sets.  */
-# define __CPU_SETSIZE	1024
-# define __NCPUBITS	(8 * sizeof (__cpu_mask))
-
-/* Type for array elements in 'cpu_set'.  */
-typedef unsigned long int __cpu_mask;
-
-/* Basic access functions.  */
-# define __CPUELT(cpu)	((cpu) / __NCPUBITS)
-# define __CPUMASK(cpu)	((__cpu_mask) 1 << ((cpu) % __NCPUBITS))
-
-/* Data structure to describe CPU mask.  */
-typedef struct
-{
-  __cpu_mask __bits[__CPU_SETSIZE / __NCPUBITS];
-} cpu_set_t;
-
-/* Access functions for CPU masks.  */
-# define __CPU_ZERO(cpusetp) \
-  do {									      \
-    unsigned int __i;							      \
-    cpu_set_t *__arr = (cpusetp);					      \
-    for (__i = 0; __i < sizeof (cpu_set_t) / sizeof (__cpu_mask); ++__i)      \
-      __arr->__bits[__i] = 0;						      \
-  } while (0)
-# define __CPU_SET(cpu, cpusetp) \
-  ((cpusetp)->__bits[__CPUELT (cpu)] |= __CPUMASK (cpu))
-# define __CPU_CLR(cpu, cpusetp) \
-  ((cpusetp)->__bits[__CPUELT (cpu)] &= ~__CPUMASK (cpu))
-# define __CPU_ISSET(cpu, cpusetp) \
-  (((cpusetp)->__bits[__CPUELT (cpu)] & __CPUMASK (cpu)) != 0)
-
-/* Access macros for `cpu_set'.  */
-#define CPU_SETSIZE __CPU_SETSIZE
-#define CPU_SET(cpu, cpusetp)	__CPU_SET (cpu, cpusetp)
-#define CPU_CLR(cpu, cpusetp)	__CPU_CLR (cpu, cpusetp)
-#define CPU_ISSET(cpu, cpusetp)	__CPU_ISSET (cpu, cpusetp)
-#define CPU_ZERO(cpusetp)	__CPU_ZERO (cpusetp)
-
-#endif
diff --git a/client/tests/tsc/tsc.py b/client/tests/tsc/tsc.py
deleted file mode 100644
index 1c1058d..0000000
--- a/client/tests/tsc/tsc.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import os, re, logging
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class tsc(test.test):
-    version = 3
-
-    preserve_srcdir = True
-
-    def setup(self):
-        os.chdir(self.srcdir)
-        utils.make()
-
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    def run_once(self, args = '-t 650'):
-        result = utils.run(self.srcdir + '/checktsc ' + args,
-                           stdout_tee=open(os.path.join(self.resultsdir,
-                                                        'checktsc.log'), 'w'),
-                           ignore_status=True)
-        if result.exit_status != 0:
-            logging.error('Program checktsc exit status is %s',
-                          result.exit_status)
-            default_reason = ("UNKNOWN FAILURE: rc=%d from %s" %
-                              (result.exit_status, result.command))
-            ## Analyze result.stdout to see if it is possible to form a qualified
-            ## reason of failure and to raise an appropriate exception.
-            ## For this test we qualify the reason of failure if the
-            ## following conditions are met:
-            ## (i) result.exit_status = 1
-            ## (ii) result.stdout ends with 'FAIL'
-            ## (iii) "FAIL" is preceded by one or more
-            ##       lines in the following format:
-            ##       CPU x - CPU y = <delta>
-            ## Set as a reason the line that contains max abs(delta)
-            if result.exit_status == 1:
-                if result.stdout.strip('\n').endswith('FAIL'):
-                    ## find all lines
-                    ## CPU x - CPU y = <delta>
-                    ## and parse out delta of max abs value
-                    max_delta = 0
-                    reason = ''
-                    threshold = int(args.split()[1])
-                    latencies = re.findall("CPU \d+ - CPU \d+ =\s+-*\d+",
-                                           result.stdout)
-                    for ln in latencies:
-                        cur_delta = int(ln.split('=', 2)[1])
-                        if abs(cur_delta) > max_delta:
-                            max_delta = abs(cur_delta)
-                            reason = ln
-                    if max_delta > threshold:
-                        reason = "Latency %s exceeds threshold %d" % (reason,
-                                                                      threshold)
-                        raise error.TestFail(reason)
-
-            ## If we are here, we failed to qualify the reason of the test failure.
-            ## Consider it as a test error
-            raise error.TestError(default_reason)
diff --git a/client/tests/unixbench5/Makefile.patch b/client/tests/unixbench5/Makefile.patch
deleted file mode 100644
index 27ac225..0000000
--- a/client/tests/unixbench5/Makefile.patch
+++ /dev/null
@@ -1,11 +0,0 @@
---- Makefile.bak       2011-01-14 10:45:12.000000000 -0800
-+++ Makefile   2011-01-14 10:46:54.000000000 -0800
-@@ -52,7 +52,7 @@
- # COMPILER CONFIGURATION: Set "CC" to the name of the compiler to use
- # to build the binary benchmarks.  You should also set "$cCompiler" in the
- # Run script to the name of the compiler you want to test.
--CC=gcc
-+CC?=gcc
-
- # OPTIMISATION SETTINGS:
-
diff --git a/client/tests/unixbench5/control b/client/tests/unixbench5/control
deleted file mode 100644
index 862a521..0000000
--- a/client/tests/unixbench5/control
+++ /dev/null
@@ -1,26 +0,0 @@
-NAME = 'Unix Bench 5'
-AUTHOR = 'adrianbg@google.com'
-TIME = 'MEDIUM'
-PURPOSE = 'Measure system level performance.'
-CRITERIA = 'This test is a benchmark.'
-TEST_CLASS = 'Kernel'
-TEST_CATEGORY = 'Benchmark'
-TEST_TYPE = 'client'
-DOC = """
-This test measures system-wide performance by running the following tests:
-  - Dhrystone - focuses on string handling.
-  - Whetstone - measures floating point operations.
-  - Execl Throughput - measures the number of execl calls per second.
-  - File Copy
-  - Pipe throughput
-  - Pipe-based context switching
-  - Process creation - number of times a process can fork and reap
-  - Shell Scripts - number of times a process can start and reap a script
-  - System Call Overhead - estimates the cost of entering and leaving the
-    kernel.
-
-For more information visit:
-http://code.google.com/p/byte-unixbench/
-"""
-
-job.run_test('unixbench5')
diff --git a/client/tests/unixbench5/unixbench-5.1.3.tgz b/client/tests/unixbench5/unixbench-5.1.3.tgz
deleted file mode 100644
index c654b33..0000000
--- a/client/tests/unixbench5/unixbench-5.1.3.tgz
+++ /dev/null
Binary files differ
diff --git a/client/tests/unixbench5/unixbench5.py b/client/tests/unixbench5/unixbench5.py
deleted file mode 100644
index 46d176e..0000000
--- a/client/tests/unixbench5/unixbench5.py
+++ /dev/null
@@ -1,251 +0,0 @@
-import os, re
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class unixbench5(test.test):
-    """
-    This test measures system-wide performance by running the following tests:
-      - Dhrystone - focuses on string handling.
-      - Whetstone - measures floating point operations.
-      - Execl Throughput - measures the number of execl calls per second.
-      - File Copy
-      - Pipe throughput
-      - Pipe-based context switching
-      - Process creation - number of times a process can fork and reap
-      - Shell Scripts - number of times a process can start and reap a script
-      - System Call Overhead - estimates the cost of entering and leaving the
-        kernel.
-
-    @see: http://code.google.com/p/byte-unixbench/
-    @author: Dale Curtis <dalecurtis@google.com>
-    """
-    version = 1
-
-
-    def initialize(self):
-        self.job.require_gcc()
-        self.err = []
-
-
-    def setup(self, tarball='unixbench-5.1.3.tgz'):
-        """
-        Compiles unixbench.
-
-        @param tarball: Path or URL to a unixbench tarball
-        @see: http://byte-unixbench.googlecode.com/files/unixbench-5.1.3.tgz
-        """
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-
-        utils.system('patch -p0 < ../Makefile.patch')
-        utils.make()
-
-
-    def run_once(self, args=''):
-        vars = 'UB_TMPDIR="%s" UB_RESULTDIR="%s"' % (self.tmpdir,
-                                                     self.resultsdir)
-        os.chdir(self.srcdir)
-        self.report_data = utils.system_output(vars + ' ./Run ' + args)
-        self.results_path = os.path.join(self.resultsdir,
-                                         'raw_output_%s' % self.iteration)
-        utils.open_write_close(self.results_path, self.report_data)
-
-
-    def cleanup(self):
-        """
-        Check error index list and throw TestError if necessary.
-        """
-        if self.err:
-            e_msg = ("No measured results for output lines: %s\nOutput:%s" %
-                     (" ".join(self.err), self.report_data))
-            raise error.TestError(e_msg)
-
-
-    def process_section(self, section, suffix):
-        keyval = {}
-        subsections = section.split('\n\n')
-
-        if len(subsections) < 3:
-            raise error.TestError('Invalid output format. Unable to parse')
-
-        # Process the subsection containing performance results first.
-        for index, line in enumerate(subsections[1].strip().split('\n')):
-            # Look for problems first.
-            if re.search('no measured results', line, flags=re.IGNORECASE):
-                self.err.append(str(index + 1))
-
-            # Every performance result line ends with 6 values, with the sixth
-            # being the actual result. Make sure there are at least that many
-            # words in the line before processing.
-            words = line.lower().split()
-            if len(words) >= 6:
-                key = re.sub('\W', '', '_'.join(words[:-6]))
-                keyval[key + suffix] = words[-6]
-
-        # The final score should be the last item in the third subsection.
-        keyval['score' + suffix] = subsections[2].strip().split()[-1]
-
-        self.write_perf_keyval(keyval)
-
-
-    def postprocess_iteration(self):
-        # Break up sections around dividing lines.
-        sections = self.report_data.split('-'*72)
-
-        # First section is junk to us, second has results for single CPU run.
-        if len(sections) > 1:
-            self.process_section(section=sections[1], suffix='')
-
-            # Only machines with > 1 CPU will have a 3rd section.
-            if len(sections) > 2:
-                self.process_section(section=sections[2], suffix='_multi')
-        else:
-            raise error.TestError('Invalid output format. Unable to parse')
-
-
-""" Here is a sample output:
-
-   #    #  #    #  #  #    #          #####   ######  #    #   ####   #    #
-   #    #  ##   #  #   #  #           #    #  #       ##   #  #    #  #    #
-   #    #  # #  #  #    ##            #####   #####   # #  #  #       ######
-   #    #  #  # #  #    ##            #    #  #       #  # #  #       #    #
-   #    #  #   ##  #   #  #           #    #  #       #   ##  #    #  #    #
-    ####   #    #  #  #    #          #####   ######  #    #   ####   #    #
-
-   Version 5.1.2                      Based on the Byte Magazine Unix Benchmark
-
-   Multi-CPU version                  Version 5 revisions by Ian Smith,
-                                      Sunnyvale, CA, USA
-   December 22, 2007                  johantheghost at yahoo period com
-
-
-1 x Dhrystone 2 using register variables  1 2 3 4 5 6 7 8 9 10
-
-1 x Double-Precision Whetstone  1 2 3 4 5 6 7 8 9 10
-
-1 x Execl Throughput  1 2 3
-
-1 x File Copy 1024 bufsize 2000 maxblocks  1 2 3
-
-1 x File Copy 256 bufsize 500 maxblocks  1 2 3
-
-1 x File Copy 4096 bufsize 8000 maxblocks  1 2 3
-
-1 x Pipe Throughput  1 2 3 4 5 6 7 8 9 10
-
-1 x Pipe-based Context Switching  1 2 3 4 5 6 7 8 9 10
-
-1 x Process Creation  1 2 3
-
-1 x System Call Overhead  1 2 3 4 5 6 7 8 9 10
-
-1 x Shell Scripts (1 concurrent)  1 2 3
-
-1 x Shell Scripts (8 concurrent)  1 2 3
-
-2 x Dhrystone 2 using register variables  1 2 3 4 5 6 7 8 9 10
-
-2 x Double-Precision Whetstone  1 2 3 4 5 6 7 8 9 10
-
-2 x Execl Throughput  1 2 3
-
-2 x File Copy 1024 bufsize 2000 maxblocks  1 2 3
-
-2 x File Copy 256 bufsize 500 maxblocks  1 2 3
-
-2 x File Copy 4096 bufsize 8000 maxblocks  1 2 3
-
-2 x Pipe Throughput  1 2 3 4 5 6 7 8 9 10
-
-2 x Pipe-based Context Switching  1 2 3 4 5 6 7 8 9 10
-
-2 x Process Creation  1 2 3
-
-2 x System Call Overhead  1 2 3 4 5 6 7 8 9 10
-
-2 x Shell Scripts (1 concurrent)  1 2 3
-
-2 x Shell Scripts (8 concurrent)  1 2 3
-
-========================================================================
-   BYTE UNIX Benchmarks (Version 5.1.2)
-
-   System: localhost: GNU/Linux
-   OS: GNU/Linux -- 2.6.32.26+drm33.12 -- #1 SMP Wed Jan 12 16:16:05 PST 2011
-   Machine: i686 (GenuineIntel)
-   Language: en_US.utf8 (charmap=, collate=)
-   CPU 0: Intel(R) Atom(TM) CPU N455 @ 1.66GHz (3325.2 bogomips)
-          Hyper-Threading, x86-64, MMX, Physical Address Ext, SYSENTER/SYSEXIT
-   CPU 1: Intel(R) Atom(TM) CPU N455 @ 1.66GHz (3325.0 bogomips)
-          Hyper-Threading, x86-64, MMX, Physical Address Ext, SYSENTER/SYSEXIT
-   14:11:59 up 1 day,  1:10,  0 users,  load average: 0.47, 0.48, 0.51; runlevel
-
-------------------------------------------------------------------------
-Benchmark Run: Fri Jan 14 2011 14:11:59 - 14:41:26
-2 CPUs in system; running 1 parallel copy of tests
-
-Dhrystone 2 using register variables        2264000.6 lps   (10.0 s, 7 samples)
-Double-Precision Whetstone                      507.0 MWIPS (10.1 s, 7 samples)
-Execl Throughput                                796.7 lps   (30.0 s, 2 samples)
-File Copy 1024 bufsize 2000 maxblocks        110924.1 KBps  (30.1 s, 2 samples)
-File Copy 256 bufsize 500 maxblocks           32600.5 KBps  (30.1 s, 2 samples)
-File Copy 4096 bufsize 8000 maxblocks        284236.5 KBps  (30.0 s, 2 samples)
-Pipe Throughput                              301672.5 lps   (10.0 s, 7 samples)
-Pipe-based Context Switching                  29475.3 lps   (10.0 s, 7 samples)
-Process Creation                               3124.6 lps   (30.0 s, 2 samples)
-Shell Scripts (1 concurrent)                   1753.0 lpm   (60.0 s, 2 samples)
-Shell Scripts (8 concurrent)                    305.9 lpm   (60.1 s, 2 samples)
-System Call Overhead                         592781.7 lps   (10.0 s, 7 samples)
-
-System Benchmarks Index Values               BASELINE       RESULT    INDEX
-Dhrystone 2 using register variables         116700.0    2264000.6    194.0
-Double-Precision Whetstone                       55.0        507.0     92.2
-Execl Throughput                                 43.0        796.7    185.3
-File Copy 1024 bufsize 2000 maxblocks          3960.0     110924.1    280.1
-File Copy 256 bufsize 500 maxblocks            1655.0      32600.5    197.0
-File Copy 4096 bufsize 8000 maxblocks          5800.0     284236.5    490.1
-Pipe Throughput                               12440.0     301672.5    242.5
-Pipe-based Context Switching                   4000.0      29475.3     73.7
-Process Creation                                126.0       3124.6    248.0
-Shell Scripts (1 concurrent)                     42.4       1753.0    413.4
-Shell Scripts (8 concurrent)                      6.0        305.9    509.8
-System Call Overhead                          15000.0     592781.7    395.2
-                                                                   ========
-System Benchmarks Index Score                                         238.0
-
-------------------------------------------------------------------------
-Benchmark Run: Fri Jan 14 2011 14:41:26 - 15:09:23
-2 CPUs in system; running 2 parallel copies of tests
-
-Dhrystone 2 using register variables        3411919.6 lps   (10.0 s, 7 samples)
-Double-Precision Whetstone                      964.3 MWIPS (10.1 s, 7 samples)
-Execl Throughput                               2053.5 lps   (30.0 s, 2 samples)
-File Copy 1024 bufsize 2000 maxblocks        158308.0 KBps  (30.0 s, 2 samples)
-File Copy 256 bufsize 500 maxblocks           46249.5 KBps  (30.0 s, 2 samples)
-File Copy 4096 bufsize 8000 maxblocks        389881.9 KBps  (30.0 s, 2 samples)
-Pipe Throughput                              410193.1 lps   (10.0 s, 7 samples)
-Pipe-based Context Switching                 113780.0 lps   (10.0 s, 7 samples)
-Process Creation                               7609.0 lps   (30.0 s, 2 samples)
-Shell Scripts (1 concurrent)                   2355.0 lpm   (60.0 s, 2 samples)
-Shell Scripts (8 concurrent)                    308.1 lpm   (60.2 s, 2 samples)
-System Call Overhead                        1057063.2 lps   (10.0 s, 7 samples)
-
-System Benchmarks Index Values               BASELINE       RESULT    INDEX
-Dhrystone 2 using register variables         116700.0    3411919.6    292.4
-Double-Precision Whetstone                       55.0        964.3    175.3
-Execl Throughput                                 43.0       2053.5    477.6
-File Copy 1024 bufsize 2000 maxblocks          3960.0     158308.0    399.8
-File Copy 256 bufsize 500 maxblocks            1655.0      46249.5    279.5
-File Copy 4096 bufsize 8000 maxblocks          5800.0     389881.9    672.2
-Pipe Throughput                               12440.0     410193.1    329.7
-Pipe-based Context Switching                   4000.0     113780.0    284.5
-Process Creation                                126.0       7609.0    603.9
-Shell Scripts (1 concurrent)                     42.4       2355.0    555.4
-Shell Scripts (8 concurrent)                      6.0        308.1    513.5
-System Call Overhead                          15000.0    1057063.2    704.7
-                                                                   ========
-System Benchmarks Index Score                                         407.4
-
-"""
diff --git a/client/tests/uptime/control b/client/tests/uptime/control
deleted file mode 100644
index 7a382f7..0000000
--- a/client/tests/uptime/control
+++ /dev/null
@@ -1,29 +0,0 @@
-AUTHOR="Vladimir Samarskiy <vsamarsk@google.com>"
-NAME="Uptime Test"
-TIME="MEDIUM"  ## ~3hrs
-TEST_CLASS="Kernel"
-TEST_CATEGORY="Stress"
-TEST_TYPE="CLIENT"
-
-DOC = """
-The test repeatedly executes kernbench for T=cycle_length seconds and then
-sleeps for the same amount of time. Iterations continue
-until the total elapsed time of the test reaches T=target_time.
-"""
-
-import time
-
-
-def uptime_test(cycle_length = 300, target_time = 3*60*60):
-    test_started = time.time()
-    counter = 0
-    while time.time() < test_started + target_time:
-        kernbench_started = time.time()
-        while time.time() < kernbench_started + cycle_length:
-                counter += 1
-                job.run_test('kernbench', tag='%d' % counter)
-        job.run_test('sleeptest', tag='%d' % counter, seconds=cycle_length)
-
-
-uptime_test()
-
diff --git a/client/tests/wb_kupdate/README b/client/tests/wb_kupdate/README
deleted file mode 100644
index ebfe116..0000000
--- a/client/tests/wb_kupdate/README
+++ /dev/null
@@ -1,9 +0,0 @@
-Description:
-------------
-This test checks the wb_kupdate code path by writing data to a sparse file
-mounted on a loopback device formatted with a user-specified filesystem,
-and waiting a maximum of 'max_flush_time' for writeback to flush the dirty
-data from the cache to disk.
-
-At the end of the test a keyval file is generated which records, per iteration,
-the time taken to write the user-specified amount of data to disk.
diff --git a/client/tests/wb_kupdate/__init__.py b/client/tests/wb_kupdate/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/client/tests/wb_kupdate/__init__.py
+++ /dev/null
diff --git a/client/tests/wb_kupdate/control b/client/tests/wb_kupdate/control
deleted file mode 100644
index eed8847..0000000
--- a/client/tests/wb_kupdate/control
+++ /dev/null
@@ -1,44 +0,0 @@
-AUTHOR = "Akshay Lal <akshaylal@google.com>"
-NAME = "wb_kupdate"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "General"
-TEST_TYPE = "client"
-TIME = 'MEDIUM'
-DOC='''
-This test checks the wb_kupdate code path by writing data to a sparse file
-and waiting at most `max_flush_time` for the file to be flushed from the
-cache to disk.
-'''
-
-import os
-# Required Parameters:
-# --------------------
-mount_point='/export/wb_kupdate' # Absolute path.
-file_count=5                     # The number of files to write.
-write_size=1                     # In MB.
-
-# Optional Parameters:
-# --------------------
-max_flush_time=1                # In minutes.
-file_system='ext4'              # mkfs.<file_system> must already exist on
-                                # the machine. To avoid device initialization
-                                # set to None.
-remove_previous=False           # Boolean.
-sparse_file=os.path.join(       # Absolute path to the sparse file.
-        job.tmpdir,
-        'sparse_file')
-old_cleanup=False               # Remove a previously created mount_point if it
-                                # exists and is not mounted.
-
-# Beginning execution of the xfstests:
-# ------------------------------------
-job.run_test('wb_kupdate',
-             mount_point=mount_point,
-             file_count=int(file_count),
-             write_size=int(write_size),
-             max_flush_time=int(max_flush_time),
-             file_system=file_system,
-             remove_previous=remove_previous,
-             sparse_file=sparse_file,
-             old_cleanup=old_cleanup,
-             tag='wb_kupdate_execution')
diff --git a/client/tests/wb_kupdate/wb_kupdate.py b/client/tests/wb_kupdate/wb_kupdate.py
deleted file mode 100644
index d180e65..0000000
--- a/client/tests/wb_kupdate/wb_kupdate.py
+++ /dev/null
@@ -1,269 +0,0 @@
-import datetime, logging, os, time
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-class wb_kupdate(test.test):
-    version = 1
-
-
-    def _check_parameters(self, mount_point, write_size, file_count,
-                          old_cleanup=False):
-        """
-        Check all test parameters.
-
-        @param mount_point: the path to the desired mount_point.
-        @param write_size: the size of data in MB to write.
-        @param file_count: the number of files to write.
-        @param old_cleanup: removes previous mount_point if it exists and is
-                not mounted. Default is False.
-        """
-        # Check mount_point.
-        if not os.path.exists(mount_point):
-            logging.info('%s does not exist. Creating directory.', mount_point)
-        elif not os.path.ismount(mount_point) and old_cleanup:
-            logging.info('Removing previous mount_point directory')
-            os.rmdir(mount_point)
-            logging.info('Creating new mount_point.')
-        else:
-            raise error.TestError('Mount point: %s already exists.' %
-                                  mount_point)
-
-        os.makedirs(mount_point)
-        # Check write_size > 0.
-        if not (write_size > 0):
-            raise error.TestError('Write size should be a positive integer.')
-
-        # Check file_count > 0.
-        if not (file_count > 0) :
-            raise error.TestError('File count should be a positive integer.')
-
-
-    def _reset_device(self):
-        """
-        Reset the test. Reinitialize sparse file.
-        """
-        # Umount device.
-        logging.debug('Cleanup - unmounting loopback device.')
-        utils.system('umount %s' % self.mount_point, ignore_status=True)
-
-        # Remove sparse_file.
-        logging.debug('Cleanup - removing sparse file.')
-        os.remove(self.sparse_file)
-
-        # Remove mount_point directory.
-        logging.debug('Cleanup - removing the mount_point.')
-        os.rmdir(self.mount_point)
-
-
-    def _create_partition(self):
-        """
-        Create and initialize the sparse file.
-        """
-        # Recreate sparse_file.
-        utils.system('dd if=/dev/zero of=%s bs=1M seek=1024 count=1' %
-                      self.sparse_file)
-
-        # Format sparse_file.
-        utils.system('echo "y" |  mkfs -t %s %s' %
-                     (self.file_system, self.sparse_file))
-
-        # Mount sparse_file.
-        utils.system('mount -o loop -t %s %s %s' %
-                     (self.file_system, self.sparse_file, self.mount_point))
-
-
-    def _needs_more_time(self, start_time, duration, _now=None):
-        """
-        Checks to see if the test has run its course.
-
-        @param start_time: a datetime object specifying the start time of the
-                test.
-        @param duration: test duration in minutes.
-        @param _now: used mostly for testing - ensures that the function returns
-                pass/fail depending on the value of _now.
-
-        @return: True if the test still needs to run longer.
-                 False if the test has run for 'duration' minutes.
-        """
-        if not _now:
-            time_diff = datetime.datetime.now() - start_time
-        else:
-            time_diff = _now - start_time
-        return time_diff <= datetime.timedelta(seconds=duration*60)
-
-
-    def _write_data(self, destination, counter, write_size):
-        """
-        Writes data to the cache/memory.
-
-        @param destination: the absolute path to where the data needs to be
-        written.
-        @param counter: the file counter.
-        @param write_size: the size of data to be written.
-
-        @return: the time when the write completed as a datetime object.
-        """
-        # Write data to disk.
-        file_name = os.path.join(destination, 'test_file_%s' % counter)
-        write_cmd = ('dd if=/dev/zero of=%s bs=1M count=%s' %
-                     (file_name, write_size))
-        utils.system(write_cmd)
-
-        # Time the write operation.
-        write_completion_time = datetime.datetime.now()
-
-        # Return write completion time.
-        return write_completion_time
-
-
-    def _get_disk_usage(self, file_name):
-        """
-        Returns the disk usage of given file.
-
-        @param file_name: the name of the file.
-
-        @return: the disk usage as an integer.
-        """
-        # Check du stats.
-        cmd = '%s %s' % (self._DU_CMD, file_name)
-
-        # Expected value for  output = '1028\tfoo'
-        output = utils.system_output(cmd)
-
-        # Desired output = (1028, foo)
-        output = output.split('\t')
-
-        return int(output[0])
-
-
-    def _wait_until_data_flushed(self, start_time, max_wait_time):
-        """
-        Check to see if the sparse file size increases.
-
-        @param start_time: the time when data was actually written into the
-                cache.
-        @param max_wait_time: the max amount of time to wait.
-
-        @return: time waited as a datetime.timedelta object.
-        """
-        current_size = self._get_disk_usage(self.sparse_file)
-        flushed_size = current_size
-
-        logging.debug('current_size: %s' % current_size)
-        logging.debug('flushed_size: %s' % flushed_size)
-
-        # Keep checking until du value changes.
-        while current_size == flushed_size:
-            # Get flushed_size.
-            flushed_size = self._get_disk_usage(self.sparse_file)
-            logging.debug('flushed_size: %s' % flushed_size)
-            time.sleep(1)
-
-            # Check if data has been synced to disk.
-            if not self._needs_more_time(start_time, max_wait_time):
-                raise error.TestError('Data not flushed. Waited for %s minutes '
-                                      'for data to flush out.' % max_wait_time)
-
-        # Return time waited.
-        return datetime.datetime.now() - start_time
-
-
-    def initialize(self):
-        """
-        Initialize all private and global member variables.
-        """
-        self._DU_CMD = 'du'
-        self.partition = None
-        self.mount_point = ''
-        self.sparse_file = ''
-        self.result_map = {}
-        self.file_system = None
-
-
-    def run_once(self, mount_point, file_count, write_size,
-                 max_flush_time=1, file_system=None, remove_previous=False,
-                 sparse_file=os.path.join(os.getcwd(),'sparse_file'),
-                 old_cleanup=False):
-        """
-        Control execution of the test.
-
-        @param mount_point: the absolute path to the mount point.
-        @param file_count: the number of files to write.
-        @param write_size: the size of each file in MB.
-        @param max_flush_time: the maximum time to wait for the writeback to
-                flush dirty data to disk. Default = 1 minute.
-        @param file_system: the new file system to be mounted, if any.
-                Default = None.
-        @param remove_previous: boolean that allows the removal of previous
-                files before creating a new one. Default = False.
-        @param sparse_file: the absolute path to the sparse file.
-        @param old_cleanup: removes previous mount_point if it exists and is
-                not mounted. Default is False.
-        """
-        # Check validity of parameters.
-        self._check_parameters(mount_point, write_size, file_count,
-                               old_cleanup)
-
-        # Initialize class variables.
-        self.mount_point = mount_point
-        self.sparse_file = sparse_file
-        self.file_system = file_system
-
-        # Initialize partition values.
-        self._create_partition()
-
-        # Flush read and write cache.
-        utils.drop_caches()
-
-        # Start iterations.
-        logging.info('Starting test operations.')
-        test_start_time = datetime.datetime.now()
-        counter = 1
-
-        # Run test until file_count files are successfully written to disk.
-        while counter < file_count:
-            logging.info('Iteration %s.', counter)
-
-            # Write data to disk.
-            write_completion_time = self._write_data(self.mount_point, counter,
-                                                     write_size)
-            logging.debug('Write time:%s',
-                          write_completion_time.strftime("%H:%M:%S"))
-
-            # Wait until data get synced to disk.
-            time_taken = self._wait_until_data_flushed(write_completion_time,
-                                                       max_flush_time)
-
-            # Log time statistics.
-            logging.info('Time taken to flush data: %s seconds.',
-                         time_taken.seconds)
-
-            # Check if there is a need to remove the previously written file.
-            if remove_previous:
-                logging.debug('Removing previous file instance.')
-                os.remove(sparse_file)
-            else:
-                logging.debug('Not removing previous file instance.')
-
-            # Flush cache.
-            logging.debug('Flush cache between iterations.')
-            utils.drop_caches()
-
-            # Update the result map.
-            self.result_map[counter] = time_taken.seconds
-
-            # Increment the counter.
-            counter += 1
-
-
-    def postprocess(self):
-        """
-        Cleanup routine.
-        """
-        # Write out keyval map.
-        self.write_perf_keyval(self.result_map)
-
-        # Cleanup device.
-        self._reset_device()
-
-        logging.info('Test operations completed.')
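
The flush detection in _wait_until_data_flushed() is a deadline-bounded poll of du over the sparse file. The same pattern in isolation, as a sketch: the du parsing and timings mirror the test, but the function names here are made up and it assumes a du(1) binary is available:

    import subprocess
    import time

    def disk_usage_kb(path):
        """Return the leading du(1) figure for path, in 1K blocks."""
        out = subprocess.check_output(['du', path]).decode()
        return int(out.split('\t')[0])

    def wait_until_grown(path, max_wait_minutes=1, poll_interval=1.0):
        """Poll du(path) until it grows; raise if it has not grown by the deadline."""
        start = time.time()
        deadline = start + max_wait_minutes * 60
        baseline = disk_usage_kb(path)
        while disk_usage_kb(path) == baseline:
            if time.time() > deadline:
                raise RuntimeError('data not flushed within %s minute(s)'
                                   % max_wait_minutes)
            time.sleep(poll_interval)
        return time.time() - start
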
diff --git a/client/tests/wb_kupdate/wb_kupdate_unittest.py b/client/tests/wb_kupdate/wb_kupdate_unittest.py
deleted file mode 100755
index daa4e36..0000000
--- a/client/tests/wb_kupdate/wb_kupdate_unittest.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/python2
-
-import common
-import datetime
-import logging
-import os
-import time
-import unittest
-from autotest_lib.client.bin import test
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.test_utils import mock
-from autotest_lib.client.tests.wb_kupdate import wb_kupdate
-
-class WbKupdateUnitTest(unittest.TestCase):
-    def setUp(self):
-        """Set up all required variables for the Unittest.
-        """
-        self._logger = logging.getLogger()
-        self._wbkupdate_obj = WbKupdateSubclass()
-        self._god = mock.mock_god()
-
-    def test_needs_more_time(self):
-        """Tests the _needs_more_time method.
-        """
-        self._logger.info('Testing the "_needs_more_time" method.')
-
-        # Obvious failure - since start_time < start_time + 1.
-        self.assertTrue(self._wbkupdate_obj._needs_more_time(
-                start_time=datetime.datetime.now(),
-                duration=1))
-
-        # Check if 1 minute has elapsed since start_time.
-        self.assertFalse(self._wbkupdate_obj._needs_more_time(
-                start_time=datetime.datetime.now(),
-                duration=1,
-                _now=datetime.datetime.now() + datetime.timedelta(seconds=60)))
-
-    def test_wait_until_data_flushed_pass(self):
-        """Tests the _wait_until_data_flushed method.
-
-        This tests the "success" code path.
-        """
-        self._logger.info('Testing the "_wait_until_data_flushed" method - '
-                          'Success code path.')
-
-        # Creating stubs for required methods.
-        self._god.stub_function(self._wbkupdate_obj,
-                                "_get_disk_usage")
-
-        # Setting default return values for stub functions.
-        # Setting the initial size of the file.
-        self._wbkupdate_obj._get_disk_usage.expect_call('').and_return(10)
-        # Returning the same file size - forcing code path to enter loop.
-        self._wbkupdate_obj._get_disk_usage.expect_call('').and_return(10)
-        # Returning a greater file size - exiting the while loop.
-        self._wbkupdate_obj._get_disk_usage.expect_call('').and_return(11)
-
-        # Call the method.
-        self._wbkupdate_obj._wait_until_data_flushed(datetime.datetime.now(),
-                                                     1)
-
-        # Ensure all stubbed methods called.
-        self._god.check_playback()
-
-
-    def test_wait_until_data_flushed_fail(self):
-        """Tests the _wait_until_data_flushed method.
-
-        This tests the "failure" code path.
-        """
-        self._logger.info('Testing the "_wait_until_data_flushed" method - '
-                          'Failure code path.')
-        # Creating stubs for required methods.
-        self._god.stub_function(self._wbkupdate_obj,
-                                "_get_disk_usage")
-
-        # Setting default return values for stub functions.
-        # Setting the initial size of the file.
-        self._wbkupdate_obj._get_disk_usage.expect_call('').and_return(10)
-        # Returning the same file size - forcing code path to enter loop.
-        self._wbkupdate_obj._get_disk_usage.expect_call('').and_return(10)
-
-        # Call the method.
-        self.assertRaises(error.TestError,
-                          self._wbkupdate_obj._wait_until_data_flushed,
-                          start_time=datetime.datetime.now(),
-                          max_wait_time=0)
-
-        # Ensure all stubbed methods called.
-        self._god.check_playback()
-
-
-class WbKupdateSubclass(wb_kupdate.wb_kupdate):
-    """Sub-classing the wb_kupdate class.
-    """
-    def __init__(self):
-        """Empty constructor.
-        """
-        # Create all test defaults.
-        self.initialize()
-
-
-if __name__ == '__main__':
-    unittest.main()
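
The deleted unit test above exercises autotest's record/playback mock framework. A minimal standalone sketch of that pattern, assuming the autotest_lib client tree is importable; the Disk class and the '/tmp' argument are made up for illustration:

from autotest_lib.client.common_lib.test_utils import mock

class Disk(object):
    def usage(self, path):
        raise NotImplementedError  # never reached once the method is stubbed

god = mock.mock_god()
disk = Disk()
god.stub_function(disk, 'usage')                 # replace the method with a recorder
disk.usage.expect_call('/tmp').and_return(10)    # queue expected call and return value
disk.usage.expect_call('/tmp').and_return(11)

assert disk.usage('/tmp') == 10                  # calls are played back in order
assert disk.usage('/tmp') == 11
god.check_playback()                             # verifies every expected call was made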
diff --git a/client/tests/xfstests/control b/client/tests/xfstests/control
deleted file mode 100644
index c57961d..0000000
--- a/client/tests/xfstests/control
+++ /dev/null
@@ -1,61 +0,0 @@
-TIME="LONG"
-AUTHOR = "Cleber Rosa <cleber@redhat.com>"
-NAME = 'xfsFilesystemTestSuiteExt4Crypto'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-DOC = """
-xfstests in autotest
---------------------
-
-This is a wrapper for running xfstests inside autotest.
-
-The control file creates the files (1GB) and mounts them with a loopback device.
-TODO(gwendal): currently the lists of xfstests tests are hardcoded.
-A better solution would be to specify the class of tests to run and
-reimplement the class parsing in python.
-
-"""
-from autotest_lib.client.bin import xfstest_util
-
-xfs_env = xfstest_util.xfstests_env()
-xfs_env.setup_partitions(job, fs_types=['ext4'], crypto=True)
-
-#
-# Adapt to the list of tests you want to run
-#
-TEST_RANGE = {}
-TEST_RANGE['generic'] = ['%03i' % t for t in range(0, 360)]
-# Remove 347: crbug:616822
-TEST_RANGE['generic'].remove('347')
-TEST_RANGE['ext4'] = ['%03i' % t for t in range(0, 20)]
-TEST_RANGE['ext4'].append('271')
-TEST_RANGE['ext4'].extend(['%03i' % t for t in range(300, 310)])
-TEST_RANGE['shared'] = ['001', '002', '003', '006', '032', '051', '272',
-                        '289', '298']
-
-# Fail to produce results, autotest hangs:
-TEST_RANGE['ext4'].remove('307')
-TEST_RANGE['generic'].remove('013')
-TEST_RANGE['generic'].remove('070')
-TEST_RANGE['generic'].remove('083')
-TEST_RANGE['generic'].remove('224')
-
-# Removed: SCRATCH_MNT/file-1073745920-falloc:
-# Start block 31042 not multiple of sunit 4
-TEST_RANGE['generic'].remove('223')
-
-#
-# Finally, run the tests
-#
-try:
-    for fs_type in xfs_env.fs_types:
-        for test_dir in [fs_type, 'generic', 'shared']:
-            for test in TEST_RANGE[test_dir]:
-                tag = '%s.%s' % (test_dir, test)
-                result = job.run_test_detail('xfstests', test_dir=test_dir,
-                                             test_number=test, tag=tag)
-
-finally:
-    # Unmount the partition created
-    xfs_env.unmount_partitions()
\ No newline at end of file
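
The control file above and the control.plain variant below build their TEST_RANGE dictionary from zero-padded test numbers and then prune known-bad entries. A tiny plain-Python illustration of that idiom, using a shortened range:

TEST_RANGE = {}
TEST_RANGE['generic'] = ['%03i' % t for t in range(0, 5)]
print(TEST_RANGE['generic'])         # ['000', '001', '002', '003', '004']
TEST_RANGE['generic'].remove('003')  # drop a test by its zero-padded id
print(TEST_RANGE['generic'])         # ['000', '001', '002', '004']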
diff --git a/client/tests/xfstests/control.plain b/client/tests/xfstests/control.plain
deleted file mode 100644
index bf2e38d..0000000
--- a/client/tests/xfstests/control.plain
+++ /dev/null
@@ -1,61 +0,0 @@
-TIME="LONG"
-AUTHOR = "Cleber Rosa <cleber@redhat.com>"
-NAME = 'xfsFilesystemTestSuiteExt4Plain'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-DOC = """
-xfstests in autotest
---------------------
-
-This is a wrapper for running xfstests inside autotest.
-
-The control file creates the files (1GB) and mounts them with a loopback device.
-TODO(gwendal): currently the lists of xfstests tests are hardcoded.
-A better solution would be to specify the class of tests to run and
-reimplement the class parsing in python.
-
-"""
-from autotest_lib.client.bin import xfstest_util
-
-xfs_env = xfstest_util.xfstests_env()
-xfs_env.setup_partitions(job, fs_types=['ext4'])
-
-#
-# Adapt to the list of tests you want to run
-#
-TEST_RANGE = {}
-TEST_RANGE['generic'] = ['%03i' % t for t in range(0, 360)]
-# Remove 347: crbug:616822
-TEST_RANGE['generic'].remove('347')
-TEST_RANGE['ext4'] = ['%03i' % t for t in range(0, 20)]
-TEST_RANGE['ext4'].append('271')
-TEST_RANGE['ext4'].extend(['%03i' % t for t in range(300, 310)])
-TEST_RANGE['shared'] = ['001', '002', '003', '006', '032', '051', '272',
-                        '289', '298']
-
-# Fail to produce results, autotest hangs:
-TEST_RANGE['ext4'].remove('307')
-TEST_RANGE['generic'].remove('013')
-TEST_RANGE['generic'].remove('070')
-TEST_RANGE['generic'].remove('083')
-TEST_RANGE['generic'].remove('224')
-
-# Removed: SCRATCH_MNT/file-1073745920-falloc:
-# Start block 31042 not multiple of sunit 4
-TEST_RANGE['generic'].remove('223')
-
-#
-# Finally, run the tests
-#
-try:
-    for fs_type in xfs_env.fs_types:
-        for test_dir in [fs_type, 'generic', 'shared']:
-            for test in TEST_RANGE[test_dir]:
-                tag = '%s.%s' % (test_dir, test)
-                result = job.run_test_detail('xfstests', test_dir=test_dir,
-                                             test_number=test, tag=tag)
-
-finally:
-    # Unmount the partition created
-    xfs_env.unmount_partitions()
\ No newline at end of file
diff --git a/client/tests/xfstests/control.standalone_crypto b/client/tests/xfstests/control.standalone_crypto
deleted file mode 100644
index 0c8ef6f..0000000
--- a/client/tests/xfstests/control.standalone_crypto
+++ /dev/null
@@ -1,29 +0,0 @@
-TIME="LONG"
-AUTHOR = "Cleber Rosa <cleber@redhat.com>"
-NAME = 'xfsFilesystemTestSuiteStandaloneExt4Crypto'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-DOC = """
-xfstests in autotest
---------------------
-
-This is a wrapper for running xfstests inside autotest.
-
-The control file creates the files (1GB) and mounts them with a loopback device.
-
-"""
-from autotest_lib.client.bin import xfstest_util
-
-xfs_env = xfstest_util.xfstests_env()
-xfs_env.setup_partitions(job, fs_types=['ext4'], crypto=True)
-
-#
-# Finally, run the tests
-#
-try:
-    for fs_type in xfs_env.fs_types:
-        result = job.run_test_detail('xfstests', group='auto')
-
-finally:
-    xfs_env.unmount_partitions()
\ No newline at end of file
diff --git a/client/tests/xfstests/control.standalone_plain b/client/tests/xfstests/control.standalone_plain
deleted file mode 100644
index 3e3e1f6..0000000
--- a/client/tests/xfstests/control.standalone_plain
+++ /dev/null
@@ -1,29 +0,0 @@
-TIME="LONG"
-AUTHOR = "Cleber Rosa <cleber@redhat.com>"
-NAME = 'xfsFilesystemTestSuiteStandaloneExt4Plain'
-TEST_CLASS = 'kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-DOC = """
-xfstests in autotest
---------------------
-
-This is a wrapper for running xfstests inside autotest.
-
-The control file creates the files (1GB) and mounts them with a loopback device.
-
-"""
-from autotest_lib.client.bin import xfstest_util
-
-xfs_env = xfstest_util.xfstests_env()
-xfs_env.setup_partitions(job, fs_types=['ext4'])
-
-#
-# Finally, run the tests
-#
-try:
-    for fs_type in xfs_env.fs_types:
-        result = job.run_test_detail('xfstests', group='auto')
-
-finally:
-    xfs_env.unmount_partitions()
\ No newline at end of file
diff --git a/client/tests/xfstests/xfstests.py b/client/tests/xfstests/xfstests.py
deleted file mode 100644
index 718106e..0000000
--- a/client/tests/xfstests/xfstests.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os, re, glob, logging, shutil
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import test, utils
-
-class xfstests(test.test):
-    """
-    Runs a single test of the xfstests suite.
-    """
-
-    XFS_TESTS_PATH='/usr/local/xfstests'
-    XFS_EXCLUDE_FILENAME = '/tmp/.xfstests.exclude'
-    version = 2
-
-    PASSED_RE = re.compile(r'Passed all \d+ tests')
-    FAILED_RE = re.compile(r'Failed \d+ of \d+ tests')
-    TEST_RE = re.compile(r'(?P<name>\d+)\.out')
-    NA_RE = re.compile(r'Passed all 0 tests')
-    NA_DETAIL_RE = re.compile(r'(\d{3})\s*(\[not run\])\s*(.*)')
-
-
-    def _get_available_tests(self, fs):
-        os.chdir(os.path.join(self.XFS_TESTS_PATH, 'tests', fs))
-        tests = glob.glob('*.out*')
-        tests_list = []
-        for t in tests:
-            t_m = self.TEST_RE.match(t)
-            if t_m:
-                t_name = t_m.group('name')
-                if t_name not in tests_list and os.path.exists(t_name):
-                    tests_list.append(t_name)
-        tests_list.sort()
-        return tests_list
-
-
-    def _copy_result_test(self, t):
-        for ext in ('full', 'dmesg'):
-            result_file = os.path.join('results', '.'.join([t, ext]))
-            result_file_loc = os.path.join(self.XFS_TESTS_PATH, result_file)
-            test_name = t.replace('/','_')
-            result_file_dest = os.path.join(
-                    self.resultsdir, '.'.join([test_name, ext]))
-            if os.path.isfile(result_file_loc):
-                shutil.copyfile(result_file_loc, result_file_dest)
-
-
-    def _run_sub_test(self, t):
-        os.chdir(self.XFS_TESTS_PATH)
-        logging.debug("Environment variables: %s", os.environ)
-        output = utils.system_output(
-                'bash ./check %s' % os.path.join('tests', t),
-                ignore_status=True,
-                retain_output=True)
-        lines = output.split('\n')
-        result_line = lines[-2]
-        self._copy_result_test(t)
-
-        if self.NA_RE.match(result_line):
-            detail_line = lines[-3]
-            match = self.NA_DETAIL_RE.match(detail_line)
-            if match is not None:
-                error_msg = match.groups()[2]
-            else:
-                error_msg = 'Test dependency failed, test not run'
-            raise error.TestNAError(error_msg)
-
-        elif self.FAILED_RE.match(result_line):
-            raise error.TestError('Test error, check debug logs for complete '
-                                  'test output')
-
-        elif self.PASSED_RE.match(result_line):
-            return
-
-        else:
-            raise error.TestError('Could not assert test success or failure, '
-                                  'assuming failure. Please check debug logs')
-
-
-    def _run_standalone(self, group):
-        os.chdir(self.XFS_TESTS_PATH)
-        logging.debug("Environment variables: %s", os.environ)
-        output = utils.system_output(
-                'bash ./check -E %s -g %s' % (self.XFS_EXCLUDE_FILENAME, group),
-                ignore_status=True,
-                retain_output=True)
-        lines = output.split('\n')
-        result_line = lines[-2]
-
-        if self.NA_RE.match(result_line):
-            raise error.TestNAError('Test dependency failed, no tests run')
-
-        elif self.FAILED_RE.match(result_line):
-            failures_line = re.match(r'Failures: (?P<tests>.*)', lines[-3])
-            if failures_line:
-                test_failures = failures_line.group('tests')
-                tests = test_failures.split(' ')
-                for t in tests:
-                    self._copy_result_test(t)
-
-            raise error.TestError('%s. Check debug logs for complete '
-                                  'test output' % result_line)
-
-        elif self.PASSED_RE.match(result_line):
-            return
-        else:
-            raise error.TestError('Could not assert success or failure, '
-                                  'assuming failure. Please check debug logs')
-
-
-    def run_once(self, test_dir='generic', test_number='000', group=None,
-                 exclude=None):
-        if group:
-            excludeFile = open(self.XFS_EXCLUDE_FILENAME, 'w')
-            for t in exclude or []:
-                excludeFile.write('%s\n' % t)
-            excludeFile.close()
-            logging.debug("Running tests: group %s", group )
-            self._run_standalone(group)
-            if os.path.exists(self.XFS_EXCLUDE_FILENAME):
-                os.remove(self.XFS_EXCLUDE_FILENAME)
-        else:
-            if test_number == '000':
-                logging.debug('Dummy test to setup xfstests')
-                return
-
-            if test_number not in self._get_available_tests(test_dir):
-                raise error.TestNAError(
-                    'test file %s/%s not found' % (test_dir, test_number))
-
-            test_name = os.path.join(test_dir, test_number)
-            logging.debug("Running test: %s", test_name)
-            self._run_sub_test(test_name)
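
The deleted wrapper above decides pass/fail/not-run by matching regular expressions against the second-to-last line of the ./check output. A self-contained sketch of that classification, reusing the wrapper's patterns; the sample summary lines are hypothetical:

import re

PASSED_RE = re.compile(r'Passed all \d+ tests')
FAILED_RE = re.compile(r'Failed \d+ of \d+ tests')
NA_RE = re.compile(r'Passed all 0 tests')

def classify(result_line):
    # Same order as the deleted wrapper: not-run is checked first,
    # since 'Passed all 0 tests' also matches PASSED_RE.
    if NA_RE.match(result_line):
        return 'not run'
    if FAILED_RE.match(result_line):
        return 'failed'
    if PASSED_RE.match(result_line):
        return 'passed'
    return 'unknown'

print(classify('Passed all 0 tests'))    # not run
print(classify('Failed 2 of 37 tests'))  # failed
print(classify('Passed all 37 tests'))   # passed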
diff --git a/client/tests/xmtest/control b/client/tests/xmtest/control
deleted file mode 100644
index 55393be..0000000
--- a/client/tests/xmtest/control
+++ /dev/null
@@ -1,12 +0,0 @@
-NAME = 'Xen Test'
-AUTHOR = 'Paul Larson <pl@us.ibm.com>'
-TIME = 'MEDIUM'
-TEST_CLASS = 'Kernel'
-TEST_CATEGORY = 'Functional'
-TEST_TYPE = 'client'
-
-DOC = """
-This suite provides a framework for testing the Xen userspace tools.
-"""
-
-job.run_test('xmtest', args='-e nobody@nowhere.org -d xmtest')
diff --git a/client/tests/xmtest/xm-test.tar.bz2 b/client/tests/xmtest/xm-test.tar.bz2
deleted file mode 100644
index 759eb24..0000000
--- a/client/tests/xmtest/xm-test.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/tests/xmtest/xmtest.py b/client/tests/xmtest/xmtest.py
deleted file mode 100644
index eb873de..0000000
--- a/client/tests/xmtest/xmtest.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# (C) Copyright IBM Corp. 2006
-# Author: Paul Larson <pl@us.ibm.com>
-# Description:
-#       Autotest script for running Xen xm-test
-#       This should be run from a Xen domain0
-import os
-from autotest_lib.client.bin import test, utils
-
-
-class xmtest(test.test):
-    version = 1
-
-    def initialize(self):
-        self.job.require_gcc()
-
-
-    # This test expects just the xm-test directory, as a tarball
-    # from the Xen source tree
-    # hg clone http://xenbits.xensource.com/xen-unstable.hg
-    # or wget http://www.cl.cam.ac.uk/Research/SRG/netos/xen/downloads/xen-unstable-src.tgz
-    # cd tools
-    # tar -czf xm-test.tgz xm-test
-    def setup(self, tarball = 'xm-test.tar.bz2'):
-        tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
-        utils.extract_tarball_to_dir(tarball, self.srcdir)
-        os.chdir(self.srcdir)
-
-        utils.system('./autogen')
-        utils.configure()
-        utils.make('existing')
-
-
-    def execute(self, args = ''):
-        os.chdir(self.srcdir)
-        utils.system('./runtest.sh ' + args)
-        utils.system('mv xmtest.* ' + self.resultsdir)
diff --git a/client/tools/OWNERS b/client/tools/OWNERS
new file mode 100644
index 0000000..5804625
--- /dev/null
+++ b/client/tools/OWNERS
@@ -0,0 +1 @@
+include chromiumos/config:/owners/testservice
diff --git a/client/tools/autotest b/client/tools/autotest
index 2d4aad8..b9e72b3 100755
--- a/client/tools/autotest
+++ b/client/tools/autotest
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
diff --git a/client/tools/avgtime b/client/tools/avgtime
index 283567b..ae9e3db 100755
--- a/client/tools/avgtime
+++ b/client/tools/avgtime
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
diff --git a/client/tools/crash_handler.py b/client/tools/crash_handler.py
index 194e605..9c4a273 100755
--- a/client/tools/crash_handler.py
+++ b/client/tools/crash_handler.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 """
 Simple crash handling application for autotest
 
@@ -9,7 +9,6 @@
 from __future__ import division
 from __future__ import print_function
 
-import commands
 import glob
 import os
 import random
@@ -17,6 +16,7 @@
 import shutil
 import six
 import string
+import subprocess
 import sys
 import syslog
 import time
@@ -120,7 +120,7 @@
     @param path: Path to core file.
     """
     full_exe_path = None
-    output = commands.getoutput('gdb -c %s batch' % path)
+    output = subprocess.getoutput('gdb -c %s batch' % path)
     path_pattern = re.compile("Core was generated by `([^\0]+)'", re.IGNORECASE)
     match = re.findall(path_pattern, output)
     for m in match:
@@ -156,7 +156,7 @@
         # Take a backtrace from the running program
         gdb_cmd = ('gdb -e %s -c %s -x %s -n -batch -quiet' %
                    (exe_path, path, gdb_command_path))
-        backtrace = commands.getoutput(gdb_cmd)
+        backtrace = subprocess.getoutput(gdb_cmd)
         # Sanitize output before passing it to the report
         backtrace = six.ensure_text(backtrace, 'utf-8', 'ignore')
     else:
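
The crash_handler.py hunk above replaces the Python 2-only commands module with subprocess; in Python 3, subprocess.getoutput is the direct equivalent of commands.getoutput. A quick illustration:

import subprocess

# Runs the command through the shell and returns its combined stdout/stderr
# as a string with the trailing newline stripped.
output = subprocess.getoutput('echo hello')
print(output)             # hello

# When the exit status is also needed, getstatusoutput returns (status, output).
status, output = subprocess.getstatusoutput('true')
print(status, output)     # 0, with empty output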
diff --git a/client/tools/diffprofile b/client/tools/diffprofile
index bc7bdb6..508f528 100755
--- a/client/tools/diffprofile
+++ b/client/tools/diffprofile
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright Martin J. Bligh (mbligh@google.com)
 # Released under the GPL, v2
 
diff --git a/client/tools/html_report.py b/client/tools/html_report.py
index 783b93e..c389012 100755
--- a/client/tools/html_report.py
+++ b/client/tools/html_report.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 """
 Module used to parse the autotest job results and generate an HTML report.
 
diff --git a/client/tools/make_clean b/client/tools/make_clean
index 4283977..9cee4f5 100755
--- a/client/tools/make_clean
+++ b/client/tools/make_clean
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
diff --git a/client/tools/oprofile_diff b/client/tools/oprofile_diff
index 2c13124..0e16ff9 100755
--- a/client/tools/oprofile_diff
+++ b/client/tools/oprofile_diff
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
diff --git a/contrib/abortjob b/contrib/abortjob
deleted file mode 100755
index 560b82f..0000000
--- a/contrib/abortjob
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python2
-# A utility script used to abort jobs.
-#
-# Usage:
-#   ./abort_job job_id1 job_id2 ...
-
-import sys
-
-import common
-
-from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
-
-afe = frontend_wrappers.RetryingAFE(timeout_min=5, delay_sec=10)
-jobs_to_abort = sys.argv[1:]
-afe.abort_jobs(jobs_to_abort)
-print 'jobs aborted: %s' % jobs_to_abort
diff --git a/contrib/coverage.py b/contrib/coverage.py
deleted file mode 100755
index c00ce2a..0000000
--- a/contrib/coverage.py
+++ /dev/null
@@ -1,1124 +0,0 @@
-#!/usr/bin/python2
-#
-#             Perforce Defect Tracking Integration Project
-#              <http://www.ravenbrook.com/project/p4dti/>
-#
-#                   COVERAGE.PY -- COVERAGE TESTING
-#
-#             Gareth Rees, Ravenbrook Limited, 2001-12-04
-#                     Ned Batchelder, 2004-12-12
-#         http://nedbatchelder.com/code/modules/coverage.html
-#
-#
-# 1. INTRODUCTION
-#
-# This module provides coverage testing for Python code.
-#
-# The intended readership is all Python developers.
-#
-# This document is not confidential.
-#
-# See [GDR 2001-12-04a] for the command-line interface, programmatic
-# interface and limitations.  See [GDR 2001-12-04b] for requirements and
-# design.
-
-import pdb
-
-r"""Usage:
-
-coverage.py -x [-p] MODULE.py [ARG1 ARG2 ...]
-    Execute module, passing the given command-line arguments, collecting
-    coverage data. With the -p option, write to a temporary file containing
-    the machine name and process ID.
-
-coverage.py -e
-    Erase collected coverage data.
-
-coverage.py -c
-    Collect data from multiple coverage files (as created by -p option above)
-    and store it into a single file representing the union of the coverage.
-
-coverage.py -r [-m] [-o dir1,dir2,...] FILE1 FILE2 ...
-    Report on the statement coverage for the given files.  With the -m
-    option, show line numbers of the statements that weren't executed.
-
-coverage.py -a [-d dir] [-o dir1,dir2,...] FILE1 FILE2 ...
-    Make annotated copies of the given files, marking statements that
-    are executed with > and statements that are missed with !.  With
-    the -d option, make the copies in that directory.  Without the -d
-    option, make each copy in the same directory as the original.
-
--o dir,dir2,...
-  Omit reporting or annotating files when their filename path starts with
-  a directory listed in the omit list.
-  e.g. python coverage.py -i -r -o c:\python23,lib\enthought\traits
-
-Coverage data is saved in the file .coverage by default.  Set the
-COVERAGE_FILE environment variable to save it somewhere else."""
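# A hypothetical end-to-end example of the command-line usage described above
# (script and module names are made up):
#   python coverage.py -x my_test.py arg1            # run the script, recording coverage
#   python coverage.py -r -m my_module.py            # report coverage; -m lists missed lines
#   python coverage.py -a -d annotated my_module.py  # write an annotated my_module.py,cover
#   python coverage.py -e                            # erase the accumulated coverage data
# Data accumulates in .coverage (or $COVERAGE_FILE) across -x runs until erased.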
-
-__version__ = "2.78.20070930"    # see detailed history at the end of this file.
-
-import compiler
-import compiler.visitor
-import glob
-import os
-import re
-import string
-import symbol
-import sys
-import threading
-import token
-import types
-from socket import gethostname
-
-# Python version compatibility
-try:
-    strclass = basestring   # new to 2.3
-except:
-    strclass = str
-
-# 2. IMPLEMENTATION
-#
-# This uses the "singleton" pattern.
-#
-# The word "morf" means a module object (from which the source file can
-# be deduced by suitable manipulation of the __file__ attribute) or a
-# filename.
-#
-# When we generate a coverage report we have to canonicalize every
-# filename in the coverage dictionary just in case it refers to the
-# module we are reporting on.  It seems a shame to throw away this
-# information so the data in the coverage dictionary is transferred to
-# the 'cexecuted' dictionary under the canonical filenames.
-#
-# The coverage dictionary is called "c" and the trace function "t".  The
-# reason for these short names is that Python looks up variables by name
-# at runtime and so execution time depends on the length of variables!
-# In the bottleneck of this application it's appropriate to abbreviate
-# names to increase speed.
-
-class StatementFindingAstVisitor(compiler.visitor.ASTVisitor):
-    """ A visitor for a parsed Abstract Syntax Tree which finds executable
-        statements.
-    """
-    def __init__(self, statements, excluded, suite_spots):
-        compiler.visitor.ASTVisitor.__init__(self)
-        self.statements = statements
-        self.excluded = excluded
-        self.suite_spots = suite_spots
-        self.excluding_suite = 0
-
-    def doRecursive(self, node):
-        for n in node.getChildNodes():
-            self.dispatch(n)
-
-    visitStmt = visitModule = doRecursive
-
-    def doCode(self, node):
-        if hasattr(node, 'decorators') and node.decorators:
-            self.dispatch(node.decorators)
-            self.recordAndDispatch(node.code)
-        else:
-            self.doSuite(node, node.code)
-
-    visitFunction = visitClass = doCode
-
-    def getFirstLine(self, node):
-        # Find the first line in the tree node.
-        lineno = node.lineno
-        for n in node.getChildNodes():
-            f = self.getFirstLine(n)
-            if lineno and f:
-                lineno = min(lineno, f)
-            else:
-                lineno = lineno or f
-        return lineno
-
-    def getLastLine(self, node):
-        # Find the last line in the tree node.
-        lineno = node.lineno
-        for n in node.getChildNodes():
-            lineno = max(lineno, self.getLastLine(n))
-        return lineno
-
-    def doStatement(self, node):
-        self.recordLine(self.getFirstLine(node))
-
-    visitAssert = visitAssign = visitAssTuple = visitPrint = \
-        visitPrintnl = visitRaise = visitSubscript = visitDecorators = \
-        doStatement
-
-    def visitPass(self, node):
-        # Pass statements have weird interactions with docstrings.  If this
-        # pass statement is part of one of those pairs, claim that the statement
-        # is on the later of the two lines.
-        l = node.lineno
-        if l:
-            lines = self.suite_spots.get(l, [l,l])
-            self.statements[lines[1]] = 1
-
-    def visitDiscard(self, node):
-        # Discard nodes are statements that execute an expression, but then
-        # discard the results.  This includes function calls, so we can't
-        # ignore them all.  But if the expression is a constant, the statement
-        # won't be "executed", so don't count it now.
-        if node.expr.__class__.__name__ != 'Const':
-            self.doStatement(node)
-
-    def recordNodeLine(self, node):
-        # Stmt nodes often have None, but shouldn't claim the first line of
-        # their children (because the first child might be an ignorable line
-        # like "global a").
-        if node.__class__.__name__ != 'Stmt':
-            return self.recordLine(self.getFirstLine(node))
-        else:
-            return 0
-
-    def recordLine(self, lineno):
-        # Returns a bool, whether the line is included or excluded.
-        if lineno:
-            # Multi-line tests introducing suites have to get charged to their
-            # keyword.
-            if lineno in self.suite_spots:
-                lineno = self.suite_spots[lineno][0]
-            # If we're inside an excluded suite, record that this line was
-            # excluded.
-            if self.excluding_suite:
-                self.excluded[lineno] = 1
-                return 0
-            # If this line is excluded, or suite_spots maps this line to
-            # another line that is excluded, then we're excluded.
-            elif self.excluded.has_key(lineno) or \
-                 self.suite_spots.has_key(lineno) and \
-                 self.excluded.has_key(self.suite_spots[lineno][1]):
-                return 0
-            # Otherwise, this is an executable line.
-            else:
-                self.statements[lineno] = 1
-                return 1
-        return 0
-
-    default = recordNodeLine
-
-    def recordAndDispatch(self, node):
-        self.recordNodeLine(node)
-        self.dispatch(node)
-
-    def doSuite(self, intro, body, exclude=0):
-        exsuite = self.excluding_suite
-        if exclude or (intro and not self.recordNodeLine(intro)):
-            self.excluding_suite = 1
-        self.recordAndDispatch(body)
-        self.excluding_suite = exsuite
-
-    def doPlainWordSuite(self, prevsuite, suite):
-        # Finding the exclude lines for else's is tricky, because they aren't
-        # present in the compiler parse tree.  Look at the previous suite,
-        # and find its last line.  If any line between there and the else's
-        # first line are excluded, then we exclude the else.
-        lastprev = self.getLastLine(prevsuite)
-        firstelse = self.getFirstLine(suite)
-        for l in range(lastprev+1, firstelse):
-            if self.suite_spots.has_key(l):
-                self.doSuite(None, suite, exclude=self.excluded.has_key(l))
-                break
-        else:
-            self.doSuite(None, suite)
-
-    def doElse(self, prevsuite, node):
-        if node.else_:
-            self.doPlainWordSuite(prevsuite, node.else_)
-
-    def visitFor(self, node):
-        self.doSuite(node, node.body)
-        self.doElse(node.body, node)
-
-    visitWhile = visitFor
-
-    def visitIf(self, node):
-        # The first test has to be handled separately from the rest.
-        # The first test is credited to the line with the "if", but the others
-        # are credited to the line with the test for the elif.
-        self.doSuite(node, node.tests[0][1])
-        for t, n in node.tests[1:]:
-            self.doSuite(t, n)
-        self.doElse(node.tests[-1][1], node)
-
-    def visitTryExcept(self, node):
-        self.doSuite(node, node.body)
-        for i in range(len(node.handlers)):
-            a, b, h = node.handlers[i]
-            if not a:
-                # It's a plain "except:".  Find the previous suite.
-                if i > 0:
-                    prev = node.handlers[i-1][2]
-                else:
-                    prev = node.body
-                self.doPlainWordSuite(prev, h)
-            else:
-                self.doSuite(a, h)
-        self.doElse(node.handlers[-1][2], node)
-
-    def visitTryFinally(self, node):
-        self.doSuite(node, node.body)
-        self.doPlainWordSuite(node.body, node.final)
-
-    def visitWith(self, node):
-        self.doSuite(node, node.body)
-
-    def visitGlobal(self, node):
-        # "global" statements don't execute like others (they don't call the
-        # trace function), so don't record their line numbers.
-        pass
-
-the_coverage = None
-
-class CoverageException(Exception): pass
-
-class coverage:
-    # Name of the cache file (unless environment variable is set).
-    cache_default = ".coverage"
-
-    # Environment variable naming the cache file.
-    cache_env = "COVERAGE_FILE"
-
-    # A dictionary with an entry for (Python source file name, line number
-    # in that file) if that line has been executed.
-    c = {}
-
-    # A map from canonical Python source file name to a dictionary in
-    # which there's an entry for each line number that has been
-    # executed.
-    cexecuted = {}
-
-    # Cache of results of calling the analysis2() method, so that you can
-    # specify both -r and -a without doing double work.
-    analysis_cache = {}
-
-    # Cache of results of calling the canonical_filename() method, to
-    # avoid duplicating work.
-    canonical_filename_cache = {}
-
-    def __init__(self):
-        global the_coverage
-        if the_coverage:
-            raise CoverageException("Only one coverage object allowed.")
-        self.usecache = 1
-        self.cache = None
-        self.parallel_mode = False
-        self.exclude_re = ''
-        self.nesting = 0
-        self.cstack = []
-        self.xstack = []
-        self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.sep)
-        self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]')
-
-    # t(f, x, y).  This method is passed to sys.settrace as a trace function.
-    # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and
-    # the arguments and return value of the trace function.
-    # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code
-    # objects.
-
-    def t(self, f, w, unused):                                 #pragma: no cover
-        if w == 'line':
-            #print "Executing %s @ %d" % (f.f_code.co_filename, f.f_lineno)
-            self.c[(f.f_code.co_filename, f.f_lineno)] = 1
-            for c in self.cstack:
-                c[(f.f_code.co_filename, f.f_lineno)] = 1
-        return self.t
-
-    def help(self, error=None):     #pragma: no cover
-        if error:
-            print error
-            print
-        print __doc__
-        sys.exit(1)
-
-    def command_line(self, argv, help_fn=None):
-        import getopt
-        help_fn = help_fn or self.help
-        settings = {}
-        optmap = {
-            '-a': 'annotate',
-            '-c': 'collect',
-            '-d:': 'directory=',
-            '-e': 'erase',
-            '-h': 'help',
-            '-i': 'ignore-errors',
-            '-m': 'show-missing',
-            '-p': 'parallel-mode',
-            '-r': 'report',
-            '-x': 'execute',
-            '-o:': 'omit=',
-            }
-        short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '')
-        long_opts = optmap.values()
-        options, args = getopt.getopt(argv, short_opts, long_opts)
-
-        for o, a in options:
-            if optmap.has_key(o):
-                settings[optmap[o]] = 1
-            elif optmap.has_key(o + ':'):
-                settings[optmap[o + ':']] = a
-            elif o[2:] in long_opts:
-                settings[o[2:]] = 1
-            elif o[2:] + '=' in long_opts:
-                settings[o[2:]+'='] = a
-            else:       #pragma: no cover
-                pass    # Can't get here, because getopt won't return anything unknown.
-
-        if settings.get('help'):
-            help_fn()
-
-        for i in ['erase', 'execute']:
-            for j in ['annotate', 'report', 'collect']:
-                if settings.get(i) and settings.get(j):
-                    help_fn("You can't specify the '%s' and '%s' "
-                              "options at the same time." % (i, j))
-
-        args_needed = (settings.get('execute')
-                       or settings.get('annotate')
-                       or settings.get('report'))
-        action = (settings.get('erase')
-                  or settings.get('collect')
-                  or args_needed)
-        if not action:
-            help_fn("You must specify at least one of -e, -x, -c, -r, or -a.")
-        if not args_needed and args:
-            help_fn("Unexpected arguments: %s" % " ".join(args))
-
-        self.parallel_mode = settings.get('parallel-mode')
-        self.get_ready()
-
-        if settings.get('erase'):
-            self.erase()
-        if settings.get('execute'):
-            if not args:
-                help_fn("Nothing to do.")
-            sys.argv = args
-            self.start()
-            import __main__
-            sys.path[0] = os.path.dirname(sys.argv[0])
-            # the line below is needed since otherwise __file__ gets clobbered
-            __main__.__dict__["__file__"] = sys.argv[0]
-            execfile(sys.argv[0], __main__.__dict__)
-        if settings.get('collect'):
-            self.collect()
-        if not args:
-            args = self.cexecuted.keys()
-
-        ignore_errors = settings.get('ignore-errors')
-        show_missing = settings.get('show-missing')
-        directory = settings.get('directory=')
-
-        omit = settings.get('omit=')
-        if omit is not None:
-            omit = omit.split(',')
-        else:
-            omit = []
-
-        if settings.get('report'):
-            self.report(args, show_missing, ignore_errors, omit_prefixes=omit)
-        if settings.get('annotate'):
-            self.annotate(args, directory, ignore_errors, omit_prefixes=omit)
-
-    def use_cache(self, usecache, cache_file=None):
-        self.usecache = usecache
-        if cache_file and not self.cache:
-            self.cache_default = cache_file
-
-    def get_ready(self, parallel_mode=False):
-        if self.usecache and not self.cache:
-            self.cache = os.environ.get(self.cache_env, self.cache_default)
-            if self.parallel_mode:
-                self.cache += "." + gethostname() + "." + str(os.getpid())
-            self.restore()
-        self.analysis_cache = {}
-
-    def start(self, parallel_mode=False):
-        self.get_ready()
-        if self.nesting == 0:                               #pragma: no cover
-            sys.settrace(self.t)
-            if hasattr(threading, 'settrace'):
-                threading.settrace(self.t)
-        self.nesting += 1
-
-    def stop(self):
-        self.nesting -= 1
-        if self.nesting == 0:                               #pragma: no cover
-            sys.settrace(None)
-            if hasattr(threading, 'settrace'):
-                threading.settrace(None)
-
-    def erase(self):
-        self.get_ready()
-        self.c = {}
-        self.analysis_cache = {}
-        self.cexecuted = {}
-        if self.cache and os.path.exists(self.cache):
-            os.remove(self.cache)
-
-    def exclude(self, re):
-        if self.exclude_re:
-            self.exclude_re += "|"
-        self.exclude_re += "(" + re + ")"
-
-    def begin_recursive(self):
-        self.cstack.append(self.c)
-        self.xstack.append(self.exclude_re)
-
-    def end_recursive(self):
-        self.c = self.cstack.pop()
-        self.exclude_re = self.xstack.pop()
-
-    # save().  Save coverage data to the coverage cache.
-
-    def save(self):
-        if self.usecache and self.cache:
-            self.canonicalize_filenames()
-            cache = open(self.cache, 'wb')
-            import marshal
-            marshal.dump(self.cexecuted, cache)
-            cache.close()
-
-    # restore().  Restore coverage data from the coverage cache (if it exists).
-
-    def restore(self):
-        self.c = {}
-        self.cexecuted = {}
-        assert self.usecache
-        if os.path.exists(self.cache):
-            self.cexecuted = self.restore_file(self.cache)
-
-    def restore_file(self, file_name):
-        try:
-            cache = open(file_name, 'rb')
-            import marshal
-            cexecuted = marshal.load(cache)
-            cache.close()
-            if isinstance(cexecuted, types.DictType):
-                return cexecuted
-            else:
-                return {}
-        except:
-            return {}
-
-    # collect(). Collect data in multiple files produced by parallel mode
-
-    def collect(self):
-        cache_dir, local = os.path.split(self.cache)
-        for f in os.listdir(cache_dir or '.'):
-            if not f.startswith(local):
-                continue
-
-            full_path = os.path.join(cache_dir, f)
-            cexecuted = self.restore_file(full_path)
-            self.merge_data(cexecuted)
-
-    def merge_data(self, new_data):
-        for file_name, file_data in new_data.items():
-            if self.cexecuted.has_key(file_name):
-                self.merge_file_data(self.cexecuted[file_name], file_data)
-            else:
-                self.cexecuted[file_name] = file_data
-
-    def merge_file_data(self, cache_data, new_data):
-        for line_number in new_data.keys():
-            if not cache_data.has_key(line_number):
-                cache_data[line_number] = new_data[line_number]
-
-    # canonical_filename(filename).  Return a canonical filename for the
-    # file (that is, an absolute path with no redundant components and
-    # normalized case).  See [GDR 2001-12-04b, 3.3].
-
-    def canonical_filename(self, filename):
-        if not self.canonical_filename_cache.has_key(filename):
-            f = filename
-            if os.path.isabs(f) and not os.path.exists(f):
-                f = os.path.basename(f)
-            if not os.path.isabs(f):
-                for path in [os.curdir] + sys.path:
-                    g = os.path.join(path, f)
-                    if os.path.exists(g):
-                        f = g
-                        break
-            cf = os.path.normcase(os.path.abspath(f))
-            self.canonical_filename_cache[filename] = cf
-        return self.canonical_filename_cache[filename]
-
-    # canonicalize_filenames().  Copy results from "c" to "cexecuted",
-    # canonicalizing filenames on the way.  Clear the "c" map.
-
-    def canonicalize_filenames(self):
-        for filename, lineno in self.c.keys():
-            if filename == '<string>':
-                # Can't do anything useful with exec'd strings, so skip them.
-                continue
-            f = self.canonical_filename(filename)
-            if not self.cexecuted.has_key(f):
-                self.cexecuted[f] = {}
-            self.cexecuted[f][lineno] = 1
-        self.c = {}
-
-    # morf_filename(morf).  Return the filename for a module or file.
-
-    def morf_filename(self, morf):
-        if isinstance(morf, types.ModuleType):
-            if not hasattr(morf, '__file__'):
-                raise CoverageException("Module has no __file__ attribute.")
-            f = morf.__file__
-        else:
-            f = morf
-        return self.canonical_filename(f)
-
-    # analyze_morf(morf).  Analyze the module or filename passed as
-    # the argument.  If the source code can't be found, raise an error.
-    # Otherwise, return a tuple of (1) the canonical filename of the
-    # source code for the module, (2) a list of lines of statements
-    # in the source code, (3) a list of lines of excluded statements,
-    # and (4), a map of line numbers to multi-line line number ranges, for
-    # statements that cross lines.
-
-    def analyze_morf(self, morf):
-        if self.analysis_cache.has_key(morf):
-            return self.analysis_cache[morf]
-        filename = self.morf_filename(morf)
-        ext = os.path.splitext(filename)[1]
-        if ext == '.pyc':
-            if not os.path.exists(filename[:-1]):
-                raise CoverageException(
-                    "No source for compiled code '%s'." % filename
-                    )
-            filename = filename[:-1]
-        source = open(filename, 'r')
-        try:
-            lines, excluded_lines, line_map = self.find_executable_statements(
-                source.read(), exclude=self.exclude_re
-                )
-        except SyntaxError, synerr:
-            raise CoverageException(
-                "Couldn't parse '%s' as Python source: '%s' at line %d" %
-                    (filename, synerr.msg, synerr.lineno)
-                )
-        source.close()
-        result = filename, lines, excluded_lines, line_map
-        self.analysis_cache[morf] = result
-        return result
-
-    def first_line_of_tree(self, tree):
-        while True:
-            if len(tree) == 3 and type(tree[2]) == type(1):
-                return tree[2]
-            tree = tree[1]
-
-    def last_line_of_tree(self, tree):
-        while True:
-            if len(tree) == 3 and type(tree[2]) == type(1):
-                return tree[2]
-            tree = tree[-1]
-
-    def find_docstring_pass_pair(self, tree, spots):
-        for i in range(1, len(tree)):
-            if self.is_string_constant(tree[i]) and self.is_pass_stmt(tree[i+1]):
-                first_line = self.first_line_of_tree(tree[i])
-                last_line = self.last_line_of_tree(tree[i+1])
-                self.record_multiline(spots, first_line, last_line)
-
-    def is_string_constant(self, tree):
-        try:
-            return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.expr_stmt
-        except:
-            return False
-
-    def is_pass_stmt(self, tree):
-        try:
-            return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.pass_stmt
-        except:
-            return False
-
-    def record_multiline(self, spots, i, j):
-        for l in range(i, j+1):
-            spots[l] = (i, j)
-
-    def get_suite_spots(self, tree, spots):
-        """ Analyze a parse tree to find suite introducers which span a number
-            of lines.
-        """
-        for i in range(1, len(tree)):
-            if type(tree[i]) == type(()):
-                if tree[i][0] == symbol.suite:
-                    # Found a suite, look back for the colon and keyword.
-                    lineno_colon = lineno_word = None
-                    for j in range(i-1, 0, -1):
-                        if tree[j][0] == token.COLON:
-                            # Colons are never executed themselves: we want the
-                            # line number of the last token before the colon.
-                            lineno_colon = self.last_line_of_tree(tree[j-1])
-                        elif tree[j][0] == token.NAME:
-                            if tree[j][1] == 'elif':
-                                # Find the line number of the first non-terminal
-                                # after the keyword.
-                                t = tree[j+1]
-                                while t and token.ISNONTERMINAL(t[0]):
-                                    t = t[1]
-                                if t:
-                                    lineno_word = t[2]
-                            else:
-                                lineno_word = tree[j][2]
-                            break
-                        elif tree[j][0] == symbol.except_clause:
-                            # "except" clauses look like:
-                            # ('except_clause', ('NAME', 'except', lineno), ...)
-                            if tree[j][1][0] == token.NAME:
-                                lineno_word = tree[j][1][2]
-                                break
-                    if lineno_colon and lineno_word:
-                        # Found colon and keyword, mark all the lines
-                        # between the two with the two line numbers.
-                        self.record_multiline(spots, lineno_word, lineno_colon)
-
-                    # "pass" statements are tricky: different versions of Python
-                    # treat them differently, especially in the common case of a
-                    # function with a doc string and a single pass statement.
-                    self.find_docstring_pass_pair(tree[i], spots)
-
-                elif tree[i][0] == symbol.simple_stmt:
-                    first_line = self.first_line_of_tree(tree[i])
-                    last_line = self.last_line_of_tree(tree[i])
-                    if first_line != last_line:
-                        self.record_multiline(spots, first_line, last_line)
-                self.get_suite_spots(tree[i], spots)
-
-    def find_executable_statements(self, text, exclude=None):
-        # Find lines which match an exclusion pattern.
-        excluded = {}
-        suite_spots = {}
-        if exclude:
-            reExclude = re.compile(exclude)
-            lines = text.split('\n')
-            for i in range(len(lines)):
-                if reExclude.search(lines[i]):
-                    excluded[i+1] = 1
-
-        # Parse the code and analyze the parse tree to find out which statements
-        # are multiline, and where suites begin and end.
-        import parser
-        tree = parser.suite(text+'\n\n').totuple(1)
-        self.get_suite_spots(tree, suite_spots)
-        #print "Suite spots:", suite_spots
-
-        # Use the compiler module to parse the text and find the executable
-        # statements.  We add newlines to be impervious to final partial lines.
-        statements = {}
-        ast = compiler.parse(text+'\n\n')
-        visitor = StatementFindingAstVisitor(statements, excluded, suite_spots)
-        compiler.walk(ast, visitor, walker=visitor)
-
-        lines = statements.keys()
-        lines.sort()
-        excluded_lines = excluded.keys()
-        excluded_lines.sort()
-        return lines, excluded_lines, suite_spots
-
-    # format_lines(statements, lines).  Format a list of line numbers
-    # for printing by coalescing groups of lines as long as the lines
-    # represent consecutive statements.  This will coalesce even if
-    # there are gaps between statements, so if statements =
-    # [1,2,3,4,5,10,11,12,13,14] and lines = [1,2,5,10,11,13,14] then
-    # format_lines will return "1-2, 5-11, 13-14".
-
-    def format_lines(self, statements, lines):
-        pairs = []
-        i = 0
-        j = 0
-        start = None
-        pairs = []
-        while i < len(statements) and j < len(lines):
-            if statements[i] == lines[j]:
-                if start is None:
-                    start = lines[j]
-                end = lines[j]
-                j = j + 1
-            elif start:
-                pairs.append((start, end))
-                start = None
-            i = i + 1
-        if start:
-            pairs.append((start, end))
-        def stringify(pair):
-            start, end = pair
-            if start == end:
-                return "%d" % start
-            else:
-                return "%d-%d" % (start, end)
-        ret = string.join(map(stringify, pairs), ", ")
-        return ret
-
-    # Backward compatibility with version 1.
-    def analysis(self, morf):
-        f, s, _, m, mf = self.analysis2(morf)
-        return f, s, m, mf
-
-    def analysis2(self, morf):
-        filename, statements, excluded, line_map = self.analyze_morf(morf)
-        self.canonicalize_filenames()
-        if not self.cexecuted.has_key(filename):
-            self.cexecuted[filename] = {}
-        missing = []
-        for line in statements:
-            lines = line_map.get(line, [line, line])
-            for l in range(lines[0], lines[1]+1):
-                if self.cexecuted[filename].has_key(l):
-                    break
-            else:
-                missing.append(line)
-        return (filename, statements, excluded, missing,
-                self.format_lines(statements, missing))
-
-    def relative_filename(self, filename):
-        """ Convert filename to relative filename from self.relative_dir.
-        """
-        return filename.replace(self.relative_dir, "")
-
-    def morf_name(self, morf):
-        """ Return the name of morf as used in report.
-        """
-        if isinstance(morf, types.ModuleType):
-            return morf.__name__
-        else:
-            return self.relative_filename(os.path.splitext(morf)[0])
-
-    def filter_by_prefix(self, morfs, omit_prefixes):
-        """ Return list of morfs where the morf name does not begin
-            with any one of the omit_prefixes.
-        """
-        filtered_morfs = []
-        for morf in morfs:
-            for prefix in omit_prefixes:
-                if self.morf_name(morf).startswith(prefix):
-                    break
-            else:
-                filtered_morfs.append(morf)
-
-        return filtered_morfs
-
-    def morf_name_compare(self, x, y):
-        return cmp(self.morf_name(x), self.morf_name(y))
-
-    def report(self, morfs, show_missing=1, ignore_errors=0, file=None, omit_prefixes=[]):
-        if not isinstance(morfs, types.ListType):
-            morfs = [morfs]
-        # On windows, the shell doesn't expand wildcards.  Do it here.
-        globbed = []
-        for morf in morfs:
-            if isinstance(morf, strclass):
-                globbed.extend(glob.glob(morf))
-            else:
-                globbed.append(morf)
-        morfs = globbed
-
-        morfs = self.filter_by_prefix(morfs, omit_prefixes)
-        morfs.sort(self.morf_name_compare)
-
-        max_name = max([5,] + map(len, map(self.morf_name, morfs)))
-        fmt_name = "%%- %ds  " % max_name
-        fmt_err = fmt_name + "%s: %s"
-        header = fmt_name % "Name" + " Stmts   Exec  Cover"
-        fmt_coverage = fmt_name + "% 6d % 6d % 5d%%"
-        if show_missing:
-            header = header + "   Missing"
-            fmt_coverage = fmt_coverage + "   %s"
-        if not file:
-            file = sys.stdout
-        print >>file, header
-        print >>file, "-" * len(header)
-        total_statements = 0
-        total_executed = 0
-        for morf in morfs:
-            name = self.morf_name(morf)
-            try:
-                _, statements, _, missing, readable  = self.analysis2(morf)
-                n = len(statements)
-                m = n - len(missing)
-                if n > 0:
-                    pc = 100.0 * m / n
-                else:
-                    pc = 100.0
-                args = (name, n, m, pc)
-                if show_missing:
-                    args = args + (readable,)
-                print >>file, fmt_coverage % args
-                total_statements = total_statements + n
-                total_executed = total_executed + m
-            except KeyboardInterrupt:                       #pragma: no cover
-                raise
-            except:
-                if not ignore_errors:
-                    typ, msg = sys.exc_info()[:2]
-                    print >>file, fmt_err % (name, typ, msg)
-        if len(morfs) > 1:
-            print >>file, "-" * len(header)
-            if total_statements > 0:
-                pc = 100.0 * total_executed / total_statements
-            else:
-                pc = 100.0
-            args = ("TOTAL", total_statements, total_executed, pc)
-            if show_missing:
-                args = args + ("",)
-            print >>file, fmt_coverage % args
-
-    # annotate(morfs, ignore_errors).
-
-    blank_re = re.compile(r"\s*(#|$)")
-    else_re = re.compile(r"\s*else\s*:\s*(#|$)")
-
-    def annotate(self, morfs, directory=None, ignore_errors=0, omit_prefixes=[]):
-        morfs = self.filter_by_prefix(morfs, omit_prefixes)
-        for morf in morfs:
-            try:
-                filename, statements, excluded, missing, _ = self.analysis2(morf)
-                self.annotate_file(filename, statements, excluded, missing, directory)
-            except KeyboardInterrupt:
-                raise
-            except:
-                if not ignore_errors:
-                    raise
-
-    def annotate_file(self, filename, statements, excluded, missing, directory=None):
-        source = open(filename, 'r')
-        if directory:
-            dest_file = os.path.join(directory,
-                                     os.path.basename(filename)
-                                     + ',cover')
-        else:
-            dest_file = filename + ',cover'
-        dest = open(dest_file, 'w')
-        lineno = 0
-        i = 0
-        j = 0
-        covered = 1
-        while 1:
-            line = source.readline()
-            if line == '':
-                break
-            lineno = lineno + 1
-            while i < len(statements) and statements[i] < lineno:
-                i = i + 1
-            while j < len(missing) and missing[j] < lineno:
-                j = j + 1
-            if i < len(statements) and statements[i] == lineno:
-                covered = j >= len(missing) or missing[j] > lineno
-            if self.blank_re.match(line):
-                dest.write('  ')
-            elif self.else_re.match(line):
-                # Special logic for lines containing only 'else:'.
-                # See [GDR 2001-12-04b, 3.2].
-                if i >= len(statements) and j >= len(missing):
-                    dest.write('! ')
-                elif i >= len(statements) or j >= len(missing):
-                    dest.write('> ')
-                elif statements[i] == missing[j]:
-                    dest.write('! ')
-                else:
-                    dest.write('> ')
-            elif lineno in excluded:
-                dest.write('- ')
-            elif covered:
-                dest.write('> ')
-            else:
-                dest.write('! ')
-            dest.write(line)
-        source.close()
-        dest.close()
-
-# Singleton object.
-the_coverage = coverage()
-
-# Module functions call methods in the singleton object.
-def use_cache(*args, **kw):
-    return the_coverage.use_cache(*args, **kw)
-
-def start(*args, **kw):
-    return the_coverage.start(*args, **kw)
-
-def stop(*args, **kw):
-    return the_coverage.stop(*args, **kw)
-
-def erase(*args, **kw):
-    return the_coverage.erase(*args, **kw)
-
-def begin_recursive(*args, **kw):
-    return the_coverage.begin_recursive(*args, **kw)
-
-def end_recursive(*args, **kw):
-    return the_coverage.end_recursive(*args, **kw)
-
-def exclude(*args, **kw):
-    return the_coverage.exclude(*args, **kw)
-
-def analysis(*args, **kw):
-    return the_coverage.analysis(*args, **kw)
-
-def analysis2(*args, **kw):
-    return the_coverage.analysis2(*args, **kw)
-
-def report(*args, **kw):
-    return the_coverage.report(*args, **kw)
-
-def annotate(*args, **kw):
-    return the_coverage.annotate(*args, **kw)
-
-def annotate_file(*args, **kw):
-    return the_coverage.annotate_file(*args, **kw)
-
-# Save coverage data when Python exits.  (The atexit module wasn't
-# introduced until Python 2.0, so use sys.exitfunc when it's not
-# available.)
-try:
-    import atexit
-    atexit.register(the_coverage.save)
-except ImportError:
-    sys.exitfunc = the_coverage.save
-
-# Command-line interface.
-if __name__ == '__main__':
-    the_coverage.command_line(sys.argv[1:])
-
-
-# A. REFERENCES
-#
-# [GDR 2001-12-04a] "Statement coverage for Python"; Gareth Rees;
-# Ravenbrook Limited; 2001-12-04;
-# <http://www.nedbatchelder.com/code/modules/rees-coverage.html>.
-#
-# [GDR 2001-12-04b] "Statement coverage for Python: design and
-# analysis"; Gareth Rees; Ravenbrook Limited; 2001-12-04;
-# <http://www.nedbatchelder.com/code/modules/rees-design.html>.
-#
-# [van Rossum 2001-07-20a] "Python Reference Manual (release 2.1.1)";
-# Guido van Rossum; 2001-07-20;
-# <http://www.python.org/doc/2.1.1/ref/ref.html>.
-#
-# [van Rossum 2001-07-20b] "Python Library Reference"; Guido van Rossum;
-# 2001-07-20; <http://www.python.org/doc/2.1.1/lib/lib.html>.
-#
-#
-# B. DOCUMENT HISTORY
-#
-# 2001-12-04 GDR Created.
-#
-# 2001-12-06 GDR Added command-line interface and source code
-# annotation.
-#
-# 2001-12-09 GDR Moved design and interface to separate documents.
-#
-# 2001-12-10 GDR Open cache file as binary on Windows.  Allow
-# simultaneous -e and -x, or -a and -r.
-#
-# 2001-12-12 GDR Added command-line help.  Cache analysis so that it
-# only needs to be done once when you specify -a and -r.
-#
-# 2001-12-13 GDR Improved speed while recording.  Portable between
-# Python 1.5.2 and 2.1.1.
-#
-# 2002-01-03 GDR Module-level functions work correctly.
-#
-# 2002-01-07 GDR Update sys.path when running a file with the -x option,
-# so that it matches the value the program would get if it were run on
-# its own.
-#
-# 2004-12-12 NMB Significant code changes.
-# - Finding executable statements has been rewritten so that docstrings and
-#   other quirks of Python execution aren't mistakenly identified as missing
-#   lines.
-# - Lines can be excluded from consideration, even entire suites of lines.
-# - The filesystem cache of covered lines can be disabled programmatically.
-# - Modernized the code.
-#
-# 2004-12-14 NMB Minor tweaks.  Return 'analysis' to its original behavior
-# and add 'analysis2'.  Add a global for 'annotate', and factor it, adding
-# 'annotate_file'.
-#
-# 2004-12-31 NMB Allow for keyword arguments in the module global functions.
-# Thanks, Allen.
-#
-# 2005-12-02 NMB Call threading.settrace so that all threads are measured.
-# Thanks Martin Fuzzey. Add a file argument to report so that reports can be
-# captured to a different destination.
-#
-# 2005-12-03 NMB coverage.py can now measure itself.
-#
-# 2005-12-04 NMB Adapted Greg Rogers' patch for using relative filenames,
-# and sorting and omitting files to report on.
-#
-# 2006-07-23 NMB Applied Joseph Tate's patch for function decorators.
-#
-# 2006-08-21 NMB Applied Sigve Tjora and Mark van der Wal's fixes for argument
-# handling.
-#
-# 2006-08-22 NMB Applied Geoff Bache's parallel mode patch.
-#
-# 2006-08-23 NMB Refactorings to improve testability.  Fixes to command-line
-# logic for parallel mode and collect.
-#
-# 2006-08-25 NMB "#pragma: nocover" is excluded by default.
-#
-# 2006-09-10 NMB Properly ignore docstrings and other constant expressions that
-# appear in the middle of a function, a problem reported by Tim Leslie.
-# Minor changes to avoid lint warnings.
-#
-# 2006-09-17 NMB coverage.erase() shouldn't clobber the exclude regex.
-# Change how parallel mode is invoked, and fix erase() so that it erases the
-# cache when called programmatically.
-#
-# 2007-07-21 NMB In reports, ignore code executed from strings, since we can't
-# do anything useful with it anyway.
-# Better file handling on Linux, thanks Guillaume Chazarain.
-# Better shell support on Windows, thanks Noel O'Boyle.
-# Python 2.2 support maintained, thanks Catherine Proulx.
-#
-# 2007-07-22 NMB Python 2.5 now fully supported. The method of dealing with
-# multi-line statements is now less sensitive to the exact line that Python
-# reports during execution. Pass statements are handled specially so that their
-# disappearance during execution won't throw off the measurement.
-#
-# 2007-07-23 NMB Now Python 2.5 is *really* fully supported: the body of the
-# new with statement is counted as executable.
-#
-# 2007-07-29 NMB Better packaging.
-#
-# 2007-09-30 NMB Don't try to predict whether a file is Python source based on
-# the extension. Extensionless files are often Python scripts. Instead, simply
-# parse the file and catch the syntax errors.  Hat tip to Ben Finney.
-
-# C. COPYRIGHT AND LICENCE
-#
-# Copyright 2001 Gareth Rees.  All rights reserved.
-# Copyright 2004-2007 Ned Batchelder.  All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# 1. Redistributions of source code must retain the above copyright
-#    notice, this list of conditions and the following disclaimer.
-#
-# 2. Redistributions in binary form must reproduce the above copyright
-#    notice, this list of conditions and the following disclaimer in the
-#    documentation and/or other materials provided with the
-#    distribution.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-# DAMAGE.
-#
-# $Id: coverage.py 79 2007-10-01 01:01:52Z nedbat $
diff --git a/contrib/create_tast_categories.sh b/contrib/create_tast_categories.sh
new file mode 100755
index 0000000..a1e3748
--- /dev/null
+++ b/contrib/create_tast_categories.sh
@@ -0,0 +1,154 @@
+#!/bin/bash
+
+set -e
+
+usage() {
+    echo "
+Usage: $0 [OPTION]
+
+Generates control files for Tast categories.
+
+For each Tast category, e.g. 'example', a file
+'server/site_tests/tast/control.category-<category>' and
+'test_suites/control.bvt-tast-cq-<category>' is generated, and attributes to add
+to attribute_allowlist.txt are printed.
+
+The control files follow templates that use test expressions based on test name,
+e.g. name:example.*.
+
+-f      Overwrite existing control files.
+-h      Print this help message.
+"
+    exit 1
+}
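+
+# Illustrative sketch only, reusing the hypothetical "example" category from
+# the usage text above: a run such as
+#
+#   contrib/create_tast_categories.sh -f
+#
+# would (re)generate, among others, the files
+# server/site_tests/tast/control.category-example and
+# test_suites/control.bvt-tast-cq-example, and would print
+# "suite:bvt-tast-cq-example" as one of the attributes to add to
+# attribute_allowlist.txt.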
+
+overwrite=false
+
+while getopts "fh" o; do
+    case "${o}" in
+        f)
+            overwrite=true ;;
+        *)
+            usage ;;
+    esac
+done
+
+readonly script_dir="$(dirname "$(realpath -e "${BASH_SOURCE[0]}")")"
+readonly repo_root=$(realpath "${script_dir}"/..)
+readonly src_root="$(realpath "${script_dir}"/../../../..)"
+
+categories=()
+types=( "remote" "local" )
+for type in "${types[@]}"; do
+    # Append (-O starts assignment at the current array length) so categories
+    # from both the remote and local bundles are collected.
+    mapfile -t -O "${#categories[@]}" categories < <(find \
+    "${src_root}/platform/tast-tests/src/chromiumos/tast/${type}/bundles/cros" \
+    -maxdepth 1 -mindepth 1 -type d -printf "%f\n")
+done
+
+mapfile -t categories < <(printf '%s\n' "${categories[@]}" | sort -u)
+
+attributes=()
+
+for c in "${categories[@]}"; do
+    test_suites_file="${repo_root}/test_suites/control.bvt-tast-cq-${c}"
+    if [[ -e ${test_suites_file}  && ${overwrite} == "false" ]]; then
+        echo "File ${test_suites_file} already exists. Use -f to overwrite."
+        exit 1
+    fi
+    cat << EOF > "${test_suites_file}"
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-${c}"
+PURPOSE = 'Tests the critical Tast tests in the "${c}" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "${c}" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-${c}, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-${c} test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
+EOF
+
+    tast_file="${repo_root}/server/site_tests/tast/control.category-${c}"
+    if [[ -e ${tast_file}  && ${overwrite} == "false" ]]; then
+        echo "File ${tast_file} already exists. Use -f to overwrite."
+        exit 1
+    fi
+    cat << EOF > "${tast_file}"
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-${c}'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-${c}'
+MAX_RESULT_SIZE_KB = 256 * 1024
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "${c}" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "${c}" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:${c}.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
+EOF
+
+    attributes+=( "suite:bvt-tast-cq-${c}" )
+
+done
+
+echo "Add the following attributes to attribute_allowlist.txt:"
+printf "%s\n" "${attributes[@]}"
\ No newline at end of file
diff --git a/contrib/db_cleanup.py b/contrib/db_cleanup.py
deleted file mode 100755
index c2f9510..0000000
--- a/contrib/db_cleanup.py
+++ /dev/null
@@ -1,392 +0,0 @@
-#!/usr/bin/python2
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import datetime
-import logging
-import os
-import re
-import sys
-import time
-
-os.environ['DJANGO_SETTINGS_MODULE'] = 'frontend.settings'
-
-import common
-from autotest_lib.server import utils
-from django.db import connections, transaction
-
-
-# Format Appears as: [Date] [Time] - [Msg Level] - [Message]
-LOGGING_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
-# This regex makes sure the input is in the format of YYYY-MM-DD (2012-02-01)
-DATE_FORMAT_REGEX = ('^(19|20)\d\d[- /.](0[1-9]|1[012])[- /.](0[1-9]|[12][0-9]'
-                     '|3[01])$')
-SELECT_CMD_FORMAT = """
-SELECT %(table)s.%(primary_key)s FROM %(table)s
-WHERE %(table)s.%(time_column)s <= "%(date)s"
-"""
-SELECT_JOIN_CMD_FORMAT = """
-SELECT %(table)s.%(primary_key)s FROM %(table)s
-INNER JOIN %(related_table)s
-  ON %(table)s.%(foreign_key)s=%(related_table)s.%(related_primary_key)s
-WHERE %(related_table)s.%(time_column)s <= "%(date)s"
-"""
-SELECT_WITH_INDIRECTION_FORMAT = """
-SELECT %(table)s.%(primary_key)s FROM %(table)s
-INNER JOIN %(indirection_table)s
-  ON %(table)s.%(foreign_key)s =
-     %(indirection_table)s.%(indirection_primary_key)s
-INNER JOIN %(related_table)s
-  ON %(indirection_table)s.%(indirection_foreign_key)s =
-  %(related_table)s.%(related_primary_key)s
-WHERE %(related_table)s.%(time_column)s <= "%(date)s"
-"""
-DELETE_ROWS_FORMAT = """
-DELETE FROM %(table)s
-WHERE %(table)s.%(primary_key)s IN (%(rows)s)
-"""
-
-
-AFE_JOB_ID = 'afe_job_id'
-JOB_ID = 'job_id'
-JOB_IDX = 'job_idx'
-TEST_IDX = 'test_idx'
-
-# CAUTION: Make sure only the 'default' connection is used. Otherwise
-# db_cleanup may delete stuff from the global database, which is generally not
-# intended.
-cursor = connections['default'].cursor()
-
-# Globals for command line flag constants, for convenience.
-DRY_RUN = False
-STEP_SIZE = None
-LOAD_RATIO = 1.0
-
-class ProgressBar(object):
-    TEXT = "{:<40s} [{:<20s}] ({:>9d}/{:>9d})"
-
-    def __init__(self, name, amount):
-        self._name = name
-        self._amount = amount
-        self._cur = 0
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, a, b, c):
-        sys.stdout.write('\n')
-        sys.stdout.flush()
-
-    def update(self, x):
-        """
-        Advance the counter by `x`.
-
-        @param x: An integer of how many more elements were processed.
-        """
-        self._cur += x
-
-    def show(self):
-        """
-        Display the progress bar on the current line.  Repeated invocations
-        "update" the display.
-        """
-        if self._amount == 0:
-            barlen = 20
-        else:
-            barlen = int(20 * self._cur / float(self._amount))
-        if barlen:
-            bartext = '=' * (barlen-1) + '>'
-        else:
-            bartext = ''
-        text = self.TEXT.format(self._name, bartext, self._cur, self._amount)
-        sys.stdout.write('\r')
-        sys.stdout.write(text)
-        sys.stdout.flush()
-
-
-def grouper(iterable, n):
-    """
-    Group the elements of `iterable` into groups of maximum size `n`.
-
-    @param iterable: An iterable.
-    @param n: Max size of returned groups.
-    @returns: Yields iterables of size <= n.
-
-    >>> grouper('ABCDEFG', 3)
-    [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]
-    """
-    args = [iter(iterable)] * n
-    while True:
-        lst = []
-        try:
-            for itr in args:
-                lst.append(next(itr))
-            yield lst
-        except StopIteration:
-            if lst:
-                yield lst
-            break
-
-
-def _delete_table_data_before_date(table_to_delete_from, primary_key,
-                                   related_table, related_primary_key,
-                                   date, foreign_key=None,
-                                   time_column="started_time",
-                                   indirection_table=None,
-                                   indirection_primary_key=None,
-                                   indirection_foreign_key=None):
-    """
-    We want a delete statement that will only delete from one table while
-    using a related table to find the rows to delete.
-
-    An example mysql command:
-    DELETE FROM tko_iteration_result USING tko_iteration_result INNER JOIN
-    tko_tests WHERE tko_iteration_result.test_idx=tko_tests.test_idx AND
-    tko_tests.started_time <= '2012-02-01';
-
-    There are also tables that require 2 joins to determine which rows we want
-    to delete and we determine these rows by joining the table we want to
-    delete from with an indirection table to the actual jobs table.
-
-    @param table_to_delete_from: Table whose rows we want to delete.
-    @param related_table: Table with the date information we are selecting by.
-    @param foreign_key: Foreign key used in table_to_delete_from to reference
-                        the related table. If None, the primary_key is used.
-    @param primary_key: Primary key in the related table.
-    @param date: End date of the information we are trying to delete.
-    @param time_column: Column that we want to use to compare the date to.
-    @param indirection_table: Table we use to link the data we are trying to
-                              delete with the table with the date information.
-    @param indirection_primary_key: Key we use to connect the indirection table
-                                    to the table we are trying to delete rows
-                                    from.
-    @param indirection_foreign_key: Key we use to connect the indirection table
-                                    to the table with the date information.
-    """
-    if not foreign_key:
-        foreign_key = primary_key
-
-    if not related_table:
-        # Deleting from a table directly.
-        variables = dict(table=table_to_delete_from, primary_key=primary_key,
-                         time_column=time_column, date=date)
-        sql = SELECT_CMD_FORMAT % variables
-    elif not indirection_table:
-        # Deleting using a single JOIN to get the date information.
-        variables = dict(primary_key=primary_key, table=table_to_delete_from,
-                         foreign_key=foreign_key, related_table=related_table,
-                         related_primary_key=related_primary_key,
-                         time_column=time_column, date=date)
-        sql = SELECT_JOIN_CMD_FORMAT % variables
-    else:
-        # There are cases where we need to JOIN 3 TABLES to determine the rows
-        # we want to delete.
-        variables = dict(primary_key=primary_key, table=table_to_delete_from,
-                         indirection_table=indirection_table,
-                         foreign_key=foreign_key,
-                         indirection_primary_key=indirection_primary_key,
-                         related_table=related_table,
-                         related_primary_key=related_primary_key,
-                         indirection_foreign_key=indirection_foreign_key,
-                         time_column=time_column, date=date)
-        sql = SELECT_WITH_INDIRECTION_FORMAT % variables
-
-    logging.debug('SQL: %s', sql)
-    cursor.execute(sql, [])
-    rows = [x[0] for x in cursor.fetchall()]
-    logging.debug(rows)
-
-    if not rows or rows == [None]:
-        with ProgressBar(table_to_delete_from, 0) as pb:
-            pb.show()
-        logging.debug('Nothing to delete for %s', table_to_delete_from)
-        return
-
-    with ProgressBar(table_to_delete_from, len(rows)) as pb:
-        for row_keys in grouper(rows, STEP_SIZE):
-            variables['rows'] = ','.join([str(x) for x in row_keys])
-            sql = DELETE_ROWS_FORMAT % variables
-
-            start = time.time()
-            logging.debug('SQL: %s', sql)
-            if not DRY_RUN:
-                cursor.execute(sql, [])
-                transaction.commit_unless_managed(using='default')
-            end = time.time()
-
-            pb.update(len(row_keys))
-            pb.show()
-
-            if LOAD_RATIO != 1.0:
-                assert 0 < LOAD_RATIO <= 1, (
-                        'Load ratio must be a fraction between 0 and 1.')
-                time.sleep((end - start) / LOAD_RATIO)
-
-
-def _subtract_days(date, days_to_subtract):
-    """
-    Return a date (string) that is 'days' before 'date'
-
-    @param date: date (string) we are subtracting from.
-    @param days_to_subtract: days (int) we are subtracting.
-    """
-    date_obj = datetime.datetime.strptime(date, '%Y-%m-%d')
-    difference = date_obj - datetime.timedelta(days=days_to_subtract)
-    return difference.strftime('%Y-%m-%d')
-
-
-def _delete_all_data_before_date(date):
-    """
-    Delete all the database data before a given date.
-
-    This function focuses predominantly on the data for jobs in tko_jobs.
-    However, not all jobs in afe_jobs are also in tko_jobs.
-
-    Therefore we first delete all afe_jobs rows and their foreign key
-    relations from before two days prior to the given date. Then we run the
-    queries using tko_jobs and these tables to ensure all the related
-    information is gone. Even though we are repeating deletes on these
-    tables, the second pass is quick and makes sure all the foreign key
-    dependencies are cleaned up correctly.
-
-    @param date: End date of the information we are trying to delete.
-
-    Rows are deleted in batches of STEP_SIZE (set from the --step flag).
-    """
-    # First cleanup all afe_job related data (prior to 2 days before date).
-    # The reason for this is not all afe_jobs may be in tko_jobs.
-    afe_date = _subtract_days(date, 2)
-    logging.info('Cleaning up all afe_job data prior to %s.', afe_date)
-    _delete_table_data_before_date('afe_aborted_host_queue_entries',
-                                   'queue_entry_id',
-                                   'afe_jobs', 'id', afe_date,
-                                   time_column='created_on',
-                                   foreign_key='queue_entry_id',
-                                   indirection_table='afe_host_queue_entries',
-                                   indirection_primary_key='id',
-                                   indirection_foreign_key='job_id')
-    _delete_table_data_before_date('afe_special_tasks', 'id',
-                                   'afe_jobs', 'id',
-                                   afe_date, time_column='created_on',
-                                   foreign_key='queue_entry_id',
-                                   indirection_table='afe_host_queue_entries',
-                                   indirection_primary_key='id',
-                                   indirection_foreign_key='job_id')
-    _delete_table_data_before_date('afe_host_queue_entries', 'id',
-                                   'afe_jobs', 'id',
-                                   afe_date, time_column='created_on',
-                                   foreign_key=JOB_ID)
-    _delete_table_data_before_date('afe_job_keyvals', 'id',
-                                   'afe_jobs', 'id',
-                                   afe_date, time_column='created_on',
-                                   foreign_key=JOB_ID)
-    _delete_table_data_before_date('afe_jobs_dependency_labels', 'id',
-                                   'afe_jobs', 'id',
-                                   afe_date, time_column='created_on',
-                                   foreign_key=JOB_ID)
-    _delete_table_data_before_date('afe_jobs', 'id',
-                                   None, None,
-                                   afe_date, time_column='created_on')
-    # Special tasks that aren't associated with an HQE
-    # Since we don't do the queue_entry_id=NULL check, we might wipe out a bit
-    # more than we should, but I doubt anyone will notice or care.
-    _delete_table_data_before_date('afe_special_tasks', 'id',
-                                   None, None,
-                                   afe_date, time_column='time_requested')
-
-    # Now go through and clean up all the rows related to tko_jobs prior to
-    # date.
-    logging.info('Cleaning up all data related to tko_jobs prior to %s.',
-                  date)
-    _delete_table_data_before_date('tko_test_attributes', 'id',
-                                   'tko_tests', TEST_IDX,
-                                   date, foreign_key=TEST_IDX)
-    _delete_table_data_before_date('tko_test_labels_tests', 'id',
-                                   'tko_tests', TEST_IDX,
-                                   date, foreign_key='test_id')
-    _delete_table_data_before_date('tko_iteration_result', TEST_IDX,
-                                   'tko_tests', TEST_IDX,
-                                   date)
-    _delete_table_data_before_date('tko_iteration_perf_value', TEST_IDX,
-                                   'tko_tests', TEST_IDX,
-                                   date)
-    _delete_table_data_before_date('tko_iteration_attributes', TEST_IDX,
-                                   'tko_tests', TEST_IDX,
-                                   date)
-    _delete_table_data_before_date('tko_job_keyvals', 'id',
-                                   'tko_jobs', JOB_IDX,
-                                   date, foreign_key='job_id')
-    _delete_table_data_before_date('afe_aborted_host_queue_entries',
-                                   'queue_entry_id',
-                                   'tko_jobs', AFE_JOB_ID, date,
-                                   foreign_key='queue_entry_id',
-                                   indirection_table='afe_host_queue_entries',
-                                   indirection_primary_key='id',
-                                   indirection_foreign_key='job_id')
-    _delete_table_data_before_date('afe_special_tasks', 'id',
-                                   'tko_jobs', AFE_JOB_ID,
-                                   date, foreign_key='queue_entry_id',
-                                   indirection_table='afe_host_queue_entries',
-                                   indirection_primary_key='id',
-                                   indirection_foreign_key='job_id')
-    _delete_table_data_before_date('afe_host_queue_entries', 'id',
-                                   'tko_jobs', AFE_JOB_ID,
-                                   date, foreign_key='job_id')
-    _delete_table_data_before_date('afe_job_keyvals', 'id',
-                                   'tko_jobs', AFE_JOB_ID,
-                                   date, foreign_key='job_id')
-    _delete_table_data_before_date('afe_jobs_dependency_labels', 'id',
-                                   'tko_jobs', AFE_JOB_ID,
-                                   date, foreign_key='job_id')
-    _delete_table_data_before_date('afe_jobs', 'id',
-                                   'tko_jobs', AFE_JOB_ID,
-                                   date, foreign_key='id')
-    _delete_table_data_before_date('tko_tests', TEST_IDX,
-                                   'tko_jobs', JOB_IDX,
-                                   date, foreign_key=JOB_IDX)
-    _delete_table_data_before_date('tko_jobs', JOB_IDX,
-                                   None, None, date)
-
-
-def parse_args():
-    """Parse command line arguments"""
-    parser = argparse.ArgumentParser()
-    parser.add_argument('-v', '--verbose', action='store_true',
-                        help='Print SQL commands and results')
-    parser.add_argument('--step', type=int, action='store',
-                        default=1000,
-                        help='Number of rows to delete at once')
-    parser.add_argument('--dry_run', action='store_true',
-                        help='Print SQL queries instead of executing them.')
-    parser.add_argument('--load_ratio', type=float, action='store', default=0.2,
-                        help=('The fraction of time the script should be '
-                              'performing deletes. For example --load_ratio=.2 '
-                              'will cause the script to sleep 80%% of the time, '
-                              'and perform work for the other 20%%.'))
-    parser.add_argument('date', help='Keep results newer than')
-    return parser.parse_args()
-
-
-def main():
-    args = parse_args()
-
-    verbose = args.verbose or args.dry_run
-    level = logging.DEBUG if verbose else logging.INFO
-    logging.basicConfig(level=level, format=LOGGING_FORMAT)
-    logging.info('Calling: %s', sys.argv)
-
-    if not re.match(DATE_FORMAT_REGEX, args.date):
-        print 'DATE must be in yyyy-mm-dd format!'
-        return
-
-    global STEP_SIZE, DRY_RUN, LOAD_RATIO
-    STEP_SIZE = args.step
-    DRY_RUN = args.dry_run
-    LOAD_RATIO = args.load_ratio
-
-    _delete_all_data_before_date(args.date)
-
-
-if __name__ == '__main__':
-    main()
diff --git a/contrib/db_optimize.py b/contrib/db_optimize.py
deleted file mode 100755
index ecaac73..0000000
--- a/contrib/db_optimize.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/python2
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This script will run optimize table for chromeos_autotest_db
-
-This script might have notable impact on the mysql performance as it locks
-tables and rebuilds indexes. So be careful when running it on production
-systems.
-"""
-
-import argparse
-import logging
-import socket
-import subprocess
-import sys
-
-import common
-from autotest_lib.frontend import database_settings_helper
-from autotest_lib.server import utils
-
-# Format Appears as: [Date] [Time] - [Msg Level] - [Message]
-LOGGING_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
-STATS_KEY = 'db_optimize.%s' % socket.gethostname()
-
-def main_without_exception_handling():
-    database_settings = database_settings_helper.get_default_db_config()
-    command = ['mysqlcheck',
-               '-o', database_settings['NAME'],
-               '-u', database_settings['USER'],
-               '-p%s' % database_settings['PASSWORD'],
-               # We want to do db optimization on each master/slave in
-               # rotation. Do not write OPTIMIZE TABLE to the binlog so that
-               # it won't be picked up by slaves automatically.
-               '--skip-write-binlog',
-               ]
-    subprocess.check_call(command)
-
-
-def should_optimize():
-    """Check if the server should run db_optimize.
-
-    Only shard should optimize db.
-
-    @returns: True if it should optimize db otherwise False.
-    """
-    return utils.is_shard()
-
-
-def parse_args():
-    """Parse command line arguments"""
-    parser = argparse.ArgumentParser()
-    parser.add_argument('-c', '--check_server', action='store_true',
-                        help='Check if the server should optimize db.')
-    return parser.parse_args()
-
-
-def main():
-    """Main."""
-    args = parse_args()
-
-    logging.basicConfig(level=logging.INFO, format=LOGGING_FORMAT)
-    logging.info('Calling: %s', sys.argv)
-
-    if args.check_server and not should_optimize():
-        print 'Only shard can run db optimization.'
-        return
-
-    try:
-        main_without_exception_handling()
-    except Exception as e:
-        message = 'Uncaught exception; terminating db_optimize.'
-        logging.exception(message)
-        raise
-    logging.info('db_optimize completed.')
-
-
-if __name__ == '__main__':
-    main()
diff --git a/contrib/dhcp_failed_machines.py b/contrib/dhcp_failed_machines.py
deleted file mode 100755
index a44fd2e..0000000
--- a/contrib/dhcp_failed_machines.py
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/python2
-
-"""
-usage:
-./dhcp_failed_machines.py /var/log/dhcp.log
-
-You can also run it directly on the gzip'd logs.
-
-This script basically expects to run from the dhcp machine, as it looks at
-/etc/dhcp/dhcpd.conf to be able to do reverse DNS lookups.  It also expects the
-dhcp log to be copied to some local file.
-
-If you're lucky, there might still be a copy of this script already on the dhcp
-server at /tmp/looky.py.
-"""
-
-import gzip
-import itertools
-import pprint
-import re
-import sys
-
-lookups = {}
-
-with open('/etc/dhcp/dhcpd.conf', 'r') as f:
-  for line in f:
-    if line.startswith('#'):
-      continue
-    if line.split() and line.split()[0] == 'host':
-      hostconf = list(itertools.takewhile(lambda x: x.strip() != '}', f))
-      d = dict([h.strip().split()[-2:] for h in hostconf])
-      hostname = d['ddns-hostname'].replace('"', '').replace(';', '')
-      lookups[d['fixed-address'].replace(';', '')] = hostname
-
-
-offers = {}
-offenders = set()
-restarts = []
-
-rgx = re.compile(
-  r'(?P<command>[A-Z]+) (?:from|on|for) (?P<host>\d+\.\d+\.\d+\.\d+)')
-server_restart_str = 'Internet Systems Consortium'
-
-
-def open_file(f):
-  if f.endswith('.gz'):
-    return gzip.open(f, 'r')
-  else:
-    return open(f, 'r')
-
-with open_file(sys.argv[1]) as f:
-  for line in f:
-    if server_restart_str in line:
-        restarts.append(line)
-        continue
-    m = rgx.search(line)
-    if m:
-      command = m.group('command')
-      host = m.group('host')
-      if command == 'DHCPOFFER':
-        offers[host] = offers.get(host, 0) + 1
-        if offers[host] > 2:
-          offenders.add(host)
-      if command == 'DHCPREQUEST':
-        offers[host] = 0
-
-if restarts:
-    print 'DHCP restarts:\n %s' % ''.join(restarts)
-
-def lookup(h):
-  return lookups.get(h, h)
-
-hosts = sorted([lookup(h) for h in offenders])
-if len(sys.argv) == 2:
-  pprint.pprint(hosts)
-else:
-  warning = int(sys.argv[2])
-  critical = int(sys.argv[3])
-  if len(offenders) > critical:
-    print ('DHCP Critical, number of duts with DHCP failure is %d: %s' %
-           (len(hosts), ', '.join(hosts)))
-    sys.exit(2)
-  elif len(offenders) > warning:
-    print ('DHCP Warning, number of duts with DHCP failure is %d: %s' %
-           (len(hosts), ', '.join(hosts)))
-    sys.exit(1)
-  else:
-    print ('DHCP OK, number of duts with DHCP failure is %d: %s' %
-           (len(hosts), ', '.join(hosts)))
-    sys.exit(0)
diff --git a/contrib/generate_rpm_mapping.py b/contrib/generate_rpm_mapping.py
deleted file mode 100755
index 5b9577a..0000000
--- a/contrib/generate_rpm_mapping.py
+++ /dev/null
@@ -1,471 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This script generates a csv file containing the mapping of
-(device_hostname, rpm_hostname, outlet, hydra_hostname) for each
-host in our lab. The csv file is in the following format.
-
-chromeos-rack2-host1,chromeos-rack2-rpm1,.A1,chromeos-197-hydra1.mtv
-chromeos-rack2-host2,chromeos-rack2-rpm1,.A2,chromeos-197-hydra1.mtv
-...
-
-The generated csv file can be used as input to add_host_powerunit_info.py
-
-Workflow:
-    <Generate the csv file>
-    python generate_rpm_mapping.py --csv mapping_file.csv --server cautotest
-
-    <Upload mapping information in csv file to AFE>
-    python add_host_powerunit_info.py --csv mapping_file.csv
-
-"""
-import argparse
-import collections
-import logging
-import re
-import sys
-
-import common
-
-from autotest_lib.client.common_lib import autotest_enum
-from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
-
-CHROMEOS_LABS = autotest_enum.AutotestEnum('OysterBay', 'Atlantis',
-                                   'Chaos', 'Destiny', start_value=1)
-HOST_REGX = 'chromeos(\d+)(-row(\d+))*-rack(\d+)-host(\d+)'
-DeviceHostname = collections.namedtuple(
-        'DeviceHostname', ['lab', 'row', 'rack', 'host'])
-
-
-class BaseLabConfig(object):
-    """Base class for a lab configuration."""
-    RPM_OUTLET_MAP = {}
-    LAB_NUMBER = -1
-
-    @classmethod
-    def get_rpm_hostname(cls, device_hostname):
-        """Get rpm hostname given a device.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: the rpm hostname, default to empty string.
-
-        """
-        return ''
-
-
-    @classmethod
-    def get_rpm_outlet(cls, device_hostname):
-        """Get rpm outlet given a device.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: the rpm outlet, default to empty string.
-
-        """
-        return ''
-
-
-    @classmethod
-    def get_hydra_hostname(cls, device_hostname):
-        """Get hydra hostname given a device.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: the hydra hostname, default to empty string.
-
-        """
-        return ''
-
-
-    @classmethod
-    def is_device_in_the_lab(cls, device_hostname):
-        """Check whether a dut belongs to the lab.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: True if the dut belongs to the lab,
-                  False otherwise.
-
-        """
-        return device_hostname.lab == cls.LAB_NUMBER
-
-
-class OysterBayConfig(BaseLabConfig):
-    """Configuration for OysterBay"""
-
-    LAB_NUMBER = CHROMEOS_LABS.OYSTERBAY
-
-
-    @classmethod
-    def get_rpm_hostname(cls, device_hostname):
-        """Get rpm hostname.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: hostname of the rpm that has the device.
-
-        """
-        if not device_hostname.row:
-            return ''
-        return 'chromeos%d-row%d-rack%d-rpm1' % (
-                device_hostname.lab, device_hostname.row,
-                device_hostname.rack)
-
-
-    @classmethod
-    def get_rpm_outlet(cls, device_hostname):
-        """Get rpm outlet.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: rpm outlet, e.g. '.A1'
-
-        """
-        if not device_hostname.row:
-            return ''
-        return '.A%d' % device_hostname.host
-
-
-class AtlantisConfig(BaseLabConfig):
-    """Configuration for Atlantis lab."""
-
-    LAB_NUMBER = CHROMEOS_LABS.ATLANTIS
-    # chromeos2, hostX -> outlet
-    RPM_OUTLET_MAP = {
-            1: 1,
-            7: 2,
-            2: 4,
-            8: 5,
-            3: 7,
-            9: 8,
-            4: 9,
-            10: 10,
-            5: 12,
-            11: 13,
-            6: 15,
-            12: 16}
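-    # Worked example from the map above: host7 on a chromeos2 rack maps to
-    # outlet 2, so get_rpm_outlet() returns '.A2'.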
-
-    @classmethod
-    def get_rpm_hostname(cls, device_hostname):
-        """Get rpm hostname.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: hostname of the rpm that has the device.
-
-        """
-        return 'chromeos%d-row%d-rack%d-rpm1' % (
-                device_hostname.lab, device_hostname.row,
-                device_hostname.rack)
-
-
-    @classmethod
-    def get_rpm_outlet(cls, device_hostname):
-        """Get rpm outlet.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: rpm outlet, e.g. '.A1'
-
-        """
-        return '.A%d' % cls.RPM_OUTLET_MAP[device_hostname.host]
-
-
-    @classmethod
-    def get_hydra_hostname(cls, device_hostname):
-        """Get hydra hostname.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: hydra hostname
-
-        """
-        row = device_hostname.row
-        rack = device_hostname.rack
-        if row >= 1 and row <= 5 and rack >= 1 and rack <= 7:
-            return 'chromeos-197-hydra1.cros'
-        elif row >= 1 and row <= 5 and rack >= 8 and rack <= 11:
-            return 'chromeos-197-hydra2.cros'
-        else:
-            logging.error('Could not determine hydra for %s',
-                          device_hostname)
-            return ''
-
-
-class ChaosConfig(BaseLabConfig):
-    """Configuration for Chaos lab."""
-
-    LAB_NUMBER = CHROMEOS_LABS.CHAOS
-
-
-    @classmethod
-    def get_rpm_hostname(cls, device_hostname):
-        """Get rpm hostname.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: hostname of the rpm that has the device.
-
-        """
-        return 'chromeos%d-row%d-rack%d-rpm1' % (
-                device_hostname.lab, device_hostname.row,
-                device_hostname.rack)
-
-
-    @classmethod
-    def get_rpm_outlet(cls, device_hostname):
-        """Get rpm outlet.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: rpm outlet, e.g. '.A1'
-
-        """
-        return '.A%d' % device_hostname.host
-
-
-class DestinyConfig(BaseLabConfig):
-    """Configuration for Desitny lab."""
-
-    LAB_NUMBER = CHROMEOS_LABS.DESTINY
-    # Non-densified rack: one host per shelf
-    # (rowX % 2, hostY) -> outlet
-    RPM_OUTLET_MAP = {
-            (1, 1): 1,
-            (0, 1): 2,
-            (1, 2): 4,
-            (0, 2): 5,
-            (1, 3): 7,
-            (0, 3): 8,
-            (1, 4): 9,
-            (0, 4): 10,
-            (1, 5): 12,
-            (0, 5): 13,
-            (1, 6): 15,
-            (0, 6): 16,
-    }
-
-    # Densified rack: one shelf can have two chromeboxes or one notebook.
-    # (rowX % 2, hostY) -> outlet
-    DENSIFIED_RPM_OUTLET_MAP = {
-            (1, 2):  1,  (1, 1): 1,
-            (0, 1):  2,  (0, 2): 2,
-            (1, 4):  3,  (1, 3): 3,
-            (0, 3):  4,  (0, 4): 4,
-            (1, 6):  5,  (1, 5): 5,
-            (0, 5):  6,  (0, 6): 6,
-            (1, 8):  7,  (1, 7): 7,
-            (0, 7):  8,  (0, 8): 8,
-            # outlet 9, 10 are not used
-            (1, 10): 11, (1, 9): 11,
-            (0, 9):  12, (0, 10): 12,
-            (1, 12): 13, (1, 11): 13,
-            (0, 11): 14, (0, 12): 14,
-            (1, 14): 15, (1, 13): 15,
-            (0, 13): 16, (0, 14): 16,
-            (1, 16): 17, (1, 15): 17,
-            (0, 15): 18, (0, 16): 18,
-            (1, 18): 19, (1, 17): 19,
-            (0, 17): 20, (0, 18): 20,
-            (1, 20): 21, (1, 19): 21,
-            (0, 19): 22, (0, 20): 22,
-            (1, 22): 23, (1, 21): 23,
-            (0, 21): 24, (0, 22): 24,
-    }
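-    # Worked example from the map above: on an odd row (row % 2 == 1), host1
-    # and host2 share outlet 1 ('.A1'); on an even row, host1 and host2 share
-    # outlet 2 ('.A2').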
-
-
-    @classmethod
-    def is_densified(cls, device_hostname):
-        """Whether the host is on a densified rack.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: True if on a densified rack, False otherwise.
-        """
-        return device_hostname.rack in (0, 12, 13)
-
-
-    @classmethod
-    def get_rpm_hostname(cls, device_hostname):
-        """Get rpm hostname.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: hostname of the rpm that has the device.
-
-        """
-        row = device_hostname.row
-        if row == 13:
-            logging.warn('Rule not implemented for row 13 in chromeos4')
-            return ''
-
-        # rpm row is like chromeos4-row1_2-rackX-rpmY
-        rpm_row = ('%d_%d' % (row - 1, row) if row % 2 == 0 else
-                   '%d_%d' % (row, row + 1))
-
-        if cls.is_densified(device_hostname):
-            # Densified rack has two rpms, decide which one the host belongs to
-            # Rule:
-            #     odd row number,  even host number -> rpm1
-            #     odd row number,  odd host number  -> rpm2
-            #     even row number, odd host number  -> rpm1
-            #     even row number, even host number -> rpm2
-            rpm_number = 1 if (row + device_hostname.host) % 2 == 1 else 2
-        else:
-            # Non-densified rack only has one rpm
-            rpm_number = 1
-        return 'chromeos%d-row%s-rack%d-rpm%d' % (
-                device_hostname.lab,
-                rpm_row, device_hostname.rack, rpm_number)
-
-
-    @classmethod
-    def get_rpm_outlet(cls, device_hostname):
-        """Get rpm outlet.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: rpm outlet, e.g. '.A1'
-
-        """
-        try:
-            outlet_map = (cls.DENSIFIED_RPM_OUTLET_MAP
-                          if cls.is_densified(device_hostname) else
-                          cls.RPM_OUTLET_MAP)
-            outlet_number = outlet_map[(device_hostname.row % 2,
-                                        device_hostname.host)]
-            return '.A%d' % outlet_number
-        except KeyError:
-            logging.error('Could not determine outlet for device %s',
-                          device_hostname)
-            return ''
-
-
-    @classmethod
-    def get_hydra_hostname(cls, device_hostname):
-        """Get hydra hostname.
-
-        @param device_hostname: A DeviceHostname named tuple.
-
-        @returns: hydra hostname
-
-        """
-        row = device_hostname.row
-        rack = device_hostname.rack
-        if row >= 1 and row <= 6 and rack >=1 and rack <= 11:
-            return 'chromeos-destiny-hydra1.cros'
-        elif row >= 7 and row <= 12 and rack >=1 and rack <= 11:
-            return 'chromeos-destiny-hydra2.cros'
-        elif row >= 1 and row <= 10 and rack >=12 and rack <= 13:
-            return 'chromeos-destiny-hydra3.cros'
-        elif row in [3, 4, 5, 6, 9, 10] and rack == 0:
-            return 'chromeos-destiny-hydra3.cros'
-        elif row == 13 and rack >= 0 and rack <= 11:
-            return 'chromeos-destiny-hydra3.cros'
-        else:
-            logging.error('Could not determine hydra hostname for %s',
-                          device_hostname)
-            return ''
-
-
-def parse_device_hostname(device_hostname):
-    """Parse device_hostname to DeviceHostname object.
-
-    @param device_hostname: A string, e.g. 'chromeos2-row2-rack4-host3'
-
-    @returns: A DeviceHostname named tuple or None if the
-              the hostname doesn't follow the pattern
-              defined in HOST_REGX.
-
-    """
-    m = re.match(HOST_REGX, device_hostname.strip())
-    if m:
-        return DeviceHostname(
-                lab=int(m.group(1)),
-                row=int(m.group(3)) if m.group(3) else None,
-                rack=int(m.group(4)),
-                host=int(m.group(5)))
-    else:
-        logging.error('Could not parse %s', device_hostname)
-        return None
-
-
-def generate_mapping(hosts, lab_configs):
-    """Generate device_hostname-rpm-outlet-hydra mapping.
-
-    @param hosts: hosts objects get from AFE.
-    @param lab_configs: A list of configuration classes,
-                        each one for a lab.
-
-    @returns: A dictionary that maps device_hostname to
-              (rpm_hostname, outlet, hydra_hostname)
-
-    """
-    # device hostname -> (rpm_hostname, outlet, hydra_hostname)
-    rpm_mapping = {}
-    for host in hosts:
-        device_hostname = parse_device_hostname(host.hostname)
-        if not device_hostname:
-            continue
-        for lab in lab_configs:
-            if lab.is_device_in_the_lab(device_hostname):
-                rpm_hostname = lab.get_rpm_hostname(device_hostname)
-                rpm_outlet = lab.get_rpm_outlet(device_hostname)
-                hydra_hostname = lab.get_hydra_hostname(device_hostname)
-                if not rpm_hostname or not rpm_outlet:
-                    logging.error(
-                            'Skipping device %s: could not determine '
-                            'rpm hostname or outlet.', host.hostname)
-                    break
-                rpm_mapping[host.hostname] = (
-                        rpm_hostname, rpm_outlet, hydra_hostname)
-                break
-        else:
-            logging.info(
-                    '%s is not in a known lab '
-                    '(oyster bay, atlantis, chaos, destiny)',
-                    host.hostname)
-    return rpm_mapping
-
-
-def output_csv(rpm_mapping, csv_file):
-    """Dump the rpm mapping dictionary to csv file.
-
-    @param rpm_mapping: A dictionary that maps device_hostname to
-                        (rpm_hostname, outlet, hydra_hostname)
-    @param csv_file: The name of the file to write to.
-
-    """
-    with open(csv_file, 'w') as f:
-        for hostname, rpm_info in rpm_mapping.iteritems():
-            line = ','.join(rpm_info)
-            line = ','.join([hostname, line])
-            f.write(line + '\n')
-
-
-if __name__ == '__main__':
-    logging.basicConfig(level=logging.DEBUG)
-    parser = argparse.ArgumentParser(
-            description='Generate device_hostname-rpm-outlet-hydra mapping '
-                        'file needed by add_host_powerunit_info.py')
-    parser.add_argument('--csv', type=str, dest='csv_file', required=True,
-                        help='The path to the csv file where we are going to '
-                             'write the mapping information to.')
-    parser.add_argument('--server', type=str, dest='server', default=None,
-                        help='AFE server that the script will be talking to. '
-                             'If not specified, will default to using the '
-                             'server in global_config.ini')
-    options = parser.parse_args()
-
-    AFE = frontend_wrappers.RetryingAFE(timeout_min=5, delay_sec=10,
-                                        server=options.server)
-    logging.info('Connected to %s', AFE.server)
-    rpm_mapping = generate_mapping(
-            AFE.get_hosts(),
-            [OysterBayConfig, AtlantisConfig, ChaosConfig, DestinyConfig])
-    output_csv(rpm_mapping, options.csv_file)
diff --git a/contrib/manage_powerunit_info.py b/contrib/manage_powerunit_info.py
deleted file mode 100755
index 845ab26..0000000
--- a/contrib/manage_powerunit_info.py
+++ /dev/null
@@ -1,185 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Manage power unit information for autotest hosts.
-
-  We store rpm hostname, outlet, hydra information for a host in cautotest
-  as host attributes. This tool allows you to add/modify/view/backup
-  rpm attributes for hosts.
-
-* Add/Modify power unit attributes:
-  Step 1: create csv:
-    Put attributes in a csv file, e.g. mapping.csv.
-    Each line in mapping.csv consists of
-        device_hostname, powerunit_hostname, powerunit_outlet, hydra_hostname,
-    separated by comma. For example:
-
-    chromeos-rack2-host1,chromeos-rack2-rpm1,.A1,chromeos-197-hydra1.mtv,
-    chromeos-rack2-host2,chromeos-rack2-rpm1,.A2,chromeos-197-hydra1.mtv,
-
-  Step 2: run
-    ./manage_powerunit_info.py upload --csv mapping_file.csv
-
-* View power unit attributes:
-    ./manage_powerunit_info.py list
-        -m "chromeos-rack2-host1,chromeos-rack2-host2"
-
-* Backup existing attributes for all hosts to a csv file:
-    ./manage_powerunit_info.py backup --csv backup.csv
-"""
-import argparse
-import csv
-import logging
-import os
-import sys
-
-import common
-
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
-from autotest_lib.site_utils.rpm_control_system import utils as rpm_utils
-
-
-# The host attribute key name for get rpm hostname.
-POWERUNIT_KEYS = [rpm_utils.POWERUNIT_HOSTNAME_KEY,
-                  rpm_utils.POWERUNIT_OUTLET_KEY,
-                  rpm_utils.HYDRA_HOSTNAME_KEY]
-DEFAULT_SERVER = global_config.global_config.get_config_value(
-        'SERVER', 'hostname', default=None)
-
-
-def add_powerunit_info_to_host(afe, device, keyvals):
-    """Add keyvals to the host's attributes in AFE.
-
-    @param afe: AFE server to talk to.
-    @param device: the device hostname, e.g. 'chromeos1-rack1-host1'
-    @param keyvals: A dictionary where keys are the values in POWERUNIT_KEYS.
-                    These are the power unit info about the device that we
-                    are going to insert into AFE as host attributes.
-    """
-    if not afe.get_hosts(hostname=device):
-        logging.debug('No host named %s', device)
-        return
-
-    logging.info('Adding host attributes to %s: %s', device, keyvals)
-    for key, val in keyvals.iteritems():
-        afe.set_host_attribute(key, val, hostname=device)
-
-
-def add_from_csv(afe, csv_file):
-    """Read power unit information from csv and add to host attributes.
-
-    @param afe: AFE server to talk to.
-    @param csv_file: A csv file, each line consists of device_hostname,
-                     powerunit_hostname powerunit_outlet, hydra_hostname
-                     separated by comma.
-    """
-    with open(csv_file) as f:
-        reader = csv.reader(f, delimiter=',')
-        for row in reader:
-            device = row[0].strip()
-            hydra = row[3].strip()
-            if not hydra:
-                hydra = None
-            keyvals = dict(zip(
-                    POWERUNIT_KEYS,
-                    [row[1].strip(), row[2].strip(), hydra]))
-            add_powerunit_info_to_host(afe, device, keyvals)
-
-
-def dump_to_csv(afe, csv_file):
-    """Dump power unit info of all hosts to a csv file.
-
-    @param afe: AFE server to talk to.
-    @param csv_file: A file to store the power unit information.
-
-    """
-    logging.info('Backing up host attributes to %s', csv_file)
-    with open(csv_file, 'w') as f:
-        hosts = afe.get_hosts()
-        for h in hosts:
-            logging.info('Processing %s', h.hostname)
-            f.write(h.hostname + ',')
-            for key in POWERUNIT_KEYS:
-                f.write(h.attributes.get(key, '') + ',')
-            f.write('\n')
-
-
-def list_powerunit_info(afe, devices):
-    """List power unit info for a list of hosts.
-
-    @param afe: AFE server to talk to.
-    @param devices: a list of device hostnames.
-    """
-    hosts = afe.get_hosts(hostname__in=devices)
-    if not hosts:
-        logging.error('No host found.')
-    for h in hosts:
-        info = h.hostname + ','
-        for key in POWERUNIT_KEYS:
-            info += h.attributes.get(key, '') + ','
-        print info
-
-
-def parse_options():
-    """Parse options"""
-    parser = argparse.ArgumentParser(
-            description=__doc__,
-            formatter_class=argparse.RawDescriptionHelpFormatter)
-    action_help = (
-            'upload: read rpm attributes from csv file and set the attributes. '
-            'list: list current attributes for a list of hosts. '
-            'backup: dump existing rpm attributes to a csv file (for backup).')
-    parser.add_argument(
-            'action', choices=('upload', 'list', 'backup'), help=action_help)
-    parser.add_argument('-f', '--csv_file', type=str, dest='csv_file',
-                        help='A path to a csv file. When upload, each line '
-                             'should consist of device_name, powerunit_hostname, '
-                             'powerunit_outlet, hydra_hostname, separated '
-                             'by comma. When dump, the file will be generated.')
-    parser.add_argument('-m', type=str, dest='hostnames', default='',
-                        help='A list of machine hostnames separated by comma, '
-                             'applicable to "list" command')
-    parser.add_argument('-s', '--server', type=str, dest='server',
-                        default=DEFAULT_SERVER,
-                        help='AFE server that the script will be talking to. '
-                             'If not specified, will default to using the '
-                             'server in global_config.ini')
-    options = parser.parse_args()
-    if options.action == 'upload' or options.action =='backup':
-        if not options.csv_file:
-            logging.error('Please specify a file with -f/--csv_file')
-            sys.exit(1)
-        file_exists = os.path.exists(options.csv_file)
-        if options.action == 'upload' and not file_exists:
-            logging.error('%s is not a valid file.', options.csv_file)
-            sys.exit(1)
-        if options.action == 'backup' and file_exists:
-            logging.error('%s already exists.', options.csv_file)
-            sys.exit(1)
-    if options.action == 'list' and not options.hostnames:
-        logging.error('Please specify hostnames with -m')
-        sys.exit(1)
-    return options
-
-
-if __name__ == '__main__':
-    logging.basicConfig(level=logging.DEBUG)
-    options = parse_options()
-    afe = frontend_wrappers.RetryingAFE(timeout_min=5, delay_sec=10,
-                                        server=options.server)
-    logging.info('Connected to %s', afe.server)
-    if options.action =='backup':
-        dump_to_csv(afe, options.csv_file)
-    elif options.action == 'upload':
-        confirm_msg = ('Upload rpm mapping from %s, are you sure?'
-                       % options.csv_file)
-        confirm = raw_input("%s (y/N) " % confirm_msg).lower() == 'y'
-        if confirm:
-            add_from_csv(afe, options.csv_file)
-    elif options.action == 'list':
-        list_powerunit_info(afe, [h.strip() for h in options.hostnames.split(',')])
diff --git a/contrib/modelviz.py b/contrib/modelviz.py
deleted file mode 100755
index 6126475..0000000
--- a/contrib/modelviz.py
+++ /dev/null
@@ -1,201 +0,0 @@
-#!/usr/bin/env python2
-"""Django model to DOT (Graphviz) converter
-by Antonio Cavedoni <antonio@cavedoni.org>
-
-Make sure your DJANGO_SETTINGS_MODULE is set to your project or
-place this script in the same directory of the project and call
-the script like this:
-
-$ python modelviz.py [-h] [-d] <app_label> ... <app_label> > <filename>.dot
-$ dot <filename>.dot -Tpng -o <filename>.png
-
-options:
-    -h, --help
-    show this help message and exit.
-
-    -d, --disable_fields
-    don't show the class member fields.
-"""
-__version__ = "0.8"
-__svnid__ = "$Id$"
-__license__ = "Python"
-__author__ = "Antonio Cavedoni <http://cavedoni.com/>"
-__contributors__ = [
-   "Stefano J. Attardi <http://attardi.org/>",
-   "limodou <http://www.donews.net/limodou/>",
-   "Carlo C8E Miron",
-   "Andre Campos <cahenan@gmail.com>",
-   "Justin Findlay <jfindlay@gmail.com>",
-   ]
-
-import getopt, sys
-
-from django.core.management import setup_environ
-
-try:
-    import settings
-except ImportError:
-    pass
-else:
-    setup_environ(settings)
-
-from django.template import Template, Context
-from django.db import models
-from django.db.models import get_models
-from django.db.models.fields.related import \
-    ForeignKey, OneToOneField, ManyToManyField
-
-try:
-    from django.db.models.fields.generic import GenericRelation
-except ImportError:
-    from django.contrib.contenttypes.generic import GenericRelation
-
-head_template = """
-digraph name {
-  fontname = "Helvetica"
-  fontsize = 8
-
-  node [
-    fontname = "Helvetica"
-    fontsize = 8
-    shape = "plaintext"
-  ]
-   edge [
-    fontname = "Helvetica"
-    fontsize = 8
-  ]
-
-"""
-
-body_template = """
-  {% for model in models %}
-    {% for relation in model.relations %}
-    {{ relation.target }} [label=<
-        <TABLE BGCOLOR="palegoldenrod" BORDER="0" CELLBORDER="0" CELLSPACING="0">
-        <TR><TD COLSPAN="2" CELLPADDING="4" ALIGN="CENTER" BGCOLOR="olivedrab4"
-        ><FONT FACE="Helvetica Bold" COLOR="white"
-        >{{ relation.target }}</FONT></TD></TR>
-        </TABLE>
-        >]
-    {{ model.name }} -> {{ relation.target }}
-    [label="{{ relation.name }}"] {{ relation.arrows }};
-    {% endfor %}
-  {% endfor %}
-
-  {% for model in models %}
-    {{ model.name }} [label=<
-    <TABLE BGCOLOR="palegoldenrod" BORDER="0" CELLBORDER="0" CELLSPACING="0">
-     <TR><TD COLSPAN="2" CELLPADDING="4" ALIGN="CENTER" BGCOLOR="olivedrab4"
-     ><FONT FACE="Helvetica Bold" COLOR="white"
-     >{{ model.name }}</FONT></TD></TR>
-
-    {% if not disable_fields %}
-        {% for field in model.fields %}
-        <TR><TD ALIGN="LEFT" BORDER="0"
-        ><FONT {% if field.blank %}COLOR="#7B7B7B" {% endif %}FACE="Helvetica Bold">{{ field.name }}</FONT
-        ></TD>
-        <TD ALIGN="LEFT"
-        ><FONT {% if field.blank %}COLOR="#7B7B7B" {% endif %}FACE="Helvetica Bold">{{ field.type }}</FONT
-        ></TD></TR>
-        {% endfor %}
-    {% endif %}
-    </TABLE>
-    >]
-  {% endfor %}
-"""
-
-tail_template = """
-}
-"""
-
-def generate_dot(app_labels, **kwargs):
-    disable_fields = kwargs.get('disable_fields', False)
-
-    dot = head_template
-
-    for app_label in app_labels:
-        app = models.get_app(app_label)
-        graph = Context({
-            'name': '"%s"' % app.__name__,
-            'disable_fields': disable_fields,
-            'models': []
-            })
-
-        for appmodel in get_models(app):
-            model = {
-                'name': appmodel.__name__,
-                'fields': [],
-                'relations': []
-                }
-
-            # model attributes
-            def add_attributes():
-                model['fields'].append({
-                    'name': field.name,
-                    'type': type(field).__name__,
-                    'blank': field.blank
-                    })
-
-            for field in appmodel._meta.fields:
-                add_attributes()
-
-            if appmodel._meta.many_to_many:
-                for field in appmodel._meta.many_to_many:
-                    add_attributes()
-
-            # relations
-            def add_relation(extras=""):
-                _rel = {
-                    'target': field.rel.to.__name__,
-                    'type': type(field).__name__,
-                    'name': field.name,
-                    'arrows': extras
-                    }
-                if _rel not in model['relations']:
-                    model['relations'].append(_rel)
-
-            for field in appmodel._meta.fields:
-                if isinstance(field, ForeignKey):
-                    add_relation()
-                elif isinstance(field, OneToOneField):
-                    add_relation("[arrowhead=none arrowtail=none]")
-
-            if appmodel._meta.many_to_many:
-                for field in appmodel._meta.many_to_many:
-                    if isinstance(field, ManyToManyField):
-                        add_relation("[arrowhead=normal arrowtail=normal]")
-                    elif isinstance(field, GenericRelation):
-                        add_relation(
-                            '[style="dotted"] [arrowhead=normal arrowtail=normal]')
-            graph['models'].append(model)
-
-        t = Template(body_template)
-        dot += '\n' + t.render(graph)
-
-    dot += '\n' + tail_template
-
-    return dot
-
-def main():
-    try:
-        opts, args = getopt.getopt(sys.argv[1:], "hd",
-                    ["help", "disable_fields"])
-    except getopt.GetoptError, error:
-        print __doc__
-        sys.exit(error)
-    else:
-        if not args:
-            print __doc__
-            sys.exit()
-
-    kwargs = {}
-    for opt, arg in opts:
-        if opt in ("-h", "--help"):
-            print __doc__
-            sys.exit()
-        if opt in ("-d", "--disable_fields"):
-            kwargs['disable_fields'] = True
-    print generate_dot(args, **kwargs)
-
-if __name__ == "__main__":
-    main()
diff --git a/contrib/print_host_labels.py b/contrib/print_host_labels.py
deleted file mode 100755
index 04d32cb..0000000
--- a/contrib/print_host_labels.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python2
-
-"""
-Usage: ./print_host_labels.py <IP.or.hostname>
-"""
-
-import sys
-import common
-from autotest_lib.server.hosts import factory
-
-if len(sys.argv) < 2:
-    print 'Usage: %s <IP.or.hostname>' % sys.argv[0]
-    exit(1)
-
-host = factory.create_host(sys.argv[1])
-labels = host.get_labels()
-print 'Labels:'
-print labels
diff --git a/contrib/repair_hosts b/contrib/repair_hosts
deleted file mode 100755
index e7dd435..0000000
--- a/contrib/repair_hosts
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/python2
-# Simple utility to trigger a Repair job on a bunch of hosts.
-#
-# CAVEAT:  no error checking; if any argument isn't a valid
-# host, it will be silently ignored.  If there are no command
-# line arguments, silently succeed.
-
-import sys
-
-import common
-
-from autotest_lib.server import frontend
-
-# For simplicity, we want to do nothing if there are no hosts named
-# on the command line.  That makes it easy to do stuff like this:
-#     dut-status -b $BOARD -p bvt -n | xargs repair_hosts
-#
-# By doing nothing, we get more useful behavior if all the DUTs selected
-# by `dut-status` are actually working.
-#
-# Note that we have to specifically test for an empty host list: I
-# _think_ (but I don't know) that the AFE calls operate on all the
-# hosts if there are no arguments given.  I do know for certain that
-# with hostnames=[], the call takes longer than I was willing to
-# wait.
-
-if len(sys.argv) >= 2:
-    frontend.AFE().repair_hosts(hostnames=sys.argv[1:])
diff --git a/contrib/repair_hosts_throttled.py b/contrib/repair_hosts_throttled.py
deleted file mode 100755
index c7a21ca..0000000
--- a/contrib/repair_hosts_throttled.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/python2
-# Takes a list of hostnames (via file) and schedules host repair
-# jobs based on the delay specified in order to throttle the jobs
-# and not overwhelm the system.
-
-import argparse
-import sys
-
-import common
-import time
-
-from autotest_lib.server import frontend
-
-def GetParser():
-    """Creates the argparse parser."""
-    parser = argparse.ArgumentParser(description=__doc__)
-    parser.add_argument('--input', type=str, action='store',
-                        help='File with hostnames to repair')
-    parser.add_argument('--delay_seconds', type=int, action='store', default=5,
-                        help='Delay between scheduling repair jobs')
-    return parser
-
-
-def main(argv):
-    parser = GetParser()
-    options = parser.parse_args(argv)
-
-    afe = frontend.AFE()
-
-    with open(options.input) as input:
-        hostnames = input.readlines()
-        remaining = len(hostnames)
-        delay = options.delay_seconds
-        print "Scheduling %d repairs with %s delay in seconds" \
-              % (remaining, delay)
-        for hostname in hostnames:
-            hostname = hostname.strip()
-            afe.repair_hosts([hostname])
-            remaining = remaining - 1
-            print "%s host repair scheduled with %d remaining" \
-                  % (hostname, remaining)
-            time.sleep(delay)
-
-
-if __name__ == '__main__':
-    sys.exit(main(sys.argv[1:]))
diff --git a/contrib/reverify_hosts b/contrib/reverify_hosts
deleted file mode 100755
index da449bf..0000000
--- a/contrib/reverify_hosts
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/python2
-# Simple utility to trigger a Verify job on a bunch of hosts.
-#
-# CAVEAT:  no error checking; if any argument isn't a valid
-# host, it will be silently ignored.  If there are no command
-# line arguments, silently succeed.
-
-import sys
-
-import common
-
-from autotest_lib.server import frontend
-
-# For simplicity, we want to do nothing if there are no hosts named
-# on the command line.  That makes it easy to do stuff like this:
-#     dut-status -b $BOARD -p bvt -n | xargs reverify_hosts
-#
-# By doing nothing, we get more useful behavior if all the DUTs selected
-# by `dut-status` are actually working.
-#
-# Note that we have to specifically test for an empty host list: I
-# _think_ (but I don't know) that the AFE calls operate on all the
-# hosts if there are no arguments given.  I do know for certain that
-# with hostnames=[], the call takes longer than I was willing to
-# wait.
-
-if len(sys.argv) >= 2:
-    frontend.AFE().reverify_hosts(hostnames=sys.argv[1:])
diff --git a/contrib/run-stable-update b/contrib/run-stable-update
deleted file mode 100755
index f4b3b36..0000000
--- a/contrib/run-stable-update
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-SCRIPT_DIR=$(dirname $(readlink -f $0))
-cd ${SCRIPT_DIR}/..
-
-LOGDIR=logs/stable-version
-ASSIGN_STABLE=site_utils/stable_images/assign_stable_images.py
-
-mkdir -p ${LOGDIR}
-NOTIFY=(
-  chromeos-test-monitors@google.com
-)
-
-# Redirect onto a log file.  For debug purposes, skip redirection if
-# there's a command line argument (we ignore what the argument is), or
-# if there's no log directory.
-#
-TAG=$(date '+%Y-%W')
-if [ $# -eq 0 -a -d ${LOGDIR} ]; then
-    LOGFILE="update-${TAG}.log"
-    exec >>${LOGDIR}/${LOGFILE} 2>&1
-fi
-
-trap 'rm -f ${TMPFILE}' EXIT
-TMPFILE=$(mktemp)
-
-date
-$ASSIGN_STABLE --web localhost 2>&1 | tee ${TMPFILE}
-echo
-
-# If we have a log directory, clean it up, and send e-mail notification.
-# The log files change name each week, so by throwing out all but the
-# most recent 14 files, we keep about 3 months of history, plus this
-# week's log.
-#
-if [ -d ${LOGDIR} ]; then
-    SUBJECT="Stable version update summary ${TAG}"
-    site_utils/gmail_lib.py -s "${SUBJECT}" "${NOTIFY[@]}" <${TMPFILE}
-    rm -f $(ls -r ${LOGDIR}/update-*.log | sed '1,14 d')
-fi
diff --git a/contrib/shared_hosts.py b/contrib/shared_hosts.py
deleted file mode 100755
index b45d3c8..0000000
--- a/contrib/shared_hosts.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/python2
-
-"""
-Finds hosts that are shared between both cautotest and cautotest-cq.
-"""
-
-import common
-from autotest_lib.server import frontend
-
-cautotest = frontend.AFE(server='cautotest')
-cautotest_cq = frontend.AFE(server='cautotest-cq')
-
-cautotest_hosts = [x['hostname'] for x in cautotest.run('get_hosts')
-                   if not x['locked']]
-cautotest_cq_hosts = [x['hostname'] for x in cautotest_cq.run('get_hosts')
-                      if not x['locked']]
-
-for host in cautotest_hosts:
-    if host in cautotest_cq_hosts:
-        print host
diff --git a/contrib/stage_build.py b/contrib/stage_build.py
deleted file mode 100755
index c7a5f59..0000000
--- a/contrib/stage_build.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/python2 -t
-
-"""
-Nice little script to quickly stage a build onto a devserver.
-"""
-
-import argparse
-import sys
-
-import common
-from autotest_lib.client.common_lib.cros import dev_server
-from autotest_lib.server.cros.dynamic_suite import tools
-from autotest_lib.server import frontend
-
-def parse_args():
-  """Parse command line arguments."""
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--build', help='e.g. lumpy-release/R26-4321.0.0')
-  parser.add_argument('--server', help='OPTIONAL: e.g. devserver.cros')
-  parser.add_argument('--host',
-                      help='OPTIONAL: e.g. chromeos2-row3-rack4-host5')
-
-  args = parser.parse_args()
-  if not args.build:
-    parser.print_help()
-    sys.exit(1)
-
-  return args
-
-def main():
-  """Stage a build on the devserver."""
-  options = parse_args()
-  if options.server:
-    server = 'http://%s/' % options.server
-    ds = dev_server.ImageServer(server)
-  else:
-    ds = dev_server.ImageServer.resolve(options.build)
-
-  print "Downloading %s..." % options.build
-  ds.stage_artifacts(options.build, ['full_payload', 'stateful',
-                                     'control_files', 'autotest_packages'])
-  if options.host:
-    print "Poking job_repo_url on %s..." % options.host
-    repo_url = tools.get_package_url(ds.url(), options.build)
-    AFE = frontend.AFE()
-    AFE.set_host_attribute('job_repo_url', repo_url, hostname=options.host)
-
-if __name__ == '__main__':
-  main()
diff --git a/contrib/suite_utils.py b/contrib/suite_utils.py
new file mode 100755
index 0000000..4983c48
--- /dev/null
+++ b/contrib/suite_utils.py
@@ -0,0 +1,359 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import ast
+from functools import partial
+import os
+import subprocess
+import graphviz
+import common
+
+from server.cros.dynamic_suite.control_file_getter import FileSystemGetter
+from server.cros.dynamic_suite.suite_common import retrieve_for_suite
+
+class TestSuite(object):
+    def __init__(self, cf_object, name, file_path):
+        self.name = name
+        self.cf_object = cf_object
+        self.tests = []
+        self.file_path = file_path
+
+    def add_test(self, test_object):
+        self.tests.append(test_object)
+
+    def get_tests(self):
+        return self.tests
+
+
+class TestObject(object):
+    def __init__(self, cf_object, file_path):
+        self.name = cf_object.name
+        self.type = 'tast' if ('tast' in self.name) else 'tauto'
+        self.cf_object = cf_object
+        self.file_path = file_path
+        self.tast_exprs = ''
+        self.tast_string = ''
+
+    def get_attributes(self):
+        return self.cf_object.attributes
+
+    def is_tast(self):
+        return self.type == 'tast'
+
+    # Use the Python ast module to parse the control file's run() function
+    # and grab the test_exprs passed to the job.run_test('tast', ...) call.
+    def parse_cf_for_tast_string(self):
+        with open(self.file_path, 'r') as cf:
+            mod = ast.parse(cf.read())
+            for n in mod.body:
+                if n.__class__ != ast.FunctionDef:
+                    continue
+                if n.name != 'run':
+                    continue
+                for sub_node in n.body:
+                    if sub_node.__class__ != ast.Expr:
+                        continue
+                    try:
+                        fn_name = sub_node.value.func.value.id
+                        if fn_name != 'job':
+                            continue
+                    except:
+                        continue
+                    if sub_node.value.func.attr != 'run_test':
+                        continue
+                    for keyword in sub_node.value.keywords:
+                        if keyword.arg == 'test_exprs' and keyword.value.__class__ == ast.List:
+                            test_exprs = []
+                            regex_list = False
+                            for elem in keyword.value.elts:
+                                try:
+                                    test_exprs.append(elem.s)
+                                    regex_list = ('(' in elem.s or regex_list)
+                                except AttributeError:
+                                    print('WARNING: Non-standard test found, check '
+                                          + self.file_path + ' manually')
+                                    break
+                            if regex_list:
+                                self.tast_string = ' '.join(test_exprs)
+                            else:
+                                for it in range(len(test_exprs) - 1):
+                                    test_exprs[it] = test_exprs[it] + ','
+                                self.tast_string = ' '.join(test_exprs)
+
+    def enumerate_tast_from_test_expr(self):
+        self.parse_cf_for_tast_string()
+        try:
+            self.tast_exprs = self.tast_string.split(', ')
+        except AttributeError:
+            print('WARNING: Non-standard test found, check ' +
+                  self.file_path + ' manually')
+
+    def enumerate_tests_from_tast_exprs(self, dut):
+        tests = []
+        print(self.tast_exprs)
+        for expr in self.tast_exprs:
+            en = subprocess.check_output(
+                    ['tast', 'list', str(dut),
+                     str(expr)], encoding='utf-8')
+            for t in en.split('\n'):
+                if t == '':
+                    continue
+                tests.append(t)
+            en = subprocess.check_output([
+                    'tast', 'list', '-buildbundle=crosint',
+                    str(dut),
+                    str(expr)
+            ],
+                                         encoding='utf-8')
+            for t in en.split('\n'):
+                if t == '':
+                    continue
+                tests.append(t)
+
+        return tests
+
+    def describe(self):
+        return 'test named ' + self.name + ' of type ' + self.type
+
+
+class TestParser(object):
+    def get_all_test_objects(self, locations):
+        tests = {}
+        suites = {}
+
+        cf_getter = FileSystemGetter(locations)
+        for (file_path, cf_object) in retrieve_for_suite(cf_getter,
+                                                         '').items():
+            if cf_object.test_class == 'suite':
+                suites[cf_object.name] = (TestSuite(cf_object, cf_object.name,
+                                                    file_path))
+            else:
+                tests[cf_object.name] = (TestObject(cf_object, file_path))
+                if tests[cf_object.name].is_tast():
+                    tests[cf_object.name].enumerate_tast_from_test_expr()
+
+        return tests, suites
+
+
+class TestManager(object):
+    def __init__(self):
+        self.tests = {}
+        self.suites = {}
+        self.dut = None
+        self.log_functions = [partial(print)]
+        self.test_parser = TestParser()
+
+    def log(self, log_text, *args):
+        for fn in self.log_functions:
+            fn(log_text, *args)
+
+    def csv_logger(self, log_text, file_path):
+        with open(file_path, 'a') as log:
+            log.write(log_text)
+
+    def register_csv_logger(self, file_path):
+        if os.path.exists(file_path):
+            os.remove(file_path)
+        print_to_csv = partial(self.csv_logger, file_path=file_path)
+        self.log_functions.append(print_to_csv)
+        print_to_csv('suite,test\n')
+
+    def initialize_from_fs(self, locations):
+        self.tests, self.suites = self.test_parser.get_all_test_objects(
+                locations)
+
+    def process_all_tests(self):
+        for test, test_object in self.tests.items():
+            for suite in test_object.get_attributes():
+                target_suite = self.find_suite_named(suite)
+                if target_suite is not None:
+                    target_suite.add_test(test)
+
+    def set_dut(self, target):
+        self.dut = target
+
+    def get_dut(self):
+        if self.dut is not None:
+            return self.dut
+        else:
+            raise AttributeError(
+                    'DUT address not set; please use the --dut flag to indicate the IP address of the DUT'
+            )
+
+    def find_test_named(self, test_name):
+        try:
+            queried_test = self.tests[test_name]
+            return queried_test
+        except KeyError:
+            return None
+
+    def find_suite_named(self, suite_name):
+        try:
+            if suite_name[0:6] == 'suite.':
+                queried_suite = self.suites[suite_name[6:]]
+            elif suite_name[0:6] == 'suite:':
+                queried_suite = self.suites[suite_name[6:]]
+            else:
+                queried_suite = self.suites[suite_name]
+            return queried_suite
+        except KeyError:
+            return None
+
+    def list_suite_named(self, suite_name, pretty=False):
+        suite_tests = []
+        suite = self.find_suite_named(suite_name)
+
+        if suite is None:
+            if pretty:
+                return '\n'
+            return suite_tests
+
+        for test in suite.get_tests():
+            if self.tests[test].is_tast():
+                found_tests = self.tests[test].enumerate_tests_from_tast_exprs(
+                        self.get_dut())
+                for t in found_tests:
+                    if t == '':
+                        continue
+                    suite_tests.append('tast.' + str(t))
+            else:
+                suite_tests.append(test)
+
+        if pretty:
+            out_as_string = ''
+            for test in suite_tests:
+                out_as_string += suite_name + ',' + str(test) + '\n'
+            return out_as_string
+        return suite_tests
+
+    def gs_query_link(self, suite_name):
+        test_names = ','.join([
+                test for test in self.list_suite_named(suite_name)
+                if test != ''
+        ])
+
+        query = 'https://dashboards.corp.google.com/'
+        query += '_86acf8a8_50a5_48e0_829e_fbf1033d3ac6'
+        query += '?f=test_name:in:' + test_names
+        query += '&f=create_date_7_day_filter:in:Past%207%20Days'
+        query += '&f=test_type:in:Tast,Autotest'
+
+        return query
+
+    def graph_suite_named(self, suite_name, dot_graph=None):
+        suite_tests = self.list_suite_named(suite_name)
+        nodes_at_rank = 0
+
+        if dot_graph is None:
+            dot_graph = graphviz.Digraph(comment=suite_name)
+
+        dot_graph.node(suite_name, suite_name)
+        last_level = suite_name
+        child_graph = None
+
+        for test_name in suite_tests:
+            if nodes_at_rank == 0:
+                child_graph = graphviz.Digraph()
+                dot_graph.edge(last_level, test_name)
+                last_level = test_name
+
+            child_graph.node(test_name, test_name)
+            dot_graph.edge(suite_name, test_name)
+
+            if nodes_at_rank == 6:
+                dot_graph.subgraph(child_graph)
+
+            nodes_at_rank += 1
+            nodes_at_rank %= 7
+
+        dot_graph.subgraph(child_graph)
+
+        return dot_graph
+
+    def diff_test_suites(self, suite_a, suite_b):
+        res = ''
+        suite_a_set = set(self.list_suite_named(suite_a))
+        suite_b_set = set(self.list_suite_named(suite_b))
+        res = res + ('Suite B (+)' + str(list(suite_b_set - suite_a_set)))
+        res = res + '\n'
+        res = res + ('Suite B (-)' + str(list(suite_a_set - suite_b_set)))
+        return res
+
+
+def main(args):
+    tests = TestManager()
+
+    basepath = os.path.dirname(os.path.abspath(__file__))
+    tests.initialize_from_fs([(basepath + '/../test_suites'),
+                              (basepath + '/../server/site_tests'),
+                              (basepath + '/../client/site_tests')])
+    tests.process_all_tests()
+
+    if args.csv is not None:
+        tests.register_csv_logger(args.csv)
+    if args.dut is not None:
+        tests.set_dut(args.dut)
+    if args.find_test is not None:
+        test = tests.find_test_named(args.find_test)
+        if test is not None:
+            tests.log(test.file_path)
+        else:
+            tests.log('Queried test not found')
+    if args.find_suite is not None:
+        suite = tests.find_suite_named(args.find_suite)
+        if suite is not None:
+            tests.log(suite.file_path)
+        else:
+            tests.log('Queried suite not found')
+    if args.list_suite is not None:
+        tests.log(tests.list_suite_named(args.list_suite, pretty=True))
+    if args.list_multiple_suites is not None:
+        for suite_name in args.list_multiple_suites:
+            tests.log(tests.list_suite_named(suite_name, pretty=True))
+    if args.diff is not None:
+        tests.log(tests.diff_test_suites(args.diff[0], args.diff[1]))
+    if args.graph_suite is not None:
+        graph = tests.graph_suite_named(args.graph_suite)
+        graph.render('./suite_data/suite_viz.gv', format='png')
+    if args.gs_dashboard is not None:
+        link = tests.gs_query_link(args.gs_dashboard)
+        tests.log(link)
+
+
+if __name__ == '__main__':
+    # Pass in the DUT's address (ip:port) used for tast test enumeration over ssh.
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--csv',
+                        help='supply csv file path for logging output')
+    parser.add_argument(
+            '--diff',
+            nargs=2,
+            help=
+            'show diff between two suites. Ex: --diff bvt-tast-cq pvs-tast-cq')
+    parser.add_argument('--find_test',
+                        help='find control file for test_name')
+    parser.add_argument('--find_suite',
+                        help='find control file for suite_name')
+    parser.add_argument(
+            '--graph_suite',
+            help=
+            'graph test dependencies of suite_name, will output to contrib/suite_data'
+    )
+    parser.add_argument('--list_suite',
+                        help='list units in suite_name')
+    parser.add_argument(
+            '--list_multiple_suites',
+            nargs='*',
+            help='list units in suite_name_1 suite_name_2 suite_name_n')
+    parser.add_argument('--dut',
+                        help='ip address and port for tast enumeration')
+    parser.add_argument(
+            '--gs_dashboard',
+            help='generate green stainless dashboard for suite_name')
+    parsed_args = parser.parse_args()
+
+    main(parsed_args)
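
As a quick orientation to the new contrib/suite_utils.py, the sketch below shows how the
TestManager class is expected to be driven programmatically, mirroring what main() does for
the --list_suite flag. The sys.path setup, the relative control-file paths, the assumption
that the graphviz module is installed, and the suite name 'bvt-tast-cq' are all illustrative
assumptions, not values defined by this change.

    # Minimal usage sketch, assuming it runs from the root of an autotest checkout.
    import sys
    sys.path.insert(0, 'contrib')  # assumption: make suite_utils importable
    from suite_utils import TestManager

    tests = TestManager()
    tests.initialize_from_fs(['test_suites',          # suite control files
                              'server/site_tests',    # server-side tests
                              'client/site_tests'])   # client-side tests
    tests.process_all_tests()
    # 'bvt-tast-cq' is a hypothetical suite name; suites that contain Tast tests
    # additionally need tests.set_dut('<ip:port>') before they can be listed.
    print(tests.list_suite_named('bvt-tast-cq', pretty=True))
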
diff --git a/contrib/upload_results.py b/contrib/upload_results.py
new file mode 100755
index 0000000..3b6fa4e
--- /dev/null
+++ b/contrib/upload_results.py
@@ -0,0 +1,755 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import logging as log
+import os
+import re
+import shlex
+import shutil
+import subprocess
+import multiprocessing
+import sys
+import time
+import uuid
+import json
+import functools
+import glob
+
+from google.cloud import storage
+from google.api_core import exceptions as cloud_exceptions
+# pylint: disable=no-name-in-module, import-error
+
+import common
+from autotest_lib.client.common_lib import global_config
+from autotest_lib.client.common_lib import mail, pidfile
+from autotest_lib.tko.parse import parse_one, export_tko_job_to_file
+
+# Appends the moblab source paths for the pubsub wrapper
+sys.path.append('/mnt/host/source/src/platform/moblab/src')
+from moblab_common import pubsub_client
+
+STATUS_FILE = "status"
+STATUS_LOG_FILE = "status.log"
+KEYVAL_FILE = "keyval"
+NEW_KEYVAL_FILE = "new_keyval"
+UPLOADED_STATUS_FILE = ".uploader_status"
+STATUS_GOOD = "PUBSUB_SENT"
+FAKE_MOBLAB_ID_FILE = "fake_moblab_id_do_not_delete.txt"
+GIT_HASH_FILE = "git_hash.txt"
+GIT_COMMAND = ("git log --pretty=format:'%h -%d %s (%ci) <%an>'"
+               " --abbrev-commit -20")
+AUTOTEST_DIR = "/mnt/host/source/src/third_party/autotest/files/"
+DEFAULT_SUITE_NAME = "default_suite"
+SUITE_NAME_REGEX = r"Fetching suite for suite named (.+?)\.\.\."
+DEBUG_FILE_PATH = "debug/test_that.DEBUG"
+CONFIG_DIR = os.path.dirname(os.path.abspath(__file__)) + "/config/"
+DEFAULT_BOTO_CONFIG = CONFIG_DIR + ".boto_upload_utils"
+UPLOAD_CONFIG = CONFIG_DIR + "upload_config.json"
+SERVICE_ACCOUNT_CONFIG = CONFIG_DIR + ".service_account.json"
+
+logging = log.getLogger(__name__)
+
+
+def parse_arguments(argv):
+    """Creates the argument parser.
+
+    Args:
+        argv: A list of input arguments.
+
+    Returns:
+        A parser object for input arguments.
+    """
+    parser = argparse.ArgumentParser(description=__doc__)
+    subparsers = parser.add_subparsers(
+            help='select sub option for test result utility',
+            dest='subcommand')
+    subparsers.required = True
+    parser.add_argument("-v",
+                        "--verbose",
+                        dest='verbose',
+                        action='store_true',
+                        help="Enable verbose (debug) logging.")
+    parser.add_argument("-q",
+                        "--quiet",
+                        dest='quiet',
+                        action='store_true',
+                        help="Quiet mode for background call")
+    def_logfile = "/tmp/" + os.path.basename(
+            sys.argv[0]).split(".")[0] + ".log"
+    parser.add_argument("-l",
+                        "--logfile",
+                        type=str,
+                        required=False,
+                        default=def_logfile,
+                        help="Full path to logfile. Default: " + def_logfile)
+
+    # configuration subcommand to create config file and populate environment
+    config_parser = subparsers.add_parser(
+            name="config",
+            help='set up the config files used to upload test results to CPCon')
+    config_parser.add_argument(
+            "-b",
+            "--bucket",
+            type=str,
+            required=True,
+            help="The GCS bucket that test results are uploaded to, e.g."
+            "'gs://xxxx'.")
+    config_parser.add_argument("-f",
+                               "--force",
+                               dest='force',
+                               action="store_true",
+                               help="Force overwrite of previous config files")
+
+    upload_parser = subparsers.add_parser(name="upload",
+                                          help='upload test results to CPCon')
+    upload_parser.add_argument(
+            "--bug",
+            type=_valid_bug_id,
+            required=False,
+            help=
+            "Write bug id to the test results. Each test entry can only have "
+            "at most 1 bug id. Optional.")
+    upload_parser.add_argument(
+            "-d",
+            "--directory",
+            type=str,
+            required=True,
+            help="The directory of non-Moblab test results.")
+    upload_parser.add_argument(
+            "--parse_only",
+            action='store_true',
+            help="Generate job.serialize locally but do not upload test "
+            "directories and not send pubsub messages.")
+    upload_parser.add_argument(
+            "--upload_only",
+            action='store_true',
+            help="Leave existing protobuf files as-is, only upload "
+            "directories and send pubsub messages.")
+    upload_parser.add_argument(
+            "-f",
+            "--force",
+            dest='force',
+            action='store_true',
+            help=
+            "force re-upload of results even if results were already successfully uploaded."
+    )
+    upload_parser.add_argument(
+            "-s",
+            "--suite",
+            type=str,
+            default=None,
+            help="The suite is used to identify the type of test results,"
+            "e.g. 'power' for platform power team. If not specific, the "
+            "default value is 'default_suite'.")
+    return parser.parse_args(argv)
+
+
+def _confirm_option(question):
+    """
+        Get a yes/no answer from the user via command line.
+
+    Args:
+        question: string, question to ask the user.
+
+    Returns:
+        A boolean. True if yes; False if no.
+    """
+    expected_answers = ['y', 'yes', 'n', 'no']
+    answer = ''
+    while answer not in expected_answers:
+        answer = input(question + "(y/n): ").lower().strip()
+    return answer[0] == "y"
+
+
+def _read_until_string(pipe, stop_string):
+    lines = [""]
+    while True:
+        c = pipe.read(1)
+        lines[-1] = lines[-1] + c.decode("utf-8")
+        if stop_string == lines[-1]:
+            return lines
+        if c.decode("utf-8") == "\n":
+            lines.append("")
+
+
+def _configure_environment(parsed_args):
+    # create config directory if not exists
+    os.makedirs(CONFIG_DIR, exist_ok=True)
+
+    if os.path.exists(UPLOAD_CONFIG) and not parsed_args.force:
+        logging.error("Environment already configured, run with --force")
+        exit(1)
+
+    # call the gsutil config tool to set up accounts
+    if os.path.exists(DEFAULT_BOTO_CONFIG + ".bak"):
+        os.remove(DEFAULT_BOTO_CONFIG + ".bak")
+
+    if os.path.exists(DEFAULT_BOTO_CONFIG):
+        os.remove(DEFAULT_BOTO_CONFIG)
+    os.mknod(DEFAULT_BOTO_CONFIG)
+    os.environ["BOTO_CONFIG"] = DEFAULT_BOTO_CONFIG
+    os.environ[
+            "GOOGLE_APPLICATION_CREDENTIALS"] = CONFIG_DIR + ".service_account.json"
+    with subprocess.Popen(["gsutil", "config"],
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE,
+                          stdin=subprocess.PIPE) as sp:
+        lines = _read_until_string(sp.stdout, "Enter the authorization code: ")
+        code = input("enter auth code from " + str(lines[1]) + ": ")
+        sp.stdin.write(bytes(code + '\n', "utf-8"))
+        sp.stdin.flush()
+        lines = _read_until_string(sp.stdout, "What is your project-id? ")
+        sp.stdin.write(bytes(parsed_args.bucket + '\n', "utf-8"))
+        sp.stdin.flush()
+
+    subprocess.run([
+            "gsutil", "cp",
+            "gs://" + parsed_args.bucket + "/.service_account.json", CONFIG_DIR
+    ])
+    subprocess.run([
+            "gsutil", "cp",
+            "gs://" + parsed_args.bucket + "/pubsub-key-do-not-delete.json",
+            CONFIG_DIR
+    ])
+
+    sa_filename = ""
+    if os.path.exists(CONFIG_DIR + "/.service_account.json"):
+        sa_filename = ".service_account.json"
+    elif os.path.exists(CONFIG_DIR + "/pubsub-key-do-not-delete.json"):
+        sa_filename = "pubsub-key-do-not-delete.json"
+    else:
+        logging.error("No pubsub key found in bucket, failed config!")
+        exit(1)
+
+    # deposit parsed_args.bucket to the json file
+    with open(UPLOAD_CONFIG, "w") as cf:
+        settings = {}
+        settings["bucket"] = parsed_args.bucket
+        settings["service_account"] = CONFIG_DIR + sa_filename
+        settings["boto_key"] = DEFAULT_BOTO_CONFIG
+
+        cf.write(json.dumps(settings))
+
+
+def _load_config():
+    mandatory_keys = ["bucket", "service_account", "boto_key"]
+
+    if not os.path.exists(UPLOAD_CONFIG):
+        logging.error("Missing mandatory config file, run config command")
+        exit(1)
+    with open(UPLOAD_CONFIG, "r") as cf:
+        settings = json.load(cf)
+
+    for key in mandatory_keys:
+        if key not in settings:
+            logging.error("Missing mandatory setting " + str(key) +
+                          ", run config command")
+            exit()
+
+    os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = settings["service_account"]
+    os.environ["BOTO_CONFIG"] = settings["boto_key"]
+    return argparse.Namespace(**settings)
+
+
+class ResultsManager:
+    def __init__(self, results_parser, results_sender):
+        self.parent_directories = []
+        self.result_directories = set()
+        self.results = []
+        self.results_parser = results_parser
+        self.results_sender = results_sender
+        self.bug_id = None
+        self.suite_name = ""
+
+        self.moblab_id = self.get_fake_moblab_id()
+
+    def new_directory(self, parent_dir: str):
+        self.parent_directories.append(parent_dir)
+
+    def enumerate_all_directories(self):
+        self.result_directories = set()
+        for parent_dir in self.parent_directories:
+            self.enumerate_result_directories(parent_dir)
+
+    def enumerate_result_directories(self, parent_dir):
+        """ Gets all test directories.
+
+        Args:
+        parent_dir: The parent directory of one or multiple test directories
+
+        Creates a local_result for all directories with a status.log file
+        and appends to local_results
+        """
+        if not os.path.exists(parent_dir) or not os.path.isdir(parent_dir):
+            logging.warning('Test directory does not exist: %s' % parent_dir)
+            return
+
+        status_log_file = os.path.join(parent_dir, STATUS_LOG_FILE)
+        if os.path.exists(status_log_file):
+            self.result_directories.add(parent_dir)
+            return
+
+        for dir_name in os.listdir(parent_dir):
+            subdir = os.path.join(parent_dir, dir_name)
+            if os.path.isdir(subdir):
+                self.enumerate_result_directories(subdir)
+
+    def set_destination(self, destination):
+        self.results_sender.set_destination(destination)
+
+    def get_fake_moblab_id(self):
+        """Get or generate a fake moblab id.
+
+        Moblab id is the unique id of a moblab device. Since the upload script runs
+        from the chroot instead of a moblab device, we need to generate a fake
+        moblab id to comply with the CPCon backend. If there is a previously saved
+        fake moblab id, read and use it. Otherwise, generate a uuid to fake a moblab
+        device, and store it in the same directory as the upload script.
+
+        Returns:
+            A string representing a fake moblab id.
+        """
+        script_dir = os.path.dirname(__file__)
+        fake_moblab_id_path = os.path.join(script_dir, "config",
+                                           FAKE_MOBLAB_ID_FILE)
+
+        # Migrate from prior moblab ID location into config directory if possible
+        old_moblab_id_file = os.path.join(script_dir, FAKE_MOBLAB_ID_FILE)
+        if os.path.exists(old_moblab_id_file):
+            logging.info(
+                    'Found an existing moblab ID outside config directory, migrating now'
+            )
+            os.rename(old_moblab_id_file, fake_moblab_id_path)
+        try:
+            with open(fake_moblab_id_path, "r") as fake_moblab_id_file:
+                fake_moblab_id = str(fake_moblab_id_file.read())[0:32]
+                if fake_moblab_id:
+                    return fake_moblab_id
+        except IOError as e:
+            logging.info(
+                    'Cannot find a fake moblab id at %s, creating a new one.',
+                    fake_moblab_id_path)
+        fake_moblab_id = uuid.uuid4().hex
+        try:
+            with open(fake_moblab_id_path, "w") as fake_moblab_id_file:
+                fake_moblab_id_file.write(fake_moblab_id)
+        except IOError as e:
+            logging.warning('Unable to write the fake moblab id to %s: %s',
+                            fake_moblab_id_path, e)
+        return fake_moblab_id
+
+    def overwrite_suite_name(self, suite_name):
+        self.suite_name = suite_name
+
+    def annotate_results_with_bugid(self, bug_id):
+        self.bug_id = bug_id
+
+    def parse_all_results(self, upload_only: bool = False):
+        self.results = []
+        self.enumerate_all_directories()
+
+        for result_dir in self.result_directories:
+            if self.bug_id is not None:
+                self.results_parser.write_bug_id(result_dir, self.bug_id)
+            self.results.append(
+                    (result_dir,
+                     self.results_parser.parse(result_dir,
+                                               upload_only,
+                                               suite_name=self.suite_name)))
+
+    def upload_all_results(self, force):
+        for result in self.results:
+            self.results_sender.upload_result_and_notify(
+                    result[0], self.moblab_id, result[1], force)
+
+
+class FakeTkoDb:
+    def find_job(self, tag):
+        return None
+
+    def run_with_retry(self, fn, *args):
+        fn(*args)
+
+
+class ResultsParserClass:
+    def __init__(self):
+        pass
+
+    def job_tag(self, job_id, machine):
+        return str(job_id) + "-moblab/" + str(machine)
+
+    def parse(self, path, upload_only: bool, suite_name=""):
+        # Temporarily assign a fake job id and machine until the job is parsed.
+        fake_job_id = 1234
+        fake_machine = "localhost"
+        name = self.job_tag(fake_job_id, fake_machine)
+        parse_options = argparse.Namespace(
+                **{
+                        "suite_report": False,
+                        "dry_run": True,
+                        "reparse": False,
+                        "mail_on_failure": False
+                })
+        pid_file_manager = pidfile.PidFileManager("parser", path)
+        self.print_autotest_git_history(path)
+        job = parse_one(FakeTkoDb(), pid_file_manager, name, path,
+                        parse_options)
+        job.board = job.tests[0].attributes['host-board']
+        job_id = int(job.started_time.timestamp() * 1000)
+        job.afe_parent_job_id = job_id + 1
+        if suite_name == "":
+            job.suite = self.parse_suite_name(path)
+        else:
+            job.suite = suite_name
+        job.build_version = self.get_build_version(job.tests)
+        name = self.job_tag(job_id, job.machine)
+        if not upload_only:
+            export_tko_job_to_file(job, name, path + "/job.serialize")
+
+        # autotest_lib appends additional global logger handlers
+        # remove these handlers to avoid affecting logging for the google
+        # storage library
+        for handler in log.getLogger().handlers:
+            log.getLogger().removeHandler(handler)
+        return job
+
+    def print_autotest_git_history(self, path):
+        """
+        Write the recent git history of the autotest directory to a file in the results path.
+
+        Args:
+            path: The test directory for non-moblab test results.
+        """
+        git_hash = subprocess.check_output(shlex.split(GIT_COMMAND),
+                                           cwd=AUTOTEST_DIR)
+        git_hash_path = os.path.join(path, GIT_HASH_FILE)
+        with open(git_hash_path, "w") as git_hash_file:
+            git_hash_file.write(git_hash.decode("utf-8"))
+
+    def parse_suite_name(self, path):
+        """Get the suite name from a results directory.
+
+        Returns None if test_that.DEBUG does not exist. If the suite name is not
+        found in the first ten lines of that log, returns DEFAULT_SUITE_NAME.
+
+        Args:
+            path: The directory specified on the command line.
+        """
+        path = path.split('/')[:-1]
+        path = '/'.join(path)
+
+        debug_file = os.path.join(path, DEBUG_FILE_PATH)
+        if not os.path.exists(debug_file) or not os.path.isfile(debug_file):
+            return None
+        exp = re.compile(SUITE_NAME_REGEX)
+        try:
+            with open(debug_file) as f:
+                line_count = 0
+                for line in f:
+                    line_count += 1
+                    if line_count > 10:
+                        break
+                    result = exp.search(line)
+                    if not result:
+                        continue
+                    else:
+                        return result.group(1)
+        except IOError as e:
+            logging.warning('Error trying to read test_that.DEBUG: %s', e)
+        return DEFAULT_SUITE_NAME
+
+    def get_build_version(self, tests):
+        release_version_label = "CHROMEOS_RELEASE_VERSION"
+        milestone_label = "CHROMEOS_RELEASE_CHROME_MILESTONE"
+        for test in tests:
+            if not test.subdir:
+                continue
+
+            release = None
+            milestone = None
+            if release_version_label in test.attributes:
+                release = test.attributes[release_version_label]
+            if milestone_label in test.attributes:
+                milestone = test.attributes[milestone_label]
+            if release and milestone:
+                return "R%s-%s" % (milestone, release)
+
+        return ""
+
+    def valid_bug_id(self, v):
+        """Check if user input bug id is in valid format.
+
+        Args:
+            v: User input bug id in string.
+        Returns:
+            An int representing the bug id.
+        Raises:
+            argparse.ArgumentTypeError: if user input bug id has wrong format.
+        """
+        try:
+            bug_id = int(v)
+        except ValueError as e:
+            raise argparse.ArgumentTypeError(
+                    "Bug id %s is not a positive integer: "
+                    "%s" % (v, e))
+        if bug_id <= 0:
+            raise argparse.ArgumentTypeError(
+                    "Bug id %s is not a positive integer" % v)
+        return bug_id
+
+    def write_bug_id(self, test_dir, bug_id):
+        """
+            Write the bug id to the test results.
+
+        Args:
+            test_dir: The test directory for non-moblab test results.
+            bug_id: The bug id to write to the test results.
+        Returns:
+            A boolean. True if the bug id is written successfully or matches the
+            old bug id already in the test results; False if the bug id could not
+            be written, or if the user declines to overwrite the existing bug id.
+        """
+        old_bug_id = None
+        new_keyval = list()
+
+        keyval_file = os.path.join(test_dir, KEYVAL_FILE)
+        try:
+            with open(keyval_file, 'r') as keyval_raw:
+                for line in keyval_raw.readlines():
+                    match = re.match(r'bug_id=(\d+)', line)
+                    if match:
+                        old_bug_id = self.valid_bug_id(match.group(1))
+                    else:
+                        new_keyval.append(line)
+        except IOError as e:
+            logging.error(
+                    'Cannot read keyval file from %s, skip writing the bug '
+                    'id %s: %s', test_dir, bug_id, e)
+            return False
+
+        if old_bug_id:
+            if old_bug_id == bug_id:
+                return True
+            overwrite_bug_id = _confirm_option(
+                    'Would you like to overwrite bug id '
+                    '%s with new bug id %s?' % (old_bug_id, bug_id))
+            if not overwrite_bug_id:
+                return False
+
+        new_keyval.append('bug_id=%s' % bug_id)
+        new_keyval_file = os.path.join(test_dir, NEW_KEYVAL_FILE)
+        try:
+            with open(new_keyval_file, 'w') as new_keyval_raw:
+                for line in new_keyval:
+                    new_keyval_raw.write(line)
+                new_keyval_raw.write('\n')
+            shutil.move(new_keyval_file, keyval_file)
+            return True
+        except Exception as e:
+            logging.error(
+                    'Cannot write bug id to keyval file in %s, skip writing '
+                    'the bug id %s: %s', test_dir, bug_id, e)
+            return False
+
+
+ResultsParser = ResultsParserClass()
+_valid_bug_id = functools.partial(ResultsParserClass.valid_bug_id,
+                                  ResultsParser)
+
+
+class ResultsSenderClass:
+    def __init__(self):
+        self.gcs_bucket = ""
+
+    def set_destination(self, destination):
+        self.gcs_bucket = destination
+
+    def upload_result_and_notify(self, test_dir, moblab_id, job, force):
+        job_id = str(int(job.started_time.timestamp() * 1000))
+        if self.uploaded(test_dir) and not force:
+            return
+        self.upload_result(test_dir, moblab_id, job_id, job.machine)
+        self.send_pubsub_message(test_dir, moblab_id, job_id)
+
+    def upload_batch_files(self, gs_path, test_dir, files):
+        for file in files:
+            if not os.path.isfile(file):
+                continue
+            gs_client_bucket = storage.Client().bucket(self.gcs_bucket)
+            # remove trailing slash to ensure dest_file path gets created properly
+            test_dir = test_dir.rstrip('/')
+            dest_file = gs_path + file.replace(test_dir, "", 1)
+            logging.info("uploading file: %s", dest_file)
+            blob = gs_client_bucket.blob(dest_file)
+            blob.upload_from_filename(file)
+
+    def upload_result(self, test_dir, moblab_id, job_id, hostname):
+        """
+            Upload the test directory with job.serialize to GCS bucket.
+
+        Args:
+            args: A list of input arguments.
+            test_dir: The test directory for non-moblab test results.
+            job_keyval: The key-value object of the job.
+            moblab_id: A string that represents the unique id of a moblab device.
+            job_id: A job id.
+        """
+        upload_status_file = os.path.join(test_dir, UPLOADED_STATUS_FILE)
+        with open(upload_status_file, "w") as upload_status:
+            upload_status.write("UPLOADING")
+
+        fake_moblab_id = moblab_id
+        fake_moblab_install_id = moblab_id
+
+        gcs_bucket_path = os.path.join("results", fake_moblab_id,
+                                       fake_moblab_install_id,
+                                       "%s-moblab" % job_id, hostname)
+
+        try:
+            logging.info(
+                    "Start to upload test directory: %s to GCS bucket path: %s",
+                    test_dir, gcs_bucket_path)
+            with open(upload_status_file, "w") as upload_status:
+                upload_status.write("UPLOADED")
+
+            files_to_upload = glob.glob(test_dir + "/**", recursive=True)
+            batch_size = 8
+            with multiprocessing.Pool(4) as p:
+                files_to_upload_batch = [
+                        files_to_upload[i:i + batch_size]
+                        for i in range(0, len(files_to_upload), batch_size)
+                ]
+                p.map(
+                        functools.partial(
+                                ResultsSenderClass.upload_batch_files, self,
+                                gcs_bucket_path, test_dir),
+                        files_to_upload_batch)
+
+            logging.info(
+                    "Successfully uploaded test directory: %s to GCS bucket path: %s",
+                    test_dir, gcs_bucket_path)
+        except Exception as e:
+            with open(upload_status_file, "w") as upload_status:
+                upload_status.write("UPLOAD_FAILED")
+            raise Exception(
+                    "Failed to upload test directory: %s to GCS bucket "
+                    "path: %s for the error: %s" %
+                    (test_dir, gcs_bucket_path, e))
+
+    def send_pubsub_message(self, test_dir, moblab_id, job_id):
+        """
+            Send pubsub messages to trigger CPCon pipeline to process non-moblab
+            test results in the specific GCS bucket path.
+
+        Args:
+            bucket: The GCS bucket.
+            moblab_id: A moblab id.
+            job_id: A job id.
+        """
+        moblab_install_id = moblab_id
+        console_client = pubsub_client.PubSubBasedClient()
+        gsuri = "gs://%s/results/%s/%s/%s-moblab" % (
+                self.gcs_bucket, moblab_id, moblab_install_id, job_id)
+
+        try:
+            logging.info("Start to send the pubsub message to GCS path: %s",
+                         gsuri)
+            message_id = \
+                console_client.send_test_job_offloaded_message(gsuri,
+                                                            moblab_id,
+                                                            moblab_install_id)
+            upload_status_file = os.path.join(test_dir, UPLOADED_STATUS_FILE)
+            with open(upload_status_file, "w") as upload_status:
+                upload_status.write(STATUS_GOOD)
+
+            logging.info(
+                    "Successfully sent the pubsub message with message id: %s to GCS "
+                    "path: %s", message_id[0], gsuri)
+        except Exception as e:
+            raise Exception(
+                    "Failed to send the pubsub message with moblab id: %s "
+                    "and job id: %s to GCS path: %s for the error: %s" %
+                    (moblab_id, job_id, gsuri, e))
+
+    def uploaded(self, test_dir):
+        """
+        Checks if the message for the uploaded bucket has been sent.
+
+        Args:
+            test_dir: The test directory for non-moblab test results.
+        """
+        upload_status_file = os.path.join(test_dir, UPLOADED_STATUS_FILE)
+        if not os.path.exists(upload_status_file):
+            logging.debug("The upload status file %s does not exist.",
+                          upload_status_file)
+            return False
+
+        with open(upload_status_file, "r") as upload_status:
+            if upload_status.read() == STATUS_GOOD:
+                logging.warning(
+                        "The test directory: %s status has already been "
+                        "sent to CPCon and the .upload_status file has "
+                        "been set to PUBSUB_SENT.", test_dir)
+                return True
+            else:
+                logging.debug("The pubsub message was not successful")
+        return False
+
+
+ResultsSender = ResultsSenderClass()
+
+
+def main(args):
+    parsed_args = parse_arguments(args)
+
+    fmt = log.Formatter('%(asctime)s :: %(levelname)-8s :: %(message)s')
+    logging.propagate = False
+
+    log_level = log.INFO
+    if parsed_args.verbose:
+        log_level = log.DEBUG
+    if not parsed_args.quiet:
+        stream_handler = log.StreamHandler(sys.stdout)
+        stream_handler.setFormatter(fmt)
+        stream_handler.setLevel(log_level)
+        logging.addHandler(stream_handler)
+
+    logging.info("logging to %s", parsed_args.logfile)
+    file_handler = log.FileHandler(parsed_args.logfile, mode='w')
+    file_handler.setFormatter(fmt)
+    file_handler.setLevel(log.DEBUG)
+    logging.addHandler(file_handler)
+
+    if parsed_args.subcommand == "config":
+        _configure_environment(parsed_args)
+        return
+
+    persistent_settings = _load_config()
+
+    results_manager = ResultsManager(ResultsParser, ResultsSender)
+    results_manager.set_destination(persistent_settings.bucket)
+    results_manager.new_directory(parsed_args.directory)
+
+    if parsed_args.bug:
+        results_manager.annotate_results_with_bugid(parsed_args.bug)
+    if parsed_args.suite:
+        results_manager.overwrite_suite_name(parsed_args.suite)
+    if parsed_args.parse_only:
+        results_manager.parse_all_results()
+    elif parsed_args.upload_only:
+        results_manager.parse_all_results(upload_only=True)
+        results_manager.upload_all_results(force=parsed_args.force)
+    else:
+        results_manager.parse_all_results()
+        results_manager.upload_all_results(force=parsed_args.force)
+
+
+if __name__ == "__main__":
+    try:
+        main(sys.argv[1:])
+    except KeyboardInterrupt:
+        sys.exit(0)
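
As a quick orientation to the new contrib/upload_results.py, the sketch below shows the
assumed two-step flow: a one-time 'config' run that writes contrib/config/upload_config.json,
followed by an 'upload' run against a results directory. The bucket name, results path, suite
name, and bug id are placeholders, not values taken from this change.

    # Illustrative driver for the new CLI; all literals below are placeholders.
    import subprocess

    SCRIPT = 'contrib/upload_results.py'  # assumes cwd is the autotest root

    # One-time setup: walks through gsutil auth and records the GCS bucket.
    subprocess.run([SCRIPT, 'config', '--bucket', 'example-results-bucket'],
                   check=True)

    # Parse and upload one test_that results directory, tagging it with a
    # suite name and an optional bug id.
    subprocess.run([SCRIPT, 'upload',
                    '--directory', '/tmp/test_that_results_example',
                    '--suite', 'power',
                    '--bug', '123456'],
                   check=True)
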
diff --git a/database/database_connection.py b/database/database_connection.py
index 90b43e5..d0d370c 100644
--- a/database/database_connection.py
+++ b/database/database_connection.py
@@ -266,7 +266,7 @@
                 if self._reached_max_attempts(num_attempts):
                     raise
                 traceback.print_exc()
-                print ("Can't connect to database; reconnecting in %s sec" %
+                print("Can't connect to database; reconnecting in %s sec" %
                        self.reconnect_delay_sec)
                 time.sleep(self.reconnect_delay_sec)
                 self.disconnect()
@@ -298,7 +298,7 @@
         passed, will override self.reconnect_enabled.
         """
         if self.debug:
-            print 'Executing %s, %s' % (query, parameters)
+            print('Executing %s, %s' % (query, parameters))
         # _connect_backend() contains a retry loop, so don't loop here
         try:
             results = self._backend.execute(query, parameters)
@@ -306,7 +306,7 @@
             if not self._is_reconnect_enabled(try_reconnecting):
                 raise
             traceback.print_exc()
-            print ("MYSQL connection died; reconnecting")
+            print("MYSQL connection died; reconnecting")
             self.disconnect()
             self._connect_backend(try_reconnecting)
             results = self._backend.execute(query, parameters)
diff --git a/database/database_connection_unittest.py b/database/database_connection_unittest.py
index 95d6f6b..73e48ac 100755
--- a/database/database_connection_unittest.py
+++ b/database/database_connection_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import unittest, time
 import common
@@ -92,7 +92,7 @@
     def _expect_fail_and_reconnect(self, num_reconnects, fail_last=False):
         self._fake_backend.connect.expect_call(**_CONNECT_KWARGS).and_raises(
             FakeDatabaseError())
-        for i in xrange(num_reconnects):
+        for i in range(num_reconnects):
             time.sleep.expect_call(_RECONNECT_DELAY)
             if i < num_reconnects - 1:
                 self._expect_reconnect(fail=True)
diff --git a/database/db_utils_unittest.py b/database/db_utils_unittest.py
index 9c01817..14bf17b 100755
--- a/database/db_utils_unittest.py
+++ b/database/db_utils_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import unittest
 import common
diff --git a/database/migrate.py b/database/migrate.py
deleted file mode 100755
index 89dd472..0000000
--- a/database/migrate.py
+++ /dev/null
@@ -1,483 +0,0 @@
-#!/usr/bin/python2 -u
-
-import os, sys, re, tempfile
-from optparse import OptionParser
-import common
-from autotest_lib.client.common_lib import utils
-from autotest_lib.database import database_connection
-
-MIGRATE_TABLE = 'migrate_info'
-
-_AUTODIR = os.path.join(os.path.dirname(__file__), '..')
-_MIGRATIONS_DIRS = {
-    'AUTOTEST_WEB': os.path.join(_AUTODIR, 'frontend', 'migrations'),
-    'TKO': os.path.join(_AUTODIR, 'tko', 'migrations'),
-    'AUTOTEST_SERVER_DB': os.path.join(_AUTODIR, 'database',
-                                      'server_db_migrations'),
-}
-_DEFAULT_MIGRATIONS_DIR = 'migrations' # use CWD
-
-class Migration(object):
-    """Represents a database migration."""
-    _UP_ATTRIBUTES = ('migrate_up', 'UP_SQL')
-    _DOWN_ATTRIBUTES = ('migrate_down', 'DOWN_SQL')
-
-    def __init__(self, name, version, module):
-        self.name = name
-        self.version = version
-        self.module = module
-        self._check_attributes(self._UP_ATTRIBUTES)
-        self._check_attributes(self._DOWN_ATTRIBUTES)
-
-
-    @classmethod
-    def from_file(cls, filename):
-        """Instantiates a Migration from a file.
-
-        @param filename: Name of a migration file.
-
-        @return An instantiated Migration object.
-
-        """
-        version = int(filename[:3])
-        name = filename[:-3]
-        module = __import__(name, globals(), locals(), [])
-        return cls(name, version, module)
-
-
-    def _check_attributes(self, attributes):
-        method_name, sql_name = attributes
-        assert (hasattr(self.module, method_name) or
-                hasattr(self.module, sql_name))
-
-
-    def _execute_migration(self, attributes, manager):
-        method_name, sql_name = attributes
-        method = getattr(self.module, method_name, None)
-        if method:
-            assert callable(method)
-            method(manager)
-        else:
-            sql = getattr(self.module, sql_name)
-            assert isinstance(sql, basestring)
-            manager.execute_script(sql)
-
-
-    def migrate_up(self, manager):
-        """Performs an up migration (to a newer version).
-
-        @param manager: A MigrationManager object.
-
-        """
-        self._execute_migration(self._UP_ATTRIBUTES, manager)
-
-
-    def migrate_down(self, manager):
-        """Performs a down migration (to an older version).
-
-        @param manager: A MigrationManager object.
-
-        """
-        self._execute_migration(self._DOWN_ATTRIBUTES, manager)
-
-
-class MigrationManager(object):
-    """Managest database migrations."""
-    connection = None
-    cursor = None
-    migrations_dir = None
-
-    def __init__(self, database_connection, migrations_dir=None, force=False):
-        self._database = database_connection
-        self.force = force
-        # A boolean, this will only be set to True if this migration should be
-        # simulated rather than actually taken. For use with migrations that
-        # may make destructive queries
-        self.simulate = False
-        self._set_migrations_dir(migrations_dir)
-
-
-    def _set_migrations_dir(self, migrations_dir=None):
-        config_section = self._config_section()
-        if migrations_dir is None:
-            migrations_dir = os.path.abspath(
-                _MIGRATIONS_DIRS.get(config_section, _DEFAULT_MIGRATIONS_DIR))
-        self.migrations_dir = migrations_dir
-        sys.path.append(migrations_dir)
-        assert os.path.exists(migrations_dir), migrations_dir + " doesn't exist"
-
-
-    def _config_section(self):
-        return self._database.global_config_section
-
-
-    def get_db_name(self):
-        """Gets the database name."""
-        return self._database.get_database_info()['db_name']
-
-
-    def execute(self, query, *parameters):
-        """Executes a database query.
-
-        @param query: The query to execute.
-        @param parameters: Associated parameters for the query.
-
-        @return The result of the query.
-
-        """
-        return self._database.execute(query, parameters)
-
-
-    def execute_script(self, script):
-        """Executes a set of database queries.
-
-        @param script: A string of semicolon-separated queries.
-
-        """
-        sql_statements = [statement.strip()
-                          for statement in script.split(';')
-                          if statement.strip()]
-        for statement in sql_statements:
-            self.execute(statement)
-
-
-    def check_migrate_table_exists(self):
-        """Checks whether the migration table exists."""
-        try:
-            self.execute("SELECT * FROM %s" % MIGRATE_TABLE)
-            return True
-        except self._database.DatabaseError, exc:
-            # we can't check for more specifics due to differences between DB
-            # backends (we can't even check for a subclass of DatabaseError)
-            return False
-
-
-    def create_migrate_table(self):
-        """Creates the migration table."""
-        if not self.check_migrate_table_exists():
-            self.execute("CREATE TABLE %s (`version` integer)" %
-                         MIGRATE_TABLE)
-        else:
-            self.execute("DELETE FROM %s" % MIGRATE_TABLE)
-        self.execute("INSERT INTO %s VALUES (0)" % MIGRATE_TABLE)
-        assert self._database.rowcount == 1
-
-
-    def set_db_version(self, version):
-        """Sets the database version.
-
-        @param version: The version to which to set the database.
-
-        """
-        assert isinstance(version, int)
-        self.execute("UPDATE %s SET version=%%s" % MIGRATE_TABLE,
-                     version)
-        assert self._database.rowcount == 1
-
-
-    def get_db_version(self):
-        """Gets the database version.
-
-        @return The database version.
-
-        """
-        if not self.check_migrate_table_exists():
-            return 0
-        rows = self.execute("SELECT * FROM %s" % MIGRATE_TABLE)
-        if len(rows) == 0:
-            return 0
-        assert len(rows) == 1 and len(rows[0]) == 1
-        return rows[0][0]
-
-
-    def get_migrations(self, minimum_version=None, maximum_version=None):
-        """Gets the list of migrations to perform.
-
-        @param minimum_version: The minimum database version.
-        @param maximum_version: The maximum database version.
-
-        @return A list of Migration objects.
-
-        """
-        migrate_files = [filename for filename
-                         in os.listdir(self.migrations_dir)
-                         if re.match(r'^\d\d\d_.*\.py$', filename)]
-        migrate_files.sort()
-        migrations = [Migration.from_file(filename)
-                      for filename in migrate_files]
-        if minimum_version is not None:
-            migrations = [migration for migration in migrations
-                          if migration.version >= minimum_version]
-        if maximum_version is not None:
-            migrations = [migration for migration in migrations
-                          if migration.version <= maximum_version]
-        return migrations
-
-
-    def do_migration(self, migration, migrate_up=True):
-        """Performs a migration.
-
-        @param migration: The Migration to perform.
-        @param migrate_up: Whether to migrate up (if not, then migrates down).
-
-        """
-        print 'Applying migration %s' % migration.name, # no newline
-        if migrate_up:
-            print 'up'
-            assert self.get_db_version() == migration.version - 1
-            migration.migrate_up(self)
-            new_version = migration.version
-        else:
-            print 'down'
-            assert self.get_db_version() == migration.version
-            migration.migrate_down(self)
-            new_version = migration.version - 1
-        self.set_db_version(new_version)
-
-
-    def migrate_to_version(self, version):
-        """Performs a migration to a specified version.
-
-        @param version: The version to which to migrate the database.
-
-        """
-        current_version = self.get_db_version()
-        if current_version == 0 and self._config_section() == 'AUTOTEST_WEB':
-            self._migrate_from_base()
-            current_version = self.get_db_version()
-
-        if current_version < version:
-            lower, upper = current_version, version
-            migrate_up = True
-        else:
-            lower, upper = version, current_version
-            migrate_up = False
-
-        migrations = self.get_migrations(lower + 1, upper)
-        if not migrate_up:
-            migrations.reverse()
-        for migration in migrations:
-            self.do_migration(migration, migrate_up)
-
-        assert self.get_db_version() == version
-        print 'At version', version
-
-
-    def _migrate_from_base(self):
-        """Initialize the AFE database.
-        """
-        self.confirm_initialization()
-
-        migration_script = utils.read_file(
-                os.path.join(os.path.dirname(__file__), 'schema_129.sql'))
-        migration_script = migration_script % (
-                dict(username=self._database.get_database_info()['username']))
-        self.execute_script(migration_script)
-
-        self.create_migrate_table()
-        self.set_db_version(129)
-
-
-    def confirm_initialization(self):
-        """Confirms with the user that we should initialize the database.
-
-        @raises Exception, if the user chooses to abort the migration.
-
-        """
-        if not self.force:
-            response = raw_input(
-                'Your %s database does not appear to be initialized.  Do you '
-                'want to recreate it (this will result in loss of any existing '
-                'data) (yes/No)? ' % self.get_db_name())
-            if response != 'yes':
-                raise Exception('User has chosen to abort migration')
-
-
-    def get_latest_version(self):
-        """Gets the latest database version."""
-        migrations = self.get_migrations()
-        return migrations[-1].version
-
-
-    def migrate_to_latest(self):
-        """Migrates the database to the latest version."""
-        latest_version = self.get_latest_version()
-        self.migrate_to_version(latest_version)
-
-
-    def initialize_test_db(self):
-        """Initializes a test database."""
-        db_name = self.get_db_name()
-        test_db_name = 'test_' + db_name
-        # first, connect to no DB so we can create a test DB
-        self._database.connect(db_name='')
-        print 'Creating test DB', test_db_name
-        self.execute('CREATE DATABASE ' + test_db_name)
-        self._database.disconnect()
-        # now connect to the test DB
-        self._database.connect(db_name=test_db_name)
-
-
-    def remove_test_db(self):
-        """Removes a test database."""
-        print 'Removing test DB'
-        self.execute('DROP DATABASE ' + self.get_db_name())
-        # reset connection back to real DB
-        self._database.disconnect()
-        self._database.connect()
-
-
-    def get_mysql_args(self):
-        """Returns the mysql arguments as a string."""
-        return ('-u %(username)s -p%(password)s -h %(host)s %(db_name)s' %
-                self._database.get_database_info())
-
-
-    def migrate_to_version_or_latest(self, version):
-        """Migrates to either a specified version, or the latest version.
-
-        @param version: The version to which to migrate the database,
-            or None in order to migrate to the latest version.
-
-        """
-        if version is None:
-            self.migrate_to_latest()
-        else:
-            self.migrate_to_version(version)
-
-
-    def do_sync_db(self, version=None):
-        """Migrates the database.
-
-        @param version: The version to which to migrate the database.
-
-        """
-        print 'Migration starting for database', self.get_db_name()
-        self.migrate_to_version_or_latest(version)
-        print 'Migration complete'
-
-
-    def test_sync_db(self, version=None):
-        """Create a fresh database and run all migrations on it.
-
-        @param version: The version to which to migrate the database.
-
-        """
-        self.initialize_test_db()
-        try:
-            print 'Starting migration test on DB', self.get_db_name()
-            self.migrate_to_version_or_latest(version)
-            # show schema to the user
-            os.system('mysqldump %s --no-data=true '
-                      '--add-drop-table=false' %
-                      self.get_mysql_args())
-        finally:
-            self.remove_test_db()
-        print 'Test finished successfully'
-
-
-    def simulate_sync_db(self, version=None):
-        """Creates a fresh DB, copies existing DB to it, then synchronizes it.
-
-        @param version: The version to which to migrate the database.
-
-        """
-        db_version = self.get_db_version()
-        # don't do anything if we're already at the latest version
-        if db_version == self.get_latest_version():
-            print 'Skipping simulation, already at latest version'
-            return
-        # get existing data
-        self.initialize_and_fill_test_db()
-        try:
-            print 'Starting migration test on DB', self.get_db_name()
-            self.migrate_to_version_or_latest(version)
-        finally:
-            self.remove_test_db()
-        print 'Test finished successfully'
-
-
-    def initialize_and_fill_test_db(self):
-        """Initializes and fills up a test database."""
-        print 'Dumping existing data'
-        dump_fd, dump_file = tempfile.mkstemp('.migrate_dump')
-        os.system('mysqldump %s >%s' %
-                  (self.get_mysql_args(), dump_file))
-        # fill in test DB
-        self.initialize_test_db()
-        print 'Filling in test DB'
-        os.system('mysql %s <%s' % (self.get_mysql_args(), dump_file))
-        os.close(dump_fd)
-        os.remove(dump_file)
-
-
-USAGE = """\
-%s [options] sync|test|simulate|safesync [version]
-Options:
-    -d --database   Which database to act on
-    -f --force      Don't ask for confirmation
-    --debug         Print all DB queries"""\
-    % sys.argv[0]
-
-
-def main():
-    """Main function for the migration script."""
-    parser = OptionParser()
-    parser.add_option("-d", "--database",
-                      help="which database to act on",
-                      dest="database",
-                      default="AUTOTEST_WEB")
-    parser.add_option("-f", "--force", help="don't ask for confirmation",
-                      action="store_true")
-    parser.add_option('--debug', help='print all DB queries',
-                      action='store_true')
-    (options, args) = parser.parse_args()
-    manager = get_migration_manager(db_name=options.database,
-                                    debug=options.debug, force=options.force)
-
-    if len(args) > 0:
-        if len(args) > 1:
-            version = int(args[1])
-        else:
-            version = None
-        if args[0] == 'sync':
-            manager.do_sync_db(version)
-        elif args[0] == 'test':
-            manager.simulate=True
-            manager.test_sync_db(version)
-        elif args[0] == 'simulate':
-            manager.simulate=True
-            manager.simulate_sync_db(version)
-        elif args[0] == 'safesync':
-            print 'Simluating migration'
-            manager.simulate=True
-            manager.simulate_sync_db(version)
-            print 'Performing real migration'
-            manager.simulate=False
-            manager.do_sync_db(version)
-        else:
-            print USAGE
-        return
-
-    print USAGE
-
-
-def get_migration_manager(db_name, debug, force):
-    """Creates a MigrationManager object.
-
-    @param db_name: The database name.
-    @param debug: Whether to print debug messages.
-    @param force: Whether to force migration without asking for confirmation.
-
-    @return A created MigrationManager object.
-
-    """
-    database = database_connection.DatabaseConnection(db_name)
-    database.debug = debug
-    database.reconnect_enabled = False
-    database.connect()
-    return MigrationManager(database, force=force)
-
-
-if __name__ == '__main__':
-    main()
diff --git a/database/migrate_unittest.py b/database/migrate_unittest.py
deleted file mode 100755
index 890f3d9..0000000
--- a/database/migrate_unittest.py
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/usr/bin/python2
-
-import unittest, tempfile, os
-import common
-import MySQLdb
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.database import database_connection, migrate
-
-# Which section of the global config to pull info from.  We won't actually use
-# that DB, we'll use the corresponding test DB (test_<db name>).
-CONFIG_DB = 'AUTOTEST_WEB'
-
-NUM_MIGRATIONS = 3
-
-class DummyMigration(object):
-    """\
-    Dummy migration class that records all migrations done in a class
-    varaible.
-    """
-
-    migrations_done = []
-
-    def __init__(self, version):
-        self.version = version
-        self.name = '%03d_test' % version
-
-
-    @classmethod
-    def get_migrations_done(cls):
-        return cls.migrations_done
-
-
-    @classmethod
-    def clear_migrations_done(cls):
-        cls.migrations_done = []
-
-
-    @classmethod
-    def do_migration(cls, version, direction):
-        cls.migrations_done.append((version, direction))
-
-
-    def migrate_up(self, manager):
-        self.do_migration(self.version, 'up')
-        if self.version == 1:
-            manager.create_migrate_table()
-
-
-    def migrate_down(self, manager):
-        self.do_migration(self.version, 'down')
-
-
-MIGRATIONS = [DummyMigration(n) for n in xrange(1, NUM_MIGRATIONS + 1)]
-
-
-class TestableMigrationManager(migrate.MigrationManager):
-    def _set_migrations_dir(self, migrations_dir=None):
-        pass
-
-
-    def get_migrations(self, minimum_version=None, maximum_version=None):
-        minimum_version = minimum_version or 1
-        maximum_version = maximum_version or len(MIGRATIONS)
-        return MIGRATIONS[minimum_version-1:maximum_version]
-
-
-class MigrateManagerTest(unittest.TestCase):
-    def setUp(self):
-        self._database = (
-            database_connection.DatabaseConnection.get_test_database())
-        self._database.connect()
-        self.manager = TestableMigrationManager(self._database)
-        DummyMigration.clear_migrations_done()
-
-
-    def tearDown(self):
-        self._database.disconnect()
-
-
-    def test_sync(self):
-        self.manager.do_sync_db()
-        self.assertEquals(self.manager.get_db_version(), NUM_MIGRATIONS)
-        self.assertEquals(DummyMigration.get_migrations_done(),
-                          [(1, 'up'), (2, 'up'), (3, 'up')])
-
-        DummyMigration.clear_migrations_done()
-        self.manager.do_sync_db(0)
-        self.assertEquals(self.manager.get_db_version(), 0)
-        self.assertEquals(DummyMigration.get_migrations_done(),
-                          [(3, 'down'), (2, 'down'), (1, 'down')])
-
-
-    def test_sync_one_by_one(self):
-        for version in xrange(1, NUM_MIGRATIONS + 1):
-            self.manager.do_sync_db(version)
-            self.assertEquals(self.manager.get_db_version(),
-                              version)
-            self.assertEquals(
-                DummyMigration.get_migrations_done()[-1],
-                (version, 'up'))
-
-        for version in xrange(NUM_MIGRATIONS - 1, -1, -1):
-            self.manager.do_sync_db(version)
-            self.assertEquals(self.manager.get_db_version(),
-                              version)
-            self.assertEquals(
-                DummyMigration.get_migrations_done()[-1],
-                (version + 1, 'down'))
-
-
-    def test_null_sync(self):
-        self.manager.do_sync_db()
-        DummyMigration.clear_migrations_done()
-        self.manager.do_sync_db()
-        self.assertEquals(DummyMigration.get_migrations_done(), [])
-
-
-class DummyMigrationManager(object):
-    def __init__(self):
-        self.calls = []
-
-
-    def execute_script(self, script):
-        self.calls.append(script)
-
-
-class MigrationTest(unittest.TestCase):
-    def setUp(self):
-        self.manager = DummyMigrationManager()
-
-
-    def _do_migration(self, migration_module):
-        migration = migrate.Migration('name', 1, migration_module)
-        migration.migrate_up(self.manager)
-        migration.migrate_down(self.manager)
-
-        self.assertEquals(self.manager.calls, ['foo', 'bar'])
-
-
-    def test_migration_with_methods(self):
-        class DummyMigration(object):
-            @staticmethod
-            def migrate_up(manager):
-                manager.execute_script('foo')
-
-
-            @staticmethod
-            def migrate_down(manager):
-                manager.execute_script('bar')
-
-        self._do_migration(DummyMigration)
-
-
-    def test_migration_with_strings(self):
-        class DummyMigration(object):
-            UP_SQL = 'foo'
-            DOWN_SQL = 'bar'
-
-        self._do_migration(DummyMigration)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/docs/OWNERS b/docs/OWNERS
new file mode 100644
index 0000000..860704f
--- /dev/null
+++ b/docs/OWNERS
@@ -0,0 +1,2 @@
+# FAFT TL
+per-file faft* = jbettis@chromium.org
diff --git a/docs/best-practices.md b/docs/best-practices.md
index 334bcc1..773f4d7 100644
--- a/docs/best-practices.md
+++ b/docs/best-practices.md
@@ -1,5 +1,5 @@
 # Autotest Best Practices
-When the Chrome OS team started using autotest, we tried our best to figure out
+When the ChromeOS team started using autotest, we tried our best to figure out
 how to fit our code and our tests into the upstream style with little guidance
 and poor documentation.  This went poorly.  With the benefit of hindsight,
 we’re going to lay out some best-practices that we’d like to enforce going
diff --git a/docs/coding-style.md b/docs/coding-style.md
index c136bec..528edfd 100644
--- a/docs/coding-style.md
+++ b/docs/coding-style.md
@@ -1,4 +1,4 @@
-# Coding style for autotest in Chrome OS / Android / Brillo
+# Coding style for autotest in ChromeOS / Android / Brillo
 These rules elaborate on, but rarely deviate from PEP-8.  When in doubt, go
 with PEP-8.
 
@@ -95,6 +95,20 @@
 from common_lib import error
 ```
 
+### Automatically reorder imports
+
+To sort the imports on a list of files:
+
+`isort -o common -t common -sl FILENAMES`
+
+Or all the files in the current commit:
+
+`isort -o common -t common -sl $(git diff --name-only HEAD^ HEAD)`
+
+Or all the unstaged files:
+
+`isort -o common -t common -sl $(git diff --name-only)`
+
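As a rough illustration (not produced by running the exact command above, and the precise grouping depends on the flags), isort with single-line imports turns a file like the first snippet into the second: one import per line, sorted alphabetically and grouped with the standard library first.

```
# Before: combined, unsorted imports.
import sys, os
from common_lib import utils, error

# After isort -sl: one import per line, alphabetized and grouped.
import os
import sys

from common_lib import error
from common_lib import utils
```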
 ## Testing None
 
 Use `is None` rather than `== None` and `is not None` rather than `!= None`.
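A minimal example of why the identity check is preferred: `==` dispatches to a class's `__eq__`, which may not behave as expected, while `is None` always tests object identity.

```
class AlwaysEqual(object):
    """Pathological but legal: claims equality with everything."""

    def __eq__(self, other):
        return True


value = AlwaysEqual()
print(value == None)   # True  -- __eq__ hijacks the comparison
print(value is None)   # False -- identity cannot be overridden
```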
@@ -220,6 +234,6 @@
 
 ## Submitting patches
 
-Submit changes through the Chrome OS gerrit instance.  This process is
+Submit changes through the ChromeOS gerrit instance.  This process is
 documented on
 [chromium.org](http://dev.chromium.org/developers/contributing-code).
diff --git a/docs/enterprise.md b/docs/enterprise.md
index cb7592f..17be321 100644
--- a/docs/enterprise.md
+++ b/docs/enterprise.md
@@ -91,7 +91,7 @@
 
 ## Test Breakdown
 
-See the [Autotest Best Practices](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/master/docs/best-practices.md#control-files) for general autotest information.
+See the [Autotest Best Practices](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/docs/best-practices.md#control-files) for general autotest information.
 This section will provide details on how Enterprise autotests are written.
 Each test will require the following:
 *	A control file
@@ -100,7 +100,8 @@
 
 ### Control files
 
-[Control files](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/master/docs/best-practices.md#control-files) are used as the entry point to a test.
+[Control files](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/docs/best-practices.md#control-files) are used as the entry point to a test.
+
 A typical dir for a user policy (client) test will consist of control file(s)
 along with .py test file(s). A control file will contain a basic description of the
 test as well as options such as these:
@@ -169,7 +170,7 @@
 device before/during/after a test.
 
 In order to support clearing the TPM & rebooting, all device policies must be
-written as a ["server"](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/master/docs/best-practices.md#when_why-to-write-a-server_side-test) test.
+written as a ["server"](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/docs/best-practices.md#when_why-to-write-a-server_side-test) test.
 Server tests (for Enterprise) will need a "server" control & test, in addition
 to having a client control file and a .py test file. The server test will do
 any server operations (reboot, servo control, wifi cell control, etc)
diff --git a/docs/faft-code.md b/docs/faft-code.md
index 455fde1..6b83550 100644
--- a/docs/faft-code.md
+++ b/docs/faft-code.md
@@ -1,5 +1,7 @@
 # FAFT Code Overview
 
+_Self-link: [go/faft-code](https://goto.google.com/faft-code)_
+
 _Last updated: 2020/03/13_
 
 ## Introduction
diff --git a/docs/faft-design-doc.md b/docs/faft-design-doc.md
index 48b7df1..ee7c514 100644
--- a/docs/faft-design-doc.md
+++ b/docs/faft-design-doc.md
@@ -1,6 +1,8 @@
 <a name="faft" />
 
-# FAFT (Fully Automated Firmware Test)
+# FAFT (Fully Automated Firmware Test): Design Doc
+
+_Self-link: [go/faft-design-doc](https://goto.google.com/faft-design-doc)_
 
 _Last updated: 2011/11/08_
 
@@ -30,15 +32,15 @@
 
 ## FAFT Related Documents
 
-- FAFT Setup Instructions: [http://goto/faft-setup](http://goto/faft-setup)
-- FAFT Test Coverage: [http://goto/faft-coverage](http://goto/faft-coverage)
-- FAFT Comparing Existing Manual Test Cases: [http://goto/faft-comparison](http://goto/faft-comparison)
+- [FAFT Setup Instructions](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/docs/faft-how-to-run-doc.md)  ([Google specific instructions](http://goto/faft-setup))
+- FAFT Test Coverage (Google internal only): [http://goto/faft-coverage](http://goto/faft-coverage)
+- FAFT Comparing Existing Manual Test Cases (Google internal only): [http://goto/faft-comparison](http://goto/faft-comparison)
 
 <a name="introduction" />
 
 ## Introduction
 
-Security is one of the selling points of Chrome OS netbooks. Verified boot provides a solution to Chrome OS security model. In the verified boot design, firmware is the foundation of this secured castle. In addition, part of the firmware is marked as read-only, that means we are unable to patch security holes via autoupdate. So firmware testing is very important; however, our current firmware is lack of automated tests. It only relies on the manual tests by developers, our test team, and dogfooders. It seems to be a risk.
+Security is one of the selling points of ChromeOS netbooks. Verified boot provides a solution to the ChromeOS security model, and in the verified boot design, firmware is the foundation of this secured castle. In addition, part of the firmware is marked as read-only, which means we are unable to patch security holes via autoupdate. Firmware testing is therefore very important; however, our current firmware lacks automated tests. It relies only on manual testing by developers, our test team, and dogfooders, which is a risk.
 
 This document proposes a solution to the test problem. The advantages of FAFT:
 - Fully automatic, no human involved;
@@ -52,7 +54,7 @@
 
 This document uses [U-Boot](http://en.wikipedia.org/wiki/Das_U-Boot) as an example to discuss the FAFT test approach. It can also be applied to [coreboot](http://en.wikipedia.org/wiki/Coreboot) and any proprietary BIOS in a similar way.
 
-The U-Boot software stack looks like the following graph: [cl/70339149](https://critique.corp.google.com/#review/70339149)
+The U-Boot software stack looks like the following graph:
 
 ![faft-u-boot-stack](assets/faft-u-boot-stack.png)
 
@@ -109,7 +111,7 @@
 
 Design Doc: [https://docs.google.com/a/google.com/document/d/1XZgX4_v-Ps7YBRnjZmCFYjrjyBZQz5YmRrg7YWBFfV0/edit?hl=en_US](https://docs.google.com/a/google.com/document/d/1XZgX4_v-Ps7YBRnjZmCFYjrjyBZQz5YmRrg7YWBFfV0/edit?hl=en_US)
 
-This work is an on-going project to ensure the whole factory install flow work fine. It downloads a factory bundle from buildbot, setup TFTP and Omaha servers. It uses a mini-Servo board to control a Chrome OS device, like switching dev switch, resetting device, sending Ctrl-D key, etc. This work is done by Rong Chang and the factory team to ensure partners get a good factory bundle.
+This work is an on-going project to ensure the whole factory install flow works correctly. It downloads a factory bundle from buildbot and sets up TFTP and Omaha servers. It uses a mini-Servo board to control a ChromeOS device, e.g. toggling the dev switch, resetting the device, sending the Ctrl-D key, etc. This work is done by Rong Chang and the factory team to ensure partners get a good factory bundle.
 
 Since a Servo board can automate almost all of the human interactions, the tests can be run fully automatically. The proposed FAFT in this document is an extension of this work.
 
@@ -129,7 +131,7 @@
 
 ### Test Environment
 
-We need a [Servo board](https://sites.google.com/a/google.com/chromeos-partner/hardware-control-and-debug/servo) to connect a DUT such that we can:
+We need a [Servo board](https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/HEAD/docs/servo.md) to connect a DUT such that we can:
 
 - flash a new firmware image;
 - run regression tests;
@@ -143,7 +145,7 @@
 
 ### Test Harness
 
-We use Autotest as our test harness and run FAFT as server tests. The host uses Ethernet to connect a DUT to push the test commands to it. Since Autotest is widely used in the Chrome OS projects. We can benefit in:
+We use Autotest as our test harness and run FAFT as server tests. The host uses Ethernet to connect to a DUT and push the test commands to it. Since Autotest is widely used in ChromeOS projects, we benefit in the following ways:
 
 - easy to integrate to buildbot;
 - easy to run remotely;
@@ -282,7 +284,7 @@
 4. Insert a recovery shim;
 5. Wait for the recovery process to complete;
 6. Reboot;
-7. Boot to normal Chrome OS.
+7. Boot to normal ChromeOS.
 
 This sequence can be fully automated by FAFT. For example, in step 1, we require a USB stick plugged into the Servo board by default. We first mux it to the host machine so that we can write the recovery shim image to it. In step 4, we then mux the USB stick to the DUT. Another approach is to emulate a USB storage gadget on the host machine, which would make it more flexible to control its functionality in software.
 
diff --git a/docs/faft-how-to-run-doc.md b/docs/faft-how-to-run-doc.md
index 3e1bbbb..b2e4d71 100644
--- a/docs/faft-how-to-run-doc.md
+++ b/docs/faft-how-to-run-doc.md
@@ -1,24 +1,13 @@
 # How to run FAFT (Fully Automated Firmware Test) {#faft-how-to-run}
 
-_[go/faft-running](https://goto.google.com/faft-running)_
+_Self-link: [go/faft-running](https://goto.google.com/faft-running)_
 
-- [How to run FAFT (Fully Automated Firmware Test)](#faft-how-to-run)
-  - [FAFT Overview](#faft-overview)
-  - [Hardware Setup](#hardware-setup)
-    - [ServoV4 Type-A with servo micro](#servov4-typea-micro)
-    - [ServoV4 Type-C](#servov4-typec)
-    - [ServoV4 Type-C with servo micro](#servov4-typec-micro)
-    - [(Deprecated) ServoV2](#servov2-deprecated)
-    - [Installing Test Image onto USB Stick](#image-onto-usb)
-  - [Running Tests](#faft-running-tests)
-    - [Setup Confirmation](#setup-confirmation)
-    - [Sample Commands](#sample-commands)
-  - [Frequently Asked Questions (FAQ)](#faq)
+[TOC]
 
 ## FAFT Overview {#faft-overview}
 
 [FAFT] (Fully Automated Firmware Tests) is a collection of tests and related
-infrastructure that exercise and verify capabilities of Chrome OS.
+infrastructure that exercise and verify capabilities of ChromeOS.
 The features tested by FAFT are implemented through low-level software
 (firmware/BIOS) and hardware. FAFT evolved from SAFT
 (Semi-Automated Firmware Tests) and you can locate tests in the [FAFT suite]
@@ -49,7 +38,7 @@
 The FAFT suite of tests can be invoked locally or remotely.
 This document describes how to set up the local configuration only.
 
-The Chrome OS firmware controls, among other things, the initial setup of the
+The ChromeOS firmware controls, among other things, the initial setup of the
 system hardware during the boot process. They are necessarily complicated,
 providing reliability against various corruption scenarios and security to
 ensure trusted software is controlling the system. Currently, the purpose of
@@ -57,13 +46,37 @@
 
 ## Hardware Setup {#hardware-setup}
 
+### General requirements
+
+The firmware running on the system needs to be able to deal with the
+signatures on the disks, so when testing your own local ChromeOS build
+signed with dev keys, install dev-signed firmware as well.
+
+The setup requires a USB drive: Pick the fastest option that you can
+reasonably employ but even more than that, ensure that it's reliable!
+If the drive is quirky in manual use, FAFT will definitely be confused
+because it won't be able to deal with extraordinary circumstances.
+
+The OS image installed on the USB drive MUST NOT be a recovery image. FAFT
+switches pretty often between normal and dev mode, and the transition into
+dev mode is done by going through the recovery screen. With a recovery
+image present, it will do a recovery instead of going through the dev
+mode transition flow.
+
+The OS on the USB drive and on the disk must be a test image. If it is not,
+it will lack important tooling for running the tests: if you see messages
+that `rsync` can't be found, you're not using a test image. This step will
+still work (albeit slowly, because the fallback is to scp files
+individually), but running the DUT's side of the tests will fail because
+non-test ChromeOS lacks a suitable python interpreter.
+
 ### ServoV4 Type-A with Micro {#servov4-typea-micro}
 
 The hardware configuration for running FAFT on a servo v4 Type-A
 with servo micro includes:
 
 - A test controller (your host workstation with a working chroot environment)
-- The test device (a device / DUT that can boot Chrome OS)
+- The test device (a device / DUT that can boot ChromeOS)
 - A servo board
 - Related cables and components
     - servo-micro cable
@@ -86,7 +99,7 @@
 1. Connect one end (micro USB) of the servo micro to servoV4 using a micro USB to USB cable.
 2. Connect the servo micro to the debug header on the chrome device.
 3. Connect the USB type A cable of the servoV4 to the DUT.
-4. Prepare a USB flash drive with valid Chrome OS image and plug into the USB port of the servo as shown in the diagram.
+4. Prepare a USB flash drive with valid ChromeOS image and plug into the USB port of the servo as shown in the diagram.
 5. Connect the micro USB port of the servo to the host machine (typically your workstation).
 6. Connect an Ethernet cable to the Ethernet jack of the servo that goes to a network reachable from the network that your host machine is on.
 
@@ -95,7 +108,7 @@
 The hardware configuration for running FAFT with a servo v4 type-C includes:
 
 - A test controller (your host workstation with a working chroot environment)
-- The test device (a device / DUT that can boot Chrome OS)
+- The test device (a device / DUT that can boot ChromeOS)
 - A servo board
 - Related cables and components
     - USB type-A to USB micro cable for test controller connection (~ 4' - 6' in length)
@@ -113,7 +126,7 @@
 Details of servoV4 Type-C connections in Figure 2:
 
 1. Connect the USB Type-C cable of the servoV4 to the DUT.
-2. Prepare a USB flash drive with valid Chrome OS image and plug into the USB port of the servo as shown in the diagram.
+2. Prepare a USB flash drive with valid ChromeOS image and plug into the USB port of the servo as shown in the diagram.
 3. Connect the micro USB port of the servo to the host machine (typically your workstation).
 4. Connect an Ethernet cable to the Ethernet jack of the servo that goes to a network reachable from the network that your host machine is on.
 
@@ -158,7 +171,7 @@
 
 1. Connect one end (ribbon cable) of the flex cable to servoV2 and the other end to the debug header on the chrome device.
 2. Connect DUT_HUB_IN(micro USB port) of the servo to the DUT.
-3. Prepare a USB flash drive with valid Chrome OS image and plug into the USB port of the servo as shown in the photo.
+3. Prepare a USB flash drive with valid ChromeOS image and plug into the USB port of the servo as shown in the photo.
 4. Connect the micro USB port of the servo to the host machine (workstation or a labstation).
 5. Connect an Ethernet cable to the Ethernet jack of the servo.
 
@@ -174,71 +187,106 @@
 
 ## Running Tests {#faft-running-tests}
 
+FAFT tests are written in two different frameworks: Autotest and Tast.
+
+Autotest tests are run using the `test_that` command, described below. Tast tests are run using the `tast run` command, which is documented at [go/tast-running](http://chromium.googlesource.com/chromiumos/platform/tast/+/HEAD/docs/running_tests.md).
+
 ### Setup Confirmation {#setup-confirmation}
 
-To run FAFT you use the `test_that` tool, which does not automatically start a
-`servod` process for communicating with the servo board. Running FAFT is easiest
-with `servod` and `test_that` running in separate terminals inside the SDK,
-using either multiple SDK instances (`cros_sdk --enter --no-ns-pid`) or a tool
-such as `screen` inside an SDK instance. Before running any tests, go into
-chroot:
+To run Autotest tests, use the `test_that` tool, which does not automatically
+start a `servod` process for communicating with the servo board. Running FAFT
+is easiest with `servod` and `test_that` running in separate terminals inside
+the SDK, using either multiple SDK instances (`cros_sdk --enter --no-ns-pid`)
+or a tool such as `screen` inside an SDK instance. Before running any tests, go
+into the chroot:
 
-1.  (chroot 1) Run `$ sudo servod --board=$BOARD` where `$BOARD` is the code name of the board you are testing. For example: `$ sudo servod --board=eve`
-1.  Go into a second chroot
-1.  (chroot 2) Run the `firmware_FAFTSetup` test to verify basic functionality and ensure that your setup is correct.
-1.  If test_that is in `/usr/bin`, the syntax is `$ /usr/bin/test_that --board=$BOARD $DUT_IP firmware_FAFTSetup`
+1.  Make sure your tools are up to date.
+    1.  Run `repo sync -j8`
+    2.  Run `./update_chroot`
+2.  (chroot 1) Run `$ sudo servod --board=$BOARD` where `$BOARD` is the code name of the board you are testing. For example: `$ sudo servod --board=eve`
+3.  Go into a second chroot
+4.  (chroot 2) Run the `firmware_FAFTSetup` test to verify basic functionality and ensure that your setup is correct.
+5.  If test_that is in `/usr/bin`, the syntax is `$ /usr/bin/test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=$BOARD $DUT_IP firmware_FAFTSetup`
+6.  Run the `firmware.Pre.normal` test to verify tast tests are working also. `tast run --var=servo=localhost:9999 $DUT_IP firmware.Pre.normal`
 
-It is important to note that this syntax will work only if the correct packages
-for the DUT have been built. To build the packages, which usually takes
-a few hours, run the following from chroot:
+You can omit `--autotest_dir` if you have built packages for the board and want to use the built version of the tests, i.e.:
 
 (chroot) `$ ./build_packages --board=$BOARD` where `$BOARD` is the code name of the board under test
-
-If packages have not been built, the command won't work unless a path to the
-autotest directory is included in the command as follows:
-
-(chroot) `$ test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --args="servo_host=localhost servo_port=9999" -b $BOARD $IP $TEST_NAME`
+(chroot) `$ /usr/bin/test_that --board=$BOARD $DUT_IP firmware_FAFTSetup`
 
 ### Sample Commands {#sample-commands}
 
-A few sample invocations of launching tests against a DUT:
+A few sample invocations of launching Autotest tests against a DUT:
 
 Running FAFT test with test case name
 
-- `$ /usr/bin/test_that --board=$BOARD $DUT_IP f:.*DevMode/control`
+- `$ /usr/bin/test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=$BOARD $DUT_IP f:.*DevMode/control`
 
 Some tests can be run in either normal mode or dev mode, specify the control file
 
-- `$ /usr/bin/test_that --board=$BOARD $DUT_IP f:.*TryFwB/control.dev`
+- `$ /usr/bin/test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=$BOARD $DUT_IP f:.*TryFwB/control.dev`
 
-FAFT can install Chrome OS image from the USB when image filename is specified
+FAFT can install ChromeOS image from the USB when image filename is specified
 
-- `$ /usr/bin/test_that --board=$BOARD $DUT_IP --args "image=$IMAGE_FILE" f:.*RecoveryButton/control.normal`
+- `$ /usr/bin/test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=$BOARD $DUT_IP --args "image=$IMAGE_FILE" f:.*RecoveryButton/control.normal`
 
 To update the firmware using the shellball in the image, specify the argument firmware_update=1
 
-- `$ /usr/bin/test_that --board=$BOARD $DUT_IP --args "image=$IMAGE_FILE firmware_update=1" f:.*RecoveryButton/control.normal`
+- `$ /usr/bin/test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=$BOARD $DUT_IP --args "image=$IMAGE_FILE firmware_update=1" f:.*RecoveryButton/control.normal`
 
 Run the entire faft_bios suite
 
-- `$ /usr/bin/test_that --board=$BOARD $DUT_IP suite:faft_bios`
+- `$ /usr/bin/test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=$BOARD $DUT_IP suite:faft_bios`
 
 Run the entire faft_ec suite
 
-- `$ /usr/bin/test_that --board=$BOARD $DUT_IP suite:faft_ec`
+- `$ /usr/bin/test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=$BOARD $DUT_IP suite:faft_ec`
 
 Run the entire faft_pd suite
 
-- `$ /usr/bin/test_that --board=$BOARD $DUT_IP suite:faft_pd`
+- `$ /usr/bin/test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=$BOARD $DUT_IP suite:faft_pd`
 
 To run servod in a different host, specify the servo_host and servo_port arguments.
 
-- `$ /usr/bin/test_that --board=$BOARD $DUT_IP --args "servo_host=$SERVO_HOST servo_port=$SERVO_PORT" suite:faft_lv1`
+- `$ /usr/bin/test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=$BOARD $DUT_IP --args "servo_host=$SERVO_HOST servo_port=$SERVO_PORT" suite:faft_ec`
 
 To run multiple servo boards on the same servo host (labstation), use serial and port number.
 
 - `$ sudo servod --board=$BOARD --port $port_number --serial $servo_serial_number`
-- `$ /usr/bin/test_that --board=$BOARD $DUT_IP --args "servo_host=localhost servo_port=$port_number faft_iterations=5000" f:.*firmware_ConsecutiveBoot/control`
+- `$ /usr/bin/test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=$BOARD $DUT_IP --args "servo_host=localhost servo_port=$port_number faft_iterations=5000" f:.*firmware_ConsecutiveBoot/control`
+
+### Running Against DUTs With Tunnelled SSH
+
+If you have ssh tunnels set up for your DUT and servo host (for example, via
+[SSH watcher](https://chromium.googlesource.com/chromiumos/platform/dev-util/+/HEAD/contrib/sshwatcher)),
+the syntax (assuming your DUT's ssh port is tunnelled to 2222, your servo
+host's ssh port is tunnelled to 2223, and servod is listening on port
+9901 on your servo host) for running tests is:
+
+- `$ test_that localhost:2222 --args="servo_host=localhost servo_host_ssh_port=2223 servo_port=9901 use_icmp=false" $TESTS`
+- `$ tast run -build=false -var=servo=127.0.0.1:9901:ssh:2223 127.0.0.1:2222  $TESTS`
+
+Note that for Tast, you will likely need to start servod manually.  The Tast
+invocation is a bit unintuitive: the servo port in the first port
+reference is the real servo port on the servo host, not the redirected one,
+because Tast ssh's to the servo host and tunnels its own port.  If you don't
+need to run commands on the servo host, you can also use
+`servo=localhost:${LOCAL_SERVO_PORT}:nossh`.
+
+## Running FAFT on a new kernel {#faft-kernel-next}
+
+The lab hosts shown in go/cros-testing-kernelnext provide a static environment
+for executing FAFT continuously; the recommended, sustainable approach is to
+use these DUTs for kernel-next FAFT execution.
+
+Local execution via go/faft-running may still be required to debug layers of
+accumulated problems on boards where end-to-end integration tests lack
+effective continuous execution. Install a kernelnext image onto the test USB
+stick and ensure that a kernelnext image is also installed on the DUT prior
+to running FAFT. The test_that commands used to run tests on a DUT with a
+kernelnext OS are the same as usual.
+
+The key point is to ensure that the USB and DUT contain a kernelnext image.
 
 ## Frequently Asked Questions (FAQ) {#faq}
 
@@ -267,7 +315,7 @@
   powerwash.
 
   It is usually caused by the stateful filesystem becoming corrupted, since
-  Chrome OS performs a powerwash instead of running `fsck` like a standard
+  ChromeOS performs a powerwash instead of running `fsck` like a standard
   Linux distribution would.
 
 Q: What causes filesystem corruption?
@@ -288,14 +336,18 @@
 
 - A: When running tests with a Type-C servo, it is recommended to rerun a failure using the Type-A setup to do a fast check prior to digging deeper, i.e. before connecting a USB analyzer or probing the signals.
 
-[FAFT suite]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/master/server/site_tests/
-[servo]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/README.md#Power-Measurement
-[servo v2]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_v2.md
-[servo v4]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_v4.md
-[servo micro]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_micro.md
-[servo v4 Type-C]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_v4.md#Type_C-Version
-[stateful partition is too small]: https://crrev.com/c/1935408
-[FAFT]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/master/docs/faft-design-doc.md
-[FAFT framework]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/master/docs/faft-code.md
-[servod]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servod.md
-[test that]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/master/docs/test-that.md
+Q: How can I obtain a device for a local FAFT execution?
+
+- A: The lab is a good source of devices for FAFT per go/cros-testing-kernelnext. If DUTs are not available or cannot be repaired by the lab team, request a DUT for development via go/hwrequest.
+
+[FAFT suite]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/main/server/site_tests/
+[servo]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/main/README.md#Power-Measurement
+[servo v2]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/main/docs/servo_v2.md
+[servo v4]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/main/docs/servo_v4.md
+[servo micro]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/main/docs/servo_micro.md
+[servo v4 Type-C]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/main/docs/servo_v4.md#Type_C-Version
+[stateful partition is too small]: https://crrev.com/c/1935408
+[FAFT]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/docs/faft-design-doc.md
+[FAFT framework]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/docs/faft-code.md
+[servod]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/main/docs/servod.md
+[test that]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/docs/test-that.md
diff --git a/docs/faft-links.md b/docs/faft-links.md
new file mode 100644
index 0000000..21e85d8
--- /dev/null
+++ b/docs/faft-links.md
@@ -0,0 +1,59 @@
+# FAFT Links
+
+_Self-link: [go/faft-links]_
+
+**FAFT**, short for "Fully Automated Firmware Tests", refers to the automated
+firmware end-to-end tests written for ChromiumOS.
+
+FAFT tests were originally written for the remote test driver [Tauto]. There is
+a 2021 initiative to convert FAFT tests to another remote driver, [Tast]. That
+initiative is called [FAFT2Tast]. All new tests should be written in [Tast].
+
+[Tauto]: https://chromium.googlesource.com/chromiumos/third_party/autotest/
+[Tast]: https://chromium.googlesource.com/chromiumos/platform/tast/
+[FAFT2Tast]: https://goto.google.com/faft2tast-overview
+
+## FAFT related links
+
+*Note:* Go links require access to the Google intranet.
+
+Link name           | Go link              | Source location
+------------------- | -------------------- | ---------------
+FAFT Links          | [go/faft-links]      | [docs/faft-links.md]
+FAFT Running manual | [go/faft-running]    | [docs/faft-how-to-run-doc.md]
+FAFT PD             | [go/faft-pd]         | [docs/faft-pd.md]
+FAFT for bringup    | [go/faft-bringup]    | tast-tests/src/chromiumos/tast/remote/firmware/bringup.md
+Tast FAFT Codelab   | [go/tast-faft-codelab] | tast-tests/src/chromiumos/tast/remote/firmware/codelab/README.md
+FAFT Code overview (deprecated)  | [go/faft-code]       | [docs/faft-code.md]
+FAFT Design Doc    | [go/faft-design-doc] | [docs/faft-design-doc.md]
+
+[go/faft-links]: https://goto.google.com/faft-links
+[docs/faft-links.md]: faft-links.md
+
+[go/faft-design-doc]: https://goto.google.com/faft-design-doc
+[docs/faft-design-doc.md]: faft-design-doc.md
+
+[go/faft-pd]: https://goto.google.com/faft-pd
+[docs/faft-pd.md]: faft-pd.md
+
+[go/faft-running]: https://goto.google.com/faft-running
+[docs/faft-how-to-run-doc.md]: faft-how-to-run-doc.md
+
+[go/faft-code]: https://goto.google.com/faft-code
+[docs/faft-code.md]: faft-code.md
+
+[go/tast-faft-codelab]: https://chromium.googlesource.com/chromiumos/platform/tast-tests/+/HEAD/src/chromiumos/tast/remote/firmware/codelab/README.md
+[go/faft-bringup]: https://chromium.googlesource.com/chromiumos/platform/tast-tests/+/HEAD/src/chromiumos/tast/remote/firmware/bringup.md
+
+## FAFT Users Chat
+
+There is a Google Chat room for FAFT users.
+
+* Go-link: [go/faft-users-chat]
+* External-facing link: https://chat.google.com/room/AAAAsHQFTo8
+
+If you are unable to access the chatroom via these links, please get in touch
+with the ChromeOS Firmware Engprod team. This will definitely happen if you
+don't have an @google.com email address.
+
+[go/faft-users-chat]: https://goto.google.com/faft-users-chat
diff --git a/docs/faft-pd.md b/docs/faft-pd.md
index 6f08650..80cc3cc 100644
--- a/docs/faft-pd.md
+++ b/docs/faft-pd.md
@@ -1,7 +1,9 @@
 # PD FAFT
 
-PD FAFT is another set of firmware tests (FAFT), which targets testing USB-C and
-PD (Power Delivery) functionalities.
+_Self-link: [go/faft-pd](https://goto.google.com/faft-pd)_
+
+PD FAFT is another set of firmware tests (FAFT), which targets testing USB-C,
+PD (Power Delivery) functionalities, and ULP (Ultra Low Power) mode.
 
 [TOC]
 
@@ -10,7 +12,7 @@
 The USB-C and PD stack is complex and involves multiple pieces of hardware/firmware:
 
 *   TCPM (USB Type-C Port Manager),
-    [integrated in EC, using Chrome EC firmware](https://chromium.googlesource.com/chromiumos/platform/ec/+/master/docs/usb-c.md)
+    [integrated in EC, using Chrome EC firmware](https://chromium.googlesource.com/chromiumos/platform/ec/+/main/docs/usb-c.md)
 *   TCPC (USB Type-C Port Controller), usually using proprietary firmware, in
     the form of
     *   dedicated chip, like ANX74xx, PS8xxx,
@@ -35,7 +37,7 @@
 a USB-C monitor, etc. The first version of PD FAFT uses
 [Plankton](https://www.chromium.org/chromium-os/plankton) as PDTester. The
 latest version uses
-[ServoV4](https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/master/docs/servo_v4.md)
+[ServoV4](https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/main/docs/servo_v4.md)
 as PDTester.
 
 ## Test details {#test-details}
@@ -88,6 +90,11 @@
 *   Receiving Source Capability messages from PDTester
 *   If PD Dual role mode is operational in the DUT
 
+firmware\_ECWakefromULP, checks:
+
+*   Ability to wake AP and EC from ULP mode by PB, LID.
+*   Ability to wake EC from ULP mode by AC.
+
 The above tests may have multiple subtests, the same test body but different
 prerequisite.
 
@@ -105,10 +112,10 @@
 ## How to run PD FAFT {#how-to-run-pd-faft}
 
 Hardware setup, check this
-[ServoV4 Type-C with servo micro setup](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/master/docs/faft-how-to-run-doc.md#servov4-typec-micro).
+[ServoV4 Type-C with servo micro setup](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/docs/faft-how-to-run-doc.md#servov4-typec-micro).
 
 Software setup, check this
-[Running Tests instructions](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/master/docs/faft-how-to-run-doc.md#faft-running-tests).
+[Running Tests instructions](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/docs/faft-how-to-run-doc.md#faft-running-tests).
 
 ## Known issues {#known-issues}
 
diff --git a/docs/loading-autotest-extension-on-device.md b/docs/loading-autotest-extension-on-device.md
index 00bc7dd..0cae8a9 100644
--- a/docs/loading-autotest-extension-on-device.md
+++ b/docs/loading-autotest-extension-on-device.md
@@ -1,7 +1,7 @@
 # Loading autotestPrivate extension on your device
 
 AutotestPrivate is an extension that exposes APIs that facilitate the
-interaction with a Chrome OS device during tests. This guide shows how to load
+interaction with a ChromeOS device during tests. This guide shows how to load
 the extension on your device.
 
 [TOC]
@@ -9,7 +9,7 @@
 
 ## Prerequisites
 
-You need a device running a Chrome OS test image and a Chromium OS
+You need a device running a ChromeOS test image and a Chromium OS
 checkout. To load a test image on your device follow [these steps] from the
 Simple Chrome guide. To grab a checkout of Chromium OS follow the
 [OS Developer Guide].
@@ -29,7 +29,7 @@
 
     `test0000`
 
-1.  To run Chrome OS with flags, first make usr partition writeable with:
+1.  To run ChromeOS with flags, first make usr partition writeable with:
 
     ```
     /usr/share/vboot/bin/make_dev_ssd.sh --remove_rootfs_verification --partitions 2
@@ -44,13 +44,13 @@
 
 ## Loading autotest extension on your device
 
-1.  Enter a Chrome OS chroot. Inside of your Chrome OS checkout directory run:
+1.  Enter a ChromeOS chroot. Inside of your ChromeOS checkout directory run:
 
     `cros_sdk`
 
-1.  From inside your Chrome OS chroot run:
+1.  From inside your ChromeOS chroot run:
 
-    `test_that $IP_ADDR -b $BOARD dummy_Pass`
+    `test_that $IP_ADDR -b $BOARD stub_Pass`
 
     This will install the autotestPrivate extension manifest to your device.
 
@@ -88,5 +88,5 @@
 > {isLoggedIn: false, isOwner: false, isReadyForPassword: false, isScreenLocked: false}
 ```
 
-[these steps]: https://chromium.googlesource.com/chromiumos/docs/+/master/simple_chrome_workflow.md#set-up-the-chrome-os-device
-[OS Developer Guide]: https://chromium.googlesource.com/chromiumos/docs/+/master/developer_guide.md#get-the-source
+[these steps]: https://chromium.googlesource.com/chromiumos/docs/+/main/simple_chrome_workflow.md#set-up-the-chrome-os-device
+[OS Developer Guide]: https://chromium.googlesource.com/chromiumos/docs/+/main/developer_guide.md#get-the-source
diff --git a/docs/test-droid.md b/docs/test-droid.md
index 70d75f0..d44d263 100644
--- a/docs/test-droid.md
+++ b/docs/test-droid.md
@@ -87,7 +87,7 @@
 applies).
 
 The easiest way to set this up is to use the
-[Chrome OS testing keys](https://www.chromium.org/chromium-os/testing/autotest-developer-faq/ssh-test-keys-setup).
+[ChromeOS testing keys](https://www.chromium.org/chromium-os/testing/autotest-developer-faq/ssh-test-keys-setup).
 Add to your SSH config an entry that looks like the following:
 
 ```
diff --git a/docs/test-that.md b/docs/test-that.md
index 57b1ced..b7682d1 100644
--- a/docs/test-that.md
+++ b/docs/test-that.md
@@ -1,6 +1,6 @@
 ## Introduction
 
-`test_that` is the supported mechanism to run autotests against Chrome OS
+`test_that` is the supported mechanism to run autotests against ChromeOS
 devices at your desk.  `test_that` replaces an older script, `run_remote_tests`.
 
 Features for testing a local device:
@@ -22,16 +22,16 @@
 
 ### Example uses (inside the chroot)
 
-Run the test(s) named dummy\_Pass:
+Run the test(s) named stub\_Pass:
 
 ```
-$ test_that -b ${board} ${host} dummy_Pass
+$ test_that -b ${board} ${host} stub_Pass
 ```
 
-Run the test(s) named dummy\_Pass.suspend:
+Run the test(s) named stub\_Pass.suspend:
 
 ```
-$ test_that -b ${board} ${host} dummy_Pass.suspend
+$ test_that -b ${board} ${host} stub_Pass.suspend
 ```
 
 Run the bvt-inline suite against dut:
@@ -49,10 +49,10 @@
 ```
 
 Run all tests whose control file filename matches the regular expression
-`^.*control.dummy$`:
+`^.*control.stub$`:
 
 ```
-$ test_that -b ${board} ${host} f:.*control.dummy
+$ test_that -b ${board} ${host} f:.*control.stub
 ```
 
 ### Running jobs in the lab
@@ -69,10 +69,10 @@
 For instance:
 
 ```
-$ test_that -b lumpy -i lumpy-paladin/R38-6009.0.0-rc4 :lab: dummy_Pass
+$ test_that -b lumpy -i lumpy-paladin/R38-6009.0.0-rc4 :lab: stub_Pass
 ```
 
-This will kick off a suite in the lab that consists of just 1 job, dummy\_Pass,
+This will kick off a suite in the lab that consists of just 1 job, stub\_Pass,
 to run in this case on board lumpy using the image
 lumpy-paladin/R38-6009.0.0-rc4. The lab's scheduler will take responsibility
 for finding a suitable set of hosts, provisioning them to the correct image,
@@ -83,7 +83,7 @@
 before:
 
 ```
-$ test_that -b lumpy -i ${latest_image} :lab: dummy_Pass dummy_Fail
+$ test_that -b lumpy -i ${latest_image} :lab: stub_Pass dummy_Fail
 $ test_that -b lumpy -i ${latest_image} :lab: e:login_.*
 ```
 
@@ -125,8 +125,8 @@
 For instance:
 ```
 $ test_that -b lumpy -i lumpy-paladin/R38-6009.0.0-rc4 --web 100.96.51.136 :lab:
-dummy_Pass
+stub_Pass
 ```
 
-This will kick off the dummy_Pass test on a lumpy device on the Autotest
+This will kick off the stub_Pass test on a lumpy device on the Autotest
 instance located at 100.96.51.136
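
The `f:` expressions above select tests by matching control file paths against a regular expression. As a rough illustration only (the helper name and use of `re.search` are assumptions, not `test_that`'s actual implementation), the selection amounts to:

```python
# Illustrative sketch of the f: filter semantics described above; not
# test_that's real code.
import re

def select_control_files(filter_expr, control_paths):
    """Return control file paths matching an 'f:<regex>' filter expression."""
    assert filter_expr.startswith('f:')
    pattern = re.compile(filter_expr[len('f:'):])
    return [p for p in control_paths if pattern.search(p)]

print(select_control_files('f:.*control.stub',
                           ['client/site_tests/stub_Pass/control.stub',
                            'client/site_tests/platform_BootPerf/control']))
# -> ['client/site_tests/stub_Pass/control.stub']
```
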
diff --git a/docs/user-doc.md b/docs/user-doc.md
index 2a7d992..8861d5d 100644
--- a/docs/user-doc.md
+++ b/docs/user-doc.md
@@ -4,15 +4,15 @@
 
 ## Useful documents
 
-[Autotest documentation on GitHub](https://github.com/autotest/autotest/wiki/AutotestApi):
+[Autotest documentation](https://autotest.readthedocs.io/en/latest/index.html):
 This would be a good read if you want to familiarize yourself with the basic
 Autotest concepts.
 
-[Gentoo Portage ebuild/eclass Information](http://www.gentoo.org/proj/en/devrel/handbook/handbook.xml?part=2):
-Getting to know the package build system we use.
+[Gentoo Portage ebuild/eclass Information](https://devmanual.gentoo.org/):
+Guides for getting to know the package build system we use.
 
-[ChromiumOS specific Portage FAQ](http://www.chromium.org/chromium-os/how-tos-and-troubleshooting/portage-build-faq):
-Learning something about the way we use portage.
+[ChromiumOS specific Portage FAQ](https://chromium.googlesource.com/chromiumos/docs/+/HEAD/portage/ebuild_faq.md):
+Learn something about the way we use portage.
 
 ## Autotest and ebuild workflow
 
@@ -244,7 +244,7 @@
 ### Running tests on a machine
 
 Autotests are run with a tool called
-[test_that](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/master/docs/test-that.md).
+[test_that](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/docs/test-that.md).
 
 ### Running tests in a VM - cros_run_test
 
@@ -253,7 +253,7 @@
 to test using the Smoke suite.
 
 If you want to run your tests in a VM (see
-[here](https://chromium.googlesource.com/chromiumos/docs/+/master/cros_vm.md#Run-an-autotest-in-the-VM)
+[here](https://chromium.googlesource.com/chromiumos/docs/+/main/cros_vm.md#Run-an-autotest-in-the-VM)
 
 -   `cros_run_test` starts up a VM and runs autotests using the port
 -   specified (defaults to 9222).  As an example:
@@ -264,9 +264,7 @@
 
 -   The emulator command line redirects localhost port 9222 to the emulated
     machine's port 22 to allow you to ssh into the emulator. For Chromium OS to
-    actually listen on this port you must append the `--test_image` parameter
-    when you run the `./image_to_vm.sh` script, or perhaps run the
-    `mod_image_for_test.sh` script instead.
+    actually listen on this port you must create & boot a test image.
 -   You can then run tests on the correct ssh port with something like
 
         $ test_that --board=x86-generic localhost:9222 'f:.*platform_BootPerf/control'
diff --git a/docs/wifi-basics-codelab.md b/docs/wifi-basics-codelab.md
index ceb825a..89ef299 100644
--- a/docs/wifi-basics-codelab.md
+++ b/docs/wifi-basics-codelab.md
@@ -77,7 +77,7 @@
 #### Using a local testing setup
 
 For a local test setup, you'll need a flashed DUT and two flashed Google-made
-wifi routers that run Chrome OS, all running special test images. The
+wifi routers that run ChromeOS, all running special test images. The
 Google-made routers can be either of the boards `whirlwind` or `gale`,
 and see [network_WiFi_UpdateRouter] for what images they should be running.
 In order for Autotest to determine the hostnames of your router and packet
diff --git a/docs/wificell.md b/docs/wificell.md
index 068ec67..2354536 100644
--- a/docs/wificell.md
+++ b/docs/wificell.md
@@ -3,7 +3,8 @@
 Most WiFi tests specify `DEPENDENCIES = 'wificell'` in their control file,
 which means they require not only an autotest server and a DUT, but also a
 special test-enabled Access Point (AP). Additionally, some tests require a
-packet capture (pcap) device or a signal attenuator.
+packet capture (pcap) device or a signal attenuator. For instructions on how
+to set up a wificell for testing, visit [Setting up a WiFi Test Cell](https://chromeos.google.com/partner/dlm/docs/component-qual/settinguptestcell.html).
 
 The basics of running a wificell autotest are the same as any other, except
 that autotest also needs to know where to find your test AP. For some
@@ -14,15 +15,15 @@
 test_that my-host network_WiFi_SimpleConnect.wifi_check5HT40
 ````
 
-This works for most of the Chrome OS lab WiFi cells, where we configure DNS to
+This works for most of the ChromeOS lab WiFi cells, where we configure DNS to
 pair a DUT at address `${HOST}` with its companion AP at an address
 `${HOST}-router`. See below for more info on addressing your test AP.
 
 ## What is a test AP?
 
 A test AP can come in various forms, but as of this writing, it is typically a
-Chrome OS based router / access point such as Whirlwind or Gale, running a
-testbed-ap variant of a Chrome OS test image in Developer Mode. We have
+ChromeOS based router / access point such as Whirlwind or Gale, running a
+testbed-ap variant of a ChromeOS test image in Developer Mode. We have
 previously supported other consumer routers, running OpenWRT. Setting up a test
 AP is not in the scope for this document.
 
@@ -42,7 +43,7 @@
 
 Autotest assumes that if you have a DUT at address `${HOST}`, then your AP is
 at an address `${HOST}-router` (see [dnsname\_mangler]). This is configured
-automatically by the lab team for most Chrome OS lab WiFi setups.
+automatically by the lab team for most ChromeOS lab WiFi setups.
 
 For custom/local testing without modifying your DNS server, one can accomplish
 this by adding entries to your `/etc/hosts` file. Alternatively, you can supply
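
As a minimal sketch of the `${HOST}`/`${HOST}-router` naming convention described above (the helper name and the port handling are illustrative, not the actual `dnsname_mangler` API):

```python
# Sketch only: derive the companion test-AP hostname from a DUT hostname,
# following the ${HOST} -> ${HOST}-router convention described above.
def router_hostname(dut_host):
    """Return the test AP hostname for a DUT, preserving any :port suffix."""
    host, sep, port = dut_host.partition(':')
    return host + '-router' + (sep + port if port else '')

print(router_hostname('my-host'))          # my-host-router
print(router_hostname('localhost:9222'))   # localhost-router:9222
```
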
diff --git a/frontend/afe/control_file.py b/frontend/afe/control_file.py
index cec2c35..ad6645c 100644
--- a/frontend/afe/control_file.py
+++ b/frontend/afe/control_file.py
@@ -158,9 +158,9 @@
     return prepend, append
 
 
-def _sanity_check_generate_control(is_server, client_control_file):
+def _check_generate_control(is_server, client_control_file):
     """
-    Sanity check some of the parameters to generate_control().
+    Check some of the parameters to generate_control().
 
     This exists as its own function so that site_control_file may call it as
     well from its own generate_control().
@@ -191,7 +191,7 @@
 
     @returns The control file text as a string.
     """
-    _sanity_check_generate_control(is_server=is_server,
+    _check_generate_control(is_server=is_server,
                                    client_control_file=client_control_file)
     control_file_text = EMPTY_TEMPLATE
     prepend, append = _get_profiler_commands(profilers, is_server, profile_only)
diff --git a/frontend/afe/direct_afe.py b/frontend/afe/direct_afe.py
index 996d4c1..1603707 100644
--- a/frontend/afe/direct_afe.py
+++ b/frontend/afe/direct_afe.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/frontend/afe/direct_afe_unittest.py b/frontend/afe/direct_afe_unittest.py
index e6c96fa..7535dd5 100755
--- a/frontend/afe/direct_afe_unittest.py
+++ b/frontend/afe/direct_afe_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #pylint: disable-msg=C0111
 import unittest
 import common
diff --git a/frontend/afe/frontend_test_utils.py b/frontend/afe/frontend_test_utils.py
index 10599ed..8cb3c91 100644
--- a/frontend/afe/frontend_test_utils.py
+++ b/frontend/afe/frontend_test_utils.py
@@ -45,10 +45,10 @@
         self.label3.save()
         self.hosts[0].labels.add(self.label1)
         self.hosts[1].labels.add(self.label2)
-        for hostnum in xrange(4,7):  # host5..host7
+        for hostnum in range(4, 7):  # host5..host7
             self.hosts[hostnum].labels.add(self.label6)
         self.hosts[6].labels.add(self.label7)
-        for hostnum in xrange(7,9):  # host8..host9
+        for hostnum in range(7, 9):  # host8..host9
             self.hosts[hostnum].labels.add(self.label6)
             self.hosts[hostnum].labels.add(self.label7)
 
diff --git a/frontend/afe/json_rpc/proxy.py b/frontend/afe/json_rpc/proxy.py
index 616eed8..0fbf9b1 100644
--- a/frontend/afe/json_rpc/proxy.py
+++ b/frontend/afe/json_rpc/proxy.py
@@ -1,4 +1,4 @@
-
+# Lint as: python2, python3
 """
   Copyright (c) 2007 Jan-Klaas Kollhof
 
@@ -19,11 +19,16 @@
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import os
 import socket
 import subprocess
-import urllib
-import urllib2
+from six.moves import urllib
+import six
+from six.moves import urllib
 from autotest_lib.client.common_lib import error as exceptions
 from autotest_lib.client.common_lib import global_config
 
@@ -123,11 +128,13 @@
         # Caller can pass in a minimum value of timeout to be used for urlopen
         # call. Otherwise, the default socket timeout will be used.
         min_rpc_timeout = kwargs.pop('min_rpc_timeout', None)
-        postdata = json_encoder_class().encode({'method': self.__serviceName,
-                                                'params': args + (kwargs,),
-                                                'id': 'jsonrpc'})
-        url_with_args = self.__serviceURL + '?' + urllib.urlencode({
-            'method': self.__serviceName})
+        postdata = json_encoder_class().encode({
+                'method': self.__serviceName,
+                'params': args + (kwargs, ),
+                'id': 'jsonrpc'
+        }).encode('utf-8')
+        url_with_args = self.__serviceURL + '?' + urllib.parse.urlencode(
+                {'method': self.__serviceName})
         if self.__use_sso_client:
             respdata = _sso_request(url_with_args, self.__headers, postdata,
                                     min_rpc_timeout)
@@ -135,6 +142,9 @@
             respdata = _raw_http_request(url_with_args, self.__headers,
                                          postdata, min_rpc_timeout)
 
+        if isinstance(respdata, bytes):
+            respdata = respdata.decode('utf-8')
+
         try:
             resp = decoder.JSONDecoder().decode(respdata)
         except ValueError:
@@ -155,13 +165,15 @@
 
     @returns: the response from the http request.
     """
-    request = urllib2.Request(url_with_args, data=postdata, headers=headers)
+    request = urllib.request.Request(url_with_args,
+                                     data=postdata,
+                                     headers=headers)
     default_timeout = socket.getdefaulttimeout()
     if not default_timeout:
         # If default timeout is None, socket will never time out.
-        return urllib2.urlopen(request).read()
+        return urllib.request.urlopen(request).read()
     else:
-        return urllib2.urlopen(
+        return urllib.request.urlopen(
                 request,
                 timeout=max(timeout, default_timeout),
         ).read()
@@ -177,7 +189,8 @@
 
     @returns: the response from the http request.
     """
-    headers_str = '; '.join(['%s: %s' % (k, v) for k, v in headers.iteritems()])
+    headers_str = '; '.join(
+            ['%s: %s' % (k, v) for k, v in six.iteritems(headers)])
     cmd = [
         'sso_client',
         '-url', url_with_args,
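
The hunks above port `proxy.py` from `urllib`/`urllib2` to `six.moves.urllib` and add explicit UTF-8 encoding of the POST body and decoding of the response, since Python 3's `urlopen` deals in bytes. A minimal standalone sketch of that pattern (the URL, headers, and timeout here are placeholders, not a real AFE endpoint):

```python
# Sketch of the six.moves.urllib request pattern adopted above: encode the
# JSON-RPC payload to bytes, POST it, and decode the bytes response before
# JSON-decoding it.
import json
from six.moves import urllib

def call_json_rpc(service_url, method, *args, **kwargs):
    """POST a JSON-RPC call and return the decoded response object."""
    postdata = json.dumps({
            'method': method,
            'params': args + (kwargs, ),
            'id': 'jsonrpc',
    }).encode('utf-8')                              # urlopen wants bytes on py3
    url = service_url + '?' + urllib.parse.urlencode({'method': method})
    request = urllib.request.Request(url, data=postdata,
                                     headers={'Content-Type': 'application/json'})
    respdata = urllib.request.urlopen(request, timeout=30).read()
    if isinstance(respdata, bytes):                 # bytes on py3, str on py2
        respdata = respdata.decode('utf-8')
    return json.loads(respdata)
```
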
diff --git a/frontend/afe/json_rpc/serviceHandler.py b/frontend/afe/json_rpc/serviceHandler.py
index 2a0fd85..cdf08f7 100644
--- a/frontend/afe/json_rpc/serviceHandler.py
+++ b/frontend/afe/json_rpc/serviceHandler.py
@@ -18,11 +18,17 @@
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import socket
 import traceback
 
 from json import decoder
 
+import six
+
 try:
     from django.core import exceptions as django_exceptions
     # Django JSON encoder uses the standard json encoder but can handle DateTime
@@ -44,13 +50,13 @@
     """
     if isinstance(value, float):
         return int(value)
-    elif isinstance(value, unicode):
+    elif isinstance(value, six.text_type):
         return str(value)
     elif isinstance(value, list):
         return [customConvertJson(item) for item in value]
     elif isinstance(value, dict):
         new_dict = {}
-        for key, val in value.iteritems():
+        for key, val in six.iteritems(value):
             new_key = customConvertJson(key)
             new_val = customConvertJson(val)
             new_dict[new_key] = new_val
@@ -107,7 +113,7 @@
         try:
             meth = self.findServiceEndpoint(methName)
             results['result'] = self.invokeServiceEndpoint(meth, args)
-        except Exception, err:
+        except Exception as err:
             results['err_traceback'] = traceback.format_exc()
             results['err'] = err
 
@@ -165,9 +171,9 @@
                          'id': result_dict['id'],
                          'error': result_dict['err'] }
             data = json_encoder.encode(json_dict)
-        except TypeError, e:
+        except TypeError as e:
             err_traceback = traceback.format_exc()
-            print err_traceback
+            print(err_traceback)
             err = {"name" : "JSONEncodeException",
                    "message" : "Result Object Not Serializable",
                    "traceback" : err_traceback}
diff --git a/frontend/afe/json_rpc/serviceHandler_unittest.py b/frontend/afe/json_rpc/serviceHandler_unittest.py
index 9b9c948..4c6c125 100755
--- a/frontend/afe/json_rpc/serviceHandler_unittest.py
+++ b/frontend/afe/json_rpc/serviceHandler_unittest.py
@@ -1,18 +1,26 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
+import json
 import unittest
-import common
+
 from autotest_lib.frontend import setup_django_environment
-import serviceHandler
+
+import common
+
+from . import serviceHandler
 
 
 class RpcMethodHolder(object):
+    """Fake rpc service for testing."""
+
     @staticmethod
     def service_1(x, y):
+        """Returns x + y"""
         return x + y
 
     @staticmethod
     def service_2(path):
+        """Returns the parts of the path."""
         return path.split('/')[-1]
 
 
@@ -47,6 +55,8 @@
 
 
 class TestServiceHandler(unittest.TestCase):
+    """Tests ServiceHandler using a fake service."""
+
     def setUp(self):
         holder = RpcMethodHolder()
         self.serviceHandler = serviceHandler.ServiceHandler(holder)
@@ -54,12 +64,12 @@
 
     def test_handleRequest1(self):
         response = self.serviceHandler.handleRequest(json_request1)
-        self.assertEquals(response, expected_response1)
+        self.assertEquals(json.loads(response), json.loads(expected_response1))
 
 
     def test_handleRequest2(self):
         response = self.serviceHandler.handleRequest(json_request2)
-        self.assertEquals(response, expected_response2)
+        self.assertEquals(json.loads(response), json.loads(expected_response2))
 
 
     def test_handleRequest3(self):
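
The assertion changes above compare `json.loads()` of the responses instead of the raw strings, which keeps the tests independent of key ordering and whitespace in the encoder's output. For example:

```python
# Two encodings of the same JSON object can differ as strings (key order,
# whitespace) yet be equal once parsed, which is what the tests now check.
import json

a = '{"id": "jsonrpc", "result": 7, "error": null}'
b = '{"result": 7, "error": null, "id": "jsonrpc"}'

assert a != b                           # raw strings differ
assert json.loads(a) == json.loads(b)   # decoded objects are equal
```
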
diff --git a/frontend/afe/management.py b/frontend/afe/management.py
index 3e063ad..699687e 100644
--- a/frontend/afe/management.py
+++ b/frontend/afe/management.py
@@ -32,18 +32,19 @@
                 codename=codename))
             if len(permissions) == 0:
                 if verbosity:
-                    print '  No permission ' + codename
+                    print('  No permission ' + codename)
                 continue
             for permission in permissions:
                 if permission not in have_permissions:
                     if verbosity:
-                        print '  Adding permission ' + codename
+                        print('  Adding permission ' + codename)
                     admin_group.permissions.add(permission)
     if verbosity:
         if created:
-            print 'Created group "%s"' % BASIC_ADMIN
+            print('Created group "%s"' % BASIC_ADMIN)
         else:
-            print 'Group "%s" already exists' % BASIC_ADMIN
+            print('Group "%s" already exists' % BASIC_ADMIN)
+
 
 if settings.AUTOTEST_CREATE_ADMIN_GROUPS:
     signals.post_syncdb.connect(create_admin_group, sender=models)
diff --git a/frontend/afe/moblab_rpc_interface.py b/frontend/afe/moblab_rpc_interface.py
deleted file mode 100644
index e48f297..0000000
--- a/frontend/afe/moblab_rpc_interface.py
+++ /dev/null
@@ -1,1084 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This module includes all moblab-related RPCs. These RPCs can only be run
-on moblab.
-"""
-
-import ConfigParser
-import common
-import logging
-import os
-import re
-import sys
-import shutil
-import socket
-import StringIO
-import subprocess
-import time
-import multiprocessing
-import ctypes
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib import utils
-from autotest_lib.frontend.afe import models
-from autotest_lib.frontend.afe import rpc_utils
-from autotest_lib.server import frontend
-from autotest_lib.server.hosts import moblab_host
-
-_CONFIG = global_config.global_config
-MOBLAB_BOTO_LOCATION = '/home/moblab/.boto'
-CROS_CACHEDIR = '/mnt/moblab/cros_cache_apache'
-
-# Google Cloud Storage bucket url regex pattern. The pattern is used to extract
-# the bucket name from the bucket URL. For example, "gs://image_bucket/google"
-# should result in a bucket name "image_bucket".
-GOOGLE_STORAGE_BUCKET_URL_PATTERN = re.compile(
-        r'gs://(?P<bucket>[a-zA-Z][a-zA-Z0-9-_]*)/?.*')
-
-# Contants used in Json RPC field names.
-_IMAGE_STORAGE_SERVER = 'image_storage_server'
-_GS_ACCESS_KEY_ID = 'gs_access_key_id'
-_GS_SECRET_ACCESS_KEY = 'gs_secret_access_key'
-_RESULT_STORAGE_SERVER = 'results_storage_server'
-_USE_EXISTING_BOTO_FILE = 'use_existing_boto_file'
-_CLOUD_NOTIFICATION_ENABLED = 'cloud_notification_enabled'
-_WIFI_AP_NAME = 'wifi_dut_ap_name'
-_WIFI_AP_PASS = 'wifi_dut_ap_pass'
-
-# Location where dhcp leases are stored.
-_DHCPD_LEASES = '/var/lib/dhcp/dhcpd.leases'
-
-# File where information about the current device is stored.
-_ETC_LSB_RELEASE = '/etc/lsb-release'
-
-# ChromeOS update engine client binary location
-_UPDATE_ENGINE_CLIENT = '/usr/bin/update_engine_client'
-
-# Set the suite timeout per suite in minutes
-# default is 24 hours
-_DEFAULT_SUITE_TIMEOUT_MINS = 1440
-_SUITE_TIMEOUT_MAP = {
-    'hardware_storagequal': 40320,
-    'hardware_storagequal_quick': 40320
-}
-
-# Full path to the correct gsutil command to run.
-class GsUtil:
-    """Helper class to find correct gsutil command."""
-    _GSUTIL_CMD = None
-
-    @classmethod
-    def get_gsutil_cmd(cls):
-      if not cls._GSUTIL_CMD:
-         cls._GSUTIL_CMD = 'gsutil'
-
-      return cls._GSUTIL_CMD
-
-
-class BucketPerformanceTestException(Exception):
-  """Exception thrown when the command to test the bucket performance fails."""
-  pass
-
-@rpc_utils.moblab_only
-def get_config_values():
-    """Returns all config values parsed from global and shadow configs.
-
-    Config values are grouped by sections, and each section is composed of
-    a list of name value pairs.
-    """
-    sections =_CONFIG.get_sections()
-    config_values = {}
-    for section in sections:
-        config_values[section] = _CONFIG.config.items(section)
-    return rpc_utils.prepare_for_serialization(config_values)
-
-
-def _write_config_file(config_file, config_values, overwrite=False):
-    """Writes out a configuration file.
-
-    @param config_file: The name of the configuration file.
-    @param config_values: The ConfigParser object.
-    @param ovewrite: Flag on if overwriting is allowed.
-    """
-    if not config_file:
-        raise error.RPCException('Empty config file name.')
-    if not overwrite and os.path.exists(config_file):
-        raise error.RPCException('Config file already exists.')
-
-    if config_values:
-        with open(config_file, 'w') as config_file:
-            config_values.write(config_file)
-
-
-def _read_original_config():
-    """Reads the orginal configuratino without shadow.
-
-    @return: A configuration object, see global_config_class.
-    """
-    original_config = global_config.global_config_class()
-    original_config.set_config_files(shadow_file='')
-    return original_config
-
-
-def _read_raw_config(config_file):
-    """Reads the raw configuration from a configuration file.
-
-    @param: config_file: The path of the configuration file.
-
-    @return: A ConfigParser object.
-    """
-    shadow_config = ConfigParser.RawConfigParser()
-    shadow_config.read(config_file)
-    return shadow_config
-
-
-def _get_shadow_config_from_partial_update(config_values):
-    """Finds out the new shadow configuration based on a partial update.
-
-    Since the input is only a partial config, we should not lose the config
-    data inside the existing shadow config file. We also need to distinguish
-    if the input config info overrides with a new value or reverts back to
-    an original value.
-
-    @param config_values: See get_moblab_settings().
-
-    @return: The new shadow configuration as ConfigParser object.
-    """
-    original_config = _read_original_config()
-    existing_shadow = _read_raw_config(_CONFIG.shadow_file)
-    for section, config_value_list in config_values.iteritems():
-        for key, value in config_value_list:
-            if original_config.get_config_value(section, key,
-                                                default='',
-                                                allow_blank=True) != value:
-                if not existing_shadow.has_section(section):
-                    existing_shadow.add_section(section)
-                existing_shadow.set(section, key, value)
-            elif existing_shadow.has_option(section, key):
-                existing_shadow.remove_option(section, key)
-    return existing_shadow
-
-
-def _update_partial_config(config_values):
-    """Updates the shadow configuration file with a partial config udpate.
-
-    @param config_values: See get_moblab_settings().
-    """
-    existing_config = _get_shadow_config_from_partial_update(config_values)
-    _write_config_file(_CONFIG.shadow_file, existing_config, True)
-
-
-@rpc_utils.moblab_only
-def update_config_handler(config_values):
-    """Update config values and override shadow config.
-
-    @param config_values: See get_moblab_settings().
-    """
-    original_config = _read_original_config()
-    new_shadow = ConfigParser.RawConfigParser()
-    for section, config_value_list in config_values.iteritems():
-        for key, value in config_value_list:
-            if original_config.get_config_value(section, key,
-                                                default='',
-                                                allow_blank=True) != value:
-                if not new_shadow.has_section(section):
-                    new_shadow.add_section(section)
-                new_shadow.set(section, key, value)
-
-    if not _CONFIG.shadow_file or not os.path.exists(_CONFIG.shadow_file):
-        raise error.RPCException('Shadow config file does not exist.')
-    _write_config_file(_CONFIG.shadow_file, new_shadow, True)
-
-    # TODO (sbasi) crbug.com/403916 - Remove the reboot command and
-    # instead restart the services that rely on the config values.
-    os.system('sudo reboot')
-
-
-@rpc_utils.moblab_only
-def reset_config_settings():
-    """Reset moblab shadow config."""
-    with open(_CONFIG.shadow_file, 'w') as config_file:
-        pass
-    os.system('sudo reboot')
-
-
-@rpc_utils.moblab_only
-def reboot_moblab():
-    """Simply reboot the device."""
-    os.system('sudo reboot')
-
-
-@rpc_utils.moblab_only
-def set_boto_key(boto_key):
-    """Update the boto_key file.
-
-    @param boto_key: File name of boto_key uploaded through handle_file_upload.
-    """
-    if not os.path.exists(boto_key):
-        raise error.RPCException('Boto key: %s does not exist!' % boto_key)
-    shutil.copyfile(boto_key, moblab_host.MOBLAB_BOTO_LOCATION)
-
-
-@rpc_utils.moblab_only
-def set_service_account_credential(service_account_filename):
-    """Update the service account credential file.
-
-    @param service_account_filename: Name of uploaded file through
-            handle_file_upload.
-    """
-    if not os.path.exists(service_account_filename):
-        raise error.RPCException(
-                'Service account file: %s does not exist!' %
-                service_account_filename)
-    shutil.copyfile(
-            service_account_filename,
-            moblab_host.MOBLAB_SERVICE_ACCOUNT_LOCATION)
-
-
-@rpc_utils.moblab_only
-def set_launch_control_key(launch_control_key):
-    """Update the launch_control_key file.
-
-    @param launch_control_key: File name of launch_control_key uploaded through
-            handle_file_upload.
-    """
-    if not os.path.exists(launch_control_key):
-        raise error.RPCException('Launch Control key: %s does not exist!' %
-                                 launch_control_key)
-    shutil.copyfile(launch_control_key,
-                    moblab_host.MOBLAB_LAUNCH_CONTROL_KEY_LOCATION)
-    # Restart the devserver service.
-    os.system('sudo restart moblab-devserver-init')
-
-
-###########Moblab Config Wizard RPCs #######################
-def _get_public_ip_address(socket_handle):
-    """Gets the public IP address.
-
-    Connects to Google DNS server using a socket and gets the preferred IP
-    address from the connection.
-
-    @param: socket_handle: a unix socket.
-
-    @return: public ip address as string.
-    """
-    try:
-        socket_handle.settimeout(1)
-        socket_handle.connect(('8.8.8.8', 53))
-        socket_name = socket_handle.getsockname()
-        if socket_name is not None:
-            logging.info('Got socket name from UDP socket.')
-            return socket_name[0]
-        logging.warn('Created UDP socket but with no socket_name.')
-    except socket.error:
-        logging.warn('Could not get socket name from UDP socket.')
-    return None
-
-
-def _get_network_info():
-    """Gets the network information.
-
-    TCP socket is used to test the connectivity. If there is no connectivity,
-    try to get the public IP with UDP socket.
-
-    @return: a tuple as (public_ip_address, connected_to_internet).
-    """
-    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    ip = _get_public_ip_address(s)
-    if ip is not None:
-        logging.info('Established TCP connection with well known server.')
-        return (ip, True)
-    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-    return (_get_public_ip_address(s), False)
-
-
-@rpc_utils.moblab_only
-def get_network_info():
-    """Returns the server ip addresses, and if the server connectivity.
-
-    The server ip addresses as an array of strings, and the connectivity as a
-    flag.
-    """
-    network_info = {}
-    info = _get_network_info()
-    if info[0] is not None:
-        network_info['server_ips'] = [info[0]]
-    network_info['is_connected'] = info[1]
-
-    return rpc_utils.prepare_for_serialization(network_info)
-
-
-# Gets the boto configuration.
-def _get_boto_config():
-    """Reads the boto configuration from the boto file.
-
-    @return: Boto configuration as ConfigParser object.
-    """
-    boto_config = ConfigParser.ConfigParser()
-    boto_config.read(MOBLAB_BOTO_LOCATION)
-    return boto_config
-
-
-@rpc_utils.moblab_only
-def get_cloud_storage_info():
-    """RPC handler to get the cloud storage access information.
-    """
-    cloud_storage_info = {}
-    value =_CONFIG.get_config_value('CROS', _IMAGE_STORAGE_SERVER)
-    if value is not None:
-        cloud_storage_info[_IMAGE_STORAGE_SERVER] = value
-    value = _CONFIG.get_config_value('CROS', _RESULT_STORAGE_SERVER,
-            default=None)
-    if value is not None:
-        cloud_storage_info[_RESULT_STORAGE_SERVER] = value
-
-    boto_config = _get_boto_config()
-    sections = boto_config.sections()
-
-    if sections:
-        cloud_storage_info[_USE_EXISTING_BOTO_FILE] = True
-    else:
-        cloud_storage_info[_USE_EXISTING_BOTO_FILE] = False
-    if 'Credentials' in sections:
-        options = boto_config.options('Credentials')
-        if _GS_ACCESS_KEY_ID in options:
-            value = boto_config.get('Credentials', _GS_ACCESS_KEY_ID)
-            cloud_storage_info[_GS_ACCESS_KEY_ID] = value
-        if _GS_SECRET_ACCESS_KEY in options:
-            value = boto_config.get('Credentials', _GS_SECRET_ACCESS_KEY)
-            cloud_storage_info[_GS_SECRET_ACCESS_KEY] = value
-
-    return rpc_utils.prepare_for_serialization(cloud_storage_info)
-
-
-def _get_bucket_name_from_url(bucket_url):
-    """Gets the bucket name from a bucket url.
-
-    @param: bucket_url: the bucket url string.
-    """
-    if bucket_url:
-        match = GOOGLE_STORAGE_BUCKET_URL_PATTERN.match(bucket_url)
-        if match:
-            return match.group('bucket')
-    return None
-
-
-def _is_valid_boto_key(key_id, key_secret, directory):
-  try:
-      _run_bucket_performance_test(key_id, key_secret, directory)
-  except BucketPerformanceTestException as e:
-       return(False, str(e))
-  return(True, None)
-
-
-def _validate_cloud_storage_info(cloud_storage_info):
-    """Checks if the cloud storage information is valid.
-
-    @param: cloud_storage_info: The JSON RPC object for cloud storage info.
-
-    @return: A tuple as (valid_boolean, details_string).
-    """
-    valid = True
-    details = None
-    if not cloud_storage_info[_USE_EXISTING_BOTO_FILE]:
-        key_id = cloud_storage_info[_GS_ACCESS_KEY_ID]
-        key_secret = cloud_storage_info[_GS_SECRET_ACCESS_KEY]
-        valid, details = _is_valid_boto_key(
-            key_id, key_secret, cloud_storage_info[_IMAGE_STORAGE_SERVER])
-    return (valid, details)
-
-
-def _create_operation_status_response(is_ok, details):
-    """Helper method to create a operation status reponse.
-
-    @param: is_ok: Boolean for if the operation is ok.
-    @param: details: A detailed string.
-
-    @return: A serialized JSON RPC object.
-    """
-    status_response = {'status_ok': is_ok}
-    if details:
-        status_response['status_details'] = details
-    return rpc_utils.prepare_for_serialization(status_response)
-
-
-@rpc_utils.moblab_only
-def validate_cloud_storage_info(cloud_storage_info):
-    """RPC handler to check if the cloud storage info is valid.
-
-    @param cloud_storage_info: The JSON RPC object for cloud storage info.
-    """
-    valid, details = _validate_cloud_storage_info(cloud_storage_info)
-    return _create_operation_status_response(valid, details)
-
-
-@rpc_utils.moblab_only
-def submit_wizard_config_info(cloud_storage_info, wifi_info):
-    """RPC handler to submit the cloud storage info.
-
-    @param cloud_storage_info: The JSON RPC object for cloud storage info.
-    @param wifi_info: The JSON RPC object for DUT wifi info.
-    """
-    config_update = {}
-    config_update['CROS'] = [
-        (_IMAGE_STORAGE_SERVER, cloud_storage_info[_IMAGE_STORAGE_SERVER]),
-        (_RESULT_STORAGE_SERVER, cloud_storage_info[_RESULT_STORAGE_SERVER])
-    ]
-    config_update['MOBLAB'] = [
-        (_WIFI_AP_NAME, wifi_info.get(_WIFI_AP_NAME) or ''),
-        (_WIFI_AP_PASS, wifi_info.get(_WIFI_AP_PASS) or '')
-    ]
-    _update_partial_config(config_update)
-
-    if not cloud_storage_info[_USE_EXISTING_BOTO_FILE]:
-        boto_config = ConfigParser.RawConfigParser()
-        boto_config.add_section('Credentials')
-        boto_config.set('Credentials', _GS_ACCESS_KEY_ID,
-                        cloud_storage_info[_GS_ACCESS_KEY_ID])
-        boto_config.set('Credentials', _GS_SECRET_ACCESS_KEY,
-                        cloud_storage_info[_GS_SECRET_ACCESS_KEY])
-        _write_config_file(MOBLAB_BOTO_LOCATION, boto_config, True)
-
-    _CONFIG.parse_config_file()
-    _enable_notification_using_credentials_in_bucket()
-    services = ['moblab-devserver-init',
-    'moblab-devserver-cleanup-init', 'moblab-gsoffloader_s-init',
-    'moblab-scheduler-init', 'moblab-gsoffloader-init']
-    cmd = 'export ATEST_RESULTS_DIR=/usr/local/autotest/results;'
-    cmd += 'sudo stop ' + ';sudo stop '.join(services)
-    cmd += ';sudo start ' + ';sudo start '.join(services)
-    cmd += ';sudo apache2 -k graceful'
-    logging.info(cmd)
-    try:
-        utils.run(cmd)
-    except error.CmdError as e:
-        logging.error(e)
-        # if all else fails reboot the device.
-        utils.run('sudo reboot')
-
-    return _create_operation_status_response(True, None)
-
-
-@rpc_utils.moblab_only
-def get_version_info():
-    """ RPC handler to get informaiton about the version of the moblab.
-
-    @return: A serialized JSON RPC object.
-    """
-    lines = open(_ETC_LSB_RELEASE).readlines()
-    version_response = {
-        x.split('=')[0]: x.split('=')[1] for x in lines if '=' in x}
-    version_response['MOBLAB_ID'] = utils.get_moblab_id();
-    version_response['MOBLAB_SERIAL_NUMBER'] = (
-        utils.get_moblab_serial_number())
-    _check_for_system_update()
-    update_status = _get_system_update_status()
-    version_response['MOBLAB_UPDATE_VERSION'] = update_status['NEW_VERSION']
-    version_response['MOBLAB_UPDATE_STATUS'] = update_status['CURRENT_OP']
-    version_response['MOBLAB_UPDATE_PROGRESS'] = update_status['PROGRESS']
-    return rpc_utils.prepare_for_serialization(version_response)
-
-
-@rpc_utils.moblab_only
-def update_moblab():
-    """ RPC call to update and reboot moblab """
-    _install_system_update()
-
-
-def _check_for_system_update():
-    """ Run the ChromeOS update client to check update server for an
-    update. If an update exists, the update client begins downloading it
-    in the background
-    """
-    # sudo is required to run the update client
-    subprocess.call(['sudo', _UPDATE_ENGINE_CLIENT, '--check_for_update'])
-    # wait for update engine to finish checking
-    tries = 0
-    while ('CHECKING_FOR_UPDATE' in _get_system_update_status()['CURRENT_OP']
-            and tries < 10):
-        time.sleep(.1)
-        tries = tries + 1
-
-def _get_system_update_status():
-    """ Run the ChromeOS update client to check status on a
-    pending/downloading update
-
-    @return: A dictionary containing {
-        PROGRESS: str containing percent progress of an update download
-        CURRENT_OP: str current status of the update engine,
-            ex UPDATE_STATUS_UPDATED_NEED_REBOOT
-        NEW_SIZE: str size of the update
-        NEW_VERSION: str version number for the update
-        LAST_CHECKED_TIME: str unix time stamp of the last update check
-    }
-    """
-    # sudo is required to run the update client
-    cmd_out = subprocess.check_output(
-        ['sudo' ,_UPDATE_ENGINE_CLIENT, '--status'])
-    split_lines = [x.split('=') for x in cmd_out.strip().split('\n')]
-    status = dict((key, val) for [key, val] in split_lines)
-    return status
-
-
-def _install_system_update():
-    """ Installs a ChromeOS update, will cause the system to reboot
-    """
-    # sudo is required to run the update client
-    # first run a blocking command to check, fetch, prepare an update
-    # then check if a reboot is needed
-    try:
-        subprocess.check_call(['sudo', _UPDATE_ENGINE_CLIENT, '--update'])
-        # --is_reboot_needed returns 0 if a reboot is required
-        subprocess.check_call(
-            ['sudo', _UPDATE_ENGINE_CLIENT, '--is_reboot_needed'])
-        subprocess.call(['sudo', _UPDATE_ENGINE_CLIENT, '--reboot'])
-
-    except subprocess.CalledProcessError as e:
-        update_error = subprocess.check_output(
-            ['sudo', _UPDATE_ENGINE_CLIENT, '--last_attempt_error'])
-        raise error.RPCException(update_error)
-
-
-@rpc_utils.moblab_only
-def get_connected_dut_info():
-    """ RPC handler to get informaiton about the DUTs connected to the moblab.
-
-    @return: A serialized JSON RPC object.
-    """
-    # Make a list of the connected DUT's
-    leases = _get_dhcp_dut_leases()
-
-
-    connected_duts = _test_all_dut_connections(leases)
-
-    # Get a list of the AFE configured DUT's
-    hosts = list(rpc_utils.get_host_query((), False, True, {}))
-    models.Host.objects.populate_relationships(hosts, models.Label,
-                                               'label_list')
-    configured_duts = {}
-    for host in hosts:
-        labels = [label.name for label in host.label_list]
-        labels.sort()
-        for host_attribute in host.hostattribute_set.all():
-              labels.append("ATTR:(%s=%s)" % (host_attribute.attribute,
-                                              host_attribute.value))
-        configured_duts[host.hostname] = ', '.join(labels)
-
-    return rpc_utils.prepare_for_serialization(
-            {'configured_duts': configured_duts,
-             'connected_duts': connected_duts})
-
-
-def _get_dhcp_dut_leases():
-     """ Extract information about connected duts from the dhcp server.
-
-     @return: A dict of ipaddress to mac address for each device connected.
-     """
-     lease_info = open(_DHCPD_LEASES).read()
-
-     leases = {}
-     for lease in lease_info.split('lease'):
-         if lease.find('binding state active;') != -1:
-             ipaddress = lease.split('\n')[0].strip(' {')
-             last_octet = int(ipaddress.split('.')[-1].strip())
-             if last_octet > 150:
-                 continue
-             mac_address_search = re.search('hardware ethernet (.*);', lease)
-             if mac_address_search:
-                 leases[ipaddress] = mac_address_search.group(1)
-     return leases
-
-def _test_all_dut_connections(leases):
-    """ Test ssh connection of all connected DUTs in parallel
-
-    @param leases: dict containing key value pairs of ip and mac address
-
-    @return: dict containing {
-        ip: {mac_address:[string], ssh_connection_ok:[boolean]}
-    }
-    """
-    # target function for parallel process
-    def _test_dut(ip, result):
-        result.value = _test_dut_ssh_connection(ip)
-
-    processes = []
-    for ip in leases:
-        # use a shared variable to get the ssh test result from child process
-        ssh_test_result = multiprocessing.Value(ctypes.c_bool)
-        # create a subprocess to test each DUT
-        process = multiprocessing.Process(
-            target=_test_dut, args=(ip, ssh_test_result))
-        process.start()
-
-        processes.append({
-            'ip': ip,
-            'ssh_test_result': ssh_test_result,
-            'process': process
-        })
-
-    connected_duts = {}
-    for process in processes:
-        process['process'].join()
-        ip = process['ip']
-        connected_duts[ip] = {
-            'mac_address': leases[ip],
-            'ssh_connection_ok': process['ssh_test_result'].value
-        }
-
-    return connected_duts
-
-
-def _test_dut_ssh_connection(ip):
-    """ Test if a connected dut is accessible via ssh.
-    The primary use case is to verify that the dut has a test image.
-
-    @return: True if the ssh connection is good False else
-    """
-    cmd = ('ssh -o ConnectTimeout=3 -o StrictHostKeyChecking=no '
-            "root@%s 'timeout 2 cat /etc/lsb-release'") % ip
-    try:
-        release = subprocess.check_output(cmd, shell=True)
-        return 'CHROMEOS_RELEASE_APPID' in release
-    except:
-        return False
-
-
-@rpc_utils.moblab_only
-def add_moblab_dut(ipaddress):
-    """ RPC handler to add a connected DUT to autotest.
-
-    @param ipaddress: IP address of the DUT.
-
-    @return: A string giving information about the status.
-    """
-    cmd = '/usr/local/autotest/cli/atest host create %s &' % ipaddress
-    subprocess.call(cmd, shell=True)
-    return (True, 'DUT %s added to Autotest' % ipaddress)
-
-
-@rpc_utils.moblab_only
-def remove_moblab_dut(ipaddress):
-    """ RPC handler to remove DUT entry from autotest.
-
-    @param ipaddress: IP address of the DUT.
-
-    @return: True if the command succeeds without an exception
-    """
-    models.Host.smart_get(ipaddress).delete()
-    return (True, 'DUT %s deleted from Autotest' % ipaddress)
-
-
-@rpc_utils.moblab_only
-def add_moblab_label(ipaddress, label_name):
-    """ RPC handler to add a label in autotest to a DUT entry.
-
-    @param ipaddress: IP address of the DUT.
-    @param label_name: The label name.
-
-    @return: A string giving information about the status.
-    """
-    # Try to create the label in case it does not already exist.
-    label = None
-    try:
-        label = models.Label.add_object(name=label_name)
-    except:
-        label = models.Label.smart_get(label_name)
-        if label.is_replaced_by_static():
-            raise error.UnmodifiableLabelException(
-                    'Failed to add label "%s" because it is a static label. '
-                    'Use go/chromeos-skylab-inventory-tools to add this '
-                    'label.' % label.name)
-
-    host_obj = models.Host.smart_get(ipaddress)
-    if label:
-        label.host_set.add(host_obj)
-        return (True, 'Added label %s to DUT %s' % (label_name, ipaddress))
-    return (False,
-            'Failed to add label %s to DUT %s' % (label_name, ipaddress))
-
-
-@rpc_utils.moblab_only
-def remove_moblab_label(ipaddress, label_name):
-    """ RPC handler to remove a label in autotest from a DUT entry.
-
-    @param ipaddress: IP address of the DUT.
-    @param label_name: The label name.
-
-    @return: A string giving information about the status.
-    """
-    host_obj = models.Host.smart_get(ipaddress)
-    label = models.Label.smart_get(label_name)
-    if label.is_replaced_by_static():
-        raise error.UnmodifiableLabelException(
-                    'Failed to remove label "%s" because it is a static label. '
-                    'Use go/chromeos-skylab-inventory-tools to remove this '
-                    'label.' % label.name)
-
-    label.host_set.remove(host_obj)
-    return (True, 'Removed label %s from DUT %s' % (label_name, ipaddress))
-
-
-@rpc_utils.moblab_only
-def set_host_attrib(ipaddress, attribute, value):
-    """ RPC handler to set an attribute of a host.
-
-    @param ipaddress: IP address of the DUT.
-    @param attribute: string name of attribute
-    @param value: string, or None to delete an attribute
-
-    @return: True if the command succeeds without an exception
-    """
-    host_obj = models.Host.smart_get(ipaddress)
-    host_obj.set_or_delete_attribute(attribute, value)
-    return (True, 'Updated attribute %s to %s on DUT %s' % (
-        attribute, value, ipaddress))
-
-
-@rpc_utils.moblab_only
-def delete_host_attrib(ipaddress, attribute):
-    """ RPC handler to delete an attribute of a host.
-
-    @param ipaddress: IP address of the DUT.
-    @param attribute: string name of attribute
-
-    @return: True if the command succeeds without an exception
-    """
-    host_obj = models.Host.smart_get(ipaddress)
-    host_obj.set_or_delete_attribute(attribute, None)
-    return (True, 'Deleted attribute %s from DUT %s' % (
-        attribute, ipaddress))
-
-
-def _get_connected_dut_labels(requested_label, only_first_label=True):
-    """ Query the DUT's attached to the moblab and return a filtered list
-        of labels.
-
-    @param requested_label:  the label name you are requesting.
-    @param only_first_label:  if the device has the same label name multiple
-                              times only return the first label value in the
-                              list.
-
-    @return: A de-duped list of requested dut labels attached to the moblab.
-    """
-    hosts = list(rpc_utils.get_host_query((), False, True, {}))
-    if not hosts:
-        return []
-    models.Host.objects.populate_relationships(hosts, models.Label,
-                                               'label_list')
-    labels = set()
-    for host in hosts:
-        for label in host.label_list:
-            if requested_label in label.name:
-                labels.add(label.name.replace(requested_label, ''))
-                if only_first_label:
-                    break
-    return list(labels)
-
-def _get_connected_dut_board_models():
-    """ Get the boards and their models of attached DUTs
-
-    @return: A de-duped list of dut board/model attached to the moblab
-    format: [
-        {
-            "board": "carl",
-            "model": "bruce"
-        },
-        {
-            "board": "veyron_minnie",
-            "model": "veyron_minnie"
-        }
-    ]
-    """
-    hosts = list(rpc_utils.get_host_query((), False, True, {}))
-    if not hosts:
-        return []
-    models.Host.objects.populate_relationships(hosts, models.Label,
-                                               'label_list')
-    model_board_map = dict()
-    for host in hosts:
-        model = ''
-        board = ''
-        for label in host.label_list:
-            if 'model:' in label.name:
-                model = label.name.replace('model:', '')
-            elif 'board:' in label.name:
-                board = label.name.replace('board:', '')
-        model_board_map[model] = board
-
-    board_models_list = []
-    for model in sorted(model_board_map.keys()):
-        board_models_list.append({
-            'model': model,
-            'board': model_board_map[model]
-        })
-    return board_models_list
-
-
-@rpc_utils.moblab_only
-def get_connected_boards():
-    """ RPC handler to get a list of the boards connected to the moblab.
-
-    @return: A de-duped list of board types attached to the moblab.
-    """
-    return _get_connected_dut_board_models()
-
-
-@rpc_utils.moblab_only
-def get_connected_pools():
-    """ RPC handler to get a list of the pools labels on the DUT's connected.
-
-    @return: A de-duped list of pool labels.
-    """
-    pools = _get_connected_dut_labels("pool:", False)
-    pools.sort()
-    return pools
-
-
-@rpc_utils.moblab_only
-def get_builds_for_board(board_name):
-    """ RPC handler to find the most recent builds for a board.
-
-
-    @param board_name: The name of a connected board.
-    @return: A list of string with the most recent builds for the latest
-             three milestones.
-    """
-    return _get_builds_for_in_directory(board_name + '-release',
-                                        milestone_limit=4)
-
-
-@rpc_utils.moblab_only
-def get_firmware_for_board(board_name):
-    """ RPC handler to find the most recent firmware for a board.
-
-
-    @param board_name: The name of a connected board.
-    @return: A list of strings with the most recent firmware builds for the
-             latest three milestones.
-    """
-    return _get_builds_for_in_directory(board_name + '-firmware')
-
-
-def _get_sortable_build_number(sort_key):
-    """ Converts a build number line cyan-release/R59-9460.27.0 into an integer.
-
-        To be able to sort a list of builds you need to convert the build number
-        into an integer so it can be compared correctly to other build.
-
-        cyan-release/R59-9460.27.0 =>  5909460027000
-
-        If the sort key is not recognised as a build number 1 will be returned.
-
-    @param sort_key: A string that represents a build number like
-                     cyan-release/R59-9460.27.0
-    @return: An integer that represents that build number or 1 if not recognised
-             as a build.
-    """
-    build_number = re.search('.*/R([0-9]*)-([0-9]*)\.([0-9]*)\.([0-9]*)',
-                             sort_key)
-    if not build_number or not len(build_number.groups()) == 4:
-      return 1
-    return int("%d%05d%03d%03d" % (int(build_number.group(1)),
-                                   int(build_number.group(2)),
-                                   int(build_number.group(3)),
-                                   int(build_number.group(4))))
-
-def _get_builds_for_in_directory(directory_name, milestone_limit=3,
-                                 build_limit=20):
-    """ Fetch the most recent builds for the last three milestones from gcs.
-
-
-    @param directory_name: The sub-directory under the configured GCS image
-                           storage bucket to search.
-
-
-    @return: A string list no longer than <milestone_limit> x <build_limit>
-             items, containing the most recent <build_limit> builds from the
-             last milestone_limit milestones.
-    """
-    output = StringIO.StringIO()
-    gs_image_location =_CONFIG.get_config_value('CROS', _IMAGE_STORAGE_SERVER)
-    try:
-        utils.run(GsUtil.get_gsutil_cmd(),
-                  args=('ls', gs_image_location + directory_name),
-                  stdout_tee=output)
-    except error.CmdError as e:
-        error_text = ('Failed to list builds from %s.\n'
-                'Did you configure your boto key? Try running the config '
-                'wizard again.\n\n%s') % ((gs_image_location + directory_name),
-                    e.result_obj.stderr)
-        raise error.RPCException(error_text)
-    lines = output.getvalue().split('\n')
-    output.close()
-    builds = [line.replace(gs_image_location,'').strip('/ ')
-              for line in lines if line != '']
-    build_matcher = re.compile(r'^.*\/R([0-9]*)-.*')
-    build_map = {}
-    for build in builds:
-        match = build_matcher.match(build)
-        if match:
-            milestone = match.group(1)
-            if milestone not in build_map:
-                build_map[milestone] = []
-            build_map[milestone].append(build)
-    milestones = build_map.keys()
-    milestones.sort()
-    milestones.reverse()
-    build_list = []
-    for milestone in milestones[:milestone_limit]:
-         builds = build_map[milestone]
-         builds.sort(key=_get_sortable_build_number)
-         builds.reverse()
-         build_list.extend(builds[:build_limit])
-    return build_list
-
-
-def _run_bucket_performance_test(key_id, key_secret, bucket_name,
-                                 test_size='1M', iterations='1',
-                                 result_file='/tmp/gsutil_perf.json'):
-    """Run a gsutil perfdiag on a supplied bucket and output the results"
-
-       @param key_id: boto key of the bucket to be accessed
-       @param key_secret: boto secret of the bucket to be accessed
-       @param bucket_name: bucket to be tested.
-       @param test_size: size of file to use in test, see gsutil perfdiag help.
-       @param iterations: number of times each test is run.
-       @param result_file: name of file to write results out to.
-
-       @return None
-       @raises BucketPerformanceTestException if the command fails.
-    """
-    try:
-      utils.run(GsUtil.get_gsutil_cmd(), args=(
-          '-o', 'Credentials:gs_access_key_id=%s' % key_id,
-          '-o', 'Credentials:gs_secret_access_key=%s' % key_secret,
-          'perfdiag', '-s', test_size, '-o', result_file,
-          '-n', iterations,
-          bucket_name))
-    except error.CmdError as e:
-       logging.error(e)
-       # Extract useful error from the stacktrace
-       errormsg = str(e)
-       start_error_pos = errormsg.find("<Error>")
-       end_error_pos = errormsg.find("</Error>", start_error_pos)
-       extracted_error_msg = errormsg[start_error_pos:end_error_pos]
-       raise BucketPerformanceTestException(
-           extracted_error_msg if extracted_error_msg else errormsg)
-    # TODO(haddowk) send the results to the cloud console when that feature is
-    # enabled.
-
-
-# TODO(haddowk) Change suite_args name to "test_filter_list" or similar. May
-# also need to make changes at MoblabRpcHelper.java
-@rpc_utils.moblab_only
-def run_suite(board, build, suite, model=None, ro_firmware=None,
-              rw_firmware=None, pool=None, suite_args=None, test_args=None,
-              bug_id=None, part_id=None):
-    """ RPC handler to run a test suite.
-
-    @param board: a board name connected to the moblab.
-    @param build: a build name of a build in the GCS.
-    @param suite: the name of a suite to run
-    @param model: a board model name connected to the moblab.
-    @param ro_firmware: Optional ro firmware build number to use.
-    @param rw_firmware: Optional rw firmware build number to use.
-    @param pool: Optional pool name to run the suite in.
-    @param suite_args: Arguments to be used in the suite control file.
-    @param test_args: '\n' delimited key=val pairs passed to test control file.
-    @param bug_id: Optional bug ID used for AVL qualification process.
-    @param part_id: Optional part ID used for AVL qualification
-    process.
-
-    @return: None
-    """
-    builds = {'cros-version': build}
-    # TODO(mattmallett b/92031054) Standardize bug id, part id passing for memory/storage qual
-    processed_suite_args = dict()
-    processed_test_args = dict()
-    if rw_firmware:
-        builds['fwrw-version'] = rw_firmware
-    if ro_firmware:
-        builds['fwro-version'] = ro_firmware
-    if suite_args:
-        processed_suite_args['tests'] = \
-            [s.strip() for s in suite_args.split(',')]
-    if bug_id:
-        processed_suite_args['bug_id'] = bug_id
-    if part_id:
-        processed_suite_args['part_id'] = part_id
-    processed_test_args['bug_id'] = bug_id or ''
-    processed_test_args['part_id'] = part_id or ''
-
-
-    # set processed_suite_args to None instead of empty dict when there is no
-    # argument in processed_suite_args
-    if len(processed_suite_args) == 0:
-        processed_suite_args = None
-
-    if test_args:
-        try:
-          processed_test_args['args'] = [test_args]
-          for line in test_args.split('\n'):
-              key, value = line.strip().split('=')
-              processed_test_args[key] = value
-        except:
-            raise error.RPCException('Could not parse test args.')
-
-
-    ap_name =_CONFIG.get_config_value('MOBLAB', _WIFI_AP_NAME, default=None)
-    processed_test_args['ssid'] = ap_name
-    ap_pass =_CONFIG.get_config_value('MOBLAB', _WIFI_AP_PASS, default='')
-    processed_test_args['wifipass'] = ap_pass
-
-    suite_timeout_mins = _SUITE_TIMEOUT_MAP.get(
-            suite, _DEFAULT_SUITE_TIMEOUT_MINS)
-
-    afe = frontend.AFE(user='moblab')
-    afe.run('create_suite_job', board=board, builds=builds, name=suite,
-            pool=pool, run_prod_code=False, test_source_build=build,
-            wait_for_results=True, suite_args=processed_suite_args,
-            test_args=processed_test_args, job_retry=True,
-            max_retries=sys.maxint, model=model,
-            timeout_mins=suite_timeout_mins,
-            max_runtime_mins=suite_timeout_mins)
-
-
-def _enable_notification_using_credentials_in_bucket():
-    """ Check and enable cloud notification if a credentials file exits.
-    @return: None
-    """
-    gs_image_location =_CONFIG.get_config_value('CROS', _IMAGE_STORAGE_SERVER)
-    try:
-        utils.run(GsUtil.get_gsutil_cmd(), args=(
-            'cp', gs_image_location + 'pubsub-key-do-not-delete.json', '/tmp'))
-        # This runs the copy as moblab user
-        shutil.copyfile('/tmp/pubsub-key-do-not-delete.json',
-                        moblab_host.MOBLAB_SERVICE_ACCOUNT_LOCATION)
-
-    except error.CmdError as e:
-        logging.error(e)
-    else:
-        logging.info('Enabling cloud notifications')
-        config_update = {}
-        config_update['CROS'] = [(_CLOUD_NOTIFICATION_ENABLED, True)]
-        _update_partial_config(config_update)
-
-
-@rpc_utils.moblab_only
-def get_dut_wifi_info():
-    """RPC handler to get the dut wifi AP information.
-    """
-    dut_wifi_info = {}
-    value =_CONFIG.get_config_value('MOBLAB', _WIFI_AP_NAME,
-        default=None)
-    if value is not None:
-        dut_wifi_info[_WIFI_AP_NAME] = value
-    value = _CONFIG.get_config_value('MOBLAB', _WIFI_AP_PASS,
-        default=None)
-    if value is not None:
-        dut_wifi_info[_WIFI_AP_PASS] = value
-    return rpc_utils.prepare_for_serialization(dut_wifi_info)
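The deleted run_suite RPC above documents test_args as newline-delimited key=val pairs and parses them into a dict before handing them to create_suite_job. Below is a minimal standalone sketch of that parsing convention; parse_test_args is a hypothetical helper for illustration, not part of the autotest codebase.

```python
# Minimal sketch of the newline-delimited "key=val" convention that the
# deleted run_suite RPC documented for test_args. parse_test_args is a
# hypothetical helper, not code from this repository.

def parse_test_args(test_args):
    """Parse 'key=val' pairs separated by newlines into a dict.

    Raises ValueError if a line is not a single key=val pair, which is
    roughly what the deleted parser turned into an RPCException.
    """
    parsed = {}
    for line in test_args.strip().split('\n'):
        key, value = line.strip().split('=')  # mirrors the deleted parser
        parsed[key] = value
    return parsed


if __name__ == '__main__':
    print(parse_test_args('bug_id=123\npart_id=ABC-1'))
    # {'bug_id': '123', 'part_id': 'ABC-1'}
```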
diff --git a/frontend/afe/moblab_rpc_interface_unittest.py b/frontend/afe/moblab_rpc_interface_unittest.py
deleted file mode 100644
index ba6d22c..0000000
--- a/frontend/afe/moblab_rpc_interface_unittest.py
+++ /dev/null
@@ -1,729 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for frontend/afe/moblab_rpc_interface.py."""
-
-import __builtin__
-# The boto module is only available/used in Moblab for validation of cloud
-# storage access. The module is not available in the test lab environment,
-# and the import error is handled.
-import ConfigParser
-import mox
-import StringIO
-import unittest
-
-import common
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib import lsbrelease_utils
-from autotest_lib.frontend import setup_django_environment
-from autotest_lib.frontend.afe import frontend_test_utils
-from autotest_lib.frontend.afe import moblab_rpc_interface
-from autotest_lib.frontend.afe import rpc_utils
-from autotest_lib.server import utils
-from autotest_lib.server.hosts import moblab_host
-from autotest_lib.client.common_lib import utils as common_lib_utils
-
-
-class MoblabRpcInterfaceTest(mox.MoxTestBase,
-                             frontend_test_utils.FrontendTestMixin):
-    """Unit tests for functions in moblab_rpc_interface.py."""
-
-    def setUp(self):
-        super(MoblabRpcInterfaceTest, self).setUp()
-        self._frontend_common_setup(fill_data=False)
-
-
-    def tearDown(self):
-        self._frontend_common_teardown()
-
-
-    def setIsMoblab(self, is_moblab):
-        """Set utils.is_moblab result.
-
-        @param is_moblab: Value to have utils.is_moblab to return.
-        """
-        self.mox.StubOutWithMock(utils, 'is_moblab')
-        utils.is_moblab().AndReturn(is_moblab)
-
-
-    def _mockReadFile(self, path, lines=[]):
-        """Mock out reading a file line by line.
-
-        @param path: Path of the file we are mock reading.
-        @param lines: lines of the mock file that will be returned when
-                      readLine() is called.
-        """
-        mockFile = self.mox.CreateMockAnything()
-        for line in lines:
-            mockFile.readline().AndReturn(line)
-        mockFile.readline()
-        mockFile.close()
-        open(path).AndReturn(mockFile)
-
-
-    def testMoblabOnlyDecorator(self):
-        """Ensure the moblab only decorator gates functions properly."""
-        self.setIsMoblab(False)
-        self.mox.ReplayAll()
-        self.assertRaises(error.RPCException,
-                          moblab_rpc_interface.get_config_values)
-
-
-    def testGetConfigValues(self):
-        """Ensure that the config object is properly converted to a dict."""
-        self.setIsMoblab(True)
-        config_mock = self.mox.CreateMockAnything()
-        moblab_rpc_interface._CONFIG = config_mock
-        config_mock.get_sections().AndReturn(['section1', 'section2'])
-        config_mock.config = self.mox.CreateMockAnything()
-        config_mock.config.items('section1').AndReturn([('item1', 'value1'),
-                                                        ('item2', 'value2')])
-        config_mock.config.items('section2').AndReturn([('item3', 'value3'),
-                                                        ('item4', 'value4')])
-
-        rpc_utils.prepare_for_serialization(
-            {'section1' : [('item1', 'value1'),
-                           ('item2', 'value2')],
-             'section2' : [('item3', 'value3'),
-                           ('item4', 'value4')]})
-        self.mox.ReplayAll()
-        moblab_rpc_interface.get_config_values()
-
-
-    def testUpdateConfig(self):
-        """Ensure that updating the config works as expected."""
-        self.setIsMoblab(True)
-        moblab_rpc_interface.os = self.mox.CreateMockAnything()
-
-        self.mox.StubOutWithMock(__builtin__, 'open')
-        self._mockReadFile(global_config.DEFAULT_CONFIG_FILE)
-
-        self.mox.StubOutWithMock(lsbrelease_utils, 'is_moblab')
-        lsbrelease_utils.is_moblab().AndReturn(True)
-
-        self._mockReadFile(global_config.DEFAULT_MOBLAB_FILE,
-                           ['[section1]', 'item1: value1'])
-
-        moblab_rpc_interface.os = self.mox.CreateMockAnything()
-        moblab_rpc_interface.os.path = self.mox.CreateMockAnything()
-        moblab_rpc_interface.os.path.exists(
-                moblab_rpc_interface._CONFIG.shadow_file).AndReturn(
-                True)
-        mockShadowFile = self.mox.CreateMockAnything()
-        mockShadowFileContents = StringIO.StringIO()
-        mockShadowFile.__enter__().AndReturn(mockShadowFileContents)
-        mockShadowFile.__exit__(mox.IgnoreArg(), mox.IgnoreArg(),
-                                mox.IgnoreArg())
-        open(moblab_rpc_interface._CONFIG.shadow_file,
-             'w').AndReturn(mockShadowFile)
-        moblab_rpc_interface.os.system('sudo reboot')
-
-        self.mox.ReplayAll()
-        moblab_rpc_interface.update_config_handler(
-                {'section1' : [('item1', 'value1'),
-                               ('item2', 'value2')],
-                 'section2' : [('item3', 'value3'),
-                               ('item4', 'value4')]})
-
-        # item1 should not be in the new shadow config as its updated value
-        # matches the original config's value.
-        self.assertEquals(
-                mockShadowFileContents.getvalue(),
-                '[section2]\nitem3 = value3\nitem4 = value4\n\n'
-                '[section1]\nitem2 = value2\n\n')
-
-
-    def testResetConfig(self):
-        """Ensure that reset opens the shadow_config file for writing."""
-        self.setIsMoblab(True)
-        config_mock = self.mox.CreateMockAnything()
-        moblab_rpc_interface._CONFIG = config_mock
-        config_mock.shadow_file = 'shadow_config.ini'
-        self.mox.StubOutWithMock(__builtin__, 'open')
-        mockFile = self.mox.CreateMockAnything()
-        file_contents = self.mox.CreateMockAnything()
-        mockFile.__enter__().AndReturn(file_contents)
-        mockFile.__exit__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
-        open(config_mock.shadow_file, 'w').AndReturn(mockFile)
-        moblab_rpc_interface.os = self.mox.CreateMockAnything()
-        moblab_rpc_interface.os.system('sudo reboot')
-        self.mox.ReplayAll()
-        moblab_rpc_interface.reset_config_settings()
-
-
-    def testSetLaunchControlKey(self):
-        """Ensure that the Launch Control key path supplied is copied correctly.
-        """
-        self.setIsMoblab(True)
-        launch_control_key = '/tmp/launch_control'
-        moblab_rpc_interface.os = self.mox.CreateMockAnything()
-        moblab_rpc_interface.os.path = self.mox.CreateMockAnything()
-        moblab_rpc_interface.os.path.exists(launch_control_key).AndReturn(
-                True)
-        moblab_rpc_interface.shutil = self.mox.CreateMockAnything()
-        moblab_rpc_interface.shutil.copyfile(
-                launch_control_key,
-                moblab_host.MOBLAB_LAUNCH_CONTROL_KEY_LOCATION)
-        moblab_rpc_interface.os.system('sudo restart moblab-devserver-init')
-        self.mox.ReplayAll()
-        moblab_rpc_interface.set_launch_control_key(launch_control_key)
-
-
-    def testGetNetworkInfo(self):
-        """Ensure the network info is properly converted to a dict."""
-        self.setIsMoblab(True)
-
-        self.mox.StubOutWithMock(moblab_rpc_interface, '_get_network_info')
-        moblab_rpc_interface._get_network_info().AndReturn(('10.0.0.1', True))
-        self.mox.StubOutWithMock(rpc_utils, 'prepare_for_serialization')
-
-        rpc_utils.prepare_for_serialization(
-               {'is_connected': True, 'server_ips': ['10.0.0.1']})
-        self.mox.ReplayAll()
-        moblab_rpc_interface.get_network_info()
-        self.mox.VerifyAll()
-
-
-    def testGetNetworkInfoWithNoIp(self):
-        """Queries network info with no public IP address."""
-        self.setIsMoblab(True)
-
-        self.mox.StubOutWithMock(moblab_rpc_interface, '_get_network_info')
-        moblab_rpc_interface._get_network_info().AndReturn((None, False))
-        self.mox.StubOutWithMock(rpc_utils, 'prepare_for_serialization')
-
-        rpc_utils.prepare_for_serialization(
-               {'is_connected': False})
-        self.mox.ReplayAll()
-        moblab_rpc_interface.get_network_info()
-        self.mox.VerifyAll()
-
-
-    def testGetNetworkInfoWithNoConnectivity(self):
-        """Queries network info with public IP address but no connectivity."""
-        self.setIsMoblab(True)
-
-        self.mox.StubOutWithMock(moblab_rpc_interface, '_get_network_info')
-        moblab_rpc_interface._get_network_info().AndReturn(('10.0.0.1', False))
-        self.mox.StubOutWithMock(rpc_utils, 'prepare_for_serialization')
-
-        rpc_utils.prepare_for_serialization(
-               {'is_connected': False, 'server_ips': ['10.0.0.1']})
-        self.mox.ReplayAll()
-        moblab_rpc_interface.get_network_info()
-        self.mox.VerifyAll()
-
-
-    def testGetCloudStorageInfo(self):
-        """Ensure the cloud storage info is properly converted to a dict."""
-        self.setIsMoblab(True)
-        config_mock = self.mox.CreateMockAnything()
-        moblab_rpc_interface._CONFIG = config_mock
-        config_mock.get_config_value(
-            'CROS', 'image_storage_server').AndReturn('gs://bucket1')
-        config_mock.get_config_value(
-            'CROS', 'results_storage_server', default=None).AndReturn(
-                    'gs://bucket2')
-        self.mox.StubOutWithMock(moblab_rpc_interface, '_get_boto_config')
-        moblab_rpc_interface._get_boto_config().AndReturn(config_mock)
-        config_mock.sections().AndReturn(['Credentials', 'b'])
-        config_mock.options('Credentials').AndReturn(
-            ['gs_access_key_id', 'gs_secret_access_key'])
-        config_mock.get(
-            'Credentials', 'gs_access_key_id').AndReturn('key')
-        config_mock.get(
-            'Credentials', 'gs_secret_access_key').AndReturn('secret')
-        rpc_utils.prepare_for_serialization(
-                {
-                    'gs_access_key_id': 'key',
-                    'gs_secret_access_key' : 'secret',
-                    'use_existing_boto_file': True,
-                    'image_storage_server' : 'gs://bucket1',
-                    'results_storage_server' : 'gs://bucket2'
-                })
-        self.mox.ReplayAll()
-        moblab_rpc_interface.get_cloud_storage_info()
-        self.mox.VerifyAll()
-
-
-    def testValidateCloudStorageInfo(self):
-        """ Ensure the cloud storage info validation flow."""
-        self.setIsMoblab(True)
-        cloud_storage_info = {
-            'use_existing_boto_file': False,
-            'gs_access_key_id': 'key',
-            'gs_secret_access_key': 'secret',
-            'image_storage_server': 'gs://bucket1',
-            'results_storage_server': 'gs://bucket2'}
-        self.mox.StubOutWithMock(moblab_rpc_interface,
-            '_run_bucket_performance_test')
-        moblab_rpc_interface._run_bucket_performance_test(
-            'key', 'secret', 'gs://bucket1').AndReturn((True, None))
-        rpc_utils.prepare_for_serialization({'status_ok': True })
-        self.mox.ReplayAll()
-        moblab_rpc_interface.validate_cloud_storage_info(cloud_storage_info)
-        self.mox.VerifyAll()
-
-
-    def testGetBucketNameFromUrl(self):
-        """Gets bucket name from bucket URL."""
-        self.assertEquals(
-            'bucket_name-123',
-            moblab_rpc_interface._get_bucket_name_from_url(
-                    'gs://bucket_name-123'))
-        self.assertEquals(
-            'bucket_name-123',
-            moblab_rpc_interface._get_bucket_name_from_url(
-                    'gs://bucket_name-123/'))
-        self.assertEquals(
-            'bucket_name-123',
-            moblab_rpc_interface._get_bucket_name_from_url(
-                    'gs://bucket_name-123/a/b/c'))
-        self.assertIsNone(moblab_rpc_interface._get_bucket_name_from_url(
-            'bucket_name-123/a/b/c'))
-
-
-    def testGetShadowConfigFromPartialUpdate(self):
-        """Tests getting shadow configuration based on partial upate."""
-        partial_config = {
-                'section1': [
-                    ('opt1', 'value1'),
-                    ('opt2', 'value2'),
-                    ('opt3', 'value3'),
-                    ('opt4', 'value4'),
-                    ]
-                }
-        shadow_config_str = "[section1]\nopt2 = value2_1\nopt4 = value4_1"
-        shadow_config = ConfigParser.ConfigParser()
-        shadow_config.readfp(StringIO.StringIO(shadow_config_str))
-        original_config = self.mox.CreateMockAnything()
-        self.mox.StubOutWithMock(moblab_rpc_interface, '_read_original_config')
-        self.mox.StubOutWithMock(moblab_rpc_interface, '_read_raw_config')
-        moblab_rpc_interface._read_original_config().AndReturn(original_config)
-        moblab_rpc_interface._read_raw_config(
-                moblab_rpc_interface._CONFIG.shadow_file).AndReturn(shadow_config)
-        original_config.get_config_value(
-                'section1', 'opt1',
-                allow_blank=True, default='').AndReturn('value1')
-        original_config.get_config_value(
-                'section1', 'opt2',
-                allow_blank=True, default='').AndReturn('value2')
-        original_config.get_config_value(
-                'section1', 'opt3',
-                allow_blank=True, default='').AndReturn('blah')
-        original_config.get_config_value(
-                'section1', 'opt4',
-                allow_blank=True, default='').AndReturn('blah')
-        self.mox.ReplayAll()
-        shadow_config = moblab_rpc_interface._get_shadow_config_from_partial_update(
-                partial_config)
-        # opt1 same as the original.
-        self.assertFalse(shadow_config.has_option('section1', 'opt1'))
-        # opt2 reverts back to original
-        self.assertFalse(shadow_config.has_option('section1', 'opt2'))
-        # opt3 is updated from original.
-        self.assertEquals('value3', shadow_config.get('section1', 'opt3'))
-        # opt3 in shadow but updated again.
-        self.assertEquals('value4', shadow_config.get('section1', 'opt4'))
-        self.mox.VerifyAll()
-
-
-    def testGetShadowConfigFromPartialUpdateWithNewSection(self):
-        """
-        Test getting shadown configuration based on partial update with new section.
-        """
-        partial_config = {
-                'section2': [
-                    ('opt5', 'value5'),
-                    ('opt6', 'value6'),
-                    ],
-                }
-        shadow_config_str = "[section1]\nopt2 = value2_1\n"
-        shadow_config = ConfigParser.ConfigParser()
-        shadow_config.readfp(StringIO.StringIO(shadow_config_str))
-        original_config = self.mox.CreateMockAnything()
-        self.mox.StubOutWithMock(moblab_rpc_interface, '_read_original_config')
-        self.mox.StubOutWithMock(moblab_rpc_interface, '_read_raw_config')
-        moblab_rpc_interface._read_original_config().AndReturn(original_config)
-        moblab_rpc_interface._read_raw_config(
-            moblab_rpc_interface._CONFIG.shadow_file).AndReturn(shadow_config)
-        original_config.get_config_value(
-                'section2', 'opt5',
-                allow_blank=True, default='').AndReturn('value5')
-        original_config.get_config_value(
-                'section2', 'opt6',
-                allow_blank=True, default='').AndReturn('blah')
-        self.mox.ReplayAll()
-        shadow_config = moblab_rpc_interface._get_shadow_config_from_partial_update(
-                partial_config)
-        # opt2 is still in shadow
-        self.assertEquals('value2_1', shadow_config.get('section1', 'opt2'))
-        # opt5 is not changed.
-        self.assertFalse(shadow_config.has_option('section2', 'opt5'))
-        # opt6 is updated.
-        self.assertEquals('value6', shadow_config.get('section2', 'opt6'))
-        self.mox.VerifyAll()
-
-    def testGetBuildsForInDirectory(self):
-        config_mock = self.mox.CreateMockAnything()
-        moblab_rpc_interface._CONFIG = config_mock
-        config_mock.get_config_value(
-            'CROS', 'image_storage_server').AndReturn('gs://bucket1/')
-        self.mox.StubOutWithMock(common_lib_utils, 'run')
-        output = self.mox.CreateMockAnything()
-        self.mox.StubOutWithMock(StringIO, 'StringIO', use_mock_anything=True)
-        StringIO.StringIO().AndReturn(output)
-        output.getvalue().AndReturn(
-        """gs://bucket1/dummy/R53-8480.0.0/\ngs://bucket1/dummy/R53-8530.72.0/\n
-        gs://bucket1/dummy/R54-8712.0.0/\ngs://bucket1/dummy/R54-8717.0.0/\n
-        gs://bucket1/dummy/R55-8759.0.0/\n
-        gs://bucket1/dummy/R55-8760.0.0-b5849/\n
-        gs://bucket1/dummy/R56-8995.0.0/\ngs://bucket1/dummy/R56-9001.0.0/\n
-        gs://bucket1/dummy/R57-9202.66.0/\ngs://bucket1/dummy/R58-9331.0.0/\n
-        gs://bucket1/dummy/R58-9334.15.0/\ngs://bucket1/dummy/R58-9334.17.0/\n
-        gs://bucket1/dummy/R58-9334.18.0/\ngs://bucket1/dummy/R58-9334.19.0/\n
-        gs://bucket1/dummy/R58-9334.22.0/\ngs://bucket1/dummy/R58-9334.28.0/\n
-        gs://bucket1/dummy/R58-9334.3.0/\ngs://bucket1/dummy/R58-9334.30.0/\n
-        gs://bucket1/dummy/R58-9334.36.0/\ngs://bucket1/dummy/R58-9334.55.0/\n
-        gs://bucket1/dummy/R58-9334.6.0/\ngs://bucket1/dummy/R58-9334.7.0/\n
-        gs://bucket1/dummy/R58-9334.9.0/\ngs://bucket1/dummy/R59-9346.0.0/\n
-        gs://bucket1/dummy/R59-9372.0.0/\ngs://bucket1/dummy/R59-9387.0.0/\n
-        gs://bucket1/dummy/R59-9436.0.0/\ngs://bucket1/dummy/R59-9452.0.0/\n
-        gs://bucket1/dummy/R59-9453.0.0/\ngs://bucket1/dummy/R59-9455.0.0/\n
-        gs://bucket1/dummy/R59-9460.0.0/\ngs://bucket1/dummy/R59-9460.11.0/\n
-        gs://bucket1/dummy/R59-9460.16.0/\ngs://bucket1/dummy/R59-9460.25.0/\n
-        gs://bucket1/dummy/R59-9460.8.0/\ngs://bucket1/dummy/R59-9460.9.0/\n
-        gs://bucket1/dummy/R60-9472.0.0/\ngs://bucket1/dummy/R60-9491.0.0/\n
-        gs://bucket1/dummy/R60-9492.0.0/\ngs://bucket1/dummy/R60-9497.0.0/\n
-        gs://bucket1/dummy/R60-9500.0.0/""")
-
-        output.close()
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.GsUtil, 'get_gsutil_cmd')
-        moblab_rpc_interface.GsUtil.get_gsutil_cmd().AndReturn(
-            '/path/to/gsutil')
-
-        common_lib_utils.run('/path/to/gsutil',
-                             args=('ls', 'gs://bucket1/dummy'),
-                             stdout_tee=mox.IgnoreArg()).AndReturn(output)
-        self.mox.ReplayAll()
-        expected_results = ['dummy/R60-9500.0.0', 'dummy/R60-9497.0.0',
-            'dummy/R60-9492.0.0', 'dummy/R60-9491.0.0', 'dummy/R60-9472.0.0',
-            'dummy/R59-9460.25.0', 'dummy/R59-9460.16.0', 'dummy/R59-9460.11.0',
-            'dummy/R59-9460.9.0', 'dummy/R59-9460.8.0', 'dummy/R58-9334.55.0',
-            'dummy/R58-9334.36.0', 'dummy/R58-9334.30.0', 'dummy/R58-9334.28.0',
-            'dummy/R58-9334.22.0']
-        actual_results = moblab_rpc_interface._get_builds_for_in_directory(
-            "dummy",3, 5)
-        self.assertEquals(expected_results, actual_results)
-        self.mox.VerifyAll()
-
-    def testRunBucketPerformanceTestFail(self):
-        self.mox.StubOutWithMock(moblab_rpc_interface.GsUtil, 'get_gsutil_cmd')
-        moblab_rpc_interface.GsUtil.get_gsutil_cmd().AndReturn(
-            '/path/to/gsutil')
-        self.mox.StubOutWithMock(common_lib_utils, 'run')
-        common_lib_utils.run('/path/to/gsutil',
-                  args=(
-                  '-o', 'Credentials:gs_access_key_id=key',
-                  '-o', 'Credentials:gs_secret_access_key=secret',
-                  'perfdiag', '-s', '1K',
-                  '-o', 'testoutput',
-                  '-n', '10',
-                  'gs://bucket1')).AndRaise(
-            error.CmdError("fakecommand", common_lib_utils.CmdResult(),
-                           "xxxxxx<Error>yyyyyyyyyy</Error>"))
-
-        self.mox.ReplayAll()
-        self.assertRaisesRegexp(
-            moblab_rpc_interface.BucketPerformanceTestException,
-            '<Error>yyyyyyyyyy',
-            moblab_rpc_interface._run_bucket_performance_test,
-            'key', 'secret', 'gs://bucket1', '1K', '10', 'testoutput')
-        self.mox.VerifyAll()
-
-    def testEnableNotificationUsingCredentialsInBucketFail(self):
-        config_mock = self.mox.CreateMockAnything()
-        moblab_rpc_interface._CONFIG = config_mock
-        config_mock.get_config_value(
-            'CROS', 'image_storage_server').AndReturn('gs://bucket1/')
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.GsUtil, 'get_gsutil_cmd')
-        moblab_rpc_interface.GsUtil.get_gsutil_cmd().AndReturn(
-            '/path/to/gsutil')
-
-        self.mox.StubOutWithMock(common_lib_utils, 'run')
-        common_lib_utils.run('/path/to/gsutil',
-            args=('cp', 'gs://bucket1/pubsub-key-do-not-delete.json',
-            '/tmp')).AndRaise(
-                error.CmdError("fakecommand", common_lib_utils.CmdResult(), ""))
-        self.mox.ReplayAll()
-        moblab_rpc_interface._enable_notification_using_credentials_in_bucket()
-
-    def testEnableNotificationUsingCredentialsInBucketSuccess(self):
-        config_mock = self.mox.CreateMockAnything()
-        moblab_rpc_interface._CONFIG = config_mock
-        config_mock.get_config_value(
-            'CROS', 'image_storage_server').AndReturn('gs://bucket1/')
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.GsUtil, 'get_gsutil_cmd')
-        moblab_rpc_interface.GsUtil.get_gsutil_cmd().AndReturn(
-            '/path/to/gsutil')
-
-        self.mox.StubOutWithMock(common_lib_utils, 'run')
-        common_lib_utils.run('/path/to/gsutil',
-            args=('cp', 'gs://bucket1/pubsub-key-do-not-delete.json',
-            '/tmp'))
-        moblab_rpc_interface.shutil = self.mox.CreateMockAnything()
-        moblab_rpc_interface.shutil.copyfile(
-                '/tmp/pubsub-key-do-not-delete.json',
-                moblab_host.MOBLAB_SERVICE_ACCOUNT_LOCATION)
-        self.mox.StubOutWithMock(moblab_rpc_interface, '_update_partial_config')
-        moblab_rpc_interface._update_partial_config(
-            {'CROS': [(moblab_rpc_interface._CLOUD_NOTIFICATION_ENABLED, True)]}
-        )
-        self.mox.ReplayAll()
-        moblab_rpc_interface._enable_notification_using_credentials_in_bucket()
-
-    def testInstallSystemUpdate(self):
-        update_engine_client = moblab_rpc_interface._UPDATE_ENGINE_CLIENT
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.subprocess, 'check_call')
-        moblab_rpc_interface.subprocess.check_call(['sudo',
-                update_engine_client, '--update'])
-        moblab_rpc_interface.subprocess.check_call(['sudo',
-                update_engine_client, '--is_reboot_needed'])
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.subprocess, 'call')
-        moblab_rpc_interface.subprocess.call(['sudo', update_engine_client,
-                '--reboot'])
-
-        self.mox.ReplayAll()
-        moblab_rpc_interface._install_system_update()
-
-    def testInstallSystemUpdateError(self):
-        update_engine_client = moblab_rpc_interface._UPDATE_ENGINE_CLIENT
-
-        error_message = ('ERROR_CODE=37\n'
-            'ERROR_MESSAGE=ErrorCode::kOmahaErrorInHTTPResponse')
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.subprocess, 'check_call')
-        moblab_rpc_interface.subprocess.check_call(['sudo',
-                update_engine_client, '--update']).AndRaise(
-                    moblab_rpc_interface.subprocess.CalledProcessError(1,
-                        'sudo'))
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.subprocess,
-                'check_output')
-        moblab_rpc_interface.subprocess.check_output(['sudo',
-                update_engine_client, '--last_attempt_error']).AndReturn(
-                error_message)
-
-        self.mox.ReplayAll()
-        try:
-            moblab_rpc_interface._install_system_update()
-        except moblab_rpc_interface.error.RPCException as e:
-            self.assertEquals(str(e), error_message)
-
-
-    def testGetSystemUpdateStatus(self):
-        update_engine_client = moblab_rpc_interface._UPDATE_ENGINE_CLIENT
-        update_status = ('LAST_CHECKED_TIME=1516753795\n'
-                         'PROGRESS=0.220121\n'
-                         'CURRENT_OP=UPDATE_STATUS_DOWNLOADING\n'
-                         'NEW_VERSION=10032.89.0\n'
-                         'NEW_SIZE=782805733')
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.subprocess,
-                'check_output')
-        moblab_rpc_interface.subprocess.check_output(['sudo',
-                update_engine_client, '--status']).AndReturn(
-                        update_status)
-
-        self.mox.ReplayAll()
-        output = moblab_rpc_interface._get_system_update_status()
-
-        self.assertEquals(output['PROGRESS'], '0.220121')
-        self.assertEquals(output['CURRENT_OP'], 'UPDATE_STATUS_DOWNLOADING')
-        self.assertEquals(output['NEW_VERSION'], '10032.89.0')
-        self.assertEquals(output['NEW_SIZE'], '782805733')
-
-    def testCheckForSystemUpdate(self):
-        update_engine_client = moblab_rpc_interface._UPDATE_ENGINE_CLIENT
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.subprocess, 'call')
-        moblab_rpc_interface.subprocess.call(['sudo', update_engine_client,
-                '--check_for_update'])
-
-        self.mox.StubOutWithMock(moblab_rpc_interface,
-                '_get_system_update_status')
-        for i in range(0,4):
-            moblab_rpc_interface._get_system_update_status().AndReturn(
-                    dict({'CURRENT_OP': 'UPDATE_STATUS_CHECKING_FOR_UPDATE'})
-            )
-        moblab_rpc_interface._get_system_update_status().AndReturn(
-                dict({'CURRENT_OP': 'UPDATE_STATUS_DOWNLOADING'})
-        )
-        self.mox.ReplayAll()
-        moblab_rpc_interface._check_for_system_update()
-
-    def testGetConnectedDutBoardModels(self):
-        # setting up mocks for 2 duts with different boards and models
-        mock_minnie_labels = [
-            self.mox.CreateMockAnything(),
-            self.mox.CreateMockAnything(),
-        ]
-        mock_minnie_labels[0].name = 'board:veyron_minnie'
-        mock_minnie_labels[1].name = 'model:veyron_minnie'
-        mock_minnie = self.mox.CreateMockAnything()
-        mock_minnie.label_list = mock_minnie_labels
-
-        mock_bruce_labels = [
-            self.mox.CreateMockAnything(),
-            self.mox.CreateMockAnything()
-        ]
-        mock_bruce_labels[0].name = 'board:carl'
-        mock_bruce_labels[1].name = 'model:bruce'
-        mock_bruce = self.mox.CreateMockAnything()
-        mock_bruce.label_list = mock_bruce_labels
-        hosts = [mock_minnie, mock_bruce]
-
-        # stub out the host query calls
-        self.mox.StubOutWithMock(moblab_rpc_interface.rpc_utils,
-                'get_host_query')
-        moblab_rpc_interface.rpc_utils.get_host_query(
-                (), False, True, {}).AndReturn(hosts)
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.models.Host.objects,
-                'populate_relationships'),
-        moblab_rpc_interface.models.Host.objects.populate_relationships(hosts,
-                moblab_rpc_interface.models.Label, 'label_list')
-
-        expected = [{
-            'model': 'bruce',
-            'board': 'carl'
-        },
-        {
-            'model': 'veyron_minnie',
-            'board': 'veyron_minnie'
-        }]
-
-        self.mox.ReplayAll()
-        output = moblab_rpc_interface._get_connected_dut_board_models()
-        self.assertEquals(output, expected)
-        # test sorting
-        self.assertEquals(output[0]['model'], 'bruce')
-
-    def testAllDutConnections(self):
-        leases = {
-            '192.168.0.20': '3c:52:82:5f:15:20',
-            '192.168.0.30': '3c:52:82:5f:15:21'
-        }
-
-        # stub out all of the multiprocessing
-        mock_value = self.mox.CreateMockAnything()
-        mock_value.value = True
-        mock_process = self.mox.CreateMockAnything()
-
-        for key in leases:
-            mock_process.start()
-        for key in leases:
-            mock_process.join()
-
-        self.mox.StubOutWithMock(
-                moblab_rpc_interface, 'multiprocessing')
-
-        for key in leases:
-            moblab_rpc_interface.multiprocessing.Value(
-                    mox.IgnoreArg()).AndReturn(mock_value)
-            moblab_rpc_interface.multiprocessing.Process(
-                    target=mox.IgnoreArg(), args=mox.IgnoreArg()).AndReturn(
-                        mock_process)
-
-        self.mox.ReplayAll()
-
-        expected = {
-            '192.168.0.20': {
-                'mac_address': '3c:52:82:5f:15:20',
-                'ssh_connection_ok': True
-            },
-            '192.168.0.30': {
-                'mac_address': '3c:52:82:5f:15:21',
-                'ssh_connection_ok': True
-            }
-        }
-
-        connected_duts = moblab_rpc_interface._test_all_dut_connections(leases)
-        self.assertDictEqual(expected, connected_duts)
-
-    def testAllDutConnectionsFailure(self):
-        leases = {
-            '192.168.0.20': '3c:52:82:5f:15:20',
-            '192.168.0.30': '3c:52:82:5f:15:21'
-        }
-
-        # stub out all of the multiprocessing
-        mock_value = self.mox.CreateMockAnything()
-        mock_value.value = False
-        mock_process = self.mox.CreateMockAnything()
-
-        for key in leases:
-            mock_process.start()
-        for key in leases:
-            mock_process.join()
-
-        self.mox.StubOutWithMock(
-                moblab_rpc_interface, 'multiprocessing')
-
-        for key in leases:
-            moblab_rpc_interface.multiprocessing.Value(
-                    mox.IgnoreArg()).AndReturn(mock_value)
-            moblab_rpc_interface.multiprocessing.Process(
-                    target=mox.IgnoreArg(), args=mox.IgnoreArg()).AndReturn(
-                        mock_process)
-
-        self.mox.ReplayAll()
-
-        expected = {
-            '192.168.0.20': {
-                'mac_address': '3c:52:82:5f:15:20',
-                'ssh_connection_ok': False
-            },
-            '192.168.0.30': {
-                'mac_address': '3c:52:82:5f:15:21',
-                'ssh_connection_ok': False
-            }
-        }
-
-        connected_duts = moblab_rpc_interface._test_all_dut_connections(leases)
-        self.assertDictEqual(expected, connected_duts)
-
-    def testDutSshConnection(self):
-        good_ip = '192.168.0.20'
-        bad_ip = '192.168.0.30'
-        cmd = ('ssh -o ConnectTimeout=3 -o StrictHostKeyChecking=no '
-                "root@%s 'timeout 2 cat /etc/lsb-release'")
-
-        self.mox.StubOutWithMock(moblab_rpc_interface.subprocess,
-                'check_output')
-        moblab_rpc_interface.subprocess.check_output(
-                cmd % good_ip, shell=True).AndReturn('CHROMEOS_RELEASE_APPID')
-
-        moblab_rpc_interface.subprocess.check_output(
-                cmd % bad_ip, shell=True).AndRaise(
-                moblab_rpc_interface.subprocess.CalledProcessError(1, cmd))
-
-        self.mox.ReplayAll()
-        self.assertEquals(
-            moblab_rpc_interface._test_dut_ssh_connection(good_ip), True)
-        self.assertEquals(
-            moblab_rpc_interface._test_dut_ssh_connection(bad_ip), False)
-
-
-if __name__ == '__main__':
-    unittest.main()
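The tests deleted above are written against the Python 2-only mox library (StubOutWithMock / ReplayAll / VerifyAll), while the tests kept later in this change move to unittest.mock. The sketch below only illustrates the rough equivalence of the two styles on a stand-in class; it is not code from this change.

```python
# Illustrative only: rough equivalence between the mox record/replay style
# used in the deleted tests and unittest.mock, shown on a stand-in class.
import unittest
from unittest.mock import patch


class Gsutil:
    """Stand-in for a gsutil helper; not the autotest GsUtil class."""

    @staticmethod
    def get_gsutil_cmd():
        return '/usr/bin/gsutil'


class MigrationExample(unittest.TestCase):
    # The mox style in the deleted file looked roughly like:
    #   self.mox.StubOutWithMock(Gsutil, 'get_gsutil_cmd')
    #   Gsutil.get_gsutil_cmd().AndReturn('/path/to/gsutil')
    #   self.mox.ReplayAll()
    #   ... code under test ...
    #   self.mox.VerifyAll()
    def test_with_unittest_mock(self):
        with patch.object(Gsutil, 'get_gsutil_cmd',
                          return_value='/path/to/gsutil') as stub:
            self.assertEqual(Gsutil.get_gsutil_cmd(), '/path/to/gsutil')
        stub.assert_called_once_with()


if __name__ == '__main__':
    unittest.main()
```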
diff --git a/frontend/afe/model_logic.py b/frontend/afe/model_logic.py
index 334e6ad..41cf15e 100644
--- a/frontend/afe/model_logic.py
+++ b/frontend/afe/model_logic.py
@@ -3,15 +3,14 @@
 """
 
 import django.core.exceptions
-from django.db import connection
-from django.db import connections
+import django.db.models.sql.where
+import six
+from autotest_lib.client.common_lib import error
+from autotest_lib.frontend.afe import rdb_model_extensions
+from django.db import connection, connections
 from django.db import models as dbmodels
 from django.db import transaction
 from django.db.models.sql import query
-import django.db.models.sql.where
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.frontend.afe import rdb_model_extensions
 
 
 class ValidationError(django.core.exceptions.ValidationError):
@@ -39,18 +38,22 @@
     """
 
     class CustomQuery(query.Query):
+        """A custom query"""
+
         def __init__(self, *args, **kwargs):
             super(ExtendedManager.CustomQuery, self).__init__(*args, **kwargs)
             self._custom_joins = []
 
 
         def clone(self, klass=None, **kwargs):
+            """Clones the query and returns the clone."""
             obj = super(ExtendedManager.CustomQuery, self).clone(klass)
             obj._custom_joins = list(self._custom_joins)
             return obj
 
 
         def combine(self, rhs, connector):
+            """Combines query with another query."""
             super(ExtendedManager.CustomQuery, self).combine(rhs, connector)
             if hasattr(rhs, '_custom_joins'):
                 self._custom_joins.extend(rhs._custom_joins)
@@ -58,6 +61,7 @@
 
         def add_custom_join(self, table, condition, join_type,
                             condition_values=(), alias=None):
+            """Adds a custom join to the query."""
             if alias is None:
                 alias = table
             join_dict = dict(table=table,
@@ -93,10 +97,12 @@
 
 
         def as_sql(self, qn=None, connection=None):
+            """Converts the clause to SQL and returns it."""
             return self._clause, self._values
 
 
         def relabel_aliases(self, change_map):
+            """Does nothing."""
             return
 
 
@@ -263,6 +269,7 @@
 
 
     def add_where(self, query_set, where, values=()):
+        """Adds a where clause to the query_set."""
         query_set = query_set.all()
         query_set.query.where.add(self._WhereClause(where, values),
                                   django.db.models.sql.where.AND)
@@ -281,6 +288,7 @@
 
 
     def escape_user_sql(self, sql):
+        """Escapes % in sql."""
         return sql.replace('%', '%%')
 
 
@@ -408,8 +416,9 @@
         """ % dict(from_field=pivot_from_field,
                    to_field=pivot_to_field,
                    table=pivot_table,
-                   id_list=','.join(str(id_) for id_
-                                    in base_objects_by_id.iterkeys()))
+                   id_list=','.join(
+                           str(id_)
+                           for id_ in six.iterkeys(base_objects_by_id)))
 
         # Chose the connection that's responsible for this type of object
         # The databases for related_model and the current model will always
@@ -459,7 +468,7 @@
         # The default maximum value of a host parameter number in SQLite is 999.
         # Exceed this will get a DatabaseError later.
         batch_size = 900
-        for i in xrange(0, len(base_objects), batch_size):
+        for i in range(0, len(base_objects), batch_size):
             base_objects_batch = base_objects[i:i + batch_size]
             base_objects_by_id = dict((base_object._get_pk_val(), base_object)
                                       for base_object in base_objects_batch)
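The hunk above keeps the helper that queries pivot rows in batches of 900 IDs because SQLite caps the number of host parameters in a single statement (999 by default), now iterating with range instead of xrange. Here is a self-contained sketch of that batching idea, assuming a made-up hosts table and the standard sqlite3 module; it is not the repository's query code.

```python
# Sketch of the batching idea behind batch_size = 900 above: SQLite limits
# the number of '?' host parameters per statement (999 by default), so a
# large ID list is queried in chunks. Table and column names are made up.
import sqlite3

BATCH_SIZE = 900  # stay under SQLite's default 999-parameter limit


def fetch_rows_by_id(conn, ids):
    rows = []
    for i in range(0, len(ids), BATCH_SIZE):
        batch = ids[i:i + BATCH_SIZE]
        placeholders = ','.join('?' * len(batch))
        rows.extend(conn.execute(
                'SELECT id, name FROM hosts WHERE id IN (%s)' % placeholders,
                batch))
    return rows


if __name__ == '__main__':
    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE hosts (id INTEGER PRIMARY KEY, name TEXT)')
    conn.executemany('INSERT INTO hosts VALUES (?, ?)',
                     [(n, 'host%d' % n) for n in range(2000)])
    print(len(fetch_rows_by_id(conn, list(range(2000)))))  # 2000
```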
@@ -478,6 +487,7 @@
     QuerySet that handles delete() properly for models with an "invalid" bit
     """
     def delete(self):
+        """Deletes the QuerySet."""
         for model in self:
             model.delete()
 
@@ -600,7 +610,7 @@
         cls = type(self)
         field_dict = self.get_field_dict()
         manager = cls.get_valid_manager()
-        for field_name, field_obj in field_dict.iteritems():
+        for field_name, field_obj in six.iteritems(field_dict):
             if not field_obj.unique:
                 continue
 
@@ -637,7 +647,7 @@
             try:
                 python_value = f.to_python(
                     getattr(self, f.attname, f.get_default()))
-            except django.core.exceptions.ValidationError, e:
+            except django.core.exceptions.ValidationError as e:
                 error_dict[f.name] = str(e)
                 continue
 
@@ -651,9 +661,10 @@
 
 
     def do_validate(self):
+        """Validate fields."""
         errors = self._validate()
         unique_errors = self._validate_unique()
-        for field_name, error in unique_errors.iteritems():
+        for field_name, error in six.iteritems(unique_errors):
             errors.setdefault(field_name, error)
         if errors:
             raise ValidationError(errors)
@@ -690,7 +701,7 @@
         data.update(kwargs)
         data = self.prepare_data_args(data)
         self.convert_human_readable_values(data)
-        for field_name, value in data.iteritems():
+        for field_name, value in six.iteritems(data):
             setattr(self, field_name, value)
         self.do_validate()
         self.save()
@@ -836,9 +847,10 @@
         else:
             manager = cls.objects
 
-        if isinstance(id_or_name, (int, long)):
+        if isinstance(id_or_name, six.integer_types):
             return manager.get(pk=id_or_name)
-        if isinstance(id_or_name, basestring) and hasattr(cls, 'name_field'):
+        if isinstance(id_or_name, six.string_types) and hasattr(
+                cls, 'name_field'):
             return manager.get(**{cls.name_field : id_or_name})
         raise ValueError(
             'Invalid positional argument: %s (%s)' % (id_or_name,
@@ -847,6 +859,7 @@
 
     @classmethod
     def smart_get_bulk(cls, id_or_name_list):
+        """Like smart_get, but for a list of ids or names"""
         invalid_inputs = []
         result_objects = []
         for id_or_name in id_or_name_list:
@@ -891,7 +904,7 @@
         """
         See on_attribute_changed.
         """
-        assert not isinstance(attributes, basestring)
+        assert not isinstance(attributes, six.string_types)
         self._recorded_attributes = dict((attribute, getattr(self, attribute))
                                          for attribute in attributes)
 
@@ -900,7 +913,8 @@
         """
         See on_attribute_changed.
         """
-        for attribute, original_value in self._recorded_attributes.iteritems():
+        for attribute, original_value in six.iteritems(
+                self._recorded_attributes):
             new_value = getattr(self, attribute)
             if original_value != new_value:
                 self.on_attribute_changed(attribute, original_value)
@@ -985,7 +999,7 @@
                  fields/objects.
         """
         links_to_local_values, links_to_related_values = [], []
-        for link, value in data.iteritems():
+        for link, value in six.iteritems(data):
             if link in cls.SERIALIZATION_LINKS_TO_FOLLOW:
                 # It's a foreign key
                 links_to_related_values.append((link, value))
@@ -1123,7 +1137,7 @@
         return instance
 
 
-    def sanity_check_update_from_shard(self, shard, updated_serialized,
+    def _check_update_from_shard(self, shard, updated_serialized,
                                        *args, **kwargs):
         """Check if an update sent from a shard is legitimate.
 
@@ -1131,7 +1145,7 @@
                 legitimate.
         """
         raise NotImplementedError(
-            'sanity_check_update_from_shard must be implemented by subclass %s '
+            '_check_update_from_shard must be implemented by subclass %s '
             'for type %s' % type(self))
 
 
@@ -1239,6 +1253,7 @@
     """
 
     def save(self, *args, **kwargs):
+        """Saves the model"""
         first_time = (self.id is None)
         if first_time:
             # see if this object was previously added and invalidated
@@ -1274,6 +1289,7 @@
 
 
     def delete(self):
+        """Deletes the model"""
         self.invalid = self.invalid
         assert not self.invalid
         self.invalid = True
@@ -1337,6 +1353,7 @@
 
 
     def delete_attribute(self, attribute):
+        """Deletes an attribute"""
         if self._is_replaced_by_static_attribute(attribute):
             raise error.UnmodifiableAttributeException(
                     'Failed to delete attribute "%s" for host "%s" since it '
@@ -1362,6 +1379,7 @@
     """Manager for use with the ModelWithHash abstract model class"""
 
     def create(self, **kwargs):
+        """Always raises exception."""
         raise Exception('ModelWithHash manager should use get_or_create() '
                         'instead of create()')
 
@@ -1379,6 +1397,7 @@
     objects = ModelWithHashManager()
 
     class Meta:
+        """Overrides dbmodels.Model.Meta."""
         abstract = True
 
 
diff --git a/frontend/afe/models.py b/frontend/afe/models.py
index d637a2b..46ce4ef 100644
--- a/frontend/afe/models.py
+++ b/frontend/afe/models.py
@@ -7,6 +7,7 @@
 import os
 
 import django.core
+import six
 try:
     from django.db import models as dbmodels, connection
 except django.core.exceptions.ImproperlyConfigured:
@@ -505,7 +506,7 @@
         self.shard = Shard.deserialize(data)
 
 
-    # Note: Only specify foreign keys here, specify all native host columns in
+    # Note: Only specify foreign keys here, specify host columns in
     # rdb_model_extensions instead.
     Protection = host_protections.Protection
     labels = dbmodels.ManyToManyField(Label, blank=True,
@@ -1470,7 +1471,7 @@
         self.shard = Shard.deserialize(data)
 
 
-    def sanity_check_update_from_shard(self, shard, updated_serialized):
+    def _check_update_from_shard(self, shard, updated_serialized):
         # If the job got aborted on the main after the client fetched it
         # no shard_id will be set. The shard might still push updates though,
         # as the job might complete before the abort bit syncs to the shard.
@@ -1504,8 +1505,8 @@
         'AUTOTEST_WEB', 'parse_failed_repair_default', type=bool, default=False)
     FETCH_READONLY_JOBS = global_config.global_config.get_config_value(
         'AUTOTEST_WEB','readonly_heartbeat', type=bool, default=False)
-    SKIP_JOBS_CREATED_BEFORE = global_config.global_config.get_config_value(
-        'SHARD', 'skip_jobs_created_before', type=int, default=0)
+    # TODO(ayatane): Deprecated, not removed due to difficulty untangling imports
+    SKIP_JOBS_CREATED_BEFORE = 0
 
 
 
@@ -1635,7 +1636,7 @@
         job.dependency_labels = options['dependencies']
 
         if options.get('keyvals'):
-            for key, value in options['keyvals'].iteritems():
+            for key, value in six.iteritems(options['keyvals']):
                 # None (or NULL) is not acceptable by DB, so change it to an
                 # empty string in case.
                 JobKeyval.objects.create(job=job, key=key,
@@ -1924,7 +1925,7 @@
         self.meta_host = Label.deserialize(data)
 
 
-    def sanity_check_update_from_shard(self, shard, updated_serialized,
+    def _check_update_from_shard(self, shard, updated_serialized,
                                        job_ids_sent):
         if self.job_id not in job_ids_sent:
             raise error.IgnorableUnallowedRecordsSentToMain(
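The model_logic.py and models.py hunks above replace Python 2-only constructs (dict.iteritems, iterkeys, basestring, long) with their six equivalents so the same code runs under both interpreters. The small compatibility sketch below uses the real six helpers on throwaway data; smart_key is a made-up stand-in that only echoes the shape of the smart_get type checks.

```python
# Small illustration of the six helpers adopted above; data is throwaway.
import six

field_dict = {'hostname': 'chromeos1-row1', 'id': 42}

# six.iteritems replaces dict.iteritems(), which does not exist on Python 3.
for name, value in six.iteritems(field_dict):
    print(name, value)


# six.string_types / six.integer_types replace basestring and (int, long).
def smart_key(id_or_name):
    if isinstance(id_or_name, six.integer_types):
        return ('pk', id_or_name)
    if isinstance(id_or_name, six.string_types):
        return ('name', id_or_name)
    raise ValueError('Invalid positional argument: %r' % (id_or_name,))


print(smart_key(42))       # ('pk', 42)
print(smart_key('host1'))  # ('name', 'host1')
```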
diff --git a/frontend/afe/models_test.py b/frontend/afe/models_test.py
index ae49b72..aa1af89 100755
--- a/frontend/afe/models_test.py
+++ b/frontend/afe/models_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # pylint: disable=missing-docstring
 
 import unittest
@@ -468,7 +468,7 @@
                           'jobkeyval_set': [{'id': 10,
                                              'job_id': 5,
                                              'key': 'suite',
-                                             'value': 'dummy'},
+                                             'value': 'stub'},
                                             {'id': 11,
                                              'job_id': 5,
                                              'key': 'build',
@@ -536,7 +536,7 @@
                           'jobkeyval_set': [{'id': 16,
                                              'job_id': 7,
                                              'key': 'suite',
-                                             'value': 'dummy'},
+                                             'value': 'stub'},
                                             {'id': 17,
                                              'job_id': 7,
                                              'key': 'build',
diff --git a/frontend/afe/rdb_model_extensions.py b/frontend/afe/rdb_model_extensions.py
index 12ba892..1bf2c8f 100644
--- a/frontend/afe/rdb_model_extensions.py
+++ b/frontend/afe/rdb_model_extensions.py
@@ -5,16 +5,13 @@
 """Model extensions common to both the server and client rdb modules.
 """
 
-
+import six
+from autotest_lib.client.common_lib import host_protections, host_states
+from autotest_lib.frontend import settings
 from django.core import exceptions as django_exceptions
 from django.db import models as dbmodels
 
 
-from autotest_lib.client.common_lib import host_protections
-from autotest_lib.client.common_lib import host_states
-from autotest_lib.frontend import settings
-
-
 class ModelValidators(object):
     """Convenience functions for model validation.
 
@@ -85,7 +82,7 @@
         """
         new_data = dict(data)
         field_dict = cls.get_field_dict()
-        for name, obj in field_dict.iteritems():
+        for name, obj in six.iteritems(field_dict):
             if data.get(name) is not None:
                 continue
             if obj.default is not dbmodels.fields.NOT_PROVIDED:
@@ -202,4 +199,5 @@
 
 
     class Meta:
+        """Extends dbmodels.Model.Meta"""
         abstract = True
diff --git a/frontend/afe/rpc_client_lib.py b/frontend/afe/rpc_client_lib.py
index b1daf04..126d56b 100644
--- a/frontend/afe/rpc_client_lib.py
+++ b/frontend/afe/rpc_client_lib.py
@@ -6,7 +6,7 @@
 __author__ = 'showard@google.com (Steve Howard)'
 
 import getpass, os
-from json_rpc import proxy
+from autotest_lib.frontend.afe.json_rpc import proxy
 from autotest_lib.client.common_lib import utils
 
 
diff --git a/frontend/afe/rpc_handler.py b/frontend/afe/rpc_handler.py
index 3018d48..2bd7709 100644
--- a/frontend/afe/rpc_handler.py
+++ b/frontend/afe/rpc_handler.py
@@ -37,7 +37,7 @@
 
 
 class RpcMethodHolder(object):
-    'Dummy class to hold RPC interface methods as attributes.'
+    'Stub class to hold RPC interface methods as attributes.'
 
 
 class RpcValidator(object):
diff --git a/frontend/afe/rpc_interface.py b/frontend/afe/rpc_interface.py
index 08ca1d1..68ecbeb 100644
--- a/frontend/afe/rpc_interface.py
+++ b/frontend/afe/rpc_interface.py
@@ -40,37 +40,29 @@
 import sys
 import warnings
 
+import six
+from autotest_lib.client.common_lib import (control_data, error, global_config,
+                                            priorities)
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.frontend.afe import control_file as control_file_lib
+from autotest_lib.frontend.afe import (model_attributes, model_logic, models,
+                                       rpc_utils)
+from autotest_lib.frontend.tko import models as tko_models
+from autotest_lib.frontend.tko import rpc_interface as tko_rpc_interface
+from autotest_lib.server import frontend, utils
+from autotest_lib.server.cros import provision
+from autotest_lib.server.cros.dynamic_suite import (constants,
+                                                    control_file_getter,
+                                                    suite_common, tools)
+from autotest_lib.server.cros.dynamic_suite.suite import Suite
+from autotest_lib.server.lib import status_history
+from autotest_lib.site_utils import job_history, stable_version_utils
 from django.db import connection as db_connection
 from django.db import transaction
 from django.db.models import Count
 from django.db.utils import DatabaseError
 
 import common
-from autotest_lib.client.common_lib import control_data
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib import priorities
-from autotest_lib.client.common_lib.cros import dev_server
-from autotest_lib.frontend.afe import control_file as control_file_lib
-from autotest_lib.frontend.afe import model_attributes
-from autotest_lib.frontend.afe import model_logic
-from autotest_lib.frontend.afe import models
-from autotest_lib.frontend.afe import rpc_utils
-from autotest_lib.frontend.tko import models as tko_models
-from autotest_lib.frontend.tko import rpc_interface as tko_rpc_interface
-from autotest_lib.server import frontend
-from autotest_lib.server import utils
-from autotest_lib.server.cros import provision
-from autotest_lib.server.cros.dynamic_suite import constants
-from autotest_lib.server.cros.dynamic_suite import control_file_getter
-from autotest_lib.server.cros.dynamic_suite import suite_common
-from autotest_lib.server.cros.dynamic_suite import tools
-from autotest_lib.server.cros.dynamic_suite.suite import Suite
-from autotest_lib.server.lib import status_history
-from autotest_lib.site_utils import job_history
-from autotest_lib.site_utils import server_manager_utils
-from autotest_lib.site_utils import stable_version_utils
-
 
 _CONFIG = global_config.global_config
 
@@ -164,9 +156,9 @@
             # If the exception is raised not because of duplicated
             # "name", then raise the original exception.
             if label is None:
-                raise exc_info[0], exc_info[1], exc_info[2]
+                six.reraise(exc_info[0], exc_info[1], exc_info[2])
         else:
-            raise exc_info[0], exc_info[1], exc_info[2]
+            six.reraise(exc_info[0], exc_info[1], exc_info[2])
     return label.id
 
 
@@ -213,7 +205,7 @@
         # This matches the type checks in smart_get, which is a hack
         # in and off itself. The aim here is to create any non-existent
         # label, which we cannot do if the 'id' specified isn't a label name.
-        if isinstance(id, basestring):
+        if isinstance(id, six.string_types):
             label = models.Label.smart_get(add_label(id))
         else:
             raise ValueError('Label id (%s) does not exist. Please specify '
@@ -1327,7 +1319,7 @@
         builds = None
         if isinstance(value, dict):
             builds = value
-        elif isinstance(value, basestring):
+        elif isinstance(value, six.string_types):
             builds = ast.literal_eval(value)
         if builds:
             image = builds.get('cros-version')
@@ -2123,9 +2115,9 @@
     @returns: A list of label models that ready to be added to shard.
     """
     if not labels:
-      # allow creation of label-less shards (labels='' would otherwise fail the
-      # checks below)
-      return []
+        # allow creation of label-less shards (labels='' would otherwise fail the
+        # checks below)
+        return []
     labels = labels.split(',')
     label_models = []
     for label in labels:
@@ -2265,12 +2257,7 @@
     @raises error.RPCException: If server database is not used.
     @return: A list of server names for servers with matching role and status.
     """
-    if not server_manager_utils.use_server_db():
-        raise error.RPCException('Server database is not enabled. Please try '
-                                 'retrieve servers from global config.')
-    servers = server_manager_utils.get_servers(hostname=hostname, role=role,
-                                               status=status)
-    return [s.get_details() for s in servers]
+    raise DeprecationWarning("server_manager_utils has been removed.")
 
 
 @rpc_utils.route_rpc_to_main
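The rpc_interface.py hunk above swaps the Python 2-only `raise exc_info[0], exc_info[1], exc_info[2]` form for six.reraise, which re-raises a captured exception with its original traceback on both Python 2 and 3. A minimal sketch of that pattern follows; add_label here is a stand-in that only mirrors the tolerate-duplicates shape of the code above, not the repository's function.

```python
# Minimal sketch of the six.reraise pattern adopted above: capture an
# exception, tolerate the expected case, and re-raise the original error
# (with its traceback) otherwise. add_label is a stand-in for illustration.
import sys

import six


def add_label(name, existing):
    try:
        if name in existing:
            raise RuntimeError('duplicate label %r' % name)
        existing.add(name)
    except Exception:
        exc_info = sys.exc_info()
        if name not in existing:
            # Not the duplicate case we tolerate: surface the original error.
            six.reraise(exc_info[0], exc_info[1], exc_info[2])
        # Duplicate name: treat it as already created.
    return name


labels = {'board:eve'}
print(add_label('board:eve', labels))   # tolerated duplicate
print(add_label('pool:suites', labels))
```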
diff --git a/frontend/afe/rpc_interface_unittest.py b/frontend/afe/rpc_interface_unittest.py
index 255ab10..5a83a4b 100755
--- a/frontend/afe/rpc_interface_unittest.py
+++ b/frontend/afe/rpc_interface_unittest.py
@@ -1,31 +1,28 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # pylint: disable=missing-docstring
 
-import datetime
-import mox
-import unittest
+from __future__ import absolute_import
 
-import common
-from autotest_lib.client.common_lib import control_data
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib import priorities
+import datetime
+import unittest
+from unittest.mock import patch
+from unittest.mock import MagicMock
+
+import six
+from autotest_lib.client.common_lib import (control_data, error, global_config,
+                                            priorities)
 from autotest_lib.client.common_lib.cros import dev_server
-from autotest_lib.client.common_lib.test_utils import mock
 from autotest_lib.frontend import setup_django_environment
-from autotest_lib.frontend.afe import frontend_test_utils
-from autotest_lib.frontend.afe import model_logic
-from autotest_lib.frontend.afe import models
-from autotest_lib.frontend.afe import rpc_interface
-from autotest_lib.frontend.afe import rpc_utils
-from autotest_lib.server import frontend
+from autotest_lib.frontend.afe import (frontend_test_utils, model_logic,
+                                       models, rpc_interface, rpc_utils)
 from autotest_lib.server import utils as server_utils
 from autotest_lib.server.cros import provision
-from autotest_lib.server.cros.dynamic_suite import constants
-from autotest_lib.server.cros.dynamic_suite import control_file_getter
-from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
-from autotest_lib.server.cros.dynamic_suite import suite_common
+from autotest_lib.server.cros.dynamic_suite import (constants,
+                                                    control_file_getter,
+                                                    frontend_wrappers,
+                                                    suite_common)
 
+import common
 
 CLIENT = control_data.CONTROL_TYPE_NAMES.CLIENT
 SERVER = control_data.CONTROL_TYPE_NAMES.SERVER
@@ -33,7 +30,7 @@
 _hqe_status = models.HostQueueEntry.Status
 
 
-class ShardHeartbeatTest(mox.MoxTestBase, unittest.TestCase):
+class ShardHeartbeatTest(unittest.TestCase):
 
     _PRIORITY = priorities.Priority.DEFAULT
 
@@ -88,7 +85,7 @@
 
 
     def _createJobForLabel(self, label):
-        job_id = rpc_interface.create_job(name='dummy', priority=self._PRIORITY,
+        job_id = rpc_interface.create_job(name='stub', priority=self._PRIORITY,
                                           control_file='foo',
                                           control_type=CLIENT,
                                           meta_hosts=[label.name],
@@ -123,17 +120,17 @@
         self.assertEqual(host2.shard, None)
 
         # In the middle of the assign_to_shard call, remove label1 from shard1.
-        self.mox.StubOutWithMock(models.Host, '_assign_to_shard_nothing_helper')
-        def remove_label():
-            rpc_interface.remove_board_from_shard(shard1.hostname, label1.name)
+        with patch.object(
+                models.Host,
+                '_assign_to_shard_nothing_helper',
+                side_effect=lambda: rpc_interface.remove_board_from_shard(
+                        shard1.hostname, label1.name)):
+            self._do_heartbeat_and_assert_response(
+                    known_hosts=[host1],
+                    hosts=[],
+                    incorrect_host_ids=[host1.id])
+            host2 = models.Host.smart_get(host2.id)
 
-        models.Host._assign_to_shard_nothing_helper().WithSideEffects(
-            remove_label)
-        self.mox.ReplayAll()
-
-        self._do_heartbeat_and_assert_response(
-            known_hosts=[host1], hosts=[], incorrect_host_ids=[host1.id])
-        host2 = models.Host.smart_get(host2.id)
         self.assertEqual(host2.shard, None)
 
 
@@ -223,18 +220,18 @@
 
         models.Test.objects.create(name='platform_BootPerfServer:shard',
                                    test_type=1)
-        self.mox.StubOutWithMock(server_utils, 'read_file')
-        self.mox.ReplayAll()
-        rpc_interface.delete_shard(hostname=shard1.hostname)
+        with patch.object(server_utils, 'read_file'):
+            rpc_interface.delete_shard(hostname=shard1.hostname)
 
-        self.assertRaises(
-            models.Shard.DoesNotExist, models.Shard.objects.get, pk=shard1.id)
+            self.assertRaises(models.Shard.DoesNotExist,
+                              models.Shard.objects.get,
+                              pk=shard1.id)
 
-        job1 = models.Job.objects.get(pk=job1.id)
-        label1 = models.Label.objects.get(pk=label1.id)
+            job1 = models.Job.objects.get(pk=job1.id)
+            label1 = models.Label.objects.get(pk=label1.id)
 
-        self.assertIsNone(job1.shard)
-        self.assertEqual(len(label1.shard_set.all()), 0)
+            self.assertIsNone(job1.shard)
+            self.assertEqual(len(label1.shard_set.all()), 0)
 
 
     def _testResendHostsAfterFailedHeartbeatHelper(self, host1):
@@ -249,21 +246,19 @@
         self._do_heartbeat_and_assert_response(known_hosts=[host1])
 
 
-class RpcInterfaceTestWithStaticAttribute(
-        mox.MoxTestBase, unittest.TestCase,
-        frontend_test_utils.FrontendTestMixin):
+class RpcInterfaceTestWithStaticAttribute(unittest.TestCase,
+                                          frontend_test_utils.FrontendTestMixin
+                                          ):
 
     def setUp(self):
         super(RpcInterfaceTestWithStaticAttribute, self).setUp()
         self._frontend_common_setup()
-        self.god = mock.mock_god()
         self.old_respect_static_config = rpc_interface.RESPECT_STATIC_ATTRIBUTES
         rpc_interface.RESPECT_STATIC_ATTRIBUTES = True
         models.RESPECT_STATIC_ATTRIBUTES = True
 
 
     def tearDown(self):
-        self.god.unstub_all()
         self._frontend_common_teardown()
         global_config.global_config.reset_config_values()
         rpc_interface.RESPECT_STATIC_ATTRIBUTES = self.old_respect_static_config
@@ -363,14 +358,12 @@
     def setUp(self):
         super(RpcInterfaceTestWithStaticLabel, self).setUp()
         self._frontend_common_setup()
-        self.god = mock.mock_god()
         self.old_respect_static_config = rpc_interface.RESPECT_STATIC_LABELS
         rpc_interface.RESPECT_STATIC_LABELS = True
         models.RESPECT_STATIC_LABELS = True
 
 
     def tearDown(self):
-        self.god.unstub_all()
         self._frontend_common_teardown()
         global_config.global_config.reset_config_values()
         rpc_interface.RESPECT_STATIC_LABELS = self.old_respect_static_config
@@ -424,15 +417,10 @@
         host2.labels.add(label1)
         host2.save()
 
-        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
-                                                  'MockAFE')
-        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)
+        with patch.object(frontend_wrappers, 'RetryingAFE') as mock_afe:
+            self.assertRaises(error.UnmodifiableLabelException,
+                              rpc_interface.delete_label, label1.id)
 
-        self.assertRaises(error.UnmodifiableLabelException,
-                          rpc_interface.delete_label,
-                          label1.id)
-
-        self.god.check_playback()
 
 
     def test_modify_static_label(self):
@@ -445,17 +433,13 @@
         host2.labels.add(label1)
         host2.save()
 
-        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
-                                                  'MockAFE')
-        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)
-
-        self.assertRaises(error.UnmodifiableLabelException,
-                          rpc_interface.modify_label,
-                          label1.id,
-                          invalid=1)
+        with patch.object(frontend_wrappers, 'RetryingAFE') as mock_afe:
+            self.assertRaises(error.UnmodifiableLabelException,
+                              rpc_interface.modify_label,
+                              label1.id,
+                              invalid=1)
 
         self.assertEqual(models.Label.smart_get('static').invalid, 0)
-        self.god.check_playback()
 
 
     def test_multiple_platforms_add_non_static_to_static(self):
@@ -565,7 +549,7 @@
         host1 = models.Host.smart_get(host.id)
         shard1 = models.Shard.smart_get(shard.id)
         self.assertEqual(host1.shard, None)
-        self.assertItemsEqual(shard1.labels.all(), [])
+        six.assertCountEqual(self, shard1.labels.all(), [])
 
 
     def test_check_job_dependencies_success(self):
@@ -695,11 +679,9 @@
                        frontend_test_utils.FrontendTestMixin):
     def setUp(self):
         self._frontend_common_setup()
-        self.god = mock.mock_god()
 
 
     def tearDown(self):
-        self.god.unstub_all()
         self._frontend_common_teardown()
         global_config.global_config.reset_config_values()
 
@@ -798,7 +780,7 @@
 
 
     def test_get_jobs_summary(self):
-        job = self._create_job(hosts=xrange(1, 4))
+        job = self._create_job(hosts=range(1, 4))
         entries = list(job.hostqueueentry_set.all())
         entries[1].status = _hqe_status.FAILED
         entries[1].save()
@@ -807,15 +789,16 @@
         entries[2].save()
 
         # Mock up tko_rpc_interface.get_status_counts.
-        self.god.stub_function_to_return(rpc_interface.tko_rpc_interface,
-                                         'get_status_counts',
-                                         None)
-
-        job_summaries = rpc_interface.get_jobs_summary(id=job.id)
-        self.assertEquals(len(job_summaries), 1)
-        summary = job_summaries[0]
-        self.assertEquals(summary['status_counts'], {'Queued': 1,
-                                                     'Failed': 2})
+        with patch.object(rpc_interface.tko_rpc_interface,
+                          'get_status_counts',
+                          return_value=None):
+            job_summaries = rpc_interface.get_jobs_summary(id=job.id)
+            self.assertEquals(len(job_summaries), 1)
+            summary = job_summaries[0]
+            self.assertEquals(summary['status_counts'], {
+                    'Queued': 1,
+                    'Failed': 2
+            })
 
 
     def _check_job_ids(self, actual_job_dicts, expected_jobs):
@@ -1142,40 +1125,40 @@
 
         self.assertFalse(host.locked)
 
-        self.god.stub_class_method(frontend.AFE, 'run')
+        with MagicMock() as afe_instance, patch.object(
+                frontend_wrappers, 'RetryingAFE',
+                return_value=afe_instance) as mock_afe:
+            rpc_interface.modify_host(id=host.id,
+                                      locked=True,
+                                      lock_reason='_modify_host_helper lock',
+                                      lock_time=datetime.datetime(
+                                              2015, 12, 15))
 
-        if host_on_shard and not on_shard:
-            mock_afe = self.god.create_mock_class_obj(
-                    frontend_wrappers.RetryingAFE, 'MockAFE')
-            self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)
-
-            mock_afe2 = frontend_wrappers.RetryingAFE.expect_new(
-                    server=shard_hostname, user=None)
-            mock_afe2.run.expect_call('modify_host_local', id=host.id,
-                    locked=True, lock_reason='_modify_host_helper lock',
-                    lock_time=datetime.datetime(2015, 12, 15))
-        elif on_shard:
-            mock_afe = self.god.create_mock_class_obj(
-                    frontend_wrappers.RetryingAFE, 'MockAFE')
-            self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)
-
-            mock_afe2 = frontend_wrappers.RetryingAFE.expect_new(
-                    server=server_utils.get_global_afe_hostname(), user=None)
-            mock_afe2.run.expect_call('modify_host', id=host.id,
-                    locked=True, lock_reason='_modify_host_helper lock',
-                    lock_time=datetime.datetime(2015, 12, 15))
-
-        rpc_interface.modify_host(id=host.id, locked=True,
-                                  lock_reason='_modify_host_helper lock',
-                                  lock_time=datetime.datetime(2015, 12, 15))
-
-        host = models.Host.objects.get(pk=host.id)
-        if on_shard:
-            # modify_host on shard does nothing but routing the RPC to main.
-            self.assertFalse(host.locked)
-        else:
-            self.assertTrue(host.locked)
-        self.god.check_playback()
+            host = models.Host.objects.get(pk=host.id)
+            if on_shard:
+                # modify_host on shard does nothing but routing the RPC to
+                # main.
+                self.assertFalse(host.locked)
+            else:
+                self.assertTrue(host.locked)
+            if host_on_shard and not on_shard:
+                mock_afe.assert_called_with(server=shard_hostname, user=None)
+                afe_instance.run.assert_called_with(
+                        'modify_host_local',
+                        id=host.id,
+                        locked=True,
+                        lock_reason='_modify_host_helper lock',
+                        lock_time=datetime.datetime(2015, 12, 15))
+            elif on_shard:
+                mock_afe.assert_called_with(
+                        server=server_utils.get_global_afe_hostname(),
+                        user=None)
+                afe_instance.run.assert_called_with(
+                        'modify_host',
+                        id=host.id,
+                        locked=True,
+                        lock_reason='_modify_host_helper lock',
+                        lock_time=datetime.datetime(2015, 12, 15))
 
 
     def test_modify_host_on_main_host_on_main(self):
@@ -1209,44 +1192,46 @@
         self.assertFalse(host1.locked)
         self.assertFalse(host2.locked)
 
-        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
-                                                  'MockAFE')
-        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)
+        with MagicMock() as mock_afe1, MagicMock() as mock_afe2, patch.object(
+                frontend_wrappers,
+                'RetryingAFE',
+                side_effect=(lambda server='', user=None: mock_afe1 if server
+                             == 'shard1' else mock_afe2)) as mock_afe:
+            # The statuses of one host might differ on main and shard.
+            # Filters are always applied on the main. So the host on the shard
+            # will be affected no matter what the host status is.
+            filters_to_use = {'status': 'Ready'}
 
-        # The statuses of one host might differ on main and shard.
-        # Filters are always applied on the main. So the host on the shard
-        # will be affected no matter what his status is.
-        filters_to_use = {'status': 'Ready'}
+            rpc_interface.modify_hosts(host_filter_data={'status': 'Ready'},
+                                       update_data={
+                                               'locked':
+                                               True,
+                                               'lock_reason':
+                                               'Testing forward to shard',
+                                               'lock_time':
+                                               datetime.datetime(2015, 12, 15)
+                                       })
 
-        mock_afe2 = frontend_wrappers.RetryingAFE.expect_new(
-                server='shard2', user=None)
-        mock_afe2.run.expect_call(
-            'modify_hosts_local',
-            host_filter_data={'id__in': [shard1.id, shard2.id]},
-            update_data={'locked': True,
-                         'lock_reason': 'Testing forward to shard',
-                         'lock_time' : datetime.datetime(2015, 12, 15) })
-
-        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
-                server='shard1', user=None)
-        mock_afe1.run.expect_call(
-            'modify_hosts_local',
-            host_filter_data={'id__in': [shard1.id, shard2.id]},
-            update_data={'locked': True,
-                         'lock_reason': 'Testing forward to shard',
-                         'lock_time' : datetime.datetime(2015, 12, 15)})
-
-        rpc_interface.modify_hosts(
-                host_filter_data={'status': 'Ready'},
-                update_data={'locked': True,
-                             'lock_reason': 'Testing forward to shard',
-                             'lock_time' : datetime.datetime(2015, 12, 15) })
-
-        host1 = models.Host.objects.get(pk=host1.id)
-        self.assertTrue(host1.locked)
-        host2 = models.Host.objects.get(pk=host2.id)
-        self.assertTrue(host2.locked)
-        self.god.check_playback()
+            host1 = models.Host.objects.get(pk=host1.id)
+            self.assertTrue(host1.locked)
+            host2 = models.Host.objects.get(pk=host2.id)
+            self.assertTrue(host2.locked)
+            mock_afe1.run.assert_called_with(
+                    'modify_hosts_local',
+                    host_filter_data={'id__in': [shard1.id, shard2.id]},
+                    update_data={
+                            'locked': True,
+                            'lock_reason': 'Testing forward to shard',
+                            'lock_time': datetime.datetime(2015, 12, 15)
+                    })
+            mock_afe2.run.assert_called_with(
+                    'modify_hosts_local',
+                    host_filter_data={'id__in': [shard1.id, shard2.id]},
+                    update_data={
+                            'locked': True,
+                            'lock_reason': 'Testing forward to shard',
+                            'lock_time': datetime.datetime(2015, 12, 15)
+                    })
 
 
     def test_delete_host(self):
@@ -1257,20 +1242,16 @@
         host1.save()
         host1_id = host1.id
 
-        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
-                                                 'MockAFE')
-        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)
+        with MagicMock() as mock_afe1, patch.object(
+                frontend_wrappers, 'RetryingAFE',
+                return_value=mock_afe1) as mock_afe:
+            rpc_interface.delete_host(id=host1.id)
 
-        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
-                server='shard1', user=None)
-        mock_afe1.run.expect_call('delete_host', id=host1.id)
+            self.assertRaises(models.Host.DoesNotExist, models.Host.smart_get,
+                              host1_id)
 
-        rpc_interface.delete_host(id=host1.id)
-
-        self.assertRaises(models.Host.DoesNotExist,
-                          models.Host.smart_get, host1_id)
-
-        self.god.check_playback()
+            mock_afe.assert_called_with(server='shard1', user=None)
+            mock_afe1.run.assert_called_with('delete_host', id=host1.id)
 
 
     def test_delete_shard(self):
@@ -1298,18 +1279,16 @@
         host2.labels.add(label1)
         host2.save()
 
-        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
-                                                  'MockAFE')
-        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)
+        with MagicMock() as mock_afe1, patch.object(
+                frontend_wrappers, 'RetryingAFE',
+                return_value=mock_afe1) as mock_afe:
+            rpc_interface.modify_label(label1.id, invalid=1)
 
-        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
-                server='shard1', user=None)
-        mock_afe1.run.expect_call('modify_label', id=label1.id, invalid=1)
-
-        rpc_interface.modify_label(label1.id, invalid=1)
-
-        self.assertEqual(models.Label.objects.all()[0].invalid, 1)
-        self.god.check_playback()
+            self.assertEqual(models.Label.objects.all()[0].invalid, 1)
+            mock_afe.assert_called_with(server='shard1', user=None)
+            mock_afe1.run.assert_called_with('modify_label',
+                                             id=label1.id,
+                                             invalid=1)
 
 
     def test_delete_label(self):
@@ -1321,19 +1300,15 @@
         host2.labels.add(label1)
         host2.save()
 
-        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
-                                                  'MockAFE')
-        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)
+        with MagicMock() as mock_afe1, patch.object(
+                frontend_wrappers, 'RetryingAFE',
+                return_value=mock_afe1) as mock_afe:
+            rpc_interface.delete_label(id=label1.id)
 
-        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
-                server='shard1', user=None)
-        mock_afe1.run.expect_call('delete_label', id=label1.id)
-
-        rpc_interface.delete_label(id=label1.id)
-
-        self.assertRaises(models.Label.DoesNotExist,
-                          models.Label.smart_get, label1.id)
-        self.god.check_playback()
+            self.assertRaises(models.Label.DoesNotExist,
+                              models.Label.smart_get, label1.id)
+            mock_afe.assert_called_with(server='shard1', user=None)
+            mock_afe1.run.assert_called_with('delete_label', id=label1.id)
 
 
     def test_get_image_for_job_with_keyval_build(self):
@@ -1415,28 +1390,30 @@
         super(ExtraRpcInterfaceTest, self).setUp()
         self._SUITE_NAME = suite_common.canonicalize_suite_name(
             self._NAME)
-        self.dev_server = self.mox.CreateMock(dev_server.ImageServer)
+        patcher = patch.object(dev_server, 'ImageServer')
+        self.dev_server = patcher.start()
+        self.addCleanup(patcher.stop)
         self._frontend_common_setup(fill_data=False)
 
 
     def tearDown(self):
         self._frontend_common_teardown()
-
+        if self.dev_server.resolve.call_count > 0:
+            self.dev_server.resolve.assert_called_with(self._BUILD,
+                                                       None,
+                                                       ban_list=None)
 
     def _setupDevserver(self):
-        self.mox.StubOutClassWithMocks(dev_server, 'ImageServer')
-        dev_server.resolve(self._BUILD).AndReturn(self.dev_server)
+        self.dev_server.resolve.return_value = self.dev_server
 
 
     def _mockDevServerGetter(self, get_control_file=True):
         self._setupDevserver()
         if get_control_file:
-          self.getter = self.mox.CreateMock(
-              control_file_getter.DevServerGetter)
-          self.mox.StubOutWithMock(control_file_getter.DevServerGetter,
-                                   'create')
-          control_file_getter.DevServerGetter.create(
-              mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(self.getter)
+            patcher = patch.object(control_file_getter, 'DevServerGetter')
+            self.getter = patcher.start()
+            self.getter.create.return_value = self.getter
+            self.addCleanup(patcher.stop)
 
 
     def _mockRpcUtils(self, to_return, control_file_substring=''):
@@ -1451,39 +1428,27 @@
         """
         download_started_time = constants.DOWNLOAD_STARTED_TIME
         payload_finished_time = constants.PAYLOAD_FINISHED_TIME
-        self.mox.StubOutWithMock(rpc_utils, 'create_job_common')
-        rpc_utils.create_job_common(mox.And(mox.StrContains(self._NAME),
-                                    mox.StrContains(self._BUILD)),
-                            priority=self._PRIORITY,
-                            timeout_mins=self._TIMEOUT*60,
-                            max_runtime_mins=self._TIMEOUT*60,
-                            control_type='Server',
-                            control_file=mox.And(mox.StrContains(self._BOARD),
-                                                 mox.StrContains(self._BUILD),
-                                                 mox.StrContains(
-                                                     control_file_substring)),
-                            hostless=True,
-                            keyvals=mox.And(mox.In(download_started_time),
-                                            mox.In(payload_finished_time))
-                            ).AndReturn(to_return)
-
+        patcher = patch.object(rpc_utils,
+                               'create_job_common',
+                               return_value=to_return)
+        self.rpc_utils = patcher.start()
+        self.addCleanup(patcher.stop)
 
     def testStageBuildFail(self):
         """Ensure that a failure to stage the desired build fails the RPC."""
         self._setupDevserver()
 
         self.dev_server.hostname = 'mox_url'
-        self.dev_server.stage_artifacts(
-                image=self._BUILD,
-                artifacts=['test_suites', 'control_files']).AndRaise(
+        self.dev_server.stage_artifacts.side_effect = (
                 dev_server.DevServerException())
-        self.mox.ReplayAll()
         self.assertRaises(error.StageControlFileFailure,
                           rpc_interface.create_suite_job,
                           name=self._NAME,
                           board=self._BOARD,
                           builds=self._BUILDS,
                           pool=None)
+        self.dev_server.stage_artifacts.assert_called_with(
+                image=self._BUILD, artifacts=['test_suites', 'control_files'])
 
 
     def testGetControlFileFail(self):
@@ -1491,19 +1456,19 @@
         self._mockDevServerGetter()
 
         self.dev_server.hostname = 'mox_url'
-        self.dev_server.stage_artifacts(
-                image=self._BUILD,
-                artifacts=['test_suites', 'control_files']).AndReturn(True)
+        self.dev_server.stage_artifacts.return_value = True
+        self.getter.get_control_file_contents_by_name.return_value = None
 
-        self.getter.get_control_file_contents_by_name(
-            self._SUITE_NAME).AndReturn(None)
-        self.mox.ReplayAll()
         self.assertRaises(error.ControlFileEmpty,
                           rpc_interface.create_suite_job,
                           name=self._NAME,
                           board=self._BOARD,
                           builds=self._BUILDS,
                           pool=None)
+        self.dev_server.stage_artifacts.assert_called_with(
+                image=self._BUILD, artifacts=['test_suites', 'control_files'])
+        self.getter.get_control_file_contents_by_name.assert_called_with(
+                self._SUITE_NAME)
 
 
     def testGetControlFileListFail(self):
@@ -1511,19 +1476,20 @@
         self._mockDevServerGetter()
 
         self.dev_server.hostname = 'mox_url'
-        self.dev_server.stage_artifacts(
-                image=self._BUILD,
-                artifacts=['test_suites', 'control_files']).AndReturn(True)
+        self.dev_server.stage_artifacts.return_value = True
+        self.getter.get_control_file_contents_by_name.side_effect = (
+                error.NoControlFileList())
 
-        self.getter.get_control_file_contents_by_name(
-            self._SUITE_NAME).AndRaise(error.NoControlFileList())
-        self.mox.ReplayAll()
         self.assertRaises(error.NoControlFileList,
                           rpc_interface.create_suite_job,
                           name=self._NAME,
                           board=self._BOARD,
                           builds=self._BUILDS,
                           pool=None)
+        self.dev_server.stage_artifacts.assert_called_with(
+                image=self._BUILD, artifacts=['test_suites', 'control_files'])
+        self.getter.get_control_file_contents_by_name.assert_called_with(
+                self._SUITE_NAME)
 
 
     def testCreateSuiteJobFail(self):
@@ -1531,21 +1497,20 @@
         self._mockDevServerGetter()
 
         self.dev_server.hostname = 'mox_url'
-        self.dev_server.stage_artifacts(
-                image=self._BUILD,
-                artifacts=['test_suites', 'control_files']).AndReturn(True)
-
-        self.getter.get_control_file_contents_by_name(
-            self._SUITE_NAME).AndReturn('f')
-
-        self.dev_server.url().AndReturn('mox_url')
+        self.dev_server.stage_artifacts.return_value = True
+        self.getter.get_control_file_contents_by_name.return_value = 'f'
+        self.dev_server.url.return_value = 'mox_url'
         self._mockRpcUtils(-1)
-        self.mox.ReplayAll()
+
         self.assertEquals(
             rpc_interface.create_suite_job(name=self._NAME,
                                            board=self._BOARD,
                                            builds=self._BUILDS, pool=None),
             -1)
+        self.dev_server.stage_artifacts.assert_called_with(
+                image=self._BUILD, artifacts=['test_suites', 'control_files'])
+        self.getter.get_control_file_contents_by_name.assert_called_with(
+                self._SUITE_NAME)
 
 
     def testCreateSuiteJobSuccess(self):
@@ -1553,23 +1518,22 @@
         self._mockDevServerGetter()
 
         self.dev_server.hostname = 'mox_url'
-        self.dev_server.stage_artifacts(
-                image=self._BUILD,
-                artifacts=['test_suites', 'control_files']).AndReturn(True)
-
-        self.getter.get_control_file_contents_by_name(
-            self._SUITE_NAME).AndReturn('f')
-
-        self.dev_server.url().AndReturn('mox_url')
+        self.dev_server.stage_artifacts.return_value = True
+        self.getter.get_control_file_contents_by_name.return_value = 'f'
+        self.dev_server.url.return_value = 'mox_url'
         job_id = 5
         self._mockRpcUtils(job_id)
-        self.mox.ReplayAll()
+
         self.assertEquals(
             rpc_interface.create_suite_job(name=self._NAME,
                                            board=self._BOARD,
                                            builds=self._BUILDS,
                                            pool=None),
             job_id)
+        self.dev_server.stage_artifacts.assert_called_with(
+                image=self._BUILD, artifacts=['test_suites', 'control_files'])
+        self.getter.get_control_file_contents_by_name.assert_called_with(
+                self._SUITE_NAME)
 
 
     def testCreateSuiteJobNoHostCheckSuccess(self):
@@ -1577,23 +1541,22 @@
         self._mockDevServerGetter()
 
         self.dev_server.hostname = 'mox_url'
-        self.dev_server.stage_artifacts(
-                image=self._BUILD,
-                artifacts=['test_suites', 'control_files']).AndReturn(True)
-
-        self.getter.get_control_file_contents_by_name(
-            self._SUITE_NAME).AndReturn('f')
-
-        self.dev_server.url().AndReturn('mox_url')
+        self.dev_server.stage_artifacts.return_value = True
+        self.getter.get_control_file_contents_by_name.return_value = 'f'
+        self.dev_server.url.return_value = 'mox_url'
         job_id = 5
         self._mockRpcUtils(job_id)
-        self.mox.ReplayAll()
+
         self.assertEquals(
-          rpc_interface.create_suite_job(name=self._NAME,
-                                         board=self._BOARD,
-                                         builds=self._BUILDS,
-                                         pool=None, check_hosts=False),
-          job_id)
+                rpc_interface.create_suite_job(name=self._NAME,
+                                               board=self._BOARD,
+                                               builds=self._BUILDS,
+                                               pool=None,
+                                               check_hosts=False), job_id)
+        self.getter.get_control_file_contents_by_name.assert_called_with(
+                self._SUITE_NAME)
+        self.dev_server.stage_artifacts.assert_called_with(
+                image=self._BUILD, artifacts=['test_suites', 'control_files'])
 
 
     def testCreateSuiteJobControlFileSupplied(self):
@@ -1601,13 +1564,10 @@
         self._mockDevServerGetter(get_control_file=False)
 
         self.dev_server.hostname = 'mox_url'
-        self.dev_server.stage_artifacts(
-                image=self._BUILD,
-                artifacts=['test_suites', 'control_files']).AndReturn(True)
-        self.dev_server.url().AndReturn('mox_url')
+        self.dev_server.stage_artifacts.return_value = True
+        self.dev_server.url.return_value = 'mox_url'
         job_id = 5
         self._mockRpcUtils(job_id)
-        self.mox.ReplayAll()
         self.assertEquals(
             rpc_interface.create_suite_job(name='%s/%s' % (self._NAME,
                                                            self._BUILD),
@@ -1616,6 +1576,8 @@
                                            pool=None,
                                            control_file='CONTROL FILE'),
             job_id)
+        self.dev_server.stage_artifacts.assert_called_with(
+                image=self._BUILD, artifacts=['test_suites', 'control_files'])
 
 
     def _get_records_for_sending_to_main(self):
@@ -1626,7 +1588,7 @@
                  'email_list': '',
                  'max_runtime_hrs': 72,
                  'max_runtime_mins': 1440,
-                 'name': 'dummy',
+                 'name': 'stub',
                  'owner': 'autotest_system',
                  'parse_failed_repair': True,
                  'priority': 40,
@@ -1656,7 +1618,7 @@
         self, jobs, hqes, shard_hostname='host1',
         exception_to_throw=error.UnallowedRecordsSentToMain, aborted=False):
         job_id = rpc_interface.create_job(
-                name='dummy',
+                name='stub',
                 priority=self._PRIORITY,
                 control_file='foo',
                 control_type=SERVER,
@@ -1792,14 +1754,13 @@
         shard1, host1, lumpy_label = self._createShardAndHostWithLabel()
 
         self.assertEqual(host1.shard, shard1)
-        self.assertItemsEqual(shard1.labels.all(), [lumpy_label])
+        six.assertCountEqual(self, shard1.labels.all(), [lumpy_label])
         rpc_interface.remove_board_from_shard(
                 shard1.hostname, lumpy_label.name)
         host1 = models.Host.smart_get(host1.id)
         shard1 = models.Shard.smart_get(shard1.id)
         self.assertEqual(host1.shard, None)
-        self.assertItemsEqual(shard1.labels.all(), [])
-
+        six.assertCountEqual(self, shard1.labels.all(), [])
 
     def testCreateListShard(self):
         """Retrieve a list of all shards."""
diff --git a/frontend/afe/rpc_utils.py b/frontend/afe/rpc_utils.py
index 5397ef8..900f81f 100644
--- a/frontend/afe/rpc_utils.py
+++ b/frontend/afe/rpc_utils.py
@@ -8,20 +8,19 @@
 
 import collections
 import datetime
-from functools import wraps
 import inspect
 import logging
 import os
-import sys
+from functools import wraps
+
 import django.db.utils
 import django.http
-
-from autotest_lib.frontend import thread_local
-from autotest_lib.frontend.afe import models, model_logic
-from autotest_lib.client.common_lib import control_data, error
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib import time_utils
+import six
+from autotest_lib.client.common_lib import (control_data, error, global_config,
+                                            time_utils)
 from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.frontend import thread_local
+from autotest_lib.frontend.afe import model_logic, models
 from autotest_lib.server import utils as server_utils
 from autotest_lib.server.cros import provision
 from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
@@ -74,7 +73,7 @@
     """
     if isinstance(data, dict):
         new_data = {}
-        for key, value in data.iteritems():
+        for key, value in six.iteritems(data):
             new_data[key] = _prepare_data(value)
         return new_data
     elif (isinstance(data, list) or isinstance(data, tuple) or
@@ -113,7 +112,7 @@
     objects = collections.OrderedDict()
     for obj in dict_iterable:
         objects.setdefault(obj['id'], obj)
-    return objects.values()
+    return list(objects.values())
 
 
 def extra_job_status_filters(not_yet_run=False, running=False, finished=False):
@@ -225,7 +224,7 @@
         return test_dict
 
     numerized_dict = {}
-    for key, value in test_dict.iteritems():
+    for key, value in six.iteritems(test_dict):
         try:
             numerized_dict[key] = int(value)
         except (ValueError, TypeError):
@@ -242,7 +241,7 @@
     @returns A boolean to identify if the test type is server test.
     """
     if test_type is not None:
-        if isinstance(test_type, basestring):
+        if isinstance(test_type, six.string_types):
             try:
                 test_type = control_data.CONTROL_TYPE.get_value(test_type)
             except AttributeError:
@@ -262,7 +261,7 @@
     # ensure tests are all the same type
     try:
         test_type = get_consistent_value(test_objects, 'test_type')
-    except InconsistencyException, exc:
+    except InconsistencyException as exc:
         test1, test2 = exc.args
         raise model_logic.ValidationError(
             {'tests' : 'You cannot run both test_suites and server-side '
@@ -300,11 +299,12 @@
     for index, dependency in enumerate(job_dependencies):
         if not provision.is_for_special_action(dependency):
             try:
-              label = models.Label.smart_get(dependency)
+                label = models.Label.smart_get(dependency)
             except models.Label.DoesNotExist:
-              logging.info('Label %r does not exist, so it cannot '
-                           'be replaced by static label.', dependency)
-              label = None
+                logging.info(
+                        'Label %r does not exist, so it cannot '
+                        'be replaced by static label.', dependency)
+                label = None
 
             if label is not None and label.is_replaced_by_static():
                 ok_hosts = ok_hosts.filter(static_labels__name=dependency)
@@ -381,7 +381,7 @@
 
 def check_modify_host(update_data):
     """
-    Sanity check modify_host* requests.
+    Check modify_host* requests.
 
     @param update_data: A dictionary with the changes to make to a host
             or hosts.
@@ -481,9 +481,10 @@
 
 def check_for_duplicate_hosts(host_objects):
     host_counts = collections.Counter(host_objects)
-    duplicate_hostnames = {host.hostname
-                           for host, count in host_counts.iteritems()
-                           if count > 1}
+    duplicate_hostnames = {
+            host.hostname
+            for host, count in six.iteritems(host_counts) if count > 1
+    }
     if duplicate_hostnames:
         raise model_logic.ValidationError(
                 {'hosts' : 'Duplicate hosts: %s'
@@ -854,7 +855,7 @@
     if num_shards > 1:
         return False
     if num_shards == 1:
-        hosts_on_shard = shard_host_map.values()[0]
+        hosts_on_shard = list(shard_host_map.values())[0]
         assert len(hosts_on_shard) <= len(host_objects)
         return len(hosts_on_shard) == len(host_objects)
     else:
@@ -973,12 +974,10 @@
     @param shard: The shard the records were sent from.
     @param records: The records sent in their serialized format.
     @param record_type: Type of the objects represented by records.
-    @param args: Additional arguments that will be passed on to the sanity
-                 checks.
-    @param kwargs: Additional arguments that will be passed on to the sanity
-                  checks.
+    @param args: Additional arguments that will be passed on to the checks.
+    @param kwargs: Additional arguments that will be passed on to the checks.
 
-    @raises error.UnallowedRecordsSentToMain if any of the sanity checks fail.
+    @raises error.UnallowedRecordsSentToMain if any of the checks fail.
 
     @returns: List of primary keys of the processed records.
     """
@@ -993,7 +992,7 @@
                     pk, record_type))
 
         try:
-            current_record.sanity_check_update_from_shard(
+            current_record._check_update_from_shard(
                 shard, serialized_record, *args, **kwargs)
         except error.IgnorableUnallowedRecordsSentToMain:
             # An illegal record change was attempted, but it was of a non-fatal
@@ -1008,13 +1007,13 @@
 
 def persist_records_sent_from_shard(shard, jobs, hqes):
     """
-    Sanity checking then saving serialized records sent to main from shard.
+    Checking then saving serialized records sent to main from shard.
 
     During heartbeats shards upload jobs and hostqueueentries. This performs
-    some sanity checks on these and then updates the existing records for those
+    some checks on these and then updates the existing records for those
     entries with the updated ones from the heartbeat.
 
-    The sanity checks include:
+    The checks include:
     - Checking if the objects sent already exist on the main.
     - Checking if the objects sent were assigned to this shard.
     - hostqueueentries must be sent together with their jobs.
@@ -1023,7 +1022,7 @@
     @param jobs: The jobs the shard sent.
     @param hqes: The hostqueueentries the shard sent.
 
-    @raises error.UnallowedRecordsSentToMain if any of the sanity checks fail.
+    @raises error.UnallowedRecordsSentToMain if any of the checks fail.
     """
     job_ids_persisted = _persist_records_with_type_sent_from_shard(
             shard, jobs, models.Job)
@@ -1035,7 +1034,7 @@
 def forward_single_host_rpc_to_shard(func):
     """This decorator forwards rpc calls that modify a host to a shard.
 
-    If a host is assigned to a shard, rpcs that change his attributes should be
+    If a host is assigned to a shard, rpcs that change the host's attributes should be
     forwarded to the shard.
 
     This assumes the first argument of the function represents a host id.
@@ -1058,8 +1057,7 @@
             shard_hostname = host.shard.hostname
         ret = func(**kwargs)
         if shard_hostname and not server_utils.is_shard():
-            run_rpc_on_multiple_hostnames(func.func_name,
-                                          [shard_hostname],
+            run_rpc_on_multiple_hostnames(func.__name__, [shard_hostname],
                                           **kwargs)
         return ret
 
@@ -1081,16 +1079,14 @@
     shard_host_map = bucket_hosts_by_shard(host_objs)
 
     # Execute the rpc against the appropriate shards.
-    for shard, hostnames in shard_host_map.iteritems():
+    for shard, hostnames in six.iteritems(shard_host_map):
         if include_hostnames:
             kwargs['hosts'] = hostnames
         try:
             run_rpc_on_multiple_hostnames(rpc_name, [shard], **kwargs)
-        except:
-            ei = sys.exc_info()
-            new_exc = error.RPCException('RPC %s failed on shard %s due to '
-                    '%s: %s' % (rpc_name, shard, ei[0].__name__, ei[1]))
-            raise new_exc.__class__, new_exc, ei[2]
+        except Exception as e:
+            raise error.RPCException('RPC %s failed on shard %s due to %s' %
+                                     (rpc_name, shard, e))
 
 
 def run_rpc_on_multiple_hostnames(rpc_call, shard_hostnames, **kwargs):
@@ -1168,7 +1164,7 @@
             afe = frontend_wrappers.RetryingAFE(
                     server=server_utils.get_global_afe_hostname(),
                     user=thread_local.get_user())
-            return afe.run(func.func_name, **kwargs)
+            return afe.run(func.__name__, **kwargs)
         return func(**kwargs)
 
     return replacement
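
The rpc_utils.py hunks above are a routine Python 2-to-3 port: dict.iteritems() becomes six.iteritems(), basestring becomes six.string_types, dict views are wrapped in list() before indexing, "except E, e" becomes "except E as e", and func.func_name becomes func.__name__. A short sketch of the dictionary and string pieces, runnable on either interpreter (the data is made up for illustration):

    import six

    shard_host_map = {'shard1': ['host1', 'host2'], 'shard2': ['host3']}

    # six.iteritems() works on Python 2 and 3; plain .iteritems() is 2-only.
    for shard, hostnames in six.iteritems(shard_host_map):
        print(shard, hostnames)

    # Python 3 dict views are not indexable, so wrap them in list() first.
    first_shard_hosts = list(shard_host_map.values())[0]

    # basestring no longer exists in Python 3; six.string_types covers both.
    def is_name(value):
        return isinstance(value, six.string_types)

    assert is_name('lumpy') and not is_name(42)
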
diff --git a/frontend/afe/rpc_utils_unittest.py b/frontend/afe/rpc_utils_unittest.py
index 5b517f4..396196b 100755
--- a/frontend/afe/rpc_utils_unittest.py
+++ b/frontend/afe/rpc_utils_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 #
 # Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -6,8 +6,8 @@
 
 """Unit tests for frontend/afe/rpc_utils.py."""
 
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.client.common_lib import control_data
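
rpc_utils_unittest.py only needs its mock import adjusted: from Python 3.3 onward the third-party mock package is available in the standard library as unittest.mock with the same public API, so swapping the import leaves the test bodies untouched. A tiny compatibility shim some projects use during such a transition (illustrative only, not part of this change):

    try:
        from unittest import mock  # Python 3.3+: mock ships with the stdlib.
    except ImportError:
        import mock                # Python 2: third-party "mock" package.

    stub = mock.MagicMock(return_value=42)
    assert stub() == 42
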
diff --git a/frontend/afe/views.py b/frontend/afe/views.py
index e85af5c..b03e115 100644
--- a/frontend/afe/views.py
+++ b/frontend/afe/views.py
@@ -8,14 +8,7 @@
 from autotest_lib.frontend.afe import models, rpc_handler, rpc_interface
 from autotest_lib.frontend.afe import rpc_utils
 
-moblab_rpc_interface = utils.import_site_module(
-        __file__, 'autotest_lib.frontend.afe.moblab_rpc_interface',
-        dummy=object())
-
-# since moblab_rpc_interface is later in the list, its methods will
-# override those of rpc_interface
-rpc_handler_obj = rpc_handler.RpcHandler((rpc_interface,
-                                          moblab_rpc_interface),
+rpc_handler_obj = rpc_handler.RpcHandler((rpc_interface, ),
                                          document_module=rpc_interface)
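
views.py previously layered moblab_rpc_interface over rpc_interface, relying on the fact that later modules in the tuple override earlier ones; with Moblab support removed, the handler is built from rpc_interface alone. A conceptual sketch of why module order mattered (this is not the real RpcHandler, just an illustration of the override rule):

    def build_dispatch_table(modules):
        """Map RPC names to callables; later modules win on name clashes."""
        table = {}
        for module in modules:
            for name in dir(module):
                attr = getattr(module, name)
                if callable(attr) and not name.startswith('_'):
                    table[name] = attr
        return table

    # Before: build_dispatch_table((rpc_interface, moblab_rpc_interface))
    # After:  build_dispatch_table((rpc_interface,))
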
 
 
diff --git a/frontend/client/gwt_dir b/frontend/client/gwt_dir
index 9bcd4bf..7125228 100755
--- a/frontend/client/gwt_dir
+++ b/frontend/client/gwt_dir
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Find the GWT installation and print its location to stdout.
 
@@ -10,11 +10,11 @@
         os.path.dirname(__file__), '..', '..', 'site-packages', 'gwt'))
 
 if os.path.isdir(site_gwt):
-    print site_gwt
+    print(site_gwt)
     sys.exit(0)
 
 if not os.path.isdir(DEFAULT_GWT):
     sys.stderr.write('(%s): GWT not installed?\n' % __file__)
 
-print DEFAULT_GWT
+print(DEFAULT_GWT)
 
diff --git a/frontend/client/src/autotest/moblab/SuiteRunnerView.java b/frontend/client/src/autotest/moblab/SuiteRunnerView.java
index 7a4c6b9..9a781da 100644
--- a/frontend/client/src/autotest/moblab/SuiteRunnerView.java
+++ b/frontend/client/src/autotest/moblab/SuiteRunnerView.java
@@ -59,6 +59,7 @@
     "bvt-cq",
     "bvt-inline",
     "bvt-tast-cq",
+    "camera-usb-qual",
     "check_setup_cts_N",
     "check_setup_storage_qual",
     "cts_N",
@@ -72,8 +73,7 @@
     "power_loadtest_1hour",
     "power_loadtest_fast",
     "power_measurement_wrapper",
-    "power_sanity",
-    "usb-camera",
+    "power_check",
     "wifi_matfunc",
     "wifi_perf"
   );
diff --git a/frontend/client/src/autotest/moblab/wizard/WizardCard.java b/frontend/client/src/autotest/moblab/wizard/WizardCard.java
index fde4395..7c83b93 100644
--- a/frontend/client/src/autotest/moblab/wizard/WizardCard.java
+++ b/frontend/client/src/autotest/moblab/wizard/WizardCard.java
@@ -128,12 +128,12 @@
   }
 
   /**
-   * A dummy card for testing purpose.
+   * A stub card for testing purposes.
    */
-  public static class DummyCard extends WizardCard {
-    public DummyCard() {
-      setViewTitle("Dummy view");
-      setEditTitle("Dummy Edit");
+  public static class StubCard extends WizardCard {
+    public StubCard() {
+      setViewTitle("Stub view");
+      setEditTitle("Stub Edit");
     }
 
     @Override
diff --git a/frontend/client/src/autotest/public/AfeClient.html b/frontend/client/src/autotest/public/AfeClient.html
index 3bb1bb6..b778ce9 100644
--- a/frontend/client/src/autotest/public/AfeClient.html
+++ b/frontend/client/src/autotest/public/AfeClient.html
@@ -183,7 +183,7 @@
           <tr class="data-row">
             <td class="field-name">Job name:</td>
             <td class="has-tooltip" id="create_job_name"></td>
-            <td><!-- Dummy cell so background colour fills entire row !--></td>
+            <td><!-- Stub cell so background colour fills entire row !--></td>
           </tr>
           <tr class="data-row data-row-alternate">
             <td class="field-name">Image URL/Build: (optional)</td>
diff --git a/frontend/client/src/autotest/tko/TestDetailView.java b/frontend/client/src/autotest/tko/TestDetailView.java
index 0a700f6..8ecf080 100644
--- a/frontend/client/src/autotest/tko/TestDetailView.java
+++ b/frontend/client/src/autotest/tko/TestDetailView.java
@@ -82,7 +82,7 @@
         public void onOpen(OpenEvent<DisclosurePanel> event) {
             JSONObject params = new JSONObject();
             params.put("path", new JSONString(getLogUrl()));
-            logLoadingProxy.rpcCall("dummy", params, rpcCallback);
+            logLoadingProxy.rpcCall("stub", params, rpcCallback);
 
             setStatusText("Loading...");
         }
diff --git a/frontend/client/test/autotest/moblab/wizard/ConfigWizardTest.java b/frontend/client/test/autotest/moblab/wizard/ConfigWizardTest.java
index f656277..2510dab 100644
--- a/frontend/client/test/autotest/moblab/wizard/ConfigWizardTest.java
+++ b/frontend/client/test/autotest/moblab/wizard/ConfigWizardTest.java
@@ -7,7 +7,7 @@
 
   public void testWizard() {
     ConfigWizard wizard = new ConfigWizard();
-    WizardCard[] cards = new WizardCard[] { new WizardCard.DummyCard(), new WizardCard.DummyCard()};
+    WizardCard[] cards = new WizardCard[] { new WizardCard.StubCard(), new WizardCard.StubCard()};
     wizard.setCards(cards);
   }
 }
diff --git a/frontend/client_compilation_unittest.py b/frontend/client_compilation_unittest.py
index 763467a..d0ddcf0 100755
--- a/frontend/client_compilation_unittest.py
+++ b/frontend/client_compilation_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import os, shutil, tempfile, unittest
 import common
diff --git a/frontend/db/backends/afe/base.py b/frontend/db/backends/afe/base.py
index 1773ea0..4b2814d 100644
--- a/frontend/db/backends/afe/base.py
+++ b/frontend/db/backends/afe/base.py
@@ -5,7 +5,7 @@
 
 try:
     import MySQLdb as Database
-except ImportError, e:
+except ImportError as e:
     from django.core.exceptions import ImproperlyConfigured
     raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e)
 
diff --git a/frontend/django_lite_unittest.py b/frontend/django_lite_unittest.py
index 7f25c26..335c7bc 100755
--- a/frontend/django_lite_unittest.py
+++ b/frontend/django_lite_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #pylint: disable-msg=C0111
 
 import unittest
diff --git a/frontend/manage.py b/frontend/manage.py
index 7f0e6af..c879bbc 100755
--- a/frontend/manage.py
+++ b/frontend/manage.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 import sys
 import setup_django_environment
 
diff --git a/frontend/migrations/001_initial_db.py b/frontend/migrations/001_initial_db.py
deleted file mode 100644
index 0b0887a..0000000
--- a/frontend/migrations/001_initial_db.py
+++ /dev/null
@@ -1,22 +0,0 @@
-def migrate_up(manager):
-    raise Exception('migrate.py should be migrating directly to schema 51 '
-                    'instead of running migration 1...')
-
-
-def migrate_down(manager):
-    manager.execute_script(DROP_DB_SQL)
-
-
-DROP_DB_SQL = """\
-DROP TABLE IF EXISTS `acl_groups`;
-DROP TABLE IF EXISTS `acl_groups_hosts`;
-DROP TABLE IF EXISTS `acl_groups_users`;
-DROP TABLE IF EXISTS `autotests`;
-DROP TABLE IF EXISTS `host_queue_entries`;
-DROP TABLE IF EXISTS `hosts`;
-DROP TABLE IF EXISTS `hosts_labels`;
-DROP TABLE IF EXISTS `ineligible_host_queues`;
-DROP TABLE IF EXISTS `jobs`;
-DROP TABLE IF EXISTS `labels`;
-DROP TABLE IF EXISTS `users`;
-"""
diff --git a/frontend/migrations/002_cleanup_fields.py b/frontend/migrations/002_cleanup_fields.py
deleted file mode 100644
index 88abc78..0000000
--- a/frontend/migrations/002_cleanup_fields.py
+++ /dev/null
@@ -1,12 +0,0 @@
-def migrate_up(manager):
-    manager.execute('ALTER TABLE autotests DROP params')
-    manager.execute('ALTER TABLE jobs DROP kernel_url, DROP status, '
-                    'DROP submitted_on')
-    manager.execute('ALTER TABLE host_queue_entries DROP created_on')
-
-def migrate_down(manager):
-    manager.execute('ALTER TABLE autotests ADD params VARCHAR(255)')
-    manager.execute('ALTER TABLE jobs ADD kernel_url VARCHAR(255), '
-                    'ADD status VARCHAR(255), ADD submitted_on datetime')
-    manager.execute('ALTER TABLE host_queue_entries ADD created_on '
-                    'datetime')
diff --git a/frontend/migrations/003_test_synch_type.py b/frontend/migrations/003_test_synch_type.py
deleted file mode 100644
index 939d65f..0000000
--- a/frontend/migrations/003_test_synch_type.py
+++ /dev/null
@@ -1,9 +0,0 @@
-def migrate_up(manager):
-    manager.execute('ALTER TABLE autotests ADD `synch_type` smallint '
-                    'NOT NULL')
-    # set all to asynchronous by default
-    manager.execute('UPDATE autotests SET synch_type=1')
-
-
-def migrate_down(manager):
-    manager.execute('ALTER TABLE autotests DROP `synch_type`')
diff --git a/frontend/migrations/004_add_indexes.py b/frontend/migrations/004_add_indexes.py
deleted file mode 100644
index 40b496f..0000000
--- a/frontend/migrations/004_add_indexes.py
+++ /dev/null
@@ -1,23 +0,0 @@
-INDEXES = (
-    ('ineligible_host_queues', 'job_id'),
-    ('ineligible_host_queues', 'host_id'),
-    ('host_queue_entries', 'job_id'),
-    ('host_queue_entries', 'host_id'),
-    ('host_queue_entries', 'meta_host'),
-    ('hosts_labels', 'label_id'),
-)
-
-def get_index_name(table, field):
-    return table + '_' + field
-
-
-def migrate_up(manager):
-    for table, field in INDEXES:
-        manager.execute('CREATE INDEX %s ON %s (%s)' %
-                        (get_index_name(table, field), table, field))
-
-
-def migrate_down(manager):
-    for table, field in INDEXES:
-        manager.execute('DROP INDEX %s ON %s' %
-                        (get_index_name(table, field), table))
diff --git a/frontend/migrations/005_one_more_index.py b/frontend/migrations/005_one_more_index.py
deleted file mode 100644
index 787c1c0..0000000
--- a/frontend/migrations/005_one_more_index.py
+++ /dev/null
@@ -1,7 +0,0 @@
-def migrate_up(manger):
-    manger.execute('CREATE INDEX hosts_labels_host_id ON hosts_labels '
-                   '(host_id)')
-
-
-def migrate_down(manger):
-    manger.execute('DROP INDEX hosts_labels_host_id ON hosts_labels')
diff --git a/frontend/migrations/006_host_label_invalid.py b/frontend/migrations/006_host_label_invalid.py
deleted file mode 100644
index 20c5e4c..0000000
--- a/frontend/migrations/006_host_label_invalid.py
+++ /dev/null
@@ -1,8 +0,0 @@
-def migrate_up(manager):
-    manager.execute('ALTER TABLE hosts ADD `invalid` bool NOT NULL')
-    manager.execute('ALTER TABLE labels ADD `invalid` bool NOT NULL')
-
-
-def migrate_down(manager):
-    manager.execute('ALTER TABLE hosts DROP invalid')
-    manager.execute('ALTER TABLE labels DROP invalid')
diff --git a/frontend/migrations/007_indexes_on_acl_tables.py b/frontend/migrations/007_indexes_on_acl_tables.py
deleted file mode 100644
index 1dab8cf..0000000
--- a/frontend/migrations/007_indexes_on_acl_tables.py
+++ /dev/null
@@ -1,21 +0,0 @@
-INDEXES = (
-    ('acl_groups_hosts', 'host_id'),
-    ('acl_groups_hosts', 'acl_group_id'),
-    ('acl_groups_users', 'user_id'),
-    ('acl_groups_users', 'acl_group_id'),
-)
-
-def get_index_name(table, field):
-    return table + '_' + field
-
-
-def migrate_up(manager):
-    for table, field in INDEXES:
-        manager.execute('CREATE INDEX %s ON %s (%s)' %
-                        (get_index_name(table, field), table, field))
-
-
-def migrate_down(manager):
-    for table, field in INDEXES:
-        manager.execute('DROP INDEX %s ON %s' %
-                        (get_index_name(table, field), table))
diff --git a/frontend/migrations/008_add_profiler_table.py b/frontend/migrations/008_add_profiler_table.py
deleted file mode 100644
index 7463733..0000000
--- a/frontend/migrations/008_add_profiler_table.py
+++ /dev/null
@@ -1,17 +0,0 @@
-def migrate_up(manager):
-    manager.execute_script(CREATE_TABLE)
-
-
-def migrate_down(manager):
-    manager.execute("DROP TABLE IF EXISTS 'profilers'")
-
-
-CREATE_TABLE = """\
-CREATE TABLE `profilers` (
-  `id` int(11) NOT NULL auto_increment,
-  `name` varchar(255) NOT NULL,
-  `description` longtext NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `name` (`name`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1
-"""
diff --git a/frontend/migrations/009_add_timeout_to_jobs.py b/frontend/migrations/009_add_timeout_to_jobs.py
deleted file mode 100644
index 819a492..0000000
--- a/frontend/migrations/009_add_timeout_to_jobs.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from autotest_lib.client.common_lib import global_config
-
-def migrate_up(manager):
-    # Add the column with a default first, and then drop the default.
-    # We cannot add the column, populate the values, and then specify NOT NULL
-    # because a record added while this is executing could enter a null value
-    # into the table before NOT NULL is specified.
-    manager.execute(ADD_COLUMN)
-    manager.execute(DROP_DEFAULT)
-
-def migrate_down(manager):
-    manager.execute(DROP_COLUMN)
-
-job_timeout_default = global_config.global_config.get_config_value(
-    'AUTOTEST_WEB', 'job_timeout_default')
-ADD_COLUMN = ('ALTER TABLE jobs ADD COLUMN timeout INT NOT NULL DEFAULT %s'
-              % job_timeout_default)
-DROP_DEFAULT = 'ALTER TABLE jobs ALTER COLUMN timeout DROP DEFAULT'
-DROP_COLUMN = 'ALTER TABLE jobs DROP COLUMN timeout'
diff --git a/frontend/migrations/010_add_protection_to_hosts.py b/frontend/migrations/010_add_protection_to_hosts.py
deleted file mode 100644
index fc01b77..0000000
--- a/frontend/migrations/010_add_protection_to_hosts.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from autotest_lib.client.common_lib import global_config, host_protections
-
-def migrate_up(manager):
-    manager.execute_script(ADD_PROTECTION_COLUMN)
-
-def migrate_down(manager):
-    manager.execute(DROP_COLUMN)
-
-default_protection = global_config.global_config.get_config_value(
-    'HOSTS', 'default_protection')
-default_protection_value = host_protections.Protection.get_value(
-    default_protection)
-
-ADD_PROTECTION_COLUMN = """ALTER TABLE hosts
-                           ADD COLUMN protection INT NOT NULL
-                           DEFAULT %s;
-
-                           ALTER TABLE hosts
-                           ALTER COLUMN protection
-                           DROP DEFAULT;
-                           """ % default_protection_value
-
-DROP_COLUMN = """ALTER TABLE hosts
-                 DROP COLUMN protection"""
diff --git a/frontend/migrations/011_support_one_time_hosts.py b/frontend/migrations/011_support_one_time_hosts.py
deleted file mode 100644
index 96c666d..0000000
--- a/frontend/migrations/011_support_one_time_hosts.py
+++ /dev/null
@@ -1,41 +0,0 @@
-def migrate_up(manager):
-    manager.execute_script(CLEAN_DATABASE)
-    manager.execute(ADD_HOST_QUEUE_DELETED_COLUMN)
-    manager.execute(DROP_DEFAULT)
-
-def migrate_down(manager):
-    manager.execute(DROP_HOST_QUEUE_DELETED_COLUMN)
-
-CLEAN_DATABASE = """DELETE FROM acl_groups_hosts
-                    WHERE host_id IN
-                        (SELECT id FROM hosts WHERE invalid = TRUE);
-
-                    DELETE FROM ineligible_host_queues
-                    WHERE host_id IN
-                        (SELECT id FROM hosts WHERE invalid = TRUE);
-
-                    UPDATE host_queue_entries
-                    SET status = 'Abort'
-                    WHERE host_id IN
-                        (SELECT id FROM hosts WHERE invalid = TRUE)
-                        AND active = TRUE;
-
-                    UPDATE host_queue_entries
-                    SET status = 'Aborted', complete = TRUE
-                    WHERE host_id IN
-                        (SELECT id FROM hosts WHERE invalid = TRUE)
-                        AND active = FALSE AND complete = FALSE;
-
-                    DELETE FROM hosts_labels
-                    WHERE host_id IN
-                        (SELECT id FROM hosts WHERE invalid = TRUE);"""
-
-DROP_HOST_QUEUE_DELETED_COLUMN = """ALTER TABLE host_queue_entries
-                                    DROP COLUMN deleted"""
-
-ADD_HOST_QUEUE_DELETED_COLUMN = """ALTER TABLE host_queue_entries
-                                   ADD COLUMN deleted BOOLEAN
-                                       NOT NULL DEFAULT FALSE"""
-
-DROP_DEFAULT = """ALTER TABLE host_queue_entries
-                  ALTER COLUMN deleted DROP DEFAULT"""
diff --git a/frontend/migrations/012_reset_access_levels.py b/frontend/migrations/012_reset_access_levels.py
deleted file mode 100644
index 5a95416..0000000
--- a/frontend/migrations/012_reset_access_levels.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def migrate_up(manager):
-    manager.execute('UPDATE users SET access_level = 0')
-
-def migrate_down(manager):
-    pass
diff --git a/frontend/migrations/013_new_test_fields.py b/frontend/migrations/013_new_test_fields.py
deleted file mode 100644
index 02ce6e8..0000000
--- a/frontend/migrations/013_new_test_fields.py
+++ /dev/null
@@ -1,20 +0,0 @@
-def migrate_up(manager):
-    manager.execute('ALTER TABLE jobs ADD run_verify tinyint(1) default 1')
-    manager.execute('ALTER TABLE autotests ADD author VARCHAR(256)')
-    manager.execute('ALTER TABLE autotests ADD dependencies VARCHAR(256)')
-    manager.execute('ALTER TABLE autotests ADD experimental SMALLINT DEFAULT 0')
-    manager.execute('ALTER TABLE autotests ADD run_verify SMALLINT DEFAULT 1')
-    manager.execute('ALTER TABLE autotests ADD test_time SMALLINT DEFAULT 1')
-    manager.execute('ALTER TABLE autotests ADD test_category VARCHAR(256)')
-    manager.execute('ALTER TABLE autotests ADD sync_count INT(11) DEFAULT 1')
-
-
-def migrate_down(manager):
-    manager.execute('ALTER TABLE jobs DROP run_verify')
-    manager.execute('ALTER TABLE autotests DROP sync_count')
-    manager.execute('ALTER TABLE autotests DROP author')
-    manager.execute('ALTER TABLE autotests DROP dependencies')
-    manager.execute('ALTER TABLE autotests DROP experimental')
-    manager.execute('ALTER TABLE autotests DROP run_verify')
-    manager.execute('ALTER TABLE autotests DROP test_time')
-    manager.execute('ALTER TABLE autotests DROP test_category')
diff --git a/frontend/migrations/014_run_verify.py b/frontend/migrations/014_run_verify.py
deleted file mode 100644
index eac7e28..0000000
--- a/frontend/migrations/014_run_verify.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def migrate_up(manager):
-    manager.execute('ALTER TABLE host_queue_entries ADD run_verify SMALLINT DEFAULT 1')
-
-
-def migrate_down(manager):
-    manager.execute('ALTER TABLE host_queue_entries DROP run_verify')
diff --git a/frontend/migrations/015_add_locked_by_and_lock_time.py b/frontend/migrations/015_add_locked_by_and_lock_time.py
deleted file mode 100644
index 421d37b..0000000
--- a/frontend/migrations/015_add_locked_by_and_lock_time.py
+++ /dev/null
@@ -1,12 +0,0 @@
-def migrate_up(manager):
-    manager.execute("""ALTER TABLE hosts
-                       ADD COLUMN locked_by_id
-                       INT(11) DEFAULT NULL""")
-    manager.execute("""ALTER TABLE hosts
-                       ADD COLUMN lock_time
-                       DATETIME DEFAULT NULL""")
-
-
-def migrate_down(manager):
-    manager.execute('ALTER TABLE hosts DROP COLUMN locked_by_id')
-    manager.execute('ALTER TABLE hosts DROP COLUMN lock_time')
diff --git a/frontend/migrations/016_remove_run_verify.py b/frontend/migrations/016_remove_run_verify.py
deleted file mode 100644
index d47edf3..0000000
--- a/frontend/migrations/016_remove_run_verify.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def migrate_up(manager):
-    manager.execute('ALTER TABLE host_queue_entries DROP run_verify')
-
-
-def migrate_down(manager):
-    manager.execute('ALTER TABLE host_queue_entries ADD run_verify SMALLINT DEFAULT 1')
diff --git a/frontend/migrations/017_add_email_list.py b/frontend/migrations/017_add_email_list.py
deleted file mode 100644
index 5de2432..0000000
--- a/frontend/migrations/017_add_email_list.py
+++ /dev/null
@@ -1,8 +0,0 @@
-def migrate_up(manager):
-    manager.execute(ADD_COLUMN)
-
-def migrate_down(manager):
-    manager.execute(DROP_COLUMN)
-
-ADD_COLUMN = 'ALTER TABLE jobs ADD COLUMN email_list varchar(250) NOT NULL'
-DROP_COLUMN = 'ALTER TABLE jobs DROP COLUMN email_list'
diff --git a/frontend/migrations/018_add_label_only_if_needed.py b/frontend/migrations/018_add_label_only_if_needed.py
deleted file mode 100644
index 790299a..0000000
--- a/frontend/migrations/018_add_label_only_if_needed.py
+++ /dev/null
@@ -1,24 +0,0 @@
-CREATE_MANY2MANY_TABLES = """
-CREATE TABLE `autotests_dependency_labels` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `test_id` integer NOT NULL REFERENCES `autotests` (`id`),
-    `label_id` integer NOT NULL REFERENCES `labels` (`id`),
-    UNIQUE (`test_id`, `label_id`)
-);
-CREATE TABLE `jobs_dependency_labels` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `job_id` integer NOT NULL REFERENCES `jobs` (`id`),
-    `label_id` integer NOT NULL REFERENCES `labels` (`id`),
-    UNIQUE (`job_id`, `label_id`)
-);
-"""
-
-def migrate_up(manager):
-    manager.execute('ALTER TABLE labels '
-                    'ADD COLUMN only_if_needed bool NOT NULL')
-    manager.execute_script(CREATE_MANY2MANY_TABLES)
-
-def migrate_down(manager):
-    manager.execute('ALTER TABLE labels DROP COLUMN only_if_needed')
-    manager.execute('DROP TABLE IF EXISTS `autotests_dependency_labels`')
-    manager.execute('DROP TABLE IF EXISTS `jobs_dependency_labels`')
diff --git a/frontend/migrations/019_add_abort_log.py b/frontend/migrations/019_add_abort_log.py
deleted file mode 100644
index 48311d1..0000000
--- a/frontend/migrations/019_add_abort_log.py
+++ /dev/null
@@ -1,13 +0,0 @@
-def migrate_up(manager):
-    manager.execute(CREATE_TABLE)
-
-def migrate_down(manager):
-    manager.execute("DROP TABLE IF EXISTS `aborted_host_queue_entries`")
-
-CREATE_TABLE = """\
-CREATE TABLE `aborted_host_queue_entries` (
-    `queue_entry_id` integer NOT NULL PRIMARY KEY,
-    `aborted_by_id` integer NOT NULL,
-    `aborted_on` datetime NOT NULL
-) ENGINE=InnoDB DEFAULT CHARSET=latin1
-"""
diff --git a/frontend/migrations/020_add_host_dirty_and_job_reboots.py b/frontend/migrations/020_add_host_dirty_and_job_reboots.py
deleted file mode 100644
index 54e85ab..0000000
--- a/frontend/migrations/020_add_host_dirty_and_job_reboots.py
+++ /dev/null
@@ -1,18 +0,0 @@
-UP_SQL = """
-ALTER TABLE hosts ADD COLUMN `dirty` bool NOT NULL;
-ALTER TABLE jobs ADD COLUMN `reboot_before` smallint NOT NULL;
-ALTER TABLE jobs ADD COLUMN `reboot_after` smallint NOT NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE hosts DROP COLUMN `dirty`;
-ALTER TABLE jobs DROP COLUMN `reboot_before`;
-ALTER TABLE jobs DROP COLUMN `reboot_after`;
-"""
-
-def migrate_up(manager):
-    manager.execute_script(UP_SQL)
-
-
-def migrate_down(manager):
-    manager.execute_script(DOWN_SQL)
diff --git a/frontend/migrations/021_add_user_reboot_prefs.py b/frontend/migrations/021_add_user_reboot_prefs.py
deleted file mode 100644
index cd7cd5c..0000000
--- a/frontend/migrations/021_add_user_reboot_prefs.py
+++ /dev/null
@@ -1,19 +0,0 @@
-UP_SQL = """
-ALTER TABLE users ADD COLUMN `reboot_before` smallint NOT NULL;
-ALTER TABLE users ADD COLUMN `reboot_after` smallint NOT NULL;
-UPDATE users SET reboot_before=1, reboot_after=2;
-"""
-
-
-DOWN_SQL = """
-ALTER TABLE users DROP COLUMN reboot_before;
-ALTER TABLE users DROP COLUMN reboot_after;
-"""
-
-
-def migrate_up(manager):
-    manager.execute_script(UP_SQL)
-
-
-def migrate_down(manager):
-    manager.execute_script(DOWN_SQL)
diff --git a/frontend/migrations/022_implement_sync_count.py b/frontend/migrations/022_implement_sync_count.py
deleted file mode 100644
index f16b0dc..0000000
--- a/frontend/migrations/022_implement_sync_count.py
+++ /dev/null
@@ -1,50 +0,0 @@
-DOWN_SQL = """
-ALTER TABLE jobs ADD COLUMN synchronizing tinyint(1) default NULL;
-ALTER TABLE autotests ADD COLUMN synch_type smallint(6) NOT NULL;
-UPDATE autotests SET synch_type = 1;
-UPDATE autotests SET synch_type = 2 WHERE sync_count > 1;
-ALTER TABLE jobs ADD COLUMN synch_type int(11) default NULL;
-UPDATE jobs SET synch_type = 1;
-UPDATE jobs SET synch_type = 2 WHERE synch_count > 1;
-ALTER TABLE host_queue_entries DROP COLUMN `execution_subdir`;
-"""
-
-def migrate_up(manager):
-    # add execution_subdir field
-    manager.execute("""ALTER TABLE host_queue_entries ADD COLUMN
-                       `execution_subdir` varchar(255) NOT NULL""")
-
-    # fill in execution_subdir field for running/complete entries
-    rows = manager.execute("""
-        SELECT jobs.id, jobs.synch_type, COUNT(1) FROM jobs
-        INNER JOIN host_queue_entries AS hqe ON jobs.id = hqe.job_id
-        GROUP BY jobs.id""")
-    job_hqe_count = dict((row[0], row[2]) for row in rows)
-    synch_jobs = set(row[0] for row in rows if row[1] == 2)
-    hqes = manager.execute("""
-        SELECT hqe.id, hqe.job_id, hqe.status, hqe.complete, hosts.hostname
-        FROM host_queue_entries AS hqe
-        INNER JOIN hosts ON hqe.host_id = hosts.id
-        WHERE hqe.status IN ('Starting', 'Running') OR complete""")
-    for id, job_id, status, complete, hostname in hqes:
-        if job_id in synch_jobs or job_hqe_count[job_id] == 1:
-            execution_subdir = ''
-        else:
-            execution_subdir = hostname
-        manager.execute(
-            'UPDATE host_queue_entries SET execution_subdir = %s WHERE id = %s',
-            execution_subdir, id)
-
-    # ensure synch_type information doesn't get lost if we need to migrate down
-    manager.execute('UPDATE jobs SET synch_count = 1 WHERE synch_type = 1')
-    manager.execute('UPDATE jobs SET synch_count = 2 '
-                    'WHERE synch_type = 2 AND synch_count = 1')
-    # drop the old synch_type fields
-    manager.execute('ALTER TABLE jobs DROP COLUMN synch_type')
-    manager.execute('ALTER TABLE autotests DROP COLUMN synch_type')
-    # drop deprecated synchronizing field
-    manager.execute('ALTER TABLE jobs DROP COLUMN synchronizing')
-
-
-def migrate_down(manager):
-    manager.execute_script(DOWN_SQL)
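A note on the backfill loop in the deleted 022 migration: each entry's execution_subdir is decided from an in-memory per-job count built out of one GROUP BY query, rather than per-row SQL. The decision itself is plain Python and can be sketched on its own; the tuples below stand in for database rows and the sample values are made up.

    # (job_id, synch_type, hqe_count) tuples standing in for the GROUP BY rows.
    rows = [(1, 2, 3), (2, 1, 1), (3, 1, 4)]
    job_hqe_count = dict((job_id, count) for job_id, _, count in rows)
    synch_jobs = set(job_id for job_id, synch_type, _ in rows if synch_type == 2)


    def execution_subdir_for(job_id, hostname):
        # Synchronous jobs and single-host jobs ran directly in the job
        # directory; everything else ran in a per-host subdirectory.
        if job_id in synch_jobs or job_hqe_count[job_id] == 1:
            return ''
        return hostname


    print(execution_subdir_for(1, 'host1'))  # '' (synchronous job)
    print(execution_subdir_for(3, 'host2'))  # 'host2'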
diff --git a/frontend/migrations/023_add_show_experimental_pref.py b/frontend/migrations/023_add_show_experimental_pref.py
deleted file mode 100644
index 8a336d4..0000000
--- a/frontend/migrations/023_add_show_experimental_pref.py
+++ /dev/null
@@ -1,14 +0,0 @@
-UP_SQL = """
-ALTER TABLE users ADD COLUMN `show_experimental` bool NOT NULL DEFAULT FALSE;
-"""
-
-DOWN_SQL = """
-ALTER TABLE users DROP COLUMN `show_experimental`;
-"""
-
-def migrate_up(manager):
-    manager.execute_script(UP_SQL)
-
-
-def migrate_down(manager):
-    manager.execute_script(DOWN_SQL)
diff --git a/frontend/migrations/024_make_label_name_unique.py b/frontend/migrations/024_make_label_name_unique.py
deleted file mode 100644
index 1467604..0000000
--- a/frontend/migrations/024_make_label_name_unique.py
+++ /dev/null
@@ -1,14 +0,0 @@
-UP_SQL = """
-ALTER TABLE labels MODIFY name VARCHAR(255) UNIQUE;
-"""
-
-DOWN_SQL = """
-ALTER TABLE labels MODIFY name VARCHAR(255);
-"""
-
-def migrate_up(manager):
-    manager.execute_script(UP_SQL)
-
-
-def migrate_down(manager):
-    manager.execute_script(DOWN_SQL)
diff --git a/frontend/migrations/025_aclgroup_id_column_rename.py b/frontend/migrations/025_aclgroup_id_column_rename.py
deleted file mode 100644
index 8bf3ed8..0000000
--- a/frontend/migrations/025_aclgroup_id_column_rename.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# acl_group_id in the many2many pivot table was an old Ruby-ism which
-# required a gross hack on Django 0.96 to support.  The Django name for the
-# column is aclgroup_id; it requires no unsupportable hacks.
-
-# NOTE: This is the annoying MySQL way of renaming columns.
-UP_SQL = """
-ALTER TABLE acl_groups_hosts CHANGE
-    acl_group_id aclgroup_id int(11) default NULL;
-ALTER TABLE acl_groups_users CHANGE
-    acl_group_id aclgroup_id int(11) default NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE acl_groups_hosts CHANGE
-    aclgroup_id acl_group_id int(11) default NULL;
-ALTER TABLE acl_groups_users CHANGE
-    aclgroup_id acl_group_id int(11) default NULL;
-"""
-
-def migrate_up(manager):
-    manager.execute_script(UP_SQL)
-
-
-def migrate_down(manager):
-    manager.execute_script(DOWN_SQL)
diff --git a/frontend/migrations/026_remove_hqe_priority.py b/frontend/migrations/026_remove_hqe_priority.py
deleted file mode 100644
index a710a62..0000000
--- a/frontend/migrations/026_remove_hqe_priority.py
+++ /dev/null
@@ -1,14 +0,0 @@
-UP_SQL = """
-ALTER TABLE `host_queue_entries` DROP COLUMN `priority`;
-"""
-
-DOWN_SQL = """
-ALTER TABLE `host_queue_entries` ADD COLUMN `priority` int(11) default NULL
-"""
-
-def migrate_up(manager):
-    manager.execute_script(UP_SQL)
-
-
-def migrate_down(manager):
-    manager.execute_script(DOWN_SQL)
diff --git a/frontend/migrations/027_fix_innodb.py b/frontend/migrations/027_fix_innodb.py
deleted file mode 100644
index 6299560..0000000
--- a/frontend/migrations/027_fix_innodb.py
+++ /dev/null
@@ -1,11 +0,0 @@
-UP_SQL = """
-ALTER TABLE autotests_dependency_labels ENGINE=InnoDB;
-ALTER TABLE jobs_dependency_labels ENGINE=InnoDB;
-"""
-
-def migrate_up(manager):
-    manager.execute_script(UP_SQL)
-
-
-def migrate_down(manager):
-    pass
diff --git a/frontend/migrations/028_add_atomic_groups.py b/frontend/migrations/028_add_atomic_groups.py
deleted file mode 100644
index 62130c9..0000000
--- a/frontend/migrations/028_add_atomic_groups.py
+++ /dev/null
@@ -1,30 +0,0 @@
-def migrate_up(manager):
-    manager.execute_script(CREATE_TABLE)
-    manager.execute("ALTER TABLE labels ADD `atomic_group_id` "
-                    "INT(11) DEFAULT NULL ")
-    manager.execute("ALTER TABLE labels ADD CONSTRAINT FOREIGN KEY "
-                    "(`atomic_group_id`) REFERENCES `atomic_groups` (`id`) "
-                    "ON DELETE NO ACTION")
-    manager.execute("ALTER TABLE host_queue_entries ADD `atomic_group_id` "
-                    "INT(11) DEFAULT NULL")
-    manager.execute("ALTER TABLE host_queue_entries ADD CONSTRAINT FOREIGN KEY "
-                    "(`atomic_group_id`) REFERENCES `atomic_groups` (`id`) "
-                    "ON DELETE NO ACTION")
-
-
-def migrate_down(manager):
-    manager.execute("ALTER TABLE host_queue_entries REMOVE `atomic_group_id`")
-    manager.execute("ALTER TABLE labels REMOVE `atomic_group_id`")
-    manager.execute("DROP TABLE IF EXISTS `atomic_groups`")
-
-
-CREATE_TABLE = """\
-CREATE TABLE `atomic_groups` (
-  `id` int(11) NOT NULL auto_increment,
-  `name` varchar(255) NOT NULL,
-  `description` longtext DEFAULT NULL,
-  `max_number_of_machines` int(11) NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `name` (`name`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1
-"""
diff --git a/frontend/migrations/029_add_atomic_group_invalid.py b/frontend/migrations/029_add_atomic_group_invalid.py
deleted file mode 100644
index e0f1486..0000000
--- a/frontend/migrations/029_add_atomic_group_invalid.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def migrate_up(manager):
-    manager.execute('ALTER TABLE atomic_groups ADD `invalid` bool NOT NULL')
-
-
-def migrate_down(manager):
-    manager.execute('ALTER TABLE atomic_groups DROP invalid')
diff --git a/frontend/migrations/030_update_hosts_invalid.py b/frontend/migrations/030_update_hosts_invalid.py
deleted file mode 100644
index 737f92e..0000000
--- a/frontend/migrations/030_update_hosts_invalid.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def migrate_up(manager):
-    manager.execute("ALTER TABLE hosts MODIFY invalid TINYINT(1) DEFAULT 0")
-
-
-def migrate_down(manager):
-    manager.execute("ALTER TABLE hosts MODIFY invalid TINYINT(1) DEFAULT NULL")
diff --git a/frontend/migrations/031_add_hqe_aborted_flag.py b/frontend/migrations/031_add_hqe_aborted_flag.py
deleted file mode 100644
index 57c8d0f..0000000
--- a/frontend/migrations/031_add_hqe_aborted_flag.py
+++ /dev/null
@@ -1,11 +0,0 @@
-def migrate_up(manager):
-    manager.execute('ALTER TABLE host_queue_entries '
-                    'ADD COLUMN `aborted` bool NOT NULL DEFAULT FALSE')
-    manager.execute("UPDATE host_queue_entries SET aborted = true WHERE "
-                    "status IN ('Abort', 'Aborting', 'Aborted')")
-
-
-def migrate_down(manager):
-    manager.execute("UPDATE host_queue_entries SET status = 'Abort' WHERE "
-                    "aborted AND status != 'Aborted'")
-    manager.execute('ALTER TABLE host_queue_entries DROP COLUMN `aborted`')
diff --git a/frontend/migrations/032_add_recurring_run.py b/frontend/migrations/032_add_recurring_run.py
deleted file mode 100644
index 08fa2c0..0000000
--- a/frontend/migrations/032_add_recurring_run.py
+++ /dev/null
@@ -1,23 +0,0 @@
-def migrate_up(manager):
-    manager.execute_script(CREATE_TABLE)
-
-def migrate_down(manager):
-    manager.execute_script(DROP_TABLE)
-
-CREATE_TABLE = """\
-CREATE TABLE `recurring_run` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `job_id` integer NOT NULL REFERENCES `jobs` (`id`),
-    `owner_id` integer NOT NULL REFERENCES `users` (`id`),
-    `start_date` datetime NOT NULL,
-    `loop_period` integer NOT NULL,
-    `loop_count` integer NOT NULL
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-CREATE INDEX recurring_run_job_id ON `recurring_run` (`job_id`);
-CREATE INDEX recurring_run_owner_id ON `recurring_run` (`owner_id`);
-"""
-
-DROP_TABLE = """\
-DROP INDEX recurring_run_job_id ON `recurring_run`;
-DROP TABLE IF EXISTS `recurring_run`;
-"""
diff --git a/frontend/migrations/033_add_host_attributes.py b/frontend/migrations/033_add_host_attributes.py
deleted file mode 100644
index 642ddf3..0000000
--- a/frontend/migrations/033_add_host_attributes.py
+++ /dev/null
@@ -1,21 +0,0 @@
-UP_SQL = """
-CREATE TABLE `host_attributes` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `host_id` integer NOT NULL,
-    `attribute` varchar(90) NOT NULL,
-    `value` varchar(300) NOT NULL,
-    FOREIGN KEY (host_id) REFERENCES hosts (id),
-    KEY (attribute)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-"""
-
-DOWN_SQL = """
-DROP TABLE IF EXISTS host_attributes;
-"""
-
-def migrate_up(manager):
-    manager.execute_script(UP_SQL)
-
-
-def migrate_down(manager):
-    manager.execute_script(DOWN_SQL)
diff --git a/frontend/migrations/034_add_parse_failed_repair_option.py b/frontend/migrations/034_add_parse_failed_repair_option.py
deleted file mode 100644
index 94019c5..0000000
--- a/frontend/migrations/034_add_parse_failed_repair_option.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE jobs ADD COLUMN parse_failed_repair bool NOT NULL DEFAULT TRUE;
-"""
-
-DOWN_SQL = """
-ALTER TABLE jobs DROP COLUMN parse_failed_repair;
-"""
diff --git a/frontend/migrations/035_job_max_runtime.py b/frontend/migrations/035_job_max_runtime.py
deleted file mode 100644
index 907ec62..0000000
--- a/frontend/migrations/035_job_max_runtime.py
+++ /dev/null
@@ -1,12 +0,0 @@
-UP_SQL = """
-ALTER TABLE host_queue_entries ADD COLUMN started_on datetime NULL;
-ALTER TABLE jobs ADD COLUMN max_runtime_hrs integer NOT NULL;
--- conservative value for existing jobs, to make sure they don't get
--- unexpectedly timed out.
-UPDATE jobs SET max_runtime_hrs = timeout;
-"""
-
-DOWN_SQL = """
-ALTER TABLE jobs DROP COLUMN max_runtime_hrs;
-ALTER TABLE host_queue_entries DROP COLUMN started_on;
-"""
diff --git a/frontend/migrations/036_add_special_tasks.py b/frontend/migrations/036_add_special_tasks.py
deleted file mode 100644
index f8572b2..0000000
--- a/frontend/migrations/036_add_special_tasks.py
+++ /dev/null
@@ -1,15 +0,0 @@
-UP_SQL = """
-CREATE TABLE special_tasks (
-  id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
-  host_id INT NOT NULL REFERENCES hosts(id),
-  task VARCHAR(64) NOT NULL,
-  time_requested DATETIME NOT NULL,
-  is_active TINYINT(1) NOT NULL DEFAULT FALSE,
-  is_complete TINYINT(1) NOT NULL DEFAULT FALSE,
-  INDEX special_tasks_host_id (host_id)
-) ENGINE=innodb;
-"""
-
-DOWN_SQL = """
-DROP TABLE special_tasks;
-"""
diff --git a/frontend/migrations/037_db_constraints.py b/frontend/migrations/037_db_constraints.py
deleted file mode 100644
index 8f8554f..0000000
--- a/frontend/migrations/037_db_constraints.py
+++ /dev/null
@@ -1,135 +0,0 @@
-def execute_safely(manager, statement):
-    try:
-        manager.execute(statement)
-    except Exception:
-        print 'Statement %r failed (this is not fatal)' % statement
-
-
-def delete_duplicates(manager, table, first_id, second_id):
-    rows = manager.execute(
-        'SELECT %s, %s, COUNT(1) AS count FROM %s '
-        'GROUP BY %s, %s HAVING count > 1' %
-        (first_id, second_id, table, first_id, second_id))
-    for first_id_value, second_id_value, count_unused in rows:
-        manager.execute('DELETE FROM %s '
-                        'WHERE %s = %%s AND %s = %%s LIMIT 1' %
-                        (table, first_id, second_id),
-                        first_id_value, second_id_value)
-    if rows:
-        print 'Deleted %s duplicate rows from %s' % (len(rows), table)
-
-
-def delete_invalid_foriegn_keys(manager, pivot_table, foreign_key_field,
-                                destination_table):
-    manager.execute(
-        'DELETE %(table)s.* FROM %(table)s '
-        'LEFT JOIN %(destination_table)s '
-        'ON %(table)s.%(field)s = %(destination_table)s.id '
-        'WHERE %(destination_table)s.id IS NULL' %
-        dict(table=pivot_table, field=foreign_key_field,
-             destination_table=destination_table))
-    deleted_count = manager._database.rowcount
-    if deleted_count:
-        print ('Deleted %s invalid foreign key references from %s (%s)' %
-               (deleted_count, pivot_table, foreign_key_field))
-
-
-def unique_index_name(table):
-    return table + '_both_ids'
-
-
-def basic_index_name(table, field):
-    if field == 'aclgroup_id':
-        field = 'acl_group_id'
-    return table + '_' + field
-
-
-def create_unique_index(manager, pivot_table, first_field, second_field):
-    index_name = unique_index_name(pivot_table)
-    manager.execute('CREATE UNIQUE INDEX %s ON %s (%s, %s)' %
-                    (index_name, pivot_table, first_field, second_field))
-
-    # these indices are in the migrations but may not exist for historical
-    # reasons
-    old_index_name = basic_index_name(pivot_table, first_field)
-    execute_safely(manager, 'DROP INDEX %s ON %s' %
-                   (old_index_name, pivot_table))
-
-
-def drop_unique_index(manager, pivot_table, first_field):
-    index_name = unique_index_name(pivot_table)
-    manager.execute('DROP INDEX %s ON %s' % (index_name, pivot_table))
-
-    old_index_name = basic_index_name(pivot_table, first_field)
-    manager.execute('CREATE INDEX %s ON %s (%s)' %
-                    (old_index_name, pivot_table, first_field))
-
-
-def foreign_key_name(table, field):
-    return '_'.join([table, field, 'fk'])
-
-
-def create_foreign_key_constraint(manager, table, field, destination_table):
-    key_name = foreign_key_name(table, field)
-    manager.execute('ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) '
-                    'REFERENCES %s (id) ON DELETE NO ACTION' %
-                    (table, key_name, field, destination_table))
-
-
-def drop_foreign_key_constraint(manager, table, field):
-    key_name = foreign_key_name(table, field)
-    manager.execute('ALTER TABLE %s DROP FOREIGN KEY %s' % (table, key_name))
-
-
-def cleanup_m2m_pivot(manager, pivot_table, first_field, first_table,
-                      second_field, second_table, create_unique):
-    delete_duplicates(manager, pivot_table, first_field, second_field)
-    delete_invalid_foriegn_keys(manager, pivot_table, first_field, first_table)
-    delete_invalid_foriegn_keys(manager, pivot_table, second_field,
-                                second_table)
-
-    if create_unique:
-        # first field is the more commonly used one, so we'll replace the
-        # less-commonly-used index with the larger unique index
-        create_unique_index(manager, pivot_table, second_field, first_field)
-
-    create_foreign_key_constraint(manager, pivot_table, first_field,
-                                  first_table)
-    create_foreign_key_constraint(manager, pivot_table, second_field,
-                                  second_table)
-
-
-def reverse_cleanup_m2m_pivot(manager, pivot_table, first_field, second_field,
-                              drop_unique):
-    drop_foreign_key_constraint(manager, pivot_table, second_field)
-    drop_foreign_key_constraint(manager, pivot_table, first_field)
-    if drop_unique:
-        drop_unique_index(manager, pivot_table, second_field)
-
-
-TABLES = (
-        ('hosts_labels', 'host_id', 'hosts', 'label_id', 'labels', True),
-        ('acl_groups_hosts', 'host_id', 'hosts', 'aclgroup_id', 'acl_groups',
-         True),
-        ('acl_groups_users', 'user_id', 'users', 'aclgroup_id', 'acl_groups',
-         True),
-        ('autotests_dependency_labels', 'test_id', 'autotests', 'label_id',
-         'labels', False),
-        ('jobs_dependency_labels', 'job_id', 'jobs', 'label_id', 'labels',
-         False),
-        ('ineligible_host_queues', 'job_id', 'jobs', 'host_id', 'hosts', True),
-    )
-
-
-def migrate_up(manager):
-    for (table, first_field, first_table, second_field, second_table,
-         create_unique) in TABLES:
-        cleanup_m2m_pivot(manager, table, first_field, first_table,
-                          second_field, second_table, create_unique)
-
-
-def migrate_down(manager):
-    for (table, first_field, first_table, second_field, second_table,
-         drop_unique) in reversed(TABLES):
-        reverse_cleanup_m2m_pivot(manager, table, first_field, second_field,
-                                  drop_unique)
diff --git a/frontend/migrations/038_add_hqe_id_and_foreign_keys_to_special_tasks.py b/frontend/migrations/038_add_hqe_id_and_foreign_keys_to_special_tasks.py
deleted file mode 100644
index 46c0960..0000000
--- a/frontend/migrations/038_add_hqe_id_and_foreign_keys_to_special_tasks.py
+++ /dev/null
@@ -1,32 +0,0 @@
-UP_SQL = """
-UPDATE special_tasks
-SET task = 'Verify'
-WHERE task = 'Reverify';
-
-ALTER TABLE special_tasks
-ADD COLUMN time_started DATETIME;
-
-ALTER TABLE special_tasks
-ADD COLUMN log_file VARCHAR(45) NOT NULL DEFAULT '';
-
-ALTER TABLE special_tasks
-ADD COLUMN queue_entry_id INT;
-
-ALTER TABLE special_tasks
-ADD CONSTRAINT special_tasks_to_hosts_ibfk FOREIGN KEY
-(host_id) REFERENCES hosts(id);
-
-ALTER TABLE special_tasks
-ADD CONSTRAINT special_tasks_to_host_queue_entries_ibfk
-FOREIGN KEY special_tasks_host_queue_entry_id
-(queue_entry_id) REFERENCES host_queue_entries(id);
-"""
-
-DOWN_SQL = """
-ALTER TABLE special_tasks DROP FOREIGN KEY
-    special_tasks_to_host_queue_entries_ibfk;
-ALTER TABLE special_tasks DROP FOREIGN KEY special_tasks_to_hosts_ibfk;
-ALTER TABLE special_tasks DROP COLUMN queue_entry_id;
-ALTER TABLE special_tasks DROP COLUMN log_file;
-ALTER TABLE special_tasks DROP COLUMN time_started;
-"""
diff --git a/frontend/migrations/039_remove_special_tasks_log_file.py b/frontend/migrations/039_remove_special_tasks_log_file.py
deleted file mode 100644
index 4f5aa59..0000000
--- a/frontend/migrations/039_remove_special_tasks_log_file.py
+++ /dev/null
@@ -1,5 +0,0 @@
-UP_SQL = 'ALTER TABLE special_tasks DROP COLUMN log_file'
-
-DOWN_SQL = """
-ALTER TABLE special_tasks ADD COLUMN log_file VARCHAR(45) NOT NULL DEFAULT ''
-"""
diff --git a/frontend/migrations/040_add_foreign_keys.py b/frontend/migrations/040_add_foreign_keys.py
deleted file mode 100644
index 7b021fc..0000000
--- a/frontend/migrations/040_add_foreign_keys.py
+++ /dev/null
@@ -1,75 +0,0 @@
-UP_SQL = """
-ALTER TABLE hosts
-ADD CONSTRAINT hosts_locked_by_fk FOREIGN KEY
-(locked_by_id) REFERENCES users(id)
-ON DELETE NO ACTION;
-
-ALTER TABLE host_queue_entries
-ADD CONSTRAINT host_queue_entries_job_id_fk FOREIGN KEY
-(job_id) REFERENCES jobs(id)
-ON DELETE NO ACTION;
-
-INSERT INTO hosts (hostname, invalid, protection, dirty)
-VALUES ('__missing_host__', 1, 0, 1);
-
-UPDATE host_queue_entries AS hqe
-    LEFT OUTER JOIN hosts ON (hqe.host_id = hosts.id)
-SET hqe.host_id = (SELECT id FROM hosts WHERE hostname = '__missing_host__')
-WHERE hqe.host_id IS NOT NULL AND hosts.id IS NULL;
-
-ALTER TABLE host_queue_entries
-ADD CONSTRAINT host_queue_entries_host_id_fk FOREIGN KEY
-(host_id) REFERENCES hosts(id)
-ON DELETE NO ACTION;
-
-ALTER TABLE host_queue_entries
-ADD CONSTRAINT host_queue_entries_meta_host_fk FOREIGN KEY
-(meta_host) REFERENCES labels(id)
-ON DELETE NO ACTION;
-
-ALTER TABLE aborted_host_queue_entries
-ADD CONSTRAINT aborted_host_queue_entries_queue_entry_id_fk FOREIGN KEY
-(queue_entry_id) REFERENCES host_queue_entries(id)
-ON DELETE NO ACTION;
-
-ALTER TABLE aborted_host_queue_entries
-ADD CONSTRAINT aborted_host_queue_entries_aborted_by_id_fk FOREIGN KEY
-(aborted_by_id) REFERENCES users(id)
-ON DELETE NO ACTION;
-
-ALTER TABLE recurring_run
-ADD CONSTRAINT recurring_run_job_id_fk FOREIGN KEY
-(job_id) REFERENCES jobs(id)
-ON DELETE NO ACTION;
-
-ALTER TABLE recurring_run
-ADD CONSTRAINT recurring_run_owner_id_fk FOREIGN KEY
-(owner_id) REFERENCES users(id)
-ON DELETE NO ACTION;
-"""
-
-DOWN_SQL = """
-ALTER TABLE hosts
-DROP FOREIGN KEY hosts_locked_by_fk;
-
-ALTER TABLE host_queue_entries
-DROP FOREIGN KEY host_queue_entries_job_id_fk;
-
-ALTER TABLE host_queue_entries
-DROP FOREIGN KEY host_queue_entries_host_id_fk;
-
-ALTER TABLE host_queue_entries
-DROP FOREIGN KEY host_queue_entries_meta_host_fk;
-
-ALTER TABLE aborted_host_queue_entries
-DROP FOREIGN KEY aborted_host_queue_entries_queue_entry_id_fk;
-
-ALTER TABLE aborted_host_queue_entries
-DROP FOREIGN KEY aborted_host_queue_entries_aborted_by_id_fk;
-
-ALTER TABLE recurring_run
-DROP FOREIGN KEY recurring_run_job_id_fk;
-
-ALTER TABLE recurring_run
-DROP FOREIGN KEY recurring_run_owner_id_fk;
-"""
diff --git a/frontend/migrations/041_add_special_task_success.py b/frontend/migrations/041_add_special_task_success.py
deleted file mode 100644
index 2000088..0000000
--- a/frontend/migrations/041_add_special_task_success.py
+++ /dev/null
@@ -1,14 +0,0 @@
-UP_SQL = """
-ALTER TABLE special_tasks
-ADD COLUMN success TINYINT(1)
-NOT NULL DEFAULT 0;
-
-UPDATE special_tasks
-SET success = 1
-WHERE is_complete = 1;
-"""
-
-DOWN_SQL = """
-ALTER TABLE special_tasks
-DROP COLUMN success;
-"""
diff --git a/frontend/migrations/042_unique_index_on_hqe_job_and_host.py b/frontend/migrations/042_unique_index_on_hqe_job_and_host.py
deleted file mode 100644
index d019c73..0000000
--- a/frontend/migrations/042_unique_index_on_hqe_job_and_host.py
+++ /dev/null
@@ -1,56 +0,0 @@
-UP_SQL = """
-CREATE UNIQUE INDEX host_queue_entries_job_id_and_host_id
-ON host_queue_entries (job_id, host_id);
-
-DROP INDEX host_queue_entries_job_id ON host_queue_entries;
-"""
-
-
-DOWN_SQL = """
-CREATE INDEX host_queue_entries_job_id ON host_queue_entries (job_id);
-
-DROP INDEX host_queue_entries_job_id_and_host_id ON host_queue_entries;
-"""
-
-
-def null_out_duplicate_hqes(manager, hqe_ids):
-    if not hqe_ids:
-        return
-    ids_to_null_string = ','.join(str(hqe_id) for hqe_id in hqe_ids)
-
-    # check if any of the HQEs we're going to null out are active. if so, it's
-    # too dangerous to proceed.
-    rows = manager.execute('SELECT id FROM host_queue_entries '
-                           'WHERE active AND id IN (%s)' % ids_to_null_string)
-    if rows:
-        raise Exception('Active duplicate HQEs exist, cannot proceed.  Please '
-                        'manually abort these HQE IDs: %s' % ids_to_null_string)
-
-    # go ahead and null them out
-    print 'Nulling out duplicate HQE IDs: %s' % ids_to_null_string
-    manager.execute('UPDATE host_queue_entries '
-                    'SET host_id = NULL, active = FALSE, complete = TRUE, '
-                    'aborted = TRUE, status = "Aborted" '
-                    'WHERE id IN (%s)' % ids_to_null_string)
-
-
-def migrate_up(manager):
-    # cleanup duplicate host_queue_entries. rather than deleting them (and
-    # dealing with foreign key references), we'll just null out their host_ids
-    # and set them to aborted.
-    rows = manager.execute('SELECT GROUP_CONCAT(id), COUNT(1) AS count '
-                           'FROM host_queue_entries '
-                           'WHERE host_id IS NOT NULL '
-                           'GROUP BY job_id, host_id HAVING count > 1')
-    # gather all the HQE IDs we want to null out
-    ids_to_null = []
-    for ids_string, _ in rows:
-        id_list = ids_string.split(',')
-        # null out all but the first one.  this isn't terribly important, but
-        # the first one is the most likely to have actually executed, so might
-        # as well keep that one.
-        ids_to_null.extend(id_list[1:])
-
-    null_out_duplicate_hqes(manager, ids_to_null)
-
-    manager.execute_script(UP_SQL)
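The cleanup in the deleted 042 migration uses GROUP_CONCAT so that each duplicated (job_id, host_id) pair comes back as a single comma-separated string of entry IDs; the first ID is kept and the rest are queued for nulling. That selection step is ordinary string handling and can be sketched standalone (the sample strings are invented):

    # Each string is one GROUP_CONCAT(id) result for a duplicated
    # (job_id, host_id) pair.
    grouped_ids = ['101,154,203', '87,88']

    ids_to_null = []
    for ids_string in grouped_ids:
        id_list = ids_string.split(',')
        # Keep the first entry (the one most likely to have actually run)
        # and mark the rest to be nulled out and aborted.
        ids_to_null.extend(id_list[1:])

    print(ids_to_null)  # ['154', '203', '88']

The migration then refuses to proceed if any of those IDs belong to an active entry, which is the safe choice given that the cleanup is not reversible.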
diff --git a/frontend/migrations/043_add_special_task_requested_by.py b/frontend/migrations/043_add_special_task_requested_by.py
deleted file mode 100644
index 9cc67ca..0000000
--- a/frontend/migrations/043_add_special_task_requested_by.py
+++ /dev/null
@@ -1,11 +0,0 @@
-UP_SQL = """
-ALTER TABLE special_tasks ADD requested_by_id integer;
-
-ALTER TABLE special_tasks ADD CONSTRAINT special_tasks_requested_by_id
-        FOREIGN KEY (requested_by_id) REFERENCES users (id) ON DELETE NO ACTION;
-"""
-
-DOWN_SQL = """
-ALTER TABLE special_tasks DROP FOREIGN KEY special_tasks_requested_by_id;
-ALTER TABLE special_tasks DROP COLUMN requested_by_id;
-"""
diff --git a/frontend/migrations/044_rename_afe_tables.py b/frontend/migrations/044_rename_afe_tables.py
deleted file mode 100644
index 3c01ba4..0000000
--- a/frontend/migrations/044_rename_afe_tables.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import common
-from autotest_lib.database import db_utils
-
-
-ORIG_NAMES = (
-        'aborted_host_queue_entries',
-        'acl_groups',
-        'acl_groups_hosts',
-        'acl_groups_users',
-        'atomic_groups',
-        'autotests',
-        'autotests_dependency_labels',
-        'host_attributes',
-        'host_queue_entries',
-        'hosts',
-        'hosts_labels',
-        'ineligible_host_queues',
-        'jobs',
-        'jobs_dependency_labels',
-        'labels',
-        'profilers',
-        'recurring_run',
-        'special_tasks',
-        'users',
-        )
-
-RENAMES_UP = dict((name, 'afe_' + name) for name in ORIG_NAMES)
-
-RENAMES_DOWN = dict((value, key) for key, value in RENAMES_UP.iteritems())
-
-
-def migrate_up(manager):
-    db_utils.rename(manager, RENAMES_UP)
-
-
-def migrate_down(manager):
-    db_utils.rename(manager, RENAMES_DOWN)
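A small point about the deleted 044 migration: both rename maps are derived mechanically from one list of table names, and the down map is simply the inverted dict (the original spells the inversion with Python 2's iteritems). The construction is standalone Python; the name list below is abbreviated for illustration.

    ORIG_NAMES = ('hosts', 'jobs', 'labels')  # abbreviated list, for illustration

    RENAMES_UP = dict((name, 'afe_' + name) for name in ORIG_NAMES)
    # Invert the mapping to get the migrate_down direction.
    RENAMES_DOWN = dict((new, old) for old, new in RENAMES_UP.items())

    print(RENAMES_UP)    # {'hosts': 'afe_hosts', 'jobs': 'afe_jobs', 'labels': 'afe_labels'}
    print(RENAMES_DOWN)  # {'afe_hosts': 'hosts', 'afe_jobs': 'jobs', 'afe_labels': 'labels'}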
diff --git a/frontend/migrations/045_test_planner_framework.py b/frontend/migrations/045_test_planner_framework.py
deleted file mode 100644
index 568cbec..0000000
--- a/frontend/migrations/045_test_planner_framework.py
+++ /dev/null
@@ -1,255 +0,0 @@
-import common
-from autotest_lib.database import migrate
-
-UP_SQL = """\
-BEGIN;
-
-SET storage_engine = InnoDB;
-
-CREATE TABLE `planner_plans` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `name` varchar(255) NOT NULL UNIQUE,
-    `label_override` varchar(255) NULL,
-    `support` longtext NOT NULL,
-    `complete` bool NOT NULL,
-    `dirty` bool NOT NULL,
-    `initialized` bool NOT NULL
-)
-;
-
-
-CREATE TABLE `planner_hosts` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `plan_id` integer NOT NULL,
-    `host_id` integer NOT NULL,
-    `complete` bool NOT NULL,
-    `blocked` bool NOT NULL
-)
-;
-ALTER TABLE `planner_hosts` ADD CONSTRAINT hosts_plan_id_fk FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`);
-ALTER TABLE `planner_hosts` ADD CONSTRAINT hosts_host_id_fk FOREIGN KEY (`host_id`) REFERENCES `afe_hosts` (`id`);
-
-
-CREATE TABLE `planner_test_control_files` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `the_hash` varchar(40) NOT NULL UNIQUE,
-    `contents` longtext NOT NULL
-)
-;
-
-
-CREATE TABLE `planner_tests` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `plan_id` integer NOT NULL,
-    `control_file_id` integer NOT NULL,
-    `execution_order` integer NOT NULL
-)
-;
-ALTER TABLE `planner_tests` ADD CONSTRAINT tests_plan_id_fk FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`);
-ALTER TABLE `planner_tests` ADD CONSTRAINT tests_control_file_id_fk FOREIGN KEY (`control_file_id`) REFERENCES `planner_test_control_files` (`id`);
-
-
-CREATE TABLE `planner_test_jobs` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `plan_id` integer NOT NULL,
-    `test_id` integer NOT NULL,
-    `afe_job_id` integer NOT NULL
-)
-;
-ALTER TABLE `planner_test_jobs` ADD CONSTRAINT test_jobs_plan_id_fk FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`);
-ALTER TABLE `planner_test_jobs` ADD CONSTRAINT test_jobs_test_id_fk FOREIGN KEY (`test_id`) REFERENCES `planner_tests` (`id`);
-ALTER TABLE `planner_test_jobs` ADD CONSTRAINT test_jobs_afe_job_id_fk FOREIGN KEY (`afe_job_id`) REFERENCES `afe_jobs` (`id`);
-CREATE TABLE `planner_bugs` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `external_uid` varchar(255) NOT NULL UNIQUE
-)
-;
-
-
-CREATE TABLE `planner_test_runs` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `plan_id` integer NOT NULL,
-    `test_job_id` integer NOT NULL,
-    `tko_test_id` integer(10) UNSIGNED NOT NULL,
-    `status` varchar(16) NOT NULL,
-    `finalized` bool NOT NULL,
-    `seen` bool NOT NULL,
-    `triaged` bool NOT NULL
-)
-;
-ALTER TABLE `planner_test_runs` ADD CONSTRAINT test_runs_plan_id_fk FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`);
-ALTER TABLE `planner_test_runs` ADD CONSTRAINT test_runs_test_job_id_fk FOREIGN KEY (`test_job_id`) REFERENCES `planner_test_jobs` (`id`);
-ALTER TABLE `planner_test_runs` ADD CONSTRAINT test_runs_tko_test_id_fk FOREIGN KEY (`tko_test_id`) REFERENCES `%(tko_db_name)s`.`tko_tests` (`test_idx`);
-
-
-CREATE TABLE `planner_data_types` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `name` varchar(255) NOT NULL,
-    `db_table` varchar(255) NOT NULL
-)
-;
-
-
-CREATE TABLE `planner_history` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `plan_id` integer NOT NULL,
-    `action_id` integer NOT NULL,
-    `user_id` integer NOT NULL,
-    `data_type_id` integer NOT NULL,
-    `object_id` integer NOT NULL,
-    `old_object_repr` longtext NOT NULL,
-    `new_object_repr` longtext NOT NULL,
-    `time` datetime NOT NULL
-)
-;
-ALTER TABLE `planner_history` ADD CONSTRAINT history_plan_id_fk FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`);
-ALTER TABLE `planner_history` ADD CONSTRAINT history_user_id_fk FOREIGN KEY (`user_id`) REFERENCES `afe_users` (`id`);
-ALTER TABLE `planner_history` ADD CONSTRAINT history_data_type_id_fk FOREIGN KEY (`data_type_id`) REFERENCES `planner_data_types` (`id`);
-
-
-CREATE TABLE `planner_saved_objects` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `user_id` integer NOT NULL,
-    `type` varchar(16) NOT NULL,
-    `name` varchar(255) NOT NULL,
-    `encoded_object` longtext NOT NULL,
-    UNIQUE (`user_id`, `type`, `name`)
-)
-;
-ALTER TABLE `planner_saved_objects` ADD CONSTRAINT saved_objects_user_id_fk FOREIGN KEY (`user_id`) REFERENCES `afe_users` (`id`);
-
-
-CREATE TABLE `planner_custom_queries` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `plan_id` integer NOT NULL,
-    `query` longtext NOT NULL
-)
-;
-ALTER TABLE `planner_custom_queries` ADD CONSTRAINT custom_queries_plan_id_fk FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`);
-
-
-CREATE TABLE `planner_keyvals` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `the_hash` varchar(40) NOT NULL UNIQUE,
-    `key` varchar(1024) NOT NULL,
-    `value` varchar(1024) NOT NULL
-)
-;
-
-
-CREATE TABLE `planner_autoprocess` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `plan_id` integer NOT NULL,
-    `condition` longtext NOT NULL,
-    `enabled` bool NOT NULL,
-    `reason_override` varchar(255) NULL
-)
-;
-ALTER TABLE `planner_autoprocess` ADD CONSTRAINT autoprocess_plan_id_fk FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`);
-
-
-CREATE TABLE `planner_plan_owners` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `plan_id` integer NOT NULL,
-    `user_id` integer NOT NULL,
-    UNIQUE (`plan_id`, `user_id`)
-)
-;
-ALTER TABLE `planner_plan_owners` ADD CONSTRAINT plan_owners_plan_id_fk FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`);
-ALTER TABLE `planner_plan_owners` ADD CONSTRAINT plan_owners_user_id_fk FOREIGN KEY (`user_id`) REFERENCES `afe_users` (`id`);
-
-
-CREATE TABLE `planner_test_run_bugs` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `testrun_id` integer NOT NULL,
-    `bug_id` integer NOT NULL,
-    UNIQUE (`testrun_id`, `bug_id`)
-)
-;
-ALTER TABLE `planner_test_run_bugs` ADD CONSTRAINT test_run_bugs_testrun_id_fk FOREIGN KEY (`testrun_id`) REFERENCES `planner_test_runs` (`id`);
-ALTER TABLE `planner_test_run_bugs` ADD CONSTRAINT test_run_bugs_bug_id_fk FOREIGN KEY (`bug_id`) REFERENCES `planner_bugs` (`id`);
-
-
-CREATE TABLE `planner_autoprocess_labels` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `autoprocess_id` integer NOT NULL,
-    `testlabel_id` integer NOT NULL,
-    UNIQUE (`autoprocess_id`, `testlabel_id`)
-)
-;
-ALTER TABLE `planner_autoprocess_labels` ADD CONSTRAINT autoprocess_labels_autoprocess_id_fk FOREIGN KEY (`autoprocess_id`) REFERENCES `planner_autoprocess` (`id`);
-ALTER TABLE `planner_autoprocess_labels` ADD CONSTRAINT autoprocess_labels_testlabel_id_fk FOREIGN KEY (`testlabel_id`) REFERENCES `%(tko_db_name)s`.`tko_test_labels` (`id`);
-
-
-CREATE TABLE `planner_autoprocess_keyvals` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `autoprocess_id` integer NOT NULL,
-    `keyval_id` integer NOT NULL,
-    UNIQUE (`autoprocess_id`, `keyval_id`)
-)
-;
-ALTER TABLE `planner_autoprocess_keyvals` ADD CONSTRAINT autoprocess_keyvals_autoprocess_id_fk FOREIGN KEY (`autoprocess_id`) REFERENCES `planner_autoprocess` (`id`);
-ALTER TABLE `planner_autoprocess_keyvals` ADD CONSTRAINT autoprocess_keyvals_keyval_id_fk FOREIGN KEY (`keyval_id`) REFERENCES `planner_keyvals` (`id`);
-
-
-CREATE TABLE `planner_autoprocess_bugs` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `autoprocess_id` integer NOT NULL,
-    `bug_id` integer NOT NULL,
-    UNIQUE (`autoprocess_id`, `bug_id`)
-)
-;
-ALTER TABLE `planner_autoprocess_bugs` ADD CONSTRAINT autoprocess_bugs_autoprocess_id_fk FOREIGN KEY (`autoprocess_id`) REFERENCES `planner_autoprocess` (`id`);
-ALTER TABLE `planner_autoprocess_bugs` ADD CONSTRAINT autoprocess_bugs_bug_id_fk FOREIGN KEY (`bug_id`) REFERENCES `planner_bugs` (`id`);
-
-
-CREATE INDEX `planner_hosts_plan_id` ON `planner_hosts` (`plan_id`);
-CREATE INDEX `planner_hosts_host_id` ON `planner_hosts` (`host_id`);
-CREATE INDEX `planner_tests_plan_id` ON `planner_tests` (`plan_id`);
-CREATE INDEX `planner_tests_control_file_id` ON `planner_tests` (`control_file_id`);
-CREATE INDEX `planner_test_jobs_plan_id` ON `planner_test_jobs` (`plan_id`);
-CREATE INDEX `planner_test_jobs_test_id` ON `planner_test_jobs` (`test_id`);
-CREATE INDEX `planner_test_jobs_afe_job_id` ON `planner_test_jobs` (`afe_job_id`);
-CREATE INDEX `planner_test_runs_plan_id` ON `planner_test_runs` (`plan_id`);
-CREATE INDEX `planner_test_runs_test_job_id` ON `planner_test_runs` (`test_job_id`);
-CREATE INDEX `planner_test_runs_tko_test_id` ON `planner_test_runs` (`tko_test_id`);
-CREATE INDEX `planner_history_plan_id` ON `planner_history` (`plan_id`);
-CREATE INDEX `planner_history_user_id` ON `planner_history` (`user_id`);
-CREATE INDEX `planner_history_data_type_id` ON `planner_history` (`data_type_id`);
-CREATE INDEX `planner_saved_objects_user_id` ON `planner_saved_objects` (`user_id`);
-CREATE INDEX `planner_custom_queries_plan_id` ON `planner_custom_queries` (`plan_id`);
-CREATE INDEX `planner_autoprocess_plan_id` ON `planner_autoprocess` (`plan_id`);
-
-COMMIT;
-"""
-
-DOWN_SQL = """\
-DROP TABLE IF EXISTS planner_autoprocess_labels;
-DROP TABLE IF EXISTS planner_autoprocess_bugs;
-DROP TABLE IF EXISTS planner_autoprocess_keyvals;
-DROP TABLE IF EXISTS planner_autoprocess;
-DROP TABLE IF EXISTS planner_custom_queries;
-DROP TABLE IF EXISTS planner_saved_objects;
-DROP TABLE IF EXISTS planner_history;
-DROP TABLE IF EXISTS planner_data_types;
-DROP TABLE IF EXISTS planner_hosts;
-DROP TABLE IF EXISTS planner_keyvals;
-DROP TABLE IF EXISTS planner_plan_owners;
-DROP TABLE IF EXISTS planner_test_run_bugs;
-DROP TABLE IF EXISTS planner_test_runs;
-DROP TABLE IF EXISTS planner_test_jobs;
-DROP TABLE IF EXISTS planner_tests;
-DROP TABLE IF EXISTS planner_test_control_files;
-DROP TABLE IF EXISTS planner_bugs;
-DROP TABLE IF EXISTS planner_plans;
-"""
-
-
-def migrate_up(manager):
-    tko_manager = migrate.get_migration_manager(db_name='TKO', debug=False,
-                                                force=False)
-    if tko_manager.get_db_version() < 31:
-        raise Exception('You must update the TKO database to at least version '
-                        '31 before applying AUTOTEST_WEB migration 45')
-
-    manager.execute_script(UP_SQL % dict(tko_db_name=tko_manager.get_db_name()))
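Two details of the deleted 045 migration are easy to miss: UP_SQL is a %-template because the planner tables carry foreign keys into the TKO database, whose name is only known at run time, and the migration refuses to run until TKO is at schema version 31, the version that provides the tko_-prefixed table names it references. A minimal sketch of that guard-and-interpolate shape follows; the function and stub values here are invented for illustration, only the template syntax mirrors the SQL above.

    # One representative statement from the template; the real UP_SQL holds
    # the whole planner schema.
    UP_SQL_TEMPLATE = (
        'ALTER TABLE planner_test_runs '
        'ADD CONSTRAINT test_runs_tko_test_id_fk FOREIGN KEY (tko_test_id) '
        'REFERENCES `%(tko_db_name)s`.`tko_tests` (`test_idx`);')


    def render_up_sql(tko_db_version, tko_db_name, minimum_version=31):
        # Refuse to emit SQL that references tables an older TKO schema
        # does not have yet.
        if tko_db_version < minimum_version:
            raise Exception('You must update the TKO database to at least '
                            'version %d first' % minimum_version)
        return UP_SQL_TEMPLATE % dict(tko_db_name=tko_db_name)


    print(render_up_sql(31, 'tko'))  # 'tko' is a placeholder database name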
diff --git a/frontend/migrations/046_merge_databases.py b/frontend/migrations/046_merge_databases.py
deleted file mode 100644
index f73dc74..0000000
--- a/frontend/migrations/046_merge_databases.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import common
-from autotest_lib.database import db_utils, migrate
-
-TKO_MIGRATION_NAME = '031_rename_tko_tables'
-migrations_module = __import__('autotest_lib.tko.migrations', globals(),
-                               locals(), [TKO_MIGRATION_NAME])
-tko_migration = getattr(migrations_module, TKO_MIGRATION_NAME)
-
-TABLE_NAMES = tko_migration.RENAMES_UP.values()
-
-
-def migrate_up(manager):
-    tko_manager = migrate.get_migration_manager(db_name='TKO', debug=False,
-                                                force=False)
-    if tko_manager.get_db_version() < 31:
-        raise Exception('You must update the TKO database to at least version '
-                        '31 before applying AUTOTEST_WEB migration 46')
-
-    if manager.simulate:
-        tko_manager.initialize_and_fill_test_db()
-
-    if not manager.force:
-        response = raw_input(
-                'This migration will merge the autotest_web and tko databases. '
-                'Following the migration, the tko database will be dropped. '
-                'Any user-added tables in tko will NOT be migrated. This '
-                'migration is NOT reversible. Are you sure you want to '
-                'continue? (yes/no) ')
-        if response != 'yes':
-            raise Exception('User has chosen to abort migration')
-
-    db_utils.move_tables(manager, tko_manager, TABLE_NAMES)
-    db_utils.drop_database(tko_manager)
-    manager.execute_script(tko_migration.RECREATE_VIEWS_UP)
-
-
-def migrate_down(manager):
-    raise Exception('Migration 46 is not reversible!')
diff --git a/frontend/migrations/047_job_keyvals.py b/frontend/migrations/047_job_keyvals.py
deleted file mode 100644
index d587fd8..0000000
--- a/frontend/migrations/047_job_keyvals.py
+++ /dev/null
@@ -1,28 +0,0 @@
-UP_SQL = """
-CREATE TABLE `afe_job_keyvals` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `job_id` integer NOT NULL,
-    INDEX `afe_job_keyvals_job_id` (`job_id`),
-    FOREIGN KEY (`job_id`) REFERENCES `afe_jobs` (`id`) ON DELETE NO ACTION,
-    `key` varchar(90) NOT NULL,
-    INDEX `afe_job_keyvals_key` (`key`),
-    `value` varchar(300) NOT NULL
-) ENGINE=InnoDB;
-
-CREATE TABLE `tko_job_keyvals` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `job_id` int(10) unsigned NOT NULL,
-    INDEX `tko_job_keyvals_job_id` (`job_id`),
-    FOREIGN KEY (`job_id`) REFERENCES `tko_jobs` (`job_idx`)
-        ON DELETE NO ACTION,
-    `key` varchar(90) NOT NULL,
-    INDEX `tko_job_keyvals_key` (`key`),
-    `value` varchar(300) NOT NULL
-) ENGINE=InnoDB;
-"""
-
-
-DOWN_SQL = """
-DROP TABLE afe_job_keyvals;
-DROP TABLE tko_job_keyvals;
-"""
diff --git a/frontend/migrations/048_expand_label_name_field.py b/frontend/migrations/048_expand_label_name_field.py
deleted file mode 100644
index 0fcf427..0000000
--- a/frontend/migrations/048_expand_label_name_field.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_labels MODIFY name varchar(750) default NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_labels MODIFY name varchar(255) default NULL;
-"""
diff --git a/frontend/migrations/049_test_planner_additions.py b/frontend/migrations/049_test_planner_additions.py
deleted file mode 100644
index 21fc8ea..0000000
--- a/frontend/migrations/049_test_planner_additions.py
+++ /dev/null
@@ -1,36 +0,0 @@
-UP_SQL = """\
-BEGIN;
-
-SET storage_engine = InnoDB;
-
-CREATE TABLE `planner_plan_host_labels` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `plan_id` integer NOT NULL,
-    `label_id` integer NOT NULL
-)
-;
-ALTER TABLE `planner_plan_host_labels` ADD CONSTRAINT plan_host_labels_plan_id_fk FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`);
-ALTER TABLE `planner_plan_host_labels` ADD CONSTRAINT plan_host_labels_label_id_fk FOREIGN KEY (`label_id`) REFERENCES `afe_labels` (`id`);
-
-
-ALTER TABLE `planner_tests` ADD COLUMN `alias` varchar(255) NOT NULL;
-ALTER TABLE `planner_tests` ADD CONSTRAINT `tests_plan_id_alias_unique` UNIQUE KEY (`plan_id`, `alias`);
-
-
-ALTER TABLE `planner_tests` ADD COLUMN `estimated_runtime` int NOT NULL;
-
-
-ALTER TABLE `planner_test_runs` ADD COLUMN `host_id` int NOT NULL;
-ALTER TABLE `planner_test_runs` ADD CONSTRAINT `test_runs_host_id_fk` FOREIGN KEY (`host_id`) REFERENCES `planner_hosts` (`id`);
-
-COMMIT;
-"""
-
-DOWN_SQL = """\
-ALTER TABLE `planner_tests` DROP KEY `tests_plan_id_alias_unique`;
-ALTER TABLE `planner_tests` DROP COLUMN `alias`;
-ALTER TABLE `planner_tests` DROP COLUMN `estimated_runtime`;
-ALTER TABLE `planner_test_runs` DROP FOREIGN KEY `test_runs_host_id_fk`;
-ALTER TABLE `planner_test_runs` DROP COLUMN `host_id`;
-DROP TABLE IF EXISTS `planner_plan_host_labels`;
-"""
diff --git a/frontend/migrations/050_more_test_planner_additions.py b/frontend/migrations/050_more_test_planner_additions.py
deleted file mode 100644
index 63c4629..0000000
--- a/frontend/migrations/050_more_test_planner_additions.py
+++ /dev/null
@@ -1,13 +0,0 @@
-UP_SQL = """
-ALTER TABLE `planner_test_runs` ADD CONSTRAINT `test_runs_unique` UNIQUE KEY (`plan_id`, `test_job_id`, `tko_test_id`, `host_id`);
-
-ALTER TABLE `planner_tests` ADD COLUMN `is_server` tinyint(1) DEFAULT 1;
-
-ALTER TABLE `planner_hosts` ADD COLUMN `added_by_label` tinyint(1) DEFAULT 0;
-"""
-
-DOWN_SQL = """
-ALTER TABLE `planner_hosts` DROP COLUMN `added_by_label`;
-ALTER TABLE `planner_tests` DROP COLUMN `is_server`;
-ALTER TABLE `planner_test_runs` DROP KEY `test_runs_unique`;
-"""
diff --git a/frontend/migrations/051_rename_planner_tests.py b/frontend/migrations/051_rename_planner_tests.py
deleted file mode 100644
index 25214a6..0000000
--- a/frontend/migrations/051_rename_planner_tests.py
+++ /dev/null
@@ -1,27 +0,0 @@
-UP_SQL = """
-ALTER TABLE `planner_tests` RENAME TO `planner_test_configs`;
-
-ALTER TABLE `planner_test_jobs` DROP FOREIGN KEY `test_jobs_test_id_fk`;
-ALTER TABLE `planner_test_jobs` DROP KEY `planner_test_jobs_test_id`;
-
-ALTER TABLE `planner_test_jobs` CHANGE COLUMN
-`test_id` `test_config_id` INT NOT NULL;
-
-ALTER TABLE `planner_test_jobs` ADD CONSTRAINT `test_jobs_test_config_id_fk`
-FOREIGN KEY `planner_test_jobs_test_config_id`
-(`test_config_id`) REFERENCES `planner_test_configs` (`id`);
-"""
-
-DOWN_SQL = """
-ALTER TABLE `planner_test_configs` RENAME TO `planner_tests`;
-
-ALTER TABLE `planner_test_jobs` DROP FOREIGN KEY `test_jobs_test_config_id_fk`;
-ALTER TABLE `planner_test_jobs` DROP KEY `planner_test_jobs_test_config_id`;
-
-ALTER TABLE `planner_test_jobs` CHANGE COLUMN
-`test_config_id` `test_id` INT NOT NULL;
-
-ALTER TABLE `planner_test_jobs` ADD CONSTRAINT `test_jobs_test_id_fk`
-FOREIGN KEY `planner_test_jobs_test_id`
-(`test_id`) REFERENCES `planner_tests` (`id`);
-"""
diff --git a/frontend/migrations/052_expand_test_subdir_fields.py b/frontend/migrations/052_expand_test_subdir_fields.py
deleted file mode 100644
index f72ca60..0000000
--- a/frontend/migrations/052_expand_test_subdir_fields.py
+++ /dev/null
@@ -1,9 +0,0 @@
-UP_SQL = """
-ALTER TABLE tko_tests MODIFY test varchar(300) default NULL;
-ALTER TABLE tko_tests MODIFY subdir varchar(300) default NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE tko_tests MODIFY test varchar(60) default NULL;
-ALTER TABLE tko_tests MODIFY subdir varchar(60) default NULL;
-"""
diff --git a/frontend/migrations/053_nop.py b/frontend/migrations/053_nop.py
deleted file mode 100644
index ae8a2b2..0000000
--- a/frontend/migrations/053_nop.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def migrate_up(manager):
-    pass
-
-
-def migrate_down(manager):
-    pass
diff --git a/frontend/migrations/054_nop.py b/frontend/migrations/054_nop.py
deleted file mode 100644
index ae8a2b2..0000000
--- a/frontend/migrations/054_nop.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def migrate_up(manager):
-    pass
-
-
-def migrate_down(manager):
-    pass
diff --git a/frontend/migrations/055_ensure_invalidated_test_label_exists.py b/frontend/migrations/055_ensure_invalidated_test_label_exists.py
deleted file mode 100644
index 05d3210..0000000
--- a/frontend/migrations/055_ensure_invalidated_test_label_exists.py
+++ /dev/null
@@ -1,13 +0,0 @@
-UP_SQL = """
-ALTER TABLE tko_test_labels
-ADD CONSTRAINT tko_test_labels_unique
-UNIQUE INDEX (name);
-
-INSERT IGNORE INTO tko_test_labels (name, description)
-VALUES ('invalidated', '');
-"""
-
-DOWN_SQL = """
-ALTER TABLE tko_test_labels
-DROP INDEX tko_test_labels_unique;
-"""
diff --git a/frontend/migrations/056_planner_global_support.py b/frontend/migrations/056_planner_global_support.py
deleted file mode 100644
index e5ce4d3..0000000
--- a/frontend/migrations/056_planner_global_support.py
+++ /dev/null
@@ -1,19 +0,0 @@
-UP_SQL = """
-CREATE TABLE planner_test_configs_skipped_hosts (
-  testconfig_id INT NOT NULL,
-  host_id INT NOT NULL,
-  PRIMARY KEY (testconfig_id, host_id)
-) ENGINE = InnoDB;
-
-ALTER TABLE planner_test_configs_skipped_hosts
-ADD CONSTRAINT planner_test_configs_skipped_hosts_testconfig_ibfk
-FOREIGN KEY (testconfig_id) REFERENCES planner_test_configs (id);
-
-ALTER TABLE planner_test_configs_skipped_hosts
-ADD CONSTRAINT planner_test_configs_skipped_hosts_host_ibfk
-FOREIGN KEY (host_id) REFERENCES afe_hosts (id);
-"""
-
-DOWN_SQL = """
-DROP TABLE IF EXISTS planner_test_configs_skipped_hosts;
-"""
diff --git a/frontend/migrations/057_add_planner_triage_actions.py b/frontend/migrations/057_add_planner_triage_actions.py
deleted file mode 100644
index 5f6d97b..0000000
--- a/frontend/migrations/057_add_planner_triage_actions.py
+++ /dev/null
@@ -1,15 +0,0 @@
-UP_SQL = """
-ALTER TABLE planner_test_runs
-ADD COLUMN invalidated TINYINT(1) DEFAULT FALSE;
-
-ALTER TABLE planner_test_jobs
-ADD COLUMN requires_rerun TINYINT(1) DEFAULT FALSE;
-"""
-
-DOWN_SQL = """
-ALTER TABLE planner_test_jobs
-DROP COLUMN requires_rerun;
-
-ALTER TABLE planner_test_runs
-DROP COLUMN invalidated;
-"""
diff --git a/frontend/migrations/058_drone_management.py b/frontend/migrations/058_drone_management.py
deleted file mode 100644
index 5c19f99..0000000
--- a/frontend/migrations/058_drone_management.py
+++ /dev/null
@@ -1,89 +0,0 @@
-UP_SQL = """
-CREATE TABLE afe_drones (
-  id INT AUTO_INCREMENT NOT NULL PRIMARY KEY,
-  hostname VARCHAR(255) NOT NULL
-) ENGINE=InnoDB;
-
-ALTER TABLE afe_drones
-ADD CONSTRAINT afe_drones_unique
-UNIQUE KEY (hostname);
-
-
-CREATE TABLE afe_drone_sets (
-  id INT AUTO_INCREMENT NOT NULL PRIMARY KEY,
-  name VARCHAR(255) NOT NULL
-) ENGINE=InnoDB;
-
-ALTER TABLE afe_drone_sets
-ADD CONSTRAINT afe_drone_sets_unique
-UNIQUE KEY (name);
-
-
-CREATE TABLE afe_drone_sets_drones (
-  id INT AUTO_INCREMENT NOT NULL PRIMARY KEY,
-  droneset_id INT NOT NULL,
-  drone_id INT NOT NULL
-) ENGINE=InnoDB;
-
-ALTER TABLE afe_drone_sets_drones
-ADD CONSTRAINT afe_drone_sets_drones_droneset_ibfk
-FOREIGN KEY (droneset_id) REFERENCES afe_drone_sets (id);
-
-ALTER TABLE afe_drone_sets_drones
-ADD CONSTRAINT afe_drone_sets_drones_drone_ibfk
-FOREIGN KEY (drone_id) REFERENCES afe_drones (id);
-
-ALTER TABLE afe_drone_sets_drones
-ADD CONSTRAINT afe_drone_sets_drones_unique
-UNIQUE KEY (droneset_id, drone_id);
-
-
-ALTER TABLE afe_jobs
-ADD COLUMN drone_set_id INT;
-
-ALTER TABLE afe_jobs
-ADD CONSTRAINT afe_jobs_drone_set_ibfk
-FOREIGN KEY (drone_set_id) REFERENCES afe_drone_sets (id);
-
-
-ALTER TABLE afe_users
-ADD COLUMN drone_set_id INT;
-
-ALTER TABLE afe_users
-ADD CONSTRAINT afe_users_drone_set_ibfk
-FOREIGN KEY (drone_set_id) REFERENCES afe_drone_sets (id);
-
-
-UPDATE afe_special_tasks SET requested_by_id = (
-  SELECT id FROM afe_users WHERE login = 'autotest_system')
-WHERE requested_by_id IS NULL;
-
-SET foreign_key_checks = 0;
-
-ALTER TABLE afe_special_tasks
-MODIFY COLUMN requested_by_id INT NOT NULL;
-
-SET foreign_key_checks = 1;
-"""
-
-
-DOWN_SQL = """
-ALTER TABLE afe_special_tasks
-MODIFY COLUMN requested_by_id INT DEFAULT NULL;
-
-ALTER TABLE afe_users
-DROP FOREIGN KEY afe_users_drone_set_ibfk;
-
-ALTER TABLE afe_users
-DROP COLUMN drone_set_id;
-
-ALTER TABLE afe_jobs
-DROP FOREIGN KEY afe_jobs_drone_set_ibfk;
-
-ALTER TABLE afe_jobs
-DROP COLUMN drone_set_id;
-
-DROP TABLE IF EXISTS afe_drone_sets_drones;
-DROP TABLE IF EXISTS afe_drone_sets;
-DROP TABLE IF EXISTS afe_drones;
-"""
diff --git a/frontend/migrations/059_drone_sets_permissions.py b/frontend/migrations/059_drone_sets_permissions.py
deleted file mode 100644
index a5d2ccc..0000000
--- a/frontend/migrations/059_drone_sets_permissions.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import common
-from autotest_lib.database import db_utils
-
-UP_SQL = """
-SET @group_id = (SELECT id FROM auth_group WHERE name = 'Basic Admin');
-
-INSERT IGNORE INTO auth_group_permissions (group_id, permission_id)
-SELECT @group_id, id FROM auth_permission WHERE codename IN (
-  'add_droneset', 'change_droneset', 'delete_droneset', 'add_drone',
-  'change_drone', 'delete_drone');
-"""
-
-DOWN_SQL = """
-DELETE auth_group_permissions.* FROM
-auth_group INNER JOIN auth_group_permissions ON (
-  auth_group.id = auth_group_permissions.group_id)
-INNER JOIN auth_permission ON (
-  auth_group_permissions.permission_id = auth_permission.id)
-WHERE auth_group.name = 'Basic Admin' AND codename IN (
-  'add_droneset', 'change_droneset', 'delete_droneset', 'add_drone',
-  'change_drone', 'delete_drone');
-"""
-
-
-def migrate_up(manager):
-    """
-    If the auth tables don't exist, we shouldn't try to set the permissions.
-
-    The auth tables will exist if this is an existing Autotest installation. If
-    they don't, then this is a fresh installation, and the user will run
-    `manage.py syncdb` later, which will add the proper permissions.
-    """
-    if db_utils.auth_tables_exist(manager):
-        manager.execute_script(UP_SQL)
-
-
-def migrate_down(manager):
-    if db_utils.auth_tables_exist(manager):
-        manager.execute_script(DOWN_SQL)
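The modules removed in this range share one contract: a numbered file under frontend/migrations/ exposes UP_SQL/DOWN_SQL strings and, where extra logic is needed, migrate_up(manager)/migrate_down(manager) hooks, and the migration manager applies the files in numeric order. A minimal sketch of that contract, for orientation only; the column name is hypothetical and the "plain SQL is used when no hooks are defined" behaviour is an assumption about the manager, not something shown in this diff:

    # 0NN_example_migration.py -- illustrative sketch of the module contract
    # seen in the files deleted above; `example_flag` is a made-up column.
    UP_SQL = """
    ALTER TABLE afe_jobs ADD COLUMN example_flag TINYINT(1) NOT NULL DEFAULT '0';
    """

    DOWN_SQL = """
    ALTER TABLE afe_jobs DROP COLUMN example_flag;
    """

    def migrate_up(manager):
        """Forward step; modules without hooks rely on UP_SQL alone (assumed)."""
        manager.execute_script(UP_SQL)

    def migrate_down(manager):
        """Reverse step, mirroring migrate_up."""
        manager.execute_script(DOWN_SQL)
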
diff --git a/frontend/migrations/060_add_planner_additional_parameters.py b/frontend/migrations/060_add_planner_additional_parameters.py
deleted file mode 100644
index 7c7a2e3..0000000
--- a/frontend/migrations/060_add_planner_additional_parameters.py
+++ /dev/null
@@ -1,47 +0,0 @@
-UP_SQL = """
-CREATE TABLE planner_additional_parameters (
-  id INT PRIMARY KEY AUTO_INCREMENT,
-  plan_id INT NOT NULL,
-  hostname_regex VARCHAR(255) NOT NULL,
-  param_type VARCHAR(32) NOT NULL,
-  application_order INT NOT NULL
-) ENGINE = InnoDB;
-
-ALTER TABLE planner_additional_parameters
-ADD CONSTRAINT planner_additional_parameters_plan_ibfk
-FOREIGN KEY (plan_id) REFERENCES planner_plans (id);
-
-ALTER TABLE planner_additional_parameters
-ADD CONSTRAINT planner_additional_parameters_unique
-UNIQUE KEY (plan_id, hostname_regex, param_type);
-
-
-CREATE TABLE planner_additional_parameter_values (
-  id INT PRIMARY KEY AUTO_INCREMENT,
-  additional_parameter_id INT NOT NULL,
-  `key` VARCHAR(255) NOT NULL,
-  value VARCHAR(255) NOT NULL
-) ENGINE = InnoDB;
-
-ALTER TABLE planner_additional_parameter_values
-ADD CONSTRAINT planner_additional_parameter_values_additional_parameter_ibfk
-FOREIGN KEY (additional_parameter_id)
-  REFERENCES planner_additional_parameters (id);
-
-ALTER TABLE planner_additional_parameter_values
-ADD CONSTRAINT planner_additional_parameter_values_unique
-UNIQUE KEY (additional_parameter_id, `key`);
-"""
-
-DOWN_SQL = """
-ALTER TABLE planner_additional_parameter_values
-DROP FOREIGN KEY planner_additional_parameter_values_additional_parameter_ibfk;
-
-DROP TABLE planner_additional_parameter_values;
-
-
-ALTER TABLE planner_additional_parameters
-DROP FOREIGN KEY planner_additional_parameters_plan_ibfk;
-
-DROP TABLE planner_additional_parameters;
-"""
diff --git a/frontend/migrations/061_drone_sets_permissions_proper.py b/frontend/migrations/061_drone_sets_permissions_proper.py
deleted file mode 100644
index 70c0eed..0000000
--- a/frontend/migrations/061_drone_sets_permissions_proper.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from django.core import management
-import common
-from autotest_lib.frontend import settings
-from autotest_lib.database import db_utils
-
-AFE_MIGRATION_NAME = '059_drone_sets_permissions'
-migrations_module = __import__('autotest_lib.frontend.migrations', globals(),
-                               locals(), [AFE_MIGRATION_NAME])
-migration_059 = getattr(migrations_module, AFE_MIGRATION_NAME)
-
-
-def migrate_up(manager):
-    """
-    If the auth tables don't exist, we shouldn't try to set the permissions.
-
-    See migration 059
-    """
-    if db_utils.auth_tables_exist(manager):
-        management.setup_environ(settings)
-        # These have to be imported after the environment is set up
-        from django.contrib.contenttypes import management as content_management
-        from django.contrib.auth import management as auth_management
-        from django.db import models as db_models
-
-        content_management.update_all_contenttypes()
-        for app in db_models.get_apps():
-            auth_management.create_permissions(app, None, 2)
-
-        manager.execute_script(migration_059.UP_SQL)
-
-
-def migrate_down(manager):
-    if db_utils.auth_tables_exist(manager):
-        manager.execute_script(migration_059.DOWN_SQL)
diff --git a/frontend/migrations/062_drone_sets_unique.py b/frontend/migrations/062_drone_sets_unique.py
deleted file mode 100644
index 738a0f0..0000000
--- a/frontend/migrations/062_drone_sets_unique.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import common
-from autotest_lib.database import db_utils
-
-UP_SQL = """
-CREATE INDEX afe_drone_sets_drones_droneset_ibfk
-ON afe_drone_sets_drones (droneset_id);
-
-ALTER TABLE afe_drone_sets_drones
-DROP KEY afe_drone_sets_drones_unique;
-
-ALTER TABLE afe_drone_sets_drones
-ADD CONSTRAINT afe_drone_sets_drones_unique
-UNIQUE KEY (drone_id);
-"""
-
-# On first migration to 62, this key will be deleted automatically. However, if
-# you migrate to 62, then down to 61, then back to 62, this key will remain.
-DROP_KEY_SQL = """
-ALTER TABLE afe_drone_sets_drones
-DROP KEY afe_drone_sets_drones_drone_ibfk;
-"""
-
-DOWN_SQL = """
-CREATE INDEX afe_drone_sets_drones_drone_ibfk
-ON afe_drone_sets_drones (drone_id);
-
-ALTER TABLE afe_drone_sets_drones
-DROP KEY afe_drone_sets_drones_unique;
-
-ALTER TABLE afe_drone_sets_drones
-ADD CONSTRAINT afe_drone_sets_drones_unique
-UNIQUE KEY (droneset_id, drone_id);
-
-ALTER TABLE afe_drone_sets_drones
-DROP KEY afe_drone_sets_drones_droneset_ibfk;
-"""
-
-
-def migrate_up(manager):
-    query = ('SELECT * FROM afe_drone_sets_drones '
-             'GROUP BY drone_id HAVING COUNT(*) > 1')
-    rows = manager.execute(query)
-    if rows:
-        raise Exception('Some drones are associated with more than one drone '
-                        'set. Please remove all duplicates before running this '
-                        'migration.')
-    manager.execute_script(UP_SQL)
-
-    if db_utils.check_index_exists(manager, 'afe_drone_sets_drones',
-                                   'afe_drone_sets_drones_drone_ibfk'):
-        manager.execute(DROP_KEY_SQL)
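The guard at the end of the deleted 062 module (db_utils.check_index_exists before executing DROP_KEY_SQL) is what makes the up-migration safe to re-run after a 62 -> 61 -> 62 round trip, as its own comment explains. For reference, an equivalent probe written directly against information_schema might look like the sketch below; this is an illustration of the check, not the implementation of db_utils.check_index_exists:

    # Illustrative stand-in for db_utils.check_index_exists(manager, table, index).
    INDEX_EXISTS_SQL = """
    SELECT COUNT(*) FROM information_schema.statistics
    WHERE table_schema = DATABASE()
      AND table_name = 'afe_drone_sets_drones'
      AND index_name = 'afe_drone_sets_drones_drone_ibfk';
    """

    def index_exists(manager):
        # The deleted modules treat manager.execute() on a SELECT as returning
        # rows; the exact row shape assumed here is (count,).
        rows = manager.execute(INDEX_EXISTS_SQL)
        return bool(rows and rows[0][0])
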
diff --git a/frontend/migrations/063_parameterized_tests.py b/frontend/migrations/063_parameterized_tests.py
deleted file mode 100644
index 83ad3b0..0000000
--- a/frontend/migrations/063_parameterized_tests.py
+++ /dev/null
@@ -1,144 +0,0 @@
-UP_SQL = """
-CREATE TABLE afe_test_parameters (
-  id INT PRIMARY KEY AUTO_INCREMENT,
-  test_id INT NOT NULL,
-  name VARCHAR(255) NOT NULL
-) ENGINE = InnoDB;
-
-ALTER TABLE afe_test_parameters
-ADD CONSTRAINT afe_test_parameters_test_ibfk
-FOREIGN KEY (test_id) REFERENCES afe_autotests (id);
-
-ALTER TABLE afe_test_parameters
-ADD CONSTRAINT afe_test_parameters_unique
-UNIQUE KEY (test_id, name);
-
-
-CREATE TABLE afe_parameterized_jobs (
-  id INT PRIMARY KEY AUTO_INCREMENT,
-  test_id INT NOT NULL,
-  label_id INT DEFAULT NULL,
-  use_container TINYINT(1) DEFAULT 0,
-  profile_only TINYINT(1) DEFAULT 0,
-  upload_kernel_config TINYINT(1) DEFAULT 0
-) ENGINE = InnoDB;
-
-ALTER TABLE afe_parameterized_jobs
-ADD CONSTRAINT afe_parameterized_jobs_test_ibfk
-FOREIGN KEY (test_id) REFERENCES afe_autotests (id);
-
-ALTER TABLE afe_parameterized_jobs
-ADD CONSTRAINT afe_parameterized_jobs_label_ibfk
-FOREIGN KEY (label_id) REFERENCES afe_labels (id);
-
-
-CREATE TABLE afe_kernels (
-  id INT PRIMARY KEY AUTO_INCREMENT,
-  version VARCHAR(255) NOT NULL,
-  cmdline VARCHAR(255) DEFAULT ''
-) ENGINE = InnoDB;
-
-ALTER TABLE afe_kernels
-ADD CONSTRAINT afe_kernals_unique
-UNIQUE KEY (version, cmdline);
-
-
-CREATE TABLE afe_parameterized_jobs_kernels (
-  parameterized_job_id INT NOT NULL,
-  kernel_id INT NOT NULL,
-  PRIMARY KEY (parameterized_job_id, kernel_id)
-) ENGINE = InnoDB;
-
-ALTER TABLE afe_parameterized_jobs_kernels
-ADD CONSTRAINT afe_parameterized_jobs_kernels_parameterized_job_ibfk
-FOREIGN KEY (parameterized_job_id) REFERENCES afe_parameterized_jobs (id);
-
-
-CREATE TABLE afe_parameterized_jobs_profilers (
-  id INT PRIMARY KEY AUTO_INCREMENT,
-  parameterized_job_id INT NOT NULL,
-  profiler_id INT NOT NULL
-) ENGINE = InnoDB;
-
-ALTER TABLE afe_parameterized_jobs_profilers
-ADD CONSTRAINT afe_parameterized_jobs_profilers_parameterized_job_ibfk
-FOREIGN KEY (parameterized_job_id) REFERENCES afe_parameterized_jobs (id);
-
-ALTER TABLE afe_parameterized_jobs_profilers
-ADD CONSTRAINT afe_parameterized_jobs_profilers_profile_ibfk
-FOREIGN KEY (profiler_id) REFERENCES afe_profilers (id);
-
-ALTER TABLE afe_parameterized_jobs_profilers
-ADD CONSTRAINT afe_parameterized_jobs_profilers_unique
-UNIQUE KEY (parameterized_job_id, profiler_id);
-
-
-CREATE TABLE afe_parameterized_job_profiler_parameters (
-  id INT PRIMARY KEY AUTO_INCREMENT,
-  parameterized_job_profiler_id INT NOT NULL,
-  parameter_name VARCHAR(255) NOT NULL,
-  parameter_value TEXT NOT NULL,
-  parameter_type ENUM('int', 'float', 'string')
-) ENGINE = InnoDB;
-
-ALTER TABLE afe_parameterized_job_profiler_parameters
-ADD CONSTRAINT afe_parameterized_job_profiler_parameters_ibfk
-FOREIGN KEY (parameterized_job_profiler_id)
-  REFERENCES afe_parameterized_jobs_profilers (id);
-
-ALTER TABLE afe_parameterized_job_profiler_parameters
-ADD CONSTRAINT afe_parameterized_job_profiler_parameters_unique
-UNIQUE KEY (parameterized_job_profiler_id, parameter_name);
-
-
-CREATE TABLE afe_parameterized_job_parameters (
-  id INT PRIMARY KEY AUTO_INCREMENT,
-  parameterized_job_id INT NOT NULL,
-  test_parameter_id INT NOT NULL,
-  parameter_value TEXT NOT NULL,
-  parameter_type ENUM('int', 'float', 'string')
-) ENGINE = InnoDB;
-
-ALTER TABLE afe_parameterized_job_parameters
-ADD CONSTRAINT afe_parameterized_job_parameters_job_ibfk
-FOREIGN KEY (parameterized_job_id) REFERENCES afe_parameterized_jobs (id);
-
-ALTER TABLE afe_parameterized_job_parameters
-ADD CONSTRAINT afe_parameterized_job_parameters_test_parameter_ibfk
-FOREIGN KEY (test_parameter_id) REFERENCES afe_test_parameters (id);
-
-ALTER TABLE afe_parameterized_job_parameters
-ADD CONSTRAINT afe_parameterized_job_parameters_unique
-UNIQUE KEY (parameterized_job_id, test_parameter_id);
-
-
-ALTER TABLE afe_jobs
-MODIFY COLUMN control_file TEXT DEFAULT NULL;
-
-ALTER TABLE afe_jobs
-ADD COLUMN parameterized_job_id INT DEFAULT NULL;
-
-ALTER TABLE afe_jobs
-ADD CONSTRAINT afe_jobs_parameterized_job_ibfk
-FOREIGN KEY (parameterized_job_id) REFERENCES afe_parameterized_jobs (id);
-"""
-
-
-DOWN_SQL = """
-ALTER TABLE afe_jobs
-DROP FOREIGN KEY afe_jobs_parameterized_job_ibfk;
-
-ALTER TABLE afe_jobs
-DROP COLUMN parameterized_job_id;
-
-ALTER TABLE afe_jobs
-MODIFY COLUMN control_file TEXT;
-
-DROP TABLE afe_parameterized_job_parameters;
-DROP TABLE afe_parameterized_job_profiler_parameters;
-DROP TABLE afe_parameterized_jobs_profilers;
-DROP TABLE afe_parameterized_jobs_kernels;
-DROP TABLE afe_kernels;
-DROP TABLE afe_parameterized_jobs;
-DROP TABLE afe_test_parameters;
-"""
diff --git a/frontend/migrations/064_add_jobs_and_tests_time_indices.py b/frontend/migrations/064_add_jobs_and_tests_time_indices.py
deleted file mode 100644
index 3508bba..0000000
--- a/frontend/migrations/064_add_jobs_and_tests_time_indices.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# These indices speed up date-range queries often used in making dashboards.
-UP_SQL = """
-alter table tko_tests add index started_time (started_time);
-alter table afe_jobs add index created_on (created_on);
-"""
-
-DOWN_SQL = """
-drop index started_time on tko_tests;
-drop index created_on on afe_jobs;
-"""
diff --git a/frontend/migrations/065_add_id_afe_hosts_labels.py b/frontend/migrations/065_add_id_afe_hosts_labels.py
deleted file mode 100644
index 995feef..0000000
--- a/frontend/migrations/065_add_id_afe_hosts_labels.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_hosts_labels ADD COLUMN id integer AUTO_INCREMENT NOT NULL PRIMARY KEY FIRST;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_hosts_labels DROP COLUMN id;
-"""
diff --git a/frontend/migrations/066_drop_planner.py b/frontend/migrations/066_drop_planner.py
deleted file mode 100644
index 3185dc4..0000000
--- a/frontend/migrations/066_drop_planner.py
+++ /dev/null
@@ -1,443 +0,0 @@
-UP_SQL = """
-DROP TABLE IF EXISTS planner_test_run_bugs;
-DROP TABLE IF EXISTS planner_test_runs;
-DROP TABLE IF EXISTS planner_history;
-DROP TABLE IF EXISTS planner_autoprocess_bugs;
-DROP TABLE IF EXISTS planner_bugs;
-DROP TABLE IF EXISTS planner_hosts;
-DROP TABLE IF EXISTS planner_additional_parameter_values;
-DROP TABLE IF EXISTS planner_additional_parameters;
-DROP TABLE IF EXISTS planner_autoprocess_labels;
-DROP TABLE IF EXISTS planner_autoprocess_keyvals;
-DROP TABLE IF EXISTS planner_autoprocess;
-DROP TABLE IF EXISTS planner_custom_queries;
-DROP TABLE IF EXISTS planner_plan_host_labels;
-DROP TABLE IF EXISTS planner_plan_owners;
-DROP TABLE IF EXISTS planner_saved_objects;
-DROP TABLE IF EXISTS planner_test_configs_skipped_hosts;
-DROP TABLE IF EXISTS planner_test_jobs;
-DROP TABLE IF EXISTS planner_data_types;
-DROP TABLE IF EXISTS planner_keyvals;
-DROP TABLE IF EXISTS planner_test_configs;
-DROP TABLE IF EXISTS planner_test_control_files;
-DROP TABLE IF EXISTS planner_plans;
-"""
-
-DOWN_SQL = """
---
--- Table structure for table `planner_plans`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_plans` (
-  `id` int(11) NOT NULL auto_increment,
-  `name` varchar(255) NOT NULL,
-  `label_override` varchar(255) default NULL,
-  `support` longtext NOT NULL,
-  `complete` tinyint(1) NOT NULL,
-  `dirty` tinyint(1) NOT NULL,
-  `initialized` tinyint(1) default '0',
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `name` (`name`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_test_control_files`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_test_control_files` (
-  `id` int(11) NOT NULL auto_increment,
-  `the_hash` varchar(40) NOT NULL,
-  `contents` longtext NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `the_hash` (`the_hash`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_test_configs`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_test_configs` (
-  `id` int(11) NOT NULL auto_increment,
-  `plan_id` int(11) NOT NULL,
-  `control_file_id` int(11) NOT NULL,
-  `execution_order` int(11) NOT NULL,
-  `alias` varchar(255) NOT NULL,
-  `estimated_runtime` int(11) NOT NULL,
-  `is_server` tinyint(1) default '1',
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `tests_plan_id_alias_unique` (`plan_id`,`alias`),
-  KEY `planner_tests_plan_id` (`plan_id`),
-  KEY `planner_tests_control_file_id` (`control_file_id`),
-  CONSTRAINT `tests_control_file_id_fk` FOREIGN KEY (`control_file_id`) REFERENCES `planner_test_control_files` (`id`),
-  CONSTRAINT `tests_plan_id_fk` FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_keyvals`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_keyvals` (
-  `id` int(11) NOT NULL auto_increment,
-  `the_hash` varchar(40) NOT NULL,
-  `key` varchar(1024) NOT NULL,
-  `value` varchar(1024) NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `the_hash` (`the_hash`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_data_types`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_data_types` (
-  `id` int(11) NOT NULL auto_increment,
-  `name` varchar(255) NOT NULL,
-  `db_table` varchar(255) NOT NULL,
-  PRIMARY KEY  (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_test_jobs`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_test_jobs` (
-  `id` int(11) NOT NULL auto_increment,
-  `plan_id` int(11) NOT NULL,
-  `test_config_id` int(11) NOT NULL,
-  `afe_job_id` int(11) NOT NULL,
-  PRIMARY KEY  (`id`),
-  KEY `planner_test_jobs_plan_id` (`plan_id`),
-  KEY `planner_test_jobs_afe_job_id` (`afe_job_id`),
-  KEY `planner_test_jobs_test_config_id` (`test_config_id`),
-  CONSTRAINT `test_jobs_afe_job_id_fk` FOREIGN KEY (`afe_job_id`) REFERENCES `afe_jobs` (`id`),
-  CONSTRAINT `test_jobs_plan_id_fk` FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`),
-  CONSTRAINT `test_jobs_test_config_id_fk` FOREIGN KEY (`test_config_id`) REFERENCES `planner_test_configs` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_test_configs_skipped_hosts`
---
-
-CREATE TABLE planner_test_configs_skipped_hosts (
-  testconfig_id INT NOT NULL,
-  host_id INT NOT NULL,
-  PRIMARY KEY (testconfig_id, host_id)
-) ENGINE = InnoDB;
-
-ALTER TABLE planner_test_configs_skipped_hosts
-ADD CONSTRAINT planner_test_configs_skipped_hosts_testconfig_ibfk
-FOREIGN KEY (testconfig_id) REFERENCES planner_test_configs (id);
-
-ALTER TABLE planner_test_configs_skipped_hosts
-ADD CONSTRAINT planner_test_configs_skipped_hosts_host_ibfk
-FOREIGN KEY (host_id) REFERENCES afe_hosts (id);
-
---
--- Table structure for table `planner_saved_objects`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_saved_objects` (
-  `id` int(11) NOT NULL auto_increment,
-  `user_id` int(11) NOT NULL,
-  `type` varchar(16) NOT NULL,
-  `name` varchar(255) NOT NULL,
-  `encoded_object` longtext NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `user_id` (`user_id`,`type`,`name`),
-  KEY `planner_saved_objects_user_id` (`user_id`),
-  CONSTRAINT `saved_objects_user_id_fk` FOREIGN KEY (`user_id`) REFERENCES `afe_users` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_plan_owners`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_plan_owners` (
-  `id` int(11) NOT NULL auto_increment,
-  `plan_id` int(11) NOT NULL,
-  `user_id` int(11) NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `plan_id` (`plan_id`,`user_id`),
-  KEY `plan_owners_user_id_fk` (`user_id`),
-  CONSTRAINT `plan_owners_plan_id_fk` FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`),
-  CONSTRAINT `plan_owners_user_id_fk` FOREIGN KEY (`user_id`) REFERENCES `afe_users` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_plan_host_labels`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_plan_host_labels` (
-  `id` int(11) NOT NULL auto_increment,
-  `plan_id` int(11) NOT NULL,
-  `label_id` int(11) NOT NULL,
-  PRIMARY KEY  (`id`),
-  KEY `plan_host_labels_plan_id_fk` (`plan_id`),
-  KEY `plan_host_labels_label_id_fk` (`label_id`),
-  CONSTRAINT `plan_host_labels_label_id_fk` FOREIGN KEY (`label_id`) REFERENCES `afe_labels` (`id`),
-  CONSTRAINT `plan_host_labels_plan_id_fk` FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_custom_queries`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_custom_queries` (
-  `id` int(11) NOT NULL auto_increment,
-  `plan_id` int(11) NOT NULL,
-  `query` longtext NOT NULL,
-  PRIMARY KEY  (`id`),
-  KEY `planner_custom_queries_plan_id` (`plan_id`),
-  CONSTRAINT `custom_queries_plan_id_fk` FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_autoprocess`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_autoprocess` (
-  `id` int(11) NOT NULL auto_increment,
-  `plan_id` int(11) NOT NULL,
-  `condition` longtext NOT NULL,
-  `enabled` tinyint(1) NOT NULL,
-  `reason_override` varchar(255) default NULL,
-  PRIMARY KEY  (`id`),
-  KEY `planner_autoprocess_plan_id` (`plan_id`),
-  CONSTRAINT `autoprocess_plan_id_fk` FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_autoprocess_keyvals`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_autoprocess_keyvals` (
-  `id` int(11) NOT NULL auto_increment,
-  `autoprocess_id` int(11) NOT NULL,
-  `keyval_id` int(11) NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `autoprocess_id` (`autoprocess_id`,`keyval_id`),
-  KEY `autoprocess_keyvals_keyval_id_fk` (`keyval_id`),
-  CONSTRAINT `autoprocess_keyvals_autoprocess_id_fk` FOREIGN KEY (`autoprocess_id`) REFERENCES `planner_autoprocess` (`id`),
-  CONSTRAINT `autoprocess_keyvals_keyval_id_fk` FOREIGN KEY (`keyval_id`) REFERENCES `planner_keyvals` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_autoprocess_labels`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_autoprocess_labels` (
-  `id` int(11) NOT NULL auto_increment,
-  `autoprocess_id` int(11) NOT NULL,
-  `testlabel_id` int(11) NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `autoprocess_id` (`autoprocess_id`,`testlabel_id`),
-  KEY `autoprocess_labels_testlabel_id_fk` (`testlabel_id`),
-  CONSTRAINT `autoprocess_labels_autoprocess_id_fk` FOREIGN KEY (`autoprocess_id`) REFERENCES `planner_autoprocess` (`id`),
-  CONSTRAINT `autoprocess_labels_testlabel_id_fk` FOREIGN KEY (`testlabel_id`) REFERENCES `tko_test_labels` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_additional_parameters`
---
-
-CREATE TABLE planner_additional_parameters (
-  id INT PRIMARY KEY AUTO_INCREMENT,
-  plan_id INT NOT NULL,
-  hostname_regex VARCHAR(255) NOT NULL,
-  param_type VARCHAR(32) NOT NULL,
-  application_order INT NOT NULL
-) ENGINE = InnoDB;
-
-ALTER TABLE planner_additional_parameters
-ADD CONSTRAINT planner_additional_parameters_plan_ibfk
-FOREIGN KEY (plan_id) REFERENCES planner_plans (id);
-
-ALTER TABLE planner_additional_parameters
-ADD CONSTRAINT planner_additional_parameters_unique
-UNIQUE KEY (plan_id, hostname_regex, param_type);
-
---
--- Table structure for table `planner_additional_parameter_values`
---
-
-CREATE TABLE planner_additional_parameter_values (
-  id INT PRIMARY KEY AUTO_INCREMENT,
-  additional_parameter_id INT NOT NULL,
-  `key` VARCHAR(255) NOT NULL,
-  value VARCHAR(255) NOT NULL
-) ENGINE = InnoDB;
-
-ALTER TABLE planner_additional_parameter_values
-ADD CONSTRAINT planner_additional_parameter_values_additional_parameter_ibfk
-FOREIGN KEY (additional_parameter_id)
-  REFERENCES planner_additional_parameters (id);
-
-ALTER TABLE planner_additional_parameter_values
-ADD CONSTRAINT planner_additional_parameter_values_unique
-UNIQUE KEY (additional_parameter_id, `key`);
-
---
--- Table structure for table `planner_hosts`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_hosts` (
-  `id` int(11) NOT NULL auto_increment,
-  `plan_id` int(11) NOT NULL,
-  `host_id` int(11) NOT NULL,
-  `complete` tinyint(1) NOT NULL,
-  `blocked` tinyint(1) NOT NULL,
-  `added_by_label` tinyint(1) default '0',
-  PRIMARY KEY  (`id`),
-  KEY `planner_hosts_plan_id` (`plan_id`),
-  KEY `planner_hosts_host_id` (`host_id`),
-  CONSTRAINT `hosts_host_id_fk` FOREIGN KEY (`host_id`) REFERENCES `afe_hosts` (`id`),
-  CONSTRAINT `hosts_plan_id_fk` FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_bugs`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_bugs` (
-  `id` int(11) NOT NULL auto_increment,
-  `external_uid` varchar(255) NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `external_uid` (`external_uid`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_autoprocess_bugs`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_autoprocess_bugs` (
-  `id` int(11) NOT NULL auto_increment,
-  `autoprocess_id` int(11) NOT NULL,
-  `bug_id` int(11) NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `autoprocess_id` (`autoprocess_id`,`bug_id`),
-  KEY `autoprocess_bugs_bug_id_fk` (`bug_id`),
-  CONSTRAINT `autoprocess_bugs_autoprocess_id_fk` FOREIGN KEY (`autoprocess_id`) REFERENCES `planner_autoprocess` (`id`),
-  CONSTRAINT `autoprocess_bugs_bug_id_fk` FOREIGN KEY (`bug_id`) REFERENCES `planner_bugs` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_history`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_history` (
-  `id` int(11) NOT NULL auto_increment,
-  `plan_id` int(11) NOT NULL,
-  `action_id` int(11) NOT NULL,
-  `user_id` int(11) NOT NULL,
-  `data_type_id` int(11) NOT NULL,
-  `object_id` int(11) NOT NULL,
-  `old_object_repr` longtext NOT NULL,
-  `new_object_repr` longtext NOT NULL,
-  `time` datetime NOT NULL,
-  PRIMARY KEY  (`id`),
-  KEY `planner_history_plan_id` (`plan_id`),
-  KEY `planner_history_user_id` (`user_id`),
-  KEY `planner_history_data_type_id` (`data_type_id`),
-  CONSTRAINT `history_data_type_id_fk` FOREIGN KEY (`data_type_id`) REFERENCES `planner_data_types` (`id`),
-  CONSTRAINT `history_plan_id_fk` FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`),
-  CONSTRAINT `history_user_id_fk` FOREIGN KEY (`user_id`) REFERENCES `afe_users` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_test_runs`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_test_runs` (
-  `id` int(11) NOT NULL auto_increment,
-  `plan_id` int(11) NOT NULL,
-  `test_job_id` int(11) NOT NULL,
-  `tko_test_id` int(10) unsigned NOT NULL,
-  `status` varchar(16) NOT NULL,
-  `finalized` tinyint(1) NOT NULL,
-  `seen` tinyint(1) NOT NULL,
-  `triaged` tinyint(1) NOT NULL,
-  `host_id` int(11) NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `test_runs_unique` (`plan_id`,`test_job_id`,`tko_test_id`,`host_id`),
-  KEY `planner_test_runs_plan_id` (`plan_id`),
-  KEY `planner_test_runs_test_job_id` (`test_job_id`),
-  KEY `planner_test_runs_tko_test_id` (`tko_test_id`),
-  KEY `test_runs_host_id_fk` (`host_id`),
-  CONSTRAINT `test_runs_host_id_fk` FOREIGN KEY (`host_id`) REFERENCES `planner_hosts` (`id`),
-  CONSTRAINT `test_runs_plan_id_fk` FOREIGN KEY (`plan_id`) REFERENCES `planner_plans` (`id`),
-  CONSTRAINT `test_runs_test_job_id_fk` FOREIGN KEY (`test_job_id`) REFERENCES `planner_test_jobs` (`id`),
-  CONSTRAINT `test_runs_tko_test_id_fk` FOREIGN KEY (`tko_test_id`) REFERENCES `tko_tests` (`test_idx`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-
---
--- Table structure for table `planner_test_run_bugs`
---
-
-SET @saved_cs_client     = @@character_set_client;
-SET character_set_client = utf8;
-CREATE TABLE `planner_test_run_bugs` (
-  `id` int(11) NOT NULL auto_increment,
-  `testrun_id` int(11) NOT NULL,
-  `bug_id` int(11) NOT NULL,
-  PRIMARY KEY  (`id`),
-  UNIQUE KEY `testrun_id` (`testrun_id`,`bug_id`),
-  KEY `test_run_bugs_bug_id_fk` (`bug_id`),
-  CONSTRAINT `test_run_bugs_bug_id_fk` FOREIGN KEY (`bug_id`) REFERENCES `planner_bugs` (`id`),
-  CONSTRAINT `test_run_bugs_testrun_id_fk` FOREIGN KEY (`testrun_id`) REFERENCES `planner_test_runs` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-SET character_set_client = @saved_cs_client;
-"""
diff --git a/frontend/migrations/067_add_id_afe_acl_groups_hosts.py b/frontend/migrations/067_add_id_afe_acl_groups_hosts.py
deleted file mode 100644
index 4bd6572..0000000
--- a/frontend/migrations/067_add_id_afe_acl_groups_hosts.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_acl_groups_hosts ADD COLUMN id integer AUTO_INCREMENT NOT NULL PRIMARY KEY FIRST;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_acl_groups_hosts DROP COLUMN id;
-"""
diff --git a/frontend/migrations/068_add_id_afe_acl_groups_users.py b/frontend/migrations/068_add_id_afe_acl_groups_users.py
deleted file mode 100644
index 185eb05..0000000
--- a/frontend/migrations/068_add_id_afe_acl_groups_users.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_acl_groups_users ADD COLUMN id integer AUTO_INCREMENT NOT NULL PRIMARY KEY FIRST;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_acl_groups_users DROP COLUMN id;
-"""
diff --git a/frontend/migrations/069_tko_tests_extend_reason_table.py b/frontend/migrations/069_tko_tests_extend_reason_table.py
deleted file mode 100644
index 77e5330..0000000
--- a/frontend/migrations/069_tko_tests_extend_reason_table.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE tko_tests MODIFY reason VARCHAR(4096);
-"""
-
-DOWN_SQL = """
-ALTER TABLE tko_tests MODIFY reason VARCHAR(1024);
-"""
diff --git a/frontend/migrations/070_job_add_max_runtime_mins.py b/frontend/migrations/070_job_add_max_runtime_mins.py
deleted file mode 100644
index 7db10c6..0000000
--- a/frontend/migrations/070_job_add_max_runtime_mins.py
+++ /dev/null
@@ -1,8 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_jobs ADD COLUMN max_runtime_mins integer NOT NULL;
-UPDATE afe_jobs SET max_runtime_mins = max_runtime_hrs * 60;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_jobs DROP COLUMN max_runtime_mins;
-"""
diff --git a/frontend/migrations/071_job_add_parent_job.py b/frontend/migrations/071_job_add_parent_job.py
deleted file mode 100644
index c28131c..0000000
--- a/frontend/migrations/071_job_add_parent_job.py
+++ /dev/null
@@ -1,8 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_jobs ADD COLUMN parent_job_id integer NULL;
-UPDATE afe_jobs SET parent_job_id = NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_jobs DROP COLUMN parent_job_id;
-"""
diff --git a/frontend/migrations/072_expand_perf_key_fields.py b/frontend/migrations/072_expand_perf_key_fields.py
deleted file mode 100644
index 8e40a54..0000000
--- a/frontend/migrations/072_expand_perf_key_fields.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE tko_iteration_result MODIFY attribute varchar(256) default NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE tko_iteration_result MODIFY attribute varchar(30) default NULL;
-"""
diff --git a/frontend/migrations/073_job_add_test_retry.py b/frontend/migrations/073_job_add_test_retry.py
deleted file mode 100644
index 699b38d..0000000
--- a/frontend/migrations/073_job_add_test_retry.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_jobs ADD COLUMN test_retry integer NOT NULL DEFAULT '0';
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_jobs DROP COLUMN test_retry;
-"""
diff --git a/frontend/migrations/074_test_add_test_retry.py b/frontend/migrations/074_test_add_test_retry.py
deleted file mode 100644
index d30fb4d..0000000
--- a/frontend/migrations/074_test_add_test_retry.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_autotests ADD COLUMN test_retry integer NOT NULL DEFAULT '0';
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_autotests DROP COLUMN test_retry;
-"""
diff --git a/frontend/migrations/075_index_get_prioritized_special_tasks.py b/frontend/migrations/075_index_get_prioritized_special_tasks.py
deleted file mode 100644
index 997c677..0000000
--- a/frontend/migrations/075_index_get_prioritized_special_tasks.py
+++ /dev/null
@@ -1,10 +0,0 @@
-UP_SQL = """
-CREATE INDEX host_queue_entries_host_active ON afe_host_queue_entries (host_id, active);
-CREATE INDEX special_tasks_active_complete ON afe_special_tasks (is_active, is_complete);
-"""
-
-DOWN_SQL = """
-DROP INDEX host_queue_entries_host_active ON afe_host_queue_entries;
-DROP INDEX special_tasks_active_complete ON afe_special_tasks;
-"""
-
diff --git a/frontend/migrations/076_index_pending_hqe.py b/frontend/migrations/076_index_pending_hqe.py
deleted file mode 100644
index 2986224..0000000
--- a/frontend/migrations/076_index_pending_hqe.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-CREATE INDEX host_queue_entry_status ON afe_host_queue_entries (status);
-"""
-
-DOWN_SQL = """
-DROP INDEX host_queue_entry_status ON afe_host_queue_entries;
-"""
diff --git a/frontend/migrations/077_add_run_reset.py b/frontend/migrations/077_add_run_reset.py
deleted file mode 100644
index b71ab9a..0000000
--- a/frontend/migrations/077_add_run_reset.py
+++ /dev/null
@@ -1,9 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_autotests ADD COLUMN run_reset SMALLINT NOT NULL DEFAULT '1';
-ALTER TABLE afe_jobs ADD COLUMN run_reset SMALLINT NOT NULL DEFAULT '1';
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_autotests DROP COLUMN run_reset;
-ALTER TABLE afe_jobs DROP COLUMN run_reset;
-"""
\ No newline at end of file
diff --git a/frontend/migrations/078_add_tko_iteration_perf_value.py b/frontend/migrations/078_add_tko_iteration_perf_value.py
deleted file mode 100644
index 77e720d..0000000
--- a/frontend/migrations/078_add_tko_iteration_perf_value.py
+++ /dev/null
@@ -1,20 +0,0 @@
-UP_SQL = """
-CREATE TABLE tko_iteration_perf_value (
-  test_idx INT(10) UNSIGNED NOT NULL,
-  iteration INT(11) DEFAULT NULL,
-  description VARCHAR(256) DEFAULT NULL,
-  value FLOAT DEFAULT NULL,
-  stddev FLOAT DEFAULT NULL,
-  units VARCHAR(32) DEFAULT NULL,
-  higher_is_better BOOLEAN NOT NULL DEFAULT TRUE,
-  KEY test_idx (test_idx),
-  KEY description (description),
-  KEY value (value),
-  CONSTRAINT tko_iteration_perf_value_ibfk FOREIGN KEY (test_idx)
-      REFERENCES tko_tests (test_idx) ON DELETE CASCADE
-) ENGINE = InnoDB;
-"""
-
-DOWN_SQL = """
-DROP TABLE tko_iteration_perf_value;
-"""
diff --git a/frontend/migrations/079_special_task_abort.py b/frontend/migrations/079_special_task_abort.py
deleted file mode 100644
index 6799ac2..0000000
--- a/frontend/migrations/079_special_task_abort.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_special_tasks ADD COLUMN is_aborted TINYINT(1) NOT NULL DEFAULT '0';
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_special_tasks DROP COLUMN is_aborted;
-"""
diff --git a/frontend/migrations/080_index_abort_host_queue_entries.py b/frontend/migrations/080_index_abort_host_queue_entries.py
deleted file mode 100644
index 843c11e..0000000
--- a/frontend/migrations/080_index_abort_host_queue_entries.py
+++ /dev/null
@@ -1,8 +0,0 @@
-UP_SQL = """
-CREATE INDEX host_queue_entries_abort_incomplete ON afe_host_queue_entries (aborted, complete);
-"""
-
-DOWN_SQL = """
-DROP INDEX host_queue_entries_abort_incomplete ON afe_host_queue_entries;
-"""
-
diff --git a/frontend/migrations/081_index_afe_jobs_on_parent_id.py b/frontend/migrations/081_index_afe_jobs_on_parent_id.py
deleted file mode 100644
index d269fce..0000000
--- a/frontend/migrations/081_index_afe_jobs_on_parent_id.py
+++ /dev/null
@@ -1,8 +0,0 @@
-UP_SQL = """
-CREATE INDEX parent_job_id_index ON afe_jobs (parent_job_id);
-"""
-
-DOWN_SQL = """
-DROP INDEX parent_job_id_index ON afe_jobs;
-"""
-
diff --git a/frontend/migrations/082_add_tko_iteration_perf_value_graph_field.py b/frontend/migrations/082_add_tko_iteration_perf_value_graph_field.py
deleted file mode 100644
index 6a4c88c..0000000
--- a/frontend/migrations/082_add_tko_iteration_perf_value_graph_field.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE tko_iteration_perf_value ADD COLUMN graph VARCHAR(256) DEFAULT NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE tko_iteration_perf_value DROP COLUMN graph;
-"""
diff --git a/frontend/migrations/083_job_add_timeout_mins.py b/frontend/migrations/083_job_add_timeout_mins.py
deleted file mode 100644
index 4caa26a..0000000
--- a/frontend/migrations/083_job_add_timeout_mins.py
+++ /dev/null
@@ -1,8 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_jobs ADD COLUMN timeout_mins integer NOT NULL;
-UPDATE afe_jobs SET timeout_mins = timeout * 60;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_jobs DROP COLUMN timeout_mins;
-"""
\ No newline at end of file
diff --git a/frontend/migrations/084_convert_metahost_to_label.py b/frontend/migrations/084_convert_metahost_to_label.py
deleted file mode 100644
index 9fb154c..0000000
--- a/frontend/migrations/084_convert_metahost_to_label.py
+++ /dev/null
@@ -1,8 +0,0 @@
-UP_SQL = """
-INSERT INTO afe_jobs_dependency_labels (job_id, label_id)
-SELECT job_id, meta_host FROM afe_host_queue_entries
-WHERE NOT complete AND NOT active AND status="Queued" AND NOT aborted;
-"""
-
-DOWN_SQL="""
-"""
diff --git a/frontend/migrations/085_lease_hosts.py b/frontend/migrations/085_lease_hosts.py
deleted file mode 100644
index fd277bb..0000000
--- a/frontend/migrations/085_lease_hosts.py
+++ /dev/null
@@ -1,9 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_hosts ADD COLUMN leased TINYINT(1) NOT NULL DEFAULT '1';
-CREATE INDEX leased_hosts ON afe_hosts (leased, locked);
-"""
-
-DOWN_SQL = """
-DROP INDEX leased_hosts ON afe_hosts;
-ALTER TABLE afe_hosts DROP COLUMN leased;
-"""
diff --git a/frontend/migrations/086_add_invalidates_test_idx_to_tko_tests.py b/frontend/migrations/086_add_invalidates_test_idx_to_tko_tests.py
deleted file mode 100644
index ffded8c..0000000
--- a/frontend/migrations/086_add_invalidates_test_idx_to_tko_tests.py
+++ /dev/null
@@ -1,35 +0,0 @@
-ADD_COLUMN = """
-ALTER TABLE tko_tests
-ADD COLUMN `invalidates_test_idx` int(10) unsigned DEFAULT NULL;
-"""
-ADD_INDEX = """ALTER TABLE tko_tests ADD INDEX(invalidates_test_idx);"""
-ADD_FOREIGN_KEY = """
-ALTER TABLE tko_tests
-ADD CONSTRAINT invalidates_test_idx_fk FOREIGN KEY
-(`invalidates_test_idx`) REFERENCES `tko_tests`(`test_idx`)
-ON DELETE NO ACTION;
-"""
-DROP_FOREIGN_KEY = """
-ALTER TABLE tko_tests DROP FOREIGN KEY `invalidates_test_idx_fk`;
-"""
-DROP_COLUMN = """ALTER TABLE tko_tests DROP `invalidates_test_idx`; """
-
-def migrate_up(manager):
-    """Pick up the changes.
-
-    @param manager: A MigrationManager object.
-
-    """
-    manager.execute(ADD_COLUMN)
-    manager.execute(ADD_INDEX)
-    manager.execute(ADD_FOREIGN_KEY)
-
-
-def migrate_down(manager):
-    """Drop the changes.
-
-    @param manager: A MigrationManager object.
-
-    """
-    manager.execute(DROP_FOREIGN_KEY)
-    manager.execute(DROP_COLUMN)
diff --git a/frontend/migrations/087_add_fields_to_tko_test_view_2.py b/frontend/migrations/087_add_fields_to_tko_test_view_2.py
deleted file mode 100644
index 0d802e0..0000000
--- a/frontend/migrations/087_add_fields_to_tko_test_view_2.py
+++ /dev/null
@@ -1,68 +0,0 @@
-UP_SQL = """
-ALTER VIEW tko_test_view_2 AS
-SELECT  tko_tests.test_idx,
-        tko_tests.job_idx,
-        tko_tests.test AS test_name,
-        tko_tests.subdir,
-        tko_tests.kernel_idx,
-        tko_tests.status AS status_idx,
-        tko_tests.reason,
-        tko_tests.machine_idx,
-        tko_tests.invalid,
-        tko_tests.invalidates_test_idx,
-        tko_tests.started_time AS test_started_time,
-        tko_tests.finished_time AS test_finished_time,
-        tko_jobs.tag AS job_tag,
-        tko_jobs.label AS job_name,
-        tko_jobs.username AS job_owner,
-        tko_jobs.queued_time AS job_queued_time,
-        tko_jobs.started_time AS job_started_time,
-        tko_jobs.finished_time AS job_finished_time,
-        tko_jobs.afe_job_id AS afe_job_id,
-        tko_machines.hostname AS hostname,
-        tko_machines.machine_group AS platform,
-        tko_machines.owner AS machine_owner,
-        tko_kernels.kernel_hash,
-        tko_kernels.base AS kernel_base,
-        tko_kernels.printable AS kernel,
-        tko_status.word AS status
-FROM tko_tests
-INNER JOIN tko_jobs ON tko_jobs.job_idx = tko_tests.job_idx
-INNER JOIN tko_machines ON tko_machines.machine_idx = tko_jobs.machine_idx
-INNER JOIN tko_kernels ON tko_kernels.kernel_idx = tko_tests.kernel_idx
-INNER JOIN tko_status ON tko_status.status_idx = tko_tests.status;
-"""
-
-
-DOWN_SQL = """
-ALTER VIEW tko_test_view_2 AS
-SELECT  tko_tests.test_idx,
-        tko_tests.job_idx,
-        tko_tests.test AS test_name,
-        tko_tests.subdir,
-        tko_tests.kernel_idx,
-        tko_tests.status AS status_idx,
-        tko_tests.reason,
-        tko_tests.machine_idx,
-        tko_tests.started_time AS test_started_time,
-        tko_tests.finished_time AS test_finished_time,
-        tko_jobs.tag AS job_tag,
-        tko_jobs.label AS job_name,
-        tko_jobs.username AS job_owner,
-        tko_jobs.queued_time AS job_queued_time,
-        tko_jobs.started_time AS job_started_time,
-        tko_jobs.finished_time AS job_finished_time,
-        tko_jobs.afe_job_id AS afe_job_id,
-        tko_machines.hostname AS hostname,
-        tko_machines.machine_group AS platform,
-        tko_machines.owner AS machine_owner,
-        tko_kernels.kernel_hash,
-        tko_kernels.base AS kernel_base,
-        tko_kernels.printable AS kernel,
-        tko_status.word AS status
-FROM tko_tests
-INNER JOIN tko_jobs ON tko_jobs.job_idx = tko_tests.job_idx
-INNER JOIN tko_machines ON tko_machines.machine_idx = tko_jobs.machine_idx
-INNER JOIN tko_kernels ON tko_kernels.kernel_idx = tko_tests.kernel_idx
-INNER JOIN tko_status ON tko_status.status_idx = tko_tests.status;
-"""
diff --git a/frontend/migrations/088_synch_count_not_null.py b/frontend/migrations/088_synch_count_not_null.py
deleted file mode 100644
index e4dd7d5..0000000
--- a/frontend/migrations/088_synch_count_not_null.py
+++ /dev/null
@@ -1,8 +0,0 @@
-UP_SQL = """
-UPDATE afe_jobs SET synch_count=0 WHERE synch_count IS NULL;
-ALTER TABLE afe_jobs MODIFY synch_count int(11) NOT NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_jobs MODIFY synch_count int(11) NULL;
-"""
\ No newline at end of file
diff --git a/frontend/migrations/089_index_afe_jobs_on_owner.py b/frontend/migrations/089_index_afe_jobs_on_owner.py
deleted file mode 100644
index b382d2f..0000000
--- a/frontend/migrations/089_index_afe_jobs_on_owner.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-CREATE INDEX owner_index ON afe_jobs (owner);
-"""
-
-DOWN_SQL = """
-DROP INDEX owner_index ON afe_jobs;
-"""
diff --git a/frontend/migrations/090_add_field_time_finished_to_afe_special_tasks.py b/frontend/migrations/090_add_field_time_finished_to_afe_special_tasks.py
deleted file mode 100644
index 359d462..0000000
--- a/frontend/migrations/090_add_field_time_finished_to_afe_special_tasks.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_special_tasks ADD COLUMN time_finished DATETIME;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_special_tasks DROP COLUMN time_finished;
-"""
diff --git a/frontend/migrations/091_add_hqe_finished_on.py b/frontend/migrations/091_add_hqe_finished_on.py
deleted file mode 100644
index ff499a3..0000000
--- a/frontend/migrations/091_add_hqe_finished_on.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_host_queue_entries ADD COLUMN finished_on datetime NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_host_queue_entries DROP COLUMN finished_on;
-"""
diff --git a/frontend/migrations/092_host_attributes_permissions.py b/frontend/migrations/092_host_attributes_permissions.py
deleted file mode 100644
index 7250ac2..0000000
--- a/frontend/migrations/092_host_attributes_permissions.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import common
-from autotest_lib.database import db_utils
-
-UP_SQL = """
-SET @group_id = (SELECT id FROM auth_group WHERE name = 'Basic Admin');
-
-INSERT IGNORE INTO auth_group_permissions (group_id, permission_id)
-SELECT @group_id, id FROM auth_permission WHERE codename IN (
-  'add_hostattribute', 'change_hostattribute', 'delete_hostattribute');
-"""
-
-DOWN_SQL = """
-DELETE auth_group_permissions.* FROM
-auth_group INNER JOIN auth_group_permissions ON (
-  auth_group.id = auth_group_permissions.group_id)
-INNER JOIN auth_permission ON (
-  auth_group_permissions.permission_id = auth_permission.id)
-WHERE auth_group.name = 'Basic Admin' AND codename IN (
-  'add_hostattribute', 'change_hostattribute', 'delete_hostattribute');
-"""
-
-
-def migrate_up(manager):
-    if db_utils.auth_tables_exist(manager):
-        manager.execute_script(UP_SQL)
-
-
-def migrate_down(manager):
-    if db_utils.auth_tables_exist(manager):
-        manager.execute_script(DOWN_SQL)
diff --git a/frontend/migrations/093_make_afe_users_login_unique.py b/frontend/migrations/093_make_afe_users_login_unique.py
deleted file mode 100644
index 4b37f16..0000000
--- a/frontend/migrations/093_make_afe_users_login_unique.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-CREATE UNIQUE INDEX login_unique ON afe_users (login);
-"""
-
-DOWN_SQL = """
-DROP INDEX login_unique ON afe_users;
-"""
\ No newline at end of file
diff --git a/frontend/migrations/094_add_shards.py b/frontend/migrations/094_add_shards.py
deleted file mode 100644
index fbf4c8a..0000000
--- a/frontend/migrations/094_add_shards.py
+++ /dev/null
@@ -1,31 +0,0 @@
-UP_SQL = """
-CREATE TABLE afe_shards (
-  id INT NOT NULL AUTO_INCREMENT PRIMARY KEY
-) ENGINE=innodb;
-
-ALTER TABLE afe_jobs ADD COLUMN shard_id INT NULL;
-ALTER TABLE afe_jobs ADD CONSTRAINT jobs_to_shard_ibfk
-    FOREIGN KEY (shard_id) REFERENCES afe_shards(id);
-
-CREATE TABLE afe_shards_labels (
-    id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
-    shard_id INT NOT NULL,
-    label_id INT NOT NULL
-) ENGINE=InnoDB;
-
-ALTER TABLE `afe_shards_labels` ADD CONSTRAINT shard_shard_id_fk
-    FOREIGN KEY (`shard_id`) REFERENCES `afe_shards` (`id`);
-ALTER TABLE `afe_shards_labels` ADD CONSTRAINT shard_label_id_fk
-    FOREIGN KEY (`label_id`) REFERENCES `afe_labels` (`id`);
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_jobs DROP FOREIGN KEY jobs_to_shard_ibfk;
-ALTER TABLE afe_jobs DROP COLUMN shard_id;
-
-ALTER TABLE afe_shards_labels DROP FOREIGN KEY shard_label_id_fk;
-ALTER TABLE afe_shards_labels DROP FOREIGN KEY shard_shard_id_fk;
-DROP TABLE afe_shards_labels;
-
-DROP TABLE afe_shards;
-"""
diff --git a/frontend/migrations/095_shards_hostnames_and_hosts.py b/frontend/migrations/095_shards_hostnames_and_hosts.py
deleted file mode 100644
index e3a5f93..0000000
--- a/frontend/migrations/095_shards_hostnames_and_hosts.py
+++ /dev/null
@@ -1,13 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_shards ADD COLUMN hostname VARCHAR(255) NOT NULL;
-ALTER TABLE afe_hosts ADD COLUMN shard_id INT NULL;
-ALTER TABLE afe_hosts ADD CONSTRAINT hosts_to_shard_ibfk
-    FOREIGN KEY (shard_id) REFERENCES afe_shards(id);
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_hosts DROP FOREIGN KEY hosts_to_shard_ibfk;
-ALTER TABLE afe_hosts DROP COLUMN shard_id;
-
-ALTER TABLE afe_shards DROP COLUMN hostname;
-"""
diff --git a/frontend/migrations/096_only_one_shard_per_label.py b/frontend/migrations/096_only_one_shard_per_label.py
deleted file mode 100644
index 9e8f86c..0000000
--- a/frontend/migrations/096_only_one_shard_per_label.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Adding the unique constraint will lead to the index being destroyed and a new
-# unique index being created.
-# To not rely on this implicit behavior, we explicitly delete the old index,
-# and then create the new indexes.
-# This is not really needed for the upwards migration, but if we can't be sure
-# about the indexes names, it gets harder to do the DOWN migration later.
-# Therefore we do this magic manually.
-UP_SQL = """
-ALTER TABLE afe_shards_labels DROP FOREIGN KEY shard_label_id_fk;
-ALTER TABLE afe_shards_labels DROP INDEX shard_label_id_fk;
-ALTER TABLE `afe_shards_labels` ADD UNIQUE `shard_label_id_uc` (`label_id`);
-ALTER TABLE `afe_shards_labels` ADD CONSTRAINT shard_label_id_fk
-        FOREIGN KEY (`label_id`) REFERENCES `afe_labels` (`id`);
-"""
-
-# Normally removing unique constraints is done just by deleting the index.
-# This doesn't work here, as the index is also needed for the foreign key.
-# Making an index back non-unique doesn't work in mysql.
-# Therefore delete the foreign key, delete the index, re-add the foreign key.
-DOWN_SQL = """
-ALTER TABLE afe_shards_labels DROP FOREIGN KEY shard_label_id_fk;
-ALTER TABLE afe_shards_labels DROP INDEX shard_label_id_uc;
-ALTER TABLE `afe_shards_labels` ADD CONSTRAINT shard_label_id_fk
-        FOREIGN KEY (`label_id`) REFERENCES `afe_labels` (`id`);
-"""
diff --git a/frontend/migrations/097_add_stable_versions.py b/frontend/migrations/097_add_stable_versions.py
deleted file mode 100644
index 78e983f..0000000
--- a/frontend/migrations/097_add_stable_versions.py
+++ /dev/null
@@ -1,12 +0,0 @@
-UP_SQL = """
-CREATE TABLE afe_stable_versions (
-  id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
-  board VARCHAR(255) NOT NULL,
-  version VARCHAR(255) NOT NULL,
-  UNIQUE KEY `board_UNIQUE` (`board`)
-) ENGINE=innodb;
-"""
-
-DOWN_SQL = """
-DROP TABLE afe_stable_versions;
-"""
diff --git a/frontend/migrations/098_add_require_ssp.py b/frontend/migrations/098_add_require_ssp.py
deleted file mode 100644
index 3959a58..0000000
--- a/frontend/migrations/098_add_require_ssp.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_jobs ADD COLUMN require_ssp tinyint(1) NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_jobs DROP COLUMN require_ssp;
-"""
diff --git a/frontend/migrations/099_drop_afe_job_keyvals_foreign_key.py b/frontend/migrations/099_drop_afe_job_keyvals_foreign_key.py
deleted file mode 100644
index ba8458a..0000000
--- a/frontend/migrations/099_drop_afe_job_keyvals_foreign_key.py
+++ /dev/null
@@ -1,19 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_job_keyvals DROP FOREIGN KEY `afe_job_keyvals_ibfk_1`;
-"""
-
-# Before syncing down, you must cleanup the rows that do not meet
-# the foreign key constraint by:
-#
-# DELETE t1 FROM afe_job_keyvals t1
-# LEFT OUTER JOIN afe_jobs t2
-# ON (t1.job_id = t2.id) WHERE t2.id is NULL;
-#
-# Execute with care!
-
-DOWN_SQL = """
-ALTER TABLE afe_job_keyvals
-ADD CONSTRAINT afe_job_keyvals_ibfk_1 FOREIGN KEY
-(`job_id`) REFERENCES `afe_jobs`(`id`)
-ON DELETE NO ACTION;
-"""
diff --git a/frontend/migrations/100_dummy_migrate.py b/frontend/migrations/100_dummy_migrate.py
deleted file mode 100644
index f48309d..0000000
--- a/frontend/migrations/100_dummy_migrate.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# This database migration does nothing. It works as a replacement for a reverted
-# migrate CL: https://chromium-review.googlesource.com/#/c/253760
-# For routine database migration revert, a new CL should be added to add
-# DOWN_SQL of the reverted migration as UP_SQL. However, in that CL's case,
-# DOWN_SQL is not available.
-# The dummy migration avoid the requirement to manually downgrade migrate_info
-# in each database.
-
-UP_SQL = """
-"""
-
-DOWN_SQL="""
-"""
-
diff --git a/frontend/migrations/101_add_lock_reason_afe_hosts.py b/frontend/migrations/101_add_lock_reason_afe_hosts.py
deleted file mode 100644
index 3693f2f..0000000
--- a/frontend/migrations/101_add_lock_reason_afe_hosts.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_hosts ADD COLUMN lock_reason TEXT DEFAULT NULL
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_hosts DROP COLUMN lock_reason
-"""
diff --git a/frontend/migrations/102_change_invalidates_test_idx_fk_constraint.py b/frontend/migrations/102_change_invalidates_test_idx_fk_constraint.py
deleted file mode 100644
index b672713..0000000
--- a/frontend/migrations/102_change_invalidates_test_idx_fk_constraint.py
+++ /dev/null
@@ -1,15 +0,0 @@
-UP_SQL = """
-ALTER TABLE tko_tests
-DROP FOREIGN KEY `invalidates_test_idx_fk`,
-ADD CONSTRAINT `invalidates_test_idx_fk_1`
-FOREIGN KEY (`invalidates_test_idx`)
-REFERENCES `tko_tests`(`test_idx`) ON DELETE CASCADE;
-"""
-
-DOWN_SQL = """
-ALTER TABLE tko_tests
-DROP FOREIGN KEY `invalidates_test_idx_fk_1`,
-ADD CONSTRAINT `invalidates_test_idx_fk`
-FOREIGN KEY (`invalidates_test_idx`)
-REFERENCES `tko_tests`(`test_idx`) ON DELETE NO ACTION;
-"""
diff --git a/frontend/migrations/103_add_5_columns_to_tko_jobs.py b/frontend/migrations/103_add_5_columns_to_tko_jobs.py
deleted file mode 100644
index ef22387..0000000
--- a/frontend/migrations/103_add_5_columns_to_tko_jobs.py
+++ /dev/null
@@ -1,94 +0,0 @@
-UP_SQL = """
-ALTER TABLE tko_jobs
-ADD COLUMN (afe_parent_job_id INT default NULL,
-            build varchar(255) default NULL,
-            build_version varchar(255) default NULL,
-            suite varchar(40) default NULL,
-            board varchar(40) default NULL),
-ADD INDEX afe_parent_job_id (afe_parent_job_id),
-ADD INDEX build (build),
-ADD INDEX build_version_suite_board (build_version, suite, board);
-
-ALTER VIEW tko_test_view_2 AS
-SELECT  tko_tests.test_idx,
-        tko_tests.job_idx,
-        tko_tests.test AS test_name,
-        tko_tests.subdir,
-        tko_tests.kernel_idx,
-        tko_tests.status AS status_idx,
-        tko_tests.reason,
-        tko_tests.machine_idx,
-        tko_tests.invalid,
-        tko_tests.invalidates_test_idx,
-        tko_tests.started_time AS test_started_time,
-        tko_tests.finished_time AS test_finished_time,
-        tko_jobs.tag AS job_tag,
-        tko_jobs.label AS job_name,
-        tko_jobs.username AS job_owner,
-        tko_jobs.queued_time AS job_queued_time,
-        tko_jobs.started_time AS job_started_time,
-        tko_jobs.finished_time AS job_finished_time,
-        tko_jobs.afe_job_id AS afe_job_id,
-        tko_jobs.afe_parent_job_id AS afe_parent_job_id,
-        tko_jobs.build as build,
-        tko_jobs.build_version as build_version,
-        tko_jobs.suite as suite,
-        tko_jobs.board as board,
-        tko_machines.hostname AS hostname,
-        tko_machines.machine_group AS platform,
-        tko_machines.owner AS machine_owner,
-        tko_kernels.kernel_hash,
-        tko_kernels.base AS kernel_base,
-        tko_kernels.printable AS kernel,
-        tko_status.word AS status
-FROM tko_tests
-INNER JOIN tko_jobs ON tko_jobs.job_idx = tko_tests.job_idx
-INNER JOIN tko_machines ON tko_machines.machine_idx = tko_jobs.machine_idx
-INNER JOIN tko_kernels ON tko_kernels.kernel_idx = tko_tests.kernel_idx
-INNER JOIN tko_status ON tko_status.status_idx = tko_tests.status;
-"""
-
-DOWN_SQL = """
-ALTER VIEW tko_test_view_2 AS
-SELECT  tko_tests.test_idx,
-        tko_tests.job_idx,
-        tko_tests.test AS test_name,
-        tko_tests.subdir,
-        tko_tests.kernel_idx,
-        tko_tests.status AS status_idx,
-        tko_tests.reason,
-        tko_tests.machine_idx,
-        tko_tests.invalid,
-        tko_tests.invalidates_test_idx,
-        tko_tests.started_time AS test_started_time,
-        tko_tests.finished_time AS test_finished_time,
-        tko_jobs.tag AS job_tag,
-        tko_jobs.label AS job_name,
-        tko_jobs.username AS job_owner,
-        tko_jobs.queued_time AS job_queued_time,
-        tko_jobs.started_time AS job_started_time,
-        tko_jobs.finished_time AS job_finished_time,
-        tko_jobs.afe_job_id AS afe_job_id,
-        tko_machines.hostname AS hostname,
-        tko_machines.machine_group AS platform,
-        tko_machines.owner AS machine_owner,
-        tko_kernels.kernel_hash,
-        tko_kernels.base AS kernel_base,
-        tko_kernels.printable AS kernel,
-        tko_status.word AS status
-FROM tko_tests
-INNER JOIN tko_jobs ON tko_jobs.job_idx = tko_tests.job_idx
-INNER JOIN tko_machines ON tko_machines.machine_idx = tko_jobs.machine_idx
-INNER JOIN tko_kernels ON tko_kernels.kernel_idx = tko_tests.kernel_idx
-INNER JOIN tko_status ON tko_status.status_idx = tko_tests.status;
-
-ALTER TABLE tko_jobs
-DROP INDEX afe_parent_job_id,
-DROP INDEX build,
-DROP INDEX build_version_suite_board,
-DROP COLUMN afe_parent_job_id,
-DROP COLUMN build,
-DROP COLUMN build_version,
-DROP COLUMN suite,
-DROP COLUMN board;
-"""
diff --git a/frontend/migrations/104_index_job_name.py b/frontend/migrations/104_index_job_name.py
deleted file mode 100644
index f8e8096..0000000
--- a/frontend/migrations/104_index_job_name.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-CREATE INDEX name_index ON afe_jobs (name);
-"""
-
-DOWN_SQL = """
-DROP INDEX name_index ON afe_jobs;
-"""
diff --git a/frontend/migrations/105_update_test_label_foreign_keys.py b/frontend/migrations/105_update_test_label_foreign_keys.py
deleted file mode 100644
index d92b5f0..0000000
--- a/frontend/migrations/105_update_test_label_foreign_keys.py
+++ /dev/null
@@ -1,26 +0,0 @@
-ADD_FOREIGN_KEYS = """
-ALTER TABLE tko_test_labels_tests DROP FOREIGN KEY tests_labels_tests_ibfk_1;
-ALTER TABLE tko_test_labels_tests ADD CONSTRAINT tests_labels_tests_ibfk_1
-    FOREIGN KEY (testlabel_id) REFERENCES tko_test_labels (id)
-    ON DELETE CASCADE;
-
-ALTER TABLE tko_test_labels_tests DROP FOREIGN KEY tests_labels_tests_ibfk_2;
-ALTER TABLE tko_test_labels_tests ADD CONSTRAINT tests_labels_tests_ibfk_2
-    FOREIGN KEY (test_id) REFERENCES tko_tests (test_idx) ON DELETE CASCADE;
-"""
-
-DROP_FOREIGN_KEYS = """
-ALTER TABLE tko_test_labels_tests DROP FOREIGN KEY tests_labels_tests_ibfk_1;
-ALTER TABLE tko_test_labels_tests ADD CONSTRAINT tests_labels_tests_ibfk_1
-    FOREIGN KEY (testlabel_id) REFERENCES tko_test_labels (id);
-
-ALTER TABLE tko_test_labels_tests DROP FOREIGN KEY tests_labels_tests_ibfk_2;
-ALTER TABLE tko_test_labels_tests ADD CONSTRAINT tests_labels_tests_ibfk_2
-    FOREIGN KEY (test_id) REFERENCES tko_tests (test_idx);
-"""
-
-def migrate_up(mgr):
-    mgr.execute_script(ADD_FOREIGN_KEYS)
-
-def migrate_down(mgr):
-    mgr.execute_script(DROP_FOREIGN_KEYS)
diff --git a/frontend/migrations/106_update_test_job_keyvals_foreign_keys.py b/frontend/migrations/106_update_test_job_keyvals_foreign_keys.py
deleted file mode 100644
index 24277f9..0000000
--- a/frontend/migrations/106_update_test_job_keyvals_foreign_keys.py
+++ /dev/null
@@ -1,17 +0,0 @@
-ADD_FOREIGN_KEYS = """
-ALTER TABLE tko_job_keyvals DROP FOREIGN KEY tko_job_keyvals_ibfk_1;
-ALTER TABLE tko_job_keyvals ADD CONSTRAINT tko_job_keyvals_ibfk_1
-    FOREIGN KEY (job_id) REFERENCES tko_jobs (job_idx) ON DELETE CASCADE;
-"""
-
-DROP_FOREIGN_KEYS = """
-ALTER TABLE tko_job_keyvals DROP FOREIGN KEY tko_job_keyvals_ibfk_1;
-ALTER TABLE tko_job_keyvals ADD CONSTRAINT tko_job_keyvals_ibfk_1
-    FOREIGN KEY (job_id) REFERENCES tko_jobs (job_idx);
-"""
-
-def migrate_up(mgr):
-    mgr.execute_script(ADD_FOREIGN_KEYS)
-
-def migrate_down(mgr):
-    mgr.execute_script(DROP_FOREIGN_KEYS)
diff --git a/frontend/migrations/107_index_tko_jobs_started_time.py b/frontend/migrations/107_index_tko_jobs_started_time.py
deleted file mode 100644
index 125daf7..0000000
--- a/frontend/migrations/107_index_tko_jobs_started_time.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-CREATE INDEX started_time_index ON tko_jobs (started_time);
-"""
-
-DOWN_SQL = """
-DROP INDEX started_time_index ON tko_jobs;
-"""
diff --git a/frontend/migrations/108_index_tko_tests_finished_time.py b/frontend/migrations/108_index_tko_tests_finished_time.py
deleted file mode 100644
index 5360a8c..0000000
--- a/frontend/migrations/108_index_tko_tests_finished_time.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-CREATE INDEX finished_time_idx ON tko_tests (finished_time);
-"""
-
-DOWN_SQL = """
-DROP INDEX finished_time_idx ON tko_tests;
-"""
diff --git a/frontend/migrations/109_change_tko_test_attributes_id_data_type.py b/frontend/migrations/109_change_tko_test_attributes_id_data_type.py
deleted file mode 100644
index 38a5b79..0000000
--- a/frontend/migrations/109_change_tko_test_attributes_id_data_type.py
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE tko_test_attributes MODIFY id bigint(20) AUTO_INCREMENT;
-"""
-
-DOWN_SQL = """
-ALTER TABLE tko_test_attributes MODIFY id int(11) AUTO_INCREMENT;
-"""
diff --git a/frontend/migrations/110_remove_synch_id.py b/frontend/migrations/110_remove_synch_id.py
deleted file mode 100644
index d3a6852..0000000
--- a/frontend/migrations/110_remove_synch_id.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# This migration has been nulled out, see crbug.com/719628 or the backup
-# ".ignore" migration files in this directory.
-UP_SQL = "SELECT 1;"
-
-DOWN_SQL = "SELECT 1;"
diff --git a/frontend/migrations/111_add_back_synch_id_temporarily.py b/frontend/migrations/111_add_back_synch_id_temporarily.py
deleted file mode 100644
index d3a6852..0000000
--- a/frontend/migrations/111_add_back_synch_id_temporarily.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# This migration has been nulled out, see crbug.com/719628 or the backup
-# ".ignore" migration files in this directory.
-UP_SQL = "SELECT 1;"
-
-DOWN_SQL = "SELECT 1;"
diff --git a/frontend/migrations/112_remove_synch_id.py b/frontend/migrations/112_remove_synch_id.py
deleted file mode 100644
index d3a6852..0000000
--- a/frontend/migrations/112_remove_synch_id.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# This migration has been nulled out, see crbug.com/719628 or the backup
-# ".ignore" migration files in this directory.
-UP_SQL = "SELECT 1;"
-
-DOWN_SQL = "SELECT 1;"
diff --git a/frontend/migrations/113_add_back_sync_id_temporarily.py b/frontend/migrations/113_add_back_sync_id_temporarily.py
deleted file mode 100644
index d3a6852..0000000
--- a/frontend/migrations/113_add_back_sync_id_temporarily.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# This migration has been nulled out, see crbug.com/719628 or the backup
-# ".ignore" migration files in this directory.
-UP_SQL = "SELECT 1;"
-
-DOWN_SQL = "SELECT 1;"
diff --git a/frontend/migrations/114_add_hqe_start_times.py b/frontend/migrations/114_add_hqe_start_times.py
deleted file mode 100644
index 7fce3ff..0000000
--- a/frontend/migrations/114_add_hqe_start_times.py
+++ /dev/null
@@ -1,13 +0,0 @@
-UP_SQL = """
-CREATE TABLE afe_host_queue_entry_start_times (
-    id INT NOT NULL AUTO_INCREMENT,
-    insert_time TIMESTAMP NOT NULL,
-    highest_hqe_id INT NOT NULL,
-    PRIMARY KEY (id),
-    INDEX afe_hqe_insert_times_index (insert_time)
-);
-"""
-
-DOWN_SQL = """
-DROP TABLE afe_host_queue_entry_start_times;
-"""
diff --git a/frontend/migrations/115_add_hqe_index_updating_event.py b/frontend/migrations/115_add_hqe_index_updating_event.py
deleted file mode 100644
index 653bbaf..0000000
--- a/frontend/migrations/115_add_hqe_index_updating_event.py
+++ /dev/null
@@ -1,11 +0,0 @@
-UP_SQL = """
-CREATE EVENT afe_add_entry_to_hqe_start_times
-ON SCHEDULE EVERY 10 MINUTE DO
-INSERT INTO afe_host_queue_entry_start_times (insert_time, highest_hqe_id)
-SELECT NOW(), MAX(afe_host_queue_entries.id)
-FROM afe_host_queue_entries;
-"""
-
-DOWN_SQL = """
-DROP EVENT afe_add_entry_to_hqe_start_times;
-"""
diff --git a/frontend/migrations/116_add_label_lockout_table.py b/frontend/migrations/116_add_label_lockout_table.py
deleted file mode 100644
index 35579e2..0000000
--- a/frontend/migrations/116_add_label_lockout_table.py
+++ /dev/null
@@ -1,13 +0,0 @@
-UP_SQL = """
-CREATE TABLE afe_label_lockout_times (
-    id INT NOT NULL AUTO_INCREMENT,
-    label_name VARCHAR(750) NOT NULL,
-    lockout_end_time TIMESTAMP not null,
-    PRIMARY KEY (id),
-    INDEX afe_lockout_times_index (lockout_end_time)
-);
-"""
-
-DOWN_SQL = """
-DROP TABLE afe_label_lockout_times;
-"""
diff --git a/frontend/migrations/117_drop_label_lockout_table.py b/frontend/migrations/117_drop_label_lockout_table.py
deleted file mode 100644
index 8b08612..0000000
--- a/frontend/migrations/117_drop_label_lockout_table.py
+++ /dev/null
@@ -1,13 +0,0 @@
-UP_SQL = """
-DROP TABLE afe_label_lockout_times;
-"""
-
-DOWN_SQL = """
-CREATE TABLE afe_label_lockout_times (
-    id INT NOT NULL AUTO_INCREMENT,
-    label_name VARCHAR(750) NOT NULL,
-    lockout_end_time TIMESTAMP not null,
-    PRIMARY KEY (id),
-    INDEX afe_lockout_times_index (lockout_end_time)
-);
-"""
diff --git a/frontend/migrations/118_add_hqe_indices.py b/frontend/migrations/118_add_hqe_indices.py
deleted file mode 100644
index cc90df2..0000000
--- a/frontend/migrations/118_add_hqe_indices.py
+++ /dev/null
@@ -1,27 +0,0 @@
-INDICES = (
-    ('afe_host_queue_entries', 'active'),
-    ('afe_host_queue_entries', 'complete'),
-    ('afe_host_queue_entries', 'deleted'),
-    ('afe_host_queue_entries', 'aborted'),
-    ('afe_host_queue_entries', 'started_on'),
-    ('afe_host_queue_entries', 'finished_on'),
-    ('afe_host_queue_entries', 'job_id'),
-)
-
-def get_index_name(table, field):
-    """Formats the index name from a |table| and |field|."""
-    return table + '_' + field
-
-
-def migrate_up(manager):
-    """Creates the indices."""
-    for table, field in INDICES:
-        manager.execute('CREATE INDEX %s ON %s (%s)' %
-                        (get_index_name(table, field), table, field))
-
-
-def migrate_down(manager):
-    """Removes the indices."""
-    for table, field in INDICES:
-        manager.execute('DROP INDEX %s ON %s' %
-                        (get_index_name(table, field), table))
diff --git a/frontend/migrations/119_add_tko_jobs_indices.py b/frontend/migrations/119_add_tko_jobs_indices.py
deleted file mode 100644
index 8ce83b3..0000000
--- a/frontend/migrations/119_add_tko_jobs_indices.py
+++ /dev/null
@@ -1,9 +0,0 @@
-UP_SQL = """
-CREATE INDEX queued_time ON tko_jobs (queued_time);
-CREATE INDEX finished_time ON tko_jobs (finished_time);
-"""
-
-DOWN_SQL = """
-DROP INDEX queued_time ON tko_jobs;
-DROP INDEX finished_time ON tko_jobs;
-"""
diff --git a/frontend/migrations/120_add_job_handoff_table.py b/frontend/migrations/120_add_job_handoff_table.py
deleted file mode 100644
index 67e0eda..0000000
--- a/frontend/migrations/120_add_job_handoff_table.py
+++ /dev/null
@@ -1,13 +0,0 @@
-UP_SQL = """
-CREATE TABLE afe_job_handoffs (
-  job_id int(11) NOT NULL,
-  PRIMARY KEY (job_id),
-  CONSTRAINT job_fk FOREIGN KEY (job_id)
-    REFERENCES afe_jobs(id)
-    ON DELETE CASCADE
-) ENGINE=INNODB;
-"""
-
-DOWN_SQL = """
-DROP TABLE afe_job_handoffs;
-"""
diff --git a/frontend/migrations/121_update_afe_stable_versions_table_add_archive_url.py b/frontend/migrations/121_update_afe_stable_versions_table_add_archive_url.py
deleted file mode 100644
index 275ca79..0000000
--- a/frontend/migrations/121_update_afe_stable_versions_table_add_archive_url.py
+++ /dev/null
@@ -1,9 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_stable_versions
-  ADD COLUMN archive_url TEXT;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_stable_versions
-  DROP COLUMN archive_url;
-"""
diff --git a/frontend/migrations/122_add_job_handoff_completed_columns.py b/frontend/migrations/122_add_job_handoff_completed_columns.py
deleted file mode 100644
index 091ca46..0000000
--- a/frontend/migrations/122_add_job_handoff_completed_columns.py
+++ /dev/null
@@ -1,11 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_job_handoffs ADD COLUMN (
-  created datetime NOT NULL,
-  completed tinyint(1) NOT NULL
-);
-UPDATE afe_job_handoffs SET completed = 1;
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_job_handoffs DROP COLUMN created, DROP COLUMN completed;
-"""
diff --git a/frontend/migrations/123_add_static_labels_tables.py b/frontend/migrations/123_add_static_labels_tables.py
deleted file mode 100644
index c6f17a9..0000000
--- a/frontend/migrations/123_add_static_labels_tables.py
+++ /dev/null
@@ -1,37 +0,0 @@
-UP_SQL = """
-CREATE TABLE afe_static_labels (
-  id int(11) NOT NULL auto_increment,
-  name varchar(750) default NULL,
-  kernel_config varchar(255) default NULL,
-  platform tinyint(1) default '0',
-  invalid tinyint(1) NOT NULL,
-  only_if_needed tinyint(1) NOT NULL,
-  atomic_group_id int(11) default NULL,
-  PRIMARY KEY (id),
-  UNIQUE KEY name (name(50)),
-  KEY atomic_group_id (atomic_group_id),
-  CONSTRAINT afe_static_labels_idfk_1
-  FOREIGN KEY (atomic_group_id)
-    REFERENCES afe_atomic_groups (id) ON DELETE NO ACTION
-) ENGINE=InnoDB;
-
-CREATE TABLE afe_static_hosts_labels (
-  id int(11) NOT NULL auto_increment,
-  host_id int(11) default NULL,
-  staticlabel_id int(11) default NULL,
-  PRIMARY KEY (id),
-  UNIQUE KEY hosts_labels_both_ids (staticlabel_id,host_id),
-  KEY hosts_labels_host_id (host_id),
-  CONSTRAINT static_hosts_labels_host_id_fk
-  FOREIGN KEY (host_id)
-    REFERENCES afe_hosts (id) ON DELETE NO ACTION,
-  CONSTRAINT static_hosts_labels_label_id_fk
-  FOREIGN KEY (staticlabel_id)
-    REFERENCES afe_static_labels (id) ON DELETE NO ACTION
-) ENGINE=InnoDB;
-"""
-
-DOWN_SQL = """
-DROP TABLE IF EXISTS afe_static_labels;
-DROP TABLE IF EXISTS afe_static_hosts_labels;
-"""
diff --git a/frontend/migrations/124_add_replaced_column_to_afe_labels.py b/frontend/migrations/124_add_replaced_column_to_afe_labels.py
deleted file mode 100644
index 86828c0..0000000
--- a/frontend/migrations/124_add_replaced_column_to_afe_labels.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# This migration has been nulled out, see crbug.com/796210 &
-# crrev.com/c/836732 for details.
-UP_SQL = "SELECT 1;"
-
-DOWN_SQL = "SELECT 1;"
diff --git a/frontend/migrations/125_add_replaced_labels_tables.py b/frontend/migrations/125_add_replaced_labels_tables.py
deleted file mode 100644
index db1e9e7..0000000
--- a/frontend/migrations/125_add_replaced_labels_tables.py
+++ /dev/null
@@ -1,14 +0,0 @@
-UP_SQL = """
-CREATE TABLE afe_replaced_labels (
-  id int(11) NOT NULL auto_increment,
-  label_id int(11) default NULL,
-  PRIMARY KEY (id),
-  UNIQUE KEY label_id (label_id),
-  FOREIGN KEY (label_id)
-    REFERENCES afe_labels (id) ON DELETE CASCADE
-) ENGINE=InnoDB;
-"""
-
-DOWN_SQL = """
-DROP TABLE IF EXISTS afe_replaced_labels;
-"""
diff --git a/frontend/migrations/126_add_job_handoff_drone_column.py b/frontend/migrations/126_add_job_handoff_drone_column.py
deleted file mode 100644
index 8b162b8..0000000
--- a/frontend/migrations/126_add_job_handoff_drone_column.py
+++ /dev/null
@@ -1,9 +0,0 @@
-UP_SQL = """
-ALTER TABLE afe_job_handoffs ADD COLUMN (
-  drone varchar(128) NULL
-);
-"""
-
-DOWN_SQL = """
-ALTER TABLE afe_job_handoffs DROP COLUMN drone;
-"""
diff --git a/frontend/migrations/127_add_afe_static_host_attributes.py b/frontend/migrations/127_add_afe_static_host_attributes.py
deleted file mode 100644
index f16bc28..0000000
--- a/frontend/migrations/127_add_afe_static_host_attributes.py
+++ /dev/null
@@ -1,15 +0,0 @@
-UP_SQL = """
-CREATE TABLE `afe_static_host_attributes` (
-    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    `host_id` integer NOT NULL,
-    `attribute` varchar(90) NOT NULL,
-    `value` varchar(300) NOT NULL,
-    FOREIGN KEY (host_id)
-    REFERENCES afe_hosts (id) ON DELETE CASCADE,
-    KEY (attribute)
-) ENGINE=InnoDB;
-"""
-
-DOWN_SQL = """
-DROP TABLE IF EXISTS afe_static_host_attributes;
-"""
diff --git a/frontend/migrations/128_add_tko_task_references.py b/frontend/migrations/128_add_tko_task_references.py
deleted file mode 100644
index b5198cb..0000000
--- a/frontend/migrations/128_add_tko_task_references.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# task_id can be NULL because tko_jobs.afe_job_id, which it replaces, can be
-# NULL. Same for parent_task_id.
-UP_SQL = """
-CREATE TABLE tko_task_references (
-    id integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
-    reference_type enum('skylab', 'afe') NOT NULL,
-    tko_job_idx int(10) unsigned NOT NULL,
-    task_id varchar(20) DEFAULT NULL,
-    parent_task_id varchar(20) DEFAULT NULL,
-    CONSTRAINT tko_task_references_ibfk_1 FOREIGN KEY (tko_job_idx) REFERENCES tko_jobs (job_idx) ON DELETE CASCADE,
-    KEY reference_type_id (reference_type, id)
-) ENGINE=InnoDB;
-"""
-
-DOWN_SQL = """
-DROP TABLE IF EXISTS tko_task_references;
-"""
diff --git a/frontend/migrations/129_create_indices_tko_task_references.py b/frontend/migrations/129_create_indices_tko_task_references.py
deleted file mode 100644
index e849780..0000000
--- a/frontend/migrations/129_create_indices_tko_task_references.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# task_id can be NULL because tko_jobs.afe_job_id, which it replaces, can be
-# NULL. Same for parent_task_id.
-UP_SQL = """
-CREATE INDEX reference_type_task_id ON tko_task_references (reference_type, task_id);
-CREATE INDEX reference_type_parent_task_id ON tko_task_references (reference_type, parent_task_id);
-"""
-
-DOWN_SQL = """
-DROP INDEX reference_type_task_id ON tko_task_references;
-DROP INDEX reference_type_parent_task_id ON tko_task_references;
-"""
diff --git a/frontend/migrations/__init__.py b/frontend/migrations/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/frontend/migrations/__init__.py
+++ /dev/null
diff --git a/frontend/migrations/common.py b/frontend/migrations/common.py
deleted file mode 100644
index 4c8760b..0000000
--- a/frontend/migrations/common.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import os, sys
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-autotest_dir = os.path.abspath(os.path.join(dirname, "..", '..'))
-client_dir = os.path.join(autotest_dir, "client")
-sys.path.insert(0, client_dir)
-import setup_modules
-sys.path.pop(0)
-setup_modules.setup(base_path=autotest_dir, root_module_name="autotest_lib")
diff --git a/frontend/migrations/old_110_remove_synch_id.ignore b/frontend/migrations/old_110_remove_synch_id.ignore
deleted file mode 100644
index 8dc5e18..0000000
--- a/frontend/migrations/old_110_remove_synch_id.ignore
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE `afe_hosts` DROP COLUMN `synch_id`;
-"""
-
-DOWN_SQL = """
-ALTER TABLE `afe_hosts` ADD `synch_id` int(11) default NULL;
-"""
diff --git a/frontend/migrations/old_111_add_back_synch_id_temporarily.ignore b/frontend/migrations/old_111_add_back_synch_id_temporarily.ignore
deleted file mode 100644
index 839b8d2..0000000
--- a/frontend/migrations/old_111_add_back_synch_id_temporarily.ignore
+++ /dev/null
@@ -1,7 +0,0 @@
-UP_SQL = """
-ALTER TABLE `afe_hosts` ADD `synch_id` int(11) default NULL;
-"""
-
-DOWN_SQL = """
-ALTER TABLE `afe_hosts` DROP COLUMN `synch_id`;
-"""
diff --git a/frontend/server/models.py b/frontend/server/models.py
index 381b7f4..b537637 100644
--- a/frontend/server/models.py
+++ b/frontend/server/models.py
@@ -101,7 +101,7 @@
             'afe',
             'crash_server',
             'database',
-            'database_slave',
+            'database_slave', # nocheck
             'devserver',
             'drone',
             'golo_proxy',
diff --git a/frontend/setup_test_environment.py b/frontend/setup_test_environment.py
index 4895a5b..f588ee6 100644
--- a/frontend/setup_test_environment.py
+++ b/frontend/setup_test_environment.py
@@ -78,4 +78,4 @@
     interest from the command line.
     """
     for query in connection.queries:
-        print query['sql'] + ';\n'
+        print(query['sql'] + ';\n')
diff --git a/frontend/tko/csv_encoder.py b/frontend/tko/csv_encoder.py
index f12e878..3c9bdf5 100644
--- a/frontend/tko/csv_encoder.py
+++ b/frontend/tko/csv_encoder.py
@@ -63,7 +63,7 @@
 
     def _process_value_table(self, value_table, row_headers):
         total_index = 0
-        for row_index in xrange(self._num_rows):
+        for row_index in range(self._num_rows):
             row_header = self._header_string(row_headers[row_index])
             row_end_index = total_index + self._num_columns
             row_values = value_table[total_index:row_end_index]
diff --git a/frontend/tko/csv_encoder_unittest.py b/frontend/tko/csv_encoder_unittest.py
index 9880bd1..29bd08d 100755
--- a/frontend/tko/csv_encoder_unittest.py
+++ b/frontend/tko/csv_encoder_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import unittest
 import common
@@ -26,7 +26,7 @@
         response = encoder.encode()
         csv_result = response.content
         expected_csv = '\r\n'.join(expected_csv_rows) + '\r\n'
-        self.assertEquals(csv_result, expected_csv)
+        self.assertEquals(csv_result, expected_csv.encode())
 
 
     def test_spreadsheet_encoder(self):
diff --git a/frontend/tko/models_test.py b/frontend/tko/models_test.py
index 0a7f98c..1221620 100644
--- a/frontend/tko/models_test.py
+++ b/frontend/tko/models_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # pylint: disable=missing-docstring
 
 import unittest
diff --git a/global_config.ini b/global_config.ini
index b77c557..aa0946c 100644
--- a/global_config.ini
+++ b/global_config.ini
@@ -28,19 +28,17 @@
 sql_debug_mode: False
 stainless_url: https://stainless.corp.google.com
 
-# Servers that should use the readonly slaves for heartbeat. Not shards.
+# Servers that should use the readonly followers for heartbeat. Not shards.
 readonly_heartbeat: False
-# Whether to check the master if the slave returns no results.
-heartbeat_fall_back_to_master: False
 
 # Restricted user group. The users in the specified groups only have
-# access to master server. Will always direct them to google storage for logs
+# access to leader server. Will always direct them to google storage for logs
 # rather than drones or shards.
 restricted_groups:  USE SHADOW RESTRICTED_GROUPS
 
 # The tko parser will use these database settings.
 # This is for sharding: Even when sharding, the results (tko tables) should
-# still be written to the master database.
+# still be written to the leader database.
 global_db_host:
 global_db_database:
 global_db_type:
@@ -61,19 +59,10 @@
 heartbeat_pause_sec: 60
 throttle_incomplete_jobs_upload: False
 
-# skip_jobs_created_before controls which jobs are assigned to shards by the
-# master in a shard heartbeat. If set to a positive integer, any jobs created
-# longer than skip_jobs_created_before hours ago are skipped during job
-# assignment.
-#
-# Unit: int (number of hours)
-# Only relevant on the master AFE that serves the shard_heartbeat() RPC.
-skip_jobs_created_before: 0
-
 [AUTOSERV]
 # Autotest potential install paths
 client_autodir_paths: /usr/local/autotest,/usr/local/autodir
-# White list of tests with run time measurement enabled.
+# Allow list of tests with run time measurement enabled.
 measure_run_time_tests: desktopui_ScreenLocker,login_LoginSuccess,security_ProfilePermissions
 
 # Don't export tko job information to disk file.
@@ -84,6 +73,8 @@
 
 # Directory stores LXC containers
 container_path: /usr/local/autotest/containers
+# Directory that stores the base LXC container
+base_container_path: /usr/local/autotest/containers
 # Shared mount point for host mounts for LXC containers.
 container_shared_host_path: /usr/local/autotest/containers/host
 
@@ -111,7 +102,7 @@
 # Enable test result throttling.
 enable_result_throttling: False
 # Default maximum test result size in KB.
-default_max_result_size_KB: 40000
+default_max_result_size_KB: 350000
 
 [CLIENT]
 drop_caches: False
@@ -122,7 +113,7 @@
 #wireless_ssid: SEE SHADOW CONFIG
 #wireless_password: SEE SHADOW CONFIG
 #wireless_security: SEE SHADOW CONFIG
-# The zone that all Chrome OS devices are in if they are in a lab.
+# The zone that all ChromeOS devices are in if they are in a lab.
 dns_zone: cros.corp.google.com
 # If necessary, specify a proxy for client downloads
 http_proxy:
@@ -189,60 +180,11 @@
 # Time in hours to wait before giving up on crash collection.
 crash_collection_hours_to_wait: 0.001
 
-# If True, use autotest_server_db to verify the host before running services
-# like scheduler, host-scheduler and suite-scheduler.
-use_server_db: False
-
-# AFE server connected to the master DB.
+# AFE server connected to the leader DB.
 global_afe_hostname: cautotest
 
-# Credential directory where all credentials files should go. If not specified,
-# will look for credentils in autotest root dir.
-creds_dir:
-
-# Set to True to upload results to prod Sponge server.
-use_prod_sponge_server: False
-
 [SCHEDULER]
-die_on_orphans: False
-enable_scheduler: True
-notify_email_errors: USE SHADOW NOTIFY_EMAIL_ERRORS
-notify_email_statuses: Completed,Failed,Aborted
-max_processes_per_drone: 1000
-max_parse_processes: 100
-max_transfer_processes: 50
-tick_pause_sec: 5
-minimum_tick_sec: 0.5
-host_scheduler_minimum_tick_sec: 3
-clean_interval_minutes: 5
-drones: SET IN SHADOW CONFIG
 drone_installation_directory: /usr/local/autotest
-results_host: localhost
-results_host_installation_directory:
-secs_to_wait_for_atomic_group_hosts: 600
-pidfile_timeout_mins: 300
-max_pidfile_refreshes: 2000
-# set nonzero to enable periodic reverification of all dead hosts
-reverify_period_minutes: 30
-reverify_max_hosts_at_once: 30
-drone_sets_enabled: False
-# default_drone_set_name: This is required if drone sets are enabled.
-default_drone_set_name:
-# Disable archiving by default.
-enable_archiving: False
-copy_task_results_back: False
-copy_parse_log_back: False
-tick_debug: True
-extra_debugging: False
-# max_repair_limit sets how many times a single HQE will go through
-# repairing -> requeued -> fail -> repairing
-max_repair_limit: 2
-max_provision_retries: 0
-drone_build_externals: False
-inline_host_acquisition: USE SHADOW INLINE_HOST_ACQUISITION
-# If True, the drone manager creates a thread for each drone.
-# Otherwise, drones are handled in a single thread.
-threaded_drone_manager: True
 
 [HOSTS]
 wait_up_processes:
@@ -260,9 +202,9 @@
 [AUTOSERV]
 # Set to True to take advantage of OpenSSH-based connection sharing. This would
 # have bigger performance impact when ssh_engine is 'raw_ssh'.
-# enable_master_ssh is being depricated in favor of enable_main_ssh.
-enable_master_ssh: True
 enable_main_ssh: True
+# By default TLS is not enabled. It will be enabled in the Puppet scripts.
+enable_tls: False
 
 [PACKAGES]
 # in days
@@ -294,8 +236,7 @@
 ctsbvt_apfe_server: gs://chromeos-cts-bvt-apfe/
 dev_server: http://100.115.245.199:8082, http://100.115.245.200:8082, http://100.115.219.131:8082, http://100.115.219.132:8082, http://100.115.219.133:8082, http://100.115.219.134:8082, http://100.115.219.137:8082
 canary_channel_server: gs://chromeos-releases/canary-channel/
-# chromeos-crash1.cros
-crash_server: http://172.17.40.24:8082, http://100.107.160.6:8082, http://100.107.160.5:8082
+crash_server:
 sharding_factor: 1
 infrastructure_user: chromeos-test
 gs_offloader_use_rsync: False
@@ -357,11 +298,7 @@
 # the "Sentry Switched CDU" type
 rpm_sentry_username: fake_user
 rpm_sentry_password: fake_password
-rpm_frontend_uri: http://chromeos-rpm-server.mtv.corp.google.com:9999
-
-# Path the devserver source tree, used for spawning devserver from autoserv in
-# some of the tests. Please override in local shadow config file.
-#devserver_dir: /path/to/src/platform/dev
+rpm_frontend_uri: http://rpm-service:9999
 
 lab_status_url: http://chromiumos-lab.appspot.com/current?format=json
 
@@ -371,11 +308,8 @@
 
 skip_devserver_health_check: True
 
-# Limit the number of files in the result folder.
-gs_offloader_limit_file_count: False
-
 # A list of pools that allow to be repaired using firmware repair.
-pools_support_firmware_repair: faft-test,faft-test-tot,faft-test-experiment,faft_test_debug,faft-cr50,faft-cr50-debug,faft-cr50-experimental,faft-cr50-tot
+pools_support_firmware_repair: faft-test,faft-test-tot,faft-test-experiment,faft_test_debug,faft-cr50,faft-cr50-debug,faft-cr50-experimental,faft-cr50-tot,faft-experimental
 
 # A list of restricted subnets, in the format of ip/mask_bits, e.g., 10.0.0.1/24
 restricted_subnets:
@@ -398,9 +332,6 @@
 # http call even if this option is set to True.
 enable_ssh_connection_for_devserver: False
 
-# Flags to enable/disable get control file contents in batch.
-enable_getting_controls_in_batch: False
-
 # File for hwid key.
 HWID_KEY: no_hwid_labels
 
@@ -432,14 +363,6 @@
 pool_health_labels: recoverduts,Pri-1
 pool_health_components: Infra>Client>ChromeOS
 
-
-[NOTIFICATIONS]
-chromium_build_url: http://build.chromium.org/p/chromiumos/
-sheriffs: USE SHADOW SHERIFFS
-lab_sheriffs: USE SHADOW SHERIFFS
-gmail_api_credentials:
-gmail_api_credentials_test_failure:
-
 [SSP]
 # Section for configuration needed for server-side packaging.
 # User that runs the autoserv process in the host of the container.
diff --git a/metadata/OWNERS b/metadata/OWNERS
new file mode 100644
index 0000000..b8468c0
--- /dev/null
+++ b/metadata/OWNERS
@@ -0,0 +1,2 @@
+include /INFRA_OWNERS
+*
diff --git a/metadata/generated/config.cfg b/metadata/generated/config.cfg
index ee34237..6bce5ab 100644
--- a/metadata/generated/config.cfg
+++ b/metadata/generated/config.cfg
@@ -273,7 +273,7 @@
 					"informational": {
 						"details": {
 							"main": {
-								"python_package": "autotest_lib.client.site_tests.graphics_Sanity.graphics_Sanity",
+								"python_package": "autotest_lib.client.site_tests.graphics_Check.graphics_Check",
 								"test_args": []
 							}
 						}
@@ -401,22 +401,6 @@
 					}
 				},
 				{
-					"name": "remoteTestDrivers/tauto/tests/platform/AnomalyDetector",
-					"attributes": [
-						{
-							"name": "suite:bvt-perbuild"
-						}
-					],
-					"informational": {
-						"details": {
-							"main": {
-								"python_package": "autotest_lib.client.site_tests.platform_AnomalyDetector.platform_AnomalyDetector",
-								"test_args": []
-							}
-						}
-					}
-				},
-				{
 					"name": "remoteTestDrivers/tauto/tests/platform/BootLockbox",
 					"informational": {
 						"details": {
@@ -2022,7 +2006,7 @@
 					"informational": {
 						"details": {
 							"main": {
-								"python_package": "autotest_lib.client.site_tests.desktopui_ChromeSanity.desktopui_ChromeSanity",
+								"python_package": "autotest_lib.client.site_tests.desktopui_ChromeCheck.desktopui_ChromeCheck",
 								"test_args": []
 							}
 						}
@@ -3323,7 +3307,7 @@
 					}
 				},
 				{
-					"name": "remoteTestDrivers/tauto/tests/network/ChromeCelluarEndToEnd",
+					"name": "remoteTestDrivers/tauto/tests/network/ChromeCellularEndToEnd",
 					"informational": {
 						"details": {
 							"main": {
@@ -4003,25 +3987,6 @@
 					}
 				},
 				{
-					"name": "remoteTestDrivers/tauto/tests/network/WiFi_ConnectionIdentifier",
-					"attributes": [
-						{
-							"name": "suite:wifi_matfunc"
-						},
-						{
-							"name": "suite:wificell-cq"
-						}
-					],
-					"informational": {
-						"details": {
-							"main": {
-								"python_package": "",
-								"test_args": []
-							}
-						}
-					}
-				},
-				{
 					"name": "remoteTestDrivers/tauto/tests/network/WiFi_DarkResumeActiveScans",
 					"attributes": [
 						{
@@ -4710,16 +4675,16 @@
 					}
 				},
 				{
-					"name": "remoteTestDrivers/tauto/tests/dummy/Pass",
+					"name": "remoteTestDrivers/tauto/tests/stub/Pass",
 					"attributes": [
 						{
 							"name": "suite:dev_drone_image_test"
 						},
 						{
-							"name": "suite:dummy"
+							"name": "suite:stub"
 						},
 						{
-							"name": "suite:dummyclientretries"
+							"name": "suite:stubclientretries"
 						},
 						{
 							"name": "suite:push_to_prod"
@@ -4734,7 +4699,7 @@
 					"informational": {
 						"details": {
 							"main": {
-								"python_package": "autotest_lib.client.site_tests.dummy_Pass.dummy_Pass",
+								"python_package": "autotest_lib.client.site_tests.stub_Pass.stub_Pass",
 								"test_args": []
 							}
 						}
@@ -4958,7 +4923,7 @@
 							"name": "suite:power_monitoring"
 						},
 						{
-							"name": "suite:power_sanity"
+							"name": "suite:power_check"
 						}
 					],
 					"informational": {
@@ -5021,7 +4986,7 @@
 							"name": "suite:power_monitoring"
 						},
 						{
-							"name": "suite:power_sanity"
+							"name": "suite:power_check"
 						}
 					],
 					"informational": {
@@ -5399,7 +5364,7 @@
 							"name": "suite:power_monitoring"
 						},
 						{
-							"name": "suite:power_sanity"
+							"name": "suite:power_check"
 						}
 					],
 					"informational": {
@@ -5483,7 +5448,7 @@
 							"name": "suite:power_monitoring"
 						},
 						{
-							"name": "suite:power_sanity"
+							"name": "suite:power_check"
 						}
 					],
 					"informational": {
@@ -6912,7 +6877,7 @@
 					"informational": {
 						"details": {
 							"main": {
-								"python_package": "autotest_lib.client.site_tests.audio_CrasSanity.audio_CrasSanity",
+								"python_package": "autotest_lib.client.site_tests.audio_CrasCheck.audio_CrasCheck",
 								"test_args": []
 							}
 						}
@@ -11426,7 +11391,7 @@
 					"informational": {
 						"details": {
 							"main": {
-								"python_package": "autotest_lib.client.site_tests.telemetry_Sanity.telemetry_Sanity",
+								"python_package": "autotest_lib.client.site_tests.telemetry_Check.telemetry_Check",
 								"test_args": []
 							}
 						}
@@ -11508,7 +11473,7 @@
 					"informational": {
 						"details": {
 							"main": {
-								"python_package": "autotest_lib.client.site_tests.accessibility_Sanity.accessibility_Sanity",
+								"python_package": "autotest_lib.client.site_tests.accessibility_Check.accessibility_Check",
 								"test_args": []
 							}
 						}
@@ -11961,4 +11926,4 @@
 			]
 		}
 	]
-}
\ No newline at end of file
+}
diff --git a/metadata/test_common.star b/metadata/test_common.star
index e157bf0..3a2fa6c 100644
--- a/metadata/test_common.star
+++ b/metadata/test_common.star
@@ -32,7 +32,7 @@
         suites: A list of test suites this test belongs to, without the 'suite:'
                 prefix.
         main_package: Python package that contains the entry function.
-                e.g. autotest_lib.client.site_tests.dummy_Pass.dummy_Pass
+                e.g. autotest_lib.client.site_tests.stub_Pass.stub_Pass
         main_args: A list of arguments to the entry function.
     """
     test_args = google_pb.ListValue(values = [
diff --git a/metadata/tests/accessibility.star b/metadata/tests/accessibility.star
index c71c001..1dfb63b 100644
--- a/metadata/tests/accessibility.star
+++ b/metadata/tests/accessibility.star
@@ -17,6 +17,6 @@
         test_common.define_test(
             'accessibility/Sanity',
             suites = [],
-            main_package = 'autotest_lib.client.site_tests.accessibility_Sanity.accessibility_Sanity',
+            main_package = 'autotest_lib.client.site_tests.accessibility_Check.accessibility_Check',
         )
     ]
diff --git a/metadata/tests/audio.star b/metadata/tests/audio.star
index c313465..8ff3734 100644
--- a/metadata/tests/audio.star
+++ b/metadata/tests/audio.star
@@ -147,7 +147,7 @@
         test_common.define_test(
             'audio/CrasSanity',
             suites = ['bvt-perbuild'],
-            main_package = 'autotest_lib.client.site_tests.audio_CrasSanity.audio_CrasSanity',
+            main_package = 'autotest_lib.client.site_tests.audio_CrasCheck.audio_CrasCheck',
         ),
         test_common.define_test(
             'audio/CrasStress',
diff --git a/metadata/tests/desktopui.star b/metadata/tests/desktopui.star
index 6083f9d..0f8f7c5 100644
--- a/metadata/tests/desktopui.star
+++ b/metadata/tests/desktopui.star
@@ -17,7 +17,7 @@
         test_common.define_test(
             'desktopui/ChromeSanity',
             suites = ['bvt-perbuild'],
-            main_package = 'autotest_lib.client.site_tests.desktopui_ChromeSanity.desktopui_ChromeSanity',
+            main_package = 'autotest_lib.client.site_tests.desktopui_ChromeCheck.desktopui_ChromeCheck',
         ),
         test_common.define_test(
             'desktopui/ConnectivityDiagnostics',
diff --git a/metadata/tests/dummy.star b/metadata/tests/dummy.star
index a492e61..eeaee20 100644
--- a/metadata/tests/dummy.star
+++ b/metadata/tests/dummy.star
@@ -20,9 +20,9 @@
             main_package = '',
         ),
         test_common.define_test(
-            'dummy/Pass',
-            suites = ['dev_drone_image_test', 'dummy', 'dummyclientretries', 'push_to_prod', 'skylab_staging_test', 'something_else'],
-            main_package = 'autotest_lib.client.site_tests.dummy_Pass.dummy_Pass',
+            'stub/Pass',
+            suites = ['dev_drone_image_test', 'stub', 'stubclientretries', 'push_to_prod', 'skylab_staging_test', 'something_else'],
+            main_package = 'autotest_lib.client.site_tests.stub_Pass.stub_Pass',
         ),
         test_common.define_test(
             'dummy/PassServer',
diff --git a/metadata/tests/graphics.star b/metadata/tests/graphics.star
index 71400e1..4393fa3 100644
--- a/metadata/tests/graphics.star
+++ b/metadata/tests/graphics.star
@@ -30,11 +30,6 @@
             main_package = 'autotest_lib.client.site_tests.graphics_Gbm.graphics_Gbm',
         ),
         test_common.define_test(
-            'graphics/Gralloc',
-            suites = ['graphics', 'graphics_per-day'],
-            main_package = 'autotest_lib.client.site_tests.graphics_Gralloc.graphics_Gralloc',
-        ),
-        test_common.define_test(
             'graphics/Idle',
             suites = ['bvt-perbuild', 'graphics', 'graphics_per-day', 'graphics_system'],
             main_package = 'autotest_lib.client.site_tests.graphics_Idle.graphics_Idle',
@@ -72,7 +67,7 @@
         test_common.define_test(
             'graphics/Sanity',
             suites = ['graphics', 'graphics_per-day', 'graphics_system'],
-            main_package = 'autotest_lib.client.site_tests.graphics_Sanity.graphics_Sanity',
+            main_package = 'autotest_lib.client.site_tests.graphics_Check.graphics_Check',
         ),
         test_common.define_test(
             'graphics/VTSwitch',
diff --git a/metadata/tests/network.star b/metadata/tests/network.star
index c2d6b3d..9f537fe 100644
--- a/metadata/tests/network.star
+++ b/metadata/tests/network.star
@@ -10,7 +10,7 @@
 def define_tests():
     return [
         test_common.define_test(
-            'network/ChromeCelluarEndToEnd',
+            'network/ChromeCellularEndToEnd',
             suites = [],
             main_package = 'autotest_lib.client.site_tests.network_ChromeCellularEndToEnd.network_ChromeCellularEndToEnd',
         ),
@@ -260,11 +260,6 @@
             main_package = '',
         ),
         test_common.define_test(
-            'network/WiFi_ConnectionIdentifier',
-            suites = ['wifi_matfunc', 'wificell-cq'],
-            main_package = '',
-        ),
-        test_common.define_test(
             'network/WiFi_DarkResumeActiveScans',
             suites = ['wifi_lucidsleep'],
             main_package = '',
diff --git a/metadata/tests/platform.star b/metadata/tests/platform.star
index 9ddd5ba..64a81c7 100644
--- a/metadata/tests/platform.star
+++ b/metadata/tests/platform.star
@@ -25,11 +25,6 @@
             main_package = 'autotest_lib.client.site_tests.platform_AesThroughput.platform_AesThroughput',
         ),
         test_common.define_test(
-            'platform/AnomalyDetector',
-            suites = ['bvt-perbuild'],
-            main_package = 'autotest_lib.client.site_tests.platform_AnomalyDetector.platform_AnomalyDetector',
-        ),
-        test_common.define_test(
             'platform/BootLockbox',
             suites = [],
             main_package = 'autotest_lib.client.site_tests.platform_BootLockbox.platform_BootLockbox',
diff --git a/metadata/tests/policy.star b/metadata/tests/policy.star
index a4e0dfa..c0e14e6 100644
--- a/metadata/tests/policy.star
+++ b/metadata/tests/policy.star
@@ -52,7 +52,7 @@
         test_common.define_test(
             'policy/AutotestSanity',
             suites = ['bvt-perbuild', 'ent-nightly', 'policy', 'smoke'],
-            main_package = 'autotest_lib.client.site_tests.policy_AutotestSanity.policy_AutotestSanity',
+            main_package = 'autotest_lib.client.site_tests.policy_AutotestCheck.policy_AutotestCheck',
         ),
         test_common.define_test(
             'policy/BookmarkBarEnabled',
diff --git a/metadata/tests/power.star b/metadata/tests/power.star
index e645c6c..c878454 100644
--- a/metadata/tests/power.star
+++ b/metadata/tests/power.star
@@ -66,7 +66,7 @@
         ),
         test_common.define_test(
             'power/Display',
-            suites = ['power_daily', 'power_monitoring', 'power_sanity'],
+            suites = ['power_daily', 'power_monitoring', 'power_check'],
             main_package = 'autotest_lib.client.site_tests.power_Display.power_Display',
         ),
         test_common.define_test(
@@ -86,7 +86,7 @@
         ),
         test_common.define_test(
             'power/Idle',
-            suites = ['bvt-perbuild', 'power_idle', 'power_monitoring', 'power_sanity'],
+            suites = ['bvt-perbuild', 'power_idle', 'power_monitoring', 'power_check'],
             main_package = 'autotest_lib.client.site_tests.power_Idle.power_Idle',
         ),
         test_common.define_test(
@@ -221,7 +221,7 @@
         ),
         test_common.define_test(
             'power/VideoPlayback',
-            suites = ['power_daily', 'power_monitoring', 'power_sanity'],
+            suites = ['power_daily', 'power_monitoring', 'power_check'],
             main_package = 'autotest_lib.client.site_tests.power_VideoPlayback.power_VideoPlayback',
         ),
         test_common.define_test(
@@ -246,7 +246,7 @@
         ),
         test_common.define_test(
             'power/WebGL',
-            suites = ['power_daily', 'power_monitoring', 'power_sanity'],
+            suites = ['power_daily', 'power_monitoring', 'power_check'],
             main_package = 'autotest_lib.client.site_tests.power_WebGL.power_WebGL',
         ),
         test_common.define_test(
diff --git a/metadata/tests/telemetry.star b/metadata/tests/telemetry.star
index c52991c..018634b 100644
--- a/metadata/tests/telemetry.star
+++ b/metadata/tests/telemetry.star
@@ -27,7 +27,7 @@
         test_common.define_test(
             'telemetry/Sanity',
             suites = ['bvt-perbuild', 'smoke'],
-            main_package = 'autotest_lib.client.site_tests.telemetry_Sanity.telemetry_Sanity',
+            main_package = 'autotest_lib.client.site_tests.telemetry_Check.telemetry_Check',
         ),
         test_common.define_test(
             'telemetry/ScrollingActionTests',
diff --git a/moblab_config.ini b/moblab_config.ini
deleted file mode 100644
index 31a6385..0000000
--- a/moblab_config.ini
+++ /dev/null
@@ -1,64 +0,0 @@
-[AUTOSERV]
-# Minimum OS version that supports server side packaging. Older builds may
-# not have server side package built or with Autotest code change to support
-# server-side packaging.
-# This build is older than the one used in global config (6986). This allows
-# moblab to still test builds in the R43 branch with server-side packaging,
-# except that it can't run the paygen_au_canary and moblab_RunSuite suites.
-enable_ssp_container: True
-min_version_support_ssp: 6919
-
-auto_start_servod: True
-
-# Name of the base container.
-container_base_name: moblab_base_09
-
-# Exports tko job information to file.
-export_tko_job_to_file: True
-
-# Do not reduce the log size makes it hard to debug.
-enable_result_throttling: False
-
-[SCHEDULER]
-minimum_tick_sec: 5
-inline_host_acquisition: False
-exit_on_failed_ssp_setup: True
-
-[AUTOTEST_WEB]
-wmatrix_url:  /wmatrix
-stainless_url:
-
-[CROS]
-# Crash servers are not accessible externally.
-crash_server:
-# CTS result server is only available for internal google testing.
-cts_results_server:
-# In moblab, this parameter is empty and uses the image bucket by default.
-results_storage_server:
-stable_cros_version: R77-12371.75.0
-devserver_dir = /usr/lib/devserver/
-
-# The pubsub topic the gs_offloader notification is sent to.
-cloud_notification_topic: projects/chromeos-partner-moblab/topics/moblab-notification
-
-# All moblab DUT's are in dev mode, so skip the repair check.
-dev_mode_allowed: True
-
-# Reduce upload bandwidth for partner by switching on tar and compress results.
-gs_offloader_limit_file_count: True
-
-# Heartbeat rate to the cloud.
-heartbeat_rate_seconds: 0
-
-# Disable trampoline in moblab as it's designed for old release builder and TPM
-# to use during skylab and quota-scheduler migration.
-enable_run_suite_trampoline: False
-
-[REMOTE_TASK_SCHEDULER]
-enabled = False
-tick_interval = 15
-debug_enabled = False
-
-[TKO]
-database: chromeos_autotest_db
-
diff --git a/server/OWNERS b/server/OWNERS
index f9bd0f4..2e97f12 100644
--- a/server/OWNERS
+++ b/server/OWNERS
@@ -1,3 +1 @@
-include /INFRA_OWNERS
-include /ENGPROD_OWNERS
-*
+include /HARNESS_OWNERS
diff --git a/server/_autoserv b/server/_autoserv
new file mode 100755
index 0000000..ab9d821
--- /dev/null
+++ b/server/_autoserv
@@ -0,0 +1,884 @@
+#!/usr/bin/python3 -u
+# Copyright 2007-2008 Martin J. Bligh <mbligh@google.com>, Google Inc.
+# Released under the GPL v2
+
+"""
+Run a control file through the server side engine
+"""
+
+import datetime
+import contextlib
+import getpass
+import logging
+import os
+import re
+import shutil
+import signal
+import socket
+import sys
+import traceback
+import time
+import six
+from six.moves import urllib
+
+import common
+from autotest_lib.client.bin.result_tools import utils as result_utils
+from autotest_lib.client.bin.result_tools import view as result_view
+from autotest_lib.client.common_lib import control_data
+from autotest_lib.client.common_lib import autotest_enum
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import global_config
+from autotest_lib.client.common_lib import host_queue_entry_states
+from autotest_lib.client.common_lib import host_states
+from autotest_lib.client.common_lib import seven
+from autotest_lib.server.cros.dynamic_suite import suite
+
+try:
+    from autotest_lib.utils.frozen_chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import cloud_trace
+except ImportError as e:
+    from autotest_lib.client.common_lib import utils as common_utils
+    metrics = common_utils.metrics_mock
+    import mock
+    cloud_trace = mock.MagicMock()
+
+# Number of seconds to wait before returning if testing mode is enabled
+TESTING_MODE_SLEEP_SECS = 1
+
+
+from autotest_lib.server import frontend
+from autotest_lib.server import server_logging_config
+from autotest_lib.server import server_job, utils, autoserv_parser, autotest
+from autotest_lib.server import utils as server_utils
+from autotest_lib.server import site_utils
+from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
+from autotest_lib.site_utils import job_directories
+from autotest_lib.site_utils import lxc
+from autotest_lib.site_utils.lxc import utils as lxc_utils
+from autotest_lib.client.common_lib import pidfile, logging_manager
+
+
+# Control segment to stage server-side package.
+STAGE_SERVER_SIDE_PACKAGE_CONTROL_FILE = server_job._control_segment_path(
+        'stage_server_side_package')
+
+# Command line to start servod in a moblab.
+START_SERVOD_CMD = 'sudo start servod BOARD=%s PORT=%s'
+STOP_SERVOD_CMD = 'sudo stop servod'
+
+_AUTOTEST_ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__), '..'))
+_CONTROL_FILE_FROM_CONTROL_NAME = 'control.from_control_name'
+
+_LXC_JOB_FOLDER = 'lxc_job_folder'
+
+def log_alarm(signum, frame):
+    logging.error("Received SIGALARM. Ignoring and continuing on.")
+    sys.exit(1)
+
+
+def _get_companions(parser):
+    """Get a list of companion devices from command line arg -ch.
+
+    @param parser: Parser for the command line arguments.
+
+    @return: A list of companion devices from command line arg -ch.
+    """
+    if parser.options.companion_hosts:
+        companions = parser.options.companion_hosts.replace(',', ' ').strip().split()
+    else:
+        companions = []
+
+    if companions:
+        for companion in companions:
+            if not companion or re.search(r'\s', companion):
+                parser.parser.error("Invalid companion: %s" % str(companion))
+        companions = list(set(companions))
+        companions.sort()
+    return companions
+
+
+def _get_dutservers(parser):
+    """Get a list of DUT server addresses from command line arg --dut_servers.
+
+    @param parser: Parser for the command line arguments.
+
+    @return: A list of DUT server addresses from command line arg
+             --dut_servers.
+    """
+    if parser.options.dut_servers:
+        dut_servers = parser.options.dut_servers.replace(
+            ',', ' ').strip().split()
+    else:
+        dut_servers = []
+
+    if dut_servers:
+        for dut_server in dut_servers:
+            if not dut_server or re.search(r'\s', dut_server):
+                parser.parser.error(
+                    "Invalid DUT Server address: %s" % str(dut_server))
+        dut_servers = list(set(dut_servers))
+        dut_servers.sort()
+    return dut_servers
+
+
+def _get_machines(parser):
+    """Get a list of machine names from command line arg -m or a file.
+
+    @param parser: Parser for the command line arguments.
+
+    @return: A list of machine names from command line arg -m or the
+             machines file specified in the command line arg -M.
+    """
+    if parser.options.machines:
+        machines = parser.options.machines.replace(',', ' ').strip().split()
+    else:
+        machines = []
+    machines_file = parser.options.machines_file
+    if machines_file:
+        machines = []
+        for m in open(machines_file, 'r').readlines():
+            # remove comments, spaces
+            m = re.sub('#.*', '', m).strip()
+            if m:
+                machines.append(m)
+        logging.debug('Read list of machines from file: %s', machines_file)
+        logging.debug('Machines: %s', ','.join(machines))
+
+    if machines:
+        for machine in machines:
+            if not machine or re.search(r'\s', machine):
+                parser.parser.error("Invalid machine: %s" % str(machine))
+        machines = list(set(machines))
+        machines.sort()
+    return machines
+
+
+def _stage_ssp(parser, resultsdir):
+    """Stage server-side package.
+
+    This function calls a control segment to stage the server-side package
+    based on the job and autoserv command line options. The detailed
+    implementation can differ for each host type. Currently, only CrosHost has
+    a stage_server_side_package function defined.
+    The script returns None if no server-side package is available. However,
+    it may raise an exception if it fails for reasons other than the artifact
+    (the server-side package) not being found.
+
+    @param parser: Command line arguments parser passed in the autoserv process.
+    @param resultsdir: Folder to store results. This could be different from
+            parser.options.results: parser.options.results can be set to None
+            for results to be stored in a temp folder. resultsdir can be None
+            if the autoserv run requires no logging.
+
+    @return: url to the autotest server-side package. None in case of errors.
+    """
+    machines_list = _get_machines(parser)
+    machines_list = server_job.get_machine_dicts(
+            machine_names=machines_list,
+            store_dir=os.path.join(resultsdir, parser.options.host_info_subdir),
+            in_lab=parser.options.lab,
+            use_shadow_store=not parser.options.local_only_host_info,
+            host_attributes=parser.options.host_attributes,
+    )
+
+    namespace = {'machines': machines_list,
+                 'image': parser.options.test_source_build}
+    script_locals = {}
+
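+    # The staging control segment is expected to populate 'ssp_url' (and
+    # 'error_msg' on failure) in script_locals.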
+    seven.exec_file(
+        STAGE_SERVER_SIDE_PACKAGE_CONTROL_FILE,
+        globals_=namespace,
+        locals_=script_locals,
+    )
+    ssp_url = script_locals['ssp_url']
+    if not ssp_url:
+        logging.error('Failed to stage SSP package: %s',
+                      script_locals['error_msg'])
+        logging.error('This job will fail later, when attempting to run with'
+                      ' SSP')
+    return ssp_url
+
+
+def _run_with_ssp(job, container_id, job_id, results, parser, ssp_url,
+                  machines):
+    """Run the server job with server-side packaging.
+
+    @param job: The server job object.
+    @param container_id: ID of the container to run the test.
+    @param job_id: ID of the test job.
+    @param results: Folder to store results. This could be different from
+                    parser.options.results:
+                    parser.options.results can be set to None for results to be
+                    stored in a temp folder.
+                    results can be None if the autoserv run requires no logging.
+    @param parser: Command line parser that contains the options.
+    @param ssp_url: url of the staged server-side package.
+    @param machines: A list of machines to run the test.
+    """
+    if not ssp_url:
+        job.record('FAIL', None, None,
+                   'Failed to stage server-side package')
+        raise error.AutoservError('Failed to stage server-side package')
+
+    bucket = lxc.ContainerBucket(
+            base_name=_ssp_base_image_name_or_default(parser.options))
+    control = (parser.args[0] if len(parser.args) > 0 and parser.args[0] != ''
+               else None)
+    try:
+        dut_name = machines[0] if len(machines) >= 1 else None
+        test_container = bucket.setup_test(container_id, job_id, ssp_url,
+                                           results, control=control,
+                                           job_folder=_LXC_JOB_FOLDER,
+                                           dut_name=dut_name)
+    except Exception as e:
+        job.record('START', None, None, 'Starting SSP')
+        job.record('END ABORT', None, None,
+                   'Failed to set up container for test: %s. Check logs in '
+                   'ssp_logs folder for more details.' % e)
+        raise error.AutoservSSPError
+
+    args = sys.argv[:]
+    args.remove('--require-ssp')
+    # --parent_job_id is only useful for autoserv running on the host, not in
+    # the container. Including this argument will cause the test to fail for
+    # builds before CL 286265 was merged.
+    if '--parent_job_id' in args:
+        index = args.index('--parent_job_id')
+        args.remove('--parent_job_id')
+        # Remove the actual parent job id in command line arg.
+        del args[index]
+
+    # A dictionary of paths to replace in the command line. Key is the path to
+    # be replaced with the one in value.
+    paths_to_replace = {}
+    # Replace the control file path with the one in container.
+    if control:
+        container_control_filename = os.path.join(
+                lxc.CONTROL_TEMP_PATH, os.path.basename(control))
+        paths_to_replace[control] = container_control_filename
+    # Update result directory with the one in container.
+    container_result_dir = os.path.join(lxc.RESULT_DIR_FMT % _LXC_JOB_FOLDER)
+    if parser.options.results:
+        paths_to_replace[parser.options.results] = container_result_dir
+    args = [paths_to_replace.get(arg, arg) for arg in args]
+
+    # Apply --use-existing-results: the results directory is already created
+    # and mounted in the container. Apply this arg to avoid an exception being
+    # raised.
+    if '--use-existing-results' not in args:
+        args.append('--use-existing-results')
+
+    # Make sure autoserv running in the container uses a different pid file.
+    if '--pidfile-label' not in args:
+        args.extend(['--pidfile-label', 'container_autoserv'])
+
+    cmd_line = ' '.join(["'%s'" % arg if ' ' in arg else arg for arg in args])
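+    # Arguments containing spaces are single-quoted so they are preserved when
+    # the command line is run inside the container.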
+    logging.info('Run command in container: %s', cmd_line)
+    success = False
+    try:
+        test_container.attach_run(cmd_line)
+        success = True
+    except Exception as e:
+        # If the test run inside the container fails without generating any
+        # log, write a message to status.log to help with troubleshooting.
+        debug_files = os.listdir(os.path.join(results, 'debug'))
+        if not debug_files:
+            job.record('FAIL', None, None,
+                       'Failed to run test inside the container: %s. Check '
+                       'logs in the ssp_logs folder for more details.' % e)
+        raise
+    finally:
+        metrics.Counter(
+            'chromeos/autotest/experimental/execute_job_in_ssp').increment(
+                fields={'success': success})
+        test_container.destroy()
+
+
+def correct_results_folder_permission(results):
+    """Make sure the results folder has the right permission settings.
+
+    For tests running with server-side packaging, the results folder is owned
+    by root. This must be changed to the user running the autoserv process so
+    that the parsing job can access the results folder.
+    TODO(dshi): crbug.com/459344 Remove this function when the test container
+    can be an unprivileged container.
+
+    @param results: Path to the results folder.
+
+    """
+    if not results:
+        return
+
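+    # Recursively hand ownership of the results tree back to the user and group
+    # running autoserv.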
+    utils.run('sudo -n chown -R %s "%s"' % (os.getuid(), results))
+    utils.run('sudo -n chgrp -R %s "%s"' % (os.getgid(), results))
+
+
+def _start_servod(machine):
+    """Try to start servod in moblab if it's not already running or running with
+    different board or port.
+
+    @param machine: Name of the dut used for test.
+    """
+    if not utils.is_moblab():
+        return
+
+    logging.debug('Trying to start servod.')
+    try:
+        afe = frontend.AFE()
+        board = server_utils.get_board_from_afe(machine, afe)
+        hosts = afe.get_hosts(hostname=machine)
+        servo_host = hosts[0].attributes.get('servo_host', None)
+        servo_port = hosts[0].attributes.get('servo_port', 9999)
+        if servo_host not in ['localhost', '127.0.0.1']:
+            logging.warning('Aborting servod start. The dut\'s servo_host '
+                            'attribute is not set to localhost.')
+            return
+    except (urllib.error.HTTPError, urllib.error.URLError):
+        # Ignore the error if the RPC failed to get the board.
+        logging.error('Failed to get board name from AFE. Starting servod is '
+                      'aborted.')
+        return
+
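+    # If servod is already running with the same board and port, leave it
+    # alone; otherwise stop the existing instance before starting a new one.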
+    try:
+        pid = utils.run('pgrep servod').stdout
+        cmd_line = utils.run('ps -fp %s' % pid).stdout
+        if ('--board %s' % board in cmd_line and
+            '--port %s' % servo_port in cmd_line):
+            logging.debug('Servod is already running with given board and port.'
+                          ' There is no need to restart servod.')
+            return
+        logging.debug('Servod is running with different board or port. '
+                      'Stopping existing servod.')
+        utils.run('sudo stop servod')
+    except error.CmdError:
+        # servod is not running.
+        pass
+
+    try:
+        utils.run(START_SERVOD_CMD % (board, servo_port))
+        logging.debug('Servod is started')
+    except error.CmdError as e:
+        logging.error('Failed to start servod: %s', e)
+
+
+def _control_path_on_disk(control_name):
+    """Find the control file corresponding to the given control name, on disk.
+
+    @param control_name: NAME attribute of the control file to fetch.
+    @return: Path to the control file.
+    """
+    cf_getter = suite.create_fs_getter(_AUTOTEST_ROOT)
+    control_name_predicate = suite.test_name_matches_pattern_predicate(
+            '^%s$' % control_name)
+    tests = suite.find_and_parse_tests(cf_getter, control_name_predicate)
+    if not tests:
+        raise error.AutoservError(
+                'Failed to find any control files with NAME %s' % control_name)
+    if len(tests) > 1:
+        logging.error('Found more than one control file with NAME %s: %s',
+                      control_name, [t.path for t in tests])
+        raise error.AutoservError(
+                'Found more than one control file with NAME %s' % control_name)
+    return tests[0].path
+
+
+def _stage_control_file(control_name, results_dir):
+    """Stage the control file to execute from local autotest checkout.
+
+    @param control_name: Name of the control file to stage.
+    @param results_dir: Results directory to stage the control file into.
+    @return: Absolute path to the staged control file.
+    """
+    control_path = _control_path_on_disk(control_name)
+    new_control = os.path.join(results_dir, _CONTROL_FILE_FROM_CONTROL_NAME)
+    shutil.copy2(control_path, new_control)
+    return new_control
+
+
+def run_autoserv(pid_file_manager, results, parser, ssp_url, use_ssp):
+    """Run server job with given options.
+
+    @param pid_file_manager: PidFileManager used to monitor the autoserv process
+    @param results: Folder to store results.
+    @param parser: Parser for the command line arguments.
+    @param ssp_url: Url to server-side package.
+    @param use_ssp: Set to True to run with server-side packaging.
+    """
+    # send stdin to /dev/null
+    dev_null = os.open(os.devnull, os.O_RDONLY)
+    os.dup2(dev_null, sys.stdin.fileno())
+    os.close(dev_null)
+
+    # Create a separate process group if the process is not a process group
+    # leader. This allows the autoserv process to keep running after the caller
+    # process (drone manager call) exits.
+    if os.getpid() != os.getpgid(0):
+        os.setsid()
+
+    # Container name is predefined so the container can be destroyed in
+    # handle_sigterm.
+    job_or_task_id = job_directories.get_job_id_or_task_id(
+            parser.options.results)
+    container_id = lxc.ContainerId(job_or_task_id, time.time(), os.getpid())
+
+    # Implement SIGTERM handler
+    def handle_sigterm(signum, frame):
+        logging.debug('Received SIGTERM')
+        if pid_file_manager:
+            pid_file_manager.close_file(1, signal.SIGTERM)
+        logging.debug('Finished writing to pid_file. Killing process.')
+
+        # Update the results folder's file permissions. This needs to be done
+        # ASAP, before the parsing process tries to access the log.
+        if use_ssp and results:
+            correct_results_folder_permission(results)
+
+        # This sleep allows the pending output to be logged before the kill
+        # signal is sent.
+        time.sleep(.1)
+        if use_ssp:
+            logging.debug('Destroy container %s before aborting the autoserv '
+                          'process.', container_id)
+            try:
+                bucket = lxc.ContainerBucket(
+                        base_name=_ssp_base_image_name_or_default(
+                                parser.options))
+                container = bucket.get_container(container_id)
+                if container:
+                    container.destroy()
+                    logging.debug("Container %s destroyed.", container_id)
+                else:
+                    logging.debug('Container %s is not found.', container_id)
+                    bucket.scrub_container_location(container_id)
+            except:
+                # Handle any exception so the autoserv process can be aborted.
+                logging.exception('Failed to destroy container %s.',
+                                  container_id)
+            # Try to correct the result file permission again after the
+            # container is destroyed, as the container might have created some
+            # new files in the result folder.
+            if results:
+                correct_results_folder_permission(results)
+
+        os.killpg(os.getpgrp(), signal.SIGKILL)
+
+    # Set signal handler
+    signal.signal(signal.SIGTERM, handle_sigterm)
+
+    # faulthandler is only needed for debugging in the lab and is not available
+    # to be imported in the chroot as part of VMTest, so wrap the import in
+    # try/except.
+    try:
+        import faulthandler
+        faulthandler.register(signal.SIGTERM, all_threads=True, chain=True)
+        logging.debug('faulthandler registered on SIGTERM.')
+    except ImportError:
+        # exc_clear() doesn't exist (nor is needed) in python3
+        if six.PY2:
+            sys.exc_clear()
+
+    # Ignore SIGTTOUs generated by output from forked children.
+    signal.signal(signal.SIGTTOU, signal.SIG_IGN)
+
+    # If we received a SIGALRM, let's be loud about it.
+    signal.signal(signal.SIGALRM, log_alarm)
+
+    # Server-side tests that call shell scripts often depend on $USER being
+    # set, but depending on how you launch your autotest scheduler, it may not
+    # be.
+    os.environ['USER'] = getpass.getuser()
+
+    label = parser.options.label
+    group_name = parser.options.group_name
+    user = parser.options.user
+    client = parser.options.client
+    server = parser.options.server
+    verify = parser.options.verify
+    repair = parser.options.repair
+    cleanup = parser.options.cleanup
+    provision = parser.options.provision
+    reset = parser.options.reset
+    job_labels = parser.options.job_labels
+    no_tee = parser.options.no_tee
+    execution_tag = parser.options.execution_tag
+    ssh_user = parser.options.ssh_user
+    ssh_port = parser.options.ssh_port
+    ssh_pass = parser.options.ssh_pass
+    collect_crashinfo = parser.options.collect_crashinfo
+    control_filename = parser.options.control_filename
+    verify_job_repo_url = parser.options.verify_job_repo_url
+    skip_crash_collection = parser.options.skip_crash_collection
+    ssh_verbosity = int(parser.options.ssh_verbosity)
+    ssh_options = parser.options.ssh_options
+    no_use_packaging = parser.options.no_use_packaging
+    in_lab = bool(parser.options.lab)
+    companion_hosts = _get_companions(parser)
+    dut_servers = _get_dutservers(parser)
+    is_cft = parser.options.cft
+    force_full_log_collection = parser.options.force_full_log_collection
+
+    # A test can't be both a client- and a server-side test.
+    if client and server:
+        parser.parser.error("Cannot specify a test as both server and client!")
+
+    if provision and client:
+        parser.parser.error("Cannot specify provisioning and client!")
+
+    is_special_task = (verify or repair or cleanup or collect_crashinfo or
+                       provision or reset)
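+    # Special tasks (verify, repair, cleanup, etc.) may run without a control
+    # file; regular test runs require one (enforced below).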
+    use_client_trampoline = False
+    if parser.options.control_name:
+        if use_ssp:
+            # When use_ssp is True, autoserv will be re-executed inside a
+            # container, preserving the --control-name argument. The control
+            # file will be staged inside the re-executed autoserv.
+            control = None
+        else:
+            try:
+                control = _stage_control_file(parser.options.control_name,
+                                              results)
+            except error.AutoservError as e:
+                logging.info("Using client trampoline because of: %s", e)
+                control = parser.options.control_name
+                use_client_trampoline = True
+
+    elif parser.args:
+        control = parser.args[0]
+    else:
+        if not is_special_task:
+            parser.parser.error("Missing argument: control file")
+        control = None
+
+    if ssh_verbosity > 0:
+        # ssh_verbosity is an integer between 0 and 3, inclusive
+        ssh_verbosity_flag = '-' + 'v' * ssh_verbosity
+    else:
+        ssh_verbosity_flag = ''
+
+    machines = _get_machines(parser)
+    if group_name and len(machines) < 2:
+        parser.parser.error('-G %r may only be supplied with more than one '
+                            'machine.' % group_name)
+
+    logging.debug("Parser.args is %r", parser.args)
+    try:
+        logging.debug("Parser.options.args is %r", parser.options.args)
+    except AttributeError:
+        logging.debug("No Parser.options.args.")
+
+    try:
+        logging.debug("Parser.options is %r", parser.options)
+    except AttributeError:
+        logging.debug("No Parser.options.")
+    job_kwargs = {
+            'control': control,
+            'args': parser.args[1:],
+            'resultdir': results,
+            'label': label,
+            'user': user,
+            'machines': machines,
+            'machine_dict_list': server_job.get_machine_dicts(
+                    machine_names=machines,
+                    store_dir=os.path.join(results,
+                                           parser.options.host_info_subdir),
+                    in_lab=in_lab,
+                    use_shadow_store=not parser.options.local_only_host_info,
+                    host_attributes=parser.options.host_attributes,
+            ),
+            'client': client,
+            'ssh_user': ssh_user,
+            'ssh_port': ssh_port,
+            'ssh_pass': ssh_pass,
+            'ssh_verbosity_flag': ssh_verbosity_flag,
+            'ssh_options': ssh_options,
+            'group_name': group_name,
+            'tag': execution_tag,
+            'disable_sysinfo': parser.options.disable_sysinfo,
+            'in_lab': in_lab,
+            'use_client_trampoline': use_client_trampoline,
+            'sync_offload_dir': parser.options.sync_offload_dir,
+            'companion_hosts': server_job.get_machine_dicts(
+                    machine_names=companion_hosts,
+                    store_dir=os.path.join(results,
+                                           parser.options.host_info_subdir),
+                    in_lab=in_lab,
+                    use_shadow_store=not parser.options.local_only_host_info,
+                    host_attributes=parser.options.host_attributes),
+            'dut_servers': dut_servers,
+            'is_cft': is_cft,
+            'force_full_log_collection': force_full_log_collection
+    }
+    if parser.options.parent_job_id:
+        job_kwargs['parent_job_id'] = int(parser.options.parent_job_id)
+    if control_filename:
+        job_kwargs['control_filename'] = control_filename
+    if parser.options.image_storage_server:
+        global_config.global_config.override_config_value(
+            'CROS', 'image_storage_server',
+            os.path.join(parser.options.image_storage_server, ''))
+
+    job = server_job.server_job(**job_kwargs)
+
+    job.logging.start_logging()
+
+    # perform checks
+    job.precheck()
+
+    # run the job
+    exit_code = 0
+    auto_start_servod = global_config.global_config.get_config_value(
+            'AUTOSERV', 'auto_start_servod', type=bool, default=False)
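+    # auto_start_servod only has an effect on moblab; _start_servod returns
+    # early elsewhere.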
+
+    if not utils.is_in_container():
+        # crbug.com/1054522 -- ts_mon setup is broken inside the SSP container
+        # due to a problem in the installed python packages.
+        # Trying to clean up an incorrectly initialized ts_mon state adds a 5
+        # second overhead in process teardown, so avoid setting up ts_mon
+        # entirely inside the SSP container.
+        site_utils.SetupTsMonGlobalState('autoserv', indirect=False,
+                                         short_lived=True)
+    try:
+        try:
+            if repair:
+                if auto_start_servod and len(machines) == 1:
+                    _start_servod(machines[0])
+                job.repair(job_labels)
+            elif verify:
+                job.verify(job_labels)
+            elif provision:
+                job.provision(job_labels)
+            elif reset:
+                job.reset(job_labels)
+            elif cleanup:
+                job.cleanup(job_labels)
+            else:
+                if auto_start_servod and len(machines) == 1:
+                    _start_servod(machines[0])
+                if use_ssp:
+                    try:
+                        _run_with_ssp(job, container_id, job_or_task_id,
+                                        results, parser, ssp_url, machines)
+                    finally:
+                        # Update the ownership of files in result folder.
+                        correct_results_folder_permission(results)
+                else:
+                    if collect_crashinfo:
+                        # Update the ownership of files in the result folder. If
+                        # the job to collect crashinfo was running inside a
+                        # container (SSP) and crashed before correcting the
+                        # folder permissions, the result folder might have the
+                        # wrong permission settings.
+                        try:
+                            correct_results_folder_permission(results)
+                        except:
+                            # Ignore any error, as the user may not have root
+                            # permission to run the sudo command.
+                            pass
+                    metric_name = ('chromeos/autotest/experimental/'
+                                   'autoserv_job_run_duration')
+                    f = {'in_container': utils.is_in_container(),
+                         'success': False}
+                    with metrics.SecondsTimer(metric_name, fields=f) as c:
+                        job.run(verify_job_repo_url=verify_job_repo_url,
+                                only_collect_crashinfo=collect_crashinfo,
+                                skip_crash_collection=skip_crash_collection,
+                                job_labels=job_labels,
+                                use_packaging=(not no_use_packaging))
+                        c['success'] = True
+
+        finally:
+            job.close()
+    except error.AutoservSSPError:
+        # Due to the complexity of the TKO parsing/stainless connection, this
+        # must be 0 so that the "abort" is actually reflected on stainless.
+        exit_code = 0
+        traceback.print_exc()
+    except:
+        exit_code = 1
+        traceback.print_exc()
+    finally:
+        metrics.Flush()
+
+    sys.exit(exit_code)
+
+
+# Job breakdown statuses
+_hs = host_states.Status
+_qs = host_queue_entry_states.Status
+_status_list = [
+        _qs.QUEUED, _qs.RESETTING, _qs.VERIFYING,
+        _qs.PROVISIONING, _hs.REPAIRING, _qs.CLEANING,
+        _qs.RUNNING, _qs.GATHERING, _qs.PARSING]
+_JOB_OVERHEAD_STATUS = autotest_enum.AutotestEnum(*_status_list,
+                                                  string_values=True)
+
+
+def get_job_status(options):
+    """Returns the HQE Status for this run.
+
+    @param options: parser options.
+    """
+    s = _JOB_OVERHEAD_STATUS
+    task_mapping = {
+            'reset': s.RESETTING, 'verify': s.VERIFYING,
+            'provision': s.PROVISIONING, 'repair': s.REPAIRING,
+            'cleanup': s.CLEANING, 'collect_crashinfo': s.GATHERING}
+    match = [task for task in task_mapping if getattr(options, task, False)]
+    return task_mapping[match[0]] if match else s.RUNNING
+
+
+def _require_ssp_from_control(control_name):
+    """Read the value of REQUIRE_SSP from test control file.
+
+    This reads the control file from the prod checkout of autotest and uses that
+    to determine whether to even stage the SSP package on a devserver.
+
+    This means:
+    [1] Any change to the REQUIRE_SSP directive in a test requires a prod-push
+    to go live.
+    [2] This function may find that the control file does not exist while the
+    SSP package does contain the test file. This function conservatively
+    returns True in that case.
+
+    This function is called very early in autoserv, before logging is set up.
+    """
+    if not control_name:
+        return True
+    try:
+        path = _control_path_on_disk(control_name)
+    except error.AutoservError as e:
+        sys.stderr.write("autoserv: Could not determine control file path,"
+                         " assuming we need SSP: %s\n" % e)
+        sys.stderr.flush()
+        return True
+    if not os.path.isfile(path):
+        return True
+    control = control_data.parse_control(path)
+    # There must be an explicit directive in the control file to disable SSP.
+    if not control or control.require_ssp is None:
+        return True
+    return control.require_ssp
+
+
+def _ssp_base_image_name_or_default(options):
+    """Extract base image name from autoserv options or the global config."""
+    if options.ssp_base_image_name:
+        return options.ssp_base_image_name
+    return global_config.global_config.get_config_value('AUTOSERV',
+                                                        'container_base_name')
+
+
+def main():
+    start_time = datetime.datetime.now()
+    parser = autoserv_parser.autoserv_parser
+    parser.parse_args()
+
+    if len(sys.argv) == 1:
+        parser.parser.print_help()
+        sys.exit(1)
+
+    if parser.options.no_logging:
+        results = None
+    else:
+        results = parser.options.results
+        if not results:
+            results = 'results.' + time.strftime('%Y-%m-%d-%H.%M.%S')
+        results = os.path.abspath(results)
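+        # Detect whether a previous autoserv run already wrote into this
+        # results directory.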
+        resultdir_exists = False
+        for filename in ('control.srv', 'status.log', '.autoserv_execute'):
+            if os.path.exists(os.path.join(results, filename)):
+                resultdir_exists = True
+        if not parser.options.use_existing_results and resultdir_exists:
+            error = "Error: results directory already exists: %s\n" % results
+            sys.stderr.write(error)
+            sys.exit(1)
+
+        # Now that we have verified that there's no leftover results dir from
+        # previous jobs, let's create the result dir, since the logging system
+        # needs to create the log file in there.
+        if not os.path.isdir(results):
+            os.makedirs(results)
+
+    if parser.options.require_ssp:
+        # This is currently only used for skylab (i.e., when --control-name is
+        # used).
+        use_ssp = _require_ssp_from_control(parser.options.control_name)
+    else:
+        use_ssp = False
+
+    if use_ssp:
+        log_dir = os.path.join(results, 'ssp_logs') if results else None
+        if log_dir and not os.path.exists(log_dir):
+            os.makedirs(log_dir)
+    else:
+        log_dir = results
+
+    logging_manager.configure_logging(
+            server_logging_config.ServerLoggingConfig(),
+            results_dir=log_dir,
+            use_console=not parser.options.no_tee,
+            verbose=parser.options.verbose,
+            no_console_prefix=parser.options.no_console_prefix)
+
+    logging.debug('autoserv is running in drone %s.', socket.gethostname())
+    logging.debug('autoserv environment: %r', os.environ)
+    logging.debug('autoserv command was: %s', ' '.join(sys.argv))
+    logging.debug('autoserv parsed options: %s', parser.options)
+    logging.debug('autoserv python version: %s', sys.version)
+
+    if use_ssp:
+        ssp_url = _stage_ssp(parser, results)
+    else:
+        ssp_url = None
+
+    if results:
+        logging.info("Results placed in %s" % results)
+
+        # wait until now to perform this check, so it get properly logged
+        if (parser.options.use_existing_results and not resultdir_exists and
+            not utils.is_in_container()):
+            logging.error("No existing results directory found: %s", results)
+            sys.exit(1)
+
+    if parser.options.write_pidfile and results:
+        pid_file_manager = pidfile.PidFileManager(parser.options.pidfile_label,
+                                                  results)
+        pid_file_manager.open_file()
+    else:
+        pid_file_manager = None
+
+    autotest.Autotest.set_install_in_tmpdir(
+        parser.options.install_in_tmpdir)
+
+    exit_code = 0
+    is_task = (parser.options.verify or parser.options.repair or
+               parser.options.provision or parser.options.reset or
+               parser.options.cleanup or parser.options.collect_crashinfo)
+
+    trace_labels = {
+            'job_id': job_directories.get_job_id_or_task_id(
+                    parser.options.results)
+    }
+    trace = cloud_trace.SpanStack(
+            labels=trace_labels,
+            global_context=parser.options.cloud_trace_context)
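+    # cloud_trace_context_enabled is passed as a string, hence the comparison
+    # against 'True' below.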
+    trace.enabled = parser.options.cloud_trace_context_enabled == 'True'
+    try:
+        try:
+            with trace.Span(get_job_status(parser.options)):
+                run_autoserv(pid_file_manager, results, parser, ssp_url,
+                             use_ssp)
+        except SystemExit as e:
+            exit_code = e.code
+            if exit_code:
+                logging.exception('Uncaught SystemExit with code %s', exit_code)
+        except Exception:
+            # If we don't know what happened, we'll classify it as
+            # an 'abort' and return 1.
+            logging.exception('Uncaught Exception, exit_code = 1.')
+            exit_code = 1
+    finally:
+        if pid_file_manager:
+            pid_file_manager.close_file(exit_code)
+    sys.exit(exit_code)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/server/_autoserv.py b/server/_autoserv.py
new file mode 120000
index 0000000..76e71e1
--- /dev/null
+++ b/server/_autoserv.py
@@ -0,0 +1 @@
+_autoserv
\ No newline at end of file
diff --git a/server/afe_urls_unittest.py b/server/afe_urls_unittest.py
index 0d90848..9831904 100644
--- a/server/afe_urls_unittest.py
+++ b/server/afe_urls_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,6 +7,7 @@
 import unittest
 
 import common
+
 from autotest_lib.server import afe_urls
 
 
diff --git a/server/afe_utils.py b/server/afe_utils.py
index 1c033bb..0ce6cab 100644
--- a/server/afe_utils.py
+++ b/server/afe_utils.py
@@ -36,11 +36,11 @@
 
 
 def get_stable_cros_image_name_v2(host_info):
-    """Retrieve the Chrome OS stable image name for a given board.
+    """Retrieve the ChromeOS stable image name for a given board.
 
     @param host_info: a host_info_store object.
 
-    @returns Name of a Chrome OS image to be installed in order to
+    @returns Name of a ChromeOS image to be installed in order to
             repair the given board.
     """
     if not host_info.cros_stable_version:
diff --git a/server/afe_utils_unittest.py b/server/afe_utils_unittest.py
index 9874de0..27ce13e 100644
--- a/server/afe_utils_unittest.py
+++ b/server/afe_utils_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/autoserv b/server/autoserv
index 8eb4218..f76b48e 100755
--- a/server/autoserv
+++ b/server/autoserv
@@ -1,817 +1,30 @@
-#!/usr/bin/python2 -u
-# Copyright 2007-2008 Martin J. Bligh <mbligh@google.com>, Google Inc.
-# Released under the GPL v2
+#!/usr/bin/python3 -u
 
-"""
-Run a control file through the server side engine
-"""
-
-import datetime
-import contextlib
-import getpass
-import logging
 import os
-import re
-import shutil
-import signal
-import socket
 import sys
-import traceback
-import time
-import six
-from six.moves import urllib
-
-import common
-from autotest_lib.client.bin.result_tools import utils as result_utils
-from autotest_lib.client.bin.result_tools import view as result_view
-from autotest_lib.client.common_lib import control_data
-from autotest_lib.client.common_lib import autotest_enum
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib import host_queue_entry_states
-from autotest_lib.client.common_lib import host_states
-from autotest_lib.client.common_lib import seven
-from autotest_lib.server.cros.dynamic_suite import suite
-
-try:
-    from chromite.lib import metrics
-    from chromite.lib import cloud_trace
-except ImportError:
-    from autotest_lib.client.common_lib import utils as common_utils
-    metrics = common_utils.metrics_mock
-    import mock
-    cloud_trace = mock.MagicMock()
-
-_CONFIG = global_config.global_config
-
-# Number of seconds to wait before returning if testing mode is enabled
-TESTING_MODE_SLEEP_SECS = 1
+import subprocess
 
 
-from autotest_lib.server import frontend
-from autotest_lib.server import server_logging_config
-from autotest_lib.server import server_job, utils, autoserv_parser, autotest
-from autotest_lib.server import utils as server_utils
-from autotest_lib.server import site_utils
-from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
-from autotest_lib.site_utils import job_directories
-from autotest_lib.site_utils import lxc
-from autotest_lib.site_utils.lxc import utils as lxc_utils
-from autotest_lib.client.common_lib import pidfile, logging_manager
+dir_name = os.path.dirname(os.path.abspath(__file__))
+test_name = None
+suite_name = None
 
-
-# Control segment to stage server-side package.
-STAGE_SERVER_SIDE_PACKAGE_CONTROL_FILE = server_job._control_segment_path(
-        'stage_server_side_package')
-
-# Command line to start servod in a moblab.
-START_SERVOD_CMD = 'sudo start servod BOARD=%s PORT=%s'
-STOP_SERVOD_CMD = 'sudo stop servod'
-
-_AUTOTEST_ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__), '..'))
-_CONTROL_FILE_FROM_CONTROL_NAME = 'control.from_control_name'
-
-_LXC_JOB_FOLDER = 'lxc_job_folder'
-
-def log_alarm(signum, frame):
-    logging.error("Received SIGALARM. Ignoring and continuing on.")
-    sys.exit(1)
-
-
-def _get_machines(parser):
-    """Get a list of machine names from command line arg -m or a file.
-
-    @param parser: Parser for the command line arguments.
-
-    @return: A list of machine names from command line arg -m or the
-             machines file specified in the command line arg -M.
-    """
-    if parser.options.machines:
-        machines = parser.options.machines.replace(',', ' ').strip().split()
-    else:
-        machines = []
-    machines_file = parser.options.machines_file
-    if machines_file:
-        machines = []
-        for m in open(machines_file, 'r').readlines():
-            # remove comments, spaces
-            m = re.sub('#.*', '', m).strip()
-            if m:
-                machines.append(m)
-        logging.debug('Read list of machines from file: %s', machines_file)
-        logging.debug('Machines: %s', ','.join(machines))
-
-    if machines:
-        for machine in machines:
-            if not machine or re.search('\s', machine):
-                parser.parser.error("Invalid machine: %s" % str(machine))
-        machines = list(set(machines))
-        machines.sort()
-    return machines
-
-
-def _stage_ssp(parser, resultsdir):
-    """Stage server-side package.
-
-    This function calls a control segment to stage server-side package based on
-    the job and autoserv command line option. The detail implementation could
-    be different for each host type. Currently, only CrosHost has
-    stage_server_side_package function defined.
-    The script returns None if no server-side package is available. However,
-    it may raise exception if it failed for reasons other than artifact (the
-    server-side package) not found.
-
-    @param parser: Command line arguments parser passed in the autoserv process.
-    @param resultsdir: Folder to store results. This could be different from
-            parser.options.results: parser.options.results  can be set to None
-            for results to be stored in a temp folder. resultsdir can be None
-            for autoserv run requires no logging.
-
-    @return: url to the autotest server-side package. None in case of errors.
-    """
-    machines_list = _get_machines(parser)
-    machines_list = server_job.get_machine_dicts(
-            machine_names=machines_list,
-            store_dir=os.path.join(resultsdir, parser.options.host_info_subdir),
-            in_lab=parser.options.lab,
-            use_shadow_store=not parser.options.local_only_host_info,
-            host_attributes=parser.options.host_attributes,
-    )
-
-    namespace = {'machines': machines_list,
-                 'isolate_hash': parser.options.isolate,
-                 'image': parser.options.test_source_build}
-    script_locals = {}
-
-    seven.exec_file(
-        STAGE_SERVER_SIDE_PACKAGE_CONTROL_FILE,
-        globals_=namespace,
-        locals_=script_locals,
-    )
-    ssp_url = script_locals['ssp_url']
-    if not ssp_url:
-        logging.error('Failed to stage SSP package: %s',
-                      script_locals['error_msg'])
-        logging.error('This job will fail later, when attempting to run with'
-                      ' SSP')
-    return ssp_url
-
-
-def _run_with_ssp(job, container_id, job_id, results, parser, ssp_url,
-                  machines):
-    """Run the server job with server-side packaging.
-
-    @param job: The server job object.
-    @param container_id: ID of the container to run the test.
-    @param job_id: ID of the test job.
-    @param results: Folder to store results. This could be different from
-                    parser.options.results:
-                    parser.options.results  can be set to None for results to be
-                    stored in a temp folder.
-                    results can be None if the autoserv run requires no logging.
-    @param parser: Command line parser that contains the options.
-    @param ssp_url: url of the staged server-side package.
-    @param machines: A list of machines to run the test.
-    """
-    if not ssp_url:
-        job.record('FAIL', None, None,
-                   'Failed to stage server-side package')
-        raise error.AutoservError('Failed to stage server-side package')
-
-    bucket = lxc.ContainerBucket(
-            base_name=_ssp_base_image_name_or_default(parser.options))
-    control = (parser.args[0] if len(parser.args) > 0 and parser.args[0] != ''
-               else None)
+if "-l" in sys.argv:
     try:
-        dut_name = machines[0] if len(machines) >= 1 else None
-        test_container = bucket.setup_test(container_id, job_id, ssp_url,
-                                           results, control=control,
-                                           job_folder=_LXC_JOB_FOLDER,
-                                           dut_name=dut_name,
-                                           isolate_hash=parser.options.isolate)
-    except Exception as e:
-        job.record('FAIL', None, None,
-                   'Failed to setup container for test: %s. Check logs in '
-                   'ssp_logs folder for more details.' % e)
-        raise
-
-    args = sys.argv[:]
-    args.remove('--require-ssp')
-    # --parent_job_id is only useful in autoserv running in host, not in
-    # container. Include this argument will cause test to fail for builds before
-    # CL 286265 was merged.
-    if '--parent_job_id' in args:
-        index = args.index('--parent_job_id')
-        args.remove('--parent_job_id')
-        # Remove the actual parent job id in command line arg.
-        del args[index]
-
-    # A dictionary of paths to replace in the command line. Key is the path to
-    # be replaced with the one in value.
-    paths_to_replace = {}
-    # Replace the control file path with the one in container.
-    if control:
-        container_control_filename = os.path.join(
-                lxc.CONTROL_TEMP_PATH, os.path.basename(control))
-        paths_to_replace[control] = container_control_filename
-    # Update result directory with the one in container.
-    container_result_dir = os.path.join(lxc.RESULT_DIR_FMT % _LXC_JOB_FOLDER)
-    if parser.options.results:
-        paths_to_replace[parser.options.results] = container_result_dir
-    args = [paths_to_replace.get(arg, arg) for arg in args]
-
-    # Apply --use-existing-results, results directory is aready created and
-    # mounted in container. Apply this arg to avoid exception being raised.
-    if not '--use-existing-results' in args:
-        args.append('--use-existing-results')
-
-    # Make sure autoserv running in container using a different pid file.
-    if not '--pidfile-label' in args:
-        args.extend(['--pidfile-label', 'container_autoserv'])
-
-    cmd_line = ' '.join(["'%s'" % arg if ' ' in arg else arg for arg in args])
-    logging.info('Run command in container: %s', cmd_line)
-    success = False
+        label = sys.argv[sys.argv.index('-l') + 1]
+    except IndexError:
+        raise Exception("No job name followed -l flag")
+    label_sections = label.split("/")
+    if len(label_sections) > 1:
+        test_name = label_sections[-1]
+    if len(label_sections) > 2:
+        suite_name = label_sections[-2]
+elif "--control-name" in sys.argv:
     try:
-        test_container.attach_run(cmd_line)
-        success = True
-    except Exception as e:
-        # If the test run inside container fails without generating any log,
-        # write a message to status.log to help troubleshooting.
-        debug_files = os.listdir(os.path.join(results, 'debug'))
-        if not debug_files:
-            job.record('FAIL', None, None,
-                       'Failed to run test inside the container: %s. Check '
-                       'logs in ssp_logs folder for more details.' % e)
-        raise
-    finally:
-        metrics.Counter(
-            'chromeos/autotest/experimental/execute_job_in_ssp').increment(
-                fields={'success': success})
-        test_container.destroy()
+        test_name = sys.argv[sys.argv.index('--control-name') + 1]
+    except IndexError:
+        raise Exception("No test name followed --control-name flag")
 
+os.environ["PY_VERSION"] = "3"
 
-def correct_results_folder_permission(results):
-    """Make sure the results folder has the right permission settings.
-
-    For tests running with server-side packaging, the results folder has the
-    owner of root. This must be changed to the user running the autoserv
-    process, so parsing job can access the results folder.
-    TODO(dshi): crbug.com/459344 Remove this function when test container can be
-    unprivileged container.
-
-    @param results: Path to the results folder.
-
-    """
-    if not results:
-        return
-
-    utils.run('sudo -n chown -R %s "%s"' % (os.getuid(), results))
-    utils.run('sudo -n chgrp -R %s "%s"' % (os.getgid(), results))
-
-
-def _start_servod(machine):
-    """Try to start servod in moblab if it's not already running or running with
-    different board or port.
-
-    @param machine: Name of the dut used for test.
-    """
-    if not utils.is_moblab():
-        return
-
-    logging.debug('Trying to start servod.')
-    try:
-        afe = frontend.AFE()
-        board = server_utils.get_board_from_afe(machine, afe)
-        hosts = afe.get_hosts(hostname=machine)
-        servo_host = hosts[0].attributes.get('servo_host', None)
-        servo_port = hosts[0].attributes.get('servo_port', 9999)
-        if not servo_host in ['localhost', '127.0.0.1']:
-            logging.warn('Starting servod is aborted. The dut\'s servo_host '
-                         'attribute is not set to localhost.')
-            return
-    except (urllib.error.HTTPError, urllib.error.URLError):
-        # Ignore error if RPC failed to get board
-        logging.error('Failed to get board name from AFE. Start servod is '
-                      'aborted')
-        return
-
-    try:
-        pid = utils.run('pgrep servod').stdout
-        cmd_line = utils.run('ps -fp %s' % pid).stdout
-        if ('--board %s' % board in cmd_line and
-            '--port %s' % servo_port in cmd_line):
-            logging.debug('Servod is already running with given board and port.'
-                          ' There is no need to restart servod.')
-            return
-        logging.debug('Servod is running with different board or port. '
-                      'Stopping existing servod.')
-        utils.run('sudo stop servod')
-    except error.CmdError:
-        # servod is not running.
-        pass
-
-    try:
-        utils.run(START_SERVOD_CMD % (board, servo_port))
-        logging.debug('Servod is started')
-    except error.CmdError as e:
-        logging.error('Servod failed to be started, error: %s', e)
-
-
-def _control_path_on_disk(control_name):
-    """Find the control file corresponding to the given control name, on disk.
-
-    @param control_name: NAME attribute of the control file to fetch.
-    @return: Path to the control file.
-    """
-    cf_getter = suite.create_fs_getter(_AUTOTEST_ROOT)
-    control_name_predicate = suite.test_name_matches_pattern_predicate(
-            '^%s$' % control_name)
-    tests = suite.find_and_parse_tests(cf_getter, control_name_predicate)
-    if not tests:
-        raise error.AutoservError(
-                'Failed to find any control files with NAME %s' % control_name)
-    if len(tests) > 1:
-        logging.error('Found more than one control file with NAME %s: %s',
-                      control_name, [t.path for t in tests])
-        raise error.AutoservError(
-                'Found more than one control file with NAME %s' % control_name)
-    return tests[0].path
-
-
-def _stage_control_file(control_name, results_dir):
-    """Stage the control file to execute from local autotest checkout.
-
-    @param control_name: Name of the control file to stage.
-    @param results_dir: Results directory to stage the control file into.
-    @return: Absolute path to the staged control file.
-    """
-    control_path = _control_path_on_disk(control_name)
-    new_control = os.path.join(results_dir, _CONTROL_FILE_FROM_CONTROL_NAME)
-    shutil.copy2(control_path, new_control)
-    return new_control
-
-
-def run_autoserv(pid_file_manager, results, parser, ssp_url, use_ssp):
-    """Run server job with given options.
-
-    @param pid_file_manager: PidFileManager used to monitor the autoserv process
-    @param results: Folder to store results.
-    @param parser: Parser for the command line arguments.
-    @param ssp_url: Url to server-side package.
-    @param use_ssp: Set to True to run with server-side packaging.
-    """
-    # send stdin to /dev/null
-    dev_null = os.open(os.devnull, os.O_RDONLY)
-    os.dup2(dev_null, sys.stdin.fileno())
-    os.close(dev_null)
-
-    # Create separate process group if the process is not a process group
-    # leader. This allows autoserv process to keep running after the caller
-    # process (drone manager call) exits.
-    if os.getpid() != os.getpgid(0):
-        os.setsid()
-
-    # Container name is predefined so the container can be destroyed in
-    # handle_sigterm.
-    job_or_task_id = job_directories.get_job_id_or_task_id(
-            parser.options.results)
-    container_id = lxc.ContainerId(job_or_task_id, time.time(), os.getpid())
-
-    # Implement SIGTERM handler
-    def handle_sigterm(signum, frame):
-        logging.debug('Received SIGTERM')
-        if pid_file_manager:
-            pid_file_manager.close_file(1, signal.SIGTERM)
-        logging.debug('Finished writing to pid_file. Killing process.')
-
-        # Update results folder's file permission. This needs to be done ASAP
-        # before the parsing process tries to access the log.
-        if use_ssp and results:
-            correct_results_folder_permission(results)
-
-        # This sleep allows the pending output to be logged before the kill
-        # signal is sent.
-        time.sleep(.1)
-        if use_ssp:
-            logging.debug('Destroy container %s before aborting the autoserv '
-                          'process.', container_id)
-            try:
-                bucket = lxc.ContainerBucket(
-                        base_name=_ssp_base_image_name_or_default(
-                                parser.options))
-                container = bucket.get_container(container_id)
-                if container:
-                    container.destroy()
-                    logging.debug("Container %s destroyed.", container_id)
-                else:
-                    logging.debug('Container %s is not found.', container_id)
-                    bucket.scrub_container_location(container_id)
-            except:
-                # Handle any exception so the autoserv process can be aborted.
-                logging.exception('Failed to destroy container %s.',
-                                  container_id)
-            # Try to correct the result file permission again after the
-            # container is destroyed, as the container might have created some
-            # new files in the result folder.
-            if results:
-                correct_results_folder_permission(results)
-
-        os.killpg(os.getpgrp(), signal.SIGKILL)
-
-    # Set signal handler
-    signal.signal(signal.SIGTERM, handle_sigterm)
-
-    # faulthandler is only needed to debug in the Lab and is not avaliable to
-    # be imported in the chroot as part of VMTest, so Try-Except it.
-    try:
-        import faulthandler
-        faulthandler.register(signal.SIGTERM, all_threads=True, chain=True)
-        logging.debug('faulthandler registered on SIGTERM.')
-    except ImportError:
-        # exc_clear() doesn't exist (nor is needed) in python3
-        if six.PY2:
-            sys.exc_clear()
-
-    # Ignore SIGTTOU's generated by output from forked children.
-    signal.signal(signal.SIGTTOU, signal.SIG_IGN)
-
-    # If we received a SIGALARM, let's be loud about it.
-    signal.signal(signal.SIGALRM, log_alarm)
-
-    # Server side tests that call shell scripts often depend on $USER being set
-    # but depending on how you launch your autotest scheduler it may not be set.
-    os.environ['USER'] = getpass.getuser()
-
-    label = parser.options.label
-    group_name = parser.options.group_name
-    user = parser.options.user
-    client = parser.options.client
-    server = parser.options.server
-    verify = parser.options.verify
-    repair = parser.options.repair
-    cleanup = parser.options.cleanup
-    provision = parser.options.provision
-    reset = parser.options.reset
-    job_labels = parser.options.job_labels
-    no_tee = parser.options.no_tee
-    execution_tag = parser.options.execution_tag
-    ssh_user = parser.options.ssh_user
-    ssh_port = parser.options.ssh_port
-    ssh_pass = parser.options.ssh_pass
-    collect_crashinfo = parser.options.collect_crashinfo
-    control_filename = parser.options.control_filename
-    verify_job_repo_url = parser.options.verify_job_repo_url
-    skip_crash_collection = parser.options.skip_crash_collection
-    ssh_verbosity = int(parser.options.ssh_verbosity)
-    ssh_options = parser.options.ssh_options
-    no_use_packaging = parser.options.no_use_packaging
-    in_lab = bool(parser.options.lab)
-
-    # can't be both a client and a server side test
-    if client and server:
-        parser.parser.error("Can not specify a test as both server and client!")
-
-    if provision and client:
-        parser.parser.error("Cannot specify provisioning and client!")
-
-    is_special_task = (verify or repair or cleanup or collect_crashinfo or
-                       provision or reset)
-    use_client_trampoline = False
-    if parser.options.control_name:
-        if use_ssp:
-            # When use_ssp is True, autoserv will be re-executed inside a
-            # container preserving the --control-name argument. Control file
-            # will be staged inside the rexecuted autoserv.
-            control = None
-        else:
-            try:
-                control = _stage_control_file(parser.options.control_name,
-                                              results)
-            except error.AutoservError as e:
-                logging.info("Using client trampoline because of: %s", e)
-                control = parser.options.control_name
-                use_client_trampoline = True
-
-    elif parser.args:
-        control = parser.args[0]
-    else:
-        if not is_special_task:
-            parser.parser.error("Missing argument: control file")
-        control = None
-
-    if ssh_verbosity > 0:
-        # ssh_verbosity is an integer between 0 and 3, inclusive
-        ssh_verbosity_flag = '-' + 'v' * ssh_verbosity
-    else:
-        ssh_verbosity_flag = ''
-
-    machines = _get_machines(parser)
-    if group_name and len(machines) < 2:
-        parser.parser.error('-G %r may only be supplied with more than one '
-                            'machine.' % group_name)
-
-    logging.debug("Parser.args is %r", parser.args)
-    try:
-      logging.debug("Parser.options.args is %r", parser.options.args)
-    except AttributeError:
-      logging.debug("No Parser.options.args.")
-
-    try:
-      logging.debug("Parser.options is %r", parser.options)
-    except AttributeError:
-      logging.debug("No Parser.options.")
-    job_kwargs = {
-            'control': control,
-            'args': parser.args[1:],
-            'resultdir': results,
-            'label': label,
-            'user': user,
-            'machines': machines,
-            'machine_dict_list': server_job.get_machine_dicts(
-                    machine_names=machines,
-                    store_dir=os.path.join(results,
-                                           parser.options.host_info_subdir),
-                    in_lab=in_lab,
-                    use_shadow_store=not parser.options.local_only_host_info,
-                    host_attributes=parser.options.host_attributes,
-            ),
-            'client': client,
-            'ssh_user': ssh_user,
-            'ssh_port': ssh_port,
-            'ssh_pass': ssh_pass,
-            'ssh_verbosity_flag': ssh_verbosity_flag,
-            'ssh_options': ssh_options,
-            'group_name': group_name,
-            'tag': execution_tag,
-            'disable_sysinfo': parser.options.disable_sysinfo,
-            'in_lab': in_lab,
-            'use_client_trampoline': use_client_trampoline,
-            'sync_offload_dir': parser.options.sync_offload_dir,
-    }
-    if parser.options.parent_job_id:
-        job_kwargs['parent_job_id'] = int(parser.options.parent_job_id)
-    if control_filename:
-        job_kwargs['control_filename'] = control_filename
-    job = server_job.server_job(**job_kwargs)
-
-    job.logging.start_logging()
-
-    # perform checks
-    job.precheck()
-
-    # run the job
-    exit_code = 0
-    auto_start_servod = _CONFIG.get_config_value(
-            'AUTOSERV', 'auto_start_servod', type=bool, default=False)
-
-    if not utils.is_in_container():
-        # crbug.com/1054522 -- ts_mon setup is broken inside the SSP container
-        # due to a problem in the installed python packages.
-        # Trying to clean up an incorrectly initialized ts_mon state adds a 5
-        # second overhead in process teardown, so avoid setting up ts_mon
-        # entirely inside the SSP container.
-        site_utils.SetupTsMonGlobalState('autoserv', indirect=False,
-                                         short_lived=True)
-    try:
-        try:
-            if repair:
-                if auto_start_servod and len(machines) == 1:
-                    _start_servod(machines[0])
-                job.repair(job_labels)
-            elif verify:
-                job.verify(job_labels)
-            elif provision:
-                job.provision(job_labels)
-            elif reset:
-                job.reset(job_labels)
-            elif cleanup:
-                job.cleanup(job_labels)
-            else:
-                if auto_start_servod and len(machines) == 1:
-                    _start_servod(machines[0])
-                if use_ssp:
-                    try:
-                        _run_with_ssp(job, container_id, job_or_task_id,
-                                        results, parser, ssp_url, machines)
-                    finally:
-                        # Update the ownership of files in result folder.
-                        correct_results_folder_permission(results)
-                else:
-                    if collect_crashinfo:
-                        # Update the ownership of files in result folder. If the
-                        # job to collect crashinfo was running inside container
-                        # (SSP) and crashed before correcting folder permission,
-                        # the result folder might have wrong permission setting.
-                        try:
-                            correct_results_folder_permission(results)
-                        except:
-                            # Ignore any error as the user may not have root
-                            # permission to run sudo command.
-                            pass
-                    metric_name = ('chromeos/autotest/experimental/'
-                                   'autoserv_job_run_duration')
-                    f = {'in_container': utils.is_in_container(),
-                         'success': False}
-                    with metrics.SecondsTimer(metric_name, fields=f) as c:
-                        job.run(verify_job_repo_url=verify_job_repo_url,
-                                only_collect_crashinfo=collect_crashinfo,
-                                skip_crash_collection=skip_crash_collection,
-                                job_labels=job_labels,
-                                use_packaging=(not no_use_packaging))
-                        c['success'] = True
-
-        finally:
-            job.close()
-    except:
-        exit_code = 1
-        traceback.print_exc()
-    finally:
-        metrics.Flush()
-
-    sys.exit(exit_code)
-
-
-# Job breakdown statuses
-_hs = host_states.Status
-_qs = host_queue_entry_states.Status
-_status_list = [
-        _qs.QUEUED, _qs.RESETTING, _qs.VERIFYING,
-        _qs.PROVISIONING, _hs.REPAIRING, _qs.CLEANING,
-        _qs.RUNNING, _qs.GATHERING, _qs.PARSING]
-_JOB_OVERHEAD_STATUS = autotest_enum.AutotestEnum(*_status_list,
-                                                  string_values=True)
-
-
-def get_job_status(options):
-    """Returns the HQE Status for this run.
-
-    @param options: parser options.
-    """
-    s = _JOB_OVERHEAD_STATUS
-    task_mapping = {
-            'reset': s.RESETTING, 'verify': s.VERIFYING,
-            'provision': s.PROVISIONING, 'repair': s.REPAIRING,
-            'cleanup': s.CLEANING, 'collect_crashinfo': s.GATHERING}
-    match = [task for task in task_mapping if getattr(options, task, False)]
-    return task_mapping[match[0]] if match else s.RUNNING
-
-
-def _require_ssp_from_control(control_name):
-    """Read the value of REQUIRE_SSP from test control file.
-
-    This reads the control file from the prod checkout of autotest and uses that
-    to determine whether to even stage the SSP package on a devserver.
-
-    This means:
-    [1] Any change in REQUIRE_SSP directive in a test requires a prod-push to go
-    live.
-    [2] This function may find that the control file does not exist but the SSP
-    package may contain the test file. This function conservatively returns True
-    in that case.
-
-    This function is called very early in autoserv, before logging is setup.
-    """
-    if not control_name:
-        return True
-    try:
-        path = _control_path_on_disk(control_name)
-    except error.AutoservError as e:
-        sys.stderr.write("autoserv: Could not determine control file path,"
-                         " assuming we need SSP: %s\n" % e)
-        sys.stderr.flush()
-        return True
-    if not os.path.isfile(path):
-        return True
-    control = control_data.parse_control(path)
-    # There must be explicit directive in the control file to disable SSP.
-    if not control or control.require_ssp is None:
-        return True
-    return control.require_ssp
-
-
-def _ssp_base_image_name_or_default(options):
-    """Extract base image name from autoserv options or the global config."""
-    if options.ssp_base_image_name:
-        return options.ssp_base_image_name
-    return global_config.global_config.get_config_value('AUTOSERV',
-                                                        'container_base_name')
-
-
-def main():
-    start_time = datetime.datetime.now()
-    parser = autoserv_parser.autoserv_parser
-    parser.parse_args()
-
-    if len(sys.argv) == 1:
-        parser.parser.print_help()
-        sys.exit(1)
-
-    if parser.options.no_logging:
-        results = None
-    else:
-        results = parser.options.results
-        if not results:
-            results = 'results.' + time.strftime('%Y-%m-%d-%H.%M.%S')
-        results = os.path.abspath(results)
-        resultdir_exists = False
-        for filename in ('control.srv', 'status.log', '.autoserv_execute'):
-            if os.path.exists(os.path.join(results, filename)):
-                resultdir_exists = True
-        if not parser.options.use_existing_results and resultdir_exists:
-            error = "Error: results directory already exists: %s\n" % results
-            sys.stderr.write(error)
-            sys.exit(1)
-
-        # Now that we certified that there's no leftover results dir from
-        # previous jobs, lets create the result dir since the logging system
-        # needs to create the log file in there.
-        if not os.path.isdir(results):
-            os.makedirs(results)
-
-    if parser.options.require_ssp:
-        # This is currently only used for skylab (i.e., when --control-name is
-        # used).
-        use_ssp = _require_ssp_from_control(parser.options.control_name)
-    else:
-        use_ssp = False
-
-
-    if use_ssp:
-        log_dir = os.path.join(results, 'ssp_logs') if results else None
-        if log_dir and not os.path.exists(log_dir):
-            os.makedirs(log_dir)
-    else:
-        log_dir = results
-
-    logging_manager.configure_logging(
-            server_logging_config.ServerLoggingConfig(),
-            results_dir=log_dir,
-            use_console=not parser.options.no_tee,
-            verbose=parser.options.verbose,
-            no_console_prefix=parser.options.no_console_prefix)
-
-    logging.debug('autoserv is running in drone %s.', socket.gethostname())
-    logging.debug('autoserv environment: %r', os.environ)
-    logging.debug('autoserv command was: %s', ' '.join(sys.argv))
-    logging.debug('autoserv parsed options: %s', parser.options)
-
-    if use_ssp:
-        ssp_url = _stage_ssp(parser, results)
-    else:
-        ssp_url = None
-
-    if results:
-        logging.info("Results placed in %s" % results)
-
-        # wait until now to perform this check, so it get properly logged
-        if (parser.options.use_existing_results and not resultdir_exists and
-            not utils.is_in_container()):
-            logging.error("No existing results directory found: %s", results)
-            sys.exit(1)
-
-    if parser.options.write_pidfile and results:
-        pid_file_manager = pidfile.PidFileManager(parser.options.pidfile_label,
-                                                  results)
-        pid_file_manager.open_file()
-    else:
-        pid_file_manager = None
-
-    autotest.Autotest.set_install_in_tmpdir(
-        parser.options.install_in_tmpdir)
-
-    exit_code = 0
-    is_task = (parser.options.verify or parser.options.repair or
-               parser.options.provision or parser.options.reset or
-               parser.options.cleanup or parser.options.collect_crashinfo)
-
-    trace_labels = {
-            'job_id': job_directories.get_job_id_or_task_id(
-                    parser.options.results)
-    }
-    trace = cloud_trace.SpanStack(
-            labels=trace_labels,
-            global_context=parser.options.cloud_trace_context)
-    trace.enabled = parser.options.cloud_trace_context_enabled == 'True'
-    try:
-        try:
-            with trace.Span(get_job_status(parser.options)):
-                run_autoserv(pid_file_manager, results, parser, ssp_url,
-                             use_ssp)
-        except SystemExit as e:
-            exit_code = e.code
-            if exit_code:
-                logging.exception('Uncaught SystemExit with code %s', exit_code)
-        except Exception:
-            # If we don't know what happened, we'll classify it as
-            # an 'abort' and return 1.
-            logging.exception('Uncaught Exception, exit_code = 1.')
-            exit_code = 1
-    finally:
-        if pid_file_manager:
-            pid_file_manager.close_file(exit_code)
-    sys.exit(exit_code)
-
-
-if __name__ == '__main__':
-    main()
+exit(subprocess.call([os.path.join(dir_name, '_autoserv')] + sys.argv[1:]))
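
Side note for reviewers: the single line above turns server/autoserv into a thin
wrapper that re-invokes the real entry point, _autoserv, with the original
arguments and propagates its exit status. A minimal standalone sketch of that
pattern (dir_name is assumed to be the wrapper's own directory, as in the
surrounding file, which is not shown in this hunk):

    #!/usr/bin/env python3
    # Hypothetical sketch of the delegation wrapper used above.
    import os
    import subprocess
    import sys

    # Assumption: the real implementation lives in a sibling executable
    # named '_autoserv' next to this wrapper.
    dir_name = os.path.dirname(os.path.abspath(__file__))

    # Forward all CLI arguments unchanged and propagate the child's exit code
    # (sys.exit is the idiomatic spelling of the exit(...) call in the hunk).
    sys.exit(subprocess.call([os.path.join(dir_name, '_autoserv')] +
                             sys.argv[1:]))
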
diff --git a/server/autoserv_parser.py b/server/autoserv_parser.py
index 6733d70..929e02b 100644
--- a/server/autoserv_parser.py
+++ b/server/autoserv_parser.py
@@ -113,8 +113,11 @@
         self.parser.add_argument('--ssh-user', action='store',
                                  type=str, dest='ssh_user', default='root',
                                  help='specify the user for ssh connections')
-        self.parser.add_argument('--ssh-port', action='store',
-                                 type=int, dest='ssh_port', default=22,
+        self.parser.add_argument('--ssh-port',
+                                 action='store',
+                                 type=int,
+                                 dest='ssh_port',
+                                 default=None,
                                  help=('specify the port to use for ssh '
                                        'connections'))
         self.parser.add_argument('--ssh-pass', action='store',
@@ -163,13 +166,6 @@
                                  dest='no_use_packaging', default=False,
                                  help=('Disable install modes that use the '
                                        'packaging system.'))
-        self.parser.add_argument('--source_isolate', action='store',
-                                 type=str, default='',
-                                 dest='isolate',
-                                 help=('Hash for isolate containing build '
-                                       'contents needed for server-side '
-                                       'packaging. Takes precedence over '
-                                       'test_source_build, if present.'))
         self.parser.add_argument('--test_source_build', action='store',
                                  type=str, default='',
                                  dest='test_source_build',
@@ -192,6 +188,12 @@
         self.parser.add_argument('--lab', action='store', type=str,
                                  dest='lab', default='',
                                  help=argparse.SUPPRESS)
+        self.parser.add_argument('--CFT',
+                                 action='store_true',
+                                 dest='cft',
+                                 default=False,
+                                 help=('If running in, or mocking, '
+                                       'the CFT env.'))
         self.parser.add_argument('--cloud_trace_context', type=str, default='',
                                  action='store', dest='cloud_trace_context',
                                  help=('Global trace context to configure '
@@ -238,6 +240,35 @@
                      ' enabled. The default value is provided via the global'
                      ' config setting for AUTOSERV/container_base_name.'
         )
+        self.parser.add_argument(
+                '--image-storage-server',
+                action='store',
+                type=str,
+                default='',
+                help='The gs path to the image storage server to be used'
+                ' for this autoserv invocation. This overrides the'
+                ' default provided by CROS/image_storage_server.')
+        self.parser.add_argument('--py_version',
+                                 action='store',
+                                 dest='py_version',
+                                 default='2',
+                                 type=str,
+                                 choices=['2', '3'])
+        self.parser.add_argument('-ch',
+                                 action='store',
+                                 type=str,
+                                 dest='companion_hosts',
+                                 help='list of companion hosts for the test.')
+        self.parser.add_argument('--dut_servers',
+                                 action='store',
+                                 type=str,
+                                 dest='dut_servers',
+                                 help='list of DUT servers for the test.')
+        self.parser.add_argument('--force_full_log_collection',
+                                 action='store_true',
+                                 dest='force_full_log_collection',
+                                 default=False,
+                                 help='Force full log collection on tests.')
 
         #
         # Warning! Please read before adding any new arguments!
@@ -269,7 +300,7 @@
             if unknown_args:
                 removed_args.append(unknown_args.pop(0))
         if removed_args:
-            logging.warn('Unknown arguments are removed from the options: %s',
+            logging.warning('Unknown arguments are removed from the options: %s',
                          removed_args)
 
         self.args = unknown_args + shlex.split(self.options.args or '')
@@ -277,7 +308,7 @@
         self.options.host_attributes = ast.literal_eval(
                 self.options.host_attributes)
         if self.options.lab and self.options.host_attributes:
-            logging.warn(
+            logging.warning(
                     '--lab and --host-attributes are mutually exclusive. '
                     'Ignoring custom host attributes: %s',
                     str(self.options.host_attributes))
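
For illustration, a stripped-down parser mirroring the new flags added above
(--CFT, --py_version, -ch, --dut_servers); names and defaults are taken from
the hunk, everything else is simplified and is not part of autoserv_parser:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--CFT', action='store_true', dest='cft',
                        default=False)
    parser.add_argument('--py_version', action='store', dest='py_version',
                        default='2', type=str, choices=['2', '3'])
    parser.add_argument('-ch', action='store', type=str,
                        dest='companion_hosts')
    parser.add_argument('--dut_servers', action='store', type=str,
                        dest='dut_servers')

    opts = parser.parse_args(['--CFT', '--py_version', '3',
                              '-ch', 'host1,host2'])
    print(opts.cft, opts.py_version, opts.companion_hosts)
    # -> True 3 host1,host2 (companion hosts arrive as a comma-joined string)
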
diff --git a/server/autoserv_parser_unittest.py b/server/autoserv_parser_unittest.py
index 5cbdf8e..b7c4106 100755
--- a/server/autoserv_parser_unittest.py
+++ b/server/autoserv_parser_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 """Tests for autoserv_parser."""
 
diff --git a/server/autoserv_utils.py b/server/autoserv_utils.py
index 9fefc43..584a454 100644
--- a/server/autoserv_utils.py
+++ b/server/autoserv_utils.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -17,10 +17,15 @@
 autoserv_path = os.path.join(autoserv_directory, 'autoserv')
 
 
-def autoserv_run_job_command(autoserv_directory, machines,
-                             results_directory=None, extra_args=[], job=None,
-                             queue_entry=None, verbose=True,
-                             write_pidfile=True, fast_mode=False,
+def autoserv_run_job_command(autoserv_directory,
+                             machines,
+                             results_directory=None,
+                             extra_args=[],
+                             job=None,
+                             queue_entry=None,
+                             verbose=True,
+                             write_pidfile=True,
+                             fast_mode=False,
                              ssh_verbosity=0,
                              no_console_prefix=False,
                              ssh_options=None,
@@ -28,7 +33,10 @@
                              in_lab=False,
                              host_attributes=None,
                              use_virtualenv=False,
-                             host_info_subdir=''):
+                             host_info_subdir='',
+                             companion_hosts=None,
+                             dut_servers=None,
+                             is_cft=False):
     """
     Construct an autoserv command from a job or host queue entry.
 
@@ -68,6 +76,12 @@
                            support everywhere. Default: False.
     @param host_info_subdir: When set, a sub-directory of the results directory
                              where host info file(s) are stored.
+    @param companion_hosts: a str or list of hosts to be used as companions
+                            for, and provided to, the test. NOTE: these are
+                            different from machines, where each machine is a
+                            host that the test is run on.
+    @param dut_servers: a str or list of hosts to be used as DUT servers and
+                        provided to the test.
 
     @returns The autoserv command line as a list of executable + parameters.
 
@@ -77,7 +91,7 @@
     full_script_path = os.path.join(autoserv_directory, script_name)
 
     # virtualenv_autoserv is a `POSIX shell script, ASCII text executable`.
-    # Calling with `sys.executable` would fail because python doesn't 
+    # Calling with `sys.executable` would fail because python doesn't
     # interpret shebangs itself.
     if use_virtualenv:
         command = [full_script_path]
@@ -96,6 +110,16 @@
     if machines:
         command += ['-m', machines]
 
+    if companion_hosts:
+        if not isinstance(companion_hosts, list):
+            companion_hosts = [companion_hosts]
+        command += ['-ch', ",".join(companion_hosts)]
+
+    if dut_servers:
+        if not isinstance(dut_servers, list):
+            dut_servers = [dut_servers]
+        command += ['--dut_servers', ",".join(dut_servers)]
+
     if ssh_verbosity:
         command += ['--ssh_verbosity', str(ssh_verbosity)]
 
@@ -142,4 +166,11 @@
     if in_lab:
         command.extend(['--lab', 'True'])
 
+    if is_cft:
+        command.append('--CFT')
+
+    py_version = os.getenv('PY_VERSION')
+    if py_version:
+        command.extend(['--py_version', py_version])
+
     return command + extra_args
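
To summarize the new plumbing above, a hedged sketch of how the extra
parameters end up on the autoserv command line: companion hosts and DUT
servers are comma-joined, --CFT is a bare flag, and the Python version is
inherited from the PY_VERSION environment variable. The helper name below is
hypothetical; it only mirrors the hunk and is not the real builder:

    import os


    def extra_autoserv_args(companion_hosts=None, dut_servers=None,
                            is_cft=False):
        """Build the additional autoserv flags introduced in this change."""
        command = []
        if companion_hosts:
            if not isinstance(companion_hosts, list):
                companion_hosts = [companion_hosts]
            command += ['-ch', ','.join(companion_hosts)]
        if dut_servers:
            if not isinstance(dut_servers, list):
                dut_servers = [dut_servers]
            command += ['--dut_servers', ','.join(dut_servers)]
        if is_cft:
            command.append('--CFT')
        py_version = os.getenv('PY_VERSION')
        if py_version:
            command += ['--py_version', py_version]
        return command


    # extra_autoserv_args(['c1', 'c2'], 'dut1', is_cft=True)
    # -> ['-ch', 'c1,c2', '--dut_servers', 'dut1', '--CFT']
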
diff --git a/server/autotest.py b/server/autotest.py
index d195bd0..859b888 100644
--- a/server/autotest.py
+++ b/server/autotest.py
@@ -32,7 +32,7 @@
 
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = client_utils.metrics_mock
 
@@ -55,6 +55,11 @@
 LOG_BUFFER_SIZE_BYTES = 64
 
 
+def _set_py_version():
+    """As of ~R102 (aka when this merges), DUTs only have Python 3."""
+    return '--py_version=3'
+
+
 class AutodirNotFoundError(Exception):
     """No Autotest installation could be found."""
 
@@ -345,6 +350,8 @@
             self.get()
         host.wait_up(timeout=30)
         host.setup()
+        # B/203609358 something is removing telemetry. Adding this to check
+        # the status of the folder as early as possible.
         logging.info("Installing autotest on %s", host.hostname)
 
         # set up the autotest directory on the remote machine
@@ -647,13 +654,26 @@
             self._check_client_test_result(host, test_name)
 
 
-    def run_test(self, test_name, results_dir='.', host=None,
-                 parallel_flag=False, background=False,
-                 client_disconnect_timeout=None, *args, **dargs):
-        self.run_timed_test(test_name, results_dir, host, timeout=None,
-                            parallel_flag=parallel_flag, background=background,
-                            client_disconnect_timeout=client_disconnect_timeout,
-                            *args, **dargs)
+    def run_test(self,
+                 test_name,
+                 results_dir='.',
+                 host=None,
+                 parallel_flag=False,
+                 background=False,
+                 client_disconnect_timeout=None,
+                 timeout=None,
+                 *args,
+                 **dargs):
+        self.run_timed_test(
+                test_name,
+                results_dir,
+                host,
+                timeout=timeout,
+                parallel_flag=parallel_flag,
+                background=background,
+                client_disconnect_timeout=client_disconnect_timeout,
+                *args,
+                **dargs)
 
 
     def run_static_method(self, module, method, results_dir='.', host=None,
@@ -735,23 +755,36 @@
 
 
     def get_background_cmd(self, section):
-        cmd = ['nohup', os.path.join(self.autodir, 'bin/autotest_client')]
+        cmd = [
+                'nohup',
+                os.path.join(self.autodir, 'bin/autotest_client'),
+                _set_py_version()
+        ]
         cmd += self.get_base_cmd_args(section)
         cmd += ['>/dev/null', '2>/dev/null', '&']
         return ' '.join(cmd)
 
 
     def get_daemon_cmd(self, section, monitor_dir):
-        cmd = ['nohup', os.path.join(self.autodir, 'bin/autotestd'),
-               monitor_dir, '-H autoserv']
+        cmd = [
+                'nohup',
+                os.path.join(self.autodir, 'bin/autotestd'), monitor_dir,
+                '-H autoserv',
+                _set_py_version()
+        ]
         cmd += self.get_base_cmd_args(section)
         cmd += ['>/dev/null', '2>/dev/null', '&']
         return ' '.join(cmd)
 
 
     def get_monitor_cmd(self, monitor_dir, stdout_read, stderr_read):
-        cmd = [os.path.join(self.autodir, 'bin', 'autotestd_monitor'),
-               monitor_dir, str(stdout_read), str(stderr_read)]
+        cmd = [
+                os.path.join(self.autodir, 'bin', 'autotestd_monitor'),
+                monitor_dir,
+                str(stdout_read),
+                str(stderr_read),
+                _set_py_version()
+        ]
         return ' '.join(cmd)
 
 
@@ -968,14 +1001,40 @@
 
 
     @staticmethod
-    def _strip_stderr_prologue(stderr):
+    def _strip_stderr_prologue(stderr, monitor_cmd):
         """Strips the 'standard' prologue that get pre-pended to every
         remote command and returns the text that was actually written to
-        stderr by the remote command."""
-        stderr_lines = stderr.split("\n")[1:]
+        stderr by the remote command.
+
+        This will always strip at least the first line (the 'standard'
+        prologue), along with any extra messages that precede it. The
+        following are common 'extra' messages which may appear:
+
+        1.) Any warnings. For example, on CrOS version R90, any script running
+            in python2 results in the following warning in stderr:
+            "warning: Python 2.7 is deprecated and will be removed from CrOS by
+            end of 2021. All users must migrate ASAP"
+        2.) The actual command used to launch autotestd_monitor (monitor_cmd)
+
+        Additionally, a NOTE line may be present that also needs to be
+        stripped.
+        """
+        stderr_lines = stderr.split("\n")
         if not stderr_lines:
             return ""
-        elif stderr_lines[0].startswith("NOTE: autotestd_monitor"):
+
+        # If no warnings/monitor_cmd, strip only the first line
+        skipn = 1
+        for i, line in enumerate(stderr_lines):
+            if monitor_cmd in line:
+                # Add *2* (1 for the index, 1 for the 'standard' prologue
+                # line which follows it).
+                skipn = i + 2
+                break
+
+        stderr_lines = stderr_lines[skipn:]
+
+        if stderr_lines and stderr_lines[0].startswith("NOTE: autotestd_monitor"):
             del stderr_lines[0]
         return "\n".join(stderr_lines)
 
@@ -1020,7 +1079,9 @@
                         "NETWORK")
 
                 stdout_read += len(result.stdout)
-                stderr_read += len(self._strip_stderr_prologue(result.stderr))
+                stderr_read += len(
+                        self._strip_stderr_prologue(result.stderr,
+                                                    monitor_cmd))
 
                 if result.exit_status is not None:
                     # TODO (crosbug.com/38224)- sbasi: Remove extra logging.
@@ -1178,6 +1239,8 @@
                            "client on %s\n") % self.host.hostname
                     raise error.AutotestRunError(msg)
         finally:
+            # B/203609358 something is removing telemetry. Adding this to
+            # check the status of the folder as late as possible.
             logging.debug('Autotest job finishes running. Below is the '
                           'post-processing operations.')
             logger.close()
@@ -1419,8 +1482,8 @@
             server_package = os.path.join(self.job.pkgmgr.pkgmgr_dir,
                                           'packages', pkg_name)
             if os.path.exists(server_package):
-              self.host.send_file(server_package, remote_dest)
-              return
+                self.host.send_file(server_package, remote_dest)
+                return
 
         except error.AutoservRunError:
             msg = ("Package %s could not be sent from the package cache." %
diff --git a/server/autotest_unittest.py b/server/autotest_unittest.py
index 74310fc..2c8edab 100755
--- a/server/autotest_unittest.py
+++ b/server/autotest_unittest.py
@@ -1,10 +1,12 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #pylint: disable-msg=C0111
 __author__ = "raphtee@google.com (Travis Miller)"
 
+from six.moves import StringIO
 import unittest, os, tempfile, logging
 
 import common
+
 from autotest_lib.server import autotest, utils, hosts, server_job, profilers
 from autotest_lib.client.bin import sysinfo
 from autotest_lib.client.common_lib import packages
@@ -212,7 +214,7 @@
         c.get_config_value.expect_call("PACKAGES",
             'fetch_location', type=list, default=[]).and_return(['repo'])
 
-        cfile = self.god.create_mock_class(file, "file")
+        cfile = self.god.create_mock_class(StringIO, "StringIO")
         cfile_orig = "original control file"
         cfile_new = "args = []\njob.add_repository(['repo'])\n"
         cfile_new += cfile_orig
@@ -250,8 +252,8 @@
 
 
     def _expect_failed_run(self, command):
-        (self.host.run.expect_call(command)
-         .and_raises(error.AutoservRunError('dummy', object())))
+        (self.host.run.expect_call(command).and_raises(
+                error.AutoservRunError('placeholder', object())))
 
 
     def test_get_installed_autodir(self):
diff --git a/server/base_utils.py b/server/base_utils.py
index fd8be19..68701ff 100644
--- a/server/base_utils.py
+++ b/server/base_utils.py
@@ -236,7 +236,7 @@
     return (ntuples, failures)
 
 
-def parse_machine(machine, user='root', password='', port=22):
+def parse_machine(machine, user='root', password='', port=None):
     """
     Parse the machine string user:pass@host:port and return it separately,
     if the machine string is not complete, use the default parameters
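
Hedged usage sketch of parse_machine() after the default-port change: with no
explicit port the returned tuple now carries None instead of 22, leaving port
selection to the SSH layer. Expected outputs are taken from the updated unit
test below; running this requires autotest's import path (e.g. via the repo's
common.py):

    from autotest_lib.server import base_utils

    print(base_utils.parse_machine('host'))
    # -> ('host', 'root', '', None)
    print(base_utils.parse_machine('user:pass@host:1234'))
    # -> ('host', 'user', 'pass', 1234)
    print(base_utils.parse_machine('user:pass@[abdc::ef]:1234'))
    # -> ('abdc::ef', 'user', 'pass', 1234)
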
diff --git a/server/base_utils_unittest.py b/server/base_utils_unittest.py
index d5a21e3..4d601ee 100755
--- a/server/base_utils_unittest.py
+++ b/server/base_utils_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 __author__ = 'raphtee@google.com (Travis Miller)'
 
@@ -29,25 +29,23 @@
     # parse_machine() test cases
     def test_parse_machine_good(self):
         '''test that parse_machine() is outputting the correct data'''
-        gooddata = (('host',                ('host', 'root', '', 22)),
-                    ('host:21',             ('host', 'root', '', 21)),
-                    ('user@host',           ('host', 'user', '', 22)),
-                    ('user:pass@host',      ('host', 'user', 'pass', 22)),
-                    ('user:pass@host:1234', ('host', 'user', 'pass', 1234)),
-
-                    ('user:pass@10.3.2.1',
-                     ('10.3.2.1', 'user', 'pass', 22)),
-                    ('user:pass@10.3.2.1:1234',
-                     ('10.3.2.1', 'user', 'pass', 1234)),
-
-                    ('::1',                 ('::1', 'root', '', 22)),
-                    ('user:pass@abdc::ef',  ('abdc::ef', 'user', 'pass', 22)),
-                    ('abdc::ef:99',         ('abdc::ef:99', 'root', '', 22)),
-                    ('user:pass@[abdc::ef:99]',
-                     ('abdc::ef:99', 'user', 'pass', 22)),
-                    ('user:pass@[abdc::ef]:1234',
-                     ('abdc::ef', 'user', 'pass', 1234)),
-                   )
+        gooddata = (
+                ('host', ('host', 'root', '', None)),
+                ('host:21', ('host', 'root', '', 21)),
+                ('user@host', ('host', 'user', '', None)),
+                ('user:pass@host', ('host', 'user', 'pass', None)),
+                ('user:pass@host:1234', ('host', 'user', 'pass', 1234)),
+                ('user:pass@10.3.2.1', ('10.3.2.1', 'user', 'pass', None)),
+                ('user:pass@10.3.2.1:1234', ('10.3.2.1', 'user', 'pass',
+                                             1234)),
+                ('::1', ('::1', 'root', '', None)),
+                ('user:pass@abdc::ef', ('abdc::ef', 'user', 'pass', None)),
+                ('abdc::ef:99', ('abdc::ef:99', 'root', '', None)),
+                ('user:pass@[abdc::ef:99]', ('abdc::ef:99', 'user', 'pass',
+                                             None)),
+                ('user:pass@[abdc::ef]:1234', ('abdc::ef', 'user', 'pass',
+                                               1234)),
+        )
         for machine, result in gooddata:
             self.assertEquals(utils.parse_machine(machine), result)
 
diff --git a/server/control_segments/cleanup b/server/control_segments/cleanup
index 9e57a60..cbdfbe6 100644
--- a/server/control_segments/cleanup
+++ b/server/control_segments/cleanup
@@ -6,7 +6,7 @@
 from autotest_lib.server.cros import provision
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
diff --git a/server/control_segments/client_trampoline b/server/control_segments/client_trampoline
index 5c870fd..13a648c 100644
--- a/server/control_segments/client_trampoline
+++ b/server/control_segments/client_trampoline
@@ -8,6 +8,15 @@
 
 def _client_trampoline():
     path = job.stage_control_file(trampoline_testname)
-    execfile(path, globals(), globals())
+    with open(path, "r") as fh:
+
+        code_obj = compile(
+                fh.read(),
+                path,
+                mode="exec",
+                flags=0,
+                dont_inherit=1,
+        )
+        exec(code_obj, globals(), globals())
 
 _client_trampoline()
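
The compile()+exec() pattern above is the Python 3 replacement for execfile().
A small reusable sketch of the same idea (the helper name is hypothetical and
not part of this control segment):

    def exec_control_file(path, context):
        """Execute a control file in the given globals dict (execfile
        replacement that works on both Python 2 and 3)."""
        with open(path, "r") as fh:
            code_obj = compile(fh.read(), path, mode="exec", flags=0,
                               dont_inherit=1)
        exec(code_obj, context, context)


    # Usage: exec_control_file(job.stage_control_file(testname), globals())
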
diff --git a/server/control_segments/client_wrapper b/server/control_segments/client_wrapper
index 2ebac14..3f8df87 100644
--- a/server/control_segments/client_wrapper
+++ b/server/control_segments/client_wrapper
@@ -6,7 +6,8 @@
     host.log_kernel()
     if synchronous_offload_dir:
         host.env[autotest.OFFLOAD_ENVVAR] = synchronous_offload_dir
-    at.run(control, host=host, use_packaging=use_packaging)
+    at.run(control, host=host, use_packaging=use_packaging,
+           timeout=extended_timeout)
 
 
 job.parallel_simple(run_client, machines)
diff --git a/server/control_segments/get_network_stats b/server/control_segments/get_network_stats
index 7dc9a3f..a32ef15 100644
--- a/server/control_segments/get_network_stats
+++ b/server/control_segments/get_network_stats
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -15,7 +15,7 @@
 
         # The information is not critical, so ping the DUT first
         # and if it doesn't reply quickly, give up.
-        if utils.ping(dut.hostname, tries=1, timeout=3) != 0:
+        if dut.use_icmp and utils.ping(dut.hostname, tries=1, timeout=3) != 0:
             logging.info('ping failed: not collecting network stats')
             return
 
diff --git a/server/control_segments/provision b/server/control_segments/provision
index 05f10e9..5d8b0aa 100644
--- a/server/control_segments/provision
+++ b/server/control_segments/provision
@@ -11,7 +11,7 @@
 from autotest_lib.server.cros import provision
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -45,7 +45,14 @@
     those given in job_labels.
     """
     job.record('START', None, 'provision')
-    host = hosts.create_target_machine(machine, try_lab_servo=True)
+    # Determine if we should initialize servo based on request type as
+    # we don't need servo in OS only provision.
+    need_servo = False
+    for label in labels_list:
+        if (label.startswith(provision.FW_RW_VERSION_PREFIX) or
+            label.startswith(provision.FW_RO_VERSION_PREFIX)):
+            need_servo = True
+    host = hosts.create_target_machine(machine, try_lab_servo=need_servo)
     try:
         job.sysinfo.add_logdir(
                 sysinfo.logdir(constants.AUTOUPDATE_PRESERVE_LOG))
diff --git a/server/control_segments/repair b/server/control_segments/repair
index efb97d3..ab54009 100644
--- a/server/control_segments/repair
+++ b/server/control_segments/repair
@@ -21,7 +21,8 @@
         job.record('START', None, 'repair')
         target = hosts.create_target_machine(machine,
                                              try_lab_servo=True,
-                                             try_servo_repair=True)
+                                             try_servo_repair=True,
+                                             try_servo_recovery=True)
 
         try:
             # We don't need to collect logs or crash info if we're a
diff --git a/server/control_segments/reset b/server/control_segments/reset
index e54550f..230e3dd 100644
--- a/server/control_segments/reset
+++ b/server/control_segments/reset
@@ -4,7 +4,7 @@
 from autotest_lib.server.cros import provision
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
diff --git a/server/control_segments/stage_server_side_package b/server/control_segments/stage_server_side_package
index 0245e3d..5d6ab0b 100644
--- a/server/control_segments/stage_server_side_package
+++ b/server/control_segments/stage_server_side_package
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/server/control_segments/verify b/server/control_segments/verify
index 8dfab06..0128dd7 100644
--- a/server/control_segments/verify
+++ b/server/control_segments/verify
@@ -2,7 +2,7 @@
 from autotest_lib.server.cros import provision
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -19,7 +19,8 @@
     print('Initializing host %s' % machine)
     job.record('START', None, 'verify')
     target = hosts.create_target_machine(machine,
-                                         try_lab_servo=True)
+                                         try_lab_servo=True,
+                                         try_servo_recovery=True)
     hostname = utils.get_hostname_from_machine(machine)
     try:
         with metrics.SecondsTimer(DURATION_METRIC,
diff --git a/server/crashcollect.py b/server/crashcollect.py
index cf64428..bcc0488 100644
--- a/server/crashcollect.py
+++ b/server/crashcollect.py
@@ -16,7 +16,7 @@
 from autotest_lib.server import utils
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
diff --git a/server/cros/OWNERS b/server/cros/OWNERS
index 71c72f7..3b49d7f 100644
--- a/server/cros/OWNERS
+++ b/server/cros/OWNERS
@@ -1 +1,2 @@
 include /FIRMWARE_OWNERS
+include /ENGPROD_OWNERS
\ No newline at end of file
diff --git a/server/cros/ap_config.py b/server/cros/ap_config.py
index b8c296b..d6e0b8d 100644
--- a/server/cros/ap_config.py
+++ b/server/cros/ap_config.py
@@ -4,11 +4,13 @@
 # found in the LICENSE file.
 
 import collections
-import six.moves.configparser
 import logging
 import os
 import time
 
+import six.moves.configparser
+
+from autotest_lib.client.common_lib import seven
 from autotest_lib.client.common_lib.cros.network import ap_constants
 from autotest_lib.server.cros.ap_configurators import ap_spec
 
@@ -28,7 +30,7 @@
     aps = []
     # chaos_ap_list.conf holds static conf of all APs in lab.
     for filename in ['chaos_ap_list.conf']:
-        ap_config = six.moves.configparser.RawConfigParser(
+        ap_config = seven.config_parser(
                 {AP.CONF_RPM_MANAGED: 'False'})
         path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             filename)
diff --git a/server/cros/ap_config_unittest.py b/server/cros/ap_config_unittest.py
index c0b460d..e32cc21 100644
--- a/server/cros/ap_config_unittest.py
+++ b/server/cros/ap_config_unittest.py
@@ -3,11 +3,13 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import six.moves.configparser
 import io
+import six
 import unittest
 
-import common
+from . import common
+
+from autotest_lib.client.common_lib import seven
 from autotest_lib.server.cros import ap_config
 
 
@@ -39,8 +41,12 @@
 
 
 def _parse_config_from_string(conf):
-    parser = six.moves.configparser.RawConfigParser()
-    parser.readfp(io.BytesIO(conf))
+    parser = seven.config_parser()
+    if six.PY2:
+        parser.readfp(io.BytesIO(conf))
+    else:
+        parser.read_string(conf)
+
     return parser
 
 
diff --git a/server/cros/ap_configurators/ap_batch_locker.py b/server/cros/ap_configurators/ap_batch_locker.py
index d370ae5..a8a857a 100644
--- a/server/cros/ap_configurators/ap_batch_locker.py
+++ b/server/cros/ap_configurators/ap_batch_locker.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,7 +8,7 @@
 
 from time import sleep
 
-import common
+from . import common
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
 from autotest_lib.server.cros.ap_configurators import \
diff --git a/server/cros/ap_configurators/ap_cartridge.py b/server/cros/ap_configurators/ap_cartridge.py
index 9c49626..802cd10 100644
--- a/server/cros/ap_configurators/ap_cartridge.py
+++ b/server/cros/ap_configurators/ap_cartridge.py
@@ -1,9 +1,13 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
-import Queue
+try:
+    import queue
+except ImportError:
+    import Queue as queue
 import time
 import traceback
 
@@ -20,7 +24,7 @@
 
 
     def __init__(self):
-        self.cartridge = Queue.Queue()
+        self.cartridge = queue.Queue()
 
 
     def push_configurators(self, configurators):
diff --git a/server/cros/ap_configurators/ap_configurator_factory.py b/server/cros/ap_configurators/ap_configurator_factory.py
index 50fdb5c..43a6adc 100644
--- a/server/cros/ap_configurators/ap_configurator_factory.py
+++ b/server/cros/ap_configurators/ap_configurator_factory.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,7 +7,7 @@
 
 import logging
 
-import common
+from . import common
 from autotest_lib.client.common_lib.cros.network import ap_constants
 from autotest_lib.server import site_utils
 from autotest_lib.server.cros import ap_config
@@ -53,8 +54,7 @@
         if visible:
             return set(self.ap_list)
 
-        return set(filter(lambda ap: ap.is_visibility_supported(),
-                          self.ap_list))
+        return set([ap for ap in self.ap_list if ap.is_visibility_supported()])
 
 
     def _get_aps_by_mode(self, band, mode):
diff --git a/server/cros/ap_configurators/ap_spec.py b/server/cros/ap_configurators/ap_spec.py
index 6a9ab1e..1307a14 100644
--- a/server/cros/ap_configurators/ap_spec.py
+++ b/server/cros/ap_configurators/ap_spec.py
@@ -1,7 +1,10 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import six
+
 from autotest_lib.client.common_lib.cros.network import iw_runner
 
 
@@ -52,7 +55,7 @@
                     SECURITY_TYPE_WEP]
 
 # List of valid channels.
-VALID_2GHZ_CHANNELS = range(1,15)
+VALID_2GHZ_CHANNELS = list(range(1,15))
 VALID_5GHZ_CHANNELS = [36, 40, 44, 48, 128, 149, 153, 157, 161, 165]
 
 # Frequency to channel conversion table
@@ -67,7 +70,7 @@
 
 # This only works because the frequency table is one to one
 # for channels/frequencies.
-FREQUENCY_TABLE = dict((v,k) for k,v in CHANNEL_TABLE.iteritems())
+FREQUENCY_TABLE = dict((v,k) for k,v in six.iteritems(CHANNEL_TABLE))
 
 # Configurator type
 CONFIGURATOR_STATIC = 1
diff --git a/server/cros/ap_configurators/ap_spec_unittest.py b/server/cros/ap_configurators/ap_spec_unittest.py
index c1981ca..1129f00 100755
--- a/server/cros/ap_configurators/ap_spec_unittest.py
+++ b/server/cros/ap_configurators/ap_spec_unittest.py
@@ -16,108 +16,108 @@
     def test_default_creation(self):
         """Test building a default ap_spec object."""
         spec = ap_spec.APSpec()
-        self.assertEquals(spec.visible, True)
-        self.assertEquals(spec.security, ap_spec.DEFAULT_SECURITY_TYPE)
-        self.assertEquals(spec.band, ap_spec.DEFAULT_BAND)
-        self.assertEquals(spec.mode, ap_spec.DEFAULT_2GHZ_MODE)
-        self.assertEquals(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
-        self.assertEquals(spec.password, 'chromeos')
+        self.assertEqual(spec.visible, True)
+        self.assertEqual(spec.security, ap_spec.DEFAULT_SECURITY_TYPE)
+        self.assertEqual(spec.band, ap_spec.DEFAULT_BAND)
+        self.assertEqual(spec.mode, ap_spec.DEFAULT_2GHZ_MODE)
+        self.assertEqual(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
+        self.assertEqual(spec.password, 'chromeos')
 
 
     def test_only_set_band_2ghz(self):
         """Test setting only the band to 2GHz."""
         spec = ap_spec.APSpec(band=ap_spec.BAND_2GHZ)
-        self.assertEquals(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
-        self.assertEquals(spec.mode, ap_spec.DEFAULT_2GHZ_MODE)
+        self.assertEqual(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
+        self.assertEqual(spec.mode, ap_spec.DEFAULT_2GHZ_MODE)
 
 
     def test_only_set_band_5ghz(self):
         """Test setting only the band to 5GHz."""
         spec = ap_spec.APSpec(band=ap_spec.BAND_5GHZ)
-        self.assertEquals(spec.channel, ap_spec.DEFAULT_5GHZ_CHANNEL)
-        self.assertEquals(spec.mode, ap_spec.DEFAULT_5GHZ_MODE)
+        self.assertEqual(spec.channel, ap_spec.DEFAULT_5GHZ_CHANNEL)
+        self.assertEqual(spec.mode, ap_spec.DEFAULT_5GHZ_MODE)
 
 
     def test_only_set_mode_2ghz(self):
         """Test setting only a 2GHz mode."""
         spec = ap_spec.APSpec(mode=ap_spec.MODE_B)
-        self.assertEquals(spec.band, ap_spec.DEFAULT_BAND)
-        self.assertEquals(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
+        self.assertEqual(spec.band, ap_spec.DEFAULT_BAND)
+        self.assertEqual(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
 
 
     def test_only_set_mode_5ghz(self):
         """Test setting only a 5GHz mode."""
         spec = ap_spec.APSpec(mode=ap_spec.MODE_A)
-        self.assertEquals(spec.band, ap_spec.BAND_5GHZ)
-        self.assertEquals(spec.channel, ap_spec.DEFAULT_5GHZ_CHANNEL)
+        self.assertEqual(spec.band, ap_spec.BAND_5GHZ)
+        self.assertEqual(spec.channel, ap_spec.DEFAULT_5GHZ_CHANNEL)
 
 
     def test_only_set_mode_n(self):
         """Test setting the mode to N."""
         spec = ap_spec.APSpec(mode=ap_spec.MODE_N)
-        self.assertEquals(spec.band, ap_spec.DEFAULT_BAND)
-        self.assertEquals(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
+        self.assertEqual(spec.band, ap_spec.DEFAULT_BAND)
+        self.assertEqual(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
 
 
     def test_only_set_channel_2ghz(self):
         """Test setting only a 2GHz channel."""
         spec = ap_spec.APSpec(channel=ap_spec.DEFAULT_2GHZ_CHANNEL)
-        self.assertEquals(spec.band, ap_spec.BAND_2GHZ)
-        self.assertEquals(spec.mode, ap_spec.DEFAULT_2GHZ_MODE)
+        self.assertEqual(spec.band, ap_spec.BAND_2GHZ)
+        self.assertEqual(spec.mode, ap_spec.DEFAULT_2GHZ_MODE)
 
 
     def test_only_set_channel_5ghz(self):
         """Test setting only a 5GHz channel."""
         spec = ap_spec.APSpec(channel=ap_spec.DEFAULT_5GHZ_CHANNEL)
-        self.assertEquals(spec.band, ap_spec.BAND_5GHZ)
-        self.assertEquals(spec.mode, ap_spec.DEFAULT_5GHZ_MODE)
+        self.assertEqual(spec.band, ap_spec.BAND_5GHZ)
+        self.assertEqual(spec.mode, ap_spec.DEFAULT_5GHZ_MODE)
 
 
     def test_set_band_and_mode_2ghz(self):
         """Test setting the band and mode to valid 2GHz values."""
         spec = ap_spec.APSpec(band=ap_spec.BAND_2GHZ, mode=ap_spec.MODE_G)
-        self.assertEquals(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
+        self.assertEqual(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
 
 
     def test_set_band_and_mode_5ghz(self):
         """Test setting the band and mode to valid 5GHz values."""
         spec = ap_spec.APSpec(band=ap_spec.BAND_5GHZ, mode=ap_spec.MODE_A)
-        self.assertEquals(spec.channel, ap_spec.DEFAULT_5GHZ_CHANNEL)
+        self.assertEqual(spec.channel, ap_spec.DEFAULT_5GHZ_CHANNEL)
 
 
     def test_set_band_mode_and_channel_2ghz(self):
         """Test setting the band and channel to valid 2GHz values."""
         spec = ap_spec.APSpec(band=ap_spec.BAND_2GHZ, mode=ap_spec.MODE_N,
                               channel=ap_spec.DEFAULT_2GHZ_CHANNEL)
-        self.assertEquals(spec.mode, ap_spec.MODE_N)
+        self.assertEqual(spec.mode, ap_spec.MODE_N)
 
 
     def test_set_band_mode_and_channel_5ghz(self):
         """Test setting the band and channel to valid 5GHz value."""
         spec = ap_spec.APSpec(band=ap_spec.BAND_5GHZ, mode=ap_spec.MODE_N,
                               channel=ap_spec.DEFAULT_5GHZ_CHANNEL)
-        self.assertEquals(spec.mode, ap_spec.MODE_N)
+        self.assertEqual(spec.mode, ap_spec.MODE_N)
 
 
     def test_set_security_psk_default(self):
         """Test setting security to WPAPSK."""
         spec = ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPAPSK)
-        self.assertEquals(spec.visible, True)
-        self.assertEquals(spec.security, ap_spec.SECURITY_TYPE_WPAPSK)
-        self.assertEquals(spec.band, ap_spec.DEFAULT_BAND)
-        self.assertEquals(spec.mode, ap_spec.DEFAULT_2GHZ_MODE)
-        self.assertEquals(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
+        self.assertEqual(spec.visible, True)
+        self.assertEqual(spec.security, ap_spec.SECURITY_TYPE_WPAPSK)
+        self.assertEqual(spec.band, ap_spec.DEFAULT_BAND)
+        self.assertEqual(spec.mode, ap_spec.DEFAULT_2GHZ_MODE)
+        self.assertEqual(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
 
 
     def test_set_security_and_visibility(self):
         """Test setting visibility to hidden and security to WPAPSK."""
         spec = ap_spec.APSpec(visible=False,
                               security=ap_spec.SECURITY_TYPE_WPAPSK)
-        self.assertEquals(spec.visible, False)
-        self.assertEquals(spec.security, ap_spec.SECURITY_TYPE_WPAPSK)
-        self.assertEquals(spec.band, ap_spec.DEFAULT_BAND)
-        self.assertEquals(spec.mode, ap_spec.DEFAULT_2GHZ_MODE)
-        self.assertEquals(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
+        self.assertEqual(spec.visible, False)
+        self.assertEqual(spec.security, ap_spec.SECURITY_TYPE_WPAPSK)
+        self.assertEqual(spec.band, ap_spec.DEFAULT_BAND)
+        self.assertEqual(spec.mode, ap_spec.DEFAULT_2GHZ_MODE)
+        self.assertEqual(spec.channel, ap_spec.DEFAULT_2GHZ_CHANNEL)
         self.assertIsNotNone(spec.password)
 
 
@@ -153,24 +153,24 @@
     def test_mode_string_generation(self):
         """Test a set of mode constants a generates a human readable string."""
         mode = ap_spec.mode_string_for_mode(ap_spec.MODE_B | ap_spec.MODE_G)
-        self.assertEquals('b/g', mode)
+        self.assertEqual('b/g', mode)
 
         mode = ap_spec.mode_string_for_mode(ap_spec.MODE_B | ap_spec.MODE_G |
                                             ap_spec.MODE_N)
-        self.assertEquals('b/g/n', mode)
+        self.assertEqual('b/g/n', mode)
 
         mode = ap_spec.mode_string_for_mode(ap_spec.MODE_A)
-        self.assertEquals('a', mode)
+        self.assertEqual('a', mode)
 
 
     def test_mode_n_on_both_bands(self):
         """Test that band is maintained when setting a mode N spec."""
         spec = ap_spec.APSpec(band=ap_spec.BAND_5GHZ, mode=ap_spec.MODE_N)
-        self.assertEquals(spec.band, ap_spec.BAND_5GHZ)
-        self.assertEquals(spec.mode, ap_spec.MODE_N)
+        self.assertEqual(spec.band, ap_spec.BAND_5GHZ)
+        self.assertEqual(spec.mode, ap_spec.MODE_N)
         spec = ap_spec.APSpec(band=ap_spec.BAND_2GHZ, mode=ap_spec.MODE_N)
-        self.assertEquals(spec.band, ap_spec.BAND_2GHZ)
-        self.assertEquals(spec.mode, ap_spec.MODE_N)
+        self.assertEqual(spec.band, ap_spec.BAND_2GHZ)
+        self.assertEqual(spec.mode, ap_spec.MODE_N)
 
 
 if __name__ == '__main__':
diff --git a/server/cros/ap_configurators/fetch_prebuilt_pyauto.py b/server/cros/ap_configurators/fetch_prebuilt_pyauto.py
index a8b02d5..ac2ee8c0 100644
--- a/server/cros/ap_configurators/fetch_prebuilt_pyauto.py
+++ b/server/cros/ap_configurators/fetch_prebuilt_pyauto.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/ap_configurators/pyauto_utils.py b/server/cros/ap_configurators/pyauto_utils.py
index c06bb8a..cc25ca6 100644
--- a/server/cros/ap_configurators/pyauto_utils.py
+++ b/server/cros/ap_configurators/pyauto_utils.py
@@ -19,7 +19,7 @@
 
 
 class ExistingPathReplacer(object):
-  """Facilitates backing up a given path (file or dir)..
+    """Facilitates backing up a given path (file or dir)..
 
   Often you want to manipulate a directory or file for testing but don't want to
   meddle with the existing contents.  This class lets you make a backup, and
@@ -30,121 +30,127 @@
 
   Automatically reinstates the backed up path (if any) when object is deleted.
   """
-  _path = ''
-  _backup_dir = None  # dir to which existing content is backed up
-  _backup_basename = ''
+    _path = ''
+    _backup_dir = None  # dir to which existing content is backed up
+    _backup_basename = ''
 
-  def __init__(self, path, path_type='dir'):
-    """Initialize the object, making backups if necessary.
+    def __init__(self, path, path_type='dir'):
+        """Initialize the object, making backups if necessary.
 
     Args:
       path: the requested path to file or directory
       path_type: path type. Options: 'file', 'dir'. Default: 'dir'
     """
-    assert path_type in ('file', 'dir'), 'Invalid path_type: %s' % path_type
-    self._path_type = path_type
-    self._path = path
-    if os.path.exists(self._path):
-      if 'dir' == self._path_type:
-        assert os.path.isdir(self._path), '%s is not a directory' % self._path
-      else:
-        assert os.path.isfile(self._path), '%s is not a file' % self._path
-      # take a backup
-      self._backup_basename = os.path.basename(self._path)
-      self._backup_dir = tempfile.mkdtemp(dir=os.path.dirname(self._path),
-                                          prefix='bkp-' + self._backup_basename)
-      logging.info('Backing up %s in %s' % (self._path, self._backup_dir))
-      shutil.move(self._path,
-                  os.path.join(self._backup_dir, self._backup_basename))
-    self._CreateRequestedPath()
+        assert path_type in ('file',
+                             'dir'), 'Invalid path_type: %s' % path_type
+        self._path_type = path_type
+        self._path = path
+        if os.path.exists(self._path):
+            if 'dir' == self._path_type:
+                assert os.path.isdir(
+                        self._path), '%s is not a directory' % self._path
+            else:
+                assert os.path.isfile(
+                        self._path), '%s is not a file' % self._path
+            # take a backup
+            self._backup_basename = os.path.basename(self._path)
+            self._backup_dir = tempfile.mkdtemp(
+                    dir=os.path.dirname(self._path),
+                    prefix='bkp-' + self._backup_basename)
+            logging.info('Backing up %s in %s' %
+                         (self._path, self._backup_dir))
+            shutil.move(self._path,
+                        os.path.join(self._backup_dir, self._backup_basename))
+        self._CreateRequestedPath()
 
-  def __del__(self):
-    """Cleanup. Reinstate backup."""
-    self._CleanupRequestedPath()
-    if self._backup_dir:  # Reinstate, if backed up.
-      from_path = os.path.join(self._backup_dir, self._backup_basename)
-      logging.info('Reinstating backup from %s to %s' % (from_path, self._path))
-      shutil.move(from_path, self._path)
-    self._RemoveBackupDir()
+    def __del__(self):
+        """Cleanup. Reinstate backup."""
+        self._CleanupRequestedPath()
+        if self._backup_dir:  # Reinstate, if backed up.
+            from_path = os.path.join(self._backup_dir, self._backup_basename)
+            logging.info('Reinstating backup from %s to %s' %
+                         (from_path, self._path))
+            shutil.move(from_path, self._path)
+        self._RemoveBackupDir()
 
-  def _CreateRequestedPath(self):
-    # Create intermediate dirs if needed.
-    if not os.path.exists(os.path.dirname(self._path)):
-      os.makedirs(os.path.dirname(self._path))
-    if 'dir' == self._path_type:
-      os.mkdir(self._path)
-    else:
-      open(self._path, 'w').close()
+    def _CreateRequestedPath(self):
+        # Create intermediate dirs if needed.
+        if not os.path.exists(os.path.dirname(self._path)):
+            os.makedirs(os.path.dirname(self._path))
+        if 'dir' == self._path_type:
+            os.mkdir(self._path)
+        else:
+            open(self._path, 'w').close()
 
-  def _CleanupRequestedPath(self):
-    if os.path.exists(self._path):
-      if os.path.isdir(self._path):
-        shutil.rmtree(self._path, ignore_errors=True)
-      else:
-        os.remove(self._path)
+    def _CleanupRequestedPath(self):
+        if os.path.exists(self._path):
+            if os.path.isdir(self._path):
+                shutil.rmtree(self._path, ignore_errors=True)
+            else:
+                os.remove(self._path)
 
-  def _RemoveBackupDir(self):
-    if self._backup_dir and os.path.isdir(self._backup_dir):
-      shutil.rmtree(self._backup_dir, ignore_errors=True)
+    def _RemoveBackupDir(self):
+        if self._backup_dir and os.path.isdir(self._backup_dir):
+            shutil.rmtree(self._backup_dir, ignore_errors=True)
 
 
 def RemovePath(path):
-  """Remove the given path (file or dir)."""
-  if os.path.isdir(path):
-    shutil.rmtree(path, ignore_errors=True)
-    return
-  try:
-    os.remove(path)
-  except OSError:
-    pass
+    """Remove the given path (file or dir)."""
+    if os.path.isdir(path):
+        shutil.rmtree(path, ignore_errors=True)
+        return
+    try:
+        os.remove(path)
+    except OSError:
+        pass
 
 
 def UnzipFilenameToDir(filename, dir):
-  """Unzip |filename| to directory |dir|.
+    """Unzip |filename| to directory |dir|.
 
   This works with as low as python2.4 (used on win).
   """
-  zf = zipfile.ZipFile(filename)
-  pushd = os.getcwd()
-  if not os.path.isdir(dir):
-    os.mkdir(dir)
-  os.chdir(dir)
-  # Extract files.
-  for info in zf.infolist():
-    name = info.filename
-    if name.endswith('/'):  # dir
-      if not os.path.isdir(name):
-        os.makedirs(name)
-    else:  # file
-      dir = os.path.dirname(name)
-      if not os.path.isdir(dir):
-        os.makedirs(dir)
-      out = open(name, 'wb')
-      out.write(zf.read(name))
-      out.close()
-    # Set permissions. Permission info in external_attr is shifted 16 bits.
-    os.chmod(name, info.external_attr >> 16)
-  os.chdir(pushd)
+    zf = zipfile.ZipFile(filename)
+    pushd = os.getcwd()
+    if not os.path.isdir(dir):
+        os.mkdir(dir)
+    os.chdir(dir)
+    # Extract files.
+    for info in zf.infolist():
+        name = info.filename
+        if name.endswith('/'):  # dir
+            if not os.path.isdir(name):
+                os.makedirs(name)
+        else:  # file
+            dir = os.path.dirname(name)
+            if not os.path.isdir(dir):
+                os.makedirs(dir)
+            out = open(name, 'wb')
+            out.write(zf.read(name))
+            out.close()
+        # Set permissions. Permission info in external_attr is shifted 16 bits.
+        os.chmod(name, info.external_attr >> 16)
+    os.chdir(pushd)
 
 
 def GetCurrentPlatform():
-  """Get a string representation for the current platform.
+    """Get a string representation for the current platform.
 
   Returns:
     'mac', 'win' or 'linux'
   """
-  if sys.platform == 'darwin':
-    return 'mac'
-  if sys.platform == 'win32':
-    return 'win'
-  if sys.platform.startswith('linux'):
-    return 'linux'
-  raise RuntimeError('Unknown platform')
+    if sys.platform == 'darwin':
+        return 'mac'
+    if sys.platform == 'win32':
+        return 'win'
+    if sys.platform.startswith('linux'):
+        return 'linux'
+    raise RuntimeError('Unknown platform')
 
 
 def PrintPerfResult(graph_name, series_name, data_point, units,
                     show_on_waterfall=False):
-  """Prints a line to stdout that is specially formatted for the perf bots.
+    """Prints a line to stdout that is specially formatted for the perf bots.
 
   Args:
     graph_name: String name for the graph on which to plot the data.
@@ -160,32 +166,32 @@
                        buildbot waterfall itself (in the buildbot step running
                        this test on the waterfall page, not the stdio page).
   """
-  waterfall_indicator = ['', '*'][show_on_waterfall]
-  print('%sRESULT %s: %s= %s %s' % (
-      waterfall_indicator, graph_name, series_name,
-      str(data_point).replace(' ', ''), units))
-  sys.stdout.flush()
+    waterfall_indicator = ['', '*'][show_on_waterfall]
+    print('%sRESULT %s: %s= %s %s' %
+          (waterfall_indicator, graph_name, series_name,
+           str(data_point).replace(' ', ''), units))
+    sys.stdout.flush()
 
 
 def Shard(ilist, shard_index, num_shards):
-  """Shard a given list and return the group at index |shard_index|.
+    """Shard a given list and return the group at index |shard_index|.
 
   Args:
     ilist: input list
     shard_index: 0-based sharding index
     num_shards: shard count
   """
-  chunk_size = len(ilist) / num_shards
-  chunk_start = shard_index * chunk_size
-  if shard_index == num_shards - 1:  # Exhaust the remainder in the last shard.
-    chunk_end = len(ilist)
-  else:
-    chunk_end = chunk_start + chunk_size
-  return ilist[chunk_start:chunk_end]
+    chunk_size = len(ilist) // num_shards
+    chunk_start = shard_index * chunk_size
+    if shard_index == num_shards - 1:  # Exhaust the remainder in the last shard.
+        chunk_end = len(ilist)
+    else:
+        chunk_end = chunk_start + chunk_size
+    return ilist[chunk_start:chunk_end]
 
 
 def WaitForDomElement(pyauto, driver, xpath):
-  """Wait for the UI element to appear.
+    """Wait for the UI element to appear.
 
   Args:
     pyauto: an instance of pyauto.PyUITest.
@@ -196,12 +202,12 @@
     The element if it is found.
     NoSuchElementException if it is not found.
   """
-  pyauto.WaitUntil(lambda: len(driver.find_elements_by_xpath(xpath)) > 0)
-  return driver.find_element_by_xpath(xpath)
+    pyauto.WaitUntil(lambda: len(driver.find_elements_by_xpath(xpath)) > 0)
+    return driver.find_element_by_xpath(xpath)
 
 
 def DoesUrlExist(url):
-  """Determines whether a resource exists at the given URL.
+    """Determines whether a resource exists at the given URL.
 
   Args:
     url: URL to be verified.
@@ -209,68 +215,72 @@
   Returns:
     True if url exists, otherwise False.
   """
-  parsed = urlparse.urlparse(url)
-  try:
-    conn = httplib.HTTPConnection(parsed.netloc)
-    conn.request('HEAD', parsed.path)
-    response = conn.getresponse()
-  except (socket.gaierror, socket.error):
-    return False
-  finally:
-    conn.close()
-  # Follow both permanent (301) and temporary (302) redirects.
-  if response.status == 302 or response.status == 301:
-    return DoesUrlExist(response.getheader('location'))
-  return response.status == 200
+    parsed = urlparse.urlparse(url)
+    try:
+        conn = httplib.HTTPConnection(parsed.netloc)
+        conn.request('HEAD', parsed.path)
+        response = conn.getresponse()
+    except (socket.gaierror, socket.error):
+        return False
+    finally:
+        conn.close()
+    # Follow both permanent (301) and temporary (302) redirects.
+    if response.status == 302 or response.status == 301:
+        return DoesUrlExist(response.getheader('location'))
+    return response.status == 200
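A hedged usage sketch; the URL is hypothetical and the call needs network access, issuing a HEAD request and following 301/302 redirects as described above:

    if DoesUrlExist('http://example.com/build/latest.zip'):
        print('resource is available')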
 
 
 class _GTestTextTestResult(unittest._TextTestResult):
-  """A test result class that can print formatted text results to a stream.
+    """A test result class that can print formatted text results to a stream.
 
   Results printed in conformance with gtest output format, like:
   [ RUN        ] autofill.AutofillTest.testAutofillInvalid: "test desc."
   [         OK ] autofill.AutofillTest.testAutofillInvalid
   [ RUN        ] autofill.AutofillTest.testFillProfile: "test desc."
   [         OK ] autofill.AutofillTest.testFillProfile
-  [ RUN        ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
-  [         OK ] autofill.AutofillTest.testFillProfileCrazyCharacters
+  [ RUN        ] autofill.AutofillTest.testFillProfileComplexCharacters: "Test."
+  [         OK ] autofill.AutofillTest.testFillProfileComplexCharacters
   """
 
-  def __init__(self, stream, descriptions, verbosity):
-    unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
+    def __init__(self, stream, descriptions, verbosity):
+        unittest._TextTestResult.__init__(self, stream, descriptions,
+                                          verbosity)
 
-  def _GetTestURI(self, test):
-    return '%s.%s' % (unittest._strclass(test.__class__), test._testMethodName)
+    def _GetTestURI(self, test):
+        return '%s.%s' % (unittest._strclass(
+                test.__class__), test._testMethodName)
 
-  def getDescription(self, test):
-    return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())
+    def getDescription(self, test):
+        return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())
 
-  def startTest(self, test):
-    unittest.TestResult.startTest(self, test)
-    self.stream.writeln('[ RUN        ] %s' % self.getDescription(test))
+    def startTest(self, test):
+        unittest.TestResult.startTest(self, test)
+        self.stream.writeln('[ RUN        ] %s' % self.getDescription(test))
 
-  def addSuccess(self, test):
-    unittest.TestResult.addSuccess(self, test)
-    self.stream.writeln('[         OK ] %s' % self._GetTestURI(test))
+    def addSuccess(self, test):
+        unittest.TestResult.addSuccess(self, test)
+        self.stream.writeln('[         OK ] %s' % self._GetTestURI(test))
 
-  def addError(self, test, err):
-    unittest.TestResult.addError(self, test, err)
-    self.stream.writeln('[      ERROR ] %s' % self._GetTestURI(test))
+    def addError(self, test, err):
+        unittest.TestResult.addError(self, test, err)
+        self.stream.writeln('[      ERROR ] %s' % self._GetTestURI(test))
 
-  def addFailure(self, test, err):
-    unittest.TestResult.addFailure(self, test, err)
-    self.stream.writeln('[     FAILED ] %s' % self._GetTestURI(test))
+    def addFailure(self, test, err):
+        unittest.TestResult.addFailure(self, test, err)
+        self.stream.writeln('[     FAILED ] %s' % self._GetTestURI(test))
 
 
 class GTestTextTestRunner(unittest.TextTestRunner):
-  """Test Runner for displaying test results in textual format.
+    """Test Runner for displaying test results in textual format.
 
   Results are displayed in conformance with gtest output.
   """
 
-  def __init__(self, verbosity=1):
-    unittest.TextTestRunner.__init__(self, stream=sys.stderr,
-                                     verbosity=verbosity)
+    def __init__(self, verbosity=1):
+        unittest.TextTestRunner.__init__(self,
+                                         stream=sys.stderr,
+                                         verbosity=verbosity)
 
-  def _makeResult(self):
-    return _GTestTextTestResult(self.stream, self.descriptions, self.verbosity)
+    def _makeResult(self):
+        return _GTestTextTestResult(self.stream, self.descriptions,
+                                    self.verbosity)
diff --git a/server/cros/ap_configurators/static_ap_configurator.py b/server/cros/ap_configurators/static_ap_configurator.py
index 849e740..4ae616a 100644
--- a/server/cros/ap_configurators/static_ap_configurator.py
+++ b/server/cros/ap_configurators/static_ap_configurator.py
@@ -1,12 +1,14 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import collections
+import logging
 import pprint
 import re
-import xmlrpclib
 
+from six.moves import xmlrpc_client as xmlrpclib
 from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.common_lib.cros.network import ap_constants
 from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
@@ -47,7 +49,7 @@
         self.mac_address = ap_config.get_wan_mac()
         self.host_name = ap_config.get_wan_host()
         # Get corresponding PDU from host name.
-        self.pdu = re.sub('host\d+', 'rpm1', self.host_name) + '.cros'
+        self.pdu = re.sub('host\\d+', 'rpm1', self.host_name) + '.cros'
         self.channel = ap_config.get_channel()
         self.band = ap_config.get_band()
         self.current_band = ap_config.get_band()
@@ -142,6 +144,8 @@
         """Allow cartridge to run commands in _command_list"""
         self.check_pdu_status()
         for command in self._command_list:
+            logging.debug("Command to run method: %s", command.method.__name__)
+            logging.debug("Command to run with args: %s", str(command.args))
             command.method(*command.args)
 
 
diff --git a/server/cros/audio/audio_test.py b/server/cros/audio/audio_test.py
index fd918ce..79d0e79 100644
--- a/server/cros/audio/audio_test.py
+++ b/server/cros/audio/audio_test.py
@@ -20,11 +20,11 @@
     collection of server side audio tests. It is assumed to be run with a
     Chameleon audio boards. It is recommended to use this base class for server
     side Chameleon audio tests to take advantage of the initialize, setup and
-    sanity check.
+    confidence check.
     """
 
     def initialize(self, host):
-        """Initialize audio test needed components and do some sanity checks"""
+        """Initialize audio test needed components and do some checks"""
         if host.chameleon is None:
             raise error.TestError("host.chameleon is None."
                                   "Please check the chameleon of this DUT.")
diff --git a/server/cros/bluetooth/OWNERS b/server/cros/bluetooth/OWNERS
new file mode 100644
index 0000000..3c5c8a3
--- /dev/null
+++ b/server/cros/bluetooth/OWNERS
@@ -0,0 +1 @@
+include /BLUETOOTH_OWNERS
diff --git a/server/cros/bluetooth/advertisements_data.py b/server/cros/bluetooth/advertisements_data.py
index e5d2883..e519447 100644
--- a/server/cros/bluetooth/advertisements_data.py
+++ b/server/cros/bluetooth/advertisements_data.py
@@ -1,69 +1,130 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """A list of advertisements data for testing purpose."""
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from six.moves import range
+
 
 ADVERTISEMENT1 = {
-    'Path': '/org/bluez/test/advertisement1',
-    'Type': 'peripheral',
-    'ManufacturerData': {'0xff01': [0x1a, 0x1b, 0x1c, 0x1d, 0x1e]},
-    'ServiceUUIDs': ['180D', '180F'],
-    'SolicitUUIDs': [],
-    'ServiceData': {'9991': [0x11, 0x12, 0x13, 0x14, 0x15]},
-    'IncludeTxPower': True}
-
+        'Path': '/org/bluez/test/advertisement1',
+        'Type': 'peripheral',
+        'ManufacturerData': {
+                '0xff01': [0x1a, 0x1b, 0x1c, 0x1d, 0x1e]
+        },
+        'ServiceUUIDs': ['180D', '180F'],
+        'SolicitUUIDs': [],
+        'ServiceData': {
+                '9991': [0x11, 0x12, 0x13, 0x14, 0x15]
+        },
+        'ScanResponseData': {
+                '0x16': [0xcd, 0xab] + list(range(1, 21))
+        },
+        'Discoverable': True,
+        'IncludeTxPower': True,
+        'MinInterval': 100,
+        'MaxInterval': 100,
+        'TxPower': 10
+}
 
 ADVERTISEMENT2 = {
-    'Path': '/org/bluez/test/advertisement2',
-    'Type': 'peripheral',
-    'ManufacturerData': {'0xff02': [0x2a, 0x2b, 0x2c, 0x2d, 0x2e]},
-    'ServiceUUIDs': ['1821'],
-    'SolicitUUIDs': [],
-    'ServiceData': {'9992': [0x21, 0x22, 0x23, 0x24, 0x25]},
-    'IncludeTxPower': True}
-
+        'Path': '/org/bluez/test/advertisement2',
+        'Type': 'peripheral',
+        'ManufacturerData': {
+                '0xff02': [0x2a, 0x2b, 0x2c, 0x2d, 0x2e]
+        },
+        'ServiceUUIDs': ['1821'],
+        'SolicitUUIDs': [],
+        'ServiceData': {
+                '9992': [0x21, 0x22, 0x23, 0x24, 0x25]
+        },
+        'ScanResponseData': {
+                '0x16': [0xcd, 0xab] + list(range(22, 42))
+        },
+        'Discoverable': True,
+        'IncludeTxPower': True,
+        'MinInterval': 100,
+        'MaxInterval': 100,
+        'TxPower': 7
+}
 
 ADVERTISEMENT3 = {
-    'Path': '/org/bluez/test/advertisement3',
-    'Type': 'peripheral',
-    'ManufacturerData': {'0xff03': [0x3a, 0x3b, 0x3c, 0x3d, 0x3e]},
-    'ServiceUUIDs': ['1819', '180E'],
-    'SolicitUUIDs': [],
-    'ServiceData': {'9993': [0x31, 0x32, 0x33, 0x34, 0x35]},
-    'IncludeTxPower': True}
-
+        'Path': '/org/bluez/test/advertisement3',
+        'Type': 'peripheral',
+        'ManufacturerData': {
+                '0xff03': [0x3a, 0x3b, 0x3c, 0x3d, 0x3e]
+        },
+        'ServiceUUIDs': ['1819', '180E'],
+        'SolicitUUIDs': [],
+        'ServiceData': {
+                '9993': [0x31, 0x32, 0x33, 0x34, 0x35]
+        },
+        'Discoverable': True,
+        'IncludeTxPower': True,
+        'MinInterval': 100,
+        'MaxInterval': 100,
+        'TxPower': 4
+}
 
 ADVERTISEMENT4 = {
-    'Path': '/org/bluez/test/advertisement4',
-    'Type': 'peripheral',
-    'ManufacturerData': {'0xff04': [0x4a, 0x4b, 0x4c, 0x4d, 0x4e]},
-    'ServiceUUIDs': ['1808', '1810'],
-    'SolicitUUIDs': [],
-    'ServiceData': {'9994': [0x41, 0x42, 0x43, 0x44, 0x45]},
-    'IncludeTxPower': True}
-
+        'Path': '/org/bluez/test/advertisement4',
+        'Type': 'peripheral',
+        'ManufacturerData': {
+                '0xff04': [0x4a, 0x4b, 0x4c, 0x4d, 0x4e]
+        },
+        'ServiceUUIDs': ['1808', '1810'],
+        'SolicitUUIDs': [],
+        'ServiceData': {
+                '9994': [0x41, 0x42, 0x43, 0x44, 0x45]
+        },
+        'Discoverable': True,
+        'IncludeTxPower': True,
+        'MinInterval': 100,
+        'MaxInterval': 100,
+        'TxPower': 1
+}
 
 ADVERTISEMENT5 = {
-    'Path': '/org/bluez/test/advertisement5',
-    'Type': 'peripheral',
-    'ManufacturerData': {'0xff05': [0x5a, 0x5b, 0x5c, 0x5d, 0x5e]},
-    'ServiceUUIDs': ['1818', '181B'],
-    'SolicitUUIDs': [],
-    'ServiceData': {'9995': [0x51, 0x52, 0x53, 0x54, 0x55]},
-    'IncludeTxPower': True}
-
+        'Path': '/org/bluez/test/advertisement5',
+        'Type': 'peripheral',
+        'ManufacturerData': {
+                '0xff05': [0x5a, 0x5b, 0x5c, 0x5d, 0x5e]
+        },
+        'ServiceUUIDs': ['1818', '181B'],
+        'SolicitUUIDs': [],
+        'ServiceData': {
+                '9995': [0x51, 0x52, 0x53, 0x54, 0x55]
+        },
+        'Discoverable': True,
+        'IncludeTxPower': True,
+        'MinInterval': 100,
+        'MaxInterval': 100,
+        'TxPower': -2
+}
 
 ADVERTISEMENT6 = {
-    'Path': '/org/bluez/test/advertisement6',
-    'Type': 'peripheral',
-    'ManufacturerData': {'0xff06': [0x6a, 0x6b, 0x6c, 0x6d, 0x6e]},
-    'ServiceUUIDs': ['1820'],
-    'SolicitUUIDs': [],
-    'ServiceData': {'9996': [0x61, 0x62, 0x63, 0x64, 0x65]},
-    'IncludeTxPower': True}
-
+        'Path': '/org/bluez/test/advertisement6',
+        'Type': 'peripheral',
+        'ManufacturerData': {
+                '0xff06': [0x6a, 0x6b, 0x6c, 0x6d, 0x6e]
+        },
+        'ServiceUUIDs': ['1820'],
+        'SolicitUUIDs': [],
+        'ServiceData': {
+                '9996': [0x61, 0x62, 0x63, 0x64, 0x65]
+        },
+        'Discoverable': True,
+        'IncludeTxPower': True,
+        'MinInterval': 100,
+        'MaxInterval': 100,
+        'TxPower': -5
+}
 
 NEARBY_BROADCAST_ADV = {
         'Path': '/org/bluez/test/advertisement7',
@@ -76,7 +137,9 @@
         'ServiceData': {
                 'FE2C': [0x61, 0x62, 0x63, 0x64, 0x65]
         },
-        'IncludeTxPower': True
+        'IncludeTxPower': True,
+        'MinInterval': 100,
+        'MaxInterval': 100
 }
 
 
@@ -89,8 +152,10 @@
         'Type': 'peripheral',
         'ServiceUUIDs': ['FEF3'],
         'ScanResponseData': {
-                '0x16': [0xf3, 0xfe] + range(1, 21)
+                '0x16': [0xf3, 0xfe] + list(range(1, 21))
         },
+        'MinInterval': 100,
+        'MaxInterval': 100
 }
 
 ADVERTISEMENTS = [ADVERTISEMENT1, ADVERTISEMENT2, ADVERTISEMENT3,
diff --git a/server/cros/bluetooth/bluetooth_adapter_adv_monitor_tests.py b/server/cros/bluetooth/bluetooth_adapter_adv_monitor_tests.py
index 67ca692..50f37c0 100644
--- a/server/cros/bluetooth/bluetooth_adapter_adv_monitor_tests.py
+++ b/server/cros/bluetooth/bluetooth_adapter_adv_monitor_tests.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -13,6 +14,16 @@
 from autotest_lib.client.common_lib import error
 
 
+# List of the controllers that do not support Adv Monitor HW offloading.
+ADVMON_UNSUPPORTED_CHIPSETS = [
+        'BCM-43540', 'BCM-43560',
+        'Intel-AC7260', 'Intel-AC7265',
+        'MVL-8797', 'MVL-8887', 'MVL-8897', 'MVL-8997',
+        'Realtek-RTL8822C-USB', 'Realtek-RTL8822C-UART', 'Realtek-RTL8852A-USB',
+        'QCA-6174A-3-UART', 'QCA-6174A-5-USB'
+]
+
+
 class TestMonitor():
     """Local object hosting the test values for Advertisement Monitor object.
 
@@ -32,6 +43,7 @@
         """
         self.type = None
         self.rssi = []
+        self.sampling_period = 256  # unset Sampling Period
         self.patterns = []
         self.monitor_id = None
         self.app_id = app_id
@@ -45,7 +57,7 @@
         @returns: the byte array.
 
         """
-        return [b for b in array.array('B', str_data)]
+        return [b for b in array.array('B', str_data.encode())]
 
 
     def update_type(self, monitor_type):
@@ -66,6 +78,15 @@
         self.rssi = monitor_rssi
 
 
+    def update_sampling_period(self, monitor_sampling_period):
+        """Update the sampling period value.
+
+        @param monitor_sampling_period: sampling period value.
+
+        """
+        self.sampling_period = monitor_sampling_period
+
+
     def update_patterns(self, monitor_patterns):
         """Update the content filter patterns.
 
@@ -96,7 +117,7 @@
         @returns: List containing the monitor data.
 
         """
-        return [self.type, self.rssi, self.patterns]
+        return [self.type, self.rssi + [self.sampling_period], self.patterns]
 
 
     def get_monitor_id(self):
@@ -132,6 +153,11 @@
     ADD_MONITOR_POLLING_SLEEP_SECS = 1
     PAIR_TEST_SLEEP_SECS = 5
 
+    # See doc/advertisement-monitor-api.txt for more info about unset values.
+    UNSET_RSSI = 127
+    UNSET_TIMEOUT = 0
+    UNSET_SAMPLING_PERIOD = 256
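As a hedged illustration, the test cases later in this file express a completely unset filter by combining these constants inside a test method (monitor stands for any TestMonitor instance):

    monitor.update_rssi([self.UNSET_RSSI, self.UNSET_TIMEOUT,
                         self.UNSET_RSSI, self.UNSET_TIMEOUT])
    monitor.update_sampling_period(self.UNSET_SAMPLING_PERIOD)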
+
     # Non-zero count value is used to indicate the case where multiple
     # DeviceFound/DeviceLost events are expected to occur.
     MULTIPLE_EVENTS = -1
@@ -148,6 +174,12 @@
     # Duration of kernel perform 'start discovery', in sec
     DISCOVERY_DURATION = 10.24
 
+    # Acceptable difference between the first RSSI sample and the following
+    # one. LOW_RSSI_THRESHOLD_TOLERANCE must be larger than
+    # HIGH_RSSI_THRESHOLD_TOLERANCE.
+    HIGH_RSSI_THRESHOLD_TOLERANCE = 20
+    LOW_RSSI_THRESHOLD_TOLERANCE = 40
+
     test_case_log = bluetooth_adapter_tests.test_case_log
     test_retry_and_log = bluetooth_adapter_tests.test_retry_and_log
 
@@ -286,6 +318,21 @@
                                                               monitor_id,
                                                               event)
 
+    def set_target_devices(self, app_id, monitor_id, devices):
+        """Set the target devices to the given monitor.
+
+        DeviceFound and DeviceLost events will only be counted if they are
+        triggered by a target device.
+
+        @param app_id: the app id.
+        @param monitor_id: the monitor id.
+        @param devices: a list of device MAC addresses.
+
+        @returns: True on success, False otherwise.
+
+        """
+        return self.bluetooth_facade.advmon_set_target_devices(
+                app_id, monitor_id, devices)
 
     def interleave_logger_start(self):
         """ Start interleave logger recording
@@ -330,6 +377,15 @@
                 logging.warning('More than one cancel events found %s', events)
         return event
 
+    def interleave_scan_get_durations(self):
+        """Get durations of allowlist scan and no filter scan
+
+        @returns: a dict of {'allowlist': allowlist_duration,
+                             'no filter': no_filter_duration},
+                  or None if something went wrong
+        """
+        return self.bluetooth_facade.advmon_interleave_scan_get_durations()
+
     @test_retry_and_log(False)
     def test_supported_types(self):
         """Test supported monitor types.
@@ -361,42 +417,38 @@
 
 
     def test_is_controller_offloading_supported(self):
-        """Check if controller based RSSI filtering is supported.
+        """Check if the controller supports HW offloading.
 
-            By default the LE_SCAN_FILTER_DUP flag is enabled on all platforms.
-            Due to this, the host does not receive as many advertisements during
-            passive scanning, which causes SW based RSSI filtering not to work
-            as intended. So, if the controller offloading is not supported, skip
-            the tests that involves RSSI filtering and raise TEST_NA.
-
-            @raises: TestNA if controller based RSSI filtering is not supported.
+        @raises: TestFail if the controller is expected to support Monitor
+                 Offloading but the support is missing.
 
         """
-        supported_features = self.read_supported_features()
-        if not supported_features:
-            logging.info('Controller offloading not supported')
-            raise error.TestNAError('Controller offloading not supported')
+        chipset = self.bluetooth_facade.get_chipset_name()
+        if chipset in ADVMON_UNSUPPORTED_CHIPSETS:
+            logging.warning('Controller support check skipped for %s', chipset)
+        else:
+            supported_features = self.read_supported_features()
+            if not supported_features:
+                logging.error('Controller support missing on %s', chipset)
+                raise error.TestFail('Controller offloading not supported')
+            logging.info('Controller offloading supported on %s', chipset)
 
 
-    def test_is_adv_monitoring_supported(self, require_rssi_filtering = False):
+    def test_is_adv_monitoring_supported(self):
         """Check if Adv Monitor API is supported.
 
             If AdvMonitor API is not supported by the platform,
             AdvertisementMonitorManager1 interface won't be exposed by
             bluetoothd. In such case, skip the test and raise TestNA.
 
-            @param require_rssi_filtering: True if test requires RSSI filtering.
-
-            @raises: TestNA if Adv Monitor API is not supported or if controller
-                     based RSSI filtering is not supported.
+            @raises: TestNA if Adv Monitor API is not supported.
 
         """
         if not self.advmon_check_manager_interface_exist():
             logging.info('Advertisement Monitor API not supported')
             raise error.TestNAError('Advertisement Monitor API not supported')
 
-        if require_rssi_filtering:
-            self.test_is_controller_offloading_supported()
+        self.test_is_controller_offloading_supported()
 
 
     @test_retry_and_log(False)
@@ -543,8 +595,18 @@
         checked_count = self.get_event_count(app_id, monitor_id, 'DeviceFound')
 
         if count == self.MULTIPLE_EVENTS:
+            self.results = {
+                    'Found events': checked_count,
+                    'Expected events': 'multiple'
+            }
+
             return checked_count > 0
 
+        self.results = {
+                'Found events': checked_count,
+                'Expected events': count
+        }
+
         return checked_count == count
 
 
@@ -570,8 +632,18 @@
         checked_count = self.get_event_count(app_id, monitor_id, 'DeviceLost')
 
         if count == self.MULTIPLE_EVENTS:
+            self.results = {
+                    'Found events': checked_count,
+                    'Expected events': 'multiple'
+            }
+
             return checked_count > 1
 
+        self.results = {
+                'Found events': checked_count,
+                'Expected events': count
+        }
+
         return checked_count == count
 
 
@@ -622,6 +694,17 @@
             self.remove_monitor(app_id, monitor_id)
             monitor.update_monitor_id(None)
 
+        # Set the target devices so that AdvMon ignores Adv from other devices
+        target_devices = []
+
+        if hasattr(self, 'peer_mouse'):
+            target_devices.append(self.peer_mouse.address)
+
+        if hasattr(self, 'peer_keybd'):
+            target_devices.append(self.peer_keybd.address)
+
+        self.set_target_devices(app_id, monitor_id, target_devices)
+
         self.results = {
                 'activated': checked_activate,
                 'released': checked_release
@@ -662,6 +745,9 @@
         self.peer_keybd = None
         self.peer_mouse = None
 
+        self.LOW_RSSI = None
+        self.HIGH_RSSI = None
+
         for device_type, device_list in self.devices.items():
             for device in device_list:
                 if device_type is 'BLE_KEYBOARD':
@@ -669,15 +755,27 @@
                 elif device_type is 'BLE_MOUSE':
                     self.peer_mouse = device
 
-        if self.peer_keybd is not None and self.peer_mouse is not None:
-            self.test_stop_peer_device_adv(self.peer_keybd)
-            self.test_stop_peer_device_adv(self.peer_mouse)
+        if self.peer_keybd is None or self.peer_mouse is None:
+            raise error.TestNAError('peer keyboard or mouse device not found')
 
-        self.results = {
-                'keybd': self.peer_keybd is not None,
-                'mouse': self.peer_mouse is not None
-        }
-        return all(self.results.values())
+        # Setup default RSSI threshold based on real RSSI range
+        keybd_rssi = self.get_device_sample_rssi(self.peer_keybd)
+        mouse_rssi = self.get_device_sample_rssi(self.peer_mouse)
+
+        if mouse_rssi is None or keybd_rssi is None:
+            raise error.TestNAError('failed to examine peer RSSI')
+
+        min_rssi = min(mouse_rssi, keybd_rssi)
+
+        # Make RSSI threshold tolerable.
+        self.HIGH_RSSI = max(min_rssi - self.HIGH_RSSI_THRESHOLD_TOLERANCE,
+                             -126)
+        self.LOW_RSSI = max(min_rssi - self.LOW_RSSI_THRESHOLD_TOLERANCE, -127)
+
+        self.test_stop_peer_device_adv(self.peer_keybd)
+        self.test_stop_peer_device_adv(self.peer_mouse)
+
+        return True
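As a worked example with a hypothetical reading: if the weaker of the two peers reports an RSSI of -55 dBm, the thresholds become

    HIGH_RSSI = max(-55 - 20, -126) = -75
    LOW_RSSI  = max(-55 - 40, -127) = -95

so the high threshold stays within the tighter tolerance of the observed signal and the low threshold sits well below it.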
 
 
     @test_retry_and_log(False)
@@ -728,7 +826,7 @@
 
         """
 
-        actual_cycle = len(records) / len(durations.keys())
+        actual_cycle = len(records) // len(list(durations.keys()))
         offset = self.INTERLEAVE_SCAN_CYCLE_NUM_TOLERANCE
         expect_cycle_lowerbound = max(1, expect_cycles - offset)
         expect_cycle_upperbound = expect_cycles + offset
@@ -835,12 +933,14 @@
 
         """
 
-        # TODO(b/171844106): get this parameters via
-        #                    MGMT_OP_READ_DEF_SYSTEM_CONFIG
-        durations = {'allowlist': 300, 'no filter': 500}
+        durations = self.interleave_scan_get_durations()
+        if durations is None:
+            raise error.TestFail(
+                    'Unexpected error: failed to get interleave durations')
 
         # Change the unit from msec to second for future convenience.
         durations = {key: value * 0.001 for key, value in durations.items()}
+
         return durations
 
     @test_retry_and_log(False)
@@ -875,7 +975,7 @@
         return all(self.results.values())
 
     @test_retry_and_log(False)
-    def test_interleaving_suspend_resume(self):
+    def test_interleaving_suspend_resume(self, expect_true):
         """ Test for checking if kernel paused interleave scan during system
             suspended.
 
@@ -904,15 +1004,19 @@
         logging.debug(records)
         logging.debug(cancel_event)
 
-        # Currently resume time is not very reliable. It is likely the actual
-        # time in sleeping is less than expect_suspend_time.
-        # Check the interleave scan paused for at least one cycle long instead.
-        self.results = self.check_records_paused(records, cancel_event,
-                                                 interleave_period, True)
+        if not expect_true:
+            self.results = {'No records': len(records) == 0}
+        else:
+            # Currently resume time is not very reliable. It is likely the
+            # actual time in sleeping is less than expect_suspend_time.
+            # Check the interleave scan paused for at least one cycle long
+            # instead.
+            self.results = self.check_records_paused(records, cancel_event,
+                                                     interleave_period, True)
         return all(self.results.values())
 
     @test_retry_and_log(False)
-    def test_interleaving_active_scan_cycle(self):
+    def test_interleaving_active_scan_cycle(self, expect_true):
         """ Test for checking if kernel paused interleave scan during active
             scan.
 
@@ -936,12 +1040,15 @@
         logging.debug(records)
         logging.debug(cancel_event)
 
-        # BlueZ pauses discovery for every DISCOVERY_DURATION then restarts it
-        # 5 seconds later. Interleave scan also get restarted during the paused
-        # time.
-        self.results = self.check_records_paused(records, cancel_event,
-                                                 self.DISCOVERY_DURATION,
-                                                 False)
+        if not expect_true:
+            self.results = {'No records': len(records) == 0}
+        else:
+            # BlueZ pauses discovery for every DISCOVERY_DURATION then restarts
+            # it 5 seconds later. Interleave scan also get restarted during the
+            # paused time.
+            self.results = self.check_records_paused(records, cancel_event,
+                                                     self.DISCOVERY_DURATION,
+                                                     False)
         self.test_stop_discovery()
         return all(self.results.values())
 
@@ -1043,12 +1150,6 @@
         self.test_add_monitor(monitor1, expected_release=True)
 
         # Incorrect rssi parameters, release should get called.
-        monitor2.update_rssi([-40, 0, -60, 10])
-        self.test_add_monitor(monitor2, expected_release=True)
-
-        monitor2.update_rssi([-40, 10, -60, 0])
-        self.test_add_monitor(monitor2, expected_release=True)
-
         monitor2.update_rssi([40, 10, -60, 10])
         self.test_add_monitor(monitor2, expected_release=True)
 
@@ -1064,8 +1165,57 @@
         monitor2.update_rssi([-60, 10, -40, 10])
         self.test_add_monitor(monitor2, expected_release=True)
 
-        # Unset the rssi filter parameters.
-        monitor2.update_rssi([127, 0, 127, 0])
+        # Correct rssi parameters, activate should get called.
+        monitor2.update_rssi([self.UNSET_RSSI, 10, -60, 10])
+        self.test_add_monitor(monitor2, expected_activate=True)
+        self.test_remove_monitor(monitor2)
+
+        monitor2.update_rssi([-40, self.UNSET_TIMEOUT, -60, 10])
+        self.test_add_monitor(monitor2, expected_activate=True)
+        self.test_remove_monitor(monitor2)
+
+        monitor2.update_rssi([-40, 10, self.UNSET_RSSI, 10])
+        self.test_add_monitor(monitor2, expected_activate=True)
+        self.test_remove_monitor(monitor2)
+
+        monitor2.update_rssi([-40, 10, -60, self.UNSET_TIMEOUT])
+        self.test_add_monitor(monitor2, expected_activate=True)
+        self.test_remove_monitor(monitor2)
+
+        # Incorrect sampling period, release should get called.
+        monitor2.update_sampling_period(257)
+        self.test_add_monitor(monitor2, expected_release=True)
+
+        # Partial RSSI filter and sampling period, activate should get called.
+        monitor2.update_rssi([-40, 10, self.UNSET_RSSI, self.UNSET_TIMEOUT])
+        monitor2.update_sampling_period(self.UNSET_SAMPLING_PERIOD)
+        self.test_add_monitor(monitor2, expected_activate=True)
+        self.test_remove_monitor(monitor2)
+
+        monitor2.update_rssi([-40, 10, self.UNSET_RSSI, self.UNSET_TIMEOUT])
+        monitor2.update_sampling_period(5)
+        self.test_add_monitor(monitor2, expected_activate=True)
+        self.test_remove_monitor(monitor2)
+
+        monitor2.update_rssi([self.UNSET_RSSI, self.UNSET_TIMEOUT, -60, 10])
+        monitor2.update_sampling_period(self.UNSET_SAMPLING_PERIOD)
+        self.test_add_monitor(monitor2, expected_activate=True)
+        self.test_remove_monitor(monitor2)
+
+        monitor2.update_rssi([self.UNSET_RSSI, self.UNSET_TIMEOUT, -60, 10])
+        monitor2.update_sampling_period(10)
+        self.test_add_monitor(monitor2, expected_activate=True)
+        self.test_remove_monitor(monitor2)
+
+        monitor2.update_rssi([
+                self.UNSET_RSSI,
+                self.UNSET_TIMEOUT,
+                self.UNSET_RSSI,
+                self.UNSET_TIMEOUT
+        ])
+        monitor2.update_sampling_period(self.UNSET_SAMPLING_PERIOD)
+        self.test_add_monitor(monitor2, expected_activate=True)
+        self.test_remove_monitor(monitor2)
 
         # Incorrect pattern parameters, release should get called.
         monitor2.update_patterns([
@@ -1122,126 +1272,22 @@
         self.test_exit_app(app1)
 
 
-    def advmon_test_pattern_filter_only(self):
-        """Test case: PATTERN_FILTER_ONLY
+    def advmon_test_pattern_filter(self):
+        """Test case: PATTERN_FILTER
 
         Verify matching of advertisements w.r.t. various pattern values and
         different AD Data Types - Local Name Service UUID and Device Type.
-        Test working of patterns filter matching with multiple clients,
-        multiple monitors and suspend/resume, without RSSI filtering.
 
         """
         self.test_is_adv_monitoring_supported()
         self.test_setup_peer_devices()
 
-        # Create two test app instances.
-        app1 = self.create_app()
-        app2 = self.create_app()
-
-        # Register both apps, should not fail.
-        self.test_register_app(app1)
-        self.test_register_app(app2)
-
-        # Add monitors in both apps.
-        monitor1 = TestMonitor(app1)
-        monitor1.update_type('or_patterns')
-        monitor1.update_patterns([
-                [5, 0x09, '_REF'],
-        ])
-        monitor1.update_rssi([127, 0, 127, 0])
-
-        monitor2 = TestMonitor(app1)
-        monitor2.update_type('or_patterns')
-        monitor2.update_patterns([
-                [0, 0x03, [0x12, 0x18]],
-        ])
-        monitor2.update_rssi([127, 0, 127, 0])
-
-        monitor3 = TestMonitor(app2)
-        monitor3.update_type('or_patterns')
-        monitor3.update_patterns([
-                [0, 0x19, [0xc1, 0x03]],
-                [0, 0x09, 'MOUSE'],
-        ])
-        monitor3.update_rssi([127, 0, 127, 0])
-
-        monitor4 = TestMonitor(app2)
-        monitor4.update_type('or_patterns')
-        monitor4.update_patterns([
-                [0, 0x19, [0xc1, 0x03]],
-                [0, 0x19, [0xc3, 0x03]],
-        ])
-        monitor4.update_rssi([127, 0, 127, 0])
-
-        # Activate should get invoked.
-        self.test_add_monitor(monitor1, expected_activate=True)
-        self.test_add_monitor(monitor2, expected_activate=True)
-        self.test_add_monitor(monitor3, expected_activate=True)
-        self.test_add_monitor(monitor4, expected_activate=True)
-
-        # DeviceFound for mouse should get triggered only for monitors
-        # matching the adv pattern filter.
-        self.test_start_peer_device_adv(self.peer_mouse, duration=5)
-        self.test_device_found(monitor1, count=self.MULTIPLE_EVENTS)
-        self.test_device_found(monitor2, count=self.MULTIPLE_EVENTS)
-        self.test_device_found(monitor3, count=self.MULTIPLE_EVENTS)
-        # Device type 0xc203 should not match.
-        self.test_device_found(monitor4, count=0)
-        self.test_stop_peer_device_adv(self.peer_mouse)
-
-        # Initiate suspend/resume.
-        self.suspend_resume()
-
-        # Remove a monitor from one app, shouldn't affect working of other
-        # monitors or apps.
-        self.test_remove_monitor(monitor1)
-
-        # Reset event counts before next test.
-        self.test_reset_event_count(monitor2)
-        self.test_reset_event_count(monitor3)
-
-        # DeviceFound for mouse should get triggered again for monitors
-        # matching the adv pattern filter.
-        self.test_start_peer_device_adv(self.peer_mouse, duration=5)
-        self.test_device_found(monitor2, count=self.MULTIPLE_EVENTS)
-        self.test_device_found(monitor3, count=self.MULTIPLE_EVENTS)
-        self.test_stop_peer_device_adv(self.peer_mouse)
-
-        # Terminate an app, shouldn't affect working of monitors in other apps.
-        self.test_exit_app(app1)
-
-        # Reset event counts before next test.
-        self.test_reset_event_count(monitor3)
-
-        # DeviceFound should get triggered for keyboard.
-        self.test_start_peer_device_adv(self.peer_keybd, duration=5)
-        self.test_device_found(monitor3, count=self.MULTIPLE_EVENTS)
-        self.test_device_found(monitor4, count=self.MULTIPLE_EVENTS)
-        self.test_stop_peer_device_adv(self.peer_keybd)
-
-        # Unregister the running app, should not fail.
-        self.test_unregister_app(app2)
-
-        # Terminate the running test app instance.
-        self.test_exit_app(app2)
-
-
-    def advmon_test_pattern_filter_1(self):
-        """Test case: PATTERN_FILTER_1
-
-        Verify matching of advertisements w.r.t. various pattern values and
-        different AD Data Types - Local Name Service UUID and Device Type.
-
-        """
-        self.test_is_adv_monitoring_supported(require_rssi_filtering = True)
-        self.test_setup_peer_devices()
-
         # Create a test app instance.
         app1 = self.create_app()
 
         monitor1 = TestMonitor(app1)
         monitor1.update_type('or_patterns')
-        monitor1.update_rssi([-60, 3, -80, 3])
+        monitor1.update_rssi([self.HIGH_RSSI, 3, self.LOW_RSSI, 3])
 
         # Register the app, should not fail.
         self.test_register_app(app1)
@@ -1323,13 +1369,13 @@
         self.test_exit_app(app1)
 
 
-    def advmon_test_rssi_filter_1(self):
-        """Test case: RSSI_FILTER_1
+    def advmon_test_rssi_filter_range(self):
+        """Test case: RSSI_FILTER_RANGE
 
         Verify unset RSSI filter and filter with no matching RSSI values.
 
         """
-        self.test_is_adv_monitoring_supported(require_rssi_filtering = True)
+        self.test_is_adv_monitoring_supported()
         self.test_setup_peer_devices()
 
         # Create a test app instance.
@@ -1344,7 +1390,12 @@
         # Register the app, should not fail.
         self.test_register_app(app1)
 
-        monitor1.update_rssi([127, 0, 127, 0])
+        monitor1.update_rssi([
+                self.UNSET_RSSI,
+                self.UNSET_TIMEOUT,
+                self.UNSET_RSSI,
+                self.UNSET_TIMEOUT
+        ])
         self.test_add_monitor(monitor1, expected_activate=True)
 
         # Unset RSSI filter, adv should match multiple times.
@@ -1377,13 +1428,13 @@
         self.test_exit_app(app1)
 
 
-    def advmon_test_rssi_filter_2(self):
-        """Test case: RSSI_FILTER_2
+    def advmon_test_rssi_filter_multi_peers(self):
+        """Test case: RSSI_FILTER_MULTI_PEERS
 
         Verify RSSI filter matching with multiple peer devices.
 
         """
-        self.test_is_adv_monitoring_supported(require_rssi_filtering = True)
+        self.test_is_adv_monitoring_supported()
         self.test_setup_peer_devices()
 
         # Create a test app instance.
@@ -1398,7 +1449,9 @@
         # Register the app, should not fail.
         self.test_register_app(app1)
 
-        monitor1.update_rssi([-60, 3, -80, 3])
+        monitor1.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 3,
+        ])
         self.test_add_monitor(monitor1, expected_activate=True)
 
         # DeviceFound should get triggered only once per device.
@@ -1419,22 +1472,6 @@
 
         self.test_remove_monitor(monitor1)
 
-        monitor1.update_rssi([-60, 10, -80, 10])
-        self.test_add_monitor(monitor1, expected_activate=True)
-
-        # Device was online for short period of time, so DeviceFound should
-        # not get triggered.
-        self.test_start_peer_device_adv(self.peer_keybd, duration=5)
-        self.test_device_found(monitor1, count=0)
-
-        # Device did not come back online, DeviceFound should not get triggered.
-        # No device was found earlier, so DeviceLost should not get triggered.
-        self.test_stop_peer_device_adv(self.peer_keybd, duration=15)
-        self.test_device_found(monitor1, count=0)
-        self.test_device_lost(monitor1, count=0)
-
-        self.test_remove_monitor(monitor1)
-
         # Unregister the app, should not fail.
         self.test_unregister_app(app1)
 
@@ -1442,13 +1479,13 @@
         self.test_exit_app(app1)
 
 
-    def advmon_test_rssi_filter_3(self):
-        """Test case: RSSI_FILTER_3
+    def advmon_test_rssi_filter_reset(self):
+        """Test case: RSSI_FILTER_RESET
 
         Verify reset of RSSI timers based on advertisements.
 
         """
-        self.test_is_adv_monitoring_supported(require_rssi_filtering = True)
+        self.test_is_adv_monitoring_supported()
         self.test_setup_peer_devices()
 
         # Create a test app instance.
@@ -1463,25 +1500,14 @@
         # Register the app, should not fail.
         self.test_register_app(app1)
 
-        monitor1.update_rssi([-60, 10, -80, 10])
+        monitor1.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 10,
+        ])
         self.test_add_monitor(monitor1, expected_activate=True)
 
-        # DeviceFound should not get triggered before timeout.
+        # DeviceFound should get triggered once the peer starts advertising.
         self.test_start_peer_device_adv(self.peer_keybd, duration=5)
-        self.test_device_found(monitor1, count=0)
-
-        # DeviceFound should not get triggered as device went offline.
-        # No device was found earlier, so DeviceLost should not get triggered.
-        self.test_stop_peer_device_adv(self.peer_keybd, duration=10)
-        self.test_device_found(monitor1, count=0)
-        self.test_device_lost(monitor1, count=0)
-
-        # Timer should get reset, so DeviceFound should not get triggered.
-        self.test_start_peer_device_adv(self.peer_keybd, duration=5)
-        self.test_device_found(monitor1, count=0)
-
-        # DeviceFound should get triggered once timer completes.
-        self.test_device_found(monitor1, count=1, delay=10)
+        self.test_device_found(monitor1, count=1)
 
         # DeviceLost should not get triggered before timeout.
         self.test_stop_peer_device_adv(self.peer_keybd, duration=5)
@@ -1516,7 +1542,7 @@
         clients and multiple monitors.
 
         """
-        self.test_is_adv_monitoring_supported(require_rssi_filtering = True)
+        self.test_is_adv_monitoring_supported()
         self.test_setup_peer_devices()
 
         # Create two test app instances.
@@ -1534,7 +1560,9 @@
                 [0, 0x03, [0x12, 0x18]],
                 [0, 0x19, [0xc1, 0x03]],
         ])
-        monitor1.update_rssi([-60, 3, -80, 3])
+        monitor1.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 3,
+        ])
 
         monitor2 = TestMonitor(app2)
         monitor2.update_type('or_patterns')
@@ -1542,7 +1570,9 @@
                 [0, 0x03, [0x12, 0x18]],
                 [0, 0x19, [0xc1, 0x03]],
         ])
-        monitor2.update_rssi([-60, 3, -80, 3])
+        monitor2.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 3,
+        ])
 
         # Activate should get invoked.
         self.test_add_monitor(monitor1, expected_activate=True)
@@ -1563,14 +1593,18 @@
         monitor3.update_patterns([
                 [0, 0x19, [0xc2, 0x03]],
         ])
-        monitor3.update_rssi([-60, 3, -80, 3])
+        monitor3.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 3,
+        ])
 
         monitor4 = TestMonitor(app2)
         monitor4.update_type('or_patterns')
         monitor4.update_patterns([
                 [0, 0x19, [0xc2, 0x03]],
         ])
-        monitor4.update_rssi([-60, 10, -80, 10])
+        monitor4.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 10,
+        ])
 
         # Activate should get invoked.
         self.test_add_monitor(monitor3, expected_activate=True)
@@ -1580,11 +1614,7 @@
         self.test_start_peer_device_adv(self.peer_mouse, duration=5)
         self.test_device_found(monitor2, count=2)
         self.test_device_found(monitor3, count=1)
-
-        # Since the RSSI timeouts are different for monitor4, DeviceFound
-        # event should get triggered after total of 10 seconds.
-        self.test_device_found(monitor4, count=0)
-        self.test_device_found(monitor4, count=1, delay=5)
+        self.test_device_found(monitor4, count=1)
         self.test_stop_peer_device_adv(self.peer_mouse)
 
         # Unregister both apps, should not fail.
@@ -1614,7 +1644,9 @@
         monitor1.update_patterns([
                 [0, 0x03, [0x12, 0x18]],
         ])
-        monitor1.update_rssi([127, 0, 127, 0])
+        monitor1.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 3,
+        ])
 
         # Register the app, should not fail.
         self.test_register_app(app1)
@@ -1636,7 +1668,7 @@
         self.test_reset_event_count(monitor1)
         self.test_start_peer_device_adv(self.peer_keybd, duration=5)
         self.test_device_found(monitor1, count=self.MULTIPLE_EVENTS)
-        self.test_stop_peer_device_adv(self.peer_keybd)
+        self.test_stop_peer_device_adv(self.peer_keybd, duration=5)
 
         # Start foreground scanning.
         self.test_start_discovery()
@@ -1657,7 +1689,7 @@
         self.test_reset_event_count(monitor1)
         self.test_start_peer_device_adv(self.peer_keybd, duration=10)
         self.test_device_found(monitor1, count=self.MULTIPLE_EVENTS)
-        self.test_stop_peer_device_adv(self.peer_keybd)
+        self.test_stop_peer_device_adv(self.peer_keybd, duration=5)
 
         # Stop foreground scanning.
         self.test_stop_discovery()
@@ -1690,7 +1722,7 @@
         Verify working of background scanning with suspend/resume.
 
         """
-        self.test_is_adv_monitoring_supported(require_rssi_filtering = True)
+        self.test_is_adv_monitoring_supported()
         self.test_setup_peer_devices()
 
         # Create two test app instances.
@@ -1705,22 +1737,30 @@
         monitor1 = TestMonitor(app1)
         monitor1.update_type('or_patterns')
         monitor1.update_patterns([ [0, 0x03, [0x12, 0x18]], ])
-        monitor1.update_rssi([-60, 3, -80, 3])
+        monitor1.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 3,
+        ])
 
         monitor2 = TestMonitor(app1)
         monitor2.update_type('or_patterns')
         monitor2.update_patterns([ [0, 0x19, [0xc2, 0x03]], ])
-        monitor2.update_rssi([-60, 10, -80, 10])
+        monitor2.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 10,
+        ])
 
         monitor3 = TestMonitor(app2)
         monitor3.update_type('or_patterns')
         monitor3.update_patterns([ [0, 0x03, [0x12, 0x18]], ])
-        monitor3.update_rssi([-60, 3, -80, 3])
+        monitor3.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 3,
+        ])
 
         monitor4 = TestMonitor(app2)
         monitor4.update_type('or_patterns')
-        monitor4.update_patterns([ [0, 0x19, [0xc2, 0x03]], ])
-        monitor4.update_rssi([-60, 15, -80, 15])
+        monitor4.update_patterns([ [0, 0x19, [0xc1, 0x03]], ])
+        monitor4.update_rssi([
+                self.HIGH_RSSI, self.UNSET_TIMEOUT, self.LOW_RSSI, 15,
+        ])
 
         # Activate should get invoked.
         self.test_add_monitor(monitor1, expected_activate=True)
@@ -1728,32 +1768,40 @@
         self.test_add_monitor(monitor3, expected_activate=True)
         self.test_add_monitor(monitor4, expected_activate=True)
 
-        # DeviceFound for mouse should get triggered only for monitors
-        # satisfying the RSSI timers.
+        # DeviceFound for mouse should get triggered only for matched monitors
         self.test_start_peer_device_adv(self.peer_mouse, duration=5)
         self.test_device_found(monitor1, count=1)
-        self.test_device_found(monitor2, count=0)
+        self.test_device_found(monitor2, count=1)
         self.test_device_found(monitor3, count=1)
         self.test_device_found(monitor4, count=0)
 
         # Initiate suspend/resume.
         self.suspend_resume()
 
+        # DeviceLost should get triggered for tracked devices on resume.
+        self.test_device_lost(monitor1, count=1)
+        self.test_device_lost(monitor2, count=1)
+        self.test_device_lost(monitor3, count=1)
+        self.test_device_lost(monitor4, count=0)
+
+        # DeviceFound should get triggered again for matched monitors on resume.
+        self.test_device_found(monitor1, count=2)
+        self.test_device_found(monitor2, count=2)
+        self.test_device_found(monitor3, count=2)
+        self.test_device_found(monitor4, count=0)
+        self.test_stop_peer_device_adv(self.peer_mouse)
+
         # Remove a monitor from one app, shouldn't affect working of other
         # monitors or apps.
         self.test_remove_monitor(monitor1)
 
-        # DeviceFound should get triggered for monitors with higher RSSI timers.
-        self.test_device_found(monitor2, count=1, delay=10)
-        self.test_device_found(monitor4, count=1, delay=5)
-        self.test_stop_peer_device_adv(self.peer_mouse)
-
         # Terminate an app, shouldn't affect working of monitors in other apps.
         self.test_exit_app(app1)
 
         # DeviceFound should get triggered for keyboard.
         self.test_start_peer_device_adv(self.peer_keybd, duration=5)
-        self.test_device_found(monitor3, count=2)
+        self.test_device_found(monitor3, count=3)
+        self.test_device_found(monitor4, count=1)
         self.test_stop_peer_device_adv(self.peer_keybd)
 
         # Unregister the running app, should not fail.
@@ -1771,6 +1819,15 @@
         # cycles to collect logs for tests expect no interleave scan
         EXPECT_FALSE_TEST_CYCLE = 3
 
+        supported_features = self.read_supported_features()
+
+        if 'controller-patterns' in supported_features:
+            # For device supporting hardware filtering, software interleave
+            # scan shall not be used.
+            sw_interleave_scan = False
+        else:
+            sw_interleave_scan = True
+
         # Create a test app instance.
         app1 = self.create_app()
 
@@ -1779,7 +1836,12 @@
         monitor1.update_patterns([
                 [0, 0x03, [0x12, 0x18]],
         ])
-        monitor1.update_rssi([127, 0, 127, 0])
+        monitor1.update_rssi([
+                self.UNSET_RSSI,
+                self.UNSET_TIMEOUT,
+                self.UNSET_RSSI,
+                self.UNSET_TIMEOUT
+        ])
 
         # Register the app, should not fail.
         self.test_register_app(app1)
@@ -1814,18 +1876,18 @@
         device.AdapterPowerOff()
         # Make sure the peer is disconnected
         self.test_device_is_not_connected(device.address)
-        self.test_interleaving_state(True)
+        self.test_interleaving_state(sw_interleave_scan)
 
         # Interleaving with allowlist should get paused during active scan
-        self.test_interleaving_active_scan_cycle()
+        self.test_interleaving_active_scan_cycle(sw_interleave_scan)
 
         # Interleaving with allowlist should get resumed after stopping scan
-        self.test_interleaving_state(True)
+        self.test_interleaving_state(sw_interleave_scan)
 
         # Interleaving with allowlist should get paused during system suspend,
         # get resumed after system awake
-        self.test_interleaving_suspend_resume()
-        self.test_interleaving_state(True)
+        self.test_interleaving_suspend_resume(sw_interleave_scan)
+        self.test_interleaving_state(sw_interleave_scan)
 
         self.test_remove_monitor(monitor1)
         self.test_interleaving_state(False, cycles=EXPECT_FALSE_TEST_CYCLE)
@@ -1835,3 +1897,5 @@
 
         # Terminate the test app instance.
         self.test_exit_app(app1)
+
+        device.AdapterPowerOn()
diff --git a/server/cros/bluetooth/bluetooth_adapter_audio_tests.py b/server/cros/bluetooth/bluetooth_adapter_audio_tests.py
index 8dec4e4..8c24dad 100644
--- a/server/cros/bluetooth/bluetooth_adapter_audio_tests.py
+++ b/server/cros/bluetooth/bluetooth_adapter_audio_tests.py
@@ -19,10 +19,10 @@
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.bluetooth.bluetooth_audio_test_data import (
-        A2DP, HFP_NBS, HFP_WBS, AUDIO_DATA_TARBALL_PATH, VISQOL_BUFFER_LENGTH,
-        DATA_DIR, VISQOL_PATH, VISQOL_SIMILARITY_MODEL, VISQOL_TEST_DIR,
-        AUDIO_RECORD_DIR, audio_test_data, get_audio_test_data,
-        get_visqol_binary)
+        A2DP, HFP_NBS, HFP_NBS_MEDIUM, HFP_WBS, HFP_WBS_MEDIUM,
+        AUDIO_DATA_TARBALL_PATH, VISQOL_BUFFER_LENGTH, DATA_DIR, VISQOL_PATH,
+        VISQOL_SIMILARITY_MODEL, VISQOL_TEST_DIR, AUDIO_RECORD_DIR,
+        audio_test_data, get_audio_test_data, get_visqol_binary)
 from autotest_lib.server.cros.bluetooth.bluetooth_adapter_tests import (
     BluetoothAdapterTests, test_retry_and_log)
 from six.moves import range
@@ -43,6 +43,7 @@
     # The node types of the bluetooth output nodes in cras are the same for both
     # A2DP and HFP.
     CRAS_BLUETOOTH_OUTPUT_NODE_TYPE = 'BLUETOOTH'
+    CRAS_INTERNAL_SPEAKER_OUTPUT_NODE_TYPE = 'INTERNAL_SPEAKER'
     # The node types of the bluetooth input nodes in cras are different for WBS
     # and NBS.
     CRAS_HFP_BLUETOOTH_INPUT_NODE_TYPE = {HFP_WBS: 'BLUETOOTH',
@@ -219,6 +220,25 @@
         except Exception as e:
             raise error.TestError('Exception occurred when %s (%s)' % (desc, e))
 
+    def _scp_to_dut(self, device, src_file, dest_file):
+        """SCP file from peer device to DuT."""
+        ip = self.host.ip
+        # Localhost is unlikely to be the correct ip target so take the local
+        # host ip if it exists.
+        if self.host.ip == '127.0.0.1' and self.local_host_ip:
+            ip = self.local_host_ip
+            logging.info('Using local host ip = %s', ip)
+
+        device.ScpToDut(src_file, dest_file, ip)
+
+    def check_wbs_capability(self):
+        """Check if the DUT supports WBS capability.
+
+        @returns: True if the DUT supports WBS, False otherwise.
+        """
+        capabilities, err = self.bluetooth_facade.get_supported_capabilities()
+        return err is None and bool(capabilities.get('wide band speech'))
+
 
     def initialize_bluetooth_audio(self, device, test_profile):
         """Initialize the Bluetooth audio task.
@@ -238,7 +258,7 @@
             raise error.TestError('Failed to start pulseaudio.')
         logging.debug('pulseaudio is started.')
 
-        if test_profile in (HFP_WBS, HFP_NBS):
+        if test_profile in (HFP_WBS, HFP_NBS, HFP_NBS_MEDIUM, HFP_WBS_MEDIUM):
             if device.StartOfono():
                 logging.debug('ofono is started.')
             else:
@@ -246,7 +266,7 @@
         elif device.StopOfono():
             logging.debug('ofono is stopped.')
         else:
-            logging.warn('Failed to stop ofono. Ignored.')
+            logging.warning('Failed to stop ofono. Ignored.')
 
         # Need time to complete starting services.
         time.sleep(self.WAIT_DAEMONS_READY_SECS)
@@ -262,12 +282,12 @@
         if device.StopPulseaudio():
             logging.debug('pulseaudio is stopped.')
         else:
-            logging.warn('Failed to stop pulseaudio. Ignored.')
+            logging.warning('Failed to stop pulseaudio. Ignored.')
 
         if device.StopOfono():
             logging.debug('ofono is stopped.')
         else:
-            logging.warn('Failed to stop ofono. Ignored.')
+            logging.warning('Failed to stop ofono. Ignored.')
 
 
     def initialize_bluetooth_player(self, device):
@@ -295,170 +315,6 @@
         device.UnexportMediaPlayer()
 
 
-    def select_audio_output_node(self):
-        """Select the audio output node through cras.
-
-        @raises: error.TestError if failed.
-        """
-        def bluetooth_type_selected(node_type):
-            """Check if the bluetooth node type is selected."""
-            selected = self.bluetooth_facade.get_selected_output_device_type()
-            logging.debug('active output node type: %s, expected %s',
-                          selected, node_type)
-            return selected == node_type
-
-        node_type = self.CRAS_BLUETOOTH_OUTPUT_NODE_TYPE
-        if not self.bluetooth_facade.select_output_node(node_type):
-            raise error.TestError('select_output_node failed')
-
-        desc='waiting for %s as active cras audio output node type' % node_type
-        logging.debug(desc)
-        self._poll_for_condition(lambda: bluetooth_type_selected(node_type),
-                                 desc=desc)
-
-
-    def initialize_hfp(self, device, test_profile, test_data,
-                       recording_device, bluez_function):
-        """Initial set up for hfp tests.
-
-        Setup that is required for all hfp tests where
-        dut is either source or sink. Selects input device, starts recording,
-        and lastly it waits for pulseaudio bluez source/sink.
-
-        @param device: the bluetooth peer device
-        @param test_profile: the test profile used, HFP_WBS or HFP_NBS
-        @param test_data: a dictionary about the audio test data defined in
-                client/cros/bluetooth/bluetooth_audio_test_data.py
-        @param recording_device: which device recorded the audio, possible
-                values are 'recorded_by_dut' or 'recorded_by_peer'
-        @param bluez_function: the appropriate bluez hfp function either
-                _get_pulseaudio_bluez_source_hfp or
-                _get_pulseaudio_bluez_sink_hfp depending on the role of the dut
-        """
-        device_type = 'DUT' if recording_device == 'recorded_by_dut' else 'Peer'
-        dut_role = 'sink' if recording_device == 'recorded_by_dut' else 'source'
-
-        # Select audio input device.
-        desc = 'waiting for cras to select audio input device'
-        logging.debug(desc)
-        self._poll_for_condition(
-                lambda: self.bluetooth_facade.select_input_device(device.name),
-                desc=desc)
-
-        # Select audio output node so that we do not rely on chrome to do it.
-        self.select_audio_output_node()
-
-        # Enable HFP profile.
-        logging.debug('Start recording audio on {}'.format(device_type))
-        if not self.bluetooth_facade.start_capturing_audio_subprocess(
-                test_data, recording_device):
-            desc = '{} failed to start capturing audio.'.format(device_type)
-            raise error.TestError(desc)
-
-        # Wait for pulseaudio bluez hfp source/sink
-        desc = 'waiting for pulseaudio bluez hfp {}'.format(dut_role)
-        logging.debug(desc)
-        self._poll_for_condition(lambda: bluez_function(device, test_profile),
-                                 desc=desc)
-
-
-    def hfp_record_on_dut(self, device, test_profile, test_data):
-        """Play audio from test_data dictionary from peer device to dut.
-
-        Play file described in test_data dictionary from peer device to dut
-        using test_profile, either HFP_WBS or HFP_NBS and record on dut.
-
-        @param device: the bluetooth peer device
-        @param test_profile: the test profile used, HFP_WBS or HFP_NBS
-        @param test_data: a dictionary about the audio test data defined in
-                client/cros/bluetooth/bluetooth_audio_test_data.py
-
-        @returns: True if the recorded audio frames are legitimate, False
-                if they are not, ie. it did not record.
-        """
-        # Select audio input device.
-        logging.debug('Select input device')
-        if not self.bluetooth_facade.select_input_device(device.name):
-            raise error.TestError('DUT failed to select audio input device.')
-
-        # Start playing audio on chameleon.
-        logging.debug('Start playing audio on Pi')
-        if not device.StartPlayingAudioSubprocess(test_profile, test_data):
-            err = 'Failed to start playing audio file on the peer device'
-            raise error.TestError(err)
-
-        time.sleep(test_data['duration'])
-
-        # Stop playing audio on chameleon.
-        logging.debug('Stop playing audio on Pi')
-        if not device.StopPlayingAudioSubprocess():
-            err = 'Failed to stop playing audio on the peer device'
-            raise error.TestError(err)
-
-        # Disable HFP profile.
-        logging.debug('Stop recording audio on DUT')
-        if not self.bluetooth_facade.stop_capturing_audio_subprocess():
-            raise error.TestError('DUT failed to stop capturing audio.')
-
-        # Check if the audio frames in the recorded file are legitimate.
-        return self._check_audio_frames_legitimacy(test_data, 'recorded_by_dut')
-
-
-    def hfp_record_on_peer(self, device, test_profile, test_data):
-        """Play audio from test_data dictionary from dut to peer device.
-
-        Play file described in test_data dictionary from dut to peer device
-        using test_profile, either HFP_WBS or HFP_NBS and record on peer.
-
-        @param device: The bluetooth peer device.
-        @param test_profile: The test profile used, HFP_WBS or HFP_NBS.
-        @param test_data: A dictionary about the audio test data defined in
-                client/cros/bluetooth/bluetooth_audio_test_data.py.
-
-        @returns: True if the recorded audio frames are legitimate, False
-                if they are not, ie. it did not record.
-        """
-        logging.debug('Start recording audio on Pi')
-        # Start recording audio on the peer Bluetooth audio device.
-        if not device.StartRecordingAudioSubprocess(test_profile, test_data):
-            raise error.TestError(
-                    'Failed to record on the peer Bluetooth audio device.')
-
-        # Play audio on the DUT in a non-blocked way.
-        # If there are issues, cras_test_client playing back might be blocked
-        # forever. We would like to avoid the testing procedure from that.
-        logging.debug('Start playing audio')
-        if not self.bluetooth_facade.start_playing_audio_subprocess(test_data):
-            raise error.TestError('DUT failed to play audio.')
-
-        time.sleep(test_data['duration'])
-
-        logging.debug('Stop recording audio on Pi')
-        # Stop recording audio on the peer Bluetooth audio device.
-        if not device.StopRecordingingAudioSubprocess():
-            msg = 'Failed to stop recording on the peer Bluetooth audio device'
-            logging.error(msg)
-
-        # Disable HFP profile.
-        logging.debug('Stop recording audio on DUT')
-        if not self.bluetooth_facade.stop_capturing_audio_subprocess():
-            raise error.TestError('DUT failed to stop capturing audio.')
-
-        # Stop playing audio on DUT.
-        logging.debug('Stop playing audio on DUT')
-        if not self.bluetooth_facade.stop_playing_audio_subprocess():
-            raise error.TestError('DUT failed to stop playing audio.')
-
-        # Copy the recorded audio file to the DUT for spectrum analysis.
-        logging.debug('Scp to DUT')
-        recorded_file = test_data['recorded_by_peer']
-        device.ScpToDut(recorded_file, recorded_file, self.host.ip)
-
-        # Check if the audio frames in the recorded file are legitimate.
-        return self._check_audio_frames_legitimacy(test_data,
-                                                   'recorded_by_peer')
-
-
     def parse_visqol_output(self, stdout, stderr):
         """
         Parse stdout and stderr string from VISQOL output and parse into
@@ -477,13 +333,14 @@
         @returns: A tuple of a float score and string representation of the
                 srderr or None if there was no error.
         """
-        string_out = stdout or ''
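+        # stdout/stderr are returned as bytes; decode them before logging and
+        # pattern matching.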
+        string_out = stdout.decode('utf-8') or ''
+        stderr = stderr.decode('utf-8')
 
         # Log verbose VISQOL output:
         log_file = os.path.join(VISQOL_TEST_DIR, 'VISQOL_LOG.txt')
         with open(log_file, 'w+') as f:
             f.write('String Error:\n{}\n'.format(stderr))
-            f.write('String Out:\n{}\n'.format(stdout))
+            f.write('String Out:\n{}\n'.format(string_out))
 
         # pattern matches first float or int after 'MOS-LQO:' in stdout,
         # e.g. it would match the line 'MOS-LQO       2.3' in the stdout
@@ -606,7 +463,8 @@
                 raise error.TestError('Could not convert raw file to wav')
 
         # Compute the duration of played file without added buffer
-        new_duration = test_data['duration'] - VISQOL_BUFFER_LENGTH
+        new_duration = (test_data['chunk_checking_duration'] -
+                        VISQOL_BUFFER_LENGTH)
         # build path for file resulting from trimming to desired duration
         trimmed_file = '{}_t{}'.format(*os.path.splitext(untrimmed_file))
         if not self.bluetooth_facade.trim_wav_file(
@@ -616,14 +474,141 @@
         return self.get_ref_and_deg_files(trimmed_file, test_profile, test_data)
 
 
-    def handle_chunks(self, device, test_profile, test_data, duration):
-        """Handle chunks of recorded streams and verify the primary frequencies.
+    def handle_one_chunk(self, device, chunk_in_secs, index, test_profile):
+        """Handle one chunk of audio data by calling chameleon api."""
+
+        ip = self.host.ip
+        # Localhost is unlikely to be the correct ip target, so use the local
+        # host ip if it is available.
+        if self.host.ip == '127.0.0.1' and self.local_host_ip:
+            ip = self.local_host_ip
+            logging.info('Using local host ip = %s', ip)
+
+        # TODO(b/207046142): Remove the old version fallback after the new
+        # Chameleon bundle is deployed.
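+        # The newer HandleOneChunk API takes (chunk_in_secs, index, ip);
+        # older Chameleon bundles also expect the test_profile argument.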
+        try:
+            recorded_file = device.HandleOneChunk(chunk_in_secs, index, ip)
+        except Exception as e:
+            logging.debug("Unable to use new version of HandleOneChunk;"
+                          "fall back to use the old one.")
+            try:
+                recorded_file = device.HandleOneChunk(chunk_in_secs, index,
+                                                      test_profile, ip)
+            except Exception as e:
+                raise error.TestError('Failed to handle chunk: %s' % e)
+
+        return recorded_file
+
+
+    # ---------------------------------------------------------------
+    # Definitions of all bluetooth audio test cases
+    # ---------------------------------------------------------------
+
+
+    @test_retry_and_log(False)
+    def test_select_audio_input_device(self, device_name):
+        """Select the audio input device for the DUT.
+
+        @param device_name: the audio input device to be selected.
+
+        @returns: True on success. Raises otherwise.
+        """
+        desc = 'waiting for cras to select audio input device'
+        logging.debug(desc)
+        self._poll_for_condition(
+                lambda: self.bluetooth_facade.select_input_device(device_name),
+                desc=desc)
+        return True
+
+
+    @test_retry_and_log(False)
+    def test_select_audio_output_node_bluetooth(self):
+        """Select the Bluetooth device as output node.
+
+        @returns: True on success. False otherwise.
+        """
+        return self._test_select_audio_output_node(
+                self.CRAS_BLUETOOTH_OUTPUT_NODE_TYPE)
+
+
+    @test_retry_and_log(False)
+    def test_select_audio_output_node_internal_speaker(self):
+        """Select the internal speaker as output node.
+
+        @returns: True on success. False otherwise.
+        """
+        return self._test_select_audio_output_node(
+                self.CRAS_INTERNAL_SPEAKER_OUTPUT_NODE_TYPE)
+
+
+    def _test_select_audio_output_node(self, node_type=None):
+        """Select the audio output node through cras.
+
+        @param node_type: a str representing node type defined in
+                          CRAS_NODE_TYPES.
+        @raises: error.TestError if failed.
+
+        @returns: True if selecting the given node succeeded.
+        """
+        def node_type_selected(node_type):
+            """Check if the given node type is selected."""
+            selected = self.bluetooth_facade.get_selected_output_device_type()
+            logging.debug('active output node type: %s, expected %s', selected,
+                          node_type)
+            return selected == node_type
+
+        desc = 'waiting for bluetooth_facade.select_output_node()'
+        self._poll_for_condition(
+                lambda: self.bluetooth_facade.select_output_node(node_type),
+                desc=desc)
+
+        desc = ('waiting for %s as active cras audio output node type' %
+                node_type)
+        logging.debug(desc)
+        self._poll_for_condition(lambda: node_type_selected(node_type),
+                                 desc=desc)
+
+        return True
+
+
+    @test_retry_and_log(False)
+    def test_audio_is_alive_on_dut(self):
+        """Test that if the audio stream is alive on the DUT.
+
+        @returns: True if the audio summary is found on the DUT.
+        """
+        summary = self.bluetooth_facade.get_audio_thread_summary()
+        result = bool(summary)
+
+        # If we can find a summary that looks like: "Summary: Output
+        # device [Silent playback device.] 4096 48000 2  Summary: Output stream
+        # CRAS_CLIENT_TYPE_TEST CRAS_STREAM_TYPE_DEFAULT 480 240 0x0000 48000
+        # 2 0" this means that there's an audio stream alive on the DUT.
+        desc = " ".join(str(line) for line in summary)
+        logging.debug('find summary: %s', desc)
+
+        self.results = {'test_audio_is_alive_on_dut': result}
+        return all(self.results.values())
+
+
+    @test_retry_and_log(False)
+    def test_check_chunks(self,
+                          device,
+                          test_profile,
+                          test_data,
+                          duration,
+                          check_legitimacy=True,
+                          check_frequencies=True):
+        """Check chunks of recorded streams and verify the primary frequencies.
 
         @param device: the bluetooth peer device
         @param test_profile: the a2dp test profile;
                              choices are A2DP and A2DP_LONG
         @param test_data: the test data of the test profile
         @param duration: the duration of the audio file to test
+        @param check_legitimacy: set this to True to run the
+                                _check_audio_frames_legitimacy check
+        @param check_frequencies: set this to True to run the
+                                 _check_primary_frequencies check
 
         @returns: True if all chunks pass the frequencies check.
         """
@@ -633,171 +618,279 @@
         nchunks = duration // chunk_in_secs
         logging.info('Number of chunks: %d', nchunks)
 
-        all_chunks_test_result = True
+        check_audio_frames_legitimacy = True
+        check_primary_frequencies = True
         for i in range(nchunks):
-            logging.info('Handle chunk %d', i)
-            recorded_file = device.HandleOneChunk(chunk_in_secs, i,
-                                                  test_profile, self.host.ip)
+            logging.debug('Check chunk %d', i)
+
+            recorded_file = self.handle_one_chunk(device, chunk_in_secs, i,
+                                                  test_profile)
             if recorded_file is None:
                 raise error.TestError('Failed to handle chunk %d' % i)
 
+            if check_legitimacy:
+                # Check if the audio frames in the recorded file are legitimate.
+                if not self._check_audio_frames_legitimacy(
+                        test_data, 'recorded_by_peer', recorded_file=recorded_file):
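+                    # Failures in the last IGNORE_LAST_FEW_CHUNKS chunks are
+                    # logged and ignored rather than failing the check.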
+                    if (i > self.IGNORE_LAST_FEW_CHUNKS and
+                            i >= nchunks - self.IGNORE_LAST_FEW_CHUNKS):
+                        logging.info('empty chunk %d ignored for last %d chunks',
+                                     i, self.IGNORE_LAST_FEW_CHUNKS)
+                    else:
+                        check_audio_frames_legitimacy = False
+                    break
+
+            if check_frequencies:
+                # Check if the primary frequencies of the recorded file
+                # meet expectation.
+                if not self._check_primary_frequencies(
+                        test_profile,
+                        test_data,
+                        'recorded_by_peer',
+                        recorded_file=recorded_file):
+                    if (i > self.IGNORE_LAST_FEW_CHUNKS and
+                            i >= nchunks - self.IGNORE_LAST_FEW_CHUNKS):
+                        msg = 'partially filled chunk %d ignored for last %d chunks'
+                        logging.info(msg, i, self.IGNORE_LAST_FEW_CHUNKS)
+                    else:
+                        check_primary_frequencies = False
+                    break
+
+        self.results = dict()
+        if check_legitimacy:
+            self.results['check_audio_frames_legitimacy'] = (
+                    check_audio_frames_legitimacy)
+
+        if check_frequencies:
+            self.results['check_primary_frequencies'] = (
+                    check_primary_frequencies)
+
+        return all(self.results.values())
+
+
+    @test_retry_and_log(False)
+    def test_check_empty_chunks(self, device, test_data, duration,
+                                test_profile):
+        """Check if all the chunks are empty.
+
+        @param device: The Bluetooth peer device.
+        @param test_data: The test data of the test profile.
+        @param duration: The duration of the audio file to test.
+        @param test_profile: Which audio profile is used. Profiles are defined
+                             in bluetooth_audio_test_data.py.
+
+        @returns: True if all the chunks are empty.
+        """
+        chunk_in_secs = test_data['chunk_in_secs']
+        if not bool(chunk_in_secs):
+            chunk_in_secs = self.DEFAULT_CHUNK_IN_SECS
+        nchunks = duration // chunk_in_secs
+        logging.info('Number of chunks: %d', nchunks)
+
+        all_chunks_empty = True
+        for i in range(nchunks):
+            logging.info('Check chunk %d', i)
+
+            recorded_file = self.handle_one_chunk(device, chunk_in_secs, i,
+                                                  test_profile)
+            if recorded_file is None:
+                raise error.TestError('Failed to handle chunk %d' % i)
+
             # Check if the audio frames in the recorded file are legitimate.
-            if not self._check_audio_frames_legitimacy(
-                    test_data, 'recorded_by_peer', recorded_file=recorded_file):
+            if self._check_audio_frames_legitimacy(
+                    test_data, 'recorded_by_peer', recorded_file):
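+                # A chunk containing legitimate audio frames is not empty,
+                # which fails the expectation unless it is one of the last
+                # few chunks.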
                 if (i > self.IGNORE_LAST_FEW_CHUNKS and
                         i >= nchunks - self.IGNORE_LAST_FEW_CHUNKS):
                     logging.info('empty chunk %d ignored for last %d chunks',
                                  i, self.IGNORE_LAST_FEW_CHUNKS)
                 else:
-                    all_chunks_test_result = False
+                    all_chunks_empty = False
                 break
 
-            # Check if the primary frequencies of the recorded file
-            # meet expectation.
-            if not self._check_primary_frequencies(A2DP, test_data,
-                                                   'recorded_by_peer',
-                                                   recorded_file=recorded_file):
-                if (i > self.IGNORE_LAST_FEW_CHUNKS and
-                        i >= nchunks - self.IGNORE_LAST_FEW_CHUNKS):
-                    msg = 'partially filled chunk %d ignored for last %d chunks'
-                    logging.info(msg, i, self.IGNORE_LAST_FEW_CHUNKS)
-                else:
-                    all_chunks_test_result = False
-                break
+        self.results = {'all chunks are empty': all_chunks_empty}
 
-        return all_chunks_test_result
-
-
-    # ---------------------------------------------------------------
-    # Definitions of all bluetooth audio test cases
-    # ---------------------------------------------------------------
+        return all(self.results.values())
 
 
     @test_retry_and_log(False)
-    def test_hfp_dut_as_source_visqol_score(self, device, test_profile):
-        """Test Case: hfp test files streaming from peer device to dut
+    def test_check_audio_file(self,
+                              device,
+                              test_profile,
+                              test_data,
+                              recording_device,
+                              check_legitimacy=True,
+                              check_frequencies=True):
+        """Check the audio file and verify the primary frequencies.
 
-        @param device: the bluetooth peer device
-        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS
+        @param device: the Bluetooth peer device.
+        @param test_profile: A2DP or HFP test profile.
+        @param test_data: the test data of the test profile.
+        @param recording_device: which device recorded the audio,
+                possible values are 'recorded_by_dut' or 'recorded_by_peer'.
+        @param check_legitimacy: if set to True, run the
+                                _check_audio_frames_legitimacy check.
+        @param check_frequencies: if set to True, run the
+                                 _check_primary_frequencies check.
 
-        @returns: True if the all the test files score at or above their
-                  source_passing_score value as defined in
-                  bluetooth_audio_test_data.py
+        @returns: True if the audio file passes the enabled checks.
         """
-        # list of test wav files
-        hfp_test_data = audio_test_data[test_profile]
-        test_files = hfp_test_data['visqol_test_files']
+        if recording_device == 'recorded_by_peer':
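+            # The file was recorded on the peer device; copy it to the DUT
+            # for analysis.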
+            logging.debug('Scp to DUT')
+            try:
+                recorded_file = test_data[recording_device]
+                self._scp_to_dut(device, recorded_file, recorded_file)
+                logging.debug('Copied {} to the DUT successfully'.format(
+                        recorded_file))
+            except Exception as e:
+                raise error.TestError(
+                        'Failed to copy the recorded file to the DUT: %s' % e)
 
-        get_visqol_binary()
-        get_audio_test_data()
+        self.results = dict()
+        if check_legitimacy:
+            self.results['check_audio_frames_legitimacy'] = (
+                    self._check_audio_frames_legitimacy(
+                            test_data, recording_device))
 
-        # Download test data to DUT
-        self.host.send_file(AUDIO_DATA_TARBALL_PATH, AUDIO_DATA_TARBALL_PATH)
-        if not self.bluetooth_facade.unzip_audio_test_data(
-                AUDIO_DATA_TARBALL_PATH, DATA_DIR):
-            logging.error('Audio data directory not found in DUT')
-            raise error.TestError('Failed to unzip audio test data to DUT')
+        if check_frequencies:
+            self.results['check_primary_frequencies'] = (
+                    self._check_primary_frequencies(
+                            test_profile, test_data, recording_device))
 
-        # Result of visqol test on all files
-        visqol_results = dict()
-
-        for test_file in test_files:
-            filename = os.path.split(test_file['file'])[1]
-            logging.debug('Testing file: {}'.format(filename))
-
-            # Set up hfp test to record on peer
-            self.initialize_hfp(device, test_profile, test_file,
-                                'recorded_by_peer',
-                                self._get_pulseaudio_bluez_source_hfp)
-            logging.debug('Initialized HFP')
-
-            if not self.hfp_record_on_peer(device, test_profile, test_file):
-                return False
-            logging.debug('Recorded {} successfully'.format(filename))
-
-            ref_file, deg_file = self.format_recorded_file(test_file,
-                                                           test_profile,
-                                                           'recorded_by_peer')
-            if not ref_file or not deg_file:
-                desc = 'Failed to get ref and deg file: ref {}, deg {}'.format(
-                        ref_file, deg_file)
-                raise error.TestError(desc)
-
-            score = self.get_visqol_score(ref_file, deg_file,
-                                          speech_mode=test_file['speech_mode'])
-
-            logging.info('{} scored {}, min passing score: {}'.format(
-                    filename, score, test_file['source_passing_score']))
-            passed = score >= test_file['source_passing_score']
-            visqol_results[filename] = passed
-
-            if not passed:
-                logging.warning('Failed: {}'.format(filename))
-
-        return all(visqol_results.values())
+        return all(self.results.values())
 
 
     @test_retry_and_log(False)
-    def test_hfp_dut_as_sink_visqol_score(self, device, test_profile):
-        """Test Case: hfp test files streaming from peer device to dut
+    def test_dut_to_start_playing_audio_subprocess(self,
+                                                   test_data,
+                                                   pin_device=None):
+        """Start playing audio in a subprocess.
+
+        @param test_data: the audio test data
+        @param pin_device: the id of the device to pin the audio stream to,
+                or None to play to the selected output node.
+
+        @returns: True on success. False otherwise.
+        """
+        start_playing_audio = self.bluetooth_facade.start_playing_audio_subprocess(
+                test_data, pin_device)
+        self.results = {
+                'dut_to_start_playing_audio_subprocess': start_playing_audio
+        }
+        return all(self.results.values())
+
+    @test_retry_and_log(False)
+    def test_dut_to_stop_playing_audio_subprocess(self):
+        """Stop playing audio in the subprocess.
+
+        @returns: True on success. False otherwise.
+        """
+        stop_playing_audio = (
+                self.bluetooth_facade.stop_playing_audio_subprocess())
+
+        self.results = {
+                'dut_to_stop_playing_audio_subprocess': stop_playing_audio
+        }
+        return all(self.results.values())
+
+    @test_retry_and_log(False)
+    def test_dut_to_start_capturing_audio_subprocess(self, audio_data,
+                                                     recording_device):
+        """Start capturing audio in a subprocess.
+
+        @param audio_data: the audio test data
+        @param recording_device: which device recorded the audio,
+                possible values are 'recorded_by_dut' or 'recorded_by_peer'
+
+        @returns: True on success. False otherwise.
+        """
+        # Setting duration to None lets the DUT capture the audio stream
+        # until it is stopped explicitly. This is required on some slower
+        # devices.
+        audio_data = audio_data.copy()
+        audio_data.update({'duration': None})
+
+        start_capturing_audio = self.bluetooth_facade.start_capturing_audio_subprocess(
+                audio_data, recording_device)
+        self.results = {
+                'dut_to_start_capturing_audio_subprocess':
+                start_capturing_audio
+        }
+        return all(self.results.values())
+
+    @test_retry_and_log(False)
+    def test_dut_to_stop_capturing_audio_subprocess(self):
+        """Stop capturing audio.
+
+        @returns: True on success. False otherwise.
+        """
+        stop_capturing_audio = (
+                self.bluetooth_facade.stop_capturing_audio_subprocess())
+
+        self.results = {
+                'dut_to_stop_capturing_audio_subprocess': stop_capturing_audio
+        }
+        return all(self.results.values())
+
+    @test_retry_and_log(False)
+    def test_device_to_start_playing_audio_subprocess(self, device,
+                                                      test_profile, test_data):
+        """Start playing the audio file in a subprocess.
 
         @param device: the bluetooth peer device
-        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS
+        @param test_profile: the audio profile, either a2dp, hfp_wbs, or hfp_nbs
+        @param test_data: the audio file to play and data about the file
 
-        @returns: True if the all the test files score at or above their
-                  sink_passing_score value as defined in
-                  bluetooth_audio_test_data.py
+        @returns: True on success. False otherwise.
         """
-        # list of test wav files
-        hfp_test_data = audio_test_data[test_profile]
-        test_files = hfp_test_data['visqol_test_files']
+        start_playing_audio = device.StartPlayingAudioSubprocess(
+                test_profile, test_data)
+        self.results = {
+                'device_to_start_playing_audio_subprocess': start_playing_audio
+        }
+        return all(self.results.values())
 
-        get_visqol_binary()
-        get_audio_test_data()
-        self.host.send_file(AUDIO_DATA_TARBALL_PATH, AUDIO_DATA_TARBALL_PATH)
-        if not self.bluetooth_facade.unzip_audio_test_data(
-                AUDIO_DATA_TARBALL_PATH, DATA_DIR):
-            logging.error('Audio data directory not found in DUT')
-            raise error.TestError('Failed to unzip audio test data to DUT')
+    @test_retry_and_log(False)
+    def test_device_to_stop_playing_audio_subprocess(self, device):
+        """Stop playing the audio file in a subprocess.
 
-        # Result of visqol test on all files
-        visqol_results = dict()
+        @param device: the bluetooth peer device
 
-        for test_file in test_files:
-            filename = os.path.split(test_file['file'])[1]
-            logging.debug('Testing file: {}'.format(filename))
+        @returns: True on success. False otherwise.
+        """
+        stop_playing_audio = device.StopPlayingAudioSubprocess()
+        self.results = {
+                'device_to_stop_playing_audio_subprocess': stop_playing_audio
+        }
+        return all(self.results.values())
 
-            # Set up hfp test to record on dut
-            self.initialize_hfp(device, test_profile, test_file,
-                                'recorded_by_dut',
-                                self._get_pulseaudio_bluez_sink_hfp)
-            logging.debug('Initialized HFP')
-            # Record audio on dut played from pi, returns true if anything
-            # was successfully recorded, false otherwise
-            if not self.hfp_record_on_dut(device, test_profile, test_file):
-                return False
-            logging.debug('Recorded {} successfully'.format(filename))
+    @test_retry_and_log(False)
+    def test_device_to_start_recording_audio_subprocess(
+            self, device, test_profile, test_data):
+        """Start recording audio in a subprocess.
 
-            ref_file, deg_file = self.format_recorded_file(test_file,
-                                                           test_profile,
-                                                           'recorded_by_dut')
-            if not ref_file or not deg_file:
-                desc = 'Failed to get ref and deg file: ref {}, deg {}'.format(
-                        ref_file, deg_file)
-                raise error.TestError(desc)
+        @param device: the bluetooth peer device
+        @param test_profile: the audio profile used to get the recording settings
+        @param test_data: the details of the file being recorded
 
-            score = self.get_visqol_score(ref_file, deg_file,
-                                          speech_mode=test_file['speech_mode'])
+        @returns: True on success. False otherwise.
+        """
+        start_recording_audio = device.StartRecordingAudioSubprocess(
+                test_profile, test_data)
+        self.results = {
+                'device_to_start_recording_audio_subprocess':
+                start_recording_audio
+        }
+        return all(self.results.values())
 
-            logging.info('{} scored {}, min passing score: {}'.format(
-                    filename, score, test_file['sink_passing_score']))
-            passed = score >= test_file['sink_passing_score']
-            visqol_results[filename] = passed
+    @test_retry_and_log(False)
+    def test_device_to_stop_recording_audio_subprocess(self, device):
+        """Stop the recording subprocess.
 
-            if not passed:
-                logging.warning('Failed: {}'.format(filename))
+        @param device: the bluetooth peer device
+
+        @returns: True on success. False otherwise.
+        """
+        stop_recording_audio = device.StopRecordingingAudioSubprocess()
+        self.results = {
+                'device_to_stop_recording_audio_subprocess':
+                stop_recording_audio
+        }
+        return all(self.results.values())
 
-        return all(visqol_results.values())
 
     @test_retry_and_log(False)
     def test_device_a2dp_connected(self, device, timeout=15):
@@ -812,123 +905,98 @@
 
         return all(self.results.values())
 
-    @test_retry_and_log(False)
-    def test_a2dp_sinewaves(self, device, test_profile, duration):
-        """Test Case: a2dp sinewaves
-
-        @param device: the bluetooth peer device
-        @param test_profile: the a2dp test profile;
-                             choices are A2DP and A2DP_LONG
-        @param duration: the duration of the audio file to test
-                         0 means to use the default value in the test profile
-
-        @returns: True if the recorded primary frequency is within the
-                  tolerance of the playback sine wave frequency.
-
-        """
-        # Make a copy since the test_data may be formatted with distinct
-        # arguments in the follow-up tests.
-        test_data = audio_test_data[test_profile].copy()
-        if bool(duration):
-            test_data['duration'] = duration
-        else:
-            duration = test_data['duration']
-
-        test_data['file'] %= duration
-        logging.info('%s test for %d seconds.', test_profile, duration)
-
-        # Wait for pulseaudio a2dp bluez source
-        desc = 'waiting for pulseaudio a2dp bluez source'
-        logging.debug(desc)
-        self._poll_for_condition(
-                lambda: self._get_pulseaudio_bluez_source_a2dp(device,
-                                                               test_profile),
-                desc=desc)
-
-        # Select audio output node so that we do not rely on chrome to do it.
-        self.select_audio_output_node()
-
-        # Start recording audio on the peer Bluetooth audio device.
-        logging.debug('Start recording a2dp')
-        if not device.StartRecordingAudioSubprocess(test_profile, test_data):
-            raise error.TestError(
-                    'Failed to record on the peer Bluetooth audio device.')
-
-        # Play audio on the DUT in a non-blocked way and check the recorded
-        # audio stream in a real-time manner.
-        logging.debug('Start playing audio')
-        if not self.bluetooth_facade.start_playing_audio_subprocess(test_data):
-            raise error.TestError('DUT failed to play audio.')
-
-        # Handle chunks of recorded streams and verify the primary frequencies.
-        # This is a blocking call until all chunks are completed.
-        all_chunks_test_result = self.handle_chunks(device, test_profile,
-                                                    test_data, duration)
-
-        # Stop recording audio on the peer Bluetooth audio device.
-        logging.debug('Stop recording a2dp')
-        if not device.StopRecordingingAudioSubprocess():
-            msg = 'Failed to stop recording on the peer Bluetooth audio device'
-            logging.error(msg)
-
-        # Stop playing audio on DUT.
-        logging.debug('Stop playing audio on DUT')
-        if not self.bluetooth_facade.stop_playing_audio_subprocess():
-            raise error.TestError('DUT failed to stop playing audio.')
-
-        return all_chunks_test_result
 
     @test_retry_and_log(False)
-    def test_hfp_dut_as_source(self, device, test_profile):
-        """Test Case: hfp sinewave streaming from dut to peer device
+    def test_hfp_connected(self,
+                           bluez_function,
+                           device,
+                           test_profile,
+                           timeout=15):
+        """Tests HFP profile is connected.
 
-        @param device: the bluetooth peer device
-        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS
+        @param bluez_function: the appropriate bluez HFP function either
+                _get_pulseaudio_bluez_source_hfp or
+                _get_pulseaudio_bluez_sink_hfp depending on the role of the DUT.
+        @param device: the Bluetooth peer device.
+        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS.
+        @param timeout: number of seconds to wait before giving up connecting
+                        to HFP profile.
 
-        @returns: True if the recorded primary frequency is within the
-                  tolerance of the playback sine wave frequency.
+        @returns: True on success. False otherwise.
         """
-        hfp_test_data = audio_test_data[test_profile]
+        check_connection = lambda: bluez_function(device, test_profile)
+        is_connected = self._wait_for_condition(check_connection,
+                                                'test_hfp_connected',
+                                                timeout=timeout)
+        self.results = {'peer hfp connected': is_connected}
 
-        self.initialize_hfp(device, test_profile, hfp_test_data,
-                            'recorded_by_peer',
-                            self._get_pulseaudio_bluez_source_hfp)
-
-        if not self.hfp_record_on_peer(device, test_profile, hfp_test_data):
-            return False
-
-        # Check if the primary frequencies of recorded file meet expectation.
-        check_freq_result = self._check_primary_frequencies(
-                test_profile, hfp_test_data, 'recorded_by_peer')
-        return check_freq_result
+        return all(self.results.values())
 
 
     @test_retry_and_log(False)
-    def test_hfp_dut_as_sink(self, device, test_profile):
-        """Test Case: hfp sinewave streaming from peer device to dut
+    def test_send_audio_to_dut_and_unzip(self):
+        """Send the audio file to the DUT and unzip it.
 
-        @param device: the bluetooth peer device
-        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS
-
-        @returns: True if the recorded primary frequency is within the
-                  tolerance of the playback sine wave frequency.
-
+        @returns: True on success. False otherwise.
         """
-        hfp_test_data = audio_test_data[test_profile]
+        try:
+            self.host.send_file(AUDIO_DATA_TARBALL_PATH,
+                                AUDIO_DATA_TARBALL_PATH)
+        except Exception as e:
+            raise error.TestError('Failed to send file to the DUT: %s' % e)
 
-        # Set up hfp test to record on dut
-        self.initialize_hfp(device, test_profile, hfp_test_data,
-                            'recorded_by_dut',
-                            self._get_pulseaudio_bluez_sink_hfp)
+        unzip_success = self.bluetooth_facade.unzip_audio_test_data(
+                AUDIO_DATA_TARBALL_PATH, DATA_DIR)
 
-        # Record audio on dut play from pi, returns true if anything recorded
-        if not self.hfp_record_on_dut(device, test_profile, hfp_test_data):
-            return False
+        self.results = {'unzip audio file': unzip_success}
 
-        # Check if the primary frequencies of recorded file meet expectation.
-        check_freq_result = self._check_primary_frequencies(
-                test_profile, hfp_test_data, 'recorded_by_dut')
-        return check_freq_result
+        return all(self.results.values())
+
+
+    @test_retry_and_log(False)
+    def test_get_visqol_score(self, test_file, test_profile, recording_device):
+        """Test that if the recorded audio file meets the passing score.
+
+        This function also records the visqol performance.
+
+        @param test_file: the dictionary describing the audio test file, as
+                defined in bluetooth_audio_test_data.py.
+        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS.
+        @param recording_device: which device recorded the audio,
+                possible values are 'recorded_by_dut' or 'recorded_by_peer'.
+
+        @returns: True if the test file scores at or above the corresponding
+                  source or sink passing score value as defined in
+                  bluetooth_audio_test_data.py.
+        """
+        dut_role = 'sink' if recording_device == 'recorded_by_dut' else 'source'
+        filename = os.path.split(test_file['file'])[1]
+
+        ref_file, deg_file = self.format_recorded_file(test_file, test_profile,
+                                                       recording_device)
+        if not ref_file or not deg_file:
+            desc = 'Failed to get ref and deg file: ref {}, deg {}'.format(
+                    ref_file, deg_file)
+            raise error.TestError(desc)
+
+        score = self.get_visqol_score(ref_file,
+                                      deg_file,
+                                      speech_mode=test_file['speech_mode'])
+
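+        # Use 'source_passing_score' or 'sink_passing_score' depending on
+        # the role of the DUT.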
+        key = ''.join((dut_role, '_passing_score'))
+        logging.info('{} scored {}, min passing score: {}'.format(
+                filename, score, test_file[key]))
+        passed = score >= test_file[key]
+        self.results = {filename: passed}
+
+        # Track visqol performance
+        test_desc = '{}_{}_{}'.format(test_profile, dut_role,
+                                      test_file['reporting_type'])
+        self.write_perf_keyval({test_desc: score})
+
+        if not passed:
+            logging.warning('Failed: {}'.format(filename))
+
+        return all(self.results.values())
 
 
     @test_retry_and_log(False)
@@ -1049,3 +1117,595 @@
                         'artist': result_artist, 'title': result_title,
                         'length': result_length}
         return all(self.results.values())
+
+
+    # ---------------------------------------------------------------
+    # Definitions of all bluetooth audio test sequences
+    # ---------------------------------------------------------------
+
+    def test_a2dp_sinewaves(self, device, test_profile, duration):
+        """Test Case: a2dp sinewaves
+
+        @param device: the bluetooth peer device
+        @param test_profile: the a2dp test profile;
+                             choices are A2DP and A2DP_LONG
+        @param duration: the duration of the audio file to test
+                         0 means to use the default value in the test profile
+
+        """
+        # Make a copy since the test_data may be formatted with distinct
+        # arguments in the follow-up tests.
+        test_data = audio_test_data[test_profile].copy()
+        if bool(duration):
+            test_data['duration'] = duration
+        else:
+            duration = test_data['duration']
+
+        test_data['file'] %= duration
+        logging.info('%s test for %d seconds.', test_profile, duration)
+
+        # Wait for pulseaudio a2dp bluez source
+        self.test_device_a2dp_connected(device)
+
+        # Select audio output node so that we do not rely on chrome to do it.
+        self.test_select_audio_output_node_bluetooth()
+
+        # Start recording audio on the peer Bluetooth audio device.
+        self.test_device_to_start_recording_audio_subprocess(
+                device, test_profile, test_data)
+
+        # Play audio on the DUT in a non-blocked way and check the recorded
+        # audio stream in a real-time manner.
+        self.test_dut_to_start_playing_audio_subprocess(test_data)
+
+        # Check chunks of recorded streams and verify the primary frequencies.
+        # This is a blocking call until all chunks are completed.
+        self.test_check_chunks(device, test_profile, test_data, duration)
+
+        # Stop recording audio on the peer Bluetooth audio device.
+        self.test_device_to_stop_recording_audio_subprocess(device)
+
+        # Stop playing audio on DUT.
+        self.test_dut_to_stop_playing_audio_subprocess()
+
+
+    def playback_and_connect(self, device, test_profile):
+        """Connect then disconnect an A2DP device while playing stream.
+
+        This test first plays the audio stream and then selects the BT device
+        as output node, checking if the stream has routed to the BT device.
+        After that, disconnect the BT device and also check whether the stream
+        closes on it gracefully.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: to select which A2DP test profile is used.
+        """
+        test_data = audio_test_data[test_profile]
+
+        # TODO(b/207046142): Remove the old version fallback after the new
+        # Chameleon bundle is deployed.
+        # Currently the BT audio tests store test profile parameters in the
+        # Chameleon bundle. However, we decided to move the test profiles to
+        # the server test. During the transition, the new test code may
+        # interact with an old/existing Chameleon bundle, which does not have
+        # the A2DP_MEDIUM profile. We use a trick here: override the passed-in
+        # test_profile with A2DP so that Chameleon can look up the profile,
+        # and override the three parameters locally to make it an A2DP_MEDIUM
+        # profile.
+        test_profile = A2DP
+        test_data = audio_test_data[test_profile].copy()
+        test_data['duration'] = 60
+        test_data['chunk_checking_duration'] = 5
+        test_data['chunk_in_secs'] = 1
+
+        # Start playing audio on the DUT.
+        self.test_dut_to_start_playing_audio_subprocess(test_data)
+
+        # Connect the Bluetooth device.
+        self.test_device_set_discoverable(device, True)
+        self.test_discover_device(device.address)
+        self.test_pairing(device.address, device.pin, trusted=True)
+        self.test_connection_by_adapter(device.address)
+        self.test_device_a2dp_connected(device)
+
+        # Select Bluetooth as output node.
+        self.test_select_audio_output_node_bluetooth()
+
+        self.test_device_to_start_recording_audio_subprocess(
+                device, test_profile, test_data)
+
+        # Handle chunks of recorded streams and verify the primary frequencies.
+        # This is a blocking call until all chunks are completed.
+        self.test_check_chunks(device, test_profile, test_data,
+                               test_data['chunk_checking_duration'])
+
+        self.test_device_to_stop_recording_audio_subprocess(device)
+
+        self.test_select_audio_output_node_internal_speaker()
+
+        # Check if the device disconnects successfully.
+        self.expect_test(False, self.test_device_a2dp_connected, device)
+
+        self.test_dut_to_stop_playing_audio_subprocess()
+
+
+    def playback_and_disconnect(self, device, test_profile):
+        """Disconnect the Bluetooth device while the stream is playing.
+
+        This test keeps the stream playing and then disconnects the
+        Bluetooth device. The goal is to check that the stream is still alive
+        after the Bluetooth device is disconnected.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: to select which A2DP test profile is used.
+        """
+        test_data = audio_test_data[test_profile]
+
+        # TODO(b/207046142): Remove the old version fallback after the new
+        # Chameleon bundle is deployed.
+        # Currently the BT audio tests store test profile parameters in the
+        # Chameleon bundle. However, we decided to move the test profiles to
+        # the server test. During the transition, the new test code may
+        # interact with an old/existing Chameleon bundle, which does not have
+        # the A2DP_MEDIUM profile. We use a trick here: override the passed-in
+        # test_profile with A2DP so that Chameleon can look up the profile,
+        # and override the three parameters locally to make it an A2DP_MEDIUM
+        # profile.
+        test_profile = A2DP
+        test_data = audio_test_data[test_profile].copy()
+        test_data['duration'] = 60
+        test_data['chunk_checking_duration'] = 5
+        test_data['chunk_in_secs'] = 1
+
+        # Connect the Bluetooth device.
+        self.test_device_set_discoverable(device, True)
+        self.test_discover_device(device.address)
+        self.test_pairing(device.address, device.pin, trusted=True)
+        self.test_connection_by_adapter(device.address)
+        self.test_device_a2dp_connected(device)
+
+        # Select Bluetooth as output node.
+        self.test_select_audio_output_node_bluetooth()
+
+        self.test_device_to_start_recording_audio_subprocess(
+                device, test_profile, test_data)
+
+        # Start playing audio on the DUT.
+        self.test_dut_to_start_playing_audio_subprocess(test_data)
+
+        # Handle chunks of recorded streams and verify the primary frequencies.
+        # This is a blocking call until all chunks are completed.
+        self.test_check_chunks(device, test_profile, test_data,
+                               test_data['chunk_checking_duration'])
+
+        self.test_device_to_stop_recording_audio_subprocess(device)
+
+        # Disconnect the Bluetooth device.
+        self.test_disconnection_by_adapter(device.address)
+
+        # Obtain audio thread summary to check if the audio stream is still
+        # alive.
+        self.test_audio_is_alive_on_dut()
+
+        # Stop playing audio on the DUT.
+        self.test_dut_to_stop_playing_audio_subprocess()
+
+
+    def playback_back2back(self, device, test_profile):
+        """Repeat to start and stop the playback stream several times.
+
+        This test repeats to start and stop the playback stream and verify
+        that the Bluetooth device receives the stream correctly.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: to select which A2DP test profile is used.
+        """
+        test_data = audio_test_data[test_profile]
+
+        # TODO(b/207046142): Remove the old version fallback after the new
+        # Chameleon bundle is deployed.
+        # Currently the BT audio tests store test profile parameters in the
+        # Chameleon bundle. However, we decided to move the test profiles to
+        # the server test. During the transition, the new test code may
+        # interact with an old/existing Chameleon bundle, which does not have
+        # the A2DP_MEDIUM profile. We use a trick here: override the passed-in
+        # test_profile with A2DP so that Chameleon can look up the profile,
+        # and override the three parameters locally to make it an A2DP_MEDIUM
+        # profile.
+        test_profile = A2DP
+        test_data = audio_test_data[test_profile].copy()
+        test_data['duration'] = 60
+        test_data['chunk_checking_duration'] = 5
+        test_data['chunk_in_secs'] = 1
+
+        self.test_device_set_discoverable(device, True)
+        self.test_discover_device(device.address)
+        self.test_pairing(device.address, device.pin, trusted=True)
+        self.test_connection_by_adapter(device.address)
+
+        self.test_device_a2dp_connected(device)
+        self.test_select_audio_output_node_bluetooth()
+
+        for _ in range(3):
+            # TODO(b/208165757): If we start recording the audio stream before
+            # playing, there will be an audio blank of about 1~2 sec at the
+            # beginning of the recorded file, which makes the chunk checking
+            # fail. Need to fix this problem in the future.
+            self.test_dut_to_start_playing_audio_subprocess(test_data)
+            self.test_device_to_start_recording_audio_subprocess(
+                    device, test_profile, test_data)
+            self.test_check_chunks(device, test_profile, test_data,
+                                   test_data['chunk_checking_duration'])
+            self.test_dut_to_stop_playing_audio_subprocess()
+            self.test_device_to_stop_recording_audio_subprocess(device)
+
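+            # With playback stopped, the newly recorded chunks should all be
+            # empty.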
+            self.test_device_to_start_recording_audio_subprocess(
+                    device, test_profile, test_data)
+            self.test_check_empty_chunks(device, test_data,
+                                         test_data['chunk_checking_duration'],
+                                         test_profile)
+            self.test_device_to_stop_recording_audio_subprocess(device)
+
+        self.test_disconnection_by_adapter(device.address)
+
+
+    def pinned_playback(self, device, test_profile):
+        """Play an audio stream that is pinned to the Bluetooth device.
+
+        This test does not choose Bluetooth as the output node but directly
+        plays the sound that is pinned to the Bluetooth device and checks
+        whether it receives the audio stream correctly.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: to select which A2DP test profile is used.
+        """
+        test_data = audio_test_data[test_profile]
+
+        self.test_device_set_discoverable(device, True)
+        self.test_discover_device(device.address)
+        self.test_pairing(device.address, device.pin, trusted=True)
+        self.test_connection_by_adapter(device.address)
+
+        self.test_device_a2dp_connected(device)
+        self.test_device_to_start_recording_audio_subprocess(
+                device, test_profile, test_data)
+
+        # We do not select Bluetooth as the output node; instead, we play
+        # audio pinned directly to the Bluetooth device.
+        device_id = self.bluetooth_facade.get_device_id_from_node_type(
+                self.CRAS_BLUETOOTH_OUTPUT_NODE_TYPE, False)
+        logging.info("Bluetooth device id for audio stream output: %s",
+                     device_id)
+        self.test_dut_to_start_playing_audio_subprocess(test_data, device_id)
+        self.test_check_chunks(device, test_profile, test_data,
+                               test_data['duration'])
+        self.test_dut_to_stop_playing_audio_subprocess()
+        self.test_device_to_stop_recording_audio_subprocess(device)
+        self.test_disconnection_by_adapter(device.address)
+
+
+    def hfp_dut_as_source_visqol_score(self, device, test_profile):
+        """Test Case: HFP test files streaming from peer device to the DUT.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS.
+        """
+        # list of test wav files
+        hfp_test_data = audio_test_data[test_profile]
+        test_files = hfp_test_data['visqol_test_files']
+
+        get_visqol_binary()
+        get_audio_test_data()
+
+        # Download test data to the DUT.
+        self.test_send_audio_to_dut_and_unzip()
+
+        for test_file in test_files:
+            filename = os.path.split(test_file['file'])[1]
+            logging.debug('Testing file: {}'.format(filename))
+
+            self.test_select_audio_input_device(device.name)
+            self.test_select_audio_output_node_bluetooth()
+
+            # Enable HFP profile.
+            self.test_dut_to_start_capturing_audio_subprocess(
+                    test_file, 'recorded_by_peer')
+
+            # Wait for pulseaudio bluez hfp source/sink
+            self.test_hfp_connected(self._get_pulseaudio_bluez_source_hfp,
+                                    device, test_profile)
+
+            self.test_device_to_start_recording_audio_subprocess(
+                    device, test_profile, test_file)
+
+            # Play audio on the DUT in a non-blocked way.
+            # If there are issues, cras_test_client playback might be blocked
+            # forever. We would like to prevent the testing procedure from
+            # being blocked by that.
+            self.test_dut_to_start_playing_audio_subprocess(test_file)
+            time.sleep(test_file['chunk_checking_duration'])
+            self.test_dut_to_stop_playing_audio_subprocess()
+            self.test_device_to_stop_recording_audio_subprocess(device)
+
+            # Disable HFP profile.
+            self.test_dut_to_stop_capturing_audio_subprocess()
+
+            # Copy the recorded audio file to the DUT for spectrum analysis.
+            recorded_file = test_file['recorded_by_peer']
+            self._scp_to_dut(device, recorded_file, recorded_file)
+
+            self.test_get_visqol_score(test_file, test_profile,
+                                       'recorded_by_peer')
+
+
+    def hfp_dut_as_sink_visqol_score(self, device, test_profile):
+        """Test Case: HFP test files streaming from peer device to the DUT.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS.
+        """
+        # list of test wav files
+        hfp_test_data = audio_test_data[test_profile]
+        test_files = hfp_test_data['visqol_test_files']
+
+        get_visqol_binary()
+        get_audio_test_data()
+
+        # Download test data to the DUT.
+        self.test_send_audio_to_dut_and_unzip()
+
+        for test_file in test_files:
+            filename = os.path.split(test_file['file'])[1]
+            logging.debug('Testing file: {}'.format(filename))
+
+            self.test_select_audio_input_device(device.name)
+            self.test_select_audio_output_node_bluetooth()
+
+            # Enable HFP profile.
+            self.test_dut_to_start_capturing_audio_subprocess(
+                    test_file, 'recorded_by_dut')
+
+            # Wait for pulseaudio bluez hfp source/sink.
+            self.test_hfp_connected(self._get_pulseaudio_bluez_sink_hfp,
+                                    device, test_profile)
+
+            self.test_select_audio_input_device(device.name)
+
+            self.test_device_to_start_playing_audio_subprocess(
+                    device, test_profile, test_file)
+            time.sleep(test_file['chunk_checking_duration'])
+            self.test_device_to_stop_playing_audio_subprocess(device)
+
+            # Disable HFP profile.
+            self.test_dut_to_stop_capturing_audio_subprocess()
+            logging.debug('Recorded {} successfully'.format(filename))
+
+            self.test_get_visqol_score(test_file, test_profile,
+                                       'recorded_by_dut')
+
+
+    def hfp_dut_as_source(self, device, test_profile):
+        """Test Case: HFP sinewave streaming from the DUT to peer device.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS.
+        """
+        hfp_test_data = audio_test_data[test_profile]
+
+        self.test_select_audio_input_device(device.name)
+        self.test_select_audio_output_node_bluetooth()
+
+        # Enable HFP profile.
+        self.test_dut_to_start_capturing_audio_subprocess(
+                hfp_test_data, 'recorded_by_peer')
+
+        # Wait for pulseaudio bluez hfp source/sink
+        self.test_hfp_connected(self._get_pulseaudio_bluez_source_hfp, device,
+                                test_profile)
+
+        self.test_device_to_start_recording_audio_subprocess(
+                device, test_profile, hfp_test_data)
+        self.test_dut_to_start_playing_audio_subprocess(hfp_test_data)
+        time.sleep(hfp_test_data['chunk_checking_duration'])
+        self.test_dut_to_stop_playing_audio_subprocess()
+        self.test_device_to_stop_recording_audio_subprocess(device)
+        self.test_check_audio_file(device, test_profile, hfp_test_data,
+                                   'recorded_by_peer')
+
+        # Disable HFP profile.
+        self.test_dut_to_stop_capturing_audio_subprocess()
+
+
+    def hfp_dut_as_sink(self, device, test_profile):
+        """Test Case: HFP sinewave streaming from peer device to the DUT.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS.
+        """
+        hfp_test_data = audio_test_data[test_profile]
+
+        self.test_select_audio_input_device(device.name)
+        self.test_select_audio_output_node_bluetooth()
+
+        # Enable HFP profile.
+        self.test_dut_to_start_capturing_audio_subprocess(
+                hfp_test_data, 'recorded_by_dut')
+
+        # Wait for pulseaudio bluez hfp source/sink
+        self.test_hfp_connected(self._get_pulseaudio_bluez_sink_hfp, device,
+                                test_profile)
+
+        self.test_select_audio_input_device(device.name)
+
+        self.test_device_to_start_playing_audio_subprocess(
+                device, test_profile, hfp_test_data)
+        time.sleep(hfp_test_data['chunk_checking_duration'])
+        self.test_device_to_stop_playing_audio_subprocess(device)
+
+        # Disable HFP profile.
+        self.test_dut_to_stop_capturing_audio_subprocess()
+        self.test_check_audio_file(device, test_profile, hfp_test_data,
+                                   'recorded_by_dut')
+
+
+    def hfp_dut_as_source_back2back(self, device, test_profile):
+        """Play and stop the audio stream from DUT to Bluetooth peer device.
+
+        The test starts and then stops the stream playback three times. In
+        each iteration, it checks that the Bluetooth device successfully
+        receives the stream while it is playing, and also checks that the
+        stream is absent once playback stops.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: which test profile is used, HFP_WBS or HFP_NBS.
+        """
+        hfp_test_data = audio_test_data[test_profile]
+
+        # Select audio input device.
+        self.test_select_audio_input_device(device.name)
+
+        # Select audio output node so that we do not rely on chrome to do it.
+        self.test_select_audio_output_node_bluetooth()
+
+        # Enable HFP profile.
+        self.test_dut_to_start_capturing_audio_subprocess(hfp_test_data,
+                                                          'recorded_by_peer')
+
+        # Wait for pulseaudio bluez hfp source/sink
+        self.test_hfp_connected(
+                self._get_pulseaudio_bluez_source_hfp, device, test_profile)
+
+        for _ in range(3):
+            # TODO(b/208165757): If we start recording the audio stream before
+            # playback begins, there is an audio blank of about 1~2 sec at the
+            # beginning of the recorded file, which makes the chunk checking
+            # fail. This needs to be fixed in the future.
+            self.test_dut_to_start_playing_audio_subprocess(hfp_test_data)
+            self.test_device_to_start_recording_audio_subprocess(
+                    device, test_profile, hfp_test_data)
+            time.sleep(hfp_test_data['chunk_checking_duration'])
+
+            self.test_dut_to_stop_playing_audio_subprocess()
+            self.test_device_to_stop_recording_audio_subprocess(device)
+            self.test_check_audio_file(device, test_profile, hfp_test_data,
+                                       'recorded_by_peer')
+
+            self.test_device_to_start_recording_audio_subprocess(
+                    device, test_profile, hfp_test_data)
+            time.sleep(hfp_test_data['chunk_checking_duration'])
+
+            self.test_device_to_stop_recording_audio_subprocess(device)
+            self.test_check_audio_file(device, test_profile, hfp_test_data,
+                                       recording_device='recorded_by_peer',
+                                       check_frequencies=False)
+
+        # Disable HFP profile.
+        self.test_dut_to_stop_capturing_audio_subprocess()
+
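The presence and absence checks above are performed by test_check_audio_file against the recorded chunks. As a rough illustration of that kind of check (a minimal sketch, not the autotest helper, assuming a mono 16-bit WAV and illustrative file names and frequencies):

# Minimal sketch, not the autotest helper: decide whether a mono 16-bit WAV
# chunk contains a dominant tone near an expected frequency.
import wave
import numpy as np

def tone_present(wav_path, expected_hz, tolerance_hz=5.0):
    """Return True if the dominant frequency is within tolerance of expected_hz."""
    wf = wave.open(wav_path, 'rb')
    try:
        rate = wf.getframerate()
        frames = wf.readframes(wf.getnframes())
    finally:
        wf.close()
    samples = np.frombuffer(frames, dtype=np.int16).astype(np.float64)
    if samples.size == 0 or not samples.any():
        return False  # empty or pure silence: no tone present
    spectrum = np.abs(np.fft.rfft(samples))
    dominant_hz = np.fft.rfftfreq(len(samples), d=1.0 / rate)[np.argmax(spectrum)]
    return abs(dominant_hz - expected_hz) <= tolerance_hz

# Example (illustrative path and frequency):
# tone_present('recorded_by_peer.wav', expected_hz=440.0)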
+
+    def a2dp_to_hfp_dut_as_source(self, device, test_profile):
+        """Play the audio from DUT to Bluetooth device and switch the profile.
+
+        This test first uses the A2DP profile and plays the audio stream on
+        the DUT, checking whether the peer receives the audio stream
+        correctly. It then switches to the HFP profile and checks the audio
+        stream again.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: which test profile is used, HFP_WBS_MEDIUM or
+                             HFP_NBS_MEDIUM.
+        """
+        hfp_test_data = audio_test_data[test_profile]
+
+        # Wait for pulseaudio a2dp bluez source.
+        self.test_device_a2dp_connected(device)
+
+        # Select audio output node so that we do not rely on chrome to do it.
+        self.test_select_audio_output_node_bluetooth()
+
+        self.test_device_to_start_recording_audio_subprocess(
+                device, test_profile, hfp_test_data)
+
+        # Play audio on the DUT in a non-blocking way and check the recorded
+        # audio stream in real time.
+        self.test_dut_to_start_playing_audio_subprocess(hfp_test_data)
+
+        time.sleep(hfp_test_data['chunk_checking_duration'])
+
+        self.test_device_to_stop_recording_audio_subprocess(device)
+
+        self.test_check_audio_file(device, test_profile, hfp_test_data,
+                                   'recorded_by_peer')
+
+        self.test_select_audio_input_device(device.name)
+
+        # Enable HFP profile.
+        self.test_dut_to_start_capturing_audio_subprocess(hfp_test_data,
+                                                          'recorded_by_peer')
+
+        # Wait for pulseaudio bluez hfp source/sink.
+        self.test_hfp_connected(
+                self._get_pulseaudio_bluez_source_hfp, device, test_profile)
+
+        self.test_device_to_start_recording_audio_subprocess(
+                device, test_profile, hfp_test_data)
+
+        time.sleep(hfp_test_data['chunk_checking_duration'])
+
+        self.test_dut_to_stop_playing_audio_subprocess()
+
+        self.test_device_to_stop_recording_audio_subprocess(device)
+
+        self.test_check_audio_file(device, test_profile, hfp_test_data,
+                                   'recorded_by_peer')
+
+        # Disable HFP profile.
+        self.test_dut_to_stop_capturing_audio_subprocess()
+
+
+    def hfp_to_a2dp_dut_as_source(self, device, test_profile):
+        """Play the audio from DUT to Bluetooth peer in A2DP then switch to HFP.
+
+        This test first uses HFP profile and plays the audio stream on the DUT,
+        checking if the peer receives the audio stream correctly. And then
+        switch to the A2DP profile and check the audio stream again.
+
+        @param device: the Bluetooth peer device.
+        @param test_profile: which test profile is used,
+                             HFP_NBS_MEDIUM or HFP_WBS_MEDIUM.
+        """
+        hfp_test_data = audio_test_data[test_profile]
+
+        self.test_select_audio_input_device(device.name)
+
+        # Select audio output node so that we do not rely on chrome to do it.
+        self.test_select_audio_output_node_bluetooth()
+
+        # Enable HFP profile.
+        self.test_dut_to_start_capturing_audio_subprocess(hfp_test_data,
+                                                          'recorded_by_peer')
+
+        # Wait for pulseaudio bluez hfp source/sink.
+        self.test_hfp_connected(
+                self._get_pulseaudio_bluez_source_hfp, device, test_profile)
+
+        # Play audio on the DUT in a non-blocking way and check the recorded
+        # audio stream in real time.
+        self.test_dut_to_start_playing_audio_subprocess(hfp_test_data)
+        self.test_device_to_start_recording_audio_subprocess(
+                device, test_profile, hfp_test_data)
+        time.sleep(hfp_test_data['chunk_checking_duration'])
+
+        self.test_device_to_stop_recording_audio_subprocess(device)
+        self.test_check_audio_file(device, test_profile, hfp_test_data,
+                                   'recorded_by_peer')
+
+        # Disable HFP profile.
+        self.test_dut_to_stop_capturing_audio_subprocess()
+
+        # Wait for pulseaudio a2dp bluez source.
+        self.test_device_a2dp_connected(device)
+
+        self.test_device_to_start_recording_audio_subprocess(
+                device, test_profile, hfp_test_data)
+        time.sleep(hfp_test_data['chunk_checking_duration'])
+
+        self.test_dut_to_stop_playing_audio_subprocess()
+        self.test_check_audio_file(device, test_profile, hfp_test_data,
+                                   'recorded_by_peer')
+        self.test_device_to_stop_recording_audio_subprocess(device)
diff --git a/server/cros/bluetooth/bluetooth_adapter_better_together.py b/server/cros/bluetooth/bluetooth_adapter_better_together.py
index 5ca6011..44abb94 100644
--- a/server/cros/bluetooth/bluetooth_adapter_better_together.py
+++ b/server/cros/bluetooth/bluetooth_adapter_better_together.py
@@ -167,6 +167,29 @@
     return True
 
 
+  def test_smart_unlock_llt(self, address):
+    """Smart unlock flow for llt cases """
+    filter = {'Transport': 'le'}
+    parameters = {'MinimumConnectionInterval': 6,
+                  'MaximumConnectionInterval': 6}
+
+    self.test_set_discovery_filter(filter)
+    self.test_discover_device(address)
+
+    self.test_set_le_connection_parameters(address, parameters)
+    self.test_connection_by_adapter(address)
+
+    self.test_set_trusted(address)
+    self.test_service_resolved(address)
+    self.test_find_object_path(address)
+
+    self.test_start_notify(self.rx_object_path,
+                           self.CCCD_VALUE_INDICATION)
+    self.test_messages_exchange(
+        self.rx_object_path, self.tx_object_path, address)
+    self.test_stop_notify(self.rx_object_path)
+
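For reference, the connection-interval values used above appear to be in the controller's units of 1.25 ms each (the requested value of 6 corresponds to the 7.5 ms minimum the Bluetooth spec allows). A trivial sketch of the conversion, under that assumption:

# Assuming the values map directly to BLE connection-interval units of 1.25 ms.
BLE_CONN_INTERVAL_UNIT_MS = 1.25

def conn_interval_ms(units):
    """Convert connection-interval units to milliseconds."""
    return units * BLE_CONN_INTERVAL_UNIT_MS

assert conn_interval_ms(6) == 7.5  # the Minimum/MaximumConnectionInterval used above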
+
   @test_retry_and_log(False)
   def test_remove_device_object(self, address):
     """Test the device object can be removed from the adapter"""
diff --git a/server/cros/bluetooth/bluetooth_adapter_controller_role_tests.py b/server/cros/bluetooth/bluetooth_adapter_controller_role_tests.py
index 1b22b7f..4731501 100644
--- a/server/cros/bluetooth/bluetooth_adapter_controller_role_tests.py
+++ b/server/cros/bluetooth/bluetooth_adapter_controller_role_tests.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -43,7 +44,7 @@
 
         self.test_discover_device(device.address)
         time.sleep(self.TEST_SLEEP_SECS)
-        self.test_pairing(device.address, device.pin, trusted=True)
+        self.test_pairing(device.address, device.pin, trusted=False)
         self.test_disconnection_by_adapter(device.address)
 
 
@@ -54,10 +55,19 @@
         @param secondary_test_func: function handle to test connection
         """
         logging.info('Setting up secondary device')
-        self.test_discover_device(device.address)
-        self.test_pairing(device.address, device.pin, trusted=True)
+        if not self.test_discover_device(device.address):
+            logging.error('connect_and_test_secondary_device exits early as '
+                          'test_discover_device fails')
+            return
+        if not self.test_pairing(device.address, device.pin, trusted=False):
+            logging.error('connect_and_test_secondary_device exits early as '
+                          'test_pairing fails')
+            return
         time.sleep(self.TEST_SLEEP_SECS)
-        self.test_connection_by_adapter(device.address)
+        if not self.test_connection_by_adapter(device.address):
+            logging.error('connect_and_test_secondary_device exits early as '
+                          'test_connection_by_adapter fails')
+            return
         time.sleep(self.TEST_SLEEP_SECS)
         secondary_test_func(device)
 
@@ -142,8 +152,7 @@
         self.test_set_advertising_intervals(DEFAULT_MIN_ADV_INTERVAL,
                                             DEFAULT_MAX_ADV_INTERVAL)
         self.test_register_advertisement(advertisements_data.ADVERTISEMENTS[0],
-                                         1, DEFAULT_MIN_ADV_INTERVAL,
-                                         DEFAULT_MAX_ADV_INTERVAL)
+                                         1)
 
         # Discover DUT from peer
         self.test_discover_by_device(primary_device)
@@ -228,9 +237,8 @@
         # For now, advertise connectable advertisement. If we use a broadcast
         # advertisement, the Pi can't resolve the address and
         # test_discover_by_device will fail
-        self.test_register_advertisement(
-            advertisements_data.ADVERTISEMENTS[0], 1,
-            DEFAULT_MIN_ADV_INTERVAL, DEFAULT_MAX_ADV_INTERVAL)
+        self.test_register_advertisement(advertisements_data.ADVERTISEMENTS[0],
+                                         1)
 
         # Second thread runs on peer, delays, discovers DUT, and then advertises
         # itself back
@@ -332,8 +340,7 @@
         self.test_set_advertising_intervals(DEFAULT_MIN_ADV_INTERVAL,
                                             DEFAULT_MAX_ADV_INTERVAL)
         self.test_register_advertisement(advertisements_data.ADVERTISEMENTS[0],
-                                         1, DEFAULT_MIN_ADV_INTERVAL,
-                                         DEFAULT_MAX_ADV_INTERVAL)
+                                         1)
 
         # If test requires it, connect and test secondary device
         if secondary_info is not None and device_use == 'mid':
diff --git a/server/cros/bluetooth/bluetooth_adapter_hidreports_tests.py b/server/cros/bluetooth/bluetooth_adapter_hidreports_tests.py
index 1999889..f992c48 100644
--- a/server/cros/bluetooth/bluetooth_adapter_hidreports_tests.py
+++ b/server/cros/bluetooth/bluetooth_adapter_hidreports_tests.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -42,7 +43,7 @@
 
 
     def run_keyboard_tests(self, device):
-        """Run all bluetooth mouse reports tests.
+        """Run all bluetooth keyboard reports tests.
 
         @param device: the bluetooth HID device.
 
@@ -60,64 +61,103 @@
 
         self.test_battery_reporting(device)
 
-    def run_hid_reports_test(self, device,
+    def run_hid_reports_test(self,
+                             device,
                              check_connected_method=lambda device: True,
-                             suspend_resume=False, reboot=False):
+                             suspend_resume=False,
+                             reboot=False,
+                             restart=False):
         """Running Bluetooth HID reports tests."""
         logging.info("run hid reports test")
         # Reset the adapter and set it pairable.
-        self.test_reset_on_adapter()
-        self.test_pairable()
+        if not self.test_reset_on_adapter():
+            return
+        if not self.test_pairable():
+            return
 
-        # Let the adapter pair, and connect to the target device.
-        self.test_discover_device(device.address)
-        self.test_pairing(device.address, device.pin, trusted=True)
-        self.test_connection_by_adapter(device.address)
-
-        # Run hid test to make sure profile is connected
-        check_connected_method(device)
-
-        if suspend_resume:
-            self.suspend_resume()
+        def run_hid_test():
+            """Checks if the device is connected and can be used."""
+            time.sleep(self.HID_TEST_SLEEP_SECS)
+            if not self.test_device_name(device.address, device.name):
+                return False
 
             time.sleep(self.HID_TEST_SLEEP_SECS)
-            self.test_device_is_paired(device.address)
+            if not check_connected_method(device):
+                return False
+            return True
 
+        dev_paired = False
+        dev_connected = False
+        try:
+            # Let the adapter pair, and connect to the target device.
+            self.test_discover_device(device.address)
+            dev_paired = self.test_pairing(device.address,
+                                           device.pin,
+                                           trusted=True)
+            if not dev_paired:
+                return
+            dev_connected = self.test_connection_by_adapter(device.address)
+            if not dev_connected:
+                return
 
-            # check if peripheral is connected after suspend resume, reconnect
-            # if it isn't
-            if not self.ignore_failure(check_connected_method, device):
-                logging.info("device not connected after suspend_resume")
-                self.test_connection_by_device(device)
-            else:
-                logging.info("device remains connected after suspend_resume")
+            # Run hid test to make sure profile is connected
+            if not run_hid_test():
+                return
 
-            time.sleep(self.HID_TEST_SLEEP_SECS)
-            check_connected_method(device)
+            if suspend_resume:
+                self.suspend_resume()
 
-            time.sleep(self.HID_TEST_SLEEP_SECS)
-            self.test_device_name(device.address, device.name)
+                time.sleep(self.HID_TEST_SLEEP_SECS)
+                if not self.test_device_is_paired(device.address):
+                    return
 
-        if reboot:
-            self.reboot()
+                # Check if peripheral is connected after suspend resume, reconnect
+                # and try again if it isn't.
+                if not self.ignore_failure(check_connected_method, device):
+                    logging.info("device not connected after suspend_resume")
+                    self.test_connection_by_device(device)
+                run_hid_test()
 
-            time.sleep(self.HID_TEST_SLEEP_SECS)
-            # TODO(b/173146480) - Power on the adapter for now until this bug
-            # is resolved.
-            self.test_power_on_adapter()
+            if reboot:
+                # If we expect the DUT to automatically reconnect to the peer on
+                # boot, we reset the peer into a connectable state
+                if self.platform_will_reconnect_on_boot():
+                    logging.info(
+                            "Restarting peer to accept DUT connection on boot")
+                    device_type = self.get_peer_device_type(device)
+                    self.reset_emulated_device(device, device_type)
 
-            self.test_device_is_paired(device.address)
+                self.reboot()
 
-            time.sleep(self.HID_TEST_SLEEP_SECS)
-            self.test_connection_by_device(device)
+                time.sleep(self.HID_TEST_SLEEP_SECS)
+                # TODO(b/173146480) - Power on the adapter for now until this bug
+                # is resolved.
+                if not self.bluetooth_facade.is_powered_on():
+                    self.test_power_on_adapter()
 
-            time.sleep(self.HID_TEST_SLEEP_SECS)
-            self.test_device_name(device.address, device.name)
+                if not self.test_device_is_paired(device.address):
+                    return
 
-        # Run HID test after suspend/reboot as well.
-        if suspend_resume or reboot:
-            check_connected_method(device)
+                time.sleep(self.HID_TEST_SLEEP_SECS)
+                if not self.platform_will_reconnect_on_boot():
+                    self.test_connection_by_device(device)
 
-        # Disconnect the device, and remove the pairing.
-        self.test_disconnection_by_adapter(device.address)
-        self.test_remove_pairing(device.address)
+                else:
+                    self.test_device_is_connected(device.address)
+                run_hid_test()
+
+            if restart:
+                self.test_stop_bluetoothd()
+                self.test_start_bluetoothd()
+
+                if not self.ignore_failure(self.test_device_is_connected,
+                                           device.address):
+                    self.test_connection_by_device(device)
+                run_hid_test()
+
+        finally:
+            # Cleans up the test
+            if dev_connected:
+                self.test_disconnection_by_adapter(device.address)
+            if dev_paired:
+                self.test_remove_pairing(device.address)
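The refactored run_hid_reports_test above tracks whether pairing and connection actually succeeded so that the finally block only undoes what was established. A minimal self-contained sketch of that pattern, with hypothetical stand-in helpers:

# Minimal sketch of the pair/connect-with-cleanup pattern used above.
# The helpers below are hypothetical stand-ins for the test_* methods.
def pair(address):        # stand-in for test_pairing
    return True

def connect(address):     # stand-in for test_connection_by_adapter
    return True

def disconnect(address):  # stand-in for test_disconnection_by_adapter
    print('disconnect', address)

def unpair(address):      # stand-in for test_remove_pairing
    print('unpair', address)

def run_with_cleanup(address):
    paired = False
    connected = False
    try:
        paired = pair(address)
        if not paired:
            return
        connected = connect(address)
        if not connected:
            return
        # ... run the actual HID checks here ...
    finally:
        # Only clean up the state that was actually reached.
        if connected:
            disconnect(address)
        if paired:
            unpair(address)

run_with_cleanup('AA:BB:CC:DD:EE:FF')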
diff --git a/server/cros/bluetooth/bluetooth_adapter_leadvertising_tests.py b/server/cros/bluetooth/bluetooth_adapter_leadvertising_tests.py
index 19df1a4..0d3c104 100644
--- a/server/cros/bluetooth/bluetooth_adapter_leadvertising_tests.py
+++ b/server/cros/bluetooth/bluetooth_adapter_leadvertising_tests.py
@@ -89,24 +89,21 @@
         @param min_adv_interval_ms: min_adv_interval in milliseconds.
         @param max_adv_interval_ms: max_adv_interval in milliseconds.
         @param instance_ids: the list of instance IDs to register.
-
         """
         if instance_ids is None:
             instance_ids = self.get_instance_ids(advertisements)
 
         for instance_id, advertisement in zip(instance_ids, advertisements):
-            self.test_register_advertisement(advertisement,
-                                             instance_id,
-                                             min_adv_interval_ms,
-                                             max_adv_interval_ms)
+            advertisement['MinInterval'] = min_adv_interval_ms
+            advertisement['MaxInterval'] = max_adv_interval_ms
+
+            self.test_register_advertisement(advertisement, instance_id)
 
 
     def unregister_advertisements(self, advertisements, instance_ids=None):
         """Register multiple advertisements.
 
         @param advertisements: a list of advertisement instances.
-        @param min_adv_interval_ms: min_adv_interval in milliseconds.
-        @param max_adv_interval_ms: max_adv_interval in milliseconds.
         @param instance_ids: the list of instance IDs to unregister.
 
         """
@@ -248,6 +245,51 @@
         return discovered_service_data
 
 
+    def validate_scan_rsp_reception(self, peer, advertisement, discover_time):
+        """Validate our advertisement's scan response is located by the peer
+
+        If our advertisements are configured with scan response data, we wish
+        to confirm that a scanning peer will be able to discover this content.
+
+        @param peer: Handle to peer device for advertisement collection
+        @param advertisement: Advertisement data that has been enabled on DUT
+            side
+        @param discover_time: Number of seconds we should spend discovering
+            before considering the device undiscoverable
+
+        @returns: True if scan response is discovered and is correct, else False
+        """
+
+        scan_rsp_data = advertisement.get('ScanResponseData', {})
+
+        # For now, scan response can only contain service data (ad type 0x16):
+        # It appears in a scan response event with the following format:
+        # 'Service Data (UUID 0xfef3): 010203...'
+        if '0x16' in scan_rsp_data:
+            service_uuid_data = scan_rsp_data['0x16']
+
+            # First two bytes of data make up 16 bit service UUID
+            uuid = service_uuid_data[1] * 256 + service_uuid_data[0]
+            # Subsequent bytes make up the service data
+            service_data = ''.join(
+                    ['{:02x}'.format(data) for data in service_uuid_data[2:]])
+
+            search_str = 'Service Data (UUID 0x{:4x}): {}'.format(
+                    uuid, service_data)
+            logging.debug('Searching btmon for content: {}'.format(search_str))
+
+            # Locate a scan response with the above entry. Pass if it is found
+            start_time = time.time()
+            found_adv = peer.FindAdvertisementWithAttributes(
+                    [search_str, 'SCAN_RSP'], discover_time)
+
+            logging.info('Scan response discovered after %fs',
+                         time.time() - start_time)
+
+            return bool(found_adv)
+
+        return True
+
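To illustrate the byte layout the new helper expects, the sketch below rebuilds the btmon search string for a made-up AD type 0x16 payload (a little-endian 16-bit UUID followed by the service data bytes):

# Illustrative only: rebuild the btmon search string the test looks for,
# using a made-up scan-response payload for AD type 0x16 (Service Data).
scan_rsp_data = {'0x16': [0xf3, 0xfe, 0x01, 0x02, 0x03]}

payload = scan_rsp_data['0x16']
uuid = payload[1] * 256 + payload[0]  # little-endian 16-bit service UUID
service_data = ''.join('{:02x}'.format(b) for b in payload[2:])
search_str = 'Service Data (UUID 0x{:4x}): {}'.format(uuid, service_data)
print(search_str)  # -> Service Data (UUID 0xfef3): 010203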
     def _test_peer_received_correct_adv(self, peer, advertisement,
                                         discover_time):
         """Test that configured advertisements are found by peer
@@ -266,6 +308,8 @@
         @returns: True if advertisement is discovered and is correct, else False
         """
 
+        self.results = {}
+
         # We locate the advertisement by searching for the ServiceData
         # attribute we configured.
         data_to_match = list(advertisement['ServiceData'].keys())[0]
@@ -276,6 +320,9 @@
         logging.info('Advertisement discovered after %fs',
                      time.time() - start_time)
 
+        if not found_adv:
+            self.results['advertisement_found'] = False
+
         # Check that our service UUIDs match what we expect
         found_service_uuids = self._get_uuids_from_advertisement(
                 found_adv, 'Service')
@@ -284,6 +331,7 @@
             if int(UUID, 16) not in found_service_uuids:
                 logging.info('Service id %d not found in %s', int(UUID, 16),
                              str(found_service_uuids))
+                self.results['service_ids_found'] = False
                 return False
 
         # Check that our solicit UUIDs match what we expect
@@ -294,6 +342,7 @@
             if int(UUID, 16) not in found_solicit_uuids:
                 logging.info('Solicit ID %d not found in %s', int(UUID, 16),
                              str(found_solicit_uuids))
+                self.results['solicit_ids_found'] = False
                 return False
 
         # Check that our Manufacturer info is correct
@@ -304,6 +353,7 @@
             if int(UUID, 16) not in company_info:
                 logging.info('Company ID %d not found in advertisement',
                         int(UUID, 16))
+                self.results['manufacturer_uuid_found'] = False
                 return False
 
             expected_data = expected_company_info.get(UUID, None)
@@ -312,6 +362,7 @@
             if formatted_data != company_info.get(int(UUID, 16)):
                 logging.info('Manufacturer data %s didn\'t match expected %s',
                         company_info.get(int(UUID, 16)), formatted_data)
+                self.results['manufacturer_data_found'] = False
                 return False
 
         # Check that our service data is correct
@@ -322,6 +373,7 @@
             if int(UUID, 16) not in service_data:
                 logging.info('Service UUID %d not found in advertisement',
                              int(UUID, 16))
+                self.results['service_data_uuid_found'] = False
                 return False
 
             expected_data = expected_service_data.get(UUID, None)
@@ -330,8 +382,15 @@
             if formatted_data != service_data.get(int(UUID, 16)):
                 logging.info('Service data %s didn\'t match expected %s',
                              service_data.get(int(UUID, 16)), formatted_data)
+                self.results['service_data_found'] = False
                 return False
 
+        # Validate scan response from peer's perspective
+        if not self.validate_scan_rsp_reception(peer, advertisement,
+                                                discover_time):
+            self.results['scan_rsp_found'] = False
+            return False
+
         return True
 
 
@@ -434,11 +493,14 @@
         # advertisements are discoverable. Give a few extra seconds in suspend
         # to be safe
         suspend_time = discover_time * num_adv + 10
+        logging.debug(
+                'suspend_time(%d) = discover_time(%d) * num_adv(%d) + 10',
+                suspend_time, discover_time, num_adv)
 
         # Trigger suspend, asynchronously trigger wake and wait for resume
         boot_id = self.host.get_boot_id()
         suspend = self.suspend_async(suspend_time=suspend_time)
-        start_time = self.bluetooth_facade.get_device_time()
+        start_time = self.bluetooth_facade.get_device_utc_time()
         self.test_suspend_and_wait_for_sleep(suspend, sleep_timeout=5)
 
         # Verify they can not be discovered
@@ -447,9 +509,11 @@
                     peer, advertisements_data.ADVERTISEMENTS[i], discover_time)
 
         # Wait for device to come out of suspend
+        logging.debug('test_wait_for_resume(resume_timeout=%d, start_time=%s)',
+                      suspend_time, start_time)
         self.test_wait_for_resume(boot_id,
                                   suspend,
-                                  resume_timeout=suspend_time + 5,
+                                  resume_timeout=suspend_time,
                                   test_start_time=start_time)
 
         # Verify reception of advertisements again
@@ -467,9 +531,6 @@
 
         self.test_reset_advertising()
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
         self.register_advertisements(advertisements, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
 
@@ -492,9 +553,6 @@
 
         self.test_reset_advertising()
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
         self.register_advertisements(advertisements, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
 
@@ -542,9 +600,6 @@
 
         self.test_reset_advertising()
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
         self.register_advertisements(advertisements, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
 
@@ -565,9 +620,6 @@
 
         self.test_reset_advertising()
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
         self.register_advertisements(advertisements, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
 
@@ -604,9 +656,6 @@
 
         self.test_reset_advertising()
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
         self.register_advertisements(advertisements1, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
 
@@ -655,9 +704,6 @@
 
         self.test_reset_advertising()
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
         self.register_advertisements(advertisements, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
 
@@ -690,9 +736,6 @@
 
         self.test_reset_advertising()
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
         self.register_advertisements(advertisements, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
 
@@ -721,9 +764,6 @@
 
         self.test_reset_advertising()
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
         self.register_advertisements(advertisements, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
 
@@ -744,8 +784,8 @@
     @test_case_log
     def test_case_RA3_CD_SI200_CD_UA3(self):
         """Test Case: RA(3) - CD - SI(200) - CD - UA(3)"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         advertisements = self.three_advertisements
@@ -760,8 +800,9 @@
                                                orig_max_adv_interval_ms,
                                                number_advs)
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -773,8 +814,8 @@
     @test_case_log
     def test_case_RA3_CD_SI200_CD_RS(self):
         """Test Case: RA(3) - CD - SI(200) - CD - RS"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         advertisements = self.three_advertisements
@@ -789,8 +830,9 @@
                                                orig_max_adv_interval_ms,
                                                number_advs)
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -803,8 +845,8 @@
     @test_case_log
     def test_case_RA3_CD_SI200_CD_UA1_CD_RS(self):
         """Test Case: RA(3) - CD - SI(200) - CD - UA(1) - CD - RS"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         advertisements = self.three_advertisements
@@ -819,8 +861,9 @@
                                                orig_max_adv_interval_ms,
                                                number_advs)
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -844,8 +887,8 @@
     @test_case_log
     def test_case_RA3_CD_SI200_CD_SI2000_CD_UA3(self):
         """Test Case: RA(3) - CD - SI(200) - CD - SI(2000) - CD - UA(3)"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_small_min_adv_interval_ms = 200
         new_small_max_adv_interval_ms = 200
         new_large_min_adv_interval_ms = 2000
@@ -862,15 +905,19 @@
                                                orig_max_adv_interval_ms,
                                                number_advs)
 
-        self.test_set_advertising_intervals(new_small_min_adv_interval_ms,
-                                            new_small_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements,
+                                     new_small_min_adv_interval_ms,
+                                     new_small_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_small_min_adv_interval_ms,
                                                new_small_max_adv_interval_ms,
                                                number_advs)
 
-        self.test_set_advertising_intervals(new_large_min_adv_interval_ms,
-                                            new_large_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements,
+                                     new_large_min_adv_interval_ms,
+                                     new_large_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_large_min_adv_interval_ms,
                                                new_large_max_adv_interval_ms,
@@ -882,8 +929,8 @@
     @test_case_log
     def test_case_RA5_CD_SI200_CD_FRA1_CD_UA5(self):
         """Test Case: RA(5) - CD - SI(200) - CD - FRA(1) - CD - UA(5)"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         advertisements = self.five_advertisements
@@ -899,8 +946,9 @@
                                                orig_max_adv_interval_ms,
                                                number_advs)
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -927,8 +975,8 @@
         """Test Case: RA(3) - CD - SI(200) - CD - FSI(10) - CD - FSI(20000) - CD
         - UA(3)
         """
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         invalid_small_min_adv_interval_ms = 10
@@ -947,8 +995,9 @@
                                                orig_max_adv_interval_ms,
                                                number_advs)
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -983,8 +1032,8 @@
     @test_case_log
     def test_case_RA3_CD_SI200_CD_PC_CD_UA3(self):
         """Test Case: RA(3) - CD - SI(200) - CD - PC - CD - UA(3)"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         advertisements = self.three_advertisements
@@ -999,8 +1048,9 @@
                                                orig_max_adv_interval_ms,
                                                number_advs)
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -1023,8 +1073,8 @@
     @test_case_log
     def test_case_RA3_CD_SI200_CD_SR_CD_UA3(self):
         """Test Case: RA(3) - CD - SI(200) - CD - SR - CD - UA(3)"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         advertisements = self.three_advertisements
@@ -1039,8 +1089,9 @@
                                                orig_max_adv_interval_ms,
                                                number_advs)
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -1067,9 +1118,6 @@
 
         self.test_reset_advertising()
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
         self.register_advertisements(advertisements, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
 
@@ -1088,8 +1136,6 @@
         advertisements = [self.first_advertisement]
 
         self.test_reset_advertising()
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
 
         self.register_advertisements(advertisements, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
@@ -1110,9 +1156,6 @@
 
         self.test_reset_advertising()
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
         self.register_advertisements(advertisements, new_min_adv_interval_ms,
                                      new_max_adv_interval_ms)
 
@@ -1120,11 +1163,7 @@
                                                new_max_adv_interval_ms,
                                                len(advertisements))
 
-        # On some devices suspend/resume unregisters the advertisement
-        # causing the test to fail. Disabling suspend/resume till
-        # the issue is resolved.
-        # TODO(crbug/949802)
-        # self.suspend_resume()
+        self.suspend_resume()
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -1136,8 +1175,8 @@
     @test_case_log
     def test_case_RA1_CD_SI200_CD_UA1(self):
         """Test Case: RA(1) - CD - SI(200) - CD - UA(1)"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         advertisements = [self.first_advertisement]
@@ -1151,9 +1190,9 @@
                                                orig_max_adv_interval_ms,
                                                len(advertisements))
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
-
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -1165,8 +1204,8 @@
     @test_case_log
     def test_case_RA1_CD_SI200_CD_RS(self):
         """Test Case: RA(1) - CD - SI(200) - CD - RS"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         advertisements = [self.sixth_advertisement]
@@ -1179,8 +1218,9 @@
                                                orig_max_adv_interval_ms,
                                                len(advertisements))
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -1192,8 +1232,8 @@
     def test_case_RA1_CD_SI200_CD_FSI10_UA1_RA1_CD_UA1(self):
         """Test Case:  RA(1) - CD - SI(200) - CD - FSI(10) - UA(1)
          - RA(1) - CD - UA(1)"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         invalid_small_min_adv_interval_ms = 10
@@ -1210,8 +1250,9 @@
                                                orig_max_adv_interval_ms,
                                                len(advertisements))
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -1240,8 +1281,8 @@
     def test_case_RA1_CD_SI200_CD_FSI20000_UA1_RA1_CD_UA1(self):
         """Test Case:  RA(1) - CD - SI(200) - CD - FSI(20000) - UA(1)
          - RA(1) - CD - UA(1)"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         invalid_large_min_adv_interval_ms = 20000
@@ -1258,8 +1299,9 @@
                                                orig_max_adv_interval_ms,
                                                len(advertisements))
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -1286,8 +1328,8 @@
     @test_case_log
     def test_case_RA1_CD_SI200_CD_PC_CD_UA1(self):
         """Test Case: RA(1) - CD - SI(200) - CD - PC - CD - UA(1)"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         advertisements = [self.sixth_advertisement]
@@ -1300,8 +1342,9 @@
                                                orig_max_adv_interval_ms,
                                                len(advertisements))
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -1322,8 +1365,8 @@
     @test_case_log
     def test_case_RA1_CD_SI200_CD_SR_CD_UA1(self):
         """Test Case: RA(1) - CD - SI(200) - CD - SR - CD - UA(1)"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
         new_min_adv_interval_ms = 200
         new_max_adv_interval_ms = 200
         advertisements = [self.first_advertisement]
@@ -1336,20 +1379,15 @@
                                                orig_max_adv_interval_ms,
                                                len(advertisements))
 
-        self.test_set_advertising_intervals(new_min_adv_interval_ms,
-                                            new_max_adv_interval_ms)
+        self.unregister_advertisements(advertisements)
+        self.register_advertisements(advertisements, new_min_adv_interval_ms,
+                                     new_max_adv_interval_ms)
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
                                                len(advertisements))
 
-        logging.info("Suspend resume is disabled due to crbug/949802")
-
-        # On some devices suspend/resume unregisters the advertisement
-        # causing the test to fail. Disabling suspend/resume till
-        # the issue is resolved.
-        # TODO(crbug/949802)
-        # self.suspend_resume()
+        self.suspend_resume()
 
         self.test_check_duration_and_intervals(new_min_adv_interval_ms,
                                                new_max_adv_interval_ms,
@@ -1360,8 +1398,8 @@
     @test_case_log
     def test_case_nearby_mediums_fast(self):
         """Verify minimal test case for nearby sharing"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
 
         # We set a specific advertisement with fields required by Nearby
         # sharing service
@@ -1381,15 +1419,46 @@
         self.test_advertising_flags(['Advertise as Discoverable'])
 
     @test_case_log
+    def test_case_adv_before_scan(self):
+        """Verify we can scan after advertising starts
+
+        We found that when extended advertising is available, any Set Adv
+        Disable HCI command would mark the hdev as not advertising, even if
+        other instances were active at the time. Later attempts to start
+        discovery would fail because the kernel tries to update the random
+        address without knowing to pause the advertisements. This test case
+        replicates this failure condition to validate the fix.
+        """
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        advertisements = self.three_advertisements
+
+        self.test_reset_advertising()
+
+        # Register several advertisements
+        self.register_advertisements(advertisements, orig_min_adv_interval_ms,
+                                     orig_max_adv_interval_ms)
+
+        # Unregister one active advertisement.
+        instance_id = 2
+        self.test_unregister_advertisement(advertisements[instance_id - 1],
+                                           instance_id,
+                                           advertising_disabled=False)
+
+        self.test_start_discovery()
+
+        # Test if advertising is reset correctly. Only instances [1, 3] are left.
+        self.test_reset_advertising([1, 3])
+
+    @test_case_log
     def test_case_broadcast(self):
         """Verify minimal test case for broadcasted advertising"""
-        orig_min_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
-        orig_max_adv_interval_ms = self.DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+        orig_max_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
 
         # We set a specific advertisement that uses the 'broadcast' mode
         advertisements = [advertisements_data.NEARBY_BROADCAST_ADV]
 
-        self.bluetooth_le_facade = self.bluetooth_facade
         self.test_reset_advertising()
 
         # Verify that registration is successful, and that all configured
@@ -1446,8 +1515,6 @@
             # until test can be refactored.
             # self.test_case_RA5_CD_SI200_CD_FRA1_CD_UA5()
             self.test_case_RA3_CD_SI200_CD_FSI10_CD_FSI20000_CD_UA3()
-            self.test_case_SI200_RA3_CD_SR_CD_UA3()
-            self.test_case_RA3_CD_SI200_CD_SR_CD_UA3()
             self.test_case_SI200_RA3_CD_PC_CD_UA3()
             self.test_case_RA3_CD_SI200_CD_PC_CD_UA3()
 
@@ -1463,8 +1530,6 @@
             self.test_case_RA1_CD_SI200_CD_RS()
             self.test_case_RA1_CD_SI200_CD_FSI10_UA1_RA1_CD_UA1()
             self.test_case_RA1_CD_SI200_CD_FSI20000_UA1_RA1_CD_UA1()
-            self.test_case_SI200_RA1_CD_SR_CD_UA1()
-            self.test_case_RA1_CD_SI200_CD_SR_CD_UA1()
             self.test_case_RA1_CD_SI200_CD_PC_CD_UA1()
 
         elif test_type == 'suspend_resume':
@@ -1486,3 +1551,7 @@
 
         elif test_type == 'nearby':
             self.test_case_nearby_mediums_fast()
+            self.test_case_adv_before_scan()
+
+        elif test_type == 'broadcast':
+            self.test_case_broadcast()
diff --git a/server/cros/bluetooth/bluetooth_adapter_pairing_tests.py b/server/cros/bluetooth/bluetooth_adapter_pairing_tests.py
index a4718cf..84cec85 100644
--- a/server/cros/bluetooth/bluetooth_adapter_pairing_tests.py
+++ b/server/cros/bluetooth/bluetooth_adapter_pairing_tests.py
@@ -43,58 +43,68 @@
         """Running Bluetooth adapter tests about pairing to a device."""
 
         # Reset the adapter to forget previously paired devices if any.
-        self.test_reset_on_adapter()
+        if not self.test_reset_on_adapter():
+            return
 
         # The adapter must be set to the pairable state.
-        self.test_pairable()
+        if not self.test_pairable():
+            return
 
         # Test if the adapter could discover the target device.
         time.sleep(self.PAIR_TEST_SLEEP_SECS)
-        self.test_discover_device(device.address)
+        if not self.test_discover_device(device.address):
+            return
 
         # Test if the discovered device class of service is correct.
-        self.test_device_class_of_service(device.address,
-                                          device.class_of_service)
+        if not self.test_device_class_of_service(device.address,
+                                                 device.class_of_service):
+            return
 
         # Test if the discovered device class of device is correct.
-        self.test_device_class_of_device(device.address,
-                                         device.class_of_device)
+        if not self.test_device_class_of_device(device.address,
+                                                device.class_of_device):
+            return
 
         # Verify that the adapter could pair with the device.
         # Also set the device trusted when pairing is done.
         # Device will be connected at the end of pairing.
-        self.test_pairing(device.address, device.pin, trusted=True)
+        if not self.test_pairing(device.address, device.pin, trusted=True):
+            return
 
         # Test if the discovered device name is correct.
         # Sometimes, it takes quite a long time after discovering
         # the device (more than 60 seconds) to resolve the device name.
         # Hence, it is safer to test the device name after pairing and
         # connection is done.
-        self.test_device_name(device.address, device.name)
+        if not self.test_device_name(device.address, device.name):
+            return
 
         # Run hid test to make sure profile is connected
-        check_connected_method(device)
+        if not check_connected_method(device):
+            return
 
         # Test if the device is still connected after suspend/resume.
         if suspend_resume:
             self.suspend_resume()
 
             time.sleep(self.PAIR_TEST_SLEEP_SECS)
-            self.test_device_is_paired(device.address)
+            if not self.test_device_is_paired(device.address):
+                return
 
 
             # check if peripheral is connected after suspend resume
             if not self.ignore_failure(check_connected_method, device):
                 logging.info("device not connected after suspend_resume")
                 self.test_connection_by_device(device)
+                time.sleep(self.PAIR_TEST_SLEEP_SECS)
+                if not check_connected_method(device):
+                    return
             else:
                 logging.info("device remains connected after suspend_resume")
 
             time.sleep(self.PAIR_TEST_SLEEP_SECS)
-            check_connected_method(device)
-
-            time.sleep(self.PAIR_TEST_SLEEP_SECS)
-            self.test_device_name(device.address, device.name)
+            if not self.test_device_name(device.address, device.name):
+                return
 
         # Test if the device is still connected after reboot.
         # if reboot:
@@ -115,43 +125,58 @@
         #     self.test_device_name(device.address, device.name)
 
         # Verify that the adapter could disconnect the device.
-        self.test_disconnection_by_adapter(device.address)
+        if not self.test_disconnection_by_adapter(device.address):
+            return
 
         time.sleep(self.PAIR_TEST_SLEEP_SECS)
-        if device.can_init_connection:
-            # Verify that the device could initiate the connection.
-            self.test_connection_by_device(device)
 
-            # With raspberry pi peer, it takes a moment before the device is
-            # registered as an input device. Without delay, the input recorder
-            # doesn't find the device
-            time.sleep(1)
-            check_connected_method(device)
-        else:
-            # Reconnect so that we can test disconnection from the kit
-            self.test_connection_by_adapter(device.address)
+        def test_connection():
+            """Tests connection inited by either the device or the adapter"""
+            if device.can_init_connection:
+                # Verify that the device could initiate the connection.
+                if not self.test_connection_by_device(device):
+                    return False
 
-        # TODO(alent): Needs a new capability, but this is a good proxy
-        if device.can_init_connection:
-            # Verify that the device could initiate the disconnection.
-            self.test_disconnection_by_device(device)
-        else:
-            # Reconnect so that we can test disconnection from the kit
-            self.test_disconnection_by_adapter(device.address)
+                # With a Raspberry Pi peer, it takes a moment before the device
+                # is registered as an input device. Without a delay, the input
+                # recorder doesn't find the device.
+                time.sleep(1)
+                return check_connected_method(device)
+            # Adapter-initiated connection.
+            # Reconnect so that we can test disconnection from the kit.
+            return self.test_connection_by_adapter(device.address)
+
+        if not test_connection():
+            return
+
+        def test_disconnection():
+            """Tests disconnection inited by either the device or the adapter"""
+            # TODO(alent): Needs a new capability, but this is a good proxy
+            if device.can_init_connection:
+                # Verify that the device could initiate the disconnection.
+                return self.test_disconnection_by_device(device)
+            # Adapter-initiated disconnection.
+            return self.test_disconnection_by_adapter(device.address)
+
+        if not test_disconnection():
+            return
 
         # Verify that the adapter could remove the paired device.
-        self.test_remove_pairing(device.address)
+        if not self.test_remove_pairing(device.address):
+            return
 
         # Check if the device could be re-paired after being forgotten.
         if pairing_twice:
             # Test if the adapter could discover the target device again.
             time.sleep(self.PAIR_TEST_SLEEP_SECS)
-            self.test_discover_device(device.address)
+            if not self.test_discover_device(device.address):
+                return
 
             # Verify that the adapter could pair with the device again.
             # Also set the device trusted when pairing is done.
             time.sleep(self.PAIR_TEST_SLEEP_SECS)
-            self.test_pairing(device.address, device.pin, trusted=True)
+            if not self.test_pairing(device.address, device.pin, trusted=True):
+                return
 
             # Verify that the adapter could remove the paired device again.
             time.sleep(self.PAIR_TEST_SLEEP_SECS)
@@ -167,7 +192,7 @@
         # crbug:905374
         # self.test_stop_discovery()
         time.sleep(self.PAIR_TEST_SLEEP_SECS)
-        self.test_pairing(device.address, device.pin, trusted=True)
+        self.test_pairing(device.address, device.pin, trusted=False)
 
         # Verify device is now connected
         self.test_device_is_connected(device.address)
@@ -207,6 +232,71 @@
                          total_duration_by_adapter/loop_cnt)
 
 
+    def connect_disconnect_by_device_loop(
+            self,
+            device,
+            loops,
+            device_type,
+            check_connected_method=lambda device: True):
+        """Perform a connect disconnect loop test"""
+
+        # Reset the adapter to forget previously paired devices if any.
+        self.test_reset_on_adapter()
+        self.test_pairable()
+        # First pair and disconnect, to emulate real life scenario
+        self.test_discover_device(device.address)
+        time.sleep(self.PAIR_TEST_SLEEP_SECS)
+        self.test_pairing(device.address, device.pin, trusted=True)
+
+        # Verify device is now connected
+        self.test_device_is_connected(device.address)
+        self.test_hid_device_created(device.address)
+
+        # Make device not discoverable and disconnect
+        self.test_device_set_discoverable(device, False)
+        self.test_disconnection_by_device(device)
+
+        total_reconnection_duration = 0
+        loop_cnt = 0
+        for i in range(0, loops):
+
+            # Verify device didn't connect automatically
+            time.sleep(2)
+            self.test_device_is_not_connected(device.address)
+
+            start_time = time.time()
+            if 'BLE' in device_type:
+                self.test_device_set_discoverable(device, True)
+                self.test_device_is_connected(device.address,
+                                              sleep_interval=0.1)
+            else:
+                self.test_connection_by_device(device, post_connection_delay=0)
+
+            check_connected_method(device)
+            end_time = time.time()
+            time_diff = end_time - start_time
+
+            if 'BLE' in device_type:
+                self.test_device_set_discoverable(device, False)
+
+            self.test_disconnection_by_device(device)
+
+            if not bool(self.fails):
+                loop_cnt += 1
+                total_reconnection_duration += time_diff
+                logging.info('%d: Connection establishment duration %f sec', i,
+                             time_diff)
+            else:
+                break
+
+        self.test_remove_pairing(device.address)
+        if not bool(self.fails):
+            average_reconnection_duration = total_reconnection_duration / loop_cnt
+            logging.info('Average duration (by device) %f sec',
+                         average_reconnection_duration)
+            return average_reconnection_duration
+
+
     def auto_reconnect_loop(self,
                             device,
                             loops,
@@ -254,3 +344,19 @@
         if not bool(self.fails):
             logging.info('Average Reconnection duration %f sec',
                          total_reconnection_duration/loop_cnt)
+
+
+    def hid_reconnect_speed(self, device, device_type):
+        """Test the HID device reconnect speed
+
+        @param device: the metadata of the peer device
+        @param device_type: The device type (used to check if it's LE)
+        """
+
+        duration = self.connect_disconnect_by_device_loop(
+                device=device,
+                loops=3,
+                device_type=device_type,
+                check_connected_method=self.test_hid_device_created_speed)
+        if duration is not None:
+            self.test_hid_device_reconnect_time(duration, device_type)
diff --git a/server/cros/bluetooth/bluetooth_adapter_qr_tests.py b/server/cros/bluetooth/bluetooth_adapter_qr_tests.py
new file mode 100644
index 0000000..5175f64
--- /dev/null
+++ b/server/cros/bluetooth/bluetooth_adapter_qr_tests.py
@@ -0,0 +1,539 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Server side Bluetooth Quality Report tests."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import logging
+import os
+from threading import Thread
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_audio_tests import (
+        BluetoothAdapterAudioTests)
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_hidreports_tests import (
+        BluetoothAdapterHIDReportTests)
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_tests import (
+        test_retry_and_log)
+
+# List of the controllers that do not support the Bluetooth Quality Report.
+QR_UNSUPPORTED_CHIPSETS = [
+        'MVL-8897', 'MVL-8997',
+        'Intel-AC7260', 'Intel-AC7265',
+        'QCA-6174A-3-UART', 'QCA-6174A-5-USB'
+]
+
+# An example AOSP BQR event in btsnoop.log looks like:
+# = bluetoothd: quality: BQR Quality Report                       75.018599
+# = bluetoothd: quality:   quality_report_id 1                    75.018658
+# = bluetoothd: quality:   packet_type 2                          75.019402
+# = bluetoothd: quality:   conn_handle 1                          75.019477
+# = bluetoothd: quality:   conn_role 0                            75.019539
+# = bluetoothd: quality:   tx_power_level 0                       75.019601
+# = bluetoothd: quality:   rssi -29                               75.019665
+# = bluetoothd: quality:   snr 0                                  75.019727
+# = bluetoothd: quality:   unused_afh_channel_count 3             75.019787
+# = bluetoothd: quality:   afh_select_unideal_channel_count 0     75.019847
+# = bluetoothd: quality:   lsto 20000.00                          75.019906
+# = bluetoothd: quality:   conn_piconet_clock 9143780.00          75.019965
+# = bluetoothd: quality:   retransmission_count 0                 75.020050
+# = bluetoothd: quality:   no_rx_count 0                          75.020120
+# = bluetoothd: quality:   nak_count 0                            75.020420
+# = bluetoothd: quality:   last_tx_ack_timestamp 9143754.06       75.020485
+# = bluetoothd: quality:   flow_off_count 0                       75.020551
+# = bluetoothd: quality:   last_flow_on_timestamp 9143779.06      75.020610
+# = bluetoothd: quality:   buffer_overflow_bytes 0                75.020670
+# = bluetoothd: quality:   buffer_underflow_bytes 150492          75.020732
+
+# An example Telemetry event for A2DP (ACL) in btsnoop.log looks like:
+# = bluetoothd: quality: Intel Extended Telemetry Event           5.251502
+# = bluetoothd: quality:   ACL connection handle: 0x0100          5.251520
+# = bluetoothd: quality:   Rx HEC errors: 0                       5.251546
+# = bluetoothd: quality:   Rx CRC errors: 0                       5.251558
+# = bluetoothd: quality:   Packets from host: 222                 5.251581
+# = bluetoothd: quality:   Tx packets: 221                        5.251594
+# = bluetoothd: quality:   Tx packets 0 retries: 217              5.251617
+# = bluetoothd: quality:   Tx packets 1 retries: 4                5.251630
+# = bluetoothd: quality:   Tx packets 2 retries: 0                5.251651
+# = bluetoothd: quality:   Tx packets 3 retries: 0                5.251662
+# = bluetoothd: quality:   Tx packets 4 retries: 0                5.251686
+# = bluetoothd: quality:   Tx DH1 packets: 0                      5.251703
+# = bluetoothd: quality:   Tx DH3 packets: 0                      5.251725
+# = bluetoothd: quality:   Tx DH5 packets: 0                      5.251762
+# = bluetoothd: quality:   Tx 2DH1 packets: 0                     5.251790
+# = bluetoothd: quality:   Tx 2DH3 packets: 0                     5.251818
+# = bluetoothd: quality:   Tx 2DH5 packets: 0                     5.251847
+# = bluetoothd: quality:   Tx 3DH1 packets: 55                    5.251872
+# = bluetoothd: quality:   Tx 3DH3 packets: 2                     5.251898
+# = bluetoothd: quality:   Tx 3DH5 packets: 164                   5.251926
+# = bluetoothd: quality:   Rx packets: 1304                       5.251953
+# = bluetoothd: quality:   ACL link throughput: 97143             5.251978
+# = bluetoothd: quality:   ACL max packet latency: 25625          5.252023
+# = bluetoothd: quality:   ACL avg packet latency: 9143           5.252052
+
+# An example Telemetry event for HFP (SCO) in btsnoop.log looks like:
+# = bluetoothd: quality: Intel Extended Telemetry Event                5.894338
+# = bluetoothd: quality:   SCO connection handle: 0x010a               5.894359
+# = bluetoothd: quality:   Packets from host: 1584                     5.894378
+# = bluetoothd: quality:   Tx packets: 637                             5.894397
+# = bluetoothd: quality:   Rx payload lost: 0                          5.894417
+# = bluetoothd: quality:   Tx payload lost: 24                         5.894436
+# = bluetoothd: quality:   Rx No SYNC errors (slot 0): 0               5.894454
+# = bluetoothd: quality:   Rx No SYNC errors (slot 1): 20              5.894474
+# = bluetoothd: quality:   Rx No SYNC errors (slot 2): 0               5.894492
+# = bluetoothd: quality:   Rx No SYNC errors (slot 3): 0               5.894511
+# = bluetoothd: quality:   Rx No SYNC errors (slot 4): 0               5.894531
+# = bluetoothd: quality:   Rx HEC errors (slot 0): 65536               5.894550
+# = bluetoothd: quality:   Rx HEC errors (slot 1): 1                   5.894569
+# = bluetoothd: quality:   Rx HEC errors (slot 2): 0                   5.894590
+# = bluetoothd: quality:   Rx HEC errors (slot 3): 0                   5.894608
+# = bluetoothd: quality:   Rx HEC errors (slot 4): 0                   5.894627
+# = bluetoothd: quality:   Rx CRC errors (slot 0): 0                   5.894645
+# = bluetoothd: quality:   Rx CRC errors (slot 1): 0                   5.894664
+# = bluetoothd: quality:   Rx CRC errors (slot 2): 0                   5.894682
+# = bluetoothd: quality:   Rx CRC errors (slot 3): 0                   5.894701
+# = bluetoothd: quality:   Rx CRC errors (slot 4): 0                   5.894720
+# = bluetoothd: quality:   Rx NAK errors (slot 0): 41549824            5.894738
+# = bluetoothd: quality:   Rx NAK errors (slot 1): 4                   5.894757
+# = bluetoothd: quality:   Rx NAK errors (slot 2): 0                   5.894775
+# = bluetoothd: quality:   Rx NAK errors (slot 3): 0                   5.894806
+# = bluetoothd: quality:   Rx NAK errors (slot 4): 0                   5.894824
+# = bluetoothd: quality:   Failed Tx due to Wifi coex (slot 0): 0      5.894843
+# = bluetoothd: quality:   Failed Tx due to Wifi coex (slot 1): 0      5.894861
+# = bluetoothd: quality:   Failed Tx due to Wifi coex (slot 2): 0      5.894876
+# = bluetoothd: quality:   Failed Tx due to Wifi coex (slot 3): 0      5.894890
+# = bluetoothd: quality:   Failed Tx due to Wifi coex (slot 4): 0      5.894903
+# = bluetoothd: quality:   Failed Rx due to Wifi coex (slot 0): 0      5.894917
+# = bluetoothd: quality:   Failed Rx due to Wifi coex (slot 1): 0      5.894930
+# = bluetoothd: quality:   Failed Rx due to Wifi coex (slot 2): 0      5.894944
+# = bluetoothd: quality:   Failed Rx due to Wifi coex (slot 3): 0      5.894957
+# = bluetoothd: quality:   Failed Rx due to Wifi coex (slot 4): 0      5.894971
+# = bluetoothd: quality:   Late samples inserted based on CDC: 0       5.894984
+# = bluetoothd: quality:   Samples dropped: 0                          5.894997
+# = bluetoothd: quality:   Mute samples sent at initial connection: 18 5.895032
+# = bluetoothd: quality:   PLC injection data: 0                       5.895050
+
+# Define constants
+QR_EVENT_PERIOD = 5
+TELEMETRY_NUM_SLOTS = 5
+TELEMETRY_NUM_RETRIES = 5
+TELEMETRY_NUM_PACKET_TYPES = 9
+
+# Define event types
+AOSP_BQR = 0
+TELEMETRY_ACL = 1
+TELEMETRY_SCO = 2
+
+# Define event subevts
+AOSP_SUBEVTS = [
+        'quality_report_id', 'packet_type', 'conn_handle', 'conn_role',
+        'tx_power_level', 'rssi', 'snr', 'unused_afh_channel_count',
+        'afh_select_unideal_channel_count', 'lsto', 'conn_piconet_clock',
+        'retransmission_count', 'no_rx_count', 'nak_count',
+        'last_tx_ack_timestamp', 'flow_off_count',
+        'last_flow_on_timestamp', 'buffer_overflow_bytes',
+        'buffer_underflow_bytes'
+]
+
+BREDR_PACKET_TYPE = [
+        'DH1', 'DH3', 'DH5', '2DH1', '2DH3', '2DH5', '3DH1', '3DH3', '3DH5'
+]
+
+TELEMETRY_ACL_SUBEVTS = [
+        'ACL_connection_handle', 'Rx_HEC_errors', 'Rx_CRC_errors',
+        'Packets_from_host', 'Tx_packets', 'Rx_packets',
+        'ACL_link_throughput', 'ACL_max_packet_latency',
+        'ACL_avg_packet_latency'
+]
+
+for t in BREDR_PACKET_TYPE:
+    TELEMETRY_ACL_SUBEVTS.append(f'Tx_{t}_packets')
+
+for i in range(TELEMETRY_NUM_RETRIES):
+    TELEMETRY_ACL_SUBEVTS.append(f'Tx_packets_{i}_retries')
+
+TELEMETRY_SCO_SUBEVTS = [
+        'Tx_packets', 'Rx_payload_lost',
+        'Late_samples_inserted_based_on_CDC', 'Samples_dropped',
+        'Mute_samples_sent_at_initial_connection', 'PLC_injection_data'
+]
+
+for i in range(TELEMETRY_NUM_SLOTS):
+    TELEMETRY_SCO_SUBEVTS.append(f'Rx_No_SYNC_errors_(slot_{i})')
+    TELEMETRY_SCO_SUBEVTS.append(f'Rx_HEC_errors_(slot_{i})')
+    TELEMETRY_SCO_SUBEVTS.append(f'Rx_CRC_errors_(slot_{i})')
+    TELEMETRY_SCO_SUBEVTS.append(f'Rx_NAK_errors_(slot_{i})')
+    TELEMETRY_SCO_SUBEVTS.append(f'Failed_Tx_due_to_Wifi_coex_(slot_{i})')
+    TELEMETRY_SCO_SUBEVTS.append(f'Failed_Rx_due_to_Wifi_coex_(slot_{i})')
+
+START_TIME_SUBEVT = 'start_time'
+END_TIME_SUBEVT = 'end_time'
+QUALITY_PREFIX_STRING = '= bluetoothd: quality:'
+
+# Define event handler ids and last ids
+AOSP_HANDLER_SUBEVT = 'conn_handle'
+AOSP_LAST_SUBEVT = 'buffer_underflow_bytes'
+
+TELEMETRY_ACL_HANDLER_SUBEVT = 'ACL_connection_handle'
+TELEMETRY_ACL_LAST_SUBEVT = 'ACL_avg_packet_latency'
+
+TELEMETRY_SCO_HANDLER_SUBEVT = 'SCO_connection_handle'
+TELEMETRY_SCO_LAST_SUBEVT = 'PLC_injection_data'
+
+HANDLER_SUBEVT = (AOSP_HANDLER_SUBEVT, TELEMETRY_ACL_HANDLER_SUBEVT,
+                    TELEMETRY_SCO_HANDLER_SUBEVT)
+END_SUBEVT = (AOSP_LAST_SUBEVT, TELEMETRY_ACL_LAST_SUBEVT,
+                TELEMETRY_SCO_LAST_SUBEVT)
+CHECK_SUBEVTS = (AOSP_SUBEVTS, TELEMETRY_ACL_SUBEVTS,
+                    TELEMETRY_SCO_SUBEVTS)
+NOT_EVENT_SUBEVTS = (START_TIME_SUBEVT, END_TIME_SUBEVT)
+
+def _read_line(line):
+    """Reading a line of log produced by the quality event packet.
+
+    A line of log looks like:
+
+        = bluetoothd: quality:   buffer_underflow_bytes 150492 75.020732
+
+    line[0:2] is the prefix,
+    line[3:-2] is the data subevt, may separate by some spaces,
+    line[-2] is the value of the subevt,
+    line[-1] is the sending time of the data.
+
+    @returns: subevt, name of the variable in the packet.
+                value, value of the variable in the packet.
+                time, sending time of the variable in the packet.
+
+    @raises: error.TestError if failed.
+    """
+    try:
+        line = line.split()
+        subevt = '_'.join(line[3:-2]).strip(':')
+        value = line[-2]
+        time_ = line[-1]
+    except Exception as e:
+        raise error.TestError(
+                'Exception in reading Bluetooth Quality Report: %s' % e)
+    return subevt, value, time_
+
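
For reference, a minimal standalone sketch of the same split/join parsing described in the docstring above; it is not part of the patch, and the sample line is taken from the example AOSP BQR event in the comments.

    # Sketch of the whitespace-split parsing used by _read_line above.
    sample = '= bluetoothd: quality:   buffer_underflow_bytes 150492 75.020732'
    fields = sample.split()
    subevt = '_'.join(fields[3:-2]).strip(':')  # 'buffer_underflow_bytes'
    value = fields[-2]                          # '150492'
    time_ = fields[-1]                          # '75.020732'
    print(subevt, value, time_)

Multi-word names such as 'ACL connection handle:' collapse to 'ACL_connection_handle' by the same join/strip step.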
+def _handler_to_base_10(handler):
+    """Convert handler from string to base 10 integer.
+
+    @param handler: a string of quality report handler.
+
+    @returns: integer represents the handler.
+    """
+    # Either base 10 or base 16.
+    if handler.startswith('0x'):
+        handler = int(handler, 16)
+    else:
+        handler = int(handler)
+
+    return handler
+
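
As a quick sanity check of the handler conversion, using values taken from the example logs above (sketch only, not part of the patch):

    # Exercises the same base-10/base-16 logic as _handler_to_base_10.
    def handler_to_int(handler):
        return int(handler, 16) if handler.startswith('0x') else int(handler)

    assert handler_to_int('0x0100') == 256  # Telemetry ACL connection handle
    assert handler_to_int('1') == 1         # AOSP BQR conn_handle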
+def collect_qr_event_from_log(file_path):
+    """Collecting all the quality event reports from the btsnoop log.
+
+    This function will grep all the quality event from the log
+    and store into a dict.
+
+    @param file_path: where the btsnoop log place at.
+
+    @returns: all_reports, a dict with the format:
+                {'handler1':packet_list1, 'handler2':packet_list2, ...}.
+
+    @raises: error.TestError if failed.
+    """
+    all_reports = collections.defaultdict(list)
+
+    lines = None
+    with open(file_path, 'r') as f:
+        lines = f.readlines()
+
+    report, handler = {}, None
+    for line in lines:
+        if not line.startswith(QUALITY_PREFIX_STRING):
+            continue
+
+        subevt, value, time_ = _read_line(line)
+        if not report:
+            report[START_TIME_SUBEVT] = time_
+        else:
+            report[subevt] = value
+
+            if subevt in HANDLER_SUBEVT:
+                handler = _handler_to_base_10(value)
+
+            if subevt in END_SUBEVT:
+                if handler is None:
+                    raise error.TestError(
+                            'Report handler is None type')
+
+                report[END_TIME_SUBEVT] = time_
+                all_reports[handler].append(report)
+                report, handler = {}, None
+
+    logging.debug("========== Got reports: ========== ")
+    for handler, reports in all_reports.items():
+        logging.debug('handler: %s \n', handler)
+        for report in reports:
+            logging.debug('report: %s \n', report)
+        logging.debug('\n')
+
+    return all_reports
+
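
The returned structure groups reports by integer connection handle; a hedged sketch of how a caller might inspect it (the file path and printed fields are illustrative assumptions, not values mandated by this patch):

    all_reports = collect_qr_event_from_log('/tmp/btsnoop.log')
    for handler, reports in all_reports.items():
        # Each report is a dict of subevt -> value plus start_time/end_time.
        print(handler, len(reports), reports[0].get('start_time'))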
+class BluetoothAdapterQRTests(BluetoothAdapterHIDReportTests,
+                              BluetoothAdapterAudioTests):
+    """Server side Bluetooth adapter QR test class."""
+    BTSNOOP_LOG_DIR = '/tmp'
+    BTSNOOP_LOG_FILENAME = 'btsnoop.log'
+    BTSNOOP_LOG_FILE = os.path.join(BTSNOOP_LOG_DIR, BTSNOOP_LOG_FILENAME)
+
+    def collect_qr_event_from_log(self):
+        """Collect the quality event from btsnoop log"""
+        return collect_qr_event_from_log(self.BTSNOOP_LOG_FILE)
+
+    @test_retry_and_log(False)
+    def test_check_connection_handle_unique(self, reports, handler_subevt):
+        """Checking if the handler subevt in the quality packet list is unique.
+
+        @param reports: a list of quality event reports.
+        @param handler_subevt: specify a handler subevt in HANDLER_SUBEVT to
+                               check.
+
+        @returns: True if the handler subevt is unique in the packet list,
+                  False otherwise.
+        """
+        reports_len = len(reports)
+        if reports_len <= 1:
+            return True
+
+        handlers = [reports[i][handler_subevt] for i in range(reports_len)]
+        return len(set(handlers)) == 1
+
+    @test_retry_and_log(False)
+    def test_check_reports_completeness(self, reports, check_subevt_list):
+        """Check if all sub-events in check_subevt_list can be found in reports.
+
+        @param reports: a list of quality event reports.
+        @param check_subevt_list: a set of subevts that define the content of
+                              the quality event packet.
+
+        @returns: True if all sub-events in check_subevt_list can be found in
+                  reports, False otherwise.
+        """
+        missing_subevt = []
+        for report in reports:
+            # Check the completeness of the packet.
+            for check_subevt in check_subevt_list:
+                if check_subevt not in report:
+                    missing_subevt.append(check_subevt)
+
+            # Check the length of the packet.
+            if (len(check_subevt_list) + len(NOT_EVENT_SUBEVTS)) > len(report):
+                logging.error('Error in test_check_reports_completeness(): '
+                              'wrong packet size')
+                return False
+
+        if missing_subevt:
+            logging.info(
+                    'Error in test_check_reports_completeness(): '
+                    'missing subevt: %s in all reports', missing_subevt)
+            return False
+        return True
+
+    @test_retry_and_log(False)
+    def test_check_period(self, reports, report_type,
+                          tolerable_deviation=0.05):
+        """Checking if the sending time between adjecent packet is tolerable.
+
+        @param reports: a list of quality event reports.
+        @param tolerable_deviation : the percentage of the tolerable deviation
+                                     to the QR_EVENT_PERIOD.
+
+        @returns: True if all the time differences between reports are
+                  less than the tolerance.
+        """
+        if len(reports) <= 1:
+            return True
+
+        tolerance = tolerable_deviation * QR_EVENT_PERIOD
+
+        # According to the spec of AOSP, there are 4 kinds of sub-events and we
+        # only care about the sub-event whose quality_report_id is 1.
+        if report_type == AOSP_BQR:
+            reports = [
+                    report for report in reports
+                    if report['quality_report_id'] == '1'
+            ]
+
+        for i in range(1, len(reports)):
+            time_diff = (float(reports[i][START_TIME_SUBEVT]) -
+                         float(reports[i - 1][END_TIME_SUBEVT]))
+
+            if time_diff < 0:
+                logging.error('Error in test_check_period(): time_diff < 0')
+                return False
+            if abs(time_diff - QR_EVENT_PERIOD) >= tolerance:
+                logging.error('Error in test_check_period: tolerance exceed')
+                return False
+        return True
+
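
With the constants above (QR_EVENT_PERIOD = 5 seconds) and the default tolerable_deviation of 0.05, adjacent reports must arrive 5 +/- 0.25 s apart. A minimal numeric sketch of that check, with illustrative gap values:

    QR_EVENT_PERIOD = 5
    tolerance = 0.05 * QR_EVENT_PERIOD          # 0.25 s
    gaps = [5.1, 4.9, 5.2]                      # seconds between adjacent reports
    ok = all(abs(g - QR_EVENT_PERIOD) < tolerance for g in gaps)
    print(ok)                                   # True: every gap is within 5 +/- 0.25 s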
+    @test_retry_and_log(False)
+    def test_send_log(self):
+        """Sending the btsnoop log from the DUT back to the autoserv.
+
+        This test can be used only when the self.dut_btmon_log_path
+        was set and this variable is set in the quick_test_init() by default.
+
+        @returns: True if success, False otherwise.
+        """
+        btsnoop_path = self.BTSNOOP_LOG_FILE
+        try:
+            cmd = f'btmon -C 100 -r {self.dut_btmon_log_path} > {btsnoop_path}'
+            res = self.host.run(cmd).stdout
+            logging.debug('run command: %s, result: %s', cmd, res)
+
+            self.host.get_file(btsnoop_path, btsnoop_path, delete_dest=True)
+        except Exception as e:
+            logging.error('Exception in test_send_log: %s', e)
+            return False
+        return True
+
+    @test_retry_and_log(False)
+    def test_not_receive_qr_event_log(self):
+        """Checking if not reveice the qr event log"""
+        all_reports = self.collect_qr_event_from_log()
+        logging.debug("all_reports: %s", all_reports)
+        return len(all_reports) == 0
+
+    # ---------------------------------------------------------------
+    # Definitions of all Bluetooth Quality Report test sequences
+    # ---------------------------------------------------------------
+
+    def check_qr_event_log(self, num_devices):
+        """Checking if the all the packet list pass the criteria.
+
+        This function check four things:
+                - the number of event handlers is greater than the num_devices
+                - test_check_connection_handle_unique
+                - test_check_reports_completeness
+                - test_check_period
+
+        @param num_devices: number of Bluetooth devices expected.
+        """
+        all_reports = self.collect_qr_event_from_log()
+
+        if len(all_reports) < num_devices:
+            raise error.TestFail(
+                    'Error in test_check_qr_event_log: wrong '
+                    'handler number: %s, expected: %s' % (len(all_reports),
+                    num_devices))
+
+        for reports in all_reports.values():
+            report_type = None
+            for type_, handler_subevt in enumerate(HANDLER_SUBEVT):
+                if handler_subevt in reports[0]:
+                    report_type = type_
+                    break
+            if report_type is None:
+                raise error.TestError('report_type is None')
+
+            self.test_check_connection_handle_unique(
+                    reports, HANDLER_SUBEVT[report_type])
+            self.test_check_reports_completeness(
+                    reports, CHECK_SUBEVTS[report_type])
+            self.test_check_period(reports, report_type)
+
+    def qr_a2dp(self, device, test_profile):
+        """Checking if quality event works fine with A2DP streaming.
+
+        @param device: the bluetooth peer device.
+        @param test_profile: the test profile to used.
+        """
+        self.test_a2dp_sinewaves(device, test_profile, duration=None)
+
+    def qr_hfp_dut_as_src(self, device, test_profile):
+        """Checking if quality event works fine with HFP streaming.
+
+        @param device: the bluetooth peer device.
+        @param test_profile: the test profile to used.
+        """
+        self.hfp_dut_as_source(device, test_profile)
+
+    def qr_disabled_a2dp(self, device, test_profile):
+        """Checking if disable logging quality event success.
+
+        @param device: the bluetooth peer device.
+        @param test_profile: the test profile to used.
+        """
+        self.enable_disable_debug_log(enable=True)
+        self.enable_disable_quality_debug_log(enable=True)
+        time.sleep(3)
+        self.enable_disable_quality_debug_log(enable=False)
+        self.enable_disable_debug_log(enable=False)
+        time.sleep(3)
+
+        self.dut_btmon_log_path = self.start_new_btmon()
+        self.test_a2dp_sinewaves(device, test_profile, duration=None)
+        self.test_send_log()
+        self.test_not_receive_qr_event_log()
+
+    def qr_a2dp_cl_keyboard(self, audio_device, keyboard_device, test_profile):
+        """Checking if quality event works fine with multiple devices.
+
+        @param audio_device: the bluetooth audio device.
+        @param keyboard_device: the bluetooth keyboard device.
+        @param test_profile: the audio test profile to used.
+        """
+        p1 = Thread(target=self.test_keyboard_input_from_trace,
+                    args=(keyboard_device, "simple_text"))
+        p2 = Thread(target=self.test_a2dp_sinewaves,
+                    args=(audio_device, test_profile, None))
+        p1.start()
+        p2.start()
+        p1.join()
+        p2.join()
+
+    def qr_hfp_dut_as_sink_cl_keyboard(self, audio_device, keyboard_device,
+                                       test_profile):
+        """Checking if quality event works fine with multiple devices.
+
+        @param audio_device: the bluetooth audio device.
+        @param keyboard_device: the bluetooth keyboard device.
+        @param test_profile: the audio test profile to used.
+        """
+        p1 = Thread(target=self.test_keyboard_input_from_trace,
+                    args=(keyboard_device, "simple_text"))
+        p2 = Thread(target=self.hfp_dut_as_sink,
+                    args=(audio_device, test_profile))
+        p1.start()
+        p2.start()
+        p1.join()
+        p2.join()
+
+    def qr_power_cycle_a2dp(self, device, test_profile):
+        """Checking if the enable debug state persists after power reset.
+
+        @param device: the bluetooth audio device.
+        @param test_profile: the audio test profile to used.
+        """
+        self.test_reset_off_adapter()
+        time.sleep(3)
+        self.test_reset_on_adapter()
+
+        # Need to connect to the device again.
+        self.test_bluetoothd_running()
+        self.test_discover_device(device.address)
+        self.test_pairing(device.address, device.pin, trusted=True)
+        self.test_connection_by_adapter(device.address)
+
+        self.dut_btmon_log_path = self.start_new_btmon()
+
+        self.test_a2dp_sinewaves(device, test_profile, duration=None)
diff --git a/server/cros/bluetooth/bluetooth_adapter_quick_tests.py b/server/cros/bluetooth/bluetooth_adapter_quick_tests.py
index ec1f786..9162594 100644
--- a/server/cros/bluetooth/bluetooth_adapter_quick_tests.py
+++ b/server/cros/bluetooth/bluetooth_adapter_quick_tests.py
@@ -20,43 +20,27 @@
 
 import common
 from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros.bluetooth import bluetooth_quick_tests_base
 from autotest_lib.server import site_utils
+from autotest_lib.server.cros.bluetooth import bluetooth_peer_update
 from autotest_lib.server.cros.bluetooth import bluetooth_adapter_tests
+from autotest_lib.server.cros.bluetooth import bluetooth_attenuator
 from autotest_lib.server.cros.multimedia import remote_facade_factory
 from autotest_lib.client.bin import utils
 from six.moves import range
 
-class BluetoothAdapterQuickTests(bluetooth_adapter_tests.BluetoothAdapterTests):
-    """This class provide wrapper function for Bluetooth quick health test
-    batches or packages.
-    The Bluetooth quick test infrastructure provides a way to quickly run a set
-    of tests. As for today, auto-test ramp up time per test is about 90-120
-    seconds, where a typical Bluetooth test may take ~30-60 seconds to run.
+PROFILE_CONNECT_WAIT = 15
+SUSPEND_SEC = 15
+EXPECT_NO_WAKE_SUSPEND_SEC = 30
+EXPECT_PEER_WAKE_SUSPEND_SEC = 60
+EXPECT_PEER_WAKE_RESUME_BY = 30
 
-    The quick test infra, implemented in this class, saves this huge overhead
-    by running only the minimal reset and cleanup operations required between
-    each set of tests (takes a few seconds).
 
-    This class provides wrapper functions to start and end a test, a batch or a
-    package. A batch is defined as a set of tests, preferably with a common
-    subject. A package is a set of batches.
-    This class takes care of tests, batches, and packages test results, and
-    prints out summaries to results. The class also resets and cleans up
-    required states between tests, batches and packages.
+class BluetoothAdapterQuickTests(
+        bluetooth_adapter_tests.BluetoothAdapterTests,
+        bluetooth_quick_tests_base.BluetoothQuickTestsBase):
+    """Bluetooth quick test implementation for server tests."""
 
-    A batch can also run as a separate auto-test. There is a place holder to
-    add a way to run a specific test of a batch autonomously.
-
-    A batch can be implemented by inheriting from this class, and using its
-    wrapper functions. A package can be implemented by inheriting from a set of
-    batches.
-
-    Adding a test to one of the batches is as easy as adding a method to the
-    class of the batch.
-    """
-
-    # Some delay is needed between tests. TODO(yshavit): investigate and remove
-    TEST_SLEEP_SECS = 3
     GCS_MTBF_BUCKET = 'gs://chromeos-mtbf-bt-results/'
 
 
@@ -67,7 +51,7 @@
 
         # Grab current device list for initialization
         connected_devices = self.devices
-        self.cleanup(test_state='MID')
+        self.cleanup_bt_test(test_state='MID')
 
         for device_type, device_list in connected_devices.items():
             for device in device_list:
@@ -83,58 +67,111 @@
             logging.info('Starting peer devices...')
             self.get_device_rasp(devices)
 
-            # Grab all the addresses to verify RSSI
-            addresses = []
+            # Grab all the devices to verify RSSI
+            devices = []
             for device_type, device_list in self.devices.items():
                 # Skip bluetooth_tester since it won't be discoverable
                 if 'TESTER' in device_type:
                     continue
 
                 for device in device_list:
-                    addresses.append(device.address)
+                    devices.append(device)
                     self.start_agent(device)
 
-            # Make sure device RSSI is sufficient
-            self.verify_device_rssi(addresses)
-
-    def _print_delimiter(self):
-        logging.info('=======================================================')
+            if self.rssi_check:
+                # Make sure device RSSI is sufficient
+                self.verify_device_rssi(devices)
+            else:
+                logging.info('Skip RSSI check.')
 
     @staticmethod
-    def _get_update_btpeers_arguments(args_dict=None):
-        """Parse the update_btpeers argument"""
-        key = 'update_btpeers'
+    def _get_bool_arg(arg, args_dict, default_value):
+        """Get the target bool argument from args_dict.
+
+        @param arg: the target argument to query
+        @param args_dict: the argument dictionary
+        @param default_value: the value to return if arg is not in
+                args_dict or if its value is neither 'true' nor 'false'
+
+        @returns: the bool value of the target argument
+        """
+        if args_dict is not None and arg in args_dict:
+            arg_value = args_dict[arg].lower()
+            if arg_value == 'true':
+                return True
+            elif arg_value == 'false':
+                return False
+        return default_value
+
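
A hypothetical usage sketch of the boolean-argument parsing above; the keys and values are examples of what might be passed on the autotest command line, not values required by this patch:

    args_dict = {'update_btpeers': 'False', 'rssi_check': 'true'}

    BluetoothAdapterQuickTests._get_bool_arg('update_btpeers', args_dict, True)
    # -> False
    BluetoothAdapterQuickTests._get_bool_arg('rssi_check', args_dict, True)
    # -> True
    BluetoothAdapterQuickTests._get_bool_arg('missing_key', args_dict, True)
    # -> True (falls back to the default)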
+    @staticmethod
+    def _get_clean_kernel_log_arguments(args_dict=None):
+        """Parse the clean_kernel_log argument"""
+        key = 'clean_kernel_log'
         if args_dict is not None and key in args_dict:
-            return args_dict[key].lower() != 'false'
-        return True
+            return args_dict[key].upper()
+        return 'DEBUG'
 
     def quick_test_init(self,
                         host,
                         use_btpeer=True,
                         flag='Quick Health',
                         args_dict=None,
-                        start_browser=False):
+                        start_browser=False,
+                        floss=False):
         """Inits the test batch"""
+
+        super().quick_test_init(flag)
+
         self.host = host
         self.start_browser = start_browser
         self.use_btpeer = use_btpeer
-        update_btpeers = self._get_update_btpeers_arguments(args_dict)
+        self.floss = floss
+        self.local_host_ip = None
+
+        logging.debug('args_dict %s', args_dict)
+        update_btpeers = self._get_bool_arg('update_btpeers', args_dict, True)
+        self.rssi_check = self._get_bool_arg('rssi_check', args_dict, True)
+        clean_log = self._get_clean_kernel_log_arguments(args_dict)
         btpeer_args = []
         if args_dict is not None:
             btpeer_args = self.host.get_btpeer_arguments(args_dict)
+            ip_args = self.host.get_local_host_ip(args_dict)
+            if ip_args:
+                self.local_host_ip = ip_args['local_host_ip']
+
         #factory can not be declared as local variable, otherwise
         #factory._proxy.__del__ will be invoked, which shutdown the xmlrpc
         # server, which log out the user.
 
         self.factory = remote_facade_factory.RemoteFacadeFactory(
-                host, no_chrome=not self.start_browser, disable_arc=True)
+                host,
+                no_chrome=not self.start_browser,
+                disable_arc=True,
+                force_python3=True)
         try:
-            self.bluetooth_facade = self.factory.create_bluetooth_facade()
+            self.bluetooth_facade = self.factory.create_bluetooth_facade(
+                    self.floss)
         except Exception as e:
             logging.error('Exception %s while creating bluetooth_facade',
                           str(e))
             raise error.TestFail('Unable to create bluetooth_facade')
 
+        if clean_log != 'FALSE':
+            # Clean Bluetooth kernel logs on the DUT to prevent
+            # /var/log/messages from occupying too much space.
+            self.clean_bluetooth_kernel_log(clean_log)
+
+        # Some test beds has a attenuator for Bluetooth. If Bluetooth
+        # attenuator is present, set its attenuation to 0
+        self.bt_attenuator = bluetooth_attenuator.init_btattenuator(
+                self.host, args_dict)
+
+        logging.debug("Bluetooth attenuator is %s", self.bt_attenuator)
+
+        # Check whether this device supports floss
+        if self.floss:
+            self.check_floss_support()
 
         if self.use_btpeer:
             self.input_facade = self.factory.create_input_facade()
@@ -149,7 +186,7 @@
 
             # Check the chameleond version on the peer and update if necessary
             if update_btpeers:
-                if not self.update_btpeer():
+                if not bluetooth_peer_update.update_all_peers(self.host):
                     logging.error('Updating btpeers failed. Ignored')
             else:
                 logging.info('No attempting peer update.')
@@ -165,7 +202,6 @@
             self.btpeer_group_copy = dict()
             self.group_btpeers_type()
 
-
         # Clear the active devices for this test
         self.active_test_devices = {}
 
@@ -183,47 +219,48 @@
         self.host.run('[ ! -d {0} ] || rm -rf {0} || true'.format(
                                                     self.USBMON_DIR_LOG_PATH))
 
-        self.start_new_btmon()
+        self.dut_btmon_log_path = self.start_new_btmon()
         self.start_new_usbmon()
 
-        self.flag = flag
-        self.test_iter = None
+        self.identify_platform_failure_reasons()
 
-        self.bat_tests_results = []
-        self.bat_pass_count = 0
-        self.bat_fail_count = 0
-        self.bat_testna_count = 0
-        self.bat_warn_count = 0
-        self.bat_name = None
-        self.bat_iter = None
-
-        self.pkg_tests_results = []
-        self.pkg_pass_count = 0
-        self.pkg_fail_count = 0
-        self.pkg_testna_count = 0
-        self.pkg_warn_count = 0
-        self.pkg_name = None
-        self.pkg_iter = None
-        self.pkg_is_running = False
         self.mtbf_end = False
         self.mtbf_end_lock = threading.Lock()
 
+    def quick_test_get_model_name(self):
+        """Returns the model name.
+
+           Needed by BluetoothQuickTestsBase.quick_test_test_decorator.
+        """
+        return self.get_base_platform_name()
+
+    def quick_test_get_chipset_name(self):
+        """Returns the chipset name.
+
+           Needed by BluetoothQuickTestsBase.quick_test_test_decorator.
+        """
+        return self.bluetooth_facade.get_chipset_name()
 
     @staticmethod
-    def quick_test_test_decorator(test_name, devices={}, flags=['All'],
+    def quick_test_test_decorator(test_name,
+                                  devices={},
+                                  flags=['All'],
                                   model_testNA=[],
                                   model_testWarn=[],
                                   skip_models=[],
                                   skip_chipsets=[],
-                                  shared_devices_count=0):
+                                  skip_common_errors=False,
+                                  supports_floss=False,
+                                  use_all_peers=False):
         """A decorator providing a wrapper to a quick test.
            Using the decorator a test method can implement only the core
            test and let the decorator handle the quick test wrapper methods
-           (test_start and test_end).
+           (reset/cleanup/logging).
 
            @param test_name: the name of the test to log.
-           @param devices:   list of device names which are going to be used
-                             in the following test.
+           @param devices: map of the device types and the quantities needed for
+                           the test.
+                           For example, {'BLE_KEYBOARD':1, 'BLE_MOUSE':1}.
            @param flags: list of string to describe who should run the
                          test. The string could be one of the following:
                          ['AVL', 'Quick Health', 'All'].
@@ -235,109 +272,99 @@
                                to run the tests.
            @param skip_chipsets: Raises TestNA on these chipset and doesn't
                                  attempt to run the tests.
-
+           @param skip_common_errors: If the test encounters a common error
+                                      (such as USB disconnect or daemon crash),
+                                      mark the test as TESTNA instead.
+                                      USE THIS SPARINGLY, it may mask bugs. This
+                                      is available for tests that require state
+                                      to be properly retained throughout the
+                                      whole test (i.e. advertising) and any
+                                      outside failure will cause the test to
+                                      fail.
+           @param supports_floss: Does this test support running on Floss?
+           @param use_all_peers: Set number of devices to be used to the
+                                 maximum available. This is used for tests
+                                 like bluetooth_PeerVerify which uses all
+                                 available peers. Specify only one device type
+                                 if this is set to true
         """
 
-        def decorator(test_method):
-            """A decorator wrapper of the decorated test_method.
-               @param test_method: the test method being decorated.
-               @returns the wrapper of the test method.
-            """
+        base_class = bluetooth_quick_tests_base.BluetoothQuickTestsBase
+        return base_class.quick_test_test_decorator(
+                test_name,
+                flags=flags,
+                check_runnable_func=lambda self: self.quick_test_test_runnable(
+                        supports_floss),
+                pretest_func=lambda self: self.quick_test_test_pretest(
+                        test_name, devices, use_all_peers),
+                posttest_func=lambda self: self.quick_test_test_posttest(),
+                model_testNA=model_testNA,
+                model_testWarn=model_testWarn,
+                skip_models=skip_models,
+                skip_chipsets=skip_chipsets,
+                skip_common_errors=skip_common_errors)
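
For context, an illustrative (hypothetical, not part of this patch) batch test wrapped by the rewritten decorator; the test name, device map, and run_keyboard_tests helper are assumptions for the sketch, and a real batch class would derive from BluetoothAdapterQuickTests:

    # Batch classes typically alias the decorator for brevity.
    test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator

    @test_wrapper('keyboard_reports',
                  devices={'KEYBOARD': 1},
                  flags=['All'],
                  supports_floss=True)
    def hid_keyboard_reports_test(self):
        """Core test body; reset, cleanup, and result logging are handled
        by the pretest/posttest hooks wired up above."""
        device = self.devices['KEYBOARD'][0]
        self.run_keyboard_tests(device)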
 
-            def _check_runnable(self):
-                """Check if the test could be run"""
+    def quick_test_test_runnable(self, supports_floss):
+        """Checks if the test could be run."""
 
-                # Check that the test is runnable in current setting
-                if not(self.flag in flags or 'All' in flags):
-                    logging.info('SKIPPING TEST %s', test_name)
-                    logging.info('flag %s not in %s', self.flag, flags)
-                    self._print_delimiter()
-                    return False
-                return True
+        # If the current test was to run with Floss, the test must
+        # support running with Floss.
+        if self.floss:
+            return supports_floss
 
+        return True
 
-            def _is_enough_peers_present(self):
-                """Check if enough peer devices are available."""
+    def quick_test_test_pretest(self,
+                                test_name=None,
+                                devices={},
+                                use_all_peers=False):
+        """Runs pretest checks and resets DUT's adapter and peer devices.
 
-                # Check that btpeer has all required devices before running
-                for device_type, number in devices.items():
-                    if self.available_devices.get(device_type, 0) < number:
-                        logging.info('SKIPPING TEST %s', test_name)
-                        logging.info('%s not available', device_type)
-                        self._print_delimiter()
-                        return False
-
-                # Check if there are enough peers
-                total_num_devices = sum(devices.values()) + shared_devices_count
-                if total_num_devices > len(self.host.btpeer_list):
-                    logging.info('SKIPPING TEST %s', test_name)
-                    logging.info(
-                            'Number of devices required %s is greater'
-                            'than number of peers available %d',
-                            total_num_devices, len(self.host.btpeer_list))
-                    self._print_delimiter()
-                    return False
-                return True
-
-            @functools.wraps(test_method)
-            def wrapper(self):
-                """A wrapper of the decorated method."""
-                # Set test name before exiting so batches correctly identify
-                # failing tests
-                self.test_name = test_name
-
-                if not _check_runnable(self):
-                    return
-
-                try:
-                    if not _is_enough_peers_present(self):
-                        logging.info('Not enough peer available')
-                        raise error.TestNAError('Not enough peer available')
-
-                    model = self.get_base_platform_name()
-                    if model in skip_models:
-                        logging.info('SKIPPING TEST %s', test_name)
-                        raise error.TestNAError(
-                                'Test not supported on this model')
-
-                    chipset = self.get_chipset_name()
-                    logging.debug('Bluetooth module name is %s', chipset)
-                    if chipset in skip_chipsets:
-                        logging.info('SKIPPING TEST %s on chipset %s',
-                                     test_name, chipset)
-                        raise error.TestNAError(
-                                'Test not supported on this chipset')
-
-                    self.quick_test_test_start(test_name, devices,
-                                               shared_devices_count)
-
-                    test_method(self)
-                except error.TestError as e:
-                    self.fails.append('[--- error {} ({})]'.format(
-                            test_method.__name__, str(e)))
-                except error.TestFail as e:
-                    if not bool(self.fails):
-                        self.fails.append('[--- failed {} ({})]'.format(
-                                test_method.__name__, str(e)))
-                except error.TestNAError as e:
-                    self.fails.append('[--- SKIPPED: {}]'.format(str(e)))
-                except Exception as e:
-                    self.fails.append('[--- unknown error {} ({})]'.format(
-                            test_method.__name__, str(e)))
-
-                self.quick_test_test_end(model_testNA=model_testNA,
-                                         model_testWarn=model_testWarn)
-            return wrapper
-
-        return decorator
-
-
-    def quick_test_test_start(
-            self, test_name=None, devices={}, shared_devices_count=0):
-        """Start a quick test. The method clears and restarts adapter on DUT
-           as well as peer devices. In addition the methods prints test start
-           traces.
+           @param test_name: the name of the test to log.
+           @param devices: map of the device types and the quantities needed for
+                           the test.
+                           For example, {'BLE_KEYBOARD':1, 'BLE_MOUSE':1}.
+           @param use_all_peers: Set number of devices to be used to the
+                                 maximum available. This is used for tests
+                                 like bluetooth_PeerVerify which uses all
+                                 available peers. Specify only one device type
+                                 if this is set to true
         """
+
+        def _is_enough_peers_present(self):
+            """Checks if enough peer devices are available."""
+
+            # Check that btpeer has all required devices before running
+            for device_type, number in devices.items():
+                if self.available_devices.get(device_type, 0) < number:
+                    logging.info('SKIPPING TEST %s', test_name)
+                    logging.info('%s not available', device_type)
+                    self._print_delimiter()
+                    return False
+
+            # Check if there are enough peers
+            total_num_devices = sum(devices.values())
+            if total_num_devices > len(self.host.btpeer_list):
+                logging.info('SKIPPING TEST %s', test_name)
+                logging.info(
+                        'Number of devices required %s is greater '
+                        'than number of peers available %d', total_num_devices,
+                        len(self.host.btpeer_list))
+                self._print_delimiter()
+                return False
+            return True
+
+        if use_all_peers:
+            if devices != {}:
+                devices[list(devices.keys())[0]] = len(self.host.btpeer_list)
+
+        if not _is_enough_peers_present(self):
+            logging.info('Not enough peers available')
+            raise error.TestNAError('Not enough peers available')
+
+        # Every test_method should pass by default.
+        self._expected_result = True
+
         # Bluetoothd could have crashed behind the scenes; check to see if
         # everything is still ok and recover if needed.
         self.test_is_facade_valid()
@@ -345,70 +372,29 @@
 
         # Reset the adapter
         self.test_reset_on_adapter()
+
+        # Reset the policy allowlist so that all UUIDs are allowed.
+        self.test_reset_allowlist()
+
+        # Reset power/wakeup to disabled.
+        self.test_adapter_set_wake_disabled()
+
         # Initialize bluetooth_adapter_tests class (also clears self.fails)
         self.initialize()
         # Start and peer HID devices
         self.start_peers(devices)
-        self.shared_peers = self.host.btpeer_list[-shared_devices_count:]
 
-        if test_name is not None:
-            time.sleep(self.TEST_SLEEP_SECS)
-            self._print_delimiter()
-            logging.info('Starting test: %s', test_name)
-            self.log_message('Starting test: %s'% test_name)
+        time.sleep(self.TEST_SLEEP_SECS)
+        self.log_message('Starting test: %s' % test_name)
 
-    def quick_test_test_end(self, model_testNA=[], model_testWarn=[]):
-        """Log and track the test results"""
-        result_msgs = []
-        model = self.get_base_platform_name()
+    def quick_test_test_posttest(self):
+        """Runs posttest cleanups."""
 
-        if self.test_iter is not None:
-            result_msgs += ['Test Iter: ' + str(self.test_iter)]
+        logging.info('Cleaning up and restarting before the next test...')
+        self.log_message(self.bat_tests_results[-1])
 
-        if self.bat_iter is not None:
-            result_msgs += ['Batch Iter: ' + str(self.bat_iter)]
-
-        if self.pkg_is_running is True:
-            result_msgs += ['Package iter: ' + str(self.pkg_iter)]
-
-        if self.bat_name is not None:
-            result_msgs += ['Batch Name: ' + self.bat_name]
-
-        if self.test_name is not None:
-            result_msgs += ['Test Name: ' + self.test_name]
-
-        result_msg = ", ".join(result_msgs)
-
-        if not bool(self.fails):
-            result_msg = 'PASSED | ' + result_msg
-            self.bat_pass_count += 1
-            self.pkg_pass_count += 1
-        # The test should be marked as TESTNA if any of the test expressions
-        # were SKIPPED (they threw their own TESTNA error) or the model is in
-        # the list of NA models (so any failure is considered NA instead)
-        elif model in model_testNA or any(['SKIPPED' in x
-                                           for x in self.fails]):
-            result_msg = 'TESTNA | ' + result_msg
-            self.bat_testna_count += 1
-            self.pkg_testna_count += 1
-        elif model in model_testWarn:
-            result_msg = 'WARN   | ' + result_msg
-            self.bat_warn_count += 1
-            self.pkg_warn_count += 1
-        else:
-            result_msg = 'FAIL   | ' + result_msg
-            self.bat_fail_count += 1
-            self.pkg_fail_count += 1
-
-        logging.info(result_msg)
-        self.log_message(result_msg)
-        self._print_delimiter()
-        self.bat_tests_results.append(result_msg)
-        self.pkg_tests_results.append(result_msg)
-
-        if self.test_name is not None:
-            logging.info('Cleanning up and restarting towards next test...')
-
+        # Every test_method should pass by default.
+        self._expected_result = True
 
         # Bluetoothd could have crashed behind the scenes; check if everything
         # is ok and recover if needed. This is done as part of clean-up as well
@@ -417,6 +403,19 @@
 
         self.bluetooth_facade.stop_discovery()
 
+        # Catch possible exceptions in test_reset_allowlist().
+        # Refer to b/184947150 for more context.
+        try:
+            # Reset the policy allowlist so that all UUIDs are allowed.
+            self.test_reset_allowlist()
+        except:
+            msg = ('Failed to reset the policy allowlist.\n'
+                   '### Note: reset the allowlist manually if needed. ###\n\n'
+                   'dbus-send --system --print-reply --dest=org.bluez '
+                   '/org/bluez/hci0 org.bluez.AdminPolicy1.SetServiceAllowList '
+                   'array:string:"" \n')
+            logging.error(msg)
+
         # Store a copy of active devices for raspi reset in the final step
         self.active_test_devices = self.devices
 
@@ -457,149 +456,7 @@
             self.group_btpeers_type()
 
         # Close the connection between peers
-        self.cleanup(test_state='NEW')
-
-    @staticmethod
-    def quick_test_batch_decorator(batch_name):
-        """A decorator providing a wrapper to a batch.
-           Using the decorator a test batch method can implement only its
-           core tests invocations and let the decorator handle the wrapper,
-           which is taking care for whether to run a specific test or the
-           batch as a whole and and running the batch in iterations
-
-           @param batch_name: the name of the batch to log
-        """
-
-        def decorator(batch_method):
-            """A decorator wrapper of the decorated test_method.
-               @param test_method: the test method being decorated.
-               @returns the wrapper of the test method.
-            """
-
-            @functools.wraps(batch_method)
-            def wrapper(self, num_iterations=1, test_name=None):
-                """A wrapper of the decorated method.
-                  @param num_iterations: how many interations to run
-                  @param test_name: specifc test to run otherwise None to run
-                                    the whole batch
-                """
-                if test_name is not None:
-                    single_test_method = getattr(self,  test_name)
-                    for iter in range(1,num_iterations+1):
-                        self.test_iter = iter
-                        single_test_method()
-
-                    if self.fails:
-                        # If failure is marked as TESTNA, prioritize that over
-                        # a failure. Same with WARN.
-                        if self.bat_testna_count > 0:
-                            raise error.TestNAError(self.fails)
-                        elif self.bat_warn_count > 0:
-                            raise error.TestWarn(self.fails)
-                        else:
-                            raise error.TestFail(self.fails)
-                else:
-                    for iter in range(1,num_iterations+1):
-                        self.quick_test_batch_start(batch_name, iter)
-                        batch_method(self, num_iterations, test_name)
-                        self.quick_test_batch_end()
-            return wrapper
-
-        return decorator
-
-
-    def quick_test_batch_start(self, bat_name, iteration=1):
-        """Start a test batch. The method clears and set batch variables"""
-        self.bat_tests_results = []
-        self.bat_pass_count = 0
-        self.bat_fail_count = 0
-        self.bat_testna_count = 0
-        self.bat_warn_count = 0
-        self.bat_name = bat_name
-        self.bat_iter = iteration
-
-
-    def quick_test_batch_end(self):
-        """Print results summary of a test batch """
-        logging.info(
-                '%s Test Batch Summary: total pass %d, total fail %d, '
-                'warn %d, NA %d', self.bat_name, self.bat_pass_count,
-                self.bat_fail_count, self.bat_warn_count,
-                self.bat_testna_count)
-        for result in self.bat_tests_results:
-            logging.info(result)
-        self._print_delimiter();
-        if self.bat_fail_count > 0:
-            logging.error('===> Test Batch Failed! More than one failure')
-            self._print_delimiter();
-            if self.pkg_is_running is False:
-                raise error.TestFail(self.bat_tests_results)
-        elif self.bat_testna_count > 0:
-            logging.error('===> Test Batch Passed! Some TestNA results')
-            self._print_delimiter();
-            if self.pkg_is_running is False:
-                raise error.TestNAError(self.bat_tests_results)
-        elif self.bat_warn_count > 0:
-            logging.error('===> Test Batch Passed! Some WARN results')
-            self._print_delimiter();
-            if self.pkg_is_running is False:
-                raise error.TestWarn(self.bat_tests_results)
-        else:
-            logging.info('===> Test Batch Passed! zero failures')
-            self._print_delimiter();
-
-
-    def quick_test_package_start(self, pkg_name):
-        """Start a test package. The method clears and set batch variables"""
-        self.pkg_tests_results = []
-        self.pkg_pass_count = 0
-        self.pkg_fail_count = 0
-        self.pkg_name = pkg_name
-        self.pkg_is_running = True
-
-
-    def quick_test_print_summary(self):
-        """Print results summary of a test package"""
-        logging.info(
-                '%s Test Package Summary: total pass %d, total fail %d, '
-                'Warn %d, NA %d', self.pkg_name, self.pkg_pass_count,
-                self.pkg_fail_count, self.pkg_warn_count,
-                self.pkg_testna_count)
-        for result in self.pkg_tests_results:
-            logging.info(result)
-        self._print_delimiter();
-
-
-    def quick_test_package_update_iteration(self, iteration):
-        """Update state and print log per package iteration.
-           Must be called to have a proper package test result tracking.
-        """
-        self.pkg_iter = iteration
-        if self.pkg_name is None:
-            logging.error('Error: no quick package is running')
-            raise error.TestFail('Error: no quick package is running')
-        logging.info('Starting %s Test Package iteration %d',
-                     self.pkg_name, iteration)
-
-
-    def quick_test_package_end(self):
-        """Print final result of a test package"""
-        if self.pkg_fail_count > 0:
-            logging.error('===> Test Package Failed! More than one failure')
-            self._print_delimiter();
-            raise error.TestFail(self.bat_tests_results)
-        elif self.pkg_testna_count > 0:
-            logging.error('===> Test Package Passed! Some TestNA results')
-            self._print_delimiter();
-            raise error.TestNAError(self.bat_tests_results)
-        elif self.pkg_warn_count > 0:
-            logging.error('===> Test Package Passed! Some WARN results')
-            self._print_delimiter();
-            raise error.TestWarn(self.bat_tests_results)
-        else:
-            logging.info('===> Test Package Passed! zero failures')
-            self._print_delimiter();
-        self.pkg_is_running = False
+        self.cleanup_bt_test(test_state='NEW')
 
 
     def quick_test_cleanup(self):
@@ -702,3 +559,127 @@
             if in_lab:
                 logging.info('Uploading result')
                 utils.run(cmd)
+
+
+    # ---------------------------------------------------------------
+    # Wake from suspend tests
+    # ---------------------------------------------------------------
+
+    def run_peer_wakeup_device(self,
+                               device_type,
+                               device,
+                               device_test=None,
+                               iterations=1,
+                               should_wake=True,
+                               should_pair=True,
+                               keep_paired=False):
+        """ Uses a paired peer device to wake the DUT from suspend.
+
+        @param device_type: the device type (used to determine if it's LE)
+        @param device: the meta device with the paired device
+        @param device_test: the test to run after waking and connecting the
+                            adapter/host
+        @param iterations: Number of suspend + peer wake loops to run
+        @param should_wake: Whether wakeup should occur on this test. With HID
+                            peers, this should be True. With non-HID peers, this
+                            should be False.
+        @param should_pair: Pair and connect the device first before running
+                            the wakeup test.
+        @param keep_paired: Keep the paired devices after the test.
+        """
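+        # A minimal usage sketch (assuming a peer device already allocated,
+        # e.g. device = self.devices['BLE_MOUSE'][0]):
+        #     self.run_peer_wakeup_device('BLE_MOUSE', device, should_wake=True)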
+        boot_id = self.host.get_boot_id()
+
+        if should_wake:
+            sleep_time = EXPECT_PEER_WAKE_SUSPEND_SEC
+            resume_time = EXPECT_PEER_WAKE_RESUME_BY
+            resume_slack = 5  # Allow 5s slack for resume timeout
+            measure_resume = True
+        else:
+            sleep_time = EXPECT_NO_WAKE_SUSPEND_SEC
+            resume_time = EXPECT_NO_WAKE_SUSPEND_SEC
+            # Negative resume slack lets us wake a bit earlier than expected
+            # Negative resume slack lets us wake a bit earlier than expected.
+            # the timings right.
+            resume_slack = -5
+            measure_resume = False
+
+        try:
+            if should_pair:
+                # Clear wake before testing
+                self.test_adapter_set_wake_disabled()
+                self.assert_discover_and_pair(device)
+                self.assert_on_fail(
+                        self.test_device_set_discoverable(device, False))
+
+                # Confirm connection completed
+                self.assert_on_fail(
+                        self.test_device_is_connected(device.address))
+
+            # Profile connection may not have completed yet and this will
+            # race with a subsequent disconnection (due to suspend). Use the
+            # device test to force profile connect or wait if no test was
+            # given.
+            if device_test is not None:
+                self.assert_on_fail(device_test(device))
+            else:
+                time.sleep(PROFILE_CONNECT_WAIT)
+
+            for it in range(iterations):
+                logging.info(
+                        'Running iteration {}/{} of suspend peer wake'.format(
+                                it + 1, iterations))
+
+                # Start a new suspend instance
+                suspend = self.suspend_async(suspend_time=sleep_time,
+                                             expect_bt_wake=should_wake)
+                start_time = self.bluetooth_facade.get_device_utc_time()
+
+                if should_wake:
+                    self.test_device_wake_allowed(device.address)
+                    # Also wait until powerd marks adapter as wake enabled
+                    self.test_adapter_wake_enabled()
+                else:
+                    self.test_device_wake_not_allowed(device.address)
+
+                # Trigger suspend, asynchronously wake and wait for resume
+                adapter_address = self.bluetooth_facade.address
+                self.test_suspend_and_wait_for_sleep(suspend,
+                                                     sleep_timeout=SUSPEND_SEC)
+
+                # Trigger peer wakeup
+                peer_wake = self.device_connect_async(device_type,
+                                                      device,
+                                                      adapter_address,
+                                                      delay_wake=5,
+                                                      should_wake=should_wake)
+                peer_wake.start()
+
+                # Expect a quick resume. If a timeout occurs, test fails. Since
+                # we delay sending the wake signal, we should accommodate that
+                # in our expected timeout.
+                self.test_wait_for_resume(boot_id,
+                                          suspend,
+                                          resume_timeout=resume_time,
+                                          test_start_time=start_time,
+                                          resume_slack=resume_slack,
+                                          fail_on_timeout=should_wake,
+                                          fail_early_wake=not should_wake,
+                                          collect_resume_time=measure_resume)
+
+                # Finish peer wake process
+                peer_wake.join()
+
+                # Only check peer device connection state if we expected to wake
+                # from it. Otherwise, we may or may not be connected based on
+                # the specific profile's reconnection policy.
+                if should_wake:
+                    # Make sure we're actually connected
+                    self.test_device_is_connected(device.address)
+
+                    # Verify the profile is working
+                    if device_test is not None:
+                        device_test(device)
+
+        finally:
+            if should_pair and not keep_paired:
+                self.test_remove_pairing(device.address)
diff --git a/server/cros/bluetooth/bluetooth_adapter_tests.py b/server/cros/bluetooth/bluetooth_adapter_tests.py
index a8bd830..3bb0787 100644
--- a/server/cros/bluetooth/bluetooth_adapter_tests.py
+++ b/server/cros/bluetooth/bluetooth_adapter_tests.py
@@ -28,7 +28,6 @@
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros.bluetooth import bluetooth_socket
 from autotest_lib.client.cros.chameleon import chameleon
-from autotest_lib.server.cros.bluetooth import bluetooth_peer_update
 from autotest_lib.server.cros.bluetooth import bluetooth_test_utils
 from autotest_lib.server import test
 
@@ -47,28 +46,9 @@
 
 Event = recorder.Event
 
-CHIPSET_TO_VIDPID = { 'BRCM-4354':[('0x002d','0x4354')],
-                      'MVL-8897':[('0x02df','0x912d')],
-                      'MVL-8997':[('0x1b4b','0x2b42')],
-                      'QCA-9462': [('0x168c', '0x0034')],
-                      'QCA-6174A-5':[('0x168c','0x003e')],
-                      'QCA-6174A-3':[('0x271','0x050a')],   # UART
-                      'Intel-AX200':[('0x8086', '0x2723')], # CcP2
-                      'Intel-AX201':[('0x8086','0x02f0')],  # HrP2
-                      'Intel-AC9260':[('0x8086','0x2526')], # ThP2
-                      'Intel-AC9560':[('0x8086','0x31dc'),  # JfP2
-                                      ('0x8086','0x9df0')],
-                      'Intel-AC7260':[('0x8086','0x08b1'),  # WP2
-                                      ('0x8086','0x08b2')],
-                      'Intel-AC7265':[('0x8086','0x095a'),  # StP2
-                                      ('0x8086','0x095b')],
-                      'Realtek-RTL8822C-USB':[('0x10ec','0xc822')] }
-
 # We have a number of chipsets that are no longer supported. Known issues
 # related to firmware will be ignored on these devices (b/169328792).
-UNSUPPORTED_CHIPSETS = [
-        'BRCM-4354', 'MVL-8897', 'MVL-8997', 'Intel-AC7260', 'Intel-AC7265'
-]
+UNSUPPORTED_CHIPSETS = ['MVL-8897', 'MVL-8997', 'Intel-AC7260', 'Intel-AC7265']
 
 # Location of data traces relative to this (bluetooth_adapter_tests.py) file
 BT_ADAPTER_TEST_PATH = os.path.dirname(__file__)
@@ -78,27 +58,32 @@
 
 # Delay binding the methods since host is only available at run time.
 SUPPORTED_DEVICE_TYPES = {
-    'MOUSE': lambda btpeer: btpeer.get_bluetooth_hid_mouse,
-    'KEYBOARD': lambda btpeer: btpeer.get_bluetooth_hid_keyboard,
-    'BLE_MOUSE': lambda btpeer: btpeer.get_ble_mouse,
-    'BLE_KEYBOARD': lambda btpeer: btpeer.get_ble_keyboard,
-    # Tester allows us to test DUT's discoverability, etc. from a peer
-    'BLUETOOTH_TESTER': lambda btpeer: btpeer.get_bluetooth_tester,
-    # This is a base object that does not emulate any Bluetooth device.
-    # This object is preferred when only a pure XMLRPC server is needed
-    # on the btpeer host, e.g., to perform servod methods.
-    'BLUETOOTH_BASE': lambda btpeer: btpeer.get_bluetooth_base,
-    # on the chameleon host, e.g., to perform servod methods.
-    'BLUETOOTH_BASE': lambda chameleon: chameleon.get_bluetooth_base,
-    # A phone device that supports Bluetooth
-    'BLE_PHONE': lambda chameleon: chameleon.get_ble_phone,
-    # A Bluetooth audio device emulating a headphone
-    'BLUETOOTH_AUDIO': lambda chameleon: chameleon.get_bluetooth_audio,
+        'MOUSE': lambda btpeer: btpeer.get_bluetooth_hid_mouse,
+        'KEYBOARD': lambda btpeer: btpeer.get_bluetooth_hid_keyboard,
+        'BLE_MOUSE': lambda btpeer: btpeer.get_ble_mouse,
+        'BLE_KEYBOARD': lambda btpeer: btpeer.get_ble_keyboard,
+        # Tester allows us to test DUT's discoverability, etc. from a peer
+        'BLUETOOTH_TESTER': lambda btpeer: btpeer.get_bluetooth_tester,
+        # This is a base object that does not emulate any Bluetooth device.
+        # This object is preferred when only a pure XMLRPC server is needed
+        # on the btpeer (chameleon) host, e.g., to perform servod methods.
+        'BLUETOOTH_BASE': lambda btpeer: btpeer.get_bluetooth_base,
+        # A phone device that supports Bluetooth
+        'BLE_PHONE': lambda chameleon: chameleon.get_ble_phone,
+        # A Bluetooth audio device emulating a headphone
+        'BLUETOOTH_AUDIO': lambda chameleon: chameleon.get_bluetooth_audio,
+        # A Bluetooth device that implements the Fast Pair protocol.
+        'BLE_FAST_PAIR': lambda chameleon: chameleon.get_ble_fast_pair,
 }
 
 COMMON_FAILURES = {
         'Freeing adapter /org/bluez/hci': 'adapter_freed',
         '/var/spool/crash/bluetoothd': 'bluetoothd_crashed',
+        'btintel_hw_error': 'intel hardware error detected',
+        'qca_hw_error': 'qca hardware error detected',
+        'cmd_cnt 0 cmd queued ([5-9]|[1-9][0-9]+)': 'controller cmd capacity',
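+        # The last pattern above flags a controller reporting zero available
+        # command credits while five or more commands are still queued.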
 }
 
 # TODO(b/150898182) - Don't run some tests on tablet form factors
@@ -106,16 +91,11 @@
 # the ones that were not launched
 TABLET_MODELS = ['kakadu', 'kodama', 'krane', 'dru', 'druwl', 'dumo']
 
-# TODO(b/161005264) - Some tests rely on software rotation to pass, so we must
-# know which models don't use software rotation. Use a static list until we can
-# query the bluez API instead. Extended advertising is supported on platforms
-# on 4.19 and 5.4, with HrP2, JfP2, CcP2, RTL8822C, or QCN3991 chipsets.
-EXT_ADV_MODELS = ['ezkinil', 'trembyle', 'drawcia', 'drawlat', 'drawman',
-                  'maglia', 'magolor', 'sarien', 'arcada', 'akemi',
-                  'drallion', 'drallion360', 'hatch', 'stryke', 'helios',
-                  'dragonair', 'dratini', 'duffy', 'jinlon', 'kaisa',
-                  'kindred', 'kled', 'puff', 'kohaku', 'nightfury', 'morphius',
-                  'lazor', 'trogdor']
+# Some platforms do not have built-in I/O hardware, and so they are configured
+# to automatically reconnect to paired HID devices on boot. We note these
+# platform types here as there will be different behavior expectations around
+# reboot.
+RECONNECT_PLATFORM_TYPES = ['CHROMEBOX', 'CHROMEBIT', 'CHROMEBASE']
 
 # TODO(b/158336394) Realtek: Powers down during suspend due to high power usage
 #                            during S3.
@@ -126,6 +106,37 @@
 #                            to check for suspend stability.
 SUSPEND_POWER_DOWN_CHIPSETS = ['Realtek-RTL8822C-USB', 'MVL-8897', 'MVL-8997']
 
+# All Realtek chipsets on USB drop their firmware and reload it on
+# suspend-resume unless they are connected to a peer device. This doesn't
+# include RTL8822, which resets regardless of the peer.
+SUSPEND_RESET_IF_NO_PEER_CHIPSETS = ['Realtek-RTL8852A-USB']
+
+# Models to skip since they power down on suspend.
+SUSPEND_POWER_DOWN_MODELS = ['dru', 'druwl', 'dumo']
+
+# Chipsets which do not support Bluetooth Hardware Filtering.
+UNSUPPORTED_BT_HW_FILTERING_CHIPSETS = [
+        'MVL-8897', 'MVL-8997', 'QCA-6174A-5-USB', 'QCA-6174A-3-UART',
+        'QCA-WCN6856', 'Intel-AC7260', 'Intel-AC7265', 'Realtek-RTL8822C-USB',
+        'Realtek-RTL8822C-UART', 'Realtek-RTL8852A-USB',
+        'Mediatek-MTK7921-USB', 'Mediatek-MTK7921-SDIO'
+]
+
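+# Kernel log levels, matching the numeric priorities used by syslog/printk
+# (0 = EMERG ... 7 = DEBUG).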
+KERNEL_LOG_LEVEL = {
+        'EMERG': 0,
+        'ALERT': 1,
+        'CRIT': 2,
+        'ERR': 3,
+        'WARNING': 4,
+        'NOTICE': 5,
+        'INFO': 6,
+        'DEBUG': 7
+}
+
+# The benchmark criterion to determine whether HID device reconnection is fast
+HID_RECONNECT_TIME_MAX_SEC = 3
+LE_HID_RECONNECT_TIME_MAX_SEC = 3
+
 
 def method_name():
     """Get the method name of a class.
@@ -403,14 +414,18 @@
     it is added to the test results, to make it easier to identify common root
     causes from Stainless
     """
+    had_failure = False
 
     for fail_tag, fail_log in COMMON_FAILURES.items():
         if instance.bluetooth_facade.messages_find(fail_tag):
+            had_failure = True
             logging.error('Detected failure tag: %s', fail_tag)
             # We mark this instance's results with the discovered failure
             if type(instance.results) is dict:
                 instance.results[fail_log] = True
 
+    return had_failure
+
 
 def fix_serial_device(btpeer, device, operation='reset'):
     """Fix the serial device.
@@ -558,6 +573,8 @@
             syslog_captured = False
 
             try:
+                logging.info('[>>> running: {}]'.format(test_method.__name__))
+                start_time = time.time()
                 if messages_start:
                     # Grab /var/log/messages output during test run
                     instance.bluetooth_facade.messages_start()
@@ -571,34 +588,46 @@
                 if messages_stop:
                     syslog_captured = instance.bluetooth_facade.messages_stop()
 
-                if test_result:
-                    logging.info('[*** passed: {}]'.format(
-                            test_method.__name__))
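+                # Scan the captured syslog for known failure signatures and
+                # remember whether any were seen during this test run.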
+                if syslog_captured:
+                    had_failure = _flag_common_failures(instance)
+                    instance.had_known_common_failure = any(
+                            [instance.had_known_common_failure, had_failure])
+
+                logging.debug('instance._expected_result : %s',
+                              instance._expected_result)
+                elapsed_time = 'elapsed_time: {:.3f}s'.format(time.time() -
+                                                              start_time)
+                if instance._expected_result:
+                    if test_result:
+                        logging.info('[*** passed: {}] {}'.format(
+                                test_method.__name__, elapsed_time))
+                    else:
+                        fail_msg = '[--- failed: {} ({})]'.format(
+                                test_method.__name__, str(instance.results))
+                        logging.error('{} {}'.format(fail_msg, elapsed_time))
+                        instance.fails.append(fail_msg)
                 else:
-                    if syslog_captured:
-                        _flag_common_failures(instance)
-                    fail_msg = '[--- failed: {} ({})]'.format(
-                            test_method.__name__, str(instance.results))
-                    logging.error(fail_msg)
-                    instance.fails.append(fail_msg)
-            # Log TestError and TestNA and let the quicktest wrapper catch it.
-            # Those errors should skip out of the testcase entirely.
-            except error.TestNAError as e:
-                fail_msg = '[--- TESTNA {} ({})]'.format(
-                        test_method.__name__, str(e))
-                logging.error(fail_msg)
+                    if test_result:
+                        # The test is expected to fail; but it passed.
+                        reason = 'expected fail, actually passed'
+                        fail_msg = '[--- failed: {} ({})]'.format(
+                                test_method.__name__, reason)
+                        logging.error('{} {}'.format(fail_msg, elapsed_time))
+                        instance.fails.append(fail_msg)
+                    else:
+                        # The test is expected to fail; and it did fail.
+                        reason = 'expected fail, actually failed'
+                        logging.info('[*** passed: {} ({})] {}'.format(
+                                test_method.__name__, reason, elapsed_time))
+
+            # Reset _expected_result and let the quicktest wrapper catch it.
+            # These errors should skip out of the testcase entirely.
+            except (error.TestNAError, error.TestError, error.TestFail):
+                instance._expected_result = True
                 raise
-            except error.TestError as e:
-                fail_msg = '[--- ERROR {} ({})]'.format(
-                        test_method.__name__, str(e))
-                logging.error(fail_msg)
-                raise
-            except error.TestFail as e:
-                fail_msg = '[--- failed {} ({})]'.format(
-                        test_method.__name__, str(e))
-                logging.error(fail_msg)
-                instance.fails.append(fail_msg)
-                should_raise = True
+
+            # Next test_method should pass by default.
+            instance._expected_result = True
 
             # Check whether we should fail fast
             if fail_msg and should_raise:
@@ -690,8 +719,8 @@
     CLASS_OF_DEVICE_MASK = 0x001FFF
 
     # Constants about advertising.
-    DAFAULT_MIN_ADVERTISEMENT_INTERVAL_MS = 181.25
-    DAFAULT_MAX_ADVERTISEMENT_INTERVAL_MS = 181.25
+    DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS = 200
+    DEFAULT_MAX_ADVERTISEMENT_INTERVAL_MS = 200
     ADVERTISING_INTERVAL_UNIT = 0.625
 
     # Error messages about advertising dbus methods.
@@ -700,7 +729,7 @@
     ERROR_INVALID_ADVERTISING_INTERVALS = (
             'org.bluez.Error.InvalidArguments: Invalid arguments')
 
-    # Supported profiles by chrome os.
+    # Supported profiles by ChromeOS.
     SUPPORTED_UUIDS = {
             'GATT_UUID': '00001801-0000-1000-8000-00805f9b34fb',
             'A2DP_SOURCE_UUID': '0000110a-0000-1000-8000-00805f9b34fb',
@@ -720,9 +749,24 @@
     # Path for usbmon logs
     USBMON_DIR_LOG_PATH = '/var/log/usbmon'
 
+    # Parameters for usbmon log rotation
+    USBMON_SINGLE_FILE_MAX_SIZE = '10M'  # 10M bytes
+    USBMON_NUM_OF_ROTATE_FILE = 2
+
     # The agent capability of various device types.
+    # Currently all non-Fast Pair are set to NoInputNoOutput since currently
+    # All non-Fast Pair devices are currently set to NoInputNoOutput since we
+    # don't have a way to report the displayed passkey to the device in the
+    # case of Passkey Entry. Therefore, 'Just Works' is used.
+    # TODO(b/181945748) update the capabilities when Passkey Entry is supported.
     AGENT_CAPABILITY = {
+            'BLE_MOUSE': 'NoInputNoOutput',
+            'BLE_KEYBOARD': 'NoInputNoOutput',
+            'BLE_PHONE': 'NoInputNoOutput',
             'BLUETOOTH_AUDIO': 'NoInputNoOutput',
+            'FAST_PAIR': 'DisplayYesNo',
+            'KEYBOARD': 'NoInputNoOutput',
+            'MOUSE': 'NoInputNoOutput',
     }
 
     def assert_on_fail(self, result, raiseNA=False):
@@ -749,6 +793,76 @@
                 raise error.TestFail(failure_msg)
 
 
+    def expect_fail(self, test_method, *args, **kwargs):
+        """Run the test_method which is expected to fail.
+
+        Here a test means one that comes with the @test_retry_and_log
+        decorator.
+
+        In most cases, a test is expected to pass by default. However, in
+        some cases, we may expect a test to fail. As an example, a test is
+        expected to fail if the behavior is disallowed by the policy. In
+        this case, the failure is considered a pass. The example statements
+        look like
+
+            # Set an arbitrary UUID that disallows the HID device.
+            self.test_check_set_allowlist('0xabcd', True)
+
+            # Since the HID device is disallowed above,
+            # the self.test_keyboard_input_from_trace test itself would
+            # fail which is expected.
+            self.expect_fail(self.test_keyboard_input_from_trace,
+                             device, "simple_text")
+
+        In the log, the message would show that the keyboard input failed as
+
+            test_keyboard_input_from_trace: InputEventRecorderError: Failed to
+            find the device node of KEYBD_REF.
+
+        As a result, the log message shows that the test conceptually passed.
+
+            [*** passed: test_keyboard_input_from_trace (expected fail,
+                                                         actually failed)]
+
+        @param test_method: the test method to run.
+
+        @returns: True if the test method failed; False otherwise.
+        """
+        self._expected_result = False
+        logging.debug('self._expected_result %s', self._expected_result)
+        return test_method(*args, **kwargs)
+
+
+    def expect_test(self, expected_result, test_method, *args, **kwargs):
+        """Run the test method and expect the test result as expected_result.
+
+        This little helper is used to make simple the following statements
+
+            if expected_result:
+                self.test_xxx(device)
+            else:
+                self.expect_fail(self.test_xxx, device)
+
+        which can be converted to
+
+            self.expect_test(expected_result, self.test_xxx, device)
+
+        @param expected_result: True if the test is expected to pass;
+                False otherwise.
+        @param test_method: the test method to run.
+
+        @returns: True if the test result matches expected_result;
+                False otherwise.
+        """
+        if expected_result:
+            # If the test is expected to pass, just run it normally.
+            return test_method(*args, **kwargs)
+        else:
+            # If the test is expected to fail, run it through self.expect_fail
+            # to handle the failure.
+            return self.expect_fail(test_method, *args, **kwargs)
+
+
     # TODO(b/131170539) remove when sarien/arcada no longer have _signed
     # postfix
     def get_base_platform_name(self):
@@ -765,6 +879,16 @@
 
         return platform.replace('_signed', '').replace('_unsigned', '')
 
+    def platform_will_reconnect_on_boot(self):
+        """Indicates if we should expect DUT to automatically reconnect on boot
+
+        Some platforms do not have built-in I/O (e.g. ChromeBox) and will
+        automatically reconnect to paired HID devices on boot.
+
+        @returns: True if platform will reconnect on boot, else False
+        """
+
+        return self.host.get_board_type() in RECONNECT_PLATFORM_TYPES
 
     def group_btpeers_type(self):
         """Group all Bluetooth peers by the type of their detected device."""
@@ -789,7 +913,7 @@
                     device = gen_device_func(btpeer)()
                     if device.CheckSerialConnection():
                         self.btpeer_group[device_type].append(btpeer)
-                        logging.info('%d-th btpeer find device %s', \
+                        logging.debug('%d-th btpeer find device %s', \
                                      idx, device_type)
                         # Create copy of btpeer_group
                         self.btpeer_group_copy[device_type].append(btpeer)
@@ -839,14 +963,14 @@
             raise error.TestError('Peer is not available after waiting')
 
 
-    def clear_raspi_device(self, device):
+    def clear_raspi_device(self, device, next_device_type=None):
         """Clears a device on a raspi peer by resetting bluetooth stack
 
         @param device: proxy object of peripheral device
         """
 
         try:
-            device.ResetStack()
+            device.ResetStack(next_device_type)
 
         except socket.error as e:
             # Ignore conn reset, expected during stack reset
@@ -855,8 +979,10 @@
 
         except chameleon.ChameleonConnectionError as e:
             # Ignore chameleon conn reset, expected during stack reset
-            if str(errno.ECONNRESET) not in str(e):
+            if (str(errno.ECONNRESET) not in str(e) and
+                    'ResetStack' not in str(e)):
                 raise
+            logging.info('Ignored exception due to ResetStack: %s', str(e))
 
         except six.moves.http_client.BadStatusLine as e:
             # BadStatusLine occurs occasionally when chameleon
@@ -916,7 +1042,7 @@
 
             for btpeer in self.btpeer_group[device_type][:number]:
                 logging.info("getting emulated %s", device_type)
-                device = self.reset_device(btpeer, device_type, on_start)
+                device = self.reset_btpeer(btpeer, device_type, on_start)
 
                 self.devices[device_type].append(device)
 
@@ -928,6 +1054,24 @@
 
         return True
 
+    def get_peer_device_type(self, device):
+        """Determine the type of peer a device is emulating
+
+        Sometimes it is useful to be able to flexibly determine what type of
+        peripheral a device is emulating. This helper function does a reverse
+        look-up to determine what type it was registered as.
+
+        @param device: the emulated peer device
+
+        @returns: the emulated device type if found, e.g. 'MOUSE' or
+            'BLE_KEYBOARD', else None
+        """
+
+        for device_type, device_list in self.devices.items():
+            if device in device_list:
+                return device_type
+
+        return None
 
     def get_device(self, device_type, on_start=True):
         """Get the bluetooth device object.
@@ -942,26 +1086,24 @@
         """
 
         self.devices[device_type].append(
-                self.reset_device(self.host.btpeer, device_type, on_start))
+                self.reset_btpeer(self.host.btpeer, device_type, on_start))
 
         return self.devices[device_type][-1]
 
 
-    def reset_device(self, peer, device_type, clear_device=True):
-        """Reset the peer device in order to be used as a different type.
+    def reset_emulated_device(self, device, device_type, clear_device=True):
+        """Reset the emulated device in order to be used as a different type.
 
-        @param peer: the peer device to reset with new device type
+        @param device: the emulated peer device to reset with new device type
         @param device_type : the new bluetooth device type, e.g., 'MOUSE'
         @param clear_device: whether to clear the device state
 
         @returns: the bluetooth device object
 
         """
-        device = get_bluetooth_emulated_device(peer, device_type)
-
         # Re-fresh device to clean state if test is starting
         if clear_device:
-            self.clear_raspi_device(device)
+            self.clear_raspi_device(device, next_device_type=device_type)
 
         try:
             # Tell generic chameleon to bind to this device type
@@ -978,6 +1120,19 @@
 
         return device
 
+    def reset_btpeer(self, peer, device_type, clear_device=True):
+        """Reset the btpeer device in order to be used as a different type.
+
+        @param peer: the btpeer device to reset with new device type
+        @param device_type : the new bluetooth device type, e.g., 'MOUSE'
+        @param clear_device: whether to clear the device state
+
+        @returns: the bluetooth device object
+
+        """
+        device = get_bluetooth_emulated_device(peer, device_type)
+
+        return self.reset_emulated_device(device, device_type, clear_device)
 
     def is_device_available(self, btpeer, device_type):
         """Determines if the named device is available on the linked peer
@@ -1026,7 +1181,7 @@
         """
         boot_id = self.host.get_boot_id()
         suspend = self.suspend_async(suspend_time=suspend_time)
-        start_time = self.bluetooth_facade.get_device_time()
+        start_time = self.bluetooth_facade.get_device_utc_time()
 
         # Give the system some time to enter suspend
         self.test_suspend_and_wait_for_sleep(
@@ -1054,8 +1209,12 @@
         if hasattr(self, 'input_facade'):
             del self.input_facade
         self.factory = remote_facade_factory.RemoteFacadeFactory(
-                self.host, disable_arc=True, no_chrome=not self.start_browser)
-        self.bluetooth_facade = self.factory.create_bluetooth_facade()
+                self.host,
+                disable_arc=True,
+                no_chrome=not self.start_browser,
+                force_python3=True)
+        self.bluetooth_facade = self.factory.create_bluetooth_facade(
+                self.floss)
         self.input_facade = self.factory.create_input_facade()
 
         # Re-enable debugging verbose since Chrome will set it to
@@ -1069,7 +1228,7 @@
         self.enable_disable_ui(enable=False)
 
         self.start_new_btmon()
-        self.start_new_usbmon()
+        self.start_new_usbmon(reboot=True)
 
 
     def _wait_till_condition_holds(self, func, method_name,
@@ -1343,7 +1502,7 @@
 
         @returns: True if cras was restart successfully, else False
         """
-        return self.restart_services(['cras', ])
+        return self.bluetooth_facade.restart_cras()
 
 
     def enable_disable_debug_log(self, enable):
@@ -1352,8 +1511,14 @@
                        False to disable all of the debug log.
         """
         level = int(enable)
-        self.bluetooth_facade.set_debug_log_levels(level, level, level, level)
+        self.bluetooth_facade.set_debug_log_levels(level, level)
 
+    def enable_disable_quality_debug_log(self, enable):
+        """Enable or disable bluez quality debug log in the DUT
+        @param enable: True to enable the bluez quality debug log,
+                       False to disable it.
+        """
+        self.bluetooth_facade.set_quality_debug_log(bool(enable))
 
     def start_new_btmon(self):
         """ Start a new btmon process and save the log """
@@ -1367,23 +1532,36 @@
         # Time format. Ex, 2020_02_20_17_52_45
         now = time.strftime("%Y_%m_%d_%H_%M_%S")
         file_name = 'btsnoop_%s' % now
-        self.host.run_background('btmon -SAw %s/%s' % (self.BTMON_DIR_LOG_PATH,
-                                                       file_name))
 
-    def start_new_usbmon(self):
-        """ Start a new USBMON process and save the log """
+        path = os.path.join(self.BTMON_DIR_LOG_PATH, file_name)
+        self.host.run_background('btmon -SAw %s' % path)
+        return path
+
+    def start_new_usbmon(self, reboot=False):
+        """ Start a new USBMON process and save the log
+
+        @param reboot: True to indicate we are starting a new usbmon on reboot,
+                       False otherwise.
+        """
 
         # Kill all usbmon process before creating a new one
         self.host.run('pkill tcpdump || true')
 
+        # Delete usbmon logs from previous tests unless we are starting another
+        # usbmon because of reboot.
+        if not reboot:
+            self.host.run('rm -f %s/*' % (self.USBMON_DIR_LOG_PATH))
+
         # Make sure the directory exists
         self.host.run('mkdir -p %s' % self.USBMON_DIR_LOG_PATH)
 
         # Time format. Ex, 2020_02_20_17_52_45
         now = time.strftime("%Y_%m_%d_%H_%M_%S")
         file_name = 'usbmon_%s' % now
-        self.host.run_background('tcpdump -i usbmon0 -w %s/%s' %
-                                 (self.USBMON_DIR_LOG_PATH, file_name))
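+        # -C rotates the capture once a file reaches USBMON_SINGLE_FILE_MAX_SIZE
+        # and -W caps the rotation at USBMON_NUM_OF_ROTATE_FILE files, keeping
+        # the usbmon logs bounded in size.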
+        self.host.run_background('tcpdump -i usbmon0 -w %s/%s -C %s -W %d' %
+                                 (self.USBMON_DIR_LOG_PATH, file_name,
+                                  self.USBMON_SINGLE_FILE_MAX_SIZE,
+                                  self.USBMON_NUM_OF_ROTATE_FILE))
 
 
     def log_message(self, msg):
@@ -1402,20 +1580,6 @@
         """ Collect WRT logs from Intel Adapters."""
         return self.bluetooth_facade.collect_wrt_logs()
 
-    def get_num_connected_devices(self):
-        """ Return number of remote devices currently connected to the DUT.
-
-        @returns: The number of devices known to bluez with the Connected
-            property active
-        """
-
-        num_connected_devices = 0
-        for dev in self.bluetooth_facade.get_devices():
-            if dev and dev.get('Connected', 0):
-                num_connected_devices += 1
-
-        return num_connected_devices
-
     @test_retry_and_log
     def test_bluetoothd_running(self):
         """Test that bluetoothd is running."""
@@ -1572,13 +1736,18 @@
         session failed and wait for a new session if it did.
         """
         initially_ok = self.bluetooth_facade.is_bluetoothd_valid()
-        bluez_started = initially_ok or self.bluetooth_facade.start_bluetoothd()
+        daemon_started = initially_ok or self.bluetooth_facade.start_bluetoothd(
+        )
+        eventually_ok = initially_ok or self.bluetooth_facade.is_bluetoothd_valid(
+        )
 
         self.results = {
                 'initially_ok': initially_ok,
-                'bluez_started': bluez_started
+                'eventually_ok': eventually_ok,
+                'daemon_started': daemon_started,
         }
-        return all(self.results.values())
+        return all(
+                [self.results[x] for x in ['eventually_ok', 'daemon_started']])
 
 
     @test_retry_and_log(False)
@@ -1598,7 +1767,7 @@
 
             self.reboot()
 
-            chipset = self.get_chipset_name()
+            chipset = self.bluetooth_facade.get_chipset_name()
 
             if not chipset:
                 raise error.TestFail('Unknown adapter is missing')
@@ -1900,7 +2069,7 @@
         logging.debug('Saw Bluetooth ID of: %s', modalias)
 
         # Valid Device ID is:
-        # <00E0(Google)>/<C405(Chrome OS)>/<non-zero versionNumber>
+        # <00E0(Google)>/<C405(ChromeOS)>/<non-zero versionNumber>
         bt_format = 'bluetooth:v00E0pC405d(?!0000)'
 
         if not re.match(bt_format, modalias):
@@ -1963,7 +2132,6 @@
         @returns: True if the device is found. False otherwise.
 
         """
-        has_device_initially = False
         discovery_stopped = False
         is_not_discovering = False
         device_discovered = False
@@ -1971,39 +2139,45 @@
         discovery_started = not start_discovery
         has_device = self.bluetooth_facade.has_device
 
-        if has_device(device_address):
-            has_device_initially = True
-        else:
-            if start_discovery:
-                discovery_started = self.bluetooth_facade.start_discovery()
+        if start_discovery:
+            if has_device(device_address):
+                # Before starting a new discovery, remove the found device since
+                # it is likely to be a temporary device, and we don't know when
+                # it will be removed by bluez. Therefore, remove it and re-find
+                # the device to ensure the device object exists for the
+                # following test, e.g. test_pairing.
+                logging.debug('Removing device %s to restart temporary timer',
+                              device_address)
+                self.bluetooth_facade.remove_device_object(device_address)
 
-            if discovery_started:
-                try:
-                    utils.poll_for_condition(
-                            condition=(lambda: has_device(device_address)),
-                            timeout=self.ADAPTER_DISCOVER_TIMEOUT_SECS,
-                            sleep_interval=
-                            self.ADAPTER_DISCOVER_POLLING_SLEEP_SECS,
-                            desc='Waiting for discovering %s' % device_address)
-                    device_discovered = True
-                except utils.TimeoutError as e:
-                    logging.error('test_discover_device: %s', e)
-                except Exception as e:
-                    logging.error('test_discover_device: %s', e)
-                    err = ('bluetoothd probably crashed.'
-                           'Check out /var/log/messages')
-                    logging.error(err)
-                except:
-                    logging.error('test_discover_device: unexpected error')
+            discovery_started = self.bluetooth_facade.start_discovery()
 
-            if start_discovery and stop_discovery:
-                discovery_stopped, _ = self.bluetooth_facade.stop_discovery()
-                is_not_discovering = self._wait_for_condition(
-                        lambda: not self.bluetooth_facade.is_discovering(),
-                        method_name())
+        if discovery_started:
+            try:
+                utils.poll_for_condition(
+                        condition=(lambda: has_device(device_address)),
+                        timeout=self.ADAPTER_DISCOVER_TIMEOUT_SECS,
+                        sleep_interval=self.
+                        ADAPTER_DISCOVER_POLLING_SLEEP_SECS,
+                        desc='Waiting for discovering %s' % device_address)
+                device_discovered = True
+            except utils.TimeoutError as e:
+                logging.error('test_discover_device: %s', e)
+            except Exception as e:
+                logging.error('test_discover_device: %s', e)
+                err = ('bluetoothd probably crashed. '
+                       'Check out /var/log/messages')
+                logging.error(err)
+            except:
+                logging.error('test_discover_device: unexpected error')
+
+        if start_discovery and stop_discovery:
+            discovery_stopped, _ = self.bluetooth_facade.stop_discovery()
+            is_not_discovering = self._wait_for_condition(
+                    lambda: not self.bluetooth_facade.is_discovering(),
+                    method_name())
 
         self.results = {
-                'has_device_initially': has_device_initially,
                 'should_start_discovery': start_discovery,
                 'should_stop_discovery': stop_discovery,
                 'start_discovery': discovery_started,
@@ -2012,11 +2186,11 @@
                 'device_discovered': device_discovered}
 
         # Make sure a discovered device properly started and stopped discovery
-        device_found = device_discovered and discovery_started and (
-                discovery_stopped and is_not_discovering
-                if stop_discovery else True)
+        device_found = device_discovered and (discovery_stopped
+                                              and is_not_discovering
+                                              if stop_discovery else True)
 
-        return has_device_initially or device_found
+        return device_found
 
 
     def _test_discover_by_device(self, device):
@@ -2097,61 +2271,45 @@
         def _pair_device():
             """Pair to the device.
 
-            @returns: True if it could pair with the device. False otherwise.
+            @returns: True if it could pair with, connect to, and retrieve
+                      connection info from the device. False otherwise.
 
             """
-            return self.bluetooth_facade.pair_legacy_device(
+            self.results['paired'] = self.bluetooth_facade.pair_legacy_device(
                     device_address, pin, trusted,
                     self.ADAPTER_PAIRING_TIMEOUT_SECS)
+            self.results[
+                    'connected'] = self.bluetooth_facade.device_is_connected(
+                            device_address)
+            self.results[
+                    'connection_info_retrievable'] = self.bluetooth_facade.has_connection_info(
+                            device_address)
 
+            return self.results['paired'] and self.results[
+                    'connected'] and self.results['connection_info_retrievable']
 
-        def _verify_connection_info():
-            """Verify that connection info to device is retrievable.
-
-            @returns: True if the connection info is retrievable.
-                      False otherwise.
-            """
-            return (self.bluetooth_facade.get_connection_info(device_address)
-                    is not None)
-
-        def _verify_connected():
-            """Verify the device is connected.
-
-            @returns: True if the device is connected, False otherwise.
-            """
-            return self.bluetooth_facade.device_is_connected(device_address)
-
-        has_device = False
-        paired = False
-        connected = False
-        connection_info_retrievable = False
-        connected_devices = self.get_num_connected_devices()
+        self.results = {
+                'has_device': False,
+                'paired': False,
+                'connected': False,
+                'connection_info_retrievable': False,
+                'connection_num':
+                self.bluetooth_facade.get_num_connected_devices() + 1
+        }
 
         if self.bluetooth_facade.has_device(device_address):
-            has_device = True
+            self.results['has_device'] = True
             try:
                 utils.poll_for_condition(
                         condition=_pair_device,
                         timeout=self.ADAPTER_PAIRING_TIMEOUT_SECS,
                         sleep_interval=self.ADAPTER_PAIRING_POLLING_SLEEP_SECS,
                         desc='Waiting for pairing %s' % device_address)
-                paired = True
             except utils.TimeoutError as e:
                 logging.error('test_pairing: %s', e)
             except:
                 logging.error('test_pairing: unexpected error')
 
-            connection_info_retrievable = _verify_connection_info()
-            connected = _verify_connected()
-
-        self.results = {
-                'has_device': has_device,
-                'paired': paired,
-                'connected': connected,
-                'connection_info_retrievable': connection_info_retrievable,
-                'connection_num': connected_devices + 1
-        }
-
         return all(self.results.values())
 
     @test_retry_and_log
@@ -2278,13 +2436,16 @@
 
 
     @test_retry_and_log
-    def test_connection_by_device(self, device):
+    def test_connection_by_device(
+            self, device, post_connection_delay=ADAPTER_HID_INPUT_DELAY):
         """Test that the device could connect to the adapter successfully.
 
         This emulates the behavior that a device may initiate a
         connection request after waking up from power saving mode.
 
         @param device: the bluetooth HID device
+        @param post_connection_delay: the delay introduced post connection to
+                                      allow profile functionality to be ready
 
         @returns: True if connection is performed correctly by device and
                   the adapter also enters connection state.
@@ -2317,7 +2478,7 @@
 
             # Although the connect may be complete, it can take a few
             # seconds for the input device to be ready for use
-            time.sleep(self.ADAPTER_HID_INPUT_DELAY)
+            time.sleep(post_connection_delay)
         except utils.TimeoutError as e:
             logging.error('%s (adapter): %s', method_name, e)
         except:
@@ -2373,6 +2534,10 @@
                   False otherwise.
 
         """
+        # TODO(b/182864322) - remove the following statement when the bug
+        # is fixed.
+        device.SetRemoteAddress(self.bluetooth_facade.address)
+
         method_name = 'test_disconnection_by_device'
         disconnection_by_device = False
         try:
@@ -2404,10 +2569,16 @@
 
 
     @test_retry_and_log(False)
-    def test_device_is_connected(self, device_address):
+    def test_device_is_connected(
+            self,
+            device_address,
+            timeout=ADAPTER_CONNECTION_TIMEOUT_SECS,
+            sleep_interval=ADAPTER_PAIRING_POLLING_SLEEP_SECS):
         """Test that device address given is currently connected.
 
         @param device_address: Address of the device.
+        @param timeout: maximum number of seconds to wait
+        @param sleep_interval: time to sleep between polls
 
         @returns: True if the device is connected.
                   False otherwise.
@@ -2421,7 +2592,6 @@
             """
             return self.bluetooth_facade.device_is_connected(device_address)
 
-
         method_name = 'test_device_is_connected'
         has_device = False
         connected = False
@@ -2430,10 +2600,10 @@
             try:
                 utils.poll_for_condition(
                         condition=_is_connected,
-                        timeout=self.ADAPTER_CONNECTION_TIMEOUT_SECS,
-                        sleep_interval=self.ADAPTER_PAIRING_POLLING_SLEEP_SECS,
+                        timeout=timeout,
+                        sleep_interval=sleep_interval,
                         desc='Waiting to check connection to %s' %
-                              device_address)
+                        device_address)
                 connected = True
             except utils.TimeoutError as e:
                 logging.error('%s: %s', method_name, e)
@@ -2640,7 +2810,7 @@
         @returns: the equivalent jiffies
 
         """
-        return adv_interval_ms / self.ADVERTISING_INTERVAL_UNIT
+        return int(round(adv_interval_ms / self.ADVERTISING_INTERVAL_UNIT))
 
 
     def compute_duration(self, max_adv_interval_ms):
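For reference, a worked example of the conversion above. This is a sketch only: it assumes ADVERTISING_INTERVAL_UNIT is the BLE-standard 0.625 ms interval unit, which is what the 0x%04x formatting used later in this file suggests.

# Sketch; ADVERTISING_INTERVAL_UNIT is assumed to be 0.625 ms per unit.
ADVERTISING_INTERVAL_UNIT = 0.625

def convert_to_adv_jiffies(adv_interval_ms):
    # int(round(...)) keeps the result an integer so it can be rendered
    # with '0x%04x', matching the change in this hunk.
    return int(round(adv_interval_ms / ADVERTISING_INTERVAL_UNIT))

assert convert_to_adv_jiffies(200) == 0x0140    # 200 / 0.625 = 320
assert convert_to_adv_jiffies(1280) == 0x0800   # 1280 / 0.625 = 2048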
@@ -2762,7 +2932,7 @@
 
         self.results = {
                 'check_duration': check_duration,
-                'max_adv_interval_ms_found': max_adv_interval_ms_found,
+                'min_adv_interval_ms_found': min_adv_interval_ms_found,
                 'max_adv_interval_ms_found': max_adv_interval_ms_found,
         }
         return all(self.results.values())
@@ -2784,12 +2954,12 @@
         """
         min_str = ('Min advertising interval: %.3f msec (0x%04x)' %
                    (min_adv_interval_ms,
-                    min_adv_interval_ms / self.ADVERTISING_INTERVAL_UNIT))
+                    self.convert_to_adv_jiffies(min_adv_interval_ms)))
         logging.debug('min_adv_interval_ms: %s', min_str)
 
         max_str = ('Max advertising interval: %.3f msec (0x%04x)' %
                    (max_adv_interval_ms,
-                    max_adv_interval_ms / self.ADVERTISING_INTERVAL_UNIT))
+                    self.convert_to_adv_jiffies(max_adv_interval_ms)))
         logging.debug('max_adv_interval_ms: %s', max_str)
 
         return (min_str, max_str)
@@ -2883,13 +3053,62 @@
 
         @returns True if extended advertising is supported, else False
         """
-        platform = self.get_base_platform_name()
-        return platform in EXT_ADV_MODELS
 
+        adv_features = self.bluetooth_facade.get_advertising_manager_property(
+                'SupportedFeatures')
+
+        return 'HardwareOffload' in adv_features
+
+    def _verify_adv_tx_power(self, advertising_data):
+        """ Verify that advertisement uses Tx Power correctly via the following:
+
+            1. Confirm the correct Tx Power is propagated in both MGMT and
+                HCI commands.
+            2. Validate that the Tx Power selected by the controller is
+                returned to the client via dbus.
+
+        @param: advertising_data: dictionary of advertising data properties
+            used to register the advertisement
+
+        @returns: True if the above requirements are met, False otherwise
+        """
+
+        # If we aren't using TxPower in this advertisement, success
+        if not self.ext_adv_enabled() or 'TxPower' not in advertising_data:
+            return True
+
+        # Make sure the correct Tx power was passed in both MGMT and HCI
+        # commands by searching for two instances of search string
+        search_str = 'TX power: {} dbm'.format(advertising_data['TxPower'])
+        contents = self.bluetooth_le_facade.btmon_get(search_str=search_str,
+                                                      start_str='')
+        if len(contents) < 2:
+            logging.error('Could not locate correct Tx power in MGMT and HCI')
+            return False
+
+        # Locate tx power selected by controller
+        search_str = r'TX power \(selected\)'
+        contents = self.bluetooth_le_facade.btmon_get(search_str=search_str,
+                                                      start_str='')
+
+        if not contents:
+            logging.error('No Tx Power selected event found, failing')
+            return False
+
+        # The line we want has the following structure:
+        # 'TX power (selected): -5 dbm (0x07)'
+        # We locate the number before 'dbm'
+        items = contents[0].split(' ')
+        selected_tx_power = int(items[items.index('dbm') - 1])
+
+        # Validate that client's advertisement was updated correctly.
+        new_tx_prop = self.bluetooth_le_facade.get_advertisement_property(
+                advertising_data['Path'], 'TxPower')
+
+        return new_tx_prop == selected_tx_power
 
     @test_retry_and_log(False)
-    def test_register_advertisement(self, advertisement_data, instance_id,
-                                    min_adv_interval_ms, max_adv_interval_ms):
+    def test_register_advertisement(self, advertisement_data, instance_id):
         """Verify that an advertisement is registered correctly.
 
         This test verifies the following data:
@@ -2899,16 +3118,26 @@
         - service data
         - advertising intervals
         - advertising enabled
+        - Tx power set (if extended advertising available)
 
         @param advertisement_data: the data of an advertisement to register.
         @param instance_id: the instance id which starts at 1.
-        @param min_adv_interval_ms: min_adv_interval in milliseconds.
-        @param max_adv_interval_ms: max_adv_interval in milliseconds.
 
         @returns: True if the advertisement is registered correctly.
                   False otherwise.
 
         """
+
+        # We need to know the intervals in order to verify them later. If the
+        # advertisement structure contains them, use them. Otherwise, fall
+        # back to bluez's defaults.
+        if set(advertisement_data) >= {'MinInterval', 'MaxInterval'}:
+            min_adv_interval_ms = advertisement_data['MinInterval']
+            max_adv_interval_ms = advertisement_data['MaxInterval']
+
+        else:
+            min_adv_interval_ms = self.DEFAULT_MIN_ADVERTISEMENT_INTERVAL_MS
+            max_adv_interval_ms = self.DEFAULT_MAX_ADVERTISEMENT_INTERVAL_MS
+
         # When registering a new advertisement, it is possible that another
         # instance is advertising. It may need to wait for all other
         # advertisements to complete advertising once.
@@ -2919,6 +3148,11 @@
                         advertisement_data),
                 logging_timespan=logging_timespan)
 
+        # _get_btmon_log will store the return value of the registration request
+        # in self.advertising_msg. If the request was successful, the return
+        # value was an empty string
+        registration_succeeded = (self.advertising_msg == '')
+
         # Verify that a new advertisement is added.
         advertisement_added = (
                 self.bluetooth_le_facade.btmon_find('Advertising Added') and
@@ -2978,7 +3212,14 @@
         advertising_enabled = self.bluetooth_le_facade.btmon_find(
                 'Advertising: Enabled (0x01)')
 
+        # Verify new APIs were used
+        new_apis_used = self.bluetooth_le_facade.btmon_find(
+                'Add Extended Advertising Parameters')
+
+        tx_power_correct = self._verify_adv_tx_power(advertisement_data)
+
         self.results = {
+                'registration_succeeded': registration_succeeded,
                 'advertisement_added': advertisement_added,
                 'manufacturer_data_found': manufacturer_data_found,
                 'service_uuids_found': service_uuids_found,
@@ -2987,6 +3228,8 @@
                 'max_adv_interval_ms_found': max_adv_interval_ms_found,
                 'scan_rsp_correct': scan_rsp_correct,
                 'advertising_enabled': advertising_enabled,
+                'new_apis_used': new_apis_used,
+                'tx_power_correct': tx_power_correct,
         }
         return all(self.results.values())
 
@@ -3290,10 +3533,6 @@
         if not advertisement_removed:
             logging.error('Failed to remove advertisement')
 
-        # Verify that "Reset Advertising Intervals" command has been issued.
-        reset_advertising_intervals = self.bluetooth_le_facade.btmon_find(
-                'bluetoothd: Reset Advertising Intervals')
-
         # Verify the advertising is disabled.
         advertising_disabled_observied = self.bluetooth_le_facade.btmon_find(
                 'Advertising: Disabled')
@@ -3304,7 +3543,6 @@
 
         self.results = {
                 'advertisement_removed': advertisement_removed,
-                'reset_advertising_intervals': reset_advertising_intervals,
                 'advertising_disabled': advertising_disabled,
         }
         return all(self.results.values())
@@ -3658,7 +3896,7 @@
 
 
     @test_retry_and_log
-    def test_mouse_move_in_xy(self, device, delta_x, delta_y):
+    def test_mouse_move_in_xy(self, device, delta_x=-60, delta_y=100):
         """Test that the mouse move events could be received correctly.
 
         @param device: the meta device containing a bluetooth HID device
@@ -3781,31 +4019,6 @@
     # Bluetooth keyboard related tests
     # -------------------------------------------------------------------
 
-    # TODO may be deprecated as stated in b:140515628
-    @test_retry_and_log
-    def test_keyboard_input_from_string(self, device, string_to_send):
-        """Test that the keyboard's key events could be received correctly.
-
-        @param device: the meta device containing a bluetooth HID device
-        @param string_to_send: the set of keys that will be pressed one-by-one
-
-        @returns: True if the report received by the host matches the
-                  expected one. False otherwise.
-
-        """
-
-        gesture = lambda: device.KeyboardSendString(string_to_send)
-
-        actual_events = self._record_input_events(device,
-                                                  gesture,
-                                                  address=device.address)
-
-        resulting_string = bluetooth_test_utils.reconstruct_string(
-                           actual_events)
-
-        return string_to_send == resulting_string
-
-
     @test_retry_and_log
     def test_keyboard_input_from_trace(self, device, trace_name):
         """ Tests that keyboard events can be transmitted and received correctly
@@ -3943,6 +4156,140 @@
 
 
     # -------------------------------------------------------------------
+    # Enterprise policy tests
+    # -------------------------------------------------------------------
+
+    def _test_check_set_allowlist(self, uuids, expected_result):
+        """Test setting valid and invalid allowlists.
+
+        @param uuids: the uuids in the allowlist to set.
+        @param expected_result: True if the test is expected to pass.
+        """
+        create_uuid = bluetooth_test_utils.Bluetooth_UUID.create_valid_uuid
+        exp_res_str = 'valid' if expected_result else 'invalid'
+        logging.info('%s uuids: "%s"', exp_res_str, uuids)
+
+        result, err_msg = self.bluetooth_facade.policy_set_service_allow_list(
+                uuids)
+        logging.debug('result %s (%s)', result, err_msg)
+
+        if expected_result:
+            check_set_allowlist = result
+        else:
+            check_set_allowlist = ('org.bluez.Error.InvalidArguments' in err_msg
+                                   and not result)
+
+        # Query bluez to read the allow list.
+        actual_uuids_list = [
+                create_uuid(uuid) for uuid in
+                self.bluetooth_facade.policy_get_service_allow_list()]
+        actual_uuids_list.sort()
+
+        # Convert the original UUIDs into a list of full-length UUIDs and
+        # remove duplicate UUIDs in order to compare the original UUIDs
+        # with the actual UUIDs set by bluez.
+        orig_uuids_list = []
+        if expected_result and uuids != '':
+            for uuid in uuids.split(','):
+                u = create_uuid(uuid)
+                if u is None:
+                    raise error.TestFail('uuid %s in uuids %s is not valid' %
+                                         (uuid, uuids))
+                orig_uuids_list.append(u)
+        orig_dedup_uuids = list(set(orig_uuids_list))
+        orig_dedup_uuids.sort()
+        uuids_comp_result = actual_uuids_list == orig_dedup_uuids
+
+        self.results = {'uuids': uuids,
+                        'expected_set_allowlist_result': expected_result,
+                        'actual_set_allowlist_result': result,
+                        'orig_dedup_uuids': orig_dedup_uuids,
+                        'actual_uuids_list': actual_uuids_list,
+                        'check_set_allowlist': check_set_allowlist,
+                        'uuids_comp_result': uuids_comp_result}
+        logging.debug('actual_uuids_list %s', actual_uuids_list)
+        logging.debug('orig_uuids_list %s', orig_uuids_list)
+
+        return (check_set_allowlist and uuids_comp_result)
+
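As an illustration of the comparison performed in _test_check_set_allowlist, the sketch below shows how a short UUID would be expanded before being compared against what bluez reports. It assumes create_valid_uuid pads 16-bit UUIDs with the standard Bluetooth base UUID; the exact accepted input forms are not shown in this hunk.

# Illustrative only; assumes short UUIDs expand against the Bluetooth
# base UUID 00000000-0000-1000-8000-00805f9b34fb.
uuids = '180d,1124,0000110b-0000-1000-8000-00805f9b34fb'
expected = sorted({
        '0000180d-0000-1000-8000-00805f9b34fb',  # from '180d'
        '00001124-0000-1000-8000-00805f9b34fb',  # from '1124'
        '0000110b-0000-1000-8000-00805f9b34fb',  # already full length
})
# The test passes when this sorted, de-duplicated expansion matches the
# list read back via policy_get_service_allow_list().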
+
+    @test_retry_and_log(False)
+    def test_check_set_allowlist(self, uuids, expected_result):
+        """Test setting valid and invalid allowlists.
+
+        @param uuids: the uuids in the allowlist to set.
+        @param expected_result: True if the test is expected to pass.
+        """
+        return self._test_check_set_allowlist(uuids, expected_result)
+
+
+    @test_retry_and_log(False)
+    def test_reset_allowlist(self):
+        """The test to reset the allowlists.
+
+        The test is used to clean up the allowlist.
+        """
+        return self._test_check_set_allowlist('', True)
+
+
+    def policy_is_affected(self, device):
+        """Check if the device is affected by policy.
+
+        @param device: the connected device.
+
+        @returns: True if the device is affected by the enterprise policy.
+                  False if not. None if the device is not found.
+        """
+        return self.bluetooth_facade.policy_get_device_affected(device.address)
+
+
+    @test_retry_and_log(False)
+    def test_affected_by_policy(self, device):
+        """A test that the device is affected by policy
+
+        @param device: the peripheral device
+        @returns: True if the device is affected; False otherwise.
+        """
+        result = self.policy_is_affected(device)
+        logging.debug('policy_is_affected(%s): %s', device.address, result)
+        self.results = {
+                'expected_result': 'True (affected)',
+                'actual_result': result
+        }
+        return result is True
+
+
+    @test_retry_and_log(False)
+    def test_not_affected_by_policy(self, device):
+        """A test that the device is not affected by policy
+
+        @param device: the peripheral device
+        @returns: True if the device is not affected; False otherwise.
+        """
+        result = self.policy_is_affected(device)
+        logging.debug('policy_is_affected(%s): %s', device.address, result)
+        self.results = {
+                'expected_result': 'False (not affected)',
+                'actual_result': result
+        }
+        return result is False
+
+    def check_if_affected_by_policy(self, device, expected_result):
+        """A test that the device policy is enforced correctly
+
+        @param device: the peripheral device
+        @param expected_result: True if the test is expected to pass.
+
+        @returns: True if the device is affected or not affected per
+                  expected_result; False otherwise.
+        """
+        if expected_result:
+            return self.test_affected_by_policy(device)
+        else:
+            return self.test_not_affected_by_policy(device)
+
+
+    # -------------------------------------------------------------------
     # Servod related tests
     # -------------------------------------------------------------------
 
@@ -4069,7 +4416,8 @@
                              test_start_time,
                              resume_slack=RESUME_DELTA,
                              fail_on_timeout=False,
-                             fail_early_wake=True):
+                             fail_early_wake=True,
+                             collect_resume_time=False):
         """ Wait for device to resume from suspend.
 
         @param boot_id: Current boot id
@@ -4079,6 +4427,7 @@
         @param resume_slack: Allow some slack on resume timeout.
         @param fail_on_timeout: Fails if timeout is reached
         @param fail_early_wake: Fails if timeout isn't reached
+        @param collect_resume_time: Collect time to resume as perf keyval.
 
         @return True if suspend sub-process completed without error.
         """
@@ -4111,6 +4460,15 @@
                         'Started test at {} but last suspend ended at {}'.
                         format(test_start, wake_at))
 
+            # If the recorded time of the last suspend attempt is in the
+            # future, a time conversion error probably occurred.
+            current_time = self.bluetooth_facade.get_device_utc_time()
+            if current_time < wake_at:
+                raise error.TestFail(
+                        'Timezone conversion error found. '
+                        'Last suspend ended at {} but current time is {}'.
+                        format(wake_at, current_time))
+
             return True
 
         def _check_retcode_or_raise(retcode):
@@ -4153,7 +4511,11 @@
             # we can use measured time instead.
             info = self.bluetooth_facade.find_last_suspend_via_powerd_logs()
             if info:
-                (start_suspend_at, end_suspend_at, retcode) = info
+                start_suspend_at, end_suspend_at, retcode = info
+                logging.debug('find_last_suspend_via_powerd_logs returned: '
+                              'start_suspend_at: {}, end_suspend_at: {}, '
+                              'retcode {}'.format(start_suspend_at,
+                                                  end_suspend_at, retcode))
                 actual_delta = end_suspend_at - start_suspend_at
                 results['powerd time to resume'] = actual_delta.total_seconds()
                 results['powerd retcode'] = retcode
@@ -4163,12 +4525,25 @@
                 # fail here if BT blocked suspend, not if we woke spuriously.
                 # This is by design (we depend on the timeout to check for
                 # spurious wakeup).
-                success = _check_suspend_attempt_or_raise(
-                        test_start_time,
-                        end_suspend_at) and _check_retcode_or_raise(
-                                retcode) and _check_timeout(actual_delta)
+                try:
+                    suspend_ok, retcode_ok, timeout_ok = False, False, False
+                    suspend_ok = _check_suspend_attempt_or_raise(
+                            test_start_time, end_suspend_at)
+                    retcode_ok = _check_retcode_or_raise(retcode)
+                    timeout_ok = _check_timeout(actual_delta)
+                except error.TestNAError as e:
+                    raise e
+                finally:
+                    logging.debug('_check_suspend_attempt_or_raise: {} '
+                                  '_check_retcode_or_raise: {} '
+                                  '_check_timeout: {}'.format(
+                                          suspend_ok, retcode_ok, timeout_ok))
+                success = suspend_ok and retcode_ok and timeout_ok
             else:
                 results['time to resume'] = network_delta.total_seconds()
+                logging.debug(
+                        'Unable to get time to resume from powerd. Estimate sleep time '
+                        'using network ping')
                 success = _check_timeout(network_delta)
         except error.TestFail as e:
             results['device accessible on resume'] = False
@@ -4182,10 +4557,20 @@
         finally:
             suspend.join()
 
+        # Log wake performance
+        if collect_resume_time:
+            test_desc = '{}_wake_time'.format(self.test_name.replace(' ', '_'))
+            wake_time = results.get('powerd time to resume',
+                                    results.get('time to resume', 0))
+            # Only write perf if wake time exists (non-zero)
+            if wake_time:
+                self.write_perf_keyval({test_desc: wake_time})
+
         results['success'] = success
         results['suspend exit code'] = suspend.exitcode
         self.results = results
 
+        logging.info('test_wait_for_resume(): %r', results)
         return all([success, suspend.exitcode == 0])
 
 
@@ -4273,6 +4658,38 @@
         return all(self.results.values())
 
 
+    @test_retry_and_log(False)
+    def test_hid_device_created_speed(self, device):
+        """ Tests that the hid device is created with faster polling.
+
+        @param device: Peripheral device
+        """
+        device_found = self.bluetooth_facade.wait_for_hid_device(
+                device_address=device.address, sleep_interval=0.1)
+        self.results = {'device_found': device_found}
+        return all(self.results.values())
+
+
+    @test_retry_and_log(False)
+    def test_hid_device_reconnect_time(self, duration, device_type):
+        """ Tests that the hid device reconnection is fast enough.
+
+        @param duration: The averaged duration of HID reconnection
+        @param device_type: Specified the type of the device
+        """
+
+        if 'BLE' in device_type:
+            max_duration = LE_HID_RECONNECT_TIME_MAX_SEC
+        else:
+            max_duration = HID_RECONNECT_TIME_MAX_SEC
+
+        self.results = {
+                'hid_reconnect_time': duration,
+                'max_passing_time': max_duration
+        }
+        return duration < max_duration
+
+
     @test_retry_and_log
     def test_battery_reporting(self, device):
         """ Tests that battery reporting through GATT can be received
@@ -4282,10 +4699,19 @@
         @returns: true if battery reporting is received
         """
 
-        percentage = self.bluetooth_facade.get_battery_property(
-                device.address, 'Percentage')
+        def _get_battery_percentage():
+            return self.bluetooth_facade.get_battery_property(
+                    device.address, 'Percentage')
 
-        return percentage > 0
+        # Sometimes the battery interface isn't available on the device
+        # right away. Wait for it to become available.
+        utils.poll_for_condition(
+                condition=lambda: _get_battery_percentage() is not None,
+                timeout=self.ADAPTER_WAIT_DEFAULT_TIMEOUT_SECS,
+                sleep_interval=self.ADAPTER_POLLING_DEFAULT_SLEEP_SECS,
+                desc='Waiting for battery on %s' % device.address)
+
+        return _get_battery_percentage() > 0
 
     def _apply_new_adapter_alias(self, alias):
         """ Sets new system alias and applies discoverable setting
@@ -4340,9 +4766,13 @@
         # what conditions failed by looking at the log.
         self.results = None
 
+        # If any known failures were seen in the logs at any time during this
+        # test execution, we capture that here. This includes daemon crashes,
+        # usb disconnects or any of the other known common failure reasons
+        self.had_known_common_failure = False
+
         # Some tests may instantiate a peripheral device for testing.
         self.devices = dict()
-        self.shared_peers = []
         for device_type in SUPPORTED_DEVICE_TYPES:
             self.devices[device_type] = list()
 
@@ -4350,124 +4780,131 @@
         self.count_advertisements = 0
 
 
-    def update_btpeer(self):
-        """ Check and update the chameleond bundle on Bluetooth peer
-        Latest chameleond bundle and git commit is stored in the google cloud
-        This function compares the git commit of the Bluetooth peers and update
-        the peer if the commit does not match
+    def get_device_sample_rssi(self, device, use_cached_value=True):
+        """ Get one RSSI value of the given device.
 
-        @returns True: If all peer are updated to (or currently) in latest
-                       commit. False if any update fails
+        @param device: the peer device whose RSSI is to be examined
+        @param use_cached_value: Use the cached value
 
+        @returns: rssi value if the device is found,
+                  None otherwise
         """
-        def _update_btpeer():
-            status = {}
-            for peer in self.host.btpeer_list:
-                status[peer] = {}
-                status[peer]['update_needed'] = \
-                    bluetooth_peer_update.is_update_needed(peer, commit)
 
-            logging.debug(status)
-            if not any([v['update_needed'] for v in status.values()]):
-                logging.info('No peer needed update')
-                return True
-            logging.debug('Atleast one peer needs update')
+        # Maximum retry attempts of RSSI query
+        MAX_RETRY = 3
+        # Time between each RSSI query
+        WAIT_TIME = 2
+        rssi = None
 
-            if not bluetooth_peer_update.download_installation_files(self.host,
-                                                                     commit):
-                logging.error('Unable to download installation files ')
-                return False
-
-            # TODO(b:160782273) Make this parallel
-            for peer in self.host.btpeer_list:
-                if status[peer]['update_needed']:
-                    status[peer]['updated'], status[peer]['reason'] = \
-                        bluetooth_peer_update.update_peer(peer, commit)
-
-            for peer, v in status.items():
-                if not v['update_needed']:
-                    logging.debug('peer %s did not need update', str(peer.host))
-                elif not v['updated']:
-                    logging.error('update peer %s failed %s', str(peer.host),
-                                  v['reason'])
-                else:
-                    logging.debug('peer %s updated successfully',
-                                  str(peer.host))
-
-            return all([v['updated'] for v in status.values()
-                        if v['update_needed']])
+        # The device may already have a measured RSSI if check_rssi was
+        # enabled; if so, reuse the cached value.
+        #
+        # Note:
+        # device is special in that hasattr(device, xxx) will evaluate to
+        # the _Method class if xxx does not physically exist. Hence,
+        # isinstance(device.rssi, int) instead of hasattr(device, 'rssi')
+        # is used as the condition below.
+        # Refer to class _Method in client/cros/chameleon/chameleon.py
+        if isinstance(device.rssi, int) and use_cached_value:
+            return device.rssi
 
         try:
-            commit = None
-            (_, commit) = bluetooth_peer_update.get_latest_commit()
-            if commit is None:
-                logging.error('Unable to get current commit')
-                return False
+            self.test_start_discovery()
 
-            return _update_btpeer()
-        except Exception as e:
-            logging.error('Exception %s in update_btpeer', str(e))
-            return False
+            # The RSSI property is only maintained while discovery is
+            # enabled.  Stopping discovery removes the property. Thus, look
+            # up the RSSI without modifying discovery state.
+            found = self.test_discover_device(device.address,
+                                              start_discovery=False,
+                                              stop_discovery=False)
+
+            if not found:
+                logging.info('Device %s not found', device.address)
+                return None
+
+            for i in range(MAX_RETRY):
+                rssi = self.bluetooth_facade.get_device_property(
+                        device.address, 'RSSI')
+                if rssi:
+                    break
+                time.sleep(WAIT_TIME)
+
+            if not rssi:
+                logging.info('RSSI of device %s not found', device.address)
+                return None
+
+            device.rssi = rssi
+            logging.info('Peer {} RSSI {}'.format(device.address, rssi))
+
         finally:
-            if not bluetooth_peer_update.cleanup(self.host, commit):
-                logging.error('Update peer cleanup failed')
+            self.test_stop_discovery()
+            logging.info('Clearing device for test: {}'.format(device.address))
+            self.bluetooth_facade.remove_device_object(device.address)
+
+        return rssi
+
+    def check_floss_support(self):
+        """ Check whether this device supports Floss
+
+        Check for the presence of /usr/bin/btmanagerd and fail with TestNA
+        if the file is not present. This should only fail on the following
+        boards with a 2GB rootfs, where Floss is not enabled:
+        ['asuka', 'banon', 'bob', 'caroline', 'cave', 'celes',
+        'chell', 'coral', 'cyan', 'edgar', 'elm', 'gru', 'hana',
+        'kefka', 'kevin', 'lars', 'pyro', 'reef', 'reks', 'relm',
+        'sand', 'scarlet', 'sentry', 'setzer', 'snappy', 'terra', 'ultima']
 
 
-    def get_chipset_name(self):
-        """ Get the name of BT/WiFi chipset on this host
-
-        @returns chipset name if successful else ''
+        @raises error.TestNAError if the device doesn't support Floss
         """
-        (vid,pid) = self.bluetooth_facade.get_wlan_vid_pid()
-        logging.debug('Bluetooth module vid pid is %s %s', vid, pid)
-        if vid is None or pid is None:
-            # Controllers that aren't WLAN+BT combo chips does not expose
-            # Vendor ID/Product ID. Use alternate method.
-            # This will return one of ['WCN3991', ''] or a string containing
-            # the name of chipset read from DUT
-            return self.bluetooth_facade.get_bt_module_name()
-        for name, l in CHIPSET_TO_VIDPID.items():
-            if (vid, pid) in l:
-                return name
-        return ''
+        if not self.bluetooth_facade.is_btmanagerd_present():
+            raise error.TestNAError('Floss cannot be enabled on this device')
 
-
-    def verify_device_rssi(self, address_list):
+    def verify_device_rssi(self, device_list):
         """ Test device rssi is over required threshold.
 
-        @param address_list: List of peer devices to verify address for
+        @param device_list: List of peer devices to verify rssi
 
         @raises error.TestNA if any device isn't found or RSSI is too low
         """
         try:
             self.test_start_discovery()
-            for device_address in address_list:
+            for device in device_list:
                 # The RSSI property is only maintained while discovery is
                 # enabled.  Stopping discovery removes the property. Thus, look
                 # up the RSSI without modifying discovery state.
-                found = self.test_discover_device(device_address,
+                found = self.test_discover_device(device.address,
                                                   start_discovery=False,
                                                   stop_discovery=False)
                 rssi = self.bluetooth_facade.get_device_property(
-                        device_address, 'RSSI')
+                        device.address, 'RSSI')
 
                 if not found:
-                    logging.info('Failing with TEST_NA as peer %s was not'
-                                  ' discovered', device_address)
+                    # Not clearing self.fails will result in test
+                    # failing with test_discover_device failure
+                    self.fails = []
+                    logging.info(
+                            'Failing with TEST_NA as peer %s was not'
+                            ' discovered during RSSI check', device.address)
                     raise error.TestNAError(
-                            'Peer {} not discovered'.format(device_address))
+                            'Peer {} not discovered during RSSI check'.format(
+                                    device.address))
 
                 if not rssi or rssi < self.MIN_RSSI:
                     logging.info('Failing with TEST_NA since RSSI (%s) is low ',
                                   rssi)
                     raise error.TestNAError(
                             'Peer {} RSSI is too low: {}'.format(
-                                    device_address, rssi))
+                                    device.address, rssi))
+                device.rssi = rssi
 
-                logging.info('Peer {} RSSI {}'.format(device_address, rssi))
+                logging.info('Peer {} RSSI {}'.format(device.address, rssi))
         finally:
             self.test_stop_discovery()
 
+            for device in device_list:
+                logging.info('Clearing device for test: {}'.format(
+                        device.address))
+                self.bluetooth_facade.remove_device_object(device.address)
 
     def verify_controller_capability(self, required_roles=[],
                                      test_type=''):
@@ -4528,13 +4965,28 @@
         self.assert_on_fail(
                 self.test_pairing(device.address, device.pin, trusted=True))
 
+    def identify_platform_failure_reasons(self):
+        """ Identifies platform failure reasons to watch for in logs """
+        s = self.bluetooth_facade.get_bt_usb_disconnect_str()
+        if s:
+            COMMON_FAILURES[s] = 'USB disconnect detected'
+
+    def clean_bluetooth_kernel_log(self, level_name):
+        """Remove Bluetooth kernel logs in /var/log/messages with priority
+        equal to or lower than level_name.
+
+        @param level_name: name of the log level, e.g. 'INFO', 'DEBUG', ...
+        """
+        self.bluetooth_facade.clean_bluetooth_kernel_log(
+                KERNEL_LOG_LEVEL[level_name])
+
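KERNEL_LOG_LEVEL is referenced here but defined elsewhere in this module. As a hedged sketch, it is presumably a name-to-number map along the lines of the standard syslog severities, which is what the facade's numeric log_level parameter expects:

# Sketch only; the real mapping lives elsewhere in the file, and the exact
# names/values are assumptions based on standard syslog levels.
KERNEL_LOG_LEVEL = {
        'EMERG': 0,
        'ALERT': 1,
        'CRIT': 2,
        'ERROR': 3,
        'WARNING': 4,
        'NOTICE': 5,
        'INFO': 6,
        'DEBUG': 7,
}
# With such a mapping, clean_bluetooth_kernel_log('INFO') removes kernel
# Bluetooth entries at numeric level 6 and above, i.e. INFO and DEBUG.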
     def run_once(self, *args, **kwargs):
         """This method should be implemented by children classes.
 
         Typically, the run_once() method would look like:
 
         factory = remote_facade_factory.RemoteFacadeFactory(host)
-        self.bluetooth_facade = factory.create_bluetooth_facade()
+        self.bluetooth_facade = factory.create_bluetooth_facade(self.floss)
 
         self.test_bluetoothd_running()
         # ...
@@ -4548,7 +5000,7 @@
         raise NotImplementedError
 
 
-    def cleanup(self, test_state='END'):
+    def cleanup_bt_test(self, test_state='END'):
         """Clean up bluetooth adapter tests.
 
         @param test_state: string describing the requested clear is for
@@ -4593,3 +5045,9 @@
         self.devices = dict()
         for device_type in SUPPORTED_DEVICE_TYPES:
             self.devices[device_type] = list()
+
+    # Called only by test.test
+    def cleanup(self):
+        """Cleanup test.test instance"""
+
+        self.cleanup_bt_test()
diff --git a/server/cros/bluetooth/bluetooth_attenuator.py b/server/cros/bluetooth/bluetooth_attenuator.py
new file mode 100644
index 0000000..7786968
--- /dev/null
+++ b/server/cros/bluetooth/bluetooth_attenuator.py
@@ -0,0 +1,71 @@
+# Lint as: python2, python3
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+This module provides functions to initialize the variable attenuator used for
+Bluetooth range vs rate tests.
+"""
+
+import logging
+
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import global_config
+from autotest_lib.client.common_lib.cros.network import ping_runner
+from autotest_lib.server.cros import dnsname_mangler
+from autotest_lib.server.cros.network import attenuator_controller
+
+
+def init_btattenuator(host, args_dict):
+    """
+    Function to initialize the bluetooth attenuator and zero its attenuation.
+
+    The attenuator address can be passed as an argument to test_that;
+    otherwise it is derived from the host name (hostname-btattenuator). For
+    devices in the lab, the attenuator is assumed to be absent unless added to
+    the attenuator_hosts.py file. If the attenuator is present but not
+    accessible, an exception is raised.
+
+    @param host: cros host object representing the DUT
+           args_dict : arguments passed to test_that
+    @return: AttenuatorController object if the attenuator is present, else None
+    @raises: TestError if attenuator init fails or if the attenuator cannot be
+             accessed
+    """
+    try:
+        if not utils.is_in_container():
+            is_moblab = utils.is_moblab()
+        else:
+            is_moblab = global_config.global_config.get_config_value(
+                    'SSP', 'is_moblab', type=bool, default=False)
+        if is_moblab:
+            # TODO(b:183231262) Implement for moblab
+            logging.debug('bt attenuator not implemented for moblab')
+            return None
+
+        # If attenuator address is provided in args, then it is used
+        # else try to derive attenuator hostname from DUT hostname
+        btattenuator_args = host.get_btattenuator_arguments(
+                args_dict) if args_dict is not None else {}
+        btatten_addr = btattenuator_args.get('btatten_addr')
+        btatten_addr = dnsname_mangler.get_btattenuator_addr(
+                host.hostname, btatten_addr, True)
+        logging.debug('Bluetooth attenuator address is %s', btatten_addr)
+
+        if not btatten_addr:
+            logging.debug('Bluetooth attenuator not present')
+            return None
+        # The attenuator retains the previously set attenuation even if it is
+        # powered down. Do not proceed if the attenuator is not accessible.
+        if not ping_runner.PingRunner().simple_ping(btatten_addr):
+            logging.debug('Bluetooth attenuator not accessible')
+            return None
+
+        # Init also sets attenuation to zero
+        logging.debug('Initializing bluetooth attenuator')
+        return attenuator_controller.AttenuatorController(btatten_addr)
+    except error.TestError:
+        raise
+    except Exception as e:
+        logging.error('Exception %s while initializing bt attenuator', str(e))
+        return None
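A minimal usage sketch for the helper above; only init_btattenuator and its (host, args_dict) signature come from this file, and the surrounding test plumbing is illustrative.

# Hypothetical caller in a server-side test; setup_range_test is not a
# real autotest entry point.
from autotest_lib.server.cros.bluetooth import bluetooth_attenuator

def setup_range_test(host, args_dict):
    attenuator = bluetooth_attenuator.init_btattenuator(host, args_dict)
    if attenuator is None:
        # No attenuator is configured or reachable for this DUT; the
        # range-vs-rate portion of the test would typically be skipped.
        return None
    # init_btattenuator zeroes the attenuation, so the test starts from a
    # known baseline and can increase attenuation from here.
    return attenuator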
diff --git a/server/cros/bluetooth/bluetooth_commits.yaml b/server/cros/bluetooth/bluetooth_commits.yaml
new file mode 100644
index 0000000..957c5b2
--- /dev/null
+++ b/server/cros/bluetooth/bluetooth_commits.yaml
@@ -0,0 +1,53 @@
+# The chameleon bundle commits for Bluetooth tests in the test lab.
+
+# The current commit for all DUTs except those in lab_next_hosts.
+# Copy lab_next_commit to lab_curr_commit only when the latter proves
+# to be stable.
+# Note: d732343cf is the last python2 commit.
+# Do not modify this attribute anymore. Use the lab_commit_map if the host's
+# build version is after chromium: 3386750.
+lab_curr_commit: 881f0e0
+
+# the chameleon commit to update on the selected DUTs in lab_next_hosts
+# E.g.,
+#   lab_next_commit: 71be114
+# Leave it empty as below to always use lab_curr_commit.
+#   lab_next_commit:
+lab_next_commit: 87bed79
+
+# the lab_next_commit will only be used for this particular build number
+# Check http://go/cros-bt-stainless-result to find the next build number.
+# E.g.,
+#   lab_next_build: 13750.0.0
+# Leave it empty as below to always use lab_curr_commit.
+#   lab_next_build:
+lab_next_build: 14461.0.0
+
+lab_next_hosts:
+  - chromeos15-row8-metro4-host5    # lazor, Qualcomm WCN3990, kernel5.4
+  - chromeos15-row8-metro4-host6    # lazor, Qualcomm WCN3990, kernel5.4
+  - chromeos15-row5-rack7-host7     # kohaku, Intel AX201, kernel4.19
+  - chromeos15-row5-rack8-host7     # kohaku, Intel AX201, kernel4.19
+  - chromeos15-row5-rack1-host4     # blooglet, RTL8822, kernel4.14
+  - chromeos15-row5-rack2-host4     # blooglet, RTL8822, kernel4.14
+
+# The lab_commit_map has the format:
+# lab_commit_map:
+#   - build_version: 14461.0.0
+#     chameleon_commit: 87bed79
+#   - build_version: 00000.0.0
+#     chameleon_commit: 881f0e0
+# Each item in the lab_commit_map has the format:
+# {'build_version': build_version, 'chameleon_commit': chameleon_commit}.
+# Test server will choose the most recent build_version according to the DUT's
+# build version. The config should provide a default bundle (version 00000.0.0
+# as it is the smallest version). Note that this map is stored in reverse
+# chronological order: each build_version and chameleon_commit should be
+# newer than the entries below it.
+# We need to clean up the commit table once the corresponding build is older
+# than the current LTS version. Current LTS version is: 13816.103.0 (M90).
+lab_commit_map:
+  - build_version: 14461.0.0
+    chameleon_commit: 87bed79
+  - build_version: 00000.0.0
+    chameleon_commit: 881f0e0
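To make the selection policy described in the comments above concrete, here is a hedged sketch of how a test server could pick the chameleon commit for a DUT. The helper below is illustrative; the real selection code lives in the autotest server, not in this YAML file.

# Sketch: pick the newest entry whose build_version does not exceed the
# DUT's build version; the 00000.0.0 entry acts as the default.
def pick_chameleon_commit(lab_commit_map, dut_build_version):
    def as_tuple(version):
        return tuple(int(part) for part in version.split('.'))

    dut = as_tuple(dut_build_version)
    # Entries are stored newest-first (reverse chronological order).
    for entry in lab_commit_map:
        if as_tuple(entry['build_version']) <= dut:
            return entry['chameleon_commit']
    return None

# With the map above:
#   pick_chameleon_commit(lab_commit_map, '14500.0.0')  -> '87bed79'
#   pick_chameleon_commit(lab_commit_map, '13900.55.0') -> '881f0e0'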
diff --git a/server/cros/bluetooth/bluetooth_dbus_api_tests.py b/server/cros/bluetooth/bluetooth_dbus_api_tests.py
index 2dc7f1a..a80fefe 100644
--- a/server/cros/bluetooth/bluetooth_dbus_api_tests.py
+++ b/server/cros/bluetooth/bluetooth_dbus_api_tests.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -65,7 +66,7 @@
 
     def _compare_error(self, actual, expected):
         """ Helper function to compare error and log. """
-        if expected == actual:
+        if expected in actual:
             return True
         else:
             logging.debug("Expected error is %s Actual error is %s",expected,
diff --git a/server/cros/bluetooth/bluetooth_default_state_test.py b/server/cros/bluetooth/bluetooth_default_state_test.py
index cfe5cc1..552332c 100644
--- a/server/cros/bluetooth/bluetooth_default_state_test.py
+++ b/server/cros/bluetooth/bluetooth_default_state_test.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/bluetooth/bluetooth_device.py b/server/cros/bluetooth/bluetooth_device.py
index 4a1fcab..28e598f 100644
--- a/server/cros/bluetooth/bluetooth_device.py
+++ b/server/cros/bluetooth/bluetooth_device.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -42,15 +43,16 @@
 
     # We currently get dates back in string format due to some inconsistencies
     # between python2 and python3. This is the standard date format we use.
-    NATIVE_DATE_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
+    STANDARD_DATE_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
 
-    def __init__(self, device_host, remote_facade_proxy=None):
+    def __init__(self, device_host, remote_facade_proxy=None, floss=False):
         """Construct a BluetoothDevice.
 
         @param device_host: host object representing a remote host.
 
         """
         self.host = device_host
+        self.floss = floss
         self._remote_proxy = remote_facade_proxy
 
         # Make sure the client library is on the device so that the proxy code
@@ -59,19 +61,67 @@
         client_at.install()
         self._proxy_lock = threading.Lock()
 
-        # If remote facade wasn't already created, connect directly here
-        if not self._remote_proxy:
-            self._connect_xmlrpc_directly()
+        # Assign the correct _proxy based on the remote facade
+        if self._remote_proxy:
+            if self.floss:
+                self._proxy = self._remote_proxy.floss
+            else:
+                self._proxy = self._remote_proxy.bluetooth
+        else:
+            # If remote facade wasn't already created, connect directly here
+            self._proxy = self._connect_xmlrpc_directly()
 
-        # Get some static information about the bluetooth adapter.
-        properties = self.get_adapter_properties()
-        self.bluez_version = properties.get('Name')
-        self.address = properties.get('Address')
-        self.bluetooth_class = properties.get('Class')
-        self.UUIDs = properties.get('UUIDs')
+    def __getattr__(self, name):
+        """Override default attribute behavior to call proxy methods.
+
+        To remove duplicate code in this class, we allow methods in the proxy
+        class to be called directly from the bluetooth device class. If an
+        attribute is contained within this class, we return it. Otherwise, if
+        the proxy object contains a callable attribute of that name, we return a
+        function that calls that object when invoked.
+
+        All methods called on the proxy in this way will hold the proxy lock.
+        """
+        try:
+            return object.__getattr__(self, name)
+        except AttributeError as ae:
+            pass
+
+        # We only return proxied methods if no such attribute exists on this
+        # class. Any attribute errors here will be raised at the end if attr
+        # is None
+        try:
+            proxy = object.__getattribute__(self, '_proxy')
+            proxy_lock = object.__getattribute__(self, '_proxy_lock')
+            attr = proxy.__getattr__(name)
+            if attr:
+
+                def wrapper(*args, **kwargs):
+                    """Call target function while holding proxy lock."""
+                    with proxy_lock:
+                        return attr(*args, **kwargs)
+
+                return wrapper
+        except AttributeError as ae:
+            pass
+
+        # Couldn't find the attribute in either self or self._proxy.
+        raise AttributeError('{} has no attribute: {}'.format(
+                type(self).__name__, name))
+
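As a usage note on the __getattr__ delegation above: a facade method with no explicit wrapper on BluetoothDevice, such as start_discovery (whose hand-written wrapper is removed later in this diff), now resolves roughly as sketched below. The expansion is illustrative, not literal code from this change.

# Roughly what device.start_discovery() does after this change, where
# 'device' is a BluetoothDevice instance:
attr = device._proxy.start_discovery      # resolved through __getattr__
with device._proxy_lock:                  # the returned wrapper holds the lock
    result = attr()                       # forwarded to the remote facade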
+    def is_floss(self):
+        """Is the current facade running Floss?"""
+        return self.floss
 
     def _connect_xmlrpc_directly(self):
-        """Connects to the bluetooth native facade directly via xmlrpc."""
+        """Connects to the bluetooth facade directly via xmlrpc."""
+        # When the xmlrpc server is already created (using the
+        # RemoteFacadeFactory), we will use the BluezFacadeLocal inside the
+        # remote proxy. Otherwise, we will use the xmlrpc server started from
+        # this class. Currently, there are a few users outside of the Bluetooth
+        # autotests that use this and this can be removed once those users
+        # migrate to using the RemoteFacadeFactory to generate the xmlrpc
+        # connection.
         proxy = self.host.rpc_server_tracker.xmlrpc_connect(
                 constants.BLUETOOTH_DEVICE_XMLRPC_SERVER_COMMAND,
                 constants.BLUETOOTH_DEVICE_XMLRPC_SERVER_PORT,
@@ -87,37 +137,40 @@
         return proxy
 
     @property
-    def _proxy(self):
-        """Gets the proxy to the DUT bluetooth facade.
+    @proxy_thread_safe
+    def address(self):
+        """Get the adapter address."""
+        return self._proxy.get_address()
 
-        @return XML RPC proxy to DUT bluetooth facade.
+    @property
+    @proxy_thread_safe
+    def bluez_version(self):
+        """Get the bluez version."""
+        return self._proxy.get_bluez_version()
 
-        """
-        # When the xmlrpc server is already created (using the
-        # RemoteFacadeFactory), we will use the BluetoothNativeFacade inside the
-        # remote proxy. Otherwise, we will use the xmlrpc server started from
-        # this class. Currently, there are a few users outside of the Bluetooth
-        # autotests that use this and this can be removed once those users
-        # migrate to using the RemoteFacadeFactory to generate the xmlrpc
-        # connection.
-        if self._remote_proxy:
-            return self._remote_proxy.bluetooth
-        else:
-            return self._bt_direct_proxy
+    @property
+    @proxy_thread_safe
+    def bluetooth_class(self):
+        """Get the bluetooth class."""
+        return self._proxy.get_bluetooth_class()
 
     @proxy_thread_safe
-    def set_debug_log_levels(self, dispatcher_vb, newblue_vb, bluez_vb,
-                             kernel_vb):
+    def set_debug_log_levels(self, bluez_vb, kernel_vb):
         """Enable or disable the debug logs of bluetooth
 
-        @param dispatcher_vb: verbosity of btdispatcher debug log, either 0 or 1
-        @param newblue_vb: verbosity of newblued debug log, either 0 or 1
         @param bluez_vb: verbosity of bluez debug log, either 0 or 1
         @param kernel_vb: verbosity of kernel debug log, either 0 or 1
 
         """
-        return self._proxy.set_debug_log_levels(dispatcher_vb, newblue_vb,
-                                                bluez_vb, kernel_vb)
+        return self._proxy.set_debug_log_levels(bluez_vb, kernel_vb)
+
+    @proxy_thread_safe
+    def set_quality_debug_log(self, enable):
+        """Enable or disable bluez quality debug log in the DUT
+        @param enable: True to enable all of the debug log,
+                       False to disable all of the debug log.
+        """
+        return self._proxy.set_quality_debug_log(enable)
 
     @proxy_thread_safe
     def log_message(self, msg, dut=True, peer=True):
@@ -209,27 +262,6 @@
         """
         return self._proxy.is_bluetoothd_proxy_valid()
 
-
-    @proxy_thread_safe
-    def reset_on(self):
-        """Reset the adapter and settings and power up the adapter.
-
-        @return True on success, False otherwise.
-
-        """
-        return self._proxy.reset_on()
-
-
-    @proxy_thread_safe
-    def reset_off(self):
-        """Reset the adapter and settings, leave the adapter powered off.
-
-        @return True on success, False otherwise.
-
-        """
-        return self._proxy.reset_off()
-
-
     @proxy_thread_safe
     def has_adapter(self):
         """@return True if an adapter is present, False if not."""
@@ -253,28 +285,6 @@
         return self._proxy.set_wake_enabled(value)
 
 
-    @proxy_thread_safe
-    def set_powered(self, powered):
-        """Set the adapter power state.
-
-        @param powered: adapter power state to set (True or False).
-
-        @return True on success, False otherwise.
-
-        """
-        return self._proxy.set_powered(powered)
-
-
-    def is_powered_on(self):
-        """Is the adapter powered on?
-
-        @returns: True if the adapter is powered on
-
-        """
-        properties = self.get_adapter_properties()
-        return bool(properties.get(u'Powered'))
-
-
     def get_hci(self):
         """Get hci of the adapter; normally, it is 'hci0'.
 
@@ -287,42 +297,13 @@
         return hci
 
 
-    def get_address(self):
-        """Get the bluetooth address of the adapter.
-
-        An example of the bluetooth address of the adapter: '6C:29:95:1A:D4:6F'
-
-        @returns: the bluetooth address of the adapter.
-
-        """
-        return self.address
-
-
-    def get_bluez_version(self):
-        """Get bluez version.
-
-        An exmaple of bluez version: 'BlueZ 5.39'
-
-        @returns: the bluez version
-
-        """
-        return self.bluez_version
-
-
-    def get_bluetooth_class(self):
-        """Get the bluetooth class of the adapter.
-
-        An example of the bluetooth class of a chromebook: 4718852
-
-        @returns: the bluetooth class.
-
-        """
-        return self.bluetooth_class
-
-
     def get_UUIDs(self):
         """Get the UUIDs.
 
+        The UUIDs can be dynamically changed at run time due to adapter/CRAS
+        services availability. Therefore, always query them from the adapter,
+        not from the cache.
+
         An example of UUIDs:
             [u'00001112-0000-1000-8000-00805f9b34fb',
              u'00001801-0000-1000-8000-00805f9b34fb',
@@ -334,52 +315,8 @@
         @returns: the list of the UUIDs.
 
         """
-        return self.UUIDs
-
-
-    @proxy_thread_safe
-    def set_discoverable(self, discoverable):
-        """Set the adapter discoverable state.
-
-        @param discoverable: adapter discoverable state to set (True or False).
-
-        @return True on success, False otherwise.
-
-        """
-        return self._proxy.set_discoverable(discoverable)
-
-
-    def is_discoverable(self):
-        """Is the adapter in the discoverable state?
-
-        @return True if discoverable. False otherwise.
-
-        """
         properties = self.get_adapter_properties()
-        return properties.get('Discoverable') == 1
-
-
-    @proxy_thread_safe
-    def set_discoverable_timeout(self, discoverable_timeout):
-        """Set the adapter DiscoverableTimeout.
-
-        @param discoverable_timeout: adapter DiscoverableTimeout
-                value to set in seconds (Integer).
-
-        @return True on success, False otherwise.
-
-        """
-        return self._proxy.set_discoverable_timeout(discoverable_timeout)
-
-
-    @proxy_thread_safe
-    def get_discoverable_timeout(self):
-        """Get the adapter DiscoverableTimeout.
-
-        @return Value of property DiscoverableTimeout in seconds (Integer).
-
-        """
-        return self._proxy.get_discoverable_timeout()
+        return properties.get('UUIDs')
 
 
     @proxy_thread_safe
@@ -417,14 +354,14 @@
         return self._proxy.set_pairable(pairable)
 
 
+    @proxy_thread_safe
     def is_pairable(self):
         """Is the adapter in the pairable state?
 
         @return True if pairable. False otherwise.
 
         """
-        properties = self.get_adapter_properties()
-        return properties.get('Pairable') == 1
+        return self._proxy.get_pairable()
 
     @proxy_thread_safe
     def set_adapter_alias(self, alias):
@@ -552,7 +489,7 @@
         Required to handle non-ascii data
         @param data: data to be JSON and base64 decode
 
-        @return : JSON and base64 decoded date
+        @return : JSON and base64 decoded data
 
 
         """
@@ -586,8 +523,7 @@
             dictionaries on success, the value False otherwise.
 
         """
-        encoded_devices = self._proxy.get_devices()
-        return self._decode_json_base64(encoded_devices)
+        return json.loads(self._proxy.get_devices())
 
 
     @proxy_thread_safe
@@ -602,12 +538,12 @@
 
         prop_val = self._proxy.get_device_property(address, prop_name)
 
-        # Handle dbus error case returned by xmlrpc_server.dbus_safe decorator
+        # Handle dbus error case returned by dbus_safe decorator
         if prop_val is None:
-            return prop_val
+            return None
 
         # Decode and return property value
-        return self._decode_json_base64(prop_val)
+        return json.loads(prop_val)
 
 
     @proxy_thread_safe
@@ -624,38 +560,6 @@
         return self._proxy.get_battery_property(address, prop_name)
 
     @proxy_thread_safe
-    def start_discovery(self):
-        """Start discovery of remote devices.
-
-        Obtain the discovered device information using get_devices(), called
-        stop_discovery() when done.
-
-        @return (True, None) on success, (False, <error>) otherwise.
-
-        """
-        return self._proxy.start_discovery()
-
-
-    @proxy_thread_safe
-    def stop_discovery(self):
-        """Stop discovery of remote devices.
-
-        @return (True, None) on success, (False, <error>) otherwise.
-
-        """
-        return self._proxy.stop_discovery()
-
-
-    def is_discovering(self):
-        """Is it discovering?
-
-        @return True if it is discovering. False otherwise.
-
-        """
-        return self.get_adapter_properties().get('Discovering') == 1
-
-
-    @proxy_thread_safe
     def get_dev_info(self):
         """Read raw HCI device information.
 
@@ -1002,6 +906,23 @@
         return self._proxy.advmon_reset_event_count(app_id, monitor_id, event)
 
     @proxy_thread_safe
+    def advmon_set_target_devices(self, app_id, monitor_id, devices):
+        """Set the target devices to the given monitor.
+
+        DeviceFound and DeviceLost will only be counted if they are triggered
+        by a target device.
+
+        @param app_id: the app id.
+        @param monitor_id: the monitor id.
+        @param devices: a list of device MAC addresses
+
+        @returns: True on success, False otherwise.
+
+        """
+        return self._proxy.advmon_set_target_devices(app_id, monitor_id,
+                                                     devices)
+
+    @proxy_thread_safe
     def advmon_interleave_scan_logger_start(self):
         """ Start interleave logger recording
         """
@@ -1041,6 +962,16 @@
         return self._proxy.advmon_interleave_scan_logger_get_cancel_events()
 
     @proxy_thread_safe
+    def advmon_interleave_scan_get_durations(self):
+        """Get durations of allowlist scan and no filter scan
+
+        @returns: a dict of {'allowlist': allowlist_duration,
+                             'no filter': no_filter_duration},
+                  or None if something went wrong
+        """
+        return self._proxy.get_advmon_interleave_durations()
+
+    @proxy_thread_safe
     def messages_start(self):
         """Start messages monitoring."""
         self._proxy.messages_start()
@@ -1066,6 +997,15 @@
         return self._proxy.messages_find(pattern_str)
 
     @proxy_thread_safe
+    def clean_bluetooth_kernel_log(self, log_level=7):
+        """Remove Bluetooth kernel logs in /var/log/messages with loglevel
+           equal to or greater than |log_level|
+
+        @param log_level: int in range [0..7]
+        """
+        self._proxy.clean_bluetooth_kernel_log(log_level)
+
+    @proxy_thread_safe
     def register_advertisement(self, advertisement_data):
         """Register an advertisement.
 
@@ -1110,6 +1050,40 @@
 
 
     @proxy_thread_safe
+    def get_advertisement_property(self, adv_path, prop_name):
+        """Grab property of an advertisement registered on the DUT
+
+        The service on the DUT registers a dbus object and holds it. During the
+        test, some properties on the object may change, so this allows the test
+        access to the properties at run-time.
+
+        @param adv_path: string path of the dbus object
+        @param prop_name: string name of the property required
+
+        @returns: the value of the property in standard (non-dbus) type if the
+                    property exists, else None
+        """
+
+        return self._proxy.get_advertisement_property(adv_path, prop_name)
+
+    @proxy_thread_safe
+    def get_advertising_manager_property(self, prop_name):
+        """Grab property of the bluez advertising manager
+
+        This allows us to understand the DUT's advertising capabilities, for
+        instance the maximum number of advertising instances supported, so that
+        we can test these capabilities.
+
+        @param prop_name: string name of the property required
+
+        @returns: the value of the property in standard (non-dbus) type if the
+                    property exists, else None
+        """
+
+        return self._proxy.get_advertising_manager_property(prop_name)
+
+    @proxy_thread_safe
     def reset_advertising(self):
         """Reset advertising.
 
@@ -1132,6 +1106,24 @@
         """
         return self._proxy.create_audio_record_directory(audio_record_dir)
 
+    @proxy_thread_safe
+    def get_audio_thread_summary(self):
+        """Dumps audio thread info.
+
+        @returns: a list of cras audio information.
+        """
+        return self._proxy.get_audio_thread_summary()
+
+    @proxy_thread_safe
+    def get_device_id_from_node_type(self, node_type, is_input):
+        """Gets device id from node type.
+
+        @param node_type: a node type defined in CRAS_NODE_TYPES.
+        @param is_input: True if the node is input. False otherwise.
+
+        @returns: a string for device id.
+        """
+        return self._proxy.get_device_id_from_node_type(node_type, is_input)
 
     @proxy_thread_safe
     def start_capturing_audio_subprocess(self, audio_data, recording_device):
@@ -1157,15 +1149,17 @@
 
 
     @proxy_thread_safe
-    def start_playing_audio_subprocess(self, audio_data):
+    def start_playing_audio_subprocess(self, audio_data, pin_device=None):
         """Start playing audio in a subprocess.
 
-        @param audio_data: the audio test data
+        @param audio_data: the audio test data.
+        @param pin_device: the device id to play audio.
 
         @returns: True on success. False otherwise.
         """
         audio_data = json.dumps(audio_data)
-        return self._proxy.start_playing_audio_subprocess(audio_data)
+        return self._proxy.start_playing_audio_subprocess(
+                audio_data, pin_device)
 
 
     @proxy_thread_safe
@@ -1630,16 +1624,22 @@
 
 
     @proxy_thread_safe
-    def wait_for_hid_device(self, device_address):
+    def wait_for_hid_device(self,
+                            device_address,
+                            timeout=None,
+                            sleep_interval=None):
         """Wait for hid device with given device address.
 
         Args:
             device_address: Peripheral Address
+            timeout: maximum number of seconds to wait
+            sleep_interval: time to sleep between polls
 
         Returns:
             True if hid device is found.
         """
-        return self._proxy.wait_for_hid_device(device_address)
+        return self._proxy.wait_for_hid_device(device_address, timeout,
+                                               sleep_interval)
 
 
     @proxy_thread_safe
@@ -1665,8 +1665,8 @@
         # python3 (hopefully)
         # TODO - Revisit converting date to string and back in this method
         if info:
-            start_date = datetime.strptime(info[0], self.NATIVE_DATE_FORMAT)
-            end_date = datetime.strptime(info[1], self.NATIVE_DATE_FORMAT)
+            start_date = datetime.strptime(info[0], self.STANDARD_DATE_FORMAT)
+            end_date = datetime.strptime(info[1], self.STANDARD_DATE_FORMAT)
             ret = info[2]
 
             return (start_date, end_date, ret)
@@ -1695,6 +1695,14 @@
         return self._proxy.get_wlan_vid_pid()
 
     @proxy_thread_safe
+    def get_bt_transport(self):
+        """ Return the transport used by Bluetooth module
+
+        @returns: USB/UART/SDIO on success; None on failure
+        """
+        return self._proxy.get_bt_transport()
+
+    @proxy_thread_safe
     def get_bt_module_name(self):
         """ Return bluetooth module name for non-USB devices
 
@@ -1704,10 +1712,31 @@
         return self._proxy.get_bt_module_name()
 
     @proxy_thread_safe
-    def get_device_time(self):
-        """ Get the current device time. """
-        return datetime.strptime(self._proxy.get_device_time(),
-                                 self.NATIVE_DATE_FORMAT)
+    def get_chipset_name(self):
+        """ Get the name of BT/WiFi chipset on this host
+
+        @returns chipset name if successful else ''
+        """
+        return self._proxy.get_chipset_name()
+
+    @proxy_thread_safe
+    def get_device_utc_time(self):
+        """ Get the current device time in UTC. """
+        return datetime.strptime(self._proxy.get_device_utc_time(),
+                                 self.STANDARD_DATE_FORMAT)
+
+    @proxy_thread_safe
+    def get_bt_usb_disconnect_str(self):
+        """ Return the expected log error on USB disconnect
+
+        Locate the descriptor that will be used from the list of all USB
+        descriptors associated with our Bluetooth chip, and format it into
+        the expected error string for a USB disconnect
+
+        @returns: string representing expected usb disconnect log entry if usb
+                  device could be identified, None otherwise
+        """
+        return self._proxy.get_bt_usb_disconnect_str()
 
     @proxy_thread_safe
     def close(self, close_host=True):
@@ -1726,6 +1755,39 @@
         # This kills the RPC server.
         if close_host:
             self.host.close()
-        elif self._bt_direct_proxy:
+        elif hasattr(self, '_bt_direct_proxy'):
             self.host.rpc_server_tracker.disconnect(
                     constants.BLUETOOTH_DEVICE_XMLRPC_SERVER_PORT)
+
+
+    @proxy_thread_safe
+    def policy_get_service_allow_list(self):
+        """Get the service allow list for enterprise policy.
+
+        @returns: array of strings representing the allowed service UUIDs.
+        """
+        return self._proxy.policy_get_service_allow_list()
+
+
+    @proxy_thread_safe
+    def policy_set_service_allow_list(self, uuids):
+        """Get the service allow list for enterprise policy.
+
+        @param uuids: a string representing the uuids
+                      e.g., "0x1234,0xabcd" or ""
+
+        @returns: (True, '') on success, (False, '<error>') on failure
+        """
+        return self._proxy.policy_set_service_allow_list(uuids)
+
+    @proxy_thread_safe
+    def policy_get_device_affected(self, device_address):
+        """Get if the device is affected by enterprise policy.
+
+        @param device_address: address of the device
+                               e.g. '6C:29:95:1A:D4:6F'
+
+        @returns: True if the device is affected by the enterprise policy.
+                  False if not. None if the device is not found.
+        """
+        return self._proxy.policy_get_device_affected(device_address)
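The policy_* methods above are thin wrappers that forward to the XML-RPC
server on the DUT. As a rough, hypothetical sketch (not taken from this
change), a server-side test might drive them as shown below; the `facade`
argument, the chosen UUIDs, and the peer address are made up for illustration.

import logging

def sample_policy_check(facade):
    """Sketch: set a BLE HID allow list and query the policy state.

    `facade` is assumed to be an instance of the proxy class patched above
    (hypothetical usage, not part of this change).
    """
    # '0x1812,0x180a,0x180f' corresponds to the HOG, DIS and Battery services.
    ok, err = facade.policy_set_service_allow_list('0x1812,0x180a,0x180f')
    if not ok:
        logging.error('Failed to set allow list: %s', err)
        return False

    # Read the list back for logging purposes.
    logging.info('Allow list: %s', facade.policy_get_service_allow_list())

    # True/False if the device is known to the adapter; None if not found.
    affected = facade.policy_get_device_affected('6C:29:95:1A:D4:6F')
    logging.info('Device affected by policy: %s', affected)
    return True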
diff --git a/server/cros/bluetooth/bluetooth_gatt_client_utils.py b/server/cros/bluetooth/bluetooth_gatt_client_utils.py
index b77a7f1..e491385 100644
--- a/server/cros/bluetooth/bluetooth_gatt_client_utils.py
+++ b/server/cros/bluetooth/bluetooth_gatt_client_utils.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/bluetooth/bluetooth_peer_update.py b/server/cros/bluetooth/bluetooth_peer_update.py
index 119e3c0..5dc1a3c 100644
--- a/server/cros/bluetooth/bluetooth_peer_update.py
+++ b/server/cros/bluetooth/bluetooth_peer_update.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,6 +16,7 @@
 import sys
 import tempfile
 import time
+import yaml
 
 from datetime import datetime
 
@@ -26,8 +28,12 @@
 # The location of the package in the cloud
 GS_PUBLIC = 'gs://chromeos-localmirror/distfiles/bluetooth_peer_bundle/'
 
-# NAME of the file that stores  commit info in the cloud
-COMMIT_FILENAME = 'latest_bluetooth_commit'
+# NAME of the file that stores python2 commits info in the cloud
+PYTHON2_COMMITS_FILENAME = 'bluetooth_python2_commits'
+
+# NAME of the file that stores commits info in the Google cloud storage.
+COMMITS_FILENAME = 'bluetooth_commits.yaml'
+
 
 # The following needs to be kept in sync with values chameleond code
 BUNDLE_TEMPLATE='chameleond-0.0.2-{}.tar.gz' # Name of the chamleond package
@@ -53,17 +59,44 @@
         return False, None
 
 
-def is_update_needed(peer, latest_commit):
+def read_google_cloud_file(filename):
+    """ Check if update is required
+
+    Read the contents of the Googlle cloud file.
+
+    @param filename: the filename of the Google cloud file
+
+    @returns: the contexts of the file if successful; None otherwise.
+    """
+    try:
+        with tempfile.NamedTemporaryFile() as tmp_file:
+            tmp_filename = tmp_file.name
+            cmd = 'gsutil cp {} {}'.format(filename, tmp_filename)
+            result = utils.run(cmd)
+            if result.exit_status != 0:
+                logging.error('Downloading file %s failed with %s',
+                              filename, result.exit_status)
+                return None
+            with open(tmp_filename) as f:
+                content = f.read()
+                logging.debug('content of the file %s: %s', filename, content)
+                return content
+    except Exception as e:
+        logging.error('Error in reading %s: %s', filename, e)
+        return None
+
+
+def is_update_needed(peer, target_commit):
     """ Check if update is required
 
     Update if the commit hash doesn't match
 
     @returns: True/False
     """
-    return not is_commit_hash_equal(peer, latest_commit)
+    return not is_commit_hash_equal(peer, target_commit)
 
 
-def is_commit_hash_equal(peer, latest_commit):
+def is_commit_hash_equal(peer, target_commit):
     """ Check if chameleond commit hash is the expected one"""
     try:
         commit = peer.get_bt_commit_hash()
@@ -73,16 +106,55 @@
         return True
 
     logging.debug('commit %s found on peer %s', commit, peer.host)
-    return commit == latest_commit
+    return commit == target_commit
 
 
-def perform_update(peer, latest_commit):
-    """ Update the chameleond on the peer"""
+def is_chromeos_build_greater_or_equal(build1, build2):
+    """ Check if build1 is greater or equal to the build2"""
+    build1 = [int(key1) for key1 in build1.split('.')]
+    build2 = [int(key2) for key2 in build2.split('.')]
+    for key1, key2 in zip(build1, build2):
+        if key1 > key2:
+            return True
+        elif key1 == key2:
+            continue
+        else:
+            return False
+    return True
+
+
+def perform_update(force_system_packages_update, peer, target_commit,
+                   latest_commit):
+    """ Update the chameleond on the peer
+
+    @param force_system_packages_update: True to update system packages of the
+                                          peer.
+    @param peer: btpeer to be updated
+    @param target_commit: target git commit
+    @param latest_commit: the latest git commit in the lab_commit_map, which
+                           is defined in the bluetooth_commits.yaml
+
+    @returns: True if the update process succeeds, False otherwise
+    """
+
+    # Only update the system packages when the target commit is the latest.
+    # Since system packages are backward compatible, it's safe to keep
+    # them at the latest.
+    needs_system_update = 'true'
+    if force_system_packages_update:
+        logging.info("Forced system packages update on the peer.")
+    elif target_commit == latest_commit:
+        logging.info(
+                "Perform system packages update as the peer's "
+                "target_commit is the latest one %s", target_commit)
+    else:
+        logging.info("Skip updating system packages on the peer.")
+        needs_system_update = 'false'
 
     logging.info('copy the file over to the peer')
     try:
         cur_dir = '/tmp/'
-        bundle = BUNDLE_TEMPLATE.format(latest_commit)
+        bundle = BUNDLE_TEMPLATE.format(target_commit)
         bundle_path = os.path.join(cur_dir, bundle)
         logging.debug('package location is %s', bundle_path)
 
@@ -92,18 +164,31 @@
         logging.error(str(os.listdir(cur_dir)))
         return False
 
+    # Backward compatibility for deploying the chameleon bundle:
+    # use 'PY_VERSION=python3' only when the target_commit is not in
+    # the specified python2 commits. When py_version_option is empty,
+    # python2 will be used in the deployment.
+    python2_commits_filename = GS_PUBLIC + PYTHON2_COMMITS_FILENAME
+    python2_commits = read_google_cloud_file(python2_commits_filename)
+    logging.info('target_commit %s python2_commits %s ',
+                 target_commit, python2_commits)
+    if bool(python2_commits) and target_commit in python2_commits:
+        py_version_option = ''
+    else:
+        py_version_option = 'PY_VERSION=python3'
+
     HOST_NOW = datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M:%S')
     logging.info('running make on peer')
     cmd = ('cd %s && rm -rf %s && tar zxf %s &&'
            'cd %s && find -exec touch -c {} \; &&'
            'make install REMOTE_INSTALL=TRUE '
            'HOST_NOW="%s" BUNDLE_VERSION=%s '
-           'CHAMELEON_BOARD=%s && rm %s%s') % (cur_dir,BUNDLE_DIR, bundle,
-                                               BUNDLE_DIR, HOST_NOW,
-                                               BUNDLE_VERSION,
-                                               CHAMELEON_BOARD, cur_dir,
-                                               bundle)
-    logging.debug(cmd)
+           'CHAMELEON_BOARD=%s NEEDS_SYSTEM_UPDATE=%s '
+           '%s && rm %s%s' %
+           (cur_dir, BUNDLE_DIR, bundle, BUNDLE_DIR, HOST_NOW, BUNDLE_VERSION,
+            CHAMELEON_BOARD, needs_system_update, py_version_option, cur_dir,
+            bundle))
+    logging.info(cmd)
     status, _ = run_cmd(peer, cmd)
     if not status:
         logging.info('make failed')
@@ -136,11 +221,16 @@
     return status and expected_output in output
 
 
-def update_peer(peer, latest_commit):
+def update_peer(force_system_packages_update, peer, target_commit,
+                latest_commit):
     """Update the chameleond on peer devices if required
 
-    @params peer: btpeer to be updated
-    @params latest_commit: target git commit
+    @param force_system_packages_update: True to update system packages of the
+                                          peer
+    @param peer: btpeer to be updated
+    @param target_commit: target git commit
+    @param latest_commit: the latest git commit in the lab_commit_map, which
+                           is defined in the bluetooth_commits.yaml
 
     @returns: (True, None) if update succeeded
               (False, reason) if update failed
@@ -150,86 +240,250 @@
         logging.error('Unsupported peer %s',str(peer.host))
         return False, 'Unsupported peer'
 
-    if not perform_update(peer, latest_commit):
+    if not perform_update(force_system_packages_update, peer, target_commit,
+                          latest_commit):
         return False, 'Update failed'
 
     if not restart_check_chameleond(peer):
         return False, 'Unable to start chameleond'
 
-    if is_update_needed(peer, latest_commit):
+    if is_update_needed(peer, target_commit):
         return False, 'Commit not updated after upgrade'
 
     logging.info('updating chameleond succeded')
     return True, ''
 
 
-def update_peers(host, latest_commit):
-    """Update the chameleond on alll peer devices of an host"""
+def update_all_peers(host, raise_error=False):
+    """Update the chameleond on all peer devices of the given host
 
-    if host.btpeer_list == []:
-        raise error.TestError('Bluetooth Peer not present')
+    @param host: the DUT, usually a Chromebook
+    @param raise_error: set this to True to raise an error if the update
+                        fails on any peer
 
-    status = {}
-    for peer in host.btpeer_list:
-        #TODO(b:160782273) Make this parallel
-        status[peer] = {}
-        status[peer]['update_needed'] = is_update_needed(peer,latest_commit)
+    @returns: True if _update_all_peers succeeds
+              False if raise_error=False and _update_all_peers failed
 
-    logging.debug(status)
-    if not any([v['update_needed'] for v in status.values()]):
-        logging.info("Update not needed on any of the peers")
-        return
-    for peer in host.btpeer_list:
-        if status[peer]['update_needed']:
-            status[peer]['updated'], status[peer]['reason'] = \
-            update_peer(peer, latest_commit)
+    @raises: error.TestFail if raise_error=True and _update_all_peers failed
+    """
+    fail_reason = _update_all_peers(host)
 
-    logging.debug(status)
-    # If any of the peers failed update, raise failure with the reason
-    if not all([v['updated'] for v in status.values() if v['update_needed']]):
-        for peer, v in status.items():
-            if v['update_needed']:
-                if not v['updated']:
-                    logging.error('updating peer %s failed %s', str(peer.host),
-                                  v['reason'])
-        raise error.TestFail()
-
-    logging.info('%s peers updated',len([v['updated'] for v in status.values()
-                                         if v['update_needed']]))
+    if fail_reason:
+        if raise_error:
+            raise error.TestFail(fail_reason)
+        logging.error(fail_reason)
+        return False
+    else:
+        return True
 
 
-def get_latest_commit():
-    """ Get the latest commit
+def _update_all_peers(host):
+    """Update the chameleond on all peer devices of an host"""
+    try:
+        target_commit = get_target_commit(host)
+        latest_commit = get_latest_commit(host)
 
-    Download the file containing the latest commit and
-    parse it contents, and cleanup.
-    @returns (True,commit) in case of success ; (False, None) in case of failure
+        if target_commit is None:
+            return 'Unable to get the target commit'
+
+        if latest_commit is None:
+            return 'Unable to get latest commit'
+
+        if host.btpeer_list == []:
+            return 'Bluetooth Peer not present'
+
+        peers_to_update = [
+                p for p in host.btpeer_list
+                if is_update_needed(p, target_commit)
+        ]
+
+        if not peers_to_update:
+            logging.info('No peer needed update')
+            return
+        logging.debug('At least one peer needs update')
+
+        if not download_installation_files(host, target_commit):
+            return 'Unable to download installation files'
+
+        # TODO(b:160782273) Make this parallel
+        failed_peers = []
+        host_is_in_lab_next_hosts = is_in_lab_next_hosts(host)
+        for peer in peers_to_update:
+            updated, reason = update_peer(host_is_in_lab_next_hosts, peer,
+                                          target_commit, latest_commit)
+            if updated:
+                logging.info('peer %s updated successfully', str(peer.host))
+            else:
+                failed_peers.append((str(peer.host), reason))
+
+        if failed_peers:
+            return 'peer update failed (host, reason): %s' % failed_peers
+
+    except Exception as e:
+        return 'Exception raised in _update_all_peers: %s' % e
+    finally:
+        if not cleanup(host, target_commit):
+            return 'Update peer cleanup failed'
+
+
+def get_bluetooth_commits_yaml(host, method='from_cloud'):
+    """Get the bluetooth_commit.yaml file
+
+    This function has the side effect that it will set the attribute,
+    host.bluetooth_commits_yaml for caching.
+
+    @param host: the DUT, usually a Chromebook
+    @param method: from_cloud: download the YAML file from Google Cloud
+                                Storage
+                    from_local: read the YAML file from the local directory;
+                                this option is convenient for testing
+    @returns: the contents of bluetooth_commits.yaml if it exists
+
+    @raises: error.TestFail if failed to get the yaml file
     """
     try:
-        commit = None
-        src = GS_PUBLIC + COMMIT_FILENAME
-
-        with tempfile.NamedTemporaryFile(suffix='bt_commit') as tmp_file:
-            tmp_filename = tmp_file.name
-            cmd = 'gsutil cp {} {}'.format(src, tmp_filename)
-            result = utils.run(cmd)
-            if result.exit_status != 0:
-                logging.error('Downloading commit file failed with %s',
-                              result.exit_status)
-                return (False, None)
-            with open(tmp_filename) as f:
-                content = f.read()
-                logging.debug('content of the file is %s', content)
-                commit = content.strip('\n').strip()
-
-        logging.info('latest commit is %s', commit)
-        if commit is None:
-            return (False, None)
-        else:
-            return (True, commit)
+        if not hasattr(host, 'bluetooth_commits_yaml'):
+            if method == 'from_cloud':
+                src = GS_PUBLIC + COMMITS_FILENAME
+                host.bluetooth_commits_yaml = yaml.safe_load(
+                        read_google_cloud_file(src))
+            elif method == 'from_local':
+                yaml_file_path = os.path.dirname(os.path.realpath(__file__))
+                yaml_file_path = os.path.join(yaml_file_path,
+                                              'bluetooth_commits.yaml')
+                with open(yaml_file_path) as f:
+                    yaml_file = f.read()
+                    host.bluetooth_commits_yaml = yaml.safe_load(yaml_file)
+            else:
+                raise error.TestError('invalid YAML download method: %s' %
+                                      method)
+            logging.info('content of yaml file: %s',
+                         host.bluetooth_commits_yaml)
     except Exception as e:
-        logging.error('exception %s in get_latest_commit', str(e))
-        return (False, None)
+        logging.error('Error getting bluetooth_commits.yaml: %s', e)
+
+    return host.bluetooth_commits_yaml
+
+
+def is_in_lab_next_hosts(host):
+    """Check if the host is in the lab_next_hosts
+
+    This function has the side effect that it will set the attribute,
+    host.is_in_lab_next_hosts for caching.
+
+    @param host: the DUT, usually a Chromebook
+
+    @returns: True if the host is in the lab_next_hosts, False otherwise.
+    """
+    if not hasattr(host, 'is_in_lab_next_hosts'):
+        host_build = host.get_release_version()
+        content = get_bluetooth_commits_yaml(host)
+
+        if (host_name(host) in content.get('lab_next_hosts')
+                    and host_build == content.get('lab_next_build')):
+            host.is_in_lab_next_hosts = True
+        else:
+            host.is_in_lab_next_hosts = False
+    return host.is_in_lab_next_hosts
+
+
+def get_latest_commit(host):
+    """ Get the latest_commmit in the bluetooth_commits.yaml
+
+    @param host: the DUT, usually a Chromebook
+
+    @returns: the latest commit hash if exists
+    """
+    try:
+        content = get_bluetooth_commits_yaml(host)
+        latest_commit = content.get('lab_commit_map')[0]['chameleon_commit']
+        logging.info('The latest commit is: %s', latest_commit)
+    except Exception as e:
+        logging.error('Exception in get_latest_commit(): ', str(e))
+    return latest_commit
+
+
+def host_name(host):
+    """ Get the name of a host
+
+    @param host: the DUT, usually a Chromebook
+
+    @returns: the hostname if exists, None otherwise
+    """
+    if hasattr(host, 'hostname'):
+        # str.rstrip() strips a set of characters rather than a suffix, so
+        # remove the '.cros' suffix explicitly.
+        hostname = host.hostname
+        return hostname[:-len('.cros')] if hostname.endswith(
+                '.cros') else hostname
+    else:
+        return None
+
+
+def get_target_commit(host):
+    """ Get the target commit per the DUT
+
+    Download the yaml file containing the commits, parse its contents,
+    and cleanup.
+
+    The yaml file looks like
+    ------------------------
+    lab_curr_commit: d732343cf
+    lab_next_build: 13721.0.0
+    lab_next_commit: 71be114
+    lab_next_hosts:
+      - chromeos15-row8-rack5-host1
+      - chromeos15-row5-rack7-host7
+      - chromeos15-row5-rack1-host4
+    lab_commit_map:
+      - build_version: 14461.0.0
+        chameleon_commit: 87bed79
+      - build_version: 00000.0.0
+        chameleon_commit: 881f0e0
+
+    The lab_next_commit will be used only when 3 conditions are satisfied
+    - the lab_next_commit is non-empty
+    - the hostname of the DUT can be found in lab_next_hosts
+    - the host_build of the DUT is the same as lab_next_build
+
+    Tests of the next build will go back to the commits in the lab_commit_map
+    automatically. The purpose is that in case lab_next_commit is not stable,
+    the DUTs will go back to using the supposed stable commit according to the
+    lab_commit_map. The test server will choose the largest build_version in
+    the lab_commit_map that is smaller than or equal to the host_build.
+
+    On the other hand, if lab_next_commit is judged stable from the lab
+    dashboard, someone can then copy lab_next_build to lab_commit_map manually.
+
+    @param host: the DUT, usually a Chromebook
+
+    @returns commit in case of success; None in case of failure
+    """
+    hostname = host_name(host)
+
+    try:
+        content = get_bluetooth_commits_yaml(host)
+
+        lab_next_commit = content.get('lab_next_commit')
+        if (is_in_lab_next_hosts(host) and bool(lab_next_commit)):
+            commit = lab_next_commit
+            logging.info(
+                    'target commit of the host %s is: %s from the '
+                    'lab_next_commit', hostname, commit)
+        else:
+            host_build = host.get_release_version()
+            lab_commit_map = content.get('lab_commit_map')
+            for item in lab_commit_map:
+                build = item['build_version']
+                if is_chromeos_build_greater_or_equal(host_build, build):
+                    commit = item['chameleon_commit']
+                    break
+            else:
+                logging.error('lab_commit_map is corrupted')
+                commit = None
+            logging.info(
+                    'target commit of the host %s is: %s from the '
+                    'lab_commit_map', hostname, commit)
+
+    except Exception as e:
+        logging.error('Exception %s in get_target_commit()', str(e))
+        commit = None
+    return commit
 
 
 def download_installation_files(host, commit):
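The commit-selection rule documented in get_target_commit() above can be
summarized with a small, self-contained sketch. This is illustration only, not
part of the patch: the YAML values are sample data, and the plain list
comparison below is a simplification of is_chromeos_build_greater_or_equal(),
which compares the dotted build components pairwise.

import yaml

SAMPLE_YAML = """
lab_next_build: 13721.0.0
lab_next_commit: 71be114
lab_next_hosts:
  - chromeos15-row8-rack5-host1
lab_commit_map:
  - build_version: 14461.0.0
    chameleon_commit: 87bed79
  - build_version: 00000.0.0
    chameleon_commit: 881f0e0
"""

def pick_commit(host_build, content):
    """Return the chameleon_commit of the first map entry <= host_build."""
    def build_key(build):
        # Split '14461.0.0' into [14461, 0, 0] for numeric comparison.
        return [int(part) for part in build.split('.')]

    for item in content['lab_commit_map']:
        if build_key(host_build) >= build_key(item['build_version']):
            return item['chameleon_commit']
    return None

content = yaml.safe_load(SAMPLE_YAML)
print(pick_commit('14500.0.0', content))  # 87bed79
print(pick_commit('14000.0.0', content))  # 881f0e0 (falls back to 00000.0.0)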
diff --git a/server/cros/bluetooth/bluetooth_rvr_tests.py b/server/cros/bluetooth/bluetooth_rvr_tests.py
new file mode 100644
index 0000000..714c69c
--- /dev/null
+++ b/server/cros/bluetooth/bluetooth_rvr_tests.py
@@ -0,0 +1,63 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Server side Bluetooth range vs rate tests."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import logging
+
+import common
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_tests import (
+        BluetoothAdapterTests)
+from six.moves import range
+
+
+class BluetoothAdapterRvRTests(BluetoothAdapterTests):
+    """Server side Bluetooth adapter audio test class."""
+
+    def check_rssi_vs_attenuation(self, device, bt_attenuator):
+        """
+        @param device: Object representing the peer device
+        @param bt_attenuator: Object representing the controllable variable attenuator
+
+        @returns: Dict containing attenuation:rssi values. Empty on failure
+
+        This function keeps measuring the rssi while increasing the attenuation.
+        At some point the device discovery will fail, which is expected, so
+        this failure is ignored and self.fails is cleared.
+
+        This should not be run in a batch.
+        """
+        try:
+            fixed_attenuation = bt_attenuator.get_minimal_total_attenuation()
+            logging.debug('Fixed attenuation is %s', fixed_attenuation)
+            final_attenuation = 100  # Maximum attenuation
+            freq = 2427  # Frequency used to calculate total attenuation
+            rssi_dict = {}
+            for attn in range(fixed_attenuation, final_attenuation):
+                logging.debug('Setting attenuation to %s', attn)
+                bt_attenuator.set_total_attenuation(attn, freq)
+                device.SetDiscoverable(True)
+                try:
+                    rssi = self.get_device_sample_rssi(device,
+                                                       use_cached_value=False)
+                except error.TestFail as e:
+                    # test_discover_device might fail if RSSI is too low
+                    logging.debug(
+                            'get_device_sample_rssi failed with %s. '
+                            'This is expected if RSSI is too low', str(e))
+                    self.fails = []
+                    break
+                logging.info('Total attenuation is %s RSSI is %s', attn, rssi)
+                rssi_dict[attn] = rssi
+            return rssi_dict
+        except Exception as e:
+            logging.error('Exception in check_rssi_vs_attenuation %s', str(e))
+            return {}
+        finally:
+            bt_attenuator.set_variable_attenuation(0)
diff --git a/server/cros/bluetooth/bluetooth_sdp_tests.py b/server/cros/bluetooth/bluetooth_sdp_tests.py
index 4eee536..d0f2f23 100644
--- a/server/cros/bluetooth/bluetooth_sdp_tests.py
+++ b/server/cros/bluetooth/bluetooth_sdp_tests.py
@@ -84,8 +84,11 @@
 
         service_id_attr = ET.Element(
             'attribute', {'id': str(self.SERVICE_CLASS_ID_ATTR_ID)})
-        service_id_attr.append(
-            ET.Element('uuid', {'value': '0x%X' % self.FAKE_SERVICE_CLASS_ID}))
+        sequence = ET.Element('sequence')
+        sequence.append(
+                ET.Element('uuid',
+                           {'value': '0x%X' % self.FAKE_SERVICE_CLASS_ID}))
+        service_id_attr.append(sequence)
         sdp_record.append(service_id_attr)
 
         for attr_id in self.FAKE_GENERAL_ATTRIBUTE_IDS:
@@ -1176,5 +1179,5 @@
                         bluetooth_SDP_ServiceBrowse,
                         bluetooth_SDP_ServiceSearchAttributeRequest,
                         bluetooth_SDP_ServiceSearchRequestBasic):
-    """Master class that simplifies inheritance of sdp tests"""
+    """Derived class that simplifies inheritance of sdp tests"""
     pass
diff --git a/server/cros/bluetooth/bluetooth_test_utils.py b/server/cros/bluetooth/bluetooth_test_utils.py
index b7f0ea5..57ba077 100644
--- a/server/cros/bluetooth/bluetooth_test_utils.py
+++ b/server/cros/bluetooth/bluetooth_test_utils.py
@@ -1,13 +1,20 @@
+# Lint as: python2, python3
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
 """Provides utilities to support bluetooth adapter tests"""
 
 from __future__ import absolute_import
 
+import logging
+import re
+import uuid
+
 import common
 from autotest_lib.client.bin.input.linux_input import EV_KEY
 from autotest_lib.server.cros.bluetooth.debug_linux_keymap import (
         linux_input_keymap)
 from ast import literal_eval as make_tuple
-import logging
 
 
 def reconstruct_string(events):
@@ -47,3 +54,84 @@
         return None
 
     return contents
+
+
+class Bluetooth_UUID(uuid.UUID):
+    """A class to manipulate Bluetooth UUIDs."""
+
+    BLUETOOTH_BASE_UUID_FORMAT = '%s-0000-1000-8000-00805F9B34FB'
+
+    def __init__(self, hex_str):
+        super(Bluetooth_UUID, self).__init__(hex_str)
+
+
+    @classmethod
+    def create_valid_uuid(cls, hex_str):
+        """Create valid long UUIDs based on Bluetooth short UUIDs.
+
+        @param hex_str: the hex string that represents a short or long UUID.
+
+        @returns: the UUID object if successful; or None otherwise.
+        """
+        h = re.sub('^0x', '', hex_str).replace('-', '')
+
+        # The Bluetooth spec only allows short UUIDs of 16 bits or 32 bits.
+        # The long UUID takes 128 bits.
+        # Reference:
+        # www.bluetooth.com/specifications/assigned-numbers/service-discovery
+        hlen = len(h)
+        if hlen not in (4, 8, 32):
+            return None
+
+        # Convert the short UUIDs to the full UUID.
+        if hlen in (4, 8):
+            h = cls.BLUETOOTH_BASE_UUID_FORMAT % h.zfill(8)
+
+        return cls(h)
+
+
+class BluetoothPolicy(object):
+    """A helper class to keep popular bluetooth service lists.
+
+    Refer to
+    https://www.bluetooth.com/specifications/assigned-numbers/service-discovery/
+    """
+
+    def to_allowlist(uuids):
+        """Helper function to convert a group of uuids to allowlist format
+
+        @param uuids: an iterable object of UUID string
+
+        @returns: comma-separated UUID string
+        """
+        return ','.join(list(uuids))
+
+    UUID_HID = '0x1124'
+    UUID_HOG = '0x1812'
+    UUID_DIS = '0x180a'
+    UUID_BATT = '0x180f'
+
+    UUID_A2DP = '0x110d'
+    UUID_AUDIO_SOURCE = '0x110a'
+    UUID_AUDIO_SINK = '0x110b'
+    UUID_AVRCP = '0x110e'
+    UUID_AVRCP_TARGET = '0x110c'
+    UUID_AVRCP_CONTROLLER = '0x110f'
+    UUID_GENERIC_AUDIO = '0x1203'
+    UUID_HANDSFREE = '0x111e'
+    UUID_HANDSFREE_AUDIO_GATEWAY = '0x111f'
+    UUID_HEADSET = '0x1108'
+    UUID_HEADSET_AUDIO_GATEWAY = '0x1112'
+
+    UUIDSET_BLE_HID = {UUID_HOG, UUID_DIS, UUID_BATT}
+    UUIDSET_AUDIO = {UUID_A2DP, UUID_AUDIO_SINK, UUID_AUDIO_SOURCE,
+                     UUID_AVRCP, UUID_AVRCP_TARGET, UUID_AVRCP_CONTROLLER,
+                     UUID_GENERIC_AUDIO,
+                     UUID_HANDSFREE, UUID_HANDSFREE_AUDIO_GATEWAY,
+                     UUID_HEADSET, UUID_HEADSET_AUDIO_GATEWAY}
+
+    ALLOWLIST_CLASSIC_HID = UUID_HID
+    ALLOWLIST_BLE_HID = to_allowlist(UUIDSET_BLE_HID)
+    ALLOWLIST_AUDIO = to_allowlist(UUIDSET_AUDIO)
+    ALLOWLIST_BLE_HID_AUDIO = to_allowlist(UUIDSET_BLE_HID.union(UUIDSET_AUDIO))
+    ALLOWLIST_CLASSIC_BLE_HID = to_allowlist(UUIDSET_BLE_HID.union({UUID_HID}))
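A minimal usage sketch (illustration only, not part of the patch) of the
short-UUID expansion and the allow-list strings defined above. The import path
follows the file location of this change; the assertions simply restate what
create_valid_uuid() and to_allowlist() are documented to do.

from uuid import UUID

from autotest_lib.server.cros.bluetooth.bluetooth_test_utils import (
        Bluetooth_UUID, BluetoothPolicy)

# A 16-bit short UUID such as HOG (0x1812) is zero-padded to 8 hex digits and
# placed onto the Bluetooth base UUID xxxxxxxx-0000-1000-8000-00805F9B34FB.
hog = Bluetooth_UUID.create_valid_uuid('0x1812')
assert hog == UUID('00001812-0000-1000-8000-00805F9B34FB')

# 32-bit short UUIDs are expanded the same way; invalid lengths return None.
assert Bluetooth_UUID.create_valid_uuid('deadbeef') is not None
assert Bluetooth_UUID.create_valid_uuid('0x123') is None

# Allow-list strings are just comma-separated short UUIDs.
assert set(BluetoothPolicy.ALLOWLIST_BLE_HID.split(',')) == \
        {'0x1812', '0x180a', '0x180f'}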
diff --git a/server/cros/bluetooth/bluetooth_test_version_control.py b/server/cros/bluetooth/bluetooth_test_version_control.py
new file mode 100644
index 0000000..73a6985
--- /dev/null
+++ b/server/cros/bluetooth/bluetooth_test_version_control.py
@@ -0,0 +1,118 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Provide version control for Bluetooth tests"""
+
+import logging
+import os
+
+from autotest_lib.server import utils
+
+CWD = os.getcwd()
+BLUETOOTH_DIR = os.path.dirname(__file__)
+REMOTE_NAME = 'cros'
+BRANCH_NAME = 'main'
+BRANCH_NAME_FULL = os.path.join(REMOTE_NAME, BRANCH_NAME)
+HTTP_MIRROR_URL =\
+        'http://commondatastorage.googleapis.com/chromeos-localmirror'
+BUNDLE_PATH = 'distfiles/bluetooth_peer_bundle'
+HTTP_BUNDLE_URL = os.path.join(HTTP_MIRROR_URL, BUNDLE_PATH)
+LATEST_STABLE_AUTOTEST_COMMIT = 'LATEST_STABLE_AUTOTEST_COMMIT'
+HTTP_LATEST_STABLE_AUTOTEST_COMMIT_URL = os.path.join(
+        HTTP_BUNDLE_URL, LATEST_STABLE_AUTOTEST_COMMIT)
+
+
+def check_git_tree_clean():
+    """ Check if local directory is clear from modification
+
+    @returns: True if success, False otherwise
+    """
+    output = utils.run('git status --porcelain')
+    if output.stdout != '':
+        logging.info(
+                'The Autotest directory is not clean! To perform the AVL\n'
+                'testing consistently, the AVL setup process will fetch\n'
+                'a specific commit hash from the server and check out\n'
+                'locally. To preserve your local changes, please commit\n'
+                'or stash your changes! Changes:')
+        logging.info(output.stdout)
+        return False
+
+    logging.info('Local git tree is clean.')
+    return True
+
+
+def fetch_target_commit():
+    """ Fetch from the cloud or git to retrieve latest ToT or latest stable
+    commit hash.
+
+    @returns: current and targeted commit hash
+    """
+    current_commit = utils.system_output('git rev-parse HEAD')
+    utils.run('git fetch ' + REMOTE_NAME)
+    target_commit = utils.system_output(
+            'git rev-parse {}'.format(BRANCH_NAME_FULL))
+
+    output = utils.run('wget -O {} {}'.format(
+            LATEST_STABLE_AUTOTEST_COMMIT,
+            HTTP_LATEST_STABLE_AUTOTEST_COMMIT_URL),
+                       ignore_status=True)
+
+    if output.exit_status != 0:
+        logging.info('Failed to fetch the latest commit from the server')
+        logging.info(output.stdout)
+        logging.info(output.stderr)
+    else:
+        with open(LATEST_STABLE_AUTOTEST_COMMIT) as commit_file:
+            target_commit = commit_file.readline().strip()
+
+    logging.info('The commit that will be used is:\n%s', target_commit)
+    return current_commit, target_commit
+
+
+def checkout_commit(commit):
+    """ Checkout the autotest directory to the specified commit."""
+    output = utils.run('git checkout {}'.format(commit), ignore_status=True)
+    if output.exit_status != 0:
+        logging.info(output.stderr)
+        logging.info('Failed to checkout target commit, please retry '
+                     'after\nrepo sync')
+    else:
+        logging.info('Target (stable or ToT) autotest commit is checked out,\n'
+                     'please rerun the test!')
+
+
+def test_version_setup_exit_print():
+    """ Exit the setup and return to the previous CWD."""
+    logging.info('=======================================================\n')
+    os.chdir(CWD)
+
+
+def test_version_setup():
+    """This and above functions hope to sync the AVL test environments
+    among different vendors, partners, and developers by providing an
+    automatic process to fetch a commit hash of the "released"
+    (or "stabled") version of the autotest directory from the cloud and
+    checkout locally. No manual interaction should be expected.
+
+    @returns: True if current commit version satisfied requirement, the
+              test shall proceed. False otherwise.
+    """
+    logging.info('=======================================================\n'
+                 '                    AVL Test Setup\n')
+
+    os.chdir(BLUETOOTH_DIR)
+    if not check_git_tree_clean():
+        test_version_setup_exit_print()
+        return False
+
+    current_commit, target_commit = fetch_target_commit()
+    if current_commit == target_commit:
+        logging.info('Local tree is already at target autotest commit.')
+        test_version_setup_exit_print()
+        return True
+
+    checkout_commit(target_commit)
+    test_version_setup_exit_print()
+    return False
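A rough caller sketch (illustration only, not part of the patch) showing how a
setup script might gate an AVL run on test_version_setup(). The helper name
ensure_avl_version() is hypothetical; the import path follows the file location
added above.

import sys

from autotest_lib.server.cros.bluetooth import bluetooth_test_version_control

def ensure_avl_version():
    """Abort the run unless the tree is already at the target commit."""
    if not bluetooth_test_version_control.test_version_setup():
        # Either the tree was dirty or a different commit was just checked
        # out; in the latter case the test should simply be rerun.
        sys.exit('Autotest tree is not at the AVL target commit; rerun.')

ensure_avl_version()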
diff --git a/server/cros/bluetooth/bluetooth_valid_address_test.py b/server/cros/bluetooth/bluetooth_valid_address_test.py
index 989961a..a3d1d1b 100644
--- a/server/cros/bluetooth/bluetooth_valid_address_test.py
+++ b/server/cros/bluetooth/bluetooth_valid_address_test.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -20,21 +21,35 @@
     version = 1
 
     def valid_address_test(self):
-        """Verify that the client Bluetooth adapter has a valid address."""
-        # Reset the adapter to the powered off state.
-        self.test_reset_off_adapter()
+        """Verify that the client Bluetooth adapter has a valid address.
 
-        # Read the address both via BlueZ and via the kernel mgmt_ops interface.
-        # Compare the two, they should not differ.
-        bluez_properties = self.get_adapter_properties()
-        controller_info = self.read_info()
+        The test is different when running Floss vs Bluez.
 
-        if bluez_properties['Address'] != controller_info[0]:
-            raise error.TestFail(
-                    'BlueZ and Kernel adapter address differ: %s != %s' %
-                    (bluez_properties['Address'], controller_info[0]))
+        On Floss, we enable the adapter and check the address is valid.
 
-        address = controller_info[0]
+        On Bluez, we power off the adapter, verify the address, then power on
+        the adapter to make sure nothing changes. We also compare the address
+        seen in userspace vs kernel.
+        """
+        if self.bluetooth_facade.is_floss():
+            self.test_reset_on_adapter()
+        else:
+            # Reset the adapter to the powered off state.
+            self.test_reset_off_adapter()
+
+        address = self.bluetooth_facade.get_address()
+
+        # Bluez needs to compare address against kernel
+        if not self.bluetooth_facade.is_floss():
+            # Read the address both via BlueZ and via the kernel mgmt_ops
+            # interface.  Compare the two, they should not differ.
+            controller_info = self.read_info()
+
+            if address != controller_info[0]:
+                raise error.TestFail(
+                        'BlueZ and Kernel adapter address differ: %s != %s' %
+                        (address, controller_info[0]))
+
         logging.debug('Bluetooth address of adapter is %s', address)
 
         # Health check the address
@@ -52,16 +67,17 @@
         if address.endswith(':FF:FF:FF'):
             raise error.TestFail('Device portion of address is all ones')
 
-        # Verify that the address is still the same after powering on the radio.
-        self.test_power_on_adapter()
-        bluez_properties = self.get_adapter_properties()
-        controller_info = self.read_info()
+        if not self.bluetooth_facade.is_floss():
+            # Verify that the address is still the same after powering on the radio.
+            self.test_power_on_adapter()
+            new_address = self.bluetooth_facade.get_address()
+            controller_info = self.read_info()
 
-        if bluez_properties['Address'] != address:
-            raise error.TestFail(
-                    'BlueZ adapter address changed after power on: %s != %s' %
-                    (bluez_properties['Address'], address))
-        if controller_info[0] != address:
-            raise error.TestFail(
-                    'Kernel adapter address changed after power on: %s != %s' %
-                    (controller_info[0], address))
+            if new_address != address:
+                raise error.TestFail(
+                        'BlueZ adapter address changed after power on: %s != %s'
+                        % (new_address, address))
+            if controller_info[0] != address:
+                raise error.TestFail(
+                        'Kernel adapter address changed after power on: %s != %s'
+                        % (controller_info[0], address))
diff --git a/server/cros/bluetooth/common.py b/server/cros/bluetooth/common.py
index 3bae9bd..d36a6a4 100644
--- a/server/cros/bluetooth/common.py
+++ b/server/cros/bluetooth/common.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/bluetooth/debug_linux_keymap.py b/server/cros/bluetooth/debug_linux_keymap.py
index fc59c85..0604d24 100644
--- a/server/cros/bluetooth/debug_linux_keymap.py
+++ b/server/cros/bluetooth/debug_linux_keymap.py
@@ -1,56 +1,58 @@
+# Lint as: python2, python3
+
 # HID has one keymapping, and linux input for the event recorder has another.
 
 # This is not complete, just for initial debugging
 linux_input_keymap = {
-    2 : '1',
-    3 : '2',
-    4 : '3',
-    5 : '4',
-    6 : '5',
-    7 : '6',
-    8 : '7',
-    9 : '8',
-    10 : '9',
-    11 : '0',
-    12 : '-',
-    13 : '=',
-    14 : '\b',
-    15 : '\t',
-    16 : 'q',
-    17 : 'w',
-    18 : 'e',
-    19 : 'r',
-    20 : 't',
-    21 : 'y',
-    22 : 'u',
-    23 : 'i',
-    24 : 'o',
-    25 : 'p',
-    26 : '{',
-    27 : '}',
-    28 : '\n',
-    30 : 'a',
-    31 : 's',
-    32 : 'd',
-    33 : 'f',
-    34 : 'g',
-    35 : 'h',
-    36 : 'j',
-    37 : 'k',
-    38 : 'l',
-    39 : ';',
-    40 : '\'',
-    41 : '`',
-    43 : '\\',
-    44 : 'z',
-    45 : 'x',
-    46 : 'c',
-    47 : 'v',
-    48 : 'b',
-    49 : 'n',
-    50 : 'm',
-    51 : ',',
-    52 : '.',
-    53 : '/',
-    57 : ' ',
-}
\ No newline at end of file
+        2: '1',
+        3: '2',
+        4: '3',
+        5: '4',
+        6: '5',
+        7: '6',
+        8: '7',
+        9: '8',
+        10: '9',
+        11: '0',
+        12: '-',
+        13: '=',
+        14: '\b',
+        15: '\t',
+        16: 'q',
+        17: 'w',
+        18: 'e',
+        19: 'r',
+        20: 't',
+        21: 'y',
+        22: 'u',
+        23: 'i',
+        24: 'o',
+        25: 'p',
+        26: '{',
+        27: '}',
+        28: '\n',
+        30: 'a',
+        31: 's',
+        32: 'd',
+        33: 'f',
+        34: 'g',
+        35: 'h',
+        36: 'j',
+        37: 'k',
+        38: 'l',
+        39: ';',
+        40: '\'',
+        41: '`',
+        43: '\\',
+        44: 'z',
+        45: 'x',
+        46: 'c',
+        47: 'v',
+        48: 'b',
+        49: 'n',
+        50: 'm',
+        51: ',',
+        52: '.',
+        53: '/',
+        57: ' ',
+}
diff --git a/server/cros/camerabox_utils.py b/server/cros/camerabox_utils.py
index d654469..f1257a4 100644
--- a/server/cros/camerabox_utils.py
+++ b/server/cros/camerabox_utils.py
@@ -6,29 +6,39 @@
 import contextlib
 import json
 import logging
-from lxml import etree
 import os
-import six
 import time
 
 from autotest_lib.client.common_lib import error, utils
-from autotest_lib.server.cros.tradefed import tradefed_chromelogin as login
 
 
 class ChartFixture:
-    """Sets up chart tablet to display dummy scene image."""
+    """Sets up chart tablet to display placeholder scene image."""
     DISPLAY_SCRIPT = '/usr/local/autotest/bin/display_chart.py'
     OUTPUT_LOG = '/tmp/chart_service.log'
 
-    def __init__(self, chart_host, scene_uri):
+    def __init__(self, chart_host, scene_uri, job=None):
         self.host = chart_host
         self.scene_uri = scene_uri
+        self.job = job
         self.display_pid = None
-        self.host.run(['rm', '-f', self.OUTPUT_LOG])
+        self.host.run(['rm', '-f', self.OUTPUT_LOG], ignore_status=True)
 
     def initialize(self):
         """Prepare scene file and display it on chart host."""
         logging.info('Prepare scene file')
+        chart_version = self.host.run(
+                'cat /etc/lsb-release | grep CHROMEOS_RELEASE_BUILDER_PATH')
+        logging.info('Chart version: %s', chart_version)
+        if utils.is_in_container():
+            # Reboot chart to clean the dirty state from last test. See
+            # b/201032899.
+            version = self.host.get_release_builder_path()
+            self.job.run_test('provision_QuickProvision',
+                              host=self.host,
+                              value=version,
+                              force_update_engine=True)
+
         tmpdir = self.host.get_tmp_dir()
         scene_path = os.path.join(
                 tmpdir, self.scene_uri[self.scene_uri.rfind('/') + 1:])
@@ -36,7 +46,7 @@
 
         logging.info('Display scene file')
         self.display_pid = self.host.run_background(
-                'python2 {script} {scene} >{log} 2>&1'.format(
+                'python {script} {scene} >{log} 2>&1'.format(
                         script=self.DISPLAY_SCRIPT,
                         scene=scene_path,
                         log=self.OUTPUT_LOG))
@@ -87,8 +97,6 @@
 class DUTFixture:
     """Sets up camera filter for target camera facing on DUT."""
     TEST_CONFIG_PATH = '/var/cache/camera/test_config.json'
-    CAMERA_PROFILE_PATH = ('/mnt/stateful_partition/encrypted/var/cache/camera'
-                           '/media_profiles.xml')
     CAMERA_SCENE_LOG = '/tmp/scene.jpg'
 
     def __init__(self, test, host, facing):
@@ -103,50 +111,6 @@
         yield
         self.host.run('setenforce', args=(selinux_mode, ))
 
-    def _filter_camera_profile(self, content, facing):
-        """Filter camera profile of target facing from content of camera
-        profile.
-
-        @return:
-            New camera profile with only target facing, camera ids are
-            renumbered from 0.
-        """
-        tree = etree.parse(
-                six.StringIO(content),
-                parser=etree.XMLParser(compact=False))
-        root = tree.getroot()
-        profiles = root.findall('CamcorderProfiles')
-        logging.debug('%d number of camera(s) found in camera profile',
-                      len(profiles))
-        assert 1 <= len(profiles) <= 2
-        if len(profiles) == 2:
-            cam_id = 0 if facing == 'back' else 1
-            for p in profiles:
-                if cam_id == int(p.attrib['cameraId']):
-                    p.attrib['cameraId'] = '0'
-                else:
-                    root.remove(p)
-        else:
-            with login.login_chrome(
-                    hosts=[self.host],
-                    board=self.test._get_board_name(),
-            ), self._set_selinux_permissive():
-                has_front_camera = (
-                        'feature:android.hardware.camera.front' in self.host.
-                        run_output('android-sh -c "pm list features"'))
-                logging.debug('has_front_camera=%s', has_front_camera)
-            if (facing == 'front') != has_front_camera:
-                root.remove(profiles[0])
-        return etree.tostring(
-                tree, xml_declaration=True, encoding=tree.docinfo.encoding)
-
-    def _read_file(self, filepath):
-        """Read content of filepath from host."""
-        tmp_path = os.path.join(self.test.tmpdir, os.path.basename(filepath))
-        self.host.get_file(filepath, tmp_path, delete_dest=True)
-        with open(tmp_path) as f:
-            return f.read()
-
     def _write_file(self, filepath, content, permission=None, owner=None):
         """Write content to filepath on remote host.
         @param permission: set permission to 0xxx octal number of remote file.
@@ -163,7 +127,6 @@
 
     def initialize(self):
         """Filter out camera other than target facing on DUT."""
-        logging.info('Restart camera service with filter option')
         self._write_file(
                 self.TEST_CONFIG_PATH,
                 json.dumps({
@@ -172,16 +135,26 @@
                         'enable_external_camera': False
                 }),
                 owner='arc-camera')
+
+        # cros_camera_service will reference the test config to filter out
+        # undesired cameras.
+        logging.info('Restart camera service with filter option')
         self.host.upstart_restart('cros-camera')
 
-        logging.info('Replace camera profile in ARC++ container')
-        profile = self._read_file(self.CAMERA_PROFILE_PATH)
-        new_profile = self._filter_camera_profile(profile, self.facing)
-        self._write_file(self.CAMERA_PROFILE_PATH, new_profile)
+        # arc_setup will reference the test config to filter out the media
+        # profile of undesired cameras.
+        logging.info('Restart ARC++ container with camera test config')
         self.host.run('restart ui')
 
     @contextlib.contextmanager
     def _stop_camera_service(self):
+        # Ensure the camera service is running, or upstart_stop()/
+        # upstart_restart() may fail because the job is in the
+        # "start|post-stop" sleep state for respawning. See b/183904344 for
+        # details.
+        logging.info('Wait for presence of camera service')
+        self.host.wait_for_service('cros-camera')
+
         self.host.upstart_stop('cros-camera')
         yield
         self.host.upstart_restart('cros-camera')
@@ -207,8 +180,8 @@
     def cleanup(self):
         """Cleanup camera filter."""
         logging.info('Remove filter option and restore camera service')
-        self.host.run('rm', args=('-f', self.TEST_CONFIG_PATH))
-        self.host.upstart_restart('cros-camera')
+        with self._stop_camera_service():
+            self.host.run('rm', args=('-f', self.TEST_CONFIG_PATH))
 
         logging.info('Restore camera profile in ARC++ container')
         self.host.run('restart ui')
diff --git a/server/cros/cellular/__init__.py b/server/cros/cellular/__init__.py
new file mode 100644
index 0000000..8fd5dbe
--- /dev/null
+++ b/server/cros/cellular/__init__.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Consistent path includes that work across local-dev and portage package"""
+
+import sys
+import os
+
+this_dir = os.path.dirname(__file__)
+sys.path.insert(0, this_dir)
+# Portage path
+sys.path.insert(0, os.path.join(this_dir, '../../../config/python/'))
+# Local path
+sys.path.insert(0, os.path.join(this_dir, '../../../../../../config/python/'))
diff --git a/server/cros/cellular/abstract_inst.py b/server/cros/cellular/abstract_inst.py
new file mode 100644
index 0000000..08ec0bd
--- /dev/null
+++ b/server/cros/cellular/abstract_inst.py
@@ -0,0 +1,207 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Python module for Abstract Instrument Library."""
+
+import logging
+import requests
+import socket
+
+
+class SocketInstrumentError(Exception):
+    """Abstract Instrument Error Class, via Socket and SCPI."""
+
+    def __init__(self, error, command=None):
+        """Init method for Socket Instrument Error.
+
+        Args:
+            error: Exception error.
+            command: Additional information on command,
+                Type, Str.
+        """
+        super(SocketInstrumentError, self).__init__(error)
+        self._error_code = error
+        self._error_message = self._error_code
+        if command is not None:
+            self._error_message = 'Command {} returned the error: {}.'.format(
+                    repr(command), repr(self._error_message))
+
+    def __str__(self):
+        return self._error_message
+
+
+class SocketInstrument(object):
+    """Abstract Instrument Class, via Socket and SCPI."""
+
+    def __init__(self, ip_addr, ip_port):
+        """Init method for Socket Instrument.
+
+        Args:
+            ip_addr: IP Address.
+                Type, str.
+            ip_port: TCPIP Port.
+                Type, str.
+        """
+        self._logger = logging.getLogger(__name__)
+        self._socket_timeout = 120
+        self._socket_buffer_size = 1024
+
+        self._ip_addr = ip_addr
+        self._ip_port = ip_port
+
+        self._escseq = '\n'
+        self._codefmt = 'utf-8'
+
+        self._socket = None
+
+    def _connect_socket(self):
+        """Init and Connect to socket."""
+        try:
+            self._socket = socket.create_connection(
+                    (self._ip_addr, self._ip_port),
+                    timeout=self._socket_timeout)
+
+            infmsg = 'Opened Socket connection to {}:{} with handle {}.'.format(
+                    repr(self._ip_addr), repr(self._ip_port),
+                    repr(self._socket))
+            self._logger.debug(infmsg)
+
+        except socket.timeout:
+            errmsg = 'Socket timeout while connecting to instrument.'
+            raise SocketInstrumentError(errmsg)
+
+        except socket.error:
+            errmsg = 'Socket error while connecting to instrument.'
+            raise SocketInstrumentError(errmsg)
+
+    def _send(self, cmd):
+        """Send command via Socket.
+
+        Args:
+            cmd: Command to send,
+                Type, Str.
+        """
+        if not self._socket:
+            self._connect_socket()
+
+        cmd_es = cmd + self._escseq
+
+        try:
+            self._socket.sendall(cmd_es.encode(self._codefmt))
+
+        except socket.timeout:
+            errmsg = ('Socket timeout while sending command {} '
+                      'to instrument.').format(repr(cmd))
+            raise SocketInstrumentError(errmsg)
+
+        except socket.error:
+            errmsg = ('Socket error while sending command {} '
+                      'to instrument.').format(repr(cmd))
+            raise SocketInstrumentError(errmsg)
+
+        except Exception as err:
+            errmsg = ('Error {} while sending command {} '
+                      'to instrument.').format(repr(err), repr(cmd))
+            raise SocketInstrumentError(errmsg)
+
+    def _recv(self):
+        """Receive response via Socket.
+
+        Returns:
+            resp: Response from Instrument via Socket,
+                Type, Str.
+        """
+        if not self._socket:
+            self._connect_socket()
+
+        resp = ''
+
+        try:
+            while True:
+                resp_tmp = self._socket.recv(self._socket_buffer_size)
+                resp_tmp = resp_tmp.decode(self._codefmt)
+                resp += resp_tmp
+                if len(resp_tmp) < self._socket_buffer_size:
+                    break
+
+        except socket.timeout:
+            errmsg = 'Socket timeout while receiving response from instrument.'
+            raise SocketInstrumentError(errmsg)
+
+        except socket.error:
+            errmsg = 'Socket error while receiving response from instrument.'
+            raise SocketInstrumentError(errmsg)
+
+        except Exception as err:
+            errmsg = ('Error {} while receiving response '
+                      'from instrument').format(repr(err))
+            raise SocketInstrumentError(errmsg)
+
+        resp = resp.rstrip(self._escseq)
+
+        return resp
+
+    def _close_socket(self):
+        """Close Socket Instrument."""
+        if not self._socket:
+            return
+
+        try:
+            self._socket.shutdown(socket.SHUT_RDWR)
+            self._socket.close()
+            self._socket = None
+
+        except Exception as err:
+            errmsg = 'Error {} while closing instrument.'.format(repr(err))
+            raise SocketInstrumentError(errmsg)
+
+    def _query(self, cmd):
+        """query instrument via Socket.
+
+        Args:
+            cmd: Command to send,
+                Type, Str.
+
+        Returns:
+            resp: Response from Instrument via Socket,
+                Type, Str.
+        """
+        self._send(cmd + ';*OPC?')
+        resp = self._recv()
+        return resp
+
+
+class RequestInstrument(object):
+    """Abstract Instrument Class, via Request."""
+
+    def __init__(self, ip_addr):
+        """Init method for request instrument.
+
+        Args:
+            ip_addr: IP Address.
+                Type, Str.
+        """
+        self._request_timeout = 120
+        self._request_protocol = 'http'
+        self._ip_addr = ip_addr
+        self._escseq = '\r\n'
+
+    def _query(self, cmd):
+        """query instrument via request.
+
+        Args:
+            cmd: Command to send,
+                Type, Str.
+
+        Returns:
+            resp: Response from Instrument via request,
+                Type, Str.
+        """
+        request_cmd = '{}://{}/{}'.format(self._request_protocol,
+                                          self._ip_addr, cmd)
+        resp_raw = requests.get(request_cmd, timeout=self._request_timeout)
+
+        resp = resp_raw.text
+        for char_del in self._escseq:
+            resp = resp.replace(char_del, '')
+
+        return resp
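As a rough usage sketch of the abstract classes above (the driver name, IP address and
port below are illustrative assumptions, not part of this change), a concrete SCPI
driver would subclass SocketInstrument and build its commands on top of _query():

    from autotest_lib.server.cros.cellular import abstract_inst

    class ExampleScpiInstrument(abstract_inst.SocketInstrument):
        """Minimal sketch of a SCPI driver built on SocketInstrument."""

        def identify(self):
            # Returns the response to the standard *IDN? identification query;
            # the base class appends ';*OPC?' to synchronize on completion.
            return self._query('*IDN?')

    inst = ExampleScpiInstrument('192.168.0.10', 5025)  # hypothetical address
    print(inst.identify())
    inst._close_socket()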
diff --git a/server/cros/cellular/callbox_server.py b/server/cros/cellular/callbox_server.py
new file mode 100644
index 0000000..8b793e8
--- /dev/null
+++ b/server/cros/cellular/callbox_server.py
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import grpc
+
+from chromiumos.test.api import callbox_service_pb2 as cbp
+from chromiumos.test.api import callbox_service_pb2_grpc as cbs
+
+from concurrent import futures
+
+
+class CallBoxServer(cbs.CallboxServiceServicer):
+    """Implements the callbox_service.proto API"""
+
+    def CheckHealth(self, request, context):
+        """ Basic endpoint to check the service is up """
+        return cbp.CheckHealthResponse()
+
+
+def serve():
+    """Start/run the server with a single worker thread"""
+    server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
+    cbs.add_CallboxServiceServicer_to_server(CallBoxServer(), server)
+    server.add_insecure_port('[::]:50051')
+    server.start()
+    return server
+
+
+if __name__ == '__main__':
+    server = serve()
+    server.wait_for_termination()
diff --git a/server/cros/cellular/callbox_server_unittest.py b/server/cros/cellular/callbox_server_unittest.py
new file mode 100644
index 0000000..21e6a75
--- /dev/null
+++ b/server/cros/cellular/callbox_server_unittest.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=module-missing-docstring,class-missing-docstring
+
+import grpc
+import unittest
+
+import callbox_server
+
+from chromiumos.test.api import callbox_service_pb2 as cbp
+from chromiumos.test.api import callbox_service_pb2_grpc as cbs
+
+
+class CallboxServerTest(unittest.TestCase):
+    def test_check_health(self):
+        server = callbox_server.serve()
+        with grpc.insecure_channel('localhost:50051') as channel:
+            client = cbs.CallboxServiceStub(channel)
+            client.CheckHealth(cbp.CheckHealthRequest())
+        server.stop(grace=1).wait()
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/server/cros/cellular/callbox_utils/CallboxLookup.py b/server/cros/cellular/callbox_utils/CallboxLookup.py
new file mode 100644
index 0000000..f0d9d45
--- /dev/null
+++ b/server/cros/cellular/callbox_utils/CallboxLookup.py
@@ -0,0 +1,27 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Dict that takes DUT host names as the key and returns the callbox
+that it is connected to. DUTs need to be added to this as they are
+set up.
+"""
+
+callboxes = {
+        'chromeos1-donutlab-callbox1-host1.cros':
+        'chromeos1-donutlab-callbox1.cros',
+        'chromeos1-donutlab-callbox1-host1':
+        'chromeos1-donutlab-callbox1.cros',
+        'chromeos1-donutlab-callbox2-host1.cros':
+        'chromeos1-donutlab-callbox2.cros',
+        'chromeos1-donutlab-callbox2-host1':
+        'chromeos1-donutlab-callbox2.cros',
+        'chromeos1-donutlab-callbox3-host1.cros':
+        'chromeos1-donutlab-callbox3.cros',
+        'chromeos1-donutlab-callbox3-host1':
+        'chromeos1-donutlab-callbox3.cros',
+        'chromeos1-donutlab-callbox4-host1.cros':
+        'chromeos1-donutlab-callbox4.cros',
+        'chromeos1-donutlab-callbox4-host1':
+        'chromeos1-donutlab-callbox4.cros',
+}
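A minimal lookup sketch for the mapping above (the host name is one of the entries
already present; the import path assumes the usual autotest_lib layout):

    from autotest_lib.server.cros.cellular.callbox_utils import CallboxLookup

    dut_host = 'chromeos1-donutlab-callbox1-host1.cros'
    callbox = CallboxLookup.callboxes.get(dut_host)
    if callbox is None:
        raise KeyError('No callbox mapping for DUT {}'.format(dut_host))
    print(callbox)  # chromeos1-donutlab-callbox1.cros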
diff --git a/server/cros/packet_generation/__init__.py b/server/cros/cellular/callbox_utils/__init__.py
similarity index 100%
copy from server/cros/packet_generation/__init__.py
copy to server/cros/cellular/callbox_utils/__init__.py
diff --git a/server/cros/cellular/callbox_utils/cmw500.py b/server/cros/cellular/callbox_utils/cmw500.py
new file mode 100644
index 0000000..043d31d
--- /dev/null
+++ b/server/cros/cellular/callbox_utils/cmw500.py
@@ -0,0 +1,1168 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from enum import Enum
+import time
+
+from autotest_lib.server.cros.cellular import abstract_inst
+
+LTE_ATTACH_RESP = 'ATT'
+LTE_CONN_RESP = 'CONN'
+LTE_IDLE_RESP = 'IDLE'
+LTE_PSWITCHED_ON_RESP = 'ON'
+LTE_PSWITCHED_OFF_RESP = 'OFF'
+
+STATE_CHANGE_TIMEOUT = 20
+
+
+class LteState(Enum):
+    """LTE ON and OFF"""
+    LTE_ON = 'ON'
+    LTE_OFF = 'OFF'
+
+
+class BtsNumber(Enum):
+    """Base station Identifiers."""
+    BTS1 = 'PCC'
+    BTS2 = 'SCC1'
+    BTS3 = 'SCC2'
+    BTS4 = 'SCC3'
+    BTS5 = 'SCC4'
+    BTS6 = 'SCC6'
+    BTS7 = 'SCC7'
+
+
+class LteBandwidth(Enum):
+    """Supported LTE bandwidths."""
+    BANDWIDTH_1MHz = 'B014'
+    BANDWIDTH_3MHz = 'B030'
+    BANDWIDTH_5MHz = 'B050'
+    BANDWIDTH_10MHz = 'B100'
+    BANDWIDTH_15MHz = 'B150'
+    BANDWIDTH_20MHz = 'B200'
+
+
+class DuplexMode(Enum):
+    """Duplex Modes"""
+    FDD = 'FDD'
+    TDD = 'TDD'
+
+
+class SchedulingMode(Enum):
+    """Supported scheduling modes."""
+    RMC = 'RMC'
+    USERDEFINEDCH = 'UDCHannels'
+
+
+class TransmissionModes(Enum):
+    """Supported transmission modes."""
+    TM1 = 'TM1'
+    TM2 = 'TM2'
+    TM3 = 'TM3'
+    TM4 = 'TM4'
+    TM7 = 'TM7'
+    TM8 = 'TM8'
+    TM9 = 'TM9'
+
+
+class UseCarrierSpecific(Enum):
+    """Enable or disable carrier specific."""
+    UCS_ON = 'ON'
+    UCS_OFF = 'OFF'
+
+
+class RbPosition(Enum):
+    """Supported RB positions."""
+    LOW = 'LOW'
+    HIGH = 'HIGH'
+    P5 = 'P5'
+    P10 = 'P10'
+    P23 = 'P23'
+    P35 = 'P35'
+    P48 = 'P48'
+
+
+class ModulationType(Enum):
+    """Supported Modulation Types."""
+    QPSK = 'QPSK'
+    Q16 = 'Q16'
+    Q64 = 'Q64'
+    Q256 = 'Q256'
+
+
+class DciFormat(Enum):
+    """Support DCI Formats for MIMOs"""
+    D1 = 'D1'
+    D1A = 'D1A'
+    D1B = 'D1B'
+    D2 = 'D2'
+    D2A = 'D2A'
+    D2B = 'D2B'
+    D2C = 'D2C'
+
+
+class MimoModes(Enum):
+    """MIMO Modes dl antennas"""
+    MIMO1x1 = 'ONE'
+    MIMO2x2 = 'TWO'
+    MIMO4x4 = 'FOUR'
+
+
+class MimoScenario(Enum):
+    """Supported mimo scenarios"""
+    SCEN1x1 = 'SCELl:FLEXible SUA1,RF1C,RX1,RF1C,TX1'
+    SCEN2x2 = 'TRO:FLEXible SUA1,RF1C,RX1,RF1C,TX1,RF3C,TX2'
+    SCEN4x4 = 'FRO FLEXible SUA1,RF1C,RX1,RF1C,TX1,RF3C,TX2,RF2C,TX3,RF4C,TX4'
+
+
+class RrcState(Enum):
+    """States to enable/disable rrc."""
+    RRC_ON = 'ON'
+    RRC_OFF = 'OFF'
+
+
+class MacPadding(Enum):
+    """Enables/Disables Mac Padding."""
+    ON = 'ON'
+    OFF = 'OFF'
+
+
+class ConnectionType(Enum):
+    """Supported Connection Types."""
+    TEST = 'TESTmode'
+    DAU = 'DAPPlication'
+
+
+class RepetitionMode(Enum):
+    """Specifies LTE Measurement Repetition Mode."""
+    SINGLESHOT = 'SINGleshot'
+    CONTINUOUS = 'CONTinuous'
+
+
+class TpcPowerControl(Enum):
+    """Specifies Up Link power control types."""
+    MIN_POWER = 'MINPower'
+    MAX_POWER = 'MAXPower'
+    CONSTANT = 'CONStant'
+    SINGLE = 'SINGle'
+    UDSINGLE = 'UDSingle'
+    UDCONTINUOUS = 'UDContinuous'
+    ALTERNATE = 'ALT0'
+    CLOSED_LOOP = 'CLOop'
+    RP_CONTROL = 'RPControl'
+    FLEX_POWER = 'FULPower'
+
+
+class ReducedPdcch(Enum):
+    """Enables/disables the reduction of PDCCH resources."""
+    ON = 'ON'
+    OFF = 'OFF'
+
+
+class Cmw500(abstract_inst.SocketInstrument):
+    """ Base class for interfacing with the CMW500 Callbox device """
+
+    def __init__(self, ip_addr, port):
+        """Init method to setup variables for controllers.
+
+        Args:
+              ip_addr: Controller's ip address.
+              port: Port
+        """
+        super(Cmw500, self).__init__(ip_addr, port)
+        self._connect_socket()
+        self._send('*CLS')
+        self._send('*ESE 0;*SRE 0')
+        self._send('*CLS')
+        self._send('*ESE 1;*SRE 4')
+        self._send('SYST:DISP:UPD ON')
+
+    def switch_lte_signalling(self, state):
+        """ Turns LTE signalling ON/OFF.
+
+        Args:
+              state: an instance of LteState indicating the state to which LTE
+                signal has to be set.
+        """
+        if not isinstance(state, LteState):
+            raise ValueError('state should be an instance of LteState.')
+
+        state = state.value
+
+        cmd = 'SOURce:LTE:SIGN:CELL:STATe {}'.format(state)
+        self.send_and_recv(cmd)
+
+        time_elapsed = 0
+        while time_elapsed < STATE_CHANGE_TIMEOUT:
+            response = self.send_and_recv('SOURce:LTE:SIGN:CELL:STATe:ALL?')
+
+            if response == state + ',ADJ':
+                self._logger.info('LTE signalling is now {}.'.format(state))
+                break
+
+            # Wait for a second and increase time count by one
+            time.sleep(1)
+            time_elapsed += 1
+        else:
+            raise CmwError('Failed to turn LTE signalling {}.'.format(state))
+
+    def enable_packet_switching(self):
+        """Enable packet switching in call box."""
+        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion CONNect')
+        self.wait_for_pswitched_state()
+
+    def disable_packet_switching(self):
+        """Disable packet switching in call box."""
+        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion DISConnect')
+        self.wait_for_pswitched_state()
+
+    @property
+    def use_carrier_specific(self):
+        """Gets current status of carrier specific duplex configuration."""
+        return self.send_and_recv('CONFigure:LTE:SIGN:DMODe:UCSPECific?')
+
+    @use_carrier_specific.setter
+    def use_carrier_specific(self, state):
+        """Sets the carrier specific duplex configuration.
+
+        Args:
+            state: ON/OFF UCS configuration.
+        """
+        cmd = 'CONFigure:LTE:SIGN:DMODe:UCSPECific {}'.format(state)
+        self.send_and_recv(cmd)
+
+    def send_and_recv(self, cmd):
+        """Send and recv the status of the command.
+
+        Args:
+            cmd: Command to send.
+
+        Returns:
+            status: returns the status of the command sent.
+        """
+
+        self._send(cmd)
+        if '?' in cmd:
+            status = self._recv()
+            return status
+
+    def configure_mimo_settings(self, mimo):
+        """Sets the mimo scenario for the test.
+
+        Args:
+            mimo: mimo scenario to set.
+        """
+        cmd = 'ROUTe:LTE:SIGN:SCENario:{}'.format(mimo.value)
+        self.send_and_recv(cmd)
+
+    def wait_for_pswitched_state(self, timeout=10):
+        """Wait until pswitched state.
+
+        Args:
+            timeout: timeout for lte pswitched state.
+
+        Raises:
+            CmwError on timeout.
+        """
+        while timeout > 0:
+            state = self.send_and_recv('FETCh:LTE:SIGN:PSWitched:STATe?')
+            if state == LTE_PSWITCHED_ON_RESP:
+                self._logger.debug('Connection to setup initiated.')
+                break
+            elif state == LTE_PSWITCHED_OFF_RESP:
+                self._logger.debug('Connection to setup detached.')
+                break
+
+            # Wait for a second and decrease count by one
+            time.sleep(1)
+            timeout -= 1
+        else:
+            raise CmwError('Failure in setting up/detaching connection')
+
+    def wait_for_attached_state(self, timeout=120):
+        """Attach the controller with device.
+
+        Args:
+            timeout: timeout for phone to get attached.
+
+        Raises:
+            CmwError on time out.
+        """
+        while timeout > 0:
+            state = self.send_and_recv('FETCh:LTE:SIGN:PSWitched:STATe?')
+
+            if state == LTE_ATTACH_RESP:
+                self._logger.debug('Call box attached with device')
+                break
+
+            # Wait for a second and decrease count by one
+            time.sleep(1)
+            timeout -= 1
+        else:
+            raise CmwError('Device could not be attached')
+
+    def wait_for_rrc_state(self, state, timeout=120):
+        """ Waits until a certain RRC state is set.
+
+        Args:
+            state: the RRC state that is being waited for.
+            timeout: timeout for phone to be in connected state.
+
+        Raises:
+            CmwError on time out.
+        """
+        if state not in [LTE_CONN_RESP, LTE_IDLE_RESP]:
+            raise ValueError(
+                    'The allowed values for state are {} and {}.'.format(
+                            LTE_CONN_RESP, LTE_IDLE_RESP))
+
+        while timeout > 0:
+            new_state = self.send_and_recv('SENSe:LTE:SIGN:RRCState?')
+            if new_state == state:
+                self._logger.debug('The RRC state is {}.'.format(new_state))
+                break
+
+            # Wait for a second and decrease count by one
+            time.sleep(1)
+            timeout -= 1
+        else:
+            raise CmwError('Timeout before RRC state was {}.'.format(state))
+
+    def reset(self):
+        """System level reset"""
+        self.send_and_recv('*RST; *OPC')
+
+    @property
+    def get_instrument_id(self):
+        """Gets instrument identification number"""
+        return self.send_and_recv('*IDN?')
+
+    def disconnect(self):
+        """Disconnect controller from device and switch to local mode."""
+        self.switch_lte_signalling(LteState.LTE_OFF)
+        self.close_remote_mode()
+        self._close_socket()
+
+    def close_remote_mode(self):
+        """Exits remote mode to local mode."""
+        self.send_and_recv('&GTL')
+
+    def detach(self):
+        """Detach callbox and controller."""
+        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion DETach')
+
+    @property
+    def rrc_connection(self):
+        """Gets the RRC connection state."""
+        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:KRRC?')
+
+    @rrc_connection.setter
+    def rrc_connection(self, state):
+        """Selects whether the RRC connection is kept or released after attach.
+
+        Args:
+            state: an instance of RrcState (RRC_ON/RRC_OFF).
+        """
+        if not isinstance(state, RrcState):
+            raise ValueError('state should be an instance of RrcState.')
+
+        cmd = 'CONFigure:LTE:SIGN:CONNection:KRRC {}'.format(state.value)
+        self.send_and_recv(cmd)
+
+    @property
+    def rrc_connection_timer(self):
+        """Gets the inactivity timeout for disabled rrc connection."""
+        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:RITimer?')
+
+    @rrc_connection_timer.setter
+    def rrc_connection_timer(self, time_in_secs):
+        """Sets the inactivity timeout for disabled rrc connection. By default
+        the timeout is set to 5.
+
+        Args:
+            time_in_secs: timeout of inactivity in rrc connection.
+        """
+        cmd = 'CONFigure:LTE:SIGN:CONNection:RITimer {}'.format(time_in_secs)
+        self.send_and_recv(cmd)
+
+    @property
+    def dl_mac_padding(self):
+        """Gets the state of mac padding."""
+        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:DLPadding?')
+
+    @dl_mac_padding.setter
+    def dl_mac_padding(self, state):
+        """Enables/Disables downlink padding at the mac layer.
+
+        Args:
+            state: ON/OFF
+        """
+        cmd = 'CONFigure:LTE:SIGN:CONNection:DLPadding {}'.format(state.value)
+        self.send_and_recv(cmd)
+
+    @property
+    def connection_type(self):
+        """Gets the connection type applied in callbox."""
+        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:CTYPe?')
+
+    @connection_type.setter
+    def connection_type(self, ctype):
+        """Sets the connection type to be applied.
+
+        Args:
+            ctype: Connection type.
+        """
+        cmd = 'CONFigure:LTE:SIGN:CONNection:CTYPe {}'.format(ctype.value)
+        self.send_and_recv(cmd)
+
+    def get_base_station(self, bts_num=BtsNumber.BTS1):
+        """Gets the base station object based on bts num. By default
+        bts_num set to PCC
+
+        Args:
+            bts_num: base station identifier
+
+        Returns:
+            base station object.
+        """
+        return BaseStation(self, bts_num)
+
+    def init_lte_measurement(self):
+        """Gets the class object for lte measurement which can be used to
+        initiate measurements.
+
+        Returns:
+            lte measurement object.
+        """
+        return LteMeasurement(self)
+
+    def set_sms(self, sms_message):
+        """Sets the SMS message to be sent by the callbox."""
+        self.send_and_recv(
+                'CONFigure:LTE:SIGN:SMS:OUTGoing:INTernal "%s"' % sms_message)
+
+    def send_sms(self):
+        """Sends the SMS message and waits until it is reported as sent."""
+        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion SMS; *OPC?')
+        timeout = time.time() + STATE_CHANGE_TIMEOUT
+        while True:
+            status = self.send_and_recv(
+                    'SENSe:LTE:SIGN:SMS:OUTGoing:INFO:LMSent?')
+            if status == 'SUCC':
+                break
+            if time.time() > timeout:
+                raise CmwError("Outgoing SMS never reached status 'SUCC'; "
+                               'last status was ({}).'.format(status))
+            time.sleep(2)
+
+
+class BaseStation(object):
+    """Class to interact with different base stations"""
+
+    def __init__(self, cmw, bts_num):
+        if not isinstance(bts_num, BtsNumber):
+            raise ValueError('bts_num should be an instance of BtsNumber.')
+        self._bts = bts_num.value
+        self._cmw = cmw
+
+    @property
+    def duplex_mode(self):
+        """Gets current duplex of cell."""
+        cmd = 'CONFigure:LTE:SIGN:{}:DMODe?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @duplex_mode.setter
+    def duplex_mode(self, mode):
+        """Sets the Duplex mode of cell.
+
+        Args:
+            mode: String indicating FDD or TDD.
+        """
+        if not isinstance(mode, DuplexMode):
+            raise ValueError('mode should be an instance of DuplexMode.')
+
+        cmd = 'CONFigure:LTE:SIGN:{}:DMODe {}'.format(self._bts, mode.value)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def band(self):
+        """Gets the current band of cell."""
+        cmd = 'CONFigure:LTE:SIGN:{}:BAND?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @band.setter
+    def band(self, band):
+        """Sets the Band of cell.
+
+        Args:
+            band: band of cell.
+        """
+        cmd = 'CONFigure:LTE:SIGN:{}:BAND {}'.format(self._bts, band)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def dl_channel(self):
+        """Gets the downlink channel of cell."""
+        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @dl_channel.setter
+    def dl_channel(self, channel):
+        """Sets the downlink channel number of cell.
+
+        Args:
+            channel: downlink channel number of cell.
+        """
+        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL {}'.format(
+                self._bts, channel)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def ul_channel(self):
+        """Gets the uplink channel of cell."""
+        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @ul_channel.setter
+    def ul_channel(self, channel):
+        """Sets the up link channel number of cell.
+
+        Args:
+            channel: up link channel number of cell.
+        """
+        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL {}'.format(
+                self._bts, channel)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def bandwidth(self):
+        """Get the channel bandwidth of the cell."""
+        cmd = 'CONFigure:LTE:SIGN:CELL:BANDwidth:{}:DL?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @bandwidth.setter
+    def bandwidth(self, bandwidth):
+        """Sets the channel bandwidth of the cell.
+
+        Args:
+            bandwidth: channel bandwidth of cell.
+        """
+        if not isinstance(bandwidth, LteBandwidth):
+            raise ValueError('bandwidth should be an instance of '
+                             'LteBandwidth.')
+        cmd = 'CONFigure:LTE:SIGN:CELL:BANDwidth:{}:DL {}'.format(
+                self._bts, bandwidth.value)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def ul_frequency(self):
+        """Get the uplink frequency of the cell."""
+        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL? MHZ'.format(
+                self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @ul_frequency.setter
+    def ul_frequency(self, freq):
+        """Get the uplink frequency of the cell.
+
+        Args:
+            freq: uplink frequency of the cell.
+        """
+        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL {} MHZ'.format(
+                self._bts, freq)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def dl_frequency(self):
+        """Get the downlink frequency of the cell"""
+        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL? MHZ'.format(
+                self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @dl_frequency.setter
+    def dl_frequency(self, freq):
+        """Get the downlink frequency of the cell.
+
+        Args:
+            freq: downlink frequency of the cell.
+        """
+        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL {} MHZ'.format(
+                self._bts, freq)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def transmode(self):
+        """Gets the TM of cell."""
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:TRANsmission?'.format(
+                self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @transmode.setter
+    def transmode(self, tm_mode):
+        """Sets the TM of cell.
+
+        Args:
+            tm_mode: TM of cell.
+        """
+        if not isinstance(tm_mode, TransmissionModes):
+            raise ValueError('tm_mode should be an instance of '
+                             'TransmissionModes.')
+
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:TRANsmission {}'.format(
+                self._bts, tm_mode.value)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def downlink_power_level(self):
+        """Gets RSPRE level."""
+        cmd = 'CONFigure:LTE:SIGN:DL:{}:RSEPre:LEVel?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @downlink_power_level.setter
+    def downlink_power_level(self, pwlevel):
+        """Modifies RSPRE level.
+
+        Args:
+            pwlevel: power level in dBm.
+        """
+        cmd = 'CONFigure:LTE:SIGN:DL:{}:RSEPre:LEVel {}'.format(
+                self._bts, pwlevel)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def uplink_power_control(self):
+        """Gets open loop nominal power directly."""
+        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:OLNPower?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @uplink_power_control.setter
+    def uplink_power_control(self, ul_power):
+        """Sets open loop nominal power directly.
+
+        Args:
+            ul_power: uplink power level.
+        """
+        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:OLNPower {}'.format(
+                self._bts, ul_power)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def uldl_configuration(self):
+        """Gets uldl configuration of the cell."""
+        cmd = 'CONFigure:LTE:SIGN:CELL:{}:ULDL?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @uldl_configuration.setter
+    def uldl_configuration(self, uldl):
+        """Sets the ul-dl configuration.
+
+        Args:
+            uldl: Configuration value ranging from 0 to 6.
+        """
+        if uldl not in range(0, 7):
+            raise ValueError('uldl configuration value should be between'
+                             ' 0 and 6 inclusive.')
+
+        cmd = 'CONFigure:LTE:SIGN:CELL:{}:ULDL {}'.format(self._bts, uldl)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def tdd_special_subframe(self):
+        """Gets special subframe of the cell."""
+        cmd = 'CONFigure:LTE:SIGN:CELL:{}:SSUBframe?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @tdd_special_subframe.setter
+    def tdd_special_subframe(self, sframe):
+        """Sets the tdd special subframe of the cell.
+
+        Args:
+            sframe: Integer value ranging from 0 to 9.
+        """
+        if sframe not in range(0, 10):
+            raise ValueError('tdd special subframe should be between 0 and 9'
+                             ' inclusive.')
+
+        cmd = 'CONFigure:LTE:SIGN:CELL:{}:SSUBframe {}'.format(
+                self._bts, sframe)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def scheduling_mode(self):
+        """Gets the current scheduling mode."""
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:STYPe?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @scheduling_mode.setter
+    def scheduling_mode(self, mode):
+        """Sets the scheduling type for the cell.
+
+        Args:
+            mode: the scheduling mode to apply, an instance of SchedulingMode.
+        """
+        if not isinstance(mode, SchedulingMode):
+            raise ValueError('mode should be an instance of SchedulingMode.')
+
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:STYPe {}'.format(
+                self._bts, mode.value)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def rb_configuration_dl(self):
+        """Gets rmc's rb configuration for down link. This function returns
+        Number of Resource blocks, Resource block position and Modulation type.
+        """
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:{}:DL?'.format(
+                self._bts, self.scheduling_mode)
+        return self._cmw.send_and_recv(cmd)
+
+    @rb_configuration_dl.setter
+    def rb_configuration_dl(self, rb_config):
+        """Sets the rb configuration for down link for scheduling type.
+
+        Args:
+            rb_config: (rb, rb_pos, modulation) for RMC scheduling, or
+                (rb, start_rb, modulation, tbs) for user-defined channels.
+
+        Raises:
+            ValueError: If tuple unpacking fails.
+        """
+        if self.scheduling_mode == 'RMC':
+            rb, rb_pos, modulation = rb_config
+
+            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:RMC:DL {},{},'
+                   '{}'.format(self._bts, rb, rb_pos, modulation))
+            self._cmw.send_and_recv(cmd)
+
+        elif self.scheduling_mode == 'UDCH':
+            rb, start_rb, modulation, tbs = rb_config
+
+            self.validate_rb(rb)
+
+            if not isinstance(modulation, ModulationType):
+                raise ValueError('Modulation should be of type '
+                                 'ModulationType.')
+
+            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:UDCHannels:DL {},{},'
+                   '{},{}'.format(self._bts, rb, start_rb, modulation.value,
+                                  tbs))
+            self._cmw.send_and_recv(cmd)
+
+    @property
+    def rb_configuration_ul(self):
+        """Gets rb configuration for up link. This function returns
+        Number of Resource blocks, Resource block position and Modulation type.
+        """
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:{}:UL?'.format(
+                self._bts, self.scheduling_mode)
+        return self._cmw.send_and_recv(cmd)
+
+    @rb_configuration_ul.setter
+    def rb_configuration_ul(self, rb_config):
+        """Sets the rb configuration for down link for scheduling mode.
+
+        Args:
+            rb_config: Tuple containing Number of resource blocks, resource
+            block position and modulation type.
+
+        Raises:
+            ValueError: If tuple unpacking fails.
+        """
+        if self.scheduling_mode == 'RMC':
+            rb, rb_pos, modulation = rb_config
+
+            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:RMC:UL {},{},'
+                   '{}'.format(self._bts, rb, rb_pos, modulation))
+            self._cmw.send_and_recv(cmd)
+
+        elif self.scheduling_mode == 'UDCH':
+            rb, start_rb, modulation, tbs = rb_config
+
+            self.validate_rb(rb)
+
+            if not isinstance(modulation, ModulationType):
+                raise ValueError('Modulation should be of type '
+                                 'ModulationType.')
+            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:UDCHannels:UL {},{},'
+                   '{},{}'.format(self._bts, rb, start_rb, modulation.value,
+                                  tbs))
+            self._cmw.send_and_recv(cmd)
+
+    def validate_rb(self, rb):
+        """Validates if rb is within the limits for bandwidth set.
+
+        Args:
+            rb: No. of resource blocks.
+
+        Raises:
+            ValueError if rb out of range.
+        """
+        bandwidth = self.bandwidth
+
+        if bandwidth == LteBandwidth.BANDWIDTH_1MHz.value:
+            if not 0 <= rb <= 6:
+                raise ValueError('RB should be between 0 and 6 inclusive'
+                                 ' for 1.4 MHz.')
+        elif bandwidth == LteBandwidth.BANDWIDTH_3MHz.value:
+            if not 0 <= rb <= 10:
+                raise ValueError('RB should be between 0 and 10 inclusive'
+                                 ' for 3 MHz.')
+        elif bandwidth == LteBandwidth.BANDWIDTH_5MHz.value:
+            if not 0 <= rb <= 25:
+                raise ValueError('RB should be between 0 and 25 inclusive'
+                                 ' for 5 MHz.')
+        elif bandwidth == LteBandwidth.BANDWIDTH_10MHz.value:
+            if not 0 <= rb <= 50:
+                raise ValueError('RB should be between 0 and 50 inclusive'
+                                 ' for 10 MHz.')
+        elif bandwidth == LteBandwidth.BANDWIDTH_15MHz.value:
+            if not 0 <= rb <= 75:
+                raise ValueError('RB should be between 0 and 75 inclusive'
+                                 ' for 15 MHz.')
+        elif bandwidth == LteBandwidth.BANDWIDTH_20MHz.value:
+            if not 0 <= rb <= 100:
+                raise ValueError('RB should be between 0 and 100 inclusive'
+                                 ' for 20 MHz.')
+
+    @property
+    def rb_position_dl(self):
+        """Gets the position of the allocated down link resource blocks within
+        the channel band-width.
+        """
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:DL?'.format(
+                self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @rb_position_dl.setter
+    def rb_position_dl(self, rbpos):
+        """Selects the position of the allocated down link resource blocks
+        within the channel band-width
+
+        Args:
+            rbpos: position of resource blocks.
+        """
+        if not isinstance(rbpos, RbPosition):
+            raise ValueError('rbpos should be an instance of RbPosition.')
+
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:DL {}'.format(
+                self._bts, rbpos.value)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def rb_position_ul(self):
+        """Gets the position of the allocated up link resource blocks within
+        the channel band-width.
+        """
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:UL?'.format(
+                self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @rb_position_ul.setter
+    def rb_position_ul(self, rbpos):
+        """Selects the position of the allocated up link resource blocks
+        within the channel band-width.
+
+        Args:
+            rbpos: position of resource blocks.
+        """
+        if not isinstance(rbpos, RbPosition):
+            raise ValueError('rbpos should be an instance of RbPosition.')
+
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:UL {}'.format(
+                self._bts, rbpos.value)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def dci_format(self):
+        """Gets the downlink control information (DCI) format."""
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:DCIFormat?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @dci_format.setter
+    def dci_format(self, dci_format):
+        """Selects the downlink control information (DCI) format.
+
+        Args:
+            dci_format: supported dci.
+        """
+        if not isinstance(dci_format, DciFormat):
+            raise ValueError('dci_format should be an instance of DciFormat.')
+
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:DCIFormat {}'.format(
+                self._bts, dci_format)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def dl_antenna(self):
+        """Gets dl antenna count of cell."""
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:NENBantennas?'.format(
+                self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @dl_antenna.setter
+    def dl_antenna(self, num_antenna):
+        """Sets the dl antenna count of cell.
+
+        Args:
+            num_antenna: Count of number of dl antennas to use.
+        """
+        if not isinstance(num_antenna, MimoModes):
+            raise ValueError('num_antenna should be an instance of MimoModes.')
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:NENBantennas {}'.format(
+                self._bts, num_antenna)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def reduced_pdcch(self):
+        """Gets the reduction of PDCCH resources state."""
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:PDCCh:RPDCch?'.format(
+                self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @reduced_pdcch.setter
+    def reduced_pdcch(self, state):
+        """Sets the reduction of PDCCH resources state.
+
+        Args:
+            state: ON/OFF.
+        """
+        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:PDCCh:RPDCch {}'.format(
+                self._bts, state.value)
+        self._cmw.send_and_recv(cmd)
+
+    def tpc_power_control(self, set_type):
+        """Set and execute the Up Link Power Control via TPC.
+
+        Args:
+            set_type: Type of tpc power control.
+        """
+
+        if not isinstance(set_type, TpcPowerControl):
+            raise ValueError('set_type should be an instance of '
+                             'TpcPowerControl.')
+        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:SET {}'.format(
+                self._bts, set_type.value)
+        self._cmw.send_and_recv(cmd)
+        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:PEXecute'.format(self._bts)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def tpc_closed_loop_target_power(self):
+        """Gets the target powers for power control with the TPC setup."""
+        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:CLTPower?'.format(self._bts)
+        return self._cmw.send_and_recv(cmd)
+
+    @tpc_closed_loop_target_power.setter
+    def tpc_closed_loop_target_power(self, cltpower):
+        """Sets the target powers for power control with the TPC setup.
+
+        Args:
+            cltpower: Target power.
+        """
+        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:CLTPower {}'.format(
+                self._bts, cltpower)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def drx_connected_mode(self):
+        """ Gets the Connected DRX LTE cell parameter
+
+        Args:
+            None
+
+        Returns:
+            DRX connected mode (OFF, AUTO, MANUAL)
+        """
+        raise NotImplementedError()
+
+    @drx_connected_mode.setter
+    def drx_connected_mode(self, mode):
+        """  Sets the Connected DRX LTE cell parameter
+
+        Args:
+            mode: DRX Connected mode
+
+        Returns:
+            None
+        """
+        raise NotImplementedError()
+
+    @property
+    def drx_on_duration_timer(self):
+        """ Gets the amount of PDCCH subframes to wait for data after
+            waking up from a DRX cycle
+
+        Args:
+            None
+
+        Returns:
+            DRX mode duration timer
+        """
+        raise NotImplementedError()
+
+    @drx_on_duration_timer.setter
+    def drx_on_duration_timer(self, time):
+        """ Sets the amount of PDCCH subframes to wait for data after
+            waking up from a DRX cycle
+
+        Args:
+            time: Length of interval to wait for user data to be transmitted
+
+        Returns:
+            None
+        """
+        raise NotImplementedError()
+
+    @property
+    def drx_inactivity_timer(self):
+        """ Gets the number of PDCCH subframes to wait before entering DRX mode
+
+        Args:
+            None
+
+        Returns:
+            DRX mode inactivity timer
+        """
+        raise NotImplementedError()
+
+    @drx_inactivity_timer.setter
+    def drx_inactivity_timer(self, time):
+        """ Sets the number of PDCCH subframes to wait before entering DRX mode
+
+        Args:
+            time: Length of the interval to wait
+
+        Returns:
+            None
+        """
+        raise NotImplementedError()
+
+    @property
+    def drx_retransmission_timer(self):
+        """ Gets the number of consecutive PDCCH subframes to wait
+        for retransmission
+
+        Args:
+            None
+
+        Returns:
+            Number of PDCCH subframes to wait for retransmission
+        """
+        raise NotImplementedError()
+
+    @drx_retransmission_timer.setter
+    def drx_retransmission_timer(self, time):
+        """ Sets the number of consecutive PDCCH subframes to wait
+        for retransmission
+
+        Args:
+            time: Number of PDCCH subframes to wait
+            for retransmission
+
+        Returns:
+            None
+        """
+        raise NotImplementedError()
+
+    @property
+    def drx_long_cycle(self):
+        """ Gets the amount of subframes representing a DRX long cycle
+
+        Args:
+            None
+
+        Returns:
+            The amount of subframes representing one long DRX cycle.
+            One cycle consists of DRX sleep + DRX on duration
+        """
+        raise NotImplementedError()
+
+    @drx_long_cycle.setter
+    def drx_long_cycle(self, time):
+        """ Sets the amount of subframes representing a DRX long cycle
+
+        Args:
+            time: The amount of subframes representing one long DRX cycle.
+                One cycle consists of DRX sleep + DRX on duration.
+
+        Returns:
+            None
+        """
+        raise NotImplementedError()
+
+    @property
+    def drx_long_cycle_offset(self):
+        """ Gets the offset used to determine long cycle starting
+        subframe
+
+        Args:
+            None
+
+        Returns:
+            Long cycle offset
+        """
+        raise NotImplementedError()
+
+    @drx_long_cycle_offset.setter
+    def drx_long_cycle_offset(self, offset):
+        """ Sets the offset used to determine long cycle starting
+        subframe
+
+        Args:
+            offset: Number in range 0...(long cycle - 1)
+        """
+        raise NotImplementedError()
+
+
+class LteMeasurement(object):
+    """ Class for measuring LTE performance """
+
+    def __init__(self, cmw):
+        self._cmw = cmw
+
+    def intitilize_measurement(self):
+        """Initialize measurement modules."""
+        self._cmw.send_and_recv('INIT:LTE:MEAS:MEValuation')
+
+    @property
+    def measurement_repetition(self):
+        """Returns the measurement repetition mode that has been set."""
+        return self._cmw.send_and_recv(
+                'CONFigure:LTE:MEAS:MEValuation:REPetition?')
+
+    @measurement_repetition.setter
+    def measurement_repetition(self, mode):
+        """Sets the mode for measuring power levels.
+
+        Args:
+            mode: Single shot/continuous.
+        """
+        if not isinstance(mode, RepetitionMode):
+            raise ValueError('mode should be an instance of RepetitionMode.')
+
+        cmd = 'CONFigure:LTE:MEAS:MEValuation:REPetition {}'.format(mode.value)
+        self._cmw.send_and_recv(cmd)
+
+    @property
+    def query_measurement_state(self):
+        """Returns the states and sub states of measurement."""
+        return self._cmw.send_and_recv('FETCh:LTE:MEAS:MEValuation:STATe:ALL?')
+
+    @property
+    def measure_tx_power(self):
+        """Return the current Tx power measurement."""
+        return self._cmw.send_and_recv(
+                'FETCh:LTE:MEAS:MEValuation:PMONitor:AVERage?')
+
+    def stop_measurement(self):
+        """Stops the on-going measurement.
+        This function call does not free up resources allocated for
+        measurement. Instead it moves from RUN to RDY state.
+        """
+        self._cmw.send_and_recv('STOP:LTE:MEAS:MEValuation')
+
+    def abort_measurement(self):
+        """Aborts the measurement abruptly.
+        This function call will free up the resources allocated for
+        measurement and all the results will be wiped off.
+        """
+        self._cmw.send_and_recv('ABORt:LTE:MEAS:MEValuation')
+
+
+class CmwError(Exception):
+    """Class to raise exceptions related to cmw."""
diff --git a/server/cros/cellular/callbox_utils/cmw500_cellular_simulator.py b/server/cros/cellular/callbox_utils/cmw500_cellular_simulator.py
new file mode 100644
index 0000000..ce2c04e
--- /dev/null
+++ b/server/cros/cellular/callbox_utils/cmw500_cellular_simulator.py
@@ -0,0 +1,601 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import time
+import logging
+
+from autotest_lib.server.cros.cellular import cellular_simulator as cc
+from autotest_lib.server.cros.cellular.callbox_utils import cmw500
+from autotest_lib.server.cros.cellular.simulation_utils import LteSimulation
+
+CMW_TM_MAPPING = {
+        LteSimulation.TransmissionMode.TM1: cmw500.TransmissionModes.TM1,
+        LteSimulation.TransmissionMode.TM2: cmw500.TransmissionModes.TM2,
+        LteSimulation.TransmissionMode.TM3: cmw500.TransmissionModes.TM3,
+        LteSimulation.TransmissionMode.TM4: cmw500.TransmissionModes.TM4,
+        LteSimulation.TransmissionMode.TM7: cmw500.TransmissionModes.TM7,
+        LteSimulation.TransmissionMode.TM8: cmw500.TransmissionModes.TM8,
+        LteSimulation.TransmissionMode.TM9: cmw500.TransmissionModes.TM9
+}
+
+CMW_SCH_MAPPING = {
+        LteSimulation.SchedulingMode.STATIC:
+        cmw500.SchedulingMode.USERDEFINEDCH
+}
+
+CMW_MIMO_MAPPING = {
+        LteSimulation.MimoMode.MIMO_1x1: cmw500.MimoModes.MIMO1x1,
+        LteSimulation.MimoMode.MIMO_2x2: cmw500.MimoModes.MIMO2x2,
+        LteSimulation.MimoMode.MIMO_4x4: cmw500.MimoModes.MIMO4x4
+}
+
+CMW_MODULATION_MAPPING = {
+        LteSimulation.ModulationType.QPSK: cmw500.ModulationType.QPSK,
+        LteSimulation.ModulationType.Q16: cmw500.ModulationType.Q16,
+        LteSimulation.ModulationType.Q64: cmw500.ModulationType.Q64,
+        LteSimulation.ModulationType.Q256: cmw500.ModulationType.Q256
+}
+
+# MCS to TBS index (TBSI) map with 256-QAM disabled (downlink)
+get_mcs_tbsi_map_dl = {
+        cmw500.ModulationType.QPSK: {
+                0: 0,
+                1: 1,
+                2: 2,
+                3: 3,
+                4: 4,
+                5: 5,
+                6: 6,
+                7: 7,
+                8: 8,
+                9: 9
+        },
+        cmw500.ModulationType.Q16: {
+                10: 9,
+                11: 10,
+                12: 11,
+                13: 12,
+                14: 13,
+                15: 14,
+                16: 15
+        },
+        cmw500.ModulationType.Q64: {
+                17: 15,
+                18: 16,
+                19: 17,
+                20: 18,
+                21: 19,
+                22: 20,
+                23: 21,
+                24: 22,
+                25: 23,
+                26: 24,
+                27: 25,
+                28: 26
+        }
+}
+
+# MCS to TBS index (TBSI) map with 256-QAM enabled (downlink)
+get_mcs_tbsi_map_for_256qam_dl = {
+        cmw500.ModulationType.QPSK: {
+                0: 0,
+                1: 2,
+                2: 4,
+                3: 6,
+                4: 8,
+        },
+        cmw500.ModulationType.Q16: {
+                5: 10,
+                6: 11,
+                7: 12,
+                8: 13,
+                9: 14,
+                10: 15
+        },
+        cmw500.ModulationType.Q64: {
+                11: 16,
+                12: 17,
+                13: 18,
+                14: 19,
+                15: 20,
+                16: 21,
+                17: 22,
+                18: 23,
+                19: 24
+        },
+        cmw500.ModulationType.Q256: {
+                20: 25,
+                21: 27,
+                22: 28,
+                23: 29,
+                24: 30,
+                25: 31,
+                26: 32,
+                27: 33
+        }
+}
+
+# MCS to TBS index (TBSI) map (uplink)
+get_mcs_tbsi_map_ul = {
+        cmw500.ModulationType.QPSK: {
+                0: 0,
+                1: 1,
+                2: 2,
+                3: 3,
+                4: 4,
+                5: 5,
+                6: 6,
+                7: 7,
+                8: 8,
+                9: 9
+        },
+        cmw500.ModulationType.Q16: {
+                10: 10,
+                11: 10,
+                12: 11,
+                13: 12,
+                14: 13,
+                15: 14,
+                16: 15,
+                17: 16,
+                18: 17,
+                19: 18,
+                20: 19,
+                21: 19,
+                22: 20,
+                23: 21,
+                24: 22,
+                25: 23,
+                26: 24,
+                27: 25,
+                28: 26
+        }
+}
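The three tables above translate an MCS index into the TBS index expected by the
callbox for a given modulation. A lookup sketch, with the value read directly from
the downlink table with 256-QAM disabled:

    # MCS 12 with 16-QAM on the downlink, 256-QAM disabled -> TBS index 11.
    tbs_index = get_mcs_tbsi_map_dl[cmw500.ModulationType.Q16][12]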
+
+
+class CMW500CellularSimulator(cc.AbstractCellularSimulator):
+    """ A cellular simulator for telephony simulations based on the CMW 500
+    controller. """
+
+    # Indicates if it is able to use 256 QAM as the downlink modulation for LTE
+    LTE_SUPPORTS_DL_256QAM = True
+
+    # Indicates if it is able to use 64 QAM as the uplink modulation for LTE
+    LTE_SUPPORTS_UL_64QAM = True
+
+    # Indicates if 4x4 MIMO is supported for LTE
+    LTE_SUPPORTS_4X4_MIMO = True
+
+    # The maximum number of carriers that this simulator can support for LTE
+    LTE_MAX_CARRIERS = 1
+
+    def __init__(self, ip_address, port):
+        """ Initializes the cellular simulator.
+
+        Args:
+            ip_address: the ip address of the CMW500
+            port: the port number for the CMW500 controller
+        """
+
+        try:
+            self.cmw = cmw500.Cmw500(ip_address, port)
+        except cmw500.CmwError:
+            raise cc.CellularSimulatorError('Could not connect to CMW500.')
+
+        self.bts = None
+        self.log = logging.getLogger(__name__)
+        self.dl_modulation = None
+        self.ul_modulation = None
+
+    def destroy(self):
+        """ Sends finalization commands to the cellular equipment and closes
+        the connection. """
+        self.cmw.disconnect()
+
+    def setup_lte_scenario(self):
+        """ Configures the equipment for an LTE simulation. """
+        self.cmw.connection_type = cmw500.ConnectionType.DAU
+        self.bts = [self.cmw.get_base_station()]
+        self.cmw.switch_lte_signalling(cmw500.LteState.LTE_ON)
+
+    def setup_lte_ca_scenario(self):
+        """ Configures the equipment for an LTE with CA simulation. """
+        raise NotImplementedError()
+
+    def set_lte_rrc_state_change_timer(self, enabled, time=10):
+        """ Configures the LTE RRC state change timer.
+
+        Args:
+            enabled: a boolean indicating if the timer should be on or off.
+            time: time in seconds for the timer to expire
+        """
+        if enabled:
+            self.cmw.rrc_connection = cmw500.RrcState.RRC_OFF
+            self.cmw.rrc_connection_timer = time
+        else:
+            self.cmw.rrc_connection = cmw500.RrcState.RRC_ON
+
+    def set_band(self, bts_index, band):
+        """ Sets the band for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            band: the new band
+        """
+        bts = self.bts[bts_index]
+        bts.duplex_mode = self.get_duplex_mode(band)
+        band = 'OB' + band
+        bts.band = band
+        self.log.debug('Band set to {}'.format(band))
+
+    def get_duplex_mode(self, band):
+        """ Determines if the band uses FDD or TDD duplex mode
+
+        Args:
+            band: a band number
+
+        Returns:
+            a DuplexMode value indicating whether the band is FDD or TDD
+        """
+        if 33 <= int(band) <= 46:
+            return cmw500.DuplexMode.TDD
+        else:
+            return cmw500.DuplexMode.FDD
+
+    def set_input_power(self, bts_index, input_power):
+        """ Sets the input power for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            input_power: the new input power
+        """
+        bts = self.bts[bts_index]
+        if input_power > 23:
+            self.log.warning('Open loop power control supports -50 dBm to '
+                             '23 dBm. Setting it to the max power of 23 dBm.')
+            input_power = 23
+        bts.uplink_power_control = input_power
+        bts.tpc_power_control = cmw500.TpcPowerControl.CLOSED_LOOP
+        bts.tpc_closed_loop_target_power = input_power
+
+    def set_output_power(self, bts_index, output_power):
+        """ Sets the output power for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            output_power: the new output power
+        """
+        bts = self.bts[bts_index]
+        bts.downlink_power_level = output_power
+
+    def set_tdd_config(self, bts_index, tdd_config):
+        """ Sets the tdd configuration number for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            tdd_config: the new tdd configuration number
+        """
+        self.bts[bts_index].uldl_configuration = tdd_config
+
+    def set_ssf_config(self, bts_index, ssf_config):
+        """ Sets the Special Sub-Frame config number for the indicated
+        base station.
+
+        Args:
+            bts_index: the base station number
+            ssf_config: the new ssf config number
+        """
+        if not 0 <= ssf_config <= 9:
+            raise ValueError('The Special Sub-Frame configuration has to be a '
+                             'number between 0 and 9.')
+
+        self.bts[bts_index].tdd_special_subframe = ssf_config
+
+    def set_bandwidth(self, bts_index, bandwidth):
+        """ Sets the bandwidth for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            bandwidth: the new bandwidth
+        """
+        bts = self.bts[bts_index]
+
+        if bandwidth == 20:
+            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_20MHz
+        elif bandwidth == 15:
+            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_15MHz
+        elif bandwidth == 10:
+            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_10MHz
+        elif bandwidth == 5:
+            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_5MHz
+        elif bandwidth == 3:
+            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_3MHz
+        elif bandwidth == 1.4:
+            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_1MHz
+        else:
+            msg = 'Bandwidth {} MHz is not valid for LTE'.format(bandwidth)
+            raise ValueError(msg)
+
+    def set_downlink_channel_number(self, bts_index, channel_number):
+        """ Sets the downlink channel number for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            channel_number: the new channel number
+        """
+        bts = self.bts[bts_index]
+        bts.dl_channel = channel_number
+        self.log.debug('Downlink Channel set to {}'.format(bts.dl_channel))
+
+    def set_mimo_mode(self, bts_index, mimo_mode):
+        """ Sets the mimo mode for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            mimo_mode: the new mimo mode
+        """
+        bts = self.bts[bts_index]
+        mimo_mode = CMW_MIMO_MAPPING[mimo_mode]
+        if mimo_mode == cmw500.MimoModes.MIMO1x1:
+            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN1x1)
+            bts.dl_antenna = cmw500.MimoModes.MIMO1x1
+
+        elif mimo_mode == cmw500.MimoModes.MIMO2x2:
+            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN2x2)
+            bts.dl_antenna = cmw500.MimoModes.MIMO2x2
+
+        elif mimo_mode == cmw500.MimoModes.MIMO4x4:
+            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN4x4)
+            bts.dl_antenna = cmw500.MimoModes.MIMO4x4
+        else:
+            raise RuntimeError('The requested MIMO mode is not supported.')
+
+    def set_transmission_mode(self, bts_index, tmode):
+        """ Sets the transmission mode for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            tmode: the new transmission mode
+        """
+        bts = self.bts[bts_index]
+
+        tmode = CMW_TM_MAPPING[tmode]
+
+        if (tmode in [
+                cmw500.TransmissionModes.TM1, cmw500.TransmissionModes.TM7
+        ] and bts.dl_antenna == cmw500.MimoModes.MIMO1x1.value):
+            bts.transmode = tmode
+        elif (tmode.value in cmw500.TransmissionModes.__members__
+              and bts.dl_antenna == cmw500.MimoModes.MIMO2x2.value):
+            bts.transmode = tmode
+        elif (tmode in [
+                cmw500.TransmissionModes.TM2, cmw500.TransmissionModes.TM3,
+                cmw500.TransmissionModes.TM4, cmw500.TransmissionModes.TM9
+        ] and bts.dl_antenna == cmw500.MimoModes.MIMO4x4.value):
+            bts.transmode = tmode
+
+        else:
+            raise ValueError('The requested transmission mode is not '
+                             'compatible with the current MIMO mode.')
+
+    def set_scheduling_mode(self,
+                            bts_index,
+                            scheduling,
+                            mcs_dl=None,
+                            mcs_ul=None,
+                            nrb_dl=None,
+                            nrb_ul=None):
+        """ Sets the scheduling mode for the indicated base station.
+
+        Args:
+            bts_index: the base station number.
+            scheduling: the new scheduling mode.
+            mcs_dl: Downlink MCS.
+            mcs_ul: Uplink MCS.
+            nrb_dl: Number of RBs for downlink.
+            nrb_ul: Number of RBs for uplink.
+        """
+        bts = self.bts[bts_index]
+        bts.reduced_pdcch = cmw500.ReducedPdcch.ON
+
+        scheduling = CMW_SCH_MAPPING[scheduling]
+        bts.scheduling_mode = scheduling
+
+        if not (self.ul_modulation and self.dl_modulation):
+            raise ValueError('Modulation should be set prior to scheduling '
+                             'call')
+
+        if scheduling == cmw500.SchedulingMode.RMC:
+
+            if not (nrb_ul and nrb_dl):
+                raise ValueError('nrb_ul and nrb_dl should not be None')
+
+            bts.rb_configuration_ul = (nrb_ul, self.ul_modulation, 'KEEP')
+            self.log.info('ul rb configurations set to {}'.format(
+                    bts.rb_configuration_ul))
+
+            time.sleep(1)
+
+            self.log.debug('Setting rb configurations for down link')
+            bts.rb_configuration_dl = (nrb_dl, self.dl_modulation, 'KEEP')
+            self.log.info('dl rb configurations set to {}'.format(
+                    bts.rb_configuration_dl))
+
+        elif scheduling == cmw500.SchedulingMode.USERDEFINEDCH:
+
+            if not all([nrb_ul, nrb_dl, mcs_dl, mcs_ul]):
+                raise ValueError('All parameters are mandatory.')
+
+            tbs = get_mcs_tbsi_map_ul[self.ul_modulation][mcs_ul]
+
+            bts.rb_configuration_ul = (nrb_ul, 0, self.ul_modulation, tbs)
+            self.log.info('ul rb configurations set to {}'.format(
+                    bts.rb_configuration_ul))
+
+            time.sleep(1)
+
+            if self.dl_modulation == cmw500.ModulationType.Q256:
+                tbs = get_mcs_tbsi_map_for_256qam_dl[
+                        self.dl_modulation][mcs_dl]
+            else:
+                tbs = get_mcs_tbsi_map_dl[self.dl_modulation][mcs_dl]
+
+            bts.rb_configuration_dl = (nrb_dl, 0, self.dl_modulation, tbs)
+            self.log.info('dl rb configurations set to {}'.format(
+                    bts.rb_configuration_dl))
+
+    def set_dl_modulation(self, bts_index, modulation):
+        """ Sets the DL modulation for the indicated base station.
+
+        This function does not actually configure the test equipment with this
+        setting, but stores the value to be used later when setting the
+        scheduling mode. This is because the CMW500 API only allows setting
+        these parameters together.
+
+        Args:
+            bts_index: the base station number
+            modulation: the new DL modulation
+        """
+        # Convert dl modulation type to CMW modulation type.
+        self.dl_modulation = CMW_MODULATION_MAPPING[modulation]
+
+        self.log.warning('Modulation config stored but not applied until '
+                         'set_scheduling_mode called.')
+
+    def set_ul_modulation(self, bts_index, modulation):
+        """ Sets the UL modulation for the indicated base station.
+
+        This function does not actually configure the test equipment with this
+        setting, but stores the value to be used later when setting the
+        scheduling mode. This is because the CMW500 API only allows setting
+        these parameters together.
+
+        Args:
+            bts_index: the base station number
+            modulation: the new UL modulation
+        """
+
+        # Convert ul modulation type to CMW modulation type.
+        self.ul_modulation = CMW_MODULATION_MAPPING[modulation]
+
+        self.log.warning('Modulation config stored but not applied until '
+                         'set_scheduling_mode called.')
+
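+    # A minimal call-order sketch for the three methods above (sim is an
+    # instance of this controller, bts 0 is the primary cell, and the
+    # modulation/scheduling values are placeholders for keys accepted by the
+    # CMW_* mappings):
+    #
+    #     sim.set_dl_modulation(0, dl_mod)
+    #     sim.set_ul_modulation(0, ul_mod)
+    #     sim.set_scheduling_mode(0, sched, mcs_dl=28, mcs_ul=23,
+    #                             nrb_dl=100, nrb_ul=100)
+    #
+    # Calling set_scheduling_mode before the modulation setters raises a
+    # ValueError, since the CMW500 applies modulation and scheduling together.
+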
+    def set_tbs_pattern_on(self, bts_index, tbs_pattern_on):
+        """ Enables or disables TBS pattern in the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            tbs_pattern_on: the new TBS pattern setting
+        """
+        # TODO (b/143918664): CMW500 doesn't have an equivalent setting.
+        pass
+
+    def set_cfi(self, bts_index, cfi):
+        """ Sets the Channel Format Indicator for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            cfi: the new CFI setting
+        """
+        # TODO (b/143497738): implement.
+        self.log.error('Setting CFI is not yet implemented in the CMW500 '
+                       'controller.')
+
+    def set_paging_cycle(self, bts_index, cycle_duration):
+        """ Sets the paging cycle duration for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            cycle_duration: the new paging cycle duration in milliseconds
+        """
+        # TODO (b/146068532): implement.
+        self.log.error('Setting the paging cycle duration is not yet '
+                       'implemented in the CMW500 controller.')
+
+    def set_phich_resource(self, bts_index, phich):
+        """ Sets the PHICH Resource setting for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            phich: the new PHICH resource setting
+        """
+        self.log.error('Configuring the PHICH resource setting is not yet '
+                       'implemented in the CMW500 controller.')
+
+    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
+        """ Activates the secondary carriers for CA. Requires the DUT to be
+        attached to the primary carrier first.
+
+        Args:
+            ue_capability_enquiry: UE capability enquiry message to be sent to
+                the UE before starting carrier aggregation.
+        """
+        raise NotImplementedError()
+
+    def wait_until_attached(self, timeout=120):
+        """ Waits until the DUT is attached to the primary carrier.
+
+        Args:
+            timeout: after this amount of time the method will raise a
+                CellularSimulatorError exception. Default is 120 seconds.
+        """
+        try:
+            self.cmw.wait_for_attached_state(timeout=timeout)
+        except cmw500.CmwError:
+            raise cc.CellularSimulatorError('The phone was not in '
+                                            'attached state before '
+                                            'the timeout period ended.')
+
+    def wait_until_communication_state(self, timeout=120):
+        """ Waits until the DUT is in Communication state.
+
+        Args:
+            timeout: after this amount of time the method will raise a
+                CellularSimulatorError exception. Default is 120 seconds.
+        """
+        try:
+            self.cmw.wait_for_rrc_state(cmw500.LTE_CONN_RESP, timeout=timeout)
+        except cmw500.CmwError:
+            raise cc.CellularSimulatorError('The phone was not in '
+                                            'Communication state before '
+                                            'the timeout period ended.')
+
+    def wait_until_idle_state(self, timeout=120):
+        """ Waits until the DUT is in Idle state.
+
+        Args:
+            timeout: after this amount of time the method will raise a
+                CellularSimulatorError exception. Default is 120 seconds.
+        """
+        try:
+            self.cmw.wait_for_rrc_state(cmw500.LTE_IDLE_RESP, timeout=timeout)
+        except cmw500.CmwError:
+            raise cc.CellularSimulatorError('The phone was not in '
+                                            'Idle state before '
+                                            'the timeout period ended.')
+
+    def detach(self):
+        """ Turns off all the base stations so the DUT loose connection."""
+        self.cmw.detach()
+
+    def stop(self):
+        """ Stops current simulation. After calling this method, the simulator
+        will need to be set up again. """
+        raise NotImplementedError()
+
+    def start_data_traffic(self):
+        """ Starts transmitting data from the instrument to the DUT. """
+        raise NotImplementedError()
+
+    def stop_data_traffic(self):
+        """ Stops transmitting data from the instrument to the DUT. """
+        raise NotImplementedError()
+
+    def send_sms(self, sms_message):
+        """ Sends SMS message from the instrument to the DUT. """
+        self.cmw.wait_for_attached_state()
+        self.cmw.set_sms(sms_message)
+        self.cmw.send_sms()
+
diff --git a/server/cros/cellular/callbox_utils/common.py b/server/cros/cellular/callbox_utils/common.py
new file mode 100644
index 0000000..9c2b102
--- /dev/null
+++ b/server/cros/cellular/callbox_utils/common.py
@@ -0,0 +1,8 @@
+import os, sys
+dirname = os.path.dirname(sys.modules[__name__].__file__)
+autotest_dir = os.path.abspath(os.path.join(dirname, "..", "..", "..", ".."))
+client_dir = os.path.join(autotest_dir, "client")
+sys.path.insert(0, client_dir)
+import setup_modules
+sys.path.pop(0)
+setup_modules.setup(base_path=autotest_dir, root_module_name="autotest_lib")
diff --git a/server/cros/cellular/cellular_simulator.py b/server/cros/cellular/cellular_simulator.py
new file mode 100644
index 0000000..a15b64c
--- /dev/null
+++ b/server/cros/cellular/cellular_simulator.py
@@ -0,0 +1,462 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server.cros.cellular import simulation_utils as sims
+
+
+class AbstractCellularSimulator:
+    """ A generic cellular simulator controller class that can be derived to
+    implement equipment specific classes and allows the tests to be implemented
+    without depending on a singular instrument model.
+
+    This class defines the interface that every cellular simulator controller
+    needs to implement and should not be instantiated directly. """
+
+    # Indicates if it is able to use 256 QAM as the downlink modulation for LTE
+    LTE_SUPPORTS_DL_256QAM = None
+
+    # Indicates if it is able to use 64 QAM as the uplink modulation for LTE
+    LTE_SUPPORTS_UL_64QAM = None
+
+    # Indicates if 4x4 MIMO is supported for LTE
+    LTE_SUPPORTS_4X4_MIMO = None
+
+    # The maximum number of carriers that this simulator can support for LTE
+    LTE_MAX_CARRIERS = None
+
+    # The maximum power that the equipment is able to transmit
+    MAX_DL_POWER = None
+
+    def __init__(self):
+        """ Initializes the cellular simulator.  Logger init goes here. """
+
+    def destroy(self):
+        """ Sends finalization commands to the cellular equipment and closes
+        the connection. """
+        raise NotImplementedError()
+
+    def setup_lte_scenario(self):
+        """ Configures the equipment for an LTE simulation. """
+        raise NotImplementedError()
+
+    def setup_lte_ca_scenario(self):
+        """ Configures the equipment for an LTE with CA simulation. """
+        raise NotImplementedError()
+
+    def set_ca_combination(self, combination):
+        """ Prepares the test equipment for the indicated CA combination.
+
+        This is implemented as a separate method, rather than through
+        LteSimulation.BtsConfig for each band, because the secondary component
+        carriers cannot be configured one at a time: the equipment needs to
+        know which carriers are on the same band in order to decide which RF
+        outputs can be shared.
+
+        Args:
+            combination: carrier aggregation configurations are indicated
+                with a list of strings consisting of the band number followed
+                by the CA class. For example, for 5-carrier aggregation using
+                3C, 7C and 28A, the parameter value should be
+                ['3c', '7c', '28a'].
+        """
+        raise NotImplementedError()
+
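+    # Example (sketch) of the combination format described above: 5-carrier
+    # aggregation built from 3C, 7C and 28A would be requested as
+    #
+    #     sim.set_ca_combination(['3c', '7c', '28a'])
+    #
+    # where sim is a concrete subclass instance.
+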
+    def configure_bts(self, config, bts_index=0):
+        """ Commands the equipment to setup a base station with the required
+        configuration. This method applies configurations that are common to all
+        RATs.
+
+        Args:
+            config: a BaseSimulation.BtsConfig object.
+            bts_index: the base station number.
+        """
+
+        if config.output_power:
+            self.set_output_power(bts_index, config.output_power)
+
+        if config.input_power:
+            self.set_input_power(bts_index, config.input_power)
+
+        if isinstance(config, sims.LteSimulation.LteSimulation.BtsConfig):
+            self.configure_lte_bts(config, bts_index)
+
+    def configure_lte_bts(self, config, bts_index=0):
+        """ Commands the equipment to setup an LTE base station with the
+        required configuration.
+
+        Args:
+            config: an LteSimulation.BtsConfig object.
+            bts_index: the base station number.
+        """
+        if config.band:
+            self.set_band(bts_index, config.band)
+
+        if config.dlul_config:
+            self.set_tdd_config(bts_index, config.dlul_config)
+
+        if config.ssf_config:
+            self.set_ssf_config(bts_index, config.ssf_config)
+
+        if config.bandwidth:
+            self.set_bandwidth(bts_index, config.bandwidth)
+
+        if config.dl_channel:
+            self.set_downlink_channel_number(bts_index, config.dl_channel)
+
+        if config.mimo_mode:
+            self.set_mimo_mode(bts_index, config.mimo_mode)
+
+        if config.transmission_mode:
+            self.set_transmission_mode(bts_index, config.transmission_mode)
+
+        # Modulation order should be set before set_scheduling_mode is
+        # called.
+        if config.dl_modulation_order:
+            self.set_dl_modulation(bts_index, config.dl_modulation_order)
+
+        if config.ul_modulation_order:
+            self.set_ul_modulation(bts_index, config.ul_modulation_order)
+
+        if config.scheduling_mode:
+
+            if (config.scheduling_mode ==
+                        sims.LteSimulation.SchedulingMode.STATIC
+                        and not (config.dl_rbs and config.ul_rbs
+                                 and config.dl_mcs and config.ul_mcs)):
+                raise ValueError('When the scheduling mode is set to STATIC, '
+                                 'the RB and MCS parameters are required.')
+
+            # If scheduling mode is set to Dynamic, the RB and MCS parameters
+            # will be ignored by set_scheduling_mode.
+            self.set_scheduling_mode(bts_index, config.scheduling_mode,
+                                     config.dl_mcs, config.ul_mcs,
+                                     config.dl_rbs, config.ul_rbs)
+
+        # This variable stores a boolean value so the following is needed to
+        # differentiate False from None
+        if config.tbs_pattern_on is not None:
+            self.set_tbs_pattern_on(bts_index, config.tbs_pattern_on)
+
+        if config.cfi:
+            self.set_cfi(bts_index, config.cfi)
+
+        if config.paging_cycle:
+            self.set_paging_cycle(bts_index, config.paging_cycle)
+
+        if config.phich:
+            self.set_phich_resource(bts_index, config.phich)
+
+        if config.drx_connected_mode:
+            self.set_drx_connected_mode(bts_index, config.drx_connected_mode)
+
+            if config.drx_on_duration_timer:
+                self.set_drx_on_duration_timer(bts_index,
+                                               config.drx_on_duration_timer)
+
+            if config.drx_inactivity_timer:
+                self.set_drx_inactivity_timer(bts_index,
+                                              config.drx_inactivity_timer)
+
+            if config.drx_retransmission_timer:
+                self.set_drx_retransmission_timer(
+                        bts_index, config.drx_retransmission_timer)
+
+            if config.drx_long_cycle:
+                self.set_drx_long_cycle(bts_index, config.drx_long_cycle)
+
+            if config.drx_long_cycle_offset is not None:
+                self.set_drx_long_cycle_offset(bts_index,
+                                               config.drx_long_cycle_offset)
+
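+    # A minimal configuration sketch for the dispatch above (assumptions: sim
+    # is a concrete subclass instance and the LTE BtsConfig exposes the
+    # attributes read by configure_lte_bts; the values are arbitrary examples):
+    #
+    #     config = sims.LteSimulation.LteSimulation.BtsConfig()
+    #     config.band = '3'
+    #     config.bandwidth = 10
+    #     config.output_power = -30
+    #     sim.configure_bts(config, bts_index=0)
+    #
+    # configure_bts applies the RAT-agnostic power settings first and then
+    # hands the LTE-specific fields to configure_lte_bts.
+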
+    def set_lte_rrc_state_change_timer(self, enabled, time=10):
+        """ Configures the LTE RRC state change timer.
+
+        Args:
+            enabled: a boolean indicating if the timer should be on or off.
+            time: time in seconds for the timer to expire
+        """
+        raise NotImplementedError()
+
+    def set_band(self, bts_index, band):
+        """ Sets the band for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            band: the new band
+        """
+        raise NotImplementedError()
+
+    def set_input_power(self, bts_index, input_power):
+        """ Sets the input power for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            input_power: the new input power
+        """
+        raise NotImplementedError()
+
+    def set_output_power(self, bts_index, output_power):
+        """ Sets the output power for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            output_power: the new output power
+        """
+        raise NotImplementedError()
+
+    def set_tdd_config(self, bts_index, tdd_config):
+        """ Sets the tdd configuration number for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            tdd_config: the new tdd configuration number
+        """
+        raise NotImplementedError()
+
+    def set_ssf_config(self, bts_index, ssf_config):
+        """ Sets the Special Sub-Frame config number for the indicated
+        base station.
+
+        Args:
+            bts_index: the base station number
+            ssf_config: the new ssf config number
+        """
+        raise NotImplementedError()
+
+    def set_bandwidth(self, bts_index, bandwidth):
+        """ Sets the bandwidth for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            bandwidth: the new bandwidth
+        """
+        raise NotImplementedError()
+
+    def set_downlink_channel_number(self, bts_index, channel_number):
+        """ Sets the downlink channel number for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            channel_number: the new channel number
+        """
+        raise NotImplementedError()
+
+    def set_mimo_mode(self, bts_index, mimo_mode):
+        """ Sets the mimo mode for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            mimo_mode: the new mimo mode
+        """
+        raise NotImplementedError()
+
+    def set_transmission_mode(self, bts_index, transmission_mode):
+        """ Sets the transmission mode for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            transmission_mode: the new transmission mode
+        """
+        raise NotImplementedError()
+
+    def set_scheduling_mode(self, bts_index, scheduling_mode, mcs_dl, mcs_ul,
+                            nrb_dl, nrb_ul):
+        """ Sets the scheduling mode for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            scheduling_mode: the new scheduling mode
+            mcs_dl: Downlink MCS (only for STATIC scheduling)
+            mcs_ul: Uplink MCS (only for STATIC scheduling)
+            nrb_dl: Number of RBs for downlink (only for STATIC scheduling)
+            nrb_ul: Number of RBs for uplink (only for STATIC scheduling)
+        """
+        raise NotImplementedError()
+
+    def set_dl_modulation(self, bts_index, modulation):
+        """ Sets the DL modulation for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            modulation: the new DL modulation
+        """
+        raise NotImplementedError()
+
+    def set_ul_modulation(self, bts_index, modulation):
+        """ Sets the UL modulation for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            modulation: the new UL modulation
+        """
+        raise NotImplementedError()
+
+    def set_tbs_pattern_on(self, bts_index, tbs_pattern_on):
+        """ Enables or disables TBS pattern in the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            tbs_pattern_on: the new TBS pattern setting
+        """
+        raise NotImplementedError()
+
+    def set_cfi(self, bts_index, cfi):
+        """ Sets the Channel Format Indicator for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            cfi: the new CFI setting
+        """
+        raise NotImplementedError()
+
+    def set_paging_cycle(self, bts_index, cycle_duration):
+        """ Sets the paging cycle duration for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            cycle_duration: the new paging cycle duration in milliseconds
+        """
+        raise NotImplementedError()
+
+    def set_phich_resource(self, bts_index, phich):
+        """ Sets the PHICH Resource setting for the indicated base station.
+
+        Args:
+            bts_index: the base station number
+            phich: the new PHICH resource setting
+        """
+        raise NotImplementedError()
+
+    def set_drx_connected_mode(self, bts_index, active):
+        """ Sets the time interval to wait before entering DRX mode
+
+        Args:
+            bts_index: the base station number
+            active: Boolean indicating whether cDRX mode
+                is active
+        """
+        raise NotImplementedError()
+
+    def set_drx_on_duration_timer(self, bts_index, timer):
+        """ Sets the amount of PDCCH subframes to wait for data after
+            waking up from a DRX cycle
+
+        Args:
+            bts_index: the base station number
+            timer: Number of PDCCH subframes to wait and check for user data
+                after waking from the DRX cycle
+        """
+        raise NotImplementedError()
+
+    def set_drx_inactivity_timer(self, bts_index, timer):
+        """ Sets the number of PDCCH subframes to wait before entering DRX mode
+
+        Args:
+            bts_index: the base station number
+            timer: The amount of time to wait before entering DRX mode
+        """
+        raise NotImplementedError()
+
+    def set_drx_retransmission_timer(self, bts_index, timer):
+        """ Sets the number of consecutive PDCCH subframes to wait
+        for retransmission
+
+        Args:
+            bts_index: the base station number
+            timer: Number of PDCCH subframes to remain active
+
+        """
+        raise NotImplementedError()
+
+    def set_drx_long_cycle(self, bts_index, cycle):
+        """ Sets the amount of subframes representing a DRX long cycle.
+
+        Args:
+            bts_index: the base station number
+            cycle: The amount of subframes representing one long DRX cycle.
+                One cycle consists of DRX sleep + DRX on duration
+        """
+        raise NotImplementedError()
+
+    def set_drx_long_cycle_offset(self, bts_index, offset):
+        """ Sets the offset used to determine the subframe number
+        to begin the long drx cycle
+
+        Args:
+            bts_index: the base station number
+            offset: Number in range 0 to (long cycle - 1)
+        """
+        raise NotImplementedError()
+
+    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
+        """ Activates the secondary carriers for CA. Requires the DUT to be
+        attached to the primary carrier first.
+
+        Args:
+            ue_capability_enquiry: UE capability enquiry message to be sent to
+                the UE before starting carrier aggregation.
+        """
+        raise NotImplementedError()
+
+    def wait_until_attached(self, timeout=120):
+        """ Waits until the DUT is attached to the primary carrier.
+
+        Args:
+            timeout: after this amount of time the method will raise a
+                CellularSimulatorError exception. Default is 120 seconds.
+        """
+        raise NotImplementedError()
+
+    def wait_until_communication_state(self, timeout=120):
+        """ Waits until the DUT is in Communication state.
+
+        Args:
+            timeout: after this amount of time the method will raise a
+                CellularSimulatorError exception. Default is 120 seconds.
+        """
+        raise NotImplementedError()
+
+    def wait_until_idle_state(self, timeout=120):
+        """ Waits until the DUT is in Idle state.
+
+        Args:
+            timeout: after this amount of time the method will raise a
+                CellularSimulatorError exception. Default is 120 seconds.
+        """
+        raise NotImplementedError()
+
+    def detach(self):
+        """ Turns off all the base stations so the DUT loose connection."""
+        raise NotImplementedError()
+
+    def stop(self):
+        """ Stops current simulation. After calling this method, the simulator
+        will need to be set up again. """
+        raise NotImplementedError()
+
+    def start_data_traffic(self):
+        """ Starts transmitting data from the instrument to the DUT. """
+        raise NotImplementedError()
+
+    def stop_data_traffic(self):
+        """ Stops transmitting data from the instrument to the DUT. """
+        raise NotImplementedError()
+
+    def get_measured_pusch_power(self):
+        """ Queries PUSCH power measured at the callbox.
+
+        Returns:
+            The PUSCH power in the primary input port.
+        """
+        raise NotImplementedError()
+
+    def send_sms(self, sms_message):
+        """ Sends SMS message from the instrument to the DUT. """
+        raise NotImplementedError()
+
+
+
+class CellularSimulatorError(Exception):
+    """ Exceptions thrown when the cellular equipment is unreachable or it
+    returns an error after receiving a command. """
+    pass
diff --git a/client/tests/wb_kupdate/common.py b/server/cros/cellular/common.py
similarity index 100%
rename from client/tests/wb_kupdate/common.py
rename to server/cros/cellular/common.py
diff --git a/server/cros/cellular/simulation_utils/BaseCellularDut.py b/server/cros/cellular/simulation_utils/BaseCellularDut.py
new file mode 100644
index 0000000..3272a44
--- /dev/null
+++ b/server/cros/cellular/simulation_utils/BaseCellularDut.py
@@ -0,0 +1,67 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from enum import Enum
+
+
+class PreferredNetworkType(Enum):
+    """ Available preferred network types that can be passed to
+  set_preferred_network_type"""
+    LTE_ONLY = 'lte-only'
+    GSM_ONLY = 'gsm-only'
+    WCDMA_ONLY = 'wcdma-only'
+
+
+class BaseCellularDut():
+    """ Base class for DUTs used with cellular simulators. """
+
+    def toggle_airplane_mode(self, new_state=True):
+        """ Turns airplane mode on / off.
+
+        Args:
+          new_state: True if airplane mode needs to be enabled.
+        """
+        raise NotImplementedError()
+
+    def toggle_data_roaming(self, new_state=True):
+        """ Enables or disables cellular data roaming.
+
+        Args:
+          new_state: True if data roaming needs to be enabled.
+        """
+        raise NotImplementedError()
+
+    def get_rx_tx_power_levels(self):
+        """ Obtains Rx and Tx power levels measured from the DUT.
+
+        Returns:
+          A tuple where the first element is an array with the RSRP value
+          in each Rx chain, and the second element is the Tx power in dBm.
+          Values for invalid or disabled Rx / Tx chains are set to None.
+        """
+        raise NotImplementedError()
+
+    def set_apn(self, name, apn, type='default'):
+        """ Sets the Access Point Name.
+
+        Args:
+          name: the APN name
+          apn: the APN
+          type: the APN type
+        """
+        raise NotImplementedError()
+
+    def set_preferred_network_type(self, type):
+        """ Sets the preferred RAT.
+
+        Args:
+          type: an instance of class PreferredNetworkType
+        """
+        raise NotImplementedError()
+
+    def get_telephony_signal_strength(self):
+        """ Wrapper for the method with the same name in tel_utils.
+
+        Will be deprecated and replaced by get_rx_tx_power_levels. """
+        raise NotImplementedError()
diff --git a/server/cros/cellular/simulation_utils/BaseSimulation.py b/server/cros/cellular/simulation_utils/BaseSimulation.py
new file mode 100644
index 0000000..d665235
--- /dev/null
+++ b/server/cros/cellular/simulation_utils/BaseSimulation.py
@@ -0,0 +1,788 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+import numpy as np
+import time
+
+from autotest_lib.server.cros.cellular import cellular_simulator
+from enum import Enum
+
+
+class BaseSimulation(object):
+    """ Base class for cellular connectivity simulations.
+
+    Classes that inherit from this base class implement different simulation
+    setups. The base class contains methods that are common to all simulation
+    configurations.
+
+    """
+
+    NUM_UL_CAL_READS = 3
+    NUM_DL_CAL_READS = 5
+    MAX_BTS_INPUT_POWER = 30
+    MAX_PHONE_OUTPUT_POWER = 23
+    UL_MIN_POWER = -60.0
+
+    # Keys to obtain settings from the test_config dictionary.
+    KEY_CALIBRATION = "calibration"
+    KEY_ATTACH_RETRIES = "attach_retries"
+    KEY_ATTACH_TIMEOUT = "attach_timeout"
+
+    # Filepath to the config files stored in the Anritsu callbox. Needs to be
+    # formatted to replace {} with either A or B depending on the model.
+    CALLBOX_PATH_FORMAT_STR = 'C:\\Users\\MD8475{}\\Documents\\DAN_configs\\'
+
+    # Time in seconds to wait for the phone to settle
+    # after attaching to the base station.
+    SETTLING_TIME = 10
+
+    # Default time in seconds to wait for the phone to attach to the basestation
+    # after toggling airplane mode. This setting can be changed with the
+    # KEY_ATTACH_TIMEOUT keyword in the test configuration file.
+    DEFAULT_ATTACH_TIMEOUT = 120
+
+    # The default number of attach retries. This setting can be changed with
+    # the KEY_ATTACH_RETRIES keyword in the test configuration file.
+    DEFAULT_ATTACH_RETRIES = 3
+
+    # These two dictionaries allow to map from a string to a signal level and
+    # have to be overridden by the simulations inheriting from this class.
+    UPLINK_SIGNAL_LEVEL_DICTIONARY = {}
+    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {}
+
+    # Units for downlink signal level. This variable has to be overridden by
+    # the simulations inheriting from this class.
+    DOWNLINK_SIGNAL_LEVEL_UNITS = None
+
+    class BtsConfig(object):
+        """ Base station configuration class. This class is only a container for
+        base station parameters and should not interact with the instrument
+        controller.
+
+        Attributes:
+            output_power: a float indicating the required signal level at the
+                instrument's output.
+            input_power: a float indicating the required signal level at the
+                instrument's input.
+        """
+
+        def __init__(self):
+            """ Initialize the base station config by setting all its
+            parameters to None. """
+            self.output_power = None
+            self.input_power = None
+            self.band = None
+
+        def incorporate(self, new_config):
+            """ Incorporates a different configuration by replacing the current
+            values with the new ones for all parameters that are not None.
+            """
+            for attr, value in vars(new_config).items():
+                if value:
+                    setattr(self, attr, value)
+
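+    # Usage sketch for BtsConfig.incorporate (hypothetical values): a partial
+    # update only overwrites the fields it actually sets.
+    #
+    #     base = BaseSimulation.BtsConfig()
+    #     base.output_power = -30
+    #     update = BaseSimulation.BtsConfig()
+    #     update.input_power = -10
+    #     base.incorporate(update)
+    #     # base.output_power is still -30; base.input_power is now -10
+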
+    def __init__(self, simulator, log, dut, test_config, calibration_table):
+        """ Initializes the Simulation object.
+
+        Keeps a reference to the callbox, log and dut handlers and
+        initializes the class attributes.
+
+        Args:
+            simulator: a cellular simulator controller
+            log: a logger handle
+            dut: a device handler implementing BaseCellularDut
+            test_config: test configuration obtained from the config file
+            calibration_table: a dictionary containing path losses for
+                different bands.
+        """
+
+        self.simulator = simulator
+        self.log = log
+        self.dut = dut
+        self.calibration_table = calibration_table
+
+        # Turn calibration on or off depending on the test config value. If the
+        # key is not present, set to False by default
+        if self.KEY_CALIBRATION not in test_config:
+            self.log.warning('The {} key is not set in the testbed '
+                             'parameters. Setting to off by default. To '
+                             'turn calibration on, include the key with '
+                             'a true/false value.'.format(
+                                     self.KEY_CALIBRATION))
+
+        self.calibration_required = test_config.get(self.KEY_CALIBRATION,
+                                                    False)
+
+        # Obtain the allowed number of retries from the test configs
+        if self.KEY_ATTACH_RETRIES not in test_config:
+            self.log.warning('The {} key is not set in the testbed '
+                             'parameters. Setting to {} by default.'.format(
+                                     self.KEY_ATTACH_RETRIES,
+                                     self.DEFAULT_ATTACH_RETRIES))
+
+        self.attach_retries = test_config.get(self.KEY_ATTACH_RETRIES,
+                                              self.DEFAULT_ATTACH_RETRIES)
+
+        # Obtain the attach timeout from the test configs
+        if self.KEY_ATTACH_TIMEOUT not in test_config:
+            self.log.warning('The {} key is not set in the testbed '
+                             'parameters. Setting to {} by default.'.format(
+                                     self.KEY_ATTACH_TIMEOUT,
+                                     self.DEFAULT_ATTACH_TIMEOUT))
+
+        self.attach_timeout = test_config.get(self.KEY_ATTACH_TIMEOUT,
+                                              self.DEFAULT_ATTACH_TIMEOUT)
+
+        # Configuration object for the primary base station
+        self.primary_config = self.BtsConfig()
+
+        # Store the current calibrated band
+        self.current_calibrated_band = None
+
+        # Path loss measured during calibration
+        self.dl_path_loss = None
+        self.ul_path_loss = None
+
+        # Target signal levels obtained during configuration
+        self.sim_dl_power = None
+        self.sim_ul_power = None
+
+        # Stores RRC status change timer
+        self.rrc_sc_timer = None
+
+        # Set to default APN
+        log.info("Configuring APN.")
+        self.dut.set_apn('test', 'test')
+
+        # Enable roaming on the phone
+        self.dut.toggle_data_roaming(True)
+
+        # Make sure airplane mode is on so the phone won't attach right away
+        self.dut.toggle_airplane_mode(True)
+
+        # Wait for airplane mode setting to propagate
+        # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+        time.sleep(2)
+
+        # Prepare the simulator for this simulation setup
+        self.setup_simulator()
+
+    def setup_simulator(self):
+        """ Do initial configuration in the simulator. """
+        raise NotImplementedError()
+
+    def attach(self):
+        """ Attach the phone to the basestation.
+
+        Sets a good signal level, toggles airplane mode
+        and waits for the phone to attach.
+
+        Returns:
+            True if the phone was able to attach, False if not.
+        """
+
+        # Turn on airplane mode
+        self.dut.toggle_airplane_mode(True)
+
+        # Wait for airplane mode setting to propagate
+        # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+        time.sleep(2)
+
+        # Provide a good signal power for the phone to attach easily
+        new_config = self.BtsConfig()
+        new_config.input_power = -10
+        new_config.output_power = -30
+        self.simulator.configure_bts(new_config)
+        self.primary_config.incorporate(new_config)
+
+        # Try to attach the phone.
+        for i in range(self.attach_retries):
+
+            try:
+
+                # Turn off airplane mode
+                self.dut.toggle_airplane_mode(False)
+
+                # Wait for the phone to attach.
+                self.simulator.wait_until_attached(timeout=self.attach_timeout)
+
+            except cellular_simulator.CellularSimulatorError:
+
+                # The phone failed to attach
+                self.log.info(
+                        "UE failed to attach on attempt number {}.".format(i +
+                                                                           1))
+
+                # Turn airplane mode on to prepare the phone for a retry.
+                self.dut.toggle_airplane_mode(True)
+
+                # Wait for APM to propagate
+                # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+                time.sleep(3)
+
+                # Retry
+                if i < self.attach_retries - 1:
+                    continue
+                else:
+                    return False
+
+            else:
+                # The phone attached successfully.
+                # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+                time.sleep(self.SETTLING_TIME)
+                self.log.info("UE attached to the callbox.")
+                break
+
+        return True
+
+    def detach(self):
+        """ Detach the phone from the basestation.
+
+        Turns on airplane mode and resets the basestation.
+        """
+
+        # Set the DUT to airplane mode so it doesn't see the
+        # cellular network going off
+        self.dut.toggle_airplane_mode(True)
+
+        # Wait for APM to propagate
+        # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+        time.sleep(2)
+
+        # Power off basestation
+        self.simulator.detach()
+
+    def stop(self):
+        """  Detach phone from the basestation by stopping the simulation.
+
+        Stop the simulation and turn airplane mode on. """
+
+        # Set the DUT to airplane mode so it doesn't see the
+        # cellular network going off
+        self.dut.toggle_airplane_mode(True)
+
+        # Wait for APM to propagate
+        # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+        time.sleep(2)
+
+        # Stop the simulation
+        self.simulator.stop()
+
+    def start(self):
+        """ Start the simulation by attaching the phone and setting the
+        required DL and UL power.
+
+        Note that this refers to starting the simulated testing environment
+        and not to starting the signaling on the cellular instruments,
+        which might have been done earlier depending on the cellular
+        instrument controller implementation. """
+
+        if not self.attach():
+            raise RuntimeError('Could not attach to base station.')
+
+        # Starts IP traffic while changing this setting to force the UE to be
+        # in Communication state, as UL power cannot be set in Idle state
+        self.start_traffic_for_calibration()
+
+        self.simulator.wait_until_communication_state()
+
+        # Set uplink power to a minimum before going to the actual desired
+        # value. This avoids inconsistencies produced by the hysteresis in the
+        # PA switching points.
+        self.log.info('Setting UL power to -30 dBm before going to the '
+                      'requested value to avoid inconsistencies caused by '
+                      'hysteresis.')
+        self.set_uplink_tx_power(-30)
+
+        # Set signal levels obtained from the test parameters
+        self.set_downlink_rx_power(self.sim_dl_power)
+        self.set_uplink_tx_power(self.sim_ul_power)
+
+        # Verify signal level
+        try:
+            rx_power, tx_power = self.dut.get_rx_tx_power_levels()
+
+            if not tx_power or not rx_power[0]:
+                raise RuntimeError('The method returned invalid Tx/Rx values.')
+
+            self.log.info('Signal level reported by the DUT in dBm: Tx = {}, '
+                          'Rx = {}.'.format(tx_power, rx_power))
+
+            if abs(self.sim_ul_power - tx_power) > 1:
+                self.log.warning('Tx power at the UE is off by more than 1 dB')
+
+        except RuntimeError as e:
+            self.log.error('Could not verify Rx / Tx levels: %s.' % e)
+
+        # Stop IP traffic after setting the UL power level
+        self.stop_traffic_for_calibration()
+
+    def parse_parameters(self, parameters):
+        """ Configures simulation using a list of parameters.
+
+        Consumes parameters from a list.
+        Child classes need to call this method first.
+
+        Args:
+            parameters: list of parameters
+        """
+
+        raise NotImplementedError()
+
+    def consume_parameter(self, parameters, parameter_name, num_values=0):
+        """ Parses a parameter from a list.
+
+        Parameters are deleted from the list after being consumed to ensure
+        that they are not used twice.
+
+        Args:
+            parameters: list of parameters
+            parameter_name: keyword to look up in the list
+            num_values: number of arguments following the
+                parameter name in the list
+        Returns:
+            A list containing the parameter name and the following num_values
+            arguments
+        """
+
+        try:
+            i = parameters.index(parameter_name)
+        except ValueError:
+            # parameter_name is not set
+            return []
+
+        return_list = []
+
+        try:
+            for j in range(num_values + 1):
+                return_list.append(parameters.pop(i))
+        except IndexError:
+            raise ValueError(
+                    "Parameter {} has to be followed by {} values.".format(
+                            parameter_name, num_values))
+
+        return return_list
+
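+    # Example (sketch, hypothetical keywords): given
+    # parameters = ['band', '7', 'pul', 'n20'],
+    # consume_parameter(parameters, 'band', 1) returns ['band', '7'] and
+    # leaves parameters == ['pul', 'n20'].
+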
+    def set_uplink_tx_power(self, signal_level):
+        """ Configure the uplink tx power level
+
+        Args:
+            signal_level: calibrated tx power in dBm
+        """
+        new_config = self.BtsConfig()
+        new_config.input_power = self.calibrated_uplink_tx_power(
+                self.primary_config, signal_level)
+        self.simulator.configure_bts(new_config)
+        self.primary_config.incorporate(new_config)
+
+    def set_downlink_rx_power(self, signal_level):
+        """ Configure the downlink rx power level
+
+        Args:
+            signal_level: calibrated rx power in dBm
+        """
+        new_config = self.BtsConfig()
+        new_config.output_power = self.calibrated_downlink_rx_power(
+                self.primary_config, signal_level)
+        self.simulator.configure_bts(new_config)
+        self.primary_config.incorporate(new_config)
+
+    def get_uplink_power_from_parameters(self, parameters):
+        """ Reads uplink power from a list of parameters. """
+
+        values = self.consume_parameter(parameters, self.PARAM_UL_PW, 1)
+
+        if values:
+            if values[1] in self.UPLINK_SIGNAL_LEVEL_DICTIONARY:
+                return self.UPLINK_SIGNAL_LEVEL_DICTIONARY[values[1]]
+            else:
+                try:
+                    if values[1][0] == 'n':
+                        # Treat the 'n' character as a negative sign
+                        return -int(values[1][1:])
+                    else:
+                        return int(values[1])
+                except ValueError:
+                    pass
+
+        # If the method got to this point it is because PARAM_UL_PW was not
+        # included in the test parameters or the provided value was invalid.
+        raise ValueError(
+                "The test name needs to include parameter {} followed by the "
+                "desired uplink power expressed by an integer number in dBm "
+                "or by one the following values: {}. To indicate negative "
+                "values, use the letter n instead of - sign.".format(
+                        self.PARAM_UL_PW,
+                        list(self.UPLINK_SIGNAL_LEVEL_DICTIONARY.keys())))
+
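+    # For example, assuming PARAM_UL_PW is the keyword 'pul', a test name
+    # containing '... pul n20 ...' selects an uplink power of -20 dBm, while
+    # '... pul 10 ...' selects +10 dBm.
+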
+    def get_downlink_power_from_parameters(self, parameters):
+        """ Reads downlink power from a list of parameters. """
+
+        values = self.consume_parameter(parameters, self.PARAM_DL_PW, 1)
+
+        if values:
+            if values[1] not in self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY:
+                raise ValueError("Invalid signal level value {}.".format(
+                        values[1]))
+            else:
+                return self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY[values[1]]
+        else:
+            # Use default value
+            power = self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY['excellent']
+            self.log.info("No DL signal level value was indicated in the test "
+                          "parameters. Using default value of {} {}.".format(
+                                  power, self.DOWNLINK_SIGNAL_LEVEL_UNITS))
+            return power
+
+    def calibrated_downlink_rx_power(self, bts_config, signal_level):
+        """ Calculates the power level at the instrument's output in order to
+        obtain the required rx power level at the DUT's input.
+
+        If calibration values are not available, returns the uncalibrated signal
+        level.
+
+        Args:
+            bts_config: the current configuration at the base station. Derived
+                class implementations can use this object to indicate power as
+                spectral power density or in other units.
+            signal_level: desired downlink received power; can be either an
+                Enum member, an int or a float
+        """
+
+        # Obtain power value if the provided signal_level is a key value pair
+        if isinstance(signal_level, Enum):
+            power = signal_level.value
+        else:
+            power = signal_level
+
+        # Try to use measured path loss value. If this was not set, it will
+        # throw a TypeError exception
+        try:
+            calibrated_power = round(power + self.dl_path_loss)
+            if calibrated_power > self.simulator.MAX_DL_POWER:
+                self.log.warning(
+                        "Cannot achieve phone DL Rx power of {} dBm. Requested TX "
+                        "power of {} dBm exceeds callbox limit!".format(
+                                power, calibrated_power))
+                calibrated_power = self.simulator.MAX_DL_POWER
+                self.log.warning(
+                        "Setting callbox Tx power to max possible ({} dBm)".
+                        format(calibrated_power))
+
+            self.log.info(
+                    "Requested phone DL Rx power of {} dBm, setting callbox Tx "
+                    "power at {} dBm".format(power, calibrated_power))
+            # Power has to be an integer, so calibration won't be exact.
+            # Inform the actual received power after rounding.
+            self.log.info(
+                    "Phone downlink received power is {0:.2f} dBm".format(
+                            calibrated_power - self.dl_path_loss))
+            return calibrated_power
+        except TypeError:
+            self.log.info("Phone downlink received power set to {} (link is "
+                          "uncalibrated).".format(round(power)))
+            return round(power)
+
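+    # Worked example (sketch) of the calibration above: with a measured
+    # dl_path_loss of 35 dB, requesting a phone Rx power of -60 dBm sets the
+    # callbox Tx power to round(-60 + 35) = -25 dBm; results above
+    # self.simulator.MAX_DL_POWER are clamped to that limit.
+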
+    def calibrated_uplink_tx_power(self, bts_config, signal_level):
+        """ Calculates the power level at the instrument's input in order to
+        obtain the required tx power level at the DUT's output.
+
+        If calibration values are not available, returns the uncalibrated signal
+        level.
+
+        Args:
+            bts_config: the current configuration at the base station. Derived
+                class implementations can use this object to indicate power as
+                spectral power density or in other units.
+            signal_level: desired uplink transmitted power; can be either an
+                Enum member, an int or a float
+        """
+
+        # Obtain power value if the provided signal_level is a key value pair
+        if isinstance(signal_level, Enum):
+            power = signal_level.value
+        else:
+            power = signal_level
+
+        # Try to use measured path loss value. If this was not set, it will
+        # throw a TypeError exception
+        try:
+            calibrated_power = round(power - self.ul_path_loss)
+            if calibrated_power < self.UL_MIN_POWER:
+                self.log.warning(
+                        "Cannot achieve phone UL Tx power of {} dBm. Requested UL "
+                        "power of {} dBm exceeds callbox limit!".format(
+                                power, calibrated_power))
+                calibrated_power = self.UL_MIN_POWER
+                self.log.warning(
+                        "Setting UL Tx power to min possible ({} dBm)".format(
+                                calibrated_power))
+
+            self.log.info(
+                    "Requested phone UL Tx power of {} dBm, setting callbox Rx "
+                    "power at {} dBm".format(power, calibrated_power))
+            # Power has to be an integer, so calibration won't be exact.
+            # Inform the actual transmitted power after rounding.
+            self.log.info(
+                    "Phone uplink transmitted power is {0:.2f} dBm".format(
+                            calibrated_power + self.ul_path_loss))
+            return calibrated_power
+        except TypeError:
+            self.log.info("Phone uplink transmitted power set to {} (link is "
+                          "uncalibrated).".format(round(power)))
+            return round(power)
+
+    def calibrate(self, band):
+        """ Calculates UL and DL path loss if it wasn't done before.
+
+        The simulator should already be set to the required band before
+        calling this method.
+
+        Args:
+            band: the band that is currently being calibrated.
+        """
+
+        if self.dl_path_loss and self.ul_path_loss:
+            self.log.info("Measurements are already calibrated.")
+
+        # Attach the phone to the base station
+        if not self.attach():
+            self.log.info(
+                    "Skipping calibration because the phone failed to attach.")
+            return
+
+        # If downlink or uplink were not yet calibrated, do it now
+        if not self.dl_path_loss:
+            self.dl_path_loss = self.downlink_calibration()
+        if not self.ul_path_loss:
+            self.ul_path_loss = self.uplink_calibration()
+
+        # Detach after calibrating
+        self.detach()
+        # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+        time.sleep(2)
+
+    def start_traffic_for_calibration(self):
+        """
+            Starts UDP IP traffic before running calibration. Uses APN_1
+            configured in the phone.
+        """
+        self.simulator.start_data_traffic()
+
+    def stop_traffic_for_calibration(self):
+        """
+            Stops IP traffic after calibration.
+        """
+        self.simulator.stop_data_traffic()
+
+    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
+        """ Computes downlink path loss and returns the calibration value
+
+        The DUT needs to be attached to the base station before calling this
+        method.
+
+        Args:
+            rat: desired RAT to calibrate (matching the label reported by
+                the phone)
+            power_units_conversion_func: a function to convert the units
+                reported by the phone to dBm. Needs to take two arguments: the
+                reported signal level and the bts config. Use None if no
+                conversion is needed.
+        Returns:
+            The downlink path loss (calibration value) in dB.
+        """
+
+        # Check if this parameter was set. Child classes may need to override
+        # this class passing the necessary parameters.
+        if not rat:
+            raise ValueError(
+                    "The parameter 'rat' has to indicate the RAT being used as "
+                    "reported by the phone.")
+
+        # Save initial output level to restore it after calibration
+        restoration_config = self.BtsConfig()
+        restoration_config.output_power = self.primary_config.output_power
+
+        # Set BTS to a good output level to minimize measurement error
+        new_config = self.BtsConfig()
+        new_config.output_power = self.simulator.MAX_DL_POWER - 5
+        self.simulator.configure_bts(new_config)
+
+        # Starting IP traffic
+        self.start_traffic_for_calibration()
+
+        down_power_measured = []
+        for i in range(0, self.NUM_DL_CAL_READS):
+            # For some reason, the RSRP gets updated on Screen ON event
+            signal_strength = self.dut.get_telephony_signal_strength()
+            down_power_measured.append(signal_strength[rat])
+            # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+            time.sleep(5)
+
+        # Stop IP traffic
+        self.stop_traffic_for_calibration()
+
+        # Reset bts to original settings
+        self.simulator.configure_bts(restoration_config)
+        # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+        time.sleep(2)
+
+        # Calculate the mean of the measurements
+        reported_asu_power = np.nanmean(down_power_measured)
+
+        # Convert from RSRP to signal power
+        if power_units_conversion_func:
+            avg_down_power = power_units_conversion_func(
+                    reported_asu_power, self.primary_config)
+        else:
+            avg_down_power = reported_asu_power
+
+        # Calculate Path Loss
+        dl_target_power = self.simulator.MAX_DL_POWER - 5
+        down_call_path_loss = dl_target_power - avg_down_power
+
+        # Validate the result
+        if not 0 < down_call_path_loss < 100:
+            raise RuntimeError(
+                    "Downlink calibration failed. The calculated path loss "
+                    "value was {} dB.".format(down_call_path_loss))
+
+        self.log.info("Measured downlink path loss: {} dB".format(
+                down_call_path_loss))
+
+        return down_call_path_loss
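+        # Illustrative example with assumed values (not part of the upstream
+        # change): if the callbox's MAX_DL_POWER is -30 dBm, the BTS transmits
+        # at -35 dBm during calibration. If the phone then reports an average
+        # downlink power of -78 dBm, the stored calibration value would be
+        #   dl_path_loss = -35 - (-78) = 43 dB
+        # which falls inside the 0 < loss < 100 sanity-check range.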
+
+    def uplink_calibration(self):
+        """ Computes uplink path loss and returns the calibration value
+
+        The DUT needs to be attached to the base station before calling this
+        method.
+
+        Returns:
+            The computed uplink path loss (calibration value) in dB.
+        """
+
+        # Save initial input level to restore it after calibration
+        restoration_config = self.BtsConfig()
+        restoration_config.input_power = self.primary_config.input_power
+
+        # Set the BTS to the maximum input level allowed in order to perform
+        # uplink calibration
+        target_power = self.MAX_PHONE_OUTPUT_POWER
+        new_config = self.BtsConfig()
+        new_config.input_power = self.MAX_BTS_INPUT_POWER
+        self.simulator.configure_bts(new_config)
+
+        # Start IP traffic
+        self.start_traffic_for_calibration()
+
+        up_power_per_chain = []
+        # Get the number of chains
+        cmd = 'MONITOR? UL_PUSCH'
+        uplink_meas_power = self.anritsu.send_query(cmd)
+        str_power_chain = uplink_meas_power.split(',')
+        num_chains = len(str_power_chain)
+        for ichain in range(0, num_chains):
+            up_power_per_chain.append([])
+
+        for i in range(0, self.NUM_UL_CAL_READS):
+            uplink_meas_power = self.anritsu.send_query(cmd)
+            str_power_chain = uplink_meas_power.split(',')
+
+            for ichain in range(0, num_chains):
+                if str_power_chain[ichain] == 'DEACTIVE':
+                    up_power_per_chain[ichain].append(float('nan'))
+                else:
+                    up_power_per_chain[ichain].append(
+                            float(str_power_chain[ichain]))
+
+            # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+            time.sleep(3)
+
+        # Stop IP traffic
+        self.stop_traffic_for_calibration()
+
+        # Reset bts to original settings
+        self.simulator.configure_bts(restoration_config)
+        # TODO @latware b/186880504 change this to a poll_for_condition (Q3 21)
+        time.sleep(2)
+
+        # Phone only supports 1x1 Uplink so always chain 0
+        avg_up_power = np.nanmean(up_power_per_chain[0])
+        if np.isnan(avg_up_power):
+            raise RuntimeError(
+                    "Calibration failed because the callbox reported the "
+                    "chain as 'DEACTIVE'.")
+
+        up_call_path_loss = target_power - avg_up_power
+
+        # Validate the result
+        if not 0 < up_call_path_loss < 100:
+            raise RuntimeError(
+                    "Uplink calibration failed. The calculated path loss "
+                    "value was {} dB.".format(up_call_path_loss))
+
+        self.log.info(
+                "Measured uplink path loss: {} dB".format(up_call_path_loss))
+
+        return up_call_path_loss
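+        # Illustrative example with assumed values (not part of the upstream
+        # change): if MAX_PHONE_OUTPUT_POWER is 24 dBm and the callbox measures
+        # an average PUSCH power of -11 dBm on chain 0, the stored value is
+        #   ul_path_loss = 24 - (-11) = 35 dB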
+
+    def load_pathloss_if_required(self):
+        """ If calibration is required, try to obtain the pathloss values from
+        the calibration table and measure them if they are not available. """
+        # Invalidate the previous values
+        self.dl_path_loss = None
+        self.ul_path_loss = None
+
+        # Load the new ones
+        if self.calibration_required:
+            band = self.primary_config.band
+
+            # Try loading the path loss values from the calibration table. If
+            # they are not available, use the automated calibration procedure.
+            try:
+                self.dl_path_loss = self.calibration_table[band]["dl"]
+                self.ul_path_loss = self.calibration_table[band]["ul"]
+            except KeyError:
+                self.calibrate(band)
+
+            # Complete the calibration table with the new values to be used in
+            # the next tests.
+            if band not in self.calibration_table:
+                self.calibration_table[band] = {}
+
+            if "dl" not in self.calibration_table[band] and self.dl_path_loss:
+                self.calibration_table[band]["dl"] = self.dl_path_loss
+
+            if "ul" not in self.calibration_table[band] and self.ul_path_loss:
+                self.calibration_table[band]["ul"] = self.ul_path_loss
+
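+    # For reference, load_pathloss_if_required (above) expects the calibration
+    # table to be a nested dictionary keyed by band, e.g. (illustrative values
+    # only):
+    #   calibration_table = {'7': {'dl': 43.2, 'ul': 35.1}}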
+    def maximum_downlink_throughput(self):
+        """ Calculates maximum achievable downlink throughput in the current
+        simulation state.
+
+        Because throughput depends on the RAT, this method needs to be
+        implemented by child classes.
+
+        Returns:
+            Maximum throughput in mbps
+        """
+        raise NotImplementedError()
+
+    def maximum_uplink_throughput(self):
+        """ Calculates maximum achievable downlink throughput in the current
+        simulation state.
+
+        Because thoughput is dependent on the RAT, this method needs to be
+        implemented by children classes.
+
+        Returns:
+            Maximum throughput in mbps
+        """
+        raise NotImplementedError()
+
+    def send_sms(self, sms_message):
+        """ Sends the set SMS message. """
+        raise NotImplementedError()
diff --git a/server/cros/cellular/simulation_utils/ChromebookCellularDut.py b/server/cros/cellular/simulation_utils/ChromebookCellularDut.py
new file mode 100644
index 0000000..51b9a85
--- /dev/null
+++ b/server/cros/cellular/simulation_utils/ChromebookCellularDut.py
@@ -0,0 +1,66 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server.cros.cellular.simulation_utils import BaseCellularDut
+
+
+class ChromebookCellularDut(BaseCellularDut.BaseCellularDut):
+    """ Chromebook implementation of the cellular DUT class."""
+
+    def __init__(self, ad, logger):
+        """ Keeps a handler to the chromebook device.
+
+        Args:
+           ad: a handle to the chromebook device
+           logger: a handler to the logger object
+        """
+        self.ad = ad
+        self.log = logger
+
+    def toggle_airplane_mode(self, new_state=True):
+        """ Turns on and off mobile data.
+        """
+        if new_state:
+            self.ad.run(
+                "dbus-send --system --fixed --print-reply --dest=org.chromium."
+                "flimflam / org.chromium.flimflam.Manager.DisableTechnology st"
+                "ring:cellular")
+        else:
+            self.ad.run(
+                "dbus-send --system --fixed --print-reply --dest=org.chromium."
+                "flimflam / org.chromium.flimflam.Manager.EnableTechnology str"
+                "ing:cellular")
+
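+    # For reference, the command assembled in toggle_airplane_mode above is
+    # equivalent to running the following on the DUT (enable path shown;
+    # DisableTechnology is used when airplane mode is turned on):
+    #   dbus-send --system --fixed --print-reply \
+    #       --dest=org.chromium.flimflam / \
+    #       org.chromium.flimflam.Manager.EnableTechnology string:cellular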
+    def toggle_data_roaming(self, new_state=True):
+        """ Enables or disables cellular data roaming.
+
+        Args:
+          new_state: True if data roaming needs to be enabled.
+        """
+        pass
+
+    def get_rx_tx_power_levels(self):
+        """ Not relevant to Chromebooks,
+        but required interface for compatibility.
+        """
+        return (None, None)
+
+    def set_apn(self, name, apn, type='default'):
+        """ Not currently supported by Chromebooks yet.
+        """
+        pass
+
+    def set_preferred_network_type(self, type):
+        """ Sets the preferred RAT.
+
+        Args:
+          type: an instance of class PreferredNetworkType
+        """
+        pass
+
+    def get_telephony_signal_strength(self):
+        """ Not relevant to Chromebooks,
+        but required interface for compatibility.
+        """
+        pass
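+
+
+# A minimal usage sketch (hypothetical host and logger objects, shown only to
+# illustrate the constructor signature above):
+#   dut = ChromebookCellularDut(host, logging.getLogger())
+#   dut.toggle_airplane_mode(new_state=False)  # re-enable cellular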
diff --git a/server/cros/cellular/simulation_utils/LteSimulation.py b/server/cros/cellular/simulation_utils/LteSimulation.py
new file mode 100644
index 0000000..3b7778c
--- /dev/null
+++ b/server/cros/cellular/simulation_utils/LteSimulation.py
@@ -0,0 +1,1299 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import math
+import time
+from enum import Enum
+
+import common
+
+from autotest_lib.server.cros.cellular.simulation_utils.BaseSimulation import BaseSimulation
+from autotest_lib.server.cros.cellular.simulation_utils import BaseCellularDut
+
+
+class TransmissionMode(Enum):
+    """ Transmission modes for LTE (e.g., TM1, TM4, ...) """
+    TM1 = "TM1"
+    TM2 = "TM2"
+    TM3 = "TM3"
+    TM4 = "TM4"
+    TM7 = "TM7"
+    TM8 = "TM8"
+    TM9 = "TM9"
+
+
+class MimoMode(Enum):
+    """ Mimo modes """
+    MIMO_1x1 = "1x1"
+    MIMO_2x2 = "2x2"
+    MIMO_4x4 = "4x4"
+
+
+class SchedulingMode(Enum):
+    """ Traffic scheduling modes (e.g., STATIC, DYNAMIC) """
+    DYNAMIC = "DYNAMIC"
+    STATIC = "STATIC"
+
+
+class DuplexMode(Enum):
+    """ DL/UL Duplex mode """
+    FDD = "FDD"
+    TDD = "TDD"
+
+
+class ModulationType(Enum):
+    """DL/UL Modulation order."""
+    QPSK = 'QPSK'
+    Q16 = '16QAM'
+    Q64 = '64QAM'
+    Q256 = '256QAM'
+
+
+class LteSimulation(BaseSimulation):
+    """ Single-carrier LTE simulation. """
+
+    # Simulation config keywords contained in the test name
+    PARAM_FRAME_CONFIG = "tddconfig"
+    PARAM_BW = "bw"
+    PARAM_SCHEDULING = "scheduling"
+    PARAM_SCHEDULING_STATIC = "static"
+    PARAM_SCHEDULING_DYNAMIC = "dynamic"
+    PARAM_PATTERN = "pattern"
+    PARAM_TM = "tm"
+    PARAM_UL_PW = 'pul'
+    PARAM_DL_PW = 'pdl'
+    PARAM_BAND = "band"
+    PARAM_MIMO = "mimo"
+    PARAM_DL_MCS = 'dlmcs'
+    PARAM_UL_MCS = 'ulmcs'
+    PARAM_SSF = 'ssf'
+    PARAM_CFI = 'cfi'
+    PARAM_PAGING = 'paging'
+    PARAM_PHICH = 'phich'
+    PARAM_RRC_STATUS_CHANGE_TIMER = "rrcstatuschangetimer"
+    PARAM_DRX = 'drx'
+
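+    # A hypothetical test name combining these keywords (assuming the usual
+    # underscore-separated convention consumed by consume_parameter) could be:
+    #   ..._band_7_bw_10_mimo_2x2_tm_3_scheduling_static_pattern_100_100_pul_max_pdl_excellent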
+    # Test config keywords
+    KEY_TBS_PATTERN = "tbs_pattern_on"
+    KEY_DL_256_QAM = "256_qam_dl"
+    KEY_UL_64_QAM = "64_qam_ul"
+
+    # Units in which signal level is defined in DOWNLINK_SIGNAL_LEVEL_DICTIONARY
+    DOWNLINK_SIGNAL_LEVEL_UNITS = "RSRP"
+
+    # RSRP signal level thresholds (as reported by Android) in dBm/15 kHz.
+    # Excellent is set to -75 since callbox B Tx power is limited to -30 dBm
+    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {
+            'excellent': -75,
+            'high': -110,
+            'medium': -115,
+            'weak': -120,
+            'disconnected': -170
+    }
+
+    # Transmitted output power for the phone (dBm)
+    UPLINK_SIGNAL_LEVEL_DICTIONARY = {
+            'max': 24,
+            'high': 13,
+            'medium': 3,
+            'low': -20
+    }
+
+    # Bandwidth [MHz] to total RBs mapping
+    total_rbs_dictionary = {20: 100, 15: 75, 10: 50, 5: 25, 3: 15, 1.4: 6}
+
+    # Bandwidth [MHz] to RB group size
+    rbg_dictionary = {20: 4, 15: 4, 10: 3, 5: 2, 3: 2, 1.4: 1}
+
+    # Bandwidth [MHz] to minimum number of DL RBs that can be assigned to a UE
+    min_dl_rbs_dictionary = {20: 16, 15: 12, 10: 9, 5: 4, 3: 4, 1.4: 2}
+
+    # Bandwidth [MHz] to minimum number of UL RBs that can be assigned to a UE
+    min_ul_rbs_dictionary = {20: 8, 15: 6, 10: 4, 5: 2, 3: 2, 1.4: 1}
+
+    # Allowed bandwidth for each band.
+    allowed_bandwidth_dictionary = {
+            1: [5, 10, 15, 20],
+            2: [1.4, 3, 5, 10, 15, 20],
+            3: [1.4, 3, 5, 10, 15, 20],
+            4: [1.4, 3, 5, 10, 15, 20],
+            5: [1.4, 3, 5, 10],
+            7: [5, 10, 15, 20],
+            8: [1.4, 3, 5, 10],
+            10: [5, 10, 15, 20],
+            11: [5, 10],
+            12: [1.4, 3, 5, 10],
+            13: [5, 10],
+            14: [5, 10],
+            17: [5, 10],
+            18: [5, 10, 15],
+            19: [5, 10, 15],
+            20: [5, 10, 15, 20],
+            21: [5, 10, 15],
+            22: [5, 10, 15, 20],
+            24: [5, 10],
+            25: [1.4, 3, 5, 10, 15, 20],
+            26: [1.4, 3, 5, 10, 15],
+            27: [1.4, 3, 5, 10],
+            28: [3, 5, 10, 15, 20],
+            29: [3, 5, 10],
+            30: [5, 10],
+            31: [1.4, 3, 5],
+            32: [5, 10, 15, 20],
+            33: [5, 10, 15, 20],
+            34: [5, 10, 15],
+            35: [1.4, 3, 5, 10, 15, 20],
+            36: [1.4, 3, 5, 10, 15, 20],
+            37: [5, 10, 15, 20],
+            38: [20],
+            39: [5, 10, 15, 20],
+            40: [5, 10, 15, 20],
+            41: [5, 10, 15, 20],
+            42: [5, 10, 15, 20],
+            43: [5, 10, 15, 20],
+            44: [3, 5, 10, 15, 20],
+            45: [5, 10, 15, 20],
+            46: [10, 20],
+            47: [10, 20],
+            48: [5, 10, 15, 20],
+            49: [10, 20],
+            50: [3, 5, 10, 15, 20],
+            51: [3, 5],
+            52: [5, 10, 15, 20],
+            65: [5, 10, 15, 20],
+            66: [1.4, 3, 5, 10, 15, 20],
+            67: [5, 10, 15, 20],
+            68: [5, 10, 15],
+            69: [5],
+            70: [5, 10, 15],
+            71: [5, 10, 15, 20],
+            72: [1.4, 3, 5],
+            73: [1.4, 3, 5],
+            74: [1.4, 3, 5, 10, 15, 20],
+            75: [5, 10, 15, 20],
+            76: [5],
+            85: [5, 10],
+            252: [20],
+            255: [20]
+    }
+
+    # Peak throughput lookup tables for each TDD subframe
+    # configuration and bandwidth
+    # yapf: disable
+    tdd_config4_tput_lut = {
+        0: {
+            5: {'DL': 3.82, 'UL': 2.63},
+            10: {'DL': 11.31, 'UL': 9.03},
+            15: {'DL': 16.9, 'UL': 20.62},
+            20: {'DL': 22.88, 'UL': 28.43}
+        },
+        1: {
+            5: {'DL': 6.13, 'UL': 4.08},
+            10: {'DL': 18.36, 'UL': 9.69},
+            15: {'DL': 28.62, 'UL': 14.21},
+            20: {'DL': 39.04, 'UL': 19.23}
+        },
+        2: {
+            5: {'DL': 5.68, 'UL': 2.30},
+            10: {'DL': 25.51, 'UL': 4.68},
+            15: {'DL': 39.3, 'UL': 7.13},
+            20: {'DL': 53.64, 'UL': 9.72}
+        },
+        3: {
+            5: {'DL': 8.26, 'UL': 3.45},
+            10: {'DL': 23.20, 'UL': 6.99},
+            15: {'DL': 35.35, 'UL': 10.75},
+            20: {'DL': 48.3, 'UL': 14.6}
+        },
+        4: {
+            5: {'DL': 6.16, 'UL': 2.30},
+            10: {'DL': 26.77, 'UL': 4.68},
+            15: {'DL': 40.7, 'UL': 7.18},
+            20: {'DL': 55.6, 'UL': 9.73}
+        },
+        5: {
+            5: {'DL': 6.91, 'UL': 1.12},
+            10: {'DL': 30.33, 'UL': 2.33},
+            15: {'DL': 46.04, 'UL': 3.54},
+            20: {'DL': 62.9, 'UL': 4.83}
+        },
+        6: {
+            5: {'DL': 6.13, 'UL': 4.13},
+            10: {'DL': 14.79, 'UL': 11.98},
+            15: {'DL': 23.28, 'UL': 17.46},
+            20: {'DL': 31.75, 'UL': 23.95}
+        }
+    }
+
+    tdd_config3_tput_lut = {
+        0: {
+            5: {'DL': 5.04, 'UL': 3.7},
+            10: {'DL': 15.11, 'UL': 17.56},
+            15: {'DL': 22.59, 'UL': 30.31},
+            20: {'DL': 30.41, 'UL': 41.61}
+        },
+        1: {
+            5: {'DL': 8.07, 'UL': 5.66},
+            10: {'DL': 24.58, 'UL': 13.66},
+            15: {'DL': 39.05, 'UL': 20.68},
+            20: {'DL': 51.59, 'UL': 28.76}
+        },
+        2: {
+            5: {'DL': 7.59, 'UL': 3.31},
+            10: {'DL': 34.08, 'UL': 6.93},
+            15: {'DL': 53.64, 'UL': 10.51},
+            20: {'DL': 70.55, 'UL': 14.41}
+        },
+        3: {
+            5: {'DL': 10.9, 'UL': 5.0},
+            10: {'DL': 30.99, 'UL': 10.25},
+            15: {'DL': 48.3, 'UL': 15.81},
+            20: {'DL': 63.24, 'UL': 21.65}
+        },
+        4: {
+            5: {'DL': 8.11, 'UL': 3.32},
+            10: {'DL': 35.74, 'UL': 6.95},
+            15: {'DL': 55.6, 'UL': 10.51},
+            20: {'DL': 72.72, 'UL': 14.41}
+        },
+        5: {
+            5: {'DL': 9.28, 'UL': 1.57},
+            10: {'DL': 40.49, 'UL': 3.44},
+            15: {'DL': 62.9, 'UL': 5.23},
+            20: {'DL': 82.21, 'UL': 7.15}
+        },
+        6: {
+            5: {'DL': 8.06, 'UL': 5.74},
+            10: {'DL': 19.82, 'UL': 17.51},
+            15: {'DL': 31.75, 'UL': 25.77},
+            20: {'DL': 42.12, 'UL': 34.91}
+        }
+    }
+
+    tdd_config2_tput_lut = {
+        0: {
+            5: {'DL': 3.11, 'UL': 2.55},
+            10: {'DL': 9.93, 'UL': 11.1},
+            15: {'DL': 13.9, 'UL': 21.51},
+            20: {'DL': 20.02, 'UL': 41.66}
+        },
+        1: {
+            5: {'DL': 5.33, 'UL': 4.27},
+            10: {'DL': 15.14, 'UL': 13.95},
+            15: {'DL': 33.84, 'UL': 19.73},
+            20: {'DL': 44.61, 'UL': 27.35}
+        },
+        2: {
+            5: {'DL': 6.87, 'UL': 3.32},
+            10: {'DL': 17.06, 'UL': 6.76},
+            15: {'DL': 49.63, 'UL': 10.5},
+            20: {'DL': 65.2, 'UL': 14.41}
+        },
+        3: {
+            5: {'DL': 5.41, 'UL': 4.17},
+            10: {'DL': 16.89, 'UL': 9.73},
+            15: {'DL': 44.29, 'UL': 15.7},
+            20: {'DL': 53.95, 'UL': 19.85}
+        },
+        4: {
+            5: {'DL': 8.7, 'UL': 3.32},
+            10: {'DL': 17.58, 'UL': 6.76},
+            15: {'DL': 51.08, 'UL': 10.47},
+            20: {'DL': 66.45, 'UL': 14.38}
+        },
+        5: {
+            5: {'DL': 9.46, 'UL': 1.55},
+            10: {'DL': 19.02, 'UL': 3.48},
+            15: {'DL': 58.89, 'UL': 5.23},
+            20: {'DL': 76.85, 'UL': 7.1}
+        },
+        6: {
+            5: {'DL': 4.74, 'UL': 3.9},
+            10: {'DL': 12.32, 'UL': 13.37},
+            15: {'DL': 27.74, 'UL': 25.02},
+            20: {'DL': 35.48, 'UL': 32.95}
+        }
+    }
+
+    tdd_config1_tput_lut = {
+        0: {
+            5: {'DL': 4.25, 'UL': 3.35},
+            10: {'DL': 8.38, 'UL': 7.22},
+            15: {'DL': 12.41, 'UL': 13.91},
+            20: {'DL': 16.27, 'UL': 24.09}
+        },
+        1: {
+            5: {'DL': 7.28, 'UL': 4.61},
+            10: {'DL': 14.73, 'UL': 9.69},
+            15: {'DL': 21.91, 'UL': 13.86},
+            20: {'DL': 27.63, 'UL': 17.18}
+        },
+        2: {
+            5: {'DL': 10.37, 'UL': 2.27},
+            10: {'DL': 20.92, 'UL': 4.66},
+            15: {'DL': 31.01, 'UL': 7.04},
+            20: {'DL': 42.03, 'UL': 9.75}
+        },
+        3: {
+            5: {'DL': 9.25, 'UL': 3.44},
+            10: {'DL': 18.38, 'UL': 6.95},
+            15: {'DL': 27.59, 'UL': 10.62},
+            20: {'DL': 34.85, 'UL': 13.45}
+        },
+        4: {
+            5: {'DL': 10.71, 'UL': 2.26},
+            10: {'DL': 21.54, 'UL': 4.67},
+            15: {'DL': 31.91, 'UL': 7.2},
+            20: {'DL': 43.35, 'UL': 9.74}
+        },
+        5: {
+            5: {'DL': 12.34, 'UL': 1.08},
+            10: {'DL': 24.78, 'UL': 2.34},
+            15: {'DL': 36.68, 'UL': 3.57},
+            20: {'DL': 49.84, 'UL': 4.81}
+        },
+        6: {
+            5: {'DL': 5.76, 'UL': 4.41},
+            10: {'DL': 11.68, 'UL': 9.7},
+            15: {'DL': 17.34, 'UL': 17.95},
+            20: {'DL': 23.5, 'UL': 23.42}
+        }
+    }
+    # yapf: enable
+
+    # Peak throughput lookup table dictionary
+    tdd_config_tput_lut_dict = {
+            'TDD_CONFIG1':
+            tdd_config1_tput_lut,  # DL 256QAM, UL 64QAM & TBS turned OFF
+            'TDD_CONFIG2':
+            tdd_config2_tput_lut,  # DL 256QAM, UL 64 QAM turned ON & TBS OFF
+            'TDD_CONFIG3':
+            tdd_config3_tput_lut,  # DL 256QAM, UL 64QAM & TBS turned ON
+            'TDD_CONFIG4':
+            tdd_config4_tput_lut  # DL 256QAM, UL 64 QAM turned OFF & TBS ON
+    }
+
+    class BtsConfig(BaseSimulation.BtsConfig):
+        """ Extension of the BaseBtsConfig to implement parameters that are
+         exclusive to LTE.
+
+        Attributes:
+            band: an integer indicating the required band number.
+            dlul_config: an integer indicating the TDD config number.
+            ssf_config: an integer indicating the Special Sub-Frame config.
+            bandwidth: a float indicating the required channel bandwidth.
+            mimo_mode: an instance of LteSimulation.MimoMode indicating the
+                required MIMO mode for the downlink signal.
+            transmission_mode: an instance of LteSimulation.TransmissionMode
+                indicating the required TM.
+            scheduling_mode: an instance of LteSimulation.SchedulingMode
+                indicating whether to use Static or Dynamic scheduling.
+            dl_rbs: an integer indicating the number of downlink RBs
+            ul_rbs: an integer indicating the number of uplink RBs
+            dl_mcs: an integer indicating the MCS for the downlink signal
+            ul_mcs: an integer indicating the MCS for the uplink signal
+            dl_modulation_order: a string indicating a DL modulation scheme
+            ul_modulation_order: a string indicating an UL modulation scheme
+            tbs_pattern_on: a boolean indicating whether full allocation mode
+                should be used or not
+            dl_channel: an integer indicating the downlink channel number
+            cfi: an integer indicating the Control Format Indicator
+            paging_cycle: an integer indicating the paging cycle duration in
+                milliseconds
+            phich: a string indicating the PHICH group size parameter
+            drx_connected_mode: a boolean indicating whether cDRX mode is
+                on or off
+            drx_on_duration_timer: number of PDCCH subframes representing
+                DRX on duration
+            drx_inactivity_timer: number of PDCCH subframes to wait before
+                entering DRX mode
+            drx_retransmission_timer: number of consecutive PDCCH subframes
+                to wait for retransmission
+            drx_long_cycle: number of subframes representing one long DRX cycle.
+                One cycle consists of DRX sleep + DRX on duration
+            drx_long_cycle_offset: number representing offset in range
+                0 to drx_long_cycle - 1
+        """
+
+        def __init__(self):
+            """ Initialize the base station config by setting all its
+            parameters to None. """
+            super(LteSimulation.BtsConfig, self).__init__()
+            self.band = None
+            self.dlul_config = None
+            self.ssf_config = None
+            self.bandwidth = None
+            self.mimo_mode = None
+            self.transmission_mode = None
+            self.scheduling_mode = None
+            self.dl_rbs = None
+            self.ul_rbs = None
+            self.dl_mcs = None
+            self.ul_mcs = None
+            self.dl_modulation_order = None
+            self.ul_modulation_order = None
+            self.tbs_pattern_on = None
+            self.dl_channel = None
+            self.cfi = None
+            self.paging_cycle = None
+            self.phich = None
+            self.drx_connected_mode = None
+            self.drx_on_duration_timer = None
+            self.drx_inactivity_timer = None
+            self.drx_retransmission_timer = None
+            self.drx_long_cycle = None
+            self.drx_long_cycle_offset = None
+
+    def __init__(self, simulator, log, dut, test_config, calibration_table):
+        """ Initializes the simulator for a single-carrier LTE simulation.
+
+        Loads a simple LTE simulation environment with one base station.
+
+        Args:
+            simulator: a cellular simulator controller
+            log: a logger handle
+            dut: a device handler implementing BaseCellularDut
+            test_config: test configuration obtained from the config file
+            calibration_table: a dictionary containing path losses for
+                different bands.
+
+        """
+
+        super(LteSimulation, self).__init__(simulator, log, dut, test_config,
+                                            calibration_table)
+
+        self.dut.set_preferred_network_type(
+                BaseCellularDut.PreferredNetworkType.LTE_ONLY)
+
+        # Get TBS pattern setting from the test configuration
+        if self.KEY_TBS_PATTERN not in test_config:
+            self.log.warning("The key '{}' is not set in the config file. "
+                             "Setting to true by default.".format(
+                                     self.KEY_TBS_PATTERN))
+        self.primary_config.tbs_pattern_on = test_config.get(
+                self.KEY_TBS_PATTERN, True)
+
+        # Get the 256-QAM setting from the test configuration
+        if self.KEY_DL_256_QAM not in test_config:
+            self.log.warning("The key '{}' is not set in the config file. "
+                             "Setting to false by default.".format(
+                                     self.KEY_DL_256_QAM))
+
+        self.dl_256_qam = test_config.get(self.KEY_DL_256_QAM, False)
+
+        if self.dl_256_qam:
+            if not self.simulator.LTE_SUPPORTS_DL_256QAM:
+                self.log.warning("The key '{}' is set to true but the "
+                                 "simulator doesn't support that modulation "
+                                 "order.".format(self.KEY_DL_256_QAM))
+                self.dl_256_qam = False
+            else:
+                self.primary_config.dl_modulation_order = ModulationType.Q256
+
+        else:
+            self.log.warning(
+                    'dl modulation 256QAM is not specified in config, '
+                    'setting to default value 64QAM')
+            self.primary_config.dl_modulation_order = ModulationType.Q64
+
+        # Get the 64-QAM setting from the test configuration
+        if self.KEY_UL_64_QAM not in test_config:
+            self.log.warning("The key '{}' is not set in the config file. "
+                             "Setting to false by default.".format(
+                                     self.KEY_UL_64_QAM))
+
+        self.ul_64_qam = test_config.get(self.KEY_UL_64_QAM, False)
+
+        if self.ul_64_qam:
+            if not self.simulator.LTE_SUPPORTS_UL_64QAM:
+                self.log.warning("The key '{}' is set to true but the "
+                                 "simulator doesn't support that modulation "
+                                 "order.".format(self.KEY_UL_64_QAM))
+                self.ul_64_qam = False
+            else:
+                self.primary_config.ul_modulation_order = ModulationType.Q64
+        else:
+            self.log.warning('ul modulation 64QAM is not specified in config, '
+                             'setting to default value 16QAM')
+            self.primary_config.ul_modulation_order = ModulationType.Q16
+
+        self.simulator.configure_bts(self.primary_config)
+
+    def setup_simulator(self):
+        """ Do initial configuration in the simulator. """
+        self.simulator.setup_lte_scenario()
+
+    def parse_parameters(self, parameters):
+        """ Configs an LTE simulation using a list of parameters.
+
+        Calls the parent method first, then consumes parameters specific to LTE.
+
+        Args:
+            parameters: list of parameters
+        """
+
+        # Instantiate a new configuration object
+        new_config = self.BtsConfig()
+
+        # Setup band
+
+        values = self.consume_parameter(parameters, self.PARAM_BAND, 1)
+
+        if not values:
+            raise ValueError(
+                    "The test name needs to include parameter '{}' followed by "
+                    "the required band number.".format(self.PARAM_BAND))
+
+        new_config.band = values[1]
+
+        # Set TDD-only configs
+        if self.get_duplex_mode(new_config.band) == DuplexMode.TDD:
+
+            # Sub-frame DL/UL config
+            values = self.consume_parameter(parameters,
+                                            self.PARAM_FRAME_CONFIG, 1)
+            if not values:
+                raise ValueError(
+                        "When a TDD band is selected the frame "
+                        "structure has to be indicated with the '{}' "
+                        "parameter followed by a number from 0 to 6.".format(
+                                self.PARAM_FRAME_CONFIG))
+
+            new_config.dlul_config = int(values[1])
+
+            # Special Sub-Frame configuration
+            values = self.consume_parameter(parameters, self.PARAM_SSF, 1)
+
+            if not values:
+                self.log.warning(
+                        'The {} parameter was not provided. Setting '
+                        'Special Sub-Frame config to 6 by default.'.format(
+                                self.PARAM_SSF))
+                new_config.ssf_config = 6
+            else:
+                new_config.ssf_config = int(values[1])
+
+        # Setup bandwidth
+
+        values = self.consume_parameter(parameters, self.PARAM_BW, 1)
+
+        if not values:
+            raise ValueError(
+                    "The test name needs to include parameter {} followed by an "
+                    "int value (to indicate 1.4 MHz use 14).".format(
+                            self.PARAM_BW))
+
+        bw = float(values[1])
+
+        if bw == 14:
+            bw = 1.4
+
+        new_config.bandwidth = bw
+
+        # Setup mimo mode
+
+        values = self.consume_parameter(parameters, self.PARAM_MIMO, 1)
+
+        if not values:
+            raise ValueError(
+                    "The test name needs to include parameter '{}' followed by the "
+                    "mimo mode.".format(self.PARAM_MIMO))
+
+        for mimo_mode in MimoMode:
+            if values[1] == mimo_mode.value:
+                new_config.mimo_mode = mimo_mode
+                break
+        else:
+            raise ValueError("The {} parameter needs to be followed by either "
+                             "1x1, 2x2 or 4x4.".format(self.PARAM_MIMO))
+
+        if (new_config.mimo_mode == MimoMode.MIMO_4x4
+                    and not self.simulator.LTE_SUPPORTS_4X4_MIMO):
+            raise ValueError("The test requires 4x4 MIMO, but that is not "
+                             "supported by the cellular simulator.")
+
+        # Setup transmission mode
+
+        values = self.consume_parameter(parameters, self.PARAM_TM, 1)
+
+        if not values:
+            raise ValueError(
+                    "The test name needs to include parameter {} followed by "
+                    "an int value indicating the transmission mode (1, 2, 3, "
+                    "4, 7, 8 or 9).".format(self.PARAM_TM))
+
+        for tm in TransmissionMode:
+            if values[1] == tm.value[2:]:
+                new_config.transmission_mode = tm
+                break
+        else:
+            raise ValueError("The {} parameter needs to be followed by either "
+                             "TM1, TM2, TM3, TM4, TM7, TM8 or TM9.".format(
+                                     self.PARAM_MIMO))
+
+        # Setup scheduling mode
+
+        values = self.consume_parameter(parameters, self.PARAM_SCHEDULING, 1)
+
+        if not values:
+            new_config.scheduling_mode = SchedulingMode.STATIC
+            self.log.warning(
+                    "The test name does not include the '{}' parameter. Setting to "
+                    "static by default.".format(self.PARAM_SCHEDULING))
+        elif values[1] == self.PARAM_SCHEDULING_DYNAMIC:
+            new_config.scheduling_mode = SchedulingMode.DYNAMIC
+        elif values[1] == self.PARAM_SCHEDULING_STATIC:
+            new_config.scheduling_mode = SchedulingMode.STATIC
+        else:
+            raise ValueError(
+                    "The test name parameter '{}' has to be followed by either "
+                    "'dynamic' or 'static'.".format(self.PARAM_SCHEDULING))
+
+        if new_config.scheduling_mode == SchedulingMode.STATIC:
+
+            values = self.consume_parameter(parameters, self.PARAM_PATTERN, 2)
+
+            if not values:
+                self.log.warning(
+                        "The '{}' parameter was not set, using 100% RBs for both "
+                        "DL and UL. To set the percentages of total RBs include "
+                        "the '{}' parameter followed by two ints separated by an "
+                        "underscore indicating downlink and uplink percentages."
+                        .format(self.PARAM_PATTERN, self.PARAM_PATTERN))
+                dl_pattern = 100
+                ul_pattern = 100
+            else:
+                dl_pattern = int(values[1])
+                ul_pattern = int(values[2])
+
+            if not (0 <= dl_pattern <= 100 and 0 <= ul_pattern <= 100):
+                raise ValueError(
+                        "The scheduling pattern parameters need to be two "
+                        "positive numbers between 0 and 100.")
+
+            new_config.dl_rbs, new_config.ul_rbs = (
+                    self.allocation_percentages_to_rbs(
+                            new_config.bandwidth, new_config.transmission_mode,
+                            dl_pattern, ul_pattern))
+
+            # Look for a DL MCS configuration in the test parameters. If it is
+            # not present, use a default value.
+            dlmcs = self.consume_parameter(parameters, self.PARAM_DL_MCS, 1)
+
+            if dlmcs:
+                new_config.dl_mcs = int(dlmcs[1])
+            else:
+                self.log.warning(
+                        'The test name does not include the {} parameter. Setting '
+                        'to the max value by default'.format(
+                                self.PARAM_DL_MCS))
+                if self.dl_256_qam and new_config.bandwidth == 1.4:
+                    new_config.dl_mcs = 26
+                elif (not self.dl_256_qam
+                      and self.primary_config.tbs_pattern_on
+                      and new_config.bandwidth != 1.4):
+                    new_config.dl_mcs = 28
+                else:
+                    new_config.dl_mcs = 27
+
+            # Look for an UL MCS configuration in the test parameters. If it is
+            # not present, use a default value.
+            ulmcs = self.consume_parameter(parameters, self.PARAM_UL_MCS, 1)
+
+            if ulmcs:
+                new_config.ul_mcs = int(ulmcs[1])
+            else:
+                self.log.warning(
+                        'The test name does not include the {} parameter. Setting '
+                        'to the max value by default'.format(
+                                self.PARAM_UL_MCS))
+                if self.ul_64_qam:
+                    new_config.ul_mcs = 28
+                else:
+                    new_config.ul_mcs = 23
+
+        # Configure the simulation for DRX mode
+
+        drx = self.consume_parameter(parameters, self.PARAM_DRX, 5)
+
+        if drx and len(drx) == 6:
+            new_config.drx_connected_mode = True
+            new_config.drx_on_duration_timer = drx[1]
+            new_config.drx_inactivity_timer = drx[2]
+            new_config.drx_retransmission_timer = drx[3]
+            new_config.drx_long_cycle = drx[4]
+            try:
+                long_cycle = int(drx[4])
+                long_cycle_offset = int(drx[5])
+                if long_cycle_offset in range(0, long_cycle):
+                    new_config.drx_long_cycle_offset = long_cycle_offset
+                else:
+                    self.log.error(
+                            ("The cDRX long cycle offset must be in the "
+                             "range 0 to (long cycle - 1). Setting "
+                             "long cycle offset to 0"))
+                    new_config.drx_long_cycle_offset = 0
+
+            except ValueError:
+                self.log.error(("cDRX long cycle and long cycle offset "
+                                "must be integers. Disabling cDRX mode."))
+                new_config.drx_connected_mode = False
+        else:
+            self.log.warning(("DRX mode was not configured properly. "
+                              "Please provide the following 5 values: "
+                              "1) DRX on duration timer "
+                              "2) Inactivity timer "
+                              "3) Retransmission timer "
+                              "4) Long DRX cycle duration "
+                              "5) Long DRX cycle offset "
+                              "Example: drx_2_6_16_20_0"))
+
+        # Setup LTE RRC status change function and timer for LTE idle test case
+        values = self.consume_parameter(parameters,
+                                        self.PARAM_RRC_STATUS_CHANGE_TIMER, 1)
+        if not values:
+            self.log.info(
+                    "The test name does not include the '{}' parameter. Disabled "
+                    "by default.".format(self.PARAM_RRC_STATUS_CHANGE_TIMER))
+            self.simulator.set_lte_rrc_state_change_timer(False)
+        else:
+            timer = int(values[1])
+            self.simulator.set_lte_rrc_state_change_timer(True, timer)
+            self.rrc_sc_timer = timer
+
+        # Control Format Indicator (CFI)
+        values = self.consume_parameter(parameters, self.PARAM_CFI, 1)
+
+        if not values:
+            self.log.warning('The {} parameter was not provided. Setting '
+                             'CFI to BESTEFFORT.'.format(self.PARAM_CFI))
+            new_config.cfi = 'BESTEFFORT'
+        else:
+            new_config.cfi = values[1]
+
+        # PHICH group size
+        values = self.consume_parameter(parameters, self.PARAM_PHICH, 1)
+
+        if not values:
+            self.log.warning('The {} parameter was not provided. Setting '
+                             'PHICH group size to 1 by default.'.format(
+                                     self.PARAM_PHICH))
+            new_config.phich = '1'
+        else:
+            if values[1] == '16':
+                new_config.phich = '1/6'
+            elif values[1] == '12':
+                new_config.phich = '1/2'
+            elif values[1] in ['1/6', '1/2', '1', '2']:
+                new_config.phich = values[1]
+            else:
+                raise ValueError('The {} parameter can only be followed by '
+                                 '1, 2, 1/2 (or 12) or 1/6 (or 16).'.format(
+                                         self.PARAM_PHICH))
+
+        # Paging cycle duration
+        values = self.consume_parameter(parameters, self.PARAM_PAGING, 1)
+
+        if not values:
+            self.log.warning('The {} parameter was not provided. Setting '
+                             'paging cycle duration to 1280 ms by '
+                             'default.'.format(self.PARAM_PAGING))
+            new_config.paging_cycle = 1280
+        else:
+            try:
+                new_config.paging_cycle = int(values[1])
+            except ValueError:
+                raise ValueError(
+                        'The {} parameter has to be followed by the paging cycle '
+                        'duration in milliseconds.'.format(self.PARAM_PAGING))
+
+        # Get uplink power
+
+        ul_power = self.get_uplink_power_from_parameters(parameters)
+
+        # Power is not set on the callbox until after the simulation is
+        # started. Saving this value in a variable for later
+        self.sim_ul_power = ul_power
+
+        # Get downlink power
+
+        dl_power = self.get_downlink_power_from_parameters(parameters)
+
+        # Power is not set on the callbox until after the simulation is
+        # started. Saving this value in a variable for later
+        self.sim_dl_power = dl_power
+
+        # Setup the base station with the obtained configuration and then save
+        # these parameters in the current configuration object
+        self.simulator.configure_bts(new_config)
+        self.primary_config.incorporate(new_config)
+
+        # Now that the band is set, calibrate the link if necessary
+        self.load_pathloss_if_required()
+
+    def calibrated_downlink_rx_power(self, bts_config, rsrp):
+        """ LTE simulation overrides this method so that it can convert from
+        RSRP to total signal power transmitted from the base station.
+
+        Args:
+            bts_config: the current configuration at the base station
+            rsrp: desired rsrp, contained in a key value pair
+        """
+
+        power = self.rsrp_to_signal_power(rsrp, bts_config)
+
+        self.log.info(
+                "Setting downlink signal level to {} RSRP ({} dBm)".format(
+                        rsrp, power))
+
+        # Use parent method to calculate signal level
+        return super(LteSimulation,
+                     self).calibrated_downlink_rx_power(bts_config, power)
+
+    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
+        """ Computes downlink path loss and returns the calibration value.
+
+        See base class implementation for details.
+
+        Args:
+            rat: ignored, always replaced by 'lteDbm'
+            power_units_conversion_func: ignored, replaced by
+                self.rsrp_to_signal_power
+
+        Returns:
+            Downlink calibration value and measured DL power. Note that the
+            phone only reports RSRP of the primary chain
+        """
+
+        return super().downlink_calibration(
+                rat='lteDbm',
+                power_units_conversion_func=self.rsrp_to_signal_power)
+
+    def rsrp_to_signal_power(self, rsrp, bts_config):
+        """ Converts rsrp to total band signal power
+
+        RSRP is measured per 15 kHz subcarrier, so the total band power is the
+        RSRP scaled by the number of occupied subcarriers (a fixed offset in
+        dB for each bandwidth).
+
+        Args:
+            rsrp: desired rsrp in dBm
+            bts_config: a base station configuration object
+        Returns:
+            Total band signal power in dBm
+        """
+
+        bandwidth = bts_config.bandwidth
+
+        if bandwidth == 20:  # 100 RBs
+            power = rsrp + 30.79
+        elif bandwidth == 15:  # 75 RBs
+            power = rsrp + 29.54
+        elif bandwidth == 10:  # 50 RBs
+            power = rsrp + 27.78
+        elif bandwidth == 5:  # 25 RBs
+            power = rsrp + 24.77
+        elif bandwidth == 3:  # 15 RBs
+            power = rsrp + 22.55
+        elif bandwidth == 1.4:  # 6 RBs
+            power = rsrp + 18.57
+        else:
+            raise ValueError("Invalid bandwidth value.")
+
+        return power
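+        # The fixed offsets above are 10 * log10(12 * N_RB), i.e. the ratio
+        # between one 15 kHz subcarrier and all occupied subcarriers. A quick
+        # check for 10 MHz (50 RBs, illustrative only):
+        #   10 * math.log10(12 * 50) = 27.78 dB
+        # so an RSRP of -85 dBm corresponds to about -57.2 dBm total band power.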
+
+    def maximum_downlink_throughput(self):
+        """ Calculates maximum achievable downlink throughput in the current
+            simulation state.
+
+        Returns:
+            Maximum throughput in mbps.
+
+        """
+
+        return self.bts_maximum_downlink_throughtput(self.primary_config)
+
+    def bts_maximum_downlink_throughtput(self, bts_config):
+        """ Calculates maximum achievable downlink throughput for a single
+        base station from its configuration object.
+
+        Args:
+            bts_config: a base station configuration object.
+
+        Returns:
+            Maximum throughput in mbps.
+
+        """
+        if bts_config.mimo_mode == MimoMode.MIMO_1x1:
+            streams = 1
+        elif bts_config.mimo_mode == MimoMode.MIMO_2x2:
+            streams = 2
+        elif bts_config.mimo_mode == MimoMode.MIMO_4x4:
+            streams = 4
+        else:
+            raise ValueError('Unable to calculate maximum downlink throughput '
+                             'because the MIMO mode has not been set.')
+
+        bandwidth = bts_config.bandwidth
+        rb_ratio = bts_config.dl_rbs / self.total_rbs_dictionary[bandwidth]
+        mcs = bts_config.dl_mcs
+
+        max_rate_per_stream = None
+
+        tdd_subframe_config = bts_config.dlul_config
+        duplex_mode = self.get_duplex_mode(bts_config.band)
+
+        if duplex_mode == DuplexMode.TDD:
+            if self.dl_256_qam:
+                if mcs == 27:
+                    if bts_config.tbs_pattern_on:
+                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
+                                'TDD_CONFIG3'][tdd_subframe_config][bandwidth][
+                                        'DL']
+                    else:
+                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
+                                'TDD_CONFIG2'][tdd_subframe_config][bandwidth][
+                                        'DL']
+            else:
+                if mcs == 28:
+                    if bts_config.tbs_pattern_on:
+                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
+                                'TDD_CONFIG4'][tdd_subframe_config][bandwidth][
+                                        'DL']
+                    else:
+                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
+                                'TDD_CONFIG1'][tdd_subframe_config][bandwidth][
+                                        'DL']
+
+        elif duplex_mode == DuplexMode.FDD:
+            if (not self.dl_256_qam and bts_config.tbs_pattern_on
+                        and mcs == 28):
+                max_rate_per_stream = {
+                        3: 9.96,
+                        5: 17.0,
+                        10: 34.7,
+                        15: 52.7,
+                        20: 72.2
+                }.get(bandwidth, None)
+            if (not self.dl_256_qam and bts_config.tbs_pattern_on
+                        and mcs == 27):
+                max_rate_per_stream = {
+                        1.4: 2.94,
+                }.get(bandwidth, None)
+            elif (not self.dl_256_qam and not bts_config.tbs_pattern_on
+                  and mcs == 27):
+                max_rate_per_stream = {
+                        1.4: 2.87,
+                        3: 7.7,
+                        5: 14.4,
+                        10: 28.7,
+                        15: 42.3,
+                        20: 57.7
+                }.get(bandwidth, None)
+            elif self.dl_256_qam and bts_config.tbs_pattern_on and mcs == 27:
+                max_rate_per_stream = {
+                        3: 13.2,
+                        5: 22.9,
+                        10: 46.3,
+                        15: 72.2,
+                        20: 93.9
+                }.get(bandwidth, None)
+            elif self.dl_256_qam and bts_config.tbs_pattern_on and mcs == 26:
+                max_rate_per_stream = {
+                        1.4: 3.96,
+                }.get(bandwidth, None)
+            elif (self.dl_256_qam and not bts_config.tbs_pattern_on
+                  and mcs == 27):
+                max_rate_per_stream = {
+                        3: 11.3,
+                        5: 19.8,
+                        10: 44.1,
+                        15: 68.1,
+                        20: 88.4
+                }.get(bandwidth, None)
+            elif (self.dl_256_qam and not bts_config.tbs_pattern_on
+                  and mcs == 26):
+                max_rate_per_stream = {
+                        1.4: 3.96,
+                }.get(bandwidth, None)
+
+        if not max_rate_per_stream:
+            raise NotImplementedError(
+                    "The calculation for tbs pattern = {} "
+                    "and mcs = {} is not implemented.".format(
+                            "FULLALLOCATION"
+                            if bts_config.tbs_pattern_on else "OFF", mcs))
+
+        return max_rate_per_stream * streams * rb_ratio
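+        # Illustrative example using the FDD table above (assumed inputs):
+        # 64QAM DL (dl_256_qam off), tbs_pattern_on, MCS 28, 20 MHz with all
+        # 100 RBs allocated and 2x2 MIMO gives
+        #   72.2 Mbps/stream * 2 streams * (100 / 100) = 144.4 Mbps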
+
+    def maximum_uplink_throughput(self):
+        """ Calculates maximum achievable uplink throughput in the current
+            simulation state.
+
+        Returns:
+            Maximum throughput in mbps.
+
+        """
+
+        return self.bts_maximum_uplink_throughtput(self.primary_config)
+
+    def bts_maximum_uplink_throughtput(self, bts_config):
+        """ Calculates maximum achievable uplink throughput for the selected
+        base station from its configuration object.
+
+        Args:
+            bts_config: an LTE base station configuration object.
+
+        Returns:
+            Maximum throughput in mbps.
+
+        """
+
+        bandwidth = bts_config.bandwidth
+        rb_ratio = bts_config.ul_rbs / self.total_rbs_dictionary[bandwidth]
+        mcs = bts_config.ul_mcs
+
+        max_rate_per_stream = None
+
+        tdd_subframe_config = bts_config.dlul_config
+        duplex_mode = self.get_duplex_mode(bts_config.band)
+
+        if duplex_mode == DuplexMode.TDD:
+            if self.ul_64_qam:
+                if mcs == 28:
+                    if bts_config.tbs_pattern_on:
+                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
+                                'TDD_CONFIG3'][tdd_subframe_config][bandwidth][
+                                        'UL']
+                    else:
+                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
+                                'TDD_CONFIG2'][tdd_subframe_config][bandwidth][
+                                        'UL']
+            else:
+                if mcs == 23:
+                    if bts_config.tbs_pattern_on:
+                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
+                                'TDD_CONFIG4'][tdd_subframe_config][bandwidth][
+                                        'UL']
+                    else:
+                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
+                                'TDD_CONFIG1'][tdd_subframe_config][bandwidth][
+                                        'UL']
+
+        elif duplex_mode == DuplexMode.FDD:
+            if mcs == 23 and not self.ul_64_qam:
+                max_rate_per_stream = {
+                        1.4: 2.85,
+                        3: 7.18,
+                        5: 12.1,
+                        10: 24.5,
+                        15: 36.5,
+                        20: 49.1
+                }.get(bandwidth, None)
+            elif mcs == 28 and self.ul_64_qam:
+                max_rate_per_stream = {
+                        1.4: 4.2,
+                        3: 10.5,
+                        5: 17.2,
+                        10: 35.3,
+                        15: 53.0,
+                        20: 72.6
+                }.get(bandwidth, None)
+
+        if not max_rate_per_stream:
+            raise NotImplementedError(
+                    "The calculation for tbs pattern = {} "
+                    "and mcs = {} is not implemented.".format(
+                            "FULLALLOCATION"
+                            if bts_config.tbs_pattern_on else "OFF", mcs))
+
+        return max_rate_per_stream * rb_ratio
+
+    def allocation_percentages_to_rbs(self, bw, tm, dl, ul):
+        """ Converts usage percentages to number of DL/UL RBs
+
+        Because not every number of DL/UL RBs is attainable for a given
+        bandwidth, this function calculates the numbers of RBs that most
+        closely match the desired DL/UL percentages.
+
+        Args:
+            bw: the bandwidth for which the RB configuration is requested
+            tm: the transmission mode in which the base station will be
+                operating
+            dl: desired percentage of downlink RBs
+            ul: desired percentage of uplink RBs
+        Returns:
+            a tuple indicating the number of downlink and uplink RBs
+        """
+
+        # Validate the arguments
+        if (not 0 <= dl <= 100) or (not 0 <= ul <= 100):
+            raise ValueError("The percentage of DL and UL RBs have to be two "
+                             "positive between 0 and 100.")
+
+        # Get min and max values from tables
+        max_rbs = self.total_rbs_dictionary[bw]
+        min_dl_rbs = self.min_dl_rbs_dictionary[bw]
+        min_ul_rbs = self.min_ul_rbs_dictionary[bw]
+
+        def percentage_to_amount(min_val, max_val, percentage):
+            """ Returns the integer between min_val and max_val that is closest
+            to percentage/100*max_val
+            """
+
+            # Calculate the value that corresponds to the required percentage.
+            closest_int = round(max_val * percentage / 100)
+            # Cannot be less than min_val
+            closest_int = max(closest_int, min_val)
+            # Cannot be more than max_val
+            closest_int = min(closest_int, max_val)
+
+            return closest_int
+
+        # Calculate the number of DL RBs
+
+        # Get the number of DL RBs that corresponds to
+        #  the required percentage.
+        desired_dl_rbs = percentage_to_amount(min_val=min_dl_rbs,
+                                              max_val=max_rbs,
+                                              percentage=dl)
+
+        if tm == TransmissionMode.TM3 or tm == TransmissionMode.TM4:
+
+            # For TM3 and TM4 the number of DL RBs needs to be max_rbs or a
+            # multiple of the RBG size
+
+            if desired_dl_rbs == max_rbs:
+                dl_rbs = max_rbs
+            else:
+                dl_rbs = (math.ceil(desired_dl_rbs / self.rbg_dictionary[bw]) *
+                          self.rbg_dictionary[bw])
+
+        else:
+            # The other TMs allow any number of RBs between 1 and max_rbs
+            dl_rbs = desired_dl_rbs
+
+        # Calculate the number of UL RBs
+
+        # Get the number of UL RBs that corresponds
+        # to the required percentage
+        desired_ul_rbs = percentage_to_amount(min_val=min_ul_rbs,
+                                              max_val=max_rbs,
+                                              percentage=ul)
+
+        # Create a list of all possible UL RBs assignment
+        # The standard allows any number that can be written as
+        # 2**a * 3**b * 5**c for any combination of a, b and c.
+
+        def pow_range(max_value, base):
+            """ Returns a range of all possible powers of base under
+              the given max_value.
+          """
+            return range(int(math.ceil(math.log(max_value, base))))
+
+        possible_ul_rbs = [
+            2**a * 3**b * 5**c for a in pow_range(max_rbs, 2)
+            for b in pow_range(max_rbs, 3)
+            for c in pow_range(max_rbs, 5)
+            if 2**a * 3**b * 5**c <= max_rbs] # yapf: disable
+
+        # Find the value in the list that is closest to desired_ul_rbs
+        differences = [abs(rbs - desired_ul_rbs) for rbs in possible_ul_rbs]
+        ul_rbs = possible_ul_rbs[differences.index(min(differences))]
+
+        # Report the obtained RB percentages
+        self.log.info("Requested a {}% / {}% RB allocation. Closest possible "
+                      "percentages are {}% / {}%.".format(
+                              dl, ul, round(100 * dl_rbs / max_rbs),
+                              round(100 * ul_rbs / max_rbs)))
+
+        return dl_rbs, ul_rbs
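+        # Illustrative example (assumed inputs): for bw=10 (50 RBs, RBG size 3
+        # per the tables above), tm=TM3, dl=75 and ul=60:
+        #   desired DL RBs = round(50 * 0.75) = 38, rounded up to a multiple
+        #   of the RBG size -> 39 RBs (78%)
+        #   desired UL RBs = round(50 * 0.60) = 30, and 30 = 2 * 3 * 5 is
+        #   already a valid 2**a * 3**b * 5**c value -> 30 RBs (60%)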
+
+    def calibrate(self, band):
+        """ Calculates UL and DL path loss if it wasn't done before
+
+        Before running the base class implementation, configure the base station
+        to only use one downlink antenna with maximum bandwidth.
+
+        Args:
+            band: the band that is currently being calibrated.
+        """
+
+        # Save initial values in a configuration object so they can be restored
+        restore_config = self.BtsConfig()
+        restore_config.mimo_mode = self.primary_config.mimo_mode
+        restore_config.transmission_mode = self.primary_config.transmission_mode
+        restore_config.bandwidth = self.primary_config.bandwidth
+
+        # Set up a temporary calibration configuration.
+        temporary_config = self.BtsConfig()
+        temporary_config.mimo_mode = MimoMode.MIMO_1x1
+        temporary_config.transmission_mode = TransmissionMode.TM1
+        temporary_config.bandwidth = max(
+                self.allowed_bandwidth_dictionary[int(band)])
+        self.simulator.configure_bts(temporary_config)
+        self.primary_config.incorporate(temporary_config)
+
+        super().calibrate(band)
+
+        # Restore values as they were before changing them for calibration.
+        self.simulator.configure_bts(restore_config)
+        self.primary_config.incorporate(restore_config)
+
+    def start_traffic_for_calibration(self):
+        """
+            If TBS pattern is set to full allocation, there is no need to start
+            IP traffic.
+        """
+        if not self.primary_config.tbs_pattern_on:
+            super().start_traffic_for_calibration()
+
+    def stop_traffic_for_calibration(self):
+        """
+            If TBS pattern is set to full allocation, IP traffic wasn't started
+        """
+        if not self.primary_config.tbs_pattern_on:
+            super().stop_traffic_for_calibration()
+
+    def get_duplex_mode(self, band):
+        """ Determines if the band uses FDD or TDD duplex mode
+
+        Args:
+            band: a band number
+        Returns:
+            a DuplexMode value indicating whether the band is FDD or TDD
+        """
+
+        if 33 <= int(band) <= 46:
+            return DuplexMode.TDD
+        else:
+            return DuplexMode.FDD
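For instance, with simulation standing for an instance of this class (band numbers follow 3GPP TS 36.101, where bands 33-46 are the TDD bands):

    assert simulation.get_duplex_mode('7') == DuplexMode.FDD   # 2600 MHz, FDD
    assert simulation.get_duplex_mode('41') == DuplexMode.TDD  # 2500 MHz, TDD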
+
+    def get_measured_ul_power(self, samples=5, wait_after_sample=3):
+        """ Calculates UL power using measurements from the callbox and the
+        calibration data.
+
+        Args:
+            samples: the number of samples to average
+            wait_after_sample: time in seconds to wait in between samples
+
+        Returns:
+            the ul power at the UE antenna ports in dBs
+        """
+        ul_power_sum = 0
+        samples_left = samples
+
+        while samples_left > 0:
+            ul_power_sum += self.simulator.get_measured_pusch_power()
+            samples_left -= 1
+            time.sleep(wait_after_sample)
+
+        # Got enough samples, return calibrated average
+        if self.ul_path_loss:
+            return ul_power_sum / samples + self.ul_path_loss
+        else:
+            self.log.warning('No uplink calibration data. Returning '
+                             'uncalibrated values as measured by the '
+                             'callbox.')
+            return ul_power_sum / samples
+
+    def send_sms(self, sms_message):
+        """ Sets the SMS message for the simulation. """
+        self.simulator.send_sms(sms_message)
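A numeric illustration of the averaging in get_measured_ul_power, using made-up PUSCH readings and a hypothetical 30 dB uplink path loss:

    readings = [-42.0, -41.5, -42.5, -42.0, -42.0]  # dBm, as measured
    ul_path_loss = 30.0                             # dB, from calibration
    ul_power_at_ue = sum(readings) / len(readings) + ul_path_loss
    print(ul_power_at_ue)  # -12.0 dBm at the UE antenna ports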
diff --git a/server/cros/packet_generation/__init__.py b/server/cros/cellular/simulation_utils/__init__.py
similarity index 100%
copy from server/cros/packet_generation/__init__.py
copy to server/cros/cellular/simulation_utils/__init__.py
diff --git a/server/cros/cellular/simulation_utils/common.py b/server/cros/cellular/simulation_utils/common.py
new file mode 100644
index 0000000..9c2b102
--- /dev/null
+++ b/server/cros/cellular/simulation_utils/common.py
@@ -0,0 +1,8 @@
+import os, sys
+dirname = os.path.dirname(sys.modules[__name__].__file__)
+autotest_dir = os.path.abspath(os.path.join(dirname, "..", "..", "..", ".."))
+client_dir = os.path.join(autotest_dir, "client")
+sys.path.insert(0, client_dir)
+import setup_modules
+sys.path.pop(0)
+setup_modules.setup(base_path=autotest_dir, root_module_name="autotest_lib")
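This is the standard autotest path-bootstrap stub; a module placed next to it would typically start like the sketch below (the imported module name is only an example):

    import common  # noqa: F401 -- sets up the autotest_lib import path
    from autotest_lib.client.common_lib import error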
diff --git a/server/cros/cfm/cfm_base_test.py b/server/cros/cfm/cfm_base_test.py
index c23f7d3..7a3ec9a 100644
--- a/server/cros/cfm/cfm_base_test.py
+++ b/server/cros/cfm/cfm_base_test.py
@@ -85,7 +85,7 @@
             self._host.servo.set('dut_hub1_rst1', 'off')
             time.sleep(SHORT_TIMEOUT)
         except error.TestFail:
-            logging.warn('Failed to configure servo. This is not fatal unless '
+            logging.warning('Failed to configure servo. This is not fatal unless '
                          'your test is explicitly using the servo.',
                          exc_info=True)
 
@@ -174,4 +174,3 @@
         except Exception as e:
             logging.exception(
                 'Exception while copying file "%s"', remote_path)
-
diff --git a/server/cros/cfm/configurable_test/README.md b/server/cros/cfm/configurable_test/README.md
index 478f4f9..5d8509a 100644
--- a/server/cros/cfm/configurable_test/README.md
+++ b/server/cros/cfm/configurable_test/README.md
@@ -28,24 +28,24 @@
 that have no easy way to modify other source code can create and modify
 such tests.
 
-For the test to be executed properly it has to subclass [`autotest_lib.server.cros.cfm.configurable_test.configurable_cfm_tests.ConfigurableCfmTest`](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/master/server/cros/cfm/configurable_test/configurable_cfm_test.py).
+For the test to be executed properly it has to subclass [`autotest_lib.server.cros.cfm.configurable_test.configurable_cfm_tests.ConfigurableCfmTest`](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/main/server/cros/cfm/configurable_test/configurable_cfm_test.py).
 
 ## Actions
 
 Each step in a scenario is an Action. The available actions are listed in
-[`autotest_lib.server.cros.cfm.configurable_test.actions`](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/master/server/cros/cfm/configurable_test/actions.py).
+[`autotest_lib.server.cros.cfm.configurable_test.actions`](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/main/server/cros/cfm/configurable_test/actions.py).
 
 ## Configuration
 
 Besides Actions, a test can be configured with configuration params that affect
 behavior outside of the actions. The available configuration flags are
 documented in
-[`autotest_lib.server.cros.cfm.configurable_test.configuration`](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/master/server/cros/cfm/configurable_test/configuration.py).
+[`autotest_lib.server.cros.cfm.configurable_test.configuration`](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/main/server/cros/cfm/configurable_test/configuration.py).
 
 ## Samples
 
 For complete samples see the Autotest
-[`enterprise_CFM_ConfigurableCfmTestSanity`](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/master/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/)
+[`enterprise_CFM_ConfigurableCfmTestSanity`](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/main/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/)
 that we use to test the framework itself.
 
 
diff --git a/server/cros/cfm/configurable_test/actions.py b/server/cros/cfm/configurable_test/actions.py
index 99618df..73a039d 100644
--- a/server/cros/cfm/configurable_test/actions.py
+++ b/server/cros/cfm/configurable_test/actions.py
@@ -55,11 +55,13 @@
         context.cfm_facade.unmute_mic()
 
 class WaitForMeetingsLandingPage(Action):
-  """
+    """
   Wait for landing page to load after reboot.
   """
-  def do_execute(self, context):
-    context.cfm_facade.wait_for_meetings_landing_page()
+
+    def do_execute(self, context):
+        context.cfm_facade.wait_for_meetings_landing_page()
+
 
 class JoinMeeting(Action):
     """
@@ -135,7 +137,7 @@
         return 'Repeat[scenario=%s, times=%s]' % (self.scenario, self.times)
 
     def do_execute(self, context):
-        for _ in xrange(self.times):
+        for _ in range(self.times):
             self.scenario.execute(context)
 
 class AssertFileDoesNotContain(Action):
@@ -233,7 +235,7 @@
         self._random = random.Random(random_seed)
 
     def do_execute(self, context):
-        for _ in xrange(self._run_times):
+        for _ in range(self._run_times):
             self._random.choice(self._scenarios).execute(context)
 
     def __repr__(self):
@@ -354,7 +356,7 @@
         self._retry_delay_seconds = retry_delay_seconds
 
     def do_execute(self, context):
-        for attempt in xrange(self._num_tries):
+        for attempt in range(self._num_tries):
             try:
                 self._action.execute(context)
                 return
@@ -407,7 +409,7 @@
     """
     if condition():
         return
-    for _ in xrange(timeout_seconds):
+    for _ in range(timeout_seconds):
         time.sleep(1)
         if condition():
             return
diff --git a/server/cros/cfm/configurable_test/actions_unittest.py b/server/cros/cfm/configurable_test/actions_unittest.py
index a3fb4b5..6481a20 100644
--- a/server/cros/cfm/configurable_test/actions_unittest.py
+++ b/server/cros/cfm/configurable_test/actions_unittest.py
@@ -1,5 +1,5 @@
-import mock
 import unittest
+from unittest import mock
 
 from autotest_lib.client.common_lib.cros.cfm.usb import usb_device
 from autotest_lib.client.common_lib.cros.cfm.usb import usb_device_spec
@@ -106,15 +106,17 @@
         action.execute(self.context_with_mocks)
 
     def test_select_scenario_at_random(self):
-        dummy_action1 = DummyAction()
-        dummy_action2 = DummyAction()
-        scenarios = [scenario.Scenario(dummy_action1),
-                     scenario.Scenario(dummy_action2)]
+        placeholder_action1 = StubAction()
+        placeholder_action2 = StubAction()
+        scenarios = [
+                scenario.Scenario(placeholder_action1),
+                scenario.Scenario(placeholder_action2)
+        ]
         action = actions.SelectScenarioAtRandom(scenarios, 10)
         action.execute(self.context_with_mocks)
         # Assert that our actions were executed the expected number of times.
-        total_executes = (dummy_action1.executed_times
-                          + dummy_action2.executed_times)
+        total_executes = (placeholder_action1.executed_times +
+                          placeholder_action2.executed_times)
         self.assertEqual(10, total_executes)
 
     def test_select_scenario_at_random_str_contains_seed(self):
@@ -122,12 +124,12 @@
         self.assertTrue('seed=123' in str(action))
 
     def test_select_scenario_at_random_same_seed_same_actions(self):
-        scenario1_action1 = DummyAction()
-        scenario1_action2 = DummyAction()
+        scenario1_action1 = StubAction()
+        scenario1_action2 = StubAction()
         scenarios1 = [scenario.Scenario(scenario1_action1),
                      scenario.Scenario(scenario1_action2)]
-        scenario2_action1 = DummyAction()
-        scenario2_action2 = DummyAction()
+        scenario2_action1 = StubAction()
+        scenario2_action2 = StubAction()
         scenarios2 = [scenario.Scenario(scenario2_action1),
                      scenario.Scenario(scenario2_action2)]
         action1 = actions.SelectScenarioAtRandom(scenarios1, 100, 0)
@@ -214,7 +216,8 @@
     def collect_file_contents(self, path):
         return self.contents
 
-class DummyAction(actions.Action):
+
+class StubAction(actions.Action):
     def __init__(self):
         self.executed_times = 0
 
@@ -229,4 +232,3 @@
         if not self.executed:
             self.executed = True
             raise AssertionError()
-
diff --git a/server/cros/cfm/configurable_test/configurable_cfm_test_unittest.py b/server/cros/cfm/configurable_test/configurable_cfm_test_unittest.py
index c226e96..415456f 100644
--- a/server/cros/cfm/configurable_test/configurable_cfm_test_unittest.py
+++ b/server/cros/cfm/configurable_test/configurable_cfm_test_unittest.py
@@ -1,6 +1,5 @@
 import unittest
-
-from mock import MagicMock
+from unittest.mock import MagicMock
 
 from autotest_lib.server.cros.cfm.configurable_test import action_context
 from autotest_lib.server.cros.cfm.configurable_test import configurable_cfm_test
@@ -44,4 +43,3 @@
         runner.run_test(cfm_test)
         cfm_facade_mock.start_meeting_session.assert_called_once_with()
         self.assertEqual(5, cfm_facade_mock.mute_mic.call_count)
-
diff --git a/server/cros/cfm_jmidata_log_collector.py b/server/cros/cfm_jmidata_log_collector.py
index dbb70e1..7994294 100644
--- a/server/cros/cfm_jmidata_log_collector.py
+++ b/server/cros/cfm_jmidata_log_collector.py
@@ -4,6 +4,8 @@
 
 import logging
 
+import common
+
 from autotest_lib.server.cros import cfm_jmidata_v3_helper
 
 
@@ -68,4 +70,3 @@
     # Ensure we always return at least one element, or perf uploads will be
     # sad.
     return data_array or [0]
-
diff --git a/server/cros/cfm_jmidata_log_collector_unittest.py b/server/cros/cfm_jmidata_log_collector_unittest.py
index 4660a88..89a7dcf 100644
--- a/server/cros/cfm_jmidata_log_collector_unittest.py
+++ b/server/cros/cfm_jmidata_log_collector_unittest.py
@@ -1,6 +1,8 @@
 import unittest
 
-import cfm_jmidata_log_collector
+import common
+
+from autotest_lib.server.cros import cfm_jmidata_log_collector
 
 
 class CfmJmidataLogCollectorTest(unittest.TestCase):
@@ -286,4 +288,3 @@
 'diaType":"video","googFrameHeightSent":720,"googFrameRateSent":24,"googCode'
 'cName":"VP8","googAdaptationChanges":0,"ssrc":3865366536,"googFirsReceived"'
 ':0,"packetsSent":1869,"bytesSent":1796767,"googAdaptationReason":0}}]')
-
diff --git a/server/cros/chaos_ap_list.conf b/server/cros/chaos_ap_list.conf
index 3aef32e..bf9e611 100644
--- a/server/cros/chaos_ap_list.conf
+++ b/server/cros/chaos_ap_list.conf
@@ -224,36 +224,37 @@
 psk = chromeos
 class_name = StaticAPConfigurator
 
-[0c:9d:92:02:40:f1]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host9
-ssid = asus_ax88u_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A9
-bss = 0c:9d:92:02:40:f1
-wan mac = 0c:9d:92:02:40:f0
-model = ax88u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
+### Unable to physically find router. http://b/187830256
+### [0c:9d:92:02:40:f1]
+### brand = asus
+### wan_hostname = chromeos3-row2-rack1-host9
+### ssid = asus_ax88u_n_ch11_wpa2
+### frequency = 2462
+### rpm_managed = True
+### rpm_hostname = chromeos3-row2-rack1-rpm1
+### rpm_outlet = .A9
+### bss = 0c:9d:92:02:40:f1
+### wan mac = 0c:9d:92:02:40:f0
+### model = ax88u
+### security = wpa2
+### psk = chromeos
+### class_name = StaticAPConfigurator
 
-[0c:9d:92:02:40:f4]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host9
-ssid = asus_ax88u_ac_ax_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A9
-bss = 0c:9d:92:02:40:f4
-wan mac = 0c:9d:92:02:40:f0
-model = ax88u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
+### Unable to physically find router. http://b/187830256
+### [0c:9d:92:02:40:f4]
+### brand = asus
+### wan_hostname = chromeos3-row2-rack1-host9
+### ssid = asus_ax88u_ac_ax_ch153_wpa2
+### frequency = 5765
+### rpm_managed = True
+### rpm_hostname = chromeos3-row2-rack1-rpm1
+### rpm_outlet = .A9
+### bss = 0c:9d:92:02:40:f4
+### wan mac = 0c:9d:92:02:40:f0
+### model = ax88u
+### security = wpa2
+### psk = chromeos
+### class_name = StaticAPConfigurator
 
 [00:11:32:a5:90:c2]
 brand = synology
@@ -266,14 +267,14 @@
 bss = 00:11:32:a5:90:c2
 wan mac = 00:11:32:a5:90:c0
 model = mr2200
-security = wpa2_3
+security = wpa2
 psk = chromeos
 class_name = StaticAPConfigurator
 
 [00:11:32:a5:90:c3]
 brand = synology
 wan_hostname = chromeos3-row2-rack1-host10
-ssid = synology_mr2200__n_ac_ch153_wpa2_3
+ssid = synology_mr2200__ac_ch153_wpa23
 frequency = 5765
 rpm_managed = True
 rpm_hostname = chromeos3-row2-rack1-rpm1
@@ -285,20 +286,21 @@
 psk = chromeos
 class_name = StaticAPConfigurator
 
-[00:11:32:a5:90:c4]
-brand = synology
-wan_hostname = chromeos3-row2-rack1-host10
-ssid = synology_mr2200__ac_ch40_wpa2_3
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A10
-bss = 00:11:32:a5:90:c4
-wan mac = 00:11:32:a5:90:c0
-model = mr2200
-security = wpa2_3
-psk = chromeos
-class_name = StaticAPConfigurator
+### http://b/194491244
+### [00:11:32:a5:90:c4]
+### brand = synology
+### wan_hostname = chromeos3-row2-rack1-host10
+### ssid = synology_mr2200__ac_ch40_wpa2_3
+### frequency = 5200
+### rpm_managed = True
+### rpm_hostname = chromeos3-row2-rack1-rpm1
+### rpm_outlet = .A10
+### bss = 00:11:32:a5:90:c4
+### wan mac = 00:11:32:a5:90:c0
+### model = mr2200
+### security = wpa2_3
+### psk = chromeos
+### class_name = StaticAPConfigurator
 
 [94:44:52:18:f0:a7]
 brand = belkin
@@ -1682,14 +1684,14 @@
 [d8:0d:17:b2:8e:37]
 brand = tp-link
 wan_hostname = chromeos3-row2-rack4-host11
-ssid = tplink_a9__n_ch6_wpa2
+ssid = tplink_c9__n_ch6_wpa2
 frequency = 2437
 rpm_managed = True
 rpm_hostname = chromeos3-row2-rack4-rpm1
 rpm_outlet = .A11
 bss = d8:0d:17:b2:8e:37
 wan mac = d8:0d:17:b2:8e:39
-model = tplink_a9
+model = tplink_c9
 security = wpa2
 psk = chromeos
 class_name = StaticAPConfigurator
@@ -1697,14 +1699,14 @@
 [d8:0d:17:b2:8e:36]
 brand = tp-link
 wan_hostname = chromeos3-row2-rack4-host11
-ssid = tplink_a9__ac_ch40_wpa2
+ssid = tplink_c9__ac_ch40_wpa2
 frequency = 5200
 rpm_managed = True
 rpm_hostname = chromeos3-row2-rack4-rpm1
 rpm_outlet = .A11
 bss = d8:0d:17:b2:8e:36
 wan mac = d8:0d:17:b2:8e:39
-model = tplink_a9
+model = tplink_c9
 security = wpa2
 psk = chromeos
 class_name = StaticAPConfigurator
@@ -1844,20 +1846,21 @@
 psk = chromeos
 class_name = StaticAPConfigurator
 
-[04:d4:c4:08:67:08]
-brand = asus
-wan_hostname = chromeos3-row2-rack4-host17
-ssid = asus_ac88u__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A17
-bss = 04:d4:c4:08:67:08
-wan mac = 04:d4:c4:08:67:08
-model = ac3100
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
+### Unable to physically find router. http://b/187830256
+### [04:d4:c4:08:67:08]
+### brand = asus
+### wan_hostname = chromeos3-row2-rack4-host17
+### ssid = asus_ac88u__n_ch6_wpa2
+### frequency = 2437
+### rpm_managed = True
+### rpm_hostname = chromeos3-row2-rack4-rpm1
+### rpm_outlet = .A17
+### bss = 04:d4:c4:08:67:08
+### wan mac = 04:d4:c4:08:67:08
+### model = ac3100
+### security = wpa2
+### psk = chromeos
+### class_name = StaticAPConfigurator
 
 ### b/153560792
 ### [04:d4:c4:08:67:0c]
@@ -1905,37 +1908,35 @@
 psk = chromeos
 class_name = StaticAPConfigurator
 
-### b/153560792
-### [4c:ed:fb:7b:a2:88]
-### brand = asus
-### wan_hostname = chromeos3-row2-rack4-host19
-### ssid = asus_ac1900p__n_ch1_wpa2
-### frequency = 2412
-### rpm_managed = True
-### rpm_hostname = chromeos3-row2-rack4-rpm1
-### rpm_outlet = .A19
-### bss = 4c:ed:fb:7b:a2:88
-### wan mac = 4c:ed:fb:7b:a2:88
-### model = ac1900p
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
+[4c:ed:fb:7b:a2:88]
+brand = asus
+wan_hostname = chromeos3-row2-rack4-host19
+ssid = asus_ac1900p__n_ch1_wpa2
+frequency = 2412
+rpm_managed = True
+rpm_hostname = chromeos3-row2-rack4-rpm1
+rpm_outlet = .A19
+bss = 4c:ed:fb:7b:a2:88
+wan mac = 4c:ed:fb:7b:a2:88
+model = ac1900p
+security = wpa2
+psk = chromeos
+class_name = StaticAPConfigurator
 
-### b/153560792
-### [4c:ed:fb:7b:a2:8c]
-### brand = asus
-### wan_hostname = chromeos3-row2-rack4-host19
-### ssid = asus_ac1900p__ac_ch36_wpa2
-### frequency = 5180
-### rpm_managed = True
-### rpm_hostname = chromeos3-row2-rack4-rpm1
-### rpm_outlet = .A19
-### bss = 4c:ed:fb:7b:a2:8c
-### wan mac = 4c:ed:fb:7b:a2:88
-### model = ac1900p
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
+[4c:ed:fb:7b:a2:8c]
+brand = asus
+wan_hostname = chromeos3-row2-rack4-host19
+ssid = asus_ac1900p__ac_ch36_wpa2
+frequency = 5180
+rpm_managed = True
+rpm_hostname = chromeos3-row2-rack4-rpm1
+rpm_outlet = .A19
+bss = 4c:ed:fb:7b:a2:8c
+wan mac = 4c:ed:fb:7b:a2:88
+model = ac1900p
+security = wpa2
+psk = chromeos
+class_name = StaticAPConfigurator
 
 [74:03:bd:00:8a:60]
 brand = buffalo
@@ -1943,7 +1944,7 @@
 ssid = buffalo_whr1166d__n_ch1_wpa2
 frequency = 2412
 rpm_managed = True
-rpm_hostname = chromeos3-row3-rack4-rpm1
+rpm_hostname = chromeos3-row2-rack4-rpm1
 rpm_outlet = .A20
 bss = 74:03:bd:00:8a:60
 wan mac = 74:03:bd:00:8a:60
@@ -1958,7 +1959,7 @@
 ssid = buffalo_whr1166d__ac_ch149_wpa2
 frequency = 5745
 rpm_managed = True
-rpm_hostname = chromeos3-row3-rack4-rpm1
+rpm_hostname = chromeos3-row2-rack4-rpm1
 rpm_outlet = .A20
 bss = 74:03:bd:00:8a:64
 wan mac = 74:03:bd:00:8a:60
@@ -2117,21 +2118,20 @@
 psk = chromeos
 class_name = StaticAPConfigurator
 
-### b/153560792
-### [74:da:38:f6:62:6a]
-### brand = edimax
-### wan_hostname = chromeos3-row3-rack1-host6
-### ssid = edimax_br6478ac__n_ch6_wpa2
-### frequency = 2437
-### rpm_managed = True
-### rpm_hostname = chromeos3-row3-rack1-rpm1
-### rpm_outlet = .A6
-### bss = 74:da:38:f6:62:6a
-### wan mac = 74:da:38:f6:62:6c
-### model = br6478ac
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
+[74:da:38:f6:62:6a]
+brand = edimax
+wan_hostname = chromeos3-row3-rack1-host6
+ssid = edimax_br6478ac__n_ch6_wpa2
+frequency = 2437
+rpm_managed = True
+rpm_hostname = chromeos3-row3-rack1-rpm1
+rpm_outlet = .A6
+bss = 74:da:38:f6:62:6a
+wan mac = 74:da:38:f6:62:6c
+model = br6478ac
+security = wpa2
+psk = chromeos
+class_name = StaticAPConfigurator
 
 [74:da:38:f6:62:6b]
 brand = edimax
@@ -2148,35 +2148,37 @@
 psk = chromeos
 class_name = StaticAPConfigurator
 
-[e4:90:7e:f5:67:b4]
-brand = motorola
-wan_hostname = chromeos3-row3-rack1-host7
-ssid = motorola_mr1900__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A7
-bss = e4:90:7e:f5:67:b4
-wan mac = e4:90:7e:f5:67:b0
-model = mr1900
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
+### Lab team cannot log in to router. http://b/187830256
+### [e4:90:7e:f5:67:b4]
+### brand = motorola
+### wan_hostname = chromeos3-row3-rack1-host7
+### ssid = motorola_mr1900__n_ch1_wpa2
+### frequency = 2412
+### rpm_managed = True
+### rpm_hostname = chromeos3-row3-rack1-rpm1
+### rpm_outlet = .A7
+### bss = e4:90:7e:f5:67:b4
+### wan mac = e4:90:7e:f5:67:b0
+### model = mr1900
+### security = wpa2
+### psk = chromeos
+### class_name = StaticAPConfigurator
 
-[e4:90:7e:f5:67:ba]
-brand = motorola
-wan_hostname = chromeos3-row3-rack1-host7
-ssid = motorola_mr1900__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A7
-bss = e4:90:7e:f5:67:ba
-wan mac = e4:90:7e:f5:67:b0
-model = mr1900
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
+### Lab team cannot log in to router. http://b/187830256
+### [e4:90:7e:f5:67:ba]
+### brand = motorola
+### wan_hostname = chromeos3-row3-rack1-host7
+### ssid = motorola_mr1900__ac_ch36_wpa2
+### frequency = 5180
+### rpm_managed = True
+### rpm_hostname = chromeos3-row3-rack1-rpm1
+### rpm_outlet = .A7
+### bss = e4:90:7e:f5:67:ba
+### wan mac = e4:90:7e:f5:67:b0
+### model = mr1900
+### security = wpa2
+### psk = chromeos
+### class_name = StaticAPConfigurator
 
 [50:c7:bf:de:3f:75]
 brand = iqrouter
@@ -2211,7 +2213,7 @@
 [80:3f:5d:50:57:46]
 brand = meco
 wan_hostname = chromeos3-row3-rack1-host9
-ssid = meco_n300 __n_ch6_wpa2 
+ssid = meco_n300__n_ch6_wpa2
 frequency = 2437
 rpm_managed = True
 rpm_hostname = chromeos3-row3-rack1-rpm1
@@ -2344,35 +2346,37 @@
 psk = chromeos
 class_name = StaticAPConfigurator
 
-[88:dc:96:6e:15:9a]
-brand = engenius
-wan_hostname = chromeos3-row3-rack1-host14
-ssid = engenius_eap1300ext__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A14
-bss = 88:dc:96:6e:15:9a
-wan mac = 88:dc:96:6e:15:99
-model = eap1300ext
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
+### Lab team cannot log in to router. http://b/187830256
+### [88:dc:96:6e:15:9a]
+### brand = engenius
+### wan_hostname = chromeos3-row3-rack1-host14
+### ssid = engenius_eap1300ext__n_ch1_wpa2
+### frequency = 2412
+### rpm_managed = True
+### rpm_hostname = chromeos3-row3-rack1-rpm1
+### rpm_outlet = .A14
+### bss = 88:dc:96:6e:15:9a
+### wan mac = 88:dc:96:6e:15:99
+### model = eap1300ext
+### security = wpa2
+### psk = chromeos
+### class_name = StaticAPConfigurator
 
-[88:dc:96:6e:15:9b]
-brand = engenius
-wan_hostname = chromeos3-row3-rack1-host14
-ssid = engenius_eap1300ext__ac_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A14
-bss = 88:dc:96:6e:15:9b
-wan mac = 88:dc:96:6e:15:99
-model = eap1300ext
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
+### Lab team cannot log in to router. http://b/187830256
+### [88:dc:96:6e:15:9b]
+### brand = engenius
+### wan_hostname = chromeos3-row3-rack1-host14
+### ssid = engenius_eap1300ext__ac_ch44_wpa2
+### frequency = 5220
+### rpm_managed = True
+### rpm_hostname = chromeos3-row3-rack1-rpm1
+### rpm_outlet = .A14
+### bss = 88:dc:96:6e:15:9b
+### wan mac = 88:dc:96:6e:15:99
+### model = eap1300ext
+### security = wpa2
+### psk = chromeos
+### class_name = StaticAPConfigurator
 
 [04:d9:f5:77:fa:d8]
 brand = asus
@@ -2841,9 +2845,9 @@
 class_name = StaticAPConfigurator
 
 [c4:ad:34:6d:e6:c8]
-brand = microtik
+brand = mikrotik
 wan_hostname = chromeos3-row3-rack2-host12
-ssid = microtik_2hnd__n_ch6_wpa2
+ssid = mikrotik_2hnd__n_ch6_wpa2
 frequency = 2437
 rpm_managed = True
 rpm_hostname = chromeos3-row3-rack2-rpm1
@@ -2855,21 +2859,6 @@
 psk = chromeos
 class_name = StaticAPConfigurator
 
-[c4:ad:34:6d:e6:cd]
-brand = microtik
-wan_hostname = chromeos3-row3-rack2-host12
-ssid = microtik_2hnd__ac_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A12
-bss = c4:ad:34:6d:e6:cd
-wan mac = c4:ad:34:6d:e6:c8
-model = 2hnd
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
 [28:80:88:23:8c:e6]
 brand = netgear
 wan_hostname = chromeos3-row3-rack2-host13
@@ -3140,6 +3129,36 @@
 psk = chromeos
 class_name = StaticAPConfigurator
 
+[cc:32:e5:49:cf:82]
+brand = tp-link
+wan_hostname = chromeos3-row3-rack3-host2
+ssid = tplink_archera5__n_ch1_wpa2
+frequency = 2412
+rpm_managed = True
+rpm_hostname = chromeos3-row3-rack3-rpm1
+rpm_outlet = .A2
+bss = cc:32:e5:49:cf:82
+wan mac = cc:32:e5:49:cf:83
+model = archera5
+security = wpa2
+psk = chromeos
+class_name = StaticAPConfigurator
+
+[cc:32:e5:49:cf:81]
+brand = tp-link
+wan_hostname = chromeos3-row3-rack3-host2
+ssid = tplink_archera5__ac_ch44_wpa2
+frequency = 5220
+rpm_managed = True
+rpm_hostname = chromeos3-row3-rack3-rpm1
+rpm_outlet = .A2
+bss = cc:32:e5:49:cf:81
+wan mac = cc:32:e5:49:cf:83
+model = archera5
+security = wpa2
+psk = chromeos
+class_name = StaticAPConfigurator
+
 [cc:32:e5:e6:f5:ef]
 brand = tp-link
 wan_hostname = chromeos3-row3-rack3-host4
@@ -3199,3 +3218,64 @@
 security = wpa2
 psk = chromeos
 class_name = StaticAPConfigurator
+
+[80:3f:5d:f8:68:6a]
+brand = wavlink
+wan_hostname = chromeos3-row3-rack3-host7
+ssid = wavlink_arialg__n_ch6_wpa2
+frequency = 2437
+rpm_managed = True
+rpm_hostname = chromeos3-row3-rack3-rpm1
+rpm_outlet = .A7
+bss = 80:3f:5d:f8:68:6a
+wan mac = 80:3f:5d:f8:68:68
+model = arialg
+security = wpa2
+psk = chromeos
+class_name = StaticAPConfigurator
+
+[80:3f:5d:f8:68:6b]
+brand = wavlink
+wan_hostname = chromeos3-row3-rack3-host7
+ssid = wavlink_arialg__ac_ch36_wpa2
+frequency = 5180
+rpm_managed = True
+rpm_hostname = chromeos3-row3-rack3-rpm1
+rpm_outlet = .A7
+bss = 80:3f:5d:f8:68:6b
+wan mac = 80:3f:5d:f8:68:68
+model = arialg
+security = wpa2
+psk = chromeos
+class_name = StaticAPConfigurator
+
+[5c:4a:1f:fc:ca:ec]
+brand = juplink
+wan_hostname = chromeos3-row3-rack3-host8
+ssid = vanin_juplinkrx4__n_ch1_wpa2
+frequency = 2412
+rpm_managed = True
+rpm_hostname = chromeos3-row3-rack3-rpm1
+rpm_outlet = .A8
+bss = 5c:4a:1f:fc:ca:ec
+wan mac = 5c:4a:1f:fc:ca:ee
+model = juplinkrx4
+security = wpa2
+psk = chromeos
+class_name = StaticAPConfigurator
+
+[5c:4a:1f:fc:ca:ed]
+brand = juplink
+wan_hostname = chromeos3-row3-rack3-host8
+ssid = vanin_juplinkrx4__ac_ch153_wpa2
+frequency = 5765
+rpm_managed = True
+rpm_hostname = chromeos3-row3-rack3-rpm1
+rpm_outlet = .A8
+bss = 5c:4a:1f:fc:ca:ed
+wan mac = 5c:4a:1f:fc:ca:ee
+model = juplinkrx4
+security = wpa2
+psk = chromeos
+class_name = StaticAPConfigurator
+
diff --git a/server/cros/chaos_lib/chaos_analyzer.py b/server/cros/chaos_lib/chaos_analyzer.py
index fd2b18b..305c767 100755
--- a/server/cros/chaos_lib/chaos_analyzer.py
+++ b/server/cros/chaos_lib/chaos_analyzer.py
@@ -1,16 +1,17 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from __future__ import print_function
+
 
 import argparse
 import os
 import re
 
-import chaos_capture_analyzer
-import chaos_log_analyzer
+from . import chaos_capture_analyzer
+from . import chaos_log_analyzer
 
 class ChaosTestInfo(object):
     """ Class to gather the relevant test information from a folder. """
diff --git a/server/cros/chaos_lib/chaos_capture_analyzer.py b/server/cros/chaos_lib/chaos_capture_analyzer.py
index b5bed71..e329fb5 100755
--- a/server/cros/chaos_lib/chaos_capture_analyzer.py
+++ b/server/cros/chaos_lib/chaos_capture_analyzer.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -331,7 +331,7 @@
                                           DHCP_ACK_MESSAGE_TYPE },
                                         STATE_END)
     STATE_INFO_END = StateInfo("END", 0, {}, STATE_END)
-    # Master State Table Map of State Infos
+    # State Table Map of State Infos
     STATE_INFO_MAP = {STATE_INIT:         STATE_INFO_INIT,
                       STATE_PROBE_REQ:    STATE_INFO_PROBE_REQ,
                       STATE_PROBE_RESP:   STATE_INFO_PROBE_RESP,
@@ -374,7 +374,7 @@
                                               WLAN_DISASSOC_REQ_FRAME_TYPE },
                                             [ PACKET_DETAIL_SENDER,
                                               PACKET_DETAIL_REASON_CODE ])
-    # Master State Table Tuple of Error State Infos
+    # State Table Tuple of Error State Infos
     ERROR_STATE_INFO_TUPLE = (ERROR_STATE_INFO_DEAUTH, ERROR_STATE_INFO_DEASSOC)
 
     # These warnings actually match successful states, but since the we
@@ -389,7 +389,7 @@
             { PACKET_MATCH_WLAN_FRAME_TYPE: WLAN_ASSOC_RESP_FRAME_TYPE },
             [ PACKET_DETAIL_STATUS_CODE ])
 
-    # Master Table Tuple of warning information.
+    # Table Tuple of warning information.
     WARNING_INFO_TUPLE = (WARNING_INFO_AUTH_REJ, WARNING_INFO_ASSOC_REJ)
 
 
diff --git a/server/cros/chaos_lib/chaos_datastore_utils.py b/server/cros/chaos_lib/chaos_datastore_utils.py
index 23925f1..107d2d3 100644
--- a/server/cros/chaos_lib/chaos_datastore_utils.py
+++ b/server/cros/chaos_lib/chaos_datastore_utils.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
+# Lint as: python2, python3
 # Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,20 +16,20 @@
 
 class ChaosDataStoreUtils(object):
 
-    CHAOS_DATASTORE_URL = 'https://chaos-188802.appspot.com'
+    CHAOS_DATASTORE_URL = r'https://chaos-188802.appspot.com/'
 
     # The Datastore defines the following paths for operating methods.
-    ADD_DEVICE = "devices/new"
-    REMOVE_DEVICE = "devices/delete"
-    LOCK_DEVICE = "devices/lock"
-    UNLOCK_DEVICE = 'devices/unlock'
-    SHOW_DEVICE = "devices/"
-    GET_DEVICES = 'devices/'
-    GET_UNLOCKED_DEVICES = "unlocked_devices/"
-    GET_DEVICES_BY_AP_LABEL = "devices/location"
+    ADD_DEVICE = r"devices/new"
+    REMOVE_DEVICE = r"devices/delete"
+    LOCK_DEVICE = r"devices/lock"
+    UNLOCK_DEVICE = r"devices/unlock"
+    SHOW_DEVICE = r"devices/"
+    GET_DEVICES = r"devices/"
+    GET_UNLOCKED_DEVICES = r"unlocked_devices/"
+    GET_DEVICES_BY_AP_LABEL = r"devices/location"
 
     # HTTP content type. JSON encoded with UTF-8 character encoding.
-    HTTP_HEADER = {'content-type': 'application/json'}
+    HTTP_HEADER = {'content-type': r'application/json'}
 
 
     def add_device(self, host_name, ap_label):
@@ -43,7 +44,7 @@
         @rtype: bool
 
         """
-        request = self.CHAOS_DATASTORE_URL + '/' + self.ADD_DEVICE
+        request = self.CHAOS_DATASTORE_URL + self.ADD_DEVICE
         logging.debug("Request = %s", request)
         response = requests.post(request,
                                  headers=self.HTTP_HEADER,
@@ -68,7 +69,7 @@
         @rtype: bool
 
         """
-        request = self.CHAOS_DATASTORE_URL + '/' + self.REMOVE_DEVICE
+        request = self.CHAOS_DATASTORE_URL + self.REMOVE_DEVICE
         logging.debug("Request = %s", request)
         response = requests.put(request,
                                 headers=self.HTTP_HEADER,
@@ -91,7 +92,7 @@
         @rtype: bool
 
         """
-        request = self.CHAOS_DATASTORE_URL + '/' + self.LOCK_DEVICE
+        request = self.CHAOS_DATASTORE_URL + self.LOCK_DEVICE
         logging.debug("Request = %s", request)
         response = requests.put(request,
                                 headers=self.HTTP_HEADER,
@@ -114,7 +115,7 @@
         @rtype: bool
 
         """
-        request = self.CHAOS_DATASTORE_URL + '/' + self.UNLOCK_DEVICE
+        request = self.CHAOS_DATASTORE_URL + self.UNLOCK_DEVICE
         logging.debug("Request = %s", request)
         response = requests.put(request,
                                 headers=self.HTTP_HEADER,
@@ -138,7 +139,7 @@
         @rtype: dict when True, else bool:False
 
         """
-        request = self.CHAOS_DATASTORE_URL + '/' + self.SHOW_DEVICE + host_name
+        request = str(self.CHAOS_DATASTORE_URL) + str(self.SHOW_DEVICE) + str(host_name)
         logging.debug("Request = %s", request)
         response = requests.get(request)
         if 'error' in response.text:
@@ -156,7 +157,7 @@
         @rtype: dict when True, else bool:False
 
         """
-        request = self.CHAOS_DATASTORE_URL + '/' + self.GET_UNLOCKED_DEVICES
+        request = self.CHAOS_DATASTORE_URL + self.GET_UNLOCKED_DEVICES
         logging.debug("Request = %s", request)
         response = requests.get(request)
         if 'error' in response.text:
@@ -174,7 +175,7 @@
         @rtype: dict when True, else bool:False
 
         """
-        request = self.CHAOS_DATASTORE_URL + '/' + self.GET_DEVICES
+        request = self.CHAOS_DATASTORE_URL + self.GET_DEVICES
         logging.debug("Request = %s", request)
         response = requests.get(request)
         if 'error' in response.text:
@@ -195,7 +196,7 @@
         @rtype: dict when True, else bool:False
 
         """
-        request = self.CHAOS_DATASTORE_URL + '/' +  self.GET_DEVICES_BY_AP_LABEL
+        request = self.CHAOS_DATASTORE_URL + self.GET_DEVICES_BY_AP_LABEL
         logging.debug("Request = %s", request)
         response = requests.put(request,
                                 headers=self.HTTP_HEADER,
@@ -216,7 +217,7 @@
         @rtype: bool
 
         """
-        request = self.CHAOS_DATASTORE_URL + '/' + self.SHOW_DEVICE + host_name
+        request = self.CHAOS_DATASTORE_URL + self.SHOW_DEVICE + host_name
         logging.debug("Request = %s", request)
         response = requests.get(request)
         if 'null' in response.text:
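With the trailing slash now part of CHAOS_DATASTORE_URL, the request strings concatenate directly, for example:

    CHAOS_DATASTORE_URL = 'https://chaos-188802.appspot.com/'
    LOCK_DEVICE = 'devices/lock'
    print(CHAOS_DATASTORE_URL + LOCK_DEVICE)
    # https://chaos-188802.appspot.com/devices/lock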
diff --git a/server/cros/chaos_lib/chaos_log_analyzer.py b/server/cros/chaos_lib/chaos_log_analyzer.py
index 66256be..c517057 100755
--- a/server/cros/chaos_lib/chaos_log_analyzer.py
+++ b/server/cros/chaos_lib/chaos_log_analyzer.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/chaos_lib/chaos_parser.py b/server/cros/chaos_lib/chaos_parser.py
index be696f3..6aa5f40 100644
--- a/server/cros/chaos_lib/chaos_parser.py
+++ b/server/cros/chaos_lib/chaos_parser.py
@@ -1,8 +1,9 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from __future__ import print_function
+
 
 import argparse
 import copy
diff --git a/server/cros/chaos_lib/chaos_runner.py b/server/cros/chaos_lib/chaos_runner.py
index 9174d7e..ec22a35 100644
--- a/server/cros/chaos_lib/chaos_runner.py
+++ b/server/cros/chaos_lib/chaos_runner.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -22,8 +23,10 @@
 from autotest_lib.server.cros.network import chaos_clique_utils as utils
 from autotest_lib.server.cros.network import wifi_client
 
-# Webdriver master hostname
-MASTERNAME = 'chromeos3-chaosvmmaster.cros.corp.google.com'
+# Webdriver main hostname
+# TODO b:169251326 terms below are set outside of this codebase and should
+# be updated when possible ("master" -> "main"). # nocheck
+MAINNAME = 'chromeos3-chaosvmmaster.cros.corp.google.com'  # nocheck
 WEBDRIVER_PORT = 9515
 
 
@@ -69,7 +72,10 @@
         """
 
         lock_manager = host_lock_manager.HostLockManager()
-        webdriver_master = hosts.SSHHost(MASTERNAME, user='chaosvmmaster')
+        # TODO b:169251326 terms below are set outside of this codebase and
+        # should be updated when possible ("master" -> "main"). # nocheck
+        webdriver_main = hosts.SSHHost(MAINNAME,
+                                       user='chaosvmmaster')  # nocheck
         host_prefix = self._host.hostname.split('-')[0]
         with host_lock_manager.HostsLockedBy(lock_manager):
             capture_host = utils.allocate_packet_capturer(
@@ -114,15 +120,15 @@
             # Lock VM. If on, power off; always power on. Then create a tunnel.
             webdriver_instance = utils.allocate_webdriver_instance(lock_manager)
 
-            if utils.is_VM_running(webdriver_master, webdriver_instance):
+            if utils.is_VM_running(webdriver_main, webdriver_instance):
                 logging.info('VM %s was on; powering off for a clean instance',
                              webdriver_instance)
-                utils.power_off_VM(webdriver_master, webdriver_instance)
+                utils.power_off_VM(webdriver_main, webdriver_instance)
                 logging.info('Allow VM time to gracefully shut down')
                 time.sleep(5)
 
             logging.info('Starting up VM %s', webdriver_instance)
-            utils.power_on_VM(webdriver_master, webdriver_instance)
+            utils.power_on_VM(webdriver_main, webdriver_instance)
             logging.info('Allow VM time to power on before creating a tunnel.')
             time.sleep(30)
 
@@ -211,10 +217,10 @@
                                            capture_host, {},'packet_capturer')
                             continue
                         if networks == list():
-                           # Packet capturer did not find the SSID in scan or
-                           # there was a security mismatch.
-                           utils.release_ap(ap, batch_locker, self._broken_pdus)
-                           continue
+                            # Packet capturer did not find the SSID in scan or
+                            # there was a security mismatch.
+                            utils.release_ap(ap, batch_locker, self._broken_pdus)
+                            continue
 
                         assoc_params = ap.get_association_parameters()
 
@@ -272,7 +278,7 @@
                 webdriver_instance.close()
             capturer.close()
             logging.info('Powering off VM %s', webdriver_instance)
-            utils.power_off_VM(webdriver_master, webdriver_instance)
+            utils.power_off_VM(webdriver_main, webdriver_instance)
             lock_manager.unlock(webdriver_instance.hostname)
 
             if self._broken_pdus:
diff --git a/server/cros/chrome_sideloader.py b/server/cros/chrome_sideloader.py
new file mode 100644
index 0000000..cc25f8e
--- /dev/null
+++ b/server/cros/chrome_sideloader.py
@@ -0,0 +1,278 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import logging
+import os
+import json
+import random
+import stat
+import string
+
+# Shell command to force unmount a mount point if it is mounted
+FORCED_UMOUNT_DIR_IF_MOUNTPOINT_CMD = (
+        'if mountpoint -q %(dir)s; then umount -l %(dir)s; fi')
+# Shell command to set exec and suid flags
+SET_MOUNT_FLAGS_CMD = 'mount -o remount,exec,suid %s'
+# Shell command to send SIGHUP to dbus daemon
+DBUS_RELOAD_COMMAND = 'killall -HUP dbus-daemon'
+
+
+def extract_from_image(host, image_name, dest_dir):
+    """
+    Extract contents of an image to a directory.
+
+    @param host: The DUT to execute the command on
+    @param image_name: Name of image
+    @param dest_dir: directory where contents of image will be placed.
+
+    """
+
+    if not host.path_exists('/var/lib/imageloader/%s' % image_name):
+        raise Exception('Image %s not found on host %s' % (image_name, host))
+
+    def gen_random_str(length):
+        """
+        Generate random string
+
+        @param length: Length of the string
+
+        @return random string of specified length
+
+        """
+        return ''.join(
+                [random.choice(string.hexdigits) for _ in range(length)])
+
+    image_mount_point = '/tmp/image_%s' % gen_random_str(8)
+
+    # Create directories from scratch
+    host.run(['rm', '-rf', dest_dir])
+    host.run(['mkdir', '-p', '--mode', '0755', dest_dir, image_mount_point])
+
+    try:
+        # Mount image and copy content to the destination directory
+        host.run([
+                'imageloader', '--mount',
+                '--mount_component=%s' % image_name,
+                '--mount_point=%s' % image_mount_point
+        ])
+
+        host.run(['cp', '-r', '%s/*' % image_mount_point, '%s/' % dest_dir])
+    except Exception as e:
+        raise Exception(
+                'Error extracting content from image %s on host %s ' %
+                (image_name, host), e)
+    finally:
+        # Unmount image and remove the temporary directory
+        host.run([
+                'imageloader', '--unmount',
+                '--mount_point=%s' % image_mount_point
+        ])
+        host.run(['rm', '-rf', image_mount_point])
+
+
+def _stop_chrome_if_necessary(host):
+    """
+    Stop chrome if it is running.
+
+    @param host: The DUT to execute the command on
+
+    @return True if chrome was stopped. False otherwise.
+
+    """
+    status = host.run_output('status ui')
+    if 'start' in status:
+        return host.run('stop ui', ignore_status=True).exit_status == 0
+
+    return False
+
+
+def _mount_chrome(host, chrome_dir, chrome_mount_point):
+    """
+    Mount chrome to a mount point
+
+    @param host: The DUT to execute the command on
+    @param chrome_dir: directory where the chrome binary and artifacts
+                       will be placed.
+    @param chrome_mount_point: Chrome mount point
+
+    """
+    chrome_stopped = _stop_chrome_if_necessary(host)
+    _umount_chrome(host, chrome_mount_point)
+
+    # Mount chrome to the desired chrome directory
+    # Upon restart, this version of chrome will be used instead.
+    host.run(['mount', '--rbind', chrome_dir, chrome_mount_point])
+
+    # Chrome needs partition to have exec and suid flags set
+    host.run(SET_MOUNT_FLAGS_CMD % chrome_mount_point)
+
+    # Send SIGHUP to dbus-daemon to tell it to reload its configs. This won't
+    # pick up major changes (bus type, logging, etc.), but all we care about is
+    # getting the latest policy from /opt/google/chrome/dbus so that Chrome will
+    # be authorized to take ownership of its service names.
+    host.run(DBUS_RELOAD_COMMAND, ignore_status=True)
+
+    if chrome_stopped:
+        host.run('start ui', ignore_status=True)
+
+
+def _umount_chrome(host, chrome_mount_point):
+    """
+    Unmount chrome
+
+    @param host: The DUT to execute the command on
+    @param chrome_mount_point: Chrome mount point
+
+    """
+    chrome_stopped = _stop_chrome_if_necessary(host)
+    # Unmount chrome. Upon restart, the default version of chrome
+    # under the root partition will be used.
+    try:
+        host.run(FORCED_UMOUNT_DIR_IF_MOUNTPOINT_CMD %
+                 {'dir': chrome_mount_point})
+    except Exception as e:
+        raise Exception('Exception during cleanup on host %s' % host, e)
+
+    if chrome_stopped:
+        host.run('start ui', ignore_status=True)
+
+
+def setup_host(host, chrome_dir, chrome_mount_point):
+    """
+    Perform setup on host.
+
+    Mount chrome to point to the version provisioned by TLS.
+    The provisioning mechanism of chrome from the chrome builder is
+    based on Lacros Tast Test on Skylab (go/lacros-tast-on-skylab).
+
+    The lacros image provisioned by TLS contains the chrome binary
+    and artifacts.
+
+    @param host: The DUT to execute the command on
+    @param chrome_dir: directory where the chrome binary and artifacts
+                       will be placed.
+    @param chrome_mount_point: Chrome mount point
+
+    """
+    logging.info("Setting up host:%s", host)
+    try:
+        extract_from_image(host, 'lacros', chrome_dir)
+        if chrome_mount_point:
+            _mount_chrome(host, '%s/out/Release' % chrome_dir,
+                          chrome_mount_point)
+    except Exception as e:
+        raise Exception(
+                'Exception while mounting %s on host %s' %
+                (chrome_mount_point, host), e)
+
+
+def cleanup_host(host, chrome_dir, chrome_mount_point):
+    """
+    Umount chrome and perform cleanup.
+
+    @param host: The DUT to execute the command on
+    @param chrome_dir: directory where the chrome binary and artifacts
+                       is placed.
+    @param chrome_mount_point: Chrome mount point
+
+    """
+    logging.info("Unmounting chrome on host: %s", host)
+    try:
+        if chrome_mount_point:
+            _umount_chrome(host, chrome_mount_point)
+        host.run(['rm', '-rf', chrome_dir])
+    except Exception as e:
+        raise Exception('Exception during cleanup on host %s' % host, e)
+
+
+def get_tast_expr_from_file(host, args_dict, results_dir, base_path=None):
+    """
+    Get Tast expression from argument dictionary using a file.
+    If both tast_expr_file and tast_expr_key are in the dictionary, returns
+    the tast expression read from the file. If either argument is missing,
+    None is returned.
+    tast_expr_file must name a file containing a JSON dictionary;
+    tast_expr_key selects the tast expression within that dictionary.
+
+    The tast_expr_file is a json file containing a dictionary of names to tast
+    expressions like:
+
+    {
+    "default": "(\"group:mainline\" && \"dep:lacros\" && !informational)",
+    "tast_disabled_tests_from_lacros_example": "(\"group:mainline\" && \"dep:lacros\" && !informational && !\"name:lacros.Basic\")"
+    }
+
+    @param host: Host having the provisioned lacros image with the file
+    @param args_dict: Argument dictionary
+    @param results_dir: Where to store the tast_expr_file from the dut
+    @param base_path: Base path of the provisioned folder
+
+    """
+    tast_expr_file_name = args_dict.get('tast_expr_file')
+    tast_expr_key = args_dict.get('tast_expr_key')
+    if tast_expr_file_name and tast_expr_key:
+        if base_path:
+            tast_expr_file_name = os.path.join(base_path, tast_expr_file_name)
+
+        # Get the tast expr file from the provisioned lacros folder
+        if not host.path_exists(tast_expr_file_name):
+            raise Exception(
+                    'tast_expr_file: %s could not be found on the dut' %
+                    tast_expr_file_name)
+        local_file_name = os.path.join(results_dir,
+                                       os.path.basename(tast_expr_file_name))
+        st = os.stat(results_dir)
+        os.chmod(results_dir, st.st_mode | stat.S_IWRITE)
+        host.get_file(tast_expr_file_name, local_file_name, delete_dest=True)
+
+        with open(local_file_name) as tast_expr_file:
+            expr_dict = json.load(tast_expr_file)
+            expr = expr_dict.get(tast_expr_key)
+            # If both args were provided, the entry is expected in the file
+            if not expr:
+                raise Exception('tast_expr_key: %s could not be found' %
+                                tast_expr_key)
+            logging.info("tast_expr retreived from:%s", tast_expr_file)
+            return expr
+    return None
+
+
+def get_tast_expr(args_dict):
+    """
+    Get Tast expression from argument dictionary.
+    Users can provide either tast_expr or tast_expr_b64 in the dictionary.
+    tast_expr_b64 expects a base64 encoded tast_expr, for instance:
+      tast_expr = '("group:mainline" && "dep:lacros")'
+      tast_expr_b64 = base64.b64encode(s.encode('utf-8')).decode('ascii')
+
+    @param args_dict: Argument dictionary
+
+    """
+    expr = args_dict.get('tast_expr')
+    if expr:
+        return expr
+
+    expr_b64 = args_dict.get('tast_expr_b64')
+    if expr_b64:
+        try:
+            expr = base64.b64decode(expr_b64).decode()
+            return expr
+        except Exception as e:
+            raise Exception('Failed to decode tast_expr_b64: %s' %
+                            expr_b64) from e
+
+    raise Exception(
+            '''Tast expression is unspecified: set tast_expr or tast_expr_b64 in --args.\n'''
+            '''  Example: test_that --args="tast_expr=lacros.Basic"\n'''
+            '''  If the expression contains spaces, consider transforming it to\n'''
+            '''  base64 and passing it via tast_expr_b64 flag.\n'''
+            '''  Example:\n'''
+            '''    In Python:\n'''
+            '''      tast_expr = '("group:mainline" && "dep:lacros")'\n'''
+            '''      # Yields 'KCJncm91cDptYWlubGluZSIgJiYgImRlcDpsYWNyb3MiKQ=='\n'''
+            '''      tast_expr_b64 = base64.b64encode(s.encode('utf-8')).decode('ascii')\n'''
+            '''    Then in Autotest CLI:\n'''
+            '''      test_that --args="tast_expr_b64=KCJncm91cDptYWlubGluZSIgJiYgImRlcDpsYWNyb3MiKQ=="\n'''
+            '''  More details at go/lacros-on-skylab.''')
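A short usage sketch of the helpers above (the expression and argument dictionary are illustrative):

    import base64

    from autotest_lib.server.cros import chrome_sideloader

    expr = '("group:mainline" && "dep:lacros")'
    args_dict = {
            'tast_expr_b64':
                    base64.b64encode(expr.encode('utf-8')).decode('ascii')
    }
    assert chrome_sideloader.get_tast_expr(args_dict) == expr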
diff --git a/server/cros/chrome_sideloader_unittest.py b/server/cros/chrome_sideloader_unittest.py
new file mode 100644
index 0000000..608f32c
--- /dev/null
+++ b/server/cros/chrome_sideloader_unittest.py
@@ -0,0 +1,67 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import unittest
+from unittest import mock
+
+from autotest_lib.server.cros import chrome_sideloader
+
+
+class TestGetTastExpr(unittest.TestCase):
+    """Test GetTastExpr in ChromeSideloader library."""
+
+    class MockHost:
+        def path_exists(self, path):
+            return True
+
+        def get_file(self, src, dst, delete_dest):
+            pass
+
+    def testTastExpr(self):
+        tast_expr = "lacros.Basic"
+        args_dict = {'tast_expr': tast_expr}
+        self.assertEqual(chrome_sideloader.get_tast_expr(args_dict), tast_expr)
+
+    def testEmptyArgsDict(self):
+        args_dict = {}
+        with self.assertRaises(Exception):
+            chrome_sideloader.get_tast_expr(args_dict)
+
+    def testTastExprB64(self):
+        tast_expr = '''("group:mainline" && !informational)'''
+        tast_expr_b64 = _base64_encode_str(tast_expr)
+        args_dict = {'tast_expr_b64': tast_expr_b64}
+        self.assertEqual(chrome_sideloader.get_tast_expr(args_dict), tast_expr)
+
+    def testTastExprB64Corrupted(self):
+        tast_expr = '''("group:mainline" && !informational)'''
+        tast_expr_b64 = _base64_encode_str(tast_expr)
+        # remove last character to corrupt the encoding
+        tast_expr_b64 = tast_expr_b64[:-1]
+        args_dict = {'tast_expr_b64': tast_expr_b64}
+        with self.assertRaises(Exception):
+            chrome_sideloader.get_tast_expr(args_dict)
+
+    def testTastFileWithKey(self):
+        read_data = '{"default": "(\\"group:mainline\\" && !informational)"}'
+        file_mock = mock.mock_open(read_data=read_data)
+        args_dict = {
+                'tast_expr_file': 'mocked_file',
+                'tast_expr_key': 'default'
+        }
+        with mock.patch('builtins.open', file_mock),\
+            mock.patch('os.stat'),\
+            mock.patch('os.chmod'):
+            expr = chrome_sideloader.get_tast_expr_from_file(
+                    TestGetTastExpr.MockHost(), args_dict, 'mock/path/')
+            self.assertEqual('("group:mainline" && !informational)', expr)
+
+
+def _base64_encode_str(s):
+    return base64.b64encode(s.encode('utf-8')).decode('ascii')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/server/cros/clique_lib/clique_dut_control.py b/server/cros/clique_lib/clique_dut_control.py
index bf5ef9f..9e04c78 100644
--- a/server/cros/clique_lib/clique_dut_control.py
+++ b/server/cros/clique_lib/clique_dut_control.py
@@ -47,12 +47,12 @@
 """
 
 
-# Dummy result error reason to be used when exception is encountered in a role.
+# Stub result error reason to be used when exception is encountered in a role.
 ROLE_SETUP_EXCEPTION = "Role Setup Exception! "
 ROLE_EXECUTE_EXCEPTION = "Role Execute Exception! "
 ROLE_CLEANUP_EXCEPTION = "Role Teardown Exception! "
 
-# Dummy result error reason to be used when exception is encountered in a role.
+# Stub result error reason to be used when exception is encountered in a role.
 POOL_SETUP_EXCEPTION = "Pool Setup Exception! "
 POOL_CLEANUP_EXCEPTION = "Pool Teardown Exception! "
 
diff --git a/server/cros/crosperf/device_setup_utils.py b/server/cros/crosperf/device_setup_utils.py
index 2f46c2a..a1003e3 100755
--- a/server/cros/crosperf/device_setup_utils.py
+++ b/server/cros/crosperf/device_setup_utils.py
@@ -41,8 +41,8 @@
                    'returned %d\n'
                    'Error message: %s' % (dut.hostname, command, ret, err_msg))
         if ignore_status:
-            logging.warning(err_msg +
-                            '\n(Failure is considered non-fatal. Continue.)')
+            logging.warning(err_msg)
+            logging.warning('Failure is considered non-fatal. Continue.')
         else:
             logging.error(err_msg)
 
@@ -273,8 +273,8 @@
     # If cpu0 not in the online list and it exists in all online CPUs
     # add it to the online list.
     if 0 not in cpu_online and '0' in all_online_str:
-      # Add core0 to online cores.
-      cpu_online[0] = 1
+        # Add core0 to online cores.
+        cpu_online[0] = 1
     # At least one CPU has to be online.
     assert cpu_online
 
@@ -357,9 +357,34 @@
     waittime = 0
     timeout_in_sec = int(cooldown_time) * 60
     # Temperature from sensors come in uCelsius units.
-    temp_in_ucels = int(cooldown_temp) * 1000
+    cooldown_temp_in_ucels = int(cooldown_temp) * 1000
     sleep_interval = 30
 
+    _, all_thermal_sensors, _ = run_command_on_dut(
+            dut, 'ls /sys/class/thermal/thermal_zone*/temp')
+    _, all_thermal_sensor_types, _ = run_command_on_dut(
+            dut, 'cat /sys/class/thermal/thermal_zone*/type')
+    all_thermal_sensors = all_thermal_sensors.split('\n')
+    all_thermal_sensor_types = all_thermal_sensor_types.split('\n')
+    assert len(all_thermal_sensors) == len(all_thermal_sensor_types), (
+            'Number of sensors must match the number of types '
+            'read from /sys/class/thermal/thermal_zone*/type.')
+
+    monitor_thermal_sensors = []
+    # Filter in only the relevant thermal sensors.
+    filter_in_thermal_sensors = ('cpu', 'gpu', 'soc', 'pkg', 'core')
+    for sensor_path, sensor_type in zip(all_thermal_sensors,
+                                        all_thermal_sensor_types):
+        if any(
+                sensor_type.startswith(cpu_type)
+                for cpu_type in filter_in_thermal_sensors):
+            # Monitor only the sensors which names start from
+            # cpu/gpu/soc/pkg/core.
+            # We don't want irrelevant sensors slowing down performance testing.
+            monitor_thermal_sensors.append(sensor_path)
+            logging.info('Monitor thermal sensor %s of type %s', sensor_path,
+                         sensor_type)
+
     # Wait until any of two events occurs:
     # 1. CPU cools down to a specified temperature.
     # 2. Timeout cooldown_time expires.
@@ -371,20 +396,29 @@
     # "high" should be calculated based on empirical data per platform.
     # Based on such reports we can adjust CPU configuration or
     # cooldown limits accordingly.
-    while waittime < timeout_in_sec:
-        _, temp_output, _ = run_command_on_dut(
-            dut,
-            'cat /sys/class/thermal/thermal_zone*/temp',
-            ignore_status=True)
-        if any(int(temp) > temp_in_ucels for temp in temp_output.split()):
+    for sensor in monitor_thermal_sensors:
+        while waittime < timeout_in_sec:
+            err, sensor_value_str, _ = run_command_on_dut(dut,
+                                                          'cat ' + sensor,
+                                                          ignore_status=True)
+            # Stop monitoring this sensor and move to the next one if:
+            # - the sensor is not readable, or
+            # - the sensor temperature is at or below the threshold.
+            if err:
+                logging.warning(
+                        'Sensor %s is removed from monitoring due '
+                        'to the read error %d', sensor, err)
+                break
+            elif int(sensor_value_str) <= cooldown_temp_in_ucels:
+                break
+
+            logging.debug(
+                    '%s=%s is above threshold %d mC.\n'
+                    'Wait %ds and check again.', sensor, sensor_value_str,
+                    cooldown_temp_in_ucels, sleep_interval)
             time.sleep(sleep_interval)
             waittime += sleep_interval
-        else:
-            # Exit the loop when:
-            # 1. Reported temp numbers from all thermal sensors do not exceed
-            # 'cooldown_temp' or
-            # 2. No data from the sensors.
-            break
 
     logging.info('Cooldown wait time: %.1f min', (waittime / 60))
     return waittime
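
For reference, a minimal standalone sketch of the wait_cooldown() behaviour introduced above: thermal zones are filtered by type prefix (cpu/gpu/soc/pkg/core) and each selected sensor is polled until it cools to the threshold, becomes unreadable, or the timeout expires. read_file() and wait_cooldown_sketch() are illustrative stand-ins, not the autotest run_command_on_dut()/wait_cooldown() API, and the millidegree-Celsius assumption simply mirrors the * 1000 conversion in the change.

import glob
import time

RELEVANT_PREFIXES = ('cpu', 'gpu', 'soc', 'pkg', 'core')

def read_file(path):
    """Read a sysfs file, returning (err, contents)."""
    try:
        with open(path) as f:
            return 0, f.read().strip()
    except OSError:
        return 1, ''

def wait_cooldown_sketch(cooldown_temp_c=40, timeout_sec=600, sleep_interval=30):
    """Wait until relevant thermal zones cool to cooldown_temp_c or timeout."""
    threshold = cooldown_temp_c * 1000  # sysfs typically reports millidegrees.
    sensors = []
    for zone in sorted(glob.glob('/sys/class/thermal/thermal_zone*')):
        err, sensor_type = read_file(zone + '/type')
        if not err and sensor_type.startswith(RELEVANT_PREFIXES):
            sensors.append(zone + '/temp')
    waited = 0
    for sensor in sensors:
        while waited < timeout_sec:
            err, value = read_file(sensor)
            if err or int(value) <= threshold:
                break  # Unreadable or cool enough: move to the next sensor.
            time.sleep(sleep_interval)
            waited += sleep_interval
    return waited
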
diff --git a/server/cros/crosperf/device_setup_utils_unittest.py b/server/cros/crosperf/device_setup_utils_unittest.py
index 2941e22..4920f96 100755
--- a/server/cros/crosperf/device_setup_utils_unittest.py
+++ b/server/cros/crosperf/device_setup_utils_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 #
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
@@ -14,7 +14,7 @@
 import logging
 import time
 import unittest
-import mock
+from unittest import mock
 
 import common
 from autotest_lib.server import hosts
@@ -151,11 +151,13 @@
         self.dut.run.assert_called_once_with(
                 'run command;', ignore_status=True)
         # Error status causes log fatal.
-        logging.warning.assert_called_once_with(
-                'Command execution on DUT lumpy.cros2 failed.\n'
-                'Failing command: run command;\nreturned 1\n'
-                'Error message: Error!\n'
-                '(Failure is considered non-fatal. Continue.)')
+        calls = [
+                mock.call('Command execution on DUT lumpy.cros2 failed.\n'
+                          'Failing command: run command;\nreturned 1\n'
+                          'Error message: Error!'),
+                mock.call('Failure is considered non-fatal. Continue.'),
+        ]
+        logging.warning.assert_has_calls(calls)
 
 
     @mock.patch.object(device_setup_utils, 'run_command_on_dut')
@@ -299,15 +301,15 @@
         """Test that not exposed cpu0/online will still be in the list."""
 
         def run_command(dut, cmd):
-          """Helper function."""
-          if '/sys/devices/system/cpu/cpu' in cmd:
-              # Cpu0 online is not exposed.
-              return (0, '/sys/devices/system/cpu/cpu1/online 1\n', '')
-          elif '/sys/devices/system/cpu/online' in cmd:
-              # All online cores shows cpu0.
-              return (0, '0-1', '')
-          else:
-              return (1, '', '')
+            """Helper function."""
+            if '/sys/devices/system/cpu/cpu' in cmd:
+                # Cpu0 online is not exposed.
+                return (0, '/sys/devices/system/cpu/cpu1/online 1\n', '')
+            elif '/sys/devices/system/cpu/online' in cmd:
+                # All online cores shows cpu0.
+                return (0, '0-1', '')
+            else:
+                return (1, '', '')
 
         mock_run_command.side_effect = run_command
         cpu_online = device_setup_utils.get_cpu_online(self.dut)
@@ -595,15 +597,23 @@
     @mock.patch.object(device_setup_utils, 'run_command_on_dut')
     @mock.patch.object(time, 'sleep')
     def test_wait_cooldown_nowait(self, mock_sleep, mock_run_command):
+        """
+        No cooldown wait.
+
+        Don't wait when the temperature is already below the 40C threshold.
+        """
         mock_sleep.return_value = 0
-        mock_run_command.return_value = (0, '39000', '')
+        mock_run_command.side_effect = [
+                (0, '/sys/class/thermal/thermal_zone0/temp', ''),
+                (0, 'cpu', ''),
+                (0, '39000', ''),
+        ]
         cooldown_time = 10
         cooldown_temp = 40
         wait_time = device_setup_utils.wait_cooldown(self.dut, cooldown_time,
                                                      cooldown_temp)
-        # Send command to DUT only once to check temperature
-        # and make sure it does not exceed the threshold.
-        mock_run_command.assert_called_once()
+        mock_run_command.assert_called()
+        # Expect no wait time.
         mock_sleep.assert_not_called()
         self.assertEqual(wait_time, 0)
 
@@ -614,27 +624,57 @@
         """
         Wait one iteration for cooldown.
 
-        Set large enough timeout and changing temperature
-        output. Make sure it exits when expected value
-        received.
-        Expect that WaitCooldown check temp twice.
-
+        Set large enough timeout and changing temperature output.
+        Make sure it exits when expected value received.
         """
         mock_sleep.return_value = 0
-        mock_run_command.side_effect = [(0, '41000', ''), (0, '39999', '')]
+        mock_run_command.side_effect = [
+                (0, '/sys/class/thermal/thermal_zone0/temp', ''),
+                (0, 'cpu', ''),
+                (0, '41000', ''),
+                (0, '39000', ''),
+        ]
         cooldown_time = 100
         cooldown_temp = 40
         wait_time = device_setup_utils.wait_cooldown(self.dut, cooldown_time,
                                                      cooldown_temp)
         mock_run_command.assert_called()
-        self.assertEqual(mock_run_command.call_count, 2)
+        # Wait time is non-zero.
         mock_sleep.assert_called()
         self.assertGreater(wait_time, 0)
 
 
     @mock.patch.object(device_setup_utils, 'run_command_on_dut')
     @mock.patch.object(time, 'sleep')
-    def test_wait_cooldown_needwait(self, mock_sleep, mock_run_command):
+    def test_wait_cooldown_space_in_thermal_name(self, mock_sleep,
+                                                 mock_run_command):
+        """
+        Wait one iteration for cooldown.
+
+        Make sure the cooldown is working properly when there is a space
+        in the sensor type name.
+        """
+        mock_sleep.return_value = 0
+        mock_run_command.side_effect = [
+                (0, '/sys/class/thermal/thermal_zone0/temp\n'
+                 '/sys/class/thermal/thermal_zone1/temp', ''),
+                (0, 'cpu\ngpu thermal', ''),
+                (0, '39000', ''),
+                (0, '41000', ''),
+                (0, '38000', ''),
+        ]
+        cooldown_time = 10
+        cooldown_temp = 40
+        wait_time = device_setup_utils.wait_cooldown(self.dut, cooldown_time,
+                                                     cooldown_temp)
+        mock_run_command.assert_called()
+        # Expect exactly one sleep iteration, so wait time is non-zero.
+        mock_sleep.assert_called_once()
+        self.assertGreater(wait_time, 0)
+
+    @mock.patch.object(device_setup_utils, 'run_command_on_dut')
+    @mock.patch.object(time, 'sleep')
+    def test_wait_cooldown_wait_timeout(self, mock_sleep, mock_run_command):
         """
         Test exit by timeout.
 
@@ -643,16 +683,26 @@
         Output from temperature sensor never changes.
 
         """
+
+        def constant_temp(temp):
+            """Helper function returns gradually decreasing temperature."""
+            yield (0, '/sys/class/thermal/thermal_zone0/temp', '')
+            yield (0, 'cpu', '')
+            while True:
+                yield (0, str(temp), '')
+
         mock_sleep.return_value = 0
-        mock_run_command.return_value = (0, '41000', '')
-        cooldown_time = 60
+        # Set the temperature above the default 40C threshold (40000).
+        mock_run_command.side_effect = constant_temp(41000)
+        # Cooldown time - 5 minutes.
+        cooldown_time = 5
         cooldown_temp = 40
         wait_time = device_setup_utils.wait_cooldown(self.dut, cooldown_time,
                                                      cooldown_temp)
         mock_run_command.assert_called()
-        self.assertGreater(mock_run_command.call_count, 2)
         mock_sleep.assert_called()
-        self.assertGreater(wait_time, 0)
+        # Convert cooldown_time to seconds.
+        self.assertEqual(wait_time, cooldown_time * 60)
 
 
     @mock.patch.object(device_setup_utils, 'run_command_on_dut')
@@ -665,108 +715,103 @@
         Set large enough timeout and changing temperature
         output. Make sure it exits when expected value
         for all temperatures received.
-        Expect 3 checks.
-
         """
         mock_sleep.return_value = 0
         mock_run_command.side_effect = [
-                (0, '41000\n20000\n30000\n45000', ''),
-                (0, '39000\n20000\n30000\n41000', ''),
-                (0, '39000\n20000\n30000\n31000', ''),
+                (0, '/sys/class/thermal/thermal_zone0/temp\n'
+                 '/sys/class/thermal/thermal_zone1/temp\n'
+                 '/sys/class/thermal/thermal_zone2/temp', ''),
+                (0, 'cpu0\ncpu1\ngpu', ''),
+                # Sensor cpu0: two hot readings, then cool.
+                (0, '45000', ''),
+                (0, '41000', ''),
+                (0, '20000', ''),
+                # Sensor cpu1: one hot reading, then cool.
+                (0, '42000', ''),
+                (0, '39000', ''),
+                # Sensor gpu: already cool.
+                (0, '38000', ''),
+                # Monitoring ends.
         ]
         cooldown_time = 100
         cooldown_temp = 40
         wait_time = device_setup_utils.wait_cooldown(self.dut, cooldown_time,
                                                      cooldown_temp)
         mock_run_command.assert_called()
-        self.assertEqual(mock_run_command.call_count, 3)
+        # Wait time is non-zero.
         mock_sleep.assert_called()
         self.assertGreater(wait_time, 0)
 
 
     @mock.patch.object(device_setup_utils, 'run_command_on_dut')
     @mock.patch.object(time, 'sleep')
+    def test_wait_cooldown_ignore_irrelevant_sensor(self, mock_sleep,
+                                                    mock_run_command):
+        """
+        Ignore non-CPU/GPU sensors.
+
+        Report a high temperature on a non-CPU sensor and make sure we don't
+        wait when the CPU temperature is low, regardless of other reports.
+        """
+        mock_sleep.return_value = 0
+        mock_run_command.side_effect = [
+                (0, '/sys/class/thermal/thermal_zone0/temp\n'
+                 '/sys/class/thermal/thermal_zone1/temp', ''),
+                (0, 'cpu0\ncharger-sensor', ''),
+                # Iteration 1 of monitoring, check only cpu0.
+                # cpu0
+                (0, '39000', ''),
+                # Monitoring should stop at this point since the other
+                # sensor is irrelevant.
+                # If it doesn't, the test will fail: the function would keep
+                # monitoring until the 50C reading drops below the threshold,
+                # but the mock has no more input.
+                (0, '50000', ''),
+        ]
+        cooldown_time = 100
+        cooldown_temp = 40
+        wait_time = device_setup_utils.wait_cooldown(self.dut, cooldown_time,
+                                                     cooldown_temp)
+        mock_run_command.assert_called()
+        # Wait time is zero.
+        mock_sleep.assert_not_called()
+        self.assertEqual(wait_time, 0)
+
+
+    @mock.patch.object(device_setup_utils, 'run_command_on_dut')
+    @mock.patch.object(time, 'sleep')
     def test_wait_cooldown_thermal_error(self, mock_sleep, mock_run_command):
         """
-        Handle error status.
+        Handle error status gracefully.
 
-        Any error should be considered non-fatal.
-
+        A sensor with a read error is excluded from temperature monitoring,
+        but wait_cooldown still waits on the remaining sensors.
         """
         mock_sleep.return_value = 0
+        # Error status and output with a high temperature.
         mock_run_command.side_effect = [
-                (1, '39000\n20000\n30000\n41000', 'Thermal error'),
-                (1, '39000\n20000\n30000\n31000', 'Thermal error'),
+                (0, '/sys/class/thermal/thermal_zone0/temp\n'
+                 '/sys/class/thermal/thermal_zone1/temp', ''),
+                (0, 'cpu0\ncpu1', ''),
+                # Sensor cpu0: read error, so it is dropped from monitoring.
+                (1, '', 'Thermal error'),
+                # Sensor cpu1: one hot reading, then cool.
+                (0, '45000', ''),
+                (0, '39000', ''),
         ]
         cooldown_time = 10
         cooldown_temp = 40
         wait_time = device_setup_utils.wait_cooldown(self.dut, cooldown_time,
                                                      cooldown_temp)
-        # Check that errors are ignored.
-        mock_run_command.assert_called_with(
-                self.dut,
-                'cat /sys/class/thermal/thermal_zone*/temp',
-                ignore_status=True)
-        self.assertEqual(mock_run_command.call_count, 2)
-        # Check that we are waiting even when an error is returned
-        # as soon as data is coming.
+        # Wait time is greater than 0.
         mock_sleep.assert_called()
         self.assertGreater(wait_time, 0)
 
 
     @mock.patch.object(device_setup_utils, 'run_command_on_dut')
-    @mock.patch.object(time, 'sleep')
-    def test_wait_cooldown_thermal_no_output(self, mock_sleep,
-                                             mock_run_command):
-        """
-        Handle no output.
-
-        Check handling of empty stdout.
-
-        """
-        mock_sleep.return_value = 0
-        mock_run_command.side_effect = [(1, '', 'Thermal error')]
-        cooldown_time = 10
-        cooldown_temp = 40
-        wait_time = device_setup_utils.wait_cooldown(self.dut, cooldown_time,
-                                                     cooldown_temp)
-        # Check that errors are ignored.
-        mock_run_command.assert_called_once_with(
-                self.dut,
-                'cat /sys/class/thermal/thermal_zone*/temp',
-                ignore_status=True)
-        # No wait.
-        mock_sleep.assert_not_called()
-        self.assertEqual(wait_time, 0)
-
-
-    @mock.patch.object(device_setup_utils, 'run_command_on_dut')
-    @mock.patch.object(time, 'sleep')
-    def test_wait_cooldown_thermal_ws_output(self, mock_sleep,
-                                             mock_run_command):
-        """
-        Handle whitespace output.
-
-        Check handling of whitespace only.
-
-        """
-        mock_sleep.return_value = 0
-        mock_run_command.side_effect = [(1, '\n', 'Thermal error')]
-        cooldown_time = 10
-        cooldown_temp = 40
-        wait_time = device_setup_utils.wait_cooldown(self.dut, cooldown_time,
-                                                     cooldown_temp)
-        # Check that errors are ignored.
-        mock_run_command.assert_called_once_with(
-                self.dut,
-                'cat /sys/class/thermal/thermal_zone*/temp',
-                ignore_status=True)
-        # No wait.
-        mock_sleep.assert_not_called()
-        self.assertEqual(wait_time, 0)
-
-
-    @mock.patch.object(device_setup_utils, 'run_command_on_dut')
     def test_stop_ui(self, mock_run_command):
         mock_run_command.return_value = (0, '', '')
         device_setup_utils.stop_ui(self.dut)
diff --git a/server/cros/packet_generation/__init__.py b/server/cros/crossdevice/__init__.py
similarity index 100%
copy from server/cros/packet_generation/__init__.py
copy to server/cros/crossdevice/__init__.py
diff --git a/server/cros/crossdevice/cross_device_util.py b/server/cros/crossdevice/cross_device_util.py
new file mode 100644
index 0000000..82aa4ba
--- /dev/null
+++ b/server/cros/crossdevice/cross_device_util.py
@@ -0,0 +1,30 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+
+
+def connect_to_wifi(host, ssid, password):
+    """
+    Enables WiFi on the CrOS device and connects it to the given network
+    for Cross Device tests.
+
+    @param host: Host to run the command on.
+    @param ssid: SSID of the WiFi network to connect to.
+    @param password: password of the WiFi network.
+
+    """
+    host.run(
+            'dbus-send --system --print-reply --dest=org.chromium.flimflam / org.chromium.flimflam.Manager.EnableTechnology string:wifi'
+    )
+    try:
+        host.run('/usr/local/autotest/cros/scripts/wifi connect %s %s' %
+                 (ssid, password))
+    except error.AutoservRunError as e:
+        if 'already connected' in str(e):
+            logging.debug('Already connected to network. Ignoring error.')
+        else:
+            raise
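
A hedged example of how the new cross_device_util.connect_to_wifi() helper might be called from server-side test code; the hosts.create_host() factory, the import common bootstrap, and the hostname/credentials are assumptions about the call site rather than part of this change.

import common
from autotest_lib.server import hosts
from autotest_lib.server.cros.crossdevice import cross_device_util

def prepare_dut_for_cross_device(dut_hostname, ssid, password):
    """Connect a CrOS DUT to WiFi before running Cross Device tests."""
    host = hosts.create_host(dut_hostname)
    # "already connected" responses are logged and ignored inside the helper;
    # other failures surface as error.AutoservRunError.
    cross_device_util.connect_to_wifi(host, ssid, password)
    return host
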
diff --git a/server/cros/device_health_profile/device_health_profile.py b/server/cros/device_health_profile/device_health_profile.py
index 378ea71..96f6a71 100644
--- a/server/cros/device_health_profile/device_health_profile.py
+++ b/server/cros/device_health_profile/device_health_profile.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,6 +8,9 @@
 import json
 import time
 import logging
+import shutil
+
+import common
 
 from autotest_lib.server.cros.device_health_profile.profile_constants import *
 
@@ -44,13 +47,17 @@
         # the profile is located on the servo-host as a temporary location.
         # The servo-host will be provided later
         self._profile_host = None
+        # The flag is set when we set _profile_host.
+        # For containerized servod setups we keep the device profile on the
+        # drone instead of on the servo-host.
+        self._is_containerized_servod = False
         self._health_profile = None
 
         # Construct remote and local file path.
-        profile_filename = self._hostname + '.profile'
-        self._remote_path = os.path.join(PROFILE_FILE_DIR, profile_filename)
+        self._filename = self._hostname + '.profile'
+        self._remote_path = os.path.join(PROFILE_FILE_DIR, self._filename)
         result_dir = result_dir or '/tmp'
-        self._local_path = os.path.join(result_dir, profile_filename)
+        self._local_path = os.path.join(result_dir, self._filename)
 
     def init_profile(self, profile_host):
         """Initialize device health profile data.
@@ -65,18 +72,26 @@
         if not profile_host:
             raise DeviceHealthProfileError('The profile host is not provided.')
         self._profile_host = profile_host
-        # Do a lightweighted check to make sure the machine is up
-        # (by ping), as we don't waste time on unreachable DUT.
-        if not self._profile_host.check_cached_up_status():
-            raise DeviceHealthProfileError(
-                'The profile host %s is not reachable via ping.'
-                % self._profile_host.hostname)
+        # When we work with containerized servod we do not have access to the
+        # remote host, so we keep profiles in a local volume on the drone.
+        if self._profile_host.is_containerized_servod():
+            self._is_containerized_servod = True
+            # Set path to volume on the drone where we keep all profiles.
+            self._remote_path = os.path.join(PROFILE_DIR_CONTAINER,
+                                             self._filename)
+        else:
+            # Do a lightweight check to make sure the machine is up
+            # (by ping), as we don't waste time on unreachable DUT.
+            if not self._profile_host.check_cached_up_status():
+                raise DeviceHealthProfileError(
+                        'The profile host %s is not reachable via ping.' %
+                        self._profile_host.hostname)
 
-        # We also want try to check if the DUT is available for ssh.
-        if not self._profile_host.is_up():
-            raise DeviceHealthProfileError(
-                'The profile host %s is pingable but not sshable.'
-                % self._profile_host.hostname)
+            # We also want to check whether the DUT is available over ssh.
+            if not self._profile_host.is_up():
+                raise DeviceHealthProfileError(
+                        'The profile host %s is pingable but not sshable.' %
+                        self._profile_host.hostname)
 
         if not self._sync_existing_profile():
             self._create_profile_from_template()
@@ -91,11 +106,15 @@
 
         @returns True if sync and validate succeed otherwise False.
         """
-        if not self._profile_host.is_file_exists(self._remote_path):
-            logging.debug('%s not exists on %s.', self._remote_path,
-                          self._profile_host.hostname)
-            return False
-        self._download_profile()
+        if self._is_containerized_servod:
+            self._copy_from_local()
+        else:
+            if not self._profile_host.is_file_exists(self._remote_path):
+                logging.debug('%s does not exist on %s.', self._remote_path,
+                              self._profile_host.hostname)
+                return False
+            self._download_profile()
+
         self._read_profile()
         return self._validate_profile_data(self._health_profile)
 
@@ -123,9 +142,36 @@
         self._profile_host.send_file(source=self._local_path,
                                      dest=self._remote_path)
 
+    def _copy_from_local(self):
+        """Copy profile from local volume to result directory.
+
+        On Satlab, device profiles are kept in a special volume on the drone.
+        """
+        if os.path.exists(self._remote_path):
+            logging.info('Copying profile file from %s to local path: %s',
+                         self._remote_path, self._local_path)
+            shutil.copyfile(self._remote_path, self._local_path)
+        else:
+            logging.info(
+                    'Skipping copy from remote path %s as file does not exist.',
+                    self._remote_path)
+
+    def _copy_to_local(self):
+        """Copy profile file from result directory to local volume.
+
+        On Satlab, device profiles are kept in a special volume on the drone.
+        """
+        logging.info('Copying profile file from %s to remote path: %s',
+                     self._local_path, self._remote_path)
+        shutil.copyfile(self._local_path, self._remote_path)
+
     def _read_profile(self):
         """Read profile data from local path and convert it into json format.
         """
+        if not os.path.exists(self._local_path):
+            logging.info('Skipping reading as local file %s does not exist.',
+                         self._local_path)
+            return
         logging.debug('Reading device health profile from: %s',
                       self._local_path)
         with open(self._local_path, 'r') as f:
@@ -499,4 +545,7 @@
         # pylint: disable=missing-docstring
         self.refresh_update_time()
         self._dump_profile()
-        self._upload_profile()
+        if self._is_containerized_servod:
+            self._copy_to_local()
+        else:
+            self._upload_profile()
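
A sketch of the profile lifecycle with the containerized-servod handling above, modeled on the unit test that follows; the hostname and repair-action tag are placeholders, host_info is whatever host-info store the caller already has, and the assumption that close() is the write-back entry point comes from the tail of the method shown in the last hunk, whose name is not visible here.

from autotest_lib.server.cros.device_health_profile import device_health_profile

def record_repair_outcome(servo_host, host_info, results_dir):
    """Load a DUT health profile, record a repair action and write it back."""
    dhp = device_health_profile.DeviceHealthProfile(
            hostname='placeholder_dut_hostname',
            host_info=host_info,
            result_dir=results_dir)
    # For containerized servod the profile is read from the drone volume
    # (PROFILE_DIR_CONTAINER); otherwise it is downloaded from the servo host.
    dhp.init_profile(servo_host)
    dhp.insert_succeed_repair_action('placeholder_repair_action')
    # Assumed write-back entry point: refreshes the update time, dumps the
    # profile and copies/uploads it, as in the final hunk above.
    dhp.close()
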
diff --git a/server/cros/device_health_profile/device_health_profile_unittest.py b/server/cros/device_health_profile/device_health_profile_unittest.py
index aced19c..97eefa0 100644
--- a/server/cros/device_health_profile/device_health_profile_unittest.py
+++ b/server/cros/device_health_profile/device_health_profile_unittest.py
@@ -1,9 +1,8 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # pylint: disable=missing-docstring
 
 import time
 import unittest
-import mock
 
 import common
 from autotest_lib.server.cros.device_health_profile import device_health_profile
@@ -35,12 +34,15 @@
     def is_file_exists(self, file_path):
         return False
 
+    def is_containerized_servod(self):
+        return False
+
 
 def create_device_health_profile():
-    servohost = MockHost('dummy_servohost_hostname')
+    servohost = MockHost('placeholder_servohost_hostname')
     host_info = MockHostInfoStore()
     dhp = device_health_profile.DeviceHealthProfile(
-            hostname='dummy_dut_hostname',
+            hostname='placeholder_dut_hostname',
             host_info=host_info,
             result_dir=None)
     dhp.init_profile(servohost)
@@ -50,10 +52,15 @@
 class DeviceHealthProfileTestCase(unittest.TestCase):
     dhp = create_device_health_profile()
 
+    def _sleep(self):
+        """Sleep to create a difference in timestamp between updates."""
+        # Use 2 seconds; 1 second made the tests flaky.
+        time.sleep(2)
+
     def test_shows_not_loaded_till_profile_host_provided(self):
         host_info = MockHostInfoStore()
         dhp = device_health_profile.DeviceHealthProfile(
-                hostname='dummy_dut_hostname',
+                hostname='placeholder_dut_hostname',
                 host_info=host_info,
                 result_dir=None)
         self.assertFalse(dhp.is_loaded())
@@ -91,30 +98,28 @@
     def test_cros_stable_version(self):
         self.assertEqual(self.dhp.get_cros_stable_version(),
                          profile_constants.DEFAULT_STRING_VALUE)
-        self.dhp.set_cros_stable_version('dummy-release/R80-10000.0.0')
+        self.dhp.set_cros_stable_version('placeholder-release/R80-10000.0.0')
         self.assertEqual(self.dhp.get_cros_stable_version(),
-                         'dummy-release/R80-10000.0.0')
+                         'placeholder-release/R80-10000.0.0')
 
     def test_firmware_stable_version(self):
         self.assertEqual(self.dhp.get_firmware_stable_version(),
                          profile_constants.DEFAULT_STRING_VALUE)
-        self.dhp.set_firmware_stable_version('dummy_firmware_release')
+        self.dhp.set_firmware_stable_version('placeholder_firmware_release')
         self.assertEqual(self.dhp.get_firmware_stable_version(),
-                         'dummy_firmware_release')
+                         'placeholder_firmware_release')
 
     def test_last_update_time(self):
         cached_time = self.dhp.get_last_update_time()
         self.assertRegexpMatches(cached_time, r'\d{4}[-/]\d{2}[-/]\d{2}')
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
+        self._sleep()
         self.dhp.refresh_update_time()
         self.assertNotEqual(cached_time, self.dhp.get_last_update_time())
 
     def test_last_update_time_epoch(self):
         cached_time_epoch = self.dhp.get_last_update_time_epoch()
         self.assertEqual(type(cached_time_epoch), int)
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
+        self._sleep()
         self.dhp.refresh_update_time()
         self.assertGreater(self.dhp.get_last_update_time_epoch(),
                            cached_time_epoch)
@@ -122,8 +127,7 @@
     def test_enter_current_state_time(self):
         cached_time = self.dhp.get_enter_current_state_time()
         self.assertRegexpMatches(cached_time, r'\d{4}[-/]\d{2}[-/]\d{2}')
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
+        self._sleep()
         self.dhp.update_dut_state('test_state_2')
         self.assertNotEqual(cached_time,
                             self.dhp.get_enter_current_state_time())
@@ -131,8 +135,7 @@
     def test_enter_current_state_time_epoch(self):
         cached_time_epoch = self.dhp.get_enter_current_state_time_epoch()
         self.assertEqual(type(cached_time_epoch), int)
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
+        self._sleep()
         self.dhp.update_dut_state('test_state_3')
         self.assertGreater(self.dhp.get_enter_current_state_time_epoch(),
                            cached_time_epoch)
@@ -148,45 +151,43 @@
         self.assertEqual(self.dhp.get_provision_fail_count(), cached_count + 1)
 
     def test_failed_verifiers(self):
-        tag = 'dummy_verifier'
+        tag = 'placeholder_verifier'
         self.assertEqual(self.dhp.get_failed_verifiers(), {})
         self.assertEqual(self.dhp.get_failed_verifier(tag), 0)
         self.dhp.insert_failed_verifier(tag)
         self.assertEqual(self.dhp.get_failed_verifier(tag), 1)
         self.assertEqual(self.dhp.get_failed_verifiers(),
-                         {'dummy_verifier': 1})
+                         {'placeholder_verifier': 1})
 
     def test_succeed_repair_action(self):
-        tag = 'dummy_succeed_action'
+        tag = 'placeholder_succeed_action'
         self.assertEqual(self.dhp.get_succeed_repair_actions(), {})
         self.assertEqual(self.dhp.get_succeed_repair_action(tag), 0)
         self.dhp.insert_succeed_repair_action(tag)
         self.assertEqual(self.dhp.get_succeed_repair_action(tag), 1)
         self.assertEqual(self.dhp.get_succeed_repair_actions(),
-                         {'dummy_succeed_action': 1})
+                         {'placeholder_succeed_action': 1})
 
     def test_failed_repair_action(self):
-        tag = 'dummy_failed_action'
+        tag = 'placeholder_failed_action'
         self.assertEqual(self.dhp.get_failed_repair_actions(), {})
         self.assertEqual(self.dhp.get_failed_repair_action(tag), 0)
         self.dhp.insert_failed_repair_action(tag)
         self.assertEqual(self.dhp.get_failed_repair_action(tag), 1)
         self.assertEqual(self.dhp.get_failed_repair_actions(),
-                         {'dummy_failed_action': 1})
+                         {'placeholder_failed_action': 1})
 
     def test_get_badblocks_ro_run_time(self):
         cached_time = self.dhp.get_badblocks_ro_run_time()
         self.assertRegexpMatches(cached_time, r'\d{4}[-/]\d{2}[-/]\d{2}')
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
+        self._sleep()
         self.dhp.refresh_badblocks_ro_run_time()
         self.assertNotEqual(cached_time, self.dhp.get_badblocks_ro_run_time())
 
     def test_get_badblocks_ro_run_time_epoch(self):
         cached_time_epoch = self.dhp.get_badblocks_ro_run_time_epoch()
         self.assertEqual(type(cached_time_epoch), int)
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
+        self._sleep()
         self.dhp.refresh_badblocks_ro_run_time()
         self.assertGreater(self.dhp.get_badblocks_ro_run_time_epoch(),
                            cached_time_epoch)
@@ -194,16 +195,14 @@
     def test_get_badblocks_rw_run_time(self):
         cached_time = self.dhp.get_badblocks_rw_run_time()
         self.assertRegexpMatches(cached_time, r'\d{4}[-/]\d{2}[-/]\d{2}')
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
+        self._sleep()
         self.dhp.refresh_badblocks_rw_run_time()
         self.assertNotEqual(cached_time, self.dhp.get_badblocks_rw_run_time())
 
     def test_get_badblocks_rw_run_time_epoch(self):
         cached_time_epoch = self.dhp.get_badblocks_rw_run_time_epoch()
         self.assertEqual(type(cached_time_epoch), int)
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
+        self._sleep()
         self.dhp.refresh_badblocks_rw_run_time()
         self.assertGreater(self.dhp.get_badblocks_rw_run_time_epoch(),
                            cached_time_epoch)
@@ -211,8 +210,7 @@
     def test_get_servo_micro_fw_update_time(self):
         cached_time = self.dhp.get_servo_micro_fw_update_time()
         self.assertRegexpMatches(cached_time, r'\d{4}[-/]\d{2}[-/]\d{2}')
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
+        self._sleep()
         self.dhp.refresh_servo_miro_fw_update_run_time()
         self.assertNotEqual(cached_time,
                             self.dhp.get_servo_micro_fw_update_time())
@@ -220,8 +218,7 @@
     def test_get_servo_micro_fw_update_time_epoch(self):
         cached_time_epoch = self.dhp.get_servo_micro_fw_update_time_epoch()
         self.assertEqual(type(cached_time_epoch), int)
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
+        self._sleep()
         self.dhp.refresh_servo_miro_fw_update_run_time()
         self.assertGreater(self.dhp.get_servo_micro_fw_update_time_epoch(),
                            cached_time_epoch)
diff --git a/server/cros/device_health_profile/profile_constants.py b/server/cros/device_health_profile/profile_constants.py
index 0984399..f0365b3 100644
--- a/server/cros/device_health_profile/profile_constants.py
+++ b/server/cros/device_health_profile/profile_constants.py
@@ -1,9 +1,13 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import os
+
 PROFILE_FILE_DIR = '/var/lib/device_health_profile/'
+PROFILE_DIR_CONTAINER = os.environ.get("SERVOD_PROFILES_DIR",
+                                       '/var/servod/profile/')
 
 # Constants that will be used as key name in device health profile.
 BOARD_KEY = 'board'
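
Note that PROFILE_DIR_CONTAINER is resolved via os.environ.get() at import time, so a drone that wants a different volume has to export SERVOD_PROFILES_DIR before profile_constants is imported. A small sketch of that behaviour; '/mnt/servod_profiles/' is a hypothetical mount point, not a documented Satlab path.

import os

# The override is picked up only because it is set before the import below.
os.environ['SERVOD_PROFILES_DIR'] = '/mnt/servod_profiles/'

from autotest_lib.server.cros.device_health_profile import profile_constants

assert profile_constants.PROFILE_DIR_CONTAINER == '/mnt/servod_profiles/'
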
diff --git a/server/cros/dnsname_mangler.py b/server/cros/dnsname_mangler.py
index 9312611..b326b96 100644
--- a/server/cros/dnsname_mangler.py
+++ b/server/cros/dnsname_mangler.py
@@ -14,6 +14,9 @@
 ATTENUATOR_FAILURE_MESSAGE = (
         'Cannot infer DNS name of WiFi variable attenuator from a client IP '
         'address.  Use --atten_addr=<ip or dns name>')
+BTATTENUATOR_FAILURE_MESSAGE = (
+        'Cannot infer DNS name of Bluetooth variable attenuator from a client IP '
+        'address.  Use --btatten_addr=<ip or dns name>')
 ROUTER_FAILURE_MESSAGE = (
         'Cannot infer DNS name of WiFi router from a client IP address.')
 PCAP_FAILURE_MESSAGE = (
@@ -126,3 +129,26 @@
             cmdline_override=cmdline_override,
             not_dnsname_msg=ATTENUATOR_FAILURE_MESSAGE,
             allow_failure=allow_failure)
+
+
+def get_btattenuator_addr(client_hostname,
+                          cmdline_override=None,
+                          allow_failure=False):
+    """Build a hostname for a Bluetooth variable attenuator from the client hostname.
+
+    Optionally override that hostname with the provided command line hostname.
+
+    @param client_hostname: string DNS name of the client.
+    @param cmdline_override: string DNS name of the variable attenuator
+            controller provided via commandline arguments.
+    @param allow_failure: boolean True iff we should return None on failure to
+            infer a DNS name.
+    @return usable DNS name for the Bluetooth attenuator controller.
+
+    """
+    return get_companion_device_addr(
+            client_hostname,
+            '-btattenuator',
+            cmdline_override=cmdline_override,
+            not_dnsname_msg=BTATTENUATOR_FAILURE_MESSAGE,
+            allow_failure=allow_failure)
diff --git a/server/cros/dnsname_mangler_unittest.py b/server/cros/dnsname_mangler_unittest.py
index b0e1b3e..d9d758a 100755
--- a/server/cros/dnsname_mangler_unittest.py
+++ b/server/cros/dnsname_mangler_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/server/cros/dynamic_suite/boolparse_lib_unittest.py b/server/cros/dynamic_suite/boolparse_lib_unittest.py
index 80761a1..386dcf4 100755
--- a/server/cros/dynamic_suite/boolparse_lib_unittest.py
+++ b/server/cros/dynamic_suite/boolparse_lib_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # -*- coding: utf-8 -*-
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -10,7 +10,7 @@
 
 import unittest
 
-import boolparse_lib
+from autotest_lib.server.cros.dynamic_suite import boolparse_lib
 
 
 class ParserTest(unittest.TestCase):
diff --git a/server/cros/dynamic_suite/comparators.py b/server/cros/dynamic_suite/comparators.py
index c391cbc..19f1b18 100644
--- a/server/cros/dynamic_suite/comparators.py
+++ b/server/cros/dynamic_suite/comparators.py
@@ -4,19 +4,19 @@
 
 """Comparators for use in dynamic_suite module unit tests."""
 
-import mox
+from unittest.mock import ANY
 
-class StatusContains(mox.Comparator):
+
+class StatusContains(object):
     @staticmethod
     def CreateFromStrings(status=None, test_name=None, reason=None):
-        status_comp = mox.StrContains(status) if status else mox.IgnoreArg()
-        name_comp = mox.StrContains(test_name) if test_name else mox.IgnoreArg()
-        reason_comp = mox.StrContains(reason) if reason else mox.IgnoreArg()
+        status_comp = AnyStringWith(status) if status else ANY
+        name_comp = AnyStringWith(test_name) if test_name else ANY
+        reason_comp = AnyStringWith(reason) if reason else ANY
         return StatusContains(status_comp, name_comp, reason_comp)
 
 
-    def __init__(self, status=mox.IgnoreArg(), test_name=mox.IgnoreArg(),
-                 reason=mox.IgnoreArg()):
+    def __init__(self, status=ANY, test_name=ANY, reason=ANY):
         """Initialize.
 
         Takes mox.Comparator objects to apply to job_status.Status
@@ -46,3 +46,8 @@
         return '<Status containing \'%s\t%s\t%s\'>' % (self._status,
                                                        self._test_name,
                                                        self._reason)
+
+
+class AnyStringWith(str):
+    def __eq__(self, other):
+        return self in str(other)
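
The AnyStringWith helper above is a str subclass whose __eq__ does substring matching, which is what lets unittest.mock assertions accept any argument containing an expected fragment (the role mox.StrContains used to play). A self-contained demonstration; the recorder mock and its record_status() call are made up for illustration, not part of the suite code.

from unittest import mock


class AnyStringWith(str):
    """Compares equal to any value whose string form contains this text."""
    def __eq__(self, other):
        return self in str(other)


recorder = mock.Mock()
recorder.record_status('FAIL stub_Fail reason: oops')

# Both assertions pass because the recorded argument contains the fragments.
recorder.record_status.assert_called_with(AnyStringWith('stub_Fail'))
recorder.record_status.assert_called_with(AnyStringWith('reason: oops'))
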
diff --git a/server/cros/dynamic_suite/constants.py b/server/cros/dynamic_suite/constants.py
index 329dd6e..1649a6d 100644
--- a/server/cros/dynamic_suite/constants.py
+++ b/server/cros/dynamic_suite/constants.py
@@ -52,7 +52,10 @@
 # maintain backwards compatibility.
 REIMAGE_TYPE_OS = 'os'
 REIMAGE_TYPE_FIRMWARE = 'firmware'
-LATEST_BUILD_URL = 'gs://chromeos-image-archive/master-paladin/LATEST-master'
+
+# TODO b:169251326 terms below are set outside of this codebase and should
+# be updated when possible ("master" -> "main"). # nocheck
+LATEST_BUILD_URL = 'gs://chromeos-image-archive/master-paladin/LATEST-master'  # nocheck
 
 JOB_OFFLOAD_FAILURES_KEY = 'offload_failures_only'
 
@@ -66,13 +69,13 @@
 KEYVAL_CIDB_BUILD_STAGE_ID = 'cidb_build_stage_id'
 KEYVAL_BRANCH = 'branch'
 KEYVAL_BUILDER_NAME = 'build_config'
-KEYVAL_MASTER_BUILDER_NAME = 'master_build_config'
+KEYVAL_MAIN_BUILDER_NAME = 'main_build_config'
 SUITE_NAME = 'suite'
 INHERITED_KEYVALS = (
-    KEYVAL_CIDB_BUILD_ID,
-    KEYVAL_CIDB_BUILD_STAGE_ID,
-    KEYVAL_BRANCH,
-    KEYVAL_BUILDER_NAME,
-    KEYVAL_MASTER_BUILDER_NAME,
-    SUITE_NAME,
+        KEYVAL_CIDB_BUILD_ID,
+        KEYVAL_CIDB_BUILD_STAGE_ID,
+        KEYVAL_BRANCH,
+        KEYVAL_BUILDER_NAME,
+        KEYVAL_MAIN_BUILDER_NAME,
+        SUITE_NAME,
 )
diff --git a/server/cros/dynamic_suite/control_file_getter.py b/server/cros/dynamic_suite/control_file_getter.py
index a59241f..ac57d0e 100644
--- a/server/cros/dynamic_suite/control_file_getter.py
+++ b/server/cros/dynamic_suite/control_file_getter.py
@@ -105,7 +105,7 @@
         """
         files = self._get_control_file_list(suite_name=suite_name)
         for cf_filter in self.CONTROL_FILE_FILTERS:
-          files = [path for path in files if not path.endswith(cf_filter)]
+            files = [path for path in files if not path.endswith(cf_filter)]
         self._files = files
         return self._files
 
@@ -203,10 +203,14 @@
         # Some of our callers are ill-considered and request that we
         # search all of /usr/local/autotest (crbug.com/771823).
         # Fixing the callers immediately is somewhere between a
-        # nuisance and hard.  So, we have a blacklist, hoping two
+        # nuisance and hard.  So, we have a blocklist, hoping two
         # wrongs will somehow make it right.
-        blacklist = {
-            'site-packages', 'venv', 'results', 'logs', 'containers',
+        blocklist = {
+                'site-packages',
+                'venv',
+                'results',
+                'logs',
+                'containers',
         }
         while len(directories) > 0:
             directory = directories.pop()
@@ -214,7 +218,7 @@
                 continue
             try:
                 for name in os.listdir(directory):
-                    if name in blacklist:
+                    if name in blocklist:
                         continue
                     fullpath = os.path.join(directory, name)
                     if os.path.isfile(fullpath):
diff --git a/server/cros/dynamic_suite/control_file_getter_unittest.py b/server/cros/dynamic_suite/control_file_getter_unittest.py
index e52199f..dee9e8e 100755
--- a/server/cros/dynamic_suite/control_file_getter_unittest.py
+++ b/server/cros/dynamic_suite/control_file_getter_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -6,8 +6,8 @@
 
 """Unit tests for client/common_lib/cros/control_file_getter.py."""
 
-import mox
 import unittest
+from unittest.mock import patch
 
 import common
 
@@ -16,7 +16,7 @@
 from autotest_lib.server.cros.dynamic_suite import control_file_getter
 
 
-class DevServerGetterTest(mox.MoxTestBase):
+class DevServerGetterTest(unittest.TestCase):
     """Unit tests for control_file_getter.DevServerGetter.
 
     @var _HOST: fake dev server host address.
@@ -29,40 +29,44 @@
 
     def setUp(self):
         super(DevServerGetterTest, self).setUp()
-        self.dev_server = self.mox.CreateMock(dev_server.ImageServer)
+        patcher = patch.object(dev_server, 'ImageServer')
+        self.dev_server = patcher.start()
+        self.addCleanup(patcher.stop)
+
         self.getter = control_file_getter.DevServerGetter(self._BUILD,
                                                           self.dev_server)
 
+    def tearDown(self):
+        if self.dev_server.resolve.call_count > 0:
+            self.dev_server.resolve.assert_called_with(self._BUILD,
+                                                       None,
+                                                       ban_list=None)
 
     def testListControlFiles(self):
         """Should successfully list control files from the dev server."""
-        self.dev_server.list_control_files(
-                self._BUILD,
-                suite_name='').AndReturn(self._FILES)
-        self.mox.ReplayAll()
+        self.dev_server.list_control_files.return_value = self._FILES
         self.assertEquals(self.getter.get_control_file_list(), self._FILES)
         self.assertEquals(self.getter._files, self._FILES)
-
+        self.dev_server.list_control_files.assert_called_with(self._BUILD,
+                                                              suite_name='')
 
     def testListControlFilesFail(self):
         """Should fail to list control files from the dev server."""
-        self.dev_server.list_control_files(
-                self._BUILD,
-                suite_name='').AndRaise(self._403)
-        self.mox.ReplayAll()
+        self.dev_server.list_control_files.return_value = None
+
+        self.dev_server.list_control_files.side_effect = self._403
         self.assertRaises(error.NoControlFileList,
                           self.getter.get_control_file_list)
-
+        self.dev_server.list_control_files.assert_called_with(self._BUILD,
+                                                              suite_name='')
 
     def testGetControlFile(self):
         """Should successfully get a control file from the dev server."""
         path = self._FILES[0]
-        self.dev_server.get_control_file(self._BUILD,
-                                         path).AndReturn(self._CONTENTS)
-        self.mox.ReplayAll()
+        self.dev_server.get_control_file.return_value = self._CONTENTS
         self.assertEquals(self.getter.get_control_file_contents(path),
                           self._CONTENTS)
-
+        self.dev_server.get_control_file.assert_called_with(self._BUILD, path)
 
     def testGetSuiteInfo(self):
         """
@@ -70,38 +74,35 @@
         dev server.
         """
         file_contents = {f:self._CONTENTS for f in self._FILES}
-        self.dev_server.list_suite_controls(
-                self._BUILD,
-                suite_name='').AndReturn(file_contents)
-        self.mox.ReplayAll()
+        self.dev_server.list_suite_controls.return_value = file_contents
+
         suite_info = self.getter.get_suite_info()
         for k in suite_info.keys():
             self.assertEquals(suite_info[k], file_contents[k])
         self.assertEquals(sorted(self.getter._files), sorted(self._FILES))
-
+        self.dev_server.list_suite_controls.assert_called_with(self._BUILD,
+                                                               suite_name='')
 
     def testListSuiteControlisFail(self):
         """
         Should fail to list all control file's contents from the dev server.
         """
-        self.dev_server.list_suite_controls(
-                self._BUILD,
-                suite_name='').AndRaise(self._403)
-        self.mox.ReplayAll()
+        self.dev_server.list_suite_controls.side_effect = self._403
         self.assertRaises(error.SuiteControlFileException,
                           self.getter.get_suite_info,
                           '')
-
+        self.dev_server.list_suite_controls.assert_called_with(self._BUILD,
+                                                               suite_name='')
 
     def testGetControlFileFail(self):
         """Should fail to get a control file from the dev server."""
         path = self._FILES[0]
-        self.dev_server.get_control_file(self._BUILD, path).AndRaise(self._403)
-        self.mox.ReplayAll()
+        self.dev_server.get_control_file.side_effect = self._403
+
         self.assertRaises(error.ControlFileNotFound,
                           self.getter.get_control_file_contents,
                           path)
-
+        self.dev_server.get_control_file.assert_called_with(self._BUILD, path)
 
     def testGetControlFileByNameCached(self):
         """\
@@ -111,12 +112,10 @@
         path = "file/%s/control" % name
 
         self.getter._files = self._FILES + [path]
-        self.dev_server.get_control_file(self._BUILD,
-                                         path).AndReturn(self._CONTENTS)
-        self.mox.ReplayAll()
+        self.dev_server.get_control_file.return_value = self._CONTENTS
         self.assertEquals(self.getter.get_control_file_contents_by_name(name),
                           self._CONTENTS)
-
+        self.dev_server.get_control_file.assert_called_with(self._BUILD, path)
 
     def testGetControlFileByName(self):
         """\
@@ -126,15 +125,13 @@
         path = "file/%s/control" % name
 
         files = self._FILES + [path]
-        self.dev_server.list_control_files(
-                self._BUILD,
-                suite_name='').AndReturn(files)
-        self.dev_server.get_control_file(self._BUILD,
-                                         path).AndReturn(self._CONTENTS)
-        self.mox.ReplayAll()
+        self.dev_server.list_control_files.return_value = files
+        self.dev_server.get_control_file.return_value = self._CONTENTS
         self.assertEquals(self.getter.get_control_file_contents_by_name(name),
                           self._CONTENTS)
-
+        self.dev_server.list_control_files.assert_called_with(self._BUILD,
+                                                              suite_name='')
+        self.dev_server.get_control_file.assert_called_with(self._BUILD, path)
 
     def testGetSuiteControlFileByName(self):
         """\
@@ -144,24 +141,19 @@
         path = "file/" + name
 
         files = self._FILES + [path]
-        self.dev_server.list_control_files(
-                self._BUILD,
-                suite_name='').AndReturn(files)
-        self.dev_server.get_control_file(self._BUILD,
-                                         path).AndReturn(self._CONTENTS)
-        self.mox.ReplayAll()
+        self.dev_server.list_control_files.return_value = files
+        self.dev_server.get_control_file.return_value = self._CONTENTS
         self.assertEquals(self.getter.get_control_file_contents_by_name(name),
                           self._CONTENTS)
-
+        self.dev_server.list_control_files.assert_called_with(self._BUILD,
+                                                              suite_name='')
+        self.dev_server.get_control_file.assert_called_with(self._BUILD, path)
 
     def testGetControlFileByNameFail(self):
         """Should fail to get a control file from the dev server by name."""
         name = 'one'
 
-        self.dev_server.list_control_files(
-                self._BUILD,
-                suite_name='').AndReturn(self._FILES)
-        self.mox.ReplayAll()
+        self.dev_server.list_control_files.return_value = self._FILES
         self.assertRaises(error.ControlFileNotFound,
                           self.getter.get_control_file_contents_by_name,
                           name)
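
The control_file_getter_unittest.py conversion above follows the mox-to-unittest.mock pattern used throughout this merge: record/replay expectations become return_value or side_effect configured up front, and verification becomes explicit assert_called_with calls after the code under test runs. A generic sketch of that shape; fetch_list() is a toy stand-in, not the real DevServerGetter.

import unittest
from unittest.mock import MagicMock

def fetch_list(server, build):
    """Code under test: delegates to the dev-server-like object."""
    return server.list_control_files(build, suite_name='')

class FetchListTest(unittest.TestCase):
    def test_success(self):
        server = MagicMock()
        # mox: server.list_control_files(...).AndReturn([...]); ReplayAll().
        server.list_control_files.return_value = ['a/control', 'b/control']
        self.assertEqual(fetch_list(server, 'R1-1.0.0'),
                         ['a/control', 'b/control'])
        # mox: implicit VerifyAll(); mock: explicit assertion afterwards.
        server.list_control_files.assert_called_with('R1-1.0.0', suite_name='')

    def test_failure(self):
        server = MagicMock()
        # mox: .AndRaise(exc); mock: side_effect raises when called.
        server.list_control_files.side_effect = RuntimeError('403')
        with self.assertRaises(RuntimeError):
            fetch_list(server, 'R1-1.0.0')

if __name__ == '__main__':
    unittest.main()
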
diff --git a/server/cros/dynamic_suite/dynamic_suite.py b/server/cros/dynamic_suite/dynamic_suite.py
index 9b2760a..0252eb3 100644
--- a/server/cros/dynamic_suite/dynamic_suite.py
+++ b/server/cros/dynamic_suite/dynamic_suite.py
@@ -439,7 +439,7 @@
     Run a provision suite.
 
     Will re-image a number of devices (of the specified board) with the
-    provided builds by scheduling dummy_Pass.
+    provided builds by scheduling stub_Pass.
 
     @param job: an instance of client.common_lib.base_job representing the
                 currently running suite job.
@@ -657,4 +657,4 @@
     elif isinstance(input_, six.text_type):
         return six.ensure_binary(input_, 'utf-8')
     else:
-        return input_
\ No newline at end of file
+        return input_
diff --git a/server/cros/dynamic_suite/dynamic_suite_unittest.py b/server/cros/dynamic_suite/dynamic_suite_unittest.py
index 7c6bdd9..5b76bd0 100755
--- a/server/cros/dynamic_suite/dynamic_suite_unittest.py
+++ b/server/cros/dynamic_suite/dynamic_suite_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -9,9 +9,8 @@
 import os
 import signal
 import unittest
-
-import mox
-import mock
+from unittest import mock
+from unittest.mock import MagicMock
 
 import common
 from autotest_lib.client.common_lib import base_job, error
@@ -20,7 +19,7 @@
 from autotest_lib.server.cros.dynamic_suite.suite import Suite
 
 
-class DynamicSuiteTest(mox.MoxTestBase):
+class DynamicSuiteTest(unittest.TestCase):
     """Unit tests for dynamic_suite module methods.
 
     @var _DARGS: default args to vet.
@@ -33,15 +32,17 @@
     def setUp(self):
 
         super(DynamicSuiteTest, self).setUp()
-        self._DARGS = {'name': 'name',
-                       'builds': self._BUILDS,
-                       'board': 'board',
-                       'job': self.mox.CreateMock(base_job.base_job),
-                       'pool': 'pool',
-                       'check_hosts': False,
-                       'add_experimental': False,
-                       'suite_dependencies': ['test_dep'],
-                       'devserver_url': self._DEVSERVER_HOST}
+        self._DARGS = {
+                'name': 'name',
+                'builds': self._BUILDS,
+                'board': 'board',
+                'job': MagicMock(base_job.base_job),
+                'pool': 'pool',
+                'check_hosts': False,
+                'add_experimental': False,
+                'suite_dependencies': ['test_dep'],
+                'devserver_url': self._DEVSERVER_HOST
+        }
 
 
 
diff --git a/server/cros/dynamic_suite/frontend_wrappers.py b/server/cros/dynamic_suite/frontend_wrappers.py
index 4088e1a..4d05817 100644
--- a/server/cros/dynamic_suite/frontend_wrappers.py
+++ b/server/cros/dynamic_suite/frontend_wrappers.py
@@ -6,7 +6,7 @@
 import math
 import threading
 
-import common
+from . import common
 from autotest_lib.client.common_lib import env
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
@@ -14,17 +14,18 @@
 from autotest_lib.frontend.afe.json_rpc import proxy
 from autotest_lib.server import frontend
 try:
-    from chromite.lib import retry_util
-    from chromite.lib import timeout_util
-except ImportError:
-    logging.warn('Unable to import chromite.')
+    from autotest_lib.utils.frozen_chromite.lib import retry_util
+    from autotest_lib.utils.frozen_chromite.lib import timeout_util
+except ImportError as e:
+    logging.warning('Unable to import chromite: %s', e)
     retry_util = None
     timeout_util = None
 
 try:
-    from chromite.lib import metrics
-except ImportError:
-    logging.warn('Unable to import metrics from chromite.')
+    from autotest_lib.utils.frozen_chromite.lib import metrics
+except ImportError as e:
+    logging.warning('Unable to import metrics from '
+                    'autotest_lib.utils.frozen_chromite: %s', e)
     metrics = utils.metrics_mock
 
 
diff --git a/server/cros/dynamic_suite/frontend_wrappers_unittest.py b/server/cros/dynamic_suite/frontend_wrappers_unittest.py
index fd5ac73..1cc0326 100755
--- a/server/cros/dynamic_suite/frontend_wrappers_unittest.py
+++ b/server/cros/dynamic_suite/frontend_wrappers_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -6,7 +6,6 @@
 
 """Unittests for server/cros/dynamic_suite/frontend_wrappers.py"""
 
-import mox
 import unittest
 
 import common
@@ -14,75 +13,72 @@
 from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
 
 
-class FrontendWrappersTest(mox.MoxTestBase):
-  """Unit tests for frontend_wrappers global functions."""
+class FrontendWrappersTest(unittest.TestCase):
+    """Unit tests for frontend_wrappers global functions."""
 
-  def testConvertTimeoutToRetryBasic(self):
-    """Test converting timeout and delay values to retry attempts."""
-    backoff = 2
-    timeout_min = 10
-    delay_sec = 10
+    def testConvertTimeoutToRetryBasic(self):
+        """Test converting timeout and delay values to retry attempts."""
+        backoff = 2
+        timeout_min = 10
+        delay_sec = 10
 
-    max_retry = frontend_wrappers.convert_timeout_to_retry(backoff,
-                                                           timeout_min,
-                                                           delay_sec)
+        max_retry = frontend_wrappers.convert_timeout_to_retry(
+                backoff, timeout_min, delay_sec)
 
-    self.assertEquals(max_retry, 6)
+        self.assertEquals(max_retry, 6)
 
-  def testConvertTimeoutToRetryLimit(self):
-    """Test approaching a change in attempt amount."""
-    backoff = 2
-    delay_sec = 10
-    timeout_min_lower_limit = 42.499999
-    timeout_min_at_limit = 42.5
-    timeout_min_upper_limit = 42.599999
+    def testConvertTimeoutToRetryLimit(self):
+        """Test approaching a change in attempt amount."""
+        backoff = 2
+        delay_sec = 10
+        timeout_min_lower_limit = 42.499999
+        timeout_min_at_limit = 42.5
+        timeout_min_upper_limit = 42.599999
 
-    max_retry_lower_limit = frontend_wrappers.convert_timeout_to_retry(
-        backoff, timeout_min_lower_limit, delay_sec)
+        max_retry_lower_limit = frontend_wrappers.convert_timeout_to_retry(
+                backoff, timeout_min_lower_limit, delay_sec)
 
-    max_retry_at_limit = frontend_wrappers.convert_timeout_to_retry(
-        backoff, timeout_min_at_limit, delay_sec)
+        max_retry_at_limit = frontend_wrappers.convert_timeout_to_retry(
+                backoff, timeout_min_at_limit, delay_sec)
 
-    max_retry_upper_limit = frontend_wrappers.convert_timeout_to_retry(
-        backoff, timeout_min_upper_limit, delay_sec)
+        max_retry_upper_limit = frontend_wrappers.convert_timeout_to_retry(
+                backoff, timeout_min_upper_limit, delay_sec)
 
-    # Eight attempts with a backoff factor of two should be sufficient
-    # for timeouts up to 2550 seconds (or 42.5 minutes).
-    self.assertEquals(max_retry_lower_limit, 8)
-    self.assertEquals(max_retry_at_limit, 8)
+        # Eight attempts with a backoff factor of two should be sufficient
+        # for timeouts up to 2550 seconds (or 42.5 minutes).
+        self.assertEquals(max_retry_lower_limit, 8)
+        self.assertEquals(max_retry_at_limit, 8)
 
-    # We expect to see nine attempts, as we are above the 42.5 minute
-    # threshold.
-    self.assertEquals(max_retry_upper_limit, 9)
+        # We expect to see nine attempts, as we are above the 42.5 minute
+        # threshold.
+        self.assertEquals(max_retry_upper_limit, 9)
 
-  def testConvertTimeoutToRetrySmallTimeout(self):
-    """Test converting to retry attempts when a small timeout is used."""
-    backoff = 2
-    timeout_min = 0.01
-    delay_sec = 10
+    def testConvertTimeoutToRetrySmallTimeout(self):
+        """Test converting to retry attempts when a small timeout is used."""
+        backoff = 2
+        timeout_min = 0.01
+        delay_sec = 10
 
-    max_retry = frontend_wrappers.convert_timeout_to_retry(backoff,
-                                                           timeout_min,
-                                                           delay_sec)
+        max_retry = frontend_wrappers.convert_timeout_to_retry(
+                backoff, timeout_min, delay_sec)
 
-    # The number of attempts should be less than one using the formula
-    # outlined in the function, but, we always round up to the nearest
-    # integer.
-    self.assertEquals(max_retry, 1)
+        # The number of attempts should be less than one using the formula
+        # outlined in the function, but we always round up to the nearest
+        # integer.
+        self.assertEquals(max_retry, 1)
 
-  def testConvertTimeoutToRetrySmallDelay(self):
-    """Test converting to retry attempts when the delay is small."""
-    backoff = 2
-    timeout_min = 30
-    delay_sec = 0.01
+    def testConvertTimeoutToRetrySmallDelay(self):
+        """Test converting to retry attempts when the delay is small."""
+        backoff = 2
+        timeout_min = 30
+        delay_sec = 0.01
 
-    max_retry = frontend_wrappers.convert_timeout_to_retry(backoff,
-                                                           timeout_min,
-                                                           delay_sec)
+        max_retry = frontend_wrappers.convert_timeout_to_retry(
+                backoff, timeout_min, delay_sec)
 
-    # The number of retries shouldn't be too large despite the small
-    # delay as a result of backing off in an exponential fashion.
-    self.assertEquals(max_retry, 18)
+        # The number of retries shouldn't be too large despite the small
+        # delay as a result of backing off in an exponential fashion.
+        self.assertEquals(max_retry, 18)
 
 
 if __name__ == '__main__':
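The attempt counts asserted above (6, 8, 9, 1 and 18) are all consistent with the usual geometric-series bound for exponential backoff: after n attempts the cumulative wait is delay_sec * (backoff**n - 1) / (backoff - 1), so the smallest sufficient n is the ceiling of log_backoff(timeout_sec * (backoff - 1) / delay_sec + 1). The sketch below is inferred from the test expectations, not copied from frontend_wrappers, so treat it as an illustration of the arithmetic rather than the real implementation:

    import math


    def convert_timeout_to_retry_sketch(backoff, timeout_min, delay_sec):
        """Smallest attempt count whose cumulative backed-off delay covers the timeout."""
        timeout_sec = timeout_min * 60
        n = math.log(timeout_sec * (backoff - 1) / float(delay_sec) + 1, backoff)
        return int(math.ceil(n))


    # Reproduces the values asserted in the unit tests above.
    assert convert_timeout_to_retry_sketch(2, 10, 10) == 6
    assert convert_timeout_to_retry_sketch(2, 42.499999, 10) == 8
    assert convert_timeout_to_retry_sketch(2, 42.599999, 10) == 9
    assert convert_timeout_to_retry_sketch(2, 0.01, 10) == 1
    assert convert_timeout_to_retry_sketch(2, 30, 0.01) == 18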
diff --git a/server/cros/dynamic_suite/host_spec_unittest.py b/server/cros/dynamic_suite/host_spec_unittest.py
index 4fa7202..07055d7 100755
--- a/server/cros/dynamic_suite/host_spec_unittest.py
+++ b/server/cros/dynamic_suite/host_spec_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -6,7 +6,6 @@
 
 """Unit tests for server/cros/dynamic_suite/host_spec.py."""
 
-import mox
 import unittest
 
 import common
@@ -15,7 +14,7 @@
 from autotest_lib.server.cros.dynamic_suite.fakes import FakeHost
 
 
-class HostSpecTest(mox.MoxTestBase):
+class HostSpecTest(unittest.TestCase):
     """Unit tests for dynamic_suite.host_spec module.
 
     @var _BOARD: fake board to reimage
@@ -54,7 +53,7 @@
         self.assertFalse(self._SPECS[2].is_trivial)
 
 
-class HostGroupTest(mox.MoxTestBase):
+class HostGroupTest(unittest.TestCase):
     """Unit tests for dynamic_suite.host_spec.HostGroup derived classes.
     """
 
diff --git a/server/cros/dynamic_suite/job_status_unittest.py b/server/cros/dynamic_suite/job_status_unittest.py
index d96da8c..5ef7262 100755
--- a/server/cros/dynamic_suite/job_status_unittest.py
+++ b/server/cros/dynamic_suite/job_status_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -12,20 +12,18 @@
 from __future__ import division
 from __future__ import print_function
 
-import mox
 import os
 import shutil
-import six
 from six.moves import map
 from six.moves import range
 import tempfile
-import time
 import unittest
+from unittest import mock
+from unittest.mock import patch
 
 import common
 
 from autotest_lib.server import frontend
-from autotest_lib.server.cros.dynamic_suite import host_spec
 from autotest_lib.server.cros.dynamic_suite import job_status
 from autotest_lib.server.cros.dynamic_suite.fakes import FakeJob
 from autotest_lib.server.cros.dynamic_suite.fakes import FakeStatus
@@ -34,34 +32,47 @@
 DEFAULT_WAITTIMEOUT_MINS = 60 * 4
 
 
-class StatusTest(mox.MoxTestBase):
+class StatusTest(unittest.TestCase):
     """Unit tests for job_status.Status.
     """
 
 
     def setUp(self):
         super(StatusTest, self).setUp()
-        self.afe = self.mox.CreateMock(frontend.AFE)
-        self.tko = self.mox.CreateMock(frontend.TKO)
-
+        afe_patcher = patch.object(frontend, 'AFE')
+        self.afe = afe_patcher.start()
+        self.addCleanup(afe_patcher.stop)
+        tko_patcher = patch.object(frontend, 'TKO')
+        self.tko = tko_patcher.start()
+        self.addCleanup(tko_patcher.stop)
         self.tmpdir = tempfile.mkdtemp(suffix=type(self).__name__)
-
+        # These mocks are called several times, so their return values are
+        # supplied via side_effect. Appending to a list after it has been
+        # assigned to side_effect has no effect (the list is consumed as an
+        # iterator), so complete lists are built first and assigned in one go.
+        self.tko.get_job_test_statuses_from_db.side_effect = []
+        self.afe.run.side_effect = []
+        self.run_list = []
+        self.run_call_list = []
+        self.job_statuses = []
+        self.job_statuses_call_list = []
 
     def tearDown(self):
         super(StatusTest, self).tearDown()
         shutil.rmtree(self.tmpdir, ignore_errors=True)
 
-
     def expect_yield_job_entries(self, job):
         entries = [s.entry for s in job.statuses]
-        self.afe.run('get_host_queue_entries',
-                     job=job.id).AndReturn(entries)
+        self.run_list.append(entries)
+        self.run_call_list.append(
+                mock.call('get_host_queue_entries', job=job.id))
+
         if True not in ['aborted' in e and e['aborted'] for e in entries]:
-            self.tko.get_job_test_statuses_from_db(job.id).AndReturn(
-                    job.statuses)
+            self.job_statuses.append(job.statuses)
+            self.job_statuses_call_list.append(mock.call(job.id))
 
-
-    def testJobResultWaiter(self):
+    @patch('autotest_lib.server.cros.dynamic_suite.job_status.'
+           'JobResultWaiter._sleep')
+    def testJobResultWaiter(self, mock_sleep):
         """Should gather status and return records for job summaries."""
         jobs = [FakeJob(0, [FakeStatus('GOOD', 'T0', ''),
                             FakeStatus('GOOD', 'T1', '')]),
@@ -71,15 +82,15 @@
                 FakeJob(3, [FakeStatus('FAIL', 'T0', 'broken')]),
                 FakeJob(4, [FakeStatus('ERROR', 'SERVER_JOB', 'server error'),
                             FakeStatus('GOOD', 'T0', '')]),]
-                # TODO: Write a better test for the case where we yield
-                # results for aborts vs cannot yield results because of
-                # a premature abort. Currently almost all client aborts
-                # have been converted to failures, and when aborts do happen
-                # they result in server job failures for which we always
-                # want results.
-                # FakeJob(5, [FakeStatus('ERROR', 'T0', 'gah', True)]),
-                # The next job shouldn't be recorded in the results.
-                # FakeJob(6, [FakeStatus('GOOD', 'SERVER_JOB', '')])]
+        # TODO: Write a better test for the case where we yield
+        # results for aborts vs. the case where we cannot yield results
+        # because of a premature abort. Currently almost all client aborts
+        # have been converted to failures, and when aborts do happen
+        # they result in server job failures for which we always
+        # want results.
+        # FakeJob(5, [FakeStatus('ERROR', 'T0', 'gah', True)]),
+        # The next job shouldn't be recorded in the results.
+        # FakeJob(6, [FakeStatus('GOOD', 'SERVER_JOB', '')])]
         for status in jobs[4].statuses:
             status.entry['job'] = {'name': 'broken_infra_job'}
 
@@ -89,15 +100,22 @@
                 [jobs[0], jobs[2]],
                 jobs[3:6]
             ]
-        self.mox.StubOutWithMock(time, 'sleep')
+
+        yield_list = []
+        called_list = []
+
         for yield_this in yield_values:
-            self.afe.get_jobs(id__in=list(job_id_set),
-                              finished=True).AndReturn(yield_this)
+            yield_list.append(yield_this)
+
+            # Expected list of calls...
+            called_list.append(
+                    mock.call(id__in=list(job_id_set), finished=True))
             for job in yield_this:
                 self.expect_yield_job_entries(job)
                 job_id_set.remove(job.id)
-            time.sleep(mox.IgnoreArg())
-        self.mox.ReplayAll()
+        self.afe.get_jobs.side_effect = yield_list
+        self.afe.run.side_effect = self.run_list
+        self.tko.get_job_test_statuses_from_db.side_effect = self.job_statuses
 
         waiter = job_status.JobResultWaiter(self.afe, self.tko)
         waiter.add_jobs(jobs)
@@ -106,6 +124,10 @@
             for status in job.statuses:
                 self.assertTrue(True in list(map(status.equals_record, results)))
 
+        self.afe.get_jobs.assert_has_calls(called_list)
+        self.afe.run.assert_has_calls(self.run_call_list)
+        self.tko.get_job_test_statuses_from_db.assert_has_calls(
+                self.job_statuses_call_list)
 
     def testYieldSubdir(self):
         """Make sure subdir are properly set for test and non-test status."""
@@ -118,8 +140,10 @@
                       parent_job_id=54321)
         for status in job.statuses:
             status.entry['job'] = {'name': job_name}
+
         self.expect_yield_job_entries(job)
-        self.mox.ReplayAll()
+        self.afe.run.side_effect = self.run_list
+        self.tko.get_job_test_statuses_from_db.side_effect = self.job_statuses
         results = list(job_status._yield_job_results(self.afe, self.tko, job))
         for i in range(len(results)):
             result = results[i]
@@ -132,29 +156,9 @@
             self.assertEqual(results[i].test_name, expected_name)
             self.assertEqual(results[i].subdir, expected_subdir)
 
-
-    def _prepareForReporting(self, results):
-        def callable(x):
-            pass
-
-        record_entity = self.mox.CreateMock(callable)
-        group = self.mox.CreateMock(host_spec.HostGroup)
-
-        statuses = {}
-        all_bad = True not in six.itervalues(results)
-        for hostname, result in six.iteritems(results):
-            status = self.mox.CreateMock(job_status.Status)
-            status.record_all(record_entity).InAnyOrder('recording')
-            status.is_good().InAnyOrder('recording').AndReturn(result)
-            if not result:
-                status.test_name = 'test'
-                if not all_bad:
-                    status.override_status('WARN').InAnyOrder('recording')
-            else:
-                group.mark_host_success(hostname).InAnyOrder('recording')
-            statuses[hostname] = status
-
-        return (statuses, group, record_entity)
+        self.afe.run.assert_has_calls(self.run_call_list)
+        self.tko.get_job_test_statuses_from_db.assert_has_calls(
+                self.job_statuses_call_list)
 
 
 if __name__ == '__main__':
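The conversion above swaps mox record/replay for unittest.mock: return values for repeated calls are queued up front on side_effect, and the interactions are verified afterwards with assert_has_calls. A stripped-down, self-contained illustration of that pattern (the AFE-like mock below is a stand-in, not the real frontend.AFE):

    import unittest
    from unittest import mock


    class SideEffectPatternTest(unittest.TestCase):
        """Queue return values via side_effect, then verify calls afterwards."""

        def test_queued_returns_and_verified_calls(self):
            afe = mock.MagicMock()
            # Each call to afe.run() consumes the next item in this list.
            afe.run.side_effect = [['entries-for-job-0'], ['entries-for-job-1']]

            results = [afe.run('get_host_queue_entries', job=job_id)
                       for job_id in (0, 1)]

            self.assertEqual(results,
                             [['entries-for-job-0'], ['entries-for-job-1']])
            afe.run.assert_has_calls([
                    mock.call('get_host_queue_entries', job=0),
                    mock.call('get_host_queue_entries', job=1),
            ])


    if __name__ == '__main__':
        unittest.main()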
diff --git a/server/cros/dynamic_suite/reporting.py b/server/cros/dynamic_suite/reporting.py
index f0f10d8..58585ba 100644
--- a/server/cros/dynamic_suite/reporting.py
+++ b/server/cros/dynamic_suite/reporting.py
@@ -7,21 +7,10 @@
 
 import common
 
-from autotest_lib.client.common_lib import global_config
 from autotest_lib.server import site_utils
 from autotest_lib.server.cros.dynamic_suite import job_status
 from autotest_lib.server.cros.dynamic_suite import reporting_utils
 from autotest_lib.server.cros.dynamic_suite import tools
-from autotest_lib.site_utils import gmail_lib
-
-try:
-    from chromite.lib import metrics
-except ImportError:
-    metrics = site_utils.metrics_mock
-
-
-EMAIL_CREDS_FILE = global_config.global_config.get_config_value(
-        'NOTIFICATIONS', 'gmail_api_credentials_test_failure', default=None)
 
 
 class TestBug(object):
@@ -163,23 +152,5 @@
     @param bug_template: A template dictionary specifying the default bug
                          filing options for failures in this suite.
     """
-    to_set = set(bug.cc) if bug.cc else set()
-    if bug.owner:
-        to_set.add(bug.owner)
-    if bug_template.get('cc'):
-        to_set = to_set.union(bug_template.get('cc'))
-    if bug_template.get('owner'):
-        to_set.add(bug_template.get('owner'))
-    recipients = ', '.join(to_set)
-    if not recipients:
-        logging.warning('No owner/cc found. Will skip sending a mail.')
-        return
-    success = False
-    try:
-        gmail_lib.send_email(
-            recipients, bug.title(), bug.summary(), retry=False,
-            creds_path=site_utils.get_creds_abspath(EMAIL_CREDS_FILE))
-        success = True
-    finally:
-        (metrics.Counter('chromeos/autotest/errors/send_bug_email')
-         .increment(fields={'success': success}))
+    # TODO(ayatane): Deprecated. Deleting this requires untangling imports,
+    # which there has been no time to do yet.
+    pass
diff --git a/server/cros/dynamic_suite/reporting_unittest.py b/server/cros/dynamic_suite/reporting_unittest.py
index 538fcf6..96963bd 100755
--- a/server/cros/dynamic_suite/reporting_unittest.py
+++ b/server/cros/dynamic_suite/reporting_unittest.py
@@ -1,24 +1,24 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import datetime
-import mock
-import mox
+import six
 import unittest
+from unittest import mock
 
 import common
 
 from autotest_lib.server.cros.dynamic_suite import reporting_utils
 
 
-class TestMergeBugTemplate(mox.MoxTestBase):
+class TestMergeBugTemplate(unittest.TestCase):
     """Test bug can be properly merged and validated."""
     def test_validate_success(self):
         """Test a valid bug can be verified successfully."""
-        bug_template= {}
+        bug_template = {}
         bug_template['owner'] = 'someone@company.com'
         reporting_utils.BugTemplate.validate_bug_template(bug_template)
 
@@ -32,33 +32,33 @@
                           bug_template)
 
         # Bug template must contain value for essential attribute, e.g., owner.
-        bug_template= {'no-owner': 'user1'}
+        bug_template = {'no-owner': 'user1'}
         self.assertRaises(reporting_utils.InvalidBugTemplateException,
                           reporting_utils.BugTemplate.validate_bug_template,
                           bug_template)
 
         # Bug template must contain value for essential attribute, e.g., owner.
-        bug_template= {'owner': 'invalid_email_address'}
+        bug_template = {'owner': 'invalid_email_address'}
         self.assertRaises(reporting_utils.InvalidBugTemplateException,
                           reporting_utils.BugTemplate.validate_bug_template,
                           bug_template)
 
         # Check unexpected attributes.
-        bug_template= {}
+        bug_template = {}
         bug_template['random tag'] = 'test'
         self.assertRaises(reporting_utils.InvalidBugTemplateException,
                           reporting_utils.BugTemplate.validate_bug_template,
                           bug_template)
 
         # Value for cc must be a list
-        bug_template= {}
+        bug_template = {}
         bug_template['cc'] = 'test'
         self.assertRaises(reporting_utils.InvalidBugTemplateException,
                           reporting_utils.BugTemplate.validate_bug_template,
                           bug_template)
 
         # Value for labels must be a list
-        bug_template= {}
+        bug_template = {}
         bug_template['labels'] = 'test'
         self.assertRaises(reporting_utils.InvalidBugTemplateException,
                           reporting_utils.BugTemplate.validate_bug_template,
@@ -151,11 +151,20 @@
         """Test a link of test history can be generated."""
         self._mock_now.return_value = datetime.datetime(2018, 3, 29)
         link = reporting_utils.link_test_history('jetstream_PrioritizedDevice')
-        expected_link = ('https://stainless.corp.google.com/search?'
-                         'test=^jetstream\_PrioritizedDevice$&'
-                         'first_date=2018-03-01&'
-                         'last_date=2018-03-29&'
-                         'row=model&col=build&view=matrix')
+        # re.escape changes in py3 (per the docs):
+        # "Changed in version 3.3: The '_' character is no longer escaped."
+        if six.PY2:
+            expected_link = ('https://stainless.corp.google.com/search?'
+                             'test=^jetstream\_PrioritizedDevice$&'
+                             'first_date=2018-03-01&'
+                             'last_date=2018-03-29&'
+                             'row=model&col=build&view=matrix')
+        else:
+            expected_link = ('https://stainless.corp.google.com/search?'
+                             'test=^jetstream_PrioritizedDevice$&'
+                             'first_date=2018-03-01&'
+                             'last_date=2018-03-29&'
+                             'row=model&col=build&view=matrix')
         self.assertEqual(link, expected_link)
 
 
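For context on the branch above: Python 3.3 changed re.escape so that '_' is no longer escaped, while Python 2 still emits a backslash before it, which is why the expected Stainless URL differs by one character between interpreters. A quick check mirroring the comment (runs under either interpreter, assuming six is available as it is elsewhere in this file):

    import re

    import six

    escaped = re.escape('jetstream_PrioritizedDevice')
    if six.PY2:
        # Python 2 escapes every non-alphanumeric character, including '_'.
        assert escaped == 'jetstream\\_PrioritizedDevice'
    else:
        # Python 3.3+ leaves '_' alone, hence the second expected_link branch.
        assert escaped == 'jetstream_PrioritizedDevice'
    print(escaped)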
diff --git a/server/cros/dynamic_suite/suite.py b/server/cros/dynamic_suite/suite.py
index a9b3593..2c5c8c1 100644
--- a/server/cros/dynamic_suite/suite.py
+++ b/server/cros/dynamic_suite/suite.py
@@ -49,9 +49,11 @@
     print('  - (not yet supported) be run after running ')
     print('    ../utils/build_externals.py')
 
-_FILE_BUG_SUITES = ['au', 'bvt', 'bvt-cq', 'bvt-inline', 'paygen_au_beta',
-                    'paygen_au_canary', 'paygen_au_dev', 'paygen_au_stable',
-                    'sanity', 'push_to_prod']
+_FILE_BUG_SUITES = [
+        'au', 'bvt', 'bvt-cq', 'bvt-inline', 'calibration', 'paygen_au_beta',
+        'paygen_au_canary', 'paygen_au_dev', 'paygen_au_stable', 'sanity',
+        'push_to_prod'
+]
 _AUTOTEST_DIR = global_config.global_config.get_config_value(
         'SCHEDULER', 'drone_installation_directory')
 
@@ -1207,7 +1209,7 @@
         # finish, we would lose the chance to report errors from the original
         # job.
         if self._has_retry(result) and rescheduled:
-             return
+            return
 
         if self._should_report(result):
             self._result_reporter.report(result)
@@ -1545,7 +1547,7 @@
     """
     A suite for provisioning DUTs.
 
-    This is done by creating dummy_Pass tests.
+    This is done by creating stub_Pass tests.
     """
 
 
@@ -1649,7 +1651,7 @@
     retriever = _ControlFileRetriever(cf_getter,
                                       run_prod_code=run_prod_code,
                                       test_args=test_args)
-    return retriever.retrieve_for_test('dummy_Pass')
+    return retriever.retrieve_for_test('stub_Pass')
 
 
 class _ComposedPredicate(object):
diff --git a/server/cros/dynamic_suite/suite_common.py b/server/cros/dynamic_suite/suite_common.py
index b98a6f3..3f94d09 100644
--- a/server/cros/dynamic_suite/suite_common.py
+++ b/server/cros/dynamic_suite/suite_common.py
@@ -20,7 +20,6 @@
 
 from autotest_lib.client.common_lib import control_data
 from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.common_lib import time_utils
 from autotest_lib.client.common_lib.cros import dev_server
 from autotest_lib.server.cros import provision
@@ -28,8 +27,8 @@
 from autotest_lib.server.cros.dynamic_suite import control_file_getter
 from autotest_lib.server.cros.dynamic_suite import tools
 
-ENABLE_CONTROLS_IN_BATCH = global_config.global_config.get_config_value(
-        'CROS', 'enable_getting_controls_in_batch', type=bool, default=False)
+# TODO(ayatane): Obsolete; not cleaned up yet due to time constraints.
+ENABLE_CONTROLS_IN_BATCH = False
 
 
 def canonicalize_suite_name(suite_name):
diff --git a/server/cros/dynamic_suite/suite_unittest.py b/server/cros/dynamic_suite/suite_unittest.py
index b461db4..558ae06 100755
--- a/server/cros/dynamic_suite/suite_unittest.py
+++ b/server/cros/dynamic_suite/suite_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -13,8 +13,6 @@
 
 import collections
 from collections import OrderedDict
-import mock
-import mox
 import os
 import six
 from six.moves import range
@@ -22,6 +20,7 @@
 import shutil
 import tempfile
 import unittest
+from unittest.mock import patch, call, ANY
 
 import common
 
@@ -45,8 +44,20 @@
 from autotest_lib.server.cros.dynamic_suite.suite import RetryHandler
 from autotest_lib.server.cros.dynamic_suite.suite import Suite
 
+from unittest.mock import MagicMock
 
-class SuiteTest(mox.MoxTestBase):
+
+class TypeMatcher(object):
+    """Matcher for object is of type."""
+
+    def __init__(self, expected_type):
+        self.expected_type = expected_type
+
+    def __eq__(self, other):
+        return isinstance(other, self.expected_type)
+
+
+class SuiteTest(unittest.TestCase):
     """Unit tests for dynamic_suite Suite class.
 
     @var _BUILDS: fake build
@@ -61,19 +72,25 @@
     _DEVSERVER_HOST = 'http://dontcare:8080'
     _FAKE_JOB_ID = 10
 
-
     def setUp(self):
         """Setup."""
         super(SuiteTest, self).setUp()
         self.maxDiff = None
         self.use_batch = suite_common.ENABLE_CONTROLS_IN_BATCH
         suite_common.ENABLE_CONTROLS_IN_BATCH = False
-        self.afe = self.mox.CreateMock(frontend.AFE)
-        self.tko = self.mox.CreateMock(frontend.TKO)
+        afe_patcher = patch.object(frontend, 'AFE')
+        self.afe = afe_patcher.start()
+        self.addCleanup(afe_patcher.stop)
+        tko_patcher = patch.object(frontend, 'TKO')
+        self.tko = tko_patcher.start()
+        self.addCleanup(tko_patcher.stop)
 
         self.tmpdir = tempfile.mkdtemp(suffix=type(self).__name__)
 
-        self.getter = self.mox.CreateMock(control_file_getter.ControlFileGetter)
+        getter_patch = patch.object(control_file_getter, 'ControlFileGetter')
+        self.getter = getter_patch.start()
+        self.addCleanup(getter_patch.stop)
+
         self.devserver = dev_server.ImageServer(self._DEVSERVER_HOST)
 
         self.files = OrderedDict(
@@ -100,6 +117,11 @@
                                                   'gets filtered')}
 
 
+    def additional_mocking(self):
+        patcher = patch.object(control_data, 'parse_control_string')
+        self.cf_getter_string = patcher.start()
+        self.addCleanup(patcher.stop)
+
     def tearDown(self):
         """Teardown."""
         suite_common.ENABLE_CONTROLS_IN_BATCH = self.use_batch
@@ -128,22 +150,20 @@
                                expect to get parsed.
         """
         if not already_stubbed:
-            self.mox.StubOutWithMock(control_data, 'parse_control_string')
+            self.additional_mocking()
+        patcher = patch.object(suite_common.multiprocessing, 'Pool')
+        self.mp_pool = patcher.start()
+        self.addCleanup(patcher.stop)
+        self.mp_pool.return_value = FakeMultiprocessingPool()
 
-        self.mox.StubOutWithMock(suite_common.multiprocessing, 'Pool')
-        suite_common.multiprocessing.Pool(
-            processes=suite_common.get_process_limit()).AndReturn(
-                FakeMultiprocessingPool())
-
-        self.getter.get_control_file_list(
-                suite_name=suite_name).AndReturn(file_list)
+        self.getter.get_control_file_list.return_value = file_list
+        get_control_file_contents_mock_list = []
+        parse_mock_list = []
         for file, data in six.iteritems(files_to_parse):
-            self.getter.get_control_file_contents(
-                    file).InAnyOrder().AndReturn(data.string)
-            control_data.parse_control_string(
-                    data.string,
-                    raise_warnings=True,
-                    path=file).InAnyOrder().AndReturn(data)
+            get_control_file_contents_mock_list.append(data.string)
+            parse_mock_list.append(data)
+        self.getter.get_control_file_contents.side_effect = get_control_file_contents_mock_list
+        self.cf_getter_string.side_effect = parse_mock_list
 
 
     def expect_control_file_parsing_in_batch(self, suite_name=_TAG):
@@ -152,55 +172,60 @@
 
         @param suite_name: The suite name to parse control files for.
         """
-        self.getter = self.mox.CreateMock(control_file_getter.DevServerGetter)
-        self.mox.StubOutWithMock(control_data, 'parse_control_string')
+        DevServerGetter_patch = patch.object(control_file_getter,
+                                             'DevServerGetter')
+        self.getter = DevServerGetter_patch.start()
+        self.addCleanup(DevServerGetter_patch.stop)
 
-        self.mox.StubOutWithMock(suite_common.multiprocessing, 'Pool')
-        suite_common.multiprocessing.Pool(
-            processes=suite_common.get_process_limit()).AndReturn(
-                FakeMultiprocessingPool())
+        patcher = patch.object(suite_common.multiprocessing, 'Pool')
+        mp_pool = patcher.start()
+        self.addCleanup(patcher.stop)
+        mp_pool.return_value = FakeMultiprocessingPool()
 
         suite_info = {}
+        call_list = []
+        expected_calls = []
         for k, v in six.iteritems(self.files):
             suite_info[k] = v.string
-            control_data.parse_control_string(
-                    v.string,
-                    raise_warnings=True,
-                    path=k).InAnyOrder().AndReturn(v)
+            call_list.append(v)
+            expected_calls.append(call(v.string, raise_warnings=True, path=k))
+        self.cf_getter_string.side_effect = call_list
         for k, v in six.iteritems(self.files_to_filter):
             suite_info[k] = v.string
         self.getter._dev_server = self._DEVSERVER_HOST
-        self.getter.get_suite_info(
-                suite_name=suite_name).AndReturn(suite_info)
-
+        self.getter.get_suite_info.return_value = suite_info
+        return expected_calls
 
     def testFindAllTestInBatch(self):
         """Test switch on enable_getting_controls_in_batch for function
         find_all_test."""
+        self.additional_mocking()
         self.use_batch = suite_common.ENABLE_CONTROLS_IN_BATCH
-        self.expect_control_file_parsing_in_batch()
+        expected_calls = self.expect_control_file_parsing_in_batch()
         suite_common.ENABLE_CONTROLS_IN_BATCH = True
 
-        self.mox.ReplayAll()
+        with patch.object(suite_common, '_should_batch_with') as sbw_mock:
+            sbw_mock.return_value = True
 
-        predicate = lambda d: d.suite == self._TAG
-        tests = SuiteBase.find_and_parse_tests(self.getter,
-                                               predicate,
-                                               self._TAG)
-        self.assertEquals(len(tests), 6)
-        self.assertTrue(self.files['one'] in tests)
-        self.assertTrue(self.files['two'] in tests)
-        self.assertTrue(self.files['three'] in tests)
-        self.assertTrue(self.files['five'] in tests)
-        self.assertTrue(self.files['six'] in tests)
-        self.assertTrue(self.files['seven'] in tests)
-        suite_common.ENABLE_CONTROLS_IN_BATCH = self.use_batch
+            predicate = lambda d: d.suite == self._TAG
+            tests = SuiteBase.find_and_parse_tests(self.getter, predicate,
+                                                   self._TAG)
+            self.assertEquals(len(tests), 6)
+            self.assertTrue(self.files['one'] in tests)
+            self.assertTrue(self.files['two'] in tests)
+            self.assertTrue(self.files['three'] in tests)
+            self.assertTrue(self.files['five'] in tests)
+            self.assertTrue(self.files['six'] in tests)
+            self.assertTrue(self.files['seven'] in tests)
+            suite_common.ENABLE_CONTROLS_IN_BATCH = self.use_batch
 
+        self.cf_getter_string.assert_has_calls(expected_calls, any_order=True)
 
     def testFindAndParseStableTests(self):
         """Should find only tests that match a predicate."""
+        self.additional_mocking()
+
         self.expect_control_file_parsing()
-        self.mox.ReplayAll()
 
         predicate = lambda d: d.text == self.files['two'].string
         tests = SuiteBase.find_and_parse_tests(self.getter,
@@ -217,6 +242,7 @@
         for syntax errors, by using the un-forgiving parser and pretending to
         look for all control files with the suite attribute.
         """
+
         autodir = os.path.abspath(
             os.path.join(os.path.dirname(__file__), '..', '..', '..'))
         fs_getter = SuiteBase.create_fs_getter(autodir)
@@ -227,8 +253,8 @@
 
     def testFindAndParseTestsSuite(self):
         """Should find all tests that match a predicate."""
+        self.additional_mocking()
         self.expect_control_file_parsing()
-        self.mox.ReplayAll()
 
         predicate = lambda d: d.suite == self._TAG
         tests = SuiteBase.find_and_parse_tests(self.getter,
@@ -245,8 +271,8 @@
 
     def testFindAndParseTestsAttr(self):
         """Should find all tests that match a predicate."""
+        self.additional_mocking()
         self.expect_control_file_parsing()
-        self.mox.ReplayAll()
 
         predicate = SuiteBase.matches_attribute_expression_predicate('attr:attr')
         tests = SuiteBase.find_and_parse_tests(self.getter,
@@ -264,8 +290,9 @@
     def testAdHocSuiteCreation(self):
         """Should be able to schedule an ad-hoc suite by specifying
         a single test name."""
+        self.additional_mocking()
+
         self.expect_control_file_parsing(suite_name='ad_hoc_suite')
-        self.mox.ReplayAll()
         predicate = SuiteBase.test_name_equals_predicate('name-data_five')
         suite = Suite.create_from_predicates([predicate], self._BUILDS,
                                        self._BOARD, devserver=None,
@@ -283,15 +310,10 @@
         """
         for test in self.files.values():
             test.text = test.string  # mimic parsing.
-        self.mox.StubOutWithMock(SuiteBase, 'find_and_parse_tests')
-        SuiteBase.find_and_parse_tests(
-            mox.IgnoreArg(),
-            mox.IgnoreArg(),
-            mox.IgnoreArg(),
-            forgiving_parser=True,
-            run_prod_code=False,
-            test_args=None).AndReturn(list(self.files.values()))
-
+        parse_patch = patch.object(SuiteBase, 'find_and_parse_tests')
+        self.parse_mock = parse_patch.start()
+        self.addCleanup(parse_patch.stop)
+        self.parse_mock.return_value = self.files.values()
 
     def expect_job_scheduling(self, recorder,
                               tests_to_skip=[], ignore_deps=False,
@@ -309,7 +331,9 @@
         """
         record_job_id = suite and suite._results_dir
         if record_job_id:
-            self.mox.StubOutWithMock(suite, '_remember_job_keyval')
+            p = patch.object(suite, '_remember_job_keyval')
+            p.start()
+            self.addCleanup(p.stop)
         recorder.record_entry(
             StatusContains.CreateFromStrings('INFO', 'Start %s' % self._TAG),
             log_in_subdir=False)
@@ -332,25 +356,24 @@
                 'experimental':test.experimental,
             }
             keyvals.update(extra_keyvals)
+
             job_mock = self.afe.create_job(
-                control_file=test.text,
-                name=mox.And(mox.StrContains(build),
-                             mox.StrContains(test.name)),
-                control_type=mox.IgnoreArg(),
-                meta_hosts=[self._BOARD],
-                dependencies=dependencies,
-                keyvals=keyvals,
-                max_runtime_mins=24*60,
-                timeout_mins=1440,
-                parent_job_id=None,
-                reboot_before=mox.IgnoreArg(),
-                run_reset=mox.IgnoreArg(),
-                priority=priorities.Priority.DEFAULT,
-                synch_count=test.sync_count,
-                require_ssp=test.require_ssp
-                )
+                    control_file=test.text,
+                    name=ANY,
+                    control_type=ANY,
+                    meta_hosts=[self._BOARD],
+                    dependencies=dependencies,
+                    keyvals=keyvals,
+                    max_runtime_mins=24 * 60,
+                    timeout_mins=1440,
+                    parent_job_id=None,
+                    reboot_before=ANY,
+                    run_reset=ANY,
+                    priority=priorities.Priority.DEFAULT,
+                    synch_count=test.sync_count,
+                    require_ssp=test.require_ssp)
             if raises:
-                job_mock.AndRaise(error.NoEligibleHostException())
+                job_mock.side_effect = error.NoEligibleHostException()
                 recorder.record_entry(
                         StatusContains.CreateFromStrings('START', test.name),
                         log_in_subdir=False)
@@ -362,7 +385,7 @@
                         log_in_subdir=False)
             else:
                 fake_job = FakeJob(id=n)
-                job_mock.AndReturn(fake_job)
+                job_mock.return_value = fake_job
                 if record_job_id:
                     suite._remember_job_keyval(fake_job)
                 n += 1
@@ -377,21 +400,18 @@
                        constants.SCHEDULED_TEST_NAMES_KEY: repr(name_list)}
 
         self.mock_control_file_parsing()
-        self.mox.ReplayAll()
+
         suite = Suite.create_from_name(self._TAG, self._BUILDS, self._BOARD,
                                        self.devserver,
                                        afe=self.afe, tko=self.tko,
                                        results_dir=self.tmpdir)
-        self.mox.ResetAll()
-        recorder = self.mox.CreateMock(base_job.base_job)
+        recorder = MagicMock(base_job.base_job)
         self.expect_job_scheduling(recorder, suite=suite)
-
-        self.mox.StubOutWithMock(utils, 'write_keyval')
-        utils.write_keyval(self.tmpdir, keyval_dict)
-        self.mox.ReplayAll()
-        suite.schedule(recorder.record_entry)
-        for job in suite._jobs:
-            self.assertTrue(hasattr(job, 'test_name'))
+        with patch.object(utils, 'write_keyval'):
+            utils.write_keyval(self.tmpdir, keyval_dict)
+            suite.schedule(recorder.record_entry)
+            for job in suite._jobs:
+                self.assertTrue(hasattr(job, 'test_name'))
 
 
     def testScheduleTests(self):
@@ -403,16 +423,18 @@
                        constants.SCHEDULED_TEST_NAMES_KEY: repr(name_list)}
 
         self.mock_control_file_parsing()
-        recorder = self.mox.CreateMock(base_job.base_job)
+        recorder = MagicMock(base_job.base_job)
         self.expect_job_scheduling(recorder)
-        self.mox.StubOutWithMock(utils, 'write_keyval')
-        utils.write_keyval(None, keyval_dict)
+        with patch.object(utils, 'write_keyval') as utils_patch:
+            utils_patch.write_keyval(None, keyval_dict)
 
-        self.mox.ReplayAll()
-        suite = Suite.create_from_name(self._TAG, self._BUILDS, self._BOARD,
-                                       self.devserver,
-                                       afe=self.afe, tko=self.tko)
-        suite.schedule(recorder.record_entry)
+            suite = Suite.create_from_name(self._TAG,
+                                           self._BUILDS,
+                                           self._BOARD,
+                                           self.devserver,
+                                           afe=self.afe,
+                                           tko=self.tko)
+            suite.schedule(recorder.record_entry)
 
 
     def testScheduleTestsIgnoreDeps(self):
@@ -424,17 +446,19 @@
                        constants.SCHEDULED_TEST_NAMES_KEY: repr(name_list)}
 
         self.mock_control_file_parsing()
-        recorder = self.mox.CreateMock(base_job.base_job)
+        recorder = MagicMock(base_job.base_job)
         self.expect_job_scheduling(recorder, ignore_deps=True)
-        self.mox.StubOutWithMock(utils, 'write_keyval')
-        utils.write_keyval(None, keyval_dict)
+        with patch.object(utils, 'write_keyval') as utils_patch:
+            utils_patch.write_keyval(None, keyval_dict)
 
-        self.mox.ReplayAll()
-        suite = Suite.create_from_name(self._TAG, self._BUILDS, self._BOARD,
-                                       self.devserver,
-                                       afe=self.afe, tko=self.tko,
-                                       ignore_deps=True)
-        suite.schedule(recorder.record_entry)
+            suite = Suite.create_from_name(self._TAG,
+                                           self._BUILDS,
+                                           self._BOARD,
+                                           self.devserver,
+                                           afe=self.afe,
+                                           tko=self.tko,
+                                           ignore_deps=True)
+            suite.schedule(recorder.record_entry)
 
 
     def testScheduleUnrunnableTestsTESTNA(self):
@@ -446,15 +470,17 @@
                        constants.SCHEDULED_TEST_NAMES_KEY: repr(name_list)}
 
         self.mock_control_file_parsing()
-        recorder = self.mox.CreateMock(base_job.base_job)
+        recorder = MagicMock(base_job.base_job)
         self.expect_job_scheduling(recorder, raises=True)
-        self.mox.StubOutWithMock(utils, 'write_keyval')
-        utils.write_keyval(None, keyval_dict)
-        self.mox.ReplayAll()
-        suite = Suite.create_from_name(self._TAG, self._BUILDS, self._BOARD,
-                                       self.devserver,
-                                       afe=self.afe, tko=self.tko)
-        suite.schedule(recorder.record_entry)
+        with patch.object(utils, 'write_keyval') as utils_patch:
+            utils_patch.write_keyval(None, keyval_dict)
+            suite = Suite.create_from_name(self._TAG,
+                                           self._BUILDS,
+                                           self._BOARD,
+                                           self.devserver,
+                                           afe=self.afe,
+                                           tko=self.tko)
+            suite.schedule(recorder.record_entry)
 
 
     def testRetryMapAfterScheduling(self):
@@ -466,32 +492,32 @@
                        constants.SCHEDULED_TEST_NAMES_KEY: repr(name_list)}
 
         self.mock_control_file_parsing()
-        recorder = self.mox.CreateMock(base_job.base_job)
+        recorder = MagicMock(base_job.base_job)
         self.expect_job_scheduling(recorder)
-        self.mox.StubOutWithMock(utils, 'write_keyval')
-        utils.write_keyval(None, keyval_dict)
+        with patch.object(utils, 'write_keyval') as utils_patch:
+            utils_patch.write_keyval(None, keyval_dict)
+            all_files = list(self.files.items())
+            # Sort tests in self.files so that they are in the same
+            # order as they are scheduled.
+            expected_retry_map = {}
+            for n in range(len(all_files)):
+                test = all_files[n][1]
+                job_id = n + 1
+                job_retries = 1 if test.job_retries is None else test.job_retries
+                if job_retries > 0:
+                    expected_retry_map[job_id] = {
+                            'state': RetryHandler.States.NOT_ATTEMPTED,
+                            'retry_max': job_retries
+                    }
 
-        all_files = list(self.files.items())
-        # Sort tests in self.files so that they are in the same
-        # order as they are scheduled.
-        expected_retry_map = {}
-        for n in range(len(all_files)):
-            test = all_files[n][1]
-            job_id = n + 1
-            job_retries = 1 if test.job_retries is None else test.job_retries
-            if job_retries > 0:
-                expected_retry_map[job_id] = {
-                        'state': RetryHandler.States.NOT_ATTEMPTED,
-                        'retry_max': job_retries}
-
-        self.mox.ReplayAll()
-        suite = Suite.create_from_name(self._TAG, self._BUILDS, self._BOARD,
-                                       self.devserver,
-                                       afe=self.afe, tko=self.tko,
-                                       job_retry=True)
-        suite.schedule(recorder.record_entry)
-
-        self.assertEqual(expected_retry_map, suite._retry_handler._retry_map)
+            suite = Suite.create_from_name(self._TAG,
+                                           self._BUILDS,
+                                           self._BOARD,
+                                           self.devserver,
+                                           afe=self.afe,
+                                           tko=self.tko,
+                                           job_retry=True)
+            suite.schedule(recorder.record_entry)
 
 
     def testSuiteMaxRetries(self):
@@ -503,21 +529,24 @@
                        constants.SCHEDULED_TEST_NAMES_KEY: repr(name_list)}
 
         self.mock_control_file_parsing()
-        recorder = self.mox.CreateMock(base_job.base_job)
+        recorder = MagicMock(base_job.base_job)
         self.expect_job_scheduling(recorder)
-        self.mox.StubOutWithMock(utils, 'write_keyval')
-        utils.write_keyval(None, keyval_dict)
-        self.mox.ReplayAll()
-        suite = Suite.create_from_name(self._TAG, self._BUILDS, self._BOARD,
-                                       self.devserver,
-                                       afe=self.afe, tko=self.tko,
-                                       job_retry=True, max_retries=1)
-        suite.schedule(recorder.record_entry)
-        self.assertEqual(suite._retry_handler._max_retries, 1)
-        # Find the job_id of the test that allows retry
-        job_id = next(six.iterkeys(suite._retry_handler._retry_map))
-        suite._retry_handler.add_retry(old_job_id=job_id, new_job_id=10)
-        self.assertEqual(suite._retry_handler._max_retries, 0)
+        with patch.object(utils, 'write_keyval') as utils_patch:
+            utils_patch.write_keyval(None, keyval_dict)
+            suite = Suite.create_from_name(self._TAG,
+                                           self._BUILDS,
+                                           self._BOARD,
+                                           self.devserver,
+                                           afe=self.afe,
+                                           tko=self.tko,
+                                           job_retry=True,
+                                           max_retries=1)
+            suite.schedule(recorder.record_entry)
+            self.assertEqual(suite._retry_handler._max_retries, 1)
+            # Find the job_id of the test that allows retry
+            job_id = next(six.iterkeys(suite._retry_handler._retry_map))
+            suite._retry_handler.add_retry(old_job_id=job_id, new_job_id=10)
+            self.assertEqual(suite._retry_handler._max_retries, 0)
 
 
     def testSuiteDependencies(self):
@@ -529,47 +558,49 @@
                        constants.SCHEDULED_TEST_NAMES_KEY: repr(name_list)}
 
         self.mock_control_file_parsing()
-        recorder = self.mox.CreateMock(base_job.base_job)
+        recorder = MagicMock(base_job.base_job)
         self.expect_job_scheduling(recorder, suite_deps=['extra'])
-        self.mox.StubOutWithMock(utils, 'write_keyval')
-        utils.write_keyval(None, keyval_dict)
+        with patch.object(utils, 'write_keyval') as utils_patch:
+            utils_patch.write_keyval(None, keyval_dict)
 
-        self.mox.ReplayAll()
-        suite = Suite.create_from_name(self._TAG, self._BUILDS, self._BOARD,
-                                       self.devserver, extra_deps=['extra'],
-                                       afe=self.afe, tko=self.tko)
-        suite.schedule(recorder.record_entry)
+            suite = Suite.create_from_name(self._TAG,
+                                           self._BUILDS,
+                                           self._BOARD,
+                                           self.devserver,
+                                           extra_deps=['extra'],
+                                           afe=self.afe,
+                                           tko=self.tko)
+            suite.schedule(recorder.record_entry)
 
 
     def testInheritedKeyvals(self):
         """Tests should inherit some allowlisted job keyvals."""
         # Only keyvals in constants.INHERITED_KEYVALS are inherited to tests.
         job_keyvals = {
-            constants.KEYVAL_CIDB_BUILD_ID: '111',
-            constants.KEYVAL_CIDB_BUILD_STAGE_ID: '222',
-            constants.KEYVAL_BRANCH: 'dummy_branch',
-            constants.KEYVAL_BUILDER_NAME: 'model-dummy',
-            constants.KEYVAL_MASTER_BUILDER_NAME: 'master-dummy',
-            'your': 'name',
+                constants.KEYVAL_CIDB_BUILD_ID: '111',
+                constants.KEYVAL_CIDB_BUILD_STAGE_ID: '222',
+                constants.KEYVAL_BRANCH: 'placeholder_branch',
+                constants.KEYVAL_BUILDER_NAME: 'model-placeholder',
+                constants.KEYVAL_MAIN_BUILDER_NAME: 'main-placeholder',
+                'your': 'name',
         }
         test_keyvals = {
-            constants.KEYVAL_CIDB_BUILD_ID: '111',
-            constants.KEYVAL_CIDB_BUILD_STAGE_ID: '222',
-            constants.KEYVAL_BRANCH: 'dummy_branch',
-            constants.KEYVAL_BUILDER_NAME: 'model-dummy',
-            constants.KEYVAL_MASTER_BUILDER_NAME: 'master-dummy',
+                constants.KEYVAL_CIDB_BUILD_ID: '111',
+                constants.KEYVAL_CIDB_BUILD_STAGE_ID: '222',
+                constants.KEYVAL_BRANCH: 'placeholder_branch',
+                constants.KEYVAL_BUILDER_NAME: 'model-placeholder',
+                constants.KEYVAL_MAIN_BUILDER_NAME: 'main-placeholder',
         }
 
         self.mock_control_file_parsing()
-        recorder = self.mox.CreateMock(base_job.base_job)
+        recorder = MagicMock(base_job.base_job)
         self.expect_job_scheduling(
             recorder,
             extra_keyvals=test_keyvals)
-        self.mox.StubOutWithMock(utils, 'write_keyval')
-        utils.write_keyval(None, job_keyvals)
-        utils.write_keyval(None, mox.IgnoreArg())
+        with patch.object(utils, 'write_keyval') as utils_patch:
+            utils_patch.write_keyval(None, job_keyvals)
+            utils_patch.write_keyval(None, ANY)
 
-        self.mox.ReplayAll()
         suite = Suite.create_from_name(self._TAG, self._BUILDS, self._BOARD,
                                        self.devserver,
                                        afe=self.afe, tko=self.tko,
@@ -584,7 +615,6 @@
         """
         self.result_reporter = _MemoryResultReporter()
         self.expect_control_file_parsing()
-        self.mox.ReplayAll()
         suite = Suite.create_from_name(
                 self._TAG,
                 self._BUILDS,
@@ -597,7 +627,6 @@
                 job_retry=True,
                 result_reporter=self.result_reporter,
         )
-        self.mox.ResetAll()
         return suite
 
 
@@ -616,9 +645,8 @@
         test_predicates = test_report.predicates
         test_fallout = test_report.fallout
 
-        self.recorder = self.mox.CreateMock(base_job.base_job)
-        self.recorder.record_entry = self.mox.CreateMock(
-                base_job.base_job.record_entry)
+        self.recorder = MagicMock(base_job.base_job)
+        self.recorder.record_entry = MagicMock(base_job.base_job.record_entry)
         self._mock_recorder_with_results([test_predicates], self.recorder)
         return [test_predicates, test_fallout]
 
@@ -646,7 +674,7 @@
 
 
     def schedule_and_expect_these_results(self, suite, results, recorder):
-        """Create mox stubs for call to suite.schedule and
+        """Create stubs for call to suite.schedule and
         job_status.wait_for_results
 
         @param suite:    suite object for which to stub out schedule(...)
@@ -669,58 +697,57 @@
                 if new_input:
                     yield None
 
-        self.mox.StubOutWithMock(suite, 'schedule')
-        suite.schedule(recorder.record_entry)
+        suite_schedule_patch = patch.object(suite, 'schedule')
+        self.suite_schedule_mock = suite_schedule_patch.start()
+        self.addCleanup(suite_schedule_patch.stop)
+        self.suite_schedule_mock(recorder.record_entry)
         suite._retry_handler = RetryHandler({})
 
-        waiter_patch = mock.patch.object(
-                job_status.JobResultWaiter, 'wait_for_results', autospec=True)
+        waiter_patch = patch.object(job_status.JobResultWaiter,
+                                    'wait_for_results',
+                                    autospec=True)
         waiter_mock = waiter_patch.start()
         waiter_mock.return_value = result_generator(results)
         self.addCleanup(waiter_patch.stop)
 
-
-    def testRunAndWaitSuccess(self):
+    @patch('autotest_lib.client.common_lib.base_job.base_job',
+           autospec=base_job.base_job)
+    def testRunAndWaitSuccess(self, recorder):
         """Should record successful results."""
         suite = self._createSuiteWithMockedTestsAndControlFiles()
 
-        recorder = self.mox.CreateMock(base_job.base_job)
-
         results = [('GOOD', 'good'), ('FAIL', 'bad', 'reason')]
         self._mock_recorder_with_results(results, recorder)
         self.schedule_and_expect_these_results(suite, results, recorder)
-        self.mox.ReplayAll()
-
         suite.schedule(recorder.record_entry)
         suite.wait(recorder.record_entry)
 
-
-    def testRunAndWaitFailure(self):
+    @patch('autotest_lib.client.common_lib.base_job.base_job',
+           autospec=base_job.base_job)
+    def testRunAndWaitFailure(self, recorder):
         """Should record failure to gather results."""
         suite = self._createSuiteWithMockedTestsAndControlFiles()
 
-        recorder = self.mox.CreateMock(base_job.base_job)
         recorder.record_entry(
             StatusContains.CreateFromStrings('FAIL', self._TAG, 'waiting'),
             log_in_subdir=False)
 
-        self.mox.StubOutWithMock(suite, 'schedule')
-        suite.schedule(recorder.record_entry)
-        self.mox.ReplayAll()
+        with patch.object(suite, 'schedule') as ss:
+            ss.schedule(recorder.record_entry)
 
-        with mock.patch.object(
-                job_status.JobResultWaiter, 'wait_for_results',
-                autospec=True) as wait_mock:
-            wait_mock.side_effect = Exception
-            suite.schedule(recorder.record_entry)
-            suite.wait(recorder.record_entry)
+            with patch.object(job_status.JobResultWaiter,
+                              'wait_for_results',
+                              autospec=True) as wait_mock:
+                wait_mock.side_effect = Exception
+                suite.schedule(recorder.record_entry)
+                suite.wait(recorder.record_entry)
 
-
-    def testRunAndWaitScheduleFailure(self):
+    @patch('autotest_lib.client.common_lib.base_job.base_job',
+           autospec=base_job.base_job)
+    def testRunAndWaitScheduleFailure(self, recorder):
         """Should record failure to schedule jobs."""
+        self.additional_mocking()
         suite = self._createSuiteWithMockedTestsAndControlFiles()
-
-        recorder = self.mox.CreateMock(base_job.base_job)
         recorder.record_entry(
             StatusContains.CreateFromStrings('INFO', 'Start %s' % self._TAG),
             log_in_subdir=False)
@@ -729,20 +756,20 @@
             StatusContains.CreateFromStrings('FAIL', self._TAG, 'scheduling'),
             log_in_subdir=False)
 
-        self.mox.StubOutWithMock(suite._job_creator, 'create_job')
-        suite._job_creator.create_job(
-            mox.IgnoreArg(), retry_for=mox.IgnoreArg()).AndRaise(
-            Exception('Expected during test.'))
-        self.mox.ReplayAll()
+        local_patcher = patch.object(suite._job_creator, 'create_job')
+        patcher = local_patcher.start()
+        self.addCleanup(local_patcher.stop)
+        patcher.side_effect = (Exception('Expected during test.'))
 
         suite.schedule(recorder.record_entry)
         suite.wait(recorder.record_entry)
+        patcher.assert_called_with(ANY, retry_for=ANY)
 
 
     def testGetTestsSortedByTime(self):
         """Should find all tests and sorted by TIME setting."""
+        self.additional_mocking()
         self.expect_control_file_parsing()
-        self.mox.ReplayAll()
         # Get all tests.
         tests = SuiteBase.find_and_parse_tests(self.getter,
                                                lambda d: True,
@@ -774,6 +801,7 @@
 
     def testJobRetryTestFail(self):
         """Test retry works."""
+        self.additional_mocking()
         test_to_retry = self.files['seven']
         fake_new_job_id = self._FAKE_JOB_ID + 1
         fake_job = FakeJob(id=self._FAKE_JOB_ID)
@@ -784,34 +812,38 @@
                 self.suite,
                 [test_results[0] + test_results[1]],
                 self.recorder)
-        self.mox.StubOutWithMock(self.suite._job_creator, 'create_job')
-        self.suite._job_creator.create_job(
-                test_to_retry,
-                retry_for=self._FAKE_JOB_ID).AndReturn(fake_new_job)
-        self.mox.ReplayAll()
-        self.suite.schedule(self.recorder.record_entry)
-        self.suite._retry_handler._retry_map = {
-                self._FAKE_JOB_ID: {'state': RetryHandler.States.NOT_ATTEMPTED,
-                                    'retry_max': 1}
-                }
-        self.suite._jobs_to_tests[self._FAKE_JOB_ID] = test_to_retry
-        self.suite.wait(self.recorder.record_entry)
-        expected_retry_map = {
-                self._FAKE_JOB_ID: {'state': RetryHandler.States.RETRIED,
-                                    'retry_max': 1},
-                fake_new_job_id: {'state': RetryHandler.States.NOT_ATTEMPTED,
-                                  'retry_max': 0}
-                }
-        # Check retry map is correctly updated
-        self.assertEquals(self.suite._retry_handler._retry_map,
-                          expected_retry_map)
-        # Check _jobs_to_tests is correctly updated
-        self.assertEquals(self.suite._jobs_to_tests[fake_new_job_id],
-                          test_to_retry)
+        with patch.object(self.suite._job_creator, 'create_job') as suite_mock:
 
+            suite_mock.return_value = fake_new_job
+            self.suite.schedule(self.recorder.record_entry)
+            self.suite._retry_handler._retry_map = {
+                    self._FAKE_JOB_ID: {
+                            'state': RetryHandler.States.NOT_ATTEMPTED,
+                            'retry_max': 1
+                    }
+            }
+            self.suite._jobs_to_tests[self._FAKE_JOB_ID] = test_to_retry
+            self.suite.wait(self.recorder.record_entry)
+            expected_retry_map = {
+                    self._FAKE_JOB_ID: {
+                            'state': RetryHandler.States.RETRIED,
+                            'retry_max': 1
+                    },
+                    fake_new_job_id: {
+                            'state': RetryHandler.States.NOT_ATTEMPTED,
+                            'retry_max': 0
+                    }
+            }
+            # Check retry map is correctly updated
+            self.assertEquals(self.suite._retry_handler._retry_map,
+                              expected_retry_map)
+            # Check _jobs_to_tests is correctly updated
+            self.assertEquals(self.suite._jobs_to_tests[fake_new_job_id],
+                              test_to_retry)
 
     def testJobRetryTestWarn(self):
         """Test that no retry is scheduled if test warns."""
+        self.additional_mocking()
         test_to_retry = self.files['seven']
         fake_job = FakeJob(id=self._FAKE_JOB_ID)
         test_results = self._createSuiteMockResults(result_status='WARN')
@@ -819,7 +851,6 @@
                 self.suite,
                 [test_results[0] + test_results[1]],
                 self.recorder)
-        self.mox.ReplayAll()
         self.suite.schedule(self.recorder.record_entry)
         self.suite._retry_handler._retry_map = {
                 self._FAKE_JOB_ID: {'state': RetryHandler.States.NOT_ATTEMPTED,
@@ -835,42 +866,48 @@
                           expected_retry_map)
         self.assertEquals(self.suite._jobs_to_tests, expected_jobs_to_tests)
 
-
     def testFailedJobRetry(self):
         """Make sure the suite survives even if the retry failed."""
+        self.additional_mocking()
         test_to_retry = self.files['seven']
-        fake_job = FakeJob(id=self._FAKE_JOB_ID)
+        FakeJob(id=self._FAKE_JOB_ID)
 
         test_results = self._createSuiteMockResults()
         self.schedule_and_expect_these_results(
                 self.suite,
                 [test_results[0] + test_results[1]],
                 self.recorder)
-        self.mox.StubOutWithMock(self.suite._job_creator, 'create_job')
-        self.suite._job_creator.create_job(
-                test_to_retry, retry_for=self._FAKE_JOB_ID).AndRaise(
-                error.RPCException('Expected during test'))
-        # Do not file a bug.
-        self.mox.StubOutWithMock(self.suite, '_should_report')
-        self.suite._should_report(mox.IgnoreArg()).AndReturn(False)
+        with patch.object(self.suite._job_creator,
+                          'create_job') as suite_mock, patch.object(
+                                  self.suite, '_should_report') as report_mock:
+            suite_mock.side_effect = error.RPCException('Expected during test')
 
-        self.mox.ReplayAll()
+            # Do not file a bug.
+            report_mock.return_value = False
 
-        self.suite.schedule(self.recorder.record_entry)
-        self.suite._retry_handler._retry_map = {
-                self._FAKE_JOB_ID: {
-                        'state': RetryHandler.States.NOT_ATTEMPTED,
-                        'retry_max': 1}}
-        self.suite._jobs_to_tests[self._FAKE_JOB_ID] = test_to_retry
-        self.suite.wait(self.recorder.record_entry)
-        expected_retry_map = {
-                self._FAKE_JOB_ID: {
-                        'state': RetryHandler.States.ATTEMPTED,
-                        'retry_max': 1}}
-        expected_jobs_to_tests = self.suite._jobs_to_tests.copy()
-        self.assertEquals(self.suite._retry_handler._retry_map,
-                          expected_retry_map)
-        self.assertEquals(self.suite._jobs_to_tests, expected_jobs_to_tests)
+            self.suite.schedule(self.recorder.record_entry)
+            self.suite._retry_handler._retry_map = {
+                    self._FAKE_JOB_ID: {
+                            'state': RetryHandler.States.NOT_ATTEMPTED,
+                            'retry_max': 1
+                    }
+            }
+            self.suite._jobs_to_tests[self._FAKE_JOB_ID] = test_to_retry
+            self.suite.wait(self.recorder.record_entry)
+            expected_retry_map = {
+                    self._FAKE_JOB_ID: {
+                            'state': RetryHandler.States.ATTEMPTED,
+                            'retry_max': 1
+                    }
+            }
+            expected_jobs_to_tests = self.suite._jobs_to_tests.copy()
+            self.assertEquals(self.suite._retry_handler._retry_map,
+                              expected_retry_map)
+            self.assertEquals(self.suite._jobs_to_tests,
+                              expected_jobs_to_tests)
+
+            suite_mock.assert_called_with(test_to_retry,
+                                          retry_for=self._FAKE_JOB_ID)
 
 
 class _MemoryResultReporter(SuiteBase._ResultReporter):
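For reference, here is a minimal, self-contained sketch of the mox-to-unittest.mock
pattern the converted tests above rely on: patch.object() started with addCleanup()
in place of StubOutWithMock/ReplayAll, side_effect for raised errors, and ANY in
place of mox.IgnoreArg(). The JobCreator and Suite classes below are hypothetical
stand-ins, not the real dynamic_suite classes.

import unittest
from unittest.mock import ANY, patch


class JobCreator(object):
    """Stand-in for the real job creator; never called once patched."""
    def create_job(self, test, retry_for=None):
        raise NotImplementedError('the real implementation talks to the AFE')


class Suite(object):
    """Stand-in suite that delegates scheduling to its job creator."""
    def __init__(self):
        self._job_creator = JobCreator()

    def schedule(self, test):
        return self._job_creator.create_job(test, retry_for=None)


class SchedulingFailureTest(unittest.TestCase):
    def test_schedule_failure_is_surfaced(self):
        suite = Suite()
        # patch.object + addCleanup replaces mox's StubOutWithMock/ReplayAll.
        patcher = patch.object(suite._job_creator, 'create_job')
        create_job = patcher.start()
        self.addCleanup(patcher.stop)
        create_job.side_effect = Exception('Expected during test.')

        with self.assertRaises(Exception):
            suite.schedule('some_test')
        # ANY replaces mox.IgnoreArg() when asserting call arguments.
        create_job.assert_called_with('some_test', retry_for=ANY)


if __name__ == '__main__':
    unittest.main()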
diff --git a/server/cros/dynamic_suite/tools.py b/server/cros/dynamic_suite/tools.py
index 9ca87f7..2027c9d 100644
--- a/server/cros/dynamic_suite/tools.py
+++ b/server/cros/dynamic_suite/tools.py
@@ -206,7 +206,7 @@
             ch == '\n' or ch == ' ' or ch == '\t'):
         end += 1
         if end < total_length:
-          ch = control_file_in[end]
+            ch = control_file_in[end]
     return control_file_in[:start] + control_file_in[end:]
 
 
@@ -343,9 +343,9 @@
 
     @param build: name of the build, e.g., lumpy-release/R31-1234.0.0.
     @param suite: name of the suite, e.g., bvt.
-    @param test_name: name of the test, e.g., dummy_Pass.
+    @param test_name: name of the test, e.g., stub_ServerToClientPass.
     @return: the test job's name, e.g.,
-             lumpy-release/R31-1234.0.0/bvt/dummy_Pass.
+             lumpy-release/R31-1234.0.0/bvt/stub_ServerToClientPass.
     """
     return '/'.join([build, suite, test_name])
 
@@ -359,8 +359,8 @@
     @param build: name of the build, e.g., lumpy-release/R31-1234.0.0.
     @param suite: name of the suite, e.g., bvt.
     @param test_job_name: name of the test job, e.g.,
-                          lumpy-release/R31-1234.0.0/bvt/dummy_Pass_SERVER_JOB.
-    @return: the test name, e.g., dummy_Pass_SERVER_JOB.
+                          lumpy-release/R31-1234.0.0/bvt/stub_ServerToClientPass.
+    @return: the test name, e.g., stub_ServerToClientPass.
     """
     # Do not change this naming convention without updating
     # site_utils.parse_job_name.
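The build/suite/test naming convention documented above round-trips as in this
small sketch; the helper bodies are written for illustration only and are not
the actual tools.py implementations.

def create_job_name(build, suite, test_name):
    # e.g. 'lumpy-release/R31-1234.0.0/bvt/stub_ServerToClientPass'
    return '/'.join([build, suite, test_name])


def get_test_name(build, suite, test_job_name):
    # Strip the '<build>/<suite>/' prefix to recover the test name.
    return test_job_name[len('%s/%s/' % (build, suite)):]


build = 'lumpy-release/R31-1234.0.0'
suite = 'bvt'
job_name = create_job_name(build, suite, 'stub_ServerToClientPass')
assert job_name == 'lumpy-release/R31-1234.0.0/bvt/stub_ServerToClientPass'
assert get_test_name(build, suite, job_name) == 'stub_ServerToClientPass'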
diff --git a/server/cros/dynamic_suite/tools_unittest.py b/server/cros/dynamic_suite/tools_unittest.py
index 472da84..4d6a475 100755
--- a/server/cros/dynamic_suite/tools_unittest.py
+++ b/server/cros/dynamic_suite/tools_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -10,9 +10,9 @@
 from __future__ import division
 from __future__ import print_function
 
-import mox
 import six
 import unittest
+from unittest.mock import patch, MagicMock
 
 import common
 
@@ -23,7 +23,7 @@
 from autotest_lib.server import frontend
 
 
-class DynamicSuiteToolsTest(mox.MoxTestBase):
+class DynamicSuiteToolsTest(unittest.TestCase):
     """Unit tests for dynamic_suite tools module methods.
 
     @var _BOARD: fake board to reimage
@@ -35,8 +35,8 @@
 
     def setUp(self):
         super(DynamicSuiteToolsTest, self).setUp()
-        self.afe = self.mox.CreateMock(frontend.AFE)
-        self.tko = self.mox.CreateMock(frontend.TKO)
+        self.afe = MagicMock()
+        self.tko = MagicMock()
         # Having these ordered by complexity is important!
         host_spec_list = [HostSpec([self._BOARD, self._POOL])]
         for dep_list in six.itervalues(self._DEPENDENCIES):
@@ -112,9 +112,11 @@
     def testNotIncorrectlyLocked(self):
         """Should accept hosts locked by the infrastructure."""
         infra_user = 'an infra user'
-        self.mox.StubOutWithMock(tools, 'infrastructure_user')
-        tools.infrastructure_user().AndReturn(infra_user)
-        self.mox.ReplayAll()
+        patcher = patch.object(tools, 'infrastructure_user')
+        tools_mock = patcher.start()
+        self.addCleanup(patcher.stop)
+        tools_mock.return_value = infra_user
+
         host = FakeHost(locked=True, locked_by=infra_user)
         self.assertFalse(tools.incorrectly_locked(host))
 
diff --git a/server/cros/faft/cr50_test.py b/server/cros/faft/cr50_test.py
index 4497c13..1297ff6 100644
--- a/server/cros/faft/cr50_test.py
+++ b/server/cros/faft/cr50_test.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,8 +8,7 @@
 import logging
 import os
 import pprint
-import StringIO
-import subprocess
+import six
 import time
 
 from autotest_lib.client.bin import utils
@@ -22,7 +22,7 @@
     """Base class that sets up helper objects/functions for cr50 tests."""
     version = 1
 
-    RELEASE_POOLS = ['faft-cr50-experimental']
+    RELEASE_POOLS = ['faft-cr50-experimental', 'faft-cr50']
     RESPONSE_TIMEOUT = 180
     GS_PRIVATE = 'gs://chromeos-localmirror-private/distfiles/'
     # Prod signed test images are stored in the private cr50 directory.
@@ -52,17 +52,6 @@
             ERASEFLASHINFO_IMAGE: ['chip_bid'],
     }
     PP_SHORT_INTERVAL = 3
-    # Cr50 may have flash operation errors during the test. Here's an example
-    # of one error message.
-    # do_flash_op:245 errors 20 fsh_pe_control 40720004
-    # The stuff after the ':' may change, but all flash operation errors
-    # contain do_flash_op. do_flash_op is only ever printed if there is an
-    # error during the flash operation. Just search for do_flash_op to simplify
-    # the search string and make it applicable to all flash op errors.
-    CR50_FLASH_OP_ERROR_MSG = 'do_flash_op'
-    # USB issues may show up with the timer sof calibration overflow interrupt.
-    # Count these during cleanup.
-    CR50_USB_ERROR = 'timer_sof_calibration_overflow_int'
 
     def initialize(self,
                    host,
@@ -81,7 +70,7 @@
             raise error.TestNAError('Test can only be run on devices with '
                                     'access to the Cr50 console')
         # TODO(b/149948314): remove when dual-v4 is sorted out.
-        if 'ccd_cr50' in self.servo.get_servo_version():
+        if 'ccd' in self.servo.get_servo_version():
             self.servo.disable_ccd_watchdog_for_test()
 
         logging.info('Test Args: %r', full_args)
@@ -102,6 +91,15 @@
         # Clear the FWMP, so it can't disable CCD.
         self.clear_fwmp()
 
+        # TODO(b/218492933): find a better way to disable rddkeepalive
+        # Disable rddkeepalive, so the test can disable ccd.
+        self.cr50.send_command('ccd testlab open')
+        self.cr50.send_command('rddkeepalive disable')
+        # faft-cr50 locks and reopens ccd. This will restrict some capabilities
+        # c2d2 uses to control the duts. Set the capabilities to Always, so
+        # individual tests don't need to care that much.
+        self.cr50.enable_servo_control_caps()
+
         if self.can_set_ccd_level:
             # Lock cr50 so the console will be restricted
             self.cr50.set_ccd_level('lock')
@@ -224,12 +222,34 @@
             logging.info('Running qual image. No update needed.')
             return
         logging.info('Cr50 qual update required.')
-        filesystem_util.make_rootfs_writable(self.host)
+        self.make_rootfs_writable()
         self._update_device_images_and_running_cr50_firmware(
                 qual_state, qual_path, prod_path, prepvt_path)
         logging.info("Recording qual device state as 'original' device state")
         self._save_original_state(qual_path)
 
+    def make_rootfs_writable(self):
+        """Make rootfs writeable. Recover the dut if necessary."""
+        path = None
+        try:
+            filesystem_util.make_rootfs_writable(self.host)
+            return
+        except error.AutoservRunError as e:
+            if 'cannot remount' not in e.result_obj.stderr:
+                raise
+            path = e.result_obj.stderr.partition(
+                    'cannot remount')[2].split()[0]
+        # This shouldn't be possible.
+        if not path:
+            raise error.TestError('Need path to repair filesystem')
+        logging.info('repair %s', path)
+        # Repair the block. Assume yes to all questions. The exit status will be
+        # 3, so ignore errors. make_rootfs_writable will fail if something
+        # actually went wrong.
+        self.host.run('e2fsck -y %s' % path, ignore_status=True)
+        self.host.reboot()
+        filesystem_util.make_rootfs_writable(self.host)
+
     def _saved_cr50_state(self, state):
         """Returns True if the test has saved the given state
 
@@ -461,6 +481,39 @@
 
         self._retry_cr50_update(image_path, 3, True)
 
+    def _discharging_factory_mode_cleanup(self):
+        """Try to get the dut back into charging mode.
+
+        Shut down the DUT, fake an AC disconnect, and then turn on the DUT to
+        try to recover the EC.
+
+        When Cr50 enters factory mode on Wilco, the EC disables charging.
+        Try to run the sequence to get the Wilco EC out of the factory mode
+        state, so it reenables charging.
+        """
+        if self.faft_config.chrome_ec:
+            return
+        charge_state = self.host.get_power_supply_info()['Battery']['state']
+        logging.info('Charge state: %r', charge_state)
+        if 'Discharging' not in charge_state:
+            logging.info('Charge state is ok')
+            return
+
+        if not self.servo.is_servo_v4_type_c():
+            raise error.TestError(
+                    'Cannot recover charging without Type C servo')
+        # Disconnect the charger and reset the dut to recover charging.
+        logging.info('Recovering charging')
+        self.faft_client.system.run_shell_command('poweroff')
+        time.sleep(self.cr50.SHORT_WAIT)
+        self.servo.set_nocheck('servo_v4_uart_cmd', 'fakedisconnect 100 20000')
+        time.sleep(self.cr50.SHORT_WAIT)
+        self._try_to_bring_dut_up()
+        charge_state = self.host.get_power_supply_info()['Battery']['state']
+        logging.info('Charge state: %r', charge_state)
+        if 'Discharging' in charge_state:
+            logging.warning('DUT still discharging')
+
     def _cleanup_required(self, state_mismatch, image_type):
         """Return True if the update can fix something in the mismatched state.
 
@@ -524,7 +577,7 @@
         mismatch = {}
         state = self.get_image_and_bid_state()
 
-        for k, expected_val in expected_state.iteritems():
+        for k, expected_val in six.iteritems(expected_state):
             val = state[k]
             if val != expected_val:
                 mismatch[k] = 'expected: %s, current: %s' % (expected_val, val)
@@ -555,7 +608,7 @@
                 # Even if we can't open cr50, do our best to reset the rest of
                 # the system state. Log a warning here.
                 logging.warning('Unable to Open cr50', exc_info=True)
-            self.cr50.send_command('ccd reset')
+            self.cr50.ccd_reset(servo_en=False)
             if not self.cr50.ccd_is_reset():
                 raise error.TestFail('Could not reset ccd')
 
@@ -573,6 +626,26 @@
         elif self.original_ccd_level != self.cr50.get_ccd_level():
             self.cr50.set_ccd_level(self.original_ccd_level)
 
+    def fast_ccd_open(self,
+                      enable_testlab=False,
+                      reset_ccd=True,
+                      dev_mode=False):
+        """Check for watchdog resets after opening ccd.
+
+        Args:
+            enable_testlab: If True, enable testlab mode after cr50 is open.
+            reset_ccd: If True, reset ccd after open.
+            dev_mode: True if the device should be in dev mode after ccd
+                      is opened.
+        """
+        try:
+            super(Cr50Test, self).fast_ccd_open(enable_testlab, reset_ccd,
+                                                dev_mode)
+        except Exception as e:
+            # Check for cr50 watchdog resets.
+            self.cr50.check_for_console_errors('Fast ccd open')
+            raise
+
     def cleanup(self):
         """Attempt to cleanup the cr50 state. Then run firmware cleanup"""
         try:
@@ -586,40 +659,16 @@
 
             self._try_to_bring_dut_up()
             self._restore_cr50_state()
+
+            # Make sure the sarien EC isn't stuck in factory mode.
+            self._discharging_factory_mode_cleanup()
         finally:
             super(Cr50Test, self).cleanup()
 
         # Check the logs captured during firmware_test cleanup for cr50 errors.
-        self._get_cr50_stats_from_uart_capture()
+        self.cr50.check_for_console_errors('Test Cleanup')
         self.servo.allow_ccd_watchdog_for_test()
 
-    def _get_cr50_stats_from_uart_capture(self):
-        """Check cr50 uart output for errors.
-
-        Use the logs captured during firmware_test cleanup to check for cr50
-        errors. Flash operation issues aren't obvious unless you check the logs.
-        All flash op errors print do_flash_op and it isn't printed during normal
-        operation. Open the cr50 uart file and count the number of times this is
-        printed. Log the number of errors.
-        """
-        cr50_uart_file = self.servo.get_uart_logfile('cr50')
-        if not cr50_uart_file:
-            logging.info('There is not a cr50 uart file')
-            return
-
-        flash_error_count = 0
-        usb_error_count = 0
-        with open(cr50_uart_file, 'r') as f:
-            for line in f:
-                if self.CR50_FLASH_OP_ERROR_MSG in line:
-                    flash_error_count += 1
-                if self.CR50_USB_ERROR in line:
-                    usb_error_count += 1
-
-        # Log any flash operation errors.
-        logging.info('do_flash_op count: %d', flash_error_count)
-        logging.info('usb error count: %d', usb_error_count)
-
     def _update_device_images_and_running_cr50_firmware(
             self, state, release_path, prod_path, prepvt_path):
         """Update cr50, set the board id, and copy firmware to the DUT.
@@ -641,6 +690,7 @@
             self.update_cr50_image_and_board_id(release_path,
                                                 state['chip_bid'])
 
+        self._try_to_bring_dut_up()
         new_mismatch = self._check_running_image_and_board_id(state)
         # Copy the original .prod and .prepvt images back onto the DUT.
         if (self._cleanup_required(new_mismatch, self.DEVICE_IMAGES)
@@ -682,10 +732,10 @@
         try:
             self.cr50.ccd_enable()
         except Exception as e:
-            logging.warn('Ignored exception enabling ccd %r', str(e))
+            logging.warning('Ignored exception enabling ccd %r', str(e))
         self.cr50.send_command('ccd testlab open')
         self.cr50.send_command('rddkeepalive disable')
-        self.cr50.send_command('ccd reset')
+        self.cr50.ccd_reset()
         self.cr50.send_command('wp follow_batt_pres atboot')
 
     def _restore_ccd_settings(self):
@@ -702,6 +752,7 @@
         # reboot to normal mode if the device is in dev mode.
         self.enter_mode_after_checking_cr50_state('normal')
 
+        self._try_to_bring_dut_up()
         tpm_utils.ClearTPMOwnerRequest(self.host, wait_for_ready=True)
         self.clear_fwmp()
 
@@ -911,6 +962,9 @@
         """
         tmp_dest = '/tmp/' + os.path.basename(path)
 
+        # Make sure the dut is sshable before installing the image.
+        self._try_to_bring_dut_up()
+
         dest, image_ver = cr50_utils.InstallImage(self.host, path, tmp_dest)
         # Use the -p option to make sure the DUT does a clean reboot.
         cr50_utils.GSCTool(self.host, ['-a', dest, '-p'])
@@ -958,55 +1012,24 @@
         @param name: The name to give the job
         @param expect_error: True if the command should fail
         """
-        set_pwd_cmd = utils.sh_escape(cmd)
-        full_ssh_command = '%s "%s"' % (self.host.ssh_command(options='-tt'),
-                                        set_pwd_cmd)
         logging.info('Running: %s', cmd)
         logging.info('Password: %s', password)
-
         # Make sure the test waits long enough to avoid ccd rate limiting.
         time.sleep(self.cr50.CCD_PASSWORD_RATE_LIMIT)
-
-        stdout = StringIO.StringIO()
-        # Start running the gsctool Command in the background.
-        gsctool_job = utils.BgJob(
-                full_ssh_command,
-                nickname='%s_with_password' % name,
-                stdout_tee=stdout,
-                stderr_tee=utils.TEE_TO_LOGS,
-                stdin=subprocess.PIPE)
-        if gsctool_job == None:
-            raise error.TestFail('could not start gsctool command %r' % cmd)
-
-        try:
-            # Wait for enter prompt
-            gsctool_job.process_output()
-            logging.info(stdout.getvalue().strip())
-            # Enter the password
-            gsctool_job.sp.stdin.write(password + '\n')
-
-            # Wait for re-enter prompt
-            gsctool_job.process_output()
-            logging.info(stdout.getvalue().strip())
-            # Re-enter the password
-            gsctool_job.sp.stdin.write(password + '\n')
-            time.sleep(self.cr50.CONSERVATIVE_CCD_WAIT)
-            gsctool_job.process_output()
-        finally:
-            exit_status = utils.nuke_subprocess(gsctool_job.sp)
-            output = stdout.getvalue().strip()
-            logging.info('%s stdout: %s', name, output)
-            logging.info('%s exit status: %s', name, exit_status)
-            if exit_status:
-                message = ('gsctool %s failed using %r: %s %s' %
-                           (name, password, exit_status, output))
-                if expect_error:
-                    logging.info(message)
-                else:
-                    raise error.TestFail(message)
-            elif expect_error:
-                raise error.TestFail('%s with %r did not fail when expected' %
-                                     (name, password))
+        full_cmd = "echo -e '%s\n%s\n' | %s" % (password, password, cmd)
+        result = self.host.run(full_cmd, ignore_status=expect_error)
+        if result.exit_status:
+            message = ('gsctool %s failed using %r: %s %s' %
+                       (name, password, result.exit_status, result.stderr))
+            if expect_error:
+                logging.info(message)
+            else:
+                raise error.TestFail(message)
+        elif expect_error:
+            raise error.TestFail('%s with %r did not fail when expected' %
+                                 (name, password))
+        else:
+            logging.info('ran %s password command: %r', name, result.stdout)
 
     def set_ccd_password(self, password, expect_error=False):
         """Set the ccd password"""
diff --git a/server/cros/faft/fingerprint_test.py b/server/cros/faft/fingerprint_test.py
index 7a58976..ce94641 100644
--- a/server/cros/faft/fingerprint_test.py
+++ b/server/cros/faft/fingerprint_test.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,6 +7,8 @@
 import os
 import time
 
+import six
+
 from autotest_lib.server import test
 from autotest_lib.server.cros import filesystem_util
 from autotest_lib.client.common_lib import error, utils
@@ -91,6 +94,9 @@
             _FP_BOARD_NAME_BLOONCHIPPER: {
                     'hatch': 'bloonchipper_v2.0.4277-9f652bb3',
                     'zork': 'bloonchipper_v2.0.5938-197506c1',
+                    'volteer': 'bloonchipper_v2.0.5938-197506c1',
+                    'brya': 'bloonchipper_v2.0.5938-197506c1',
+                    'guybrush': 'bloonchipper_v2.0.5938-197506c1',
             },
             _FP_BOARD_NAME_DARTMONKEY: 'dartmonkey_v2.0.2887-311310808',
             _FP_BOARD_NAME_NOCTURNE: 'nocturne_fp_v2.2.64-58cf5974e',
@@ -110,46 +116,47 @@
     #      what we release) is exactly what we expect.
     _FIRMWARE_VERSION_MAP = {
         _FP_BOARD_NAME_BLOONCHIPPER: {
-            'bloonchipper_v2.0.4277-9f652bb3.bin': {
-                _FIRMWARE_VERSION_SHA256SUM: '7d9b788a908bee5c83e27450258b2bbf110d7253d49faa4804562ae27e42cb3b',
+            'bloonchipper_v2.0.4277-9f652bb3-RO_v2.0.13589-727a419-RW.bin': {
+                _FIRMWARE_VERSION_SHA256SUM: 'b500a08d1c4f49ac1455214f1957f178288a2f4b36b40e7cd49acad1d0896ccc',
                 _FIRMWARE_VERSION_RO_VERSION: 'bloonchipper_v2.0.4277-9f652bb3',
-                _FIRMWARE_VERSION_RW_VERSION: 'bloonchipper_v2.0.4277-9f652bb3',
+                _FIRMWARE_VERSION_RW_VERSION: 'bloonchipper_v2.0.13589-727a419',
                 _FIRMWARE_VERSION_KEY_ID: '1c590ef36399f6a2b2ef87079c135b69ef89eb60',
             },
-            'bloonchipper_v2.0.5938-197506c1.bin': {
-                _FIRMWARE_VERSION_SHA256SUM: 'dc62e4b05eaf4fa8ab5546dcf18abdb30c8e64e9bf0fbf377ebc85155c7c3a47',
+            'bloonchipper_v2.0.5938-197506c1-RO_v2.0.13589-727a419-RW.bin': {
+                _FIRMWARE_VERSION_SHA256SUM: 'dfa1a9e409893441c990edde86dbe6d0e301c03b7a9e604ec6af5fc1691ef1be',
                 _FIRMWARE_VERSION_RO_VERSION: 'bloonchipper_v2.0.5938-197506c1',
-                _FIRMWARE_VERSION_RW_VERSION: 'bloonchipper_v2.0.5938-197506c1',
+                _FIRMWARE_VERSION_RW_VERSION: 'bloonchipper_v2.0.13589-727a419',
                 _FIRMWARE_VERSION_KEY_ID: '1c590ef36399f6a2b2ef87079c135b69ef89eb60',
             },
         },
         _FP_BOARD_NAME_NOCTURNE: {
-            'nocturne_fp_v2.2.64-58cf5974e-RO_v2.0.4017-9c45fb4b3-RW.bin': {
-                _FIRMWARE_VERSION_SHA256SUM: '16c405eeaff75dcbc76dbc9f368f66e3fabc47e2ebcf13bd2b64b8b133bbff97',
+            'nocturne_fp_v2.2.64-58cf5974e-RO_v2.0.13584-6fcfe697-RW.bin': {
+                _FIRMWARE_VERSION_SHA256SUM: '8ebc978bf18fc1629a8ab9b33ac91817d850ce5ca9c55dc69c99b0acfb540948',
                 _FIRMWARE_VERSION_RO_VERSION: 'nocturne_fp_v2.2.64-58cf5974e',
-                _FIRMWARE_VERSION_RW_VERSION: 'nocturne_fp_v2.0.4017-9c45fb4b3',
+                _FIRMWARE_VERSION_RW_VERSION: 'nocturne_fp_v2.0.13584-6fcfe697',
                 _FIRMWARE_VERSION_KEY_ID: '6f38c866182bd9bf7a4462c06ac04fa6a0074351',
             },
         },
         _FP_BOARD_NAME_NAMI: {
-            'nami_fp_v2.2.144-7a08e07eb-RO_v2.0.4017-9c45fb4b3-RW.bin': {
-                _FIRMWARE_VERSION_SHA256SUM: '7965ea4c4371ee6d21dc462b9ed7c99078d17f4b772bec51441ca9af7d8f3a80',
+            'nami_fp_v2.2.144-7a08e07eb-RO_v2.0.13584-6fcfe69780-RW.bin': {
+                _FIRMWARE_VERSION_SHA256SUM: 'e198db08020ac71a11a53d641d6ada750061fb3f3faa2728aab7835266ed9e7b',
                 _FIRMWARE_VERSION_RO_VERSION: 'nami_fp_v2.2.144-7a08e07eb',
-                _FIRMWARE_VERSION_RW_VERSION: 'nami_fp_v2.0.4017-9c45fb4b3',
+                _FIRMWARE_VERSION_RW_VERSION: 'nami_fp_v2.0.13584-6fcfe69780',
                 _FIRMWARE_VERSION_KEY_ID: '35486c0090ca390408f1fbbf2a182966084fe2f8',
             },
         },
         _FP_BOARD_NAME_DARTMONKEY: {
-            'dartmonkey_v2.0.2887-311310808-RO_v2.0.4017-9c45fb4b3-RW.bin': {
-                _FIRMWARE_VERSION_SHA256SUM: 'b84914c70e93c28e2221f48be338dbf0ad0cfb12b7877baaf6b47f7bfd2aa958',
+            'dartmonkey_v2.0.2887-311310808-RO_v2.0.13584-6fcfe6978-RW.bin': {
+                _FIRMWARE_VERSION_SHA256SUM: '8fa168c19d886b5fe8e852bba7d3b04cd0cd2344d377d9b3d278a45d76b206a1',
                 _FIRMWARE_VERSION_RO_VERSION: 'dartmonkey_v2.0.2887-311310808',
-                _FIRMWARE_VERSION_RW_VERSION: 'dartmonkey_v2.0.4017-9c45fb4b3',
+                _FIRMWARE_VERSION_RW_VERSION: 'dartmonkey_v2.0.13584-6fcfe6978',
                 _FIRMWARE_VERSION_KEY_ID: '257a0aa3ac9e81aa4bc3aabdb6d3d079117c5799',
             }
         }
     }
 
     _BIOD_UPSTART_JOB_NAME = 'biod'
+    _POWERD_UPSTART_JOB_NAME = 'powerd'
     # TODO(crbug.com/925545)
     _TIMBERSLIDE_UPSTART_JOB_NAME = \
         'timberslide LOG_PATH=/sys/kernel/debug/cros_fp/console_log'
@@ -230,9 +237,18 @@
             logging.info('Stopping %s', self._BIOD_UPSTART_JOB_NAME)
             self.host.upstart_stop(self._BIOD_UPSTART_JOB_NAME)
 
+        # TODO(b/183123775): Remove when bug is fixed.
+        #  Disabling powerd to prevent the display from turning off, which kills
+        #  USB on some platforms.
+        self._powerd_running = self.host.upstart_status(
+            self._POWERD_UPSTART_JOB_NAME)
+        if self._powerd_running:
+            logging.info('Stopping %s', self._POWERD_UPSTART_JOB_NAME)
+            self.host.upstart_stop(self._POWERD_UPSTART_JOB_NAME)
+
         # On some platforms an AP reboot is needed after flashing firmware to
         # rebind the driver.
-        self._dut_needs_reboot = self.get_host_board() == 'zork'
+        self._dut_needs_reboot = self.is_uart_device()
 
         if filesystem_util.is_rootfs_writable(self.host):
             if self._dut_needs_reboot:
@@ -285,11 +301,13 @@
 
     def cleanup(self):
         """Restores original state."""
-        # Once the tests complete we need to make sure we're running the
-        # original firmware (not dev version) and potentially reset rollback.
-        self._initialize_running_fw_version(use_dev_signed_fw=False,
-                                            force_firmware_flashing=False)
-        self._initialize_fw_entropy()
+        if hasattr(self, '_need_fw_restore') and self._need_fw_restore:
+            # Once the tests complete we need to make sure we're running the
+            # original firmware (not dev version) and potentially reset rollback.
+            self._initialize_running_fw_version(use_dev_signed_fw=False,
+                                                force_firmware_flashing=False)
+            self._initialize_fw_entropy()
+
         # Re-enable biod and updater after flashing and initializing entropy so
         # that they don't interfere if there was a reboot.
         if hasattr(self, '_dut_needs_reboot') and self._dut_needs_reboot:
@@ -300,6 +318,10 @@
         self._initialize_hw_and_sw_write_protect(
             enable_hardware_write_protect=True,
             enable_software_write_protect=True)
+        # TODO(b/183123775)
+        if hasattr(self, '_powerd_running') and self._powerd_running:
+            logging.info('Restarting powerd')
+            self.host.upstart_restart(self._POWERD_UPSTART_JOB_NAME)
         if hasattr(self, '_biod_running') and self._biod_running:
             logging.info('Restarting biod')
             self.host.upstart_restart(self._BIOD_UPSTART_JOB_NAME)
@@ -361,7 +383,7 @@
 
         Example: self.TEST_IMAGE_DEV = /some/path/images/nocturne_fp.dev
         """
-        for key, val in self._TEST_IMAGE_FORMAT_MAP.iteritems():
+        for key, val in six.iteritems(self._TEST_IMAGE_FORMAT_MAP):
             full_path = os.path.join(dut_fw_test_images_dir,
                                      val % self.get_fp_board())
             setattr(self, key, full_path)
@@ -438,6 +460,11 @@
                 'Unable to get fingerprint board with cros_config')
         return result.stdout.rstrip()
 
+    def is_uart_device(self) -> bool:
+        """Returns True if the boards transpot device is UART"""
+        uart_devices = ['zork', 'guybrush']
+        return self.get_host_board() in uart_devices
+
     def get_host_board(self):
         """Returns name of the host board."""
         return self.host.get_board().split(':')[-1]
@@ -802,8 +829,10 @@
 
     def flash_rw_ro_firmware(self, fw_path):
         """Flashes *all* firmware (both RO and RW)."""
+        # Check if FPMCU firmware needs to be re-flashed during cleanup
+        self._need_fw_restore = True
         self.set_hardware_write_protect(False)
-        flash_cmd = 'flash_fp_mcu' + ' ' + fw_path
+        flash_cmd = 'flash_fp_mcu' + ' --noservices ' + fw_path
         logging.info('Running flash cmd: %s', flash_cmd)
         flash_result = self.run_cmd(flash_cmd)
         self.set_hardware_write_protect(True)
@@ -866,7 +895,7 @@
         # Sync the filesystem in case we need to reboot the AP soon.
         self.run_cmd('sync')
 
-    def run_server_cmd(self, command, timeout=60):
+    def run_server_cmd(self, command, timeout=65):
         """Runs command on server; return result with output and exit code."""
         logging.info('Server execute: %s', command)
         result = utils.run(command, timeout=timeout, ignore_status=True)
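The cleanup() changes above follow a guarded pattern: attributes such as
_powerd_running and _need_fw_restore are set only once setup reaches the
corresponding step, so cleanup stays safe even if initialization bailed out
early. A minimal sketch of the idea, with hypothetical class and host names:

class FakeHost(object):
    """Stand-in for the DUT host object."""
    def upstart_restart(self, job):
        print('restarting %s' % job)


class FingerprintTestSketch(object):
    _POWERD_UPSTART_JOB_NAME = 'powerd'

    def __init__(self, host, stop_powerd):
        self.host = host
        if stop_powerd:
            # Only set when setup actually got this far.
            self._powerd_running = True

    def cleanup(self):
        # hasattr() keeps cleanup from crashing if setup never set the flag.
        if hasattr(self, '_powerd_running') and self._powerd_running:
            self.host.upstart_restart(self._POWERD_UPSTART_JOB_NAME)


FingerprintTestSketch(FakeHost(), stop_powerd=True).cleanup()   # restarts powerd
FingerprintTestSketch(FakeHost(), stop_powerd=False).cleanup()  # no-op, no crash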
diff --git a/server/cros/faft/firmware_test.py b/server/cros/faft/firmware_test.py
index 5e4e764..50d485a 100644
--- a/server/cros/faft/firmware_test.py
+++ b/server/cros/faft/firmware_test.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -9,27 +10,26 @@
 import os
 import pprint
 import re
-import StringIO
 import time
 import uuid
+from xml.parsers import expat
 
+import six
 from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib.cros import retry
-from autotest_lib.client.common_lib.cros import tpm_utils
+from autotest_lib.client.common_lib import error, global_config
+from autotest_lib.client.common_lib.cros import dev_server, retry, tpm_utils
 from autotest_lib.server import test
 from autotest_lib.server.cros import vboot_constants as vboot
-from autotest_lib.server.cros.faft.utils.config import Config as FAFTConfig
+from autotest_lib.server.cros.faft import telemetry
 from autotest_lib.server.cros.faft.rpc_proxy import RPCProxy
-from autotest_lib.server.cros.faft.utils import mode_switcher
+from autotest_lib.server.cros.faft.utils import (menu_mode_switcher,
+                                                 menu_navigator, mode_switcher)
+from autotest_lib.server.cros.faft.utils.config import Config as FAFTConfig
 from autotest_lib.server.cros.faft.utils.faft_checkers import FAFTCheckers
 from autotest_lib.server.cros.power import utils as PowerUtils
-from autotest_lib.server.cros.servo import chrome_base_ec
-from autotest_lib.server.cros.servo import chrome_cr50
-from autotest_lib.server.cros.servo import chrome_ec
-from autotest_lib.server.cros.servo import servo
-from autotest_lib.server.cros.faft import telemetry
+from autotest_lib.server.cros.servo import (chrome_base_ec, chrome_cr50,
+                                            chrome_ec, chrome_ti50, servo)
+from autotest_lib.site_utils import test_runner_utils
 
 # Experimentally tuned time in minutes to wait for partition device nodes on a
 # USB stick to be ready after plugging in the stick.
@@ -67,6 +67,9 @@
     OTHER_KERNEL_MAP = {'a':'4', 'b':'2', '2':'4', '4':'2', '3':'4', '5':'2'}
     OTHER_ROOTFS_MAP = {'a':'5', 'b':'3', '2':'5', '4':'3', '3':'5', '5':'3'}
 
+    # Mapping of kernel type and name.
+    KERNEL_TYPE_NAME_MAP = {'KERN': 'kernel', 'MINIOS': 'minios'}
+
     CHROMEOS_MAGIC = "CHROMEOS"
     CORRUPTED_MAGIC = "CORRUPTD"
 
@@ -90,6 +93,10 @@
     # Delay for establishing state after changing PD settings
     PD_RESYNC_DELAY = 2
 
+    # Delay to wait for servo USB to work after power role swap:
+    # tPSSourceOff (920ms) + tPSSourceOn (480ms) + buffer
+    POWER_ROLE_SWAP_DELAY = 2
+
     # The default number of power state check retries (each try takes 3 secs)
     DEFAULT_PWR_RETRIES = 5
 
@@ -148,11 +155,15 @@
         self.run_id = str(uuid.uuid4())
         self._client = host
         self.servo = host.servo
+        if self.servo is None:
+            raise error.TestError('FirmwareTest failed to set up servo')
 
         self.lockfile = '/usr/local/tmp/faft/lock'
         self._backup_gbb_flags = None
         self._backup_firmware_identity = dict()
         self._backup_kernel_sha = dict()
+        for kernel_type in self.KERNEL_TYPE_NAME_MAP:
+            self._backup_kernel_sha[kernel_type] = dict()
         self._backup_cgpt_attr = dict()
         self._backup_dev_mode = None
         self._restore_power_mode = None
@@ -188,9 +199,20 @@
                 self.faft_client.system.get_model_name())
         self.checkers = FAFTCheckers(self)
 
+        # Mapping of kernel type and kernel servicer class
+        self.kernel_servicer = {
+                'KERN': self.faft_client.kernel,
+                'MINIOS': self.faft_client.minios,
+        }
+
         if self.faft_config.chrome_ec:
             self.ec = chrome_ec.ChromeEC(self.servo)
-        self.switcher = mode_switcher.create_mode_switcher(self)
+        self.menu_navigator = menu_navigator.create_menu_navigator(self)
+        self.switcher = mode_switcher.create_mode_switcher(
+                self, self.menu_navigator)
+        # This will be None for menuless UI
+        self.menu_switcher = menu_mode_switcher.create_menu_mode_switcher(
+                self, self.menu_navigator)
         # Check for presence of a USBPD console
         if self.faft_config.chrome_usbpd:
             self.usbpd = chrome_ec.ChromeUSBPD(self.servo)
@@ -200,18 +222,23 @@
         # Get pdtester console
         self.pdtester = host.pdtester
         self.pdtester_host = host._pdtester_host
-        # Check for presence of a working Cr50 console
-        if self.servo.has_control('cr50_version'):
+        gsc = None
+        if self.servo.has_control('ti50_version') or \
+            self.servo.has_control('ti50_version', 'ccd_gsc'):
+            gsc = chrome_ti50.ChromeTi50(self.servo, self.faft_config)
+        elif self.servo.has_control('cr50_version'):
+            gsc = chrome_cr50.ChromeCr50(self.servo, self.faft_config)
+        if gsc:
             try:
-                # Check that the console works before declaring the cr50 console
+                # Check that the gsc console works before declaring the
                 # connection exists and enabling uart capture.
-                cr50 = chrome_cr50.ChromeCr50(self.servo, self.faft_config)
-                cr50.get_version()
-                self.cr50 = cr50
+                gsc.get_version()
+                self.cr50 = gsc
             except servo.ControlUnavailableError:
-                logging.warn('cr50 console not supported.')
+                logging.warning('gsc console not supported.')
             except Exception as e:
-                logging.warn('Ignored unknown cr50 version error: %s', str(e))
+                logging.warning('Ignored unknown gsc version error: %s',
+                                str(e))
 
         if 'power_control' in args:
             self.power_control = args['power_control']
@@ -268,7 +295,6 @@
         self._setup_gbb_flags()
         self.faft_client.updater.stop_daemon()
         self._create_faft_lockfile()
-        self._create_old_faft_lockfile()
         self._setup_ec_write_protect(ec_wp)
         # See chromium:239034 regarding needing this sync.
         self.blocking_sync()
@@ -280,7 +306,7 @@
         @return: True if build is verified to be on USB key, False otherwise.
         """
         info = self._client.host_info_store.get()
-        if info.build:
+        if info.build and info.build != test_runner_utils.NO_BUILD:
             current_build = self._client._servo_host.validate_image_usbkey()
             if current_build != info.build:
                 logging.debug('Current build on USB: %s differs from test'
@@ -289,17 +315,19 @@
                 try:
                     self._client.stage_build_to_usb(info.build)
                     return True
-                except error.AutotestError as e:
-                    logging.warn('Stage build to USB failed, tests that require'
-                                 ' test image on Servo USB may fail: {}'.format(e))
+                except (error.AutotestError,
+                        dev_server.DevServerException) as e:
+                    logging.warning(
+                            'Stage build to USB failed, tests that require'
+                            ' test image on Servo USB may fail: {}'.format(e))
                     return False
             else:
                 logging.debug('Current build on USB: %s is same as test'
                               ' build, skip download.', current_build)
                 return True
         else:
-            logging.warn('Failed to get build label from the DUT, will use'
-                         ' existing image in Servo USB.')
+            logging.warning('Failed to get build label from the DUT, will use'
+                            ' existing image in Servo USB.')
             return False
 
     def run_once(self, *args, **dargs):
@@ -361,7 +389,7 @@
         try:
             self._record_uart_capture()
         except:
-            logging.warn('Failed initial uart capture during cleanup')
+            logging.warning('Failed initial uart capture during cleanup')
 
         try:
             self.faft_client.system.is_available()
@@ -381,9 +409,8 @@
             self.faft_client.updater.start_daemon()
             self.faft_client.updater.cleanup()
             self._remove_faft_lockfile()
-            self._remove_old_faft_lockfile()
-            self._record_faft_client_log()
             self.faft_client.quit()
+            self.faft_client.collect_logfiles(self.resultsdir)
 
         # Capture any new uart output, then discard log messages again.
         self._cleanup_uart_capture()
@@ -468,7 +495,7 @@
             self.switcher.wait_for_client()
             return
         except ConnectionError:
-            logging.warn('Cold reboot doesn\'t help, still connection error.')
+            logging.warning("Cold reboot didn't help, still connection error.")
 
         # DUT may be broken by a corrupted firmware. Restore firmware.
         # We assume the recovery boot still works fine. Since the recovery
@@ -476,48 +503,48 @@
         # except GBB.
         if self.is_firmware_saved():
             self._ensure_client_in_recovery()
-            logging.info('Try restore the original firmware...')
-            if self.is_firmware_changed():
-                try:
-                    self.restore_firmware()
+            logging.info('Try restoring the original firmware...')
+            try:
+                if self.restore_firmware():
                     return
-                except ConnectionError:
-                    logging.warn('Restoring firmware doesn\'t help, still '
-                                 'connection error.')
+            except ConnectionError:
+                logging.warning("Restoring firmware didn't help, still "
+                                "connection error.")
 
         # Perhaps it's kernel that's broken. Let's try restoring it.
-        if self.is_kernel_saved():
-            self._ensure_client_in_recovery()
-            logging.info('Try restore the original kernel...')
-            if self.is_kernel_changed():
+        for kernel_type, kernel_name in self.KERNEL_TYPE_NAME_MAP.items():
+            if self.is_kernel_saved(kernel_type):
+                self._ensure_client_in_recovery()
+                logging.info('Try restoring the original %s...', kernel_name)
                 try:
-                    self.restore_kernel()
-                    return
+                    if self.restore_kernel(kernel_type=kernel_type):
+                        return
                 except ConnectionError:
-                    logging.warn('Restoring kernel doesn\'t help, still '
-                                 'connection error.')
+                    logging.warning(
+                            "Restoring %s didn't help, still "
+                            "connection error.", kernel_name)
 
         # DUT may be broken by a corrupted OS image. Restore OS image.
         self._ensure_client_in_recovery()
-        logging.info('Try restore the OS image...')
+        logging.info('Try restoring the OS image...')
         self.faft_client.system.run_shell_command('chromeos-install --yes')
         self.switcher.mode_aware_reboot(wait_for_dut_up=False)
         self.switcher.wait_for_client_offline()
         self.switcher.bypass_dev_mode()
         try:
             self.switcher.wait_for_client()
-            logging.info('Successfully restore OS image.')
+            logging.info('Successfully restored OS image.')
             return
         except ConnectionError:
-            logging.warn('Restoring OS image doesn\'t help, still connection '
-                         'error.')
+            logging.warning("Restoring OS image didn't help, still connection "
+                            "error.")
 
     def _ensure_client_in_recovery(self):
         """Ensure client in recovery boot; reboot into it if necessary.
 
         @raise TestError: if failed to boot the USB image.
         """
-        logging.info('Try boot into USB image...')
+        logging.info('Try booting into USB image...')
         self.switcher.reboot_to_mode(to_mode='rec', sync_before_boot=False,
                                      wait_for_dut_up=False)
         self.servo.switch_usbkey('host')
@@ -538,8 +565,8 @@
         # DUT is disconnected. Capture the UART output for debug.
         self._record_uart_capture()
 
-        # TODO(waihong@chromium.org): Implement replugging the Ethernet to
-        # identify if it is a network flaky.
+        # Replug the Ethernet to identify whether the failure is network flakiness.
+        self.servo.eth_power_reset()
 
         recovery_reason = self._retrieve_recovery_reason_from_trap()
 
@@ -617,7 +644,8 @@
                 raise error.TestError('USB stick in servo contains a %s '
                     'image, but DUT is a %s' % (usb_board, dut_board))
         finally:
-            for cmd in ('umount -l %s' % tmpd, 'sync', 'rm -rf %s' % tmpd):
+            for cmd in ('umount -l %s' % tmpd, 'sync %s' % usb_dev,
+                        'rm -rf %s' % tmpd):
                 self.servo.system(cmd)
 
         self.mark_setup_done('usb_check')
@@ -634,13 +662,16 @@
         @raise TestError: If Servo v4 not setup properly.
         """
 
-        # PD FAFT is only tested with a least a servo V4 with servo micro
-        # or C2D2.
-        if pd_faft and (
-                'servo_v4_with_servo_micro' not in self.pdtester.servo_type
-        ) and ('servo_v4_with_c2d2' not in self.pdtester.servo_type):
-            raise error.TestError('servo_v4_with_servo_micro or '
-                                  'servo_v4_with_c2d2 is a mandatory setup '
+        # PD FAFT is only tested with a combination of servo_v4 or servo_v4p1
+        # with servo micro or C2D2.
+        pd_setup = []
+        for first in self.pdtester.FIRST_PD_SETUP_ELEMENT:
+            for second in self.pdtester.SECOND_PD_SETUP_ELEMENT:
+                pd_setup.append(first + '_with_' + second)
+
+        if pd_faft and self.pdtester.servo_type not in pd_setup:
+            raise error.TestError(', '.join(pd_setup) +
+                                  ' is a mandatory setup '
                                   'for PD FAFT. Got %s.' %
                                   self.pdtester.servo_type)
 
@@ -653,10 +684,12 @@
 
         # Servo v4 by default has dts_mode enabled. Enabling dts_mode affects
         # the behaviors of what PD FAFT tests. So we want it disabled.
-        if 'servo_v4' in self.pdtester.servo_type:
+        pd_tester_device = self.pdtester.servo_type.split('_with_')[0]
+        if pd_tester_device in self.pdtester.FIRST_PD_SETUP_ELEMENT:
             self.servo.set_dts_mode('on' if dts_mode else 'off')
         else:
-            logging.warn('Configuring DTS mode only supported on Servo v4')
+            logging.warning('Configuring DTS mode only supported on %s',
+                            pd_tester_device)
 
         self.pdtester.set('usbc_polarity', 'cc2' if flip_cc else 'cc1')
         # Make it sourcing max voltage.
@@ -666,13 +699,13 @@
 
         # Servo v4 requires an external charger to source power. Make sure
         # this setup is correct.
-        if 'servo_v4' in self.pdtester.servo_type:
-            role = self.pdtester.get('servo_v4_role')
+        if pd_tester_device in self.pdtester.FIRST_PD_SETUP_ELEMENT:
+            role = self.pdtester.get('servo_pd_role')
             if role != 'src':
                 raise error.TestError(
-                        'Servo v4 is not sourcing power! Make sure the servo '
+                        '%s is not sourcing power! Make sure the servo '
                         '"DUT POWER" port is connected to a working charger. '
-                        'servo_v4_role:%s' % role)
+                        'servo_pd_role:%s' % (pd_tester_device, role))
 
     def setup_usbkey(self, usbkey, host=None, used_for_recovery=None):
         """Setup the USB disk for the test.
@@ -720,6 +753,12 @@
             # low and sleep for a while for charging.
             self.set_servo_v4_role_to_snk()
 
+            # Force reconnection; otherwise, the next RPC call will timeout
+            logging.info('Waiting for reconnection after power role swap...')
+            time.sleep(self.POWER_ROLE_SWAP_DELAY)
+            self.faft_client.disconnect()
+            self.faft_client.connect()
+
     def set_servo_v4_role_to_snk(self, pd_comm=False):
         """Set the servo v4 role to SNK.
 
@@ -794,60 +833,41 @@
         command = 'touch %s' % (self.lockfile)
         self.faft_client.system.run_shell_command(command)
 
-    def _create_old_faft_lockfile(self):
-        """
-        Creates the FAFT lockfile in its legacy location.
-
-        TODO (once M83 is stable, approx. June 9 2020):
-        Delete this function, as platform/installer/chromeos-setgoodkernel
-        will look for the lockfile in the new location
-        (/usr/local/tmp/faft/lock)
-        """
-        logging.info('Creating legacy FAFT lockfile...')
-        self.faft_client.system.run_shell_command('mkdir -p /var/tmp/faft')
-        self.faft_client.system.run_shell_command('touch /var/tmp/faft/lock')
-
     def _remove_faft_lockfile(self):
         """Removes the FAFT lockfile."""
         logging.info('Removing FAFT lockfile...')
         command = 'rm -f %s' % (self.lockfile)
         self.faft_client.system.run_shell_command(command)
 
-    def _remove_old_faft_lockfile(self):
-        """
-        Removes the FAFT lockfile from its legacy location.
-
-        TODO (once M83 is stable, approx. June 9 2020):
-        Delete this function, as platform/installer/chromeos-setgoodkernel
-        will look for the lockfile in the new location
-        (/usr/local/tmp/faft/lock)
-        """
-        logging.info('Removing legacy FAFT lockfile...')
-        self.faft_client.system.run_shell_command('rm -rf /var/tmp/faft')
-
-    def clear_set_gbb_flags(self, clear_mask, set_mask):
+    def clear_set_gbb_flags(self, clear_mask, set_mask, reboot=True):
         """Clear and set the GBB flags in the current flashrom.
 
         @param clear_mask: A mask of flags to be cleared.
         @param set_mask: A mask of flags to be set.
+        @param reboot: If true, then this method will reboot the DUT if certain
+                       flags are modified.
         """
         gbb_flags = self.faft_client.bios.get_gbb_flags()
         new_flags = gbb_flags & ctypes.c_uint32(~clear_mask).value | set_mask
-        self.gbb_flags = new_flags
-        if new_flags != gbb_flags:
+        if new_flags == gbb_flags:
+            logging.info(
+                    'Current GBB flags (0x%x) match desired clear mask '
+                    '(0x%x) and desired set mask (0x%x)', gbb_flags,
+                    clear_mask, set_mask)
+            return
+
+        logging.info('Changing GBB flags from 0x%x to 0x%x.', gbb_flags,
+                     new_flags)
+        if self._backup_gbb_flags is None:
             self._backup_gbb_flags = gbb_flags
-            logging.info('Changing GBB flags from 0x%x to 0x%x.',
-                         gbb_flags, new_flags)
-            self.faft_client.bios.set_gbb_flags(new_flags)
-            # If changing FORCE_DEV_SWITCH_ON or DISABLE_EC_SOFTWARE_SYNC flag,
-            # reboot to get a clear state
-            if ((gbb_flags ^ new_flags) &
-                (vboot.GBB_FLAG_FORCE_DEV_SWITCH_ON |
-                 vboot.GBB_FLAG_DISABLE_EC_SOFTWARE_SYNC)):
-                self.switcher.mode_aware_reboot()
-        else:
-            logging.info('Current GBB flags look good for test: 0x%x.',
-                         gbb_flags)
+        self.faft_client.bios.set_gbb_flags(new_flags)
+
+        # If changing FORCE_DEV_SWITCH_ON or DISABLE_EC_SOFTWARE_SYNC flag,
+        # reboot to get a clear state
+        if reboot and bool((gbb_flags ^ new_flags)
+                           & (vboot.GBB_FLAG_FORCE_DEV_SWITCH_ON
+                              | vboot.GBB_FLAG_DISABLE_EC_SOFTWARE_SYNC)):
+            self.switcher.mode_aware_reboot()
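A worked example of the mask arithmetic in clear_set_gbb_flags(): bits in
clear_mask are dropped, bits in set_mask are added, and ctypes.c_uint32 keeps
the complemented mask within 32 bits. The flag values below are illustrative;
the real constants come from vboot_constants.

import ctypes

GBB_FLAG_DEV_SCREEN_SHORT_DELAY = 0x00000001   # example values only
GBB_FLAG_FORCE_DEV_SWITCH_ON = 0x00000008

gbb_flags = GBB_FLAG_DEV_SCREEN_SHORT_DELAY | GBB_FLAG_FORCE_DEV_SWITCH_ON
clear_mask = GBB_FLAG_FORCE_DEV_SWITCH_ON
set_mask = 0x00000100

new_flags = gbb_flags & ctypes.c_uint32(~clear_mask).value | set_mask
assert new_flags == (GBB_FLAG_DEV_SCREEN_SHORT_DELAY | 0x00000100)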
 
 
     def _check_capability(self, target, required_cap, suppress_warning):
@@ -866,8 +886,9 @@
         for cap in required_cap:
             if cap not in getattr(self.faft_config, target + '_capability'):
                 if not suppress_warning:
-                    logging.warn('Requires %s capability "%s" to run this '
-                                 'test.', target, cap)
+                    logging.warning(
+                            'Requires %s capability "%s" to run this '
+                            'test.', target, cap)
                 return False
 
         return True
@@ -883,7 +904,7 @@
         """
         if not self.faft_config.chrome_ec:
             if not suppress_warning:
-                logging.warn('Requires Chrome EC to run this test.')
+                logging.warning('Requires Chrome EC to run this test.')
             return False
         return self._check_capability('ec', required_cap, suppress_warning)
 
@@ -898,7 +919,7 @@
         """
         if not hasattr(self, 'cr50'):
             if not suppress_warning:
-                logging.warn('Requires Chrome Cr50 to run this test.')
+                logging.warning('Requires Chrome Cr50 to run this test.')
             return False
         return self._check_capability('cr50', required_cap, suppress_warning)
 
@@ -978,7 +999,6 @@
 
         @param original_dev_boot_usb: Original dev_boot_usb value.
         """
-        logging.info('Checking internal device boot.')
         self.faft_client.system.set_dev_default_boot()
         if self.faft_client.system.is_removable_device_boot():
             logging.info('Reboot into internal disk...')
@@ -1154,8 +1174,8 @@
         else:
             self._restore_power_mode = False
             raise error.TestFail('System fail to enter %s state. '
-                    'Current state: %s', target_power_state,
-                    self.get_power_state())
+                                 'Current state: %s' %
+                                 (target_power_state, self.get_power_state()))
 
     def restore_ap_on_power_mode(self):
         """
@@ -1185,35 +1205,44 @@
         pattern = r'power state (\w+) = (\w+),'
 
         try:
-            match = self.ec.send_command_get_output("powerinfo", [pattern])
-        except error.TestFail as err:
-            logging.warn("powerinfo command encountered an error: %s", err)
+            match = self.ec.send_command_get_output("powerinfo", [pattern],
+                                                    retries=3)
+        except (error.TestFail, expat.ExpatError) as err:
+            logging.warning("powerinfo command encountered an error: %s", err)
             return None
         if not match:
-            logging.warn("powerinfo output did not match pattern: %r", pattern)
+            logging.warning("powerinfo output did not match pattern: %r",
+                            pattern)
             return None
         (line, state_num, state_name) = match[0]
         logging.debug("power state info %r", match)
         return state_name
 
-    def _check_power_state(self, power_state):
+    def _check_power_state(self, expected_power_state, actual_power_state):
         """
         Check for correct power state of the AP (via EC 'powerinfo' command)
 
+        @param expected_power_state: full-string regex of power state you are
+                                     expecting
+        @param actual_power_state: the power state returned from get_power_state
         @return: the line and the match, if the output matched.
         @raise error.TestFail: if output didn't match after the delay.
         """
-        if not isinstance(power_state, str):
+        if not isinstance(expected_power_state, six.string_types):
             raise error.TestError('%s is not a string while it should be.' %
-                                  power_state)
-        return self.ec.send_command_get_output("powerinfo",
-            ['\\b' + power_state + '\\b'])
+                                  expected_power_state)
+        if not isinstance(actual_power_state, six.string_types):
+            raise error.TestError('%s is not a string while it should be.' %
+                                  actual_power_state)
+        if re.match('^' + expected_power_state + '$', actual_power_state):
+            return True
+        return False
 
-    def wait_power_state(self, power_state, retries, retry_delay=0):
+    def wait_power_state(self, power_state, retries, retry_delay=3):
         """
         Wait for certain power state.
 
-        @param power_state: power state you are expecting
+        @param power_state: full-string regex of power state you are expecting
         @param retries: retries.  This is necessary if AP is powering down
         and transitioning through different states.
         @param retry_delay: delay between retries in seconds
@@ -1221,24 +1250,28 @@
         logging.info('Checking power state "%s" maximum %d times.',
                      power_state, retries)
 
-        # Reset the cache, in case previous calls silently changed it on servod
-        self.ec.set_uart_regexp('None')
-
+        last_power_state = ''
         while retries > 0:
-            logging.info("try count: %d", retries)
+            logging.debug("try count: %d", retries)
             start_time = time.time()
             try:
                 retries = retries - 1
-                if self._check_power_state(power_state):
+                actual_power_state = self.get_power_state()
+                if last_power_state != actual_power_state:
+                    logging.info("power state: %s", actual_power_state)
+                if actual_power_state is None:
+                    continue
+                if self._check_power_state(power_state, actual_power_state):
                     return True
-            except error.TestFail:
+                last_power_state = actual_power_state
+            except (error.TestFail, expat.ExpatError):
                 pass
             delay_time = retry_delay - time.time() + start_time
             if delay_time > 0:
                 time.sleep(delay_time)
         return False
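
For reference, a hypothetical call site for the reworked polling loop above; the pattern is a full-string regex matched against the state name reported by the EC's powerinfo command, so alternations like the one below are possible:

# Hypothetical usage sketch: after suspending the DUT, poll (3 s apart by
# default) until the EC reports S0ix or S3.
self.suspend()
if not self.wait_power_state('(S0ix|S3)', retries=10):
    raise error.TestFail('DUT did not reach S0ix/S3 after suspend')
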
 
-    def run_shutdown_cmd(self):
+    def run_shutdown_cmd(self, wait_for_offline=True):
         """Shut down the DUT by running '/sbin/shutdown -P now'."""
         self.faft_client.disconnect()
         # Shut down in the background after sleeping so the call gets a reply.
@@ -1247,10 +1280,12 @@
         except error.AutoservRunError as e:
             # From the ssh man page, error code 255 indicates ssh errors.
             if e.result_obj.exit_status == 255:
-                logging.warn("Ignoring error from ssh: %s", e)
+                logging.warning("Ignoring error from ssh: %s", e)
             else:
                 raise
-        self.switcher.wait_for_client_offline()
+        if wait_for_offline:
+            self.switcher.wait_for_client_offline()
+        self._client.close_main_ssh()
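
A hedged sketch of how a caller might use the new wait_for_offline knob: skip the built-in offline wait when the test prefers to watch the EC power state itself right after issuing the shutdown.

# Hypothetical caller sketch: skip the built-in offline wait and instead
# poll the EC power state until the DUT reaches G3.
self.run_shutdown_cmd(wait_for_offline=False)
if not self.wait_power_state('G3', retries=10):
    raise error.TestFail('DUT did not reach G3 after shutdown')
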
 
     def suspend(self):
         """Suspends the DUT."""
@@ -1259,21 +1294,14 @@
         self.faft_client.system.run_shell_command(cmd, block)
         time.sleep(self.EC_SUSPEND_DELAY)
 
-    def _record_faft_client_log(self):
-        """Record the faft client log to the results directory."""
-        client_log = self.faft_client.system.dump_log(True)
-        client_log_file = os.path.join(self.resultsdir, 'faft_client.log')
-        with open(client_log_file, 'w') as f:
-            f.write(client_log)
-
     def _setup_gbb_flags(self):
         """Setup the GBB flags for FAFT test."""
         if self.check_setup_done('gbb_flags'):
             return
 
-        logging.info('Set proper GBB flags for test.')
+        logging.info('Setting proper GBB flags for test.')
         # Ensure that GBB flags are set to 0x140.
-        flags_to_set = (vboot.GBB_FLAG_FAFT_KEY_OVERIDE |
+        flags_to_set = (vboot.GBB_FLAG_RUNNING_FAFT |
                         vboot.GBB_FLAG_ENTER_TRIGGERS_TONORM)
         # And if the "no_ec_sync" argument is set, then disable EC software
         # sync.
@@ -1385,7 +1413,7 @@
 
     def do_blocking_sync(self, device):
         """Run a blocking sync command."""
-        logging.info("Blocking sync for %s", device)
+        logging.debug("Blocking sync for %s", device)
 
         if 'mmcblk' in device:
             # For mmc devices, use `mmc status get` command to send an
@@ -1439,7 +1467,7 @@
             try:
                 return self._client.blocking_sync(freeze_for_reset)
             except (AttributeError, ImportError, error.AutoservRunError) as e:
-                logging.warn(
+                logging.warning(
                         'Falling back to old sync method due to error: %s', e)
 
         # The double calls to sync fakes a blocking call
@@ -1489,7 +1517,7 @@
         boot_id = self.get_bootid()
         self.faft_client.disconnect()
 
-        # Press power button to trigger Chrome OS normal shutdown process.
+        # Press power button to trigger ChromeOS normal shutdown process.
         # We use a customized delay since the normal-press 1.2s is not enough.
         self.servo.power_key(self.faft_config.hold_pwr_button_poweroff)
         # device can take 44-51 seconds to restart,
@@ -1718,10 +1746,10 @@
             except error.AutoservRunError:
                 retry -= 1
                 if retry:
-                    logging.info('Retry to get boot_id...')
+                    logging.debug('Retry to get boot_id...')
                 else:
                     logging.warning('Failed to get boot_id.')
-        logging.info('boot_id: %s', boot_id)
+        logging.debug('boot_id: %s', boot_id)
         return boot_id
 
     def check_state(self, func):
@@ -1743,9 +1771,7 @@
         @return: The result value of the action function.
         @raise TestFail: If the function does not succeed.
         """
-        logging.info("-[FAFT]-[ start stepstate_checker ]----------")
         self._call_action(func, check_status=True)
-        logging.info("-[FAFT]-[ end state_checker ]----------------")
 
     def get_current_firmware_identity(self):
         """Get current firmware sha and fwids of body and vblock.
@@ -1859,11 +1885,11 @@
             try:
                 self._client.run(ec_cmd, timeout=300)
             except error.AutoservSSHTimeout:
-                logging.warn("DUT connection died during EC restore")
+                logging.warning("DUT connection died during EC restore")
                 self.faft_client.disconnect()
 
             except error.GenericHostRunError:
-                logging.warn("DUT command failed during EC restore")
+                logging.warning("DUT command failed during EC restore")
                 logging.debug("Full exception:", exc_info=True)
             if reboot_ec:
                 self.switcher.mode_aware_reboot(
@@ -1915,65 +1941,80 @@
                 self.faft_client.ec.dump_firmware(ec_in_work_path)
             self.faft_client.updater.repack_shellball()
 
-    def is_kernel_changed(self):
+    def is_kernel_changed(self, kernel_type='KERN'):
         """Check if the current kernel is changed, by comparing its SHA1 hash.
 
+        @param kernel_type: The type name of kernel ('KERN' or 'MINIOS').
         @return: True if it is changed; otherwise, False.
         """
         changed = False
         for p in ('A', 'B'):
-            backup_sha = self._backup_kernel_sha.get(p, None)
-            current_sha = self.faft_client.kernel.get_sha(p)
+            backup_sha = self._backup_kernel_sha[kernel_type].get(p, None)
+            current_sha = self.kernel_servicer[kernel_type].get_sha(p)
             if backup_sha != current_sha:
                 changed = True
-                logging.info('Kernel %s is changed', p)
+                logging.info('Kernel %s-%s is changed', kernel_type, p)
         return changed
 
-    def backup_kernel(self, suffix='.original'):
+    def backup_kernel(self, suffix='.original', kernel_type='KERN'):
         """Backup kernel to files, and the send them to host.
 
+        @param kernel_type: The type name of kernel ('KERN' or 'MINIOS').
         @param suffix: a string appended to backup file name.
         """
+        kernel_name = self.KERNEL_TYPE_NAME_MAP[kernel_type]
+        servicer = self.kernel_servicer[kernel_type]
         remote_temp_dir = self.faft_client.system.create_temp_dir()
         for p in ('A', 'B'):
-            remote_path = os.path.join(remote_temp_dir, 'kernel_%s' % p)
-            self.faft_client.kernel.dump(p, remote_path)
+            remote_path = os.path.join(remote_temp_dir,
+                                       '%s_%s' % (kernel_name, p))
+            servicer.dump(p, remote_path)
             self._client.get_file(
                     remote_path,
-                    os.path.join(self.resultsdir, 'kernel_%s%s' % (p, suffix)))
-            self._backup_kernel_sha[p] = self.faft_client.kernel.get_sha(p)
-        logging.info('Backup kernel stored in %s with suffix %s',
-            self.resultsdir, suffix)
+                    os.path.join(self.resultsdir,
+                                 '%s_%s%s' % (kernel_name, p, suffix)))
+            self._backup_kernel_sha[kernel_type][p] = servicer.get_sha(p)
+        logging.info('Backup %s stored in %s with suffix %s', kernel_name,
+                     self.resultsdir, suffix)
 
-    def is_kernel_saved(self):
+    def is_kernel_saved(self, kernel_type='KERN'):
         """Check if kernel images are saved (backup_kernel called before).
 
+        @param kernel_type: The type name of kernel ('KERN' or 'MINIOS').
         @return: True if the kernel is saved; otherwise, False.
         """
-        return len(self._backup_kernel_sha) != 0
+        return len(self._backup_kernel_sha[kernel_type]) != 0
 
-    def restore_kernel(self, suffix='.original'):
+    def restore_kernel(self, suffix='.original', kernel_type='KERN'):
         """Restore kernel from host in resultsdir.
 
+        @param kernel_type: The type name of kernel ('KERN' or 'MINIOS').
         @param suffix: a string appended to backup file name.
+        @return: True if the kernel needed to be restored.
         """
-        if not self.is_kernel_changed():
-            return
+        if not self.is_kernel_changed(kernel_type):
+            return False
 
         # Backup current corrupted kernel.
-        self.backup_kernel(suffix='.corrupt')
+        self.backup_kernel(suffix='.corrupt', kernel_type=kernel_type)
 
         # Restore kernel.
+        kernel_name = self.KERNEL_TYPE_NAME_MAP[kernel_type]
+        servicer = self.kernel_servicer[kernel_type]
         remote_temp_dir = self.faft_client.system.create_temp_dir()
         for p in ('A', 'B'):
-            remote_path = os.path.join(remote_temp_dir, 'kernel_%s' % p)
+            remote_path = os.path.join(remote_temp_dir,
+                                       '%s_%s' % (kernel_name, p))
+            servicer.dump(p, remote_path)
             self._client.send_file(
-                    os.path.join(self.resultsdir, 'kernel_%s%s' % (p, suffix)),
+                    os.path.join(self.resultsdir,
+                                 '%s_%s%s' % (kernel_name, p, suffix)),
                     remote_path)
-            self.faft_client.kernel.write(p, remote_path)
+            servicer.write(p, remote_path)
 
         self.switcher.mode_aware_reboot()
-        logging.info('Successfully restored kernel.')
+        logging.info('Successfully restored %s.', kernel_type)
+        return True
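
The kernel_type plumbing above relies on two lookup tables defined elsewhere in firmware_test.py. Their assumed shape, as they might be initialized in the test's setup, is roughly the following (the names come from the calls above; the values are an assumption, not a quote of the actual file):

# Assumed shape of the lookup tables referenced above: a display name and an
# RPC servicer per kernel type, plus a per-type dict of backed-up SHA1 hashes.
KERNEL_TYPE_NAME_MAP = {'KERN': 'kernel', 'MINIOS': 'minios'}
self.kernel_servicer = {
        'KERN': self.faft_client.kernel,
        'MINIOS': self.faft_client.minios,
}
self._backup_kernel_sha = {'KERN': {}, 'MINIOS': {}}
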
 
     def backup_cgpt_attributes(self):
         """Backup CGPT partition table attributes."""
@@ -2156,7 +2197,8 @@
                             ignore_status=True)
         if res.exit_status and res.exit_status != self.FWMP_CLEARED_EXIT_STATUS:
             raise error.TestError('Could not run cryptohome command %r' % res)
-        return self.FWMP_CLEARED_ERROR_MSG in res.stdout
+        return (self.FWMP_CLEARED_ERROR_MSG in res.stdout
+                or tpm_utils.FwmpIsAllZero(res.stdout))
 
 
     def _tpm_is_owned(self):
@@ -2207,18 +2249,10 @@
         if not hasattr(self, 'cr50'):
             raise error.TestNAError('Test can only be run on devices with '
                                     'access to the Cr50 console')
-
-        logging.info('checking dut state')
+        logging.info('Bringing DUT up')
 
         self.servo.set_nocheck('cold_reset', 'off')
-        try:
-            self.servo.set_nocheck('warm_reset', 'off')
-        except error.TestFail as e:
-            # TODO(b/159338538): remove once the kukui remap issue is resolved.
-            if 'Timed out waiting for interfaces to become available' in str(e):
-                logging.warn('Ignoring warm_reset interface issue b/159338538')
-            else:
-                raise
+        self.servo.set_nocheck('warm_reset', 'off')
 
         time.sleep(self.cr50.SHORT_WAIT)
         if not self.cr50.ap_is_on():
@@ -2229,7 +2263,8 @@
         while not self.host.ping_wait_up(
                 self.faft_config.delay_reboot_to_ping * 2):
             if time.time() > end_time:
-                logging.warn('DUT is unresponsive after trying to bring it up')
+                logging.warning(
+                        'DUT is unresponsive after trying to bring it up')
                 return
             self.servo.get_power_state_controller().reset()
             logging.info('DUT did not respond. Resetting it.')
@@ -2262,10 +2297,10 @@
                                     'access to the Cr50 console')
 
         self._ccd_open_job.process_output()
+        output = self._ccd_open_stdout.getvalue()
         self._ccd_open_stdout.seek(self._ccd_open_last_len)
-        output = self._ccd_open_stdout.read()
-        self._ccd_open_last_len = self._ccd_open_stdout.len
-        return output
+        self._ccd_open_last_len = len(output)
+        return self._ccd_open_stdout.read().strip()
 
     def _close_ccd_open_job(self):
         """Terminate the process and check the results."""
@@ -2304,7 +2339,7 @@
 
         self._ccd_open_last_len = 0
 
-        self._ccd_open_stdout = StringIO.StringIO()
+        self._ccd_open_stdout = six.StringIO()
 
         ccd_open_cmd = utils.sh_escape('gsctool -a -o')
         full_ssh_cmd = '%s "%s"' % (self.host.ssh_command(options='-tt'),
@@ -2378,7 +2413,7 @@
             raise error.TestNAError('Test can only be run on devices with '
                                     'access to the Cr50 console')
 
-        if self.servo.main_device_is_ccd():
+        if self.servo.main_device_is_ccd() and not self.cr50.testlab_is_on():
             error_txt = 'because the main servo device is CCD.'
             if enable_testlab:
                 raise error.TestNAError('Cannot enable testlab: %s' % error_txt)
@@ -2416,7 +2451,7 @@
                 self.cr50.set_ccd_testlab('on')
 
         if reset_ccd:
-            self.cr50.send_command('ccd reset')
+            self.cr50.ccd_reset()
 
         # In default, the device should be in normal mode. After opening cr50,
         # the TPM should be cleared and the device should automatically reset to
diff --git a/server/cros/faft/firmware_test_unittest.py b/server/cros/faft/firmware_test_unittest.py
index 1ae77275..42ef960 100644
--- a/server/cros/faft/firmware_test_unittest.py
+++ b/server/cros/faft/firmware_test_unittest.py
@@ -1,10 +1,11 @@
+# Lint as: python2, python3
 # Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import mock
-import re
 import unittest
+from unittest import mock
+
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft import firmware_test
 
@@ -111,29 +112,10 @@
     # Mock out EC behavior to return definable power states
     class MockedECFirmwareTest(firmware_test.FirmwareTest):
         """A stubbed out FirmwareTest to check the precision behavior"""
-        class FakeEC:
-            """A stub EC class providing what's needed for this test"""
-            def __init__(self):
-                self.test = None
-                self.match = None
-
-            def set_test_string(self, s):
-                """Sets the string to test again"""
-                self.test = s
-
-            def send_command_get_output(self, _cmd, regex_list):
-                """Stub to simulate matching EC output against regex_list"""
-                self.match = None
-
-                for r in regex_list:
-                    result = re.search(r, self.test)
-                    if result is not None:
-                        self.match = result.group(0)
-                        break
 
         def __init__(self, *_args, **_dargs):
             # pylint: disable=super-init-not-called
-            self.ec = self.FakeEC()
+            pass
 
     # power_state is supposed to be a string, but lists seem somewhat common,
     # so guard against them.
@@ -141,42 +123,58 @@
         ft = self.MockedECFirmwareTest()
 
         with self.assertRaises(error.TestError):
-            ft._check_power_state([])
+            ft._check_power_state([], 'S0')
 
     def test_s0ix_isnt_s0(self):
         ft = self.MockedECFirmwareTest()
 
-        ft.ec.set_test_string("S0ix")
-        ft._check_power_state("S0")
-        self.assertIsNone(ft.ec.match)
+        self.assertEqual(False, ft._check_power_state("S0", "S0ix"))
 
-    def test_s0_in_parens_is_found(self):
+    def test_s0_is_found(self):
         ft = self.MockedECFirmwareTest()
 
-        ft.ec.set_test_string("(S0)")
-        ft._check_power_state("S0")
-        self.assertEqual(ft.ec.match, "S0")
+        self.assertEqual(True, ft._check_power_state("S0", "S0"))
+
+    def test_s0_is_found_unicode(self):
+        ft = self.MockedECFirmwareTest()
+
+        self.assertEqual(True, ft._check_power_state(u"S0", "S0"))
+        self.assertEqual(True, ft._check_power_state("S0", u"S0"))
+        self.assertEqual(True, ft._check_power_state(u"S0", u"S0"))
+
+    def test_s0_or_s3_is_found(self):
+        ft = self.MockedECFirmwareTest()
+
+        self.assertEqual(True, ft._check_power_state("(S0|S3)", "S0"))
+        self.assertEqual(True, ft._check_power_state("(S0|S3)", "S3"))
+        self.assertEqual(False, ft._check_power_state("(S0|S3)", "G3"))
 
 
 class Test_stage_build_to_usbkey(unittest.TestCase):
+    """stage_build_to_usbkey test"""
+
     class MockFirmwareTest(firmware_test.FirmwareTest):
+        """Mock of FirmwareTest"""
+
         def __init__(self):
             self._client = mock.MagicMock()
+            self.faft_client = mock.MagicMock()
 
     def setUp(self):
         self.test = self.MockFirmwareTest()
 
     def test_stage_build_to_usbkey(self):
-        self.test._client.host_info_store.get.return_value.build = "dummy_build"
+        self.test._client.host_info_store.get.return_value.build = "placeholder_build"
         self.test._client._servo_host.validate_image_usbkey.return_value = (
             "another_build")
         self.assertTrue(self.test.stage_build_to_usbkey())
-        self.test._client.stage_build_to_usb.assert_called_with("dummy_build")
+        self.test._client.stage_build_to_usb.assert_called_with(
+                "placeholder_build")
 
     def test_stage_build_to_usbkey_same_build(self):
-        self.test._client.host_info_store.get.return_value.build = "dummy_build"
+        self.test._client.host_info_store.get.return_value.build = "placeholder_build"
         self.test._client._servo_host.validate_image_usbkey.return_value = (
-            "dummy_build")
+                "placeholder_build")
         self.assertTrue(self.test.stage_build_to_usbkey())
         self.test._client.stage_build_to_usb.assert_not_called()
 
@@ -186,27 +184,30 @@
         self.test._client.stage_build_to_usb.assert_not_called()
 
     def test_stage_build_to_usbkey_download_error(self):
-        self.test._client.host_info_store.get.return_value.build = "dummy_build"
+        self.test._client.host_info_store.get.return_value.build = "placeholder_build"
         self.test._client._servo_host.validate_image_usbkey.return_value = (
             "another_build")
         self.test._client.stage_build_to_usb = (
             mock.MagicMock(side_effect=error.AutotestError("download")))
         self.assertFalse(self.test.stage_build_to_usbkey())
-        self.test._client.stage_build_to_usb.assert_called_with("dummy_build")
+        self.test._client.stage_build_to_usb.assert_called_with(
+                "placeholder_build")
 
     def test_setup_usbkey(self):
-        self.test._client.host_info_store.get.return_value.build = "dummy_build"
+        self.test._client.host_info_store.get.return_value.build = "placeholder_build"
         self.test._client._servo_host.validate_image_usbkey.return_value = (
             "another_build")
         self.test.assert_test_image_in_usb_disk = mock.MagicMock()
         self.test.set_servo_v4_role_to_snk = mock.MagicMock()
-        self.test.setup_usbkey(usbkey=True)
-        self.test._client.stage_build_to_usb.assert_called_with("dummy_build")
+        with mock.patch('time.sleep'):
+            self.test.setup_usbkey(usbkey=True)
+        self.test._client.stage_build_to_usb.assert_called_with(
+                "placeholder_build")
         self.test.assert_test_image_in_usb_disk.assert_called()
         self.test.set_servo_v4_role_to_snk.assert_called()
 
     def test_setup_usbkey_no_stage(self):
-        self.test._client.host_info_store.get.return_value.build = "dummy_build"
+        self.test._client.host_info_store.get.return_value.build = "placeholder_build"
         self.test._client._servo_host.validate_image_usbkey.return_value = (
             "another_build")
         self.test.assert_test_image_in_usb_disk = mock.MagicMock()
diff --git a/server/cros/faft/rpc_proxy.py b/server/cros/faft/rpc_proxy.py
index 1ea1b00..a0f6cff 100644
--- a/server/cros/faft/rpc_proxy.py
+++ b/server/cros/faft/rpc_proxy.py
@@ -1,12 +1,13 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import httplib
 import logging
+from six.moves import http_client as httplib
 import socket
 import time
-import xmlrpclib
+from six.moves import xmlrpc_client as xmlrpclib
 
 from autotest_lib.client.cros.faft.config import Config as ClientConfig
 from autotest_lib.server import autotest
@@ -62,8 +63,9 @@
 
         @param host: The host object, passed via the test control file.
         """
-        self._client = host
+        self.host = host
         self._faft_client = None
+        self.logfiles = []
 
     def __del__(self):
         self.disconnect()
@@ -97,17 +99,20 @@
     def connect(self):
         """Connect the RPC server."""
         # Make sure Autotest dependency is there.
-        autotest.Autotest(self._client).install()
-        self._faft_client = self._client.rpc_server_tracker.xmlrpc_connect(
+        autotest.Autotest(self.host).install()
+        logfile = "%s.%s" % (self._client_config.rpc_logfile, time.time())
+        self.logfiles.append(logfile)
+        self._faft_client = self.host.rpc_server_tracker.xmlrpc_connect(
                 self._client_config.rpc_command,
                 self._client_config.rpc_port,
                 command_name=self._client_config.rpc_command_short,
                 ready_test_name=self._client_config.rpc_ready_call,
                 timeout_seconds=self._client_config.rpc_timeout,
-                logfile="%s.%s" % (self._client_config.rpc_logfile,
-                                   time.time()),
-                server_desc=str(self)
-                )
+                logfile=logfile,
+                server_desc=str(self),
+                request_timeout_seconds=self._client_config.
+                rpc_request_timeout,
+        )
 
     def disconnect(self):
         """Disconnect the RPC server."""
@@ -115,8 +120,8 @@
         # so no need to pkill upon disconnect.
         if self._faft_client is not None:
             logging.debug("Closing FAFT RPC server connection.")
-        self._client.rpc_server_tracker.disconnect(
-                self._client_config.rpc_port, pkill=False)
+        self.host.rpc_server_tracker.disconnect(self._client_config.rpc_port,
+                                                pkill=False)
         self._faft_client = None
 
     def quit(self):
@@ -129,23 +134,31 @@
                     self._faft_client, self._client_config.rpc_quit_call)
             remote_quit()
             need_pkill = False
-        except (StandardError, httplib.BadStatusLine, xmlrpclib.Error) as e:
-            logging.warn("Error while telling FAFT RPC server to quit: %s", e)
+        except Exception as e:
+            logging.warning("Error while telling FAFT RPC server to quit: %s", e)
             # If we failed to tell the RPC server to quit for some reason,
             # fall back to SIGTERM, because it may not have exited.
             need_pkill = True
 
-        self._client.rpc_server_tracker.disconnect(
-                self._client_config.rpc_port, pkill=need_pkill)
+        self.host.rpc_server_tracker.disconnect(self._client_config.rpc_port,
+                                                pkill=need_pkill)
         self._faft_client = None
 
+    def collect_logfiles(self, dest):
+        """Download all logfiles from the DUT, then delete them."""
+        if self.logfiles:
+            for logfile in self.logfiles:
+                if self.host.run("test -f", args=[logfile],
+                                 ignore_status=True).exit_status == 0:
+                    self.host.get_file(logfile, dest)
+                    self.host.run("rm -f", ignore_status=True, args=[logfile])
+            self.logfiles.clear()
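
A hypothetical call site for collect_logfiles(), e.g. from a test's cleanup step, pulling every per-connection RPC log from the DUT into the results directory:

# Hypothetical cleanup sketch: self.faft_client is the RPCProxy and
# self.resultsdir is the test's results directory.
self.faft_client.collect_logfiles(self.resultsdir)
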
+
     def __repr__(self):
         """Return a description of the proxy object"""
-        return '%s(%s)' % (self.__class__.__name__, self._client)
+        return '%s(%s)' % (self.__class__.__name__, self.host)
 
     def __str__(self):
         """Return a description of the proxy object"""
-        return "<%s '%s:%s'>" % (
-                self.__class__.__name__,
-                self._client.hostname,
-                self._client_config.rpc_port)
+        return "<%s '%s:%s'>" % (self.__class__.__name__, self.host.hostname,
+                                 self._client_config.rpc_port)
diff --git a/server/cros/faft/telemetry.py b/server/cros/faft/telemetry.py
index 2a15284..2f258f8 100644
--- a/server/cros/faft/telemetry.py
+++ b/server/cros/faft/telemetry.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/faft/utils/common.py b/server/cros/faft/utils/common.py
index 7915157..9c893cd 100644
--- a/server/cros/faft/utils/common.py
+++ b/server/cros/faft/utils/common.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/faft/utils/config.py b/server/cros/faft/utils/config.py
index f5cc67c..1ca56cf 100644
--- a/server/cros/faft/utils/config.py
+++ b/server/cros/faft/utils/config.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -68,7 +69,7 @@
             seen_platforms = [self.platform]
             self._add_cfg_to_precedence(self.platform, platform_config)
             model_configs = platform_config.get('models', {})
-            model_config = model_configs.get(model, None)
+            model_config = model_configs.get(str(model), None)
             if model_config is not None:
                 self._add_cfg_to_precedence(
                         'MODEL:%s' % model, model_config, prepend=True)
diff --git a/server/cros/faft/utils/config_unittest.py b/server/cros/faft/utils/config_unittest.py
index ba63b52..f606ca2 100755
--- a/server/cros/faft/utils/config_unittest.py
+++ b/server/cros/faft/utils/config_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/server/cros/faft/utils/faft_checkers.py b/server/cros/faft/utils/faft_checkers.py
index 837e551..6bd46e0 100644
--- a/server/cros/faft/utils/faft_checkers.py
+++ b/server/cros/faft/utils/faft_checkers.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -74,7 +75,7 @@
         got_dict = self._parse_crossystem_output(lines)
         for key in expected_dict:
             if key not in got_dict:
-                logging.warn('Expected key %r not in crossystem result', key)
+                logging.warning('Expected key %r not in crossystem result', key)
                 succeed = False
                 continue
             if isinstance(expected_dict[key], str):
@@ -96,7 +97,7 @@
                     message = ('Expected %r values %r == real value %r' % (
                                key, expected_dict[key], got_dict[key]))
             else:
-                logging.warn('The expected value of %r is neither a str nor a '
+                logging.warning('The expected value of %r is neither a str nor a '
                              'dict: %r', key, expected_dict[key])
                 succeed = False
                 continue
@@ -105,27 +106,17 @@
         return succeed
 
     def mode_checker(self, mode):
-        """Check the current system in the given mode.
+        """Check whether the DUT is in the given firmware boot mode.
 
-        @param mode: A string of mode, one of 'normal', 'dev', or 'rec'.
+        @param mode: A string of the expected boot mode: normal, rec, or dev.
         @return: True if the system is in the given mode; otherwise, False.
+        @raise ValueError: If the expected boot mode is not one of normal, rec,
+                           or dev.
         """
-        if mode == 'normal':
-            return self.crossystem_checker(
-                    {'devsw_boot': '0',
-                     'mainfw_type': 'normal'},
-                    suppress_logging=True)
-        elif mode == 'dev':
-            return self.crossystem_checker(
-                    {'devsw_boot': '1',
-                     'mainfw_type': 'developer'},
-                    suppress_logging=True)
-        elif mode == 'rec':
-            return self.crossystem_checker(
-                    {'mainfw_type': 'recovery'},
-                    suppress_logging=True)
-        else:
-            raise NotImplementedError('The given mode %s not supported' % mode)
+        if mode not in ('normal', 'rec', 'dev'):
+            raise ValueError(
+                    'Unexpected boot mode %s: want normal, rec, or dev' % mode)
+        return self.faft_client.system.get_boot_mode() == mode
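
A short usage sketch for the simplified checker: it now just compares the RPC-reported boot mode string, so a caller can assert a mode directly.

# Hypothetical usage inside a FAFT test; self.checkers is the FAFTCheckers
# instance owned by the test framework.
if not self.checkers.mode_checker('dev'):
    raise error.TestFail('DUT is not in developer mode')
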
 
     def fw_tries_checker(self,
                          expected_mainfw_act,
@@ -226,3 +217,20 @@
                     return False
         logging.info("Wrong output format of '%s':\n%s", cmd, '\n'.join(lines))
         return False
+
+    def minios_checker(self):
+        """Check the current boot is a success MiniOS boot via SSH.
+
+        The DUT with test image should allow SSH connection, and we will use the
+        raw command, host.run_output(), to check since autotest client libraries
+        cannot be installed in MiniOS.
+
+        @return True if DUT booted to MiniOS; otherwise, False.
+        @raise TestError if DUT does not enable MiniOS.
+        """
+
+        if not self.faft_config.minios_enabled:
+            raise error.TestError('MiniOS is not enabled for this board')
+
+        cmdline = self.faft_client.host.run_output('cat /proc/cmdline')
+        return 'cros_minios' in cmdline.split()
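
The split() before the membership test matters: it requires a whole token, so a longer flag that merely contains the string would not match. A tiny self-contained illustration with a made-up kernel command line (the extra flag name is hypothetical):

# Illustration with a made-up cmdline: only the standalone 'cros_minios'
# token matches the whole-token check.
cmdline = 'cros_secure loglevel=7 cros_minios_extra cros_minios'
assert 'cros_minios' in cmdline.split()
assert 'cros_minios' not in 'cros_secure cros_minios_extra'.split()
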
diff --git a/server/cros/faft/utils/menu_mode_switcher.py b/server/cros/faft/utils/menu_mode_switcher.py
new file mode 100644
index 0000000..1057999
--- /dev/null
+++ b/server/cros/faft/utils/menu_mode_switcher.py
@@ -0,0 +1,476 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import abc
+import logging
+import six
+
+from autotest_lib.client.common_lib import error
+
+
+@six.add_metaclass(abc.ABCMeta)
+class _BaseMenuModeSwitcher:
+    """
+    Base class for mode switch with menu navigator.
+    """
+
+    def __init__(self, faft_framework, menu_navigator):
+        self.test = faft_framework
+        self.faft_config = self.test.faft_config
+        self.servo = self.test.servo
+        self.menu = menu_navigator
+        self.checkers = faft_framework.checkers
+
+        self.minidiag_enabled = self.faft_config.minidiag_enabled
+        self.minios_enabled = self.faft_config.minios_enabled
+
+    @abc.abstractmethod
+    def trigger_rec_to_dev(self):
+        """
+        Trigger to-dev transition.
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def dev_boot_from_internal(self):
+        """
+        Boot from internal disk in developer mode.
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def dev_boot_from_external(self):
+        """Boot from external disk in developer mode."""
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def trigger_dev_to_normal(self):
+        """
+        Trigger dev-to-norm transition.
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def power_off(self):
+        """
+        Power off the device.
+
+        This method should work in both developer and recovery screens.
+        """
+        raise NotImplementedError
+
+
+class _TabletDetachableMenuModeSwitcher(_BaseMenuModeSwitcher):
+    """
+    Mode switcher with menu navigator for legacy menu UI.
+
+    The "legacy menu UI" is an old menu-based UI, which has been replaced
+    by the new one, called "menu UI".
+    """
+
+    def trigger_rec_to_dev(self):
+        """
+        Trigger to-dev transition.
+        """
+        self.test.switcher.trigger_rec_to_dev()
+
+    def dev_boot_from_internal(self):
+        """
+        Boot from internal disk in developer mode.
+
+        Menu items in developer warning screen:
+            0. Developer Options
+            1. Show Debug Info
+            2. Enable OS Verification
+           *3. Power Off
+            4. Language
+
+        Menu items in developer boot options screen:
+            0. Boot From Network
+            1. Boot Legacy BIOS
+            2. Boot From USB or SD Card
+           *3. Boot From Internal Disk
+            4. Cancel
+            5. Power Off
+
+        (*) is the default selection.
+        """
+        self.test.wait_for('firmware_screen')
+        self.menu.move_to(3, 0)
+        self.menu.select('Selecting "Developer Options"...')
+        self.test.wait_for('keypress_delay')
+        self.menu.select('Selecting "Boot From Internal Disk"...')
+
+    def dev_boot_from_external(self):
+        """Boot from external disk in developer mode.
+
+        Menu items in developer warning screen:
+            0. Developer Options
+            1. Show Debug Info
+            2. Enable OS Verification
+           *3. Power Off
+            4. Language
+
+        Menu items in developer boot options screen:
+            0. Boot From Network
+            1. Boot Legacy BIOS
+            2. Boot From USB or SD Card
+           *3. Boot From Internal Disk
+            4. Cancel
+            5. Power Off
+            6. Language
+        """
+        self.test.wait_for('firmware_screen')
+        self.menu.move_to(3, 0)
+        self.menu.select('Selecting "Developer Options"...')
+        self.test.wait_for('keypress_delay')
+        self.menu.move_to(3, 2)
+        self.menu.select('Selecting "Boot From USB or SD Card"...')
+
+    def trigger_dev_to_normal(self):
+        """
+        Trigger dev-to-norm transition.
+
+        Menu items in developer warning screen:
+            0. Developer Options
+            1. Show Debug Info
+            2. Enable OS Verification
+           *3. Power Off
+            4. Language
+
+        Menu items in to-norm confirmation screen:
+           *0. Confirm Enabling OS Verification
+            1. Cancel
+            2. Power Off
+            3. Language
+
+        (*) is the default selection.
+        """
+        self.test.wait_for('firmware_screen')
+        self.menu.move_to(3, 2)
+        self.menu.select('Selecting "Enable OS Verification"...')
+        self.test.wait_for('keypress_delay')
+        self.menu.select('Selecting "Confirm Enabling OS Verification"...')
+
+    def power_off(self):
+        """
+        Power off the device.
+
+        This method should work in both developer and recovery screens.
+        """
+        self.test.wait_for('firmware_screen')
+        # Either in developer or recovery screen, the "Power Off" option is the
+        # default one.
+        self.menu.select('Selecting "Power Off"...')
+
+
+class _MenuModeSwitcher(_BaseMenuModeSwitcher):
+    """
+    Mode switcher with menu navigator for menu UI.
+
+    The "menu UI" aims to replace both "legacy clamshell UI" and "legacy
+    menu UI". See chromium:1033815 for the discussion about the naming.
+
+    Menu items in recovery select screen:
+        0. Language
+        1. Recovery using phone (always hidden)
+        2. Recovery using external disk
+        3. Recovery using internet connection (shown if minios_enabled)
+        4. Launch diagnostics (shown if minidiag_enabled)
+        5. Advanced options
+        6. Power off
+    """
+    RECOVERY_SELECT_ITEM_COUNT = 7
+
+    def _confirm_to_dev(self):
+        if self.faft_config.rec_button_dev_switch:
+            logging.info('Confirm to-dev by RECOVERY button')
+            self.servo.toggle_recovery_switch()
+        elif self.faft_config.power_button_dev_switch:
+            logging.info('Confirm to-dev by POWER button')
+            self.servo.power_normal_press()
+        else:
+            self.menu.select('Confirm to-dev by menu selection')
+
+    def trigger_rec_to_dev(self):
+        """
+        Trigger to-dev transition.
+
+        Menu items in advanced options screen:
+            0. Language
+           *1. Enable developer mode
+            2. Back
+            3. Power off
+
+        Menu items in to-dev screen:
+            0. Language
+           *1. Confirm
+            2. Cancel
+            3. Power off
+
+        (*) is the default selection.
+        """
+        self.test.wait_for('firmware_screen')
+        # The default selection is unknown, so navigate to the last item first
+        self.menu.move_to(0, self.RECOVERY_SELECT_ITEM_COUNT)
+        # Navigate to "Advanced options"
+        self.menu.up()
+        self.test.wait_for('keypress_delay')
+        self.menu.select('Selecting "Advanced options"...')
+        self.test.wait_for('keypress_delay')
+        self.menu.select('Selecting "Enable developer mode"...')
+        self.test.wait_for('keypress_delay')
+        # Confirm to-dev transition
+        self._confirm_to_dev()
+
+    def dev_boot_from_internal(self):
+        """
+        Boot from internal disk in developer mode.
+
+        Menu items in developer mode screen:
+            0. Language
+            1. Return to secure mode
+            2. Boot from internal disk
+            3. Boot from external disk
+            4. Advanced options
+            5. Power off
+        """
+        self.test.wait_for('firmware_screen')
+        # Since the default selection is unknown, navigate to item 0 first
+        self.menu.move_to(5, 0)
+        # Navigate to "Boot from internal disk"
+        self.menu.move_to(0, 2)
+        self.menu.select('Selecting "Boot from internal disk"...')
+
+    def dev_boot_from_external(self):
+        """Boot from external disk in developer mode.
+
+        Menu items in developer mode screen:
+            0. Language
+            1. Return to secure mode
+            2. Boot from internal disk
+            3. Boot from external disk
+            4. Advanced options
+            5. Power off
+        """
+        self.test.wait_for('firmware_screen')
+        # Since the default selection is unknown, navigate to item 0 first
+        self.menu.move_to(5, 0)
+        # Navigate to "Boot from external disk"
+        self.menu.move_to(0, 3)
+        self.menu.select('Selecting "Boot from external disk"...')
+
+    def trigger_dev_to_normal(self):
+        """
+        Trigger dev-to-norm transition.
+
+        Menu items in developer mode screen:
+            0. Language
+            1. Return to secure mode
+            2. Boot from internal disk
+            3. Boot from external disk
+            4. Advanced options
+            5. Power off
+
+        Menu items in to-norm screen:
+            0. Language
+           *1. Confirm
+            2. Cancel
+            3. Power off
+
+        (*) is the default selection.
+        """
+        self.test.wait_for('firmware_screen')
+        # Since the default selection is unknown, navigate to item 0 first
+        self.menu.move_to(5, 0)
+        # Navigate to "Return to secure mode"
+        self.menu.down()
+        self.test.wait_for('keypress_delay')
+        self.menu.select('Selecting "Return to secure mode"...')
+        self.test.wait_for('keypress_delay')
+        self.menu.select('Selecting "Confirm"...')
+
+    def power_off(self):
+        """
+        Power off the device.
+
+        This method should work in both developer and recovery screens.
+        """
+        self.test.wait_for('firmware_screen')
+        # Since there are at most 6 menu items in dev/rec screen, move the
+        # cursor down 6 times to ensure we reach the last menu item.
+        self.menu.move_to(0, 6)
+        self.menu.select('Selecting "Power off"...')
+
+    def trigger_rec_to_minidiag(self):
+        """
+        Trigger rec-to-MiniDiag.
+
+        @raise TestError if MiniDiag is not enabled.
+        """
+
+        # Validity check; this is only applicable for MiniDiag-enabled devices.
+        if not self.minidiag_enabled:
+            raise error.TestError('MiniDiag is not enabled for this board')
+
+        self.test.wait_for('firmware_screen')
+        # The default selection is unknown, so navigate to the last item first
+        self.menu.move_to(0, self.RECOVERY_SELECT_ITEM_COUNT)
+        # Navigate to "Launch diagnostics"
+        self.menu.up()
+        self.test.wait_for('keypress_delay')
+        self.menu.up()
+        self.test.wait_for('keypress_delay')
+        self.menu.select('Selecting "Launch diagnostics"...')
+        self.test.wait_for('firmware_screen')
+
+    def navigate_minidiag_storage(self):
+        """
+        Navigate to storage screen.
+
+        Menu items in storage screen:
+            0. Language
+            1. Page up (disabled)
+            2. Page down
+            3. Back
+            4. Power off
+
+        @raise TestError if MiniDiag is not enabled.
+        """
+
+        # Validity check; this is only applicable for MiniDiag-enabled devices.
+        if not self.minidiag_enabled:
+            raise error.TestError('MiniDiag is not enabled for this board')
+
+        # From root screen to storage screen
+        self.menu.select('Selecting "Storage"...')
+        self.test.wait_for('keypress_delay')
+        # Since the default selection is unknown, navigate to item 4 first
+        self.menu.move_to(0, 4)
+        # Navigate to "Back"
+        self.menu.up()
+        self.test.wait_for('keypress_delay')
+        self.menu.select('Back to MiniDiag root screen...')
+        self.test.wait_for('keypress_delay')
+
+    def navigate_minidiag_quick_memory_check(self):
+        """
+        Navigate to quick memory test screen.
+
+        Menu items in quick memory test screen:
+            0. Language
+            1. Page up (disabled)
+            2. Page down (disabled)
+            3. Back
+            4. Power off
+
+        @raise TestError if MiniDiag is not enabled.
+        """
+
+        # Validity check; this is only applicable for MiniDiag-enabled devices.
+        if not self.minidiag_enabled:
+            raise error.TestError('MiniDiag is not enabled for this board')
+
+        # From root screen to quick memory test screen
+        # There might be self test items, so navigate to the last item first
+        self.menu.move_to(0, 5)
+        self.menu.up()  # full memory test
+        self.test.wait_for('keypress_delay')
+        self.menu.up()  # quick memory test
+        self.test.wait_for('keypress_delay')
+        self.menu.select('Selecting "Quick memory test"...')
+        self.test.wait_for('keypress_delay')
+        # Wait for quick memory test
+        self.menu.select('Back to MiniDiag root screen...')
+        self.test.wait_for('keypress_delay')
+
+    def reset_and_leave_minidiag(self):
+        """
+        Reset the DUT and normal boot to leave MiniDiag.
+
+        @raise TestError if MiniDiag is not enabled or apreset is not supported.
+        """
+
+        # Validity check; this is only applicable for MiniDiag-enabled devices.
+        if not self.minidiag_enabled:
+            raise error.TestError('MiniDiag is not enabled for this board')
+
+        # Since we want to keep the cbmem log, we need an AP reset and reboot to
+        # normal mode
+        if self.test.ec.has_command('apreset'):
+            logging.info('Trigger apreset')
+            self.test.ec.send_command('apreset')
+        else:
+            raise error.TestError('EC command apreset is not supported')
+
+    def trigger_rec_to_minios(self, older_version=False):
+        """
+        Trigger recovery-to-MiniOS transition.
+
+        Menu items in advanced options screen, developer mode:
+            0. Language
+           *1. Debug info
+            2. Firmware log
+            3. Internet recovery (older version)
+            4. Back
+            5. Power off
+
+        (*) is the default selection.
+
+        @param older_version: True for selecting the older version button in the
+                              advanced options screen, and False for selecting
+                              the newer one in the recovery selection screen.
+        @raise NotImplementedError if MiniOS is not enabled.
+        """
+        # Validity check
+        if not self.minios_enabled:
+            raise NotImplementedError
+
+        # Boot to MiniOS through UI menu
+        if older_version:
+            logging.info('Boot to MiniOS (older version)')
+            # The default selection is unknown, so navigate to the last item
+            # first
+            self.menu.move_to(0, self.RECOVERY_SELECT_ITEM_COUNT)
+            # Navigate to "Advanced options"
+            self.menu.up()
+            self.test.wait_for('keypress_delay')
+            self.menu.select('Selecting "Advanced options"...')
+            self.test.wait_for('keypress_delay')
+            # Navigate to the last item in advanced options
+            self.menu.move_to(0, 5)
+            self.menu.move_to(5, 3)
+            self.menu.select(
+                    'Selecting "Internet recovery (older version)"...')
+        else:
+            logging.info('Boot to MiniOS')
+            self.menu.down()
+            self.menu.select(
+                    'Selecting "Recovery using internet connection"...')
+
+        self.test.wait_for('minios_screen')
+
+
+_MENU_MODE_SWITCHER_CLASSES = {
+        'menu_switcher': _MenuModeSwitcher,
+        'tablet_detachable_switcher': _TabletDetachableMenuModeSwitcher,
+}
+
+
+def create_menu_mode_switcher(faft_framework, menu_navigator):
+    """
+    Create a proper menu mode switcher based on the mode switcher type.
+
+    @param faft_framework: The main FAFT framework object.
+    @param menu_navigator: The menu navigator providing the base navigation
+                           logic.
+    """
+    switcher_type = faft_framework.faft_config.mode_switcher_type
+    switcher_class = _MENU_MODE_SWITCHER_CLASSES.get(switcher_type, None)
+    if switcher_class is None:
+        # Not all devices support menu-based UI, so it is fine to return None.
+        logging.info('Switcher type %s is menuless, return None',
+                     switcher_type)
+        return None
+    return switcher_class(faft_framework, menu_navigator)
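
A hedged wiring sketch showing how the factory above is expected to be used together with the menu navigator module introduced in the next file; the exact integration point inside the FAFT framework is an assumption:

# Hypothetical wiring sketch, assuming faft_framework is a FirmwareTest
# instance and menu_navigator is the sibling module below.
navigator = menu_navigator.create_menu_navigator(faft_framework)
menu_switcher = create_menu_mode_switcher(faft_framework, navigator)
if menu_switcher is not None:
    # Only menu-based UIs get a switcher; menuless boards return None.
    menu_switcher.dev_boot_from_internal()
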
diff --git a/server/cros/faft/utils/menu_navigator.py b/server/cros/faft/utils/menu_navigator.py
new file mode 100644
index 0000000..7e90461
--- /dev/null
+++ b/server/cros/faft/utils/menu_navigator.py
@@ -0,0 +1,88 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import abc
+import logging
+import six
+
+
+@six.add_metaclass(abc.ABCMeta)
+class _BaseMenuNavigator:
+    """Abstract base class for menu navigator."""
+
+    def __init__(self, test):
+        self.test = test
+        self.faft_config = self.test.faft_config
+        self.servo = self.test.servo
+
+    @abc.abstractmethod
+    def up(self):
+        """Navigate up in the menu."""
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def down(self):
+        """Navigate down in the menu."""
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def select(self, msg=None):
+        """Select a menu item."""
+        raise NotImplementedError
+
+    def move_to(self, from_idx, to_idx):
+        """Move from 'from_idx' to 'to_idx' by menu up/down."""
+        if from_idx > to_idx:
+            for _ in range(from_idx, to_idx, -1):
+                self.up()
+                self.test.wait_for('keypress_delay')
+        elif from_idx < to_idx:
+            for _ in range(from_idx, to_idx, 1):
+                self.down()
+                self.test.wait_for('keypress_delay')
+
+
+class _KeyboardMenuNavigator(_BaseMenuNavigator):
+    """Navigate with arrow and function keys."""
+
+    def up(self):
+        """Navigate up in the menu."""
+        self.servo.arrow_up()
+
+    def down(self):
+        """Navigate down in the menu."""
+        self.servo.arrow_down()
+
+    def select(self, msg=None):
+        """Select a menu item."""
+        if msg:
+            logging.info(msg)
+        self.servo.enter_key()
+
+
+class _DetachableMenuNavigator(_BaseMenuNavigator):
+    """Navigate with physical buttons for tablet or detachable devices."""
+
+    def up(self):
+        """Navigate up in the menu."""
+        self.servo.set_nocheck('volume_up_hold', 100)
+
+    def down(self):
+        """Navigate down in the menu."""
+        self.servo.set_nocheck('volume_down_hold', 100)
+
+    def select(self, msg=None):
+        """Select a menu item."""
+        if msg:
+            logging.info(msg)
+        self.servo.power_short_press()
+
+
+def create_menu_navigator(faft_framework):
+    """Create a proper navigator based on whether or not it is detachable"""
+    if faft_framework.faft_config.is_detachable:
+        return _DetachableMenuNavigator(faft_framework)
+    else:
+        return _KeyboardMenuNavigator(faft_framework)
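
The move_to() helper simply issues |to_idx - from_idx| presses in the right direction, waiting keypress_delay between each. A small counting illustration, assuming nav is a navigator created by the factory above:

# Counting illustration (hypothetical): starting from item 5, reach item 0
# with five up() presses, then item 2 with two down() presses.
nav = create_menu_navigator(faft_framework)  # faft_framework assumed
nav.move_to(5, 0)   # five up() calls
nav.move_to(0, 2)   # two down() calls
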
diff --git a/server/cros/faft/utils/mode_switcher.py b/server/cros/faft/utils/mode_switcher.py
index f03720d..2d1d75d 100644
--- a/server/cros/faft/utils/mode_switcher.py
+++ b/server/cros/faft/utils/mode_switcher.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,6 +7,7 @@
 import time
 
 from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros import vboot_constants as vboot
 
 DEBOUNCE_STATE = 'debouncing'
 
@@ -21,12 +23,13 @@
     # warning screen in tablets/detachables.
     HOLD_VOL_DOWN_BUTTON_BYPASS = 3
 
-    def __init__(self, faft_framework):
+    def __init__(self, faft_framework, menu_navigator):
         self.faft_framework = faft_framework
         self.servo = faft_framework.servo
         self.faft_config = faft_framework.faft_config
         self.client_host = faft_framework._client
         self.ec = getattr(faft_framework, 'ec', None)
+        self.menu = menu_navigator
 
 
     def bypass_dev_mode(self):
@@ -68,7 +71,8 @@
     def check_vbus_and_pd_state(self):
         """Perform PD power and data swap, if DUT is SRC and doesn't supply
         Vbus"""
-        if self.ec and self.faft_config.ec_ro_vbus_bug:
+        if (self.ec and self.faft_config.ec_ro_vbus_bug
+                    and self.servo.is_servo_v4_type_c()):
             time.sleep(self.faft_framework.PD_RESYNC_DELAY)
             servo_pr_role = self.servo.get_servo_v4_role()
             if servo_pr_role == 'snk':
@@ -79,15 +83,28 @@
                     # Make servo SRC to supply Vbus correctly
                     self.servo.set_servo_v4_role('src')
                     time.sleep(self.faft_framework.PD_RESYNC_DELAY)
+
             # After reboot, EC can be UFP so check that
-            if not self.ec.is_dfp():
+            MAX_PORTS = 2
+            ec_is_dfp = False
+            for port in range(0, MAX_PORTS):
+                if self.ec.is_dfp(port):
+                    ec_is_dfp = True
+                    break
+
+            if not ec_is_dfp:
                 # EC is UFP, perform PD Data Swap
-                self.ec.send_command("pd 0 swap data")
-                time.sleep(self.faft_framework.PD_RESYNC_DELAY)
-                # Make sure EC is DFP now
-                if not self.ec.is_dfp():
-                    # EC is still UFP
-                    raise error.TestError('DUT is not DFP in recovery mode.')
+                for port in range(0, MAX_PORTS):
+                    self.ec.send_command("pd %d swap data" % port)
+                    time.sleep(self.faft_framework.PD_RESYNC_DELAY)
+                    # Make sure EC is DFP now
+                    if self.ec.is_dfp(port):
+                        ec_is_dfp = True
+                        break
+
+            if not ec_is_dfp:
+                # EC is still UFP
+                raise error.TestError('DUT is not DFP in recovery mode.')
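
The port loop above boils down to "is any of the (at most two) PD ports a DFP". A compact equivalent of that probe, under the same MAX_PORTS assumption used in the patch:

# Equivalent sketch of the per-port probe used above; max_ports=2 matches
# the MAX_PORTS assumption in the patch.
def any_port_is_dfp(ec, max_ports=2):
    """Return True if the EC reports DFP on any PD port."""
    return any(ec.is_dfp(port) for port in range(max_ports))
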
 
 
 class _KeyboardBypasser(_BaseFwBypasser):
@@ -120,7 +137,7 @@
     def bypass_dev_default_boot(self):
         """Bypass the dev mode firmware logic to boot from default target."""
         self.faft_framework.wait_for('firmware_screen', 'Pressing enter')
-        self.servo.enter_key()
+        self.menu.select()
 
 
     def bypass_rec_mode(self):
@@ -130,13 +147,18 @@
         self.check_vbus_and_pd_state()
         self.servo.switch_usbkey('dut')
         logging.info('Enabled dut_sees_usb')
-        if not self.client_host.ping_wait_up(
-                timeout=self.faft_config.delay_reboot_to_ping):
-            logging.info('ping timed out, try REC_ON')
+        tries = 3
+        while tries > 0 and not self.client_host.wait_up(
+                timeout=self.faft_config.delay_reboot_to_ping,
+                host_is_down=True):
+            tries = tries - 1
+            logging.info('connect timed out, try REC_ON, retries left: %d',
+                         tries)
             psc = self.servo.get_power_state_controller()
             psc.power_on(psc.REC_ON)
             # Check Vbus after reboot again
             self.check_vbus_and_pd_state()
+        logging.info('bypass_rec_mode DONE')
 
 
     def trigger_dev_to_rec(self):
@@ -149,7 +171,8 @@
         """Trigger to the dev mode from the rec screen."""
         self.faft_framework.wait_for('firmware_screen', 'Pressing ctrl+d')
         self.servo.ctrl_d()
-        self.faft_framework.wait_for('confirm_screen', 'Pressing button to switch to dev mode')
+        self.faft_framework.wait_for('keypress_delay',
+                                     'Pressing button to switch to dev mode')
         if self.faft_config.rec_button_dev_switch:
             logging.info('RECOVERY button pressed to switch to dev mode')
             self.servo.toggle_recovery_switch()
@@ -158,7 +181,7 @@
             self.servo.power_normal_press()
         else:
             logging.info('ENTER pressed to switch to dev mode')
-            self.servo.enter_key()
+            self.menu.select()
 
 
     def trigger_dev_to_normal(self):
@@ -167,8 +190,8 @@
         self.faft_framework.wait_for('firmware_screen', 'Pressing ctrl+s')
         self.servo.ctrl_s()
         # Select "Confirm"
-        self.faft_framework.wait_for('confirm_screen', 'Pressing enter')
-        self.servo.enter_key()
+        self.faft_framework.wait_for('keypress_delay', 'Pressing enter')
+        self.menu.select()
 
 
 class _LegacyKeyboardBypasser(_KeyboardBypasser):
@@ -177,14 +200,14 @@
     def trigger_dev_to_rec(self):
         """Trigger to the to-norm screen from the dev screen."""
         self.faft_framework.wait_for('firmware_screen', 'Pressing enter')
-        self.servo.enter_key()
+        self.menu.select()
 
     def trigger_dev_to_normal(self):
         """Trigger to the normal mode from the dev screen."""
         self.faft_framework.wait_for('firmware_screen', 'Pressing enter')
-        self.servo.enter_key()
-        self.faft_framework.wait_for('confirm_screen', 'Pressing enter')
-        self.servo.enter_key()
+        self.menu.select()
+        self.faft_framework.wait_for('keypress_delay', 'Pressing enter')
+        self.menu.select()
 
 
 class _JetstreamBypasser(_BaseFwBypasser):
@@ -242,7 +265,7 @@
     def set_button(self, button, duration, info):
         """Helper method that sets the button hold time for UI selections"""
         self.servo.set_nocheck(button, duration)
-        self.faft_framework.wait_for('confirm_screen')
+        self.faft_framework.wait_for('keypress_delay')
         logging.info(info)
 
 
@@ -275,7 +298,7 @@
         self.faft_framework.wait_for('firmware_screen', 'Pressing volume up')
         self.set_button('volume_up_hold', 100, ('Selecting power as'
                         ' enter key to select Boot USB Image'))
-        self.servo.power_short_press()
+        self.menu.select()
 
     def bypass_dev_default_boot(self):
         """Open the Developer Options menu, and accept the default boot device
@@ -301,7 +324,7 @@
         self.faft_framework.wait_for('firmware_screen', 'Pressing power button')
         logging.info('Selecting power as enter key to accept the default'
                      ' boot option.')
-        self.servo.power_short_press()
+        self.menu.select()
 
     def bypass_rec_mode(self):
         """Bypass the rec mode firmware logic to boot USB."""
@@ -366,12 +389,12 @@
         """
         self.faft_framework.wait_for('firmware_screen', 'Pressing volume up')
         self.servo.set_nocheck('volume_up_hold', 100)
-        self.faft_framework.wait_for('confirm_screen', 'Pressing volume up')
+        self.faft_framework.wait_for('keypress_delay', 'Pressing volume up')
         self.servo.set_nocheck('volume_up_hold', 100)
-        self.faft_framework.wait_for('confirm_screen', 'Pressing volume up')
+        self.faft_framework.wait_for('keypress_delay', 'Pressing volume up')
         self.set_button('volume_up_hold', 100, ('Selecting power '
                         'as enter key to select Developer Options'))
-        self.servo.power_short_press()
+        self.menu.select()
 
 
     def trigger_rec_to_dev(self):
@@ -390,10 +413,10 @@
         """
         self.faft_framework.wait_for('firmware_screen', 'Pressing volume up + volume down')
         self.set_button('volume_up_down_hold', 100, ('Enter Recovery Menu.'))
-        self.faft_framework.wait_for('confirm_screen', 'Pressing volume up')
+        self.faft_framework.wait_for('keypress_delay', 'Pressing volume up')
         self.set_button('volume_up_hold', 100, ('Selecting power as '
                         'enter key to select Confirm Enabling Developer Mode'))
-        self.servo.power_short_press()
+        self.menu.select()
         self.faft_framework.wait_for('firmware_screen')
 
 
@@ -418,13 +441,13 @@
         self.set_button('volume_up_hold', 100, ('Selecting '
                         'Enable Root Verification using pwr '
                         'button to enter TO_NORM screen'))
-        self.servo.power_short_press()
+        self.menu.select()
         logging.info('Transitioning from DEV to TO_NORM screen.')
         self.faft_framework.wait_for('firmware_screen', 'Pressing power button')
         logging.info('Selecting Confirm Enabling Verified '
                         'Boot using pwr button in '
                         'TO_NORM screen')
-        self.servo.power_short_press()
+        self.menu.select()
 
     def trigger_dev_to_rec(self):
         """Trigger to the TO_NORM screen from the dev screen.
@@ -446,7 +469,7 @@
         self.set_button('volume_up_hold', 100, ('Selecting '
                         'Enable Root Verification using pwr '
                         'button to enter TO_NORM screen'))
-        self.servo.power_short_press()
+        self.menu.select()
         logging.info('Transitioning from DEV to TO_NORM screen.')
         self.faft_framework.wait_for('firmware_screen', 'Pressing volume down')
 
@@ -456,9 +479,9 @@
         # a generic action and wait for next action of either Lid close or
         # power button press.
         self.servo.set_nocheck('volume_down_hold', 100)
-        self.faft_framework.wait_for('confirm_screen', 'Pressing volume down')
+        self.faft_framework.wait_for('keypress_delay', 'Pressing volume down')
         self.servo.set_nocheck('volume_down_hold', 100)
-        self.faft_framework.wait_for('confirm_screen')
+        self.faft_framework.wait_for('keypress_delay')
 
 
 class _BaseModeSwitcher(object):
@@ -468,66 +491,89 @@
 
     FW_BYPASSER_CLASS = _BaseFwBypasser
 
-    def __init__(self, faft_framework):
+    def __init__(self, faft_framework, menu_navigator):
         self.faft_framework = faft_framework
         self.client_host = faft_framework._client
         self.faft_client = faft_framework.faft_client
         self.servo = faft_framework.servo
         self.faft_config = faft_framework.faft_config
         self.checkers = faft_framework.checkers
+        self.menu = menu_navigator
         self.bypasser = self._create_fw_bypasser()
-        self._backup_mode = None
+        original_boot_mode = self.faft_client.system.get_boot_mode()
+        # Only resume to normal/dev mode after test, not recovery.
+        self._backup_mode = 'dev' if original_boot_mode == 'dev' else 'normal'
 
     def _create_fw_bypasser(self):
         """Creates a proper firmware bypasser.
 
         @rtype: _BaseFwBypasser
         """
-        return self.FW_BYPASSER_CLASS(self.faft_framework)
+        return self.FW_BYPASSER_CLASS(self.faft_framework, self.menu)
 
-    def setup_mode(self, mode):
-        """Setup for the requested mode.
+    def setup_mode(self, to_mode, allow_gbb_force=False):
+        """Setup for the requested boot mode.
 
         It makes sure the system is in the requested mode. If not, it tries to
         do so.
 
-        @param mode: A string of mode, one of 'normal', 'dev', or 'rec'.
+        @param to_mode: A string of boot mode, one of 'normal', 'dev', or 'rec'.
+        @param allow_gbb_force: Bool. If True, allow forcing dev mode via GBB
+                                flags. This is more reliable, but it can prevent
+                                testing other mode-switch workflows.
         @raise TestFail: If the system is not switched to the expected mode
                          after reboot_to_mode.
 
         """
-        if not self.checkers.mode_checker(mode):
-            logging.info('System not in expected %s mode. Reboot into it.',
-                         mode)
-            if self._backup_mode is None:
-                # Only resume to normal/dev mode after test, not recovery.
-                self._backup_mode = 'dev' if mode == 'normal' else 'normal'
-            self.reboot_to_mode(mode)
-            if not self.checkers.mode_checker(mode):
-                raise error.TestFail('System not switched to expected %s'
-                        ' mode after setup_mode.' % mode)
+        current_mode = self.faft_client.system.get_boot_mode()
+        if current_mode == to_mode:
+            logging.debug('System already in expected %s mode.', to_mode)
+            return
+        logging.info('System not in expected %s mode. Reboot into it.',
+                     to_mode)
+
+        self.reboot_to_mode(to_mode, allow_gbb_force=allow_gbb_force)
+        current_mode = self.faft_client.system.get_boot_mode()
+        if current_mode != to_mode:
+            raise error.TestFail(
+                    'After setup_mode, wanted mode=%s but got %s' %
+                    (to_mode, current_mode))
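For reference, a hedged sketch of how a FAFT test might drive the new
setup_mode()/restore_mode() pair; the self.switcher attribute on the test
object is assumed from the wider FAFT framework and is not part of this diff.

    # Hypothetical FAFT test snippet (attribute name assumed):
    # Put the DUT in dev mode, allowing the more reliable GBB-forced path.
    self.switcher.setup_mode('dev', allow_gbb_force=True)
    # ... dev-mode portions of the test run here ...
    # Cleanup returns to whichever of normal/dev was recorded at construction.
    self.switcher.restore_mode()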
 
     def restore_mode(self):
         """Restores original dev mode status if it has changed.
 
         @raise TestFail: If the system is not restored to the expected mode.
         """
-        if (self._backup_mode is not None and
-            not self.checkers.mode_checker(self._backup_mode)):
-            self.reboot_to_mode(self._backup_mode)
-            if not self.checkers.mode_checker(self._backup_mode):
-                raise error.TestFail('System not restored to expected %s'
-                        ' mode in cleanup.' % self._backup_mode)
+        if self._backup_mode is None:
+            logging.debug('No backup mode to restore.')
+            return
+        current_mode = self.faft_client.system.get_boot_mode()
+        if current_mode == self._backup_mode:
+            logging.debug('System already in backup %s mode.', current_mode)
+            return
 
+        self.reboot_to_mode(self._backup_mode, allow_gbb_force=True)
+        current_mode = self.faft_client.system.get_boot_mode()
+        if current_mode != self._backup_mode:
+            raise error.TestFail(
+                    'After restore_mode, wanted mode=%s but got %s' %
+                    (self._backup_mode, current_mode))
+        self._backup_mode = None
 
-
-    def reboot_to_mode(self, to_mode, from_mode=None, sync_before_boot=True,
-                       wait_for_dut_up=True):
+    def reboot_to_mode(self,
+                       to_mode,
+                       allow_gbb_force=False,
+                       sync_before_boot=True,
+                       wait_for_dut_up=True,
+                       rec_usb_state='dut'):
         """Reboot and execute the mode switching sequence.
 
-        This method simulates what a user would do to switch between different
-        modes of ChromeOS.  Note that the modes are end-states where the OS is
-        booted up to the Welcome screen, so it takes care of navigating through
+        Normally this method simulates what a user would do to switch between
+        different modes of ChromeOS. However, if allow_gbb_force is True, then
+        booting to dev mode will instead be forced by GBB flags.
+
+        Note that the modes are end-states where the OS is booted up
+        to the Welcome screen, so it takes care of navigating through
         intermediate steps such as various boot confirmation screens.
 
         From the user perspective, these are the states (note that there's also
@@ -538,9 +584,6 @@
           |                         |
           +-------------------------+
 
-        This is the implementation, note that "from_mode" is only used for
-        logging purposes.
-
         Normal <-----> Dev:
           _enable_dev_mode_and_reboot()
 
@@ -551,31 +594,36 @@
           _enable_normal_mode_and_reboot()
 
         Normal <-----> rec:
-          enable_rec_mode_and_reboot(usb_state='dut')
+          enable_rec_mode_and_reboot(usb_state=rec_usb_state)
 
         Normal <-----> rec_force_mrc:
-          _enable_rec_mode_force_mrc_and_reboot(usb_state='dut')
+          _enable_rec_mode_force_mrc_and_reboot(usb_state=rec_usb_state)
 
         Note that one shouldn't transition to dev again without going through the
         normal mode.  This is because trying to disable os_verification when it's
         already off is not supported by reboot_to_mode.
 
         @param to_mode: The target mode, one of 'normal', 'dev', or 'rec'.
-        @param from_mode: The original mode, optional, one of 'normal, 'dev',
-                          or 'rec'.
+        @param allow_gbb_force: Bool. If True, allow forcing dev mode via GBB
+                                flags. This is more reliable, but it can prevent
+                                testing other mode-switch workflows.
         @param sync_before_boot: True to sync to disk before booting.
         @param wait_for_dut_up: True to wait for the DUT to come back online.
                                 False to do only the reboot and mode switching
                                 sequence; more operations may then be needed
                                 to pass the firmware screen.
+        @param rec_usb_state: None or a string, one of 'dut', 'host', or 'off'.
+                              This parameter is only valid when to_mode is 'rec'
+                              or 'rec_force_mrc'. Set this to None to prevent
+                              changing the USB state before rebooting.
         """
-        logging.info('-[ModeSwitcher]-[ start reboot_to_mode(%r, %r, %r) ]-',
-                     to_mode, from_mode, wait_for_dut_up)
+        logging.info(
+                '-[ModeSwitcher]-[ start reboot_to_mode(%r, %r, %r, %r)]-',
+                to_mode, sync_before_boot, allow_gbb_force, wait_for_dut_up)
 
-        if from_mode:
-            note = 'reboot_to_mode: to=%s, from=%s' % (from_mode, to_mode)
-        else:
-            note = 'reboot_to_mode: to=%s' % to_mode
+        from_mode = self.faft_client.system.get_boot_mode()
+        note = 'reboot_to_mode: from=%s, to=%s' % (from_mode, to_mode)
+
         if sync_before_boot:
             lines = self.faft_client.system.run_shell_command_get_output(
                 'crossystem')
@@ -587,30 +635,36 @@
             self.faft_framework.blocking_sync(freeze_for_reset=True)
         note += '.'
 
+        # If booting to anything but dev mode, make sure we're not forcing dev.
+        # This is irrespective of allow_gbb_force: disabling the flag doesn't
+        # force a mode, it just stops forcing dev.
+        if to_mode != 'dev':
+            self.faft_framework.clear_set_gbb_flags(
+                    vboot.GBB_FLAG_FORCE_DEV_SWITCH_ON, 0, reboot=False)
+
         if to_mode == 'rec':
-            self.enable_rec_mode_and_reboot(usb_state='dut')
-
+            self.enable_rec_mode_and_reboot(usb_state=rec_usb_state)
         elif to_mode == 'rec_force_mrc':
-            self._enable_rec_mode_force_mrc_and_reboot(usb_state='dut')
-
+            self._enable_rec_mode_force_mrc_and_reboot(usb_state=rec_usb_state)
         elif to_mode == 'dev':
-            self._enable_dev_mode_and_reboot()
-            if wait_for_dut_up:
-                self.bypass_dev_mode()
-
+            if allow_gbb_force:
+                self.faft_framework.clear_set_gbb_flags(
+                        0, vboot.GBB_FLAG_FORCE_DEV_SWITCH_ON, reboot=True)
+            else:
+                self._enable_dev_mode_and_reboot()
+                if wait_for_dut_up:
+                    self.bypass_dev_mode()
         elif to_mode == 'normal':
             self._enable_normal_mode_and_reboot()
-
         else:
-            raise NotImplementedError(
-                    'Not supported mode switching from %s to %s' %
-                     (str(from_mode), to_mode))
-
+            raise NotImplementedError('Unexpected boot mode param: %s' %
+                                      to_mode)
         if wait_for_dut_up:
             self.wait_for_client(retry_power_on=True, note=note)
 
-        logging.info('-[ModeSwitcher]-[ end reboot_to_mode(%r, %r, %r) ]-',
-                     to_mode, from_mode, wait_for_dut_up)
+        logging.info('-[ModeSwitcher]-[ end reboot_to_mode(%r, %r, %r, %r)]-',
+                     to_mode, sync_before_boot, allow_gbb_force,
+                     wait_for_dut_up)
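To illustrate the new parameters, two hedged example calls: the recovery call
mirrors the one launch_minios() makes later in this file, and the dev call
shows the GBB-forced path; the switcher variable name is assumed.

    # Boot to recovery without touching the USB mux (as launch_minios() does):
    switcher.reboot_to_mode(to_mode='rec', wait_for_dut_up=False,
                            rec_usb_state=None)
    # Force dev mode via GBB flags instead of the manual key sequence:
    switcher.reboot_to_mode(to_mode='dev', allow_gbb_force=True)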
 
     def simple_reboot(self, reboot_type='warm', sync_before_boot=True):
         """Simple reboot method
@@ -870,9 +924,10 @@
             raise ConnectionError(msg)
 
         # Wait for the system to respond to ping before attempting ssh
-        if not self.client_host.ping_wait_up(timeout):
+        if self.client_host.use_icmp and not self.client_host.ping_wait_up(
+                timeout):
             logging.warning("-[FAFT]-[ system did not respond to ping ]")
-        if self.client_host.wait_up(timeout):
+        if self.client_host.wait_up(timeout, host_is_down=True):
             # Check the FAFT client is available.
             self.faft_client.system.is_available()
             # Stop update-engine as it may change firmware/kernel.
@@ -896,20 +951,37 @@
         @param orig_boot_id: A string containing the original boot id.
         @raise ConnectionError: Failed to wait DUT offline.
         """
-        # When running against panther, we see that sometimes
-        # ping_wait_down() does not work correctly. There needs to
-        # be some investigation to the root cause.
-        # If we sleep for 120s before running get_boot_id(), it
-        # does succeed. But if we change this to ping_wait_down()
-        # there are implications on the wait time when running
-        # commands at the fw screens.
         if not self.client_host.ping_wait_down(timeout):
             if orig_boot_id and self.client_host.get_boot_id() != orig_boot_id:
-                logging.warn('Reboot done very quickly.')
+                logging.warning('Reboot done very quickly.')
                 return
             raise ConnectionError('DUT is still up unexpectedly')
 
 
+    def launch_minios(self, minios_priority=None):
+        """Reboot to recovery mode and launch MiniOS with specified priority.
+        The DUT must have the config 'minios_enabled'.
+
+        @param minios_priority: Set to 'a' or 'b' for specified priority; Set to
+                                None to skip assigning the priority.
+        @raise ConnectionError: Failed to wait DUT offline.
+        @raise NotImplementedError: DUT does not support MiniOS.
+        """
+        raise NotImplementedError
+
+    def leave_minios(self, is_devsw_boot=False):
+        """Leave MiniOS and use a mode-aware way to reboot DUT.
+
+        The DUT must have the config 'minios_enabled'.
+        This method will reboot DUT to leave MiniOS.
+
+        @param is_devsw_boot: True to bypass the developer screen.
+        @raise ConnectionError: Failed to wait DUT offline.
+        @raise NotImplementedError: DUT does not support MiniOS.
+        """
+        raise NotImplementedError
+
+
 class _MenuSwitcher(_BaseModeSwitcher):
     """Mode switcher via keyboard shortcuts for menu UI."""
 
@@ -931,6 +1003,56 @@
         self.wait_for_client_offline()
         self.bypasser.trigger_dev_to_normal()
 
+    def launch_minios(self, minios_priority=None):
+        """Reboot to recovery mode and launch MiniOS with specified priority.
+        The DUT must have the config 'minios_enabled'.
+
+        @param minios_priority: Set to 'a' or 'b' for specified priority; Set to
+                                None to skip assigning the priority.
+        @raise ConnectionError: Failed to wait DUT offline.
+        @raise NotImplementedError: DUT does not support MiniOS.
+        """
+        # Validity check
+        if not self.faft_config.minios_enabled:
+            raise NotImplementedError
+
+        # Set MiniOS priority
+        if minios_priority:
+            logging.info('Set the MiniOS priority to %s', minios_priority)
+            self.faft_client.system.set_minios_priority(minios_priority)
+        else:
+            logging.info('Use the original MiniOS priority setting')
+
+        # Boot to recovery mode to launch MiniOS. We do not want to change the
+        # usb state since it will disturb the MiniOS booting flow.
+        logging.info('Boot into recovery mode')
+        self.reboot_to_mode(to_mode="rec",
+                            wait_for_dut_up=False,
+                            rec_usb_state=None)
+        self.faft_framework.wait_for('firmware_screen')
+
+        # Use Ctrl+R shortcut to boot MiniOS
+        logging.info('Try to boot MiniOS')
+        self.servo.ctrl_r()
+        self.faft_framework.wait_for('minios_screen')
+
+    def leave_minios(self, is_devsw_boot=False):
+        """Leave MiniOS and use a mode-aware way to reboot DUT.
+
+        The DUT must have the config 'minios_enabled'.
+        This method will reboot DUT to leave MiniOS.
+
+        @param is_devsw_boot: True to bypass the developer screen.
+        @raise ConnectionError: Failed to wait DUT offline.
+        @raise NotImplementedError: DUT does not support MiniOS.
+        """
+        # mode_aware_reboot() cannot be used here since it leverages autotest
+        # libraries which don't exist within MiniOS.
+        self.simple_reboot(sync_before_boot=False)
+        if is_devsw_boot:
+            self.faft_framework.wait_for('firmware_screen')
+            self.bypass_dev_mode()
+
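A hedged sketch of the end-to-end MiniOS flow these two methods enable; how
the switcher object is obtained and what is verified while inside MiniOS are
assumptions, not part of this diff.

    # The DUT must report faft_config.minios_enabled, otherwise launch_minios()
    # raises NotImplementedError.
    switcher.launch_minios(minios_priority='a')  # rec mode + Ctrl+R into MiniOS
    # ... MiniOS-side checks run over the network (hypothetical) ...
    switcher.leave_minios(is_devsw_boot=True)    # reboot out, bypass dev screen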
 
 class _KeyboardDevSwitcher(_MenuSwitcher):
     """Mode switcher via keyboard shortcuts for legacy clamshell UI."""
@@ -1010,10 +1132,11 @@
 }
 
 
-def create_mode_switcher(faft_framework):
+def create_mode_switcher(faft_framework, menu_navigator):
     """Creates a proper mode switcher.
 
     @param faft_framework: The main FAFT framework object.
+    @param menu_navigator: The menu navigator providing base navigation logic.
     """
     switcher_type = faft_framework.faft_config.mode_switcher_type
     switcher_class = _SWITCHER_CLASSES.get(switcher_type, None)
@@ -1021,4 +1144,4 @@
         raise NotImplementedError('Not supported mode_switcher_type: %s',
                                   switcher_type)
     else:
-        return switcher_class(faft_framework)
+        return switcher_class(faft_framework, menu_navigator)
diff --git a/server/cros/goofy_client.py b/server/cros/goofy_client.py
deleted file mode 100644
index 57117cc..0000000
--- a/server/cros/goofy_client.py
+++ /dev/null
@@ -1,439 +0,0 @@
-# Lint as: python2, python3
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import six.moves.http_client
-import logging
-import os
-import re
-import socket
-import time
-
-import common
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import retry
-from autotest_lib.server import utils
-
-
-GOOFY_JSONRPC_SERVER_PORT = 0x0FAC
-GOOFY_RUNNING = 'RUNNING'
-
-
-class GoofyProxyException(Exception):
-    """Exception raised when a goofy rpc fails."""
-    pass
-
-
-class GoofyRuntimeException(Exception):
-    """Exception raised when something goes wrong while a test is running."""
-    pass
-
-
-def retry_goofy_rpc(exception_tuple, timeout_min=30):
-    """A decorator to use with goofy rpcs.
-
-    This decorator tries to recreate the goofy client proxy on
-    socket error. It will continue trying to do so until it
-    executes the method without any socket errors or till the
-    retry.retry decorator hits it's timeout.
-
-    Usage:
-        If you just want to recreate the proxy:
-        1. @retry_goofy_rpc(exception_tuple=(<exception>, socket.error),
-                            timeout_min=<timeout>)
-        2. @retry_goofy_rpc(socket.error, timeout_min=<timeout>)
-            Note: you need to specify the socket.error exception because we
-            want to retry the call after recreating the proxy.
-
-    @param exception_tuple: A tuple of exceptions to pass to
-        the retry decorator. Any of these exceptions will result
-        in retries for the duration of timeout_min.
-    @param timeout_min: The timeout, in minutes, for which we should
-        retry the method ignoring any exception in exception_tuple.
-    """
-    def inner_decorator(method):
-        """Inner retry decorator applied to the method.
-
-        @param method: The method that needs to be wrapped in the decorator.
-
-        @return A wrapper function that implements the retry.
-        """
-
-        @retry.retry(exception_tuple, timeout_min=timeout_min)
-        def wrapper(*args, **kwargs):
-            """This wrapper handles socket errors.
-
-            If the method in question:
-            1. Throws an exception in exception_tuple and it is not a
-               socket.error, retry for timeout_min through retry.retry.
-            2. Throws a socket.error, recreate the client proxy, and
-               retry for timeout_min through retry.retry.
-            3. Throws an exception not in exception_tuple, fail.
-            """
-            try:
-                return method(*args, **kwargs)
-            except socket.error as e:
-                goofy_proxy = args[0]
-                if type(goofy_proxy) is GoofyProxy:
-                    logging.warning('Socket error while running factory tests '
-                                    '%s, recreating goofy proxy.', e)
-                    goofy_proxy._create_client_proxy(timeout_min=timeout_min)
-                else:
-                    logging.warning('Connectivity was lost and the retry '
-                                    'decorator was unable to recreate a goofy '
-                                    'client proxy, args: %s.', args)
-                raise
-
-        return wrapper
-
-    return inner_decorator
-
-
-class GoofyProxy(object):
-    """Client capable of making rpc calls to goofy.
-
-    Methods of this class that can cause goofy to change state
-    usually need a retry decorator. Methods that have a retry decorator
-    need to be 'pure', i.e return the same results when called multiple
-    times with the same argument.
-
-    There are 2 known exceptions this class can deal with, a socket.error
-    which happens when we try to execute an rpc when the DUT is, say, suspended
-    and a BadStatusLine, which we get when we try to execute an rpc while the
-    DUT is going through a factory_restart. Ideally we would like to handle
-    socket timeouts different from BadStatusLines as we can get connection
-    errors even when a device reboots and BadStatusLines ususally only when
-    factory restarts. crbug.com/281714.
-    """
-
-    # This timeout was arbitrarily chosen as many tests in the factory test
-    # suite run for days. Ideally we would like to split this into at least 2
-    # timeouts, one which we use for rpcs that run while no other test is,
-    # running and is smaller than the second that is designed for use with rpcs
-    # that might execute simultaneously with a test. The latter needs a longer
-    # timeout since tests could suspend,resume for a long time, and a call like
-    # GetGoofyStatus should be tolerant to these suspend/resumes. In designing
-    # the base timeout one needs to allocate time to component methods of this
-    # class (such as _set_test_list) as a multiple of the number of rpcs it
-    # executes.
-    BASE_RPC_TIMEOUT = 1440
-    POLLING_INTERVAL = 5
-    FACTORY_BUG_RE = r'.*(/tmp/factory_bug.*tar.bz2).*'
-    UNTAR_COMMAND = 'tar jxf %s -C %s'
-
-
-    def __init__(self, host):
-        """
-        @param host: The host object representing the DUT running goofy.
-        """
-        self._host = host
-        self._raw_stop_running_tests()
-        self._create_client_proxy(timeout_min=self.BASE_RPC_TIMEOUT)
-
-
-    def _create_client_proxy(self, timeout_min=30):
-        """Create a goofy client proxy.
-
-        Ping the host till it's up, then proceed to create a goofy proxy. We
-        don't wrap this method with a retry because it's used in the retry
-        decorator itself.
-        """
-
-        # We don't ssh ping here as there is a potential dealy in O(minutes)
-        # with our ssh command against a sleeping DUT, once it wakes up, and
-        # that will lead to significant overhead incurred over many reboots.
-        self._host.ping_wait_up(timeout_min)
-        logging.info('Host is pingable, creating goofy client proxy')
-        self._client = self._host.rpc_server_tracker.jsonrpc_connect(
-                GOOFY_JSONRPC_SERVER_PORT)
-
-
-    @retry.retry((six.moves.http_client.BadStatusLine, socket.error),
-                 timeout_min=BASE_RPC_TIMEOUT)
-    def _raw_stop_running_tests(self):
-        """Stop running tests by shelling out to the DUT.
-
-        Use this method only before we have actually created the client
-        proxy, as shelling out has several pitfalls. We need to stop all
-        tests in a retry loop because tests will start executing as soon
-        as we have reimaged a DUT and trying to create the proxy while
-        the DUT is rebooting will lead to a spurious failure.
-
-        Note that we use the plain retry decorator for this method since
-        we don't need to recreate the client proxy on failure.
-        """
-        logging.info('Stopping all tests and clearing factory state')
-        self._host.run('factory clear')
-
-
-    @retry_goofy_rpc((six.moves.http_client.BadStatusLine, socket.error),
-                     timeout_min=BASE_RPC_TIMEOUT)
-    def _get_goofy_status(self):
-        """Return status of goofy, ignoring socket timeouts and http exceptions.
-        """
-        status = self._client.GetGoofyStatus().get('status')
-        return status
-
-
-    def _wait_for_goofy(self, timeout_min=BASE_RPC_TIMEOUT*2):
-        """Wait till goofy is running or a timeout occurs.
-
-        @param timeout_min: Minutes to wait before timing this call out.
-        """
-        current_time = time.time()
-        timeout_secs = timeout_min * 60
-        logging.info('Waiting on goofy')
-        while self._get_goofy_status() != GOOFY_RUNNING:
-            if time.time() - current_time > timeout_secs:
-                break
-        return
-
-
-    @retry_goofy_rpc(socket.error, timeout_min=BASE_RPC_TIMEOUT*2)
-    def _set_test_list(self, next_list):
-        """Set the given test list for execution and turn on test automation.
-
-        Confirm that the given test list is a test that has been baked into
-        the image, then run it. Some test lists are configured to start
-        execution automatically when we call SetTestList, while others wait
-        for a corresponding RunTest.
-
-        @param next_list: The name of the test list.
-
-        @raise jsonrpclib.ProtocolError: If the test list we're trying to switch
-                                         to isn't on the DUT.
-        """
-
-        # We can get a BadStatus line on 2 occassions:
-        # 1. As part of SwitchTestList goofy performs a factory restart, which
-        # will throw a BadStatusLine because the rpc can't exit cleanly. We
-        # don't want to retry on this exception, since we've already set the
-        # right test list.
-        # 2. If we try to set a test list while goofy is already down
-        # (from a previous factory restart). In this case we wouldn't have
-        # set the new test list, because we coulnd't connect to goofy.
-        # To properly set a new test list it's important to wait till goofy is
-        # up before attempting to set the test list, while being aware that the
-        # preceding httplib error is from the rpc we just executed leading to
-        # a factory restart. Also note that if the test list is not already on
-        # the DUT this method will fail, emitting the possible test lists one
-        # can switch to.
-        self._wait_for_goofy()
-        logging.info('Switching to test list %s', next_list)
-        try:
-            # Enable full factory test automation. Full test automation mode
-            # skips all manual tests and test barriers, which is what we want in
-            # the test lab. There are other automation modes: partial and none.
-            # In partial automation mode manual tests and barrier are enabled
-            # and user intervention is required; none disables automation.
-            self._client.SwitchTestList(next_list, 'full')
-        except six.moves.http_client.BadStatusLine:
-            logging.info('Switched to list %s, goofy restarting', next_list)
-
-
-    @retry_goofy_rpc((six.moves.http_client.BadStatusLine, socket.error),
-                     timeout_min=BASE_RPC_TIMEOUT*2)
-    def _stop_running_tests(self):
-        """Stop all running tests.
-
-        Wrap the StopTest rpc so we can attempt to stop tests even while a DUT
-        is suspended or rebooting.
-        """
-        logging.info('Stopping tests.')
-        self._client.StopTest()
-
-
-    def _get_test_map(self):
-        """Get a mapping of test suites -> tests.
-
-        Ignore entries for tests that don't have a path.
-
-        @return: A dictionary of the form
-                 {'suite_name': ['suite_name.path_to_test', ...]}.
-        """
-        test_all = set([test['path'] for test in self._client.GetTests()
-                        if test.get('path')])
-
-        test_map = collections.defaultdict(list)
-        for names in test_all:
-            test_map[names.split('.')[0]].append(names)
-        return test_map
-
-
-    def _log_test_results(self, test_status, current_suite):
-        """Format test status results and write them to status.log.
-
-        @param test_status: The status dictionary of a single test.
-        @param current_suite: The current suite name.
-        """
-        try:
-            self._host.job.record('INFO', None, None,
-                                  'suite %s, test %s, status: %s' %
-                                  (current_suite, test_status.get('path'),
-                                   test_status.get('status')))
-        except AttributeError as e:
-            logging.error('Could not gather results for current test: %s', e)
-
-
-    @retry_goofy_rpc((six.moves.http_client.BadStatusLine, socket.error),
-                     timeout_min=BASE_RPC_TIMEOUT*2)
-    def _get_test_info(self, test_name):
-        """Get the status of one test.
-
-        @param test_name: The name of the test we need the status of.
-
-        @return: The entry for the test in the status dictionary.
-        """
-        for test in self._client.GetTests():
-            if test['path'] == test_name:
-                return test
-        raise ValueError('Could not find test_name %s in _get_test_info.' %
-                          test_name)
-
-
-    @retry_goofy_rpc((six.moves.http_client.BadStatusLine, socket.error),
-                     timeout_min=BASE_RPC_TIMEOUT*2)
-    def _get_test_run_info(self, run_id):
-        """Get the information about the given test run.
-
-        @param run_id: The ID of the test run.
-
-        @return: A dict of test run status.
-        """
-        return self._client.GetTestRunStatus(run_id)
-
-
-    def _wait_on_run(self, run_id):
-        """Wait until the given test run to end.
-
-        @param run_id: The ID of the test run.
-
-        @raises GoofyRuntimeException: If the test run does not finish
-            gracefully.
-        """
-        finished_tests = set()
-        run_info = self._get_test_run_info(run_id)
-        while run_info['status'] == 'RUNNING':
-            finished = [(t['path'], t['status']) for t in
-                        run_info['scheduled_tests']
-                        if t['status'] in ('PASSED', 'FAILED')]
-            for t in finished:
-                if t not in finished_tests:
-                    logging.info('[%s] %s', t[1], t[0])
-                    finished_tests.add(t)
-            time.sleep(self.POLLING_INTERVAL)
-            run_info = self._get_test_run_info(run_id)
-        if run_info['status'] != 'FINISHED':
-            raise GoofyRuntimeException(
-                    'The requested test run was interrupted.')
-
-
-    def _synchronous_run_suite(self, suite_name):
-        """Run one suite and wait for it to finish.
-
-        Will start a test run for the specified suite_name and wait until it
-        ends.
-
-        @param suite_name: The name of the suite to wait for.
-
-        @raises GoofyProxyException: If the status of the suite
-            doesn't switch to active after we call RunTest.
-
-        @return: The result of the suite.
-        """
-        logging.info('Starting suite: %s', suite_name)
-        run_id = self._client.RunTest(suite_name)
-        self._wait_on_run(run_id)
-        return self._get_test_run_info(run_id)
-
-
-    def monitor_tests(self, test_list):
-        """Run a test list.
-
-        Will run each suite in the given list in sequence, starting each one
-        by name and waiting on its results. This method makes the following
-        assumptions:
-            - A test list is made up of self contained suites.
-            - These suites trigger several things in parallel.
-            - After a suite finishes it leaves goofy in an idle state.
-
-        It is not safe to pull results for individual tests during the suite
-        as the device could be rebooting, or goofy could be under stress.
-        Instead, this method synchronously waits on an entire suite, then
-        asks goofy for the status of each test in the suite. Since certain
-        test lists automatically start and others don't, this method stops
-        test list execution regardless, and sequentially triggers each suite.
-
-        @param test_list: The test list to run.
-        """
-        self._set_test_list(test_list)
-        self._wait_for_goofy()
-        self._stop_running_tests()
-
-        test_map = self._get_test_map()
-        if test_map:
-            logging.info('About to execute tests: %s', test_map)
-        else:
-            raise GoofyRuntimeException('Test map is empty, you might have an '
-                                        'error in your test_list.')
-
-
-        for current_suite in test_map.keys():
-            logging.info('Processing suite %s', current_suite)
-
-            result = self._synchronous_run_suite(current_suite)
-            logging.info(result)
-
-            for test_names in test_map.get(current_suite):
-                self._log_test_results(self._get_test_info(test_names),
-                                       current_suite)
-
-
-    @retry_goofy_rpc((six.moves.http_client.BadStatusLine, socket.error),
-                     timeout_min=BASE_RPC_TIMEOUT*2)
-    def get_results(self, resultsdir):
-        """Copies results from the DUT to a local results directory.
-
-        Copy the tarball over to the results folder, untar, and delete the
-        tarball if everything was successful. This will effectively place
-        all the logs relevant to factory testing in the job's results folder.
-
-        @param resultsdir: The directory in which to untar the contents of the
-                           tarball factory_bug generates.
-        """
-        logging.info('Getting results logs for test_list.')
-
-        try:
-            factory_bug_log = self._host.run('factory_bug').stderr
-        except error.CmdError as e:
-            logging.error('Could not execute factory_bug: %s', e)
-            return
-
-        try:
-            factory_bug_tar = re.match(self.FACTORY_BUG_RE,
-                                       factory_bug_log).groups(1)[0]
-        except (IndexError, AttributeError):
-            logging.error('could not collect logs for factory results, '
-                          'factory bug returned %s', factory_bug_log)
-            return
-
-        factory_bug_tar_file = os.path.basename(factory_bug_tar)
-        local_factory_bug_tar = os.path.join(resultsdir, factory_bug_tar_file)
-
-        try:
-            self._host.get_file(factory_bug_tar, local_factory_bug_tar)
-        except error.AutoservRunError as e:
-            logging.error('Failed to pull back the results tarball: %s', e)
-            return
-
-        try:
-            utils.run(self.UNTAR_COMMAND % (local_factory_bug_tar, resultsdir))
-        except error.CmdError as e:
-            logging.error('Failed to untar the results tarball: %s', e)
-            return
-        finally:
-            if os.path.exists(local_factory_bug_tar):
-                os.remove(local_factory_bug_tar)
diff --git a/server/cros/graphics/graphics_power.py b/server/cros/graphics/graphics_power.py
index 4429e02..789d976 100644
--- a/server/cros/graphics/graphics_power.py
+++ b/server/cros/graphics/graphics_power.py
@@ -120,7 +120,8 @@
             if timeout and time.time() >= time_end:
                 self.stop()
                 raise self.InitTimeoutError(
-                    'The graphics_Power subtest initialization timed out')
+                    'The graphics_Power subtest initialization timed out after'
+                    ' %d second(s).' % timeout)
             if not self.is_alive():
                 raise RuntimeError(
                     'The graphics_Power subtest failed to initialize')
diff --git a/server/cros/graphics/graphics_tracereplayextended.py b/server/cros/graphics/graphics_tracereplayextended.py
new file mode 100644
index 0000000..5606383
--- /dev/null
+++ b/server/cros/graphics/graphics_tracereplayextended.py
@@ -0,0 +1,296 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Implementation of the graphics_TraceReplayExtended server test."""
+
+from enum import Enum
+import logging
+import os
+import threading
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import test
+from autotest_lib.server.cros.graphics import graphics_power
+from autotest_lib.server.site_tests.tast import tast
+
+
+class TastTestResult():
+    """Stores the test result for a single Tast subtest"""
+
+    class TestStatus(Enum):
+        """Encodes all actionable Tast subtest completion statuses"""
+        Passed = 1
+        Skipped = 2
+        Failed = 3
+
+    def __init__(self, name, status, errors):
+        self.name = name  # type: str
+        self.status = status  # type: TastTestResult.TestStatus
+        self.errors = errors  # type: list of JSON error dicts
+
+
+class TastManagerThread(threading.Thread):
+    """Thread for running a local tast test from an autotest server test."""
+
+    def __init__(self,
+                 host,
+                 tast_instance,
+                 client_test,
+                 max_duration_minutes,
+                 build_bundle,
+                 varslist=None,
+                 command_args=None):
+        """Initializes the thread.
+
+        Args:
+            host: An autotest host instance.
+            tast_instance: An instance of the tast.tast() class.
+            client_test: String identifying which tast test to run.
+            max_duration_minutes: Float defining the maximum running time of the
+                managed sub-test.
+            build_bundle: String defining which tast test bundle to build and
+                query for the client_test.
+            varslist: list of strings that define dynamic variables made
+                available to tast tests at runtime via `tast run -var=name=value
+                ...`. Each string should be formatted as 'name=value'.
+            command_args: list of strings that are passed as args to the `tast
+                run` command.
+        """
+        super(TastManagerThread, self).__init__(name=__name__)
+        self.tast = tast_instance
+        self.tast.initialize(
+            host=host,
+            test_exprs=[client_test],
+            ignore_test_failures=True,
+            max_run_sec=max_duration_minutes * 60,
+            command_args=command_args if command_args else [],
+            build_bundle=build_bundle,
+            varslist=varslist)
+
+    def run(self):
+        logging.info('Started thread: %s', self.__class__.__name__)
+        self.tast.run_once()
+
+    def get_subtest_results(self):
+        """Returns the status for the tast subtest managed by this class.
+
+        Parses the Tast client tests' json-formatted result payloads to
+        determine the status and associated messages for each.
+
+        self.tast._test_results is populated with JSON objects for each test
+        during self.tast.run_once(). The JSON spec is detailed at
+        src/platform/tast/src/chromiumos/tast/cmd/tast/internal/run/results.go.
+        """
+        subtest_results = []
+        for res in self.tast._test_results:
+            name = res.get('name')
+            skip_reason = res.get('skipReason')
+            errors = res.get('errors')
+            if skip_reason:
+                logging.info('Tast subtest "%s" was skipped with reason: %s',
+                             name, skip_reason)
+                status = TastTestResult.TestStatus.Skipped
+            elif errors:
+                logging.info('Tast subtest "%s" failed with errors: %s', name,
+                             str([err.get('reason') for err in errors]))
+                status = TastTestResult.TestStatus.Failed
+            else:
+                logging.info('Tast subtest "%s" succeeded.', name)
+                status = TastTestResult.TestStatus.Passed
+            subtest_results.append(TastTestResult(name, status, errors))
+        return subtest_results
+
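To make the mapping concrete, a few illustrative entries of the shape this
method iterates over via self.tast._test_results; the names and reasons are
hypothetical, only the keys come from the code above.

    # Hypothetical tast result entries and the status each would be assigned:
    fake_results = [
        {'name': 't1', 'skipReason': 'missing deps', 'errors': None},  # Skipped
        {'name': 't2', 'skipReason': '', 'errors': [{'reason': 'x'}]},  # Failed
        {'name': 't3', 'skipReason': '', 'errors': []},                 # Passed
    ]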
+
+class GraphicsTraceReplayExtendedBase(test.test):
+    """Base Autotest server test for running repeated trace replays in a VM.
+
+    This test simultaneously initiates system performance logging and extended
+    trace replay processes on a target host, and parses their test results for
+    combined analysis and reporting.
+    """
+    version = 1
+
+    @staticmethod
+    def _initialize_dir_on_host(host, directory):
+        """Initialize a directory to a consistent (empty) state on the host.
+
+        Args:
+            host: An autotest host instance.
+            directory: String defining the location of the directory to
+                initialize.
+
+        Raises:
+            TestFail: If the directory cannot be initialized.
+        """
+        try:
+            host.run('rm -r %(0)s 2>/dev/null || true; ! test -d %(0)s' %
+                     {'0': directory})
+            host.run('mkdir -p %s' % directory)
+        except (error.AutotestHostRunCmdError, error.AutoservRunError) as err:
+            logging.exception(err)
+            raise error.TestFail(
+                'Failed to initialize directory "%s" on the test host' %
+                directory)
+
+    @staticmethod
+    def _cleanup_dir_on_host(host, directory):
+        """Ensure that a directory and its contents are deleted on the host.
+
+        Args:
+            host: An autotest host instance.
+            directory: String defining the location of the directory to delete.
+
+        Raises:
+            TestFail: If the directory remains on the host.
+        """
+        try:
+            host.run('rm -r %(0)s || true; ! test -d %(0)s' % {'0': directory})
+        except (error.AutotestHostRunCmdError, error.AutoservRunError) as err:
+            logging.exception(err)
+            raise error.TestFail(
+                'Failed to cleanup directory "%s" on the test host' % directory)
+
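For clarity, the dict-style %(0)s formatting in the two helpers above expands
the same directory into the command twice; a hedged example using the
_initialize_dir_on_host variant with an illustrative path:

    # Illustrative expansion (path is hypothetical):
    directory = '/tmp/Example/results'
    cmd = 'rm -r %(0)s 2>/dev/null || true; ! test -d %(0)s' % {'0': directory}
    # cmd is now:
    #   rm -r /tmp/Example/results 2>/dev/null || true;
    #   ! test -d /tmp/Example/results
    # Best-effort removal, then assert the directory is gone; host.run() raises
    # if that trailing test fails, which the helpers convert into TestFail.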
+    def run_once(self,
+                 host,
+                 client_tast_test,
+                 max_duration_minutes,
+                 tast_build_bundle='cros',
+                 tast_varslist=None,
+                 tast_command_args=None,
+                 pdash_note=None):
+        """Runs the test.
+
+        Args:
+            host: An autotest host instance.
+            client_tast_test: String defining which tast test to run.
+            max_duration_minutes: Float defining the maximum running time of the
+                managed sub-test.
+            tast_build_bundle: String defining which tast test bundle to build
+                and query for the client_test.
+            tast_varslist: list of strings that define dynamic variables made
+                available to tast tests at runtime via `tast run -var=name=value
+                ...`. Each string should be formatted as 'name=value'.
+            tast_command_args: list of strings that are passed as args to the
+                `tast run` command.
+            pdash_note: Optional note forwarded to the graphics_Power subtest
+                for the power dashboard.
+        """
+        # Construct a suffix tag indicating which managing test is using logged
+        # data from the graphics_Power subtest.
+        trace_name = client_tast_test.split('.')[-1]
+
+        # Workaround for running the test locally; needed since
+        # crrev/c/2374267 and crrev/i/2374267.
+        if not tast_command_args:
+            tast_command_args = []
+        tast_command_args.extend([
+                'extraallowedbuckets=termina-component-testing,cros-containers-staging'
+        ])
+
+        # Define paths of signal files for basic RPC/IPC between sub-tests.
+        temp_io_root = '/tmp/%s/' % self.__class__.__name__
+        result_dir = os.path.join(temp_io_root, 'results')
+        signal_running_file = os.path.join(temp_io_root, 'signal_running')
+        signal_checkpoint_file = os.path.join(temp_io_root, 'signal_checkpoint')
+
+        # This test is responsible for creating/deleting root and resultdir.
+        logging.debug('Creating temporary IPC/RPC dir: %s', temp_io_root)
+        self._initialize_dir_on_host(host, temp_io_root)
+        self._initialize_dir_on_host(host, result_dir)
+
+        # Start background system performance monitoring process on the test
+        # target (via an autotest client 'power_Test').
+        logging.debug('Connecting to autotest client on host')
+        graphics_power_thread = graphics_power.GraphicsPowerThread(
+            host=host,
+            max_duration_minutes=max_duration_minutes,
+            test_tag='Trace' + '.' + trace_name,
+            pdash_note=pdash_note,
+            result_dir=result_dir,
+            signal_running_file=signal_running_file,
+            signal_checkpoint_file=signal_checkpoint_file)
+        graphics_power_thread.start()
+
+        logging.info('Waiting for graphics_Power subtest to initialize...')
+        try:
+            graphics_power_thread.wait_until_running(timeout=10 * 60)
+        except Exception as err:
+            logging.exception(err)
+            raise error.TestFail(
+                'An error occurred during graphics_Power subtest initialization')
+        logging.info('The graphics_Power subtest was properly initialized')
+
+        # Start repeated trace replay process on the test target (via a tast
+        # local test).
+        logging.info('Running Tast test: %s', client_tast_test)
+        tast_outputdir = os.path.join(self.outputdir, 'tast')
+        if not os.path.exists(tast_outputdir):
+            logging.debug('Creating tast outputdir: %s', tast_outputdir)
+            os.makedirs(tast_outputdir)
+
+        if not tast_varslist:
+            tast_varslist = []
+        tast_varslist.extend([
+            'PowerTest.resultDir=' + result_dir,
+            'PowerTest.signalRunningFile=' + signal_running_file,
+            'PowerTest.signalCheckpointFile=' + signal_checkpoint_file,
+        ])
+
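Per the varslist contract in TastManagerThread.__init__, each of these strings
is handed to the tast wrapper as a separate -var=name=value flag. With
illustrative paths (the real root uses the concrete subclass name), the
extended list contains entries like:

    # Hypothetical resulting entries (paths illustrative):
    expected_extra_vars = [
        'PowerTest.resultDir=/tmp/GraphicsTraceReplayExtendedBase/results',
        'PowerTest.signalRunningFile='
        '/tmp/GraphicsTraceReplayExtendedBase/signal_running',
        'PowerTest.signalCheckpointFile='
        '/tmp/GraphicsTraceReplayExtendedBase/signal_checkpoint',
    ]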
+        tast_instance = tast.tast(
+            job=self.job, bindir=self.bindir, outputdir=tast_outputdir)
+        tast_manager_thread = TastManagerThread(
+            host,
+            tast_instance,
+            client_tast_test,
+            max_duration_minutes,
+            tast_build_bundle,
+            varslist=tast_varslist,
+            command_args=tast_command_args)
+        tast_manager_thread.start()
+
+        # Block until both subtests finish.
+        threads = [graphics_power_thread, tast_manager_thread]
+        stop_attempts = 0
+        while threads:
+            # TODO(ryanneph): Move stop signal emission to tast test instance.
+            if (not tast_manager_thread.is_alive() and
+                    graphics_power_thread.is_alive() and stop_attempts < 1):
+                logging.info('Attempting to stop graphics_Power thread')
+                graphics_power_thread.stop(timeout=0)
+                stop_attempts += 1
+
+            # Raise test failure if graphics_Power thread ends before tast test.
+            if (not graphics_power_thread.is_alive() and
+                    tast_manager_thread.is_alive()):
+                raise error.TestFail(
+                    'The graphics_Power subtest ended too soon.')
+
+            for thread in list(threads):
+                if not thread.is_alive():
+                    logging.info('Thread "%s" has ended',
+                                 thread.__class__.__name__)
+                    threads.remove(thread)
+            time.sleep(1)
+
+        # Aggregate subtest results and report overall test result
+        subtest_results = tast_manager_thread.get_subtest_results()
+        num_failed_subtests = 0
+        for res in subtest_results:
+            num_failed_subtests += int(
+                res.status == TastTestResult.TestStatus.Failed)
+        if num_failed_subtests:
+            raise error.TestFail('%d of %d Tast subtests have failed.' %
+                                 (num_failed_subtests, len(subtest_results)))
+        elif all([res.status == TastTestResult.TestStatus.Skipped
+                  for res in subtest_results]):
+            raise error.TestNAError('All %d Tast subtests have been skipped' %
+                                    len(subtest_results))
+
+        client_result_dir = os.path.join(self.outputdir, 'client_results')
+        logging.info('Saving client results to %s', client_result_dir)
+        host.get_file(result_dir, client_result_dir)
+
+        # Ensure the host filesystem is clean for the next test.
+        self._cleanup_dir_on_host(host, result_dir)
+        self._cleanup_dir_on_host(host, temp_io_root)
+
+        # TODO(ryanneph): Implement results parsing/analysis/reporting
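Putting the pieces together, a hypothetical server-side control file for a
concrete subclass of this test might look roughly like the following; the
trace name, duration, and tag are illustrative, not taken from this change.

    # Hypothetical control file sketch (values illustrative):
    def run(machine):
        host = hosts.create_host(machine)
        job.run_test('graphics_TraceReplayExtended',
                     host=host,
                     client_tast_test='graphics.TraceReplayExtended.example',
                     max_duration_minutes=180,
                     tag='example')

    parallel_simple(run, machines)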
diff --git a/server/cros/host_lock_manager.py b/server/cros/host_lock_manager.py
index 4b8908d..7de6f4a 100644
--- a/server/cros/host_lock_manager.py
+++ b/server/cros/host_lock_manager.py
@@ -1,10 +1,11 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
 import signal
-import common
+from . import common
 
 from autotest_lib.server import site_utils
 from autotest_lib.server.cros.chaos_lib import chaos_datastore_utils
diff --git a/server/cros/host_lock_manager_unittest.py b/server/cros/host_lock_manager_unittest.py
index 9922f1e..b78c3da 100755
--- a/server/cros/host_lock_manager_unittest.py
+++ b/server/cros/host_lock_manager_unittest.py
@@ -1,13 +1,15 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Unit tests for server/cros/host_lock_manager.py."""
-import mock
+
 import unittest
-import common
+from unittest import mock
+
+from . import common
 
 from autotest_lib.server.cros import host_lock_manager
 from autotest_lib.server.cros.chaos_lib import chaos_datastore_utils
@@ -61,7 +63,7 @@
         return_value=False)
     def testCheckHost_SkipsUnknownHost(self, get_mock):
         actual = self.manager._check_host('host1', None)
-        self.assertEquals(None, actual)
+        self.assertEqual(None, actual)
 
 
     @mock.patch.object(chaos_datastore_utils.ChaosDataStoreUtils, 'show_device',
@@ -70,7 +72,7 @@
     def testCheckHost_DetectsLockedHost(self, get_mock):
         """Test that a host which is already locked is skipped."""
         actual = self.manager._check_host(self.HOST1, self.manager.LOCK)
-        self.assertEquals(None, actual)
+        self.assertEqual(None, actual)
 
 
     @mock.patch.object(chaos_datastore_utils.ChaosDataStoreUtils, 'show_device',
@@ -79,7 +81,7 @@
     def testCheckHost_DetectsUnlockedHost(self, get_mock):
         """Test that a host which is already unlocked is skipped."""
         actual = self.manager._check_host(self.HOST1, self.manager.UNLOCK)
-        self.assertEquals(None, actual)
+        self.assertEqual(None, actual)
 
 
     @mock.patch.object(chaos_datastore_utils.ChaosDataStoreUtils, 'show_device',
@@ -88,7 +90,7 @@
     def testCheckHost_ReturnsHostToLock(self, get_mock):
         """Test that a host which can be locked is returned."""
         actual = self.manager._check_host(self.HOST1, self.manager.LOCK)
-        self.assertEquals(self.HOST1, actual)
+        self.assertEqual(self.HOST1, actual)
 
 
     @mock.patch.object(chaos_datastore_utils.ChaosDataStoreUtils, 'show_device',
@@ -97,7 +99,7 @@
     def testCheckHost_ReturnsHostToUnlock(self, get_mock):
         """Test that a host which can be unlocked is returned."""
         actual = self.manager._check_host(self.HOST1, self.manager.UNLOCK)
-        self.assertEquals(self.HOST1, actual)
+        self.assertEqual(self.HOST1, actual)
 
 
     def testLock_WithNonOverlappingHosts(self):
@@ -106,7 +108,7 @@
         manager = self.MockHostLockManager()
         manager.locked_hosts = set([self.HOST1])
         manager.lock(hosts, lock_reason='Locking for test')
-        self.assertEquals(set([self.HOST1, self.HOST2]), manager.locked_hosts)
+        self.assertEqual(set([self.HOST1, self.HOST2]), manager.locked_hosts)
 
 
     def testLock_WithPartialOverlappingHosts(self):
@@ -115,7 +117,7 @@
         manager = self.MockHostLockManager()
         manager.locked_hosts = set([self.HOST1, self.HOST3])
         manager.lock(hosts, lock_reason='Locking for test')
-        self.assertEquals(set([self.HOST1, self.HOST2, self.HOST3]),
+        self.assertEqual(set([self.HOST1, self.HOST2, self.HOST3]),
                           manager.locked_hosts)
 
 
@@ -124,7 +126,7 @@
         hosts = [self.HOST1, self.HOST2]
         self.manager.locked_hosts = set(hosts)
         self.manager.lock(hosts)
-        self.assertEquals(set(hosts), self.manager.locked_hosts)
+        self.assertEqual(set(hosts), self.manager.locked_hosts)
 
 
     def testUnlock_WithNonOverlappingHosts(self):
@@ -132,7 +134,7 @@
         hosts = [self.HOST2]
         self.manager.locked_hosts = set([self.HOST1])
         self.manager.unlock(hosts)
-        self.assertEquals(set([self.HOST1]), self.manager.locked_hosts)
+        self.assertEqual(set([self.HOST1]), self.manager.locked_hosts)
 
 
 if __name__ == '__main__':
diff --git a/server/cros/packet_generation/__init__.py b/server/cros/minios/__init__.py
similarity index 100%
copy from server/cros/packet_generation/__init__.py
copy to server/cros/minios/__init__.py
diff --git a/server/cros/minios/minios_test.py b/server/cros/minios/minios_test.py
new file mode 100644
index 0000000..acd2cef
--- /dev/null
+++ b/server/cros/minios/minios_test.py
@@ -0,0 +1,223 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import json
+import logging
+import os
+import re
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.minios import minios_util
+from autotest_lib.server.cros.update_engine import update_engine_test
+
+
+class MiniOsTest(update_engine_test.UpdateEngineTest):
+    """
+    Base class that sets up helper objects/functions for NBR (network based
+    recovery) tests.
+
+    """
+
+    _MINIOS_CLIENT_CMD = 'minios_client'
+    _MINIOS_KERNEL_FLAG = 'cros_minios'
+
+    # Period to wait for firmware screen in seconds.
+    # Value based on Brya, which is the slowest so far.
+    _FIRMWARE_SCREEN_TIMEOUT = 30
+
+    # Number of times to attempt booting into MiniOS.
+    _MINIOS_BOOT_MAX_ATTEMPTS = 3
+
+    # Timeout periods, given in seconds.
+    _MINIOS_SHUTDOWN_TIMEOUT = 30
+
+    # Number of seconds to wait for the host to boot into MiniOS. Should always
+    # be greater than `_FIRMWARE_SCREEN_TIMEOUT`.
+    _MINIOS_WAIT_UP_TIME_SECONDS = 120
+
+    # Version reported to OMAHA/NEBRASKA for recovery.
+    _RECOVERY_VERSION = '0.0.0.0'
+
+    # Files used by the tests.
+    _DEPENDENCY_DIRS = ['bin', 'lib', 'lib64', 'libexec']
+    _DEPENDENCY_INSTALL_DIR = '/usr/local'
+    _MINIOS_TEMP_STATEFUL_DIR = '/usr/local/tmp/stateful'
+    _STATEFUL_DEV_IMAGE_NAME = 'dev_image_new'
+
+    def initialize(self, host):
+        """
+        Sets default variables for the test.
+
+        @param host: The DUT we will be running on.
+
+        """
+        super(MiniOsTest, self).initialize(host)
+        self._nebraska = None
+        self._servo = host.servo
+        self._servo.initialize_dut()
+
+    def cleanup(self):
+        """Clean up minios autotests."""
+        if self._nebraska:
+            self._nebraska.stop()
+        super(MiniOsTest, self).cleanup()
+        # Make sure to reboot the DUT into CrOS in case of failures.
+        self._host.reboot()
+
+    def _boot_minios(self):
+        """Boot the DUT into MiniOS."""
+        # Turn off usbkey to avoid booting into usb-recovery image.
+        self._servo.switch_usbkey('off')
+        psc = self._servo.get_power_state_controller()
+        psc.power_off()
+        psc.power_on(psc.REC_ON)
+        self._host.test_wait_for_shutdown(self._MINIOS_SHUTDOWN_TIMEOUT)
+        logging.info('Waiting for firmware screen')
+        time.sleep(self._FIRMWARE_SCREEN_TIMEOUT)
+
+        # Attempt multiple times to boot into MiniOS. If all attempts fail,
+        # this is some kind of firmware issue. Since we failed to boot an OS,
+        # use servo to reset the unit and then report a test failure.
+        attempts = 0
+        minios_is_up = False
+        while not minios_is_up and attempts < self._MINIOS_BOOT_MAX_ATTEMPTS:
+            # Use the Ctrl+R shortcut to boot MiniOS.
+            logging.info('Try to boot MiniOS')
+            self._servo.ctrl_r()
+            minios_is_up = self._host.wait_up(
+                    timeout=self._MINIOS_WAIT_UP_TIME_SECONDS,
+                    host_is_down=True)
+            attempts += 1
+
+        if minios_is_up:
+            # If mainfw_type is recovery then we are in MiniOS.
+            mainfw_type = self._host.run_output('crossystem mainfw_type')
+            if mainfw_type != 'recovery':
+                raise error.TestError(
+                        'Boot to MiniOS - invalid firmware: %s.' % mainfw_type)
+            # There are multiple types of recovery images; make sure we booted
+            # into MiniOS.
+            pattern = r'\b%s\b' % self._MINIOS_KERNEL_FLAG
+            if not re.search(pattern, self._host.get_cmdline()):
+                raise error.TestError(
+                        'Boot to MiniOS - recovery image is not minios.')
+        else:
+            # Try not to leave the unit on the recovery firmware screen.
+            self._host.power_cycle()
+            raise error.TestError('Boot to MiniOS failed.')
+
+    def _create_minios_hostlog(self):
+        """Create the minios hostlog file.
+
+        To ensure the recovery was successful we need to compare the update
+        events against expected update events. This function creates the hostlog
+        for minios before the recovery reboots the DUT.
+
+        """
+        # Check that update logs exist.
+        if len(self._get_update_engine_logs()) < 1:
+            err_msg = 'update_engine logs are missing. Cannot verify recovery.'
+            raise error.TestFail(err_msg)
+
+        # Download the logs instead of reading them over the network since they
+        # will disappear after MiniOS reboots the DUT.
+        logfile = os.path.join(self.resultsdir, 'minios_update_engine.log')
+        self._host.get_file(self._UPDATE_ENGINE_LOG, logfile)
+        logfile_content = None
+        with open(logfile) as f:
+            logfile_content = f.read()
+        minios_hostlog = os.path.join(self.resultsdir, 'hostlog_minios')
+        with open(minios_hostlog, 'w') as fp:
+            # There are four expected hostlog events during recovery.
+            extract_logs = self._extract_request_logs(logfile_content)
+            json.dump(extract_logs[-4:], fp)
+        return minios_hostlog
+
+    def _install_test_dependencies(self, public_bucket=False):
+        """
+        Install test dependencies from a downloaded stateful archive.
+
+        @param public_bucket: True to download stateful from a public bucket.
+
+        """
+        if not self._job_repo_url:
+            raise error.TestError('No job repo url set.')
+
+        statefuldev_url = self._stage_stateful(public_bucket)
+        logging.info('Installing dependencies from %s', statefuldev_url)
+
+        # Create destination directories.
+        minios_dev_image_dir = os.path.join(self._MINIOS_TEMP_STATEFUL_DIR,
+                                            self._STATEFUL_DEV_IMAGE_NAME)
+        install_dirs = [
+                os.path.join(self._DEPENDENCY_INSTALL_DIR, dir)
+                for dir in self._DEPENDENCY_DIRS
+        ]
+        self._run(['mkdir', '-p', minios_dev_image_dir] + install_dirs)
+        # Symlink the install dirs into the staging destination.
+        for dir in install_dirs:
+            self._run(['ln', '-s', dir, minios_dev_image_dir])
+
+        # Generate the list of stateful archive members that we want to extract.
+        members = [
+                os.path.join(self._STATEFUL_DEV_IMAGE_NAME, dir)
+                for dir in self._DEPENDENCY_DIRS
+        ]
+        try:
+            self._download_and_extract_stateful(statefuldev_url,
+                                                self._MINIOS_TEMP_STATEFUL_DIR,
+                                                members=members,
+                                                keep_symlinks=True)
+        except error.AutoservRunError as e:
+            err_str = 'Failed to install the test dependencies'
+            raise error.TestFail('%s: %s' % (err_str, str(e)))
+
+        self._setup_python_symlinks()
+
+        # Clean-up unused files to save memory.
+        self._run(['rm', '-rf', self._MINIOS_TEMP_STATEFUL_DIR])
+
+    def _setup_python_symlinks(self):
+        """
+        Create symlinks in the root filesystem pointing to all Python paths in
+        /usr/local so that the stateful-installed Python works. This is needed
+        because Gentoo creates wrappers with hardcoded paths to the root
+        (e.g. python-exec).
+
+        """
+        for path in self._DEPENDENCY_DIRS:
+            self._run([
+                    'find',
+                    os.path.join(self._DEPENDENCY_INSTALL_DIR, path),
+                    '-maxdepth', '1', '\(', '-name', 'python*', '-o', '-name',
+                    'portage', '\)', '-exec', 'ln', '-s', '{}',
+                    os.path.join('/usr', path), '\;'
+            ])
+
+    def _start_nebraska(self, payload_url=None):
+        """
+        Initialize and start nebraska on the DUT.
+
+        @param payload_url: The payload to be served by nebraska.
+
+        """
+        if not self._nebraska:
+            self._nebraska = minios_util.NebraskaService(
+                    self, self._host, payload_url)
+        self._nebraska.start()
+
+    def _verify_reboot(self, old_boot_id):
+        """
+        Verify that the unit rebooted using the boot_id.
+
+        @param old_boot_id: A boot id value obtained before the reboot.
+
+        """
+        self._host.test_wait_for_shutdown(self._MINIOS_SHUTDOWN_TIMEOUT)
+        self._host.test_wait_for_boot(old_boot_id)
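+
+
+# A minimal usage sketch (illustrative only): a hypothetical server-side test
+# deriving from MiniOsTest. The class name and `payload_url` below are
+# assumptions for illustration, not part of this module.
+#
+#     from autotest_lib.server.cros.minios import minios_test
+#
+#     class provision_MiniOsExample(minios_test.MiniOsTest):
+#         """Hypothetical NBR test built on the MiniOsTest helpers."""
+#
+#         def run_once(self, payload_url=None):
+#             old_boot_id = self._host.get_boot_id()
+#             self._boot_minios()                      # reboot the DUT into MiniOS
+#             self._start_nebraska(payload_url)        # serve the payload locally
+#             # ... trigger the recovery via minios_client here ...
+#             hostlog = self._create_minios_hostlog()  # snapshot update events
+#             self._verify_reboot(old_boot_id)         # confirm the DUT rebooted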
diff --git a/server/cros/minios/minios_util.py b/server/cros/minios/minios_util.py
new file mode 100644
index 0000000..79a8c8e
--- /dev/null
+++ b/server/cros/minios/minios_util.py
@@ -0,0 +1,175 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import json
+import logging
+import os
+import requests
+import six
+
+from autotest_lib.client.common_lib import autotemp
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.update_engine import nebraska_wrapper
+
+
+class NebraskaService:
+    """
+    Remotely sets up nebraska on the DUT.
+
+    This service is different from
+    `autotest_lib.client.cros.update_engine.nebraska_wrapper.NebraskaWrapper` in
+    that it is used by server-only tests to remotely launch nebraska on the DUT.
+
+    """
+
+    def __init__(self, test, host, payload_url=None, **props_to_override):
+        """
+        Initializes the NebraskaService.
+
+        @param test: Instance of the test using the service.
+        @param host: The DUT we will be running on.
+        @param payload_url: The payload that will be returned in responses for
+            update requests. This can be a single URL string or a list of URLs
+            to return multiple payload URLs (such as a platform payload + DLC
+            payloads) in the responses.
+        @param props_to_override: Dictionary of key/values to use in responses
+            instead of the default values in payload_url's properties file.
+
+        """
+        self._host = host
+        self._test = test
+
+        # _update_metadata_dir is the directory for storing the json metadata
+        # files associated with the payloads.
+        # _update_payloads_address is the address of the update server where
+        # the payloads are staged.
+        self._update_metadata_dir = None
+        self._update_payloads_address = None
+
+        if payload_url:
+            # Normalize payload_url to be a list.
+            if not isinstance(payload_url, list):
+                payload_url = [payload_url]
+
+            self._update_metadata_dir = self._host.get_tmp_dir()
+            self._update_payloads_address = ''.join(
+                    payload_url[0].rpartition('/')[0:2])
+
+            # Download the metadata files and save them in a tempdir for general
+            # use.
+            for url in payload_url:
+                self.get_payload_properties_file(url,
+                                                 self._update_metadata_dir,
+                                                 **props_to_override)
+
+    def get_payload_properties_file(self, payload_url, target_dir, **kwargs):
+        """
+        Downloads the payload properties file into a directory on the DUT.
+
+        @param payload_url: The URL to the update payload file.
+        @param target_dir: The directory on the DUT to download the file into.
+        @param kwargs: A dictionary of key/values that need to be overridden in
+            the payload properties file.
+
+        """
+        payload_props_url = payload_url + '.json'
+        _, _, file_name = payload_props_url.rpartition('/')
+        try:
+            response = json.loads(requests.get(payload_props_url).text)
+            # Override existing keys if any.
+            for k, v in six.iteritems(kwargs):
+                # Don't set default None values. We don't want to override good
+                # values to None.
+                if v is not None:
+                    response[k] = v
+            self._write_remote_file(os.path.join(target_dir, file_name),
+                                    json.dumps(response))
+
+        except (requests.exceptions.RequestException, IOError,
+                ValueError) as err:
+            raise error.TestError(
+                    'Failed to get update payload properties: %s with error: %s'
+                    % (payload_props_url, err))
+
+    def start(self, **kwargs):
+        """Launch nebraska on DUT."""
+        # Generate nebraska configuration.
+        self._write_remote_file(
+                nebraska_wrapper.NEBRASKA_CONFIG,
+                json.dumps(self._create_startup_config(**kwargs)),
+        )
+        logging.info('Start nebraska service')
+        self._host.upstart_restart('nebraska')
+        self._host.wait_for_service('nebraska')
+
+    def stop(self):
+        """Stop Nebraska service."""
+        logging.info('Stop nebraska service')
+        self._host.upstart_stop('nebraska')
+        self._host.run('rm', args=('-f', nebraska_wrapper.NEBRASKA_CONFIG))
+
+    def _create_startup_config(self, **kwargs):
+        """
+        Creates a nebraska startup config file. If this file is present, nebraska
+        can be started by upstart.
+
+        @param kwargs: A dictionary of key/values for nebraska config options.
+            See platform/dev/nebraska/nebraska.py for more info.
+
+        @return: A dictionary of nebraska config options.
+
+        """
+        conf = {}
+        if self._update_metadata_dir:
+            conf['update_metadata'] = self._update_metadata_dir
+        if self._update_payloads_address:
+            conf['update_payloads_address'] = self._update_payloads_address
+
+        for k, v in six.iteritems(kwargs):
+            conf[k] = v
+        return conf
+
+    def _create_remote_dir(self, remote_dir, owner=None):
+        """
+        Create directory on DUT.
+
+        @param remote_dir: The directory to create.
+        @param owner: Set owner of the remote directory.
+
+        """
+        permission = '1777'
+        if owner:
+            permission = '1770'
+        self._host.run(['mkdir', '-p', '-m', permission, remote_dir])
+        if owner:
+            self._host.run('chown', args=(owner, remote_dir))
+
+    def _write_remote_file(self,
+                           filepath,
+                           content,
+                           permission=None,
+                           owner=None):
+        """
+        Write content to filepath on DUT.
+
+        @param filepath: Path of the file to write on the DUT.
+        @param content: Content to write to the file.
+        @param permission: Permission to set on the remote file, as an octal
+            number (e.g. 0o644).
+        @param owner: Owner to set on the remote file.
+
+        """
+        tmpdir = autotemp.tempdir(unique_id='minios')
+        tmp_path = os.path.join(tmpdir.name, os.path.basename(filepath))
+        with open(tmp_path, 'w') as f:
+            f.write(content)
+        if permission is not None:
+            os.chmod(tmp_path, permission)
+        self._create_remote_dir(os.path.dirname(filepath), owner)
+        self._host.send_file(tmp_path, filepath, delete_dest=True)
+        if owner is not None:
+            self._host.run('chown', args=(owner, filepath))
+        tmpdir.clean()
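+
+
+# A minimal usage sketch (illustrative only): starting nebraska on the DUT from
+# a server-side test. `test`, `host` and `payload_url` are assumed to be
+# provided by the calling test; they are not defined in this module.
+#
+#     from autotest_lib.server.cros.minios import minios_util
+#
+#     nebraska = minios_util.NebraskaService(test, host, payload_url)
+#     nebraska.start()    # writes the config and (re)starts the upstart job
+#     try:
+#         pass            # ... run the update/recovery flow here ...
+#     finally:
+#         nebraska.stop() # stops the service and removes the config file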
diff --git a/server/cros/multimedia/audio_facade_adapter.py b/server/cros/multimedia/audio_facade_adapter.py
index 2c57c46..11f1e4f 100644
--- a/server/cros/multimedia/audio_facade_adapter.py
+++ b/server/cros/multimedia/audio_facade_adapter.py
@@ -287,6 +287,14 @@
         """
         self._audio_proxy.set_chrome_mute(mute)
 
+    def set_chrome_active_input_gain(self, gain):
+        """Sets the active audio input gain using chrome.audio API.
+
+        @param gain: Gain to set (0~100).
+
+        """
+        self._audio_proxy.set_chrome_active_input_gain(gain)
+
     def check_audio_stream_at_selected_device(self):
         """Checks the audio output is at expected node"""
         self._audio_proxy.check_audio_stream_at_selected_device()
@@ -319,6 +327,32 @@
                 output_node_type, input_node_type)
 
 
+    def get_noise_cancellation_supported(self):
+        """Gets whether the device supports Noise Cancellation.
+
+        @returns: True if supported; False otherwise.
+
+        """
+        return self._audio_proxy.get_noise_cancellation_supported()
+
+
+    def set_bypass_block_noise_cancellation(self, bypass):
+        """Sets CRAS to bypass the blocking logic of Noise Cancellation.
+
+        @param bypass: True to bypass the blocking logic; False to restore it.
+
+        """
+        self._audio_proxy.set_bypass_block_noise_cancellation(bypass)
+
+
+    def set_noise_cancellation_enabled(self, enabled):
+        """Sets the state to enable or disable Noise Cancellation.
+
+        @param enabled: True to enable; False to disable.
+
+        """
+        self._audio_proxy.set_noise_cancellation_enabled(enabled)
+
     def start_arc_recording(self):
         """Starts recording using microphone app in container."""
         self._audio_proxy.start_arc_recording()
diff --git a/server/cros/multimedia/bluetooth_facade_adapter.py b/server/cros/multimedia/bluetooth_facade_adapter.py
index 0141cbc..fe4983b 100644
--- a/server/cros/multimedia/bluetooth_facade_adapter.py
+++ b/server/cros/multimedia/bluetooth_facade_adapter.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -14,13 +15,14 @@
 
     """
 
-    def __init__(self, host, remote_facade_proxy):
+    def __init__(self, host, remote_facade_proxy, floss):
         """Construct an BluetoothFacadeRemoteAdapter.
 
         @param host: Host object representing a remote host.
         @param remote_facade_proxy: RemoteFacadeProxy object.
+        @param floss: Whether to target the Floss daemon.
 
         """
         self._client = host
         super(BluetoothFacadeRemoteAdapter,
-              self).__init__(host, remote_facade_proxy)
+              self).__init__(host, remote_facade_proxy, floss)
diff --git a/server/cros/multimedia/input_facade_adapter.py b/server/cros/multimedia/input_facade_adapter.py
index 4fec178..b94d5f2 100644
--- a/server/cros/multimedia/input_facade_adapter.py
+++ b/server/cros/multimedia/input_facade_adapter.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/multimedia/remote_facade_factory.py b/server/cros/multimedia/remote_facade_factory.py
index e72eb7d..62ac7ee 100644
--- a/server/cros/multimedia/remote_facade_factory.py
+++ b/server/cros/multimedia/remote_facade_factory.py
@@ -87,7 +87,8 @@
                  host,
                  no_chrome,
                  extra_browser_args=None,
-                 disable_arc=False):
+                 disable_arc=False,
+                 force_python3=False):
         """Construct a RemoteFacadeProxy.
 
         @param host: Host object representing a remote host.
@@ -95,6 +96,7 @@
         @param extra_browser_args: A list containing extra browser args passed
                                    to Chrome in addition to default ones.
         @param disable_arc: True to disable ARC++.
+        @param force_python3: Force the xmlrpc server to run as python3.
 
         """
         self._client = host
@@ -103,6 +105,8 @@
         self._no_chrome = no_chrome
         self._extra_browser_args = extra_browser_args
         self._disable_arc = disable_arc
+        self._force_python3 = force_python3
+
         self.connect()
         if not no_chrome:
             self._start_chrome(reconnect=False, retry=True,
@@ -258,18 +262,26 @@
                       delay_sec=self.XMLRPC_RETRY_DELAY)
         def connect_with_retries():
             """Connects the XML-RPC proxy with retries."""
+            # Until all facades support python3 and are tested with it, we only
+            # force python3 if specifically asked to.
+            if self._force_python3:
+                cmd = '{} {}'.format(
+                        constants.MULTIMEDIA_XMLRPC_SERVER_COMMAND,
+                        '--py_version=3')
+            else:
+                cmd = constants.MULTIMEDIA_XMLRPC_SERVER_COMMAND
+
             self._xmlrpc_proxy = self._client.rpc_server_tracker.xmlrpc_connect(
-                    constants.MULTIMEDIA_XMLRPC_SERVER_COMMAND,
+                    cmd,
                     constants.MULTIMEDIA_XMLRPC_SERVER_PORT,
-                    command_name=(
-                        constants.MULTIMEDIA_XMLRPC_SERVER_CLEANUP_PATTERN
-                    ),
+                    command_name=(constants.
+                                  MULTIMEDIA_XMLRPC_SERVER_CLEANUP_PATTERN),
                     ready_test_name=(
-                        constants.MULTIMEDIA_XMLRPC_SERVER_READY_METHOD),
+                            constants.MULTIMEDIA_XMLRPC_SERVER_READY_METHOD),
                     timeout_seconds=self.XMLRPC_CONNECT_TIMEOUT,
                     logfile=constants.MULTIMEDIA_XMLRPC_SERVER_LOG_FILE,
-                    request_timeout_seconds=
-                            constants.MULTIMEDIA_XMLRPC_SERVER_REQUEST_TIMEOUT)
+                    request_timeout_seconds=constants.
+                    MULTIMEDIA_XMLRPC_SERVER_REQUEST_TIMEOUT)
 
         logging.info('Setup the connection to RPC server, with retries...')
         connect_with_retries()
@@ -337,7 +349,8 @@
                  install_autotest=True,
                  results_dir=None,
                  extra_browser_args=None,
-                 disable_arc=False):
+                 disable_arc=False,
+                 force_python3=False):
         """Construct a RemoteFacadeFactory.
 
         @param host: Host object representing a remote host.
@@ -347,6 +360,7 @@
         @param extra_browser_args: A list containing extra browser args passed
                                    to Chrome in addition to default ones.
         @param disable_arc: True to disable ARC++.
+        @param force_python3: Force remote facade to run in python3.
         If it is not None, we will get multimedia init log to the results_dir.
 
         """
@@ -361,7 +375,8 @@
                     host=self._client,
                     no_chrome=no_chrome,
                     extra_browser_args=extra_browser_args,
-                    disable_arc=disable_arc)
+                    disable_arc=disable_arc,
+                    force_python3=force_python3)
         finally:
             if results_dir:
                 host.get_file(constants.MULTIMEDIA_XMLRPC_SERVER_LOG_FILE,
@@ -412,10 +427,10 @@
         return browser_facade_adapter.BrowserFacadeRemoteAdapter(self._proxy)
 
 
-    def create_bluetooth_facade(self):
+    def create_bluetooth_facade(self, floss):
         """"Creates a bluetooth facade object."""
         return bluetooth_facade_adapter.BluetoothFacadeRemoteAdapter(
-                self._client, self._proxy)
+                self._client, self._proxy, floss)
 
 
     def create_input_facade(self):
diff --git a/server/cros/multimedia/system_facade_adapter.py b/server/cros/multimedia/system_facade_adapter.py
index 6b51f1b..a130508 100644
--- a/server/cros/multimedia/system_facade_adapter.py
+++ b/server/cros/multimedia/system_facade_adapter.py
@@ -140,6 +140,12 @@
         """
         return self._system_proxy.get_and_discard_bg_worker_output()
 
+    def get_energy_usage(self):
+        """
+        Gets the energy counter value as a string.
+        """
+        return self._system_proxy.get_energy_usage()
+
     def stop_bg_worker(self):
         """
         Stop the worker.
diff --git a/server/cros/multimedia/video_facade_adapter.py b/server/cros/multimedia/video_facade_adapter.py
index a61374b..04e27c2 100644
--- a/server/cros/multimedia/video_facade_adapter.py
+++ b/server/cros/multimedia/video_facade_adapter.py
@@ -39,7 +39,7 @@
     def send_playback_file(self, path):
         """Copies a file to client.
 
-        The files on the client side will be deleted by VideoFacadeNative
+        The files on the client side will be deleted by VideoFacadeLocal
         after the test.
 
         @param path: A path to the file.
diff --git a/server/cros/network/OWNERS b/server/cros/network/OWNERS
new file mode 100644
index 0000000..97e5eb6
--- /dev/null
+++ b/server/cros/network/OWNERS
@@ -0,0 +1 @@
+include /WIFI_OWNERS
diff --git a/server/cros/network/apmanager_service_provider.py b/server/cros/network/apmanager_service_provider.py
index cae84c9..966fe9b 100644
--- a/server/cros/network/apmanager_service_provider.py
+++ b/server/cros/network/apmanager_service_provider.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/network/arping_runner.py b/server/cros/network/arping_runner.py
index bf82c80..009a941 100644
--- a/server/cros/network/arping_runner.py
+++ b/server/cros/network/arping_runner.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/network/attenuator.py b/server/cros/network/attenuator.py
index 70c3aae..98049db 100644
--- a/server/cros/network/attenuator.py
+++ b/server/cros/network/attenuator.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -40,8 +41,8 @@
         if config_str.startswith("MN="):
             config_str = config_str[len("MN="):]
 
-        self.properties = dict(zip(['model', 'max_freq', 'max_atten'],
-                                   config_str.split("-", 2)))
+        self.properties = dict(list(zip(['model', 'max_freq', 'max_atten'],
+                                   config_str.split("-", 2))))
         self.max_atten = float(self.properties['max_atten'])
         self.min_atten = 0
 
diff --git a/server/cros/network/attenuator_controller.py b/server/cros/network/attenuator_controller.py
index 90d91f3..cf27b57 100644
--- a/server/cros/network/attenuator_controller.py
+++ b/server/cros/network/attenuator_controller.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,9 +9,11 @@
 from autotest_lib.server.cros.network import attenuator
 from autotest_lib.server.cros.network import attenuator_hosts
 
-from chromite.lib import timeout_util
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
 
 HOST_TO_FIXED_ATTENUATIONS = attenuator_hosts.HOST_FIXED_ATTENUATIONS
+# Fake entry used when an attenuator has not been added to attenuator_hosts.py.
+FAKE_HOST = HOST_TO_FIXED_ATTENUATIONS['fake-atten-host']
 
 
 class AttenuatorController(object):
@@ -21,12 +24,15 @@
     test some roaming situations.  The throughput vs signal strength tests
     are referred to rate vs range (RvR) tests in places.
 
+    Fixed attenuations should be recorded in attenuator_hosts.py; otherwise a
+    TestError will be raised when the fixed attenuations are accessed.
+
     """
 
     @property
     def supported_attenuators(self):
         """@return iterable of int attenuators supported on this host."""
-        return self._fixed_attenuations.keys()
+        return list(self._fixed_attenuations.keys())
 
 
     def __init__(self, hostname):
@@ -37,10 +43,13 @@
         """
         self.hostname = hostname
         super(AttenuatorController, self).__init__()
-        part = hostname.split('.', 1)[0]
-        if part not in HOST_TO_FIXED_ATTENUATIONS.keys():
-            raise error.TestError('Unexpected RvR host name %r.' % hostname)
-        self._fixed_attenuations = HOST_TO_FIXED_ATTENUATIONS[part]
+        part = hostname.split('.cros', 1)[0]
+        if part not in list(HOST_TO_FIXED_ATTENUATIONS.keys()):
+            logging.debug('Attenuator %s not found in attenuator_host list',
+                          part)
+            self._fixed_attenuations = FAKE_HOST
+        else:
+            self._fixed_attenuations = HOST_TO_FIXED_ATTENUATIONS[part]
         num_atten = len(self.supported_attenuators)
 
         self._attenuator = attenuator.Attenuator(hostname, num_atten)
@@ -57,11 +66,14 @@
                 attenuator has a different fixed path loss per frequency.
         @param freq: int frequency in MHz.
         @returns int approximate frequency from self._fixed_attenuations.
+        @raises TestError if attenuator is not in attenuator_hosts.py
 
         """
+        self._fail_if_fake()
+
         old_offset = None
         approx_freq = None
-        for defined_freq in self._fixed_attenuations[attenuator_num].keys():
+        for defined_freq in list(self._fixed_attenuations[attenuator_num].keys()):
             new_offset = abs(defined_freq - freq)
             if old_offset is None or new_offset < old_offset:
                 old_offset = new_offset
@@ -89,8 +101,11 @@
                 varies with frequency.
         @param attenuator_num: int attenuator to change, or None to
                 set all variable attenuators.
+        @raises TestError if attenuator is not in attenuator_hosts.py
 
         """
+        self._fail_if_fake()
+
         affected_attenuators = self.supported_attenuators
         if attenuator_num is not None:
             affected_attenuators = [attenuator_num]
@@ -137,11 +152,14 @@
         minimal total attenuation when stepping through attenuation levels.
 
         @return maximum starting attenuation value
+        @raises TestError if attenuator is not in attenuator_hosts.py
 
         """
+        self._fail_if_fake()
+
         max_atten = 0
-        for atten_num in self._fixed_attenuations.iterkeys():
-            atten_values = self._fixed_attenuations[atten_num].values()
+        for atten_num in self._fixed_attenuations.keys():
+            atten_values = list(self._fixed_attenuations[atten_num].values())
             max_atten = max(max(atten_values), max_atten)
         return max_atten
 
@@ -214,3 +232,14 @@
         if min_sig <= curr_sig_level <= max_sig:
             return True
         return False
+
+    def _fail_if_fake(self):
+        """Raises a TestError if this attenuator is missing.
+
+        If an attenuator is missing, we use a fake entry. This function
+        will fail the test if the current attenuator is fake.
+        """
+        if self._fixed_attenuations == FAKE_HOST:
+            raise error.TestError(
+                    'Attenuator %r not found in attenuator_hosts.py' %
+                    self.hostname)
diff --git a/server/cros/network/attenuator_hosts.py b/server/cros/network/attenuator_hosts.py
index c0a0778..a060882 100644
--- a/server/cros/network/attenuator_hosts.py
+++ b/server/cros/network/attenuator_hosts.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,6 +9,11 @@
 # The map maps from:
 #     attenuator hostname -> attenuator number -> frequency -> loss in dB.
 HOST_FIXED_ATTENUATIONS = {
+        'fake-atten-host': {
+                0: {2437: 0, 5220: 0, 5765: 0},
+                1: {2437: 0, 5220: 0, 5765: 0},
+                2: {2437: 0, 5220: 0, 5765: 0},
+                3: {2437: 0, 5220: 0, 5765: 0}},
         'chromeos1-dev-host4-attenuator': {
                 0: {2437: 53, 5220: 59, 5765: 59},
                 1: {2437: 56, 5220: 56, 5765: 56},
@@ -64,16 +70,21 @@
                 1: {2437: 56, 5220: 56, 5765: 56},
                 2: {2437: 53, 5220: 58, 5765: 60},
                 3: {2437: 56, 5220: 56, 5765: 56}},
+        'chromeos15-row3-rack7-host3-btattenuator': {
+                0: {2450: 55},
+                1: {2450: 55},
+                2: {2450: 55},
+                3: {2450: 55}},
         'chromeos15-row3-rack7-host4-attenuator': {
                 0: {2437: 53, 5220: 59, 5765: 60},
                 1: {2437: 57, 5220: 56, 5765: 58},
                 2: {2437: 53, 5220: 59, 5765: 60},
                 3: {2437: 57, 5220: 56, 5765: 58}},
         'chromeos15-row3-rack7-host5-attenuator': {
-                0: {2437: 53, 5220: 59, 5765: 59},
-                1: {2437: 56, 5220: 56, 5765: 57},
-                2: {2437: 52, 5220: 59, 5765: 58},
-                3: {2437: 56, 5220: 56, 5765: 57}},
+                0: {2437: 53, 5220: 58, 5765: 60},
+                1: {2437: 55, 5220: 55, 5765: 54},
+                2: {2437: 52, 5220: 57, 5765: 60},
+                3: {2437: 55, 5220: 56, 5765: 54}},
         'chromeos15-row3-rack7-host6-attenuator': {
                 0: {2437: 53, 5220: 58, 5765: 59},
                 1: {2437: 56, 5220: 57, 5765: 57},
@@ -152,10 +163,10 @@
                 2: {2437: 52, 5220: 56, 5765: 58},
                 3: {2437: 55, 5220: 54, 5765: 57}},
         'chromeos15-row3-rack10-host3-attenuator': {
-                0: {2437: 52, 5220: 55, 5765: 60},
-                1: {2437: 55, 5220: 54, 5765: 59},
-                2: {2437: 52, 5220: 55, 5765: 61},
-                3: {2437: 55, 5220: 53, 5765: 59}},
+                0: {2437: 53, 5220: 57, 5765: 57},
+                1: {2437: 56, 5220: 56, 5765: 57},
+                2: {2437: 53, 5220: 57, 5765: 59},
+                3: {2437: 56, 5220: 57, 5765: 58}},
         'chromeos15-row3-rack10-host4-attenuator': {
                 0: {2437: 51, 5220: 55, 5765: 59},
                 1: {2437: 55, 5220: 53, 5765: 58},
diff --git a/server/cros/network/chaos_clique_utils.py b/server/cros/network/chaos_clique_utils.py
index 099fd44..189f4f3 100644
--- a/server/cros/network/chaos_clique_utils.py
+++ b/server/cros/network/chaos_clique_utils.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -38,11 +39,11 @@
     for pcap in available_pcaps:
         # Ensure the pcap and dut are in the same subnet
         # Encode response that's in unicode format
-        pcap_hostname = pcap['hostname'].encode("utf-8")
+        pcap_hostname = (pcap['hostname'])
         # Pass pcap hostname as set to lock_kmanager
         pcap_host = set([pcap_hostname])
         if lock_manager.lock(pcap_host):
-            return hosts.SSHHost(pcap['hostname'] + '.cros')
+            return hosts.SSHHost(pcap['hostname'])
         else:
             logging.info('Unable to lock %s', pcap['hostname'])
             continue
diff --git a/server/cros/network/connection_worker.py b/server/cros/network/connection_worker.py
index 990afc1..df693ae 100644
--- a/server/cros/network/connection_worker.py
+++ b/server/cros/network/connection_worker.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/network/expected_performance_results.py b/server/cros/network/expected_performance_results.py
new file mode 100644
index 0000000..21d74a0
--- /dev/null
+++ b/server/cros/network/expected_performance_results.py
@@ -0,0 +1,233 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.network import hostap_config
+from autotest_lib.server.cros.network import perf_test_manager as perf_manager
+"""
+This file defines the expected throughput values that should be used with the network_WiFi_Perf.*
+tests.
+
+The expected throughput values depend on the following parameters:
+1- The test type:
+    a) TCP_BIDIRECTIONAL
+    b) TCP_RX
+    c) TCP_TX
+    d) UDP_BIDIRECTIONAL
+    e) UDP_RX
+    f) UDP_TX
+    Note: The throughput is viewed from the DUT perspective:
+        streaming to DUT = RX
+        streaming from DUT = TX
+        simultaneous TX + RX = BIDIRECTIONAL
+2- The Connection mode:
+    a) 80211n
+    b) 80211ac
+3- The channel width:
+    a) 20 MHz
+    b) 40 MHz
+    c) 80 MHz
+"""
+
+expected_throughput_wifi = {
+        perf_manager.PerfTestTypes.TEST_TYPE_TCP_BIDIRECTIONAL: {
+                hostap_config.HostapConfig.MODE_11N_PURE: {
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_20: (0, 0),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_PLUS:
+                        (0, 0),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_MINUS:
+                        (0, 0)
+                },
+                hostap_config.HostapConfig.MODE_11AC_MIXED: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80: (0, 0)
+                },
+                hostap_config.HostapConfig.MODE_11AC_PURE: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_20:
+                        (0, 0),
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40: (0, 0)
+                }
+        },
+        perf_manager.PerfTestTypes.TEST_TYPE_UDP_BIDIRECTIONAL: {
+                hostap_config.HostapConfig.MODE_11N_PURE: {
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_20: (0, 0),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_PLUS:
+                        (0, 0),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_MINUS:
+                        (0, 0)
+                },
+                hostap_config.HostapConfig.MODE_11AC_MIXED: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80: (0, 0)
+                },
+                hostap_config.HostapConfig.MODE_11AC_PURE: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_20:
+                        (0, 0),
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40: (0, 0)
+                }
+        },
+        perf_manager.PerfTestTypes.TEST_TYPE_TCP_RX: {
+                hostap_config.HostapConfig.MODE_11N_PURE: {
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_20:
+                        (61, 86),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_PLUS:
+                        (115, 166),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_MINUS:
+                        (115, 166)
+                },
+                hostap_config.HostapConfig.MODE_11AC_MIXED: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80:
+                        (200, 400)
+                },
+                hostap_config.HostapConfig.MODE_11AC_PURE: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_20:
+                        (74, 103),
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40:
+                        (153, 221)
+                }
+        },
+        perf_manager.PerfTestTypes.TEST_TYPE_TCP_TX: {
+                hostap_config.HostapConfig.MODE_11N_PURE: {
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_20:
+                        (61, 86),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_PLUS:
+                        (115, 166),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_MINUS:
+                        (115, 166)
+                },
+                hostap_config.HostapConfig.MODE_11AC_MIXED: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80:
+                        (200, 400)
+                },
+                hostap_config.HostapConfig.MODE_11AC_PURE: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_20:
+                        (74, 103),
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40:
+                        (153, 221)
+                }
+        },
+        perf_manager.PerfTestTypes.TEST_TYPE_UDP_RX: {
+                hostap_config.HostapConfig.MODE_11N_PURE: {
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_20:
+                        (72, 101),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_PLUS:
+                        (135, 195),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_MINUS:
+                        (135, 195)
+                },
+                hostap_config.HostapConfig.MODE_11AC_MIXED: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80:
+                        (347, 500)
+                },
+                hostap_config.HostapConfig.MODE_11AC_PURE: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_20:
+                        (87, 121),
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40:
+                        (180, 260)
+                }
+        },
+        perf_manager.PerfTestTypes.TEST_TYPE_UDP_TX: {
+                hostap_config.HostapConfig.MODE_11N_PURE: {
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_20:
+                        (72, 101),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_PLUS:
+                        (135, 195),
+                        hostap_config.HostapConfig.HT_CHANNEL_WIDTH_40_MINUS:
+                        (135, 195)
+                },
+                hostap_config.HostapConfig.MODE_11AC_MIXED: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80:
+                        (347, 500)
+                },
+                hostap_config.HostapConfig.MODE_11AC_PURE: {
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_20:
+                        (87, 121),
+                        hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40:
+                        (180, 260)
+                }
+        }
+}
+
+
+def get_expected_throughput_wifi(test_type, mode, channel_width):
+    """Returns the expected throughput for WiFi-only performance tests.
+
+    @param test_type: the PerfTestTypes test type.
+
+    @param mode: the WiFi mode such as 80211n.
+
+    @param channel_width: the channel width used in the test.
+
+    @return a tuple of two integers (must, should) of the expected throughputs in Mbps.
+
+    """
+    if test_type in expected_throughput_wifi:
+        if mode in expected_throughput_wifi[test_type]:
+            if channel_width in expected_throughput_wifi[test_type][mode]:
+                return expected_throughput_wifi[test_type][mode][channel_width]
+    ret_mode = hostap_config.HostapConfig.VHT_NAMES.get(channel_width)
+    if ret_mode is None:
+        ret_mode = hostap_config.HostapConfig.HT_NAMES.get(channel_width)
+    raise error.TestFail(
+            'Failed to find the expected throughput from the key values, test type = %s, mode = %s, channel width = %s'
+            % (test_type, mode, ret_mode))
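+
+# A minimal usage sketch (illustrative only) of the lookup above; the test
+# type, mode and channel width are arbitrary example values.
+#
+#     must, should = get_expected_throughput_wifi(
+#             perf_manager.PerfTestTypes.TEST_TYPE_TCP_RX,
+#             hostap_config.HostapConfig.MODE_11AC_PURE,
+#             hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40)
+#     # must == 153, should == 221 (Mbps) per the table above.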
+
+
+"""These are special exceptions for specific boards that define the maximum
+throughput in Mbps that we expect boards to be able to achieve. Generally, these
+boards were qualified before the advent of platform throughput requirements, and
+therefore are exempted from meeting certain requirements. Each board must be
+annotated with a bug which includes the history on why the specific
+expectations exist for that board.
+"""
+max_throughput_expectation_for_boards = {
+        # caroline throughput results tracked in b:200743117.
+        "caroline": {
+                perf_manager.PerfTestTypes.TEST_TYPE_UDP_RX: 200
+        },
+        # elm throughput results tracked in b:201806809.
+        "elm": {
+                perf_manager.PerfTestTypes.TEST_TYPE_UDP_TX: 200,
+                perf_manager.PerfTestTypes.TEST_TYPE_UDP_RX: 300
+        },
+        # eve throughput results tracked in b:200743117.
+        "eve": {
+                perf_manager.PerfTestTypes.TEST_TYPE_UDP_RX: 275
+        },
+        # kukui throughput results tracked in b:201807413.
+        "kukui": {
+                perf_manager.PerfTestTypes.TEST_TYPE_UDP_RX: 300
+        },
+        # nami throughput results tracked in b:200743117.
+        "nami": {
+                perf_manager.PerfTestTypes.TEST_TYPE_UDP_RX: 140
+        },
+        # trogdor throughput results tracked in b:201807655.
+        "trogdor": {
+                perf_manager.PerfTestTypes.TEST_TYPE_UDP_RX: 250
+        },
+        # veyron_fievel throughput results tracked in b:199946512.
+        "veyron_fievel": {
+                perf_manager.PerfTestTypes.TEST_TYPE_TCP_TX: 130,
+                perf_manager.PerfTestTypes.TEST_TYPE_TCP_RX: 70,
+                perf_manager.PerfTestTypes.TEST_TYPE_UDP_TX: 130,
+                perf_manager.PerfTestTypes.TEST_TYPE_UDP_RX: 130
+        }
+}
+
+
+def get_board_max_expectation(test_type, board_name):
+    """Returns the maximum throughput expectation for a given board in a given
+    test type, or None if the board has no exception for that test type.
+
+    @param test_type: the PerfTestTypes test type.
+    @param board_name: string name of the board, as defined by
+    SiteLinuxSystem.board field.
+
+    @return integer value for maximum throughput expectation (in Mbps) for the
+    given board and test type, or None if the maximum is not defined.
+    """
+    board_maximums = max_throughput_expectation_for_boards.get(board_name)
+    if not board_maximums:
+        return None
+    return board_maximums.get(test_type)
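+
+
+# A minimal usage sketch (illustrative only): capping the expected value with a
+# board-specific exception when one exists. `board` and `must` are assumed to
+# be provided by the calling test (e.g. board from SiteLinuxSystem.board).
+#
+#     expectation = get_board_max_expectation(
+#             perf_manager.PerfTestTypes.TEST_TYPE_UDP_RX, board)
+#     if expectation is not None:
+#         must = min(must, expectation)  # e.g. 200 Mbps for 'caroline'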
diff --git a/server/cros/network/frame_sender.py b/server/cros/network/frame_sender.py
index eabd4e2..1966d7a 100644
--- a/server/cros/network/frame_sender.py
+++ b/server/cros/network/frame_sender.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/network/hostap_config.py b/server/cros/network/hostap_config.py
index 561f02e..6b2c400 100644
--- a/server/cros/network/hostap_config.py
+++ b/server/cros/network/hostap_config.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,6 +7,8 @@
 import copy
 import logging
 
+import six
+
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
 from autotest_lib.server.cros.network import packet_capturer
@@ -168,6 +171,7 @@
         HT_CHANNEL_WIDTH_40_MINUS: 'HT40-',
     }
 
+    VHT_CHANNEL_WIDTH_20 = object()
     VHT_CHANNEL_WIDTH_40 = object()
     VHT_CHANNEL_WIDTH_80 = object()
     VHT_CHANNEL_WIDTH_160 = object()
@@ -175,10 +179,11 @@
 
     # Human readable names for these channel widths.
     VHT_NAMES = {
-        VHT_CHANNEL_WIDTH_40: 'VHT40',
-        VHT_CHANNEL_WIDTH_80: 'VHT80',
-        VHT_CHANNEL_WIDTH_160: 'VHT160',
-        VHT_CHANNEL_WIDTH_80_80: 'VHT80+80',
+            VHT_CHANNEL_WIDTH_20: 'VHT20',
+            VHT_CHANNEL_WIDTH_40: 'VHT40',
+            VHT_CHANNEL_WIDTH_80: 'VHT80',
+            VHT_CHANNEL_WIDTH_160: 'VHT160',
+            VHT_CHANNEL_WIDTH_80_80: 'VHT80+80',
     }
 
     # This is a loose merging of the rules for US and EU regulatory
@@ -186,13 +191,13 @@
     # we tolerate HT40 in channels 149-161 (not allowed in EU), but also
     # tolerate HT40+ on channel 7 (not allowed in the US).  We take the loose
     # definition so that we don't prohibit testing in either domain.
-    HT40_ALLOW_MAP = {N_CAPABILITY_HT40_MINUS: range(5, 14) +
-                                           range(40, 65, 8) +
-                                           range(104, 145, 8) +
+    HT40_ALLOW_MAP = {N_CAPABILITY_HT40_MINUS: list(range(5, 14)) +
+                                           list(range(40, 65, 8)) +
+                                           list(range(104, 145, 8)) +
                                            [153, 161],
-                  N_CAPABILITY_HT40_PLUS: range(1, 10) +
-                                           range(36, 61, 8) +
-                                           range(100, 141, 8) +
+                  N_CAPABILITY_HT40_PLUS: list(range(1, 10)) +
+                                           list(range(36, 61, 8)) +
+                                           list(range(100, 141, 8)) +
                                            [149, 157]}
 
     PMF_SUPPORT_DISABLED = 0
@@ -226,7 +231,7 @@
         @return int frequency in MHz associated with the channel.
 
         """
-        for frequency, channel_iter in HostapConfig.CHANNEL_MAP.iteritems():
+        for frequency, channel_iter in six.iteritems(HostapConfig.CHANNEL_MAP):
             if channel == channel_iter:
                 return frequency
         else:
@@ -288,7 +293,7 @@
         """@return string suitable for the vht_capab= line in a hostapd config.
         """
         ret = []
-        for cap in self.AC_CAPABILITIES_MAPPING.keys():
+        for cap in list(self.AC_CAPABILITIES_MAPPING.keys()):
             if cap in self._ac_capabilities:
                 ret.append(self.AC_CAPABILITIES_MAPPING[cap])
         return ''.join(ret)
@@ -324,6 +329,28 @@
 
         raise error.TestFail('Invalid mode.')
 
+    @property
+    def mode(self):
+        """@return string hardware mode."""
+        return self._mode
+
+    @property
+    def channel_width(self):
+        """@return object channel width.
+        Note: This property ignores legacy rates (e.g., 11g); it will return
+              None for those rates.
+        """
+        ht_channel_width = self._ht_mode
+        if self.vht_channel_width is not None:
+            if (
+                    self.vht_channel_width == self.VHT_CHANNEL_WIDTH_40
+                    or self.vht_channel_width == self.VHT_CHANNEL_WIDTH_20):
+                if ht_channel_width == self.HT_CHANNEL_WIDTH_20:
+                    return self.VHT_CHANNEL_WIDTH_20
+            return self.vht_channel_width
+        if ht_channel_width:
+            return ht_channel_width
+        return None
 
     @property
     def _is_11n(self):
@@ -421,8 +448,9 @@
 
         """
 
-        if (not self.vht_channel_width or
-                self.vht_channel_width == self.VHT_CHANNEL_WIDTH_40):
+        if (not self.vht_channel_width
+                    or self.vht_channel_width == self.VHT_CHANNEL_WIDTH_40
+                    or self.vht_channel_width == self.VHT_CHANNEL_WIDTH_20):
             # if it is VHT40, capture packets on the correct 40MHz band since
             # for packet capturing purposes, only the channel width matters
             ht_mode = self._ht_mode
@@ -544,6 +572,11 @@
         are checked for validity (i.e. you can't specify an invalid channel
         or a frequency that will not be accepted).
 
+        According to IEEE 802.11ac, both the HT and VHT channel width fields
+        must be used to select the desired VHT channel width. Refer to the
+        IEEE 802.11ac tables (VHT Operation Information subfields) and the
+        VHT BSS operating channel width definition.
+
         @param mode string MODE_11x defined above.
         @param channel int channel number.
         @param frequency int frequency of channel.
@@ -635,7 +668,8 @@
         self._security_config = (copy.copy(security_config) or
                                 xmlrpc_security_types.SecurityConfig())
         self._obss_interval = obss_interval
-        if vht_channel_width == self.VHT_CHANNEL_WIDTH_40:
+        if (vht_channel_width == self.VHT_CHANNEL_WIDTH_40
+                    or vht_channel_width == self.VHT_CHANNEL_WIDTH_20):
             self._vht_oper_chwidth = 0
         elif vht_channel_width == self.VHT_CHANNEL_WIDTH_80:
             self._vht_oper_chwidth = 1
@@ -697,7 +731,7 @@
         @return True iff the current mode supports the band of the channel.
 
         """
-        for freq, channel in self.CHANNEL_MAP.iteritems():
+        for freq, channel in six.iteritems(self.CHANNEL_MAP):
             if channel == value:
                 return self.supports_frequency(freq)
 
diff --git a/server/cros/network/ip_config_context_manager.py b/server/cros/network/ip_config_context_manager.py
new file mode 100644
index 0000000..f072e48
--- /dev/null
+++ b/server/cros/network/ip_config_context_manager.py
@@ -0,0 +1,89 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+
+class IpConfigContextManager(object):
+    """Allows changes to IP configs on multiple host test devices which are
+    guaranteed to be reverted when the context is exited.
+    """
+
+    def bring_interface_up(self, host, dev_name):
+        """Bring a device interface up on the host. This interface will
+        automatically be brought down when the context is exited.
+
+        @param host Host Device to bring the interface up on.
+        @param dev_name String name of the device to bring up.
+
+        """
+        clear_command = 'sudo ip link set %s down' % dev_name
+        if host in self._iface_cleanup_dict:
+            self._iface_cleanup_dict[host].append(clear_command)
+        else:
+            self._iface_cleanup_dict[host] = [clear_command]
+        host.run('sudo ip link set %s up' % dev_name)
+
+    def add_ip_route(self, host, dest_ip, iface_name, via_ip=None):
+        """Add an ip route to the device. This route will be deleted when the
+        context is exited.
+
+        @param host Host Device to assign the ip route on.
+        @param dest_ip String destination ip address of the ip route.
+        @param iface_name String The local iface to route the traffic from.
+        @param via_ip String an optional ip address to route the traffic through.
+
+        """
+        via = "via %s " % via_ip if via_ip else ""
+        clear_command = 'sudo ip route del table 255 %s %sdev %s' % (
+                dest_ip, via, iface_name)
+        if host in self._ip_route_cleanup_dict:
+            self._ip_route_cleanup_dict[host].append(clear_command)
+        else:
+            self._ip_route_cleanup_dict[host] = [clear_command]
+        host.run('sudo ip route replace table 255 %s %sdev %s' %
+                 (dest_ip, via, iface_name))
+
+    def assign_ip_addr_to_iface(self, host, ip_addr, iface_name):
+        """Assign an ip address to an interface on the host. This address will be
+        deleted when the context is exited.
+
+        @param host Host Device to assign the ip address on.
+        @param ip_addr String ip address to assign.
+        @param iface_name String The interface to assign the ip address to.
+
+        """
+        clear_command = 'sudo ip addr del %s/24 dev %s' % (ip_addr, iface_name)
+        if host in self._ip_addr_cleanup_dict:
+            self._ip_addr_cleanup_dict[host].append(clear_command)
+        else:
+            self._ip_addr_cleanup_dict[host] = [clear_command]
+        host.run('sudo ip addr replace %s/24 dev %s' % (ip_addr, iface_name))
+
+    def __init__(self):
+        """Construct an IpConfigContextManager. This class uses dictionaries to
+        store the cleanup commands that must be run on various hosts when the
+        context is exited.
+        """
+        self._iface_cleanup_dict = dict()
+        self._ip_route_cleanup_dict = dict()
+        self._ip_addr_cleanup_dict = dict()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        logging.info('Cleaning up ip configs from test devices.')
+        for host in self._ip_route_cleanup_dict:
+            for command in self._ip_route_cleanup_dict[host]:
+                host.run(command)
+
+        for host in self._ip_addr_cleanup_dict:
+            for command in self._ip_addr_cleanup_dict[host]:
+                host.run(command)
+
+        for host in self._iface_cleanup_dict:
+            for command in self._iface_cleanup_dict[host]:
+                host.run(command)
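For reference, a minimal usage sketch of the new IpConfigContextManager;
`host` stands in for an Autotest host object with a run() method and is a
hypothetical name:

    from autotest_lib.server.cros.network import ip_config_context_manager

    # Every change made inside the context is reverted on exit, even if the
    # test body raises.
    with ip_config_context_manager.IpConfigContextManager() as ip_context:
        ip_context.bring_interface_up(host, 'eth1')
        ip_context.assign_ip_addr_to_iface(host, '192.168.1.50', 'eth1')
        ip_context.add_ip_route(host, '192.168.1.51', 'eth1')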
diff --git a/server/cros/network/iperf_runner.py b/server/cros/network/iperf_runner.py
new file mode 100644
index 0000000..6e1ed34
--- /dev/null
+++ b/server/cros/network/iperf_runner.py
@@ -0,0 +1,449 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+import logging
+import time
+import math
+import numbers
+import numpy
+from enum import IntEnum
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import path_utils
+
+
+class IperfResult(object):
+    """Logic for parsing and representing iperf results."""
+
+    @staticmethod
+    def from_iperf_output(results, config):
+        """Parse the text output of iperf and return an IperfResult.
+
+        @param results string raw results from iperf.
+        @param config IperfConfig the config for the test.
+
+        @return IperfResult result.
+
+        """
+
+        class IperfIndex(IntEnum):
+            """Defines the indices of certain values in iperf output."""
+            LOG_ID_INDEX = 5
+            INTERVAL_INDEX = 6
+            DATA_TRANSFERED_INDEX = 7
+            PERCENT_LOSS_INDEX = 12
+
+        NUM_FIELDS_IN_SERVER_OUTPUT = 14
+
+        lines = results.splitlines()
+        total_throughput = 0
+        test_durations = []
+        percent_losses = []
+        for line in lines:
+            fields = line.split(',')
+            # Negative Log ID values are used for sum total output which we
+            # don't use.
+            if float(fields[IperfIndex.LOG_ID_INDEX]) < 0:
+                continue
+            # Filter out client side logs from UDP results. We only want server
+            # side results because they reflect the amount of data that was
+            # actually received by the server. Server output has 14 fields while
+            # client output has 9 fields, so we use this to differentiate.
+            # Ideally we'd use the '-x D' option to filter the client side data,
+            # but this option makes the iperf output unreliable.
+            if config.udp and len(fields) < NUM_FIELDS_IN_SERVER_OUTPUT:
+                continue
+            total_data_bytes = float(fields[IperfIndex.DATA_TRANSFERED_INDEX])
+            test_interval = fields[IperfIndex.INTERVAL_INDEX]
+            test_start_end = test_interval.split('-')
+            duration = float(test_start_end[1]) - float(test_start_end[0])
+            test_durations.append(duration)
+            total_throughput += IperfResult._calculate_throughput(
+                    total_data_bytes, duration)
+            if (config.udp):
+                percent_losses.append(
+                        float(fields[IperfIndex.PERCENT_LOSS_INDEX]))
+
+        # We should get one line of output for each port used in the test. In
+        # rare cases, the data from one of the ports is not included in the
+        # iperf output, so discard results in these cases.
+        expected_num_output_lines = config.num_ports
+        if config.bidirectional:
+            expected_num_output_lines *= 2
+        if len(test_durations) != expected_num_output_lines:
+            logging.info(
+                    'iperf command output was missing some data, ignoring test run.'
+            )
+            return None
+
+        test_duration = math.fsum(test_durations) / len(test_durations)
+        if config.udp:
+            percent_loss = math.fsum(percent_losses) / len(percent_losses)
+        else:
+            percent_loss = None
+        return IperfResult(test_duration, total_throughput, percent_loss)
+
+    @staticmethod
+    def from_samples(samples):
+        """Build an averaged IperfResult from |samples|.
+
+        Calculate a representative sample with averaged values and the
+        standard deviation of the throughput across the samples.
+
+        @param samples list of IperfResult objects.
+        @return IperfResult object.
+
+        """
+        if len(samples) == 0:
+            return None
+        duration_samples = [float(sample.duration) for sample in samples]
+        duration_mean = numpy.mean(duration_samples)
+
+        throughput_samples = [float(sample.throughput) for sample in samples]
+        throughput_mean = numpy.mean(throughput_samples)
+        throughput_dev = numpy.std(throughput_samples)
+
+        # For TCP connections, the packet loss is 0 by definition. In these
+        # cases, the percent_loss will be None for all samples, and UDP results
+        # should never have a percent_loss of None, so we can just check the
+        # first sample.
+        if samples[0].percent_loss is None:
+            percent_loss_mean = None
+        else:
+            percent_loss_samples = [
+                    float(sample.percent_loss) for sample in samples
+            ]
+            percent_loss_mean = numpy.mean(percent_loss_samples)
+
+        return IperfResult(duration_mean,
+                           throughput_mean,
+                           percent_loss_mean,
+                           throughput_dev=throughput_dev)
+
+    def throughput_cv_less_than_maximum(self, max_cv):
+        """Check that the throughput from this result is "accurate" enough.
+
+        We say that an IperfResult is "accurate" enough when the coefficient of
+        variation (standard deviation / mean) is below the passed-in maximum.
+
+        @param max_cv float maximum coefficient of variation for the
+        throughput sample.
+        @return True on above condition.
+
+        """
+        if self.throughput is None or self.throughput_dev is None:
+            return True
+
+        if not self.throughput_dev and not self.throughput:
+            # 0/0 is undefined, but take this to be good for our purposes.
+            return True
+
+        if self.throughput_dev and not self.throughput:
+            # Deviation is non-zero, but the average is 0.  Deviation
+            # as a fraction of the self.throughput is undefined but in theory
+            # a "very large number."
+            return False
+
+        if self.throughput_dev / self.throughput > max_cv:
+            return False
+
+        return True
+
+    @staticmethod
+    def _calculate_throughput(total_data, duration):
+        """Calculate the throughput from the total bytes transeferred and the
+        duration of the test.
+
+        @param total_data int The number of bytes transferred during the test.
+        @param duration float The duration of the test in seconds.
+
+        @return float The throughput of the test in Mbps.
+        """
+        if duration == 0:
+            return 0
+        total_bits = total_data * 8
+        bits_per_second = total_bits / duration
+        return bits_per_second / 1000000
+
+    def __init__(self, duration, throughput, percent_loss,
+                 throughput_dev=None):
+        """Construct an IperfResult.
+
+        @param duration float how long the test took in seconds.
+        @param throughput float test throughput in Mbps.
+        @param percent_loss float percentage of packets lost in UDP transfer.
+        @param throughput_dev standard deviation of throughputs.
+        """
+        self.duration = duration
+        self.throughput = throughput
+        self.percent_loss = percent_loss
+        self.throughput_dev = throughput_dev
+
+    def get_keyval(self, prefix=''):
+        ret = {}
+        if prefix:
+            prefix = prefix + '_'
+        if self.throughput_dev is None:
+            margin = ''
+        else:
+            margin = '+-%0.2f' % self.throughput_dev
+        if self.throughput is not None:
+            ret[prefix + 'throughput'] = '%0.2f%s' % (self.throughput, margin)
+        return ret
+
+    def __repr__(self):
+        fields = []
+        fields += [
+                '%s=%0.2f' % item for item in list(vars(self).items())
+                if item[1] is not None and isinstance(item[1], numbers.Number)
+        ]
+        return '%s(%s)' % (self.__class__.__name__, ', '.join(fields))
+
+
+class IperfConfig(object):
+    """ Defines the configuration for an iperf run. """
+    DEFAULT_TEST_TIME = 10
+    DEFAULT_MAX_BANDWIDTH = '10000M'
+    DEFAULT_NUM_PORTS = 4
+
+    IPERF_TEST_TYPE_TCP_TX = 'tcp_tx'
+    IPERF_TEST_TYPE_TCP_RX = 'tcp_rx'
+    IPERF_TEST_TYPE_TCP_BIDIRECTIONAL = 'tcp_bidirectional'
+    IPERF_TEST_TYPE_UDP_TX = 'udp_tx'
+    IPERF_TEST_TYPE_UDP_RX = 'udp_rx'
+    IPERF_TEST_TYPE_UDP_BIDIRECTIONAL = 'udp_bidirectional'
+
+    def __init__(self,
+                 test_type,
+                 max_bandwidth=DEFAULT_MAX_BANDWIDTH,
+                 test_time=DEFAULT_TEST_TIME,
+                 num_ports=DEFAULT_NUM_PORTS):
+        """ Construct an IperfConfig.
+
+        @param test_type string, PerfTestTypes test type.
+        @param max_bandwidth string maximum bandwidth to be used during the
+        test, e.g. 100M (100 Mbps).
+        @param test_time int number of seconds to run the test for.
+        @param num_ports int number of ports to use in the test.
+        """
+
+        if test_type == IperfConfig.IPERF_TEST_TYPE_TCP_TX:
+            self.udp = False
+            self.bidirectional = False
+        elif test_type == IperfConfig.IPERF_TEST_TYPE_TCP_RX:
+            self.udp = False
+            self.bidirectional = False
+        elif test_type == IperfConfig.IPERF_TEST_TYPE_TCP_BIDIRECTIONAL:
+            self.udp = False
+            self.bidirectional = True
+        elif test_type == IperfConfig.IPERF_TEST_TYPE_UDP_TX:
+            self.udp = True
+            self.bidirectional = False
+        elif test_type == IperfConfig.IPERF_TEST_TYPE_UDP_RX:
+            self.udp = True
+            self.bidirectional = False
+        elif test_type == IperfConfig.IPERF_TEST_TYPE_UDP_BIDIRECTIONAL:
+            self.udp = True
+            self.bidirectional = True
+        else:
+            raise error.TestFail(
+                    'Test type %s is not supported by iperf_runner.' %
+                    test_type)
+        self.max_bandwidth = max_bandwidth
+        self.test_time = test_time
+        self.num_ports = num_ports
+        self.test_type = test_type
+
+
+class IperfRunner(object):
+    """Delegate to run iperf on a client/server pair."""
+
+    DEFAULT_TEST_TIME = 10
+    IPERF_SERVER_MAX_STARTUP_WAIT_TIME = 11
+    IPERF_CLIENT_TURNDOWN_WAIT_TIME = 1
+    IPERF_COMMAND_TIMEOUT_MARGIN = 20
+
+    def __init__(
+            self,
+            client_proxy,
+            server_proxy,
+            config,
+            client_interface=None,
+            server_interface=None,
+    ):
+        """Construct an IperfRunner. Use the IP addresses of the passed
+        interfaces if they are provided. Otherwise, attempt to use the WiFi
+        interface on the devices.
+
+        @param client_proxy LinuxSystem object for the client device.
+        @param server_proxy LinuxSystem object for the server device.
+        @param config IperfConfig object describing the test to run.
+        @param client_interface Interface object (optional).
+        @param server_interface Interface object (optional).
+
+        """
+        self._client_proxy = client_proxy
+        self._server_proxy = server_proxy
+        self._server_host = server_proxy.host
+        self._client_host = client_proxy.host
+        if server_interface:
+            self._server_ip = server_interface.ipv4_address
+        # If a server interface was not explicitly provided, attempt to use
+        # the WiFi IP of the device.
+        else:
+            try:
+                self._server_ip = server_proxy.wifi_ip
+            except Exception:
+                raise error.TestFail('Server device has no WiFi IP address, '
+                    'and no alternate interface was specified.')
+
+        if client_interface:
+            self._client_ip = client_interface.ipv4_address
+        # If a client interface was not explicitly provided, use the WiFi IP
+        # address of the WiFiClient device.
+        else:
+            try:
+                self._client_ip = client_proxy.wifi_ip
+            except Exception:
+                raise error.TestFail('Client device has no WiFi IP address, '
+                    'and no alternate interface was specified.')
+
+        # Assume minijail0 is on ${PATH}, but raise exception if it's not
+        # available on either the server or the client.
+        self._minijail = 'minijail0'
+        path_utils.must_be_installed(self._minijail, host=self._server_host)
+        path_utils.must_be_installed(self._minijail, host=self._client_host)
+        # Bind mount a tmpfs over /tmp, since netserver hard-codes the /tmp
+        # path. netserver's log files aren't useful anyway.
+        self._minijail = ("%s -v -k 'tmpfs,/tmp,tmpfs,"
+                          "MS_NODEV|MS_NOEXEC|MS_NOSUID,mode=755,size=10M'" %
+                          self._minijail)
+
+        self._config = config
+        self._command_iperf_server = path_utils.must_be_installed(
+                'iperf', host=self._server_host)
+        self._command_iperf_client = path_utils.must_be_installed(
+                'iperf', host=self._client_host)
+        self._udp_flag = '-u' if config.udp else ''
+        self._bidirectional_flag = '-d' if config.bidirectional else ''
+
+    def __enter__(self):
+        self._restart_iperf_server()
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self._client_proxy.firewall_cleanup()
+        self._server_proxy.firewall_cleanup()
+        self._kill_iperf_server()
+        self._kill_iperf_client()
+
+    def _kill_iperf_client(self):
+        """Kills any existing iperf process on the client."""
+        self._client_host.run('pkill -9 %s' %
+                              os.path.basename(self._command_iperf_client),
+                              ignore_status=True)
+
+    def _kill_iperf_server(self):
+        """Kills any existing iperf process on the serving host."""
+        self._server_host.run('pkill -9 %s' %
+                              os.path.basename(self._command_iperf_server),
+                              ignore_status=True)
+
+    def _restart_iperf_server(self):
+        """Start an iperf server on the server device. Also opens up firewalls
+        on the test devices.
+        """
+        logging.info('Starting iperf server...')
+        self._kill_iperf_server()
+        logging.debug('iperf server invocation: %s %s -s -B %s -D %s -w 320k',
+                      self._minijail, self._command_iperf_server,
+                      self._server_ip, self._udp_flag)
+        devnull = open(os.devnull, "w")
+        # 320kB is the maximum socket buffer size on Gale (default is 208kB).
+        self._server_host.run('%s %s -s -B %s -D %s -w 320k' %
+                              (self._minijail, self._command_iperf_server,
+                               self._server_ip, self._udp_flag),
+                              stderr_tee=devnull)
+        startup_time = time.time()
+        # Ensure the endpoints aren't firewalled.
+        protocol = 'udp' if self._config.udp else 'tcp'
+        self._client_proxy.firewall_open(protocol, self._server_ip)
+        self._server_proxy.firewall_open(protocol, self._client_ip)
+
+        # Run a client iperf test. The client will attempt to connect to the
+        # server for 10 seconds, but will exit early if it succeeds before
+        # that. This ensures that the server has had sufficient time to come
+        # online before we begin the tests. We don't fail on timeout here
+        # because the logic for failed connections is contained in run().
+        iperf_test = '%s -c %s -B %s -t 1 %s' % (
+                self._command_iperf_client, self._server_ip, self._client_ip,
+                self._udp_flag)
+        result = self._client_host.run(
+                iperf_test,
+                ignore_status=True,
+                ignore_timeout=True,
+                timeout=self.IPERF_SERVER_MAX_STARTUP_WAIT_TIME)
+        if not result or result.exit_status:
+            logging.debug(
+                    'Failed to make a connection to the server in %s seconds.',
+                    self.IPERF_SERVER_MAX_STARTUP_WAIT_TIME)
+        else:
+            logging.debug('Successfully made a connection to the server.')
+        # TODO(b:198343041) When iperf2 clients are run too quickly back to
+        # back, the server is unable to distinguish between them. Wait briefly
+        # to allow the server to reset.
+        time.sleep(self.IPERF_CLIENT_TURNDOWN_WAIT_TIME)
+
+    def run(self, ignore_failures=False, retry_count=3):
+        """Run iperf and take a performance measurement.
+
+        @param ignore_failures bool True iff iperf runs that fail should be
+                ignored.  If this happens, run will return a None value rather
+                than an IperfResult.
+        @param retry_count int number of times to retry the iperf command if
+                it fails due to an internal timeout within iperf.
+        @return IperfResult summarizing an iperf run.
+
+        """
+        iperf_client = '%s -c %s -B %s -b %s -x C -y c -P 4 -t %s %s %s' % (
+                self._command_iperf_client, self._server_ip, self._client_ip,
+                self._config.max_bandwidth, self._config.test_time,
+                self._udp_flag, self._bidirectional_flag)
+
+        logging.info('Running iperf client for %d seconds.',
+                     self._config.test_time)
+        logging.debug('iperf client invocation: %s', iperf_client)
+        timeout = self._config.test_time + self.IPERF_COMMAND_TIMEOUT_MARGIN
+
+        for _ in range(retry_count):
+            result = self._client_host.run(iperf_client,
+                                           ignore_status=True,
+                                           ignore_timeout=ignore_failures,
+                                           timeout=timeout)
+            if not result:
+                logging.info('Retrying iperf after empty result.')
+                continue
+
+            # Exit retry loop on success.
+            if not result.exit_status:
+                break
+
+            # We are in an unhandled error case.
+            logging.info('Retrying iperf after an unknown error.')
+
+        if ignore_failures and (result is None or result.exit_status):
+            return None
+
+        if result is None:
+            raise error.TestFail("No results; cmd: %s", iperf_client)
+
+        if result.exit_status:
+            raise error.CmdError(iperf_client, result,
+                                 "Command returned non-zero exit status")
+        # TODO(b:198343041) When iperf2 clients are run too quickly back to
+        # back, the server is unable to distinguish between them. Wait briefly
+        # to allow the server to reset.
+        time.sleep(self.IPERF_CLIENT_TURNDOWN_WAIT_TIME)
+        return IperfResult.from_iperf_output(result.stdout, self._config)
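A hedged usage sketch of the new IperfConfig/IperfRunner pair; `client_proxy`
and `server_proxy` are placeholders for already-constructed proxy objects of
the kind the constructor expects:

    import logging

    from autotest_lib.server.cros.network import iperf_runner

    config = iperf_runner.IperfConfig(
            iperf_runner.IperfConfig.IPERF_TEST_TYPE_TCP_TX)
    # Entering the context starts the iperf server and opens the firewalls;
    # exiting tears both down again.
    with iperf_runner.IperfRunner(client_proxy, server_proxy, config) as runner:
        result = runner.run()
        if result is not None:
            logging.info('Result: %r', result.get_keyval(prefix='tcp_tx'))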
diff --git a/server/cros/network/iperf_session.py b/server/cros/network/iperf_session.py
new file mode 100644
index 0000000..8fc44ab
--- /dev/null
+++ b/server/cros/network/iperf_session.py
@@ -0,0 +1,80 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.server.cros.network import iperf_runner
+
+
+class IperfSession(object):
+    """Runs iperf tests and reports average results."""
+
+    MEASUREMENT_MAX_SAMPLES = 10
+    MEASUREMENT_MAX_FAILURES = 2
+    MEASUREMENT_MIN_SAMPLES = 3
+    MAX_THROUGHPUT_CV = 0.03
+
+    def __init__(self,
+                 client_proxy,
+                 server_proxy,
+                 client_interface=None,
+                 server_interface=None,
+                 ignore_failures=False):
+        """Construct an IperfSession.
+
+        @param client_proxy: LinuxSystem object.
+        @param server_proxy: LinuxSystem object.
+        @param client_interface Interface object.
+        @param server_interface Interface object.
+
+        """
+        self._client_proxy = client_proxy
+        self._server_proxy = server_proxy
+        self._client_interface = client_interface
+        self._server_interface = server_interface
+        self._ignore_failures = ignore_failures
+
+    def run(self, config):
+        """Run multiple iperf tests and take the average performance values.
+
+        @param config IperfConfig.
+
+        """
+
+        logging.info('Performing %s measurements in iperf session.',
+                     config.test_type)
+        history = []
+        failure_count = 0
+        final_result = None
+        with iperf_runner.IperfRunner(self._client_proxy, self._server_proxy,
+                                      config, self._client_interface,
+                                      self._server_interface) as runner:
+            while len(history) + failure_count < self.MEASUREMENT_MAX_SAMPLES:
+                result = runner.run(ignore_failures=self._ignore_failures)
+                if result is None:
+                    failure_count += 1
+                    # Might occur when, e.g., signal strength is too low.
+                    if failure_count > self.MEASUREMENT_MAX_FAILURES:
+                        logging.error('Too many failures (%d), aborting',
+                                      failure_count)
+                        break
+                    continue
+                logging.info('Took iperf %s Measurement: %r', config.test_type,
+                             result)
+
+                history.append(result)
+                if len(history) < self.MEASUREMENT_MIN_SAMPLES:
+                    continue
+
+                final_result = iperf_runner.IperfResult.from_samples(history)
+                if final_result.throughput_cv_less_than_maximum(
+                        self.MAX_THROUGHPUT_CV):
+                    break
+
+        if final_result is None:
+            final_result = iperf_runner.IperfResult.from_samples(history)
+        logging.info('Took averaged measurement from %s iperf %s runs: %r.',
+                     len(history), config.test_type, final_result)
+        return history or None
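IperfSession repeats the measurement until the throughput coefficient of
variation drops below MAX_THROUGHPUT_CV or the sample budget is exhausted. A
short sketch of how a caller might use it, with placeholder proxy objects:

    from autotest_lib.server.cros.network import iperf_runner
    from autotest_lib.server.cros.network import iperf_session

    session = iperf_session.IperfSession(client_proxy, server_proxy)
    config = iperf_runner.IperfConfig(
            iperf_runner.IperfConfig.IPERF_TEST_TYPE_UDP_RX)
    results = session.run(config)  # list of IperfResult samples, or None
    if results:
        summary = iperf_runner.IperfResult.from_samples(results)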
diff --git a/server/cros/network/netperf_runner.py b/server/cros/network/netperf_runner.py
index c8cd7fd..0edda86 100644
--- a/server/cros/network/netperf_runner.py
+++ b/server/cros/network/netperf_runner.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,6 +11,8 @@
 import time
 import os.path
 
+import six
+
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import path_utils
 
@@ -92,6 +95,19 @@
 
             result = NetperfResult(test_type, duration_seconds,
                                    transaction_rate=float(lines[0].split()[5]))
+        elif test_type in NetperfConfig.BIDIRECTIONAL_TESTS:
+            """Parses the following which works for both bidirectional (TCP and UDP)
+            tests and returns the sum of the two throughputs.
+            46.92
+            58.35
+            """
+            if len(lines) < 2:
+                return None
+
+            result = NetperfResult(test_type,
+                                   duration_seconds,
+                                   throughput=float(lines[0]) +
+                                   float(lines[1]))
         else:
             raise error.TestFail('Invalid netperf test type: %r.' % test_type)
 
@@ -101,15 +117,15 @@
 
     @staticmethod
     def _get_stats(samples, field_name):
-        if any(map(lambda x: getattr(x, field_name) is None, samples)):
+        if any([getattr(x, field_name) is None for x in samples]):
             return (None, None)
 
-        values = map(lambda x: getattr(x, field_name), samples)
+        values = [getattr(x, field_name) for x in samples]
         N = len(samples)
         mean = math.fsum(values) / N
         deviation = None
         if N > 1:
-            differences = map(lambda x: math.pow(mean - x, 2), values)
+            differences = [math.pow(mean - x, 2) for x in values]
             deviation = math.sqrt(math.fsum(differences) / (N - 1))
         return mean, deviation
 
@@ -184,10 +200,10 @@
 
     def __repr__(self):
         fields = ['test_type=%s' % self.test_type]
-        fields += ['%s=%0.2f' % item
-                   for item in vars(self).iteritems()
-                   if item[1] is not None
-                   and isinstance(item[1], numbers.Number)]
+        fields += [
+                '%s=%0.2f' % item for item in six.iteritems(vars(self))
+                if item[1] is not None and isinstance(item[1], numbers.Number)
+        ]
         return '%s(%s)' % (self.__class__.__name__, ', '.join(fields))
 
 
@@ -318,10 +334,10 @@
                   'error_max': self.errors_bounds.upper,
                   'transaction_rate_min': self.transaction_rate_bounds.lower,
                   'transaction_rate_max': self.transaction_rate_bounds.upper}
-        return '%s(%s)' % (self.__class__.__name__,
-                           ', '.join(['%s=%r' % item
-                                      for item in fields.iteritems()
-                                      if item[1] is not None]))
+        return '%s(%s)' % (self.__class__.__name__, ', '.join([
+                '%s=%r' % item
+                for item in six.iteritems(fields) if item[1] is not None
+        ]))
 
 
 class NetperfConfig(object):
@@ -357,6 +373,8 @@
     # server to the DUT by running the netperf server on the DUT and the
     # client on the server and then doing a UDP_STREAM test.
     TEST_TYPE_UDP_MAERTS = 'UDP_MAERTS'
+    TEST_TYPE_TCP_BIDIRECTIONAL = 'TCP'
+    TEST_TYPE_UDP_BIDIRECTIONAL = 'UDP'
     # Different kinds of tests have different output formats.
     REQUEST_RESPONSE_TESTS = [ TEST_TYPE_TCP_CRR,
                                TEST_TYPE_TCP_RR,
@@ -366,24 +384,35 @@
                          TEST_TYPE_TCP_STREAM ]
     UDP_STREAM_TESTS = [ TEST_TYPE_UDP_STREAM,
                          TEST_TYPE_UDP_MAERTS ]
+    BIDIRECTIONAL_TESTS = [
+            TEST_TYPE_TCP_BIDIRECTIONAL, TEST_TYPE_UDP_BIDIRECTIONAL
+    ]
 
-    SHORT_TAGS = { TEST_TYPE_TCP_CRR: 'tcp_crr',
-                   TEST_TYPE_TCP_MAERTS: 'tcp_rx',
-                   TEST_TYPE_TCP_RR: 'tcp_rr',
-                   TEST_TYPE_TCP_SENDFILE: 'tcp_stx',
-                   TEST_TYPE_TCP_STREAM: 'tcp_tx',
-                   TEST_TYPE_UDP_RR: 'udp_rr',
-                   TEST_TYPE_UDP_STREAM: 'udp_tx',
-                   TEST_TYPE_UDP_MAERTS: 'udp_rx' }
+    SHORT_TAGS = {
+            TEST_TYPE_TCP_CRR: 'tcp_crr',
+            TEST_TYPE_TCP_MAERTS: 'tcp_rx',
+            TEST_TYPE_TCP_RR: 'tcp_rr',
+            TEST_TYPE_TCP_SENDFILE: 'tcp_stx',
+            TEST_TYPE_TCP_STREAM: 'tcp_tx',
+            TEST_TYPE_UDP_RR: 'udp_rr',
+            TEST_TYPE_UDP_STREAM: 'udp_tx',
+            TEST_TYPE_UDP_MAERTS: 'udp_rx',
+            TEST_TYPE_TCP_BIDIRECTIONAL: 'tcp_tx_rx',
+            TEST_TYPE_UDP_BIDIRECTIONAL: 'udp_tx_rx'
+    }
 
-    READABLE_TAGS = { TEST_TYPE_TCP_CRR: 'tcp_connect_roundtrip_rate',
-                      TEST_TYPE_TCP_MAERTS: 'tcp_downstream',
-                      TEST_TYPE_TCP_RR: 'tcp_roundtrip_rate',
-                      TEST_TYPE_TCP_SENDFILE: 'tcp_upstream_sendfile',
-                      TEST_TYPE_TCP_STREAM: 'tcp_upstream',
-                      TEST_TYPE_UDP_RR: 'udp_roundtrip',
-                      TEST_TYPE_UDP_STREAM: 'udp_upstream',
-                      TEST_TYPE_UDP_MAERTS: 'udp_downstream' }
+    READABLE_TAGS = {
+            TEST_TYPE_TCP_CRR: 'tcp_connect_roundtrip_rate',
+            TEST_TYPE_TCP_MAERTS: 'tcp_downstream',
+            TEST_TYPE_TCP_RR: 'tcp_roundtrip_rate',
+            TEST_TYPE_TCP_SENDFILE: 'tcp_upstream_sendfile',
+            TEST_TYPE_TCP_STREAM: 'tcp_upstream',
+            TEST_TYPE_UDP_RR: 'udp_roundtrip',
+            TEST_TYPE_UDP_STREAM: 'udp_upstream',
+            TEST_TYPE_UDP_MAERTS: 'udp_downstream',
+            TEST_TYPE_TCP_BIDIRECTIONAL: 'tcp_upstream_downstream',
+            TEST_TYPE_UDP_BIDIRECTIONAL: 'udp_upstream_downstream'
+    }
 
 
     @staticmethod
@@ -393,9 +422,10 @@
         @param test_type string test type.
 
         """
-        if (test_type not in NetperfConfig.REQUEST_RESPONSE_TESTS and
-            test_type not in NetperfConfig.TCP_STREAM_TESTS and
-            test_type not in NetperfConfig.UDP_STREAM_TESTS):
+        if (test_type not in NetperfConfig.REQUEST_RESPONSE_TESTS
+                    and test_type not in NetperfConfig.TCP_STREAM_TESTS
+                    and test_type not in NetperfConfig.UDP_STREAM_TESTS
+                    and test_type not in NetperfConfig.BIDIRECTIONAL_TESTS):
             raise error.TestFail('Invalid netperf test type: %r.' % test_type)
 
 
@@ -434,6 +464,10 @@
 
         return self.test_type
 
+    @property
+    def test_type_name(self):
+        """@return string test type name."""
+        return self.test_type
 
     @property
     def server_serves(self):
@@ -481,23 +515,53 @@
     NETPERF_COMMAND_TIMEOUT_MARGIN = 60
 
 
-    def __init__(self, client_proxy, server_proxy, config):
-        """Construct a NetperfRunner.
+    def __init__(self,
+                 client_proxy,
+                 server_proxy,
+                 config,
+                 client_interface=None,
+                 server_interface=None):
+        """Construct a NetperfRunner. Use the IP addresses of the passed interfaces
+        if they are provided. Otherwise, attempt to use the WiFi interface on the devices.
 
         @param client WiFiClient object.
         @param server LinuxSystem object.
+        @param client_interface Interface object.
+        @param server_interface Interface object.
 
         """
         self._client_proxy = client_proxy
         self._server_proxy = server_proxy
+        if server_interface:
+            self._server_ip = server_interface.ipv4_address
+        # If a server interface was not explicitly provided, attempt to use
+        # the WiFi IP of the device.
+        else:
+            try:
+                self._server_ip = server_proxy.wifi_ip
+            except Exception:
+                raise error.TestFail(
+                        'Server device has no WiFi IP address, '
+                        'and no alternate interface was specified.')
+
+        if client_interface:
+            self._client_ip = client_interface.ipv4_address
+        # If a client interface was not explicitly provided, use the WiFi IP
+        # address of the WiFiClient device.
+        else:
+            self._client_ip = client_proxy.wifi_ip
+
         if config.server_serves:
             self._server_host = server_proxy.host
             self._client_host = client_proxy.host
-            self._target_ip = server_proxy.wifi_ip
+            self._target_ip = self._server_ip
+            self._source_ip = self._client_ip
+
         else:
             self._server_host = client_proxy.host
             self._client_host = server_proxy.host
-            self._target_ip = client_proxy.wifi_ip
+            self._target_ip = self._client_ip
+            self._source_ip = self._server_ip
 
         # Assume minijail0 is on ${PATH}, but raise exception if it's not
         # available on both server and client.
@@ -541,8 +605,8 @@
                               (self._minijail, self._command_netserv,
                                self.NETPERF_PORT))
         startup_time = time.time()
-        self._client_proxy.firewall_open('tcp', self._server_proxy.wifi_ip)
-        self._client_proxy.firewall_open('udp', self._server_proxy.wifi_ip)
+        self._client_proxy.firewall_open('tcp', self._server_ip)
+        self._client_proxy.firewall_open('udp', self._server_ip)
         # Wait for the netserv to come up.
         while time.time() - startup_time < self.NETSERV_STARTUP_WAIT_TIME:
             time.sleep(0.1)
@@ -559,19 +623,25 @@
         @return NetperfResult summarizing a netperf run.
 
         """
-        netperf = '%s -H %s -p %s -t %s -l %d -- -P 0,%d' % (
-                self._command_netperf,
-                self._target_ip,
-                self.NETPERF_PORT,
-                self._config.netperf_test_type,
-                self._config.test_time,
-                self.NETPERF_DATA_PORT)
+        if self._config.netperf_test_type in NetperfConfig.BIDIRECTIONAL_TESTS:
+            netperf = 'for i in 1; do %s -H %s -t omni -T %s -l %d -L %s -P 0 -- -R 1 -d stream -s 256K -S 256K -o throughput & %s -H %s -t omni -T %s -l %d -P 0 -L %s -- -R 1 -d maerts -s 256K -S 256K -o throughput; done' % (
+                    self._command_netperf, self._target_ip,
+                    self._config.netperf_test_type, self._config.test_time,
+                    self._source_ip, self._command_netperf, self._target_ip,
+                    self._config.netperf_test_type, self._config.test_time,
+                    self._source_ip)
+        else:
+            netperf = '%s -H %s -p %s -t %s -l %d -L %s -- -P 0,%d -R 1' % (
+                    self._command_netperf, self._target_ip, self.NETPERF_PORT,
+                    self._config.netperf_test_type, self._config.test_time,
+                    self._source_ip, self.NETPERF_DATA_PORT)
         logging.debug('Running netperf client.')
         logging.info('Running netperf for %d seconds.', self._config.test_time)
         timeout = self._config.test_time + self.NETPERF_COMMAND_TIMEOUT_MARGIN
         for _ in range(retry_count):
             start_time = time.time()
-            result = self._client_host.run(netperf, ignore_status=True,
+            result = self._client_host.run(netperf,
+                                           ignore_status=True,
                                            ignore_timeout=ignore_failures,
                                            timeout=timeout)
             if not result:
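The netperf_runner changes above add the two bidirectional test types and
route them through a combined stream/maerts invocation. A hedged sketch of
requesting one of them, with placeholder proxy objects:

    from autotest_lib.server.cros.network import netperf_runner

    config = netperf_runner.NetperfConfig(
            netperf_runner.NetperfConfig.TEST_TYPE_TCP_BIDIRECTIONAL,
            test_time=10)
    with netperf_runner.NetperfRunner(client_proxy, server_proxy,
                                      config) as runner:
        # The parsed NetperfResult carries the sum of the two throughputs.
        result = runner.run()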
diff --git a/server/cros/network/netperf_session.py b/server/cros/network/netperf_session.py
index f2c3a8f..22ef578 100644
--- a/server/cros/network/netperf_session.py
+++ b/server/cros/network/netperf_session.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -31,15 +32,24 @@
         return netperf_runner.NetperfResult.from_samples(samples)
 
 
-    def __init__(self, client_proxy, server_proxy, ignore_failures=False):
+    def __init__(self,
+                 client_proxy,
+                 server_proxy,
+                 client_interface=None,
+                 server_interface=None,
+                 ignore_failures=False):
         """Construct a NetperfSession.
 
         @param client_proxy: WiFiClient object.
         @param server_proxy: LinuxSystem object.
+        @param client_interface Interface object.
+        @param server_interface Interface object.
 
         """
         self._client_proxy = client_proxy
         self._server_proxy = server_proxy
+        self._client_interface = client_interface
+        self._server_interface = server_interface
         self._ignore_failures = ignore_failures
 
 
@@ -70,8 +80,10 @@
         config = netperf_runner.NetperfConfig(
                 test_type, test_time=self.WARMUP_SAMPLE_TIME_SECONDS)
         warmup_history = []
-        with netperf_runner.NetperfRunner(
-                self._client_proxy, self._server_proxy, config) as runner:
+        with netperf_runner.NetperfRunner(self._client_proxy,
+                                          self._server_proxy, config,
+                                          self._client_interface,
+                                          self._server_interface) as runner:
             while len(warmup_history) < self.WARMUP_MAX_SAMPLES:
                 warmup_history.append(runner.run())
                 if len(warmup_history) > 2 * self.WARMUP_WINDOW_SIZE:
@@ -107,8 +119,10 @@
         history = []
         none_count = 0
         final_result = None
-        with netperf_runner.NetperfRunner(
-                self._client_proxy, self._server_proxy, config) as runner:
+        with netperf_runner.NetperfRunner(self._client_proxy,
+                                          self._server_proxy, config,
+                                          self._client_interface,
+                                          self._server_interface) as runner:
             while len(history) + none_count < self.MEASUREMENT_MAX_SAMPLES:
                 result = runner.run(ignore_failures=self._ignore_failures)
                 if result is None:
diff --git a/server/cros/network/packet_capturer.py b/server/cros/network/packet_capturer.py
index eda42be..8ad986d 100644
--- a/server/cros/network/packet_capturer.py
+++ b/server/cros/network/packet_capturer.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/network/perf_test_manager.py b/server/cros/network/perf_test_manager.py
new file mode 100644
index 0000000..7c76d85
--- /dev/null
+++ b/server/cros/network/perf_test_manager.py
@@ -0,0 +1,180 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.network import iperf_runner
+from autotest_lib.server.cros.network import iperf_session
+from autotest_lib.server.cros.network import netperf_runner
+from autotest_lib.server.cros.network import netperf_session
+
+
+class PerfTestTypes(object):
+    """These are the different performance test types that are supported by
+    autotest. The are defined from perspective of the Device Under Test, so for
+    example 'tcp_rx' refers to a performance test of data transfer from a remote
+    server to the DUT using the TCP protocol.
+    """
+    TEST_TYPE_TCP_TX = 'tcp_tx'
+    TEST_TYPE_TCP_RX = 'tcp_rx'
+    TEST_TYPE_TCP_BIDIRECTIONAL = 'tcp_bidirectional'
+    TEST_TYPE_UDP_TX = 'udp_tx'
+    TEST_TYPE_UDP_RX = 'udp_rx'
+    TEST_TYPE_UDP_BIDIRECTIONAL = 'udp_bidirectional'
+
+
+class PerfTestManager(object):
+    """Manager for Performance tests. This class provides a unified API to allow
+    callers run performance tests using the supported tools.
+    """
+
+    # TODO(b:195574472): Add support for iperf in this class.
+
+    DEFAULT_TEST_TIME = 10
+
+    def __init__(self, use_iperf):
+        """Construct a PerfTestManager.
+
+        TODO(b:198343041) iperf2 bidirectional tests are unreliable, so we
+        always use netperf for bidirectional tests.
+
+        @param use_iperf bool True if the tests should use iperf, False if the
+        tests should use netperf.
+        """
+        self._use_iperf = use_iperf
+
+    def get_config(self, test_type, test_time=DEFAULT_TEST_TIME):
+        """Get a config object for a performance tests based on the test
+        type and other parameters. Will return either a NetperfConfig or
+        IperfConfig based on the use_iperf value of the class.
+
+        @param test_type string, test type from PerfTestTypes.
+        @param test_time int number of seconds to run the test for.
+
+        @return NetperfConfig or IperfConfig object.
+        """
+        # (b:198343041): Always use netperf for bidirectional tests.
+        if self._use_iperf and test_type not in [
+                PerfTestTypes.TEST_TYPE_TCP_BIDIRECTIONAL,
+                PerfTestTypes.TEST_TYPE_UDP_BIDIRECTIONAL
+        ]:
+            return iperf_runner.IperfConfig(
+                    self._iperf_type_from_perf_type(test_type))
+        return netperf_runner.NetperfConfig(
+                self._netperf_type_from_perf_type(test_type),
+                test_time=test_time)
+
+    def get_session(self,
+                    test_type,
+                    test_device_proxy,
+                    peer_device_proxy,
+                    test_device_interface=None,
+                    peer_device_interface=None,
+                    ignore_failures=False):
+        """Get a Session object for a set of performance tests. Will return
+        either a NetperfSession or IperfSession based on the use_iperf value of
+        the class.
+
+        @param test_type string, test type from PerfTestTypes.
+        @param test_device_proxy WiFiClient object for the device-under-test.
+        @param peer_device_proxy LinuxSystem object for the performance testing
+        peer of the DUT.
+        @param test_device_interface Interface object for the test device.
+        @param peer_device_interface Interface object for the peer device.
+        @param ignore_failures bool True iff failed runs should be ignored.
+
+        @return NetperfSession or IperfSession object.
+        """
+        # (b:198343041) Always use netperf for bidirectional tests.
+        if self._use_iperf and test_type not in [
+                PerfTestTypes.TEST_TYPE_TCP_BIDIRECTIONAL,
+                PerfTestTypes.TEST_TYPE_UDP_BIDIRECTIONAL
+        ]:
+            if test_type in [
+                    PerfTestTypes.TEST_TYPE_TCP_TX,
+                    PerfTestTypes.TEST_TYPE_UDP_TX
+            ]:
+                return iperf_session.IperfSession(
+                        test_device_proxy,
+                        peer_device_proxy,
+                        client_interface=test_device_interface,
+                        server_interface=peer_device_interface,
+                        ignore_failures=ignore_failures)
+            if test_type in [
+                    PerfTestTypes.TEST_TYPE_TCP_RX,
+                    PerfTestTypes.TEST_TYPE_UDP_RX
+            ]:
+                return iperf_session.IperfSession(
+                        peer_device_proxy,
+                        test_device_proxy,
+                        client_interface=peer_device_interface,
+                        server_interface=test_device_interface,
+                        ignore_failures=ignore_failures)
+
+            raise error.TestFail(
+                    'Test type %s is not supported by this test.' % test_type)
+
+        return netperf_session.NetperfSession(
+                test_device_proxy,
+                peer_device_proxy,
+                client_interface=test_device_interface,
+                server_interface=peer_device_interface,
+                ignore_failures=ignore_failures)
+
+    def get_result(self, results):
+        """Get a single performance result from a list of results.
+
+        @param results list of IperfResults or NetperfResults.
+
+        @return a single IperfResult or NetperfResult which represents the
+        distribution of results.
+        """
+        # All the results will be of the same type, so we can safely check the
+        # first result only.
+        if isinstance(results[0], iperf_runner.IperfResult):
+            return iperf_runner.IperfResult.from_samples(results)
+        if isinstance(results[0], netperf_runner.NetperfResult):
+            return netperf_runner.NetperfResult.from_samples(results)
+        raise error.TestFail('Invalid test result type: %s' % type(results))
+
+    def _netperf_type_from_perf_type(self, test_type):
+        """Convert a performance test type to a netperf test type.
+
+        @param test_type string, test type from PerfTestTypes.
+
+        @return string netperf test type that corresponds to the generic test type.
+        """
+        if test_type == PerfTestTypes.TEST_TYPE_TCP_TX:
+            return netperf_runner.NetperfConfig.TEST_TYPE_TCP_STREAM
+        elif test_type == PerfTestTypes.TEST_TYPE_TCP_RX:
+            return netperf_runner.NetperfConfig.TEST_TYPE_TCP_MAERTS
+        elif test_type == PerfTestTypes.TEST_TYPE_TCP_BIDIRECTIONAL:
+            return netperf_runner.NetperfConfig.TEST_TYPE_TCP_BIDIRECTIONAL
+        elif test_type == PerfTestTypes.TEST_TYPE_UDP_TX:
+            return netperf_runner.NetperfConfig.TEST_TYPE_UDP_STREAM
+        elif test_type == PerfTestTypes.TEST_TYPE_UDP_RX:
+            return netperf_runner.NetperfConfig.TEST_TYPE_UDP_MAERTS
+        elif test_type == PerfTestTypes.TEST_TYPE_UDP_BIDIRECTIONAL:
+            return netperf_runner.NetperfConfig.TEST_TYPE_UDP_BIDIRECTIONAL
+        raise error.TestFail(
+                'Test type %s is not supported by netperf_runner.' % test_type)
+
+    def _iperf_type_from_perf_type(self, test_type):
+        """Convert a performance test type to an iperf test type
+
+        @param test_type string, test type from PerfTestTypes.
+
+        @return string iperf test type that corresponds to the generic test type.
+        """
+        if test_type == PerfTestTypes.TEST_TYPE_TCP_TX:
+            return iperf_runner.IperfConfig.IPERF_TEST_TYPE_TCP_TX
+        elif test_type == PerfTestTypes.TEST_TYPE_TCP_RX:
+            return iperf_runner.IperfConfig.IPERF_TEST_TYPE_TCP_RX
+        elif test_type == PerfTestTypes.TEST_TYPE_TCP_BIDIRECTIONAL:
+            return iperf_runner.IperfConfig.IPERF_TEST_TYPE_TCP_BIDIRECTIONAL
+        elif test_type == PerfTestTypes.TEST_TYPE_UDP_TX:
+            return iperf_runner.IperfConfig.IPERF_TEST_TYPE_UDP_TX
+        elif test_type == PerfTestTypes.TEST_TYPE_UDP_RX:
+            return iperf_runner.IperfConfig.IPERF_TEST_TYPE_UDP_RX
+        elif test_type == PerfTestTypes.TEST_TYPE_UDP_BIDIRECTIONAL:
+            return iperf_runner.IperfConfig.IPERF_TEST_TYPE_UDP_BIDIRECTIONAL
+        raise error.TestFail(
+                'Test type %s is not supported by iperf_runner.' % test_type)
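A hedged end-to-end sketch of the new PerfTestManager API; `dut_proxy` and
`peer_proxy` stand in for the WiFiClient and LinuxSystem proxies described in
the docstrings:

    from autotest_lib.server.cros.network import perf_test_manager as perf_manager

    manager = perf_manager.PerfTestManager(use_iperf=True)
    test_type = perf_manager.PerfTestTypes.TEST_TYPE_TCP_TX
    config = manager.get_config(test_type)
    session = manager.get_session(test_type, dut_proxy, peer_proxy)
    results = session.run(config)
    if results:
        summary = manager.get_result(results)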
diff --git a/server/cros/network/rf_switch/rf_switch.py b/server/cros/network/rf_switch/rf_switch.py
index a0b4384..d8cd5be 100644
--- a/server/cros/network/rf_switch/rf_switch.py
+++ b/server/cros/network/rf_switch/rf_switch.py
@@ -221,7 +221,7 @@
 
         """
         attenuations = []
-        for x in xrange(self._MIN_ENCLOSURE, self._MAX_ENCLOSURE + 1):
+        for x in range(self._MIN_ENCLOSURE, self._MAX_ENCLOSURE + 1):
             attenuations.append(self.get_attenuation(x))
         return tuple(attenuations)
 
@@ -236,7 +236,7 @@
         """
         if ap_enclosure == self._ALL_ENCLOSURE:
             # set attenuation on all
-            for x in xrange(self._MIN_ENCLOSURE, self._MAX_ENCLOSURE + 1):
+            for x in range(self._MIN_ENCLOSURE, self._MAX_ENCLOSURE + 1):
                 self.set_attenuation(x, attenuation)
         elif (ap_enclosure < self._MIN_ENCLOSURE or
               ap_enclosure > self._MAX_ENCLOSURE):
diff --git a/server/cros/network/rf_switch/rf_switch_unittest.py b/server/cros/network/rf_switch/rf_switch_unittest.py
index 62e84db..9f3be18 100644
--- a/server/cros/network/rf_switch/rf_switch_unittest.py
+++ b/server/cros/network/rf_switch/rf_switch_unittest.py
@@ -231,7 +231,7 @@
         """Verify we can set same attenuation to all."""
 
         # 0 should close all relays
-        for x in xrange(rf_switch.RfSwitch._MAX_ENCLOSURE):
+        for x in range(rf_switch.RfSwitch._MAX_ENCLOSURE):
             relays = ','.join(rf_switch.RfSwitch._AP_ATTENUATOR_RELAYS[x + 1])
             reverse_relays = ','.join(
                 rf_switch.RfSwitch._AP_ATTENUATOR_RELAYS[x + 1][::-1])
@@ -242,7 +242,7 @@
         self.mock_rf_switch.set_attenuation(0, 0)
 
         # 127 should open all (close none)
-        for x in xrange(rf_switch.RfSwitch._MAX_ENCLOSURE):
+        for x in range(rf_switch.RfSwitch._MAX_ENCLOSURE):
             relays = ','.join(rf_switch.RfSwitch._AP_ATTENUATOR_RELAYS[x + 1])
             self._populate_stack_for_cmd(
                 '%s (@%s)\n' % (rf_switch.RfSwitch._CMD_OPEN_RELAYS, relays))
diff --git a/server/cros/network/rf_switch_utils_test.py b/server/cros/network/rf_switch_utils_test.py
index e66ed1c..6db287e 100644
--- a/server/cros/network/rf_switch_utils_test.py
+++ b/server/cros/network/rf_switch_utils_test.py
@@ -1,9 +1,9 @@
 """Tests for rf_switch_utils."""
 
-import common
 import unittest
+from unittest import mock
 
-import mock
+import common
 
 from autotest_lib.server.cros.network import rf_switch_utils
 
diff --git a/server/cros/network/telnet_helper.py b/server/cros/network/telnet_helper.py
index cef88e3..fcfec82 100644
--- a/server/cros/network/telnet_helper.py
+++ b/server/cros/network/telnet_helper.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,6 +11,14 @@
 SHORT_TIMEOUT = 2
 LONG_TIMEOUT = 30
 
+def _ascii_string(uc_string):
+    """Returns ascii string given unicode string.
+
+    @param uc_string: Unicode string
+
+    """
+    return str(uc_string).encode('ASCII')
+
 class TelnetHelper(object):
     """Helper class to run basic string commands on a telnet host."""
 
@@ -62,12 +71,12 @@
 
         cmd_str.strip(self._tx_cmd_separator)
         try:
-            self._tn.read_until(self._prompt, SHORT_TIMEOUT)
+            self._tn.read_until(_ascii_string(self._prompt), SHORT_TIMEOUT)
         except EOFError as e:
             raise error.TestError("Connection closed. EOFError (%s)" % e)
 
         try:
-            self._tn.write(cmd_str + self._tx_cmd_separator)
+            self._tn.write(_ascii_string(cmd_str + self._tx_cmd_separator))
         except socket.error as e:
             raise error.TestError("Connection closed. Socket error (%s)." % e)
 
@@ -75,9 +84,8 @@
             return None
 
         try:
-            match_channel_idx, _, ret_text = \
-                    self._tn.expect(["\S+" + self._rx_cmd_separator],
-                                    SHORT_TIMEOUT)
+            match_channel_idx, _, ret_text = self._tn.expect(
+                [_ascii_string("\S+" + self._rx_cmd_separator)], SHORT_TIMEOUT)
         except EOFError as e:
             raise error.TestError("Connection closed. EOFError (%s)" % e)
 
@@ -85,6 +93,7 @@
             raise error.TestError("Telnet command failed to return valid data. "
                                   "Data returned: %s" % ret_text)
 
+        ret_text = ret_text.decode()
         ret_text = ret_text.strip()
 
-        return ret_text
+        return ret_text
\ No newline at end of file
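The telnet_helper changes above encode outgoing commands and decode replies
because Python 3's telnetlib operates on bytes. A minimal illustration of that
round trip, independent of TelnetHelper:

    def _ascii_string(uc_string):
        return str(uc_string).encode('ASCII')

    cmd = _ascii_string('version\n')   # b'version\n', what telnetlib expects
    reply = b'1.2.3\r\n'               # what telnetlib hands back
    text = reply.decode().strip()      # '1.2.3'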
diff --git a/server/cros/network/wifi_cell_perf_test_base.py b/server/cros/network/wifi_cell_perf_test_base.py
new file mode 100644
index 0000000..be11651
--- /dev/null
+++ b/server/cros/network/wifi_cell_perf_test_base.py
@@ -0,0 +1,75 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros.network import ping_runner
+from autotest_lib.server.cros.network import ip_config_context_manager
+from autotest_lib.server.cros.network import wifi_cell_test_base
+
+
+class WiFiCellPerfTestBase(wifi_cell_test_base.WiFiCellTestBase):
+    """An abstract base class for autotests in WiFi performance cells.
+
+    Similar to WiFiCellTestBase with one major exception:
+
+    The pcap device is also used as an endpoint in performance tests, so the
+    router and pcap device must have a direct Ethernet connection over their LAN
+    ports in a WiFiCellPerfTestBase.
+    """
+
+    def configure_and_connect_to_ap(self, ap_config):
+        """Configure the router as an AP with the given config and connect
+        the DUT to it.
+
+        @param ap_config HostapConfig object.
+
+        @return name of the configured AP
+        """
+        # self.context.configure has a similar check - but that one only
+        # errors out if the AP *requires* VHT, i.e., the AP requests
+        # MODE_11AC_PURE and the client does not support it.
+        # For performance tests we don't want to run MODE_11AC_MIXED on the AP if
+        # the client does not support VHT, as we are guaranteed to get the
+        # same results at 802.11n/HT40 in that case.
+        if ap_config.is_11ac and not self.context.client.is_vht_supported():
+            raise error.TestNAError('Client does not have AC support')
+        return super(WiFiCellPerfTestBase,
+                     self).configure_and_connect_to_ap(ap_config)
+
+    def _verify_additional_setup_requirements(self):
+        """Ensure that the router and pcap device in the cell have a direct
+        connection available over their respective LAN ports. Raises a test NA
+        error if this connection cannot be verified.
+        """
+        router_lan_ip_addr = "192.168.1.50"
+        pcap_lan_ip_addr = "192.168.1.51"
+        router_lan_iface_name = "eth1"
+        pcap_lan_iface_name = "eth1"
+
+        with ip_config_context_manager.IpConfigContextManager() as ip_context:
+            try:
+                ip_context.bring_interface_up(self.context.router.host,
+                                              router_lan_iface_name)
+                ip_context.bring_interface_up(self.context.pcap_host.host,
+                                              pcap_lan_iface_name)
+                ip_context.assign_ip_addr_to_iface(self.context.router.host,
+                                                   router_lan_ip_addr,
+                                                   router_lan_iface_name)
+                ip_context.assign_ip_addr_to_iface(self.context.pcap_host.host,
+                                                   pcap_lan_ip_addr,
+                                                   pcap_lan_iface_name)
+                ping_config = ping_runner.PingConfig(
+                        pcap_lan_ip_addr,
+                        count=5,
+                        source_iface=router_lan_iface_name,
+                        ignore_result=True)
+                ping_result = self.context.router.ping(ping_config)
+                if ping_result.received == 0:
+                    raise Exception("Ping failed (%s)" % (ping_result))
+            except Exception as e:
+                raise error.TestNAError(
+                        'Could not verify connection between router and pcap '
+                        'devices. Router and pcap device must have a direct '
+                        'Ethernet connection over their LAN ports to run '
+                        'performance tests: %s' % (e))
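
Editor's note: for context, here is a hypothetical performance test (not part of this change) built on the new base class. The LAN check above runs automatically during setup, so the test body only configures the AP and connects; the test name and HostapConfig parameters are illustrative and not a complete 11ac configuration.

    from autotest_lib.server.cros.network import hostap_config
    from autotest_lib.server.cros.network import wifi_cell_perf_test_base


    class network_WiFi_ExamplePerf(wifi_cell_perf_test_base.WiFiCellPerfTestBase):
        """Illustrative perf test: associate, then measure between DUT and pcap."""
        version = 1

        def run_once(self):
            ap_config = hostap_config.HostapConfig(
                    channel=36,
                    mode=hostap_config.HostapConfig.MODE_11AC_MIXED)
            # Raises TestNA early if the DUT lacks VHT support (see the check above).
            self.configure_and_connect_to_ap(ap_config)
            # ... run the throughput measurement against the pcap endpoint ...
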
diff --git a/server/cros/network/wifi_cell_test_base.py b/server/cros/network/wifi_cell_test_base.py
index 8e8233f..5b2d2c1 100644
--- a/server/cros/network/wifi_cell_test_base.py
+++ b/server/cros/network/wifi_cell_test_base.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -71,6 +72,7 @@
                 self.debugdir)
 
         self._wifi_context.setup(pcap_as_router=pcap_as_router)
+        self._verify_additional_setup_requirements()
         self.parse_additional_arguments(cmdline_args, additional_params)
 
         msg = '======= WiFi autotest setup complete. Starting test... ======='
@@ -96,6 +98,13 @@
         """
         self.context.configure(ap_config)
         ap_ssid = self.context.router.get_ssid()
-        assoc_params = xmlrpc_datatypes.AssociationParameters(ssid=ap_ssid)
+        assoc_params = xmlrpc_datatypes.AssociationParameters(
+                ssid=ap_ssid, security_config=ap_config.security_config)
         self.context.assert_connect_wifi(assoc_params)
         return ap_ssid
+
+    def _verify_additional_setup_requirements(self):
+        """Subclasses can override this method to do any additional checking
+        of the physical testing setup that they require.
+        """
+        pass
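
Editor's note: the new _verify_additional_setup_requirements() is a template-method hook: the base class calls it during setup with a no-op default, and only subclasses with extra physical requirements override it. A standalone sketch of the pattern (the class and method bodies below are illustrative, not the autotest classes):

    class CellTestBase(object):
        """Illustrative stand-in for the base class's setup flow."""

        def setup(self):
            self._build_context()
            self._verify_additional_setup_requirements()   # hook, no-op by default

        def _build_context(self):
            print('common cell setup')

        def _verify_additional_setup_requirements(self):
            pass


    class CellPerfTest(CellTestBase):
        def _verify_additional_setup_requirements(self):
            print('perf-only check: router and pcap share a LAN link')


    CellPerfTest().setup()
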
diff --git a/server/cros/network/wifi_client.py b/server/cros/network/wifi_client.py
index 8cad7a0..834628a 100644
--- a/server/cros/network/wifi_client.py
+++ b/server/cros/network/wifi_client.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -106,7 +107,7 @@
 
     # List of interface names we won't consider for use as "the" WiFi interface
     # on Android or CastOS hosts.
-    WIFI_IF_BLACKLIST = ['p2p0', 'wfd0']
+    WIFI_IF_BLOCKLIST = ['p2p0', 'wfd0']
 
     UNKNOWN_BOARD_TYPE = 'unknown'
 
@@ -132,8 +133,7 @@
         if not uname_result.exit_status and uname_result.stdout.find(' ') < 0:
             kernel_arch = uname_result.stdout.strip()
         cpu_info = self.host.run('cat /proc/cpuinfo').stdout.splitlines()
-        cpu_count = len(filter(lambda x: x.lower().startswith('bogomips'),
-                               cpu_info))
+        cpu_count = len([x for x in cpu_info if x.lower().startswith('bogomips')])
         cpu_count_str = ''
         if cpu_count:
             cpu_count_str = 'x%d' % cpu_count
@@ -154,7 +154,7 @@
         result = self.host.run("iw dev %s get power_save" % self.wifi_if)
         output = result.stdout.rstrip()       # NB: chop \n
         # Output should be either "Power save: on" or "Power save: off".
-        find_re = re.compile('([^:]+):\s+(\w+)')
+        find_re = re.compile(r'([^:]+):\s+(\w+)')
         find_results = find_re.match(output)
         if not find_results:
             raise error.TestFail('Failed to find power_save parameter '
@@ -328,7 +328,7 @@
         super(WiFiClient, self).__init__(client_host, 'client',
                                          inherit_interfaces=True)
         self._command_ip = 'ip'
-        self._command_iptables = 'iptables'
+        self._command_iptables = 'iptables -w 5'
         self._command_ping6 = 'ping6'
         self._command_wpa_cli = 'wpa_cli'
         self._machine_id = None
@@ -339,7 +339,7 @@
             # Look up the WiFi device (and its MAC) on the client.
             devs = self.iw_runner.list_interfaces(desired_if_type='managed')
             devs = [dev for dev in devs
-                    if dev.if_name not in self.WIFI_IF_BLACKLIST]
+                    if dev.if_name not in self.WIFI_IF_BLOCKLIST]
             if not devs:
                 raise error.TestFail('No wlan devices found on %s.' %
                                      self.host.hostname)
@@ -367,7 +367,6 @@
         self._wpa_mon = wpa_mon.WpaMon(self.host, self.wifi_if)
         logging.debug('WiFi interface is: %r',
                       self._interface.device_description)
-        self._firewall_rules = []
         # All tests that use this object assume the interface starts enabled.
         self.set_device_enabled(self._wifi_if, True)
         # Turn off powersave mode by default.
@@ -451,33 +450,6 @@
         self.shill.clean_profiles()
         super(WiFiClient, self).close()
 
-
-    def firewall_open(self, proto, src):
-        """Opens up firewall to run netperf tests.
-
-        By default, we have a firewall rule for NFQUEUE (see crbug.com/220736).
-        In order to run netperf test, we need to add a new firewall rule BEFORE
-        this NFQUEUE rule in the INPUT chain.
-
-        @param proto a string, test traffic protocol, e.g. udp, tcp.
-        @param src a string, subnet/mask.
-
-        @return a string firewall rule added.
-
-        """
-        rule = 'INPUT -s %s/32 -p %s -m %s -j ACCEPT' % (src, proto, proto)
-        self.host.run('%s -I %s' % (self._command_iptables, rule))
-        self._firewall_rules.append(rule)
-        return rule
-
-
-    def firewall_cleanup(self):
-        """Cleans up firewall rules."""
-        for rule in self._firewall_rules:
-            self.host.run('%s -D %s' % (self._command_iptables, rule))
-        self._firewall_rules = []
-
-
     def sync_host_times(self):
         """Set time on our DUT to match local time."""
         epoch_seconds = time.time()
@@ -625,7 +597,7 @@
             """
             is_requested_bss = lambda iw_bss: iw_bss.bss == bssid
             scan_results = self.iw_runner.scan(self.wifi_if)
-            return scan_results and filter(is_requested_bss, scan_results)
+            return scan_results and list(filter(is_requested_bss, scan_results))
         try:
             utils.poll_for_condition(
                 condition=dut_sees_bss,
@@ -636,7 +608,7 @@
 
 
     def wait_for_bsses(self, ssid, num_bss_expected, timeout_seconds=15):
-      """Wait for all BSSes associated with given SSID to be discovered in the
+        """Wait for all BSSes associated with given SSID to be discovered in the
       scan.
 
       @param ssid string name of network being queried
@@ -644,41 +616,42 @@
       @param timeout_seconds int seconds to wait for BSSes to be discovered
 
       """
-      # If the scan returns None, return 0, else return the matching count
+        # If the scan returns None, return 0, else return the matching count
 
-      # Wrap num_bss_actual as a mutable object, list, so that an inner function
-      # can update the value without making an assignment to it. Without any
-      # assignment, the inner function will look for the variable in outer scope
-      # instead of creating a new local one.
-      num_bss_actual = [0]
-      def are_all_bsses_discovered():
-          """Determine if all BSSes associated with the SSID from parent
+        # Wrap num_bss_actual as a mutable object, list, so that an inner function
+        # can update the value without making an assignment to it. Without any
+        # assignment, the inner function will look for the variable in outer scope
+        # instead of creating a new local one.
+        num_bss_actual = [0]
+
+        def are_all_bsses_discovered():
+            """Determine if all BSSes associated with the SSID from parent
           function are discovered in the scan
 
           @return boolean representing whether the expected bss count matches
           how many in the scan match the given ssid
           """
-          self.claim_wifi_if() # Stop shill/supplicant scans
-          try:
-            scan_results = self.iw_runner.scan(
-                    self.wifi_if,
-                    frequencies=[],
-                    ssids=[ssid])
-            if scan_results is None:
-                return False
-            num_bss_actual[0] = sum(ssid == bss.ssid for bss in scan_results)
-            return num_bss_expected == num_bss_actual[0]
-          finally:
-            self.release_wifi_if()
-      try:
-          utils.poll_for_condition(
-              condition=are_all_bsses_discovered,
-              timeout=timeout_seconds,
-              sleep_interval=0.5)
-      except utils.TimeoutError:
-          raise error.TestFail('Failed to discover all BSSes. Found %d,'
-                               ' wanted %d with SSID %s' %
-                               (num_bss_actual[0], num_bss_expected, ssid))
+            self.claim_wifi_if()  # Stop shill/supplicant scans
+            try:
+                scan_results = self.iw_runner.scan(self.wifi_if,
+                                                   frequencies=[],
+                                                   ssids=[ssid])
+                if scan_results is None:
+                    return False
+                num_bss_actual[0] = sum(ssid == bss.ssid
+                                        for bss in scan_results)
+                return num_bss_expected == num_bss_actual[0]
+            finally:
+                self.release_wifi_if()
+
+        try:
+            utils.poll_for_condition(condition=are_all_bsses_discovered,
+                                     timeout=timeout_seconds,
+                                     sleep_interval=0.5)
+        except utils.TimeoutError:
+            raise error.TestFail('Failed to discover all BSSes. Found %d,'
+                                 ' wanted %d with SSID %s' %
+                                 (num_bss_actual[0], num_bss_expected, ssid))
 
     def wait_for_service_states(self, ssid, states, timeout_seconds):
         """Waits for a WiFi service to achieve one of |states|.
@@ -730,16 +703,16 @@
         return result.stdout, result.stderr
 
 
-    def clear_supplicant_blacklist(self):
-        """Clear's the AP blacklist on the DUT.
+    def clear_supplicant_blocklist(self):
+        """Clear's the AP blocklist on the DUT.
 
         @return stdout and stderror returns passed from wpa_cli command.
 
         """
-        result = self._wpa_cli_proxy.run_wpa_cli_cmd('blacklist clear',
-                                                     check_result=False);
-        logging.info('wpa_cli blacklist clear: out:%r err:%r', result.stdout,
-                     result.stderr)
+        result = self._wpa_cli_proxy.run_wpa_cli_cmd('bssid_ignore clear',
+                                                     check_result=False)
+        logging.info('wpa_cli bssid_ignore clear: out:%r err:%r',
+                     result.stdout, result.stderr)
         return result.stdout, result.stderr
 
 
@@ -1057,8 +1030,13 @@
             logging.info('Reassociate time: %.2f seconds', reassociate_time)
 
 
-    def wait_for_connection(self, ssid, timeout_seconds=30, freq=None,
-                            ping_ip=None, desired_subnet=None):
+    def wait_for_connection(self,
+                            ssid,
+                            timeout_seconds=30,
+                            freq=None,
+                            ping_ip=None,
+                            desired_subnet=None,
+                            source_iface=None):
         """Verifies a connection to network ssid, optionally verifying
         frequency, ping connectivity and subnet.
 
@@ -1116,7 +1094,8 @@
                     return False
 
             if ping_ip:
-                ping_config = ping_runner.PingConfig(ping_ip)
+                ping_config = ping_runner.PingConfig(ping_ip,
+                                                     source_iface=source_iface)
                 self.ping(ping_config)
 
             return ConnectTime(state[0], conn_time)
@@ -1202,17 +1181,17 @@
 
         lines = result.stdout.strip().split('\n')
         disconnect_reasons = []
-        disconnect_reason_regex = re.compile(' to (\D?\d+)')
+        disconnect_reason_regex = re.compile(r' to (\D?\d+)')
 
         found = False
         for line in reversed(lines):
-          match = disconnect_reason_regex.search(line)
-          if match is not None:
-            disconnect_reasons.append(match.group(1))
-            found = True
-          else:
-            if (found):
-                break
+            match = disconnect_reason_regex.search(line)
+            if match is not None:
+                disconnect_reasons.append(match.group(1))
+                found = True
+            else:
+                if (found):
+                    break
         return list(reversed(disconnect_reasons))
 
 
@@ -1281,9 +1260,9 @@
         # where 1941 is an arbitrary PID number. By checking if the last
         # instance of this message contains the substring "not connected", we
         # can determine whether or not shill was connected on its last resume.
-        connection_status_log_regex_str = 'INFO:wifi\.cc.*OnAfterResume'
-        not_connected_substr = 'not connected'
-        connected_substr = 'connected'
+        connection_status_log_regex_str = str(r'INFO:wifi\.cc.*OnAfterResume')
+        not_connected_substr = str(r'not connected')
+        connected_substr = str(r'connected')
 
         cmd = ('grep -E %s /var/log/net.log | tail -1' %
                connection_status_log_regex_str)
@@ -1314,7 +1293,7 @@
         # wake on WiFi was throttled. This is an example of the error message:
         #     [...] [ERROR:wake_on_wifi.cc(1304)] OnDarkResume: Too many dark \
         #       resumes; disabling wake on WiFi temporarily
-        dark_resume_log_regex_str = 'ERROR:wake_on_wifi\.cc.*OnDarkResume:.*'
+        dark_resume_log_regex_str = str(r'ERROR:wake_on_wifi\.cc.*OnDarkResume:.*')
         throttled_msg_substr = ('Too many dark resumes; disabling wake on '
                                    'WiFi temporarily')
 
diff --git a/server/cros/network/wifi_test_context_manager.py b/server/cros/network/wifi_test_context_manager.py
index e0feb94..4ef8172 100644
--- a/server/cros/network/wifi_test_context_manager.py
+++ b/server/cros/network/wifi_test_context_manager.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -189,7 +190,8 @@
             self.client.start_capture(ap_config.frequency,
                                       snaplen=self._packet_capture_snaplen)
         if self._enable_packet_captures:
-           self.capture_host.start_capture(ap_config.frequency,
+            self.capture_host.start_capture(
+                    ap_config.frequency,
                     width_type=ap_config.packet_capture_mode,
                     snaplen=self._packet_capture_snaplen)
 
@@ -357,7 +359,8 @@
             ap_num = 0
         if ping_config is None:
             ping_ip = self.router.get_wifi_ip(ap_num=ap_num)
-            ping_config = ping_runner.PingConfig(ping_ip)
+            wifi_if = self.client.wifi_if
+            ping_config = ping_runner.PingConfig(ping_ip, source_iface=wifi_if)
         self.client.ping(ping_config)
 
 
@@ -393,6 +396,16 @@
             ap_num = 0
         desired_subnet = self.router.get_wifi_ip_subnet(ap_num)
         wifi_ip = self.router.get_wifi_ip(ap_num)
-        return self.client.wait_for_connection(
-                ssid, timeout_seconds=timeout_seconds, freq=freq,
-                ping_ip=wifi_ip, desired_subnet=desired_subnet)
+        wifi_if = self.client.wifi_if
+
+        # The ping command used to verify the WiFi connection should bind to
+        # the WiFi interface, so that ping packets won't go through the
+        # Ethernet interface in some scenarios.
+        # See b/199940334: autotest: wifi_client's wait_for_connection
+        # does not consider NUD state.
+        return self.client.wait_for_connection(ssid,
+                                               timeout_seconds=timeout_seconds,
+                                               freq=freq,
+                                               ping_ip=wifi_ip,
+                                               desired_subnet=desired_subnet,
+                                               source_iface=wifi_if)
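
Editor's note: the comment above explains why the connection check now passes source_iface to PingConfig: binding the ping to the WiFi interface keeps replies from being satisfied over an Ethernet path. On Linux this corresponds to ping's -I option; a rough sketch of such an invocation (interface name and target are placeholders, and this is not the ping_runner implementation):

    import subprocess

    def ping_via_iface(target_ip, iface, count=3):
        # -I pins the source to the given interface, mirroring source_iface above.
        cmd = ['ping', '-I', iface, '-c', str(count), target_ip]
        return subprocess.run(cmd, capture_output=True, text=True).returncode == 0

    # ping_via_iface('192.168.87.1', 'wlan0')
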
diff --git a/server/cros/network/wpa_cli_proxy.py b/server/cros/network/wpa_cli_proxy.py
index 9df2403..8cd7007 100644
--- a/server/cros/network/wpa_cli_proxy.py
+++ b/server/cros/network/wpa_cli_proxy.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,6 +8,8 @@
 import re
 import time
 
+import six
+
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
@@ -35,16 +38,19 @@
     CROS_CMD_FORMAT = ('su wpa -s /bin/bash '
                        '-c "/usr/bin/wpa_cli -i {0[ifname]} {0[cmd]}"')
     CAST_CMD_FORMAT = '/system/bin/wpa_cli -i {0[ifname]} {0[cmd]}'
+    RPI_CMD_FORMAT = '/sbin/wpa_cli -i {0[ifname]} {0[cmd]}'
 
 
-    def __init__(self, host, wifi_if):
+    def __init__(self, host, wifi_if, RPi=False):
         self._host = host
         self._wifi_if = wifi_if
         self._created_networks = {}
+        if RPi:
+            self._wpa_cli_cmd_format = self.RPI_CMD_FORMAT
         # TODO(wiley) Hardcoding this IFNAME prefix makes some big assumptions.
         #             we'll need to discover this parameter as it becomes more
         #             generally useful.
-        if host.get_os_type() == 'android':
+        elif host.get_os_type() == 'android':
             self._wpa_cli_cmd_format = self.ANDROID_CMD_FORMAT
         elif host.get_os_type() == 'brillo':
             self._wpa_cli_cmd_format = self.BRILLO_CMD_FORMAT
@@ -71,7 +77,7 @@
         return network_id
 
 
-    def run_wpa_cli_cmd(self, command, check_result=True):
+    def run_wpa_cli_cmd(self, command, if_name=None, check_result=True):
         """
         Run a wpa_cli command and optionally check the result.
 
@@ -80,13 +86,18 @@
 
         @param command string: suffix of a command to be prefixed with
                 an appropriate wpa_cli for this host.
+        @param if_name string: interface name. The wifi interface
+                (self._wifi_if) is used if if_name is not specified.
         @param check_result bool: True iff we want to check that the
                 command comes back with an 'OK' response.
         @return result object returned by host.run.
 
         """
-        cmd = self._wpa_cli_cmd_format.format(
-                {'ifname' : self._wifi_if, 'cmd' : command})
+        iface = if_name if if_name else self._wifi_if
+        cmd = self._wpa_cli_cmd_format.format({
+                'ifname': iface,
+                'cmd': command
+        })
         result = self._host.run(cmd)
         if check_result and not result.stdout.strip().endswith('OK'):
             raise error.TestFail('wpa_cli command failed: %s' % command)
@@ -244,7 +255,7 @@
                                  (network_id, 'scan_ssid', '1'))
 
         sec_config = assoc_params.security_config
-        for field, value in sec_config.get_wpa_cli_properties().iteritems():
+        for field, value in six.iteritems(sec_config.get_wpa_cli_properties()):
             self.run_wpa_cli_cmd('set_network %d %s %s' %
                                  (network_id, field, value))
         self.run_wpa_cli_cmd('select_network %d' % network_id)
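
Editor's note: the wpa_cli command templates above (including the new RPI_CMD_FORMAT) rely on str.format being handed a single dict and indexed with {0[key]}; a tiny standalone illustration with placeholder values:

    RPI_CMD_FORMAT = '/sbin/wpa_cli -i {0[ifname]} {0[cmd]}'

    cmd = RPI_CMD_FORMAT.format({'ifname': 'wlan0', 'cmd': 'status'})
    print(cmd)   # /sbin/wpa_cli -i wlan0 status
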
diff --git a/server/cros/network/wpa_mon.py b/server/cros/network/wpa_mon.py
index 75768dc..33ee1c2 100644
--- a/server/cros/network/wpa_mon.py
+++ b/server/cros/network/wpa_mon.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -21,14 +22,14 @@
     CTRL_EVENT_SCAN_RESULTS = 'CTRL-EVENT-SCAN-RESULTS'
     CTRL_EVENT_BSS_ADDED = 'CTRL-EVENT-BSS-ADDED'
 
-    ROAM_MATCH = ' cur_bssid=([\da-fA-F:]+) cur_freq=(\d+) ' \
-                 'cur_level=([\d-]+) cur_est=(\d+) ' \
-                 'sel_bssid=([\da-fA-F:]+) sel_freq=(\d+) ' \
-                 'sel_level=([\d-]+) sel_est=(\d+)'
-    DISCONNECT_MATCH = ' bssid=([\da-fA-F:]+) reason=(\d+)' \
-                       '(?: locally_generated=(1))?'
-    SCAN_RESULTS_MATCH = '()'
-    BSS_ADDED_MATCH = ' ([\d]+) ([\da-fA-F:]+)'
+    ROAM_MATCH = str(r' cur_bssid=([\da-fA-F:]+) cur_freq=(\d+) ' \
+                 r'cur_level=([\d-]+) cur_est=(\d+) ' \
+                 r'sel_bssid=([\da-fA-F:]+) sel_freq=(\d+) ' \
+                 r'sel_level=([\d-]+) sel_est=(\d+)')
+    DISCONNECT_MATCH = str(r' bssid=([\da-fA-F:]+) reason=(\d+)' \
+                       '(?: locally_generated=(1))?')
+    SCAN_RESULTS_MATCH = str(r'()')
+    BSS_ADDED_MATCH = str(r' ([\d]+) ([\da-fA-F:]+)')
 
     Roam = namedtuple('Roam',
                       ['cur_bssid', 'cur_freq', 'cur_level', 'cur_est',
@@ -70,9 +71,9 @@
         # encounters an EOF. Using `cat` or the PIPE address type would close
         # the input stream after the first write, instructing socat to tear
         # everything else down.
-        command = "nohup sudo -u wpa -g wpa socat SYSTEM:'mkfifo %s; " \
-                  "tail -f %s'\!\!STDOUT UNIX-CONNECT:%s,type=2,bind=%s " \
-                  "</dev/null >%s 2>&1 & echo $!" % \
+        command = r"nohup sudo -u wpa -g wpa socat SYSTEM:'mkfifo %s; " \
+                  r"tail -f %s'\!\!STDOUT UNIX-CONNECT:%s,type=2,bind=%s " \
+                  r"</dev/null >%s 2>&1 & echo $!" % \
                   (self._pipe, self._pipe, self._dest, local, self._log_path)
         out_lines = self._host.run(command).stdout.splitlines()
         pid = int(out_lines[0])
@@ -181,7 +182,7 @@
         for match in matches:
             obj = self.EVENT_MATCH_DICT[event].obj(*match)
             does_match = True
-            for attr, val in attrs.items():
+            for attr, val in list(attrs.items()):
                 if getattr(obj, attr) != val:
                     does_match = False
                     break
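
Editor's note: the raw-string patterns above can be exercised against a typical wpa_supplicant control-interface event line. The sample line below is illustrative, but the three capture groups correspond to the bssid, reason, and locally_generated fields the monitor extracts:

    import re

    DISCONNECT_MATCH = (r' bssid=([\da-fA-F:]+) reason=(\d+)'
                        r'(?: locally_generated=(1))?')

    line = '<3>CTRL-EVENT-DISCONNECTED bssid=aa:bb:cc:dd:ee:ff reason=3 locally_generated=1'
    print(re.search(DISCONNECT_MATCH, line).groups())
    # ('aa:bb:cc:dd:ee:ff', '3', '1')
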
diff --git a/server/cros/packet_generation/IP_utils.py b/server/cros/packet_generation/IP_utils.py
deleted file mode 100644
index 45ff5a2..0000000
--- a/server/cros/packet_generation/IP_utils.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-from autotest_lib.server.hosts import ssh_host
-
-RA_SCRIPT = 'sendra.py'
-SCAPY = 'scapy-2.2.0.tar.gz'
-SCAPY_INSTALL_COMMAND = 'sudo python setup.py install'
-PROC_NET_SNMP6 = '/proc/net/snmp6'
-MULTICAST_ADDR = '33:33:00:00:00:01'
-IFACE = 'managed0'
-LIFETIME = 180
-
-
-class IPutils(object):
-
-    def __init__(self, host):
-        """Initializes an IP utility interface.
-
-        @param host: Router host object.
-
-        """
-        self.host = host
-        self.install_path = self.host.run('mktemp -d').stdout.rstrip()
-
-
-    def install_scapy(self):
-        """Installs scapy on the target device. Scapy and all related files and
-        scripts will be installed in a temp directory under /tmp.
-
-        """
-        scapy = os.path.join(self.install_path, SCAPY)
-        ap_sshhost = ssh_host.SSHHost(hostname=self.host.hostname)
-        current_dir = os.path.dirname(os.path.realpath(__file__))
-        send_ra_script = os.path.join(current_dir, RA_SCRIPT)
-        send_scapy = os.path.join(current_dir, SCAPY)
-        ap_sshhost.send_file(send_scapy, self.install_path)
-        ap_sshhost.send_file(send_ra_script, self.install_path)
-
-        self.host.run('tar -xvf %s -C %s' % (scapy, self.install_path))
-        self.host.run('cd %s; %s' % (self.install_path, SCAPY_INSTALL_COMMAND))
-
-
-    def cleanup_scapy(self):
-        """Remove all scapy related files and scripts from device.
-
-        @param host: Router host object.
-
-        """
-        self.host.run('rm -rf %s' % self.install_path)
-
-
-    def send_ra(self, mac=MULTICAST_ADDR, interval=1, count=None, iface=IFACE,
-                lifetime=LIFETIME):
-        """Invoke scapy and send RA to the device.
-
-        @param host: Router host object.
-        @param mac: string HWAddr/MAC address to send the packets to.
-        @param interval: int Time to sleep between consecutive packets.
-        @param count: int Number of packets to be sent.
-        @param iface: string of the WiFi interface to use for sending packets.
-        @param lifetime: int original RA's router lifetime in seconds.
-
-        """
-        scapy_command = os.path.join(self.install_path, RA_SCRIPT)
-        options = ' -m %s -i %d -c %d -l %d -in %s' %(mac, interval, count,
-                                                      lifetime, iface)
-        self.host.run(scapy_command + options)
-
-
-    def get_icmp6intype134(self, host):
-        """Read the value of Icmp6InType134 and return integer.
-
-        @param host: DUT host object.
-
-        @returns integer value >0 if grep is successful; 0 otherwise.
-
-        """
-        ra_count_str = host.run(
-                       'grep Icmp6InType134 %s || true' % PROC_NET_SNMP6).stdout
-        if ra_count_str:
-            return int(ra_count_str.split()[1])
-        # If grep failed it means that there is no entry for Icmp6InType134 in file.
-        return 0
diff --git a/server/cros/packet_generation/common.py b/server/cros/packet_generation/common.py
deleted file mode 100644
index 01b2171..0000000
--- a/server/cros/packet_generation/common.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import os, sys
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-autotest_dir = os.path.abspath(os.path.join(dirname, "../../.."))
-client_dir = os.path.join(autotest_dir, 'client')
-sys.path.insert(0, client_dir)
-import setup_modules
-sys.path.pop(0)
-setup_modules.setup(base_path=autotest_dir, root_module_name='autotest_lib')
diff --git a/server/cros/packet_generation/scapy-2.2.0.tar.gz b/server/cros/packet_generation/scapy-2.2.0.tar.gz
deleted file mode 100644
index 47ac039..0000000
--- a/server/cros/packet_generation/scapy-2.2.0.tar.gz
+++ /dev/null
Binary files differ
diff --git a/server/cros/packet_generation/sendra.py b/server/cros/packet_generation/sendra.py
deleted file mode 100755
index 62f6145..0000000
--- a/server/cros/packet_generation/sendra.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/python2
-
-import argparse
-import time
-
-from scapy import all as scapy
-
-
-def send(dstmac, interval, count, lifetime, iface):
-    """Generate IPv6 Router Advertisement and send to destination.
-
-    @param dstmac: string HWAddr of the destination ipv6 node.
-    @param interval: int Time to sleep between consecutive packets.
-    @param count: int Number of packets to be sent.
-    @param lifetime: Router lifetime value for the original RA.
-    @param iface: string Router's WiFi interface to send packets over.
-
-    """
-    while count:
-        ra = (scapy.Ether(dst=dstmac) /
-              scapy.IPv6() /
-              scapy.ICMPv6ND_RA(routerlifetime=lifetime))
-        scapy.sendp(ra, iface=iface)
-        count = count - 1
-        time.sleep(interval)
-        lifetime = lifetime - interval
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser()
-    parser.add_argument('-m', '--mac-address', action='store', default=None,
-                         help='HWAddr to send the packet to.')
-    parser.add_argument('-i', '--t-interval', action='store', default=None,
-                         type=int, help='Time to sleep between consecutive')
-    parser.add_argument('-c', '--pkt-count', action='store', default=None,
-                        type=int, help='NUmber of packets to send.')
-    parser.add_argument('-l', '--life-time', action='store', default=None,
-                        type=int, help='Lifetime in seconds for the first RA')
-    parser.add_argument('-in', '--wifi-interface', action='store', default=None,
-                        help='The wifi interface to send packets over.')
-    args = parser.parse_args()
-    send(args.mac_address, args.t_interval, args.pkt_count, args.life_time,
-         args.wifi_interface)
diff --git a/server/cros/power/OWNERS b/server/cros/power/OWNERS
new file mode 100644
index 0000000..850e02e
--- /dev/null
+++ b/server/cros/power/OWNERS
@@ -0,0 +1,2 @@
+include /POWER_OWNERS
+include ../OWNERS
diff --git a/server/cros/power/power_dashboard.py b/server/cros/power/power_dashboard.py
index 83e6478..3c612a1 100644
--- a/server/cros/power/power_dashboard.py
+++ b/server/cros/power/power_dashboard.py
@@ -47,27 +47,30 @@
             board += '_hammer'
 
         dut_info_dict = {
-            'board': board,
-            'version': {
-                'hw': self._host.get_hardware_revision(),
-                'milestone': self._host.get_chromeos_release_milestone(),
-                'os': self._host.get_release_version(),
-                'channel': self._host.get_channel(),
-                'firmware': self._host.get_firmware_version(),
-                'ec': self._host.get_ec_version(),
-                'kernel': self._host.get_kernel_version(),
-            },
-            'sku' : {
-                'cpu': self._host.get_cpu_name(),
-                'memory_size': self._host.get_mem_total_gb(),
-                'storage_size': self._host.get_disk_size_gb(),
-                'display_resolution': self._host.get_screen_resolution(),
-            },
-            'ina': {
-                'version': 0,
-                'ina': power_rails,
-            },
-            'note': self._note,
+                'board': board,
+                'version': {
+                        'hw': self._host.get_hardware_revision(),
+                        'milestone':
+                        self._host.get_chromeos_release_milestone(),
+                        'os': self._host.get_release_version(),
+                        'channel': self._host.get_channel(),
+                        'firmware': self._host.get_firmware_version(),
+                        'ec': self._host.get_ec_version(),
+                        'kernel': self._host.get_kernel_version(),
+                },
+                'sku': {
+                        'cpu': self._host.get_cpu_name(),
+                        'memory_size': self._host.get_mem_total_gb(),
+                        'storage_size': self._host.get_disk_size_gb(),
+                        'display_resolution':
+                        self._host.get_screen_resolution(),
+                        'hwid': self._host.get_hardware_id(),
+                },
+                'ina': {
+                        'version': 0,
+                        'ina': power_rails,
+                },
+                'note': self._note,
         }
 
         if self._host.has_battery():
diff --git a/server/cros/power/power_telemetry_logger.py b/server/cros/power/power_telemetry_logger.py
index e292bee..4b71988 100644
--- a/server/cros/power/power_telemetry_logger.py
+++ b/server/cros/power/power_telemetry_logger.py
@@ -10,6 +10,7 @@
 from __future__ import print_function
 
 import collections
+import csv
 import datetime
 from distutils import sysconfig
 import json
@@ -17,8 +18,10 @@
 import numpy
 import os
 import re
+import shutil
 import six
 import string
+import subprocess
 import threading
 import time
 
@@ -121,19 +124,20 @@
         """
         self._end_measurement()
         logging.info('%s finishes.', self.__class__.__name__)
+        checkpoint_logger = self._get_client_test_checkpoint_logger(
+                client_test_dir)
         start_ts, end_ts = self._get_client_test_ts(client_test_dir)
         loggers = self._load_and_trim_data(start_ts, end_ts)
         # Call export after trimming to only export trimmed data.
-        self._export_data_locally(client_test_dir)
-        checkpoint_logger = self._get_client_test_checkpoint_logger(
-                client_test_dir)
+        self._export_data_locally(client_test_dir,
+                                  checkpoint_logger.checkpoint_data)
         self._upload_data(loggers, checkpoint_logger)
 
     def _end_measurement(self):
         """End power telemetry devices."""
         raise NotImplementedError('Subclasses must implement _end_measurement.')
 
-    def _export_data_locally(self, client_test_dir):
+    def _export_data_locally(self, client_test_dir, checkpoint_data=None):
         """Slot for the logger to export measurements locally."""
         raise NotImplementedError('Subclasses must implement '
                                   '_export_data_locally.')
@@ -316,6 +320,19 @@
                 '_load_and_trim_data and return a list of loggers.')
 
     def _get_client_test_checkpoint_logger(self, client_test_dir):
+        """Load the client-side test checkpoints.
+
+        The key data we need is the checkpoint_logger.checkpoint_data object.
+        This is a dictionary that maps each checkpoint name to a list of
+        [start, end] timestamps (seconds since epoch).
+        Note: if there are issues loading the data, the checkpoint logger is
+        still returned, but it will be empty. Code that relies on the
+        returned object and wants to make sure it is valid needs to check
+        whether |checkpoint_logger.checkpoint_data| is empty, as it will
+        never be None.
+
+        Returns: CheckpointLogger object with the client test's checkpoints,
+                or empty data.
+        """
         client_test_resultsdir = os.path.join(client_test_dir, 'results')
         checkpoint_logger = power_status.get_checkpoint_logger_from_file(
                 resultsdir=client_test_resultsdir)
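
Editor's note: a small sketch of the shape the docstring above describes, and the emptiness check callers are expected to make (checkpoint names and timestamps are made up):

    checkpoint_data = {
            'login': [[1633024800.0, 1633024815.5]],
            'video_playback': [[1633024820.0, 1633024880.0],
                               [1633024890.0, 1633024950.0]],
    }

    if not checkpoint_data:
        print('no checkpoint data loaded; skipping per-checkpoint summaries')
    else:
        for name, spans in checkpoint_data.items():
            for start, end in spans:
                print('%s: %.1f s' % (name, end - start))
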
@@ -338,9 +355,203 @@
             pdash.upload()
 
 
+class PacTelemetryLogger(PowerTelemetryLogger):
+    """This logger class measures power via pacman debugger."""
+
+    def __init__(self, config, resultsdir, host):
+        """Init PacTelemetryLogger.
+
+        @param config: the args argument from test_that in a dict. Settings for
+                       power telemetry devices.
+                       required data:
+                       {'test': 'test_TestName.tag',
+                        'config': PAC address and sense resistor .py file location,
+                        'mapping': DUT power rail mapping csv file,
+                        'gpio': gpio}
+        @param resultsdir: path to directory where current autotest results are
+                           stored, e.g. /tmp/test_that_results/
+                           results-1-test_TestName.tag/test_TestName.tag/
+                           results/
+        @param host: CrosHost object representing the DUT.
+
+        @raises error.TestError if there is a problem running pacman.py.
+        """
+        super(PacTelemetryLogger, self).__init__(config, resultsdir, host)
+        required_args = ['config', 'mapping', 'gpio']
+        for arg in required_args:
+            if arg not in config:
+                msg = 'Missing required arguments for PacTelemetryLogger: %s' % arg
+                raise error.TestError(msg)
+        self._pac_config_file = config['config']
+        self._pac_mapping_file = config['mapping']
+        self._pac_gpio_file = config['gpio']
+        self._resultsdir = resultsdir
+        self.pac_path = self._get_pacman_install_path()
+        self.pac_data_path = os.path.join(resultsdir, 'pac')
+
+        os.makedirs(self.pac_data_path, exist_ok=True)
+
+        # Check if pacman is able to run
+        try:
+            subprocess.check_output('pacman.py', timeout=5, cwd=self.pac_path)
+        except subprocess.CalledProcessError as e:
+            msg = 'Error running pacman.py. '\
+                  'Check that dependencies have been installed.'
+            logging.error(msg)
+            logging.error(e.output)
+            raise error.TestError(e)
+
+    def _start_measurement(self):
+        """Start a pacman thread with the given config, mapping, and gpio files."""
+
+        self._log = open(os.path.join(self.pac_data_path, "pac.log"), "a")
+
+        self._pacman_args = [
+                '--config', self._pac_config_file, '--mapping',
+                self._pac_mapping_file, '--gpio', self._pac_gpio_file,
+                '--output', self.pac_data_path
+        ]
+
+        logging.debug('Starting pacman process')
+        cmds = ['pacman.py'] + self._pacman_args
+        logging.debug(cmds)
+
+        self._pacman_process = subprocess.Popen(cmds,
+                                                cwd=self.pac_path,
+                                                stdout=self._log,
+                                                stderr=self._log)
+
+    def _end_measurement(self):
+        """Stop pacman thread. This will dump and process the accumulators."""
+        self._pacman_process.send_signal(2)
+        self._pacman_process.wait(timeout=10)
+        self._load_and_trim_data(None, None)
+        self._export_data_locally(self._resultsdir)
+
+        self._log.close()
+
+    def _get_pacman_install_path(self):
+        """Return the absolute path of pacman on the host.
+
+        @raises error.TestError if pacman is not in PATH
+        """
+        pac_path = shutil.which('pacman.py')
+        if pac_path is None:
+            msg = 'Unable to locate pacman.py \n'\
+                  'Check pacman.py is in PATH'
+            logging.error(msg)
+            raise error.TestNAError(msg)
+        return os.path.dirname(pac_path)
+
+    def _load_and_trim_data(self, start_ts, end_ts):
+        """Load data and trim data.
+
+        Load and format data recorded by power telemetry devices. Trim data if
+        necessary.
+
+        @param start_ts: start timestamp in seconds since epoch, None if no
+                         need to trim data.
+        @param end_ts: end timestamp in seconds since epoch, None if no need to
+                       trim data.
+        @return a list of loggers, where each logger contains raw power data and
+                statistics.
+
+        @raises TestError when unable to locate or open pacman accumulator results
+
+        logger format:
+        {
+            'sample_count' : 60,
+            'sample_duration' : 60,
+            'data' : {
+                'domain_1' : [ 111.11, 123.45 , ... , 99.99 ],
+                ...
+                'domain_n' : [ 3999.99, 4242.42, ... , 4567.89 ]
+            },
+            'average' : {
+                'domain_1' : 100.00,
+                ...
+                'domain_n' : 4300.00
+            },
+            'unit' : {
+                'domain_1' : 'milliwatt',
+                ...
+                'domain_n' : 'milliwatt'
+            },
+            'type' : {
+                'domain_1' : 'servod',
+                ...
+                'domain_n' : 'servod'
+            },
+        }
+        """
+        loggers = list()
+        accumulator_path = os.path.join(self.pac_data_path, 'accumulatorData.csv')
+        if not os.path.exists(accumulator_path):
+            raise error.TestError('Unable to locate pacman results!')
+        # Load resulting pacman csv file
+        try:
+            with open(accumulator_path, 'r') as csvfile:
+                reader = csv.reader(csvfile, delimiter=',')
+                # Capture the first line
+                schema = next(reader)
+                # First column is an index
+                schema[0] = 'index'
+                # Place data into a dictionary
+                self._accumulator_data = list()
+                for row in reader:
+                    measurement = dict(zip(schema, row))
+                    self._accumulator_data.append(measurement)
+        except OSError:
+            raise error.TestError('Unable to open pacman accumulator results!')
+
+        # Match required logger format
+        log = {
+                'sample_count': 1,
+                'sample_duration': float(self._accumulator_data[0]['tAccum']),
+                'data': {
+                        x['Rail']: [float(x['Average Power (w)'])]
+                        for x in self._accumulator_data
+                },
+                'average': {
+                        x['Rail']: float(x['Average Power (w)'])
+                        for x in self._accumulator_data
+                },
+                'unit': {x['Rail']: 'watts'
+                         for x in self._accumulator_data},
+                'type': {x['Rail']: 'pacman'
+                         for x in self._accumulator_data},
+        }
+        loggers.append(log)
+        return loggers
+
+    def output_pacman_aggregates(self, test):
+        """This outputs all the processed aggregate values to the results-chart.json
+
+        @param test: the test.test object to use when outputting the
+                    performance values to results-chart.json
+        """
+        for rail in self._accumulator_data:
+            test.output_perf_value(rail['Rail'],
+                                   float(rail['Average Power (w)']),
+                                   units='watts',
+                                   replace_existing_values=True)
+
+    def _export_data_locally(self, client_test_dir, checkpoint_data=None):
+        """Slot for the logger to export measurements locally."""
+        self._local_pac_data_path = os.path.join(client_test_dir,
+                                                 'pacman_data')
+        shutil.copytree(self.pac_data_path, self._local_pac_data_path)
+
+    def _upload_data(self, loggers, checkpoint_logger):
+        """
+        _upload_data is defined as a no-op as a hot-fix for external partners'
+        lack of access to the power_dashboard URL.
+        """
+        pass
+
+
 class ServodTelemetryLogger(PowerTelemetryLogger):
-    """This logger class measures power by querying a servod instance.
-    """
+    """This logger class measures power by querying a servod instance."""
 
     DEFAULT_INA_RATE = 20.0
     DEFAULT_VBAT_RATE = 60.0
@@ -368,7 +579,7 @@
         self._vbat_rate = float(config.get('vbat_rate', self.DEFAULT_VBAT_RATE))
         self._pm = measure_power.PowerMeasurement(host=self._servo_host,
                                                   port=self._servo_port,
-                                                  ina_rate=self._ina_rate,
+                                                  adc_rate=self._ina_rate,
                                                   vbat_rate=self._vbat_rate)
 
     def _start_measurement(self):
@@ -381,12 +592,38 @@
         """End querying servod."""
         self._pm.FinishMeasurement()
 
-    def _export_data_locally(self, client_test_dir):
-        """Output formatted text summaries locally."""
+    def _export_data_locally(self, client_test_dir, checkpoint_data=None):
+        """Output formatted text summaries to test results directory.
+
+        @param client_test_dir: path to the client test output
+        @param checkpoint_data: dict mapping each checkpoint name to a list
+                                of (start, end) timestamp tuples.
+        """
         # At this point the PowerMeasurement unit has been processed. Dump its
         # formatted summaries into the results directory.
         power_summaries_dir = os.path.join(self._resultsdir, 'power_summaries')
         self._pm.SaveSummary(outdir=power_summaries_dir)
+        # After the main summaries are exported, we also want to export one
+        # for each checkpoint. As each checkpoint might contain multiple
+        # entries, the checkpoint name is expanded by a digit.
+        def export_checkpoint(name, start, end):
+            """Helper to avoid code duplication for 0th and next cases."""
+            self._pm.SaveTrimmedSummary(tag=name,
+                                        tstart=start,
+                                        tend=end,
+                                        outdir=power_summaries_dir)
+
+        if checkpoint_data:
+            for checkpoint_name, checkpoint_list in checkpoint_data.items():
+                # Export the first entry without any sort of name change.
+                tstart, tend = checkpoint_list[0]
+                export_checkpoint(checkpoint_name, tstart, tend)
+                for suffix, checkpoint_element in enumerate(
+                        checkpoint_list[1:], start=1):
+                    # Export subsequent entries with a suffix
+                    tstart, tend = checkpoint_element
+                    export_checkpoint('%s%d' % (checkpoint_name, suffix),
+                                      tstart, tend)
 
     def _load_and_trim_data(self, start_ts, end_ts):
         """Load data and trim data.
@@ -510,7 +747,7 @@
         """Start power measurement with Sweetberry via powerlog tool."""
         self._sweetberry_thread.start()
 
-    def _export_data_locally(self, client_test_dir):
+    def _export_data_locally(self, client_test_dir, checkpoint_data=None):
         """Output formatted text summaries locally."""
         #TODO(crbug.com/978665): implement this.
         pass
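
Editor's note: PacTelemetryLogger._load_and_trim_data above converts pacman's accumulatorData.csv into the documented logger dict via zip(schema, row). A condensed standalone sketch of that conversion; the column names come from the code above, while the file contents and rail names are made up:

    import csv
    import io

    raw = io.StringIO(',Rail,tAccum,Average Power (w)\n'
                      '0,PP3300_DX,60.0,0.42\n'
                      '1,PP1800_A,60.0,0.05\n')

    reader = csv.reader(raw)
    schema = next(reader)
    schema[0] = 'index'                      # first column is an unnamed index
    rows = [dict(zip(schema, r)) for r in reader]

    log = {
            'sample_count': 1,
            'sample_duration': float(rows[0]['tAccum']),
            'average': {r['Rail']: float(r['Average Power (w)']) for r in rows},
            'unit': {r['Rail']: 'watts' for r in rows},
            'type': {r['Rail']: 'pacman' for r in rows},
    }
    print(log['average'])   # {'PP3300_DX': 0.42, 'PP1800_A': 0.05}
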
diff --git a/server/cros/power/servo_charger.py b/server/cros/power/servo_charger.py
index bbe1382..24f47e3 100644
--- a/server/cros/power/servo_charger.py
+++ b/server/cros/power/servo_charger.py
@@ -56,7 +56,7 @@
 
         Make sure that Servo is v4 and can manage charging. Make sure that DUT
         responds to Servo charging commands. Restore Servo v4 power role after
-        sanity check.
+        confidence check.
 
         @param host: CrosHost object representing the DUT or None.
                      If host is None, then the is_ac_connected check on the
@@ -70,7 +70,7 @@
             raise error.TestNAError('Servo setup does not support PD control. '
                                     'Check logs for details.')
 
-        self._original_role = self._servo.get('servo_v4_role')
+        self._original_role = self._servo.get('servo_pd_role')
         if self._original_role == 'snk':
             self.start_charging()
             self.stop_charging()
@@ -150,7 +150,7 @@
 
         @raises error.TestError: if the role did not change successfully.
         """
-        self._servo.set_nocheck('servo_v4_role', role)
+        self._servo.set_nocheck('servo_pd_role', role)
         # Sometimes the role reverts quickly. Add a short delay to let the new
         # role stabilize.
         time.sleep(_ROLE_SETTLING_DELAY_SEC)
@@ -162,7 +162,7 @@
                      delay_sec=_DELAY_SEC, backoff=_BACKOFF)
         def check_servo_role(role):
             """Check if servo role is as expected, if not, retry."""
-            if self._servo.get('servo_v4_role') != role:
+            if self._servo.get('servo_pd_role') != role:
                 raise error.TestError('Servo v4 failed to set its PD role to '
                                       '%s.' % role)
         check_servo_role(role)
@@ -176,7 +176,7 @@
             if not self._servo.has_control('charger_connected'):
                 # TODO(coconutruben): remove this check once labs have the
                 # latest hdctools with the required control.
-                logging.warn('Could not verify %r control as the '
+                logging.warning('Could not verify %r control as the '
                               'control is not available on servod.',
                               'charger_connected')
                 return
diff --git a/server/cros/provision.py b/server/cros/provision.py
index acd0860..d84282e 100644
--- a/server/cros/provision.py
+++ b/server/cros/provision.py
@@ -66,9 +66,9 @@
     to, and returns the corresponding version label prefix.
 
     Known version label prefixes are:
-      * `CROS_VERSION_PREFIX` for Chrome OS version strings.
+      * `CROS_VERSION_PREFIX` for ChromeOS version strings.
         These images have names like `cave-release/R57-9030.0.0`.
-      * `CROS_ANDROID_VERSION_PREFIX` for Chrome OS Android version strings.
+      * `CROS_ANDROID_VERSION_PREFIX` for ChromeOS Android version strings.
         These images have names like `git_nyc-arc/cheets_x86-user/3512523`.
 
     @param image: The image name to be parsed.
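
Editor's note: a rough illustration of the two image-name shapes the docstring above distinguishes. This is only a regex sketch, not the module's parser, and the returned strings merely stand in for the real prefix constants:

    import re

    _CROS_RE = re.compile(r'^.+-release/R\d+-[\d.]+$')    # e.g. cave-release/R57-9030.0.0
    _CROS_ANDROID_RE = re.compile(r'^git_.+/.+/\d+$')     # e.g. git_nyc-arc/cheets_x86-user/3512523

    def label_prefix_for(image):
        if _CROS_RE.match(image):
            return 'CROS_VERSION_PREFIX'
        if _CROS_ANDROID_RE.match(image):
            return 'CROS_ANDROID_VERSION_PREFIX'
        raise ValueError('unrecognized image name: %s' % image)

    print(label_prefix_for('cave-release/R57-9030.0.0'))
    print(label_prefix_for('git_nyc-arc/cheets_x86-user/3512523'))
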
@@ -257,7 +257,7 @@
 
 class Verify(_SpecialTaskAction):
     """
-    Tests to verify that the DUT is in a sane, known good state that we can run
+    Tests to verify that the DUT is in a known good state that we can run
     tests on.  Failure to verify leads to running Repair.
     """
 
@@ -272,7 +272,7 @@
         # Another way to do this is to remove rpm dependency from tests' control
         # file. That will involve changes on multiple control files. This one
         # line change here is a simple temporary fix.
-        'rpm': actionables.TestActionable('dummy_PassServer'),
+        'rpm': actionables.TestActionable('stub_PassServer'),
     }
 
     name = 'verify'
@@ -328,7 +328,7 @@
 class Cleanup(_SpecialTaskAction):
     """
     Cleanup runs after a test fails to try and remove artifacts of tests and
-    ensure the DUT will be in a sane state for the next test run.
+    ensure the DUT will be in a good state for the next test run.
     """
 
     _actions = {
diff --git a/server/cros/provision_actionables.py b/server/cros/provision_actionables.py
index ac5c799..01a6b04 100644
--- a/server/cros/provision_actionables.py
+++ b/server/cros/provision_actionables.py
@@ -34,7 +34,7 @@
     def __init__(self, test, extra_kwargs={}):
         """Init method.
 
-        @param test: String, the test to run, e.g. dummy_PassServer
+        @param test: String, the test to run, e.g. stub_ServerToClientPass
         @param extra_kargs: A dictionary, extra keyval-based args
                             that will be passed when execute the test.
         """
diff --git a/server/cros/provision_unittest.py b/server/cros/provision_unittest.py
index d0ca926..48afc37 100755
--- a/server/cros/provision_unittest.py
+++ b/server/cros/provision_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.7
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -46,12 +46,12 @@
             self.assertEqual(prefix, expected)
 
     def test_cros_prefix(self):
-        """Test handling of Chrome OS version strings."""
+        """Test handling of ChromeOS version strings."""
         self._do_test_prefixes(provision.CROS_VERSION_PREFIX,
                                _CROS_VERSION_SAMPLES)
 
     def test_cros_android_prefix(self):
-        """Test handling of Chrome OS version strings."""
+        """Test handling of ChromeOS version strings."""
         self._do_test_prefixes(provision.CROS_ANDROID_VERSION_PREFIX,
                                _CROS_ANDROID_VERSION_SAMPLES)
 
diff --git a/server/cros/provisioner.py b/server/cros/provisioner.py
index e40b247..b209c57 100644
--- a/server/cros/provisioner.py
+++ b/server/cros/provisioner.py
@@ -21,7 +21,7 @@
 from autotest_lib.server.cros.dynamic_suite import tools
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -44,7 +44,7 @@
 # parts of the system in Chromium OS test images will behave in ways
 # convenient to the test lab when this file is present.  Generally,
 # we create this immediately after any update completes.
-_LAB_MACHINE_FILE = '/mnt/stateful_partition/.labmachine'
+LAB_MACHINE_FILE = '/mnt/stateful_partition/.labmachine'
 
 # _TARGET_VERSION - A file containing the new version to which we plan
 # to update.  This file is used by the CrOS shutdown code to detect and
@@ -162,7 +162,7 @@
     @param update_url: url to the image to update to.
 
     """
-    # The Chrome OS version is generally the last element in the URL. The only
+    # The ChromeOS version is generally the last element in the URL. The only
     # exception is delta update URLs, which are rooted under the version; e.g.,
     # http://.../update/.../0.14.755.0/au/0.14.754.0. In this case we want to
     # strip off the au section of the path before reading the version.
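
Editor's note: a sketch of the version extraction the comment above describes: take the last URL component unless the path contains a delta "au" segment, in which case read the component just before it. This is illustrative, not the module's exact implementation:

    from urllib.parse import urlparse   # the module itself goes through six.moves

    def url_to_version(update_url):
        parts = urlparse(update_url).path.strip('/').split('/')
        if 'au' in parts:
            return parts[parts.index('au') - 1]
        return parts[-1]

    print(url_to_version('http://devserver/update/board/0.14.755.0'))                    # 0.14.755.0
    print(url_to_version('http://devserver/update/board/0.14.755.0/au/0.14.754.0'))      # 0.14.755.0
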
@@ -218,7 +218,8 @@
                  host=None,
                  interactive=True,
                  is_release_bucket=None,
-                 is_servohost=False):
+                 is_servohost=False,
+                 public_bucket=False):
         """Initializes the object.
 
         @param update_url: The URL we want the update to use.
@@ -227,6 +228,9 @@
         @param is_release_bucket: If True, use release bucket
             gs://chromeos-releases.
         @param is_servohost: Bool whether the update target is a servohost.
+        @param public_bucket: True to copy payloads to a public throwaway GS
+            bucket. This avoids using a lab cache server, so local test runs
+            can provision without any special setup.
         """
         self.update_url = update_url
         self.host = host
@@ -234,6 +238,7 @@
         self.update_version = _url_to_version(update_url)
         self._is_release_bucket = is_release_bucket
         self._is_servohost = is_servohost
+        self._public_bucket = public_bucket
 
     def _run(self, cmd, *args, **kwargs):
         """Abbreviated form of self.host.run(...)"""
@@ -404,6 +409,20 @@
                         'gs_cache': False
                 })
 
+    def _quick_provision_with_public_bucket(self, provision_command,
+                                            image_name):
+        """Run quick_provision using public GS bucket.
+
+        @param provision_command: The path of quick_provision command.
+        @param image_name: The image to be installed.
+        """
+        logging.info('Try quick provision with public bucket.')
+
+        bucket_url = self.update_url[:self.update_url.find(image_name) - 1]
+        command = '%s --noreboot %s %s' % (provision_command, image_name,
+                                           bucket_url)
+        self._run(command)
+
     def _install_update(self):
         """Install an updating using the `quick-provision` script.
 
@@ -415,20 +434,27 @@
         logging.info('Installing image at %s onto %s', self.update_url,
                      self.host.hostname)
         server_name = six.moves.urllib.parse.urlparse(self.update_url)[1]
-        image_name = url_to_image_name(self.update_url)
+        if self._public_bucket:
+            image_name = self.update_url.partition('provision/')[2]
+        else:
+            image_name = url_to_image_name(self.update_url)
 
         logging.info('Installing image using quick-provision.')
         provision_command = self._get_remote_script(_QUICK_PROVISION_SCRIPT)
         try:
-            try:
-                self._quick_provision_with_gs_cache(provision_command,
-                                                    server_name, image_name)
-            except Exception as e:
-                logging.error(
-                        'Failed to quick-provision with gscache with '
-                        'error %s', e)
-                self._quick_provision_with_devserver(provision_command,
-                                                     server_name, image_name)
+            if self._public_bucket:
+                self._quick_provision_with_public_bucket(
+                        provision_command, image_name)
+            else:
+                try:
+                    self._quick_provision_with_gs_cache(
+                            provision_command, server_name, image_name)
+                except Exception as e:
+                    logging.error(
+                            'Failed to quick-provision with gscache with '
+                            'error %s', e)
+                    self._quick_provision_with_devserver(
+                            provision_command, server_name, image_name)
 
             self._set_target_version()
             return kernel_utils.verify_kernel_state_after_update(self.host)
@@ -468,10 +494,10 @@
         # Touch the lab machine file to leave a marker that
         # distinguishes this image from other test images.
         # Afterwards, we must re-run the autoreboot script because
-        # it depends on the _LAB_MACHINE_FILE.
+        # it depends on the LAB_MACHINE_FILE.
         autoreboot_cmd = ('FILE="%s" ; [ -f "$FILE" ] || '
                           '( touch "$FILE" ; start autoreboot )')
-        self._run(autoreboot_cmd % _LAB_MACHINE_FILE)
+        self._run(autoreboot_cmd % LAB_MACHINE_FILE)
         try:
             kernel_utils.verify_boot_expectations(
                     expected_kernel, NewBuildUpdateError.ROLLBACK_FAILURE,
@@ -506,9 +532,11 @@
             `image_name` is the name of the image installed, and
             `attributes` is new attributes to be applied to the DUT.
         """
-        server_name = dev_server.get_resolved_hostname(self.update_url)
-        metrics.Counter(_metric_name('install')).increment(
-                fields={'devserver': server_name})
+        server_name = ""
+        if not self._public_bucket:
+            server_name = dev_server.get_resolved_hostname(self.update_url)
+            metrics.Counter(_metric_name('install')).increment(
+                    fields={'devserver': server_name})
 
         try:
             self._prepare_host()
diff --git a/server/cros/provisioner_unittest.py b/server/cros/provisioner_unittest.py
index 585375b..d0cb414 100755
--- a/server/cros/provisioner_unittest.py
+++ b/server/cros/provisioner_unittest.py
@@ -1,11 +1,10 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import mock
-import mox
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.client.common_lib.cros import kernel_utils
@@ -60,26 +59,26 @@
         self.assertIn(_StubUpdateError._SUMMARY, stub.failure_summary)
 
     def test_host_update_error(self):
-        """Sanity test the `HostUpdateError` classifier."""
+        """Test the `HostUpdateError` classifier."""
         exception = provisioner.HostUpdateError('chromeos6-row3-rack3-host19',
                                                 'Fake message')
         self.assertTrue(isinstance(exception.failure_summary, str))
 
     def test_image_install_error(self):
-        """Sanity test the `ImageInstallError` classifier."""
+        """Test the `ImageInstallError` classifier."""
         exception = provisioner.ImageInstallError(
                 'chromeos6-row3-rack3-host19', 'chromeos4-devserver7.cros',
                 'Fake message')
         self.assertTrue(isinstance(exception.failure_summary, str))
 
     def test_new_build_update_error(self):
-        """Sanity test the `NewBuildUpdateError` classifier."""
+        """Test the `NewBuildUpdateError` classifier."""
         exception = provisioner.NewBuildUpdateError('R68-10621.0.0',
                                                     'Fake message')
         self.assertTrue(isinstance(exception.failure_summary, str))
 
 
-class TestProvisioner(mox.MoxTestBase):
+class TestProvisioner(unittest.TestCase):
     """Test provisioner module."""
 
     def testParseBuildFromUpdateUrlwithUpdate(self):
@@ -96,31 +95,34 @@
                       'R28-4444.0.0-b2996')
         script_name = 'fubar'
         local_script = '/usr/local/bin/%s' % script_name
-        host = self.mox.CreateMockAnything()
+
+        host = mock.MagicMock()
         cros_provisioner = provisioner.ChromiumOSProvisioner(update_url,
                                                              host=host)
-        host.path_exists(local_script).AndReturn(True)
+        host.path_exists.return_value = True
 
-        self.mox.ReplayAll()
         # Simple case:  file exists on DUT
         self.assertEqual(cros_provisioner._get_remote_script(script_name),
                          local_script)
-        self.mox.VerifyAll()
+        host.path_exists.assert_called_with(local_script)
 
-        self.mox.ResetAll()
         fake_shell = '/bin/ash'
         tmp_script = '/usr/local/tmp/%s' % script_name
-        fake_result = self.mox.CreateMockAnything()
-        fake_result.stdout = '#!%s\n' % fake_shell
-        host.path_exists(local_script).AndReturn(False)
-        host.run(mox.IgnoreArg())
-        host.run(mox.IgnoreArg()).AndReturn(fake_result)
+        fake_res = fake_result('#!%s\n' % fake_shell)
 
-        self.mox.ReplayAll()
+        host.path_exists.return_value = False
+        host.run.return_value = fake_res
+
         # Complicated case:  script not on DUT, so try to download it.
         self.assertEqual(cros_provisioner._get_remote_script(script_name),
                          '%s %s' % (fake_shell, tmp_script))
-        self.mox.VerifyAll()
+        host.path_exists.assert_called_with(local_script)
+
+
+class fake_result(object):
+    """A fake result with stdout attribute."""
+    def __init__(self, result):
+        self.stdout = result
 
 
 class TestProvisioner2(unittest.TestCase):
diff --git a/server/cros/packet_generation/__init__.py b/server/cros/pvs/__init__.py
similarity index 100%
rename from server/cros/packet_generation/__init__.py
rename to server/cros/pvs/__init__.py
diff --git a/server/cros/pvs/example_criteria.textproto b/server/cros/pvs/example_criteria.textproto
new file mode 100644
index 0000000..a941ca7
--- /dev/null
+++ b/server/cros/pvs/example_criteria.textproto
@@ -0,0 +1,19 @@
+criteria {
+    name_regex: "minutes_tested"
+    test_name: "power_LoadTest"
+    lower_bound: {
+        bound: 0
+    }
+    upper_bound: {
+        bound: 100
+    }
+}
+criteria {
+    name_regex: "wh_energy_.*"
+    lower_bound: {
+        bound: -10
+    }
+    upper_bound: {
+        bound: 80
+    }
+}
\ No newline at end of file
diff --git a/server/cros/pvs/pass_criteria.proto b/server/cros/pvs/pass_criteria.proto
new file mode 100644
index 0000000..7e597a2
--- /dev/null
+++ b/server/cros/pvs/pass_criteria.proto
@@ -0,0 +1,24 @@
+syntax = "proto3";
+
+// Bound represents a limit for the performance value
+message Bound {
+    double bound = 1;
+}
+
+// Criteria captures a single pass criteria
+message Criteria {
+    // name is presented as a regular expression
+    string name_regex = 1;
+
+    // only apply criteria to this test, if populated
+    string test_name = 2;
+
+    // lower and upper Bound messages as described above
+    Bound lower_bound = 3;
+    Bound upper_bound = 4;
+}
+
+// PassCriteria contains any number of criteria as above
+message PassCriteria {
+    repeated Criteria criteria = 1;
+}
\ No newline at end of file
diff --git a/server/cros/pvs/pass_criteria_pb2.py b/server/cros/pvs/pass_criteria_pb2.py
new file mode 100644
index 0000000..83b4a1d
--- /dev/null
+++ b/server/cros/pvs/pass_criteria_pb2.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: pass_criteria.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+        name='pass_criteria.proto',
+        package='',
+        syntax='proto3',
+        serialized_options=None,
+        serialized_pb=
+        b'\n\x13pass_criteria.proto\"\x16\n\x05\x42ound\x12\r\n\x05\x62ound\x18\x01 \x01(\x01\"k\n\x08\x43riteria\x12\x12\n\nname_regex\x18\x01 \x01(\t\x12\x11\n\ttest_name\x18\x02 \x01(\t\x12\x1b\n\x0blower_bound\x18\x03 \x01(\x0b\x32\x06.Bound\x12\x1b\n\x0bupper_bound\x18\x04 \x01(\x0b\x32\x06.Bound\"+\n\x0cPassCriteria\x12\x1b\n\x08\x63riteria\x18\x01 \x03(\x0b\x32\t.Criteriab\x06proto3'
+)
+
+_BOUND = _descriptor.Descriptor(
+        name='Bound',
+        full_name='Bound',
+        filename=None,
+        file=DESCRIPTOR,
+        containing_type=None,
+        fields=[
+                _descriptor.FieldDescriptor(name='bound',
+                                            full_name='Bound.bound',
+                                            index=0,
+                                            number=1,
+                                            type=1,
+                                            cpp_type=5,
+                                            label=1,
+                                            has_default_value=False,
+                                            default_value=float(0),
+                                            message_type=None,
+                                            enum_type=None,
+                                            containing_type=None,
+                                            is_extension=False,
+                                            extension_scope=None,
+                                            serialized_options=None,
+                                            file=DESCRIPTOR),
+        ],
+        extensions=[],
+        nested_types=[],
+        enum_types=[],
+        serialized_options=None,
+        is_extendable=False,
+        syntax='proto3',
+        extension_ranges=[],
+        oneofs=[],
+        serialized_start=23,
+        serialized_end=45,
+)
+
+_CRITERIA = _descriptor.Descriptor(
+        name='Criteria',
+        full_name='Criteria',
+        filename=None,
+        file=DESCRIPTOR,
+        containing_type=None,
+        fields=[
+                _descriptor.FieldDescriptor(name='name_regex',
+                                            full_name='Criteria.name_regex',
+                                            index=0,
+                                            number=1,
+                                            type=9,
+                                            cpp_type=9,
+                                            label=1,
+                                            has_default_value=False,
+                                            default_value=b"".decode('utf-8'),
+                                            message_type=None,
+                                            enum_type=None,
+                                            containing_type=None,
+                                            is_extension=False,
+                                            extension_scope=None,
+                                            serialized_options=None,
+                                            file=DESCRIPTOR),
+                _descriptor.FieldDescriptor(name='test_name',
+                                            full_name='Criteria.test_name',
+                                            index=1,
+                                            number=2,
+                                            type=9,
+                                            cpp_type=9,
+                                            label=1,
+                                            has_default_value=False,
+                                            default_value=b"".decode('utf-8'),
+                                            message_type=None,
+                                            enum_type=None,
+                                            containing_type=None,
+                                            is_extension=False,
+                                            extension_scope=None,
+                                            serialized_options=None,
+                                            file=DESCRIPTOR),
+                _descriptor.FieldDescriptor(name='lower_bound',
+                                            full_name='Criteria.lower_bound',
+                                            index=2,
+                                            number=3,
+                                            type=11,
+                                            cpp_type=10,
+                                            label=1,
+                                            has_default_value=False,
+                                            default_value=None,
+                                            message_type=None,
+                                            enum_type=None,
+                                            containing_type=None,
+                                            is_extension=False,
+                                            extension_scope=None,
+                                            serialized_options=None,
+                                            file=DESCRIPTOR),
+                _descriptor.FieldDescriptor(name='upper_bound',
+                                            full_name='Criteria.upper_bound',
+                                            index=3,
+                                            number=4,
+                                            type=11,
+                                            cpp_type=10,
+                                            label=1,
+                                            has_default_value=False,
+                                            default_value=None,
+                                            message_type=None,
+                                            enum_type=None,
+                                            containing_type=None,
+                                            is_extension=False,
+                                            extension_scope=None,
+                                            serialized_options=None,
+                                            file=DESCRIPTOR),
+        ],
+        extensions=[],
+        nested_types=[],
+        enum_types=[],
+        serialized_options=None,
+        is_extendable=False,
+        syntax='proto3',
+        extension_ranges=[],
+        oneofs=[],
+        serialized_start=47,
+        serialized_end=154,
+)
+
+_PASSCRITERIA = _descriptor.Descriptor(
+        name='PassCriteria',
+        full_name='PassCriteria',
+        filename=None,
+        file=DESCRIPTOR,
+        containing_type=None,
+        fields=[
+                _descriptor.FieldDescriptor(name='criteria',
+                                            full_name='PassCriteria.criteria',
+                                            index=0,
+                                            number=1,
+                                            type=11,
+                                            cpp_type=10,
+                                            label=3,
+                                            has_default_value=False,
+                                            default_value=[],
+                                            message_type=None,
+                                            enum_type=None,
+                                            containing_type=None,
+                                            is_extension=False,
+                                            extension_scope=None,
+                                            serialized_options=None,
+                                            file=DESCRIPTOR),
+        ],
+        extensions=[],
+        nested_types=[],
+        enum_types=[],
+        serialized_options=None,
+        is_extendable=False,
+        syntax='proto3',
+        extension_ranges=[],
+        oneofs=[],
+        serialized_start=156,
+        serialized_end=199,
+)
+
+_CRITERIA.fields_by_name['lower_bound'].message_type = _BOUND
+_CRITERIA.fields_by_name['upper_bound'].message_type = _BOUND
+_PASSCRITERIA.fields_by_name['criteria'].message_type = _CRITERIA
+DESCRIPTOR.message_types_by_name['Bound'] = _BOUND
+DESCRIPTOR.message_types_by_name['Criteria'] = _CRITERIA
+DESCRIPTOR.message_types_by_name['PassCriteria'] = _PASSCRITERIA
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Bound = _reflection.GeneratedProtocolMessageType(
+        'Bound',
+        (_message.Message, ),
+        {
+                'DESCRIPTOR': _BOUND,
+                '__module__': 'pass_criteria_pb2'
+                # @@protoc_insertion_point(class_scope:Bound)
+        })
+_sym_db.RegisterMessage(Bound)
+
+Criteria = _reflection.GeneratedProtocolMessageType(
+        'Criteria',
+        (_message.Message, ),
+        {
+                'DESCRIPTOR': _CRITERIA,
+                '__module__': 'pass_criteria_pb2'
+                # @@protoc_insertion_point(class_scope:Criteria)
+        })
+_sym_db.RegisterMessage(Criteria)
+
+PassCriteria = _reflection.GeneratedProtocolMessageType(
+        'PassCriteria',
+        (_message.Message, ),
+        {
+                'DESCRIPTOR': _PASSCRITERIA,
+                '__module__': 'pass_criteria_pb2'
+                # @@protoc_insertion_point(class_scope:PassCriteria)
+        })
+_sym_db.RegisterMessage(PassCriteria)
+
+# @@protoc_insertion_point(module_scope)
diff --git a/server/cros/pvs/sequence.py b/server/cros/pvs/sequence.py
new file mode 100644
index 0000000..2f0111a
--- /dev/null
+++ b/server/cros/pvs/sequence.py
@@ -0,0 +1,123 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+
+from autotest_lib.server import autotest, test
+from autotest_lib.client.common_lib import error
+
+
+class test_sequence(test.test):
+    """
+    test_sequence extends the base test implementation to allow for
+    encapsulating a series of (client or server) tests which must
+    be run in a given sequence.
+    """
+
+    def initialize(self, sequence):
+        """
+        initialize implements the initialize call in test.test and is called
+        before execution of the test. Verdicts from each executed test in the
+        sequence are tracked internally and used to annotate results.
+
+        @param sequence: the sequence of tests constructed in the wrapper
+        """
+        self._sequenced_tests = sequence
+        self._sequence_verdicts = {}
+        self._results_path = self.job._server_offload_dir_path()
+        self._wrapper_results_dir = os.path.join(self._results_path,
+                                                 self.tagged_testname)
+
+    def process_test_results_post_hook(self):
+        """
+        process_test_results_post_hook is used as a post_run_hook to record
+        results to the status.log file following the execution of the run. For
+        tests that were completed (i.e. no exceptions occurred to end the
+        sequence), results are moved from the child results directory to the
+        top level.
+        """
+        for test, args, server_test in self._sequenced_tests:
+            if test not in self._sequence_verdicts:
+                continue
+
+            if server_test:
+                self.surface_server_test_resultsdir(test)
+            else:
+                self.surface_client_test_resultsdir(test)
+            annotated_testname = self.tagged_testname + "." + test
+            self.job.record('START', None, annotated_testname)
+            self.job.record('INFO', None, annotated_testname)
+            if self._sequence_verdicts[test]:
+                self.job.record('END GOOD', None, annotated_testname, "")
+            else:
+                self.job.record('END FAIL', None, annotated_testname, "")
+
+    def execute_sequenced_test(self, client, test, argv, server_test):
+        """
+        execute_sequenced_test runs a single test from the sequence with the
+        given argument vector
+
+        @param client: Autotest client instance used to run client-side tests
+        @param test: test name (url) to run
+        @param argv: argument dictionary to run the test with
+        @param server_test: True if the test is a server-side test
+
+        @raises error.TestFail: on failure of the wrapped tests
+        """
+        try:
+            self._sequence_verdicts[test] = True
+            if server_test:
+                err = self.job.run_test(test, **argv)
+                if err == False:
+                    raise error.TestFail()
+            else:
+                client.run_test(test, check_client_result=True, **argv)
+        except Exception as e:
+            self._sequence_verdicts[test] = False
+            self.postprocess()
+            raise error.TestFail('Sequenced test %s failed, reason: %s' %
+                                 (test, e))
+
+    def surface_client_test_resultsdir(self, test):
+        """
+        surface_client_test_resultsdir retrieves the child test results from a
+        sequenced client job
+
+        @param test: the child test name to grab results from
+        """
+        wrapped_test_results_path = os.path.join(self._wrapper_results_dir,
+                                                 test)
+        tagged_destination = os.path.join(self._results_path,
+                                          self.tagged_testname + "." + test)
+        shutil.move(wrapped_test_results_path, tagged_destination)
+
+    def surface_server_test_resultsdir(self, test):
+        """
+        surface_server_test_resultsdir renames the server test results from a sequenced child
+
+        @param test: the child test name to grab results from
+        """
+        wrapped_test_results_path = os.path.join(self._results_path, test)
+        tagged_destination = os.path.join(self._results_path,
+                                          self.tagged_testname + "." + test)
+        shutil.move(wrapped_test_results_path, tagged_destination)
+
+    def run_once(self, host=None):
+        """
+        run_once implements the run_once call in test.test, is called to begin
+        execution of the test
+
+        @param host: host from control file with which to run the test
+        """
+        client_at = autotest.Autotest(host)
+        for test, argv, server_test in self._sequenced_tests:
+            self.execute_sequenced_test(client_at, test, argv, server_test)
+
+    def postprocess(self):
+        """
+        postprocess is the post routine for test.test. We must add our post_hook
+        in this function (as opposed to the initialize call) because if added in
+        initialize, this will be called after each child server test as well as
+        at the end of the function
+        """
+        self.job.add_post_run_hook(self.process_test_results_post_hook)
diff --git a/server/cros/pvs/test_with_pass_criteria.py b/server/cros/pvs/test_with_pass_criteria.py
new file mode 100644
index 0000000..8958dd8
--- /dev/null
+++ b/server/cros/pvs/test_with_pass_criteria.py
@@ -0,0 +1,253 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import logging
+import json
+
+import common
+from autotest_lib.server import autotest, test
+from autotest_lib.client.common_lib import error
+
+from google.protobuf.text_format import Parse
+
+# run protoc --proto_path=./ pass_criteria.proto --python_out ./
+# with caution for version/upgrade compatibility
+from . import pass_criteria_pb2
+
+
+class test_with_pass_criteria(test.test):
+    """
+    test_with_pass_criteria extends the base test implementation to allow for
+    test result comparison between the performance keyvalues output from a
+    target test, and the input pass_criteria dictionary.
+
+    It can be used to create a domain specific test wrapper such as
+    power_QualTestWrapper.
+    """
+
+    def initialize(self, test_to_wrap):
+        """
+        initialize implements the initialize call in test.test and is called
+        before execution of the test.
+
+        @param test_to_wrap: name of the test to be wrapped, used to locate
+                its results and keyval files.
+        """
+        self._test_prefix = []
+        self._perf_dict = {}
+        self._attr_dict = {}
+        self._results_path = self.job._server_offload_dir_path()
+        self._wrapper_results = self._results_path + self.tagged_testname + '/'
+        logging.debug('...results going to %s', str(self._results_path))
+        self._wrapped_test_results_keyval_path = (self._wrapper_results +
+                                                  test_to_wrap +
+                                                  '/results/keyval')
+        self._wrapped_test_keyval_path = self._wrapper_results + test_to_wrap + '/keyval'
+        self._wrapper_test_keyval_path = self._wrapper_results + 'keyval'
+
+    def _check_wrapped_test_passed(self, test_name):
+        results_path = self._wrapper_results + test_name + ""
+
+    def _load_proto_to_pass_criteria(self):
+        """
+        _load_proto_to_pass_criteria reads one or more textproto files (given
+        as a ':'-separated path string) that describe the pass criteria for
+        the test, and adds each applicable criterion to the pass criteria
+        dictionary.
+        """
+        for textproto in self._textproto_path.split(':'):
+            if not os.path.exists(textproto):
+                raise error.TestFail('provided textproto path ' + textproto +
+                                     ' does not exist')
+
+            logging.info('loading criteria from textproto %s', textproto)
+            with open(textproto) as textpb:
+                textproto_criteria = Parse(textpb.read(),
+                                           pass_criteria_pb2.PassCriteria())
+            for criteria in textproto_criteria.criteria:
+                lower_bound = criteria.lower_bound.bound if (
+                        criteria.HasField('lower_bound')) else None
+                upper_bound = criteria.upper_bound.bound if (
+                        criteria.HasField('upper_bound')) else None
+                if criteria.test_name != self._test_to_wrap and criteria.test_name != '':
+                    logging.info('criteria %s does not apply',
+                                 criteria.name_regex)
+                    continue
+                try:
+                    self._pass_criteria[criteria.name_regex] = (lower_bound,
+                                                                upper_bound)
+                    logging.info('adding criteria %s', criteria.name_regex)
+                except:
+                    raise error.TestFail('invalid pass criteria provided')
+
+    def add_prefix_test(self, test='', prefix_args_dict=None):
+        """
+        add_prefix_test takes a test_name and args_dict for that test.
+        This function allows a user creating a domain specific test wrapper
+        to add any prefix tests that must run prior to execution of the
+        target test.
+
+        @param test: the name of the test to add as a prefix test operation
+        @param prefix_args_dict: the dictionary of args to pass to the test
+        when it is run
+        """
+        if prefix_args_dict is None:
+            prefix_args_dict = {}
+        self._test_prefix.append((test, prefix_args_dict))
+
+    def _print_bounds_error(self, criteria, failed_criteria, value):
+        """
+        _print_bounds_error logs a failed pass criterion, printing the
+        failing keyval and the target range it fell outside of
+
+        @param criteria: the name of the pass criteria to log a failure on
+        @param failed_criteria: the name of the keyval matched by the
+                criteria regex
+        @param value: the actual value of the failing pass criteria
+        """
+        logging.info('criteria %s: %s out of range %s', failed_criteria,
+                     str(value), str(self._pass_criteria[criteria]))
+
+    def _parse_wrapped_results_keyvals(self):
+        """
+        _parse_wrapped_results_keyvals first loads all of the performance
+        and attribute keyvals from the wrapped test, and then copies all of
+        the test_attribute keyvals from that wrapped test into the wrapper.
+        Without these keyvals being copied over, none of the metadata from
+        the client job is captured in the job summary.
+
+        @raises error.TestFail: If any of the respective keyvals are missing
+        """
+        if os.path.exists(self._wrapped_test_results_keyval_path):
+            with open(self._wrapped_test_results_keyval_path
+                      ) as results_keyval_file:
+                keyval_result = results_keyval_file.readline()
+                while keyval_result:
+                    regmatch = re.search(r'(.*){(.*)}=(.*)', keyval_result)
+                    if regmatch is None:
+                        break
+                    key = regmatch.group(1)
+                    which_dict = regmatch.group(2)
+                    value = regmatch.group(3)
+                    if which_dict == 'perf':
+                        self._perf_dict[key] = value
+                    # Always advance to the next line so non-perf keyvals
+                    # cannot stall the loop.
+                    keyval_result = results_keyval_file.readline()
+
+        with open(self._wrapped_test_keyval_path,
+                  'r') as wrapped_test_keyval_file, open(
+                          self._wrapper_test_keyval_path,
+                          'a') as test_keyval_file:
+            for keyval in wrapped_test_keyval_file:
+                test_keyval_file.write(keyval)
+
+    def _find_matching_keyvals(self):
+        for c in self._pass_criteria:
+            self._criteria_to_keyvals[c] = []
+            for key in self._perf_dict.keys():
+                if re.fullmatch(c, key):
+                    logging.info('adding %s as matched key', key)
+                    self._criteria_to_keyvals[c].append(key)
+
+    def _verify_criteria(self):
+        failing_criteria = 0
+        for criteria in self._pass_criteria:
+            logging.info('Checking %s now', criteria)
+            if type(criteria) is not str:
+                criteria = criteria.decode('utf-8')
+            range_spec = self._pass_criteria[criteria]
+
+            for perf_val in self._criteria_to_keyvals[criteria]:
+                logging.info('Checking: %s against %s', str(criteria),
+                             perf_val)
+                actual_value = self._perf_dict[perf_val]
+                logging.info('%s value is %s, spec is %s', perf_val,
+                             float(actual_value), range_spec)
+
+                # range_spec is stored in the dictionary as a
+                # (lower_bound, upper_bound) tuple
+                lower_bound, upper_bound = range_spec
+
+                if lower_bound is not None and not (float(actual_value) >=
+                                                    float(lower_bound)):
+                    failing_criteria = failing_criteria + 1
+                    self._print_bounds_error(criteria, perf_val, actual_value)
+
+                if upper_bound is not None and not (float(actual_value) <
+                                                    float(upper_bound)):
+                    failing_criteria = failing_criteria + 1
+                    self._print_bounds_error(criteria, perf_val, actual_value)
+
+        if failing_criteria > 0:
+            raise error.TestFail(
+                    str(failing_criteria) +
+                    ' criteria failed, see log for detail')
+
+    def run_once(self,
+                 host=None,
+                 test_to_wrap=None,
+                 pdash_note='',
+                 wrap_args={},
+                 pass_criteria={}):
+        """
+        run_once implements the run_once call in test.test, is called to begin
+        execution of the test
+
+        @param host: host from control file with which to run the test
+        @param test_to_wrap: test name to execute in the wrapper
+        @param pdash_note: note to annotate results on the dashboard
+        @param wrap_args: args to pass to the wrapped test execution
+        @param pass_criteria: dictionary of criteria to compare results against
+
+        @raises error.TestFail: on failure of the wrapped tests
+        """
+        logging.debug('running test_with_pass_criteria run_once')
+        logging.debug('with test name %s', str(self.tagged_testname))
+        self._wrap_args = wrap_args
+        self._test_to_wrap = test_to_wrap
+        if self._test_to_wrap is None:
+            raise error.TestFail('No test_to_wrap given')
+
+        if isinstance(pass_criteria, dict):
+            self._pass_criteria = pass_criteria
+        else:
+            logging.info('loading from string dict %s', pass_criteria)
+            self._pass_criteria = json.loads(pass_criteria)
+
+        self._textproto_path = self._pass_criteria.get('textproto_path', None)
+        if self._textproto_path is None:
+            logging.info('not using textproto criteria definitions')
+        else:
+            self._pass_criteria.pop('textproto_path')
+            self._load_proto_to_pass_criteria()
+
+        logging.debug('wrapping test %s', self._test_to_wrap)
+        logging.debug('with wrap args %s', str(self._wrap_args))
+        logging.debug('and pass criteria %s', str(self._pass_criteria))
+        client_at = autotest.Autotest(host)
+
+        for test, argv in self._test_prefix:
+            argv['pdash_note'] = pdash_note
+            try:
+                client_at.run_test(test, check_client_result=True, **argv)
+            except:
+                raise error.TestFail('Prefix test failed, see log for details')
+
+        try:
+            client_at.run_test(self._test_to_wrap,
+                               check_client_result=True,
+                               **self._wrap_args)
+        except:
+            self.postprocess()
+            raise error.TestFail('Wrapped test failed, see log for details')
+
+    def postprocess(self):
+        """
+        postprocess is called after the completion of run_once by the test framework
+
+        @raises error.TestFail: on any pass criteria failure
+        """
+        self._parse_wrapped_results_keyvals()
+        if self._pass_criteria == {}:
+            return
+        self._criteria_to_keyvals = {}
+        self._find_matching_keyvals()
+        self._verify_criteria()
diff --git a/server/cros/pvs/wrapper_job_with_name.py b/server/cros/pvs/wrapper_job_with_name.py
new file mode 100644
index 0000000..1acf9fd
--- /dev/null
+++ b/server/cros/pvs/wrapper_job_with_name.py
@@ -0,0 +1,77 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+PASSCRITERIA_PREFIX = 'passcriteria_'
+
+
+class wrapper_job_with_name():
+    """
+    wrapper_job_with_name wraps the server_job which is passed from the control
+    file. This is used to alter the name of the job, by using the job.record
+    function to represent the results of the internal "wrapped" job. Without
+    the wrapper job, all tests will have the generic wrapper name, regardless
+    of the test that is being wrapped.
+    """
+
+    def __init__(self,
+                 job,
+                 job_name,
+                 wrapper_url,
+                 args_dict,
+                 default_pass_criteria={}):
+        """
+        wrapper_job_with_name wraps the server_job which is passed from the control
+        file. This takes in the necessary parameters to execute that wrapper job.
+
+        @param job: server_job object in the control file
+        @param job_name: the name with which to overwrite the generic name
+        @param wrapper_url: the name of the generic wrapper to call
+        @param args_dict: passed in args_dict from the control file
+        @param default_pass_criteria: the pass criteria to use if none are given
+        """
+        self._job = job
+        self._name = job_name
+        self._wrapper_url = wrapper_url
+        self._pass_criteria = default_pass_criteria
+        self._args_dict = args_dict
+        self._parse_arg_dict_to_criteria()
+        self._args_dict["pass_criteria"] = self._pass_criteria
+
+    def run(self, host, test_to_wrap, wrap_args):
+        """
+        run executes the generic wrapper with the test_to_wrap and wrap_args
+        necessary for that test wrapper, as well as recording the outer job
+        state (which will overwrite the name of the test in results)
+
+        @param host: host from the control file
+        @param test_to_wrap: test name to pass into the generic wrapper
+        @param wrap_args: test args to pass into the generic wrapper
+        """
+        self._job.record('START', None, self._name)
+        if self._job.run_test(
+                self._wrapper_url,
+                host=host,
+                test_to_wrap=test_to_wrap,
+                wrap_args=wrap_args,
+                disable_sysinfo=True,
+                results_path=self._job._server_offload_dir_path(),
+                **self._args_dict):
+            self._job.record('INFO', None, self._name)
+            self._job.record('END GOOD', None, self._name, "")
+        else:
+            self._job.record('INFO', None, self._name)
+            self._job.record('END FAIL', None, self._name, "")
+
+    def _parse_arg_dict_to_criteria(self):
+        """
+        _parse_arg_dict_to_criteria scans self._args_dict (the argv from
+        autoserv parsed into a dict) for items with the prefix passcriteria_*.
+        These are pass criteria values and will be added to the dictionary
+        passed into the wrapper run.
+        """
+        for key in self._args_dict.keys():
+            if key.startswith(PASSCRITERIA_PREFIX):
+                self._pass_criteria[
+                        key[len(PASSCRITERIA_PREFIX):]] = self._args_dict[key]
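+        # e.g. (illustrative): an args_dict entry
+        #   {'passcriteria_minutes_tested': (0, 100)}
+        # becomes pass_criteria {'minutes_tested': (0, 100)}.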
diff --git a/server/cros/queue_barrier.py b/server/cros/queue_barrier.py
deleted file mode 100644
index e292ec2..0000000
--- a/server/cros/queue_barrier.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Lint as: python2, python3
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from multiprocessing import Queue, queues
-from six.moves import range
-
-
-class QueueBarrierTimeout(Exception):
-    """QueueBarrier timeout exception."""
-
-
-class QueueBarrier(object):
-    """This class implements a simple barrier to synchronize processes. The
-    barrier relies on the fact that there a single process "main" and |n|
-    different "nodes" to make the implementation simpler. Also, given this
-    hierarchy, the nodes and the main can exchange a token while passing
-    through the barrier.
-
-    The so called "main" shall call main_barrier() while the "node" shall
-    call the node_barrier() method.
-
-    If the same group of |n| nodes and the same main are participating in the
-    barrier, it is totally safe to reuse the barrier several times with the same
-    group of processes.
-    """
-
-
-    def __init__(self, n):
-        """Initializes the barrier with |n| node processes and a main.
-
-        @param n: The number of node processes."""
-        self.n_ = n
-        self.queue_main_ = Queue()
-        self.queue_node_ = Queue()
-
-
-    def main_barrier(self, token=None, timeout=None):
-        """Makes the main wait until all the "n" nodes have reached this
-        point.
-
-        @param token: A value passed to every node.
-        @param timeout: The timeout, in seconds, to wait for the nodes.
-                A None value will block forever.
-
-        Returns the list of received tokens from the nodes.
-        """
-        # Wait for all the nodes.
-        result = []
-        try:
-            for _ in range(self.n_):
-                result.append(self.queue_main_.get(timeout=timeout))
-        except queues.Empty:
-            # Timeout expired
-            raise QueueBarrierTimeout()
-        # Release all the blocked nodes.
-        for _ in range(self.n_):
-            self.queue_node_.put(token)
-        return result
-
-
-    def node_barrier(self, token=None, timeout=None):
-        """Makes a node wait until all the "n" nodes and the main have
-        reached this point.
-
-        @param token: A value passed to the main.
-        @param timeout: The timeout, in seconds, to wait for the nodes.
-                A None value will block forever.
-        """
-        self.queue_main_.put(token)
-        try:
-            return self.queue_node_.get(timeout=timeout)
-        except queues.Empty:
-            # Timeout expired
-            raise QueueBarrierTimeout()
diff --git a/server/cros/packet_generation/__init__.py b/server/cros/repair/__init__.py
similarity index 100%
copy from server/cros/packet_generation/__init__.py
copy to server/cros/repair/__init__.py
diff --git a/server/cros/repair/common.py b/server/cros/repair/common.py
new file mode 100644
index 0000000..2967b41
--- /dev/null
+++ b/server/cros/repair/common.py
@@ -0,0 +1,14 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Build relative paths for files with root of autotest_lib.
+
+import os, sys
+dirname = os.path.dirname(sys.modules[__name__].__file__)
+autotest_dir = os.path.abspath(os.path.join(dirname, '../../../..'))
+client_dir = os.path.join(autotest_dir, 'client')
+sys.path.insert(0, client_dir)
+import setup_modules
+sys.path.pop(0)
+setup_modules.setup(base_path=autotest_dir, root_module_name='autotest_lib')
diff --git a/server/cros/repair/mac_address_helper.py b/server/cros/repair/mac_address_helper.py
new file mode 100644
index 0000000..2d63d8c
--- /dev/null
+++ b/server/cros/repair/mac_address_helper.py
@@ -0,0 +1,177 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+import re
+
+import common
+from autotest_lib.client.common_lib import error
+
+
+class MacAddressHelper():
+    """Verify and update cached NIC mac address on servo.
+
+    Servo_v4 is plugged into the DUT and provides a NIC for it. We cache the
+    mac address on the servod side for better debugging.
+    """
+
+    # HUB and NIC VID/PID.
+    # Values presented as the string of the hex without 0x to match
+    # representation in sysfs (idVendor/idProduct).
+    HUB_VID = '04b4'
+    HUB_PID = '6502'
+    NIC_VID = '0bda'
+    NIC_PID = '8153'
+
+    # Regex to check mac address format.
+    # eg: f4:f5:e8:50:e9:45
+    RE_MACADDR = re.compile('^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$')
+
+    def is_supported(self, host):
+        """Verify if setup is support cached NIC mac address on servo
+
+        @param host:    CrosHost instance
+        """
+        if not host._servo_host.is_labstation():
+            logging.info('Only servo_v4 has NIC; Skipping the action')
+            return False
+        if not host.servo.has_control('macaddr'):
+            logging.info('"macaddr" control not supported;'
+                         'Skipping the action')
+            return False
+        return True
+
+    def update_if_needed(self, host):
+        """Update the cached NIC mac address on servo
+
+        The process verifies whether the NIC mac address has changed and
+        updates it only if required.
+
+        @param host:    CrosHost instance
+        """
+
+        if not self.is_supported(host):
+            return
+
+        servo = host.servo
+        # Path to the NIC has to be located in the HUB.
+        # eg.
+        # HUB: /sys/bus/usb/devices/1-1
+        # NIC: /sys/bus/usb/devices/1-1.1
+        hub_path = self._get_device_path(host, None, self.HUB_VID,
+                                         self.HUB_PID)
+        if not hub_path or hub_path == '.':
+            raise Exception('The servo_v4 HUB not detected from DUT.')
+        logging.debug('Path to the servo_v4 HUB device: %s', hub_path)
+        nic_path = self._get_device_path(host, hub_path, self.NIC_VID,
+                                         self.NIC_PID)
+        if not nic_path or nic_path == '.':
+            raise Exception('The servo_v4 NIC not detected in HUB folder.')
+        logging.debug('Path to the servo_v4 NIC device: %s', nic_path)
+        if hub_path == nic_path or not nic_path.startswith(hub_path):
+            raise Exception('The servo_v4 NIC was detected outside of the servo_v4 HUB')
+
+        macaddr = self._get_mac_address(host, nic_path)
+        if not macaddr:
+            raise Exception('Failed to extract mac address from host.')
+
+        cached_mac = self._get_cached_mac_address(host)
+        if not cached_mac or macaddr != cached_mac:
+            try:
+                servo.set('macaddr', macaddr)
+                logging.info('Successfully updated the servo "macaddr"!')
+            except error.TestFail as e:
+                logging.debug('Failed to update macaddr value; %s', e)
+                raise Exception('Failed to update the "macaddr" value!')
+        else:
+            logging.info('The servo "macaddr" does not need an update.')
+
+    def _get_cached_mac_address(self, host):
+        """Get NIC mac address from servo cache"""
+        try:
+            return host.servo.get('macaddr')
+        except error.TestFail as e:
+            logging.debug('(Non-critical) Failed to get macaddr: %s', e)
+            return None
+
+    def _get_mac_address(self, host, nic_path):
+        """Get NIC mac address from host
+
+        @param host:        CrosHost instance
+        @param nic_path:    Path to network device on the host
+        """
+        cmd = r'find %s/ | grep /net/ | grep /address' % nic_path
+        res = host.run(cmd,
+                       timeout=30,
+                       ignore_status=True,
+                       ignore_timeout=True)
+        if not res:
+            logging.info('Timeout during retrieving NIC address files.')
+            return None
+        addrs = res.stdout.splitlines()
+        if not addrs or len(addrs) == 0:
+            logging.info('No NIC address file found.')
+            return None
+        if len(addrs) > 1:
+            logging.info('More than one NIC address file found.')
+            return None
+        logging.info('Found NIC address file: %s', addrs[0])
+        cmd = r'cat %s' % addrs[0]
+        res = host.run(cmd,
+                       timeout=30,
+                       ignore_status=True,
+                       ignore_timeout=True)
+        if not res:
+            logging.info('Timeout during attempt to read NIC address file: %s',
+                         addrs[0])
+            return None
+        mac_addr = res.stdout.strip()
+        if not self.RE_MACADDR.match(mac_addr):
+            logging.info('Incorrect format of the mac address: %s', mac_addr)
+            return None
+        logging.info('Servo_v4 NIC mac address from DUT side: %s', mac_addr)
+        return mac_addr
+
+    def _get_device_path(self, host, base_path, vid, pid):
+        """Find a device by VID/PID under particular path.
+
+        1) Get path to the unique idVendor file with VID
+        2) Get path to the unique idProduct file with PID
+        3) Get the directories of both files and compare them
+
+        @param host:        CrosHost instance
+        @param base_path:   Path to the directory in which to look for the device.
+        @param vid:         Vendor ID of the device to look for.
+        @param pid:         Product ID of the device to look for.
+
+        @returns: path to the folder of the device
+        """
+
+        def _run(cmd):
+            res = host.run(cmd,
+                           timeout=30,
+                           ignore_status=True,
+                           ignore_timeout=True)
+            l = res.stdout.splitlines()
+            if not l or len(l) != 1:
+                return None
+            return l[0]
+
+        if not base_path:
+            base_path = '/sys/bus/usb/devices/*/'
+        else:
+            base_path += '*/'
+        cmd_template = 'grep -l %s $(find %s -maxdepth 1 -name %s)'
+        vid_path = _run(cmd_template % (vid, base_path, 'idVendor'))
+        if not vid_path:
+            return None
+
+        pid_path = _run(cmd_template % (pid, base_path, 'idProduct'))
+        if not pid_path:
+            return None
+
+        # check if both files are located in the same folder
+        return _run('LC_ALL=C comm -12 <(dirname %s) <(dirname %s)' %
+                    (vid_path, pid_path))
diff --git a/server/cros/resource_monitor_unittest.py b/server/cros/resource_monitor_unittest.py
index df03968..4196885 100644
--- a/server/cros/resource_monitor_unittest.py
+++ b/server/cros/resource_monitor_unittest.py
@@ -264,7 +264,7 @@
         """
         parsed_results = resource_monitor.ResourceMonitorParsedResult(
                 testdata_file)
-        with open(testans_file, "rb") as testans:
+        with open(testans_file, "r") as testans:
             csvreader = csv.reader(testans)
             columns = next(csvreader)
             self.assertEqual(list(columns),
diff --git a/server/cros/servo/chrome_base_ec.py b/server/cros/servo/chrome_base_ec.py
index 9f6351a..0f4e939 100644
--- a/server/cros/servo/chrome_base_ec.py
+++ b/server/cros/servo/chrome_base_ec.py
@@ -115,5 +115,5 @@
     if base_board:
         return ChromeBaseEC(servo, base_board)
     else:
-        logging.warn('No Base EC found on the servo board')
+        logging.warning('No Base EC found on the servo board')
         return None
diff --git a/server/cros/servo/chrome_cr50.py b/server/cros/servo/chrome_cr50.py
index bbb99c6..ec76dde 100644
--- a/server/cros/servo/chrome_cr50.py
+++ b/server/cros/servo/chrome_cr50.py
@@ -83,7 +83,7 @@
     GETTIME = ['= (\S+)']
     FWMP_LOCKED_PROD = ["Managed device console can't be unlocked"]
     FWMP_LOCKED_DBG = ['Ignoring FWMP unlock setting']
-    MAX_RETRY_COUNT = 5
+    MAX_RETRY_COUNT = 10
     CCDSTATE_MAX_RETRY_COUNT = 20
     START_STR = ['((Havn|UART).*Console is enabled;)']
     REBOOT_DELAY_WITH_CCD = 60
@@ -95,15 +95,16 @@
     CAP_SETTING = 1
     CAP_REQ = 2
     GET_CAP_TRIES = 20
+    CAP_ALWAYS = 'Always'
     # Regex to match the valid capability settings.
-    CAP_STATES = '(Always|Default|IfOpened|UnlessLocked)'
+    CAP_STATES = '(%s|Default|IfOpened|UnlessLocked)' % CAP_ALWAYS
     # List of all cr50 ccd capabilities. Same order of 'ccd' output
     CAP_NAMES = [
-        'UartGscRxAPTx', 'UartGscTxAPRx', 'UartGscRxECTx', 'UartGscTxECRx',
-        'FlashAP', 'FlashEC', 'OverrideWP', 'RebootECAP', 'GscFullConsole',
-        'UnlockNoReboot', 'UnlockNoShortPP', 'OpenNoTPMWipe', 'OpenNoLongPP',
-        'BatteryBypassPP', 'UpdateNoTPMWipe', 'I2C', 'FlashRead',
-        'OpenNoDevMode', 'OpenFromUSB', 'OverrideBatt'
+            'UartGscRxAPTx', 'UartGscTxAPRx', 'UartGscRxECTx', 'UartGscTxECRx',
+            'FlashAP', 'FlashEC', 'OverrideWP', 'RebootECAP', 'GscFullConsole',
+            'UnlockNoReboot', 'UnlockNoShortPP', 'OpenNoTPMWipe',
+            'OpenNoLongPP', 'BatteryBypassPP', '(UpdateNoTPMWipe|Unused)',
+            'I2C', 'FlashRead', 'OpenNoDevMode', 'OpenFromUSB', 'OverrideBatt'
     ]
     # There are two capability formats. Match both.
     #  UartGscRxECTx   Y 3=IfOpened
@@ -131,26 +132,29 @@
 
     # CR50 Board Properties as defined in platform/ec/board/cr50/scratch-reg1.h
     BOARD_PROP = {
-           'BOARD_SLAVE_CONFIG_SPI'      : 1 << 0,
-           'BOARD_SLAVE_CONFIG_I2C'      : 1 << 1,
-           'BOARD_NEEDS_SYS_RST_PULL_UP' : 1 << 5,
-           'BOARD_USE_PLT_RESET'         : 1 << 6,
-           'BOARD_WP_ASSERTED'           : 1 << 8,
-           'BOARD_FORCING_WP'            : 1 << 9,
-           'BOARD_NO_RO_UART'            : 1 << 10,
-           'BOARD_CCD_STATE_MASK'        : 3 << 11,
-           'BOARD_DEEP_SLEEP_DISABLED'   : 1 << 13,
-           'BOARD_DETECT_AP_WITH_UART'   : 1 << 14,
-           'BOARD_ITE_EC_SYNC_NEEDED'    : 1 << 15,
-           'BOARD_WP_DISABLE_DELAY'      : 1 << 16,
-           'BOARD_CLOSED_SOURCE_SET1'    : 1 << 17,
-           'BOARD_CLOSED_LOOP_RESET'     : 1 << 18,
-           'BOARD_NO_INA_SUPPORT'        : 1 << 19,
-           'BOARD_ALLOW_CHANGE_TPM_MODE' : 1 << 20,
-           'BOARD_EC_CR50_COMM_SUPPORT'  : 1 << 21,
-           'BOARD_CCD_REC_LID_PIN_DIOA1' : 0x01 << 22,
-           'BOARD_CCD_REC_LID_PIN_DIOA9' : 0x02 << 22,
-           'BOARD_CCD_REC_LID_PIN_DIOA12': 0x03 << 22,
+            'BOARD_PERIPH_CONFIG_SPI': (1 << 0, None),
+            'BOARD_PERIPH_CONFIG_I2C': (1 << 1, None),
+            'BOARD_NEEDS_SYS_RST_PULL_UP': (1 << 5, None),
+            'BOARD_USE_PLT_RESET': (1 << 6, None),
+            'BOARD_WP_ASSERTED': (1 << 8, None),
+            'BOARD_FORCING_WP': (1 << 9, None),
+            'BOARD_NO_RO_UART': (1 << 10, None),
+            'BOARD_CCD_UNLOCKED': (1 << 11, 3 << 11),
+            'BOARD_CCD_OPENED': (2 << 11, 3 << 11),
+            'BOARD_DEEP_SLEEP_DISABLED': (1 << 13, None),
+            'BOARD_DETECT_AP_WITH_UART': (1 << 14, None),
+            'BOARD_ITE_EC_SYNC_NEEDED': (1 << 15, None),
+            'BOARD_WP_DISABLE_DELAY': (1 << 16, None),
+            'BOARD_CLOSED_SOURCE_SET1': (1 << 17, None),
+            'BOARD_CLOSED_LOOP_RESET': (1 << 18, None),
+            'BOARD_NO_INA_SUPPORT': (1 << 19, None),
+            'BOARD_ALLOW_CHANGE_TPM_MODE': (1 << 20, None),
+            'BOARD_EC_CR50_COMM_SUPPORT': (1 << 21, None),
+            'BOARD_CCD_REC_LID_PIN_DIOA1': (1 << 22, 3 << 22),
+            'BOARD_CCD_REC_LID_PIN_DIOA9': (2 << 22, 3 << 22),
+            'BOARD_CCD_REC_LID_PIN_DIOA12': (3 << 22, 3 << 22)
     }
 
     # CR50 reset flags as defined in platform ec_commands.h. These are only the
@@ -169,6 +173,38 @@
            'RESET_FLAG_RBOX'             : 1 << 16,
            'RESET_FLAG_SECURITY'         : 1 << 17,
     }
+    FIPS_RE = r' ([^ ]*)approved.*allowed: (1|0)'
+    # CCD Capabilities used for c2d2 control drivers.
+    SERVO_DRV_CAPS = ['OverrideWP', 'GscFullConsole', 'RebootECAP']
+    # Cr50 may have flash operation errors during the test. Here's an example
+    # of one error message.
+    # do_flash_op:245 errors 20 fsh_pe_control 40720004
+    # The stuff after the ':' may change, but all flash operation errors
+    # contain do_flash_op. do_flash_op is only ever printed if there is an
+    # error during the flash operation. Just search for do_flash_op to simplify
+    # the search string and make it applicable to all flash op errors.
+    FLASH_OP_ERROR_MSG = 'do_flash_op'
+    # USB issues may show up with the timer sof calibration overflow interrupt.
+    # Count these during cleanup.
+    USB_ERROR = 'timer_sof_calibration_overflow_int'
+    # Message printed during watchdog reset.
+    WATCHDOG_RST = 'WATCHDOG PC'
+    # ===============================================================
+    # AP_RO strings
+    # Cr50 only supports v1
+    AP_RO_VERSIONS = [1]
+    AP_RO_HASH_RE = r'sha256 (hash) ([0-9a-f]{64})'
+    AP_RO_UNSUPPORTED_UNPROGRAMMED = 'RO verification not programmed'
+    AP_RO_UNSUPPORTED_BID_BLOCKED = 'BID blocked'
+    AP_RO_REASON_RE = r'(ap_ro_check_unsupported): (.*)\]'
+    AP_RO_RESULT_RE = r'(result)\s*: (\d)'
+    AP_RO_SUPPORTED_RE = r'(supported)\s*: (yes|no)'
+    AP_RO_UNSUPPORTED_OUTPUT = [
+            AP_RO_REASON_RE, AP_RO_RESULT_RE, AP_RO_SUPPORTED_RE
+    ]
+    AP_RO_SAVED_OUTPUT = [AP_RO_RESULT_RE, AP_RO_SUPPORTED_RE, AP_RO_HASH_RE]
+
+    # ===============================================================
 
     def __init__(self, servo, faft_config):
         """Initializes a ChromeCr50 object.
@@ -179,7 +215,6 @@
         super(ChromeCr50, self).__init__(servo, 'cr50_uart')
         self.faft_config = faft_config
 
-
     def wake_cr50(self):
         """Wake up cr50 by sending some linebreaks and wait for the response"""
         for i in range(self.MAX_RETRY_COUNT):
@@ -198,7 +233,7 @@
         """Send command through UART.
 
         Cr50 will drop characters input to the UART when it resumes from sleep.
-        If servo is not using ccd, send some dummy characters before sending the
+        If servo is not using ccd, send some characters before sending the
         real command to make sure cr50 is awake.
 
         @param commands: the command string to send to cr50
@@ -394,7 +429,7 @@
         """Send command through UART and wait for response.
 
         Cr50 will drop characters input to the UART when it resumes from sleep.
-        If servo is not using ccd, send some dummy characters before sending the
+        If servo is not using ccd, send some characters before sending the
         real command to make sure cr50 is awake.
 
         @param command: the command to send
@@ -503,14 +538,18 @@
         @param prop_name: a property name in string type.
         """
         brdprop = self.get_board_properties()
-        prop = self.BOARD_PROP[prop_name]
-        return (brdprop & prop) == prop
+        (prop, mask) = self.BOARD_PROP[prop_name]
+        # Use the board property value for the mask if no mask is given.
+        mask = mask or prop
+        return (brdprop & mask) == prop
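A minimal standalone sketch of the (value, mask) scheme introduced above: single-bit properties carry no mask and use their own value, while multi-bit fields such as the CCD state and the recovery-lid pin carry an explicit mask so only that field is compared. The property subset and the brdprop value below are illustrative, not read from a device.

# Illustrative subset of the BOARD_PROP table above.
BOARD_PROP = {
        'BOARD_WP_ASSERTED': (1 << 8, None),
        'BOARD_CCD_UNLOCKED': (1 << 11, 3 << 11),
        'BOARD_CCD_OPENED': (2 << 11, 3 << 11),
}

def board_prop_enabled(brdprop, prop_name):
    prop, mask = BOARD_PROP[prop_name]
    # Use the property value itself as the mask when none is given.
    mask = mask or prop
    return (brdprop & mask) == prop

# Hypothetical register value: CCD opened (field value 2) and WP asserted.
brdprop = (2 << 11) | (1 << 8)
assert board_prop_enabled(brdprop, 'BOARD_CCD_OPENED')
assert not board_prop_enabled(brdprop, 'BOARD_CCD_UNLOCKED')
assert board_prop_enabled(brdprop, 'BOARD_WP_ASSERTED')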
 
 
     def has_command(self, cmd):
         """Returns 1 if cr50 has the command 0 if it doesn't"""
         try:
-            self.send_safe_command_get_output('help', [cmd])
+            self.send_command_retry_get_output('help', [cmd],
+                                               safe=True,
+                                               retries=3)
         except:
             logging.info("Image does not include '%s' command", cmd)
             return 0
@@ -930,6 +969,7 @@
                         ensure_ap_on=ap_is_on)
 
         if level != self.get_ccd_level():
+            self.check_for_console_errors('Running console ccd %s' % level)
             raise error.TestFail('Could not set privilege level to %s' % level)
 
         logging.info('Successfully set CCD privilege level to %s', level)
@@ -1082,7 +1122,12 @@
             line = line.strip()
             if ':' in line:
                 k, v = line.split(':', 1)
-                ccdstate[k.strip()] = v.strip()
+                k = k.strip()
+                v = v.strip()
+                if '(' in v:
+                    ccdstate[k + ' full'] = v
+                    v = v.split('(')[0].strip()
+                ccdstate[k] = v
         logging.info('Current CCD state:\n%s', pprint.pformat(ccdstate))
         return ccdstate
 
@@ -1093,9 +1138,9 @@
         @return: True if the AP is on; False otherwise.
         """
         ap_state = self.get_ccdstate()['AP']
-        if ap_state == 'on':
+        if ap_state.lower() == 'on':
             return True
-        elif ap_state == 'off':
+        elif ap_state.lower() == 'off':
             return False
         else:
             raise error.TestFail('Read unusable AP state from ccdstate: %r' %
@@ -1229,7 +1274,7 @@
 
     def check_servo_monitor(self):
         """Returns True if cr50 can detect servo connect/disconnect"""
-        orig_dts = self._servo.get('servo_v4_dts_mode')
+        orig_dts = self._servo.get('servo_dts_mode')
         # Detach ccd so EC uart won't interfere with servo detection
         self._servo.set_dts_mode('off')
         self._servo.set('ec_uart_en', 'off')
@@ -1245,3 +1290,147 @@
             return False
         self._servo.set_dts_mode(orig_dts)
         return True
+
+    def fips_crypto_allowed(self):
+        """Return 1 if fips crypto is enabled."""
+        if not self.has_command('fips'):
+            return 0
+
+        rv = self.send_command_retry_get_output('fips', [self.FIPS_RE])
+        logging.info('FIPS: %r', rv)
+        _, approved, allowed = rv[0]
+        if int(approved == '') != int(allowed):
+            raise error.TestFail('Approved does not match allowed %r' % rv)
+        return int(allowed)
+
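To make the int(approved == '') check in fips_crypto_allowed() easier to follow, here is a small sketch of how FIPS_RE splits a console line into its two groups; the sample lines are an assumed output format, not captured from hardware.

import re

FIPS_RE = r' ([^ ]*)approved.*allowed: (1|0)'

# Assumed example console lines, for illustration only.
for line in ('FIPS mode approved, crypto allowed: 1',
             'FIPS mode not-approved, crypto allowed: 0'):
    approved, allowed = re.search(FIPS_RE, line).groups()
    # Group 1 is whatever immediately precedes "approved": empty means the
    # word was exactly "approved", any prefix (e.g. "not-") means it was not.
    print(int(approved == ''), int(allowed))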
+    def unlock_is_supported(self):
+        """Returns True if GSC supports the ccd unlock state."""
+        return True
+
+    def cap_is_always_on(self, cap):
+        """Returns True if the capability is set to Always"""
+        rv = self.send_command_retry_get_output('ccd',
+                                                [cap + self.CAP_FORMAT])[0]
+        # The third field could be Default or "Always". If it's Default,
+        # "Always" must show up in the fourth field.
+        return self.CAP_ALWAYS in rv[2] or self.CAP_ALWAYS in rv[3]
+
+    def servo_drv_enabled(self):
+        """Check if the caps  are accessible on boards wigh gsc controls."""
+        if not self._servo.main_device_uses_gsc_drv():
+            return True
+        for cap in self.SERVO_DRV_CAPS:
+            # If any capability isn't accessible, return False.
+            if not self.cap_is_always_on(cap):
+                return False
+        return True
+
+    def enable_servo_control_caps(self):
+        """Set all servo control capabilities to Always."""
+        # Nothing to do if servo doesn't use gsc for any controls.
+        if not self._servo.main_device_uses_gsc_drv():
+            return
+        logging.info('Setting servo caps to Always')
+        self.send_command('ccd testlab open')
+        for cap in self.SERVO_DRV_CAPS:
+            self.send_command('ccd set %s Always' % cap)
+        return self.servo_drv_enabled()
+
+    def ccd_reset_factory(self):
+        """Enable factory mode."""
+        self.send_command('ccd reset factory')
+
+    def ccd_reset(self, servo_en=True):
+        """Reset ccd capabilities."""
+        servo_uses_gsc = self._servo.main_device_uses_gsc_drv()
+        # If testlab mode is enabled, capabilities can be restored. It's
+        # ok to reset ccd.
+        if not servo_en and servo_uses_gsc and not self.testlab_is_on():
+            raise error.TestError(
+                    'Board uses ccd drivers. Enable testlab mode '
+                    'before ccd reset')
+        self.send_command('ccd reset')
+        if servo_en:
+            self.enable_servo_control_caps()
+
+    def check_for_console_errors(self, desc):
+        """Check cr50 uart output for errors.
+
+        Use the logs captured during firmware_test cleanup to check for cr50
+        errors. Flash operation issues aren't obvious unless you check the logs.
+        All flash op errors print do_flash_op and it isn't printed during normal
+        operation. Open the cr50 uart file and count the number of times this is
+        printed. Log the number of errors.
+
+        @param desc: description of the operation that preceded this check,
+                     used in the failure message.
+        """
+        self._servo.record_uart_capture()
+        cr50_uart_file = self._servo.get_uart_logfile('cr50')
+        if not cr50_uart_file:
+            logging.info('There is no cr50 uart file')
+            return
+
+        flash_error_count = 0
+        usb_error_count = 0
+        watchdog_count = 0
+        with open(cr50_uart_file, 'r') as f:
+            for line in f:
+                if self.FLASH_OP_ERROR_MSG in line:
+                    flash_error_count += 1
+                if self.USB_ERROR in line:
+                    usb_error_count += 1
+                if self.WATCHDOG_RST in line:
+                    watchdog_count += 1
+
+        # Log any flash operation errors.
+        logging.info('do_flash_op count: %d', flash_error_count)
+        logging.info('usb error count: %d', usb_error_count)
+        logging.info('watchdog count: %d', watchdog_count)
+        if watchdog_count:
+            raise error.TestFail('Found %r %d times in logs after %s' %
+                                 (self.WATCHDOG_RST, watchdog_count, desc))
+
+    def ap_ro_version_is_supported(self, version):
+        """Returns True if GSC supports the given version."""
+        return version in self.AP_RO_VERSIONS
+
+    def ap_ro_supported(self):
+        """Returns True if the hash is saved and AP RO is supported."""
+        return self.send_command_retry_get_output(
+                'ap_ro_info', [self.AP_RO_SUPPORTED_RE])[0][2] == 'yes'
+
+    def get_ap_ro_info(self):
+        """Returns a dictionary of the AP RO info.
+
+        Get the ap_ro_info output. Convert it to a usable dictionary.
+
+        Returns:
+            A dictionary with the following key value pairs.
+                'reason': String of unsupported reason or None if ap ro is
+                          supported.
+                'hash': 64 char hash or None if it isn't supported.
+                'supported': bool whether AP RO verification is supported.
+                'result': int of the AP RO verification result.
+        """
+        # Cr50 prints different output based on whether ap ro verification is
+        # supported.
+        if self.ap_ro_supported():
+            output = self.AP_RO_SAVED_OUTPUT
+        else:
+            output = self.AP_RO_UNSUPPORTED_OUTPUT
+        # The reason and hash output is optional. Make sure it's in the
+        # dictionary even if it isn't in the output.
+        info = {'hash': None, 'reason': None}
+        rv = self.send_command_retry_get_output('ap_ro_info',
+                                                output,
+                                                compare_output=True)
+        for _, k, v in rv:
+            # Make key more usable.
+            if k == 'ap_ro_check_unsupported':
+                k = 'reason'
+            # Convert digit strings to ints
+            if v.isdigit():
+                v = int(v)
+            # Convert yes or no to bool
+            if k == 'supported':
+                v = v == 'yes'
+            info[k] = v
+        return info
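A short usage sketch for the new get_ap_ro_info() helper; the wrapper function below is hypothetical, but the dictionary keys are the ones documented in the docstring above.

import logging

def log_ap_ro_state(gsc):
    """Log AP RO verification state from a ChromeCr50/ChromeTi50 object."""
    info = gsc.get_ap_ro_info()
    if not info['supported']:
        logging.info('AP RO verification unsupported: %s', info['reason'])
    else:
        logging.info('AP RO hash %s (result %s)', info['hash'], info['result'])
    return info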
diff --git a/server/cros/servo/chrome_ec.py b/server/cros/servo/chrome_ec.py
index 4a12609..4838bb4 100644
--- a/server/cros/servo/chrome_ec.py
+++ b/server/cros/servo/chrome_ec.py
@@ -6,6 +6,7 @@
 import logging
 import re
 import time
+from xml.parsers import expat
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros import ec
@@ -175,7 +176,7 @@
             return False
         return result is not None
 
-    def send_command_get_output(self, command, regexp_list):
+    def send_command_get_output(self, command, regexp_list, retries=1):
         """Send command through UART and wait for response.
 
         This function waits for response message matching regular expressions.
@@ -204,12 +205,19 @@
             raise error.TestError('Argument regexp_list is not a list: %s' %
                                   str(regexp_list))
 
-        self.set_uart_regexp(str(regexp_list))
-        self._servo.set_nocheck(self.uart_cmd, command)
-        rv = ast.literal_eval(self._servo.get(self.uart_cmd))
-        self.clear_uart_regex()
-
-        return rv
+        while retries > 0:
+            retries -= 1
+            try:
+                self.set_uart_regexp(str(regexp_list))
+                self._servo.set_nocheck(self.uart_cmd, command)
+                return ast.literal_eval(self._servo.get(self.uart_cmd))
+            except (servo.UnresponsiveConsoleError,
+                    servo.ResponsiveConsoleError, expat.ExpatError) as e:
+                if retries <= 0:
+                    raise
+                logging.warning('Failed to send EC cmd. %s', e)
+            finally:
+                self.clear_uart_regex()
 
 
     def is_dfp(self, port=0):
@@ -223,15 +231,21 @@
           False: if EC is not DFP
         """
         is_dfp = None
+        ret = None
         try:
-            # After reboot, EC should be UFP, but workaround in servod
-            # can perform PD Data Swap in workaroud so check that
-            ret = self.send_command_get_output("pd %d state" % port, ["DFP"])
+            ret = self.send_command_get_output("pd %d state" % port,
+                                               ["DFP.*Flag"])
             is_dfp = True
         except Exception as e:
-            # EC is UFP
             is_dfp = False
 
+        # For TCPMv1, after disconnecting a device the data state remains
+        # the same, so even when the pd state shows DFP, make sure the device
+        # is not disconnected.
+        if is_dfp:
+            if "DRP_AUTO_TOGGLE" in ret[0] or "DISCONNECTED" in ret[0]:
+                is_dfp = False
+
         return is_dfp
 
 
@@ -243,6 +257,9 @@
     This class is to abstract these interfaces.
     """
 
+    # The dict to cache the battery information
+    BATTERY_INFO = {}
+
     def __init__(self, servo, name="ec_uart"):
         super(ChromeEC, self).__init__(servo, name)
 
@@ -378,10 +395,12 @@
            Additionally, can be used to verify if EC console is available.
         """
         self.send_command("chan 0")
-        expected_output = ["Chip:\s+([^\r\n]*)\r\n",
-                           "RO:\s+([^\r\n]*)\r\n",
-                           "RW_?[AB]?:\s+([^\r\n]*)\r\n",
-                           "Build:\s+([^\r\n]*)\r\n"]
+        # Use "[ \t]" here and not \s because sometimes the version is blank,
+        # i.e. 'RO:   \r\n' which matches RO:\s+
+        expected_output = [
+                "Chip:[ \t]+([^\r\n]*)\r\n", "RO:[ \t]+([^\r\n]*)\r\n",
+                "RW_?[AB]?:[ \t]+([^\r\n]*)\r\n", "Build:[ \t]+([^\r\n]*)\r\n"
+        ]
         l = self.send_command_get_output("version", expected_output)
         self.send_command("chan 0xffffffff")
         return l
@@ -425,13 +444,117 @@
         try:
             result = self.send_command_get_output('feat', [regexp])
         except servo.ResponsiveConsoleError as e:
-            logging.warn("feat command is not available: %s", str(e))
+            logging.warning("feat command is not available: %s", str(e))
             return False
 
         feat_bitmap = int(result[0][1], 16)
 
         return ((1 << (feat_id - feat_start)) & feat_bitmap) != 0
 
+    def update_battery_info(self):
+        """Get the battery info we care for this test."""
+        # The battery parameters we care for this test. The order must match
+        # the output of EC battery command.
+        battery_params = [
+                'V', 'V-desired', 'I', 'I-desired', 'Charging', 'Remaining'
+        ]
+        regex_str_list = []
+
+        for p in battery_params:
+            if p == 'Remaining':
+                regex_str_list.append(p + ':\s+(\d+)\s+')
+            elif p == 'Charging':
+                regex_str_list.append(p + r':\s+(Not Allowed|Allowed)\s+')
+            else:
+                regex_str_list.append(p +
+                                      r':\s+0x[0-9a-f]*\s+=\s+([0-9-]+)\s+')
+
+        # For unknown reasons, servod doesn't always capture the ec
+        # command output. It doesn't happen often, but retry if it does.
+        retries = 3
+        while retries > 0:
+            retries -= 1
+            try:
+                battery_regex_match = self.send_command_get_output(
+                        'battery', regex_str_list)
+                break
+            except (servo.UnresponsiveConsoleError,
+                    servo.ResponsiveConsoleError) as e:
+                if retries <= 0:
+                    raise
+                logging.warning('Failed to get battery status. %s', e)
+        else:
+            battery_regex_match = self.send_command_get_output(
+                    'battery', regex_str_list)
+
+        for i in range(len(battery_params)):
+            if battery_params[i] == 'Charging':
+                self.BATTERY_INFO[
+                        battery_params[i]] = battery_regex_match[i][1]
+            else:
+                self.BATTERY_INFO[battery_params[i]] = int(
+                        battery_regex_match[i][1])
+        logging.debug('Battery info: %s', self.BATTERY_INFO)
+
+    def get_battery_desired_voltage(self, print_result=True):
+        """Get battery desired voltage value."""
+        if not self.BATTERY_INFO:
+            self.update_battery_info()
+        if print_result:
+            logging.info('Battery desired voltage = %d mV',
+                         self.BATTERY_INFO['V-desired'])
+        return self.BATTERY_INFO['V-desired']
+
+    def get_battery_desired_current(self, print_result=True):
+        """Get battery desired current value."""
+        if not self.BATTERY_INFO:
+            self.update_battery_info()
+        if print_result:
+            logging.info('Battery desired current = %d mA',
+                         self.BATTERY_INFO['I-desired'])
+        return self.BATTERY_INFO['I-desired']
+
+    def get_battery_actual_voltage(self, print_result=True):
+        """Get the actual voltage from charger to battery."""
+        if not self.BATTERY_INFO:
+            self.update_battery_info()
+        if print_result:
+            logging.info('Battery actual voltage = %d mV',
+                         self.BATTERY_INFO['V'])
+        return self.BATTERY_INFO['V']
+
+    def get_battery_actual_current(self, print_result=True):
+        """Get the actual current from charger to battery."""
+        if not self.BATTERY_INFO:
+            self.update_battery_info()
+        if print_result:
+            logging.info('Battery actual current = %d mA',
+                         self.BATTERY_INFO['I'])
+        return self.BATTERY_INFO['I']
+
+    def get_battery_remaining(self, print_result=True):
+        """Get battery charge remaining in mAh."""
+        if not self.BATTERY_INFO:
+            self.update_battery_info()
+        if print_result:
+            logging.info("Battery charge remaining = %d mAh",
+                         self.BATTERY_INFO['Remaining'])
+        return self.BATTERY_INFO['Remaining']
+
+    def get_battery_charging_allowed(self, print_result=True):
+        """Get the battery charging state.
+
+        Returns True if charging is allowed.
+        """
+        if not self.BATTERY_INFO:
+            self.update_battery_info()
+        if print_result:
+            logging.info("Battery charging = %s",
+                         self.BATTERY_INFO['Charging'])
+        if self.BATTERY_INFO['Charging'] == 'Allowed':
+            return True
+        return False
+
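A sketch of how the regular expressions built in update_battery_info() pull values out of the EC 'battery' console output. The sample output and parameter subset below are assumed, for illustration only.

import re

SAMPLE = '''
V:         0x2f9c = 12188 mV
V-desired: 0x3390 = 13200 mV
Charging:  Allowed
Remaining: 2733 mAh
'''

patterns = {
        'V': r'V:\s+0x[0-9a-f]*\s+=\s+([0-9-]+)\s+',
        'V-desired': r'V-desired:\s+0x[0-9a-f]*\s+=\s+([0-9-]+)\s+',
        'Charging': r'Charging:\s+(Not Allowed|Allowed)\s+',
        'Remaining': r'Remaining:\s+(\d+)\s+',
}
for name, pattern in patterns.items():
    # The hex register value and the unit are discarded; only the decimal
    # reading (or the Allowed/Not Allowed string) is captured.
    print(name, re.search(pattern, SAMPLE).group(1))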
 
 class ChromeUSBPD(ChromeEC):
     """Manages control of a Chrome USBPD.
diff --git a/server/cros/servo/chrome_ti50.py b/server/cros/servo/chrome_ti50.py
new file mode 100644
index 0000000..e123198
--- /dev/null
+++ b/server/cros/servo/chrome_ti50.py
@@ -0,0 +1,79 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server.cros.servo import chrome_cr50
+from autotest_lib.client.common_lib import error
+
+
+class ChromeTi50(chrome_cr50.ChromeCr50):
+    """Manages control of a Chrome Ti50.
+
+    We control the Chrome Ti50 via the console of a Servo board. Chrome Ti50
+    provides many interfaces to set and get its behavior via console commands.
+    This class is to abstract these interfaces.
+    """
+
+    WAKE_RESPONSE = ['(>|ti50_common)']
+    START_STR = ['ti50_common']
+
+    # List of all ti50 ccd capabilities, in the same order as the 'ccd'
+    # output. This is not the same as the cr50 list.
+    CAP_NAMES = [
+            'UartGscRxAPTx', 'UartGscTxAPRx', 'UartGscRxECTx', 'UartGscTxECRx',
+            'UartGscRxFpmcuTx', 'UartGscTxFpmcuRx', 'FlashAP', 'FlashEC',
+            'OverrideWP', 'RebootECAP', 'GscFullConsole', 'UnlockNoReboot',
+            'UnlockNoShortPP', 'OpenNoTPMWipe', 'OpenNoLongPP',
+            'BatteryBypassPP', 'I2C', 'FlashRead', 'OpenNoDevMode',
+            'OpenFromUSB', 'OverrideBatt'
+    ]
+    # Ti50 only supports v2
+    AP_RO_VERSIONS = [2]
+
+    def __init__(self, servo, faft_config):
+        """Initializes a ChromeCr50 object.
+
+        @param servo: A servo object.
+        @param faft_config: A faft config object.
+        """
+        super(ChromeTi50, self).__init__(servo, 'cr50_uart')
+        self.faft_config = faft_config
+        # Update CCD_FORMAT to use ti50 version of CAP_NAMES.
+        self.CCD_FORMAT['Capabilities'] = \
+            '(Capabilities:.*(?P<Capabilities>%s))' % \
+            (self.CAP_FORMAT.join(self.CAP_NAMES) + self.CAP_FORMAT)
+
+    def set_ccd_level(self, level, password=''):
+        if level == 'unlock':
+            raise error.TestError(
+                "Ti50 does not support privilege level unlock")
+        super(ChromeTi50, self).set_ccd_level(level, password)
+
+    def unlock_is_supported(self):
+        return False
+
+    def check_boot_mode(self, mode_exp='NORMAL'):
+        """Query the Ti50 boot mode, and compare it against mode_exp.
+
+        Args:
+            mode_exp: expected boot mode. It should be either 'NORMAL'
+                      or 'NO_BOOT'.
+        Returns:
+            True if the boot mode matches mode_exp.
+            False, otherwise.
+        Raises:
+            TestError: Input parameter is not valid.
+        """
+
+        # Ti50 implements EFS 2.1, Cr50 implements EFS 2.0. This means
+        # 'NORMAL' is renamed to 'VERIFIED'. Ti50 also changes the case.
+        rv = self.send_command_retry_get_output('ec_comm',
+                [r'boot_mode\s*:\s*(Verified|NoBoot)'], safe=True)
+        if mode_exp == 'NORMAL':
+            return rv[0][1] == 'Verified'
+        elif mode_exp == 'NO_BOOT':
+            return rv[0][1] == 'NoBoot'
+        else:
+            raise error.TestError('parameter, mode_exp is not valid: %s' %
+                                  mode_exp)
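The CCD_FORMAT override in __init__ above interleaves every ti50 capability name with the per-capability pattern inherited from the base class. A simplified sketch of that string construction, using an abbreviated name list and a placeholder CAP_FORMAT (the real pattern lives in chrome_cr50.ChromeCr50):

CAP_NAMES = ['UartGscRxAPTx', 'UartGscTxAPRx', 'FlashAP']  # abbreviated
CAP_FORMAT = r'\s+\S+\s*'  # placeholder, not the real chrome_cr50 value

capabilities_re = ('(Capabilities:.*(?P<Capabilities>%s))' %
                   (CAP_FORMAT.join(CAP_NAMES) + CAP_FORMAT))
# Every name is followed by CAP_FORMAT, so the named group spans the whole
# per-capability block of the 'ccd' console output.
print(capabilities_re)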
diff --git a/server/cros/servo/firmware_programmer.py b/server/cros/servo/firmware_programmer.py
index c32dc21..8f161a5 100644
--- a/server/cros/servo/firmware_programmer.py
+++ b/server/cros/servo/firmware_programmer.py
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-"""A utility to program Chrome OS devices' firmware using servo.
+"""A utility to program ChromeOS devices' firmware using servo.
 
 This utility expects the DUT to be connected to a servo device. This allows us
 to put the DUT into the required state and to actually program the DUT's
@@ -75,7 +75,7 @@
             # We should reinstate this exception once the programmer is working
             # to indicate the missing utilities earlier in the test cycle.
             # Bug chromium:371011 filed to track this.
-            logging.warn("Ignoring exception when verify required bins : %s",
+            logging.warning("Ignoring exception when verify required bins : %s",
                          ' '.join(req_list))
 
 
@@ -87,7 +87,7 @@
             try:
                 present = self._servo.get(key)
             except error.TestFail:
-                logging.warn('Missing servo control: %s', key)
+                logging.warning('Missing servo control: %s', key)
                 continue
             if present == 'not_applicable':
                 # control has no bearing in this servo config so ignore it.
@@ -173,13 +173,13 @@
                     programmer += ',serial=%s' % servo_serial
             elif self._servo_version == 'servo_v3':
                 programmer = servo_v3_programmer
-            elif self._servo_version == 'servo_v4_with_servo_micro':
+            elif 'with_servo_micro' in self._servo_version:
                 # When a uServo is connected to a DUT with CCD support, the
                 # firmware programmer will always use the uServo to program.
                 servo_micro_serial = self._servo_serials.get('servo_micro')
                 programmer = servo_v4_with_micro_programmer
                 programmer += ':serial=%s' % servo_micro_serial
-            elif self._servo_version == 'servo_v4_with_ccd_cr50':
+            elif 'with_ccd' in self._servo_version:
                 ccd_serial = self._servo_serials.get('ccd')
                 programmer = servo_v4_with_ccd_programmer
                 programmer += ',serial=%s' % ccd_serial
@@ -240,7 +240,7 @@
         self._servo_version = self._servo.get_servo_version(active=True)
 
         # CCD takes care holding AP/EC. Don't need the following steps.
-        if self._servo_version != 'servo_v4_with_ccd_cr50':
+        if 'with_ccd' not in self._servo_version:
             faft_config = FAFTConfig(self._servo.get_board())
             self._servo_prog_state_delay = faft_config.servo_prog_state_delay
             self._servo_prog_state = (
diff --git a/server/cros/packet_generation/__init__.py b/server/cros/servo/keyboard/__init__.py
similarity index 100%
copy from server/cros/packet_generation/__init__.py
copy to server/cros/servo/keyboard/__init__.py
diff --git a/server/cros/servo/keyboard/common.py b/server/cros/servo/keyboard/common.py
new file mode 100644
index 0000000..2967b41
--- /dev/null
+++ b/server/cros/servo/keyboard/common.py
@@ -0,0 +1,14 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Build relative paths for files with root of autotest_lib.
+
+import os, sys
+dirname = os.path.dirname(sys.modules[__name__].__file__)
+autotest_dir = os.path.abspath(os.path.join(dirname, '../../../..'))
+client_dir = os.path.join(autotest_dir, 'client')
+sys.path.insert(0, client_dir)
+import setup_modules
+sys.path.pop(0)
+setup_modules.setup(base_path=autotest_dir, root_module_name='autotest_lib')
diff --git a/site_utils/admin_audit/data/keyboard.hex b/server/cros/servo/keyboard/data/keyboard.hex
similarity index 100%
rename from site_utils/admin_audit/data/keyboard.hex
rename to server/cros/servo/keyboard/data/keyboard.hex
diff --git a/server/cros/servo/keyboard/servo_keyboard_flasher.py b/server/cros/servo/keyboard/servo_keyboard_flasher.py
new file mode 100644
index 0000000..34a8084
--- /dev/null
+++ b/server/cros/servo/keyboard/servo_keyboard_flasher.py
@@ -0,0 +1,111 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+import os
+
+import common
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros import servo_keyboard_utils
+
+
+class ServoKeyboardMapFlasher():
+    """Flash the servo keyboard map on servo."""
+
+    _ATMEGA_RESET_DELAY = 0.2
+    _ATMEGA_FLASH_TIMEOUT = 120
+    _USB_PRESENT_DELAY = 1
+
+    # Command to detect LUFA Keyboard Demo by VID.
+    LSUSB_CMD = 'lsusb -d %s:' % servo_keyboard_utils.ATMEL_USB_VENDOR_ID
+    LSUSB_TIMEOUT = 30
+
+    def is_image_supported(self, host):
+        """Check if servo keyboard map supported on host
+
+        @param host: CrosHost instance
+        """
+        if host.run('hash dfu-programmer', ignore_status=True).exit_status:
+            return False
+        return True
+
+    def update(self, host):
+        """Update servo keyboard map firmware on the host if required.
+
+        The process will verify the presence of the keyboard firmware on the
+        host and flash it if the device was not detected.
+
+        @param host: CrosHost instance
+        """
+        if not self.is_image_supported(host):
+            raise Exception(
+                    'The image is too old; it does not have dfu-programmer.')
+
+        try:
+            logging.debug('Starting flashing the keyboard map.')
+            host.servo.set_nocheck('init_usb_keyboard', 'on')
+
+            if self._is_keyboard_present(host):
+                logging.info('Already using the new keyboard map.')
+                return
+
+            self._flash_keyboard_map(host)
+        finally:
+            # Restore the default settings.
+            # Select the chip on the USB mux unless using Servo V4
+            if 'servo_v4' not in host.servo.get_servo_type():
+                host.servo.set('usb_mux_sel4', 'on')
+
+    def _flash_keyboard_map(self, host):
+        """FLash servo keyboard firmware on the host."""
+        servo = host.servo
+        # Boot AVR into DFU mode by enabling the HardWareBoot mode
+        # strapping and reset.
+        servo.set_get_all([
+                'at_hwb:on', 'atmega_rst:on',
+                'sleep:%f' % self._ATMEGA_RESET_DELAY, 'atmega_rst:off',
+                'sleep:%f' % self._ATMEGA_RESET_DELAY, 'at_hwb:off'
+        ])
+
+        time.sleep(self._USB_PRESENT_DELAY)
+        result = host.run(self.LSUSB_CMD,
+                          timeout=self.LSUSB_TIMEOUT).stdout.strip()
+        if 'Atmel Corp. atmega32u4 DFU bootloader' not in result:
+            raise Exception('Not an expected chip: %s' % result)
+
+        # Update the keyboard map.
+        bindir = os.path.dirname(os.path.realpath(__file__))
+        local_path = os.path.join(bindir, 'data', 'keyboard.hex')
+        host.send_file(local_path, '/tmp')
+        logging.info('Updating the keyboard map...')
+        host.run('dfu-programmer atmega32u4 erase --force',
+                 timeout=self._ATMEGA_FLASH_TIMEOUT)
+        host.run('dfu-programmer atmega32u4 flash /tmp/keyboard.hex',
+                 timeout=self._ATMEGA_FLASH_TIMEOUT)
+
+        # Reset the chip.
+        servo.set_get_all([
+                'atmega_rst:on',
+                'sleep:%f' % self._ATMEGA_RESET_DELAY, 'atmega_rst:off'
+        ])
+        if self._is_keyboard_present(host):
+            logging.info('Updated successfully!')
+        else:
+            raise Exception('Update failed!')
+
+    def _is_keyboard_present(self, host):
+        """Verify if servo keyboard is present on the host.
+
+        The keyboard will be detected as a USB device on the host with the
+        name: 'Atmel Corp. LUFA Keyboard Demo Application'
+        """
+        time.sleep(self._USB_PRESENT_DELAY)
+        result = host.run(self.LSUSB_CMD,
+                          timeout=self.LSUSB_TIMEOUT).stdout.strip()
+        logging.debug('got the result: %s', result)
+        if ('LUFA Keyboard Demo' in result
+                    and servo_keyboard_utils.is_servo_usb_wake_capable(host)):
+            return True
+        return False
diff --git a/server/cros/servo/pd_device.py b/server/cros/servo/pd_device.py
index 5b943b1..41cdc44 100644
--- a/server/cros/servo/pd_device.py
+++ b/server/cros/servo/pd_device.py
@@ -269,7 +269,7 @@
         """
         # Dualrole mode must be supported
         if self.is_drp() is False:
-            logging.warn('Device not DRP capable, unabled to force disconnect')
+            logging.warning('Device not DRP capable, unable to force disconnect')
             return False
         # Force state will be the opposite of current connect state
         if self.is_src():
@@ -298,7 +298,7 @@
                 # Restore original power role
                 connect = self.pr_swap()
                 if connect == False:
-                    logging.warn('DRP on both devices, 2nd power swap failed')
+                    logging.warning('DRP on both devices, 2nd power swap failed')
                 return connect
 
         # Restore default dualrole mode
@@ -354,7 +354,7 @@
 
         # Determine if Try.SRC feature is supported
         if 'Try.SRC' not in m[0][0]:
-            logging.warn('Try.SRC not supported on this PD device')
+            logging.warning('Try.SRC not supported on this PD device')
             return False
 
         # TrySRC is supported on this PD device, verify setting.
@@ -410,7 +410,7 @@
             pattern = '|'.join((tcpmv1_pattern, tcpmv2_pattern))
             self.utils.send_pd_command_get_output(cmd, [pattern])
         except error.TestFail:
-            logging.warn('HARD RST TX not found')
+            logging.warning('HARD RST TX not found')
             return False
         finally:
             self.utils.disable_pd_console_debug()
@@ -432,10 +432,10 @@
         """
         # Get starting state
         if not self.is_drp():
-            logging.warn('Dualrole Mode not enabled!')
+            logging.warning('Dualrole Mode not enabled!')
             return False
         if self.is_connected() == False:
-            logging.warn('PD contract not established!')
+            logging.warning('PD contract not established!')
             return False
         current_pr = self.utils.get_pd_state(self.port)
         swap_cmd = 'pd %d swap power' % self.port
@@ -444,7 +444,7 @@
         new_pr = self.utils.get_pd_state(self.port)
         logging.info('Power swap: %s -> %s', current_pr, new_pr)
         if self.is_connected() == False:
-            logging.warn('Device not connected following PR swap attempt.')
+            logging.warning('Device not connected following PR swap attempt.')
             return False
         return current_pr != new_pr
 
@@ -539,12 +539,20 @@
         """
         DISC_DELAY = 100
         disc_cmd = 'fakedisconnect %d %d' % (DISC_DELAY, disc_time_sec * 1000)
+        state_exp = r'(C%d)\s+[\w]+:?\s(%s)'
+
+        disconnected_tuple = self.utils.get_disconnected_states()
+        disconnected_states = '|'.join(disconnected_tuple)
+        disconnected_exp = state_exp % (self.port, disconnected_states)
+
         src_connected_tuple = self.utils.get_src_connect_states()
         snk_connected_tuple = self.utils.get_snk_connect_states()
-        connected_exp = '|'.join(src_connected_tuple + snk_connected_tuple)
-        reply_exp = ['(.*)(C%d)\s+[\w]+:?\s(%s)' % (self.port, connected_exp)]
-        m = self.utils.send_pd_command_get_output(disc_cmd, reply_exp)
-        return m[0][3]
+        connected_states = '|'.join(src_connected_tuple + snk_connected_tuple)
+        connected_exp = state_exp % (self.port, connected_states)
+
+        m = self.utils.send_pd_command_get_output(disc_cmd, [disconnected_exp,
+            connected_exp])
+        return m[1][2]
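A sketch of how the state_exp pattern built above picks the state out of the console reply; the sample line and state names are assumptions for illustration, and the group positions mirror the m[1][2] lookup on the servod result.

import re

port = 0
connected_states = '|'.join(('SRC_READY', 'SNK_READY'))
state_exp = r'(C%d)\s+[\w]+:?\s(%s)' % (port, connected_states)

sample = 'C0  CC1: SNK_READY'  # assumed console fragment
m = re.search(state_exp, sample)
# Group 1 is the port tag, group 2 the matched state name.
print(m.group(1), m.group(2))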
 
     def drp_disconnect_connect(self, disc_time_sec):
         """Disconnect/reconnect using PDTester
diff --git a/server/cros/servo/pdtester.py b/server/cros/servo/pdtester.py
index a36d85c..fad35db 100644
--- a/server/cros/servo/pdtester.py
+++ b/server/cros/servo/pdtester.py
@@ -33,8 +33,10 @@
     # USB charging command delays in seconds.
     USBC_COMMAND_DELAY = 0.5
     # PDTester USBC commands.
-    USBC_ROLE= 'usbc_role' # TODO(b:140256624): deprecate by USBC_PR
-    USBC_PR= 'usbc_pr'
+    USBC_DRSWAP = 'usbc_drswap'
+    USBC_PRSWAP = 'usbc_prswap'
+    USBC_ROLE = 'usbc_role'  # TODO(b:140256624): deprecate by USBC_PR
+    USBC_PR = 'usbc_pr'
     USBC_MUX = 'usbc_mux'
     RE_USBC_ROLE_VOLTAGE = r'src(\d+)v'
     USBC_SRC_CAPS = 'ada_srccaps'
@@ -61,6 +63,8 @@
         'sink': 'SNK_READY',
         'source': 'SRC_READY'}
     POLL_STATE_SECS = 2
+    FIRST_PD_SETUP_ELEMENT = ['servo_v4', 'servo_v4p1']
+    SECOND_PD_SETUP_ELEMENT = ['servo_micro', 'c2d2']
 
     def __init__(self, servo, servod_proxy):
         """Initialize and keep the servo object.
@@ -69,8 +73,9 @@
         @param servod_proxy: Servod proxy for pdtester host
         """
         self.servo_type = servo.get_servo_version()
-        if 'servo_v4' in self.servo_type:
-            uart_prefix = 'servo_v4_uart'
+        pd_tester_device = self.servo_type.split('_with_')[0]
+        if pd_tester_device in self.FIRST_PD_SETUP_ELEMENT:
+            uart_prefix = pd_tester_device + "_uart"
         else:
             uart_prefix = 'ec_uart'
 
@@ -149,14 +154,18 @@
             srccaps = self.get_adapter_source_caps()
         except PDTesterError:
             # hdctools and servo_v4 are not updated, fall back to the old path.
-            logging.warn('hdctools or servov4 firmware too old, fallback to '
+            logging.warning('hdctools or servov4 firmware too old, fallback to '
                          'fixed charging voltages.')
             return list(self.USBC_CHARGING_VOLTAGES_LEGACY.keys())
 
         # insert 0 voltage for sink
         vols = [0]
         for pdo in srccaps:
-            vols.append(pdo[0]/1000)
+            # Only include the voltages that are in USBC_CHARGING_VOLTAGES
+            if pdo[0] / 1000 in self.USBC_CHARGING_VOLTAGES:
+                vols.append(pdo[0] / 1000)
+            else:
+                logging.debug("Omitting unsupported PDO = %s", pdo)
         return vols
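A sketch of the PDO filtering added to get_charging_voltages(): only source-capability voltages with a matching usbc_pr setting are kept. The srccaps list (millivolt/milliamp pairs) and the voltage map below are illustrative values, not the real USBC_CHARGING_VOLTAGES table.

# Illustrative supported-voltage map and adapter source capabilities.
USBC_CHARGING_VOLTAGES = {0: 'snk', 5: 'src5v', 12: 'src12v', 20: 'src20v'}
srccaps = [(5000, 3000), (9000, 3000), (12000, 3000), (20000, 2250)]

vols = [0]  # 0 V stands for sink mode
for pdo in srccaps:
    if pdo[0] / 1000 in USBC_CHARGING_VOLTAGES:
        vols.append(pdo[0] / 1000)
    else:
        print('Omitting unsupported PDO =', pdo)
print(vols)  # [0, 5.0, 12.0, 20.0]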
 
     def charge(self, voltage):
@@ -166,14 +175,23 @@
         """
         charging_voltages = self.get_charging_voltages()
         if voltage not in charging_voltages:
-            logging.warning('Unsupported voltage(%s) of the adapter. '
-                            'Maybe firmware or servod too old? '
-                            'sudo servo_updater -b servo_v4; '
-                            'sudo emerge hdctools' % voltage)
+            logging.warning(
+                    'Unsupported voltage(%s) of the adapter. '
+                    'Maybe firmware or servod too old? '
+                    'sudo servo_updater -b servo_v4; '
+                    'sudo emerge hdctools', voltage)
+        if voltage not in self.USBC_CHARGING_VOLTAGES:
+            raise PDTesterError(
+                    'Cannot set voltage to %s, not supported by %s' %
+                    (voltage, self.USBC_PR))
 
         try:
             self.set(self.USBC_PR, self.USBC_CHARGING_VOLTAGES[voltage])
         except:
+            if voltage not in self.USBC_CHARGING_VOLTAGES_LEGACY:
+                raise PDTesterError(
+                        'Cannot set voltage to %s, not supported by %s' %
+                        (voltage, self.USBC_ROLE))
             self.set(self.USBC_ROLE,
                      self.USBC_CHARGING_VOLTAGES_LEGACY[voltage])
         time.sleep(self.USBC_COMMAND_DELAY)
@@ -184,10 +202,11 @@
         try:
             usbc_pr = self.get(self.USBC_PR)
         except:
-            logging.warn('Unsupported control(%s). '
-                         'Maybe firmware or servod too old? '
-                         'sudo servo_updater -b servo_v4; '
-                         'sudo emerge hdctools' % self.USBC_PR)
+            logging.warning(
+                    'Unsupported control(%s). '
+                    'Maybe firmware or servod too old? '
+                    'sudo servo_updater -b servo_v4; '
+                    'sudo emerge hdctools', self.USBC_PR)
             usbc_pr = self.get(self.USBC_ROLE)
         m = re.match(self.RE_USBC_ROLE_VOLTAGE, usbc_pr)
         if m:
@@ -225,3 +244,21 @@
                                 'should be either \'dp\' or \'usb\'.' % mux)
         self.set(self.USBC_MUX, mux)
         time.sleep(self.USBC_COMMAND_DELAY)
+
+    def allow_pr_swap(self, allow):
+        """Issue usbc_action prswap PDTester command
+
+        @param allow: a bool for ACK or NACK to PR_SWAP
+                      command requested by DUT
+        """
+        self.set(self.USBC_PRSWAP, int(allow))
+
+    def allow_dr_swap(self, allow):
+        """Issue usbc_action drswap PDTester command
+
+        @param allow: a bool for ACK or NACK to DR_SWAP
+                      command requested by DUT
+        """
+        self.set(self.USBC_DRSWAP, int(allow))
diff --git a/server/cros/servo/servo.py b/server/cros/servo/servo.py
index 948eaac..d0690a3 100644
--- a/server/cros/servo/servo.py
+++ b/server/cros/servo/servo.py
@@ -116,8 +116,14 @@
 
     def __str__(self):
         """String representation of the exception"""
-        return '%s -- [Errno %d] %s: %r' % (self.when, self.errno,
-                                            self.strerror, self.filename)
+        msgv = [self.when]
+        if self.errno is not None or self.strerror is not None:
+            msgv.append('--')
+        if self.errno is not None:
+            msgv.append('[Errno %d]' % self.errno)
+        if self.strerror is not None:
+            msgv.append(self.strerror)
+        return '%s: %r' % (' '.join(msgv), self.filename)
 
 
 # TODO: once in python 3, inherit from AbstractContextManager
@@ -185,8 +191,17 @@
 
             if isinstance(exc_val, seven.SOCKET_ERRORS):
                 self._log_exception(exc_type, exc_val, exc_tb)
-                err = ServodConnectionError(self.description, exc_val.args[0],
-                                            exc_val.args[1], self.servo_name)
+                if len(exc_val.args) == 0:
+                    errno = None
+                    strerror = None
+                elif len(exc_val.args) == 1:
+                    errno = None
+                    strerror = exc_val.args[0]
+                else:
+                    errno = exc_val.args[0]
+                    strerror = exc_val.args[1]
+                err = ServodConnectionError(self.description, errno, strerror,
+                                            self.servo_name)
                 six.reraise(err.__class__, err, exc_tb)
 
             if isinstance(exc_val, six.moves.xmlrpc_client.Fault):
@@ -239,11 +254,13 @@
     # Generate a list of all tarball files
     stdout = server_utils.system_output('tar tf %s' % tarball,
                                         timeout=timeout,
-                                        ignore_status=True)
+                                        ignore_status=True,
+                                        args=image_candidates)
     tarball_files = stdout.splitlines()
 
     # Check if image candidates are in the list of tarball files
     for image in image_candidates:
+        logging.debug("Trying to extract %s (autotest)", image)
         if image in tarball_files:
             # Extract and return the first image candidate found
             tar_cmd = 'tar xf %s -C %s %s' % (tarball, dest_dir, image)
@@ -393,17 +410,9 @@
 
 class _Uart(object):
     """Class to capture UART streams of CPU, EC, Cr50, etc."""
-    _UartToCapture = (
-        'cpu',
-        'cr50',
-        'ec',
-        'servo_micro',
-        'servo_v4',
-        'usbpd',
-        'ccd_cr50.ec',
-        'ccd_cr50.cpu',
-        'ccd_cr50.cr50'
-    )
+    _UartToCapture = ('cpu', 'cr50', 'ec', 'servo_micro', 'servo_v4', 'usbpd',
+                      'ccd_cr50.ec', 'ccd_cr50.cpu', 'ccd_cr50.cr50',
+                      'ccd_gsc.ec', 'ccd_gsc.cpu', 'ccd_gsc.cr50')
 
 
     def __init__(self, servo):
@@ -448,7 +457,10 @@
     def start_capture(self):
         """Start capturing UART streams."""
         for uart in self._UartToCapture:
-            if self._start_stop_capture(uart, True):
+            # Always try to start the uart. Only add it to _streams if it's not
+            # in the list.
+            if (self._start_stop_capture(uart, True)
+                        and uart not in self._streams):
                 self._streams.append(uart)
 
     def get_logfile(self, uart):
@@ -468,11 +480,11 @@
             try:
                 content = self._servo.get(stream)
             except Exception as err:
-                logging.warn('Failed to get UART log for %s: %s', stream, err)
+                logging.warning('Failed to get UART log for %s: %s', stream, err)
                 continue
 
             if content == 'not_applicable':
-                logging.warn('%s is not applicable', stream)
+                logging.warning('%s is not applicable', stream)
                 continue
 
             # The UART stream may contain non-printable characters, and servo
@@ -491,7 +503,7 @@
             try:
                 self._start_stop_capture(uart, False)
             except Exception as err:
-                logging.warn('Failed to stop UART logging for %s: %s', uart,
+                logging.warning('Failed to stop UART logging for %s: %s', uart,
                              err)
 
 
@@ -550,31 +562,44 @@
     # This was increased from 60 seconds to support boards with very
     # large (>500MB) firmware archives taking longer than expected to
     # extract firmware on the lab host machines (b/149419503).
-    EXTRACT_TIMEOUT_SECS = 180
+    EXTRACT_TIMEOUT_SECS = 900
 
     # The VBUS voltage threshold used to detect if VBUS is supplied
     VBUS_THRESHOLD = 3000.0
 
-    def __init__(self, servo_host, servo_serial=None):
+    # List of servos that connect to a debug header on the board.
+    FLEX_SERVOS = ['c2d2', 'servo_micro', 'servo_v3']
+
+    # List of servos that rely on gsc commands for some part of dut control.
+    GSC_DRV_SERVOS = ['c2d2', 'ccd_gsc', 'ccd_cr50']
+
+    CCD_PREFIX = 'ccd_'
+
+    def __init__(self, servo_host, servo_serial=None, delay_init=False):
         """Sets up the servo communication infrastructure.
 
         @param servo_host: A ServoHost object representing
                            the host running servod.
         @type servo_host: autotest_lib.server.hosts.servo_host.ServoHost
         @param servo_serial: Serial number of the servo board.
+        @param delay_init:  Delay caching servo_type and power_state to avoid
+                            attempting to connect to servod during init.
         """
         # TODO(fdeng): crbug.com/298379
         # We should move servo_host object out of servo object
         # to minimize the dependencies on the rest of Autotest.
         self._servo_host = servo_host
         self._servo_serial = servo_serial
-        self._servo_type = self.get_servo_version()
-        self._power_state = _PowerStateController(self)
-        self._uart = _Uart(self)
+        self._servo_type = None
+        self._power_state = None
         self._programmer = None
         self._prev_log_inode = None
         self._prev_log_size = 0
         self._ccd_watchdog_disabled = False
+        if not delay_init:
+            self._servo_type = self.get_servo_version()
+            self._power_state = _PowerStateController(self)
+        self._uart = _Uart(self)
 
     def __str__(self):
         """Description of this object and address, for use in errors"""
@@ -601,6 +626,8 @@
         interfaces for reset, power-on, power-off operations.
 
         """
+        if self._power_state is None:
+            self._power_state = _PowerStateController(self)
         return self._power_state
 
 
@@ -643,26 +670,29 @@
         # v4p1).
         # TODO(coconutruben): eventually, replace this with a metric to track
         # SBU voltages wrt servo-hw/dut-hw
-        if self.has_control('servo_v4_sbu1_mv'):
+        if self.has_control('servo_dut_sbu1_mv'):
             # Attempt to take a reading of sbu1 and sbu2 multiple times to
             # account for situations where the two lines exchange hi/lo roles
             # frequently.
             for i in range(10):
                 try:
-                    sbu1 = int(self.get('servo_v4_sbu1_mv'))
-                    sbu2 = int(self.get('servo_v4_sbu2_mv'))
+                    sbu1 = int(self.get('servo_dut_sbu1_mv'))
+                    sbu2 = int(self.get('servo_dut_sbu2_mv'))
                     logging.info('attempt %d sbu1 %d sbu2 %d', i, sbu1, sbu2)
                 except error.TestFail as e:
                     # This is a nice to have but if reading this fails, it
                     # shouldn't interfere with the test.
                     logging.exception(e)
         self._uart.start_capture()
+        # Run testlab open if servo relies on ccd to control the dut.
+        if self.main_device_uses_gsc_drv():
+            self.set_nocheck('cr50_testlab', 'open')
         if cold_reset:
-            if not self._power_state.supported:
+            if not self.get_power_state_controller().supported:
                 logging.info('Cold-reset for DUT requested, but servo '
                              'setup does not support power_state. Skipping.')
             else:
-                self._power_state.reset()
+                self.get_power_state_controller().reset()
         with _WrapServoErrors(
                 servo=self, description='initialize_dut()->get_version()'):
             version = self._server.get_version()
@@ -705,8 +735,11 @@
         # chromeos-ci-legacy-us-central1-b-x32-55-u8zc // builder information
         # For debugging purposes, we mainly care about the version, and the
         # timestamp.
+        if type(sversion) == type(b' '):
+            sversion = sversion.decode("utf-8")
         return ' '.join(sversion.split()[1:4])
 
+
     def power_long_press(self):
         """Simulate a long power button press."""
         # After a long power press, the EC may ignore the next power
@@ -714,19 +747,19 @@
         # won't happen, we need to allow the EC one second to
         # collect itself.
         # long_press is defined as 8.5s in servod
-        self.set_nocheck('power_key', 'long_press')
+        self.power_key('long_press')
 
 
     def power_normal_press(self):
         """Simulate a normal power button press."""
         # press is defined as 1.2s in servod
-        self.set_nocheck('power_key', 'press')
+        self.power_key('press')
 
 
     def power_short_press(self):
         """Simulate a short power button press."""
         # tab is defined as 0.2s in servod
-        self.set_nocheck('power_key', 'tab')
+        self.power_key('tab')
 
 
     def power_key(self, press_secs='tab'):
@@ -735,7 +768,24 @@
         @param press_secs: int, float, str; time to press key in seconds or
                            known shorthand: 'tab' 'press' 'long_press'
         """
-        self.set_nocheck('power_key', press_secs)
+        # TODO(b/224804060): use the power_key control for all servo types when
+        # c2d2 has a defined power_key driver.
+        if 'c2d2' not in self.get_servo_type():
+            self.set_nocheck('power_key', press_secs)
+            return
+        if isinstance(press_secs, str):
+            if press_secs == 'tab':
+                press_secs = 0.2
+            elif press_secs == 'press':
+                press_secs = 1.2
+            elif press_secs == 'long_press':
+                press_secs = 8.5
+            else:
+                raise error.TestError('Invalid press %r' % press_secs)
+        logging.info('Manual power button press for %s seconds', press_secs)
+        self.set_nocheck('pwr_button', 'press')
+        time.sleep(press_secs)
+        self.set_nocheck('pwr_button', 'release')
 
 
     def pwr_button(self, action='press'):
@@ -836,6 +886,14 @@
         self.set_nocheck('ctrl_d', press_secs)
 
 
+    def ctrl_r(self, press_secs='tab'):
+        """Simulate Ctrl-r simultaneous button presses.
+
+        @param press_secs: int, float, str; time to press key in seconds or
+                           known shorthand: 'tab' 'press' 'long_press'
+        """
+        self.set_nocheck('ctrl_r', press_secs)
+
     def ctrl_s(self, press_secs='tab'):
         """Simulate Ctrl-s simultaneous button presses.
 
@@ -989,17 +1047,30 @@
                 return ''
             raise
 
-    def get_ec_board(self):
-        """Get the board name from EC."""
-        if self.has_control('active_v4_device'):
+    def can_set_active_device(self):
+        """Returns True if the servo setup supports setting the active device
+
+        Servo can only change the active device if there are multiple devices
+        and servo has the active_dut_controller control.
+        """
+        return ('_and_' in self.get_servo_type()
+                and self.has_control('active_dut_controller'))
+
+    def get_active_device_prefix(self):
+        """Return ccd_(gsc|cr50) or '' if the main device is active."""
+        active_device = ''
+        if self.can_set_active_device():
             # If servo v4 is allowing dual_v4 devices, then choose the
             # active device.
-            active_device = self.get('active_v4_device')
+            active_device = self.get('active_dut_controller')
             if active_device == self.get_main_servo_device():
                 active_device = ''
-        else:
-            active_device = ''
-        return self.get('ec_board', prefix=active_device)
+        return active_device
+
+    def get_ec_board(self):
+        """Get the board name from EC."""
+
+        return self.get('ec_board', prefix=self.get_active_device_prefix())
 
     def get_ec_active_copy(self):
         """Get the active copy of the EC image."""
@@ -1141,7 +1212,7 @@
         # to do to the device after hotplug.  To avoid surprises,
         # force the DUT to be off.
         if power_off_dut:
-            self._power_state.power_off()
+            self.get_power_state_controller().power_off()
 
         if image_path:
             logging.info('Searching for usb device and copying image to it. '
@@ -1191,7 +1262,8 @@
             self.set_servo_v4_role('snk')
 
         try:
-            self._power_state.power_on(rec_mode=self._power_state.REC_ON)
+            power_state = self.get_power_state_controller()
+            power_state.power_on(rec_mode=power_state.REC_ON)
         except error.TestFail as e:
             self.set_servo_v4_role('src')
             logging.error('Failed to boot DUT in recovery mode. %s.', str(e))
@@ -1300,62 +1372,91 @@
 
         # If servo v4 is using ccd and servo micro, modify the servo type to
         # reflect the active device.
-        active_device = self.get('active_v4_device')
+        active_device = self.get('active_dut_controller')
         if active_device in servo_type:
             logging.info('%s is active', active_device)
             return 'servo_v4_with_' + active_device
 
-        logging.warn("%s is active even though it's not in servo type",
+        logging.warning("%s is active even though it's not in servo type",
                      active_device)
         return servo_type
 
 
     def get_servo_type(self):
+        if self._servo_type is None:
+            self._servo_type = self.get_servo_version()
         return self._servo_type
 
+    def get_servo_v4_type(self):
+        """Return the servo_v4_type (such as 'type-c'), or None if not v4."""
+        if not hasattr(self, '_servo_v4_type'):
+            if 'servo_v4' in self.get_servo_type():
+                self._servo_v4_type = self.get('root.dut_connection_type')
+            else:
+                self._servo_v4_type = None
+        return self._servo_v4_type
+
+    def is_servo_v4_type_a(self):
+        """True if the servo is v4 and type-a, else False."""
+        return self.get_servo_v4_type() == 'type-a'
+
+    def is_servo_v4_type_c(self):
+        """True if the servo is v4 and type-c, else False."""
+        return self.get_servo_v4_type() == 'type-c'
 
     def get_main_servo_device(self):
         """Return the main servo device"""
-        return self._servo_type.split('_with_')[-1].split('_and_')[0]
+        return self.get_servo_type().split('_with_')[-1].split('_and_')[0]
 
 
     def enable_main_servo_device(self):
         """Make sure the main device has control of the dut."""
-        if not self.has_control('active_v4_device'):
+        if not self.can_set_active_device():
             return
-        self.set('active_v4_device', self.get_main_servo_device())
+        self.set('active_dut_controller', self.get_main_servo_device())
 
+    def get_ccd_servo_device(self):
+        """Return the ccd servo device or '' if no ccd devices are connected."""
+        servo_type = self.get_servo_type()
+        if 'ccd' not in servo_type:
+            return ''
+        return servo_type.split('_with_')[-1].split('_and_')[-1]
+
+    def active_device_is_ccd(self):
+        """Returns True if a ccd device is active."""
+        return 'ccd' in self.get_servo_version(active=True)
+
+    def enable_ccd_servo_device(self):
+        """Make sure the ccd device has control of the dut.
+
+        Returns True if the ccd device is in control of the dut.
+        """
+        if self.active_device_is_ccd():
+            return True
+        ccd_device = self.get_ccd_servo_device()
+        if not self.can_set_active_device() or not ccd_device:
+            return False
+        self.set('active_dut_controller', ccd_device)
+        return True
 
     def main_device_is_ccd(self):
         """Whether the main servo device (no prefixes) is a ccd device."""
-        with _WrapServoErrors(
-                servo=self, description='main_device_is_ccd()->get_version()'):
-            servo = self._server.get_version()
-        return 'ccd_cr50' in servo and 'servo_micro' not in servo
-
+        servo = self.get_servo_type()
+        return 'ccd' in servo and not self.main_device_is_flex()
 
     def main_device_is_flex(self):
         """Whether the main servo device (no prefixes) is a legacy device."""
-        return not self.main_device_is_ccd()
+        servo = self.get_servo_type()
+        return any([flex in servo for flex in self.FLEX_SERVOS])
 
+    def main_device_uses_gsc_drv(self):
+        """Whether the main servo device uses gsc drivers.
 
-    def main_device_is_active(self):
-        """Return whether the main device is the active device.
-
-        This is only relevant for a dual setup with ccd and legacy on the same
-        DUT. The main device is the servo that has no prefix on its controls.
-        This helper answers the question whether that device is also the
-        active device or not.
+        Servo may use gsc write-protect or console commands to control the
+        dut. These are restricted by ccd capabilities. This returns True if
+        some servo functionality will be disabled when ccd is restricted.
         """
-        # TODO(coconutruben): The current implementation of the dual setup only
-        # ever has legacy as the main device. Therefore, it suffices to ask
-        # whether the active device is ccd.
-        if not self.dts_mode_is_valid():
-            # Use dts support as a proxy to whether the servo setup could
-            # support a dual role. Only those setups now support legacy and ccd.
-            return True
-        active_device = self.get('active_v4_device')
-        return 'ccd_cr50' not in active_device
+        return self.get_main_servo_device() in self.GSC_DRV_SERVOS
 
     def _initialize_programmer(self, rw_only=False):
         """Initialize the firmware programmer.
@@ -1366,19 +1467,20 @@
         if self._programmer:
             return
         # Initialize firmware programmer
-        if self._servo_type.startswith('servo_v2'):
+        servo_type = self.get_servo_type()
+        if servo_type.startswith('servo_v2'):
             self._programmer = firmware_programmer.ProgrammerV2(self)
             self._programmer_rw = firmware_programmer.ProgrammerV2RwOnly(self)
         # Both servo v3 and v4 use the same programming methods so just leverage
         # ProgrammerV3 for servo v4 as well.
-        elif (self._servo_type.startswith('servo_v3') or
-              self._servo_type.startswith('servo_v4')):
+        elif (servo_type.startswith('servo_v3')
+              or servo_type.startswith('servo_v4')):
             self._programmer = firmware_programmer.ProgrammerV3(self)
             self._programmer_rw = firmware_programmer.ProgrammerV3RwOnly(self)
         else:
             raise error.TestError(
                     'No firmware programmer for servo version: %s' %
-                    self._servo_type)
+                    self.get_servo_type())
 
 
     def program_bios(self, image, rw_only=False, copy_image=True):
@@ -1390,7 +1492,7 @@
         @param copy_image: True indicates we need scp the image to servohost
                            while False means the image file is already on
                            servohost.
-
+        @return: a string, full path name of the copied file on the remote.
         """
         self._initialize_programmer()
         # We don't need scp if test runs locally.
@@ -1400,6 +1502,7 @@
             self._programmer_rw.program_bios(image)
         else:
             self._programmer.program_bios(image)
+        return image
 
 
     def program_ec(self, image, rw_only=False, copy_image=True):
@@ -1411,6 +1514,7 @@
         @param copy_image: True indicates we need scp the image to servohost
                            while False means the image file is already on
                            servohost.
+        @return: a string, full path name of the copied file on the remote.
         """
         self._initialize_programmer()
         # We don't need scp if test runs locally.
@@ -1420,14 +1524,16 @@
             self._programmer_rw.program_ec(image)
         else:
             self._programmer.program_ec(image)
+        return image
 
 
-    def extract_ec_image(self, board, model, tarball_path):
+    def extract_ec_image(self, board, model, tarball_path, fake_image=False):
         """Helper function to extract EC image from downloaded tarball.
 
         @param board: The DUT board name.
         @param model: The DUT model name.
         @param tarball_path: The path of the downloaded build tarball.
+        @param fake_image: True to return a fake zero-filled image instead.
 
         @return: Path to extracted EC image.
         """
@@ -1435,25 +1541,25 @@
         # Ignore extracting EC image and re-programming if not a Chrome EC
         chrome_ec = FAFTConfig(board).chrome_ec
         if not chrome_ec:
-            logging.warn('Not a Chrome EC, ignore re-programming it')
+            logging.warning('Not a Chrome EC, ignore re-programming it')
             return None
 
-        # Try to retrieve firmware build target from the version reported
-        # by the EC. If this doesn't work, we assume the firmware build
-        # target is the same as the model name.
-        try:
-            fw_target = self.get_ec_board()
-        except Exception as err:
-            logging.warn('Failed to get ec_board value; ignoring')
-            fw_target = model
-            pass
+        # Most boards use the model name as the ec directory.
+        ec_image_candidates = ['%s/ec.bin' % model]
 
-        # Array of candidates for EC image
-        ec_image_candidates = [
-                'ec.bin',
-                '%s/ec.bin' % fw_target,
-                '%s/ec.bin' % board
-        ]
+        if model == "dragonair":
+            ec_image_candidates.append('dratini/ec.bin')
+
+        # If that isn't found try the name from the EC RO version.
+        try:
+            fw_target = self.get_ec_board().lower()
+            ec_image_candidates.append('%s/ec.bin' % fw_target)
+        except Exception as err:
+            logging.warning('Failed to get ec_board value; ignoring')
+
+        # Fall back to the name of the board, and then a bare ec.bin.
+        ec_image_candidates.append('%s/ec.bin' % board)
+        ec_image_candidates.append('ec.bin')
 
         # Extract EC image from tarball
         dest_dir = os.path.join(os.path.dirname(tarball_path), 'EC')
@@ -1471,6 +1577,17 @@
             _extract_image_from_tarball(tarball_path, dest_dir, mon_candidates,
                                         self.EXTRACT_TIMEOUT_SECS)
 
+            if fake_image:
+                # Create a small (25% of original size) zero-filled binary to
+                # replace the real ec_image.
+                file_size = os.path.getsize(
+                        os.path.join(dest_dir, ec_image)) // 4
+                ec_image = os.path.join(os.path.dirname(ec_image),
+                                        "zero_ec.bin")
+                dump_cmd = 'dd if=/dev/zero of=%s bs=4096 count=%d' % (
+                        os.path.join(dest_dir, ec_image), file_size // 4096)
+                if server_utils.system(dump_cmd, ignore_status=True) != 0:
+                    return None
+
             return os.path.join(dest_dir, ec_image)
         else:
             raise error.TestError('Failed to extract EC image from %s' %
@@ -1487,23 +1604,25 @@
         @return: Path to extracted BIOS image.
         """
 
-        # Try to retrieve firmware build target from the version reported
-        # by the EC. If this doesn't work, we assume the firmware build
-        # target is the same as the model name.
-        try:
-            fw_target = self.get_ec_board()
-        except Exception as err:
-            logging.warn('Failed to get ec_board value; ignoring')
-            fw_target = model
-            pass
-
-        # Array of candidates for BIOS image
+        # Most boards use the model name as the image filename.
         bios_image_candidates = [
-                'image.bin',
-                'image-%s.bin' % fw_target,
-                'image-%s.bin' % board
+                'image-%s.bin' % model,
         ]
 
+        if model == "dragonair":
+            bios_image_candidates.append('image-dratini.bin')
+
+        # If that isn't found try the name from the EC RO version.
+        try:
+            fw_target = self.get_ec_board().lower()
+            bios_image_candidates.append('image-%s.bin' % fw_target)
+        except Exception as err:
+            logging.warning('Failed to get ec_board value; ignoring')
+
+        # Fall back to the name of the board, and then a bare image.bin.
+        bios_image_candidates.append('image-%s.bin' % board)
+        bios_image_candidates.append('image.bin')
+
         # Extract BIOS image from tarball
         dest_dir = os.path.join(os.path.dirname(tarball_path), 'BIOS')
         bios_image = _extract_image_from_tarball(tarball_path,
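Editor's note: a sketch (not part of the change) of the candidate-ordering pattern used above for both EC and BIOS images: model name first, any board-specific aliases, then the EC-reported build target, then the board name, then a bare default filename. All names below are placeholders.

    def image_candidates(pattern, bare_name, model, board, fw_target=None,
                         aliases=()):
        """Build the ordered list of image names to try extracting."""
        candidates = [pattern % model]
        candidates.extend(pattern % alias for alias in aliases)
        if fw_target:
            candidates.append(pattern % fw_target)
        candidates.append(pattern % board)
        candidates.append(bare_name)
        return candidates

    # e.g. image_candidates('image-%s.bin', 'image.bin', 'dragonair', 'hatch',
    #                       fw_target='dratini', aliases=('dratini',))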
@@ -1580,19 +1699,19 @@
 
         @param role: Power role for DUT port on servo v4, either 'src' or 'snk'.
         """
-        if not self._servo_type.startswith('servo_v4'):
+        if not self.get_servo_type().startswith('servo_v4'):
             logging.debug('Not a servo v4, unable to set role to %s.', role)
             return
 
-        if not self.has_control('servo_v4_role'):
+        if not self.has_control('servo_pd_role'):
             logging.debug(
                     'Servo does not has servo_v4_role control, unable'
                     ' to set role to %s.', role)
             return
 
-        value = self.get('servo_v4_role')
+        value = self.get('servo_pd_role')
         if value != role:
-            self.set_nocheck('servo_v4_role', role)
+            self.set_nocheck('servo_pd_role', role)
         else:
             logging.debug('Already in the role: %s.', role)
 
@@ -1601,17 +1720,17 @@
 
         It returns None if not a servo v4.
         """
-        if not self._servo_type.startswith('servo_v4'):
+        if not self.get_servo_type().startswith('servo_v4'):
             logging.debug('Not a servo v4, unable to get role')
             return None
 
-        if not self.has_control('servo_v4_role'):
+        if not self.has_control('servo_pd_role'):
             logging.debug(
                     'Servo does not has servo_v4_role control, unable'
                     ' to get the role.')
             return None
 
-        return self.get('servo_v4_role')
+        return self.get('servo_pd_role')
 
     def set_servo_v4_pd_comm(self, en):
         """Set the PD communication of servo v4, either 'on' or 'off'.
@@ -1620,43 +1739,31 @@
 
         @param en: a string of 'on' or 'off' for PD communication.
         """
-        if self._servo_type.startswith('servo_v4'):
-            self.set_nocheck('servo_v4_pd_comm', en)
+        if self.get_servo_type().startswith('servo_v4'):
+            self.set_nocheck('servo_pd_comm', en)
         else:
             logging.debug('Not a servo v4, unable to set PD comm to %s.', en)
 
     def supports_built_in_pd_control(self):
         """Return whether the servo type supports pd charging and control."""
-        if 'servo_v4' not in self._servo_type:
-            # Only servo v4 supports this feature.
-            logging.info('%r type does not support pd control.',
-                         self._servo_type)
-            return False
-        # On servo v4, it still needs to be the type-c version.
-        if not self.get('servo_v4_type') == 'type-c':
-            logging.info('PD controls require a type-c servo v4.')
+        # Only servo v4 type-c supports this feature.
+        if not self.is_servo_v4_type_c():
+            logging.info('PD controls require a servo v4 type-c.')
             return False
         # Lastly, one cannot really do anything without a plugged in charger.
         chg_port_mv = self.get('ppchg5_mv')
         if chg_port_mv < V4_CHG_ATTACHED_MIN_VOLTAGE_MV:
-            logging.warn('It appears that no charger is plugged into servo v4. '
-                         'Charger port voltage: %dmV', chg_port_mv)
+            logging.info(
+                    'It appears that no charger is plugged into servo v4. '
+                    'Charger port voltage: %dmV', chg_port_mv)
             return False
         logging.info('Charger port voltage: %dmV', chg_port_mv)
         return True
 
     def dts_mode_is_valid(self):
         """Return whether servo setup supports dts mode control for cr50."""
-        if 'servo_v4' not in self._servo_type:
-            # Only servo v4 supports this feature.
-            logging.debug('%r type does not support dts mode control.',
-                          self._servo_type)
-            return False
-        # On servo v4, it still needs ot be the type-c version.
-        if not 'type-c' == self.get('servo_v4_type'):
-            logging.info('DTS controls require a type-c servo v4.')
-            return False
-        return True
+        # Only servo v4 type-c supports this feature.
+        return self.is_servo_v4_type_c()
 
     def dts_mode_is_safe(self):
         """Return whether servo setup supports dts mode without losing access.
@@ -1675,11 +1782,11 @@
         if not self.dts_mode_is_valid():
             logging.info('Not a valid servo setup. Unable to get dts mode.')
             return
-        return self.get('servo_v4_dts_mode')
+        return self.get('servo_dts_mode')
 
     def ccd_watchdog_enable(self, enable):
         """Control the ccd watchdog."""
-        if 'ccd' not in self._servo_type:
+        if 'ccd' not in self.get_servo_type():
             return
         if self._ccd_watchdog_disabled and enable:
             logging.info('CCD watchdog disabled for test')
@@ -1717,7 +1824,7 @@
         if not enable_watchdog:
             self.ccd_watchdog_enable(False)
 
-        self.set_nocheck('servo_v4_dts_mode', state)
+        self.set_nocheck('servo_dts_mode', state)
 
         if enable_watchdog:
             self.ccd_watchdog_enable(True)
@@ -1726,20 +1833,22 @@
     def _get_servo_type_fw_version(self, servo_type, prefix=''):
         """Helper to handle fw retrieval for micro/v4 vs ccd.
 
-        @param servo_type: one of 'servo_v4', 'servo_micro', 'ccd_cr50'
+        @param servo_type: one of 'servo_v4', 'servo_micro', 'c2d2',
+                           'ccd_cr50', or 'ccd_gsc'
         @param prefix: whether the control has a prefix
 
         @returns: fw version for non-ccd devices, cr50 version for ccd device
         """
-        if servo_type == 'ccd_cr50':
-            # ccd_cr50 runs on cr50, so need to query the cr50 fw.
-            servo_type = 'cr50'
+        # If it's a ccd device, remove the 'ccd_' prefix to find the firmware
+        # name.
+        if servo_type.startswith(self.CCD_PREFIX):
+            servo_type = servo_type[len(self.CCD_PREFIX):]
         cmd = '%s_version' % servo_type
         try:
             return self.get(cmd, prefix=prefix)
         except error.TestFail:
             # Do not fail here, simply report the version as unknown.
-            logging.warn('Unable to query %r to get servo fw version.', cmd)
+            logging.warning('Unable to query %r to get servo fw version.', cmd)
             return 'unknown'
 
 
@@ -1757,12 +1866,18 @@
             return '%s_version.%s' % (dev, tag)
 
         fw_versions = {}
-        if 'servo_v4' not in self._servo_type:
+        # Note, this works because v4p1 starts with v4 as well.
+        # TODO(coconutruben): make this more robust so that it can work on
+        # a future v-whatever as well.
+        if 'servo_v4' not in self.get_servo_type():
             return {}
-        v4_tag = get_fw_version_tag('support', 'servo_v4')
-        fw_versions[v4_tag] = self._get_servo_type_fw_version('servo_v4')
-        if 'with' in self._servo_type:
-            dut_devs = self._servo_type.split('_with_')[1].split('_and_')
+        # v4 or v4p1
+        v4_flavor = self.get_servo_type().split('_with_')[0]
+        v4_tag = get_fw_version_tag('root', v4_flavor)
+        fw_versions[v4_tag] = self._get_servo_type_fw_version('servo_fw',
+                                                              prefix='root')
+        if 'with' in self.get_servo_type():
+            dut_devs = self.get_servo_type().split('_with_')[1].split('_and_')
             main_tag = get_fw_version_tag('main', dut_devs[0])
             fw_versions[main_tag] = self._get_servo_type_fw_version(dut_devs[0])
             if len(dut_devs) == 2:
@@ -1773,7 +1888,7 @@
                 # the time that there are more cases of '_and_' devices,
                 # this needs to be reworked.
                 dual_tag = get_fw_version_tag('ccd_flex_secondary', dut_devs[1])
-                fw = self._get_servo_type_fw_version(dut_devs[1], 'ccd_cr50')
+                fw = self._get_servo_type_fw_version(dut_devs[1], dut_devs[1])
                 fw_versions[dual_tag] = fw
         return fw_versions
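Editor's note: for reference, the mapping returned above for a dual setup has roughly this shape (an illustrative sketch with placeholder values, assuming servo_type is 'servo_v4_with_servo_micro_and_ccd_cr50'):

    expected_fw_versions = {
            'servo_v4_version.root': '<root servo firmware version>',
            'servo_micro_version.main': '<servo_micro firmware version>',
            'ccd_cr50_version.ccd_flex_secondary': '<cr50 version>',
    }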
 
@@ -1832,3 +1947,40 @@
             return None
 
         return self.get('vbus_voltage')
+
+    def supports_eth_power_control(self):
+        """True if servo supports power management for ethernet dongle."""
+        return self.has_control('dut_eth_pwr_en')
+
+    def set_eth_power(self, state):
+        """Set ethernet dongle power state, either 'on' or 'off'.
+
+        Note: this functionality is supported only on servo v4p1.
+
+        @param state: a string of 'on' or 'off'.
+        """
+        if state != 'off' and state != 'on':
+            raise error.TestError('Unknown ethernet power state request: %s' %
+                                  state)
+
+        if not self.supports_eth_power_control():
+            logging.info('Not a supported servo setup. Unable to set ethernet '
+                         'dongle power state %s.', state)
+            return
+
+        self.set_nocheck('dut_eth_pwr_en', state)
+
+    def eth_power_reset(self):
+        """Reset ethernet dongle power state if supported.
+
+        It does nothing if the servo setup does not support power management
+        for the ethernet dongle; it only logs information about this.
+        """
+        if self.supports_eth_power_control():
+            logging.info("Resetting servo's Ethernet controller...")
+            self.set_eth_power('off')
+            time.sleep(1)
+            self.set_eth_power('on')
+        else:
+            logging.info("Trying to reset servo's Ethernet controller, but "
+                         "this feature is not supported on this servo setup.")
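Editor's note: a usage sketch (not part of the change) that guards the Ethernet power-cycle behind the capability check, mirroring eth_power_reset() above. `servo` is assumed to be a Servo instance on a servo v4p1 setup.

    def maybe_reset_dut_ethernet(servo):
        """Power-cycle the DUT's Ethernet dongle when the servo supports it."""
        if not servo.supports_eth_power_control():
            return False
        servo.eth_power_reset()
        return True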
diff --git a/server/cros/servo/topology/common.py b/server/cros/servo/topology/common.py
index 6eebf8b..2967b41 100644
--- a/server/cros/servo/topology/common.py
+++ b/server/cros/servo/topology/common.py
@@ -1,4 +1,3 @@
-#!/usr/bin/python2
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/servo/topology/servo_topology.py b/server/cros/servo/topology/servo_topology.py
index 5ee911f..531a491 100644
--- a/server/cros/servo/topology/servo_topology.py
+++ b/server/cros/servo/topology/servo_topology.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python2
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -9,13 +8,11 @@
 from __future__ import division
 
 import os
-import copy
 import json
 import base64
 import logging
 
 import common
-from autotest_lib.client.common_lib import hosts
 from autotest_lib.server.cros.servo.topology import topology_constants as stc
 
 
@@ -42,20 +39,28 @@
 class ServoTopology(object):
     """Class to read, generate and validate servo topology in the lab.
 
-    The class support detection of servo listed in ST_PRODUCT_TYPES.
+    The class supports detection of servos listed in VID_PID_SERVO_TYPES.
     To save servo topology to host-info date passed two steps:
        - convert to the json
        - encode to base64
     """
+    # Command to get usb-path to device
+    SERVOD_TOOL_USB_PATH = 'servodtool device -s %s usb-path'
+
+    # Base folder where all servo devices will be enumerated.
+    SERVOS_BASE_PATH = '/sys/bus/usb/devices'
+
+    # Minimal length of usb-path for servo devices connected to the host.
+    MIN_SERVO_PATH = len(SERVOS_BASE_PATH + '/X')
 
     def __init__(self, servo_host):
         self._host = servo_host
-        self._topology = None
+        self.reset()
 
     def read(self, host_info):
         """Reading servo-topology info."""
         logging.info('Reading servo topology info...')
-        self._topology = None
+        self.reset()
         if not host_info:
             logging.info('The host_info not provided. Skip reading.')
             return
@@ -89,12 +94,19 @@
         host_info_store.commit(host_info)
         logging.info('Servo topology saved successfully.')
 
+    def reset(self):
+        """Reset topology to the initialize state.
+
+        All cash will be reset to empty state.
+        """
+        self._topology = None
+
     def generate(self):
         """Read servo data and create topology."""
+        self.reset()
         try:
             self._topology = self._generate()
         except Exception as e:
-            self._topology = None
             logging.debug('(Not critical) %s', e)
             logging.info('Fail to generate servo-topology')
         if not self.is_empty():
@@ -111,12 +123,15 @@
         - set-up expectation: min one child or 2 for DUAL_V4
         - last saved topology: check if any device missed
 
+        Update topology cache if validation passed successfully.
+
         @params raise_error: raise error if validate did not pass otherwise
                              return False.
         @params dual_set:    Check if servo expect DUAL_V4 setup.
         @params compare:     Validate against saved topology.
         """
         new_st = self._generate()
+        logging.debug("Generate topology: %s", new_st)
         if not new_st or not new_st.get(stc.ST_DEVICE_MAIN):
             message = 'Main device is not detected'
             return self._process_error(message, raise_error)
@@ -156,6 +171,16 @@
                 message = 'Some child is missed'
                 return self._process_error(message, raise_error)
         logging.info('Servo topology successfully verified.')
+        self._topology = new_st
+        return True
+
+    def is_servo_serial_provided(self):
+        """Verify that root servo serial is provided."""
+        root_servo_serial = self._host.servo_serial
+        if not root_servo_serial:
+            logging.info('Root servo serial is not provided.')
+            return False
+        logging.debug('Root servo serial: %s', root_servo_serial)
         return True
 
     def _process_error(self, message, raise_error):
@@ -184,95 +209,227 @@
         Read and generate topology structure with out update the state.
         """
         logging.debug('Trying generate a servo-topology')
-        core_servo_serial = self._host.servo_serial
-        if not core_servo_serial:
-            logging.info('Servo serial is not provided.')
-            return None
-        logging.debug('Getting topology for core servo: %s', core_servo_serial)
-        # collect main device info
-        cmd_hub = 'servodtool device -s %s usb-path' % core_servo_serial
-        servo_path = self._read_line(cmd_hub)
-        logging.debug('Device -%s path: %s', core_servo_serial, servo_path)
-        if not servo_path:
-            logging.info('Core servo not detected.')
-            return None
-        if not self._is_expected_type(servo_path):
-            return None
-        main_device = self._read_device_info(servo_path)
-        if not main_device:
-            logging.debug('Core device missed some data')
-            return None
-        # collect child device info
+        if not self.is_servo_serial_provided():
+            return
+        root_servo_serial = self._host.servo_serial
+        root_servo = None
         children = []
-        hub_path = servo_path[0:-2]
-        logging.debug('Core hub path: %s', hub_path)
-        devices_cmd = 'find %s/* -name serial' % hub_path
-        devices = self._read_multilines(devices_cmd)
-        core_device_port = main_device.get(stc.ST_DEVICE_HUB_PORT)
+        devices = self.get_list_of_devices()
         for device in devices:
-            logging.debug('Child device %s', device)
-            device_dir = os.path.dirname(device)
-            if not self._is_expected_type(device_dir):
-                # skip not expected device type like USB or hubs
+            if not device.is_good():
+                logging.info('Skip %s as it is missing some data', device)
                 continue
-            child = self._read_device_info(device_dir)
-            if not child:
-                logging.debug('Child missed some data.')
-                continue
-            if core_device_port == child.get(stc.ST_DEVICE_HUB_PORT):
-                logging.debug('Skip device if match with core device')
-                continue
-            children.append(child)
+            if device.get_serial_number() == root_servo_serial:
+                root_servo = device.get_topology_item()
+            else:
+                children.append(device.get_topology_item())
+        if not root_servo:
+            logging.debug('Root servo is missing!')
+            return None
         topology = {
-                stc.ST_DEVICE_MAIN: main_device,
+                stc.ST_DEVICE_MAIN: root_servo,
                 stc.ST_DEVICE_CHILDREN: children
         }
         logging.debug('Servo topology: %s', topology)
         return topology
 
-    def _is_expected_type(self, path):
-        """Check if device type is known servo type.
+    def _get_servo_hub_path(self, servo_serial):
+        """Get path to the servo hub.
 
-        Please update ST_PRODUCT_TYPES to extend more servo types.
+        The root servo is connected directly to the servo-hub. To find other
+        servos connected to the hub we need to find the path to the servo-hub.
+        The servodtool command always returns the direct path to the servo,
+        like:
+            /sys/bus/usb/devices/1-3.2.1
+            base path:  /sys/bus/usb/devices/
+            root-servo: 1-3.2.1
+        The equivalent path is '/sys/bus/usb/devices/1-3.2/1-3.2.1/', where
+        '1-3.2' is the path to the servo-hub. To extract the servo-hub path,
+        the logic parses out and removes the last digit of the port where the
+        root servo is connected to the servo-hub:
+            base path:  /sys/bus/usb/devices/
+            servo-hub:  1-3.2
+            root-servo: .1
+        Then only the base path and the servo-hub portion are joined.
+
+        @params servo_serial    Serial number of the servo connected to the hub
+        @returns: A string representation of the fs-path to the servo-hub device
         """
-        product = self._read_file(path, 'product')
-        if bool(stc.ST_PRODUCT_TYPES.get(product)):
-            return True
-        logging.info('Unknown product: %s', product)
-        return False
+        logging.debug('Try to find a hub-path for servo:%s', servo_serial)
+        cmd_hub = self.SERVOD_TOOL_USB_PATH % servo_serial
+        servo_path = self._read_line(cmd_hub)
+        logging.debug('Servo %s path: %s', servo_serial, servo_path)
+        if not servo_path or len(servo_path) < self.MIN_SERVO_PATH:
+            logging.info('Servo not detected.')
+            return None
+        base_path = os.path.dirname(servo_path)
+        root_servo_tail = os.path.basename(servo_path)
+        # Remove the last port segment, as it belongs to the root servo.
+        servo_hub_tail = '.'.join(root_servo_tail.split('.')[:-1])
+        return os.path.join(base_path, servo_hub_tail)
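Editor's note: a worked, self-contained example of the path arithmetic described above (sample path only, not part of the patch):

    import os

    def hub_path_for(servo_path):
        """Drop the last port segment of a root-servo sysfs path."""
        base_path = os.path.dirname(servo_path)          # /sys/bus/usb/devices
        root_servo_tail = os.path.basename(servo_path)   # 1-3.2.1
        servo_hub_tail = '.'.join(root_servo_tail.split('.')[:-1])  # 1-3.2
        return os.path.join(base_path, servo_hub_tail)

    assert (hub_path_for('/sys/bus/usb/devices/1-3.2.1')
            == '/sys/bus/usb/devices/1-3.2')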
 
-    def _read_device_info(self, path):
-        """Read device details for topology.
+    def get_root_servo(self):
+        """Get root servo device.
 
-        @params path: Absolute path to the device in FS.
+        @returns: ConnectedServo if device found.
         """
+        logging.debug('Try to find a root servo')
+        if not self.is_servo_serial_provided():
+            return None
+        # Find the path to the servo-hub folder.
+        root_servo_serial = self._host.servo_serial
+        cmd_hub = self.SERVOD_TOOL_USB_PATH % root_servo_serial
+        servo_path = self._read_line(cmd_hub)
+        logging.debug('Servo %s path: %s', root_servo_serial, servo_path)
+        if not servo_path or len(servo_path) < self.MIN_SERVO_PATH:
+            logging.info('Servo not detected.')
+            return None
+        device = self._get_device(servo_path)
+        if device and device.is_good():
+            return device
+        return None
+
+    def get_root_servo_from_cache(self):
+        """Get root servo device based on topology cache data.
+
+        First we try to find servo based on topology info.
+
+        @returns: ConnectedServo if device found.
+        """
+        logging.info('Trying to find root device from topology cache!')
+        if (not self._topology or not self._topology.get(stc.ST_DEVICE_MAIN)):
+            logging.info('Topology cache is empty or not present')
+            return None
+        devpath = self._topology.get(
+                stc.ST_DEVICE_MAIN)[stc.ST_DEVICE_HUB_PORT]
+        logging.debug('devpath=%s', devpath)
+        if not devpath:
+            return None
+        # devpath represents the sequence of ports used to detect the device
+        device_fs_port = '1-%s' % devpath
+        logging.debug('device_fs_port=%s', device_fs_port)
+        device_path = os.path.join(self.SERVOS_BASE_PATH, device_fs_port)
+        device = self._get_device(device_path)
+        logging.info('device=%s', device)
+        if device and device.is_good():
+            return device
+        logging.debug('Trying to verify presence of the hub!')
+        hub_folder = '.'.join(device_fs_port.split('.')[:-1])
+        logging.debug('servo_hub_folder=%s', hub_folder)
+        hub_product = os.path.join(self.SERVOS_BASE_PATH, hub_folder,
+                                   'product')
+        logging.debug('hub_product=%s', hub_product)
+        hub_name = self._read_line('cat %s' % hub_product)
+        logging.debug('hub_name=%s', hub_name)
+        if hub_name:
+            raise ServoTopologyError(
+                    'Root servo hardware potentially missing!')
+        raise ServoTopologyError(
+                'No USB device on expected port for the servo!')
+
+    def get_list_of_devices(self):
+        """Generate list of devices with serials.
+
+        Logic based on detecting all device enumerated under servo-hub device.
+
+        @returns: Collection of detected device connected to the servo-hub.
+        """
+        logging.debug('Trying to generate a device list for servo-topology')
+        if not self.is_servo_serial_provided():
+            return []
+        # Find the path to the servo-hub folder.
+        hub_path = self._get_servo_hub_path(self._host.servo_serial)
+        logging.debug('Servo hub path: %s', hub_path)
+        if not hub_path:
+            return []
+
+        # Find all 'serial' files of devices under the servo-hub. Each device
+        # has to have a serial number.
+        devices_cmd = 'find %s/* -name serial' % hub_path
+        devices = self._read_multilines(devices_cmd)
+        children = []
+        for device in devices:
+            logging.debug('Child device %s', device)
+            device_dir = os.path.dirname(device)
+            child = self._get_device(device_dir)
+            if not child:
+                logging.debug('Child missed some data.')
+                continue
+            children.append(child)
+        logging.debug('Detected devices: %s', len(children))
+        return children
+
+    def update_servo_version(self, device=None):
+        """Update version of servo device.
+
+        @params device: ConnectedServo instance.
+        """
+        if not device:
+            logging.debug('Device is not provided')
+            return
+        device._version = self._read_file(device.get_path(), 'configuration')
+        logging.debug('New servo version: %s', device.get_version())
+
+    def get_list_available_servos(self):
+        """List all servos enumerated on the host."""
+        logging.debug('Started process to collect all devices on the host.')
+        devices = []
+        # Look only for devices with the Google vendor-id (18d1).
+        cmd = 'grep -s  -R "18d1" %s/*/idVendor' % self.SERVOS_BASE_PATH
+        result_paths = self._read_multilines(cmd)
+        for path in result_paths:
+            idVendor_path = path.split(':')[0]
+            if not idVendor_path:
+                logging.debug('Cannot extract path to file from: %s', path)
+                continue
+            base_path = os.path.dirname(idVendor_path)
+            if not base_path:
+                logging.debug('Cannot extract base path from: %s',
+                              idVendor_path)
+                continue
+            device = self._get_device(base_path)
+            if not device:
+                logging.debug('Not found device under: %s', base_path)
+                continue
+            devices.append(device)
+        return devices
+
+    def _get_vid_pid(self, path):
+        """Read VID and PID of the device.
+
+        @params path    Absolute path to the device in FS.
+        @returns: A string representation VID:PID of device.
+        """
+        vid = self._read_file(path, 'idVendor')
+        pid = self._read_file(path, 'idProduct')
+        if not vid or not pid:
+            return None
+        vid_pid = '%s:%s' % (vid, pid)
+        logging.debug("VID/PID of device device: '%s'", vid_pid)
+        return vid_pid
+
+    def _get_device(self, path):
+        """Create device representation.
+
+        @params path:   Absolute path to the device in FS.
+        @returns: ConnectedServo if VID/PID present.
+        """
+        vid_pid = self._get_vid_pid(path)
+        if not vid_pid:
+            return None
         serial = self._read_file(path, 'serial')
         product = self._read_file(path, 'product')
         hub_path = self._read_file(path, 'devpath')
-        stype = stc.ST_PRODUCT_TYPES.get(product)
-        return self._create_item(serial, stype, product, hub_path)
-
-    def _create_item(self, servo_serial, servo_type, product, hub_path):
-        """Create topology item.
-
-        Return created item only if all details provided.
-
-        @params servo_serial:   Serial number of device.
-        @params servo_type:     Product type code of the device.
-        @params product:        Product name of the device.
-        @params hub_path:       Device enumerated folder name. Show the
-                                chain of used ports to connect the device.
-        """
-        item = {
-                stc.ST_DEVICE_SERIAL: servo_serial,
-                stc.ST_DEVICE_TYPE: servo_type,
-                stc.ST_DEVICE_PRODUCT: product,
-                stc.ST_DEVICE_HUB_PORT: hub_path
-        }
-        if not (servo_serial and servo_type and product and hub_path):
-            logging.debug('Some data missing: %s', item)
+        configuration = self._read_file(path, 'configuration')
+        servo_type = stc.VID_PID_SERVO_TYPES.get(vid_pid)
+        if not servo_type:
             return None
-        return item
+        return ConnectedServo(device_path=path,
+                              device_product=product,
+                              device_serial=serial,
+                              device_type=servo_type,
+                              device_vid_pid=vid_pid,
+                              device_hub_path=hub_path,
+                              device_version=configuration)
 
     def _read_file(self, path, file_name):
         """Read context of the file and return result as one line.
@@ -312,6 +469,60 @@
         return []
 
 
+class ConnectedServo(object):
+    """Class to hold info about connected detected."""
+
+    def __init__(self,
+                 device_path=None,
+                 device_product=None,
+                 device_serial=None,
+                 device_type=None,
+                 device_vid_pid=None,
+                 device_hub_path=None,
+                 device_version=None):
+        self._path = device_path
+        self._product = device_product
+        self._serial = device_serial
+        self._type = device_type
+        self._vid_pid = device_vid_pid
+        self._hub_path = device_hub_path
+        self._version = device_version
+
+    def get_topology_item(self):
+        """Extract as topology item."""
+        return {
+                stc.ST_DEVICE_SERIAL: self._serial,
+                stc.ST_DEVICE_TYPE: self._type,
+                stc.ST_DEVICE_PRODUCT: self._product,
+                stc.ST_DEVICE_HUB_PORT: self._hub_path
+        }
+
+    def is_good(self):
+        """Check if minimal data for topology item is present."""
+        return self._serial and self._type and self._hub_path
+
+    def get_type(self):
+        """Servo type."""
+        return self._type
+
+    def get_path(self):
+        """Path to servo folder in sysfs."""
+        return self._path
+
+    def get_serial_number(self):
+        """Servo serial number."""
+        return self._serial
+
+    def get_version(self):
+        """Get servo version."""
+        return self._version
+
+    def __str__(self):
+        return ("Device %s:%s (%s, %s) version: %s" %
+                (self._type, self._serial, self._vid_pid, self._hub_path,
+                 self._version))
+
+
 def _convert_topology_to_string(topology):
     """Convert topology to the string respresentation.
 
@@ -334,7 +545,7 @@
         # recommended to convert to the bytes for python 3
         b64_string = base64.b64encode(json_string.encode("utf-8"))
         logging.debug('Servo topology (b64): %s', b64_string)
-        return b64_string
+        return b64_string.decode()
     except Exception as e:
         logging.debug('(Not critical) %s', e)
         logging.info('Failed to convert topology to base64')
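Editor's note: a standalone round-trip sketch of the encoding above (placeholder keys, not the stc.ST_* constants), showing why .decode() is needed on Python 3, where b64encode returns bytes:

    import base64
    import json

    topology = {'main': {'serial': 'C123'}, 'children': []}
    b64_string = base64.b64encode(json.dumps(topology).encode('utf-8')).decode()
    assert isinstance(b64_string, str)
    assert json.loads(base64.b64decode(b64_string).decode('utf-8')) == topology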
diff --git a/server/cros/servo/topology/servo_topology_unittest.py b/server/cros/servo/topology/servo_topology_unittest.py
index 890d2f8..a5caf89 100644
--- a/server/cros/servo/topology/servo_topology_unittest.py
+++ b/server/cros/servo/topology/servo_topology_unittest.py
@@ -1,13 +1,11 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import mock
 import unittest
 
 import common
-from autotest_lib.server.cros.servo.topology import topology_constants as stc
 from autotest_lib.server.cros.servo.topology import servo_topology
 
 # pylint: disable=missing-docstring
diff --git a/server/cros/servo/topology/topology_constants.py b/server/cros/servo/topology/topology_constants.py
index 6702a5e..70e6c51 100644
--- a/server/cros/servo/topology/topology_constants.py
+++ b/server/cros/servo/topology/topology_constants.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python2
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -14,16 +13,31 @@
 ST_DEVICE_PRODUCT = 'sysfs_product'
 ST_DEVICE_HUB_PORT = 'usb_hub_port'
 
+ST_V4_TYPE = 'servo_v4'
+ST_V4P1_TYPE = 'servo_v4p1'
 ST_CR50_TYPE = 'ccd_cr50'
+ST_C2D2_TYPE = 'c2d2'
+ST_SERVO_MICRO_TYPE = 'servo_micro'
+ST_SWEETBERRY_TYPE = 'sweetberry'
 
 # Mapping between product names and types.
 ST_PRODUCT_TYPES = {
-        'Servo V4': 'servo_v4',
-        'Servo V4p1': 'servo_v4p1',
+        'Servo V4': ST_V4_TYPE,
+        'Servo V4p1': ST_V4P1_TYPE,
         'Cr50': ST_CR50_TYPE,
-        'Servo Micro': 'servo_micro',
-        'C2D2': 'c2d2',
-        'Sweetberry': 'sweetberry'
+        'Servo Micro': ST_SERVO_MICRO_TYPE,
+        'C2D2': ST_C2D2_TYPE,
+        'Sweetberry': ST_SWEETBERRY_TYPE
+}
+
+# Mapping vid-pid to servo types
+VID_PID_SERVO_TYPES = {
+        '18d1:501b': ST_V4_TYPE,
+        '18d1:520d': ST_V4P1_TYPE,
+        '18d1:5014': ST_CR50_TYPE,
+        '18d1:501a': ST_SERVO_MICRO_TYPE,
+        '18d1:5041': ST_C2D2_TYPE,
+        '18d1:5020': ST_SWEETBERRY_TYPE
 }
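Editor's note: an illustrative lookup using the mapping above (assumes this module's VID_PID_SERVO_TYPES is in scope; the helper name is hypothetical):

    def servo_type_for(vid, pid):
        """Return the servo type for a sysfs idVendor/idProduct pair, or None."""
        return VID_PID_SERVO_TYPES.get('%s:%s' % (vid, pid))

    # servo_type_for('18d1', '501a') == 'servo_micro'
    # servo_type_for('dead', 'beef') is None  (unknown devices are skipped)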
 
 # List unchangeable fields per device.
diff --git a/server/cros/storage/storage_validate.py b/server/cros/storage/storage_validate.py
index 2df49bd..a71bc5e 100644
--- a/server/cros/storage/storage_validate.py
+++ b/server/cros/storage/storage_validate.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python2
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -124,10 +123,8 @@
             if run_badblocks is None:
                 if _is_time_to_run_badblocks_ro(dhp):
                     run_badblocks = BADBLOCK_CHECK_RO
-                # Blocked for now till we confirm that SMART stats is not
-                # detect is before we do.
-                # if usb_boot and _is_time_to_run_badblocks_rw(dhp):
-                #     run_badblocks = BADBLOCK_CHECK_RW
+                if usb_boot and _is_time_to_run_badblocks_rw(dhp):
+                    run_badblocks = BADBLOCK_CHECK_RW
             logging.debug('run_badblocks=%s', run_badblocks)
             if usb_boot and run_badblocks == BADBLOCK_CHECK_RW:
                 self._run_read_write_badblocks_check()
diff --git a/server/cros/storage/storage_validate_unittest.py b/server/cros/storage/storage_validate_unittest.py
deleted file mode 100644
index b919332..0000000
--- a/server/cros/storage/storage_validate_unittest.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-import unittest
-import mock
-
-import common
-from autotest_lib.server.cros.storage import storage_validate
-from autotest_lib.server.cros.device_health_profile import device_health_profile
-from autotest_lib.server.cros.device_health_profile import profile_constants
-
-
-class MockHostInfoStore(object):
-    def __init__(self):
-        self.board = 'mock_board'
-        self.model = 'mock_model'
-
-
-class MockHost(object):
-    def __init__(self, hostname):
-        self.hostname = hostname
-        self.host_info_store = mock.Mock()
-        self.host_info_store.get.return_value = MockHostInfoStore()
-        self.job = None
-
-    def check_cached_up_status(self):
-        return True
-
-    def is_up(self):
-        return True
-
-    def send_file(self, source, dest):
-        return True
-
-    def get_file(self, source, dest):
-        return True
-
-    def is_file_exists(self, file_path):
-        return False
-
-
-def create_device_health_profile():
-    servohost = MockHost('dummy_servohost_hostname')
-    dhp = device_health_profile.DeviceHealthProfile(
-            hostname='dummy_dut_hostname',
-            host_info=MockHostInfoStore(),
-            result_dir=None)
-    dhp.init_profile(servohost)
-    return dhp
-
-
-def _add_days_to_time(secs, days):
-    new_time = time.localtime(secs + (days * 24 * 60 * 60))
-    return time.strftime(profile_constants.TIME_PATTERN, new_time)
-
-
-class BadblocksRunReadyTestCase(unittest.TestCase):
-    dhp = create_device_health_profile()
-
-    def test_is_time_to_run_badblocks_ro(self):
-        self.dhp.refresh_badblocks_ro_run_time()
-        last_time = self.dhp.get_badblocks_ro_run_time_epoch()
-        # sleep for a second to make difference from now to avoid flakiness
-        time.sleep(1)
-        self.assertFalse(
-                storage_validate._is_time_to_run_badblocks_ro(self.dhp))
-        # set 5 days ago
-        self.dhp._update_profile(
-                profile_constants.LAST_BADBLOCKS_RO_RUN_TIME_KEY,
-                _add_days_to_time(last_time, -5))
-        self.assertFalse(
-                storage_validate._is_time_to_run_badblocks_ro(self.dhp))
-        # set 6 days ago
-        self.dhp._update_profile(
-                profile_constants.LAST_BADBLOCKS_RO_RUN_TIME_KEY,
-                _add_days_to_time(last_time, -6))
-        self.assertTrue(storage_validate._is_time_to_run_badblocks_ro(
-                self.dhp))
-        # set 7 days ago
-        self.dhp._update_profile(
-                profile_constants.LAST_BADBLOCKS_RO_RUN_TIME_KEY,
-                _add_days_to_time(last_time, -7))
-        self.assertTrue(storage_validate._is_time_to_run_badblocks_ro(
-                self.dhp))
-
-    def test_is_time_to_run_badblocks_rw(self):
-        self.dhp.refresh_badblocks_rw_run_time()
-        last_time = self.dhp.get_badblocks_rw_run_time_epoch()
-        # sleep for a second to make difference from now to avoid flakiness
-        time.sleep(1)
-        self.assertFalse(
-                storage_validate._is_time_to_run_badblocks_rw(self.dhp))
-        # set 59 days ago
-        self.dhp._update_profile(
-                profile_constants.LAST_BADBLOCKS_RW_RUN_TIME_KEY,
-                _add_days_to_time(last_time, -59))
-        self.assertFalse(
-                storage_validate._is_time_to_run_badblocks_rw(self.dhp))
-        # set 60 days ago
-        self.dhp._update_profile(
-                profile_constants.LAST_BADBLOCKS_RW_RUN_TIME_KEY,
-                _add_days_to_time(last_time, -60))
-        self.assertTrue(storage_validate._is_time_to_run_badblocks_rw(
-                self.dhp))
-        # set 61 days ago
-        self.dhp._update_profile(
-                profile_constants.LAST_BADBLOCKS_RW_RUN_TIME_KEY,
-                _add_days_to_time(last_time, -61))
-        self.assertTrue(storage_validate._is_time_to_run_badblocks_rw(
-                self.dhp))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/server/cros/stress_unittest.py b/server/cros/stress_unittest.py
index 44e3091..456223e 100755
--- a/server/cros/stress_unittest.py
+++ b/server/cros/stress_unittest.py
@@ -5,7 +5,9 @@
 import threading
 import unittest
 
-import stress
+import common
+
+from autotest_lib.server.cros import stress
 
 
 class StopThreadForTesting(Exception):
diff --git a/server/cros/telemetry_runner.py b/server/cros/telemetry_runner.py
index 1010f10..245131f 100644
--- a/server/cros/telemetry_runner.py
+++ b/server/cros/telemetry_runner.py
@@ -1,4 +1,4 @@
-# Lint as: python2, python3
+# Lint as: python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,17 +7,20 @@
 from __future__ import division
 from __future__ import print_function
 
+import abc
 import json
 import logging
 import numbers
 import os
 import tempfile
 import six
+import sys
 
 import numpy
 
+import common
 from autotest_lib.client.common_lib import error, utils
-from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.server.cros import telemetry_setup
 
 TELEMETRY_RUN_BENCHMARKS_SCRIPT = 'tools/perf/run_benchmark'
 TELEMETRY_RUN_TESTS_SCRIPT = 'tools/telemetry/run_tests'
@@ -66,108 +69,71 @@
         self.output = '\n'.join([stdout, stderr])
 
 
-class TelemetryRunner(object):
+class TelemetryRunnerFactory(object):
+    """A factory class to determine TelemetryRunner subclass to be used.
+
+    The TelemetryRunner class today has various ways to execute the telemetry
+    test. The test can be executed locally (using a tool like test_that) or in
+    the lab environment, where either the drone or the devserver can be used.
+
+    A factory class offloads this decision from the clients. Users of the
+    TelemetryRunner class are highly encouraged to go through this factory
+    class when determining the correct TelemetryRunner subclass.
+    """
+
+    def get_runner(self,
+                   host,
+                   local=False,
+                   telemetry_on_dut=True,
+                   is_lacros=False):
+        """Method to determine which TelemetryRunner subclass to use."""
+        if local:
+            return LocalTelemetryRunner(host, telemetry_on_dut)
+        else:
+            return DroneTelemetryRunner(host, telemetry_on_dut, is_lacros)
+
+
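Editor's note: a usage sketch (not part of the change) combining the factory with the context-manager protocol added below. The run_telemetry_benchmark call is assumed from the existing TelemetryRunner API and is not shown in this diff.

    def run_speedometer_locally(host):
        """Run a benchmark through a locally constructed TelemetryRunner."""
        runner = TelemetryRunnerFactory().get_runner(host, local=True)
        with runner:
            return runner.run_telemetry_benchmark('speedometer2')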
+class TelemetryRunner(six.with_metaclass(abc.ABCMeta, object)):
     """Class responsible for telemetry for a given build.
 
-    This class will extract and install telemetry on the devserver and is
+    This class will extract and install the telemetry environment and is
     responsible for executing the telemetry benchmarks and returning their
     output to the caller.
     """
 
-    def __init__(self, host, local=False, telemetry_on_dut=True):
+    def __init__(self, host, telemetry_on_dut=True, is_lacros=False):
         """Initializes this telemetry runner instance.
 
         If telemetry is not installed for this build, it will be.
 
-        Basically, the following commands on the local pc on which test_that
-        will be executed, depending on the 4 possible combinations of
-        local x telemetry_on_dut:
-
-        local=True, telemetry_on_dut=False:
-        python2 run_benchmark --browser=cros-chrome --remote=[dut] [test]
-
-        local=True, telemetry_on_dut=True:
-        ssh [dut] python2 run_benchmark --browser=system [test]
-
-        local=False, telemetry_on_dut=False:
-        ssh [devserver] python2 run_benchmark --browser=cros-chrome
-        --remote=[dut] [test]
-
-        local=False, telemetry_on_dut=True:
-        ssh [devserver] ssh [dut] python2 run_benchmark --browser=system [test]
-
         @param host: Host where the test will be run.
-        @param local: If set, no devserver will be used, test will be run
-                      locally.
-                      If not set, "ssh [devserver] " will be appended to test
-                      commands.
         @param telemetry_on_dut: If set, telemetry itself (the test harness)
                                  will run on dut.
                                  It decides browser=[system|cros-chrome]
+        @param is_lacros: If true, run telemetry on lacros chrome, by defining
+                          browser=lacros-chrome. It is only valid for remote
+                          test mode.
         """
         self._host = host
-        self._devserver = None
         self._telemetry_path = None
         self._perf_value_writer = None
+        self._setup_telemetry()
         self._telemetry_on_dut = telemetry_on_dut
-        # TODO (llozano crbug.com/324964). Remove conditional code.
-        # Use a class hierarchy instead.
-        if local:
-            self._setup_local_telemetry()
-        else:
-            self._setup_devserver_telemetry()
         self._benchmark_deps = None
-
+        self._is_lacros = is_lacros
         logging.debug('Telemetry Path: %s', self._telemetry_path)
 
-    def _setup_devserver_telemetry(self):
-        """Setup Telemetry to use the devserver."""
-        logging.debug('Setting up telemetry for devserver testing')
-        logging.debug('Grabbing build from AFE.')
-        info = self._host.host_info_store.get()
-        if not info.build:
-            logging.error('Unable to locate build label for host: %s.',
-                          self._host.host_port)
-            raise error.AutotestError(
-                    'Failed to grab build for host %s.' % self._host.host_port)
+    def __enter__(self):
+        """Called while entering context manager; does nothing."""
+        return self
 
-        logging.debug('Setting up telemetry for build: %s', info.build)
+    def __exit__(self, exc_type, exc_value, traceback):
+        """Called while exiting context manager."""
 
-        self._devserver = dev_server.ImageServer.resolve(
-                info.build, hostname=self._host.hostname)
-        self._devserver.stage_artifacts(info.build, ['autotest_packages'])
-        self._telemetry_path = self._devserver.setup_telemetry(
-                build=info.build)
-
-    def _setup_local_telemetry(self):
-        """Setup Telemetry to use local path to its sources.
-
-        First look for chrome source root, either externally mounted, or inside
-        the chroot.  Prefer chrome-src-internal source tree to chrome-src.
-        """
-        TELEMETRY_DIR = 'src'
-        CHROME_LOCAL_SRC = '/var/cache/chromeos-cache/distfiles/target/'
-        CHROME_EXTERNAL_SRC = os.path.expanduser('~/chrome_root/')
-
-        logging.debug('Setting up telemetry for local testing')
-
-        sources_list = ('chrome-src-internal', 'chrome-src')
-        dir_list = [CHROME_EXTERNAL_SRC]
-        dir_list.extend(
-                [os.path.join(CHROME_LOCAL_SRC, x) for x in sources_list])
-        if 'CHROME_ROOT' in os.environ:
-            dir_list.insert(0, os.environ['CHROME_ROOT'])
-
-        telemetry_src = ''
-        for dir in dir_list:
-            if os.path.exists(dir):
-                telemetry_src = os.path.join(dir, TELEMETRY_DIR)
-                break
-        else:
-            raise error.TestError('Telemetry source directory not found.')
-
-        self._devserver = None
-        self._telemetry_path = telemetry_src
+    @abc.abstractmethod
+    def _setup_telemetry(self):
+        """Set up telemetry environment."""
 
     def _get_telemetry_cmd(self, script, test_or_benchmark, output_format,
                            *args, **kwargs):
@@ -187,10 +153,6 @@
         @returns Full telemetry command to execute the script.
         """
         telemetry_cmd = []
-        if self._devserver:
-            devserver_hostname = self._devserver.hostname
-            telemetry_cmd.extend(['ssh', devserver_hostname])
-
         no_verbose = kwargs.get('no_verbose', False)
 
         output_dir = (DUT_CHROME_ROOT
@@ -202,24 +164,25 @@
 
         if self._telemetry_on_dut:
             telemetry_cmd.extend([
-                    self._host.ssh_command(
-                            alive_interval=900, connection_attempts=4),
-                    'python2',
+                    self._host.ssh_command(alive_interval=900,
+                                           connection_attempts=4),
+                    sys.executable,
                     script,
                     '--output-format=%s' % output_format,
                     '--output-dir=%s' % output_dir,
                     '--browser=system',
             ])
         else:
+            browser = 'lacros-chrome' if self._is_lacros else 'cros-chrome'
             telemetry_cmd.extend([
-                    'python2',
+                    sys.executable,
                     script,
-                    '--browser=cros-chrome',
+                    '--browser=%s' % browser,
                     '--output-format=%s' % output_format,
                     '--output-dir=%s' % output_dir,
                     '--remote=%s' % self._host.hostname,
             ])
-            if self._host.host_port != self._host.hostname:
+            if self._host.host_port != self._host.hostname and self._host.host_port:
                 # If the user specify a different port for the DUT, we should
                 # use different telemetry argument to set it up.
                 #
@@ -238,7 +201,7 @@
 
     def _scp_telemetry_results_cmd(self, perf_results_dir, output_format,
                                    artifacts):
-        """Build command to copy the telemetry results from the devserver.
+        """Build command to copy the telemetry results from the work directory.
 
         @param perf_results_dir: directory path where test output is to be
                                  collected.
@@ -266,16 +229,13 @@
             src = 'root@%s:%s' % (self._host.hostname, DUT_CHROME_ROOT)
         else:
             # Use rsync --remove-source-file to move rather than copy from
-            # server. This is because each run will generate certain artifacts
+            # work dir. This is because each run will generate certain artifacts
             # and will not be removed after, making result size getting larger.
             # We don't do this for results on DUT because 1) rsync doesn't work
             # 2) DUT will be reflashed frequently and no need to worry about
             # result size.
             scp_cmd.extend(['rsync', '-avz', '--remove-source-files'])
-            devserver_hostname = ''
-            if self._devserver:
-                devserver_hostname = self._devserver.hostname + ':'
-            src = '%s%s' % (devserver_hostname, self._telemetry_path)
+            src = self._telemetry_path
 
         if self._perf_value_writer:
             src = os.path.join(src, self._perf_value_writer.tmpdir.strip('/'))
@@ -375,7 +335,7 @@
         """Runs a telemetry test on a dut.
 
         @param script: Which telemetry test script we want to run. Can be
-                       telemetry's base test script or the Chrome OS specific
+                       telemetry's base test script or the ChromeOS specific
                        test script.
         @param test: Telemetry test we want to run.
         @param args: additional list of arguments to pass to the script.
@@ -475,16 +435,11 @@
                  execution.
         """
         script = os.path.join(DUT_CHROME_ROOT, TELEMETRY_RUN_GPU_TESTS_SCRIPT)
-        cmd = []
-        if self._devserver:
-            devserver_hostname = self._devserver.hostname
-            cmd.extend(['ssh', devserver_hostname])
-
-        cmd.extend([
-                self._host.ssh_command(
-                        alive_interval=900, connection_attempts=4), 'python2',
+        cmd = [
+                self._host.ssh_command(alive_interval=900,
+                                       connection_attempts=4), sys.executable,
                 script
-        ])
+        ]
         cmd.extend(args)
         cmd.append(test)
         cmd = ' '.join(cmd)
@@ -515,15 +470,9 @@
         self._benchmark_deps = tempfile.NamedTemporaryFile(
                 prefix='fetch_benchmark_deps_result.', suffix='.json')
         deps_path = self._benchmark_deps.name
-        format_fetch = ('python2 %s --output-deps=%s %s')
-        command_fetch = format_fetch % (fetch_path, deps_path, test_name)
-        command_get = 'cat %s' % deps_path
-
-        if self._devserver:
-            devserver_hostname = self._devserver.url().split(
-                    'http://')[1].split(':')[0]
-            command_fetch = 'ssh %s %s' % (devserver_hostname, command_fetch)
-            command_get = 'ssh %s %s' % (devserver_hostname, command_get)
+        command_fetch = (f'{sys.executable} {fetch_path} '
+                         f'--output-deps={deps_path} {test_name}')
+        command_get = f'cat {deps_path}'
 
         logging.info('Getting DEPs: %s', command_fetch)
         _, _, exit_code = self._run_cmd(command_fetch)
@@ -539,17 +488,10 @@
         for dep in deps[test_name]:
             src = os.path.join(self._telemetry_path, dep)
             dst = os.path.join(DUT_CHROME_ROOT, dep)
-            if self._devserver:
-                logging.info('Copying: %s -> %s', src, dst)
-                rsync_cmd = utils.sh_escape(
-                        'rsync %s %s %s:%s' % (self._host.rsync_options(), src,
-                                               self._host.hostname, dst))
-                utils.run('ssh %s "%s"' % (devserver_hostname, rsync_cmd))
-            else:
-                if not os.path.isfile(src):
-                    raise error.TestFail('Error occurred while saving DEPs.')
-                logging.info('Copying: %s -> %s', src, dst)
-                dut.send_file(src, dst)
+            if not os.path.isfile(src):
+                raise error.TestFail('Error occurred while saving DEPs.')
+            logging.info('Copying: %s -> %s', src, dst)
+            dut.send_file(src, dst)
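A minimal sketch of the data flow above, assuming the deps file written by the fetch script maps a test name to a list of repository-relative paths; the test name, the paths, and the DUT_CHROME_ROOT value are illustrative, not taken from this change:

# Hypothetical shape of the deps JSON and the copy step above.
import json
import os

example_deps_json = '{"speedometer2": ["tools/perf/page_sets/data/foo.wpr"]}'
deps = json.loads(example_deps_json)

telemetry_path = '/tmp/telemetry-src'         # stands in for self._telemetry_path
dut_chrome_root = '/usr/local/telemetry/src'  # assumed value of DUT_CHROME_ROOT
for dep in deps['speedometer2']:
    src = os.path.join(telemetry_path, dep)
    dst = os.path.join(dut_chrome_root, dep)
    print('Would copy %s -> %s' % (src, dst))  # dut.send_file(src, dst) in the real code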
 
     @staticmethod
     def convert_chart_json(histogram_set):
@@ -659,3 +601,116 @@
                 'charts': charts,
                 'format_version': 1.0
         }
+
+
+class LocalTelemetryRunner(TelemetryRunner):
+    """Specialized TelemetryRunner to handle local telemetry test runs."""
+
+    def __init__(self, *args, **kwargs):
+        """Initialize LocalTelemetryRunner.
+
+        The telemetry test will run locally. Depending on whether
+        telemetry_on_dut is True or False, there are two possible ways to
+        execute this test:
+
+        telemetry_on_dut=False:
+        python run_benchmark --browser=cros-chrome --remote=[dut] [test]
+
+        telemetry_on_dut=True:
+        ssh [dut] python run_benchmark --browser=system [test]
+
+        @param args: The list of arguments to be passed. See Base class for a
+                     complete list of accepted arguments.
+        @param kwargs: Any keyword arguments to be passed. See Base class for a
+                       complete list of accepted keyword arguments.
+        """
+        super(LocalTelemetryRunner, self).__init__(*args, **kwargs)
+
+    def _setup_telemetry(self):
+        """Setup Telemetry to use local path to its sources.
+
+        First look for chrome source root, either externally mounted, or inside
+        the chroot.  Prefer chrome-src-internal source tree to chrome-src.
+        """
+        TELEMETRY_DIR = 'src'
+        CHROME_LOCAL_SRC = '/var/cache/chromeos-cache/distfiles/target/'
+        CHROME_EXTERNAL_SRC = os.path.expanduser('~/chrome_root/')
+
+        logging.debug('Setting up telemetry for local testing')
+
+        sources_list = ('chrome-src-internal', 'chrome-src')
+        dir_list = [CHROME_EXTERNAL_SRC]
+        dir_list.extend(
+                [os.path.join(CHROME_LOCAL_SRC, x) for x in sources_list])
+        if 'CHROME_ROOT' in os.environ:
+            dir_list.insert(0, os.environ['CHROME_ROOT'])
+
+        telemetry_src = ''
+        for dir in dir_list:
+            if os.path.exists(dir):
+                telemetry_src = os.path.join(dir, TELEMETRY_DIR)
+                break
+        else:
+            raise error.TestError('Telemetry source directory not found.')
+
+        self._telemetry_path = telemetry_src
+
+
+class DroneTelemetryRunner(TelemetryRunner):
+    """Handle telemetry test setup on the drone.
+
+    Users of this class are strongly advised to use this class as a context
+    manager. Since the setup for telemetry environment happens on the drone, it
+    is imperative that this setup be cleaned up once the test is done. Using
+    this class as a context manager will transfer the burden of clean up from
+    the user to Python.
+    """
+
+    def __init__(self, *args, **kwargs):
+        """Initialize DroneTelemetryRunner.
+
+        The telemetry test will run on the drone. Depending on whether
+        telemetry_on_dut is True or False, there are two possible ways to
+        execute this test:
+
+        telemetry_on_dut=False:
+        python run_benchmark --browser=cros-chrome --remote=[dut] [test]
+
+        telemetry_on_dut=True:
+        ssh [dut] python run_benchmark --browser=system [test]
+
+        @param args: The list of arguments to be passed. See Base class for a
+                     complete list of accepted arguments.
+        @param kwargs: Any keyword arguments to be passed. See Base class for a
+                       complete list of accepted keyword arguments.
+        """
+        self._telemetry_setup = None
+        super(DroneTelemetryRunner, self).__init__(*args, **kwargs)
+
+    def __enter__(self):
+        """Called while entering context manager; does nothing."""
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        """Called while exiting context manager; cleans up temp files."""
+        logging.info('Cleaning up the telemetry environment on the drone.')
+        self._telemetry_setup.Cleanup()
+
+    def _setup_telemetry(self):
+        """Setup Telemetry on the drone."""
+        logging.debug('Setting up telemetry on the drone')
+        info = self._host.host_info_store.get()
+        if not info.build:
+            logging.error('Unable to locate build label for host: %s.',
+                          self._host.host_port)
+            raise error.AutotestError('Failed to grab build for host %s.' %
+                                      self._host.host_port)
+
+        logging.debug('Setting up telemetry for build: %s', info.build)
+        try:
+            self._telemetry_setup = telemetry_setup.TelemetrySetup(
+                    hostname=self._host.hostname, build=info.build)
+            self._telemetry_path = self._telemetry_setup.Setup()
+        except telemetry_setup.TelemetrySetupError as e:
+            raise error.AutotestError('Telemetry Environment could not be '
+                                      'setup: %s.' % e)
diff --git a/server/cros/telemetry_runner_unittest.py b/server/cros/telemetry_runner_unittest.py
index ba7b588..26d1961 100755
--- a/server/cros/telemetry_runner_unittest.py
+++ b/server/cros/telemetry_runner_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/telemetry_setup.py b/server/cros/telemetry_setup.py
new file mode 100644
index 0000000..a7d05bf
--- /dev/null
+++ b/server/cros/telemetry_setup.py
@@ -0,0 +1,247 @@
+# -*- coding: utf-8 -*-
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""A class that sets up the environment for telemetry testing."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from autotest_lib.client.common_lib.cros import dev_server
+
+import contextlib
+import errno
+import fcntl
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+
+import requests
+
+_READ_BUFFER_SIZE_BYTES = 1024 * 1024  # 1 MB
+
+
+@contextlib.contextmanager
+def lock_dir(dir_name):
+    """Lock a directory exclusively by placing a file lock in it.
+
+    Args:
+      dir_name: the directory name to be locked.
+    """
+    lock_file = os.path.join(dir_name, '.lock')
+    with open(lock_file, 'w+') as f:
+        fcntl.flock(f, fcntl.LOCK_EX)
+        try:
+            yield
+        finally:
+            fcntl.flock(f, fcntl.LOCK_UN)
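A short usage sketch of lock_dir(); the work directory path is hypothetical:

# Serializes concurrent setups of the same work directory: flock(LOCK_EX) on
# <dir>/.lock blocks a second caller until the first leaves the with-block.
import os

work_dir = '/tmp/telemetry-workdir/example-build/telemetry_src'  # example path
os.makedirs(work_dir, exist_ok=True)
with lock_dir(work_dir):
    # Download and extract dependencies here; no other process holds the lock
    # on this directory while this block runs.
    pass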
+
+
+class TelemetrySetupError(Exception):
+    """Exception class used by this module."""
+    pass
+
+
+class TelemetrySetup(object):
+    """Class that sets up the environment for telemetry testing."""
+
+    # Relevant directory paths.
+    _BASE_DIR_PATH = '/tmp/telemetry-workdir'
+    _PARTIAL_DEPENDENCY_DIR_PATH = 'autotest/packages'
+
+    # Relevant directory names.
+    _TELEMETRY_SRC_DIR_NAME = 'telemetry_src'
+    _TEST_SRC_DIR_NAME = 'test_src'
+    _SRC_DIR_NAME = 'src'
+
+    # Names of the telemetry dependency tarballs.
+    _DEPENDENCIES = [
+            'dep-telemetry_dep.tar.bz2',
+            'dep-page_cycler_dep.tar.bz2',
+            'dep-chrome_test.tar.bz2',
+            'dep-perf_data_dep.tar.bz2',
+    ]
+
+    # Partial devserver URLs.
+    _STATIC_URL_TEMPLATE = '%s/static/%s/autotest/packages/%s'
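An illustrative expansion of _STATIC_URL_TEMPLATE; the devserver URL and build string are hypothetical:

# '%s/static/%s/autotest/packages/%s' % (devserver_url, build, filename)
print('%s/static/%s/autotest/packages/%s' %
      ('http://devserver.example:8082', 'eve-release/R99-1234.0.0',
       'dep-telemetry_dep.tar.bz2'))
# -> http://devserver.example:8082/static/eve-release/R99-1234.0.0/autotest/packages/dep-telemetry_dep.tar.bz2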
+
+    def __init__(self, hostname, build):
+        """Initializes the TelemetrySetup class.
+
+        Args:
+        hostname: The host for which the telemetry environment should be set
+            up. This is important for devserver resolution.
+        build: The build for which the telemetry environment should be set up.
+            It is typically in the format <board>/<version>.
+        """
+        self._build = build
+        self._ds = dev_server.ImageServer.resolve(self._build,
+                                                  hostname=hostname)
+        self._setup_dir_path = tempfile.mkdtemp(prefix='telemetry-setupdir_')
+        self._tmp_build_dir = os.path.join(self._BASE_DIR_PATH, self._build)
+        self._tlm_src_dir_path = os.path.join(self._tmp_build_dir,
+                                              self._TELEMETRY_SRC_DIR_NAME)
+
+    def Setup(self):
+        """Sets up the environment for telemetry testing.
+
+        This method downloads the telemetry dependency tarballs and extracts
+        them into a 'src' directory.
+
+        Returns:
+        Path to the src directory where the telemetry dependencies have been
+            downloaded and extracted.
+        """
+        src_folder = os.path.join(self._tlm_src_dir_path, self._SRC_DIR_NAME)
+        test_src = os.path.join(self._tlm_src_dir_path,
+                                self._TEST_SRC_DIR_NAME)
+        self._MkDirP(self._tlm_src_dir_path)
+        with lock_dir(self._tlm_src_dir_path):
+            if not os.path.exists(src_folder):
+                # Download the required dependency tarballs.
+                for dep in self._DEPENDENCIES:
+                    dep_path = self._DownloadFilesFromDevserver(
+                            dep, self._setup_dir_path)
+                    if os.path.exists(dep_path):
+                        self._ExtractTarball(dep_path, self._tlm_src_dir_path)
+
+                # By default all the tarballs extract to test_src, but some
+                # parts of the telemetry code are hardcoded to expect the
+                # sources inside 'src'.
+                try:
+                    shutil.move(test_src, src_folder)
+                except shutil.Error:
+                    raise TelemetrySetupError(
+                            'Failure in telemetry setup for build %s. Appears '
+                            'that the test_src to src move failed.' %
+                            self._build)
+        return src_folder
+
+    def _DownloadFilesFromDevserver(self, filename, dest_path):
+        """Downloads the given tar.bz2 file from the devserver.
+
+        Args:
+          filename: Name of the tar.bz2 file to be downloaded.
+          dest_path: Full path to the directory where it should be downloaded.
+
+        Returns:
+            Full path to the downloaded file.
+
+        Raises:
+          TelemetrySetupError when the download cannot be completed for any
+              reason.
+        """
+        dep_path = os.path.join(dest_path, filename)
+        url = (self._STATIC_URL_TEMPLATE %
+               (self._ds.url(), self._build, filename))
+        try:
+            resp = requests.get(url)
+            resp.raise_for_status()
+            with open(dep_path, 'wb') as f:
+                for content in resp.iter_content(_READ_BUFFER_SIZE_BYTES):
+                    f.write(content)
+        except Exception as e:
+            if (isinstance(e, requests.exceptions.HTTPError)
+                        and resp.status_code == 404):
+                logging.error(
+                        'The request %s returned a 404 Not Found status. '
+                        'This dependency could be new and therefore does not '
+                        'exist. Hence, squashing the exception and proceeding.',
+                        url)
+            elif isinstance(e, requests.exceptions.ConnectionError):
+                logging.warning(
+                        'The request failed because a connection to the devserver '
+                        '%s could not be established. Attempting to execute the '
+                        'request %s once by SSH-ing into the devserver.',
+                        self._ds.url(), url)
+                return self._DownloadFilesFromDevserverViaSSH(url, dep_path)
+            else:
+                raise TelemetrySetupError(
+                        'An error occurred while trying to complete %s: %s' %
+                        (url, e))
+        return dep_path
+
+    def _DownloadFilesFromDevserverViaSSH(self, url, dep_path):
+        """Downloads the file at the URL from the devserver by SSH-ing into it.
+
+        Args:
+          url: URL of the location of the tar.bz2 file on the devserver.
+          dep_path: Full path to the file where it will be downloaded.
+
+        Returns:
+            Full path to the downloaded file.
+
+        Raises:
+          TelemetrySetupError when the download cannot be completed for any
+              reason.
+        """
+        cmd = ['ssh', self._ds.hostname, 'curl', url]
+        with open(dep_path, 'w') as f:
+            proc = subprocess.Popen(cmd, stdout=f, stderr=subprocess.PIPE)
+            _, err = proc.communicate()
+            if proc.returncode != 0:
+                raise TelemetrySetupError(
+                        'The command: %s finished with returncode %s and '
+                        'errors as following: %s. The telemetry dependency '
+                        'could not be downloaded.' %
+                        (' '.join(cmd), proc.returncode, err))
+        return dep_path
+
+    def _ExtractTarball(self, tarball_path, dest_path):
+        """Extracts the given tarball into the destination directory.
+
+        Args:
+          tarball_path: Full path to the tarball to be extracted.
+          dest_path: Full path to the directory where the tarball should be
+              extracted.
+
+        Raises:
+          TelemetrySetupError if the method is unable to extract the tarball for
+              any reason.
+        """
+        cmd = ['tar', 'xf', tarball_path, '--directory', dest_path]
+        try:
+            proc = subprocess.Popen(cmd,
+                                    stdout=subprocess.PIPE,
+                                    stderr=subprocess.PIPE)
+            proc.communicate()
+        except Exception as e:
+            shutil.rmtree(dest_path)
+            raise TelemetrySetupError(
+                    'An exception occurred while trying to untar %s into %s: %s'
+                    % (tarball_path, dest_path, str(e)))
+
+    def _MkDirP(self, path):
+        """Recursively creates the given directory.
+
+        Args:
+          path: Full path to the directory that needs to be created.
+
+        Raises:
+          TelemetrySetupError if the method is unable to create directories for
+              any reason except OSError EEXIST which indicates that the
+              directory already exists.
+        """
+        try:
+            os.makedirs(path)
+        except Exception as e:
+            if not isinstance(e, OSError) or e.errno != errno.EEXIST:
+                raise TelemetrySetupError(
+                        'Could not create directory %s due to %s.' %
+                        (path, str(e)))
+
+    def Cleanup(self):
+        """Cleans up telemetry setup and work environment."""
+        try:
+            shutil.rmtree(self._setup_dir_path)
+        except Exception as e:
+            logging.error('Something went wrong. Could not delete %s: %s',
+                          self._setup_dir_path, e)
+        try:
+            shutil.rmtree(self._tlm_src_dir_path)
+        except Exception as e:
+            logging.error('Something went wrong. Could not delete %s: %s',
+                          self._tlm_src_dir_path, e)
diff --git a/server/cros/tradefed/adb.py b/server/cros/tradefed/adb.py
new file mode 100644
index 0000000..71b116a
--- /dev/null
+++ b/server/cros/tradefed/adb.py
@@ -0,0 +1,93 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(rkuroiwa): Rename this file to adb_utils.py to align with other utility
+# modules. Also when class Adb is instantiated, the user is likely to call the
+# instance "adb" which would collide with this file name (unless they always
+# use "import adb as someothername".
+
+import logging
+import re
+
+from autotest_lib.server import utils
+
+
+class Adb:
+    """Class for running adb commands."""
+
+    def __init__(self):
+        self._install_paths = set()
+
+    def add_path(self, path):
+        """Adds path for executing commands.
+
+        Paths to ADB and AAPT may have to be added if they are not in PATH.
+        Use this method to add them before calling run().
+        """
+        self._install_paths.add(path)
+
+    def get_paths(self):
+        return self._install_paths
+
+    def run(self, host, *args, **kwargs):
+        """Runs an ADB command on the host.
+
+        @param host: DUT to issue the adb command.
+        @param args: Extra args passed to the adb command.
+        @param kwargs: Extra arguments passed to utils.run().
+        """
+        additional_option = _tradefed_options(host)
+        kwargs['args'] = additional_option + kwargs.get('args', ())
+
+        # _install_paths should include the directory with adb.
+        # utils.run() will append these to paths.
+        kwargs['extra_paths'] = (kwargs.get('extra_paths', []) +
+                                 list(self._install_paths))
+        result = utils.run('adb', **kwargs)
+        logging.info('adb %s:\n%s', ' '.join(kwargs.get('args')),
+                     result.stdout + result.stderr)
+        return result
+
+
+def get_adb_target(host):
+    """Get the adb target format.
+
+    This method is slightly different from host.host_port as we need to
+    explicitly specify the port so that the serial name of the adb target
+    matches.
+
+    @param host: a DUT accessible via adb.
+    @return a string for specifying the host using adb command.
+    """
+    port = 22 if host.port is None else host.port
+    if re.search(r':.*:', host.hostname):
+        # Add [] for raw IPv6 addresses, stripped for ssh.
+        # Once Python >= 3.3 can be assumed, 'import ipaddress' can parse
+        # addresses instead of this regex check.
+        return '[{}]:{}'.format(host.hostname, port)
+    return '{}:{}'.format(host.hostname, port)
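As the comment above suggests, a sketch of the same formatting based on the stdlib ipaddress module instead of the ':.*:' regex; this is an alternative illustration, not what this change implements:

# Alternative IPv6 detection via ipaddress (Python 3.3+). Hostnames that are
# not IP literals raise ValueError and fall through to the plain format.
import ipaddress

def format_adb_target(hostname, port):
    try:
        if ipaddress.ip_address(hostname).version == 6:
            return '[{}]:{}'.format(hostname, port)
    except ValueError:
        pass  # not an IP literal, e.g. a DNS hostname
    return '{}:{}'.format(hostname, port)

# format_adb_target('2409::3', 1234)          -> '[2409::3]:1234'
# format_adb_target('123.76.0.29', 3467)      -> '123.76.0.29:3467'
# format_adb_target('some.hostname.cros', 22) -> 'some.hostname.cros:22'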
+
+
+def get_adb_targets(hosts):
+    """Get a list of adb targets."""
+    return [get_adb_target(host) for host in hosts]
+
+
+def _tradefed_options(host):
+    """ADB arguments for tradefed.
+
+    These arguments are specific to using adb with tradefed.
+
+    @param host: DUT to connect to. (None if the adb command is intended
+                 to run on the server, e.g. keygen.)
+    @return a tuple of arguments for adb command.
+    """
+    if host:
+        host_port = get_adb_target(host)
+        ret = ('-s', host_port)
+        return ret
+    # As of N, tradefed cannot specify which adb socket to use, so it uses
+    # tcp:localhost:5037 by default.
+    return ('-H', 'localhost', '-P', '5037')
diff --git a/server/cros/tradefed/adb_unittest.py b/server/cros/tradefed/adb_unittest.py
new file mode 100644
index 0000000..d4c2bb4
--- /dev/null
+++ b/server/cros/tradefed/adb_unittest.py
@@ -0,0 +1,88 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from unittest.mock import Mock, patch
+from autotest_lib.server.cros.tradefed import adb
+
+
+class AdbTest(unittest.TestCase):
+    """Tests for ADB module."""
+
+    # Verify that ipv4 is put into IP:PORT format.
+    def test_get_adb_target_ipv4(self):
+        mock_host = Mock()
+        mock_host.port = 3467
+        mock_host.hostname = '123.76.0.29'
+        target = adb.get_adb_target(mock_host)
+        self.assertEqual(target, '123.76.0.29:3467')
+
+    # Verify that ipv6 is put into [IP]:PORT format.
+    def test_get_adb_target_ipv6(self):
+        mock_host = Mock()
+        mock_host.port = 1234
+        mock_host.hostname = '2409::3'
+        target = adb.get_adb_target(mock_host)
+        self.assertEqual(target, '[2409::3]:1234')
+
+    # Verify that a host name works.
+    def test_get_adb_target_hostname(self):
+        mock_host = Mock()
+        mock_host.port = 4792
+        mock_host.hostname = 'some.hostname.cros'
+        target = adb.get_adb_target(mock_host)
+        self.assertEqual(target, 'some.hostname.cros:4792')
+
+    # Verify that a list of hosts work.
+    def test_get_adb_targets(self):
+        mock_host1 = Mock()
+        mock_host2 = Mock()
+        mock_host3 = Mock()
+        mock_host1.port = 1111
+        mock_host2.port = 2222
+        mock_host3.port = 3333
+        mock_host1.hostname = 'host1'
+        mock_host2.hostname = 'host2'
+        mock_host3.hostname = 'host3'
+
+        targets = adb.get_adb_targets([mock_host1, mock_host2, mock_host3])
+        self.assertEqual(targets, ['host1:1111', 'host2:2222', 'host3:3333'])
+
+    def test_add_paths(self):
+        instance = adb.Adb()
+        instance.add_path('/some/install/path')
+        instance.add_path('/another/directory')
+
+        self.assertEqual(set(['/some/install/path', '/another/directory']),
+                         instance.get_paths())
+
+    @patch('autotest_lib.server.utils.run')
+    def test_run(self, mock_run):
+        instance = adb.Adb()
+        instance.add_path('/some/install/path')
+
+        mock_host = Mock()
+        mock_host.port = 3467
+        mock_host.hostname = '123.76.0.29'
+
+        instance.run(mock_host, args=('some', 'command'), timeout=240)
+        mock_run.assert_called_with('adb',
+                                    args=('-s', '123.76.0.29:3467', 'some',
+                                          'command'),
+                                    timeout=240,
+                                    extra_paths=['/some/install/path'])
+
+    @patch('autotest_lib.server.utils.run')
+    def test_run_without_host(self, mock_run):
+        instance = adb.Adb()
+        instance.add_path('/some/install/path')
+
+        instance.run(None, args=('some', 'command'), timeout=240)
+        mock_run.assert_called_with('adb',
+                                    args=('-H', 'localhost', '-P', '5037',
+                                          'some', 'command'),
+                                    timeout=240,
+                                    extra_paths=['/some/install/path'])
diff --git a/server/cros/tradefed/cts_expected_failure_parser.py b/server/cros/tradefed/cts_expected_failure_parser.py
index 7ed2112..ee8385f 100644
--- a/server/cros/tradefed/cts_expected_failure_parser.py
+++ b/server/cros/tradefed/cts_expected_failure_parser.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -11,28 +12,38 @@
     def __init__(self, failure_files):
         self.waivers_yaml = self._load_failures(failure_files)
 
-    def _validate_waiver_config(self, arch, board, bundle_abi, sdk_ver,
-                                first_api_level, config):
+    def _validate_waiver_config(self,
+                                arch,
+                                board,
+                                model,
+                                bundle_abi,
+                                sdk_ver,
+                                first_api_level,
+                                config,
+                                extra_dut_config=[]):
         """Validate if the test environment matches the test config.
 
         @param arch: DUT's arch type.
         @param board: DUT's board name.
+        @param model: DUT's model name.
         @param bundle_abi: The test's abi type.
         @param sdk_ver: DUT's Android SDK version
         @param first_api_level: DUT's Android first API level.
         @param config: config for an expected failing test.
+        @param extra_dut_config: list of DUT configs added from _get_extra_dut_config(host).
         @return True if test arch or board is part of the config, else False.
         """
         # Map only the versions that ARC releases care.
-        sdk_ver_map = {25: 'N', 28: 'P', 30: 'R'}
+        sdk_ver_map = {25: 'N', 28: 'P', 30: 'R', 33: 'T'}
 
         # 'all' applies to all devices.
         # 'x86' or 'arm' applies to the DUT's architecture.
         # board name like 'eve' or 'kevin' applies to the DUT running the board.
-        dut_config = ['all', arch, board]
-        # 'nativebridge' applies to the case running ARM CTS on x86 devices.
-        if bundle_abi and bundle_abi != arch:
-            dut_config.append('nativebridge')
+        dut_config = ['all', arch, board, model]
+        dut_config.extend(extra_dut_config)
+        # 'binarytranslated' applies to the case running ARM CTS on x86 devices.
+        if bundle_abi and bundle_abi[0:3] != arch:
+            dut_config.append('binarytranslated')
         # 'N' or 'P' or 'R' applies to the device running that Android version.
         if sdk_ver in sdk_ver_map:
             dut_config.append(sdk_ver_map[sdk_ver])
@@ -42,6 +53,19 @@
             dut_config.append('shipat' + sdk_ver_map[first_api_level])
         return len(set(dut_config).intersection(config)) > 0
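A worked example of the tag matching above; the values are loosely based on the new unit test (an x86 'hatch'/'kohaku' device running the ARM bundle with no ARC hardware Vulkan, with integer sdk_ver and first_api_level of 30):

# Standalone illustration of the set intersection performed above.
dut_config = ['all', 'x86', 'hatch', 'kohaku', 'no_vulkan',
              'binarytranslated',  # bundle_abi 'arm'[0:3] != arch 'x86'
              'R', 'shipatR']      # sdk_ver 30 -> 'R', first_api_level 30 -> 'shipatR'
waiver_config = ['binarytranslated']  # e.g. the GtsOnlyPrimaryAbiTestCases entry
print(len(set(dut_config).intersection(waiver_config)) > 0)  # True -> waived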
 
+    def _get_extra_dut_config(self, host):
+        """
+        @param host: DUT to be connected. Passed for additional params.
+        """
+        extra_dut_config = []
+        # Some modules are no-test if ARC hardware Vulkan exists.
+        if host.has_arc_hardware_vulkan():
+            extra_dut_config.append('vulkan')
+        # Some modules are no-test if there is no ARC hardware Vulkan.
+        else:
+            extra_dut_config.append('no_vulkan')
+        return extra_dut_config
+
     def _load_failures(self, failure_files):
         """Load failures from files.
 
@@ -53,7 +77,7 @@
             try:
                 logging.info('Loading expected failure file: %s.', failure_file)
                 with open(failure_file) as wf:
-                    waivers_yaml.update(yaml.load(wf.read()))
+                    waivers_yaml.update(yaml.safe_load(wf.read()))
             except IOError as e:
                 logging.error('Error loading %s (%s).',
                               failure_file,
@@ -63,20 +87,31 @@
                          failure_file)
         return waivers_yaml
 
-    def find_waivers(self, arch, board, bundle_abi, sdk_ver, first_api_level):
+    def find_waivers(self,
+                     arch,
+                     board,
+                     model,
+                     bundle_abi,
+                     sdk_ver,
+                     first_api_level,
+                     host=None):
         """Finds waivers for the test board.
 
         @param arch: DUT's arch type.
         @param board: DUT's board name.
+        @param model: DUT's model name.
         @param bundle_abi: The test's abi type.
         @param sdk_ver: DUT's Android SDK version.
         @param first_api_level: DUT's Android first API level.
+        @param host: DUT to be queried for additional parameters.
         @return a set of waivers/no-test-modules applied to the test board.
         """
         applied_waiver_list = set()
-        for test, config in self.waivers_yaml.iteritems():
-            if self._validate_waiver_config(arch, board, bundle_abi, sdk_ver,
-                                            first_api_level, config):
+        extra_dut_config = self._get_extra_dut_config(host)
+        for test, config in self.waivers_yaml.items():
+            if self._validate_waiver_config(arch, board, model, bundle_abi,
+                                            sdk_ver, first_api_level, config,
+                                            extra_dut_config):
                 applied_waiver_list.add(test)
         logging.info('Excluding tests/packages from rerun: %s.',
                      applied_waiver_list)
diff --git a/server/cros/tradefed/cts_expected_failure_parser_unittest.py b/server/cros/tradefed/cts_expected_failure_parser_unittest.py
new file mode 100644
index 0000000..6517a10
--- /dev/null
+++ b/server/cros/tradefed/cts_expected_failure_parser_unittest.py
@@ -0,0 +1,88 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import glob
+import os
+import unittest
+
+import common
+
+from autotest_lib.server.cros.tradefed import cts_expected_failure_parser
+from autotest_lib.server.hosts import cros_host
+
+
+class MockHost(cros_host.CrosHost):
+    """Simple host for running mock'd host commands"""
+
+    def __init__(self, hostname, vulkan=False):
+        self.hostname = hostname
+        self.vulkan = vulkan
+
+    def has_arc_hardware_vulkan(self):
+        return self.vulkan
+
+
+def glob_add_files(expected_fail_dir):
+    """Return a list of files based on a directory path."""
+
+    expected_fail_files = []
+    expected_fail_dir_path = os.path.join(
+            os.path.dirname(os.path.realpath(__file__)), expected_fail_dir)
+    if os.path.exists(expected_fail_dir_path):
+        expected_fail_files += glob.glob(expected_fail_dir_path + '/*.yaml')
+    return expected_fail_files
+
+
+class CtsExpectedFailureParserTest(unittest.TestCase):
+    """Unittest for cts_expected_failure_parser."""
+
+    def test_should_skip_if_no_vulkan(self):
+        mockhost = MockHost('MockHost', False)
+        expected_fail_files = glob_add_files(
+                'cts_expected_failure_parser_unittest_data')
+
+        waivers = cts_expected_failure_parser.ParseKnownCTSFailures(
+                expected_fail_files)
+        # params: arch, board, model, bundle_abi, sdk_ver, first_api_level, host
+        found_waivers = waivers.find_waivers('x86', 'hatch', 'kohaku', 'x86',
+                                             '30', '30', mockhost)
+        self.assertFalse('GtsOpenglTestCases' in found_waivers)
+        self.assertTrue('GtsVulkanTestCases' in found_waivers)
+
+    def test_should_not_skip_if_has_vulkan(self):
+        mockhost = MockHost('MockHost', True)
+        expected_fail_files = glob_add_files(
+                'cts_expected_failure_parser_unittest_data')
+
+        waivers = cts_expected_failure_parser.ParseKnownCTSFailures(
+                expected_fail_files)
+        # params: arch, board, model, bundle_abi, sdk_ver, first_api_level, host
+        found_waivers = waivers.find_waivers('x86', 'hatch', 'kohaku', 'x86',
+                                             '30', '30', mockhost)
+
+        self.assertTrue('GtsOpenglTestCases' in found_waivers)
+        self.assertFalse('GtsVulkanTestCases' in found_waivers)
+
+    def test_binarytranslated_tag(self):
+        mockhost = MockHost('MockHost', False)
+        expected_fail_files = glob_add_files(
+                'cts_expected_failure_parser_unittest_data')
+
+        waivers = cts_expected_failure_parser.ParseKnownCTSFailures(
+                expected_fail_files)
+        # params: arch, board, model, bundle_abi, sdk_ver, first_api_level, host
+        found_waivers = waivers.find_waivers('x86', 'hatch', 'kohaku', 'arm',
+                                             '30', '30', mockhost)
+
+        self.assertTrue('GtsOnlyPrimaryAbiTestCases' in found_waivers)
+
+        # params: arch, board, model, bundle_abi, sdk_ver, first_api_level, host
+        found_waivers = waivers.find_waivers('x86', 'hatch', 'kohaku', 'x86',
+                                             '30', '30', mockhost)
+
+        self.assertFalse('GtsOnlyPrimaryAbiTestCases' in found_waivers)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/server/cros/tradefed/cts_expected_failure_parser_unittest_data/cheets_GTS_R_notest_modules.yaml b/server/cros/tradefed/cts_expected_failure_parser_unittest_data/cheets_GTS_R_notest_modules.yaml
new file mode 100644
index 0000000..b9d1599
--- /dev/null
+++ b/server/cros/tradefed/cts_expected_failure_parser_unittest_data/cheets_GTS_R_notest_modules.yaml
@@ -0,0 +1,3 @@
+GtsOnlyPrimaryAbiTestCases: [binarytranslated]
+GtsOpenglTestCases: [vulkan]
+GtsVulkanTestCases: [no_vulkan]
diff --git a/server/cros/tradefed/generate_controlfiles_CTS_Instant.py b/server/cros/tradefed/generate_controlfiles_CTS_Instant.py
index 9a022e9..5d1aeec 100755
--- a/server/cros/tradefed/generate_controlfiles_CTS_Instant.py
+++ b/server/cros/tradefed/generate_controlfiles_CTS_Instant.py
@@ -1,4 +1,5 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -49,7 +50,7 @@
 _COLLECT = 'tradefed-run-collect-tests-only-internal'
 _PUBLIC_COLLECT = 'tradefed-run-collect-tests-only'
 
-# Unlike regular CTS we have to target the native ABI only.
+# Unlike regular CTS we have to target the primary ABI only.
 CONFIG['LAB_DEPENDENCY'] = {
     'x86': ['cts_cpu_x86'],
     'arm': ['cts_cpu_arm']
diff --git a/server/cros/tradefed/generate_controlfiles_CTS_P.py b/server/cros/tradefed/generate_controlfiles_CTS_P.py
index 4791120..c4ba595 100755
--- a/server/cros/tradefed/generate_controlfiles_CTS_P.py
+++ b/server/cros/tradefed/generate_controlfiles_CTS_P.py
@@ -1,4 +1,5 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -11,6 +12,7 @@
 CONFIG['TEST_NAME'] = 'cheets_CTS_P'
 CONFIG['DOC_TITLE'] = 'Android Compatibility Test Suite (CTS)'
 CONFIG['MOBLAB_SUITE_NAME'] = 'suite:cts_P, suite:cts'
+CONFIG['MOBLAB_HARDWARE_SUITE_NAME'] = 'suite:cts-hardware'
 CONFIG['COPYRIGHT_YEAR'] = 2018
 CONFIG['AUTHKEY'] = ''
 
@@ -34,6 +36,7 @@
 # suite:arc-cts-unibuild on selected models.
 CONFIG['INTERNAL_SUITE_NAMES'] = ['suite:arc-cts', 'suite:arc-cts-unibuild']
 CONFIG['QUAL_SUITE_NAMES'] = ['suite:arc-cts-qual']
+CONFIG['HARDWARE_SUITE_NAME'] = 'suite:arc-cts-hardware'
 
 CONFIG['CONTROLFILE_TEST_FUNCTION_NAME'] = 'run_TS'
 CONFIG['CONTROLFILE_WRITE_SIMPLE_QUAL_AND_REGRESS'] = False
@@ -82,6 +85,7 @@
         'CtsPrintTestCases': 1.5,
         'CtsSecurityTestCases': 2.0,
         'CtsVideoTestCases': 1.5,
+        'CtsWidgetTestCases': 1.5,
         _COLLECT: 2.5,
         _PUBLIC_COLLECT: 2.5,
         _WM_PRESUBMIT: 0.2,
@@ -99,28 +103,30 @@
 CONFIG['BVT_TIMEOUT'] = 0.2
 
 CONFIG['QUAL_BOOKMARKS'] = sorted([
-    'A',  # A bookend to simplify partition algorithm.
-    'CtsAccessibilityServiceTestCases',  # TODO(ihf) remove when b/121291711 fixed. This module causes problems. Put it into its own control file.
-    'CtsAccessibilityServiceTestCasesz',
-    'CtsActivityManagerDevice',  # Runs long enough. (3h)
-    'CtsActivityManagerDevicez',
-    'CtsDeqpTestCases',
-    'CtsDeqpTestCasesz',  # Put Deqp in one control file. Long enough, fairly stable.
-    'CtsFileSystemTestCases',  # Runs long enough. (3h)
-    'CtsFileSystemTestCasesz',
-    'CtsMediaBitstreamsTestCases',  # Put each Media module in its own control file. Long enough.
-    'CtsMediaHostTestCases',
-    'CtsMediaStressTestCases',
-    'CtsMediaTestCases',
-    'CtsMediaTestCasesz',
-    'CtsJvmti',
-    'CtsSecurityHostTestCases',  # TODO(ihf): remove when passing cleanly.
-    'CtsSecurityHostTestCasesz',
-    'CtsSensorTestCases',  # TODO(ihf): Remove when not needing 30 retries.
-    'CtsSensorTestCasesz',
-    'CtsViewTestCases',  # TODO(b/126741318): Fix performance regression and remove this.
-    'CtsViewTestCasesz',
-    'zzzzz'  # A bookend to simplify algorithm.
+        'A',  # A bookend to simplify partition algorithm.
+        'CtsAccessibilityServiceTestCases',  # TODO(ihf) remove when b/121291711 fixed. This module causes problems. Put it into its own control file.
+        'CtsAccessibilityServiceTestCasesz',
+        'CtsActivityManagerDevice',  # Runs long enough. (3h)
+        'CtsActivityManagerDevicez',
+        'CtsCameraTestCases',  # Recurrently becomes flaky and affects other tests.
+        'CtsCameraTestCasesz',
+        'CtsDeqpTestCases',
+        'CtsDeqpTestCasesz',  # Put Deqp in one control file. Long enough, fairly stable.
+        'CtsFileSystemTestCases',  # Runs long enough. (3h)
+        'CtsFileSystemTestCasesz',
+        'CtsMediaBitstreamsTestCases',  # Put each Media module in its own control file. Long enough.
+        'CtsMediaHostTestCases',
+        'CtsMediaStressTestCases',
+        'CtsMediaTestCases',
+        'CtsMediaTestCasesz',
+        'CtsJvmti',
+        'CtsSecurityHostTestCases',  # TODO(ihf): remove when passing cleanly.
+        'CtsSecurityHostTestCasesz',
+        'CtsSensorTestCases',  # TODO(ihf): Remove when not needing 30 retries.
+        'CtsSensorTestCasesz',
+        'CtsViewTestCases',  # TODO(b/126741318): Fix performance regression and remove this.
+        'CtsViewTestCasesz',
+        'zzzzz'  # A bookend to simplify algorithm.
 ])
 
 CONFIG['SMOKE'] = [
@@ -161,15 +167,6 @@
 
 # The suite is divided based on the run-time hint in the *.config file.
 CONFIG['VMTEST_INFO_SUITES'] = collections.OrderedDict()
-# This is the default suite for all the modules that are not specified below.
-CONFIG['VMTEST_INFO_SUITES']['vmtest-informational1'] = []
-CONFIG['VMTEST_INFO_SUITES']['vmtest-informational2'] = [
-    'CtsMediaTestCases', 'CtsMediaStressTestCases', 'CtsHardwareTestCases'
-]
-CONFIG['VMTEST_INFO_SUITES']['vmtest-informational3'] = [
-    'CtsThemeHostTestCases', 'CtsHardwareTestCases', 'CtsLibcoreTestCases'
-]
-CONFIG['VMTEST_INFO_SUITES']['vmtest-informational4'] = ['']
 
 # Modules that are known to download and/or push media file assets.
 CONFIG['MEDIA_MODULES'] = [
@@ -179,14 +176,21 @@
 ]
 
 CONFIG['NEEDS_PUSH_MEDIA'] = CONFIG['MEDIA_MODULES'] + [
-    'CtsMediaTestCases.audio',
+        'CtsMediaStressTestCases.camera',
+        'CtsMediaTestCases.audio',
+]
+CONFIG['SPLIT_BY_BITS_MODULES'] = [
+        'CtsDeqpTestCases',
+        'CtsMediaTestCases',
+        'CtsViewTestCases',
 ]
 
 # See b/149889853. Non-media test basically does not require dynamic
 # config. To reduce the flakiness, let us suppress the config.
 CONFIG['NEEDS_DYNAMIC_CONFIG_ON_COLLECTION'] = False
 CONFIG['NEEDS_DYNAMIC_CONFIG'] = CONFIG['MEDIA_MODULES'] + [
-    'CtsIntentSignatureTestCases'
+        'CtsIntentSignatureTestCases',
+        'CtsMediaStressTestCases.camera',
 ]
 
 # Modules that are known to need the default apps of Chrome (eg. Files.app).
@@ -212,14 +216,23 @@
 # TODO(b/126741318): Fix performance regression and remove this.
 _SLEEP_60_COMMAND = "\'sleep 60\'"
 
+_START_MDNS_COMMAND = "\'android-sh -c \\\'setprop ctl.start mdnsd\\\'\'"
+
+_WIFI_CONNECT_COMMANDS_V2 = [
+        # These need to be in order.
+        "'adb shell cmd wifi add-network %s %s %s' % (pipes.quote(ssid), 'open' if wifipass == '' else 'wpa', pipes.quote(wifipass))",
+        "'adb shell cmd wifi connect-network %s' % pipes.quote(ssid)",
+        "'adb shell dumpsys wifi transports -eth'",
+]
+
 # Preconditions applicable to public and internal tests.
 CONFIG['PRECONDITION'] = {
-    'CtsSecurityHostTestCases': [
-        _SECURITY_PARANOID_COMMAND, _CONFIG_MODULE_COMMAND
-    ],
-    # Tests are performance-sensitive, workaround to avoid CPU load on login.
-    # TODO(b/126741318): Fix performance regression and remove this.
-    'CtsViewTestCases': [_SLEEP_60_COMMAND],
+        'CtsSecurityHostTestCases':
+        [_SECURITY_PARANOID_COMMAND, _CONFIG_MODULE_COMMAND],
+        # Tests are performance-sensitive, workaround to avoid CPU load on login.
+        # TODO(b/126741318): Fix performance regression and remove this.
+        'CtsViewTestCases': [_SLEEP_60_COMMAND],
+        'CtsNetTestCases': [_START_MDNS_COMMAND],
 }
 CONFIG['LOGIN_PRECONDITION'] = {
     'CtsAppSecurityHostTestCases': [_EJECT_REMOVABLE_DISK_COMMAND],
@@ -237,12 +250,11 @@
 
 # Preconditions applicable to public tests.
 CONFIG['PUBLIC_PRECONDITION'] = {
-    'CtsSecurityHostTestCases': [
-        _SECURITY_PARANOID_COMMAND, _CONFIG_MODULE_COMMAND
-    ],
-    'CtsUsageStatsTestCases': _WIFI_CONNECT_COMMANDS,
-    'CtsNetTestCases': _WIFI_CONNECT_COMMANDS,
-    'CtsLibcoreTestCases': _WIFI_CONNECT_COMMANDS,
+        'CtsSecurityHostTestCases':
+        [_SECURITY_PARANOID_COMMAND, _CONFIG_MODULE_COMMAND],
+        'CtsUsageStatsTestCases': _WIFI_CONNECT_COMMANDS,
+        'CtsNetTestCases': _WIFI_CONNECT_COMMANDS + [_START_MDNS_COMMAND],
+        'CtsLibcoreTestCases': _WIFI_CONNECT_COMMANDS,
 }
 
 CONFIG['PUBLIC_DEPENDENCIES'] = {
@@ -300,76 +312,153 @@
     'CtsLibcoreTestCases',
 ])
 
+# This list of modules is used for a reduced set of testing for the build
+# variant process. Suites: cts_hardware & arc-cts-hardware.
+CONFIG['HARDWARE_MODULES'] = [
+        'CtsPerfettoTestCases',
+        'CtsSustainedPerformanceHostTestCases',
+        'CtsCameraTestCases',
+        'CtsViewTestCases',
+        'CtsMediaTestCases',
+        'CtsNativeMediaAAudioTestCases',
+        'CtsNetTestCases',
+        'CtsUsageStatsTestCases',
+        'CtsSensorTestCases',
+]
+
+SUITES_DEQP_SUBMODULE = ['suite:arc-cts-deqp', 'suite:graphics_per-week']
+
 CONFIG['EXTRA_MODULES'] = {
-    'CtsDeqpTestCases': {
-        'SUBMODULES': set([
-            'CtsDeqpTestCases.dEQP-EGL',
-            'CtsDeqpTestCases.dEQP-GLES2',
-            'CtsDeqpTestCases.dEQP-GLES3',
-            'CtsDeqpTestCases.dEQP-GLES31',
-            'CtsDeqpTestCases.dEQP-VK'
-        ]),
-        'SUITES': ['suite:arc-cts-deqp', 'suite:graphics_per-week'],
-    },
-    'CtsMediaTestCases': {
-        'SUBMODULES': set([
-            'CtsMediaTestCases.audio',
-        ]),
-        'SUITES': ['suite:arc-cts'],
-    },
-    _WM_PRESUBMIT: {
-        'SUBMODULES': set([_WM_PRESUBMIT]),
-        'SUITES': [],
-    },
+        'CtsDeqpTestCases': {
+                'CtsDeqpTestCases.dEQP-EGL': SUITES_DEQP_SUBMODULE,
+                'CtsDeqpTestCases.dEQP-GLES2': SUITES_DEQP_SUBMODULE,
+                'CtsDeqpTestCases.dEQP-GLES3': SUITES_DEQP_SUBMODULE,
+                'CtsDeqpTestCases.dEQP-GLES31': SUITES_DEQP_SUBMODULE,
+                'CtsDeqpTestCases.dEQP-VK': SUITES_DEQP_SUBMODULE,
+        },
+        'CtsMediaTestCases': {
+                'CtsMediaTestCases.audio': ['suite:arc-cts'],
+        },
+        _WM_PRESUBMIT: {
+                _WM_PRESUBMIT: [],
+        },
+}
+
+# In addition to EXTRA_MODULES, these modules require separate control files
+# and therefore a separate declaration.
+CONFIG['HARDWAREONLY_EXTRA_MODULES'] = {
+        'CtsAppTestCases': {
+                'CtsAppTestCases.feature': [],
+        },
+        'CtsDeqpTestCases': {
+                'CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite': [],
+        },
+        'CtsMediaStressTestCases': {
+                'CtsMediaStressTestCases.camera': [],
+        },
+        'CtsPermissionTestCases': {
+                'CtsPermissionTestCases.camera': [],
+        },
 }
 
 # Moblab wants to shard dEQP really finely. This isn't needed anymore as it got
 # faster, but I guess better safe than sorry.
 CONFIG['PUBLIC_EXTRA_MODULES'] = {
-    'CtsDeqpTestCases' : [
-        'CtsDeqpTestCases.dEQP-EGL',
-        'CtsDeqpTestCases.dEQP-GLES2',
-        'CtsDeqpTestCases.dEQP-GLES3',
-        'CtsDeqpTestCases.dEQP-GLES31',
-        'CtsDeqpTestCases.dEQP-VK.api',
-        'CtsDeqpTestCases.dEQP-VK.binding_model',
-        'CtsDeqpTestCases.dEQP-VK.clipping',
-        'CtsDeqpTestCases.dEQP-VK.compute',
-        'CtsDeqpTestCases.dEQP-VK.device_group',
-        'CtsDeqpTestCases.dEQP-VK.draw',
-        'CtsDeqpTestCases.dEQP-VK.dynamic_state',
-        'CtsDeqpTestCases.dEQP-VK.fragment_operations',
-        'CtsDeqpTestCases.dEQP-VK.geometry',
-        'CtsDeqpTestCases.dEQP-VK.glsl',
-        'CtsDeqpTestCases.dEQP-VK.image',
-        'CtsDeqpTestCases.dEQP-VK.info',
-        'CtsDeqpTestCases.dEQP-VK.memory',
-        'CtsDeqpTestCases.dEQP-VK.multiview',
-        'CtsDeqpTestCases.dEQP-VK.pipeline',
-        'CtsDeqpTestCases.dEQP-VK.protected_memory',
-        'CtsDeqpTestCases.dEQP-VK.query_pool',
-        'CtsDeqpTestCases.dEQP-VK.rasterization',
-        'CtsDeqpTestCases.dEQP-VK.renderpass',
-        'CtsDeqpTestCases.dEQP-VK.renderpass2',
-        'CtsDeqpTestCases.dEQP-VK.robustness',
-        'CtsDeqpTestCases.dEQP-VK.sparse_resources',
-        'CtsDeqpTestCases.dEQP-VK.spirv_assembly',
-        'CtsDeqpTestCases.dEQP-VK.ssbo',
-        'CtsDeqpTestCases.dEQP-VK.subgroups',
-        'CtsDeqpTestCases.dEQP-VK.subgroups.b',
-        'CtsDeqpTestCases.dEQP-VK.subgroups.s',
-        'CtsDeqpTestCases.dEQP-VK.subgroups.vote',
-        'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic',
-        'CtsDeqpTestCases.dEQP-VK.subgroups.clustered',
-        'CtsDeqpTestCases.dEQP-VK.subgroups.quad',
-        'CtsDeqpTestCases.dEQP-VK.synchronization',
-        'CtsDeqpTestCases.dEQP-VK.tessellation',
-        'CtsDeqpTestCases.dEQP-VK.texture',
-        'CtsDeqpTestCases.dEQP-VK.ubo',
-        'CtsDeqpTestCases.dEQP-VK.wsi',
-        'CtsDeqpTestCases.dEQP-VK.ycbcr'
-    ]
+        'CtsDeqpTestCases': {
+                # moblab cts suite
+                'CtsDeqpTestCases.dEQP-EGL': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-GLES2': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-GLES3': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-GLES31': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.api': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.binding_model':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.clipping':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.compute':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.device_group':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.draw': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.dynamic_state':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.fragment_operations':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.geometry':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.glsl': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.image':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.info': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.memory':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.multiview':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.pipeline':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.protected_memory':
+                [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.query_pool': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.rasterization': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.renderpass': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.renderpass2': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.robustness': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.sparse_resources': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.spirv_assembly': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.ssbo': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.subgroups': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.subgroups.b': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.subgroups.s': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.subgroups.vote': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.subgroups.clustered': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.subgroups.quad': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.synchronization': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.tessellation': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.texture': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+                'CtsDeqpTestCases.dEQP-VK.ubo': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.wsi': [CONFIG['MOBLAB_SUITE_NAME']],
+                'CtsDeqpTestCases.dEQP-VK.ycbcr': [
+                        CONFIG['MOBLAB_SUITE_NAME']
+                ],
+        },
 }
+
 # TODO(haddowk,kinaba): Hack for b/138622686. Clean up later.
 CONFIG['EXTRA_SUBMODULE_OVERRIDE'] = {
     'x86': {
@@ -381,235 +470,290 @@
 }
 
 CONFIG['EXTRA_COMMANDLINE'] = {
-    'CtsDeqpTestCases.dEQP-EGL': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-EGL.*'
-    ],
-    'CtsDeqpTestCases.dEQP-GLES2': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-GLES2.*'
-    ],
-    'CtsDeqpTestCases.dEQP-GLES3': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-GLES3.*'
-    ],
-    'CtsDeqpTestCases.dEQP-GLES31': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-GLES31.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.api': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.api.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.binding_model': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.binding_model.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.clipping': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.clipping.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.compute': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.compute.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.device_group': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.device_group*'  # Not ending on .* like most others!
-    ],
-    'CtsDeqpTestCases.dEQP-VK.draw': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.draw.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.dynamic_state': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.dynamic_state.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.fragment_operations': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.fragment_operations.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.geometry': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.geometry.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.glsl': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.glsl.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.image': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.image.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.info': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.info*'  # Not ending on .* like most others!
-    ],
-    'CtsDeqpTestCases.dEQP-VK.memory': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.memory.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.multiview': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.multiview.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.pipeline': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.pipeline.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.protected_memory': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.protected_memory.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.query_pool': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.query_pool.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.rasterization': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.rasterization.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.renderpass': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.renderpass.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.renderpass2': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.renderpass2.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.robustness': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.robustness.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.sparse_resources': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.sparse_resources.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.spirv_assembly': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.spirv_assembly.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.ssbo': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.ssbo.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.subgroups': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.subgroups.*'
-    ],
-    # Splitting VK.subgroups to smaller pieces to workaround b/138622686.
-    # TODO(kinaba,haddowk): remove them once the root cause is fixed, or
-    # reconsider the sharding strategy.
-    'CtsDeqpTestCases.dEQP-VK.subgroups.b': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.subgroups.b*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.subgroups.s': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.subgroups.s*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.subgroups.vote': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.subgroups.vote#*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.subgroups.arithmetic#*'
-    ],
-    # TODO(haddowk,kinaba): Hack for b/138622686. Clean up later.
-    'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.32': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.subgroups.arithmetic#*', '--abi', 'x86'
-    ],
-    # TODO(haddowk,kinaba): Hack for b/138622686. Clean up later.
-    'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.64': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.subgroups.arithmetic#*', '--abi', 'x86_64'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.subgroups.clustered': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.subgroups.clustered#*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.subgroups.quad': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.subgroups.quad#*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.synchronization': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.synchronization.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.tessellation': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.tessellation.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.texture': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.texture.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.ubo': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.ubo.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.wsi': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.wsi.*'
-    ],
-    'CtsDeqpTestCases.dEQP-VK.ycbcr': [
-        '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases',
-        '--test', 'dEQP-VK.ycbcr.*'
-    ],
-    'CtsMediaTestCases.audio': [
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFocusTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFormatTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioManagerTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlayRoutingNative',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackConfigurationTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPreProcessingTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPresentationTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordAppOpTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordRoutingNative',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecord_BufferSizeTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordingConfigurationTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackLatencyTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackSurroundTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrack_ListenerTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolAacTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolMidiTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolOggTest',
-        '--include-filter', 'CtsMediaTestCases android.media.cts.VolumeShaperTest',
-    ],
-    _WM_PRESUBMIT: [
-        '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases',
-        '--include-filter', 'CtsActivityManagerDeviceTestCases',
-        '--include-filter',
-        'CtsAppTestCases android.app.cts.TaskDescriptionTest',
-        '--include-filter', 'CtsWindowManagerDeviceTestCases',
-        '--test-arg', (
-            'com.android.compatibility.common.tradefed.testtype.JarHostTest:'
-            'include-annotation:android.platform.test.annotations.Presubmit'
-        ),
-        '--test-arg', (
-            'com.android.tradefed.testtype.AndroidJUnitTest:'
-            'include-annotation:android.platform.test.annotations.Presubmit'
-        ),
-        '--test-arg', (
-            'com.android.tradefed.testtype.HostTest:'
-            'include-annotation:android.platform.test.annotations.Presubmit'
-        ),
-        '--test-arg', (
-            'com.android.tradefed.testtype.AndroidJUnitTest:'
-            'exclude-annotation:androidx.test.filters.FlakyTest'
-        ),
-    ],
+        'CtsAppTestCases.feature': [
+                '--module', 'CtsAppTestCases', '--test',
+                'android.app.cts.SystemFeaturesTest'
+        ],
+        'CtsDeqpTestCases.dEQP-EGL': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-EGL.*'
+        ],
+        'CtsDeqpTestCases.dEQP-GLES2': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-GLES2.*'
+        ],
+        'CtsDeqpTestCases.dEQP-GLES3': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-GLES3.*'
+        ],
+        'CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite': [
+                '--module', 'CtsDeqpTestCases', '--test',
+                'dEQP-GLES3.functional.prerequisite#*'
+        ],
+        'CtsDeqpTestCases.dEQP-GLES31': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-GLES31.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.api': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.api.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.binding_model': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.binding_model.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.clipping': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.clipping.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.compute': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.compute.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.device_group': [
+                '--include-filter',
+                'CtsDeqpTestCases',
+                '--module',
+                'CtsDeqpTestCases',
+                '--test',
+                'dEQP-VK.device_group*'  # Not ending on .* like most others!
+        ],
+        'CtsDeqpTestCases.dEQP-VK.draw': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.draw.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.dynamic_state': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.dynamic_state.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.fragment_operations': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.fragment_operations.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.geometry': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.geometry.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.glsl': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.glsl.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.image': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.image.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.info': [
+                '--include-filter',
+                'CtsDeqpTestCases',
+                '--module',
+                'CtsDeqpTestCases',
+                '--test',
+                'dEQP-VK.info*'  # Not ending on .* like most others!
+        ],
+        'CtsDeqpTestCases.dEQP-VK.memory': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.memory.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.multiview': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.multiview.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.pipeline': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.pipeline.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.protected_memory': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.protected_memory.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.query_pool': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.query_pool.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.rasterization': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.rasterization.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.renderpass': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.renderpass.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.renderpass2': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.renderpass2.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.robustness': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.robustness.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.sparse_resources': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.sparse_resources.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.spirv_assembly': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.spirv_assembly.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.ssbo': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.ssbo.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.subgroups': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.subgroups.*'
+        ],
+        # Splitting VK.subgroups to smaller pieces to work around b/138622686.
+        # TODO(kinaba,haddowk): remove them once the root cause is fixed, or
+        # reconsider the sharding strategy.
+        'CtsDeqpTestCases.dEQP-VK.subgroups.b': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.subgroups.b*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.subgroups.s': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.subgroups.s*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.subgroups.vote': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.subgroups.vote#*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.subgroups.arithmetic#*'
+        ],
+        # TODO(haddowk,kinaba): Hack for b/138622686. Clean up later.
+        'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.32': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.subgroups.arithmetic#*',
+                '--abi', 'x86'
+        ],
+        # TODO(haddowk,kinaba): Hack for b/138622686. Clean up later.
+        'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.64': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.subgroups.arithmetic#*',
+                '--abi', 'x86_64'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.subgroups.clustered': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.subgroups.clustered#*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.subgroups.quad': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.subgroups.quad#*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.synchronization': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.synchronization.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.tessellation': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.tessellation.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.texture': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.texture.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.ubo': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.ubo.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.wsi': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.wsi.*'
+        ],
+        'CtsDeqpTestCases.dEQP-VK.ycbcr': [
+                '--include-filter', 'CtsDeqpTestCases', '--module',
+                'CtsDeqpTestCases', '--test', 'dEQP-VK.ycbcr.*'
+        ],
+        'CtsMediaStressTestCases.camera': [
+                '--module',
+                'CtsMediaStressTestCases',
+                '--test',
+                'android.mediastress.cts.MediaRecorderStressTest',
+        ],
+        'CtsMediaTestCases.audio': [
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioAttributesTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioEffectTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioFocusTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioFormatTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioManagerTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioNativeTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioPlayRoutingNative',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioPlaybackConfigurationTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioPreProcessingTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioPresentationTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioRecordAppOpTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioRecordRoutingNative',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioRecordTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioRecord_BufferSizeTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioRecordingConfigurationTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioTrackLatencyTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioTrackSurroundTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioTrackTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AudioTrack_ListenerTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.SoundPoolAacTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.SoundPoolMidiTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.SoundPoolOggTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.VolumeShaperTest',
+        ],
+        'CtsPermissionTestCases.camera': [
+                '--include-filter',
+                'CtsPermissionTestCases android.permission.cts.CameraPermissionTest',
+                '--include-filter',
+                'CtsPermissionTestCases android.permission.cts.Camera2PermissionTest',
+        ],
+        _WM_PRESUBMIT: [
+                '--include-filter',
+                'CtsActivityManagerDeviceSdk25TestCases',
+                '--include-filter',
+                'CtsActivityManagerDeviceTestCases',
+                '--include-filter',
+                'CtsAppTestCases android.app.cts.TaskDescriptionTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases',
+                '--test-arg',
+                ('com.android.compatibility.common.tradefed.testtype.JarHostTest:'
+                 'include-annotation:android.platform.test.annotations.Presubmit'
+                 ),
+                '--test-arg',
+                ('com.android.tradefed.testtype.AndroidJUnitTest:'
+                 'include-annotation:android.platform.test.annotations.Presubmit'
+                 ),
+                '--test-arg',
+                ('com.android.tradefed.testtype.HostTest:'
+                 'include-annotation:android.platform.test.annotations.Presubmit'
+                 ),
+                '--test-arg',
+                ('com.android.tradefed.testtype.AndroidJUnitTest:'
+                 'exclude-annotation:androidx.test.filters.FlakyTest'),
+        ],
 }
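Each EXTRA_COMMANDLINE entry above is just a list of extra arguments that the named submodule's control file passes along to tradefed. A minimal sketch of that expansion, assuming a hypothetical build_run_command() helper and the standard cts-tradefed entry point (the real plumbing lives in the generated control files, not here):

# Minimal sketch, not the generator's code: the per-submodule argument list
# is appended to a base tradefed command. All names here are illustrative.
EXTRA_COMMANDLINE = {
        'CtsDeqpTestCases.dEQP-VK.glsl': [
                '--include-filter', 'CtsDeqpTestCases', '--module',
                'CtsDeqpTestCases', '--test', 'dEQP-VK.glsl.*'
        ],
}

def build_run_command(test_name,
                      base=('cts-tradefed', 'run', 'commandAndExit', 'cts')):
    """Returns the base command plus any extra arguments for test_name."""
    return list(base) + EXTRA_COMMANDLINE.get(test_name, [])

print(build_run_command('CtsDeqpTestCases.dEQP-VK.glsl'))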
 
 CONFIG['EXTRA_ATTRIBUTES'] = {
diff --git a/server/cros/tradefed/generate_controlfiles_CTS_R.py b/server/cros/tradefed/generate_controlfiles_CTS_R.py
index e83f243..dd80a6e 100755
--- a/server/cros/tradefed/generate_controlfiles_CTS_R.py
+++ b/server/cros/tradefed/generate_controlfiles_CTS_R.py
@@ -1,4 +1,5 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,6 +11,7 @@
 CONFIG['TEST_NAME'] = 'cheets_CTS_R'
 CONFIG['DOC_TITLE'] = 'Android Compatibility Test Suite (CTS)'
 CONFIG['MOBLAB_SUITE_NAME'] = 'suite:cts'
+CONFIG['MOBLAB_HARDWARE_SUITE_NAME'] = 'suite:cts-hardware'
 CONFIG['COPYRIGHT_YEAR'] = 2020
 CONFIG['AUTHKEY'] = ''
 
@@ -30,12 +32,15 @@
 CONFIG['TRADEFED_IGNORE_BUSINESS_LOGIC_FAILURE'] = False
 
 # On moblab everything runs in the same suite.
-CONFIG['INTERNAL_SUITE_NAMES'] = ['suite:arc-cts-r']
-CONFIG['QUAL_SUITE_NAMES'] = []
+CONFIG['INTERNAL_SUITE_NAMES'] = [
+        'suite:arc-cts-r', 'suite:arc-cts', 'suite:arc-cts-unibuild'
+]
+CONFIG['QUAL_SUITE_NAMES'] = ['suite:arc-cts-qual']
+CONFIG['HARDWARE_SUITE_NAME'] = 'suite:arc-cts-hardware'
 
 CONFIG['CONTROLFILE_TEST_FUNCTION_NAME'] = 'run_TS'
 CONFIG['CONTROLFILE_WRITE_SIMPLE_QUAL_AND_REGRESS'] = False
-CONFIG['CONTROLFILE_WRITE_CAMERA'] = False
+CONFIG['CONTROLFILE_WRITE_CAMERA'] = True
 CONFIG['CONTROLFILE_WRITE_EXTRA'] = True
 
 # The dashboard suppresses upload to APFE for GS directories (based on autotest
@@ -48,12 +53,21 @@
 
 CONFIG['CTS_JOB_RETRIES_IN_PUBLIC'] = 1
 CONFIG['CTS_QUAL_RETRIES'] = 9
-CONFIG['CTS_MAX_RETRIES'] = {}
+CONFIG['CTS_MAX_RETRIES'] = {
+        # TODO(b/183196062): Remove once the flakiness is fixed.
+        'CtsHardwareTestCases': 30,
+        # TODO(b/168262403): Remove once the flakiness is fixed.
+        'CtsIncidentHostTestCases': 10,
+        # TODO(b/181543065): Remove once the flakiness is fixed.
+        'CtsWindowManagerDeviceTestCases': 10,
+}
 
 # Timeout in hours.
 CONFIG['CTS_TIMEOUT_DEFAULT'] = 1.0
 CONFIG['CTS_TIMEOUT'] = {
+        'CtsAppSecurityHostTestCases': 2.0,
         'CtsAutoFillServiceTestCases': 2.5,  # TODO(b/134662826)
+        'CtsCameraTestCases': 1.5,
         'CtsDeqpTestCases': 30.0,
         'CtsDeqpTestCases.dEQP-EGL': 2.0,
         'CtsDeqpTestCases.dEQP-GLES2': 2.0,
@@ -63,6 +77,7 @@
         'CtsFileSystemTestCases': 3.0,
         'CtsHardwareTestCases': 2.0,
         'CtsIcuTestCases': 2.0,
+        'CtsKeystoreTestCases': 2.0,
         'CtsLibcoreOjTestCases': 2.0,
         'CtsMediaStressTestCases': 5.0,
         'CtsMediaTestCases': 10.0,
@@ -74,6 +89,7 @@
         'CtsSensorTestCases': 2.0,
         'CtsStatsdHostTestCases': 2.0,
         'CtsVideoTestCases': 1.5,
+        'CtsViewTestCases': 2.5,
         'CtsWidgetTestCases': 2.0,
         _COLLECT: 2.5,
         _PUBLIC_COLLECT: 2.5,
@@ -86,7 +102,30 @@
 # We allow a very long runtime for qualification (2 days).
 CONFIG['QUAL_TIMEOUT'] = 48
 
-CONFIG['QUAL_BOOKMARKS'] = sorted([])
+CONFIG['QUAL_BOOKMARKS'] = sorted([
+        'A',  # A bookend to simplify partition algorithm.
+        'CtsAccessibilityServiceTestCases',  # TODO(ihf) remove when b/121291711 fixed. This module causes problems. Put it into its own control file.
+        'CtsAccessibilityServiceTestCasesz',
+        'CtsCameraTestCases',  # Flaky
+        'CtsCameraTestCasesz',
+        'CtsDeqpTestCases',
+        'CtsDeqpTestCasesz',  # Put Deqp in one control file. Long enough, fairly stable.
+        'CtsFileSystemTestCases',  # Runs long enough. (3h)
+        'CtsFileSystemTestCasesz',
+        'CtsMediaStressTestCases',  # Put heavy Media module in its own control file. Long enough.
+        'CtsMediaTestCases',
+        'CtsMediaTestCasesz',
+        'CtsJvmti',
+        'CtsProvider',  # TODO(b/184680306): Remove once the USB stick issue is resolved.
+        'CtsSecurityHostTestCases',  # TODO(ihf): remove when passing cleanly.
+        'CtsSecurityHostTestCasesz',
+        'CtsSensorTestCases',  # TODO(ihf): Remove when not needing 30 retries.
+        'CtsSensorTestCasesz',
+        'CtsSystem',  # TODO(b/183170604): Remove when flakiness is fixed.
+        'CtsViewTestCases',  # TODO(b/126741318): Fix performance regression and remove this.
+        'CtsViewTestCasesz',
+        'zzzzz'  # A bookend to simplify algorithm.
+])
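The 'A' and 'zzzzz' entries are sentinel bookends: with module names sorted alphabetically, each pair of consecutive bookmarks bounds one qualification chunk, which is why heavyweight modules such as CtsDeqpTestCases get a matching 'CtsDeqpTestCasesz' entry to isolate them in their own chunk. A rough sketch of that partitioning idea, presumably consumed by the shared generator code (the function below is illustrative, not the actual implementation):

# Hedged sketch of the bookmark-based partitioning: consecutive bookmarks
# bound alphabetic ranges of module names.
import bisect

def partition(modules, bookmarks):
    """Groups sorted module names into the ranges delimited by bookmarks."""
    modules = sorted(modules)
    groups = []
    for low, high in zip(bookmarks, bookmarks[1:]):
        start = bisect.bisect_left(modules, low)
        end = bisect.bisect_left(modules, high)
        if modules[start:end]:
            groups.append(modules[start:end])
    return groups

print(partition(['CtsCameraTestCases', 'CtsDeqpTestCases', 'CtsViewTestCases'],
                ['A', 'CtsDeqpTestCases', 'CtsDeqpTestCasesz', 'zzzzz']))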
 
 CONFIG['SMOKE'] = []
 
@@ -97,9 +136,16 @@
         'CtsMidiTestCases',
 ]
 
-CONFIG['NEEDS_POWER_CYCLE'] = []
+CONFIG['NEEDS_POWER_CYCLE'] = [
+        'CtsAppTestCases',
+        'CtsSensorTestCases',
+]
 
-CONFIG['HARDWARE_DEPENDENT_MODULES'] = []
+CONFIG['HARDWARE_DEPENDENT_MODULES'] = [
+        'CtsSensorTestCases',
+        'CtsCameraTestCases',
+        'CtsBluetoothTestCases',
+]
 
 # The suite is divided based on the run-time hint in the *.config file.
 CONFIG['VMTEST_INFO_SUITES'] = collections.OrderedDict()
@@ -112,10 +158,30 @@
 ]
 
 CONFIG['NEEDS_PUSH_MEDIA'] = CONFIG['MEDIA_MODULES'] + [
+        'CtsMediaStressTestCases.camera',
         'CtsMediaTestCases.audio',
+        'CtsMediaTestCases.perf',
         'CtsMediaTestCases.video',
 ]
 
+CONFIG['NEEDS_CTS_HELPERS'] = [
+        'CtsPrintTestCases',
+]
+
+CONFIG['SPLIT_BY_BITS_MODULES'] = [
+        'CtsDeqpTestCases',
+        'CtsDeqpTestCases.dEQP-VK',
+        'CtsMediaTestCases',
+]
+
+CONFIG['PUBLIC_SPLIT_BY_BITS_MODULES'] = [
+        'CtsDeqpTestCases',
+]
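SPLIT_BY_BITS_MODULES presumably asks the generator to emit separate 32-bit and 64-bit control files for a module, each pinned to one ABI (compare the removed .32/.64 arithmetic hacks above, which pass --abi explicitly). A rough sketch of that idea; the naming and ABI mapping below are assumptions, not the generator's actual output:

# Assumption-labeled sketch: expand a listed module into per-bitness
# variants, each restricted to one ABI of the given family.
SPLIT_BY_BITS_MODULES = ['CtsDeqpTestCases.dEQP-VK']

def split_by_bits(module, abis=('x86', 'x86_64')):
    """Expands a module into per-bitness variants, each pinned to one ABI."""
    if module not in SPLIT_BY_BITS_MODULES:
        return [(module, None)]
    bits = {'x86': 32, 'armeabi-v7a': 32, 'x86_64': 64, 'arm64-v8a': 64}
    return [('%s.%d' % (module, bits[abi]), abi) for abi in abis]

print(split_by_bits('CtsDeqpTestCases.dEQP-VK'))
# -> [('CtsDeqpTestCases.dEQP-VK.32', 'x86'), ('CtsDeqpTestCases.dEQP-VK.64', 'x86_64')]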
+
+CONFIG['USE_OLD_ADB'] = [
+        'CtsStatsdHostTestCases',
+]
+
 # Modules that are known to need the default apps of Chrome (eg. Files.app).
 CONFIG['ENABLE_DEFAULT_APPS'] = [
         'CtsAppSecurityHostTestCases',
@@ -129,22 +195,47 @@
 _WIFI_CONNECT_COMMANDS = [
         # These need to be in order.
         "'/usr/local/autotest/cros/scripts/wifi connect %s %s\' % (ssid, wifipass)",
-        "'/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'"
+        "'android-sh -c \\'dumpsys wifi transports -eth\\''"
+]
+
+_WIFI_CONNECT_COMMANDS_V2 = [
+        # These need to be in order.
+        "'adb shell cmd wifi add-network %s %s %s' % (pipes.quote(ssid), 'open' if wifipass == '' else 'wpa', pipes.quote(wifipass))",
+        "'adb shell cmd wifi connect-network %s' % pipes.quote(ssid)",
+        "'adb shell dumpsys wifi transports -eth'",
+]
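Each entry in these command lists is itself a Python expression in string form; the generated control file apparently evaluates it with ssid and wifipass in scope (hence the nested quoting and pipes.quote), then runs the resulting shell commands in order. A small sketch of that expansion, with made-up credentials:

# Illustrative only: expands one command template with concrete values.
# 'MyAP'/'secret' are made-up; the real substitution happens in the
# generated control files, not in this generator.
import pipes  # the templates call pipes.quote (shlex.quote on Python 3)

ssid, wifipass = 'MyAP', 'secret'
template = ("'adb shell cmd wifi add-network %s %s %s' % "
            "(pipes.quote(ssid), 'open' if wifipass == '' else 'wpa', "
            "pipes.quote(wifipass))")
print(eval(template))  # -> adb shell cmd wifi add-network MyAP wpa secret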
+
+_DISPLAY_REFRESH_COMMANDS = [
+        "'sleep 20'",  # Wait for the intent helper mojo connection established
+        "'android-sh -c \\'am start -a android.intent.action.VIEW -d https://webglsamples.org/electricflower/electricflower.html\\''"
 ]
 
 # Preconditions applicable to public and internal tests.
-CONFIG['PRECONDITION'] = {}
-CONFIG['LOGIN_PRECONDITION'] = {}
+CONFIG['PRECONDITION'] = {
+        'CtsCameraTestCases.NativeCameraDeviceTest': _DISPLAY_REFRESH_COMMANDS,
+}
+
+CONFIG['LOGIN_PRECONDITION'] = {
+        'CtsAppSecurityHostTestCases': [_EJECT_REMOVABLE_DISK_COMMAND],
+        'CtsJobSchedulerTestCases': [_EJECT_REMOVABLE_DISK_COMMAND],
+        'CtsMediaTestCases': [_EJECT_REMOVABLE_DISK_COMMAND],
+        'CtsOsTestCases': [_EJECT_REMOVABLE_DISK_COMMAND],
+        'CtsProviderTestCases': [_EJECT_REMOVABLE_DISK_COMMAND],
+}
 
 # Preconditions applicable to public tests.
 CONFIG['PUBLIC_PRECONDITION'] = {
+        'CtsCameraTestCases.NativeCameraDeviceTest': _DISPLAY_REFRESH_COMMANDS,
         'CtsHostsideNetworkTests': _WIFI_CONNECT_COMMANDS,
         'CtsLibcoreTestCases': _WIFI_CONNECT_COMMANDS,
+        'CtsNetApi23TestCases': _WIFI_CONNECT_COMMANDS,
         'CtsNetTestCases': _WIFI_CONNECT_COMMANDS,
         'CtsJobSchedulerTestCases': _WIFI_CONNECT_COMMANDS,
         'CtsUsageStatsTestCases': _WIFI_CONNECT_COMMANDS,
         'CtsStatsdHostTestCases': _WIFI_CONNECT_COMMANDS,
+        'CtsWifiTestCases': _WIFI_CONNECT_COMMANDS,
 }
+
 CONFIG['PUBLIC_DEPENDENCIES'] = {
         'CtsCameraTestCases': ['lighting'],
         'CtsMediaTestCases': ['noloopback'],
@@ -162,7 +253,14 @@
 
 # This information is changed based on regular analysis of the failure rate on
 # partner moblabs.
-CONFIG['PUBLIC_MODULE_RETRY_COUNT'] = {}
+CONFIG['PUBLIC_MODULE_RETRY_COUNT'] = {
+        # TODO(b/183196062): Remove once the flakiness is fixed.
+        'CtsHardwareTestCases': 30,
+        # TODO(b/168262403): Remove once the flakiness is fixed.
+        'CtsIncidentHostTestCases': 10,
+        # TODO(b/181543065): Remove once the flakiness is fixed.
+        'CtsWindowManagerDeviceTestCases': 10,
+}
 
 # This information is changed based on regular analysis of the job run time on
 # partner moblabs.
@@ -193,63 +291,108 @@
         'CtsLibcoreTestCases',
 ])
 
+# This list of modules will be used for a reduced set of testing for the
+# build variant process. Suites: cts_hardware & arc-cts-hardware.
+CONFIG['HARDWARE_MODULES'] = [
+        'CtsPerfettoTestCases',
+        'CtsSustainedPerformanceHostTestCases',
+        'CtsCameraTestCases',
+        'CtsViewTestCases',
+        'CtsMediaTestCases',
+        'CtsNativeMediaAAudioTestCases',
+        'CtsNetTestCases',
+        'CtsWifiTestCases',
+        'CtsUsageStatsTestCases',
+        'CtsSensorTestCases',
+]
+
+SUITE_ARC_CTS_R = ['suite:arc-cts-r']
+R_QUAL_SUITES = ['suite:arc-cts-r', 'suite:arc-cts-qual']
+R_REGRESSION_SUITES = ['suite:arc-cts-r', 'suite:arc-cts']
+R_REGRESSION_AND_QUAL_SUITES = CONFIG['QUAL_SUITE_NAMES'] + R_REGRESSION_SUITES
+
 CONFIG['EXTRA_MODULES'] = {
+    'CtsCameraTestCases': {
+        'CtsCameraTestCases.NativeCameraDeviceTest': R_REGRESSION_AND_QUAL_SUITES,
+    },
+    'CtsDeqpTestCases': {
+        'CtsDeqpTestCases.dEQP-EGL': R_QUAL_SUITES,
+        'CtsDeqpTestCases.dEQP-GLES2': R_QUAL_SUITES,
+        'CtsDeqpTestCases.dEQP-GLES3': R_QUAL_SUITES,
+        'CtsDeqpTestCases.dEQP-GLES31': R_QUAL_SUITES,
+        'CtsDeqpTestCases.dEQP-VK': R_QUAL_SUITES,
+    },
+    'CtsMediaTestCases': {
+        'CtsMediaTestCases.audio': R_REGRESSION_AND_QUAL_SUITES,
+        'CtsMediaTestCases.perf': R_REGRESSION_AND_QUAL_SUITES,
+        'CtsMediaTestCases.video': R_REGRESSION_AND_QUAL_SUITES,
+    },
+    'CtsWindowManagerDeviceTestCases': {
+        'CtsWindowManager.A': R_REGRESSION_SUITES,
+        'CtsWindowManager.C': R_REGRESSION_SUITES,
+        'CtsWindowManager.D': R_REGRESSION_SUITES,
+        'CtsWindowManager.F': R_REGRESSION_SUITES,
+        'CtsWindowManager.L': R_REGRESSION_SUITES,
+        'CtsWindowManager.M': R_REGRESSION_SUITES,
+        'CtsWindowManager.Override': R_REGRESSION_SUITES,
+        'CtsWindowManager.P': R_REGRESSION_SUITES,
+        'CtsWindowManager.R': R_REGRESSION_SUITES,
+        'CtsWindowManager.S': R_REGRESSION_SUITES,
+        'CtsWindowManager.T': R_REGRESSION_SUITES,
+        'CtsWindowManager.Window': R_REGRESSION_SUITES,
+        'CtsWindowManager.intent': R_REGRESSION_SUITES,
+        'CtsWindowManager.lifecycle': R_REGRESSION_SUITES,
+    },
+}
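EXTRA_MODULES now maps each parent module to a dict of submodule name -> list of suites whose control files should carry it, replacing the older shared 'SUBMODULES'/'SUITES' layout removed further down in this hunk. A minimal sketch of walking the new shape (data trimmed for brevity; the loop is illustrative, not the generator's code):

# Hedged sketch of consuming the new EXTRA_MODULES shape.
EXTRA_MODULES = {
        'CtsDeqpTestCases': {
                'CtsDeqpTestCases.dEQP-EGL': ['suite:arc-cts-r',
                                              'suite:arc-cts-qual'],
        },
}

for parent, submodules in EXTRA_MODULES.items():
    for submodule, suites in submodules.items():
        print('%s -> %s (suites: %s)' %
              (parent, submodule, ', '.join(suites) or 'none'))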
+
+# In addition to EXTRA_MODULES, these modules do require separate control files
+# for internal and moblab.
+CONFIG['HARDWAREONLY_EXTRA_MODULES'] = {
+        'CtsAppTestCases': {
+                'CtsAppTestCases.feature': [],
+        },
+        'CtsCameraTestCases': {
+                'CtsCameraTestCases.NativeCameraDeviceTest': [],
+        },
         'CtsDeqpTestCases': {
-                'SUBMODULES':
-                set([
-                        'CtsDeqpTestCases.dEQP-EGL',
-                        'CtsDeqpTestCases.dEQP-GLES2',
-                        'CtsDeqpTestCases.dEQP-GLES3',
-                        'CtsDeqpTestCases.dEQP-GLES31',
-                        'CtsDeqpTestCases.dEQP-VK'
-                ]),
-                'SUITES': ['suite:arc-cts-r'],
+                'CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite': [],
         },
-        'CtsMediaTestCases': {
-                'SUBMODULES': set([
-                        'CtsMediaTestCases.audio',
-                        'CtsMediaTestCases.video',
-                ]),
-                'SUITES': ['suite:arc-cts-r'],
+        'CtsMediaStressTestCases': {
+                'CtsMediaStressTestCases.camera': [],
         },
-        'CtsWindowManagerDeviceTestCases': {
-                'SUBMODULES': set([
-                        'CtsWindowManager.A',
-                        'CtsWindowManager.C',
-                        'CtsWindowManager.D',
-                        'CtsWindowManager.Ensure',
-                        'CtsWindowManager.F',
-                        'CtsWindowManager.L',
-                        'CtsWindowManager.M',
-                        'CtsWindowManager.Override',
-                        'CtsWindowManager.P',
-                        'CtsWindowManager.R',
-                        'CtsWindowManager.S',
-                        'CtsWindowManager.T',
-                        'CtsWindowManager.Window',
-                        'CtsWindowManager.intent',
-                        'CtsWindowManager.lifecycle',
-                ]),
-                'SUITES': ['suite:arc-cts-r'],
+        'CtsPermissionTestCases': {
+                'CtsPermissionTestCases.camera': [],
         },
 }
 
 # Moblab optionally can reshard modules. This was originally used
 # for deqp but it is no longer required for that module. Retaining
 # the feature in case a future slower module needs to be sharded.
-CONFIG['PUBLIC_EXTRA_MODULES'] = {}
+CONFIG['PUBLIC_EXTRA_MODULES'] = {
+        'CtsCameraTestCases': {
+                'CtsCameraTestCases.NativeCameraDeviceTest':
+                        [CONFIG['MOBLAB_SUITE_NAME']],
+        },
+}
 
-# TODO(haddowk,kinaba): Hack for b/138622686. Clean up later.
 CONFIG['EXTRA_SUBMODULE_OVERRIDE'] = {
-        'x86': {
-                'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic': [
-                        'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.32',
-                        'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.64',
-                ]
-        }
 }
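The now-emptied EXTRA_SUBMODULE_OVERRIDE is the per-ABI escape hatch that the removed 'x86' entry (and the .32/.64 arithmetic TODOs above) relied on: when emitting control files for a given ABI, an overridden submodule is swapped for its listed replacements. A small sketch of that substitution, reusing the removed entry as example data (the expand() helper is illustrative, not the generator's code):

# Sketch of per-ABI submodule replacement driven by EXTRA_SUBMODULE_OVERRIDE.
EXTRA_SUBMODULE_OVERRIDE = {
        'x86': {
                'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic': [
                        'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.32',
                        'CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.64',
                ]
        }
}

def expand(submodules, abi):
    """Replaces overridden submodules for the given ABI; others pass through."""
    expanded = []
    for name in submodules:
        expanded.extend(EXTRA_SUBMODULE_OVERRIDE.get(abi, {}).get(name, [name]))
    return expanded

print(expand(['CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic'], 'x86'))
print(expand(['CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic'], 'arm'))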
 
 CONFIG['EXTRA_COMMANDLINE'] = {
+        'CtsAppTestCases.feature': [
+                '--module', 'CtsAppTestCases', '--test',
+                'android.app.cts.SystemFeaturesTest'
+        ],
+        'CtsCameraTestCases.NativeCameraDeviceTest': [
+                '--include-filter',
+                'CtsCameraTestCases android.hardware.camera2.cts.NativeCameraDeviceTest',
+                '--include-filter',
+                'CtsCameraTestCases[instant] android.hardware.camera2.cts.NativeCameraDeviceTest',
+                '--include-filter',
+                'CtsCameraTestCases android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]',
+                '--include-filter',
+                'CtsCameraTestCases[instant] android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]',
+        ],
         'CtsDeqpTestCases.dEQP-EGL': [
                 '--include-filter', 'CtsDeqpTestCases', '--module',
                 'CtsDeqpTestCases', '--test', 'dEQP-EGL.*'
@@ -262,6 +405,10 @@
                 '--include-filter', 'CtsDeqpTestCases', '--module',
                 'CtsDeqpTestCases', '--test', 'dEQP-GLES3.*'
         ],
+        'CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite': [
+                '--module', 'CtsDeqpTestCases', '--test',
+                'dEQP-GLES3.functional.prerequisite#*'
+        ],
         'CtsDeqpTestCases.dEQP-GLES31': [
                 '--include-filter', 'CtsDeqpTestCases', '--module',
                 'CtsDeqpTestCases', '--test', 'dEQP-GLES31.*'
@@ -441,6 +588,12 @@
                 '--include-filter', 'CtsDeqpTestCases', '--module',
                 'CtsDeqpTestCases', '--test', 'dEQP-VK.ycbcr.*'
         ],
+        'CtsMediaStressTestCases.camera': [
+                '--module',
+                'CtsMediaStressTestCases',
+                '--test',
+                'android.mediastress.cts.MediaRecorderStressTest',
+        ],
         'CtsMediaTestCases.audio': [
                 '--include-filter',
                 'CtsMediaTestCases android.media.cts.AudioAttributesTest',
@@ -505,223 +658,241 @@
                 '--include-filter',
                 'CtsMediaTestCases android.media.cts.VolumeShaperTest',
         ],
+        'CtsMediaTestCases.perf': [
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.VideoDecoderPerfTest',
+        ],
         'CtsMediaTestCases.video': [
-               '--include-filter',
-               'CtsMediaTestCases android.media.cts.AdaptivePlaybackTest',
-               '--include-filter',
-               'CtsMediaTestCases android.media.cts.DecodeAccuracyTest',
-               '--include-filter',
-               'CtsMediaTestCases android.media.cts.DecodeEditEncodeTest',
-               '--include-filter',
-               'CtsMediaTestCases android.media.cts.DecoderConformanceTest',
-               '--include-filter',
-               'CtsMediaTestCases android.media.cts.EncodeDecodeTest',
-               '--include-filter',
-               'CtsMediaTestCases android.media.cts.ExtractDecodeEditEncodeMuxTest',
-               '--include-filter',
-               'CtsMediaTestCases android.media.cts.MediaCodecPlayerTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.AdaptivePlaybackTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.DecodeAccuracyTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.DecodeEditEncodeTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.DecoderConformanceTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.EncodeDecodeTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.ExtractDecodeEditEncodeMuxTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.MediaCodecPlayerTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.MediaCodecPlayerTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.MediaDrmClearkeyTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.MediaRecorderTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.MediaSynctest#testPlayVideo',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.VideoCodecTest',
+                '--include-filter',
+                'CtsMediaTestCases android.media.cts.VideoEncoderTest',
+        ],
+        'CtsPermissionTestCases.camera': [
+                '--include-filter',
+                'CtsPermissionTestCases android.permission.cts.CameraPermissionTest',
+                '--include-filter',
+                'CtsPermissionTestCases android.permission.cts.Camera2PermissionTest',
         ],
         'CtsWindowManager.A': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ActivityManagerGetConfigTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ActivityMetricsLoggerTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTaskAffinityTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTransitionTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ActivityViewTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ActivityVisibilityTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.AddWindowAsUserTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsAppOpsTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsImportanceTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.AmProfileTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.AmStartOptionsTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.AnrTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.AppConfigurationTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.AspectRatioTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.AssistantStackTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ActivityManagerGetConfigTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ActivityMetricsLoggerTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTaskAffinityTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTransitionTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ActivityViewTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ActivityVisibilityTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.AddWindowAsUserTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsAppOpsTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsImportanceTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.AmProfileTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.AmStartOptionsTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.AnrTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.AppConfigurationTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.AspectRatioTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.AssistantStackTests',
         ],
         'CtsWindowManager.C': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.CloseOnOutsideTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ConfigChangeTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.CrossAppDragAndDropTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.CloseOnOutsideTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ConfigChangeTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.CrossAppDragAndDropTests',
         ],
         'CtsWindowManager.D': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.DecorInsetTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.DeprecatedTargetSdkTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.DialogFrameTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.DisplayCutoutTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.DisplaySizeTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.DisplayTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.DragDropTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.DreamManagerServiceTests',
-        ],
-        'CtsWindowManager.Ensure': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.EnsureBarContrastTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.DecorInsetTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.DeprecatedTargetSdkTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.DialogFrameTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.DisplayCutoutTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.DisplaySizeTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.DisplayTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.DragDropTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.DreamManagerServiceTests',
         ],
         'CtsWindowManager.F': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ForceRelayoutTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.FreeformWindowingModeTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ForceRelayoutTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.FreeformWindowingModeTests',
         ],
         'CtsWindowManager.L': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.LayoutTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.LocationInWindowTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.LocationOnScreenTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.LayoutTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.LocationInWindowTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.LocationOnScreenTests',
         ],
         'CtsWindowManager.M': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ManifestLayoutTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.MinimalPostProcessingTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayActivityLaunchTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayClientTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayKeyguardTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayLockedKeyguardTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPolicyTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPrivateDisplayTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySecurityTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySystemDecorationTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ManifestLayoutTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.MinimalPostProcessingTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayActivityLaunchTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayClientTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayKeyguardTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayLockedKeyguardTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPolicyTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPrivateDisplayTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySecurityTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySystemDecorationTests',
         ],
         'CtsWindowManager.Override': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.OverrideConfigTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.OverrideConfigTests',
         ],
         'CtsWindowManager.P': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.PinnedStackTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.PrereleaseSdkTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.PresentationTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.PinnedStackTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.PrereleaseSdkTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.PresentationTest',
         ],
         'CtsWindowManager.R': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ReplaceWindowTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.RobustnessTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ReplaceWindowTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.RobustnessTests',
         ],
         'CtsWindowManager.S': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.SplashscreenTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.SplitScreenTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityAsUserTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlViewHostTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewSurfaceValidatorTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.SplashscreenTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.SplitScreenTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityAsUserTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlViewHostTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewSurfaceValidatorTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewTest',
         ],
         'CtsWindowManager.T': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.ToastWindowTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.TransitionSelectionTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.ToastWindowTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.TransitionSelectionTests',
         ],
         'CtsWindowManager.Window': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextPolicyTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowFocusTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowInputTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationCallbackTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationControllerTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationImeTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationSynchronicityTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsControllerTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsLayoutTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsPolicyTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_BadTokenExceptionTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_LayoutParamsTest',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowMetricsTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.WindowTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextPolicyTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowFocusTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowInputTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationCallbackTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationControllerTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationImeTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationSynchronicityTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsControllerTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsLayoutTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsPolicyTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_BadTokenExceptionTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_LayoutParamsTest',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowMetricsTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.WindowTest',
         ],
         'CtsWindowManager.intent': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentGenerationTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentGenerationTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentTests',
         ],
         'CtsWindowManager.lifecycle': [
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleFreeformTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleKeyguardTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecyclePipTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleSplitScreenTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTopResumedStateTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityStarterTests',
-               '--include-filter',
-               'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleFreeformTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleKeyguardTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecyclePipTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleSplitScreenTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTopResumedStateTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityStarterTests',
+                '--include-filter',
+                'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityTests',
         ],
 }
 
diff --git a/server/cros/tradefed/generate_controlfiles_GTS.py b/server/cros/tradefed/generate_controlfiles_GTS.py
deleted file mode 100755
index 8a9a7e9..0000000
--- a/server/cros/tradefed/generate_controlfiles_GTS.py
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-
-# The dashboard suppresses upload to APFE for GS directories (based on autotest
-# tag) that contain 'tradefed-run-collect-tests'. b/119640440
-# Do not change the name/tag without adjusting the dashboard.
-_COLLECT = 'tradefed-run-collect-tests-only-internal'
-_PUBLIC_COLLECT = 'tradefed-run-collect-tests-only'
-_ALL = 'all'
-
-CONFIG = {}
-
-CONFIG['TEST_NAME'] = 'cheets_GTS'
-CONFIG['DOC_TITLE'] = 'Android Google Test Suite (GTS)'
-CONFIG['MOBLAB_SUITE_NAME'] = 'suite:gts'
-CONFIG['COPYRIGHT_YEAR'] = 2016
-
-CONFIG['AUTHKEY'] = 'gs://chromeos-arc-images/cts/bundle/gts-arc.json'
-CONFIG['TRADEFED_IGNORE_BUSINESS_LOGIC_FAILURE'] = True
-
-CONFIG['LARGE_MAX_RESULT_SIZE'] = 500 * 1024
-CONFIG['NORMAL_MAX_RESULT_SIZE'] = 300 * 1024
-
-CONFIG['TRADEFED_CTS_COMMAND'] ='gts'
-CONFIG['TRADEFED_RETRY_COMMAND'] = 'retry'
-CONFIG['TRADEFED_DISABLE_REBOOT'] = False
-CONFIG['TRADEFED_DISABLE_REBOOT_ON_COLLECTION'] = True
-CONFIG['TRADEFED_MAY_SKIP_DEVICE_INFO'] = False
-CONFIG['NEEDS_DEVICE_INFO'] = []
-CONFIG['TRADEFED_EXECUTABLE_PATH'] = 'android-gts/tools/gts-tradefed'
-
-CONFIG['INTERNAL_SUITE_NAMES'] = ['suite:arc-gts']
-CONFIG['QUAL_SUITE_NAMES'] = ['suite:arc-gts-qual']
-
-CONFIG['CONTROLFILE_TEST_FUNCTION_NAME'] = 'run_TS'
-CONFIG['CONTROLFILE_WRITE_SIMPLE_QUAL_AND_REGRESS'] = False
-CONFIG['CONTROLFILE_WRITE_CAMERA'] = False
-CONFIG['CONTROLFILE_WRITE_EXTRA'] = False
-
-CONFIG['CTS_JOB_RETRIES_IN_PUBLIC'] = 2
-CONFIG['CTS_QUAL_RETRIES'] = 9
-CONFIG['CTS_MAX_RETRIES'] = {}
-
-# Timeout in hours.
-# Modules that run very long are encoded here.
-CONFIG['CTS_TIMEOUT_DEFAULT'] = 0.2
-CONFIG['CTS_TIMEOUT'] = {
-        'GtsAssistantMicHostTestCases': 0.5,
-        'GtsExoPlayerTestCases': 1.5,
-        'GtsGmscoreHostTestCases': 1.0,
-        'GtsMediaTestCases': 4,
-        'GtsNetworkWatchlistTestCases': 1.0,
-        'GtsYouTubeTestCases': 1.0,
-        _ALL: 24,
-        _COLLECT: 1.0,
-        _PUBLIC_COLLECT: 1.0,
-}
-
-# Any test that runs as part as blocking BVT needs to be stable and fast. For
-# this reason we enforce a tight timeout on these modules/jobs.
-# Timeout in hours. (0.1h = 6 minutes)
-CONFIG['BVT_TIMEOUT'] = 0.1
-# We allow a very long runtime for qualification (1 day).
-CONFIG['QUAL_TIMEOUT'] = 24
-
-CONFIG['QUAL_BOOKMARKS'] = sorted([
-    'A',  # A bookend to simplify partition algorithm.
-    'GtsExoPlayerTestCases',
-    'GtsMediaTestCases',
-    'GtsMediaTestCasesz',  # runs the biggest module in a single job.
-    'zzzzz'  # A bookend to simplify algorithm.
-])
-
-CONFIG['SMOKE'] = []
-
-CONFIG['BVT_ARC'] = []
-
-CONFIG['BVT_PERBUILD'] = [
-    'GtsAdminTestCases',
-    'GtsMemoryHostTestCases',
-    'GtsMemoryTestCases',
-    'GtsNetTestCases',
-    'GtsOsTestCases',
-    'GtsPlacementTestCases',
-    'GtsPrivacyTestCases',
-]
-
-CONFIG['NEEDS_POWER_CYCLE'] = []
-
-CONFIG['HARDWARE_DEPENDENT_MODULES'] = []
-
-CONFIG['VMTEST_INFO_SUITES'] = collections.OrderedDict()
-
-# Modules that are known to download and/or push media file assets.
-CONFIG['MEDIA_MODULES'] = ['GtsYouTubeTestCases']
-CONFIG['NEEDS_PUSH_MEDIA'] = CONFIG['MEDIA_MODULES'] + [_ALL]
-CONFIG['ENABLE_DEFAULT_APPS'] = []
-
-# Preconditions applicable to public and internal tests.
-CONFIG['PRECONDITION'] = {}
-CONFIG['LOGIN_PRECONDITION'] = {}
-
-CONFIG['LAB_DEPENDENCY'] = {}
-
-# Preconditions applicable to public tests.
-CONFIG['PUBLIC_PRECONDITION'] = {}
-CONFIG['PUBLIC_DEPENDENCIES'] = {}
-
-# This information is changed based on regular analysis of the failure rate on
-# partner moblabs.
-CONFIG['PUBLIC_MODULE_RETRY_COUNT'] = {
-  _ALL: 2,
-  'GtsExoPlayerTestCases': 5,  # TODO(b/149376356, b/164230246)
-  'GtsMediaTestCases': 5,  # TODO(b/140841434)
-  'GtsYouTubeTestCases': 5,  # TODO(b/149376356)
-}
-
-CONFIG['PUBLIC_OVERRIDE_TEST_PRIORITY'] = {
-    _PUBLIC_COLLECT: 70,
-}
-
-# This information is changed based on regular analysis of the job run time on
-# partner moblabs.
-
-CONFIG['OVERRIDE_TEST_LENGTH'] = {
-    'GtsMediaTestCases': 4,
-    _ALL: 4,
-    # Even though collect tests doesn't run very long, it must be the very first
-    # job executed inside of the suite. Hence it is the only 'LENGTHY' test.
-    _COLLECT: 5,  # LENGTHY
-}
-
-# Enabling --logcat-on-failure can extend total run time significantly if
-# individual tests finish in the order of 10ms or less (b/118836700). Specify
-# modules here to not enable the flag.
-CONFIG['DISABLE_LOGCAT_ON_FAILURE'] = set([])
-CONFIG['EXTRA_MODULES'] = {}
-CONFIG['PUBLIC_EXTRA_MODULES'] = {}
-CONFIG['EXTRA_SUBMODULE_OVERRIDE'] = {}
-CONFIG['EXTRA_COMMANDLINE'] = {}
-CONFIG['EXTRA_ATTRIBUTES'] = {
-    'tradefed-run-collect-tests-only-internal': ['suite:arc-gts'],
-}
-CONFIG['EXTRA_ARTIFACTS'] = {}
-
-CONFIG['PREREQUISITES'] = {
-    'GtsGmscoreHostTestCases': ['bluetooth'],
-}
-CONFIG['USE_JDK9'] = True
-
-from generate_controlfiles_common import main
-
-if __name__ == '__main__':
-    main(CONFIG)
diff --git a/server/cros/tradefed/generate_controlfiles_GTS_R.py b/server/cros/tradefed/generate_controlfiles_GTS_R.py
deleted file mode 100755
index 7b0508f..0000000
--- a/server/cros/tradefed/generate_controlfiles_GTS_R.py
+++ /dev/null
@@ -1,146 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-
-# The dashboard suppresses upload to APFE for GS directories (based on autotest
-# tag) that contain 'tradefed-run-collect-tests'. b/119640440
-# Do not change the name/tag without adjusting the dashboard.
-_COLLECT = 'tradefed-run-collect-tests-only-internal'
-_PUBLIC_COLLECT = 'tradefed-run-collect-tests-only'
-_ALL = 'all'
-
-CONFIG = {}
-
-CONFIG['TEST_NAME'] = 'cheets_GTS_R'
-CONFIG['DOC_TITLE'] = 'Android Google Test Suite (GTS)'
-CONFIG['MOBLAB_SUITE_NAME'] = 'suite:gts'
-CONFIG['COPYRIGHT_YEAR'] = 2020
-
-CONFIG['AUTHKEY'] = 'gs://chromeos-arc-images/cts/bundle/gts-arc.json'
-CONFIG['TRADEFED_IGNORE_BUSINESS_LOGIC_FAILURE'] = True
-
-CONFIG['LARGE_MAX_RESULT_SIZE'] = 500 * 1024
-CONFIG['NORMAL_MAX_RESULT_SIZE'] = 300 * 1024
-
-CONFIG['TRADEFED_CTS_COMMAND'] ='gts'
-CONFIG['TRADEFED_RETRY_COMMAND'] = 'retry'
-CONFIG['TRADEFED_DISABLE_REBOOT'] = False
-CONFIG['TRADEFED_DISABLE_REBOOT_ON_COLLECTION'] = True
-CONFIG['TRADEFED_MAY_SKIP_DEVICE_INFO'] = False
-CONFIG['NEEDS_DEVICE_INFO'] = []
-CONFIG['TRADEFED_EXECUTABLE_PATH'] = 'android-gts/tools/gts-tradefed'
-
-# For now only run as a part of arc-cts-r.
-# TODO(kinaba): move to arc-gts and arc-gts-qual after R
-# got out from the experimental state.
-CONFIG['INTERNAL_SUITE_NAMES'] = ['suite:arc-cts-r']
-CONFIG['QUAL_SUITE_NAMES'] = []
-
-CONFIG['CONTROLFILE_TEST_FUNCTION_NAME'] = 'run_TS'
-CONFIG['CONTROLFILE_WRITE_SIMPLE_QUAL_AND_REGRESS'] = False
-CONFIG['CONTROLFILE_WRITE_CAMERA'] = False
-CONFIG['CONTROLFILE_WRITE_EXTRA'] = False
-
-CONFIG['CTS_JOB_RETRIES_IN_PUBLIC'] = 2
-CONFIG['CTS_QUAL_RETRIES'] = 9
-CONFIG['CTS_MAX_RETRIES'] = {}
-
-# Timeout in hours.
-# Modules that run very long are encoded here.
-CONFIG['CTS_TIMEOUT_DEFAULT'] = 0.2
-CONFIG['CTS_TIMEOUT'] = {
-    'GtsExoPlayerTestCases': 1.5,
-    'GtsGmscoreHostTestCases': 1.0,
-    'GtsMediaTestCases': 4,
-    'GtsYouTubeTestCases': 1.0,
-    _ALL: 24,
-    _COLLECT: 0.5,
-    _PUBLIC_COLLECT: 0.5,
-}
-
-# Any test that runs as part as blocking BVT needs to be stable and fast. For
-# this reason we enforce a tight timeout on these modules/jobs.
-# Timeout in hours. (0.1h = 6 minutes)
-CONFIG['BVT_TIMEOUT'] = 0.1
-# We allow a very long runtime for qualification (1 day).
-CONFIG['QUAL_TIMEOUT'] = 24
-
-# TODO(kinab): Set up when we move the test to arc-gts-qual
-CONFIG['QUAL_BOOKMARKS'] = sorted([])
-
-CONFIG['SMOKE'] = []
-
-CONFIG['BVT_ARC'] = []
-
-CONFIG['BVT_PERBUILD'] = []
-
-CONFIG['NEEDS_POWER_CYCLE'] = []
-
-CONFIG['HARDWARE_DEPENDENT_MODULES'] = []
-
-CONFIG['VMTEST_INFO_SUITES'] = collections.OrderedDict()
-
-# Modules that are known to download and/or push media file assets.
-CONFIG['MEDIA_MODULES'] = ['GtsYouTubeTestCases']
-CONFIG['NEEDS_PUSH_MEDIA'] = CONFIG['MEDIA_MODULES'] + [_ALL]
-CONFIG['ENABLE_DEFAULT_APPS'] = []
-
-# Preconditions applicable to public and internal tests.
-CONFIG['PRECONDITION'] = {}
-CONFIG['LOGIN_PRECONDITION'] = {}
-
-CONFIG['LAB_DEPENDENCY'] = {}
-
-# Preconditions applicable to public tests.
-CONFIG['PUBLIC_PRECONDITION'] = {}
-CONFIG['PUBLIC_DEPENDENCIES'] = {}
-
-# This information is changed based on regular analysis of the failure rate on
-# partner moblabs.
-CONFIG['PUBLIC_MODULE_RETRY_COUNT'] = {
-  _ALL: 2,
-  'GtsMediaTestCases': 5,  # TODO(b/140841434)
-  'GtsYouTubeTestCases': 5,  # TODO(b/149376356)
-}
-
-CONFIG['PUBLIC_OVERRIDE_TEST_PRIORITY'] = {
-    _PUBLIC_COLLECT: 70,
-}
-
-# This information is changed based on regular analysis of the job run time on
-# partner moblabs.
-
-CONFIG['OVERRIDE_TEST_LENGTH'] = {
-    'GtsMediaTestCases': 4,
-    _ALL: 4,
-    # Even though collect tests doesn't run very long, it must be the very first
-    # job executed inside of the suite. Hence it is the only 'LENGTHY' test.
-    _COLLECT: 5,  # LENGTHY
-}
-
-# Enabling --logcat-on-failure can extend total run time significantly if
-# individual tests finish in the order of 10ms or less (b/118836700). Specify
-# modules here to not enable the flag.
-CONFIG['DISABLE_LOGCAT_ON_FAILURE'] = set([])
-CONFIG['EXTRA_MODULES'] = {}
-CONFIG['PUBLIC_EXTRA_MODULES'] = {}
-CONFIG['EXTRA_SUBMODULE_OVERRIDE'] = {}
-CONFIG['EXTRA_COMMANDLINE'] = {}
-CONFIG['EXTRA_ATTRIBUTES'] = {
-    'tradefed-run-collect-tests-only-internal': ['suite:arc-gts'],
-}
-CONFIG['EXTRA_ARTIFACTS'] = {}
-
-CONFIG['PREREQUISITES'] = {
-    'GtsGmscoreHostTestCases': ['bluetooth'],
-}
-CONFIG['USE_JDK9'] = True
-
-from generate_controlfiles_common import main
-
-if __name__ == '__main__':
-    main(CONFIG)
-
diff --git a/server/cros/tradefed/generate_controlfiles_VTS_R.py b/server/cros/tradefed/generate_controlfiles_VTS_R.py
index f64e414..36ba412 100755
--- a/server/cros/tradefed/generate_controlfiles_VTS_R.py
+++ b/server/cros/tradefed/generate_controlfiles_VTS_R.py
@@ -1,4 +1,5 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -42,7 +43,12 @@
 _COLLECT = 'tradefed-run-collect-tests-only-internal'
 _PUBLIC_COLLECT = 'tradefed-run-collect-tests-only'
 
-CONFIG['LAB_DEPENDENCY'] = {'x86': ['cts_abi_x86'], 'arm': ['cts_abi_arm']}
+CONFIG['LAB_DEPENDENCY'] = {
+        'arm': ['cts_abi_arm'],
+        'arm64': ['cts_abi_arm'],
+        'x86': ['cts_abi_x86'],
+        'x86_64': ['cts_abi_x86'],
+}
 
 CONFIG['CTS_JOB_RETRIES_IN_PUBLIC'] = 1
 CONFIG['CTS_QUAL_RETRIES'] = 9
diff --git a/server/cros/tradefed/generate_controlfiles_common.py b/server/cros/tradefed/generate_controlfiles_common.py
index 8e6b956..c3c9622 100755
--- a/server/cros/tradefed/generate_controlfiles_common.py
+++ b/server/cros/tradefed/generate_controlfiles_common.py
@@ -1,4 +1,5 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,6 +9,7 @@
 import argparse
 import contextlib
 import copy
+from enum import Enum
 import logging
 import os
 import re
@@ -20,21 +22,30 @@
 # Use 'sudo pip install jinja2' to install.
 from jinja2 import Template
 
+# Type of source storage from which the generated control files should
+# retrieve the xTS bundle zip file.
+#  'MOBLAB' means the bucket for moblab used by 3PL.
+#  'LATEST' means the latest official xTS release.
+#  'DEV' means a preview version built from the development branch.
+SourceType = Enum('SourceType', ['MOBLAB', 'LATEST', 'DEV'])
+
 
 # TODO(ihf): Assign better TIME to control files. Scheduling uses this to run
 # LENGTHY first, then LONG, MEDIUM etc. But we need LENGTHY for the collect
 # job, downgrade all others. Make sure this still works in CQ/smoke suite.
 _CONTROLFILE_TEMPLATE = Template(
-    textwrap.dedent("""\
+        textwrap.dedent("""\
     # Copyright {{year}} The Chromium OS Authors. All rights reserved.
     # Use of this source code is governed by a BSD-style license that can be
     # found in the LICENSE file.
 
     # This file has been automatically generated. Do not edit!
     {%- if servo_support_needed %}
-
-    from autotest_lib.server import utils
-
+    from autotest_lib.server import utils as server_utils
+    {%- endif %}
+    {%- if wifi_info_needed %}
+    from autotest_lib.client.common_lib import utils, global_config
+    import pipes
     {%- endif %}
 
     AUTHOR = 'ARC++ Team'
@@ -45,6 +56,7 @@
     TEST_TYPE = 'server'
     TIME = '{{test_length}}'
     MAX_RESULT_SIZE_KB = {{max_result_size_kb}}
+    PY_VERSION = 3
     {%- if sync_count and sync_count > 1 %}
     SYNC_COUNT = {{sync_count}}
     {%- endif %}
@@ -56,7 +68,7 @@
 
     # For local debugging, if your test setup doesn't have servo, REMOVE these
     # two lines.
-    args_dict = utils.args_to_dict(args)
+    args_dict = server_utils.args_to_dict(args)
     servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
 
     {%- endif %}
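
The hunk above introduces a SourceType enum to distinguish where the generated control files fetch the xTS bundle from. A minimal standalone sketch of how such an enum can drive the bundle-URI choice; pick_uri is a hypothetical helper, but the mapping mirrors the one added to get_controlfile_content further down in this patch:

from enum import Enum

# Same three source kinds as the patch defines.
SourceType = Enum('SourceType', ['MOBLAB', 'LATEST', 'DEV'])

def pick_uri(source_type):
    # MOBLAB bundles carry no URI in the control file; the other two use
    # symbolic markers that are resolved at run time.
    return {
            SourceType.MOBLAB: None,
            SourceType.LATEST: 'LATEST',
            SourceType.DEV: 'DEV',
    }.get(source_type)

assert pick_uri(SourceType.DEV) == 'DEV'
assert pick_uri(SourceType.MOBLAB) is None
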
@@ -77,6 +89,11 @@
         {%- else %}
         host_list = [hosts.create_host(machine)]
         {%- endif %}
+        {%- if wifi_info_needed %}
+        ssid = utils.get_wireless_ssid(machine['hostname'])
+        wifipass = global_config.global_config.get_config_value('CLIENT',
+                    'wireless_password', default=None)
+        {%- endif %}
     {%- endif %}
         job.run_test(
             '{{base_name}}',
@@ -95,6 +112,9 @@
     {%- if needs_push_media %}
             needs_push_media={{needs_push_media}},
     {%- endif %}
+    {%- if needs_cts_helpers %}
+            use_helpers={{needs_cts_helpers}},
+    {%- endif %}
             tag='{{tag}}',
             test_name='{{name}}',
     {%- if authkey %}
@@ -123,7 +143,7 @@
             hard_reboot_on_failure=True,
     {%- endif %}
     {%- if camera_facing %}
-            load_waivers=False,
+            retry_manual_tests=True,
     {%- endif %}
             timeout={{timeout}})
 
@@ -142,6 +162,9 @@
 
 _COLLECT = 'tradefed-run-collect-tests-only-internal'
 _PUBLIC_COLLECT = 'tradefed-run-collect-tests-only'
+_CTSHARDWARE_COLLECT = 'tradefed-run-collect-tests-only-hardware-internal'
+_PUBLIC_CTSHARDWARE_COLLECT = 'tradefed-run-collect-tests-only-hardware'
+
 
 _TEST_LENGTH = {1: 'FAST', 2: 'SHORT', 3: 'MEDIUM', 4: 'LONG', 5: 'LENGTHY'}
 
@@ -197,7 +220,7 @@
     In this case we chose to guess by filename, but we could also parse the
     xml files in the module. (Maybe this needs to be done in the future.)
     """
-    if CONFIG.get('DYNAMIC_TEST_FETCH'):
+    if CONFIG.get('SINGLE_CONTROL_FILE'):
         return None
     if filename.endswith('arm.zip'):
         return 'arm'
@@ -205,45 +228,65 @@
         return 'arm64'
     if filename.endswith('x86.zip'):
         return 'x86'
+    if filename.endswith('x86_64.zip'):
+        return 'x86_64'
 
     assert(CONFIG['TRADEFED_CTS_COMMAND'] =='gts'), 'Only GTS has empty ABI'
     return ''
 
 
-def get_extension(module, abi, revision, is_public=False, led_provision=None, camera_facing=None):
+def get_extension(module,
+                  abi,
+                  revision,
+                  is_public=False,
+                  led_provision=None,
+                  camera_facing=None,
+                  hardware_suite=False,
+                  abi_bits=None):
     """Defines a unique string.
 
     Notice we chose module revision first, then abi, as the module revision
     changes regularly. This ordering makes it simpler to add/remove modules.
     @param module: CTS module which will be tested in the control file. If 'all'
                    is specified, the control file will runs all the tests.
-    @param public: boolean variable to specify whether or not the bundle is from
-                   public source or not.
+    @param is_public: boolean variable to specify whether the bundle is
+                   from a public source.
     @param led_provision: string or None indicate whether the camerabox has led
                           light or not.
     @param camera_facing: string or None indicate whether it's camerabox tests
                           for specific camera facing or not.
+    @param abi_bits: 32, 64 or None, indicating the bitwidth for the specific
+                     abi to run.
     @return string: unique string for specific tests. If public=True then the
                     string is "<abi>.<module>", otherwise, the unique string is
-                    "<revision>.<abi>.<module>". Note that if abi is empty, the
-                    abi part is omitted.
+                    "internal.<abi>.<module>" for internal. Note that if abi is empty,
+                    the abi part is omitted.
     """
     ext_parts = []
-    if not CONFIG.get('DYNAMIC_TEST_FETCH') and not is_public:
-        ext_parts = [revision]
-    if not CONFIG.get('DYNAMIC_TEST_FETCH') and abi:
+    if not CONFIG.get('SINGLE_CONTROL_FILE') and not is_public:
+        if module == _COLLECT:
+            ext_parts = [revision]
+        else:
+            ext_parts = ['internal']
+    if not CONFIG.get('SINGLE_CONTROL_FILE') and abi:
         ext_parts += [abi]
     ext_parts += [module]
     if led_provision:
         ext_parts += [led_provision]
     if camera_facing:
         ext_parts += ['camerabox', camera_facing]
+    if hardware_suite and module not in get_collect_modules(
+            is_public, hardware_suite):
+        ext_parts += ['ctshardware']
+    if not CONFIG.get('SINGLE_CONTROL_FILE') and abi and abi_bits:
+        ext_parts += [str(abi_bits)]
     return '.'.join(ext_parts)
 
 
 def get_doc(modules, abi, is_public):
     """Defines the control file DOC string."""
-    if modules.intersection(get_collect_modules(is_public)):
+    if modules.intersection(get_collect_modules(is_public)) or CONFIG.get(
+            'SINGLE_CONTROL_FILE'):
         module_text = 'all'
     else:
         # Generate per-module DOC
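
To make the new extension naming above easier to review, here is a self-contained sketch of the internal scheme that get_extension now produces; sketch_extension is illustrative only, and camerabox, hardware-suite and SINGLE_CONTROL_FILE handling are omitted:

_COLLECT = 'tradefed-run-collect-tests-only-internal'

def sketch_extension(module, abi, revision, is_public, abi_bits=None):
    parts = []
    if not is_public:
        # Internal non-collect jobs use a fixed 'internal' prefix instead of
        # the bundle revision, so names stay stable across monthly uprevs.
        parts.append(revision if module == _COLLECT else 'internal')
    if abi:
        parts.append(abi)
    parts.append(module)
    if abi and abi_bits:
        parts.append(str(abi_bits))
    return '.'.join(parts)

# sketch_extension('CtsNetTestCases', 'arm', '9.0_r1', False, abi_bits=64)
#   -> 'internal.arm.CtsNetTestCases.64'
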
@@ -258,7 +301,13 @@
 
 def servo_support_needed(modules, is_public=True):
     """Determines if servo support is needed for a module."""
-    return not is_public and all(module in CONFIG['NEEDS_POWER_CYCLE']
+    return not is_public and any(module in CONFIG['NEEDS_POWER_CYCLE']
+                                 for module in modules)
+
+
+def wifi_info_needed(modules, is_public):
+    """Determines if Wifi AP info needs to be retrieved."""
+    return not is_public and any(module in CONFIG.get('WIFI_MODULES', [])
                                  for module in modules)
 
 
@@ -267,7 +316,9 @@
                          revision,
                          is_public=False,
                          led_provision=None,
-                         camera_facing=None):
+                         camera_facing=None,
+                         abi_bits=None,
+                         hardware_suite=False):
     """Defines the control file name.
 
     @param module: CTS module which will be tested in the control file. If 'all'
@@ -278,20 +329,24 @@
                           for specific camera facing or not.
     @param led_provision: string or None indicate whether the camerabox has led
                           light or not.
+    @param abi_bits: 32, 64 or None, indicating the bitwidth for the specific
+                     abi to run.
     @return string: control file for specific tests. If public=True or
                     module=all, then the name will be "control.<abi>.<module>",
                     otherwise, the name will be
                     "control.<revision>.<abi>.<module>".
     """
-    return 'control.%s' % get_extension(module, abi, revision, is_public, led_provision,
-                                        camera_facing)
+    return 'control.%s' % get_extension(module, abi, revision, is_public,
+                                        led_provision, camera_facing, hardware_suite,
+                                        abi_bits)
 
 
 def get_sync_count(_modules, _abi, _is_public):
     return 1
 
 
-def get_suites(modules, abi, is_public, camera_facing=None):
+def get_suites(modules, abi, is_public, camera_facing=None,
+               hardware_suite=False):
     """Defines the suites associated with a module.
 
     @param module: CTS module which will be tested in the control file. If 'all'
@@ -299,14 +354,18 @@
     # TODO(ihf): Make this work with the "all" and "collect" generation,
     # which currently bypass this function.
     """
+    cts_hardware_modules = set(CONFIG.get('HARDWARE_MODULES', []))
+
     if is_public:
-        # On moblab everything runs in the same suite.
-        return [CONFIG['MOBLAB_SUITE_NAME']]
+        suites = set([CONFIG['MOBLAB_SUITE_NAME']])
+        if hardware_suite:
+            suites = set([CONFIG['MOBLAB_HARDWARE_SUITE_NAME']])
+        return sorted(list(suites))
 
     suites = set(CONFIG['INTERNAL_SUITE_NAMES'])
 
     for module in modules:
-        if module in get_collect_modules(is_public):
+        if module in get_collect_modules(is_public, hardware_suite):
             # We collect all tests both in arc-gts and arc-gts-qual as both have
             # a chance to be complete (and used for submission).
             suites |= set(CONFIG['QUAL_SUITE_NAMES'])
@@ -339,6 +398,9 @@
         elif module in CONFIG['BVT_PERBUILD'] and (abi == 'arm' or abi == ''):
             suites.add('suite:bvt-perbuild')
 
+    if hardware_suite:
+        suites = set([CONFIG['HARDWARE_SUITE_NAME']])
+
     if camera_facing != None:
         suites.add('suite:arc-cts-camera')
 
@@ -397,7 +459,7 @@
         # We don't want job retries for module collection or special cases.
         if (module in get_collect_modules(is_public) or module == _ALL or
             ('CtsDeqpTestCases' in CONFIG['EXTRA_MODULES'] and
-             module in CONFIG['EXTRA_MODULES']['CtsDeqpTestCases']['SUBMODULES']
+             module in CONFIG['EXTRA_MODULES']['CtsDeqpTestCases']
              )):
             retries = 0
     return retries
@@ -473,10 +535,12 @@
     preconditions = []
     login_preconditions = []
     prerequisites = []
-    for module in modules:
+    for module in sorted(modules):
         # Remove this once JDK9 is the base JDK for lab.
         if CONFIG.get('USE_JDK9', False):
             extra_args.add('use_jdk9=True')
+        if module in CONFIG.get('USE_OLD_ADB', []):
+            extra_args.add('use_old_adb=True')
         if is_public:
             extra_args.add('warn_on_test_retry=False')
             extra_args.add('retry_manual_tests=True')
@@ -564,13 +628,16 @@
     return CONFIG['AUTHKEY']
 
 
-def _format_collect_cmd(is_public, abi_to_run, retry):
+def _format_collect_cmd(is_public, abi_to_run, retry, is_hardware=False):
     """Returns a list specifying tokens for tradefed to list all tests."""
     if retry:
         return None
     cmd = ['run', 'commandAndExit', 'collect-tests-only']
     if CONFIG['TRADEFED_DISABLE_REBOOT_ON_COLLECTION']:
         cmd += ['--disable-reboot']
+    if is_hardware:
+        cmd.append('--subplan')
+        cmd.append('cts-hardware')
     for m in CONFIG['MEDIA_MODULES']:
         cmd.append('--module-arg')
         cmd.append('%s:skip-media-download:true' % m)
@@ -594,7 +661,8 @@
                         abi_to_run,
                         modules=None,
                         retry=False,
-                        whole_module_set=None):
+                        whole_module_set=None,
+                        is_hardware=False):
     """Returns list of command tokens for tradefed."""
     if retry:
         assert(CONFIG['TRADEFED_RETRY_COMMAND'] == 'cts' or
@@ -608,10 +676,37 @@
 
         special_cmd = _get_special_command_line(modules, is_public)
         if special_cmd:
+            if is_hardware:
+                # For hardware suite we want to exclude [instant] modules.
+                filtered = []
+                i = 0
+                while i < len(special_cmd):
+                    if (special_cmd[i] == '--include-filter'
+                                and '[instant]' in special_cmd[i + 1]):
+                        i += 2
+                    elif (special_cmd[i] == '--module'
+                          and i + 3 < len(special_cmd)
+                          and special_cmd[i + 2] == '--test'):
+                        # [--module, x, --test, y] ==> [--include-filter, "x y"]
+                        # because --module implicitly includes [instant] modules
+                        filtered.append('--include-filter')
+                        filtered.append(
+                                '%s %s' %
+                                (special_cmd[i + 1], special_cmd[i + 3]))
+                        i += 4
+                    elif special_cmd[i] == '--module':
+                        # [--module, x] ==> [--include-filter, x]
+                        filtered.append('--include-filter')
+                        filtered.append(special_cmd[i + 1])
+                        i += 2
+                    else:
+                        filtered.append(special_cmd[i])
+                        i += 1
+                special_cmd = filtered
             cmd.extend(special_cmd)
         elif _ALL in modules:
             pass
-        elif len(modules) == 1:
+        elif len(modules) == 1 and not is_hardware:
             cmd += ['--module', list(modules)[0]]
         else:
             if whole_module_set is None:
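
The [instant]/--module rewrite added above is plain list manipulation and can be checked in isolation. A sketch under the assumption that special_cmd is a flat token list; rewrite_for_hardware is a made-up name for the inline loop:

def rewrite_for_hardware(special_cmd):
    # Drop include-filters that target '[instant]' variants and turn
    # '--module x [--test y]' pairs into equivalent '--include-filter'
    # tokens, as the loop in the hunk above does.
    filtered = []
    i = 0
    while i < len(special_cmd):
        tok = special_cmd[i]
        if tok == '--include-filter' and '[instant]' in special_cmd[i + 1]:
            i += 2
        elif (tok == '--module' and i + 3 < len(special_cmd)
              and special_cmd[i + 2] == '--test'):
            filtered += ['--include-filter',
                         '%s %s' % (special_cmd[i + 1], special_cmd[i + 3])]
            i += 4
        elif tok == '--module':
            filtered += ['--include-filter', special_cmd[i + 1]]
            i += 2
        else:
            filtered.append(tok)
            i += 1
    return filtered

# ['--module', 'CtsX', '--test', 'a.b#c'] -> ['--include-filter', 'CtsX a.b#c']
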
@@ -620,11 +715,16 @@
                 # We run each module with its own --include-filter option.
                 # https://source.android.com/compatibility/cts/run
                 for module in sorted(modules):
+                    # b/196756614 32-bit jobs should skip [parameter] modules.
+                    if is_parameterized_module(module) and abi_to_run in [
+                            'x86', 'armeabi-v7a'
+                    ]:
+                        continue
                     cmd += ['--include-filter', module]
             else:
                 # CTS-Instant does not support --include-filter due to
                 # its implementation detail. Instead, exclude the complement.
-                for module in whole_module_set - set(modules):
+                for module in sorted(whole_module_set - set(modules)):
                     cmd += ['--exclude-filter', module]
 
         # For runs create a logcat file for each individual failure.
@@ -658,19 +758,25 @@
                      is_public,
                      retry=False,
                      abi_to_run=None,
-                     whole_module_set=None):
+                     whole_module_set=None,
+                     is_hardware=False):
     """Command to run the modules specified by a control file."""
-    no_intersection = not modules.intersection(get_collect_modules(is_public))
-    collect_present = (_COLLECT in modules or _PUBLIC_COLLECT in modules)
+    no_intersection = not modules.intersection(get_collect_modules(is_public,
+                          is_hardware))
+    collect_present = (_COLLECT in modules or _PUBLIC_COLLECT in modules or
+                       _CTSHARDWARE_COLLECT in modules or
+                       _PUBLIC_CTSHARDWARE_COLLECT in modules)
     all_present = _ALL in modules
     if no_intersection or (all_present and not collect_present):
         return _format_modules_cmd(is_public,
                                    abi_to_run,
                                    modules,
                                    retry=retry,
-                                   whole_module_set=whole_module_set)
+                                   whole_module_set=whole_module_set,
+                                   is_hardware=is_hardware)
     elif collect_present:
-        return _format_collect_cmd(is_public, abi_to_run, retry=retry)
+        return _format_collect_cmd(is_public, abi_to_run, retry=retry,
+                   is_hardware=is_hardware)
     return None
 
 def get_retry_template(modules, is_public):
@@ -678,22 +784,24 @@
     return get_run_template(modules, is_public, retry=True)
 
 
-def get_extra_modules_dict(is_public, abi):
-    if not is_public:
+def get_extra_modules_dict(source_type, abi):
+    if source_type != SourceType.MOBLAB:
         return CONFIG['EXTRA_MODULES']
 
     extra_modules = copy.deepcopy(CONFIG['PUBLIC_EXTRA_MODULES'])
     if abi in CONFIG['EXTRA_SUBMODULE_OVERRIDE']:
-        for _, submodules in extra_modules.items():
+        for _, config in extra_modules.items():
             for old, news in CONFIG['EXTRA_SUBMODULE_OVERRIDE'][abi].items():
-                submodules.remove(old)
-                submodules.extend(news)
-    return {
-        module: {
-            'SUBMODULES': submodules,
-            'SUITES': [CONFIG['MOBLAB_SUITE_NAME']],
-        } for module, submodules in extra_modules.items()
-    }
+                if old in config.keys():
+                    suite = config[old]
+                    config.pop(old)
+                    for module in news:
+                        config[module] = suite
+
+    return extra_modules
+
+def get_extra_hardware_modules_dict(is_public, abi):
+    return CONFIG.get('HARDWAREONLY_EXTRA_MODULES', {})
 
 
 def get_extra_artifacts(modules):
@@ -757,6 +865,15 @@
     return False
 
 
+def needs_cts_helpers(modules):
+    """Oracle to determine if CTS helpers should be downloaded from DUT."""
+    if 'NEEDS_CTS_HELPERS' not in CONFIG:
+        return False
+    if modules.intersection(set(CONFIG['NEEDS_CTS_HELPERS'])):
+        return True
+    return False
+
+
 def enable_default_apps(modules):
     """Oracle to determine if to enable default apps (eg. Files.app)."""
     if modules.intersection(set(CONFIG['ENABLE_DEFAULT_APPS'])):
@@ -764,6 +881,11 @@
     return False
 
 
+def is_parameterized_module(module):
+    """Determines if the given module is a parameterized module."""
+    return '[' in module
+
+
 def get_controlfile_content(combined,
                             modules,
                             abi,
@@ -771,10 +893,11 @@
                             build,
                             uri,
                             suites=None,
-                            is_public=False,
-                            is_latest=False,
+                            source_type=None,
+                            abi_bits=None,
                             led_provision=None,
                             camera_facing=None,
+                            hardware_suite=False,
                             whole_module_set=None):
     """Returns the text inside of a control file.
 
@@ -783,24 +906,38 @@
                    file. If 'all' is specified, the control file will run
                    all the tests.
     """
+    is_public = (source_type == SourceType.MOBLAB)
     # We tag results with full revision now to get result directories containing
     # the revision. This fits stainless/ better.
-    tag = '%s' % get_extension(combined, abi, revision, is_public, led_provision,
-                               camera_facing)
+    tag = '%s' % get_extension(combined, abi, revision, is_public,
+                               led_provision, camera_facing, hardware_suite, abi_bits)
     # For test_that the NAME should be the same as for the control file name.
     # We could try some trickery here to get shorter extensions for a default
     # suite/ARM. But with the monthly uprevs this will quickly get confusing.
     name = '%s.%s' % (CONFIG['TEST_NAME'], tag)
     if not suites:
-        suites = get_suites(modules, abi, is_public, camera_facing)
+        suites = get_suites(modules, abi, is_public, camera_facing, hardware_suite)
     attributes = ', '.join(suites)
-    uri = 'LATEST' if is_latest else (None if is_public else uri)
+    uri = {
+            SourceType.MOBLAB: None,
+            SourceType.LATEST: 'LATEST',
+            SourceType.DEV: 'DEV'
+    }.get(source_type)
     target_module = None
     if (combined not in get_collect_modules(is_public) and combined != _ALL):
         target_module = combined
-    for target, config in get_extra_modules_dict(is_public, abi).items():
-        if combined in config['SUBMODULES']:
+    for target, config in get_extra_modules_dict(source_type, abi).items():
+        if combined in config.keys():
             target_module = target
+    abi_to_run = {
+            ("arm", 32): 'armeabi-v7a',
+            ("arm", 64): 'arm64-v8a',
+            ("x86", 32): 'x86',
+            ("x86", 64): 'x86_64'
+    }.get((abi, abi_bits), None)
+    subplan = None
+    if _CTSHARDWARE_COLLECT in modules or _PUBLIC_CTSHARDWARE_COLLECT in modules:
+        subplan = 'cts-hardware'
     return _CONTROLFILE_TEMPLATE.render(
             year=CONFIG['COPYRIGHT_YEAR'],
             name=name,
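
The (abi, abi_bits) lookup added above maps a board architecture plus the requested bitness to the tradefed ABI string. A tiny sketch of the same table; abi_for_bits is a hypothetical wrapper name:

def abi_for_bits(abi, abi_bits):
    # Returns the tradefed ABI string, or None when no explicit bitness
    # (and hence no ABI restriction) is requested.
    return {
            ('arm', 32): 'armeabi-v7a',
            ('arm', 64): 'arm64-v8a',
            ('x86', 32): 'x86',
            ('x86', 64): 'x86_64',
    }.get((abi, abi_bits))

assert abi_for_bits('arm', 64) == 'arm64-v8a'
assert abi_for_bits('x86', None) is None
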
@@ -817,22 +954,25 @@
             build=build,
             abi=abi,
             needs_push_media=needs_push_media(modules),
+            needs_cts_helpers=needs_cts_helpers(modules),
             enable_default_apps=enable_default_apps(modules),
             tag=tag,
             uri=uri,
             DOC=get_doc(modules, abi, is_public),
             servo_support_needed=servo_support_needed(modules, is_public),
+            wifi_info_needed=wifi_info_needed(modules, is_public),
             max_retries=get_max_retries(modules, abi, suites, is_public),
             timeout=calculate_timeout(modules, suites),
             run_template=get_run_template(modules,
                                           is_public,
                                           abi_to_run=CONFIG.get(
                                                   'REPRESENTATIVE_ABI',
-                                                  {}).get(abi, None),
-                                          whole_module_set=whole_module_set),
+                                                  {}).get(abi, abi_to_run),
+                                          whole_module_set=whole_module_set,
+                                          is_hardware=hardware_suite),
             retry_template=get_retry_template(modules, is_public),
             target_module=target_module,
-            target_plan=None,
+            target_plan=subplan,
             test_length=get_test_length(modules),
             priority=get_test_priority(modules, is_public),
             extra_args=get_extra_args(modules, is_public),
@@ -847,24 +987,41 @@
     Notice that the parsing gets broken at times with major new CTS drops.
     """
     tradefed = os.path.join(path, CONFIG['TRADEFED_EXECUTABLE_PATH'])
-    # Forgive me for I have sinned. Same as: chmod +x tradefed.
+    # Python's zipfile module does not set the executable bit.
+    # The tradefed and java commands need chmod +x.
     os.chmod(tradefed, os.stat(tradefed).st_mode | stat.S_IEXEC)
+    java = CONFIG.get('JAVA_EXECUTABLE_PATH', None)
+    if java:
+        java = os.path.join(path, java)
+        os.chmod(java, os.stat(java).st_mode | stat.S_IEXEC)
     cmd_list = [tradefed, 'list', 'modules']
     logging.info('Calling tradefed for list of modules.')
     with open(os.devnull, 'w') as devnull:
         # tradefed terminates itself if stdin is not a tty.
-        tradefed_output = subprocess.check_output(cmd_list, stdin=devnull)
+        tradefed_output = subprocess.check_output(cmd_list,
+                                                  stdin=devnull).decode()
 
-    # TODO(ihf): Get a tradefed command which terminates then refactor.
-    p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE)
+    _ABI_PREFIXES = ('arm', 'x86')
+    _MODULE_PREFIXES = ('Cts', 'cts-', 'signed-Cts', 'vm-tests-tf', 'Sts')
+
+    # Some CTS/GTS versions insert extra linebreaks due to a bug b/196912758.
+    # Below is a heuristic workaround for this situation.
+    lines = []
+    prev_line_abi_prefixed = False
+    for line in tradefed_output.splitlines():
+        abi_prefixed = line.startswith(_ABI_PREFIXES)
+        end_of_modules = (len(line) == 0 or 'Saved log to' in line)
+        if prev_line_abi_prefixed and not end_of_modules and not abi_prefixed:
+            # Merge a line immediately following 'abi XtsModuleName'
+            lines[-1] += line
+        else:
+            lines.append(line)
+        prev_line_abi_prefixed = abi_prefixed
+
     modules = set()
     build = '<unknown>'
-    line = ''
     revision = None
-    is_in_intaractive_mode = True
-    # The process does not terminate, but we know the last test is vm-tests-tf.
-    while True:
-        line = p.stdout.readline().strip()
+    for line in lines:
         # Android Compatibility Test Suite 7.0 (3423912)
         if (line.startswith('Android Compatibility Test Suite ')
                     or line.startswith('Android Google ')
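
The line-merging loop added above works around tradefed builds that wrap a module name onto the next line (b/196912758). A standalone sketch of the same heuristic; merge_wrapped_lines is an illustrative name:

def merge_wrapped_lines(tradefed_output):
    # Re-join a continuation line onto the preceding 'abi ModuleName' line,
    # unless it marks the end of the module listing.
    lines = []
    prev_abi_prefixed = False
    for line in tradefed_output.splitlines():
        abi_prefixed = line.startswith(('arm', 'x86'))
        end_of_modules = (len(line) == 0 or 'Saved log to' in line)
        if prev_abi_prefixed and not end_of_modules and not abi_prefixed:
            lines[-1] += line
        else:
            lines.append(line)
        prev_abi_prefixed = abi_prefixed
    return lines

# merge_wrapped_lines('arm64-v8a CtsNetTest\nCases')
#   -> ['arm64-v8a CtsNetTestCases']
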
@@ -873,45 +1030,18 @@
             logging.info('Unpacking: %s.', line)
             build = get_tradefed_build(line)
             revision = get_tradefed_revision(line)
-        elif line.startswith('Non-interactive mode: '):
-            is_in_intaractive_mode = False
-        elif line.startswith('arm') or line.startswith('x86'):
+        elif line.startswith(_ABI_PREFIXES):
             # Newer CTS shows ABI-module pairs like "arm64-v8a CtsNetTestCases"
             line = line.split()[1]
             if line not in CONFIG.get('EXCLUDE_MODULES', []):
                 modules.add(line)
-        elif line.startswith('Cts'):
+        elif line.startswith(_MODULE_PREFIXES):
+            # Older CTS plainly lists the module name
             modules.add(line)
-        elif line.startswith('Gts'):
-            # Older GTS plainly lists the module names
-            modules.add(line)
-        elif line.startswith('cts-'):
-            modules.add(line)
-        elif line.startswith('signed-Cts'):
-            modules.add(line)
-        elif line.startswith('vm-tests-tf'):
-            modules.add(line)
-            break  # TODO(ihf): Fix using this as EOS.
-        elif not line:
-            exit_code = p.poll()
-            if exit_code is not None:
-                # The process has automatically exited.
-                if is_in_intaractive_mode or exit_code != 0:
-                    # The process exited unexpectedly in interactive mode,
-                    # or exited with error in non-interactive mode.
-                    logging.warning(
-                        'The process has exited unexpectedly (exit code: %d)',
-                        exit_code)
-                    modules = set()
-                break
         elif line.isspace() or line.startswith('Use "help"'):
             pass
         else:
             logging.warning('Ignoring "%s"', line)
-    if p.poll() is None:
-        # Kill the process if alive.
-        p.kill()
-    p.wait()
 
     if not modules:
         raise Exception("no modules found.")
@@ -949,10 +1079,15 @@
             zf.extractall()
 
 
-def get_collect_modules(is_public):
+def get_collect_modules(is_public, is_hardware=False):
     if is_public:
+        if is_hardware:
+            return set([_PUBLIC_CTSHARDWARE_COLLECT])
         return set([_PUBLIC_COLLECT])
-    return set([_COLLECT])
+    else:
+        if is_hardware:
+            return set([_CTSHARDWARE_COLLECT])
+        return set([_COLLECT])
 
 
 @contextlib.contextmanager
@@ -971,7 +1106,7 @@
     Break after l+1 CamelCase word.
     Example: CtsDebugTestCases -> CtsDebug.
     """
-    s = re.findall('^[a-z-]+|[A-Z]*[^A-Z0-9]*', m)[0:l + 1]
+    s = re.findall('^[a-z-_]+|[A-Z]*[^A-Z0-9]*', m)[0:l + 1]
     # Ignore Test or TestCases at the end as they don't add anything.
     if len(s) > l:
         if s[l].startswith('Test') or s[l].startswith('['):
@@ -1057,7 +1192,7 @@
         # New name is first element '_-_' last element.
         # Notice there is a bug in $ADB_VENDOR_KEYS path name preventing
         # arbitrary characters.
-        prefix = v[0] + '_-_' + v[-1]
+        prefix = re.sub(r'\[[^]]*\]', '', v[0] + '_-_' + v[-1])
         combined[prefix] = set(v)
     return combined
 
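The prefix change above strips parameterized-module annotations before building the combined control-file name. A minimal sketch of just that substitution; strip_parameter_suffix is a hypothetical name for the inline re.sub call:

import re

def strip_parameter_suffix(name):
    # Remove bracketed annotations such as '[instant]' or '[secondary_user]',
    # which the $ADB_VENDOR_KEYS path-name bug cannot tolerate.
    return re.sub(r'\[[^]]*\]', '', name)

# strip_parameter_suffix('CtsSampleTestCases[instant]') -> 'CtsSampleTestCases'
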
@@ -1069,26 +1204,45 @@
                       build,
                       uri,
                       suites,
-                      is_public,
-                      is_latest=False,
-                      whole_module_set=None):
-    """Write a single control file."""
-    filename = get_controlfile_name(name, abi, revision, is_public)
-    content = get_controlfile_content(name,
-                                      modules,
-                                      abi,
-                                      revision,
-                                      build,
-                                      uri,
-                                      suites,
-                                      is_public,
-                                      is_latest,
-                                      whole_module_set=whole_module_set)
-    with open(filename, 'w') as f:
-        f.write(content)
+                      source_type,
+                      whole_module_set=None,
+                      hardware_suite=False,
+                      abi_bits=None):
+    """Write control files per each ABI or combined."""
+    is_public = (source_type == SourceType.MOBLAB)
+    abi_bits_list = []
+    config_key = 'PUBLIC_SPLIT_BY_BITS_MODULES' if is_public else 'SPLIT_BY_BITS_MODULES'
+    if modules & set(CONFIG.get(config_key, [])):
+        # If |abi| is predefined (like CTS), splits the modules by
+        # 32/64-bits. If not (like GTS) generate both arm and x86 jobs.
+        for abi_arch in [abi] if abi else ['arm', 'x86']:
+            for abi_bits in [32, 64]:
+                abi_bits_list.append((abi_arch, abi_bits))
+    else:
+        abi_bits_list.append((abi, None))
+
+    for abi, abi_bits in abi_bits_list:
+        filename = get_controlfile_name(name,
+                                        abi,
+                                        revision,
+                                        is_public,
+                                        abi_bits=abi_bits)
+        content = get_controlfile_content(name,
+                                          modules,
+                                          abi,
+                                          revision,
+                                          build,
+                                          uri,
+                                          suites,
+                                          source_type,
+                                          hardware_suite=hardware_suite,
+                                          whole_module_set=whole_module_set,
+                                          abi_bits=abi_bits)
+        with open(filename, 'w') as f:
+            f.write(content)
 
 
-def write_moblab_controlfiles(modules, abi, revision, build, uri, is_public):
+def write_moblab_controlfiles(modules, abi, revision, build, uri):
     """Write all control files for moblab.
 
     Nothing gets combined.
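
write_controlfile above now fans out per-bitness control files for modules listed in SPLIT_BY_BITS_MODULES (or PUBLIC_SPLIT_BY_BITS_MODULES for moblab). A sketch of just that fan-out decision; abi_bits_combinations is an illustrative helper:

def abi_bits_combinations(modules, abi, split_by_bits_modules):
    # Modules in the split list get one (abi, bits) entry per bitness; if the
    # bundle has no fixed abi (e.g. GTS), both arm and x86 jobs are produced.
    if set(modules) & set(split_by_bits_modules):
        abis = [abi] if abi else ['arm', 'x86']
        return [(a, bits) for a in abis for bits in (32, 64)]
    return [(abi, None)]

# abi_bits_combinations({'CtsFooTestCases'}, 'arm', ['CtsFooTestCases'])
#   -> [('arm', 32), ('arm', 64)]
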
@@ -1100,14 +1254,20 @@
     for module in modules:
         # No need to generate control files with extra suffix, since --module
         # option will cover variants with optional parameters.
-        if "[" in module:
+        if is_parameterized_module(module):
             continue
-        write_controlfile(module, set([module]), abi, revision, build, uri,
-                          [CONFIG['MOBLAB_SUITE_NAME']], is_public)
+        write_controlfile(module,
+                          set([module]),
+                          abi,
+                          revision,
+                          build,
+                          uri,
+                          None,
+                          source_type=SourceType.MOBLAB)
 
 
 def write_regression_controlfiles(modules, abi, revision, build, uri,
-                                  is_public, is_latest):
+                                  source_type):
     """Write all control files for stainless/ToT regression lab coverage.
 
     Regression coverage on tot currently relies heavily on watching stainless
@@ -1116,14 +1276,27 @@
     became too much in P (more than 300 per ABI). Instead we combine modules
     with similar names and run these in the same job (alphabetically).
     """
-    combined = combine_modules_by_common_word(set(modules))
-    for key in combined:
-        write_controlfile(key, combined[key], abi, revision, build, uri, None,
-                          is_public, is_latest)
+
+    if CONFIG.get('SINGLE_CONTROL_FILE'):
+        module_set = set(modules)
+        write_controlfile('all',
+                          module_set,
+                          abi,
+                          revision,
+                          build,
+                          uri,
+                          None,
+                          source_type,
+                          whole_module_set=module_set)
+    else:
+        combined = combine_modules_by_common_word(set(modules))
+        for key in combined:
+            write_controlfile(key, combined[key], abi, revision, build, uri,
+                              None, source_type)
 
 
 def write_qualification_controlfiles(modules, abi, revision, build, uri,
-                                     is_public):
+                                     source_type):
     """Write all control files to run "all" tests for qualification.
 
     Qualification was performed on N by running all tests using tradefed
@@ -1133,12 +1306,28 @@
     """
     combined = combine_modules_by_bookmark(set(modules))
     for key in combined:
-        write_controlfile('all.' + key, combined[key], abi, revision, build,
-                          uri, CONFIG.get('QUAL_SUITE_NAMES'), is_public)
+        if combined[key] & set(CONFIG.get('SPLIT_BY_BITS_MODULES', [])):
+            # If |abi| is predefined (like CTS), splits the modules by
+            # 32/64-bits. If not (like GTS) generate both arm and x86 jobs.
+            for abi_arch in [abi] if abi else ['arm', 'x86']:
+                for abi_bits in [32, 64]:
+                    write_controlfile('all.' + key,
+                                      combined[key],
+                                      abi_arch,
+                                      revision,
+                                      build,
+                                      uri,
+                                      CONFIG.get('QUAL_SUITE_NAMES'),
+                                      source_type,
+                                      abi_bits=abi_bits)
+        else:
+            write_controlfile('all.' + key, combined[key], abi,
+                              revision, build, uri,
+                              CONFIG.get('QUAL_SUITE_NAMES'), source_type)
 
 
 def write_qualification_and_regression_controlfile(modules, abi, revision,
-                                                   build, uri, is_public):
+                                                   build, uri, source_type):
     """Write a control file to run "all" tests for qualification and regression.
     """
     # For cts-instant, qualification control files are expected to cover
@@ -1154,69 +1343,126 @@
                           build,
                           uri,
                           suites,
-                          is_public,
+                          source_type,
                           whole_module_set=module_set)
 
 
-def write_collect_controlfiles(_modules, abi, revision, build, uri, is_public,
-                               is_latest):
+def write_collect_controlfiles(_modules,
+                               abi,
+                               revision,
+                               build,
+                               uri,
+                               source_type,
+                               is_hardware=False):
     """Write all control files for test collection used as reference to
 
     compute completeness (missing tests) on the CTS dashboard.
     """
+    is_public = (source_type == SourceType.MOBLAB)
     if is_public:
-        suites = [CONFIG['MOBLAB_SUITE_NAME']]
+        if is_hardware:
+            suites = [CONFIG['MOBLAB_HARDWARE_SUITE_NAME']]
+        else:
+            suites = [CONFIG['MOBLAB_SUITE_NAME']]
     else:
-        suites = CONFIG['INTERNAL_SUITE_NAMES'] \
-               + CONFIG.get('QUAL_SUITE_NAMES', [])
-    for module in get_collect_modules(is_public):
-        write_controlfile(module, set([module]), abi, revision, build, uri,
-                          suites, is_public, is_latest)
+        if is_hardware:
+            suites = [CONFIG['HARDWARE_SUITE_NAME']]
+        else:
+            suites = CONFIG['INTERNAL_SUITE_NAMES'] \
+                   + CONFIG.get('QUAL_SUITE_NAMES', [])
+    for module in get_collect_modules(is_public, is_hardware=is_hardware):
+        write_controlfile(module,
+                          set([module]),
+                          abi,
+                          revision,
+                          build,
+                          uri,
+                          suites,
+                          source_type,
+                          hardware_suite=is_hardware)
 
 
-def write_extra_controlfiles(_modules, abi, revision, build, uri, is_public,
-                             is_latest):
+def write_extra_controlfiles(_modules, abi, revision, build, uri, source_type):
     """Write all extra control files as specified in config.
 
     This is used by moblab to load balance large modules like Deqp, as well as
     making custom modules such as WM presubmit. A similar approach was also used
     during bringup of grunt to split media tests.
     """
-    for module, config in get_extra_modules_dict(is_public, abi).items():
-        for submodule in config['SUBMODULES']:
+    for module, config in get_extra_modules_dict(source_type, abi).items():
+        for submodule, suites in config.items():
             write_controlfile(submodule, set([submodule]), abi, revision,
-                              build, uri, config['SUITES'], is_public,
-                              is_latest)
+                              build, uri, suites, source_type)
 
 
-def write_extra_camera_controlfiles(abi, revision, build, uri, is_public):
-    """Control files for CtsCameraTestCases.camerabox.*"""
-    module = 'CtsCameraTestCases'
-    for facing in ['back', 'front']:
-        for led_provision in ['led', 'noled']:
-            name = get_controlfile_name(module, abi,
-                                        revision, is_public, led_provision, facing)
-            content = get_controlfile_content(module,
-                                              set([module]),
+def write_hardwaresuite_controlfiles(abi, revision, build, uri, source_type):
+    """Control files for Build variant hardware only tests."""
+    is_public = (source_type == SourceType.MOBLAB)
+    cts_hardware_modules = set(CONFIG.get('HARDWARE_MODULES', []))
+    for module in cts_hardware_modules:
+        name = get_controlfile_name(module, abi, revision, is_public,
+                                    hardware_suite=True)
+
+        content = get_controlfile_content(module,
+                                          set([module]),
+                                          abi,
+                                          revision,
+                                          build,
+                                          uri,
+                                          None,
+                                          source_type,
+                                          hardware_suite=True)
+
+        with open(name, 'w') as f:
+            f.write(content)
+
+    for module, config in get_extra_hardware_modules_dict(is_public, abi).items():
+        for submodule, suites in config.items():
+            name = get_controlfile_name(submodule, abi, revision, is_public,
+                                        hardware_suite=True)
+            content = get_controlfile_content(submodule,
+                                              set([submodule]),
                                               abi,
                                               revision,
                                               build,
                                               uri,
                                               None,
-                                              is_public,
-                                              led_provision=led_provision,
-                                              camera_facing=facing)
+                                              source_type,
+                                              hardware_suite=True)
             with open(name, 'w') as f:
                 f.write(content)
 
 
-def run(uris, is_public, is_latest, cache_dir):
+def write_extra_camera_controlfiles(abi, revision, build, uri, source_type):
+    """Control files for CtsCameraTestCases.camerabox.*"""
+    module = 'CtsCameraTestCases'
+    is_public = (source_type == SourceType.MOBLAB)
+    for facing in ['back', 'front']:
+        led_provision = 'noled'
+        name = get_controlfile_name(module, abi, revision, is_public,
+                                    led_provision, facing)
+        content = get_controlfile_content(module,
+                                          set([module]),
+                                          abi,
+                                          revision,
+                                          build,
+                                          uri,
+                                          None,
+                                          source_type,
+                                          led_provision=led_provision,
+                                          camera_facing=facing)
+        with open(name, 'w') as f:
+            f.write(content)
+
+
+def run(uris, source_type, cache_dir):
     """Downloads each bundle in |uris| and generates control files for each
 
     module as reported to us by tradefed.
     """
     for uri in uris:
         abi = get_bundle_abi(uri)
+        is_public = (source_type == SourceType.MOBLAB)
         # Get tradefed data by downloading & unzipping the files
         with TemporaryDirectory(prefix='cts-android_') as tmp:
             if cache_dir is not None:
@@ -1236,31 +1482,63 @@
                 raise Exception('Could not determine revision.')
 
             logging.info('Writing all control files.')
-            if is_public:
-                write_moblab_controlfiles(modules, abi, revision, build, uri,
-                                          is_public)
-            else:
-                if CONFIG['CONTROLFILE_WRITE_SIMPLE_QUAL_AND_REGRESS']:
-                    write_qualification_and_regression_controlfile(
-                            modules, abi, revision, build, uri, is_public)
-                else:
-                    write_regression_controlfiles(modules, abi, revision,
-                                                  build, uri, is_public,
-                                                  is_latest)
-                    write_qualification_controlfiles(modules, abi, revision,
-                                                     build, uri, is_public)
+            if source_type == SourceType.MOBLAB:
+                write_moblab_controlfiles(modules, abi, revision, build, uri)
 
-                if CONFIG['CONTROLFILE_WRITE_CAMERA']:
+            if CONFIG['CONTROLFILE_WRITE_SIMPLE_QUAL_AND_REGRESS']:
+                # It might be worth generating DEV control files too, but
+                # since this is used only for ARC-P CTS_Instant modules, whose
+                # regression is 99.99% covered by CTS DEV runs, having only
+                # LATEST is sufficient.
+                if source_type == SourceType.LATEST:
+                    write_qualification_and_regression_controlfile(
+                            modules, abi, revision, build, uri, source_type)
+            else:
+                if source_type == SourceType.DEV:
+                    write_regression_controlfiles(modules, abi, revision,
+                                                  build, uri, source_type)
+                if source_type == SourceType.LATEST:
+                    write_qualification_controlfiles(modules, abi, revision,
+                                                     build, uri, source_type)
+
+            if CONFIG['CONTROLFILE_WRITE_CAMERA']:
+                # For now camerabox is not stable enough for qualification
+                # purposes, so its usage is limited to DEV. This may need to
+                # be reconsidered in the future.
+                if source_type == SourceType.DEV:
                     write_extra_camera_controlfiles(abi, revision, build, uri,
-                                                    is_public)
+                                                    source_type)
 
             if CONFIG.get('CONTROLFILE_WRITE_COLLECT', True):
-                write_collect_controlfiles(modules, abi, revision, build, uri,
-                                           is_public, is_latest)
+                # Collect-test control files are used to check test
+                # completeness before qualification; they are not needed for DEV.
+                if source_type == SourceType.LATEST or source_type == SourceType.MOBLAB:
+                    for_hardware_suite = [False]
+                    if 'HARDWARE_MODULES' in CONFIG:
+                        for_hardware_suite.append(True)
+                    for is_hardware in for_hardware_suite:
+                        write_collect_controlfiles(modules,
+                                                   abi,
+                                                   revision,
+                                                   build,
+                                                   uri,
+                                                   source_type,
+                                                   is_hardware=is_hardware)
 
             if CONFIG['CONTROLFILE_WRITE_EXTRA']:
-                write_extra_controlfiles(None, abi, revision, build, uri,
-                                         is_public, is_latest)
+                # "EXTRA" control files are for workaround test instability
+                # by running only sub-tests. For now let's attribute them to
+                # qualification suites, since it is sometimes critical to
+                # have the stability for qualification. If needed we need to
+                # implement some way to add them to DEV suites as well.
+                if source_type == SourceType.LATEST or source_type == SourceType.MOBLAB:
+                    write_extra_controlfiles(None, abi, revision, build, uri,
+                                             source_type)
+
+            # "Hardware only" jobs are for reducing tests on qualification.
+            if source_type == SourceType.LATEST or source_type == SourceType.MOBLAB:
+                write_hardwaresuite_controlfiles(abi, revision, build, uri,
+                                                 source_type)
 
 
 def main(config):
@@ -1302,4 +1580,10 @@
             'bundle file if exists, or caches a downloaded file to this '
             'directory if not.')
     args = parser.parse_args()
-    run(args.uris, args.is_public, args.is_latest, args.cache_dir)
+    if args.is_public:
+        source_type = SourceType.MOBLAB
+    elif args.is_latest:
+        source_type = SourceType.LATEST
+    else:
+        source_type = SourceType.DEV
+    run(args.uris, source_type, args.cache_dir)
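The SourceType enum used throughout this hunk (MOBLAB, LATEST, DEV) is defined earlier in the file, outside the visible context. Below is a minimal sketch of what the flag-to-source mapping above assumes; the enum member names come from the calls in the diff, while the enum values and the helper function are purely illustrative.

import enum


class SourceType(enum.Enum):
    """Sketch only: where the xTS bundle comes from. The real definition
    lives earlier in this file and may differ."""
    MOBLAB = 'moblab'    # public bundles consumed by moblab partners
    LATEST = 'latest'    # latest official bundle, used for qualification
    DEV = 'dev'          # development bundle, used for regression (DEV) runs


def source_type_from_flags(is_public, is_latest):
    """Mirrors the precedence in main(): --is_public wins over --is_latest."""
    if is_public:
        return SourceType.MOBLAB
    return SourceType.LATEST if is_latest else SourceType.DEV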
diff --git a/server/cros/tradefed/push_arc_image.py b/server/cros/tradefed/push_arc_image.py
new file mode 100644
index 0000000..bf2eef6
--- /dev/null
+++ b/server/cros/tradefed/push_arc_image.py
@@ -0,0 +1,108 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+
+_BUILDS_BUCKET = 'gs://chromeos-arc-images/builds'
+
+_ABI_MAP = {
+        'armeabi-v7a': 'arm',
+        'arm64-v8a': 'arm64',
+        'x86': 'x86',
+        'x86_64': 'x86_64'
+}
+
+# Builds at or above these versions contain logic in push_to_device.py that
+# supports the HOST:PORT format for specifying the remote machine.
+_PTD_MIN_VERSION_MAP = {
+        'pi-arc': 7740639,
+        'rvc-arc': 7741959,
+        'sc-arc-dev': 7743996,
+}
+
+
+def push_userdebug_image(host, branch_prefix, lunch_target, download_func,
+                         install_bundle_func, run_func):
+    """This pushes a userdebug android image to the host.
+
+    This downloads the userdebug android image and push_to_device.py tool
+    from a google storage bucket.
+    This uses the push_to_device.py tool to push the image onto the host.
+
+    @param host: target host to push the image to.
+    @param branch_prefix: the branch name prefix of where the image is
+                          (e.g. rvc-arc, pi-arc). This does not have to be
+                          the exact branch name for a particular release
+                          (e.g. rvc-arc-m91).
+    @param lunch_target: the target lunch name (e.g. cheets, bertha)
+    @param download_func: function for downloading an object. This should be
+                          self._download_to_cache when invoked from the
+                          TradefedTest class.
+    @param install_bundle_func: function for downloading and unarchiving
+                                files. This should be self._install_bundle
+                                when invoked from the TradefedTest class.
+    @param run_func: function for running a command. This should be self._run
+                     when invoked from the TradefedTest class.
+
+    @returns True on success, False otherwise.
+    """
+    arc_version = host.get_arc_version()
+    if not arc_version:
+        logging.error('Failed to determine ARC version.')
+        return False
+
+    # The split is necessary because push_to_device.py puts the whole image
+    # name in CHROMEOS_ARC_VERSION, e.g. bertha_x86_64-img-7759413.
+    # The split is harmless even if the value is just a number, e.g.
+    # CHROMEOS_ARC_VERSION=7759413.
+    arc_version = int(arc_version.split('-')[-1])
+
+    abi = _ABI_MAP[host.get_arc_primary_abi()]
+
+    # Use '*' here to let gsutil figure out the release branch name.
+    # arc_version is unique, so the wildcard will not match multiple branches.
+    image_base_uri = '{}/git_{}-*linux-{}_{}-userdebug'.format(
+            _BUILDS_BUCKET, branch_prefix, lunch_target, abi)
+
+    image_uri = '{}/{}/{}_{}-img-{}.zip'.format(image_base_uri, arc_version,
+                                                lunch_target, abi, arc_version)
+    se_policy_uri = '{}/{}/sepolicy.zip'.format(image_base_uri, arc_version)
+
+    image_file = download_func(image_uri)
+    se_policy_file = download_func(se_policy_uri)
+
+    if branch_prefix in _PTD_MIN_VERSION_MAP:
+        ptd_version = max(arc_version, _PTD_MIN_VERSION_MAP[branch_prefix])
+    else:
+        logging.warning(
+                '{} is not in _PTD_MIN_VERSION_MAP. This might fail to fetch '
+                'the push_to_device tool.'.format(branch_prefix))
+        ptd_version = arc_version
+
+    push_to_device_uri = '{}/{}/push_to_device.zip'.format(
+            image_base_uri, ptd_version)
+
+    push_to_device_dir = install_bundle_func(push_to_device_uri)
+    push_to_device_tool = os.path.join(push_to_device_dir, 'push_to_device.py')
+
+    # This file on the device tells the infrastructure that the device has to
+    # be reprovisioned before running other tasks.
+    host.run('touch /mnt/stateful_partition/.force_provision')
+    logging.info('Pushing ARC userdebug image {} to {}.'.format(
+            arc_version, host.host_port))
+    run_func(
+            push_to_device_tool,
+            args=[
+                    '--use-prebuilt-file',
+                    image_file,
+                    '--sepolicy-artifacts-path',
+                    se_policy_file,
+                    '--force',
+                    host.host_port,
+            ],
+            ignore_status=False,
+            verbose=True,
+            nickname='Push userdebug image.',
+    )
+    return True
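As the docstring above notes, the three callables are meant to be bound methods of a TradefedTest instance. The following is a hypothetical calling sketch under that assumption; the method names self._download_to_cache, self._install_bundle and self._run are taken from the docstring, while the wrapper name, branch prefix and lunch target are illustrative only.

import logging

from autotest_lib.server.cros.tradefed import push_arc_image


def _provision_userdebug_image(self, host):
    # Hypothetical helper on a TradefedTest subclass (sketch, not the real API).
    ok = push_arc_image.push_userdebug_image(
            host,
            branch_prefix='rvc-arc',          # example branch prefix
            lunch_target='bertha',            # example lunch target
            download_func=self._download_to_cache,
            install_bundle_func=self._install_bundle,
            run_func=self._run)
    if not ok:
        logging.error('Could not push the userdebug image; '
                      'continuing with the preinstalled ARC image.')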
diff --git a/server/cros/tradefed/push_arc_image_unittest.py b/server/cros/tradefed/push_arc_image_unittest.py
new file mode 100644
index 0000000..177f2fb
--- /dev/null
+++ b/server/cros/tradefed/push_arc_image_unittest.py
@@ -0,0 +1,416 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+from unittest.mock import Mock, ANY, call
+
+from autotest_lib.server.cros.tradefed import push_arc_image
+
+# Use this version for tests if there isn't a special version that has to be
+# tested.
+_TEST_DEFAULT_ARC_VERSION = '7750398'
+
+# Use this host port combination if there isn't a special configuration that has
+# to be tested.
+_TEST_HOST_PORT = 'somehost:9384'
+
+# Expected command to be run on the host (device) to mark the device for
+# reprovisioning.
+_MARK_DIRTY_PROVISION_COMMAND = 'touch /mnt/stateful_partition/.force_provision'
+
+# The default arguments expected to be passed to run() when mocks created by
+# the create*() methods are used unmodified.
+_DEFAULT_EXPECTED_RUN_ARGS = [
+        '--use-prebuilt-file',
+        'arc-img.zip',
+        '--sepolicy-artifacts-path',
+        'sepolicy.zip',
+        '--force',
+        'somehost:9384',
+]
+
+# The default script path expected to be passed to run() when mocks created by
+# the create*() methods are used unmodified.
+_DEFAULT_EXPECTED_PTD_PATH = 'some/extracted/dir/push_to_device.py'
+
+
+class PushArcImageTest(unittest.TestCase):
+    """Unittest for push_arc_image."""
+
+    def createMockHost(self, version, abi):
+        mock_host = Mock()
+        mock_host.get_arc_version.return_value = version
+        mock_host.get_arc_primary_abi.return_value = abi
+        mock_host.host_port = _TEST_HOST_PORT
+        return mock_host
+
+    def createMockDownloadFunc(self):
+        mock_download_func = Mock()
+        mock_download_func.side_effect = ['arc-img.zip', 'sepolicy.zip']
+        return mock_download_func
+
+    def createMockInstallBundleFunc(self):
+        mock_install_bundle_func = Mock()
+        mock_install_bundle_func.return_value = 'some/extracted/dir'
+        return mock_install_bundle_func
+
+    def test_push_userdebug_image_bertha_arm64(self):
+        mock_host = self.createMockHost(_TEST_DEFAULT_ARC_VERSION, 'arm64-v8a')
+        mock_download_func = self.createMockDownloadFunc()
+        mock_install_bundle_func = self.createMockInstallBundleFunc()
+        mock_run_func = Mock()
+
+        in_sequence = Mock()
+        in_sequence.attach_mock(mock_run_func, 'run')
+        in_sequence.attach_mock(mock_host.run, 'host_run')
+
+        self.assertTrue(
+                push_arc_image.push_userdebug_image(mock_host, 'rvc-arc',
+                                                    'bertha',
+                                                    mock_download_func,
+                                                    mock_install_bundle_func,
+                                                    mock_run_func))
+
+        mock_host.get_arc_version.assert_called_once()
+        mock_host.get_arc_primary_abi.assert_called_once()
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_rvc-arc-*linux-bertha_arm64-userdebug/'
+                '7750398/bertha_arm64-img-7750398.zip')
+
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_rvc-arc-*linux-bertha_arm64-userdebug/'
+                '7750398/sepolicy.zip')
+
+        mock_install_bundle_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_rvc-arc-*linux-bertha_arm64-userdebug/'
+                '7750398/push_to_device.zip')
+
+        expected_calls = [
+                call.host_run(_MARK_DIRTY_PROVISION_COMMAND),
+                call.run(
+                        _DEFAULT_EXPECTED_PTD_PATH,
+                        args=_DEFAULT_EXPECTED_RUN_ARGS,
+                        ignore_status=ANY,
+                        verbose=ANY,
+                        nickname=ANY,
+                ),
+        ]
+        self.assertEqual(in_sequence.mock_calls, expected_calls)
+
+    def test_push_userdebug_image_bertha_x86_64(self):
+        mock_host = self.createMockHost(_TEST_DEFAULT_ARC_VERSION, 'x86_64')
+        mock_download_func = self.createMockDownloadFunc()
+        mock_install_bundle_func = self.createMockInstallBundleFunc()
+        mock_run_func = Mock()
+
+        in_sequence = Mock()
+        in_sequence.attach_mock(mock_run_func, 'run')
+        in_sequence.attach_mock(mock_host.run, 'host_run')
+
+        self.assertTrue(
+                push_arc_image.push_userdebug_image(mock_host, 'rvc-arc',
+                                                    'bertha',
+                                                    mock_download_func,
+                                                    mock_install_bundle_func,
+                                                    mock_run_func))
+
+        mock_host.get_arc_version.assert_called_once()
+        mock_host.get_arc_primary_abi.assert_called_once()
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_rvc-arc-*linux-bertha_x86_64-userdebug/'
+                '7750398/bertha_x86_64-img-7750398.zip')
+
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_rvc-arc-*linux-bertha_x86_64-userdebug/'
+                '7750398/sepolicy.zip')
+
+        mock_install_bundle_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_rvc-arc-*linux-bertha_x86_64-userdebug/'
+                '7750398/push_to_device.zip')
+
+        expected_calls = [
+                call.host_run(_MARK_DIRTY_PROVISION_COMMAND),
+                call.run(
+                        _DEFAULT_EXPECTED_PTD_PATH,
+                        args=_DEFAULT_EXPECTED_RUN_ARGS,
+                        ignore_status=ANY,
+                        verbose=ANY,
+                        nickname=ANY,
+                ),
+        ]
+        self.assertEqual(in_sequence.mock_calls, expected_calls)
+
+    def test_push_userdebug_image_cheets_arm(self):
+        mock_host = self.createMockHost(_TEST_DEFAULT_ARC_VERSION,
+                                        'armeabi-v7a')
+        mock_download_func = self.createMockDownloadFunc()
+        mock_install_bundle_func = self.createMockInstallBundleFunc()
+        mock_run_func = Mock()
+
+        in_sequence = Mock()
+        in_sequence.attach_mock(mock_run_func, 'run')
+        in_sequence.attach_mock(mock_host.run, 'host_run')
+
+        self.assertTrue(
+                push_arc_image.push_userdebug_image(mock_host, 'pi-arc',
+                                                    'cheets',
+                                                    mock_download_func,
+                                                    mock_install_bundle_func,
+                                                    mock_run_func))
+
+        mock_host.get_arc_version.assert_called_once()
+        mock_host.get_arc_primary_abi.assert_called_once()
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_arm-userdebug/'
+                '7750398/cheets_arm-img-7750398.zip')
+
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_arm-userdebug/'
+                '7750398/sepolicy.zip')
+
+        mock_install_bundle_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_arm-userdebug/'
+                '7750398/push_to_device.zip')
+
+        expected_calls = [
+                call.host_run(_MARK_DIRTY_PROVISION_COMMAND),
+                call.run(
+                        _DEFAULT_EXPECTED_PTD_PATH,
+                        args=_DEFAULT_EXPECTED_RUN_ARGS,
+                        ignore_status=ANY,
+                        verbose=ANY,
+                        nickname=ANY,
+                ),
+        ]
+        self.assertEqual(in_sequence.mock_calls, expected_calls)
+
+    def test_push_userdebug_image_cheets_arm64(self):
+        mock_host = self.createMockHost(_TEST_DEFAULT_ARC_VERSION, 'arm64-v8a')
+        mock_download_func = self.createMockDownloadFunc()
+        mock_install_bundle_func = self.createMockInstallBundleFunc()
+        mock_run_func = Mock()
+
+        in_sequence = Mock()
+        in_sequence.attach_mock(mock_run_func, 'run')
+        in_sequence.attach_mock(mock_host.run, 'host_run')
+
+        self.assertTrue(
+                push_arc_image.push_userdebug_image(mock_host, 'pi-arc',
+                                                    'cheets',
+                                                    mock_download_func,
+                                                    mock_install_bundle_func,
+                                                    mock_run_func))
+
+        mock_host.get_arc_version.assert_called_once()
+        mock_host.get_arc_primary_abi.assert_called_once()
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_arm64-userdebug/'
+                '7750398/cheets_arm64-img-7750398.zip')
+
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_arm64-userdebug/'
+                '7750398/sepolicy.zip')
+
+        mock_install_bundle_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_arm64-userdebug/'
+                '7750398/push_to_device.zip')
+
+        expected_calls = [
+                call.host_run(_MARK_DIRTY_PROVISION_COMMAND),
+                call.run(
+                        _DEFAULT_EXPECTED_PTD_PATH,
+                        args=_DEFAULT_EXPECTED_RUN_ARGS,
+                        ignore_status=ANY,
+                        verbose=ANY,
+                        nickname=ANY,
+                ),
+        ]
+        self.assertEqual(in_sequence.mock_calls, expected_calls)
+
+    def test_push_userdebug_image_cheets_x86(self):
+        mock_host = self.createMockHost(_TEST_DEFAULT_ARC_VERSION, 'x86')
+        mock_download_func = self.createMockDownloadFunc()
+        mock_install_bundle_func = self.createMockInstallBundleFunc()
+        mock_run_func = Mock()
+
+        in_sequence = Mock()
+        in_sequence.attach_mock(mock_run_func, 'run')
+        in_sequence.attach_mock(mock_host.run, 'host_run')
+
+        self.assertTrue(
+                push_arc_image.push_userdebug_image(mock_host, 'pi-arc',
+                                                    'cheets',
+                                                    mock_download_func,
+                                                    mock_install_bundle_func,
+                                                    mock_run_func))
+
+        mock_host.get_arc_version.assert_called_once()
+        mock_host.get_arc_primary_abi.assert_called_once()
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_x86-userdebug/'
+                '7750398/cheets_x86-img-7750398.zip')
+
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_x86-userdebug/'
+                '7750398/sepolicy.zip')
+
+        mock_install_bundle_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_x86-userdebug/'
+                '7750398/push_to_device.zip')
+
+        expected_calls = [
+                call.host_run(_MARK_DIRTY_PROVISION_COMMAND),
+                call.run(
+                        _DEFAULT_EXPECTED_PTD_PATH,
+                        args=_DEFAULT_EXPECTED_RUN_ARGS,
+                        ignore_status=ANY,
+                        verbose=ANY,
+                        nickname=ANY,
+                ),
+        ]
+        self.assertEqual(in_sequence.mock_calls, expected_calls)
+
+    def test_push_userdebug_image_cheets_x86_64(self):
+        mock_host = self.createMockHost(_TEST_DEFAULT_ARC_VERSION, 'x86_64')
+        mock_download_func = self.createMockDownloadFunc()
+        mock_install_bundle_func = self.createMockInstallBundleFunc()
+        mock_run_func = Mock()
+
+        in_sequence = Mock()
+        in_sequence.attach_mock(mock_run_func, 'run')
+        in_sequence.attach_mock(mock_host.run, 'host_run')
+
+        self.assertTrue(
+                push_arc_image.push_userdebug_image(mock_host, 'pi-arc',
+                                                    'cheets',
+                                                    mock_download_func,
+                                                    mock_install_bundle_func,
+                                                    mock_run_func))
+
+        mock_host.get_arc_version.assert_called_once()
+        mock_host.get_arc_primary_abi.assert_called_once()
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_x86_64-userdebug/'
+                '7750398/cheets_x86_64-img-7750398.zip')
+
+        mock_download_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_x86_64-userdebug/'
+                '7750398/sepolicy.zip')
+
+        mock_install_bundle_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_x86_64-userdebug/'
+                '7750398/push_to_device.zip')
+
+        expected_calls = [
+                call.host_run(_MARK_DIRTY_PROVISION_COMMAND),
+                call.run(
+                        _DEFAULT_EXPECTED_PTD_PATH,
+                        args=_DEFAULT_EXPECTED_RUN_ARGS,
+                        ignore_status=ANY,
+                        verbose=ANY,
+                        nickname=ANY,
+                ),
+        ]
+        self.assertEqual(in_sequence.mock_calls, expected_calls)
+
+    # Only newer push_to_device versions support the HOST:PORT format.
+    # Verify that if the build ID on the device is old, a newer
+    # push_to_device.py with the necessary features is downloaded.
+    def test_push_userdebug_image_old_image_bertha(self):
+        mock_host = self.createMockHost('5985921', 'x86_64')
+        mock_download_func = self.createMockDownloadFunc()
+        mock_install_bundle_func = self.createMockInstallBundleFunc()
+        mock_run_func = Mock()
+
+        self.assertTrue(
+                push_arc_image.push_userdebug_image(mock_host, 'rvc-arc',
+                                                    'bertha',
+                                                    mock_download_func,
+                                                    mock_install_bundle_func,
+                                                    mock_run_func))
+
+        mock_host.get_arc_version.assert_called_once()
+        mock_host.get_arc_primary_abi.assert_called_once()
+
+        mock_install_bundle_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_rvc-arc-*linux-bertha_x86_64-userdebug/'
+                '7741959/push_to_device.zip')
+
+    # Cheets has a different "minimum" version compared to bertha.
+    def test_push_userdebug_image_old_image_cheets(self):
+        mock_host = self.createMockHost('5985921', 'x86_64')
+        mock_download_func = self.createMockDownloadFunc()
+        mock_install_bundle_func = self.createMockInstallBundleFunc()
+        mock_run_func = Mock()
+
+        self.assertTrue(
+                push_arc_image.push_userdebug_image(mock_host, 'pi-arc',
+                                                    'cheets',
+                                                    mock_download_func,
+                                                    mock_install_bundle_func,
+                                                    mock_run_func))
+
+        mock_host.get_arc_version.assert_called_once()
+        mock_host.get_arc_primary_abi.assert_called_once()
+
+        mock_install_bundle_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_pi-arc-*linux-cheets_x86_64-userdebug/'
+                '7740639/push_to_device.zip')
+
+    # Even if the branch prefix is unknown, it should still try to get the
+    # PTD tool.
+    def test_push_userdebug_image_unknown_branch_prefix(self):
+        mock_host = self.createMockHost('123456789', 'x86_64')
+        mock_download_func = self.createMockDownloadFunc()
+        mock_install_bundle_func = self.createMockInstallBundleFunc()
+        mock_run_func = Mock()
+        self.assertTrue(
+                push_arc_image.push_userdebug_image(mock_host,
+                                                    'myspecialbranch',
+                                                    'bertha',
+                                                    mock_download_func,
+                                                    mock_install_bundle_func,
+                                                    mock_run_func))
+
+        mock_host.get_arc_version.assert_called_once()
+        mock_host.get_arc_primary_abi.assert_called_once()
+
+        mock_install_bundle_func.assert_any_call(
+                'gs://chromeos-arc-images/builds/'
+                'git_myspecialbranch-*linux-bertha_x86_64-userdebug/'
+                '123456789/push_to_device.zip')
+
+    # ARC version returned by the host could be None. Verify the function
+    # returns False.
+    def test_push_userdebug_image_failed_to_get_arc_version(self):
+        mock_host = self.createMockHost(None, 'x86_64')
+        mock_download_func = self.createMockDownloadFunc()
+        mock_install_bundle_func = self.createMockInstallBundleFunc()
+        mock_run_func = Mock()
+
+        self.assertFalse(
+                push_arc_image.push_userdebug_image(mock_host, 'rvc-arc',
+                                                    'bertha',
+                                                    mock_download_func,
+                                                    mock_install_bundle_func,
+                                                    mock_run_func))
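The two "old image" tests above pin down the version floor: when the device build is older than the branch's entry in _PTD_MIN_VERSION_MAP, push_to_device.zip is fetched at that minimum version instead. Here is a small self-contained illustration of that max() rule; the map values are copied from push_arc_image.py above, and the helper name is hypothetical.

# Values copied from push_arc_image._PTD_MIN_VERSION_MAP above.
_PTD_MIN_VERSION_MAP = {
        'pi-arc': 7740639,
        'rvc-arc': 7741959,
        'sc-arc-dev': 7743996,
}


def ptd_version_for(branch_prefix, arc_version):
    # Never fetch a push_to_device.zip older than the first build that
    # understands HOST:PORT targets; unknown branches fall back to arc_version.
    return max(arc_version, _PTD_MIN_VERSION_MAP.get(branch_prefix, arc_version))


assert ptd_version_for('rvc-arc', 5985921) == 7741959   # old bertha build
assert ptd_version_for('pi-arc', 5985921) == 7740639    # cheets floor differs
assert ptd_version_for('rvc-arc', 7750398) == 7750398   # new build unchanged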
diff --git a/server/cros/tradefed/tradefed_chromelogin.py b/server/cros/tradefed/tradefed_chromelogin.py
index 255c086..93cd70b 100644
--- a/server/cros/tradefed/tradefed_chromelogin.py
+++ b/server/cros/tradefed/tradefed_chromelogin.py
@@ -1,9 +1,11 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import contextlib
 import logging
+import os
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.server import autotest
@@ -20,9 +22,13 @@
         if hard_reboot and self._host.servo:
             self._hard_reboot_on_failure = True
 
-    def __init__(self, host, board=None, dont_override_profile=False,
-                 enable_default_apps=False, toggle_ndk=False,
-                 nativebridge64=False):
+    def __init__(self,
+                 host,
+                 board=None,
+                 dont_override_profile=False,
+                 enable_default_apps=False,
+                 toggle_ndk=False,
+                 log_dir=None):
         """Initializes the ChromeLogin object.
 
         @param board: optional parameter to extend timeout for login for slow
@@ -30,7 +36,8 @@
         @param dont_override_profile: reuses the existing test profile if any
         @param enable_default_apps: enables default apps (like Files app)
         @param toggle_ndk: toggles native bridge engine switch.
-        @param nativebridge64: enables 64-bit native bridge experiment.
+        @param log_dir: Any log files for this Chrome session are written to
+               this directory.
         """
         self._host = host
         self._timeout = constants.LOGIN_BOARD_TIMEOUT.get(
@@ -40,7 +47,7 @@
         self._need_reboot = False
         self._hard_reboot_on_failure = False
         self._toggle_ndk = toggle_ndk
-        self._nativebridge64 = nativebridge64
+        self._log_dir = log_dir
 
     def _cmd_builder(self, verbose=False):
         """Gets remote command to start browser with ARC enabled."""
@@ -54,6 +61,9 @@
         cmd += ' --no-startup-window'
         # Disable several forms of auto-installation to stabilize the tests.
         cmd += ' --no-arc-syncs'
+        # TODO(b/196460968) delete after M96 branch, or after finishing the
+        # experiment.
+        cmd += ' --feature=NotificationsRefresh'
         # Toggle the translation from houdini to ndk
         if self._toggle_ndk:
             cmd += ' --toggle_ndk'
@@ -65,8 +75,6 @@
         if self._enable_default_apps:
             logging.info('Using --enable_default_apps to start Chrome.')
             cmd += ' --enable_default_apps'
-        if self._nativebridge64:
-            cmd += ' --nativebridge64'
         if not verbose:
             cmd += ' > /dev/null 2>&1'
         return cmd
@@ -92,7 +100,7 @@
         if install_autotest:
             # Installs the autotest client to the DUT by running a no-op test.
             autotest.Autotest(self._host).run_timed_test(
-                'dummy_Pass', timeout=2 * timeout, check_client_result=True)
+                    'stub_Pass', timeout=2 * timeout, check_client_result=True)
             # The (re)run the login script.
             self._login_by_script(timeout=timeout, verbose=verbose)
 
@@ -127,7 +135,15 @@
                 raise error.TestError('Failed to login to Chrome')
 
     def exit(self):
-        """On exit restart the browser or reboot the machine."""
+        """On exit restart the browser or reboot the machine.
+
+        If self._log_dir is set, the VM kernel log is written
+        to a file.
+
+        """
+        if self._log_dir:
+            self._write_kernel_log()
+
         if not self._need_reboot:
             try:
                 self._restart()
@@ -137,6 +153,23 @@
         if self._need_reboot:
             self._reboot()
 
+    def _write_kernel_log(self):
+        """Writes ARCVM kernel logs."""
+        if not os.path.exists(self._log_dir):
+            os.makedirs(self._log_dir)
+
+        output_path = os.path.join(
+                self._log_dir, '%s_vm_pstore_dump.txt' % self._host.hostname)
+
+        with open(output_path, 'w') as f:
+            try:
+                logging.info('Getting VM kernel logs.')
+                self._host.run('/usr/bin/vm_pstore_dump', stdout_tee=f)
+            except Exception as e:
+                logging.error('vm_pstore_dump command failed: %s', e)
+            else:
+                logging.info('Wrote VM kernel logs.')
+
     def _restart(self):
         """Restart Chrome browser."""
         # We clean up /tmp (which is memory backed) from crashes and
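Taken together, the log_dir changes above mean a caller can ask ChromeLogin to preserve the ARCVM kernel log on teardown. A hypothetical usage sketch follows; only the log_dir parameter and the exit() behaviour are taken from the diff, and the surrounding wiring is illustrative.

from autotest_lib.server.cros.tradefed import tradefed_chromelogin as login


def teardown_with_kernel_log(host, results_dir):
    # Hypothetical wiring (sketch). log_dir is the new constructor parameter;
    # exit() then writes <hostname>_vm_pstore_dump.txt into results_dir via
    # /usr/bin/vm_pstore_dump before restarting the browser or rebooting.
    session = login.ChromeLogin(host, log_dir=results_dir)
    ...  # log in and run the test as usual (entry points omitted here)
    session.exit()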
diff --git a/server/cros/tradefed/tradefed_constants.py b/server/cros/tradefed/tradefed_constants.py
index b9d5155..ca0a33e 100644
--- a/server/cros/tradefed/tradefed_constants.py
+++ b/server/cros/tradefed/tradefed_constants.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,14 +7,17 @@
 SDK_TOOLS_DIR = 'gs://chromeos-arc-images/builds/git_nyc-mr1-arc-linux-static_sdk_tools/3544738'
 SDK_TOOLS_FILES = ['aapt']
 
-# To stabilize adb behavior, we use statically linked adb.
-ADB_DIR = 'gs://chromeos-arc-images/builds/git_qt-release-static_sdk_tools/6118618'
+# Use an old version of adb as a speculative workaround for b/183438202.
+ADB_DIR_OLD = 'gs://chromeos-arc-images/builds/git_qt-release-static_sdk_tools/6118618'
+# adb 31.0.0 from https://developer.android.com/studio/releases/platform-tools
+ADB_DIR = 'gs://chromeos-arc-images/builds/aosp-sdk-release/7110759/'
 ADB_FILES = ['adb']
 
 ADB_POLLING_INTERVAL_SECONDS = 1
 ADB_CONNECT_TIMEOUT_SECONDS = 10
 ADB_KILL_SERVER_TIMEOUT_SECONDS = 10
 ADB_READY_TIMEOUT_SECONDS = 30
+ADB_PUSH_MEDIASTRESS_TIMEOUT_SECONDS = 600
 
 ARC_POLLING_INTERVAL_SECONDS = 1
 ARC_READY_TIMEOUT_SECONDS = 60
@@ -34,22 +38,28 @@
 TRADEFED_CACHE_CONTAINER = '/usr/local/autotest/results/shared/cache'
 TRADEFED_CACHE_CONTAINER_LOCK = '/usr/local/autotest/results/shared/lock'
 # The maximum size of the shared global cache. It needs to be able to hold
-# N, M, x86, arm CTS bundles (500MB), the GTS bundle and media stress videos
-# (2GB) zipped to not thrash. In addition it needs to be able to hold one
-# different revision per Chrome OS branch. While this sounds  like a lot,
-# only a single bundle is copied into each lxc instance (500MB), hence the
-# impact of running say 100 CTS tests in parallel is acceptable (quarter
-# servers have 500GB of disk, while full servers have 2TB).
-TRADEFED_CACHE_MAX_SIZE = (20 * 1024 * 1024 * 1024)
+# P, R, x86, arm, official, dev CTS bundles, as well as GTS bundles, and
+# media assets. (See b/126165348#comment40 for the calculation.)
+# In the current implementation, each test instance just symlinks to the
+# shared cache for the majority of the content, so running multiple parallel
+# CTS tests should be acceptable in terms of storage.
+TRADEFED_CACHE_MAX_SIZE = (100 * 1024 * 1024 * 1024)
 # The path that cts-tradefed uses to place media assets. By downloading and
 # expanding the archive here beforehand, tradefed can reuse the content.
 TRADEFED_MEDIA_PATH = '/tmp/android-cts-media'
+# The property tradefed reads to decide which helpers to install.
+TRADEFED_CTS_HELPERS_PROPERTY = 'ro.vendor.cts_interaction_helper_packages'
+# The directory on the board where CTS helpers can be found.
+BOARD_CTS_HELPERS_DIR = '/usr/local/opt/google/vms/android'
 
 # It looks like the GCE builder can be very slow and login on VMs take much
 # longer than on hardware or bare metal.
 LOGIN_BOARD_TIMEOUT = {'betty': 300, 'betty-arcnext': 300, 'betty-pi-arc': 300}
 LOGIN_DEFAULT_TIMEOUT = 90
 
+# List of boards for which we want to run CTS in tablet mode on some models.
+TABLET_MODE_BOARDS = ('kukui', 'nocturne', 'scarlet')
+
 # Approximately assume ChromeOS revision Rdd-xxxxx.y.z with y>=45 as stable.
 APPROXIMATE_STABLE_BRANCH_NUMBER = 45
 
diff --git a/server/cros/tradefed/tradefed_prerequisite.py b/server/cros/tradefed/tradefed_prerequisite.py
index 5364c55..881cbfc 100644
--- a/server/cros/tradefed/tradefed_prerequisite.py
+++ b/server/cros/tradefed/tradefed_prerequisite.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/cros/tradefed/tradefed_test.py b/server/cros/tradefed/tradefed_test.py
index e47d860..66d518c 100644
--- a/server/cros/tradefed/tradefed_test.py
+++ b/server/cros/tradefed/tradefed_test.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -28,12 +29,13 @@
 import subprocess
 import tempfile
 import time
-import urlparse
+import six.moves.urllib_parse as urlparse
 
 from autotest_lib.client.bin import utils as client_utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.server import test
 from autotest_lib.server import utils
+from autotest_lib.server.cros.tradefed import adb as adb_utils
 from autotest_lib.server.cros.tradefed import cts_expected_failure_parser
 from autotest_lib.server.cros.tradefed import tradefed_chromelogin as login
 from autotest_lib.server.cros.tradefed import tradefed_constants as constants
@@ -64,6 +66,7 @@
     _SHARD_CMD = None
     _board_arch = None
     _board_name = None
+    _model_name = None
     _release_branch_number = None  # The 'y' of OS version Rxx-xxxxx.y.z
     _android_version = None
     _first_api_level = None
@@ -74,6 +77,11 @@
     _MAX_LAB_JOB_LENGTH_IN_SEC = 16 * 60 * 60 - 30 * 60
     _job_deadline = None
 
+    # Currently this is only used for dependency injection for testing.
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args)
+        self._adb = kwargs.get('adb', adb_utils.Adb())
+
     def _log_java_version(self):
         """Log java version to debug failures due to version mismatch."""
         utils.run(
@@ -94,7 +102,8 @@
                    retry_manual_tests=False,
                    warn_on_test_retry=True,
                    hard_reboot_on_failure=False,
-                   use_jdk9=False):
+                   use_jdk9=False,
+                   use_old_adb=False):
         """Sets up the tools and binary bundles for the test."""
         if utils.is_in_container() and not client_utils.is_moblab():
             self._job_deadline = time.time() + self._MAX_LAB_JOB_LENGTH_IN_SEC
@@ -135,8 +144,9 @@
         permission = (
             stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH
             | stat.S_IXOTH)
-        self._install_files(constants.ADB_DIR, constants.ADB_FILES,
-                            permission)
+
+        adb_dir = constants.ADB_DIR_OLD if use_old_adb else constants.ADB_DIR
+        self._install_files(adb_dir, constants.ADB_FILES, permission)
         self._install_files(constants.SDK_TOOLS_DIR,
                             constants.SDK_TOOLS_FILES, permission)
 
@@ -156,10 +166,14 @@
             else:
                 logging.info('Non-lab environment: should be using JDK9+')
 
+        # TODO(kinaba): Remove the hack and fully enable the feature.
+        # For release branches (Rx-yyyyy.3.0 or above), always use the
+        # official build instead of the release build. See b/210369548
+        if uri == 'DEV' and self._get_release_branch_number() >= 3:
+            uri = 'LATEST'
         # Install the tradefed bundle.
         bundle_install_path = self._install_bundle(
-                self._get_latest_bundle_url(bundle) if uri == 'LATEST' else (
-                        uri or self._get_default_bundle_url(bundle)))
+                self._get_bundle_url(uri, bundle))
         self._repository = os.path.join(bundle_install_path,
                                         self._get_tradefed_base_dir())
 
@@ -177,11 +191,6 @@
                 bundle)
         self._hard_reboot_on_failure = hard_reboot_on_failure
 
-    def postprocess(self):
-        """Postprocess: synchronous offloads and performance data"""
-        self._output_perf()
-        self._prepare_synchronous_offloads()
-
     def _output_perf(self):
         """Output performance values."""
         base = self._default_tradefed_base_dir()
@@ -230,6 +239,23 @@
 
     def cleanup(self):
         """Cleans up any dirtied state."""
+
+        # We also run result postprocessing and performance data offloading
+        # here so that WARN and FAIL runs execute these steps as well; the
+        # postprocess() method only runs for PASSing jobs.
+        self._prepare_synchronous_offloads()
+        self._output_perf()
+
+        try:
+            # Clean up test data that may not be deletable on previous
+            # ChromeOS versions. See b/170276268.
+            self._run_commands([
+                    'cryptohome --action=remove --force --user=test@test.test'
+            ],
+                               ignore_status=True)
+        except Exception:
+            logging.error('Failed to clean up the test account.')
+
         self._kill_adb_server()
 
         if hasattr(self, '_tradefed_install'):
@@ -242,8 +268,10 @@
     def _kill_adb_server(self):
         # Kill any lingering adb servers.
         try:
-            self._run_adb_cmd(verbose=True, args=('kill-server',),
-                timeout=constants.ADB_KILL_SERVER_TIMEOUT_SECONDS)
+            self._adb.run(None,
+                          verbose=True,
+                          args=('kill-server', ),
+                          timeout=constants.ADB_KILL_SERVER_TIMEOUT_SECONDS)
         except error.CmdTimeoutError as e:
             logging.warn(e)
             # `adb kill-server` sometimes hangs up. Kill it more brutally.
@@ -284,10 +312,11 @@
         This method should only be called after all hosts' Android has been
         successfully booted up."""
         # Check all hosts have same Android fingerprint.
-        fingerprint = set(self._run_adb_cmd(
-            host,
-            args=('shell', 'getprop', 'ro.build.fingerprint')).stdout
-            for host in self._hosts)
+        fingerprint = set(
+                self._adb.run(host,
+                              args=('shell', 'getprop',
+                                    'ro.build.fingerprint')).stdout
+                for host in self._hosts)
         if len(fingerprint) > 1:
             raise error.TestFail('Hosts\' supported fingerprint is different: '
                                  '%s', fingerprint)
@@ -298,11 +327,13 @@
         The value equals to the times each test case is run, which is determined
         by the intersection of the supported ABIs of the CTS/GTS bundle and that
         of the tested device."""
+        # This is only a conservative approximation. Some suites only run the
+        # primary ABI, so to be fully precise, those have to be counted as 1.
         arm_abis = set(('armeabi-v7a', 'arm64-v8a'))
         x86_abis = set(('x86', 'x86_64'))
-        if bundle == 'arm':
+        if bundle and bundle.startswith('arm'):
             tradefed_abis = arm_abis
-        elif bundle == 'x86':
+        elif bundle and bundle.startswith('x86'):
             tradefed_abis = x86_abis
         else:
             tradefed_abis = arm_abis | x86_abis
@@ -310,36 +341,6 @@
         # Avoid setting timeout=0 (None) in any cases.
         self._timeout_factor = max(1, self._test_count_factor)
 
-    def _get_adb_targets(self):
-        """Get a list of adb targets."""
-        return [self._get_adb_target(host) for host in self._hosts]
-
-    def _get_adb_target(self, host):
-        """Get the adb target format.
-
-        This method is slightly different from host.host_port as we need to
-        explicitly specify the port so the serial name of adb target would
-        match."""
-        return '{}:{}'.format(host.hostname, host.port)
-
-    def _run_adb_cmd(self, host=None, **kwargs):
-        """Running adb command.
-
-        @param host: DUT that want to connect to. (None if the adb command is
-                     intended to run in the server. eg. keygen)
-        """
-        # As of N, tradefed could not specify which adb socket to use, which use
-        # tcp:localhost:5037 by default.
-        adb_global_option = ('-H', 'localhost', '-P', '5037')
-        if host:
-            host_port = self._get_adb_target(host)
-            adb_global_option = ('-s', host_port)
-        kwargs['args'] = adb_global_option + kwargs.get('args', ())
-        result = self._run('adb', **kwargs)
-        logging.info('adb %s:\n%s', ' '.join(kwargs.get('args')),
-                     result.stdout + result.stderr)
-        return result
-
     def _try_adb_connect(self, host):
         """Attempts to connect to adb on the DUT.
 
@@ -353,16 +354,22 @@
             # This may fail return failure due to a race condition in adb
             # connect (b/29370989). If adb is already connected, this command
             # will immediately return success.
-            host_port = self._get_adb_target(host)
-            result = self._run_adb_cmd(
-                host, args=('connect', host_port), verbose=True, env=env,
-                ignore_status=True,
-                timeout=constants.ADB_CONNECT_TIMEOUT_SECONDS)
+            host_port = adb_utils.get_adb_target(host)
+            result = self._adb.run(
+                    host,
+                    args=('connect', host_port),
+                    verbose=True,
+                    env=env,
+                    ignore_status=True,
+                    timeout=constants.ADB_CONNECT_TIMEOUT_SECONDS)
             if result.exit_status != 0:
                 return False
 
-            result = self._run_adb_cmd(host, args=('devices',), env=env,
-                timeout=constants.ADB_CONNECT_TIMEOUT_SECONDS)
+            result = self._adb.run(
+                    host,
+                    args=('devices', ),
+                    env=env,
+                    timeout=constants.ADB_CONNECT_TIMEOUT_SECONDS)
             if not re.search(r'{}\s+(device|unauthorized)'.format(
                     re.escape(host_port)), result.stdout):
                 logging.info('No result found in with pattern: %s',
@@ -372,10 +379,13 @@
 
             # Actually test the connection with an adb command as there can be
             # a race between detecting the connected device and actually being
-            # able to run a commmand with authenticated adb.
-            result = self._run_adb_cmd(
-                host, args=('shell', 'exit'), env=env, ignore_status=True,
-                timeout=constants.ADB_CONNECT_TIMEOUT_SECONDS)
+            # able to run a command with authenticated adb.
+            result = self._adb.run(
+                    host,
+                    args=('shell', 'exit'),
+                    env=env,
+                    ignore_status=True,
+                    timeout=constants.ADB_CONNECT_TIMEOUT_SECONDS)
             return result.exit_status == 0
         except error.CmdTimeoutError as e:
             logging.warning(e)
@@ -413,10 +423,10 @@
         """
 
         def _intent_helper_running():
-            result = self._run_adb_cmd(
-                host,
-                args=('shell', 'pgrep', '-f', 'org.chromium.arc.intent_helper'),
-                ignore_status=True)
+            result = self._adb.run(host,
+                                   args=('shell', 'pgrep', '-f',
+                                         'org.chromium.arc.intent_helper'),
+                                   ignore_status=True)
             return bool(result.stdout)
 
         utils.poll_for_condition(
@@ -435,11 +445,10 @@
         This method disables it.
         """
         logging.info('Disabling the adb install dialog.')
-        result = self._run_adb_cmd(
-            host,
-            verbose=True,
-            args=('shell', 'settings', 'put', 'global',
-                  'verifier_verify_adb_installs', '0'))
+        result = self._adb.run(host,
+                               verbose=True,
+                               args=('shell', 'settings', 'put', 'global',
+                                     'verifier_verify_adb_installs', '0'))
         logging.info('Disable adb dialog: %s', result.stdout)
 
         # Android "RescueParty" feature can reset the above settings when the
@@ -588,7 +597,7 @@
         # the uris are supposed to contain version information and an object is
         # not supposed to be changed once created.
         output_dir = os.path.join(self._tradefed_cache,
-                                  hashlib.md5(uri).hexdigest())
+                                  hashlib.md5(uri.encode()).hexdigest())
         # Check for existence of cache entry. We check for directory existence
         # instead of file existence, so that _install_bundle can delete original
         # zip files to save disk space.
@@ -677,8 +686,58 @@
         dir2 = os.path.basename(cache_path)
         dir1 = os.path.basename(os.path.dirname(cache_path))
         instance_path = os.path.join(self._tradefed_install, dir1, dir2)
-        logging.info('Copying %s to instance %s', cache_path, instance_path)
-        shutil.copytree(cache_path, instance_path)
+        # TODO(kinaba): Fix in a safer way.
+        # Below is a workaround to avoid copying the large CTS/GTS tree in the
+        # test lab. Contents of $cache_path/android-cts are symlinked to the
+        # destination rather than copied.
+        #  1) Why not symlink 'android-cts' itself? Because the tests will
+        #     create results/ logs/ subplans/ subdirectories there, and we do
+        #     not want to write to the shared cache.
+        #  2) Why not hardlink? The cache and the local directory may be on
+        #     different mount points, so hardlinks may not work.
+        #  3) Why isn't this safe? The cache is cleared when it becomes full,
+        #     even while a test is running on an instance.
+        #  4) Why is this acceptable despite the unsafety? Cache clearance is
+        #     a rare event (roughly once in 6 months). Skylab drones won't
+        #     usually live that long, and even if one did, the failure would
+        #     happen only once in 6 months after all.
+        special_src = None
+        special_dest = None
+        if utils.is_in_container() and not client_utils.is_moblab():
+            for xts_name in ['android-cts', 'android-gts', 'android-sts']:
+                xts_root = os.path.join(cache_path, xts_name)
+                if os.path.exists(xts_root):
+                    special_src = xts_root
+                    special_dest = os.path.join(instance_path, xts_name)
+                    break
+        if special_src:
+            logging.info('SYMLINK&COPY contents of %s to instance %s',
+                         cache_path, instance_path)
+            self._safe_makedirs(special_dest)
+            for entry in os.listdir(special_src):
+                # Subdirectories are created relative to tools/cts_tradefed,
+                # so the 'tools' dir is copied rather than symlinked.
+                if entry == 'tools':
+                    shutil.copytree(os.path.join(special_src, entry),
+                                    os.path.join(special_dest, entry))
+                elif entry == 'testcases':
+                    # The directory structure in testcases/ needs to be
+                    # instantiated, because CTS runs the `find` command in
+                    # the directory without following symlinks.
+                    for subdir, _, files in os.walk(
+                            os.path.join(special_src, entry)):
+                        rel = os.path.relpath(subdir, special_src)
+                        os.mkdir(os.path.join(special_dest, rel))
+                        for file in files:
+                            os.symlink(os.path.join(special_src, rel, file),
+                                       os.path.join(special_dest, rel, file))
+                else:
+                    os.symlink(os.path.join(special_src, entry),
+                               os.path.join(special_dest, entry))
+        else:
+            logging.info('Copying %s to instance %s', cache_path,
+                         instance_path)
+            shutil.copytree(cache_path, instance_path)
         return instance_path
 
     def _install_bundle(self, gs_uri):
@@ -729,7 +788,9 @@
                 local = self._instance_copyfile(cache_path)
             os.chmod(local, permission)
             # Keep track of PATH.
-            self._install_paths.append(os.path.dirname(local))
+            local_dir = os.path.dirname(local)
+            self._install_paths.append(local_dir)
+            self._adb.add_path(local_dir)
 
     def _prepare_media(self, media_asset):
         """Downloads and offers the cached media files to tradefed."""
@@ -763,10 +824,73 @@
     def _fail_on_unexpected_media_download(self, media_asset):
         if os.path.isdir(media_asset.localpath):
             contents = os.listdir(media_asset.localpath)
+            # Ignore a table-of-contents file created by newer xTS
+            TOC_FILE = 'contents.toc'
+            if TOC_FILE in contents:
+                contents.remove(TOC_FILE)
             if len(contents) > self._num_media_bundles:
                 raise error.TestFail(
                     'Failed: Unexpected media bundle was added %s' % contents)
 
+    def _should_push_mediastress_asset(self, target_module, board):
+        """Returns whether we should manually push mediastress assets.
+
+        TODO(b/210801048): Remove this workaround once ARCVM storage performance
+        on ARM becomes good enough.
+        """
+        return (target_module and 'CtsMediaStressTestCases' in target_module
+                and board in ['kukui-arc-r'])
+
+    def _push_mediastress_asset(self, media_asset):
+        """Pushes mediastress assets to the DUT for the upcoming test."""
+        logging.info(
+                'Pushing mediastress assets in advance to work around slow '
+                'storage on ARM boards (b/210801048)')
+
+        media_dir = os.path.join(media_asset.localpath,
+                                 'android-cts-media-1.5')
+        copy_media_sh = os.path.join(media_dir, 'copy_media.sh')
+        os.chmod(copy_media_sh, 0o755)
+
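+        # copy_media.sh is run from inside the media directory; remember
+        # the current working directory and restore it afterwards.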
+        old_cwd = os.getcwd()
+        os.chdir(media_dir)
+        try:
+            for host in self._hosts:
+                host_port = adb_utils.get_adb_target(host)
+                self._run(
+                        copy_media_sh,
+                        args=('all', '-s', host_port),
+                        timeout=constants.ADB_PUSH_MEDIASTRESS_TIMEOUT_SECONDS,
+                        verbose=True,
+                        ignore_status=False,
+                        stdout_tee=utils.TEE_TO_LOGS,
+                        stderr_tee=utils.TEE_TO_LOGS)
+        finally:
+            os.chdir(old_cwd)
+
+    def _fetch_helpers_from_dut(self):
+        """Fetches the CTS helpers from the dut and installs into the testcases
+           subdirectory of our local autotest copy.
+        """
+        tf_testcases = os.path.join(self._repository, 'testcases')
+
+        # Earlier checks enforce that each host has the same build fingerprint,
+        # so we can assume that the packages from the first host will work
+        # across the whole set.
+        package_list = self._adb.run(
+                self._hosts[0],
+                args=('shell', 'getprop',
+                      constants.TRADEFED_CTS_HELPERS_PROPERTY)).stdout.strip()
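+        # The property holds a ':'-separated list of helper package names;
+        # empty entries are skipped.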
+        for pkg in package_list.split(':'):
+            if not pkg:
+                continue
+            apk_name = pkg + '.apk'
+            logging.info('Installing CTS helper package %s to %s', apk_name,
+                         tf_testcases)
+            self._hosts[0].get_file(
+                    os.path.join(constants.BOARD_CTS_HELPERS_DIR, apk_name),
+                    tf_testcases)
+
     def _run(self, *args, **kwargs):
         """Executes the given command line.
 
@@ -808,6 +932,7 @@
         # Load waivers and manual tests so TF doesn't re-run them.
         expected_fail_files = []
         test_board = self._get_board_name()
+        test_model = self._get_model_name()
         test_arch = self._get_board_arch()
         sdk_ver = self._get_android_version()
         first_api_level = self._get_first_api_level()
@@ -817,8 +942,9 @@
 
         waivers = cts_expected_failure_parser.ParseKnownCTSFailures(
             expected_fail_files)
-        return waivers.find_waivers(test_arch, test_board, bundle_abi, sdk_ver,
-                                    first_api_level)
+        return waivers.find_waivers(test_arch, test_board, test_model,
+                                    bundle_abi, sdk_ver, first_api_level,
+                                    self._hosts[0])
 
     def _get_abilist(self):
         """Return the abilist supported by calling adb command.
@@ -827,10 +953,10 @@
         successfully initialized."""
         if not self._abilist:
             for _ in range(3):
-                abilist_str = self._run_adb_cmd(
-                    self._hosts[0],
-                    args=('shell', 'getprop',
-                          'ro.product.cpu.abilist')).stdout.strip()
+                abilist_str = self._adb.run(
+                        self._hosts[0],
+                        args=('shell', 'getprop',
+                              'ro.product.cpu.abilist')).stdout.strip()
                 if abilist_str:
                     self._abilist = abilist_str.split(',')
                     break
@@ -860,6 +986,12 @@
             self._board_name = self._hosts[0].get_board().split(':')[1]
         return self._board_name
 
+    def _get_model_name(self):
+        """Return target DUT model name."""
+        if not self._model_name:
+            self._model_name = self._hosts[0].get_model_from_cros_config()
+        return self._model_name
+
     def _get_android_version(self):
         """Return target DUT Android SDK version"""
         # TODO(kinaba): factor this out to server/hosts/cros_host.py
@@ -908,9 +1040,31 @@
         logging.warning('Could not establish channel. Using retry=0.')
         return 0
 
+    def _is_tablet_mode_device(self):
+        """Returns if running the test on a tabled mode device"""
+        # TODO(kinaba): consider adding per-model check
+        board = self._get_board_name()
+        return any(board.startswith(b) for b in constants.TABLET_MODE_BOARDS)
+
     def _run_commands(self, commands, **kwargs):
         """Run commands on all the hosts."""
+        # We need to copy the ADB key to the device to run adb on it.
+        pre_commands = []
+        if any(command.startswith('adb ') for command in commands):
+            key_path = '/tmp/arc.adb_key'
+            for host in self._hosts:
+                host.env['ADB_VENDOR_KEYS'] = key_path
+            pre_commands = [
+                    'adb kill-server',
+                    'echo %s > %s' %
+                    (pipes.quote(constants.PRIVATE_KEY), key_path)
+            ]
+
         for host in self._hosts:
+            if pre_commands:
+                logging.info('Running DUT adb setup')
+                for command in pre_commands:
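+                    # verbose=False keeps the private ADB key out of the
+                    # test logs.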
+                    host.run(command, ignore_status=True, verbose=False)
             for command in commands:
                 logging.info('RUN: %s\n', command)
                 output = host.run(command, **kwargs)
@@ -955,6 +1109,44 @@
             logging.warning('Failed to restore powerd policy, overrided policy '
                             'will persist until device reboot.')
 
+    def _should_set_cpu_governor(self, target_module, board):
+        """Returns whether we should set performance governor."""
+        # TODO(kinaba): The current restore logic only applies to Kukui
+        # and Trogdor. Please update the logic when expanding the scope.
+        return (target_module and "CtsDeqp" in target_module) and (board in [
+                'kukui-arc-r', 'trogdor-arc-r'
+        ])
+
+    def _set_cpu_governor(self, governor):
+        """Set the specified CPU governor."""
+        self._run_commands([('for i in /sys/devices/system/cpu/cpufreq/*; do'
+                             ' echo %s > $i/scaling_governor; done') % governor
+                            ])
+
+    def _override_cpu_governor(self):
+        """Override the CPU governor for performance mode."""
+        try:
+            self._set_cpu_governor('performance')
+        except (error.AutoservRunError, error.AutoservSSHTimeout):
+            logging.warning('Failed to override CPU governor, tests depending '
+                            'on boosted performance may fail.')
+
+    def _restore_cpu_governor(self):
+        """Restore the CPU governor to the default value."""
+        try:
+            self._set_cpu_governor('schedutil')
+        except (error.AutoservRunError, error.AutoservSSHTimeout):
+            logging.warning('Failed to restore CPU governor; the overridden '
+                            'governor will persist until device reboot.')
+
+    def _mute_device(self):
+        """Mutes the device to avoid noises while running tests"""
+        try:
+            self._run_commands(['cras_test_client --mute 1'],
+                               ignore_status=True)
+        except Exception:
+            logging.warning('Failed to mute the device')
+
     def _clean_crash_logs(self):
         try:
             self._run_commands(['rm -f /home/chronos/crash/*'])
@@ -970,7 +1162,7 @@
         """
         target_argument = []
         for host in self._hosts:
-            target_argument += ['-s', self._get_adb_target(host)]
+            target_argument += ['-s', adb_utils.get_adb_target(host)]
         shard_argument = []
         if len(self._hosts) > 1:
             if self._SHARD_CMD:
@@ -1081,8 +1273,10 @@
             logging.info('Copying extra artifacts from "%s" to "%s".',
                          artifact, output_dir)
             try:
-                self._run_adb_cmd(host, verbose=True, timeout=120,
-                                  args=('pull', artifact, output_dir))
+                self._adb.run(host,
+                              verbose=True,
+                              timeout=120,
+                              args=('pull', artifact, output_dir))
             except:
                 # Maybe ADB connection failed, or the artifacts don't exist.
                 logging.exception('Copying extra artifacts failed.')
@@ -1123,6 +1317,11 @@
             lastmatch = (session, passed, failed, done == total)
         return lastmatch
 
+    def _get_bundle_url(self, uri, bundle):
+        # TODO: Replace with NotImplementedError once all subclasses are done
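+        # Precedence: 'LATEST' resolves to the latest bundle URL; any other
+        # non-empty uri is used as-is; otherwise fall back to the default
+        # bundle URL.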
+        return self._get_latest_bundle_url(bundle) if uri == 'LATEST' else (
+                uri or self._get_default_bundle_url(bundle))
+
     def _tradefed_retry_command(self, template, session_id):
         raise NotImplementedError('Subclass should override this function')
 
@@ -1137,8 +1336,8 @@
 
     def _run_tradefed_with_timeout(self, command, timeout):
         tradefed = self._tradefed_cmd_path()
-        with tradefed_utils.adb_keepalive(self._get_adb_targets(),
-                                          self._install_paths):
+        with tradefed_utils.adb_keepalive(
+                adb_utils.get_adb_targets(self._hosts), self._install_paths):
             logging.info('RUN(timeout=%d): %s', timeout,
                          ' '.join([tradefed] + command))
             output = self._run(
@@ -1179,9 +1378,9 @@
                                    target_plan=None,
                                    executable_test_count=None,
                                    bundle=None,
+                                   use_helpers=False,
                                    extra_artifacts=[],
                                    extra_artifacts_host=[],
-                                   cts_uri=None,
                                    login_precondition_commands=[],
                                    precondition_commands=[],
                                    prerequisites=[]):
@@ -1207,11 +1406,8 @@
 
         steps = -1  # For historic reasons the first iteration is not counted.
         self.summary = ''
-        accurate = []
         board = self._get_board_name()
         session_id = None
-        toggle_ndk = board == 'rammus-arc-r' # Toggle to ndk translation for this board
-        nativebridge64_experiment = (self._get_release_branch_number() == 0)
 
         self._setup_result_directories()
         if media_asset:
@@ -1227,19 +1423,20 @@
             # TODO(kinaba): Make it a general config (per-model choice
             # of tablet,clamshell,default) if the code below works.
             if utils.is_in_container() and not client_utils.is_moblab():
-                # Force all hatch devices run the test in laptop mode,
-                # regardless of their physical placement.
-                if board == 'hatch' or board == 'hatch-arc-r':
+                # Force laptop mode for boards not in TABLET_MODE_BOARDS.
+                if not self._is_tablet_mode_device():
                     self._run_commands(
                         ['inject_powerd_input_event --code=tablet --value=0'],
                         ignore_status=True)
-            with login.login_chrome(
-                    hosts=self._hosts,
-                    board=board,
-                    dont_override_profile=keep_media,
-                    enable_default_apps=enable_default_apps,
-                    toggle_ndk=toggle_ndk,
-                    nativebridge64=nativebridge64_experiment) as current_logins:
+
+            session_log_dir = os.path.join(self.resultsdir,
+                                           'login_session_log',
+                                           'step%02d' % steps)
+            with login.login_chrome(hosts=self._hosts,
+                                    board=board,
+                                    dont_override_profile=keep_media,
+                                    enable_default_apps=enable_default_apps,
+                                    log_dir=session_log_dir) as current_logins:
                 if self._should_reboot(steps):
                     # TODO(rohitbm): Evaluate if power cycle really helps with
                     # Bluetooth test failures, and then make the implementation
@@ -1255,7 +1452,29 @@
                         current_login.need_reboot(hard_reboot=hard_reboot)
                 self._ready_arc()
                 self._calculate_test_count_factor(bundle)
+
+                # Check the ABI list and skip (pass) the tests if not applicable.
+                # This needs to be done after _ready_arc() for reading the device's
+                # ABI list from the booted ARC instance.
+                if '--abi' in run_template:
+                    abi = run_template[run_template.index('--abi') + 1]
+                    abilist = self._get_abilist()
+                    if abilist and abi not in abilist:
+                        logging.info(
+                                'Specified ABI %s is not in the device ABI list %s. Skipping.',
+                                abi, abilist)
+                        return
+
+                # For CtsMediaStressTestCases, push media assets in advance if
+                # applicable.
+                if (not keep_media and media_asset
+                            and self._should_push_mediastress_asset(
+                                    target_module, board)):
+                    self._push_mediastress_asset(media_asset)
+
                 self._run_commands(precondition_commands, ignore_status=True)
+                if use_helpers:
+                    self._fetch_helpers_from_dut()
 
                 # Run tradefed.
                 if session_id == None:
@@ -1275,17 +1494,25 @@
                     # enough disk space for 16GB storage devices: b/156075084.
                     if not keep_media:
                         self._clean_crash_logs()
-                # TODO(b/137917339): Only prevent screen from turning off for
-                # media tests. Remove this check once the GPU issue is fixed.
-                keep_screen_on = (media_asset and media_asset.uri) or (
-                        target_module and "Media" in target_module)
+                # b/196748125. Mute before running tests to avoid noise.
+                self._mute_device()
+                set_performance_governor = self._should_set_cpu_governor(
+                        target_module, board)
+                # TODO(b/182397469): speculatively disable the "screen-on"
+                # handler for dEQP. Revert when the issue is resolved.
+                keep_screen_on = not (target_module
+                                      and "CtsDeqpTestCases" in target_module)
+                if set_performance_governor:
+                    self._override_cpu_governor()
                 if keep_screen_on:
                     self._override_powerd_prefs()
                 try:
-                    waived_tests, acc = self._run_and_parse_tradefed(command)
+                    waived_tests = self._run_and_parse_tradefed(command)
                 finally:
                     if keep_screen_on:
                         self._restore_powerd_prefs()
+                    if set_performance_governor:
+                        self._restore_cpu_governor()
                 if media_asset:
                     self._fail_on_unexpected_media_download(media_asset)
                 result = self._run_tradefed_list_results()
@@ -1316,10 +1543,6 @@
                     if media_asset:
                         self._cleanup_media(media_asset)
 
-                # If the result is |acc|urate according to the log, or the
-                # inaccuracy is recognized by tradefed (not all_done), then
-                # it is fine.
-                accurate.append(acc or not last_all_done)
                 if last_failed < last_waived:
                     logging.error(
                         'Error: Internal waiver bookkeeping has become '
@@ -1382,11 +1605,6 @@
             raise error.TestFail('Error: Could not find any tests in module.')
 
         if failed <= waived and all_done:
-            if not all(accurate):
-                raise error.TestFail(
-                    'Failed: Not all tests were executed. After %d '
-                    'retries passing %d tests, waived=%d. %s' % (
-                        steps, passed, waived, self.summary))
             # TODO(ihf): Make this error.TestPass('...') once
             # available.
             if steps > 0 and self._warn_on_test_retry:
@@ -1397,8 +1615,7 @@
             return
 
         raise error.TestFail(
-            'Failed: after %d retries giving up. '
-            'passed=%d, failed=%d, waived=%d%s%s. %s' %
-            (steps, passed, failed, waived, '' if all_done else ', notexec>=1',
-             '' if all(accurate) else ', Tests may not be accurate.',
-             self.summary))
+                'Failed: after %d retries giving up. '
+                'passed=%d, failed=%d, waived=%d%s. %s' %
+                (steps, passed, failed, waived,
+                 '' if all_done else ', notexec>=1', self.summary))
diff --git a/server/cros/tradefed/tradefed_test_unittest.py b/server/cros/tradefed/tradefed_test_unittest.py
new file mode 100644
index 0000000..6a01b15
--- /dev/null
+++ b/server/cros/tradefed/tradefed_test_unittest.py
@@ -0,0 +1,284 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+import tempfile
+import shutil
+import stat
+
+from unittest.mock import Mock, ANY, patch
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.tradefed import tradefed_test
+
+
+class TradefedTestTest(unittest.TestCase):
+    """Tests for TradefedTest class."""
+
+    def setUp(self):
+        self._mockjob_tmpdirs = []
+        self._bindir = tempfile.mkdtemp()
+        self._outputdir = tempfile.mkdtemp()
+        self.mock_adb = Mock()
+        self.tradefed = tradefed_test.TradefedTest(self.create_mock_job(),
+                                                   self._bindir,
+                                                   self._outputdir,
+                                                   adb=self.mock_adb)
+
+    def tearDown(self):
+        shutil.rmtree(self._bindir)
+        shutil.rmtree(self._outputdir)
+        for tmpdir in self._mockjob_tmpdirs:
+            shutil.rmtree(tmpdir)
+
+    def create_mock_job(self):
+        """Creates a mock necessary for constructing tradefed_test instance."""
+        mock_job = Mock()
+        mock_job.pkgmgr = None
+        mock_job.autodir = None
+        mock_job.tmpdir = tempfile.mkdtemp()
+        self._mockjob_tmpdirs.append(mock_job.tmpdir)
+        return mock_job
+
+    # Verify that try_adb_connect fails when run_adb_cmd fails.
+    @patch('autotest_lib.server.cros.tradefed.adb.get_adb_target')
+    def test_try_adb_connect_run_adb_fail(self, mock_get_adb_target):
+        mock_run_adb_cmd = self.mock_adb.run
+
+        # Exit status is set to non-0 to exit _try_adb_connect() early.
+        mock_run_adb_cmd.return_value.exit_status = 1
+        mock_get_adb_target.return_value = '123.76.0.29:3467'
+
+        self.assertFalse(self.tradefed._try_adb_connect(Mock()))
+        mock_run_adb_cmd.assert_called_with(ANY,
+                                            args=('connect',
+                                                  '123.76.0.29:3467'),
+                                            verbose=ANY,
+                                            env=ANY,
+                                            ignore_status=ANY,
+                                            timeout=ANY)
+
+    # Verify that _run_tradefed_with_timeout works.
+    @patch('autotest_lib.server.cros.tradefed.tradefed_test.TradefedTest._run')
+    @patch('autotest_lib.server.cros.tradefed.tradefed_test.TradefedTest._tradefed_cmd_path'
+           )
+    @patch('autotest_lib.server.cros.tradefed.tradefed_utils.adb_keepalive')
+    @patch('autotest_lib.server.cros.tradefed.adb.get_adb_targets')
+    def test_run_tradefed_with_timeout(self, mock_get_adb_targets, _,
+                                       mock_tradefed_cmd_path, mock_run):
+        self.tradefed._install_paths = '/any/install/path'
+
+        mock_host1 = Mock()
+        mock_host2 = Mock()
+        self.tradefed._hosts = [mock_host1, mock_host2]
+
+        mock_get_adb_targets.return_value = ['host1:4321', 'host2:22']
+
+        mock_tradefed_cmd_path.return_value = '/any/path'
+
+        self.tradefed._run_tradefed_with_timeout(['command'], 1234)
+        mock_get_adb_targets.assert_called_with(self.tradefed._hosts)
+
+    def test_kill_adb_server(self):
+        mock_run = self.mock_adb.run
+        self.tradefed._kill_adb_server()
+        mock_run.assert_called_with(None,
+                                    args=('kill-server', ),
+                                    timeout=ANY,
+                                    verbose=ANY)
+
+    def test_verify_arc_hosts_single_host(self):
+        mock_run = self.mock_adb.run
+        mock_host = Mock()
+        self.tradefed._hosts = [mock_host]
+
+        self.tradefed._verify_arc_hosts()
+
+        mock_run.assert_called_with(mock_host,
+                                    args=('shell', 'getprop',
+                                          'ro.build.fingerprint'))
+
+    # Verify that multiple hosts with different fingerprints fail.
+    def test_verify_arc_hosts_different_fingerprints(self):
+        mock_run = self.mock_adb.run
+        mock_host1 = Mock()
+        mock_host2 = Mock()
+        self.tradefed._hosts = [mock_host1, mock_host2]
+
+        side_effects = [Mock(), Mock()]
+        side_effects[0].stdout = 'fingerprint1'
+        side_effects[1].stdout = 'fingerprint2'
+        mock_run.side_effect = side_effects
+
+        self.assertRaises(error.TestFail, self.tradefed._verify_arc_hosts)
+
+        mock_run.assert_any_call(mock_host1,
+                                 args=('shell', 'getprop',
+                                       'ro.build.fingerprint'))
+        mock_run.assert_any_call(mock_host2,
+                                 args=('shell', 'getprop',
+                                       'ro.build.fingerprint'))
+
+    # Verify that waiting for ARC boot uses polling with adb.
+    @patch('autotest_lib.server.utils.poll_for_condition')
+    def test_wait_for_arc_boot(self, mock_poll_for_condition):
+        mock_run = self.mock_adb.run
+
+        # stdout just has to be something that evaluates to True.
+        mock_run.return_value.stdout = 'anything'
+
+        mock_host = Mock()
+        self.tradefed._wait_for_arc_boot(mock_host)
+
+        self.assertEqual(mock_run.call_count, 0)
+
+        # Verify that the condition function uses the expected adb command.
+        self.assertEqual(mock_poll_for_condition.call_count, 1)
+        args = mock_poll_for_condition.call_args[0]
+        condition_func = args[0]
+        self.assertTrue(condition_func())
+
+        mock_run.assert_called_with(mock_host,
+                                    args=('shell', 'pgrep', '-f',
+                                          'org.chromium.arc.intent_helper'),
+                                    ignore_status=True)
+
+    def test_disable_adb_install_dialog_android_version_over_29(self):
+        mock_run = self.mock_adb.run
+        mock_run.return_value.stdout = 'disabled'
+
+        self.tradefed._android_version = 30
+        mock_host = Mock()
+        self.tradefed._disable_adb_install_dialog(mock_host)
+
+        mock_run.assert_called_with(mock_host,
+                                    args=('shell', 'settings', 'put', 'global',
+                                          'verifier_verify_adb_installs', '0'),
+                                    verbose=ANY)
+
+    def test_disable_adb_install_dialog_android_version_under_29(self):
+        mock_run = self.mock_adb.run
+
+        mock_run.return_value.stdout = 'disabled'
+
+        self.tradefed._android_version = 28
+        mock_host = Mock()
+        self.tradefed._disable_adb_install_dialog(mock_host)
+
+        mock_run.assert_called_with(mock_host,
+                                    args=('shell', 'settings', 'put', 'global',
+                                          'verifier_verify_adb_installs', '0'),
+                                    verbose=ANY)
+
+        mock_host.run.assert_called_with(
+                'android-sh -c \'setprop persist.sys.disable_rescue true\'')
+
+    def test_fetch_helpers_from_dut(self):
+        mock_run = self.mock_adb.run
+        self.tradefed._repository = '/repo/path'
+
+        mock_host = Mock()
+        self.tradefed._hosts = [mock_host]
+
+        # '::' is intentional and should be skipped.
+        mock_run.return_value.stdout = 'package1:package2::package3'
+
+        self.tradefed._fetch_helpers_from_dut()
+
+        mock_run.assert_called_with(
+                mock_host,
+                args=('shell', 'getprop',
+                      'ro.vendor.cts_interaction_helper_packages'))
+
+        self.assertEqual(mock_host.get_file.call_count, 3)
+
+        mock_host.get_file.assert_any_call(
+                '/usr/local/opt/google/vms/android/package1.apk',
+                '/repo/path/testcases',
+        )
+
+        mock_host.get_file.assert_any_call(
+                '/usr/local/opt/google/vms/android/package2.apk',
+                '/repo/path/testcases',
+        )
+
+        mock_host.get_file.assert_any_call(
+                '/usr/local/opt/google/vms/android/package3.apk',
+                '/repo/path/testcases',
+        )
+
+    def test_get_abilist(self):
+        mock_run = self.mock_adb.run
+        mock_host = Mock()
+        self.tradefed._hosts = [mock_host]
+
+        mock_run.return_value.stdout = 'arm,x86,my_awesome_architecture'
+
+        self.assertEqual(['arm', 'x86', 'my_awesome_architecture'],
+                         self.tradefed._get_abilist())
+
+        mock_run.assert_called_with(mock_host,
+                                    args=('shell', 'getprop',
+                                          'ro.product.cpu.abilist'))
+
+    def test_copy_extra_artifacts_dut(self):
+        mock_run = self.mock_adb.run
+        mock_host = Mock()
+
+        extra_artifacts = ['artifacts', '/path/to/some/file']
+        self.tradefed._copy_extra_artifacts_dut(extra_artifacts, mock_host,
+                                                self._outputdir)
+
+        self.assertEqual(mock_run.call_count, 2)
+
+        mock_run.assert_any_call(
+                mock_host,
+                args=('pull', 'artifacts', self._outputdir),
+                verbose=ANY,
+                timeout=ANY,
+        )
+
+        mock_run.assert_any_call(
+                mock_host,
+                args=('pull', '/path/to/some/file', self._outputdir),
+                verbose=ANY,
+                timeout=ANY,
+        )
+
+    # TODO(rkuroiwa): This test was added to test Adb.add_path.
+    # So most of these tradefed_test functions are mocked because
+    # they are not necessarily ready to be tested.
+    # Once the rest of the modules are tested, reevaluate unmocking them.
+    @patch('os.chmod')
+    @patch('autotest_lib.server.cros.tradefed.tradefed_test.TradefedTest._instance_copyfile'
+           )
+    @patch('autotest_lib.server.cros.tradefed.tradefed_test.TradefedTest._validate_download_cache'
+           )
+    @patch('autotest_lib.server.cros.tradefed.tradefed_test.TradefedTest._download_to_cache'
+           )
+    @patch('autotest_lib.server.cros.tradefed.tradefed_test.TradefedTest._invalidate_download_cache'
+           )
+    @patch('autotest_lib.server.cros.tradefed.tradefed_utils.lock')
+    def test_install_files(self, mock_lock, mock_invalidate_download_cache,
+                           mock_download_to_cache,
+                           mock_validate_download_cache, mock_instance_copy,
+                           mock_chmod):
+        mock_add_path = self.mock_adb.add_path
+        self.tradefed._tradefed_cache_lock = '/lock/lock_file'
+        self.tradefed._install_paths = []
+
+        mock_download_to_cache.return_value = '/path/to/downloaded/file'
+        mock_instance_copy.return_value = '/path/to/local/downloaded_file'
+
+        self.tradefed._install_files('gs://mybucket/path/to/dir', ['anyfile'],
+                                     stat.S_IRWXU)
+
+        mock_lock.assert_called_with('/lock/lock_file')
+        mock_invalidate_download_cache.assert_called()
+        mock_validate_download_cache.assert_called()
+        mock_chmod.assert_called_with('/path/to/local/downloaded_file',
+                                      stat.S_IRWXU)
+        mock_add_path.assert_called_with('/path/to/local')
+
+        self.assertEqual(self.tradefed._install_paths, ['/path/to/local'])
diff --git a/server/cros/tradefed/tradefed_utils.py b/server/cros/tradefed/tradefed_utils.py
index 8d18b79..8a17ef8 100644
--- a/server/cros/tradefed/tradefed_utils.py
+++ b/server/cros/tradefed/tradefed_utils.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -26,19 +27,23 @@
     # has very poor temporal granularity (timeout/10), which is unsuitable for
     # our needs. See /usr/lib64/python2.7/site-packages/lockfile/
     attempts = 0
+    total_wait_seconds = 0
     while not filelock.i_am_locking():
         try:
             attempts += 1
             logging.info('Waiting for cache lock...')
+            # Cap the wait at 2 minutes. Waiting too long gives long-lived
+            # tasks less chance to obtain the lock, leading to failures.
             # We must not use a random integer as the filelock implementations
             # may underflow an integer division.
-            filelock.acquire(random.uniform(0.0, pow(2.0, attempts)))
+            wait = min(120.0, random.uniform(0.0, pow(2.0, attempts)))
+            total_wait_seconds += wait
+            filelock.acquire(wait)
         except (lockfile.AlreadyLocked, lockfile.LockTimeout):
             # Our goal is to wait long enough to be sure something very bad
-            # happened to the locking thread. 11 attempts is between 15 and
-            # 30 minutes.
-            if attempts > 11:
-                # Normally we should aqcuire the lock immediately. Once we
+            # happened to the locking thread. Wait at most 2 hours.
+            if total_wait_seconds >= 7200:
+                # Normally we should acquire the lock immediately. Once we
                 # wait on the order of 10 minutes either the dev server IO is
                 # overloaded or a lock didn't get cleaned up. Take one for the
                 # team, break the lock and report a failure. This should fix
@@ -144,13 +149,14 @@
             logging.info('Waived failure for %s %d time(s)',
                          testname, fail_count)
         logging.info('>> Total waived = %s', waived)
-        return waived, True
+        return waived
 
     except Exception as e:
-        logging.warning(
-            'Exception raised in '
-            '|tradefed_utils.parse_tradefed_result_xml|: {'
-            '0}'.format(e))
+        logging.warning('Exception raised in '
+                        '|tradefed_utils.parse_tradefed_testresults_xml|: {'
+                        '0}'.format(e))
+        return []
+
 
 def parse_tradefed_result(result, waivers=None):
     """Check the result from the tradefed output.
@@ -274,6 +280,8 @@
                     test_name = test.get('name')
                     for metric in test.iter('Metric'):
                         score_type = metric.get('score_type')
+                        if score_type is None:
+                            continue
                         if score_type not in ['higher_better', 'lower_better']:
                             logging.warning(
                                 'Unsupported score_type in %s/%s/%s',
diff --git a/server/cros/tradefed/tradefed_utils_unittest.py b/server/cros/tradefed/tradefed_utils_unittest.py
index 399086a..70cad22 100644
--- a/server/cros/tradefed/tradefed_utils_unittest.py
+++ b/server/cros/tradefed/tradefed_utils_unittest.py
@@ -1,10 +1,13 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 import os
 import unittest
 
-import tradefed_utils
+import common
+
+from autotest_lib.server.cros.tradefed import tradefed_utils
 
 
 def _load_data(filename):
@@ -147,17 +150,17 @@
             'tradefed_utils_unittest_data', 'not_exist'))
 
     def test_parse_tradefed_testresults_xml_no_failure(self):
-        waived, accurate = tradefed_utils.parse_tradefed_testresults_xml(
-            os.path.join(os.path.dirname(os.path.realpath(__file__)),
-                         'tradefed_utils_unittest_data', 'test_result.xml'))
+        waived = tradefed_utils.parse_tradefed_testresults_xml(
+                os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                             'tradefed_utils_unittest_data',
+                             'test_result.xml'))
         self.assertEquals(0, len(waived))
-        self.assertTrue(accurate)
 
     def test_parse_tradefed_testresult_xml_waivers(self):
-        waived, accurate = tradefed_utils.parse_tradefed_testresults_xml(
-            os.path.join(os.path.dirname(os.path.realpath(__file__)),
-                         'tradefed_utils_unittest_data',
-                         'gtsplacement_test_result.xml'))
+        waived = tradefed_utils.parse_tradefed_testresults_xml(
+                os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                             'tradefed_utils_unittest_data',
+                             'gtsplacement_test_result.xml'))
         self.assertEquals(0, len(waived))
 
         waivers = set([
@@ -173,10 +176,11 @@
             'com.google.android.placement.gts.CoreGmsAppsTest#testGoogleDuoPreloaded',
             'com.google.android.placement.gts.CoreGmsAppsTest#testCoreGmsAppsPreloaded',
             'com.google.android.media.gts.WidevineH264PlaybackTests#testCbcsL1WithUHD30'])
-        waived, accurate = tradefed_utils.parse_tradefed_testresults_xml(
-            os.path.join(os.path.dirname(os.path.realpath(__file__)),
-                         'tradefed_utils_unittest_data',
-                         'gtsplacement_test_result.xml'), waivers=waivers)
+        waived = tradefed_utils.parse_tradefed_testresults_xml(os.path.join(
+                os.path.dirname(os.path.realpath(__file__)),
+                'tradefed_utils_unittest_data',
+                'gtsplacement_test_result.xml'),
+                                                               waivers=waivers)
         self.assertEquals(4, len(waived))
 
     def test_get_perf_metrics_from_test_result_xml(self):
diff --git a/server/cros/tradefed/tradefed_utils_unittest_data/CtsPrintTestCases.txt b/server/cros/tradefed/tradefed_utils_unittest_data/CtsPrintTestCases.txt
index 775619e..41b33f7 100644
--- a/server/cros/tradefed/tradefed_utils_unittest_data/CtsPrintTestCases.txt
+++ b/server/cros/tradefed/tradefed_utils_unittest_data/CtsPrintTestCases.txt
@@ -188,9 +188,9 @@
 05-19 13:20:58 D/ModuleListener: ModuleListener.testStarted(android.print.cts.PrinterCapabilitiesTest#testPrinterCapabilityChange)
 05-19 13:22:55 D/ModuleListener: ModuleListener.testEnded(android.print.cts.PrinterCapabilitiesTest#testPrinterCapabilityChange, {})
 05-19 13:22:55 I/ConsoleReporter: [14/48 x86 CtsPrintTestCases chromeos4-row8-rack6-host1:22] android.print.cts.PrinterCapabilitiesTest#testPrinterCapabilityChange pass
-05-19 13:22:55 D/ModuleListener: ModuleListener.testStarted(android.print.cts.PrinterCapabilitiesTest#testSanePrinterCapabilityInfos)
-05-19 13:23:02 D/ModuleListener: ModuleListener.testEnded(android.print.cts.PrinterCapabilitiesTest#testSanePrinterCapabilityInfos, {})
-05-19 13:23:02 I/ConsoleReporter: [15/48 x86 CtsPrintTestCases chromeos4-row8-rack6-host1:22] android.print.cts.PrinterCapabilitiesTest#testSanePrinterCapabilityInfos pass
+05-19 13:22:55 D/ModuleListener: ModuleListener.testStarted(android.print.cts.PrinterCapabilitiesTest#testValidPrinterCapabilityInfos)
+05-19 13:23:02 D/ModuleListener: ModuleListener.testEnded(android.print.cts.PrinterCapabilitiesTest#testValidPrinterCapabilityInfos, {})
+05-19 13:23:02 I/ConsoleReporter: [15/48 x86 CtsPrintTestCases chromeos4-row8-rack6-host1:22] android.print.cts.PrinterCapabilitiesTest#testValidPrinterCapabilityInfos pass
 05-19 13:23:02 D/ModuleListener: ModuleListener.testStarted(android.print.cts.PrintServicesTest#testProgress)
 05-19 13:23:14 D/ModuleListener: ModuleListener.testEnded(android.print.cts.PrintServicesTest#testProgress, {})
 05-19 13:23:14 I/ConsoleReporter: [16/48 x86 CtsPrintTestCases chromeos4-row8-rack6-host1:22] android.print.cts.PrintServicesTest#testProgress pass
diff --git a/server/cros/tradefed/tradefed_utils_unittest_data/test_result.xml b/server/cros/tradefed/tradefed_utils_unittest_data/test_result.xml
index de4a109..e3d5027 100644
--- a/server/cros/tradefed/tradefed_utils_unittest_data/test_result.xml
+++ b/server/cros/tradefed/tradefed_utils_unittest_data/test_result.xml
@@ -2599,5 +2599,10 @@
       <Test result="pass" name="testSyncFrameVP9" />

       <Test result="pass" name="testSyncFrameVP9Ndk" />

     </TestCase>

+    <TestCase name="dummy.EmptyMetricsTest">

+      <Test result="pass" name="testEmptyMetric">

+         <Metric key="CPU_Usage_Histogram_0-9_Percent">100.00</Metric>

+      </Test>

+    </TestCase>

   </Module>

-</Result>
\ No newline at end of file
+</Result>

diff --git a/server/cros/update_engine/update_engine_test.py b/server/cros/update_engine/update_engine_test.py
index 8d734f7..afc3fb5 100644
--- a/server/cros/update_engine/update_engine_test.py
+++ b/server/cros/update_engine/update_engine_test.py
@@ -7,11 +7,13 @@
 from __future__ import division
 from __future__ import print_function
 
+import base64
 import json
 import logging
 import os
 import re
 import shutil
+import time
 from six.moves import zip
 from six.moves import zip_longest
 import six.moves.urllib.parse
@@ -19,19 +21,28 @@
 from datetime import datetime, timedelta
 from xml.etree import ElementTree
 
+from autotest_lib.client.common_lib import autotemp
+from autotest_lib.client.common_lib import autotest_enum
 from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import global_config
+from autotest_lib.client.common_lib import lsbrelease_utils
 from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.client.cros import constants
 from autotest_lib.client.cros.update_engine import dlc_util
 from autotest_lib.client.cros.update_engine import update_engine_event as uee
 from autotest_lib.client.cros.update_engine import update_engine_util
 from autotest_lib.server import autotest
 from autotest_lib.server import test
+from autotest_lib.server.cros import gsutil_wrapper
 from autotest_lib.server.cros.dynamic_suite import tools
-from chromite.lib import auto_updater
-from chromite.lib import auto_updater_transfer
-from chromite.lib import remote_access
-from chromite.lib import retry_util
+from autotest_lib.utils.frozen_chromite.lib import auto_updater
+from autotest_lib.utils.frozen_chromite.lib import auto_updater_transfer
+from autotest_lib.utils.frozen_chromite.lib import constants as chromite_constants
+from autotest_lib.utils.frozen_chromite.lib import gob_util
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import remote_access
+from autotest_lib.utils.frozen_chromite.lib import retry_util
 
 
 class UpdateEngineTest(test.test, update_engine_util.UpdateEngineUtil):
@@ -46,8 +57,8 @@
     # Timeout periods, given in seconds.
     _INITIAL_CHECK_TIMEOUT = 12 * 60
     _DOWNLOAD_STARTED_TIMEOUT = 4 * 60
-    _DOWNLOAD_FINISHED_TIMEOUT = 20 * 60
-    _UPDATE_COMPLETED_TIMEOUT = 4 * 60
+    _DOWNLOAD_FINISHED_TIMEOUT = 60 * 60
+    _UPDATE_COMPLETED_TIMEOUT = 8 * 60
     _POST_REBOOT_TIMEOUT = 15 * 60
 
     # Name of the logfile generated by nebraska.py.
@@ -60,20 +71,30 @@
 
     _TIMESTAMP_FORMAT = '%Y-%m-%d %H:%M:%S'
 
+    # Paygen.json file provides information about all builds on all channels.
+    _PAYGEN_JSON_URI = 'gs://chromeos-build-release-console/paygen.json'
 
-    def initialize(self, host=None, hosts=None):
+    # Subtest to use for logging into DUTs in a test.
+    _LOGIN_TEST = 'login_LoginSuccess'
+    _LOGIN_TEST_PIN = 'login_LoginPin'
+
+    _CORRUPT_STATEFUL_PATH = '/mnt/stateful_partition/.corrupt_stateful'
+
+    _STATEFUL_ARCHIVE_NAME = 'stateful.tgz'
+    _KERNEL_ARCHIVE_NAME = 'full_dev_part_KERN.bin.gz'
+    _ROOTFS_ARCHIVE_NAME = 'full_dev_part_ROOT.bin.gz'
+
+    _PAYLOAD_TYPE = autotest_enum.AutotestEnum('CROS', 'DLC', 'MINIOS')
+
+    def initialize(self, host=None):
         """
         Sets default variables for the test.
 
         @param host: The DUT we will be running on.
-        @param hosts: If we are running a test with multiple DUTs (eg P2P)
-                      we will use hosts instead of host.
 
         """
         self._current_timestamp = None
         self._host = host
-        # Some AU tests use multiple DUTs
-        self._hosts = hosts
 
         # Define functions used in update_engine_util.
         self._run = self._host.run if self._host else None
@@ -82,6 +103,21 @@
         # Utilities for DLC management
         self._dlc_util = dlc_util.DLCUtil(self._run)
 
+        # URL pointing to the autotest package on a lab cache server. It is
+        # used by lab runs to select the right board+build when finding the
+        # update payloads. The cache server will also be used for downloading
+        # the update.
+        self._job_repo_url = None
+
+        # The target build for the update. Uses the release builder path
+        # format, ex: octopus-release/R102-14650.0.0
+        self._build = None
+
+        # Flag to indicate that the test has progressed far enough that
+        # stateful should be restored on failure.
+        self._should_restore_stateful = False
+
+        self._autotest_devserver = None
 
     def cleanup(self):
         """Clean up update_engine autotests."""
@@ -89,6 +125,20 @@
             self._host.get_file(self._UPDATE_ENGINE_LOG, self.resultsdir)
 
 
+    def _get_release_builder_path(self):
+        """
+        Returns the release builder path currently provisioned on the device
+        which can be used to get the current board and build number.
+        Ex: octopus-release/R102-14650.0.0
+        """
+        lsb_release_content = self._run(['cat', constants.LSB_RELEASE]).stdout
+        builder_path = lsbrelease_utils.get_chromeos_release_builder_path(
+                lsb_release_content)
+        logging.info("Current release builder path on the DUT is %s",
+                     builder_path)
+        return builder_path
+
+
     def _get_expected_events_for_rootfs_update(self, source_release):
         """
         Creates a list of expected events fired during a rootfs update.
@@ -168,6 +218,25 @@
 
         # If the event happened before the timeout
         difference = event_timestamp - self._current_timestamp
+
+        # We check if the difference is greater than 7 hours. If so, we
+        # assume it is due to a timezone jump from local time to UTC caused
+        # by the log format change crrev.com/c/2652108 and adjust
+        # accordingly. Another assumption here is that the DUTs are in the
+        # PST timezone. The jump will be 7 hours with DST and 8 hours
+        # without. This hack should be removed once a reasonable amount of
+        # time has passed and we no longer need to consider the old log
+        # format.
+        # TODO(crbug.com/c/1178930): Remove the hack below.
+        if difference > timedelta(hours=7):
+            logging.info(
+                    'Detected a timezone jump with difference %s with event %s',
+                    difference,
+                    uee.get_event_type(
+                            expected_event._expected_attrs['event_type']))
+            if difference > timedelta(hours=8):
+                difference -= timedelta(hours=8)
+            else:
+                difference -= timedelta(hours=7)
+
         if difference < timedelta(seconds=expected_event._timeout):
             logging.info('Event took %s seconds to fire during the '
                          'update', difference.seconds)
@@ -232,6 +301,10 @@
         if properties_file:
             filenames.append(filename + '.json')
         try:
+            if self._autotest_devserver is None:
+                self._autotest_devserver = dev_server.ImageServer.resolve(
+                        build_name, self._host.hostname)
+
             self._autotest_devserver.stage_artifacts(image=build_name,
                                                      files=filenames,
                                                      archive_url=archive_url)
@@ -259,26 +332,25 @@
         return autotest_devserver
 
 
-    def _get_payload_url(self, build=None, full_payload=True, is_dlc=False):
+    def _get_payload_url(self,
+                         full_payload=True,
+                         payload_type=_PAYLOAD_TYPE.CROS):
         """
-        Gets the GStorage URL of the full or delta payload for this build, for
-        either platform or DLC payloads.
+        Gets the GStorage URL of the full or delta payload for the target
+        update version for either platform or DLC payloads.
 
-        @param build: build string e.g eve-release/R85-13265.0.0.
         @param full_payload: True for full payload. False for delta.
-        @param is_dlc: True to get the payload URL for sample-dlc.
+        @param payload_type: The type of payload to get. Can be a value of the
+                             _PAYLOAD_TYPE enum.
 
-        @returns the payload URL.
+        @returns the payload URL. For example, a full payload URL looks like:
+        gs://chromeos-image-archive/octopus-release/R102-14650.0.0/chromeos_R102-14650.0.0_octopus_full_dev.bin
 
         """
-        if build is None:
-            if self._job_repo_url is None:
-                self._job_repo_url = self._get_job_repo_url()
-            ds_url, build = tools.get_devserver_build_from_package_url(
-                self._job_repo_url)
-            self._autotest_devserver = dev_server.ImageServer(ds_url)
-
-        gs = dev_server._get_image_storage_server()
+        image_path = global_config.global_config.get_config_value(
+                'CROS', 'image_storage_server', type=str)
+        # This forces a trailing '/' if not already there.
+        gs = os.path.join(image_path, '')
 
         # Example payload names (AU):
         # chromeos_R85-13265.0.0_eve_full_dev.bin
@@ -286,18 +358,23 @@
         # Example payload names (DLC):
         # dlc_sample-dlc_package_R85-13265.0.0_eve_full_dev.bin
         # dlc_sample-dlc_package_R85-13265.0.0_R85-13265.0.0_eve_delta_dev.bin
-        if is_dlc:
-            payload_prefix = 'dlc_*%s*_%s_*' % (build.rpartition('/')[2], '%s')
+        # Example payload names (MiniOS):
+        # minios_R102-14667.0.0_guybrush_full_dev.bin
+        # minios_R102-14667.0.0_R102-14667.0.0_guybrush_delta_dev.bin
+        if payload_type is self._PAYLOAD_TYPE.DLC:
+            payload_prefix = 'dlc_*_%s_*.bin'
+        elif payload_type is self._PAYLOAD_TYPE.MINIOS:
+            payload_prefix = 'minios_*_%s_*.bin'
         else:
             payload_prefix = 'chromeos_*_%s_*.bin'
 
         regex = payload_prefix % ('full' if full_payload else 'delta')
 
-        payload_url_regex = gs + build + '/' + regex
+        payload_url_regex = gs + self._build + '/' + regex
         logging.debug('Trying to find payloads at %s', payload_url_regex)
         payloads = utils.gs_ls(payload_url_regex)
         if not payloads:
-            raise error.TestFail('Could not find payload for %s', build)
+            raise error.TestFail('Could not find payload for %s', self._build)
         logging.debug('Payloads found: %s', payloads)
         return payloads[0]
 
@@ -312,8 +389,6 @@
         """Gets the job_repo_url argument supplied to the test by the lab."""
         if job_repo_url is not None:
             return job_repo_url
-        if self._hosts is not None:
-            self._host = self._hosts[0]
         if self._host is None:
             raise error.TestFail('No host specified by AU test.')
         info = self._host.host_info_store.get()
@@ -355,21 +430,48 @@
         return self._get_stateful_uri(build_uri)
 
 
-    def _copy_payload_to_public_bucket(self, payload_url):
+    def _copy_payload_to_public_bucket(self,
+                                       payload_url,
+                                       use_globbing=True,
+                                       destination_filename=None):
         """
-        Copy payload and make link public.
+        Copy payload and make link public (if not already there).
 
         @param payload_url: Payload URL on Google Storage.
+        @param use_globbing: Use globbing with payload url as prefix.
+        @param destination_filename: Filename of payload on public bucket if it
+            should be different from the source filename. Note that gsutil will
+            treat this as destination directory if `use_globbing` is true and
+            resolves to multiple files.
 
         @returns The payload URL that is now publicly accessible.
 
         """
         payload_filename = payload_url.rpartition('/')[2]
-        utils.run(['gsutil', 'cp', '%s*' % payload_url, self._CELLULAR_BUCKET])
+        if destination_filename:
+            payload_filename = destination_filename
         new_gs_url = self._CELLULAR_BUCKET + payload_filename
+        public_url = new_gs_url.replace('gs://',
+                                        'https://storage.googleapis.com/')
+
+        src_url = '%s*' % payload_url if use_globbing else payload_url
+        dst_url = new_gs_url if destination_filename else self._CELLULAR_BUCKET
+
+        # Check if public bucket already has the payload.
+        try:
+            payloads = utils.gs_ls(new_gs_url)
+            if payloads:
+                logging.info(
+                        'Payload already exists in public bucket. Returning '
+                        'url to existing payload')
+                return public_url
+        except error.CmdError:
+            logging.warning('No existing payload found. Copying payload...')
+
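+        # 'gsutil cp -n' (no-clobber) skips any files that already exist at
+        # the destination.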
+        utils.run(['gsutil', 'cp', '-n', src_url, dst_url])
         utils.run(['gsutil', 'acl', 'ch', '-u', 'AllUsers:R',
                    '%s*' % new_gs_url])
-        return new_gs_url.replace('gs://', 'https://storage.googleapis.com/')
+        return public_url
 
 
     def _suspend_then_resume(self):
@@ -410,20 +512,45 @@
         requests = pattern.findall(update_engine_log)
 
         # We are looking for patterns like this:
-        # [0324/151230.562305:INFO:omaha_request_action.cc(501)] Request:
-        timestamp_pattern = re.compile(r'\[([0-9]+)/([0-9]+).*?\] Request:')
+        # "2021-01-28T10:14:33.998217Z INFO update_engine: \
+        # [omaha_request_action.cc(794)] Request: <?xml"
+        timestamp_pattern = re.compile(
+                r'(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}).* Request:.*xml')
         timestamps = [
-            # Just use the current year since the logs don't have the year
-            # value. Let's all hope tests don't start to fail on new year's
-            # eve LOL.
-            datetime(datetime.now().year,
-                     int(ts[0][0:2]),  # Month
-                     int(ts[0][2:4]),  # Day
-                     int(ts[1][0:2]),  # Hours
-                     int(ts[1][2:4]),  # Minutes
-                     int(ts[1][4:6]))  # Seconds
-            for ts in timestamp_pattern.findall(update_engine_log)
+                datetime.strptime(ts, '%Y-%m-%dT%H:%M:%S')
+                for ts in timestamp_pattern.findall(update_engine_log)
         ]
+        if len(timestamps) == 0:
+            # We might be reading a log in the old format, so try parsing
+            # with another regexp:
+            # [0324/151230.562305:INFO:omaha_request_action.cc(501)] Request:
+            timestamp_pattern_old = re.compile(
+                    r'\[([0-9]+)/([0-9]+).*?\] Request:')
+
+            # Since the old format uses local time, we want to convert it to
+            # UTC time.
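+            # time.altzone/time.timezone give seconds west of UTC, so adding
+            # the offset to a local timestamp yields the UTC timestamp.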
+            is_dst = time.daylight and time.localtime().tm_isdst > 0
+            utc_offset = timedelta(
+                    seconds=(time.altzone if is_dst else time.timezone))
+            if utc_offset > timedelta(seconds=0):
+                logging.info('Parsing old log format. Adding utc offset of %s',
+                             utc_offset)
+            else:
+                logging.warning(
+                        'Local time to UTC conversion might fail. utc_offset=%s. time.altzone=%s, time.timezone=%s',
+                        utc_offset, time.altzone, time.timezone)
+            timestamps = [
+                    # Just use the current year since the logs don't have the year
+                    # value. Let's all hope tests don't start to fail on new year's
+                    # eve LOL.
+                    datetime(
+                            datetime.now().year,
+                            int(ts[0][0:2]),  # Month
+                            int(ts[0][2:4]),  # Day
+                            int(ts[1][0:2]),  # Hours
+                            int(ts[1][2:4]),  # Minutes
+                            int(ts[1][4:6])  # Seconds
+                    ) + utc_offset
+                    for ts in timestamp_pattern_old.findall(update_engine_log)
+            ]
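+            # Illustrative example (hypothetical local zone): the sample line
+            # above, "[0324/151230.562305:...] Request:", parsed in a UTC-4
+            # zone yields
+            # datetime(<current year>, 3, 24, 15, 12, 30) + timedelta(hours=4).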
 
         if len(requests) != len(timestamps):
             raise error.TestFail('Failed to properly parse the update_engine '
@@ -503,13 +630,16 @@
             return result
 
 
-    def _create_hostlog_files(self):
+    def _create_hostlog_files(self, ignore_event_rootfs=False):
         """Create the two hostlog files for the update.
 
         To ensure the update was successful we need to compare the update
         events against expected update events. There is a hostlog for the
         rootfs update and for the post reboot update check.
 
+        @param ignore_event_rootfs: Ignores the last event in the rootfs
+                                    log.
+
         """
         # Check that update logs exist for the update that just happened.
         if len(self._get_update_engine_logs()) < 2:
@@ -522,8 +652,13 @@
         rootfs_hostlog = os.path.join(self.resultsdir, 'hostlog_rootfs')
         with open(rootfs_hostlog, 'w') as fp:
             # There are four expected hostlog events during update.
-            json.dump(self._extract_request_logs(
-                self._get_update_engine_log(1))[-4:], fp)
+            extract_logs = self._extract_request_logs(
+                    self._get_update_engine_log(1))
+            if ignore_event_rootfs:
+                logs = extract_logs[-5:-1]
+            else:
+                logs = extract_logs[-4:]
+            json.dump(logs, fp)
 
         reboot_hostlog = os.path.join(self.resultsdir, 'hostlog_reboot')
         with open(reboot_hostlog, 'w') as fp:
@@ -659,36 +794,46 @@
         return build_name, payload_file
 
 
-    def _restore_stateful(self):
-        """Restore the stateful partition after a destructive test."""
+    def _update_stateful(self):
+        """Try to update the stateful partition without clobbering it."""
+        return self._restore_stateful(clobber_stateful=False)
+
+    def _restore_stateful(self, clobber_stateful=True, public_bucket=False):
+        """
+        Restore the stateful partition after a destructive test.
+
+        @param clobber_stateful: True to clobber (wipe) the user state.
+        @param public_bucket: True to restore from a public bucket.
+
+        """
+        # Test failed before any update preparations began. No need to fix
+        # stateful.
+        if not self._should_restore_stateful:
+            return
+
+        # Fallback to lab provisioning if this function fails to restore.
+        self._run(['touch', self._CORRUPT_STATEFUL_PATH])
+
         # Stage stateful payload.
-        ds_url, build = tools.get_devserver_build_from_package_url(
-                self._job_repo_url)
-        self._autotest_devserver = dev_server.ImageServer(ds_url)
-        self._autotest_devserver.stage_artifacts(build, ['stateful'])
+        statefuldev_url = self._stage_stateful(public_bucket)
 
         logging.info('Restoring stateful partition...')
         # Setup local dir.
         self._run(['mkdir', '-p', '-m', '1777', '/usr/local/tmp'])
 
         # Download and extract the stateful payload.
-        update_url = self._autotest_devserver.get_update_url(build)
-        statefuldev_url = update_url.replace('update', 'static')
-        statefuldev_url += '/stateful.tgz'
-        cmd = [
-                'curl', '--silent', '--show-error', '--max-time', '600',
-                statefuldev_url, '|', 'tar', '--ignore-command-error',
-                '--overwrite', '--directory', '/mnt/stateful_partition', '-xz'
-        ]
         try:
-            self._run(cmd)
+            self._download_and_extract_stateful(statefuldev_url,
+                                                self._STATEFUL_MOUNT_DIR)
         except error.AutoservRunError as e:
             err_str = 'Failed to restore the stateful partition'
             raise error.TestFail('%s: %s' % (err_str, str(e)))
 
         # Touch a file so changes are picked up after reboot.
         update_file = '/mnt/stateful_partition/.update_available'
-        self._run(['echo', '-n', 'clobber', '>', update_file])
+        if clobber_stateful:
+            self._run(['echo', '-n', 'clobber', '>', update_file])
+        else:
+            self._run(['touch', update_file])
         self._host.reboot()
 
         # Make sure python is available again.
@@ -701,6 +846,57 @@
         logging.info('Stateful restored successfully.')
 
 
+    def _download_and_extract_stateful(self,
+                                       stateful_url,
+                                       destination,
+                                       keep_symlinks=False,
+                                       members=None):
+        """
+        Download and extract the stateful partition.
+
+        @param stateful_url: The url of the stateful archive.
+        @param destination: The directory that the stateful archive will be
+            extracted into.
+        @param keep_symlinks: Don't overwrite symlinks in the destination
+            directory.
+        @param members: Names of the stateful archive members to extract. If
+            not provided, everything is extracted.
+
+        """
+        cmd = [
+                'curl', '--silent', '--show-error', '--max-time', '600',
+                stateful_url, '|', '/bin/tar', '--ignore-command-error',
+                '--overwrite', '--directory', destination, '-xz'
+        ]
+        if keep_symlinks:
+            cmd += ['--keep-directory-symlink']
+        if members:
+            # Normalize members to be a list.
+            if not isinstance(members, list):
+                members = [members]
+            cmd += members
+        self._run(cmd)
+
+    def _stage_stateful(self, public_bucket=False):
+        """
+        Stage the stateful archive for download.
+
+        @param public_bucket: True to return the archive URL on a public
+            bucket.
+
+        """
+        if public_bucket:
+            statefuldev_url = self._get_stateful_url_on_public_bucket()
+        else:
+            # Stage stateful payload.
+            ds_url, build = tools.get_devserver_build_from_package_url(
+                    self._job_repo_url)
+            self._autotest_devserver = dev_server.ImageServer(ds_url)
+            self._autotest_devserver.stage_artifacts(build, ['stateful'])
+
+            update_url = self._autotest_devserver.get_update_url(build)
+            statefuldev_url = update_url.replace('update', 'static')
+            statefuldev_url += '/stateful.tgz'
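+            # The resulting URL has roughly this shape (illustrative):
+            # http://<devserver>:<port>/static/<board>-release/<version>/stateful.tgz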
+        return statefuldev_url
+
     def verify_update_events(self, source_release, hostlog_filename,
                              target_release=None):
         """Compares a hostlog file against a set of expected events.
@@ -736,47 +932,10 @@
                                      err_msg))
 
 
-    def get_update_url_for_test(self, job_repo_url=None, full_payload=True,
-                                stateful=False):
-        """
-        Returns a devserver update URL for tests that cannot use a Nebraska
-        instance on the DUT for updating.
-
-        This expects the test to set self._host or self._hosts.
-
-        @param job_repo_url: string url containing the current build.
-        @param full_payload: bool whether we want a full payload.
-        @param stateful: bool whether we want to stage stateful payload too.
-
-        @returns a valid devserver update URL.
-
-        """
-        self._job_repo_url = self._get_job_repo_url(job_repo_url)
-        if not self._job_repo_url:
-            raise error.TestFail('There was no job_repo_url so we cannot get '
-                                 'a payload to use.')
-        ds_url, build = tools.get_devserver_build_from_package_url(
-            self._job_repo_url)
-
-        # The lab devserver assigned to this test.
-        lab_devserver = dev_server.ImageServer(ds_url)
-
-        # Stage payloads on the lab devserver.
-        self._autotest_devserver = lab_devserver
-        artifacts = ['full_payload' if full_payload else 'delta_payload']
-        if stateful:
-            artifacts.append('stateful')
-        self._autotest_devserver.stage_artifacts(build, artifacts)
-
-        # Use the same lab devserver to also handle the update.
-        url = self._autotest_devserver.get_update_url(build)
-
-        logging.info('Update URL: %s', url)
-        return url
-
-
-    def get_payload_url_on_public_bucket(self, job_repo_url=None,
-                                         full_payload=True, is_dlc=False):
+    def get_payload_url_on_public_bucket(self,
+                                         job_repo_url=None,
+                                         full_payload=True,
+                                         payload_type=_PAYLOAD_TYPE.CROS):
         """
         Get the google storage url of the payload in a public bucket.
 
@@ -785,40 +944,153 @@
 
         @param job_repo_url: string url containing the current build.
         @param full_payload: True for full, False for delta.
-        @param is_dlc: True to get the payload URL for sample-dlc.
+        @param payload_type: The type of payload to get. Can be a value of the
+                             _PAYLOAD_TYPE enum.
 
         """
-        self._job_repo_url = self._get_job_repo_url(job_repo_url)
+        if job_repo_url is not None:
+            self._job_repo_url = job_repo_url
+            _, build = tools.get_devserver_build_from_package_url(
+                    self._job_repo_url)
+            self._build = build
+            self._should_restore_stateful = True
+
         payload_url = self._get_payload_url(full_payload=full_payload,
-                                            is_dlc=is_dlc)
+                                            payload_type=payload_type)
         url = self._copy_payload_to_public_bucket(payload_url)
         logging.info('Public update URL: %s', url)
         return url
 
 
-    def get_payload_for_nebraska(self, job_repo_url=None, full_payload=True,
-                                 public_bucket=False, is_dlc=False):
+    def _get_stateful_url_on_public_bucket(self):
+        """
+        Get the google storage URL of the payload's stateful archive in a
+        public bucket.
+
+        We will be copying the stateful archive to a public google storage
+        bucket (similar location to updates via the autest command).
+
+        """
+        payload_url = self._get_payload_url()
+        stateful_url = '/'.join(
+                [payload_url.rpartition('/')[0], self._STATEFUL_ARCHIVE_NAME])
+        # We have a flat directory structure in the public directory. Therefore
+        # we need to disambiguate the stateful archive by prepending the full
+        # payload name.
+        stateful_filename = '_'.join([
+                os.path.splitext(payload_url.rpartition('/')[2])[0],
+                self._STATEFUL_ARCHIVE_NAME
+        ])
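+        # Illustrative example: a payload named
+        # chromeos_<version>_<board>_full_dev.bin yields a stateful archive
+        # named chromeos_<version>_<board>_full_dev_stateful.tgz in the
+        # public bucket.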
+        url = self._copy_payload_to_public_bucket(
+                stateful_url,
+                use_globbing=False,
+                destination_filename=stateful_filename)
+        logging.info('Public stateful URL: %s', url)
+        return url
+
+    def _get_provision_url_on_public_bucket(self, release_path):
+        """
+        Copy the necessary artifacts for quick-provision to the public bucket
+        and return the URL pointing to them.
+
+        This is to enable local runs of tests that need to provision a source
+        version on the DUT (such as m2n tests) without requiring lab cache
+        server SSH access.
+
+        @param release_path: path to the build artifacts in
+            gs://chromeos-releases. Ex: dev-channel/asurada/14515.0.0. The
+            output of _get_latest_serving_stable_build matches this format.
+
+        """
+        # We have a flat directory structure in the public directory. Therefore
+        # we need to disambiguate the path to the provision artifacts.
+        new_gs_dir = os.path.join(self._CELLULAR_BUCKET, 'provision',
+                                  release_path)
+        src_gs_dir = os.path.join('gs://chromeos-releases', release_path)
+        provision_artifacts = [
+                self._STATEFUL_ARCHIVE_NAME, self._ROOTFS_ARCHIVE_NAME,
+                self._KERNEL_ARCHIVE_NAME
+        ]
+
+        for file in provision_artifacts:
+            src_url = os.path.join(src_gs_dir, file)
+            dst_url = os.path.join(new_gs_dir, file)
+            utils.run([
+                    'gsutil', 'cp', '-n', '-a', 'public-read', src_url, dst_url
+            ])
+
+        public_url = new_gs_dir.replace('gs://',
+                                        'https://storage.googleapis.com/')
+
+        return public_url
+
+    def _copy_quick_provision_to_dut(self):
+        """ Copies the quick-provision script to the DUT from googlesource."""
+        tmp = autotemp.tempdir(unique_id='m2n')
+        src = os.path.join(tmp.name, 'quick-provision')
+        dst = '/usr/local/bin/quick-provision'
+        logging.info('Downloading quick-provision from googlesource')
+        qp_url_path = '%s/+/%s/%s?format=text' % (
+                'chromiumos/platform/dev-util', 'refs/heads/main',
+                'quick-provision/quick-provision')
+        contents_b64 = gob_util.FetchUrl(chromite_constants.EXTERNAL_GOB_HOST,
+                                         qp_url_path)
+        osutils.WriteFile(src, base64.b64decode(contents_b64).decode('utf-8'))
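+        # Note: gitiles serves file contents base64-encoded when ?format=text
+        # is requested, which is why the response is decoded above.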
+        self._host.send_file(src, dst)
+        self._run(['chown', '$USER', dst])
+        self._run(['chmod', '755', dst])
+
+    def get_payload_for_nebraska(self,
+                                 job_repo_url=None,
+                                 build=None,
+                                 full_payload=True,
+                                 public_bucket=False,
+                                 payload_type=_PAYLOAD_TYPE.CROS):
         """
         Gets a platform or DLC payload URL to be used with a nebraska instance
         on the DUT.
 
-        @param job_repo_url: string url containing the current build.
+        @param job_repo_url: string url containing the current build and cache
+                             server to use.
+        @param build: string containing the build to use for the update,
+                      like R102-14644.0.0 (the milestone number is required).
+                      Only used for the public bucket update flow.
         @param full_payload: bool whether we want a full payload.
         @param public_bucket: True to return a payload on a public bucket.
-        @param is_dlc: True to get the payload URL for sample-dlc.
+        @param payload_type: The type of payload to get. Can be a value of the
+                             _PAYLOAD_TYPE enum.
 
         @returns string URL of a payload staged on a lab devserver.
 
         """
+        self._job_repo_url = self._get_job_repo_url(job_repo_url)
+
+        if self._job_repo_url:
+            logging.info('Getting payload for the build in job_repo_url')
+            ds_url, build = tools.get_devserver_build_from_package_url(
+                    self._job_repo_url)
+            self._autotest_devserver = dev_server.ImageServer(ds_url)
+            self._build = build
+        elif build is not None:
+            logging.info('Getting payload for the build provided in test_that')
+            # self._build looks like this: octopus-release/R102-14650.0.0
+            # Replace the version with the one provided in test_that.
+            self._build = self._get_release_builder_path().rsplit(
+                    '/')[0] + '/' + build
+        else:
+            logging.info('Getting payload for the current build on the DUT')
+            self._build = self._get_release_builder_path()
+
+        logging.info("Getting payload for nebraska for build %s", self._build)
+
         if public_bucket:
             return self.get_payload_url_on_public_bucket(
-                job_repo_url, full_payload=full_payload, is_dlc=is_dlc)
+                    full_payload=full_payload, payload_type=payload_type)
 
-        self._job_repo_url = self._get_job_repo_url(job_repo_url)
         payload = self._get_payload_url(full_payload=full_payload,
-                                        is_dlc=is_dlc)
+                                        payload_type=payload_type)
         payload_url, _ = self._stage_payload_by_uri(payload)
         logging.info('Payload URL for Nebraska: %s', payload_url)
+        self._should_restore_stateful = True
         return payload_url
 
 
@@ -826,7 +1098,8 @@
                       payload_uri,
                       clobber_stateful=False,
                       tag='source',
-                      ignore_appid=False):
+                      ignore_appid=False,
+                      m2n=False):
         """
         Updates the device.
 
@@ -843,14 +1116,27 @@
                              when parsing the update request. This allows
                              the target update to use a different board's
                              image, which is needed for kernelnext updates.
+        @param m2n: True for an m2n update. m2n update tests don't use signed
+                    payloads from gs://chromeos-releases/, so the payload paths
+                    need to be parsed differently.
 
         @raise error.TestFail if anything goes wrong with the update.
 
         """
         cros_preserved_path = ('/mnt/stateful_partition/unencrypted/'
                                'preserve/cros-update')
-        build_name, payload_filename = self._get_update_parameters_from_uri(
-            payload_uri)
+        if m2n:
+            # The payload_uri for an m2n update looks like:
+            # http://100.115.220.112:8082/static/octopus-release/R102-14692.0.0/chromeos_R102-14692.0.0_octopus_full_dev.bin
+            payload_path = payload_uri[payload_uri.index('static/'):]
+            build_name = '/'.join(payload_path.split('/')[1:-1])
+            payload_filename = payload_path.split('/')[-1]
+        else:
+            # Otherwise the payload_uri looks like:
+            # gs://chromeos-releases/dev-channel/octopus/14698.0.0/payloads/chromeos_14698.0.0_octopus_dev-channel_full_test.bin-gyzdkobygyzdck3swpkou632wan55vgx
+            build_name, payload_filename = self._get_update_parameters_from_uri(
+                    payload_uri)
+
         logging.info('Installing %s on the DUT', payload_uri)
         with remote_access.ChromiumOSDeviceHandler(
             self._host.hostname, base_dir=cros_preserved_path) as device:
@@ -876,3 +1162,75 @@
             finally:
                 self._copy_generated_nebraska_logs(
                     updater.request_logs_dir, identifier=tag)
+
+    def _get_paygen_json(self):
+        """Return the paygen.json file as a json dictionary."""
+        bucket, paygen_file = self._PAYGEN_JSON_URI.rsplit('/', 1)
+        tmpdir = '/tmp/m2n/'
+        self._host.run('mkdir -p %s' % tmpdir)
+        gsutil_wrapper.copy_private_bucket(host=self._host,
+                                           bucket=bucket,
+                                           filename=paygen_file,
+                                           destination=tmpdir)
+        return json.loads(
+                self._host.run('cat %s' % os.path.join(tmpdir, paygen_file),
+                               verbose=False).stdout)
+
+    def _paygen_json_lookup(self, board, channel, delta_type):
+        """
+        Filters the paygen.json file by board, channel, and payload type.
+
+        @param board: The board name.
+        @param channel: The ChromeOS channel.
+        @param delta_type: OMAHA, FSI, MILESTONE, or STEPPING_STONE.
+
+        @returns json results filtered by the input params.
+
+        """
+        paygen_data = self._get_paygen_json()
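+        # Each entry in paygen_data['delta'] is expected to carry at least
+        # the fields used below, e.g. (illustrative):
+        # {'board': {'public_codename': '<board>'}, 'channel': '<channel>',
+        #  'delta_type': 'OMAHA', 'chrome_os_version': '<version>', ...}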
+        result = []
+        if channel.endswith('-channel'):
+            channel = channel[:-len('-channel')]
+        for delta in paygen_data['delta']:
+            if ((delta['board']['public_codename'] == board)
+                        and (delta.get('channel', None) == channel)
+                        and (delta.get('delta_type', None) == delta_type)):
+                result.append(delta)
+        return result
+
+    def _get_latest_serving_stable_build(self):
+        """
+        Returns the latest serving stable build on Omaha for the current
+        board.
+
+        It will look up the paygen.json file and return the build label that
+        can be passed to quick_provision. This is useful for M2N tests to
+        easily find the first build to provision.
+
+        @returns latest stable serving omaha build.
+
+        """
+        if lsbrelease_utils.is_moblab():
+            raise error.TestNA("Moblab cannot run M2N tests. See b/193438616.")
+        board = self._host.get_board().split(':')[1]
+        # Boards like auron_paine are auron-paine in paygen.json and builders.
+        if '_' in board:
+            board = board.replace('_', '-')
+        channel = 'stable-channel'
+        delta_type = 'OMAHA'
+        stable_paygen_data = self._paygen_json_lookup(board, channel,
+                                                      delta_type)
+        if not stable_paygen_data:
+            # Some unibuild boards can have ALL of their stable serving builds
+            # also be an FSI. When this happens we will not find an OMAHA
+            # payload to use because GE only publishes one for a channel+build
+            # pair. So try to get the latest FSI on stable channel.
+            logging.info('No OMAHA payloads found. Falling back to FSI')
+            stable_paygen_data = self._paygen_json_lookup(
+                    board, channel, 'FSI')
+        if not stable_paygen_data:
+            raise error.TestFail(
+                    'No stable build found in paygen.json for %s' % board)
+        latest_stable_paygen_data = max(
+                stable_paygen_data, key=(lambda key: key['chrome_os_version']))
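+        # The returned path looks like stable-channel/<board>/<version>
+        # (illustrative), matching the release_path format consumed by
+        # _get_provision_url_on_public_bucket.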
+        return os.path.join(channel, board,
+                            latest_stable_paygen_data["chrome_os_version"])
diff --git a/server/cros/vboot_constants.py b/server/cros/vboot_constants.py
index d130bc8..adda8eb 100644
--- a/server/cros/vboot_constants.py
+++ b/server/cros/vboot_constants.py
@@ -123,37 +123,24 @@
 }
 
 # GBB flags, copied from:
-#     vboot_reference/firmware/include/gbb_header.h
-GBB_FLAG_DEV_SCREEN_SHORT_DELAY    = 0x00000001
-GBB_FLAG_LOAD_OPTION_ROMS          = 0x00000002
-GBB_FLAG_ENABLE_ALTERNATE_OS       = 0x00000004
-GBB_FLAG_FORCE_DEV_SWITCH_ON       = 0x00000008
-GBB_FLAG_FORCE_DEV_BOOT_USB        = 0x00000010
-GBB_FLAG_DISABLE_FW_ROLLBACK_CHECK = 0x00000020
-GBB_FLAG_ENTER_TRIGGERS_TONORM     = 0x00000040
-GBB_FLAG_FORCE_DEV_BOOT_LEGACY     = 0x00000080
-GBB_FLAG_FAFT_KEY_OVERIDE          = 0x00000100
-GBB_FLAG_DISABLE_EC_SOFTWARE_SYNC  = 0x00000200
-GBB_FLAG_DEFAULT_DEV_BOOT_LEGACY   = 0x00000400
-GBB_FLAG_DISABLE_PD_SOFTWARE_SYNC  = 0x00000800
-GBB_FLAG_DISABLE_LID_SHUTDOWN      = 0x00001000
-GBB_FLAG_FORCE_DEV_BOOT_FASTBOOT_FULL_CAP = 0x00002000
-
-
-# VbSharedData flags, copied from:
-#     vboot_reference/firmware/include/vboot_struct.h
-VDAT_FLAG_FWB_TRIED                = 0x00000001
-VDAT_FLAG_KERNEL_KEY_VERIFIED      = 0x00000002
-VDAT_FLAG_LF_DEV_SWITCH_ON         = 0x00000004
-VDAT_FLAG_LF_USE_RO_NORMAL         = 0x00000008
-VDAT_FLAG_BOOT_DEV_SWITCH_ON       = 0x00000010
-VDAT_FLAG_BOOT_REC_SWITCH_ON       = 0x00000020
-VDAT_FLAG_BOOT_FIRMWARE_WP_ENABLED = 0x00000040
-VDAT_FLAG_BOOT_S3_RESUME           = 0x00000100
-VDAT_FLAG_BOOT_RO_NORMAL_SUPPORT   = 0x00000200
-VDAT_FLAG_HONOR_VIRT_DEV_SWITCH    = 0x00000400
-VDAT_FLAG_EC_SOFTWARE_SYNC         = 0x00000800
-VDAT_FLAG_EC_SLOW_UPDATE           = 0x00001000
+#     vboot_reference/firmware/2lib/include/2gbb_flags.h
+GBB_FLAG_DEV_SCREEN_SHORT_DELAY = 1 << 0
+GBB_FLAG_LOAD_OPTION_ROMS = 1 << 1
+GBB_FLAG_ENABLE_ALTERNATE_OS = 1 << 2
+GBB_FLAG_FORCE_DEV_SWITCH_ON = 1 << 3
+GBB_FLAG_FORCE_DEV_BOOT_USB = 1 << 4
+GBB_FLAG_DISABLE_FW_ROLLBACK_CHECK = 1 << 5
+GBB_FLAG_ENTER_TRIGGERS_TONORM = 1 << 6
+GBB_FLAG_FORCE_DEV_BOOT_ALTFW = 1 << 7
+GBB_FLAG_RUNNING_FAFT = 1 << 8
+GBB_FLAG_DISABLE_EC_SOFTWARE_SYNC = 1 << 9
+GBB_FLAG_DEFAULT_DEV_BOOT_ALTFW = 1 << 10
+GBB_FLAG_DISABLE_AUXFW_SOFTWARE_SYNC = 1 << 11
+GBB_FLAG_DISABLE_LID_SHUTDOWN = 1 << 12
+GBB_FLAG_DEPRECATED_FORCE_DEV_BOOT_FASTBOOT_FULL_CAP = 1 << 13
+GBB_FLAG_FORCE_MANUAL_RECOVERY = 1 << 14
+GBB_FLAG_DISABLE_FWMP = 1 << 15
+GBB_FLAG_ENABLE_UDC = 1 << 16
 
 # Firmware preamble flags, copied from:
 #     vboot_reference/firmware/include/vboot_struct.h
diff --git a/server/frontend.py b/server/frontend.py
index e331fe5..90915c8 100644
--- a/server/frontend.py
+++ b/server/frontend.py
@@ -36,7 +36,7 @@
 from six.moves import zip
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -301,9 +301,9 @@
 
 class _CrosVersionMap(_OSVersionMap):
     """
-    Stable version mapping for Chrome OS release images.
+    Stable version mapping for ChromeOS release images.
 
-    This class manages a mapping of Chrome OS board names to known-good
+    This class manages a mapping of ChromeOS board names to known-good
     release (or canary) images.  The images selected can be installed on
     DUTs during repair tasks, as a way of getting a DUT into a known
     working state.
@@ -397,9 +397,9 @@
 
 class _FirmwareVersionMap(_SuffixHackVersionMap):
     """
-    Stable version mapping for firmware supplied in Chrome OS images.
+    Stable version mapping for firmware supplied in ChromeOS images.
 
-    A Chrome OS image bundles a version of the firmware that the
+    A ChromeOS image bundles a version of the firmware that the
     device should update to when the OS version is installed during
     AU.
 
@@ -427,7 +427,7 @@
 class AFE(RpcClient):
 
     # Known image types for stable version mapping objects.
-    # CROS_IMAGE_TYPE - Mappings for Chrome OS images.
+    # CROS_IMAGE_TYPE - Mappings for ChromeOS images.
     # FAFT_IMAGE_TYPE - Mappings for Firmware images for FAFT repair.
     # FIRMWARE_IMAGE_TYPE - Mappings for released RW Firmware images.
     #
diff --git a/server/frontend_unittest.py b/server/frontend_unittest.py
index 4510958..2f01ce4 100755
--- a/server/frontend_unittest.py
+++ b/server/frontend_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright Gregory P. Smith, Google Inc 2008
 # Released under the GPL v2
diff --git a/server/hosts/OWNERS b/server/hosts/OWNERS
new file mode 100644
index 0000000..2e97f12
--- /dev/null
+++ b/server/hosts/OWNERS
@@ -0,0 +1 @@
+include /HARNESS_OWNERS
diff --git a/server/hosts/__init__.py b/server/hosts/__init__.py
index e58e28d..d943447 100644
--- a/server/hosts/__init__.py
+++ b/server/hosts/__init__.py
@@ -18,10 +18,13 @@
     from autotest_lib.server.hosts.chameleon_host import ChameleonHost
     from autotest_lib.server.hosts.servo_host import ServoHost
     from autotest_lib.server.hosts.labstation_host import LabstationHost
+    from autotest_lib.server.hosts.android_host import AndroidHost
 
     # factory function
     from autotest_lib.server.hosts.factory import create_host
     from autotest_lib.server.hosts.factory import create_target_machine
+    from autotest_lib.server.hosts.factory import create_companion_hosts
+
 except ImportError:
     # host abstract classes
     from base_classes import Host
@@ -33,7 +36,9 @@
     from chameleon_host import ChameleonHost
     from servo_host import ServoHost
     from labstation_host import LabstationHost
+    from android_host import AndroidHost
 
     # factory function
     from factory import create_host
     from factory import create_target_machine
+    from factory import create_companion_hosts
diff --git a/server/hosts/abstract_ssh.py b/server/hosts/abstract_ssh.py
index a1bf439..b804089 100644
--- a/server/hosts/abstract_ssh.py
+++ b/server/hosts/abstract_ssh.py
@@ -16,28 +16,38 @@
 from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros.network import ping_runner
 from autotest_lib.client.common_lib.global_config import global_config
+from autotest_lib.server import autoserv_parser
 from autotest_lib.server import utils, autotest
 from autotest_lib.server.hosts import host_info
 from autotest_lib.server.hosts import remote
 from autotest_lib.server.hosts import rpc_server_tracker
 from autotest_lib.server.hosts import ssh_multiplex
+from autotest_lib.server.hosts.tls_client import exec_dut_command
+
 import six
 from six.moves import filter
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
 # pylint: disable=C0111
 
 get_value = global_config.get_config_value
-enable_master_ssh = get_value('AUTOSERV', 'enable_master_ssh', type=bool,
-                              default=False)
+enable_main_ssh = get_value('AUTOSERV',
+                            'enable_main_ssh',
+                            type=bool,
+                            default=False)
+
+ENABLE_EXEC_DUT_COMMAND = get_value('AUTOSERV',
+                                    'enable_tls',
+                                    type=bool,
+                                    default=False)
 
 # Number of seconds to use the cached up status.
 _DEFAULT_UP_STATUS_EXPIRATION_SECONDS = 300
-_DEFAULT_SSH_PORT = 22
+_DEFAULT_SSH_PORT = None
 
 # Number of seconds to wait for the host to shut down in wait_down().
 _DEFAULT_WAIT_DOWN_TIME_SECONDS = 120
@@ -49,6 +59,10 @@
 # and a single ssh ping in wait_up().
 _DEFAULT_MAX_PING_TIMEOUT = 10
 
+# The client symlink directory.
+AUTOTEST_CLIENT_SYMLINK_END = 'client/autotest_lib'
+
+
 class AbstractSSHHost(remote.RemoteHost):
     """
     This class represents a generic implementation of most of the
@@ -57,13 +71,20 @@
     Host.run method.
     """
     VERSION_PREFIX = ''
-    # Timeout for master ssh connection setup, in seconds.
-    DEFAULT_START_MASTER_SSH_TIMEOUT_S = 5
+    # Timeout for main ssh connection setup, in seconds.
+    DEFAULT_START_MAIN_SSH_TIMEOUT_S = 5
 
-    def _initialize(self, hostname, user="root", port=_DEFAULT_SSH_PORT,
-                    password="", is_client_install_supported=True,
-                    afe_host=None, host_info_store=None, connection_pool=None,
-                    *args, **dargs):
+    def _initialize(self,
+                    hostname,
+                    user="root",
+                    port=_DEFAULT_SSH_PORT,
+                    password="",
+                    is_client_install_supported=True,
+                    afe_host=None,
+                    host_info_store=None,
+                    connection_pool=None,
+                    *args,
+                    **dargs):
         super(AbstractSSHHost, self)._initialize(hostname=hostname,
                                                  *args, **dargs)
         """
@@ -77,7 +98,7 @@
         @param host_info_store: Optional host_info.CachingHostInfoStore object
                 to obtain / update host information.
         @param connection_pool: ssh_multiplex.ConnectionPool instance to share
-                the master ssh connection across control scripts.
+                the main ssh connection across control scripts.
         """
         self._track_class_usage()
         # IP address is retrieved only on demand. Otherwise the host
@@ -90,17 +111,31 @@
         self._use_rsync = None
         self.known_hosts_file = tempfile.mkstemp()[1]
         self._rpc_server_tracker = rpc_server_tracker.RpcServerTracker(self);
+        self._tls_exec_dut_command_client = None
+        self._tls_unstable = False
 
+        # Read the value of the use_icmp flag, setting to true if missing.
+        args_string = autoserv_parser.autoserv_parser.options.args
+        args_dict = utils.args_to_dict(
+                args_string.split() if args_string is not None else '')
+        value = args_dict.get('use_icmp', 'true').lower()
+        if value == 'true':
+            self._use_icmp = True
+        elif value == 'false':
+            self._use_icmp = False
+        else:
+            raise ValueError(
+                    'use_icmp must be true or false: {}'.format(value))
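+        # Hypothetical usage: passing "use_icmp=false" through the autoserv
+        # --args flag (e.g. via test_that --args) disables ICMP pings for
+        # this host.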
         """
-        Master SSH connection background job, socket temp directory and socket
-        control path option. If master-SSH is enabled, these fields will be
-        initialized by start_master_ssh when a new SSH connection is initiated.
+        Main SSH connection background job, socket temp directory and socket
+        control path option. If main-SSH is enabled, these fields will be
+        initialized by start_main_ssh when a new SSH connection is initiated.
         """
         self._connection_pool = connection_pool
         if connection_pool:
-            self._master_ssh = connection_pool.get(hostname, user, port)
+            self._main_ssh = connection_pool.get(hostname, user, port)
         else:
-            self._master_ssh = ssh_multiplex.MasterSsh(hostname, user, port)
+            self._main_ssh = ssh_multiplex.MainSsh(hostname, user, port)
 
         self._afe_host = afe_host or utils.EmptyAFEHost()
         self.host_info_store = (host_info_store or
@@ -129,6 +164,16 @@
         """
         return self._is_client_install_supported
 
+    def is_satlab(self):
+        """Determine if the host is part of satlab
+
+        TODO(otabek@): Remove or update to better logic to determine Satlab.
+
+        @returns True if this host is running under Satlab, otherwise False.
+        """
+        if not hasattr(self, '_is_satlab'):
+            self._is_satlab = self.hostname.startswith('satlab-')
+        return self._is_satlab
 
     @property
     def rpc_server_tracker(self):
@@ -141,7 +186,7 @@
     @property
     def is_default_port(self):
         """Returns True if its port is default SSH port."""
-        return self.port == _DEFAULT_SSH_PORT
+        return self.port == _DEFAULT_SSH_PORT or self.port is None
 
     @property
     def host_port(self):
@@ -152,6 +197,11 @@
         else:
             return '%s:%d' % (self.hostname, self.port)
 
+    @property
+    def use_icmp(self):
+        """Returns True if icmp pings are allowed."""
+        return self._use_icmp
+
 
     # Though it doesn't use self here, it is not declared as staticmethod
     # because its subclass may use self to access member variables.
@@ -165,7 +215,8 @@
                 hosts_file=hosts_file, connect_timeout=connect_timeout,
                 alive_interval=alive_interval, alive_count_max=alive_count_max,
                 connection_attempts=connection_attempts)])
-        return "/usr/bin/ssh -a -x %s -l %s -p %d" % (ssh_options, user, port)
+        return ("/usr/bin/ssh -a -x %s -l %s %s" %
+                (ssh_options, user, "-p %d " % port if port else ""))
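+        # Illustrative result: with port=22 this builds
+        # "/usr/bin/ssh -a -x <ssh_options> -l <user> -p 22 "; with port=None
+        # the "-p" option is omitted entirely.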
 
 
     @staticmethod
@@ -250,7 +301,7 @@
                       safe_symlinks=False, excludes=None):
         """Obtains rsync options for the remote."""
         ssh_cmd = self.make_ssh_command(user=self.user, port=self.port,
-                                        opts=self._master_ssh.ssh_option,
+                                        opts=self._main_ssh.ssh_option,
                                         hosts_file=self.known_hosts_file)
         if delete_dest:
             delete_flag = "--delete"
@@ -289,7 +340,7 @@
         to run commands directly on the machine
         """
         base_cmd = self.make_ssh_command(user=self.user, port=self.port,
-                                         opts=self._master_ssh.ssh_option,
+                                         opts=self._main_ssh.ssh_option,
                                          hosts_file=self.known_hosts_file)
 
         return '%s %s "%s"' % (base_cmd, self.hostname, utils.sh_escape(cmd))
@@ -301,9 +352,10 @@
         pre-encoded.
         """
         command = ("scp -rq %s -o StrictHostKeyChecking=no "
-                   "-o UserKnownHostsFile=%s -P %d %s '%s'")
-        return command % (self._master_ssh.ssh_option, self.known_hosts_file,
-                          self.port, sources, dest)
+                   "-o UserKnownHostsFile=%s %s%s '%s'")
+        return command % (self._main_ssh.ssh_option, self.known_hosts_file,
+                          "-P %d " % self.port if self.port else '', sources,
+                          dest)
 
 
     def _make_rsync_compatible_globs(self, path, is_local):
@@ -438,8 +490,8 @@
                       'preserve_perm: %s, preserve_symlinks:%s', source, dest,
                       delete_dest, preserve_perm, preserve_symlinks)
 
-        # Start a master SSH connection if necessary.
-        self.start_master_ssh()
+        # Start a main SSH connection if necessary.
+        self.start_main_ssh()
 
         if isinstance(source, six.string_types):
             source = [source]
@@ -544,12 +596,18 @@
         logging.debug('send_file. source: %s, dest: %s, delete_dest: %s,'
                       'preserve_symlinks:%s', source, dest,
                       delete_dest, preserve_symlinks)
-        # Start a master SSH connection if necessary.
-        self.start_master_ssh()
+        # Start a main SSH connection if necessary.
+        self.start_main_ssh()
 
         if isinstance(source, six.string_types):
             source = [source]
 
+        client_symlink = _client_symlink(source)
+        # The client symlink *must* be preserved, and should not be sent with
+        # the main send_file in case scp is used, which does not support symlink
+        if client_symlink:
+            source.remove(client_symlink)
+
         local_sources = self._encode_local_paths(source)
         if not local_sources:
             raise error.TestError('source |%s| yielded an empty string' % (
@@ -557,47 +615,148 @@
         if local_sources.find('\x00') != -1:
             raise error.TestError('one or more sources include NUL char')
 
-        # If rsync is disabled or fails, try scp.
-        try_scp = True
+        self._send_file(
+                dest=dest,
+                source=source,
+                local_sources=local_sources,
+                delete_dest=delete_dest,
+                excludes=excludes,
+                preserve_symlinks=preserve_symlinks)
+
+        # Send the client symlink after the rest of the autotest repo has been
+        # sent.
+        if client_symlink:
+            self._send_client_symlink(dest=dest,
+                                      source=[client_symlink],
+                                      local_sources=client_symlink,
+                                      delete_dest=delete_dest,
+                                      excludes=excludes,
+                                      preserve_symlinks=True)
+
+    def _send_client_symlink(self, dest, source, local_sources, delete_dest,
+                             excludes, preserve_symlinks):
         if self.use_rsync():
-            logging.debug('Using Rsync.')
-            remote_dest = self._encode_remote_paths([dest])
+            if self._send_using_rsync(dest=dest,
+                                      local_sources=local_sources,
+                                      delete_dest=delete_dest,
+                                      preserve_symlinks=preserve_symlinks,
+                                      excludes=excludes):
+                return
+        # Manually create the symlink if rsync is not available, or fails.
+        try:
+            self.run('mkdir {f} && touch {f}/__init__.py && cd {f} && '
+                     'ln -s ../ client'.format(
+                             f=os.path.join(dest, 'autotest_lib')))
+        except Exception as e:
+            raise error.AutotestHostRunError(
+                    "Could not create client symlink on host: %s" % e)
+
+    def _send_file(self, dest, source, local_sources, delete_dest, excludes,
+                   preserve_symlinks):
+        """Send file(s), trying rsync first, then scp."""
+        if self.use_rsync():
+            rsync_success = self._send_using_rsync(
+                    dest=dest,
+                    local_sources=local_sources,
+                    delete_dest=delete_dest,
+                    preserve_symlinks=preserve_symlinks,
+                    excludes=excludes)
+            if rsync_success:
+                return
+
+        # Send using scp if you cannot via rsync, or rsync fails.
+        self._send_using_scp(dest=dest,
+                             source=source,
+                             delete_dest=delete_dest,
+                             excludes=excludes)
+
+    def _send_using_rsync(self, dest, local_sources, delete_dest,
+                          preserve_symlinks, excludes):
+        """Send using rsync.
+
+        Args:
+            dest: a file or a directory (if source contains a
+                    directory or more than one element, you must
+                    supply a directory dest)
+            local_sources: a string of files/dirs to send separated with spaces
+            delete_dest: if this is true, the command will also clear
+                         out any old files at dest that are not in the
+                         source
+            preserve_symlinks: controls if symlinks on the source will be
+                copied as such on the destination or transformed into the
+                referenced file/directory
+            excludes: A list of file patterns matching files not to be
+                      sent. `send_file` will fail if excludes is set, since
+                      local copy does not support --exclude, e.g., when
+                      using scp to copy files.
+        Returns:
+            bool: True if the cmd succeeded, else False
+
+        """
+        logging.debug('Using Rsync.')
+        remote_dest = self._encode_remote_paths([dest])
+        try:
+            rsync = self._make_rsync_cmd(local_sources,
+                                         remote_dest,
+                                         delete_dest,
+                                         preserve_symlinks,
+                                         False,
+                                         excludes=excludes)
+            utils.run(rsync)
+            return True
+        except error.CmdError as e:
+            logging.warning("trying scp, rsync failed: %s", e)
+        return False
+
+    def _send_using_scp(self, dest, source, delete_dest, excludes):
+        """Send using scp.
+
+        Args:
+                source: either
+                        1) a single file or directory, as a string
+                        2) a list of one or more (possibly mixed)
+                                files or directories
+                dest: a file or a directory (if source contains a
+                        directory or more than one element, you must
+                        supply a directory dest)
+                delete_dest: if this is true, the command will also clear
+                             out any old files at dest that are not in the
+                             source
+                excludes: A list of file patterns matching files not to be
+                          sent. `send_file` will fail if excludes is set, since
+                          local copy does not support --exclude, e.g., when
+                          using scp to copy files.
+
+        Raises:
+                AutoservRunError: the scp command failed
+        """
+        logging.debug('Trying scp.')
+        if excludes:
+            raise error.AutotestHostRunError(
+                    '--exclude is not supported in scp, try to use rsync. '
+                    'excludes: %s' % ','.join(excludes), None)
+
+        # scp has no equivalent to --delete, just drop the entire dest dir
+        if delete_dest:
+            is_dir = self.run("ls -d %s/" % dest,
+                              ignore_status=True).exit_status == 0
+            if is_dir:
+                cmd = "rm -rf %s && mkdir %s"
+                cmd %= (dest, dest)
+                self.run(cmd)
+
+        remote_dest = self._encode_remote_paths([dest], use_scp=True)
+        local_sources = self._make_rsync_compatible_source(source, True)
+        if local_sources:
+            sources = self._encode_local_paths(local_sources, escape=False)
+            scp = self._make_scp_cmd(sources, remote_dest)
             try:
-                rsync = self._make_rsync_cmd(local_sources, remote_dest,
-                                             delete_dest, preserve_symlinks,
-                                             False, excludes=excludes)
-                utils.run(rsync)
-                try_scp = False
+                utils.run(scp)
             except error.CmdError as e:
-                logging.warning("trying scp, rsync failed: %s", e)
-
-        if try_scp:
-            logging.debug('Trying scp.')
-            if excludes:
-                raise error.AutotestHostRunError(
-                        '--exclude is not supported in scp, try to use rsync. '
-                        'excludes: %s' % ','.join(excludes), None)
-            # scp has no equivalent to --delete, just drop the entire dest dir
-            if delete_dest:
-                is_dir = self.run("ls -d %s/" % dest,
-                                  ignore_status=True).exit_status == 0
-                if is_dir:
-                    cmd = "rm -rf %s && mkdir %s"
-                    cmd %= (dest, dest)
-                    self.run(cmd)
-
-            remote_dest = self._encode_remote_paths([dest], use_scp=True)
-            local_sources = self._make_rsync_compatible_source(source, True)
-            if local_sources:
-                sources = self._encode_local_paths(local_sources, escape=False)
-                scp = self._make_scp_cmd(sources, remote_dest)
-                try:
-                    utils.run(scp)
-                except error.CmdError as e:
-                    logging.debug('scp failed: %s', e)
-                    raise error.AutoservRunError(e.args[0], e.args[1])
-            else:
-                logging.debug('skipping scp for empty source list')
+                logging.debug('scp failed: %s', e)
+                raise error.AutoservRunError(e.args[0], e.args[1])
+        else:
+            logging.debug('skipping scp for empty source list')
 
     def verify_ssh_user_access(self):
         """Verify ssh access to this host.
@@ -670,14 +829,30 @@
         @param count How many time try to ping before decide that host is not
                     reachable by ping.
         """
+        if not self._use_icmp:
+            stack = self._get_server_stack_state(lowest_frames=1,
+                                                 highest_frames=7)
+            logging.warning("is_up_fast called with icmp disabled from %s!",
+                            stack)
+            return True
         ping_config = ping_runner.PingConfig(self.hostname,
-                                             count=count,
+                                             count=1,
                                              ignore_result=True,
                                              ignore_status=True)
-        return ping_runner.PingRunner().ping(ping_config).received > 0
+
+        # Run up to the amount specified, but also exit as soon as the first
+        # reply is found.
+        loops_remaining = count
+        while loops_remaining > 0:
+            loops_remaining -= 1
+            if ping_runner.PingRunner().ping(ping_config).received > 0:
+                return True
+        return False
 
 
-    def wait_up(self, timeout=_DEFAULT_WAIT_UP_TIME_SECONDS):
+    def wait_up(self,
+                timeout=_DEFAULT_WAIT_UP_TIME_SECONDS,
+                host_is_down=False):
         """
         Wait until the remote host is up or the timeout expires.
 
@@ -686,18 +861,34 @@
 
         @param timeout time limit in seconds before returning even
             if the host is not up.
+        @param host_is_down set to True if the host is known to be down before
+            wait_up.
 
         @returns True if the host was found to be up before the timeout expires,
                  False otherwise
         """
+        if host_is_down:
+            # Since we expect the host to be down when this is called, close
+            # any existing main ssh connection.
+            self.close_main_ssh()
         current_time = int(time.time())
         end_time = current_time + timeout
 
+        ssh_success_logged = False
         autoserv_error_logged = False
         while current_time < end_time:
             ping_timeout = min(_DEFAULT_MAX_PING_TIMEOUT,
                                end_time - current_time)
             if self.is_up(timeout=ping_timeout, connect_timeout=ping_timeout):
+                if not ssh_success_logged:
+                    logging.debug('Successfully pinged host %s',
+                                  self.host_port)
+                    wait_procs = self.get_wait_up_processes()
+                    if wait_procs:
+                        logging.debug('Waiting for processes: %s', wait_procs)
+                    else:
+                        logging.debug('No wait_up processes to wait for')
+                    ssh_success_logged = True
                 try:
                     if self.are_wait_up_processes_up():
                         logging.debug('Host %s is now up', self.host_port)
@@ -814,7 +1005,7 @@
     def verify_connectivity(self):
         super(AbstractSSHHost, self).verify_connectivity()
 
-        logging.info('Pinging host ' + self.host_port)
+        logging.info('Pinging host %s', self.host_port)
         self.ssh_ping()
         logging.info("Host (ssh) %s is alive", self.host_port)
 
@@ -834,43 +1025,83 @@
             logging.exception('autodir space check exception, this is probably '
                              'safe to ignore\n')
 
-
     def close(self):
         super(AbstractSSHHost, self).close()
         self.rpc_server_tracker.disconnect_all()
         if not self._connection_pool:
-            self._master_ssh.close()
+            self._main_ssh.close()
         if os.path.exists(self.known_hosts_file):
             os.remove(self.known_hosts_file)
+        self.tls_exec_dut_command = None
 
+    def close_main_ssh(self):
+        """Stop the ssh main connection.
 
-    def restart_master_ssh(self):
+        Intended for situations when the host is known to be down and we don't
+        need an ssh timeout to tell us it is down. For example, if you just
+        instructed the host to shut down or hibernate.
         """
-        Stop and restart the ssh master connection.  This is meant as a last
+        logging.debug("Stopping main ssh connection")
+        self._main_ssh.close()
+
+    def restart_main_ssh(self):
+        """
+        Stop and restart the ssh main connection.  This is meant as a last
         resort when ssh commands fail and we don't understand why.
         """
-        logging.debug('Restarting master ssh connection')
-        self._master_ssh.close()
-        self._master_ssh.maybe_start(timeout=30)
+        logging.debug("Restarting main ssh connection")
+        self._main_ssh.close()
+        self._main_ssh.maybe_start(timeout=30)
 
-
-
-    def start_master_ssh(self, timeout=DEFAULT_START_MASTER_SSH_TIMEOUT_S):
+    def start_main_ssh(self, timeout=DEFAULT_START_MAIN_SSH_TIMEOUT_S):
         """
-        Called whenever a slave SSH connection needs to be initiated (e.g., by
-        run, rsync, scp). If master SSH support is enabled and a master SSH
+        Called whenever a non-main SSH connection needs to be initiated (e.g.,
+        by run, rsync, scp). If main SSH support is enabled and a main SSH
         connection is not active already, start a new one in the background.
-        Also, cleanup any zombie master SSH connections (e.g., dead due to
+        Also, cleanup any zombie main SSH connections (e.g., dead due to
         reboot).
 
-        timeout: timeout in seconds (default 5) to wait for master ssh
+        timeout: timeout in seconds (default 5) to wait for main ssh
                  connection to be established. If timeout is reached, a
                  warning message is logged, but no other action is taken.
         """
-        if not enable_master_ssh:
+        if not enable_main_ssh:
             return
-        self._master_ssh.maybe_start(timeout=timeout)
+        self._main_ssh.maybe_start(timeout=timeout)
 
+    @property
+    def tls_unstable(self):
+        # A single test will rebuild the remote many times. It's safe to
+        # assume that if TLS is unstable for one try, it will be for others.
+        # If we checked each time, it would add ~60 seconds per test (when
+        # TLS is dead).
+        if os.getenv('TLS_UNSTABLE'):
+            return bool(os.getenv('TLS_UNSTABLE'))
+        if self._tls_unstable is not None:
+            return self._tls_unstable
+
+    @tls_unstable.setter
+    def tls_unstable(self, v):
+        if not isinstance(v, bool):
+            raise error.AutoservError(
+                    'tls_unstable setting must be bool, got %s' % (type(v)))
+        os.environ['TLS_UNSTABLE'] = str(v)
+        self._tls_unstable = v
+
+    @property
+    def tls_exec_dut_command_client(self):
+        # TLS must be enabled and considered stable before using the client.
+        if not ENABLE_EXEC_DUT_COMMAND:
+            return None
+        if self.tls_unstable:
+            return None
+        # If the client is already initialized, return it.
+        if self._tls_exec_dut_command_client is not None:
+            return self._tls_exec_dut_command_client
+        # If the TLS connection is alive, create a new client.
+        if self.tls_connection is None:
+            return None
+        return exec_dut_command.TLSExecDutCommandClient(
+            tlsconnection=self.tls_connection,
+            hostname=self.hostname)
 
     def clear_known_hosts(self):
         """Clears out the temporary ssh known_hosts file.
@@ -1015,6 +1246,9 @@
         # * cached status is False, so the method can check if the host is up
         #   again.
         # * If the cached status is older than `expiration_seconds`
+        # If we have icmp disabled, treat that as a cached ping.
+        if not self._use_icmp:
+            return True
         expire_time = time.time() - expiration_seconds
         if (self._cached_up_status_updated is None or
                 not self._cached_up_status or
@@ -1051,3 +1285,11 @@
                           timeout=30,
                           ignore_status=True)
         return result.exit_status == 0
+
+
+def _client_symlink(sources):
+    """Return the client symlink if in sources."""
+    for source in sources:
+        if source.endswith(AUTOTEST_CLIENT_SYMLINK_END):
+            return source
+    return None
diff --git a/server/hosts/afe_store_unittest.py b/server/hosts/afe_store_unittest.py
index bb7cdd0..6168feb 100644
--- a/server/hosts/afe_store_unittest.py
+++ b/server/hosts/afe_store_unittest.py
@@ -7,8 +7,8 @@
 from __future__ import division
 from __future__ import print_function
 
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.frontend.afe.json_rpc import proxy as rpc_proxy
@@ -126,7 +126,7 @@
 
 
     def test_str(self):
-        """Sanity tests the __str__ implementaiton"""
+        """Tests the __str__ implementaiton"""
         self.assertEqual(str(self.store), 'AfeStore[some-host]')
 
 
diff --git a/server/hosts/android_constants.py b/server/hosts/android_constants.py
new file mode 100644
index 0000000..8646d45
--- /dev/null
+++ b/server/hosts/android_constants.py
@@ -0,0 +1,14 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+ANDROID_PHONE_STATION_ATTR = 'phone_station'
+ANDROID_PHONE_STATION_SSH_PORT_ATTR = 'phone_station_ssh_port'
+ANDROID_SERIAL_NUMBER_ATTR = 'android_serial'
+
+ALL_ANDROID_ATTRS = (ANDROID_PHONE_STATION_ATTR,
+                     ANDROID_PHONE_STATION_SSH_PORT_ATTR,
+                     ANDROID_SERIAL_NUMBER_ATTR)
+
+CRITICAL_ANDROID_ATTRS = (ANDROID_PHONE_STATION_ATTR,
+                          ANDROID_SERIAL_NUMBER_ATTR)
diff --git a/server/hosts/android_host.py b/server/hosts/android_host.py
new file mode 100644
index 0000000..ace1568
--- /dev/null
+++ b/server/hosts/android_host.py
@@ -0,0 +1,273 @@
+# Lint as: python2, python3
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Expects to be run in an environment with sudo and no interactive password
+# prompt, such as within the Chromium OS development chroot.
+
+import logging
+import socket
+
+import common
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.hosts import host_info
+from autotest_lib.server.hosts import attached_device_host
+from autotest_lib.server.hosts import android_constants
+from autotest_lib.server.hosts import base_classes
+
+
+class AndroidHost(base_classes.Host):
+    """Host class for Android devices"""
+    PHONE_STATION_LABEL_PREFIX = "associated_hostname"
+    SERIAL_NUMBER_LABEL_PREFIX = "serial_number"
+    # adb auth key path on the phone_station.
+    ADB_KEY_PATH = '/var/lib/android_keys'
+
+    def __init__(self,
+                 hostname,
+                 host_info_store=None,
+                 android_args=None,
+                 *args,
+                 **dargs):
+        """Construct a AndroidHost object.
+
+        Args:
+            hostname: Hostname of the Android phone.
+            host_info_store: Optional host_info.CachingHostInfoStore object
+                             to obtain / update host information.
+            android_args: Android args for local test run.
+        """
+        self.hostname = hostname
+        super(AndroidHost, self).__init__(*args, **dargs)
+        self.host_info_store = (host_info_store
+                                or host_info.InMemoryHostInfoStore())
+        self.associated_hostname = None
+        self.serial_number = None
+        self.phone_station_ssh_port = None
+        # For local test, android_args are passed in.
+        if android_args:
+            self._read_essential_data_from_args_dict(android_args)
+        else:
+            self._read_essential_data_from_host_info_store()
+        # Since we won't ssh into the Android device directly, all
+        # communication is handled by running the ADB CLI on the phone
+        # station (a chromebox or Linux machine) that is physically connected
+        # to the Android device via a USB cable. So we need to set up an
+        # AttachedDeviceHost for the phone station as an ssh proxy.
+        self.phone_station = self._create_phone_station_host_proxy()
+        self.adb_tcp_mode = False
+        self.usb_dev_path = None
+        self.closed = False
+
+    def _create_phone_station_host_proxy(self):
+        logging.info('Creating host for phone station %s',
+                     self.associated_hostname)
+        return attached_device_host.AttachedDeviceHost(
+                hostname=self.associated_hostname,
+                serial_number=self.serial_number,
+                phone_station_ssh_port=self.phone_station_ssh_port)
+
+    def _read_essential_data_from_args_dict(self, android_args):
+        self.associated_hostname = android_args.get(
+                android_constants.ANDROID_PHONE_STATION_ATTR)
+        self.phone_station_ssh_port = android_args.get(
+                android_constants.ANDROID_PHONE_STATION_SSH_PORT_ATTR)
+        self.serial_number = android_args.get(
+                android_constants.ANDROID_SERIAL_NUMBER_ATTR)
+
+    def _read_essential_data_from_host_info_store(self):
+        info = self.host_info_store.get()
+        self.associated_hostname = info.get_label_value(
+                self.PHONE_STATION_LABEL_PREFIX)
+        if not self.associated_hostname:
+            raise error.AutoservError(
+                    'Failed to initialize Android host because'
+                    ' associated_hostname was not found in host_info_store.')
+        self.serial_number = info.get_label_value(
+                self.SERIAL_NUMBER_LABEL_PREFIX)
+        if not self.serial_number:
+            raise error.AutoservError(
+                    'Failed to initialize Android host because'
+                    ' serial_number was not found in host_info_store.')
+
+    def adb_over_tcp(self, port=5555, persist_reboot=False):
+        """Restart adb server listening on a TCP port.
+
+        Args:
+            port: TCP port for the adb server to listen on; defaults to
+                  5555, which is the default TCP/IP port for adb.
+            persist_reboot: True for adb over tcp to continue listening
+                            after the device reboots.
+        """
+        port = str(port)
+        if persist_reboot:
+            self.run_adb_command('shell setprop persist.adb.tcp.port %s' %
+                                 port)
+            self.run_adb_command('shell setprop ctl.restart adbd')
+            self.wait_for_transport_state()
+
+        self.run_adb_command('tcpip %s' % port)
+        self.adb_tcp_mode = True
+
+    def cache_usb_dev_path(self):
+        """
+        Read and cache usb devpath for the Android device.
+        """
+        cmd = 'adb devices -l | grep %s' % self.serial_number
+        res = self.phone_station.run(cmd)
+        for line in res.stdout.strip().split('\n'):
+            if len(line.split()) > 2 and line.split()[1] == 'device':
+                self.usb_dev_path = line.split()[2]
+                logging.info('USB devpath: %s', self.usb_dev_path)
+                break
+        if not self.usb_dev_path:
+            logging.warning(
+                    'Failed to collect usbdev path of the Android device.')
+
+    def ensure_device_connectivity(self):
+        """Ensure we can interact with the Android device via adb and
+        the device is in the expected state.
+        """
+        res = self.run_adb_command('get-state')
+        state = res.stdout.strip()
+        logging.info('Android device state from adb: %s', state)
+        return state == 'device'
+
+    def get_wifi_ip_address(self):
+        """Get ipv4 address from the Android device"""
+        res = self.run_adb_command('shell ip route')
+        # An example response looks like: "192.168.86.0/24 dev wlan0"
+        # " proto kernel scope link src 192.168.86.22 \n"
+        ip_string = res.stdout.strip().split(' ')[-1]
+        logging.info('IP address collected from the Android device: %s',
+                     ip_string)
+        try:
+            socket.inet_aton(ip_string)
+        except (OSError, ValueError, socket.error):
+            raise error.AutoservError(
+                    'Failed to get ip address from the Android device.')
+        return ip_string
+
+    def job_start(self):
+        """This method is called from create_host factory when
+        construct the host object. We need to override it since actions
+        like copy /var/log/messages are not applicable on Android devices.
+        """
+        logging.info('Skip standard job_start actions for Android host.')
+
+    def restart_adb_server(self):
+        """Restart adb server from the phone station"""
+        self.stop_adb_server()
+        self.start_adb_server()
+
+    def run_adb_command(self, adb_command, ignore_status=False):
+        """Run adb command on the Android device.
+
+        Args:
+            adb_command: adb command to execute on the Android device.
+            ignore_status: Do not raise an exception, no matter what the
+                           exit code of the command is.
+
+        Returns:
+            An autotest_lib.client.common_lib.utils.CmdResult object.
+        """
+        # When using adb to interact with an Android device, we prefer the
+        # devpath to identify the particular device, as the serial number
+        # is not guaranteed to be unique.
+        if self.usb_dev_path:
+            command = 'adb -s %s %s' % (self.usb_dev_path, adb_command)
+        else:
+            command = 'adb -s %s %s' % (self.serial_number, adb_command)
+        return self.phone_station.run(command, ignore_status=ignore_status)
+
+    def wait_for_transport_state(self, transport='usb', state='device'):
+        """
+        Wait for a device to reach a desired state.
+
+        Args:
+            transport: usb, local, any
+            state: device, recovery, sideload, bootloader
+
+        """
+        self.run_adb_command('wait-for-%s-%s' % (transport, state))
+
+    def start_adb_server(self):
+        """Start adb server from the phone station."""
+        # Adb home is created upon CrOS login; however, on a labstation we
+        # never log in, so we need to ensure the adb home exists before
+        # starting the adb server.
+        self.phone_station.run("mkdir -p /run/arc/adb")
+        self.phone_station.run("ADB_VENDOR_KEYS=%s adb start-server" %
+                               self.ADB_KEY_PATH)
+        # Logging states of all attached devices.
+        self.phone_station.run('adb devices')
+
+    def stop_adb_server(self):
+        """Stop adb server from the phone station."""
+        self.phone_station.run("adb kill-server")
+
+    def setup_for_cross_device_tests(self, adb_persist_reboot=False):
+        """
+        Setup the Android phone for Cross Device tests.
+
+        Ensures the phone can connect to its labstation and sets up
+        adb-over-tcp.
+
+        Returns:
+            IP Address of Phone.
+        """
+        dut_out = self.phone_station.run('echo True').stdout.strip()
+        if dut_out != 'True':
+            raise error.TestError('phone station stdout != True (got: %s)' %
+                                  dut_out)
+
+        self.restart_adb_server()
+        self.cache_usb_dev_path()
+        self.ensure_device_connectivity()
+        ip_address = self.get_wifi_ip_address()
+        self.adb_over_tcp(persist_reboot=adb_persist_reboot)
+        return ip_address
+
+    def close(self):
+        """Clean up Android host and its phone station proxy host."""
+        if self.closed:
+            logging.debug('Android host %s already closed.', self.hostname)
+            return
+        try:
+            if self.adb_tcp_mode:
+                # In some rare cases, leaving the Android device in adb-over-tcp
+                # mode may break the USB connection, so we always reset adb
+                # to usb mode before teardown.
+                self.run_adb_command('usb', ignore_status=True)
+            self.stop_adb_server()
+            if self.phone_station:
+                self.phone_station.close()
+            self.closed = True
+        finally:
+            super(AndroidHost, self).close()
+
+    @staticmethod
+    def get_android_arguments(args_dict):
+        """Extract android args from `args_dict` and return the result.
+
+        Recommended usage in control file:
+            args_dict = utils.args_to_dict(args)
+            android_args = hosts.Android.get_android_arguments(args_dict)
+            host = hosts.create_host(machine, android_args=android_args)
+
+        Args:
+            args_dict: A dict of test args.
+
+        Returns:
+            A dict of Android-related args.
+        """
+        android_args = {
+                key: args_dict[key]
+                for key in android_constants.ALL_ANDROID_ATTRS
+                if key in args_dict
+        }
+        for attr in android_constants.CRITICAL_ANDROID_ATTRS:
+            if attr not in android_args or not android_args.get(attr):
+                raise error.AutoservError("Critical attribute %s is missing"
+                                          " from android_args." % attr)
+        return android_args
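
For context, a minimal local-run sketch (not part of the patch) following the recommended control-file usage from the docstring above; the hostnames, ports, and serial number are illustrative assumptions.

# Hypothetical local-run sketch; hostnames and serial are examples only.
import common
from autotest_lib.server.hosts import android_host

android_args = {
        'phone_station': 'chromeos1-row1-rack1-labstation',
        'phone_station_ssh_port': '22',
        'android_serial': '1234ABCD',
}
phone = android_host.AndroidHost('android-dut-1', android_args=android_args)
try:
    # Brings up adb on the phone station, caches the usb devpath, and
    # switches the device to adb-over-tcp; returns the phone's wifi IP.
    ip_address = phone.setup_for_cross_device_tests()
    result = phone.run_adb_command('shell getprop ro.build.version.release')
    print('Android %s at %s' % (result.stdout.strip(), ip_address))
finally:
    phone.close()
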
diff --git a/server/hosts/attached_device_host.py b/server/hosts/attached_device_host.py
new file mode 100644
index 0000000..3e351bb
--- /dev/null
+++ b/server/hosts/attached_device_host.py
@@ -0,0 +1,287 @@
+# Lint as: python2, python3
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Expects to be run in an environment with sudo and no interactive password
+# prompt, such as within the Chromium OS development chroot.
+"""This is the base host class for attached devices"""
+
+import logging
+import time
+
+import common
+
+from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.hosts import ssh_host
+
+
+class AttachedDeviceHost(ssh_host.SSHHost):
+    """Host class for all attached devices(e.g. Android)"""
+
+    # Since we currently use a labstation as the phone host, the labstation
+    # repair logic checks the /var/lib/servod/ path to make reboot decisions.
+    # TODO(b:226151633): use a separate path after adjusting the repair logic.
+    TEMP_FILE_DIR = '/var/lib/servod/'
+    LOCK_FILE_POSTFIX = "_in_use"
+    REBOOT_TIMEOUT_SECONDS = 240
+
+    def _initialize(self,
+                    hostname,
+                    serial_number,
+                    phone_station_ssh_port=None,
+                    *args,
+                    **dargs):
+        """Construct a AttachedDeviceHost object.
+
+        Args:
+            hostname: Hostname of the attached device host.
+            serial_number: USB serial number of the associated
+                           device (e.g. Android).
+            phone_station_ssh_port: Port for ssh to the phone station;
+                                    defaults to 22 if the value is None.
+        """
+        self.serial_number = serial_number
+        if phone_station_ssh_port:
+            dargs['port'] = int(phone_station_ssh_port)
+        super(AttachedDeviceHost, self)._initialize(hostname=hostname,
+                                                    *args,
+                                                    **dargs)
+
+        # When running a local test against a remote DUT in the lab, the user
+        # may use port forwarding to bypass the corp ssh relay. So the hostname
+        # may be localhost while the command is intended to run on a remote
+        # DUT; we can differentiate this by checking whether a non-default
+        # port is specified.
+        self._is_localhost = (self.hostname in {'localhost', "127.0.0.1"}
+                              and phone_station_ssh_port is None)
+        # Commands on the host must be run by the superuser.
+        # Our account on a remote host is root, but if our target is
+        # localhost then we might be running unprivileged.  If so,
+        # `sudo` will have to be added to the commands.
+        self._sudo_required = False
+        if self._is_localhost:
+            self._sudo_required = utils.system_output('id -u') != '0'
+
+        # We need to lock the attached device host to prevent other tasks
+        # from performing any disruptive actions (e.g. reboot), since the
+        # host can be shared by multiple devices.
+        self._is_locked = False
+        self._lock_file = (self.TEMP_FILE_DIR + self.serial_number +
+                           self.LOCK_FILE_POSTFIX)
+        if not self.wait_up(self.REBOOT_TIMEOUT_SECONDS):
+            raise error.AutoservError(
+                    'Attached device host %s is not reachable via ssh.' %
+                    self.hostname)
+        if not self._is_localhost:
+            self._lock()
+            self.wait_ready()
+
+    def _lock(self):
+        logging.debug('Locking host %s by touching %s file', self.hostname,
+                      self._lock_file)
+        self.run('mkdir -p %s' % self.TEMP_FILE_DIR)
+        self.run('touch %s' % self._lock_file)
+        self._is_locked = True
+
+    def _unlock(self):
+        logging.debug('Unlocking host by removing %s file', self._lock_file)
+        self.run('rm %s' % self._lock_file, ignore_status=True)
+        self._is_locked = False
+
+    def make_ssh_command(self,
+                         user='root',
+                         port=22,
+                         opts='',
+                         hosts_file=None,
+                         connect_timeout=None,
+                         alive_interval=None,
+                         alive_count_max=None,
+                         connection_attempts=None):
+        """Override default make_ssh_command to use tuned options.
+
+        Tuning changes:
+          - ConnectTimeout=30; maximum of 30 seconds allowed for an SSH
+          connection failure. Consistency with remote_access.py.
+
+          - ServerAliveInterval=180; which causes SSH to ping connection every
+          180 seconds. In conjunction with ServerAliveCountMax ensures
+          that if the connection dies, Autotest will bail out quickly.
+
+          - ServerAliveCountMax=3; consistency with remote_access.py.
+
+          - ConnectAttempts=4; reduce flakiness in connection errors;
+          consistency with remote_access.py.
+
+          - UserKnownHostsFile=/dev/null; we don't care about the keys.
+
+          - SSH protocol forced to 2; needed for ServerAliveInterval.
+
+        Args:
+            user: User name to use for the ssh connection.
+            port: Port on the target host to use for ssh connection.
+            opts: Additional options to the ssh command.
+            hosts_file: Ignored.
+            connect_timeout: Ignored.
+            alive_interval: Ignored.
+            alive_count_max: Ignored.
+            connection_attempts: Ignored.
+
+        Returns:
+            An ssh command with the requested settings.
+        """
+        options = ' '.join([opts, '-o Protocol=2'])
+        return super(AttachedDeviceHost,
+                     self).make_ssh_command(user=user,
+                                            port=port,
+                                            opts=options,
+                                            hosts_file='/dev/null',
+                                            connect_timeout=30,
+                                            alive_interval=180,
+                                            alive_count_max=3,
+                                            connection_attempts=4)
+
+    def _make_scp_cmd(self, sources, dest):
+        """Format scp command.
+
+        Given a list of source paths and a destination path, produces the
+        appropriate scp command for encoding it. Remote paths must be
+        pre-encoded. Overrides _make_scp_cmd in AbstractSSHHost
+        to allow additional ssh options.
+
+        Args:
+            sources: A list of source paths to copy from.
+            dest: Destination path to copy to.
+
+        Returns:
+            An scp command that copies |sources| on local machine to
+            |dest| on the remote host.
+        """
+        command = ('scp -rq %s -o BatchMode=yes -o StrictHostKeyChecking=no '
+                   '-o UserKnownHostsFile=/dev/null %s %s "%s"')
+        port = self.port
+        if port is None:
+            logging.info('AttachedDeviceHost: defaulting to port 22.'
+                         ' See b/204502754.')
+            port = 22
+        args = (
+                self._main_ssh.ssh_option,
+                ("-P %s" % port),
+                sources,
+                dest,
+        )
+        return command % args
+
+    def run(self,
+            command,
+            timeout=3600,
+            ignore_status=False,
+            stdout_tee=utils.TEE_TO_LOGS,
+            stderr_tee=utils.TEE_TO_LOGS,
+            connect_timeout=30,
+            ssh_failure_retry_ok=False,
+            options='',
+            stdin=None,
+            verbose=True,
+            args=()):
+        """Run a command on the attached device host.
+
+        Extends method `run` in SSHHost. If the host is a remote device,
+        it will call `run` in SSHHost without changing anything.
+        If the host is 'localhost', it will run the command locally via
+        utils.run instead.
+
+        Args:
+            command: The command line string.
+            timeout: Time limit in seconds before attempting to
+                     kill the running process. The run() function
+                     will take a few seconds longer than 'timeout'
+                     to complete if it has to kill the process.
+            ignore_status: Do not raise an exception, no matter
+                           what the exit code of the command is.
+            stdout_tee: Where to tee the stdout.
+            stderr_tee: Where to tee the stderr.
+            connect_timeout: SSH connection timeout (in seconds)
+                             Ignored if host is 'localhost'.
+            options: String with additional ssh command options
+                     Ignored if host is 'localhost'.
+            ssh_failure_retry_ok: when True and ssh connection failure is
+                                  suspected, OK to retry command (but not
+                                  compulsory, and likely not needed here)
+            stdin: Stdin to pass (a string) to the executed command.
+            verbose: Log the commands.
+            args: Sequence of strings to pass as arguments to command by
+                  quoting them in " and escaping their contents if
+                  necessary.
+
+        Returns:
+            A utils.CmdResult object.
+
+        Raises:
+            AutoservRunError: If the command failed.
+            AutoservSSHTimeout: SSH connection has timed out. Only applies
+                                when the host is not 'localhost'.
+        """
+        run_args = {
+                'command': command,
+                'timeout': timeout,
+                'ignore_status': ignore_status,
+                'stdout_tee': stdout_tee,
+                'stderr_tee': stderr_tee,
+                # connect_timeout     n/a for localhost
+                # options             n/a for localhost
+                # ssh_failure_retry_ok n/a for localhost
+                'stdin': stdin,
+                'verbose': verbose,
+                'args': args,
+        }
+        if self._is_localhost:
+            if self._sudo_required:
+                run_args['command'] = 'sudo -n sh -c "%s"' % utils.sh_escape(
+                        command)
+            try:
+                return utils.run(**run_args)
+            except error.CmdError as e:
+                logging.error(e)
+                raise error.AutoservRunError('command execution error',
+                                             e.result_obj)
+        else:
+            run_args['connect_timeout'] = connect_timeout
+            run_args['options'] = options
+            run_args['ssh_failure_retry_ok'] = ssh_failure_retry_ok
+            return super(AttachedDeviceHost, self).run(**run_args)
+
+    def wait_ready(self, required_uptime=300):
+        """Wait ready for the host if it has been rebooted recently.
+
+        It may take a few minutes for the system and usb components to
+        re-enumerate and become ready after an attached device reboot,
+        so we need to make sure the host has been up for a given amount
+        of time before trying to start any actions.
+
+        Args:
+            required_uptime: Minimum uptime in seconds before we can
+                             consider an attached device host ready.
+        """
+        uptime = float(self.check_uptime())
+        # To prevent unexpected output from check_uptime() that causes long
+        # sleep, make sure the maximum wait time <= required_uptime.
+        diff = min(required_uptime - uptime, required_uptime)
+        if diff > 0:
+            logging.info(
+                    'The attached device host was just rebooted, waiting %s'
+                    ' seconds for all system services to be ready and usb'
+                    ' components to re-enumerate.', diff)
+            # TODO(b:226401363): Use a poll to ensure all dependencies are ready.
+            time.sleep(diff)
+
+    def close(self):
+        try:
+            if self._is_locked:
+                self._unlock()
+        except error.AutoservSSHTimeout:
+            logging.error('Unlocking the attached device host failed due to'
+                          ' an ssh timeout. It may be caused by the host'
+                          ' going down during the task.')
+        finally:
+            super(AttachedDeviceHost, self).close()
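
A short sketch (not part of the patch) of using this proxy host directly, as AndroidHost does above; the labstation hostname and serial number are hypothetical.

# Hypothetical sketch; the host is locked via a <serial>_in_use file under
# TEMP_FILE_DIR for the lifetime of the object and unlocked again in close().
import logging

import common
from autotest_lib.server.hosts import attached_device_host

station = attached_device_host.AttachedDeviceHost(
        hostname='chromeos1-row1-rack1-labstation',
        serial_number='1234ABCD')
try:
    # Commands run over ssh on the phone station, not on the Android device.
    result = station.run('adb devices -l', ignore_status=True)
    logging.info('attached devices:\n%s', result.stdout.strip())
finally:
    station.close()
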
diff --git a/server/hosts/base_classes_unittest.py b/server/hosts/base_classes_unittest.py
index 2a9f787..4861554 100755
--- a/server/hosts/base_classes_unittest.py
+++ b/server/hosts/base_classes_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # pylint: disable=missing-docstring
 
 import unittest
@@ -36,7 +36,7 @@
 
     def test_install(self):
         host = base_classes.Host()
-        # create a dummy installable class
+        # create a stub installable class
         class installable(object):
             def install(self, host):
                 pass
diff --git a/server/hosts/base_label_unittest.py b/server/hosts/base_label_unittest.py
index 2a6db21..c41d300 100755
--- a/server/hosts/base_label_unittest.py
+++ b/server/hosts/base_label_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/hosts/base_servohost.py b/server/hosts/base_servohost.py
index 77e0861..247587a 100644
--- a/server/hosts/base_servohost.py
+++ b/server/hosts/base_servohost.py
@@ -17,6 +17,12 @@
 import time
 import os
 
+try:
+    import docker
+    from autotest_lib.site_utils.docker import utils as docker_utils
+except ImportError:
+    logging.info("Docker API is not installed in this environment")
+
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import autotest_enum
 from autotest_lib.client.common_lib import error
@@ -53,7 +59,13 @@
     UPDATE_STATE = autotest_enum.AutotestEnum('IDLE', 'RUNNING',
                                               'PENDING_REBOOT')
 
-    def _initialize(self, hostname, is_in_lab=None, *args, **dargs):
+    def _initialize(self,
+                    hostname,
+                    is_in_lab=None,
+                    servo_host_ssh_port=None,
+                    servod_docker=None,
+                    *args,
+                    **dargs):
         """Construct a BaseServoHost object.
 
         @param is_in_lab: True if the servo host is in Cros Lab. Default is set
@@ -61,13 +73,29 @@
                           called to check if the servo host is in Cros lab.
 
         """
+        if servo_host_ssh_port is not None:
+            dargs['port'] = int(servo_host_ssh_port)
+
         super(BaseServoHost, self)._initialize(hostname=hostname,
                                                *args, **dargs)
-        self._is_localhost = (self.hostname == 'localhost')
-        if self._is_localhost:
+
+        self.servod_container_name = None
+        self._is_containerized_servod = False
+        if bool(servod_docker):
+            self._is_containerized_servod = True
+            self.servod_container_name = servod_docker
+        elif self.hostname.endswith('docker_servod'):
+            # For backward compatibility
+            self.servod_container_name = self.hostname
+            self._is_containerized_servod = True
+
+        self._is_localhost = (self.hostname == 'localhost'
+                              and servo_host_ssh_port is None)
+        if self._is_localhost or self._is_containerized_servod:
             self._is_in_lab = False
         elif is_in_lab is None:
-            self._is_in_lab = utils.host_is_in_lab_zone(self.hostname)
+            self._is_in_lab = (utils.host_is_in_lab_zone(self.hostname)
+                               or self.is_satlab())
         else:
             self._is_in_lab = is_in_lab
 
@@ -130,9 +158,21 @@
 
         @returns True if ths host is a labstation otherwise False.
         """
+        if self.is_containerized_servod():
+            return False
+
         if self._is_labstation is None:
-            board = self.get_board()
-            self._is_labstation = board is not None and 'labstation' in board
+            if 'labstation' in self.hostname:
+                logging.info('Based on hostname, the servohost is'
+                             ' a labstation.')
+                self._is_labstation = True
+            else:
+                logging.info(
+                        'Cannot determine if %s is a labstation from'
+                        ' hostname, getting board info from the'
+                        ' servohost.', self.hostname)
+                board = self.get_board()
+                self._is_labstation = bool(board) and 'labstation' in board
 
         return self._is_labstation
 
@@ -213,12 +253,22 @@
         return self._is_localhost
 
 
+    def is_containerized_servod(self):
+        """Checks whether the servo host is a containerized servod.
+
+        @returns: True if using containerized servod, otherwise False.
+
+        """
+        return self._is_containerized_servod
+
     def is_cros_host(self):
         """Check if a servo host is running chromeos.
 
         @return: True if the servo host is running chromeos.
             False if it isn't, or we don't have enough information.
         """
+        if self.is_containerized_servod():
+            return False
         try:
             result = self.run('grep -q CHROMEOS /etc/lsb-release',
                               ignore_status=True, timeout=10)
@@ -245,12 +295,12 @@
         """Update the image on the servo host, if needed.
 
         This method recognizes the following cases:
-          * If the Host is not running Chrome OS, do nothing.
+          * If the Host is not running ChromeOS, do nothing.
           * If a previously triggered update is now complete, reboot
             to the new version.
           * If the host is processing an update do nothing.
           * If the host has an update that pending on reboot, do nothing.
-          * If the host is running a version of Chrome OS different
+          * If the host is running a version of ChromeOS different
             from the default for servo Hosts, start an update.
 
         @stable_version the target build number.(e.g. R82-12900.0.0)
@@ -394,6 +444,18 @@
 
     def _servo_host_reboot(self):
         """Reboot this servo host because a reboot is requested."""
+        try:
+            # TODO(otabek) remove once a fix for b/174514811 is found.
+            # The default factory firmware remembers the latest chromebox
+            # state after power off. If the box was in sleep mode before the
+            # break, it will stay in sleep mode after power on. Disabling
+            # the power manager makes the chromebox always boot when we
+            # deliver power to the device.
+            logging.info('Stopping powerd service on device')
+            self.run('stop powerd', ignore_status=True, timeout=30)
+        except Exception as e:
+            logging.debug('(Not critical) Failed to stop powerd; %s', e)
+
         logging.info('Rebooting servo host %s from build %s', self.hostname,
                      self.get_release_version())
         # Tell the reboot() call not to wait for completion.
@@ -410,7 +472,7 @@
         # with the logging bits ripped out, so that they can't cause
         # the failure logging problem described above.
         #
-        # The black stain that this has left on my soul can never be
+        # The stain that this has left on my soul can never be
         # erased.
         old_boot_id = self.get_boot_id()
         if not self.wait_down(timeout=self.WAIT_DOWN_REBOOT_TIMEOUT,
@@ -485,9 +547,18 @@
 
         """
         command = ('scp -rq %s -o BatchMode=yes -o StrictHostKeyChecking=no '
-                   '-o UserKnownHostsFile=/dev/null -P %d %s "%s"')
-        return command % (self._master_ssh.ssh_option,
-                          self.port, sources, dest)
+                   '-o UserKnownHostsFile=/dev/null %s %s "%s"')
+        port = self.port
+        if port is None:
+            logging.info('BaseServoHost: defaulting to port 22. See b/204502754.')
+            port = 22
+        args = (
+            self._main_ssh.ssh_option,
+            ("-P %s" % port),
+            sources,
+            dest,
+        )
+        return command % args
 
 
     def run(self, command, timeout=3600, ignore_status=False,
@@ -540,10 +611,30 @@
             'verbose'             : verbose,
             'args'                : args,
         }
-        if self.is_localhost():
+        if self.is_containerized_servod():
+            logging.debug("Trying to run the command %s", command)
+            client = docker_utils.get_docker_client(timeout=timeout)
+            container = client.containers.get(self.servod_container_name)
+            try:
+                (exit_code,
+                 output) = container.exec_run("bash -c '%s'" % command)
+                # b/217780680: make this compatible with python3.
+                if isinstance(output, bytes):
+                    output = output.decode(errors='replace')
+            except docker.errors.APIError:
+                logging.exception("Failed to run command %s", command)
+                for line in container.logs().split(b'\n'):
+                    logging.error(line)
+                return utils.CmdResult(command=command,
+                                       stdout="",
+                                       exit_status=-1)
+            return utils.CmdResult(command=command,
+                                   stdout=output,
+                                   exit_status=exit_code)
+        elif self.is_localhost():
             if self._sudo_required:
                 run_args['command'] = 'sudo -n sh -c "%s"' % utils.sh_escape(
-                    command)
+                        command)
             try:
                 return utils.run(**run_args)
             except error.CmdError as e:
@@ -603,3 +694,38 @@
                     'The servohost was just rebooted, wait %s'
                     ' seconds for it to become ready', diff)
             time.sleep(diff)
+
+    def is_up(self,
+              timeout=60,
+              connect_timeout=None,
+              base_cmd="true",
+              with_servod=True):
+        """
+        Check if the remote host is up by ssh-ing and running a base command.
+
+        @param timeout: command execution timeout in seconds.
+        @param connect_timeout: ssh connection timeout in seconds.
+        @param base_cmd: a base command to run with ssh. The default is 'true'.
+        @param with_servod: True to match only containers that run a servod
+                            process (based on the WITH_SERVOD label).
+        @returns True if the remote host is up before the timeout expires,
+                 False otherwise.
+        """
+        if self.is_containerized_servod():
+            client = docker_utils.get_docker_client(timeout=timeout)
+            # Look up containers by hostname and, via the label, by whether
+            # they run a servod process.
+            containers = client.containers.list(
+                    filters={
+                            'name': self.hostname,
+                            'label': ["WITH_SERVOD=%s" % str(with_servod)]
+                    })
+            if not containers:
+                return False
+            elif with_servod:
+                # For container with servod process, check if servod process started.
+                (exit_code, output) = containers[0].exec_run("ps")
+                logging.info("Is Up output %s", output)
+                if b"servod" not in output:
+                    return False
+            return True
+        else:
+            return super(BaseServoHost, self).is_up(timeout, connect_timeout,
+                                                    base_cmd)
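
For reference, a minimal sketch (not part of the patch) of the containerized-servod execution path used above, written directly against the docker SDK; the container name is hypothetical, and the real code goes through docker_utils.get_docker_client().

# Hypothetical sketch; mirrors the exec_run()/decode handling above.
import docker

client = docker.from_env(timeout=60)
container = client.containers.get('chromeos1-row1-rack1-host1-docker_servod')
exit_code, output = container.exec_run("bash -c 'ps'")
if isinstance(output, bytes):
    output = output.decode(errors='replace')
print(exit_code, output)
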
diff --git a/server/hosts/chameleon_host.py b/server/hosts/chameleon_host.py
index f297a67..bf5ea70 100644
--- a/server/hosts/chameleon_host.py
+++ b/server/hosts/chameleon_host.py
@@ -8,13 +8,13 @@
 import logging
 
 from autotest_lib.client.bin import utils
+from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.cros.chameleon import chameleon
 from autotest_lib.server.cros import dnsname_mangler
 from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
 from autotest_lib.server.hosts import ssh_host
 
-
 # Names of the host attributes in the database that represent the values for
 # the chameleon_host and chameleon_port for a servo connected to the DUT.
 CHAMELEON_HOST_ATTR = 'chameleon_host'
@@ -126,16 +126,13 @@
         """
         # TODO(waihong): Add verify and repair logic which are required while
         # deploying to Cros Lab.
-        chameleon_board = None
         try:
             chameleon_board = chameleon.ChameleonBoard(
                     self._chameleon_connection, self)
             return chameleon_board
-        except:
-            self.reboot()
-            chameleon_board = chameleon.ChameleonBoard(
-                self._chameleon_connection, self)
-            return chameleon_board
+        except Exception as e:
+            raise ChameleonHostError('Cannot create chameleon board: %s(%s)' %
+                                     (e.__class__, e))
 
 
 def create_chameleon_host(dut, chameleon_args):
@@ -174,11 +171,17 @@
             if utils.host_is_in_lab_zone(chameleon_hostname):
                 # Be more tolerant on chameleon in the lab because
                 # we don't want dead chameleon blocks non-chameleon tests.
-                if utils.ping(chameleon_hostname, deadline=3):
-                   logging.warning(
-                           'Chameleon %s is not accessible. Please file a bug'
-                           ' to test lab', chameleon_hostname)
-                   return None
+                # We use ssh ping here as BeyondCorp-only hosts cannot make ICMP
+                # ping to chameleon test devices.
+                try:
+                    ssh_host.SSHHost(chameleon_hostname).ssh_ping()
+                except (error.AutoservSSHTimeout,
+                        error.AutoservSshPermissionDeniedError,
+                        error.AutoservSshPingHostError) as e:
+                    logging.warning(
+                            'Chameleon %s is not accessible. Please file a bug'
+                            ' to test lab: %s', chameleon_hostname, e)
+                    return None
                 return ChameleonHost(chameleon_host=chameleon_hostname)
         if chameleon_args:
             return ChameleonHost(**chameleon_args)
@@ -232,7 +235,7 @@
         if 'btpeer_host' in args:
             ret_args['chameleon_host'] = args['btpeer_host']
         if 'btpeer_port' in args:
-            ret_args['chameleon_port'] = args['btpeer_port']
+            ret_args['chameleon_port'] = int(args['btpeer_port'])
         if 'btpeer_ssh_port' in args:
             ret_args['port'] = int(args['btpeer_ssh_port'])
         return ret_args
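
A small sketch (not part of the patch) of the ssh-ping reachability pattern adopted above for in-lab chameleons; the btpeer hostname is hypothetical.

# Hypothetical sketch; ssh ping is used because BeyondCorp-only hosts cannot
# ICMP-ping chameleon test devices.
import logging

import common
from autotest_lib.client.common_lib import error
from autotest_lib.server.hosts import ssh_host

try:
    ssh_host.SSHHost('chromeos1-row1-rack1-btpeer1').ssh_ping()
except (error.AutoservSSHTimeout,
        error.AutoservSshPermissionDeniedError,
        error.AutoservSshPingHostError) as e:
    logging.warning('btpeer is not accessible: %s', e)
else:
    logging.info('btpeer is reachable over ssh.')
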
diff --git a/server/hosts/cros_firmware.py b/server/hosts/cros_firmware.py
index 3d51959..6571ca3 100644
--- a/server/hosts/cros_firmware.py
+++ b/server/hosts/cros_firmware.py
@@ -7,7 +7,7 @@
 Repair actions and verifiers relating to CrOS firmware.
 
 This contains the repair actions and verifiers need to find problems
-with the firmware installed on Chrome OS DUTs, and when necessary, to
+with the firmware installed on ChromeOS DUTs, and when necessary, to
 fix problems by updating or re-installing the firmware.
 
 The operations in the module support two distinct use cases:
@@ -52,7 +52,7 @@
 from autotest_lib.server.hosts import repair_utils
 from autotest_lib.server.hosts import cros_constants
 
-from chromite.lib import timeout_util
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
 import six
 
 
@@ -374,7 +374,7 @@
         the firmware was built.  This function checks that the hardware
         identified by `version_a` and `version_b` is the same.
 
-        This is a sanity check to protect us from installing the wrong
+        This is a confidence check to protect us from installing the wrong
         firmware on a DUT when a board label has somehow gone astray.
 
         @param version_a  First firmware version for the comparison.
@@ -387,6 +387,28 @@
             raise hosts.AutoservVerifyError(
                     message % (version_a, version_b))
 
+    def _is_stable_image_installed(self, host):
+        """Verify that ChromeOS image on host is a stable version.
+
+        This check verify that device booted from stable image to protect us
+        from installing the firmware from bad/broken/no-tested image. Bad
+        image can have broken updater or corrupted firmware.
+
+        The representation version looks like:
+                nocturne-release/R89-13728.0.0
+        Check compare version from host to version provide as stable image
+        from host-info file.
+
+        @param host  CrosHost instance.
+        """
+        os_from_host = host.get_release_builder_path()
+        os_from_host_info = host.get_cros_repair_image_name()
+        if os_from_host != os_from_host_info:
+            raise hosts.AutoservNonCriticalVerifyError(
+                    'Firmware update can be run only from the stable image.'
+                    ' Expected version: "%s", actual: "%s"' %
+                    (os_from_host_info, os_from_host))
+
     @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
     def verify(self, host):
         # Test 1 - The DUT is not excluded from updates.
@@ -407,9 +429,10 @@
                               ' with exception: %s', e)
 
         if stable_firmware is None:
+            logging.debug('Expected FW version not found')
             # This DUT doesn't have a firmware update target
             return
-
+        logging.debug('Expected FW version: %s', stable_firmware)
         # For tests 3 and 4:  If the output from `crossystem` or
         # `chromeos-firmwareupdate` isn't what we expect, we log an
         # error, but don't fail:  We don't want DUTs unable to test a
@@ -421,6 +444,7 @@
         if current_firmware is None:
             logging.error('DUT firmware version can\'t be determined.')
             return
+        logging.debug('Current FW version: %s', current_firmware)
         if current_firmware == stable_firmware:
             return
         # Test 4 - The firmware supplied in the running OS build is not
@@ -430,6 +454,7 @@
             logging.error('Supplied firmware version in OS can\'t be '
                           'determined.')
             return
+        self._is_stable_image_installed(host)
         if available_firmware != stable_firmware:
             raise hosts.AutoservVerifyError(
                     'DUT firmware requires update from %s to %s' %
diff --git a/server/hosts/cros_firmware_unittest.py b/server/hosts/cros_firmware_unittest.py
index 899b335..2ff7d36 100644
--- a/server/hosts/cros_firmware_unittest.py
+++ b/server/hosts/cros_firmware_unittest.py
@@ -2,8 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.server import utils
diff --git a/server/hosts/cros_host.py b/server/hosts/cros_host.py
index 77b0b24..685ae79 100644
--- a/server/hosts/cros_host.py
+++ b/server/hosts/cros_host.py
@@ -6,6 +6,8 @@
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
+from io import StringIO
+import json
 
 import logging
 import os
@@ -49,10 +51,11 @@
 from autotest_lib.site_utils.admin_audit import verifiers as audit_verify
 from six.moves import zip
 
+
 # In case cros_host is being ran via SSP on an older Moblab version with an
 # older chromite version.
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -71,7 +74,7 @@
 
     _AFE = frontend_wrappers.RetryingAFE(timeout_min=5, delay_sec=10)
 
-    # Timeout values (in seconds) associated with various Chrome OS
+    # Timeout values (in seconds) associated with various ChromeOS
     # state changes.
     #
     # In general, a good rule of thumb is that the timeout can be up
@@ -91,6 +94,8 @@
     #   including the 30 second dev-mode delay and time to start the
     #   network.
     # INSTALL_TIMEOUT: Time to allow for chromeos-install.
+    # ADMIN_INSTALL_TIMEOUT: Time to allow for chromeos-install
+    #   used by admin tasks.
     # POWERWASH_BOOT_TIMEOUT: Time to allow for a reboot that
     #   includes powerwash.
 
@@ -100,7 +105,9 @@
     BOOT_TIMEOUT = 150
     USB_BOOT_TIMEOUT = 300
     INSTALL_TIMEOUT = 480
+    ADMIN_INSTALL_TIMEOUT = 600
     POWERWASH_BOOT_TIMEOUT = 60
+    DEVSERVER_DOWNLOAD_TIMEOUT = 600
 
     # Minimum OS version that supports server side packaging. Older builds may
     # not have server side package built or with Autotest code change to support
@@ -191,17 +198,16 @@
             result = host.run(
                     'grep -q CHROMEOS /etc/lsb-release && '
                     '! grep -q moblab /etc/lsb-release && '
-                    '! grep -q labstation /etc/lsb-release',
-                    ignore_status=True, timeout=timeout)
-            if result.exit_status == 0:
-                lsb_release_content = host.run(
-                    'grep CHROMEOS_RELEASE_BOARD /etc/lsb-release',
+                    '! grep -q labstation /etc/lsb-release &&'
+                    ' grep CHROMEOS_RELEASE_BOARD /etc/lsb-release',
+                    ignore_status=True,
                     timeout=timeout).stdout
+            if result:
                 return not (
                     lsbrelease_utils.is_jetstream(
-                        lsb_release_content=lsb_release_content) or
+                        lsb_release_content=result) or
                     lsbrelease_utils.is_gce_board(
-                        lsb_release_content=lsb_release_content))
+                        lsb_release_content=result))
 
         except (error.AutoservRunError, error.AutoservSSHTimeout):
             return False
@@ -231,6 +237,21 @@
         return chameleon_args
 
     @staticmethod
+    def get_btattenuator_arguments(args_dict):
+        """Extract btattenuator options from `args_dict` and return the result.
+
+        @param args_dict Dictionary from which to extract the btattenuator
+          arguments.
+        """
+        logging.debug("args dict in croshost is  %s", args_dict)
+        btattenuator_args = {
+                key: args_dict[key]
+                for key in ('btatten_addr', ) if key in args_dict
+        }
+
+        return btattenuator_args
+
+    @staticmethod
     def get_btpeer_arguments(args_dict):
         """Extract btpeer options from `args_dict` and return the result.
 
@@ -242,6 +263,17 @@
             host = hosts.create_host(machine, btpeer_args=btpeer_args)
         ~~~~~~~~
 
+        If btpeer_host_list is given, it should be a comma-delimited list of
+        entries of the form host:ssh_port/chameleon_port, e.g.:
+            127.0.0.1:22/9992
+
+        When using ipv6, wrap the host portion in square brackets:
+            [::1]:22/9992
+
+        Note: Only the host name is required. Both ports are optional.
+              If providing the chameleon port, note that you should provide an
+              unforwarded port (i.e. the port exposed on the actual dut).
+
         @param args_dict: Dictionary from which to extract the btpeer
           arguments.
         """
@@ -251,9 +283,21 @@
                 # IPv6 addresses including a port number should be enclosed in
                 # square brackets.
                 delimiter = ']:' if re.search(r':.*:', btpeer) else ':'
-                result.append({key: value for key,value in
-                    zip(('btpeer_host','btpeer_port'),
-                    btpeer.strip('[]').split(delimiter))})
+
+                # Split into ip + ports
+                split = btpeer.strip('[]').split(delimiter)
+
+                # If ports are given, split into ssh + chameleon ports
+                if len(split) > 1:
+                    ports = split[1].split('/')
+                    split = [split[0]] + ports
+
+                result.append({
+                        key: value
+                        for key, value in zip(('btpeer_host',
+                                               'btpeer_ssh_port',
+                                               'btpeer_port'), split)
+                })
             return result
         else:
             return {key: args_dict[key]
@@ -262,6 +306,24 @@
 
 
     @staticmethod
+    def get_local_host_ip(args_dict):
+        """Ip address of DUT in the local LAN.
+
+        When using port forwarding during testing, the host ip is 127.0.0.1 and
+        can't be used by any peer devices (for example to scp). A local IP
+        should be given in this case so peripherals can access the DUT in the
+        local LAN.
+
+        The argument should be given with the key |local_host_ip|.
+
+        @params args_dict: Dictionary from which to extract the local host ip.
+        """
+        return {
+                key: args_dict[key]
+                for key in ('local_host_ip', ) if key in args_dict
+        }
+
+    @staticmethod
     def get_pdtester_arguments(args_dict):
         """Extract chameleon options from `args_dict` and return the result.
 
@@ -295,7 +357,9 @@
           arguments.
         """
         servo_attrs = (servo_constants.SERVO_HOST_ATTR,
+                       servo_constants.SERVO_HOST_SSH_PORT_ATTR,
                        servo_constants.SERVO_PORT_ATTR,
+                       servo_constants.SERVOD_DOCKER_ATTR,
                        servo_constants.SERVO_SERIAL_ATTR,
                        servo_constants.SERVO_BOARD_ATTR,
                        servo_constants.SERVO_MODEL_ATTR)
@@ -309,10 +373,18 @@
             else servo_args)
 
 
-    def _initialize(self, hostname, chameleon_args=None, servo_args=None,
-                    pdtester_args=None, try_lab_servo=False,
-                    try_servo_repair=False, ssh_verbosity_flag='',
-                    ssh_options='', *args, **dargs):
+    def _initialize(self,
+                    hostname,
+                    chameleon_args=None,
+                    servo_args=None,
+                    pdtester_args=None,
+                    try_lab_servo=False,
+                    try_servo_repair=False,
+                    ssh_verbosity_flag='',
+                    ssh_options='',
+                    try_servo_recovery=False,
+                    *args,
+                    **dargs):
         """Initialize superclasses, |self.chameleon|, and |self.servo|.
 
         This method will attempt to create the test-assistant object
@@ -336,9 +408,10 @@
                                    verbosity.
         @param ssh_options: String, other ssh options to pass to the ssh
                             command.
+        @param try_servo_recovery:  When True, start servod in recovery mode.
+                                    See servo_host for details.
         """
-        super(CrosHost, self)._initialize(hostname=hostname,
-                                          *args, **dargs)
+        super(CrosHost, self)._initialize(hostname=hostname, *args, **dargs)
         self._repair_strategy = cros_repair.create_cros_repair_strategy()
         # hold special dut_state for repair process
         self._device_repair_state = None
@@ -358,14 +431,18 @@
                 result_dir=self.get_result_dir())
 
         # TODO(otabek@): remove when b/171414073 closed
-        pingable_before_servo = self.is_up_fast(count=3)
-        if pingable_before_servo:
-            logging.info('DUT is pingable before init Servo.')
+        if self.use_icmp:
+            pingable_before_servo = self.is_up_fast(count=1)
+            if pingable_before_servo:
+                logging.info('DUT is pingable before init Servo.')
+        else:
+            logging.info('Skipping ping to DUT before init Servo.')
         _servo_host, servo_state = servo_host.create_servo_host(
                 dut=self,
                 servo_args=servo_args,
                 try_lab_servo=try_lab_servo,
                 try_servo_repair=try_servo_repair,
+                try_servo_recovery=try_servo_recovery,
                 dut_host_info=self.host_info_store.get(),
                 dut_health_profile=dut_health_profile)
         if dut_health_profile.is_loaded():
@@ -376,22 +453,6 @@
             self.health_profile = dut_health_profile
         self.set_servo_host(_servo_host, servo_state)
 
-        # TODO(otabek@): remove when b/171414073 closed
-        # Introduced to collect cases when servo made DUT not sshable
-        pingable_after_servo = self.is_up_fast(count=3)
-        if pingable_after_servo:
-            logging.info('DUT is pingable after init Servo.')
-        elif pingable_before_servo:
-            logging.info('DUT was pingable before init Servo but not now')
-            if servo_args and self._servo_host and self._servo_host.hostname:
-                # collect stats only for tests.
-                dut_ping_servo_init_data = {
-                        'host': self.hostname,
-                        'servo_host': self._servo_host.hostname,
-                }
-                metrics.Counter('chromeos/autotest/dut_ping_servo_init2'
-                                ).increment(fields=dut_ping_servo_init_data)
-
         # TODO(waihong): Do the simplication on Chameleon too.
         self._chameleon_host = chameleon_host.create_chameleon_host(
             dut=self.hostname,
@@ -419,7 +480,6 @@
         else:
             self.pdtester = None
 
-
     def initialize_btpeer(self, btpeer_args=[]):
         """ Initialize the Bluetooth peers
 
@@ -454,7 +514,6 @@
             logging.error('Exception %s in initialize_btpeer', str(e))
 
 
-
     def get_cros_repair_image_name(self):
         """Get latest stable cros image name from AFE.
 
@@ -465,7 +524,7 @@
         """
         info = self.host_info_store.get()
         if not info.board:
-            logging.warn('No board label value found. Trying to infer '
+            logging.warning('No board label value found. Trying to infer '
                          'from the host itself.')
             try:
                 info.labels.append(self.get_board())
@@ -801,30 +860,48 @@
             raise error.AutoservError('Cannot find the latest firmware')
 
     @staticmethod
-    def get_version_from_image(image, version_regex):
+    def get_version_from_image(host, bios_image, ec_image):
         """Get version string from binary image using regular expression.
 
-        @param image: Binary image to search
-        @param version_regex: Regular expression to search for
+        @param host: An instance of hosts.Host.
+        @param bios_image: Filename of AP BIOS image on the DUT/labstation.
+        @param ec_image: Filename of EC image on the DUT/labstation.
 
-        @return Version string
-
-        @raises TestFail if no version string is found in image
+        @return Tuple of bios version and ec version
         """
-        with open(image, 'rb') as f:
-            image_data = f.read()
-        match = re.findall(version_regex,
-                           image_data.decode('ISO-8859-1', errors='ignore'))
-        if match:
-            return match[0]
-        else:
-            raise error.TestFail('Failed to read version from %s.' % image)
+        if not host:
+            return None
+        cmd_args = ['futility', 'update', '--manifest']
+        if bios_image:
+            cmd_args.append('-i')
+            cmd_args.append(bios_image)
+        if ec_image:
+            cmd_args.append('-e')
+            cmd_args.append(ec_image)
+        cmd = ' '.join([utils.sh_quote_word(arg) for arg in cmd_args])
+        stdout = host.run(cmd).stdout
+        if not isinstance(stdout, six.text_type):
+            stdout = stdout.decode('utf-8')
+        io = StringIO(stdout)
+        data = json.load(io)
+        return (
+                data.get('default', {}).get('host', {}).get('versions',
+                                                            {}).get('rw'),
+                data.get('default', {}).get('ec', {}).get('versions',
+                                                          {}).get('rw'),
+        )
 
 
-    def firmware_install(self, build, rw_only=False, dest=None,
-                         local_tarball=None, verify_version=False,
-                         try_scp=False, install_ec=True, install_bios=True,
-                         board_as=None):
+    def firmware_install(self,
+                         build,
+                         rw_only=False,
+                         dest=None,
+                         local_tarball=None,
+                         verify_version=False,
+                         try_scp=False,
+                         install_ec=True,
+                         install_bios=True,
+                         corrupt_ec=False):
         """Install firmware to the DUT.
 
         Use stateful update if the DUT is already running the same build.
@@ -851,7 +928,7 @@
                         the firmware and programming from the DUT.
         @param install_ec: True to install EC FW, and False to skip it.
         @param install_bios: True to install BIOS, and False to skip it.
-        @param board_as: A board name to force to use.
+        @param corrupt_ec: True to flash the EC with a corrupted image
+                           (for test purposes).
 
         TODO(dshi): After bug 381718 is fixed, update here with corresponding
                     exceptions that could be raised.
@@ -869,16 +946,11 @@
         if board is None or board == '':
             board = self.servo.get_board()
 
-        # if board_as argument is passed, then use it instead of the original
-        # board name.
-        if board_as:
-            board = board_as
-
         if model is None or model == '':
             try:
                 model = self.get_platform()
             except Exception as e:
-                logging.warn('Dut is unresponsive: %s', str(e))
+                logging.warning('Dut is unresponsive: %s', str(e))
 
         # If local firmware path not provided fetch it from the dev server
         tmpd = None
@@ -896,7 +968,12 @@
                 # Download firmware image
                 fwurl = self._FW_IMAGE_URL_PATTERN % (ds.url(), build)
                 local_tarball = os.path.join(dest, os.path.basename(fwurl))
-                ds.download_file(fwurl, local_tarball)
+                logging.info('Downloading file from %s to %s.', fwurl,
+                             local_tarball)
+                ds.download_file(fwurl,
+                                 local_tarball,
+                                 timeout=self.DEVSERVER_DOWNLOAD_TIMEOUT)
+                logging.info('Done downloading')
             except Exception as e:
                 raise error.TestError('Failed to download firmware package: %s'
                                       % str(e))
@@ -905,7 +982,8 @@
         if install_ec:
             # Extract EC image from tarball
             logging.info('Extracting EC image.')
-            ec_image = self.servo.extract_ec_image(board, model, local_tarball)
+            ec_image = self.servo.extract_ec_image(board, model, local_tarball,
+                                                   corrupt_ec)
             logging.info('Extracted: %s', ec_image)
 
         bios_image = None
@@ -924,12 +1002,17 @@
 
         # Install firmware from local tarball
         try:
+            image_ec_version = None
+            image_bios_version = None
+
             # Check if copying to DUT is enabled and DUT is available
             if try_scp and self.is_up():
                 # DUT is available, make temp firmware directory to store images
                 logging.info('Making temp folder.')
                 dest_folder = '/tmp/firmware'
                 self.run('mkdir -p ' + dest_folder)
+                dest_bios_path = None
+                dest_ec_path = None
 
                 fw_cmd = self._FW_UPDATE_CMD % ('--wp=1' if rw_only else '')
 
@@ -964,20 +1047,27 @@
                     if e.result_obj.exit_status != 255:
                         raise
                     elif ec_image:
-                        logging.warn("DUT network dropped during update"
+                        logging.warning("DUT network dropped during update"
                                      " (often caused by EC resetting USB)")
                     else:
                         logging.error("DUT network dropped during update"
                                       " (unexpected, since no EC image)")
                         raise
+                image_bios_version, image_ec_version = self.get_version_from_image(
+                        self, dest_bios_path, dest_ec_path)
             else:
                 # Host is not available, program firmware using servo
+                dest_bios_path = None
+                dest_ec_path = None
                 if ec_image:
-                    self.servo.program_ec(ec_image, rw_only)
+                    dest_ec_path = self.servo.program_ec(ec_image, rw_only)
                 if bios_image:
-                    self.servo.program_bios(bios_image, rw_only)
+                    dest_bios_path = self.servo.program_bios(
+                            bios_image, rw_only)
                 if utils.host_is_in_lab_zone(self.hostname):
                     self._add_fw_version_label(build, rw_only)
+                image_bios_version, image_ec_version = self.get_version_from_image(
+                        self._servo_host, dest_bios_path, dest_ec_path)
 
             # Reboot and wait for DUT after installing firmware
             logging.info('Rebooting DUT.')
@@ -990,11 +1080,10 @@
                 # Check programmed EC firmware when EC image was found
                 if ec_image:
                     logging.info('Checking EC firmware version.')
+                    if image_ec_version is None:
+                        raise error.TestFail(
+                                'Could not find EC version in %s' % ec_image)
                     dest_ec_version = self.get_ec_version()
-                    ec_version_prefix = dest_ec_version.split('_', 1)[0]
-                    ec_regex = self._EC_REGEX % ec_version_prefix
-                    image_ec_version = self.get_version_from_image(ec_image,
-                                                                   ec_regex)
                     if dest_ec_version != image_ec_version:
                         raise error.TestFail(
                             'Failed to update EC firmware, version %s '
@@ -1004,11 +1093,11 @@
                 if bios_image:
                     # Check programmed BIOS firmware against expected version
                     logging.info('Checking BIOS firmware version.')
+                    if image_bios_version is None:
+                        raise error.TestFail(
+                                'Could not find BIOS version in %s' %
+                                bios_image)
                     dest_bios_version = self.get_firmware_version()
-                    bios_version_prefix = dest_bios_version.split('.', 1)[0]
-                    bios_regex = self._BIOS_REGEX % bios_version_prefix
-                    image_bios_version = self.get_version_from_image(bios_image,
-                                                                     bios_regex)
                     if dest_bios_version != image_bios_version:
                         raise error.TestFail(
                             'Failed to update BIOS, version %s '
@@ -1019,45 +1108,28 @@
                 tmpd.clean()
 
 
-    def servo_install(self,
-                      image_url=None,
-                      usb_boot_timeout=USB_BOOT_TIMEOUT,
-                      install_timeout=INSTALL_TIMEOUT,
-                      is_repair=False):
-        """
-        Re-install the OS on the DUT by:
-        1) installing a test image on a USB storage device attached to the Servo
-                board,
-        2) booting that image in recovery mode, and then
-        3) installing the image with chromeos-install.
+    def install_image_to_servo_usb(self, image_url=None):
+        """Installing a test image on a USB storage device.
 
-        @param image_url: If specified use as the url to install on the DUT.
-                otherwise boot the currently staged image on the USB stick.
-        @param usb_boot_timeout: The usb_boot_timeout to use during reimage.
-                Factory images need a longer usb_boot_timeout than regular
-                cros images.
-        @param install_timeout: The timeout to use when installing the chromeos
-                image. Factory images need a longer install_timeout.
-        @param is_repair: Indicates if the method is called from a repair task.
+        Download the image to the USB storage attached to the Servo board.
 
-        @raises AutoservError if the image fails to boot.
+        @param image_url:       If specified, the URL of the image to
+                                download to the USB storage.
+
+        @raises AutoservError if the image fails to download.
 
         """
-        if image_url:
-            logging.info('Downloading image to USB, then booting from it.'
-                         ' Usb boot timeout = %s', usb_boot_timeout)
-        else:
-            logging.info('Booting from USB directly. Usb boot timeout = %s',
-                    usb_boot_timeout)
+        if not image_url:
+            logging.debug('Skip download as image_url not provided!')
+            return
 
+        logging.info('Downloading image to USB')
         metrics_field = {'download': bool(image_url)}
         metrics.Counter(
-            'chromeos/autotest/provision/servo_install/download_image'
-            ).increment(fields=metrics_field)
-
+                'chromeos/autotest/provision/servo_install/download_image'
+        ).increment(fields=metrics_field)
         with metrics.SecondsTimer(
-                'chromeos/autotest/provision/servo_install/boot_duration'):
-            self.servo._power_state.power_off()
+                'chromeos/autotest/servo_install/download_image_time'):
             try:
                 self.servo.image_to_servo_usb(image_path=image_url,
                                               power_off_dut=False)
@@ -1066,11 +1138,28 @@
                                 ).increment(
                                         fields={'host': self.hostname or ''})
                 six.reraise(error.AutotestError, str(e), sys.exc_info()[2])
-            # Give the DUT some time to power_off if we skip
-            # download image to usb. (crbug.com/982993)
-            if not image_url:
-                time.sleep(10)
-            need_snk = self.require_snk_mode_in_recovery()
+
+    def boot_in_recovery_mode(self,
+                              usb_boot_timeout=USB_BOOT_TIMEOUT,
+                              need_snk=False):
+        """Booting host  in recovery mode.
+
+        Boot device in recovery mode and verify that device booted from
+        external storage as expected.
+
+        @param usb_boot_timeout:    The usb_boot_timeout to use wait the host
+                                    to boot. Factory images need a longer
+                                    usb_boot_timeout than regular cros images.
+        @param snk_mode:            If True, switch servo_v4 role to 'snk'
+                                    mode before boot DUT into recovery mode.
+
+        @raises AutoservError if the image fails to boot.
+
+        """
+        logging.info('Booting from USB directly. Usb boot timeout: %s',
+                     usb_boot_timeout)
+        with metrics.SecondsTimer(
+                'chromeos/autotest/provision/servo_install/boot_duration'):
             self.servo.boot_in_recovery_mode(snk_mode=need_snk)
             if not self.wait_up(timeout=usb_boot_timeout):
                 if need_snk:
@@ -1087,14 +1176,42 @@
                     'a usb stick), however it seems still boot from an'
                     ' internal storage.', 'boot_from_internal_storage')
 
+    def run_install_image(self,
+                          install_timeout=INSTALL_TIMEOUT,
+                          need_snk=False,
+                          is_repair=False):
+        """Installing the image with chromeos-install.
+
+        Steps included:
+        1) Recover TPM on the device
+        2) Run chromeos-install
+        2.a) if success: power off/on the device
+        2.b) if fail:
+        2.b.1) Mark for replacement if fail with hardware issue
+        2.b.2) Run internal storage check. (Only if is_repair=True)
+        3) Wait the device to boot as verifier of success install
+
+        Device has to booted from external storage.
+
+        @param install_timeout:     The timeout to use when installing the
+                                    chromeos image. Factory images need a
+                                    longer install_timeout.
+        @param snk_mode:            If True, switch servo_v4 role to 'snk'
+                                    mode before boot DUT into recovery mode.
+        @param is_repair:           Indicates if the method is called from a
+                                    repair task.
+
+        @raises AutoservError if the fail in process of install image.
+        @raises AutoservRepairError if fail to boot after install image.
+
+        """
         # The new chromeos-tpm-recovery has been merged since R44-7073.0.0.
         # In old CrOS images, this command fails. Skip the error.
         logging.info('Resetting the TPM status')
         try:
             self.run('chromeos-tpm-recovery')
         except error.AutoservRunError:
-            logging.warn('chromeos-tpm-recovery is too old.')
-
+            logging.warning('chromeos-tpm-recovery is too old.')
 
         with metrics.SecondsTimer(
                 'chromeos/autotest/provision/servo_install/install_duration'):
@@ -1163,6 +1280,50 @@
                                             self.BOOT_TIMEOUT,
                                             'failed_to_boot_post_install')
 
+    def servo_install(self,
+                      image_url=None,
+                      usb_boot_timeout=USB_BOOT_TIMEOUT,
+                      install_timeout=INSTALL_TIMEOUT,
+                      is_repair=False):
+        """Re-install the OS on the DUT by:
+
+        Steps:
+        1) Power off the host
+        2) Installing an image on a USB-storage attached to the Servo board
+        3) Booting that image in recovery mode
+        4) Installing the image with chromeos-install.
+
+        @param image_url:           If specified, the URL of the image to
+                                    install on the DUT; otherwise boot the
+                                    currently staged image on the USB stick.
+        @param usb_boot_timeout:    The usb_boot_timeout to use during
+                                    re-image. Factory images need a longer
+                                    usb_boot_timeout than regular cros images.
+        @param install_timeout:     The timeout to use when installing the
+                                    chromeos image. Factory images need a
+                                    longer install_timeout.
+        @param is_repair:           Indicates if the method is called from a
+                                    repair task.
+
+        @raises AutoservError if the image fails to boot.
+
+        """
+        self.servo.get_power_state_controller().power_off()
+        if image_url:
+            self.install_image_to_servo_usb(image_url=image_url)
+        else:
+            # Give the DUT some time to power_off if we skip
+            # download image to usb. (crbug.com/982993)
+            time.sleep(10)
+
+        need_snk = self.require_snk_mode_in_recovery()
+
+        self.boot_in_recovery_mode(usb_boot_timeout=usb_boot_timeout,
+                                   need_snk=need_snk)
+
+        self.run_install_image(install_timeout=install_timeout,
+                               need_snk=need_snk,
+                               is_repair=is_repair)
 
     def set_servo_host(self, host, servo_state=None):
         """Set our servo host member, and associated servo.
@@ -1216,6 +1377,9 @@
         if not self.servo:
             logging.debug('Servo is not initialized to get servo_type.')
             return
+        if not self.is_servo_in_working_state():
+            logging.debug('Servo is not good, skip update servo_type.')
+            return
         servo_type = self.servo.get_servo_type()
         if not servo_type:
             logging.debug('Cannot collect servo_type from servo'
@@ -1335,12 +1499,19 @@
             self.set_health_profile_dut_state(profile_state)
 
     def get_verifier_state(self, tag):
-        """Return the state of servo verifier.
+        """Return the state of host verifier by tag.
 
         @returns: bool or None
         """
         return self._repair_strategy.verifier_is_good(tag)
 
+    def get_repair_strategy_node(self, tag):
+        """Return the instance of verifier/repair node for host by tag.
+
+        @returns: _DependencyNode or None
+        """
+        return self._repair_strategy.node_by_tag(tag)
+
     def close(self):
         """Close connection."""
         super(CrosHost, self).close()
@@ -1558,11 +1729,19 @@
                 logging.error('Failed to find %s in device.', filename)
         return build_info
 
-    def _get_arc_primary_abi(self):
+    def has_arc_hardware_vulkan(self):
+        """Returns a boolean whether device has hardware vulkan."""
+        return self._get_arc_build_info().get('ro.hardware.vulkan')
+
+    def get_arc_build_type(self):
+        """Returns the ARC build type of the host."""
+        return self._get_arc_build_info().get('ro.build.type')
+
+    def get_arc_primary_abi(self):
         """Returns the primary abi of the host."""
         return self._get_arc_build_info().get('ro.product.cpu.abi')
 
-    def _get_arc_security_patch(self):
+    def get_arc_security_patch(self):
         """Returns the security patch of the host."""
         return self._get_arc_build_info().get('ro.build.version.security_patch')
 
@@ -1672,7 +1851,7 @@
         """
         This function reboots the site host. The more generic
         RemoteHost.reboot() performs sync and sleeps for 5
-        seconds. This is not necessary for Chrome OS devices as the
+        seconds. This is not necessary for ChromeOS devices as the
         sync should be finished in a short time during the reboot
         command.
         """
@@ -1698,6 +1877,8 @@
                                'error' : ''}
 
         t0 = time.time()
+        logging.debug('Pre reboot lsb-release {}'.format(
+                self._get_lsb_release_content()))
         try:
             super(CrosHost, self).reboot(**dargs)
         except Exception as e:
@@ -1707,6 +1888,9 @@
             raise
         finally:
             duration = int(time.time() - t0)
+            logging.debug('Post reboot lsb-release {}'.format(
+                    self._get_lsb_release_content()))
+
             metrics.Counter(
                     'chromeos/autotest/autoserv/reboot_count').increment(
                     fields=metric_fields)
@@ -1717,6 +1901,40 @@
                     'chromeos/autotest/autoserv/reboot_duration').add(
                     duration, fields=metric_fields)
 
+    def _default_suspend_cmd(self, suspend_time=60, delay_seconds=0):
+        """
+        Return the default suspend command
+
+        @param suspend_time: How long to suspend as integer seconds.
+        @param delay_seconds: Seconds to delay before suspending.
+
+        @returns formatted suspend_cmd string to execute
+        """
+        suspend_cmd = ' && '.join([
+            'echo 0 > /sys/class/rtc/rtc0/wakealarm',
+            'echo +%d > /sys/class/rtc/rtc0/wakealarm' % suspend_time,
+            'powerd_dbus_suspend --delay=%d' % delay_seconds])
+        return suspend_cmd
+
+    def suspend_bg(self, suspend_time=60, delay_seconds=0,
+                suspend_cmd=None):
+        """
+        This function suspends the site host and returns right away.
+
+        Note: use this when you need to perform work *while* the host is
+        suspended.
+
+        @param suspend_time: How long to suspend as integer seconds.
+        @param delay_seconds: Seconds to delay before suspending.
+        @param suspend_cmd: Suspend command to execute.
+
+        @exception AutoservSuspendError: if |suspend_cmd| fails
+        """
+        if suspend_cmd is None:
+            suspend_cmd = self._default_suspend_cmd(suspend_time, delay_seconds)
+        try:
+            self.run_background(suspend_cmd)
+        except error.AutoservRunError:
+            raise error.AutoservSuspendError("suspend command failed")
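
A small self-contained sketch that copies the command-building logic of _default_suspend_cmd, so the resulting shell pipeline can be inspected without a DUT:

    # Copies the string-building logic of _default_suspend_cmd; nothing here
    # talks to a DUT.
    def default_suspend_cmd(suspend_time=60, delay_seconds=0):
        return ' && '.join([
                'echo 0 > /sys/class/rtc/rtc0/wakealarm',
                'echo +%d > /sys/class/rtc/rtc0/wakealarm' % suspend_time,
                'powerd_dbus_suspend --delay=%d' % delay_seconds])

    print(default_suspend_cmd(30, 5))
    # echo 0 > /sys/class/rtc/rtc0/wakealarm &&
    #   echo +30 > /sys/class/rtc/rtc0/wakealarm && powerd_dbus_suspend --delay=5
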
 
     def suspend(self, suspend_time=60, delay_seconds=0,
                 suspend_cmd=None, allow_early_resume=False):
@@ -1733,10 +1951,7 @@
         """
 
         if suspend_cmd is None:
-            suspend_cmd = ' && '.join([
-                'echo 0 > /sys/class/rtc/rtc0/wakealarm',
-                'echo +%d > /sys/class/rtc/rtc0/wakealarm' % suspend_time,
-                'powerd_dbus_suspend --delay=%d' % delay_seconds])
+            suspend_cmd = self._default_suspend_cmd(suspend_time, delay_seconds)
         super(CrosHost, self).suspend(suspend_time, suspend_cmd,
                                       allow_early_resume);
 
@@ -1784,7 +1999,7 @@
         return True
 
     def verify_software(self):
-        """Verify working software on a Chrome OS system.
+        """Verify working software on a ChromeOS system.
 
         Tests for the following conditions:
          1. All conditions tested by the parent version of this
@@ -1825,6 +2040,14 @@
 
 
     @retry.retry(error.AutoservError, timeout_min=5, delay_sec=10)
+    def wait_for_service(self, service_name):
+        """Wait for target status of an upstart init script.
+
+        @param service_name: Service to wait for.
+        """
+        if not self.upstart_status(service_name):
+            raise error.AutoservError('Service %s not running.' % service_name)
+
     def wait_for_system_services(self):
         """Waits for system-services to be running.
 
@@ -1832,13 +2055,11 @@
         should give this some time to finish. See crbug.com/765686#c38 for
         details.
         """
-        if not self.upstart_status('system-services'):
-            raise error.AutoservError('Chrome failed to reach login. '
-                                      'System services not running.')
+        self.wait_for_service('system-services')
 
 
     def verify(self):
-        """Verify Chrome OS system is in good state."""
+        """Verify ChromeOS system is in good state."""
         message = 'Beginning verify for host %s board %s model %s'
         info = self.host_info_store.get()
         message %= (self.hostname, info.board, info.model)
@@ -1852,10 +2073,16 @@
                 raise
 
 
-    def make_ssh_command(self, user='root', port=22, opts='', hosts_file=None,
-                         connect_timeout=None, alive_interval=None,
-                         alive_count_max=None, connection_attempts=None):
-        """Override default make_ssh_command to use options tuned for Chrome OS.
+    def make_ssh_command(self,
+                         user='root',
+                         port=None,
+                         opts='',
+                         hosts_file=None,
+                         connect_timeout=None,
+                         alive_interval=None,
+                         alive_count_max=None,
+                         connection_attempts=None):
+        """Override default make_ssh_command to use options tuned for ChromeOS.
 
         Tuning changes:
           - ConnectTimeout=30; maximum of 30 seconds allowed for an SSH
@@ -1972,7 +2199,11 @@
         @return True iff the host answered to ping before the timeout.
 
         """
-        return self._ping_wait_for_status(self._PING_STATUS_UP, timeout)
+        if self.use_icmp:
+            return self._ping_wait_for_status(self._PING_STATUS_UP, timeout)
+        else:
+            logging.debug('Using SSH instead of ICMP for ping_wait_up.')
+            return self.wait_up(timeout)
 
     def ping_wait_down(self, timeout):
         """Wait until the host no longer responds to `ping`.
@@ -1986,7 +2217,11 @@
                 timeout.
 
         """
-        return self._ping_wait_for_status(self._PING_STATUS_DOWN, timeout)
+        if self.use_icmp:
+            return self._ping_wait_for_status(self._PING_STATUS_DOWN, timeout)
+        else:
+            logging.debug('Using SSH instead of ICMP for ping_wait_down.')
+            return self.wait_down(timeout)
 
     def _is_host_port_forwarded(self):
         """Checks if the dut is connected over port forwarding.
@@ -2426,6 +2661,15 @@
         return crossystem.fwid()
 
 
+    def get_hardware_id(self):
+        """Get hardware id as strings.
+
+        @returns a string representing this host's hardware id.
+        """
+        crossystem = utils.Crossystem(self)
+        crossystem.init()
+        return crossystem.hwid()
+
     def get_hardware_revision(self):
         """Get the hardware revision as strings.
 
@@ -2626,11 +2870,10 @@
                         for extended use (for moving the machine, etc)
                  'power:AC_only' when the device has no battery at all.
         """
-        psu = self.run(command='mosys psu type', ignore_status=True)
+        psu = self.run(command='cros_config /hardware-properties psu-type',
+                       ignore_status=True)
         if psu.exit_status:
-            # The psu command for mosys is not included for all platforms. The
-            # assumption is that the device will have a battery if the command
-            # is not found.
+            # Assume battery if unspecified in cros_config.
             return 'power:battery'
 
         psu_str = psu.stdout.strip()
@@ -2646,22 +2889,7 @@
         Returns:
             Boolean, False if known not to have battery, True otherwise.
         """
-        rv = True
-        power_supply = self.get_power_supply()
-        if power_supply == 'power:battery':
-            _NO_BATTERY_BOARD_TYPE = ['CHROMEBOX', 'CHROMEBIT', 'CHROMEBASE']
-            board_type = self.get_board_type()
-            if board_type in _NO_BATTERY_BOARD_TYPE:
-                logging.warn('Do NOT believe type %s has battery. '
-                             'See debug for mosys details', board_type)
-                psu = utils.system_output('mosys -vvvv psu type',
-                                         ignore_status=True)
-                logging.debug(psu)
-                rv = False
-        elif power_supply == 'power:AC_only':
-            rv = False
-
-        return rv
+        return self.get_power_supply() == 'power:battery'
 
 
     def get_servo(self):
@@ -2674,12 +2902,15 @@
         """
         return 'servo' if self._servo_host else None
 
-
-    def has_internal_display(self):
-        """Determine if the device under test is equipped with an internal
-        display.
-
-        @return: 'internal_display' if one is present; None otherwise.
+    def _has_display(self, internal):
+        """ Determine if the device under test is equipped with a display
+        @params internal: True if checking internal display else checking
+                          external display.
+        @return: 'internal_display' if internal is true and internal display
+                 present;
+                 'external_display' if internal is false and external display
+                 present;
+                 None otherwise.
         """
         from autotest_lib.client.cros.graphics import graphics_utils
         from autotest_lib.client.common_lib import utils as common_utils
@@ -2698,13 +2929,33 @@
         utils.system_output = __system_output
         common_utils.read_file = __read_file
         try:
-            return ('internal_display' if graphics_utils.has_internal_display()
-                                   else None)
+            if internal:
+                return ('internal_display'
+                        if graphics_utils.has_internal_display() else None)
+            else:
+                return ('external_display'
+                        if graphics_utils.has_external_display() else None)
         finally:
             utils.system_output = original_system_output
             common_utils.read_file = original_read_file
 
 
+    def has_internal_display(self):
+        """Determine if the device under test is equipped with an internal
+        display.
+
+        @return: 'internal_display' if one is present; None otherwise.
+        """
+        return self._has_display(True)
+
+    def has_external_display(self):
+        """Determine if the device under test is equipped with an external
+        display.
+
+        @return: 'external_display' if one is present; None otherwise.
+        """
+        return self._has_display(False)
+
     def is_boot_from_usb(self):
         """Check if DUT is boot from USB.
 
@@ -2767,11 +3018,18 @@
 
     def get_board_type(self):
         """
-        Get the DUT's device type from /etc/lsb-release.
-        DEVICETYPE can be one of CHROMEBOX, CHROMEBASE, CHROMEBOOK or more.
+        Get the DUT's device type / form factor from cros_config. It can be one
+        of CHROMEBOX, CHROMEBASE, CHROMEBOOK, or CHROMEBIT.
 
-        @return value of DEVICETYPE param from lsb-release.
+        @return form factor value from cros_config.
         """
+
+        device_type = self.run('cros_config /hardware-properties form-factor',
+                ignore_status=True).stdout
+        if device_type:
+            return device_type
+
+        # TODO: remove lsb-release fallback once cros_config works everywhere
         device_type = self.run('grep DEVICETYPE /etc/lsb-release',
                                ignore_status=True).stdout
         if device_type:
@@ -2896,6 +3154,11 @@
         """Get device repair state"""
         return self._device_repair_state
 
+    def is_marked_for_replacement(self):
+        """Verify if device was marked for replacemnet during admin task."""
+        expected_state = cros_constants.DEVICE_STATE_NEEDS_REPLACEMENT
+        return self.get_device_repair_state() == expected_state
+
     def set_device_repair_state(self, state, resultdir=None):
         """Set device repair state.
 
@@ -2930,16 +3193,17 @@
             cros_constants.DEVICE_STATE_NEEDS_REPLACEMENT,
             resultdir=resultdir)
 
-    def _dut_fail_ssh_verifier(self):
-        """Check if DUT failed SSH verifier.
+    def _dut_is_accessible_by_verifier(self):
+        """Check if DUT accessible by SSH or PING verifier.
 
-        @returns: bool, True - verifier marked as fail.
-                        False - result not reachable, verifier did not fail.
+        @returns: bool, True - at least one verifier succeeded.
+                        False - not reachable or verifiers did not succeed.
         """
         if not self._repair_strategy:
             return False
-        dut_ssh_verifier = self._repair_strategy.verifier_is_good('ssh')
-        return dut_ssh_verifier == hosts.VERIFY_FAILED
+        dut_ssh = self._repair_strategy.verifier_is_good('ssh')
+        dut_ping = self._repair_strategy.verifier_is_good('ping')
+        return dut_ssh == hosts.VERIFY_SUCCESS or dut_ping == hosts.VERIFY_SUCCESS
 
     def _stat_if_pingable_but_not_sshable(self):
         """Check if DUT pingable but failed SSH verifier."""
@@ -2962,8 +3226,8 @@
         # state can be set by any cros repair actions
         if self.get_device_repair_state():
             return
-        if not self._dut_fail_ssh_verifier():
-            # DUT is sshable and we still have many options to repair it.
+        if self._dut_is_accessible_by_verifier():
+            # DUT is accessible and we still have many options to repair it.
             return
         needs_manual_repair = False
         dhp = self.health_profile
@@ -2999,14 +3263,14 @@
 
         @returns: None
         """
-        message_prefix = "Don't need to request servo-host reboot "
-        if not self._dut_fail_ssh_verifier():
+        message_prefix = "Don't need to request servo-host reboot"
+        if self._dut_is_accessible_by_verifier():
             return
         if not self._servo_host:
-            logging.debug(message_prefix + 'as it not initialized')
+            logging.debug('%s as it is not initialized', message_prefix)
             return
         if not self._servo_host.is_up_fast():
-            logging.debug(message_prefix + 'as servo-host is not sshable')
+            logging.debug('%s as servo-host is not sshable', message_prefix)
             return
         if not self._servo_host.is_labstation():
             logging.debug('Servo_v3 is not requested to reboot for the DUT')
@@ -3020,7 +3284,7 @@
             # - '2' or '2.1'   - port on the hub or smart-hub
             # - '3'   - port on servo hub
             if len(connected_port.split('.')) > 2:
-                logging.debug(message_prefix + 'as servo connected by hub')
+                logging.debug('%s as servo connected by hub', message_prefix)
                 return
         self._servo_host.request_reboot()
         logging.info('Requested labstation reboot because DUT is not sshable')
@@ -3151,13 +3415,17 @@
         """Set servo-topology info to the host-info."""
         logging.debug('Try to save servo topology to host-info.')
         if not self._servo_host:
-            logging.info('Servo host is not initilized.')
+            logging.debug('Servo host is not initialized.')
+            return
+        if not self.is_servo_in_working_state():
+            logging.debug('Servo is not in a working state, so updating'
+                          ' the topology is not allowed.')
             return
         if not self._servo_host.is_servo_topology_supported():
-            logging.info('Servo-topology is not supported.')
+            logging.debug('Servo-topology is not supported.')
             return
         servo_topology = self._servo_host.get_topology()
         if not servo_topology or servo_topology.is_empty():
-            logging.info('Servo topology is empty')
+            logging.debug('Servo topology is empty')
             return
         servo_topology.save(self.host_info_store)
diff --git a/server/hosts/cros_host_unittest.py b/server/hosts/cros_host_unittest.py
index ccad92d..c816519 100755
--- a/server/hosts/cros_host_unittest.py
+++ b/server/hosts/cros_host_unittest.py
@@ -1,8 +1,8 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # pylint: disable=missing-docstring
 
-import mock
 import unittest
+from unittest import mock
 
 import common
 
diff --git a/server/hosts/cros_label.py b/server/hosts/cros_label.py
index 388ed51..9db2aa0 100644
--- a/server/hosts/cros_label.py
+++ b/server/hosts/cros_label.py
@@ -10,27 +10,23 @@
 from __future__ import print_function
 
 import collections
+import configparser
 import logging
 import re
 
 import common
 
 from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.cros.audio import cras_utils
 from autotest_lib.server.cros.dynamic_suite import constants as ds_constants
 from autotest_lib.server.hosts import base_label
 from autotest_lib.server.hosts import common_label
 from autotest_lib.server.hosts import servo_constants
-from autotest_lib.site_utils import hwid_lib
 from six.moves import zip
 
 # pylint: disable=missing-docstring
 LsbOutput = collections.namedtuple('LsbOutput', ['unibuild', 'board'])
 
-# fallback values if we can't contact the HWID server
-HWID_LABELS_FALLBACK = ['sku', 'phase', 'touchscreen', 'touchpad', 'variant', 'stylus']
-
 # Repair and Deploy taskName
 REPAIR_TASK_NAME = 'repair'
 DEPLOY_TASK_NAME = 'deploy'
@@ -224,6 +220,45 @@
         return task_name in (DEPLOY_TASK_NAME, '')
 
 
+class AudioConfigLabel(base_label.StringPrefixLabel):
+    """Determine the label of CRAS configuration for the device.
+
+    It parses config keys from the board.ini file content as the example below:
+
+    [hotword]
+    pause_at_suspend=1
+    [processing]
+    nc_supported=1
+
+    """
+
+    _NAME = 'audio'
+
+    def generate_labels(self, host):
+        # Get model name for determining the board.ini file path.
+        cros_config_cmd = 'cros_config / name'
+        result = host.run(command=cros_config_cmd, ignore_status=True)
+        if result.exit_status != 0:
+            logging.error('Failed to run command: %s', cros_config_cmd)
+            return []
+
+        model = result.stdout.strip()
+        cras_config_cmd = 'cat /etc/cras/{}/board.ini'.format(model)
+        result = host.run(command=cras_config_cmd, ignore_status=True)
+        if result.exit_status != 0:
+            logging.error('Failed to run command: %s', cras_config_cmd)
+            return []
+
+        config = configparser.ConfigParser()
+        config.read_string(result.stdout)
+        labels = []
+        # Generate "has_noise_cancellation" from "processing:nc_supported".
+        if config.getboolean('processing', 'nc_supported', fallback=False):
+            labels.append('has_noise_cancellation')
+
+        return labels
+
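
A self-contained sketch of the board.ini handling AudioConfigLabel performs, using the same configparser calls; the ini content mirrors the example in the class docstring above:

    import configparser

    BOARD_INI = '''
    [hotword]
    pause_at_suspend=1
    [processing]
    nc_supported=1
    '''

    config = configparser.ConfigParser()
    config.read_string(BOARD_INI)
    labels = []
    # Same check as generate_labels: "processing:nc_supported" drives the
    # has_noise_cancellation label.
    if config.getboolean('processing', 'nc_supported', fallback=False):
        labels.append('has_noise_cancellation')
    print(labels)  # ['has_noise_cancellation']
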
+
 class AudioLoopbackDongleLabel(base_label.BaseLabel):
     """Return the label if an audio loopback dongle is plugged in."""
 
@@ -335,119 +370,14 @@
     return res
 
 
-class HWIDLabel(base_label.StringLabel):
-    """Return all the labels generated from the hwid."""
-
-    # We leave out _NAME because hwid_lib will generate everything for us.
-
-    def __init__(self):
-        # Grab the key file needed to access the hwid service.
-        self.key_file = global_config.global_config.get_config_value(
-                'CROS', 'HWID_KEY', type=str)
-
-
-    @staticmethod
-    def _merge_hwid_label_lists(new, old):
-        """merge a list of old and new values for hwid_labels.
-        preferring new values if available
-
-        @returns: list of labels"""
-        # TODO(gregorynisbet): what is the appropriate way to merge
-        # old and new information?
-        retained = set(x for x in old)
-        for label in new:
-            key, sep, value = label.partition(':')
-            # If we have a key-value key such as variant:aaa,
-            # then we remove all the old labels with the same key.
-            if sep:
-                retained = set(x for x in retained if (not x.startswith(key + ':')))
-        return list(sorted(retained.union(new)))
-
-
-    def _hwid_label_names(self):
-        """get the labels that hwid_lib controls.
-
-        @returns: hwid_labels
-        """
-        all_hwid_labels, _ = self.get_all_labels()
-        # If and only if get_all_labels was unsuccessful,
-        # it will return a falsey value.
-        out = all_hwid_labels or HWID_LABELS_FALLBACK
-
-        # TODO(gregorynisbet): remove this
-        # TODO(crbug.com/999785)
-        if "sku" not in out:
-            logging.info("sku-less label names %s", out)
-
-        return out
-
-
-    def _old_label_values(self, host):
-        """get the hwid_lib labels on previous run
-
-        @returns: hwid_labels"""
-        out = []
-        info = host.host_info_store.get()
-        for hwid_label in self._hwid_label_names():
-            for label in info.labels:
-                # NOTE: we want *all* the labels starting
-                # with this prefix.
-                if label.startswith(hwid_label):
-                    out.append(label)
-        return out
-
-
-    def generate_labels(self, host):
-        # use previous values as default
-        old_hwid_labels = self._old_label_values(host)
-        logging.info("old_hwid_labels: %r", old_hwid_labels)
-        hwid = host.run_output('crossystem hwid').strip()
-        hwid_info_list = []
-        try:
-            hwid_info_response = hwid_lib.get_hwid_info(
-                hwid=hwid,
-                info_type=hwid_lib.HWID_INFO_LABEL,
-                key_file=self.key_file,
-            )
-            logging.info("hwid_info_response: %r", hwid_info_response)
-            hwid_info_list = hwid_info_response.get('labels', [])
-        except hwid_lib.HwIdException as e:
-            logging.info("HwIdException: %s", e)
-
-        new_hwid_labels = _parse_hwid_labels(hwid_info_list)
-        logging.info("new HWID labels: %r", new_hwid_labels)
-
-        return HWIDLabel._merge_hwid_label_lists(
-            old=old_hwid_labels,
-            new=new_hwid_labels,
-        )
-
-
-    def get_all_labels(self):
-        """We need to try all labels as a prefix and as standalone.
-
-        We don't know for sure which labels are prefix labels and which are
-        standalone so we try all of them as both.
-        """
-        all_hwid_labels = []
-        try:
-            all_hwid_labels = hwid_lib.get_all_possible_dut_labels(
-                    self.key_file)
-        except IOError:
-            logging.error('Can not open key file: %s', self.key_file)
-        except hwid_lib.HwIdException as e:
-            logging.error('hwid service: %s', e)
-        return all_hwid_labels, all_hwid_labels
-
-
 CROS_LABELS = [
+    AudioConfigLabel(),
     AudioLoopbackDongleLabel(), #STATECONFIG
     BluetoothPeerLabel(), #STATECONFIG
     ChameleonConnectionLabel(), #LABCONFIG
     ChameleonLabel(), #STATECONFIG
     common_label.OSLabel(),
     DeviceSkuLabel(), #LABCONFIG
-    HWIDLabel(),
     ServoTypeLabel(), #LABCONFIG
     # Temporarily add back as there's no way to reference cr50 configs.
     # See crbug.com/1057145 for the root cause.
diff --git a/server/hosts/cros_label_unittest.py b/server/hosts/cros_label_unittest.py
index 1b228b5..2652f2b 100755
--- a/server/hosts/cros_label_unittest.py
+++ b/server/hosts/cros_label_unittest.py
@@ -1,20 +1,20 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import unittest
-import mock
+from unittest import mock
 
 import common
 
 from autotest_lib.server import utils
-from autotest_lib.server.hosts import cros_label
 from autotest_lib.server.hosts.cros_label import BrandCodeLabel
 from autotest_lib.server.hosts.cros_label import Cr50Label
 from autotest_lib.server.hosts.cros_label import Cr50ROKeyidLabel
 from autotest_lib.server.hosts.cros_label import Cr50RWKeyidLabel
 from autotest_lib.server.hosts.cros_label import DeviceSkuLabel
+from autotest_lib.server.hosts.cros_label import AudioConfigLabel
 from autotest_lib.server.hosts.cros_label import AudioLoopbackDongleLabel
 from autotest_lib.server.hosts.cros_label import ChameleonConnectionLabel
 from autotest_lib.server.hosts.cros_label import ChameleonLabel
@@ -243,33 +243,72 @@
         self.assertEqual(Cr50ROKeyidLabel().get(host), [])
 
 
-class HWIDLabelTests(unittest.TestCase):
-    def test_merge_hwid_label_lists_empty(self):
-        self.assertEqual(cros_label.HWIDLabel._merge_hwid_label_lists([], []), [])
+class AudioConfigLabelTests(unittest.TestCase):
+    """Unit tests for AudioConfigLabel"""
 
-    def test_merge_hwid_label_lists_singleton(self):
-        self.assertEqual(cros_label.HWIDLabel._merge_hwid_label_lists([], ["4"]),
-                         ["4"])
-        self.assertEqual(cros_label.HWIDLabel._merge_hwid_label_lists(["7"], []),
-                         ["7"])
+    HAS_NC_BOARD_INI_OUTPUT = """
+    [hotword]
+    pause_at_suspend=1
+    [processing]
+    nc_supported=1
+    hw_echo_ref_disabled=1
+    """
 
-    def test_merge_hwid_label_lists_override(self):
-        self.assertEqual(
-            cros_label.HWIDLabel._merge_hwid_label_lists(old=["7:a"], new=["7:b"]),
-            ["7:b"])
+    NO_NC_BOARD_INI_OUTPUT = """
+    [hotword]
+    pause_at_suspend=1
+    [processing]
+    hw_echo_ref_disabled=1
+    """
 
-    def test_merge_hwid_label_lists_no_override(self):
-        self.assertEqual(
-            cros_label.HWIDLabel._merge_hwid_label_lists(old=["7a"], new=["7b"]),
-            ["7a", "7b"])
+    DISABLED_NC_BOARD_INI_OUTPUT = """
+    [hotword]
+    pause_at_suspend=1
+    [processing]
+    hw_echo_ref_disabled=1
+    nc_supported=0
+    """
 
-    def test_hwid_label_names(self):
-        class HWIDLabelTester(cros_label.HWIDLabel):
-            def get_all_labels(self):
-                return [], []
+    def test_has_noise_cancellation_label_enabled(self):
+        cros_config_cmd = 'cros_config / name'
+        cras_config_cmd = 'cat /etc/cras/HAS_NC/board.ini'
+        host = MockHost([], MockCmd(cros_config_cmd, 0, 'HAS_NC\n'),
+                        MockCmd(cras_config_cmd, 0,
+                                self.HAS_NC_BOARD_INI_OUTPUT))
+        self.assertEqual(AudioConfigLabel().get(host),
+                         ['audio:has_noise_cancellation'])
 
-        item = HWIDLabelTester()
-        self.assertEqual(item._hwid_label_names(), cros_label.HWID_LABELS_FALLBACK)
+    def test_has_noise_cancellation_label_not_exists(self):
+        cros_config_cmd = 'cros_config / name'
+        cras_config_cmd = 'cat /etc/cras/NO_NC/board.ini'
+        host = MockHost([], MockCmd(cros_config_cmd, 0, 'NO_NC\n'),
+                        MockCmd(cras_config_cmd, 0,
+                                self.NO_NC_BOARD_INI_OUTPUT))
+        self.assertEqual(AudioConfigLabel().get(host), [])
+
+    def test_has_noise_cancellation_label_disabled(self):
+        cros_config_cmd = 'cros_config / name'
+        cras_config_cmd = 'cat /etc/cras/DISABLED_NC/board.ini'
+        host = MockHost([], MockCmd(cros_config_cmd, 0, 'DISABLED_NC\n'),
+                        MockCmd(cras_config_cmd, 0,
+                                self.DISABLED_NC_BOARD_INI_OUTPUT))
+        self.assertEqual(AudioConfigLabel().get(host), [])
+
+    def test_has_noise_cancellation_label_fails_cros_config(self):
+        cros_config_cmd = 'cros_config / name'
+        cras_config_cmd = 'cat /etc/cras/HAS_NC/board.ini'
+        host = MockHost([], MockCmd(cros_config_cmd, 1, 'HAS_NC\n'),
+                        MockCmd(cras_config_cmd, 0,
+                                self.HAS_NC_BOARD_INI_OUTPUT))
+        self.assertEqual(AudioConfigLabel().get(host), [])
+
+    def test_has_noise_cancellation_label_fails_cras_config(self):
+        cros_config_cmd = 'cros_config / name'
+        cras_config_cmd = 'cat /etc/cras/HAS_NC/board.ini'
+        host = MockHost([], MockCmd(cros_config_cmd, 0, 'HAS_NC\n'),
+                        MockCmd(cras_config_cmd, 1,
+                                self.HAS_NC_BOARD_INI_OUTPUT))
+        self.assertEqual(AudioConfigLabel().get(host), [])
 
 
 class AudioLoopbackDongleLabelTests(unittest.TestCase):
diff --git a/server/hosts/cros_repair.py b/server/hosts/cros_repair.py
index dec36c9..4d72712 100644
--- a/server/hosts/cros_repair.py
+++ b/server/hosts/cros_repair.py
@@ -7,10 +7,11 @@
 from __future__ import division
 from __future__ import print_function
 
-import json
 import logging
-import time
 import math
+import six
+import sys
+import time
 
 import common
 from autotest_lib.client.common_lib import error
@@ -24,19 +25,23 @@
 from autotest_lib.server import crashcollect
 from autotest_lib.server.cros import provisioner
 from autotest_lib.server.cros.dynamic_suite import tools
+from autotest_lib.server.cros.dynamic_suite import constants as ds_constants
+from autotest_lib.server.cros.servo.keyboard import servo_keyboard_flasher
+from autotest_lib.server.cros.repair import mac_address_helper
 from autotest_lib.server.hosts import cros_constants
 from autotest_lib.server.hosts import cros_firmware
 from autotest_lib.server.hosts import repair_utils
 from autotest_lib.site_utils.admin_audit import verifiers as audit_verify
 from autotest_lib.site_utils.admin_audit import constants as audit_const
+from autotest_lib.site_utils.admin_audit import battery_validator
 from six.moves import range
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
-from chromite.lib import timeout_util
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
 
 DEFAULT_SERVO_RESET_TRIGGER = (
         'ping',
@@ -105,7 +110,6 @@
         'ping',
         'ssh',
         'writable',
-        'stop_start_ui',
 )
 _JETSTREAM_USB_TRIGGERS = (
         'ping',
@@ -116,6 +120,10 @@
         'ping',
         'ssh',
 )
+_CROS_AC_TRIGGERS = (
+        'ping',
+        'power',
+)
 _CROS_USB_DEPENDENCIES = ('usb_drive', )
 
 
@@ -154,6 +162,15 @@
                     'Cannot determine AC power status')
 
     def _validate_battery(self, host, info):
+        host_info = host.host_info_store.get()
+        if host_info.get_label_value('power') == 'battery':
+            if 'Battery' not in info:
+                data = {'host': host.hostname, 'model': host_info.model}
+                metrics.Counter('chromeos/autotest/battery_not_detected'
+                                ).increment(fields=data)
+                logging.info('Battery is not present but expected!'
+                             ' Probably a hardware issue.')
+
         try:
             charging_state = info['Battery']['state']
             battery_level = float(info['Battery']['percentage'])
@@ -213,21 +230,26 @@
     @property
     def description(self):
         # pylint: disable=missing-docstring
-        return 'The DUT is plugged in to AC power and battery is charing'
+        return 'The DUT is plugged in to AC power and battery is charging'
 
 
-class CrosVerisionVerifier(hosts.Verifier):
+class ProvisioningLabelsVerifier(hosts.Verifier):
     """Confirm that current ChromeOS image on the host is matches
-    to provision-cros_version label.
+    to provision labels.
 
-    Some tests behavior may changed DUT image while they don't update
-    provision-cros_version label, which could cause the next test run
-    on the same host gets an unexpected OS version and yields false
-    positive test result.
+    Some tests may change the DUT image without updating the
+    provision-cros_version or provisioning-job_repo_url labels, which could
+    cause the next test run on the same host to get unexpected data and
+    yield a false positive test result.
     """
 
     @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
     def verify(self, host):
+        self._verify_cros_version(host)
+        self._verify_job_repo_url(host)
+
+    def _verify_cros_version(self, host):
+        """Verify that cros-version match version on the host."""
         label_match = True
         try:
             label_match = host.verify_cros_version_label()
@@ -235,14 +257,28 @@
             # We don't want fail this verifier for any errors that other
             # than a actual version mismatch, as that can make debugging
             # more challenge.
-            logging.warning('Unexpected error during verify cros verision'
-                            ' on %s; %s', host.hostname, e)
+            logging.warning(
+                    'Unexpected error during verify cros version on %s; %s',
+                    host.hostname, e)
 
         if not label_match:
             raise hosts.AutoservVerifyError('ChromeOS image on the host'
                                             ' does not match to cros-version'
                                             ' label.')
 
+    def _verify_job_repo_url(self, host):
+        """Verify that job_repo_url match version on the host."""
+        info = host.host_info_store.get()
+        job_repo_url = info.attributes.get(ds_constants.JOB_REPO_URL, '')
+        if not job_repo_url:
+            logging.debug('job_repo_url is empty. Skip check.')
+            return
+        os_from_host = host.get_release_builder_path()
+        if os_from_host not in job_repo_url:
+            raise hosts.AutoservVerifyError('ChromeOS image on the host'
+                                            ' does not match the job_repo_url'
+                                            ' label.')
+
     @property
     def description(self):
         # pylint: disable=missing-docstring
@@ -364,28 +400,19 @@
             return
 
         try:
-            status = CryptohomeStatus(host)
+            status = TpmStatus(host)
         except hosts.AutoservVerifyError:
             logging.info('Cannot determine the Cryptohome valid status - '
                          'skipping check.')
             return
         try:
-            tpm = status['tpm']
-            if not tpm['enabled']:
+            if not status['is_enabled']:
                 raise hosts.AutoservVerifyError(
                         'TPM is not enabled -- Hardware is not working.')
-            if not tpm['can_connect']:
-                raise hosts.AutoservVerifyError(
-                        ('TPM connect failed -- '
-                         'last_error=%d.' % tpm['last_error']))
-            if tpm['owned'] and not tpm['can_load_srk']:
-                raise hosts.AutoservVerifyError(
-                        'Cannot load the TPM SRK')
-            if tpm['can_load_srk'] and not tpm['can_load_srk_pubkey']:
-                raise hosts.AutoservVerifyError(
-                        'Cannot load the TPM SRK public key')
+            if status['is_owned'] and not status['is_srk_default_auth']:
+                raise hosts.AutoservVerifyError('Cannot load the TPM SRK')
         except KeyError:
-            logging.info('Cannot determine the Cryptohome valid status - '
+            logging.info('Cannot determine the TPM valid status - '
                          'skipping check.')
 
     @property
@@ -535,6 +562,12 @@
                 req = host.run('vpd -g serial_number', ignore_status=True)
         return req.stdout
 
+    def _is_applicable(self, host):
+        if host.is_satlab():
+            logging.info('Not critical for Satlab. Skipping')
+            return False
+        return True
+
     @property
     def description(self):
         # pylint: disable=missing-docstring
@@ -648,7 +681,7 @@
     def verify(self, host):
         # pylint: disable=missing-docstring
         try:
-            status = CryptohomeStatus(host)
+            status = TpmStatus(host)
             if not status.tpm_enabled:
                 raise hosts.AutoservVerifyError('TPM is not enabled')
             if not status.tpm_owned:
@@ -710,14 +743,6 @@
     def verify(self, host):
         # pylint: disable=missing-docstring
         try:
-            if not host.upstart_status('ap-controller'):
-                raise hosts.AutoservVerifyError(
-                    'ap-controller service is not running')
-        except error.AutoservRunError:
-            raise hosts.AutoservVerifyError(
-                'ap-controller service not found')
-
-        try:
             host.run('pgrep ap-controller')
         except error.AutoservRunError:
             raise hosts.AutoservVerifyError(
@@ -741,7 +766,7 @@
     @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
     def verify(self, host):
         try:
-            host.run('stop ui && start ui', ignore_status=True, timeout=10)
+            host.run('stop ui && start ui', ignore_status=True, timeout=45)
         except error.AutoservSSHTimeout:
             raise hosts.AutoservVerifyError(
                 "Got timeout when stop ui/start ui. DUT might crash.")
@@ -751,6 +776,36 @@
         return 'The DUT image works fine when stop ui/start ui.'
 
 
+class GscToolPresentVerifier(hosts.Verifier):
+    """Verify that GSC tool is functional.
+
+    If the board/model is expected to have the GSC tool but does not have it,
+    the host needs to be re-imaged to recover it.
+    If host-info has the 'cr50' label, we expect the GSC tool to be present
+    on the host.
+    """
+
+    VERIFY_GSC_CMD = 'gsctool -a -f'
+
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        r = host.run(self.VERIFY_GSC_CMD, ignore_status=True, timeout=10)
+        if r.exit_status != 0:
+            raise hosts.AutoservNonCriticalVerifyError(
+                    "GSC tool issue detected.")
+        logging.debug('GSC tool is functional.')
+
+    def _is_applicable(self, host):
+        host_info = host.host_info_store.get()
+        if host_info.get_label_value('cr50'):
+            return True
+        logging.info('GSC is not on the host.')
+        return False
+
+    @property
+    def description(self):
+        return 'Verify GSC tool is functional.'
+
+
 class ServoUSBDriveVerifier(hosts.Verifier):
     """Verify that USB drive on Servo is good to use.
 
@@ -779,8 +834,12 @@
         usb_state = host_info.get_label_value(
                 audit_const.SERVO_USB_STATE_PREFIX)
         if usb_state and usb_state == audit_const.HW_STATE_NEED_REPLACEMENT:
-            raise hosts.AutoservNonCriticalVerifyError(
-                    'USB-drive marked for replacement')
+            # Allow using a USB key marked for replacement.
+            # Goal: collect metrics to see if the DUT can still be recovered.
+            return
+            # TODO(otabek): restore when crbug.com/1164408 is fixed
+            # raise hosts.AutoservNonCriticalVerifyError(
+            #         'USB-drive marked for replacement')
 
         # The USB-drive detected and was not mark for replacement.
         # Set as normal for future audit.
@@ -824,6 +883,97 @@
         return 'Ensure DUT storage SMART information is in good state.'
 
 
+class AuditBattery(hosts.Verifier):
+    """Verify that battery on DUT is good to use.
+
+    Check if DUT drive is providing good SMART stats which not showing any
+    issues on it. The verifier can mark DUT for replacement if SMART stats
+    show outworn data.
+    """
+
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        # pylint: disable=missing-docstring
+        state = None
+        try:
+            state = self._get_validator(host).validate()
+        except Exception as e:
+            # We do not want to stop the main process if it fails.
+            logging.debug('(Not critical) %s', e)
+        if not state:
+            raise hosts.AutoservNonCriticalVerifyError(
+                    'DUT battery was not detected or state cannot be extracted.')
+        if state == audit_const.HW_STATE_NEED_REPLACEMENT:
+            logging.info('Detected issue with the battery on the DUT.')
+            host.set_device_needs_replacement()
+
+    def _is_applicable(self, host):
+        return self._get_validator(host).is_battery_expected()
+
+    def _get_validator(self, host):
+        if not getattr(self, '_validator', None):
+            self._validator = battery_validator.BatteryValidator(host)
+        return self._validator
+
+    @property
+    def description(self):
+        return 'Ensure DUT battery is in good state.'
+
+
+class ServoKeyboardMapVerifier(hosts.Verifier):
+    """Not critical verify to flash servo keyboard for the host.
+
+    Check if host support servo keyboard and update if firmware is not present.
+    """
+
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        try:
+            flasher = servo_keyboard_flasher.ServoKeyboardMapFlasher()
+            if flasher.is_image_supported(host):
+                flasher.update(host)
+        except Exception as e:
+            logging.debug('(Not critical) %s', e)
+            raise hosts.AutoservNonCriticalVerifyError(
+                    'Failed to verify/update servo keyboard map on the host.')
+
+    def _is_applicable(self, host):
+        if host.servo:
+            return True
+        return False
+
+    @property
+    def description(self):
+        return 'Verify and update servo keyboard map.'
+
+
+class ServoMacAddressVerifier(hosts.Verifier):
+    """Not critical verify to cache NIC mac address for the host on servo.
+
+    Servo_v4 plugged to the DUT and providing NIC for that. We caching mac
+    address on servod side for better debugging.
+    """
+
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        try:
+            helper = mac_address_helper.MacAddressHelper()
+            helper.update_if_needed(host)
+        except Exception as e:
+            logging.debug('(Not critical) %s', e)
+            raise hosts.AutoservNonCriticalVerifyError(
+                    'Failed to verify/update servo NIC mac address for host.')
+
+    def _is_applicable(self, host):
+        if host.servo:
+            return True
+        return False
+
+    @property
+    def description(self):
+        return 'Verify and update cached NIC mac address.'
+
+
 class _ResetRepairAction(hosts.RepairAction):
     """Common handling for repair actions that reset a DUT."""
 
@@ -831,7 +981,6 @@
         """Collect logs from a successfully repaired DUT."""
         dirname = 'after_%s' % self.tag
         local_log_dir = crashcollect.get_crashinfo_dir(host, dirname)
-        host.collect_logs('/var/log', local_log_dir, ignore_errors=True)
         # Collect crash info.
         crashcollect.get_crashinfo(host, None)
 
@@ -903,6 +1052,11 @@
         host.servo.get_power_state_controller().reset()
         self._check_reset_success(host)
 
+    def _is_applicable(self, host):
+        if host.servo:
+            return True
+        return False
+
     @property
     def description(self):
         # pylint: disable=missing-docstring
@@ -925,8 +1079,8 @@
         finally:
             # cr50 reset will clear some init like `ccd testlab open`
             # so we want to re-initialize servo after cr50 reset if the main
-            # device is ccd.
-            if host.servo.main_device_is_ccd():
+            # device uses cr50 console commands.
+            if host.servo.main_device_uses_gsc_drv():
                 host.servo.initialize_dut()
 
     def _is_applicable(self, host):
@@ -975,28 +1129,21 @@
         return 'Reset GBB flags and Reboot the host'
 
 
-class LabelCleanupRepair(hosts.RepairAction):
-    """Cleanup unexpected labels for the host, e.g. mismatched
-    cros-version label.
+class ProvisioningLabelsRepair(hosts.RepairAction):
+    """Repair issue with provisioning labels for the host.
+
+    The repair is doing simple clean up of labels as next provisioning will
+    re-generate required fields.
     """
-    # The repair action currently only cleanup cros-version label, however
-    # we can extent it to cleanup other labels when there is need, and it
-    # should be able to determine which label to clean based on check the
-    # cached result from it's trigger list. (example: trigger verifiers can
-    # be access via self._trigger_list, and we can tell which verifier failed
-    # by check Verifier._is_good() method.)
 
     @timeout_util.TimeoutDecorator(cros_constants.SHORT_REPAIR_TIMEOUT_SEC)
     def repair(self, host):
-        logging.info('Removing %s label from the host', host.VERSION_PREFIX)
-        info = host.host_info_store.get()
-        info.clear_version_labels()
-        host.host_info_store.commit(info)
+        afe_utils.clean_provision_labels(host)
 
     @property
     def description(self):
         # pylint: disable=missing-docstring
-        return 'Cleanup unexpected labels for the host'
+        return 'Cleanup provisioning labels for the host'
 
 
 class EnrollmentCleanupRepair(hosts.RepairAction):
@@ -1081,6 +1228,7 @@
     # stage image to usb drive, install chromeos image.
     @timeout_util.TimeoutDecorator(60 * 60)
     def repair(self, host):
+        self.boot_in_recovery = False
         # pylint: disable=missing-docstring
         repair_utils.require_servo(host, ignore_state=True)
         image_name = host.get_cros_repair_image_name()
@@ -1111,8 +1259,54 @@
             logging.info('Staging image: %s on caching server.', image_name)
             _, update_url = host.stage_image_for_servo()
         afe_utils.clean_provision_labels(host)
-        host.servo_install(update_url, is_repair=True)
+        # Start process to install new image from USB
+        need_snk = host.require_snk_mode_in_recovery()
+
+        host.servo.get_power_state_controller().power_off()
+        if update_url:
+            try:
+                host.install_image_to_servo_usb(image_url=update_url)
+            except Exception as e:
+                # Format the USB-storage, as an incorrectly downloaded image
+                # can create the false belief that the image was downloaded.
+                self._format_usb_storage(host)
+                # Power the DUT on, as leaving it in off mode can cause
+                # issues with detecting ccd_cr50 on the board.
+                host.servo.get_power_state_controller().power_on()
+                six.reraise(error.AutotestError, str(e), sys.exc_info()[2])
+        else:
+            # Give the DUT some time to power_off if we skip
+            # download image to usb. (crbug.com/982993)
+            time.sleep(10)
+
+        host.boot_in_recovery_mode(need_snk=need_snk)
+        # Note that the device successfully booted from USB,
+        # which means the RO firmware is good.
+        self.boot_in_recovery = True
+        host.run_install_image(install_timeout=host.ADMIN_INSTALL_TIMEOUT * 2,
+                               need_snk=need_snk,
+                               is_repair=True)
         afe_utils.add_provision_labels(host, host.VERSION_PREFIX, image_name)
+        # Collect info on which USB-key was used for the successful re-image.
+        host_info = host.host_info_store.get()
+        if host_info:
+            usb_state = host_info.get_label_value(
+                    audit_const.SERVO_USB_STATE_PREFIX)
+            metrics_data = {'host': host.hostname, 'usb_state': usb_state}
+            metrics.Counter('chromeos/autotest/usbkey_install_success'
+                            ).increment(fields=metrics_data)
+
+    def _format_usb_storage(self, host):
+        """Format USB-storage connected to servo."""
+        try:
+            # Format the USB-storage to prevent a corrupted image from being
+            # counted as a good image.
+            usb_path = host.servo.probe_host_usb_dev()
+            logging.info('Formatting %s', usb_path)
+            cmd = 'mkfs.ext4 -F %s' % usb_path
+            host._servo_host.run(cmd, ignore_status=True)
+        except Exception as e:
+            logging.info('(Not critical) failed to format USB-storage: %s', e)
 
     @property
     def description(self):
@@ -1120,6 +1314,207 @@
         return 'Reinstall from USB using servo'
 
 
+class ServoResetAfterUSBRepair(_ResetRepairAction):
+    """Repair a host by resetting it with servo.
+
+    This is a follow-up action for cases when the device fails to boot as
+    part of USB-install. The repair is applicable only if the device
+    successfully booted from the USB-key.
+    """
+
+    @timeout_util.TimeoutDecorator(cros_constants.REPAIR_TIMEOUT_SEC)
+    def repair(self, host):
+        # pylint: disable=missing-docstring
+        host.servo.get_power_state_controller().reset()
+        self._check_reset_success(host)
+
+    def _is_applicable(self, host):
+        if not host.servo:
+            return False
+        if host.is_marked_for_replacement():
+            logging.debug('The device is marked for replacement.'
+                          ' Skipping the action.')
+            return False
+        usb_install = host.get_repair_strategy_node('usb')
+        if not usb_install:
+            logging.debug('Strategy node not found! Skip repair action.')
+            return False
+        if not getattr(usb_install, 'boot_in_recovery', False):
+            logging.debug('Device did not boot in recovery mode.'
+                          ' Skip repair action.')
+            return False
+        return True
+
+    @property
+    def description(self):
+        # pylint: disable=missing-docstring
+        return 'Reset the DUT via servo after USB-install'
+
+
+class RecoverFwAfterUSBRepair(_ResetRepairAction):
+    """Recover FW on the host when host can boot in recovery mode.
+
+    This is a follow-up action for cases when the device fails to boot as
+    part of USB-install but successfully booted in recovery mode.
+
+    If the host can boot in recovery mode but fails to boot in default mode,
+    the firmware is probably corrupted. The repair tries to recover the
+    firmware on the host by booting from the USB-key.
+    """
+
+    # Command to update firmware located on host
+    _FW_UPDATE_CMD = 'chromeos-firmwareupdate --mode=recovery'
+
+    @timeout_util.TimeoutDecorator(cros_constants.LONG_REPAIR_TIMEOUT_SEC)
+    def repair(self, host):
+        # pylint: disable=missing-docstring
+        # Switch the USB-key to the servo side to wake it up, as sometimes
+        # the key can be reported as pointing to the DUT while the DUT does
+        # not yet see it.
+        host.servo.switch_usbkey('host')
+        time.sleep(host.servo.USB_DETECTION_DELAY)
+        # Power off the DUT, as this increases the chance that the host will
+        # boot in recovery mode.
+        host.servo.get_power_state_controller().power_off()
+        # Give the DUT some time to power_off if we skip
+        # download image to usb. (crbug.com/982993)
+        time.sleep(10)
+
+        # Boot the host in recovery mode, as this path is known to work and
+        # was verified by another repair action.
+        need_snk = host.require_snk_mode_in_recovery()
+        try:
+            host.boot_in_recovery_mode(need_snk=need_snk)
+            logging.debug('Host booted in recovery mode')
+
+            result = host.run(self._FW_UPDATE_CMD, ignore_status=True)
+            if result.exit_status != 0:
+                logging.error('chromeos-firmwareupdate failed: %s',
+                              result.stdout.strip())
+            host.halt()
+        finally:
+            # We need to reset the DUT whether the repair succeeded or not,
+            # as we don't want to leave the DUT booted from USB.
+            # N.B. The Servo API requires that we use power_on() here
+            # for two reasons:
+            #  1) After turning on a DUT in recovery mode, you must turn
+            #     it off and then on with power_on() once more to
+            #     disable recovery mode (this is a Parrot specific
+            #     requirement).
+            #  2) After power_off(), the only way to turn on is with
+            #     power_on() (this is a Storm specific requirement).
+            logging.debug('Power cycling DUT through servo.')
+            host.servo.get_power_state_controller().power_off()
+            host.servo.switch_usbkey('off')
+            if need_snk:
+                # Attempt to restore servo_v4 role to 'src' mode.
+                host.servo.set_servo_v4_role('src')
+            # Use cold-reset instead of 'on' to increase the chance that the
+            # DUT boots.
+            host.servo.get_power_state_controller().reset()
+        self._check_reset_success(host)
+
+    def _is_applicable(self, host):
+        if not host.servo:
+            return False
+        if host.is_marked_for_replacement():
+            logging.debug('The device is marked for replacement.'
+                          ' Skipping the action.')
+            return False
+        usb_install = host.get_repair_strategy_node('usb')
+        if not usb_install:
+            logging.debug('Strategy node not found! Skip repair action.')
+            return False
+        if not getattr(usb_install, 'boot_in_recovery', False):
+            logging.debug('Device did not boot in recovery mode.'
+                          ' Skip repair action.')
+            return False
+        dhp = host.health_profile
+        if not dhp:
+            logging.info('Device health profile is not available, cannot'
+                         ' determine if firmware repair is needed.')
+            return False
+        if dhp.get_failed_repair_action(self.tag) > 2:
+            logging.info('Firmware recovery has been attempted and failed 3'
+                         ' times, no need to retry.')
+            return False
+        return True
+
+    @property
+    def description(self):
+        # pylint: disable=missing-docstring
+        return 'Recover FW on the host after USB-install'
+
+
+class RecoverACPowerRepair(_ResetRepairAction):
+    """Recover AC detection if AC is not detected.
+
+    The fix based on toggle PD negotiating on EC level of DUT.
+    Repair works only for the DUT which has EC and battery.
+    """
+
+    @timeout_util.TimeoutDecorator(cros_constants.REPAIR_TIMEOUT_SEC)
+    def repair(self, host):
+        # pylint: disable=missing-docstring
+        repair_utils.require_servo(host, ignore_state=True)
+        # Verify that EC is available and we can interact with that.
+        # Do not put it in '_is_applicable' to avoid extra DUT reset.
+        try:
+            host.servo.get_ec_board()
+        except Exception as e:
+            logging.debug('(Not critical) %s', e)
+            # If the EC is off it will fail to execute any EC command.
+            # To wake it up we do a cold reboot; after that we have an active
+            # EC connection for ~30 seconds.
+            host.servo.get_power_state_controller().reset()
+        try:
+            if host.servo.get('battery_is_charging'):
+                # The device is charging.
+                return
+        except Exception as e:
+            logging.debug('(Not critical) %s', e)
+            raise hosts.AutoservRepairError(
+                    'Failed to read battery metrics from the EC')
+        # A simple off-on toggle is not always stable, and a source-sink
+        # toggle does not work in some other cases either. To cover more
+        # cases we perform both toggles to recover PD negotiation.
+        # Source/sink switches the CC lines to make the DUT act as a power
+        # supplier or consumer (between Rp and Rd).
+        self._set_pd_dualrole(host, 'off')
+        self._set_pd_dualrole(host, 'on')
+        self._set_pd_dualrole(host, 'source')
+        self._set_pd_dualrole(host, 'sink')
+        # Wait for PD negotiation to reinitialize and let the battery charge
+        # a little bit.
+        time.sleep(120)
+        # It is recommended to reset the EC after manipulating PD.
+        host.servo.get_power_state_controller().reset()
+        # Verify that the repair worked.
+        if not host.servo.get('battery_is_charging'):
+            raise hosts.AutoservRepairError(
+                    'Failed to recover AC detection for the DUT.',
+                    'failed_recover_usb_pd_ac')
+        self._check_reset_success(host)
+
+    def _set_pd_dualrole(self, host, role):
+        host.servo.set_nocheck('ec_uart_flush', 'off')
+        host.servo.set_nocheck('ec_uart_cmd', 'pd dualrole %s' % role)
+        host.servo.set_nocheck('ec_uart_flush', 'on')
+        time.sleep(1)
+
+    def _is_applicable(self, host):
+        if not host._servo_host.is_ec_supported():
+            logging.info('The board does not support EC.')
+            return False
+        host_info = host.host_info_store.get()
+        if host_info.get_label_value('power') != 'battery':
+            logging.info('The board does not have a battery.')
+            return False
+        return True
+
+    @property
+    def description(self):
+        # pylint: disable=missing-docstring
+        return 'Recover AC detection of the DUT'
+
+
 class JetstreamTpmRepair(hosts.RepairAction):
     """Repair by resetting TPM and rebooting."""
 
@@ -1180,7 +1575,7 @@
             (FirmwareVersionVerifier, 'rwfw', ('ssh', )),
             (PythonVerifier, 'python', ('ssh', )),
             (repair_utils.LegacyHostVerifier, 'cros', ('ssh', )),
-            (CrosVerisionVerifier, 'cros_version_label', ('ssh', )),
+            (ProvisioningLabelsVerifier, 'provisioning_labels', ('ssh', )),
     )
     return verify_dag
 
@@ -1190,6 +1585,10 @@
     return (
             (StopStartUIVerifier, 'stop_start_ui', ('ssh', )),
             (DUTStorageVerifier, 'storage', ('ssh', )),
+            (AuditBattery, 'audit_battery', ()),
+            (GscToolPresentVerifier, 'dut_gsctool', ('ssh', )),
+            (ServoKeyboardMapVerifier, 'dut_servo_keyboard', ('ssh', )),
+            (ServoMacAddressVerifier, 'dut_servo_macaddr', ('ssh', )),
     )
 
 
@@ -1215,8 +1614,8 @@
                     'ping',
                     'ssh',
             )),
-            (LabelCleanupRepair, 'label_cleanup', ('ssh', ),
-             ('cros_version_label', )),
+            (ProvisioningLabelsRepair, 'provisioning_labels_repair', ('ssh', ),
+             ('provisioning_labels', )),
 
             # N.B. FaftFirmwareRepair can't fix a 'good_provision' failure
             # directly, because it doesn't remove the flag file that triggers
@@ -1274,20 +1673,88 @@
     return repair_actions
 
 
-def _cros_dedicated_repair_actions(firmware_triggers=_CROS_FIRMWARE_TRIGGERS,
-                                   usb_dependencies=_CROS_USB_DEPENDENCIES):
-    """Return the repair actions that only works for `CrosHost`"""
-
-    repair_actions = ((cros_firmware.GeneralFirmwareRepair, 'general_firmware',
-                       usb_dependencies, firmware_triggers), )
-    return repair_actions
-
-
 def _cros_repair_actions():
     """Return the repair actions for a `CrosHost`."""
-    repair_actions = (_cros_basic_repair_actions() +
-                      _cros_extended_repair_actions() +
-                      _cros_dedicated_repair_actions())
+
+    servo_reset_trigger = DEFAULT_SERVO_RESET_TRIGGER
+    firmware_triggers = _CROS_FIRMWARE_TRIGGERS
+    ac_triggers = _CROS_AC_TRIGGERS
+    usb_dependencies = _CROS_USB_DEPENDENCIES
+    provision_triggers = _CROS_PROVISION_TRIGGERS + (
+            'stop_start_ui',
+            'dut_gsctool',
+    )
+    powerwash_triggers = _CROS_POWERWASH_TRIGGERS
+    usb_triggers = _CROS_USB_TRIGGERS
+
+    repair_actions = (
+            # RPM cycling must precede Servo reset:  if the DUT has a dead
+            # battery, we need to reattach AC power before we reset via servo.
+            (repair_utils.RPMCycleRepair, 'rpm', (), (
+                    'ping',
+                    'ssh',
+                    'power',
+            )),
+            (ServoResetRepair, 'servoreset', (), servo_reset_trigger),
+            (ServoCr50RebootRepair, 'cr50_reset', (), servo_reset_trigger),
+            (ServoSysRqRepair, 'sysrq', (), (
+                    'ping',
+                    'ssh',
+            )),
+            (ProvisioningLabelsRepair, 'provisioning_labels_repair', ('ssh', ),
+             ('provisioning_labels', )),
+
+            # N.B. FaftFirmwareRepair can't fix a 'good_provision' failure
+            # directly, because it doesn't remove the flag file that triggers
+            # the failure.  We include it as a repair trigger because it's
+            # possible that the last update failed because of the firmware,
+            # and we want the repair steps below to be able to trust the
+            # firmware.
+            (cros_firmware.FaftFirmwareRepair, 'faft_firmware_repair', (), (
+                    'ping',
+                    'ssh',
+                    'fwstatus',
+                    'good_provision',
+            )),
+            (DevDefaultBootRepair, 'set_default_boot', ('ssh', ),
+             ('dev_default_boot', )),
+            (CrosRebootRepair, 'reboot', ('ssh', ), (
+                    'devmode',
+                    'writable',
+            )),
+            (EnrollmentCleanupRepair, 'cleanup_enrollment', ('ssh', ),
+             ('enrollment_state', )),
+            (cros_firmware.GeneralFirmwareRepair, 'general_firmware',
+             usb_dependencies, firmware_triggers),
+            (RecoverACPowerRepair, 'ac_recover', (), ac_triggers),
+            (ProvisionRepair, 'provision', usb_triggers + powerwash_triggers,
+             provision_triggers),
+            (PowerWashRepair, 'powerwash', usb_triggers,
+             powerwash_triggers + provision_triggers),
+            (
+                    ServoInstallRepair,
+                    'usb',
+                    usb_dependencies,
+                    # faft_tpm is a trigger of the usb repair action but
+                    # should not be a dependency of the provision and
+                    # powerwash repair actions. Due to a restriction of the
+                    # current structure, we hardcode it here instead of
+                    # putting it into _CROS_USB_TRIGGERS.
+                    # TODO(xianuowang@) refactor the logic to create
+                    # action/verifier DAGs for different host types after we
+                    # decouple infra from the test autotest repo.
+                    usb_triggers + powerwash_triggers + provision_triggers +
+                    ('faft_tpm', )),
+            (ServoResetAfterUSBRepair, 'servo_reset_after_usb',
+             (usb_dependencies), (
+                     'ping',
+                     'ssh',
+             )),
+            (RecoverFwAfterUSBRepair, 'recover_fw_after_usb',
+             (usb_dependencies), (
+                     'ping',
+                     'ssh',
+             )),
+    )
     return repair_actions
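Reviewer note (illustration only, not part of the patch): every entry above follows the (action class, tag, dependencies, triggers) shape that the unit tests below assert against. A tiny standalone sketch that walks that structure, assuming only that _cros_repair_actions() above is importable, could be:

def dump_cros_repair_dag():
    """Sketch: print each repair action with its dependencies and triggers."""
    for action_class, tag, deps, triggers in _cros_repair_actions():
        print('%s (%s)' % (tag, action_class.__name__))
        print('    dependencies: %s' % (', '.join(deps) or '<none>'))
        print('    triggers:     %s' % (', '.join(triggers) or '<none>'))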
 
 
@@ -1406,72 +1873,59 @@
             'qemu' in output.stdout.lower())
 
 
-class CryptohomeStatus(dict):
+class TpmStatus(dict):
     """Wrapper for getting cryptohome status from a host."""
 
     def __init__(self, host):
-        super(CryptohomeStatus, self).__init__()
-        self.update(_get_cryptohome_status(host))
-        self.tpm = self['tpm']
+        super(TpmStatus, self).__init__()
+        self.update(_get_tpm_status(host))
 
     @property
     def tpm_enabled(self):
         # pylint: disable=missing-docstring
-        return self.tpm.get('enabled') == True
+        return self.get('is_enabled') == True
 
     @property
     def tpm_owned(self):
         # pylint: disable=missing-docstring
-        return self.tpm.get('owned') == True
+        return self.get('is_owned') == True
 
     @property
     def tpm_can_load_srk(self):
         # pylint: disable=missing-docstring
-        return self.tpm.get('can_load_srk') == True
+        return self.tpm_owned and self.get('is_srk_default_auth') == True
 
     @property
     def tpm_can_load_srk_pubkey(self):
         # pylint: disable=missing-docstring
-        return self.tpm.get('can_load_srk_pubkey') == True
+        return self.tpm_owned and self.get('is_srk_default_auth') == True
 
 
-def _get_cryptohome_status(host):
-    """Returns a dictionary containing the cryptohome status.
+def _get_tpm_status(host):
+    """Returns a dictionary containing the TPM status.
 
     @param host: a hosts.Host object.
-    @returns A dictionary containing the cryptohome status.
+    @returns A dictionary containing the TPM status.
     @raises AutoservVerifyError: if the output could not be parsed or the TPM
        status is missing.
     @raises hosts.AutoservRunError: if the tpm_manager_client command failed.
     """
-    # This cryptohome command emits status information in JSON format. It
-    # looks something like this:
-    # {
-    #    "installattrs": {
-    #       ...
-    #    },
-    #    "mounts": [ {
-    #       ...
-    #    } ],
-    #    "tpm": {
-    #       "being_owned": false,
-    #       "can_connect": true,
-    #       "can_decrypt": false,
-    #       "can_encrypt": false,
-    #       "can_load_srk": true,
-    #       "can_load_srk_pubkey": true,
-    #       "enabled": true,
-    #       "has_context": true,
-    #       "has_cryptohome_key": false,
-    #       "has_key_handle": false,
-    #       "last_error": 0,
-    #       "owned": true
-    #    }
-    # }
     try:
-        output = host.run('cryptohome --action=status').stdout.strip()
-        status = json.loads(output)
-        if 'tpm' not in status:
+        output = host.run(
+                'tpm_manager_client status --nonsensitive').stdout.strip()
+        lines = output.split('\n')[1:-1]
+        status = {}
+        for item in lines:
+            item = item.split(':')
+            if not item[0]:
+                continue
+            if len(item) == 1:
+                item.append('')
+            item = [x.strip() for x in item]
+            item[1] = True if item[1] == 'true' else item[1]
+            item[1] = False if item[1] == 'false' else item[1]
+            status[item[0]] = item[1]
+        if status['status'] != 'STATUS_SUCCESS':
             raise hosts.AutoservVerifyError('TPM status is missing')
         return status
     except ValueError:
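Reviewer note (illustration only, not part of the patch): a minimal standalone sketch of the line-splitting approach used by _get_tpm_status() above, applied to a sample `tpm_manager_client status --nonsensitive` reply. The sample mirrors the TPM_STATUS_OWNED fixture in the unit tests below; str.partition is used instead of split(':') purely for brevity.

SAMPLE_REPLY = """
Message Reply: [tpm_manager.GetTpmNonsensitiveStatusReply] {
  status: STATUS_SUCCESS
  is_enabled: true
  is_owned: true
}
"""


def parse_tpm_reply(output):
    """Sketch of the parsing performed by _get_tpm_status()."""
    status = {}
    # Skip the opening 'Message Reply: ... {' line and the closing '}'.
    for line in output.strip().split('\n')[1:-1]:
        key, _, value = line.partition(':')
        key, value = key.strip(), value.strip()
        if not key:
            continue
        status[key] = {'true': True, 'false': False}.get(value, value)
    return status


assert parse_tpm_reply(SAMPLE_REPLY) == {
        'status': 'STATUS_SUCCESS',
        'is_enabled': True,
        'is_owned': True,
}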
diff --git a/server/hosts/cros_repair_unittest.py b/server/hosts/cros_repair_unittest.py
index 2b94ad9..bc3979a 100755
--- a/server/hosts/cros_repair_unittest.py
+++ b/server/hosts/cros_repair_unittest.py
@@ -1,11 +1,11 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import itertools
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.client.common_lib import error
@@ -34,9 +34,15 @@
         (cros_firmware.FirmwareVersionVerifier, 'rwfw', ('ssh', )),
         (cros_repair.PythonVerifier, 'python', ('ssh', )),
         (repair_utils.LegacyHostVerifier, 'cros', ('ssh', )),
-        (cros_repair.CrosVerisionVerifier, 'cros_version_label', ('ssh', )),
+        (cros_repair.ProvisioningLabelsVerifier, 'provisioning_labels',
+         ('ssh', )),
         (cros_repair.StopStartUIVerifier, 'stop_start_ui', ('ssh', )),
         (cros_repair.DUTStorageVerifier, 'storage', ('ssh', )),
+        (cros_repair.AuditBattery, 'audit_battery', ()),
+        (cros_repair.GscToolPresentVerifier, 'dut_gsctool', ('ssh', )),
+        (cros_repair.ServoKeyboardMapVerifier, 'dut_servo_keyboard',
+         ('ssh', )),
+        (cros_repair.ServoMacAddressVerifier, 'dut_servo_macaddr', ('ssh', )),
 )
 
 CROS_REPAIR_ACTIONS = (
@@ -51,18 +57,14 @@
                 'stop_start_ui',
                 'power',
         )),
-        (
-                cros_repair.ServoCr50RebootRepair,
-                'cr50_reset',
-                (),
-                ('ping', 'ssh', 'stop_start_ui', 'power'),
-        ),
+        (cros_repair.ServoCr50RebootRepair, 'cr50_reset', (),
+         ('ping', 'ssh', 'stop_start_ui', 'power')),
         (cros_repair.ServoSysRqRepair, 'sysrq', (), (
                 'ping',
                 'ssh',
         )),
-        (cros_repair.LabelCleanupRepair, 'label_cleanup', ('ssh', ),
-         ('cros_version_label', )),
+        (cros_repair.ProvisioningLabelsRepair, 'provisioning_labels_repair',
+         ('ssh', ), ('provisioning_labels', )),
         (cros_firmware.FaftFirmwareRepair, 'faft_firmware_repair', (),
          ('ping', 'ssh', 'fwstatus', 'good_provision')),
         (cros_repair.DevDefaultBootRepair, 'set_default_boot', ('ssh', ),
@@ -73,23 +75,29 @@
         )),
         (cros_repair.EnrollmentCleanupRepair, 'cleanup_enrollment', ('ssh', ),
          ('enrollment_state', )),
-        (cros_repair.ProvisionRepair, 'provision',
-         ('ping', 'ssh', 'writable', 'stop_start_ui', 'tpm', 'good_provision',
-          'ext4'), ('power', 'rwfw', 'fwstatus', 'python', 'hwid', 'cros',
-                    'dev_default_boot')),
-        (cros_repair.PowerWashRepair, 'powerwash', ('ping', 'ssh', 'writable',
-                                                    'stop_start_ui'),
-         ('tpm', 'good_provision', 'ext4', 'power', 'rwfw', 'fwstatus',
-          'python', 'hwid', 'cros', 'dev_default_boot')),
-        (cros_repair.ServoInstallRepair, 'usb', ('usb_drive', ),
-         ('ping', 'ssh', 'writable', 'stop_start_ui', 'tpm', 'good_provision',
-          'ext4', 'power', 'rwfw', 'fwstatus', 'python', 'hwid', 'cros',
-          'dev_default_boot', 'faft_tpm')),
         (cros_firmware.GeneralFirmwareRepair, 'general_firmware',
          ('usb_drive', ), (
                  'ping',
                  'ssh',
          )),
+        (cros_repair.RecoverACPowerRepair, 'ac_recover', (), ('ping',
+                                                              'power')),
+        (cros_repair.ProvisionRepair, 'provision',
+         ('ping', 'ssh', 'writable', 'tpm', 'good_provision',
+          'ext4'), ('power', 'rwfw', 'fwstatus', 'python', 'hwid', 'cros',
+                    'dev_default_boot', 'stop_start_ui', 'dut_gsctool')),
+        (cros_repair.PowerWashRepair, 'powerwash', ('ping', 'ssh', 'writable'),
+         ('tpm', 'good_provision', 'ext4', 'power', 'rwfw', 'fwstatus',
+          'python', 'hwid', 'cros', 'dev_default_boot', 'stop_start_ui',
+          'dut_gsctool')),
+        (cros_repair.ServoInstallRepair, 'usb', ('usb_drive', ),
+         ('ping', 'ssh', 'writable', 'tpm', 'good_provision', 'ext4', 'power',
+          'rwfw', 'fwstatus', 'python', 'hwid', 'cros', 'dev_default_boot',
+          'stop_start_ui', 'dut_gsctool', 'faft_tpm')),
+        (cros_repair.ServoResetAfterUSBRepair, 'servo_reset_after_usb',
+         ('usb_drive', ), ('ping', 'ssh')),
+        (cros_repair.RecoverFwAfterUSBRepair, 'recover_fw_after_usb',
+         ('usb_drive', ), ('ping', 'ssh')),
 )
 
 MOBLAB_VERIFY_DAG = (
@@ -123,7 +131,8 @@
         (cros_firmware.FirmwareVersionVerifier, 'rwfw', ('ssh', )),
         (cros_repair.PythonVerifier, 'python', ('ssh', )),
         (repair_utils.LegacyHostVerifier, 'cros', ('ssh', )),
-        (cros_repair.CrosVerisionVerifier, 'cros_version_label', ('ssh', )),
+        (cros_repair.ProvisioningLabelsVerifier, 'provisioning_labels',
+         ('ssh', )),
         (cros_repair.JetstreamTpmVerifier, 'jetstream_tpm', ('ssh', )),
         (cros_repair.JetstreamAttestationVerifier, 'jetstream_attestation',
          ('ssh', )),
@@ -149,8 +158,8 @@
                 'ping',
                 'ssh',
         )),
-        (cros_repair.LabelCleanupRepair, 'label_cleanup', ('ssh', ),
-         ('cros_version_label', )),
+        (cros_repair.ProvisioningLabelsRepair, 'provisioning_labels_repair',
+         ('ssh', ), ('provisioning_labels', )),
         (cros_firmware.FaftFirmwareRepair, 'faft_firmware_repair', (),
          ('ping', 'ssh', 'fwstatus', 'good_provision')),
         (cros_repair.DevDefaultBootRepair, 'set_default_boot', ('ssh', ),
@@ -201,90 +210,36 @@
         )),
 )
 
-CRYPTOHOME_STATUS_OWNED = """{
-   "installattrs": {
-      "first_install": true,
-      "initialized": true,
-      "invalid": false,
-      "lockbox_index": 536870916,
-      "lockbox_nvram_version": 2,
-      "secure": true,
-      "size": 0,
-      "version": 1
-   },
-   "mounts": [  ],
-   "tpm": {
-      "being_owned": false,
-      "can_connect": true,
-      "can_decrypt": false,
-      "can_encrypt": false,
-      "can_load_srk": true,
-      "can_load_srk_pubkey": true,
-      "enabled": true,
-      "has_context": true,
-      "has_cryptohome_key": false,
-      "has_key_handle": false,
-      "last_error": 0,
-      "owned": true
-   }
+TPM_STATUS_OWNED = """
+Message Reply: [tpm_manager.GetTpmNonsensitiveStatusReply] {
+  status: STATUS_SUCCESS
+  is_enabled: true
+  is_owned: true
+  is_owner_password_present: true
+  has_reset_lock_permissions: true
+  is_srk_default_auth: true
 }
 """
 
-CRYPTOHOME_STATUS_NOT_OWNED = """{
-   "installattrs": {
-      "first_install": true,
-      "initialized": true,
-      "invalid": false,
-      "lockbox_index": 536870916,
-      "lockbox_nvram_version": 2,
-      "secure": true,
-      "size": 0,
-      "version": 1
-   },
-   "mounts": [  ],
-   "tpm": {
-      "being_owned": false,
-      "can_connect": true,
-      "can_decrypt": false,
-      "can_encrypt": false,
-      "can_load_srk": false,
-      "can_load_srk_pubkey": false,
-      "enabled": true,
-      "has_context": true,
-      "has_cryptohome_key": false,
-      "has_key_handle": false,
-      "last_error": 0,
-      "owned": false
-   }
+TPM_STATUS_NOT_OWNED = """
+Message Reply: [tpm_manager.GetTpmNonsensitiveStatusReply] {
+  status: STATUS_SUCCESS
+  is_enabled: true
+  is_owned: false
+  is_owner_password_present: false
+  has_reset_lock_permissions: false
+  is_srk_default_auth: true
 }
 """
 
-CRYPTOHOME_STATUS_CANNOT_LOAD_SRK = """{
-   "installattrs": {
-      "first_install": true,
-      "initialized": true,
-      "invalid": false,
-      "lockbox_index": 536870916,
-      "lockbox_nvram_version": 2,
-      "secure": true,
-      "size": 0,
-      "version": 1
-   },
-   "mounts": [  ],
-   "tpm": {
-      "being_owned": false,
-      "can_connect": true,
-      "can_decrypt": false,
-      "can_encrypt": false,
-      "can_load_srk": false,
-      "can_load_srk_pubkey": false,
-      "enabled": true,
-      "has_context": true,
-      "has_cryptohome_key": false,
-      "has_key_handle": false,
-      "last_error": 0,
-      "owned": true
-   }
+TPM_STATUS_CANNOT_LOAD_SRK = """
+Message Reply: [tpm_manager.GetTpmNonsensitiveStatusReply] {
+  status: STATUS_SUCCESS
+  is_enabled: true
+  is_owned: true
+  is_owner_password_present: false
+  has_reset_lock_permissions: false
+  is_srk_default_auth: false
 }
 """
 
@@ -350,47 +305,19 @@
             for label in deps + triggers:
                 self.assertIn(label, verify_labels)
 
-    def test_get_cryptohome_status_owned(self):
+    def test_get_tpm_status_owned(self):
         mock_host = mock.Mock()
-        mock_host.run.return_value.stdout = CRYPTOHOME_STATUS_OWNED
-        status = cros_repair.CryptohomeStatus(mock_host)
-        self.assertDictEqual({
-            'being_owned': False,
-            'can_connect': True,
-            'can_decrypt': False,
-            'can_encrypt': False,
-            'can_load_srk': True,
-            'can_load_srk_pubkey': True,
-            'enabled': True,
-            'has_context': True,
-            'has_cryptohome_key': False,
-            'has_key_handle': False,
-            'last_error': 0,
-            'owned': True,
-            }, status['tpm'])
+        mock_host.run.return_value.stdout = TPM_STATUS_OWNED
+        status = cros_repair.TpmStatus(mock_host)
         self.assertTrue(status.tpm_enabled)
         self.assertTrue(status.tpm_owned)
         self.assertTrue(status.tpm_can_load_srk)
         self.assertTrue(status.tpm_can_load_srk_pubkey)
 
-    def test_get_cryptohome_status_not_owned(self):
+    def test_get_tpm_status_not_owned(self):
         mock_host = mock.Mock()
-        mock_host.run.return_value.stdout = CRYPTOHOME_STATUS_NOT_OWNED
-        status = cros_repair.CryptohomeStatus(mock_host)
-        self.assertDictEqual({
-            'being_owned': False,
-            'can_connect': True,
-            'can_decrypt': False,
-            'can_encrypt': False,
-            'can_load_srk': False,
-            'can_load_srk_pubkey': False,
-            'enabled': True,
-            'has_context': True,
-            'has_cryptohome_key': False,
-            'has_key_handle': False,
-            'last_error': 0,
-            'owned': False,
-        }, status['tpm'])
+        mock_host.run.return_value.stdout = TPM_STATUS_NOT_OWNED
+        status = cros_repair.TpmStatus(mock_host)
         self.assertTrue(status.tpm_enabled)
         self.assertFalse(status.tpm_owned)
         self.assertFalse(status.tpm_can_load_srk)
@@ -400,7 +327,7 @@
     def test_tpm_status_verifier_owned(self, mock_is_virt):
         mock_is_virt.return_value = False
         mock_host = mock.Mock()
-        mock_host.run.return_value.stdout = CRYPTOHOME_STATUS_OWNED
+        mock_host.run.return_value.stdout = TPM_STATUS_OWNED
         tpm_verifier = cros_repair.TPMStatusVerifier('test', [])
         tpm_verifier.verify(mock_host)
 
@@ -408,7 +335,7 @@
     def test_tpm_status_verifier_not_owned(self, mock_is_virt):
         mock_is_virt.return_value = False
         mock_host = mock.Mock()
-        mock_host.run.return_value.stdout = CRYPTOHOME_STATUS_NOT_OWNED
+        mock_host.run.return_value.stdout = TPM_STATUS_NOT_OWNED
         tpm_verifier = cros_repair.TPMStatusVerifier('test', [])
         tpm_verifier.verify(mock_host)
 
@@ -416,18 +343,17 @@
     def test_tpm_status_verifier_cannot_load_srk_pubkey(self, mock_is_virt):
         mock_is_virt.return_value = False
         mock_host = mock.Mock()
-        mock_host.run.return_value.stdout = CRYPTOHOME_STATUS_CANNOT_LOAD_SRK
+        mock_host.run.return_value.stdout = TPM_STATUS_CANNOT_LOAD_SRK
         tpm_verifier = cros_repair.TPMStatusVerifier('test', [])
         with self.assertRaises(hosts.AutoservVerifyError) as ctx:
             tpm_verifier.verify(mock_host)
-        self.assertEqual('Cannot load the TPM SRK',
-                         ctx.exception.message)
+        self.assertEqual('Cannot load the TPM SRK', str(ctx.exception))
 
     def test_jetstream_tpm_owned(self):
         mock_host = mock.Mock()
         mock_host.run.side_effect = [
-            mock.Mock(stdout=CRYPTOHOME_STATUS_OWNED),
-            mock.Mock(stdout=TPM_STATUS_READY),
+                mock.Mock(stdout=TPM_STATUS_OWNED),
+                mock.Mock(stdout=TPM_STATUS_READY),
         ]
         tpm_verifier = cros_repair.JetstreamTpmVerifier('test', [])
         tpm_verifier.verify(mock_host)
@@ -438,11 +364,11 @@
     def test_jetstream_tpm_not_owned(self, mock_sleep, mock_time, mock_logging):
         mock_time.side_effect = itertools.count(0, 20)
         mock_host = mock.Mock()
-        mock_host.run.return_value.stdout = CRYPTOHOME_STATUS_NOT_OWNED
+        mock_host.run.return_value.stdout = TPM_STATUS_NOT_OWNED
         tpm_verifier = cros_repair.JetstreamTpmVerifier('test', [])
         with self.assertRaises(hosts.AutoservVerifyError) as ctx:
             tpm_verifier.verify(mock_host)
-        self.assertEqual('TPM is not owned', ctx.exception.message)
+        self.assertEqual('TPM is not owned', str(ctx.exception))
 
     @mock.patch.object(retry.logging, 'warning')
     @mock.patch.object(retry.time, 'time')
@@ -451,27 +377,25 @@
         mock_time.side_effect = itertools.count(0, 20)
         mock_host = mock.Mock()
         mock_host.run.side_effect = itertools.cycle([
-            mock.Mock(stdout=CRYPTOHOME_STATUS_OWNED),
-            mock.Mock(stdout=TPM_STATUS_NOT_READY),
+                mock.Mock(stdout=TPM_STATUS_OWNED),
+                mock.Mock(stdout=TPM_STATUS_NOT_READY),
         ])
         tpm_verifier = cros_repair.JetstreamTpmVerifier('test', [])
         with self.assertRaises(hosts.AutoservVerifyError) as ctx:
             tpm_verifier.verify(mock_host)
-        self.assertEqual('TPM is not ready', ctx.exception.message)
+        self.assertEqual('TPM is not ready', str(ctx.exception))
 
     @mock.patch.object(retry.logging, 'warning')
     @mock.patch.object(retry.time, 'time')
     @mock.patch.object(retry.time, 'sleep')
-    def test_jetstream_cryptohome_missing(self, mock_sleep, mock_time,
-                                          mock_logging):
+    def test_jetstream_tpm_missing(self, mock_sleep, mock_time, mock_logging):
         mock_time.side_effect = itertools.count(0, 20)
         mock_host = mock.Mock()
         mock_host.run.side_effect = error.AutoservRunError('test', None)
         tpm_verifier = cros_repair.JetstreamTpmVerifier('test', [])
         with self.assertRaises(hosts.AutoservVerifyError) as ctx:
             tpm_verifier.verify(mock_host)
-        self.assertEqual('Could not determine TPM status',
-                         ctx.exception.message)
+        self.assertEqual('Could not determine TPM status', str(ctx.exception))
 
 
 if __name__ == '__main__':
diff --git a/server/hosts/drone_api_client/README b/server/hosts/drone_api_client/README
deleted file mode 100644
index b82a74d..0000000
--- a/server/hosts/drone_api_client/README
+++ /dev/null
@@ -1,9 +0,0 @@
-Protos in this dir are generated using
-`python -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. common.proto`
-and manually checked into the tree. The source of the protos is
-src/config/proto/chromiumos/config/api/test/tls/common.proto, relative to a
-standard repo.
-(https://chromium.git.corp.google.com/chromiumos/config/+/HEAD/proto/chromiumos/config/api/test/tls/common.proto)
-If there are updates to src proto, the protos here must be re-generated. Run the
-script "buildprotos.py" to automatically rebuild them from the source. You must
-manually repo sync prior to this.
diff --git a/server/hosts/drone_api_client/buildprotos.py b/server/hosts/drone_api_client/buildprotos.py
deleted file mode 100644
index ce9413e..0000000
--- a/server/hosts/drone_api_client/buildprotos.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""Deletes the existing bindings, then rebuild using the source .proto file."""
-
-import os
-from shutil import copyfile
-
-
-UP = '../'
-PROTO_PATH = 'src/config/proto/chromiumos/config/api/test/tls/'
-PROTO_NAME = 'common.proto'
-BUILD_CMD = (
-    "python -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. {}"
-    .format(PROTO_NAME))
-
-
-def delete_old_protos():
-    """Delete any existing protos or built proto bindings."""
-    for file in os.listdir('.'):
-        if 'common' in file:
-            os.remove(file)
-
-
-def copy_proto_from_src():
-    """Copy the proto from the src dir to the local dir."""
-    proto_dir = get_proto_dir()
-    if os.path.isfile(proto_dir):
-        copyfile(proto_dir, PROTO_NAME)
-    else:
-        raise Exception("Protos not found @ {}".format(proto_dir))
-
-
-def get_proto_dir():
-    """Return the full path of the common.proto from TLS."""
-    return "{}{}{}".format(UP * get_current_depth(), PROTO_PATH, PROTO_NAME)
-
-
-def get_current_depth():
-    """Return the current depth off /src/ within the file structure."""
-    dirs = os.getcwd().split('/')
-    src_level = dirs.index('src')
-    return len(dirs) - src_level
-
-
-def create_bindings():
-    os.system(BUILD_CMD)
-
-
-def main():
-    delete_old_protos()
-    copy_proto_from_src()
-    create_bindings()
-
-if __name__ == "__main__":
-    main()
diff --git a/server/hosts/drone_api_client/client.py b/server/hosts/drone_api_client/client.py
deleted file mode 100644
index 222283d..0000000
--- a/server/hosts/drone_api_client/client.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Lint as: python2, python3
-"""Client for Autotest side communcations to the TLS SSH Server."""
-
-
-import grpc
-import logging
-import six
-import time
-
-import common_pb2
-import common_pb2_grpc
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-TLS_PORT = 7152
-TLS_IP = '10.254.254.254'
-
-class TLSClient(object):
-    """The client side connection to Common-TLS service running in a drone."""
-
-    def __init__(self, hostname):
-        """Configure the grpc channel."""
-        self.hostname = hostname
-        self.channel = grpc.insecure_channel('{}:{}'.format(TLS_IP, TLS_PORT))
-        self.stub = common_pb2_grpc.CommonStub(self.channel)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *exc):
-        self.close()
-
-    def run_cmd(self, cmd, stdout_tee=None, stderr_tee=None, timeout=120):
-        """
-        Run a command on the host configured during init.
-
-        @param cmd: shell cmd to execute on the DUT
-        @param: stdout_tee/stderr_tee: objects to write the data from the
-            respective streams to
-        @param timeout int(seconds): how long to allow the command to run
-            before forcefully killing it.
-
-        """
-        res = utils.CmdResult(command=cmd)
-        try:
-            logging.debug(
-                "Running command %s via TLS ExecDutCommand on host %s",
-                cmd, self.hostname)
-            self._run(cmd, stdout_tee, stderr_tee, res, timeout)
-        except grpc.RpcError as e:
-            if e.code().name == "DEADLINE_EXCEEDED":
-                raise error.CmdTimeoutError(
-                    cmd, res,
-                    "Command(s) did not complete within %d seconds" % timeout)
-            raise e
-        except Exception as e:
-            raise e
-        return res
-
-    def _run(self, cmd, stdout_tee, stderr_tee, res, timeout):
-        """Run the provided cmd, populate the res and return it."""
-        start_time = time.time()
-        response = self._send_cmd(cmd, timeout)
-
-        stdout_buf = six.StringIO()
-        stderr_buf = six.StringIO()
-        last_status = 0
-
-        if response:
-            for item in response:
-                last_status = item.exit_info.status
-                _parse_item_and_log(item.stdout, stdout_buf, stdout_tee)
-                _parse_item_and_log(item.stderr, stderr_buf, stderr_tee)
-
-        res.stdout = stdout_buf.getvalue()
-        res.stderr = stderr_buf.getvalue()
-        res.exit_status = last_status
-        res.duration = time.time() - start_time
-
-    def _send_cmd(self, cmd, timeout):
-        """Serialize and send the cmd to the TLS service."""
-        formatted_cmd = common_pb2.ExecDutCommandRequest(name=self.hostname,
-                                                         command=cmd)
-        return self.stub.ExecDutCommand(formatted_cmd, timeout=timeout)
-
-    def close(self):
-        """Close the grpc channel."""
-        self.channel.close()
-
-
-def _parse_item_and_log(item, buf, tee):
-    """
-    Parse the provided item.
-
-    If the item exists, append the provided arr with the item & write to
-        the provided tee if provided.
-
-    """
-    if not item:
-        return
-    buf.write(item)
-    if tee is not None and tee is not utils.TEE_TO_LOGS:
-        tee.write(item)
diff --git a/server/hosts/drone_api_client/common.proto b/server/hosts/drone_api_client/common.proto
deleted file mode 100644
index d18ed63..0000000
--- a/server/hosts/drone_api_client/common.proto
+++ /dev/null
@@ -1,122 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-syntax = "proto3";
-
-package chromiumos.config.api.test.tls;
-
-option go_package = "go.chromium.org/chromiumos/config/go/api/test/tls";
-
-// Common lab services implemented on top of the wiring APIs.
-//
-// The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL
-// NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED",  "MAY", and
-// "OPTIONAL" in this document are to be interpreted as described in
-// RFC 2119.
-//
-// All clients SHOULD pass the gRPC metadata key request_trace_id with one
-// value. The value is a unique string that is associated with the method call
-// in metrics. Clients that do not pass request_trace_id MAY be rejected so that
-// they can be fixed.
-service Common {
-  // ExecDutCommand runs a command on a DUT.
-  //
-  // The working directory is /.
-  // A tty is not spawned for the command.
-  // The user and group is root.
-  // All signals have their default dispositions and are not masked.
-  // The umask is set to 0.
-  //
-  // The environment contains:
-  //
-  //   TERM=dumb
-  //   PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/bin
-  //   LANG=en_US.UTF-8
-  //   USER=root
-  //   HOME=/root
-  //
-  // The environment MAY also contain SSH client variables.
-  // The environment SHALL NOT contain variables not mentioned above.
-  //
-  // If the stream is interrupted, the implementation MAY attempt to
-  // stop the command by sending SIGINT, SIGHUP, SIGTERM, or SIGKILL.
-  rpc ExecDutCommand(ExecDutCommandRequest)
-      returns (stream ExecDutCommandResponse);
-}
-
-message ExecDutCommandRequest {
-  // name is the resource name for the DUT.
-  // The DUT name is passed to the RTD when the RTD is started.
-  // It is not specified whether the name is the DUT hostname.
-  string name = 1;
-  // command is the command to run.
-  // If this contains no slashes, it is resolved using PATH.
-  // If this starts with /, it is used as an absolute path to the
-  // program to run.
-  // Otherwise, this is treated as a path relative to the working
-  // directory.
-  string command = 2;
-  // args are the arguments to pass to the command.
-  repeated string args = 3;
-  // stdin is passed to the command as the program's stdin.
-  // The stream does not support seeking.
-  // An empty bytes is not treated specially; if the command reads
-  // from stdin, it will receive zero bytes.
-  bytes stdin = 4;
-  // stdout indicates how to handle the command's stdout.
-  Output stdout = 5;
-  // stderr indicates how to handle the command's stderr.
-  Output stderr = 6;
-}
-message ExecDutCommandResponse {
-  message ExitInfo {
-    // status provides information about how the command process
-    // terminated.
-    //
-    // If the command failed to start, status is set to an arbitrary
-    // non-zero value.
-    //
-    // If signaled is set, status is set to the signal that caused
-    // the command to terminate.
-    //
-    // Otherwise, status is set to the exit status of the process.
-    // Exit statuses outside of 0 to 255 inclusive are not supported;
-    // they will be mapped to an arbitrary non-zero value.
-    //
-    // status is zero if and only if the process was successfully
-    // started and exited with a zero status.
-    int32 status = 1;
-    // signaled indicates whether the command exited due to a signal.
-    // If set, status contains the signal.
-    bool signaled = 2;
-    // started indicates whether the command was started.
-    bool started = 3;
-    // error_message provides a human readable explanation for some errors.
-    // This MUST NOT be inspected by programs.
-    string error_message = 4;
-  }
-  // exit_info contains exit information.
-  // This is set when the command has exited or failed to start.
-  // This is set on the last message in the response stream.
-  ExitInfo exit_info = 1;
-  // stdout contains the shell command's stdout output since the last
-  // response in the stream.
-  // The implementation MAY batch or delay output to later
-  // responses in the stream.
-  bytes stdout = 2;
-  // stderr contains the shell command's stderr output since the last
-  // response in the stream.
-  // The implementation MAY batch or delay output to later
-  // responses in the stream.
-  bytes stderr = 3;
-}
-
-// Output enumeration for ExecDutCommandRequest.
-enum Output {
-  // OUTPUT_PIPE means to collect output and return it.
-  OUTPUT_PIPE = 0;
-  // OUTPUT_STDOUT is a special value for stderr which means to merge stderr
-  // into stdout.
-  OUTPUT_STDOUT = 1;
-}
diff --git a/server/hosts/drone_api_client/common_pb2.py b/server/hosts/drone_api_client/common_pb2.py
deleted file mode 100644
index 64cca42..0000000
--- a/server/hosts/drone_api_client/common_pb2.py
+++ /dev/null
@@ -1,272 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: common.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='common.proto',
-  package='chromiumos.config.api.test.tls',
-  syntax='proto3',
-  serialized_options=_b('Z1go.chromium.org/chromiumos/config/go/api/test/tls'),
-  serialized_pb=_b('\n\x0c\x63ommon.proto\x12\x1e\x63hromiumos.config.api.test.tls\"\xc3\x01\n\x15\x45xecDutCommandRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ommand\x18\x02 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\r\n\x05stdin\x18\x04 \x01(\x0c\x12\x36\n\x06stdout\x18\x05 \x01(\x0e\x32&.chromiumos.config.api.test.tls.Output\x12\x36\n\x06stderr\x18\x06 \x01(\x0e\x32&.chromiumos.config.api.test.tls.Output\"\xe2\x01\n\x16\x45xecDutCommandResponse\x12R\n\texit_info\x18\x01 \x01(\x0b\x32?.chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo\x12\x0e\n\x06stdout\x18\x02 \x01(\x0c\x12\x0e\n\x06stderr\x18\x03 \x01(\x0c\x1aT\n\x08\x45xitInfo\x12\x0e\n\x06status\x18\x01 \x01(\x05\x12\x10\n\x08signaled\x18\x02 \x01(\x08\x12\x0f\n\x07started\x18\x03 \x01(\x08\x12\x15\n\rerror_message\x18\x04 \x01(\t*,\n\x06Output\x12\x0f\n\x0bOUTPUT_PIPE\x10\x00\x12\x11\n\rOUTPUT_STDOUT\x10\x01\x32\x8c\x01\n\x06\x43ommon\x12\x81\x01\n\x0e\x45xecDutCommand\x12\x35.chromiumos.config.api.test.tls.ExecDutCommandRequest\x1a\x36.chromiumos.config.api.test.tls.ExecDutCommandResponse0\x01\x42\x33Z1go.chromium.org/chromiumos/config/go/api/test/tlsb\x06proto3')
-)
-
-_OUTPUT = _descriptor.EnumDescriptor(
-  name='Output',
-  full_name='chromiumos.config.api.test.tls.Output',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='OUTPUT_PIPE', index=0, number=0,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='OUTPUT_STDOUT', index=1, number=1,
-      serialized_options=None,
-      type=None),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=475,
-  serialized_end=519,
-)
-_sym_db.RegisterEnumDescriptor(_OUTPUT)
-
-Output = enum_type_wrapper.EnumTypeWrapper(_OUTPUT)
-OUTPUT_PIPE = 0
-OUTPUT_STDOUT = 1
-
-
-
-_EXECDUTCOMMANDREQUEST = _descriptor.Descriptor(
-  name='ExecDutCommandRequest',
-  full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='command', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.command', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='args', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.args', index=2,
-      number=3, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='stdin', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.stdin', index=3,
-      number=4, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='stdout', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.stdout', index=4,
-      number=5, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='stderr', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.stderr', index=5,
-      number=6, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=49,
-  serialized_end=244,
-)
-
-
-_EXECDUTCOMMANDRESPONSE_EXITINFO = _descriptor.Descriptor(
-  name='ExitInfo',
-  full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='status', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo.status', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='signaled', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo.signaled', index=1,
-      number=2, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='started', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo.started', index=2,
-      number=3, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='error_message', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo.error_message', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=389,
-  serialized_end=473,
-)
-
-_EXECDUTCOMMANDRESPONSE = _descriptor.Descriptor(
-  name='ExecDutCommandResponse',
-  full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='exit_info', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.exit_info', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='stdout', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.stdout', index=1,
-      number=2, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='stderr', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.stderr', index=2,
-      number=3, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[_EXECDUTCOMMANDRESPONSE_EXITINFO, ],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=247,
-  serialized_end=473,
-)
-
-_EXECDUTCOMMANDREQUEST.fields_by_name['stdout'].enum_type = _OUTPUT
-_EXECDUTCOMMANDREQUEST.fields_by_name['stderr'].enum_type = _OUTPUT
-_EXECDUTCOMMANDRESPONSE_EXITINFO.containing_type = _EXECDUTCOMMANDRESPONSE
-_EXECDUTCOMMANDRESPONSE.fields_by_name['exit_info'].message_type = _EXECDUTCOMMANDRESPONSE_EXITINFO
-DESCRIPTOR.message_types_by_name['ExecDutCommandRequest'] = _EXECDUTCOMMANDREQUEST
-DESCRIPTOR.message_types_by_name['ExecDutCommandResponse'] = _EXECDUTCOMMANDRESPONSE
-DESCRIPTOR.enum_types_by_name['Output'] = _OUTPUT
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-ExecDutCommandRequest = _reflection.GeneratedProtocolMessageType('ExecDutCommandRequest', (_message.Message,), dict(
-  DESCRIPTOR = _EXECDUTCOMMANDREQUEST,
-  __module__ = 'common_pb2'
-  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ExecDutCommandRequest)
-  ))
-_sym_db.RegisterMessage(ExecDutCommandRequest)
-
-ExecDutCommandResponse = _reflection.GeneratedProtocolMessageType('ExecDutCommandResponse', (_message.Message,), dict(
-
-  ExitInfo = _reflection.GeneratedProtocolMessageType('ExitInfo', (_message.Message,), dict(
-    DESCRIPTOR = _EXECDUTCOMMANDRESPONSE_EXITINFO,
-    __module__ = 'common_pb2'
-    # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo)
-    ))
-  ,
-  DESCRIPTOR = _EXECDUTCOMMANDRESPONSE,
-  __module__ = 'common_pb2'
-  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ExecDutCommandResponse)
-  ))
-_sym_db.RegisterMessage(ExecDutCommandResponse)
-_sym_db.RegisterMessage(ExecDutCommandResponse.ExitInfo)
-
-
-DESCRIPTOR._options = None
-
-_COMMON = _descriptor.ServiceDescriptor(
-  name='Common',
-  full_name='chromiumos.config.api.test.tls.Common',
-  file=DESCRIPTOR,
-  index=0,
-  serialized_options=None,
-  serialized_start=522,
-  serialized_end=662,
-  methods=[
-  _descriptor.MethodDescriptor(
-    name='ExecDutCommand',
-    full_name='chromiumos.config.api.test.tls.Common.ExecDutCommand',
-    index=0,
-    containing_service=None,
-    input_type=_EXECDUTCOMMANDREQUEST,
-    output_type=_EXECDUTCOMMANDRESPONSE,
-    serialized_options=None,
-  ),
-])
-_sym_db.RegisterServiceDescriptor(_COMMON)
-
-DESCRIPTOR.services_by_name['Common'] = _COMMON
-
-# @@protoc_insertion_point(module_scope)
diff --git a/server/hosts/drone_api_client/common_pb2_grpc.py b/server/hosts/drone_api_client/common_pb2_grpc.py
deleted file mode 100644
index a3472b8..0000000
--- a/server/hosts/drone_api_client/common_pb2_grpc.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
-import common_pb2 as common__pb2
-
-
-class CommonStub(object):
-  """Common lab services implemented on top of the wiring APIs.
-
-  The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL
-  NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED",  "MAY", and
-  "OPTIONAL" in this document are to be interpreted as described in
-  RFC 2119.
-
-  All clients SHOULD pass the gRPC metadata key request_trace_id with one
-  value. The value is a unique string that is associated with the method call
-  in metrics. Clients that do not pass request_trace_id MAY be rejected so that
-  they can be fixed.
-  """
-
-  def __init__(self, channel):
-    """Constructor.
-
-    Args:
-      channel: A grpc.Channel.
-    """
-    self.ExecDutCommand = channel.unary_stream(
-        '/chromiumos.config.api.test.tls.Common/ExecDutCommand',
-        request_serializer=common__pb2.ExecDutCommandRequest.SerializeToString,
-        response_deserializer=common__pb2.ExecDutCommandResponse.FromString,
-        )
-
-
-class CommonServicer(object):
-  """Common lab services implemented on top of the wiring APIs.
-
-  The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL
-  NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED",  "MAY", and
-  "OPTIONAL" in this document are to be interpreted as described in
-  RFC 2119.
-
-  All clients SHOULD pass the gRPC metadata key request_trace_id with one
-  value. The value is a unique string that is associated with the method call
-  in metrics. Clients that do not pass request_trace_id MAY be rejected so that
-  they can be fixed.
-  """
-
-  def ExecDutCommand(self, request, context):
-    """ExecDutCommand runs a command on a DUT.
-
-    The working directory is /.
-    A tty is not spawned for the command.
-    The user and group is root.
-    All signals have their default dispositions and are not masked.
-    The umask is set to 0.
-
-    The environment contains:
-
-    TERM=dumb
-    PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/bin
-    LANG=en_US.UTF-8
-    USER=root
-    HOME=/root
-
-    The environment MAY also contain SSH client variables.
-    The environment SHALL NOT contain variables not mentioned above.
-
-    If the stream is interrupted, the implementation MAY attempt to
-    stop the command by sending SIGINT, SIGHUP, SIGTERM, or SIGKILL.
-    """
-    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-    context.set_details('Method not implemented!')
-    raise NotImplementedError('Method not implemented!')
-
-
-def add_CommonServicer_to_server(servicer, server):
-  rpc_method_handlers = {
-      'ExecDutCommand': grpc.unary_stream_rpc_method_handler(
-          servicer.ExecDutCommand,
-          request_deserializer=common__pb2.ExecDutCommandRequest.FromString,
-          response_serializer=common__pb2.ExecDutCommandResponse.SerializeToString,
-      ),
-  }
-  generic_handler = grpc.method_handlers_generic_handler(
-      'chromiumos.config.api.test.tls.Common', rpc_method_handlers)
-  server.add_generic_rpc_handlers((generic_handler,))
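The stub deleted above documents the Common.ExecDutCommand server-streaming RPC. For orientation, a minimal sketch of how a client drives such a generated stub over gRPC; the channel address and DUT name are placeholders, not autotest's actual TLS wiring:

    import grpc

    import common_pb2
    import common_pb2_grpc

    # Placeholder endpoint; the real TLS service address comes from the lab setup.
    channel = grpc.insecure_channel('127.0.0.1:7152')
    stub = common_pb2_grpc.CommonStub(channel)
    request = common_pb2.ExecDutCommandRequest(
            name='dut-hostname',            # placeholder DUT name
            command='echo',
            args=['hello'],
            stdout=common_pb2.OUTPUT_PIPE,
            stderr=common_pb2.OUTPUT_PIPE)
    # ExecDutCommand is unary-stream: responses arrive while the command runs,
    # and the final message typically carries exit_info.
    for response in stub.ExecDutCommand(request):
        if response.stdout:
            print(response.stdout.decode('utf-8', errors='replace'))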
diff --git a/server/hosts/factory.py b/server/hosts/factory.py
index a80d7d0..c5a4188 100644
--- a/server/hosts/factory.py
+++ b/server/hosts/factory.py
@@ -4,9 +4,10 @@
 
 """Provides a factory method to create a host object."""
 
-import logging
 from contextlib import closing
 from contextlib import contextmanager
+import logging
+import os
 
 from autotest_lib.client.bin import local_host
 from autotest_lib.client.bin import utils
@@ -15,6 +16,7 @@
 from autotest_lib.client.common_lib import global_config
 from autotest_lib.server import utils as server_utils
 from autotest_lib.server.cros.dynamic_suite import constants
+from autotest_lib.server.hosts import android_host
 from autotest_lib.server.hosts import cros_host
 from autotest_lib.server.hosts import host_info
 from autotest_lib.server.hosts import jetstream_host
@@ -30,7 +32,7 @@
 # Default ssh options used in creating a host.
 DEFAULT_SSH_USER = 'root'
 DEFAULT_SSH_PASS = ''
-DEFAULT_SSH_PORT = 22
+DEFAULT_SSH_PORT = None
 DEFAULT_SSH_VERBOSITY = ''
 DEFAULT_SSH_OPTIONS = ''
 
@@ -43,16 +45,26 @@
 host_types = [cros_host.CrosHost, labstation_host.LabstationHost,
               moblab_host.MoblabHost, jetstream_host.JetstreamHost,
               gce_host.GceHost]
-OS_HOST_DICT = {'cros': cros_host.CrosHost,
-                'jetstream': jetstream_host.JetstreamHost,
-                'moblab': moblab_host.MoblabHost,
-                'labstation': labstation_host.LabstationHost}
+OS_HOST_DICT = {
+        'android': android_host.AndroidHost,
+        'cros': cros_host.CrosHost,
+        'jetstream': jetstream_host.JetstreamHost,
+        'moblab': moblab_host.MoblabHost,
+        'labstation': labstation_host.LabstationHost
+}
+
+LOOKUP_DICT = {
+        'CrosHost': cros_host.CrosHost,
+        'JetstreamHost': jetstream_host.JetstreamHost,
+        'MoblabHost': moblab_host.MoblabHost,
+        'LabstationHost': labstation_host.LabstationHost
+}
 
 # Timeout for early connectivity check to the host, in seconds.
 _CONNECTIVITY_CHECK_TIMEOUT_S = 10
 
 
-def _get_host_arguments(machine):
+def _get_host_arguments(machine, **args):
     """Get parameters to construct a host object.
 
     There are currently 2 use cases for creating a host.
@@ -61,6 +73,8 @@
        are available as the variables ssh_user, ssh_pass etc.
     2. Directly through factory.create_host, in which case we use
        the same defaults as used in the server job to create a host.
+    3. Through neither of the above, in which case args can be provided
+       and should be respected if a global or host_info value is not set.
 
     @param machine: machine dict
     @return: A dictionary containing arguments for host specifically hostname,
@@ -73,27 +87,41 @@
     info = host_info_store.get()
 
     g = globals()
-    user = info.attributes.get('ssh_user', g.get('ssh_user', DEFAULT_SSH_USER))
+
+    # For each argument, prefer the host_info attribute, then the injected
+    # global; if neither is set, fall back to the value passed in **args,
+    # and finally to the module default.
+    default_user = DEFAULT_SSH_USER if 'user' not in args else args['user']
+    user = info.attributes.get('ssh_user', g.get('ssh_user', default_user))
+
+    default_pass = DEFAULT_SSH_PASS if 'ssh_pass' not in args else args['ssh_pass']
     password = info.attributes.get('ssh_pass', g.get('ssh_pass',
-                                                     DEFAULT_SSH_PASS))
-    port = info.attributes.get('ssh_port', g.get('ssh_port', DEFAULT_SSH_PORT))
+                                                     default_pass))
+
+    default_port = DEFAULT_SSH_PORT if 'ssh_port' not in args else args['ssh_port']
+    port = info.attributes.get('ssh_port', g.get('ssh_port', default_port))
+
+    default_verbosity = DEFAULT_SSH_VERBOSITY if 'ssh_verbosity_flag' not in args else args['ssh_verbosity_flag']
     ssh_verbosity_flag = info.attributes.get('ssh_verbosity_flag',
                                              g.get('ssh_verbosity_flag',
-                                                   DEFAULT_SSH_VERBOSITY))
+                                                   default_verbosity))
+
+    default_options = DEFAULT_SSH_OPTIONS if 'ssh_options' not in args else args['ssh_options']
     ssh_options = info.attributes.get('ssh_options',
                                       g.get('ssh_options',
-                                            DEFAULT_SSH_OPTIONS))
+                                            default_options))
 
     hostname, user, password, port = server_utils.parse_machine(hostname, user,
                                                                 password, port)
-
+    if port:
+        port = int(port)
     host_args = {
             'hostname': hostname,
             'afe_host': afe_host,
             'host_info_store': host_info_store,
             'user': user,
             'password': password,
-            'port': int(port),
+            'port': port,
             'ssh_verbosity_flag': ssh_verbosity_flag,
             'ssh_options': ssh_options,
             'connection_pool': connection_pool,
@@ -118,11 +146,17 @@
     @returns: Class type of the first host class that returns True to the
               check_host method.
     """
+    preset_host = _preset_host(hostname)
+    if preset_host:
+        logging.debug("Using preset_host %s for %s ", preset_host.__name__,
+                      hostname)
+        return preset_host
     with closing(connectivity_class(hostname, **args)) as host:
         for host_module in host_types:
             logging.info('Attempting to autodetect if host is of type %s',
                          host_module.__name__)
             if host_module.check_host(host, timeout=10):
+                os.environ['HOST_%s' % hostname] = str(host_module.__name__)
                 return host_module
 
     logging.warning('Unable to apply conventional host detection methods, '
@@ -130,6 +164,19 @@
     return cros_host.CrosHost
 
 
+def _preset_host(hostname):
+    """Check the environmental variables to see if the host type has been set.
+
+    @param hostname: A string representing the host name of the device.
+
+    @returns: Class type of the host, if previously found & set in
+        _detect_host, else None.
+    """
+    preset_host = os.getenv('HOST_%s' % hostname)
+    if preset_host:
+        return LOOKUP_DICT.get(preset_host, None)
+
+
 def _choose_connectivity_class(hostname, ssh_port):
     """Choose a connectivity class for this hostname.
 
@@ -160,6 +207,20 @@
                  ignore_timeout=False)
 
 
+def create_companion_hosts(companion_hosts):
+    """Wrapped for create_hosts for making host objects on companion duts.
+
+    @param companion_hosts: str or list of extra_host hostnames
+
+    @returns: A list of host objects for each host in companion_hosts
+    """
+    if not isinstance(companion_hosts, list):
+        companion_hosts = [companion_hosts]
+    hosts = []
+    for host in companion_hosts:
+        hosts.append(create_host(host))
+    return hosts
+
 # TODO(kevcheng): Update the creation method so it's not a research project
 # determining the class inheritance model.
 def create_host(machine, host_class=None, connectivity_class=None, **args):
@@ -188,7 +249,7 @@
         deprecation.warn('server.create_hosts:connectivity_class')
         connectivity_class = None
 
-    detected_args = _get_host_arguments(machine)
+    detected_args = _get_host_arguments(machine, **args)
     hostname = detected_args.pop('hostname')
     afe_host = detected_args['afe_host']
     info_store = detected_args['host_info_store'].get()
@@ -199,6 +260,7 @@
     for label in info_store.labels:
         if label.startswith(full_os_prefix):
             host_os = label[len(full_os_prefix):]
+            logging.debug('Detected host os: %s from info_store.', host_os)
             break
 
     connectivity_class = _choose_connectivity_class(hostname, args['port'])
@@ -207,6 +269,11 @@
                   or OS_HOST_DICT.get(afe_host.attributes.get('os_type'))
                   or OS_HOST_DICT.get(host_os))
 
+    if host_class is android_host.AndroidHost:
+        # We don't have direct ssh access to Android devices, so we do
+        # not need connectivity_class for AndroidHost here.
+        connectivity_class = None
+
     if host_class is None:
         # TODO(pprabhu) If we fail to verify connectivity, we skip the costly
         # host autodetection logic. We should ideally just error out in this
@@ -226,8 +293,13 @@
             logging.debug('Defaulting to CrosHost.')
 
     # create a custom host class for this machine and return an instance of it
-    classes = (host_class, connectivity_class)
-    custom_host_class = type("%s_host" % hostname, classes, {})
+    if connectivity_class:
+        classes = (host_class, connectivity_class)
+        custom_host_class = type("%s_host" % hostname, classes, {})
+    else:
+        custom_host_class = host_class
+
+    logging.info('Creating host class for %s with args %s', hostname, args)
     host_instance = custom_host_class(hostname, **args)
 
     # call job_start if this is the first time this host is being used
@@ -296,6 +368,6 @@
     if servo_uart_logs_dir and host.servo:
         host.servo.uart_logs_dir = servo_uart_logs_dir
     try:
-      yield host
+        yield host
     finally:
-      host.close()
+        host.close()
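The _get_host_arguments() change above layers several sources for each ssh parameter. A condensed, self-contained sketch of the precedence it implements (the helper name and dict arguments are illustrative, not the module's own API):

    DEFAULT_SSH_USER = 'root'

    def resolve_ssh_user(info_attributes, module_globals, args):
        # Precedence: host_info attribute > injected global > **args > default.
        default_user = args.get('user', DEFAULT_SSH_USER)
        return info_attributes.get(
                'ssh_user', module_globals.get('ssh_user', default_user))

    # A host_info attribute wins over everything else.
    print(resolve_ssh_user({'ssh_user': 'chronos'}, {}, {'user': 'admin'}))  # chronos
    # With no attribute and no global, the caller-supplied arg is respected.
    print(resolve_ssh_user({}, {}, {'user': 'admin'}))                       # admin
    # With nothing provided, the module default applies.
    print(resolve_ssh_user({}, {}, {}))                                      # root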
diff --git a/server/hosts/factory_unittest.py b/server/hosts/factory_unittest.py
index 3f62aae..38fcf8a 100755
--- a/server/hosts/factory_unittest.py
+++ b/server/hosts/factory_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/hosts/file_store.py b/server/hosts/file_store.py
index c85c7c5..0221264 100644
--- a/server/hosts/file_store.py
+++ b/server/hosts/file_store.py
@@ -7,8 +7,8 @@
 
 import common
 from autotest_lib.server.hosts import host_info
-from chromite.lib import locking
-from chromite.lib import retry_util
+from autotest_lib.utils.frozen_chromite.lib import locking
+from autotest_lib.utils.frozen_chromite.lib import retry_util
 
 
 _FILE_LOCK_TIMEOUT_SECONDS = 5
diff --git a/server/hosts/file_store_unittest.py b/server/hosts/file_store_unittest.py
index 1e86eee..72a5e73 100644
--- a/server/hosts/file_store_unittest.py
+++ b/server/hosts/file_store_unittest.py
@@ -2,16 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import mock
 import os
 import stat
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.client.common_lib import autotemp
 from autotest_lib.server.hosts import file_store
 from autotest_lib.server.hosts import host_info
-from chromite.lib import locking
+from autotest_lib.utils.frozen_chromite.lib import locking
 
 class FileStoreTestCase(unittest.TestCase):
     """Test file_store.FileStore functionality."""
@@ -119,7 +119,8 @@
             store.get(force_refresh=True)
 
 
-    @mock.patch('chromite.lib.locking.FileLock', autospec=True)
+    @mock.patch('autotest_lib.utils.frozen_chromite.lib.locking.FileLock',
+                autospec=True)
     def test_commit_succeeds_after_lock_retry(self, mock_file_lock_class):
         """Tests that commit succeeds when locking requires retries.
 
@@ -139,7 +140,8 @@
         self.assertEqual(2, mock_file_lock.write_lock.call_count)
 
 
-    @mock.patch('chromite.lib.locking.FileLock', autospec=True)
+    @mock.patch('autotest_lib.utils.frozen_chromite.lib.locking.FileLock',
+                autospec=True)
     def test_refresh_succeeds_after_lock_retry(self, mock_file_lock_class):
         """Tests that refresh succeeds when locking requires retries.
 
@@ -164,7 +166,8 @@
         self.assertEqual(4, mock_file_lock.write_lock.call_count)
 
 
-    @mock.patch('chromite.lib.locking.FileLock', autospec=True)
+    @mock.patch('autotest_lib.utils.frozen_chromite.lib.locking.FileLock',
+                autospec=True)
     def test_commit_with_negative_timeout_clips(self, mock_file_lock_class):
         """Commit request with negative timeout is same as 0 timeout.
 
@@ -184,7 +187,7 @@
 
 
     def test_str(self):
-        """Sanity tests the __str__ implementaiton"""
+        """Tests the __str__ implementaiton"""
         store = file_store.FileStore('/foo/path')
         self.assertEqual(str(store), 'FileStore[/foo/path]')
 
diff --git a/server/hosts/gce_host.py b/server/hosts/gce_host.py
index cdfe9da..58b1247 100644
--- a/server/hosts/gce_host.py
+++ b/server/hosts/gce_host.py
@@ -4,10 +4,9 @@
 
 import logging
 import os
-import string
 
 import common
-from chromite.lib import gce
+from autotest_lib.utils.frozen_chromite.lib import gce
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import lsbrelease_utils
@@ -82,12 +81,12 @@
         """
         keys = self.gce.GetCommonInstanceMetadata(
                 SSH_KEYS_METADATA_KEY) or ''
-        key_set = set(string.split(keys, '\n'))
+        key_set = set(keys.split('\n'))
         new_key_set = (key_set | set(to_add)) - set(to_remove)
         if key_set != new_key_set:
             self.gce.SetCommonInstanceMetadata(
                     SSH_KEYS_METADATA_KEY,
-                    string.join(list(new_key_set), '\n'))
+                    '\n'.join(list(new_key_set)))
 
     def add_ssh_key(self, username, ssh_key):
         """Adds a new SSH key in GCE metadata.
diff --git a/server/hosts/gsc_devboard_host.py b/server/hosts/gsc_devboard_host.py
new file mode 100644
index 0000000..3383ad4
--- /dev/null
+++ b/server/hosts/gsc_devboard_host.py
@@ -0,0 +1,212 @@
+# Lint as: python2, python3
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Expects to be run in an environment with sudo and no interactive password
+# prompt, such as within the Chromium OS development chroot.
+"""Host class for GSC devboard connected host."""
+
+import contextlib
+import logging
+try:
+    import docker
+except ImportError:
+    logging.info("Docker API is not installed in this environment")
+
+DOCKER_IMAGE = "gcr.io/satlab-images/gsc_dev_board:release"
+
+SATLAB_DOCKER_HOST = 'tcp://192.168.231.1:2375'
+LOCAL_DOCKER_HOST = 'tcp://127.0.0.1:2375'
+DEFAULT_DOCKER_HOST = 'unix:///var/run/docker.sock'
+
+DEFAULT_SERVICE_PORT = 39999
+
+ULTRADEBUG = '18d1:0304'
+
+
+class GSCDevboardHost(object):
+    """
+    A host that is physically connected to a GSC devboard.
+
+    It can be either an SDK workstation (chroot) or a Satlab box.
+    """
+
+    def _initialize(self,
+                    hostname,
+                    service_debugger_serial=None,
+                    service_ip=None,
+                    service_port=DEFAULT_SERVICE_PORT,
+                    *args,
+                    **dargs):
+        """Construct a GSCDevboardHost object.
+
+        @param hostname: Name of the devboard host. Not currently used; will
+                         eventually be used to look up the debugger serial.
+        @param service_debugger_serial: Debugger connected to the devboard;
+                                        defaults to the first one found on the
+                                        container.
+        @param service_ip: Devboard service ip; by default a new container is
+                           started.
+        @param service_port: Devboard service port; defaults to 39999.
+        """
+
+        # Use docker host from environment or by probing a list of candidates.
+        self._client = None
+        try:
+            self._client = docker.from_env()
+            logging.info("Created docker host from env")
+        except NameError:
+            raise NameError('Please install docker using '
+                            '"autotest/files/utils/install_docker_chroot.sh"')
+        except docker.errors.DockerException:
+            docker_host = None
+            candidate_hosts = [
+                    SATLAB_DOCKER_HOST, DEFAULT_DOCKER_HOST, LOCAL_DOCKER_HOST
+            ]
+            for h in candidate_hosts:
+                try:
+                    c = docker.DockerClient(base_url=h, timeout=2)
+                    c.close()
+                    docker_host = h
+                    break
+                except docker.errors.DockerException:
+                    pass
+            if docker_host is not None:
+                self._client = docker.DockerClient(base_url=docker_host,
+                                                   timeout=300)
+            else:
+                raise ValueError('Invalid DOCKER_HOST, ensure dockerd is'
+                                 ' running.')
+            logging.info("Using docker host at %s", docker_host)
+
+        self._satlab = False
+        # GSCDevboardHost should only be created on Satlab or localhost, so
+        # assume Satlab if a drone container is running.
+        if len(self._client.containers.list(filters={'name': 'drone'})) > 0:
+            logging.info("In Satlab")
+            self._satlab = True
+
+        self._service_debugger_serial = service_debugger_serial
+        self._service_ip = service_ip
+        self._service_port = service_port
+        logging.info("Using service port %s", self._service_port)
+
+        self._docker_network = 'default_satlab' if self._satlab else 'host'
+        self._docker_container = None
+
+        serials = self._list_debugger_serials()
+        if len(serials) == 0:
+            raise ValueError('No debuggers found')
+        logging.info("Available debuggers: [%s]", ', '.join(serials))
+
+        if self._service_debugger_serial is None:
+            self._service_debugger_serial = serials[0]
+        else:
+            if self._service_debugger_serial not in serials:
+                raise ValueError(
+                        '%s debugger not found in [%s]' %
+                        (self._service_debugger_serial, ', '.join(serials)))
+        logging.info("Using debugger %s", self._service_debugger_serial)
+        self._docker_container_name = "gsc_dev_board_{}".format(
+                self._service_debugger_serial)
+
+    def _list_debugger_serials(self):
+        """List all attached debuggers."""
+
+        c = self._client.containers.run(DOCKER_IMAGE,
+                                        remove=True,
+                                        privileged=True,
+                                        name='list_debugger_serial',
+                                        hostname='list_debugger_serial',
+                                        detach=True,
+                                        volumes=["/dev:/hostdev"],
+                                        command=['sleep', '5'])
+
+        res, output = c.exec_run(['lsusb', '-v', '-d', ULTRADEBUG],
+                                 stderr=False,
+                                 privileged=True)
+        c.kill()
+        if res != 0:
+            return []
+        output = output.decode("utf-8").split('\n')
+        serials = [
+                l.strip().split(' ')[-1] for l in output
+                if l.strip()[:7] == 'iSerial'
+        ]
+        return serials
+
+    @contextlib.contextmanager
+    def service_context(self):
+        """Service context manager that provides the service endpoint."""
+        self.start_service()
+        try:
+            yield "{}:{}".format(self.service_ip, self.service_port)
+        finally:
+            self.stop_service()
+
+    def start_service(self):
+        """Starts service if needed."""
+        if self._docker_container is not None:
+            return
+
+        if self._service_ip:
+            # Assume container was manually started if service_ip was set
+            logging.info("Skip start_service due to set service_ip")
+            return
+
+        # TODO(b/215767105): Pull image onto Satlab box if not present.
+
+        environment = {
+                'DEVBOARDSVC_PORT': self._service_port,
+                'DEBUGGER_SERIAL': self._service_debugger_serial
+        }
+        start_cmd = ['/opt/gscdevboard/start_devboardsvc.sh']
+
+        # Stop any leftover containers
+        try:
+            c = self._client.containers.get(self._docker_container_name)
+            c.kill()
+        except docker.errors.NotFound:
+            pass
+
+        self._client.containers.run(DOCKER_IMAGE,
+                                    remove=True,
+                                    privileged=True,
+                                    name=self._docker_container_name,
+                                    hostname=self._docker_container_name,
+                                    network=self._docker_network,
+                                    cap_add=["NET_ADMIN"],
+                                    detach=True,
+                                    volumes=["/dev:/hostdev"],
+                                    environment=environment,
+                                    command=start_cmd)
+
+        # A separate containers.get call is needed to capture network attributes
+        self._docker_container = self._client.containers.get(
+                self._docker_container_name)
+
+    def stop_service(self):
+        """Stops service by killing the container."""
+        if self._docker_container is None:
+            return
+        self._docker_container.kill()
+        self._docker_container = None
+
+    @property
+    def service_port(self):
+        """Return service port (local to the container host)."""
+        return self._service_port
+
+    @property
+    def service_ip(self):
+        """Return service ip (local to the container host)."""
+        if self._service_ip is not None:
+            return self._service_ip
+
+        if self._docker_network == 'host':
+            return '127.0.0.1'
+        else:
+            if self._docker_container is None:
+                return ''
+            else:
+                settings = self._docker_container.attrs['NetworkSettings']
+                return settings['Networks'][self._docker_network]['IPAddress']
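The new GSCDevboardHost hides the devboard service container lifecycle behind service_context(), so a test only sees the service endpoint. A hedged usage sketch; `host` is assumed to come from the test framework, and run_gsc_test() is a hypothetical helper:

    # `host` is a GSCDevboardHost constructed by the framework for this test.
    with host.service_context() as endpoint:
        # endpoint is '<ip>:<port>' of the devboard service container, e.g.
        # '127.0.0.1:39999' when talking to a local dockerd on the host network.
        run_gsc_test(endpoint)  # hypothetical test body
    # Leaving the block calls stop_service(), which kills the container.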
diff --git a/server/hosts/host_info.py b/server/hosts/host_info.py
index 2d273a0..cbb5664 100644
--- a/server/hosts/host_info.py
+++ b/server/hosts/host_info.py
@@ -411,7 +411,7 @@
     """Obtain the host_info_store object stuffed in the machine dict.
 
     The machine argument to jobs can be a string (a hostname) or a dict because
-    of legacy reasons. If we can't get a real store, return a dummy.
+    of legacy reasons. If we can't get a real store, return a stub.
     """
     if isinstance(machine, dict):
         return machine['host_info_store']
@@ -462,7 +462,7 @@
 
     try:
         return HostInfo(deserialized_json['labels'],
-                        deserialized_json['attributes'],
+                        deserialized_json.get('attributes', {}),
                         deserialized_json.get('stable_versions', {}))
     except KeyError as e:
         raise DeserializationError('Malformed serialized host_info: %r' % e)
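With the hunk above, a serialized host_info blob that lacks an "attributes" key now deserializes to an empty dict instead of raising; only a missing "labels" key is still treated as malformed. The field handling, paraphrased outside the real function:

    import json

    def parse_host_info_fields(serialized):
        data = json.loads(serialized)
        labels = data['labels']                        # still required
        attributes = data.get('attributes', {})        # now optional
        stable_versions = data.get('stable_versions', {})
        return labels, attributes, stable_versions

    print(parse_host_info_fields('{"labels": ["board:eve"]}'))
    # (['board:eve'], {}, {})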
diff --git a/server/hosts/host_info_unittest.py b/server/hosts/host_info_unittest.py
index 2f9e16b..2f55d42 100644
--- a/server/hosts/host_info_unittest.py
+++ b/server/hosts/host_info_unittest.py
@@ -131,7 +131,7 @@
 
 
     def test_str(self):
-        """Sanity checks the __str__ implementation."""
+        """Checks the __str__ implementation."""
         info = host_info.HostInfo(labels=['a'], attributes={'b': 2})
         self.assertEqual(str(info),
                          "HostInfo[Labels: ['a'], Attributes: {'b': 2}, StableVersions: {}]")
@@ -294,7 +294,7 @@
 
 
     def test_str(self):
-        """Sanity tests __str__ implementation."""
+        """Tests __str__ implementation."""
         self.store.info = host_info.HostInfo(['label1'],
                                              {'attrib1': {'key1': 'data1'}})
         self.assertEqual(str(self.store),
@@ -328,7 +328,7 @@
 
 
     def test_failed_refresh_cleans_cache(self):
-        """Sanity checks return values when refresh raises."""
+        """Checks return values when refresh raises."""
         with self.assertRaises(host_info.StoreError):
             self.store.get()
         # Since |get| hit an error, a subsequent get should again hit the store.
diff --git a/server/hosts/jetstream_host.py b/server/hosts/jetstream_host.py
index bd95ccb..c24d414 100644
--- a/server/hosts/jetstream_host.py
+++ b/server/hosts/jetstream_host.py
@@ -80,7 +80,10 @@
         self.wait_up(timeout=RESET_TIMEOUT_SECONDS)
 
         # Stop service ap-update-manager to prevent rebooting during autoupdate.
-        self.run('sudo stop ap-update-manager', ignore_status=False)
+        # Jetstream has migrated job control from upstart to the process
+        # manager; issue the command on both paths for backward compatibility.
+        self.run('sudo stop ap-update-manager', ignore_status=True)
+        self.run('sudo ap-pm-cli stop ap-update-manager', ignore_status=True)
 
     def prepare_for_update(self):
         """Prepare the host for an update."""
diff --git a/server/hosts/jetstream_host_unittest.py b/server/hosts/jetstream_host_unittest.py
index d5f4021..6d0d4f1 100755
--- a/server/hosts/jetstream_host_unittest.py
+++ b/server/hosts/jetstream_host_unittest.py
@@ -1,10 +1,10 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.client.common_lib import error
diff --git a/server/hosts/labstation_host_unittest.py b/server/hosts/labstation_host_unittest.py
index bef52d5..6ed153a 100644
--- a/server/hosts/labstation_host_unittest.py
+++ b/server/hosts/labstation_host_unittest.py
@@ -19,7 +19,7 @@
         assert new_state == self._expected_state
 
 
-class MockMasterSsh(object):
+class MockMainSsh(object):
     def __init__(self):
         self.ssh_option = ""
 
@@ -39,7 +39,7 @@
     """
     def __init__(self):
         self._is_localhost = False
-        self._master_ssh = MockMasterSsh()
+        self._main_ssh = MockMainSsh()
         self.env = {}
         self.user = "a"
         self.port = 7
diff --git a/server/hosts/labstation_repair.py b/server/hosts/labstation_repair.py
index 6bb4acc..343a4dc 100644
--- a/server/hosts/labstation_repair.py
+++ b/server/hosts/labstation_repair.py
@@ -9,16 +9,18 @@
 from autotest_lib.server.hosts import repair_utils
 from autotest_lib.client.common_lib import utils
 
-from chromite.lib import timeout_util
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
 # There are some labstations we don't want they receive auto-update,
 # e.g. labstations that used for image qualification purpose
-UPDATE_EXEMPTED_POOL = {'servo_verification', 'labstation_tryjob'}
+UPDATE_EXEMPTED_POOL = {
+        'servo_verification', 'labstation_tryjob', 'labstation_canary'
+}
 
 
 class _LabstationUpdateVerifier(hosts.Verifier):
diff --git a/server/hosts/moblab_host.py b/server/hosts/moblab_host.py
index 85f1158..ad045c5 100644
--- a/server/hosts/moblab_host.py
+++ b/server/hosts/moblab_host.py
@@ -19,7 +19,7 @@
 from autotest_lib.server.hosts import cros_host
 from autotest_lib.server.hosts import cros_repair
 
-from chromite.lib import timeout_util
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
 import six
 
 AUTOTEST_INSTALL_DIR = global_config.global_config.get_config_value(
@@ -120,13 +120,7 @@
         @raises AutoservRunError: If the command failed.
         @raises AutoservSSHTimeout: Ssh connection has timed out.
         """
-        try:
-            result = host.run(
-                    'grep -q moblab /etc/lsb-release',
-                    ignore_status=True, timeout=timeout)
-        except (error.AutoservRunError, error.AutoservSSHTimeout):
-            return False
-        return result.exit_status == 0
+        return False
 
 
     def install_boto_file(self, boto_path=''):
@@ -213,16 +207,16 @@
                     existing_hosts.append(dut_ip)
 
     def _check_dut_ssh(self, dut_ip):
-       is_sshable = False
-       count = 0
-       while not is_sshable and count < 10:
-           cmd = ('ssh  -o ConnectTimeout=30 -o ConnectionAttempts=30'
-                  ' root@%s echo Testing' % dut_ip)
-           result = self.run(cmd)
-           is_sshable = 'Testing' in result.stdout
-           logging.info(is_sshable)
-           count += 1
-       return is_sshable
+        is_sshable = False
+        count = 0
+        while not is_sshable and count < 10:
+            cmd = ('ssh  -o ConnectTimeout=30 -o ConnectionAttempts=30'
+                   ' root@%s echo Testing' % dut_ip)
+            result = self.run(cmd)
+            is_sshable = 'Testing' in result.stdout
+            logging.info(is_sshable)
+            count += 1
+        return is_sshable
 
     def verify_software(self):
         """Create the autodir then do standard verify."""
diff --git a/server/hosts/remote.py b/server/hosts/remote.py
index 5f720a0..febbde2 100644
--- a/server/hosts/remote.py
+++ b/server/hosts/remote.py
@@ -8,9 +8,14 @@
 import six
 from six.moves import urllib
 import re
+
+import common
+
 from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.global_config import global_config
 from autotest_lib.server import utils
 from autotest_lib.server.hosts import base_classes
+from autotest_lib.server.hosts.tls_client.connection import TLSConnection
 
 
 class RemoteHost(base_classes.Host):
@@ -45,6 +50,13 @@
         self.autodir = autodir
         self.tmp_dirs = []
 
+        get_value = global_config.get_config_value
+
+        self.tls_connection = None
+        try:
+            self.tls_connection = TLSConnection()
+        except Exception as e:
+            logging.warning("Could not establish TLS connection %s", e)
 
     def __repr__(self):
         return "<remote host: %s>" % self.hostname
@@ -61,7 +73,9 @@
                     self.run('rm -rf "%s"' % (utils.sh_escape(dir)))
                 except error.AutoservRunError:
                     pass
-
+        if self.tls_connection:
+            self.tls_connection.close()
+            self.tls_connection = None
 
     def job_start(self):
         """
@@ -265,6 +279,7 @@
         it.
         """
         template = os.path.join(parent, self.TMP_DIR_TEMPLATE)
+        parent = os.path.dirname(template)
         dir_name = self.run('mkdir -p %s && mktemp -d %s' % (parent, template)).stdout.rstrip()
         self.tmp_dirs.append(dir_name)
         return dir_name
diff --git a/server/hosts/remote_unittest.py b/server/hosts/remote_unittest.py
index cc61fdc..45457f7 100755
--- a/server/hosts/remote_unittest.py
+++ b/server/hosts/remote_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import unittest
 import common
diff --git a/server/hosts/repair_utils.py b/server/hosts/repair_utils.py
index a79db04..7ed0ceb 100644
--- a/server/hosts/repair_utils.py
+++ b/server/hosts/repair_utils.py
@@ -11,7 +11,7 @@
 from autotest_lib.server.hosts import servo_constants
 from autotest_lib.server.hosts import cros_constants
 
-from chromite.lib import timeout_util
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
 
 
 def require_servo(host, ignore_state=False):
@@ -91,7 +91,9 @@
             raise hosts.AutoservVerifyError(msg)
         if not ip_address:
             msg = 'Hostname: %s not present in DNS' % host.hostname
-            raise hosts.AutoservVerifyError(msg)
+        else:
+            msg = 'Hostname: %s not pingable' % host.hostname
+        raise hosts.AutoservVerifyError(msg)
 
     @property
     def description(self):
diff --git a/server/hosts/rpc_server_tracker.py b/server/hosts/rpc_server_tracker.py
index 0fc50af..d001e86 100644
--- a/server/hosts/rpc_server_tracker.py
+++ b/server/hosts/rpc_server_tracker.py
@@ -15,17 +15,12 @@
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import retry
 
-try:
-    import jsonrpclib
-except ImportError:
-    jsonrpclib = None
-
 
 class RpcServerTracker(object):
     """
-    This class keeps track of all the RPC server connections started on a remote
-    host. The caller can use either |xmlrpc_connect| or |jsonrpc_connect| to
-    start the required type of rpc server on the remote host.
+    This class keeps track of all the RPC server connections started on a
+    remote host. The caller can use |xmlrpc_connect| to start the required
+    type of rpc server on the remote host.
     The host will cleanup all the open RPC server connections on disconnect.
     """
 
@@ -46,7 +41,7 @@
     def _setup_port(self, port, command_name, remote_pid=None):
         """Sets up a tunnel process and register it to rpc_server_tracker.
 
-        Chrome OS on the target closes down most external ports for security.
+        ChromeOS on the target closes down most external ports for security.
         We could open the port, but doing that would conflict with security
         tests that check that only expected ports are open.  So, to get to
         the port on the target we use an ssh tunnel.
@@ -209,13 +204,13 @@
             except Exception as exc:
                 log_lines = []
                 if logfile:
-                    logging.warn('Failed to start XMLRPC server; getting log.')
+                    logging.warning('Failed to start XMLRPC server; getting log.')
                     with tempfile.NamedTemporaryFile() as temp:
                         self._host.get_file(logfile, temp.name)
                         with open(temp.name) as f:
                             log_lines = f.read().rstrip().splitlines()
                 else:
-                    logging.warn('Failed to start XMLRPC server; no log.')
+                    logging.warning('Failed to start XMLRPC server; no log.')
 
                 logging.error(
                         'Failed to start XMLRPC server:  %s.%s: %s.',
@@ -258,38 +253,6 @@
         logging.info('XMLRPC server started successfully.')
         return proxy
 
-
-    def jsonrpc_connect(self, port):
-        """Creates a jsonrpc proxy connection through an ssh tunnel.
-
-        This method exists to facilitate communication with goofy (which is
-        the default system manager on all factory images) and as such, leaves
-        most of the rpc server sanity checking to the caller. Unlike
-        xmlrpc_connect, this method does not facilitate the creation of a remote
-        jsonrpc server, as the only clients of this code are factory tests,
-        for which the goofy system manager is built in to the image and starts
-        when the target boots.
-
-        One can theoretically create multiple jsonrpc proxies all forwarded
-        to the same remote port, provided the remote port has an rpc server
-        listening. However, in doing so we stand the risk of leaking an
-        existing tunnel process, so we always disconnect any older tunnels
-        we might have through disconnect.
-
-        @param port: port on the remote host that is serving this proxy.
-
-        @return: The client proxy.
-        """
-        if not jsonrpclib:
-            logging.warning('Jsonrpclib could not be imported. Check that '
-                            'site-packages contains jsonrpclib.')
-            return None
-
-        proxy = jsonrpclib.jsonrpc.ServerProxy(self._setup_rpc(port, None))
-
-        logging.info('Established a jsonrpc connection through port %s.', port)
-        return proxy
-
     def disconnect(self, port, pkill=True):
         """Disconnect from an RPC server on the host.
 
@@ -321,8 +284,8 @@
             # status.
             self._host.run("pkill -f '%s'" % remote_name, ignore_status=True)
             if remote_pid:
-                logging.info('Waiting for RPC server "%s" shutdown',
-                             remote_name)
+                logging.info('Waiting for RPC server "%s" shutdown (%s)',
+                             remote_name, remote_pid)
                 start_time = time.time()
                 while (time.time() - start_time <
                        self._RPC_SHUTDOWN_TIMEOUT_SECONDS):
@@ -330,9 +293,11 @@
                             "pgrep -f '%s'" % remote_name,
                             ignore_status=True).stdout.split()
                     if not remote_pid in running_processes:
-                        logging.info('Shut down RPC server.')
+                        logging.info('Shut down RPC server %s.', remote_pid)
                         break
                     time.sleep(self._RPC_SHUTDOWN_POLLING_PERIOD_SECONDS)
+                    self._host.run("pkill -9 -f '%s'" % remote_name,
+                                   ignore_status=True)
                 else:
                     raise error.TestError('Failed to shutdown RPC server %s' %
                                           remote_name)
@@ -343,7 +308,7 @@
 
     def disconnect_all(self):
         """Disconnect all known RPC proxy ports."""
-        for port in self._rpc_proxy_map.keys():
+        for port in list(self._rpc_proxy_map.keys()):
             self.disconnect(port)
 
 
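The tracker docstrings above explain that DUT ports stay closed and RPC traffic rides an ssh tunnel instead. The underlying pattern, reduced to a standalone sketch (hostname and ports are placeholders; the tracker itself also registers, polls, and tears these tunnels down):

    import subprocess
    import xmlrpc.client

    def xmlrpc_via_ssh_tunnel(dut_hostname, remote_port, local_port):
        # Forward local_port on this machine to remote_port on the DUT. The
        # DUT keeps the port closed externally, but localhost-over-ssh works.
        tunnel = subprocess.Popen([
                'ssh', '-N', '-L',
                '%d:localhost:%d' % (local_port, remote_port),
                'root@%s' % dut_hostname])
        proxy = xmlrpc.client.ServerProxy('http://localhost:%d' % local_port)
        return tunnel, proxy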
diff --git a/server/hosts/servo_afe_board_map_unittest.py b/server/hosts/servo_afe_board_map_unittest.py
index 7811124..9b9b198 100755
--- a/server/hosts/servo_afe_board_map_unittest.py
+++ b/server/hosts/servo_afe_board_map_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/server/hosts/servo_constants.py b/server/hosts/servo_constants.py
index bb74c05..9075f9a 100644
--- a/server/hosts/servo_constants.py
+++ b/server/hosts/servo_constants.py
@@ -9,20 +9,27 @@
 # Names of the host attributes in the database that represent the values for
 # the servo_host and servo_port for a servo connected to the DUT.
 SERVO_HOST_ATTR = 'servo_host'
+SERVO_HOST_SSH_PORT_ATTR = 'servo_host_ssh_port'
 SERVO_PORT_ATTR = 'servo_port'
 SERVO_BOARD_ATTR = 'servo_board'
 # Model is inferred from host labels.
 SERVO_MODEL_ATTR = 'servo_model'
 SERVO_SERIAL_ATTR = 'servo_serial'
+SERVOD_DOCKER_ATTR = 'servod_docker'
 # Indicates what type of servo setup, example value: REGULAR or DUAL_V4.
 SERVO_SETUP_ATTR = 'servo_setup'
+SERVO_FW_CHANNEL_ATTR = 'servo_fw_channel'
 SERVO_SETUP_VALUE_DUAL_V4 = 'DUAL_V4'
+SERVO_RECOVERY_MODE = 'servo_recovery'
 SERVO_ATTR_KEYS = (
         SERVO_BOARD_ATTR,
         SERVO_HOST_ATTR,
+        SERVOD_DOCKER_ATTR,
+        SERVO_HOST_SSH_PORT_ATTR,
         SERVO_PORT_ATTR,
         SERVO_SERIAL_ATTR,
         SERVO_SETUP_ATTR,
+        SERVO_FW_CHANNEL_ATTR,
 )
 
 # Additional args that will be appended to servod start command.
@@ -47,14 +54,21 @@
 SERVO_STATE_MISSING_CONFIG = 'MISSING_CONFIG'
 SERVO_STATE_WRONG_CONFIG = 'WRONG_CONFIG'
 SERVO_STATE_NO_SSH = 'NO_SSH'
+SERVO_STATE_SERVO_HOST_ISSUE = 'SERVO_HOST_ISSUE'
 SERVO_STATE_NOT_CONNECTED = 'NOT_CONNECTED'
+SERVO_STATE_SERIAL_MISMATCH = 'SERVO_SERIAL_MISMATCH'
+SERVO_STATE_SERVO_UPDATER_ISSUE = 'SERVO_UPDATER_ISSUE'
+SERVO_STATE_SERVOD_DUT_CONTROLLER_MISSING = 'SERVOD_DUT_CONTROLLER_MISSING'
 SERVO_STATE_NEED_REPLACEMENT = 'NEED_REPLACEMENT'
 SERVO_STATE_CR50_CONSOLE_MISSING = 'CR50_CONSOLE_MISSING'
 SERVO_STATE_CCD_TESTLAB_ISSUE = 'CCD_TESTLAB_ISSUE'
 SERVO_STATE_SERVOD_ISSUE = 'SERVOD_ISSUE'
+SERVO_STATE_SERVOD_PROXY_ISSUE = 'SERVOD_PROXY_ISSUE'
 SERVO_STATE_LID_OPEN_FAILED = 'LID_OPEN_FAILED'
 SERVO_STATE_BAD_RIBBON_CABLE = 'BAD_RIBBON_CABLE'
 SERVO_STATE_TOPOLOGY_ISSUE = 'TOPOLOGY_ISSUE'
+SERVO_STATE_SBU_LOW_VOLTAGE = 'SBU_LOW_VOLTAGE'
+SERVO_STATE_CR50_NOT_ENUMERATED = 'CR50_NOT_ENUMERATED'
 SERVO_STATE_DUT_NOT_CONNECTED = 'DUT_NOT_CONNECTED'
 SERVO_STATE_EC_BROKEN = 'EC_BROKEN'
 SERVO_STATE_BROKEN = 'BROKEN'
diff --git a/server/hosts/servo_host.py b/server/hosts/servo_host.py
index 9a573f8..860242c 100644
--- a/server/hosts/servo_host.py
+++ b/server/hosts/servo_host.py
@@ -20,33 +20,46 @@
 import tarfile
 import threading
 import json
+import tempfile
 import time
 import six
 import six.moves.xmlrpc_client
-import calendar
+
+try:
+    import docker
+    from autotest_lib.site_utils.docker import utils as docker_utils
+except ImportError:
+    logging.info("Docker API is not installed in this environment")
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import hosts
 from autotest_lib.client.common_lib import lsbrelease_utils
 from autotest_lib.client.common_lib.cros import retry
-from autotest_lib.client.common_lib.cros.network import ping_runner
+from autotest_lib.server import crashcollect
 from autotest_lib.server.cros.servo import servo
 from autotest_lib.server.hosts import servo_repair
 from autotest_lib.server.hosts import base_servohost
 from autotest_lib.server.hosts import servo_constants
 from autotest_lib.server.cros.faft.utils import config
 from autotest_lib.client.common_lib import global_config
-from autotest_lib.site_utils.admin_audit import servo_updater
 from autotest_lib.server.cros.servo.topology import servo_topology
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
 _CONFIG = global_config.global_config
 
+SERVOD_CONTAINER_IMAGE_PATH = "us-docker.pkg.dev/chromeos-partner-moblab/common-core"
+
+# Create this flag on the drone or inside the SSP container to enable servod
+# debug mode; the servod container will then not be deleted after being stopped.
+SERVOD_DEBUG_FLAG = '/servod_debug'
+
+DOCKER_SERVOD_DEBUG_MODE = os.environ.get('DOCKER_SERVOD_DEBUG_MODE', '0')
+
 
 class ServoHost(base_servohost.BaseServoHost):
     """Host class for a servo host(e.g. beaglebone, labstation)
@@ -111,7 +124,7 @@
     # The log format starts with a timestamp
     MCU_RE = (r'[\d\-]+ [\d:,]+ '
               # The mcu that is logging this is next.
-              r'- (?P<%s>\w+) - '
+              r'- (?P<%s>[\w/]+) - '
               # Next, we have more log outputs before the actual line.
               # Information about the file line, logging function etc.
               # Anchor on EC3PO Console, LogConsoleOutput and dev/pts.
@@ -135,22 +148,16 @@
     # run.
     OLD_LOG_SUFFIX = 'old'
 
-    # Mapping servo board with their vid-pid
-    SERVO_VID_PID = {
-            'servo_v4': '18d1:501b',
-            'ccd_cr50': '18d1:5014',
-            'servo_micro': '18d1:501a',
-            'servo_v3': ['18d1:5004', '0403:6014'],
-            'c2d2': '1d81:5041',
-    }
-
     def _init_attributes(self):
         self._servo_state = None
         self.servo_port = None
         self.servo_board = None
+        self._ec_supported = None
         self.servo_model = None
         self.servo_serial = None
         self.servo_setup = None
+        self.servo_recovery = None
+        self.servo_fw_channel = None
         self.additional_servod_args = None
         self._dut_health_profile = None
         # The flag that indicate if a servo is connected to a smart usbhub.
@@ -175,6 +182,8 @@
                     servo_model=None,
                     servo_serial=None,
                     servo_setup=None,
+                    servo_recovery=None,
+                    servo_fw_channel=None,
                     additional_servod_args=None,
                     is_in_lab=None,
                     *args,
@@ -207,6 +216,8 @@
         self.servo_model = servo_model
         self.servo_serial = servo_serial
         self.servo_setup = servo_setup
+        self.servo_recovery = servo_recovery
+        self.servo_fw_channel = servo_fw_channel
         self.additional_servod_args = additional_servod_args
 
         # The location of the log files on the servo host for this instance.
@@ -224,8 +235,9 @@
         # make sure the labstation is up here, in the case of the labstation is
         # in the middle of reboot.
         self._is_locked = False
-        if (self.wait_up(self.REBOOT_TIMEOUT) and self.is_in_lab()
-            and self.is_labstation()):
+        if (not self.is_containerized_servod()
+                    and self.wait_up(self.REBOOT_TIMEOUT) and self.is_in_lab()
+                    and self.is_labstation()):
             self._lock()
             try:
                 self.wait_ready()
@@ -244,10 +256,10 @@
     def connect_servo(self):
         """ Initialize and setup servo for later use.
         """
-        self.initilize_servo()
+        self.initialize_servo()
         self.initialize_dut_for_servo()
 
-    def initilize_servo(self):
+    def initialize_servo(self):
         """Establish a connection to the servod server on this host.
 
         Initializes `self._servo` and then verifies that all network
@@ -289,6 +301,13 @@
             self.rpc_server_tracker.disconnect(self.servo_port)
             self._servo = None
 
+    def _to_str(self, maybe_str):
+        """If the param is bytes, convert it to a str."""
+        # b/217780680, Make this compatible with python3,
+        if isinstance(maybe_str, bytes):
+            return maybe_str.decode('utf-8', errors='replace')
+        return maybe_str
+
     def _maybe_create_servod_ssh_tunnel_proxy(self):
         """Create a xmlrpc proxy for use with a ssh tunnel.
         A lock is used to safely create a singleton proxy.
@@ -312,7 +331,8 @@
                   server on the host.
         """
         if (servo_constants.ENABLE_SSH_TUNNEL_FOR_SERVO
-                and not self.is_localhost()):
+                    and not self.is_localhost()
+                    and not self.is_containerized_servod()):
             # Check for existing ssh tunnel proxy.
             if self._tunnel_proxy is None:
                 self._maybe_create_servod_ssh_tunnel_proxy()
@@ -321,7 +341,12 @@
             # xmlrpc/httplib is not thread-safe, so each thread must have its
             # own separate proxy connection.
             if not hasattr(self._local, "_per_thread_proxy"):
-                remote = 'http://%s:%s' % (self.hostname, self.servo_port)
+                if self.is_containerized_servod():
+                    ip_addr = docker_utils.get_container_ip(self.hostname)
+                    remote = 'http://%s:%s' % (ip_addr, self.servo_port)
+                else:
+                    remote = 'http://%s:%s' % (self.hostname, self.servo_port)
+                logging.debug('Servo Proxy RPC URL: %s', remote)
                 self._local._per_thread_proxy = six.moves.xmlrpc_client.ServerProxy(remote)
             return self._local._per_thread_proxy
 
@@ -338,9 +363,6 @@
             self._servo_state = servo_constants.SERVO_STATE_WORKING
             self.record('INFO', None, None,
                         'ServoHost verify set servo_state as WORKING')
-            if self.is_servo_topology_supported():
-                self._topology = servo_topology.ServoTopology(self)
-                self._topology.generate()
         except Exception as e:
             if not self.is_localhost():
                 self._servo_state = self.determine_servo_state()
@@ -375,9 +397,9 @@
                 logging.debug('Unexpected error occurred on mount usb drive.')
                 return ''
 
-            release_content = self.run(
-                'cat %s/etc/lsb-release' % mount_dst,
-                ignore_status=True).stdout.strip()
+            release_content = self._to_str(
+                    self.run('cat %s/etc/lsb-release' % mount_dst,
+                             ignore_status=True).stdout.strip())
 
             if not re.search(r'RELEASE_TRACK=.*test', release_content):
                 logging.info('The image on usbkey is not a test image')
@@ -415,7 +437,8 @@
             updater_bin = os.path.join(mount_dst,
                                        'usr/sbin/chromeos-firmwareupdate')
             self.run('%s --unpack %s' % (updater_bin, fw_dst))
-            return self.run('%s --manifest' % updater_bin).stdout
+            return self._to_str(self.run('%s --manifest' % updater_bin).stdout)
+
         finally:
             self._unmount_drive(mount_dst)
 
@@ -529,12 +552,18 @@
     def is_ec_supported(self):
         """Check if ec is supported on the servo_board"""
         if self.servo_board:
+            if self._ec_supported is not None:
+                return self._ec_supported
             try:
                 frm_config = config.Config(self.servo_board, self.servo_model)
-                return frm_config.chrome_ec
+                self._ec_supported = getattr(frm_config, 'chrome_ec', False)
+                return self._ec_supported
             except Exception as e:
-                logging.error('Unexpected error when read from firmware'
-                    ' configs; %s', str(e))
+                logging.error(
+                        'Unexpected error when reading from firmware'
+                        ' configs; %s', e)
+        else:
+            logging.debug('Cannot detect if DUT has EC as the board is unknown.')
         return False
 
     def validate_image_usbkey(self):
@@ -577,9 +606,6 @@
             # reboot request created by this servo because it passed repair.
             if self.is_labstation():
                 self.withdraw_reboot_request()
-            if self.is_servo_topology_supported():
-                self._topology = servo_topology.ServoTopology(self)
-                self._topology.generate()
         except Exception as e:
             if not self.is_localhost():
                 self._servo_state = self.determine_servo_state()
@@ -639,6 +665,9 @@
             logging.debug("Servohost is a localhost, skipping start servod.")
             return
 
+        if self.is_containerized_servod():
+            return self.start_containerized_servod()
+
         cmd = 'start servod'
         if self.servo_board:
             cmd += ' BOARD=%s' % self.servo_board
@@ -660,6 +689,9 @@
         if self._require_cr50_servod_config():
             cmd += ' CONFIG=cr50.xml'
 
+        if self.servo_recovery == True:
+            cmd += ' REC_MODE=1'
+
         # Adding customized args if any.
         if self.additional_servod_args:
             cmd += ' ' + self.additional_servod_args
@@ -697,6 +729,16 @@
             logging.debug("Servohost is a localhost, skipping stop servod.")
             return
 
+        if self.is_containerized_servod():
+            # TODO(gregorynisbet): Remove this message in 2022Q2.
+            logging.debug("ServoHost: Detected containerized servod.")
+            remove_container = True
+            if DOCKER_SERVOD_DEBUG_MODE == '1' or os.path.exists(
+                    SERVOD_DEBUG_FLAG):
+                remove_container = False
+            self.stop_containerized_servod(remove_container=remove_container)
+            return
+
         logging.debug('Stopping servod on port %s', self.servo_port)
         self.run('stop servod PORT=%d' % self.servo_port,
                  timeout=60, ignore_status=True)
@@ -704,6 +746,157 @@
                       servo_constants.SERVOD_TEARDOWN_TIMEOUT)
         time.sleep(servo_constants.SERVOD_TEARDOWN_TIMEOUT)
 
+    def wait_for_init_servod_in_container(self, container):
+        """Waits for servod process to be ready to listen inside container."""
+        ready_output = "Instance associated with id %s ready" % self.servo_port
+        if not container:
+            logging.debug("Container object is None.")
+            return False
+        try:
+            # Executes servodtool command to wait for servod to be active.
+            exit_code, output = container.exec_run(
+                    cmd="servodtool instance wait-for-active -p %s" %
+                    self.servo_port,
+                    stdout=True)
+            # b/217780680, Make this compatible with python3,
+            if isinstance(output, bytes):
+                output = output.decode(errors='replace')
+            if exit_code != 0 or ready_output not in output:
+                logging.debug(
+                        'Failed to start servod process inside container,'
+                        'exit_code=%s, output=%s.', exit_code, output)
+                return False
+        except docker.errors.APIError as e:
+            logging.error('%s', e)
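+            # A docker API error is only logged; execution continues to the
+            # return below, so it is not treated as a readiness failure.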
+        return True
+
+    def start_containerized_servod(self, with_servod=True):
+        """Start the servod process on servohost."""
+        logging.info("Starting servod container %s.",
+                     self.servod_container_name)
+        client = docker_utils.get_docker_client()
+        logging.debug("Docker daemon ping %s", client.ping())
+        labels = {'WITH_SERVOD': str(with_servod)}
+        try:
+            if self.is_up(with_servod=with_servod):
+                logging.warning("Container already exists - not starting")
+                return
+            logging.info(
+                    'Servod container either does not exist or is not running.'
+            )
+            self.stop_containerized_servod()
+        except docker.errors.APIError:
+            # Container exists but is not running
+            logging.info("Cleaning up non-functional container.")
+            self.stop_containerized_servod()
+
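+        # The servod image label and registry can be overridden via
+        # environment variables.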
+        label = os.environ.get("SERVOD_CONTAINER_LABEL", "release")
+        registry = os.environ.get("REGISTRY_URI", SERVOD_CONTAINER_IMAGE_PATH)
+        image = "%s/servod:%s" % (registry, label)
+        logging.info("Servod container image: %s", image)
+
+        try:
+            client.images.pull(image)
+        except docker.errors.APIError:
+            logging.exception("Failed to pull servod container image.")
+
+        environment = [
+                "BOARD=%s" % self.servo_board,
+                "MODEL=%s" % self.servo_model,
+                "SERIAL=%s" % self.servo_serial,
+                "PORT=%s" % self.servo_port,
+        ]
+        # Start servod with dual_v4 based on servo_setup.
+        if self.is_dual_setup():
+            environment.append("DUAL_V4=1")
+        # Start servod with CONFIG=cr50.xml, which is required for some pools.
+        if self._require_cr50_servod_config():
+            environment.append("CONFIG=cr50.xml")
+        if self.servo_recovery == True:
+            environment.append("REC_MODE=1")
+
+        container_network = os.environ.get("DOCKER_DEFAULT_NETWORK", None)
+        # If the network environment variable is not set, fall back to the
+        # default network for moblab or satlab based on the TLE.
+        if not container_network:
+            container_network = "default_moblab"
+            if 'drone' in docker_utils.get_running_containers(client=client):
+                container_network = "default_satlab"
+
+        logging.info('Servod container will use %s network', container_network)
+        logging.info('Servod container environment: %s', environment)
+        try:
+            start_cmds = ["bash", "/start_servod.sh"]
+            if not with_servod:
+                # In some cases we need the container up without running servod.
+                start_cmds = ["tail", "-f", "/dev/null"]
+                logging.debug(
+                        'Attempting to start Servod container without servod')
+            container = client.containers.run(
+                    image,
+                    remove=False,
+                    privileged=True,
+                    labels=labels,
+                    name=self.servod_container_name,
+                    hostname=self.servod_container_name,
+                    network=container_network,
+                    cap_add=["NET_ADMIN"],
+                    detach=True,
+                    volumes=[
+                            "/dev:/dev",
+                            "%s_log:/var/log/servod_%s/" %
+                            (self.servod_container_name, self.servo_port)
+                    ],
+                    environment=environment,
+                    command=start_cmds,
+            )
+            # Verify that servod came up inside the container.
+            if with_servod:
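+                # Poll servod readiness every 10 seconds, up to
+                # SERVOD_STARTUP_TIMEOUT seconds in total.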
+                current_time = 0
+                while not self.wait_for_init_servod_in_container(
+                        container
+                ) and current_time <= servo_constants.SERVOD_STARTUP_TIMEOUT:
+                    time.sleep(10)
+                    current_time += 10
+
+                if not self.wait_for_init_servod_in_container(container):
+                    logging.info(
+                            'Servod process is not up within the servod container after %s seconds.'
+                            % servo_constants.SERVOD_STARTUP_TIMEOUT)
+                else:
+                    logging.info(
+                            'Servod process is up within the servod container after %s seconds.'
+                            % current_time)
+            else:
+                logging.info(
+                        "Servod container %s up and running without servod process.",
+                        self.servod_container_name)
+
+        except docker.errors.ContainerError as e:
+            logging.exception("Failed to start servod container. %s", e)
+            raise
+        except docker.errors.ImageNotFound:
+            logging.exception("Servod container image %s not found.", image)
+
+    def stop_containerized_servod(self, remove_container=True):
+        """Stop the container running servod."""
+        logging.info("Stopping servod container %s.",
+                     self.servod_container_name)
+        client = docker_utils.get_docker_client()
+        try:
+            cont = client.containers.get(self.servod_container_name)
+        except docker.errors.NotFound:
+            logging.info("Servod container %s not found; no need to stop it.",
+                         self.servod_container_name)
+        except docker.errors.APIError:
+            logging.exception(
+                    "Stopping servod container %s caused a docker error.",
+                    self.servod_container_name)
+        else:
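+            # The container was found; remove it unless the caller asked to
+            # keep it for debugging.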
+            if remove_container == True:
+                cont.remove(force=True)
+                logging.debug('Servod container instance removed')
+
     def restart_servod(self, quick_startup=False):
         """Restart the servod process on servohost.
         """
@@ -714,15 +907,16 @@
         """Helper function to handle non-zero servodtool response.
         """
         if re.search(servo_constants.ERROR_MESSAGE_USB_HUB_NOT_COMPATIBLE,
-                     response.stdout):
+                     self._to_str(response.stdout)):
             logging.error('The servo is not plugged on a usb hub that supports'
                           ' power-cycle!')
             # change the flag so we can update this label in later process.
             self.smart_usbhub = False
             return
 
-        if re.search(servo_constants.ERROR_MESSAGE_DEVICE_NOT_FOUND %
-                     self.servo_serial, response.stdout):
+        if re.search(
+                servo_constants.ERROR_MESSAGE_DEVICE_NOT_FOUND %
+                self.servo_serial, self._to_str(response.stdout)):
             logging.error('No servo with serial %s found!', self.servo_serial)
             return
 
@@ -751,7 +945,8 @@
         if resp.exit_status != 0:
             self._process_servodtool_error(resp)
             return ''
-        usb_path = resp.stdout.strip()
+        usb_path = self._to_str(resp.stdout.strip())
+
         logging.info('Usb path of servo %s is %s', self.servo_serial, usb_path)
         return usb_path
 
@@ -764,7 +959,7 @@
         if resp.exit_status != 0:
             self._process_servodtool_error(resp)
             return ''
-        return resp.stdout.strip()
+        return self._to_str(resp.stdout.strip())
 
     def reboot_servo_v3_on_need(self):
         """Check and reboot servo_v3 based on below conditions.
@@ -912,6 +1107,9 @@
                 match = self.MCU_EXTRACTOR.match(line)
                 if match:
                     mcu = match.group(self.MCU_GROUP).lower()
+                    # The |mcu| might contain a '/' in it. Replace it with '.'
+                    # to avoid generating bad filepaths.
+                    mcu = mcu.replace('/', '.')
                     line = match.group(self.LINE_GROUP)
                     if mcu not in mcu_files:
                         mcu_file = os.path.join(log_subdir,
@@ -967,7 +1165,7 @@
         # panic information from servo micro and servo v4 for the current logs.
         # This can only happen if the |_servo| attribute is initialized.
         if self._servo:
-            for mcu in ['servo_micro', 'servo_v4']:
+            for mcu in ['servo_micro', 'servo_v4', 'servo_v4p1']:
                 ctrl = '%s_uart_cmd' % mcu
                 if self._servo.has_control(ctrl):
                     logging.info('Trying to retrieve %r panicinfo into logs',
@@ -994,7 +1192,7 @@
             # Here we failed to find anything.
             logging.info('Failed to find remote servod logs. Ignoring.')
             return []
-        logfiles = res.stdout.strip().split()
+        logfiles = self._to_str(res.stdout.strip()).split()
         timestamps = set()
         for logfile in logfiles:
             ts_match = self.TS_EXTRACTOR.match(logfile)
@@ -1037,8 +1235,10 @@
                 logging.warning('Failed to find servod logs on servo host.')
                 logging.warning(res.stderr.strip())
             return None
-        fname = os.path.basename(res.stdout.strip())
+        fname = os.path.basename(self._to_str(res.stdout.strip()))
         # From the fname, ought to extract the timestamp using the TS_EXTRACTOR
+        if isinstance(fname, bytes):
+            fname = fname.decode("utf-8")
         ts_match = self.TS_EXTRACTOR.match(fname)
         if not ts_match:
             logging.warning('Failed to extract timestamp from servod log file '
@@ -1047,6 +1247,56 @@
             return None
         return ts_match.group(self.TS_GROUP)
 
+    def get_servohost_logs(self, outdir):
+        """Get logs that can help debug servo/servod problems from
+        the servohost.
+
+        @param outdir: directory to create a subdirectory into to place the
+                       servod logs into.
+        """
+        log_dir = os.path.join(outdir, 'servohost_%s' % self.hostname)
+        if os.path.isdir(log_dir):
+            # In multi-DUT testing, each DUT may have its own servohost
+            # instance, which could cause duplicate effort if they share the
+            # same servohost, so we can just skip the collection if the log
+            # dir already exists.
+            logging.info(
+                    'Skip dmesg and messages log collection as %s'
+                    ' already exists.', log_dir)
+            return
+        logging.info('Collecting dmesg and messages from servohost %s',
+                     self.hostname)
+        os.mkdir(log_dir)
+        logging.info('Saving servohost logs to %s.', log_dir)
+        # First collect dmesg from the servohost.
+        crashcollect.collect_command(self, 'dmesg -H',
+                                     os.path.join(log_dir, 'dmesg'))
+        # Collect messages log from the servohost.
+        if not self.is_containerized_servod():
+            try:
+                self.get_file('/var/log/messages', log_dir, try_rsync=False)
+            except error.AutoservRunError:
+                logging.warning(
+                        'Failed to collect messages log from servohost.')
+
+    def get_servod_startup_log(self, outdir):
+        """Get the servod startup log; this log is available even if servod
+        was not started successfully.
+
+        @param outdir: directory to create a subdirectory into to place the
+                       servod logs into.
+        """
+        if self.is_containerized_servod():
+            return
+        log_dir = os.path.join(outdir, 'servod_startup_%s' % self.servo_port)
+        os.mkdir(log_dir)
+        start_up_log = '/var/log/servod_%s.STARTUP.log' % self.servo_port
+        try:
+            self.get_file(start_up_log, log_dir, try_rsync=False)
+        except error.AutoservRunError:
+            logging.warning('Failed to collect servod start up log'
+                            ' from servohost.')
+
     def get_instance_logs(self, instance_ts, outdir, old=False):
         """Collect all logs with |instance_ts| and dump into a dir in |outdir|
 
@@ -1076,14 +1326,31 @@
         cmd = 'find %s -maxdepth 1 -name "log.%s*"' % (self.remote_log_dir,
                                                        instance_ts)
         res = self.run(cmd, stderr_tee=None, ignore_status=True)
-        files = res.stdout.strip().split()
+        files = self._to_str(res.stdout.strip()).split()
         try:
-            self.get_file(files, log_dir, try_rsync=False)
+            if self.is_containerized_servod():
+                client = docker_utils.get_docker_client()
+                container = client.containers.get(self.servod_container_name)
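+                # Each log file is copied out of the container as a tar
+                # stream and extracted into log_dir.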
+                for f in files:
+                    file_stream, stat = container.get_archive(f)
+                    tf = tempfile.NamedTemporaryFile(delete=False)
+                    for block in file_stream:
+                        tf.write(block)
+                    tf.close()
+                    pw_tar = tarfile.TarFile(tf.name)
+                    pw_tar.extractall(log_dir)
+                    os.remove(tf.name)
+            else:
+                self.get_file(files, log_dir, try_rsync=False)
+
             if not os.listdir(log_dir):
                 logging.info('No servod logs retrieved. Ignoring, and removing '
                              '%r again.', log_dir)
                 os.rmdir(log_dir)
                 return
+        except docker.errors.NotFound:
+            logging.info("Servod container %s not found; no servod logs"
+                         " to collect.", self.hostname)
         except error.AutoservRunError as e:
             result = e.result_obj
             if result.exit_status != 0:
@@ -1172,17 +1439,27 @@
         # and not guarantee the servohost is sshable.
         servo_host_ready = self.check_cached_up_status() and self.is_up()
 
-        if servo_host_ready:
+        # If dockerized servod is in use, we can start a new servod container
+        # without the servod process for log collection.
+        if (not self.is_localhost() and not servo_host_ready
+                    and self.is_containerized_servod()):
+            logging.info(
+                    'Start servod container without servod for log collection.'
+            )
+            self.start_containerized_servod(with_servod=False)
+            servo_host_ready = True
+
+        # TODO(crbug.com/1011516): once enabled, remove the check against
+        # localhost and instead check against log-rotation enablement.
+        should_collect_log = (servo_host_ready and self.job
+                              and not self.is_localhost())
+        if should_collect_log:
             instance_ts = self.get_instance_logs_ts()
         else:
             logging.info('Servohost is down, will skip servod log collecting.')
             instance_ts = None
-        # TODO(crbug.com/1011516): once enabled, remove the check against
-        # localhost and instead check against log-rotiation enablement.
-        logs_available = (instance_ts is not None and
-                          self.job and
-                          not self.is_localhost())
-        if logs_available:
+        servod_logs_available = instance_ts is not None
+        if servod_logs_available:
             # Probe whether there was a servod restart, and grab those old
             # logs as well.
             try:
@@ -1199,19 +1476,25 @@
             outdir = None if not self.job else self.job.resultdir
             # In some cases when we run as lab-tools, the job object is None.
             self._servo.close(outdir)
-
-        if logs_available:
+        try:
+            if should_collect_log:
+                self.get_servod_startup_log(self.job.resultdir)
+                self.get_servohost_logs(self.job.resultdir)
             # Grab current (not old like above) logs after the servo instance
             # was closed out.
-            try:
+            if servod_logs_available:
                 self.get_instance_logs(instance_ts, self.job.resultdir)
-            except error.AutoservRunError as e:
-                logging.info('Failed to grab servo logs due to: %s. '
-                             'This error is forgiven.', str(e))
-            except Exception as e:
-                logging.error('Unexpected error grabbing servod logs. %s. '
-                              'Forgiven. Please file a bug and fix or catch '
-                              'in log grabbing function', str(e), exc_info=True)
+        except error.AutoservRunError as e:
+            logging.info(
+                    'Failed to grab servo logs due to: %s. '
+                    'This error is forgiven.', str(e))
+        except Exception as e:
+            logging.error(
+                    'Unexpected error grabbing servod logs. %s. '
+                    'Forgiven. Please file a bug and fix or catch '
+                    'in log grabbing function',
+                    str(e),
+                    exc_info=True)
 
         if self._is_locked and servo_host_ready:
             # Remove the lock if the servohost has been locked.
@@ -1243,49 +1526,15 @@
                 'host': self.get_dut_hostname() or self.hostname,
                 'board': self.servo_board or ''}
 
-    def _is_servo_device_connected(self, servo_type, serial):
-        """Check if device is connected to the labstation.
-
-        Works for all servo devices connected to the labstation.
-        For servo_v3 please use 'self._is_servo_board_present_on_servo_v3'
-
-        @param servo_type:  The type of servo device. Expecting value can be
-                            servo_v4 or servo_micro.
-        @param serial:      The serial number of the device to detect it.
-        """
-        vid_pid = self.SERVO_VID_PID.get(servo_type)
-        if not vid_pid or not serial:
-            # device cannot detected without VID/PID or serial number
-            return False
-        logging.debug('Started to detect %s', servo_type)
-        try:
-            cmd = 'lsusb -v -d %s |grep iSerial |grep %s' % (vid_pid, serial)
-            result = self.run(cmd, ignore_status=True, timeout=30)
-            if result.exit_status == 0 and result.stdout.strip():
-                logging.debug('The %s is plugged in to the host.', servo_type)
-                return True
-            logging.debug('%s device is not detected; %s', servo_type, result)
-            return False
-        except Exception as e:
-            # can be triggered by timeout issue due running the script
-            metrics.Counter(
-                'chromeos/autotest/repair/servo_detection/timeout'
-                ).increment(fields=self._get_host_metrics_data())
-            logging.error('%s device is not detected; %s', servo_type, str(e))
-        return None
-
-    def _is_servo_board_present_on_servo_v3(self):
+    def is_servo_board_present_on_servo_v3(self):
         """Check if servo board is detected on servo_v3"""
-        vid_pids = self.SERVO_VID_PID['servo_v3']
-        if not vid_pids or len(vid_pids) == 0:
-            # device cannot detected without VID/PID
-            return False
         logging.debug('Started to detect servo board on servo_v3')
+        vid_pids = ['18d1:5004', '0403:6014']
         not_detected = 'The servo board is not detected on servo_v3'
         try:
             cmd = 'lsusb | grep "%s"' % "\|".join(vid_pids)
             result = self.run(cmd, ignore_status=True, timeout=30)
-            if result.exit_status == 0 and result.stdout.strip():
+            if result.exit_status == 0 and self._to_str(result.stdout.strip()):
                 logging.debug('The servo board is detected on servo_v3')
                 return True
             logging.debug('%s; %s', not_detected, result)
@@ -1298,44 +1547,6 @@
             logging.error('%s; %s', not_detected, str(e))
         return None
 
-    def _is_main_device_not_detected_on_servo_v4(self):
-        """Check if servod cannot find main device on servo.
-
-        The check based on reading servod logs for servo_v4.
-        """
-        if not self._initial_instance_ts:
-            # latest log not found
-            return False
-        logging.debug('latest log for servod created at %s',
-                      self._initial_instance_ts)
-        try:
-            log_created = calendar.timegm(time.strptime(
-                self._initial_instance_ts,
-                "%Y-%m-%d--%H-%M-%S.%f"))
-        except ValueError as e:
-            logging.debug('Cannot read time from log file name: %s',
-                          self._initial_instance_ts)
-            return False
-        min_time_created = calendar.timegm(time.gmtime())
-        if min_time_created > log_created + 3600:
-            # the log file is old we cannot use it
-            logging.debug('log file was created more than hour ago, too old')
-            return False
-        logging.debug('latest log was created not longer then 1 hour ago')
-
-        # check if servod can detect main device by servo_v4
-        message = 'ERROR - No servo micro or CCD detected for board'
-        cmd = ('cat /var/log/servod_%s/log.%s.INFO |grep "%s"'
-               % (self.servo_port, self._initial_instance_ts, message))
-        result = self.run(cmd, ignore_status=True)
-        if result.stdout.strip():
-            logging.info('Servod cannot detect main device on the servo; '
-                        'Can be caused by bad hardware of servo or '
-                        'issue on the DUT side.')
-            return True
-        logging.debug('The main device is detected')
-        return False
-
     def _require_cr50_servod_config(self):
         """Check whether we need start servod with CONFIG=cr50.xml"""
         dut_host_info = self.get_dut_host_info()
@@ -1353,6 +1564,13 @@
         """
         return self._repair_strategy.verifier_is_good(tag)
 
+    def get_repair_strategy_node(self, tag):
+        """Return the instance of verifier/repair node for host by tag.
+
+        @returns: _DependencyNode or None
+        """
+        return self._repair_strategy.node_by_tag(tag)
+
     def determine_servo_state(self):
         """Determine servo state based on the failed verifier.
 
@@ -1360,42 +1578,67 @@
         The state detecting based on first fail verifier or collecting of
         them.
         """
-        ssh = self.get_verifier_state('servo_ssh')
-        disk_space = self.get_verifier_state('disk_space')
-        start_servod = self.get_verifier_state('servod_job')
+        ssh = self.get_verifier_state('connection')
+        servo_root_present = self.get_verifier_state('servo_root_present')
+        servo_root_present_node = self.get_repair_strategy_node(
+                'servo_root_present')
+        servo_v3_present = self.get_verifier_state('servo_v3_root_present')
+        servo_fw = self.get_verifier_state('servo_fw')
+        servo_fw_update = self.get_repair_strategy_node('servo_fw_update')
+        servod_dut_controller_missing = self.get_repair_strategy_node(
+                'servod_dut_controller_missing')
+        disk_space = self.get_verifier_state('servo_disk_space')
+        start_servod = self.get_verifier_state('start_servod')
+        servod_started = self.get_verifier_state('servod_started')
+        servod_echo = self.get_verifier_state('servod_echo')
         create_servo = self.get_verifier_state('servod_connection')
         init_servo = self.get_verifier_state('servod_control')
+        cr50_low_sbu = self.get_verifier_state('servo_cr50_low_sbu')
+        cr50_off = self.get_verifier_state('servo_cr50_off')
         servo_topology = self.get_verifier_state('servo_topology')
-        dut_connected = self.get_verifier_state('dut_connected')
-        hub_connected = self.get_verifier_state('hub_connected')
-        pwr_button = self.get_verifier_state('pwr_button')
-        lid_open = self.get_verifier_state('lid_open')
-        ec_board = self.get_verifier_state('ec_board')
-        cr50_console = self.get_verifier_state('cr50_console')
-        ccd_testlab = self.get_verifier_state('ccd_testlab')
+        dut_connected = self.get_verifier_state('servo_dut_connected')
+        hub_connected = self.get_verifier_state('servo_hub_connected')
+        pwr_button = self.get_verifier_state('servo_pwr_button')
+        lid_open = self.get_verifier_state('servo_lid_open')
+        ec_board = self.get_verifier_state('servo_ec_console')
+        cr50_console = self.get_verifier_state('servo_cr50_console')
+        ccd_testlab = self.get_verifier_state('servo_ccd_testlab')
 
         if not ssh:
             return servo_constants.SERVO_STATE_NO_SSH
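+        # Map the first failed verifier below to a specific servo state.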
+        if start_servod == hosts.VERIFY_FAILED:
+            return servo_constants.SERVO_STATE_SERVO_HOST_ISSUE
+        if servo_root_present == hosts.VERIFY_FAILED:
+            if not self.servo_serial:
+                return servo_constants.SERVO_STATE_WRONG_CONFIG
+            if hasattr(servo_root_present_node, 'serial_mismatch'):
+                return servo_constants.SERVO_STATE_SERIAL_MISMATCH
+            return servo_constants.SERVO_STATE_NOT_CONNECTED
+        if servo_v3_present == hosts.VERIFY_FAILED:
+            # if we cannot find required board on servo_v3
+            return servo_constants.SERVO_STATE_NEED_REPLACEMENT
+        if servo_fw == hosts.VERIFY_FAILED:
+            logging.info(servo_fw_update)
+            if hasattr(servo_fw_update, 'servo_updater_issue_detected'):
+                return servo_constants.SERVO_STATE_SERVO_UPDATER_ISSUE
+            return servo_constants.SERVO_STATE_NEED_REPLACEMENT
 
-        if (start_servod == hosts.VERIFY_FAILED
-                    or create_servo == hosts.VERIFY_FAILED):
-            # sometimes servo can start with out present servo
-            if self.is_labstation():
-                if not self.servo_serial:
-                    return servo_constants.SERVO_STATE_WRONG_CONFIG
-                if self._is_servo_device_connected(
-                    'servo_v4',
-                    self.servo_serial) == False:
-                    return servo_constants.SERVO_STATE_NOT_CONNECTED
-            elif self._is_servo_board_present_on_servo_v3() == False:
-                return servo_constants.SERVO_STATE_NOT_CONNECTED
+        if dut_connected == hosts.VERIFY_FAILED:
+            return servo_constants.SERVO_STATE_DUT_NOT_CONNECTED
+        if hub_connected == hosts.VERIFY_FAILED:
+            logging.info('Servo HUB not connected')
+            return servo_constants.SERVO_STATE_DUT_NOT_CONNECTED
 
+        if cr50_low_sbu == hosts.VERIFY_FAILED:
+            return servo_constants.SERVO_STATE_SBU_LOW_VOLTAGE
+        if cr50_off == hosts.VERIFY_FAILED:
+            return servo_constants.SERVO_STATE_CR50_NOT_ENUMERATED
+
+        if servod_dut_controller_missing == hosts.VERIFY_FAILED:
+            return servo_constants.SERVO_STATE_SERVOD_DUT_CONTROLLER_MISSING
         if servo_topology == hosts.VERIFY_FAILED:
             return servo_constants.SERVO_STATE_TOPOLOGY_ISSUE
 
-        if (dut_connected == hosts.VERIFY_FAILED
-                    or hub_connected == hosts.VERIFY_FAILED):
-            return servo_constants.SERVO_STATE_DUT_NOT_CONNECTED
         # TODO(otabek@): detect special cases detected by pwr_button
         if dut_connected == hosts.VERIFY_SUCCESS:
             if pwr_button == hosts.VERIFY_FAILED:
@@ -1403,31 +1646,10 @@
                         'chromeos/autotest/repair/servo_unexpected/pwr_button2'
                 ).increment(fields=self._get_host_metrics_data())
 
-        if start_servod == hosts.VERIFY_FAILED:
+        if (servod_started == hosts.VERIFY_FAILED
+                    or servod_echo == hosts.VERIFY_FAILED):
             return servo_constants.SERVO_STATE_SERVOD_ISSUE
 
-        if create_servo == hosts.VERIFY_FAILED:
-            if (self.is_labstation()
-                and self._is_main_device_not_detected_on_servo_v4()):
-                servo_type = None
-                if self.get_dut_host_info():
-                    servo_type = self.get_dut_host_info().get_label_value(
-                        servo_constants.SERVO_TYPE_LABEL_PREFIX)
-                if servo_type and 'servo_micro' in servo_type:
-                    serial = self.get_servo_micro_serial_number()
-                    logging.debug('servo_micro serial: %s', serial)
-                    if self._is_servo_device_detected('servo_micro',
-                                                      serial):
-                        return servo_constants.SERVO_STATE_BAD_RIBBON_CABLE
-                # Device can be not detected because of DUT
-                # TODO (otabek) update after b/159755652 and b/159754985
-                metrics.Counter(
-                    'chromeos/autotest/repair/servo_state/needs_replacement'
-                    ).increment(fields=self._get_host_metrics_data())
-            elif not self.is_labstation():
-                # Here need logic to check if flex cable is connected
-                pass
-
         # one of the reason why servo can not initialized
         if cr50_console == hosts.VERIFY_FAILED:
             return servo_constants.SERVO_STATE_CR50_CONSOLE_MISSING
@@ -1436,7 +1658,7 @@
 
         if (create_servo == hosts.VERIFY_FAILED
                     or init_servo == hosts.VERIFY_FAILED):
-            return servo_constants.SERVO_STATE_SERVOD_ISSUE
+            return servo_constants.SERVO_STATE_SERVOD_PROXY_ISSUE
 
         if ec_board == hosts.VERIFY_FAILED:
             return servo_constants.SERVO_STATE_EC_BROKEN
@@ -1453,19 +1675,24 @@
 
     def is_servo_topology_supported(self):
         """Check if servo_topology is supported."""
+        if self.is_containerized_servod():
+            logging.info('Servod is running within a container.')
+            return True
         if not self.is_up_fast():
             logging.info('Servo-Host is not reachable.')
             return False
-        if not self.is_labstation():
-            logging.info('Servo-topology supported only for labstation.')
-            return False
         if not self.servo_serial:
             logging.info('Servo-topology required a servo serial.')
             return False
-        return True
+        if self.is_labstation():
+            logging.info('Servod is running within labstation.')
+            return True
+        return False
 
     def get_topology(self):
         """Get servo topology."""
+        if not self._topology:
+            self._topology = servo_topology.ServoTopology(self)
         return self._topology
 
     def is_dual_setup(self):
@@ -1490,6 +1717,16 @@
         """
         return self._dut_health_profile
 
+    def print_all_servo_of_host(self):
+        """Print all servos detected on the host."""
+        try:
+            logging.info('\tDevices detected on the host:')
+            devices = self.get_topology().get_list_available_servos()
+            for device in devices:
+                logging.info('\t%s', device)
+        except Exception as e:
+            logging.debug('(Not critical) Failed to list servos: %s', e)
+
 
 def make_servo_hostname(dut_hostname):
     """Given a DUT's hostname, return the hostname of its servo.
@@ -1540,6 +1777,16 @@
     servo_args = {k: v for k, v in six.iteritems(info.attributes)
                   if k in servo_constants.SERVO_ATTR_KEYS}
 
+    if servo_constants.SERVO_HOST_SSH_PORT_ATTR in servo_args:
+        try:
+            servo_args[servo_constants.SERVO_HOST_SSH_PORT_ATTR] = int(
+                    servo_args[servo_constants.SERVO_HOST_SSH_PORT_ATTR])
+        except ValueError:
+            logging.error('servo host ssh port is not an int: %s',
+                          servo_args[servo_constants.SERVO_HOST_SSH_PORT_ATTR])
+            # Reset servo_args because we don't want to use an invalid port.
+            servo_args.pop(servo_constants.SERVO_HOST_SSH_PORT_ATTR, None)
+
     if servo_constants.SERVO_PORT_ATTR in servo_args:
         try:
             servo_args[servo_constants.SERVO_PORT_ATTR] = int(
@@ -1569,6 +1816,7 @@
                       servo_args,
                       try_lab_servo=False,
                       try_servo_repair=False,
+                      try_servo_recovery=True,
                       dut_host_info=None,
                       dut_health_profile=None):
     """Create a ServoHost object for a given DUT, if appropriate.
@@ -1608,26 +1856,45 @@
       * If no other options are found, the parameters will be taken
         from the `servo_args` dict passed in from the caller.
 
-    @param dut            An instance of `Host` from which to take
-                          servo parameters (if available).
-    @param servo_args     A dictionary with servo parameters to use if
-                          they can't be found from `dut`.  If this
-                          argument is supplied, unrepaired exceptions
-                          from `verify()` will be passed back to the
-                          caller.
-    @param try_lab_servo  If not true, servo host creation will be
-                          skipped unless otherwise required by the
-                          caller.
-    @param try_servo_repair  If true, check a servo host with
-                          `repair()` instead of `verify()`.
-    @param dut_host_info: A HostInfo object of the DUT that connected
-                          to this servo.
-    @param dut_health_profile: DUT repair info with history.
+    @param dut:                 An instance of `Host` from which to take
+                                servo parameters (if available).
+    @param servo_args:          A dictionary with servo parameters to use if
+                                they can't be found from `dut`.  If this
+                                argument is supplied, unrepaired exceptions
+                                from `verify()` will be passed back to the
+                                caller.
+    @param try_lab_servo:       If not true, servo host creation will be
+                                skipped unless otherwise required by the
+                                caller.
+    @param try_servo_repair:    If true, check a servo host with
+                                `repair()` instead of `verify()`.
+    @param try_servo_recovery:  If true, start servod in recovery mode.
+                                Default value is True.
+    @param dut_host_info:       A HostInfo object of the DUT that connected
+                                to this servo.
+    @param dut_health_profile:  DUT repair info with history.
 
     @returns: A ServoHost object or None. See comments above.
 
     """
-    servo_dependency = servo_args is not None
+    # We explicitly check whether servo_args is None here (which means
+    # servo is not needed), as servo_args == {} means servo is needed and
+    # we expect to load servo_args from host_info_store.
+    if servo_args is None:
+        servo_dependency = False
+        local_run = False
+    else:
+        servo_dependency = True
+        # If servo_args pass in directly, then this is a local test run.
+        local_run = servo_constants.SERVO_HOST_ATTR in servo_args
+
+    if local_run:
+        logging.warning('User input servo_args detected, will attempt'
+                        ' to start servod and initialize servo connection'
+                        ' directly. All servo/servohost verify and repair'
+                        ' steps will be skipped.')
+
+    # Loading servo args from host_info_store.
     if dut is not None and (try_lab_servo or servo_dependency):
         servo_args_override = get_servo_args_for_host(dut)
         if servo_args_override is not None:
@@ -1641,7 +1908,7 @@
             )
             servo_args = servo_args_override
 
-    if servo_args is None:
+    if not servo_args:
         logging.debug('No servo_args provided, and failed to find overrides.')
         if try_lab_servo or servo_dependency:
             return None, servo_constants.SERVO_STATE_MISSING_CONFIG
@@ -1651,19 +1918,48 @@
 
     servo_hostname = servo_args.get(servo_constants.SERVO_HOST_ATTR)
     servo_port = servo_args.get(servo_constants.SERVO_PORT_ATTR)
-    if not _is_servo_host_information_exist(servo_hostname, servo_port):
-        logging.debug(
-            'Servo connection info missed hostname: %s , port: %s',
-            servo_hostname, servo_port)
-        return None, servo_constants.SERVO_STATE_MISSING_CONFIG
-    if not is_servo_host_information_valid(servo_hostname, servo_port):
-        logging.debug(
-            'Servo connection info is incorrect hostname: %s , port: %s',
-            servo_hostname, servo_port)
-        return None, servo_constants.SERVO_STATE_WRONG_CONFIG
+    if not local_run:
+        if not _is_servo_host_information_exist(servo_hostname, servo_port):
+            logging.debug(
+                    'Servo connection info missed hostname: %s , port: %s',
+                    servo_hostname, servo_port)
+            return None, servo_constants.SERVO_STATE_MISSING_CONFIG
+        if not is_servo_host_information_valid(servo_hostname, servo_port):
+            logging.debug(
+                    'Servo connection info is incorrect hostname: %s , port: %s',
+                    servo_hostname, servo_port)
+            return None, servo_constants.SERVO_STATE_WRONG_CONFIG
+
+        if try_servo_recovery == True:
+            servo_args[servo_constants.SERVO_RECOVERY_MODE] = True
 
     newhost = ServoHost(**servo_args)
-    if not newhost.is_up_fast(count=3):
+    if local_run:
+        try:
+            newhost.start_servod()
+        except Exception:
+            # If we failed to start servod here, we can assume that servod
+            # either already started or the test is running against a
+            # non-standard servohost, so the user is responsible for ensuring
+            # servod is running.
+            pass
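+        # For local runs, connect to servod directly and skip the
+        # verify/repair flow.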
+        try:
+            newhost.initialize_servo()
+            newhost.initialize_dut_for_servo()
+            newhost._servo_state = servo_constants.SERVO_STATE_WORKING
+            return newhost, newhost.get_servo_state()
+        except Exception as e:
+            logging.error('Failed to initialize servo. %s', e)
+            return None, servo_constants.SERVO_STATE_BROKEN
+
+    if newhost.is_containerized_servod():
+        # TODO(otabek@): Update for servod-manager.
+        # Servod docker is not available for access.
+        newhost.start_containerized_servod()
+    # use_icmp is always set to true in autotest, but in labpack
+    # this is determined by autoserv args. We need to make this consistent
+    # across labpack and autotest b/205855910
+    elif newhost.use_icmp and not newhost.is_up_fast(count=3):
         # ServoHost has internal check to wait if servo-host is in reboot
         # process. If servo-host still is not available this check will stop
         # further attempts as we do not have any option to recover servo_host.
@@ -1672,6 +1968,8 @@
     # Reset or reboot servo device only during AdminRepair tasks.
     if try_servo_repair:
         if newhost._is_locked:
+            # Print available servos on the host for debugging.
+            newhost.print_all_servo_of_host()
             # Reset servo if the servo is locked, as we check if the servohost
             # is up, if the servohost is labstation and if the servohost is in
             # lab inside the locking logic.
@@ -1687,7 +1985,7 @@
         newhost.set_dut_hostname(dut.hostname)
     if dut_host_info:
         newhost.set_dut_host_info(dut_host_info)
-    if dut_health_profile and (try_lab_servo or try_servo_repair):
+    if (dut_health_profile and (try_lab_servo or try_servo_repair)):
         try:
             if newhost.is_localhost():
                 logging.info('Servohost is a localhost, skip device'
@@ -1700,26 +1998,6 @@
                     '[Non-critical] Unexpected error while trying to'
                     ' load device health profile; %s', e)
 
-    if try_lab_servo or try_servo_repair:
-        try:
-            logging.info("Check and update servo firmware.")
-            servo_updater.update_servo_firmware(
-                newhost,
-                force_update=False)
-        except Exception as e:
-            logging.error("Servo device update error: %s", e)
-
-    try:
-        newhost.restart_servod(quick_startup=True)
-    except error.AutoservSSHTimeout:
-        logging.warning("Restart servod failed due ssh connection "
-                        "to servohost timed out. This error is forgiven"
-                        " here, we will retry in servo repair process.")
-    except error.AutoservRunError as e:
-        logging.warning("Restart servod failed due to:\n%s\n"
-                        "This error is forgiven here, we will retry"
-                        " in servo repair process.", str(e))
-
     # Note that the logic of repair() includes everything done
     # by verify().  It's sufficient to call one or the other;
     # we don't need both.
@@ -1750,7 +2028,6 @@
             int(port)
         except ValueError:
             return False
-
     return True
 
 
@@ -1769,6 +2046,6 @@
     if port_int < 1 or port_int > 65000:
         return False
     # we expecting host contain only latters, digits and '-' or '_'
-    if not re.match('[a-zA-Z0-9-_\.]*$', hostname) or len(hostname) < 5:
+    if not re.match('[a-zA-Z0-9-_\.:]*$', hostname) or len(hostname) < 5:
         return False
     return True
diff --git a/server/hosts/servo_host_unittest.py b/server/hosts/servo_host_unittest.py
index 67456b9..fb332db 100644
--- a/server/hosts/servo_host_unittest.py
+++ b/server/hosts/servo_host_unittest.py
@@ -1,6 +1,6 @@
-import mock
-import unittest
 import re
+import unittest
+from unittest import mock
 
 import common
 
@@ -26,6 +26,9 @@
         self.hostname = "chromeos1-row1-rack1-host1"
         self._dut_hostname = 'dut-' + self.hostname
         self.servo_port = '9991'
+        self._is_localhost = False
+        self._use_icmp = True
+        self._is_containerized_servod = False
 
     def run(self, command, **kwargs):
         """Finds the matching result by command value"""
@@ -101,6 +104,9 @@
         self.assertTrue(servo_host.is_servo_host_information_valid(hostname, port))
         hostname = 'my.dut-1'
         self.assertTrue(servo_host.is_servo_host_information_valid(hostname, port))
+        hostname = '192.168.0.1:8022'
+        self.assertTrue(
+                servo_host.is_servo_host_information_valid(hostname, port))
         # diff ports
         self.assertTrue(servo_host.is_servo_host_information_valid(hostname, 7000))
         self.assertTrue(servo_host.is_servo_host_information_valid(hostname, 1234))
@@ -138,7 +144,6 @@
         self.assertFalse(servo_host.is_servo_host_information_valid(hostname, 'a1234'))
         self.assertFalse(servo_host.is_servo_host_information_valid(hostname, 'o1234'))
         self.assertFalse(servo_host.is_servo_host_information_valid(hostname, '71234'))
-        self.assertFalse(servo_host.is_servo_host_information_valid(hostname, '71_24'))
         self.assertFalse(servo_host.is_servo_host_information_valid(hostname, '71.24'))
         self.assertFalse(servo_host.is_servo_host_information_valid(hostname, '71-24'))
         self.assertFalse(servo_host.is_servo_host_information_valid(hostname, '-234'))
diff --git a/server/hosts/servo_repair.py b/server/hosts/servo_repair.py
index c32eace..1da6c8b 100644
--- a/server/hosts/servo_repair.py
+++ b/server/hosts/servo_repair.py
@@ -7,17 +7,16 @@
 from __future__ import division
 from __future__ import print_function
 
-import sys
 import functools
 import logging
 import math
+import sys
 import time
 
 import common
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import hosts
 from autotest_lib.client.common_lib import utils
-from autotest_lib.server.cros.power import servo_charger
 from autotest_lib.server.cros.servo import servo
 from autotest_lib.server.hosts import cros_constants
 from autotest_lib.server.hosts import repair_utils
@@ -27,14 +26,11 @@
 import six
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
-
-# TODO(gregorynisbet): will importing chromite always succeed in all contexts?
-from chromite.lib import timeout_util
-
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
 
 def ignore_exception_for_non_cros_host(func):
     """
@@ -62,6 +58,7 @@
     """
     Verifier to trigger a servo host update, if necessary.
 
+    The verifier works only for servo_v3.
     The operation doesn't wait for the update to complete and is
     considered a success whether or not the servo is currently
     up-to-date.
@@ -69,37 +66,172 @@
 
     @timeout_util.TimeoutDecorator(cros_constants.LONG_VERIFY_TIMEOUT_SEC)
     def verify(self, host):
-        # First, only run this verifier if the host is in the physical lab.
-        # Secondly, skip if the test is being run by test_that, because subnet
-        # restrictions can cause the update to fail.
         try:
-            if host.is_labstation():
-                logging.info("Skip update check because the host is a"
-                             " labstation and labstation update is handled"
-                             " by labstation AdminRepair task.")
+            if (
+                    not host.get_dut_host_info()
+                    or not host.get_dut_host_info().servo_cros_stable_version):
+                logging.info('Servo stable version is missing.'
+                             ' Skipping update check action.')
                 return
-            if host.is_in_lab() and host.job and host.job.in_lab:
-                if (
-                        not host.get_dut_host_info() or
-                        not host.get_dut_host_info().servo_cros_stable_version
-                ):
-                    logging.info('Servo stable version missed.'
-                                 ' Skip update check action.')
-                    return
-                # We have seen cases that invalid GPT headers/entries block
-                # v3s from been update, so always try to repair here.
-                # See crbug.com/994396, crbug.com/1057302.
-                host.run('cgpt repair /dev/mmcblk0', ignore_status=True)
-                host.update_image()
+            # We have seen cases where invalid GPT headers/entries block
+            # v3s from being updated, so always try to repair here.
+            # See crbug.com/994396, crbug.com/1057302.
+            host.run('cgpt repair /dev/mmcblk0', ignore_status=True)
+            host.update_image()
         # We don't want failure from update block DUT repair action.
         # See crbug.com/1029950.
         except Exception as e:
-            six.reraise(hosts.AutoservNonCriticalVerifyError, str(e),
+            six.reraise(hosts.AutoservNonCriticalVerifyError,
+                        hosts.AutoservNonCriticalVerifyError(e),
                         sys.exc_info()[2])
 
+    def _is_applicable(self, host):
+        # Run only for servo_v3 host.
+        if host.is_labstation() or host.is_containerized_servod():
+            return False
+        # Only run if the host is in the physical lab.
+        return host.is_in_lab()
+
     @property
     def description(self):
-        return 'servo host software is up-to-date'
+        return 'Servo_v3 host software is up-to-date'
+
+
+class _StartServodVerifier(hosts.Verifier):
+    """First start of servod on the host.
+
+    One-time action that starts servod for the first time.
+    This verifier was created to fit the current flow and will be revisited
+    later.
+    Action never fails!
+    """
+
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        if not hasattr(self, 'started'):
+            logging.info('Starting servod!')
+            host.restart_servod(quick_startup=True)
+        # Cache the value to prevent restarting the service when the
+        # verifier is triggered again.
+        self.started = True
+
+    @property
+    def description(self):
+        return 'Initial servod start'
+
+
+class _RootServoPresentVerifier(hosts.Verifier):
+    """Verifier that the root servo is present."""
+
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        device = None
+        topology = host.get_topology()
+        topology.read(host.get_dut_host_info())
+        try:
+            device = topology.get_root_servo()
+        except Exception as e:
+            if host.is_containerized_servod():
+                host.restart_servod()
+                logging.debug('Restarting servod container (Not critical) %s',
+                              e)
+            else:
+                host.request_reboot()
+                logging.info(
+                        'Reboot labstation requested, it will be handled'
+                        ' by labstation AdminRepair task.'
+                        ' Unable to detect root servo info from topology.')
+                logging.debug('(Not critical) %s', e)
+        if device:
+            logging.info('Root servo is present')
+            return
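+        # Fall back to the cached topology data if live detection failed.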
+        device = topology.get_root_servo_from_cache()
+        if device:
+            logging.debug('Found device: %s', device)
+            if device.get_serial_number() != host.servo_serial:
+                self.serial_mismatch = True
+                raise hosts.AutoservVerifyError('Serial mismatch detected')
+            logging.info('Root servo is present')
+            return
+        # Raise an error in case we got an empty device.
+        raise hosts.AutoservVerifyError('Root servo not found!')
+
+    def _is_applicable(self, host):
+        if host.is_containerized_servod():
+            logging.info('Servod is running within a container.')
+            return True
+        if not host.is_labstation():
+            logging.info('Not supported for servo_v3.')
+            return False
+        # Only run if the host is in the physical lab.
+        return host.is_in_lab()
+
+    @property
+    def description(self):
+        return 'Root servo is present'
+
+
+class _RootServoV3PresentVerifier(hosts.Verifier):
+    """Verifier that first servo is present."""
+
+    RETRY_COUNT = 3
+
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        for a in range(self.RETRY_COUNT):
+            logging.debug('Attempt %s: find servo board on servo_v3.', a + 1)
+            present = host.is_servo_board_present_on_servo_v3()
+            if present == False:
+                raise hosts.AutoservVerifyError('Servo board not found!')
+            elif present == True:
+                logging.debug('Servo board is present')
+                return
+        raise hosts.AutoservVerifyError('Failed to find servo board!')
+
+    def _is_applicable(self, host):
+        if host.is_containerized_servod():
+            logging.info('Servod is running within a container.')
+            return False
+        # Do not run for servos under labstations.
+        if host.is_labstation():
+            logging.info('Servod is running on labstation.')
+            return False
+        # Only run if the host is in the physical lab.
+        return host.is_in_lab()
+
+    @property
+    def description(self):
+        return 'Servo board on servo_v3 is present'
+
+
+class _ServoFwVerifier(hosts.Verifier):
+    """Verifier to check is a servo fw is up-to-date."""
+
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        try:
+            if servo_updater.any_servo_needs_firmware_update(host):
+                raise hosts.AutoservVerifyError(
+                        'Some servo requires firmware update')
+        except servo_updater.ServoFwVersionMissedError as e:
+            # Do not fail as it will trigger re-flash fw on the servo
+            logging.info(
+                    'Issue detecting the new firmware version for servo.'
+                    ' Please file a bug against Fleet Automation team (go/fleet-bug)'
+            )
+
+    def _is_applicable(self, host):
+        if host.is_containerized_servod():
+            logging.info('Servod is running within a container.')
+            return True
+        # Run only for servos under labstations.
+        if not host.is_labstation():
+            logging.info('Not supported for servo_v3.')
+            return False
+        # Only run if the host is in the physical lab.
+        return host.is_in_lab()
+
+    @property
+    def description(self):
+        return 'Servo fw is up-to-date'
 
 
 class _ConfigVerifier(hosts.Verifier):
@@ -263,6 +395,42 @@
         return 'servod upstart job is running'
 
 
+class _ServodEchoVerifier(hosts.Verifier):
+    """
+    Verifier to check that the `servod` upstart job is responsive.
+    """
+
+    SERVOD_INITIALIZED = 'servodtool instance wait-for-active -p %d --timeout 60'
+    SERVOD_RESPONSIVE = 'dut-control -p %d serialname'
+
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        self._verify_servod_initialized(host)
+        self._verify_servod_responsive(host)
+
+    def _verify_servod_initialized(self, host):
+        # Verify that servod initialized.
+        cmd = self.SERVOD_INITIALIZED % host.servo_port
+        res = host.run(cmd, ignore_status=True, timeout=120)
+        if res.exit_status != 0:
+            raise hosts.AutoservVerifyError(
+                    'Servod instance is not initialized')
+        logging.debug("Presented instance: %s", res.stdout.strip())
+
+    def _verify_servod_responsive(self, host):
+        # Verify that servod started and the process is responsive.
+        cmd = self.SERVOD_RESPONSIVE % host.servo_port
+        res = host.run(cmd, ignore_status=True, timeout=120)
+        if res.exit_status != 0:
+            raise hosts.AutoservVerifyError(
+                    'Servod is not responsive for dut-control commands')
+        logging.info('Servod responsive: %s', res.stdout)
+
+    @property
+    def description(self):
+        return 'Servod is running and responsive to dut-control run.'
+
+
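
For reference, the two checks above reduce to a pair of shell commands against a running servod instance. A minimal standalone sketch (port 9999 is a hypothetical stand-in; it assumes servodtool and dut-control are on PATH):

    # Illustrative sketch, not part of this patch. Port 9999 is hypothetical;
    # the two commands mirror SERVOD_INITIALIZED and SERVOD_RESPONSIVE above.
    import subprocess

    def servod_is_healthy(port=9999):
        """Return True if servod on `port` is up and answers dut-control."""
        wait_cmd = ['servodtool', 'instance', 'wait-for-active',
                    '-p', str(port), '--timeout', '60']
        if subprocess.run(wait_cmd).returncode != 0:
            return False  # servod instance never became active
        echo_cmd = ['dut-control', '-p', str(port), 'serialname']
        return subprocess.run(echo_cmd).returncode == 0
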
 class _DiskSpaceVerifier(hosts.Verifier):
     """
     Verifier to make sure there is enough disk space left on servohost.
@@ -277,6 +445,12 @@
     def description(self):
         return 'servohost has enough disk space.'
 
+    def _is_applicable(self, host):
+        if host.is_containerized_servod():
+            logging.info('Servod is running within a container.')
+            return False
+        return True
+
 
 class _ServodConnectionVerifier(hosts.Verifier):
     """
@@ -289,7 +463,7 @@
 
     @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
     def verify(self, host):
-        host.initilize_servo()
+        host.initialize_servo()
 
     @property
     def description(self):
@@ -314,7 +488,8 @@
         try:
             host.initialize_dut_for_servo()
         except Exception as e:
-            six.reraise(hosts.AutoservNonCriticalVerifyError, str(e),
+            six.reraise(hosts.AutoservNonCriticalVerifyError,
+                        hosts.AutoservNonCriticalVerifyError(e),
                         sys.exc_info()[2])
 
     @property
@@ -330,7 +505,7 @@
     """
 
     COMMAND_TO_CHECK_CONSOLE = (
-            'cr50_ccd_level',
+            'gsc_ccd_level',
             'cr50_testlab',
             'cr50_ccd_state_flags',
     )
@@ -344,14 +519,14 @@
                     # Response of command is not important.
                     host.get_servo().get(command)
         except Exception as e:
-            six.reraise(hosts.AutoservNonCriticalVerifyError, str(e),
+            six.reraise(hosts.AutoservNonCriticalVerifyError,
+                        hosts.AutoservNonCriticalVerifyError(e),
                         sys.exc_info()[2])
 
     def _is_applicable(self, host):
-        # Only when DUT is running through ccd.
+        # Only when DUT is running through ccd or c2d2.
         # TODO(coconutruben): replace with ccd API when available in servo.py
-        return (host.get_servo()
-                and host.get_servo().get_main_servo_device() == 'ccd_cr50')
+        return host.get_servo() and host.get_servo().main_device_uses_gsc_drv()
 
     @property
     def description(self):
@@ -372,12 +547,16 @@
     def verify(self, host):
         if not host.get_servo().has_control('cr50_testlab'):
             raise hosts.AutoservVerifyError(
-                'cr50 has to be supported when use servo with '
-                'ccd_cr50/type-c connection')
+                    'gsc has to be supported when use servo with '
+                    'ccd_*/type-c connection')
 
         status = host.get_servo().get('cr50_testlab')
         # check by 'on' to fail when get unexpected value
         if status == 'on':
+            # If servo uses cr50 to control the dut, open ccd so repair actions
+            # that reset the dut will work (cr50_reboot, cold_reset, warm_reset)
+            if host.get_servo().main_device_uses_gsc_drv():
+                host.get_servo().set_nocheck('cr50_testlab', 'open')
             # ccd testlab enabled
             return
         raise hosts.AutoservNonCriticalVerifyError(
@@ -387,8 +566,7 @@
     def _is_applicable(self, host):
         # Only when DUT is running through ccd.
         # TODO(coconutruben): replace with ccd API when available in servo.py
-        return (host.get_servo()
-                and host.get_servo().get_main_servo_device() == 'ccd_cr50')
+        return host.get_servo() and host.get_servo().main_device_is_ccd()
 
     @property
     def description(self):
@@ -402,53 +580,19 @@
     src --  servo in power delivery mode and passes power to the DUT.
     snk --  servo in normal mode and not passes power to DUT.
     We want to ensure that servo_v4_role is set to src.
-
-    TODO(xianuowang@) Convert it to verifier/repair action pair or remove it
-    once we collected enough metrics.
     """
-    # Change to use the  constant value in CrosHost if we move it to
-    # verifier/repair pair.
-    CHANGE_SERVO_ROLE_TIMEOUT = 180
-
     @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
     def verify(self, host):
-        if host.get_servo().get('servo_v4_role') == 'snk':
-            logging.warning('The servo initlized with role snk while'
-                            ' supporting power delivery, resetting role'
-                            ' to src...')
+        if host.get_servo():
+            self._printControl(host.get_servo(), 'ppdut5_mv')
+            self._printControl(host.get_servo(), 'ppchg5_mv')
+        if host.get_servo().get('servo_pd_role') == 'snk':
+            raise hosts.AutoservNonCriticalVerifyError(
+                    'Power delivery not in src role.')
 
-            try:
-                logging.info('setting power direction with retries')
-                # do not pass host since host does not inherit from CrosHost.
-                charge_manager = servo_charger.ServoV4ChargeManager(
-                    host=None,
-                    servo=host.get_servo(),
-                )
-                attempts = charge_manager.start_charging()
-                logging.info('setting power direction took %d tries', attempts)
-                # if control makes it here, we successfully changed the host
-                # direction
-                result = 'src'
-            except Exception as e:
-                logging.error(
-                    'setting power direction with retries failed %s',
-                    str(e),
-                )
-            finally:
-                time.sleep(self.CHANGE_SERVO_ROLE_TIMEOUT)
-
-            result = host.get_servo().get('servo_v4_role')
-            logging.debug('Servo_v4 role after reset: %s', result)
-
-            metrics_data = {
-                'hostname': host.get_dut_hostname() or 'unknown',
-                'status': 'success' if result == 'src' else 'failed',
-                'board': host.servo_board or 'unknown',
-                'model': host.servo_model or 'unknown'
-            }
-            metrics.Counter(
-                'chromeos/autotest/repair/verifier/power_delivery3'
-            ).increment(fields=metrics_data)
+    def _printControl(self, servo, control):
+        if servo.has_control(control):
+            logging.info("%s: %s", control, servo.get(control))
 
     def _is_applicable(self, host):
         return (host.is_in_lab() and
@@ -483,6 +627,7 @@
         logging.debug('Started check by ppdut5_mv:on')
         try:
             val = host.get_servo().get('ppdut5_mv')
+            logging.info('ppdut5_mv=%s', val)
             if val < self.MAX_PPDUT5_MV_WHEN_NOT_CONNECTED:
                 # servo is not connected to the DUT
                 return False
@@ -511,7 +656,9 @@
         """
         logging.debug('Started check by cold_reset:on')
         try:
-            if host.get_servo().get('cold_reset') == 'on':
+            val = host.get_servo().get('cold_reset')
+            logging.info('cold_reset=%s', val)
+            if val == 'on':
                 # If cold_reset has is on can be right signal
                 # or caused by missing connection between servo_micro and DUT.
                 # if we can switch it to the off then it was signal.
@@ -533,14 +680,10 @@
         return False
 
     def _is_servo_v4_type_a(self, host):
-        return (host.is_labstation()
-                and host.get_servo().has_control('servo_v4_type')
-                and host.get_servo().get('servo_v4_type') == 'type-a')
+        return host.is_labstation() and host.get_servo().is_servo_v4_type_a()
 
     def _is_servo_v4_type_c(self, host):
-        return (host.is_labstation()
-                and host.get_servo().has_control('servo_v4_type')
-                and host.get_servo().get('servo_v4_type') == 'type-c')
+        return host.is_labstation() and host.get_servo().is_servo_v4_type_c()
 
     def _is_servo_v3(self, host):
         return not host.is_labstation()
@@ -565,23 +708,14 @@
                 raise hosts.AutoservVerifyError(
                         'Servo_micro is likely not connected to the DUT.')
         elif self._is_servo_v4_type_c(host):
-            logging.info('Skip check for type-c till confirm it in the lab')
-            # TODO(otabek@) block check till verify on the lab
-            # if not self._is_usb_hub_connected(host):
-            #     raise hosts.AutoservVerifyError(
-            #             'Servo_v4 is likely not connected to the DUT.')
+            if (host.get_servo().supports_built_in_pd_control()
+                        and not self._is_usb_hub_connected(host)):
+                raise hosts.AutoservVerifyError(
+                        'Servo_v4 is likely not connected to the DUT.')
         elif self._is_servo_v3(host):
             if not self._is_ribbon_cable_connected(host):
                 raise hosts.AutoservVerifyError(
                         'Servo_v3 is likely not connected to the DUT.')
-        else:
-            logging.warn('Unsupported servo type!')
-
-    def _is_applicable(self, host):
-        if host.is_ec_supported():
-            return True
-        logging.info('DUT is not support EC.')
-        return False
 
     @property
     def description(self):
@@ -607,7 +741,7 @@
     def _is_applicable(self, host):
         if host.is_ec_supported():
             return True
-        logging.info('DUT is not support EC.')
+        logging.info('Host does not support EC.')
         return False
 
     @property
@@ -615,13 +749,112 @@
         return 'Ensure the Servo HUB connected to the DUT.'
 
 
+class _BaseCr50SBUVerifier(_BaseDUTConnectionVerifier):
+    """Check servod issue related to SBU voltage."""
+
+    # Min SBU voltage to detect usb-device
+    SBU_THRESHOLD = 2500.0
+    # How many times to sample the SBU voltage to compute the average.
+    _TOTAL_CHECK_SBU_VOLTAGE = 10
+
+    def _is_applicable(self, host):
+        if host.is_localhost():
+            logging.info('Target servo is not in a lab,'
+                         ' action is not applicable.')
+            return False
+        if not self._is_servo_v4_type_c(host):
+            logging.info('The check supports only servo-v4 (type-c),'
+                         ' action is not applicable.')
+            return False
+        return True
+
+    def _is_sbu_voltage_issue(self, host):
+        """Check if servo does not detected by SBU voltage issue."""
+        command = 'dut_sbu_voltage_float_fault'
+        if host.get_servo().has_control(command):
+            if host.get_servo().get(command) == 'on':
+                return True
+        return False
+
+    def _get_max_sbu_value(self, host):
+        """Get average voltage on SBU lines."""
+        servo = host.get_servo()
+        if not servo.has_control('servo_dut_sbu1_mv'):
+            return -1
+        s1 = 0
+        s2 = 0
+        for i in range(self._TOTAL_CHECK_SBU_VOLTAGE):
+            try:
+                sbu1 = int(servo.get('servo_dut_sbu1_mv'))
+                sbu2 = int(servo.get('servo_dut_sbu2_mv'))
+                logging.debug('Attempt:%2d, sbu1 %4d sbu2 %4d', i, sbu1, sbu2)
+                s1 += sbu1
+                s2 += sbu2
+            except error.TestFail as e:
+                # This is a nice to have but if reading this fails, it
+                # shouldn't interfere with the test.
+                logging.exception(e)
+        logging.debug('Total:  sbu1 %4d sbu2 %4d', s1, s2)
+        # Use float to get values with changes
+        s1 = s1 / float(self._TOTAL_CHECK_SBU_VOLTAGE)
+        s2 = s2 / float(self._TOTAL_CHECK_SBU_VOLTAGE)
+        logging.debug('Avg: sbu1 %7.2f sbu2 %7.2f', s1, s2)
+        max_sbu = max(s1, s2)
+        logging.info('Max sbu: %7.2f', max_sbu)
+        return max_sbu
+
+
+class _Cr50OffVerifier(_BaseCr50SBUVerifier):
+    """Check if CR50 is in deep sleep and fail to detected.
+
+    If SBU voltage is higher threshold but still cannot be detected
+    as usb device then probably CR50 is in deep sleep.
+    Threshold is 2500 mV on any SBU lines.
+    """
+
+    @ignore_exception_for_non_cros_host
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        if self._is_sbu_voltage_issue(host):
+            if self._get_max_sbu_value(host) > self.SBU_THRESHOLD:
+                raise hosts.AutoservVerifyError(
+                        'CR50 voltage detected but usb device not enumerated')
+
+    @property
+    def description(self):
+        return 'CR50 voltage detected but not enumerated.'
+
+
+class _Cr50LowSBUVerifier(_BaseCr50SBUVerifier):
+    """Check if servod fail to detect CR50 due low voltage.
+
+    CR50 cannot be enumerated as SBU voltage line lower then
+    threshold.
+    Threshold is 2500 mV on any SBU lines.
+    """
+
+    @ignore_exception_for_non_cros_host
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        if self._is_sbu_voltage_issue(host):
+            v = self._get_max_sbu_value(host)
+            if v > 1 and v <= self.SBU_THRESHOLD:
+                raise hosts.AutoservVerifyError(
+                        'Cr50 is not detected due to SBU voltages'
+                        ' being below %dmV' % self.SBU_THRESHOLD)
+
+    @property
+    def description(self):
+        return 'Cr50 not detected as both SBU voltages are below threshold.'
+
+
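
Both SBU verifiers above apply the same 2500 mV threshold to the averaged per-line voltages, and only act when servod has already reported dut_sbu_voltage_float_fault as 'on'. A minimal standalone sketch of that classification, under those assumptions:

    # Illustrative sketch, not part of this patch; mirrors the threshold logic
    # of _Cr50OffVerifier and _Cr50LowSBUVerifier above.
    SBU_THRESHOLD = 2500.0  # mV, same value as _BaseCr50SBUVerifier

    def classify_sbu(sbu1_samples_mv, sbu2_samples_mv):
        """Classify an SBU fault from raw per-line voltage samples (in mV)."""
        avg1 = sum(sbu1_samples_mv) / float(len(sbu1_samples_mv))
        avg2 = sum(sbu2_samples_mv) / float(len(sbu2_samples_mv))
        max_sbu = max(avg1, avg2)
        if max_sbu > SBU_THRESHOLD:
            # Voltage is present but the device did not enumerate:
            # CR50 deep sleep is the likely cause.
            return 'cr50_deep_sleep_suspected'
        if 1 < max_sbu <= SBU_THRESHOLD:
            # Voltage too low for the USB device to enumerate.
            return 'sbu_voltage_too_low'
        return 'no_sbu_signal'

    print(classify_sbu([2600] * 10, [120] * 10))  # cr50_deep_sleep_suspected
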
 class _TopologyVerifier(hosts.Verifier):
     """Verifier that all servo component is presented."""
 
     @ignore_exception_for_non_cros_host
     @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
     def verify(self, host):
-        topology = servo_topology.ServoTopology(host)
+        topology = host.get_topology()
         topology.read(host.get_dut_host_info())
         try:
             # Linux takes 1 second to detect and enumerate USB device since
@@ -632,7 +865,9 @@
                               dual_set=host.is_dual_setup(),
                               compare=True)
         except servo_topology.ServoTopologyError as e:
-            six.reraise(hosts.AutoservVerifyError, str(e), sys.exc_info()[2])
+            six.reraise(hosts.AutoservVerifyError,
+                        hosts.AutoservVerifyError(e),
+                        sys.exc_info()[2])
 
     def _is_applicable(self, host):
         if host.is_localhost():
@@ -652,14 +887,14 @@
 
 class _PowerButtonVerifier(hosts.Verifier):
     """
-    Verifier to check sanity of the `pwr_button` signal.
+    Verifier to check the `pwr_button` signal.
 
     Tests that the `pwr_button` signal shows the power button has been
     released.  When `pwr_button` is stuck at `press`, it commonly
     indicates that the ribbon cable is disconnected.
     """
     # TODO (crbug.com/646593) - Remove list below once servo has been updated
-    # with a dummy pwr_button signal.
+    # with a fake pwr_button signal.
     _BOARDS_WO_PWR_BUTTON = ['arkham', 'gale', 'mistral', 'storm', 'whirlwind']
 
     @ignore_exception_for_non_cros_host
@@ -670,7 +905,8 @@
         try:
             button = host.get_servo().get('pwr_button')
         except Exception as e:
-            six.reraise(hosts.AutoservNonCriticalVerifyError, str(e),
+            six.reraise(hosts.AutoservNonCriticalVerifyError,
+                        hosts.AutoservNonCriticalVerifyError(e),
                         sys.exc_info()[2])
 
         if button != 'release':
@@ -744,7 +980,7 @@
 
 class _LidVerifier(hosts.Verifier):
     """
-    Verifier to check sanity of the `lid_open` signal.
+    Verifier to check the `lid_open` signal.
     """
 
     @ignore_exception_for_non_cros_host
@@ -753,7 +989,8 @@
         try:
             lid_open = host.get_servo().get('lid_open')
         except Exception as e:
-            six.reraise(hosts.AutoservNonCriticalVerifyError, str(e),
+            six.reraise(hosts.AutoservNonCriticalVerifyError,
+                        hosts.AutoservNonCriticalVerifyError(e),
                         sys.exc_info()[2])
 
         if lid_open != 'yes' and lid_open != 'not_applicable':
@@ -765,29 +1002,92 @@
         return 'lid_open control is normal'
 
 
-class _EcBoardVerifier(hosts.Verifier):
+class ECConsoleVerifier(hosts.Verifier):
     """
-    Verifier response from the 'ec_board' control.
+    Verify the response from the EC console.
     """
 
+    COMMAND_TO_CHECK_CONSOLE = (
+            'ec_system_powerstate',
+            'ec_board',
+    )
+
     @ignore_exception_for_non_cros_host
-    @timeout_util.TimeoutDecorator(cros_constants.SHORT_VERIFY_TIMEOUT_SEC)
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
     def verify(self, host):
-        if host.is_ec_supported():
-            ec_board_name = ''
-            try:
-                ec_board_name = host.get_servo().get_ec_board()
-                logging.debug('EC board: %s', ec_board_name)
-            except Exception as e:
-                raise hosts.AutoservNonCriticalVerifyError(
-                        '`ec_board` control is not responding; '
-                        'may be caused of broken EC firmware')
-        else:
-            logging.info('The board not support EC')
+        if not host.is_ec_supported():
+            logging.info('The board does not support EC')
+            return
+
+        for command in self.COMMAND_TO_CHECK_CONSOLE:
+            if host.get_servo().has_control(command):
+                try:
+                    # Response of command is not important.
+                    r = host.get_servo().get(command)
+                    logging.debug('Result %s:%s', command, r)
+                    # Exiting as we confirmed that console is working.
+                    return
+                except Exception as e:
+                    logging.error('Failed to read %s control. Error: %s',
+                                  command, e)
+        # If we reached this point then no command succeeded.
+        raise hosts.AutoservNonCriticalVerifyError(
+                'EC console is not responding; '
+                'may be caused by broken EC firmware')
 
     @property
     def description(self):
-        return 'Check EC by get `ec_board` control'
+        return 'Check EC console'
+
+
+class ServodDutControllerMissingVerifier(hosts.Verifier):
+    """Verifier to check whether the servod dut controller is missing or not.
+
+    When servod is initializing, it checks if DUT controller is
+    missing. If yes,then it sets 'dut_controller_missing_fault' to
+    'on', otherwise, to 'off'. Missing controller means servo
+    component connected to the DUT is missing, or is not responsive.
+    """
+
+    @timeout_util.TimeoutDecorator(cros_constants.VERIFY_TIMEOUT_SEC)
+    def verify(self, host):
+        logging.debug('ServodDutControllerMissingVerifier: Starting verifier.')
+        if host.get_servo().get('dut_controller_missing_fault') == 'on':
+            logging.debug('ServodDutControllerMissingVerifier:'
+                          ' DUT Controller missing fault is on.')
+            raise hosts.AutoservVerifyError('Servod is missing dut controller')
+        else:
+            logging.debug('ServodDutControllerMissingVerifier:'
+                          ' DUT Controller missing fault is not on.')
+
+    def _is_applicable(self, host):
+        if host.is_containerized_servod():
+            logging.debug('ServodDutControllerMissingVerifier:'
+                          ' Detected containerized servod.')
+            logging.info('Servod is running within a container')
+            return True
+        if not host.is_labstation():
+            logging.debug('ServodDutControllerMissingVerifier:'
+                          ' Detected non-labstation.')
+            logging.info('Not supported for servo_v3.')
+            return False
+        return host.is_in_lab()
+
+    @property
+    def description(self):
+        return 'ensure servod does not have missing dut controller'
+
+
+class _ConnectionVerifier(repair_utils.SshVerifier):
+    """
+    Ensure we can connect to the machine or container running servod.
+    """
+
+    def verify(self, host):
+        if host.is_containerized_servod():
+            # We need to start the servod container first before checking
+            # that it is present.
+            host.start_containerized_servod()
+        return super(_ConnectionVerifier, self).verify(host)
+
+    @property
+    def description(self):
+        return 'Check the connection to the machine or container running servod.'
 
 
 class _RestartServod(hosts.RepairAction):
@@ -795,10 +1095,12 @@
 
     @timeout_util.TimeoutDecorator(cros_constants.REPAIR_TIMEOUT_SEC)
     def repair(self, host):
-        if not host.is_cros_host():
+        if host.is_containerized_servod():
+            logging.debug('Restarting servod container')
+        elif not host.is_cros_host():
             raise hosts.AutoservRepairError(
                     'Can\'t restart servod: not running '
-                    'embedded Chrome OS.',
+                    'embedded ChromeOS.',
                     'servo_not_applicable_to_non_cros_host')
         host.restart_servod()
 
@@ -811,7 +1113,7 @@
     """Try repair servo by reboot servohost.
 
     This is the same as the standard `RebootRepair`, for servo_v3 it will
-    reboot the beaglebone board immidiately while for labstation it will
+    reboot the beaglebone board immediately while for labstation it will
     request a reboot by touch a flag file on its labstation, then
     labstation reboot will be handled by labstation AdminRepair task as
     labstation host multiple servos and need do an synchronized reboot.
@@ -845,22 +1147,45 @@
     """Try repair servod by toggle cc.
 
     When cr50 is not enumerated we can try to recover it by toggle cc line.
-    Repair action running from servohost.
-    We using usb_console temporally witch required stop servod.
-
-    TODO(otabek@) review the logic when b/159755652 implemented
     """
+    # Timeout for shutting down the configuration channel.
+    CC_OFF_TIMEOUT = 10
+    # Timeout for initializing the configuration channel.
+    CC_ON_TIMEOUT = 30
 
     @timeout_util.TimeoutDecorator(cros_constants.REPAIR_TIMEOUT_SEC)
     def repair(self, host):
-        host.stop_servod()
-        self._reset_usbc_pigtail_connection(host)
+        logging.info('Turn off configuration channel and wait 10 seconds.')
+        servo_uart_cmd = 'servo_v4_uart_cmd'
+        if not host.get_servo().has_control(servo_uart_cmd):
+            servo_uart_cmd = 'servo_v4p1_uart_cmd'
+        host.get_servo().set_nocheck(servo_uart_cmd, 'cc off')
+        # wait until the command takes effect
+        time.sleep(self.CC_OFF_TIMEOUT)
+
+        logging.info('Turn on configuration channel and wait 30 seconds.')
+        # alternative option to turn line on is by `cc srcdts`
+        host.get_servo().set_nocheck('servo_pd_role', 'src')
+        host.get_servo().set_nocheck('servo_dts_mode', 'on')
+        # wait until the command takes effect
+        time.sleep(self.CC_ON_TIMEOUT)
         host.restart_servod()
 
     def _is_applicable(self, host):
-        if host.is_localhost() or not host.is_labstation():
+        if host.is_localhost():
+            logging.debug('Not supported for localhost.')
             return False
         if not host.servo_serial:
+            logging.debug('Servod does not have serial.')
+            return False
+        if not host.servo_recovery:
+            logging.debug('Servod is not running in recovery mode.')
+            return False
+        if not (host.is_labstation() or host.is_containerized_servod()):
+            logging.debug('Not supported for servo_v3.')
+            return False
+        if not host.get_servo():
+            logging.debug('Servo is not initialized.')
             return False
         return self._is_type_c(host)
 
@@ -868,45 +1193,114 @@
         if host.get_dut_host_info():
             servo_type = host.get_dut_host_info().get_label_value(
                     servo_constants.SERVO_TYPE_LABEL_PREFIX)
-            return 'ccd_cr50' in servo_type
+            return 'ccd' in servo_type
         return False
 
-    def _reset_usbc_pigtail_connection(self, host):
-        """Reset USBC pigtail connection on servo board.
-
-        To reset need to run 'cc off' and then 'cc srcdts' in usb_console.
-        """
-        logging.debug('Starting reset USBC pigtail connection.')
-
-        def _run_command(cc_command):
-            """Run configuration channel commands.
-
-            @returns: True if pas successful and False if fail.
-            """
-            try:
-                cmd = (r"echo 'cc %s' | usb_console -d 18d1:501b -s %s" %
-                       (cc_command, host.servo_serial))
-                resp = host.run(cmd, timeout=host.DEFAULT_TERMINAL_TIMEOUT)
-                return True
-            except Exception as e:
-                logging.info('(Non-critical) %s.', e)
-            return False
-
-        logging.info('Turn off configuration channel. And wait 5 seconds.')
-        if _run_command('off'):
-            # wait till command will be effected
-            time.sleep(5)
-            logging.info('Turn on configuration channel. '
-                         'And wait 15 seconds.')
-            if _run_command('srcdts'):
-                # wait till command will be effected
-                time.sleep(15)
-
     @property
     def description(self):
         return 'Toggle cc lines'
 
 
+class _FakedisconnectRepair(hosts.RepairAction):
+    """Try repair servod by mimic reconnection of servo.
+
+    When cr50 is not enumerated, we can try to recover it by reconnecting
+    to the DUT.
+    """
+    # Delay to disconnect.
+    DISC_DELAY_MS = 100
+    # Timeout to wait to restore the connection.
+    DISC_TIMEOUT_MS = 2000
+    # Timeout to wait to execute the command and apply effect.
+    EXEC_TIMEOUT = (DISC_DELAY_MS + DISC_TIMEOUT_MS) / 1000 + 2
+
+    @timeout_util.TimeoutDecorator(cros_constants.REPAIR_TIMEOUT_SEC)
+    def repair(self, host):
+        disc_cmd = ('fakedisconnect %d %d' %
+                    (self.DISC_DELAY_MS, self.DISC_TIMEOUT_MS))
+        # cannot use 'set' as the control does not return executed commands
+        servo_uart_cmd = 'servo_v4_uart_cmd'
+        if not host.get_servo().has_control(servo_uart_cmd):
+            servo_uart_cmd = 'servo_v4p1_uart_cmd'
+        host.get_servo().set_nocheck(servo_uart_cmd, disc_cmd)
+        logging.debug('Waiting %ss for the action to take effect',
+                      self.EXEC_TIMEOUT)
+        time.sleep(self.EXEC_TIMEOUT)
+        host.restart_servod()
+
+    def _is_applicable(self, host):
+        if host.is_localhost():
+            logging.debug('Not supported for localhost.')
+            return False
+        if not host.servo_serial:
+            logging.debug('Servod does not have serial.')
+            return False
+        if not host.servo_recovery:
+            logging.debug('Servod is not running in recovery mode.')
+            return False
+        if not (host.is_labstation() or host.is_containerized_servod()):
+            logging.debug('Not supported for servo_v3.')
+            return False
+        if not host.get_servo():
+            logging.debug('Servo is not initialized.')
+            return False
+        return self._is_type_c(host)
+
+    def _is_type_c(self, host):
+        if host.get_dut_host_info():
+            servo_type = host.get_dut_host_info().get_label_value(
+                    servo_constants.SERVO_TYPE_LABEL_PREFIX)
+            return 'ccd' in servo_type
+        return False
+
+    @property
+    def description(self):
+        return 'Fake reconnect to DUT'
+
+
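
For reference, EXEC_TIMEOUT above follows directly from the disconnect parameters; with Python 3 division it comes to 4.1 seconds (the 2.1 s disconnect window plus a 2 s margin):

    # Same arithmetic as _FakedisconnectRepair.EXEC_TIMEOUT (Python 3 division).
    DISC_DELAY_MS = 100     # delay before the fake disconnect fires
    DISC_TIMEOUT_MS = 2000  # time allowed to restore the connection
    EXEC_TIMEOUT = (DISC_DELAY_MS + DISC_TIMEOUT_MS) / 1000 + 2
    print(EXEC_TIMEOUT)     # 4.1
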
+class _PowerDeliveryRepair(hosts.RepairAction):
+    """Repair to check servo_v4_role for servos that support
+    power delivery feature(a.k.a power pass through).
+
+    There are currently two position of servo_v4_role, src and snk:
+    src --  servo in power delivery mode and passes power to the DUT.
+    snk --  servo in normal mode and not passes power to DUT.
+    """
+    # How many times to retry setting PD to the correct mode and verify
+    # that it stays there.
+    # Set to 5 as each attempt has 10 attempts inside the 'set' method.
+    _SET_ATTEMPT_COUNT = 5
+
+    @timeout_util.TimeoutDecorator(cros_constants.REPAIR_TIMEOUT_SEC)
+    def repair(self, host):
+        host.get_servo().set_nocheck('servo_pd_role', 'snk')
+        time.sleep(1)
+        for x in range(self._SET_ATTEMPT_COUNT):
+            logging.debug('Trying to set servo_v4_role to src.'
+                          ' Attempt: %s', x + 1)
+            try:
+                host.get_servo().set('servo_pd_role', 'src')
+                # Wait a few seconds as it can change back to snk if PD
+                # on the servo has an issue.
+                time.sleep(5)
+            except BaseException as e:
+                logging.debug('Setting PD with retries failed %s', e)
+            if host.get_servo().get('servo_pd_role') == 'src':
+                break
+        if host.get_servo().get('servo_pd_role') == 'snk':
+            raise hosts.AutoservNonCriticalVerifyError(
+                    'Cannot switch power delivery to the src role')
+        # Restart servod to re-initialize servos.
+        # In some cases, a device that did not receive power can block
+        # detection of servo components.
+        host.restart_servod()
+
+    def _is_type_c(self, host):
+        return (host.is_in_lab() and host.get_servo()
+                and host.get_servo().supports_built_in_pd_control())
+
+    @property
+    def description(self):
+        return 'Recover power delivery on servo'
+
+
 class _ECRebootRepair(hosts.RepairAction):
     """
     Reboot EC on DUT from servo.
@@ -978,6 +1372,33 @@
         return 'Clean up old logs/metrics on servohost to free up disk space.'
 
 
+class _ServoFwUpdateRepair(hosts.RepairAction):
+    """Update firmware for servos.
+
+    We try to update the servo 3 times and then try a forced update.
+    """
+
+    @timeout_util.TimeoutDecorator(cros_constants.REPAIR_TIMEOUT_SEC)
+    def repair(self, host):
+        try:
+            servo_updater.update_servo_firmware(host,
+                                                try_attempt_count=3,
+                                                force_update=False,
+                                                try_force_update=True)
+        except servo_updater.ServoUpdaterError as er:
+            # Catch the servo_updater issue so we can flag it.
+            self.servo_updater_issue_detected = True
+            raise hosts.AutoservVerifyError('ServoUpdater issue detected')
+
+    def _is_applicable(self, host):
+        # Run only for servo_v4 and servo_v4p1.
+        return host.is_labstation() or host.is_containerized_servod()
+
+    @property
+    def description(self):
+        return 'Update servo-fw if required.'
+
+
 class _ServoMicroFlashRepair(hosts.RepairAction):
     """
     Remove old logs/metrics/crash_dumps on servohost to free up disk space.
@@ -989,7 +1410,7 @@
         if not host.is_cros_host():
             raise hosts.AutoservRepairError(
                     'Can\'t restart servod: not running '
-                    'embedded Chrome OS.',
+                    'embedded ChromeOS.',
                     'servo_not_applicable_to_non_cros_host')
         servo = host.get_servo()
         if not servo or self._TARGET_SERVO not in servo.get_servo_type():
@@ -1032,48 +1453,100 @@
         return 'Re-flash servo_micro firmware.'
 
 
+def _servo_verifier_actions():
+    """
+    Return a verifiers for a `ServoHost`.
+    """
+    return (
+            (_ConnectionVerifier, 'connection', []),
+            (_RootServoPresentVerifier, 'servo_root_present', ['connection']),
+            (_RootServoV3PresentVerifier, 'servo_v3_root_present',
+             ['connection']),
+            (_ServoFwVerifier, 'servo_fw', ['servo_root_present']),
+            (_StartServodVerifier, 'start_servod',
+             ['servo_fw', 'servo_v3_root_present']),
+            (_DiskSpaceVerifier, 'servo_disk_space', ['connection']),
+            (_UpdateVerifier, 'servo_update', ['servo_v3_root_present']),
+            (_BoardConfigVerifier, 'servo_config_board', ['connection']),
+            (_SerialConfigVerifier, 'servo_config_serial', ['connection']),
+            (_ServodJobVerifier, 'servod_started', [
+                    'start_servod', 'servo_config_board',
+                    'servo_config_serial', 'servo_disk_space'
+            ]),
+            (_ServodEchoVerifier, 'servod_echo', ['servod_started']),
+            (_TopologyVerifier, 'servo_topology', ['servod_echo']),
+            (_ServodConnectionVerifier, 'servod_connection', ['servod_echo']),
+            (_Cr50LowSBUVerifier, 'servo_cr50_low_sbu', ['servod_connection']),
+            (ServodDutControllerMissingVerifier,
+             'servod_dut_controller_missing', ['servod_connection']),
+            (_Cr50OffVerifier, 'servo_cr50_off', ['servod_connection']),
+            (_ServodControlVerifier, 'servod_control', ['servod_connection']),
+            (_DUTConnectionVerifier, 'servo_dut_connected',
+             ['servod_connection']),
+            (_ServoHubConnectionVerifier, 'servo_hub_connected',
+             ['servo_dut_connected']),
+            (_PowerButtonVerifier, 'servo_pwr_button', ['servo_hub_connected'
+                                                        ]),
+            (_BatteryVerifier, 'servo_battery', ['servo_hub_connected']),
+            (_LidVerifier, 'servo_lid_open', ['servo_hub_connected']),
+            (ECConsoleVerifier, 'servo_ec_console', ['servo_dut_connected']),
+            (_Cr50ConsoleVerifier, 'servo_cr50_console',
+             ['servo_dut_connected']),
+            (_CCDTestlabVerifier, 'servo_ccd_testlab', ['servo_cr50_console']),
+            (_CCDPowerDeliveryVerifier, 'servo_power_delivery',
+             ['servod_connection']),
+    )
+
+
+def _servo_repair_actions():
+    """
+    Return the repair actions for a `ServoHost`.
+    """
+    config = ['servo_config_board', 'servo_config_serial', 'start_servod']
+    base_triggers = [
+            'servod_started', 'servo_topology', 'servod_connection',
+            'servod_echo', 'servod_control', 'servo_dut_connected',
+            'servo_hub_connected', 'servo_pwr_button', 'servo_cr50_console',
+            'servo_cr50_low_sbu', 'servo_cr50_off', 'servo_power_delivery',
+            'servod_dut_controller_missing'
+    ]
+    dut_triggers = [
+            'servod_control', 'servo_lid_open', 'servo_ec_console',
+            'servo_topology', 'servo_dut_connected', 'servo_hub_connected',
+            'servo_cr50_low_sbu', 'servo_cr50_off', 'servo_cr50_console',
+            'servo_power_delivery', 'servod_dut_controller_missing'
+    ]
+    reboot_triggers = [
+            'servo_topology', 'servo_root_present', 'servo_disk_space',
+            'servo_power_delivery'
+    ]
+    return (
+            (_ServoFwUpdateRepair, 'servo_fw_update', ['connection'],
+             ['servo_fw']),
+            (_DiskCleanupRepair, 'servo_disk_cleanup', ['connection'],
+             ['servo_disk_space']),
+            (_ServoMicroFlashRepair, 'servo_micro_flash',
+             ['connection', 'servo_topology'], ['servo_dut_connected']),
+            (_RestartServod, 'servod_restart', ['connection', 'servo_fw'],
+             config + base_triggers),
+            (_ServoRebootRepair, 'servo_reboot', ['connection'],
+             reboot_triggers),
+            (_PowerDeliveryRepair, 'servo_pd_recover', ['servod_connection'],
+             base_triggers),
+            (_FakedisconnectRepair, 'servo_fakedisconnect',
+             ['servod_connection'], base_triggers),
+            (_ToggleCCLineRepair, 'servo_cc', ['servod_connection'],
+             base_triggers),
+            (_DutRebootRepair, 'servo_dut_reboot', ['servod_connection'],
+             dut_triggers),
+            (_ECRebootRepair, 'servo_ec_reboot', ['servod_connection'],
+             dut_triggers),
+    )
+
+
 def create_servo_repair_strategy():
     """
     Return a `RepairStrategy` for a `ServoHost`.
     """
-    config = ['brd_config', 'ser_config']
-    verify_dag = [
-            (repair_utils.SshVerifier, 'servo_ssh', []),
-            (_DiskSpaceVerifier, 'disk_space', ['servo_ssh']),
-            (_UpdateVerifier, 'update', ['servo_ssh']),
-            (_BoardConfigVerifier, 'brd_config', ['servo_ssh']),
-            (_SerialConfigVerifier, 'ser_config', ['servo_ssh']),
-            (_ServodJobVerifier, 'servod_job', config + ['disk_space']),
-            (_TopologyVerifier, 'servo_topology', ['servod_job']),
-            (_ServodConnectionVerifier, 'servod_connection', ['servod_job']),
-            (_ServodControlVerifier, 'servod_control', ['servod_connection']),
-            (_DUTConnectionVerifier, 'dut_connected', ['servod_connection']),
-            (_ServoHubConnectionVerifier, 'hub_connected', ['dut_connected']),
-            (_PowerButtonVerifier, 'pwr_button', ['hub_connected']),
-            (_BatteryVerifier, 'battery', ['hub_connected']),
-            (_LidVerifier, 'lid_open', ['hub_connected']),
-            (_EcBoardVerifier, 'ec_board', ['dut_connected']),
-            (_Cr50ConsoleVerifier, 'cr50_console', ['dut_connected']),
-            (_CCDTestlabVerifier, 'ccd_testlab', ['cr50_console']),
-            (_CCDPowerDeliveryVerifier, 'power_delivery', ['dut_connected']),
-    ]
-
-    servod_deps = [
-            'servod_job', 'servo_topology', 'servod_connection',
-            'servod_control', 'dut_connected', 'hub_connected', 'pwr_button',
-            'cr50_console'
-    ]
-    repair_actions = [
-            (_DiskCleanupRepair, 'disk_cleanup', ['servo_ssh'], ['disk_space'
-                                                                 ]),
-            (_ServoMicroFlashRepair, 'servo_micro_flash',
-             ['servo_ssh', 'servo_topology'], ['dut_connected']),
-            (_RestartServod, 'restart', ['servo_ssh'], config + servod_deps),
-            (_ServoRebootRepair, 'servo_reboot', ['servo_ssh'], servod_deps),
-            (_ToggleCCLineRepair, 'servo_cc', ['servo_ssh'], servod_deps),
-            (_DutRebootRepair, 'dut_reboot', ['servod_connection'],
-             ['servod_control', 'lid_open', 'ec_board']),
-            (_ECRebootRepair, 'ec_reboot', ['servod_connection'],
-             ['servod_control', 'lid_open', 'ec_board']),
-    ]
-    return hosts.RepairStrategy(verify_dag, repair_actions, 'servo')
+    return hosts.RepairStrategy(_servo_verifier_actions(),
+                                _servo_repair_actions(), 'servo')
diff --git a/server/hosts/servo_repair_unittest.py b/server/hosts/servo_repair_unittest.py
new file mode 100644
index 0000000..93351ea
--- /dev/null
+++ b/server/hosts/servo_repair_unittest.py
@@ -0,0 +1,162 @@
+#!/usr/bin/python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=missing-docstring
+
+import unittest
+from unittest import mock
+
+import common
+from autotest_lib.server.hosts import servo_repair
+from autotest_lib.server.hosts import repair_utils
+
+SERVO_VERIFY_DAG = (
+        (servo_repair._ConnectionVerifier, 'connection', []),
+        (servo_repair._RootServoPresentVerifier, 'servo_root_present',
+         ['connection']),
+        (servo_repair._RootServoV3PresentVerifier, 'servo_v3_root_present',
+         ['connection']),
+        (servo_repair._ServoFwVerifier, 'servo_fw', ['servo_root_present']),
+        (servo_repair._StartServodVerifier, 'start_servod',
+         ['servo_fw', 'servo_v3_root_present']),
+        (servo_repair._DiskSpaceVerifier, 'servo_disk_space', ['connection']),
+        (servo_repair._UpdateVerifier, 'servo_update',
+         ['servo_v3_root_present']),
+        (servo_repair._BoardConfigVerifier, 'servo_config_board',
+         ['connection']),
+        (servo_repair._SerialConfigVerifier, 'servo_config_serial',
+         ['connection']),
+        (servo_repair._ServodJobVerifier, 'servod_started', [
+                'start_servod', 'servo_config_board', 'servo_config_serial',
+                'servo_disk_space'
+        ]),
+        (servo_repair._ServodEchoVerifier, 'servod_echo', ['servod_started']),
+        (servo_repair._TopologyVerifier, 'servo_topology', ['servod_echo']),
+        (servo_repair._ServodConnectionVerifier, 'servod_connection',
+         ['servod_echo']),
+        (servo_repair._Cr50LowSBUVerifier, 'servo_cr50_low_sbu',
+         ['servod_connection']),
+        (servo_repair.ServodDutControllerMissingVerifier,
+         'servod_dut_controller_missing', ['servod_connection']),
+        (servo_repair._Cr50OffVerifier, 'servo_cr50_off',
+         ['servod_connection']),
+        (servo_repair._ServodControlVerifier, 'servod_control',
+         ['servod_connection']),
+        (servo_repair._DUTConnectionVerifier, 'servo_dut_connected',
+         ['servod_connection']),
+        (servo_repair._ServoHubConnectionVerifier, 'servo_hub_connected',
+         ['servo_dut_connected']),
+        (servo_repair._PowerButtonVerifier, 'servo_pwr_button',
+         ['servo_hub_connected']),
+        (servo_repair._BatteryVerifier, 'servo_battery',
+         ['servo_hub_connected']),
+        (servo_repair._LidVerifier, 'servo_lid_open', ['servo_hub_connected']),
+        (servo_repair.ECConsoleVerifier, 'servo_ec_console',
+         ['servo_dut_connected']),
+        (servo_repair._Cr50ConsoleVerifier, 'servo_cr50_console',
+         ['servo_dut_connected']),
+        (servo_repair._CCDTestlabVerifier, 'servo_ccd_testlab',
+         ['servo_cr50_console']),
+        (servo_repair._CCDPowerDeliveryVerifier, 'servo_power_delivery',
+         ['servod_connection']),
+)
+
+SERVO_REPAIR_ACTIONS = (
+        (servo_repair._ServoFwUpdateRepair, 'servo_fw_update', ['connection'],
+         ['servo_fw']),
+        (servo_repair._DiskCleanupRepair, 'servo_disk_cleanup', ['connection'],
+         ['servo_disk_space']),
+        (servo_repair._ServoMicroFlashRepair, 'servo_micro_flash',
+         ['connection', 'servo_topology'], ['servo_dut_connected']),
+        (servo_repair._RestartServod, 'servod_restart',
+         ['connection', 'servo_fw'], [
+                 'servo_config_board', 'servo_config_serial', 'start_servod',
+                 'servod_started', 'servo_topology', 'servod_connection',
+                 'servod_echo', 'servod_control', 'servo_dut_connected',
+                 'servo_hub_connected', 'servo_pwr_button',
+                 'servo_cr50_console', 'servo_cr50_low_sbu', 'servo_cr50_off',
+                 'servo_power_delivery', 'servod_dut_controller_missing'
+         ]),
+        (servo_repair._ServoRebootRepair, 'servo_reboot', ['connection'], [
+                'servo_topology', 'servo_root_present', 'servo_disk_space',
+                'servo_power_delivery'
+        ]),
+        (servo_repair._PowerDeliveryRepair, 'servo_pd_recover',
+         ['servod_connection'], [
+                 'servod_started', 'servo_topology', 'servod_connection',
+                 'servod_echo', 'servod_control', 'servo_dut_connected',
+                 'servo_hub_connected', 'servo_pwr_button',
+                 'servo_cr50_console', 'servo_cr50_low_sbu', 'servo_cr50_off',
+                 'servo_power_delivery', 'servod_dut_controller_missing'
+         ]),
+        (servo_repair._FakedisconnectRepair, 'servo_fakedisconnect',
+         ['servod_connection'], [
+                 'servod_started', 'servo_topology', 'servod_connection',
+                 'servod_echo', 'servod_control', 'servo_dut_connected',
+                 'servo_hub_connected', 'servo_pwr_button',
+                 'servo_cr50_console', 'servo_cr50_low_sbu', 'servo_cr50_off',
+                 'servo_power_delivery', 'servod_dut_controller_missing'
+         ]),
+        (servo_repair._ToggleCCLineRepair, 'servo_cc', ['servod_connection'], [
+                'servod_started', 'servo_topology', 'servod_connection',
+                'servod_echo', 'servod_control', 'servo_dut_connected',
+                'servo_hub_connected', 'servo_pwr_button',
+                'servo_cr50_console', 'servo_cr50_low_sbu', 'servo_cr50_off',
+                'servo_power_delivery', 'servod_dut_controller_missing'
+        ]),
+        (servo_repair._DutRebootRepair, 'servo_dut_reboot',
+         ['servod_connection'], [
+                 'servod_control', 'servo_lid_open', 'servo_ec_console',
+                 'servo_topology', 'servo_dut_connected',
+                 'servo_hub_connected', 'servo_cr50_low_sbu', 'servo_cr50_off',
+                 'servo_cr50_console', 'servo_power_delivery',
+                 'servod_dut_controller_missing'
+         ]),
+        (servo_repair._ECRebootRepair, 'servo_ec_reboot',
+         ['servod_connection'], [
+                 'servod_control', 'servo_lid_open', 'servo_ec_console',
+                 'servo_topology', 'servo_dut_connected',
+                 'servo_hub_connected', 'servo_cr50_low_sbu', 'servo_cr50_off',
+                 'servo_cr50_console', 'servo_power_delivery',
+                 'servod_dut_controller_missing'
+         ]),
+)
+
+
+class ServoRepairUnittests(unittest.TestCase):
+
+    # Allow showing the full diff when comparing tuples.
+    maxDiff = None
+
+    def test_servo_repair_components(self):
+        verify_dag = servo_repair._servo_verifier_actions()
+        self.assertTupleEqual(verify_dag, SERVO_VERIFY_DAG)
+        self.check_verify_dag(verify_dag)
+        repair_actions = servo_repair._servo_repair_actions()
+        self.assertTupleEqual(repair_actions, SERVO_REPAIR_ACTIONS)
+        self.check_repair_actions(verify_dag, repair_actions)
+
+    def test_servo_repair_strategy(self):
+        servo_repair.create_servo_repair_strategy()
+
+    def check_verify_dag(self, verify_dag):
+        """Checks that dependency labels are defined."""
+        labels = [n[1] for n in verify_dag]
+        for node in verify_dag:
+            for dep in node[2]:
+                self.assertIn(dep, labels)
+
+    def check_repair_actions(self, verify_dag, repair_actions):
+        """Checks that dependency and trigger labels are defined."""
+        verify_labels = [n[1] for n in verify_dag]
+        for action in repair_actions:
+            deps = action[2]
+            triggers = action[3]
+            for label in deps + triggers:
+                self.assertIn(label, verify_labels)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/server/hosts/shadowing_store.py b/server/hosts/shadowing_store.py
index 9e53daa..be43081 100644
--- a/server/hosts/shadowing_store.py
+++ b/server/hosts/shadowing_store.py
@@ -9,7 +9,7 @@
 
 import common
 from autotest_lib.server.hosts import host_info
-from chromite.lib import metrics
+from autotest_lib.utils.frozen_chromite.lib import metrics
 
 
 _METRICS_PREFIX = 'chromeos/autotest/autoserv/host_info/shadowing_store/'
diff --git a/server/hosts/shadowing_store_unittest.py b/server/hosts/shadowing_store_unittest.py
index 161fd6a..57fdf22 100644
--- a/server/hosts/shadowing_store_unittest.py
+++ b/server/hosts/shadowing_store_unittest.py
@@ -5,8 +5,8 @@
 from __future__ import print_function
 from __future__ import absolute_import
 
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.server.hosts import host_info
diff --git a/server/hosts/ssh_host.py b/server/hosts/ssh_host.py
index 85eb318..7242bd3 100644
--- a/server/hosts/ssh_host.py
+++ b/server/hosts/ssh_host.py
@@ -30,7 +30,7 @@
 # In case cros_host is being ran via SSP on an older Moblab version with an
 # older chromite version.
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -87,7 +87,7 @@
         @param alive_count_max: SSH AliveCountMax.
         @param connection_attempts: SSH ConnectionAttempts
         """
-        options = " ".join([options, self._master_ssh.ssh_option])
+        options = " ".join([options, self._main_ssh.ssh_option])
         base_cmd = self.make_ssh_command(user=self.user, port=self.port,
                                          opts=options,
                                          hosts_file=self.known_hosts_file,
@@ -127,10 +127,37 @@
                    % (stack, utils.sh_escape(command), command))
         return command
 
+    def _tls_run(self, original_cmd, timeout, ignore_status, stdout, stderr,
+                 args, ignore_timeout):
+        """Helper function for run(), uses the tls client."""
+        if not self.tls_connection.alive:
+            raise error.TLSConnectionError("TLS not connected.")
+        original_cmd = ' '.join([original_cmd] +
+                                [utils.sh_quote_word(arg) for arg in args])
 
-    def _run(self, command, timeout, ignore_status,
-             stdout, stderr, connect_timeout, env, options, stdin, args,
-             ignore_timeout, ssh_failure_retry_ok):
+        try:
+            result = self.tls_exec_dut_command_client.run_cmd(
+                    original_cmd, timeout, stdout, stderr, ignore_timeout)
+        except Exception as e:
+            logging.warning("TLS Client run err %s", e)
+            raise e
+
+        if not ignore_status and result.exit_status > 0:
+            msg = result.stderr.strip()
+            if not msg:
+                msg = result.stdout.strip()
+                if msg:
+                    msg = msg.splitlines()[-1]
+            raise error.AutoservRunError(
+                    "command execution error using TLS (%d): %s" %
+                    (result.exit_status, msg), result)
+
+        return result
+
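
The `_run()` helper below prefers the TLS client while it is healthy, re-raises recognized command errors, and after any unexpected TLS failure marks the transport unstable and falls back to plain ssh for the rest of the session. A simplified standalone sketch of that policy (the callables are hypothetical stand-ins for the real transports, and recognized-error handling is omitted):

    # Illustrative sketch, not part of this patch. `tls_run` and `ssh_run` are
    # caller-supplied callables standing in for the real TLS/ssh transports.
    class FallbackRunner(object):
        def __init__(self, tls_run=None, ssh_run=None):
            self.tls_run = tls_run
            self.ssh_run = ssh_run
            self.tls_unstable = False  # set after one unexpected TLS failure

        def run(self, cmd):
            if self.tls_run and not self.tls_unstable:
                try:
                    return self.tls_run(cmd)
                except Exception:
                    # Unexpected TLS failure: stop using TLS for the rest of
                    # the session and fall through to ssh for this command.
                    self.tls_unstable = True
            return self.ssh_run(cmd)

    def flaky_tls(cmd):
        raise IOError('tls transport down')

    runner = FallbackRunner(tls_run=flaky_tls, ssh_run=lambda c: 'ok: ' + c)
    print(runner.run('echo hi'))  # 'ok: echo hi' (fell back to ssh)
    print(runner.tls_unstable)    # True: TLS is skipped from now on
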
+    def _run(self, command, timeout, ignore_status, stdout, stderr,
+             connect_timeout, env, options, stdin, args, ignore_timeout,
+             ssh_failure_retry_ok, verbose):
         """Helper function for run()."""
         if connect_timeout > timeout:
             # timeout passed from run() may be smaller than 1, because we
@@ -138,6 +165,33 @@
             connect_timeout = max(int(timeout), 1)
         original_cmd = command
 
+        # If TLS client has been built, and not marked as unstable, use it.
+        # NOTE: if the tls_enabled setting in the config is not True, the
+        # client will not have been built.
+        use_tls = self.tls_exec_dut_command_client and not self.tls_unstable
+
+        if verbose:
+            stack = self._get_server_stack_state(lowest_frames=2,
+                                                 highest_frames=8)
+
+            logging.debug("Running (via %s) '%s' from '%s'",
+                          'TLS' if use_tls else 'SSH', command, stack)
+            command = self._verbose_logger_command(command)
+
+        if use_tls:
+            try:
+                return self._tls_run(command, timeout, ignore_status, stdout,
+                                     stderr, args, ignore_timeout)
+            except (error.AutoservRunError, error.CmdTimeoutError) as e:
+                raise e
+            except Exception as e:
+                # If TLS fails for unknown reason, we will revert to normal ssh.
+                logging.warning(
+                        "Unexpected TLS cmd failed. Reverting to SSH.\n %s", e)
+
+                # Mark TLS as unstable so we do not attempt to restart it.
+                self.tls_unstable = True
+
         ssh_cmd = self.ssh_command(connect_timeout, options)
         if not env.strip():
             env = ""
@@ -147,17 +201,6 @@
             command += ' "%s"' % utils.sh_escape(arg)
         full_cmd = '%s "%s %s"' % (ssh_cmd, env, utils.sh_escape(command))
 
-        # TODO(jrbarnette):  crbug.com/484726 - When we're in an SSP
-        # container, sometimes shortly after reboot we will see DNS
-        # resolution errors on ssh commands; the problem never
-        # occurs more than once in a row.  This especially affects
-        # the autoupdate_Rollback test, but other cases have been
-        # affected, too.
-        #
-        # We work around it by detecting the first DNS resolution error
-        # and retrying exactly one time.
-        dns_error_retry_count = 1
-
         def counters_inc(counter_name, failure_name):
             """Helper function to increment metrics counters.
             @param counter_name: string indicating which counter to use
@@ -181,7 +224,7 @@
                 run_counter.increment(fields=fields)
 
         # If ssh_failure_retry_ok is True, retry twice on timeouts and generic
-        # error 255: if a simple retry doesn't work, kill the ssh master
+        # error 255: if a simple retry doesn't work, kill the ssh main
         # connection and try again.  (Note that either error could come from
         # the command running in the DUT, in which case the retry may be
         # useless but, in theory, also harmless.)
@@ -234,10 +277,7 @@
             if failure_name:
                 # There was a failure: decide whether to retry.
                 if failure_name == 'dns_failure':
-                    if dns_error_retry_count > 0:
-                        logging.debug('retrying ssh because of DNS failure')
-                        dns_error_retry_count -= 1
-                        continue
+                    raise error.AutoservSshDnsError("DNS Failure: ", result)
                 else:
                     if ssh_failure_retry_count == 2:
                         logging.debug('retrying ssh command after %s',
@@ -245,10 +285,14 @@
                         ssh_failure_retry_count -= 1
                         continue
                     elif ssh_failure_retry_count == 1:
-                        # After two failures, restart the master connection
+                        # After two failures, restart the main connection
                         # before the final try.
-                        logging.debug('retry 2: restarting master connection')
-                        self.restart_master_ssh()
+                        stack = self._get_server_stack_state(lowest_frames=1,
+                                                             highest_frames=7)
+                        logging.debug(
+                                'retry 2: restarting main connection from \'%s\'',
+                                stack)
+                        self.restart_main_ssh()
                         # Last retry: reinstate timeout behavior.
                         ignore_timeout = original_ignore_timeout
                         ssh_failure_retry_count -= 1
@@ -321,7 +365,7 @@
         @param ssh_failure_retry_ok: True if the command may be retried on
                 probable ssh failure (error 255 or timeout).  When true,
                 the command may be executed up to three times, the second
-                time after restarting the ssh master connection.  Use only for
+                time after restarting the ssh main connection.  Use only for
                 commands that are idempotent, because when a "probable
                 ssh failure" occurs, we cannot tell if the command executed
                 or not.
@@ -337,17 +381,12 @@
         if timeout is None:
             timeout = self._default_run_timeout
         start_time = time.time()
-        with metrics.SecondsTimer('chromeos/autotest/ssh/master_ssh_time',
+        with metrics.SecondsTimer('chromeos/autotest/ssh/main_ssh_time',
                                   scale=0.001):
-            if verbose:
-                stack = self._get_server_stack_state(lowest_frames=1,
-                                                     highest_frames=7)
-                logging.debug("Running (ssh) '%s' from '%s'", command, stack)
-                command = self._verbose_logger_command(command)
 
-            self.start_master_ssh(min(
+            self.start_main_ssh(min(
                     timeout,
-                    self.DEFAULT_START_MASTER_SSH_TIMEOUT_S,
+                    self.DEFAULT_START_MAIN_SSH_TIMEOUT_S,
             ))
 
             env = " ".join("=".join(pair) for pair in six.iteritems(self.env))
@@ -356,7 +395,7 @@
                 return self._run(command, timeout - elapsed, ignore_status,
                                  stdout_tee, stderr_tee, connect_timeout, env,
                                  options, stdin, args, ignore_timeout,
-                                 ssh_failure_retry_ok)
+                                 ssh_failure_retry_ok, verbose)
             except error.CmdError as cmderr:
                 # We get a CmdError here only if there is timeout of that
                 # command. Catch that and stuff it into AutoservRunError and
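
The rewritten _run() above prefers the TLS ExecDutCommand client and falls
back to plain ssh only when TLS fails for an unexpected reason, after which
tls_unstable keeps every later command on ssh; real command failures
(AutoservRunError, CmdTimeoutError) still propagate to the caller. A minimal
sketch of that fallback pattern (CommandRunner, tls_client and ssh_run are
hypothetical placeholders, not the autotest API):

    import logging

    class CommandRunner(object):
        """Sketch: prefer a TLS transport, fall back to ssh once TLS misbehaves."""

        def __init__(self, tls_client, ssh_run):
            self._tls_client = tls_client   # hypothetical TLS ExecDutCommand wrapper
            self._ssh_run = ssh_run         # callable(cmd, timeout) -> result
            self._tls_unstable = False      # once True, TLS is never tried again

        def run(self, cmd, timeout):
            if self._tls_client and not self._tls_unstable:
                try:
                    return self._tls_client.run_cmd(cmd, timeout)
                except Exception as e:
                    # Unknown transport failure: log it, mark TLS unstable, and
                    # fall through to ssh for this and all later commands.
                    logging.warning("TLS command failed, reverting to ssh: %s", e)
                    self._tls_unstable = True
            return self._ssh_run(cmd, timeout)

Marking the transport unstable on the first unexpected failure avoids paying
the TLS error path again on every subsequent command in the job.
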
diff --git a/server/hosts/ssh_multiplex.py b/server/hosts/ssh_multiplex.py
index dbaf3e6..74db8d9 100644
--- a/server/hosts/ssh_multiplex.py
+++ b/server/hosts/ssh_multiplex.py
@@ -16,22 +16,24 @@
 from autotest_lib.server import utils
 import six
 
-_MASTER_SSH_COMMAND_TEMPLATE = (
-    '/usr/bin/ssh -a -x -N '
-    '-o ControlMaster=yes '  # Create multiplex socket.
-    '-o ControlPath=%(socket)s '
-    '-o StrictHostKeyChecking=no '
-    '-o UserKnownHostsFile=/dev/null '
-    '-o BatchMode=yes '
-    '-o ConnectTimeout=30 '
-    '-o ServerAliveInterval=30 '
-    '-o ServerAliveCountMax=1 '
-    '-o ConnectionAttempts=1 '
-    '-o Protocol=2 '
-    '-l %(user)s -p %(port)d %(hostname)s')
+# TODO(b:169251326): the terms below are set outside of this codebase
+# and should be updated when possible ("master" -> "main").
+_MAIN_SSH_COMMAND_TEMPLATE = (
+        '/usr/bin/ssh -a -x -N '
+        '-o ControlMaster=yes '  # Create multiplex socket. # nocheck
+        '-o ControlPath=%(socket)s '
+        '-o StrictHostKeyChecking=no '
+        '-o UserKnownHostsFile=/dev/null '
+        '-o BatchMode=yes '
+        '-o ConnectTimeout=30 '
+        '-o ServerAliveInterval=30 '
+        '-o ServerAliveCountMax=1 '
+        '-o ConnectionAttempts=1 '
+        '-o Protocol=2 '
+        '-l %(user)s %(port)s %(hostname)s')
 
 
-class MasterSsh(object):
+class MainSsh(object):
     """Manages multiplex ssh connection."""
 
     def __init__(self, hostname, user, port):
@@ -39,8 +41,8 @@
         self._user = user
         self._port = port
 
-        self._master_job = None
-        self._master_tempdir = None
+        self._main_job = None
+        self._main_tempdir = None
 
         self._lock = multiprocessing.Lock()
 
@@ -49,7 +51,7 @@
 
     @property
     def _socket_path(self):
-        return os.path.join(self._master_tempdir.name, 'socket')
+        return os.path.join(self._main_tempdir.name, 'socket')
 
     @property
     def ssh_option(self):
@@ -57,7 +59,7 @@
 
         If background process is not running, returns an empty string.
         """
-        if not self._master_tempdir:
+        if not self._main_tempdir:
             return ''
         return '-o ControlPath=%s' % (self._socket_path,)
 
@@ -68,43 +70,43 @@
         If there is a stale process or a stale socket, first clean them up,
         then create a background process.
 
-        @param timeout: timeout in seconds (default 5) to wait for master ssh
+        @param timeout: timeout in seconds (default 5) to wait for main ssh
                         connection to be established. If timeout is reached, a
                         warning message is logged, but no other action is
                         taken.
         """
-        # Multiple processes might try in parallel to clean up the old master
+        # Multiple processes might try in parallel to clean up the old main
         # ssh connection and create a new one, therefore use a lock to protect
         # against race conditions.
         with self._lock:
-            # If a previously started master SSH connection is not running
+            # If a previously started main SSH connection is not running
             # anymore, it needs to be cleaned up and then restarted.
-            if (self._master_job and (not os.path.exists(self._socket_path) or
-                                      self._master_job.sp.poll() is not None)):
+            if (self._main_job and (not os.path.exists(self._socket_path) or
+                                      self._main_job.sp.poll() is not None)):
                 logging.info(
-                        'Master ssh connection to %s is down.', self._hostname)
+                        'Main-ssh connection to %s is down.', self._hostname)
                 self._close_internal()
 
-            # Start a new master SSH connection.
-            if not self._master_job:
+            # Start a new main SSH connection.
+            if not self._main_job:
                 # Create a shared socket in a temp location.
-                self._master_tempdir = autotemp.tempdir(dir=_short_tmpdir())
+                self._main_tempdir = autotemp.tempdir(dir=_short_tmpdir())
 
-                # Start the master SSH connection in the background.
-                master_cmd = _MASTER_SSH_COMMAND_TEMPLATE % {
+                # Start the main SSH connection in the background.
+                main_cmd = _MAIN_SSH_COMMAND_TEMPLATE % {
                         'hostname': self._hostname,
                         'user': self._user,
-                        'port': self._port,
+                        'port': "-p %s" % self._port if self._port else "",
                         'socket': self._socket_path,
                 }
                 logging.info(
-                        'Starting master ssh connection \'%s\'', master_cmd)
-                self._master_job = utils.BgJob(
-                         master_cmd, nickname='master-ssh',
-                         stdout_tee=utils.DEVNULL, stderr_tee=utils.DEVNULL,
-                         unjoinable=True)
+                    'Starting main-ssh connection \'%s\'', main_cmd)
+                self._main_job = utils.BgJob(
+                    main_cmd, nickname='main-ssh',
+                    stdout_tee=utils.DEVNULL, stderr_tee=utils.DEVNULL,
+                    unjoinable=True)
 
-                # To prevent a race between the master ssh connection
+                # To prevent a race between the main ssh connection
                 # startup and its first attempted use, wait for socket file to
                 # exist before returning.
                 try:
@@ -112,7 +114,7 @@
                             condition=lambda: os.path.exists(self._socket_path),
                             timeout=timeout,
                             sleep_interval=0.2,
-                            desc='master-ssh connection up')
+                            desc='main-ssh connection up')
                 except utils.TimeoutError:
                     # poll_for_conditional already logs an error upon timeout
                     pass
@@ -125,15 +127,15 @@
 
     def _close_internal(self):
         # Assume that when this is called, _lock should be acquired, already.
-        if self._master_job:
-            logging.debug('Nuking ssh master_job')
-            utils.nuke_subprocess(self._master_job.sp)
-            self._master_job = None
+        if self._main_job:
+            logging.debug('Nuking ssh main_job')
+            utils.nuke_subprocess(self._main_job.sp)
+            self._main_job = None
 
-        if self._master_tempdir:
-            logging.debug('Cleaning ssh master_tempdir')
-            self._master_tempdir.clean()
-            self._master_tempdir = None
+        if self._main_tempdir:
+            logging.debug('Cleaning ssh main_tempdir')
+            self._main_tempdir.clean()
+            self._main_tempdir = None
 
 
 class ConnectionPool(object):
@@ -144,7 +146,7 @@
         self._lock = threading.Lock()
 
     def get(self, hostname, user, port):
-        """Returns MasterSsh instance for the given endpoint.
+        """Returns MainSsh instance for the given endpoint.
 
         If the pool holds the instance already, returns it. If not, create the
         instance, and returns it.
@@ -156,13 +158,13 @@
         @param port: Port number sshd is listening.
         """
         key = (hostname, user, port)
-        logging.debug('Get master ssh connection for %s@%s:%d', user, hostname,
-                      port)
+        logging.debug('Get main ssh connection for %s@%s%s', user, hostname,
+                      ":%s" % port if port else "")
 
         with self._lock:
             conn = self._pool.get(key)
             if not conn:
-                conn = MasterSsh(hostname, user, port)
+                conn = MainSsh(hostname, user, port)
                 self._pool[key] = conn
             return conn
 
@@ -178,7 +180,7 @@
     # /tmp.
     # So use a shared parent directory in /tmp
     user = os.environ.get("USER", "no_USER")[:8]
-    d = '/tmp/ssh-master_%s' % user
+    d = '/tmp/ssh-main_%s' % user
     if not os.path.exists(d):
         os.mkdir(d)
     return d
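
ssh_multiplex keeps one background 'ssh -N -o ControlMaster=yes' process per
(hostname, user, port) and exposes its socket through ssh_option, so later ssh
invocations multiplex over the shared socket instead of re-handshaking. A
rough usage sketch; the start method behind the docstring above is assumed to
be named maybe_start() (its def line is outside this hunk), and
run_over_multiplexed() is a hypothetical helper:

    import subprocess

    from autotest_lib.server.hosts import ssh_multiplex

    _POOL = ssh_multiplex.ConnectionPool()

    def run_over_multiplexed(hostname, user, port, remote_cmd, timeout=30):
        """Hypothetical helper: run one ssh command over the pooled main-ssh socket."""
        conn = _POOL.get(hostname, user, port)
        # Assumed method name; it brings up the background multiplexed ssh
        # process and waits for its control socket to appear.
        conn.maybe_start(timeout=5)
        opt = conn.ssh_option  # '-o ControlPath=<socket>' when the main ssh is up
        cmd = '/usr/bin/ssh %s -l %s %s %s' % (opt, user, hostname, remote_cmd)
        return subprocess.run(cmd, shell=True, timeout=timeout)
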
diff --git a/server/hosts/ssh_multiplex_unittest.py b/server/hosts/ssh_multiplex_unittest.py
index 2cfe29e..03ff9cd 100755
--- a/server/hosts/ssh_multiplex_unittest.py
+++ b/server/hosts/ssh_multiplex_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -13,7 +13,7 @@
 class ConnectionPoolTest(unittest.TestCase):
     """ Test for SSH Connection Pool """
     def test_get(self):
-        """ We can get MasterSsh object for a host from the pool """
+        """ We can get MainSsh object for a host from the pool """
         p = ssh_multiplex.ConnectionPool()
         conn1 = p.get('host', 'user', 22)
         self.assertIsNotNone(conn1)
diff --git a/server/hosts/teststation_host.py b/server/hosts/teststation_host.py
index e2fac55..79d3dd3 100644
--- a/server/hosts/teststation_host.py
+++ b/server/hosts/teststation_host.py
@@ -128,7 +128,7 @@
                  ignore_timeout is True.
         """
         # TODO (sbasi/kevcheng) - Make teststation_host check if running
-        # on Chrome OS, rather than MobLab when prepending sudo to fastboot.
+        # on ChromeOS, rather than MobLab when prepending sudo to fastboot.
         if cmd.startswith('fastboot ') and self.is_moblab:
             cmd = 'sudo -n ' + cmd
         if force_tty:
diff --git a/server/hosts/tls_client/OWNERS b/server/hosts/tls_client/OWNERS
new file mode 100644
index 0000000..bf13e9b
--- /dev/null
+++ b/server/hosts/tls_client/OWNERS
@@ -0,0 +1,2 @@
+# ChromeOS Core Automation
+dbeckett@chromium.org
\ No newline at end of file
diff --git a/server/hosts/tls_client/README b/server/hosts/tls_client/README
new file mode 100644
index 0000000..3d4a08e
--- /dev/null
+++ b/server/hosts/tls_client/README
@@ -0,0 +1,10 @@
+Protos in this dir are generated using buildprotos.py and manually checked
+into the tree. The source protos are
+src/config/proto/chromiumos/config/api/test/tls/commontls.proto and
+src/config/proto/chromiumos/config/api/test/tls/dependencies/longrunning/operations.proto,
+relative to a standard repo checkout.
+(https://chromium.git.corp.google.com/chromiumos/config/+/HEAD/proto/chromiumos/config/api/test/tls/commontls.proto)
+If the source protos are updated, the protos here must be regenerated: run
+buildprotos.py to rebuild them from the source, and repo sync manually before
+doing so. If the file structure in the source changes, buildprotos.py will
+need to be updated accordingly.
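
buildprotos.py itself is not part of this change, so the following is only a
sketch of what a grpcio-tools based rebuild roughly looks like. The paths are
assumptions, and the real script additionally rewrites imports and renames the
output to autotest_common_pb2.py:

    import pkg_resources
    from grpc_tools import protoc

    # Hypothetical location of the synced source protos.
    PROTO_ROOT = 'src/config/proto/chromiumos/config/api/test/tls'
    # grpcio-tools ships the well-known types (google/protobuf/empty.proto).
    WELL_KNOWN = pkg_resources.resource_filename('grpc_tools', '_proto')

    protoc.main([
            'protoc',
            '-I%s' % PROTO_ROOT,
            '-I%s' % WELL_KNOWN,
            '--python_out=.',
            '--grpc_python_out=.',
            '%s/commontls.proto' % PROTO_ROOT,
    ])
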
diff --git a/server/hosts/drone_api_client/__init__.py b/server/hosts/tls_client/__init__.py
similarity index 100%
rename from server/hosts/drone_api_client/__init__.py
rename to server/hosts/tls_client/__init__.py
diff --git a/server/hosts/tls_client/autotest_common.proto b/server/hosts/tls_client/autotest_common.proto
new file mode 100644
index 0000000..6b842cc
--- /dev/null
+++ b/server/hosts/tls_client/autotest_common.proto
@@ -0,0 +1,431 @@
+// Copyright 2019 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto3";
+
+package chromiumos.config.api.test.tls;
+
+option go_package = "go.chromium.org/chromiumos/config/go/api/test/tls";
+
+import "google/protobuf/empty.proto";
+
+import "dependencies/longrunning/operations.proto";
+
+// Common lab services implemented on top of the wiring APIs.
+//
+// The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL
+// NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED",  "MAY", and
+// "OPTIONAL" in this document are to be interpreted as described in
+// RFC 2119.
+//
+// All clients SHOULD pass the gRPC metadata key request_trace_id with one
+// value. The value is a unique string that is associated with the method call
+// in metrics. Clients that do not pass request_trace_id MAY be rejected so that
+// they can be fixed.
+service Common {
+  // ExecDutCommand runs a command on a DUT.
+  //
+  // The working directory is /.
+  // A tty is not spawned for the command.
+  // The user and group is root.
+  // All signals have their default dispositions and are not masked.
+  // The umask is set to 0.
+  //
+  // The environment contains:
+  //
+  //   TERM=dumb
+  //   PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/bin
+  //   LANG=en_US.UTF-8
+  //   USER=root
+  //   HOME=/root
+  //
+  // The environment MAY also contain SSH client variables.
+  // The environment SHALL NOT contain variables not mentioned above.
+  //
+  // If the stream is interrupted, the implementation MAY attempt to
+  // stop the command by sending SIGINT, SIGHUP, SIGTERM, or SIGKILL.
+  rpc ExecDutCommand(ExecDutCommandRequest)
+      returns (stream ExecDutCommandResponse);
+
+  // ProvisionDut installs a specified version of ChromeOS on the DUT, along
+  // with any specified DLCs.
+  //
+  // If the DUT is already on the specified version of ChromeOS, the OS will
+  // not be provisioned.
+  //
+  // If the DUT already has the specified list of DLCs, only the missing DLCs
+  // will be provisioned.
+  rpc ProvisionDut(ProvisionDutRequest) returns (google.longrunning.Operation) {
+    option (google.longrunning.operation_info) = {
+      response_type: "ProvisionDutResponse",
+      metadata_type: "ProvisionDutMetadata"
+    };
+  }
+
+  // ProvisionLacros installs a specified version of Lacros on the DUT.
+  //
+  // If the DUT already has the specified version of Lacros, Lacros will not be
+  // provisioned.
+  rpc ProvisionLacros(ProvisionLacrosRequest) returns (google.longrunning.Operation) {
+    option (google.longrunning.operation_info) = {
+      response_type: "ProvisionLacrosResponse",
+      metadata_type: "ProvisionLacrosMetadata"
+    };
+  }
+
+  // FetchCrashes gets a stream of all crash reports currently on the DUT.
+  //
+  // The stream returned may split up a crash over multiple
+  // `FetchCrashesResponse` protos. See the definition of that proto for
+  // details.
+  //
+  // This call is read-only: it doesn't delete the crashes that it reads.
+  rpc FetchCrashes(FetchCrashesRequest) returns (stream FetchCrashesResponse);
+
+  // CreateFakeOmaha starts a fake Omaha service on TLS and exposes the
+  // listened port to the DUT.
+  rpc CreateFakeOmaha(CreateFakeOmahaRequest) returns (FakeOmaha);
+  // DeleteFakeOmaha deletes the specified fake Omaha resource created by
+  // CreateFakeOmaha.
+  rpc DeleteFakeOmaha(DeleteFakeOmahaRequest) returns (google.protobuf.Empty);
+}
+
+message ExecDutCommandRequest {
+  // name is the resource name for the DUT.
+  // The DUT name is passed to the RTD when the RTD is started.
+  // It is not specified whether the name is the DUT hostname.
+  string name = 1;
+  // command is the command to run.
+  // If this contains no slashes, it is resolved using PATH.
+  // If this starts with /, it is used as an absolute path to the
+  // program to run.
+  // Otherwise, this is treated as a path relative to the working
+  // directory.
+  string command = 2;
+  // args are the arguments to pass to the command.
+  repeated string args = 3;
+  // stdin is passed to the command as the program's stdin.
+  // The stream does not support seeking.
+  // An empty bytes is not treated specially; if the command reads
+  // from stdin, it will receive zero bytes.
+  bytes stdin = 4;
+  // stdout indicates how to handle the command's stdout.
+  Output stdout = 5;
+  // stderr indicates how to handle the command's stderr.
+  Output stderr = 6;
+}
+message ExecDutCommandResponse {
+  message ExitInfo {
+    // status provides information about how the command process
+    // terminated.
+    //
+    // If the command failed to start, status is set to an arbitrary
+    // non-zero value.
+    //
+    // If signaled is set, status is set to the signal that caused
+    // the command to terminate.
+    //
+    // Otherwise, status is set to the exit status of the process.
+    // Exit statuses outside of 0 to 255 inclusive are not supported;
+    // they will be mapped to an arbitrary non-zero value.
+    //
+    // status is zero if and only if the process was successfully
+    // started and exited with a zero status.
+    int32 status = 1;
+    // signaled indicates whether the command exited due to a signal.
+    // If set, status contains the signal.
+    bool signaled = 2;
+    // started indicates whether the command was started.
+    bool started = 3;
+    // error_message provides a human readable explanation for some errors.
+    // This MUST NOT be inspected by programs.
+    string error_message = 4;
+  }
+  // exit_info contains exit information.
+  // This is set when the command has exited or failed to start.
+  // This is set on the last message in the response stream.
+  ExitInfo exit_info = 1;
+  // stdout contains the shell command's stdout output since the last
+  // response in the stream.
+  // The implementation MAY batch or delay output to later
+  // responses in the stream.
+  bytes stdout = 2;
+  // stderr contains the shell command's stderr output since the last
+  // response in the stream.
+  // The implementation MAY batch or delay output to later
+  // responses in the stream.
+  bytes stderr = 3;
+}
+
+// Output enumeration for ExecDutCommandRequest.
+enum Output {
+  // OUTPUT_PIPE means to collect output and return it.
+  OUTPUT_PIPE = 0;
+  // OUTPUT_STDOUT is a special value for stderr which means to merge stderr
+  // into stdout.
+  OUTPUT_STDOUT = 1;
+}
+
+message ProvisionDutRequest {
+  // name is the resource name for the DUT.
+  // The DUT name is passed to the RTD when the RTD is started.
+  // It is not specified whether the name is the DUT hostname.
+  string name = 1;
+
+  // TODO(crbug.com/1155247) Deprecate this nested message and replace with
+  // top level ChromeOsImage.
+  message ChromeOSImage {
+    oneof path_oneof {
+      // gs_path_prefix is the GS path to where kernel, rootfs, and stateful
+      // images are located. If DLCs are to be provisioned, it must be a GS path
+      // that also has the dlc directory.
+      // Only gs://chromeos-image-archive bucket is supported.
+      // For example the format should be:
+      // - gs://chromeos-image-archive/eve-release/R86-13380.0.0
+      string gs_path_prefix = 1;
+    }
+  }
+  // image specifies the ChromeOS image with which to provision the DUT.
+  ChromeOSImage image = 2;
+
+  // Reference DLCs developer documentation:
+  // https://source.corp.google.com/chromeos_public/src/platform2/dlcservice/docs/developer.md
+  message DLCSpec {
+    // id is the DLC ID which is a unique identifier.
+    // The DLC ID must follow a specific format that can be found in the DLC
+    // developer doc below.
+    string id = 1;
+  }
+  // dlc_specs specifies which DLCs to install on the DUT after provisioning.
+  repeated DLCSpec dlc_specs = 3;
+  // preserve_stateful specifies whether the stateful partition should be preserved during
+  // provisioning. If preserve_stateful is not set to true, the stateful partition is
+  // block-level wiped and reset during provisioning.
+  bool preserve_stateful = 4;
+}
+
+message ProvisionDutResponse {
+  // When the status code is other than OK, details in Status message should be
+  // parsed for ErrorInfo message with the following Reasons as the reason.
+  enum Reason {
+    // status code: INVALID_ARGUMENT
+    REASON_INVALID_REQUEST = 0;
+    // status code: FAILED_PRECONDITION
+    REASON_DUT_UNREACHABLE_PRE_PROVISION = 1;
+    // status code: FAILED_PRECONDITION
+    REASON_DOWNLOADING_IMAGE_FAILED = 2;
+    // status code: DEADLINE_EXCEEDED
+    REASON_PROVISIONING_TIMEDOUT = 3;
+    // status code: ABORTED
+    REASON_PROVISIONING_FAILED = 4;
+    // status code: ABORTED
+    REASON_DUT_UNREACHABLE_POST_PROVISION = 5;
+  }
+}
+
+message ProvisionDutMetadata {
+}
+
+message ProvisionLacrosRequest {
+  // name is the resource name for the DUT.
+  // The DUT name is passed to the RTD when the RTD is started.
+  // It is not specified whether the name is the DUT hostname.
+  string name = 1;
+
+  message LacrosImage {
+    oneof path_oneof {
+      // gs_path_prefix is the GS path prefix to where Lacros is located.
+      string gs_path_prefix = 1;
+    }
+  }
+  // image specifies the Lacros image with which to provision the DUT.
+  LacrosImage image = 2;
+}
+
+message ProvisionLacrosResponse {
+  // When the status code is other than OK, details in Status message should be
+  // parsed for ErrorInfo message with the following Reasons as the reason.
+  enum Reason {
+    // Failed as the ProvisionLacros request is invalid.
+    REASON_INVALID_REQUEST = 0;
+    // Failed to connect to the DUT prior to provisioning Lacros.
+    REASON_DUT_UNREACHABLE_PRE_PROVISION = 1;
+    // Failed to download the Lacros image or a timeout during download.
+    REASON_DOWNLOADING_IMAGE_FAILED = 2;
+    // Failed due to a timeout during the main Lacros provisioning.
+    // Excludes timeout during other steps.
+    REASON_PROVISIONING_TIMEDOUT = 3;
+    // General failure in Lacros provisioning.
+    REASON_PROVISIONING_FAILED = 4;
+  }
+}
+
+message ProvisionLacrosMetadata {
+}
+
+message FetchCrashesRequest {
+    // dut is the resource name for the DUT from which to fetch crashes.
+    // The DUT name is passed to the RTD when the RTD is started.
+    // It is not specified whether the name is the DUT hostname.
+    string dut = 1;
+    // If true, fetch the core file.
+    // For uploads to the crash server, that should generally be false.
+    // If the crash file is likely to be used for manual debugging (e.g. on
+    // a manually-invoked test suite run), this might be true.
+    // Coredumps can be extremely large (even gigabytes), so if resource usage
+    // is a concern, this should probably be false.
+    bool fetch_core = 2;
+}
+
+// When this response is streamed, the first proto with a given crash ID will
+// always contain the CrashInfo.
+// Files and core dumps (if present) may be streamed. If they are,
+// subsequent protos with the same crash ID will follow, each containing a chunk
+// of file/coredump. To reassemble these, concatenate the bytes received from
+// each subsequent proto with a matching crash_id (concatenate blobs that have
+// matching crash_ids and keys).
+// Additional crashes may be reported in the same stream with a new crash ID.
+message FetchCrashesResponse {
+    // Crash id, unique only within responses to a single FetchCrashes request.
+    // Used to assemble multiple streamed |FetchCrashesResponse| protos into a
+    // single crash report.
+    int64 crash_id = 1;
+    oneof data {
+      // Full details of crash report.
+      CrashInfo crash = 2;
+      // Misc file (e.g. minidump, large binary log, etc)
+      CrashBlob blob = 3;
+      // Coredump. Present iff fetch_core was true in FetchCrashesRequest and
+      // the crash has a coredump. (kernel warnings, for example, do not have
+      // one).
+      bytes core = 4;
+    }
+}
+
+// The data in this proto matches the metadata from crash-reporter's meta files.
+// Sender::CreateCrashFormData puts this data into crash upload POST requests.
+// (See src/platform2/crash-reporter/crash_sender_util.cc.)
+// The names in this proto MUST match the names that crash-reporter uses so
+// that, when crashes are uploaded to the crash server, they are interpreted
+// as they are when crash-reporter uploads them.
+// Similarly, when this proto is converted into a POST request to send to the
+// crash server, the names must not be altered.
+message CrashInfo {
+    // Name of executable that crashed (e.g. "chrome")
+    string exec_name = 1;
+    // Product name (e.g. "Chrome_ChromeOS" or "ChromeOS")
+    string prod = 2;
+    // Product version (e.g. "12345.0.0")
+    string ver = 3;
+    // Crash signature (may not be populated for all crashes)
+    string sig = 4;
+    // The name of the integration test that was running when this crash
+    // happened, if any.
+    string in_progress_integration_test = 5;
+    // The name of the collector (e.g. chrome_collector, arc_collector)
+    string collector = 6;
+    // Additional key-value pairs of metadata (e.g. "crash_loop_mode = true").
+    // These should be included in any POSTs to the crash server in a standard
+    // POST form, as seen in CreateCrashFormData.
+    // (despite the fact that this message is a subfield, it should be a flat
+    // structure in any POSTs).
+    repeated CrashMetadata fields = 7;
+}
+
+// Arbitrary text-only key-value pair corresponding to the key-value pairs in
+// crash report metadata files.
+message CrashMetadata {
+    // This value is a UTF8, human-readable, description of the data.
+    string key = 1;
+    // The value will be a human-readable string (e.g. "12345.0.0"), which must
+    // be valid UTF-8.
+    string text = 2;
+};
+
+// Arbitrary non-UTF8 key-value pair from crash report metadata files.
+message CrashBlob {
+    // This value is a UTF8, human-readable, description of the data.
+    // This should be passed as the 'name' to the crash server.
+    // For instance, upload_file_fake_payload
+    string key = 1;
+    // The value is a blob (e.g. a file from sysfs or a minidump), which need
+    // not be valid UTF-8, and may be large.
+    bytes blob = 2;
+    // The basename of the file. Must be specified as the filename in data
+    // uploaded to the crash server.
+    // e.g. foo_binary.20201027.102345.0.dmp
+    string filename = 3;
+};
+
+message ChromeOsImage {
+  oneof path_oneof {
+    // gs_path_prefix is the GS path to where the payloads are located. For
+    // example the format MAY be:
+    // gs://chromeos-image-archive/eve-release/R86-13380.0.0
+    string gs_path_prefix = 1;
+  }
+}
+
+message FakeOmaha {
+  // name is the resource name of the fake Omaha service.
+  // Format: fakeOmaha/{fake-omaha-id}
+  // The implementation MUST set it after creating the fake Omaha service.
+  // Clients SHOULD NOT set it.
+  string name = 1;
+  // dut is the resource name for the DUT.
+  // The DUT name is passed to the RTD when the RTD is started.
+  // It is not specified whether the name is the DUT hostname.
+  string dut = 2;
+
+  // target_build is the ChromeOS build that the fake Omaha service will serve
+  // payloads for.
+  ChromeOsImage target_build = 3;
+
+  message Payload {
+    enum Type {
+      TYPE_UNSPECIFIED = 0;
+      FULL = 1;
+      DELTA = 2;
+    }
+    // id is the id of the payload. It MAY be "ROOTFS" or a DLC id, etc.
+    string id = 1;
+    // type is the payload type, e.g. TYPE_FULL or TYPE_DELTA.
+    Type type = 2;
+  }
+  // payloads lists the payloads that can be served by the fake Omaha service.
+  repeated Payload payloads = 4;
+  // exposed_via_proxy indicates that the fake Omaha service is exposed to a
+  // DUT via a proxy server, instead of exposing to the DUT directly. So the
+  // service exposing won't be impacted by rebooting the DUT, disconnecting the
+  // DUT network, etc.
+  bool exposed_via_proxy = 5;
+  // critical_update, if set, instructs the created fake Omaha service to
+  // mark the update as critical.
+  bool critical_update = 6;
+  // return_noupdate_starting indicates from which update check to start returning noupdate.
+  // It MUST be 0 or greater.
+  // When set to 0 (the default value), disables returning noupdate.
+  // If set to positive N, returns noupdate for the Nth check and for every
+  // check thereafter.
+  // For example, if set to 1, returns noupdate starting from the first check,
+  // i.e., always returns noupdate.
+  int32 return_noupdate_starting = 7;
+  // omaha_url is the current fake Omaha service URL which is reachable from
+  // the specified DUT.
+  // The URL can be used as input of the update engine client of the DUT.
+  // The implementation MUST set it after creating the fake Omaha service.
+  // Clients SHOULD NOT set it.
+  string omaha_url = 8;
+}
+
+message CreateFakeOmahaRequest {
+  // fake_omaha is the fake omaha service to be created.
+  FakeOmaha fake_omaha = 1;
+}
+
+message DeleteFakeOmahaRequest {
+  // The resource name of the fake Omaha service to stop.
+  // Format: fakeOmahaServices/{fake-omaha-id}
+  string name = 1;
+}
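
ExecDutCommand returns a stream of ExecDutCommandResponse messages: stdout and
stderr arrive in chunks, exit_info is populated only on the final message, and
the service comment above asks clients to send a request_trace_id metadata
value. A minimal client sketch under those rules; the gRPC address and the
autotest_common_pb2_grpc / CommonStub names are assumptions, not taken from
this change:

    import uuid

    import grpc

    from autotest_lib.server.hosts.tls_client import autotest_common_pb2
    # Assumed name of the generated gRPC stub module.
    from autotest_lib.server.hosts.tls_client import autotest_common_pb2_grpc

    def exec_dut_command(address, dut_name, command, args=()):
        """Run one command on a DUT over TLS; return (exit_status, stdout bytes)."""
        channel = grpc.insecure_channel(address)
        stub = autotest_common_pb2_grpc.CommonStub(channel)
        request = autotest_common_pb2.ExecDutCommandRequest(
                name=dut_name,
                command=command,
                args=list(args),
                stdout=autotest_common_pb2.OUTPUT_PIPE,
                stderr=autotest_common_pb2.OUTPUT_STDOUT)  # merge stderr into stdout
        stdout = b''
        exit_status = -1
        responses = stub.ExecDutCommand(
                request, metadata=[('request_trace_id', str(uuid.uuid4()))])
        for response in responses:
            stdout += response.stdout
            if response.HasField('exit_info'):
                # Only the last message of the stream carries exit_info.
                exit_status = response.exit_info.status
        return exit_status, stdout
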
diff --git a/server/hosts/tls_client/autotest_common_pb2.py b/server/hosts/tls_client/autotest_common_pb2.py
new file mode 100644
index 0000000..6ad02bd
--- /dev/null
+++ b/server/hosts/tls_client/autotest_common_pb2.py
@@ -0,0 +1,1353 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: autotest_common.proto
+
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+
+import common
+from autotest_lib.server.hosts.tls_client.dependencies.longrunning import operations_pb2 as dependencies_dot_longrunning_dot_operations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='autotest_common.proto',
+  package='chromiumos.config.api.test.tls',
+  syntax='proto3',
+  serialized_options=b'Z1go.chromium.org/chromiumos/config/go/api/test/tls',
+  serialized_pb=b'\n\x15\x61utotest_common.proto\x12\x1e\x63hromiumos.config.api.test.tls\x1a\x1bgoogle/protobuf/empty.proto\x1a)dependencies/longrunning/operations.proto\"\xc3\x01\n\x15\x45xecDutCommandRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ommand\x18\x02 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\r\n\x05stdin\x18\x04 \x01(\x0c\x12\x36\n\x06stdout\x18\x05 \x01(\x0e\x32&.chromiumos.config.api.test.tls.Output\x12\x36\n\x06stderr\x18\x06 \x01(\x0e\x32&.chromiumos.config.api.test.tls.Output\"\xe2\x01\n\x16\x45xecDutCommandResponse\x12R\n\texit_info\x18\x01 \x01(\x0b\x32?.chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo\x12\x0e\n\x06stdout\x18\x02 \x01(\x0c\x12\x0e\n\x06stderr\x18\x03 \x01(\x0c\x1aT\n\x08\x45xitInfo\x12\x0e\n\x06status\x18\x01 \x01(\x05\x12\x10\n\x08signaled\x18\x02 \x01(\x08\x12\x0f\n\x07started\x18\x03 \x01(\x08\x12\x15\n\rerror_message\x18\x04 \x01(\t\"\xb0\x02\n\x13ProvisionDutRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12P\n\x05image\x18\x02 \x01(\x0b\x32\x41.chromiumos.config.api.test.tls.ProvisionDutRequest.ChromeOSImage\x12N\n\tdlc_specs\x18\x03 \x03(\x0b\x32;.chromiumos.config.api.test.tls.ProvisionDutRequest.DLCSpec\x12\x19\n\x11preserve_stateful\x18\x04 \x01(\x08\x1a\x37\n\rChromeOSImage\x12\x18\n\x0egs_path_prefix\x18\x01 \x01(\tH\x00\x42\x0c\n\npath_oneof\x1a\x15\n\x07\x44LCSpec\x12\n\n\x02id\x18\x01 \x01(\t\"\xf9\x01\n\x14ProvisionDutResponse\"\xe0\x01\n\x06Reason\x12\x1a\n\x16REASON_INVALID_REQUEST\x10\x00\x12(\n$REASON_DUT_UNREACHABLE_PRE_PROVISION\x10\x01\x12#\n\x1fREASON_DOWNLOADING_IMAGE_FAILED\x10\x02\x12 \n\x1cREASON_PROVISIONING_TIMEDOUT\x10\x03\x12\x1e\n\x1aREASON_PROVISIONING_FAILED\x10\x04\x12)\n%REASON_DUT_UNREACHABLE_POST_PROVISION\x10\x05\"\x16\n\x14ProvisionDutMetadata\"\xb0\x01\n\x16ProvisionLacrosRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12Q\n\x05image\x18\x02 \x01(\x0b\x32\x42.chromiumos.config.api.test.tls.ProvisionLacrosRequest.LacrosImage\x1a\x35\n\x0bLacrosImage\x12\x18\n\x0egs_path_prefix\x18\x01 \x01(\tH\x00\x42\x0c\n\npath_oneof\"\xd1\x01\n\x17ProvisionLacrosResponse\"\xb5\x01\n\x06Reason\x12\x1a\n\x16REASON_INVALID_REQUEST\x10\x00\x12(\n$REASON_DUT_UNREACHABLE_PRE_PROVISION\x10\x01\x12#\n\x1fREASON_DOWNLOADING_IMAGE_FAILED\x10\x02\x12 \n\x1cREASON_PROVISIONING_TIMEDOUT\x10\x03\x12\x1e\n\x1aREASON_PROVISIONING_FAILED\x10\x04\"\x19\n\x17ProvisionLacrosMetadata\"6\n\x13\x46\x65tchCrashesRequest\x12\x0b\n\x03\x64ut\x18\x01 \x01(\t\x12\x12\n\nfetch_core\x18\x02 \x01(\x08\"\xb7\x01\n\x14\x46\x65tchCrashesResponse\x12\x10\n\x08\x63rash_id\x18\x01 \x01(\x03\x12:\n\x05\x63rash\x18\x02 \x01(\x0b\x32).chromiumos.config.api.test.tls.CrashInfoH\x00\x12\x39\n\x04\x62lob\x18\x03 \x01(\x0b\x32).chromiumos.config.api.test.tls.CrashBlobH\x00\x12\x0e\n\x04\x63ore\x18\x04 \x01(\x0cH\x00\x42\x06\n\x04\x64\x61ta\"\xbe\x01\n\tCrashInfo\x12\x11\n\texec_name\x18\x01 \x01(\t\x12\x0c\n\x04prod\x18\x02 \x01(\t\x12\x0b\n\x03ver\x18\x03 \x01(\t\x12\x0b\n\x03sig\x18\x04 \x01(\t\x12$\n\x1cin_progress_integration_test\x18\x05 \x01(\t\x12\x11\n\tcollector\x18\x06 \x01(\t\x12=\n\x06\x66ields\x18\x07 \x03(\x0b\x32-.chromiumos.config.api.test.tls.CrashMetadata\"*\n\rCrashMetadata\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04text\x18\x02 \x01(\t\"8\n\tCrashBlob\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04\x62lob\x18\x02 \x01(\x0c\x12\x10\n\x08\x66ilename\x18\x03 \x01(\t\"7\n\rChromeOsImage\x12\x18\n\x0egs_path_prefix\x18\x01 \x01(\tH\x00\x42\x0c\n\npath_oneof\"\xaa\x03\n\tFakeOmaha\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\x0b\n\x03\x64ut\x18\x02 \x01(\t\x12\x43\n\x0ctarget_build\x18\x03 \x01(\x0b\x32-.chromiumos.config.api.test.tls.ChromeOsImage\x12\x43\n\x08payloads\x18\x04 \x03(\x0b\x32\x31.chromiumos.config.api.test.tls.FakeOmaha.Payload\x12\x19\n\x11\x65xposed_via_proxy\x18\x05 \x01(\x08\x12\x17\n\x0f\x63ritical_update\x18\x06 \x01(\x08\x12 \n\x18return_noupdate_starting\x18\x07 \x01(\x05\x12\x11\n\tomaha_url\x18\x08 \x01(\t\x1a\x8e\x01\n\x07Payload\x12\n\n\x02id\x18\x01 \x01(\t\x12\x44\n\x04type\x18\x02 \x01(\x0e\x32\x36.chromiumos.config.api.test.tls.FakeOmaha.Payload.Type\"1\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\t\n\x05\x44\x45LTA\x10\x02\"W\n\x16\x43reateFakeOmahaRequest\x12=\n\nfake_omaha\x18\x01 \x01(\x0b\x32).chromiumos.config.api.test.tls.FakeOmaha\"&\n\x16\x44\x65leteFakeOmahaRequest\x12\x0c\n\x04name\x18\x01 \x01(\t*,\n\x06Output\x12\x0f\n\x0bOUTPUT_PIPE\x10\x00\x12\x11\n\rOUTPUT_STDOUT\x10\x01\x32\x9a\x06\n\x06\x43ommon\x12\x81\x01\n\x0e\x45xecDutCommand\x12\x35.chromiumos.config.api.test.tls.ExecDutCommandRequest\x1a\x36.chromiumos.config.api.test.tls.ExecDutCommandResponse0\x01\x12\x93\x01\n\x0cProvisionDut\x12\x33.chromiumos.config.api.test.tls.ProvisionDutRequest\x1a\x1d.google.longrunning.Operation\"/\xca\x41,\n\x14ProvisionDutResponse\x12\x14ProvisionDutMetadata\x12\x9f\x01\n\x0fProvisionLacros\x12\x36.chromiumos.config.api.test.tls.ProvisionLacrosRequest\x1a\x1d.google.longrunning.Operation\"5\xca\x41\x32\n\x17ProvisionLacrosResponse\x12\x17ProvisionLacrosMetadata\x12{\n\x0c\x46\x65tchCrashes\x12\x33.chromiumos.config.api.test.tls.FetchCrashesRequest\x1a\x34.chromiumos.config.api.test.tls.FetchCrashesResponse0\x01\x12t\n\x0f\x43reateFakeOmaha\x12\x36.chromiumos.config.api.test.tls.CreateFakeOmahaRequest\x1a).chromiumos.config.api.test.tls.FakeOmaha\x12\x61\n\x0f\x44\x65leteFakeOmaha\x12\x36.chromiumos.config.api.test.tls.DeleteFakeOmahaRequest\x1a\x16.google.protobuf.EmptyB3Z1go.chromium.org/chromiumos/config/go/api/test/tlsb\x06proto3'
+  ,
+  dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,dependencies_dot_longrunning_dot_operations__pb2.DESCRIPTOR,])
+
+_OUTPUT = _descriptor.EnumDescriptor(
+  name='Output',
+  full_name='chromiumos.config.api.test.tls.Output',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='OUTPUT_PIPE', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OUTPUT_STDOUT', index=1, number=1,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2709,
+  serialized_end=2753,
+)
+_sym_db.RegisterEnumDescriptor(_OUTPUT)
+
+Output = enum_type_wrapper.EnumTypeWrapper(_OUTPUT)
+OUTPUT_PIPE = 0
+OUTPUT_STDOUT = 1
+
+
+_PROVISIONDUTRESPONSE_REASON = _descriptor.EnumDescriptor(
+  name='Reason',
+  full_name='chromiumos.config.api.test.tls.ProvisionDutResponse.Reason',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='REASON_INVALID_REQUEST', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REASON_DUT_UNREACHABLE_PRE_PROVISION', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REASON_DOWNLOADING_IMAGE_FAILED', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REASON_PROVISIONING_TIMEDOUT', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REASON_PROVISIONING_FAILED', index=4, number=4,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REASON_DUT_UNREACHABLE_POST_PROVISION', index=5, number=5,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=889,
+  serialized_end=1113,
+)
+_sym_db.RegisterEnumDescriptor(_PROVISIONDUTRESPONSE_REASON)
+
+_PROVISIONLACROSRESPONSE_REASON = _descriptor.EnumDescriptor(
+  name='Reason',
+  full_name='chromiumos.config.api.test.tls.ProvisionLacrosResponse.Reason',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='REASON_INVALID_REQUEST', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REASON_DUT_UNREACHABLE_PRE_PROVISION', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REASON_DOWNLOADING_IMAGE_FAILED', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REASON_PROVISIONING_TIMEDOUT', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REASON_PROVISIONING_FAILED', index=4, number=4,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=889,
+  serialized_end=1070,
+)
+_sym_db.RegisterEnumDescriptor(_PROVISIONLACROSRESPONSE_REASON)
+
+_FAKEOMAHA_PAYLOAD_TYPE = _descriptor.EnumDescriptor(
+  name='Type',
+  full_name='chromiumos.config.api.test.tls.FakeOmaha.Payload.Type',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_UNSPECIFIED', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FULL', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DELTA', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2529,
+  serialized_end=2578,
+)
+_sym_db.RegisterEnumDescriptor(_FAKEOMAHA_PAYLOAD_TYPE)
+
+
+_EXECDUTCOMMANDREQUEST = _descriptor.Descriptor(
+  name='ExecDutCommandRequest',
+  full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='command', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.command', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='args', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.args', index=2,
+      number=3, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='stdin', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.stdin', index=3,
+      number=4, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"",
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='stdout', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.stdout', index=4,
+      number=5, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='stderr', full_name='chromiumos.config.api.test.tls.ExecDutCommandRequest.stderr', index=5,
+      number=6, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=130,
+  serialized_end=325,
+)
+
+
+_EXECDUTCOMMANDRESPONSE_EXITINFO = _descriptor.Descriptor(
+  name='ExitInfo',
+  full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='status', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo.status', index=0,
+      number=1, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='signaled', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo.signaled', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='started', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo.started', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='error_message', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo.error_message', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=470,
+  serialized_end=554,
+)
+
+_EXECDUTCOMMANDRESPONSE = _descriptor.Descriptor(
+  name='ExecDutCommandResponse',
+  full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='exit_info', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.exit_info', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='stdout', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.stdout', index=1,
+      number=2, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"",
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='stderr', full_name='chromiumos.config.api.test.tls.ExecDutCommandResponse.stderr', index=2,
+      number=3, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"",
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_EXECDUTCOMMANDRESPONSE_EXITINFO, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=328,
+  serialized_end=554,
+)
+
+
+_PROVISIONDUTREQUEST_CHROMEOSIMAGE = _descriptor.Descriptor(
+  name='ChromeOSImage',
+  full_name='chromiumos.config.api.test.tls.ProvisionDutRequest.ChromeOSImage',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='gs_path_prefix', full_name='chromiumos.config.api.test.tls.ProvisionDutRequest.ChromeOSImage.gs_path_prefix', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='path_oneof', full_name='chromiumos.config.api.test.tls.ProvisionDutRequest.ChromeOSImage.path_oneof',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=783,
+  serialized_end=838,
+)
+
+_PROVISIONDUTREQUEST_DLCSPEC = _descriptor.Descriptor(
+  name='DLCSpec',
+  full_name='chromiumos.config.api.test.tls.ProvisionDutRequest.DLCSpec',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='id', full_name='chromiumos.config.api.test.tls.ProvisionDutRequest.DLCSpec.id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=840,
+  serialized_end=861,
+)
+
+_PROVISIONDUTREQUEST = _descriptor.Descriptor(
+  name='ProvisionDutRequest',
+  full_name='chromiumos.config.api.test.tls.ProvisionDutRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='chromiumos.config.api.test.tls.ProvisionDutRequest.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='image', full_name='chromiumos.config.api.test.tls.ProvisionDutRequest.image', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='dlc_specs', full_name='chromiumos.config.api.test.tls.ProvisionDutRequest.dlc_specs', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='preserve_stateful', full_name='chromiumos.config.api.test.tls.ProvisionDutRequest.preserve_stateful', index=3,
+      number=4, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_PROVISIONDUTREQUEST_CHROMEOSIMAGE, _PROVISIONDUTREQUEST_DLCSPEC, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=557,
+  serialized_end=861,
+)
+
+
+_PROVISIONDUTRESPONSE = _descriptor.Descriptor(
+  name='ProvisionDutResponse',
+  full_name='chromiumos.config.api.test.tls.ProvisionDutResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _PROVISIONDUTRESPONSE_REASON,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=864,
+  serialized_end=1113,
+)
+
+
+_PROVISIONDUTMETADATA = _descriptor.Descriptor(
+  name='ProvisionDutMetadata',
+  full_name='chromiumos.config.api.test.tls.ProvisionDutMetadata',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1115,
+  serialized_end=1137,
+)
+
+
+_PROVISIONLACROSREQUEST_LACROSIMAGE = _descriptor.Descriptor(
+  name='LacrosImage',
+  full_name='chromiumos.config.api.test.tls.ProvisionLacrosRequest.LacrosImage',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='gs_path_prefix', full_name='chromiumos.config.api.test.tls.ProvisionLacrosRequest.LacrosImage.gs_path_prefix', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='path_oneof', full_name='chromiumos.config.api.test.tls.ProvisionLacrosRequest.LacrosImage.path_oneof',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=1263,
+  serialized_end=1316,
+)
+
+_PROVISIONLACROSREQUEST = _descriptor.Descriptor(
+  name='ProvisionLacrosRequest',
+  full_name='chromiumos.config.api.test.tls.ProvisionLacrosRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='chromiumos.config.api.test.tls.ProvisionLacrosRequest.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='image', full_name='chromiumos.config.api.test.tls.ProvisionLacrosRequest.image', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_PROVISIONLACROSREQUEST_LACROSIMAGE, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1140,
+  serialized_end=1316,
+)
+
+
+_PROVISIONLACROSRESPONSE = _descriptor.Descriptor(
+  name='ProvisionLacrosResponse',
+  full_name='chromiumos.config.api.test.tls.ProvisionLacrosResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _PROVISIONLACROSRESPONSE_REASON,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1319,
+  serialized_end=1528,
+)
+
+
+_PROVISIONLACROSMETADATA = _descriptor.Descriptor(
+  name='ProvisionLacrosMetadata',
+  full_name='chromiumos.config.api.test.tls.ProvisionLacrosMetadata',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1530,
+  serialized_end=1555,
+)
+
+
+_FETCHCRASHESREQUEST = _descriptor.Descriptor(
+  name='FetchCrashesRequest',
+  full_name='chromiumos.config.api.test.tls.FetchCrashesRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='dut', full_name='chromiumos.config.api.test.tls.FetchCrashesRequest.dut', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='fetch_core', full_name='chromiumos.config.api.test.tls.FetchCrashesRequest.fetch_core', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1557,
+  serialized_end=1611,
+)
+
+
+_FETCHCRASHESRESPONSE = _descriptor.Descriptor(
+  name='FetchCrashesResponse',
+  full_name='chromiumos.config.api.test.tls.FetchCrashesResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='crash_id', full_name='chromiumos.config.api.test.tls.FetchCrashesResponse.crash_id', index=0,
+      number=1, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='crash', full_name='chromiumos.config.api.test.tls.FetchCrashesResponse.crash', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='blob', full_name='chromiumos.config.api.test.tls.FetchCrashesResponse.blob', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='core', full_name='chromiumos.config.api.test.tls.FetchCrashesResponse.core', index=3,
+      number=4, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"",
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='data', full_name='chromiumos.config.api.test.tls.FetchCrashesResponse.data',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=1614,
+  serialized_end=1797,
+)
+
+
+_CRASHINFO = _descriptor.Descriptor(
+  name='CrashInfo',
+  full_name='chromiumos.config.api.test.tls.CrashInfo',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='exec_name', full_name='chromiumos.config.api.test.tls.CrashInfo.exec_name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='prod', full_name='chromiumos.config.api.test.tls.CrashInfo.prod', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='ver', full_name='chromiumos.config.api.test.tls.CrashInfo.ver', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='sig', full_name='chromiumos.config.api.test.tls.CrashInfo.sig', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='in_progress_integration_test', full_name='chromiumos.config.api.test.tls.CrashInfo.in_progress_integration_test', index=4,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='collector', full_name='chromiumos.config.api.test.tls.CrashInfo.collector', index=5,
+      number=6, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='fields', full_name='chromiumos.config.api.test.tls.CrashInfo.fields', index=6,
+      number=7, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1800,
+  serialized_end=1990,
+)
+
+
+_CRASHMETADATA = _descriptor.Descriptor(
+  name='CrashMetadata',
+  full_name='chromiumos.config.api.test.tls.CrashMetadata',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='key', full_name='chromiumos.config.api.test.tls.CrashMetadata.key', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='text', full_name='chromiumos.config.api.test.tls.CrashMetadata.text', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1992,
+  serialized_end=2034,
+)
+
+
+_CRASHBLOB = _descriptor.Descriptor(
+  name='CrashBlob',
+  full_name='chromiumos.config.api.test.tls.CrashBlob',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='key', full_name='chromiumos.config.api.test.tls.CrashBlob.key', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='blob', full_name='chromiumos.config.api.test.tls.CrashBlob.blob', index=1,
+      number=2, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"",
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='filename', full_name='chromiumos.config.api.test.tls.CrashBlob.filename', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2036,
+  serialized_end=2092,
+)
+
+
+_CHROMEOSIMAGE = _descriptor.Descriptor(
+  name='ChromeOsImage',
+  full_name='chromiumos.config.api.test.tls.ChromeOsImage',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='gs_path_prefix', full_name='chromiumos.config.api.test.tls.ChromeOsImage.gs_path_prefix', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='path_oneof', full_name='chromiumos.config.api.test.tls.ChromeOsImage.path_oneof',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=2094,
+  serialized_end=2149,
+)
+
+
+_FAKEOMAHA_PAYLOAD = _descriptor.Descriptor(
+  name='Payload',
+  full_name='chromiumos.config.api.test.tls.FakeOmaha.Payload',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='id', full_name='chromiumos.config.api.test.tls.FakeOmaha.Payload.id', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='chromiumos.config.api.test.tls.FakeOmaha.Payload.type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _FAKEOMAHA_PAYLOAD_TYPE,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2436,
+  serialized_end=2578,
+)
+
+_FAKEOMAHA = _descriptor.Descriptor(
+  name='FakeOmaha',
+  full_name='chromiumos.config.api.test.tls.FakeOmaha',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='chromiumos.config.api.test.tls.FakeOmaha.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='dut', full_name='chromiumos.config.api.test.tls.FakeOmaha.dut', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='target_build', full_name='chromiumos.config.api.test.tls.FakeOmaha.target_build', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='payloads', full_name='chromiumos.config.api.test.tls.FakeOmaha.payloads', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='exposed_via_proxy', full_name='chromiumos.config.api.test.tls.FakeOmaha.exposed_via_proxy', index=4,
+      number=5, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='critical_update', full_name='chromiumos.config.api.test.tls.FakeOmaha.critical_update', index=5,
+      number=6, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='return_noupdate_starting', full_name='chromiumos.config.api.test.tls.FakeOmaha.return_noupdate_starting', index=6,
+      number=7, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='omaha_url', full_name='chromiumos.config.api.test.tls.FakeOmaha.omaha_url', index=7,
+      number=8, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_FAKEOMAHA_PAYLOAD, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2152,
+  serialized_end=2578,
+)
+
+
+_CREATEFAKEOMAHAREQUEST = _descriptor.Descriptor(
+  name='CreateFakeOmahaRequest',
+  full_name='chromiumos.config.api.test.tls.CreateFakeOmahaRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='fake_omaha', full_name='chromiumos.config.api.test.tls.CreateFakeOmahaRequest.fake_omaha', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2580,
+  serialized_end=2667,
+)
+
+
+_DELETEFAKEOMAHAREQUEST = _descriptor.Descriptor(
+  name='DeleteFakeOmahaRequest',
+  full_name='chromiumos.config.api.test.tls.DeleteFakeOmahaRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='chromiumos.config.api.test.tls.DeleteFakeOmahaRequest.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2669,
+  serialized_end=2707,
+)
+
+_EXECDUTCOMMANDREQUEST.fields_by_name['stdout'].enum_type = _OUTPUT
+_EXECDUTCOMMANDREQUEST.fields_by_name['stderr'].enum_type = _OUTPUT
+_EXECDUTCOMMANDRESPONSE_EXITINFO.containing_type = _EXECDUTCOMMANDRESPONSE
+_EXECDUTCOMMANDRESPONSE.fields_by_name['exit_info'].message_type = _EXECDUTCOMMANDRESPONSE_EXITINFO
+_PROVISIONDUTREQUEST_CHROMEOSIMAGE.containing_type = _PROVISIONDUTREQUEST
+_PROVISIONDUTREQUEST_CHROMEOSIMAGE.oneofs_by_name['path_oneof'].fields.append(
+  _PROVISIONDUTREQUEST_CHROMEOSIMAGE.fields_by_name['gs_path_prefix'])
+_PROVISIONDUTREQUEST_CHROMEOSIMAGE.fields_by_name['gs_path_prefix'].containing_oneof = _PROVISIONDUTREQUEST_CHROMEOSIMAGE.oneofs_by_name['path_oneof']
+_PROVISIONDUTREQUEST_DLCSPEC.containing_type = _PROVISIONDUTREQUEST
+_PROVISIONDUTREQUEST.fields_by_name['image'].message_type = _PROVISIONDUTREQUEST_CHROMEOSIMAGE
+_PROVISIONDUTREQUEST.fields_by_name['dlc_specs'].message_type = _PROVISIONDUTREQUEST_DLCSPEC
+_PROVISIONDUTRESPONSE_REASON.containing_type = _PROVISIONDUTRESPONSE
+_PROVISIONLACROSREQUEST_LACROSIMAGE.containing_type = _PROVISIONLACROSREQUEST
+_PROVISIONLACROSREQUEST_LACROSIMAGE.oneofs_by_name['path_oneof'].fields.append(
+  _PROVISIONLACROSREQUEST_LACROSIMAGE.fields_by_name['gs_path_prefix'])
+_PROVISIONLACROSREQUEST_LACROSIMAGE.fields_by_name['gs_path_prefix'].containing_oneof = _PROVISIONLACROSREQUEST_LACROSIMAGE.oneofs_by_name['path_oneof']
+_PROVISIONLACROSREQUEST.fields_by_name['image'].message_type = _PROVISIONLACROSREQUEST_LACROSIMAGE
+_PROVISIONLACROSRESPONSE_REASON.containing_type = _PROVISIONLACROSRESPONSE
+_FETCHCRASHESRESPONSE.fields_by_name['crash'].message_type = _CRASHINFO
+_FETCHCRASHESRESPONSE.fields_by_name['blob'].message_type = _CRASHBLOB
+_FETCHCRASHESRESPONSE.oneofs_by_name['data'].fields.append(
+  _FETCHCRASHESRESPONSE.fields_by_name['crash'])
+_FETCHCRASHESRESPONSE.fields_by_name['crash'].containing_oneof = _FETCHCRASHESRESPONSE.oneofs_by_name['data']
+_FETCHCRASHESRESPONSE.oneofs_by_name['data'].fields.append(
+  _FETCHCRASHESRESPONSE.fields_by_name['blob'])
+_FETCHCRASHESRESPONSE.fields_by_name['blob'].containing_oneof = _FETCHCRASHESRESPONSE.oneofs_by_name['data']
+_FETCHCRASHESRESPONSE.oneofs_by_name['data'].fields.append(
+  _FETCHCRASHESRESPONSE.fields_by_name['core'])
+_FETCHCRASHESRESPONSE.fields_by_name['core'].containing_oneof = _FETCHCRASHESRESPONSE.oneofs_by_name['data']
+_CRASHINFO.fields_by_name['fields'].message_type = _CRASHMETADATA
+_CHROMEOSIMAGE.oneofs_by_name['path_oneof'].fields.append(
+  _CHROMEOSIMAGE.fields_by_name['gs_path_prefix'])
+_CHROMEOSIMAGE.fields_by_name['gs_path_prefix'].containing_oneof = _CHROMEOSIMAGE.oneofs_by_name['path_oneof']
+_FAKEOMAHA_PAYLOAD.fields_by_name['type'].enum_type = _FAKEOMAHA_PAYLOAD_TYPE
+_FAKEOMAHA_PAYLOAD.containing_type = _FAKEOMAHA
+_FAKEOMAHA_PAYLOAD_TYPE.containing_type = _FAKEOMAHA_PAYLOAD
+_FAKEOMAHA.fields_by_name['target_build'].message_type = _CHROMEOSIMAGE
+_FAKEOMAHA.fields_by_name['payloads'].message_type = _FAKEOMAHA_PAYLOAD
+_CREATEFAKEOMAHAREQUEST.fields_by_name['fake_omaha'].message_type = _FAKEOMAHA
+DESCRIPTOR.message_types_by_name['ExecDutCommandRequest'] = _EXECDUTCOMMANDREQUEST
+DESCRIPTOR.message_types_by_name['ExecDutCommandResponse'] = _EXECDUTCOMMANDRESPONSE
+DESCRIPTOR.message_types_by_name['ProvisionDutRequest'] = _PROVISIONDUTREQUEST
+DESCRIPTOR.message_types_by_name['ProvisionDutResponse'] = _PROVISIONDUTRESPONSE
+DESCRIPTOR.message_types_by_name['ProvisionDutMetadata'] = _PROVISIONDUTMETADATA
+DESCRIPTOR.message_types_by_name['ProvisionLacrosRequest'] = _PROVISIONLACROSREQUEST
+DESCRIPTOR.message_types_by_name['ProvisionLacrosResponse'] = _PROVISIONLACROSRESPONSE
+DESCRIPTOR.message_types_by_name['ProvisionLacrosMetadata'] = _PROVISIONLACROSMETADATA
+DESCRIPTOR.message_types_by_name['FetchCrashesRequest'] = _FETCHCRASHESREQUEST
+DESCRIPTOR.message_types_by_name['FetchCrashesResponse'] = _FETCHCRASHESRESPONSE
+DESCRIPTOR.message_types_by_name['CrashInfo'] = _CRASHINFO
+DESCRIPTOR.message_types_by_name['CrashMetadata'] = _CRASHMETADATA
+DESCRIPTOR.message_types_by_name['CrashBlob'] = _CRASHBLOB
+DESCRIPTOR.message_types_by_name['ChromeOsImage'] = _CHROMEOSIMAGE
+DESCRIPTOR.message_types_by_name['FakeOmaha'] = _FAKEOMAHA
+DESCRIPTOR.message_types_by_name['CreateFakeOmahaRequest'] = _CREATEFAKEOMAHAREQUEST
+DESCRIPTOR.message_types_by_name['DeleteFakeOmahaRequest'] = _DELETEFAKEOMAHAREQUEST
+DESCRIPTOR.enum_types_by_name['Output'] = _OUTPUT
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+ExecDutCommandRequest = _reflection.GeneratedProtocolMessageType('ExecDutCommandRequest', (_message.Message,), {
+  'DESCRIPTOR' : _EXECDUTCOMMANDREQUEST,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ExecDutCommandRequest)
+  })
+_sym_db.RegisterMessage(ExecDutCommandRequest)
+
+ExecDutCommandResponse = _reflection.GeneratedProtocolMessageType('ExecDutCommandResponse', (_message.Message,), {
+
+  'ExitInfo' : _reflection.GeneratedProtocolMessageType('ExitInfo', (_message.Message,), {
+    'DESCRIPTOR' : _EXECDUTCOMMANDRESPONSE_EXITINFO,
+    '__module__' : 'autotest_common_pb2'
+    # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ExecDutCommandResponse.ExitInfo)
+    })
+  ,
+  'DESCRIPTOR' : _EXECDUTCOMMANDRESPONSE,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ExecDutCommandResponse)
+  })
+_sym_db.RegisterMessage(ExecDutCommandResponse)
+_sym_db.RegisterMessage(ExecDutCommandResponse.ExitInfo)
+
+ProvisionDutRequest = _reflection.GeneratedProtocolMessageType('ProvisionDutRequest', (_message.Message,), {
+
+  'ChromeOSImage' : _reflection.GeneratedProtocolMessageType('ChromeOSImage', (_message.Message,), {
+    'DESCRIPTOR' : _PROVISIONDUTREQUEST_CHROMEOSIMAGE,
+    '__module__' : 'autotest_common_pb2'
+    # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ProvisionDutRequest.ChromeOSImage)
+    })
+  ,
+
+  'DLCSpec' : _reflection.GeneratedProtocolMessageType('DLCSpec', (_message.Message,), {
+    'DESCRIPTOR' : _PROVISIONDUTREQUEST_DLCSPEC,
+    '__module__' : 'autotest_common_pb2'
+    # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ProvisionDutRequest.DLCSpec)
+    })
+  ,
+  'DESCRIPTOR' : _PROVISIONDUTREQUEST,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ProvisionDutRequest)
+  })
+_sym_db.RegisterMessage(ProvisionDutRequest)
+_sym_db.RegisterMessage(ProvisionDutRequest.ChromeOSImage)
+_sym_db.RegisterMessage(ProvisionDutRequest.DLCSpec)
+
+ProvisionDutResponse = _reflection.GeneratedProtocolMessageType('ProvisionDutResponse', (_message.Message,), {
+  'DESCRIPTOR' : _PROVISIONDUTRESPONSE,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ProvisionDutResponse)
+  })
+_sym_db.RegisterMessage(ProvisionDutResponse)
+
+ProvisionDutMetadata = _reflection.GeneratedProtocolMessageType('ProvisionDutMetadata', (_message.Message,), {
+  'DESCRIPTOR' : _PROVISIONDUTMETADATA,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ProvisionDutMetadata)
+  })
+_sym_db.RegisterMessage(ProvisionDutMetadata)
+
+ProvisionLacrosRequest = _reflection.GeneratedProtocolMessageType('ProvisionLacrosRequest', (_message.Message,), {
+
+  'LacrosImage' : _reflection.GeneratedProtocolMessageType('LacrosImage', (_message.Message,), {
+    'DESCRIPTOR' : _PROVISIONLACROSREQUEST_LACROSIMAGE,
+    '__module__' : 'autotest_common_pb2'
+    # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ProvisionLacrosRequest.LacrosImage)
+    })
+  ,
+  'DESCRIPTOR' : _PROVISIONLACROSREQUEST,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ProvisionLacrosRequest)
+  })
+_sym_db.RegisterMessage(ProvisionLacrosRequest)
+_sym_db.RegisterMessage(ProvisionLacrosRequest.LacrosImage)
+
+ProvisionLacrosResponse = _reflection.GeneratedProtocolMessageType('ProvisionLacrosResponse', (_message.Message,), {
+  'DESCRIPTOR' : _PROVISIONLACROSRESPONSE,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ProvisionLacrosResponse)
+  })
+_sym_db.RegisterMessage(ProvisionLacrosResponse)
+
+ProvisionLacrosMetadata = _reflection.GeneratedProtocolMessageType('ProvisionLacrosMetadata', (_message.Message,), {
+  'DESCRIPTOR' : _PROVISIONLACROSMETADATA,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ProvisionLacrosMetadata)
+  })
+_sym_db.RegisterMessage(ProvisionLacrosMetadata)
+
+FetchCrashesRequest = _reflection.GeneratedProtocolMessageType('FetchCrashesRequest', (_message.Message,), {
+  'DESCRIPTOR' : _FETCHCRASHESREQUEST,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.FetchCrashesRequest)
+  })
+_sym_db.RegisterMessage(FetchCrashesRequest)
+
+FetchCrashesResponse = _reflection.GeneratedProtocolMessageType('FetchCrashesResponse', (_message.Message,), {
+  'DESCRIPTOR' : _FETCHCRASHESRESPONSE,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.FetchCrashesResponse)
+  })
+_sym_db.RegisterMessage(FetchCrashesResponse)
+
+CrashInfo = _reflection.GeneratedProtocolMessageType('CrashInfo', (_message.Message,), {
+  'DESCRIPTOR' : _CRASHINFO,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.CrashInfo)
+  })
+_sym_db.RegisterMessage(CrashInfo)
+
+CrashMetadata = _reflection.GeneratedProtocolMessageType('CrashMetadata', (_message.Message,), {
+  'DESCRIPTOR' : _CRASHMETADATA,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.CrashMetadata)
+  })
+_sym_db.RegisterMessage(CrashMetadata)
+
+CrashBlob = _reflection.GeneratedProtocolMessageType('CrashBlob', (_message.Message,), {
+  'DESCRIPTOR' : _CRASHBLOB,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.CrashBlob)
+  })
+_sym_db.RegisterMessage(CrashBlob)
+
+ChromeOsImage = _reflection.GeneratedProtocolMessageType('ChromeOsImage', (_message.Message,), {
+  'DESCRIPTOR' : _CHROMEOSIMAGE,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.ChromeOsImage)
+  })
+_sym_db.RegisterMessage(ChromeOsImage)
+
+FakeOmaha = _reflection.GeneratedProtocolMessageType('FakeOmaha', (_message.Message,), {
+
+  'Payload' : _reflection.GeneratedProtocolMessageType('Payload', (_message.Message,), {
+    'DESCRIPTOR' : _FAKEOMAHA_PAYLOAD,
+    '__module__' : 'autotest_common_pb2'
+    # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.FakeOmaha.Payload)
+    })
+  ,
+  'DESCRIPTOR' : _FAKEOMAHA,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.FakeOmaha)
+  })
+_sym_db.RegisterMessage(FakeOmaha)
+_sym_db.RegisterMessage(FakeOmaha.Payload)
+
+CreateFakeOmahaRequest = _reflection.GeneratedProtocolMessageType('CreateFakeOmahaRequest', (_message.Message,), {
+  'DESCRIPTOR' : _CREATEFAKEOMAHAREQUEST,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.CreateFakeOmahaRequest)
+  })
+_sym_db.RegisterMessage(CreateFakeOmahaRequest)
+
+DeleteFakeOmahaRequest = _reflection.GeneratedProtocolMessageType('DeleteFakeOmahaRequest', (_message.Message,), {
+  'DESCRIPTOR' : _DELETEFAKEOMAHAREQUEST,
+  '__module__' : 'autotest_common_pb2'
+  # @@protoc_insertion_point(class_scope:chromiumos.config.api.test.tls.DeleteFakeOmahaRequest)
+  })
+_sym_db.RegisterMessage(DeleteFakeOmahaRequest)
+
+
+DESCRIPTOR._options = None
+
+_COMMON = _descriptor.ServiceDescriptor(
+  name='Common',
+  full_name='chromiumos.config.api.test.tls.Common',
+  file=DESCRIPTOR,
+  index=0,
+  serialized_options=None,
+  serialized_start=2756,
+  serialized_end=3550,
+  methods=[
+  _descriptor.MethodDescriptor(
+    name='ExecDutCommand',
+    full_name='chromiumos.config.api.test.tls.Common.ExecDutCommand',
+    index=0,
+    containing_service=None,
+    input_type=_EXECDUTCOMMANDREQUEST,
+    output_type=_EXECDUTCOMMANDRESPONSE,
+    serialized_options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ProvisionDut',
+    full_name='chromiumos.config.api.test.tls.Common.ProvisionDut',
+    index=1,
+    containing_service=None,
+    input_type=_PROVISIONDUTREQUEST,
+    output_type=dependencies_dot_longrunning_dot_operations__pb2._OPERATION,
+    serialized_options=b'\312A,\n\024ProvisionDutResponse\022\024ProvisionDutMetadata',
+  ),
+  _descriptor.MethodDescriptor(
+    name='ProvisionLacros',
+    full_name='chromiumos.config.api.test.tls.Common.ProvisionLacros',
+    index=2,
+    containing_service=None,
+    input_type=_PROVISIONLACROSREQUEST,
+    output_type=dependencies_dot_longrunning_dot_operations__pb2._OPERATION,
+    serialized_options=b'\312A2\n\027ProvisionLacrosResponse\022\027ProvisionLacrosMetadata',
+  ),
+  _descriptor.MethodDescriptor(
+    name='FetchCrashes',
+    full_name='chromiumos.config.api.test.tls.Common.FetchCrashes',
+    index=3,
+    containing_service=None,
+    input_type=_FETCHCRASHESREQUEST,
+    output_type=_FETCHCRASHESRESPONSE,
+    serialized_options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='CreateFakeOmaha',
+    full_name='chromiumos.config.api.test.tls.Common.CreateFakeOmaha',
+    index=4,
+    containing_service=None,
+    input_type=_CREATEFAKEOMAHAREQUEST,
+    output_type=_FAKEOMAHA,
+    serialized_options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='DeleteFakeOmaha',
+    full_name='chromiumos.config.api.test.tls.Common.DeleteFakeOmaha',
+    index=5,
+    containing_service=None,
+    input_type=_DELETEFAKEOMAHAREQUEST,
+    output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
+    serialized_options=None,
+  ),
+])
+_sym_db.RegisterServiceDescriptor(_COMMON)
+
+DESCRIPTOR.services_by_name['Common'] = _COMMON
+
+# @@protoc_insertion_point(module_scope)
diff --git a/server/hosts/tls_client/autotest_common_pb2_grpc.py b/server/hosts/tls_client/autotest_common_pb2_grpc.py
new file mode 100644
index 0000000..5bac36a
--- /dev/null
+++ b/server/hosts/tls_client/autotest_common_pb2_grpc.py
@@ -0,0 +1,191 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+
+import common
+from autotest_lib.server.hosts.tls_client import autotest_common_pb2 as autotest__common__pb2
+from autotest_lib.server.hosts.tls_client.dependencies.longrunning import operations_pb2 as dependencies_dot_longrunning_dot_operations__pb2
+
+class CommonStub(object):
+    """Common lab services implemented on top of the wiring APIs.
+
+  The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL
+  NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED",  "MAY", and
+  "OPTIONAL" in this document are to be interpreted as described in
+  RFC 2119.
+
+  All clients SHOULD pass the gRPC metadata key request_trace_id with one
+  value. The value is a unique string that is associated with the method call
+  in metrics. Clients that do not pass request_trace_id MAY be rejected so that
+  they can be fixed.
+  """
+
+    def __init__(self, channel):
+        """Constructor.
+
+    Args:
+      channel: A grpc.Channel.
+    """
+        self.ExecDutCommand = channel.unary_stream(
+            '/chromiumos.config.api.test.tls.Common/ExecDutCommand',
+            request_serializer=autotest__common__pb2.ExecDutCommandRequest.SerializeToString,
+            response_deserializer=autotest__common__pb2.ExecDutCommandResponse.FromString,
+            )
+        self.ProvisionDut = channel.unary_unary(
+            '/chromiumos.config.api.test.tls.Common/ProvisionDut',
+            request_serializer=autotest__common__pb2.ProvisionDutRequest.SerializeToString,
+            response_deserializer=dependencies_dot_longrunning_dot_operations__pb2.Operation.FromString,
+            )
+        self.ProvisionLacros = channel.unary_unary(
+            '/chromiumos.config.api.test.tls.Common/ProvisionLacros',
+            request_serializer=autotest__common__pb2.ProvisionLacrosRequest.SerializeToString,
+            response_deserializer=dependencies_dot_longrunning_dot_operations__pb2.Operation.FromString,
+            )
+        self.FetchCrashes = channel.unary_stream(
+            '/chromiumos.config.api.test.tls.Common/FetchCrashes',
+            request_serializer=autotest__common__pb2.FetchCrashesRequest.SerializeToString,
+            response_deserializer=autotest__common__pb2.FetchCrashesResponse.FromString,
+            )
+        self.CreateFakeOmaha = channel.unary_unary(
+            '/chromiumos.config.api.test.tls.Common/CreateFakeOmaha',
+            request_serializer=autotest__common__pb2.CreateFakeOmahaRequest.SerializeToString,
+            response_deserializer=autotest__common__pb2.FakeOmaha.FromString,
+            )
+        self.DeleteFakeOmaha = channel.unary_unary(
+            '/chromiumos.config.api.test.tls.Common/DeleteFakeOmaha',
+            request_serializer=autotest__common__pb2.DeleteFakeOmahaRequest.SerializeToString,
+            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+            )
+
+
+class CommonServicer(object):
+    """Common lab services implemented on top of the wiring APIs.
+
+  The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL
+  NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED",  "MAY", and
+  "OPTIONAL" in this document are to be interpreted as described in
+  RFC 2119.
+
+  All clients SHOULD pass the gRPC metadata key request_trace_id with one
+  value. The value is a unique string that is associated with the method call
+  in metrics. Clients that do not pass request_trace_id MAY be rejected so that
+  they can be fixed.
+  """
+
+    def ExecDutCommand(self, request, context):
+        """ExecDutCommand runs a command on a DUT.
+
+    The working directory is /.
+    A tty is not spawned for the command.
+    The user and group is root.
+    All signals have their default dispositions and are not masked.
+    The umask is set to 0.
+
+    The environment contains:
+
+    TERM=dumb
+    PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/bin
+    LANG=en_US.UTF-8
+    USER=root
+    HOME=/root
+
+    The environment MAY also contain SSH client variables.
+    The environment SHALL NOT contain variables not mentioned above.
+
+    If the stream is interrupted, the implementation MAY attempt to
+    stop the command by sending SIGINT, SIGHUP, SIGTERM, or SIGKILL.
+    """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ProvisionDut(self, request, context):
+        """ProvisionDut installs a specified version of ChromeOS on the DUT, along
+    with any specified DLCs.
+
+    If the DUT is already on the specified version of ChromeOS, the OS will
+    not be provisioned.
+
+    If the DUT already has the specified list of DLCs, only the missing DLCs
+    will be provisioned.
+    """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ProvisionLacros(self, request, context):
+        """ProvisionLacros installs a specified version of Lacros on the DUT.
+
+    If the DUT already has the specified version of Lacros, Lacros will not be
+    provisioned.
+    """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def FetchCrashes(self, request, context):
+        """FetchCrashes gets a stream of all crash reports currently on the DUT.
+
+    The stream returned may split up a crash over multiple
+    `FetchCrashesResponse` protos. See the definition of that proto for
+    details.
+
+    This call is read-only: it doesn't delete the crashes that it reads.
+    """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
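+
+    # Illustrative client-side sketch (comments only, not generated code): each
+    # crash may arrive split across several FetchCrashesResponse messages,
+    # distinguished by the 'data' oneof. Assuming a connected CommonStub `stub`
+    # and hypothetical reassembly helpers:
+    #
+    #   for resp in stub.FetchCrashes(FetchCrashesRequest(dut='my-dut')):
+    #       kind = resp.WhichOneof('data')  # 'crash', 'blob', or 'core'
+    #       if kind == 'crash':
+    #           start_crash(resp.crash_id, resp.crash)
+    #       elif kind == 'blob':
+    #           add_blob(resp.crash_id, resp.blob)
+    #       elif kind == 'core':
+    #           add_core(resp.crash_id, resp.core)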
+
+    def CreateFakeOmaha(self, request, context):
+        """CreateFakeOmaha starts a fake Omaha service on TLS and exposes the
+    listened port to the DUT.
+    """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def DeleteFakeOmaha(self, request, context):
+        """DeleteFakeOmaha deletes the specified fake Omaha resource created by
+    CreateFakeOmaha.
+    """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_CommonServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+        'ExecDutCommand': grpc.unary_stream_rpc_method_handler(
+            servicer.ExecDutCommand,
+            request_deserializer=autotest__common__pb2.ExecDutCommandRequest.FromString,
+            response_serializer=autotest__common__pb2.ExecDutCommandResponse.SerializeToString,
+        ),
+        'ProvisionDut': grpc.unary_unary_rpc_method_handler(
+            servicer.ProvisionDut,
+            request_deserializer=autotest__common__pb2.ProvisionDutRequest.FromString,
+            response_serializer=dependencies_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+        ),
+        'ProvisionLacros': grpc.unary_unary_rpc_method_handler(
+            servicer.ProvisionLacros,
+            request_deserializer=autotest__common__pb2.ProvisionLacrosRequest.FromString,
+            response_serializer=dependencies_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+        ),
+        'FetchCrashes': grpc.unary_stream_rpc_method_handler(
+            servicer.FetchCrashes,
+            request_deserializer=autotest__common__pb2.FetchCrashesRequest.FromString,
+            response_serializer=autotest__common__pb2.FetchCrashesResponse.SerializeToString,
+        ),
+        'CreateFakeOmaha': grpc.unary_unary_rpc_method_handler(
+            servicer.CreateFakeOmaha,
+            request_deserializer=autotest__common__pb2.CreateFakeOmahaRequest.FromString,
+            response_serializer=autotest__common__pb2.FakeOmaha.SerializeToString,
+        ),
+        'DeleteFakeOmaha': grpc.unary_unary_rpc_method_handler(
+            servicer.DeleteFakeOmaha,
+            request_deserializer=autotest__common__pb2.DeleteFakeOmahaRequest.FromString,
+            response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+        ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+        'chromiumos.config.api.test.tls.Common', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
diff --git a/server/hosts/tls_client/buildprotos.py b/server/hosts/tls_client/buildprotos.py
new file mode 100644
index 0000000..3f8bd61
--- /dev/null
+++ b/server/hosts/tls_client/buildprotos.py
@@ -0,0 +1,88 @@
+"""Deletes the existing bindings, then rebuild using the source .proto file."""
+
+import os
+from shutil import copyfile
+
+UP = '../'
+PROTO_PATH = 'src/config/proto/chromiumos/config/api/test/tls/'
+PROTO_NAME = 'commontls.proto'
+DEST_PROTO_NAME = 'autotest_common.proto'
+DEP_PROTO_RELATIVE_PATH = 'dependencies/longrunning/'
+DEP_PROTO_NAME = 'operations.proto'
+
+BUILD_CMD = (
+        'python -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. {} {}'
+        .format(DEST_PROTO_NAME,
+                os.path.join(DEP_PROTO_RELATIVE_PATH, DEP_PROTO_NAME)))
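+# With the constants above, BUILD_CMD expands to:
+#   python -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. \
+#       autotest_common.proto dependencies/longrunning/operations.proto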
+
+
+def delete_old_protos():
+    """Delete any existing protos or built proto bindings."""
+    for file in os.listdir('.'):
+        if 'autotest_common' in file:
+            os.remove(file)
+
+    for file in os.listdir(DEP_PROTO_RELATIVE_PATH):
+        if 'operations' in file:
+            os.remove(os.path.join(DEP_PROTO_RELATIVE_PATH, file))
+
+
+def copy_proto_from_src():
+    """Copy the proto from the src dirs to the local dir."""
+    copy_list = [(get_proto_path(), DEST_PROTO_NAME),
+                 (get_proto_deps_dir(),
+                  os.path.join(DEP_PROTO_RELATIVE_PATH, DEP_PROTO_NAME))]
+
+    for src, dest in copy_list:
+        if os.path.isfile(src):
+            copyfile(src, dest)
+        else:
+            raise Exception('Proto missing at %s' % src)
+
+
+def get_proto_path():
+    """Return the full path of the commontls.proto from TLS."""
+    return os.path.join(UP * get_current_depth(), PROTO_PATH, PROTO_NAME)
+
+
+def get_proto_deps_dir():
+    """Return the full path of the operations.proto from TLS."""
+    return os.path.join(UP * get_current_depth(), PROTO_PATH,
+                        DEP_PROTO_RELATIVE_PATH, DEP_PROTO_NAME)
+
+
+def get_current_depth():
+    """Return the current depth off /src/ within the file structure."""
+    dirs = os.getcwd().split('/')
+    src_level = dirs.index('src')
+    return len(dirs) - src_level
+
+
+def modify_proto():
+    """Change the full path for the dependencies for a local one."""
+    # This is likely a dirty hack, but compiling with the full src in autotest
+    # doesn't work. Open to suggestions for alternatives.
+
+    # TODO (dbeckett@) b/183220746: work out a better approach that isn't a hack.
+    with open(DEST_PROTO_NAME, 'r+') as f:
+        original = f.read()
+    new = original.replace(
+            'import "chromiumos/config/api/test/tls/dependencies/longrunning/operations.proto";',
+            'import "dependencies/longrunning/operations.proto";')
+    with open(DEST_PROTO_NAME, 'w') as wf:
+        wf.write(new)
+
+
+def create_bindings():
+    """Run protoc to generate the Python and gRPC bindings."""
+    os.system(BUILD_CMD)
+
+
+def main():
+    delete_old_protos()
+    copy_proto_from_src()
+    modify_proto()
+    create_bindings()
+
+
+if __name__ == "__main__":
+    main()
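+
+# Typical usage (assumed): run this script from server/hosts/tls_client inside
+# a full ChromeOS checkout, since get_current_depth() relies on a 'src'
+# ancestor directory, with grpc_tools installed:
+#
+#   python buildprotos.py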
diff --git a/client/tests/wb_kupdate/common.py b/server/hosts/tls_client/common.py
similarity index 100%
copy from client/tests/wb_kupdate/common.py
copy to server/hosts/tls_client/common.py
diff --git a/server/hosts/tls_client/connection.py b/server/hosts/tls_client/connection.py
new file mode 100644
index 0000000..a847f81
--- /dev/null
+++ b/server/hosts/tls_client/connection.py
@@ -0,0 +1,35 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Abstract Client for Autotest side communications to the TLS Server."""
+
+import grpc
+
+import common
+
+from autotest_lib.server.hosts.tls_client import autotest_common_pb2_grpc
+
+TLS_PORT = 7152
+TLS_IP = '10.254.254.254'
+
+
+class TLSConnection(object):
+    """The client side connection to Common-TLS service running in a drone."""
+
+    def __init__(self):
+        """Configure the grpc channel."""
+        self.channel = grpc.insecure_channel('{}:{}'.format(TLS_IP, TLS_PORT))
+        self.stub = autotest_common_pb2_grpc.CommonStub(self.channel)
+        self.alive = True
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *exc):
+        self.close()
+
+    def close(self):
+        """Close the grpc channel."""
+        self.channel.close()
+        self.alive = False
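+
+# Illustrative usage sketch (assumptions: the ExecDutCommandRequest field values
+# and the trace id are placeholders; see autotest_common.proto for the
+# authoritative message definitions):
+#
+#   from autotest_lib.server.hosts.tls_client import autotest_common_pb2
+#
+#   with TLSConnection() as conn:
+#       request = autotest_common_pb2.ExecDutCommandRequest(...)  # fill in per the proto
+#       responses = conn.stub.ExecDutCommand(
+#               request, metadata=(('request_trace_id', 'some-unique-id'),))
+#       for response in responses:  # ExecDutCommand is a server-streaming RPC
+#           pass  # inspect streamed fields such as response.exit_info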
diff --git a/server/hosts/drone_api_client/__init__.py b/server/hosts/tls_client/dependencies/__init__.py
similarity index 100%
copy from server/hosts/drone_api_client/__init__.py
copy to server/hosts/tls_client/dependencies/__init__.py
diff --git a/server/hosts/tls_client/dependencies/common.py b/server/hosts/tls_client/dependencies/common.py
new file mode 100644
index 0000000..9c2b102
--- /dev/null
+++ b/server/hosts/tls_client/dependencies/common.py
@@ -0,0 +1,8 @@
+import os, sys
+dirname = os.path.dirname(sys.modules[__name__].__file__)
+autotest_dir = os.path.abspath(os.path.join(dirname, "..", "..", "..", ".."))
+client_dir = os.path.join(autotest_dir, "client")
+sys.path.insert(0, client_dir)
+import setup_modules
+sys.path.pop(0)
+setup_modules.setup(base_path=autotest_dir, root_module_name="autotest_lib")
diff --git a/server/hosts/drone_api_client/__init__.py b/server/hosts/tls_client/dependencies/longrunning/__init__.py
similarity index 100%
copy from server/hosts/drone_api_client/__init__.py
copy to server/hosts/tls_client/dependencies/longrunning/__init__.py
diff --git a/server/hosts/tls_client/dependencies/longrunning/common.py b/server/hosts/tls_client/dependencies/longrunning/common.py
new file mode 100644
index 0000000..f4dce84
--- /dev/null
+++ b/server/hosts/tls_client/dependencies/longrunning/common.py
@@ -0,0 +1,9 @@
+import os, sys
+dirname = os.path.dirname(sys.modules[__name__].__file__)
+autotest_dir = os.path.abspath(
+        os.path.join(dirname, "..", "..", "..", "..", ".."))
+client_dir = os.path.join(autotest_dir, "client")
+sys.path.insert(0, client_dir)
+import setup_modules
+sys.path.pop(0)
+setup_modules.setup(base_path=autotest_dir, root_module_name="autotest_lib")
diff --git a/server/hosts/tls_client/dependencies/longrunning/operations.proto b/server/hosts/tls_client/dependencies/longrunning/operations.proto
new file mode 100644
index 0000000..1e5ddf1
--- /dev/null
+++ b/server/hosts/tls_client/dependencies/longrunning/operations.proto
@@ -0,0 +1,281 @@
+// Copyright 2016 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+
+package google.longrunning;
+
+import "google/protobuf/any.proto";
+import "google/protobuf/descriptor.proto";
+import "google/protobuf/duration.proto";
+import "google/protobuf/empty.proto";
+
+option go_package = "go.chromium.org/chromiumos/config/go/api/test/tls/dependencies/longrunning";
+
+
+// Manages long-running operations with an API service.
+//
+// When an API method normally takes a long time to complete, it can be designed
+// to return [Operation][google.longrunning.Operation] to the client, and the client can use this
+// interface to receive the real response asynchronously by polling the
+// operation resource, or pass the operation resource to another API (such as
+// Google Cloud Pub/Sub API) to receive the response.  Any API service that
+// returns long-running operations should implement the `Operations` interface
+// so developers can have a consistent client experience.
+service Operations {
+  // Lists operations that match the specified filter in the request. If the
+  // server doesn't support this method, it returns `UNIMPLEMENTED`.
+  //
+  // NOTE: the `name` binding below allows API services to override the binding
+  // to use different resource name schemes, such as `users/*/operations`.
+  rpc ListOperations(ListOperationsRequest) returns (ListOperationsResponse) {
+  }
+
+  // Gets the latest state of a long-running operation.  Clients can use this
+  // method to poll the operation result at intervals as recommended by the API
+  // service.
+  rpc GetOperation(GetOperationRequest) returns (Operation) {
+  }
+
+  // Deletes a long-running operation. This method indicates that the client is
+  // no longer interested in the operation result. It does not cancel the
+  // operation. If the server doesn't support this method, it returns
+  // `google.rpc.Code.UNIMPLEMENTED`.
+  rpc DeleteOperation(DeleteOperationRequest) returns (google.protobuf.Empty) {
+  }
+
+  // Starts asynchronous cancellation on a long-running operation.  The server
+  // makes a best effort to cancel the operation, but success is not
+  // guaranteed.  If the server doesn't support this method, it returns
+  // `google.rpc.Code.UNIMPLEMENTED`.  Clients can use
+  // [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
+  // other methods to check whether the cancellation succeeded or whether the
+  // operation completed despite cancellation. On successful cancellation,
+  // the operation is not deleted; instead, it becomes an operation with
+  // an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+  // corresponding to `Code.CANCELLED`.
+  rpc CancelOperation(CancelOperationRequest) returns (google.protobuf.Empty) {
+  }
+  // Waits for the specified long-running operation until it is done or reaches
+  // at most a specified timeout, returning the latest state.  If the operation
+  // is already done, the latest state is immediately returned.  If the timeout
+  // specified is greater than the default HTTP/RPC timeout, the HTTP/RPC
+  // timeout is used.  If the server does not support this method, it returns
+  // `google.rpc.Code.UNIMPLEMENTED`.
+  // Note that this method is on a best-effort basis.  It may return the latest
+  // state before the specified timeout (including immediately), meaning even an
+  // immediate response is no guarantee that the operation is done.
+  rpc WaitOperation(WaitOperationRequest) returns (Operation) {
+  }
+}
+
+// This resource represents a long-running operation that is the result of a
+// network API call.
+message Operation {
+  // The server-assigned name, which is only unique within the same service that
+  // originally returns it. If you use the default HTTP mapping, the
+  // `name` should have the format of `operations/some/unique/name`.
+  string name = 1;
+
+  // Service-specific metadata associated with the operation.  It typically
+  // contains progress information and common metadata such as create time.
+  // Some services might not provide such metadata.  Any method that returns a
+  // long-running operation should document the metadata type, if any.
+  google.protobuf.Any metadata = 2;
+
+  // If the value is `false`, it means the operation is still in progress.
+  // If true, the operation is completed, and either `error` or `response` is
+  // available.
+  bool done = 3;
+
+  // The operation result, which can be either an `error` or a valid `response`.
+  // If `done` == `false`, neither `error` nor `response` is set.
+  // If `done` == `true`, exactly one of `error` or `response` is set.
+  oneof result {
+    // The error result of the operation in case of failure or cancellation.
+    Status error = 4;
+
+    // The normal response of the operation in case of success.  If the original
+    // method returns no data on success, such as `Delete`, the response is
+    // `google.protobuf.Empty`.  If the original method is standard
+    // `Get`/`Create`/`Update`, the response should be the resource.  For other
+    // methods, the response should have the type `XxxResponse`, where `Xxx`
+    // is the original method name.  For example, if the original method name
+    // is `TakeSnapshot()`, the inferred response type is
+    // `TakeSnapshotResponse`.
+    google.protobuf.Any response = 5;
+  }
+}
+
+// The request message for [Operations.GetOperation][google.longrunning.Operations.GetOperation].
+message GetOperationRequest {
+  // The name of the operation resource.
+  string name = 1;
+}
+
+// The request message for [Operations.ListOperations][google.longrunning.Operations.ListOperations].
+message ListOperationsRequest {
+  // The name of the operation collection.
+  string name = 4;
+
+  // The standard list filter.
+  string filter = 1;
+
+  // The standard list page size.
+  int32 page_size = 2;
+
+  // The standard list page token.
+  string page_token = 3;
+}
+
+// The response message for [Operations.ListOperations][google.longrunning.Operations.ListOperations].
+message ListOperationsResponse {
+  // A list of operations that matches the specified filter in the request.
+  repeated Operation operations = 1;
+
+  // The standard List next-page token.
+  string next_page_token = 2;
+}
+
+// The request message for [Operations.CancelOperation][google.longrunning.Operations.CancelOperation].
+message CancelOperationRequest {
+  // The name of the operation resource to be cancelled.
+  string name = 1;
+}
+
+// The request message for [Operations.DeleteOperation][google.longrunning.Operations.DeleteOperation].
+message DeleteOperationRequest {
+  // The name of the operation resource to be deleted.
+  string name = 1;
+}
+
+// The request message for [Operations.WaitOperation][google.longrunning.Operations.WaitOperation].
+message WaitOperationRequest {
+  // The name of the operation resource to wait on.
+  string name = 1;
+
+  // The maximum duration to wait before timing out. If left blank, the wait
+  // will be at most the time permitted by the underlying HTTP/RPC protocol.
+  // If RPC context deadline is also specified, the shorter one will be used.
+  google.protobuf.Duration timeout = 2;
+}
+
+// A message representing the message types used by a long-running operation.
+//
+// Example:
+//
+//   rpc LongRunningRecognize(LongRunningRecognizeRequest)
+//       returns (google.longrunning.Operation) {
+//     option (google.longrunning.operation_info) = {
+//       response_type: "LongRunningRecognizeResponse"
+//       metadata_type: "LongRunningRecognizeMetadata"
+//     };
+//   }
+message OperationInfo {
+  // Required. The message name of the primary return type for this
+  // long-running operation.
+  // This type will be used to deserialize the LRO's response.
+  //
+  // If the response is in a different package from the rpc, a fully-qualified
+  // message name must be used (e.g. `google.protobuf.Struct`).
+  //
+  // Note: Altering this value constitutes a breaking change.
+  string response_type = 1;
+
+  // Required. The message name of the metadata type for this long-running
+  // operation.
+  //
+  // If the response is in a different package from the rpc, a fully-qualified
+  // message name must be used (e.g. `google.protobuf.Struct`).
+  //
+  // Note: Altering this value constitutes a breaking change.
+  string metadata_type = 2;
+}
+
+extend google.protobuf.MethodOptions {
+  // Additional information regarding long-running operations.
+  // In particular, this specifies the types that are returned from
+  // long-running operations.
+  //
+  // Required for methods that return `google.longrunning.Operation`; invalid
+  // otherwise.
+  OperationInfo operation_info = 1049;
+}
+
+// The `Status` type defines a logical error model that is suitable for different
+// programming environments, including REST APIs and RPC APIs. It is used by
+// [gRPC](https://github.com/grpc). The error model is designed to be:
+//
+// - Simple to use and understand for most users
+// - Flexible enough to meet unexpected needs
+//
+// # Overview
+//
+// The `Status` message contains three pieces of data: error code, error message,
+// and error details. The error code should be an enum value of
+// [google.rpc.Code][google.rpc.Code], but it may accept additional error codes if needed.  The
+// error message should be a developer-facing English message that helps
+// developers *understand* and *resolve* the error. If a localized user-facing
+// error message is needed, put the localized message in the error details or
+// localize it in the client. The optional error details may contain arbitrary
+// information about the error. There is a predefined set of error detail types
+// in the package `google.rpc` that can be used for common error conditions.
+//
+// # Language mapping
+//
+// The `Status` message is the logical representation of the error model, but it
+// is not necessarily the actual wire format. When the `Status` message is
+// exposed in different client libraries and different wire protocols, it can be
+// mapped differently. For example, it will likely be mapped to some exceptions
+// in Java, but more likely mapped to some error codes in C.
+//
+// # Other uses
+//
+// The error model and the `Status` message can be used in a variety of
+// environments, either with or without APIs, to provide a
+// consistent developer experience across different environments.
+//
+// Example uses of this error model include:
+//
+// - Partial errors. If a service needs to return partial errors to the client,
+//     it may embed the `Status` in the normal response to indicate the partial
+//     errors.
+//
+// - Workflow errors. A typical workflow has multiple steps. Each step may
+//     have a `Status` message for error reporting.
+//
+// - Batch operations. If a client uses batch request and batch response, the
+//     `Status` message should be used directly inside batch response, one for
+//     each error sub-response.
+//
+// - Asynchronous operations. If an API call embeds asynchronous operation
+//     results in its response, the status of those operations should be
+//     represented directly using the `Status` message.
+//
+// - Logging. If some API errors are stored in logs, the message `Status` could
+//     be used directly after any stripping needed for security/privacy reasons.
+message Status {
+  // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code].
+  int32 code = 1;
+
+  // A developer-facing error message, which should be in English. Any
+  // user-facing error message should be localized and sent in the
+  // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client.
+  string message = 2;
+
+  // A list of messages that carry the error details.  There is a common set of
+  // message types for APIs to use.
+  repeated google.protobuf.Any details = 3;
+}
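
The `Operations` service and `Operation` message above follow the standard long-running-operations pattern: a method that kicks off slow work returns an `Operation`, and the client later polls or waits on it by name. As a rough illustration of how the generated Python bindings added below can be driven, the sketch assumes an already-established `grpc` channel and that the `dependencies` package is importable; the operation name and timeout are placeholders, not values used anywhere in this change.

import grpc
from google.protobuf import duration_pb2

from dependencies.longrunning import operations_pb2
from dependencies.longrunning import operations_pb2_grpc


def wait_for_operation(channel, op_name, timeout_sec=60):
    """Wait for the named operation and return its response, if it finished."""
    stub = operations_pb2_grpc.OperationsStub(channel)
    request = operations_pb2.WaitOperationRequest(
            name=op_name,
            timeout=duration_pb2.Duration(seconds=timeout_sec))
    op = stub.WaitOperation(request)
    if not op.done:
        # WaitOperation is best-effort; the operation may still be running.
        return None
    if op.HasField('error'):
        raise RuntimeError('Operation %s failed: %s' % (op_name, op.error.message))
    return op.response  # a google.protobuf.Any; Unpack() into the documented response_type.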
diff --git a/server/hosts/tls_client/dependencies/longrunning/operations_pb2.py b/server/hosts/tls_client/dependencies/longrunning/operations_pb2.py
new file mode 100644
index 0000000..bb465f0
--- /dev/null
+++ b/server/hosts/tls_client/dependencies/longrunning/operations_pb2.py
@@ -0,0 +1,556 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: dependencies/longrunning/operations.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='dependencies/longrunning/operations.proto',
+  package='google.longrunning',
+  syntax='proto3',
+  serialized_options=b'ZJgo.chromium.org/chromiumos/config/go/api/test/tls/dependencies/longrunning',
+  serialized_pb=b'\n)dependencies/longrunning/operations.proto\x12\x12google.longrunning\x1a\x19google/protobuf/any.proto\x1a google/protobuf/descriptor.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\"\xb0\x01\n\tOperation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x08metadata\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\x12+\n\x05\x65rror\x18\x04 \x01(\x0b\x32\x1a.google.longrunning.StatusH\x00\x12(\n\x08response\x18\x05 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x42\x08\n\x06result\"#\n\x13GetOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\\\n\x15ListOperationsRequest\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"d\n\x16ListOperationsResponse\x12\x31\n\noperations\x18\x01 \x03(\x0b\x32\x1d.google.longrunning.Operation\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"&\n\x16\x43\x61ncelOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"&\n\x16\x44\x65leteOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"P\n\x14WaitOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12*\n\x07timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\"=\n\rOperationInfo\x12\x15\n\rresponse_type\x18\x01 \x01(\t\x12\x15\n\rmetadata_type\x18\x02 \x01(\t\"N\n\x06Status\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\t\x12%\n\x07\x64\x65tails\x18\x03 \x03(\x0b\x32\x14.google.protobuf.Any2\xdf\x03\n\nOperations\x12i\n\x0eListOperations\x12).google.longrunning.ListOperationsRequest\x1a*.google.longrunning.ListOperationsResponse\"\x00\x12X\n\x0cGetOperation\x12\'.google.longrunning.GetOperationRequest\x1a\x1d.google.longrunning.Operation\"\x00\x12W\n\x0f\x44\x65leteOperation\x12*.google.longrunning.DeleteOperationRequest\x1a\x16.google.protobuf.Empty\"\x00\x12W\n\x0f\x43\x61ncelOperation\x12*.google.longrunning.CancelOperationRequest\x1a\x16.google.protobuf.Empty\"\x00\x12Z\n\rWaitOperation\x12(.google.longrunning.WaitOperationRequest\x1a\x1d.google.longrunning.Operation\"\x00:Z\n\x0eoperation_info\x12\x1e.google.protobuf.MethodOptions\x18\x99\x08 \x01(\x0b\x32!.google.longrunning.OperationInfoBLZJgo.chromium.org/chromiumos/config/go/api/test/tls/dependencies/longrunningb\x06proto3'
+  ,
+  dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
+
+
+OPERATION_INFO_FIELD_NUMBER = 1049
+operation_info = _descriptor.FieldDescriptor(
+  name='operation_info', full_name='google.longrunning.operation_info', index=0,
+  number=1049, type=11, cpp_type=10, label=1,
+  has_default_value=False, default_value=None,
+  message_type=None, enum_type=None, containing_type=None,
+  is_extension=True, extension_scope=None,
+  serialized_options=None, file=DESCRIPTOR)
+
+
+_OPERATION = _descriptor.Descriptor(
+  name='Operation',
+  full_name='google.longrunning.Operation',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.longrunning.Operation.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='metadata', full_name='google.longrunning.Operation.metadata', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='done', full_name='google.longrunning.Operation.done', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='error', full_name='google.longrunning.Operation.error', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='response', full_name='google.longrunning.Operation.response', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='result', full_name='google.longrunning.Operation.result',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=188,
+  serialized_end=364,
+)
+
+
+_GETOPERATIONREQUEST = _descriptor.Descriptor(
+  name='GetOperationRequest',
+  full_name='google.longrunning.GetOperationRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.longrunning.GetOperationRequest.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=366,
+  serialized_end=401,
+)
+
+
+_LISTOPERATIONSREQUEST = _descriptor.Descriptor(
+  name='ListOperationsRequest',
+  full_name='google.longrunning.ListOperationsRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.longrunning.ListOperationsRequest.name', index=0,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='filter', full_name='google.longrunning.ListOperationsRequest.filter', index=1,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='page_size', full_name='google.longrunning.ListOperationsRequest.page_size', index=2,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='page_token', full_name='google.longrunning.ListOperationsRequest.page_token', index=3,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=403,
+  serialized_end=495,
+)
+
+
+_LISTOPERATIONSRESPONSE = _descriptor.Descriptor(
+  name='ListOperationsResponse',
+  full_name='google.longrunning.ListOperationsResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='operations', full_name='google.longrunning.ListOperationsResponse.operations', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='next_page_token', full_name='google.longrunning.ListOperationsResponse.next_page_token', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=497,
+  serialized_end=597,
+)
+
+
+_CANCELOPERATIONREQUEST = _descriptor.Descriptor(
+  name='CancelOperationRequest',
+  full_name='google.longrunning.CancelOperationRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.longrunning.CancelOperationRequest.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=599,
+  serialized_end=637,
+)
+
+
+_DELETEOPERATIONREQUEST = _descriptor.Descriptor(
+  name='DeleteOperationRequest',
+  full_name='google.longrunning.DeleteOperationRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.longrunning.DeleteOperationRequest.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=639,
+  serialized_end=677,
+)
+
+
+_WAITOPERATIONREQUEST = _descriptor.Descriptor(
+  name='WaitOperationRequest',
+  full_name='google.longrunning.WaitOperationRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.longrunning.WaitOperationRequest.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='timeout', full_name='google.longrunning.WaitOperationRequest.timeout', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=679,
+  serialized_end=759,
+)
+
+
+_OPERATIONINFO = _descriptor.Descriptor(
+  name='OperationInfo',
+  full_name='google.longrunning.OperationInfo',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='response_type', full_name='google.longrunning.OperationInfo.response_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='metadata_type', full_name='google.longrunning.OperationInfo.metadata_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=761,
+  serialized_end=822,
+)
+
+
+_STATUS = _descriptor.Descriptor(
+  name='Status',
+  full_name='google.longrunning.Status',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='code', full_name='google.longrunning.Status.code', index=0,
+      number=1, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='message', full_name='google.longrunning.Status.message', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='details', full_name='google.longrunning.Status.details', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=824,
+  serialized_end=902,
+)
+
+_OPERATION.fields_by_name['metadata'].message_type = google_dot_protobuf_dot_any__pb2._ANY
+_OPERATION.fields_by_name['error'].message_type = _STATUS
+_OPERATION.fields_by_name['response'].message_type = google_dot_protobuf_dot_any__pb2._ANY
+_OPERATION.oneofs_by_name['result'].fields.append(
+  _OPERATION.fields_by_name['error'])
+_OPERATION.fields_by_name['error'].containing_oneof = _OPERATION.oneofs_by_name['result']
+_OPERATION.oneofs_by_name['result'].fields.append(
+  _OPERATION.fields_by_name['response'])
+_OPERATION.fields_by_name['response'].containing_oneof = _OPERATION.oneofs_by_name['result']
+_LISTOPERATIONSRESPONSE.fields_by_name['operations'].message_type = _OPERATION
+_WAITOPERATIONREQUEST.fields_by_name['timeout'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
+_STATUS.fields_by_name['details'].message_type = google_dot_protobuf_dot_any__pb2._ANY
+DESCRIPTOR.message_types_by_name['Operation'] = _OPERATION
+DESCRIPTOR.message_types_by_name['GetOperationRequest'] = _GETOPERATIONREQUEST
+DESCRIPTOR.message_types_by_name['ListOperationsRequest'] = _LISTOPERATIONSREQUEST
+DESCRIPTOR.message_types_by_name['ListOperationsResponse'] = _LISTOPERATIONSRESPONSE
+DESCRIPTOR.message_types_by_name['CancelOperationRequest'] = _CANCELOPERATIONREQUEST
+DESCRIPTOR.message_types_by_name['DeleteOperationRequest'] = _DELETEOPERATIONREQUEST
+DESCRIPTOR.message_types_by_name['WaitOperationRequest'] = _WAITOPERATIONREQUEST
+DESCRIPTOR.message_types_by_name['OperationInfo'] = _OPERATIONINFO
+DESCRIPTOR.message_types_by_name['Status'] = _STATUS
+DESCRIPTOR.extensions_by_name['operation_info'] = operation_info
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), {
+  'DESCRIPTOR' : _OPERATION,
+  '__module__' : 'dependencies.longrunning.operations_pb2'
+  # @@protoc_insertion_point(class_scope:google.longrunning.Operation)
+  })
+_sym_db.RegisterMessage(Operation)
+
+GetOperationRequest = _reflection.GeneratedProtocolMessageType('GetOperationRequest', (_message.Message,), {
+  'DESCRIPTOR' : _GETOPERATIONREQUEST,
+  '__module__' : 'dependencies.longrunning.operations_pb2'
+  # @@protoc_insertion_point(class_scope:google.longrunning.GetOperationRequest)
+  })
+_sym_db.RegisterMessage(GetOperationRequest)
+
+ListOperationsRequest = _reflection.GeneratedProtocolMessageType('ListOperationsRequest', (_message.Message,), {
+  'DESCRIPTOR' : _LISTOPERATIONSREQUEST,
+  '__module__' : 'dependencies.longrunning.operations_pb2'
+  # @@protoc_insertion_point(class_scope:google.longrunning.ListOperationsRequest)
+  })
+_sym_db.RegisterMessage(ListOperationsRequest)
+
+ListOperationsResponse = _reflection.GeneratedProtocolMessageType('ListOperationsResponse', (_message.Message,), {
+  'DESCRIPTOR' : _LISTOPERATIONSRESPONSE,
+  '__module__' : 'dependencies.longrunning.operations_pb2'
+  # @@protoc_insertion_point(class_scope:google.longrunning.ListOperationsResponse)
+  })
+_sym_db.RegisterMessage(ListOperationsResponse)
+
+CancelOperationRequest = _reflection.GeneratedProtocolMessageType('CancelOperationRequest', (_message.Message,), {
+  'DESCRIPTOR' : _CANCELOPERATIONREQUEST,
+  '__module__' : 'dependencies.longrunning.operations_pb2'
+  # @@protoc_insertion_point(class_scope:google.longrunning.CancelOperationRequest)
+  })
+_sym_db.RegisterMessage(CancelOperationRequest)
+
+DeleteOperationRequest = _reflection.GeneratedProtocolMessageType('DeleteOperationRequest', (_message.Message,), {
+  'DESCRIPTOR' : _DELETEOPERATIONREQUEST,
+  '__module__' : 'dependencies.longrunning.operations_pb2'
+  # @@protoc_insertion_point(class_scope:google.longrunning.DeleteOperationRequest)
+  })
+_sym_db.RegisterMessage(DeleteOperationRequest)
+
+WaitOperationRequest = _reflection.GeneratedProtocolMessageType('WaitOperationRequest', (_message.Message,), {
+  'DESCRIPTOR' : _WAITOPERATIONREQUEST,
+  '__module__' : 'dependencies.longrunning.operations_pb2'
+  # @@protoc_insertion_point(class_scope:google.longrunning.WaitOperationRequest)
+  })
+_sym_db.RegisterMessage(WaitOperationRequest)
+
+OperationInfo = _reflection.GeneratedProtocolMessageType('OperationInfo', (_message.Message,), {
+  'DESCRIPTOR' : _OPERATIONINFO,
+  '__module__' : 'dependencies.longrunning.operations_pb2'
+  # @@protoc_insertion_point(class_scope:google.longrunning.OperationInfo)
+  })
+_sym_db.RegisterMessage(OperationInfo)
+
+Status = _reflection.GeneratedProtocolMessageType('Status', (_message.Message,), {
+  'DESCRIPTOR' : _STATUS,
+  '__module__' : 'dependencies.longrunning.operations_pb2'
+  # @@protoc_insertion_point(class_scope:google.longrunning.Status)
+  })
+_sym_db.RegisterMessage(Status)
+
+operation_info.message_type = _OPERATIONINFO
+google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(operation_info)
+
+DESCRIPTOR._options = None
+
+_OPERATIONS = _descriptor.ServiceDescriptor(
+  name='Operations',
+  full_name='google.longrunning.Operations',
+  file=DESCRIPTOR,
+  index=0,
+  serialized_options=None,
+  serialized_start=905,
+  serialized_end=1384,
+  methods=[
+  _descriptor.MethodDescriptor(
+    name='ListOperations',
+    full_name='google.longrunning.Operations.ListOperations',
+    index=0,
+    containing_service=None,
+    input_type=_LISTOPERATIONSREQUEST,
+    output_type=_LISTOPERATIONSRESPONSE,
+    serialized_options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetOperation',
+    full_name='google.longrunning.Operations.GetOperation',
+    index=1,
+    containing_service=None,
+    input_type=_GETOPERATIONREQUEST,
+    output_type=_OPERATION,
+    serialized_options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='DeleteOperation',
+    full_name='google.longrunning.Operations.DeleteOperation',
+    index=2,
+    containing_service=None,
+    input_type=_DELETEOPERATIONREQUEST,
+    output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
+    serialized_options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='CancelOperation',
+    full_name='google.longrunning.Operations.CancelOperation',
+    index=3,
+    containing_service=None,
+    input_type=_CANCELOPERATIONREQUEST,
+    output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
+    serialized_options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='WaitOperation',
+    full_name='google.longrunning.Operations.WaitOperation',
+    index=4,
+    containing_service=None,
+    input_type=_WAITOPERATIONREQUEST,
+    output_type=_OPERATION,
+    serialized_options=None,
+  ),
+])
+_sym_db.RegisterServiceDescriptor(_OPERATIONS)
+
+DESCRIPTOR.services_by_name['Operations'] = _OPERATIONS
+
+# @@protoc_insertion_point(module_scope)
diff --git a/server/hosts/tls_client/dependencies/longrunning/operations_pb2_grpc.py b/server/hosts/tls_client/dependencies/longrunning/operations_pb2_grpc.py
new file mode 100644
index 0000000..cd7858f
--- /dev/null
+++ b/server/hosts/tls_client/dependencies/longrunning/operations_pb2_grpc.py
@@ -0,0 +1,157 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
+
+from dependencies.longrunning import operations_pb2 as dependencies_dot_longrunning_dot_operations__pb2
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+
+
+class OperationsStub(object):
+  """Manages long-running operations with an API service.
+
+  When an API method normally takes long time to complete, it can be designed
+  to return [Operation][google.longrunning.Operation] to the client, and the client can use this
+  interface to receive the real response asynchronously by polling the
+  operation resource, or pass the operation resource to another API (such as
+  Google Cloud Pub/Sub API) to receive the response.  Any API service that
+  returns long-running operations should implement the `Operations` interface
+  so developers can have a consistent client experience.
+  """
+
+  def __init__(self, channel):
+    """Constructor.
+
+    Args:
+      channel: A grpc.Channel.
+    """
+    self.ListOperations = channel.unary_unary(
+        '/google.longrunning.Operations/ListOperations',
+        request_serializer=dependencies_dot_longrunning_dot_operations__pb2.ListOperationsRequest.SerializeToString,
+        response_deserializer=dependencies_dot_longrunning_dot_operations__pb2.ListOperationsResponse.FromString,
+        )
+    self.GetOperation = channel.unary_unary(
+        '/google.longrunning.Operations/GetOperation',
+        request_serializer=dependencies_dot_longrunning_dot_operations__pb2.GetOperationRequest.SerializeToString,
+        response_deserializer=dependencies_dot_longrunning_dot_operations__pb2.Operation.FromString,
+        )
+    self.DeleteOperation = channel.unary_unary(
+        '/google.longrunning.Operations/DeleteOperation',
+        request_serializer=dependencies_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.SerializeToString,
+        response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+        )
+    self.CancelOperation = channel.unary_unary(
+        '/google.longrunning.Operations/CancelOperation',
+        request_serializer=dependencies_dot_longrunning_dot_operations__pb2.CancelOperationRequest.SerializeToString,
+        response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+        )
+    self.WaitOperation = channel.unary_unary(
+        '/google.longrunning.Operations/WaitOperation',
+        request_serializer=dependencies_dot_longrunning_dot_operations__pb2.WaitOperationRequest.SerializeToString,
+        response_deserializer=dependencies_dot_longrunning_dot_operations__pb2.Operation.FromString,
+        )
+
+
+class OperationsServicer(object):
+  """Manages long-running operations with an API service.
+
+  When an API method normally takes long time to complete, it can be designed
+  to return [Operation][google.longrunning.Operation] to the client, and the client can use this
+  interface to receive the real response asynchronously by polling the
+  operation resource, or pass the operation resource to another API (such as
+  Google Cloud Pub/Sub API) to receive the response.  Any API service that
+  returns long-running operations should implement the `Operations` interface
+  so developers can have a consistent client experience.
+  """
+
+  def ListOperations(self, request, context):
+    """Lists operations that match the specified filter in the request. If the
+    server doesn't support this method, it returns `UNIMPLEMENTED`.
+
+    NOTE: the `name` binding below allows API services to override the binding
+    to use different resource name schemes, such as `users/*/operations`.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def GetOperation(self, request, context):
+    """Gets the latest state of a long-running operation.  Clients can use this
+    method to poll the operation result at intervals as recommended by the API
+    service.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def DeleteOperation(self, request, context):
+    """Deletes a long-running operation. This method indicates that the client is
+    no longer interested in the operation result. It does not cancel the
+    operation. If the server doesn't support this method, it returns
+    `google.rpc.Code.UNIMPLEMENTED`.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def CancelOperation(self, request, context):
+    """Starts asynchronous cancellation on a long-running operation.  The server
+    makes a best effort to cancel the operation, but success is not
+    guaranteed.  If the server doesn't support this method, it returns
+    `google.rpc.Code.UNIMPLEMENTED`.  Clients can use
+    [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
+    other methods to check whether the cancellation succeeded or whether the
+    operation completed despite cancellation. On successful cancellation,
+    the operation is not deleted; instead, it becomes an operation with
+    an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+    corresponding to `Code.CANCELLED`.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def WaitOperation(self, request, context):
+    """Waits for the specified long-running operation until it is done or reaches
+    at most a specified timeout, returning the latest state.  If the operation
+    is already done, the latest state is immediately returned.  If the timeout
+    specified is greater than the default HTTP/RPC timeout, the HTTP/RPC
+    timeout is used.  If the server does not support this method, it returns
+    `google.rpc.Code.UNIMPLEMENTED`.
+    Note that this method is on a best-effort basis.  It may return the latest
+    state before the specified timeout (including immediately), meaning even an
+    immediate response is no guarantee that the operation is done.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+
+def add_OperationsServicer_to_server(servicer, server):
+  rpc_method_handlers = {
+      'ListOperations': grpc.unary_unary_rpc_method_handler(
+          servicer.ListOperations,
+          request_deserializer=dependencies_dot_longrunning_dot_operations__pb2.ListOperationsRequest.FromString,
+          response_serializer=dependencies_dot_longrunning_dot_operations__pb2.ListOperationsResponse.SerializeToString,
+      ),
+      'GetOperation': grpc.unary_unary_rpc_method_handler(
+          servicer.GetOperation,
+          request_deserializer=dependencies_dot_longrunning_dot_operations__pb2.GetOperationRequest.FromString,
+          response_serializer=dependencies_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+      ),
+      'DeleteOperation': grpc.unary_unary_rpc_method_handler(
+          servicer.DeleteOperation,
+          request_deserializer=dependencies_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.FromString,
+          response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+      ),
+      'CancelOperation': grpc.unary_unary_rpc_method_handler(
+          servicer.CancelOperation,
+          request_deserializer=dependencies_dot_longrunning_dot_operations__pb2.CancelOperationRequest.FromString,
+          response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+      ),
+      'WaitOperation': grpc.unary_unary_rpc_method_handler(
+          servicer.WaitOperation,
+          request_deserializer=dependencies_dot_longrunning_dot_operations__pb2.WaitOperationRequest.FromString,
+          response_serializer=dependencies_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+      ),
+  }
+  generic_handler = grpc.method_handlers_generic_handler(
+      'google.longrunning.Operations', rpc_method_handlers)
+  server.add_generic_rpc_handlers((generic_handler,))
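
On the server side, `add_OperationsServicer_to_server` is the piece that wires a servicer implementation into a `grpc` server. The sketch below only illustrates that registration; the thread-pool size and port are arbitrary, and the no-op servicer inherits the default `UNIMPLEMENTED` behaviour, so a real service would override the individual RPC methods.

from concurrent import futures

import grpc

from dependencies.longrunning import operations_pb2_grpc


class _NoopOperations(operations_pb2_grpc.OperationsServicer):
    # Every RPC keeps the generated default, which responds with UNIMPLEMENTED.
    pass


def serve(port=50051):
    """Start a grpc server exposing the (unimplemented) Operations service."""
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    operations_pb2_grpc.add_OperationsServicer_to_server(_NoopOperations(), server)
    server.add_insecure_port('[::]:%d' % port)
    server.start()
    return server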
diff --git a/server/hosts/tls_client/exec_dut_command.py b/server/hosts/tls_client/exec_dut_command.py
new file mode 100644
index 0000000..f82588b
--- /dev/null
+++ b/server/hosts/tls_client/exec_dut_command.py
@@ -0,0 +1,109 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Autotest communcations to the Hosts (DUTs) via TLS ExecDutCommand."""
+
+import common
+import grpc
+import logging
+import six
+import time
+
+from autotest_lib.server.hosts.tls_client import autotest_common_pb2
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import utils
+
+
+class TLSExecDutCommandClient():
+    """Object for sending commands to a host, and getting the response."""
+
+    def __init__(self, tlsconnection, hostname):
+        """Configure the grpc channel."""
+        if tlsconnection.alive:
+            self.stub = tlsconnection.stub
+        else:
+            raise error.TLSConnectionError(
+                "TLS connection is not alive when try to creating"
+                " exec_dut_command client.")
+
+        self.hostname = hostname
+        self.tlsconnection = tlsconnection
+
+    def run_cmd(self,
+                cmd,
+                timeout=120,
+                stdout_tee=None,
+                stderr_tee=None,
+                ignore_timeout=False):
+        """
+        Run a command on the host configured during init.
+
+        @param cmd: shell command to execute on the DUT.
+        @param timeout: seconds to allow the command to run before
+            forcefully killing it.
+        @param stdout_tee/stderr_tee: objects to write the data from the
+            respective streams to.
+        @param ignore_timeout: if True, do not raise an error on timeouts.
+        """
+        if not self.tlsconnection.alive:
+            raise error.TLSConnectionError(
+                "TLS connection is not up when trying to run exec_dut_command.")
+        result = utils.CmdResult(command=cmd)
+        try:
+            self._run(cmd, stdout_tee, stderr_tee, result, timeout)
+        except grpc.RpcError as e:
+            if e.code().name == "DEADLINE_EXCEEDED":
+                if ignore_timeout:
+                    return None
+                raise error.CmdTimeoutError(
+                        cmd, result,
+                        "Command(s) did not complete within %d seconds" %
+                        timeout)
+            raise
+        return result
+
+    def _run(self, cmd, stdout_tee, stderr_tee, result, timeout):
+        """Run the provided cmd, populate the result in place."""
+        start_time = time.time()
+        response = self._send_cmd(cmd, timeout)
+
+        stdout_buf = six.StringIO()
+        stderr_buf = six.StringIO()
+        last_status = 0
+
+        if response:
+            for item in response:
+                last_status = item.exit_info.status
+                _log_item(item.stdout, stdout_buf, stdout_tee)
+                _log_item(item.stderr, stderr_buf, stderr_tee)
+
+        result.stdout = stdout_buf.getvalue()
+        result.stderr = stderr_buf.getvalue()
+        result.exit_status = last_status
+        result.duration = time.time() - start_time
+
+    def _send_cmd(self, cmd, timeout):
+        """Serialize and send the cmd to the TLS service."""
+        formatted_cmd = autotest_common_pb2.ExecDutCommandRequest(
+                name=self.hostname, command=cmd)
+        return self.stub.ExecDutCommand(formatted_cmd, timeout=timeout)
+
+
+def _log_item(item, buf, tee):
+    """
+    Log the provided item.
+
+    If the item exists, append it to the provided buffer and write it to
+        the provided tee, if one was given.
+
+    """
+    if not item:
+        return
+    # TODO dbeckett@ (crbug.com/990593), adjust this to be PY3 compatible.
+    buf.write(item)
+    if tee is not None and tee is not utils.TEE_TO_LOGS:
+        tee.write(item)
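
A typical caller of `TLSExecDutCommandClient` hands it an established TLS connection object exposing `.alive` and `.stub`, as the constructor requires. The fragment below is only a usage sketch under that assumption; the hostname and command are placeholders.

from autotest_lib.server.hosts.tls_client import exec_dut_command


def read_uptime(tls_conn, hostname='chromeos-dut-1'):
    """Return the DUT's /proc/uptime contents via the TLS ExecDutCommand API."""
    client = exec_dut_command.TLSExecDutCommandClient(tls_conn, hostname)
    # run_cmd() returns a utils.CmdResult with stdout/stderr/exit_status populated.
    result = client.run_cmd('cat /proc/uptime', timeout=30)
    return result.stdout.strip()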
diff --git a/server/hosts/tls_client/fake_omaha.py b/server/hosts/tls_client/fake_omaha.py
new file mode 100644
index 0000000..eb30d6f
--- /dev/null
+++ b/server/hosts/tls_client/fake_omaha.py
@@ -0,0 +1,108 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Communication with the TLS FakeOmaha Service."""
+
+import logging
+
+import common
+
+from autotest_lib.server.hosts.tls_client import autotest_common_pb2
+from autotest_lib.client.common_lib import error
+
+PAYLOAD_TYPE = {
+        'TYPE_UNSPECIFIED':
+        autotest_common_pb2.FakeOmaha.Payload.TYPE_UNSPECIFIED,
+        'FULL': autotest_common_pb2.FakeOmaha.Payload.FULL,
+        'DELTA': autotest_common_pb2.FakeOmaha.Payload.DELTA
+}
+
+
+class TLSFakeOmaha():
+    """Object for sending commands to a host, and getting the response."""
+
+    def __init__(self, tlsconnection):
+        """Configure the grpc channel."""
+        if tlsconnection.alive:
+            self.stub = tlsconnection.stub
+        else:
+            raise error.TLSConnectionError(
+                    "TLS connection is not alive when try to creating"
+                    " FakeOmaha client.")
+
+        self.tlsconnection = tlsconnection
+        self.fo_name = None
+
+    def _make_payloads(self, payloads):
+        """Serialize and return the list of payloads."""
+        serialized_payloads = []
+        for payload in payloads:
+            serialized_payloads.append(
+                    autotest_common_pb2.FakeOmaha.Payload(
+                            id=payload['payload_id'],
+                            type=PAYLOAD_TYPE[payload['payload_type']]))
+
+        return serialized_payloads
+
+    def start_omaha(self,
+                    hostname,
+                    target_build,
+                    payloads,
+                    exposed_via_proxy=False,
+                    critical_update=False,
+                    return_noupdate_starting=0):
+        """Serialize and send the cmd to the TLS service.
+
+        @param hostname: hostname of the DUT, normally 'hostname' or
+            'self.hostname'.
+        @param target_build: full target build for the update, in the usual
+            '<board>-release/R<milestone>-<version>' form.
+        @param payloads: list of the payloads in the format:
+            [{'payload_id': <id>, 'payload_type': <type>}]
+            example:
+                [{'payload_id': 'ROOTFS', 'payload_type': 'FULL'},]
+        @param exposed_via_proxy: bool indicating that the fake Omaha service
+            is exposed to the DUT via a proxy server instead of directly.
+        @param critical_update: bool indicating that the fake Omaha created
+            should mark the update as critical.
+        @param return_noupdate_starting: int indicates from which update check
+            to start returning noupdate.
+
+        @returns: the omaha_url
+        """
+        payload = self._make_payloads(payloads)
+
+        target_build = autotest_common_pb2.ChromeOsImage(
+                gs_path_prefix=target_build)
+        fake_omaha = autotest_common_pb2.FakeOmaha(
+                dut=hostname,
+                target_build=target_build,
+                payloads=payload,
+                exposed_via_proxy=exposed_via_proxy,
+                critical_update=critical_update,
+                return_noupdate_starting=return_noupdate_starting)
+
+        req = autotest_common_pb2.CreateFakeOmahaRequest(fake_omaha=fake_omaha)
+
+        try:
+            result = self.stub.CreateFakeOmaha(req)
+            self.fo_name = result.name
+            return result.omaha_url
+        except Exception as e:
+            logging.error("TLS FakeOmaha Debug String: %s",
+                          e.debug_error_string())
+            raise error.TestError(
+                    "Could not start FakeOmaha server: %s" % e.details())
+
+    def stop_omaha(self):
+        """Delete the running Omaha Service."""
+        if not self.fo_name:
+            raise error.TestWarn(
+                    "No FakeOmaha name specified, has it been started?")
+        req = autotest_common_pb2.DeleteFakeOmahaRequest(name=self.fo_name)
+        try:
+            self.stub.DeleteFakeOmaha(req)
+        except Exception as e:
+            raise error.TestWarn("Could not stop FakeOmaha server: %s" %
+                                 e.details())
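
Putting the two FakeOmaha calls together, a test would normally start the service, point the DUT's updater at the returned Omaha URL, and tear the service down afterwards. The sketch below is illustrative only; the connection object, hostname, and build string are placeholders, and the payload dictionary follows the format documented in `start_omaha`.

from autotest_lib.server.hosts.tls_client import fake_omaha


def with_fake_omaha(tls_conn, hostname, target_build):
    """Start a fake Omaha for the given build and guarantee it is stopped."""
    omaha = fake_omaha.TLSFakeOmaha(tls_conn)
    omaha_url = omaha.start_omaha(
            hostname,
            target_build=target_build,
            payloads=[{'payload_id': 'ROOTFS', 'payload_type': 'FULL'}],
            critical_update=True)
    try:
        return omaha_url  # the caller would point update_engine at this URL
    finally:
        omaha.stop_omaha()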
diff --git a/server/lab_status_unittest.py b/server/lab_status_unittest.py
deleted file mode 100644
index dd22a0d..0000000
--- a/server/lab_status_unittest.py
+++ /dev/null
@@ -1,390 +0,0 @@
-# Lint as: python2, python3
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from six.moves import range
-import six
-import json
-import mox
-import time
-import unittest
-from six.moves import urllib
-
-import common
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.server import site_utils
-
-_DEADBUILD = 'deadboard-release/R33-4966.0.0'
-_LIVEBUILD = 'liveboard-release/R32-4920.14.0'
-
-_STATUS_TEMPLATE = '''
-    {
-      "username": "fizzbin@google.com",
-      "date": "2013-11-16 00:25:23.511208",
-      "message": "%s",
-      "can_commit_freely": %s,
-      "general_state": "%s"
-    }
-    '''
-
-
-def _make_status(message, can_commit, state):
-    return _STATUS_TEMPLATE % (message, can_commit, state)
-
-
-def _make_open_status(message, state):
-    return _make_status(message, 'true', state)
-
-
-def _make_closed_status(message):
-    return _make_status(message, 'false', 'closed')
-
-
-def _make_deadbuild_status(message):
-    return _make_status(message, 'false', 'open')
-
-
-_OPEN_STATUS_VALUES = [
-    _make_open_status('Lab is up (cross your fingers)', 'open'),
-    _make_open_status('Lab is on fire', 'throttled'),
-    _make_open_status('Lab is up despite deadboard', 'open'),
-    _make_open_status('Lab is up despite .*/R33-4966.0.0', 'open'),
-]
-
-_CLOSED_STATUS_VALUES = [
-    _make_closed_status('Lab is down for spite'),
-    _make_closed_status('Lab is down even for [%s]' % _LIVEBUILD),
-    _make_closed_status('Lab is down even for [%s]' % _DEADBUILD),
-]
-
-_DEADBUILD_STATUS_VALUES = [
-    _make_deadbuild_status('Lab is up except for [deadboard-]'),
-    _make_deadbuild_status('Lab is up except for [board- deadboard-]'),
-    _make_deadbuild_status('Lab is up except for [.*/R33-]'),
-    _make_deadbuild_status('Lab is up except for [deadboard-.*/R33-]'),
-    _make_deadbuild_status('Lab is up except for [ deadboard-]'),
-    _make_deadbuild_status('Lab is up except for [deadboard- ]'),
-    _make_deadbuild_status('Lab is up [first .*/R33- last]'),
-    _make_deadbuild_status('liveboard is good, but [deadboard-] is bad'),
-    _make_deadbuild_status('Lab is up [deadboard- otherboard-]'),
-    _make_deadbuild_status('Lab is up [otherboard- deadboard-]'),
-]
-
-
-_FAKE_URL = 'ignore://not.a.url'
-
-
-class _FakeURLResponse(object):
-
-    """Everything needed to pretend to be a response from urlopen().
-
-    Creates a StringIO instance to handle the File operations.
-
-    N.B.  StringIO is lame:  we can't inherit from it (super won't
-    work), and it doesn't implement __getattr__(), either.  So, we
-    have to manually forward calls to the StringIO object.  This
-    forwards only what empirical testing says is required; YMMV.
-
-    """
-
-    def __init__(self, code, buffer):
-        self._stringio = six.StringIO(buffer)
-        self._code = code
-
-
-    def read(self, size=-1):
-        """Standard file-like read operation.
-
-        @param size size for read operation.
-        """
-        return self._stringio.read(size)
-
-
-    def getcode(self):
-        """Get URL HTTP response code."""
-        return self._code
-
-
-class GetStatusTest(mox.MoxTestBase):
-
-    """Test case for _get_lab_status().
-
-    We mock out dependencies on urllib2 and time.sleep(), and
-    confirm that the function returns the proper JSON representation
-    for a pre-defined response.
-
-    """
-
-    def setUp(self):
-        super(GetStatusTest, self).setUp()
-        self.mox.StubOutWithMock(urllib.request, 'urlopen')
-        self.mox.StubOutWithMock(time, 'sleep')
-
-
-    def test_success(self):
-        """Test that successful calls to urlopen() succeed."""
-        json_string = _OPEN_STATUS_VALUES[0]
-        json_value = json.loads(json_string)
-        urllib.request.urlopen(mox.IgnoreArg()).AndReturn(
-                _FakeURLResponse(200, json_string))
-        self.mox.ReplayAll()
-        result = site_utils._get_lab_status(_FAKE_URL)
-        self.mox.VerifyAll()
-        self.assertEqual(json_value, result)
-
-
-    def test_retry_ioerror(self):
-        """Test that an IOError retries at least once."""
-        json_string = _OPEN_STATUS_VALUES[0]
-        json_value = json.loads(json_string)
-        urllib.request.urlopen(mox.IgnoreArg()).AndRaise(
-                IOError('Fake I/O error for a fake URL'))
-        time.sleep(mox.IgnoreArg()).AndReturn(None)
-        urllib.request.urlopen(mox.IgnoreArg()).AndReturn(
-                _FakeURLResponse(200, json_string))
-        self.mox.ReplayAll()
-        result = site_utils._get_lab_status(_FAKE_URL)
-        self.mox.VerifyAll()
-        self.assertEqual(json_value, result)
-
-
-    def test_retry_http_internal_error(self):
-        """Test that an HTTP error retries at least once."""
-        json_string = _OPEN_STATUS_VALUES[0]
-        json_value = json.loads(json_string)
-        urllib.request.urlopen(mox.IgnoreArg()).AndReturn(
-                _FakeURLResponse(500, ''))
-        time.sleep(mox.IgnoreArg()).AndReturn(None)
-        urllib.request.urlopen(mox.IgnoreArg()).AndReturn(
-                _FakeURLResponse(200, json_string))
-        self.mox.ReplayAll()
-        result = site_utils._get_lab_status(_FAKE_URL)
-        self.mox.VerifyAll()
-        self.assertEqual(json_value, result)
-
-
-    def test_failure_ioerror(self):
-        """Test that there's a failure if urlopen() never succeeds."""
-        json_string = _OPEN_STATUS_VALUES[0]
-        json_value = json.loads(json_string)
-        for _ in range(site_utils._MAX_LAB_STATUS_ATTEMPTS):
-            urllib.request.urlopen(mox.IgnoreArg()).AndRaise(
-                    IOError('Fake I/O error for a fake URL'))
-            time.sleep(mox.IgnoreArg()).AndReturn(None)
-        self.mox.ReplayAll()
-        result = site_utils._get_lab_status(_FAKE_URL)
-        self.mox.VerifyAll()
-        self.assertEqual(None, result)
-
-
-    def test_failure_http_internal_error(self):
-        """Test that there's a failure for a permanent HTTP error."""
-        json_string = _OPEN_STATUS_VALUES[0]
-        json_value = json.loads(json_string)
-        for _ in range(site_utils._MAX_LAB_STATUS_ATTEMPTS):
-            urllib.request.urlopen(mox.IgnoreArg()).AndReturn(
-                    _FakeURLResponse(404, 'Not here, never gonna be'))
-            time.sleep(mox.IgnoreArg()).InAnyOrder().AndReturn(None)
-        self.mox.ReplayAll()
-        result = site_utils._get_lab_status(_FAKE_URL)
-        self.mox.VerifyAll()
-        self.assertEqual(None, result)
-
-
-class DecodeStatusTest(unittest.TestCase):
-
-    """Test case for _decode_lab_status().
-
-    Testing covers three distinct possible states:
-     1. Lab is up.  All calls to _decode_lab_status() will
-        succeed without raising an exception.
-     2. Lab is down.  All calls to _decode_lab_status() will
-        fail with TestLabException.
-     3. Build disabled.  Calls to _decode_lab_status() will
-        succeed, except that board `_DEADBUILD` will raise
-        TestLabException.
-
-    """
-
-    def _assert_lab_open(self, lab_status):
-        """Test that open status values are handled properly.
-
-        Test that _decode_lab_status() succeeds when the lab status
-        is up.
-
-        @param lab_status JSON value describing lab status.
-
-        """
-        site_utils._decode_lab_status(lab_status, _LIVEBUILD)
-        site_utils._decode_lab_status(lab_status, _DEADBUILD)
-
-
-    def _assert_lab_closed(self, lab_status):
-        """Test that closed status values are handled properly.
-
-        Test that _decode_lab_status() raises TestLabException
-        when the lab status is down.
-
-        @param lab_status JSON value describing lab status.
-
-        """
-        with self.assertRaises(site_utils.TestLabException):
-            site_utils._decode_lab_status(lab_status, _LIVEBUILD)
-        with self.assertRaises(site_utils.TestLabException):
-            site_utils._decode_lab_status(lab_status, _DEADBUILD)
-
-
-    def _assert_lab_deadbuild(self, lab_status):
-        """Test that disabled builds are handled properly.
-
-        Test that _decode_lab_status() raises TestLabException
-        for build `_DEADBUILD` and succeeds otherwise.
-
-        @param lab_status JSON value describing lab status.
-
-        """
-        site_utils._decode_lab_status(lab_status, _LIVEBUILD)
-        with self.assertRaises(site_utils.TestLabException):
-            site_utils._decode_lab_status(lab_status, _DEADBUILD)
-
-
-    def _assert_lab_status(self, test_values, checker):
-        """General purpose test for _decode_lab_status().
-
-        Decode each JSON string in `test_values`, and call the
-        `checker` function to test the corresponding status is
-        correctly handled.
-
-        @param test_values Array of JSON encoded strings representing
-                           lab status.
-        @param checker Function to be called against each of the lab
-                       status values in the `test_values` array.
-
-        """
-        for s in test_values:
-            lab_status = json.loads(s)
-            checker(lab_status)
-
-
-    def test_open_lab(self):
-        """Test that open lab status values are handled correctly."""
-        self._assert_lab_status(_OPEN_STATUS_VALUES,
-                                self._assert_lab_open)
-
-
-    def test_closed_lab(self):
-        """Test that closed lab status values are handled correctly."""
-        self._assert_lab_status(_CLOSED_STATUS_VALUES,
-                                self._assert_lab_closed)
-
-
-    def test_dead_build(self):
-        """Test that disabled builds are handled correctly."""
-        self._assert_lab_status(_DEADBUILD_STATUS_VALUES,
-                                self._assert_lab_deadbuild)
-
-
-class CheckStatusTest(mox.MoxTestBase):
-
-    """Test case for `check_lab_status()`.
-
-    We mock out dependencies on `global_config.global_config()`,
-    `_get_lab_status()` and confirm that the function succeeds or
-    fails as expected.
-
-    N.B.  We don't mock `_decode_lab_status()`; if DecodeStatusTest
-    is failing, this test may fail, too.
-
-    """
-
-    def setUp(self):
-        super(CheckStatusTest, self).setUp()
-        self.mox.StubOutWithMock(global_config.global_config,
-                                 'get_config_value')
-        self.mox.StubOutWithMock(site_utils, '_get_lab_status')
-
-
-    def _setup_not_cautotest(self):
-        """Set up to mock the "we're not on cautotest" case."""
-        global_config.global_config.get_config_value(
-                'SERVER', 'hostname').AndReturn('not-cautotest')
-
-
-    def _setup_no_status(self):
-        """Set up to mock lab status as unavailable."""
-        global_config.global_config.get_config_value(
-                'SERVER', 'hostname').AndReturn('cautotest')
-        global_config.global_config.get_config_value(
-                'CROS', 'lab_status_url').AndReturn(_FAKE_URL)
-        site_utils._get_lab_status(_FAKE_URL).AndReturn(None)
-
-
-    def _setup_lab_status(self, json_string):
-        """Set up to mock a given lab status.
-
-        @param json_string JSON string for the JSON object to return
-                           from `_get_lab_status()`.
-
-        """
-        global_config.global_config.get_config_value(
-                'SERVER', 'hostname').AndReturn('cautotest')
-        global_config.global_config.get_config_value(
-                'CROS', 'lab_status_url').AndReturn(_FAKE_URL)
-        json_value = json.loads(json_string)
-        site_utils._get_lab_status(_FAKE_URL).AndReturn(json_value)
-
-
-    def _try_check_status(self, build):
-        """Test calling check_lab_status() with `build`."""
-        try:
-            self.mox.ReplayAll()
-            site_utils.check_lab_status(build)
-        finally:
-            self.mox.VerifyAll()
-
-
-    def test_non_cautotest(self):
-        """Test a call with a build when the host isn't cautotest."""
-        self._setup_not_cautotest()
-        self._try_check_status(_LIVEBUILD)
-
-
-    def test_no_lab_status(self):
-        """Test with a build when `_get_lab_status()` returns `None`."""
-        self._setup_no_status()
-        self._try_check_status(_LIVEBUILD)
-
-
-    def test_lab_up_live_build(self):
-        """Test lab open with a build specified."""
-        self._setup_lab_status(_OPEN_STATUS_VALUES[0])
-        self._try_check_status(_LIVEBUILD)
-
-
-    def test_lab_down_live_build(self):
-        """Test lab closed with a build specified."""
-        self._setup_lab_status(_CLOSED_STATUS_VALUES[0])
-        with self.assertRaises(site_utils.TestLabException):
-            self._try_check_status(_LIVEBUILD)
-
-
-    def test_build_disabled_live_build(self):
-        """Test build disabled with a live build specified."""
-        self._setup_lab_status(_DEADBUILD_STATUS_VALUES[0])
-        self._try_check_status(_LIVEBUILD)
-
-
-    def test_build_disabled_dead_build(self):
-        """Test build disabled with the disabled build specified."""
-        self._setup_lab_status(_DEADBUILD_STATUS_VALUES[0])
-        with self.assertRaises(site_utils.TestLabException):
-            self._try_check_status(_DEADBUILD)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/server/lib/suite_report.py b/server/lib/suite_report.py
index 484c31f..46baeb4 100644
--- a/server/lib/suite_report.py
+++ b/server/lib/suite_report.py
@@ -117,7 +117,7 @@
         if s.hostname:
             dut = s.hostname
         if s.test_started_time == 'None' or s.test_finished_time == 'None':
-            logging.warn('TKO entry for %d missing time: %s' % (job.id, str(s)))
+            logging.warning('TKO entry for %d missing time: %s' % (job.id, str(s)))
     start_time, finish_time = find_start_finish_times(statuses)
     entry = make_entry(('Suite' if suite_job else 'HWTest', int(job.id)),
                        job.name.split('/')[-1], status, start_time,
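
Note on the suite_report.py hunk above: `logging.warn` is a long-deprecated alias for `logging.warning` on Python 3, which is all this change swaps. A minimal standalone sketch of the replacement call, using only the standard library; the job id and status text below are illustrative placeholders, not values from the patch:

    import logging

    # Same message shape as in suite_report.py, via the supported name.
    logging.warning('TKO entry for %d missing time: %s' % (1234, '<status row>'))
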
diff --git a/server/samples/profilertest.srv b/server/samples/profilertest.srv
index 7f9e224..c7cb332 100644
--- a/server/samples/profilertest.srv
+++ b/server/samples/profilertest.srv
@@ -13,9 +13,9 @@
 
     control_file.append(("job.run_test('profiler_sync', timeout_sync=%d, "
                          "timeout_start=%d, timeout_stop=%d, "
-                         "hostid='%s', masterid='%s', all_ids=%s)")
+                         "hostid='%s', mainid='%s', all_ids=%s)")
                         % (timeout_sync, timeout_start, timeout_stop,
-                           at.host.hostname, "PROF_MASTER", str(machines)))
+                           at.host.hostname, "PROF_MAIN", str(machines)))
 
     for profiler in profilers:
         control_file.append("job.profilers.delete('%s')" % profiler[0])
@@ -26,21 +26,21 @@
 
 def wait_for_profilers(machines, timeout = 180):
     # wait until the profilers have started
-    sync_bar = barrier("PROF_MASTER", "sync_profilers",
+    sync_bar = barrier("PROF_MAIN", "sync_profilers",
             timeout, port=11920)
-    sync_bar.rendezvous_servers("PROF_MASTER", *machines)
+    sync_bar.rendezvous_servers("PROF_MAIN", *machines)
 
 
 def start_profilers(machines, timeout = 180):
     # wait until the profilers have started
-    start_bar = barrier("PROF_MASTER", "start_profilers",
+    start_bar = barrier("PROF_MAIN", "start_profilers",
             timeout, port=11920)
-    start_bar.rendezvous_servers("PROF_MASTER", *machines)
+    start_bar.rendezvous_servers("PROF_MAIN", *machines)
 
 
 def stop_profilers(machines, timeout = 120):
-    stop_bar = barrier("PROF_MASTER", "stop_profilers", timeout, port=11920)
-    stop_bar.rendezvous_servers("PROF_MASTER", *machines)
+    stop_bar = barrier("PROF_MAIN", "stop_profilers", timeout, port=11920)
+    stop_bar.rendezvous_servers("PROF_MAIN", *machines)
 
 
 def server_sleep_test(seconds):
diff --git a/server/sequence.py b/server/sequence.py
index f38bc2d..b59205d 100644
--- a/server/sequence.py
+++ b/server/sequence.py
@@ -142,7 +142,7 @@
         afe = frontend_wrappers.RetryingAFE(timeout_min=30, delay_sec=10,
                                             user=job.user, debug=False)
         # job_directores.get_job_id_or_task_id() will return a non-int opaque id
-        # for Chrome OS Skylab tasks. But sequences will break in that case
+        # for ChromeOS Skylab tasks. But sequences will break in that case
         # anyway, because they try to create AFE jobs internally.
         current_job_id = int(
                 job_directories.get_job_id_or_task_id(job.resultdir))
diff --git a/server/server_job.py b/server/server_job.py
index 25488dd..56a4507 100644
--- a/server/server_job.py
+++ b/server/server_job.py
@@ -19,7 +19,6 @@
 import errno
 import fcntl
 import getpass
-import itertools
 import logging
 import os
 import pickle
@@ -64,7 +63,7 @@
 from six.moves import zip
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -130,10 +129,12 @@
     for machine in machine_names:
         if not in_lab:
             afe_host = server_utils.EmptyAFEHost()
-            host_info_store = host_info.InMemoryHostInfoStore()
-            if host_attributes is not None:
+            host_info_store = _create_file_backed_host_info_store(
+                    store_dir, machine)
+            if host_attributes:
                 afe_host.attributes.update(host_attributes)
-                info = host_info.HostInfo(attributes=host_attributes)
+                info = host_info.HostInfo(labels=host_info_store.get().labels,
+                                          attributes=host_attributes)
                 host_info_store.commit(info)
         elif use_shadow_store:
             afe_host = _create_afe_host(machine)
@@ -242,7 +243,13 @@
     _STATUS_VERSION = 1
 
     # TODO crbug.com/285395 eliminate ssh_verbosity_flag
-    def __init__(self, control, args, resultdir, label, user, machines,
+    def __init__(self,
+                 control,
+                 args,
+                 resultdir,
+                 label,
+                 user,
+                 machines,
                  machine_dict_list,
                  client=False,
                  ssh_user=host_factory.DEFAULT_SSH_USER,
@@ -251,11 +258,17 @@
                  ssh_verbosity_flag=host_factory.DEFAULT_SSH_VERBOSITY,
                  ssh_options=host_factory.DEFAULT_SSH_OPTIONS,
                  group_name='',
-                 tag='', disable_sysinfo=False,
+                 tag='',
+                 disable_sysinfo=False,
                  control_filename=SERVER_CONTROL_FILENAME,
-                 parent_job_id=None, in_lab=False,
+                 parent_job_id=None,
+                 in_lab=False,
                  use_client_trampoline=False,
-                 sync_offload_dir=''):
+                 sync_offload_dir='',
+                 companion_hosts=None,
+                 dut_servers=None,
+                 is_cft=False,
+                 force_full_log_collection=False):
         """
         Create a server side job object.
 
@@ -295,6 +308,14 @@
                 control file.
         @param sync_offload_dir: String; relative path to synchronous offload
                 dir, relative to the results directory. Ignored if empty.
+        @param companion_hosts: a str or list of hosts to be used as companions
+                for, and provided to, the test. NOTE: these are different from
+                machines, where each machine is a host that the test would be
+                run on.
+        @param dut_servers: a str or list of hosts to be used as DUT servers
+                provided to the test.
+        @param force_full_log_collection: bool; force full log collection even
+                when the test passes.
         """
         super(server_job, self).__init__(resultdir=resultdir)
         self.control = control
@@ -327,20 +348,38 @@
         self._control_filename = control_filename
         self._disable_sysinfo = disable_sysinfo
         self._use_client_trampoline = use_client_trampoline
+        self._companion_hosts = companion_hosts
+        self._dut_servers = dut_servers
+        self._is_cft = is_cft
+        self.force_full_log_collection = force_full_log_collection
+
+        # Parse the release number from the label to set up sysinfo.
+        version = re.findall(r'release/R(\d+)-', label)
+        if version:
+            version = int(version[0])
 
         self.logging = logging_manager.get_logging_manager(
                 manage_stdout_and_stderr=True, redirect_fds=True)
         subcommand.logging_manager_object = self.logging
 
-        self.sysinfo = sysinfo.sysinfo(self.resultdir)
+        self.sysinfo = sysinfo.sysinfo(self.resultdir, version=version)
         self.profilers = profilers.profilers(self)
         self._sync_offload_dir = sync_offload_dir
 
-        job_data = {'label' : label, 'user' : user,
-                    'hostname' : ','.join(machines),
-                    'drone' : platform.node(),
-                    'status_version' : str(self._STATUS_VERSION),
-                    'job_started' : str(int(time.time()))}
+        job_data = {
+                'user': user,
+                'hostname': ','.join(machines),
+                'drone': platform.node(),
+                'status_version': str(self._STATUS_VERSION),
+                'job_started': str(int(time.time()))
+        }
+
+        # Adhoc/<testname> is the default label, and should not be written,
+        # as this can cause conflicts with results uploading in CFT.
+        # However, some pipelines (such as PVS) do need `label` within the
+        # keyvals, which can now be done with the `-l` flag in test_that.
+        if 'adhoc' not in label:
+            job_data['label'] = label
         # Save parent job id to keyvals, so parser can retrieve the info and
         # write to tko_jobs record.
         if parent_job_id:
@@ -399,6 +438,12 @@
                     control, raise_warnings=False)
             self.fast = parsed_control.fast
             self.max_result_size_KB = parsed_control.max_result_size_KB
+            # Wrap this in a try block to prevent client/SSP issues. Note: if
+            # the except clause is hit, the extended timeout will be ignored.
+            try:
+                self.extended_timeout = parsed_control.extended_timeout
+            except AttributeError:
+                self.extended_timeout = None
         else:
             self.fast = False
             # Set the maximum result size to be the default specified in
@@ -838,8 +883,14 @@
                     logging.debug("Results dir is %s", self.resultdir)
                     logging.debug("Synchronous offload dir is %s", sync_dir)
                 logging.info("Processing control file")
+                if self._companion_hosts:
+                    namespace['companion_hosts'] = self._companion_hosts
+                if self._dut_servers:
+                    namespace['dut_servers'] = self._dut_servers
                 namespace['use_packaging'] = use_packaging
                 namespace['synchronous_offload_dir'] = sync_dir
+                namespace['extended_timeout'] = self.extended_timeout
+                namespace['is_cft'] = self._is_cft
                 os.environ[OFFLOAD_ENVVAR] = sync_dir
                 self._execute_code(server_control_file, namespace)
                 logging.info("Finished processing control file")
diff --git a/server/server_job_unittest.py b/server/server_job_unittest.py
index 70707fb..bdcd34d 100755
--- a/server/server_job_unittest.py
+++ b/server/server_job_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import os
 import tempfile
@@ -58,7 +58,8 @@
                            lambda *a,**k: manager())
         class sysi:
             log_per_reboot_data = lambda self: None
-        self.god.stub_with(server_job.sysinfo, 'sysinfo', lambda r: sysi())
+        self.god.stub_with(
+                server_job.sysinfo, 'sysinfo', lambda r, version=None: sysi())
 
         self.job.__init__(
                 self.control_file,
diff --git a/server/site_crashcollect.py b/server/site_crashcollect.py
index b4ac6a0..68932a3 100644
--- a/server/site_crashcollect.py
+++ b/server/site_crashcollect.py
@@ -15,7 +15,7 @@
 from autotest_lib.server import utils
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = client_utils.metrics_mock
 
@@ -211,7 +211,7 @@
     try:
         for file in _find_orphaned_crashdumps(host):
             logging.info('Collecting %s...', file)
-            collect_log_file(host, file, infodir, clean=True)
+            collect_log_file(host, file, infodir, clean=False)
             orphans.append(file)
     except Exception as e:
         logging.warning('Collection of orphaned crash dumps failed %s', e)
diff --git a/server/site_host_attributes.py b/server/site_host_attributes.py
index 5aa859b..2431676 100644
--- a/server/site_host_attributes.py
+++ b/server/site_host_attributes.py
@@ -44,19 +44,19 @@
 """
 
 
-import hashlib, logging, os, utils
-import six
+import hashlib, logging, os
 
+from autotest_lib.client.common_lib import utils
 
 private_host_attributes = utils.import_site_symbol(
-    __file__,
-    'autotest_lib.server.private_host_attributes_config',
-    'private_host_attributes', dummy={})
+        __file__,
+        'autotest_lib.server.private_host_attributes_config',
+        'private_host_attributes',
+        placeholder={})
 
 try:
     settings = 'autotest_lib.frontend.settings'
     os.environ['DJANGO_SETTINGS_MODULE'] = settings
-    from autotest_lib.frontend.afe import models
     has_models = True
 except Exception:
     has_models = False
diff --git a/server/site_linux_system.py b/server/site_linux_system.py
index 93750f3..d67871e 100644
--- a/server/site_linux_system.py
+++ b/server/site_linux_system.py
@@ -137,6 +137,8 @@
         self._ping_runner = ping_runner.PingRunner(host=self.host)
         self._bridge_interface = None
         self._virtual_ethernet_pair = None
+        self._firewall_rules = []
+        self._command_iptables = 'iptables -w 5'
 
         # TODO(crbug.com/839164): some routers fill their stateful partition
         # with uncollected metrics.
@@ -662,7 +664,7 @@
         """
         for net_dev in self._wlanifs_in_use:
             if net_dev.if_name == wlanif:
-                 self._wlanifs_in_use.remove(net_dev)
+                self._wlanifs_in_use.remove(net_dev)
 
 
     def get_bridge_interface(self):
@@ -781,6 +783,29 @@
         logging.info('Pinging from the %s.', self.role)
         return self._ping_runner.ping(ping_config)
 
+    def firewall_open(self, proto, src):
+        """Opens up firewall to run performance tests.
+
+        By default, we have a firewall rule for NFQUEUE (see crbug.com/220736).
+        In order to run netperf tests, we need to add a new firewall rule BEFORE
+        this NFQUEUE rule in the INPUT chain.
+
+        @param proto a string, test traffic protocol, e.g. udp, tcp.
+        @param src a string, source IP address (a /32 mask is applied).
+
+        @return the firewall rule string that was added.
+
+        """
+        rule = 'INPUT -s %s/32 -p %s -m %s -j ACCEPT' % (src, proto, proto)
+        self.host.run('%s -I %s' % (self._command_iptables, rule))
+        self._firewall_rules.append(rule)
+        return rule
+
+    def firewall_cleanup(self):
+        """Cleans up firewall rules."""
+        for rule in self._firewall_rules:
+            self.host.run('%s -D %s' % (self._command_iptables, rule))
+        self._firewall_rules = []
 
     @property
     def logdir(self):
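
Note on the site_linux_system.py hunk above: `firewall_open` records each iptables rule it inserts so `firewall_cleanup` can remove them symmetrically. A hedged usage sketch, where `system` stands for a LinuxSystem-derived router object and the peer address and test body are illustrative:

    # Open the INPUT chain for the traffic the performance test will generate.
    rule_tcp = system.firewall_open('tcp', '192.168.0.10')
    rule_udp = system.firewall_open('udp', '192.168.0.10')
    try:
        run_perf_traffic()        # hypothetical test body (e.g. netperf runs)
    finally:
        # Deletes every rule added via firewall_open, then clears the list.
        system.firewall_cleanup()
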
diff --git a/server/site_tests/OWNERS b/server/site_tests/OWNERS
index 71c72f7..6b037fb 100644
--- a/server/site_tests/OWNERS
+++ b/server/site_tests/OWNERS
@@ -1 +1,3 @@
 include /FIRMWARE_OWNERS
+include /ENGPROD_OWNERS
+*
diff --git a/server/site_tests/android_CrashLogging/android_CrashLogging.py b/server/site_tests/android_CrashLogging/android_CrashLogging.py
deleted file mode 100644
index d7260ba..0000000
--- a/server/site_tests/android_CrashLogging/android_CrashLogging.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-import common
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-# Process to kill for log-generation purposes.
-TARGET_PROCESS = '/system/bin/mediaserver'
-# Number of seconds to wait for host.run commands to timeout.
-COMMAND_TIMEOUT_SECONDS = 10
-# Number of times to try to kill the process.
-KILL_RETRIES = 10
-# Number of times to retry the command the find command to find logs.
-LOG_FIND_RETRIES = 5
-
-class android_CrashLogging(test.test):
-    """Confirm that crash logs are generated for native crashes."""
-    version = 1
-
-
-    def run_once(self, host=None):
-        """Confirm that crash logs are generated for crashes.
-
-        @param host: host object representing the device under test.
-        """
-        if host is None:
-            raise error.TestFail('android_Crashlogging test executed without '
-                                 'a host')
-        self.host = host
-
-        # Remove any pre-existing tombstones.
-        self.host.run('rm /data/tombstones/tombstone_*',
-                      timeout=COMMAND_TIMEOUT_SECONDS, ignore_status=True)
-
-        # Find and kill a process.
-        result = self.host.run('pgrep %s' % TARGET_PROCESS,
-                               timeout=COMMAND_TIMEOUT_SECONDS,
-                               ignore_status=True)
-        pid = result.stdout.strip()
-        if result.exit_status != 0 or not len(pid):
-            raise error.TestFail('No %s process found to kill' % TARGET_PROCESS)
-        for _ in xrange(KILL_RETRIES):
-            status = self.host.run('kill -SIGSEGV %s' % pid,
-                                   timeout=COMMAND_TIMEOUT_SECONDS,
-                                   ignore_status=True).exit_status
-            if status != 0:
-                break
-
-        logs = None
-        for _ in xrange(LOG_FIND_RETRIES):
-            try:
-                logs = self.host.run_output(
-                        'find /data/tombstones -maxdepth 1 -type f',
-                        timeout=COMMAND_TIMEOUT_SECONDS).split()
-            except (error.GenericHostRunError, error.AutoservSSHTimeout,
-                    error.CmdTimeoutError):
-                raise error.TestFail('No crash logs were created because of a '
-                                     'host error or because the directory '
-                                     'where crash logs are written to does not '
-                                     'exist on the DUT.')
-            if logs:
-                break
-            time.sleep(1)
-
-        if not logs:
-            raise error.TestFail('No crash logs were created.')
diff --git a/server/site_tests/android_CrashLogging/control b/server/site_tests/android_CrashLogging/control
deleted file mode 100644
index 08113be..0000000
--- a/server/site_tests/android_CrashLogging/control
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'bryanlew'
-NAME = 'android_CrashLogging'
-TIME = 'FAST'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'os:android'
-
-DOC = """
-Check that Android non-Java crash logs are being generated.
-"""
-
-def run(machine):
-    job.run_test('android_CrashLogging', host=hosts.create_host(machine))
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/android_DummyTest/android_DummyTest.py b/server/site_tests/android_DummyTest/android_DummyTest.py
deleted file mode 100644
index e94bb2f..0000000
--- a/server/site_tests/android_DummyTest/android_DummyTest.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.server import test
-
-
-class android_DummyTest(test.test):
-    """A dummy test to verify Android device can be accessible with adb."""
-    version = 1
-
-    def run_once(self, host=None):
-        """A dummy test to verify Android device can be accessible with adb.
-
-        Prerequisite: The DUT is in ADB mode.
-
-        @param host: host object representing the device under test.
-        """
-        self.host = host
-        self.host.adb_run('shell ls')
diff --git a/server/site_tests/android_DummyTest/control b/server/site_tests/android_DummyTest/control
deleted file mode 100644
index 27dff7f..0000000
--- a/server/site_tests/android_DummyTest/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dshi'
-NAME = 'android_DummyTest'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-SUITE = ''
-# All android tests do not support server-side packaging.
-REQUIRE_SSP = False
-
-DOC = """
-A dummy test to verify Android device can be accessible with adb.
-"""
-
-def run(machine):
-    job.run_test('android_DummyTest', host=hosts.create_host(machine))
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/android_Invariants/android_Invariants.py b/server/site_tests/android_Invariants/android_Invariants.py
deleted file mode 100644
index 50496a1..0000000
--- a/server/site_tests/android_Invariants/android_Invariants.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import common
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-
-class android_Invariants(test.test):
-    """Verify basic characteristics common to all Android devices."""
-    version = 1
-
-
-    def assert_path_test(self, path, test, negative=False):
-        """Performs a test against a path.
-
-        See the man page for test(1) for valid tests (e.g. -e, -b, -d).
-
-        @param path: the path to check.
-        @param test: the test to perform, without leading dash.
-        @param negative: if True, test for the negative.
-        """
-        self.host.run('test %s -%s %s' % ('!' if negative else '', test, path))
-
-
-    def assert_selinux_context(self, path, ctx):
-        """Checks the selinux context of a path.
-
-        @param path: the path to check.
-        @param ctx: the selinux context to check for.
-
-        @raises error.TestFail
-        """
-        # Example output of 'ls -LZ /dev/block/by-name/misc' is:
-        # u:object_r:misc_block_device:s0 /dev/block/by-name/misc
-        tokens = self.host.run_output('ls -LZ %s' % path).split()
-        path_ctx = tokens[0]
-        if not ctx in path_ctx:
-            raise error.TestFail('Context "%s" for path "%s" does not '
-                                 'contain "%s"' % (path_ctx, path, ctx))
-
-
-    def check_fstab_name(self):
-        """Checks that the fstab file has the name /fstab.<ro.hardware>.
-        """
-        hardware = self.host.run_output('getprop ro.hardware')
-        self.assert_path_test('/fstab.%s' % hardware, 'e')
-
-
-    def run_once(self, host=None):
-        """Verify basic characteristics common to all Android devices.
-
-        @param host: host object representing the device under test.
-        """
-        self.host = host
-        self.check_fstab_name()
diff --git a/server/site_tests/android_Invariants/control b/server/site_tests/android_Invariants/control
deleted file mode 100644
index 2c81cce..0000000
--- a/server/site_tests/android_Invariants/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'jorgelo'
-NAME = 'android_Invariants'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-# No Android test supports server-side packaging.
-REQUIRE_SSP = False
-
-DOC = """
-A simple test to verify basic characteristics common to all Android devices.
-"""
-
-def run(machine):
-    job.run_test('android_Invariants', host=hosts.create_host(machine))
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/apmanager_SimpleConnect/apmanager_SimpleConnect.py b/server/site_tests/apmanager_SimpleConnect/apmanager_SimpleConnect.py
deleted file mode 100644
index 53a8e40..0000000
--- a/server/site_tests/apmanager_SimpleConnect/apmanager_SimpleConnect.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import apmanager_constants
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import apmanager_service_provider
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class apmanager_SimpleConnect(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that the DUT can connect to an AP created by apmanager."""
-    version = 1
-
-    XMLRPC_BRINGUP_TIMEOUT_SECONDS = 60
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """Hook into super class to take control files parameters.
-
-        @param commandline_args dict of parsed parameters from the autotest.
-        @param additional_params dict of AP configuration parameters.
-
-        """
-        self._configurations = additional_params
-
-
-    def run_once(self):
-        """Sets up a router, connects to it, pings it."""
-        # Setup bridge mode test environments if AP is configured to operate in
-        # bridge mode.
-        if (apmanager_constants.CONFIG_OPERATION_MODE in self._configurations
-            and self._configurations[apmanager_constants.CONFIG_OPERATION_MODE]
-                    == apmanager_constants.OPERATION_MODE_BRIDGE):
-            # Setup DHCP server on the other side of the bridge.
-            self.context.router.setup_bridge_mode_dhcp_server()
-            self._configurations[apmanager_constants.CONFIG_BRIDGE_INTERFACE] =\
-                    self.context.router.get_bridge_interface()
-
-        ssid = self.context.router.build_unique_ssid()
-        self._configurations[apmanager_constants.CONFIG_SSID] = ssid
-        with apmanager_service_provider.ApmanagerServiceProvider(
-                self.context.router, self._configurations):
-            assoc_params = xmlrpc_datatypes.AssociationParameters()
-            assoc_params.ssid = ssid
-            self.context.assert_connect_wifi(assoc_params)
-            self.context.assert_ping_from_server()
-        # AP is terminated, wait for client to become disconnected.
-        success, state, elapsed_seconds = \
-                self.context.client.wait_for_service_states(
-                        ssid, ( 'idle', ), 30)
-        if not success:
-            raise error.TestFail('Failed to disconnect from %s after AP was '
-                                 'terminated for %f seconds (state=%s)' %
-                                 (ssid, elapsed_seconds, state))
diff --git a/server/site_tests/apmanager_SimpleConnect/control.bridge b/server/site_tests/apmanager_SimpleConnect/control.bridge
deleted file mode 100644
index 241bcce..0000000
--- a/server/site_tests/apmanager_SimpleConnect/control.bridge
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'zqiu, wiley, pstew'
-NAME = 'apmanager_SimpleConnect.bridge'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that DUT can connect to an WiFi network configured in bridge
-mode.
-"""
-
-from autotest_lib.client.common_lib.cros.network import apmanager_constants
-
-def run(machine):
-    # Bridge mode configuration.
-    configurations = {apmanager_constants.CONFIG_OPERATION_MODE: \
-                              apmanager_constants.OPERATION_MODE_BRIDGE}
-    host = hosts.create_host(machine)
-    job.run_test('apmanager_SimpleConnect',
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/apmanager_SimpleConnect/control.default b/server/site_tests/apmanager_SimpleConnect/control.default
deleted file mode 100644
index e056b18..0000000
--- a/server/site_tests/apmanager_SimpleConnect/control.default
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'zqiu, wiley, pstew'
-NAME = 'apmanager_SimpleConnect.default'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that DUT can connect to an WiFi network created using
-apmanager with default configurations (SSID will be generated during test).
-"""
-
-def run(machine):
-    # Using default configuration provided by apmanager. SSID will be generated
-    # during test.
-    configurations = {}
-    host = hosts.create_host(machine)
-    job.run_test('apmanager_SimpleConnect',
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioAfterReboot/audio_AudioAfterReboot.py b/server/site_tests/audio_AudioAfterReboot/audio_AudioAfterReboot.py
index 0634fda..61afc89 100644
--- a/server/site_tests/audio_AudioAfterReboot/audio_AudioAfterReboot.py
+++ b/server/site_tests/audio_AudioAfterReboot/audio_AudioAfterReboot.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -95,7 +96,7 @@
         """Checks the node selected by Cras is correct."""
         # Selects and checks the node selected by cras is correct.
         audio_test_utils.check_and_set_chrome_active_node_types(
-                self.facade, self.audio_nodes[0][0], self.audio_nodes[1][0])
+                self.facade, self.audio_nodes[0][0], None)
         audio_test_utils.check_audio_nodes(self.facade, self.audio_nodes)
 
     def play_reboot_play_and_record(self, source_widget, recorder_widget):
@@ -146,7 +147,8 @@
                  source=None,
                  recorder=None,
                  is_internal=False,
-                 cfm_speaker=False):
+                 cfm_speaker=False,
+                 blocked_boards=[]):
         """Runs the test main workflow.
 
         @param host: A host object representing the DUT.
@@ -164,8 +166,11 @@
         @param is_internal: whether internal audio is tested flag
         @param cfm_speaker: whether cfm_speaker's audio is tested which is an
             external USB speaker on CFM (ChromeBox For Meetings) devices.
+        @param blocked_boards: boards on which to skip the test and exit.
 
         """
+        if self.host.get_board().split(':')[1] in blocked_boards:
+            raise error.TestNAError('Board NOT APPLICABLE to test!')
         if ((bind_from == chameleon_audio_ids.CrosIds.HEADPHONE
              or bind_to == chameleon_audio_ids.CrosIds.EXTERNAL_MIC)
                     and not audio_test_utils.has_audio_jack(self.host)):
@@ -226,7 +231,7 @@
 
         # Selects and checks the node selected by cras is correct.
         audio_test_utils.check_and_set_chrome_active_node_types(
-                self.facade, audio_nodes[0][0], audio_nodes[1][0])
+                self.facade, audio_nodes[0][0], None)
 
         # Play only, reboot, then play and record.
         if binder_widget != None:
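
Note on the audio_AudioAfterReboot.py hunk above: the `blocked_boards` check relies on `host.get_board()`, which in autotest returns a label-style string of the form 'board:<name>'; splitting on ':' yields the bare board name compared against the list supplied by the control file. A standalone sketch of that comparison, with an illustrative label value:

    from autotest_lib.client.common_lib import error

    board_label = 'board:lars'                    # e.g. value of host.get_board()
    board = board_label.split(':')[1]             # -> 'lars'
    blocked_boards = ['lars', 'lars-kernelnext']  # as listed in the control files
    if board in blocked_boards:
        raise error.TestNAError('Board NOT APPLICABLE to test!')
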
diff --git a/server/site_tests/audio_AudioAfterReboot/control.external_mic b/server/site_tests/audio_AudioAfterReboot/control.external_mic
index 152fb6f..cf49575 100644
--- a/server/site_tests/audio_AudioAfterReboot/control.external_mic
+++ b/server/site_tests/audio_AudioAfterReboot/control.external_mic
@@ -18,6 +18,7 @@
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = 'audio_cable'
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests external_mic audio function against DUT after reboot.
@@ -34,6 +35,9 @@
                  bind_to=chameleon_audio_ids.CrosIds.EXTERNAL_MIC,
                  audio_nodes=([audio_test_utils.get_headphone_node(host)],
                               ['MIC']),
-                 tag = "external_mic")
+                 tag = "external_mic",
+                 blocked_boards=['lars',    # Pending fix for b/147647051
+                                 'lars-kernelnext']
+                 )
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioAfterReboot/control.headphone b/server/site_tests/audio_AudioAfterReboot/control.headphone
index 45208df..4650ac6 100644
--- a/server/site_tests/audio_AudioAfterReboot/control.headphone
+++ b/server/site_tests/audio_AudioAfterReboot/control.headphone
@@ -18,6 +18,7 @@
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = 'audio_cable'
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio function against DUT after reboot.
@@ -33,7 +34,7 @@
                  bind_from=chameleon_audio_ids.CrosIds.HEADPHONE,
                  bind_to=chameleon_audio_ids.ChameleonIds.LINEIN,
                  audio_nodes=([audio_test_utils.get_headphone_node(host)],
-                              [ 'MIC']),
+                              None),
                  tag = "headphone")
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioAfterReboot/control.internal_mic b/server/site_tests/audio_AudioAfterReboot/control.internal_mic
index 3f64185..c9ed49d 100644
--- a/server/site_tests/audio_AudioAfterReboot/control.internal_mic
+++ b/server/site_tests/audio_AudioAfterReboot/control.internal_mic
@@ -17,7 +17,8 @@
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = 'audio_box'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal_mic audio function against DUT after reboot.
diff --git a/server/site_tests/audio_AudioAfterReboot/control.internal_speaker b/server/site_tests/audio_AudioAfterReboot/control.internal_speaker
index 362c98f..2b75b68 100644
--- a/server/site_tests/audio_AudioAfterReboot/control.internal_speaker
+++ b/server/site_tests/audio_AudioAfterReboot/control.internal_speaker
@@ -17,7 +17,8 @@
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = 'audio_box'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal_speaker audio function against DUT after reboot.
diff --git a/server/site_tests/audio_AudioAfterSuspend/audio_AudioAfterSuspend.py b/server/site_tests/audio_AudioAfterSuspend/audio_AudioAfterSuspend.py
index 656ffdd..92b608d 100644
--- a/server/site_tests/audio_AudioAfterSuspend/audio_AudioAfterSuspend.py
+++ b/server/site_tests/audio_AudioAfterSuspend/audio_AudioAfterSuspend.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -180,7 +181,8 @@
 
     def run_once(self, host, audio_nodes, golden_data,
                  bind_from=None, bind_to=None,
-                 source=None, recorder=None, plug_status=None):
+                 source=None, recorder=None, plug_status=None,
+                 blocked_boards=[]):
         """Runs the test main workflow
 
         @param host: A host object representing the DUT.
@@ -196,8 +198,11 @@
         @param recorder: recorder widget entity
             should be defined in chameleon_audio_ids
         @param plug_status: audio channel plug unplug sequence
+        @param blocked_boards: boards on which to skip the test and exit.
 
         """
+        if self.host.get_board().split(':')[1] in blocked_boards:
+            raise error.TestNAError('Board NOT APPLICABLE to test!')
         if ((bind_from == chameleon_audio_ids.CrosIds.HEADPHONE or
             bind_to == chameleon_audio_ids.CrosIds.EXTERNAL_MIC) and
             not audio_test_utils.has_audio_jack(self.host)):
diff --git a/server/site_tests/audio_AudioAfterSuspend/control.external_mic b/server/site_tests/audio_AudioAfterSuspend/control.external_mic
index 77e6249..7f93d5f 100644
--- a/server/site_tests/audio_AudioAfterSuspend/control.external_mic
+++ b/server/site_tests/audio_AudioAfterSuspend/control.external_mic
@@ -17,6 +17,7 @@
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = "audio_cable"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests external_mic audio function against DUT after suspend.
@@ -37,6 +38,9 @@
                  bind_from=chameleon_audio_ids.ChameleonIds.LINEOUT,
                  bind_to=chameleon_audio_ids.CrosIds.EXTERNAL_MIC,
                  audio_nodes=(None, ['MIC']),
-                 plug_status=plug_status, tag = "external_mic")
+                 plug_status=plug_status, tag = "external_mic",
+                 blocked_boards=['lars',    # Pending fix for b/147647051
+                                 'lars-kernelnext']
+)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioAfterSuspend/control.external_mic.unplug b/server/site_tests/audio_AudioAfterSuspend/control.external_mic.unplug
index c79825f..528ef8f 100644
--- a/server/site_tests/audio_AudioAfterSuspend/control.external_mic.unplug
+++ b/server/site_tests/audio_AudioAfterSuspend/control.external_mic.unplug
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests external_mic audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioAfterSuspend/control.headphone b/server/site_tests/audio_AudioAfterSuspend/control.headphone
index 17c40f7..a59c7cb 100644
--- a/server/site_tests/audio_AudioAfterSuspend/control.headphone
+++ b/server/site_tests/audio_AudioAfterSuspend/control.headphone
@@ -18,6 +18,7 @@
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = "audio_cable"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioAfterSuspend/control.headphone.unplug b/server/site_tests/audio_AudioAfterSuspend/control.headphone.unplug
index a880c31..65f4ddf 100644
--- a/server/site_tests/audio_AudioAfterSuspend/control.headphone.unplug
+++ b/server/site_tests/audio_AudioAfterSuspend/control.headphone.unplug
@@ -17,7 +17,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioAfterSuspend/control.internal_mic b/server/site_tests/audio_AudioAfterSuspend/control.internal_mic
index 4966637..c9ddb25 100644
--- a/server/site_tests/audio_AudioAfterSuspend/control.internal_mic
+++ b/server/site_tests/audio_AudioAfterSuspend/control.internal_mic
@@ -17,7 +17,8 @@
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = "audio_box"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal_mic audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioAfterSuspend/control.internal_mic.plug b/server/site_tests/audio_AudioAfterSuspend/control.internal_mic.plug
index f3fe91e..aeb12c3 100644
--- a/server/site_tests/audio_AudioAfterSuspend/control.internal_mic.plug
+++ b/server/site_tests/audio_AudioAfterSuspend/control.internal_mic.plug
@@ -17,6 +17,7 @@
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal_mic audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioAfterSuspend/control.internal_speaker b/server/site_tests/audio_AudioAfterSuspend/control.internal_speaker
index c5d7dba..d534d85 100644
--- a/server/site_tests/audio_AudioAfterSuspend/control.internal_speaker
+++ b/server/site_tests/audio_AudioAfterSuspend/control.internal_speaker
@@ -17,7 +17,8 @@
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = "audio_box"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal_speaker audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioAfterSuspend/control.internal_speaker.plug b/server/site_tests/audio_AudioAfterSuspend/control.internal_speaker.plug
index f1fcf05..9cb3f1c 100644
--- a/server/site_tests/audio_AudioAfterSuspend/control.internal_speaker.plug
+++ b/server/site_tests/audio_AudioAfterSuspend/control.internal_speaker.plug
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal_speaker audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioArtifacts/audio_AudioArtifacts.py b/server/site_tests/audio_AudioArtifacts/audio_AudioArtifacts.py
index e411f66..3bd902c 100644
--- a/server/site_tests/audio_AudioArtifacts/audio_AudioArtifacts.py
+++ b/server/site_tests/audio_AudioArtifacts/audio_AudioArtifacts.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -183,4 +184,3 @@
                     check_artifacts=True,
                     mute_durations=mute_duration_in_secs,
                     volume_changes=volume_changes)
-
diff --git a/server/site_tests/audio_AudioArtifacts/control.a2dp b/server/site_tests/audio_AudioArtifacts/control.a2dp
index 05ec9b8..ee51c12 100644
--- a/server/site_tests/audio_AudioArtifacts/control.a2dp
+++ b/server/site_tests/audio_AudioArtifacts/control.a2dp
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth A2DP audio artifacts against DUT.
diff --git a/server/site_tests/audio_AudioArtifacts/control.hdmi b/server/site_tests/audio_AudioArtifacts/control.hdmi
index 0d29f3d..c6b20db 100644
--- a/server/site_tests/audio_AudioArtifacts/control.hdmi
+++ b/server/site_tests/audio_AudioArtifacts/control.hdmi
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_hdmiaudio"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests HDMI audio artifacts against DUT.
diff --git a/server/site_tests/audio_AudioArtifacts/control.headphone b/server/site_tests/audio_AudioArtifacts/control.headphone
index 8ff38e1..d1fc9f9 100644
--- a/server/site_tests/audio_AudioArtifacts/control.headphone
+++ b/server/site_tests/audio_AudioArtifacts/control.headphone
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio artifacts against DUT.
diff --git a/server/site_tests/audio_AudioArtifacts/control.headphone.150s b/server/site_tests/audio_AudioArtifacts/control.headphone.150s
index 6d098b2..7302ecc 100644
--- a/server/site_tests/audio_AudioArtifacts/control.headphone.150s
+++ b/server/site_tests/audio_AudioArtifacts/control.headphone.150s
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio artifacts against DUT.
diff --git a/server/site_tests/audio_AudioArtifacts/control.hsp b/server/site_tests/audio_AudioArtifacts/control.hsp
index 5e1d253..0a89085 100644
--- a/server/site_tests/audio_AudioArtifacts/control.hsp
+++ b/server/site_tests/audio_AudioArtifacts/control.hsp
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth HSP audio artifacts against DUT.
diff --git a/server/site_tests/audio_AudioArtifacts/control.mute_headphone b/server/site_tests/audio_AudioArtifacts/control.mute_headphone
index a6642b2..8e39ef3 100644
--- a/server/site_tests/audio_AudioArtifacts/control.mute_headphone
+++ b/server/site_tests/audio_AudioArtifacts/control.mute_headphone
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio artifacts against DUT.
diff --git a/server/site_tests/audio_AudioArtifacts/control.speaker b/server/site_tests/audio_AudioArtifacts/control.speaker
index 52526e2..6b5859c 100644
--- a/server/site_tests/audio_AudioArtifacts/control.speaker
+++ b/server/site_tests/audio_AudioArtifacts/control.speaker
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests speaker audio artifacts against DUT.
diff --git a/server/site_tests/audio_AudioArtifacts/control.usb b/server/site_tests/audio_AudioArtifacts/control.usb
index 3ac74c2..d7c9497 100644
--- a/server/site_tests/audio_AudioArtifacts/control.usb
+++ b/server/site_tests/audio_AudioArtifacts/control.usb
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_usbaudio"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB audio artifacts against DUT.
diff --git a/server/site_tests/audio_AudioArtifacts/control.volume_changing_headphone b/server/site_tests/audio_AudioArtifacts/control.volume_changing_headphone
index 747b383..76bbb28 100644
--- a/server/site_tests/audio_AudioArtifacts/control.volume_changing_headphone
+++ b/server/site_tests/audio_AudioArtifacts/control.volume_changing_headphone
@@ -16,7 +16,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio artifacts against DUT.
diff --git a/server/site_tests/audio_AudioBasicAssistant/audio_AudioBasicAssistant.py b/server/site_tests/audio_AudioBasicAssistant/audio_AudioBasicAssistant.py
index 6c85856..c4a1b01 100644
--- a/server/site_tests/audio_AudioBasicAssistant/audio_AudioBasicAssistant.py
+++ b/server/site_tests/audio_AudioBasicAssistant/audio_AudioBasicAssistant.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioBasicAssistant/control b/server/site_tests/audio_AudioBasicAssistant/control
index 67f6bca..509a7ca 100644
--- a/server/site_tests/audio_AudioBasicAssistant/control
+++ b/server/site_tests/audio_AudioBasicAssistant/control
@@ -13,8 +13,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
-#ATTRIBUTES = "suite:chameleon_audio_unstable"
+ATTRIBUTES = "suite:audio_advanced"
 DEPENDENCIES = "audio_box"
+PY_VERSION = 3
 
 DOC = """
 A basic assistant voice command test.
diff --git a/server/site_tests/audio_AudioBasicAssistant/control.dsp b/server/site_tests/audio_AudioBasicAssistant/control.dsp
index 9109dac..099a682 100644
--- a/server/site_tests/audio_AudioBasicAssistant/control.dsp
+++ b/server/site_tests/audio_AudioBasicAssistant/control.dsp
@@ -15,6 +15,7 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box, hotwording"
+PY_VERSION = 3
 
 DOC = """
 A basic assistant voice command test with hotword dsp.
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlayback/audio_AudioBasicBluetoothPlayback.py b/server/site_tests/audio_AudioBasicBluetoothPlayback/audio_AudioBasicBluetoothPlayback.py
index 86e8776..523a328 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlayback/audio_AudioBasicBluetoothPlayback.py
+++ b/server/site_tests/audio_AudioBasicBluetoothPlayback/audio_AudioBasicBluetoothPlayback.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlayback/control b/server/site_tests/audio_AudioBasicBluetoothPlayback/control
index d588fbd..a481d90 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlayback/control
+++ b/server/site_tests/audio_AudioBasicBluetoothPlayback/control
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function.
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlayback/control.disable b/server/site_tests/audio_AudioBasicBluetoothPlayback/control.disable
index 26c7dfb..8667703 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlayback/control.disable
+++ b/server/site_tests/audio_AudioBasicBluetoothPlayback/control.disable
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlayback/control.disconnect b/server/site_tests/audio_AudioBasicBluetoothPlayback/control.disconnect
index eeeac83..e4becef 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlayback/control.disconnect
+++ b/server/site_tests/audio_AudioBasicBluetoothPlayback/control.disconnect
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlayback/control.quality b/server/site_tests/audio_AudioBasicBluetoothPlayback/control.quality
index 6ac5e82..4543c02 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlayback/control.quality
+++ b/server/site_tests/audio_AudioBasicBluetoothPlayback/control.quality
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function with quality check.
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlayback/control.suspend b/server/site_tests/audio_AudioBasicBluetoothPlayback/control.suspend
index 61cfe6f..86e91dd 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlayback/control.suspend
+++ b/server/site_tests/audio_AudioBasicBluetoothPlayback/control.suspend
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function.
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/audio_AudioBasicBluetoothPlaybackRecord.py b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/audio_AudioBasicBluetoothPlaybackRecord.py
index 0537602..43f97f1 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/audio_AudioBasicBluetoothPlaybackRecord.py
+++ b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/audio_AudioBasicBluetoothPlaybackRecord.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control
index 1b2f27b..41c51fb 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control
+++ b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function.
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.disable b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.disable
index be7a951..37f0508 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.disable
+++ b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.disable
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.disconnect b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.disconnect
index 1371973..b866c04 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.disconnect
+++ b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.disconnect
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.quality b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.quality
index a78670f..a749015 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.quality
+++ b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.quality
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback and record audio function and quality.
diff --git a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.suspend b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.suspend
index 976600f..3aad328 100644
--- a/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.suspend
+++ b/server/site_tests/audio_AudioBasicBluetoothPlaybackRecord/control.suspend
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function.
diff --git a/server/site_tests/audio_AudioBasicBluetoothRecord/audio_AudioBasicBluetoothRecord.py b/server/site_tests/audio_AudioBasicBluetoothRecord/audio_AudioBasicBluetoothRecord.py
index 7fc31e6..7a9e711 100644
--- a/server/site_tests/audio_AudioBasicBluetoothRecord/audio_AudioBasicBluetoothRecord.py
+++ b/server/site_tests/audio_AudioBasicBluetoothRecord/audio_AudioBasicBluetoothRecord.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioBasicBluetoothRecord/control b/server/site_tests/audio_AudioBasicBluetoothRecord/control
index 2c05e18..a84f4f5 100644
--- a/server/site_tests/audio_AudioBasicBluetoothRecord/control
+++ b/server/site_tests/audio_AudioBasicBluetoothRecord/control
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function.
diff --git a/server/site_tests/audio_AudioBasicBluetoothRecord/control.disable b/server/site_tests/audio_AudioBasicBluetoothRecord/control.disable
index d32c9f3..bd25dae 100644
--- a/server/site_tests/audio_AudioBasicBluetoothRecord/control.disable
+++ b/server/site_tests/audio_AudioBasicBluetoothRecord/control.disable
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function
diff --git a/server/site_tests/audio_AudioBasicBluetoothRecord/control.disconnect b/server/site_tests/audio_AudioBasicBluetoothRecord/control.disconnect
index 9ed1898..94e81a4 100644
--- a/server/site_tests/audio_AudioBasicBluetoothRecord/control.disconnect
+++ b/server/site_tests/audio_AudioBasicBluetoothRecord/control.disconnect
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function
diff --git a/server/site_tests/audio_AudioBasicBluetoothRecord/control.quality b/server/site_tests/audio_AudioBasicBluetoothRecord/control.quality
index ae35819..821f7a1 100644
--- a/server/site_tests/audio_AudioBasicBluetoothRecord/control.quality
+++ b/server/site_tests/audio_AudioBasicBluetoothRecord/control.quality
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth record audio function and quality.
diff --git a/server/site_tests/audio_AudioBasicBluetoothRecord/control.suspend b/server/site_tests/audio_AudioBasicBluetoothRecord/control.suspend
index 38f2df9..8e7fdf0 100644
--- a/server/site_tests/audio_AudioBasicBluetoothRecord/control.suspend
+++ b/server/site_tests/audio_AudioBasicBluetoothRecord/control.suspend
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth playback audio function.
diff --git a/server/site_tests/audio_AudioBasicExternalMicrophone/audio_AudioBasicExternalMicrophone.py b/server/site_tests/audio_AudioBasicExternalMicrophone/audio_AudioBasicExternalMicrophone.py
index dd4145f..1a34c40 100644
--- a/server/site_tests/audio_AudioBasicExternalMicrophone/audio_AudioBasicExternalMicrophone.py
+++ b/server/site_tests/audio_AudioBasicExternalMicrophone/audio_AudioBasicExternalMicrophone.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -27,12 +28,15 @@
     RECORD_SECONDS = 9
     DELAY_AFTER_BINDING = 0.5
 
-    def run_once(self, check_quality=False):
+    def run_once(self, check_quality=False, blocked_boards=[]):
         """Running basic headphone audio tests.
 
         @param check_quality: flag to check audio quality.
+        @param blocked_boards: boards on which the test is skipped (N/A).
 
         """
+        if self.host.get_board().split(':')[1] in blocked_boards:
+            raise error.TestNAError('Board NOT APPLICABLE to test!')
         if not audio_test_utils.has_audio_jack(self.host):
             raise error.TestNAError(
                     'No audio jack for the DUT.'
diff --git a/server/site_tests/audio_AudioBasicExternalMicrophone/control b/server/site_tests/audio_AudioBasicExternalMicrophone/control
index 763c2c9..52547b2 100644
--- a/server/site_tests/audio_AudioBasicExternalMicrophone/control
+++ b/server/site_tests/audio_AudioBasicExternalMicrophone/control
@@ -15,6 +15,7 @@
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = "audio_cable"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests external microphone audio function.
@@ -25,6 +26,9 @@
 
 def run(machine):
     host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_AudioBasicExternalMicrophone", host=host)
+    job.run_test("audio_AudioBasicExternalMicrophone", host=host,
+                 blocked_boards=['lars',    # Pending fix for b/147647051
+                                 'lars-kernelnext']
+                )
 
 parallel_simple(run, machines)
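
The blocked_boards hook wired up above is driven entirely by the control file: run_once() splits the board label returned by host.get_board() and raises TestNAError when the name is in the list, so the run is reported as not applicable instead of failed. A minimal standalone sketch of that check, assuming the label has the form 'board:<name>' as the split(':')[1] in the patch implies; the helper name below is illustrative, not an autotest API:

    def is_board_blocked(board_label, blocked_boards):
        """Return True if the DUT's board name appears in the blocked list."""
        board_name = board_label.split(':')[1]  # e.g. 'board:lars' -> 'lars'
        return board_name in blocked_boards

    assert is_board_blocked('board:lars', ['lars', 'lars-kernelnext'])
    assert not is_board_blocked('board:eve', ['lars', 'lars-kernelnext'])
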
diff --git a/server/site_tests/audio_AudioBasicExternalMicrophone/control.quality b/server/site_tests/audio_AudioBasicExternalMicrophone/control.quality
index df26792..2acc24c 100644
--- a/server/site_tests/audio_AudioBasicExternalMicrophone/control.quality
+++ b/server/site_tests/audio_AudioBasicExternalMicrophone/control.quality
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests external microphone audio function and quality.
diff --git a/server/site_tests/audio_AudioBasicHDMI/audio_AudioBasicHDMI.py b/server/site_tests/audio_AudioBasicHDMI/audio_AudioBasicHDMI.py
index 72d7d3c..84aa744 100644
--- a/server/site_tests/audio_AudioBasicHDMI/audio_AudioBasicHDMI.py
+++ b/server/site_tests/audio_AudioBasicHDMI/audio_AudioBasicHDMI.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioBasicHDMI/control b/server/site_tests/audio_AudioBasicHDMI/control
index 95e33b6..f773f26 100644
--- a/server/site_tests/audio_AudioBasicHDMI/control
+++ b/server/site_tests/audio_AudioBasicHDMI/control
@@ -15,6 +15,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon:hdmi, audio_board, test_hdmiaudio'
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests HDMI audio function.
diff --git a/server/site_tests/audio_AudioBasicHDMI/control.suspend b/server/site_tests/audio_AudioBasicHDMI/control.suspend
index c1d97e4..e60f56c 100644
--- a/server/site_tests/audio_AudioBasicHDMI/control.suspend
+++ b/server/site_tests/audio_AudioBasicHDMI/control.suspend
@@ -16,6 +16,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon:hdmi, audio_board, test_hdmiaudio'
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests HDMI audio function after suspend-resume.
diff --git a/server/site_tests/audio_AudioBasicHDMI/control.suspend_quality b/server/site_tests/audio_AudioBasicHDMI/control.suspend_quality
index 3513dae..11b71b9 100644
--- a/server/site_tests/audio_AudioBasicHDMI/control.suspend_quality
+++ b/server/site_tests/audio_AudioBasicHDMI/control.suspend_quality
@@ -14,7 +14,8 @@
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon:hdmi, audio_board, test_hdmiaudio'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests HDMI audio function after suspend-resume.
diff --git a/server/site_tests/audio_AudioBasicHDMI/control.suspend_while_playback b/server/site_tests/audio_AudioBasicHDMI/control.suspend_while_playback
index cc9adb5..a646edd 100644
--- a/server/site_tests/audio_AudioBasicHDMI/control.suspend_while_playback
+++ b/server/site_tests/audio_AudioBasicHDMI/control.suspend_while_playback
@@ -14,7 +14,8 @@
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon:hdmi, audio_board, test_hdmiaudio'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests HDMI audio function after suspend-resume.
diff --git a/server/site_tests/audio_AudioBasicHeadphone/audio_AudioBasicHeadphone.py b/server/site_tests/audio_AudioBasicHeadphone/audio_AudioBasicHeadphone.py
index d1cb8d6..abe3b83 100644
--- a/server/site_tests/audio_AudioBasicHeadphone/audio_AudioBasicHeadphone.py
+++ b/server/site_tests/audio_AudioBasicHeadphone/audio_AudioBasicHeadphone.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioBasicHeadphone/control b/server/site_tests/audio_AudioBasicHeadphone/control
index 75d1911..0b3f2e4 100644
--- a/server/site_tests/audio_AudioBasicHeadphone/control
+++ b/server/site_tests/audio_AudioBasicHeadphone/control
@@ -15,6 +15,7 @@
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = "audio_cable"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio function.
diff --git a/server/site_tests/audio_AudioBasicHeadphone/control.quality b/server/site_tests/audio_AudioBasicHeadphone/control.quality
index b76ca62..fc1b489 100644
--- a/server/site_tests/audio_AudioBasicHeadphone/control.quality
+++ b/server/site_tests/audio_AudioBasicHeadphone/control.quality
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio function and quality.
diff --git a/server/site_tests/audio_AudioBasicHotwording/audio_AudioBasicHotwording.py b/server/site_tests/audio_AudioBasicHotwording/audio_AudioBasicHotwording.py
index a23bef6..b402427 100644
--- a/server/site_tests/audio_AudioBasicHotwording/audio_AudioBasicHotwording.py
+++ b/server/site_tests/audio_AudioBasicHotwording/audio_AudioBasicHotwording.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioBasicHotwording/control b/server/site_tests/audio_AudioBasicHotwording/control
index 3bb8470..b22f1e1 100644
--- a/server/site_tests/audio_AudioBasicHotwording/control
+++ b/server/site_tests/audio_AudioBasicHotwording/control
@@ -13,9 +13,10 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:audio_basic"
+ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = "audio_box, hotwording"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests hotwording function.
diff --git a/server/site_tests/audio_AudioBasicHotwording/control.suspend b/server/site_tests/audio_AudioBasicHotwording/control.suspend
index 9362873..f2caffb 100644
--- a/server/site_tests/audio_AudioBasicHotwording/control.suspend
+++ b/server/site_tests/audio_AudioBasicHotwording/control.suspend
@@ -16,7 +16,8 @@
 # Stop scheduling this until it can be passed manually
 # ATTRIBUTES = "suite:audio_advanced"
 DEPENDENCIES = "audio_box, hotwording"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests hotwording function after the DUT suspend.
diff --git a/server/site_tests/audio_AudioBasicInternalMicrophone/audio_AudioBasicInternalMicrophone.py b/server/site_tests/audio_AudioBasicInternalMicrophone/audio_AudioBasicInternalMicrophone.py
index cbd2914..b84f76e 100644
--- a/server/site_tests/audio_AudioBasicInternalMicrophone/audio_AudioBasicInternalMicrophone.py
+++ b/server/site_tests/audio_AudioBasicInternalMicrophone/audio_AudioBasicInternalMicrophone.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioBasicInternalMicrophone/control b/server/site_tests/audio_AudioBasicInternalMicrophone/control
index d339844..887c497 100644
--- a/server/site_tests/audio_AudioBasicInternalMicrophone/control
+++ b/server/site_tests/audio_AudioBasicInternalMicrophone/control
@@ -15,6 +15,7 @@
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = "audio_box"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal microphone audio function.
diff --git a/server/site_tests/audio_AudioBasicInternalSpeaker/audio_AudioBasicInternalSpeaker.py b/server/site_tests/audio_AudioBasicInternalSpeaker/audio_AudioBasicInternalSpeaker.py
index 9161adc..b3b8a10 100644
--- a/server/site_tests/audio_AudioBasicInternalSpeaker/audio_AudioBasicInternalSpeaker.py
+++ b/server/site_tests/audio_AudioBasicInternalSpeaker/audio_AudioBasicInternalSpeaker.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioBasicInternalSpeaker/control b/server/site_tests/audio_AudioBasicInternalSpeaker/control
index 85ad92d..f0c4d72 100644
--- a/server/site_tests/audio_AudioBasicInternalSpeaker/control
+++ b/server/site_tests/audio_AudioBasicInternalSpeaker/control
@@ -15,6 +15,7 @@
 ATTRIBUTES = "suite:audio_essential"
 DEPENDENCIES = "audio_box"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal speaker audio function.
diff --git a/server/site_tests/audio_AudioBasicUSBPlayback/audio_AudioBasicUSBPlayback.py b/server/site_tests/audio_AudioBasicUSBPlayback/audio_AudioBasicUSBPlayback.py
index 179130d..12aa2cb 100644
--- a/server/site_tests/audio_AudioBasicUSBPlayback/audio_AudioBasicUSBPlayback.py
+++ b/server/site_tests/audio_AudioBasicUSBPlayback/audio_AudioBasicUSBPlayback.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -12,6 +13,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.client.cros.chameleon import chameleon_audio_helper
 from autotest_lib.server.cros.audio import audio_test
+from autotest_lib.client.common_lib import error
 
 
 class audio_AudioBasicUSBPlayback(audio_test.AudioTest):
@@ -24,13 +26,16 @@
     version = 1
     RECORD_SECONDS = 5
 
-    def run_once(self, suspend=False):
+    def run_once(self, suspend=False, blocked_boards=[]):
         """Runs Basic Audio USB playback test.
 
         @param suspend: True to suspend the device before playback,
                         False otherwise.
 
         """
+        if self.host.get_board().split(':')[1] in blocked_boards:
+            raise error.TestNAError('Board pending fix for b/233962403!')
+
         golden_file = audio_test_data.GenerateAudioTestData(
                 path=os.path.join(self.bindir, 'fix_1k_440_16.wav'),
                 duration_secs=6,
diff --git a/server/site_tests/audio_AudioBasicUSBPlayback/control b/server/site_tests/audio_AudioBasicUSBPlayback/control
index 70ff743..614ea39 100644
--- a/server/site_tests/audio_AudioBasicUSBPlayback/control
+++ b/server/site_tests/audio_AudioBasicUSBPlayback/control
@@ -15,6 +15,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon, audio_board, test_usbaudio'
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB audio function.
@@ -25,6 +26,9 @@
 
 def run(machine):
     host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_AudioBasicUSBPlayback", host=host)
+    job.run_test("audio_AudioBasicUSBPlayback", host=host,
+                 blocked_boards=['asurada',    # Pending fix for b/233962403
+                                 'cherry', 'elm', 'elm-kernelnext', 'grunt',
+                                 'jacuzzi', 'kukui'])
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioBasicUSBPlayback/control.suspend b/server/site_tests/audio_AudioBasicUSBPlayback/control.suspend
index bd9e57b..1ab7a44 100644
--- a/server/site_tests/audio_AudioBasicUSBPlayback/control.suspend
+++ b/server/site_tests/audio_AudioBasicUSBPlayback/control.suspend
@@ -15,6 +15,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon, audio_board, test_usbaudio'
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB audio function after suspend/resume.
@@ -26,6 +27,9 @@
 def run(machine):
     host = hosts.create_host(machine, chameleon_args=chameleon_args)
     job.run_test("audio_AudioBasicUSBPlayback", host=host, suspend=True,
-                 tag="suspend")
+                 tag="suspend",
+                 blocked_boards=['asurada',    # Pending fix for b/233962403
+                                 'cherry', 'elm', 'elm-kernelnext', 'grunt',
+                                 'jacuzzi', 'kukui'])
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioBasicUSBPlaybackRecord/audio_AudioBasicUSBPlaybackRecord.py b/server/site_tests/audio_AudioBasicUSBPlaybackRecord/audio_AudioBasicUSBPlaybackRecord.py
index d5d38be..85fbafe 100644
--- a/server/site_tests/audio_AudioBasicUSBPlaybackRecord/audio_AudioBasicUSBPlaybackRecord.py
+++ b/server/site_tests/audio_AudioBasicUSBPlaybackRecord/audio_AudioBasicUSBPlaybackRecord.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,6 +16,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.client.cros.chameleon import chameleon_audio_helper
 from autotest_lib.server.cros.audio import audio_test
+from autotest_lib.client.common_lib import error
 
 
 class audio_AudioBasicUSBPlaybackRecord(audio_test.AudioTest):
@@ -27,13 +29,16 @@
     version = 1
     RECORD_SECONDS = 5
 
-    def run_once(self, suspend=False):
+    def run_once(self, suspend=False, blocked_boards=[]):
         """Runs Basic Audio USB playback/record test.
 
         @param suspend: True to suspend the device before playback/record,
                         False otherwise.
 
         """
+        if self.host.get_board().split(':')[1] in blocked_boards:
+            raise error.TestNAError('Board pending fix for b/233962403!')
+
         golden_file = audio_test_data.GenerateAudioTestData(
                 path=os.path.join(self.bindir, 'fix_1k_440_16.wav'),
                 duration_secs=6,
diff --git a/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control b/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control
index 695d9a0..89fc987 100644
--- a/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control
+++ b/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control
@@ -15,6 +15,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon, audio_board, test_usbaudio'
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB audio function for playback and record.
@@ -25,6 +26,9 @@
 
 def run(machine):
     host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_AudioBasicUSBPlaybackRecord", host=host)
+    job.run_test("audio_AudioBasicUSBPlaybackRecord", host=host,
+                 blocked_boards=['asurada',    # Pending fix for b/233962403
+                                 'cherry', 'elm', 'elm-kernelnext', 'grunt',
+                                 'jacuzzi', 'kukui'])
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control.suspend b/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control.suspend
index 784da81..c57cdec 100644
--- a/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control.suspend
+++ b/server/site_tests/audio_AudioBasicUSBPlaybackRecord/control.suspend
@@ -15,6 +15,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon, audio_board, test_usbaudio'
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB audio function for playback and record after suspend/resume.
@@ -26,6 +27,9 @@
 def run(machine):
     host = hosts.create_host(machine, chameleon_args=chameleon_args)
     job.run_test("audio_AudioBasicUSBPlaybackRecord", host=host, suspend=True,
-                 tag="suspend")
+                 tag="suspend",
+                 blocked_boards=['asurada',    # Pending fix for b/233962403
+                                 'cherry', 'elm', 'elm-kernelnext', 'grunt',
+                                 'jacuzzi', 'kukui'])
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioBasicUSBRecord/audio_AudioBasicUSBRecord.py b/server/site_tests/audio_AudioBasicUSBRecord/audio_AudioBasicUSBRecord.py
index 3bafa58..9f41bea 100644
--- a/server/site_tests/audio_AudioBasicUSBRecord/audio_AudioBasicUSBRecord.py
+++ b/server/site_tests/audio_AudioBasicUSBRecord/audio_AudioBasicUSBRecord.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -12,6 +13,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_helper
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.server.cros.audio import audio_test
+from autotest_lib.client.common_lib import error
 
 
 class audio_AudioBasicUSBRecord(audio_test.AudioTest):
@@ -24,13 +26,17 @@
     version = 1
     RECORD_SECONDS = 5
 
-    def run_once(self, suspend=False):
+    def run_once(self, suspend=False, blocked_boards=[]):
         """Runs Basic Audio USB recording test.
 
         @param suspend: True to suspend the device before recording,
                         False otherwise.
 
         """
+
+        if self.host.get_board().split(':')[1] in blocked_boards:
+            raise error.TestNAError('Board pending fix for b/233962403!')
+
         golden_file = audio_test_data.GenerateAudioTestData(
                 path=os.path.join(self.bindir, 'fix_1k_440_16.wav'),
                 duration_secs=6,
diff --git a/server/site_tests/audio_AudioBasicUSBRecord/control b/server/site_tests/audio_AudioBasicUSBRecord/control
index d81c24c..9c9a89f 100644
--- a/server/site_tests/audio_AudioBasicUSBRecord/control
+++ b/server/site_tests/audio_AudioBasicUSBRecord/control
@@ -15,6 +15,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon, audio_board, test_usbaudio'
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB audio function.
@@ -25,6 +26,9 @@
 
 def run(machine):
     host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_AudioBasicUSBRecord", host=host)
+    job.run_test("audio_AudioBasicUSBRecord", host=host,
+                 blocked_boards=['asurada',    # Pending fix for b/233962403
+                                 'cherry', 'elm', 'elm-kernelnext', 'grunt',
+                                 'jacuzzi', 'kukui'])
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioBasicUSBRecord/control.suspend b/server/site_tests/audio_AudioBasicUSBRecord/control.suspend
index 983da84..1322376 100644
--- a/server/site_tests/audio_AudioBasicUSBRecord/control.suspend
+++ b/server/site_tests/audio_AudioBasicUSBRecord/control.suspend
@@ -15,6 +15,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon, audio_board, test_usbaudio'
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB audio function after suspend/resume.
@@ -26,6 +27,9 @@
 def run(machine):
     host = hosts.create_host(machine, chameleon_args=chameleon_args)
     job.run_test("audio_AudioBasicUSBRecord", host=host, suspend=True,
-                 tag="suspend")
+                 tag="suspend",
+                 blocked_boards=['asurada',    # Pending fix for b/233962403
+                                 'cherry', 'elm', 'elm-kernelnext', 'grunt',
+                                 'jacuzzi', 'kukui'])
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioBluetoothConnectionStability/audio_AudioBluetoothConnectionStability.py b/server/site_tests/audio_AudioBluetoothConnectionStability/audio_AudioBluetoothConnectionStability.py
index f9fc671..691fcb3 100644
--- a/server/site_tests/audio_AudioBluetoothConnectionStability/audio_AudioBluetoothConnectionStability.py
+++ b/server/site_tests/audio_AudioBluetoothConnectionStability/audio_AudioBluetoothConnectionStability.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioBluetoothConnectionStability/control b/server/site_tests/audio_AudioBluetoothConnectionStability/control
index dbffc91..6cba989 100644
--- a/server/site_tests/audio_AudioBluetoothConnectionStability/control
+++ b/server/site_tests/audio_AudioBluetoothConnectionStability/control
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_bluetooth"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests bluetooth stability.
diff --git a/server/site_tests/audio_AudioNodeSwitch/audio_AudioNodeSwitch.py b/server/site_tests/audio_AudioNodeSwitch/audio_AudioNodeSwitch.py
index 27cf6f0..883266c 100644
--- a/server/site_tests/audio_AudioNodeSwitch/audio_AudioNodeSwitch.py
+++ b/server/site_tests/audio_AudioNodeSwitch/audio_AudioNodeSwitch.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioNodeSwitch/control b/server/site_tests/audio_AudioNodeSwitch/control
index 035faf3..55eb462 100644
--- a/server/site_tests/audio_AudioNodeSwitch/control
+++ b/server/site_tests/audio_AudioNodeSwitch/control
@@ -6,7 +6,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "audio_AudioNodeSwitch"
 PURPOSE = "Check if correct audio channel selected."
 CRITERIA = "This test will fail if expected audio channel is not selected."
@@ -16,6 +16,7 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "chameleon_audio_unstable"
 DEPENDENCIES = "audio_box, test_audio_jack_usb_hdmi"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio nodes selection.
diff --git a/server/site_tests/audio_AudioNodeSwitch/control.HDMI b/server/site_tests/audio_AudioNodeSwitch/control.HDMI
index d6c9a32..743e69e 100644
--- a/server/site_tests/audio_AudioNodeSwitch/control.HDMI
+++ b/server/site_tests/audio_AudioNodeSwitch/control.HDMI
@@ -6,7 +6,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "audio_AudioNodeSwitch.HDMI"
 PURPOSE = "Check if correct audio channel selected."
 CRITERIA = "This test will fail if expected audio channel is not selected."
@@ -15,6 +15,7 @@
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
 DEPENDENCIES = "audio_box, test_audio_jack_usb_hdmi"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio nodes selection.
diff --git a/server/site_tests/audio_AudioNodeSwitch/control.HDMI_JACK_USB b/server/site_tests/audio_AudioNodeSwitch/control.HDMI_JACK_USB
index b20be57..1d5a1c7 100644
--- a/server/site_tests/audio_AudioNodeSwitch/control.HDMI_JACK_USB
+++ b/server/site_tests/audio_AudioNodeSwitch/control.HDMI_JACK_USB
@@ -6,7 +6,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "audio_AudioNodeSwitch.HDMI_JACK_USB"
 PURPOSE = "Check if correct audio channel selected."
 CRITERIA = "This test will fail if expected audio channel is not selected."
@@ -16,6 +16,7 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box, test_audio_jack_usb_hdmi"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio nodes selection.
diff --git a/server/site_tests/audio_AudioNodeSwitch/control.HDMI_USB b/server/site_tests/audio_AudioNodeSwitch/control.HDMI_USB
index eff1273..a589f6b 100644
--- a/server/site_tests/audio_AudioNodeSwitch/control.HDMI_USB
+++ b/server/site_tests/audio_AudioNodeSwitch/control.HDMI_USB
@@ -6,7 +6,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "audio_AudioNodeSwitch.HDMI_USB"
 PURPOSE = "Check if correct audio channel selected."
 CRITERIA = "This test will fail if expected audio channel is not selected."
@@ -16,6 +16,7 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box, test_audio_jack_usb_hdmi"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio nodes selection.
diff --git a/server/site_tests/audio_AudioNodeSwitch/control.HDMI_while_playing_audio b/server/site_tests/audio_AudioNodeSwitch/control.HDMI_while_playing_audio
index 6684f71..7510f95 100644
--- a/server/site_tests/audio_AudioNodeSwitch/control.HDMI_while_playing_audio
+++ b/server/site_tests/audio_AudioNodeSwitch/control.HDMI_while_playing_audio
@@ -15,6 +15,7 @@
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
 DEPENDENCIES = "audio_box, test_audio_jack_usb_hdmi"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio nodes selection and audio routing.
diff --git a/server/site_tests/audio_AudioNodeSwitch/control.JACK b/server/site_tests/audio_AudioNodeSwitch/control.JACK
index f0591cd..5477b4e 100644
--- a/server/site_tests/audio_AudioNodeSwitch/control.JACK
+++ b/server/site_tests/audio_AudioNodeSwitch/control.JACK
@@ -6,7 +6,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "audio_AudioNodeSwitch.JACK"
 PURPOSE = "Check if correct audio channel selected."
 CRITERIA = "This test will fail if expected audio channel is not selected."
@@ -15,6 +15,7 @@
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
 DEPENDENCIES = "audio_box, test_audio_jack_usb_hdmi"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio nodes selection.
diff --git a/server/site_tests/audio_AudioNodeSwitch/control.JACK_HDMI b/server/site_tests/audio_AudioNodeSwitch/control.JACK_HDMI
index 692bf81..af6b534 100644
--- a/server/site_tests/audio_AudioNodeSwitch/control.JACK_HDMI
+++ b/server/site_tests/audio_AudioNodeSwitch/control.JACK_HDMI
@@ -6,7 +6,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "audio_AudioNodeSwitch.JACK_HDMI"
 PURPOSE = "Check if correct audio channel selected."
 CRITERIA = "This test will fail if expected audio channel is not selected."
@@ -16,6 +16,7 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box, test_audio_jack_usb_hdmi"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio nodes selection.
diff --git a/server/site_tests/audio_AudioNodeSwitch/control.JACK_USB b/server/site_tests/audio_AudioNodeSwitch/control.JACK_USB
index 265f8acb..943e676 100644
--- a/server/site_tests/audio_AudioNodeSwitch/control.JACK_USB
+++ b/server/site_tests/audio_AudioNodeSwitch/control.JACK_USB
@@ -6,7 +6,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "audio_AudioNodeSwitch.JACK_USB"
 PURPOSE = "Check if correct audio channel selected."
 CRITERIA = "This test will fail if expected audio channel is not selected."
@@ -16,6 +16,7 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box, test_audio_jack_usb_hdmi"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio nodes selection.
diff --git a/server/site_tests/audio_AudioNodeSwitch/control.USB b/server/site_tests/audio_AudioNodeSwitch/control.USB
index 6cf322d..296a640 100644
--- a/server/site_tests/audio_AudioNodeSwitch/control.USB
+++ b/server/site_tests/audio_AudioNodeSwitch/control.USB
@@ -6,7 +6,7 @@
 from autotest_lib.client.cros.chameleon import chameleon_audio_ids
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "audio_AudioNodeSwitch.USB"
 PURPOSE = "Check if correct audio channel selected."
 CRITERIA = "This test will fail if expected audio channel is not selected."
@@ -15,6 +15,7 @@
 TEST_CLASS = "audio"
 TEST_TYPE = "server"
 DEPENDENCIES = "audio_box, test_audio_jack_usb_hdmi"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio nodes selection.
diff --git a/server/site_tests/audio_AudioNoiseCancellation/audio_AudioNoiseCancellation.py b/server/site_tests/audio_AudioNoiseCancellation/audio_AudioNoiseCancellation.py
new file mode 100644
index 0000000..4ea4a60
--- /dev/null
+++ b/server/site_tests/audio_AudioNoiseCancellation/audio_AudioNoiseCancellation.py
@@ -0,0 +1,313 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""This is a server side noise cancellation test using the Chameleon board."""
+
+import logging
+import os
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.audio import audio_test_data
+from autotest_lib.client.cros.audio import sox_utils
+from autotest_lib.client.cros.audio import visqol_utils
+from autotest_lib.client.cros.bluetooth.bluetooth_audio_test_data import (
+        download_file_from_bucket, get_visqol_binary)
+from autotest_lib.client.cros.chameleon import audio_test_utils
+from autotest_lib.client.cros.chameleon import chameleon_audio_ids
+from autotest_lib.client.cros.chameleon import chameleon_audio_helper
+from autotest_lib.server.cros.audio import audio_test
+
+DIST_FILES_DIR = 'gs://chromeos-localmirror/distfiles/test_noise_cancellation'
+DATA_DIR = '/tmp'
+
+
+# Verification steps for the Noise Cancellation processing (NC):
+# 1. Prepare the audio source file and reference file.
+# 2. Play the source file by Chameleon.
+# 3. Record by DUT Internal Mic when NC is on and get ViSQOL score A.
+# 4. Repeat step 2.
+# 5. Record by DUT Internal Mic when NC is off and get ViSQOL score B.
+# 6. Check if A - B >= threshold
+#
+# In practice, ViSQOL is not the most suitable metric for NC due to its
+# intrusive design (reference: go/visqol). However, it is fair enough to
+# compare the relative gain (or degradation) before and after de-noising.
+#
+# TODO(johnylin): replace ViSQOL with other metrics if applicable.
+# TODO(johnylin): add more speech and noise test inputs for inclusion.
+class audio_AudioNoiseCancellation(audio_test.AudioTest):
+    """Server side input audio noise cancellation test.
+
+    This test talks to a Chameleon board and a Cros device to verify
+    input audio noise cancellation function of the Cros device.
+
+    """
+    version = 1
+    DELAY_BEFORE_PLAYBACK_SECONDS = 3.0
+    DELAY_AFTER_PLAYBACK_SECONDS = 2.0
+    DELAY_AFTER_BINDING = 0.5
+    DELAY_AFTER_NC_TOGGLED = 0.5
+
+    cleanup_files = []
+
+    def cleanup(self):
+        # Restore the default state of bypass blocking mechanism in Cras.
+        # Restarting Cras is only way because we are not able to know the
+        # default state.
+        self.host.run('restart cras')
+
+        # Start Chrome UI.
+        self.host.run('start ui')
+
+        # Remove downloaded files and the temporary generated files.
+        for cleanup_file in self.cleanup_files:
+            if os.path.isfile(cleanup_file):
+                os.remove(cleanup_file)
+
+    def download_file_from_bucket(self, file):
+        """Download the file from GS bucket.
+
+        @param file: the file name for download.
+
+        @raises: error.TestError if failed.
+
+        @returns: the local path of the downloaded file.
+        """
+        remote_path = os.path.join(DIST_FILES_DIR, file)
+        if not download_file_from_bucket(
+                DATA_DIR, remote_path, lambda _, __, p: p.returncode == 0):
+            logging.error('Failed to download %s to %s', remote_path, DATA_DIR)
+            raise error.TestError('Failed to download file %s from bucket.' %
+                                  file)
+
+        return os.path.join(DATA_DIR, file)
+
+    def generate_noisy_speech_file(self, speech_path, noise_path):
+        """Generate the mixed audio file of speech and noise data.
+
+        @param speech_path: the file path of the pure speech audio.
+        @param noise_path: the file path of the noise audio.
+
+        @raises: error.TestError if failed.
+
+        @returns: the file path of the mixed audio.
+        """
+        mixed_wav_path = os.path.join(DATA_DIR, 'speech_noise_mixed.wav')
+        if os.path.exists(mixed_wav_path):
+            os.remove(mixed_wav_path)
+        sox_utils.mix_two_wav_files(speech_path,
+                                    noise_path,
+                                    mixed_wav_path,
+                                    input_volume=1.0)
+        if not os.path.isfile(mixed_wav_path):
+            logging.error('WAV file %s does not exist.', mixed_wav_path)
+            raise error.TestError('Failed to mix %s and %s by sox commands.' %
+                                  (speech_path, noise_path))
+
+        return mixed_wav_path
+
+    def run_once(self, test_data):
+        """Runs Audio Noise Cancellation test.
+
+        Test scenarios can be distinguished by the elements (keys) in test_data.
+        Noisy environment test:
+            test_data = dict(
+                speech_file: the WAV file for the pure speech data.
+                noise_file: the WAV file for the noise data.
+                threshold: the min required score gain for NC effect.)
+        Quiet environment test:
+            test_data = dict(
+                speech_file: the WAV file for the pure speech data.
+                threshold: the min score diff tolerance for NC effect.)
+
+        @param test_data: the dict for files and threshold as mentioned above.
+        """
+        if not self.facade.get_noise_cancellation_supported():
+            logging.warning('Noise Cancellation is not supported.')
+            raise error.TestWarn('Noise Cancellation is not supported.')
+
+        def _remove_at_cleanup(filepath):
+            self.cleanup_files.append(filepath)
+
+        # Download the files from bucket.
+        speech_path = self.download_file_from_bucket(test_data['speech_file'])
+        _remove_at_cleanup(speech_path)
+
+        ref_infos = sox_utils.get_infos_from_wav_file(speech_path)
+        if ref_infos is None:
+            raise error.TestError('Failed to get infos from wav file %s.' %
+                                  speech_path)
+
+        if 'noise_file' in test_data:
+            # Noisy environment test when 'noise_file' is given.
+            noise_path = self.download_file_from_bucket(
+                    test_data['noise_file'])
+            _remove_at_cleanup(noise_path)
+
+            test_audio_path = self.generate_noisy_speech_file(
+                    speech_path, noise_path)
+            _remove_at_cleanup(test_audio_path)
+
+            test_infos = sox_utils.get_infos_from_wav_file(test_audio_path)
+            if test_infos is None:
+                raise error.TestError('Failed to get infos from wav file %s.' %
+                                      test_audio_path)
+        else:
+            # Quiet environment test.
+            test_audio_path = speech_path
+            test_infos = ref_infos
+
+        playback_testdata = audio_test_data.AudioTestData(
+                path=test_audio_path,
+                data_format=dict(file_type='wav',
+                                 sample_format='S{}_LE'.format(
+                                         test_infos['bits']),
+                                 channel=test_infos['channels'],
+                                 rate=test_infos['rate']),
+                duration_secs=test_infos['duration'])
+
+        # Set up the ViSQOL working environment.
+        get_visqol_binary()
+
+        # Bypass the blocking mechanism in Cras to make sure Noise
+        # Cancellation is enabled.
+        self.facade.set_bypass_block_noise_cancellation(bypass=True)
+
+        source = self.widget_factory.create_widget(
+                chameleon_audio_ids.ChameleonIds.LINEOUT)
+        sink = self.widget_factory.create_widget(
+                chameleon_audio_ids.PeripheralIds.SPEAKER)
+        binder = self.widget_factory.create_binder(source, sink)
+
+        recorder = self.widget_factory.create_widget(
+                chameleon_audio_ids.CrosIds.INTERNAL_MIC)
+
+        # Select the internal mic and check cras selected the correct node.
+        audio_test_utils.check_and_set_chrome_active_node_types(
+                self.facade, None,
+                audio_test_utils.get_internal_mic_node(self.host))
+
+        # Adjust the proper input gain.
+        self.facade.set_chrome_active_input_gain(50)
+
+        # Stop the Chrome UI so Chrome does not override the NC preference.
+        self.host.run('stop ui')
+        logging.info(
+                'UI is stopped to avoid NC preference intervention from Chrome'
+        )
+
+        def _run_routine(recorded_filename, nc_enabled):
+            # Set NC state via D-Bus control.
+            self.facade.set_noise_cancellation_enabled(nc_enabled)
+            time.sleep(self.DELAY_AFTER_NC_TOGGLED)
+
+            with chameleon_audio_helper.bind_widgets(binder):
+                time.sleep(self.DELAY_AFTER_BINDING)
+
+                logfile_suffix = 'nc_on' if nc_enabled else 'nc_off'
+                audio_test_utils.dump_cros_audio_logs(
+                        self.host, self.facade, self.resultsdir,
+                        'after_binding.{}'.format(logfile_suffix))
+
+                logging.info('Set playback data on Chameleon')
+                source.set_playback_data(playback_testdata)
+
+                # Start recording, wait a few seconds, and then start playback.
+                # Make sure the recorded data has silent samples in the
+                # beginning to trim, and includes the entire playback content.
+                logging.info('Start recording from Cros device')
+                recorder.start_recording()
+                time.sleep(self.DELAY_BEFORE_PLAYBACK_SECONDS)
+
+                logging.info('Start playing %s from Chameleon',
+                             playback_testdata.path)
+                source.start_playback()
+
+                time.sleep(test_infos['duration'] +
+                           self.DELAY_AFTER_PLAYBACK_SECONDS)
+
+                recorder.stop_recording()
+                logging.info('Stopped recording from Cros device.')
+
+                audio_test_utils.dump_cros_audio_logs(
+                        self.host, self.facade, self.resultsdir,
+                        'after_recording.{}'.format(logfile_suffix))
+
+                recorder.read_recorded_binary()
+                logging.info('Read recorded binary from Cros device.')
+
+            # Remove the beginning of recorded data. This is to avoid artifact
+            # caused by Cros device codec initialization in the beginning of
+            # recording.
+            recorder.remove_head(1.0)
+
+            recorded_file = os.path.join(self.resultsdir,
+                                         recorded_filename + '.raw')
+            logging.info('Saving recorded data to %s', recorded_file)
+            recorder.save_file(recorded_file)
+            _remove_at_cleanup(recorded_file)
+
+            # WAV file is also saved by recorder.save_file().
+            recorded_wav_path = recorded_file + '.wav'
+            if not os.path.isfile(recorded_wav_path):
+                logging.error('WAV file %s does not exist.', recorded_wav_path)
+                raise error.TestError('Failed to find recorded wav file.')
+            _remove_at_cleanup(recorded_wav_path)
+
+            rec_infos = sox_utils.get_infos_from_wav_file(recorded_wav_path)
+            if rec_infos is None:
+                raise error.TestError('Failed to get infos from wav file %s.' %
+                                      recorded_wav_path)
+
+            # Downsample the recorded data from 48k to 16k rate. It is required
+            # for getting ViSQOL score in speech mode.
+            recorded_16k_path = '{}_16k{}'.format(
+                    *os.path.splitext(recorded_wav_path))
+            sox_utils.convert_format(recorded_wav_path,
+                                     rec_infos['channels'],
+                                     rec_infos['bits'],
+                                     rec_infos['rate'],
+                                     recorded_16k_path,
+                                     ref_infos['channels'],
+                                     ref_infos['bits'],
+                                     ref_infos['rate'],
+                                     1.0,
+                                     use_src_header=True,
+                                     use_dst_header=True)
+
+            # Remove the silence in the beginning and trim to the same duration
+            # as the reference file.
+            trimmed_recorded_16k_path = '{}_trim{}'.format(
+                    *os.path.splitext(recorded_16k_path))
+            sox_utils.trim_silence_from_wav_file(recorded_16k_path,
+                                                 trimmed_recorded_16k_path,
+                                                 ref_infos['duration'],
+                                                 duration_threshold=0.05)
+
+            score = visqol_utils.get_visqol_score(
+                    ref_file=speech_path,
+                    deg_file=trimmed_recorded_16k_path,
+                    log_dir=self.resultsdir,
+                    speech_mode=True)
+
+            logging.info('Recorded audio %s got ViSQOL score: %f',
+                         recorded_filename, score)
+            return score
+
+        logging.info('Run routine with NC enabled...')
+        nc_on_score = _run_routine('record_nc_enabled', nc_enabled=True)
+        logging.info('Run routine with NC disabled...')
+        nc_off_score = _run_routine('record_nc_disabled', nc_enabled=False)
+
+        score_diff = nc_on_score - nc_off_score
+
+        # Track ViSQOL performance score
+        test_desc = 'internal_mic_noise_cancellation_visqol_diff'
+        self.write_perf_keyval({test_desc: score_diff})
+
+        if score_diff < test_data['threshold']:
+            raise error.TestFail(
+                    'ViSQOL score diff for NC(=%f) is lower than threshold(=%f)'
+                    % (score_diff, test_data['threshold']))
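
The new test reduces to running the same playback-and-record routine twice, once with NC enabled and once with NC disabled, scoring both recordings with ViSQOL against the clean speech reference, and passing only when the score difference clears the scenario's threshold (the real code raises error.TestFail and records the diff via write_perf_keyval). A minimal sketch of that final decision with hypothetical scores:

    def check_nc_score_diff(nc_on_score, nc_off_score, threshold):
        """Return the ViSQOL gain from NC, raising if it is below threshold."""
        score_diff = nc_on_score - nc_off_score
        if score_diff < threshold:
            raise AssertionError(
                    'ViSQOL score diff for NC(=%f) is lower than threshold(=%f)'
                    % (score_diff, threshold))
        return score_diff

    # Noisy-environment scenario: NC must improve the score by at least 0.1.
    check_nc_score_diff(nc_on_score=3.2, nc_off_score=2.9, threshold=0.1)
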
diff --git a/server/site_tests/audio_AudioNoiseCancellation/control.noisy_env b/server/site_tests/audio_AudioNoiseCancellation/control.noisy_env
new file mode 100644
index 0000000..f3ffd87
--- /dev/null
+++ b/server/site_tests/audio_AudioNoiseCancellation/control.noisy_env
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "chromeos-chameleon"
+NAME = "audio_AudioNoiseCancellation.noisy_env"
+PURPOSE = "Remotely controlled input noise cancellation audio test."
+CRITERIA = "This test will fail if the captured audio does not reduce noise from the input."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "audio"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:audio_advanced"
+DEPENDENCIES = "board:volteer, audio_box"
+JOB_RETRIES = 1
+PY_VERSION = 3
+
+DOC = """
+This test checks if the speech quality is improved by NC when the input is mixed with noises.
+During the test, the speech and noise files will be mixed and played by Chameleon, while DUT
+records via the internal mic with NC enabled and disabled respectively. The score difference
+calculated by ViSQOL with the speech file as reference should not be less than the threshold
+specified in test_data.
+"""
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+# Test files are located in gs://chromeos-localmirror/distfiles/test_noise_cancellation
+# Both files are 15-second, 1-channel, 16k-rate.
+test_data = dict(speech_file='speech_ref.wav',
+                 noise_file='office_noise.wav',
+                 threshold=0.1)
+
+def run(machine):
+    host = hosts.create_host(machine, chameleon_args=chameleon_args)
+    job.run_test("audio_AudioNoiseCancellation", host=host, test_data=test_data, tag='noisy_env')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioNoiseCancellation/control.quiet_env b/server/site_tests/audio_AudioNoiseCancellation/control.quiet_env
new file mode 100644
index 0000000..168cbc4
--- /dev/null
+++ b/server/site_tests/audio_AudioNoiseCancellation/control.quiet_env
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "chromeos-chameleon"
+NAME = "audio_AudioNoiseCancellation.quiet_env"
+PURPOSE = "Remotely controlled input noise cancellation audio test."
+CRITERIA = "This test will fail if the captured audio quality is degraded by NC in a quiet place."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "audio"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:audio_advanced"
+DEPENDENCIES = "board:volteer, audio_box"
+JOB_RETRIES = 1
+PY_VERSION = 3
+
+DOC = """
+This test checks if the speech quality is not degraded by NC when the input is clean. During
+the test, the speech file will be played by Chameleon, while DUT records via the internal mic
+with NC enabled and disabled respectively. The score difference calculated by ViSQOL with the
+speech file as reference should not be less than the threshold specified in test_data.
+"""
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+# Test files are located in gs://chromeos-localmirror/distfiles/test_noise_cancellation
+# The speech file is 15-second, 1-channel, 16k-rate.
+# The threshold is negative because we can tolerate the slight degradation as the side effect
+# of NC.
+test_data = dict(speech_file='speech_ref.wav',
+                 threshold=-0.05)
+
+def run(machine):
+    host = hosts.create_host(machine, chameleon_args=chameleon_args)
+    job.run_test("audio_AudioNoiseCancellation", host=host, test_data=test_data, tag='quiet_env')
+
+parallel_simple(run, machines)
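
Read as a worked example, the negative threshold means a small NC-induced drop is tolerated in the quiet scenario while a larger drop still fails; the scores below are hypothetical ViSQOL values, not measured data:

    threshold = -0.05
    assert (4.10 - 4.13) >= threshold          # diff = -0.03: tolerated
    assert not ((4.00 - 4.10) >= threshold)    # diff = -0.10: test would fail
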
diff --git a/server/site_tests/audio_AudioPinnedStream/audio_AudioPinnedStream.py b/server/site_tests/audio_AudioPinnedStream/audio_AudioPinnedStream.py
index 93af78e..7d99267 100644
--- a/server/site_tests/audio_AudioPinnedStream/audio_AudioPinnedStream.py
+++ b/server/site_tests/audio_AudioPinnedStream/audio_AudioPinnedStream.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioPinnedStream/control.playback b/server/site_tests/audio_AudioPinnedStream/control.playback
index 8f775d1..bf86c09 100644
--- a/server/site_tests/audio_AudioPinnedStream/control.playback
+++ b/server/site_tests/audio_AudioPinnedStream/control.playback
@@ -16,6 +16,7 @@
 TEST_TYPE = 'server'
 #ATTRIBUTES = "suite:audio_advanced"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack, test_usbaudio"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests pinned stream function.
diff --git a/server/site_tests/audio_AudioPinnedStream/control.recording b/server/site_tests/audio_AudioPinnedStream/control.recording
index cf504a4..6ea69aa 100644
--- a/server/site_tests/audio_AudioPinnedStream/control.recording
+++ b/server/site_tests/audio_AudioPinnedStream/control.recording
@@ -16,6 +16,7 @@
 TEST_TYPE = 'server'
 #ATTRIBUTES = "suite:audio_advanced"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack, test_usbaudio"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests pinned stream function.
diff --git a/server/site_tests/audio_AudioQualityAfterSuspend/audio_AudioQualityAfterSuspend.py b/server/site_tests/audio_AudioQualityAfterSuspend/audio_AudioQualityAfterSuspend.py
index c2f258b..5cba664 100644
--- a/server/site_tests/audio_AudioQualityAfterSuspend/audio_AudioQualityAfterSuspend.py
+++ b/server/site_tests/audio_AudioQualityAfterSuspend/audio_AudioQualityAfterSuspend.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_m4a b/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_m4a
index 6a266f6..1256bfc 100644
--- a/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_m4a
+++ b/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_m4a
@@ -17,7 +17,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_mp3 b/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_mp3
index f43bb41..fb52a78 100644
--- a/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_mp3
+++ b/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_mp3
@@ -17,7 +17,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_wav b/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_wav
index 236ad08..b1d9e71 100644
--- a/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_wav
+++ b/server/site_tests/audio_AudioQualityAfterSuspend/control.headphone_wav
@@ -17,7 +17,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_m4a b/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_m4a
index 6f01387..0d2a78a 100644
--- a/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_m4a
+++ b/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_m4a
@@ -18,7 +18,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal_speaker audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_mp3 b/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_mp3
index 4c5f9be..4dc3ded 100644
--- a/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_mp3
+++ b/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_mp3
@@ -18,7 +18,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal_speaker audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_wav b/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_wav
index 6601eb5..92009b9 100644
--- a/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_wav
+++ b/server/site_tests/audio_AudioQualityAfterSuspend/control.internal_speaker_wav
@@ -18,7 +18,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal_speaker audio function against DUT after suspend.
diff --git a/server/site_tests/audio_AudioRoutingUSB/audio_AudioRoutingUSB.py b/server/site_tests/audio_AudioRoutingUSB/audio_AudioRoutingUSB.py
deleted file mode 100644
index b85093b..0000000
--- a/server/site_tests/audio_AudioRoutingUSB/audio_AudioRoutingUSB.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, time
-
-from autotest_lib.server import test
-from autotest_lib.client.common_lib import error
-
-# After connecting/disconnecting the USB headset, we wait a while for the event
-# to be discovered, and CRAS to switch the output device.
-SWITCH_DELAY = 7
-
-class audio_AudioRoutingUSB(test.test):
-    version = 1
-
-    def get_opened_device(self, host):
-        """Returns the opened pcm device under /dev/snd."""
-        output = host.run('lsof -Fn +D /dev/snd', ignore_status=True).stdout
-        return parse_pcm_device(output);
-
-    def run_once(self, host):
-        try:
-            host.run('aplay /dev/zero </dev/null >/dev/null 2>&1 &')
-            self.run_test_while_audio_is_playing(host)
-        finally:
-            host.run('killall aplay')
-
-    def run_test_while_audio_is_playing(self, host):
-        host.servo.set('dut_usb2_prtctl', 'on')
-
-        # First disconnect the headset from DUT
-        host.servo.set('usb_mux_oe2', 'off')
-        time.sleep(SWITCH_DELAY)
-        dev1 = self.get_opened_device(host)
-
-        # Connect the headset to DUT
-        host.servo.set('usb_mux_oe2', 'on')
-        time.sleep(SWITCH_DELAY)
-        dev2 = self.get_opened_device(host)
-
-        # Disconnect the headset from DUT
-        host.servo.set('usb_mux_oe2', 'off')
-        time.sleep(SWITCH_DELAY)
-        dev3 = self.get_opened_device(host)
-
-        logging.info('dev1: %s, dev2: %s, dev3:%s', dev1, dev2, dev3)
-        if dev1 == dev2:
-            raise error.TestFail('Same audio device used when the headset is '
-                                 'connected. Make sure a USB headset is '
-                                 'plugged into DUT_USB (TYPE A/J4), and '
-                                 'DUT_IN (TYPE MICRO-B/J5) is '
-                                 'connected to a USB port on the device')
-        if dev1 != dev3:
-            raise error.TestFail('The audio device didn\'t switch back to the '
-                                 'original one after the USB headset is '
-                                 'unplugged')
-
-def parse_pcm_device(input):
-  """
-  Parses the output of lsof command. Returns the pcm device opened.
-
-  >>> input = '''
-  ... p1847
-  ... n/dev/snd/pcmC0D0p
-  ... n/dev/snd/controlC0
-  ... n/dev/snd/controlC0
-  ... n/dev/snd/controlC0
-  ... '''
-  >>> parse_pcm_device(input)
-  '/dev/snd/pcmC0D0p'
-  """
-  devices = set()
-  for line in input.split('\n'):
-    if line and line.startswith('n/dev/snd/pcmC'):
-      devices.add(line[1:])
-      logging.info('opened devices: %s', devices)
-      if len(devices) != 1:
-        raise error.TestError('Should open one and only one device')
-      return devices.pop()
diff --git a/server/site_tests/audio_AudioRoutingUSB/control b/server/site_tests/audio_AudioRoutingUSB/control
deleted file mode 100644
index fe5eed9..0000000
--- a/server/site_tests/audio_AudioRoutingUSB/control
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'The Chromium OS Authors'
-NAME = 'audio_AudioRoutingUSB'
-PURPOSE = 'Verify audio is routed to USB headset correctly.'
-CRITERIA = """
-After a USB headset is plugged in, CRAS should route the audio
-to the new device. After the USB headset is unplugged, the audio
-should be routed back to the original device. We check if the
-correct device file (/dev/snd/pcmCXDY) is opened in each case.
-"""
-TIME='FAST'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = "audio"
-TEST_TYPE = 'server'
-
-DOC = """
-This test uses servo to simulate plugging/unplugging a USB headset. Before
-running the test, a USB headset should be plugged into DUT_USB (TYPE A/J4),
-and DUT_IN (TYPE MICRO-B/J5) should be connected to a USB port on the device.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("audio_AudioRoutingUSB", host=host)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioSanityCheck/audio_AudioSanityCheck.py b/server/site_tests/audio_AudioSanityCheck/audio_AudioSanityCheck.py
deleted file mode 100644
index c175c63..0000000
--- a/server/site_tests/audio_AudioSanityCheck/audio_AudioSanityCheck.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This is a server side audio sanity test testing assumptions other audio tests
-rely on.
-"""
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.chameleon import audio_test_utils
-from autotest_lib.server.cros.audio import audio_test
-
-class audio_AudioSanityCheck(audio_test.AudioTest):
-    """
-    This test talks to a Cros device to verify if some basic functions that
-    other audio tests rely on still work after a suspension.
-    """
-    version = 1
-
-    def verify_chrome_audio(self):
-        """"Verify if chrome.audio API is available"""
-        if not self.facade.get_chrome_audio_availablity():
-            raise error.TestFail("chrome.audio API is not available")
-
-    def verify_suspend(self):
-        """"Verify and trigger a suspension"""
-        audio_test_utils.suspend_resume_and_verify(self.host, self.factory)
-
-    def run_once(self, suspend_only=False):
-        """Runs Audio sanity test to make sure chrome api works. """
-
-        # The suspend_only flag is for crbug:978593, which causes sanity check
-        # to always fail. however, we still want to check the suspend operation
-        # as it also potentially fails the audio tests. This should be removed
-        # once the blocker is fixed
-        if suspend_only:
-            self.verify_suspend()
-            return
-
-        # Check if the chrome.audio API is available
-        self.verify_chrome_audio()
-        # chrome.audio API should remain available after a suspension
-        self.verify_suspend()
-        self.verify_chrome_audio()
diff --git a/server/site_tests/audio_AudioSanityCheck/control b/server/site_tests/audio_AudioSanityCheck/control
deleted file mode 100644
index 2b0c5c9..0000000
--- a/server/site_tests/audio_AudioSanityCheck/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "audio_AudioSanityCheck"
-PURPOSE = "Sanity check for behavior after a suspension."
-CRITERIA = """
-This test will fail if functions that audio tests rely on are broken
-after a suspension.
-"""
-#ATTRIBUTES = "chameleon_audio_unstable"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "audio"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon, audio_board'
-JOB_RETRIES = 2
-
-DOC = """
-This test remotely tests if some functions, audio tests rely on, work correctly
-after a suspension.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test("audio_AudioSanityCheck", host=host)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioSanityCheck/control.suspend b/server/site_tests/audio_AudioSanityCheck/control.suspend
deleted file mode 100644
index 09001fe..0000000
--- a/server/site_tests/audio_AudioSanityCheck/control.suspend
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "audio_AudioSanityCheck.suspend"
-PURPOSE = "Sanity check for suspend."
-CRITERIA = """
-This test will fail if target fail to suspend or wake up after a suspension.
-"""
-ATTRIBUTES = "suite:audio_advanced"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "audio"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon, audio_board'
-JOB_RETRIES = 1
-
-DOC = """
-This test remotely tests if suspend, audio tests rely on, work correctly.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    # The suspend_only flag is for crbug:978593, which causes sanity check to
-    # always fail. however, we still want to check the suspend operation as it
-    # also potentially fails the audio tests. This should be removed once the
-    # blocker is fixed
-    job.run_test("audio_AudioSanityCheck", host=host, suspend_only=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioTestAssumptionCheck/audio_AudioTestAssumptionCheck.py b/server/site_tests/audio_AudioTestAssumptionCheck/audio_AudioTestAssumptionCheck.py
new file mode 100644
index 0000000..d9af9e5
--- /dev/null
+++ b/server/site_tests/audio_AudioTestAssumptionCheck/audio_AudioTestAssumptionCheck.py
@@ -0,0 +1,46 @@
+# Lint as: python2, python3
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+This is a server side audio test testing assumptions other audio tests
+rely on.
+"""
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.chameleon import audio_test_utils
+from autotest_lib.server.cros.audio import audio_test
+
+
+class audio_AudioTestAssumptionCheck(audio_test.AudioTest):
+    """
+    This test talks to a Cros device to verify if some basic functions that
+    other audio tests rely on still work after a suspension.
+    """
+    version = 1
+
+    def verify_chrome_audio(self):
+        """"Verify if chrome.audio API is available"""
+        if not self.facade.get_chrome_audio_availablity():
+            raise error.TestFail("chrome.audio API is not available")
+
+    def verify_suspend(self):
+        """"Verify and trigger a suspension"""
+        audio_test_utils.suspend_resume_and_verify(self.host, self.factory)
+
+    def run_once(self, suspend_only=False):
+        """Runs Audio confidence test to make sure chrome api works. """
+
+        # Check if the chrome.audio API is available
+        self.verify_chrome_audio()
+
+        self.verify_suspend()
+        # The suspend_only flag is for crbug:978593, which causes the audio API
+        # check to fail. However, we still want to check the suspend operation
+        # as it also potentially fails the audio tests. This should be removed
+        # once the blocker is fixed
+        if suspend_only:
+            return
+
+        # chrome.audio API should remain available after a suspension
+        self.verify_chrome_audio()
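One behavioral difference from the deleted audio_AudioSanityCheck is visible in run_once above: the first chrome.audio check now runs even when suspend_only is set, and only the post-resume re-check is skipped. Below is a self-contained model of that ordering; it is not the real test class (which needs the autotest harness), just a sketch of the control flow shown in the hunk.

# Self-contained sketch (not part of the patch) modeling the check ordering in
# run_once above: the first chrome.audio check runs even with suspend_only=True;
# only the post-resume re-check is skipped.
class _FlowModel(object):
    def __init__(self):
        self.calls = []

    def verify_chrome_audio(self):
        self.calls.append('chrome_audio')

    def verify_suspend(self):
        self.calls.append('suspend')

    def run_once(self, suspend_only=False):
        self.verify_chrome_audio()
        self.verify_suspend()
        if suspend_only:
            return
        self.verify_chrome_audio()

m = _FlowModel(); m.run_once(suspend_only=True)
assert m.calls == ['chrome_audio', 'suspend']

m = _FlowModel(); m.run_once()
assert m.calls == ['chrome_audio', 'suspend', 'chrome_audio']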
diff --git a/server/site_tests/audio_AudioTestAssumptionCheck/control b/server/site_tests/audio_AudioTestAssumptionCheck/control
new file mode 100644
index 0000000..4f178b1
--- /dev/null
+++ b/server/site_tests/audio_AudioTestAssumptionCheck/control
@@ -0,0 +1,32 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "chromeos-chameleon"
+NAME = "audio_AudioTestAssumptionCheck"
+PURPOSE = "Basic functionality check for behavior after a suspension."
+CRITERIA = """
+This test will fail if functions that audio tests rely on are broken
+after a suspension.
+"""
+#ATTRIBUTES = "chameleon_audio_unstable"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "audio"
+TEST_TYPE = "server"
+DEPENDENCIES = 'chameleon, audio_board'
+JOB_RETRIES = 2
+PY_VERSION = 3
+
+DOC = """
+This test remotely tests whether some functions that audio tests rely on still
+work correctly after a suspension.
+"""
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test("audio_AudioTestAssumptionCheck", host=host)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioTestAssumptionCheck/control.suspend b/server/site_tests/audio_AudioTestAssumptionCheck/control.suspend
new file mode 100644
index 0000000..2c41bf2
--- /dev/null
+++ b/server/site_tests/audio_AudioTestAssumptionCheck/control.suspend
@@ -0,0 +1,34 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "chromeos-chameleon"
+NAME = "audio_AudioTestAssumptionCheck.suspend"
+PURPOSE = "Functionality check for chrome.audio and suspend."
+CRITERIA = """
+This test will fail if the target fails to suspend or to wake up after a suspension.
+"""
+ATTRIBUTES = "suite:audio_advanced"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "audio"
+TEST_TYPE = "server"
+DEPENDENCIES = 'chameleon, audio_board'
+JOB_RETRIES = 1
+PY_VERSION = 3
+
+DOC = """
+This test remotely tests whether suspend, which audio tests rely on, works correctly.
+"""
+
+def run(machine):
+    host = hosts.create_host(machine)
+    # The suspend_only flag is for crbug:978593, which causes the check to
+    # always fail. However, we still want to check the suspend operation as it
+    # also potentially fails the audio tests. This should be removed once the
+    # blocker is fixed.
+    job.run_test("audio_AudioTestAssumptionCheck", host=host, suspend_only=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/audio_AudioVolume/audio_AudioVolume.py b/server/site_tests/audio_AudioVolume/audio_AudioVolume.py
index 341edbd..7051410 100644
--- a/server/site_tests/audio_AudioVolume/audio_AudioVolume.py
+++ b/server/site_tests/audio_AudioVolume/audio_AudioVolume.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -181,7 +182,7 @@
             logging.info('high_dominant_spectrals: %s',
                          high_dominant_spectrals)
 
-            for channel in xrange(len(low_dominant_spectrals)):
+            for channel in range(len(low_dominant_spectrals)):
                 _, low_coeff = low_dominant_spectrals[channel]
                 _, high_coeff = high_dominant_spectrals[channel]
                 ratio = low_coeff / high_coeff
diff --git a/server/site_tests/audio_AudioVolume/control.hdmi b/server/site_tests/audio_AudioVolume/control.hdmi
index d5d751d..20b5560 100644
--- a/server/site_tests/audio_AudioVolume/control.hdmi
+++ b/server/site_tests/audio_AudioVolume/control.hdmi
@@ -17,6 +17,7 @@
 ATTRIBUTES = "suite:audio_basic"
 DEPENDENCIES = "chameleon, audio_board, test_hdmiaudio"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests HDMI audio volume function against DUT.
diff --git a/server/site_tests/audio_AudioVolume/control.headphone b/server/site_tests/audio_AudioVolume/control.headphone
index cf8500a..035c17c 100644
--- a/server/site_tests/audio_AudioVolume/control.headphone
+++ b/server/site_tests/audio_AudioVolume/control.headphone
@@ -17,6 +17,7 @@
 ATTRIBUTES = "suite:audio_basic"
 DEPENDENCIES = "audio_cable"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests headphone audio volume function against DUT.
diff --git a/server/site_tests/audio_AudioVolume/control.speaker b/server/site_tests/audio_AudioVolume/control.speaker
index a9d0360..d3616c3 100644
--- a/server/site_tests/audio_AudioVolume/control.speaker
+++ b/server/site_tests/audio_AudioVolume/control.speaker
@@ -17,6 +17,7 @@
 ATTRIBUTES = "suite:audio_basic"
 DEPENDENCIES = "audio_box"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests speaker audio volume function against DUT.
diff --git a/server/site_tests/audio_AudioVolume/control.usb b/server/site_tests/audio_AudioVolume/control.usb
index 7c5773c..e7e0659 100644
--- a/server/site_tests/audio_AudioVolume/control.usb
+++ b/server/site_tests/audio_AudioVolume/control.usb
@@ -17,6 +17,7 @@
 ATTRIBUTES = "suite:audio_basic"
 DEPENDENCIES = "chameleon, audio_board, test_usbaudio"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB audio volume function against DUT.
diff --git a/server/site_tests/audio_AudioWebRTCLoopback/audio_AudioWebRTCLoopback.py b/server/site_tests/audio_AudioWebRTCLoopback/audio_AudioWebRTCLoopback.py
index 46c3125..bf1850f 100644
--- a/server/site_tests/audio_AudioWebRTCLoopback/audio_AudioWebRTCLoopback.py
+++ b/server/site_tests/audio_AudioWebRTCLoopback/audio_AudioWebRTCLoopback.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_AudioWebRTCLoopback/control b/server/site_tests/audio_AudioWebRTCLoopback/control
index 68640a3..cb9383b 100644
--- a/server/site_tests/audio_AudioWebRTCLoopback/control
+++ b/server/site_tests/audio_AudioWebRTCLoopback/control
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB and headphone WebRTC loopback audio function.
diff --git a/server/site_tests/audio_AudioWebRTCLoopback/control.quality b/server/site_tests/audio_AudioWebRTCLoopback/control.quality
index 82c6c51..bf0f6eb 100644
--- a/server/site_tests/audio_AudioWebRTCLoopback/control.quality
+++ b/server/site_tests/audio_AudioWebRTCLoopback/control.quality
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB and headphone WebRTC loopback audio function with quality check.
diff --git a/server/site_tests/audio_AudioWebRTCLoopback/control.quality.256 b/server/site_tests/audio_AudioWebRTCLoopback/control.quality.256
index 4754d4c..1ec69c5 100644
--- a/server/site_tests/audio_AudioWebRTCLoopback/control.quality.256
+++ b/server/site_tests/audio_AudioWebRTCLoopback/control.quality.256
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box, test_audiojack"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests USB and headphone WebRTC loopback audio function with quality check.
diff --git a/server/site_tests/audio_InternalCardNodes/audio_InternalCardNodes.py b/server/site_tests/audio_InternalCardNodes/audio_InternalCardNodes.py
index b9719a7..059432b 100644
--- a/server/site_tests/audio_InternalCardNodes/audio_InternalCardNodes.py
+++ b/server/site_tests/audio_InternalCardNodes/audio_InternalCardNodes.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,13 +16,6 @@
 
     """
     version = 1
-    _jack_plugger = None
-
-    def cleanup(self):
-        """Cleanup for this test."""
-        if self._jack_plugger is not None:
-            self._jack_plugger.plug()
-        super(audio_InternalCardNodes, self).cleanup()
 
     def get_expected_nodes(self, plugged):
         """Gets expected nodes should should be created for internal cards.
@@ -31,7 +25,10 @@
             a tuple (output, input) containing lists of expected input and
             output nodes.
         """
-        nodes = ([], ['POST_DSP_LOOPBACK', 'POST_MIX_LOOPBACK'])
+        nodes = ([], [
+                'POST_DSP_DELAYED_LOOPBACK', 'POST_DSP_LOOPBACK',
+                'POST_MIX_LOOPBACK'
+        ])
         if plugged:
             # Checks whether line-out or headphone is detected.
             hp_jack_node_type = audio_test_utils.check_hp_or_lineout_plugged(
@@ -49,17 +46,21 @@
             nodes[1].append('ECHO_REFERENCE')
         return nodes
 
-    def run_once(self, plug=True):
+    def run_once(self, plug=True, blocked_boards=[]):
         """Runs InternalCardNodes test."""
+        if self.host.get_board().split(':')[1] in blocked_boards:
+            raise error.TestNAError('Board not applicable to test!')
         if not audio_test_utils.has_audio_jack(self.host):
             audio_test_utils.check_plugged_nodes(
                     self.facade, self.get_expected_nodes(False))
             return
 
-        if not plug:
-            self._jack_plugger = self.host.chameleon.get_audio_board(
-            ).get_jack_plugger()
-            self._jack_plugger.unplug()
+        jack_plugger = self.host.chameleon.get_audio_board().get_jack_plugger()
+
+        if plug:
+            jack_plugger.plug()
+        else:
+            jack_plugger.unplug()
 
         audio_test_utils.check_plugged_nodes(self.facade,
                                              self.get_expected_nodes(plug))
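The blocked_boards check in run_once above assumes host.get_board() returns a label of the form 'board:<name>' (e.g. 'board:lars'), which split(':')[1] reduces to the bare board name. A standalone sketch of that comparison follows, with the label format treated as an assumption; only the string handling is illustrated.

# Standalone sketch of the board filtering above. The 'board:<name>' label
# format is an assumption about host.get_board(); the real check also raises
# error.TestNAError, which is omitted here.
def is_blocked(board_label, blocked_boards):
    """Return True if the DUT board name is on the block list."""
    return board_label.split(':')[1] in blocked_boards

assert is_blocked('board:lars', ['lars', 'lars-kernelnext'])
assert not is_blocked('board:eve', ['lars', 'lars-kernelnext'])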
diff --git a/server/site_tests/audio_InternalCardNodes/control.plug b/server/site_tests/audio_InternalCardNodes/control.plug
index c76fbc6..743cdd4 100644
--- a/server/site_tests/audio_InternalCardNodes/control.plug
+++ b/server/site_tests/audio_InternalCardNodes/control.plug
@@ -21,6 +21,7 @@
 # This test doesn't necessarily need to be in a audio box.
 DEPENDENCIES = "audio_box"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests plugged audio nodes for internal cards.
@@ -31,6 +32,9 @@
 
 def run(machine):
     host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_InternalCardNodes", host=host, plug=True)
+    job.run_test("audio_InternalCardNodes", host=host, plug=True,
+                 blocked_boards=['lars',    # Pending fix for b/147647051
+                                 'lars-kernelnext']
+                 )
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/audio_InternalCardNodes/control.unplug b/server/site_tests/audio_InternalCardNodes/control.unplug
index 3ffe31c..d81e78e 100644
--- a/server/site_tests/audio_InternalCardNodes/control.unplug
+++ b/server/site_tests/audio_InternalCardNodes/control.unplug
@@ -21,6 +21,7 @@
 # This test doesn't necessarily need to be in a audio box.
 DEPENDENCIES = "audio_box"
 JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests plugged audio nodes for internal cards.
diff --git a/server/site_tests/audio_LeftRightInternalSpeaker/audio_LeftRightInternalSpeaker.py b/server/site_tests/audio_LeftRightInternalSpeaker/audio_LeftRightInternalSpeaker.py
index c7cdd6e..f0edaf8 100644
--- a/server/site_tests/audio_LeftRightInternalSpeaker/audio_LeftRightInternalSpeaker.py
+++ b/server/site_tests/audio_LeftRightInternalSpeaker/audio_LeftRightInternalSpeaker.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -51,7 +52,7 @@
 
         @param host: A reference to the DUT.
         @param player: A string representing what audio player to use. Could
-                       be 'native' or 'browser'.
+                       be 'internal' or 'browser'.
 
         """
 
@@ -114,7 +115,7 @@
                 'device' % output_nodes)
         self.audio_facade.set_selected_output_volume(80)
 
-        if player == 'native':
+        if player == 'internal':
             if channel == 'left':
                 frequencies = [440, 0]
             else:
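The frequencies=[440, 0] pair above encodes "play a 440 Hz tone on the left channel, keep the right channel silent". Below is a rough, self-contained illustration of that idea as raw interleaved samples; this is not the path the test takes (it drives playback through the audio facade on the DUT), just a sketch of the one-silent-channel pattern.

# Illustrative only: build an interleaved 16-bit stereo buffer where one channel
# carries a tone and the other is silent, mirroring frequencies=[440, 0].
import array
import math

def stereo_tone(frequencies, rate=48000, seconds=0.1):
    """Interleaved 16-bit stereo samples; a frequency of 0 means silence."""
    samples = array.array('h')
    for n in range(int(rate * seconds)):
        for freq in frequencies:
            value = math.sin(2 * math.pi * freq * n / rate) if freq else 0.0
            samples.append(int(value * 32767))
    return samples

left_only = stereo_tone([440, 0])   # left channel tone, right channel silent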
diff --git a/server/site_tests/audio_LeftRightInternalSpeaker/control.browser b/server/site_tests/audio_LeftRightInternalSpeaker/control.browser
index b88a4a4..b9423de 100644
--- a/server/site_tests/audio_LeftRightInternalSpeaker/control.browser
+++ b/server/site_tests/audio_LeftRightInternalSpeaker/control.browser
@@ -14,7 +14,8 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "audio_box"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests internal speaker audio function by playing
diff --git a/server/site_tests/audio_LeftRightInternalSpeaker/control.internal b/server/site_tests/audio_LeftRightInternalSpeaker/control.internal
new file mode 100644
index 0000000..8d017e9
--- /dev/null
+++ b/server/site_tests/audio_LeftRightInternalSpeaker/control.internal
@@ -0,0 +1,34 @@
+# Copyright 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "chromeos-chameleon"
+NAME = "audio_LeftRightInternalSpeaker.internal"
+PURPOSE = "Remotely controlled left/right internal speaker audio test."
+CRITERIA = "This test will fail if the captured audio does not match original file."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "audio"
+TEST_TYPE = "server"
+#ATTRIBUTES = "suite:chameleon_audio_unstable"
+DEPENDENCIES = "audio_box"
+JOB_RETRIES = 1
+PY_VERSION = 3
+
+DOC = """
+This test remotely tests internal speaker audio function by playing
+sounds to left and right channels separately. It can't detect if the
+channel/speaker pairing is incorrect, but it can detect when a certain
+channel is not played at all.
+"""
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, chameleon_args=chameleon_args)
+    job.run_test("audio_LeftRightInternalSpeaker", host=host, player='internal')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/audio_LeftRightInternalSpeaker/control.native b/server/site_tests/audio_LeftRightInternalSpeaker/control.native
deleted file mode 100644
index 43785cc..0000000
--- a/server/site_tests/audio_LeftRightInternalSpeaker/control.native
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "audio_LeftRightInternalSpeaker.native"
-PURPOSE = "Remotely controlled left/right internal speaker audio test."
-CRITERIA = "This test will fail if the captured audio does not match original file."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "audio"
-TEST_TYPE = "server"
-#ATTRIBUTES = "suite:chameleon_audio_unstable"
-DEPENDENCIES = "audio_box"
-# JOB_RETRIES = 0
-
-DOC = """
-This test remotely tests internal speaker audio function by playing
-sounds to left and right channels separately. It can't detect if the
-channel/speaker pairing is incorrect, but it can detect when a certain
-channel is not played at all.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audio_LeftRightInternalSpeaker", host=host, player='native')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audio_MediaBasicVerification/audio_MediaBasicVerification.py b/server/site_tests/audio_MediaBasicVerification/audio_MediaBasicVerification.py
index 3fd91f0..da27e0f 100644
--- a/server/site_tests/audio_MediaBasicVerification/audio_MediaBasicVerification.py
+++ b/server/site_tests/audio_MediaBasicVerification/audio_MediaBasicVerification.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/audio_MediaBasicVerification/control.test_m4a b/server/site_tests/audio_MediaBasicVerification/control.test_m4a
index 1bb370f..1be6ff5 100644
--- a/server/site_tests/audio_MediaBasicVerification/control.test_m4a
+++ b/server/site_tests/audio_MediaBasicVerification/control.test_m4a
@@ -14,6 +14,7 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio quality over headphone.
diff --git a/server/site_tests/audio_MediaBasicVerification/control.test_mp3 b/server/site_tests/audio_MediaBasicVerification/control.test_mp3
index 03c0143..4f74aeb 100644
--- a/server/site_tests/audio_MediaBasicVerification/control.test_mp3
+++ b/server/site_tests/audio_MediaBasicVerification/control.test_mp3
@@ -14,6 +14,7 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio quality over headphone.
diff --git a/server/site_tests/audio_MediaBasicVerification/control.test_ogg b/server/site_tests/audio_MediaBasicVerification/control.test_ogg
index 062c3c7..9212f97 100644
--- a/server/site_tests/audio_MediaBasicVerification/control.test_ogg
+++ b/server/site_tests/audio_MediaBasicVerification/control.test_ogg
@@ -14,6 +14,7 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio quality over headphone.
diff --git a/server/site_tests/audio_MediaBasicVerification/control.test_wav b/server/site_tests/audio_MediaBasicVerification/control.test_wav
index 360785c..26770b6 100644
--- a/server/site_tests/audio_MediaBasicVerification/control.test_wav
+++ b/server/site_tests/audio_MediaBasicVerification/control.test_wav
@@ -14,6 +14,7 @@
 TEST_TYPE = "server"
 #ATTRIBUTES = "suite:chameleon_audio_unstable"
 DEPENDENCIES = "chameleon, audio_board, test_audiojack"
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests audio quality over headphone.
diff --git a/server/site_tests/audio_PowerConsumption/control.audio_mp3 b/server/site_tests/audio_PowerConsumption/control.audio_mp3
deleted file mode 100644
index 7bf0e20..0000000
--- a/server/site_tests/audio_PowerConsumption/control.audio_mp3
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "rohitbm@chromium.org, Chrome OS Audio"
-NAME = "audio_PowerConsumption.mp3"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "audio"
-# TODO(ihf): enable once crbug.com/710338 is fixed.
-#ATTRIBUTES = "suite:bvt-perbuild"
-TEST_TYPE = "server"
-DEPENDENCIES = "rpm, power:battery"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-    "components": ["OS>Kernel>Audio"],
-}
-
-DOC = """
-The test outputs the power consumption for various audio formats.
-"""
-
-import logging
-
-from autotest_lib.client.common_lib import error
-
-VIDEO_NAME = "polka_crowd_128kbps_44_1khz.mp3"
-CHECKSUM = "7171529bb34c6e17dd163b03aa2b7c9c"
-
-def _run_client_test(machine):
-    """Runs client test with battery actively discharging."""
-    client = hosts.create_host(machine)
-    if not client.has_power():
-        raise error.TestError("This test requires RPM support.")
-
-    try:
-        client.power_off()
-        client_at = autotest.Autotest(client)
-        client_at.run_test("audio_PlaybackPower", test_file=VIDEO_NAME,
-                           checksum=CHECKSUM)
-    finally:
-        client.power_on()
-
-
-job.parallel_on_machines(_run_client_test, machines)
diff --git a/server/site_tests/audiovideo_AVSync/audiovideo_AVSync.py b/server/site_tests/audiovideo_AVSync/audiovideo_AVSync.py
deleted file mode 100644
index 3156134..0000000
--- a/server/site_tests/audiovideo_AVSync/audiovideo_AVSync.py
+++ /dev/null
@@ -1,298 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-import struct
-import tempfile
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import file_utils
-from autotest_lib.client.common_lib.cros import arc_common
-from autotest_lib.client.cros import constants
-from autotest_lib.client.cros.chameleon import audio_test_utils
-from autotest_lib.client.cros.chameleon import chameleon_port_finder
-from autotest_lib.client.cros.multimedia import arc_resource_common
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-
-class audiovideo_AVSync(test.test):
-    """ Server side HDMI audio/video sync quality measurement
-
-    This test talks to a Chameleon board and a Cros device to measure the
-    audio/video sync quality under playing a 1080p 60fps video.
-    """
-    version = 1
-
-    AUDIO_CAPTURE_RATE = 48000
-    VIDEO_CAPTURE_RATE = 60
-
-    BEEP_THRESHOLD = 10 ** 9
-
-    DELAY_BEFORE_CAPTURING = 2
-    DELAY_BEFORE_PLAYBACK = 2
-    DELAY_AFTER_PLAYBACK = 2
-
-    DEFAULT_VIDEO_URL = ('http://commondatastorage.googleapis.com/'
-                         'chromiumos-test-assets-public/chameleon/'
-                         'audiovideo_AVSync/1080p_60fps.mp4')
-
-    WAIT_CLIENT_READY_TIMEOUT_SECS = 120
-
-    def compute_audio_keypoint(self, data):
-        """Compute audio keypoints. Audio keypoints are the starting times of
-        beeps.
-
-        @param data: Raw captured audio data in S32LE, 8 channels, 48000 Hz.
-
-        @returns: Key points of captured data put in a list.
-        """
-        keypoints = []
-        sample_no = 0
-        last_beep_no = -100
-        for i in xrange(0, len(data), 32):
-            values = struct.unpack('<8i', data[i:i+32])
-            if values[0] > self.BEEP_THRESHOLD:
-                if sample_no - last_beep_no >= 100:
-                    keypoints.append(sample_no / float(self.AUDIO_CAPTURE_RATE))
-                last_beep_no = sample_no
-            sample_no += 1
-        return keypoints
-
-
-    def compute_video_keypoint(self, checksum):
-        """Compute video keypoints. Video keypoints are the times when the
-        checksum changes.
-
-        @param checksum: Checksums of frames put in a list.
-
-        @returns: Key points of captured video data put in a list.
-        """
-        return [i / float(self.VIDEO_CAPTURE_RATE)
-                for i in xrange(1, len(checksum))
-                if checksum[i] != checksum[i - 1]]
-
-
-    def log_result(self, prefix, key_audio, key_video, dropped_frame_count):
-        """Log the test result to result.json and the dashboard.
-
-        @param prefix: A string distinguishes between subtests.
-        @param key_audio: Key points of captured audio data put in a list.
-        @param key_video: Key points of captured video data put in a list.
-        @param dropped_frame_count: Number of dropped frames.
-        """
-        log_path = os.path.join(self.resultsdir, 'result.json')
-        diff = map(lambda x: x[0] - x[1], zip(key_audio, key_video))
-        diff_range = max(diff) - min(diff)
-        result = dict(
-            key_audio=key_audio,
-            key_video=key_video,
-            av_diff=diff,
-            diff_range=diff_range
-        )
-        if dropped_frame_count is not None:
-            result['dropped_frame_count'] = dropped_frame_count
-
-        result = json.dumps(result, indent=2)
-        with open(log_path, 'w') as f:
-            f.write(result)
-        logging.info(str(result))
-
-        dashboard_result = dict(
-            diff_range=[diff_range, 'seconds'],
-            max_diff=[max(diff), 'seconds'],
-            min_diff=[min(diff), 'seconds'],
-            average_diff=[sum(diff) / len(diff), 'seconds']
-        )
-        if dropped_frame_count is not None:
-            dashboard_result['dropped_frame_count'] = [
-                    dropped_frame_count, 'frames']
-
-        for key, value in dashboard_result.iteritems():
-            self.output_perf_value(description=prefix+key, value=value[0],
-                                   units=value[1], higher_is_better=False)
-
-
-    def run_once(self, host, video_hardware_acceleration=True,
-                 video_url=DEFAULT_VIDEO_URL, arc=False):
-        """Running audio/video synchronization quality measurement
-
-        @param host: A host object representing the DUT.
-        @param video_hardware_acceleration: Enables the hardware acceleration
-                                            for video decoding.
-        @param video_url: The ULR of the test video.
-        @param arc: Tests on ARC with an Android Video Player App.
-        """
-        self.host = host
-
-        factory = remote_facade_factory.RemoteFacadeFactory(
-                host, results_dir=self.resultsdir, no_chrome=True)
-
-        chrome_args = {
-            'extension_paths': [constants.AUDIO_TEST_EXTENSION,
-                                constants.DISPLAY_TEST_EXTENSION],
-            'extra_browser_args': [],
-            'arc_mode': arc_common.ARC_MODE_DISABLED,
-            'autotest_ext': True
-        }
-        if not video_hardware_acceleration:
-            chrome_args['extra_browser_args'].append(
-                    '--disable-accelerated-video-decode')
-        if arc:
-            chrome_args['arc_mode'] = arc_common.ARC_MODE_ENABLED
-        browser_facade = factory.create_browser_facade()
-        browser_facade.start_custom_chrome(chrome_args)
-        logging.info("created chrome")
-        if arc:
-            self.setup_video_app()
-
-        chameleon_board = host.chameleon
-        audio_facade = factory.create_audio_facade()
-        display_facade = factory.create_display_facade()
-        video_facade = factory.create_video_facade()
-
-        audio_port_finder = chameleon_port_finder.ChameleonAudioInputFinder(
-                chameleon_board)
-        video_port_finder = chameleon_port_finder.ChameleonVideoInputFinder(
-                chameleon_board, display_facade)
-        audio_port = audio_port_finder.find_port('HDMI')
-        video_port = video_port_finder.find_port('HDMI')
-
-        chameleon_board.setup_and_reset(self.outputdir)
-
-        _, ext = os.path.splitext(video_url)
-        with tempfile.NamedTemporaryFile(prefix='playback_', suffix=ext) as f:
-            # The default permission is 0o600.
-            os.chmod(f.name, 0o644)
-
-            file_utils.download_file(video_url, f.name)
-            if arc:
-                video_facade.prepare_arc_playback(f.name)
-            else:
-                video_facade.prepare_playback(f.name)
-
-        edid_path = os.path.join(
-                self.bindir, 'test_data/edids/HDMI_DELL_U2410.txt')
-
-        video_port.plug()
-        with video_port.use_edid_file(edid_path):
-            audio_facade.set_chrome_active_node_type('HDMI', None)
-            audio_facade.set_chrome_active_volume(100)
-            audio_test_utils.check_audio_nodes(
-                    audio_facade, (['HDMI'], None))
-            display_facade.set_mirrored(True)
-            video_port.start_monitoring_audio_video_capturing_delay()
-
-            time.sleep(self.DELAY_BEFORE_CAPTURING)
-            video_port.start_capturing_video((64, 64, 16, 16))
-            audio_port.start_capturing_audio()
-
-            time.sleep(self.DELAY_BEFORE_PLAYBACK)
-            if arc:
-                video_facade.start_arc_playback(blocking_secs=20)
-            else:
-                video_facade.start_playback(blocking=True)
-            time.sleep(self.DELAY_AFTER_PLAYBACK)
-
-            remote_path, _ = audio_port.stop_capturing_audio()
-            video_port.stop_capturing_video()
-            start_delay = video_port.get_audio_video_capturing_delay()
-
-        local_path = os.path.join(self.resultsdir, 'recorded.raw')
-        chameleon_board.host.get_file(remote_path, local_path)
-
-        audio_data = open(local_path).read()
-        video_data = video_port.get_captured_checksums()
-
-        logging.info("audio capture %d bytes, %f seconds", len(audio_data),
-                     len(audio_data) / float(self.AUDIO_CAPTURE_RATE) / 32)
-        logging.info("video capture %d frames, %f seconds", len(video_data),
-                     len(video_data) / float(self.VIDEO_CAPTURE_RATE))
-
-        key_audio = self.compute_audio_keypoint(audio_data)
-        key_video = self.compute_video_keypoint(video_data)
-        # Use the capturing delay to align A/V
-        key_video = map(lambda x: x + start_delay, key_video)
-
-        dropped_frame_count = None
-        if not arc:
-            video_facade.dropped_frame_count()
-
-        prefix = ''
-        if arc:
-            prefix = 'arc_'
-        elif video_hardware_acceleration:
-            prefix = 'hw_'
-        else:
-            prefix = 'sw_'
-
-        self.log_result(prefix, key_audio, key_video, dropped_frame_count)
-
-
-    def run_client_side_test(self):
-        """Runs a client side test on Cros device in background."""
-        self.client_at = autotest.Autotest(self.host)
-        logging.info('Start running client side test %s',
-                     arc_resource_common.PlayVideoProps.TEST_NAME)
-        self.client_at.run_test(
-                arc_resource_common.PlayVideoProps.TEST_NAME,
-                background=True)
-
-
-    def setup_video_app(self):
-        """Setups Play Video app on Cros device.
-
-        Runs a client side test on Cros device to start Chrome and ARC and
-        install Play Video app.
-        Wait for it to be ready.
-
-        """
-        # Removes ready tag that server side test should wait for later.
-        self.remove_ready_tag()
-
-        # Runs the client side test.
-        self.run_client_side_test()
-
-        logging.info('Waiting for client side Play Video app to be ready')
-
-        # Waits for ready tag to be posted by client side test.
-        utils.poll_for_condition(condition=self.ready_tag_exists,
-                                 timeout=self.WAIT_CLIENT_READY_TIMEOUT_SECS,
-                                 desc='Wait for client side test being ready',
-                                 sleep_interval=1)
-
-        logging.info('Client side Play Video app is ready')
-
-
-    def cleanup(self):
-        """Cleanup of the test."""
-        self.touch_exit_tag()
-        super(audiovideo_AVSync, self).cleanup()
-
-
-    def remove_ready_tag(self):
-        """Removes ready tag on Cros device."""
-        if self.ready_tag_exists():
-            self.host.run(command='rm %s' % (
-                    arc_resource_common.PlayVideoProps.READY_TAG_FILE))
-
-
-    def touch_exit_tag(self):
-        """Touches exit tag on Cros device to stop client side test."""
-        self.host.run(command='touch %s' % (
-                arc_resource_common.PlayVideoProps.EXIT_TAG_FILE))
-
-
-    def ready_tag_exists(self):
-        """Checks if ready tag exists.
-
-        @returns: True if the tag file exists. False otherwise.
-
-        """
-        return self.host.path_exists(
-                arc_resource_common.PlayVideoProps.READY_TAG_FILE)
diff --git a/server/site_tests/audiovideo_AVSync/control.arc b/server/site_tests/audiovideo_AVSync/control.arc
deleted file mode 100644
index 3c8fb50..0000000
--- a/server/site_tests/audiovideo_AVSync/control.arc
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = 'audiovideo_AVSync.arc'
-PURPOSE = "Remotely controlled HDMI audio/video test."
-#ATTRIBUTES = "suite:chameleon_audio_unstable"
-TIME = 'SHORT'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = 'audiovideo'
-TEST_TYPE = 'server'
-DEPENDENCIES = 'chameleon:hdmi, arc'
-JOB_RETRIES = 2
-
-DOC = """
-This test measure the audio/video synchronization quality while playing a
-1080p 30fps MP4 video (video: mpeg4, audio: aac) on ARC.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-VIDEO_URL = ('http://commondatastorage.googleapis.com/'
-             'chromiumos-test-assets-public/chameleon/'
-             'audiovideo_AVSync/1080p_30fps.mp4')
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audiovideo_AVSync", host=host, video_url=VIDEO_URL, arc=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audiovideo_AVSync/control.hw_video_acc b/server/site_tests/audiovideo_AVSync/control.hw_video_acc
deleted file mode 100644
index 205d975..0000000
--- a/server/site_tests/audiovideo_AVSync/control.hw_video_acc
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = 'audiovideo_AVSync.hw_video_acc'
-PURPOSE = "Remotely controlled HDMI audio/video test."
-#ATTRIBUTES = "suite:chameleon_audio_unstable"
-TIME = 'SHORT'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = 'audiovideo'
-TEST_TYPE = 'server'
-DEPENDENCIES = 'chameleon:hdmi'
-JOB_RETRIES = 2
-
-DOC = """
-This test measure the audio/video synchronization quality while playing a
-1080p 60fps MP4 video (video: h264, audio: aac).
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audiovideo_AVSync", host=host)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audiovideo_AVSync/control.sw_video_acc b/server/site_tests/audiovideo_AVSync/control.sw_video_acc
deleted file mode 100644
index dfd7796..0000000
--- a/server/site_tests/audiovideo_AVSync/control.sw_video_acc
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = 'audiovideo_AVSync.sw_video_acc'
-PURPOSE = "Remotely controlled HDMI audio/video test."
-#ATTRIBUTES = "suite:chameleon_audio_unstable"
-TIME = 'SHORT'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = 'audiovideo'
-TEST_TYPE = 'server'
-DEPENDENCIES = 'chameleon:hdmi'
-JOB_RETRIES = 2
-
-DOC = """
-This test measure the audio/video synchronization quality while playing a
-1080p 60fps MP4 video (video: h264, audio: aac) without hardware acceleration.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audiovideo_AVSync", host=host,
-                 video_hardware_acceleration=False)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/audiovideo_AVSync/test_data/edids/HDMI_DELL_U2410.txt b/server/site_tests/audiovideo_AVSync/test_data/edids/HDMI_DELL_U2410.txt
deleted file mode 100644
index ca636f5..0000000
--- a/server/site_tests/audiovideo_AVSync/test_data/edids/HDMI_DELL_U2410.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-00ffffffffffff0010ac16f04c4e3830
-1114010380342078ea1ec5ae4f34b126
-0e5054a54b008180a940d100714f0101
-010101010101283c80a070b023403020
-360006442100001a000000ff004a3235
-374d30344d30384e4c0a000000fc0044
-454c4c2055323431300a2020000000fd
-00384c1e5111000a2020202020200161
-020329f15090050403020716011f1213
-14201511062309070767030c00100038
-2d83010000e3050301023a801871382d
-40582c450006442100001e011d801871
-1c1620582c250006442100009e011d00
-7251d01e206e28550006442100001e8c
-0ad08a20e02d10103e96000644210000
-1800000000000000000000000000003e
diff --git a/server/site_tests/audiovideo_AVSyncInternalDisplayAudioJack/audiovideo_AVSyncInternalDisplayAudioJack.py b/server/site_tests/audiovideo_AVSyncInternalDisplayAudioJack/audiovideo_AVSyncInternalDisplayAudioJack.py
deleted file mode 100644
index 41ce01a..0000000
--- a/server/site_tests/audiovideo_AVSyncInternalDisplayAudioJack/audiovideo_AVSyncInternalDisplayAudioJack.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import tempfile
-
-from autotest_lib.client.common_lib import file_utils
-from autotest_lib.client.cros.chameleon import avsync_probe_utils
-from autotest_lib.server import test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-
-class audiovideo_AVSyncInternalDisplayAudioJack(test.test):
-    """Server side audio/video sync quality measurement.
-
-    This test measure the audio/video sync between internal display and audio
-    jack.
-
-    """
-    version = 1
-
-    def run_once(self, host, video_url, capture_seconds, video_fps,
-                 sound_interval_frames, perf_prefix):
-        """Running audio/video synchronization quality measurement
-
-        @param host: A host object representing the DUT.
-        @param video_url: The ULR of the test video.
-        @param capture_seconds: How long do we capture the data from
-                avsync_probe device.
-        @param video_fps: Video frames per second of the video. We need the
-                data to detect corrupted video frame.
-        @param sound_interval_frames: The period of sound (beep) in the number
-                of video frames.
-        @param perf_prefix: The prefix name of perf graph.
-
-        """
-        factory = remote_facade_factory.RemoteFacadeFactory(
-                host, results_dir=self.resultsdir, no_chrome=True)
-
-        chameleon_board = host.chameleon
-        audio_facade = factory.create_audio_facade()
-        browser_facade = factory.create_browser_facade()
-        video_facade = factory.create_video_facade()
-        avsync_probe = chameleon_board.get_avsync_probe()
-        chameleon_board.setup_and_reset(self.outputdir)
-
-        browser_facade.start_default_chrome()
-
-        _, ext = os.path.splitext(video_url)
-        with tempfile.NamedTemporaryFile(prefix='playback_', suffix=ext) as f:
-            # The default permission is 0o600.
-            os.chmod(f.name, 0o644)
-
-            file_utils.download_file(video_url, f.name)
-            video_facade.prepare_playback(f.name)
-
-        audio_facade.set_chrome_active_volume(100)
-        video_facade.start_playback()
-        capture_data = avsync_probe.Capture(capture_seconds)
-        parser = avsync_probe_utils.AVSyncProbeDataParser(
-                self.resultsdir, capture_data, video_fps, sound_interval_frames)
-
-        logging.info('Video frame stats:')
-        logging.info('average: %f', parser.video_duration_average)
-        logging.info('standard deviation: %f', parser.video_duration_std)
-        logging.info('Sync stats:')
-        logging.info('average: %f', parser.sync_duration_average)
-        logging.info('standard deviation: %f', parser.sync_duration_std)
-        logging.info('Number of total frames: %d',
-                     parser.cumulative_frame_count)
-        logging.info('Number of corrupted frames: %d',
-                     parser.corrupted_frame_count)
-        logging.info('Number of dropoped frames: %d',
-                     parser.dropped_frame_count)
-        logging.info('Number of dropoped frames by player: %d',
-                     video_facade.dropped_frame_count())
-
-        video_graph_name = '%s_video' % perf_prefix
-        audiovideo_graph_name = '%s_audiovideo' % perf_prefix
-        self.output_perf_value(description='Video frame duration average',
-                               value=parser.video_duration_average, units='ms',
-                               graph=video_graph_name)
-        self.output_perf_value(description='Video frame duration std',
-                               value=parser.video_duration_std,
-                               graph=video_graph_name)
-        self.output_perf_value(description='Corrupted video frames',
-                               value=parser.corrupted_frame_count,
-                               higher_is_better=False, graph=video_graph_name)
-        self.output_perf_value(description='Dropped video frames',
-                               value=parser.dropped_frame_count,
-                               higher_is_better=False, graph=video_graph_name)
-        self.output_perf_value(description='Dropped video frames by player',
-                               value=video_facade.dropped_frame_count(),
-                               higher_is_better=False, graph=video_graph_name)
-
-        self.output_perf_value(description='Audio/Video Sync duration average',
-                               value=parser.sync_duration_average, units='ms',
-                               higher_is_better=False,
-                               graph=audiovideo_graph_name)
-        self.output_perf_value(description='Audio/Video Sync std',
-                               value=parser.sync_duration_std,
-                               higher_is_better=False,
-                               graph=audiovideo_graph_name)
diff --git a/server/site_tests/audiovideo_AVSyncInternalDisplayAudioJack/control b/server/site_tests/audiovideo_AVSyncInternalDisplayAudioJack/control
deleted file mode 100644
index 4b27df9..0000000
--- a/server/site_tests/audiovideo_AVSyncInternalDisplayAudioJack/control
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'chromeos-chameleon'
-NAME = 'audiovideo_AVSyncInternalDisplayAudioJack'
-PURPOSE = 'Measure audio/video sync from internal display and audio jack.'
-ATTRIBUTES = "suite:chameleon_audiovideo"
-TIME = 'SHORT'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = 'audiovideo'
-TEST_TYPE = 'server'
-DEPENDENCIES = 'chameleon:avsync_probe'
-JOB_RETRIES = 2
-
-DOC = """
-This test measure the audio/video synchronization quality while playing a
-1080p 30fps MP4 video.
-"""
-
-VIDEO_URL = ('http://commondatastorage.googleapis.com/'
-        'chromiumos-test-assets-public/chameleon/'
-        'audiovideo_AVSyncInternalDisplayAudioJack/'
-        'testvideo.mp4')
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("audiovideo_AVSyncInternalDisplayAudioJack", host=host,
-                 video_url=VIDEO_URL, capture_seconds=12, video_fps=30,
-                 sound_interval_frames=30, perf_prefix='1080_30fps')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Basic/autoupdate_Basic.py b/server/site_tests/autoupdate_Basic/autoupdate_Basic.py
index e256bbd..4bb4bcc 100644
--- a/server/site_tests/autoupdate_Basic/autoupdate_Basic.py
+++ b/server/site_tests/autoupdate_Basic/autoupdate_Basic.py
@@ -1,26 +1,97 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from autotest_lib.client.common_lib.cros import kernel_utils
-from autotest_lib.server.cros.update_engine import update_engine_test
+import logging
 
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.client.common_lib.cros import kernel_utils
+from autotest_lib.client.cros import cryptohome
+from autotest_lib.server.cros import provisioner
+from autotest_lib.server.cros.update_engine import update_engine_test
 
 class autoupdate_Basic(update_engine_test.UpdateEngineTest):
     """Performs a simple AU using Nebraska."""
     version = 1
 
-    def run_once(self, full_payload, job_repo_url=None):
+    def cleanup(self):
+        super(autoupdate_Basic, self).cleanup()
+
+
+    def run_once(self,
+                 full_payload,
+                 job_repo_url=None,
+                 build=None,
+                 m2n=False,
+                 running_at_desk=False,
+                 pin_login=False):
         """
         Performs a N-to-N autoupdate with Nebraska.
 
+        @param full_payload: True for full payload, False for delta
         @param job_repo_url: A url pointing to the devserver where the autotest
             package for this build should be staged.
+        @param build: An optional parameter to specify the target build for the
+                      update when running locally. job_repo_url will override
+                      this value.
+        @param m2n: M -> N update. This means we install the current stable
+                    version of this board before updating to ToT.
+        @param running_at_desk: Indicates test is run locally from workstation.
+                                Flag does not work with M2N tests.
+        @param pin_login: True to use login via PIN.
 
         """
+        if pin_login:
+            if not cryptohome.is_low_entropy_credentials_supported(self._host):
+                raise error.TestNAError(
+                        'Skip test: No hardware support for PIN login')
+
         # Get a payload to use for the test.
-        payload_url = self.get_payload_for_nebraska(job_repo_url,
-                                                    full_payload=full_payload)
+        payload_url = self.get_payload_for_nebraska(
+                job_repo_url=job_repo_url,
+                build=build,
+                full_payload=full_payload,
+                public_bucket=running_at_desk)
+
+        self._m2n = m2n
+        if self._m2n:
+            if self._host.get_board().endswith("-kernelnext"):
+                raise error.TestNAError("Skipping test on kernelnext board")
+
+            # Provision latest stable build for the current build.
+            build_name = self._get_latest_serving_stable_build()
+            logging.debug('build name is %s', build_name)
+
+            # Install the matching build with quick provision.
+            if running_at_desk:
+                self._copy_quick_provision_to_dut()
+                update_url = self._get_provision_url_on_public_bucket(
+                        build_name)
+            else:
+                autotest_devserver = dev_server.ImageServer.resolve(
+                        build_name, self._host.hostname)
+                update_url = autotest_devserver.get_update_url(build_name)
+
+            logging.info('Installing source image with update url: %s',
+                         update_url)
+            provisioner.ChromiumOSProvisioner(
+                    update_url,
+                    host=self._host,
+                    is_release_bucket=True,
+                    public_bucket=running_at_desk).run_provision()
+
+        # Login to device before update
+        if pin_login:
+            self._run_client_test_and_check_result(self._LOGIN_TEST_PIN,
+                                                   tag='before')
+        else:
+            self._run_client_test_and_check_result(
+                    self._LOGIN_TEST,
+                    username=self._LOGIN_TEST_USERNAME,
+                    password=self._LOGIN_TEST_PASSWORD,
+                    tag='before')
 
         # Record DUT state before the update.
         active, inactive = kernel_utils.get_kernel_state(self._host)
@@ -34,3 +105,20 @@
         kernel_utils.verify_boot_expectations(inactive, host=self._host)
         rootfs_hostlog, _ = self._create_hostlog_files()
         self.verify_update_events(self._FORCED_UPDATE, rootfs_hostlog)
+
+        if self._m2n:
+            # Bring stateful version to the same version as rootfs.
+            logging.info('Restoring stateful partition to ToT version')
+            self._update_stateful()
+        # Check we can login with the same user after update.
+        if pin_login:
+            self._run_client_test_and_check_result(self._LOGIN_TEST_PIN,
+                                                   tag='after',
+                                                   setup_pin=False)
+        else:
+            self._run_client_test_and_check_result(
+                    self._LOGIN_TEST,
+                    tag='after',
+                    username=self._LOGIN_TEST_USERNAME,
+                    password=self._LOGIN_TEST_PASSWORD,
+                    dont_override_profile=True)
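For reference, a minimal sketch of the M2N provisioning step that run_once() performs above, written as a standalone helper. This is illustrative only and assumes the same server-side test context shown in this diff (self._host plus _get_latest_serving_stable_build, _copy_quick_provision_to_dut and _get_provision_url_on_public_bucket).

# Sketch only; mirrors the provisioning logic added to run_once() above.
def _provision_stable_source(self, running_at_desk):
    """Install the latest serving stable build before updating to ToT."""
    build_name = self._get_latest_serving_stable_build()
    if running_at_desk:
        # Public-bucket path: avoids needing SSH access to lab cache servers.
        self._copy_quick_provision_to_dut()
        update_url = self._get_provision_url_on_public_bucket(build_name)
    else:
        autotest_devserver = dev_server.ImageServer.resolve(
                build_name, self._host.hostname)
        update_url = autotest_devserver.get_update_url(build_name)
    provisioner.ChromiumOSProvisioner(
            update_url,
            host=self._host,
            is_release_bucket=True,
            public_bucket=running_at_desk).run_provision()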
diff --git a/server/site_tests/autoupdate_Basic/control.delta b/server/site_tests/autoupdate_Basic/control.delta
index 59bd005..9e0d043 100644
--- a/server/site_tests/autoupdate_Basic/control.delta
+++ b/server/site_tests/autoupdate_Basic/control.delta
@@ -10,20 +10,26 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 This tests an N-to-N update. That means it will update to the same version
 that the DUT was provisioned with. This test will be used in the CQ to ensure
 changes don't break autoupdate.
 
-We supply a job_repo_url to the test when running locally. In the lab this will
-be passed directly. The job_repo_url is a link to the autotest packages on a
-devserver. The test uses it to find the correct payload to use.
+In the lab, a job_repo_url will be passed directly to the test. It contains
+information about the build to use and the IP address of the lab cache server
+to download update payloads from. Local runs can use cache servers as well but
+the setup is fairly complicated.
 
-To get a list of available devservers to use execute this command:
-atest server list | grep devserver
+Instead you can use the `running_at_desk` arg when running tests locally to
+avoid the extra setup needed to access cache servers from your workstation.
+`running_at_desk` will copy payloads from gs://chromeos-image-archive/ to a
+public bucket that is accessible without additional configuration.
 
 Example usage:
-test_that autoupdate_Basic <DUT> --board=<board> --args="job_repo_url=http://<devserver IP>:8082/static/<board>-release/RXX-XXXXX.X.X/autotest/packages"
+
+The current version on the DUT will be used for the update:
+test_that <DUT> autoupdate_Basic.delta --board=<board> --args="running_at_desk=True"
 """
 
 from autotest_lib.client.common_lib import utils
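As a side note on the control-file plumbing used throughout these files: the test_that --args string is parsed by utils.args_to_dict and forwarded to the test as keyword arguments. A rough sketch of that flow with hypothetical values, assuming args arrives as a list of key=value tokens:

from autotest_lib.client.common_lib import utils

# Hypothetical --args content as it reaches the control file.
args = ['running_at_desk=True', 'build=R102-14643.0.0']
args_dict = utils.args_to_dict(args)
# -> {'running_at_desk': 'True', 'build': 'R102-14643.0.0'} (values stay strings)
# The dict is then splatted into the test, e.g.:
#   job.run_test('autoupdate_Basic', host=host, full_payload=False, **args_dict)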
diff --git a/server/site_tests/autoupdate_Basic/control.full b/server/site_tests/autoupdate_Basic/control.full
index 145d231..791ff14 100644
--- a/server/site_tests/autoupdate_Basic/control.full
+++ b/server/site_tests/autoupdate_Basic/control.full
@@ -9,22 +9,32 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:bvt-inline, suite:infra_qual"
+ATTRIBUTES = "suite:bvt-inline, suite:infra_qual, suite:satlab-qual-bvt-inline"
 JOB_RETRIES = 2
+PY_VERSION = 3
 DOC = """
 This tests an N-to-N update. That means it will update to the same version
 that the DUT was provisioned with. This test will be used in the CQ to ensure
 changes don't break autoupdate.
 
-We supply a job_repo_url to the test when running locally. In the lab this will
-be passed directly. The job_repo_url is a link to the autotest packages on a
-devserver. The test uses it to find the correct payload to use.
+In the lab, a job_repo_url will be passed directly to the test. It contains
+information about the build to use and the IP address of the lab cache server
+to download update payloads from. Local runs can use cache servers as well but
+the setup is fairly complicated.
 
-To get a list of available devservers to use execute this command:
-atest server list | grep devserver
+Instead you can use the `running_at_desk` arg when running tests locally to
+avoid the extra setup needed to access cache servers from your workstation.
+`running_at_desk` will copy payloads from gs://chromeos-image-archive/ to a
+public bucket that is accessible without additional configuration.
 
 Example usage:
-test_that autoupdate_Basic <DUT> --board=<board> --args="job_repo_url=http://<devserver IP>:8082/static/<board>-release/RXX-XXXXX.X.X/autotest/packages"
+
+You can specify the build to update to for full updates. It should be greater
+than or equal to the version currently on the DUT:
+test_that <DUT> autoupdate_Basic.full --board=<board> --args="running_at_desk=True build=R102-14643.0.0"
+
+If no build is specified, the current version on the DUT will be used:
+test_that <DUT> autoupdate_Basic.full --board=<board> --args="running_at_desk=True"
 """
 
 from autotest_lib.client.common_lib import utils
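The running_at_desk flag described in the DOC above maps onto the public_bucket parameter of the payload helper in autoupdate_Basic.py; a condensed view of that call, using the same arguments shown earlier in this change:

# Condensed from autoupdate_Basic.run_once(); not a new API.
payload_url = self.get_payload_for_nebraska(
        job_repo_url=job_repo_url,      # set automatically in the lab
        build=build,                    # optional explicit target for local runs
        full_payload=full_payload,
        public_bucket=running_at_desk)  # True => fetch from the public bucket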
diff --git a/server/site_tests/autoupdate_Basic/control.full.pin b/server/site_tests/autoupdate_Basic/control.full.pin
new file mode 100644
index 0000000..9a2a2d1
--- /dev/null
+++ b/server/site_tests/autoupdate_Basic/control.full.pin
@@ -0,0 +1,47 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Basic.full.pin"
+TIME = "MEDIUM"
+PURPOSE = "Tests an N-to-N update with Nebraska with a PIN login."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
+DOC = """
+This tests an N-to-N update. That means it will update to the same version
+that the DUT was provisioned with. This test will be used in the CQ to ensure
+changes don't break autoupdate.
+
+In the lab, a job_repo_url will be passed directly to the test. It contains
+information about the build to use and the IP address of the lab cache server
+to download update payloads from. Local runs can use cache servers as well but
+the setup is fairly complicated.
+
+Instead you can use the `running_at_desk` arg when running tests locally to
+avoid the extra setup needed to access cache servers from your workstation.
+`running_at_desk` will copy payloads from gs://chromeos-image-archive/ to a
+public bucket that is accessible without additional configuration.
+
+Example usage:
+
+You can specify the build to update to for full updates. It should be greater
+than or equal to the version currently on the DUT:
+test_that <DUT> autoupdate_Basic.full.pin --board=<board> --args="running_at_desk=True build=R102-14643.0.0"
+
+If no build is specified, the current version on the DUT will be used:
+test_that <DUT> autoupdate_Basic.full.pin --board=<board> --args="running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Basic', host=host, full_payload=True,
+                 pin_login=True, **args_dict)
+
+job.parallel_simple(run, machines)
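For context, the pin_login=True path wired up by this control file exercises the PIN-specific branches shown earlier in autoupdate_Basic.py; roughly:

# Condensed from autoupdate_Basic.run_once(); illustrative only.
if not cryptohome.is_low_entropy_credentials_supported(self._host):
    raise error.TestNAError('Skip test: No hardware support for PIN login')
# Log in with a PIN before the update...
self._run_client_test_and_check_result(self._LOGIN_TEST_PIN, tag='before')
# ...apply the update and reboot, then verify the same PIN still works.
self._run_client_test_and_check_result(self._LOGIN_TEST_PIN,
                                       tag='after',
                                       setup_pin=False)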
diff --git a/server/site_tests/autoupdate_Basic/control.m2n.full b/server/site_tests/autoupdate_Basic/control.m2n.full
new file mode 100644
index 0000000..e622988
--- /dev/null
+++ b/server/site_tests/autoupdate_Basic/control.m2n.full
@@ -0,0 +1,44 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Basic.m2n.full"
+TIME = "MEDIUM"
+PURPOSE = "Tests an M-to-N update with Nebraska."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:bvt-inline"
+JOB_RETRIES = 2
+PY_VERSION = 3
+DOC = """
+This tests an M-to-N update. That means it will update from the current stable
+version for this board to ToT. This test will be used in the CQ to ensure
+changes don't break autoupdate.
+
+In the lab, a job_repo_url will be passed directly to the test. It contains
+information about the build to use and the IP address of the lab cache server
+to download update payloads from. Local runs can use cache servers as well but
+the setup is fairly complicated. Instead, local test runs can specify the
+`running_at_desk` and `build` args to run without requiring special setup.
+
+Example usage:
+
+With lab cache server SSH access and a job_repo_url:
+test_that <DUT> autoupdate_Basic.m2n.full --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+Otherwise, the running_at_desk and build args are required:
+test_that <DUT> autoupdate_Basic.m2n.full --board=<board> --args="running_at_desk=True build=R102-14643.0.0"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Basic', host=host, full_payload=True,
+                 m2n=True, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Basic/control.m2n.full.pin b/server/site_tests/autoupdate_Basic/control.m2n.full.pin
new file mode 100644
index 0000000..5f1a795
--- /dev/null
+++ b/server/site_tests/autoupdate_Basic/control.m2n.full.pin
@@ -0,0 +1,44 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Basic.m2n.full.pin"
+TIME = "MEDIUM"
+PURPOSE = "Tests an M-to-N update with Nebraska using PIN login."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+PY_VERSION = 3
+ATTRIBUTES = "suite:au-m2n"
+DOC = """
+This tests an M-to-N update. That means it will update from the current stable
+version for this board to ToT. This test will be used in the CQ to ensure
+changes don't break autoupdate.
+
+In the lab, a job_repo_url will be passed directly to the test. It contains
+information about the build to use and the IP address of the lab cache server
+to download update payloads from. Local runs can use cache servers as well but
+the setup is fairly complicated. Instead, local test runs can specify the
+`running_at_desk` and `build` args to run without requiring special setup.
+
+Example usage:
+
+With lab cache server SSH access and a job_repo_url:
+test_that <DUT> autoupdate_Basic.m2n.full.pin --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+Otherwise, the running_at_desk and build args are required:
+test_that <DUT> autoupdate_Basic.m2n.full.pin --board=<board> --args="running_at_desk=True build=R102-14643.0.0"
+
+"""
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Basic', host=host, full_payload=True,
+                 m2n=True, pin_login=True, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_CatchBadSignatures/autoupdate_CatchBadSignatures.py b/server/site_tests/autoupdate_CatchBadSignatures/autoupdate_CatchBadSignatures.py
index c9ce95e..78b6abc 100755
--- a/server/site_tests/autoupdate_CatchBadSignatures/autoupdate_CatchBadSignatures.py
+++ b/server/site_tests/autoupdate_CatchBadSignatures/autoupdate_CatchBadSignatures.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/autoupdate_CatchBadSignatures/control b/server/site_tests/autoupdate_CatchBadSignatures/control
index 06779f2..f8c7599 100644
--- a/server/site_tests/autoupdate_CatchBadSignatures/control
+++ b/server/site_tests/autoupdate_CatchBadSignatures/control
@@ -10,8 +10,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:bvt-inline"
+ATTRIBUTES = "suite:bvt-inline, suite:pvs-bvt-inline, suite:satlab-qual-bvt-inline"
 JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This is a test to verify that update_engine correctly checks signatures in the
@@ -34,7 +35,7 @@
      $ ./build_packages --board=${BOARD}
      $ ./build_image --board=${BOARD} --noenable_rootfs_verification test
 
-     Alternatively, you can use any kind of Chrome OS image you already have or
+     Alternatively, you can use any kind of ChromeOS image you already have or
      downloaded.
 
   2. Reduce the size of Rootfs and Kernel partitions. This is done so these
@@ -52,7 +53,7 @@
      $ sudo mount /dev/loop1p3 rootfs
      $ sudo mount /dev/loop1p4 kernel
 
-     Now you need a lsb-release file copied from any Chrome OS.
+     Now you need a lsb-release file copied from any ChromeOS.
 
      $ mkdir rootfs/etc && cp <lsb-release> rootfs/etc
      $ touch kernel/fake-kernel.bin # Optional
diff --git a/server/site_tests/autoupdate_Cellular/autoupdate_Cellular.py b/server/site_tests/autoupdate_Cellular/autoupdate_Cellular.py
index ef6f38d..870195d 100644
--- a/server/site_tests/autoupdate_Cellular/autoupdate_Cellular.py
+++ b/server/site_tests/autoupdate_Cellular/autoupdate_Cellular.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/autoupdate_Cellular/control.delta b/server/site_tests/autoupdate_Cellular/control.delta
deleted file mode 100644
index 7864b69..0000000
--- a/server/site_tests/autoupdate_Cellular/control.delta
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dhaddock, Chromium OS"
-NAME = "autoupdate_Cellular.delta"
-PURPOSE = "Test autoupdate over cellular with a delta payload."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-# Disable until test can be fixed: http://b/163367994
-# ATTRIBUTES = "suite:cellular_au_nightly"
-DOC = """
-This tests autoupdate over a cellular connection.
-
-To run locally you need to pass in a job_repo_url arg. This is used to tell
-the test what build it is running. In the lab this is setup when the DUT is
-provisioned.
-
-test_that <hostname>.cros autoupdate_Cellular.delta --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
-
-"""
-
-from autotest_lib.client.common_lib import utils
-
-args_dict = utils.args_to_dict(args)
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('autoupdate_Cellular', host=host, full_payload=False,
-                 **args_dict)
-
-job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Cellular/control.delta.att b/server/site_tests/autoupdate_Cellular/control.delta.att
new file mode 100644
index 0000000..2afe479
--- /dev/null
+++ b/server/site_tests/autoupdate_Cellular/control.delta.att
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Cellular.delta.att"
+PURPOSE = "Test autoupdate over cellular with a delta payload."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_au_nightly"
+DEPENDENCIES = "carrier:att"
+PY_VERSION = 3
+DOC = """
+This tests autoupdate over a cellular connection.
+
+To run locally you need to pass in a job_repo_url arg. This is used to tell
+the test what build it is running. In the lab this is set up when the DUT is
+provisioned.
+
+test_that <hostname>.cros autoupdate_Cellular.delta.att --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Cellular', host=host, full_payload=False,
+                 **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Cellular/control.delta.sprint b/server/site_tests/autoupdate_Cellular/control.delta.sprint
new file mode 100644
index 0000000..7afd4a3
--- /dev/null
+++ b/server/site_tests/autoupdate_Cellular/control.delta.sprint
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Cellular.delta.sprint"
+PURPOSE = "Test autoupdate over cellular with a delta payload."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_au_nightly"
+DEPENDENCIES = "carrier:sprint"
+PY_VERSION = 3
+DOC = """
+This tests autoupdate over a cellular connection.
+
+To run locally you need to pass in a job_repo_url arg. This is used to tell
+the test what build it is running. In the lab this is set up when the DUT is
+provisioned.
+
+test_that <hostname>.cros autoupdate_Cellular.delta.sprint --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Cellular', host=host, full_payload=False,
+                 **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Cellular/control.delta.tmobile b/server/site_tests/autoupdate_Cellular/control.delta.tmobile
new file mode 100644
index 0000000..c71f4eb
--- /dev/null
+++ b/server/site_tests/autoupdate_Cellular/control.delta.tmobile
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Cellular.delta.tmobile"
+PURPOSE = "Test autoupdate over cellular with a delta payload."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_au_nightly"
+DEPENDENCIES = "carrier:tmobile"
+PY_VERSION = 3
+DOC = """
+This tests autoupdate over a cellular connection.
+
+To run locally you need to pass in a job_repo_url arg. This is used to tell
+the test what build it is running. In the lab this is set up when the DUT is
+provisioned.
+
+test_that <hostname>.cros autoupdate_Cellular.delta.tmobile --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Cellular', host=host, full_payload=False,
+                 **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Cellular/control.delta.verizon b/server/site_tests/autoupdate_Cellular/control.delta.verizon
new file mode 100644
index 0000000..3d52243
--- /dev/null
+++ b/server/site_tests/autoupdate_Cellular/control.delta.verizon
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Cellular.delta.verizon"
+PURPOSE = "Test autoupdate over cellular with a delta payload."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_au_nightly"
+DEPENDENCIES = "carrier:verizon"
+PY_VERSION = 3
+DOC = """
+This tests autoupdate over a cellular connection.
+
+To run locally you need to pass in a job_repo_url arg. This is used to tell
+the test what build it is running. In the lab this is set up when the DUT is
+provisioned.
+
+test_that <hostname>.cros autoupdate_Cellular.delta.verizon --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Cellular', host=host, full_payload=False,
+                 **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Cellular/control.full b/server/site_tests/autoupdate_Cellular/control.full
deleted file mode 100644
index 0bdf575..0000000
--- a/server/site_tests/autoupdate_Cellular/control.full
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dhaddock, Chromium OS"
-NAME = "autoupdate_Cellular.full"
-PURPOSE = "Test autoupdate over cellular."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-# Disable this test until it can be fixed: http://b/163367994
-# ATTRIBUTES = "suite:cellular_au"
-DOC = """
-This tests autoupdate over a cellular connection.
-
-To run locally you need to pass in a job_repo_url arg. This is used to tell
-the test what build it is running. In the lab this is setup when the DUT is
-provisioned.
-
-test_that <hostname>.cros autoupdate_Cellular --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
-
-"""
-
-from autotest_lib.client.common_lib import utils
-
-args_dict = utils.args_to_dict(args)
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('autoupdate_Cellular', host=host, **args_dict)
-
-job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Cellular/control.full.att b/server/site_tests/autoupdate_Cellular/control.full.att
new file mode 100644
index 0000000..4a9c4da
--- /dev/null
+++ b/server/site_tests/autoupdate_Cellular/control.full.att
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Cellular.full.att"
+PURPOSE = "Test autoupdate over cellular."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_au"
+DEPENDENCIES = "carrier:att"
+PY_VERSION = 3
+DOC = """
+This tests autoupdate over a cellular connection.
+
+To run locally you need to pass in a job_repo_url arg. This is used to tell
+the test what build it is running. In the lab this is set up when the DUT is
+provisioned.
+
+test_that <hostname>.cros autoupdate_Cellular.full.att --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Cellular', host=host, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Cellular/control.full.sprint b/server/site_tests/autoupdate_Cellular/control.full.sprint
new file mode 100644
index 0000000..3d02df2
--- /dev/null
+++ b/server/site_tests/autoupdate_Cellular/control.full.sprint
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Cellular.full.sprint"
+PURPOSE = "Test autoupdate over cellular."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_au"
+DEPENDENCIES = "carrier:sprint"
+PY_VERSION = 3
+DOC = """
+This tests autoupdate over a cellular connection.
+
+To run locally you need to pass in a job_repo_url arg. This is used to tell
+the test what build it is running. In the lab this is set up when the DUT is
+provisioned.
+
+test_that <hostname>.cros autoupdate_Cellular.full.sprint --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Cellular', host=host, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Cellular/control.full.tmobile b/server/site_tests/autoupdate_Cellular/control.full.tmobile
new file mode 100644
index 0000000..deb627a
--- /dev/null
+++ b/server/site_tests/autoupdate_Cellular/control.full.tmobile
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Cellular.full.tmobile"
+PURPOSE = "Test autoupdate over cellular."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_au"
+DEPENDENCIES = "carrier:tmobile"
+PY_VERSION = 3
+DOC = """
+This tests autoupdate over a cellular connection.
+
+To run locally you need to pass in a job_repo_url arg. This is used to tell
+the test what build it is running. In the lab this is set up when the DUT is
+provisioned.
+
+test_that <hostname>.cros autoupdate_Cellular.full.tmobile --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Cellular', host=host, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Cellular/control.full.verizon b/server/site_tests/autoupdate_Cellular/control.full.verizon
new file mode 100644
index 0000000..1d6a44c
--- /dev/null
+++ b/server/site_tests/autoupdate_Cellular/control.full.verizon
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Cellular.full.verizon"
+PURPOSE = "Test autoupdate over cellular."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_au"
+DEPENDENCIES = "carrier:verizon"
+PY_VERSION = 3
+DOC = """
+This tests autoupdate over a cellular connection.
+
+To run locally you need to pass in a job_repo_url arg. This is used to tell
+the test what build it is running. In the lab this is set up when the DUT is
+provisioned.
+
+test_that <hostname>.cros autoupdate_Cellular.full.verizon --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Cellular', host=host, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_ConsecutiveUpdatesBeforeReboot/autoupdate_ConsecutiveUpdatesBeforeReboot.py b/server/site_tests/autoupdate_ConsecutiveUpdatesBeforeReboot/autoupdate_ConsecutiveUpdatesBeforeReboot.py
new file mode 100644
index 0000000..b78f92f
--- /dev/null
+++ b/server/site_tests/autoupdate_ConsecutiveUpdatesBeforeReboot/autoupdate_ConsecutiveUpdatesBeforeReboot.py
@@ -0,0 +1,67 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib.cros import kernel_utils
+from autotest_lib.server.cros.update_engine import update_engine_test
+
+
+class autoupdate_ConsecutiveUpdatesBeforeReboot(
+        update_engine_test.UpdateEngineTest):
+    """Performs consecutive updates while waiting for reboot. """
+    version = 1
+
+    def cleanup(self):
+        """Clean up the test state."""
+        # Disable repeated updates using update_engine_client.
+        self._set_feature(feature_name=self._REPEATED_UPDATES_FEATURE,
+                          enable=False)
+
+    def run_once(self, job_repo_url=None, running_at_desk=False):
+        """
+        @param job_repo_url: A url pointing to the devserver where the autotest
+            package for this build should be staged.
+        @param running_at_desk: indicates test is run locally from a workstation.
+
+        """
+        # Enable repeated updates using update_engine_client.
+        self._set_feature(feature_name=self._REPEATED_UPDATES_FEATURE,
+                          enable=True)
+
+        # Get a payload to use for the test.
+        payload_url_full = self.get_payload_for_nebraska(
+                job_repo_url, full_payload=True, public_bucket=running_at_desk)
+
+        # Record DUT state before the update.
+        _, inactive = kernel_utils.get_kernel_state(self._host)
+
+        # Perform an update.
+        self._run_client_test_and_check_result(self._CLIENT_TEST,
+                                               payload_url=payload_url_full)
+
+        # Verify the first update finished successfully.
+        self._wait_for_update_to_complete()
+
+        payload_url_delta = self.get_payload_for_nebraska(
+                job_repo_url,
+                full_payload=False,
+                public_bucket=running_at_desk)
+
+        # Perform another update. This should also succeed because the delta
+        # and full payloads have different fingerprint values.
+        self._run_client_test_and_check_result(self._CLIENT_TEST,
+                                               payload_url=payload_url_delta)
+
+        self._wait_for_update_to_complete()
+        # Verify that both updates completed successfully by checking the logs
+        # for two successful updates.
+        self._check_update_engine_log_for_entry(
+                'Update successfully applied, waiting to reboot.',
+                raise_error=True,
+                min_count=2)
+
+        self._host.reboot()
+        kernel_utils.verify_boot_expectations(inactive, host=self._host)
+        rootfs_hostlog, _ = self._create_hostlog_files()
+        self.verify_update_events(self._FORCED_UPDATE, rootfs_hostlog)
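A brief sketch of the repeated-update pattern the test above relies on, using only the helpers referenced in this file (the feature toggle, the client test, and the log check); illustrative, not part of the change:

# Enable repeated updates, apply two payloads back to back, then confirm
# update_engine logged two successful applies before the reboot.
self._set_feature(feature_name=self._REPEATED_UPDATES_FEATURE, enable=True)
for payload_url in (payload_url_full, payload_url_delta):
    self._run_client_test_and_check_result(self._CLIENT_TEST,
                                           payload_url=payload_url)
    self._wait_for_update_to_complete()
self._check_update_engine_log_for_entry(
        'Update successfully applied, waiting to reboot.',
        raise_error=True,
        min_count=2)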
diff --git a/server/site_tests/autoupdate_ConsecutiveUpdatesBeforeReboot/control.full b/server/site_tests/autoupdate_ConsecutiveUpdatesBeforeReboot/control.full
new file mode 100644
index 0000000..c20d56b
--- /dev/null
+++ b/server/site_tests/autoupdate_ConsecutiveUpdatesBeforeReboot/control.full
@@ -0,0 +1,36 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "vyshu@google.com, Chromium OS"
+NAME = "autoupdate_ConsecutiveUpdatesBeforeReboot.full"
+TIME = "MEDIUM"
+PURPOSE = "Tests repeated update with Nebraska."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
+DOC = """
+This tests a repeated update by enabling the repeated updates feature.
+This ensures that Update Engine checks for and performs another update even
+when waiting for reboot. The update runs first with a full payload and then
+again with a delta payload to avoid duplicate fingerprint values.
+
+We supply a job_repo_url to the test when running locally. In the lab this will
+be passed directly. The job_repo_url is a link to the autotest packages on a
+devserver. The test uses it to find the correct payload to use.
+
+Example usage:
+test_that autoupdate_ConsecutiveUpdatesBeforeReboot.full <DUT> --board=<board> --args="job_repo_url='http://<devserver IP>:8082/static/<board>-release/RXX-XXXXX.X.X/autotest/packages', running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_ConsecutiveUpdatesBeforeReboot', host=host,
+                 **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_DataPreserved/autoupdate_DataPreserved.py b/server/site_tests/autoupdate_DataPreserved/autoupdate_DataPreserved.py
index b179b6a..4d98a0f 100644
--- a/server/site_tests/autoupdate_DataPreserved/autoupdate_DataPreserved.py
+++ b/server/site_tests/autoupdate_DataPreserved/autoupdate_DataPreserved.py
@@ -1,7 +1,12 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import logging
+
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.server.cros import provisioner
 from autotest_lib.server.cros.update_engine import update_engine_test
 
 
@@ -15,6 +20,7 @@
     def cleanup(self):
         self._save_extra_update_engine_logs(number_of_logs=2)
         super(autoupdate_DataPreserved, self).cleanup()
+        self._restore_stateful()
 
 
     def run_once(self, full_payload=True, job_repo_url=None):
@@ -29,6 +35,19 @@
                              when run in the lab.
 
         """
+        # Provision latest stable build for the current board.
+        build_name = self._get_latest_serving_stable_build()
+
+        # Install the matching build with quick provision.
+        autotest_devserver = dev_server.ImageServer.resolve(
+                build_name, self._host.hostname)
+        update_url = autotest_devserver.get_update_url(build_name)
+        logging.info('Installing source image with update url: %s', update_url)
+        provisioner.ChromiumOSProvisioner(
+                update_url, host=self._host,
+                is_release_bucket=True).run_provision()
+
+        # Get payload for the update to ToT.
         payload_url = self.get_payload_for_nebraska(job_repo_url,
                                                     full_payload=full_payload)
 
diff --git a/server/site_tests/autoupdate_DataPreserved/control.full b/server/site_tests/autoupdate_DataPreserved/control.full
deleted file mode 100644
index 741fb45..0000000
--- a/server/site_tests/autoupdate_DataPreserved/control.full
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dhaddock, Chromium OS"
-NAME = "autoupdate_DataPreserved.full"
-PURPOSE = "Tests user data is not reset by updating."
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:au-perbuild"
-DOC = """
-This tests that user's timezone, input methods, and downloaded files remain
-unchanged after an update.
-
-test_that <hostname>.cros autoupdate_DataPreserved.full --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
-"""
-
-from autotest_lib.client.common_lib import utils
-
-args_dict = utils.args_to_dict(args)
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('autoupdate_DataPreserved', host=host, full_payload=True,
-                 **args_dict)
-
-job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_DataPreserved/control.m2n.full b/server/site_tests/autoupdate_DataPreserved/control.m2n.full
new file mode 100644
index 0000000..a30c65d
--- /dev/null
+++ b/server/site_tests/autoupdate_DataPreserved/control.m2n.full
@@ -0,0 +1,30 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_DataPreserved.m2n.full"
+PURPOSE = "Tests user data is not reset by updating."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:au-m2n"
+PY_VERSION = 3
+DOC = """
+This tests that user's timezone, input methods, and downloaded files remain
+unchanged after an update.
+
+test_that <hostname>.cros autoupdate_DataPreserved.m2n.full --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+"""
+
+from autotest_lib.client.common_lib import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_DataPreserved', host=host, full_payload=True,
+                 **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_EndToEndTest/autoupdate_EndToEndTest.py b/server/site_tests/autoupdate_EndToEndTest/autoupdate_EndToEndTest.py
index 7e8cad8..b02d6ba 100755
--- a/server/site_tests/autoupdate_EndToEndTest/autoupdate_EndToEndTest.py
+++ b/server/site_tests/autoupdate_EndToEndTest/autoupdate_EndToEndTest.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,15 +6,15 @@
 import logging
 import os
 
+from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import kernel_utils
 from autotest_lib.client.cros import constants
-from autotest_lib.server import afe_utils
 from autotest_lib.server.cros import provisioner
 from autotest_lib.server.cros.update_engine import update_engine_test
 
 
 class autoupdate_EndToEndTest(update_engine_test.UpdateEngineTest):
-    """Complete update test between two Chrome OS releases.
+    """Complete update test between two ChromeOS releases.
 
     Performs an end-to-end test of updating a ChromeOS device from one version
     to another. The test performs the following steps:
@@ -35,8 +36,6 @@
     """
     version = 1
 
-    _LOGIN_TEST = 'login_LoginSuccess'
-
 
     def cleanup(self):
         """Save the logs from stateful_partition's preserved/log dir."""
@@ -62,10 +61,11 @@
                 test_conf['target_payload_uri'])
         logging.debug(rerun_cmd)
 
-    def run_update_test(self, test_conf):
+    def run_update_test(self, test_conf, m2n):
         """Runs the update test and checks it succeeded.
 
         @param test_conf: A dictionary containing test configuration values.
+        @param m2n: True for an m2n test run.
 
         """
         # Record the active root partition.
@@ -75,7 +75,9 @@
         source_release = test_conf['source_release']
         target_release = test_conf['target_release']
 
-        self.update_device(test_conf['target_payload_uri'], tag='target')
+        self.update_device(test_conf['target_payload_uri'],
+                           tag='target',
+                           m2n=m2n)
 
         # Compare hostlog events from the update to the expected ones.
         rootfs, reboot = self._create_hostlog_files()
@@ -85,38 +87,67 @@
         logging.info('Update successful, test completed')
 
 
-    def run_once(self, test_conf):
+    def run_once(self, test_conf, m2n=False, build=None):
         """Performs a complete auto update test.
 
         @param test_conf: a dictionary containing test configuration values.
+        @param m2n: M -> N update. This means we install the current stable
+                    version of this board before updating to ToT.
+        @param build: target build for the update, i.e. R102-14650.0.0. Optional
+                      argument for running locally.
 
         """
+        if m2n:
+            if self._host.get_board().endswith("-kernelnext"):
+                raise error.TestNAError("Skipping test on kernelnext board")
+            # No test_conf is provided, so we need to assemble it ourselves
+            # for the target update information.
+            source_release = self._get_latest_serving_stable_build().rsplit(
+                    '/')[-1]
+            target_release = build.split(
+                    '-')[1] if build else self._host.get_release_version()
+            target_uri = self.get_payload_for_nebraska(build=build)
+            test_conf = {
+                    'target_release': target_release,
+                    'target_payload_uri': target_uri,
+                    'source_release': source_release,
+                    'source_payload_uri': None
+            }
+
         logging.debug('The test configuration supplied: %s', test_conf)
-        self._print_rerun_command(test_conf)
+        if not m2n:
+            self._print_rerun_command(test_conf)
         self._autotest_devserver = self._get_devserver_for_test(test_conf)
 
-        afe_utils.clean_provision_labels(self._host)
-
         # Install source image with quick-provision.
+        build_name = None
         source_payload_uri = test_conf['source_payload_uri']
-        if source_payload_uri:
+        if m2n:
+            build_name = self._get_latest_serving_stable_build()
+        elif source_payload_uri:
             build_name, _ = self._get_update_parameters_from_uri(
                 source_payload_uri)
+
+        if build_name is not None:
             update_url = self._autotest_devserver.get_update_url(
                 build_name)
             logging.info('Installing source image with update url: %s',
                          update_url)
-
             provisioner.ChromiumOSProvisioner(
                     update_url, host=self._host,
                     is_release_bucket=True).run_provision()
 
-            self._run_client_test_and_check_result(self._LOGIN_TEST,
-                                                   tag='source')
+            self._run_client_test_and_check_result(
+                    self._LOGIN_TEST,
+                    tag='source',
+                    username=self._LOGIN_TEST_USERNAME,
+                    password=self._LOGIN_TEST_PASSWORD)
         # Start the update to the target image.
-        self._stage_payloads(test_conf['target_payload_uri'],
-                             test_conf['target_archive_uri'])
-        self.run_update_test(test_conf)
+        self.run_update_test(test_conf, m2n)
 
         # Check we can login after the update.
-        self._run_client_test_and_check_result(self._LOGIN_TEST, tag='target')
+        self._run_client_test_and_check_result(
+                self._LOGIN_TEST,
+                tag='target',
+                username=self._LOGIN_TEST_USERNAME,
+                password=self._LOGIN_TEST_PASSWORD)
diff --git a/server/site_tests/autoupdate_EndToEndTest/control b/server/site_tests/autoupdate_EndToEndTest/control
index 01bb756..152ee9c 100644
--- a/server/site_tests/autoupdate_EndToEndTest/control
+++ b/server/site_tests/autoupdate_EndToEndTest/control
@@ -12,7 +12,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-
+PY_VERSION = 3
 # Disable server-side packaging support for this test.
 # This control file is used as the template for paygen_au_canary suite, which
 # creates the control files during paygen. Therefore, autotest server package
@@ -20,8 +20,8 @@
 REQUIRE_SSP = False
 
 DOC = """
-This is an end-to-end update test of Chrome OS releases. Given a test
-configuration, it will perform an end-to-end test of a Chrome OS update
+This is an end-to-end update test of ChromeOS releases. Given a test
+configuration, it will perform an end-to-end test of a ChromeOS update
 payload. A test configuration can be given as command-line arguments (see
 below) or instantiated inline as local variables.
 
@@ -95,7 +95,7 @@
     Run ssh-keygen to generate your id_rsa and id_rsa.pub files into ~/.ssh/
 
     Download the testing_rsa ssh keys from here:
-    https://chromium.googlesource.com/chromiumos/chromite/+/master/ssh_keys
+    https://chromium.googlesource.com/chromiumos/chromite/+/main/ssh_keys
     Save both to ~/.ssh
     chmod 600 ~/.ssh/testing_rsa* (otherwise permissions are too loose and
     will be ignored)
@@ -108,7 +108,7 @@
     You should now be able to ssh into DUTs in the lab without a password.
 
     Follow these instructions to setup the ssh access to lab devservers:
-    http://go/cros-sites-archive/for-team-members/infrastructure/chromeos-admin/devserver-access
+    https://g3doc.corp.google.com/company/teams/chrome/ops/fleet/software/onboarding/creating-work-env.md#devserver-environment-for-running-local-tests-with-lab-duts
         You'll need appropriate group membership to be able to access the
         Valentine secret for the devserver file.
 
diff --git a/server/site_tests/autoupdate_EndToEndTest/control.m2n.full b/server/site_tests/autoupdate_EndToEndTest/control.m2n.full
new file mode 100644
index 0000000..133ba85
--- /dev/null
+++ b/server/site_tests/autoupdate_EndToEndTest/control.m2n.full
@@ -0,0 +1,57 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = "kyleshima, Chromium OS"
+NAME = "autoupdate_EndToEndTest.m2n.full"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+PY_VERSION = 3
+ATTRIBUTES = "suite:au-m2n"
+JOB_RETRIES = 2
+
+DOC = """
+This tests an M-to-N update. That means it will update from the current stable
+version for this board to ToT. autoupdate_EndToEndTest runs as part of paygen
+and is kicked off differently from other tests. autoupdate_EndToEndTest.m2n.full
+will run the same as the other autoupdate tests, so we can detect any
+differences between the two test environments and catch test-breaking changes
+earlier.
+
+M2N tests require lab cache server access to provision the source stable build,
+so they are not easy to run locally.
+
+You need to enable SSH access to the lab cache servers to run this test.
+Refer to the setup instructions here:
+https://g3doc.corp.google.com/company/teams/chrome/ops/fleet/software/onboarding/creating-work-env.md#devserver-environment-for-running-local-tests-with-lab-duts
+
+A quick way to find active image caching servers (devservers) to add to
+shadow_config.ini is to search for "job_repo_url" in the logs of autoupdate
+tests running in the lab. You can use those IP addresses as valid image caching
+servers. See go/cros-au-tests for quick access to test results and logs.
+
+Example usage:
+
+# The target update will be the currently provisioned version on the DUT.
+test_that <DUT> autoupdate_EndToEndTest.m2n.full
+
+# The target update will be the build specified in the args.
+test_that <DUT> autoupdate_EndToEndTest.m2n.full --args="build=R102-14692.0.0"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_EndToEndTest', host=host, test_conf=None,
+                 m2n=True, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/autoupdate_ForcedOOBEUpdate.py b/server/site_tests/autoupdate_ForcedOOBEUpdate/autoupdate_ForcedOOBEUpdate.py
index 7fe8375..6919c4f 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/autoupdate_ForcedOOBEUpdate.py
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/autoupdate_ForcedOOBEUpdate.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -6,10 +7,12 @@
 import random
 import time
 
+from autotest_lib.client.common_lib.cros import dev_server
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros import kernel_utils
 from autotest_lib.client.common_lib.cros import tpm_utils
+from autotest_lib.server.cros import provisioner
 from autotest_lib.server.cros.update_engine import update_engine_test
 
 class autoupdate_ForcedOOBEUpdate(update_engine_test.UpdateEngineTest):
@@ -23,6 +26,9 @@
 
         self._clear_custom_lsb_release()
 
+        # Clean up the nebraska usr dir.
+        self._clear_nebraska_dir()
+
         self._set_update_over_cellular_setting(False)
 
         # Cancel any update still in progress.
@@ -30,7 +36,8 @@
             logging.debug('Canceling the in-progress update.')
             self._restart_update_engine()
         super(autoupdate_ForcedOOBEUpdate, self).cleanup()
-
+        if self._m2n:
+            self._restore_stateful()
 
     def _wait_for_reboot_after_update(self, timeout_minutes=15):
         """
@@ -67,6 +74,8 @@
                 # if status is IDLE we need to figure out if an error occurred
                 # or the DUT autorebooted.
                 elif self._is_update_engine_idle(status):
+                    self._host.run(
+                            'ls /mnt/stateful_partition/etc/lsb-release')
                     if self._is_update_finished_downloading(last_status):
                         if len(self._get_update_engine_logs()) > logs_before:
                             return
@@ -99,8 +108,13 @@
                                  desc='post-reboot event to fire after reboot')
 
 
-    def run_once(self, full_payload=True, cellular=False,
-                 interrupt=None, job_repo_url=None, moblab=False):
+    def run_once(self,
+                 full_payload=True,
+                 cellular=False,
+                 interrupt=None,
+                 job_repo_url=None,
+                 moblab=False,
+                 m2n=False):
         """
         Runs a forced autoupdate during ChromeOS OOBE.
 
@@ -113,24 +127,37 @@
                              The test will read this from a host argument
                              when run in the lab.
         @param moblab: True if we are running on moblab.
+        @param m2n: True if we should first provision the latest stable version
+                    for the current board so that we can perform a M->N update.
 
         """
         if interrupt and interrupt not in self._SUPPORTED_INTERRUPTS:
             raise error.TestFail('Unknown interrupt type: %s' % interrupt)
         tpm_utils.ClearTPMOwnerRequest(self._host)
 
-        # This test can be used with Nebraska (cellular tests) or a devserver
-        # (non-cellular) tests. Each passes a different value to the client:
-        # An update_url for a devserver or a payload_url for Nebraska.
+        self._m2n = m2n
+        if self._m2n:
+            # Provision latest stable build for the current board.
+            build_name = self._get_latest_serving_stable_build()
+
+            # Install the matching build with quick provision.
+            autotest_devserver = dev_server.ImageServer.resolve(
+                    build_name, self._host.hostname)
+            update_url = autotest_devserver.get_update_url(build_name)
+            logging.info('Installing source image with update url: %s',
+                         update_url)
+            provisioner.ChromiumOSProvisioner(
+                    update_url, host=self._host,
+                    is_release_bucket=True).run_provision()
+
         payload_url = None
-        update_url = None
         if cellular:
             self._set_update_over_cellular_setting(True)
             payload_url = self.get_payload_url_on_public_bucket(
                 job_repo_url, full_payload=full_payload)
         else:
-            update_url = self.get_update_url_for_test(
-                job_repo_url, full_payload=full_payload)
+            payload_url = self.get_payload_for_nebraska(
+                    job_repo_url, full_payload=full_payload)
         before_version = self._host.get_release_version()
 
         # Clear any previously started updates.
@@ -149,11 +176,13 @@
         active, inactive = kernel_utils.get_kernel_state(self._host)
         # Call client test to start the forced OOBE update.
         self._run_client_test_and_check_result(
-            'autoupdate_StartOOBEUpdate', update_url=update_url,
-            payload_url=payload_url, full_payload=full_payload,
-            cellular=cellular, critical_update=True,
-            interrupt_network=interrupt == self._NETWORK_INTERRUPT,
-            interrupt_progress=progress)
+                'autoupdate_StartOOBEUpdate',
+                payload_url=payload_url,
+                full_payload=full_payload,
+                cellular=cellular,
+                critical_update=True,
+                interrupt_network=interrupt == self._NETWORK_INTERRUPT,
+                interrupt_progress=progress)
 
         if interrupt in [self._REBOOT_INTERRUPT, self._SUSPEND_INTERRUPT]:
             logging.info('Waiting to interrupt update.')
@@ -178,9 +207,9 @@
             # Remove screenshots since interrupt test succeeded.
             self._remove_screenshots()
 
-        # Create lsb-release with no_update=True to get post-reboot event.
-        lsb_url = payload_url if cellular else update_url
-        self._create_custom_lsb_release(lsb_url, no_update=True)
+        # Set no_update=True in the nebraska startup config to get the
+        # post-reboot update event.
+        self._edit_nebraska_startup_config(no_update=True)
 
         self._wait_for_oobe_update_to_complete()
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.cellular.delta b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.cellular.delta
index bdcd64e..7617e93 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.cellular.delta
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.cellular.delta
@@ -11,6 +11,7 @@
 TEST_TYPE = "server"
 # Disable until test can be fixed: http://b/163367994
 # ATTRIBUTES = "suite:cellular_au_nightly"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE using cellular.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.cellular.full b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.cellular.full
index 1188e1d..e83e46b 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.cellular.full
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.cellular.full
@@ -11,6 +11,7 @@
 TEST_TYPE = "server"
 # Disable this test until it can be fixed: https://crbug.com/969207
 # ATTRIBUTES = "suite:cellular_au"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE using cellular.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.delta b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.delta
index f9fbbf2..de4d101 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.delta
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.delta
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-oobe"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.delta.moblab b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.delta.moblab
index 3d89869..44e7ba0 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.delta.moblab
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.delta.moblab
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au_fsi"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.full b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.full
index f8402b4..c3cec57 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.full
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-oobe, suite:infra_qual"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.full.moblab b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.full.moblab
index 0bb6936..8d28e66 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.full.moblab
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.full.moblab
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au_fsi"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full
index 1b39c6e..8bb043c 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-oobe"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE with interruptions.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full.moblab b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full.moblab
index 708ff6a..3ac7a74 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full.moblab
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full.moblab
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au_fsi"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE with interruptions.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full
index 52a56a9..6f52bdb 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-oobe"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE with interruptions.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full.moblab b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full.moblab
index b39e37f..a7b9dd4 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full.moblab
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full.moblab
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au_fsi"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE with interruptions.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full
index 617fbcc..41c9242 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-oobe"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE with interruptions.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full.moblab b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full.moblab
index f44fe90..ea93476 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full.moblab
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full.moblab
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au_fsi"
+PY_VERSION = 3
 DOC = """
 This tests the forced autoupdate flow at OOBE with interruptions.
 
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.m2n.full b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.m2n.full
new file mode 100644
index 0000000..f6f6d19
--- /dev/null
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.m2n.full
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_ForcedOOBEUpdate.m2n.full"
+PURPOSE = "Test M->N forced update at OOBE."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:au-m2n"
+PY_VERSION = 3
+DOC = """
+This tests the forced autoupdate flow at OOBE.
+
+Debugging steps:
+This test cannot easily be debugged locally because:
+1. Using a devserver on your workstation is blocked by SNAX.
+2. The test relies on reading a job_repo_url, which is only set by runs
+initiated in the lab.
+
+You will need to use a lab DUT and a lab devserver but kick it off from your
+workstation. You can start by going to cautotest and locking a DUT.
+
+You will need to pass a job_repo_url param to test_that.
+
+The test needs to be able to SSH into lab devservers. Follow these
+instructions to set up SSH access to them:
+https://sites.google.com/a/google.com/chromeos/for-team-members/infrastructure/chromeos-admin/devserver-access
+
+Use this command to list the lab devservers:
+atest server list | grep devserver
+
+Request the "Access To Chrome Lab or Cautotest" permission from:
+https://sphinx.corp.google.com/sphinx/#accessChangeRequest:systemName=default-mnp-use-cases
+
+Then run the test as follows:
+test_that <hostname>.cros autoupdate_ForcedOOBEUpdate.m2n.full --args="job_repo_url='http://<IP>:<PORT>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+
+- IP: The IP of the devserver you are running the test against.
+- PORT: The port of the devserver, normally 8082.
+"""
+
+from autotest_lib.client.common_lib import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_ForcedOOBEUpdate', host=host, full_payload=True,
+                 m2n=True, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_FromUI/autoupdate_FromUI.py b/server/site_tests/autoupdate_FromUI/autoupdate_FromUI.py
index 9b81c00..6684d8b 100644
--- a/server/site_tests/autoupdate_FromUI/autoupdate_FromUI.py
+++ b/server/site_tests/autoupdate_FromUI/autoupdate_FromUI.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,7 +11,7 @@
     """Trigger an update from the UI.
 
     Start an update by clicking on the 'Check for update' button in the
-    Chrome OS settings menu, instead of calling to update_engine_client
+    ChromeOS settings menu, instead of calling update_engine_client
     directly.
 
     """
diff --git a/server/site_tests/autoupdate_FromUI/control.full b/server/site_tests/autoupdate_FromUI/control.full
index 2614b50..52cd8b2 100644
--- a/server/site_tests/autoupdate_FromUI/control.full
+++ b/server/site_tests/autoupdate_FromUI/control.full
@@ -10,9 +10,10 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 This tests that an update can be triggered by clicking the 'Check for updates'
-button in the Chrome OS Settings app.
+button in the ChromeOS Settings app.
 
 test_that <hostname>.cros autoupdate_FromUI.full --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
 """
diff --git a/server/site_tests/autoupdate_Interruptions/autoupdate_Interruptions.py b/server/site_tests/autoupdate_Interruptions/autoupdate_Interruptions.py
index cfc4382..da1e479 100644
--- a/server/site_tests/autoupdate_Interruptions/autoupdate_Interruptions.py
+++ b/server/site_tests/autoupdate_Interruptions/autoupdate_Interruptions.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -16,6 +17,10 @@
 
     def cleanup(self):
         self._save_extra_update_engine_logs(number_of_logs=2)
+
+        # Clean up the nebraska usr dir.
+        self._clear_nebraska_dir()
+
         super(autoupdate_Interruptions, self).cleanup()
 
 
@@ -38,8 +43,8 @@
         self._remove_update_engine_pref(self._UPDATE_CHECK_RESPONSE_HASH)
         self._restart_update_engine(ignore_status=True)
 
-        update_url = self.get_update_url_for_test(job_repo_url,
-                                                  full_payload=full_payload)
+        payload_url = self.get_payload_for_nebraska(job_repo_url,
+                                                    full_payload=full_payload)
         chromeos_version = self._host.get_release_version()
         active, inactive = kernel_utils.get_kernel_state(self._host)
         # Choose a random downloaded progress to interrupt the update.
@@ -48,10 +53,11 @@
 
         # Login, start the update, logout
         self._run_client_test_and_check_result(
-            'autoupdate_LoginStartUpdateLogout', update_url=update_url,
-            progress_to_complete=progress,
-            full_payload=full_payload,
-            interrupt_network=interrupt == self._NETWORK_INTERRUPT)
+                'autoupdate_LoginStartUpdateLogout',
+                payload_url=payload_url,
+                progress_to_complete=progress,
+                full_payload=full_payload,
+                interrupt_network=interrupt == self._NETWORK_INTERRUPT)
 
         if interrupt in [self._REBOOT_INTERRUPT, self._SUSPEND_INTERRUPT]:
             if self._is_update_finished_downloading():
@@ -65,8 +71,9 @@
                 self._host.reboot()
                 utils.poll_for_condition(self._get_update_engine_status,
                                          desc='update engine to start')
-                self._check_for_update(update_url, critical_update=True,
-                                       full_payload=full_payload)
+                # The client test created a nebraska startup config, so
+                # nebraska will be up after the reboot.
+                self._check_for_update(self._get_nebraska_update_url())
             elif interrupt == self._SUSPEND_INTERRUPT:
                 self._suspend_then_resume()
 
@@ -81,12 +88,13 @@
                                      'left off after interruption.')
 
         self._wait_for_update_to_complete()
+        self._edit_nebraska_startup_config(no_update=True)
         self._host.reboot()
         # Check that update engine is ready after reboot.
         utils.poll_for_condition(self._get_update_engine_status,
                                  desc='update engine to start')
         # Do a final update check with no_update=True to get post reboot event.
-        self._check_for_update(update_url, no_update=True)
+        self._check_for_update(self._get_nebraska_update_url())
 
         # Verify the update was successful by checking hostlog and kernel.
         rootfs_hostlog, reboot_hostlog = self._create_hostlog_files()
diff --git a/server/site_tests/autoupdate_Interruptions/control.network.full b/server/site_tests/autoupdate_Interruptions/control.network.full
index d488310..99d21ab 100644
--- a/server/site_tests/autoupdate_Interruptions/control.network.full
+++ b/server/site_tests/autoupdate_Interruptions/control.network.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 Tests disabling the network on the chromebook during an update.
 
diff --git a/server/site_tests/autoupdate_Interruptions/control.reboot.full b/server/site_tests/autoupdate_Interruptions/control.reboot.full
index 4675f34..f2ac93b 100644
--- a/server/site_tests/autoupdate_Interruptions/control.reboot.full
+++ b/server/site_tests/autoupdate_Interruptions/control.reboot.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 Tests rebooting the chromebook during an update.
 
diff --git a/server/site_tests/autoupdate_Interruptions/control.suspend.full b/server/site_tests/autoupdate_Interruptions/control.suspend.full
index 680f9f8..9ad7fda 100644
--- a/server/site_tests/autoupdate_Interruptions/control.suspend.full
+++ b/server/site_tests/autoupdate_Interruptions/control.suspend.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 Tests suspending/resuming the chromebook during an update.
 
diff --git a/server/site_tests/autoupdate_InvalidateUpdateBeforeReboot/autoupdate_InvalidateUpdateBeforeReboot.py b/server/site_tests/autoupdate_InvalidateUpdateBeforeReboot/autoupdate_InvalidateUpdateBeforeReboot.py
new file mode 100644
index 0000000..3d0d135
--- /dev/null
+++ b/server/site_tests/autoupdate_InvalidateUpdateBeforeReboot/autoupdate_InvalidateUpdateBeforeReboot.py
@@ -0,0 +1,45 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server.cros.update_engine import update_engine_test
+
+
+class autoupdate_InvalidateUpdateBeforeReboot(
+        update_engine_test.UpdateEngineTest):
+    """Tests to see if we can invalidate an update before a reboot."""
+    version = 1
+    _CLIENT_TEST = 'autoupdate_InvalidateSuccessfulUpdate'
+
+    def run_once(self,
+                 full_payload=True,
+                 job_repo_url=None,
+                 running_at_desk=False):
+        """
+        Runs the invalidate successful update test.
+
+        @param full_payload: True for full payload, False for delta.
+        @param job_repo_url: A url pointing to the devserver where the autotest
+            package for this build should be staged.
+        @param running_at_desk: Indicates test is run locally from a
+            workstation.
+
+        """
+        # Get a payload to use for the test.
+        payload_url = self.get_payload_for_nebraska(
+                job_repo_url,
+                full_payload=full_payload,
+                public_bucket=running_at_desk)
+
+        # Perform an update, invalidate it and verify successful invalidation.
+        self._run_client_test_and_check_result(self._CLIENT_TEST,
+                                               payload_url=payload_url)
+
+        # Verify via the logs the update was applied.
+        self._check_update_engine_log_for_entry(
+                'Update successfully applied, waiting to reboot.',
+                raise_error=True)
+
+        # Verify via the logs the update was invalidated.
+        self._check_update_engine_log_for_entry(
+                'Invalidating previous update.', raise_error=True)
diff --git a/server/site_tests/autoupdate_InvalidateUpdateBeforeReboot/control.delta b/server/site_tests/autoupdate_InvalidateUpdateBeforeReboot/control.delta
new file mode 100644
index 0000000..7ea7a23
--- /dev/null
+++ b/server/site_tests/autoupdate_InvalidateUpdateBeforeReboot/control.delta
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "hbarnor, Chromium OS"
+NAME = "autoupdate_InvalidateUpdateBeforeReboot.delta"
+TIME = "MEDIUM"
+PURPOSE = "Tests invalidate last update with Nebraska."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
+DOC = """
+This tests the invalidate last update feature.
+
+This test ensures that Update Engine checks for and invalidates a previous
+update if it has not rebooted into it. This is achieved by feeding an update
+to the update_engine followed by a request to invalidate the update.
+
+We supply a job_repo_url to the test when running locally. In the lab it is
+read from a host argument. The job_repo_url is a link to the autotest packages
+on a devserver; the test uses it to find the correct payload.
+
+Example usage:
+test_that <DUT> autoupdate_InvalidateUpdateBeforeReboot.delta --board=<board> --args="job_repo_url='http://<devserver IP>:8082/static/<board>-release/RXX-XXXXX.X.X/autotest/packages', running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_InvalidateUpdateBeforeReboot', host=host, full_payload=False,
+                 **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_InvalidateUpdateBeforeReboot/control.full b/server/site_tests/autoupdate_InvalidateUpdateBeforeReboot/control.full
new file mode 100644
index 0000000..aa1af04
--- /dev/null
+++ b/server/site_tests/autoupdate_InvalidateUpdateBeforeReboot/control.full
@@ -0,0 +1,36 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "hbarnor, Chromium OS"
+NAME = "autoupdate_InvalidateUpdateBeforeReboot.full"
+TIME = "MEDIUM"
+PURPOSE = "Tests invalidate last update with Nebraska."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
+DOC = """
+This tests the invalidate last update feature.
+
+This test ensures that Update Engine checks for and invalidates a previous
+update if it has not rebooted into it. This is achieved by feeding a full update
+to the update_engine followed by a request to invalidate the update.
+
+We supply a job_repo_url to the test when running locally. In the lab it is
+read from a host argument. The job_repo_url is a link to the autotest packages
+on a devserver; the test uses it to find the correct payload.
+
+Example usage:
+test_that <DUT> autoupdate_InvalidateUpdateBeforeReboot.full --board=<board> --args="job_repo_url='http://<devserver IP>:8082/static/<board>-release/RXX-XXXXX.X.X/autotest/packages', running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_InvalidateUpdateBeforeReboot', host=host, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Lacros/autoupdate_Lacros.py b/server/site_tests/autoupdate_Lacros/autoupdate_Lacros.py
new file mode 100644
index 0000000..c3169d7
--- /dev/null
+++ b/server/site_tests/autoupdate_Lacros/autoupdate_Lacros.py
@@ -0,0 +1,83 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.client.common_lib.cros import kernel_utils
+from autotest_lib.server.cros import provisioner
+from autotest_lib.server.cros.update_engine import update_engine_test
+
+
+class autoupdate_Lacros(update_engine_test.UpdateEngineTest):
+    """Performs a simple AU test and checks lacros."""
+    version = 1
+
+    def cleanup(self):
+        super(autoupdate_Lacros, self).cleanup()
+
+    def run_once(self,
+                 full_payload,
+                 job_repo_url=None,
+                 m2n=False,
+                 running_at_desk=False):
+        """
+        Performs autoupdate with Nebraska and checks rootfs-lacros.
+
+        @param full_payload: True for full payload, False for delta
+        @param job_repo_url: A url pointing to the devserver where the autotest
+            package for this build should be staged.
+        @param m2n: True to first provision the latest stable build for the
+                    current board so the test performs an M-to-N update.
+        @param running_at_desk: Indicates test is run locally from a
+                                workstation. Does not work with M2N tests.
+
+        """
+        if m2n:
+            # Provision latest stable build for the current board.
+            build_name = self._get_latest_serving_stable_build()
+            logging.debug('build name is %s', build_name)
+
+            # Install the matching build with quick provision.
+            autotest_devserver = dev_server.ImageServer.resolve(
+                    build_name, self._host.hostname)
+            update_url = autotest_devserver.get_update_url(build_name)
+            logging.info('Installing source image with update url: %s',
+                         update_url)
+            provisioner.ChromiumOSProvisioner(
+                    update_url, host=self._host,
+                    is_release_bucket=True).run_provision()
+
+        # Login and check rootfs-lacros version
+        self._run_client_test_and_check_result('desktopui_RootfsLacros',
+                                               tag='before')
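+        # Assumption: the desktopui_RootfsLacros client test writes the
+        # detected lacros version to /tmp/lacros_version.txt; read it back to
+        # record the version before the update.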
+        before_version = self._host.run(['cat',
+                                         '/tmp/lacros_version.txt']).stdout
+        logging.info('rootfs-lacros version before update: %s', before_version)
+
+        # Get a payload to use for the test.
+        payload_url = self.get_payload_for_nebraska(
+                job_repo_url,
+                full_payload=full_payload,
+                public_bucket=running_at_desk)
+
+        # Record DUT state before the update.
+        active, inactive = kernel_utils.get_kernel_state(self._host)
+
+        # Perform the update.
+        self._run_client_test_and_check_result('autoupdate_CannedOmahaUpdate',
+                                               payload_url=payload_url)
+
+        # Verify the update completed successfully.
+        self._host.reboot()
+        kernel_utils.verify_boot_expectations(inactive, host=self._host)
+        rootfs_hostlog, _ = self._create_hostlog_files()
+        self.verify_update_events(self._FORCED_UPDATE, rootfs_hostlog)
+
+        # Check the rootfs-lacros version again.
+        self._run_client_test_and_check_result('desktopui_RootfsLacros',
+                                               tag='after',
+                                               dont_override_profile=True)
+        after_version = self._host.run(['cat',
+                                        '/tmp/lacros_version.txt']).stdout
+        logging.info('rootfs-lacros version after update: %s', after_version)
diff --git a/server/site_tests/autoupdate_Lacros/control.full b/server/site_tests/autoupdate_Lacros/control.full
new file mode 100644
index 0000000..400a39d
--- /dev/null
+++ b/server/site_tests/autoupdate_Lacros/control.full
@@ -0,0 +1,31 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Lacros"
+TIME = "MEDIUM"
+PURPOSE = "Tests an N-to-N update and check rootfs-lacros."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
+DOC = """
+Logs in and launches rootfs-lacros before and after an N-to-N autoupdate.
+
+Use the `running_at_desk` arg when testing locally.
+
+Example usage:
+test_that <DUT> autoupdate_Lacros --board=<board> --args="job_repo_url=http://<IP>:8082/static/<board>-release/RXX-XXXXX.X.X/autotest/packages running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Lacros', host=host, full_payload=True,
+                 **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Lacros/control.full.m2n b/server/site_tests/autoupdate_Lacros/control.full.m2n
new file mode 100644
index 0000000..d0f9c93
--- /dev/null
+++ b/server/site_tests/autoupdate_Lacros/control.full.m2n
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_Lacros.m2n"
+TIME = "MEDIUM"
+PURPOSE = "Tests an M-to-N update and check rootfs-lacros."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:au-m2n"
+PY_VERSION = 3
+DOC = """
+Logs in and launches rootfs-lacros before and after an M-to-N autoupdate.
+
+Use the `running_at_desk` arg when testing locally.
+
+Example usage:
+test_that <DUT> autoupdate_Lacros.m2n --board=<board> --args="job_repo_url=http://<IP>:8082/static/<board>-release/RXX-XXXXX.X.X/autotest/packages running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_Lacros', host=host, full_payload=True, m2n=True,
+                 **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_MiniOS/autoupdate_MiniOS.py b/server/site_tests/autoupdate_MiniOS/autoupdate_MiniOS.py
new file mode 100644
index 0000000..05c4043
--- /dev/null
+++ b/server/site_tests/autoupdate_MiniOS/autoupdate_MiniOS.py
@@ -0,0 +1,193 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import kernel_utils
+from autotest_lib.server.cros.minios import minios_test
+
+
+class autoupdate_MiniOS(minios_test.MiniOsTest):
+    """Tests MiniOS update. """
+
+    version = 1
+
+    _EXCLUSION_PREFS_DIR = "exclusion"
+    _MINIOS_PREFS_DIR = "minios"
+
+    def initialize(self, host=None):
+        """
+        Clear test related prefs on the DUT before starting the test.
+
+        """
+        super(autoupdate_MiniOS, self).initialize(host=host)
+        self._remove_minios_update_prefs()
+
+    def cleanup(self):
+        super(autoupdate_MiniOS, self).cleanup()
+        self._save_extra_update_engine_logs(number_of_logs=2)
+        self._remove_minios_update_prefs()
+
+    def _remove_minios_update_prefs(self):
+        for pref in ((self._EXCLUSION_PREFS_DIR, True),
+                     (self._MINIOS_PREFS_DIR, True)):
+            self._remove_update_engine_pref(pref=pref[0], is_dir=pref[1])
+
+    def _setup_minios_update(self, has_update, with_exclusion=False):
+        # Get payload URL for the MiniOS update.
+        # We'll always need a full payload for MiniOS update.
+        payload_url = self.get_payload_for_nebraska(
+                job_repo_url=self._job_repo_url,
+                full_payload=True,
+                payload_type=self._PAYLOAD_TYPE.MINIOS,
+                public_bucket=self._running_at_desk)
+        self._payload_urls.append(payload_url)
+
+        # Test that MiniOS payload can be excluded by creating a pref file.
+        # This simulates that the update engine tries to exclude MiniOS payload
+        # after getting certain types of MiniOS update failure.
+        if with_exclusion:
+            self._create_update_engine_pref(
+                    pref_name=self._get_exclusion_name(payload_url),
+                    sub_dir=self._EXCLUSION_PREFS_DIR)
+
+        # MiniOS booting to be verified.
+        if has_update:
+            self._verifications.append(self._boot_minios)
+
+    def _setup_cros_update(self, has_update):
+        if has_update:
+            # Get payload URL for the platform (OS) update.
+            self._payload_urls.append(
+                    self.get_payload_for_nebraska(
+                            job_repo_url=self._job_repo_url,
+                            full_payload=self._full_payload,
+                            public_bucket=self._running_at_desk))
+
+        # Platform (OS) update to be verified.
+        self._verifications.append(lambda: self._verify_cros_update(
+                updated=has_update))
+
+    def _setup_dlc_update(self):
+        # Payload URLs for sample-dlc, a test DLC package.
+        # We'll always need a full payload for DLC installation,
+        # and optionally a delta payload if required by the test.
+        self._payload_urls.append(
+                self.get_payload_for_nebraska(
+                        job_repo_url=self._job_repo_url,
+                        full_payload=True,
+                        payload_type=self._PAYLOAD_TYPE.DLC,
+                        public_bucket=self._running_at_desk))
+        if not self._full_payload:
+            self._payload_urls.append(
+                    self.get_payload_for_nebraska(
+                            job_repo_url=self._job_repo_url,
+                            full_payload=False,
+                            payload_type=self._PAYLOAD_TYPE.DLC,
+                            public_bucket=self._running_at_desk))
+
+        # DLC update to be verified.
+        self._verifications.append(self._verify_dlc_update)
+
+    def _verify_cros_update(self, updated):
+        if updated:
+            # Verify the platform (OS) update completed successfully.
+            kernel_utils.verify_boot_expectations(self._inactive_cros,
+                                                  host=self._host)
+            rootfs_hostlog, _ = self._create_hostlog_files()
+            self.verify_update_events(self._FORCED_UPDATE, rootfs_hostlog)
+        else:
+            # Verify the Platform (OS) boot expectation unchanged.
+            kernel_utils.verify_boot_expectations(self._active_cros,
+                                                  host=self._host)
+
+    def _verify_dlc_update(self):
+        # Verify the DLC update completed successfully.
+        dlc_rootfs_hostlog, _ = self._create_dlc_hostlog_files()
+        logging.info('Checking DLC update events')
+        self.verify_update_events(
+                self._FORCED_UPDATE,
+                dlc_rootfs_hostlog[self._dlc_util._SAMPLE_DLC_ID])
+        # Verify the DLC was successfully installed.
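+        # Assumption: removing the preloaded copy and installing with a fake
+        # omaha_url checks that the DLC image on disk came from the update,
+        # since nothing new can be downloaded.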
+        self._dlc_util.remove_preloaded(self._dlc_util._SAMPLE_DLC_ID)
+        self._dlc_util.install(self._dlc_util._SAMPLE_DLC_ID,
+                               omaha_url='fake_url')
+        if not self._dlc_util.is_installed(self._dlc_util._SAMPLE_DLC_ID):
+            raise error.TestFail('Test DLC was not installed.')
+
+    def run_once(self,
+                 full_payload=True,
+                 job_repo_url=None,
+                 with_os=False,
+                 with_dlc=False,
+                 with_exclusion=False,
+                 running_at_desk=False):
+        """
+        Tests that we can successfully update MiniOS along with the OS.
+
+        @param full_payload: True for full OS and DLC payloads. False for delta.
+        @param job_repo_url: This is used to figure out the current build and
+                             the devserver to use. The test will read this
+                             from a host argument when run in the lab.
+        @param with_os: True for MiniOS update along with Platform (OS)
+                             update. False for MiniOS only update.
+        @param with_dlc: True for MiniOS update with Platform (OS) and DLC.
+                             False for turning off DLC update.
+        @param with_exclusion: True for excluding MiniOS payload.
+        @param running_at_desk: Indicates test is run locally from a
+                                workstation.
+
+        """
+        self._full_payload = full_payload
+        self._job_repo_url = job_repo_url
+        self._running_at_desk = running_at_desk
+
+        if not with_os and with_dlc:
+            logging.info("DLC only updates with the platform (OS), "
+                         "automatically set with_os to True.")
+            with_os = True
+
+        # Record DUT state before the update.
+        self._active_cros, self._inactive_cros \
+            = kernel_utils.get_kernel_state(self._host)
+        active_minios, inactive_minios \
+            = kernel_utils.get_minios_priority(self._host)
+
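+        # MiniOS only switches priority when the platform (OS) also updates
+        # and the MiniOS payload has not been excluded.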
+        minios_update = with_os and not with_exclusion
+        # MiniOS update to be verified.
+        self._verifications = [
+                lambda: kernel_utils.verify_minios_priority_after_update(
+                        self._host,
+                        expected=inactive_minios
+                        if minios_update else active_minios)
+        ]
+
+        # Get payload URLs and setup tests.
+        self._payload_urls = []
+        self._setup_cros_update(has_update=with_os)
+        if with_dlc:
+            self._setup_dlc_update()
+        self._setup_minios_update(has_update=minios_update,
+                                  with_exclusion=with_exclusion)
+
+        # Update MiniOS.
+        if with_dlc:
+            self._run_client_test_and_check_result(
+                    'autoupdate_InstallAndUpdateDLC',
+                    payload_urls=self._payload_urls,
+                    allow_failure=not with_os)
+        else:
+            self._run_client_test_and_check_result(
+                    'autoupdate_CannedOmahaUpdate',
+                    payload_url=self._payload_urls,
+                    allow_failure=not with_os)
+
+        if with_os:
+            self._host.reboot()
+
+        # Verify updates.
+        for verify in self._verifications:
+            verify()
diff --git a/server/site_tests/autoupdate_MiniOS/control.only b/server/site_tests/autoupdate_MiniOS/control.only
new file mode 100644
index 0000000..76777db
--- /dev/null
+++ b/server/site_tests/autoupdate_MiniOS/control.only
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "yuanpengni, Chromium OS"
+NAME = "autoupdate_MiniOS.only"
+TIME = "MEDIUM"
+PURPOSE = "Tests MiniOS update."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:nbr"
+PY_VERSION = 3
+DOC = """
+This tests the MiniOS update without the platform (OS) or DLC update.
+Use the `running_at_desk` arg when testing locally.
+
+Example usage:
+test_that <DUT> autoupdate_MiniOS.only --board=<board> --args="running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('autoupdate_MiniOS', host=host, full_payload=True,
+                 **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_MiniOS/control.with_dlc.delta b/server/site_tests/autoupdate_MiniOS/control.with_dlc.delta
new file mode 100644
index 0000000..e3cb93f
--- /dev/null
+++ b/server/site_tests/autoupdate_MiniOS/control.with_dlc.delta
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "yuanpengni, Chromium OS"
+NAME = "autoupdate_MiniOS.with_dlc.delta"
+TIME = "MEDIUM"
+PURPOSE = "Tests MiniOS update."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:nbr"
+PY_VERSION = 3
+DOC = """
+This tests the MiniOS update with the platform (OS) and DLC update.
+Use the `running_at_desk` arg when testing locally.
+
+Example usage:
+test_that <DUT> autoupdate_MiniOS.with_dlc.delta --board=<board> --args="running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('autoupdate_MiniOS', host=host, full_payload=False,
+                 with_dlc=True, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_MiniOS/control.with_dlc.full b/server/site_tests/autoupdate_MiniOS/control.with_dlc.full
new file mode 100644
index 0000000..9a90be8
--- /dev/null
+++ b/server/site_tests/autoupdate_MiniOS/control.with_dlc.full
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "yuanpengni, Chromium OS"
+NAME = "autoupdate_MiniOS.with_dlc.full"
+TIME = "MEDIUM"
+PURPOSE = "Tests MiniOS update."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:nbr"
+PY_VERSION = 3
+DOC = """
+This tests the MiniOS update with the platform (OS) and DLC update.
+Use the `running_at_desk` arg when testing locally.
+
+Example usage:
+test_that <DUT> autoupdate_MiniOS.with_dlc.full --board=<board> --args="running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('autoupdate_MiniOS', host=host, full_payload=True,
+                 with_dlc=True, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_MiniOS/control.with_exclusion.delta b/server/site_tests/autoupdate_MiniOS/control.with_exclusion.delta
new file mode 100644
index 0000000..a9cb8fd
--- /dev/null
+++ b/server/site_tests/autoupdate_MiniOS/control.with_exclusion.delta
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "yuanpengni, Chromium OS"
+NAME = "autoupdate_MiniOS.with_exclusion.delta"
+TIME = "MEDIUM"
+PURPOSE = "Tests MiniOS update."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:nbr"
+PY_VERSION = 3
+DOC = """
+The MiniOS payload will be excluded after encountering certain types of
+failure. This tests that the MiniOS payload can be successfully excluded
+without blocking the OS or DLC update.
+Use the `running_at_desk` arg when testing locally.
+
+Example usage:
+test_that <DUT> autoupdate_MiniOS.with_exclusion.delta --board=<board> --args="running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('autoupdate_MiniOS', host=host, full_payload=False,
+                 with_dlc=True, with_exclusion=True, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_MiniOS/control.with_exclusion.full b/server/site_tests/autoupdate_MiniOS/control.with_exclusion.full
new file mode 100644
index 0000000..668cea7
--- /dev/null
+++ b/server/site_tests/autoupdate_MiniOS/control.with_exclusion.full
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "yuanpengni, Chromium OS"
+NAME = "autoupdate_MiniOS.with_exclusion.full"
+TIME = "MEDIUM"
+PURPOSE = "Tests MiniOS update."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:nbr"
+PY_VERSION = 3
+DOC = """
+The MiniOS payload will be excluded after encountering certain types of
+failure. This tests that the MiniOS payload can be successfully excluded
+without blocking the OS or DLC update.
+Use the `running_at_desk` arg when testing locally.
+
+Example usage:
+test_that <DUT> autoupdate_MiniOS.with_exclusion.full --board=<board> --args="running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('autoupdate_MiniOS', host=host, full_payload=True,
+                 with_dlc=True, with_exclusion=True, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_MiniOS/control.with_os.delta b/server/site_tests/autoupdate_MiniOS/control.with_os.delta
new file mode 100644
index 0000000..01ef15d
--- /dev/null
+++ b/server/site_tests/autoupdate_MiniOS/control.with_os.delta
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "yuanpengni, Chromium OS"
+NAME = "autoupdate_MiniOS.with_os.delta"
+TIME = "MEDIUM"
+PURPOSE = "Tests MiniOS update."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:nbr"
+PY_VERSION = 3
+DOC = """
+This tests the MiniOS update with the platform (OS) update.
+Use the `running_at_desk` arg when testing locally.
+
+Example usage:
+test_that <DUT> autoupdate_MiniOS.with_os.delta --board=<board> --args="running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('autoupdate_MiniOS', host=host, full_payload=False,
+                 with_os=True, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_MiniOS/control.with_os.full b/server/site_tests/autoupdate_MiniOS/control.with_os.full
new file mode 100644
index 0000000..4264895
--- /dev/null
+++ b/server/site_tests/autoupdate_MiniOS/control.with_os.full
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "yuanpengni, Chromium OS"
+NAME = "autoupdate_MiniOS.with_os.full"
+TIME = "MEDIUM"
+PURPOSE = "Tests MiniOS update."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:nbr"
+PY_VERSION = 3
+DOC = """
+This tests the MiniOS update with the platform (OS) update.
+Use the `running_at_desk` arg when testing locally.
+
+Example usage:
+test_that <DUT> autoupdate_MiniOS.with_os.full --board=<board> --args="running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('autoupdate_MiniOS', host=host, full_payload=True,
+                 with_os=True, **args_dict)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_NonBlockingOOBEUpdate/autoupdate_NonBlockingOOBEUpdate.py b/server/site_tests/autoupdate_NonBlockingOOBEUpdate/autoupdate_NonBlockingOOBEUpdate.py
index 0c7a341..c512fb6 100644
--- a/server/site_tests/autoupdate_NonBlockingOOBEUpdate/autoupdate_NonBlockingOOBEUpdate.py
+++ b/server/site_tests/autoupdate_NonBlockingOOBEUpdate/autoupdate_NonBlockingOOBEUpdate.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -19,7 +20,10 @@
         super(autoupdate_NonBlockingOOBEUpdate, self).cleanup()
 
 
-    def run_once(self, full_payload=True, job_repo_url=None):
+    def run_once(self,
+                 full_payload=True,
+                 job_repo_url=None,
+                 running_at_desk=False):
         """
         Tries an autoupdate during ChromeOS OOBE without a deadline.
 
@@ -32,11 +36,15 @@
                              out the current build and the devserver to use.
                              The test will read this from a host argument
                              when run in the lab.
+        @param running_at_desk: Indicates test is run locally from a
+                                workstation.
 
         """
         tpm_utils.ClearTPMOwnerRequest(self._host)
-        payload_url = self.get_payload_for_nebraska(job_repo_url,
-                                                    full_payload=full_payload)
+        payload_url = self.get_payload_for_nebraska(
+                job_repo_url,
+                full_payload=full_payload,
+                public_bucket=running_at_desk)
         self._run_client_test_and_check_result('autoupdate_StartOOBEUpdate',
                                                payload_url=payload_url,
                                                full_payload=full_payload,
diff --git a/server/site_tests/autoupdate_NonBlockingOOBEUpdate/control.delta b/server/site_tests/autoupdate_NonBlockingOOBEUpdate/control.delta
index 3675f5f..3ffe25d 100644
--- a/server/site_tests/autoupdate_NonBlockingOOBEUpdate/control.delta
+++ b/server/site_tests/autoupdate_NonBlockingOOBEUpdate/control.delta
@@ -10,12 +10,13 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-oobe, suite:au_fsi"
+PY_VERSION = 3
 DOC = """
 This tests an update is available at OOBE but there is no deadline set.
 
 Kick it off locally using this command:
 
-test_that <hostname>.cros autoupdate_NonBlockingOOBEUpdate.delta --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+test_that <hostname>.cros autoupdate_NonBlockingOOBEUpdate.delta --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages' running_at_desk=True"
 
 """
 
diff --git a/server/site_tests/autoupdate_OmahaResponse/autoupdate_OmahaResponse.py b/server/site_tests/autoupdate_OmahaResponse/autoupdate_OmahaResponse.py
index 4170ece..c8976e4 100644
--- a/server/site_tests/autoupdate_OmahaResponse/autoupdate_OmahaResponse.py
+++ b/server/site_tests/autoupdate_OmahaResponse/autoupdate_OmahaResponse.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/autoupdate_OmahaResponse/control.backoff_disabled.full b/server/site_tests/autoupdate_OmahaResponse/control.backoff_disabled.full
index d31e23d..9043708 100644
--- a/server/site_tests/autoupdate_OmahaResponse/control.backoff_disabled.full
+++ b/server/site_tests/autoupdate_OmahaResponse/control.backoff_disabled.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 Test we can update when we fail the first attempt and backoff is disabled.
 """
diff --git a/server/site_tests/autoupdate_OmahaResponse/control.backoff_enabled.full b/server/site_tests/autoupdate_OmahaResponse/control.backoff_enabled.full
index d2fc6f5..966979f 100644
--- a/server/site_tests/autoupdate_OmahaResponse/control.backoff_enabled.full
+++ b/server/site_tests/autoupdate_OmahaResponse/control.backoff_enabled.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 Test we cannot update when we fail the first attempt and backoff is enabled.
 """
diff --git a/server/site_tests/autoupdate_OmahaResponse/control.bad_metadata_size.full b/server/site_tests/autoupdate_OmahaResponse/control.bad_metadata_size.full
index 30c562f..7d9be95 100644
--- a/server/site_tests/autoupdate_OmahaResponse/control.bad_metadata_size.full
+++ b/server/site_tests/autoupdate_OmahaResponse/control.bad_metadata_size.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 Tests that an update fails when the metadata size value is invalid.
 """
diff --git a/server/site_tests/autoupdate_OmahaResponse/control.bad_sha256.full b/server/site_tests/autoupdate_OmahaResponse/control.bad_sha256.full
index 12475ae..8b8f1b5 100644
--- a/server/site_tests/autoupdate_OmahaResponse/control.bad_sha256.full
+++ b/server/site_tests/autoupdate_OmahaResponse/control.bad_sha256.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 Tests that an update fails when the SHA256 value is invalid.
 """
diff --git a/server/site_tests/autoupdate_OmahaResponse/control.local b/server/site_tests/autoupdate_OmahaResponse/control.local
index 86a1834..dca5789 100644
--- a/server/site_tests/autoupdate_OmahaResponse/control.local
+++ b/server/site_tests/autoupdate_OmahaResponse/control.local
@@ -9,6 +9,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 DOC = """
 Change the various arguments to the test to test different things in the
 omaha response. Example:
diff --git a/server/site_tests/autoupdate_OmahaResponse/control.url_switch.full b/server/site_tests/autoupdate_OmahaResponse/control.url_switch.full
index 4ca8b98..3456a47 100644
--- a/server/site_tests/autoupdate_OmahaResponse/control.url_switch.full
+++ b/server/site_tests/autoupdate_OmahaResponse/control.url_switch.full
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 This tests that we move to the second Url when we fail with the first Url.
 """
diff --git a/server/site_tests/autoupdate_P2P/autoupdate_P2P.py b/server/site_tests/autoupdate_P2P/autoupdate_P2P.py
index 536ccb1..bf54fcb 100644
--- a/server/site_tests/autoupdate_P2P/autoupdate_P2P.py
+++ b/server/site_tests/autoupdate_P2P/autoupdate_P2P.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,7 +11,7 @@
 from autotest_lib.client.common_lib import utils
 from autotest_lib.server.cros.dynamic_suite import tools
 from autotest_lib.server.cros.update_engine import update_engine_test
-from chromite.lib import retry_util
+from autotest_lib.utils.frozen_chromite.lib import retry_util
 
 class autoupdate_P2P(update_engine_test.UpdateEngineTest):
     """Tests a peer to peer (P2P) autoupdate."""
@@ -22,6 +23,8 @@
     _P2P_FIRST_ATTEMPT_TIMESTAMP_PREF = 'p2p-first-attempt-timestamp'
     _P2P_NUM_ATTEMPTS_PREF = 'p2p-num-attempts'
 
+    _CLIENT_TEST_WITH_DLC = 'autoupdate_InstallAndUpdateDLC'
+    _CLIENT_TEST = 'autoupdate_CannedOmahaUpdate'
 
     def cleanup(self):
         logging.info('Disabling p2p_update on hosts.')
@@ -35,6 +38,14 @@
         super(autoupdate_P2P, self).cleanup()
 
 
+    def _cleanup_dlcs(self):
+        """Remove all DLCs on the DUT before starting the test. """
+        installed = self._dlc_util.list().keys()
+        for dlc_id in installed:
+            self._dlc_util.purge(dlc_id)
+        # DLCs may be present but not mounted, so they won't be purged above.
+        self._dlc_util.purge(self._dlc_util._SAMPLE_DLC_ID, ignore_status=True)
+
     def _enable_p2p_update_on_hosts(self):
         """Turn on the option to enable p2p updating on both DUTs."""
         logging.info('Enabling p2p_update on hosts.')
@@ -97,34 +108,46 @@
         self._hosts[1].run('echo 0 > %s' % current_url_index)
 
 
-    def _update_dut(self, host, update_url):
+    def _update_dut(self, host, tag, interactive=True):
         """
         Update the first DUT normally and save the update engine logs.
 
         @param host: the host object for the first DUT.
-        @param update_url: the url to call for updating the DUT.
+        @param tag: A string label (e.g. 'host1' or 'host2') used to tag the
+                    client test results for this DUT.
+        @param interactive: Whether the update should be interactive.
 
         """
-        host.reboot()
+        self._host = host
+        self._set_active_p2p_host(host)
+        self._dlc_util.set_run(self._host.run)
+        if self._with_dlc:
+            self._cleanup_dlcs()
+        self._host.reboot()
         # Sometimes update request is lost if checking right after reboot so
         # make sure update_engine is ready.
-        self._set_active_p2p_host(self._hosts[0])
         utils.poll_for_condition(condition=self._is_update_engine_idle,
                                  desc='Waiting for update engine idle')
 
-        logging.info('Updating first DUT with a regular update.')
-        try:
-            self._check_for_update(update_url, wait_for_completion=True)
-        except error.AutoservRunError:
-            logging.exception('Failed to update the first DUT.')
-            raise error.TestFail('Updating the first DUT failed. Error: %s.' %
-                                 self._get_last_error_string())
-        finally:
-            logging.info('Saving update engine logs to results dir.')
-            host.get_file(self._UPDATE_ENGINE_LOG,
-                          os.path.join(self.resultsdir,
-                                       'update_engine.log_first_dut'))
+        logging.info('Updating %s (%s).', host, tag)
+        if self._with_dlc:
+            self._run_client_test_and_check_result(
+                    self._CLIENT_TEST_WITH_DLC,
+                    payload_urls=self._payload_urls,
+                    tag=tag,
+                    interactive=interactive)
+        else:
+            self._run_client_test_and_check_result(
+                    self._CLIENT_TEST,
+                    payload_url=self._payload_urls[0],
+                    tag=tag,
+                    interactive=interactive)
+        update_engine_log = self._get_update_engine_log()
+        # TODO(ahassani): There is probably a race condition here. We should
+        # wait after the reboot to make sure p2p is up before kicking off the
+        # second host's update check. Otherwise, the second one is not going to
+        # use p2p updates. Another solution would be to not reboot here at all
+        # since p2p server is still running.
         host.reboot()
+        return update_engine_log
 
 
     def _check_p2p_still_enabled(self, host):
@@ -150,40 +173,6 @@
                                  exception=error.TestFail(err))
 
 
-    def _update_via_p2p(self, host, update_url):
-        """
-        Update the second DUT via P2P from the first DUT.
-
-        We perform a non-interactive update and update_engine will check
-        for other devices that have P2P enabled and download from them instead.
-
-        @param host: The second DUT.
-        @param update_url: the url to call for updating the DUT.
-
-        """
-        host.reboot()
-        self._set_active_p2p_host(self._hosts[1])
-        utils.poll_for_condition(condition=self._is_update_engine_idle,
-                                 desc='Waiting for update engine idle')
-
-        logging.info('Updating second host via p2p.')
-        try:
-            self._check_for_update(update_url, wait_for_completion=True,
-                                   interactive=False)
-        except error.AutoservRunError:
-            logging.exception('Failed to update the second DUT via P2P.')
-            raise error.TestFail('Failed to update the second DUT. Error: %s' %
-                                 self._get_last_error_string())
-        finally:
-            logging.info('Saving update engine logs to results dir.')
-            host.get_file(self._UPDATE_ENGINE_LOG,
-                          os.path.join(self.resultsdir,
-                                       'update_engine.log_second_dut'))
-
-        # Return the update_engine logs so we can check for p2p entries.
-        return self._get_update_engine_log()
-
-
     def _check_for_p2p_entries_in_update_log(self, update_engine_log):
         """
         Ensure that the second DUT actually updated via P2P.
@@ -272,33 +261,62 @@
                                                                      build2))
 
 
-    def run_once(self, job_repo_url=None, too_many_attempts=False,
-                 deadline_expired=False):
+    def run_once(self,
+                 companions,
+                 job_repo_url=None,
+                 too_many_attempts=False,
+                 deadline_expired=False,
+                 with_dlc=False,
+                 running_at_desk=False):
         """
         Testing autoupdate via P2P.
 
+        @param companions: List of other DUTs used in the test.
         @param job_repo_url: A url linking to autotest packages.
         @param too_many_attempts: True to test what happens with too many
                                   failed update attempts.
         @param deadline_expired: True to test what happens when the deadline
                                  between peers has expired
+        @param with_dlc: Whether to include sample-dlc in the test.
+        @param running_at_desk: True to stage files on public bucket. Useful
+                                for debugging locally.
 
         """
+        self._hosts = [self._host, companions[0]]
         logging.info('Hosts for this test: %s', self._hosts)
 
         self._too_many_attempts = too_many_attempts
         self._deadline_expired = deadline_expired
+        self._with_dlc = with_dlc
+
         self._verify_hosts(job_repo_url)
         self._enable_p2p_update_on_hosts()
         self._setup_second_hosts_prefs()
 
-        # Get an N-to-N delta payload update url to use for the test.
-        # P2P updates are very slow so we will only update with a delta payload.
-        update_url = self.get_update_url_for_test(job_repo_url,
-                                                  full_payload=False)
+        # Get an N-to-N delta payload update url to use for the test. P2P
+        # updates are very slow so we will only update with a delta payload. In
+        # addition we need the full DLC payload so we can perform its install.
+        self._payload_urls = [
+                self.get_payload_for_nebraska(job_repo_url,
+                                              full_payload=False,
+                                              public_bucket=running_at_desk)
+        ]
+        if self._with_dlc:
+            self._payload_urls += [
+                    self.get_payload_for_nebraska(
+                            job_repo_url=job_repo_url,
+                            full_payload=True,
+                            payload_type=self._PAYLOAD_TYPE.DLC,
+                            public_bucket=running_at_desk),
+                    self.get_payload_for_nebraska(
+                            job_repo_url=job_repo_url,
+                            full_payload=False,
+                            payload_type=self._PAYLOAD_TYPE.DLC,
+                            public_bucket=running_at_desk)
+            ]
 
         # The first device just updates normally.
-        self._update_dut(self._hosts[0], update_url)
+        self._update_dut(self._hosts[0], 'host1')
         self._check_p2p_still_enabled(self._hosts[0])
 
         if too_many_attempts or deadline_expired:
@@ -306,5 +324,7 @@
             self._reset_current_url_index()
 
         # Update the 2nd DUT with the delta payload via P2P from the 1st DUT.
-        update_engine_log = self._update_via_p2p(self._hosts[1], update_url)
+        update_engine_log = self._update_dut(self._hosts[1],
+                                             'host2',
+                                             interactive=False)
         self._check_for_p2p_entries_in_update_log(update_engine_log)
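
Editorial aside (not part of the patch): the reworked run_once above builds
self._payload_urls in a fixed order that _update_dut then consumes. The helper
below is a standalone sketch of that ordering only; the function name and the
placeholder URL strings are invented for illustration and are not autotest APIs.

def build_payload_urls(platform_delta, dlc_full=None, dlc_delta=None,
                       with_dlc=False):
    """Mirror the ordering used above: platform delta first, then DLC payloads."""
    urls = [platform_delta]
    if with_dlc:
        # A full DLC payload is needed for the install, plus the delta used
        # for the DLC update itself.
        urls += [dlc_full, dlc_delta]
    return urls

assert build_payload_urls('os.delta') == ['os.delta']
assert build_payload_urls('os.delta', 'dlc.full', 'dlc.delta',
                          with_dlc=True) == ['os.delta', 'dlc.full', 'dlc.delta']
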
diff --git a/server/site_tests/autoupdate_P2P/control.deadline_expired.delta b/server/site_tests/autoupdate_P2P/control.deadline_expired.delta
index 350663d..628f0d9 100644
--- a/server/site_tests/autoupdate_P2P/control.deadline_expired.delta
+++ b/server/site_tests/autoupdate_P2P/control.deadline_expired.delta
@@ -9,25 +9,19 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:au-perbuild"
-SYNC_COUNT = 2
+ATTRIBUTES = "suite:au-p2p"
+PY_VERSION = 3
 DOC = """
 This tests that autoupdate between two devices via peer to peer is disabled
 when the deadline has expired.
 
 """
 
-from autotest_lib.server import utils as server_utils
+def run(machine):
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    job.run_test('autoupdate_P2P', host=host, companions=companions,
+                 deadline_expired=True)
 
-def run(ntuple):
-    host_list = []
-    for machine in ntuple:
-        host_list.append(hosts.create_host(machine))
-    job.run_test('autoupdate_P2P', hosts=host_list, deadline_expired=True)
+job.parallel_simple(run, machines)
 
-ntuples, failures = server_utils.form_ntuples_from_machines(machines,
-                                                            SYNC_COUNT)
-
-# Use log=False in parallel_simple to avoid an exception in setting up
-# the incremental parser when SYNC_COUNT > 1.
-job.parallel_simple(run, ntuples, log=False)
diff --git a/server/site_tests/autoupdate_P2P/control.delta b/server/site_tests/autoupdate_P2P/control.delta
index a8b35d1..438867a 100644
--- a/server/site_tests/autoupdate_P2P/control.delta
+++ b/server/site_tests/autoupdate_P2P/control.delta
@@ -9,8 +9,8 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:au-perbuild"
-SYNC_COUNT = 2
+ATTRIBUTES = "suite:au-p2p"
+PY_VERSION = 3
 JOB_RETRIES = 2
 DOC = """
 This tests autoupdate between two devices via peer to peer.
@@ -22,17 +22,9 @@
 
 """
 
-from autotest_lib.server import utils as server_utils
+def run(machine):
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    job.run_test('autoupdate_P2P', host=host, companions=companions)
 
-def run(ntuple):
-    host_list = []
-    for machine in ntuple:
-        host_list.append(hosts.create_host(machine))
-    job.run_test('autoupdate_P2P', hosts=host_list)
-
-ntuples, failures = server_utils.form_ntuples_from_machines(machines,
-                                                            SYNC_COUNT)
-
-# Use log=False in parallel_simple to avoid an exception in setting up
-# the incremental parser when SYNC_COUNT > 1.
-job.parallel_simple(run, ntuples, log=False)
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_P2P/control.local b/server/site_tests/autoupdate_P2P/control.local
index 1731cf7..c52bb1d 100644
--- a/server/site_tests/autoupdate_P2P/control.local
+++ b/server/site_tests/autoupdate_P2P/control.local
@@ -9,7 +9,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-
+PY_VERSION = 3
 
 DOC = """
 This tests autoupdate between two devices via peer to peer.
@@ -19,10 +19,11 @@
 to do that setup manually before running the test.
 
 Steps to run from your desk:
-1. Lease two DUTs in the lab. They need to be the same make/model.
+1. Lease two DUTs in the lab. They need to be the same make/model and near
+each other.
 2. ssh into both and make sure they can ping each other.
 3. Provision both DUTs to the same build. You can use cros flash to do this.
-4. Pass two args to test_that: host2, job_repo_url.
+4. Pass --companion_hosts and job_repo_url (via --args) to test_that, as in
+the example command below.
 
 A job_repo_url is a URL passed to each test that points to the autotest
 packages. The AU tests use that url to figure out the devserver to use for the
@@ -30,7 +31,7 @@
 example is:
 http://100.115.245.194:8082/static/caroline-release/R82-12887.0.0/autotest/packages
 
-test_that <hostname>.cros autoupdate_P2P.local --args="host2=<hostname>.cros job_repo_url=<job_repo_url>"
+test_that <hostname>.cros autoupdate_P2P.local --companion_hosts="<hostname2>.cros" --args="job_repo_url=<job_repo_url> with_dlc=True"
 
 """
 
@@ -40,11 +41,12 @@
 args_dict = utils.args_to_dict(args)
 
 def run(machine):
-    if all(arg in args_dict for arg in ['host2', 'job_repo_url']):
+    if 'job_repo_url' in args_dict:
         host1 = hosts.create_host(machine)
-        host2 = hosts.create_host(args_dict.get('host2'))
-        job.run_test('autoupdate_P2P', hosts=[host1, host2], job_repo_url=args_dict.get('job_repo_url'))
+        companions = hosts.create_companion_hosts(companion_hosts)
+        # args_to_dict() values are strings, so bool() would treat 'False' as
+        # True; compare the string explicitly instead.
+        with_dlc = args_dict.get('with_dlc', 'False').lower() == 'true'
+        job.run_test('autoupdate_P2P', host=host1, companions=companions,
+                     running_at_desk=True,
+                     job_repo_url=args_dict.get('job_repo_url'),
+                     with_dlc=with_dlc)
     else:
-        logging.info('Please provide both "host2" and "job_repo_url" args to test_that.')
+        logging.info('Please provide both "companion_hosts" and "job_repo_url" args to test_that.')
 
 job.parallel_simple(run, machines)
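
Editorial aside (not part of the patch): utils.args_to_dict() returns string
values parsed from --args, which is why the control file above compares the
string instead of calling bool(). A standalone illustration of the pitfall,
with a throwaway dict standing in for the parsed args:

# Throwaway stand-in for utils.args_to_dict(args); real values are strings.
args_dict = {'with_dlc': 'False'}

# bool() on any non-empty string is True, so 'False' would enable the flag.
assert bool(args_dict.get('with_dlc', False)) is True

# Explicit string comparison behaves as intended.
with_dlc = args_dict.get('with_dlc', 'False').lower() == 'true'
assert with_dlc is False
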
diff --git a/server/site_tests/autoupdate_P2P/control.too_many_attempts.delta b/server/site_tests/autoupdate_P2P/control.too_many_attempts.delta
index 752ec8d..d60ab9f 100644
--- a/server/site_tests/autoupdate_P2P/control.too_many_attempts.delta
+++ b/server/site_tests/autoupdate_P2P/control.too_many_attempts.delta
@@ -9,25 +9,19 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:au-perbuild"
-SYNC_COUNT = 2
+ATTRIBUTES = "suite:au-p2p"
+PY_VERSION = 3
 DOC = """
 This tests that autoupdate between two devices via peer to peer is disabled
 when they have tried more than 10 times.
 
 """
 
-from autotest_lib.server import utils as server_utils
+def run(machine):
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    job.run_test('autoupdate_P2P', host=host, companions=companions,
+                 too_many_attempts=True)
 
-def run(ntuple):
-    host_list = []
-    for machine in ntuple:
-        host_list.append(hosts.create_host(machine))
-    job.run_test('autoupdate_P2P', hosts=host_list, too_many_attempts=True)
+job.parallel_simple(run, machines)
 
-ntuples, failures = server_utils.form_ntuples_from_machines(machines,
-                                                            SYNC_COUNT)
-
-# Use log=False in parallel_simple to avoid an exception in setting up
-# the incremental parser when SYNC_COUNT > 1.
-job.parallel_simple(run, ntuples, log=False)
diff --git a/server/site_tests/autoupdate_P2P/control.with_dlc b/server/site_tests/autoupdate_P2P/control.with_dlc
new file mode 100644
index 0000000..e3e5621
--- /dev/null
+++ b/server/site_tests/autoupdate_P2P/control.with_dlc
@@ -0,0 +1,31 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dhaddock, Chromium OS"
+NAME = "autoupdate_P2P.with_dlc"
+PURPOSE = "Test autoupdate via peer to peer(P2P)."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:au-p2p"
+JOB_RETRIES = 2
+PY_VERSION = 3
+DOC = """
+This tests autoupdate between two devices via peer to peer.
+
+Since the test uses two different DUTs in the lab together, it is more
+difficult to debug at your desk.
+
+Use control.local to run this test locally.
+
+"""
+
+def run(machine):
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    job.run_test('autoupdate_P2P', host=host, companions=companions,
+                 with_dlc=True)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Periodic/autoupdate_Periodic.py b/server/site_tests/autoupdate_Periodic/autoupdate_Periodic.py
index 2c7f111..5ad7d4e 100644
--- a/server/site_tests/autoupdate_Periodic/autoupdate_Periodic.py
+++ b/server/site_tests/autoupdate_Periodic/autoupdate_Periodic.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -42,6 +43,7 @@
 
         # Verify the update completed successfully.
         self._host.reboot()
-        rootfs_hostlog, _ = self._create_hostlog_files()
+        rootfs_hostlog, _ = self._create_hostlog_files(
+                ignore_event_rootfs=True)
         self.verify_update_events(self._CUSTOM_LSB_VERSION, rootfs_hostlog)
         kernel_utils.verify_boot_expectations(inactive, host=self._host)
diff --git a/server/site_tests/autoupdate_Periodic/control.delta b/server/site_tests/autoupdate_Periodic/control.delta
index 77e2531..8b5f250 100644
--- a/server/site_tests/autoupdate_Periodic/control.delta
+++ b/server/site_tests/autoupdate_Periodic/control.delta
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 Tests that the DUT makes periodic update checks. On test images, periodic
 checks are disabled but by using a pref (test-update-check-interval-timeout) we
diff --git a/server/site_tests/autoupdate_RejectDuplicateUpdate/autoupdate_RejectDuplicateUpdate.py b/server/site_tests/autoupdate_RejectDuplicateUpdate/autoupdate_RejectDuplicateUpdate.py
new file mode 100644
index 0000000..699269a
--- /dev/null
+++ b/server/site_tests/autoupdate_RejectDuplicateUpdate/autoupdate_RejectDuplicateUpdate.py
@@ -0,0 +1,58 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib.cros import kernel_utils
+from autotest_lib.server.cros.update_engine import update_engine_test
+
+
+class autoupdate_RejectDuplicateUpdate(update_engine_test.UpdateEngineTest):
+    """Tests to see if duplicate consecutive updates are rejected. """
+    version = 1
+
+    def cleanup(self):
+        """Clean up the test state."""
+        # Disable repeated updates using update_engine_client.
+        self._set_feature(feature_name=self._REPEATED_UPDATES_FEATURE,
+                          enable=False)
+
+    def run_once(self, job_repo_url=None, running_at_desk=False):
+        """
+        @param job_repo_url: A url pointing to the devserver where the autotest
+            package for this build should be staged.
+        @param running_at_desk: Indicates test is run locally from a
+                                workstation.
+
+        """
+        # Enable repeated updates using update_engine_client.
+        self._set_feature(feature_name=self._REPEATED_UPDATES_FEATURE,
+                          enable=True)
+        # Get a payload to use for the test.
+        payload_url = self.get_payload_for_nebraska(
+                job_repo_url,
+                full_payload=False,
+                public_bucket=running_at_desk)
+
+        # Record DUT state before the update.
+        _, inactive = kernel_utils.get_kernel_state(self._host)
+
+        # Perform an update.
+        self._run_client_test_and_check_result(self._CLIENT_TEST,
+                                               payload_url=payload_url)
+
+        self._wait_for_update_to_complete()
+
+        # Perform another update. This is a duplicate update and should fail.
+        self._run_client_test_and_check_result(self._CLIENT_TEST,
+                                               payload_url=payload_url,
+                                               allow_failure=True)
+
+        # Check logs to make sure it failed with the correct error.
+        self._check_update_engine_log_for_entry(
+                'finished OmahaRequestAction with code '
+                'ErrorCode::kRepeatedFpFromOmahaError',
+                raise_error=True)
+
+        # Verify the first update can still complete and reboot.
+        self._host.reboot()
+        kernel_utils.verify_boot_expectations(inactive, host=self._host)
diff --git a/server/site_tests/autoupdate_RejectDuplicateUpdate/control.full b/server/site_tests/autoupdate_RejectDuplicateUpdate/control.full
new file mode 100644
index 0000000..067c7c4
--- /dev/null
+++ b/server/site_tests/autoupdate_RejectDuplicateUpdate/control.full
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "vyshu@google.com, Chromium OS"
+NAME = "autoupdate_RejectDuplicateUpdate.full"
+TIME = "MEDIUM"
+PURPOSE = "Tests duplicate repeated update with Nebraska."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
+DOC = """
+This tests a repeated update by enabling the repeated updates feature.
+It sends two updates with the same fingerprint value. The client side should
+reject this as a duplicate update.
+
+We supply a job_repo_url to the test when running locally. In the lab this will
+be passed directly. The job_repo_url is a link to the autotest packages on a
+devserver. The test uses it to find the correct payload to use.
+
+Example usage:
+test_that autoupdate_RejectDuplicateUpdate.full <DUT> --board=<board> --args="job_repo_url='http://<devserver IP>:8082/static/<board>-release/RXX-XXXXX.X.X/autotest/packages', running_at_desk=True"
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('autoupdate_RejectDuplicateUpdate', host=host, **args_dict)
+
+job.parallel_simple(run, machines)
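
Editorial aside (not part of the patch): the DOC above describes the shape of
a job_repo_url. The snippet below only illustrates that URL layout, reusing
the caroline example from the autoupdate_P2P control.local earlier in this
change; it is not how the test itself resolves payloads.

import re

# Example job_repo_url taken from the autoupdate_P2P control.local DOC string.
JOB_REPO_URL = ('http://100.115.245.194:8082/static/'
                'caroline-release/R82-12887.0.0/autotest/packages')

# The devserver path encodes the builder target and the build version.
m = re.search(r'/static/([^/]+)/([^/]+)/autotest/packages', JOB_REPO_URL)
assert m is not None
assert m.group(1) == 'caroline-release'
assert m.group(2) == 'R82-12887.0.0'
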
diff --git a/server/site_tests/autoupdate_Rollback/autoupdate_Rollback.py b/server/site_tests/autoupdate_Rollback/autoupdate_Rollback.py
index 7f3f746..9509d2f 100755
--- a/server/site_tests/autoupdate_Rollback/autoupdate_Rollback.py
+++ b/server/site_tests/autoupdate_Rollback/autoupdate_Rollback.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -69,13 +70,14 @@
 
         """
         self._powerwash_attempted = False
-        update_url = self.get_update_url_for_test(job_repo_url, stateful=True)
+        payload_url = self.get_payload_for_nebraska(job_repo_url)
         active, inactive = kernel_utils.get_kernel_state(self._host)
         logging.info('Initial device state: active kernel %s, '
                      'inactive kernel %s.', active, inactive)
 
         logging.info('Performing an update.')
-        self._check_for_update(update_url, wait_for_completion=True)
+        self._run_client_test_and_check_result('autoupdate_CannedOmahaUpdate',
+                                               payload_url=payload_url)
         self._host.reboot()
         # Ensure the update completed successfully.
         rootfs_hostlog, _ = self._create_hostlog_files()
diff --git a/server/site_tests/autoupdate_Rollback/control b/server/site_tests/autoupdate_Rollback/control
index 2e66648..92ddbd2 100644
--- a/server/site_tests/autoupdate_Rollback/control
+++ b/server/site_tests/autoupdate_Rollback/control
@@ -10,10 +10,11 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:bvt-installer"
+ATTRIBUTES = "suite:bvt-installer, suite:satlab-qual-bvt-installer"
+PY_VERSION = 3
 
 DOC = """
-This is a rollback test for Chrome OS releases. It first updates a machine and
+This is a rollback test for ChromeOS releases. It first updates a machine and
 then invokes rollback to boot from its previously booted partition. It tests
 rollback using the update_engine_client rather than manipulating the UI.
 
diff --git a/server/site_tests/autoupdate_Rollback/control.powerwash_before_rollback b/server/site_tests/autoupdate_Rollback/control.powerwash_before_rollback
index be073e4..34e7b04 100644
--- a/server/site_tests/autoupdate_Rollback/control.powerwash_before_rollback
+++ b/server/site_tests/autoupdate_Rollback/control.powerwash_before_rollback
@@ -10,10 +10,11 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:bvt-installer"
+ATTRIBUTES = "suite:bvt-installer, suite:satlab-qual-bvt-installer"
+PY_VERSION = 3
 
 DOC = """
-This is a rollback test for Chrome OS releases. It first updates a machine and
+This is a rollback test for ChromeOS releases. It first updates a machine and
 then invokes rollback to boot from its previously booted partition.
 
 It then powerwashes the device.
diff --git a/server/site_tests/autoupdate_StatefulCompatibility/autoupdate_StatefulCompatibility.py b/server/site_tests/autoupdate_StatefulCompatibility/autoupdate_StatefulCompatibility.py
deleted file mode 100755
index da2fc4c..0000000
--- a/server/site_tests/autoupdate_StatefulCompatibility/autoupdate_StatefulCompatibility.py
+++ /dev/null
@@ -1,318 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-import re
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils as cutils
-from autotest_lib.client.common_lib.cros import kernel_utils
-from autotest_lib.client.cros import constants
-from autotest_lib.server import utils
-from autotest_lib.server.cros import provisioner
-from autotest_lib.server.cros.update_engine import update_engine_test
-
-
-class autoupdate_StatefulCompatibility(update_engine_test.UpdateEngineTest):
-    """Tests autoupdating to/from kernel-next images."""
-    version = 1
-
-    _LOGIN_TEST = 'login_LoginSuccess'
-
-
-    def cleanup(self):
-        """Save the logs from stateful_partition's preserved/log dir."""
-        stateful_preserved_logs = os.path.join(self.resultsdir,
-                                               '~stateful_preserved_logs')
-        os.makedirs(stateful_preserved_logs)
-        self._host.get_file(
-                constants.AUTOUPDATE_PRESERVE_LOG,
-                stateful_preserved_logs,
-                safe_symlinks=True,
-                preserve_perm=False)
-        super(autoupdate_StatefulCompatibility, self).cleanup()
-
-
-    def _get_target_uri(self, target_board, version_regex, max_image_checks):
-        """Checks through all valid builds for the latest green build
-
-        @param target_board: the name of the board to test against
-        @param version_regex: the version regex to test against
-        @param max_image_checks: the number of images to check for stability
-
-        @return the URI for the most recent passing build to test against
-
-        """
-        candidate_uris = self._get_candidate_uris(target_board, version_regex)
-        candidate_uris = candidate_uris[:max_image_checks]
-
-        metadata_uri = None
-        most_recent_version = None
-        most_recent_channel = None
-
-        for uri in candidate_uris:
-            uri = self._to_real_path(uri)
-            metadata = self._get_metadata_dict(uri)
-            chan = self._get_image_channel(metadata)
-            version = cutils.parse_gs_uri_version(uri)
-
-            if not self._stateful_payload_exists(chan, target_board, version):
-                continue
-
-            # Keep track of the first image found that has an available payload
-            if most_recent_version is None:
-                most_recent_version = version
-                most_recent_channel = chan
-
-            if self._is_build_green(metadata):
-                metadata_uri = uri
-                break
-
-        if most_recent_version is None:
-            raise error.TestError('Could not find an acceptable image for %s.' %
-                                  target_board)
-
-        if metadata_uri is None:
-            logging.warning('No image met quality criteria. Checked %d images',
-                            len(candidate_uris))
-            # At this point we've checked as many images as possible up to the
-            # specified maximum, and none of them have qualified with our pass/
-            # fail criteria. Any image is as good as any other, so we might as
-            # well continue with the most recent image. The only other option is
-            # to fail this test
-            version = most_recent_version
-            chan = most_recent_channel
-
-        payload = self._get_payload_uri(chan, target_board, version)
-        if payload is not None:
-            return payload
-
-        raise error.TestError('Could not find an acceptable payload for %s.' %
-                              target_board)
-
-
-    def _get_candidate_uris(self, target_board, version_regex):
-        """Retrieves a list of GS URIs that match the target board and version
-
-        @param target_board: the name of the board to get image URIs for
-        @param version_regex: a regex passed to 'gsutil ls' to match GS URIs
-
-        @return: a list of boards that match the target_board and version_regex
-
-        """
-        logging.info('Going to find candidate image for %s.', target_board)
-
-        payload_uri = 'gs://chromeos-image-archive/%s-release/%s/' % (
-            target_board, version_regex)
-
-        candidate_uris = utils.system_output('gsutil ls -d %s' %
-                                             payload_uri).splitlines()
-        candidate_uris.sort(cutils.compare_gs_uri_build_versions, reverse=True)
-        return candidate_uris
-
-
-    @staticmethod
-    def _to_real_path(uri):
-        """Converts a target image URI from the form LATEST-* to R##-*
-
-        Target images can be referenced by matching against LATEST-* rather than
-        the actual milestone. The LATEST-* files are actually text files that
-        contain the name of the actual GS bucket that contains the image data.
-
-        @param uri: the GS bucket URI of the LATEST-* bucket path
-
-        @return the URI of the dereferenced GS bucket
-
-        """
-        latest_pos = uri.find('LATEST')
-        if latest_pos < 0:
-            # Path is not in the form 'gs://../../LATEST-*'
-            return uri
-
-        relative_path = utils.system_output('gsutil cat %s' % uri).strip()
-        return uri[:latest_pos] + relative_path
-
-
-    @staticmethod
-    def _stateful_payload_exists(channel, target_board, version):
-        """Checks that stateful.tgz exists for the given board and version
-
-        @param channel: The release channel (canary, dev, beta, or stable)
-        @param target_board: The name of the target board
-        @param version: A string containing the build version ('12345.6.7')
-
-        @return True if stateful.gz exists for this image, otherwise False
-
-        """
-
-        if channel is None:
-            return False
-
-        channel_payload_uri = 'gs://chromeos-releases/%s-channel/%s/%s' % (
-                channel, target_board, version)
-        exists = not utils.system('gsutil -q stat %s/stateful.tgz' %
-                                  channel_payload_uri, ignore_status=True)
-        return exists
-
-
-    @staticmethod
-    def _get_payload_uri(channel, board, version):
-        """Gets the location of the update payload for staging on the dev server
-
-        For a given release channel, board, and release version this will return
-        the location for the full signed payload (as opposed to delta payloads).
-
-        @param channel: The release channel (canary, dev, beta, or stable)
-        @param board: The name of the target board
-        @param version: A string containing the build version ('12345.6.7')
-
-        @return The GS URI for the full payload to be staged on the devserver
-
-        """
-        payload_uri = 'gs://chromeos-releases/%s-channel/%s/%s/payloads' % (
-            channel, board, version)
-
-        payloads = utils.system_output('gsutil ls -d %s/*%s*full_test*' % (
-            payload_uri, version)).splitlines()
-        logging.debug('Payloads: %s', str(payloads))
-
-        for payload in payloads:
-            if re.match('.*-[a-z|0-9]{32}$', payload) is not None:
-                return payload
-        return None
-
-
-    @staticmethod
-    def _get_metadata_dict(payload_uri):
-        """Fetches the build metadata from the associated GS bucket
-
-        @param payload_uri: the URI for the GS bucket the image is from.
-
-        @return a dictionary of values representing the metadata json values
-
-        """
-        metadata_uri = payload_uri.strip('/') + '/metadata.json'
-        logging.info('Going to fetch image metadata (%s)', metadata_uri)
-        cat_result = utils.run('gsutil cat %s' % metadata_uri,
-                               ignore_status=True)
-
-        if cat_result.exit_status != 0:
-            logging.info('''Couldn't find metadata at %s.''', metadata_uri)
-            return None
-
-        metadata = json.loads(cat_result.stdout)
-        return metadata
-
-
-    @staticmethod
-    def _get_image_channel(metadata):
-        """Returns the release channel from the image metadata
-
-        @param metadata: A dict of values representing the image metadata
-
-        @return the release channel for the image (canary, dev, beta, stable)
-
-        """
-
-        all_channels = ['Stable', 'Beta', 'Dev', 'Canary']
-
-        if 'tags' not in metadata:
-            return None
-
-        # The metadata tags contains the status for paygen stages on all
-        # channels paygen was run for. This should tell us what channels the
-        # payload is available under.
-        # These tags use the form 'stage_status:PaygenBuild<Channel>'
-        paygen_tags = [t for t in metadata['tags'] if 'PaygenBuild' in t]
-
-        # Find all the channels paygen was run for on this image
-        channels = [c for c in all_channels for t in paygen_tags if c in t]
-
-        if not channels:
-            return None
-
-        # The channels list contains some subset of the elements in the
-        # all_channels list, presented in the same order. If both the Beta and
-        # Stable channels are available, this will return "stable", for example.
-        return channels[0].lower()
-
-
-    @staticmethod
-    def _is_build_green(metadata):
-        """Inspects the image metadata to see if the build is "green"
-
-        @param metadata A dict of values representing the image metadata
-
-        @return True if the image appears to be good enough to test against.
-
-        """
-        if metadata is None:
-            return False
-
-        if not ('tags' in metadata and 'status' in metadata['tags']):
-            return False
-
-        return metadata['tags']['status'] == 'pass'
-
-
-    def run_once(self, test_conf, max_image_checks):
-        """Main entry point of the test."""
-        logging.debug("Using test_conf: %s", test_conf)
-
-        self._source_payload_uri = test_conf['source_payload_uri']
-        self._target_payload_uri = test_conf['target_payload_uri']
-
-        if self._target_payload_uri is None:
-            target_board = test_conf['target_board']
-            target_version_regex = test_conf['target_version_regex']
-
-            self._target_payload_uri = self._get_target_uri(
-                target_board, target_version_regex, max_image_checks)
-
-        logging.debug('Using source image %s', self._source_payload_uri)
-        logging.debug('Using target image %s', self._target_payload_uri)
-
-        self._autotest_devserver = self._get_devserver_for_test(
-            {'target_payload_uri': self._target_payload_uri})
-
-        self._stage_payloads(self._source_payload_uri, None)
-        self._stage_payloads(self._target_payload_uri, None)
-
-        if self._source_payload_uri is not None:
-            build_name, _ = self._get_update_parameters_from_uri(
-                    self._source_payload_uri)
-            update_url = self._autotest_devserver.get_update_url(build_name)
-            logging.info('Installing source image with update url: %s',
-                         update_url)
-
-            provisioner.ChromiumOSProvisioner(
-                    update_url, host=self._host,
-                    is_release_bucket=True).run_provision()
-
-            self._run_client_test_and_check_result(self._LOGIN_TEST,
-                                                   tag='source')
-
-        # Record the active root partition.
-        active, inactive = kernel_utils.get_kernel_state(self._host)
-        logging.info('Source active slot: %s', active)
-
-        # Get the source and target versions for verifying hostlog update events.
-        source_release = self._host.get_release_version()
-        target_release, _ = self._get_update_parameters_from_uri(
-                self._target_payload_uri)
-        target_release = target_release.split('/')[-1]
-
-        logging.debug('Going to install target image on DUT.')
-        self.update_device(
-                self._target_payload_uri, tag='target', ignore_appid=True)
-
-        # Compare hostlog events from the update to the expected ones.
-        rootfs, reboot = self._create_hostlog_files()
-        self.verify_update_events(source_release, rootfs)
-        self.verify_update_events(source_release, reboot, target_release)
-        kernel_utils.verify_boot_expectations(inactive, host=self._host)
-
-        self._run_client_test_and_check_result(self._LOGIN_TEST, tag='target')
diff --git a/server/site_tests/autoupdate_StatefulCompatibility/control b/server/site_tests/autoupdate_StatefulCompatibility/control
deleted file mode 100644
index 96bf0fc..0000000
--- a/server/site_tests/autoupdate_StatefulCompatibility/control
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "Chromium OS"
-NAME = "autoupdate_StatefulCompatibility"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This is an auto update test to check the compatibility of the stateful
-partition between updates. This is mostly intended to be used for the
-kernel-next boards to check rollback compatibility but can be used for any
-pair of images.
-
-To specify args for this test invoke this test with:
-    test_that <DUT-IPADDR> autoupdate_StatefulCompatibility --args="<ARGLIST>"
-
-where ARGLIST is a whitespace separated list of the following key=value pairs.
-Values pertaining to the test case include:
-
-    source_payload_uri          (optional) the Google Storage bucket URI for
-                                the source payload to be installed. If not
-                                specified the test will run on the currently
-                                installed image.
-    target_payload_uri          (optional) the Google Storage bucket URI for
-                                the target payload to be installed. If not
-                                specified that target_board and
-                                target_version_regex fields are required.
-    target_board                (optional) the board name for the target image
-                                to be installed.
-    target_version_regex        (optional) the version of the target image to
-                                install. The test will search through all builds
-                                that match this prefix and find the most recent
-                                image that passed all tests.
-                                Examples: 'LATEST-[0-9]*' or 'R75-*'
-
-To run locally see the instructions in the autoupdate_EndToEndTest.
-"""
-
-TEST_CONF_KEYS = (
-    'source_payload_uri', 'target_payload_uri', 'target_board',
-    'target_version_regex')
-
-
-args_dict = utils.args_to_dict(args)
-
-test_conf = {}
-for key in TEST_CONF_KEYS:
-    test_conf[key] = args_dict.get(key) or locals().get(key)
-
-def run_test(machine):
-    """Execute a test configuration on a given machine."""
-    host = hosts.create_host(machine)
-    job.run_test("autoupdate_StatefulCompatibility", host=host,
-                 test_conf=test_conf, max_image_checks=20)
-
-# Invoke parallel tests.
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/autoupdate_StatefulCompatibility/control.kernel_transition b/server/site_tests/autoupdate_StatefulCompatibility/control.kernel_transition
deleted file mode 100644
index 9c2ab6c..0000000
--- a/server/site_tests/autoupdate_StatefulCompatibility/control.kernel_transition
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server.cros.dynamic_suite import constants
-
-AUTHOR = "Chromium OS"
-ATTRIBUTES = "suite:kernel_per-build_regression"
-NAME = "autoupdate_StatefulCompatibility.kernel_transition"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This is an auto update test to check the compatibility of the stateful
-partition between updates. This control file is specifically meant for
--kernelnext boards. Any board that declares USE=kernel-transition will attempt
-to run this test. For boards in the form "<board>-kernelnext" this test will
-search for an image matching "<board>". For boards in any other form this test
-will search for an image matching "<board>-kernelnext".
-
-For example the samus board, which declares USE=kernel-transition, will target a
-samus-kernelnext image to run an upgrade test against, and the samus-kernelnext
-board, which also declares USE=kernel-transition, will target a samus image to
-downgrade test against.
-"""
-
-TEST_CONF_KEYS = (
-    'source_payload_uri', 'target_payload_uri', 'target_board',
-    'target_version_regex')
-
-test_conf = {}
-for key in TEST_CONF_KEYS:
-    test_conf[key] = None
-
-def run_test(machine):
-    """Execute a test configuration on a given machine."""
-    host = hosts.create_host(machine)
-    board = host.get_board().replace(constants.BOARD_PREFIX, '')
-
-    # <board>-kernelnext downgrade tests to <board>
-    # <board> upgrade tests to <board>-kernelnext
-    if board.endswith('kernelnext'):
-        target_board = board.replace('-kernelnext', '')
-    else:
-        target_board = board + '-kernelnext'
-
-    test_conf['target_board'] = target_board
-    test_conf['target_version_regex'] = 'LATEST-[0-9]*'
-    job.run_test("autoupdate_StatefulCompatibility", host=host,
-                 test_conf=test_conf, max_image_checks=20)
-
-# Invoke parallel tests.
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/autoupdate_WithDLC/autoupdate_WithDLC.py b/server/site_tests/autoupdate_WithDLC/autoupdate_WithDLC.py
index 7b95f14..f687832 100644
--- a/server/site_tests/autoupdate_WithDLC/autoupdate_WithDLC.py
+++ b/server/site_tests/autoupdate_WithDLC/autoupdate_WithDLC.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -30,7 +31,10 @@
         super(autoupdate_WithDLC, self).cleanup()
 
 
-    def run_once(self, full_payload=True, job_repo_url=None):
+    def run_once(self,
+                 full_payload=True,
+                 job_repo_url=None,
+                 running_at_desk=False):
         """
         Tests that we can successfully install a DLC, and then update it along
         with the OS.
@@ -40,26 +44,34 @@
                              out the current build and the devserver to use.
                              The test will read this from a host argument
                              when run in the lab.
+        @param running_at_desk: Indicates test is run locally from a
+                                workstation.
 
         """
         payload_urls = []
 
         # Payload URL for the platform (OS) update
         payload_urls.append(
-            self.get_payload_for_nebraska(job_repo_url=job_repo_url,
-                                          full_payload=full_payload))
+                self.get_payload_for_nebraska(job_repo_url=job_repo_url,
+                                              full_payload=full_payload,
+                                              public_bucket=running_at_desk))
 
         # Payload URLs for sample-dlc, a test DLC package.
         # We'll always need a full payload for DLC installation,
         # and optionally a delta payload if required by the test.
         payload_urls.append(
-            self.get_payload_for_nebraska(job_repo_url=job_repo_url,
-                                          full_payload=True, is_dlc=True))
+                self.get_payload_for_nebraska(
+                        job_repo_url=job_repo_url,
+                        full_payload=True,
+                        payload_type=self._PAYLOAD_TYPE.DLC,
+                        public_bucket=running_at_desk))
         if not full_payload:
             payload_urls.append(
-                self.get_payload_for_nebraska(
-                    job_repo_url=job_repo_url, full_payload=False,
-                    is_dlc=True))
+                    self.get_payload_for_nebraska(
+                            job_repo_url=job_repo_url,
+                            full_payload=False,
+                            payload_type=self._PAYLOAD_TYPE.DLC,
+                            public_bucket=running_at_desk))
 
         active, inactive = kernel_utils.get_kernel_state(self._host)
 
@@ -95,4 +107,4 @@
         self._dlc_util.install(self._dlc_util._SAMPLE_DLC_ID,
                                omaha_url='fake_url')
         if not self._dlc_util.is_installed(self._dlc_util._SAMPLE_DLC_ID):
-            raise error.TestFail('Dummy DLC was not installed.')
+            raise error.TestFail('Test DLC was not installed.')
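
Editorial aside (not part of the patch): the hunk above always stages a full
sample-dlc payload (needed for the DLC install) and adds a DLC delta only when
the OS update itself is a delta. The sketch below captures just that selection
rule; the function name and URL strings are illustrative, not autotest APIs.

def dlc_payloads_needed(full_payload):
    """Return which sample-dlc payloads to stage for a given OS payload type."""
    payloads = ['dlc.full']           # always required for the DLC install
    if not full_payload:
        payloads.append('dlc.delta')  # DLC update mirrors the OS delta update
    return payloads

assert dlc_payloads_needed(True) == ['dlc.full']
assert dlc_payloads_needed(False) == ['dlc.full', 'dlc.delta']
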
diff --git a/server/site_tests/autoupdate_WithDLC/control.delta b/server/site_tests/autoupdate_WithDLC/control.delta
index 8fecca1..d707eed 100644
--- a/server/site_tests/autoupdate_WithDLC/control.delta
+++ b/server/site_tests/autoupdate_WithDLC/control.delta
@@ -10,11 +10,13 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:au-perbuild"
+PY_VERSION = 3
 DOC = """
 This tests that DLC can be installed, and auto-updated together with the OS
 using a delta payload.
+Use the `running_at_desk` arg when testing locally.
 
-test_that <hostname>.cros autoupdate_WithDLC.delta --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+test_that <hostname>.cros autoupdate_WithDLC.delta --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages' running_at_desk=True"
 """
 
 from autotest_lib.client.common_lib import utils
diff --git a/server/site_tests/autoupdate_WithDLC/control.full b/server/site_tests/autoupdate_WithDLC/control.full
index 60603c2..2d8c2b2 100644
--- a/server/site_tests/autoupdate_WithDLC/control.full
+++ b/server/site_tests/autoupdate_WithDLC/control.full
@@ -9,12 +9,14 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:bvt-inline, suite:infra_qual"
+ATTRIBUTES = "suite:bvt-inline, suite:infra_qual, suite:satlab-qual-bvt-inline"
 JOB_RETRIES = 2
+PY_VERSION = 3
 DOC = """
 This tests that DLC can be installed, and auto-updated together with the OS.
+Use the `running_at_desk` arg when testing locally.
 
-test_that <hostname>.cros autoupdate_WithDLC.full --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages'"
+test_that <hostname>.cros autoupdate_WithDLC.full --args="job_repo_url='http://<IP>:<port>/static/<board>-release/RXX-XXXX.X.X/autotest/packages' running_at_desk=True"
 """
 
 from autotest_lib.client.common_lib import utils
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/bluetooth_AdapterAUHealth.py b/server/site_tests/bluetooth_AdapterAUHealth/bluetooth_AdapterAUHealth.py
index 6c3d5d2..d003f17 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/bluetooth_AdapterAUHealth.py
+++ b/server/site_tests/bluetooth_AdapterAUHealth/bluetooth_AdapterAUHealth.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,14 +6,18 @@
 """A Batch of Bluetooth AUdio Health tests"""
 
 import time
+import logging
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.cros.bluetooth.bluetooth_audio_test_data import (
-        A2DP, A2DP_LONG, AVRCP, HFP_WBS, HFP_NBS)
+        A2DP, A2DP_MEDIUM, A2DP_LONG, AVRCP, HFP_WBS, HFP_NBS, HFP_WBS_MEDIUM,
+        HFP_NBS_MEDIUM)
 from autotest_lib.server.cros.bluetooth.bluetooth_adapter_audio_tests import (
         BluetoothAdapterAudioTests)
 from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import (
         BluetoothAdapterQuickTests)
+from autotest_lib.client.cros.chameleon.audio_test_utils import (
+        has_internal_speaker)
 
 
 class bluetooth_AdapterAUHealth(BluetoothAdapterQuickTests,
@@ -43,6 +48,24 @@
         self.cleanup_bluetooth_audio(device, test_profile)
 
 
+    def au_run_test_sequence(self, device, test_sequence, test_profile):
+        """Audio procedure of running a specified test sequence.
+
+        @param device: The Bluetooth peer device.
+        @param test_sequence: The audio test sequence to run.
+        @param test_profile: Which test profile is used,
+                             A2DP, A2DP_MEDIUM, HFP_WBS or HFP_NBS.
+        """
+        # Setup the Bluetooth device.
+        self.test_reset_on_adapter()
+        self.test_bluetoothd_running()
+        self.initialize_bluetooth_audio(device, test_profile)
+
+        test_sequence()
+
+        self.cleanup_bluetooth_audio(device, test_profile)
+
+
     def _au_a2dp_test(self, test_profile, duration=0):
         """A2DP test with sinewaves on the two channels.
 
@@ -57,7 +80,9 @@
                            test_profile)
 
 
-    @test_wrapper('A2DP sinewave test', devices={'BLUETOOTH_AUDIO':1})
+    @test_wrapper('A2DP sinewave test',
+                  devices={'BLUETOOTH_AUDIO': 1},
+                  supports_floss=True)
     def au_a2dp_test(self):
         """A2DP test with sinewaves on the two channels."""
         self._au_a2dp_test(A2DP)
@@ -73,13 +98,49 @@
         self._au_a2dp_test(A2DP_LONG, duration=duration)
 
 
-    def check_wbs_capability(self):
-        """Check if the DUT supports WBS capability.
+    @test_wrapper('A2DP playback and connect test',
+                  devices={'BLUETOOTH_AUDIO': 1})
+    def au_a2dp_playback_and_connect_test(self):
+        """Connect then disconnect an A2DP device while playing stream."""
+        if not has_internal_speaker(self.host):
+            logging.info('SKIPPING TEST A2DP playback and connect test')
+            raise error.TestNAError(
+                    'The DUT does not have an internal speaker')
 
-        @raises: TestNAError if the dut does not support wbs.
-        """
-        capabilities, err = self.bluetooth_facade.get_supported_capabilities()
-        return err is None and bool(capabilities.get('wide band speech'))
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        test_profile = A2DP_MEDIUM
+        test_sequence = lambda: self.playback_and_connect(device, test_profile)
+        self.au_run_test_sequence(device, test_sequence, test_profile)
+
+
+    @test_wrapper('A2DP playback and disconnect test',
+                  devices={'BLUETOOTH_AUDIO': 1})
+    def au_a2dp_playback_and_disconnect_test(self):
+        """Check the playback stream is still alive after BT disconnected."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        test_profile = A2DP_MEDIUM
+        test_sequence = lambda: self.playback_and_disconnect(
+                device, test_profile)
+        self.au_run_test_sequence(device, test_sequence, test_profile)
+
+
+    @test_wrapper('A2DP playback back2back test',
+                  devices={'BLUETOOTH_AUDIO': 1})
+    def au_a2dp_playback_back2back_test(self):
+        """A2DP playback stream back to back test."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        test_profile = A2DP_MEDIUM
+        test_sequence = lambda: self.playback_back2back(device, test_profile)
+        self.au_run_test_sequence(device, test_sequence, test_profile)
+
+
+    @test_wrapper('A2DP pinned playback test', devices={'BLUETOOTH_AUDIO': 1})
+    def au_a2dp_pinned_playback_test(self):
+        """Pinned playback stream test."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        test_profile = A2DP
+        test_sequence = lambda: self.pinned_playback(device, test_profile)
+        self.au_run_test_sequence(device, test_sequence, test_profile)
 
 
     def au_hfp_run_method(self, device, test_method, test_profile):
@@ -90,7 +151,7 @@
         @param test_profile: which test profile is used, HFP_WBS or HFP_NBS
         """
         if self.check_wbs_capability():
-            if test_profile == HFP_WBS:
+            if test_profile in (HFP_WBS, HFP_WBS_MEDIUM):
                 # Restart cras to ensure that cras goes back to the default
                 # selection of either WBS or NBS.
                 # Any board that supports WBS should use WBS by default, unless
@@ -100,16 +161,16 @@
                 self.restart_cras()
                 # The audio team suggests a simple 2-second sleep.
                 time.sleep(2)
-            elif test_profile == HFP_NBS:
+            elif test_profile in (HFP_NBS, HFP_NBS_MEDIUM):
                 # Cras may be in either WBS or NBS mode. Disable WBS explicitly.
                 if not self.bluetooth_facade.enable_wbs(False):
                     raise error.TestError('failed to disable wbs')
         else:
-            if test_profile == HFP_WBS:
+            if test_profile in (HFP_WBS, HFP_WBS_MEDIUM):
                 # Skip the WBS test on a board that does not support WBS.
                 raise error.TestNAError(
                         'The DUT does not support WBS. Skip the test.')
-            elif test_profile == HFP_NBS:
+            elif test_profile in (HFP_NBS, HFP_NBS_MEDIUM):
                 # Restart cras to ensure that cras goes back to the default
                 # selection of either WBS or NBS.
                 # Any board that does not support WBS should use NBS by default.
@@ -123,40 +184,38 @@
                 device, lambda: test_method(device, test_profile), test_profile)
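
For quick reference, the WBS/NBS handling in au_hfp_run_method above reduces to a small decision table. The snippet below is a standalone, illustrative distillation only: the string profile names stand in for the HFP_* constants and the action labels stand in for the real facade calls (restart_cras, enable_wbs, TestNAError).

def hfp_codec_action(dut_supports_wbs, test_profile):
    # Distilled from au_hfp_run_method: the preparatory action taken
    # before the HFP test method runs (illustrative only).
    wbs_profiles = ('HFP_WBS', 'HFP_WBS_MEDIUM')
    nbs_profiles = ('HFP_NBS', 'HFP_NBS_MEDIUM')
    if dut_supports_wbs:
        if test_profile in wbs_profiles:
            return 'restart_cras'   # go back to the default codec (WBS)
        if test_profile in nbs_profiles:
            return 'disable_wbs'    # explicitly force NBS
    else:
        if test_profile in wbs_profiles:
            return 'skip'           # raise TestNAError; board cannot do WBS
        if test_profile in nbs_profiles:
            return 'restart_cras'   # default codec is already NBS
    return 'none'

assert hfp_codec_action(True, 'HFP_WBS_MEDIUM') == 'restart_cras'
assert hfp_codec_action(True, 'HFP_NBS') == 'disable_wbs'
assert hfp_codec_action(False, 'HFP_WBS') == 'skip'
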
 
 
-    # TODO(b/163284498) Realtek not ready for WBS yet pending on cras patches.
     @test_wrapper('HFP WBS sinewave test with dut as source',
-                  skip_chipsets=['Realtek-RTL8822C-USB'],
                   devices={'BLUETOOTH_AUDIO':1})
     def au_hfp_wbs_dut_as_source_test(self):
         """HFP WBS test with sinewave streaming from dut to peer."""
         device = self.devices['BLUETOOTH_AUDIO'][0]
-        self.au_hfp_run_method(device, self.test_hfp_dut_as_source, HFP_WBS)
+        self.au_hfp_run_method(device, self.hfp_dut_as_source, HFP_WBS)
 
 
-    # TODO(b/163284498) Realtek not ready for WBS yet pending on cras patches.
     @test_wrapper('HFP WBS sinewave test with dut as sink',
-                  skip_chipsets=['Realtek-RTL8822C-USB'],
                   devices={'BLUETOOTH_AUDIO':1})
     def au_hfp_wbs_dut_as_sink_test(self):
         """HFP WBS test with sinewave streaming from peer to dut."""
         device = self.devices['BLUETOOTH_AUDIO'][0]
-        self.au_hfp_run_method(device, self.test_hfp_dut_as_sink, HFP_WBS)
+        self.au_hfp_run_method(device, self.hfp_dut_as_sink, HFP_WBS)
 
 
     @test_wrapper('HFP NBS sinewave test with dut as source',
-                  devices={'BLUETOOTH_AUDIO':1})
+                  devices={'BLUETOOTH_AUDIO': 1},
+                  supports_floss=True)
     def au_hfp_nbs_dut_as_source_test(self):
         """HFP NBS test with sinewave streaming from dut to peer."""
         device = self.devices['BLUETOOTH_AUDIO'][0]
-        self.au_hfp_run_method(device, self.test_hfp_dut_as_source, HFP_NBS)
+        self.au_hfp_run_method(device, self.hfp_dut_as_source, HFP_NBS)
 
 
     @test_wrapper('HFP NBS sinewave test with dut as sink',
-                  devices={'BLUETOOTH_AUDIO':1})
+                  devices={'BLUETOOTH_AUDIO': 1},
+                  supports_floss=True)
     def au_hfp_nbs_dut_as_sink_test(self):
         """HFP NBS test with sinewave streaming from peer to dut."""
         device = self.devices['BLUETOOTH_AUDIO'][0]
-        self.au_hfp_run_method(device, self.test_hfp_dut_as_sink, HFP_NBS)
+        self.au_hfp_run_method(device, self.hfp_dut_as_sink, HFP_NBS)
 
 
     @test_wrapper('HFP WBS VISQOL test with dut as sink',
@@ -164,7 +223,7 @@
     def au_hfp_wbs_dut_as_sink_visqol_test(self):
         """HFP WBS VISQOL test with audio streaming from peer to dut"""
         device = self.devices['BLUETOOTH_AUDIO'][0]
-        self.au_hfp_run_method(device, self.test_hfp_dut_as_sink_visqol_score,
+        self.au_hfp_run_method(device, self.hfp_dut_as_sink_visqol_score,
                                HFP_WBS)
 
 
@@ -173,7 +232,7 @@
     def au_hfp_wbs_dut_as_source_visqol_test(self):
         """HFP WBS VISQOL test with audio streaming from dut to peer"""
         device = self.devices['BLUETOOTH_AUDIO'][0]
-        self.au_hfp_run_method(device, self.test_hfp_dut_as_source_visqol_score,
+        self.au_hfp_run_method(device, self.hfp_dut_as_source_visqol_score,
                                HFP_WBS)
 
     @test_wrapper('HFP NBS VISQOL test with dut as sink',
@@ -181,7 +240,7 @@
     def au_hfp_nbs_dut_as_sink_visqol_test(self):
         """HFP NBS VISQOL test with audio streaming from peer to dut"""
         device = self.devices['BLUETOOTH_AUDIO'][0]
-        self.au_hfp_run_method(device, self.test_hfp_dut_as_sink_visqol_score,
+        self.au_hfp_run_method(device, self.hfp_dut_as_sink_visqol_score,
                                HFP_NBS)
 
 
@@ -190,10 +249,64 @@
     def au_hfp_nbs_dut_as_source_visqol_test(self):
         """HFP NBS VISQOL test with audio streaming from dut to peer"""
         device = self.devices['BLUETOOTH_AUDIO'][0]
-        self.au_hfp_run_method(device, self.test_hfp_dut_as_source_visqol_score,
+        self.au_hfp_run_method(device, self.hfp_dut_as_source_visqol_score,
                                HFP_NBS)
 
 
+    @test_wrapper('HFP NBS back2back test with dut as source',
+                  devices={'BLUETOOTH_AUDIO': 1})
+    def au_hfp_nbs_dut_as_source_back2back_test(self):
+        """HFP NBS back2back test from dut to peer"""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.au_hfp_run_method(device, self.hfp_dut_as_source_back2back,
+                               HFP_NBS)
+
+
+    @test_wrapper('HFP WBS back2back test with dut as source',
+                  devices={'BLUETOOTH_AUDIO': 1})
+    def au_hfp_wbs_dut_as_source_back2back_test(self):
+        """HFP WBS back2back test from dut to peer"""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.au_hfp_run_method(device, self.hfp_dut_as_source_back2back,
+                               HFP_WBS)
+
+
+    @test_wrapper('Switch A2DP to HFP NBS test with dut as source',
+                  devices={'BLUETOOTH_AUDIO': 1})
+    def au_a2dp_to_hfp_nbs_dut_as_source_test(self):
+        """Switch A2DP to HFP NBS test with dut as source."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.au_hfp_run_method(device, self.a2dp_to_hfp_dut_as_source,
+                               HFP_NBS_MEDIUM)
+
+
+    @test_wrapper('Switch A2DP to HFP WBS test with dut as source',
+                  devices={'BLUETOOTH_AUDIO': 1})
+    def au_a2dp_to_hfp_wbs_dut_as_source_test(self):
+        """Switch A2DP to HFP WBS test with dut as source."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.au_hfp_run_method(device, self.a2dp_to_hfp_dut_as_source,
+                               HFP_WBS_MEDIUM)
+
+
+    @test_wrapper('Switch HFP NBS to A2DP test with dut as source',
+                  devices={'BLUETOOTH_AUDIO': 1})
+    def au_hfp_nbs_to_a2dp_dut_as_source_test(self):
+        """Switch HFP NBS to A2DP test with dut as source."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.au_hfp_run_method(device, self.hfp_to_a2dp_dut_as_source,
+                               HFP_NBS_MEDIUM)
+
+
+    @test_wrapper('Switch HFP WBS to A2DP test with dut as source',
+                  devices={'BLUETOOTH_AUDIO': 1})
+    def au_hfp_wbs_to_a2dp_dut_as_source_test(self):
+        """Switch HFP WBS to A2DP test with dut as source."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.au_hfp_run_method(device, self.hfp_to_a2dp_dut_as_source,
+                               HFP_WBS_MEDIUM)
+
+
     def au_run_avrcp_method(self, device, test_method):
         """avrcp procedure of running a specified test method.
 
@@ -220,10 +333,7 @@
         self.au_run_avrcp_method(device, self.test_avrcp_commands)
 
 
-    # Add 'Quick Health' to flags to exclude the test from AVL.
-    # When this test is stable enough later, remove the flags here.
-    @test_wrapper('avrcp media info test', devices={'BLUETOOTH_AUDIO':1},
-                  flags=['Quick Health'])
+    @test_wrapper('avrcp media info test', devices={'BLUETOOTH_AUDIO': 1})
     def au_avrcp_media_info_test(self):
         """AVRCP test to examine metadata propgation."""
         device = self.devices['BLUETOOTH_AUDIO'][0]
@@ -250,6 +360,16 @@
         self.au_hfp_nbs_dut_as_sink_visqol_test()
         self.au_avrcp_command_test()
         self.au_avrcp_media_info_test()
+        self.au_a2dp_playback_and_connect_test()
+        self.au_a2dp_playback_and_disconnect_test()
+        self.au_a2dp_playback_back2back_test()
+        self.au_a2dp_pinned_playback_test()
+        self.au_hfp_nbs_dut_as_source_back2back_test()
+        self.au_hfp_wbs_dut_as_source_back2back_test()
+        self.au_a2dp_to_hfp_nbs_dut_as_source_test()
+        self.au_a2dp_to_hfp_wbs_dut_as_source_test()
+        self.au_hfp_nbs_to_a2dp_dut_as_source_test()
+        self.au_hfp_wbs_to_a2dp_dut_as_source_test()
 
 
     def run_once(self,
@@ -257,7 +377,8 @@
                  num_iterations=1,
                  args_dict=None,
                  test_name=None,
-                 flag='Quick Health'):
+                 flag='Quick Health',
+                 floss=False):
         """Run the batch of Bluetooth stand health tests
 
         @param host: the DUT, usually a chromebook
@@ -269,6 +390,7 @@
         self.quick_test_init(host,
                              use_btpeer=True,
                              flag=flag,
-                             args_dict=args_dict)
+                             args_dict=args_dict,
+                             floss=floss)
         self.au_health_batch_run(num_iterations, test_name)
         self.quick_test_cleanup()
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control b/server/site_tests/bluetooth_AdapterAUHealth/control
index 93bc271..e920156 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
@@ -36,6 +37,16 @@
     - au_hfp_nbs_dut_as_sink_visqol_test
     - au_avrcp_command_test
     - au_avrcp_media_info_test
+    - au_a2dp_playback_and_connect_test
+    - au_a2dp_playback_and_disconnect_test
+    - au_a2dp_playback_back2back_test
+    - au_a2dp_pinned_playback_test
+    - au_hfp_nbs_dut_as_source_back2back_test
+    - au_hfp_wbs_dut_as_source_back2back_test
+    - au_a2dp_to_hfp_nbs_dut_as_source_test
+    - au_a2dp_to_hfp_wbs_dut_as_source_test
+    - au_hfp_nbs_to_a2dp_dut_as_source_test
+    - au_hfp_wbs_to_a2dp_dut_as_source_test
     """
 
 args_dict = utils.args_to_dict(args)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.all_floss b/server/site_tests/bluetooth_AdapterAUHealth/control.all_floss
new file mode 100644
index 0000000..0ea8285
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.all_floss
@@ -0,0 +1,59 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.all_floss'
+PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
+CRITERIA = 'Pass all health tests'
+ATTRIBUTES = '' # This control file is used to run all tests locally.
+TIME = 'SHORT'  # Approximately 15 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    A Batch of Bluetooth audio health tests. This test is written
+    as a batch of tests in order to reduce test time, since auto-test ramp
+    up time is costly. The batch is using BluetoothAdapterQuickTests wrapper
+    methods to start and end a test and a batch of tests.
+
+    This class can be called to run the entire test batch or to run a
+    specific test only
+
+    Currently, the batch contains the following tests:
+    - au_a2dp_test
+    - au_hfp_nbs_dut_as_source_test
+    - au_hfp_nbs_dut_as_sink_test
+    - au_hfp_wbs_dut_as_source_test
+    - au_hfp_wbs_dut_as_sink_test
+    - au_hfp_wbs_dut_as_source_visqol_test
+    - au_hfp_wbs_dut_as_sink_visqol_test
+    - au_hfp_nbs_dut_as_source_visqol_test
+    - au_hfp_nbs_dut_as_sink_visqol_test
+    - au_avrcp_command_test
+    - au_avrcp_media_info_test
+    - au_a2dp_playback_and_connect_test
+    - au_a2dp_playback_and_disconnect_test
+    - au_a2dp_playback_back2back_test
+    - au_a2dp_pinned_playback_test
+    - au_hfp_nbs_dut_as_source_back2back_test
+    - au_hfp_wbs_dut_as_source_back2back_test
+    - au_a2dp_to_hfp_nbs_dut_as_source_test
+    - au_a2dp_to_hfp_wbs_dut_as_source_test
+    - au_hfp_nbs_to_a2dp_dut_as_source_test
+    - au_hfp_wbs_to_a2dp_dut_as_source_test
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_long_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_long_test
index abea240..c03bf41 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_long_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_long_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_pinned_playback_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_pinned_playback_test
new file mode 100644
index 0000000..e5409b7
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_pinned_playback_test
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_a2dp_pinned_playback_test'
+PURPOSE = ('Run the A2DP pinned playback test')
+CRITERIA = 'Pass all audio chunk checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'  # Approximately 3 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    To run the A2DP pinned playback test.
+
+    The A2DP profile is used. This test does not select Bluetooth as the
+    output node; instead it plays audio pinned directly to the Bluetooth
+    device and checks whether the device receives the audio stream correctly.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
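
Every single-test control file added by this change dispatches into the batch the same way: the suffix of NAME is passed as test_name so only the matching @test_wrapper method runs, instead of the full batch. A plain-Python illustration of that string handling:

# How the per-test control files derive test_name from NAME.
NAME = 'bluetooth_AdapterAUHealth.au_a2dp_pinned_playback_test'
test_name = NAME.split('.')[1]
assert test_name == 'au_a2dp_pinned_playback_test'
# The batch then runs only the wrapper whose name matches test_name,
# rather than the whole au_health_batch_run sequence.
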
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_playback_and_connect_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_playback_and_connect_test
new file mode 100644
index 0000000..12656da
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_playback_and_connect_test
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_a2dp_playback_and_connect_test'
+PURPOSE = ('Run the A2DP playback and connect test')
+CRITERIA = 'Pass audio chunk testing before and after the BT device connects'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'  # Approximately 3 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    To run the A2DP playback and connect test.
+
+    This test first plays the audio stream and then selects the BT device as
+    the output node, checking that the stream is routed to the BT device.
+    After that, it disconnects the BT device and checks that the stream is
+    closed on it gracefully.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
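
The snippet below gives a rough, self-contained model of the connect/disconnect routing behaviour described in the DOC above; the node names and methods are illustrative only and are not the cras or autotest APIs.

class ToyAudioRouter(object):
    """Illustrative model of output-node routing (not a real API)."""
    def __init__(self):
        self.active_node = 'INTERNAL_SPEAKER'
        self.playing = False

    def start_playback(self):
        self.playing = True

    def connect_bt(self):
        # Selecting the BT node should route the already-playing stream to it.
        self.active_node = 'BLUETOOTH'

    def disconnect_bt(self):
        # The stream should close gracefully on BT and fall back, not die.
        self.active_node = 'INTERNAL_SPEAKER'

router = ToyAudioRouter()
router.start_playback()
router.connect_bt()
assert router.playing and router.active_node == 'BLUETOOTH'
router.disconnect_bt()
assert router.playing and router.active_node == 'INTERNAL_SPEAKER'
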
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_playback_and_disconnect_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_playback_and_disconnect_test
new file mode 100644
index 0000000..a1603da
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_playback_and_disconnect_test
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_a2dp_playback_and_disconnect_test'
+PURPOSE = ('Run the A2DP playback and disconnect test')
+CRITERIA = 'Audio stream is alive after BT device disconnected'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'  # Approximately 3 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    To run the A2DP playback and disconnect test.
+
+    This test keeps the stream playing and then disconnects the Bluetooth
+    device. The goal is to check that the stream is still alive after the
+    Bluetooth device is disconnected.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_playback_back2back_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_playback_back2back_test
new file mode 100644
index 0000000..c67f55e
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_playback_back2back_test
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_a2dp_playback_back2back_test'
+PURPOSE = ('Run the A2DP playback stream back2back test')
+CRITERIA = 'Pass all audio chunk checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'  # Approximately 3 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    To run the A2DP playback stream back to back test.
+
+    This test repeatedly starts and stops the playback stream and verifies
+    that the Bluetooth device receives the stream correctly.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
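
The A2DP back-to-back test above, like the HFP NBS/WBS back-to-back variants further below, follows a start/verify/stop/verify loop. The toy loop below merely makes that expected pattern concrete; FakeRecorder is an invented stand-in, not the btpeer API.

class FakeRecorder(object):
    """Illustrative stand-in for the peer's audio capture (not a real API)."""
    def __init__(self):
        self.playing = False

    def captured_chunks(self):
        # The real test inspects audio recorded on the btpeer; fake it here.
        return ['chunk'] if self.playing else []

recorder = FakeRecorder()
for _ in range(3):
    recorder.playing = True        # start playback on the DUT
    assert recorder.captured_chunks(), 'peer should receive audio while playing'
    recorder.playing = False       # stop playback
    assert not recorder.captured_chunks(), 'peer should capture nothing when stopped'
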
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_test
index 1ce2ab2..bf93a11 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterAUHealth.au_a2dp_test'
 PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e, suite:bluetooth_e2e_cq, suite:bluetooth_floss_cq'
 TIME = 'SHORT'  # Approximately 2 mins
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_test.floss b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_test.floss
new file mode 100644
index 0000000..bfc1926
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_test.floss
@@ -0,0 +1,30 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_a2dp_test.floss'
+PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
+CRITERIA = 'Pass all health tests'
+ATTRIBUTES = 'suite:bluetooth_floss,suite:bluetooth_floss_cq'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+  Tests that the A2DP profile works correctly.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_to_hfp_nbs_dut_as_source_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_to_hfp_nbs_dut_as_source_test
new file mode 100644
index 0000000..f732021
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_to_hfp_nbs_dut_as_source_test
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_a2dp_to_hfp_nbs_dut_as_source_test'
+PURPOSE = ('Run the test, A2DP to HFP NBS, with DUT as source')
+CRITERIA = 'Pass all audio file checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'  # Approximately 5 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    Run the test, A2DP to HFP NBS, with DUT as source.
+
+    The HFP_NBS_MEDIUM profile is used. This test first uses the A2DP profile
+    and plays the audio stream on the DUT, checking that the peer receives the
+    audio stream correctly. It then switches to the HFP_NBS profile and checks
+    the audio stream again.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
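
The four profile-switch tests added in this change (A2DP to HFP and HFP to A2DP, in both NBS and WBS flavours) share the same verification shape. The small helper below only spells out that ordering; the profile names come from the control files and everything else is illustrative.

def switch_test_steps(first_profile, second_profile):
    # Ordered checks performed by the profile-switch tests (illustrative).
    return [(first_profile, 'play on the DUT and verify on the peer'),
            (second_profile, 'switch profile, then verify the stream again')]

steps = switch_test_steps('A2DP', 'HFP_NBS_MEDIUM')
assert [profile for profile, _ in steps] == ['A2DP', 'HFP_NBS_MEDIUM']
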
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_to_hfp_wbs_dut_as_source_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_to_hfp_wbs_dut_as_source_test
new file mode 100644
index 0000000..f5a6031
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_a2dp_to_hfp_wbs_dut_as_source_test
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_a2dp_to_hfp_wbs_dut_as_source_test'
+PURPOSE = ('Run the test, A2DP to HFP WBS, with DUT as source')
+CRITERIA = 'Pass all audio file checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'  # Approximately 5 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    Run the test, A2DP to HFP WBS, with DUT as source.
+
+    The HFP_WBS_MEDIUM profile is used. This test first uses the A2DP profile
+    and plays the audio stream on the DUT, checking that the peer receives the
+    audio stream correctly. It then switches to the HFP_WBS profile and checks
+    the audio stream again.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_avrcp_command_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_avrcp_command_test
index ff496ca..ad24c2d 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_avrcp_command_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_avrcp_command_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_avrcp_media_info_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_avrcp_media_info_test
index 3d2f8fc..de1685e 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_avrcp_media_info_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_avrcp_media_info_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_test
index 15e9665..ab76afd 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_test.floss b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_test.floss
new file mode 100644
index 0000000..9ad3935
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_test.floss
@@ -0,0 +1,39 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_hfp_nbs_dut_as_sink_test.floss'
+PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
+CRITERIA = 'Pass the HFP NBS test'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    A Batch of Bluetooth audio health tests. This test is written
+    as a batch of tests in order to reduce test time, since auto-test ramp
+    up time is costly. The batch is using BluetoothAdapterQuickTests wrapper
+    methods to start and end a test and a batch of tests.
+
+    This class can be called to run the entire test batch or to run a
+    specific test only
+
+    This contains the following tests:
+    - au_hfp_nbs_dut_as_sink_test
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_visqol_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_visqol_test
index 27e27a3..65c1baa 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_visqol_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_sink_visqol_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterAUHealth.au_hfp_nbs_dut_as_sink_visqol_test'
 PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
 CRITERIA = 'All test files pass minimum score required for HFP NBS VISQOL test'
-ATTRIBUTES = 'suite:bluetooth'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'SHORT'  # Approximately 5 mins
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_back2back_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_back2back_test
new file mode 100644
index 0000000..f404d3a
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_back2back_test
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_hfp_nbs_dut_as_source_back2back_test'
+PURPOSE = ('Run the test, HFP NBS back to back, with DUT as source')
+CRITERIA = 'Pass all audio file checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'  # Approximately 5 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    Run the test, HFP NBS back to back, with DUT as source.
+
+    The test starts and then stops the stream playback three times. In each
+    iteration, it checks that the Bluetooth device successfully receives the
+    stream while it is playing, and also checks that the stream is absent
+    once playback stops.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_test
index d630992..67d6741 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_test.floss b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_test.floss
new file mode 100644
index 0000000..e63f268
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_test.floss
@@ -0,0 +1,39 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_hfp_nbs_dut_as_source_test.floss'
+PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
+CRITERIA = 'Pass the HFP NBS test'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    A Batch of Bluetooth audio health tests. This test is written
+    as a batch of tests in order to reduce test time, since auto-test ramp
+    up time is costly. The batch is using BluetoothAdapterQuickTests wrapper
+    methods to start and end a test and a batch of tests.
+
+    This class can be called to run the entire test batch or to run a
+    specific test only
+
+    This contains the following tests:
+    - au_hfp_nbs_dut_as_source_test
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_visqol_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_visqol_test
index a7878db..16c59c6 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_visqol_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_dut_as_source_visqol_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterAUHealth.au_hfp_nbs_dut_as_source_visqol_test'
 PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
 CRITERIA = 'All test files pass minimum score required for HFP NBS VISQOL test'
-ATTRIBUTES = 'suite:bluetooth'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'SHORT'  # Approximately 5 mins
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_to_a2dp_dut_as_source_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_to_a2dp_dut_as_source_test
new file mode 100644
index 0000000..ab1ecfe
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_nbs_to_a2dp_dut_as_source_test
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_hfp_nbs_to_a2dp_dut_as_source_test'
+PURPOSE = ('Run the test, HFP NBS to A2DP, with DUT as source')
+CRITERIA = 'Pass all audio file checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'  # Approximately 5 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    Run the test, HFP NBS to A2DP, with DUT as source.
+
+    The HFP_NBS_MEDIUM profile is used. This test first uses the HFP profile
+    and plays the audio stream on the DUT, checking that the peer receives the
+    audio stream correctly. It then switches to the A2DP profile and checks
+    the audio stream again.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_sink_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_sink_test
index cfdb898..f977abc 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_sink_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_sink_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_sink_visqol_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_sink_visqol_test
index af3a8b4..e4540ac 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_sink_visqol_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_sink_visqol_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterAUHealth.au_hfp_wbs_dut_as_sink_visqol_test'
 PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
 CRITERIA = 'All test files pass minimum score required for HFP WBS VISQOL test'
-ATTRIBUTES = 'suite:bluetooth'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'SHORT'  # Approximately 5 mins
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_back2back_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_back2back_test
new file mode 100644
index 0000000..08a2f86
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_back2back_test
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_hfp_wbs_dut_as_source_back2back_test'
+PURPOSE = ('Run the test, HFP WBS back to back, with DUT as source')
+CRITERIA = 'Pass all audio file checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'  # Approximately 5 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    Run the test, HFP WBS back to back, with DUT as source.
+
+    The test starts and then stops the stream playback three times. In each
+    iteration, it checks that the Bluetooth device successfully receives the
+    stream while it is playing, and also checks that the stream is absent
+    once playback stops.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_test
index 8b6c8cc..e67078f 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterAUHealth.au_hfp_wbs_dut_as_source_test'
 PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
 CRITERIA = 'Pass the HFP WBS test'
-ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e, suite:bluetooth_e2e_cq'
 TIME = 'SHORT'  # Approximately 2 mins
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_visqol_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_visqol_test
index eacd160..6e3ff93 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_visqol_test
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_dut_as_source_visqol_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterAUHealth.au_hfp_wbs_dut_as_source_visqol_test'
 PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
 CRITERIA = 'All test files pass minimum score required for HFP WBS VISQOL test'
-ATTRIBUTES = 'suite:bluetooth'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'SHORT'  # Approximately 5 mins
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_to_a2dp_dut_as_source_test b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_to_a2dp_dut_as_source_test
new file mode 100644
index 0000000..0a222a9
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.au_hfp_wbs_to_a2dp_dut_as_source_test
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAUHealth.au_hfp_wbs_to_a2dp_dut_as_source_test'
+PURPOSE = ('Run the test, HFP WBS to A2DP, with DUT as source')
+CRITERIA = 'Pass all audio file checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'  # Approximately 5 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    Run the test, HFP WBS to A2DP, with DUT as source.
+
+    The HFP_WBS_MEDIUM profile is used. This test first uses the HFP profile
+    and plays the audio stream on the DUT, checking that the peer receives the
+    audio stream correctly. It then switches to the A2DP profile and checks
+    the audio stream again.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAUHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAUHealth/control.stress b/server/site_tests/bluetooth_AdapterAUHealth/control.stress
index 31d8c92..9861268 100644
--- a/server/site_tests/bluetooth_AdapterAUHealth/control.stress
+++ b/server/site_tests/bluetooth_AdapterAUHealth/control.stress
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterAUHealth.stress'
 PURPOSE = ('Batch of Bluetooth Classic Audio health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth_stress'
+ATTRIBUTES = ''
 TIME = 'SHORT'  # an iteration takes approximately 4 mins
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth audio health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/bluetooth_AdapterAdvHealth.py b/server/site_tests/bluetooth_AdapterAdvHealth/bluetooth_AdapterAdvHealth.py
index d5a98d4..e954cf2 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/bluetooth_AdapterAdvHealth.py
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/bluetooth_AdapterAdvHealth.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -5,7 +6,8 @@
 """A Batch of Bluetooth advertising tests"""
 
 from autotest_lib.server.cros.bluetooth.bluetooth_adapter_tests import (
-        SUSPEND_POWER_DOWN_CHIPSETS)
+        SUSPEND_POWER_DOWN_CHIPSETS, SUSPEND_RESET_IF_NO_PEER_CHIPSETS,
+        SUSPEND_POWER_DOWN_MODELS)
 from autotest_lib.server.cros.bluetooth import advertisements_data
 from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import \
      BluetoothAdapterQuickTests
@@ -28,10 +30,14 @@
     batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
 
 
-    # TODO(b/150897528) - Scarlet Dru loses firmware around suspend
+    # TODO(b/192419579) - RTL8822 and 8852 can't advertise 4 connectable
+    #                     advertisements.
     @test_wrapper('Multiple LE advertising test',
-                  skip_models=['dru', 'druwl'],
-                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
+                  skip_chipsets=[
+                          'Realtek-RTL8822C-USB', 'Realtek-RTL8822C-UART',
+                          'Realtek-RTL8852A-USB'
+                  ],
+                  skip_common_errors=True)
     def adv_multiple_advertising_test(self):
         """Run all test cases for multiple advertisements."""
         self.run_le_advertising_test(
@@ -48,11 +54,15 @@
 
 
     # TODO(b/150897528) - Scarlet Dru loses firmware around suspend
+    # TODO(b/182172118) - Winky has suspend test issues
+    # TODO(b/189813813) - Scarlet Dumo loses firmware around suspend
     @test_wrapper('Suspend resume LE advertising test',
-                  skip_models=['dru', 'druwl'],
-                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
+                  skip_models=SUSPEND_POWER_DOWN_MODELS + ['winky'],
+                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS +
+                  SUSPEND_RESET_IF_NO_PEER_CHIPSETS,
+                  skip_common_errors=True)
     def adv_suspend_resume_advertising_test(self):
-        """Run all test cases for multiple advertisements."""
+        """Run all test cases for advertisements involving suspend resume."""
         self.run_le_advertising_test(
             self.host, advertisements_data.ADVERTISEMENTS,
             'suspend_resume', num_iterations=1)
@@ -60,7 +70,7 @@
 
     @test_wrapper('Reboot LE advertising test')
     def adv_reboot_advertising_test(self):
-        """Run all test cases for single advertisements."""
+        """Run all test cases for advertisements involving reboot."""
         self.run_le_advertising_test(
             self.host, advertisements_data.ADVERTISEMENTS,
             'reboot', num_iterations=1)
@@ -87,13 +97,19 @@
     def adv_broadcast_test(self):
         """Verify broadcast advertising capability"""
 
-        self.test_case_broadcast()
+        self.run_le_advertising_test(self.host,
+                                     advertisements_data.ADVERTISEMENTS,
+                                     'broadcast',
+                                     num_iterations=1)
 
     # TODO(b/150897528) - Scarlet Dru loses firmware around suspend
+    # TODO(b/189813813) - Scarlet Dumo loses firmware around suspend
     @test_wrapper('Advertising suspend peer test',
                   devices={'BLE_MOUSE': 1},
-                  skip_models=['dru', 'druwl'],
-                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
+                  skip_models=SUSPEND_POWER_DOWN_MODELS,
+                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS +
+                  SUSPEND_RESET_IF_NO_PEER_CHIPSETS,
+                  skip_common_errors=True)
     def adv_suspend_peer_test(self):
         """Verify advertising around suspend from a peer"""
 
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/control b/server/site_tests/bluetooth_AdapterAdvHealth/control
index 90ba789..613b7f9 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/control
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/control
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth advertising health tests. This test is written as a
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_broadcast_test b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_broadcast_test
index f334508..98a8b05 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_broadcast_test
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_broadcast_test
@@ -6,13 +6,14 @@
 NAME = 'bluetooth_AdapterAdvHealth.adv_broadcast_test'
 PURPOSE = 'Test broadcast advertising capabilities.'
 CRITERIA = 'Adapter should advertise with correct parameters.'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone'
 TIME = 'SHORT'  # ~2 minutes on hatch
 MAX_RESULT_SIZE_KB = 128000
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
 Validate that we can register an advertisement that uses the 'broadcast' mode,
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_multiple_advertising_test b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_multiple_advertising_test
index d3c5a75..633b739 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_multiple_advertising_test
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_multiple_advertising_test
@@ -6,13 +6,14 @@
 NAME = 'bluetooth_AdapterAdvHealth.adv_multiple_advertising_test'
 PURPOSE = 'Test bluetooth adapter advertising.'
 CRITERIA = 'Adapter should advertise with correct parameters.'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone'
 TIME = 'LONG'  # It takes about 15 minutes on Eve.
 MAX_RESULT_SIZE_KB = 512000
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
 This test case verifies that the Bluetooth adapter of the DUT can
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_nearby_test b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_nearby_test
index f765b5b..02f30de 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_nearby_test
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_nearby_test
@@ -6,13 +6,14 @@
 NAME = 'bluetooth_AdapterAdvHealth.adv_nearby_test'
 PURPOSE = 'Test basic Nearby share advertising requirements.'
 CRITERIA = 'Adapter should advertise with correct parameters.'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone'
 TIME = 'SHORT'  # ~2 minutes on hatch
 MAX_RESULT_SIZE_KB = 128000
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
 Validate that we can register an advertisement with the specific format required
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_peer_test b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_peer_test
index bec524f..f277cd4 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_peer_test
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_peer_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterAdvHealth.adv_peer_test'
 PURPOSE = ('batch of Bluetooth advertising tests')
 CRITERIA = 'Adapter should advertise with correct parameters'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'SHORT'  # ~3 minutes on Sarien
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     Advertising peer test.
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_reboot_advertising_test b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_reboot_advertising_test
index 60a6cf9..6f5ca3c 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_reboot_advertising_test
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_reboot_advertising_test
@@ -6,13 +6,14 @@
 NAME = 'bluetooth_AdapterAdvHealth.adv_reboot_advertising_test'
 PURPOSE = 'Test bluetooth adapter advertising.'
 CRITERIA = 'Adapter should advertise with correct parameters.'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone, suite:py3-beta'
 TIME = 'SHORT'  # ~3 minutes on Eve
 MAX_RESULT_SIZE_KB = 128000
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
 This test case verifies that the Bluetooth adapter of the DUT can
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_single_advertising_test b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_single_advertising_test
index 1ed1cb5..576c972 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_single_advertising_test
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_single_advertising_test
@@ -6,13 +6,14 @@
 NAME = 'bluetooth_AdapterAdvHealth.adv_single_advertising_test'
 PURPOSE = 'Test bluetooth adapter advertising.'
 CRITERIA = 'Adapter should advertise with correct parameters.'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone'
 TIME = 'SHORT'  # ~4 minutes on Eve
 MAX_RESULT_SIZE_KB = 128000
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
 This test case verifies that the Bluetooth adapter of the DUT can
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_suspend_peer_test b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_suspend_peer_test
index 957a8e9..48a2682 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_suspend_peer_test
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_suspend_peer_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterAdvHealth.adv_suspend_peer_test'
 PURPOSE = ('Tests advertising during suspend/resume')
 CRITERIA = 'Advertising should not be detected during suspend'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'SHORT'  # ~3 minutes on Sarien
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     Advertising suspend peer test.
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_suspend_resume_advertising_test b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_suspend_resume_advertising_test
index c587f39..03fe905 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_suspend_resume_advertising_test
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/control.adv_suspend_resume_advertising_test
@@ -6,13 +6,14 @@
 NAME = 'bluetooth_AdapterAdvHealth.adv_suspend_resume_advertising_test'
 PURPOSE = 'Test bluetooth adapter advertising across suspend/resume.'
 CRITERIA = 'Adapter should advertise with correct parameters.'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone'
 TIME = 'SHORT'  # ~3 minutes on Eve
 MAX_RESULT_SIZE_KB = 128000
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
 This test case verifies that the Bluetooth adapter of the DUT can
diff --git a/server/site_tests/bluetooth_AdapterAdvHealth/control.stress b/server/site_tests/bluetooth_AdapterAdvHealth/control.stress
index dd8ca1f..c4feb17 100644
--- a/server/site_tests/bluetooth_AdapterAdvHealth/control.stress
+++ b/server/site_tests/bluetooth_AdapterAdvHealth/control.stress
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth advertising tests. This test is written as a
diff --git a/server/cros/packet_generation/__init__.py b/server/site_tests/bluetooth_AdapterAdvMonitor/__init__.py
similarity index 100%
copy from server/cros/packet_generation/__init__.py
copy to server/site_tests/bluetooth_AdapterAdvMonitor/__init__.py
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/bluetooth_AdapterAdvMonitor.py b/server/site_tests/bluetooth_AdapterAdvMonitor/bluetooth_AdapterAdvMonitor.py
index 40724b2..a299681 100644
--- a/server/site_tests/bluetooth_AdapterAdvMonitor/bluetooth_AdapterAdvMonitor.py
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/bluetooth_AdapterAdvMonitor.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,8 +9,9 @@
      import BluetoothAdapterQuickTests
 from autotest_lib.server.cros.bluetooth.bluetooth_adapter_adv_monitor_tests \
      import BluetoothAdapterAdvMonitorTests
-from autotest_lib.server.cros.bluetooth.bluetooth_adapter_tests \
-     import SUSPEND_POWER_DOWN_CHIPSETS
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_tests import (
+        SUSPEND_POWER_DOWN_CHIPSETS, SUSPEND_RESET_IF_NO_PEER_CHIPSETS,
+        SUSPEND_POWER_DOWN_MODELS)
 
 
 class bluetooth_AdapterAdvMonitor(BluetoothAdapterQuickTests,
@@ -35,25 +37,32 @@
         self.advmon_test_monitor_validity()
 
 
-    # TODO(b/150897528) - Dru loses firmware around suspend, which causes bluez
-    #                     removes all the monitors.
-    @test_wrapper('Pattern Filter Tests',
-                  devices={'BLE_KEYBOARD':1, 'BLE_MOUSE':1},
-                  skip_models=['dru'],
-                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
-    def advmon_pattern_filter_tests(self):
-        """Tests monitor functionality for pattern filter only."""
-        self.advmon_test_pattern_filter_only()
-
-
-    @test_wrapper('Single Client Tests',
+    @test_wrapper('Single Client Tests - Pattern Filter',
                   devices={'BLE_KEYBOARD':1, 'BLE_MOUSE':1})
-    def advmon_single_client_tests(self):
-        """Tests monitor functionality for single client."""
-        self.advmon_test_pattern_filter_1()
-        self.advmon_test_rssi_filter_1()
-        self.advmon_test_rssi_filter_2()
-        self.advmon_test_rssi_filter_3()
+    def advmon_pattern_filter_tests(self):
+        """Tests pattern filter for single client."""
+        self.advmon_test_pattern_filter()
+
+
+    @test_wrapper('Single Client Tests - RSSI Filter Range',
+                  devices={'BLE_KEYBOARD':1, 'BLE_MOUSE':1})
+    def advmon_rssi_filter_range_tests(self):
+        """Tests RSSI filter range for single client."""
+        self.advmon_test_rssi_filter_range()
+
+
+    @test_wrapper('Single Client Tests - RSSI Filter Multi Peers',
+                  devices={'BLE_KEYBOARD':1, 'BLE_MOUSE':1})
+    def advmon_rssi_filter_multi_peers_tests(self):
+        """Tests RSSI filter with multiple peers for single client."""
+        self.advmon_test_rssi_filter_multi_peers()
+
+
+    @test_wrapper('Single Client Tests - RSSI Filter Reset',
+                  devices={'BLE_KEYBOARD':1, 'BLE_MOUSE':1})
+    def advmon_rssi_filter_reset_tests(self):
+        """Tests RSSI filter reset for single client."""
+        self.advmon_test_rssi_filter_reset()
 
 
     @test_wrapper('Multi Client Tests',
@@ -63,8 +72,13 @@
         self.advmon_test_multi_client()
 
 
+    # Remove flags=['Quick Health'] when this test is migrated to stable suite.
     @test_wrapper('Foreground Background Combination Tests',
-                  devices={'BLE_KEYBOARD':1, 'BLE_MOUSE':1})
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1
+                  },
+                  flags=['Quick Health'])
     def advmon_fg_bg_combination_tests(self):
         """Tests foreground and background scanning working together."""
         self.advmon_test_fg_bg_combination()
@@ -77,8 +91,10 @@
                           'BLE_KEYBOARD': 1,
                           'BLE_MOUSE': 1
                   },
-                  skip_models=['dru'],
-                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
+                  skip_models=SUSPEND_POWER_DOWN_MODELS,
+                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS +
+                  SUSPEND_RESET_IF_NO_PEER_CHIPSETS,
+                  flags=['Quick Health'])
     def advmon_suspend_resume_tests(self):
         """Tests working of background scanning with suspend resume."""
         self.advmon_test_suspend_resume()
@@ -88,14 +104,15 @@
     #                     removes all the monitors.
     @test_wrapper('Interleave Scan Tests',
                   devices={'BLE_MOUSE': 1},
-                  skip_models=['dru'],
-                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
-    def advmon_interleaved_scan(self):
+                  skip_models=SUSPEND_POWER_DOWN_MODELS,
+                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS +
+                  SUSPEND_RESET_IF_NO_PEER_CHIPSETS)
+    def advmon_interleaved_scan_tests(self):
         """Tests interleave scan."""
         self.advmon_test_interleaved_scan()
 
     @batch_wrapper('Advertisement Monitor API')
-    def advmon_batch_run(self, num_iterations=1, test_name=None):
+    def advmon_health_batch_run(self, num_iterations=1, test_name=None):
         """Run the Advertisement Monitor test batch or a specific given test.
            The wrapper of this method is implemented in batch_decorator.
            Using the decorator, a test batch method can implement only its
@@ -110,11 +127,13 @@
         """
         self.advmon_monitor_health_tests()
         self.advmon_pattern_filter_tests()
-        self.advmon_single_client_tests()
+        self.advmon_rssi_filter_range_tests()
+        self.advmon_rssi_filter_multi_peers_tests()
+        self.advmon_rssi_filter_reset_tests()
         self.advmon_multi_client_tests()
         self.advmon_fg_bg_combination_tests()
         self.advmon_suspend_resume_tests()
-        self.advmon_interleaved_scan()
+        self.advmon_interleaved_scan_tests()
 
     def run_once(self,
                  host,
@@ -136,5 +155,5 @@
                              use_btpeer=peer_required,
                              flag=flag,
                              args_dict=args_dict)
-        self.advmon_batch_run(num_iterations, test_name)
+        self.advmon_health_batch_run(num_iterations, test_name)
         self.quick_test_cleanup()
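
The hunk above renames the batch entry point to advmon_health_batch_run and splits the old single-client method into separately wrapped advmon_*_tests methods, all driven by the test_wrapper/batch_wrapper decorators. The following is a minimal, standalone sketch of that dispatch pattern; every name below, including QuickTestBatchSketch, is an illustrative stand-in, not the BluetoothAdapterQuickTests implementation.

class QuickTestBatchSketch(object):
    """Registers wrapped tests and runs them all, or one by name."""

    _registry = {}

    @classmethod
    def test_wrapper(cls, pretty_name):
        """Register a test function under its own function name."""
        def decorator(fn):
            cls._registry[fn.__name__] = (pretty_name, fn)
            return fn
        return decorator

    def batch_run(self, num_iterations=1, test_name=None):
        """Run one named test, or every registered test, per iteration."""
        selected = [test_name] if test_name else list(self._registry)
        for _ in range(num_iterations):
            for name in selected:
                pretty_name, fn = self._registry[name]
                print('Running %s (%s)' % (name, pretty_name))
                fn(self)


@QuickTestBatchSketch.test_wrapper('Monitor Health Tests')
def advmon_monitor_health_tests(suite):
    print('  ...monitor health checks...')


@QuickTestBatchSketch.test_wrapper('Single Client Tests - Pattern Filter')
def advmon_pattern_filter_tests(suite):
    print('  ...pattern filter checks...')


if __name__ == '__main__':
    batch = QuickTestBatchSketch()
    batch.batch_run()                                          # run everything
    batch.batch_run(test_name='advmon_pattern_filter_tests')   # run one test

Calling batch_run() with no test_name executes every registered test, while passing a name runs exactly one, which is how the per-test control files below drive the same batch.
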
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control b/server/site_tests/bluetooth_AdapterAdvMonitor/control
index 755c80a..f0ccd99 100644
--- a/server/site_tests/bluetooth_AdapterAdvMonitor/control
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/control
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth Advertisement Monitor API tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_fg_bg_combination_tests b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_fg_bg_combination_tests
index 47564b6..cb2939a 100644
--- a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_fg_bg_combination_tests
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_fg_bg_combination_tests
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """ Tests foreground and background scanning working together. """
 
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_interleaved_scan b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_interleaved_scan
deleted file mode 100644
index 8964af5..0000000
--- a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_interleaved_scan
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'chromeos-bluetooth'
-NAME = 'bluetooth_AdapterAdvMonitor.advmon_interleaved_scan'
-PURPOSE = ('batch of Bluetooth Advertisement Monitor tests')
-CRITERIA = 'All tests should pass'
-TIME = 'MEDIUM'
-TEST_CATEGORY = 'Functional'
-ATTRIBUTES = 'suite:bluetooth_flaky'
-TEST_CLASS = 'bluetooth'
-TEST_TYPE = 'server'
-DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
-
-DOC = """ Tests interleave scan. """
-
-args_dict = utils.args_to_dict(args)
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('bluetooth_AdapterAdvMonitor',
-                 host=host,
-                 num_iterations=1,
-                 args_dict=args_dict,
-                 test_name=NAME.split('.')[1])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_interleaved_scan_tests b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_interleaved_scan_tests
new file mode 100644
index 0000000..10c102d
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_interleaved_scan_tests
@@ -0,0 +1,31 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAdvMonitor.advmon_interleaved_scan_tests'
+PURPOSE = ('Batch of Bluetooth Advertisement Monitor tests')
+CRITERIA = 'All tests should pass'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """ Tests interleave scan. """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAdvMonitor',
+                 host=host,
+                 num_iterations=1,
+                 args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
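
Each of these per-test control files reuses the batch control logic and selects a single wrapped test by passing test_name=NAME.split('.')[1]. A two-line illustration of how that sub-test name is derived from the control file's NAME (plain Python, nothing autotest-specific):

NAME = 'bluetooth_AdapterAdvMonitor.advmon_interleaved_scan_tests'
print(NAME.split('.')[1])  # -> 'advmon_interleaved_scan_tests'
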
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_monitor_health_tests b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_monitor_health_tests
index 68c40bb..cb6262e 100644
--- a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_monitor_health_tests
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_monitor_health_tests
@@ -10,10 +10,11 @@
 CRITERIA = 'All tests should pass'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """ Tests advertisement monitor object health. """
 
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_multi_client_tests b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_multi_client_tests
index 1fee2a9..90c8fc9 100644
--- a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_multi_client_tests
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_multi_client_tests
@@ -10,10 +10,11 @@
 CRITERIA = 'All tests should pass'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """ Tests monitor functionality for multiple clients. """
 
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_pattern_filter_tests b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_pattern_filter_tests
index 0844a8a..aae440a 100644
--- a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_pattern_filter_tests
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_pattern_filter_tests
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -10,12 +10,13 @@
 CRITERIA = 'All tests should pass'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
-DOC = """ Tests monitor functionality with patterns filter only. """
+DOC = """ Tests pattern filter for single client. """
 
 args_dict = utils.args_to_dict(args)
 
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_rssi_filter_multi_peers_tests b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_rssi_filter_multi_peers_tests
new file mode 100644
index 0000000..0e89392
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_rssi_filter_multi_peers_tests
@@ -0,0 +1,31 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAdvMonitor.advmon_rssi_filter_multi_peers_tests'
+PURPOSE = ('Batch of Bluetooth Advertisement Monitor tests')
+CRITERIA = 'All tests should pass'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """ Tests RSSI filter with multiple peers for single client. """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAdvMonitor',
+                 host=host,
+                 num_iterations=1,
+                 args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_rssi_filter_range_tests b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_rssi_filter_range_tests
new file mode 100644
index 0000000..3058c86
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_rssi_filter_range_tests
@@ -0,0 +1,31 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAdvMonitor.advmon_rssi_filter_range_tests'
+PURPOSE = ('Batch of Bluetooth Advertisement Monitor tests')
+CRITERIA = 'All tests should pass'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """ Tests RSSI filter range for single client. """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAdvMonitor',
+                 host=host,
+                 num_iterations=1,
+                 args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_rssi_filter_reset_tests b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_rssi_filter_reset_tests
new file mode 100644
index 0000000..b260e32
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_rssi_filter_reset_tests
@@ -0,0 +1,31 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterAdvMonitor.advmon_rssi_filter_reset_tests'
+PURPOSE = ('Batch of Bluetooth Advertisement Monitor tests')
+CRITERIA = 'All tests should pass'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """ Tests RSSI filter reset for single client. """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterAdvMonitor',
+                 host=host,
+                 num_iterations=1,
+                 args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_single_client_tests b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_single_client_tests
deleted file mode 100644
index 5e3cc20..0000000
--- a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_single_client_tests
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'chromeos-bluetooth'
-NAME = 'bluetooth_AdapterAdvMonitor.advmon_single_client_tests'
-PURPOSE = ('batch of Bluetooth Advertisement Monitor tests')
-CRITERIA = 'All tests should pass'
-TIME = 'MEDIUM'
-TEST_CATEGORY = 'Functional'
-ATTRIBUTES = 'suite:bluetooth_flaky'
-TEST_CLASS = 'bluetooth'
-TEST_TYPE = 'server'
-DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
-
-DOC = """ Tests monitor functionality for single client. """
-
-args_dict = utils.args_to_dict(args)
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('bluetooth_AdapterAdvMonitor',
-                 host=host,
-                 num_iterations=1,
-                 args_dict=args_dict,
-                 test_name=NAME.split('.')[1])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_suspend_resume_tests b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_suspend_resume_tests
index 3ee536f..0d6ada4 100644
--- a/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_suspend_resume_tests
+++ b/server/site_tests/bluetooth_AdapterAdvMonitor/control.advmon_suspend_resume_tests
@@ -10,10 +10,12 @@
 CRITERIA = 'All tests should pass'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
+# TODO: Remove ['Quick Health'] from AdapterAdvMonitor when moving to stable.
 ATTRIBUTES = 'suite:bluetooth_flaky'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """ Tests monitor functionality with suspend/resume. """
 
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/bluetooth_AdapterCLHealth.py b/server/site_tests/bluetooth_AdapterCLHealth/bluetooth_AdapterCLHealth.py
index 9a25d0f..25d4cc1 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/bluetooth_AdapterCLHealth.py
+++ b/server/site_tests/bluetooth_AdapterCLHealth/bluetooth_AdapterCLHealth.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -32,7 +33,7 @@
     batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
 
 
-    @test_wrapper('Discovery Test', devices={"MOUSE":1})
+    @test_wrapper('Discovery Test', devices={"MOUSE": 1}, supports_floss=True)
     def cl_adapter_discovery_test(self):
         """Performs pairing test with mouse peripheral"""
         device = self.devices['MOUSE'][0]
@@ -41,7 +42,9 @@
         self.test_device_name(device.address, device.name)
 
 
-    @test_wrapper('Discoverable Test', devices={"MOUSE":1})
+    @test_wrapper('Discoverable Test',
+                  devices={"MOUSE": 1},
+                  supports_floss=True)
     def cl_adapter_discoverable_test(self):
         """Verifies that DUT can become discoverable and be discovered"""
 
@@ -54,7 +57,7 @@
         self.test_discover_by_device(device)
 
 
-    @test_wrapper('Pairing Test', devices={"MOUSE":1})
+    @test_wrapper('Pairing Test', devices={"MOUSE": 1}, supports_floss=True)
     def cl_adapter_pairing_test(self):
         """Performs pairing test with mouse peripheral"""
         device = self.devices['MOUSE'][0]
@@ -63,7 +66,9 @@
                           self.test_mouse_right_click)
 
 
-    @test_wrapper('keyboard Pairing Test', devices={"KEYBOARD":1})
+    @test_wrapper('keyboard Pairing Test',
+                  devices={"KEYBOARD": 1},
+                  supports_floss=True)
     def cl_adapter_keyboard_pairing_test(self):
         """Performs pairing test with keyboard peripheral"""
         device = self.devices['KEYBOARD'][0]
@@ -72,7 +77,9 @@
                           self.run_keyboard_tests)
 
 
-    @test_wrapper('Pairing Suspend Resume Test', devices={"MOUSE": 1})
+    @test_wrapper('Pairing Suspend Resume Test',
+                  devices={"MOUSE": 1},
+                  supports_floss=True)
     def cl_adapter_pairing_suspend_resume_test(self):
         """Performs pairing test over resume with mouse peripheral"""
         device = self.devices['MOUSE'][0]
@@ -82,7 +89,9 @@
                           suspend_resume=True)
 
 
-    @test_wrapper('Pairing Twice Test', devices={"MOUSE":1})
+    @test_wrapper('Pairing Twice Test',
+                  devices={"MOUSE": 1},
+                  supports_floss=True)
     def cl_adapter_pairing_twice_test(self):
         """Performs pairing twice test with  mouse peripheral"""
         device = self.devices['MOUSE'][0]
@@ -92,7 +101,9 @@
                           pairing_twice=True)
 
 
-    @test_wrapper('HID Reports Test', devices={"MOUSE":1})
+    @test_wrapper('HID Reports Test',
+                  devices={"MOUSE": 1},
+                  supports_floss=True)
     def cl_HID_reports_test(self):
         """Performs HID report test with mouse peripheral"""
         device = self.devices['MOUSE'][0]
@@ -101,7 +112,9 @@
                           self.test_mouse_right_click)
 
 
-    @test_wrapper('HID keyboard Reports Test', devices={'KEYBOARD':1})
+    @test_wrapper('HID keyboard Reports Test',
+                  devices={'KEYBOARD': 1},
+                  supports_floss=True)
     def cl_HID_keyboard_reports_test(self):
         """Performs HID report test with keyboard peripheral"""
         device = self.devices['KEYBOARD'][0]
@@ -110,7 +123,19 @@
                           self.run_keyboard_tests)
 
 
-    @test_wrapper('HID Reports Suspend Resume Test', devices={"MOUSE": 1})
+    @test_wrapper('HID Reconnect Speed Test',
+                  devices={"MOUSE": 1},
+                  flags=['Quick Health'],
+                  supports_floss=True)
+    def cl_HID_reconnect_speed_test(self):
+        """Performs HID reconnect speed test with mouse peripheral"""
+        device = self.devices['MOUSE'][0]
+        self.hid_reconnect_speed(device=device, device_type='MOUSE')
+
+
+    @test_wrapper('HID Reports Suspend Resume Test',
+                  devices={"MOUSE": 1},
+                  supports_floss=True)
     def cl_HID_reports_suspend_resume_test(self):
         """Performs HID report test over resume with mouse peripheral"""
         device = self.devices['MOUSE'][0]
@@ -119,7 +144,9 @@
                           self.test_mouse_right_click, suspend_resume=True)
 
 
-    @test_wrapper('HID Reports Reboot Test', devices={"MOUSE":1})
+    @test_wrapper('HID Reports Reboot Test',
+                  devices={"MOUSE": 1},
+                  supports_floss=True)
     def cl_HID_reports_reboot_test(self):
         """Performs HID report test over reboot with mouse peripheral"""
         device = self.devices['MOUSE'][0]
@@ -128,7 +155,38 @@
                           self.test_mouse_right_click, reboot=True)
 
 
-    @test_wrapper('Connect Disconnect Loop Test', devices={"MOUSE":1})
+    @test_wrapper('HID Reports Restart Test',
+                  devices={"MOUSE": 1},
+                  flags=['Quick Health'],
+                  supports_floss=True)
+    def cl_HID_reports_restart_test(self):
+        """Performs HID report test over bluetoothd restart with mouse
+           peripheral
+        """
+        device = self.devices['MOUSE'][0]
+        self.run_hid_reports_test(
+                device,
+                check_connected_method=self.test_mouse_move_in_xy,
+                restart=True)
+
+
+    @test_wrapper('Connect Disconnect by Device Loop Test',
+                  devices={"MOUSE": 1},
+                  flags=['Quick Health'],
+                  supports_floss=True)
+    def cl_connect_disconnect_by_device_loop_test(self):
+        """Performs connect/disconnect by device test with mouse peripheral"""
+        device = self.devices['MOUSE'][0]
+        self.connect_disconnect_by_device_loop(
+                device=device,
+                loops=3,
+                device_type='MOUSE',
+                check_connected_method=self.test_mouse_move_in_xy)
+
+
+    @test_wrapper('Connect Disconnect Loop Test',
+                  devices={"MOUSE": 1},
+                  supports_floss=True)
     def cl_connect_disconnect_loop_test(self):
         """Performs connect/disconnect test with mouse peripheral"""
         device = self.devices['MOUSE'][0]
@@ -204,6 +262,7 @@
         """
         self.cl_HID_keyboard_reports_test()
         self.cl_HID_reports_reboot_test()
+        self.cl_HID_reports_restart_test()
         self.cl_HID_reports_suspend_resume_test()
         self.cl_HID_reports_test()
         self.cl_adapter_discoverable_test()
@@ -212,6 +271,7 @@
         self.cl_adapter_pairing_suspend_resume_test()
         self.cl_adapter_pairing_test()
         self.cl_adapter_pairing_twice_test()
+        self.cl_connect_disconnect_by_device_loop_test()
         self.cl_connect_disconnect_loop_test()
         self.cl_sdp_service_attribute_request_test()
         self.cl_sdp_service_browse_test()
@@ -224,7 +284,8 @@
                  num_iterations=1,
                  args_dict=None,
                  test_name=None,
-                 flag='Quick Health'):
+                 flag='Quick Health',
+                 floss=False):
         """Run the batch of Bluetooth Classic health tests
 
         @param host: the DUT, usually a chromebook
@@ -236,6 +297,7 @@
         self.quick_test_init(host,
                              use_btpeer=True,
                              flag=flag,
-                             args_dict=args_dict)
+                             args_dict=args_dict,
+                             floss=floss)
         self.cl_health_batch_run(num_iterations, test_name)
         self.quick_test_cleanup()
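
The change above threads a new floss keyword from run_once() into quick_test_init() and tags individual tests with supports_floss=True. The sketch below shows one plausible way such a flag could gate which wrapped tests execute; it is illustrative only, and the real gating lives inside the BluetoothAdapterQuickTests decorators and may differ.

import functools


def test_wrapper_sketch(pretty_name, supports_floss=False):
    """Hypothetical decorator that records whether a test can run under Floss."""
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(self, *args, **kwargs):
            # Skip tests that do not declare Floss support when the batch
            # was started with floss=True.
            if getattr(self, 'floss', False) and not supports_floss:
                print('SKIPPED %s: not supported on Floss yet' % pretty_name)
                return
            print('RUNNING %s' % pretty_name)
            return fn(self, *args, **kwargs)
        return wrapper
    return decorator


class ClassicHealthSketch(object):
    def __init__(self, floss=False):
        self.floss = floss

    @test_wrapper_sketch('Pairing Test', supports_floss=True)
    def cl_adapter_pairing_test(self):
        pass

    @test_wrapper_sketch('SDP Service Browse Test')  # no Floss support declared
    def cl_sdp_service_browse_test(self):
        pass


if __name__ == '__main__':
    suite = ClassicHealthSketch(floss=True)
    suite.cl_adapter_pairing_test()      # runs
    suite.cl_sdp_service_browse_test()   # skipped under Floss
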
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control b/server/site_tests/bluetooth_AdapterCLHealth/control
index 4bc6087..93c63b4 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 A Batch of Bluetooth Classic health tests. This test is written as a batch
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.all_floss b/server/site_tests/bluetooth_AdapterCLHealth/control.all_floss
new file mode 100644
index 0000000..35098d5
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.all_floss
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.all_floss'
+PURPOSE = ('Batch of Bluetooth Classic health tests for Floss')
+CRITERIA = 'Pass all health tests'
+ATTRIBUTES = ''
+TIME = 'LONG'  # This test takes about 20 minutes
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    A Batch of Bluetooth Classic health tests. These tests will run all classic
+    tests that are currently supported on Floss.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict, floss=True)
+
+parallel_simple(run, machines)
+
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_keyboard_reports_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_keyboard_reports_test
index 5c1d0d6..b48d6f0 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_keyboard_reports_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_keyboard_reports_test
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the bluetooth adapter of the DUT could receive HID reports
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_keyboard_reports_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_keyboard_reports_test.floss
new file mode 100644
index 0000000..2e4a514
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_keyboard_reports_test.floss
@@ -0,0 +1,33 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_HID_keyboard_reports_test.floss'
+PURPOSE = ('Test bluetooth adapter receiving reports from '
+           'bluetooth HID devices.')
+CRITERIA = 'Adapter should receive HID events correctly.'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'  # ~2 minutes on octopus
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could receive HID reports
+sent from a connected bluetooth device correctly.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reconnect_speed_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reconnect_speed_test
new file mode 100644
index 0000000..a5b4981
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reconnect_speed_test
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_HID_reconnect_speed_test'
+PURPOSE = 'Test how quickly a HID device reconnects to the DUT.'
+CRITERIA = 'HID device should reconnect quickly.'
+# TODO: Remove ['Quick Health'] flag from AdapterCLHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Test how quickly a HID device reconnects to the DUT.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_pairing
+    - test_device_is_connected
+    - test_hid_device_created
+    - test_device_set_discoverable
+    - test_disconnection_by_device
+    - iteration start
+    - test_device_is_not_connected
+    - test_connection_by_device
+    - test_hid_device_created_speed
+    - test_disconnection_by_device
+    - iteration end
+    - test_hid_device_reconnect_time
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
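
The subtest list above ends with test_hid_device_reconnect_time, so the iterated reconnects are timed and then judged against a bound. A standalone sketch of that measurement shape follows; the 3-second bound and the simulated_reconnect() helper are invented for illustration and do not come from the test.

import random
import time

RECONNECT_TIME_BOUND_SECS = 3.0  # hypothetical pass/fail bound


def simulated_reconnect():
    """Stand-in for: the peer reconnects and the HID node reappears."""
    time.sleep(random.uniform(0.05, 0.15))


def measure_average_reconnect_time(loops=3):
    """Time each reconnect and return the average over all iterations."""
    durations = []
    for _ in range(loops):
        start = time.monotonic()
        simulated_reconnect()
        durations.append(time.monotonic() - start)
    return sum(durations) / len(durations)


if __name__ == '__main__':
    avg = measure_average_reconnect_time()
    print('average reconnect time: %.3fs' % avg)
    assert avg < RECONNECT_TIME_BOUND_SECS, 'HID device reconnected too slowly'
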
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reconnect_speed_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reconnect_speed_test.floss
new file mode 100644
index 0000000..09243bc
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reconnect_speed_test.floss
@@ -0,0 +1,30 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_HID_reconnect_speed_test.floss'
+PURPOSE = 'Test how quickly a HID device reconnects to the DUT.'
+CRITERIA = 'HID device should reconnect quickly.'
+# TODO: Remove ['Quick Health'] flag from AdapterCLHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """Test the speed of a HID device reconnect to DUT"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_reboot_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_reboot_test
index 3788018..ade1e39 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_reboot_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_reboot_test
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the bluetooth adapter of the DUT could receive HID reports
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_reboot_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_reboot_test.floss
new file mode 100644
index 0000000..60d57c8
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_reboot_test.floss
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_HID_reports_reboot_test.floss'
+PURPOSE = ('Test bluetooth adapter receiving reports from '
+           'bluetooth HID devices after a reboot.')
+CRITERIA = 'Adapter should receive HID events correctly.'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could receive HID reports
+sent from a connected bluetooth device correctly before and after a
+reboot.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_restart_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_restart_test
new file mode 100644
index 0000000..057d267
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_restart_test
@@ -0,0 +1,51 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_HID_reports_restart_test'
+PURPOSE = ('Test bluetooth adapter receiving reports from '
+           'bluetooth HID devices after bluetoothd restart.')
+CRITERIA = 'Adapter should receive HID events correctly.'
+# TODO: Remove ['Quick Health'] flag from AdapterCLHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could receive HID reports
+sent from a connected bluetooth device correctly before and after a
+bluetoothd restart.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_pairing
+    - test_device_is_paired
+    - test_connection_by_adapter
+    - test_mouse_move_in_xy
+    - test_stop_bluetoothd
+    - test_start_bluetoothd
+    - test_device_is_connected
+    - test_connection_by_device
+    - test_mouse_move_in_xy
+    - test_disconnection_by_adapter
+    - test_remove_pairing
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
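
The restart variant wraps the usual HID report checks around test_stop_bluetoothd and test_start_bluetoothd. A rough sketch of that before/restart/after structure, with the daemon control and the HID check reduced to hypothetical callables rather than real autotest or ChromeOS interfaces:

def run_hid_reports_restart_sketch(stop_daemon, start_daemon, mouse_move_in_xy):
    """Verify HID reports before the daemon restart and again after it."""
    assert mouse_move_in_xy(), 'HID reports broken before restart'
    stop_daemon()
    start_daemon()
    assert mouse_move_in_xy(), 'HID reports broken after bluetoothd restart'


if __name__ == '__main__':
    # Trivial stand-ins so the sketch runs on its own.
    state = {'running': True}
    run_hid_reports_restart_sketch(
            stop_daemon=lambda: state.update(running=False),
            start_daemon=lambda: state.update(running=True),
            mouse_move_in_xy=lambda: state['running'])
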
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_restart_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_restart_test.floss
new file mode 100644
index 0000000..41b14ad
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_restart_test.floss
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_HID_reports_restart_test.floss'
+PURPOSE = ('Test bluetooth adapter receiving reports from '
+           'bluetooth HID devices after bluetoothd restart.')
+CRITERIA = 'Adapter should receive HID events correctly.'
+# TODO: Remove ['Quick Health'] flag from AdapterCLHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could receive HID reports
+sent from a connected bluetooth device correctly before and after the
+bluetooth daemon restart.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_suspend_resume_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_suspend_resume_test
index 9a2a1d4..67a4ecd 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_suspend_resume_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_suspend_resume_test
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the bluetooth adapter of the DUT could receive HID reports
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_suspend_resume_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_suspend_resume_test.floss
new file mode 100644
index 0000000..3c777af
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_suspend_resume_test.floss
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_HID_reports_suspend_resume_test.floss'
+PURPOSE = ('Test bluetooth adapter receiving reports from '
+           'bluetooth HID devices.')
+CRITERIA = 'Adapter should receive HID events correctly after suspend/resume.'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could receive HID reports
+sent from a connected bluetooth device correctly.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_test
index c63a42c..d77e1eb 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_test
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the bluetooth adapter of the DUT could receive HID reports
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_test.floss
new file mode 100644
index 0000000..f2e935d
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_HID_reports_test.floss
@@ -0,0 +1,33 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_HID_reports_test.floss'
+PURPOSE = ('Test bluetooth adapter receiving reports from '
+           'bluetooth HID devices.')
+CRITERIA = 'Adapter should receive HID events correctly.'
+ATTRIBUTES = 'suite:bluetooth_floss, suite:bluetooth_floss_cq'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could receive HID reports
+sent from a connected bluetooth device correctly.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discoverable_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discoverable_test
index 8f4ec3c..86ec961 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discoverable_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discoverable_test
@@ -9,6 +9,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the device can be discovered from another Bluetooth device.
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discoverable_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discoverable_test.floss
new file mode 100644
index 0000000..400c531
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discoverable_test.floss
@@ -0,0 +1,31 @@
+# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_adapter_discoverable_test.floss'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the device can be discovered from another Bluetooth device.
+
+This is a useful health check because it tests basic functionality of the
+radio and host subsystem.
+"""
+
+from autotest_lib.server import utils
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discovery_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discovery_test
index 23d3f87..ac85e4d 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discovery_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discovery_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterCLHealth.cl_adapter_discovery_test'
 PURPOSE = 'Test bluetooth adapter can discover a bluetooth device.'
 CRITERIA = 'Adapter should work in correct states.'
-ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e, suite:bluetooth_e2e_cq'
 TIME = 'SHORT'	# Just over 1 minute on blooglet
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the bluetooth adapter of the DUT could pair and connect
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discovery_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discovery_test.floss
new file mode 100644
index 0000000..4ab9aed
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_discovery_test.floss
@@ -0,0 +1,35 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_adapter_discovery_test.floss'
+PURPOSE = 'Test bluetooth adapter can discover a bluetooth device.'
+CRITERIA = 'Adapter should work in correct states.'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'	# Just over 1 minute on blooglet
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could pair and connect
+to a bluetooth device.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_discover_device
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_keyboard_pairing_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_keyboard_pairing_test
index e88d200..92fe202 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_keyboard_pairing_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_keyboard_pairing_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the bluetooth adapter of the DUT could pair and connect
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_keyboard_pairing_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_keyboard_pairing_test.floss
new file mode 100644
index 0000000..268f5f2
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_keyboard_pairing_test.floss
@@ -0,0 +1,32 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_adapter_keyboard_pairing_test.floss'
+PURPOSE = 'Test bluetooth adapter pairing and connecting to a bluetooth device.'
+CRITERIA = 'Adapter should work in correct states.'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'  # ~3 minutes on octopus
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could pair and connect
+to a bluetooth device.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_suspend_resume_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_suspend_resume_test
index e0776a5..8fd020b 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_suspend_resume_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_suspend_resume_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the bluetooth adapter of the DUT could pair and connect
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_suspend_resume_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_suspend_resume_test.floss
new file mode 100644
index 0000000..a7cdbd0
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_suspend_resume_test.floss
@@ -0,0 +1,47 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_adapter_pairing_suspend_resume_test.floss'
+PURPOSE = 'Test adapter pairing and connecting to a bluetooth device after suspend/resume'
+CRITERIA = 'Adapter should work in correct states.'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could pair and connect
+to a bluetooth device.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_stop_discovery
+    - test_device_name
+    - test_device_class_of_service
+    - test_device_class_of_device
+    - test_pairing
+    - test_connection_by_adapter
+    - test_disconnection_by_adapter
+    - test_connection_by_device
+    - test_disconnection_by_device
+    - test_remove_pairing
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_test
index 4c6b9a5..7fd92e3 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterCLHealth.cl_adapter_pairing_test'
 PURPOSE = 'Test bluetooth adapter pairing and connecting to a bluetooth device.'
 CRITERIA = 'Adapter should work in correct states.'
-ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e, suite:bluetooth_e2e_cq, suite:bluetooth_floss_cq'
 TIME = 'SHORT'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the bluetooth adapter of the DUT could pair and connect
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_test.floss
new file mode 100644
index 0000000..9a3ee8c
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_test.floss
@@ -0,0 +1,32 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_adapter_pairing_test.floss'
+PURPOSE = 'Test bluetooth adapter pairing and connecting to a bluetooth device.'
+CRITERIA = 'Adapter should work in correct states.'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could pair and connect
+to a bluetooth device.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_twice_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_twice_test
index 6dd6967..c69b496 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_twice_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_twice_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the bluetooth adapter of the DUT could pair and connect
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_twice_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_twice_test.floss
new file mode 100644
index 0000000..5a3bf99
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_adapter_pairing_twice_test.floss
@@ -0,0 +1,47 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_adapter_pairing_twice_test.floss'
+PURPOSE = 'Test bluetooth adapter pairing and connecting to a bluetooth device.'
+CRITERIA = 'Adapter should work in correct states.'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could pair and connect
+to a bluetooth device.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_stop_discovery
+    - test_device_name
+    - test_device_class_of_service
+    - test_device_class_of_device
+    - test_pairing
+    - test_connection_by_adapter
+    - test_disconnection_by_adapter
+    - test_connection_by_device
+    - test_disconnection_by_device
+    - test_remove_pairing
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_by_device_loop_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_by_device_loop_test
new file mode 100644
index 0000000..824360e
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_by_device_loop_test
@@ -0,0 +1,48 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_connect_disconnect_by_device_loop_test'
+PURPOSE = ('Batch of Bluetooth CL health tests')
+CRITERIA = 'Pass all health test'
+# TODO: Remove ['Quick Health'] flag from AdapterCLHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """ Tests connection and disconnection by the device in a loop.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_pairing
+    - test_device_is_connected
+    - test_hid_device_created
+    - test_device_set_discoverable
+    - test_disconnection_by_device
+    - iteration start
+    - test_device_is_not_connected
+    - test_connection_by_device
+    - check_connected_method
+    - test_disconnection_by_device
+    - iteration end
+    - test_remove_pairing
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_by_device_loop_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_by_device_loop_test.floss
new file mode 100644
index 0000000..2f681aa
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_by_device_loop_test.floss
@@ -0,0 +1,48 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_connect_disconnect_by_device_loop_test.floss'
+PURPOSE = ('Batch of Bluetooth CL health tests')
+CRITERIA = 'Pass all health test'
+# TODO: Remove ['Quick Health'] flag from AdapterCLHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """ Tests connection and disconnection by the device in a loop.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_pairing
+    - test_device_is_connected
+    - test_hid_device_created
+    - test_device_set_discoverable
+    - test_disconnection_by_device
+    - iteration start
+    - test_device_is_not_connected
+    - test_connection_by_device
+    - check_connected_method
+    - test_disconnection_by_device
+    - iteration end
+    - test_remove_pairing
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_loop_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_loop_test
index 1f49054..7c6d9ba 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_loop_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_loop_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the bluetooth adapter of the DUT could pair and connect
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_loop_test.floss b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_loop_test.floss
new file mode 100644
index 0000000..3e9b4ac
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_connect_disconnect_loop_test.floss
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterCLHealth.cl_connect_disconnect_loop_test.floss'
+PURPOSE = 'Test bluetooth adapter connect to HID after disconnecting'
+CRITERIA = 'Adapter should work in correct states.'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could pair and connect
+to a bluetooth device.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterCLHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_page_scan_during_inquiry b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_page_scan_during_inquiry
index 816202a..cdebcd5 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_page_scan_during_inquiry
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_page_scan_during_inquiry
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterCLHealth.cl_page_scan_during_inquiry'
 PURPOSE = ('Test controller page scan is working during inquiry.')
 CRITERIA = 'Connection attempt during inquiry should succeed.'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'SHORT'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the DUT could accept a connection during inquiry.
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_attribute_request_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_attribute_request_test
index c4dc502..dccc2f8 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_attribute_request_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_attribute_request_test
@@ -9,6 +9,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 DOC = """
 Verify the correct behaviour of the device when searching for attributes of
 services.
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_browse_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_browse_test
index 61b95c8..b33edd7 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_browse_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_browse_test
@@ -9,6 +9,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify that the IUT behave correct during Service Browse procedure.
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_search_attribute_request_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_search_attribute_request_test
index 5512253..7c3257c 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_search_attribute_request_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_search_attribute_request_test
@@ -9,6 +9,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify the correct behaviour of the device when executing Service Search
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_search_request_basic_test b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_search_request_basic_test
index e9e451f..292fe08 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_search_request_basic_test
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.cl_sdp_service_search_request_basic_test
@@ -9,6 +9,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 Verify the correct behaviour of the device when searching for services.
diff --git a/server/site_tests/bluetooth_AdapterCLHealth/control.stress b/server/site_tests/bluetooth_AdapterCLHealth/control.stress
index b34dd85..91bf787 100644
--- a/server/site_tests/bluetooth_AdapterCLHealth/control.stress
+++ b/server/site_tests/bluetooth_AdapterCLHealth/control.stress
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterCLHealth.stress'
 PURPOSE = ('Batch of Bluetooth Classic health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth_stress'
+ATTRIBUTES = ''
 TIME = 'LONG'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 A Batch of Bluetooth Classic health tests. This test is written as a batch
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/bluetooth_AdapterEPHealth.py b/server/site_tests/bluetooth_AdapterEPHealth/bluetooth_AdapterEPHealth.py
new file mode 100644
index 0000000..8f20554
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/bluetooth_AdapterEPHealth.py
@@ -0,0 +1,651 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A Batch of of Bluetooth enterprise policy health tests"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import time
+
+from six.moves import zip
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.bluetooth.bluetooth_audio_test_data import A2DP
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import (
+        BluetoothAdapterQuickTests)
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_audio_tests import (
+        BluetoothAdapterAudioTests)
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_hidreports_tests \
+        import BluetoothAdapterHIDReportTests
+
+from autotest_lib.server.cros.bluetooth.bluetooth_test_utils import (
+        BluetoothPolicy)
+
+
+class bluetooth_AdapterEPHealth(BluetoothAdapterQuickTests,
+                                BluetoothAdapterAudioTests,
+                                BluetoothAdapterHIDReportTests):
+    """A Batch of Bluetooth enterprise policy health tests."""
+
+    # A delay for disconnection to finish.
+    DISCONNECT_SLEEP_SECS = 2
+
+    # With a Raspberry Pi peer, it takes a moment before the device is
+    # registered as an input device. Without a delay, the input recorder
+    # doesn't find the device.
+    CONNECT_SLEEP_SECS = 1
+
+    test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
+    batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
+
+
+    def get_device_verifier(self, device, expected_pass):
+        """Helper function to get a proper test method for verifying device
+           avalibility depending on its type
+
+        @param device: a peer device
+        @param expected_pass: True if the test is expected to pass
+        @returns: a test method if the device type can be recongnized,
+                  None otherwise.
+        """
+        if device.device_type == 'KEYBOARD':
+            return self.run_keyboard_tests
+        elif device.device_type == 'MOUSE':
+            return self.test_mouse_left_click
+        elif device.device_type == 'BLUETOOTH_AUDIO':
+            # If the test is expected to pass, verify the whole audio procedure.
+            # Otherwise, only make sure A2DP is not connected on the peer device.
+            if expected_pass:
+                return lambda device: self.test_a2dp_sinewaves(device, A2DP, 0)
+            else:
+                return lambda device: self.test_device_a2dp_connected(device)
+        else:
+            raise error.TestError('Failed to find verifier for device type %s' %
+                                  device.device_type)
+
+
+    def ep_outgoing_connection(self, device, expected_pass):
+        """Run outoging connection tests
+
+        @param device: the peer device
+        @param expected_pass: True if the test is expected to pass
+        """
+        self.test_discover_device(device.address)
+        time.sleep(self.TEST_SLEEP_SECS)
+
+        self.test_pairing(device.address, device.pin, trusted=True)
+        time.sleep(self.CONNECT_SLEEP_SECS)
+
+
+    def ep_incoming_connection(self, device, expected_pass):
+        """Run incoming connection tests
+
+        @param device: the peer device
+        @param expected_pass: True if the test is expected to pass
+        """
+        self.test_discover_device(device.address)
+        time.sleep(self.TEST_SLEEP_SECS)
+
+        self.test_pairing(device.address, device.pin, trusted=True)
+        time.sleep(self.CONNECT_SLEEP_SECS)
+
+        self.test_disconnection_by_device(device)
+        time.sleep(self.DISCONNECT_SLEEP_SECS)
+
+        if expected_pass:
+            self.test_connection_by_device(device)
+        else:
+            # ignore the result of connection by device since bluez could
+            # disconnect the device connection if there is no service
+            # available
+            adapter_address = self.bluetooth_facade.address
+            device.ConnectToRemoteAddress(adapter_address)
+        time.sleep(self.CONNECT_SLEEP_SECS)
+
+
+    def ep_auto_reconnection(self, device, expected_pass):
+        """Run auto reconnection tests
+
+        @param device: the peer device
+        @param expected_pass: True if the test is expected to pass
+        """
+        self.test_discover_device(device.address)
+        time.sleep(self.TEST_SLEEP_SECS)
+
+        self.test_pairing(device.address, device.pin, trusted=True)
+        time.sleep(self.CONNECT_SLEEP_SECS)
+
+        device.AdapterPowerOff()
+        time.sleep(self.TEST_SLEEP_SECS)
+        # device should be connected after power on
+        device.AdapterPowerOn()
+
+
+    def reset_allowlist_and_raise_fail(self, err_msg):
+        """Reset the allowlist and raise TestFail.
+
+        @param err_msg: the error message
+        """
+        self.test_reset_allowlist()
+        raise error.TestFail(err_msg)
+
+
+    def post_test_method(self, device):
+        """Run tests to make sure the device is not connected and not paired to
+        host
+
+        @param device: the peer device
+        """
+
+        self.test_disconnection_by_adapter(device.address)
+        self.test_remove_pairing(device.address)
+
+    def run_test_method(self, pre_test_method, devices, uuids='',
+                        expected_passes=True):
+        """Run specified pre_test_method and verify devices can be used.
+
+        @param pre_test_method: the test method to run before verification
+        @param devices: a peer device or a list of peer devices
+        @param uuids: the UUIDs to set in the allowlist.
+                If uuids is None, the allowlist is not set.
+                The default value is '' which means to allow all UUIDs.
+        @param expected_passes: a boolean value or a list of boolean values,
+                each element is True if the corresponding device is expected
+                to pass verification. The default is a single value of True.
+        """
+        has_audio_device = False
+
+        if uuids is not None:
+            self.test_check_set_allowlist(uuids, True)
+
+        if type(devices) is not list:
+            devices = [devices]
+
+        if type(expected_passes) is not list:
+            expected_passes = [expected_passes]
+
+        for device, expected_pass in zip(devices, expected_passes):
+            if device.device_type == 'BLUETOOTH_AUDIO':
+                self.initialize_bluetooth_audio(device, A2DP)
+                has_audio_device = True
+            pre_test_method(device, expected_pass)
+
+        # TODO(b:219398837) Remove this once b/219398837 is fixed.
+        # There is an issue on chameleon: when a DUT connects to multiple
+        # peers with at least one emulated as an audio device, the other
+        # connections might be dropped for a few seconds and then reconnect.
+        # As a workaround, ensure only one device is connected at a time.
+        # The details of the issue are described in b/210379084#comment3
+        # and b/172381798
+        multi_conn_workaround = len(devices) >= 2 and has_audio_device
+        if multi_conn_workaround:
+            for device, expected_pass in zip(devices, expected_passes):
+                # Only disconnect expected_pass devices; the expected_fail
+                # devices may already be disconnected since they don't have
+                # connectable profiles.
+                if expected_pass:
+                    self.test_disconnection_by_adapter(device.address)
+
+        for device, expected_pass in zip(devices, expected_passes):
+            self.check_if_affected_by_policy(device, not expected_pass)
+            verifier = self.get_device_verifier(device, expected_pass)
+
+            if multi_conn_workaround:
+                self.test_connection_by_adapter(device.address)
+
+            # Make sure hid device was created before using it
+            if device.device_type in ['KEYBOARD', 'MOUSE']:
+                self.expect_test(expected_pass, self.test_hid_device_created,
+                                 device.address)
+
+            # Whether the test should pass or fail depends on expected_pass.
+            self.expect_test(expected_pass, verifier, device)
+
+            if multi_conn_workaround:
+                self.test_disconnection_by_adapter(device.address)
+
+        for device in devices:
+            self.post_test_method(device)
+            if device.device_type == 'BLUETOOTH_AUDIO':
+                self.cleanup_bluetooth_audio(device, A2DP)
+
+
+    @test_wrapper('Set Allowlist with Different UUIDs')
+    def ep_check_set_allowlist(self):
+        """The Enterprise Policy set valid and invalid allowlists test."""
+        # Duplicate valid UUIDs
+        self.test_check_set_allowlist('abcd,0xabcd', True)
+
+        # Mix of valid UUID16, UUID32, and UUID128
+        self.test_check_set_allowlist(
+                '0xabcd,abcd1234,'
+                'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee', True)
+
+        # Mix of valid UUID16, UUID32, and UUID128 with duplicate UUIDs
+        self.test_check_set_allowlist(
+                'abcd,0xabcd,abcd1234,0xabcd1234,'
+                'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee', True)
+
+        # Single valid classic HID UUID
+        self.test_check_set_allowlist(BluetoothPolicy.UUID_HID, True)
+
+        # Empty allowlist
+        self.test_check_set_allowlist('', True)
+
+        # Invalid UUID should fail.
+        self.test_check_set_allowlist(
+                'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee-ffff', False)
+
+        # Invalid UUID should fail.
+        self.test_check_set_allowlist('aaaaaaaa-bbbb-cccc-dddd', False)
+
+
+    @test_wrapper('Outgoing: HID: Service in Allowlist', devices={'KEYBOARD':1})
+    def ep_outgoing_hid_service_in_allowlist(self):
+        """The test with service in allowlist for outgoing connection."""
+        device = self.devices['KEYBOARD'][0]
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=BluetoothPolicy.UUID_HID, expected_passes=True)
+
+
+    @test_wrapper('Outgoing: Audio: Service in Allowlist',
+                  devices={'BLUETOOTH_AUDIO':1})
+    def ep_outgoing_audio_services_in_allowlist(self):
+        """The test with service in allowlist for outgoing connection."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=BluetoothPolicy.ALLOWLIST_AUDIO,
+                             expected_passes=True)
+
+
+    @test_wrapper('Outgoing: HID: Service not in Allowlist',
+                  devices={'KEYBOARD':1})
+    def ep_outgoing_hid_service_not_in_allowlist(self):
+        """The test with service not in allowlist for outgoing connection."""
+        device = self.devices['KEYBOARD'][0]
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids='0xaabb', expected_passes=False)
+
+
+    @test_wrapper('Outgoing: Audio: Service not in Allowlist',
+                  devices={'BLUETOOTH_AUDIO':1})
+    def ep_outgoing_audio_services_not_in_allowlist(self):
+        """The test with service not in allowlist for outgoing connection."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.run_test_method(self.ep_outgoing_connection,
+                             device,
+                             uuids=BluetoothPolicy.ALLOWLIST_BLE_HID,
+                             expected_passes=False)
+
+
+    @test_wrapper('Outgoing: HID: Empty Allowlist',
+                  devices={'KEYBOARD':1})
+    def ep_outgoing_hid_empty_allowlist(self):
+        """The test with an empty allowlist for outgoing connection."""
+        device = self.devices['KEYBOARD'][0]
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids='', expected_passes=True)
+
+
+    @test_wrapper('Outgoing: Audio: Empty Allowlist',
+                  devices={'BLUETOOTH_AUDIO':1})
+    def ep_outgoing_audio_empty_allowlist(self):
+        """The test with an empty allowlist for outgoing connection."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids='', expected_passes=True)
+
+
+    @test_wrapper('Incoming: HID: Service in Allowlist',
+                  devices={'KEYBOARD':1})
+    def ep_incoming_hid_service_in_allowlist(self):
+        """Service in allowlist for incoming reconnection from device."""
+        device = self.devices['KEYBOARD'][0]
+        self.run_test_method(self.ep_incoming_connection, device,
+                             uuids=BluetoothPolicy.UUID_HID, expected_passes=True)
+
+
+    @test_wrapper('Incoming: Audio: Service in Allowlist',
+                  devices={'BLUETOOTH_AUDIO':1})
+    def ep_incoming_audio_service_in_allowlist(self):
+        """Service in allowlist for incoming reconnection from device."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.run_test_method(self.ep_incoming_connection, device,
+                             uuids=BluetoothPolicy.ALLOWLIST_AUDIO,
+                             expected_passes=True)
+
+
+    @test_wrapper('Incoming: HID: Service not in Allowlist',
+                  devices={'KEYBOARD':1})
+    def ep_incoming_hid_service_not_in_allowlist(self):
+        """Service not in allowlist for incoming reconnection from device."""
+        device = self.devices['KEYBOARD'][0]
+        self.run_test_method(self.ep_incoming_connection, device,
+                             uuids='0xaabb', expected_passes=False)
+
+
+    @test_wrapper('Incoming: Audio: Service not in Allowlist',
+                  devices={'BLUETOOTH_AUDIO':1})
+    def ep_incoming_audio_service_not_in_allowlist(self):
+        """Service not in allowlist for incoming reconnection from device."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.run_test_method(self.ep_incoming_connection,
+                             device,
+                             uuids=BluetoothPolicy.ALLOWLIST_BLE_HID,
+                             expected_passes=False)
+
+
+    @test_wrapper('Incoming: HID: Service empty Allowlist',
+                  devices={'KEYBOARD':1})
+    def ep_incoming_hid_service_empty_allowlist(self):
+        """The test with an empty allowlist for incoming connection."""
+        device = self.devices['KEYBOARD'][0]
+        self.run_test_method(self.ep_incoming_connection, device,
+                             uuids='',
+                             expected_passes=True)
+
+
+    @test_wrapper('Incoming: Audio: Service empty Allowlist',
+                  devices={'BLUETOOTH_AUDIO':1})
+    def ep_incoming_audio_service_empty_allowlist(self):
+        """The test with an empty allowlist for incoming connection."""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.run_test_method(self.ep_incoming_connection, device,
+                             uuids='',
+                             expected_passes=True)
+
+
+    @test_wrapper('Outgoing: BLE Keyboard: Services in Allowlist',
+                  devices={'BLE_KEYBOARD':1})
+    def ep_outgoing_ble_hid_services_in_allowlist(self):
+        """The test for BLE gatt services in allowlist."""
+        device = self.devices['BLE_KEYBOARD'][0]
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=BluetoothPolicy.ALLOWLIST_BLE_HID,
+                             expected_passes=True)
+
+
+    @test_wrapper('Outgoing: BLE Keyboard: Services not in Allowlist',
+                  devices={'BLE_KEYBOARD':1})
+    def ep_outgoing_ble_hid_services_not_in_allowlist(self):
+        """The test for BLE gatt services not in allowlist."""
+        device = self.devices['BLE_KEYBOARD'][0]
+        self.run_test_method(self.ep_outgoing_connection,
+                             device,
+                             uuids=BluetoothPolicy.ALLOWLIST_AUDIO,
+                             expected_passes=False)
+
+
+    @test_wrapper('Outgoing: BLE Keyboard: Empty Allowlist',
+                  devices={'BLE_KEYBOARD':1})
+    def ep_outgoing_ble_hid_empty_allowlist(self):
+        """The test for BLE gatt services and an empty allowlist."""
+        device = self.devices['BLE_KEYBOARD'][0]
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids='', expected_passes=True)
+
+
+    @test_wrapper('Reconnection: BLE Keyboard: Service in Allowlist',
+                  devices={'BLE_KEYBOARD':1})
+    def ep_reconnection_ble_hid_service_in_allowlist(self):
+        """Service in allowlist for auto reconnection from device."""
+        device = self.devices['BLE_KEYBOARD'][0]
+        self.run_test_method(self.ep_auto_reconnection, device,
+                             uuids=BluetoothPolicy.ALLOWLIST_BLE_HID,
+                             expected_passes=True)
+
+
+    @test_wrapper('Reconnection: BLE Keyboard: Service not in Allowlist',
+                  devices={'BLE_KEYBOARD':1})
+    def ep_reconnection_ble_hid_service_not_in_allowlist(self):
+        """Service in allowlist for auto reconnection from device."""
+        device = self.devices['BLE_KEYBOARD'][0]
+        self.run_test_method(self.ep_auto_reconnection,
+                             device,
+                             uuids=BluetoothPolicy.ALLOWLIST_AUDIO,
+                             expected_passes=False)
+
+
+    @test_wrapper('Combo: Set Allowlist and Disconnect', devices={'KEYBOARD':1})
+    def ep_combo_set_allowlist_and_disconnect(self):
+        """Set a new allowlist and current connection should be terminated."""
+        device = self.devices['KEYBOARD'][0]
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=BluetoothPolicy.UUID_HID, expected_passes=True)
+
+        # Setting a non-HID UUID should disconnect the device.
+        self.test_check_set_allowlist('abcd', True)
+        time.sleep(self.DISCONNECT_SLEEP_SECS)
+        self.test_device_is_not_connected(device.address)
+
+
+    @test_wrapper('Combo: Successive Allowlist', devices={'KEYBOARD':1})
+    def ep_combo_successive_allowlists(self):
+        """A new allowlist overwrites previoius one and allows connection."""
+        device = self.devices['KEYBOARD'][0]
+
+        # Setting a non-HID UUID initially.
+        self.test_check_set_allowlist('abcd', True)
+
+        # A subsequent HID UUID should supersede the previous setting.
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=BluetoothPolicy.UUID_HID, expected_passes=True)
+
+
+    @test_wrapper('Combo: HID Allowlist Persists Adapter Reset',
+                  devices={'KEYBOARD':1})
+    def ep_combo_hid_persists_adapter_reset(self):
+        """The Allowlist with HID UUID should persist adapter reset."""
+        device = self.devices['KEYBOARD'][0]
+        self.test_check_set_allowlist(BluetoothPolicy.UUID_HID, True)
+        self.test_reset_on_adapter()
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=None, expected_passes=True)
+
+
+    @test_wrapper('Combo: Non-HID Allowlist Persists Adapter Reset',
+                  devices={'KEYBOARD':1})
+    def ep_combo_non_hid_persists_adapter_reset(self):
+        """The Allowlist with non-HID UUID should persist adapter reset."""
+        device = self.devices['KEYBOARD'][0]
+        self.test_check_set_allowlist('abcd', True)
+        self.test_reset_on_adapter()
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=None, expected_passes=False)
+
+
+    @test_wrapper('Combo: HID Allowlist Persists bluetoothd restart',
+                  devices={'KEYBOARD':1})
+    def ep_combo_hid_persists_bluetoothd_restart(self):
+        """The Allowlist with HID UUID should persist bluetoothd restart."""
+        device = self.devices['KEYBOARD'][0]
+        self.test_check_set_allowlist(BluetoothPolicy.UUID_HID, True)
+        self.test_stop_bluetoothd()
+        self.test_start_bluetoothd()
+        # Powering on the adapter could take a few milliseconds; make sure
+        # the power is on before proceeding.
+        self.test_adapter_work_state()
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=None, expected_passes=True)
+
+
+    @test_wrapper('Combo: Non-HID Allowlist Persists bluetoothd restart',
+                  devices={'KEYBOARD':1})
+    def ep_combo_non_hid_persists_bluetoothd_restart(self):
+        """The Allowlist with non-HID UUID should persist bluetoothd restart."""
+        device = self.devices['KEYBOARD'][0]
+        self.test_check_set_allowlist('abcd', True)
+        self.test_stop_bluetoothd()
+        self.test_start_bluetoothd()
+        # Powering on the adapter could take a few milliseconds; make sure
+        # the power is on before proceeding.
+        self.test_adapter_work_state()
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=None, expected_passes=False)
+
+
+    @test_wrapper('Combo: HID Allowlist Persists reboot',
+                  devices={'KEYBOARD':1})
+    def ep_combo_hid_persists_reboot(self):
+        """The Allowlist with HID UUID should persist reboot."""
+        device = self.devices['KEYBOARD'][0]
+        self.test_check_set_allowlist(BluetoothPolicy.UUID_HID, True)
+        self.reboot()
+        # Make sure adapter power is on before proceeding.
+        self.test_adapter_work_state()
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=None, expected_passes=True)
+
+
+    @test_wrapper('Combo: Non-HID Allowlist Persists reboot',
+                  devices={'KEYBOARD':1})
+    def ep_combo_non_hid_persists_reboot(self):
+        """The Allowlist with non-HID UUID should persist reboot."""
+        device = self.devices['KEYBOARD'][0]
+        self.test_check_set_allowlist('aaaa', True)
+        self.reboot()
+        # Make sure adapter power is on before proceeding.
+        self.test_adapter_work_state()
+        self.run_test_method(self.ep_outgoing_connection, device,
+                             uuids=None, expected_passes=False)
+
+
+    @test_wrapper('MD: BLE HID and Audio: Services in Allowlist',
+                  devices={'BLE_MOUSE':1, 'BLUETOOTH_AUDIO':1})
+    def ep_md_ble_hid_and_audio_in_allowlist(self):
+        """The multi-device test for BLE HID and audio services in allowlist."""
+        hid_device = self.devices['BLE_MOUSE'][0]
+        audio_device = self.devices['BLUETOOTH_AUDIO'][0]
+
+        self.run_test_method(self.ep_outgoing_connection,
+                             [hid_device, audio_device],
+                             uuids=BluetoothPolicy.ALLOWLIST_BLE_HID_AUDIO,
+                             expected_passes=[True, True])
+
+
+    @test_wrapper('MD: BLE HID and Audio: Only Audio in Allowlist',
+                  devices={'BLE_MOUSE':1, 'BLUETOOTH_AUDIO':1})
+    def ep_md_audio_in_allowlist(self):
+        """The multi-device test for audio services in allowlist."""
+        hid_device = self.devices['BLE_MOUSE'][0]
+        audio_device = self.devices['BLUETOOTH_AUDIO'][0]
+
+        self.run_test_method(self.ep_outgoing_connection,
+                             [hid_device, audio_device],
+                             uuids=BluetoothPolicy.ALLOWLIST_AUDIO,
+                             expected_passes=[False, True])
+
+
+    @test_wrapper('MD: BLE HID and Audio: Only BLE HID in Allowlist',
+                  devices={'BLE_KEYBOARD':1, 'BLUETOOTH_AUDIO':1})
+    def ep_md_ble_hid_in_allowlist(self):
+        """The multi-device test for audio services in allowlist."""
+        hid_device = self.devices['BLE_KEYBOARD'][0]
+        audio_device = self.devices['BLUETOOTH_AUDIO'][0]
+
+        self.run_test_method(self.ep_outgoing_connection,
+                             [hid_device, audio_device],
+                             uuids=BluetoothPolicy.ALLOWLIST_BLE_HID,
+                             expected_passes=[True, False])
+
+
+    @test_wrapper('MD: Classic and BLE HID: Services in Allowlist',
+                  devices={'BLE_KEYBOARD':1, 'MOUSE':1})
+    def ep_md_hid_and_ble_hid_in_allowlist(self):
+        """The multi-device test for Classic and BLE HID in the allowlist."""
+        keyboard_device = self.devices['BLE_KEYBOARD'][0]
+        mouse_device = self.devices['MOUSE'][0]
+
+        self.run_test_method(self.ep_outgoing_connection,
+                             [keyboard_device, mouse_device],
+                             uuids=BluetoothPolicy.ALLOWLIST_CLASSIC_BLE_HID,
+                             expected_passes=[True, True])
+
+
+    @test_wrapper('MD: BLE HID and Audio: Empty Allowlist',
+                  devices={'BLE_KEYBOARD':1, 'BLUETOOTH_AUDIO':1})
+    def ep_md_ble_hid_and_audio_empty_allowlist(self):
+        """The multi-device test for BLE HID and Audio with empty allowlist."""
+        hid_device = self.devices['BLE_KEYBOARD'][0]
+        audio_device = self.devices['BLUETOOTH_AUDIO'][0]
+
+        self.run_test_method(self.ep_outgoing_connection,
+                             [hid_device, audio_device],
+                             uuids='',
+                             expected_passes=[True, True])
+
+
+    @batch_wrapper('EP Health')
+    def ep_health_batch_run(self, num_iterations=1, test_name=None):
+        """Run the EP health test batch or a specific given test.
+
+        @param num_iterations: how many iterations to run
+        @param test_name: the specific test to run, or None to run the
+                whole batch
+        """
+
+        self.ep_check_set_allowlist()
+
+        self.ep_outgoing_hid_service_in_allowlist()
+        self.ep_outgoing_hid_service_not_in_allowlist()
+        self.ep_outgoing_hid_empty_allowlist()
+
+        self.ep_outgoing_ble_hid_services_in_allowlist()
+        self.ep_outgoing_ble_hid_services_not_in_allowlist()
+        self.ep_outgoing_ble_hid_empty_allowlist()
+
+        self.ep_incoming_hid_service_in_allowlist()
+        self.ep_incoming_hid_service_not_in_allowlist()
+        self.ep_incoming_hid_service_empty_allowlist()
+
+        self.ep_outgoing_audio_services_in_allowlist()
+        self.ep_outgoing_audio_services_not_in_allowlist()
+        self.ep_outgoing_audio_empty_allowlist()
+
+        self.ep_incoming_audio_service_in_allowlist()
+        self.ep_incoming_audio_service_not_in_allowlist()
+        self.ep_incoming_audio_service_empty_allowlist()
+
+        self.ep_reconnection_ble_hid_service_in_allowlist()
+        self.ep_reconnection_ble_hid_service_not_in_allowlist()
+
+        self.ep_combo_set_allowlist_and_disconnect()
+        self.ep_combo_successive_allowlists()
+        self.ep_combo_hid_persists_adapter_reset()
+        self.ep_combo_non_hid_persists_adapter_reset()
+        self.ep_combo_hid_persists_bluetoothd_restart()
+        self.ep_combo_non_hid_persists_bluetoothd_restart()
+        self.ep_combo_hid_persists_reboot()
+        self.ep_combo_non_hid_persists_reboot()
+
+        self.ep_md_ble_hid_and_audio_in_allowlist()
+        self.ep_md_audio_in_allowlist()
+        self.ep_md_ble_hid_in_allowlist()
+        self.ep_md_hid_and_ble_hid_in_allowlist()
+        self.ep_md_ble_hid_and_audio_empty_allowlist()
+
+    def run_once(self,
+                 host,
+                 num_iterations=1,
+                 peer_required=True,
+                 args_dict=None,
+                 test_name=None,
+                 flag='Quick Health'):
+        """Run the batch of Bluetooth enterprise policy health tests
+
+        @param host: the DUT, usually a chromebook
+        @param num_iterations: the number of rounds to execute the test
+        @param peer_required: True if the test requires a btpeer
+        @param args_dict: the test arguments passed from the control file
+        @param test_name: the test to run, or None for all tests
+        @param flag: the test flag passed to quick_test_init, e.g.
+                'Quick Health'
+        """
+
+        # Initialize and run the test batch or the requested specific test
+        self.quick_test_init(host,
+                             use_btpeer=peer_required,
+                             flag=flag,
+                             args_dict=args_dict)
+        self.ep_health_batch_run(num_iterations, test_name)
+        self.quick_test_cleanup()
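The ep_check_set_allowlist cases above exercise the UUID formats the allowlist accepts: UUID16 ('abcd' or '0xabcd'), UUID32 ('abcd1234' or '0xabcd1234'), full UUID128, comma-separated combinations, and the empty string meaning allow-all, while malformed UUID128 strings are rejected. A standalone sketch of a validator that mirrors just those expectations (an illustration only, not the bluetoothd or autotest implementation):

    import re

    # Patterns mirroring the accepted formats in ep_check_set_allowlist:
    # UUID16 and UUID32 (with an optional 0x prefix) and full UUID128.
    _UUID_PATTERNS = (
        re.compile(r'^(0x)?[0-9a-fA-F]{4}$'),
        re.compile(r'^(0x)?[0-9a-fA-F]{8}$'),
        re.compile(r'^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-'
                   r'[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$'),
    )

    def allowlist_is_valid(allowlist):
        """Return True if every comma-separated entry looks like a UUID."""
        if allowlist == '':
            return True   # An empty allowlist means allow all services.
        return all(any(p.match(uuid) for p in _UUID_PATTERNS)
                   for uuid in allowlist.split(','))

    assert allowlist_is_valid('abcd,0xabcd')
    assert allowlist_is_valid('0xabcd,abcd1234,'
                              'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee')
    assert not allowlist_is_valid('aaaaaaaa-bbbb-cccc-dddd')
    assert not allowlist_is_valid('aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee-ffff')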
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control b/server/site_tests/bluetooth_AdapterEPHealth/control
new file mode 100644
index 0000000..acd3c38
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control
@@ -0,0 +1,47 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = ''
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+    Server side Bluetooth Enterprise Policy health tests.
+
+    Specifically, this batch includes the following groups of tests
+    - ep_check_set_allowlist
+    - ep_outgoing_* tests: HID, BLE HID, and audio services in, not in,
+      and with an empty allowlist
+    - ep_incoming_* tests: HID and audio services in, not in, and with
+      an empty allowlist
+    - ep_reconnection_ble_hid_* tests: services in and not in the allowlist
+    - ep_combo_* tests: allowlist updates, disconnection, and persistence
+      across adapter reset, bluetoothd restart, and reboot
+    - ep_md_* multi-device tests with BLE HID and audio peers
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict)
+
+parallel_simple(run, machines)
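The batch control above runs every EP subtest, including the multi-device (MD) cases where run_test_method() receives a list of peers and a matching list of expected results. A compressed, standalone sketch of the normalization that lets the single-device and multi-device paths share one loop (placeholder strings stand in for the peer device objects):

    def pair_devices_with_expectations(devices, expected_passes=True):
        # Mirror run_test_method(): wrap a single device/expectation into a
        # one-element list, then zip so each device gets its own expectation.
        if type(devices) is not list:
            devices = [devices]
        if type(expected_passes) is not list:
            expected_passes = [expected_passes]
        return list(zip(devices, expected_passes))

    # Single-device call, e.g. ep_outgoing_hid_service_in_allowlist:
    assert pair_devices_with_expectations('KEYBOARD') == [('KEYBOARD', True)]

    # Multi-device call, e.g. ep_md_audio_in_allowlist where only the audio
    # peer is expected to pass:
    assert pair_devices_with_expectations(
            ['BLE_MOUSE', 'BLUETOOTH_AUDIO'],
            [False, True]) == [('BLE_MOUSE', False), ('BLUETOOTH_AUDIO', True)]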
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_check_set_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_check_set_allowlist
new file mode 100644
index 0000000..40754a6
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_check_set_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_check_set_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test about setting valid and invalid allowlists.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 peer_required=False,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_hid_persists_adapter_reset b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_hid_persists_adapter_reset
new file mode 100644
index 0000000..95655b0
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_hid_persists_adapter_reset
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_combo_hid_persists_adapter_reset'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test verifying that the allowlist with HID UUID
+    persists across adapter reset.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_hid_persists_bluetoothd_restart b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_hid_persists_bluetoothd_restart
new file mode 100644
index 0000000..f652da4
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_hid_persists_bluetoothd_restart
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_combo_hid_persists_bluetoothd_restart'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test verifying that the allowlist with HID UUID
+    persists across bluetoothd restart.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_hid_persists_reboot b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_hid_persists_reboot
new file mode 100644
index 0000000..83ddbe2
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_hid_persists_reboot
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_combo_hid_persists_reboot'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test verifying that the allowlist with HID UUID
+    persists across a DUT reboot.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_non_hid_persists_adapter_reset b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_non_hid_persists_adapter_reset
new file mode 100644
index 0000000..d582293
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_non_hid_persists_adapter_reset
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_combo_non_hid_persists_adapter_reset'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test verifying that the allowlist with non-HID UUID
+    persists across adapter reset.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_non_hid_persists_bluetoothd_restart b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_non_hid_persists_bluetoothd_restart
new file mode 100644
index 0000000..15f2775
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_non_hid_persists_bluetoothd_restart
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_combo_non_hid_persists_bluetoothd_restart'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test verifying that the allowlist with non-HID UUID
+    persists across bluetoothd restart.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_non_hid_persists_reboot b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_non_hid_persists_reboot
new file mode 100644
index 0000000..a907563
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_non_hid_persists_reboot
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_combo_non_hid_persists_reboot'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test verifying that the allowlist with non-HID UUID
+    persists across a DUT reboot.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_set_allowlist_and_disconnect b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_set_allowlist_and_disconnect
new file mode 100644
index 0000000..1cd4ee5
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_set_allowlist_and_disconnect
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_combo_set_allowlist_and_disconnect'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+     Server side bluetooth test verifying that setting a new allowlist
+     terminates the current connection.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_successive_allowlists b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_successive_allowlists
new file mode 100644
index 0000000..f91b48b
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_combo_successive_allowlists
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_combo_successive_allowlists'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+     Server side bluetooth test verifying that a new allowlist overwrites
+     the previous one and allows connection.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_audio_service_empty_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_audio_service_empty_allowlist
new file mode 100644
index 0000000..853d62b
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_audio_service_empty_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_incoming_audio_service_empty_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with an empty allowlist for
+    incoming reconnection from the device.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_audio_service_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_audio_service_in_allowlist
new file mode 100644
index 0000000..87276ad
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_audio_service_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_incoming_audio_service_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with service in allowlist for
+    incoming reconnection from device.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_audio_service_not_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_audio_service_not_in_allowlist
new file mode 100644
index 0000000..911baf5
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_audio_service_not_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_incoming_audio_service_not_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with service not in allowlist for
+    incoming reconnection from device.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_hid_service_empty_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_hid_service_empty_allowlist
new file mode 100644
index 0000000..ddb6636
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_hid_service_empty_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_incoming_hid_service_empty_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with an empty allowlist for
+    incoming reconnection from device.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_hid_service_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_hid_service_in_allowlist
new file mode 100644
index 0000000..05e521c
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_hid_service_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_incoming_hid_service_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with service in allowlist for
+    incoming reconnection from device.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_hid_service_not_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_hid_service_not_in_allowlist
new file mode 100644
index 0000000..2599345
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_incoming_hid_service_not_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_incoming_hid_service_not_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with service not in allowlist for
+    incoming reconnection from device.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_audio_service_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_audio_service_in_allowlist
new file mode 100644
index 0000000..40bdc5b
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_audio_service_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_md_audio_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+
+     Server side bluetooth test verifying that the current connection is
+     terminated after a new allowlist is set.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
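
Editor's note: the md (multi-device) variants above raise the btpeer requirement in DEPENDENCIES from working_bluetooth_btpeer:1 to working_bluetooth_btpeer:2. As a hedged illustration only (the real lab scheduler's parsing may differ), the comma-separated DEPENDENCIES string can be read as a set of name:value requirements:

# Illustrative only -- an assumed, simplified reading of the DEPENDENCIES
# string, not the real autotest scheduler logic.
def parse_dependencies(dependencies):
    """Split 'name:value, name:value' into a dict of requirements."""
    result = {}
    for token in dependencies.split(','):
        token = token.strip()
        if not token:
            continue
        name, _, value = token.partition(':')
        result[name] = value
    return result

print(parse_dependencies('bluetooth_state:NORMAL, working_bluetooth_btpeer:2'))
# {'bluetooth_state': 'NORMAL', 'working_bluetooth_btpeer': '2'}
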
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_ble_hid_and_audio_empty_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_ble_hid_and_audio_empty_allowlist
new file mode 100644
index 0000000..1256b82
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_ble_hid_and_audio_empty_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_md_ble_hid_and_audio_empty_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+
+     Server side bluetooth test verifying that the current connection is
+     terminated after a new allowlist is set.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_ble_hid_and_audio_service_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_ble_hid_and_audio_service_in_allowlist
new file mode 100644
index 0000000..4c4341c
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_ble_hid_and_audio_service_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_md_ble_hid_and_audio_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+
+     Server side bluetooth test verifying that the current connection is
+     terminated after a new allowlist is set.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_ble_hid_service_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_ble_hid_service_in_allowlist
new file mode 100644
index 0000000..0dcaf6a
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_ble_hid_service_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_md_ble_hid_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+
+     Server side bluetooth test verifying that the current connection is
+     terminated after a new allowlist is set.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_hid_and_ble_hid_service_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_hid_and_ble_hid_service_in_allowlist
new file mode 100644
index 0000000..a790548
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_md_hid_and_ble_hid_service_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_md_hid_and_ble_hid_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+
+     Server side bluetooth test verifying that the current connection is
+     terminated after a new allowlist is set.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_audio_empty_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_audio_empty_allowlist
new file mode 100644
index 0000000..2bc2f13
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_audio_empty_allowlist
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_outgoing_audio_empty_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with an empty allowlist for outgoing connection.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_audio_service_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_audio_service_in_allowlist
new file mode 100644
index 0000000..4d5bfbb
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_audio_service_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_outgoing_audio_services_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with service in allowlist for
+    outgoing connection.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_audio_service_not_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_audio_service_not_in_allowlist
new file mode 100644
index 0000000..634b343
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_audio_service_not_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_outgoing_audio_services_not_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with service not in allowlist for
+    outgoing connection.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_ble_hid_empty_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_ble_hid_empty_allowlist
new file mode 100644
index 0000000..d90ebf9
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_ble_hid_empty_allowlist
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_outgoing_ble_hid_empty_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test for BLE gatt services and an empty allowlist.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_ble_hid_services_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_ble_hid_services_in_allowlist
new file mode 100644
index 0000000..e9eb5d2
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_ble_hid_services_in_allowlist
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_outgoing_ble_hid_services_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test for BLE gatt services in allowlist.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_ble_hid_services_not_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_ble_hid_services_not_in_allowlist
new file mode 100644
index 0000000..ea701fc
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_ble_hid_services_not_in_allowlist
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_outgoing_ble_hid_services_not_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+     Server side bluetooth test for BLE gatt services not in allowlist.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_hid_empty_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_hid_empty_allowlist
new file mode 100644
index 0000000..1bbd6ec
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_hid_empty_allowlist
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_outgoing_hid_empty_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with an empty allowlist for outgoing connection.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_hid_service_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_hid_service_in_allowlist
new file mode 100644
index 0000000..b9364f2
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_hid_service_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_outgoing_hid_service_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with service in allowlist for
+    outgoing connection.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_hid_service_not_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_hid_service_not_in_allowlist
new file mode 100644
index 0000000..cecdb2f
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_outgoing_hid_service_not_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_outgoing_hid_service_not_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with service not in allowlist for
+    outgoing connection.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_reconnection_ble_hid_service_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_reconnection_ble_hid_service_in_allowlist
new file mode 100644
index 0000000..318866b
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_reconnection_ble_hid_service_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_reconnection_ble_hid_service_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with service in allowlist for
+    reconnection by the device.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterEPHealth/control.ep_reconnection_ble_hid_service_not_in_allowlist b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_reconnection_ble_hid_service_not_in_allowlist
new file mode 100644
index 0000000..b2e7c4e
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterEPHealth/control.ep_reconnection_ble_hid_service_not_in_allowlist
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterEPHealth.ep_reconnection_ble_hid_service_not_in_allowlist'
+PURPOSE = ('Batch of Bluetooth enterprise policy health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+    Server side bluetooth test with service not in allowlist for
+    reconnection by the device.
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterEPHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
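
Editor's note: each of these control files calls utils.args_to_dict(args) and forwards the result to job.run_test() as args_dict. The sketch below is a rough stand-in for that helper, assuming 'key=value' style arguments; the real implementation's key normalization and error handling may differ, and the example keys are placeholders.

# Rough stand-in (assumption, not the real autotest_lib implementation) for
# turning 'key=value' control-file args into the args_dict used above.
def args_to_dict(args):
    """Parse ['key=value', ...] into a dict, skipping malformed entries."""
    result = {}
    for entry in args:
        key, sep, value = entry.partition('=')
        if sep:
            result[key.strip().lower()] = value.strip()
    return result

print(args_to_dict(['example_key=example_value', 'another_key=1']))
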
diff --git a/server/site_tests/bluetooth_AdapterLEBetterTogether/bluetooth_AdapterLEBetterTogether.py b/server/site_tests/bluetooth_AdapterLEBetterTogether/bluetooth_AdapterLEBetterTogether.py
index 59e8c5e..9f3f0d0 100644
--- a/server/site_tests/bluetooth_AdapterLEBetterTogether/bluetooth_AdapterLEBetterTogether.py
+++ b/server/site_tests/bluetooth_AdapterLEBetterTogether/bluetooth_AdapterLEBetterTogether.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/bluetooth_AdapterLEBetterTogether/control.smart_unlock_test b/server/site_tests/bluetooth_AdapterLEBetterTogether/control.smart_unlock_test
index 0313aaf..ee3fc6d 100644
--- a/server/site_tests/bluetooth_AdapterLEBetterTogether/control.smart_unlock_test
+++ b/server/site_tests/bluetooth_AdapterLEBetterTogether/control.smart_unlock_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterLEBetterTogether.smart_unlock_test'
 PURPOSE = ('Better together smart unlock test')
 CRITERIA = 'Pass the test'
-ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e, suite:bluetooth_e2e_cq'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 	The test is to verify the Bluetooth features used by Smat Unlock feature of
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/bluetooth_AdapterLEHealth.py b/server/site_tests/bluetooth_AdapterLEHealth/bluetooth_AdapterLEHealth.py
index cae17f0..2462704 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/bluetooth_AdapterLEHealth.py
+++ b/server/site_tests/bluetooth_AdapterLEHealth/bluetooth_AdapterLEHealth.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -37,7 +38,9 @@
     test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
     batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
 
-    @test_wrapper('Discovery Test', devices={"BLE_MOUSE":1})
+    @test_wrapper('Discovery Test',
+                  devices={"BLE_MOUSE": 1},
+                  supports_floss=True)
     def le_discovery_test(self):
         """Performs discovery test with mouse peripheral"""
         device = self.devices['BLE_MOUSE'][0]
@@ -49,6 +52,23 @@
         # self.test_device_name(device.address, device.name)
 
 
+    @test_wrapper('Connect Disconnect by Device Loop',
+                  devices={'BLE_MOUSE': 1},
+                  flags=['Quick Health'])
+    def le_connect_disconnect_by_device_loop(self):
+        """Run connect/disconnect loop initiated by device.
+           The test also checks that there are no undesired
+           reconnections.
+        """
+
+        device = self.devices['BLE_MOUSE'][0]
+        self.connect_disconnect_by_device_loop(
+                device=device,
+                loops=3,
+                device_type='BLE_MOUSE',
+                check_connected_method=self.test_mouse_move_in_xy)
+
+
     @test_wrapper('Connect Disconnect Loop', devices={'BLE_MOUSE':1})
     def le_connect_disconnect_loop(self):
         """Run connect/disconnect loop initiated by DUT.
@@ -61,7 +81,49 @@
         device = self.devices['BLE_MOUSE'][0]
         self.connect_disconnect_loop(device=device, loops=3)
 
-    @test_wrapper('Mouse Reports', devices={'BLE_MOUSE':1})
+
+    @test_wrapper('HID Reconnect Speed',
+                  devices={'BLE_MOUSE': 1},
+                  flags=['Quick Health'],
+                  supports_floss=True)
+    def le_hid_reconnect_speed(self):
+        """Test the speed of a LE HID device reconnect to DUT"""
+
+        device = self.devices['BLE_MOUSE'][0]
+        self.hid_reconnect_speed(device=device, device_type='BLE_MOUSE')
+
+
+    @test_wrapper('HID Report Reboot',
+                  devices={'BLE_MOUSE': 1},
+                  flags=['Quick Health'])
+    def le_hid_reports_reboot(self):
+        """Performs HID report test over reboot with BLE mouse peripheral"""
+
+        device = self.devices['BLE_MOUSE'][0]
+        self.run_hid_reports_test(
+                device,
+                check_connected_method=self.test_mouse_move_in_xy,
+                reboot=True)
+
+    @test_wrapper('HID Report Restart',
+                  devices={'BLE_MOUSE': 1},
+                  flags=['Quick Health'],
+                  supports_floss=True)
+    def le_hid_reports_restart(self):
+        """Performs HID report test over bluetoothd restart with BLE mouse
+           peripheral
+        """
+
+        device = self.devices['BLE_MOUSE'][0]
+        self.run_hid_reports_test(
+                device,
+                check_connected_method=self.test_mouse_move_in_xy,
+                restart=True)
+
+
+    @test_wrapper('Mouse Reports',
+                  devices={'BLE_MOUSE': 1},
+                  supports_floss=True)
     def le_mouse_reports(self):
         """Run all bluetooth mouse reports tests"""
 
@@ -78,7 +140,9 @@
         self.run_mouse_tests(device=device)
 
 
-    @test_wrapper('Keyboard Reports', devices={'BLE_KEYBOARD':1})
+    @test_wrapper('Keyboard Reports',
+                  devices={'BLE_KEYBOARD': 1},
+                  supports_floss=True)
     def le_keyboard_reports(self):
         """Run all bluetooth keyboard reports tests"""
 
@@ -107,7 +171,9 @@
 
         self.run_battery_reporting_tests(device=device)
 
-    @test_wrapper('Auto Reconnect', devices={'BLE_MOUSE':1})
+    @test_wrapper('Auto Reconnect',
+                  devices={'BLE_MOUSE': 1},
+                  supports_floss=True)
     def le_auto_reconnect(self):
         """LE reconnection loop by reseting HID and check reconnection"""
 
@@ -146,7 +212,7 @@
 
 
     # TODO (b/165949047) Flaky behavior on MVL/4.4 kernel causes flakiness when
-    # connection is initiated by slave. Skip the test until 2021 uprev
+    # connection is initiated by the peripheral. Skip the test until 2021 uprev
     @test_wrapper('LE secondary Test',
                   devices={'BLE_KEYBOARD': 1},
                   skip_models=LAB_VEYRON_MODELS + ['bob'])
@@ -268,7 +334,7 @@
 
 
     # TODO (b/165949047) Flaky behavior on MVL/4.4 kernel causes flakiness when
-    # connection is initiated by slave. Skip the test until 2021 uprev
+    # connection is initiated by the peripheral. Skip the test until 2021 uprev
     @test_wrapper('LE Receiver Role Test',
                   devices={'BLE_KEYBOARD': 1},
                   skip_models=LAB_VEYRON_MODELS + ['bob'])
@@ -368,7 +434,11 @@
            @param test_name: specifc test to run otherwise None to run the
                              whole batch
         """
+        self.le_connect_disconnect_by_device_loop()
         self.le_connect_disconnect_loop()
+        self.le_hid_reconnect_speed()
+        self.le_hid_reports_reboot()
+        self.le_hid_reports_restart()
         self.le_power_toggle_connect_loop()
         self.le_mouse_reports()
         self.le_keyboard_reports()
@@ -391,7 +461,8 @@
                  num_iterations=1,
                  args_dict=None,
                  test_name=None,
-                 flag='Quick Health'):
+                 flag='Quick Health',
+                 floss=False):
         """Run the batch of Bluetooth LE health tests
 
         @param host: the DUT, usually a chromebook
@@ -403,6 +474,7 @@
         self.quick_test_init(host,
                              use_btpeer=True,
                              flag=flag,
-                             args_dict=args_dict)
+                             args_dict=args_dict,
+                             floss=floss)
         self.le_health_batch_run(num_iterations, test_name)
         self.quick_test_cleanup()
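
Editor's note: the new decorator arguments above (devices, flags, supports_floss) and the floss flag threaded through run_once into quick_test_init are what let the same test bodies serve both BlueZ and Floss runs. The following is a hedged sketch of a decorator in that spirit; it is not the real BluetoothAdapterQuickTests.quick_test_test_decorator, just an illustration of attaching the per-test metadata shown in the hunk above.

import functools

def test_wrapper(test_name, devices=None, flags=None, skip_models=None,
                 supports_floss=False):
    """Illustrative decorator: record per-test metadata for a batch runner."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            # A real runner would consult the current flag, model and floss
            # mode here and skip the test when its requirements are not met.
            return func(self, *args, **kwargs)
        wrapper.test_name = test_name
        wrapper.devices = devices or {}
        wrapper.flags = flags or ['All']
        wrapper.skip_models = skip_models or []
        wrapper.supports_floss = supports_floss
        return wrapper
    return decorator
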
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.all_floss b/server/site_tests/bluetooth_AdapterLEHealth/control.all_floss
new file mode 100644
index 0000000..22e7736
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.all_floss
@@ -0,0 +1,28 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.all_floss'
+PURPOSE = ('Batch of Bluetooth LE health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = ''
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """ A Batch of Bluetooth LE health tests. """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict, floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.battery_reporting b/server/site_tests/bluetooth_AdapterLEHealth/control.battery_reporting
index 595dc53..310d16d 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.battery_reporting
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.battery_reporting
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterLEHealth.battery_reporting'
 PURPOSE = ('Test the GATT Battery Service profile')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_auto_reconnect b/server/site_tests/bluetooth_AdapterLEHealth/control.le_auto_reconnect
index f6f11ed..d9aa925 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_auto_reconnect
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_auto_reconnect
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterLEHealth.le_auto_reconnect'
 PURPOSE = ('Batch of Bluetooth LE health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e, suite:bluetooth_e2e_cq'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Toggle peer power and verify peer device reconnects. """
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_auto_reconnect.floss b/server/site_tests/bluetooth_AdapterLEHealth/control.le_auto_reconnect.floss
new file mode 100644
index 0000000..7b61898
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_auto_reconnect.floss
@@ -0,0 +1,29 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.le_auto_reconnect.floss'
+PURPOSE = ('Batch of Bluetooth LE health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """ Toggle peer power and verify peer device reconnects. """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_connect_disconnect_by_device_loop b/server/site_tests/bluetooth_AdapterLEHealth/control.le_connect_disconnect_by_device_loop
new file mode 100644
index 0000000..a360d80
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_connect_disconnect_by_device_loop
@@ -0,0 +1,50 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.le_connect_disconnect_by_device_loop'
+PURPOSE = ('Batch of Bluetooth LE health tests')
+CRITERIA = 'Pass all health test'
+# TODO: Remove ['Quick Health'] flag from AdapterLEHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """ Tests connection and disconnection by the device in a loop.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_pairing
+    - test_device_is_connected
+    - test_hid_device_created
+    - test_device_set_discoverable
+    - test_disconnection_by_device
+    - iteration start
+    - test_device_is_not_connected
+    - test_device_set_discoverable
+    - test_device_is_connected
+    - check_connected_method
+    - test_device_set_discoverable
+    - test_disconnection_by_device
+    - iteration end
+    - test_remove_pairing
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
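
Editor's note: the DOC above spells out the per-iteration subtests; read as code, its 'iteration start'/'iteration end' markers bound a loop roughly shaped like the sketch below. Method names mirror the listed subtests, but the object and argument lists are assumptions, not the real helper's signature.

# Illustrative shape only of connect_disconnect_by_device_loop.
def connect_disconnect_by_device_loop(tester, device, loops, check_connected):
    # Pairing/setup steps (test_reset_on_adapter .. test_hid_device_created)
    # are omitted; the sketch starts at the DOC's loop preamble.
    tester.test_device_set_discoverable(device, True)
    tester.test_disconnection_by_device(device)
    for _ in range(loops):
        tester.test_device_is_not_connected(device)
        tester.test_device_set_discoverable(device, True)
        tester.test_device_is_connected(device)
        check_connected(device)
        tester.test_device_set_discoverable(device, True)
        tester.test_disconnection_by_device(device)
    tester.test_remove_pairing(device)
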
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_connect_disconnect_loop b/server/site_tests/bluetooth_AdapterLEHealth/control.le_connect_disconnect_loop
index b001bf7..209772f 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_connect_disconnect_loop
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_connect_disconnect_loop
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterLEHealth.le_connect_disconnect_loop'
 PURPOSE = ('Batch of Bluetooth LE health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e, suite:bluetooth_e2e_cq'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Tests connection and disconnection by the adapter in a loop. """
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_discovery_test b/server/site_tests/bluetooth_AdapterLEHealth/control.le_discovery_test
index 4a755f9..24d937c 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_discovery_test
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_discovery_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterLEHealth.le_discovery_test'
 PURPOSE = ('Batch of Bluetooth LE health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e, suite:bluetooth_e2e_cq'
 TIME = 'SHORT'	# Just over 1 minute on blooglet
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_discovery_test.floss b/server/site_tests/bluetooth_AdapterLEHealth/control.le_discovery_test.floss
new file mode 100644
index 0000000..9aa818c
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_discovery_test.floss
@@ -0,0 +1,35 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.le_discovery_test.floss'
+PURPOSE = ('Batch of Bluetooth LE health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'	# Just over 1 minute on blooglet
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+     This test ensures that the DUT is able to discover an advertising device
+
+    - test_discover_device
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_gatt_client_attribute_browse_test b/server/site_tests/bluetooth_AdapterLEHealth/control.le_gatt_client_attribute_browse_test
index 2b21046..0000daf 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_gatt_client_attribute_browse_test
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_gatt_client_attribute_browse_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 	The test is to verify GATT client can browse the whole tree-structured
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_health b/server/site_tests/bluetooth_AdapterLEHealth/control.le_health
index f271492..0768c60 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_health
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_health
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """ A Batch of Bluetooth LE health tests. """
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reconnect_speed b/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reconnect_speed
new file mode 100644
index 0000000..e7492fb
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reconnect_speed
@@ -0,0 +1,52 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.le_hid_reconnect_speed'
+PURPOSE = 'Test the speed of an LE HID device reconnecting to the DUT.'
+CRITERIA = 'LE HID device should reconnect quickly.'
+# TODO: Remove ['Quick Health'] flag from AdapterLEHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Test the speed of an LE HID device reconnecting to the DUT.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_pairing
+    - test_device_is_connected
+    - test_hid_device_created
+    - test_device_set_discoverable
+    - test_disconnection_by_device
+    - iteration start
+    - test_device_is_not_connected
+    - test_device_set_discoverable
+    - test_device_is_connected
+    - test_hid_device_created_speed
+    - test_device_set_discoverable
+    - test_disconnection_by_device
+    - iteration end
+    - test_remove_pairing
+    - test_hid_device_reconnect_time
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
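
Editor's note: the reconnect-speed DOC above culminates in test_hid_device_created_speed and test_hid_device_reconnect_time. As a generic, hedged illustration of the measurement idea (not the real helpers), reconnect time can be captured by polling a condition and recording the elapsed time; hid_device_present in the usage comment is hypothetical.

import time

def wait_and_time(condition, timeout=30.0, poll_interval=0.1):
    """Poll condition() until it is true; return elapsed seconds, or None on timeout."""
    start = time.time()
    while time.time() - start < timeout:
        if condition():
            return time.time() - start
        time.sleep(poll_interval)
    return None

# Hypothetical usage: time how long the reconnecting HID node takes to reappear.
# elapsed = wait_and_time(lambda: hid_device_present(), timeout=30.0)
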
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reconnect_speed.floss b/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reconnect_speed.floss
new file mode 100644
index 0000000..ea3c9e2
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reconnect_speed.floss
@@ -0,0 +1,52 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.le_hid_reconnect_speed.floss'
+PURPOSE = 'Test the speed of an LE HID device reconnecting to the DUT.'
+CRITERIA = 'LE HID device should reconnect quickly.'
+# TODO: Remove ['Quick Health'] flag from AdapterLEHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Test the speed of an LE HID device reconnecting to the DUT.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_pairing
+    - test_device_is_connected
+    - test_hid_device_created
+    - test_device_set_discoverable
+    - test_disconnection_by_device
+    - iteration start
+    - test_device_is_not_connected
+    - test_device_set_discoverable
+    - test_device_is_connected
+    - test_hid_device_created_speed
+    - test_device_set_discoverable
+    - test_disconnection_by_device
+    - iteration end
+    - test_remove_pairing
+    - test_hid_device_reconnect_time
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reports_reboot b/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reports_reboot
new file mode 100644
index 0000000..0bb4832
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reports_reboot
@@ -0,0 +1,51 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.le_hid_reports_reboot'
+PURPOSE = ('Test bluetooth adapter receiving reports from '
+           'bluetooth LE HID devices after a reboot.')
+CRITERIA = 'Adapter should receive LE HID events correctly.'
+# TODO: Remove ['Quick Health'] flag from AdapterLEHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT can receive LE HID
+reports sent from a connected bluetooth device correctly before and
+after a reboot.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_pairing
+    - test_device_is_paired
+    - test_connection_by_adapter
+    - test_mouse_move_in_xy
+    - reboot
+    - test_device_is_paired
+    - test_connection_by_device
+    - test_device_name
+    - test_mouse_move_in_xy
+    - test_disconnection_by_adapter
+    - test_remove_pairing
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reports_restart b/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reports_restart
new file mode 100644
index 0000000..38079f2
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reports_restart
@@ -0,0 +1,50 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.le_hid_reports_restart'
+PURPOSE = ('Test bluetooth adapter receiving reports from '
+           'bluetooth LE HID devices after a bluetoothd restart.')
+CRITERIA = 'Adapter should receive LE HID events correctly.'
+# TODO: Remove ['Quick Health'] flag from AdapterLEHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT can receive LE HID
+reports sent from a connected bluetooth device correctly before and
+after a bluetoothd restart.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_pairing
+    - test_device_is_paired
+    - test_connection_by_adapter
+    - test_mouse_move_in_xy
+    - test_stop_bluetoothd
+    - test_start_bluetoothd
+    - test_device_is_connected
+    - test_mouse_move_in_xy
+    - test_disconnection_by_adapter
+    - test_remove_pairing
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reports_restart.floss b/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reports_restart.floss
new file mode 100644
index 0000000..2b40b6d
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_hid_reports_restart.floss
@@ -0,0 +1,50 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.le_hid_reports_restart.floss'
+PURPOSE = ('Test bluetooth adapter receiving reports from '
+           'bluetooth LE HID devices after a bluetoothd restart.')
+CRITERIA = 'Adapter should receive LE HID events correctly.'
+# TODO: Remove ['Quick Health'] flag from AdapterLEHealth when moving to stable.
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+Verify that the bluetooth adapter of the DUT could receive LE HID
+reports sent from a connected bluetooth device correctly before and
+after a bluetoothd restart.
+
+Specifically, the following subtests are executed in this autotest.
+    - test_reset_on_adapter
+    - test_pairable
+    - test_discover_device
+    - test_pairing
+    - test_device_is_paired
+    - test_connection_by_adapter
+    - test_mouse_move_in_xy
+    - test_stop_bluetoothd
+    - test_start_bluetoothd
+    - test_device_is_connected
+    - test_mouse_move_in_xy
+    - test_disconnection_by_adapter
+    - test_remove_pairing
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
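+    # floss=True runs this variant against the Floss Bluetooth stack
+    # rather than the default stack.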
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_keyboard_reports b/server/site_tests/bluetooth_AdapterLEHealth/control.le_keyboard_reports
index 2b06c9f..b6ba1c9 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_keyboard_reports
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_keyboard_reports
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_keyboard_reports.floss b/server/site_tests/bluetooth_AdapterLEHealth/control.le_keyboard_reports.floss
new file mode 100644
index 0000000..d4bb747
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_keyboard_reports.floss
@@ -0,0 +1,43 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.le_keyboard_reports.floss'
+PURPOSE = ('Batch of Bluetooth LE health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = 'suite:bluetooth_floss,suite:bluetooth_floss_cq'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+     Server side bluetooth tests about sending bluetooth HID reports.
+
+     This test tries to send HID reports to a DUT and verifies if the DUT
+     could receive the reports correctly.
+
+
+    - test_pairing
+    - test_connection_by_adapter
+    - test_keyboard_input_from_trace
+    - test_disconnection_by_adapter
+    - test_remove_pairing
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_mouse_reports b/server/site_tests/bluetooth_AdapterLEHealth/control.le_mouse_reports
index 94a0fde..f067ffe 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_mouse_reports
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_mouse_reports
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterLEHealth.le_mouse_reports'
 PURPOSE = ('Batch of Bluetooth LE health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e, suite:bluetooth_e2e_cq, suite:bluetooth_floss_cq'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_mouse_reports.floss b/server/site_tests/bluetooth_AdapterLEHealth/control.le_mouse_reports.floss
new file mode 100644
index 0000000..3dc0443
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_mouse_reports.floss
@@ -0,0 +1,52 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLEHealth.le_mouse_reports.floss'
+PURPOSE = ('Batch of Bluetooth LE health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = 'suite:bluetooth_floss,suite:bluetooth_floss_cq'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+
+     Server side bluetooth tests about sending bluetooth HID reports.
+
+     This test tries to send HID reports to a DUT and verifies if the DUT
+     could receive the reports correctly. For the time being, only bluetooth
+     mouse events are tested. Bluetooth keyboard events will be supported
+     later.
+
+
+    - test_pairing
+    - test_connection_by_adapter
+    - test_mouse_left_click
+    - test_mouse_right_click
+    - test_mouse_move_in_x
+    - test_mouse_move_in_y
+    - test_mouse_move_in_xy
+    - test_mouse_scroll_down
+    - test_mouse_scroll_up
+    - test_mouse_click_and_drag
+    - test_disconnection_by_adapter
+    - test_remove_pairing
+
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLEHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_power_toggle_connect_loop b/server/site_tests/bluetooth_AdapterLEHealth/control.le_power_toggle_connect_loop
index ea3cd5f..e04e00a 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_power_toggle_connect_loop
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_power_toggle_connect_loop
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Toggle adapter power and verify peer devices can reconnect. """
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_receiver_adv b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_receiver_adv
index a2387ad..318e569 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_receiver_adv
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_receiver_adv
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """Tests HID capability during Receiver role advertising
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_receiver_connection b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_receiver_connection
index 02eabef..32611ee 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_receiver_connection
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_receiver_connection
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """Tests HID capability while connected to Nearby device
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_sender b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_sender
index ede8c4c..5afe9bf 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_sender
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_hid_during_sender
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """Tests HID capability during Sender role
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_primary_before_secondary b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_primary_before_secondary
index 64aae10..757afef 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_primary_before_secondary
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_primary_before_secondary
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """Tests primary and then secondary connection capability
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_receiver b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_receiver
index b4a1d05..3d46b11 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_receiver
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_receiver
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """This tests that the basic receiver role can be handled by the DUT."""
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_receiver_during_hid b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_receiver_during_hid
index fefe43b..c737e8c 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_receiver_during_hid
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_receiver_during_hid
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """Tests Receiver role during HID Use
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_secondary b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_secondary
index 3a12c38..e684491 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_secondary
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_secondary
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """Tests basic secondary connection capability"""
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_secondary_before_primary b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_secondary_before_primary
index a6ce107..94aeb8e 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_secondary_before_primary
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_secondary_before_primary
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """Tests secondary and then primary connection capability
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_sender b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_sender
index e98d4d5..c373b13 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_sender
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_sender
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """This test verifies that the DUT can handle the basic Sender role."""
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_sender_during_hid b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_sender_during_hid
index c11b384..2fd5ff8 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_sender_during_hid
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.le_role_sender_during_hid
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """Tests Sender role capability during HID Use
 
diff --git a/server/site_tests/bluetooth_AdapterLEHealth/control.stress b/server/site_tests/bluetooth_AdapterLEHealth/control.stress
index cd428bf..7010e16 100644
--- a/server/site_tests/bluetooth_AdapterLEHealth/control.stress
+++ b/server/site_tests/bluetooth_AdapterLEHealth/control.stress
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterLEHealth.stress'
 PURPOSE = ('Batch of Bluetooth LE health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth_stress'
+ATTRIBUTES = ''
 TIME = 'LONG'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth LE health tests. This test is written as a batch
diff --git a/server/cros/packet_generation/__init__.py b/server/site_tests/bluetooth_AdapterLLTHealth/__init__.py
similarity index 100%
copy from server/cros/packet_generation/__init__.py
copy to server/site_tests/bluetooth_AdapterLLTHealth/__init__.py
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/bluetooth_AdapterLLTHealth.py b/server/site_tests/bluetooth_AdapterLLTHealth/bluetooth_AdapterLLTHealth.py
new file mode 100644
index 0000000..98e6294
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/bluetooth_AdapterLLTHealth.py
@@ -0,0 +1,618 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""A Batch of Bluetooth LE LLT health tests"""
+
+from autotest_lib.server.cros.bluetooth import advertisements_data
+from autotest_lib.server.cros.bluetooth.\
+     bluetooth_adapter_controller_role_tests \
+     import bluetooth_AdapterControllerRoleTests
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests \
+     import BluetoothAdapterQuickTests
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_hidreports_tests \
+     import BluetoothAdapterHIDReportTests
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_better_together \
+     import BluetoothAdapterBetterTogether
+
+DEFAULT_MIN_ADV_INTERVAL = 200
+DEFAULT_MAX_ADV_INTERVAL = 500
+
+
+class bluetooth_AdapterLLTHealth(BluetoothAdapterHIDReportTests,
+                                 bluetooth_AdapterControllerRoleTests,
+                                 BluetoothAdapterBetterTogether):
+    """A Batch of Bluetooth LE LLT health tests. This test is written
+       as a batch of tests in order to reduce test time, since auto-test
+       ramp up time is costly. The batch is using BluetoothAdapterQuickTests
+       wrapper methods to start and end a test and a batch of tests.
+
+       This class can be called to run the entire test batch or to run a
+       specific test only
+    """
+
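+    # Decorators from BluetoothAdapterQuickTests that start and end each
+    # llt_* test and the batch as a whole.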
+    test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
+    batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
+
+
+    def discover_and_pair(self, device):
+        """Discovers and pairs given device. Automatically connects too.
+
+           @param device: meta object for bt peer device
+        """
+        self.test_discover_device(device.address)
+        self.test_pairing(device.address, device.pin, trusted=True)
+        self.test_connection_by_adapter(device.address)
+
+
+    def start_connectable_advertisement(self):
+        """ Initiate connectable advertising from DUT """
+        # Register and start advertising instance
+        # We ignore failure because the test isn't able to verify
+        # the min/max advertising intervals, but this is ok.
+        self.test_reset_advertising()
+        self.test_set_advertising_intervals(DEFAULT_MIN_ADV_INTERVAL,
+                                            DEFAULT_MAX_ADV_INTERVAL)
+        self.test_register_advertisement(advertisements_data.ADVERTISEMENTS[0],
+                                         1)
+
+
+    def pair_and_test_central(self, peripheral):
+        """Connects DUT as central to a peripheral device.
+
+           @param peripheral: meta object for bt peer device
+        """
+        # Pair the central device first -
+        # necessary for later connection to peripheral
+        self.pair_adapter_to_device(peripheral)
+        self.test_device_set_discoverable(peripheral, False)
+
+        self.start_connectable_advertisement()
+        # Discover DUT from peer
+        self.test_discover_by_device(peripheral)
+        # Connect to DUT from peer, putting DUT in peripheral role
+        self.test_connection_by_device(peripheral)
+        self.test_reset_advertising()
+
+
+    @test_wrapper('LLT: 1 Central 1 Peripheral. Order of connection CP',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1
+                  })
+    def llt_1c1p_connect_cp(self):
+        """Tests llt with two peer devices.
+           Connects DUT as central to first device
+           and as peripheral to second device,
+           sends small amount of data over the connection
+        """
+
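+        # Require a controller that supports the combined
+        # central-peripheral role.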
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        central = self.devices['BLE_MOUSE'][0]
+        peripheral = self.devices['BLE_KEYBOARD'][0]
+        # Establish connection from DUT as LE Central
+        self.discover_and_pair(central)
+
+        self.test_hid_device_created(central.address)
+        # Verify data transfer over the DUT LE central Connection
+        self.test_mouse_left_click(central)
+
+        # Now establish second connection with DUT as LE Peripheral
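+        # The advertising helpers use self.bluetooth_le_facade, so point
+        # it at the main facade before the DUT starts advertising.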
+        self.bluetooth_le_facade = self.bluetooth_facade
+        self.pair_and_test_central(peripheral)
+        self.run_keyboard_tests(peripheral)
+
+        # Verify data over LE Central connection again
+        self.test_mouse_left_click(central)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central.address)
+        self.test_disconnection_by_device(peripheral)
+
+
+    @test_wrapper('LLT: 1 Central 1 Peripheral. Order of connection PC',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1
+                  })
+    def llt_1c1p_connect_pc(self):
+        """Tests llt with two peer devices,
+           Connects DUT as peripheral to first device
+           and as central to second device,
+           sends small amount of data over the connection
+        """
+
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        central = self.devices['BLE_MOUSE'][0]
+        peripheral = self.devices['BLE_KEYBOARD'][0]
+        # Establish the first connection with DUT as LE Peripheral
+        self.bluetooth_le_facade = self.bluetooth_facade
+
+        # Connect to DUT from peer, putting DUT in peripheral role
+        # Try transferring data over connection
+        self.pair_and_test_central(peripheral)
+        self.run_keyboard_tests(peripheral)
+
+        # Establish second connection from DUT as LE Central
+        self.discover_and_pair(central)
+        self.test_hid_device_created(central.address)
+        # Verify data transfer over the DUT LE Central Connection
+        self.test_mouse_left_click(central)
+        # Verify LE peripheral connection again
+        self.run_keyboard_tests(peripheral)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central.address)
+        self.test_disconnection_by_device(peripheral)
+
+
+    @test_wrapper('LLT: 1 Central 1 Peripheral while DUT advertising.'
+                  'Order of connection PC',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1
+                  })
+    def llt_1c1p_connect_pc_while_adv(self):
+        """Tests llt with two peer devices, while DUT advertising.
+           Connects DUT while advertising
+           as peripheral to first device
+           and as central to second device,
+           sends small amount of data over the connection
+        """
+
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        central = self.devices['BLE_MOUSE'][0]
+        peripheral = self.devices['BLE_KEYBOARD'][0]
+        # Establish the first connection with DUT as LE Peripheral
+        self.bluetooth_le_facade = self.bluetooth_facade
+
+        # Connect to DUT from peer, putting DUT in peripheral role
+        # Try transferring data over connection
+        self.pair_and_test_central(peripheral)
+        self.run_keyboard_tests(peripheral)
+
+        # Establish second connection from DUT as LE Central
+        # while advertising in progress
+        self.start_connectable_advertisement()
+        self.discover_and_pair(central)
+        self.test_hid_device_created(central.address)
+
+        # Verify data transfer over the DUT LE Central Connection
+        self.test_mouse_left_click(central)
+        # Verify LE Peripheral connection again
+        self.run_keyboard_tests(peripheral)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central.address)
+        self.test_disconnection_by_device(peripheral)
+        self.test_reset_advertising()
+
+
+    @test_wrapper('LLT: 2 Central 1 Peripheral. Order of connection CCP',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1,
+                          'BLE_PHONE': 1
+                  })
+    def llt_2c1p_connect_ccp(self):
+        """Tests llt with three peer devices.
+           Connects DUT as central to first and second devices,
+           connects DUT as peripheral to third device,
+           sends small amount of data over the connection
+        """
+
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        central_1 = self.devices['BLE_PHONE'][0]
+        central_2 = self.devices['BLE_MOUSE'][0]
+        peripheral = self.devices['BLE_KEYBOARD'][0]
+        # Establish the first connection from DUT as LE Central
+        self.discover_and_pair(central_2)
+        self.test_hid_device_created(central_2.address)
+
+        # Verify data transfer over the first DUT LE Central connection
+        self.test_mouse_left_click(central_2)
+
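+        # Establish the second LE Central connection through the Better
+        # Together smart unlock flow with the phone peer.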
+        central_1.RemoveDevice(self.bluetooth_facade.address)
+        self.test_smart_unlock_llt(address=central_1.address)
+
+        # Establish third connection with DUT as LE Peripheral
+        self.bluetooth_le_facade = self.bluetooth_facade
+        self.pair_and_test_central(peripheral)
+        self.run_keyboard_tests(peripheral)
+
+        # Verify data transfer over two DUT LE Central Connections
+        self.test_mouse_left_click(central_2)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central_1.address)
+        self.test_disconnection_by_adapter(central_2.address)
+        self.test_disconnection_by_device(peripheral)
+
+
+    @test_wrapper('LLT: 2 Central 1 Peripheral. Order of connection PCC',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1,
+                          'BLE_PHONE': 1
+                  })
+    def llt_2c1p_connect_pcc(self):
+        """Tests llt with three peer devices.
+           Connects DUT as peripheral to first device
+           and as central to second and third device,
+           sends small amount of data over the connection
+        """
+
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        central_1 = self.devices['BLE_PHONE'][0]
+        central_2 = self.devices['BLE_MOUSE'][0]
+        peripheral = self.devices['BLE_KEYBOARD'][0]
+
+        # Establish the first connection with DUT as LE Peripheral
+        self.bluetooth_le_facade = self.bluetooth_facade
+
+        # Connect to DUT from peer, putting DUT in peripheral role
+        # Try transferring data over connection
+        self.pair_and_test_central(peripheral)
+        self.run_keyboard_tests(peripheral)
+
+        # Establish connections from DUT as LE Central
+        self.discover_and_pair(central_2)
+        self.test_hid_device_created(central_2.address)
+
+        # Verify data transfer over two DUT LE Central Connections
+        self.test_mouse_left_click(central_2)
+
+        # Establish third connection
+        central_1.RemoveDevice(self.bluetooth_facade.address)
+        self.test_smart_unlock_llt(address=central_1.address)
+
+        # Verify once again data transfer over DUT LE Peripheral connection
+        self.run_keyboard_tests(peripheral)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central_1.address)
+        self.test_disconnection_by_adapter(central_2.address)
+        self.test_disconnection_by_device(peripheral)
+
+
+    @test_wrapper('LLT: 2 Central 1 Peripheral. Order of connection CPC',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1,
+                          'BLE_PHONE': 1
+                  })
+    def llt_2c1p_connect_cpc(self):
+        """Tests llt with three peer devices.
+           Connects DUT as central to first device,
+           as peripheral to second and as central to third device,
+           sends small amount of data over the connection
+        """
+
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        central_1 = self.devices['BLE_PHONE'][0]
+        central_2 = self.devices['BLE_MOUSE'][0]
+        peripheral = self.devices['BLE_KEYBOARD'][0]
+
+        # Establish the first connection with DUT as LE Central
+        central_1.RemoveDevice(self.bluetooth_facade.address)
+        self.test_smart_unlock_llt(address=central_1.address)
+
+        # Establish the second connection with DUT as LE Peripheral
+        self.bluetooth_le_facade = self.bluetooth_facade
+
+        # Connect to DUT from peer, putting DUT in peripheral role
+        # Try transferring data over connection
+        self.pair_and_test_central(peripheral)
+        self.run_keyboard_tests(peripheral)
+
+        # Establish third connections from DUT as LE Central
+        self.discover_and_pair(central_2)
+        self.test_hid_device_created(central_2.address)
+
+        # Verify data transfer over the second LE Central connection
+        self.test_mouse_left_click(central_2)
+        # Verify once again data transfer over DUT LE Peripheral connection
+        self.run_keyboard_tests(peripheral)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central_1.address)
+        self.test_disconnection_by_adapter(central_2.address)
+        self.test_disconnection_by_device(peripheral)
+
+
+    @test_wrapper('LLT: 2 Central 1 Peripheral while DUT advertising.'
+                  'Order of connection PCC',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1,
+                          'BLE_PHONE': 1
+                  })
+    def llt_2c1p_connect_pcc_while_adv(self):
+        """Tests llt with three peer devices.
+           Connects DUT as peripheral to first device
+           and as central to second and third device while advertising,
+           sends small amount of data over the connection
+        """
+
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        central_1 = self.devices['BLE_MOUSE'][0]
+        central_2 = self.devices['BLE_PHONE'][0]
+        peripheral = self.devices['BLE_KEYBOARD'][0]
+
+        # Establish the first connection with DUT as LE Peripheral
+        self.bluetooth_le_facade = self.bluetooth_facade
+
+        # Connect to DUT from peer, putting DUT in peripheral role
+        # Try transferring data over connection
+        self.pair_and_test_central(peripheral)
+        self.run_keyboard_tests(peripheral)
+
+        # Connect as first LE Central while DUT is advertising
+        self.start_connectable_advertisement()
+        self.discover_and_pair(central_1)
+        self.test_hid_device_created(central_1.address)
+
+        # Establish second LE connection from DUT as LE Central
+        central_2.RemoveDevice(self.bluetooth_facade.address)
+        self.test_smart_unlock_llt(address=central_2.address)
+
+        # Verify data transfer over the first LE Central connection
+        self.test_mouse_left_click(central_1)
+        # Verify once again data transfer over DUT LE Peripheral connection
+        self.run_keyboard_tests(peripheral)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central_1.address)
+        self.test_disconnection_by_adapter(central_2.address)
+        self.test_disconnection_by_device(peripheral)
+        self.test_reset_advertising()
+
+
+    @test_wrapper('LLT: 2 Central 1 Peripheral while DUT Advertising.'
+                  'Order of connection CPC',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1,
+                          'BLE_PHONE': 1
+                  })
+    def llt_2c1p_connect_cpc_while_adv(self):
+        """Tests llt with three peer devices.
+           Connects DUT while advertising as central to first device,
+           as peripheral to second and as central to third device,
+           sends small amount of data over the connection
+        """
+
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        central_1 = self.devices['BLE_PHONE'][0]
+        central_2 = self.devices['BLE_MOUSE'][0]
+        peripheral = self.devices['BLE_KEYBOARD'][0]
+
+        self.bluetooth_le_facade = self.bluetooth_facade
+        # Establish the first connection with DUT as LE Central
+        # while advertising in progress
+        self.start_connectable_advertisement()
+        central_1.RemoveDevice(self.bluetooth_facade.address)
+        self.test_smart_unlock_llt(address=central_1.address)
+
+        # Establish the second connection with DUT as LE Peripheral
+        # Try transferring data over connection
+        self.pair_and_test_central(peripheral)
+        self.run_keyboard_tests(peripheral)
+
+        # Establish third connections from DUT as LE Central
+        self.discover_and_pair(central_2)
+        self.test_hid_device_created(central_2.address)
+
+        # Verify data transfer over the second LE Central connection
+        self.test_mouse_left_click(central_2)
+        # Verify once again data transfer over DUT LE Peripheral connection
+        self.run_keyboard_tests(peripheral)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central_1.address)
+        self.test_disconnection_by_adapter(central_2.address)
+        self.test_disconnection_by_device(peripheral)
+        self.test_reset_advertising()
+
+
+    @test_wrapper('LLT: 1 Central 2 Peripheral. Order of connection CPP',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1,
+                          'BLE_PHONE': 1
+                  })
+    def llt_2p1c_connect_cpp(self):
+        """Tests llt with three peer devices.
+           Connects DUT as central to first device
+           and as peripheral to second and third devices,
+           sends small amount of data over the connection
+        """
+
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        peripheral_1 = self.devices['BLE_KEYBOARD'][0]
+        central_peer = self.devices['BLE_PHONE'][0]
+        peripheral_2 = self.devices['BLE_MOUSE'][0]
+
+        # Establish connection from DUT as LE Central
+        central_peer.RemoveDevice(self.bluetooth_facade.address)
+        self.test_smart_unlock_llt(address=central_peer.address)
+
+        self.bluetooth_le_facade = self.bluetooth_facade
+
+        # Connect to DUT from peer, putting DUT in peripheral role
+        # Try transferring data over connection
+        self.pair_and_test_central(peripheral_1)
+        self.run_keyboard_tests(peripheral_1)
+
+        # Establish and Verify second LE peripheral connection
+        self.pair_and_test_central(peripheral_2)
+
+        # Try transferring data over connection
+        self.test_mouse_left_click(peripheral_2)
+        # Verify traffic from LE Peripheral connections again
+        self.run_keyboard_tests(peripheral_1)
+        self.test_mouse_left_click(peripheral_2)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central_peer.address)
+        self.test_disconnection_by_device(peripheral_1)
+        self.test_disconnection_by_device(peripheral_2)
+
+
+    @test_wrapper('LLT: 1 Central 2 Peripheral. Order of connection PCP',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1,
+                          'BLE_PHONE': 1
+                  })
+    def llt_2p1c_connect_pcp(self):
+        """Tests llt with three peer devices.
+           Connects DUT as peripheral to first device,
+           as central to second and as peripheral to third devices,
+           sends small amount of data over the connection
+        """
+
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        peripheral_1 = self.devices['BLE_KEYBOARD'][0]
+        central_peer = self.devices['BLE_PHONE'][0]
+        peripheral_2 = self.devices['BLE_MOUSE'][0]
+
+        self.bluetooth_le_facade = self.bluetooth_facade
+
+        # Connect to DUT from peer, putting DUT in peripheral role
+        # Try transferring data over connection
+        self.pair_and_test_central(peripheral_1)
+        self.run_keyboard_tests(peripheral_1)
+
+        # Establish connection from DUT as LE Central
+        central_peer.RemoveDevice(self.bluetooth_facade.address)
+        self.test_smart_unlock_llt(address=central_peer.address)
+
+        # Establish and Verify second LE peripheral connection
+        self.pair_and_test_central(peripheral_2)
+
+        # Try transferring data over connection
+        self.test_mouse_left_click(peripheral_2)
+        # Verify traffic from LE Peripheral connections again
+        self.run_keyboard_tests(peripheral_1)
+        self.test_mouse_left_click(peripheral_2)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central_peer.address)
+        self.test_disconnection_by_device(peripheral_1)
+        self.test_disconnection_by_device(peripheral_2)
+
+
+    @test_wrapper('LLT: 1 Central 2 Peripheral. Order of connection PPC',
+                  devices={
+                          'BLE_KEYBOARD': 1,
+                          'BLE_MOUSE': 1,
+                          'BLE_PHONE': 1
+                  })
+    def llt_2p1c_connect_ppc(self):
+        """Tests llt with three peer devices.
+           Connects DUT as peripheral to first and second devices
+           and as central to third device,
+           sends small amount of data over the connection
+        """
+
+        self.verify_controller_capability(
+                        required_roles=['central-peripheral'])
+
+        peripheral_1 = self.devices['BLE_KEYBOARD'][0]
+        central_peer = self.devices['BLE_PHONE'][0]
+        peripheral_2 = self.devices['BLE_MOUSE'][0]
+
+        self.bluetooth_le_facade = self.bluetooth_facade
+
+        # Connect to DUT from peer, putting DUT in peripheral role
+        # Try transferring data over connection
+        self.pair_and_test_central(peripheral_1)
+        self.run_keyboard_tests(peripheral_1)
+
+        # Establish and Verify second LE peripheral connection
+        self.pair_and_test_central(peripheral_2)
+
+        # Try transferring data over connection
+        self.test_mouse_left_click(peripheral_2)
+
+        # Establish connection from DUT as LE Central
+        central_peer.RemoveDevice(self.bluetooth_facade.address)
+        self.test_smart_unlock_llt(address=central_peer.address)
+        # Verify traffic from LE Peripheral connections again
+        self.run_keyboard_tests(peripheral_1)
+        self.test_mouse_left_click(peripheral_2)
+
+        # Disconnect connections from DUT
+        self.test_disconnection_by_adapter(central_peer.address)
+        self.test_disconnection_by_device(peripheral_1)
+        self.test_disconnection_by_device(peripheral_2)
+
+
+    @batch_wrapper('LLT Health')
+    def llt_health_batch_run(self, num_iterations=1, test_name=None):
+        """Run the LE LLT health test batch or a specific given test.
+           The wrapper of this method is implemented in batch_decorator.
+           Using the decorator, a test batch method implements only its
+           core test invocations and lets the decorator handle the wrapper,
+           which takes care of whether to run a specific test or the batch
+           as a whole, and of running the batch for the given iterations.
+
+           @param num_iterations: how many iterations to run
+           @param test_name: specific test to run otherwise None to run the
+                             whole batch
+        """
+        self.llt_1c1p_connect_cp()
+        self.llt_1c1p_connect_pc()
+        self.llt_1c1p_connect_pc_while_adv()
+        self.llt_2c1p_connect_ccp()
+        self.llt_2c1p_connect_pcc()
+        self.llt_2c1p_connect_cpc()
+        self.llt_2c1p_connect_pcc_while_adv()
+        self.llt_2c1p_connect_cpc_while_adv()
+        self.llt_2p1c_connect_cpp()
+        self.llt_2p1c_connect_pcp()
+        self.llt_2p1c_connect_ppc()
+
+
+    def run_once(self,
+                 host,
+                 num_iterations=1,
+                 args_dict=None,
+                 test_name=None,
+                 flag='Quick Health'):
+        """Run the batch of Bluetooth LE LLT health tests
+
+        @param host: the DUT, usually a chromebook
+        @param num_iterations: the number of rounds to execute the test
+        @param args_dict: the test arguments dictionary from the control file
+        @param test_name: the test to run, or None for all tests
+        @param flag: the test flag, e.g. 'Quick Health'
+        """
+
+        # Initialize and run the test batch or the requested specific test
+        self.quick_test_init(host,
+                             use_btpeer=True,
+                             flag=flag,
+                             args_dict=args_dict)
+        self.llt_health_batch_run(num_iterations, test_name)
+        self.quick_test_cleanup()
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_1c1p_connect_cp b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_1c1p_connect_cp
new file mode 100644
index 0000000..da158ea
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_1c1p_connect_cp
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_1c1p_connect_cp'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with two peer devices,
+      connects DUT as central and peripheral,
+      order of connection is central-peripheral
+      """
+
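+# 'args' comes from the autotest control file environment; turn its
+# key=value entries into a dictionary for the test.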
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
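+    # NAME.split('.')[1] selects the specific batch test to run,
+    # here 'llt_1c1p_connect_cp'.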
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_1c1p_connect_pc b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_1c1p_connect_pc
new file mode 100644
index 0000000..15afc9b
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_1c1p_connect_pc
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_1c1p_connect_pc'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with two peer devices,
+      connects DUT as central and peripheral,
+      order of connection is peripheral-central
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_1c1p_connect_pc_while_adv b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_1c1p_connect_pc_while_adv
new file mode 100644
index 0000000..f18ded1
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_1c1p_connect_pc_while_adv
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_1c1p_connect_pc_while_adv'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with two peer devices,
+      connects DUT as central and peripheral while advertising,
+      order of connection is peripheral-central
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_ccp b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_ccp
new file mode 100644
index 0000000..de85ccc
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_ccp
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_2c1p_connect_ccp'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:3'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with three peer devices,
+      connects DUT as central and peripheral,
+      order of connection is central-central-peripheral
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_cpc b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_cpc
new file mode 100644
index 0000000..efaca54
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_cpc
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_2c1p_connect_cpc'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:3'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with three peer devices,
+      connects DUT as central and peripheral,
+      order of connection is central-peripheral-central
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_cpc_while_adv b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_cpc_while_adv
new file mode 100644
index 0000000..69eea0a
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_cpc_while_adv
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_2c1p_connect_cpc_while_adv'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:3'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with three peer devices,
+      connects DUT as central and peripheral while advertising,
+      order of connection is central-peripheral-central
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_pcc b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_pcc
new file mode 100644
index 0000000..e1d68ef
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_pcc
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_2c1p_connect_pcc'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:3'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with three peer devices,
+      connects DUT as central and peripheral,
+      order of connection is peripheral-central-central
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_pcc_while_adv b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_pcc_while_adv
new file mode 100644
index 0000000..51b0e88
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2c1p_connect_pcc_while_adv
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_2c1p_connect_pcc_while_adv'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:3'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with three peer devices,
+      connects DUT as central and peripheral while advertising,
+      order of connection is peripheral-central-central
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2p1c_connect_cpp b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2p1c_connect_cpp
new file mode 100644
index 0000000..e77a71c
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2p1c_connect_cpp
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_2p1c_connect_cpp'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:3'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with three peer devices,
+      connects DUT as central and peripheral,
+      order of connection is central-peripheral-peripheral
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2p1c_connect_pcp b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2p1c_connect_pcp
new file mode 100644
index 0000000..edfb3bd
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2p1c_connect_pcp
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_2p1c_connect_pcp'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:3'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with three peer devices,
+      connects DUT as central and peripheral,
+      order of connection is peripheral-central-peripheral
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2p1c_connect_ppc b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2p1c_connect_ppc
new file mode 100644
index 0000000..a6235dc
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_2p1c_connect_ppc
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_2p1c_connect_ppc'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass the health test'
+ATTRIBUTES = '' # TODO(b/186266241)
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:3'
+PY_VERSION = 3
+
+DOC = """
+      Tests link layer topology with three peer devices,
+      connects DUT as central and peripheral,
+      order of connection is peripheral-peripheral-central
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict,
+                  test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_health b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_health
new file mode 100644
index 0000000..d3867f5
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.llt_health
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.llt_health'
+PURPOSE = ('Batch of Bluetooth LE LLT health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = ''
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:3'
+PY_VERSION = 3
+
+DOC = """
+    A Batch of Bluetooth LE health tests. This test is written as a batch
+    of tests in order to reduce test time, since auto-test ramp up time is
+    costly. The batch is using BluetoothAdapterQuickTests wrapper methods to
+    start and end a test and a batch of tests.
+
+    This class can be called to run the entire test batch or to run a
+    specific test only - todo http://b/132199238 [autotest BT quick health]
+    add support for running a single test in quick test
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=1, args_dict=args_dict)
+
+parallel_simple(run, machines)
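The DOC above notes that the batch can run either every LLT test or a single named test (the per-test control files pass test_name=NAME.split('.')[1]). The dispatch itself lives in the BluetoothAdapterQuickTests decorators, which are not part of this diff; the sketch below only illustrates the idea, and every name in it is assumed rather than taken from the real decorators.

    # Assumed shape of the batch/test decorators; illustration only.
    def batch_wrapper_sketch(batch_method):
        def wrapper(self, num_iterations=1, test_name=None):
            # Remember the single test requested by the control file, if any
            # (e.g. 'llt_3p_connect_pcp'); iteration handling is elided.
            self._requested_test = test_name
            return batch_method(self, num_iterations, test_name)
        return wrapper

    def test_wrapper_sketch(test_name, **kwargs):
        def decorator(test_method):
            def wrapper(self):
                requested = getattr(self, '_requested_test', None)
                # Run when the whole batch was requested or this test's
                # method was named explicitly by the control file.
                if requested in (None, test_method.__name__):
                    return test_method(self)
            return wrapper
        return decorator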
diff --git a/server/site_tests/bluetooth_AdapterLLTHealth/control.stress b/server/site_tests/bluetooth_AdapterLLTHealth/control.stress
new file mode 100644
index 0000000..cf00a3b
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterLLTHealth/control.stress
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterLLTHealth.stress'
+PURPOSE = ('Batch of Bluetooth LLT health tests')
+CRITERIA = 'Pass all health tests'
+ATTRIBUTES = ''
+TIME = 'LONG'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:3'
+PY_VERSION = 3
+
+DOC = """
+    A batch of Bluetooth LE LLT health tests. This test is written as a batch
+    of tests in order to reduce test time, since autotest ramp-up time is
+    costly. The batch uses BluetoothAdapterQuickTests wrapper methods to
+    start and end a test and a batch of tests.
+
+    This class can be called to run the entire test batch or to run a
+    specific test only - todo http://b/132199238 [autotest BT quick health]
+    add support for running a single test in quick test
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterLLTHealth', host=host,
+                  num_iterations=15, args_dict=args_dict)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterMDHealth/bluetooth_AdapterMDHealth.py b/server/site_tests/bluetooth_AdapterMDHealth/bluetooth_AdapterMDHealth.py
index f129e31..d184142 100644
--- a/server/site_tests/bluetooth_AdapterMDHealth/bluetooth_AdapterMDHealth.py
+++ b/server/site_tests/bluetooth_AdapterMDHealth/bluetooth_AdapterMDHealth.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -64,7 +65,7 @@
 
 
     @test_wrapper('One classic and one BLE connection',
-                  devices={'BLE_MOUSE':1, 'KEYBOARD':1})
+                  devices={'BLE_MOUSE':1, 'KEYBOARD':1}, supports_floss=True)
     def md_two_connections_test(self):
         """test whether DUT can connect to classic keyboard and ble mouse at the
            same time
@@ -78,7 +79,11 @@
 
 
     @test_wrapper('Two BLE connections',
-                  devices={'BLE_MOUSE':1, 'BLE_KEYBOARD':1})
+                  devices={
+                          'BLE_MOUSE': 1,
+                          'BLE_KEYBOARD': 1
+                  },
+                  supports_floss=True)
     def md_two_ble_hid_connections_test(self):
         """ test whether DUT can connect to ble keyboard and ble mouse at the
             same time
@@ -91,7 +96,12 @@
         self.pair_and_test_connection(devices)
 
 
-    @test_wrapper('Two classic connections', devices={'MOUSE':1, 'KEYBOARD':1})
+    @test_wrapper('Two classic connections',
+                  devices={
+                          'MOUSE': 1,
+                          'KEYBOARD': 1
+                  },
+                  supports_floss=True)
     def md_two_cl_hid_connections_test(self):
         """ test whether DUT can connect to classic mouse and classic keyboard
             at the same time
@@ -127,7 +137,8 @@
                  num_iterations=1,
                  args_dict=None,
                  test_name=None,
-                 flag='Quick Health'):
+                 flag='Quick Health',
+                 floss=False):
         """Run the batch of Bluetooth stand health tests
 
         @param host: the DUT, usually a chromebook
@@ -137,6 +148,7 @@
         self.quick_test_init(host,
                              use_btpeer=True,
                              flag=flag,
-                             args_dict=args_dict)
+                             args_dict=args_dict,
+                             floss=floss)
         self.md_health_batch_run(num_iterations, test_name)
         self.quick_test_cleanup()
diff --git a/server/site_tests/bluetooth_AdapterMDHealth/control b/server/site_tests/bluetooth_AdapterMDHealth/control
index d8c0cc4..96c2180 100644
--- a/server/site_tests/bluetooth_AdapterMDHealth/control
+++ b/server/site_tests/bluetooth_AdapterMDHealth/control
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth multi-devices health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_ble_hid_connections_test b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_ble_hid_connections_test
index 5775891..b18e8c0 100644
--- a/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_ble_hid_connections_test
+++ b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_ble_hid_connections_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """
     This test is to verify that DUT can connect BLE keyboard and
diff --git a/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_ble_hid_connections_test.floss b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_ble_hid_connections_test.floss
new file mode 100644
index 0000000..704ae49
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_ble_hid_connections_test.floss
@@ -0,0 +1,32 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterMDHealth.md_two_ble_hid_connections_test.floss'
+PURPOSE = ('Verify DUT can connect to both devices')
+CRITERIA = 'DUT can connect to both devices'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+    This test is to verify that DUT can connect BLE keyboard and
+    BLE mouse at the same time.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterMDHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_cl_hid_connections_test b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_cl_hid_connections_test
index 82f1dd4..6709629 100644
--- a/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_cl_hid_connections_test
+++ b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_cl_hid_connections_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """
     This test is to verify that DUT can connect classic keyboard and
diff --git a/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_cl_hid_connections_test.floss b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_cl_hid_connections_test.floss
new file mode 100644
index 0000000..479ccb1
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_cl_hid_connections_test.floss
@@ -0,0 +1,32 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterMDHealth.md_two_cl_hid_connections_test.floss'
+PURPOSE = ('Verify DUT can connect to both devices')
+CRITERIA = 'DUT can connect to both devices'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+    This test is to verify that DUT can connect classic keyboard and
+    classic mouse at the same time.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterMDHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_connections_test b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_connections_test
index 462b495..80604fb 100644
--- a/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_connections_test
+++ b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_connections_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """
     This test is to verify that DUT can connect classic keyboard and
diff --git a/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_connections_test.floss b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_connections_test.floss
new file mode 100644
index 0000000..fdf391b
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterMDHealth/control.md_two_connections_test.floss
@@ -0,0 +1,32 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterMDHealth.md_two_connections_test.floss'
+PURPOSE = ('Verify DUT can connect to both devices')
+CRITERIA = 'DUT can connect to both devices'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+    This test is to verify that DUT can connect classic keyboard and
+    BLE mouse at the same time.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterMDHealth', host=host,
+                 num_iterations=1, args_dict=args_dict,
+                 test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterMDHealth/control.stress b/server/site_tests/bluetooth_AdapterMDHealth/control.stress
index 43fafd2..14e7fce 100644
--- a/server/site_tests/bluetooth_AdapterMDHealth/control.stress
+++ b/server/site_tests/bluetooth_AdapterMDHealth/control.stress
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterMDHealth.stress'
 PURPOSE = ('Batch of Bluetooth MD health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth_stress'
+ATTRIBUTES = ''
 TIME = 'LONG'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth multi-devices health tests. This test is written
diff --git a/server/site_tests/bluetooth_AdapterMTBF/bluetooth_AdapterMTBF.py b/server/site_tests/bluetooth_AdapterMTBF/bluetooth_AdapterMTBF.py
index 9655ec6..56efabd 100644
--- a/server/site_tests/bluetooth_AdapterMTBF/bluetooth_AdapterMTBF.py
+++ b/server/site_tests/bluetooth_AdapterMTBF/bluetooth_AdapterMTBF.py
@@ -211,7 +211,7 @@
         """Test the device can connect after suspending and resuming"""
         boot_id = self.host.get_boot_id()
         suspend = self.suspend_async(suspend_time=15)
-        start_time = self.bluetooth_facade.get_device_time()
+        start_time = self.bluetooth_facade.get_device_utc_time()
 
         self.test_device_set_discoverable(mouse, False)
 
@@ -235,30 +235,8 @@
         """Test the device can be waken up by the mouse"""
         if self.skip_wake_test:
             return
-        boot_id = self.host.get_boot_id()
-        suspend = self.suspend_async(
-            suspend_time=60, expect_bt_wake=True)
-        start_time = self.bluetooth_facade.get_device_time()
 
-        self.test_adapter_wake_enabled()
-        self.test_suspend_and_wait_for_sleep(
-            suspend, sleep_timeout=5)
-
-        # Trigger peer wakeup
-        peer_wake = self.device_connect_async('BLE_MOUSE', mouse,
-                                              self.bluetooth_facade.address)
-        peer_wake.start()
-
-        # Expect a quick resume. If a timeout occurs, test fails.
-        self.test_wait_for_resume(boot_id,
-                                  suspend,
-                                  resume_timeout=20,
-                                  test_start_time=start_time,
-                                  fail_on_timeout=True)
-
-        # Finish peer wake process
-        peer_wake.join()
-
+        self.run_peer_wakeup_device('MOUSE', mouse, should_pair=False)
         # Make sure we're actually connected
         self.test_device_is_connected(mouse.address)
         self.test_hid_device_created(mouse.address)
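The hunk above replaces the inline suspend-and-wake sequence with the shared run_peer_wakeup_device() helper. Reconstructed only from the lines removed here (the helper's real code is not in this diff and may differ, for instance in how should_pair is handled), the consolidated flow is roughly:

    # Sketch reconstructed from the removed lines; illustration only.
    def run_peer_wakeup_device_sketch(self, device_type, device,
                                      should_pair=False):
        # Any pairing implied by should_pair is elided in this sketch.
        boot_id = self.host.get_boot_id()
        suspend = self.suspend_async(suspend_time=60, expect_bt_wake=True)
        start_time = self.bluetooth_facade.get_device_utc_time()

        self.test_adapter_wake_enabled()
        self.test_suspend_and_wait_for_sleep(suspend, sleep_timeout=5)

        # Trigger peer wakeup, then expect a quick resume.
        peer_wake = self.device_connect_async(device_type, device,
                                              self.bluetooth_facade.address)
        peer_wake.start()
        self.test_wait_for_resume(boot_id,
                                  suspend,
                                  resume_timeout=20,
                                  test_start_time=start_time,
                                  fail_on_timeout=True)
        peer_wake.join()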
diff --git a/server/site_tests/bluetooth_AdapterMTBF/control.better_together_stress_test b/server/site_tests/bluetooth_AdapterMTBF/control.better_together_stress_test
index 8918101..1e83abe 100644
--- a/server/site_tests/bluetooth_AdapterMTBF/control.better_together_stress_test
+++ b/server/site_tests/bluetooth_AdapterMTBF/control.better_together_stress_test
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 	The test is to stress the better together test and measure the Mean
diff --git a/server/site_tests/bluetooth_AdapterMTBF/control.typical_use_cases_test b/server/site_tests/bluetooth_AdapterMTBF/control.typical_use_cases_test
index c224fbd..a753a96 100644
--- a/server/site_tests/bluetooth_AdapterMTBF/control.typical_use_cases_test
+++ b/server/site_tests/bluetooth_AdapterMTBF/control.typical_use_cases_test
@@ -15,6 +15,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:4'
+PY_VERSION = 3
 
 DOC = """
 	The test is to run some typical bluetooth use cases and measure the Mean
diff --git a/server/site_tests/bluetooth_AdapterPowerMeasure/bluetooth_AdapterPowerMeasure.py b/server/site_tests/bluetooth_AdapterPowerMeasure/bluetooth_AdapterPowerMeasure.py
index f14ab74..5178d04 100644
--- a/server/site_tests/bluetooth_AdapterPowerMeasure/bluetooth_AdapterPowerMeasure.py
+++ b/server/site_tests/bluetooth_AdapterPowerMeasure/bluetooth_AdapterPowerMeasure.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/bluetooth_AdapterPowerMeasure/control.pw_measurement_suspension_test b/server/site_tests/bluetooth_AdapterPowerMeasure/control.pw_measurement_suspension_test
index b9505af..584ce11 100644
--- a/server/site_tests/bluetooth_AdapterPowerMeasure/control.pw_measurement_suspension_test
+++ b/server/site_tests/bluetooth_AdapterPowerMeasure/control.pw_measurement_suspension_test
@@ -6,7 +6,7 @@
 from autotest_lib.server.cros.bluetooth import advertisements_data
 
 
-AUTHOR = 'Chrome OS Team'
+AUTHOR = 'ChromeOS Team'
 NAME = 'bluetooth_AdapterPowerMeasure.pw_measurement_suspension_test'
 PURPOSE = 'Test power consumption of Bluetooth chip during system suspension.'
 CRITERIA = 'Bluetooth chip should consume power less than specified.'
@@ -17,6 +17,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1, servo'
+PY_VERSION = 3
 
 DOC = """
 This test case verifies that the Bluetooth chip of the DUT does
diff --git a/server/site_tests/bluetooth_AdapterPowerMeasure/control.suspension_100 b/server/site_tests/bluetooth_AdapterPowerMeasure/control.suspension_100
index 21dcba0..d8ed3ac 100644
--- a/server/site_tests/bluetooth_AdapterPowerMeasure/control.suspension_100
+++ b/server/site_tests/bluetooth_AdapterPowerMeasure/control.suspension_100
@@ -6,7 +6,7 @@
 from autotest_lib.server.cros.bluetooth import advertisements_data
 
 
-AUTHOR = 'Chrome OS Team'
+AUTHOR = 'ChromeOS Team'
 NAME = 'bluetooth_AdapterPowerMeasure.suspension_100'
 PURPOSE = 'Test power consumption of Bluetooth chip during system suspension.'
 CRITERIA = 'Bluetooth chip should consume power less than specified.'
@@ -16,6 +16,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1, servo'
+PY_VERSION = 3
 
 DOC = """
 This test case verifies that the Bluetooth chip of the DUT does
diff --git a/server/cros/packet_generation/__init__.py b/server/site_tests/bluetooth_AdapterQRHealth/__init__.py
similarity index 100%
copy from server/cros/packet_generation/__init__.py
copy to server/site_tests/bluetooth_AdapterQRHealth/__init__.py
diff --git a/server/site_tests/bluetooth_AdapterQRHealth/bluetooth_AdapterQRHealth.py b/server/site_tests/bluetooth_AdapterQRHealth/bluetooth_AdapterQRHealth.py
new file mode 100644
index 0000000..ca0082a
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQRHealth/bluetooth_AdapterQRHealth.py
@@ -0,0 +1,257 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""A Batch of Bluetooth Quality Report tests"""
+
+import collections
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.cros.bluetooth.bluetooth_audio_test_data import (
+        A2DP_MEDIUM, HFP_WBS, HFP_NBS, HFP_WBS_MEDIUM, HFP_NBS_MEDIUM)
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_qr_tests import (
+        BluetoothAdapterQRTests, QR_UNSUPPORTED_CHIPSETS)
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import (
+        BluetoothAdapterQuickTests)
+
+
+class bluetooth_AdapterQRHealth(BluetoothAdapterQuickTests,
+                                BluetoothAdapterQRTests):
+    """A Batch of Bluetooth audio health tests"""
+
+    test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
+    batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
+
+    def run_test_method(self,
+                        devices,
+                        test_method,
+                        test_profile,
+                        logging_and_check=True):
+        """Common procedure to run a specific test method.
+
+        @param devices: a list of devices.
+        @param test_method: the test method to run.
+        @param test_profile: audio test profile to use.
+        @param logging_and_check: set this to True to enable the quality
+                                  report log checking.
+        """
+
+        if not isinstance(devices, collections.Iterable):
+            devices = (devices, )
+
+        num_devices = len(devices)
+
+        # Make sure WBS profile works fine.
+        if test_profile in (HFP_WBS, HFP_WBS_MEDIUM):
+            if self.check_wbs_capability():
+                if not self.bluetooth_facade.enable_wbs(True):
+                    raise error.TestError('failed to enable wbs')
+            else:
+                raise error.TestNAError(
+                        'The DUT does not support WBS. Skip the test.')
+        elif test_profile in (HFP_NBS, HFP_NBS_MEDIUM):
+            if not self.bluetooth_facade.enable_wbs(False):
+                raise error.TestError('failed to disable wbs')
+
+        time.sleep(3)
+
+        self.test_reset_on_adapter()
+        self.test_bluetoothd_running()
+
+        for device in devices:
+            if device.device_type == 'BLUETOOTH_AUDIO':
+                self.initialize_bluetooth_audio(device, test_profile)
+
+            self.test_discover_device(device.address)
+            self.test_pairing(device.address, device.pin, trusted=True)
+            self.test_connection_by_adapter(device.address)
+
+            time.sleep(2)
+
+        if logging_and_check:
+            self.dut_btmon_log_path = self.start_new_btmon()
+            self.enable_disable_debug_log(enable=True)
+            self.enable_disable_quality_debug_log(enable=True)
+
+        test_method()
+
+        if logging_and_check:
+            self.test_send_log()
+            self.check_qr_event_log(num_devices=num_devices)
+            self.enable_disable_quality_debug_log(enable=False)
+            self.enable_disable_debug_log(enable=False)
+
+        for device in devices:
+            self.test_disconnection_by_adapter(device.address)
+
+            if device.device_type == 'BLUETOOTH_AUDIO':
+                self.cleanup_bluetooth_audio(device, test_profile)
+
+    # Remove flags=['Quick Health'] when this test is migrated to stable suite.
+    @test_wrapper('Quality Report A2DP test',
+                  devices={'BLUETOOTH_AUDIO': 1},
+                  flags=['Quick Health'],
+                  skip_chipsets=QR_UNSUPPORTED_CHIPSETS)
+    def qr_a2dp_test(self):
+        """Quality Report A2DP test"""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        test_profile = A2DP_MEDIUM
+        test_method = lambda: self.qr_a2dp(device, test_profile)
+
+        self.run_test_method(device, test_method, test_profile)
+
+    # Remove flags=['Quick Health'] when this test is migrated to stable suite.
+    @test_wrapper('Quality Report power cycle and A2DP test',
+                  devices={'BLUETOOTH_AUDIO': 1},
+                  flags=['Quick Health'],
+                  skip_chipsets=QR_UNSUPPORTED_CHIPSETS)
+    def qr_power_cycle_a2dp_test(self):
+        """Quality Report power cycle and A2DP test"""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        test_profile = A2DP_MEDIUM
+        test_method = lambda: self.qr_power_cycle_a2dp(device, test_profile)
+
+        self.run_test_method(device, test_method, test_profile)
+
+    # Remove flags=['Quick Health'] when this test is migrated to stable suite.
+    @test_wrapper('Quality Report HFP NBS dut as source test',
+                  devices={'BLUETOOTH_AUDIO': 1},
+                  flags=['Quick Health'],
+                  skip_chipsets=QR_UNSUPPORTED_CHIPSETS)
+    def qr_hfp_nbs_dut_as_src_test(self):
+        """Quality Report HFP NBS dut as source test"""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        test_profile = HFP_NBS_MEDIUM
+        test_method = lambda: self.qr_hfp_dut_as_src(device, test_profile)
+
+        self.run_test_method(device, test_method, test_profile)
+
+    # Remove flags=['Quick Health'] when this test is migrated to stable suite.
+    @test_wrapper('Quality Report HFP WBS dut as source test',
+                  devices={'BLUETOOTH_AUDIO': 1},
+                  flags=['Quick Health'],
+                  skip_chipsets=QR_UNSUPPORTED_CHIPSETS)
+    def qr_hfp_wbs_dut_as_src_test(self):
+        """Quality Report HFP WBS dut as source test"""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        test_profile = HFP_WBS_MEDIUM
+        test_method = lambda: self.qr_hfp_dut_as_src(device, test_profile)
+
+        self.run_test_method(device, test_method, test_profile)
+
+    # Remove flags=['Quick Health'] when this test is migrated to stable suite.
+    @test_wrapper('Quality Report disabled A2DP test',
+                  devices={'BLUETOOTH_AUDIO': 1},
+                  flags=['Quick Health'],
+                  skip_chipsets=QR_UNSUPPORTED_CHIPSETS)
+    def qr_disabled_a2dp_test(self):
+        """Quality Report disabled A2DP test"""
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        test_profile = A2DP_MEDIUM
+        test_method = lambda: self.qr_disabled_a2dp(device, test_profile)
+
+        self.run_test_method(device,
+                             test_method,
+                             test_profile,
+                             logging_and_check=False)
+
+    # Remove flags=['Quick Health'] when this test is migrated to stable suite.
+    @test_wrapper('Quality Report A2DP and classic keyboard test',
+                  devices={
+                          'BLUETOOTH_AUDIO': 1,
+                          "KEYBOARD": 1
+                  },
+                  flags=['Quick Health'],
+                  skip_chipsets=QR_UNSUPPORTED_CHIPSETS)
+    def qr_a2dp_cl_keyboard_test(self):
+        """Quality Report A2DP and classic keyboard test"""
+        audio_device = self.devices['BLUETOOTH_AUDIO'][0]
+        keyboard_device = self.devices['KEYBOARD'][0]
+        test_profile = A2DP_MEDIUM
+        test_method = lambda: self.qr_a2dp_cl_keyboard(
+                audio_device, keyboard_device, test_profile)
+
+        self.run_test_method((audio_device, keyboard_device),
+                             test_method,
+                             test_profile)
+
+    # Remove flags=['Quick Health'] when this test is migrated to stable suite.
+    @test_wrapper(
+            'Quality Report HFP WBS dut as sink and classic keyboard test',
+            devices={
+                    'BLUETOOTH_AUDIO': 1,
+                    'KEYBOARD': 1
+            },
+            flags=['Quick Health'],
+            skip_chipsets=QR_UNSUPPORTED_CHIPSETS)
+    def qr_hfp_wbs_dut_as_sink_cl_keyboard_test(self):
+        """Quality Report HFP WBS dut as sink and classic keyboard test"""
+        audio_device = self.devices['BLUETOOTH_AUDIO'][0]
+        keyboard_device = self.devices['KEYBOARD'][0]
+        test_profile = HFP_WBS_MEDIUM
+        test_method = lambda: self.qr_hfp_dut_as_sink_cl_keyboard(
+                audio_device, keyboard_device, test_profile)
+
+        self.run_test_method((audio_device, keyboard_device),
+                             test_method,
+                             test_profile)
+
+    # Remove flags=['Quick Health'] when this test is migrated to stable suite.
+    @test_wrapper(
+            'Quality Report HFP NBS dut as sink and classic keyboard test',
+            devices={
+                    'BLUETOOTH_AUDIO': 1,
+                    'KEYBOARD': 1
+            },
+            flags=['Quick Health'],
+            skip_chipsets=QR_UNSUPPORTED_CHIPSETS)
+    def qr_hfp_nbs_dut_as_sink_cl_keyboard_test(self):
+        """Quality Report HFP NBS dut as sink and classic keyboard test"""
+        audio_device = self.devices['BLUETOOTH_AUDIO'][0]
+        keyboard_device = self.devices['KEYBOARD'][0]
+        test_profile = HFP_NBS_MEDIUM
+        test_method = lambda: self.qr_hfp_dut_as_sink_cl_keyboard(
+                audio_device, keyboard_device, test_profile)
+
+        self.run_test_method((audio_device, keyboard_device),
+                             test_method,
+                             test_profile)
+
+    @batch_wrapper('Bluetooth BQR Batch Health Tests')
+    def qr_health_batch_run(self, num_iterations=1, test_name=None):
+        """Run the bluetooth audio health test batch or a specific given test.
+
+        @param num_iterations: how many iterations to run
+        @param test_name: specific test to run otherwise None to run the
+                whole batch
+        """
+        self.qr_a2dp_test()
+        self.qr_power_cycle_a2dp_test()
+        self.qr_hfp_nbs_dut_as_src_test()
+        self.qr_hfp_wbs_dut_as_src_test()
+        self.qr_disabled_a2dp_test()
+        self.qr_a2dp_cl_keyboard_test()
+        self.qr_hfp_wbs_dut_as_sink_cl_keyboard_test()
+        self.qr_hfp_nbs_dut_as_sink_cl_keyboard_test()
+
+    def run_once(self,
+                 host,
+                 num_iterations=1,
+                 args_dict=None,
+                 test_name=None,
+                 flag='Quick Health'):
+        """Run the batch of Bluetooth stand health tests
+
+        @param host: the DUT, usually a chromebook
+        @param num_iterations: the number of rounds to execute the test
+        @param test_name: the test to run, or None for all tests
+        """
+        self.host = host
+
+        self.quick_test_init(host,
+                             use_btpeer=True,
+                             flag=flag,
+                             args_dict=args_dict)
+        self.qr_health_batch_run(num_iterations, test_name)
+        self.quick_test_cleanup()
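Each @test_wrapper method above follows one pattern: pick the peer device(s), wrap a BluetoothAdapterQRTests method in a lambda, and hand it to run_test_method(). Purely as an illustration of that pattern, a hypothetical extra batch entry would look like the sketch below; qr_new_scenario is a placeholder name, not a real method, and the new test would also need a call added in qr_health_batch_run().

    # Hypothetical example of the pattern used above; qr_new_scenario()
    # is a placeholder and does not exist in bluetooth_adapter_qr_tests.
    @test_wrapper('Quality Report new scenario test',
                  devices={'BLUETOOTH_AUDIO': 1},
                  flags=['Quick Health'],
                  skip_chipsets=QR_UNSUPPORTED_CHIPSETS)
    def qr_new_scenario_test(self):
        """Quality Report new scenario test (illustrative only)"""
        device = self.devices['BLUETOOTH_AUDIO'][0]
        test_profile = A2DP_MEDIUM
        test_method = lambda: self.qr_new_scenario(device, test_profile)

        self.run_test_method(device, test_method, test_profile)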
diff --git a/server/site_tests/bluetooth_AdapterQRHealth/control b/server/site_tests/bluetooth_AdapterQRHealth/control
new file mode 100644
index 0000000..f92230f
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQRHealth/control
@@ -0,0 +1,45 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterQRHealth'
+PURPOSE = ('Batch of Bluetooth BQR health tests')
+CRITERIA = 'Pass all health tests'
+ATTRIBUTES = 'suite:bluetooth' # This control file is used to run all tests locally.
+TIME = 'SHORT'  # Approximately 15 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+    A Batch of Bluetooth Quality Report health tests. This test is written
+    as a batch of tests in order to reduce test time, since auto-test ramp
+    up time is costly. The batch is using BluetoothAdapterQuickTests wrapper
+    methods to start and end a test and a batch of tests.
+
+    This class can be called to run the entire test batch or to run a
+    specific test only
+
+    Currently, the batch contains the following tests:
+    - qr_a2dp_test
+    - qr_hfp_nbs_dut_as_src_test
+    - qr_hfp_wbs_dut_as_src_test
+    - qr_disabled_a2dp_test
+    - qr_a2dp_cl_keyboard_test
+    - qr_hfp_wbs_dut_as_sink_cl_keyboard_test
+    - qr_hfp_nbs_dut_as_sink_cl_keyboard_test
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterQRHealth', host=host, num_iterations=1,
+                 args_dict=args_dict)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQRHealth/control.qr_a2dp_cl_keyboard_test b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_a2dp_cl_keyboard_test
new file mode 100644
index 0000000..a6b1c5f
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_a2dp_cl_keyboard_test
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterQRHealth.qr_a2dp_cl_keyboard_test'
+PURPOSE = ('Check Quality function working well with multiple devices')
+CRITERIA = 'Pass all the Bluetooth log checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_flaky'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+        Checking if the Quality function works well with the
+        Bluetooth keyboard and the A2DP streaming.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterQRHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQRHealth/control.qr_a2dp_test b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_a2dp_test
new file mode 100644
index 0000000..02e9ec1
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_a2dp_test
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterQRHealth.qr_a2dp_test'
+PURPOSE = ('Check Quality Report function working well with A2DP '
+           'streaming')
+CRITERIA = 'Pass all the Bluetooth log checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_flaky'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+        Checking if the Quality Report function works well with the
+        A2DP streaming.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterQRHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQRHealth/control.qr_disabled_a2dp_test b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_disabled_a2dp_test
new file mode 100644
index 0000000..3968518
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_disabled_a2dp_test
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterQRHealth.qr_disabled_a2dp_test'
+PURPOSE = ('Check Quality function disabled properly with A2DP streaming')
+CRITERIA = 'Pass all the Bluetooth log checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_flaky'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+        Enables then disables Quality function, starts the A2DP streaming
+        and sees if the Quality function was disabled successfully.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterQRHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_nbs_dut_as_sink_cl_keyboard_test b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_nbs_dut_as_sink_cl_keyboard_test
new file mode 100644
index 0000000..a402819
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_nbs_dut_as_sink_cl_keyboard_test
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterQRHealth.qr_hfp_nbs_dut_as_sink_cl_keyboard_test'
+PURPOSE = ('Check Quality function working well with multiple devices')
+CRITERIA = 'Pass all the Bluetooth log checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_flaky'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+        Checking if the Quality function works well with the
+        Bluetooth keyboard and the HFP NBS streaming.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterQRHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_nbs_dut_as_src_test b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_nbs_dut_as_src_test
new file mode 100644
index 0000000..5ab7e3f
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_nbs_dut_as_src_test
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterQRHealth.qr_hfp_nbs_dut_as_src_test'
+PURPOSE = ('Check Quality Report function working well with HFP NBS '
+           'streaming')
+CRITERIA = 'Pass all the Bluetooth log checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_flaky'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+        Checking if the Quality Report function works well with the
+        HFP NBS streaming.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterQRHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_wbs_dut_as_sink_cl_keyboard_test b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_wbs_dut_as_sink_cl_keyboard_test
new file mode 100644
index 0000000..4cf4e0e
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_wbs_dut_as_sink_cl_keyboard_test
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterQRHealth.qr_hfp_wbs_dut_as_sink_cl_keyboard_test'
+PURPOSE = ('Check Quality function working well with multiple devices')
+CRITERIA = 'Pass all the Bluetooth log checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_flaky'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+        Checking if the Quality function works well with the
+        Bluetooth keyboard and the HFP WBS streaming.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterQRHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_wbs_dut_as_src_test b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_wbs_dut_as_src_test
new file mode 100644
index 0000000..b3f77dd
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_hfp_wbs_dut_as_src_test
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterQRHealth.qr_hfp_wbs_dut_as_src_test'
+PURPOSE = ('Check Quality Report function working well with HFP WBS '
+           'streaming')
+CRITERIA = 'Pass all the Bluetooth log checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_flaky'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+        Checking if the Quality Report function works well with the
+        HFP WBS streaming.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterQRHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQRHealth/control.qr_power_cycle_a2dp_test b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_power_cycle_a2dp_test
new file mode 100644
index 0000000..19e3a97
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQRHealth/control.qr_power_cycle_a2dp_test
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterQRHealth.qr_power_cycle_a2dp_test'
+PURPOSE = ('Check Quality Report function working well after Bluetooth power '
+           'reset')
+CRITERIA = 'Pass all the Bluetooth log checking'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_flaky'
+TIME = 'SHORT'  # Approximately 2 mins
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+        Checking if the Quality Report function works well after Bluetooth
+        power reset.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterQRHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQuickHealth/bluetooth_AdapterQuickHealth.py b/server/site_tests/bluetooth_AdapterQuickHealth/bluetooth_AdapterQuickHealth.py
index 9feaaff..ecab481 100644
--- a/server/site_tests/bluetooth_AdapterQuickHealth/bluetooth_AdapterQuickHealth.py
+++ b/server/site_tests/bluetooth_AdapterQuickHealth/bluetooth_AdapterQuickHealth.py
@@ -8,6 +8,8 @@
 from __future__ import division
 from __future__ import print_function
 
+from autotest_lib.server.site_tests.bluetooth_AdapterAdvMonitor import (
+        bluetooth_AdapterAdvMonitor)
 from autotest_lib.server.site_tests.bluetooth_AdapterAUHealth import (
         bluetooth_AdapterAUHealth)
 from autotest_lib.server.site_tests.bluetooth_AdapterCLHealth import (
@@ -25,6 +27,7 @@
 
 
 class bluetooth_AdapterQuickHealth(
+        bluetooth_AdapterAdvMonitor.bluetooth_AdapterAdvMonitor,
         bluetooth_AdapterAUHealth.bluetooth_AdapterAUHealth,
         bluetooth_AdapterCLHealth.bluetooth_AdapterCLHealth,
         bluetooth_AdapterLEHealth.bluetooth_AdapterLEHealth,
@@ -68,6 +71,7 @@
             self.md_health_batch_run()
             self.sr_health_batch_run()
             self.au_health_batch_run()
+            self.advmon_health_batch_run()
             self.quick_test_print_summary()
 
         # End and cleanup test package
diff --git a/server/site_tests/bluetooth_AdapterQuickHealth/control.AVL b/server/site_tests/bluetooth_AdapterQuickHealth/control.AVL
index bbe2377..d4262e2 100644
--- a/server/site_tests/bluetooth_AdapterQuickHealth/control.AVL
+++ b/server/site_tests/bluetooth_AdapterQuickHealth/control.AVL
@@ -3,17 +3,20 @@
 # found in the LICENSE file.
 
 from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth.bluetooth_test_version_control\
+        import test_version_setup
 
 AUTHOR = 'chromeos-bluetooth'
 NAME = 'bluetooth_AdapterQuickHealth.AVL'
 PURPOSE = ('Package of Bluetooth quick health tests')
 CRITERIA = 'Pass all health tests'
 ATTRIBUTES = 'suite:bluetooth_qualification'
-TIME = 'MEDIUM' # Takes 10 minutes on Guado
+TIME = 'LONG' # Takes ~1 hour on kukui
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """
     The AVL test runs a subset of tests in quick health.
@@ -23,9 +26,12 @@
 args_dict = utils.args_to_dict(args)
 
 def run(machine):
+    if not test_version_setup():
+        return
+
     host = hosts.create_host(machine)
     job.run_test('bluetooth_AdapterQuickHealth', host=host,
-                 num_iterations=5, args_dict=args_dict,
+                 num_iterations=1, args_dict=args_dict,
                  flag='AVL')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQuickHealth/control.AVL5 b/server/site_tests/bluetooth_AdapterQuickHealth/control.AVL5
new file mode 100644
index 0000000..d23bc75d
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterQuickHealth/control.AVL5
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterQuickHealth.AVL5'
+PURPOSE = ('Package of Bluetooth quick health tests')
+CRITERIA = 'Pass all health tests'
+ATTRIBUTES = 'suite:bluetooth_qualification'
+TIME = 'LONG' # Takes ~4 hours on kukui
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """
+    The AVL test runs a subset of tests in quick health.
+    Tests included here should be very robust and stable.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterQuickHealth', host=host,
+                 num_iterations=5, args_dict=args_dict,
+                 flag='AVL')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterQuickHealth/control.quick_health b/server/site_tests/bluetooth_AdapterQuickHealth/control.quick_health
index da19685..52c5049 100644
--- a/server/site_tests/bluetooth_AdapterQuickHealth/control.quick_health
+++ b/server/site_tests/bluetooth_AdapterQuickHealth/control.quick_health
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """
     This class implements a Bluetooth quick health package, using
diff --git a/server/site_tests/bluetooth_AdapterRvR/bluetooth_AdapterRvR.py b/server/site_tests/bluetooth_AdapterRvR/bluetooth_AdapterRvR.py
new file mode 100644
index 0000000..1df5a71
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterRvR/bluetooth_AdapterRvR.py
@@ -0,0 +1,93 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+
+Bluetooth tests which involve modifying the attenuation between the
+DUT and the peer using a controllable variable attenuator.
+
+These tests can only be run in a test bed containing a variable attenuator
+and a Bluetooth peer.
+
+"""
+
+
+
+
+import logging
+
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import (
+        BluetoothAdapterQuickTests)
+from autotest_lib.server.cros.bluetooth.bluetooth_rvr_tests import (
+        BluetoothAdapterRvRTests)
+
+test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
+batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
+
+
+class bluetooth_AdapterRvR(BluetoothAdapterQuickTests,
+                           BluetoothAdapterRvRTests):
+    """ Collection of Bluetooth Range vs Rate tests.
+
+    rvr_show_rssi_vs_attenuation : This is not a test. It displays RSSI vs
+    attenuation for the test bed, used by the lab team to verify test beds.
+
+    """
+
+    @test_wrapper(
+            'RSSI vs Attenuation',
+            devices={'MOUSE': 1},
+    )
+    def rvr_show_rssi_vs_attenuation(self):
+        """ Record RSSI at increasing attenuation """
+        try:
+            device_type = 'MOUSE'
+            device = self.devices[device_type][0]
+            logging.debug('attenuator is %s', self.bt_attenuator)
+            rssi_dict = self.check_rssi_vs_attenuation(device,
+                                                       self.bt_attenuator)
+            if rssi_dict == {}:
+                logging.info(
+                        'check_rssi_vs_attenuation did not return any data')
+                return False
+            else:
+                logging.info('--------------------------')
+                logging.info('Total attenuation : RSSI')
+                for attn in sorted(list(rssi_dict.keys())):
+                    rssi = rssi_dict[attn]
+                    logging.info('%s : %s', attn, rssi)
+                logging.info('--------------------------')
+                return True
+        except Exception as e:
+            logging.error('Exception in rvr_show_rssi_vs_attenuation %s',
+                          str(e))
+            return False
+
+    @batch_wrapper('Range vs Rate tests')
+    def rvr_health_batch_run(self, num_iterations=1, test_name=None):
+        """ Batch of Range vs Rate tests health tests. """
+        self.rvr_show_rssi_vs_attenutation()
+
+    def run_once(self,
+                 host,
+                 num_iterations=1,
+                 args_dict=None,
+                 test_name=None,
+                 flag='Quick Health'):
+        """Running Bluetooth adapter suspend resume with peer autotest.
+
+        @param host: the DUT, usually a chromebook
+        @param num_iterations: the number of times to execute the test
+        @param test_name: the test to run or None for all tests
+        @param flag: run tests with this flag (default: Quick Health)
+
+        """
+
+        # Initialize and run the test batch or the requested specific test
+        self.quick_test_init(host,
+                             use_btpeer=True,
+                             flag=flag,
+                             args_dict=args_dict)
+        self.rvr_health_batch_run(num_iterations, test_name)
+        self.quick_test_cleanup()
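check_rssi_vs_attenuation() above returns a dictionary keyed by total attenuation (dB) with the measured RSSI (dBm) as values, which the test only logs. As a usage illustration only (estimate_max_attenuation is not part of this change), such a dictionary could also be reduced to a single range figure:

    # Illustrative helper, not part of this change: largest attenuation at
    # which the measured RSSI is still at or above a usable floor.
    def estimate_max_attenuation(rssi_dict, rssi_floor=-90):
        usable = [attn for attn, rssi in rssi_dict.items()
                  if rssi >= rssi_floor]
        return max(usable) if usable else None

    # With values like those in the control file below, e.g.
    # {55: -61, 75: -80, 85: -90, 87: -92}, this returns 85.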
diff --git a/server/site_tests/bluetooth_AdapterRvR/control.rvr_show_rssi_vs_attenuation b/server/site_tests/bluetooth_AdapterRvR/control.rvr_show_rssi_vs_attenuation
new file mode 100644
index 0000000..1382d38
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterRvR/control.rvr_show_rssi_vs_attenuation
@@ -0,0 +1,78 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to: go/tauto-py3-migration
+# To re-enable migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterRvR.rvr_show_rssi_vs_attenuation'
+PURPOSE = ('Batch of Bluetooth Range vs Rate tests')
+CRITERIA = 'Pass all health tests'
+ATTRIBUTES = ''
+TIME = 'MEDIUM' # 5 minutes on Octopus
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3 # Attenuator related code is not python 3 ready yet
+
+DOC = """
+Vary attenuation from minimum to maximum and display the RSSI. This is used
+as a sample test and also to verify Bluetooth RvR test beds.
+
+This test requires a test bed with a controllable variable attenuator to vary
+the attenuation between the DUT and the Bluetooth peer.
+
+A sample output will be similar to:
+
+Total attenuation : RSSI
+ 55 : -61
+ 56 : -62
+ 57 : -63
+ 58 : -64
+ 59 : -66
+ 60 : -66
+ 61 : -66
+ 62 : -69
+ 63 : -69
+ 64 : -70
+ 65 : -72
+ 66 : -72
+ 67 : -73
+ 68 : -73
+ 69 : -76
+ 70 : -76
+ 71 : -76
+ 72 : -78
+ 73 : -79
+ 74 : -81
+ 75 : -80
+ 76 : -82
+ 77 : -82
+ 78 : -84
+ 79 : -86
+ 80 : -85
+ 81 : -87
+ 82 : -88
+ 83 : -89
+ 84 : -89
+ 85 : -90
+ 86 : -92
+ 87 : -92
+
+
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterRvR', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/bluetooth_AdapterSAHealth.py b/server/site_tests/bluetooth_AdapterSAHealth/bluetooth_AdapterSAHealth.py
index 0a7149b..bee2d0d 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/bluetooth_AdapterSAHealth.py
+++ b/server/site_tests/bluetooth_AdapterSAHealth/bluetooth_AdapterSAHealth.py
@@ -36,6 +36,10 @@
     test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
     batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
 
+    @test_wrapper('Stand Alone noop test', supports_floss=True)
+    def sa_noop(self):
+        """A no-op test to validate Floss"""
+        logging.info("sa_noop ran")
 
     @test_wrapper('Stand Alone basic test')
     def sa_basic_test(self):
@@ -86,7 +90,22 @@
         self.test_pairable()
 
 
-    @test_wrapper('Adapter suspend resume test')
+    # Remove flags=['Quick Health'] when this test is migrated to stable suite.
+    @test_wrapper('Stand Alone power reset test', flags=['Quick Health'])
+    def sa_power_reset(self):
+        """Adapter power reset test
+
+        Repeatedly reset the adapter power and expect that bluetoothd is
+        still running. Note that the test takes about 2 minutes to complete
+        and is suitable to run in a functional verification test suite.
+        """
+        for _ in range(20):
+            self.test_reset_on_adapter()
+            self.test_bluetoothd_running()
+
+
+    # TODO(b/182172118) - Winky has suspend test issues
+    @test_wrapper('Adapter suspend resume test', skip_models=['winky'])
     def sa_adapter_suspend_resume_test(self):
         """Test dapter power states is perserved through suspend resume."""
         def adapter_on_SR_test():
@@ -111,7 +130,7 @@
         adapter_off_SR_test()
 
 
-    @test_wrapper('Adapter present test')
+    @test_wrapper('Adapter present test', supports_floss=True)
     def sa_adapter_present_test(self):
         """Verify that the client has a Bluetooth adapter."""
 
@@ -177,7 +196,7 @@
         self.default_state_test()
 
 
-    @test_wrapper('Valid address test')
+    @test_wrapper('Valid address test', supports_floss=True)
     def sa_valid_address_test(self):
         """Verify that the client Bluetooth adapter has a valid address."""
         self.valid_address_test()
@@ -231,6 +250,7 @@
            @param test_name: specific test to run, otherwise None to run the
                              whole batch
         """
+        self.sa_noop()
         self.sa_basic_test()
         self.sa_adapter_suspend_resume_test()
         self.sa_adapter_present_test()
@@ -239,17 +259,26 @@
         self.sa_default_state_test()
         self.sa_valid_address_test()
         self.sa_dbus_api_tests()
+        self.sa_power_reset()
 
 
-    def run_once(self, host, num_iterations=1, test_name=None,
-                 flag='Quick Health'):
+    def run_once(self,
+                 host,
+                 num_iterations=1,
+                 args_dict=None,
+                 test_name=None,
+                 flag='Quick Health',
+                 floss=False):
         """Run the batch of Bluetooth stand health tests
 
         @param host: the DUT, usually a chromebook
         @param num_iterations: the number of rounds to execute the test
         """
         # Initialize and run the test batch or the requested specific test
-        self.quick_test_init(host, use_btpeer=False, flag=flag,
-                             start_browser=False)
+        self.quick_test_init(host,
+                             use_btpeer=False,
+                             flag=flag,
+                             start_browser=False,
+                             floss=floss)
         self.sa_health_batch_run(num_iterations, test_name)
         self.quick_test_cleanup()
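The run_once() changes above thread a floss flag from the control files into quick_test_init(), and several tests now declare supports_floss=True on their @test_wrapper. The gating itself happens inside BluetoothAdapterQuickTests, which is not shown in this diff; the sketch below is only an assumption about how such a flag could be honored.

    # Assumed gating behavior; the real logic lives in
    # bluetooth_adapter_quick_tests.py and may differ.
    import logging

    def quick_test_test_decorator_sketch(test_name, supports_floss=False,
                                         **kwargs):
        def decorator(test_method):
            def wrapper(self):
                if getattr(self, 'floss', False) and not supports_floss:
                    logging.info('Skipping %s: not supported on Floss yet',
                                 test_name)
                    return
                return test_method(self)
            return wrapper
        return decorator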
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.all_floss b/server/site_tests/bluetooth_AdapterSAHealth/control.all_floss
new file mode 100644
index 0000000..6404d05
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.all_floss
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSAHealth.all_floss'
+PURPOSE = ('Batch of standalone tests for Floss')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = ''
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
+
+DOC = """
+    A batch of Bluetooth standalone health tests. These tests will run all
+    standalone tests that are currently supported on Floss.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterSAHealth', host=host,
+                  num_iterations=1, floss=True)
+
+parallel_simple(run, machines)
+
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_present_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_present_test
index 7263cd4..6a68e25 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_present_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_present_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
    This test tries to verify the client has a bluetooth adapter
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_present_test.floss b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_present_test.floss
new file mode 100644
index 0000000..3dfec48
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_present_test.floss
@@ -0,0 +1,30 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSAHealth.sa_adapter_present_test.floss'
+PURPOSE = ('batch of Bluetooth standalone health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
+
+DOC = """
+    This test tries to verify that the client has a Bluetooth adapter.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterSAHealth', host=host,
+                 num_iterations=1, test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_reboot_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_reboot_test
index 70b19f6..9076947 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_reboot_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_reboot_test
@@ -16,6 +16,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """Server side bluetooth adapter stress tests involving reboot.
     First we test powering on the adapter, reboot the DUT, and make sure
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_suspend_resume_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_suspend_resume_test
index 5bf3532..fe07fe9 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_suspend_resume_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_adapter_suspend_resume_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterSAHealth.sa_adapter_suspend_resume_test'
 PURPOSE = ('batch of Bluetooth standalone health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth_flaky' # TODO(b/148470930)
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth stand alone health tests. This test is written as a
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_basic_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_basic_test
index 8c06133..5cd7f23 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_basic_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_basic_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterSAHealth.sa_basic_test'
 PURPOSE = ('batch of Bluetooth standalone health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone, suite:bluetooth_standalone_cq, suite:infra_qual'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone, suite:bluetooth_standalone_cq, suite:infra_qual, suite:bluetooth_floss_cq'
 TIME = 'MEDIUM'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
-DEPENDENCIES = 'bluetooth'
+DEPENDENCIES = 'bluetooth_state:NORMAL'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth stand alone health tests. This test is written as a
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_dbus_api_tests b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_dbus_api_tests
index 94ca6f5..5e380e2 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_dbus_api_tests
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_dbus_api_tests
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
     This test tries to verify that the bluetooth adapter has correct default
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_default_state_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_default_state_test
index 2850dce..192de91 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_default_state_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_default_state_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
     This test tries to verify that the bluetooth adapter has correct default
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_discoverable_timeout_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_discoverable_timeout_test
index da6f74a..a404f87 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_discoverable_timeout_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_discoverable_timeout_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
     Verify that DiscoverableTimeout Property works as expected
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_eir_max_name_size_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_eir_max_name_size_test
index b39bc92..810d7f7 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_eir_max_name_size_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_eir_max_name_size_test
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterSAHealth.sa_eir_max_name_size_test'
 PURPOSE = ('Test that non-default EIR name length is applied')
 CRITERIA = 'Pass the test'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_standalone'
 TIME = 'SHORT'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
     Tests that the bluetoothd dbus API can be used to set EIR name longer than
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_health b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_health
index b0f9e18..dad6456 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_health
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_health
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth stand alone health tests. This test is written as a
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_pairable_timeout_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_pairable_timeout_test
index c8b756b..bd67766 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_pairable_timeout_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_pairable_timeout_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
     Verify that PairableTimeout Property works as expected
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_power_reset b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_power_reset
new file mode 100644
index 0000000..8a986df
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_power_reset
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSAHealth.sa_power_reset'
+PURPOSE = ('Repeated adapter power reset test')
+CRITERIA = 'Pass the repeated power reset test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'  # less than 2 mins on zork
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth_state:NORMAL'
+PY_VERSION = 3
+
+DOC = """
+    This test repeatedly resets the Bluetooth adapter power.
+    It is expected to complete in a short time, roughly 2 minutes.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterSAHealth', host=host,
+                  num_iterations=1, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_address_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_address_test
index 01a952f..c12905d 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_address_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_address_test
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
    This test tries to verify the client bluetooth adapter has a
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_address_test.floss b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_address_test.floss
new file mode 100644
index 0000000..dafdff2
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_address_test.floss
@@ -0,0 +1,31 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSAHealth.sa_valid_address_test.floss'
+PURPOSE = ('batch of Bluetooth standalone health tests')
+CRITERIA = 'Pass all health test'
+ATTRIBUTES = 'suite:bluetooth_floss'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
+
+DOC = """
+    This test tries to verify that the client Bluetooth adapter has a
+    valid address.
+    """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterSAHealth', host=host,
+                  num_iterations=1, test_name=NAME.split('.')[1], floss=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_alias_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_alias_test
index 3931c47..60ce1d1 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_alias_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_alias_test
@@ -16,6 +16,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
 Checks the Bluetooth Alias of the system.  If the alias is not like
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_id_test b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_id_test
index d5fc3e4..435661c 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_id_test
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.sa_valid_id_test
@@ -16,6 +16,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
 Checks the Bluetooth ID of the system.  If the ID is not "00E0:24..:0400" the
diff --git a/server/site_tests/bluetooth_AdapterSAHealth/control.stress b/server/site_tests/bluetooth_AdapterSAHealth/control.stress
index f42274a..cff87c7 100644
--- a/server/site_tests/bluetooth_AdapterSAHealth/control.stress
+++ b/server/site_tests/bluetooth_AdapterSAHealth/control.stress
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterSAHealth.stress'
 PURPOSE = ('batch of Bluetooth stand alone health tests')
 CRITERIA = 'Pass all health test'
-ATTRIBUTES = 'suite:bluetooth_stress'
+ATTRIBUTES = ''
 TIME = 'LONG'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth stand alone health tests. This test is written as a
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/bluetooth_AdapterSRHealth.py b/server/site_tests/bluetooth_AdapterSRHealth/bluetooth_AdapterSRHealth.py
index 9183f0d..db0497c 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/bluetooth_AdapterSRHealth.py
+++ b/server/site_tests/bluetooth_AdapterSRHealth/bluetooth_AdapterSRHealth.py
@@ -36,23 +36,20 @@
 
 from autotest_lib.client.cros.bluetooth.bluetooth_audio_test_data import A2DP
 from autotest_lib.server.cros.bluetooth.bluetooth_adapter_tests import (
-        TABLET_MODELS, SUSPEND_POWER_DOWN_CHIPSETS)
+        TABLET_MODELS, SUSPEND_POWER_DOWN_CHIPSETS,
+        SUSPEND_RESET_IF_NO_PEER_CHIPSETS, SUSPEND_POWER_DOWN_MODELS)
 from autotest_lib.server.cros.bluetooth.bluetooth_adapter_audio_tests import (
         BluetoothAdapterAudioTests)
 from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import (
         BluetoothAdapterQuickTests)
-
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import (
+        PROFILE_CONNECT_WAIT, SUSPEND_SEC, EXPECT_NO_WAKE_SUSPEND_SEC)
 from six.moves import range
 
 test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
 batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
 
-PROFILE_CONNECT_WAIT = 15
-SUSPEND_SEC = 15
-EXPECT_NO_WAKE_SUSPEND_SEC = 30
-EXPECT_PEER_WAKE_SUSPEND_SEC = 60
-
-STRESS_ITERATIONS = 25
+STRESS_ITERATIONS = 50
 
 
 class bluetooth_AdapterSRHealth(BluetoothAdapterQuickTests,
@@ -60,12 +57,15 @@
     """Server side bluetooth adapter suspend resume test with peer."""
 
     def _test_keyboard_with_string(self, device):
-        self.test_hid_device_created(device.address)
-        return self.test_keyboard_input_from_trace(device, "simple_text")
+        return (self.test_hid_device_created(device.address)
+                and self.test_keyboard_input_from_trace(device, "simple_text"))
 
-    def _test_mouse_left_click(self, device):
-        self.test_hid_device_created(device.address)
-        return self.test_mouse_left_click(device)
+    def _test_mouse(self, device):
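+        """Verify the mouse HID node was created, then exercise left click,
+        XY movement, scroll down, and click-and-drag from the peer mouse."""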
+        return (self.test_hid_device_created(device.address)
+                and self.test_mouse_left_click(device)
+                and self.test_mouse_move_in_xy(device, -60, 100)
+                and self.test_mouse_scroll_down(device, 70)
+                and self.test_mouse_click_and_drag(device, 90, 30))
 
     # ---------------------------------------------------------------
     # Reconnect after suspend tests
@@ -111,7 +111,7 @@
 
                 # Start the suspend process
                 suspend = self.suspend_async(suspend_time=SUSPEND_SEC)
-                start_time = self.bluetooth_facade.get_device_time()
+                start_time = self.bluetooth_facade.get_device_utc_time()
 
                 # Trigger suspend, wait for regular resume, verify we can reconnect
                 # and run device specific test
@@ -122,11 +122,17 @@
                                           resume_timeout=SUSPEND_SEC,
                                           test_start_time=start_time)
 
-                for device_type, device, device_test in devtuples:
-                    # Only reconnect if we don't expect automatic reconnect
-                    if not auto_reconnect:
+                # Only reconnect if we don't expect automatic reconnect.
+                # Let the devices initiate connections before the DUT initiates
+                # auto reconnections.
+                # Complete reconnecting all peers before running device tests.
+                # Otherwise, we may have a race between auto reconnection
+                # from the DUT and peer-initiated connection. See b/177870286.
+                if not auto_reconnect:
+                    for device_type, device, _ in devtuples:
                         if 'BLE' in device_type:
-                            # LE can't reconnect without advertising/discoverable
+                            # LE can't reconnect without
+                            # advertising/discoverable
                             self.test_device_set_discoverable(device, True)
                             # Make sure we're actually connected
                             self.test_device_is_connected(device.address)
@@ -135,6 +141,7 @@
                             # wake up the dut
                             self.test_connection_by_device(device)
 
+                for _, device, device_test in devtuples:
                     if device_test is not None:
                         device_test(device)
 
@@ -148,7 +155,7 @@
         device_type = 'MOUSE'
         device = self.devices[device_type][0]
         self.run_reconnect_device([(device_type, device,
-                                    self._test_mouse_left_click)])
+                                    self._test_mouse)])
 
     @test_wrapper('Reconnect LE HID', devices={'BLE_MOUSE': 1})
     def sr_reconnect_le_hid(self):
@@ -156,7 +163,7 @@
         device_type = 'BLE_MOUSE'
         device = self.devices[device_type][0]
         self.run_reconnect_device([(device_type, device,
-                                    self._test_mouse_left_click)])
+                                    self._test_mouse)])
 
     # TODO(b/163143005) - Hana can't handle two concurrent HID connections
     @test_wrapper('Reconnect Multiple Classic HID',
@@ -168,7 +175,7 @@
     def sr_reconnect_multiple_classic_hid(self):
         """ Reconnects multiple classic HID devices after suspend/resume. """
         devices = [('MOUSE', self.devices['MOUSE'][0],
-                    self._test_mouse_left_click),
+                    self._test_mouse),
                    ('KEYBOARD', self.devices['KEYBOARD'][0],
                     self._test_keyboard_with_string)]
         self.run_reconnect_device(devices)
@@ -181,7 +188,7 @@
     def sr_reconnect_multiple_le_hid(self):
         """ Reconnects multiple LE HID devices after suspend/resume. """
         devices = [('BLE_MOUSE', self.devices['BLE_MOUSE'][0],
-                    self._test_mouse_left_click),
+                    self._test_mouse),
                    ('BLE_KEYBOARD', self.devices['BLE_KEYBOARD'][0],
                     self._test_keyboard_with_string)]
         self.run_reconnect_device(devices)
@@ -196,7 +203,7 @@
             suspend/resume.
         """
         devices = [('BLE_MOUSE', self.devices['BLE_MOUSE'][0],
-                    self._test_mouse_left_click),
+                    self._test_mouse),
                    ('KEYBOARD', self.devices['KEYBOARD'][0],
                     self._test_keyboard_with_string)]
         self.run_reconnect_device(devices)
@@ -207,7 +214,7 @@
         device_type = 'MOUSE'
         device = self.devices[device_type][0]
         self.run_reconnect_device(
-                [(device_type, device, self._test_mouse_left_click)],
+                [(device_type, device, self._test_mouse)],
                 iterations=STRESS_ITERATIONS)
 
     @test_wrapper('Reconnect LE HID Stress Test', devices={'BLE_MOUSE': 1})
@@ -216,7 +223,7 @@
         device_type = 'BLE_MOUSE'
         device = self.devices[device_type][0]
         self.run_reconnect_device(
-                [(device_type, device, self._test_mouse_left_click)],
+                [(device_type, device, self._test_mouse)],
                 iterations=STRESS_ITERATIONS)
 
     @test_wrapper('Reconnect A2DP',
@@ -231,170 +238,96 @@
                 [(device_type, device, self.test_device_a2dp_connected)],
                 auto_reconnect=True)
 
-    # ---------------------------------------------------------------
-    # Wake from suspend tests
-    # ---------------------------------------------------------------
-
-    def run_peer_wakeup_device(self,
-                               device_type,
-                               device,
-                               device_test=None,
-                               iterations=1,
-                               should_wake=True):
-        """ Uses paired peer device to wake the device from suspend.
-
-        @param device_type: the device type (used to determine if it's LE)
-        @param device: the meta device with the paired device
-        @param device_test: What to test to run after waking and connecting the
-                            adapter/host
-        @param iterations: Number of suspend + peer wake loops to run
-        @param should_wake: Whether wakeup should occur on this test. With HID
-                            peers, this should be True. With non-HID peers, this
-                            should be false.
-        """
-        boot_id = self.host.get_boot_id()
-
-        if should_wake:
-            sleep_time = EXPECT_PEER_WAKE_SUSPEND_SEC
-            resume_time = SUSPEND_SEC
-            resume_slack = 5  # Allow 5s slack for resume timeout
-        else:
-            sleep_time = EXPECT_NO_WAKE_SUSPEND_SEC
-            resume_time = EXPECT_NO_WAKE_SUSPEND_SEC
-            # Negative resume slack lets us wake a bit earlier than expected
-            # If suspend takes a while to enter, this may be necessary to get
-            # the timings right.
-            resume_slack = -5
-
-        # Clear wake before testing
-        self.test_adapter_set_wake_disabled()
-
-        try:
-            self.assert_discover_and_pair(device)
-            self.assert_on_fail(
-                    self.test_device_set_discoverable(device, False))
-
-            # Confirm connection completed
-            self.assert_on_fail(self.test_device_is_connected(device.address))
-
-            # Profile connection may not have completed yet and this will
-            # race with a subsequent disconnection (due to suspend). Use the
-            # device test to force profile connect or wait if no test was
-            # given.
-            if device_test is not None:
-                self.assert_on_fail(device_test(device))
-            else:
-                time.sleep(PROFILE_CONNECT_WAIT)
-
-            for it in range(iterations):
-                logging.info(
-                        'Running iteration {}/{} of suspend peer wake'.format(
-                                it + 1, iterations))
-
-                # Start a new suspend instance
-                suspend = self.suspend_async(suspend_time=sleep_time,
-                                             expect_bt_wake=should_wake)
-                start_time = self.bluetooth_facade.get_device_time()
-
-                if should_wake:
-                    self.test_device_wake_allowed(device.address)
-                    # Also wait until powerd marks adapter as wake enabled
-                    self.test_adapter_wake_enabled()
-                else:
-                    self.test_device_wake_not_allowed(device.address)
-
-                # Trigger suspend, asynchronously wake and wait for resume
-                self.test_suspend_and_wait_for_sleep(suspend, sleep_timeout=5)
-
-                # Trigger peer wakeup
-                adapter_address = self.bluetooth_facade.address
-                peer_wake = self.device_connect_async(device_type,
-                                                      device,
-                                                      adapter_address,
-                                                      delay_wake=5,
-                                                      should_wake=should_wake)
-                peer_wake.start()
-
-                # Expect a quick resume. If a timeout occurs, test fails. Since
-                # we delay sending the wake signal, we should accommodate that
-                # in our expected timeout.
-                self.test_wait_for_resume(boot_id,
-                                          suspend,
-                                          resume_timeout=resume_time,
-                                          test_start_time=start_time,
-                                          resume_slack=resume_slack,
-                                          fail_on_timeout=should_wake,
-                                          fail_early_wake=not should_wake)
-
-                # Finish peer wake process
-                peer_wake.join()
-
-                # Only check peer device connection state if we expected to wake
-                # from it. Otherwise, we may or may not be connected based on
-                # the specific profile's reconnection policy.
-                if should_wake:
-                    # Make sure we're actually connected
-                    self.test_device_is_connected(device.address)
-
-                    # Verify the profile is working
-                    if device_test is not None:
-                        device_test(device)
-
-        finally:
-            self.test_remove_pairing(device.address)
 
     # TODO(b/151332866) - Bob can't wake from suspend due to wrong power/wakeup
     # TODO(b/150897528) - Dru is powered down during suspend, won't wake up
     @test_wrapper('Peer wakeup Classic HID',
                   devices={'MOUSE': 1},
-                  skip_models=TABLET_MODELS + ['bob', 'dru'],
+                  skip_models=TABLET_MODELS + SUSPEND_POWER_DOWN_MODELS +
+                  ['bob'],
                   skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
     def sr_peer_wake_classic_hid(self):
         """ Use classic HID device to wake from suspend. """
         device = self.devices['MOUSE'][0]
         self.run_peer_wakeup_device('MOUSE',
                                     device,
-                                    device_test=self._test_mouse_left_click)
+                                    device_test=self._test_mouse)
 
     # TODO(b/151332866) - Bob can't wake from suspend due to wrong power/wakeup
     # TODO(b/150897528) - Dru is powered down during suspend, won't wake up
     @test_wrapper('Peer wakeup LE HID',
                   devices={'BLE_MOUSE': 1},
-                  skip_models=TABLET_MODELS + ['bob', 'dru'],
+                  skip_models=TABLET_MODELS + SUSPEND_POWER_DOWN_MODELS +
+                  ['bob'],
                   skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
     def sr_peer_wake_le_hid(self):
         """ Use LE HID device to wake from suspend. """
         device = self.devices['BLE_MOUSE'][0]
         self.run_peer_wakeup_device('BLE_MOUSE',
                                     device,
-                                    device_test=self._test_mouse_left_click)
+                                    device_test=self._test_mouse)
+
+
+    # TODO(b/151332866) - Bob can't wake from suspend due to wrong power/wakeup
+    # TODO(b/150897528) - Dru is powered down during suspend, won't wake up
+    @test_wrapper('Peer wakeup LE HID with reconnect LE HID',
+                  devices={
+                          'BLE_MOUSE': 1,
+                          'BLE_KEYBOARD': 1
+                  },
+                  skip_models=TABLET_MODELS + SUSPEND_POWER_DOWN_MODELS +
+                  ['bob'],
+                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
+    def sr_peer_wake_le_hid_reconnect_le_hid(self):
+        """ Use LE HID device to wake from suspend. And reconnects a secondary
+            LE HID device afterwards
+        """
+        device = self.devices['BLE_MOUSE'][0]
+        device_reconnect = self.devices['BLE_KEYBOARD'][0]
+
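+        # Pair and exercise the secondary LE keyboard before the wake test.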
+        self.assert_discover_and_pair(device_reconnect)
+        self.test_device_set_discoverable(device_reconnect, False)
+        self.test_connection_by_adapter(device_reconnect.address)
+        self._test_keyboard_with_string(device_reconnect)
+
+        self.run_peer_wakeup_device('BLE_MOUSE',
+                                    device,
+                                    device_test=self._test_mouse,
+                                    keep_paired=True)
+
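+        # Make the keyboard discoverable again so the LE reconnection can
+        # occur, then verify it is connected and still functional.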
+        self.test_device_set_discoverable(device_reconnect, True)
+        self.test_device_is_connected(device_reconnect.address)
+        self._test_keyboard_with_string(device_reconnect)
+
 
     # TODO(b/151332866) - Bob can't wake from suspend due to wrong power/wakeup
     # TODO(b/150897528) - Dru is powered down during suspend, won't wake up
     @test_wrapper('Peer wakeup Classic HID',
                   devices={'MOUSE': 1},
-                  skip_models=TABLET_MODELS + ['bob', 'dru'],
+                  skip_models=TABLET_MODELS + SUSPEND_POWER_DOWN_MODELS +
+                  ['bob'],
                   skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
     def sr_peer_wake_classic_hid_stress(self):
         """ Use classic HID device to wake from suspend. """
         device = self.devices['MOUSE'][0]
         self.run_peer_wakeup_device('MOUSE',
                                     device,
-                                    device_test=self._test_mouse_left_click,
+                                    device_test=self._test_mouse,
                                     iterations=STRESS_ITERATIONS)
 
     # TODO(b/151332866) - Bob can't wake from suspend due to wrong power/wakeup
     # TODO(b/150897528) - Dru is powered down during suspend, won't wake up
     @test_wrapper('Peer wakeup LE HID',
                   devices={'BLE_MOUSE': 1},
-                  skip_models=TABLET_MODELS + ['bob', 'dru'],
+                  skip_models=TABLET_MODELS + SUSPEND_POWER_DOWN_MODELS +
+                  ['bob'],
                   skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
     def sr_peer_wake_le_hid_stress(self):
         """ Use LE HID device to wake from suspend. """
         device = self.devices['BLE_MOUSE'][0]
         self.run_peer_wakeup_device('BLE_MOUSE',
                                     device,
-                                    device_test=self._test_mouse_left_click,
+                                    device_test=self._test_mouse,
                                     iterations=STRESS_ITERATIONS)
 
     @test_wrapper('Peer wakeup with A2DP should fail',
@@ -418,8 +351,9 @@
     # TODO(b/150897528) - Scarlet Dru loses firmware around suspend
     @test_wrapper('Suspend while discovering',
                   devices={'BLE_MOUSE': 1},
-                  skip_models=['dru'],
-                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
+                  skip_models=SUSPEND_POWER_DOWN_MODELS,
+                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS +
+                  SUSPEND_RESET_IF_NO_PEER_CHIPSETS)
     def sr_while_discovering(self):
         """ Suspend while discovering. """
         device = self.devices['BLE_MOUSE'][0]
@@ -430,7 +364,7 @@
         # Test discovery without setting discovery filter
         # ----------------------------------------------------------------------
         suspend = self.suspend_async(suspend_time=EXPECT_NO_WAKE_SUSPEND_SEC)
-        start_time = self.bluetooth_facade.get_device_time()
+        start_time = self.bluetooth_facade.get_device_utc_time()
 
         # We don't pair to the peer device because we don't want it in the
         # allowlist. However, we want an advertising peer in this test
@@ -453,7 +387,7 @@
         # Test discovery with discovery filter set
         # ----------------------------------------------------------------------
         suspend = self.suspend_async(suspend_time=EXPECT_NO_WAKE_SUSPEND_SEC)
-        start_time = self.bluetooth_facade.get_device_time()
+        start_time = self.bluetooth_facade.get_device_utc_time()
 
         self.test_set_discovery_filter({'Transport': 'auto'})
         self.test_start_discovery()
@@ -474,14 +408,15 @@
     # TODO(b/150897528) - Scarlet Dru loses firmware around suspend
     @test_wrapper('Suspend while advertising',
                   devices={'MOUSE': 1},
-                  skip_models=['dru'],
-                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS)
+                  skip_models=SUSPEND_POWER_DOWN_MODELS,
+                  skip_chipsets=SUSPEND_POWER_DOWN_CHIPSETS +
+                  SUSPEND_RESET_IF_NO_PEER_CHIPSETS)
     def sr_while_advertising(self):
         """ Suspend while advertising. """
         device = self.devices['MOUSE'][0]
         boot_id = self.host.get_boot_id()
         suspend = self.suspend_async(suspend_time=EXPECT_NO_WAKE_SUSPEND_SEC)
-        start_time = self.bluetooth_facade.get_device_time()
+        start_time = self.bluetooth_facade.get_device_utc_time()
 
         self.test_discoverable()
         self.test_suspend_and_wait_for_sleep(suspend,
@@ -511,7 +446,7 @@
         device = self.devices['MOUSE'][0]
         boot_id = self.host.get_boot_id()
         suspend = self.suspend_async(suspend_time=SUSPEND_SEC)
-        start_time = self.bluetooth_facade.get_device_time()
+        start_time = self.bluetooth_facade.get_device_utc_time()
 
         # Pair device so we have something to do in suspend
         self.assert_discover_and_pair(device)
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control b/server/site_tests/bluetooth_AdapterSRHealth/control
index a96115e..2c8847d 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """
     A Batch of Bluetooth suspend+resume with peer health tests. This test is
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_a2dp_should_fail b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_a2dp_should_fail
index d4d8dcb..70593b8 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_a2dp_should_fail
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_a2dp_should_fail
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterSRHealth.sr_peer_wake_a2dp_should_fail'
 PURPOSE = ('Single run of a test')
 CRITERIA = 'Pass test'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'SHORT'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ A2DP peer should not wake system from suspend. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_classic_hid b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_classic_hid
index e589439..529f974 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_classic_hid
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_classic_hid
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterSRHealth.sr_peer_wake_classic_hid'
 PURPOSE = ('Single run of a test')
 CRITERIA = 'Pass test'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'SHORT'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Single run of a Suspend-Resume health testcase. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_classic_hid_stress b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_classic_hid_stress
index 990fd11..888de3e 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_classic_hid_stress
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_classic_hid_stress
@@ -6,16 +6,17 @@
 
 AUTHOR = 'chromeos-bluetooth'
 NAME = 'bluetooth_AdapterSRHealth.sr_peer_wake_classic_hid_stress'
-PURPOSE = ('Single run of a test')
+PURPOSE = ('50 iterations of sr_peer_wake_classic_hid test')
 CRITERIA = 'Pass test'
-ATTRIBUTES = ''
-TIME = 'MEDIUM' # ~8 mins on Hatch
+ATTRIBUTES = 'suite:bluetooth_stress'
+TIME = 'LONG' # 40 Minutes
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
-DOC = """ Single run of a Suspend-Resume health testcase. """
+DOC = """ 50 iterations of Suspend-Resume health testcase. """
 
 args_dict = utils.args_to_dict(args)
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid
index 8a98ad0..2d7764b 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_AdapterSRHealth.sr_peer_wake_le_hid'
 PURPOSE = ('Single run of a test')
 CRITERIA = 'Pass test'
-ATTRIBUTES = 'suite:bluetooth_flaky'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
 TIME = 'SHORT'
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Single run of a Suspend-Resume health testcase. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid_reconnect_le_hid b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid_reconnect_le_hid
new file mode 100644
index 0000000..3dce8d6
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid_reconnect_le_hid
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRHealth.sr_peer_wake_le_hid_reconnect_le_hid'
+PURPOSE = ('Single run of a test')
+CRITERIA = 'Pass test'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_e2e'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
+
+DOC = """ Single run of the Suspend-Resume health testcase:
+sr_peer_wake_le_hid_reconnect_le_hid
+
+It can catch failures in pairing more than one LE device after
+suspend-resume (b/219952140).
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_AdapterSRHealth', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid_stress b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid_stress
index ac9668a..04ec1ad 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid_stress
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_peer_wake_le_hid_stress
@@ -6,16 +6,17 @@
 
 AUTHOR = 'chromeos-bluetooth'
 NAME = 'bluetooth_AdapterSRHealth.sr_peer_wake_le_hid_stress'
-PURPOSE = ('Single run of a test')
+PURPOSE = ('50 iterations of sr_peer_wake_le_hid test')
 CRITERIA = 'Pass test'
-ATTRIBUTES = ''
-TIME = 'MEDIUM' # ~8 mins on Hatch
+ATTRIBUTES = 'suite:bluetooth_stress'
+TIME = 'LONG' # 40 Minutes
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
-DOC = """ Single run of a Suspend-Resume health testcase. """
+DOC = """ 50 iterations of  Suspend-Resume health testcase. """
 
 args_dict = utils.args_to_dict(args)
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_a2dp b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_a2dp
index e2b26b3..2127cc5 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_a2dp
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_a2dp
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Auto-reconnect A2DP peer after suspend/resume. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_classic_hid b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_classic_hid
index bc541d8..05876bd 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_classic_hid
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_classic_hid
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Single run of a Suspend-Resume health testcase. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_classic_hid_stress b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_classic_hid_stress
index dc201d9..cc4c5d0 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_classic_hid_stress
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_classic_hid_stress
@@ -6,16 +6,17 @@
 
 AUTHOR = 'chromeos-bluetooth'
 NAME = 'bluetooth_AdapterSRHealth.sr_reconnect_classic_hid_stress'
-PURPOSE = ('Single run of a test')
+PURPOSE = ('50 iterations of sr_reconnect_classic_hid test')
 CRITERIA = 'Pass test'
 ATTRIBUTES = 'suite:bluetooth_stress'
-TIME = 'LONG' # ~15 mins on Morphius
+TIME = 'LONG' # 40 Minutes
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
-DOC = """ Single run of a Suspend-Resume health testcase. """
+DOC = """ 50 iterations of a Suspend-Resume health testcase. """
 
 args_dict = utils.args_to_dict(args)
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_le_hid b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_le_hid
index 2895a94..ed5bb74 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_le_hid
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_le_hid
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Single run of a Suspend-Resume health testcase. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_le_hid_stress b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_le_hid_stress
index 9f9297c..aa90a0b 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_le_hid_stress
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_le_hid_stress
@@ -6,16 +6,17 @@
 
 AUTHOR = 'chromeos-bluetooth'
 NAME = 'bluetooth_AdapterSRHealth.sr_reconnect_le_hid_stress'
-PURPOSE = ('Single run of a test')
+PURPOSE = ('50 iterations of sr_reconnect_le_hid test')
 CRITERIA = 'Pass test'
 ATTRIBUTES = 'suite:bluetooth_stress'
-TIME = 'LONG' # ~15 mins on Morphius
+TIME = 'LONG' # 40 Minutes
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
-DOC = """ Single run of a Suspend-Resume health testcase. """
+DOC = """ 50 iterations of a Suspend-Resume health testcase. """
 
 args_dict = utils.args_to_dict(args)
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_classic_hid b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_classic_hid
index d8452e8..3dd014b 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_classic_hid
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_classic_hid
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """ Single run of a Suspend-Resume health testcase. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_classic_le_hid b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_classic_le_hid
index 4920036..ec09266 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_classic_le_hid
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_classic_le_hid
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """ Single run of a Suspend-Resume health testcase. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_le_hid b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_le_hid
index f81c06c..9b39841 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_le_hid
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_reconnect_multiple_le_hid
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:2'
+PY_VERSION = 3
 
 DOC = """ Single run of a Suspend-Resume health testcase. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_advertising b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_advertising
index 9aed82a..c0683cb 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_advertising
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_advertising
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Single run of a Suspend-Resume health testcase. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_discovering b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_discovering
index bd6c506..8ce8ef6 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_discovering
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_discovering
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Single run of a Suspend-Resume health testcase. """
 
diff --git a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_powered_off b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_powered_off
index 903d1fa..bbd2552 100644
--- a/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_powered_off
+++ b/server/site_tests/bluetooth_AdapterSRHealth/control.sr_while_powered_off
@@ -14,6 +14,7 @@
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """ Single run of suspend/resume with adapter powered off. """
 
diff --git a/server/site_tests/bluetooth_FastPair/bluetooth_FastPair.py b/server/site_tests/bluetooth_FastPair/bluetooth_FastPair.py
new file mode 100644
index 0000000..87e30af
--- /dev/null
+++ b/server/site_tests/bluetooth_FastPair/bluetooth_FastPair.py
@@ -0,0 +1,188 @@
+# Lint as: python2, python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+""" Bluetooth test that tests the Fast Pair scenarios."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from base64 import b64decode
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import (
+        BluetoothAdapterQuickTests)
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_tests import (
+        UNSUPPORTED_BT_HW_FILTERING_CHIPSETS)
+from autotest_lib.server import autotest
+
+imported_password_util = True
+
+try:
+    # Importing this private util fails on public boards (e.g amd64-generic)
+    from autotest_lib.client.common_lib.cros import password_util
+except ImportError:
+    imported_password_util = False
+    logging.error('Failed to import password_util from autotest-private')
+
+test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
+batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
+
+
+class bluetooth_FastPair(BluetoothAdapterQuickTests):
+    """Fast Pair tests"""
+
+    UI_TEST = 'bluetooth_FastPairUI'
+
+    KEY_PEM_ARG_KEY = 'fast_pair_antispoofing_key_pem'
+    ACCOUNT_KEY_ARG_KEY = 'fast_pair_account_key'
+    USERNAME_ARG_KEY = 'fast_pair_username'
+    PASSWORD_ARG_KEY = 'fast_pair_password'
+
+    _key_pem = None
+    _account_key = None
+    _username = None
+    _password = None
+
+    def run_ui_test(self):
+        """Runs the UI client test, which clicks through the Fast Pair UI"""
+        client_at = autotest.Autotest(self.host)
+        client_at.run_test(self.UI_TEST,
+                           username=self._username,
+                           password=self._password)
+        client_at._check_client_test_result(self.host, self.UI_TEST)
+
+    @test_wrapper('Fast Pair Initial Pairing',
+                  devices={'BLE_FAST_PAIR': 1},
+                  skip_chipsets=UNSUPPORTED_BT_HW_FILTERING_CHIPSETS)
+    def fast_pair_initial_pairing_test(self):
+        """Test the Fast Pair initial pairing scenario"""
+        try:
+            # Setup the Fast Pair device.
+            device = self.devices['BLE_FAST_PAIR'][0]
+            device.SetAntispoofingKeyPem(self._key_pem)
+
+            # Toggling discoverable here ensures the device starts
+            # advertising during this test.
+            device.SetDiscoverable(False)
+            device.SetDiscoverable(True)
+
+            # Run UI test, which clicks through the pairing UI flow.
+            self.run_ui_test()
+            # Verify device is paired.
+            return self.bluetooth_facade.device_is_paired(device.address)
+        except Exception as e:
+            logging.error('exception in fast_pair_initial_pairing_test %s',
+                          str(e))
+            return False
+
+    @test_wrapper('Fast Pair Subsequent Pairing',
+                  devices={'BLE_FAST_PAIR': 1},
+                  skip_chipsets=UNSUPPORTED_BT_HW_FILTERING_CHIPSETS)
+    def fast_pair_subsequent_pairing_test(self):
+        """Test the Fast Pair subsequent pairing scenario"""
+        try:
+            # Setup the Fast Pair device.
+            device = self.devices['BLE_FAST_PAIR'][0]
+            device.SetAntispoofingKeyPem(None)
+            device.AddAccountKey(self._account_key)
+
+            # Toggling discoverable here ensures the device starts
+            # advertising during this test.
+            device.SetDiscoverable(False)
+            device.SetDiscoverable(True)
+
+            # Run UI test, which clicks through the pairing UI flow.
+            self.run_ui_test()
+
+            # Verify device is paired.
+            return self.bluetooth_facade.device_is_paired(device.address)
+        except Exception as e:
+            logging.error('exception in fast_pair_subsequent_pairing_test %s',
+                          str(e))
+            return False
+
+    def set_key_pem(self, args_dict):
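+        """Set the anti-spoofing key PEM from password_util or, failing that,
+        from the control file args; raise TestError if neither provides it."""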
+        if imported_password_util:
+            self._key_pem = b64decode(
+                    password_util.get_fast_pair_anti_spoofing_key())
+
+        elif args_dict is not None and self.KEY_PEM_ARG_KEY in args_dict:
+            self._key_pem = b64decode(args_dict[self.KEY_PEM_ARG_KEY])
+
+        if self._key_pem is None:
+            raise error.TestError('Valid %s arg is missing' %
+                                  self.KEY_PEM_ARG_KEY)
+
+    def set_account_key(self, args_dict):
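+        """Set the Fast Pair account key from password_util or, failing that,
+        from the control file args; raise TestError if neither provides it."""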
+        if imported_password_util:
+            self._account_key = b64decode(
+                    password_util.get_fast_pair_account_key())
+
+        elif args_dict is not None and self.ACCOUNT_KEY_ARG_KEY in args_dict:
+            self._account_key = b64decode(args_dict[self.ACCOUNT_KEY_ARG_KEY])
+
+        if self._account_key is None:
+            raise error.TestError('Valid %s arg is missing' %
+                                  self.ACCOUNT_KEY_ARG_KEY)
+
+    def set_username(self, args_dict):
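+        """Set the test account username from password_util or, failing that,
+        from the control file args; raise TestError if neither provides it."""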
+        if imported_password_util:
+            self._username = (
+                    password_util.get_fast_pair_user_credentials().username)
+
+        elif args_dict is not None and self.USERNAME_ARG_KEY in args_dict:
+            self._username = args_dict[self.USERNAME_ARG_KEY]
+
+        if self._username is None:
+            raise error.TestError('Valid %s arg is missing' %
+                                  self.USERNAME_ARG_KEY)
+
+    def set_password(self, args_dict):
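+        """Set the test account password from password_util or, failing that,
+        from the control file args; raise TestError if neither provides it."""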
+        if imported_password_util:
+            self._password = (
+                    password_util.get_fast_pair_user_credentials().password)
+
+        elif args_dict is not None and self.PASSWORD_ARG_KEY in args_dict:
+            self._password = args_dict[self.PASSWORD_ARG_KEY]
+
+        if self._password is None:
+            raise error.TestError('Valid %s arg is missing' %
+                                  self.PASSWORD_ARG_KEY)
+
+    @batch_wrapper('Fast Pair')
+    def fast_pair_batch_run(self, num_iterations=1, test_name=None):
+        """ Batch of Fair Pair tests """
+        self.fast_pair_initial_pairing_test()
+        self.fast_pair_subsequent_pairing_test()
+
+    def run_once(self,
+                 host,
+                 num_iterations=1,
+                 args_dict=None,
+                 test_name=None,
+                 flag='Quick Health'):
+        """Running Fast Pair tests.
+
+        @param host: the DUT, usually a chromebook
+        @param num_iterations: the number of times to execute the test
+        @param test_name: the test to run or None for all tests
+        @param flag: run tests with this flag (default: Quick Health)
+
+        """
+
+        # First set required args
+        self.set_key_pem(args_dict)
+        self.set_account_key(args_dict)
+        self.set_username(args_dict)
+        self.set_password(args_dict)
+
+        # Initialize and run the test batch or the requested specific test
+        self.quick_test_init(host,
+                             use_btpeer=True,
+                             flag=flag,
+                             args_dict=args_dict)
+        self.fast_pair_batch_run(num_iterations, test_name)
+        self.quick_test_cleanup()
diff --git a/server/site_tests/bluetooth_FastPair/control b/server/site_tests/bluetooth_FastPair/control
new file mode 100644
index 0000000..4f2dc23
--- /dev/null
+++ b/server/site_tests/bluetooth_FastPair/control
@@ -0,0 +1,41 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "shafit, Chromium OS, chromeos-cross-device-eng@google.com"
+NAME = 'bluetooth_FastPair'
+PURPOSE = ('Tests for the Fast Pair feature')
+CRITERIA = """
+This test will run through the Fast Pair pairing scenarios and confirm the
+device is paired by using the Fast Pair protocol
+"""
+ATTRIBUTES = ''
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+This test runs through the Fast Pair scenarios. This server test is responsible
+for setting up the BT peer device and then invoking the bluetooth_FastPairUI
+client test to click through the UI and attempt to pair the device.
+
+If running locally, required args:
+  - fast_pair_antispoofing_key_pem: Base64 encoded EC Key Pem for the BT peer.
+  - fast_pair_account_key: Base64 encoded account key linked to user.
+  - fast_pair_username: Username of the account to log in with.
+  - fast_pair_password: Password for that account.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_FastPair', host=host, num_iterations=1,
+    args_dict=args_dict)
+
+parallel_simple(run, machines)
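
The run() wrapper above passes whatever utils.args_to_dict(args) produces straight into run_once as args_dict, which the set_* helpers then read by key; roughly, that utility turns the key=value pairs supplied via the --args flag into a dict. A stand-in sketch, only to illustrate the dict shape the test receives (parse_local_args is a hypothetical helper, not the real utility, and the credential values are placeholders):

    # Hypothetical stand-in for the key=value parsing done by args_to_dict;
    # it shows the shape of the dict that run_once receives as args_dict.
    def parse_local_args(arg_strings):
        parsed = {}
        for chunk in arg_strings:
            for pair in chunk.split():
                if '=' in pair:
                    key, value = pair.split('=', 1)
                    parsed[key] = value
        return parsed

    args_dict = parse_local_args(
            ['fast_pair_username=user@example.com fast_pair_password=fake-pass'])
    assert args_dict['fast_pair_username'] == 'user@example.com'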
diff --git a/server/site_tests/bluetooth_FastPair/control.fast_pair_initial_pairing_test b/server/site_tests/bluetooth_FastPair/control.fast_pair_initial_pairing_test
new file mode 100644
index 0000000..cdc03ca
--- /dev/null
+++ b/server/site_tests/bluetooth_FastPair/control.fast_pair_initial_pairing_test
@@ -0,0 +1,41 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "shafit, Chromium OS, chromeos-cross-device-eng@google.com"
+NAME = 'bluetooth_FastPair.fast_pair_initial_pairing_test'
+PURPOSE = ('Tests for the Fast Pair feature initial pairing')
+CRITERIA = """
+This test will run through the Fast Pair pairing scenarios and confirm the
+device is paired by using the Fast Pair protocol
+"""
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_flaky'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+This test runs through the Fast Pair initial pairing scenario. This server test is responsible
+for setting up the BT peer device and then invoking the bluetooth_FastPairUI
+client test to click through the UI and attempt to pair the device.
+
+If running locally, required args:
+  - fast_pair_antispoofing_key_pem: Base64 encoded EC Key Pem for the BT peer.
+  - fast_pair_account_key: Base64 encoded account key linked to user.
+  - fast_pair_username: Username of the account to log in with.
+  - fast_pair_password: Password for that account.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_FastPair', host=host, num_iterations=1,
+    args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_FastPair/control.fast_pair_subsequent_pairing_test b/server/site_tests/bluetooth_FastPair/control.fast_pair_subsequent_pairing_test
new file mode 100644
index 0000000..43f1dcb
--- /dev/null
+++ b/server/site_tests/bluetooth_FastPair/control.fast_pair_subsequent_pairing_test
@@ -0,0 +1,41 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "shafit, Chromium OS, chromeos-cross-device-eng@google.com"
+NAME = 'bluetooth_FastPair.fast_pair_subsequent_pairing_test'
+PURPOSE = ('Tests for the Fast Pair feature subsequent pairing')
+CRITERIA = """
+This test will run through the Fast Pair pairing scenarios and confirm the
+device is paired by using the Fast Pair protocol
+"""
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_flaky'
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+This test runs through the Fast Pair subsequent pairing scenario. This server test is responsible
+for setting up the BT peer device and then invoking the bluetooth_FastPairUI
+client test to click through the UI and attempt to pair the device.
+
+If running locally, required args:
+  - fast_pair_antispoofing_key_pem: Base64 encoded EC Key Pem for the BT peer.
+  - fast_pair_account_key: Base64 encoded account key linked to user.
+  - fast_pair_username: Username of the account to log in with.
+  - fast_pair_password: Password for that account.
+"""
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_FastPair', host=host, num_iterations=1,
+    args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_PeerUpdate/bluetooth_PeerUpdate.py b/server/site_tests/bluetooth_PeerUpdate/bluetooth_PeerUpdate.py
index f1d6240..83d2fee 100644
--- a/server/site_tests/bluetooth_PeerUpdate/bluetooth_PeerUpdate.py
+++ b/server/site_tests/bluetooth_PeerUpdate/bluetooth_PeerUpdate.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -11,7 +12,6 @@
 storage.
 """
 
-from autotest_lib.client.common_lib import error
 from autotest_lib.server import test
 from autotest_lib.server.cros.bluetooth import bluetooth_peer_update
 
@@ -28,17 +28,5 @@
 
         @param host: the DUT, usually a chromebook
         """
-        try:
-            self.host = host
-            self.host.initialize_btpeer(btpeer_args=btpeer_args)
-            commit = None
-            (_, commit) = bluetooth_peer_update.get_latest_commit()
-            if commit is None:
-                raise error.TestFail('Unable to get current commit')
-            if not bluetooth_peer_update.download_installation_files(self.host,
-                                                                     commit):
-                raise error.TestFail('Unable to download installation files ')
-            bluetooth_peer_update.update_peers(self.host, commit)
-        finally:
-            if not bluetooth_peer_update.cleanup(host, commit):
-                raise error.TestFail('Cleanup failed')
+        host.initialize_btpeer(btpeer_args=btpeer_args)
+        bluetooth_peer_update.update_all_peers(host, raise_error=True)
diff --git a/server/site_tests/bluetooth_PeerUpdate/control b/server/site_tests/bluetooth_PeerUpdate/control
index e0d6f64..b6f0c1d 100644
--- a/server/site_tests/bluetooth_PeerUpdate/control
+++ b/server/site_tests/bluetooth_PeerUpdate/control
@@ -8,12 +8,13 @@
 NAME = 'bluetooth_PeerUpdate'
 PURPOSE = 'Update chameleond on Bluetooth peer device'
 CRITERIA = 'Bluetooth peer should be present'
-ATTRIBUTES = 'suite:bluetooth_e2e, suite:bluetooth_mtbf'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_wifi_testbed_update'
 TIME = 'SHORT'    # This test takes about 1 minute while running locally
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'bluetooth'
 TEST_TYPE = 'server'
 DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 
diff --git a/server/site_tests/bluetooth_PeerVerify/bluetooth_PeerVerify.py b/server/site_tests/bluetooth_PeerVerify/bluetooth_PeerVerify.py
new file mode 100644
index 0000000..da07445
--- /dev/null
+++ b/server/site_tests/bluetooth_PeerVerify/bluetooth_PeerVerify.py
@@ -0,0 +1,77 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+""" Bluetooth test that checks the RSSI of Bluetooth peers
+This to used for checking Bluetooth test beds
+
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import logging
+
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import (
+        BluetoothAdapterQuickTests)
+
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_tests import (
+        test_retry_and_log)
+
+test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
+batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
+
+
+class bluetooth_PeerVerify(BluetoothAdapterQuickTests):
+    """Test to check the RSSI of btpeers."""
+
+    @test_wrapper('Check if RSSI of peers > -70',
+                  devices={'MOUSE': -1},
+                  use_all_peers=True)
+    @test_retry_and_log(False)
+    def check_rssi(self):
+        """Check if RSSI > -70. """
+        try:
+            rssi_list = []
+            self.results = {}
+            for n, device in enumerate(self.devices['MOUSE']):
+                rssi = self.get_device_sample_rssi(device)
+                rssi_list.append(rssi)
+                logging.info('RSSI for peer %s is %s', n, rssi)
+            logging.info('RSSI values are %s', rssi_list)
+            self.results = {'rssi': rssi_list}
+            return all(rssi is not None and rssi > -70
+                       for rssi in rssi_list)
+        except Exception as e:
+            logging.debug('exception in test_check_rssi %s', str(e))
+            return False
+
+    @batch_wrapper('Verify Peer RSSI')
+    def verify_peer_batch_run(self, num_iterations=1, test_name=None):
+        """ Batch of checks for btpeer. """
+        self.check_rssi()
+
+    def run_once(self,
+                 host,
+                 num_iterations=1,
+                 args_dict=None,
+                 test_name=None,
+                 flag='Quick Health'):
+        """Running Bluetooth adapter suspend resume with peer autotest.
+
+        @param host: the DUT, usually a chromebook
+        @param num_iterations: the number of times to execute the test
+        @param test_name: the test to run or None for all tests
+        @param flag: run tests with this flag (default: Quick Health)
+
+        """
+
+        # Initialize and run the test batch or the requested specific test
+        self.quick_test_init(host,
+                             use_btpeer=True,
+                             flag=flag,
+                             args_dict=args_dict)
+        self.verify_peer_batch_run(num_iterations, test_name)
+        self.quick_test_cleanup()
diff --git a/server/site_tests/bluetooth_PeerVerify/control b/server/site_tests/bluetooth_PeerVerify/control
new file mode 100644
index 0000000..5b908e7
--- /dev/null
+++ b/server/site_tests/bluetooth_PeerVerify/control
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_PeerVerify'
+PURPOSE = ('Test to check health of Bluetooth peers')
+CRITERIA = 'Pass all health tests'
+ATTRIBUTES = 'suite:bluetooth'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+      Batch of tests to check the health of Bluetooth peers.
+      It currently contains:
+      check_rssi: Check the RSSI of each peer and confirm it is > -70.
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_PeerVerify', host=host,
+                 num_iterations=1, args_dict=args_dict)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_PeerVerify/control.check_rssi b/server/site_tests/bluetooth_PeerVerify/control.check_rssi
new file mode 100644
index 0000000..a46a241
--- /dev/null
+++ b/server/site_tests/bluetooth_PeerVerify/control.check_rssi
@@ -0,0 +1,30 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_PeerVerify.check_rssi'
+PURPOSE = ('Test to check health of Bluetooth peers')
+CRITERIA = 'Pass all health tests'
+ATTRIBUTES = 'suite:bluetooth, suite:bluetooth_wifi_testbed_update, suite:wificell_dut_validation'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+      Test to check the RSSI of each peer and confirm it is > -70.
+      """
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('bluetooth_PeerVerify', host=host, num_iterations=1,
+                 args_dict=args_dict, test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_RegressionServer/bluetooth_RegressionServer.py b/server/site_tests/bluetooth_RegressionServer/bluetooth_RegressionServer.py
deleted file mode 100644
index 8cf6e91..0000000
--- a/server/site_tests/bluetooth_RegressionServer/bluetooth_RegressionServer.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import autotest
-from autotest_lib.server import hosts
-from autotest_lib.server import test
-
-
-class bluetooth_RegressionServer(test.test):
-    """Server part of the Bluetooth Semi-Automatic Regression Tests."""
-    version = 1
-
-    def run_once(self, client_ip, device_addrs):
-        """Run Server side of Bluetooth Regression tests.
-
-        @param client_ip: Device under test.
-        @param device_addrs: MAC addresses of Bluetooth devices under test.
-        """
-        if not client_ip:
-            raise error.TestError('Must provide client\'s IP address to test')
-
-        client = hosts.create_host(client_ip)
-        client_at = autotest.Autotest(client)
-
-        logging.info('Running client side tests')
-        client_at.run_test('bluetooth_RegressionClient',
-                            addrs=device_addrs, close_browser=False,
-                            test_phase='reboot')
-        logging.info('Starting reboot from Server')
-        client.reboot()
-        logging.info('Returning to Client after reboot')
-        client_at.run_test('bluetooth_RegressionClient',
-                           addrs=device_addrs, test_phase='client')
diff --git a/server/site_tests/bluetooth_RegressionServer/control b/server/site_tests/bluetooth_RegressionServer/control
deleted file mode 100644
index 80dcab3..0000000
--- a/server/site_tests/bluetooth_RegressionServer/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "bluetooth_RegressionServer"
-TIME = "MEDIUM"
-TEST_CATEGORY = "General"
-TEST_CLASS = "bluetooth"
-TEST_TYPE = "server"
-
-DOC = """
-Server side of Bluetooth Semi-Automated Regression tests.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_system_bluetooth_Regression(machine):
-    if 'device_addrs' in args_dict:
-        arg = args_dict.get('device_addrs')
-        device_addrs = arg.split(',') if arg != '' else []
-        job.run_test('bluetooth_RegressionServer', client_ip=machine,
-                     device_addrs=device_addrs)
-    else:
-        logging.info('Please provide Bluetooth device MAC addresses: '
-                     '"device_addrs=addr1,addr2,addr3"')
-
-job.parallel_simple(run_system_bluetooth_Regression, machines)
diff --git a/server/site_tests/camera_HAL3Server/camera_HAL3Server.py b/server/site_tests/camera_HAL3Server/camera_HAL3Server.py
deleted file mode 100644
index ee9b2c9..0000000
--- a/server/site_tests/camera_HAL3Server/camera_HAL3Server.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-from autotest_lib.server import autotest, test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-
-class camera_HAL3Server(test.test):
-    """
-    Server side camera_HAL3 test for configure dummy image on chart tablet and
-    run test on DUT.
-    """
-    version = 1
-    DISPLAY_LEVEL = 96.0
-    SCENE_NAME = 'scene.pdf'
-    BRIGHTNESS_CMD = 'backlight_tool --get_brightness_percent'
-    SET_BRIGHTNESS_CMD = 'backlight_tool --set_brightness_percent=%s'
-
-    def setup(self, chart_host):
-        # prepare chart device
-        self.chart_dir = chart_host.get_tmp_dir()
-        logging.debug('chart_dir=%s', self.chart_dir)
-        self.display_facade = remote_facade_factory.RemoteFacadeFactory(
-                chart_host).create_display_facade()
-
-        # set chart display brightness
-        self.init_display_level = chart_host.run(
-                self.BRIGHTNESS_CMD).stdout.rstrip()
-        chart_host.run(self.SET_BRIGHTNESS_CMD % self.DISPLAY_LEVEL)
-
-        # keep display always on
-        chart_host.run('stop powerd', ignore_status=True)
-
-        # scp scene to chart_host
-        chart_host.send_file(
-                os.path.join(self.bindir, 'files', self.SCENE_NAME),
-                self.chart_dir)
-        chart_host.run('chmod', args=('-R', '755', self.chart_dir))
-
-        # display scene
-        self.display_facade.load_url(
-                'file://' + os.path.join(self.chart_dir, self.SCENE_NAME))
-        self.display_facade.set_fullscreen(True)
-
-    def run_once(self, host, chart_host, **kwargs):
-        autotest.Autotest(host).run_test('camera_HAL3', **kwargs)
-
-    def cleanup(self, chart_host):
-        # restore display default behavior
-        chart_host.run('start powerd', ignore_status=True)
-        chart_host.run(self.SET_BRIGHTNESS_CMD % self.init_display_level)
diff --git a/server/site_tests/camera_HAL3Server/control.frame.back b/server/site_tests/camera_HAL3Server/control.frame.back
deleted file mode 100644
index 7adba68..0000000
--- a/server/site_tests/camera_HAL3Server/control.frame.back
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.frame.back'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            options=[
-                    '--gtest_filter=Camera3FrameTest/*', '--camera_facing=back'
-            ])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.frame.front b/server/site_tests/camera_HAL3Server/control.frame.front
deleted file mode 100644
index 5fc3187..0000000
--- a/server/site_tests/camera_HAL3Server/control.frame.front
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.frame.front'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            options=[
-                    '--gtest_filter=Camera3FrameTest/*',
-                    '--camera_facing=front'
-            ])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.jda.back b/server/site_tests/camera_HAL3Server/control.jda.back
deleted file mode 100644
index 754c8be..0000000
--- a/server/site_tests/camera_HAL3Server/control.jda.back
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.jda.back'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            cmd_timeout=5,
-            camera_hals=['usb.so'],
-            options=[
-                    '--gtest_filter=*/Camera3SingleFrameTest.GetFrame/0',
-                    '--camera_facing=back'
-            ],
-            capability='hw_dec_jpeg',
-            test_config={'force_jpeg_hw_dec': True})
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.jda.front b/server/site_tests/camera_HAL3Server/control.jda.front
deleted file mode 100644
index 1062bd1..0000000
--- a/server/site_tests/camera_HAL3Server/control.jda.front
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.jda.front'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            cmd_timeout=5,
-            camera_hals=['usb.so'],
-            options=[
-                    '--gtest_filter=*/Camera3SingleFrameTest.GetFrame/0',
-                    '--camera_facing=front'
-            ],
-            capability='hw_dec_jpeg',
-            test_config={'force_jpeg_hw_dec': True})
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.jea.back b/server/site_tests/camera_HAL3Server/control.jea.back
deleted file mode 100644
index 58f1c82..0000000
--- a/server/site_tests/camera_HAL3Server/control.jea.back
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.jea.back'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            cmd_timeout=5,
-            options=[
-                    '--gtest_filter=*/Camera3SimpleStillCaptureTest.TakePictureTest/0',
-                    '--camera_facing=back'
-            ],
-            capability='hw_enc_jpeg',
-            test_config={'force_jpeg_hw_enc': True})
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.jea.front b/server/site_tests/camera_HAL3Server/control.jea.front
deleted file mode 100644
index 7faab4e..0000000
--- a/server/site_tests/camera_HAL3Server/control.jea.front
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.jea.front'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            cmd_timeout=5,
-            options=[
-                    '--gtest_filter=*/Camera3SimpleStillCaptureTest.TakePictureTest/0',
-                    '--camera_facing=front'
-            ],
-            capability='hw_enc_jpeg',
-            test_config={'force_jpeg_hw_enc': True})
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.preview.back b/server/site_tests/camera_HAL3Server/control.preview.back
deleted file mode 100644
index 7ebfa42..0000000
--- a/server/site_tests/camera_HAL3Server/control.preview.back
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.preview.back'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            options=[
-                    '--gtest_filter=Camera3PreviewTest/*',
-                    '--camera_facing=back'
-            ])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.preview.front b/server/site_tests/camera_HAL3Server/control.preview.front
deleted file mode 100644
index b119c7d..0000000
--- a/server/site_tests/camera_HAL3Server/control.preview.front
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.preview.front'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            options=[
-                    '--gtest_filter=Camera3PreviewTest/*',
-                    '--camera_facing=front'
-            ])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.recording.back b/server/site_tests/camera_HAL3Server/control.recording.back
deleted file mode 100644
index add8e1b..0000000
--- a/server/site_tests/camera_HAL3Server/control.recording.back
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.recording.back'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            options=[
-                    '--gtest_filter=Camera3RecordingFixture/*',
-                    '--camera_facing=back'
-            ])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.recording.front b/server/site_tests/camera_HAL3Server/control.recording.front
deleted file mode 100644
index fe72631..0000000
--- a/server/site_tests/camera_HAL3Server/control.recording.front
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.recording.front'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            options=[
-                    '--gtest_filter=Camera3RecordingFixture/*',
-                    '--camera_facing=front'
-            ])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.still_capture.back b/server/site_tests/camera_HAL3Server/control.still_capture.back
deleted file mode 100644
index ee11f03..0000000
--- a/server/site_tests/camera_HAL3Server/control.still_capture.back
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.still_capture.back'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            options=[
-                    '--gtest_filter=Camera3StillCaptureTest/*',
-                    '--camera_facing=back'
-            ])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.still_capture.front b/server/site_tests/camera_HAL3Server/control.still_capture.front
deleted file mode 100644
index a5eeed2..0000000
--- a/server/site_tests/camera_HAL3Server/control.still_capture.front
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.still_capture.front'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            options=[
-                    '--gtest_filter=Camera3StillCaptureTest/*',
-                    '--camera_facing=front'
-            ])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.stream.back b/server/site_tests/camera_HAL3Server/control.stream.back
deleted file mode 100644
index 4acf6b7..0000000
--- a/server/site_tests/camera_HAL3Server/control.stream.back
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.stream.back'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            options=[
-                    '--gtest_filter=Camera3StreamTest/*',
-                    '--camera_facing=back'
-            ])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/control.stream.front b/server/site_tests/camera_HAL3Server/control.stream.front
deleted file mode 100644
index e7be50c..0000000
--- a/server/site_tests/camera_HAL3Server/control.stream.front
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chrome OS Team, chromeos-camera-eng@google.com'
-NAME = 'camera_HAL3Server.stream.front'
-ATTRIBUTES = ''
-TEST_TYPE = 'server'
-TIME = 'LONG'
-DOC = """
-Server-side test of cros_camera_test control over chart tablet and launch camera_HAL3 on DUT.
-"""
-
-
-def run(machine):
-    chart_ip = utils.args_to_dict(args).get(
-            'chart') or utils.get_lab_chart_address(machine)
-    if not chart_ip:
-        raise error.TestError('missing option --args="chart=<CHART IP>"')
-
-    job.run_test(
-            'camera_HAL3Server',
-            host=hosts.create_host(machine),
-            chart_host=hosts.create_host(chart_ip),
-            options=[
-                    '--gtest_filter=Camera3StreamTest/*',
-                    '--camera_facing=front'
-            ])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/camera_HAL3Server/files/scene.pdf b/server/site_tests/camera_HAL3Server/files/scene.pdf
deleted file mode 100644
index f4f94e6..0000000
--- a/server/site_tests/camera_HAL3Server/files/scene.pdf
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/cellular_Callbox_AssertCellularData/cellular_Callbox_AssertCellularData.py b/server/site_tests/cellular_Callbox_AssertCellularData/cellular_Callbox_AssertCellularData.py
new file mode 100644
index 0000000..7d9ef4d
--- /dev/null
+++ b/server/site_tests/cellular_Callbox_AssertCellularData/cellular_Callbox_AssertCellularData.py
@@ -0,0 +1,50 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import test
+from autotest_lib.server.cros.cellular.callbox_utils import CallboxLookup as cbl
+from autotest_lib.server.cros.cellular.callbox_utils import cmw500_cellular_simulator as cmw
+from autotest_lib.server.cros.cellular.simulation_utils import ChromebookCellularDut
+from autotest_lib.server.cros.cellular.simulation_utils import LteSimulation
+
+
+class cellular_Callbox_AssertCellularData(test.test):
+    """
+    Asserts that cellular data works.
+
+    The test establishes a connection to the appropriate CMW500 callbox. Then
+    it asserts that the cellular data connection provided to it matches the
+    data connection provided by ethernet. Any differences are considered an
+    error. If the cellular data connection is not provided, the second curl
+    will throw an exception.
+    """
+    version = 1
+
+    def run_once(self, host):
+        """Simple test that asserts that data provided through simulated
+        cellular connection matches network ethernet."""
+        self.log = logging.getLogger()
+        self.sim = cmw.CMW500CellularSimulator(cbl.callboxes[host.hostname],
+                                               5025)
+        self.dut = ChromebookCellularDut.ChromebookCellularDut(host, self.log)
+        self.simulation = LteSimulation.LteSimulation(
+                self.sim, self.log, self.dut, {
+                        'attach_retries': 1,
+                        'attach_timeout': 120
+                }, None)
+        parameter_list = [
+                'band', '2', 'bw', '20', 'mimo', '2x2', 'tm', '1', 'pul', '0',
+                'pdl', 'high'
+        ]
+        self.simulation.parse_parameters(parameter_list)
+        self.simulation.start()
+        eth_result = host.run("curl --interface eth0 google.com")
+        cell_result = host.run("curl --interface rmnet_data0 google.com")
+        if eth_result.stdout != cell_result.stdout:
+            raise error.TestFail(
+                    "Ethernet and cellular curl output not equal.")
diff --git a/server/site_tests/cellular_Callbox_AssertCellularData/control b/server/site_tests/cellular_Callbox_AssertCellularData/control
new file mode 100644
index 0000000..b4695f8
--- /dev/null
+++ b/server/site_tests/cellular_Callbox_AssertCellularData/control
@@ -0,0 +1,25 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "latware@google.com"
+NAME = "cellular_Callbox_AssertCellularData"
+ATTRIBUTES = "suite:cellular_callbox"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_TYPE = "server"
+PY_VERSION = 3
+DOC = """
+This test asserts the stability of cellular data on DUTs. It does so by
+connecting to the CMW500 callbox attached to the DUT, confirming that the
+callbox is running a cellular simulation that provides data to the DUT over
+its cellular connection, and then asserting that the data received over that
+connection is identical to the data received over the DUT's ethernet
+connection.
+"""
+
+def run_test(machine):
+    host = hosts.create_host(machine)
+    job.run_test('cellular_Callbox_AssertCellularData', host=host)
+
+parallel_simple(run_test, machines)
diff --git a/server/site_tests/cellular_Callbox_AssertSMS/cellular_Callbox_AssertSMS.py b/server/site_tests/cellular_Callbox_AssertSMS/cellular_Callbox_AssertSMS.py
new file mode 100644
index 0000000..d5ac76b
--- /dev/null
+++ b/server/site_tests/cellular_Callbox_AssertSMS/cellular_Callbox_AssertSMS.py
@@ -0,0 +1,66 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import test
+from autotest_lib.server.cros.cellular.callbox_utils import CallboxLookup as cbl
+from autotest_lib.server.cros.cellular.callbox_utils import cmw500_cellular_simulator as cmw
+from autotest_lib.server.cros.cellular.simulation_utils import ChromebookCellularDut
+from autotest_lib.server.cros.cellular.simulation_utils import LteSimulation
+
+
+class cellular_Callbox_AssertSMS(test.test):
+    """
+    Asserts that SMS functionality works.
+
+    This test asserts that SMS messages are received. It does so by connecting
+    to the callbox, setting the text message to be a unique string, asserting
+    that the text is not in /var/log/net.log, restarting the modemmanager
+    with log level in debug mode so that SMS messages are sent to
+    /var/log/net.log, sending the SMS message, then asserting that the
+    string appears in /var/log/net.log
+    """
+    version = 1
+
+    def run_once(self, host):
+        """Simple test that asserts that SMS messages are received
+        by the Chromebook DUT
+        """
+        self.log = logging.getLogger()
+        self.sim = cmw.CMW500CellularSimulator(cbl.callboxes[host.hostname],
+                                               5025)
+        self.dut = ChromebookCellularDut.ChromebookCellularDut(host, self.log)
+        self.simulation = LteSimulation.LteSimulation(
+                self.sim, self.log, self.dut, {
+                        'attach_retries': 1,
+                        'attach_timeout': 120
+                }, None)
+        parameter_list = [
+                'band', '2', 'bw', '20', 'mimo', '2x2', 'tm', '1', 'pul', '0',
+                'pdl', 'high'
+        ]
+        self.simulation.parse_parameters(parameter_list)
+        self.simulation.start()
+        host.run("stop modemmanager")
+        host.run("start modemmanager MM_LOGLEVEL=DEBUG")
+        text_string = "SMSWRAPPER" + str(time.time()) + "SMSWRAPPER"
+        try:
+            grep_out = host.run("cat /var/log/net.log | grep %s" % text_string)
+        except Exception:
+            pass
+        else:
+            raise error.TestFail(
+                    "Expected not to find '%s', got '%s'"
+                    % (text_string, grep_out))
+        self.simulation.send_sms(text_string)
+        try:
+            grep_out = host.run("cat /var/log/net.log | grep %s" % text_string)
+        except Exception:
+            raise error.TestFail(
+                "Expected string (%s) not found in /var/log/net.log"
+                % text_string)
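
If the single grep above ever proves racy against log flushing, the same check can be expressed as a short poll. A sketch of that pattern, where run_on_dut is a hypothetical command runner with host.run-like semantics (accepts ignore_status, returns an object with exit_status) and the log path mirrors the one used above:

    # Sketch: poll a log file for a unique marker string until a timeout,
    # instead of grepping exactly once. run_on_dut is a hypothetical stand-in
    # for host.run.
    import time

    def wait_for_marker(run_on_dut, marker, log_path='/var/log/net.log',
                        timeout_s=30, poll_s=2):
        deadline = time.time() + timeout_s
        while time.time() < deadline:
            result = run_on_dut('grep -F %s %s' % (marker, log_path),
                                ignore_status=True)
            if result.exit_status == 0:
                return True
            time.sleep(poll_s)
        return False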
diff --git a/server/site_tests/cellular_Callbox_AssertSMS/control b/server/site_tests/cellular_Callbox_AssertSMS/control
new file mode 100644
index 0000000..2175543
--- /dev/null
+++ b/server/site_tests/cellular_Callbox_AssertSMS/control
@@ -0,0 +1,23 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "latware@google.com"
+NAME = "cellular_Callbox_AssertSMS"
+ATTRIBUTES = "suite:cellular_callbox"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_TYPE = "server"
+PY_VERSION = 3
+DOC = """
+This test asserts the functionality of SMS messages on Chromebooks. It does so
+by connecting to a CMW500 Callbox, setting the SMS message to a unique string,
+sending the message, then asserting that the message is received by the
+Chromebook.
+"""
+
+def run_test(machine):
+    host = hosts.create_host(machine)
+    job.run_test('cellular_Callbox_AssertSMS', host=host)
+
+parallel_simple(run_test, machines)
diff --git a/server/site_tests/cellular_ChromeEndToEnd/cellular_ChromeEndToEnd.py b/server/site_tests/cellular_ChromeEndToEnd/cellular_ChromeEndToEnd.py
index 6a92c1e..4a286f9 100644
--- a/server/site_tests/cellular_ChromeEndToEnd/cellular_ChromeEndToEnd.py
+++ b/server/site_tests/cellular_ChromeEndToEnd/cellular_ChromeEndToEnd.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/cellular_ChromeEndToEnd/control.autoconnectCellular b/server/site_tests/cellular_ChromeEndToEnd/control.autoconnectCellular
index cc7d567..7b6394f 100644
--- a/server/site_tests/cellular_ChromeEndToEnd/control.autoconnectCellular
+++ b/server/site_tests/cellular_ChromeEndToEnd/control.autoconnectCellular
@@ -9,7 +9,7 @@
 NAME = 'cellular_ChromeEndToEnd.autoconnectCellular'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'servo_state:WORKING'
-
+PY_VERSION = 3
 DOC = """
 Tests that the device auto-connects to the cellular network.
 """
diff --git a/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular b/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular
deleted file mode 100644
index 6da1427..0000000
--- a/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'harpreet'
-TIME = 'SHORT'
-NAME = 'cellular_ChromeEndToEnd.enableDisableCellular'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:cellular_endtoend'
-DEPENDENCIES = 'servo_state:WORKING'
-
-DOC = """
-Tests that cellular can be enabled or disabled on the DUI.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('cellular_ChromeEndToEnd',
-                 host=host,
-                 raw_cmdline_args=args,
-                 test='enableDisableCellular')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular_att b/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular_att
new file mode 100644
index 0000000..5ef5235
--- /dev/null
+++ b/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular_att
@@ -0,0 +1,28 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'harpreet'
+TIME = 'SHORT'
+NAME = 'cellular_ChromeEndToEnd.enableDisableCellular_att'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_endtoend'
+DEPENDENCIES = 'servo_state:WORKING, carrier:att'
+PY_VERSION = 3
+DOC = """
+Tests that cellular can be enabled or disabled on the DUT.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('cellular_ChromeEndToEnd',
+                 host=host,
+                 raw_cmdline_args=args,
+                 test='enableDisableCellular')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular_tmobile b/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular_tmobile
new file mode 100644
index 0000000..686a4da
--- /dev/null
+++ b/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular_tmobile
@@ -0,0 +1,28 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'harpreet'
+TIME = 'SHORT'
+NAME = 'cellular_ChromeEndToEnd.enableDisableCellular_tmobile'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_endtoend'
+DEPENDENCIES = 'servo_state:WORKING, carrier:tmobile'
+PY_VERSION = 3
+DOC = """
+Tests that cellular can be enabled or disabled on the DUT.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('cellular_ChromeEndToEnd',
+                 host=host,
+                 raw_cmdline_args=args,
+                 test='enableDisableCellular')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular_verizon b/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular_verizon
new file mode 100644
index 0000000..a2b7ba2
--- /dev/null
+++ b/server/site_tests/cellular_ChromeEndToEnd/control.enableDisableCellular_verizon
@@ -0,0 +1,28 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'harpreet'
+TIME = 'SHORT'
+NAME = 'cellular_ChromeEndToEnd.enableDisableCellular_verizon'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_endtoend'
+DEPENDENCIES = 'servo_state:WORKING, carrier:verizon'
+PY_VERSION = 3
+DOC = """
+Tests that cellular can be enabled or disabled on the DUT.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('cellular_ChromeEndToEnd',
+                 host=host,
+                 raw_cmdline_args=args,
+                 test='enableDisableCellular')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred b/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred
deleted file mode 100644
index e72acda..0000000
--- a/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'harpreet'
-TIME = 'SHORT'
-NAME = 'cellular_ChromeEndToEnd.ethernetPreferred'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'servo_state:WORKING'
-
-DOC = """
-Tests that the device prefers ethernet over cellular network.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('cellular_ChromeEndToEnd',
-                 host=host,
-                 raw_cmdline_args=args,
-                 test='ethernetPreferred')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred_att b/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred_att
new file mode 100644
index 0000000..7575d5c
--- /dev/null
+++ b/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred_att
@@ -0,0 +1,28 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'harpreet'
+TIME = 'SHORT'
+NAME = 'cellular_ChromeEndToEnd.ethernetPreferred_att'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_endtoend'
+DEPENDENCIES = 'servo_state:WORKING, carrier:att'
+PY_VERSION = 3
+DOC = """
+Tests that the device prefers ethernet over cellular network.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('cellular_ChromeEndToEnd',
+                 host=host,
+                 raw_cmdline_args=args,
+                 test='ethernetPreferred')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred_tmobile b/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred_tmobile
new file mode 100644
index 0000000..9970ed4
--- /dev/null
+++ b/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred_tmobile
@@ -0,0 +1,28 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'harpreet'
+TIME = 'SHORT'
+NAME = 'cellular_ChromeEndToEnd.ethernetPreferred_tmobile'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_endtoend'
+DEPENDENCIES = 'servo_state:WORKING, carrier:tmobile'
+PY_VERSION = 3
+DOC = """
+Tests that the device prefers ethernet over cellular network.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('cellular_ChromeEndToEnd',
+                 host=host,
+                 raw_cmdline_args=args,
+                 test='ethernetPreferred')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred_verizon b/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred_verizon
new file mode 100644
index 0000000..3cd7d96
--- /dev/null
+++ b/server/site_tests/cellular_ChromeEndToEnd/control.ethernetPreferred_verizon
@@ -0,0 +1,28 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'harpreet'
+TIME = 'SHORT'
+NAME = 'cellular_ChromeEndToEnd.ethernetPreferred_verizon'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_endtoend'
+DEPENDENCIES = 'servo_state:WORKING, carrier:verizon'
+PY_VERSION = 3
+DOC = """
+Tests that the device prefers ethernet over cellular network.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test('cellular_ChromeEndToEnd',
+                 host=host,
+                 raw_cmdline_args=args,
+                 test='ethernetPreferred')
+
+parallel_simple(run, machines)
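
All three ethernetPreferred variants above share the same plumbing: the free-form command-line `args` list is converted into a dict, and servo settings are extracted from it before the host is created. The snippet below is a rough, hypothetical approximation of that key=value parsing (the real helper is autotest's utils.args_to_dict and may differ in detail), shown only to make the servo_host/servo_port flow concrete.

# Hypothetical approximation of the args -> servo settings flow used above.
def args_to_dict_approx(args):
    """Turn ['servo_host=labstation1', 'servo_port=9999'] into a dict."""
    result = {}
    for arg in args:
        if '=' in arg:
            key, value = arg.split('=', 1)
            result[key.strip()] = value.strip()
    return result

if __name__ == '__main__':
    cmdline_args = ['servo_host=labstation1.example', 'servo_port=9999']
    args_dict = args_to_dict_approx(cmdline_args)
    # The control files pass a dict like this to
    # hosts.CrosHost.get_servo_arguments(args_dict) to build servo_args.
    print(args_dict)
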
diff --git a/server/site_tests/cellular_StaleModemReboot/cellular_StaleModemReboot.py b/server/site_tests/cellular_StaleModemReboot/cellular_StaleModemReboot.py
index 18fc199..d374948 100644
--- a/server/site_tests/cellular_StaleModemReboot/cellular_StaleModemReboot.py
+++ b/server/site_tests/cellular_StaleModemReboot/cellular_StaleModemReboot.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/cellular_StaleModemReboot/control b/server/site_tests/cellular_StaleModemReboot/control
index 536bc9d..310b0c0 100644
--- a/server/site_tests/cellular_StaleModemReboot/control
+++ b/server/site_tests/cellular_StaleModemReboot/control
@@ -15,9 +15,8 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "network"
 TEST_TYPE = "server"
-ATTRIBUTES = "suite:cellular_modem_repair"
 DEPENDENCIES = "servo_state:WORKING"
-
+PY_VERSION = 3
 DOC = """
   Tests that a cellular modem is available for testing.
 
diff --git a/server/site_tests/cellular_StaleModemReboot/control.att b/server/site_tests/cellular_StaleModemReboot/control.att
new file mode 100644
index 0000000..819c890
--- /dev/null
+++ b/server/site_tests/cellular_StaleModemReboot/control.att
@@ -0,0 +1,77 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "Harpreet Grewal <harpreet@chromium.org>"
+NAME = "cellular_StaleModemReboot.att"
+PURPOSE = "Verify modem is available for testing."
+CRITERIA = """
+This test will fail if there is no response to the modem status command or
+the modem is in a non-testable state after two or more tries.
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_modem_repair"
+DEPENDENCIES = "servo_state:WORKING, carrier:att"
+PY_VERSION = 3
+DOC = """
+  Tests that a cellular modem is available for testing.
+
+  The test detects the modem state by running the modem status command on
+  the DUT and extracting the state from its output. It saves the original
+  modem state and reboots the DUT regardless of the modem's current state.
+  After the reboot, it queries the modem state again; if the modem does not
+  respond, or the state is one of the states listed below, it reboots the
+  DUT again, up to a maximum of two tries by default. The user can override
+  the number of tries by passing a 'tries' flag with the desired value.
+
+  REBOOT STATES (If modem is in any of these states, DUT is cold rebooted using
+  the servo.):
+  MODEM STATE FAILED = -1
+  MODEM STATE UNKNOWN = 0
+  MODEM STATE DISABLED = 3
+  MODEM STATE DISABLING = 4
+  MODEM STATE DISCONNECTING = 9
+  GOBI MODEM STATE UNKNOWN = 0
+  GOBI MODEM STATE DISABLED = 10
+  GOBI MODEM STATE DISABLING = 20
+  GOBI MODEM STATE DISCONNECTING = 70
+
+  WAIT DELAY STATES (Waits for a maximum of 120 seconds for the modem to get
+  into a testable state. If the modem is still not in a stable state after the
+  first try, DUT will be rebooted.):
+  MODEM STATE INITIALIZING = 1
+  MODEM STATE ENABLING = 5
+  MODEM STATE ENABLED = 6
+  MODEM STATE SEARCHING = 7
+  GOBI MODEM STATE ENABLING = 30
+  GOBI MODEM STATE ENABLED = 40
+  GOBI MODEM STATE SEARCHING = 50
+
+  STABLE STATES (Test does nothing.):
+  MODEM STATE REGISTERED = 8
+  MODEM STATE CONNECTING = 10
+  MODEM STATE CONNECTED = 11
+  GOBI MODEM STATE REGISTERED = 60
+  GOBI MODEM STATE CONNECTING = 80
+  GOBI MODEM STATE CONNECTED = 90
+
+  LOCKED STATE (Test fails, the device should never be in this state unless
+  the SIM is locked):
+  MODEM STATE LOCKED = 2
+
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_StaleModemReboot(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    tries = int(args_dict.get('tries', 2))
+    job.run_test('cellular_StaleModemReboot', host=host, tries=tries)
+
+parallel_simple(run_StaleModemReboot, machines)
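
The DOC above (repeated for each carrier variant) describes a small decision procedure: classify the reported modem state, then either cold-reboot via servo, wait for the modem to settle, accept it as testable, or fail on a locked SIM. The following is a minimal, hypothetical sketch of that classification using the state codes quoted in the DOC; it is not the test's real implementation, and the helper names are invented for illustration.

# Illustrative sketch only: the numeric state codes are the ones quoted in the
# control file's DOC above; the constant and function names are hypothetical.
MM_REBOOT, MM_WAIT = {-1, 0, 3, 4, 9}, {1, 5, 6, 7}
MM_STABLE, MM_LOCKED = {8, 10, 11}, {2}
GOBI_REBOOT, GOBI_WAIT, GOBI_STABLE = {0, 10, 20, 70}, {30, 40, 50}, {60, 80, 90}

def classify_state(state, is_gobi=False):
    """Map a modem state code to 'reboot', 'wait', 'stable' or 'locked'."""
    if state is None:                       # no response to modem status
        return 'reboot'
    if is_gobi:
        groups = (('reboot', GOBI_REBOOT), ('wait', GOBI_WAIT),
                  ('stable', GOBI_STABLE))
    else:
        groups = (('locked', MM_LOCKED), ('reboot', MM_REBOOT),
                  ('wait', MM_WAIT), ('stable', MM_STABLE))
    for verdict, codes in groups:
        if state in codes:
            return verdict
    return 'reboot'                         # unknown codes get the safe default

def ensure_testable(get_state, cold_reboot, tries=2):
    """Reboot until the modem is stable; get_state() returns (code, is_gobi)."""
    for _ in range(tries):
        verdict = classify_state(*get_state())
        if verdict == 'stable':
            return True
        if verdict == 'locked':
            return False                    # locked SIM: the test fails outright
        # 'wait' states get up to 120 seconds to settle in the real test before
        # falling through to a reboot; collapsed here for brevity.
        cold_reboot()
    return classify_state(*get_state()) == 'stable'
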
diff --git a/server/site_tests/cellular_StaleModemReboot/control.tmobile b/server/site_tests/cellular_StaleModemReboot/control.tmobile
new file mode 100644
index 0000000..5b51ff0
--- /dev/null
+++ b/server/site_tests/cellular_StaleModemReboot/control.tmobile
@@ -0,0 +1,77 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "Harpreet Grewal <harpreet@chromium.org>"
+NAME = "cellular_StaleModemReboot.tmobile"
+PURPOSE = "Verify modem is available for testing."
+CRITERIA = """
+This test will fail if there is no response to the modem status command or
+the modem is in a non-testable state after two or more tries.
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_modem_repair"
+DEPENDENCIES = "servo_state:WORKING, carrier:tmobile"
+PY_VERSION = 3
+DOC = """
+  Tests that a cellular modem is available for testing.
+
+  The test detects the modem state by running the modem status command on
+  the DUT and extracting the state from its output. It saves the original
+  modem state and reboots the DUT regardless of the modem's current state.
+  After the reboot, it queries the modem state again; if the modem does not
+  respond, or the state is one of the states listed below, it reboots the
+  DUT again, up to a maximum of two tries by default. The user can override
+  the number of tries by passing a 'tries' flag with the desired value.
+
+  REBOOT STATES (If modem is in any of these states, DUT is cold rebooted using
+  the servo.):
+  MODEM STATE FAILED = -1
+  MODEM STATE UNKNOWN = 0
+  MODEM STATE DISABLED = 3
+  MODEM STATE DISABLING = 4
+  MODEM STATE DISCONNECTING = 9
+  GOBI MODEM STATE UNKNOWN = 0
+  GOBI MODEM STATE DISABLED = 10
+  GOBI MODEM STATE DISABLING = 20
+  GOBI MODEM STATE DISCONNECTING = 70
+
+  WAIT DELAY STATES (Waits for a maximum of 120 seconds for the modem to get
+  into a testable state. If the modem is still not in a stable state after the
+  first try, DUT will be rebooted.):
+  MODEM STATE INITIALIZING = 1
+  MODEM STATE ENABLING = 5
+  MODEM STATE ENABLED = 6
+  MODEM STATE SEARCHING = 7
+  GOBI MODEM STATE ENABLING = 30
+  GOBI MODEM STATE ENABLED = 40
+  GOBI MODEM STATE SEARCHING = 50
+
+  STABLE STATES (Test does nothing.):
+  MODEM STATE REGISTERED = 8
+  MODEM STATE CONNECTING = 10
+  MODEM STATE CONNECTED = 11
+  GOBI MODEM STATE REGISTERED = 60
+  GOBI MODEM STATE CONNECTING = 80
+  GOBI MODEM STATE CONNECTED = 90
+
+  LOCKED STATE (Test fails, the device should never be in this state unless
+  the SIM is locked):
+  MODEM STATE LOCKED = 2
+
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_StaleModemReboot(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    tries = int(args_dict.get('tries', 2))
+    job.run_test('cellular_StaleModemReboot', host=host, tries=tries)
+
+parallel_simple(run_StaleModemReboot, machines)
diff --git a/server/site_tests/cellular_StaleModemReboot/control.verizon b/server/site_tests/cellular_StaleModemReboot/control.verizon
new file mode 100644
index 0000000..a7b795c
--- /dev/null
+++ b/server/site_tests/cellular_StaleModemReboot/control.verizon
@@ -0,0 +1,77 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "Harpreet Grewal <harpreet@chromium.org>"
+NAME = "cellular_StaleModemReboot.verizon"
+PURPOSE = "Verify modem is available for testing."
+CRITERIA = """
+This test will fail if there is no response to the modem status command or
+the modem is in a non-testable state after two or more tries.
+"""
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "network"
+TEST_TYPE = "server"
+ATTRIBUTES = "suite:cellular_modem_repair"
+DEPENDENCIES = "servo_state:WORKING, carrier:verizon"
+PY_VERSION = 3
+DOC = """
+  Tests that a cellular modem is available for testing.
+
+  The test detects the modem state by running the modem status command on
+  the DUT and extracting the state from its output. It saves the original
+  modem state and reboots the DUT regardless of the modem's current state.
+  After the reboot, it queries the modem state again; if the modem does not
+  respond, or the state is one of the states listed below, it reboots the
+  DUT again, up to a maximum of two tries by default. The user can override
+  the number of tries by passing a 'tries' flag with the desired value.
+
+  REBOOT STATES (If modem is in any of these states, DUT is cold rebooted using
+  the servo.):
+  MODEM STATE FAILED = -1
+  MODEM STATE UNKNOWN = 0
+  MODEM STATE DISABLED = 3
+  MODEM STATE DISABLING = 4
+  MODEM STATE DISCONNECTING = 9
+  GOBI MODEM STATE UNKNOWN = 0
+  GOBI MODEM STATE DISABLED = 10
+  GOBI MODEM STATE DISABLING = 20
+  GOBI MODEM STATE DISCONNECTING = 70
+
+  WAIT DELAY STATES (Waits for a maximum of 120 seconds for the modem to get
+  into a testable state. If the modem is still not in a stable state after the
+  first try, DUT will be rebooted.):
+  MODEM STATE INITIALIZING = 1
+  MODEM STATE ENABLING = 5
+  MODEM STATE ENABLED = 6
+  MODEM STATE SEARCHING = 7
+  GOBI MODEM STATE ENABLING = 30
+  GOBI MODEM STATE ENABLED = 40
+  GOBI MODEM STATE SEARCHING = 50
+
+  STABLE STATES (Test does nothing.):
+  MODEM STATE REGISTERED = 8
+  MODEM STATE CONNECTING = 10
+  MODEM STATE CONNECTED = 11
+  GOBI MODEM STATE REGISTERED = 60
+  GOBI MODEM STATE CONNECTING = 80
+  GOBI MODEM STATE CONNECTED = 90
+
+  LOCKED STATE (Test fails, the device should never be in this state unless
+  the SIM is locked):
+  MODEM STATE LOCKED = 2
+
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_StaleModemReboot(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    tries = int(args_dict.get('tries', 2))
+    job.run_test('cellular_StaleModemReboot', host=host, tries=tries)
+
+parallel_simple(run_StaleModemReboot, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/cheets_CTS_Instant.py b/server/site_tests/cheets_CTS_Instant/cheets_CTS_Instant.py
index 83dde12..73f0a17 100644
--- a/server/site_tests/cheets_CTS_Instant/cheets_CTS_Instant.py
+++ b/server/site_tests/cheets_CTS_Instant/cheets_CTS_Instant.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -23,17 +24,22 @@
 # Maximum default time allowed for each individual CTS module.
 _CTS_TIMEOUT_SECONDS = 3600
 
-# Public download locations for android cts bundles.
 _PUBLIC_CTS = 'https://dl.google.com/dl/android/cts/'
-_PARTNER_CTS = 'gs://chromeos-partner-cts/'
-_CTS_URI = {
-        'arm': _PUBLIC_CTS + 'android-cts_instant-9.0_r14-linux_x86-arm.zip',
-        'x86': _PUBLIC_CTS + 'android-cts_instant-9.0_r14-linux_x86-x86.zip',
+_INTERNAL_CTS = 'gs://chromeos-arc-images/cts/bundle/P/'
+_BUNDLE_MAP = {
+        (None, 'arm'):
+        _PUBLIC_CTS + 'android-cts_instant-9.0_r20-linux_x86-arm.zip',
+        (None, 'x86'):
+        _PUBLIC_CTS + 'android-cts_instant-9.0_r20-linux_x86-x86.zip',
+        ('LATEST', 'arm'):
+        _INTERNAL_CTS + 'android-cts_instant-9.0_r20-linux_x86-arm.zip',
+        ('LATEST', 'x86'):
+        _INTERNAL_CTS + 'android-cts_instant-9.0_r20-linux_x86-x86.zip',
+        # No 'DEV' job for CTS_Instant for now.
 }
 _CTS_MEDIA_URI = _PUBLIC_CTS + 'android-cts-media-1.5.zip'
 _CTS_MEDIA_LOCALPATH = '/tmp/android-cts-media'
 
-
 class cheets_CTS_Instant(tradefed_test.TradefedTest):
     """Sets up tradefed to run CTS tests."""
     version = 1
@@ -79,8 +85,11 @@
         cmd.append('--quiet-output=true')
         return cmd
 
-    def _get_default_bundle_url(self, bundle):
-        return _CTS_URI[bundle]
+    def _get_bundle_url(self, uri, bundle):
+        if uri and (uri.startswith('http') or uri.startswith('gs')):
+            return uri
+        else:
+            return _BUNDLE_MAP[(uri, bundle)]
 
     def _get_tradefed_base_dir(self):
         return 'android-cts_instant'
@@ -140,6 +149,5 @@
                 _CTS_MEDIA_URI if needs_push_media else None,
                 _CTS_MEDIA_LOCALPATH),
             bundle=bundle,
-            cts_uri=_CTS_URI,
             login_precondition_commands=login_precondition_commands,
             precondition_commands=precondition_commands)
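
The rewritten test above replaces the flat _CTS_URI table with a _BUNDLE_MAP keyed by (uri, bundle): an explicit http/gs URI passed by a control file is used verbatim, uri=None selects the public r20 bundle, and uri='LATEST' selects the internal gs:// copy. The standalone sketch below simply restates that lookup outside the class so it can be exercised directly; the map contents are copied from the hunk above.

# Standalone restatement of the bundle-selection logic added above.
_PUBLIC_CTS = 'https://dl.google.com/dl/android/cts/'
_INTERNAL_CTS = 'gs://chromeos-arc-images/cts/bundle/P/'
_BUNDLE_MAP = {
    (None, 'arm'): _PUBLIC_CTS + 'android-cts_instant-9.0_r20-linux_x86-arm.zip',
    (None, 'x86'): _PUBLIC_CTS + 'android-cts_instant-9.0_r20-linux_x86-x86.zip',
    ('LATEST', 'arm'): _INTERNAL_CTS + 'android-cts_instant-9.0_r20-linux_x86-arm.zip',
    ('LATEST', 'x86'): _INTERNAL_CTS + 'android-cts_instant-9.0_r20-linux_x86-x86.zip',
}

def get_bundle_url(uri, bundle):
    """Mirror of cheets_CTS_Instant._get_bundle_url, for illustration only."""
    if uri and (uri.startswith('http') or uri.startswith('gs')):
        return uri          # explicit download location wins
    return _BUNDLE_MAP[(uri, bundle)]

# The new r20 collect-tests-only controls pass uri='LATEST', so they resolve to
# the internal gs:// bundle; controls that pass no uri fall back to the public
# dl.google.com copy.
assert get_bundle_url('LATEST', 'arm').startswith('gs://')
assert get_bundle_url(None, 'x86').startswith('https://')
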
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_dev.arm.waivers b/server/site_tests/cheets_CTS_Instant/control.9.0_dev.arm.waivers
deleted file mode 100644
index 7ae50f6..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.9.0_dev.arm.waivers
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.9.0_dev.arm.waivers'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_cpu_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run preview version of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_dev.arm.waivers',
-        test_name='cheets_CTS_Instant.9.0_dev.arm.waivers',
-        run_template=['run', 'commandAndExit', 'cts-instant',
-            # HACK. What we really want is including the following two cases.
-            #   android.media.cts.AudioTrackTest#testPlayStaticData
-            #   android.media.cts.MediaCodecListTest#testRequiredMediaCodecList
-            # Since cts-instant does not support --include-filter, roughly
-            # approximate by --module and --exclude of a few expensive classes
-            # (taking 15 out of 20 minutes of CtsMediaTestCases.)
-            '--module', 'CtsMediaTestCases',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncodeVirtualDisplayWithCompositionTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncoderTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.MediaDrmClearkeyTest',
-        ],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='cts-instant-dev',
-        target_plan=None,
-        load_waivers=False,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts_instant-6862722-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_dev.x86.waivers b/server/site_tests/cheets_CTS_Instant/control.9.0_dev.x86.waivers
deleted file mode 100644
index 85f95b0..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.9.0_dev.x86.waivers
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.9.0_dev.x86.waivers'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_cpu_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run preview version of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_dev.x86.waivers',
-        test_name='cheets_CTS_Instant.9.0_dev.x86.waivers',
-        run_template=['run', 'commandAndExit', 'cts-instant',
-            # HACK. What we really want is including the following two cases.
-            #   android.media.cts.AudioTrackTest#testPlayStaticData
-            #   android.media.cts.MediaCodecListTest#testRequiredMediaCodecList
-            # Since cts-instant does not support --include-filter, roughly
-            # approximate by --module and --exclude of a few expensive classes
-            # (taking 15 out of 20 minutes of CtsMediaTestCases.)
-            '--module', 'CtsMediaTestCases',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncodeVirtualDisplayWithCompositionTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncoderTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.MediaDrmClearkeyTest',
-        ],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='cts-instant-dev',
-        target_plan=None,
-        load_waivers=False,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts_instant-6862722-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases
deleted file mode 100644
index 954cc38..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_cpu_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccessibilityServiceTestCases, CtsAccessibilityTestCases, CtsAccountManagerTestCases, CtsAnimationTestCases, CtsAppSecurityHostTestCases, CtsAppWidgetTestCases, CtsAutoFillServiceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
-        test_name='cheets_CTS_Instant.9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
-        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsPreferenceTestCases', '--exclude-filter', 'CtsPrintTestCases', '--exclude-filter', 'CtsLocationTestCases', '--exclude-filter', 'CtsDpiTestCases', '--exclude-filter', 'CtsDatabaseTestCases', '--exclude-filter', 'CtsPdfTestCases', '--exclude-filter', 'CtsSystemUiTestCases', '--exclude-filter', 'CtsUsbTests', '--exclude-filter', 'CtsBackgroundRestrictionsTestCases', '--exclude-filter', 'CtsWidgetTestCases', '--exclude-filter', 'CtsUsageStatsTestCases', '--exclude-filter', 'CtsSampleHostTestCases', '--exclude-filter', 'CtsPermissionTestCases', '--exclude-filter', 'CtsInputMethodServiceHostTestCases', '--exclude-filter', 'CtsInputMethodTestCases', '--exclude-filter', 'CtsDisplayTestCases', '--exclude-filter', 'CtsWindowManagerDeviceTestCases', '--exclude-filter', 'CtsFragmentTestCases', '--exclude-filter', 'CtsFragmentTestCasesSdk26', '--exclude-filter', 'CtsUiDeviceTestCases', '--exclude-filter', 'CtsHostsideWebViewTests', '--exclude-filter', 'CtsUiAutomationTestCases', '--exclude-filter', 'CtsGestureTestCases', '--exclude-filter', 'CtsTextTestCases', '--exclude-filter', 'CtsColorModeTestCases', '--exclude-filter', 'CtsPreference2TestCases', '--exclude-filter', 'CtsSampleDeviceTestCases', '--exclude-filter', 'CtsOsHostTestCases', '--exclude-filter', 'CtsCameraTestCases', '--exclude-filter', 'CtsUidIsolationTestCases', '--exclude-filter', 'CtsLocation2TestCases', '--exclude-filter', 'CtsExternalSourcesTestCases', '--exclude-filter', 'CtsMediaTestCases', '--exclude-filter', 'CtsViewTestCases', '--exclude-filter', 'CtsFileSystemTestCases', '--exclude-filter', 'CtsPermission2TestCases', '--exclude-filter', 'CtsToastTestCases', '--exclude-filter', 'CtsDreamsTestCases', '--exclude-filter', 'CtsMultiUserTestCases', '--exclude-filter', 'CtsMediaHostTestCases', '--exclude-filter', 'CtsEdiHostTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts_instant-9.0_r14-linux_x86-arm.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases b/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases
deleted file mode 100644
index 6322d43..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.9.0_r14.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_cpu_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBackgroundRestrictionsTestCases, CtsCameraTestCases, CtsColorModeTestCases, CtsDatabaseTestCases, CtsDisplayTestCases, CtsDpiTestCases, CtsDreamsTestCases, CtsEdiHostTestCases, CtsExternalSourcesTestCases, CtsFileSystemTestCases, CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsGestureTestCases, CtsHostsideWebViewTests, CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases, CtsLocation2TestCases, CtsLocationTestCases, CtsMediaHostTestCases, CtsMediaTestCases, CtsMultiUserTestCases, CtsOsHostTestCases, CtsPdfTestCases, CtsPermission2TestCases, CtsPermissionTestCases, CtsPreference2TestCases, CtsPreferenceTestCases, CtsPrintTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
-        test_name='cheets_CTS_Instant.9.0_r14.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
-        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsTextTestCases', '--exclude-filter', 'CtsAnimationTestCases', '--exclude-filter', 'CtsSampleDeviceTestCases', '--exclude-filter', 'CtsAutoFillServiceTestCases', '--exclude-filter', 'CtsAccountManagerTestCases', '--exclude-filter', 'CtsAppWidgetTestCases', '--exclude-filter', 'CtsUiDeviceTestCases', '--exclude-filter', 'CtsAccessibilityTestCases', '--exclude-filter', 'CtsSampleHostTestCases', '--exclude-filter', 'CtsToastTestCases', '--exclude-filter', 'CtsViewTestCases', '--exclude-filter', 'CtsAppSecurityHostTestCases', '--exclude-filter', 'CtsSystemUiTestCases', '--exclude-filter', 'CtsUsbTests', '--exclude-filter', 'CtsWidgetTestCases', '--exclude-filter', 'CtsAccessibilityServiceTestCases', '--exclude-filter', 'CtsUsageStatsTestCases', '--exclude-filter', 'CtsWindowManagerDeviceTestCases', '--exclude-filter', 'CtsUiAutomationTestCases', '--exclude-filter', 'CtsUidIsolationTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts_instant-9.0_r14-linux_x86-arm.zip',
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases
deleted file mode 100644
index 226fd4f..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.9.0_r14.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_cpu_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleHostTestCases, CtsSystemUiTestCases, CtsTextTestCases, CtsToastTestCases, CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUidIsolationTestCases, CtsUsageStatsTestCases, CtsUsbTests, CtsViewTestCases, CtsWidgetTestCases, CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
-        test_name='cheets_CTS_Instant.9.0_r14.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsPreferenceTestCases', '--exclude-filter', 'CtsPrintTestCases', '--exclude-filter', 'CtsLocationTestCases', '--exclude-filter', 'CtsDpiTestCases', '--exclude-filter', 'CtsDatabaseTestCases', '--exclude-filter', 'CtsPdfTestCases', '--exclude-filter', 'CtsDisplayTestCases', '--exclude-filter', 'CtsBackgroundRestrictionsTestCases', '--exclude-filter', 'CtsPermissionTestCases', '--exclude-filter', 'CtsInputMethodServiceHostTestCases', '--exclude-filter', 'CtsAccountManagerTestCases', '--exclude-filter', 'CtsInputMethodTestCases', '--exclude-filter', 'CtsAccessibilityTestCases', '--exclude-filter', 'CtsAppSecurityHostTestCases', '--exclude-filter', 'CtsAccessibilityServiceTestCases', '--exclude-filter', 'CtsFragmentTestCasesSdk26', '--exclude-filter', 'CtsGestureTestCases', '--exclude-filter', 'CtsColorModeTestCases', '--exclude-filter', 'CtsAutoFillServiceTestCases', '--exclude-filter', 'CtsPreference2TestCases', '--exclude-filter', 'CtsOsHostTestCases', '--exclude-filter', 'CtsCameraTestCases', '--exclude-filter', 'CtsDreamsTestCases', '--exclude-filter', 'CtsAnimationTestCases', '--exclude-filter', 'CtsLocation2TestCases', '--exclude-filter', 'CtsHostsideWebViewTests', '--exclude-filter', 'CtsAppWidgetTestCases', '--exclude-filter', 'CtsMediaTestCases', '--exclude-filter', 'CtsExternalSourcesTestCases', '--exclude-filter', 'CtsFileSystemTestCases', '--exclude-filter', 'CtsPermission2TestCases', '--exclude-filter', 'CtsMultiUserTestCases', '--exclude-filter', 'CtsMediaHostTestCases', '--exclude-filter', 'CtsEdiHostTestCases', '--exclude-filter', 'CtsFragmentTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts_instant-9.0_r14-linux_x86-arm.zip',
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.tradefed-run-collect-tests-only-internal
deleted file mode 100644
index 9f86b14..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.arm.tradefed-run-collect-tests-only-internal
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.9.0_r14.arm.tradefed-run-collect-tests-only-internal'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_cpu_arm'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'LENGTHY'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run all of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='9.0_r14.arm.tradefed-run-collect-tests-only-internal',
-        test_name='cheets_CTS_Instant.9.0_r14.arm.tradefed-run-collect-tests-only-internal',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot'],
-        retry_template=None,
-        target_module=None,
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts_instant-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases
deleted file mode 100644
index 7e03bbc..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_cpu_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccessibilityServiceTestCases, CtsAccessibilityTestCases, CtsAccountManagerTestCases, CtsAnimationTestCases, CtsAppSecurityHostTestCases, CtsAppWidgetTestCases, CtsAutoFillServiceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
-        test_name='cheets_CTS_Instant.9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
-        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsPreferenceTestCases', '--exclude-filter', 'CtsPrintTestCases', '--exclude-filter', 'CtsLocationTestCases', '--exclude-filter', 'CtsDpiTestCases', '--exclude-filter', 'CtsDatabaseTestCases', '--exclude-filter', 'CtsPdfTestCases', '--exclude-filter', 'CtsSystemUiTestCases', '--exclude-filter', 'CtsUsbTests', '--exclude-filter', 'CtsBackgroundRestrictionsTestCases', '--exclude-filter', 'CtsWidgetTestCases', '--exclude-filter', 'CtsUsageStatsTestCases', '--exclude-filter', 'CtsSampleHostTestCases', '--exclude-filter', 'CtsPermissionTestCases', '--exclude-filter', 'CtsInputMethodServiceHostTestCases', '--exclude-filter', 'CtsInputMethodTestCases', '--exclude-filter', 'CtsDisplayTestCases', '--exclude-filter', 'CtsWindowManagerDeviceTestCases', '--exclude-filter', 'CtsFragmentTestCases', '--exclude-filter', 'CtsFragmentTestCasesSdk26', '--exclude-filter', 'CtsUiDeviceTestCases', '--exclude-filter', 'CtsHostsideWebViewTests', '--exclude-filter', 'CtsUiAutomationTestCases', '--exclude-filter', 'CtsGestureTestCases', '--exclude-filter', 'CtsTextTestCases', '--exclude-filter', 'CtsColorModeTestCases', '--exclude-filter', 'CtsPreference2TestCases', '--exclude-filter', 'CtsSampleDeviceTestCases', '--exclude-filter', 'CtsOsHostTestCases', '--exclude-filter', 'CtsCameraTestCases', '--exclude-filter', 'CtsUidIsolationTestCases', '--exclude-filter', 'CtsLocation2TestCases', '--exclude-filter', 'CtsExternalSourcesTestCases', '--exclude-filter', 'CtsMediaTestCases', '--exclude-filter', 'CtsViewTestCases', '--exclude-filter', 'CtsFileSystemTestCases', '--exclude-filter', 'CtsPermission2TestCases', '--exclude-filter', 'CtsToastTestCases', '--exclude-filter', 'CtsDreamsTestCases', '--exclude-filter', 'CtsMultiUserTestCases', '--exclude-filter', 'CtsMediaHostTestCases', '--exclude-filter', 'CtsEdiHostTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts_instant-9.0_r14-linux_x86-x86.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases b/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases
deleted file mode 100644
index e639ee8..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.9.0_r14.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_cpu_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBackgroundRestrictionsTestCases, CtsCameraTestCases, CtsColorModeTestCases, CtsDatabaseTestCases, CtsDisplayTestCases, CtsDpiTestCases, CtsDreamsTestCases, CtsEdiHostTestCases, CtsExternalSourcesTestCases, CtsFileSystemTestCases, CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsGestureTestCases, CtsHostsideWebViewTests, CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases, CtsLocation2TestCases, CtsLocationTestCases, CtsMediaHostTestCases, CtsMediaTestCases, CtsMultiUserTestCases, CtsOsHostTestCases, CtsPdfTestCases, CtsPermission2TestCases, CtsPermissionTestCases, CtsPreference2TestCases, CtsPreferenceTestCases, CtsPrintTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
-        test_name='cheets_CTS_Instant.9.0_r14.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
-        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsTextTestCases', '--exclude-filter', 'CtsAnimationTestCases', '--exclude-filter', 'CtsSampleDeviceTestCases', '--exclude-filter', 'CtsAutoFillServiceTestCases', '--exclude-filter', 'CtsAccountManagerTestCases', '--exclude-filter', 'CtsAppWidgetTestCases', '--exclude-filter', 'CtsUiDeviceTestCases', '--exclude-filter', 'CtsAccessibilityTestCases', '--exclude-filter', 'CtsSampleHostTestCases', '--exclude-filter', 'CtsToastTestCases', '--exclude-filter', 'CtsViewTestCases', '--exclude-filter', 'CtsAppSecurityHostTestCases', '--exclude-filter', 'CtsSystemUiTestCases', '--exclude-filter', 'CtsUsbTests', '--exclude-filter', 'CtsWidgetTestCases', '--exclude-filter', 'CtsAccessibilityServiceTestCases', '--exclude-filter', 'CtsUsageStatsTestCases', '--exclude-filter', 'CtsWindowManagerDeviceTestCases', '--exclude-filter', 'CtsUiAutomationTestCases', '--exclude-filter', 'CtsUidIsolationTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts_instant-9.0_r14-linux_x86-x86.zip',
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases
deleted file mode 100644
index af60434..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.9.0_r14.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_cpu_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleHostTestCases, CtsSystemUiTestCases, CtsTextTestCases, CtsToastTestCases, CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUidIsolationTestCases, CtsUsageStatsTestCases, CtsUsbTests, CtsViewTestCases, CtsWidgetTestCases, CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
-        test_name='cheets_CTS_Instant.9.0_r14.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsPreferenceTestCases', '--exclude-filter', 'CtsPrintTestCases', '--exclude-filter', 'CtsLocationTestCases', '--exclude-filter', 'CtsDpiTestCases', '--exclude-filter', 'CtsDatabaseTestCases', '--exclude-filter', 'CtsPdfTestCases', '--exclude-filter', 'CtsDisplayTestCases', '--exclude-filter', 'CtsBackgroundRestrictionsTestCases', '--exclude-filter', 'CtsPermissionTestCases', '--exclude-filter', 'CtsInputMethodServiceHostTestCases', '--exclude-filter', 'CtsAccountManagerTestCases', '--exclude-filter', 'CtsInputMethodTestCases', '--exclude-filter', 'CtsAccessibilityTestCases', '--exclude-filter', 'CtsAppSecurityHostTestCases', '--exclude-filter', 'CtsAccessibilityServiceTestCases', '--exclude-filter', 'CtsFragmentTestCasesSdk26', '--exclude-filter', 'CtsGestureTestCases', '--exclude-filter', 'CtsColorModeTestCases', '--exclude-filter', 'CtsAutoFillServiceTestCases', '--exclude-filter', 'CtsPreference2TestCases', '--exclude-filter', 'CtsOsHostTestCases', '--exclude-filter', 'CtsCameraTestCases', '--exclude-filter', 'CtsDreamsTestCases', '--exclude-filter', 'CtsAnimationTestCases', '--exclude-filter', 'CtsLocation2TestCases', '--exclude-filter', 'CtsHostsideWebViewTests', '--exclude-filter', 'CtsAppWidgetTestCases', '--exclude-filter', 'CtsMediaTestCases', '--exclude-filter', 'CtsExternalSourcesTestCases', '--exclude-filter', 'CtsFileSystemTestCases', '--exclude-filter', 'CtsPermission2TestCases', '--exclude-filter', 'CtsMultiUserTestCases', '--exclude-filter', 'CtsMediaHostTestCases', '--exclude-filter', 'CtsEdiHostTestCases', '--exclude-filter', 'CtsFragmentTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts_instant-9.0_r14-linux_x86-x86.zip',
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.tradefed-run-collect-tests-only-internal
deleted file mode 100644
index 047fb2f..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.9.0_r14.x86.tradefed-run-collect-tests-only-internal
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.9.0_r14.x86.tradefed-run-collect-tests-only-internal'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_cpu_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'LENGTHY'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run all of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='9.0_r14.x86.tradefed-run-collect-tests-only-internal',
-        test_name='cheets_CTS_Instant.9.0_r14.x86.tradefed-run-collect-tests-only-internal',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot'],
-        retry_template=None,
-        target_module=None,
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts_instant-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_r20.arm.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_Instant/control.9.0_r20.arm.tradefed-run-collect-tests-only-internal
new file mode 100644
index 0000000..f217617
--- /dev/null
+++ b/server/site_tests/cheets_CTS_Instant/control.9.0_r20.arm.tradefed-run-collect-tests-only-internal
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_Instant.9.0_r20.arm.tradefed-run-collect-tests-only-internal'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_cpu_arm'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'LENGTHY'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run all of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_Instant',
+        hosts=host_list,
+        iterations=1,
+        max_retry=0,
+        tag='9.0_r20.arm.tradefed-run-collect-tests-only-internal',
+        test_name='cheets_CTS_Instant.9.0_r20.arm.tradefed-run-collect-tests-only-internal',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot'],
+        retry_template=None,
+        target_module=None,
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.9.0_r20.x86.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_Instant/control.9.0_r20.x86.tradefed-run-collect-tests-only-internal
new file mode 100644
index 0000000..053217b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_Instant/control.9.0_r20.x86.tradefed-run-collect-tests-only-internal
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_Instant.9.0_r20.x86.tradefed-run-collect-tests-only-internal'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_cpu_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'LENGTHY'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run all of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_Instant',
+        hosts=host_list,
+        iterations=1,
+        max_retry=0,
+        tag='9.0_r20.x86.tradefed-run-collect-tests-only-internal',
+        test_name='cheets_CTS_Instant.9.0_r20.x86.tradefed-run-collect-tests-only-internal',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot'],
+        retry_template=None,
+        target_module=None,
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccessibilityServiceTestCases
index 5d60f15..215cd90 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccessibilityServiceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccessibilityServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityServiceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccessibilityTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccessibilityTestCases
index 2567978..bd142dc 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccessibilityTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccessibilityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccountManagerTestCases
index d6fed05..8e40093 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccountManagerTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAccountManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccountManagerTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAnimationTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAnimationTestCases
index cfd8526..1dc9752 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAnimationTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAnimationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAnimationTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAppSecurityHostTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAppSecurityHostTestCases
index 1655c7e..afc0a1d 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAppSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAppSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppSecurityHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAppWidgetTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAppWidgetTestCases
index 84d1097..64d14e4 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAppWidgetTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAppWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppWidgetTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAutoFillServiceTestCases
index c272773..6e100d0 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsAutoFillServiceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsAutoFillServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAutoFillServiceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsBackgroundRestrictionsTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsBackgroundRestrictionsTestCases
index 4dfe1c7..1255322 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsBackgroundRestrictionsTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsBackgroundRestrictionsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackgroundRestrictionsTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsCameraTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsCameraTestCases
index 9e86b1b..0eb082b 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsCameraTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsCameraTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsColorModeTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsColorModeTestCases
index 6b786a0..0bfc1f2 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsColorModeTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsColorModeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsColorModeTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsDatabaseTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsDatabaseTestCases
index d0614d8..2d7cab2 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsDatabaseTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsDatabaseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDatabaseTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsDisplayTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsDisplayTestCases
index 3149951..0d1df50 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsDisplayTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsDisplayTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDisplayTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsDpiTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsDpiTestCases
index c7ac3c0..f284c54 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsDpiTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsDpiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDpiTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsDreamsTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsDreamsTestCases
index 0cd1f18..05ad4f3 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsDreamsTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsDreamsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDreamsTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsEdiHostTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsEdiHostTestCases
index 13c611a..5f8e4d4 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsEdiHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsEdiHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsEdiHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsExternalSourcesTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsExternalSourcesTestCases
index a2879db..d08b72a 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsExternalSourcesTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsExternalSourcesTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExternalSourcesTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsFileSystemTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsFileSystemTestCases
index 544fee7..7e27872 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsFileSystemTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsFileSystemTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsFragmentTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsFragmentTestCases
index 94b10fe..4b69867 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsFragmentTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsFragmentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsFragmentTestCasesSdk26 b/server/site_tests/cheets_CTS_Instant/control.arm.CtsFragmentTestCasesSdk26
index 478b1d2..082c13a 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsFragmentTestCasesSdk26
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsFragmentTestCasesSdk26
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCasesSdk26 of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsGestureTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsGestureTestCases
index a03f24f..a800ece 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsGestureTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsGestureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGestureTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsHostsideWebViewTests b/server/site_tests/cheets_CTS_Instant/control.arm.CtsHostsideWebViewTests
index 9f3f799..bcacc44 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsHostsideWebViewTests
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsHostsideWebViewTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideWebViewTests of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsInputMethodServiceHostTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsInputMethodServiceHostTestCases
index bc41e45..4ea6cac 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsInputMethodServiceHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsInputMethodServiceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodServiceHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsInputMethodTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsInputMethodTestCases
index 1942fbc..ecefc21 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsInputMethodTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsInputMethodTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsLocation2TestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsLocation2TestCases
index ec4cfe5..c096e3d 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsLocation2TestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsLocation2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocation2TestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsLocationTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsLocationTestCases
index 568c9b1..7256507 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsLocationTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsLocationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsMediaHostTestCases
index bfe79d9..c80baf0 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsMediaHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsMediaHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsMediaTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsMediaTestCases
index 67c14b9..040e4e3 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsMediaTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsMediaTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsMultiUserTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsMultiUserTestCases
index 7e359e6..3658257 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsMultiUserTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsMultiUserTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMultiUserTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsOsHostTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsOsHostTestCases
index eda978a..c0f7c84 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsOsHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsOsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOsHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPdfTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPdfTestCases
index 6ea9530..d9091db 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPdfTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPdfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPdfTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPermission2TestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPermission2TestCases
index 4362408..83c62b5 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPermission2TestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPermission2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermission2TestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPermissionTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPermissionTestCases
index 4ac9d2b..2b387e0 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPermissionTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPermissionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermissionTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPreference2TestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPreference2TestCases
index 9b26f21..bd95daa 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPreference2TestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPreference2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPreference2TestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPreferenceTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPreferenceTestCases
index af9e901..1ff725c 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPreferenceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPreferenceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPreferenceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPrintTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPrintTestCases
index 4fa08cf..e6d63ff 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsPrintTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsPrintTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPrintTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsSampleDeviceTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsSampleDeviceTestCases
index fc4a372..0931420 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsSampleDeviceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsSampleDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleDeviceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsSampleHostTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsSampleHostTestCases
index cc43d5c..39acdff 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsSampleHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsSampleHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsSystemUiTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsSystemUiTestCases
index 244370f..77dd5ac 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsSystemUiTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsSystemUiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemUiTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsTextTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsTextTestCases
index 45c133e..7b2cf10 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsTextTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsTextTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTextTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsToastTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsToastTestCases
index 9be4f71..b359fdb 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsToastTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsToastTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsToastTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsUiAutomationTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsUiAutomationTestCases
index ee110dd..cb8b808 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsUiAutomationTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsUiAutomationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiAutomationTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsUiDeviceTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsUiDeviceTestCases
index e5ab62d..39075ed 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsUiDeviceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsUiDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiDeviceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsUidIsolationTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsUidIsolationTestCases
index cea02b8..be319a9 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsUidIsolationTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsUidIsolationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUidIsolationTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsUsageStatsTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsUsageStatsTestCases
index 4f521db..852599d 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsUsageStatsTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsUsageStatsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsUsbTests b/server/site_tests/cheets_CTS_Instant/control.arm.CtsUsbTests
index 6cc3422..9cf1a51 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsUsbTests
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsUsbTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsbTests of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsViewTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsViewTestCases
index d1f85ee..0929ee7 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsViewTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsViewTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsWidgetTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsWidgetTestCases
index 695dcbc..c439554 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsWidgetTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWidgetTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_Instant/control.arm.CtsWindowManagerDeviceTestCases
index cc074fd..6eae056 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.CtsWindowManagerDeviceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.CtsWindowManagerDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.tradefed-run-collect-tests-only b/server/site_tests/cheets_CTS_Instant/control.arm.tradefed-run-collect-tests-only
index 1115e9d..31e2dc1 100644
--- a/server/site_tests/cheets_CTS_Instant/control.arm.tradefed-run-collect-tests-only
+++ b/server/site_tests/cheets_CTS_Instant/control.arm.tradefed-run-collect-tests-only
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run all of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.waivers b/server/site_tests/cheets_CTS_Instant/control.arm.waivers
deleted file mode 100644
index b7c4db3..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.arm.waivers
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.arm.waivers'
-ATTRIBUTES = 'suite:cts_P, suite:cts'
-DEPENDENCIES = 'arc, cts_cpu_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run preview version of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        tag='arm.waivers',
-        test_name='cheets_CTS_Instant.arm.waivers',
-        run_template=['run', 'commandAndExit', 'cts-instant',
-            # HACK. What we really want is including the following two cases.
-            #   android.media.cts.AudioTrackTest#testPlayStaticData
-            #   android.media.cts.MediaCodecListTest#testRequiredMediaCodecList
-            # Since cts-instant does not support --include-filter, roughly
-            # approximate by --module and --exclude of a few expensive classes
-            # (taking 15 out of 20 minutes of CtsMediaTestCases.)
-            '--module', 'CtsMediaTestCases',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncodeVirtualDisplayWithCompositionTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncoderTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.MediaDrmClearkeyTest',
-        ],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='cts-instant-dev',
-        target_plan=None,
-        load_waivers=False,
-        bundle='arm',
-        uri='gs://chromeos-partner-gts/android-cts_instant-6862722-linux_x86-arm.zip',
-        retry_manual_tests=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.arm.waivers-collect-tests-only b/server/site_tests/cheets_CTS_Instant/control.arm.waivers-collect-tests-only
deleted file mode 100644
index 767fc56..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.arm.waivers-collect-tests-only
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.arm.waivers-collect-tests-only'
-ATTRIBUTES = 'suite:cts_P, suite:cts'
-DEPENDENCIES = 'arc, cts_cpu_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run preview version of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='arm.waivers-collect-tests-only',
-        test_name='cheets_CTS_Instant.arm.waivers-collect-tests-only',
-        run_template=['run', 'commandAndExit', 'collect-tests-only',
-            # HACK. What we really want is including the following two cases.
-            #   android.media.cts.AudioTrackTest#testPlayStaticData
-            #   android.media.cts.MediaCodecListTest#testRequiredMediaCodecList
-            # Since cts-instant does not support --include-filter, roughly
-            # approximate by --module and --exclude of a few expensive classes
-            # (taking 15 out of 20 minutes of CtsMediaTestCases.)
-            '--module', 'CtsMediaTestCases',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncodeVirtualDisplayWithCompositionTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncoderTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.MediaDrmClearkeyTest',
-        ],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='cts-instant-dev',
-        target_plan=None,
-        load_waivers=False,
-        bundle='arm',
-        uri='gs://chromeos-partner-gts/android-cts_instant-6862722-linux_x86-arm.zip',
-        retry_manual_tests=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
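Both arm.waivers controls deleted above carried the same workaround, documented in their own HACK comment: cts-instant has no --include-filter, so the two wanted media cases were approximated by running all of CtsMediaTestCases and excluding its most expensive classes. The helper below is a hypothetical sketch of that pattern (the function name and the trimmed class list are illustrative, not from the tree); each exclusion string is "<module> <fully.qualified.Class>", exactly as in the deleted files.

# Hypothetical helper illustrating the exclude-filter approximation used by
# the deleted waiver controls: run one module, skip its expensive classes.
def media_waiver_template(expensive_classes):
    template = ['run', 'commandAndExit', 'cts-instant',
                '--module', 'CtsMediaTestCases']
    for cls in expensive_classes:
        template += ['--exclude-filter', 'CtsMediaTestCases ' + cls]
    return template

run_template = media_waiver_template([
    'android.media.cts.AudioNativeTest',
    'android.media.cts.EncoderTest',
])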
diff --git a/server/site_tests/cheets_CTS_Instant/control.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_Instant/control.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases
new file mode 100644
index 0000000..be32963
--- /dev/null
+++ b/server/site_tests/cheets_CTS_Instant/control.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_Instant.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_cpu_arm'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityServiceTestCases, CtsAccessibilityTestCases, CtsAccountManagerTestCases, CtsAnimationTestCases, CtsAppSecurityHostTestCases, CtsAppWidgetTestCases, CtsAutoFillServiceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_Instant',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
+        test_name='cheets_CTS_Instant.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
+        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsBackgroundRestrictionsTestCases', '--exclude-filter', 'CtsCameraTestCases', '--exclude-filter', 'CtsColorModeTestCases', '--exclude-filter', 'CtsDatabaseTestCases', '--exclude-filter', 'CtsDisplayTestCases', '--exclude-filter', 'CtsDpiTestCases', '--exclude-filter', 'CtsDreamsTestCases', '--exclude-filter', 'CtsEdiHostTestCases', '--exclude-filter', 'CtsExternalSourcesTestCases', '--exclude-filter', 'CtsFileSystemTestCases', '--exclude-filter', 'CtsFragmentTestCases', '--exclude-filter', 'CtsFragmentTestCasesSdk26', '--exclude-filter', 'CtsGestureTestCases', '--exclude-filter', 'CtsHostsideWebViewTests', '--exclude-filter', 'CtsInputMethodServiceHostTestCases', '--exclude-filter', 'CtsInputMethodTestCases', '--exclude-filter', 'CtsLocation2TestCases', '--exclude-filter', 'CtsLocationTestCases', '--exclude-filter', 'CtsMediaHostTestCases', '--exclude-filter', 'CtsMediaTestCases', '--exclude-filter', 'CtsMultiUserTestCases', '--exclude-filter', 'CtsOsHostTestCases', '--exclude-filter', 'CtsPdfTestCases', '--exclude-filter', 'CtsPermission2TestCases', '--exclude-filter', 'CtsPermissionTestCases', '--exclude-filter', 'CtsPreference2TestCases', '--exclude-filter', 'CtsPreferenceTestCases', '--exclude-filter', 'CtsPrintTestCases', '--exclude-filter', 'CtsSampleDeviceTestCases', '--exclude-filter', 'CtsSampleHostTestCases', '--exclude-filter', 'CtsSystemUiTestCases', '--exclude-filter', 'CtsTextTestCases', '--exclude-filter', 'CtsToastTestCases', '--exclude-filter', 'CtsUiAutomationTestCases', '--exclude-filter', 'CtsUiDeviceTestCases', '--exclude-filter', 'CtsUidIsolationTestCases', '--exclude-filter', 'CtsUsageStatsTestCases', '--exclude-filter', 'CtsUsbTests', '--exclude-filter', 'CtsViewTestCases', '--exclude-filter', 'CtsWidgetTestCases', '--exclude-filter', 'CtsWindowManagerDeviceTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
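Among the new combined-module controls, the CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases variants (arm above, x86 below) are the ones that also pass login_precondition_commands. The single command is a shell pipeline that ejects removable block devices before login, presumably so stray USB media cannot disturb the combined run. The Python sketch below only annotates and wraps that same pipeline for illustration; the control files themselves pass the string straight through to the harness.

import subprocess

# The precondition command from the new control files, stage by stage:
#   lsblk -do NAME,RM   -> one line per whole disk: "<name> <removable flag>"
#   sed -n s/1$//p      -> print only lines ending in 1 (removable devices),
#                          with the trailing 1 stripped, leaving the name
#   xargs -n1 eject     -> eject each removable device that was listed
EJECT_REMOVABLE = 'lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'

def eject_removable_media():
    """Run the same pipeline locally; a no-op when nothing is removable."""
    subprocess.run(EJECT_REMOVABLE, shell=True, check=False)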
diff --git a/server/site_tests/cheets_CTS_Instant/control.internal.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases b/server/site_tests/cheets_CTS_Instant/control.internal.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases
new file mode 100644
index 0000000..7075133
--- /dev/null
+++ b/server/site_tests/cheets_CTS_Instant/control.internal.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_Instant.internal.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_cpu_arm'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBackgroundRestrictionsTestCases, CtsCameraTestCases, CtsColorModeTestCases, CtsDatabaseTestCases, CtsDisplayTestCases, CtsDpiTestCases, CtsDreamsTestCases, CtsEdiHostTestCases, CtsExternalSourcesTestCases, CtsFileSystemTestCases, CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsGestureTestCases, CtsHostsideWebViewTests, CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases, CtsLocation2TestCases, CtsLocationTestCases, CtsMediaHostTestCases, CtsMediaTestCases, CtsMultiUserTestCases, CtsOsHostTestCases, CtsPdfTestCases, CtsPermission2TestCases, CtsPermissionTestCases, CtsPreference2TestCases, CtsPreferenceTestCases, CtsPrintTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_Instant',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
+        test_name='cheets_CTS_Instant.internal.arm.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
+        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsAccessibilityServiceTestCases', '--exclude-filter', 'CtsAccessibilityTestCases', '--exclude-filter', 'CtsAccountManagerTestCases', '--exclude-filter', 'CtsAnimationTestCases', '--exclude-filter', 'CtsAppSecurityHostTestCases', '--exclude-filter', 'CtsAppWidgetTestCases', '--exclude-filter', 'CtsAutoFillServiceTestCases', '--exclude-filter', 'CtsSampleDeviceTestCases', '--exclude-filter', 'CtsSampleHostTestCases', '--exclude-filter', 'CtsSystemUiTestCases', '--exclude-filter', 'CtsTextTestCases', '--exclude-filter', 'CtsToastTestCases', '--exclude-filter', 'CtsUiAutomationTestCases', '--exclude-filter', 'CtsUiDeviceTestCases', '--exclude-filter', 'CtsUidIsolationTestCases', '--exclude-filter', 'CtsUsageStatsTestCases', '--exclude-filter', 'CtsUsbTests', '--exclude-filter', 'CtsViewTestCases', '--exclude-filter', 'CtsWidgetTestCases', '--exclude-filter', 'CtsWindowManagerDeviceTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.internal.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_Instant/control.internal.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases
new file mode 100644
index 0000000..e7513b8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_Instant/control.internal.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_Instant.internal.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_cpu_arm'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleHostTestCases, CtsSystemUiTestCases, CtsTextTestCases, CtsToastTestCases, CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUidIsolationTestCases, CtsUsageStatsTestCases, CtsUsbTests, CtsViewTestCases, CtsWidgetTestCases, CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_Instant',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
+        test_name='cheets_CTS_Instant.internal.arm.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
+        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsAccessibilityServiceTestCases', '--exclude-filter', 'CtsAccessibilityTestCases', '--exclude-filter', 'CtsAccountManagerTestCases', '--exclude-filter', 'CtsAnimationTestCases', '--exclude-filter', 'CtsAppSecurityHostTestCases', '--exclude-filter', 'CtsAppWidgetTestCases', '--exclude-filter', 'CtsAutoFillServiceTestCases', '--exclude-filter', 'CtsBackgroundRestrictionsTestCases', '--exclude-filter', 'CtsCameraTestCases', '--exclude-filter', 'CtsColorModeTestCases', '--exclude-filter', 'CtsDatabaseTestCases', '--exclude-filter', 'CtsDisplayTestCases', '--exclude-filter', 'CtsDpiTestCases', '--exclude-filter', 'CtsDreamsTestCases', '--exclude-filter', 'CtsEdiHostTestCases', '--exclude-filter', 'CtsExternalSourcesTestCases', '--exclude-filter', 'CtsFileSystemTestCases', '--exclude-filter', 'CtsFragmentTestCases', '--exclude-filter', 'CtsFragmentTestCasesSdk26', '--exclude-filter', 'CtsGestureTestCases', '--exclude-filter', 'CtsHostsideWebViewTests', '--exclude-filter', 'CtsInputMethodServiceHostTestCases', '--exclude-filter', 'CtsInputMethodTestCases', '--exclude-filter', 'CtsLocation2TestCases', '--exclude-filter', 'CtsLocationTestCases', '--exclude-filter', 'CtsMediaHostTestCases', '--exclude-filter', 'CtsMediaTestCases', '--exclude-filter', 'CtsMultiUserTestCases', '--exclude-filter', 'CtsOsHostTestCases', '--exclude-filter', 'CtsPdfTestCases', '--exclude-filter', 'CtsPermission2TestCases', '--exclude-filter', 'CtsPermissionTestCases', '--exclude-filter', 'CtsPreference2TestCases', '--exclude-filter', 'CtsPreferenceTestCases', '--exclude-filter', 'CtsPrintTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_Instant/control.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases
new file mode 100644
index 0000000..8db3221
--- /dev/null
+++ b/server/site_tests/cheets_CTS_Instant/control.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_Instant.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_cpu_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityServiceTestCases, CtsAccessibilityTestCases, CtsAccountManagerTestCases, CtsAnimationTestCases, CtsAppSecurityHostTestCases, CtsAppWidgetTestCases, CtsAutoFillServiceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_Instant',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
+        test_name='cheets_CTS_Instant.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
+        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsBackgroundRestrictionsTestCases', '--exclude-filter', 'CtsCameraTestCases', '--exclude-filter', 'CtsColorModeTestCases', '--exclude-filter', 'CtsDatabaseTestCases', '--exclude-filter', 'CtsDisplayTestCases', '--exclude-filter', 'CtsDpiTestCases', '--exclude-filter', 'CtsDreamsTestCases', '--exclude-filter', 'CtsEdiHostTestCases', '--exclude-filter', 'CtsExternalSourcesTestCases', '--exclude-filter', 'CtsFileSystemTestCases', '--exclude-filter', 'CtsFragmentTestCases', '--exclude-filter', 'CtsFragmentTestCasesSdk26', '--exclude-filter', 'CtsGestureTestCases', '--exclude-filter', 'CtsHostsideWebViewTests', '--exclude-filter', 'CtsInputMethodServiceHostTestCases', '--exclude-filter', 'CtsInputMethodTestCases', '--exclude-filter', 'CtsLocation2TestCases', '--exclude-filter', 'CtsLocationTestCases', '--exclude-filter', 'CtsMediaHostTestCases', '--exclude-filter', 'CtsMediaTestCases', '--exclude-filter', 'CtsMultiUserTestCases', '--exclude-filter', 'CtsOsHostTestCases', '--exclude-filter', 'CtsPdfTestCases', '--exclude-filter', 'CtsPermission2TestCases', '--exclude-filter', 'CtsPermissionTestCases', '--exclude-filter', 'CtsPreference2TestCases', '--exclude-filter', 'CtsPreferenceTestCases', '--exclude-filter', 'CtsPrintTestCases', '--exclude-filter', 'CtsSampleDeviceTestCases', '--exclude-filter', 'CtsSampleHostTestCases', '--exclude-filter', 'CtsSystemUiTestCases', '--exclude-filter', 'CtsTextTestCases', '--exclude-filter', 'CtsToastTestCases', '--exclude-filter', 'CtsUiAutomationTestCases', '--exclude-filter', 'CtsUiDeviceTestCases', '--exclude-filter', 'CtsUidIsolationTestCases', '--exclude-filter', 'CtsUsageStatsTestCases', '--exclude-filter', 'CtsUsbTests', '--exclude-filter', 'CtsViewTestCases', '--exclude-filter', 'CtsWidgetTestCases', '--exclude-filter', 'CtsWindowManagerDeviceTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsAccessibilityServiceTestCases_-_CtsAutoFillServiceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.internal.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases b/server/site_tests/cheets_CTS_Instant/control.internal.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases
new file mode 100644
index 0000000..d2fb906
--- /dev/null
+++ b/server/site_tests/cheets_CTS_Instant/control.internal.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_Instant.internal.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_cpu_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBackgroundRestrictionsTestCases, CtsCameraTestCases, CtsColorModeTestCases, CtsDatabaseTestCases, CtsDisplayTestCases, CtsDpiTestCases, CtsDreamsTestCases, CtsEdiHostTestCases, CtsExternalSourcesTestCases, CtsFileSystemTestCases, CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsGestureTestCases, CtsHostsideWebViewTests, CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases, CtsLocation2TestCases, CtsLocationTestCases, CtsMediaHostTestCases, CtsMediaTestCases, CtsMultiUserTestCases, CtsOsHostTestCases, CtsPdfTestCases, CtsPermission2TestCases, CtsPermissionTestCases, CtsPreference2TestCases, CtsPreferenceTestCases, CtsPrintTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_Instant',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
+        test_name='cheets_CTS_Instant.internal.x86.all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
+        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsAccessibilityServiceTestCases', '--exclude-filter', 'CtsAccessibilityTestCases', '--exclude-filter', 'CtsAccountManagerTestCases', '--exclude-filter', 'CtsAnimationTestCases', '--exclude-filter', 'CtsAppSecurityHostTestCases', '--exclude-filter', 'CtsAppWidgetTestCases', '--exclude-filter', 'CtsAutoFillServiceTestCases', '--exclude-filter', 'CtsSampleDeviceTestCases', '--exclude-filter', 'CtsSampleHostTestCases', '--exclude-filter', 'CtsSystemUiTestCases', '--exclude-filter', 'CtsTextTestCases', '--exclude-filter', 'CtsToastTestCases', '--exclude-filter', 'CtsUiAutomationTestCases', '--exclude-filter', 'CtsUiDeviceTestCases', '--exclude-filter', 'CtsUidIsolationTestCases', '--exclude-filter', 'CtsUsageStatsTestCases', '--exclude-filter', 'CtsUsbTests', '--exclude-filter', 'CtsViewTestCases', '--exclude-filter', 'CtsWidgetTestCases', '--exclude-filter', 'CtsWindowManagerDeviceTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsBackgroundRestrictionsTestCases_-_CtsPrintTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.internal.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_Instant/control.internal.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases
new file mode 100644
index 0000000..e565d4c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_Instant/control.internal.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_Instant.internal.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_cpu_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleHostTestCases, CtsSystemUiTestCases, CtsTextTestCases, CtsToastTestCases, CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUidIsolationTestCases, CtsUsageStatsTestCases, CtsUsbTests, CtsViewTestCases, CtsWidgetTestCases, CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_Instant',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
+        test_name='cheets_CTS_Instant.internal.x86.all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
+        run_template=['run', 'commandAndExit', 'cts-instant', '--exclude-filter', 'CtsAccessibilityServiceTestCases', '--exclude-filter', 'CtsAccessibilityTestCases', '--exclude-filter', 'CtsAccountManagerTestCases', '--exclude-filter', 'CtsAnimationTestCases', '--exclude-filter', 'CtsAppSecurityHostTestCases', '--exclude-filter', 'CtsAppWidgetTestCases', '--exclude-filter', 'CtsAutoFillServiceTestCases', '--exclude-filter', 'CtsBackgroundRestrictionsTestCases', '--exclude-filter', 'CtsCameraTestCases', '--exclude-filter', 'CtsColorModeTestCases', '--exclude-filter', 'CtsDatabaseTestCases', '--exclude-filter', 'CtsDisplayTestCases', '--exclude-filter', 'CtsDpiTestCases', '--exclude-filter', 'CtsDreamsTestCases', '--exclude-filter', 'CtsEdiHostTestCases', '--exclude-filter', 'CtsExternalSourcesTestCases', '--exclude-filter', 'CtsFileSystemTestCases', '--exclude-filter', 'CtsFragmentTestCases', '--exclude-filter', 'CtsFragmentTestCasesSdk26', '--exclude-filter', 'CtsGestureTestCases', '--exclude-filter', 'CtsHostsideWebViewTests', '--exclude-filter', 'CtsInputMethodServiceHostTestCases', '--exclude-filter', 'CtsInputMethodTestCases', '--exclude-filter', 'CtsLocation2TestCases', '--exclude-filter', 'CtsLocationTestCases', '--exclude-filter', 'CtsMediaHostTestCases', '--exclude-filter', 'CtsMediaTestCases', '--exclude-filter', 'CtsMultiUserTestCases', '--exclude-filter', 'CtsOsHostTestCases', '--exclude-filter', 'CtsPdfTestCases', '--exclude-filter', 'CtsPermission2TestCases', '--exclude-filter', 'CtsPermissionTestCases', '--exclude-filter', 'CtsPreference2TestCases', '--exclude-filter', 'CtsPreferenceTestCases', '--exclude-filter', 'CtsPrintTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSampleDeviceTestCases_-_CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccessibilityServiceTestCases
index 15aa04d..c11a160 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccessibilityServiceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccessibilityServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityServiceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccessibilityTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccessibilityTestCases
index 4387a90..6bf3eed 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccessibilityTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccessibilityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccountManagerTestCases
index dd28e5f..7c5f0ab 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccountManagerTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAccountManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccountManagerTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAnimationTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAnimationTestCases
index fb3798a..57da227 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAnimationTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAnimationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAnimationTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAppSecurityHostTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAppSecurityHostTestCases
index 7306996..5bf026a 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAppSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAppSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppSecurityHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAppWidgetTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAppWidgetTestCases
index 09efd22..2d279a5 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAppWidgetTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAppWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppWidgetTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAutoFillServiceTestCases
index 92c380a..68f9837 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsAutoFillServiceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsAutoFillServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAutoFillServiceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsBackgroundRestrictionsTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsBackgroundRestrictionsTestCases
index 5740ecb..b59bfa4 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsBackgroundRestrictionsTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsBackgroundRestrictionsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackgroundRestrictionsTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsCameraTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsCameraTestCases
index a930586..0d75e2f 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsCameraTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsCameraTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsColorModeTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsColorModeTestCases
index c8348ee..17b44f4 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsColorModeTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsColorModeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsColorModeTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsDatabaseTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsDatabaseTestCases
index 84e7208..f6cc6cd 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsDatabaseTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsDatabaseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDatabaseTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsDisplayTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsDisplayTestCases
index ea77fa7..c65b426 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsDisplayTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsDisplayTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDisplayTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsDpiTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsDpiTestCases
index 9de3213..1d91be6 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsDpiTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsDpiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDpiTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsDreamsTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsDreamsTestCases
index 4b9fcd8..067598f 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsDreamsTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsDreamsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDreamsTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsEdiHostTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsEdiHostTestCases
index b048e3b..c4df4bc 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsEdiHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsEdiHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsEdiHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsExternalSourcesTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsExternalSourcesTestCases
index 0f6bdfe..a93a1df 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsExternalSourcesTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsExternalSourcesTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExternalSourcesTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsFileSystemTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsFileSystemTestCases
index 8fd2d66..ebaaf8e 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsFileSystemTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsFileSystemTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsFragmentTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsFragmentTestCases
index 9f7fae4..b05ee32 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsFragmentTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsFragmentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsFragmentTestCasesSdk26 b/server/site_tests/cheets_CTS_Instant/control.x86.CtsFragmentTestCasesSdk26
index 23aa4dd..fce903a 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsFragmentTestCasesSdk26
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsFragmentTestCasesSdk26
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCasesSdk26 of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsGestureTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsGestureTestCases
index e6f98f6..52a0d3e 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsGestureTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsGestureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGestureTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsHostsideWebViewTests b/server/site_tests/cheets_CTS_Instant/control.x86.CtsHostsideWebViewTests
index 98aad37..4e687e3 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsHostsideWebViewTests
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsHostsideWebViewTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideWebViewTests of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsInputMethodServiceHostTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsInputMethodServiceHostTestCases
index 493e82e..cd93306 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsInputMethodServiceHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsInputMethodServiceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodServiceHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsInputMethodTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsInputMethodTestCases
index 434865f..2ea22bc 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsInputMethodTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsInputMethodTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsLocation2TestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsLocation2TestCases
index 8fad361..133225f 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsLocation2TestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsLocation2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocation2TestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsLocationTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsLocationTestCases
index 6bf1752..77713e9 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsLocationTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsLocationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsMediaHostTestCases
index 77e12c9..304ffa3 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsMediaHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsMediaHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsMediaTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsMediaTestCases
index 33a94cf..a760834 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsMediaTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsMediaTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsMultiUserTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsMultiUserTestCases
index c6887ff..396d0c9 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsMultiUserTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsMultiUserTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMultiUserTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsOsHostTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsOsHostTestCases
index 950ecf2..ce2d956 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsOsHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsOsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOsHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPdfTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPdfTestCases
index 1f06eea..f58c51b 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPdfTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPdfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPdfTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPermission2TestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPermission2TestCases
index 20c5557..2e41c84 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPermission2TestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPermission2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermission2TestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPermissionTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPermissionTestCases
index 4e7dc96..d9ef19f 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPermissionTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPermissionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermissionTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPreference2TestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPreference2TestCases
index a509e96..57aa068 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPreference2TestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPreference2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPreference2TestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPreferenceTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPreferenceTestCases
index c029cee..fe3918a 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPreferenceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPreferenceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPreferenceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPrintTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPrintTestCases
index 26550ab..0ace363 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsPrintTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsPrintTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPrintTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsSampleDeviceTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsSampleDeviceTestCases
index db0a01b..fb270b3 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsSampleDeviceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsSampleDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleDeviceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsSampleHostTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsSampleHostTestCases
index 4528e60..492d0d2 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsSampleHostTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsSampleHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleHostTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsSystemUiTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsSystemUiTestCases
index f284987..bbe1209 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsSystemUiTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsSystemUiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemUiTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsTextTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsTextTestCases
index 627a7b2..c4a0a8c 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsTextTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsTextTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTextTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsToastTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsToastTestCases
index 4c40559..38b7831 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsToastTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsToastTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsToastTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsUiAutomationTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsUiAutomationTestCases
index b74331e..5d9a1d5 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsUiAutomationTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsUiAutomationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiAutomationTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsUiDeviceTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsUiDeviceTestCases
index b66421e..9e0409d 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsUiDeviceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsUiDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiDeviceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsUidIsolationTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsUidIsolationTestCases
index df98c1a..193d5de 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsUidIsolationTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsUidIsolationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUidIsolationTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsUsageStatsTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsUsageStatsTestCases
index a03bedd..d631a98 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsUsageStatsTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsUsageStatsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsUsbTests b/server/site_tests/cheets_CTS_Instant/control.x86.CtsUsbTests
index 0938dc3..2e5f9b0 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsUsbTests
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsUsbTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsbTests of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsViewTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsViewTestCases
index b478d12..15d35f5 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsViewTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsViewTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsWidgetTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsWidgetTestCases
index a0fbe29..8691483 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsWidgetTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWidgetTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_Instant/control.x86.CtsWindowManagerDeviceTestCases
index 2482c62..c7d4492 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.CtsWindowManagerDeviceTestCases
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.CtsWindowManagerDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.tradefed-run-collect-tests-only b/server/site_tests/cheets_CTS_Instant/control.x86.tradefed-run-collect-tests-only
index c1397c6..4439d73 100644
--- a/server/site_tests/cheets_CTS_Instant/control.x86.tradefed-run-collect-tests-only
+++ b/server/site_tests/cheets_CTS_Instant/control.x86.tradefed-run-collect-tests-only
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run all of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.waivers b/server/site_tests/cheets_CTS_Instant/control.x86.waivers
deleted file mode 100644
index da5051e..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.x86.waivers
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.x86.waivers'
-ATTRIBUTES = 'suite:cts_P, suite:cts'
-DEPENDENCIES = 'arc, cts_cpu_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run preview version of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        tag='x86.waivers',
-        test_name='cheets_CTS_Instant.x86.waivers',
-        run_template=['run', 'commandAndExit', 'cts-instant',
-            # HACK. What we really want is including the following two cases.
-            #   android.media.cts.AudioTrackTest#testPlayStaticData
-            #   android.media.cts.MediaCodecListTest#testRequiredMediaCodecList
-            # Since cts-instant does not support --include-filter, roughly
-            # approximate by --module and --exclude of a few expensive classes
-            # (taking 15 out of 20 minutes of CtsMediaTestCases.)
-            '--module', 'CtsMediaTestCases',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncodeVirtualDisplayWithCompositionTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncoderTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.MediaDrmClearkeyTest',
-        ],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='cts-instant-dev',
-        target_plan=None,
-        load_waivers=False,
-        bundle='x86',
-        uri='gs://chromeos-partner-gts/android-cts_instant-6862722-linux_x86-x86.zip',
-        retry_manual_tests=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/control.x86.waivers-collect-tests-only b/server/site_tests/cheets_CTS_Instant/control.x86.waivers-collect-tests-only
deleted file mode 100644
index 37143fa..0000000
--- a/server/site_tests/cheets_CTS_Instant/control.x86.waivers-collect-tests-only
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_Instant.x86.waivers-collect-tests-only'
-ATTRIBUTES = 'suite:cts_P, suite:cts'
-DEPENDENCIES = 'arc, cts_cpu_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run preview version of the Android Compatibility Test Suite for Instant Apps (CTS Instant) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_Instant',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='x86.waivers-collect-tests-only',
-        test_name='cheets_CTS_Instant.x86.waivers-collect-tests-only',
-        run_template=['run', 'commandAndExit', 'collect-tests-only',
-            # HACK. What we really want is including the following two cases.
-            #   android.media.cts.AudioTrackTest#testPlayStaticData
-            #   android.media.cts.MediaCodecListTest#testRequiredMediaCodecList
-            # Since cts-instant does not support --include-filter, roughly
-            # approximate by --module and --exclude of a few expensive classes
-            # (taking 15 out of 20 minutes of CtsMediaTestCases.)
-            '--module', 'CtsMediaTestCases',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncodeVirtualDisplayWithCompositionTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.EncoderTest',
-            '--exclude-filter', 'CtsMediaTestCases android.media.cts.MediaDrmClearkeyTest',
-        ],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='cts-instant-dev',
-        target_plan=None,
-        load_waivers=False,
-        bundle='x86',
-        uri='gs://chromeos-partner-gts/android-cts_instant-6862722-linux_x86-x86.zip',
-        retry_manual_tests=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/generate_controlfiles.py b/server/site_tests/cheets_CTS_Instant/generate_controlfiles.py
index a575280..d0fb0aa 100755
--- a/server/site_tests/cheets_CTS_Instant/generate_controlfiles.py
+++ b/server/site_tests/cheets_CTS_Instant/generate_controlfiles.py
@@ -1,4 +1,5 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/cheets_CTS_P/cheets_CTS_P.py b/server/site_tests/cheets_CTS_P/cheets_CTS_P.py
index 09155ee..ec896d8 100644
--- a/server/site_tests/cheets_CTS_P/cheets_CTS_P.py
+++ b/server/site_tests/cheets_CTS_P/cheets_CTS_P.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -26,12 +27,22 @@
 # Maximum default time allowed for each individual CTS module.
 _CTS_TIMEOUT_SECONDS = 3600
 
-# Public download locations for android cts bundles.
 _PUBLIC_CTS = 'https://dl.google.com/dl/android/cts/'
-_PARTNER_CTS = 'gs://chromeos-partner-cts/'
-_CTS_URI = {
-        'arm': _PUBLIC_CTS + 'android-cts-9.0_r14-linux_x86-arm.zip',
-        'x86': _PUBLIC_CTS + 'android-cts-9.0_r14-linux_x86-x86.zip',
+_INTERNAL_CTS = 'gs://chromeos-arc-images/cts/bundle/P/'
+_PARTNER_CTS = 'gs://chromeos-partner-gts/P/'
+_OFFICIAL_ZIP_NAME = 'android-cts-9.0_r20-linux_x86-%s.zip'
+_PREVIEW_ZIP_NAME = 'android-cts-8480133-linux_x86-%s.zip'
+_BUNDLE_MAP = {
+        (None, 'arm'): _PUBLIC_CTS + _OFFICIAL_ZIP_NAME % 'arm',
+        (None, 'x86'): _PUBLIC_CTS + _OFFICIAL_ZIP_NAME % 'x86',
+        ('DEV_MOBLAB', 'arm'): _PARTNER_CTS + _PREVIEW_ZIP_NAME % 'arm',
+        ('DEV_MOBLAB', 'x86'): _PARTNER_CTS + _PREVIEW_ZIP_NAME % 'x86',
+        ('LATEST', 'arm'): _INTERNAL_CTS + _OFFICIAL_ZIP_NAME % 'arm',
+        ('LATEST', 'x86'): _INTERNAL_CTS + _OFFICIAL_ZIP_NAME % 'x86',
+        ('DEV', 'arm'): _INTERNAL_CTS + _PREVIEW_ZIP_NAME % 'arm',
+        ('DEV', 'x86'): _INTERNAL_CTS + _PREVIEW_ZIP_NAME % 'x86',
+        ('DEV_WAIVER', 'arm'): _INTERNAL_CTS + _PREVIEW_ZIP_NAME % 'arm',
+        ('DEV_WAIVER', 'x86'): _INTERNAL_CTS + _PREVIEW_ZIP_NAME % 'x86',
 }
 _CTS_MEDIA_URI = _PUBLIC_CTS + 'android-cts-media-1.5.zip'
 _CTS_MEDIA_LOCALPATH = '/tmp/android-cts-media'
@@ -76,8 +87,11 @@
         cmd.append('--quiet-output=true')
         return cmd
 
-    def _get_default_bundle_url(self, bundle):
-        return _CTS_URI[bundle]
+    def _get_bundle_url(self, uri, bundle):
+        if uri and (uri.startswith('http') or uri.startswith('gs')):
+            return uri
+        else:
+            return _BUNDLE_MAP[(uri, bundle)]
 
     def _get_tradefed_base_dir(self):
         return 'android-cts'
@@ -107,8 +121,8 @@
         chart_hosts = [hosts.create_host(ip) for ip in chart_address]
 
         self.chart_fixtures = [
-            camerabox_utils.ChartFixture(h, self._SCENE_URI)
-            for h in chart_hosts
+                camerabox_utils.ChartFixture(h, self._SCENE_URI, self.job)
+                for h in chart_hosts
         ]
         self.dut_fixtures = [
             camerabox_utils.DUTFixture(self, h, camera_facing)
@@ -205,7 +219,6 @@
             bundle=bundle,
             extra_artifacts=extra_artifacts,
             extra_artifacts_host=extra_artifacts_host,
-            cts_uri=_CTS_URI,
             login_precondition_commands=login_precondition_commands,
             precondition_commands=precondition_commands,
             prerequisites=prerequisites)
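(Aside on the hunk above: a minimal standalone sketch of the bundle-selection logic that _get_bundle_url() now implements in cheets_CTS_P.py. The map below is abbreviated to three illustrative arm entries taken from the hunk, and the method is shown as a free function for brevity; it is not the full table from the change.)

    _BUNDLE_MAP = {
            (None, 'arm'): 'https://dl.google.com/dl/android/cts/android-cts-9.0_r20-linux_x86-arm.zip',
            ('LATEST', 'arm'): 'gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r20-linux_x86-arm.zip',
            ('DEV', 'arm'): 'gs://chromeos-arc-images/cts/bundle/P/android-cts-8480133-linux_x86-arm.zip',
    }

    def get_bundle_url(uri, bundle):
        # An explicit http/gs URI passed in by a control file is used as-is;
        # otherwise the (uri_token, abi) pair selects a canned bundle location.
        if uri and (uri.startswith('http') or uri.startswith('gs')):
            return uri
        return _BUNDLE_MAP[(uri, bundle)]

    get_bundle_url(None, 'arm')                       # public official bundle
    get_bundle_url('DEV', 'arm')                      # internal preview bundle
    get_bundle_url('gs://some-bucket/custom.zip', 'arm')  # passed through unchanged
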
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_dev.arm.waivers b/server/site_tests/cheets_CTS_P/control.9.0_dev.arm.waivers
deleted file mode 100644
index 4257b83..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_dev.arm.waivers
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file is not auto-generated. Don't delete it.
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_dev.arm.waivers'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=False,
-        tag='9.0_dev.arm.waivers',
-        test_name='cheets_CTS_P.9.0_dev.arm.waivers',
-        run_template=['run', 'commandAndExit', 'cts', '--subplan', 'waivers', '--module-arg', 'CtsMediaTestCases:skip-media-download:true'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--module-arg', 'CtsMediaTestCases:skip-media-download:true'],
-        target_module='cts-dev',
-        target_plan='waivers',
-        load_waivers=False,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-6970114-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_dev.x86.waivers b/server/site_tests/cheets_CTS_P/control.9.0_dev.x86.waivers
deleted file mode 100644
index 104fe7e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_dev.x86.waivers
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file is not auto-generated. Don't delete it.
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_dev.x86.waivers'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=False,
-        tag='9.0_dev.x86.waivers',
-        test_name='cheets_CTS_P.9.0_dev.x86.waivers',
-        run_template=['run', 'commandAndExit', 'cts', '--subplan', 'waivers', '--module-arg', 'CtsMediaTestCases:skip-media-download:true'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--module-arg', 'CtsMediaTestCases:skip-media-download:true'],
-        target_module='cts-dev',
-        target_plan='waivers',
-        load_waivers=False,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-6970114-linux_x86-x86.zip',
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAbiOverrideHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAbiOverrideHostTestCases
deleted file mode 100644
index 761be76..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAbiOverrideHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAbiOverrideHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAbiOverrideHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsAbiOverrideHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAbiOverrideHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAbiOverrideHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAbiOverrideHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAccelerationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAccelerationTestCases
deleted file mode 100644
index a923d43..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAccelerationTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAccelerationTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-arc'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsAccelerationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAccelerationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccelerationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAccelerationTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAccessibility b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAccessibility
deleted file mode 100644
index c8d5f6a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAccessibility
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAccessibility'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccessibilityServiceTestCases, CtsAccessibilityTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsAccessibility',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAccessibility',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityServiceTestCases', '--include-filter', 'CtsAccessibilityTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAccessibility',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAccountManagerTestCases
deleted file mode 100644
index 8809fcd..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAccountManagerTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAccountManagerTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsAccountManagerTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAccountManagerTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccountManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAccountManagerTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsActivityManagerDevice b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsActivityManagerDevice
deleted file mode 100644
index e58a53e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsActivityManagerDevice
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsActivityManagerDevice'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsActivityManagerDeviceSdk25TestCases, CtsActivityManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsActivityManagerDevice',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsActivityManagerDevice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsActivityManagerDevice',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAdmin b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAdmin
deleted file mode 100644
index cfd0044..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAdmin
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAdmin'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsAdmin',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAdmin',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAdmin',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAlarm b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAlarm
deleted file mode 100644
index 768bead..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAlarm
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAlarm'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAlarmClockTestCases, CtsAlarmManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsAlarm',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAlarm',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAlarmClockTestCases', '--include-filter', 'CtsAlarmManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAlarm',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAndroid b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAndroid
deleted file mode 100644
index 63d5204..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAndroid
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAndroid'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAndroidAppTestCases, CtsAndroidTestBase27ApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsAndroid',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAndroid',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidTestBase27ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAndroid',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAnimationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAnimationTestCases
deleted file mode 100644
index f77b2c1..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAnimationTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAnimationTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAnimationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsAnimationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAnimationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAnimationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAnimationTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsApacheHttpLegacy b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsApacheHttpLegacy
deleted file mode 100644
index 0a41712..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsApacheHttpLegacy
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsApacheHttpLegacy'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsApacheHttpLegacy',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsApacheHttpLegacy',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsApacheHttpLegacy',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsApp b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsApp
deleted file mode 100644
index bd688a0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsApp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsApp'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAppComponentFactoryTestCases, CtsAppSecurityHostTestCases, CtsAppTestCases, CtsAppUsageHostTestCases, CtsAppWidgetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        enable_default_apps=True,
-        tag='9.0_r14.arm.CtsApp',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsApp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppWidgetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsApp',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=23400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAslrMallocTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAslrMallocTestCases
deleted file mode 100644
index 3396675..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAslrMallocTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAslrMallocTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAslrMallocTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsAslrMallocTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAslrMallocTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAslrMallocTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAslrMallocTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAssistTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAssistTestCases
deleted file mode 100644
index 2d93a65..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAssistTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAssistTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAssistTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsAssistTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAssistTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAssistTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAssistTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAtraceHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAtraceHostTestCases
deleted file mode 100644
index f672975..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAtraceHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAtraceHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAtraceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsAtraceHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAtraceHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAtraceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAtraceHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAutoFillServiceTestCases
deleted file mode 100644
index 611d629..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsAutoFillServiceTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsAutoFillServiceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAutoFillServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsAutoFillServiceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsAutoFillServiceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAutoFillServiceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAutoFillServiceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBackgroundRestrictionsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBackgroundRestrictionsTestCases
deleted file mode 100644
index c86faf3..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBackgroundRestrictionsTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsBackgroundRestrictionsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBackgroundRestrictionsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsBackgroundRestrictionsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsBackgroundRestrictionsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBackgroundRestrictionsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBackgroundRestrictionsTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBackup b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBackup
deleted file mode 100644
index 2513a87..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBackup
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsBackup'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBackupHostTestCases, CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsBackup',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsBackup',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBackup',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBatterySavingTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBatterySavingTestCases
deleted file mode 100644
index 18e4ab6..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBatterySavingTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsBatterySavingTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBatterySavingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsBatterySavingTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsBatterySavingTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBatterySavingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBatterySavingTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBionicTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBionicTestCases
deleted file mode 100644
index ddaca3a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBionicTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsBionicTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBionicTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsBionicTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsBionicTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBionicTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBionicTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBluetoothTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBluetoothTestCases
deleted file mode 100644
index 7d00c47..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBluetoothTestCases
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-from autotest_lib.server import utils
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsBluetoothTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBluetoothTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-# For local debugging, if your test setup doesn't have servo, REMOVE these
-# two lines.
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_TS(machine):
-    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
-    # setup doesn't have servo.
-    try:
-        host_list = [hosts.create_host(machine, servo_args=servo_args)]
-    except:
-        # Just ignore any servo setup flakiness.
-        host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsBluetoothTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsBluetoothTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBluetoothTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBluetoothTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        prerequisites=['bluetooth'],
-        hard_reboot_on_failure=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBootStatsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBootStatsTestCases
deleted file mode 100644
index 6671023..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsBootStatsTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsBootStatsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBootStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsBootStatsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsBootStatsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBootStatsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBootStatsTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCalendarcommon2TestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCalendarcommon2TestCases
deleted file mode 100644
index dfd08d3..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCalendarcommon2TestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCalendarcommon2TestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCalendarcommon2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCalendarcommon2TestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCalendarcommon2TestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCalendarcommon2TestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCalendarcommon2TestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCamera b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCamera
deleted file mode 100644
index 34eb743..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCamera
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCamera'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraApi25TestCases, CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCamera',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCamera',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCamera',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.led.camerabox.back b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.led.camerabox.back
deleted file mode 100644
index d95f33b..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.led.camerabox.back
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCameraTestCases.led.camerabox.back'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
-DEPENDENCIES = 'arc, camerabox_light:led, camerabox_facing:back'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        camera_facing='back',
-        cmdline_args=args,
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCameraTestCases.led.camerabox.back',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCameraTestCases.led.camerabox.back',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCameraTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        load_waivers=False,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.led.camerabox.front b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.led.camerabox.front
deleted file mode 100644
index 4d471c8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.led.camerabox.front
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCameraTestCases.led.camerabox.front'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
-DEPENDENCIES = 'arc, camerabox_light:led, camerabox_facing:front'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        camera_facing='front',
-        cmdline_args=args,
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCameraTestCases.led.camerabox.front',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCameraTestCases.led.camerabox.front',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCameraTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        load_waivers=False,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.noled.camerabox.back b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.noled.camerabox.back
deleted file mode 100644
index c8036bd..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.noled.camerabox.back
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCameraTestCases.noled.camerabox.back'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
-DEPENDENCIES = 'arc, camerabox_light:noled, camerabox_facing:back'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        camera_facing='back',
-        cmdline_args=args,
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCameraTestCases.noled.camerabox.back',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCameraTestCases.noled.camerabox.back',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCameraTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        load_waivers=False,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.noled.camerabox.front b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.noled.camerabox.front
deleted file mode 100644
index 495a674..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCameraTestCases.noled.camerabox.front
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCameraTestCases.noled.camerabox.front'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
-DEPENDENCIES = 'arc, camerabox_light:noled, camerabox_facing:front'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        camera_facing='front',
-        cmdline_args=args,
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCameraTestCases.noled.camerabox.front',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCameraTestCases.noled.camerabox.front',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCameraTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        load_waivers=False,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCarTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCarTestCases
deleted file mode 100644
index 2472c50..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCarTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCarTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCarTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCarTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCarTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCarTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCarrierApiTestCases
deleted file mode 100644
index 850d996..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCarrierApiTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCarrierApiTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCarrierApiTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCarrierApiTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarrierApiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCarrierApiTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsColorModeTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsColorModeTestCases
deleted file mode 100644
index 052972d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsColorModeTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsColorModeTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsColorModeTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsColorModeTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsColorModeTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsColorModeTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsColorModeTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCompilationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCompilationTestCases
deleted file mode 100644
index 6364be4..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCompilationTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCompilationTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCompilationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCompilationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCompilationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCompilationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCompilationTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsContactsProviderWipe b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsContactsProviderWipe
deleted file mode 100644
index 72cc026..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsContactsProviderWipe
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsContactsProviderWipe'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsContactsProviderWipe of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsContactsProviderWipe',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsContactsProviderWipe',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsContactsProviderWipe', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsContactsProviderWipe',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsContentTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsContentTestCases
deleted file mode 100644
index 2cbe7a0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsContentTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsContentTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsContentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        enable_default_apps=True,
-        tag='9.0_r14.arm.CtsContentTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsContentTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsContentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsContentTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        prerequisites=['region_us'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCppToolsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCppToolsTestCases
deleted file mode 100644
index ea491e2..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCppToolsTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCppToolsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCppToolsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCppToolsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCppToolsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCppToolsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCppToolsTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCurrentApiSignatureTestCases
deleted file mode 100644
index 239afdd..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsCurrentApiSignatureTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsCurrentApiSignatureTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsCurrentApiSignatureTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsCurrentApiSignatureTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCurrentApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCurrentApiSignatureTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDatabaseTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDatabaseTestCases
deleted file mode 100644
index ea8469e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDatabaseTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDatabaseTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDatabaseTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDatabaseTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDatabaseTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDatabaseTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDatabaseTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDebugTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDebugTestCases
deleted file mode 100644
index 17f4952..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDebugTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDebugTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDebugTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDebugTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDebugTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases
deleted file mode 100644
index b284c49..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-deqp, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=15,
-        tag='9.0_r14.arm.CtsDeqpTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=72000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-EGL b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-EGL
deleted file mode 100644
index b1fe5aa..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-EGL
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases.dEQP-EGL'
-ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-EGL of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDeqpTestCases.dEQP-EGL',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases.dEQP-EGL',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-EGL.*', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES2 b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES2
deleted file mode 100644
index 6fb55f9..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES2
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES2'
-ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES2',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES2',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES2.*', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES3 b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES3
deleted file mode 100644
index 88f5631..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES3
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES3'
-ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES3',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES3',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES3.*', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES31 b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES31
deleted file mode 100644
index 425a650..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES31
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES31'
-ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES31 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES31',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases.dEQP-GLES31',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES31.*', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-VK b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-VK
deleted file mode 100644
index 0723f70..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDeqpTestCases.dEQP-VK
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases.dEQP-VK'
-ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-VK of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDeqpTestCases.dEQP-VK',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDeqpTestCases.dEQP-VK',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-VK.*', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=54000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDevice b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDevice
deleted file mode 100644
index 7446ced..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDevice
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDevice'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeviceIdleHostTestCases, CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDevice',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDevice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDevice',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDexMetadataHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDexMetadataHostTestCases
deleted file mode 100644
index 1f5f9df..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDexMetadataHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDexMetadataHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDexMetadataHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDexMetadataHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDexMetadataHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDexMetadataHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDexMetadataHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDisplayTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDisplayTestCases
deleted file mode 100644
index 6846f96..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDisplayTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDisplayTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDisplayTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDisplayTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDisplayTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDisplayTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDisplayTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDpi b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDpi
deleted file mode 100644
index 45228d7..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDpi
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDpi'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDpiTestCases, CtsDpiTestCases2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDpi',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDpi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDpi',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDreamsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDreamsTestCases
deleted file mode 100644
index 517a7d8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDreamsTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDreamsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDreamsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDreamsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDreamsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDreamsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDreamsTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDrmTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDrmTestCases
deleted file mode 100644
index b4dd0b9..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDrmTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDrmTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDrmTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDrmTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDrmTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDrmTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDrmTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDumpsysHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDumpsysHostTestCases
deleted file mode 100644
index 006e26c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDumpsysHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDumpsysHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDumpsysHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDumpsysHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDumpsysHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDumpsysHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDumpsysHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDynamicLinkerTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDynamicLinkerTestCases
deleted file mode 100644
index 88debd5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsDynamicLinkerTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsDynamicLinkerTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDynamicLinkerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsDynamicLinkerTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsDynamicLinkerTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDynamicLinkerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDynamicLinkerTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsEdiHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsEdiHostTestCases
deleted file mode 100644
index 12a08cf..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsEdiHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsEdiHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsEdiHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsEdiHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsEdiHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsEdiHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsEdiHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsEffectTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsEffectTestCases
deleted file mode 100644
index 617d64c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsEffectTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsEffectTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsEffectTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsEffectTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsEffectTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsEffectTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsEffectTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsExternalS b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsExternalS
deleted file mode 100644
index b7b420d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsExternalS
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsExternalS'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsExternalServiceTestCases, CtsExternalSourcesTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsExternalS',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsExternalS',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalSourcesTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsExternalS',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsFileSystemTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsFileSystemTestCases
deleted file mode 100644
index 3842ed0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsFileSystemTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsFileSystemTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsFileSystemTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsFileSystemTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsFileSystemTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsFileSystemTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsFragment b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsFragment
deleted file mode 100644
index 219667d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsFragment
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsFragment'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsFragment',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsFragment',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsFragment',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsGestureTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsGestureTestCases
deleted file mode 100644
index e2ae425..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsGestureTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsGestureTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGestureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsGestureTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsGestureTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGestureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsGestureTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsGpuToolsHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsGpuToolsHostTestCases
deleted file mode 100644
index cb8aa7f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsGpuToolsHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsGpuToolsHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGpuToolsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsGpuToolsHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsGpuToolsHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGpuToolsHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsGpuToolsHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsGraphicsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsGraphicsTestCases
deleted file mode 100644
index 80e576e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsGraphicsTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsGraphicsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGraphicsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=5,
-        tag='9.0_r14.arm.CtsGraphicsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsGraphicsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGraphicsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsGraphicsTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHardwareTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHardwareTestCases
deleted file mode 100644
index 4e0a812..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHardwareTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsHardwareTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHardwareTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsHardwareTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsHardwareTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHardwareTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsHardwareTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHarmfulAppWarningHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHarmfulAppWarningHostTestCases
deleted file mode 100644
index 6d23540..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHarmfulAppWarningHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsHarmfulAppWarningHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHarmfulAppWarningHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsHarmfulAppWarningHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsHarmfulAppWarningHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHarmfulAppWarningHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsHarmfulAppWarningHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHiddenApi b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHiddenApi
deleted file mode 100644
index c5abf3d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHiddenApi
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsHiddenApi'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWildcardTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsHiddenApi',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsHiddenApi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsHiddenApi',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHostTzDataTests b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHostTzDataTests
deleted file mode 100644
index e390b2c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHostTzDataTests
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsHostTzDataTests'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHostTzDataTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsHostTzDataTests',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsHostTzDataTests',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHostTzDataTests', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsHostTzDataTests',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHostside b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHostside
deleted file mode 100644
index 847fb37..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsHostside
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsHostside'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHostsideNetworkTests, CtsHostsideNumberBlockingTestCases, CtsHostsideTvTests, CtsHostsideWebViewTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsHostside',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsHostside',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideWebViewTests', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsHostside',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsIcuTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsIcuTestCases
deleted file mode 100644
index dbd7a38..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsIcuTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsIcuTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIcuTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsIcuTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsIcuTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIcuTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsIcuTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsIncidentHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsIncidentHostTestCases
deleted file mode 100644
index cdf4e61..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsIncidentHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsIncidentHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIncidentHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsIncidentHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsIncidentHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIncidentHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsIncidentHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsInlineMockingTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsInlineMockingTestCases
deleted file mode 100644
index 7ec3009..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsInlineMockingTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsInlineMockingTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInlineMockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsInlineMockingTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsInlineMockingTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsInlineMockingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsInlineMockingTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsInputMethod b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsInputMethod
deleted file mode 100644
index 12a975a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsInputMethod
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsInputMethod'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsInputMethod',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsInputMethod',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsInputMethod',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsIntentSignatureTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsIntentSignatureTestCases
deleted file mode 100644
index d0ac044..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsIntentSignatureTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsIntentSignatureTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIntentSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsIntentSignatureTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsIntentSignatureTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIntentSignatureTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIntentSignatureTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJankDeviceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJankDeviceTestCases
deleted file mode 100644
index 4f0e6d6..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJankDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsJankDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJankDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsJankDeviceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsJankDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsJankDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsJankDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJdwp b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJdwp
deleted file mode 100644
index a91aff8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJdwp
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsJdwp'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJdwpSecurityHostTestCases, CtsJdwpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsJdwp',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsJdwp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsJdwp',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJniTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJniTestCases
deleted file mode 100644
index be3d1c1..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJniTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsJniTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJniTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsJniTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsJniTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsJniTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsJniTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJobScheduler b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJobScheduler
deleted file mode 100644
index e2461ca..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJobScheduler
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsJobScheduler'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsJobScheduler',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsJobScheduler',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsJobScheduler',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJvmti b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJvmti
deleted file mode 100644
index 6696f2f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsJvmti
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsJvmti'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingTestCases, CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest997HostTestCases, CtsJvmtiTaggingHostTestCases, CtsJvmtiTrackingHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsJvmti',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsJvmti',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsJvmti',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=26700)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsKernelConfigTestCases
deleted file mode 100644
index 6dad9c8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsKernelConfigTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsKernelConfigTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsKernelConfigTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsKernelConfigTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKernelConfigTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsKernelConfigTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsKeystoreTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsKeystoreTestCases
deleted file mode 100644
index 17b987c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsKeystoreTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsKeystoreTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsKeystoreTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsKeystoreTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsKeystoreTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKeystoreTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsKeystoreTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLeanbackJankTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLeanbackJankTestCases
deleted file mode 100644
index 1e2acf3..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLeanbackJankTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsLeanbackJankTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLeanbackJankTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsLeanbackJankTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsLeanbackJankTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLeanbackJankTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLeanbackJankTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLegacyNotificationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLegacyNotificationTestCases
deleted file mode 100644
index 38ef8e0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLegacyNotificationTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsLegacyNotificationTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLegacyNotificationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsLegacyNotificationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsLegacyNotificationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLegacyNotificationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLegacyNotificationTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLibcore b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLibcore
deleted file mode 100644
index a1b1286..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLibcore
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsLibcore'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLibcoreFileIOTestCases, CtsLibcoreJsr166TestCases, CtsLibcoreLegacy22TestCases, CtsLibcoreOjTestCases, CtsLibcoreOkHttpTestCases, CtsLibcoreTestCases, CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofConscryptTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsLibcore',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsLibcore',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLibcore',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLiblogTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLiblogTestCases
deleted file mode 100644
index f0c4d26..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLiblogTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsLiblogTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLiblogTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsLiblogTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsLiblogTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLiblogTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLiblogTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLocation b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLocation
deleted file mode 100644
index aa21197..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLocation
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsLocation'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLocation2TestCases, CtsLocationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsLocation',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsLocation',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLocation2TestCases', '--include-filter', 'CtsLocationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLocation',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLogdTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLogdTestCases
deleted file mode 100644
index bfd1b15..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsLogdTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsLogdTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsLogdTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsLogdTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLogdTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLogdTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaBitstreamsTestCases
deleted file mode 100644
index bcc6517..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaBitstreamsTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsMediaBitstreamsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='9.0_r14.arm.CtsMediaBitstreamsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsMediaBitstreamsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaBitstreamsTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaBitstreamsTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaHostTestCases
deleted file mode 100644
index 7c4c312..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsMediaHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsMediaHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsMediaHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMediaHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaStressTestCases
deleted file mode 100644
index cf2a76e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaStressTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsMediaStressTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='9.0_r14.arm.CtsMediaStressTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsMediaStressTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaStressTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaStressTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaTestCases
deleted file mode 100644
index ff6b6db..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsMediaTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='9.0_r14.arm.CtsMediaTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsMediaTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=36000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaTestCases.audio b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaTestCases.audio
deleted file mode 100644
index ef33614..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMediaTestCases.audio
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsMediaTestCases.audio'
-ATTRIBUTES = 'suite:arc-cts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases.audio of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='9.0_r14.arm.CtsMediaTestCases.audio',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsMediaTestCases.audio',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFocusTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFormatTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioManagerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlayRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPreProcessingTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPresentationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordAppOpTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecord_BufferSizeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordingConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackLatencyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackSurroundTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrack_ListenerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolAacTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolMidiTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolOggTest', '--include-filter', 'CtsMediaTestCases android.media.cts.VolumeShaperTest', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMediaTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMidiTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMidiTestCases
deleted file mode 100644
index e8f6e9a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMidiTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsMidiTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsMidiTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsMidiTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMidiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMidiTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMocking b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMocking
deleted file mode 100644
index 9f66bbe..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMocking
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsMocking'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMockingDebuggableTestCases, CtsMockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsMocking',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsMocking',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMocking',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMonkeyTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMonkeyTestCases
deleted file mode 100644
index 602efce..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMonkeyTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsMonkeyTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMonkeyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsMonkeyTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsMonkeyTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMonkeyTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMonkeyTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMultiUser b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMultiUser
deleted file mode 100644
index 4c6a5e8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsMultiUser
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsMultiUser'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMultiUserHostTestCases, CtsMultiUserTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsMultiUser',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsMultiUser',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMultiUser',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNNAPITestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNNAPITestCases
deleted file mode 100644
index 954ff34..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNNAPITestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsNNAPITestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNNAPITestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsNNAPITestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsNNAPITestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsNNAPITestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsNNAPITestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNative b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNative
deleted file mode 100644
index ea02aec..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNative
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsNative'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNativeHardwareTestCases, CtsNativeMediaAAudioTestCases, CtsNativeMediaSlTestCases, CtsNativeMediaXaTestCases, CtsNativeNetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsNative',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsNative',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeNetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsNative',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNdefTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNdefTestCases
deleted file mode 100644
index bf1e9ab..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNdefTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsNdefTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNdefTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsNdefTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsNdefTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsNdefTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsNdefTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNet b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNet
deleted file mode 100644
index 3f952a0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsNet
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsNet'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNetSecConfigAttributeTestCases, CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigResourcesSrcTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetTestCases, CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyPermission22 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsNet',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsNet',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsNet',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=30600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsOmapiTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsOmapiTestCases
deleted file mode 100644
index 983877a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsOmapiTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsOmapiTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOmapiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsOmapiTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsOmapiTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsOmapiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsOmapiTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsOpenG b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsOpenG
deleted file mode 100644
index 895a417..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsOpenG
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsOpenG'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOpenGLTestCases, CtsOpenGlPerf2TestCases, CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsOpenG',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsOpenG',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsOpenG',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsOs b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsOs
deleted file mode 100644
index 054184a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsOs
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsOs'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOsHostTestCases, CtsOsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsOs',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsOs',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsOs',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPdfTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPdfTestCases
deleted file mode 100644
index 54e054f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPdfTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsPdfTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPdfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsPdfTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsPdfTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPdfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsPdfTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPerfettoTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPerfettoTestCases
deleted file mode 100644
index 5282823..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPerfettoTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsPerfettoTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsPerfettoTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsPerfettoTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPerfettoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsPerfettoTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPermission b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPermission
deleted file mode 100644
index 1a79ef8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPermission
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsPermission'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPermission2TestCases, CtsPermissionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsPermission',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsPermission',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermissionTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsPermission',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPreference b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPreference
deleted file mode 100644
index a84571d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPreference
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsPreference'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPreference2TestCases, CtsPreferenceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsPreference',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsPreference',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPreference2TestCases', '--include-filter', 'CtsPreferenceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsPreference',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPrintTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPrintTestCases
deleted file mode 100644
index 53cee84..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsPrintTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsPrintTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPrintTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsPrintTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsPrintTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPrintTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsPrintTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsProtoTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsProtoTestCases
deleted file mode 100644
index 1b0fe7a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsProtoTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsProtoTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsProtoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsProtoTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsProtoTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsProtoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsProtoTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsProviderTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsProviderTestCases
deleted file mode 100644
index 53d6edc..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsProviderTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsProviderTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsProviderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsProviderTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsProviderTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsProviderTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsProviderTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsRenderscript b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsRenderscript
deleted file mode 100644
index c773668..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsRenderscript
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsRenderscript'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRenderscriptLegacyTestCases, CtsRenderscriptTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsRenderscript',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsRenderscript',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsRenderscript',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsRs b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsRs
deleted file mode 100644
index 79cc18f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsRs
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsRs'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRsBlasTestCases, CtsRsCppTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsRs',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsRs',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsCppTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsRs',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSample b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSample
deleted file mode 100644
index ff77c04..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSample
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSample'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSample',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSample',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSample',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSaxTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSaxTestCases
deleted file mode 100644
index a0b57a1..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSaxTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSaxTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSaxTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSaxTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSaxTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSaxTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSaxTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSeccompHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSeccompHostTestCases
deleted file mode 100644
index 7e80c44..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSeccompHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSeccompHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSeccompHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSeccompHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSeccompHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSeccompHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSeccompHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSecureElementAccessControl b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSecureElementAccessControl
deleted file mode 100644
index 88f76a2..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSecureElementAccessControl
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSecureElementAccessControl'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSecureElementAccessControl',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSecureElementAccessControl',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSecureElementAccessControl',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSecurity b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSecurity
deleted file mode 100644
index dcdf6f5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSecurity
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSecurity'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecurityBulletinHostTestCases, CtsSecurityHostTestCases, CtsSecurityTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSecurity',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSecurity',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--include-filter', 'CtsSecurityHostTestCases', '--include-filter', 'CtsSecurityTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSecurity',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        precondition_commands=['echo 3 > /proc/sys/kernel/perf_event_paranoid', 'modprobe configs'],
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSelinuxTargetSdk b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSelinuxTargetSdk
deleted file mode 100644
index cf678c5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSelinuxTargetSdk
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSelinuxTargetSdk'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSelinuxTargetSdk',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSelinuxTargetSdk',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSelinuxTargetSdk',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSensorTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSensorTestCases
deleted file mode 100644
index 320b110..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSensorTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSensorTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=30,
-        tag='9.0_r14.arm.CtsSensorTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSensorTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSensorTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSensorTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsShortcut b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsShortcut
deleted file mode 100644
index 61141c7..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsShortcut
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsShortcut'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsShortcut',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsShortcut',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsShortcut',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSimRestrictedApisTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSimRestrictedApisTestCases
deleted file mode 100644
index d7af688..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSimRestrictedApisTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSimRestrictedApisTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimRestrictedApisTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSimRestrictedApisTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSimRestrictedApisTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimRestrictedApisTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSimRestrictedApisTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSimpleCpuTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSimpleCpuTestCases
deleted file mode 100644
index c79ac82..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSimpleCpuTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSimpleCpuTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimpleCpuTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSimpleCpuTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSimpleCpuTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleCpuTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSimpleCpuTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSimpleperfTestCases
deleted file mode 100644
index 1f8c9e9..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSimpleperfTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSimpleperfTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsSimpleperfTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSimpleperfTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleperfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSimpleperfTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSkQPTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSkQPTestCases
deleted file mode 100644
index a2a57d0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSkQPTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSkQPTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSkQPTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSkQPTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSkQPTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSkQPTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSkQPTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSliceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSliceTestCases
deleted file mode 100644
index c32a773..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSliceTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSliceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSliceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSliceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSliceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSliceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSliceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSpeechTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSpeechTestCases
deleted file mode 100644
index f022dc5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSpeechTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSpeechTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSpeechTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsSpeechTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSpeechTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSpeechTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSpeechTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsStatsdHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsStatsdHostTestCases
deleted file mode 100644
index bd3a241..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsStatsdHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsStatsdHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsStatsdHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsStatsdHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsStatsdHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsStatsdHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsStatsdHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        prerequisites=['bluetooth'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSustainedPerformanceHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSustainedPerformanceHostTestCases
deleted file mode 100644
index c486351..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSustainedPerformanceHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSustainedPerformanceHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSustainedPerformanceHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSustainedPerformanceHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSustainedPerformanceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSustainedPerformanceHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSync b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSync
deleted file mode 100644
index d6f9a2d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSync
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSync'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases, CtsSyncContentHostTestCases, CtsSyncManagerTestsCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSync',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSync',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncManagerTestsCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSync',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSystem b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSystem
deleted file mode 100644
index c33e43c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsSystem
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsSystem'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSystemApiAnnotationTestCases, CtsSystemApiSignatureTestCases, CtsSystemIntentTestCases, CtsSystemUiHostTestCases, CtsSystemUiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsSystem',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsSystem',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSystem',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTelecom b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTelecom
deleted file mode 100644
index 0acd8e7..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTelecom
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsTelecom'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsTelecom',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsTelecom',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTelecom',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTelephony b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTelephony
deleted file mode 100644
index 2523583..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTelephony
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsTelephony'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTelephony2TestCases, CtsTelephonyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsTelephony',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsTelephony',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephonyTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTelephony',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTextTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTextTestCases
deleted file mode 100644
index bf9f7fd5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTextTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsTextTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTextTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsTextTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsTextTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTextTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTextTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTheme b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTheme
deleted file mode 100644
index d04461e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTheme
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsTheme'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsThemeDeviceTestCases, CtsThemeHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsTheme',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsTheme',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTheme',
-        target_plan=None,
-        bundle='arm',
-        extra_artifacts_host=['/tmp/diff_*.png'],
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        prerequisites=['region_us'],
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsToast b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsToast
deleted file mode 100644
index 19a3f3b..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsToast
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsToast'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsToastLegacyTestCases, CtsToastTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsToast',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsToast',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsToast',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTransitionTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTransitionTestCases
deleted file mode 100644
index b63df03..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTransitionTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsTransitionTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTransitionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsTransitionTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsTransitionTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTransitionTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTransitionTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTrustedVoiceHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTrustedVoiceHostTestCases
deleted file mode 100644
index 4ea7d22..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTrustedVoiceHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsTrustedVoiceHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTrustedVoiceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsTrustedVoiceHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsTrustedVoiceHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTrustedVoiceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTrustedVoiceHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTv b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTv
deleted file mode 100644
index 285be9e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsTv
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsTv'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTvProviderTestCases, CtsTvTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsTv',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsTv',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTv',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUi b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUi
deleted file mode 100644
index fc7ad0d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUi
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsUi'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUiRenderingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsUi',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsUi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiDeviceTestCases', '--include-filter', 'CtsUiRenderingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsUi',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUidIsolationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUidIsolationTestCases
deleted file mode 100644
index 2e8de2e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUidIsolationTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsUidIsolationTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUidIsolationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsUidIsolationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsUidIsolationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUidIsolationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsUidIsolationTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUsageStatsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUsageStatsTestCases
deleted file mode 100644
index 7ef5bcf..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUsageStatsTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsUsageStatsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsUsageStatsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsUsageStatsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUsageStatsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsUsageStatsTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUsbTests b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUsbTests
deleted file mode 100644
index 4f55a1e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUsbTests
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsUsbTests'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUsbTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsUsbTests',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsUsbTests',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUsbTests', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsUsbTests',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUtilTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUtilTestCases
deleted file mode 100644
index f0a9db6..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsUtilTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsUtilTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUtilTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsUtilTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsUtilTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUtilTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsUtilTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVideoTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVideoTestCases
deleted file mode 100644
index f99df86..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVideoTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsVideoTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsVideoTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsVideoTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVideoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsVideoTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsViewTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsViewTestCases
deleted file mode 100644
index c3c5fbc..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsViewTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsViewTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsViewTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsViewTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsViewTestCases',
-        target_plan=None,
-        bundle='arm',
-        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        precondition_commands=['sleep 60'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVmTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVmTestCases
deleted file mode 100644
index 960ea5e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVmTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsVmTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVmTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsVmTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsVmTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVmTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsVmTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVoice b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVoice
deleted file mode 100644
index 008c128..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVoice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsVoice'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVoiceInteractionTestCases, CtsVoiceSettingsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='9.0_r14.arm.CtsVoice',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsVoice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceSettingsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsVoice',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVrTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVrTestCases
deleted file mode 100644
index 250aecf..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsVrTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsVrTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVrTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsVrTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsVrTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVrTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsVrTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWebkitTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWebkitTestCases
deleted file mode 100644
index 1b1a3ea..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWebkitTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsWebkitTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWebkitTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsWebkitTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsWebkitTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWebkitTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsWebkitTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        prerequisites=['region_us'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWidgetTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWidgetTestCases
deleted file mode 100644
index b617fd7..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWidgetTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsWidgetTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWidgetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsWidgetTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsWidgetTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWidgetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsWidgetTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWindowManagerDeviceTestCases
deleted file mode 100644
index 590e3ec..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWindowManagerDeviceTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsWindowManagerDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsWindowManagerDeviceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsWindowManagerDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWindowManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWrap b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWrap
deleted file mode 100644
index 761c1f5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.CtsWrap
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.CtsWrap'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWrapNoWrapTestCases, CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugTestCases, CtsWrapWrapNoDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.CtsWrap',
-        test_name='cheets_CTS_P.9.0_r14.arm.CtsWrap',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsWrap',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases
deleted file mode 100644
index c649804..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAbiOverrideHostTestCases, CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAbiOverrideHostTestCases', '--include-filter', 'CtsAccelerationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
deleted file mode 100644
index 0fd9a37..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccessibilityServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccessibilityServiceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases
deleted file mode 100644
index 1dcc391..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccessibilityTestCases, CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityTestCases', '--include-filter', 'CtsAccountManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases
deleted file mode 100644
index 3411aa5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsActivityManagerDeviceSdk25TestCases, CtsActivityManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases
deleted file mode 100644
index 684c359..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases, CtsAlarmClockTestCases, CtsAlarmManagerTestCases, CtsAndroidAppTestCases, CtsAndroidTestBase27ApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases, CtsAnimationTestCases, CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases, CtsAppComponentFactoryTestCases, CtsAppSecurityHostTestCases, CtsAppTestCases, CtsAppUsageHostTestCases, CtsAppWidgetTestCases, CtsAslrMallocTestCases, CtsAssistTestCases, CtsAtraceHostTestCases, CtsAutoFillServiceTestCases, CtsBackgroundRestrictionsTestCases, CtsBackupHostTestCases, CtsBackupTestCases, CtsBatterySavingTestCases, CtsBionicTestCases, CtsBluetoothTestCases, CtsBootStatsTestCases, CtsCalendarcommon2TestCases, CtsCameraApi25TestCases, CtsCameraTestCases, CtsCarTestCases, CtsCarrierApiTestCases, CtsColorModeTestCases, CtsCompilationTestCases, CtsContactsProviderWipe, CtsContentTestCases, CtsCppToolsTestCases, CtsCurrentApiSignatureTestCases, CtsDatabaseTestCases, CtsDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        enable_default_apps=True,
-        tag='9.0_r14.arm.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--include-filter', 'CtsAlarmClockTestCases', '--include-filter', 'CtsAlarmManagerTestCases', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidTestBase27ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--include-filter', 'CtsAnimationTestCases', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppWidgetTestCases', '--include-filter', 'CtsAslrMallocTestCases', '--include-filter', 'CtsAssistTestCases', '--include-filter', 'CtsAtraceHostTestCases', '--include-filter', 'CtsAutoFillServiceTestCases', '--include-filter', 'CtsBackgroundRestrictionsTestCases', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--include-filter', 'CtsBatterySavingTestCases', '--include-filter', 'CtsBionicTestCases', '--include-filter', 'CtsBluetoothTestCases', '--include-filter', 'CtsBootStatsTestCases', '--include-filter', 'CtsCalendarcommon2TestCases', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraTestCases', '--include-filter', 'CtsCarTestCases', '--include-filter', 'CtsCarrierApiTestCases', '--include-filter', 'CtsColorModeTestCases', '--include-filter', 'CtsCompilationTestCases', '--include-filter', 'CtsContactsProviderWipe', '--include-filter', 'CtsContentTestCases', '--include-filter', 'CtsCppToolsTestCases', '--include-filter', 'CtsCurrentApiSignatureTestCases', '--include-filter', 'CtsDatabaseTestCases', '--include-filter', 'CtsDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        prerequisites=['region_us', 'bluetooth'],
-        timeout=113400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases
deleted file mode 100644
index 2fe5d87..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=15,
-        tag='9.0_r14.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsDeqpTestCases_-_CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=72000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases
deleted file mode 100644
index c7b9a3a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeviceIdleHostTestCases, CtsDevicePolicyManagerTestCases, CtsDexMetadataHostTestCases, CtsDisplayTestCases, CtsDpiTestCases, CtsDpiTestCases2, CtsDreamsTestCases, CtsDrmTestCases, CtsDumpsysHostTestCases, CtsDynamicLinkerTestCases, CtsEdiHostTestCases, CtsEffectTestCases, CtsExternalServiceTestCases, CtsExternalSourcesTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--include-filter', 'CtsDexMetadataHostTestCases', '--include-filter', 'CtsDisplayTestCases', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--include-filter', 'CtsDreamsTestCases', '--include-filter', 'CtsDrmTestCases', '--include-filter', 'CtsDumpsysHostTestCases', '--include-filter', 'CtsDynamicLinkerTestCases', '--include-filter', 'CtsEdiHostTestCases', '--include-filter', 'CtsEffectTestCases', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalSourcesTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=27000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
deleted file mode 100644
index 3c3801c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsFileSystemTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
deleted file mode 100644
index 2763fb6..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsGestureTestCases, CtsGpuToolsHostTestCases, CtsGraphicsTestCases, CtsHardwareTestCases, CtsHarmfulAppWarningHostTestCases, CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWildcardTestCases, CtsHostTzDataTests, CtsHostsideNetworkTests, CtsHostsideNumberBlockingTestCases, CtsHostsideTvTests, CtsHostsideWebViewTests, CtsIcuTestCases, CtsIncidentHostTestCases, CtsInlineMockingTestCases, CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases, CtsIntentSignatureTestCases, CtsJankDeviceTestCases, CtsJdwpSecurityHostTestCases, CtsJdwpTestCases, CtsJniTestCases, CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--include-filter', 'CtsGestureTestCases', '--include-filter', 'CtsGpuToolsHostTestCases', '--include-filter', 'CtsGraphicsTestCases', '--include-filter', 'CtsHardwareTestCases', '--include-filter', 'CtsHarmfulAppWarningHostTestCases', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--include-filter', 'CtsHostTzDataTests', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideWebViewTests', '--include-filter', 'CtsIcuTestCases', '--include-filter', 'CtsIncidentHostTestCases', '--include-filter', 'CtsInlineMockingTestCases', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodTestCases', '--include-filter', 'CtsIntentSignatureTestCases', '--include-filter', 'CtsJankDeviceTestCases', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpTestCases', '--include-filter', 'CtsJniTestCases', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=61200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases
deleted file mode 100644
index 5dcd15f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingTestCases, CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest997HostTestCases, CtsJvmtiTaggingHostTestCases, CtsJvmtiTrackingHostTestCases, CtsKernelConfigTestCases, CtsKeystoreTestCases, CtsLeanbackJankTestCases, CtsLegacyNotificationTestCases, CtsLibcoreFileIOTestCases, CtsLibcoreJsr166TestCases, CtsLibcoreLegacy22TestCases, CtsLibcoreOjTestCases, CtsLibcoreOkHttpTestCases, CtsLibcoreTestCases, CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofConscryptTestCases, CtsLiblogTestCases, CtsLocation2TestCases, CtsLocationTestCases, CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--include-filter', 'CtsKernelConfigTestCases', '--include-filter', 'CtsKeystoreTestCases', '--include-filter', 'CtsLeanbackJankTestCases', '--include-filter', 'CtsLegacyNotificationTestCases', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--include-filter', 'CtsLiblogTestCases', '--include-filter', 'CtsLocation2TestCases', '--include-filter', 'CtsLocationTestCases', '--include-filter', 'CtsLogdTestCases', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=62700)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases
deleted file mode 100644
index 07da368..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        needs_push_media=True,
-        tag='9.0_r14.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaBitstreamsTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases
deleted file mode 100644
index 5b95010..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
deleted file mode 100644
index b648084..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        needs_push_media=True,
-        tag='9.0_r14.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaStressTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaTestCases_-_CtsMediaTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaTestCases_-_CtsMediaTestCases
deleted file mode 100644
index f8ad29d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMediaTestCases_-_CtsMediaTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsMediaTestCases_-_CtsMediaTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        needs_push_media=True,
-        tag='9.0_r14.arm.all.CtsMediaTestCases_-_CtsMediaTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsMediaTestCases_-_CtsMediaTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsMediaTestCases_-_CtsMediaTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=36000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases
deleted file mode 100644
index 2f47e5c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMidiTestCases, CtsMockingDebuggableTestCases, CtsMockingTestCases, CtsMonkeyTestCases, CtsMultiUserHostTestCases, CtsMultiUserTestCases, CtsNNAPITestCases, CtsNativeHardwareTestCases, CtsNativeMediaAAudioTestCases, CtsNativeMediaSlTestCases, CtsNativeMediaXaTestCases, CtsNativeNetTestCases, CtsNdefTestCases, CtsNetSecConfigAttributeTestCases, CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigResourcesSrcTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetTestCases, CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyPermission22, CtsOmapiTestCases, CtsOpenGLTestCases, CtsOpenGlPerf2TestCases, CtsOpenGlPerfTestCases, CtsOsHostTestCases, CtsOsTestCases, CtsPdfTestCases, CtsPerfettoTestCases, CtsPermission2TestCases, CtsPermissionTestCases, CtsPreference2TestCases, CtsPreferenceTestCases, CtsPrintTestCases, CtsProtoTestCases, CtsProviderTestCases, CtsRenderscriptLegacyTestCases, CtsRenderscriptTestCases, CtsRsBlasTestCases, CtsRsCppTestCases, CtsSampleDeviceTestCases, CtsSampleHostTestCases, CtsSaxTestCases, CtsSeccompHostTestCases, CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases3, CtsSecurityBulletinHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMidiTestCases', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingTestCases', '--include-filter', 'CtsMonkeyTestCases', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserTestCases', '--include-filter', 'CtsNNAPITestCases', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeNetTestCases', '--include-filter', 'CtsNdefTestCases', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--include-filter', 'CtsOmapiTestCases', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerfTestCases', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsTestCases', '--include-filter', 'CtsPdfTestCases', '--include-filter', 'CtsPerfettoTestCases', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermissionTestCases', '--include-filter', 'CtsPreference2TestCases', '--include-filter', 'CtsPreferenceTestCases', '--include-filter', 'CtsPrintTestCases', '--include-filter', 'CtsProtoTestCases', '--include-filter', 'CtsProviderTestCases', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptTestCases', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsCppTestCases', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleHostTestCases', '--include-filter', 'CtsSaxTestCases', '--include-filter', 'CtsSeccompHostTestCases', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=106200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
deleted file mode 100644
index 8caed00..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSecurityHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        precondition_commands=['echo 3 > /proc/sys/kernel/perf_event_paranoid', 'modprobe configs'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
deleted file mode 100644
index 8ee55ea..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecurityTestCases, CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityTestCases', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsSensorTestCases_-_CtsSensorTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsSensorTestCases_-_CtsSensorTestCases
deleted file mode 100644
index fb1ab27..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsSensorTestCases_-_CtsSensorTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsSensorTestCases_-_CtsSensorTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=30,
-        tag='9.0_r14.arm.all.CtsSensorTestCases_-_CtsSensorTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsSensorTestCases_-_CtsSensorTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSensorTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsSensorTestCases_-_CtsSensorTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases
deleted file mode 100644
index 44b7545..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerTestCases, CtsSimRestrictedApisTestCases, CtsSimpleCpuTestCases, CtsSimpleperfTestCases, CtsSkQPTestCases, CtsSliceTestCases, CtsSpeechTestCases, CtsStatsdHostTestCases, CtsSustainedPerformanceHostTestCases, CtsSyncAccountAccessOtherCertTestCases, CtsSyncContentHostTestCases, CtsSyncManagerTestsCases, CtsSystemApiAnnotationTestCases, CtsSystemApiSignatureTestCases, CtsSystemIntentTestCases, CtsSystemUiHostTestCases, CtsSystemUiTestCases, CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases3, CtsTelephony2TestCases, CtsTelephonyTestCases, CtsTextTestCases, CtsThemeDeviceTestCases, CtsThemeHostTestCases, CtsToastLegacyTestCases, CtsToastTestCases, CtsTransitionTestCases, CtsTrustedVoiceHostTestCases, CtsTvProviderTestCases, CtsTvTestCases, CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUiRenderingTestCases, CtsUidIsolationTestCases, CtsUsageStatsTestCases, CtsUsbTests, CtsUtilTestCases, CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerTestCases', '--include-filter', 'CtsSimRestrictedApisTestCases', '--include-filter', 'CtsSimpleCpuTestCases', '--include-filter', 'CtsSimpleperfTestCases', '--include-filter', 'CtsSkQPTestCases', '--include-filter', 'CtsSliceTestCases', '--include-filter', 'CtsSpeechTestCases', '--include-filter', 'CtsStatsdHostTestCases', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncManagerTestsCases', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiTestCases', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases3', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephonyTestCases', '--include-filter', 'CtsTextTestCases', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeHostTestCases', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastTestCases', '--include-filter', 'CtsTransitionTestCases', '--include-filter', 'CtsTrustedVoiceHostTestCases', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvTestCases', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiDeviceTestCases', '--include-filter', 'CtsUiRenderingTestCases', '--include-filter', 'CtsUidIsolationTestCases', '--include-filter', 'CtsUsageStatsTestCases', '--include-filter', 'CtsUsbTests', '--include-filter', 'CtsUtilTestCases', '--include-filter', 'CtsVideoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
-        target_plan=None,
-        bundle='arm',
-        extra_artifacts_host=['/tmp/diff_*.png'],
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        prerequisites=['bluetooth', 'region_us'],
-        timeout=77400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsViewTestCases_-_CtsViewTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsViewTestCases_-_CtsViewTestCases
deleted file mode 100644
index 0d1d11a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsViewTestCases_-_CtsViewTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsViewTestCases_-_CtsViewTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsViewTestCases_-_CtsViewTestCases',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsViewTestCases_-_CtsViewTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsViewTestCases_-_CtsViewTestCases',
-        target_plan=None,
-        bundle='arm',
-        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        precondition_commands=['sleep 60'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsVmTestCases_-_vm-tests-tf b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsVmTestCases_-_vm-tests-tf
deleted file mode 100644
index 9952efb..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.all.CtsVmTestCases_-_vm-tests-tf
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.all.CtsVmTestCases_-_vm-tests-tf'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVmTestCases, CtsVoiceInteractionTestCases, CtsVoiceSettingsTestCases, CtsVrTestCases, CtsWebkitTestCases, CtsWidgetTestCases, CtsWindowManagerDeviceTestCases, CtsWrapNoWrapTestCases, CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugTestCases, CtsWrapWrapNoDebugTestCases, cts-system-all.api, signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases3, vm-tests-tf of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.arm.all.CtsVmTestCases_-_vm-tests-tf',
-        test_name='cheets_CTS_P.9.0_r14.arm.all.CtsVmTestCases_-_vm-tests-tf',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVmTestCases', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceSettingsTestCases', '--include-filter', 'CtsVrTestCases', '--include-filter', 'CtsWebkitTestCases', '--include-filter', 'CtsWidgetTestCases', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--include-filter', 'cts-system-all.api', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--include-filter', 'vm-tests-tf', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsVmTestCases_-_vm-tests-tf',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        prerequisites=['region_us'],
-        timeout=30600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.cts-system-all.api b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.cts-system-all.api
deleted file mode 100644
index 0b6985c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.cts-system-all.api
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.cts-system-all.api'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module cts-system-all.api of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.cts-system-all.api',
-        test_name='cheets_CTS_P.9.0_r14.arm.cts-system-all.api',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'cts-system-all.api', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='cts-system-all.api',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.signed-CtsSecureElementAccessControl b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.signed-CtsSecureElementAccessControl
deleted file mode 100644
index b19a923..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.signed-CtsSecureElementAccessControl
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.signed-CtsSecureElementAccessControl'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.signed-CtsSecureElementAccessControl',
-        test_name='cheets_CTS_P.9.0_r14.arm.signed-CtsSecureElementAccessControl',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='signed-CtsSecureElementAccessControl',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.tradefed-run-collect-tests-only-internal
deleted file mode 100644
index c769f80..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.tradefed-run-collect-tests-only-internal
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.tradefed-run-collect-tests-only-internal'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'LENGTHY'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run all of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='9.0_r14.arm.tradefed-run-collect-tests-only-internal',
-        test_name='cheets_CTS_P.9.0_r14.arm.tradefed-run-collect-tests-only-internal',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true', '--dynamic-config-url='],
-        retry_template=None,
-        target_module=None,
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.vm-tests-tf b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.vm-tests-tf
deleted file mode 100644
index 875cb5e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.vm-tests-tf
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.vm-tests-tf'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vm-tests-tf of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.vm-tests-tf',
-        test_name='cheets_CTS_P.9.0_r14.arm.vm-tests-tf',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'vm-tests-tf', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='vm-tests-tf',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.wm-presubmit b/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.wm-presubmit
deleted file mode 100644
index 7db74d5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.arm.wm-presubmit
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.arm.wm-presubmit'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:smoke'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module wm-presubmit of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.arm.wm-presubmit',
-        test_name='cheets_CTS_P.9.0_r14.arm.wm-presubmit',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--include-filter', 'CtsAppTestCases android.app.cts.TaskDescriptionTest', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--test-arg', 'com.android.compatibility.common.tradefed.testtype.JarHostTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.AndroidJUnitTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.HostTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.AndroidJUnitTest:exclude-annotation:androidx.test.filters.FlakyTest', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='wm-presubmit',
-        target_plan=None,
-        bundle='arm',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-arm.zip',
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAbiOverrideHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAbiOverrideHostTestCases
deleted file mode 100644
index 2d33a0f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAbiOverrideHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAbiOverrideHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAbiOverrideHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAbiOverrideHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAbiOverrideHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAbiOverrideHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAbiOverrideHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAccelerationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAccelerationTestCases
deleted file mode 100644
index 3f171aa..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAccelerationTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAccelerationTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAccelerationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAccelerationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccelerationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAccelerationTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAccessibility b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAccessibility
deleted file mode 100644
index bfeadbe..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAccessibility
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAccessibility'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccessibilityServiceTestCases, CtsAccessibilityTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAccessibility',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAccessibility',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityServiceTestCases', '--include-filter', 'CtsAccessibilityTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAccessibility',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAccountManagerTestCases
deleted file mode 100644
index 17f0f0d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAccountManagerTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAccountManagerTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAccountManagerTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAccountManagerTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccountManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAccountManagerTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsActivityManagerDevice b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsActivityManagerDevice
deleted file mode 100644
index beceb2e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsActivityManagerDevice
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsActivityManagerDevice'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsActivityManagerDeviceSdk25TestCases, CtsActivityManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsActivityManagerDevice',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsActivityManagerDevice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsActivityManagerDevice',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAdmin b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAdmin
deleted file mode 100644
index 7370881..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAdmin
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAdmin'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAdmin',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAdmin',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAdmin',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAlarm b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAlarm
deleted file mode 100644
index c650fb8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAlarm
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAlarm'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAlarmClockTestCases, CtsAlarmManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAlarm',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAlarm',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAlarmClockTestCases', '--include-filter', 'CtsAlarmManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAlarm',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAndroid b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAndroid
deleted file mode 100644
index ffb672d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAndroid
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAndroid'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAndroidAppTestCases, CtsAndroidTestBase27ApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAndroid',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAndroid',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidTestBase27ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAndroid',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAnimationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAnimationTestCases
deleted file mode 100644
index 21a3533..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAnimationTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAnimationTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAnimationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAnimationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAnimationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAnimationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAnimationTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsApacheHttpLegacy b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsApacheHttpLegacy
deleted file mode 100644
index 81a14e7..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsApacheHttpLegacy
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsApacheHttpLegacy'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsApacheHttpLegacy',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsApacheHttpLegacy',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsApacheHttpLegacy',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsApp b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsApp
deleted file mode 100644
index 0be56b2..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsApp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsApp'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAppComponentFactoryTestCases, CtsAppSecurityHostTestCases, CtsAppTestCases, CtsAppUsageHostTestCases, CtsAppWidgetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        enable_default_apps=True,
-        tag='9.0_r14.x86.CtsApp',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsApp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppWidgetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsApp',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=23400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAslrMallocTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAslrMallocTestCases
deleted file mode 100644
index 2ce7b91..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAslrMallocTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAslrMallocTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAslrMallocTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAslrMallocTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAslrMallocTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAslrMallocTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAslrMallocTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAssistTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAssistTestCases
deleted file mode 100644
index e91538c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAssistTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAssistTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAssistTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAssistTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAssistTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAssistTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAssistTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAtraceHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAtraceHostTestCases
deleted file mode 100644
index 3e06f49..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAtraceHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAtraceHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAtraceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAtraceHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAtraceHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAtraceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAtraceHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAutoFillServiceTestCases
deleted file mode 100644
index f78853f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsAutoFillServiceTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsAutoFillServiceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAutoFillServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsAutoFillServiceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsAutoFillServiceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAutoFillServiceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsAutoFillServiceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBackgroundRestrictionsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBackgroundRestrictionsTestCases
deleted file mode 100644
index 52af2b7..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBackgroundRestrictionsTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsBackgroundRestrictionsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBackgroundRestrictionsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsBackgroundRestrictionsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsBackgroundRestrictionsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBackgroundRestrictionsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBackgroundRestrictionsTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBackup b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBackup
deleted file mode 100644
index dc77d13..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBackup
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsBackup'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBackupHostTestCases, CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsBackup',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsBackup',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBackup',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBatterySavingTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBatterySavingTestCases
deleted file mode 100644
index 0f57bd5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBatterySavingTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsBatterySavingTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBatterySavingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsBatterySavingTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsBatterySavingTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBatterySavingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBatterySavingTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBionicTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBionicTestCases
deleted file mode 100644
index db518c5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBionicTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsBionicTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBionicTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsBionicTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsBionicTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBionicTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBionicTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBluetoothTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBluetoothTestCases
deleted file mode 100644
index e6fe9a7..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBluetoothTestCases
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-from autotest_lib.server import utils
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsBluetoothTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBluetoothTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-# For local debugging, if your test setup doesn't have servo, REMOVE these
-# two lines.
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_TS(machine):
-    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
-    # setup doesn't have servo.
-    try:
-        host_list = [hosts.create_host(machine, servo_args=servo_args)]
-    except:
-        # Just ignore any servo setup flakiness.
-        host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsBluetoothTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsBluetoothTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBluetoothTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBluetoothTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        prerequisites=['bluetooth'],
-        hard_reboot_on_failure=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBootStatsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBootStatsTestCases
deleted file mode 100644
index 27f68f7..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsBootStatsTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsBootStatsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBootStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsBootStatsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsBootStatsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBootStatsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsBootStatsTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCalendarcommon2TestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCalendarcommon2TestCases
deleted file mode 100644
index 1b7521b..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCalendarcommon2TestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCalendarcommon2TestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCalendarcommon2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCalendarcommon2TestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCalendarcommon2TestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCalendarcommon2TestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCalendarcommon2TestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCamera b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCamera
deleted file mode 100644
index cbfa955..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCamera
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCamera'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraApi25TestCases, CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCamera',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCamera',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCamera',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.led.camerabox.back b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.led.camerabox.back
deleted file mode 100644
index 9b45d3e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.led.camerabox.back
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCameraTestCases.led.camerabox.back'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86, camerabox_light:led, camerabox_facing:back'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        camera_facing='back',
-        cmdline_args=args,
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCameraTestCases.led.camerabox.back',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCameraTestCases.led.camerabox.back',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCameraTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        load_waivers=False,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.led.camerabox.front b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.led.camerabox.front
deleted file mode 100644
index 8355383..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.led.camerabox.front
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCameraTestCases.led.camerabox.front'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86, camerabox_light:led, camerabox_facing:front'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        camera_facing='front',
-        cmdline_args=args,
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCameraTestCases.led.camerabox.front',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCameraTestCases.led.camerabox.front',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCameraTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        load_waivers=False,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.noled.camerabox.back b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.noled.camerabox.back
deleted file mode 100644
index d39e225..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.noled.camerabox.back
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCameraTestCases.noled.camerabox.back'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86, camerabox_light:noled, camerabox_facing:back'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        camera_facing='back',
-        cmdline_args=args,
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCameraTestCases.noled.camerabox.back',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCameraTestCases.noled.camerabox.back',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCameraTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        load_waivers=False,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.noled.camerabox.front b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.noled.camerabox.front
deleted file mode 100644
index e7f030a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCameraTestCases.noled.camerabox.front
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCameraTestCases.noled.camerabox.front'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86, camerabox_light:noled, camerabox_facing:front'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        camera_facing='front',
-        cmdline_args=args,
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCameraTestCases.noled.camerabox.front',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCameraTestCases.noled.camerabox.front',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCameraTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        load_waivers=False,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCarTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCarTestCases
deleted file mode 100644
index ba3cea9..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCarTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCarTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCarTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCarTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCarTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCarTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCarrierApiTestCases
deleted file mode 100644
index fa70875..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCarrierApiTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCarrierApiTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCarrierApiTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCarrierApiTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarrierApiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCarrierApiTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsColorModeTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsColorModeTestCases
deleted file mode 100644
index 292dfa9..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsColorModeTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsColorModeTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsColorModeTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsColorModeTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsColorModeTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsColorModeTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsColorModeTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCompilationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCompilationTestCases
deleted file mode 100644
index 0a72aaa..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCompilationTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCompilationTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCompilationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCompilationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCompilationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCompilationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCompilationTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsContactsProviderWipe b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsContactsProviderWipe
deleted file mode 100644
index 6038e34..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsContactsProviderWipe
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsContactsProviderWipe'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsContactsProviderWipe of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsContactsProviderWipe',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsContactsProviderWipe',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsContactsProviderWipe', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsContactsProviderWipe',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsContentTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsContentTestCases
deleted file mode 100644
index b96f052..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsContentTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsContentTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsContentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        enable_default_apps=True,
-        tag='9.0_r14.x86.CtsContentTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsContentTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsContentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsContentTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        prerequisites=['region_us'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCppToolsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCppToolsTestCases
deleted file mode 100644
index f0ea16f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCppToolsTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCppToolsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCppToolsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCppToolsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCppToolsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCppToolsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCppToolsTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCurrentApiSignatureTestCases
deleted file mode 100644
index 63da6f1..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsCurrentApiSignatureTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsCurrentApiSignatureTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsCurrentApiSignatureTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsCurrentApiSignatureTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCurrentApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsCurrentApiSignatureTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDatabaseTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDatabaseTestCases
deleted file mode 100644
index c16a7fb..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDatabaseTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDatabaseTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDatabaseTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDatabaseTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDatabaseTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDatabaseTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDatabaseTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDebugTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDebugTestCases
deleted file mode 100644
index 111c4d5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDebugTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDebugTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDebugTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDebugTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDebugTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases
deleted file mode 100644
index 4f85725..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-deqp, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=15,
-        tag='9.0_r14.x86.CtsDeqpTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=72000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-EGL b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-EGL
deleted file mode 100644
index a87a083..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-EGL
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases.dEQP-EGL'
-ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-EGL of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDeqpTestCases.dEQP-EGL',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases.dEQP-EGL',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-EGL.*', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES2 b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES2
deleted file mode 100644
index 0563a4d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES2
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES2'
-ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES2',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES2',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES2.*', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES3 b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES3
deleted file mode 100644
index 34b47ce..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES3
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES3'
-ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES3',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES3',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES3.*', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES31 b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES31
deleted file mode 100644
index fc2befd..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES31
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES31'
-ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES31 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES31',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases.dEQP-GLES31',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES31.*', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-VK b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-VK
deleted file mode 100644
index 4105f78..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDeqpTestCases.dEQP-VK
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases.dEQP-VK'
-ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-VK of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDeqpTestCases.dEQP-VK',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDeqpTestCases.dEQP-VK',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-VK.*', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=54000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDevice b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDevice
deleted file mode 100644
index 0a30e9e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDevice
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDevice'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeviceIdleHostTestCases, CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDevice',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDevice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDevice',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDexMetadataHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDexMetadataHostTestCases
deleted file mode 100644
index b4f1ab8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDexMetadataHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDexMetadataHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDexMetadataHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDexMetadataHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDexMetadataHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDexMetadataHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDexMetadataHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDisplayTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDisplayTestCases
deleted file mode 100644
index 05d0d61..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDisplayTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDisplayTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDisplayTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDisplayTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDisplayTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDisplayTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDisplayTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDpi b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDpi
deleted file mode 100644
index 1b06eba..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDpi
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDpi'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDpiTestCases, CtsDpiTestCases2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDpi',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDpi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDpi',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDreamsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDreamsTestCases
deleted file mode 100644
index 521abbc..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDreamsTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDreamsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDreamsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDreamsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDreamsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDreamsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDreamsTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDrmTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDrmTestCases
deleted file mode 100644
index 17f7522..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDrmTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDrmTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDrmTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDrmTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDrmTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDrmTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDrmTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDumpsysHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDumpsysHostTestCases
deleted file mode 100644
index ab894f4..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDumpsysHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDumpsysHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDumpsysHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDumpsysHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDumpsysHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDumpsysHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDumpsysHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDynamicLinkerTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDynamicLinkerTestCases
deleted file mode 100644
index 5e69f25..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsDynamicLinkerTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsDynamicLinkerTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDynamicLinkerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsDynamicLinkerTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsDynamicLinkerTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDynamicLinkerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsDynamicLinkerTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsEdiHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsEdiHostTestCases
deleted file mode 100644
index 125fea2..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsEdiHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsEdiHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsEdiHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsEdiHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsEdiHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsEdiHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsEdiHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsEffectTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsEffectTestCases
deleted file mode 100644
index 7ea05b0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsEffectTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsEffectTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsEffectTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsEffectTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsEffectTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsEffectTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsEffectTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsExternalS b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsExternalS
deleted file mode 100644
index 41f1b0d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsExternalS
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsExternalS'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsExternalServiceTestCases, CtsExternalSourcesTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsExternalS',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsExternalS',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalSourcesTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsExternalS',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsFileSystemTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsFileSystemTestCases
deleted file mode 100644
index 5bc452a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsFileSystemTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsFileSystemTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsFileSystemTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsFileSystemTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsFileSystemTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsFileSystemTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsFragment b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsFragment
deleted file mode 100644
index 38459d0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsFragment
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsFragment'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsFragment',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsFragment',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsFragment',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsGestureTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsGestureTestCases
deleted file mode 100644
index e2a583c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsGestureTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsGestureTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGestureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsGestureTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsGestureTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGestureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsGestureTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsGpuToolsHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsGpuToolsHostTestCases
deleted file mode 100644
index 1418dc3..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsGpuToolsHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsGpuToolsHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGpuToolsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsGpuToolsHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsGpuToolsHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGpuToolsHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsGpuToolsHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsGraphicsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsGraphicsTestCases
deleted file mode 100644
index 076b1e4..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsGraphicsTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsGraphicsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGraphicsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=5,
-        tag='9.0_r14.x86.CtsGraphicsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsGraphicsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGraphicsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsGraphicsTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHardwareTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHardwareTestCases
deleted file mode 100644
index a6530a9..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHardwareTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsHardwareTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational3'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHardwareTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsHardwareTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsHardwareTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHardwareTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsHardwareTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHarmfulAppWarningHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHarmfulAppWarningHostTestCases
deleted file mode 100644
index 40393f0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHarmfulAppWarningHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsHarmfulAppWarningHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHarmfulAppWarningHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsHarmfulAppWarningHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsHarmfulAppWarningHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHarmfulAppWarningHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsHarmfulAppWarningHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHiddenApi b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHiddenApi
deleted file mode 100644
index b3f34ec..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHiddenApi
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsHiddenApi'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWildcardTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsHiddenApi',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsHiddenApi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsHiddenApi',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHostTzDataTests b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHostTzDataTests
deleted file mode 100644
index 0905558..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHostTzDataTests
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsHostTzDataTests'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHostTzDataTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsHostTzDataTests',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsHostTzDataTests',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHostTzDataTests', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsHostTzDataTests',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHostside b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHostside
deleted file mode 100644
index 4ce8169..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsHostside
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsHostside'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHostsideNetworkTests, CtsHostsideNumberBlockingTestCases, CtsHostsideTvTests, CtsHostsideWebViewTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsHostside',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsHostside',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideWebViewTests', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsHostside',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsIcuTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsIcuTestCases
deleted file mode 100644
index df92786..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsIcuTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsIcuTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIcuTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsIcuTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsIcuTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIcuTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsIcuTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsIncidentHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsIncidentHostTestCases
deleted file mode 100644
index 48b967c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsIncidentHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsIncidentHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIncidentHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsIncidentHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsIncidentHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIncidentHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsIncidentHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsInlineMockingTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsInlineMockingTestCases
deleted file mode 100644
index 70f146a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsInlineMockingTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsInlineMockingTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInlineMockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsInlineMockingTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsInlineMockingTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsInlineMockingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsInlineMockingTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsInputMethod b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsInputMethod
deleted file mode 100644
index cf36691..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsInputMethod
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsInputMethod'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsInputMethod',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsInputMethod',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsInputMethod',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsIntentSignatureTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsIntentSignatureTestCases
deleted file mode 100644
index 3554406..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsIntentSignatureTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsIntentSignatureTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIntentSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsIntentSignatureTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsIntentSignatureTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIntentSignatureTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIntentSignatureTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJankDeviceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJankDeviceTestCases
deleted file mode 100644
index 786cc0c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJankDeviceTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsJankDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJankDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsJankDeviceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsJankDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsJankDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsJankDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJdwp b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJdwp
deleted file mode 100644
index a1be696..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJdwp
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsJdwp'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJdwpSecurityHostTestCases, CtsJdwpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsJdwp',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsJdwp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsJdwp',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJniTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJniTestCases
deleted file mode 100644
index 88305ec..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJniTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsJniTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJniTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsJniTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsJniTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsJniTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsJniTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJobScheduler b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJobScheduler
deleted file mode 100644
index 571020f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJobScheduler
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsJobScheduler'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsJobScheduler',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsJobScheduler',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsJobScheduler',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJvmti b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJvmti
deleted file mode 100644
index 1048099..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsJvmti
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsJvmti'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingTestCases, CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest997HostTestCases, CtsJvmtiTaggingHostTestCases, CtsJvmtiTrackingHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsJvmti',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsJvmti',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsJvmti',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=26700)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsKernelConfigTestCases
deleted file mode 100644
index 29810ea..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsKernelConfigTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsKernelConfigTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsKernelConfigTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsKernelConfigTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKernelConfigTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsKernelConfigTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsKeystoreTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsKeystoreTestCases
deleted file mode 100644
index 95d6f71..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsKeystoreTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsKeystoreTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsKeystoreTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsKeystoreTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsKeystoreTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKeystoreTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsKeystoreTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLeanbackJankTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLeanbackJankTestCases
deleted file mode 100644
index 1074ee7..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLeanbackJankTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsLeanbackJankTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLeanbackJankTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsLeanbackJankTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsLeanbackJankTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLeanbackJankTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLeanbackJankTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLegacyNotificationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLegacyNotificationTestCases
deleted file mode 100644
index c316ad9..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLegacyNotificationTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsLegacyNotificationTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLegacyNotificationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsLegacyNotificationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsLegacyNotificationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLegacyNotificationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLegacyNotificationTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLibcore b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLibcore
deleted file mode 100644
index bb02c16..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLibcore
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsLibcore'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1, suite:vmtest-informational3'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLibcoreFileIOTestCases, CtsLibcoreJsr166TestCases, CtsLibcoreLegacy22TestCases, CtsLibcoreOjTestCases, CtsLibcoreOkHttpTestCases, CtsLibcoreTestCases, CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofConscryptTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsLibcore',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsLibcore',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLibcore',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLiblogTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLiblogTestCases
deleted file mode 100644
index 9a60255..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLiblogTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsLiblogTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLiblogTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsLiblogTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsLiblogTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLiblogTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLiblogTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLocation b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLocation
deleted file mode 100644
index d604e6d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLocation
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsLocation'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLocation2TestCases, CtsLocationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsLocation',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsLocation',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLocation2TestCases', '--include-filter', 'CtsLocationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLocation',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLogdTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLogdTestCases
deleted file mode 100644
index 60f892f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsLogdTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsLogdTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsLogdTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsLogdTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLogdTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsLogdTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaBitstreamsTestCases
deleted file mode 100644
index 2658c50..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaBitstreamsTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsMediaBitstreamsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='9.0_r14.x86.CtsMediaBitstreamsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsMediaBitstreamsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaBitstreamsTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaBitstreamsTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaHostTestCases
deleted file mode 100644
index c85f879..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsMediaHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsMediaHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsMediaHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMediaHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaStressTestCases
deleted file mode 100644
index ae0912c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaStressTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsMediaStressTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational2'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='9.0_r14.x86.CtsMediaStressTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsMediaStressTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaStressTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaStressTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaTestCases
deleted file mode 100644
index b8eb4c1..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsMediaTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational2'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='9.0_r14.x86.CtsMediaTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsMediaTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=36000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaTestCases.audio b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaTestCases.audio
deleted file mode 100644
index 6913b45..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMediaTestCases.audio
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsMediaTestCases.audio'
-ATTRIBUTES = 'suite:arc-cts'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases.audio of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='9.0_r14.x86.CtsMediaTestCases.audio',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsMediaTestCases.audio',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFocusTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFormatTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioManagerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlayRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPreProcessingTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPresentationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordAppOpTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecord_BufferSizeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordingConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackLatencyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackSurroundTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrack_ListenerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolAacTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolMidiTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolOggTest', '--include-filter', 'CtsMediaTestCases android.media.cts.VolumeShaperTest', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMediaTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMidiTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMidiTestCases
deleted file mode 100644
index 1f62e0c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMidiTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsMidiTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsMidiTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsMidiTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMidiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMidiTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMocking b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMocking
deleted file mode 100644
index c8b6e78..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMocking
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsMocking'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMockingDebuggableTestCases, CtsMockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsMocking',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsMocking',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMocking',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMonkeyTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMonkeyTestCases
deleted file mode 100644
index c7b9523..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMonkeyTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsMonkeyTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMonkeyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsMonkeyTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsMonkeyTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMonkeyTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMonkeyTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMultiUser b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMultiUser
deleted file mode 100644
index 824792c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsMultiUser
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsMultiUser'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMultiUserHostTestCases, CtsMultiUserTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsMultiUser',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsMultiUser',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsMultiUser',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNNAPITestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNNAPITestCases
deleted file mode 100644
index 5586376..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNNAPITestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsNNAPITestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNNAPITestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsNNAPITestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsNNAPITestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsNNAPITestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsNNAPITestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNative b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNative
deleted file mode 100644
index abf2eb9..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNative
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsNative'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNativeHardwareTestCases, CtsNativeMediaAAudioTestCases, CtsNativeMediaSlTestCases, CtsNativeMediaXaTestCases, CtsNativeNetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsNative',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsNative',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeNetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsNative',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNdefTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNdefTestCases
deleted file mode 100644
index 8e455aa..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNdefTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsNdefTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNdefTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsNdefTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsNdefTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsNdefTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsNdefTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNet b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNet
deleted file mode 100644
index 9636f16..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsNet
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsNet'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNetSecConfigAttributeTestCases, CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigResourcesSrcTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetTestCases, CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyPermission22 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsNet',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsNet',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsNet',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=30600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsOmapiTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsOmapiTestCases
deleted file mode 100644
index 9c853f0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsOmapiTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsOmapiTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOmapiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsOmapiTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsOmapiTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsOmapiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsOmapiTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsOpenG b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsOpenG
deleted file mode 100644
index 98cc7e3..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsOpenG
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsOpenG'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOpenGLTestCases, CtsOpenGlPerf2TestCases, CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsOpenG',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsOpenG',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsOpenG',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsOs b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsOs
deleted file mode 100644
index 419db2d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsOs
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsOs'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOsHostTestCases, CtsOsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsOs',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsOs',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsOs',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPdfTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPdfTestCases
deleted file mode 100644
index 9c8947c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPdfTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsPdfTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPdfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsPdfTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsPdfTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPdfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsPdfTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPerfettoTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPerfettoTestCases
deleted file mode 100644
index d38d8f1..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPerfettoTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsPerfettoTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsPerfettoTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsPerfettoTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPerfettoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsPerfettoTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPermission b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPermission
deleted file mode 100644
index a802761..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPermission
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsPermission'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPermission2TestCases, CtsPermissionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsPermission',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsPermission',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermissionTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsPermission',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPreference b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPreference
deleted file mode 100644
index e335647..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPreference
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsPreference'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPreference2TestCases, CtsPreferenceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsPreference',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsPreference',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPreference2TestCases', '--include-filter', 'CtsPreferenceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsPreference',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPrintTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPrintTestCases
deleted file mode 100644
index a63342b..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsPrintTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsPrintTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPrintTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsPrintTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsPrintTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPrintTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsPrintTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsProtoTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsProtoTestCases
deleted file mode 100644
index 7813384..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsProtoTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsProtoTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsProtoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsProtoTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsProtoTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsProtoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsProtoTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsProviderTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsProviderTestCases
deleted file mode 100644
index 5bb5f1f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsProviderTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsProviderTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsProviderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsProviderTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsProviderTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsProviderTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsProviderTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsRenderscript b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsRenderscript
deleted file mode 100644
index d19d699..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsRenderscript
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsRenderscript'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRenderscriptLegacyTestCases, CtsRenderscriptTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsRenderscript',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsRenderscript',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsRenderscript',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsRs b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsRs
deleted file mode 100644
index 32cb214..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsRs
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsRs'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRsBlasTestCases, CtsRsCppTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsRs',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsRs',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsCppTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsRs',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSample b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSample
deleted file mode 100644
index 1e0d293..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSample
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSample'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSample',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSample',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSample',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSaxTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSaxTestCases
deleted file mode 100644
index 7e4b023..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSaxTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSaxTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSaxTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSaxTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSaxTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSaxTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSaxTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSeccompHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSeccompHostTestCases
deleted file mode 100644
index 602f49f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSeccompHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSeccompHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSeccompHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSeccompHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSeccompHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSeccompHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSeccompHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSecureElementAccessControl b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSecureElementAccessControl
deleted file mode 100644
index 36db4d2..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSecureElementAccessControl
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSecureElementAccessControl'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSecureElementAccessControl',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSecureElementAccessControl',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSecureElementAccessControl',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSecurity b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSecurity
deleted file mode 100644
index f31a7e1..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSecurity
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSecurity'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecurityBulletinHostTestCases, CtsSecurityHostTestCases, CtsSecurityTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSecurity',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSecurity',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--include-filter', 'CtsSecurityHostTestCases', '--include-filter', 'CtsSecurityTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSecurity',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        precondition_commands=['echo 3 > /proc/sys/kernel/perf_event_paranoid', 'modprobe configs'],
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSelinuxTargetSdk b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSelinuxTargetSdk
deleted file mode 100644
index 36c1710..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSelinuxTargetSdk
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSelinuxTargetSdk'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSelinuxTargetSdk',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSelinuxTargetSdk',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSelinuxTargetSdk',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSensorTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSensorTestCases
deleted file mode 100644
index 872b340..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSensorTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSensorTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=30,
-        tag='9.0_r14.x86.CtsSensorTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSensorTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSensorTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSensorTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsShortcut b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsShortcut
deleted file mode 100644
index 1a77d62..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsShortcut
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsShortcut'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsShortcut',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsShortcut',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsShortcut',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSimRestrictedApisTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSimRestrictedApisTestCases
deleted file mode 100644
index 6eb2d51..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSimRestrictedApisTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSimRestrictedApisTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimRestrictedApisTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSimRestrictedApisTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSimRestrictedApisTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimRestrictedApisTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSimRestrictedApisTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSimpleCpuTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSimpleCpuTestCases
deleted file mode 100644
index 399d268..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSimpleCpuTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSimpleCpuTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimpleCpuTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSimpleCpuTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSimpleCpuTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleCpuTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSimpleCpuTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSimpleperfTestCases
deleted file mode 100644
index 86b2c55..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSimpleperfTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSimpleperfTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSimpleperfTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSimpleperfTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleperfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSimpleperfTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSkQPTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSkQPTestCases
deleted file mode 100644
index 24d27e0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSkQPTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSkQPTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSkQPTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSkQPTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSkQPTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSkQPTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSkQPTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSliceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSliceTestCases
deleted file mode 100644
index d79c90c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSliceTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSliceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSliceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSliceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSliceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSliceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSliceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSpeechTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSpeechTestCases
deleted file mode 100644
index 7175cc0..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSpeechTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSpeechTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSpeechTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSpeechTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSpeechTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSpeechTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSpeechTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsStatsdHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsStatsdHostTestCases
deleted file mode 100644
index 6ee634a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsStatsdHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsStatsdHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsStatsdHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsStatsdHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsStatsdHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsStatsdHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsStatsdHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        prerequisites=['bluetooth'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSustainedPerformanceHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSustainedPerformanceHostTestCases
deleted file mode 100644
index 9707f41..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSustainedPerformanceHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSustainedPerformanceHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSustainedPerformanceHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSustainedPerformanceHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSustainedPerformanceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSustainedPerformanceHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSync b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSync
deleted file mode 100644
index 6f2253a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSync
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSync'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases, CtsSyncContentHostTestCases, CtsSyncManagerTestsCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSync',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSync',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncManagerTestsCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSync',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSystem b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSystem
deleted file mode 100644
index b546e12..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsSystem
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsSystem'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSystemApiAnnotationTestCases, CtsSystemApiSignatureTestCases, CtsSystemIntentTestCases, CtsSystemUiHostTestCases, CtsSystemUiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsSystem',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsSystem',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsSystem',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTelecom b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTelecom
deleted file mode 100644
index 0043fbe..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTelecom
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsTelecom'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsTelecom',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsTelecom',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTelecom',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTelephony b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTelephony
deleted file mode 100644
index 726c5e6..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTelephony
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsTelephony'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTelephony2TestCases, CtsTelephonyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsTelephony',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsTelephony',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephonyTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTelephony',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTextTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTextTestCases
deleted file mode 100644
index 1d0c0ad..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTextTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsTextTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTextTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsTextTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsTextTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTextTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTextTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTheme b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTheme
deleted file mode 100644
index 105498b..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTheme
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsTheme'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1, suite:vmtest-informational3'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsThemeDeviceTestCases, CtsThemeHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsTheme',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsTheme',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTheme',
-        target_plan=None,
-        bundle='x86',
-        extra_artifacts_host=['/tmp/diff_*.png'],
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        prerequisites=['region_us'],
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsToast b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsToast
deleted file mode 100644
index ba2bbd8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsToast
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsToast'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsToastLegacyTestCases, CtsToastTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsToast',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsToast',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsToast',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTransitionTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTransitionTestCases
deleted file mode 100644
index 002b539..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTransitionTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsTransitionTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTransitionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsTransitionTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsTransitionTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTransitionTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTransitionTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTrustedVoiceHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTrustedVoiceHostTestCases
deleted file mode 100644
index aa24b6b..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTrustedVoiceHostTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsTrustedVoiceHostTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTrustedVoiceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsTrustedVoiceHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsTrustedVoiceHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTrustedVoiceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTrustedVoiceHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTv b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTv
deleted file mode 100644
index 1c3fb5d..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsTv
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsTv'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTvProviderTestCases, CtsTvTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsTv',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsTv',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsTv',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUi b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUi
deleted file mode 100644
index 361db45..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUi
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsUi'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUiRenderingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsUi',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsUi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiDeviceTestCases', '--include-filter', 'CtsUiRenderingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsUi',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUidIsolationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUidIsolationTestCases
deleted file mode 100644
index a8946da..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUidIsolationTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsUidIsolationTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUidIsolationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsUidIsolationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsUidIsolationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUidIsolationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsUidIsolationTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUsageStatsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUsageStatsTestCases
deleted file mode 100644
index 1aaa5be..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUsageStatsTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsUsageStatsTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsUsageStatsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsUsageStatsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUsageStatsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsUsageStatsTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUsbTests b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUsbTests
deleted file mode 100644
index a0a9841..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUsbTests
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsUsbTests'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUsbTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsUsbTests',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsUsbTests',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUsbTests', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsUsbTests',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUtilTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUtilTestCases
deleted file mode 100644
index 0b331a4..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsUtilTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsUtilTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUtilTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsUtilTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsUtilTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUtilTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsUtilTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVideoTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVideoTestCases
deleted file mode 100644
index 3d2bb56..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVideoTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsVideoTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsVideoTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsVideoTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVideoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsVideoTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsViewTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsViewTestCases
deleted file mode 100644
index c6e128f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsViewTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsViewTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsViewTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsViewTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsViewTestCases',
-        target_plan=None,
-        bundle='x86',
-        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        precondition_commands=['sleep 60'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVmTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVmTestCases
deleted file mode 100644
index fd8a3ef..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVmTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsVmTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVmTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsVmTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsVmTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVmTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsVmTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVoice b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVoice
deleted file mode 100644
index f510547..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVoice
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsVoice'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVoiceInteractionTestCases, CtsVoiceSettingsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsVoice',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsVoice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceSettingsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsVoice',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVrTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVrTestCases
deleted file mode 100644
index 6892967..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsVrTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsVrTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVrTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsVrTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsVrTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVrTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsVrTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWebkitTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWebkitTestCases
deleted file mode 100644
index 29b61d2..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWebkitTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsWebkitTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWebkitTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsWebkitTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsWebkitTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWebkitTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsWebkitTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        prerequisites=['region_us'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWidgetTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWidgetTestCases
deleted file mode 100644
index ce5ba87..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWidgetTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsWidgetTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWidgetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsWidgetTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsWidgetTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWidgetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsWidgetTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWindowManagerDeviceTestCases
deleted file mode 100644
index 1e40a0e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWindowManagerDeviceTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsWindowManagerDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsWindowManagerDeviceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsWindowManagerDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWindowManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWrap b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWrap
deleted file mode 100644
index 3d11f75..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.CtsWrap
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.CtsWrap'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWrapNoWrapTestCases, CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugTestCases, CtsWrapWrapNoDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.CtsWrap',
-        test_name='cheets_CTS_P.9.0_r14.x86.CtsWrap',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='CtsWrap',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases
deleted file mode 100644
index 224abfc..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAbiOverrideHostTestCases, CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAbiOverrideHostTestCases', '--include-filter', 'CtsAccelerationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
deleted file mode 100644
index c95918c..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccessibilityServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccessibilityServiceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases
deleted file mode 100644
index f457e85..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccessibilityTestCases, CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityTestCases', '--include-filter', 'CtsAccountManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases
deleted file mode 100644
index 1219bb8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsActivityManagerDeviceSdk25TestCases, CtsActivityManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases
deleted file mode 100644
index d409d4f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases, CtsAlarmClockTestCases, CtsAlarmManagerTestCases, CtsAndroidAppTestCases, CtsAndroidTestBase27ApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases, CtsAnimationTestCases, CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases, CtsAppComponentFactoryTestCases, CtsAppSecurityHostTestCases, CtsAppTestCases, CtsAppUsageHostTestCases, CtsAppWidgetTestCases, CtsAslrMallocTestCases, CtsAssistTestCases, CtsAtraceHostTestCases, CtsAutoFillServiceTestCases, CtsBackgroundRestrictionsTestCases, CtsBackupHostTestCases, CtsBackupTestCases, CtsBatterySavingTestCases, CtsBionicTestCases, CtsBluetoothTestCases, CtsBootStatsTestCases, CtsCalendarcommon2TestCases, CtsCameraApi25TestCases, CtsCameraTestCases, CtsCarTestCases, CtsCarrierApiTestCases, CtsColorModeTestCases, CtsCompilationTestCases, CtsContactsProviderWipe, CtsContentTestCases, CtsCppToolsTestCases, CtsCurrentApiSignatureTestCases, CtsDatabaseTestCases, CtsDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        enable_default_apps=True,
-        tag='9.0_r14.x86.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--include-filter', 'CtsAlarmClockTestCases', '--include-filter', 'CtsAlarmManagerTestCases', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidTestBase27ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--include-filter', 'CtsAnimationTestCases', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppWidgetTestCases', '--include-filter', 'CtsAslrMallocTestCases', '--include-filter', 'CtsAssistTestCases', '--include-filter', 'CtsAtraceHostTestCases', '--include-filter', 'CtsAutoFillServiceTestCases', '--include-filter', 'CtsBackgroundRestrictionsTestCases', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--include-filter', 'CtsBatterySavingTestCases', '--include-filter', 'CtsBionicTestCases', '--include-filter', 'CtsBluetoothTestCases', '--include-filter', 'CtsBootStatsTestCases', '--include-filter', 'CtsCalendarcommon2TestCases', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraTestCases', '--include-filter', 'CtsCarTestCases', '--include-filter', 'CtsCarrierApiTestCases', '--include-filter', 'CtsColorModeTestCases', '--include-filter', 'CtsCompilationTestCases', '--include-filter', 'CtsContactsProviderWipe', '--include-filter', 'CtsContentTestCases', '--include-filter', 'CtsCppToolsTestCases', '--include-filter', 'CtsCurrentApiSignatureTestCases', '--include-filter', 'CtsDatabaseTestCases', '--include-filter', 'CtsDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsAdminPackageInstallerTestCases_-_CtsDebugTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        prerequisites=['region_us', 'bluetooth'],
-        timeout=113400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases
deleted file mode 100644
index 38d07ff..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=15,
-        tag='9.0_r14.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsDeqpTestCases_-_CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=72000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases
deleted file mode 100644
index 19b0e6f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeviceIdleHostTestCases, CtsDevicePolicyManagerTestCases, CtsDexMetadataHostTestCases, CtsDisplayTestCases, CtsDpiTestCases, CtsDpiTestCases2, CtsDreamsTestCases, CtsDrmTestCases, CtsDumpsysHostTestCases, CtsDynamicLinkerTestCases, CtsEdiHostTestCases, CtsEffectTestCases, CtsExternalServiceTestCases, CtsExternalSourcesTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--include-filter', 'CtsDexMetadataHostTestCases', '--include-filter', 'CtsDisplayTestCases', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--include-filter', 'CtsDreamsTestCases', '--include-filter', 'CtsDrmTestCases', '--include-filter', 'CtsDumpsysHostTestCases', '--include-filter', 'CtsDynamicLinkerTestCases', '--include-filter', 'CtsEdiHostTestCases', '--include-filter', 'CtsEffectTestCases', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalSourcesTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=27000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
deleted file mode 100644
index c650982..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsFileSystemTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
deleted file mode 100644
index aac0ddb..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsGestureTestCases, CtsGpuToolsHostTestCases, CtsGraphicsTestCases, CtsHardwareTestCases, CtsHarmfulAppWarningHostTestCases, CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWildcardTestCases, CtsHostTzDataTests, CtsHostsideNetworkTests, CtsHostsideNumberBlockingTestCases, CtsHostsideTvTests, CtsHostsideWebViewTests, CtsIcuTestCases, CtsIncidentHostTestCases, CtsInlineMockingTestCases, CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases, CtsIntentSignatureTestCases, CtsJankDeviceTestCases, CtsJdwpSecurityHostTestCases, CtsJdwpTestCases, CtsJniTestCases, CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--include-filter', 'CtsGestureTestCases', '--include-filter', 'CtsGpuToolsHostTestCases', '--include-filter', 'CtsGraphicsTestCases', '--include-filter', 'CtsHardwareTestCases', '--include-filter', 'CtsHarmfulAppWarningHostTestCases', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--include-filter', 'CtsHostTzDataTests', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideWebViewTests', '--include-filter', 'CtsIcuTestCases', '--include-filter', 'CtsIncidentHostTestCases', '--include-filter', 'CtsInlineMockingTestCases', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodTestCases', '--include-filter', 'CtsIntentSignatureTestCases', '--include-filter', 'CtsJankDeviceTestCases', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpTestCases', '--include-filter', 'CtsJniTestCases', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=61200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases
deleted file mode 100644
index c69b821..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingTestCases, CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest997HostTestCases, CtsJvmtiTaggingHostTestCases, CtsJvmtiTrackingHostTestCases, CtsKernelConfigTestCases, CtsKeystoreTestCases, CtsLeanbackJankTestCases, CtsLegacyNotificationTestCases, CtsLibcoreFileIOTestCases, CtsLibcoreJsr166TestCases, CtsLibcoreLegacy22TestCases, CtsLibcoreOjTestCases, CtsLibcoreOkHttpTestCases, CtsLibcoreTestCases, CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofConscryptTestCases, CtsLiblogTestCases, CtsLocation2TestCases, CtsLocationTestCases, CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--include-filter', 'CtsKernelConfigTestCases', '--include-filter', 'CtsKeystoreTestCases', '--include-filter', 'CtsLeanbackJankTestCases', '--include-filter', 'CtsLegacyNotificationTestCases', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--include-filter', 'CtsLiblogTestCases', '--include-filter', 'CtsLocation2TestCases', '--include-filter', 'CtsLocationTestCases', '--include-filter', 'CtsLogdTestCases', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=62700)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases
deleted file mode 100644
index 29c3e1e..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        needs_push_media=True,
-        tag='9.0_r14.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaBitstreamsTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases
deleted file mode 100644
index 3c6d5c1..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
deleted file mode 100644
index 27571f5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        needs_push_media=True,
-        tag='9.0_r14.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaStressTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaTestCases_-_CtsMediaTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaTestCases_-_CtsMediaTestCases
deleted file mode 100644
index b5593a9..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMediaTestCases_-_CtsMediaTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsMediaTestCases_-_CtsMediaTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        needs_push_media=True,
-        tag='9.0_r14.x86.all.CtsMediaTestCases_-_CtsMediaTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsMediaTestCases_-_CtsMediaTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.CtsMediaTestCases_-_CtsMediaTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=36000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases
deleted file mode 100644
index fbb8f8b..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMidiTestCases, CtsMockingDebuggableTestCases, CtsMockingTestCases, CtsMonkeyTestCases, CtsMultiUserHostTestCases, CtsMultiUserTestCases, CtsNNAPITestCases, CtsNativeHardwareTestCases, CtsNativeMediaAAudioTestCases, CtsNativeMediaSlTestCases, CtsNativeMediaXaTestCases, CtsNativeNetTestCases, CtsNdefTestCases, CtsNetSecConfigAttributeTestCases, CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigResourcesSrcTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetTestCases, CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyPermission22, CtsOmapiTestCases, CtsOpenGLTestCases, CtsOpenGlPerf2TestCases, CtsOpenGlPerfTestCases, CtsOsHostTestCases, CtsOsTestCases, CtsPdfTestCases, CtsPerfettoTestCases, CtsPermission2TestCases, CtsPermissionTestCases, CtsPreference2TestCases, CtsPreferenceTestCases, CtsPrintTestCases, CtsProtoTestCases, CtsProviderTestCases, CtsRenderscriptLegacyTestCases, CtsRenderscriptTestCases, CtsRsBlasTestCases, CtsRsCppTestCases, CtsSampleDeviceTestCases, CtsSampleHostTestCases, CtsSaxTestCases, CtsSeccompHostTestCases, CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases3, CtsSecurityBulletinHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMidiTestCases', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingTestCases', '--include-filter', 'CtsMonkeyTestCases', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserTestCases', '--include-filter', 'CtsNNAPITestCases', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeNetTestCases', '--include-filter', 'CtsNdefTestCases', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--include-filter', 'CtsOmapiTestCases', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerfTestCases', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsTestCases', '--include-filter', 'CtsPdfTestCases', '--include-filter', 'CtsPerfettoTestCases', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermissionTestCases', '--include-filter', 'CtsPreference2TestCases', '--include-filter', 'CtsPreferenceTestCases', '--include-filter', 'CtsPrintTestCases', '--include-filter', 'CtsProtoTestCases', '--include-filter', 'CtsProviderTestCases', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptTestCases', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsCppTestCases', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleHostTestCases', '--include-filter', 'CtsSaxTestCases', '--include-filter', 'CtsSeccompHostTestCases', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
-        timeout=106200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
deleted file mode 100644
index 40bf38f..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSecurityHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        precondition_commands=['echo 3 > /proc/sys/kernel/perf_event_paranoid', 'modprobe configs'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
deleted file mode 100644
index 71fb15a..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecurityTestCases, CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityTestCases', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsSensorTestCases_-_CtsSensorTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsSensorTestCases_-_CtsSensorTestCases
deleted file mode 100644
index 8def536..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsSensorTestCases_-_CtsSensorTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsSensorTestCases_-_CtsSensorTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=30,
-        tag='9.0_r14.x86.all.CtsSensorTestCases_-_CtsSensorTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsSensorTestCases_-_CtsSensorTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSensorTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsSensorTestCases_-_CtsSensorTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases
deleted file mode 100644
index f00f617..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerTestCases, CtsSimRestrictedApisTestCases, CtsSimpleCpuTestCases, CtsSimpleperfTestCases, CtsSkQPTestCases, CtsSliceTestCases, CtsSpeechTestCases, CtsStatsdHostTestCases, CtsSustainedPerformanceHostTestCases, CtsSyncAccountAccessOtherCertTestCases, CtsSyncContentHostTestCases, CtsSyncManagerTestsCases, CtsSystemApiAnnotationTestCases, CtsSystemApiSignatureTestCases, CtsSystemIntentTestCases, CtsSystemUiHostTestCases, CtsSystemUiTestCases, CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases3, CtsTelephony2TestCases, CtsTelephonyTestCases, CtsTextTestCases, CtsThemeDeviceTestCases, CtsThemeHostTestCases, CtsToastLegacyTestCases, CtsToastTestCases, CtsTransitionTestCases, CtsTrustedVoiceHostTestCases, CtsTvProviderTestCases, CtsTvTestCases, CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUiRenderingTestCases, CtsUidIsolationTestCases, CtsUsageStatsTestCases, CtsUsbTests, CtsUtilTestCases, CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerTestCases', '--include-filter', 'CtsSimRestrictedApisTestCases', '--include-filter', 'CtsSimpleCpuTestCases', '--include-filter', 'CtsSimpleperfTestCases', '--include-filter', 'CtsSkQPTestCases', '--include-filter', 'CtsSliceTestCases', '--include-filter', 'CtsSpeechTestCases', '--include-filter', 'CtsStatsdHostTestCases', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncManagerTestsCases', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiTestCases', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases3', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephonyTestCases', '--include-filter', 'CtsTextTestCases', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeHostTestCases', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastTestCases', '--include-filter', 'CtsTransitionTestCases', '--include-filter', 'CtsTrustedVoiceHostTestCases', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvTestCases', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiDeviceTestCases', '--include-filter', 'CtsUiRenderingTestCases', '--include-filter', 'CtsUidIsolationTestCases', '--include-filter', 'CtsUsageStatsTestCases', '--include-filter', 'CtsUsbTests', '--include-filter', 'CtsUtilTestCases', '--include-filter', 'CtsVideoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
-        target_plan=None,
-        bundle='x86',
-        extra_artifacts_host=['/tmp/diff_*.png'],
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        prerequisites=['bluetooth', 'region_us'],
-        timeout=77400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsViewTestCases_-_CtsViewTestCases b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsViewTestCases_-_CtsViewTestCases
deleted file mode 100644
index 21a3462..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsViewTestCases_-_CtsViewTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsViewTestCases_-_CtsViewTestCases'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsViewTestCases_-_CtsViewTestCases',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsViewTestCases_-_CtsViewTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsViewTestCases_-_CtsViewTestCases',
-        target_plan=None,
-        bundle='x86',
-        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        precondition_commands=['sleep 60'],
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsVmTestCases_-_vm-tests-tf b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsVmTestCases_-_vm-tests-tf
deleted file mode 100644
index b004fe8..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.all.CtsVmTestCases_-_vm-tests-tf
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.all.CtsVmTestCases_-_vm-tests-tf'
-ATTRIBUTES = 'suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVmTestCases, CtsVoiceInteractionTestCases, CtsVoiceSettingsTestCases, CtsVrTestCases, CtsWebkitTestCases, CtsWidgetTestCases, CtsWindowManagerDeviceTestCases, CtsWrapNoWrapTestCases, CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugTestCases, CtsWrapWrapNoDebugTestCases, cts-system-all.api, signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases3, vm-tests-tf of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='9.0_r14.x86.all.CtsVmTestCases_-_vm-tests-tf',
-        test_name='cheets_CTS_P.9.0_r14.x86.all.CtsVmTestCases_-_vm-tests-tf',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVmTestCases', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceSettingsTestCases', '--include-filter', 'CtsVrTestCases', '--include-filter', 'CtsWebkitTestCases', '--include-filter', 'CtsWidgetTestCases', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--include-filter', 'cts-system-all.api', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--include-filter', 'vm-tests-tf', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='all.CtsVmTestCases_-_vm-tests-tf',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        prerequisites=['region_us'],
-        timeout=30600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.cts-system-all.api b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.cts-system-all.api
deleted file mode 100644
index b0dbc20..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.cts-system-all.api
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.cts-system-all.api'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module cts-system-all.api of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.cts-system-all.api',
-        test_name='cheets_CTS_P.9.0_r14.x86.cts-system-all.api',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'cts-system-all.api', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='cts-system-all.api',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.signed-CtsSecureElementAccessControl b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.signed-CtsSecureElementAccessControl
deleted file mode 100644
index c0a3e80..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.signed-CtsSecureElementAccessControl
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.signed-CtsSecureElementAccessControl'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.signed-CtsSecureElementAccessControl',
-        test_name='cheets_CTS_P.9.0_r14.x86.signed-CtsSecureElementAccessControl',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='signed-CtsSecureElementAccessControl',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.tradefed-run-collect-tests-only-internal
deleted file mode 100644
index eed3728..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.tradefed-run-collect-tests-only-internal
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.tradefed-run-collect-tests-only-internal'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-qual'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'LENGTHY'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run all of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='9.0_r14.x86.tradefed-run-collect-tests-only-internal',
-        test_name='cheets_CTS_P.9.0_r14.x86.tradefed-run-collect-tests-only-internal',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true', '--dynamic-config-url='],
-        retry_template=None,
-        target_module=None,
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.vm-tests-tf b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.vm-tests-tf
deleted file mode 100644
index f2965d5..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.vm-tests-tf
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.vm-tests-tf'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vm-tests-tf of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.vm-tests-tf',
-        test_name='cheets_CTS_P.9.0_r14.x86.vm-tests-tf',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'vm-tests-tf', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='vm-tests-tf',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.wm-presubmit b/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.wm-presubmit
deleted file mode 100644
index 04e0472..0000000
--- a/server/site_tests/cheets_CTS_P/control.9.0_r14.x86.wm-presubmit
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_P.9.0_r14.x86.wm-presubmit'
-ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:vmtest-informational1'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module wm-presubmit of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_P',
-        hosts=host_list,
-        iterations=1,
-        tag='9.0_r14.x86.wm-presubmit',
-        test_name='cheets_CTS_P.9.0_r14.x86.wm-presubmit',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--include-filter', 'CtsAppTestCases android.app.cts.TaskDescriptionTest', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--test-arg', 'com.android.compatibility.common.tradefed.testtype.JarHostTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.AndroidJUnitTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.HostTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.AndroidJUnitTest:exclude-annotation:androidx.test.filters.FlakyTest', '--logcat-on-failure', '--dynamic-config-url='],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
-        target_module='wm-presubmit',
-        target_plan=None,
-        bundle='x86',
-        uri='gs://chromeos-arc-images/cts/bundle/P/android-cts-9.0_r14-linux_x86-x86.zip',
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r20.arm.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_P/control.9.0_r20.arm.tradefed-run-collect-tests-only-internal
new file mode 100644
index 0000000..4940d30
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.9.0_r20.arm.tradefed-run-collect-tests-only-internal
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.9.0_r20.arm.tradefed-run-collect-tests-only-internal'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'LENGTHY'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run all of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=0,
+        tag='9.0_r20.arm.tradefed-run-collect-tests-only-internal',
+        test_name='cheets_CTS_P.9.0_r20.arm.tradefed-run-collect-tests-only-internal',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true', '--dynamic-config-url='],
+        retry_template=None,
+        target_module=None,
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.9.0_r20.x86.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_P/control.9.0_r20.x86.tradefed-run-collect-tests-only-internal
new file mode 100644
index 0000000..4774e47
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.9.0_r20.x86.tradefed-run-collect-tests-only-internal
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.9.0_r20.x86.tradefed-run-collect-tests-only-internal'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'LENGTHY'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run all of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=0,
+        tag='9.0_r20.x86.tradefed-run-collect-tests-only-internal',
+        test_name='cheets_CTS_P.9.0_r20.x86.tradefed-run-collect-tests-only-internal',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true', '--dynamic-config-url='],
+        retry_template=None,
+        target_module=None,
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAbiOverrideHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAbiOverrideHostTestCases
index 6b992d7..4630987 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAbiOverrideHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAbiOverrideHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAbiOverrideHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAccelerationTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAccelerationTestCases
index 356a62e..8e5891d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAccelerationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAccelerationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAccessibilityServiceTestCases
index 7768b2d..bef5ea7 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAccessibilityServiceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAccessibilityServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAccessibilityTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAccessibilityTestCases
index 6a50ded..f7e264a 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAccessibilityTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAccessibilityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAccountManagerTestCases
index 778d513..79b1a01 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAccountManagerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAccountManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsActivityManagerDeviceSdk25TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsActivityManagerDeviceSdk25TestCases
index b5c8dd8..282ef7c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsActivityManagerDeviceSdk25TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsActivityManagerDeviceSdk25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsActivityManagerDeviceSdk25TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsActivityManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsActivityManagerDeviceTestCases
index 93b3bb5..6b99a85 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsActivityManagerDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsActivityManagerDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsActivityManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAdminPackageInstallerTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAdminPackageInstallerTestCases
index 9d020cc..89d0e98 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAdminPackageInstallerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAdminPackageInstallerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdminPackageInstallerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAdminTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAdminTestCases
index 1491e09..466d402 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAdminTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAdminTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAlarmClockTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAlarmClockTestCases
index 74230bc..aef039c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAlarmClockTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAlarmClockTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAlarmClockTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAlarmManagerTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAlarmManagerTestCases
index 17b7e22..974f04a 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAlarmManagerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAlarmManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAlarmManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidAppTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidAppTestCases
index 965d638..342dd2b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidAppTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidAppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidAppTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestBase27ApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestBase27ApiSignatureTestCases
index e2bc5f2..690d31b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestBase27ApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestBase27ApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestBase27ApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestMockCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestMockCurrentApiSignatureTestCases
index 23a754d..9b8c4ee 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestMockCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestMockCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestMockCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestRunnerCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestRunnerCurrentApiSignatureTestCases
index cdb4c34..432c94f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestRunnerCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAndroidTestRunnerCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestRunnerCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAnimationTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAnimationTestCases
index 9c32026..8b1d3b0 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAnimationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAnimationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAnimationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacy27ApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacy27ApiSignatureTestCases
index 095981d..21d29d8 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacy27ApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacy27ApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacyCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacyCurrentApiSignatureTestCases
index 8e74b1b..03665a1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacyCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacyCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacyCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
index 8db3b7c..bd532f2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAppComponentFactoryTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAppComponentFactoryTestCases
index 20c0a7b..e139369 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAppComponentFactoryTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAppComponentFactoryTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppComponentFactoryTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAppSecurityHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAppSecurityHostTestCases
index 65b02b1..baf07c6 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAppSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAppSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAppTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAppTestCases
index 3a1c5cd..0902f42 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAppTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAppTestCases.feature.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsAppTestCases.feature.ctshardware
new file mode 100644
index 0000000..7fe8cb8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAppTestCases.feature.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsAppTestCases.feature.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppTestCases.feature of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsAppTestCases.feature.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsAppTestCases.feature.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppTestCases android.app.cts.SystemFeaturesTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAppTestCases.feature',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAppUsageHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAppUsageHostTestCases
index 83a5b7f..919333e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAppUsageHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAppUsageHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppUsageHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAppWidgetTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAppWidgetTestCases
index 25d9aed..067bfa5 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAppWidgetTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAppWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppWidgetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAslrMallocTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAslrMallocTestCases
index b20da75..9103a1c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAslrMallocTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAslrMallocTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAslrMallocTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAssistTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAssistTestCases
index e4ef1d1..b50e26f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAssistTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAssistTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAssistTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAtraceHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAtraceHostTestCases
index 2bafeac..bc77a75 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAtraceHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAtraceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAtraceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsAutoFillServiceTestCases
index b06cd47..53931d2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsAutoFillServiceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsAutoFillServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAutoFillServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsBackgroundRestrictionsTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsBackgroundRestrictionsTestCases
index 3cc5620..68956d8 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsBackgroundRestrictionsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsBackgroundRestrictionsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackgroundRestrictionsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsBackupHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsBackupHostTestCases
index e5b40c6..7013988 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsBackupHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsBackupHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackupHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsBackupTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsBackupTestCases
index b3ea2d6..f1e0283 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsBackupTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsBackupTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsBatterySavingTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsBatterySavingTestCases
index d30100c..313f62d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsBatterySavingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsBatterySavingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBatterySavingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsBionicTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsBionicTestCases
index 0ab3d9f..855be6e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsBionicTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsBionicTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBionicTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsBluetoothTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsBluetoothTestCases
index c1f69dd..75b8635 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsBluetoothTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsBluetoothTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBluetoothTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsBootStatsTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsBootStatsTestCases
index cbb0a7b..d07a5c6 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsBootStatsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsBootStatsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBootStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsCalendarcommon2TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsCalendarcommon2TestCases
index 0996ae4..0894dc2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsCalendarcommon2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsCalendarcommon2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCalendarcommon2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsCameraApi25TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsCameraApi25TestCases
index 6e7e553..5765c33 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsCameraApi25TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsCameraApi25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCameraApi25TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsCameraTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsCameraTestCases
index 3229a41..a57606b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsCameraTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsCameraTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsCameraTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsCameraTestCases.ctshardware
new file mode 100644
index 0000000..c55a349
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsCameraTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsCameraTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, lighting'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsCameraTestCases.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsCameraTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsCarTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsCarTestCases
index 2552b87..a906048 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsCarTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsCarTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCarTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsCarrierApiTestCases
index 4aef96d..dd4ed9e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsCarrierApiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsCarrierApiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsColorModeTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsColorModeTestCases
index 9082e7b..0cd5a56 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsColorModeTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsColorModeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsColorModeTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsCompilationTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsCompilationTestCases
index a292514..ef9a287 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsCompilationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsCompilationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCompilationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsContactsProviderWipe b/server/site_tests/cheets_CTS_P/control.arm.CtsContactsProviderWipe
index 3bb01c8..3808284 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsContactsProviderWipe
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsContactsProviderWipe
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContactsProviderWipe of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsContentTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsContentTestCases
index b828df4..a867ec2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsContentTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsContentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsCppToolsTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsCppToolsTestCases
index c96a6d7..e9f1fac 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsCppToolsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsCppToolsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCppToolsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsCurrentApiSignatureTestCases
index 7767d31..cd56edc 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDatabaseTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDatabaseTestCases
index 7da56f0..365bfc1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDatabaseTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDatabaseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDatabaseTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDebugTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDebugTestCases
index 71958eb..fe20ce2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDebugTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases
index 80fc2ab..b4b7218 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-EGL b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-EGL
index 655729c..c64c806 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-EGL
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-EGL
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-EGL of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES2 b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES2
index 69795ec..9d381d9 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES2
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-GLES2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES3 b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES3
index 5c3bb44..1dfa280 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES3
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
new file mode 100644
index 0000000..84668e7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases dEQP-GLES3.functional.prerequisite#*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES31 b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES31
index e6a81e9..81ac75a 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES31
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-GLES31
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-GLES31 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.api b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.api
index 477f827..06bade9 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.api
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.api
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.api of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.binding_model b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.binding_model
index 30f1ab1..1c2532b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.binding_model
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.binding_model
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.binding_model of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.clipping b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.clipping
index 2c4b042..6990b80 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.clipping
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.clipping
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.clipping of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.compute b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.compute
index 1b1117a..90a7104 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.compute
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.compute
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.compute of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.device_group b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.device_group
index f3a3f69..6cd1481 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.device_group
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.device_group
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.device_group of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.draw b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.draw
index 96c5394..c47fc71 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.draw
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.draw
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.draw of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.dynamic_state b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.dynamic_state
index ce605ad..f0a35dc 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.dynamic_state
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.dynamic_state
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.dynamic_state of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.fragment_operations b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.fragment_operations
index c090f18..a453a6a 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.fragment_operations
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.fragment_operations
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.fragment_operations of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.geometry b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.geometry
index e8c1d31..ece880f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.geometry
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.geometry
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.geometry of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.glsl b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.glsl
index 6123aa5..cadcbc4 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.glsl
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.glsl
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.glsl of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.image b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.image
index bbdf821..b99a5da 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.image
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.image
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.image of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.info b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.info
index 6b48d29..84302fe 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.info
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.info
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.info of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.memory b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.memory
index 2486ffe..442e975 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.memory
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.memory
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.memory of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.multiview b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.multiview
index 42f5c05..7bb392e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.multiview
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.multiview
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.multiview of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.pipeline b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.pipeline
index 30b7d5c..1685e43 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.pipeline
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.pipeline
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.pipeline of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.protected_memory b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.protected_memory
index 24a2a57..f462ec5 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.protected_memory
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.protected_memory
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.protected_memory of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.query_pool b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.query_pool
index 3b47cd4..d305be7 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.query_pool
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.query_pool
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.query_pool of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.rasterization b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.rasterization
index f97c3a4..0d264c1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.rasterization
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.rasterization
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.rasterization of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.renderpass b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.renderpass
index cd1c76f..c94d4c1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.renderpass
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.renderpass
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.renderpass of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.renderpass2 b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.renderpass2
index c54bf3c..1be68ae 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.renderpass2
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.renderpass2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.renderpass2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.robustness b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.robustness
index 2f6b496..8227961 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.robustness
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.robustness
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.robustness of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.sparse_resources b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.sparse_resources
index 6f1c1fa..71c21db 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.sparse_resources
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.sparse_resources
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.sparse_resources of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.spirv_assembly b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.spirv_assembly
index 4999a67..c15b874 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.spirv_assembly
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.spirv_assembly
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.spirv_assembly of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ssbo b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ssbo
index 8e8f8f1..814aa05 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ssbo
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ssbo
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.ssbo of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups
index 132a127..e6d9904 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic
index f89f41c..0305a53 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.b b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.b
index 40c362d..adea6f8 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.b
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.b
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.b of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.clustered b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.clustered
index f50d403..6cdf95b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.clustered
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.clustered
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.clustered of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.quad b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.quad
index 89f48d6..b662240 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.quad
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.quad
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.quad of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.s b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.s
index 4535b2a..8c7ccaf 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.s
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.s
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.s of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.vote b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.vote
index 21d9d7c..48e2ff4 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.vote
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.subgroups.vote
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.vote of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.synchronization b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.synchronization
index 2c37b02..e9e2586 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.synchronization
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.synchronization
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.synchronization of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.tessellation b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.tessellation
index 7521140..e6fa173 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.tessellation
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.tessellation
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.tessellation of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.texture b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.texture
index 742bd14..a5a8b44 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.texture
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.texture
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.texture of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ubo b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ubo
index e6625da..795dc9a 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ubo
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ubo
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.ubo of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.wsi b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.wsi
index 8aa3a6f..f15033c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.wsi
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.wsi
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.wsi of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ycbcr b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ycbcr
index e7c1dee..2e130b7 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ycbcr
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeqpTestCases.dEQP-VK.ycbcr
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.ycbcr of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDeviceIdleHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDeviceIdleHostTestCases
index bf869fe..1e1ce2f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDeviceIdleHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDeviceIdleHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDeviceIdleHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDevicePolicyManagerTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDevicePolicyManagerTestCases
index 0fcc5e6..a386e6c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDevicePolicyManagerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDevicePolicyManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDexMetadataHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDexMetadataHostTestCases
index 5d4ec2a..7ab6b21 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDexMetadataHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDexMetadataHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDexMetadataHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDisplayTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDisplayTestCases
index 440d745..fdd2dae 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDisplayTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDisplayTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDisplayTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDpiTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDpiTestCases
index c96369b..2f72370 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDpiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDpiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDpiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDpiTestCases2 b/server/site_tests/cheets_CTS_P/control.arm.CtsDpiTestCases2
index f29db06..fb64e24 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDpiTestCases2
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDpiTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDpiTestCases2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDreamsTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDreamsTestCases
index 3565ae6..83bc087 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDreamsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDreamsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDreamsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDrmTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDrmTestCases
index b0f3982..9302883 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDrmTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDrmTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDrmTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDumpsysHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDumpsysHostTestCases
index 67fe73e..8f76e59 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDumpsysHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDumpsysHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDumpsysHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsDynamicLinkerTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsDynamicLinkerTestCases
index 017426c..c285af2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsDynamicLinkerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsDynamicLinkerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDynamicLinkerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsEdiHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsEdiHostTestCases
index 9b51623..af975c2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsEdiHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsEdiHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsEdiHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsEffectTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsEffectTestCases
index 01f21b1..cd01d4f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsEffectTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsEffectTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsEffectTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsExternalServiceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsExternalServiceTestCases
index 2a562b8..6d6f2d1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsExternalServiceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsExternalServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExternalServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsExternalSourcesTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsExternalSourcesTestCases
index e639dd2..402007a 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsExternalSourcesTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsExternalSourcesTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExternalSourcesTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsFileSystemTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsFileSystemTestCases
index 6fcd7a9..59ee18a 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsFileSystemTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsFileSystemTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsFragmentTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsFragmentTestCases
index 320aaf2..5572140 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsFragmentTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsFragmentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsFragmentTestCasesSdk26 b/server/site_tests/cheets_CTS_P/control.arm.CtsFragmentTestCasesSdk26
index a3f884e..50fcd7a 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsFragmentTestCasesSdk26
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsFragmentTestCasesSdk26
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCasesSdk26 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsGestureTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsGestureTestCases
index 7521233..e2c35a0 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsGestureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsGestureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGestureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsGpuToolsHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsGpuToolsHostTestCases
index 0255cd5..9b99fc8 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsGpuToolsHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsGpuToolsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGpuToolsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsGraphicsTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsGraphicsTestCases
index 25144f6..cd8c280 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsGraphicsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsGraphicsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGraphicsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHardwareTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsHardwareTestCases
index 222b416..c08269e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHardwareTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHardwareTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHardwareTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHarmfulAppWarningHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsHarmfulAppWarningHostTestCases
index f3412a2..4d7c82b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHarmfulAppWarningHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHarmfulAppWarningHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHarmfulAppWarningHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistApi27TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistApi27TestCases
index dfcb983..bb2c256 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistApi27TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistApi27TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistCurrentApiTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistCurrentApiTestCases
index 47cc204..a2c4e0f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistCurrentApiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistCurrentApiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistCurrentApiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistDebugClassTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistDebugClassTestCases
index 3dd4379..e0a8bfb 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistDebugClassTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiBlacklistDebugClassTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistDebugClassTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchDebugClassTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchDebugClassTestCases
index 43a0e2e..f256d11 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchDebugClassTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchDebugClassTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchDebugClassTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchWhitelistTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchWhitelistTestCases
index cf27938..3549a8b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchWhitelistTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchWhitelistTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchWhitelistTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchWildcardTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchWildcardTestCases
index 65cfebf..a18bc78 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchWildcardTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHiddenApiKillswitchWildcardTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchWildcardTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHostTzDataTests b/server/site_tests/cheets_CTS_P/control.arm.CtsHostTzDataTests
index 4831c44..17fd9aa 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHostTzDataTests
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHostTzDataTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostTzDataTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideNetworkTests b/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideNetworkTests
index 008599a..42cbbbd 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideNetworkTests
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideNetworkTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideNetworkTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideNumberBlockingTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideNumberBlockingTestCases
index ee87f0d..42afe42 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideNumberBlockingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideNumberBlockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideNumberBlockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideTvTests b/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideTvTests
index a6e9daf..43a2181 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideTvTests
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideTvTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideTvTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideWebViewTests b/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideWebViewTests
index 84d3c8b..59770cf 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideWebViewTests
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsHostsideWebViewTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideWebViewTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsIcuTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsIcuTestCases
index cd6c457..a092fce 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsIcuTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsIcuTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIcuTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsIncidentHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsIncidentHostTestCases
index 431d766..1e0589c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsIncidentHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsIncidentHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIncidentHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsInlineMockingTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsInlineMockingTestCases
index 97f6d56..a0af93d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsInlineMockingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsInlineMockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInlineMockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsInputMethodServiceHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsInputMethodServiceHostTestCases
index 8bc7cb1..e085355 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsInputMethodServiceHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsInputMethodServiceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodServiceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsInputMethodTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsInputMethodTestCases
index 978674a..27fd13e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsInputMethodTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsInputMethodTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsIntentSignatureTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsIntentSignatureTestCases
index 09b8d9d..18e14af 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsIntentSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsIntentSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIntentSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJankDeviceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJankDeviceTestCases
index 1aa5569..558bfc3 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJankDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJankDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJankDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJdwpSecurityHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJdwpSecurityHostTestCases
index 8ca2f4b..31df9ad 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJdwpSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJdwpSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJdwpSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJdwpTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJdwpTestCases
index 3a112bc..fb387a0 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJdwpTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJdwpTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJdwpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJniTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJniTestCases
index 961262a..805c2c9 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJniTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJniTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJniTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJobSchedulerSharedUidTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJobSchedulerSharedUidTestCases
index 89a9d60..4c4757d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJobSchedulerSharedUidTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJobSchedulerSharedUidTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJobSchedulerSharedUidTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJobSchedulerTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJobSchedulerTestCases
index 2e68e51..55e06bb 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJobSchedulerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJobSchedulerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiAttachingHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiAttachingHostTestCases
index 6b354df..b1967e4 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiAttachingHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiAttachingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiAttachingHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiAttachingTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiAttachingTestCases
index c742317..3013da3 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiAttachingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiAttachingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiAttachingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRedefineClassesHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRedefineClassesHostTestCases
index b7a289e..4df638e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRedefineClassesHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRedefineClassesHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRedefineClassesHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1900HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1900HostTestCases
index f7c9d52..c4a8b75 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1900HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1900HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1900HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1901HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1901HostTestCases
index 8505eab..0bf1911 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1901HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1901HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1901HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1902HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1902HostTestCases
index 94cdd7f..efb62ed 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1902HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1902HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1902HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1903HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1903HostTestCases
index 77a9a21..c7c3b45 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1903HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1903HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1903HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1904HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1904HostTestCases
index 054f441..4a59201 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1904HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1904HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1904HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1906HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1906HostTestCases
index bea676b..b7f1a34 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1906HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1906HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1906HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1907HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1907HostTestCases
index 4a9e6c8..9deeb5d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1907HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1907HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1907HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1908HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1908HostTestCases
index 82e007c..554e9fd 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1908HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1908HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1908HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1909HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1909HostTestCases
index cb784db..5d76fcf 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1909HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1909HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1909HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1910HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1910HostTestCases
index 40ba8ff..5705bfd 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1910HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1910HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1910HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1911HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1911HostTestCases
index 46eacc0..e0fc031 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1911HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1911HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1911HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1912HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1912HostTestCases
index d195032..f6e4b9c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1912HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1912HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1912HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1913HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1913HostTestCases
index 7760632..8ccf8ac 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1913HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1913HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1913HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1914HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1914HostTestCases
index 02e4acc..c01c3de 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1914HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1914HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1914HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1915HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1915HostTestCases
index 631e8dc..b2a2f1b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1915HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1915HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1915HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1916HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1916HostTestCases
index c55a8b9..2e6e084 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1916HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1916HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1916HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1917HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1917HostTestCases
index 9d4a7e3..6e3ad2d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1917HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1917HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1917HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1920HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1920HostTestCases
index eee221a..ee1d3db 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1920HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1920HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1920HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1921HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1921HostTestCases
index db589df..2fdc6f7 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1921HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1921HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1921HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1922HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1922HostTestCases
index 1d16fab..75938fc 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1922HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1922HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1922HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1923HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1923HostTestCases
index 929a319..83bd8fa 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1923HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1923HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1923HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1924HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1924HostTestCases
index e37011b..0b8af24 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1924HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1924HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1924HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1925HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1925HostTestCases
index a765943..cceebc3 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1925HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1925HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1925HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1926HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1926HostTestCases
index e2ce52a..01376e7 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1926HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1926HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1926HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1927HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1927HostTestCases
index d08f8bc..fa6d4ca 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1927HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1927HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1927HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1928HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1928HostTestCases
index 8097c4f..57c3966 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1928HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1928HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1928HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1930HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1930HostTestCases
index 70ec9fb..a75376b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1930HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1930HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1930HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1931HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1931HostTestCases
index 1ae6f84..727a3fa 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1931HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1931HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1931HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1932HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1932HostTestCases
index 0f145b1..39dc62a 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1932HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1932HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1932HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1933HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1933HostTestCases
index ea7d1a0..11e0e56 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1933HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1933HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1933HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1934HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1934HostTestCases
index 6de30da..a9d3665 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1934HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1934HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1934HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1936HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1936HostTestCases
index a6bfbe6..352ed5b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1936HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1936HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1936HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1937HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1937HostTestCases
index c466373..03896fd 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1937HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1937HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1937HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1939HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1939HostTestCases
index a833a29..168ba37 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1939HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1939HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1939HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1941HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1941HostTestCases
index 019de79..ccd4762 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1941HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1941HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1941HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1942HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1942HostTestCases
index 5610770..60a66c8 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1942HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1942HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1942HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1943HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1943HostTestCases
index 7e2d4cd..0b140fa 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1943HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest1943HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1943HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest902HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest902HostTestCases
index d5fd557..f4dd0ae 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest902HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest902HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest902HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest903HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest903HostTestCases
index afcecf8..6619d34 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest903HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest903HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest903HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest904HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest904HostTestCases
index 21374b9..de50cfc 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest904HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest904HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest904HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest905HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest905HostTestCases
index 70463bf..252831e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest905HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest905HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest905HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest906HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest906HostTestCases
index 64233bd..6135ed1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest906HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest906HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest906HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest907HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest907HostTestCases
index 75d7c13..260d3d6 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest907HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest907HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest907HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest908HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest908HostTestCases
index 5b47445..11dd7eb 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest908HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest908HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest908HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest910HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest910HostTestCases
index 414506a..7ffae6f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest910HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest910HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest910HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest911HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest911HostTestCases
index be9a710..23ce6df 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest911HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest911HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest911HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest912HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest912HostTestCases
index bf82117..21917db 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest912HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest912HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest912HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest913HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest913HostTestCases
index c42c964..4c5eaf2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest913HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest913HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest913HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest914HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest914HostTestCases
index 4bc3a85..79ca8ec 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest914HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest914HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest914HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest915HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest915HostTestCases
index 4ec4bb3..7e66a45 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest915HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest915HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest915HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest917HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest917HostTestCases
index b55b108..544dfbe 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest917HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest917HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest917HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest918HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest918HostTestCases
index a82f42a..ec61363 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest918HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest918HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest918HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest919HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest919HostTestCases
index 87e1b5e..595a58b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest919HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest919HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest919HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest920HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest920HostTestCases
index bc5c821..3385f7d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest920HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest920HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest920HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest922HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest922HostTestCases
index eeadfd5..f4be608 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest922HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest922HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest922HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest923HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest923HostTestCases
index 5e4fa85..0b17521 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest923HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest923HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest923HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest924HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest924HostTestCases
index fea8d72..cb59750 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest924HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest924HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest924HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest926HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest926HostTestCases
index f389737..fa259a9 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest926HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest926HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest926HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest927HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest927HostTestCases
index 74e14bb..21bcf71 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest927HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest927HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest927HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest928HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest928HostTestCases
index f1d89d3..8a32b0c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest928HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest928HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest928HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest930HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest930HostTestCases
index 8d55cb9..47a26e8 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest930HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest930HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest930HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest931HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest931HostTestCases
index e434e31..daf431c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest931HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest931HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest931HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest932HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest932HostTestCases
index 8af9ac9..290e2ef 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest932HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest932HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest932HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest940HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest940HostTestCases
index 2d5c6ae..f446c04 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest940HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest940HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest940HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest942HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest942HostTestCases
index 811b335..43473e5 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest942HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest942HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest942HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest944HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest944HostTestCases
index 40c6e38..ec392c7 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest944HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest944HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest944HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest945HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest945HostTestCases
index ab9a43b..fc9c0d9 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest945HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest945HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest945HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest947HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest947HostTestCases
index 37218c0..cd3a3a1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest947HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest947HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest947HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest951HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest951HostTestCases
index 5795e78..3f6f46c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest951HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest951HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest951HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest982HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest982HostTestCases
index eef5dde..ce10e26 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest982HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest982HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest982HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest983HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest983HostTestCases
index 4acff1a..8f8208f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest983HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest983HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest983HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest984HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest984HostTestCases
index 4c0848c..24de4b6 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest984HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest984HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest984HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest985HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest985HostTestCases
index e89148e..6b38071 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest985HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest985HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest985HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest986HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest986HostTestCases
index e9c9dfe..c1d5ec7 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest986HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest986HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest986HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest988HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest988HostTestCases
index 61f0f07..a48eccc 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest988HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest988HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest988HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest989HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest989HostTestCases
index e3d0c35..07d1792 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest989HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest989HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest989HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest990HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest990HostTestCases
index 9c6ac9b..682b306 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest990HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest990HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest990HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest991HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest991HostTestCases
index 8aff563..08ab04e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest991HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest991HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest991HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest992HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest992HostTestCases
index d57b4a4..4760411 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest992HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest992HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest992HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest993HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest993HostTestCases
index df8f692..0cbf216 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest993HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest993HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest993HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest994HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest994HostTestCases
index 3c98176..5023542 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest994HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest994HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest994HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest995HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest995HostTestCases
index 3254a5c..fcec936 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest995HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest995HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest995HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest996HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest996HostTestCases
index 5373df4..2844be8 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest996HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest996HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest996HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest997HostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest997HostTestCases
index beedcc5..6a478c1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest997HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiRunTest997HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest997HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiTaggingHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiTaggingHostTestCases
index fd4ad6d..df2e5bf 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiTaggingHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiTaggingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiTaggingHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiTrackingHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiTrackingHostTestCases
index 5fe6829..3bd0ff9 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiTrackingHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsJvmtiTrackingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiTrackingHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsKernelConfigTestCases
index 534f2a1..4a4d743 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsKernelConfigTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsKernelConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsKeystoreTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsKeystoreTestCases
index 9d39df2..e26671c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsKeystoreTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsKeystoreTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsKeystoreTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLeanbackJankTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLeanbackJankTestCases
index 06ebd03..fc31e80 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLeanbackJankTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLeanbackJankTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLeanbackJankTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLegacyNotificationTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLegacyNotificationTestCases
index d930a2f..dddad40 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLegacyNotificationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLegacyNotificationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLegacyNotificationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreFileIOTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreFileIOTestCases
index 03081e4..c6c3e31 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreFileIOTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreFileIOTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreFileIOTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreJsr166TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreJsr166TestCases
index be6268c..152f379 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreJsr166TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreJsr166TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreJsr166TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreLegacy22TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreLegacy22TestCases
index 5a134a2..ed62aca 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreLegacy22TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreLegacy22TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreLegacy22TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreOjTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreOjTestCases
index e4c14d4..7ebcc27 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreOjTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreOjTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreOjTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreOkHttpTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreOkHttpTestCases
index fa9c22d..324534c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreOkHttpTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreOkHttpTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreOkHttpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreTestCases
index 023bc18..1ada49b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsLibcoreTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreWycheproofBCTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreWycheproofBCTestCases
index be6a807..cb90e26 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreWycheproofBCTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreWycheproofBCTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreWycheproofBCTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreWycheproofConscryptTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreWycheproofConscryptTestCases
index 5ae5399..02e7838 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreWycheproofConscryptTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLibcoreWycheproofConscryptTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreWycheproofConscryptTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLiblogTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLiblogTestCases
index d4eafa5..b7ee14e2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLiblogTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLiblogTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLiblogTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLocation2TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLocation2TestCases
index 4066690..344d1df 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLocation2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLocation2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocation2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLocationTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLocationTestCases
index 1a8dd4c..409dbc4 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLocationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLocationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsLogdTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsLogdTestCases
index 52d645d..d716734 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsLogdTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsLogdTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaBitstreamsTestCases
index 02a725b..33b3424 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsMediaBitstreamsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaBitstreamsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaHostTestCases
index 56f034f..8eae9fd 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsMediaHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaStressTestCases
index 386cd00..31811a6 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsMediaStressTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaStressTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMediaStressTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaStressTestCases.camera.ctshardware
new file mode 100644
index 0000000..ebe2f0d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaStressTestCases.camera.ctshardware
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsMediaStressTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsMediaStressTestCases.camera of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='arm.CtsMediaStressTestCases.camera.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsMediaStressTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases android.mediastress.cts.MediaRecorderStressTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases.camera',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMediaTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaTestCases
index bdbf75f..1c00130 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsMediaTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMediaTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaTestCases.ctshardware
new file mode 100644
index 0000000..b89061b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMediaTestCases.ctshardware
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsMediaTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, noloopback'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='arm.CtsMediaTestCases.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsMediaTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMidiTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsMidiTestCases
index eddcd9a..dd819ea 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsMidiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMidiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMockingDebuggableTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsMockingDebuggableTestCases
index 473fb3f..232b0da 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsMockingDebuggableTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMockingDebuggableTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMockingDebuggableTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMockingTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsMockingTestCases
index 4ad98a6..199f737 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsMockingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMonkeyTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsMonkeyTestCases
index 95be8c9..e0a69ae 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsMonkeyTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMonkeyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMonkeyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMultiUserHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsMultiUserHostTestCases
index eda2342..942e426 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsMultiUserHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMultiUserHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMultiUserHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsMultiUserTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsMultiUserTestCases
index 4e7b380..3e28951 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsMultiUserTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsMultiUserTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMultiUserTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNNAPITestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNNAPITestCases
index 901ef78..70b0115 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNNAPITestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNNAPITestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNNAPITestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeHardwareTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeHardwareTestCases
index 3db1356..3cff993 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeHardwareTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeHardwareTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeHardwareTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaAAudioTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaAAudioTestCases
index c8d1925..c38ca08 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaAAudioTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaAAudioTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaAAudioTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaAAudioTestCases.ctshardware
new file mode 100644
index 0000000..1be9f54
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaAAudioTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsNativeMediaAAudioTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsNativeMediaAAudioTestCases.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsNativeMediaAAudioTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeMediaAAudioTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNativeMediaAAudioTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaSlTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaSlTestCases
index 581385e..8a11a44 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaSlTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaSlTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaSlTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaXaTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaXaTestCases
index 07e9403..eab3796 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaXaTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeMediaXaTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaXaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeNetTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeNetTestCases
index 311fedd..2732a1b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNativeNetTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNativeNetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeNetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNdefTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNdefTestCases
index 8f408c4..369bab1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNdefTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNdefTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNdefTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigAttributeTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigAttributeTestCases
index dca4afc..3381f0b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigAttributeTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigAttributeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigAttributeTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDebugDisabledTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDebugDisabledTestCases
index 0127c7d..4c41fbf 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDebugDisabledTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDebugDisabledTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDebugDisabledTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDebugEnabledTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDebugEnabledTestCases
index 125cd0a..91f4587 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDebugEnabledTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDebugEnabledTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDebugEnabledTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDomainConfigTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDomainConfigTestCases
index 0757337..87a3f07 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDomainConfigTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigBasicDomainConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDomainConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigCleartextTrafficTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigCleartextTrafficTestCases
index 2a3fe3e..c1e1d06 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigCleartextTrafficTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigCleartextTrafficTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigCleartextTrafficTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigDownloadManagerTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigDownloadManagerTestCases
index 5e89551..c7feeaf 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigDownloadManagerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigDownloadManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigDownloadManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigInvalidPinTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigInvalidPinTestCases
index ab51b95..b28b97c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigInvalidPinTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigInvalidPinTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigInvalidPinTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigNestedDomainConfigTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigNestedDomainConfigTestCases
index dec7037..26f575e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigNestedDomainConfigTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigNestedDomainConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigNestedDomainConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigPrePCleartextTrafficTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigPrePCleartextTrafficTestCases
index 3cee01e..64f4f7f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigPrePCleartextTrafficTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigPrePCleartextTrafficTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigPrePCleartextTrafficTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigResourcesSrcTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigResourcesSrcTestCases
index c44cfd5..4cbb3fc 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigResourcesSrcTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecConfigResourcesSrcTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigResourcesSrcTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
index 6770995..cf162ee 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficFalseTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
index d97f8ae..52ebfcd 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficTrueTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
index d4e903f..cab8402 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCases
index a86c53b..6481cc7 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
@@ -29,7 +30,7 @@
         target_module='CtsNetTestCases',
         target_plan=None,
         bundle='arm',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi', 'android-sh -c \'setprop ctl.start mdnsd\''],
         retry_manual_tests=True,
         warn_on_test_retry=False,
         timeout=3600)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCases.ctshardware
new file mode 100644
index 0000000..8a31eae
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsNetTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='arm.CtsNetTestCases.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsNetTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNetTestCases',
+        target_plan=None,
+        bundle='arm',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi', 'android-sh -c \'setprop ctl.start mdnsd\''],
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCasesLegacyApi22 b/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCasesLegacyApi22
index 7478e77..f855152 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCasesLegacyApi22
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCasesLegacyApi22
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesLegacyApi22 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCasesLegacyPermission22 b/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCasesLegacyPermission22
index 99b0dde..511f852 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCasesLegacyPermission22
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsNetTestCasesLegacyPermission22
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesLegacyPermission22 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsOmapiTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsOmapiTestCases
index ed37fe2..7e05f6b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsOmapiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsOmapiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOmapiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGLTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGLTestCases
index f5d0d77..afd27517 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGLTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGLTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGLTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGlPerf2TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGlPerf2TestCases
index e41bf92..0c71e0e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGlPerf2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGlPerf2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGlPerf2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGlPerfTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGlPerfTestCases
index 445c9b5..df43d05 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGlPerfTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsOpenGlPerfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsOsHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsOsHostTestCases
index b51e741..160e024 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsOsHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsOsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsOsTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsOsTestCases
index bcf1016..93f6db0 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsOsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsOsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsPdfTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsPdfTestCases
index f9d2387..2d03c52 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsPdfTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsPdfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPdfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsPerfettoTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsPerfettoTestCases
index 09854dd..b06c563 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsPerfettoTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsPerfettoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsPerfettoTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsPerfettoTestCases.ctshardware
new file mode 100644
index 0000000..aa52f71
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsPerfettoTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsPerfettoTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsPerfettoTestCases.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsPerfettoTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPerfettoTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsPermission2TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsPermission2TestCases
index 956aac0..1e38158 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsPermission2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsPermission2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermission2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsPermissionTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsPermissionTestCases
index 6be7e41..1bb85f2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsPermissionTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsPermissionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermissionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsPermissionTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsPermissionTestCases.camera.ctshardware
new file mode 100644
index 0000000..98bbe52
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsPermissionTestCases.camera.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsPermissionTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermissionTestCases.camera of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsPermissionTestCases.camera.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsPermissionTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermissionTestCases android.permission.cts.CameraPermissionTest', '--include-filter', 'CtsPermissionTestCases android.permission.cts.Camera2PermissionTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPermissionTestCases.camera',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsPreference2TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsPreference2TestCases
index 7949b1c..d32b876 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsPreference2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsPreference2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPreference2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsPreferenceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsPreferenceTestCases
index 837e5bc..7ee0640 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsPreferenceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsPreferenceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPreferenceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsPrintTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsPrintTestCases
index 61fb339..8d89d4a 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsPrintTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsPrintTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPrintTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsProtoTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsProtoTestCases
index 594ed1d..de369cb 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsProtoTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsProtoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsProtoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsProviderTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsProviderTestCases
index 8e48c00..6b8ae94 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsProviderTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsProviderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsRenderscriptLegacyTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsRenderscriptLegacyTestCases
index 627779d..16acfea 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsRenderscriptLegacyTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsRenderscriptLegacyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRenderscriptLegacyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsRenderscriptTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsRenderscriptTestCases
index 4d191d5..3a4ebb8 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsRenderscriptTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsRenderscriptTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRenderscriptTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsRsBlasTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsRsBlasTestCases
index 1db68a0..9edd84f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsRsBlasTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsRsBlasTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRsBlasTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsRsCppTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsRsCppTestCases
index 58fdeee..759e069 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsRsCppTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsRsCppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRsCppTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSampleDeviceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSampleDeviceTestCases
index 74aad89..d0508d2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSampleDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSampleDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSampleHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSampleHostTestCases
index 163a9da..58beb9b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSampleHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSampleHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSaxTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSaxTestCases
index 3cdd5e4..586aabf 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSaxTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSaxTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSaxTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSeccompHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSeccompHostTestCases
index ddc6a0d..9305a5e 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSeccompHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSeccompHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSeccompHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases1 b/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases1
index 1578f40..84b0d81 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases1
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases1 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases2 b/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases2
index 5aa6b93..550faae 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases2
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases3 b/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases3
index 8e73cf5..4d5f734 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases3
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSecureElementAccessControlTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityBulletinHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityBulletinHostTestCases
index e0c4202..b5f5b79 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityBulletinHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityBulletinHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecurityBulletinHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityHostTestCases
index c3459fc..67648e6 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityTestCases
index f39f000..17f71d0 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSecurityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsSecurityTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdk25TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdk25TestCases
index a98e540..29b23b8 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdk25TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdk25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk25TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdk27TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdk27TestCases
index dfcd3fa..24e3f13 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdk27TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdk27TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk27TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdkCurrentTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdkCurrentTestCases
index aae9c8c..c8a2fbf 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdkCurrentTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSelinuxTargetSdkCurrentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSensorTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSensorTestCases
index 8432fbf..d479086 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSensorTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSensorTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSensorTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsSensorTestCases.ctshardware
new file mode 100644
index 0000000..0be17f1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSensorTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsSensorTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=12,
+        tag='arm.CtsSensorTestCases.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsSensorTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSensorTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsShortcutHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsShortcutHostTestCases
index 33c76e6..1be6308 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsShortcutHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsShortcutHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsShortcutManagerTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsShortcutManagerTestCases
index fac29fe..c3e284d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsShortcutManagerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsShortcutManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSimRestrictedApisTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSimRestrictedApisTestCases
index 6f510a3..411ea08 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSimRestrictedApisTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSimRestrictedApisTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimRestrictedApisTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSimpleCpuTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSimpleCpuTestCases
index 091a35f..ff29906 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSimpleCpuTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSimpleCpuTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimpleCpuTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSimpleperfTestCases
index 04463af..88a04ca 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSimpleperfTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSimpleperfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSkQPTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSkQPTestCases
index 01e16bb..09f9368 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSkQPTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSkQPTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSkQPTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSliceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSliceTestCases
index 353729b..8bb04fa 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSliceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSliceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSliceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSpeechTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSpeechTestCases
index c6949a3..2669777 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSpeechTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSpeechTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSpeechTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsStatsdHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsStatsdHostTestCases
index 35de3ef..be32617 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsStatsdHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsStatsdHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsStatsdHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSustainedPerformanceHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSustainedPerformanceHostTestCases
index 662d88d..8d8d2f6 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSustainedPerformanceHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSustainedPerformanceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSustainedPerformanceHostTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsSustainedPerformanceHostTestCases.ctshardware
new file mode 100644
index 0000000..08f10c2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSustainedPerformanceHostTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsSustainedPerformanceHostTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsSustainedPerformanceHostTestCases.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsSustainedPerformanceHostTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSustainedPerformanceHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSyncAccountAccessOtherCertTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSyncAccountAccessOtherCertTestCases
index 7de88d0..f935458 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSyncAccountAccessOtherCertTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSyncAccountAccessOtherCertTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSyncContentHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSyncContentHostTestCases
index 9e0dca1..2058b6b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSyncContentHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSyncContentHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncContentHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSyncManagerTestsCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSyncManagerTestsCases
index f816013..4cc0f7d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSyncManagerTestsCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSyncManagerTestsCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncManagerTestsCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSystemApiAnnotationTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSystemApiAnnotationTestCases
index 0d63821..7f33230 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSystemApiAnnotationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSystemApiAnnotationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemApiAnnotationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSystemApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSystemApiSignatureTestCases
index 215de9b..061be00 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSystemApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSystemApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSystemIntentTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSystemIntentTestCases
index 53a1357..72bce64 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSystemIntentTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSystemIntentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemIntentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSystemUiHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSystemUiHostTestCases
index 1115f10..00a7e78 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSystemUiHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSystemUiHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemUiHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsSystemUiTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsSystemUiTestCases
index 537393b..78d4616 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsSystemUiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsSystemUiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemUiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases
index c0baee1..a10c405 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases2 b/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases2
index b69eb60..47b9458 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases2
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases3 b/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases3
index 8bd0e39..a733e86 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases3
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsTelecomTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsTelephony2TestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsTelephony2TestCases
index 23ef7a7..36123b7 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsTelephony2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsTelephony2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephony2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsTelephonyTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsTelephonyTestCases
index c1bacdd..e3c348c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsTelephonyTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsTelephonyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsTextTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsTextTestCases
index d3c227c..636a92c 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsTextTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsTextTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTextTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsThemeDeviceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsThemeDeviceTestCases
index ca98d78..cc0265d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsThemeDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsThemeDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsThemeDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsThemeHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsThemeHostTestCases
index e5a32fa..1cfe6b2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsThemeHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsThemeHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsThemeHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsToastLegacyTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsToastLegacyTestCases
index c3f5583..d502a65 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsToastLegacyTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsToastLegacyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsToastLegacyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsToastTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsToastTestCases
index cb8f026..b33698b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsToastTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsToastTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsToastTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsTransitionTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsTransitionTestCases
index bdd5e55..535941f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsTransitionTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsTransitionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTransitionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsTrustedVoiceHostTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsTrustedVoiceHostTestCases
index 6ddd31d..a8551d5 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsTrustedVoiceHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsTrustedVoiceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTrustedVoiceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsTvProviderTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsTvProviderTestCases
index 1d7c7a7..703b46f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsTvProviderTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsTvProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTvProviderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsTvTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsTvTestCases
index 989887b..e06f6f1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsTvTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsTvTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTvTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsUiAutomationTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsUiAutomationTestCases
index 9318918..768f7e1 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsUiAutomationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsUiAutomationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiAutomationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsUiDeviceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsUiDeviceTestCases
index 5c4cd53..5af708b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsUiDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsUiDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsUiRenderingTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsUiRenderingTestCases
index 0dea1ac..0a47712 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsUiRenderingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsUiRenderingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiRenderingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsUidIsolationTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsUidIsolationTestCases
index 27ac015..e5d58c6 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsUidIsolationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsUidIsolationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUidIsolationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsUsageStatsTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsUsageStatsTestCases
index 8858c48..cf46592 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsUsageStatsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsUsageStatsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsUsageStatsTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsUsageStatsTestCases.ctshardware
new file mode 100644
index 0000000..bcf756e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsUsageStatsTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsUsageStatsTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='arm.CtsUsageStatsTestCases.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsUsageStatsTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsageStatsTestCases',
+        target_plan=None,
+        bundle='arm',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsUsbTests b/server/site_tests/cheets_CTS_P/control.arm.CtsUsbTests
index 511b639..a518d55 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsUsbTests
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsUsbTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsbTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsUtilTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsUtilTestCases
index 8884f55..c40c5c7 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsUtilTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsUtilTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUtilTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsVideoTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsVideoTestCases
index 4cf8a88..c9e7982 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsVideoTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsVideoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsViewTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsViewTestCases
index 7a7a301..6590bfe 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsViewTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsViewTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsViewTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.arm.CtsViewTestCases.ctshardware
new file mode 100644
index 0000000..b6e2ba4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsViewTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.CtsViewTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsViewTestCases.ctshardware',
+        test_name='cheets_CTS_P.arm.CtsViewTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='arm',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsVmTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsVmTestCases
index c77f83f..a3e0a27 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsVmTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsVmTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVmTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsVoiceInteractionTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsVoiceInteractionTestCases
index e16db10..2e52748 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsVoiceInteractionTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsVoiceInteractionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVoiceInteractionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsVoiceSettingsTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsVoiceSettingsTestCases
index 5d62dbe..cc056a2 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsVoiceSettingsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsVoiceSettingsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVoiceSettingsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsVrTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsVrTestCases
index a5cddba..481621b 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsVrTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsVrTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVrTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsWebkitTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsWebkitTestCases
index b1f03d5..00acebb 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsWebkitTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsWebkitTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWebkitTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsWidgetTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsWidgetTestCases
index 588294f..ab1e305 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsWidgetTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWidgetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -29,6 +30,6 @@
         bundle='arm',
         retry_manual_tests=True,
         warn_on_test_retry=False,
-        timeout=3600)
+        timeout=5400)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsWindowManagerDeviceTestCases
index cbb6428..7b1203f 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsWindowManagerDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsWindowManagerDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsWrapNoWrapTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsWrapNoWrapTestCases
index 8346a28..25da239 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsWrapNoWrapTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsWrapNoWrapTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapNoWrapTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapDebugMallocDebugTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapDebugMallocDebugTestCases
index f6645e1..72c1d6d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapDebugMallocDebugTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapDebugMallocDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapDebugMallocDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapDebugTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapDebugTestCases
index da6dbad..689ed35 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapDebugTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapNoDebugTestCases b/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapNoDebugTestCases
index db1eafd..f881bba 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapNoDebugTestCases
+++ b/server/site_tests/cheets_CTS_P/control.arm.CtsWrapWrapNoDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapNoDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.cts-system-all.api b/server/site_tests/cheets_CTS_P/control.arm.cts-system-all.api
index 6db0987..e614cad 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.cts-system-all.api
+++ b/server/site_tests/cheets_CTS_P/control.arm.cts-system-all.api
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module cts-system-all.api of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases1 b/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases1
index 235efa7..27dfc25 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases1
+++ b/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases1 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases2 b/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases2
index 0625455..dd6a8b4 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases2
+++ b/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases3 b/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases3
index c83e7a8..6381b3d 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases3
+++ b/server/site_tests/cheets_CTS_P/control.arm.signed-CtsSecureElementAccessControlTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.tradefed-run-collect-tests-only b/server/site_tests/cheets_CTS_P/control.arm.tradefed-run-collect-tests-only
index 29a9360..649e913 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.tradefed-run-collect-tests-only
+++ b/server/site_tests/cheets_CTS_P/control.arm.tradefed-run-collect-tests-only
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run all of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.arm.tradefed-run-collect-tests-only-hardware b/server/site_tests/cheets_CTS_P/control.arm.tradefed-run-collect-tests-only-hardware
new file mode 100644
index 0000000..2af8fc0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.arm.tradefed-run-collect-tests-only-hardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.arm.tradefed-run-collect-tests-only-hardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module tradefed-run-collect-tests-only-hardware of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.tradefed-run-collect-tests-only-hardware',
+        test_name='cheets_CTS_P.arm.tradefed-run-collect-tests-only-hardware',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--subplan', 'cts-hardware', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='tradefed-run-collect-tests-only-hardware',
+        target_plan='cts-hardware',
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.vm-tests-tf b/server/site_tests/cheets_CTS_P/control.arm.vm-tests-tf
index 9cc0834..d878ccd 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.vm-tests-tf
+++ b/server/site_tests/cheets_CTS_P/control.arm.vm-tests-tf
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module vm-tests-tf of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.arm.waivers b/server/site_tests/cheets_CTS_P/control.arm.waivers
index 7726c4c..1b54e17 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.waivers
+++ b/server/site_tests/cheets_CTS_P/control.arm.waivers
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -6,12 +6,13 @@
 
 AUTHOR = 'ARC++ Team'
 NAME = 'cheets_CTS_P.arm.waivers'
-ATTRIBUTES = 'suite:cts_P, suite:cts'
+ATTRIBUTES = 'suite:cts_P, suite:cts, suite:cts-hardware'
 DEPENDENCIES = 'arc'
 JOB_RETRIES = 1
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -30,7 +31,7 @@
         bundle='arm',
         retry_manual_tests=True,
         warn_on_test_retry=False,
-        uri='gs://chromeos-partner-gts/android-cts-6970114-linux_x86-arm.zip',
+        uri='DEV_MOBLAB',
         timeout=7200)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.arm.waivers-collect-tests-only b/server/site_tests/cheets_CTS_P/control.arm.waivers-collect-tests-only
index 93e1b5c..59b05c9 100644
--- a/server/site_tests/cheets_CTS_P/control.arm.waivers-collect-tests-only
+++ b/server/site_tests/cheets_CTS_P/control.arm.waivers-collect-tests-only
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -6,12 +6,13 @@
 
 AUTHOR = 'ARC++ Team'
 NAME = 'cheets_CTS_P.arm.waivers-collect-tests-only'
-ATTRIBUTES = 'suite:cts_P, suite:cts'
+ATTRIBUTES = 'suite:cts_P, suite:cts, suite:cts-hardware'
 DEPENDENCIES = 'arc'
 JOB_RETRIES = 1
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
@@ -32,7 +33,7 @@
         bundle='arm',
         retry_manual_tests=True,
         warn_on_test_retry=False,
-        uri='gs://chromeos-partner-gts/android-cts-6970114-linux_x86-arm.zip',
+        uri='DEV_MOBLAB',
         timeout=360)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAbiOverrideHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAbiOverrideHostTestCases
new file mode 100644
index 0000000..51e9219
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAbiOverrideHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAbiOverrideHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAbiOverrideHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAbiOverrideHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsAbiOverrideHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAbiOverrideHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAbiOverrideHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAccelerationTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAccelerationTestCases
new file mode 100644
index 0000000..181cd11
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAccelerationTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAccelerationTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-arc'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 2
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsAccelerationTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsAccelerationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccelerationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAccelerationTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=720)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAccessibility b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAccessibility
new file mode 100644
index 0000000..86d04e5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAccessibility
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAccessibility'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityServiceTestCases, CtsAccessibilityTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAccessibility',
+        test_name='cheets_CTS_P.internal.arm.CtsAccessibility',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityServiceTestCases', '--include-filter', 'CtsAccessibilityTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAccessibility',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAccountManagerTestCases
new file mode 100644
index 0000000..1719996
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAccountManagerTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAccountManagerTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsAccountManagerTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsAccountManagerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccountManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAccountManagerTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsActivityManagerDevice b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsActivityManagerDevice
new file mode 100644
index 0000000..02ceefb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsActivityManagerDevice
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsActivityManagerDevice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsActivityManagerDeviceSdk25TestCases, CtsActivityManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsActivityManagerDevice',
+        test_name='cheets_CTS_P.internal.arm.CtsActivityManagerDevice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsActivityManagerDevice',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAdmin b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAdmin
new file mode 100644
index 0000000..30ba700
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAdmin
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAdmin'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAdmin',
+        test_name='cheets_CTS_P.internal.arm.CtsAdmin',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAdmin',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAlarm b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAlarm
new file mode 100644
index 0000000..3036359
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAlarm
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAlarm'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAlarmClockTestCases, CtsAlarmManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAlarm',
+        test_name='cheets_CTS_P.internal.arm.CtsAlarm',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAlarmClockTestCases', '--include-filter', 'CtsAlarmManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAlarm',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAndroid b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAndroid
new file mode 100644
index 0000000..f8ec8f9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAndroid
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAndroid'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAndroidAppTestCases, CtsAndroidTestBase27ApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAndroid',
+        test_name='cheets_CTS_P.internal.arm.CtsAndroid',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidTestBase27ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAndroid',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAnimationTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAnimationTestCases
new file mode 100644
index 0000000..3da859d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAnimationTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAnimationTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAnimationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAnimationTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsAnimationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAnimationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAnimationTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsApacheHttpLegacy b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsApacheHttpLegacy
new file mode 100644
index 0000000..3759c18
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsApacheHttpLegacy
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsApacheHttpLegacy'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsApacheHttpLegacy',
+        test_name='cheets_CTS_P.internal.arm.CtsApacheHttpLegacy',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsApacheHttpLegacy',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsApp b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsApp
new file mode 100644
index 0000000..76ef5c3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsApp
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsApp'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppComponentFactoryTestCases, CtsAppSecurityHostTestCases, CtsAppTestCases, CtsAppUsageHostTestCases, CtsAppWidgetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        enable_default_apps=True,
+        tag='internal.arm.CtsApp',
+        test_name='cheets_CTS_P.internal.arm.CtsApp',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppWidgetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsApp',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAppTestCases.feature.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAppTestCases.feature.ctshardware
new file mode 100644
index 0000000..fdf63ee
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAppTestCases.feature.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAppTestCases.feature.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppTestCases.feature of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAppTestCases.feature.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsAppTestCases.feature.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppTestCases android.app.cts.SystemFeaturesTest', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAppTestCases.feature',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAslrMallocTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAslrMallocTestCases
new file mode 100644
index 0000000..134e769
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAslrMallocTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAslrMallocTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAslrMallocTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAslrMallocTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsAslrMallocTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAslrMallocTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAslrMallocTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAssistTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAssistTestCases
new file mode 100644
index 0000000..a144df3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAssistTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAssistTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAssistTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAssistTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsAssistTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAssistTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAssistTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAtraceHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAtraceHostTestCases
new file mode 100644
index 0000000..62ffc01
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAtraceHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAtraceHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAtraceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAtraceHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsAtraceHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAtraceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAtraceHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAutoFillServiceTestCases
new file mode 100644
index 0000000..8715507
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsAutoFillServiceTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsAutoFillServiceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAutoFillServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAutoFillServiceTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsAutoFillServiceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAutoFillServiceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAutoFillServiceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=21600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBackgroundRestrictionsTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBackgroundRestrictionsTestCases
new file mode 100644
index 0000000..e9ee352
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBackgroundRestrictionsTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsBackgroundRestrictionsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBackgroundRestrictionsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBackgroundRestrictionsTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsBackgroundRestrictionsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBackgroundRestrictionsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBackgroundRestrictionsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBackup b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBackup
new file mode 100644
index 0000000..cf57169
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBackup
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsBackup'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBackupHostTestCases, CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBackup',
+        test_name='cheets_CTS_P.internal.arm.CtsBackup',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBackup',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBatterySavingTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBatterySavingTestCases
new file mode 100644
index 0000000..d66015b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBatterySavingTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsBatterySavingTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBatterySavingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBatterySavingTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsBatterySavingTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBatterySavingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBatterySavingTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBionicTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBionicTestCases
new file mode 100644
index 0000000..3834808
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBionicTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsBionicTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBionicTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBionicTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsBionicTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBionicTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBionicTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBluetoothTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBluetoothTestCases
new file mode 100644
index 0000000..57bbf16
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBluetoothTestCases
@@ -0,0 +1,48 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsBluetoothTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBluetoothTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBluetoothTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsBluetoothTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBluetoothTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBluetoothTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        prerequisites=['bluetooth'],
+        hard_reboot_on_failure=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBootStatsTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBootStatsTestCases
new file mode 100644
index 0000000..ecfbeb8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsBootStatsTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsBootStatsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBootStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBootStatsTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsBootStatsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBootStatsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBootStatsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCalendarcommon2TestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCalendarcommon2TestCases
new file mode 100644
index 0000000..55958fd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCalendarcommon2TestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsCalendarcommon2TestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCalendarcommon2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCalendarcommon2TestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsCalendarcommon2TestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCalendarcommon2TestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCalendarcommon2TestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCamera b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCamera
new file mode 100644
index 0000000..205c0fa
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCamera
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsCamera'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraApi25TestCases, CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCamera',
+        test_name='cheets_CTS_P.internal.arm.CtsCamera',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCamera',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCameraTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCameraTestCases.ctshardware
new file mode 100644
index 0000000..348b2ea
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCameraTestCases.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsCameraTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCameraTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsCameraTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCameraTestCases.noled.camerabox.back b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCameraTestCases.noled.camerabox.back
new file mode 100644
index 0000000..f656934
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCameraTestCases.noled.camerabox.back
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsCameraTestCases.noled.camerabox.back'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, camerabox_light:noled, camerabox_facing:back'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        camera_facing='back',
+        cmdline_args=args,
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCameraTestCases.noled.camerabox.back',
+        test_name='cheets_CTS_P.internal.arm.CtsCameraTestCases.noled.camerabox.back',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        retry_manual_tests=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCameraTestCases.noled.camerabox.front b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCameraTestCases.noled.camerabox.front
new file mode 100644
index 0000000..1e4b4f5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCameraTestCases.noled.camerabox.front
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsCameraTestCases.noled.camerabox.front'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, camerabox_light:noled, camerabox_facing:front'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        camera_facing='front',
+        cmdline_args=args,
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCameraTestCases.noled.camerabox.front',
+        test_name='cheets_CTS_P.internal.arm.CtsCameraTestCases.noled.camerabox.front',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        retry_manual_tests=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCarTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCarTestCases
new file mode 100644
index 0000000..64fcd92
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCarTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsCarTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCarTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsCarTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCarTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCarrierApiTestCases
new file mode 100644
index 0000000..877815e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCarrierApiTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsCarrierApiTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCarrierApiTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsCarrierApiTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarrierApiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCarrierApiTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsColorModeTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsColorModeTestCases
new file mode 100644
index 0000000..e909290
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsColorModeTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsColorModeTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsColorModeTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsColorModeTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsColorModeTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsColorModeTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsColorModeTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCompilationTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCompilationTestCases
new file mode 100644
index 0000000..f7a879c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCompilationTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsCompilationTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCompilationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCompilationTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsCompilationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCompilationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCompilationTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsContactsProviderWipe b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsContactsProviderWipe
new file mode 100644
index 0000000..631996b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsContactsProviderWipe
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsContactsProviderWipe'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsContactsProviderWipe of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsContactsProviderWipe',
+        test_name='cheets_CTS_P.internal.arm.CtsContactsProviderWipe',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsContactsProviderWipe', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsContactsProviderWipe',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsContentTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsContentTestCases
new file mode 100644
index 0000000..521a42d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsContentTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsContentTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsContentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        enable_default_apps=True,
+        tag='internal.arm.CtsContentTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsContentTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsContentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsContentTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        prerequisites=['region_us'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCppToolsTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCppToolsTestCases
new file mode 100644
index 0000000..0f25085
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCppToolsTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsCppToolsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCppToolsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCppToolsTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsCppToolsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCppToolsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCppToolsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCurrentApiSignatureTestCases
new file mode 100644
index 0000000..1887fa4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsCurrentApiSignatureTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsCurrentApiSignatureTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCurrentApiSignatureTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsCurrentApiSignatureTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCurrentApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCurrentApiSignatureTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDatabaseTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDatabaseTestCases
new file mode 100644
index 0000000..ba441ca
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDatabaseTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDatabaseTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDatabaseTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDatabaseTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsDatabaseTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDatabaseTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDatabaseTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDebugTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDebugTestCases
new file mode 100644
index 0000000..7516f31
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDebugTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDebugTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDebugTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsDebugTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDebugTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.32
new file mode 100644
index 0000000..61ab5ee
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.32
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDeqpTestCases.32'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-deqp, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=15,
+        tag='internal.arm.CtsDeqpTestCases.32',
+        test_name='cheets_CTS_P.internal.arm.CtsDeqpTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'armeabi-v7a', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=72000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.64
new file mode 100644
index 0000000..305b19d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.64
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDeqpTestCases.64'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-deqp, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=15,
+        tag='internal.arm.CtsDeqpTestCases.64',
+        test_name='cheets_CTS_P.internal.arm.CtsDeqpTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'arm64-v8a', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=72000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-EGL b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-EGL
new file mode 100644
index 0000000..3910a3d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-EGL
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-EGL'
+ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-EGL of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-EGL',
+        test_name='cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-EGL',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-EGL.*', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES2 b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES2
new file mode 100644
index 0000000..4ffe526
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES2
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-GLES2'
+ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-GLES2',
+        test_name='cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-GLES2',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES2.*', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3 b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3
new file mode 100644
index 0000000..bdc994a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-GLES3'
+ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-GLES3',
+        test_name='cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-GLES3',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES3.*', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=21600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
new file mode 100644
index 0000000..a495ee8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases dEQP-GLES3.functional.prerequisite#*', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES31 b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES31
new file mode 100644
index 0000000..349c6e4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-GLES31
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-GLES31'
+ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES31 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-GLES31',
+        test_name='cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-GLES31',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES31.*', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=21600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-VK b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-VK
new file mode 100644
index 0000000..39cd0dc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDeqpTestCases.dEQP-VK
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-VK'
+ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-VK of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-VK',
+        test_name='cheets_CTS_P.internal.arm.CtsDeqpTestCases.dEQP-VK',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-VK.*', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=54000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDevice b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDevice
new file mode 100644
index 0000000..ab6f760
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDevice
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDevice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeviceIdleHostTestCases, CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDevice',
+        test_name='cheets_CTS_P.internal.arm.CtsDevice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDevice',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDexMetadataHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDexMetadataHostTestCases
new file mode 100644
index 0000000..10b8dfd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDexMetadataHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDexMetadataHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDexMetadataHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDexMetadataHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsDexMetadataHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDexMetadataHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDexMetadataHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDisplayTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDisplayTestCases
new file mode 100644
index 0000000..36531b9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDisplayTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDisplayTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDisplayTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDisplayTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsDisplayTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDisplayTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDisplayTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDpi b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDpi
new file mode 100644
index 0000000..3caea4a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDpi
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDpi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDpiTestCases, CtsDpiTestCases2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDpi',
+        test_name='cheets_CTS_P.internal.arm.CtsDpi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDpi',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDreamsTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDreamsTestCases
new file mode 100644
index 0000000..716ddec
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDreamsTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDreamsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDreamsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDreamsTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsDreamsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDreamsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDreamsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDrmTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDrmTestCases
new file mode 100644
index 0000000..81bea4a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDrmTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDrmTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDrmTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDrmTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsDrmTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDrmTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDrmTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDumpsysHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDumpsysHostTestCases
new file mode 100644
index 0000000..af7d9c0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDumpsysHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDumpsysHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDumpsysHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDumpsysHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsDumpsysHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDumpsysHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDumpsysHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDynamicLinkerTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDynamicLinkerTestCases
new file mode 100644
index 0000000..2ed3bb4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsDynamicLinkerTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsDynamicLinkerTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDynamicLinkerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDynamicLinkerTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsDynamicLinkerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDynamicLinkerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDynamicLinkerTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsEdiHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsEdiHostTestCases
new file mode 100644
index 0000000..196cca2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsEdiHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsEdiHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsEdiHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsEdiHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsEdiHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsEdiHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsEdiHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsEffectTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsEffectTestCases
new file mode 100644
index 0000000..f263dda
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsEffectTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsEffectTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsEffectTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsEffectTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsEffectTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsEffectTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsEffectTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsExternalS b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsExternalS
new file mode 100644
index 0000000..cf16639
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsExternalS
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsExternalS'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsExternalServiceTestCases, CtsExternalSourcesTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsExternalS',
+        test_name='cheets_CTS_P.internal.arm.CtsExternalS',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalSourcesTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsExternalS',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsFileSystemTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsFileSystemTestCases
new file mode 100644
index 0000000..70607ef
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsFileSystemTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsFileSystemTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsFileSystemTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsFileSystemTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsFileSystemTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsFileSystemTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsFragment b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsFragment
new file mode 100644
index 0000000..339a154
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsFragment
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsFragment'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsFragment',
+        test_name='cheets_CTS_P.internal.arm.CtsFragment',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsFragment',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsGestureTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsGestureTestCases
new file mode 100644
index 0000000..917a560
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsGestureTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsGestureTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGestureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsGestureTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsGestureTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGestureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsGestureTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsGpuToolsHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsGpuToolsHostTestCases
new file mode 100644
index 0000000..d91d9ab
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsGpuToolsHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsGpuToolsHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGpuToolsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsGpuToolsHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsGpuToolsHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGpuToolsHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsGpuToolsHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsGraphicsTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsGraphicsTestCases
new file mode 100644
index 0000000..3ee9cb1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsGraphicsTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsGraphicsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGraphicsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=5,
+        tag='internal.arm.CtsGraphicsTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsGraphicsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGraphicsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsGraphicsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHardwareTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHardwareTestCases
new file mode 100644
index 0000000..c0e1c5b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHardwareTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsHardwareTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHardwareTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsHardwareTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsHardwareTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHardwareTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsHardwareTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHarmfulAppWarningHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHarmfulAppWarningHostTestCases
new file mode 100644
index 0000000..45e4628
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHarmfulAppWarningHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsHarmfulAppWarningHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHarmfulAppWarningHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsHarmfulAppWarningHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsHarmfulAppWarningHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHarmfulAppWarningHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsHarmfulAppWarningHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHiddenApi b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHiddenApi
new file mode 100644
index 0000000..427cae9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHiddenApi
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsHiddenApi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWildcardTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsHiddenApi',
+        test_name='cheets_CTS_P.internal.arm.CtsHiddenApi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsHiddenApi',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHostTzDataTests b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHostTzDataTests
new file mode 100644
index 0000000..127f514
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHostTzDataTests
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsHostTzDataTests'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHostTzDataTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsHostTzDataTests',
+        test_name='cheets_CTS_P.internal.arm.CtsHostTzDataTests',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHostTzDataTests', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsHostTzDataTests',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHostside b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHostside
new file mode 100644
index 0000000..4bc5ad2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsHostside
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsHostside'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHostsideNetworkTests, CtsHostsideNumberBlockingTestCases, CtsHostsideTvTests, CtsHostsideWebViewTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsHostside',
+        test_name='cheets_CTS_P.internal.arm.CtsHostside',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideWebViewTests', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsHostside',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsIcuTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsIcuTestCases
new file mode 100644
index 0000000..2408487
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsIcuTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsIcuTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIcuTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsIcuTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsIcuTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIcuTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsIcuTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsIncidentHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsIncidentHostTestCases
new file mode 100644
index 0000000..c7ecadd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsIncidentHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsIncidentHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIncidentHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsIncidentHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsIncidentHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIncidentHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsIncidentHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsInlineMockingTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsInlineMockingTestCases
new file mode 100644
index 0000000..8686ff8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsInlineMockingTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsInlineMockingTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInlineMockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsInlineMockingTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsInlineMockingTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsInlineMockingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsInlineMockingTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsInputMethod b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsInputMethod
new file mode 100644
index 0000000..178903b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsInputMethod
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsInputMethod'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsInputMethod',
+        test_name='cheets_CTS_P.internal.arm.CtsInputMethod',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsInputMethod',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsIntentSignatureTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsIntentSignatureTestCases
new file mode 100644
index 0000000..f54d679
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsIntentSignatureTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsIntentSignatureTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIntentSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsIntentSignatureTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsIntentSignatureTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIntentSignatureTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIntentSignatureTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJankDeviceTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJankDeviceTestCases
new file mode 100644
index 0000000..4b00986
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJankDeviceTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsJankDeviceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJankDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsJankDeviceTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsJankDeviceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsJankDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsJankDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJdwp b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJdwp
new file mode 100644
index 0000000..12d45a3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJdwp
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsJdwp'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJdwpSecurityHostTestCases, CtsJdwpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsJdwp',
+        test_name='cheets_CTS_P.internal.arm.CtsJdwp',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsJdwp',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJniTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJniTestCases
new file mode 100644
index 0000000..6c23d29
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJniTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsJniTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJniTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsJniTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsJniTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsJniTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsJniTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJobScheduler b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJobScheduler
new file mode 100644
index 0000000..cf6b344
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJobScheduler
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsJobScheduler'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsJobScheduler',
+        test_name='cheets_CTS_P.internal.arm.CtsJobScheduler',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsJobScheduler',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJvmti b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJvmti
new file mode 100644
index 0000000..b04a1e7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsJvmti
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsJvmti'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingTestCases, CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest997HostTestCases, CtsJvmtiTaggingHostTestCases, CtsJvmtiTrackingHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsJvmti',
+        test_name='cheets_CTS_P.internal.arm.CtsJvmti',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsJvmti',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=26700)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsKernelConfigTestCases
new file mode 100644
index 0000000..fdd7ec4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsKernelConfigTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsKernelConfigTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsKernelConfigTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsKernelConfigTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKernelConfigTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsKernelConfigTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsKeystoreTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsKeystoreTestCases
new file mode 100644
index 0000000..4fd0b96
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsKeystoreTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsKeystoreTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsKeystoreTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsKeystoreTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsKeystoreTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKeystoreTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsKeystoreTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLeanbackJankTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLeanbackJankTestCases
new file mode 100644
index 0000000..036df52
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLeanbackJankTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsLeanbackJankTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLeanbackJankTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLeanbackJankTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsLeanbackJankTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLeanbackJankTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLeanbackJankTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLegacyNotificationTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLegacyNotificationTestCases
new file mode 100644
index 0000000..0b964b2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLegacyNotificationTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsLegacyNotificationTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLegacyNotificationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLegacyNotificationTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsLegacyNotificationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLegacyNotificationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLegacyNotificationTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLibcore b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLibcore
new file mode 100644
index 0000000..bea447b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLibcore
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsLibcore'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLibcoreFileIOTestCases, CtsLibcoreJsr166TestCases, CtsLibcoreLegacy22TestCases, CtsLibcoreOjTestCases, CtsLibcoreOkHttpTestCases, CtsLibcoreTestCases, CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofConscryptTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLibcore',
+        test_name='cheets_CTS_P.internal.arm.CtsLibcore',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLibcore',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=21600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLiblogTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLiblogTestCases
new file mode 100644
index 0000000..d74d3ca
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLiblogTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsLiblogTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLiblogTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLiblogTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsLiblogTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLiblogTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLiblogTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLocation b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLocation
new file mode 100644
index 0000000..f56377b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLocation
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsLocation'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLocation2TestCases, CtsLocationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLocation',
+        test_name='cheets_CTS_P.internal.arm.CtsLocation',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLocation2TestCases', '--include-filter', 'CtsLocationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLocation',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLogdTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLogdTestCases
new file mode 100644
index 0000000..f4f2d1b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsLogdTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsLogdTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLogdTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsLogdTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLogdTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLogdTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaBitstreamsTestCases
new file mode 100644
index 0000000..5525776
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaBitstreamsTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMediaBitstreamsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaBitstreamsTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsMediaBitstreamsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaBitstreamsTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaBitstreamsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaHostTestCases
new file mode 100644
index 0000000..e8352c2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMediaHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMediaHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsMediaHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMediaHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaStressTestCases
new file mode 100644
index 0000000..21d9918
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaStressTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMediaStressTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaStressTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsMediaStressTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaStressTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        precondition_commands=['cras_test_client --mute 1'],
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaStressTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaStressTestCases.camera.ctshardware
new file mode 100644
index 0000000..b1cd6cb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaStressTestCases.camera.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMediaStressTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases.camera of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaStressTestCases.camera.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsMediaStressTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases android.mediastress.cts.MediaRecorderStressTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases.camera',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.32
new file mode 100644
index 0000000..2239334
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.32
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMediaTestCases.32'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaTestCases.32',
+        test_name='cheets_CTS_P.internal.arm.CtsMediaTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'armeabi-v7a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        precondition_commands=['cras_test_client --mute 1'],
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.64
new file mode 100644
index 0000000..3e130e9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.64
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMediaTestCases.64'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaTestCases.64',
+        test_name='cheets_CTS_P.internal.arm.CtsMediaTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'arm64-v8a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        precondition_commands=['cras_test_client --mute 1'],
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.audio b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.audio
new file mode 100644
index 0000000..7c2025e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.audio
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMediaTestCases.audio'
+ATTRIBUTES = 'suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases.audio of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaTestCases.audio',
+        test_name='cheets_CTS_P.internal.arm.CtsMediaTestCases.audio',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFocusTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFormatTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioManagerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlayRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPreProcessingTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPresentationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordAppOpTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecord_BufferSizeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordingConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackLatencyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackSurroundTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrack_ListenerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolAacTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolMidiTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolOggTest', '--include-filter', 'CtsMediaTestCases android.media.cts.VolumeShaperTest', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
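Note (illustrative, not part of the generated file above): unlike the full-module shards, the .audio control narrows CtsMediaTestCases to individual audio classes by repeating --include-filter 'CtsMediaTestCases <class>' pairs. A sketch of how such a run_template expands from class names; the list is shortened and the variable names are hypothetical:

    # Illustrative expansion of per-class include filters; the real control
    # enumerates every android.media.cts audio test class explicitly.
    audio_classes = ['AudioAttributesTest', 'AudioEffectTest', 'VolumeShaperTest']

    include_filters = []
    for cls in audio_classes:
        include_filters += ['--include-filter',
                            'CtsMediaTestCases android.media.cts.%s' % cls]

    run_template = (['run', 'commandAndExit', 'cts']
                    + include_filters
                    + ['--logcat-on-failure', '--dynamic-config-url='])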
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.ctshardware
new file mode 100644
index 0000000..e98d8f5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMediaTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMediaTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsMediaTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMidiTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMidiTestCases
new file mode 100644
index 0000000..33288a2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMidiTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMidiTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMidiTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsMidiTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMidiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMidiTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMocking b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMocking
new file mode 100644
index 0000000..8675447
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMocking
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMocking'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMockingDebuggableTestCases, CtsMockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMocking',
+        test_name='cheets_CTS_P.internal.arm.CtsMocking',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMocking',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMonkeyTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMonkeyTestCases
new file mode 100644
index 0000000..1e4e0f5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMonkeyTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMonkeyTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMonkeyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMonkeyTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsMonkeyTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMonkeyTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMonkeyTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMultiUser b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMultiUser
new file mode 100644
index 0000000..494c9dc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsMultiUser
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsMultiUser'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMultiUserHostTestCases, CtsMultiUserTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMultiUser',
+        test_name='cheets_CTS_P.internal.arm.CtsMultiUser',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMultiUser',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNNAPITestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNNAPITestCases
new file mode 100644
index 0000000..24e4b2c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNNAPITestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsNNAPITestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNNAPITestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNNAPITestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsNNAPITestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsNNAPITestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNNAPITestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNative b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNative
new file mode 100644
index 0000000..dd9a505
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNative
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsNative'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeHardwareTestCases, CtsNativeMediaAAudioTestCases, CtsNativeMediaSlTestCases, CtsNativeMediaXaTestCases, CtsNativeNetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNative',
+        test_name='cheets_CTS_P.internal.arm.CtsNative',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeNetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNative',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNativeMediaAAudioTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNativeMediaAAudioTestCases.ctshardware
new file mode 100644
index 0000000..305664c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNativeMediaAAudioTestCases.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsNativeMediaAAudioTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNativeMediaAAudioTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsNativeMediaAAudioTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNativeMediaAAudioTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNdefTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNdefTestCases
new file mode 100644
index 0000000..1da4a9c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNdefTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsNdefTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNdefTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNdefTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsNdefTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsNdefTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNdefTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNet b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNet
new file mode 100644
index 0000000..f55ddfe
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNet
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsNet'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNetSecConfigAttributeTestCases, CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigResourcesSrcTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetTestCases, CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyPermission22 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNet',
+        test_name='cheets_CTS_P.internal.arm.CtsNet',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNet',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        precondition_commands=['android-sh -c \'setprop ctl.start mdnsd\''],
+        timeout=30600)
+
+parallel_simple(run_TS, machines)
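Note (illustrative, not part of the generated file above): the escaped precondition entry in the CtsNet control can be hard to read. Unescaped, cheets_CTS_P runs the following command on the DUT before starting tradefed; it starts Android's mDNS daemon, which the network test cases are assumed to need (an inference, not stated in the file):

    # The escaped entry 'android-sh -c \'setprop ctl.start mdnsd\'' resolves to:
    precondition_commands = ["android-sh -c 'setprop ctl.start mdnsd'"]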
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNetTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNetTestCases.ctshardware
new file mode 100644
index 0000000..1f85776
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsNetTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsNetTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNetTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsNetTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNetTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        precondition_commands=['android-sh -c \'setprop ctl.start mdnsd\''],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsOmapiTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsOmapiTestCases
new file mode 100644
index 0000000..58c7dee
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsOmapiTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsOmapiTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOmapiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsOmapiTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsOmapiTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsOmapiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsOmapiTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsOpenG b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsOpenG
new file mode 100644
index 0000000..6364b03
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsOpenG
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsOpenG'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOpenGLTestCases, CtsOpenGlPerf2TestCases, CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsOpenG',
+        test_name='cheets_CTS_P.internal.arm.CtsOpenG',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsOpenG',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsOs b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsOs
new file mode 100644
index 0000000..02af46b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsOs
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsOs'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOsHostTestCases, CtsOsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsOs',
+        test_name='cheets_CTS_P.internal.arm.CtsOs',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsOs',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
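Note (illustrative, not part of the generated file above): several controls, including CtsOs, pass a login precondition that ejects removable drives before login ('lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'). A hypothetical Python equivalent of that shell pipeline, written only to make its intent explicit:

    # Eject every removable block device, mirroring the shell pipeline above.
    import subprocess

    def eject_removable_drives():
        out = subprocess.check_output(['lsblk', '-do', 'NAME,RM']).decode()
        for line in out.splitlines()[1:]:      # skip the NAME/RM header row
            name, removable = line.split()
            if removable == '1':
                subprocess.call(['eject', name])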
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPdfTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPdfTestCases
new file mode 100644
index 0000000..2411f5c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPdfTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsPdfTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPdfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPdfTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsPdfTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPdfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPdfTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPerfettoTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPerfettoTestCases
new file mode 100644
index 0000000..6911f79
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPerfettoTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsPerfettoTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPerfettoTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsPerfettoTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPerfettoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPerfettoTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPerfettoTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPerfettoTestCases.ctshardware
new file mode 100644
index 0000000..099c90d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPerfettoTestCases.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsPerfettoTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPerfettoTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsPerfettoTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPerfettoTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPermission b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPermission
new file mode 100644
index 0000000..3ddbf70
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPermission
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsPermission'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermission2TestCases, CtsPermissionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsPermission',
+        test_name='cheets_CTS_P.internal.arm.CtsPermission',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermissionTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPermission',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPermissionTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPermissionTestCases.camera.ctshardware
new file mode 100644
index 0000000..a4eed1d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPermissionTestCases.camera.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsPermissionTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermissionTestCases.camera of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPermissionTestCases.camera.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsPermissionTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermissionTestCases android.permission.cts.CameraPermissionTest', '--include-filter', 'CtsPermissionTestCases android.permission.cts.Camera2PermissionTest', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPermissionTestCases.camera',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPreference b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPreference
new file mode 100644
index 0000000..7991b8f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPreference
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsPreference'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPreference2TestCases, CtsPreferenceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPreference',
+        test_name='cheets_CTS_P.internal.arm.CtsPreference',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPreference2TestCases', '--include-filter', 'CtsPreferenceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPreference',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPrintTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPrintTestCases
new file mode 100644
index 0000000..2ab711e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsPrintTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsPrintTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPrintTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPrintTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsPrintTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPrintTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPrintTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsProtoTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsProtoTestCases
new file mode 100644
index 0000000..f676e88
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsProtoTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsProtoTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsProtoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsProtoTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsProtoTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsProtoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsProtoTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsProviderTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsProviderTestCases
new file mode 100644
index 0000000..0aa3697
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsProviderTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsProviderTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsProviderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsProviderTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsProviderTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsProviderTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsProviderTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsRenderscript b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsRenderscript
new file mode 100644
index 0000000..a3513ae
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsRenderscript
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsRenderscript'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRenderscriptLegacyTestCases, CtsRenderscriptTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsRenderscript',
+        test_name='cheets_CTS_P.internal.arm.CtsRenderscript',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsRenderscript',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsRs b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsRs
new file mode 100644
index 0000000..8ab32b9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsRs
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsRs'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRsBlasTestCases, CtsRsCppTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsRs',
+        test_name='cheets_CTS_P.internal.arm.CtsRs',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsCppTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsRs',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSample b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSample
new file mode 100644
index 0000000..e591381
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSample
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSample'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSample',
+        test_name='cheets_CTS_P.internal.arm.CtsSample',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSample',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSaxTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSaxTestCases
new file mode 100644
index 0000000..1e3fece
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSaxTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSaxTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSaxTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSaxTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsSaxTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSaxTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSaxTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSeccompHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSeccompHostTestCases
new file mode 100644
index 0000000..5db3e66
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSeccompHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSeccompHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSeccompHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSeccompHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsSeccompHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSeccompHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSeccompHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSecureElementAccessControl b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSecureElementAccessControl
new file mode 100644
index 0000000..a884b51
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSecureElementAccessControl
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSecureElementAccessControl'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSecureElementAccessControl',
+        test_name='cheets_CTS_P.internal.arm.CtsSecureElementAccessControl',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSecureElementAccessControl',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSecurity b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSecurity
new file mode 100644
index 0000000..d99255b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSecurity
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSecurity'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityBulletinHostTestCases, CtsSecurityHostTestCases, CtsSecurityTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSecurity',
+        test_name='cheets_CTS_P.internal.arm.CtsSecurity',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--include-filter', 'CtsSecurityHostTestCases', '--include-filter', 'CtsSecurityTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSecurity',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        precondition_commands=['echo 3 > /proc/sys/kernel/perf_event_paranoid', 'modprobe configs'],
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSelinuxTargetSdk b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSelinuxTargetSdk
new file mode 100644
index 0000000..6761c74
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSelinuxTargetSdk
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSelinuxTargetSdk'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSelinuxTargetSdk',
+        test_name='cheets_CTS_P.internal.arm.CtsSelinuxTargetSdk',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSelinuxTargetSdk',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSensorTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSensorTestCases
new file mode 100644
index 0000000..f0e00b9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSensorTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSensorTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=30,
+        tag='internal.arm.CtsSensorTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsSensorTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSensorTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSensorTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSensorTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSensorTestCases.ctshardware
new file mode 100644
index 0000000..2ab8eea
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSensorTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSensorTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=30,
+        tag='internal.arm.CtsSensorTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsSensorTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSensorTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsShortcut b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsShortcut
new file mode 100644
index 0000000..af54d1d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsShortcut
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsShortcut'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsShortcut',
+        test_name='cheets_CTS_P.internal.arm.CtsShortcut',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsShortcut',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSimRestrictedApisTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSimRestrictedApisTestCases
new file mode 100644
index 0000000..3adecec
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSimRestrictedApisTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSimRestrictedApisTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimRestrictedApisTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSimRestrictedApisTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsSimRestrictedApisTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimRestrictedApisTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSimRestrictedApisTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSimpleCpuTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSimpleCpuTestCases
new file mode 100644
index 0000000..fd648dd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSimpleCpuTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSimpleCpuTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimpleCpuTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSimpleCpuTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsSimpleCpuTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleCpuTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSimpleCpuTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSimpleperfTestCases
new file mode 100644
index 0000000..8b5a4fe
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSimpleperfTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSimpleperfTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsSimpleperfTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsSimpleperfTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleperfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSimpleperfTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSkQPTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSkQPTestCases
new file mode 100644
index 0000000..22c59e4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSkQPTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSkQPTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSkQPTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSkQPTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsSkQPTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSkQPTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSkQPTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSliceTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSliceTestCases
new file mode 100644
index 0000000..4d3bad2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSliceTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSliceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSliceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSliceTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsSliceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSliceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSliceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSpeechTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSpeechTestCases
new file mode 100644
index 0000000..9e5966e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSpeechTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSpeechTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSpeechTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsSpeechTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsSpeechTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSpeechTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSpeechTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsStatsdHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsStatsdHostTestCases
new file mode 100644
index 0000000..027afb2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsStatsdHostTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsStatsdHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsStatsdHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsStatsdHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsStatsdHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsStatsdHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsStatsdHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        prerequisites=['bluetooth'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSustainedPerformanceHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSustainedPerformanceHostTestCases
new file mode 100644
index 0000000..ce85f60
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSustainedPerformanceHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSustainedPerformanceHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSustainedPerformanceHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsSustainedPerformanceHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSustainedPerformanceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSustainedPerformanceHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware
new file mode 100644
index 0000000..2c5146e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSustainedPerformanceHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSync b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSync
new file mode 100644
index 0000000..decf46a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSync
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSync'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases, CtsSyncContentHostTestCases, CtsSyncManagerTestsCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSync',
+        test_name='cheets_CTS_P.internal.arm.CtsSync',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncManagerTestsCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSync',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSystem b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSystem
new file mode 100644
index 0000000..1d4d9f9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsSystem
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsSystem'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSystemApiAnnotationTestCases, CtsSystemApiSignatureTestCases, CtsSystemIntentTestCases, CtsSystemUiHostTestCases, CtsSystemUiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSystem',
+        test_name='cheets_CTS_P.internal.arm.CtsSystem',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSystem',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTelecom b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTelecom
new file mode 100644
index 0000000..281df98
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTelecom
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsTelecom'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsTelecom',
+        test_name='cheets_CTS_P.internal.arm.CtsTelecom',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTelecom',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTelephony b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTelephony
new file mode 100644
index 0000000..a0fa8ac
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTelephony
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsTelephony'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTelephony2TestCases, CtsTelephonyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsTelephony',
+        test_name='cheets_CTS_P.internal.arm.CtsTelephony',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephonyTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTelephony',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTextTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTextTestCases
new file mode 100644
index 0000000..f728fa4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTextTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsTextTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTextTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTextTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsTextTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTextTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTextTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTheme b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTheme
new file mode 100644
index 0000000..131a5e2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTheme
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsTheme'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsThemeDeviceTestCases, CtsThemeHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsTheme',
+        test_name='cheets_CTS_P.internal.arm.CtsTheme',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTheme',
+        target_plan=None,
+        bundle='arm',
+        extra_artifacts_host=['/tmp/diff_*.png'],
+        uri='DEV',
+        prerequisites=['region_us'],
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsToast b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsToast
new file mode 100644
index 0000000..993207a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsToast
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsToast'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsToastLegacyTestCases, CtsToastTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsToast',
+        test_name='cheets_CTS_P.internal.arm.CtsToast',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsToast',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTransitionTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTransitionTestCases
new file mode 100644
index 0000000..0fbe76b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTransitionTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsTransitionTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTransitionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsTransitionTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsTransitionTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTransitionTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTransitionTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTrustedVoiceHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTrustedVoiceHostTestCases
new file mode 100644
index 0000000..2174a3f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTrustedVoiceHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsTrustedVoiceHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTrustedVoiceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTrustedVoiceHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsTrustedVoiceHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTrustedVoiceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTrustedVoiceHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTv b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTv
new file mode 100644
index 0000000..a5f1e6b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsTv
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsTv'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTvProviderTestCases, CtsTvTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsTv',
+        test_name='cheets_CTS_P.internal.arm.CtsTv',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTv',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUi b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUi
new file mode 100644
index 0000000..d1b8056
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUi
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsUi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUiRenderingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUi',
+        test_name='cheets_CTS_P.internal.arm.CtsUi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiDeviceTestCases', '--include-filter', 'CtsUiRenderingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUi',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUidIsolationTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUidIsolationTestCases
new file mode 100644
index 0000000..123da6a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUidIsolationTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsUidIsolationTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUidIsolationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUidIsolationTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsUidIsolationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUidIsolationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUidIsolationTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUsageStatsTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUsageStatsTestCases
new file mode 100644
index 0000000..12f7436
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUsageStatsTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsUsageStatsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUsageStatsTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsUsageStatsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUsageStatsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUsageStatsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUsageStatsTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUsageStatsTestCases.ctshardware
new file mode 100644
index 0000000..c83cd50
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUsageStatsTestCases.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsUsageStatsTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUsageStatsTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsUsageStatsTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUsageStatsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUsbTests b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUsbTests
new file mode 100644
index 0000000..2b56ab8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUsbTests
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsUsbTests'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsbTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsUsbTests',
+        test_name='cheets_CTS_P.internal.arm.CtsUsbTests',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUsbTests', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUsbTests',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUtilTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUtilTestCases
new file mode 100644
index 0000000..02711f2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsUtilTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsUtilTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUtilTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUtilTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsUtilTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUtilTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUtilTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVideoTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVideoTestCases
new file mode 100644
index 0000000..5348084
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVideoTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsVideoTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsVideoTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsVideoTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVideoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsVideoTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsViewTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsViewTestCases.32
new file mode 100644
index 0000000..a3a22de
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsViewTestCases.32
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsViewTestCases.32'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsViewTestCases.32',
+        test_name='cheets_CTS_P.internal.arm.CtsViewTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--abi', 'armeabi-v7a', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='arm',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        uri='DEV',
+        precondition_commands=['sleep 60'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsViewTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsViewTestCases.64
new file mode 100644
index 0000000..aadb969
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsViewTestCases.64
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsViewTestCases.64'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsViewTestCases.64',
+        test_name='cheets_CTS_P.internal.arm.CtsViewTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--abi', 'arm64-v8a', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='arm',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        uri='DEV',
+        precondition_commands=['sleep 60'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsViewTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsViewTestCases.ctshardware
new file mode 100644
index 0000000..06db88b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsViewTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsViewTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsViewTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.arm.CtsViewTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='arm',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        uri='LATEST',
+        precondition_commands=['sleep 60'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVmTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVmTestCases
new file mode 100644
index 0000000..e6c8e10
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVmTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsVmTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVmTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsVmTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsVmTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVmTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsVmTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVoice b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVoice
new file mode 100644
index 0000000..13cd164
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVoice
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsVoice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVoiceInteractionTestCases, CtsVoiceSettingsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsVoice',
+        test_name='cheets_CTS_P.internal.arm.CtsVoice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceSettingsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsVoice',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVrTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVrTestCases
new file mode 100644
index 0000000..9dcb5e0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsVrTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsVrTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVrTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsVrTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsVrTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVrTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsVrTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWebkitTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWebkitTestCases
new file mode 100644
index 0000000..9842a80
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWebkitTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsWebkitTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWebkitTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWebkitTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsWebkitTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWebkitTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsWebkitTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        prerequisites=['region_us'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWidgetTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWidgetTestCases
new file mode 100644
index 0000000..907382b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWidgetTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsWidgetTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWidgetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWidgetTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsWidgetTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWidgetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsWidgetTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWindowManagerDeviceTestCases
new file mode 100644
index 0000000..f96bb5a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWindowManagerDeviceTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsWindowManagerDeviceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManagerDeviceTestCases',
+        test_name='cheets_CTS_P.internal.arm.CtsWindowManagerDeviceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWindowManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWrap b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWrap
new file mode 100644
index 0000000..afd741a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.CtsWrap
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.CtsWrap'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWrapNoWrapTestCases, CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugTestCases, CtsWrapWrapNoDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWrap',
+        test_name='cheets_CTS_P.internal.arm.CtsWrap',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsWrap',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases
new file mode 100644
index 0000000..5ff390f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAbiOverrideHostTestCases, CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAbiOverrideHostTestCases', '--include-filter', 'CtsAccelerationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
new file mode 100644
index 0000000..7f0fdb9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccessibilityServiceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases
new file mode 100644
index 0000000..39b637f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityTestCases, CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityTestCases', '--include-filter', 'CtsAccountManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases
new file mode 100644
index 0000000..94d1ab9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsActivityManagerDeviceSdk25TestCases, CtsActivityManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases
new file mode 100644
index 0000000..01dd04c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases
@@ -0,0 +1,51 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases, CtsAlarmClockTestCases, CtsAlarmManagerTestCases, CtsAndroidAppTestCases, CtsAndroidTestBase27ApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases, CtsAnimationTestCases, CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases, CtsAppComponentFactoryTestCases, CtsAppSecurityHostTestCases, CtsAppTestCases, CtsAppUsageHostTestCases, CtsAppWidgetTestCases, CtsAslrMallocTestCases, CtsAssistTestCases, CtsAtraceHostTestCases, CtsAutoFillServiceTestCases, CtsBackgroundRestrictionsTestCases, CtsBackupHostTestCases, CtsBackupTestCases, CtsBatterySavingTestCases, CtsBionicTestCases, CtsBluetoothTestCases, CtsBootStatsTestCases, CtsCalendarcommon2TestCases, CtsCameraApi25TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        enable_default_apps=True,
+        tag='internal.arm.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--include-filter', 'CtsAlarmClockTestCases', '--include-filter', 'CtsAlarmManagerTestCases', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidTestBase27ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--include-filter', 'CtsAnimationTestCases', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppWidgetTestCases', '--include-filter', 'CtsAslrMallocTestCases', '--include-filter', 'CtsAssistTestCases', '--include-filter', 'CtsAtraceHostTestCases', '--include-filter', 'CtsAutoFillServiceTestCases', '--include-filter', 'CtsBackgroundRestrictionsTestCases', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--include-filter', 'CtsBatterySavingTestCases', '--include-filter', 'CtsBionicTestCases', '--include-filter', 'CtsBluetoothTestCases', '--include-filter', 'CtsBootStatsTestCases', '--include-filter', 'CtsCalendarcommon2TestCases', '--include-filter', 'CtsCameraApi25TestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        prerequisites=['bluetooth'],
+        hard_reboot_on_failure=True,
+        timeout=88200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases
new file mode 100644
index 0000000..1e1dc78
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsCameraTestCases_-_CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsCarTestCases_-_CtsDebugTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsCarTestCases_-_CtsDebugTestCases
new file mode 100644
index 0000000..e284697
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsCarTestCases_-_CtsDebugTestCases
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsCarTestCases_-_CtsDebugTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarTestCases, CtsCarrierApiTestCases, CtsColorModeTestCases, CtsCompilationTestCases, CtsContactsProviderWipe, CtsContentTestCases, CtsCppToolsTestCases, CtsCurrentApiSignatureTestCases, CtsDatabaseTestCases, CtsDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        enable_default_apps=True,
+        tag='internal.arm.all.CtsCarTestCases_-_CtsDebugTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsCarTestCases_-_CtsDebugTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCarTestCases', '--include-filter', 'CtsCarrierApiTestCases', '--include-filter', 'CtsColorModeTestCases', '--include-filter', 'CtsCompilationTestCases', '--include-filter', 'CtsContactsProviderWipe', '--include-filter', 'CtsContentTestCases', '--include-filter', 'CtsCppToolsTestCases', '--include-filter', 'CtsCurrentApiSignatureTestCases', '--include-filter', 'CtsDatabaseTestCases', '--include-filter', 'CtsDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsCarTestCases_-_CtsDebugTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        prerequisites=['region_us'],
+        timeout=19800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32
new file mode 100644
index 0000000..1bccc76
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=15,
+        tag='internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32',
+        test_name='cheets_CTS_P.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'armeabi-v7a', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsDeqpTestCases_-_CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=72000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64
new file mode 100644
index 0000000..7547e55
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=15,
+        tag='internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64',
+        test_name='cheets_CTS_P.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'arm64-v8a', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsDeqpTestCases_-_CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=72000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases
new file mode 100644
index 0000000..b52e67a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeviceIdleHostTestCases, CtsDevicePolicyManagerTestCases, CtsDexMetadataHostTestCases, CtsDisplayTestCases, CtsDpiTestCases, CtsDpiTestCases2, CtsDreamsTestCases, CtsDrmTestCases, CtsDumpsysHostTestCases, CtsDynamicLinkerTestCases, CtsEdiHostTestCases, CtsEffectTestCases, CtsExternalServiceTestCases, CtsExternalSourcesTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--include-filter', 'CtsDexMetadataHostTestCases', '--include-filter', 'CtsDisplayTestCases', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--include-filter', 'CtsDreamsTestCases', '--include-filter', 'CtsDrmTestCases', '--include-filter', 'CtsDumpsysHostTestCases', '--include-filter', 'CtsDynamicLinkerTestCases', '--include-filter', 'CtsEdiHostTestCases', '--include-filter', 'CtsEffectTestCases', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalSourcesTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=27000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
new file mode 100644
index 0000000..5dd1340
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsFileSystemTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
new file mode 100644
index 0000000..98b1fad
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsGestureTestCases, CtsGpuToolsHostTestCases, CtsGraphicsTestCases, CtsHardwareTestCases, CtsHarmfulAppWarningHostTestCases, CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWildcardTestCases, CtsHostTzDataTests, CtsHostsideNetworkTests, CtsHostsideNumberBlockingTestCases, CtsHostsideTvTests, CtsHostsideWebViewTests, CtsIcuTestCases, CtsIncidentHostTestCases, CtsInlineMockingTestCases, CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases, CtsIntentSignatureTestCases, CtsJankDeviceTestCases, CtsJdwpSecurityHostTestCases, CtsJdwpTestCases, CtsJniTestCases, CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--include-filter', 'CtsGestureTestCases', '--include-filter', 'CtsGpuToolsHostTestCases', '--include-filter', 'CtsGraphicsTestCases', '--include-filter', 'CtsHardwareTestCases', '--include-filter', 'CtsHarmfulAppWarningHostTestCases', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--include-filter', 'CtsHostTzDataTests', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideWebViewTests', '--include-filter', 'CtsIcuTestCases', '--include-filter', 'CtsIncidentHostTestCases', '--include-filter', 'CtsInlineMockingTestCases', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodTestCases', '--include-filter', 'CtsIntentSignatureTestCases', '--include-filter', 'CtsJankDeviceTestCases', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpTestCases', '--include-filter', 'CtsJniTestCases', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=61200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases
new file mode 100644
index 0000000..ef9d3a9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingTestCases, CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest997HostTestCases, CtsJvmtiTaggingHostTestCases, CtsJvmtiTrackingHostTestCases, CtsKernelConfigTestCases, CtsKeystoreTestCases, CtsLeanbackJankTestCases, CtsLegacyNotificationTestCases, CtsLibcoreFileIOTestCases, CtsLibcoreJsr166TestCases, CtsLibcoreLegacy22TestCases, CtsLibcoreOjTestCases, CtsLibcoreOkHttpTestCases, CtsLibcoreTestCases, CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofConscryptTestCases, CtsLiblogTestCases, CtsLocation2TestCases, CtsLocationTestCases, CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--include-filter', 'CtsKernelConfigTestCases', '--include-filter', 'CtsKeystoreTestCases', '--include-filter', 'CtsLeanbackJankTestCases', '--include-filter', 'CtsLegacyNotificationTestCases', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--include-filter', 'CtsLiblogTestCases', '--include-filter', 'CtsLocation2TestCases', '--include-filter', 'CtsLocationTestCases', '--include-filter', 'CtsLogdTestCases', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=62700)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases
new file mode 100644
index 0000000..1d87e9c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaBitstreamsTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases
new file mode 100644
index 0000000..2d34394
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
new file mode 100644
index 0000000..108e833
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaStressTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32
new file mode 100644
index 0000000..3733ea3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32',
+        test_name='cheets_CTS_P.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'armeabi-v7a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaTestCases_-_CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64
new file mode 100644
index 0000000..93d4390
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64',
+        test_name='cheets_CTS_P.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'arm64-v8a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaTestCases_-_CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases
new file mode 100644
index 0000000..8bb5bb1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMidiTestCases, CtsMockingDebuggableTestCases, CtsMockingTestCases, CtsMonkeyTestCases, CtsMultiUserHostTestCases, CtsMultiUserTestCases, CtsNNAPITestCases, CtsNativeHardwareTestCases, CtsNativeMediaAAudioTestCases, CtsNativeMediaSlTestCases, CtsNativeMediaXaTestCases, CtsNativeNetTestCases, CtsNdefTestCases, CtsNetSecConfigAttributeTestCases, CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigResourcesSrcTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetTestCases, CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyPermission22, CtsOmapiTestCases, CtsOpenGLTestCases, CtsOpenGlPerf2TestCases, CtsOpenGlPerfTestCases, CtsOsHostTestCases, CtsOsTestCases, CtsPdfTestCases, CtsPerfettoTestCases, CtsPermission2TestCases, CtsPermissionTestCases, CtsPreference2TestCases, CtsPreferenceTestCases, CtsPrintTestCases, CtsProtoTestCases, CtsProviderTestCases, CtsRenderscriptLegacyTestCases, CtsRenderscriptTestCases, CtsRsBlasTestCases, CtsRsCppTestCases, CtsSampleDeviceTestCases, CtsSampleHostTestCases, CtsSaxTestCases, CtsSeccompHostTestCases, CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases3, CtsSecurityBulletinHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMidiTestCases', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingTestCases', '--include-filter', 'CtsMonkeyTestCases', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserTestCases', '--include-filter', 'CtsNNAPITestCases', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeNetTestCases', '--include-filter', 'CtsNdefTestCases', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--include-filter', 'CtsOmapiTestCases', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerfTestCases', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsTestCases', '--include-filter', 'CtsPdfTestCases', '--include-filter', 'CtsPerfettoTestCases', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermissionTestCases', '--include-filter', 'CtsPreference2TestCases', '--include-filter', 'CtsPreferenceTestCases', '--include-filter', 'CtsPrintTestCases', '--include-filter', 'CtsProtoTestCases', '--include-filter', 'CtsProviderTestCases', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptTestCases', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsCppTestCases', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleHostTestCases', '--include-filter', 'CtsSaxTestCases', '--include-filter', 'CtsSeccompHostTestCases', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        precondition_commands=['android-sh -c \'setprop ctl.start mdnsd\''],
+        timeout=106200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
new file mode 100644
index 0000000..f526089
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSecurityHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        precondition_commands=['echo 3 > /proc/sys/kernel/perf_event_paranoid', 'modprobe configs'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
new file mode 100644
index 0000000..c41476a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityTestCases, CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityTestCases', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=14400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases
new file mode 100644
index 0000000..dda7de4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=30,
+        tag='internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSensorTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsSensorTestCases_-_CtsSensorTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases
new file mode 100644
index 0000000..73191d3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerTestCases, CtsSimRestrictedApisTestCases, CtsSimpleCpuTestCases, CtsSimpleperfTestCases, CtsSkQPTestCases, CtsSliceTestCases, CtsSpeechTestCases, CtsStatsdHostTestCases, CtsSustainedPerformanceHostTestCases, CtsSyncAccountAccessOtherCertTestCases, CtsSyncContentHostTestCases, CtsSyncManagerTestsCases, CtsSystemApiAnnotationTestCases, CtsSystemApiSignatureTestCases, CtsSystemIntentTestCases, CtsSystemUiHostTestCases, CtsSystemUiTestCases, CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases3, CtsTelephony2TestCases, CtsTelephonyTestCases, CtsTextTestCases, CtsThemeDeviceTestCases, CtsThemeHostTestCases, CtsToastLegacyTestCases, CtsToastTestCases, CtsTransitionTestCases, CtsTrustedVoiceHostTestCases, CtsTvProviderTestCases, CtsTvTestCases, CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUiRenderingTestCases, CtsUidIsolationTestCases, CtsUsageStatsTestCases, CtsUsbTests, CtsUtilTestCases, CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
+        test_name='cheets_CTS_P.internal.arm.all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerTestCases', '--include-filter', 'CtsSimRestrictedApisTestCases', '--include-filter', 'CtsSimpleCpuTestCases', '--include-filter', 'CtsSimpleperfTestCases', '--include-filter', 'CtsSkQPTestCases', '--include-filter', 'CtsSliceTestCases', '--include-filter', 'CtsSpeechTestCases', '--include-filter', 'CtsStatsdHostTestCases', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncManagerTestsCases', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiTestCases', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases3', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephonyTestCases', '--include-filter', 'CtsTextTestCases', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeHostTestCases', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastTestCases', '--include-filter', 'CtsTransitionTestCases', '--include-filter', 'CtsTrustedVoiceHostTestCases', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvTestCases', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiDeviceTestCases', '--include-filter', 'CtsUiRenderingTestCases', '--include-filter', 'CtsUidIsolationTestCases', '--include-filter', 'CtsUsageStatsTestCases', '--include-filter', 'CtsUsbTests', '--include-filter', 'CtsUtilTestCases', '--include-filter', 'CtsVideoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
+        target_plan=None,
+        bundle='arm',
+        extra_artifacts_host=['/tmp/diff_*.png'],
+        uri='LATEST',
+        prerequisites=['bluetooth', 'region_us'],
+        timeout=77400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.32
new file mode 100644
index 0000000..5fc8c4c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.32
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.32'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.32',
+        test_name='cheets_CTS_P.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--abi', 'armeabi-v7a', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsViewTestCases_-_CtsViewTestCases',
+        target_plan=None,
+        bundle='arm',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        uri='LATEST',
+        precondition_commands=['sleep 60'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.64
new file mode 100644
index 0000000..aeab19e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.64
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.64'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.64',
+        test_name='cheets_CTS_P.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--abi', 'arm64-v8a', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsViewTestCases_-_CtsViewTestCases',
+        target_plan=None,
+        bundle='arm',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        uri='LATEST',
+        precondition_commands=['sleep 60'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsVmTestCases_-_vm-tests-tf b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsVmTestCases_-_vm-tests-tf
new file mode 100644
index 0000000..90abb02
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.all.CtsVmTestCases_-_vm-tests-tf
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.all.CtsVmTestCases_-_vm-tests-tf'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVmTestCases, CtsVoiceInteractionTestCases, CtsVoiceSettingsTestCases, CtsVrTestCases, CtsWebkitTestCases, CtsWidgetTestCases, CtsWindowManagerDeviceTestCases, CtsWrapNoWrapTestCases, CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugTestCases, CtsWrapWrapNoDebugTestCases, cts-system-all.api, signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases3, vm-tests-tf of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsVmTestCases_-_vm-tests-tf',
+        test_name='cheets_CTS_P.internal.arm.all.CtsVmTestCases_-_vm-tests-tf',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVmTestCases', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceSettingsTestCases', '--include-filter', 'CtsVrTestCases', '--include-filter', 'CtsWebkitTestCases', '--include-filter', 'CtsWidgetTestCases', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--include-filter', 'cts-system-all.api', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--include-filter', 'vm-tests-tf', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsVmTestCases_-_vm-tests-tf',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        prerequisites=['region_us'],
+        timeout=34200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.cts-system-all.api b/server/site_tests/cheets_CTS_P/control.internal.arm.cts-system-all.api
new file mode 100644
index 0000000..7c53173
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.cts-system-all.api
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.cts-system-all.api'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module cts-system-all.api of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.cts-system-all.api',
+        test_name='cheets_CTS_P.internal.arm.cts-system-all.api',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'cts-system-all.api', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='cts-system-all.api',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.signed-CtsSecureElementAccessControl b/server/site_tests/cheets_CTS_P/control.internal.arm.signed-CtsSecureElementAccessControl
new file mode 100644
index 0000000..dc177e7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.signed-CtsSecureElementAccessControl
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.signed-CtsSecureElementAccessControl'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.signed-CtsSecureElementAccessControl',
+        test_name='cheets_CTS_P.internal.arm.signed-CtsSecureElementAccessControl',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='signed-CtsSecureElementAccessControl',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.tradefed-run-collect-tests-only-hardware-internal b/server/site_tests/cheets_CTS_P/control.internal.arm.tradefed-run-collect-tests-only-hardware-internal
new file mode 100644
index 0000000..fdb3b42
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.tradefed-run-collect-tests-only-hardware-internal
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.tradefed-run-collect-tests-only-hardware-internal'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module tradefed-run-collect-tests-only-hardware-internal of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.tradefed-run-collect-tests-only-hardware-internal',
+        test_name='cheets_CTS_P.internal.arm.tradefed-run-collect-tests-only-hardware-internal',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--subplan', 'cts-hardware', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='tradefed-run-collect-tests-only-hardware-internal',
+        target_plan='cts-hardware',
+        bundle='arm',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.vm-tests-tf b/server/site_tests/cheets_CTS_P/control.internal.arm.vm-tests-tf
new file mode 100644
index 0000000..9aa4467
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.vm-tests-tf
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.vm-tests-tf'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module vm-tests-tf of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.vm-tests-tf',
+        test_name='cheets_CTS_P.internal.arm.vm-tests-tf',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'vm-tests-tf', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='vm-tests-tf',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.waivers b/server/site_tests/cheets_CTS_P/control.internal.arm.waivers
new file mode 100644
index 0000000..a0779d1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.waivers
@@ -0,0 +1,36 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is not auto-generated. Don't delete it.
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.waivers'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual, suite:arc-cts-unibuild, suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=False,
+        tag='internal.arm.waivers',
+        test_name='cheets_CTS_P.internal.arm.waivers',
+        run_template=['run', 'commandAndExit', 'cts', '--subplan', 'waivers', '--module-arg', 'CtsMediaTestCases:skip-media-download:true'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--module-arg', 'CtsMediaTestCases:skip-media-download:true'],
+        target_module='cts-dev',
+        target_plan='waivers',
+        load_waivers=False,
+        bundle='arm',
+        uri='DEV_WAIVER',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.arm.wm-presubmit b/server/site_tests/cheets_CTS_P/control.internal.arm.wm-presubmit
new file mode 100644
index 0000000..f1f486b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.arm.wm-presubmit
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.arm.wm-presubmit'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:smoke'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module wm-presubmit of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.wm-presubmit',
+        test_name='cheets_CTS_P.internal.arm.wm-presubmit',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--include-filter', 'CtsAppTestCases android.app.cts.TaskDescriptionTest', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--test-arg', 'com.android.compatibility.common.tradefed.testtype.JarHostTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.AndroidJUnitTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.HostTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.AndroidJUnitTest:exclude-annotation:androidx.test.filters.FlakyTest', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='wm-presubmit',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        timeout=720)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAbiOverrideHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAbiOverrideHostTestCases
new file mode 100644
index 0000000..e1b82ab
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAbiOverrideHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAbiOverrideHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAbiOverrideHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAbiOverrideHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsAbiOverrideHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAbiOverrideHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAbiOverrideHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAccelerationTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAccelerationTestCases
new file mode 100644
index 0000000..78d9c1c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAccelerationTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAccelerationTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAccelerationTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsAccelerationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccelerationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAccelerationTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAccessibility b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAccessibility
new file mode 100644
index 0000000..d2649af
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAccessibility
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAccessibility'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityServiceTestCases, CtsAccessibilityTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAccessibility',
+        test_name='cheets_CTS_P.internal.x86.CtsAccessibility',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityServiceTestCases', '--include-filter', 'CtsAccessibilityTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAccessibility',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAccountManagerTestCases
new file mode 100644
index 0000000..a47ce22
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAccountManagerTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAccountManagerTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAccountManagerTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsAccountManagerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccountManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAccountManagerTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsActivityManagerDevice b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsActivityManagerDevice
new file mode 100644
index 0000000..39db52d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsActivityManagerDevice
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsActivityManagerDevice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsActivityManagerDeviceSdk25TestCases, CtsActivityManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsActivityManagerDevice',
+        test_name='cheets_CTS_P.internal.x86.CtsActivityManagerDevice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsActivityManagerDevice',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAdmin b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAdmin
new file mode 100644
index 0000000..26674fd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAdmin
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAdmin'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAdmin',
+        test_name='cheets_CTS_P.internal.x86.CtsAdmin',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAdmin',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAlarm b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAlarm
new file mode 100644
index 0000000..e668520
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAlarm
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAlarm'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAlarmClockTestCases, CtsAlarmManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAlarm',
+        test_name='cheets_CTS_P.internal.x86.CtsAlarm',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAlarmClockTestCases', '--include-filter', 'CtsAlarmManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAlarm',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAndroid b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAndroid
new file mode 100644
index 0000000..95a0d27
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAndroid
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAndroid'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAndroidAppTestCases, CtsAndroidTestBase27ApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAndroid',
+        test_name='cheets_CTS_P.internal.x86.CtsAndroid',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidTestBase27ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAndroid',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAnimationTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAnimationTestCases
new file mode 100644
index 0000000..8975223
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAnimationTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAnimationTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAnimationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAnimationTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsAnimationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAnimationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAnimationTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsApacheHttpLegacy b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsApacheHttpLegacy
new file mode 100644
index 0000000..dd4b617
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsApacheHttpLegacy
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsApacheHttpLegacy'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsApacheHttpLegacy',
+        test_name='cheets_CTS_P.internal.x86.CtsApacheHttpLegacy',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsApacheHttpLegacy',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsApp b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsApp
new file mode 100644
index 0000000..fbb2957
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsApp
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsApp'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppComponentFactoryTestCases, CtsAppSecurityHostTestCases, CtsAppTestCases, CtsAppUsageHostTestCases, CtsAppWidgetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        enable_default_apps=True,
+        tag='internal.x86.CtsApp',
+        test_name='cheets_CTS_P.internal.x86.CtsApp',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppWidgetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsApp',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAppTestCases.feature.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAppTestCases.feature.ctshardware
new file mode 100644
index 0000000..77601bf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAppTestCases.feature.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAppTestCases.feature.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppTestCases.feature of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAppTestCases.feature.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsAppTestCases.feature.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppTestCases android.app.cts.SystemFeaturesTest', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAppTestCases.feature',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAslrMallocTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAslrMallocTestCases
new file mode 100644
index 0000000..d9d01e6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAslrMallocTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAslrMallocTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAslrMallocTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAslrMallocTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsAslrMallocTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAslrMallocTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAslrMallocTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAssistTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAssistTestCases
new file mode 100644
index 0000000..bcf5a9d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAssistTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAssistTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAssistTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAssistTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsAssistTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAssistTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAssistTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAtraceHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAtraceHostTestCases
new file mode 100644
index 0000000..fd01b3c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAtraceHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAtraceHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAtraceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAtraceHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsAtraceHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAtraceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAtraceHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAutoFillServiceTestCases
new file mode 100644
index 0000000..c05bc41
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsAutoFillServiceTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsAutoFillServiceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAutoFillServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAutoFillServiceTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsAutoFillServiceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAutoFillServiceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsAutoFillServiceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=21600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBackgroundRestrictionsTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBackgroundRestrictionsTestCases
new file mode 100644
index 0000000..f46addc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBackgroundRestrictionsTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsBackgroundRestrictionsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBackgroundRestrictionsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBackgroundRestrictionsTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsBackgroundRestrictionsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBackgroundRestrictionsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBackgroundRestrictionsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBackup b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBackup
new file mode 100644
index 0000000..b844588
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBackup
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsBackup'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBackupHostTestCases, CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBackup',
+        test_name='cheets_CTS_P.internal.x86.CtsBackup',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBackup',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBatterySavingTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBatterySavingTestCases
new file mode 100644
index 0000000..596f8c8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBatterySavingTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsBatterySavingTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBatterySavingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBatterySavingTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsBatterySavingTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBatterySavingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBatterySavingTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBionicTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBionicTestCases
new file mode 100644
index 0000000..b02825d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBionicTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsBionicTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBionicTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBionicTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsBionicTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBionicTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBionicTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBluetoothTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBluetoothTestCases
new file mode 100644
index 0000000..e153f4a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBluetoothTestCases
@@ -0,0 +1,48 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsBluetoothTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBluetoothTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBluetoothTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsBluetoothTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBluetoothTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBluetoothTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        prerequisites=['bluetooth'],
+        hard_reboot_on_failure=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBootStatsTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBootStatsTestCases
new file mode 100644
index 0000000..d600c09
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsBootStatsTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsBootStatsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBootStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBootStatsTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsBootStatsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsBootStatsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsBootStatsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCalendarcommon2TestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCalendarcommon2TestCases
new file mode 100644
index 0000000..c65382a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCalendarcommon2TestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsCalendarcommon2TestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCalendarcommon2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCalendarcommon2TestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsCalendarcommon2TestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCalendarcommon2TestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCalendarcommon2TestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCamera b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCamera
new file mode 100644
index 0000000..55f0246
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCamera
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsCamera'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraApi25TestCases, CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCamera',
+        test_name='cheets_CTS_P.internal.x86.CtsCamera',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCamera',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCameraTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCameraTestCases.ctshardware
new file mode 100644
index 0000000..66ddf1f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCameraTestCases.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsCameraTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCameraTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsCameraTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCameraTestCases.noled.camerabox.back b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCameraTestCases.noled.camerabox.back
new file mode 100644
index 0000000..a6f02b0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCameraTestCases.noled.camerabox.back
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsCameraTestCases.noled.camerabox.back'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, cts_abi_x86, camerabox_light:noled, camerabox_facing:back'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        camera_facing='back',
+        cmdline_args=args,
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCameraTestCases.noled.camerabox.back',
+        test_name='cheets_CTS_P.internal.x86.CtsCameraTestCases.noled.camerabox.back',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        retry_manual_tests=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCameraTestCases.noled.camerabox.front b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCameraTestCases.noled.camerabox.front
new file mode 100644
index 0000000..e270464
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCameraTestCases.noled.camerabox.front
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsCameraTestCases.noled.camerabox.front'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, cts_abi_x86, camerabox_light:noled, camerabox_facing:front'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        camera_facing='front',
+        cmdline_args=args,
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCameraTestCases.noled.camerabox.front',
+        test_name='cheets_CTS_P.internal.x86.CtsCameraTestCases.noled.camerabox.front',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        retry_manual_tests=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCarTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCarTestCases
new file mode 100644
index 0000000..991bf8a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCarTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsCarTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCarTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsCarTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCarTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCarrierApiTestCases
new file mode 100644
index 0000000..faf70c9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCarrierApiTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsCarrierApiTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCarrierApiTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsCarrierApiTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarrierApiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCarrierApiTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsColorModeTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsColorModeTestCases
new file mode 100644
index 0000000..5684847
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsColorModeTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsColorModeTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsColorModeTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsColorModeTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsColorModeTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsColorModeTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsColorModeTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCompilationTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCompilationTestCases
new file mode 100644
index 0000000..e0e7df0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCompilationTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsCompilationTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCompilationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCompilationTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsCompilationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCompilationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCompilationTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsContactsProviderWipe b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsContactsProviderWipe
new file mode 100644
index 0000000..34c7aa6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsContactsProviderWipe
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsContactsProviderWipe'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsContactsProviderWipe of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsContactsProviderWipe',
+        test_name='cheets_CTS_P.internal.x86.CtsContactsProviderWipe',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsContactsProviderWipe', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsContactsProviderWipe',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsContentTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsContentTestCases
new file mode 100644
index 0000000..19f0c16
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsContentTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsContentTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsContentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        enable_default_apps=True,
+        tag='internal.x86.CtsContentTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsContentTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsContentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsContentTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        prerequisites=['region_us'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCppToolsTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCppToolsTestCases
new file mode 100644
index 0000000..6702730
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCppToolsTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsCppToolsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCppToolsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCppToolsTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsCppToolsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCppToolsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCppToolsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCurrentApiSignatureTestCases
new file mode 100644
index 0000000..8492b76
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsCurrentApiSignatureTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsCurrentApiSignatureTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCurrentApiSignatureTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsCurrentApiSignatureTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCurrentApiSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsCurrentApiSignatureTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDatabaseTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDatabaseTestCases
new file mode 100644
index 0000000..d8d1f13
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDatabaseTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDatabaseTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDatabaseTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDatabaseTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsDatabaseTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDatabaseTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDatabaseTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDebugTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDebugTestCases
new file mode 100644
index 0000000..db2d38c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDebugTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDebugTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDebugTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsDebugTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDebugTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.32
new file mode 100644
index 0000000..d0a91fb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.32
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDeqpTestCases.32'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-deqp, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=15,
+        tag='internal.x86.CtsDeqpTestCases.32',
+        test_name='cheets_CTS_P.internal.x86.CtsDeqpTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'x86', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=72000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.64
new file mode 100644
index 0000000..a62fb2c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.64
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDeqpTestCases.64'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-deqp, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=15,
+        tag='internal.x86.CtsDeqpTestCases.64',
+        test_name='cheets_CTS_P.internal.x86.CtsDeqpTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'x86_64', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=72000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-EGL b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-EGL
new file mode 100644
index 0000000..c6f1d0d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-EGL
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-EGL'
+ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-EGL of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-EGL',
+        test_name='cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-EGL',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-EGL.*', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES2 b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES2
new file mode 100644
index 0000000..3785a89
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES2
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-GLES2'
+ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-GLES2',
+        test_name='cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-GLES2',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES2.*', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3 b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3
new file mode 100644
index 0000000..8c1cdda
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-GLES3'
+ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-GLES3',
+        test_name='cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-GLES3',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES3.*', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=21600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
new file mode 100644
index 0000000..b80138a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases dEQP-GLES3.functional.prerequisite#*', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES31 b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES31
new file mode 100644
index 0000000..8d52de3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-GLES31
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-GLES31'
+ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES31 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-GLES31',
+        test_name='cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-GLES31',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES31.*', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=21600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-VK b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-VK
new file mode 100644
index 0000000..b19ba6c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDeqpTestCases.dEQP-VK
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-VK'
+ATTRIBUTES = 'suite:arc-cts-deqp, suite:graphics_per-week'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-VK of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-VK',
+        test_name='cheets_CTS_P.internal.x86.CtsDeqpTestCases.dEQP-VK',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-VK.*', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=54000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDevice b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDevice
new file mode 100644
index 0000000..5760c73
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDevice
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDevice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeviceIdleHostTestCases, CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDevice',
+        test_name='cheets_CTS_P.internal.x86.CtsDevice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDevice',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDexMetadataHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDexMetadataHostTestCases
new file mode 100644
index 0000000..e6ea0ed
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDexMetadataHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDexMetadataHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDexMetadataHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDexMetadataHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsDexMetadataHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDexMetadataHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDexMetadataHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDisplayTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDisplayTestCases
new file mode 100644
index 0000000..7ac43c5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDisplayTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDisplayTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDisplayTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDisplayTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsDisplayTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDisplayTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDisplayTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDpi b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDpi
new file mode 100644
index 0000000..eafefd1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDpi
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDpi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDpiTestCases, CtsDpiTestCases2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDpi',
+        test_name='cheets_CTS_P.internal.x86.CtsDpi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDpi',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDreamsTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDreamsTestCases
new file mode 100644
index 0000000..f18b7c1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDreamsTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDreamsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDreamsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDreamsTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsDreamsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDreamsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDreamsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDrmTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDrmTestCases
new file mode 100644
index 0000000..903cb68
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDrmTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDrmTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDrmTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDrmTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsDrmTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDrmTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDrmTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDumpsysHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDumpsysHostTestCases
new file mode 100644
index 0000000..3816a81
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDumpsysHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDumpsysHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDumpsysHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDumpsysHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsDumpsysHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDumpsysHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDumpsysHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDynamicLinkerTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDynamicLinkerTestCases
new file mode 100644
index 0000000..2211a49
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsDynamicLinkerTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsDynamicLinkerTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDynamicLinkerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDynamicLinkerTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsDynamicLinkerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDynamicLinkerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsDynamicLinkerTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsEdiHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsEdiHostTestCases
new file mode 100644
index 0000000..1cdf996
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsEdiHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsEdiHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsEdiHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsEdiHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsEdiHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsEdiHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsEdiHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsEffectTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsEffectTestCases
new file mode 100644
index 0000000..a4bfe6a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsEffectTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsEffectTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsEffectTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsEffectTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsEffectTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsEffectTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsEffectTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsExternalS b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsExternalS
new file mode 100644
index 0000000..ac6e0ea
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsExternalS
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsExternalS'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsExternalServiceTestCases, CtsExternalSourcesTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsExternalS',
+        test_name='cheets_CTS_P.internal.x86.CtsExternalS',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalSourcesTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsExternalS',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsFileSystemTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsFileSystemTestCases
new file mode 100644
index 0000000..0885079
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsFileSystemTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsFileSystemTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsFileSystemTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsFileSystemTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsFileSystemTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsFileSystemTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsFragment b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsFragment
new file mode 100644
index 0000000..a871cf3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsFragment
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsFragment'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsFragment',
+        test_name='cheets_CTS_P.internal.x86.CtsFragment',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsFragment',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsGestureTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsGestureTestCases
new file mode 100644
index 0000000..9bfefd0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsGestureTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsGestureTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGestureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsGestureTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsGestureTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGestureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsGestureTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsGpuToolsHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsGpuToolsHostTestCases
new file mode 100644
index 0000000..b369157
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsGpuToolsHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsGpuToolsHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGpuToolsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsGpuToolsHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsGpuToolsHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGpuToolsHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsGpuToolsHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsGraphicsTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsGraphicsTestCases
new file mode 100644
index 0000000..ee1bd88
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsGraphicsTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsGraphicsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGraphicsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=5,
+        tag='internal.x86.CtsGraphicsTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsGraphicsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsGraphicsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsGraphicsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHardwareTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHardwareTestCases
new file mode 100644
index 0000000..8599ac6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHardwareTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsHardwareTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHardwareTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsHardwareTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsHardwareTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHardwareTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsHardwareTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHarmfulAppWarningHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHarmfulAppWarningHostTestCases
new file mode 100644
index 0000000..1ba964c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHarmfulAppWarningHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsHarmfulAppWarningHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHarmfulAppWarningHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsHarmfulAppWarningHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsHarmfulAppWarningHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHarmfulAppWarningHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsHarmfulAppWarningHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHiddenApi b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHiddenApi
new file mode 100644
index 0000000..392bc5a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHiddenApi
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsHiddenApi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWildcardTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsHiddenApi',
+        test_name='cheets_CTS_P.internal.x86.CtsHiddenApi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsHiddenApi',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHostTzDataTests b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHostTzDataTests
new file mode 100644
index 0000000..1a2a511
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHostTzDataTests
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsHostTzDataTests'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHostTzDataTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsHostTzDataTests',
+        test_name='cheets_CTS_P.internal.x86.CtsHostTzDataTests',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHostTzDataTests', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsHostTzDataTests',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHostside b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHostside
new file mode 100644
index 0000000..25d0d6c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsHostside
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsHostside'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHostsideNetworkTests, CtsHostsideNumberBlockingTestCases, CtsHostsideTvTests, CtsHostsideWebViewTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsHostside',
+        test_name='cheets_CTS_P.internal.x86.CtsHostside',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideWebViewTests', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsHostside',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsIcuTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsIcuTestCases
new file mode 100644
index 0000000..3377d14
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsIcuTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsIcuTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIcuTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsIcuTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsIcuTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIcuTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsIcuTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsIncidentHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsIncidentHostTestCases
new file mode 100644
index 0000000..7090136
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsIncidentHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsIncidentHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIncidentHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsIncidentHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsIncidentHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIncidentHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsIncidentHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsInlineMockingTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsInlineMockingTestCases
new file mode 100644
index 0000000..b66d224
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsInlineMockingTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsInlineMockingTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInlineMockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsInlineMockingTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsInlineMockingTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsInlineMockingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsInlineMockingTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsInputMethod b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsInputMethod
new file mode 100644
index 0000000..a1a531c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsInputMethod
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsInputMethod'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsInputMethod',
+        test_name='cheets_CTS_P.internal.x86.CtsInputMethod',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsInputMethod',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsIntentSignatureTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsIntentSignatureTestCases
new file mode 100644
index 0000000..a253a55
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsIntentSignatureTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsIntentSignatureTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIntentSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsIntentSignatureTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsIntentSignatureTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIntentSignatureTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsIntentSignatureTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJankDeviceTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJankDeviceTestCases
new file mode 100644
index 0000000..f3c8898
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJankDeviceTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsJankDeviceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJankDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsJankDeviceTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsJankDeviceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsJankDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsJankDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJdwp b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJdwp
new file mode 100644
index 0000000..0b3e9e0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJdwp
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsJdwp'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJdwpSecurityHostTestCases, CtsJdwpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsJdwp',
+        test_name='cheets_CTS_P.internal.x86.CtsJdwp',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsJdwp',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJniTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJniTestCases
new file mode 100644
index 0000000..b6bf4b1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJniTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsJniTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJniTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsJniTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsJniTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsJniTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsJniTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJobScheduler b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJobScheduler
new file mode 100644
index 0000000..2406e70
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJobScheduler
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsJobScheduler'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsJobScheduler',
+        test_name='cheets_CTS_P.internal.x86.CtsJobScheduler',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsJobScheduler',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJvmti b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJvmti
new file mode 100644
index 0000000..97da910
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsJvmti
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsJvmti'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingTestCases, CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest997HostTestCases, CtsJvmtiTaggingHostTestCases, CtsJvmtiTrackingHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsJvmti',
+        test_name='cheets_CTS_P.internal.x86.CtsJvmti',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsJvmti',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=26700)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsKernelConfigTestCases
new file mode 100644
index 0000000..eaa6d74
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsKernelConfigTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsKernelConfigTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsKernelConfigTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsKernelConfigTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKernelConfigTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsKernelConfigTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsKeystoreTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsKeystoreTestCases
new file mode 100644
index 0000000..83dc9f2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsKeystoreTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsKeystoreTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsKeystoreTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsKeystoreTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsKeystoreTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKeystoreTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsKeystoreTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLeanbackJankTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLeanbackJankTestCases
new file mode 100644
index 0000000..4359d2b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLeanbackJankTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsLeanbackJankTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLeanbackJankTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLeanbackJankTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsLeanbackJankTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLeanbackJankTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLeanbackJankTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLegacyNotificationTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLegacyNotificationTestCases
new file mode 100644
index 0000000..8aeed34
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLegacyNotificationTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsLegacyNotificationTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLegacyNotificationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLegacyNotificationTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsLegacyNotificationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLegacyNotificationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLegacyNotificationTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLibcore b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLibcore
new file mode 100644
index 0000000..a230f2f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLibcore
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsLibcore'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLibcoreFileIOTestCases, CtsLibcoreJsr166TestCases, CtsLibcoreLegacy22TestCases, CtsLibcoreOjTestCases, CtsLibcoreOkHttpTestCases, CtsLibcoreTestCases, CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofConscryptTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLibcore',
+        test_name='cheets_CTS_P.internal.x86.CtsLibcore',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLibcore',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=21600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLiblogTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLiblogTestCases
new file mode 100644
index 0000000..fefae68
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLiblogTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsLiblogTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLiblogTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLiblogTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsLiblogTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLiblogTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLiblogTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLocation b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLocation
new file mode 100644
index 0000000..f413426
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLocation
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsLocation'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLocation2TestCases, CtsLocationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLocation',
+        test_name='cheets_CTS_P.internal.x86.CtsLocation',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLocation2TestCases', '--include-filter', 'CtsLocationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLocation',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLogdTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLogdTestCases
new file mode 100644
index 0000000..b61db1d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsLogdTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsLogdTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLogdTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsLogdTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsLogdTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsLogdTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaBitstreamsTestCases
new file mode 100644
index 0000000..137d30c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaBitstreamsTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMediaBitstreamsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaBitstreamsTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsMediaBitstreamsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaBitstreamsTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaBitstreamsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaHostTestCases
new file mode 100644
index 0000000..fd83ff3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMediaHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMediaHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsMediaHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMediaHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaStressTestCases
new file mode 100644
index 0000000..9b348e3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaStressTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMediaStressTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaStressTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsMediaStressTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaStressTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        precondition_commands=['cras_test_client --mute 1'],
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaStressTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaStressTestCases.camera.ctshardware
new file mode 100644
index 0000000..9fa5652
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaStressTestCases.camera.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMediaStressTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases.camera of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaStressTestCases.camera.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsMediaStressTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases android.mediastress.cts.MediaRecorderStressTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases.camera',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.32
new file mode 100644
index 0000000..13f2c67
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.32
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMediaTestCases.32'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaTestCases.32',
+        test_name='cheets_CTS_P.internal.x86.CtsMediaTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'x86'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        precondition_commands=['cras_test_client --mute 1'],
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.64
new file mode 100644
index 0000000..e0901b3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.64
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMediaTestCases.64'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaTestCases.64',
+        test_name='cheets_CTS_P.internal.x86.CtsMediaTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'x86_64'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        precondition_commands=['cras_test_client --mute 1'],
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.audio b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.audio
new file mode 100644
index 0000000..be3b044
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.audio
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMediaTestCases.audio'
+ATTRIBUTES = 'suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases.audio of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaTestCases.audio',
+        test_name='cheets_CTS_P.internal.x86.CtsMediaTestCases.audio',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFocusTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFormatTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioManagerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlayRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPreProcessingTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPresentationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordAppOpTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecord_BufferSizeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordingConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackLatencyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackSurroundTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrack_ListenerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolAacTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolMidiTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolOggTest', '--include-filter', 'CtsMediaTestCases android.media.cts.VolumeShaperTest', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.ctshardware
new file mode 100644
index 0000000..6aa083a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMediaTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMediaTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsMediaTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMidiTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMidiTestCases
new file mode 100644
index 0000000..e13cf5c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMidiTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMidiTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMidiTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsMidiTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMidiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMidiTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMocking b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMocking
new file mode 100644
index 0000000..7192543
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMocking
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMocking'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMockingDebuggableTestCases, CtsMockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMocking',
+        test_name='cheets_CTS_P.internal.x86.CtsMocking',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMocking',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMonkeyTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMonkeyTestCases
new file mode 100644
index 0000000..12f9883
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMonkeyTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMonkeyTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMonkeyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMonkeyTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsMonkeyTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMonkeyTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMonkeyTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMultiUser b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMultiUser
new file mode 100644
index 0000000..86ef9c6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsMultiUser
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsMultiUser'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMultiUserHostTestCases, CtsMultiUserTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMultiUser',
+        test_name='cheets_CTS_P.internal.x86.CtsMultiUser',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsMultiUser',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNNAPITestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNNAPITestCases
new file mode 100644
index 0000000..348a5a7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNNAPITestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsNNAPITestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNNAPITestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNNAPITestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsNNAPITestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsNNAPITestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNNAPITestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNative b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNative
new file mode 100644
index 0000000..993da3f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNative
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsNative'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeHardwareTestCases, CtsNativeMediaAAudioTestCases, CtsNativeMediaSlTestCases, CtsNativeMediaXaTestCases, CtsNativeNetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNative',
+        test_name='cheets_CTS_P.internal.x86.CtsNative',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeNetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNative',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNativeMediaAAudioTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNativeMediaAAudioTestCases.ctshardware
new file mode 100644
index 0000000..b090866
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNativeMediaAAudioTestCases.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsNativeMediaAAudioTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNativeMediaAAudioTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsNativeMediaAAudioTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNativeMediaAAudioTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNdefTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNdefTestCases
new file mode 100644
index 0000000..0e6c2c2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNdefTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsNdefTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNdefTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNdefTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsNdefTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsNdefTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNdefTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNet b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNet
new file mode 100644
index 0000000..48b9feb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNet
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsNet'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNetSecConfigAttributeTestCases, CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigResourcesSrcTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetTestCases, CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyPermission22 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNet',
+        test_name='cheets_CTS_P.internal.x86.CtsNet',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNet',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        precondition_commands=['android-sh -c \'setprop ctl.start mdnsd\''],
+        timeout=30600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNetTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNetTestCases.ctshardware
new file mode 100644
index 0000000..1f27261
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsNetTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsNetTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNetTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsNetTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsNetTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        precondition_commands=['android-sh -c \'setprop ctl.start mdnsd\''],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsOmapiTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsOmapiTestCases
new file mode 100644
index 0000000..c8f5b1b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsOmapiTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsOmapiTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOmapiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsOmapiTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsOmapiTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsOmapiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsOmapiTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsOpenG b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsOpenG
new file mode 100644
index 0000000..cdf3f0d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsOpenG
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsOpenG'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOpenGLTestCases, CtsOpenGlPerf2TestCases, CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsOpenG',
+        test_name='cheets_CTS_P.internal.x86.CtsOpenG',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsOpenG',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsOs b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsOs
new file mode 100644
index 0000000..f9fc32d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsOs
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsOs'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOsHostTestCases, CtsOsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsOs',
+        test_name='cheets_CTS_P.internal.x86.CtsOs',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsOs',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPdfTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPdfTestCases
new file mode 100644
index 0000000..ef2678c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPdfTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsPdfTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPdfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPdfTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsPdfTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPdfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPdfTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPerfettoTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPerfettoTestCases
new file mode 100644
index 0000000..48d324b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPerfettoTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsPerfettoTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPerfettoTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsPerfettoTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPerfettoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPerfettoTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPerfettoTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPerfettoTestCases.ctshardware
new file mode 100644
index 0000000..bd7cbe8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPerfettoTestCases.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsPerfettoTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPerfettoTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsPerfettoTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPerfettoTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPermission b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPermission
new file mode 100644
index 0000000..e6c9477
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPermission
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsPermission'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermission2TestCases, CtsPermissionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPermission',
+        test_name='cheets_CTS_P.internal.x86.CtsPermission',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermissionTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPermission',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPermissionTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPermissionTestCases.camera.ctshardware
new file mode 100644
index 0000000..6a03207
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPermissionTestCases.camera.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsPermissionTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermissionTestCases.camera of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPermissionTestCases.camera.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsPermissionTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermissionTestCases android.permission.cts.CameraPermissionTest', '--include-filter', 'CtsPermissionTestCases android.permission.cts.Camera2PermissionTest', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPermissionTestCases.camera',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPreference b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPreference
new file mode 100644
index 0000000..be95b92
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPreference
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsPreference'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPreference2TestCases, CtsPreferenceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPreference',
+        test_name='cheets_CTS_P.internal.x86.CtsPreference',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPreference2TestCases', '--include-filter', 'CtsPreferenceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPreference',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPrintTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPrintTestCases
new file mode 100644
index 0000000..e47d06b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsPrintTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsPrintTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPrintTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPrintTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsPrintTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPrintTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsPrintTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsProtoTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsProtoTestCases
new file mode 100644
index 0000000..5be712f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsProtoTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsProtoTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsProtoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsProtoTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsProtoTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsProtoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsProtoTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsProviderTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsProviderTestCases
new file mode 100644
index 0000000..a1bdd18
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsProviderTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsProviderTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsProviderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsProviderTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsProviderTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsProviderTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsProviderTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsRenderscript b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsRenderscript
new file mode 100644
index 0000000..709f511
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsRenderscript
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsRenderscript'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRenderscriptLegacyTestCases, CtsRenderscriptTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsRenderscript',
+        test_name='cheets_CTS_P.internal.x86.CtsRenderscript',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsRenderscript',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsRs b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsRs
new file mode 100644
index 0000000..827c462
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsRs
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsRs'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRsBlasTestCases, CtsRsCppTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsRs',
+        test_name='cheets_CTS_P.internal.x86.CtsRs',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsCppTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsRs',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSample b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSample
new file mode 100644
index 0000000..39cfaa4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSample
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSample'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSample',
+        test_name='cheets_CTS_P.internal.x86.CtsSample',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSample',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSaxTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSaxTestCases
new file mode 100644
index 0000000..d1ea1b4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSaxTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSaxTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSaxTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSaxTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsSaxTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSaxTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSaxTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSeccompHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSeccompHostTestCases
new file mode 100644
index 0000000..ff31a0b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSeccompHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSeccompHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSeccompHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSeccompHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsSeccompHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSeccompHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSeccompHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSecureElementAccessControl b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSecureElementAccessControl
new file mode 100644
index 0000000..d504721
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSecureElementAccessControl
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSecureElementAccessControl'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSecureElementAccessControl',
+        test_name='cheets_CTS_P.internal.x86.CtsSecureElementAccessControl',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSecureElementAccessControl',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSecurity b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSecurity
new file mode 100644
index 0000000..e4fe351
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSecurity
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSecurity'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityBulletinHostTestCases, CtsSecurityHostTestCases, CtsSecurityTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSecurity',
+        test_name='cheets_CTS_P.internal.x86.CtsSecurity',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--include-filter', 'CtsSecurityHostTestCases', '--include-filter', 'CtsSecurityTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSecurity',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        precondition_commands=['echo 3 > /proc/sys/kernel/perf_event_paranoid', 'modprobe configs'],
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSelinuxTargetSdk b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSelinuxTargetSdk
new file mode 100644
index 0000000..8764012
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSelinuxTargetSdk
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSelinuxTargetSdk'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSelinuxTargetSdk',
+        test_name='cheets_CTS_P.internal.x86.CtsSelinuxTargetSdk',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSelinuxTargetSdk',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSensorTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSensorTestCases
new file mode 100644
index 0000000..20e0d7d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSensorTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSensorTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=30,
+        tag='internal.x86.CtsSensorTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsSensorTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSensorTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSensorTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSensorTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSensorTestCases.ctshardware
new file mode 100644
index 0000000..f33164c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSensorTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSensorTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=30,
+        tag='internal.x86.CtsSensorTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsSensorTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSensorTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsShortcut b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsShortcut
new file mode 100644
index 0000000..2a0fdde
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsShortcut
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsShortcut'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsShortcut',
+        test_name='cheets_CTS_P.internal.x86.CtsShortcut',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsShortcut',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSimRestrictedApisTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSimRestrictedApisTestCases
new file mode 100644
index 0000000..b2759f8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSimRestrictedApisTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSimRestrictedApisTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimRestrictedApisTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSimRestrictedApisTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsSimRestrictedApisTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimRestrictedApisTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSimRestrictedApisTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSimpleCpuTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSimpleCpuTestCases
new file mode 100644
index 0000000..cd528e5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSimpleCpuTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSimpleCpuTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimpleCpuTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSimpleCpuTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsSimpleCpuTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleCpuTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSimpleCpuTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSimpleperfTestCases
new file mode 100644
index 0000000..10e65e1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSimpleperfTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSimpleperfTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSimpleperfTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsSimpleperfTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleperfTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSimpleperfTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSkQPTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSkQPTestCases
new file mode 100644
index 0000000..69060f1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSkQPTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSkQPTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSkQPTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSkQPTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsSkQPTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSkQPTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSkQPTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSliceTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSliceTestCases
new file mode 100644
index 0000000..6f1e3a2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSliceTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSliceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSliceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSliceTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsSliceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSliceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSliceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSpeechTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSpeechTestCases
new file mode 100644
index 0000000..4f51e9b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSpeechTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSpeechTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSpeechTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSpeechTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsSpeechTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSpeechTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSpeechTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsStatsdHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsStatsdHostTestCases
new file mode 100644
index 0000000..53d78c5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsStatsdHostTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsStatsdHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsStatsdHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsStatsdHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsStatsdHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsStatsdHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsStatsdHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        prerequisites=['bluetooth'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSustainedPerformanceHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSustainedPerformanceHostTestCases
new file mode 100644
index 0000000..1593216
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSustainedPerformanceHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSustainedPerformanceHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSustainedPerformanceHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsSustainedPerformanceHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSustainedPerformanceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSustainedPerformanceHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware
new file mode 100644
index 0000000..43e8dad
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSustainedPerformanceHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSync b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSync
new file mode 100644
index 0000000..4fc7c5d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSync
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSync'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases, CtsSyncContentHostTestCases, CtsSyncManagerTestsCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSync',
+        test_name='cheets_CTS_P.internal.x86.CtsSync',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncManagerTestsCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSync',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSystem b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSystem
new file mode 100644
index 0000000..e051f4e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsSystem
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsSystem'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSystemApiAnnotationTestCases, CtsSystemApiSignatureTestCases, CtsSystemIntentTestCases, CtsSystemUiHostTestCases, CtsSystemUiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSystem',
+        test_name='cheets_CTS_P.internal.x86.CtsSystem',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsSystem',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTelecom b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTelecom
new file mode 100644
index 0000000..82cb8ff
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTelecom
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsTelecom'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTelecom',
+        test_name='cheets_CTS_P.internal.x86.CtsTelecom',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTelecom',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTelephony b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTelephony
new file mode 100644
index 0000000..18f8a24
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTelephony
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsTelephony'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTelephony2TestCases, CtsTelephonyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTelephony',
+        test_name='cheets_CTS_P.internal.x86.CtsTelephony',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephonyTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTelephony',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTextTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTextTestCases
new file mode 100644
index 0000000..c737ee4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTextTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsTextTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTextTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTextTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsTextTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTextTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTextTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTheme b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTheme
new file mode 100644
index 0000000..fae4a9e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTheme
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsTheme'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsThemeDeviceTestCases, CtsThemeHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTheme',
+        test_name='cheets_CTS_P.internal.x86.CtsTheme',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTheme',
+        target_plan=None,
+        bundle='x86',
+        extra_artifacts_host=['/tmp/diff_*.png'],
+        uri='DEV',
+        prerequisites=['region_us'],
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsToast b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsToast
new file mode 100644
index 0000000..c39f8ef
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsToast
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsToast'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsToastLegacyTestCases, CtsToastTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsToast',
+        test_name='cheets_CTS_P.internal.x86.CtsToast',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsToast',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTransitionTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTransitionTestCases
new file mode 100644
index 0000000..3092fb8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTransitionTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsTransitionTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTransitionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTransitionTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsTransitionTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTransitionTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTransitionTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTrustedVoiceHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTrustedVoiceHostTestCases
new file mode 100644
index 0000000..872d8db
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTrustedVoiceHostTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsTrustedVoiceHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTrustedVoiceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTrustedVoiceHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsTrustedVoiceHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsTrustedVoiceHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTrustedVoiceHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTv b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTv
new file mode 100644
index 0000000..1eda6a6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsTv
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsTv'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTvProviderTestCases, CtsTvTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTv',
+        test_name='cheets_CTS_P.internal.x86.CtsTv',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsTv',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUi b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUi
new file mode 100644
index 0000000..e1e5d4b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUi
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsUi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUiRenderingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUi',
+        test_name='cheets_CTS_P.internal.x86.CtsUi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiDeviceTestCases', '--include-filter', 'CtsUiRenderingTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUi',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUidIsolationTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUidIsolationTestCases
new file mode 100644
index 0000000..07ae77f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUidIsolationTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsUidIsolationTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUidIsolationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUidIsolationTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsUidIsolationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUidIsolationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUidIsolationTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUsageStatsTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUsageStatsTestCases
new file mode 100644
index 0000000..75f995d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUsageStatsTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsUsageStatsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUsageStatsTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsUsageStatsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUsageStatsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUsageStatsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUsageStatsTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUsageStatsTestCases.ctshardware
new file mode 100644
index 0000000..96a26ce
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUsageStatsTestCases.ctshardware
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsUsageStatsTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUsageStatsTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsUsageStatsTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUsageStatsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUsbTests b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUsbTests
new file mode 100644
index 0000000..30c3646
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUsbTests
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsUsbTests'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsbTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUsbTests',
+        test_name='cheets_CTS_P.internal.x86.CtsUsbTests',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUsbTests', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUsbTests',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUtilTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUtilTestCases
new file mode 100644
index 0000000..2432afb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsUtilTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsUtilTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUtilTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUtilTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsUtilTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUtilTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsUtilTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVideoTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVideoTestCases
new file mode 100644
index 0000000..4e3d758
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVideoTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsVideoTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsVideoTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsVideoTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVideoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsVideoTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsViewTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsViewTestCases.32
new file mode 100644
index 0000000..378d3d1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsViewTestCases.32
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsViewTestCases.32'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsViewTestCases.32',
+        test_name='cheets_CTS_P.internal.x86.CtsViewTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--abi', 'x86', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='x86',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        uri='DEV',
+        precondition_commands=['sleep 60'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsViewTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsViewTestCases.64
new file mode 100644
index 0000000..69b0713
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsViewTestCases.64
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsViewTestCases.64'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsViewTestCases.64',
+        test_name='cheets_CTS_P.internal.x86.CtsViewTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--abi', 'x86_64', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='x86',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        uri='DEV',
+        precondition_commands=['sleep 60'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsViewTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsViewTestCases.ctshardware
new file mode 100644
index 0000000..40f9bbd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsViewTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsViewTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsViewTestCases.ctshardware',
+        test_name='cheets_CTS_P.internal.x86.CtsViewTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='x86',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        uri='LATEST',
+        precondition_commands=['sleep 60'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVmTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVmTestCases
new file mode 100644
index 0000000..8404189
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVmTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsVmTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVmTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsVmTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsVmTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVmTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsVmTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVoice b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVoice
new file mode 100644
index 0000000..e138b84
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVoice
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsVoice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVoiceInteractionTestCases, CtsVoiceSettingsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsVoice',
+        test_name='cheets_CTS_P.internal.x86.CtsVoice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceSettingsTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsVoice',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVrTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVrTestCases
new file mode 100644
index 0000000..d5a59e9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsVrTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsVrTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVrTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsVrTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsVrTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsVrTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsVrTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWebkitTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWebkitTestCases
new file mode 100644
index 0000000..7109266
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWebkitTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsWebkitTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWebkitTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWebkitTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsWebkitTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWebkitTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsWebkitTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        prerequisites=['region_us'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWidgetTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWidgetTestCases
new file mode 100644
index 0000000..7c01ea1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWidgetTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsWidgetTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWidgetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWidgetTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsWidgetTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWidgetTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsWidgetTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWindowManagerDeviceTestCases
new file mode 100644
index 0000000..1447c79
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWindowManagerDeviceTestCases
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsWindowManagerDeviceTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManagerDeviceTestCases',
+        test_name='cheets_CTS_P.internal.x86.CtsWindowManagerDeviceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWindowManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWrap b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWrap
new file mode 100644
index 0000000..21b887f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.CtsWrap
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.CtsWrap'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWrapNoWrapTestCases, CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugTestCases, CtsWrapWrapNoDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWrap',
+        test_name='cheets_CTS_P.internal.x86.CtsWrap',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='CtsWrap',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases
new file mode 100644
index 0000000..da98f83
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAbiOverrideHostTestCases, CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAbiOverrideHostTestCases', '--include-filter', 'CtsAccelerationTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsAbiOverrideHostTestCases_-_CtsAccelerationTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
new file mode 100644
index 0000000..51f121d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsAccessibilityServiceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases
new file mode 100644
index 0000000..55ab724
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityTestCases, CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityTestCases', '--include-filter', 'CtsAccountManagerTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsAccessibilityTestCases_-_CtsAccountManagerTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases
new file mode 100644
index 0000000..bd19e82
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsActivityManagerDeviceSdk25TestCases, CtsActivityManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsActivityManagerDeviceSdk25TestCases_-_CtsActivityManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases
new file mode 100644
index 0000000..40b0efa
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases
@@ -0,0 +1,51 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases, CtsAlarmClockTestCases, CtsAlarmManagerTestCases, CtsAndroidAppTestCases, CtsAndroidTestBase27ApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases, CtsAnimationTestCases, CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases, CtsAppComponentFactoryTestCases, CtsAppSecurityHostTestCases, CtsAppTestCases, CtsAppUsageHostTestCases, CtsAppWidgetTestCases, CtsAslrMallocTestCases, CtsAssistTestCases, CtsAtraceHostTestCases, CtsAutoFillServiceTestCases, CtsBackgroundRestrictionsTestCases, CtsBackupHostTestCases, CtsBackupTestCases, CtsBatterySavingTestCases, CtsBionicTestCases, CtsBluetoothTestCases, CtsBootStatsTestCases, CtsCalendarcommon2TestCases, CtsCameraApi25TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        enable_default_apps=True,
+        tag='internal.x86.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--include-filter', 'CtsAlarmClockTestCases', '--include-filter', 'CtsAlarmManagerTestCases', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidTestBase27ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--include-filter', 'CtsAnimationTestCases', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppWidgetTestCases', '--include-filter', 'CtsAslrMallocTestCases', '--include-filter', 'CtsAssistTestCases', '--include-filter', 'CtsAtraceHostTestCases', '--include-filter', 'CtsAutoFillServiceTestCases', '--include-filter', 'CtsBackgroundRestrictionsTestCases', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--include-filter', 'CtsBatterySavingTestCases', '--include-filter', 'CtsBionicTestCases', '--include-filter', 'CtsBluetoothTestCases', '--include-filter', 'CtsBootStatsTestCases', '--include-filter', 'CtsCalendarcommon2TestCases', '--include-filter', 'CtsCameraApi25TestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsAdminPackageInstallerTestCases_-_CtsCameraApi25TestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        prerequisites=['bluetooth'],
+        hard_reboot_on_failure=True,
+        timeout=88200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases
new file mode 100644
index 0000000..0643293
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsCameraTestCases_-_CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsCarTestCases_-_CtsDebugTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsCarTestCases_-_CtsDebugTestCases
new file mode 100644
index 0000000..d1559f6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsCarTestCases_-_CtsDebugTestCases
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsCarTestCases_-_CtsDebugTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarTestCases, CtsCarrierApiTestCases, CtsColorModeTestCases, CtsCompilationTestCases, CtsContactsProviderWipe, CtsContentTestCases, CtsCppToolsTestCases, CtsCurrentApiSignatureTestCases, CtsDatabaseTestCases, CtsDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        enable_default_apps=True,
+        tag='internal.x86.all.CtsCarTestCases_-_CtsDebugTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsCarTestCases_-_CtsDebugTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCarTestCases', '--include-filter', 'CtsCarrierApiTestCases', '--include-filter', 'CtsColorModeTestCases', '--include-filter', 'CtsCompilationTestCases', '--include-filter', 'CtsContactsProviderWipe', '--include-filter', 'CtsContentTestCases', '--include-filter', 'CtsCppToolsTestCases', '--include-filter', 'CtsCurrentApiSignatureTestCases', '--include-filter', 'CtsDatabaseTestCases', '--include-filter', 'CtsDebugTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsCarTestCases_-_CtsDebugTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        prerequisites=['region_us'],
+        timeout=19800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32
new file mode 100644
index 0000000..47cfdeb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=15,
+        tag='internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32',
+        test_name='cheets_CTS_P.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'x86', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsDeqpTestCases_-_CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=72000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64
new file mode 100644
index 0000000..1b410c6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=15,
+        tag='internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64',
+        test_name='cheets_CTS_P.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'x86_64', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsDeqpTestCases_-_CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=72000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases
new file mode 100644
index 0000000..77bd784
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeviceIdleHostTestCases, CtsDevicePolicyManagerTestCases, CtsDexMetadataHostTestCases, CtsDisplayTestCases, CtsDpiTestCases, CtsDpiTestCases2, CtsDreamsTestCases, CtsDrmTestCases, CtsDumpsysHostTestCases, CtsDynamicLinkerTestCases, CtsEdiHostTestCases, CtsEffectTestCases, CtsExternalServiceTestCases, CtsExternalSourcesTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--include-filter', 'CtsDexMetadataHostTestCases', '--include-filter', 'CtsDisplayTestCases', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--include-filter', 'CtsDreamsTestCases', '--include-filter', 'CtsDrmTestCases', '--include-filter', 'CtsDumpsysHostTestCases', '--include-filter', 'CtsDynamicLinkerTestCases', '--include-filter', 'CtsEdiHostTestCases', '--include-filter', 'CtsEffectTestCases', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalSourcesTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsDeviceIdleHostTestCases_-_CtsExternalSourcesTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=27000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
new file mode 100644
index 0000000..1c7eaa5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsFileSystemTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
new file mode 100644
index 0000000..9f02bc4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsGestureTestCases, CtsGpuToolsHostTestCases, CtsGraphicsTestCases, CtsHardwareTestCases, CtsHarmfulAppWarningHostTestCases, CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWildcardTestCases, CtsHostTzDataTests, CtsHostsideNetworkTests, CtsHostsideNumberBlockingTestCases, CtsHostsideTvTests, CtsHostsideWebViewTests, CtsIcuTestCases, CtsIncidentHostTestCases, CtsInlineMockingTestCases, CtsInputMethodServiceHostTestCases, CtsInputMethodTestCases, CtsIntentSignatureTestCases, CtsJankDeviceTestCases, CtsJdwpSecurityHostTestCases, CtsJdwpTestCases, CtsJniTestCases, CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--include-filter', 'CtsGestureTestCases', '--include-filter', 'CtsGpuToolsHostTestCases', '--include-filter', 'CtsGraphicsTestCases', '--include-filter', 'CtsHardwareTestCases', '--include-filter', 'CtsHarmfulAppWarningHostTestCases', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--include-filter', 'CtsHostTzDataTests', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideWebViewTests', '--include-filter', 'CtsIcuTestCases', '--include-filter', 'CtsIncidentHostTestCases', '--include-filter', 'CtsInlineMockingTestCases', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodTestCases', '--include-filter', 'CtsIntentSignatureTestCases', '--include-filter', 'CtsJankDeviceTestCases', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpTestCases', '--include-filter', 'CtsJniTestCases', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=61200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases
new file mode 100644
index 0000000..608b677
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingTestCases, CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest997HostTestCases, CtsJvmtiTaggingHostTestCases, CtsJvmtiTrackingHostTestCases, CtsKernelConfigTestCases, CtsKeystoreTestCases, CtsLeanbackJankTestCases, CtsLegacyNotificationTestCases, CtsLibcoreFileIOTestCases, CtsLibcoreJsr166TestCases, CtsLibcoreLegacy22TestCases, CtsLibcoreOjTestCases, CtsLibcoreOkHttpTestCases, CtsLibcoreTestCases, CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofConscryptTestCases, CtsLiblogTestCases, CtsLocation2TestCases, CtsLocationTestCases, CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--include-filter', 'CtsKernelConfigTestCases', '--include-filter', 'CtsKeystoreTestCases', '--include-filter', 'CtsLeanbackJankTestCases', '--include-filter', 'CtsLegacyNotificationTestCases', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--include-filter', 'CtsLiblogTestCases', '--include-filter', 'CtsLocation2TestCases', '--include-filter', 'CtsLocationTestCases', '--include-filter', 'CtsLogdTestCases', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsJvmtiAttachingHostTestCases_-_CtsLogdTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=62700)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases
new file mode 100644
index 0000000..4121325
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaBitstreamsTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaBitstreamsTestCases_-_CtsMediaBitstreamsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases
new file mode 100644
index 0000000..a5e2042
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsMediaHostTestCases_-_CtsMediaHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
new file mode 100644
index 0000000..563d567
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaStressTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32
new file mode 100644
index 0000000..85d31b9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32',
+        test_name='cheets_CTS_P.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'x86'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaTestCases_-_CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64
new file mode 100644
index 0000000..59ea1b6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64',
+        test_name='cheets_CTS_P.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'x86_64'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaTestCases_-_CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases
new file mode 100644
index 0000000..3a05f5b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMidiTestCases, CtsMockingDebuggableTestCases, CtsMockingTestCases, CtsMonkeyTestCases, CtsMultiUserHostTestCases, CtsMultiUserTestCases, CtsNNAPITestCases, CtsNativeHardwareTestCases, CtsNativeMediaAAudioTestCases, CtsNativeMediaSlTestCases, CtsNativeMediaXaTestCases, CtsNativeNetTestCases, CtsNdefTestCases, CtsNetSecConfigAttributeTestCases, CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigResourcesSrcTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetTestCases, CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyPermission22, CtsOmapiTestCases, CtsOpenGLTestCases, CtsOpenGlPerf2TestCases, CtsOpenGlPerfTestCases, CtsOsHostTestCases, CtsOsTestCases, CtsPdfTestCases, CtsPerfettoTestCases, CtsPermission2TestCases, CtsPermissionTestCases, CtsPreference2TestCases, CtsPreferenceTestCases, CtsPrintTestCases, CtsProtoTestCases, CtsProviderTestCases, CtsRenderscriptLegacyTestCases, CtsRenderscriptTestCases, CtsRsBlasTestCases, CtsRsCppTestCases, CtsSampleDeviceTestCases, CtsSampleHostTestCases, CtsSaxTestCases, CtsSeccompHostTestCases, CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases3, CtsSecurityBulletinHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMidiTestCases', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingTestCases', '--include-filter', 'CtsMonkeyTestCases', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserTestCases', '--include-filter', 'CtsNNAPITestCases', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeNetTestCases', '--include-filter', 'CtsNdefTestCases', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--include-filter', 'CtsOmapiTestCases', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerfTestCases', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsTestCases', '--include-filter', 'CtsPdfTestCases', '--include-filter', 'CtsPerfettoTestCases', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermissionTestCases', '--include-filter', 'CtsPreference2TestCases', '--include-filter', 'CtsPreferenceTestCases', '--include-filter', 'CtsPrintTestCases', '--include-filter', 'CtsProtoTestCases', '--include-filter', 'CtsProviderTestCases', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptTestCases', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsCppTestCases', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleHostTestCases', '--include-filter', 'CtsSaxTestCases', '--include-filter', 'CtsSeccompHostTestCases', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsMidiTestCases_-_CtsSecurityBulletinHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        precondition_commands=['android-sh -c \'setprop ctl.start mdnsd\''],
+        timeout=106200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
new file mode 100644
index 0000000..bab6568
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSecurityHostTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        precondition_commands=['echo 3 > /proc/sys/kernel/perf_event_paranoid', 'modprobe configs'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
new file mode 100644
index 0000000..1e60b62
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityTestCases, CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityTestCases', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=14400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases
new file mode 100644
index 0000000..fb7746c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=30,
+        tag='internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSensorTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsSensorTestCases_-_CtsSensorTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases
new file mode 100644
index 0000000..8ba3b3f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerTestCases, CtsSimRestrictedApisTestCases, CtsSimpleCpuTestCases, CtsSimpleperfTestCases, CtsSkQPTestCases, CtsSliceTestCases, CtsSpeechTestCases, CtsStatsdHostTestCases, CtsSustainedPerformanceHostTestCases, CtsSyncAccountAccessOtherCertTestCases, CtsSyncContentHostTestCases, CtsSyncManagerTestsCases, CtsSystemApiAnnotationTestCases, CtsSystemApiSignatureTestCases, CtsSystemIntentTestCases, CtsSystemUiHostTestCases, CtsSystemUiTestCases, CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases3, CtsTelephony2TestCases, CtsTelephonyTestCases, CtsTextTestCases, CtsThemeDeviceTestCases, CtsThemeHostTestCases, CtsToastLegacyTestCases, CtsToastTestCases, CtsTransitionTestCases, CtsTrustedVoiceHostTestCases, CtsTvProviderTestCases, CtsTvTestCases, CtsUiAutomationTestCases, CtsUiDeviceTestCases, CtsUiRenderingTestCases, CtsUidIsolationTestCases, CtsUsageStatsTestCases, CtsUsbTests, CtsUtilTestCases, CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
+        test_name='cheets_CTS_P.internal.x86.all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerTestCases', '--include-filter', 'CtsSimRestrictedApisTestCases', '--include-filter', 'CtsSimpleCpuTestCases', '--include-filter', 'CtsSimpleperfTestCases', '--include-filter', 'CtsSkQPTestCases', '--include-filter', 'CtsSliceTestCases', '--include-filter', 'CtsSpeechTestCases', '--include-filter', 'CtsStatsdHostTestCases', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncManagerTestsCases', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiTestCases', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases3', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephonyTestCases', '--include-filter', 'CtsTextTestCases', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeHostTestCases', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastTestCases', '--include-filter', 'CtsTransitionTestCases', '--include-filter', 'CtsTrustedVoiceHostTestCases', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvTestCases', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiDeviceTestCases', '--include-filter', 'CtsUiRenderingTestCases', '--include-filter', 'CtsUidIsolationTestCases', '--include-filter', 'CtsUsageStatsTestCases', '--include-filter', 'CtsUsbTests', '--include-filter', 'CtsUtilTestCases', '--include-filter', 'CtsVideoTestCases', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsShortcutHostTestCases_-_CtsVideoTestCases',
+        target_plan=None,
+        bundle='x86',
+        extra_artifacts_host=['/tmp/diff_*.png'],
+        uri='LATEST',
+        prerequisites=['bluetooth', 'region_us'],
+        timeout=77400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.32 b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.32
new file mode 100644
index 0000000..1129432
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.32
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.32'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.32',
+        test_name='cheets_CTS_P.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--abi', 'x86', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsViewTestCases_-_CtsViewTestCases',
+        target_plan=None,
+        bundle='x86',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        uri='LATEST',
+        precondition_commands=['sleep 60'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.64 b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.64
new file mode 100644
index 0000000..44ec003
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.64
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.64'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.64',
+        test_name='cheets_CTS_P.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsViewTestCases', '--logcat-on-failure', '--abi', 'x86_64', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsViewTestCases_-_CtsViewTestCases',
+        target_plan=None,
+        bundle='x86',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        uri='LATEST',
+        precondition_commands=['sleep 60'],
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsVmTestCases_-_vm-tests-tf b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsVmTestCases_-_vm-tests-tf
new file mode 100644
index 0000000..825f00d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.all.CtsVmTestCases_-_vm-tests-tf
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.all.CtsVmTestCases_-_vm-tests-tf'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVmTestCases, CtsVoiceInteractionTestCases, CtsVoiceSettingsTestCases, CtsVrTestCases, CtsWebkitTestCases, CtsWidgetTestCases, CtsWindowManagerDeviceTestCases, CtsWrapNoWrapTestCases, CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugTestCases, CtsWrapWrapNoDebugTestCases, cts-system-all.api, signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases3, vm-tests-tf of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsVmTestCases_-_vm-tests-tf',
+        test_name='cheets_CTS_P.internal.x86.all.CtsVmTestCases_-_vm-tests-tf',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVmTestCases', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceSettingsTestCases', '--include-filter', 'CtsVrTestCases', '--include-filter', 'CtsWebkitTestCases', '--include-filter', 'CtsWidgetTestCases', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--include-filter', 'cts-system-all.api', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--include-filter', 'vm-tests-tf', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='all.CtsVmTestCases_-_vm-tests-tf',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        prerequisites=['region_us'],
+        timeout=34200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.cts-system-all.api b/server/site_tests/cheets_CTS_P/control.internal.x86.cts-system-all.api
new file mode 100644
index 0000000..776667e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.cts-system-all.api
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.cts-system-all.api'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module cts-system-all.api of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.cts-system-all.api',
+        test_name='cheets_CTS_P.internal.x86.cts-system-all.api',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'cts-system-all.api', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='cts-system-all.api',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.signed-CtsSecureElementAccessControl b/server/site_tests/cheets_CTS_P/control.internal.x86.signed-CtsSecureElementAccessControl
new file mode 100644
index 0000000..e337df3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.signed-CtsSecureElementAccessControl
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.signed-CtsSecureElementAccessControl'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.signed-CtsSecureElementAccessControl',
+        test_name='cheets_CTS_P.internal.x86.signed-CtsSecureElementAccessControl',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='signed-CtsSecureElementAccessControl',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.tradefed-run-collect-tests-only-hardware-internal b/server/site_tests/cheets_CTS_P/control.internal.x86.tradefed-run-collect-tests-only-hardware-internal
new file mode 100644
index 0000000..90ea625
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.tradefed-run-collect-tests-only-hardware-internal
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.tradefed-run-collect-tests-only-hardware-internal'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module tradefed-run-collect-tests-only-hardware-internal of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.tradefed-run-collect-tests-only-hardware-internal',
+        test_name='cheets_CTS_P.internal.x86.tradefed-run-collect-tests-only-hardware-internal',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--subplan', 'cts-hardware', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='tradefed-run-collect-tests-only-hardware-internal',
+        target_plan='cts-hardware',
+        bundle='x86',
+        uri='LATEST',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.vm-tests-tf b/server/site_tests/cheets_CTS_P/control.internal.x86.vm-tests-tf
new file mode 100644
index 0000000..5a94040
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.vm-tests-tf
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.vm-tests-tf'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module vm-tests-tf of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.vm-tests-tf',
+        test_name='cheets_CTS_P.internal.x86.vm-tests-tf',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'vm-tests-tf', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='vm-tests-tf',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.waivers b/server/site_tests/cheets_CTS_P/control.internal.x86.waivers
new file mode 100644
index 0000000..6387c9e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.waivers
@@ -0,0 +1,36 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is not auto-generated. Don't delete it.
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.waivers'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-qual, suite:arc-cts-unibuild, suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=False,
+        tag='internal.x86.waivers',
+        test_name='cheets_CTS_P.internal.x86.waivers',
+        run_template=['run', 'commandAndExit', 'cts', '--subplan', 'waivers', '--module-arg', 'CtsMediaTestCases:skip-media-download:true'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--module-arg', 'CtsMediaTestCases:skip-media-download:true'],
+        target_module='cts-dev',
+        target_plan='waivers',
+        load_waivers=False,
+        bundle='x86',
+        uri='DEV_WAIVER',
+        timeout=14400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.internal.x86.wm-presubmit b/server/site_tests/cheets_CTS_P/control.internal.x86.wm-presubmit
new file mode 100644
index 0000000..2240e2e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.internal.x86.wm-presubmit
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.internal.x86.wm-presubmit'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module wm-presubmit of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.wm-presubmit',
+        test_name='cheets_CTS_P.internal.x86.wm-presubmit',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerDeviceSdk25TestCases', '--include-filter', 'CtsActivityManagerDeviceTestCases', '--include-filter', 'CtsAppTestCases android.app.cts.TaskDescriptionTest', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--test-arg', 'com.android.compatibility.common.tradefed.testtype.JarHostTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.AndroidJUnitTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.HostTest:include-annotation:android.platform.test.annotations.Presubmit', '--test-arg', 'com.android.tradefed.testtype.AndroidJUnitTest:exclude-annotation:androidx.test.filters.FlakyTest', '--logcat-on-failure', '--dynamic-config-url='],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}', '--dynamic-config-url='],
+        target_module='wm-presubmit',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        timeout=720)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.tradefed-run-test b/server/site_tests/cheets_CTS_P/control.tradefed-run-test
index 3979f9d..66d97a0 100644
--- a/server/site_tests/cheets_CTS_P/control.tradefed-run-test
+++ b/server/site_tests/cheets_CTS_P/control.tradefed-run-test
@@ -31,9 +31,9 @@
 cts_abi = 'arm'
 cts_module = ''
 cts_retry = 5
-cts_revision = '9.0_r14'  # TODO(ihf): Set this default value from generator.
+cts_revision = None
 cts_test = ''
-cts_timeout = 600
+cts_timeout = 3600
 
 # Pull parameters either from run_suite or test_that.
 if 'args_dict' in vars():
@@ -56,14 +56,15 @@
 # Basic checks for option validity.
 logging.error('Running module %s with test %s on abi %s and revision %s',
               cts_module, cts_test, cts_abi, cts_revision)
-if not cts_abi or not cts_module or not cts_revision or not cts_test:
+if not cts_abi or not cts_module or not cts_test:
     usage_error()
 
 # And we are getting ready for tradefed.
 uri = ('gs://chromeos-arc-images/cts/bundle/P/android-cts-' + cts_revision +
-       '-linux_x86-' + cts_abi + '.zip')
+       '-linux_x86-' + cts_abi + '.zip') if cts_revision else 'LATEST'
 run_template = ['run', 'commandAndExit', 'cts',
                 '--include-filter', cts_module + ' ' + cts_test,
+                '--skip-device-info',
                 '--logcat-on-failure']
 retry_template = ['run', 'commandAndExit', 'retry',
                   '--retry', '{session_id}']
@@ -86,6 +87,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 256000
+PY_VERSION = 3
 DOC = ('Run a test of the Android Compatibility Test Suite (CTS) in the ARC++ '
        'container.')
 
@@ -96,6 +98,7 @@
         'cheets_CTS_P',
         hosts=host_list,
         iterations=1,
+        enable_default_apps=True,
         max_retry=cts_retry,
         needs_push_media=True,
         tag=tag,
@@ -106,6 +109,8 @@
         target_plan=None,
         bundle=cts_abi,
         uri=uri,
+        load_waivers=('#' not in cts_test),  # No waivers for single-test runs
+        retry_manual_tests=('#' in cts_test),  # Retry manual tests on single-test runs
         login_precondition_commands=[
             'lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'
         ],
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAbiOverrideHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAbiOverrideHostTestCases
index 586ea77..48330c2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAbiOverrideHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAbiOverrideHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAbiOverrideHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAccelerationTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAccelerationTestCases
index 026299b..256ea72 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAccelerationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAccelerationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAccessibilityServiceTestCases
index ad4b005..b58e07d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAccessibilityServiceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAccessibilityServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAccessibilityTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAccessibilityTestCases
index 8d2cdea..5a167b5 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAccessibilityTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAccessibilityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAccountManagerTestCases
index 86b4249..c0a1e21 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAccountManagerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAccountManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsActivityManagerDeviceSdk25TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsActivityManagerDeviceSdk25TestCases
index 32b2ad1..8252996 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsActivityManagerDeviceSdk25TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsActivityManagerDeviceSdk25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsActivityManagerDeviceSdk25TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsActivityManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsActivityManagerDeviceTestCases
index da3296b..f8a9f3b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsActivityManagerDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsActivityManagerDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsActivityManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAdminPackageInstallerTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAdminPackageInstallerTestCases
index d41ebe5..9849a4c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAdminPackageInstallerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAdminPackageInstallerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdminPackageInstallerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAdminTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAdminTestCases
index 481ad46..5207d46 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAdminTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAdminTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAlarmClockTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAlarmClockTestCases
index 00ce725..9d22228 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAlarmClockTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAlarmClockTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAlarmClockTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAlarmManagerTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAlarmManagerTestCases
index a2db1d9..014c6ea 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAlarmManagerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAlarmManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAlarmManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidAppTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidAppTestCases
index b109758..47ccb6c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidAppTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidAppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidAppTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestBase27ApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestBase27ApiSignatureTestCases
index 1d186e1..5ec7364 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestBase27ApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestBase27ApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestBase27ApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestMockCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestMockCurrentApiSignatureTestCases
index 30d47a8..4148e7c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestMockCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestMockCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestMockCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestRunnerCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestRunnerCurrentApiSignatureTestCases
index b1a8cc2..ebd9bfc 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestRunnerCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAndroidTestRunnerCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestRunnerCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAnimationTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAnimationTestCases
index de459a8..66f8873 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAnimationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAnimationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAnimationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacy27ApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacy27ApiSignatureTestCases
index e44d4e7..3655450 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacy27ApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacy27ApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacyCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacyCurrentApiSignatureTestCases
index 902132c..f5c87f7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacyCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacyCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacyCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
index 3bf682d..575a17b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAppComponentFactoryTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAppComponentFactoryTestCases
index 7be6d18..3d513f1 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAppComponentFactoryTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAppComponentFactoryTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppComponentFactoryTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAppSecurityHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAppSecurityHostTestCases
index e1e7bec..43169cf 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAppSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAppSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAppTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAppTestCases
index 600b0db..ca97d6b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAppTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAppTestCases.feature.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsAppTestCases.feature.ctshardware
new file mode 100644
index 0000000..cf459af
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAppTestCases.feature.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsAppTestCases.feature.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppTestCases.feature of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsAppTestCases.feature.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsAppTestCases.feature.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppTestCases android.app.cts.SystemFeaturesTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAppTestCases.feature',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAppUsageHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAppUsageHostTestCases
index 9b85173..bffaa2c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAppUsageHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAppUsageHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppUsageHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAppWidgetTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAppWidgetTestCases
index 20e7bb9..20e42cc 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAppWidgetTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAppWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppWidgetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAslrMallocTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAslrMallocTestCases
index 93b1637..85bbbd5 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAslrMallocTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAslrMallocTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAslrMallocTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAssistTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAssistTestCases
index fb1863c..c977527 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAssistTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAssistTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAssistTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAtraceHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAtraceHostTestCases
index 105c9c6..7180f2a 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAtraceHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAtraceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAtraceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsAutoFillServiceTestCases
index 4130e6a..8a7a876 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsAutoFillServiceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsAutoFillServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAutoFillServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsBackgroundRestrictionsTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsBackgroundRestrictionsTestCases
index 8dcd075..bb05029 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsBackgroundRestrictionsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsBackgroundRestrictionsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackgroundRestrictionsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsBackupHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsBackupHostTestCases
index 9ccf782..5fd9144 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsBackupHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsBackupHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackupHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsBackupTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsBackupTestCases
index e574e14..20a3288 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsBackupTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsBackupTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsBatterySavingTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsBatterySavingTestCases
index 9aeee70..13203b0 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsBatterySavingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsBatterySavingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBatterySavingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsBionicTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsBionicTestCases
index f8f69c1..7e53f54 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsBionicTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsBionicTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBionicTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsBluetoothTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsBluetoothTestCases
index 175305e..483cd91 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsBluetoothTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsBluetoothTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBluetoothTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsBootStatsTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsBootStatsTestCases
index 476b2f4..0eaf48a 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsBootStatsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsBootStatsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBootStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsCalendarcommon2TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsCalendarcommon2TestCases
index 532a634..1e7e33c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsCalendarcommon2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsCalendarcommon2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCalendarcommon2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsCameraApi25TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsCameraApi25TestCases
index 1886c28..ce65fd9 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsCameraApi25TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsCameraApi25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCameraApi25TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsCameraTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsCameraTestCases
index 9463624..2c3ad43 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsCameraTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsCameraTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsCameraTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsCameraTestCases.ctshardware
new file mode 100644
index 0000000..28e8054
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsCameraTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsCameraTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86, lighting'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsCameraTestCases.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsCameraTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsCarTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsCarTestCases
index 047ca78..04021ca 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsCarTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsCarTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCarTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsCarrierApiTestCases
index 0e30432..a27e144 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsCarrierApiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsCarrierApiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsColorModeTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsColorModeTestCases
index 247c27d..a156703 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsColorModeTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsColorModeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsColorModeTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsCompilationTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsCompilationTestCases
index 35e381b..0fe4a16 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsCompilationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsCompilationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCompilationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsContactsProviderWipe b/server/site_tests/cheets_CTS_P/control.x86.CtsContactsProviderWipe
index 3aaaf61..76131e0 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsContactsProviderWipe
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsContactsProviderWipe
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContactsProviderWipe of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsContentTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsContentTestCases
index b8be36e..fd98f34 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsContentTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsContentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsCppToolsTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsCppToolsTestCases
index 5b76ecb..ffad398 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsCppToolsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsCppToolsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCppToolsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsCurrentApiSignatureTestCases
index fba72cf..27c9c48 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDatabaseTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDatabaseTestCases
index 60a0d89..28af3e4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDatabaseTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDatabaseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDatabaseTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDebugTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDebugTestCases
index a8b7e49..bb94e3a 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDebugTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases
index a47e533..c3b87e2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-EGL b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-EGL
index f5f5895..bedbcc5 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-EGL
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-EGL
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-EGL of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES2 b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES2
index f32f598..3ab2ecf 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES2
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-GLES2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES3 b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES3
index 71d4868..20c65ee 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES3
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
new file mode 100644
index 0000000..2128fdf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases dEQP-GLES3.functional.prerequisite#*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES31 b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES31
index b5773f0..f48367f 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES31
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-GLES31
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-GLES31 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.api b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.api
index eef9812..1f6a1dd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.api
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.api
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.api of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.binding_model b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.binding_model
index 02a15da..eae8ef4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.binding_model
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.binding_model
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.binding_model of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.clipping b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.clipping
index 5394624..cad274a 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.clipping
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.clipping
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.clipping of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.compute b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.compute
index 6ba15cd..9de5c19 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.compute
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.compute
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.compute of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.device_group b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.device_group
index 41ac680..06a12ab 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.device_group
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.device_group
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.device_group of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.draw b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.draw
index 2d0d05d..9c6664e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.draw
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.draw
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.draw of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.dynamic_state b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.dynamic_state
index 471c6b8..0bd54e3 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.dynamic_state
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.dynamic_state
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.dynamic_state of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.fragment_operations b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.fragment_operations
index 5e9ddf4..d715173 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.fragment_operations
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.fragment_operations
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.fragment_operations of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.geometry b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.geometry
index 143915b..e4df402 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.geometry
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.geometry
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.geometry of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.glsl b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.glsl
index 828150d..4f8a860 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.glsl
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.glsl
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.glsl of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.image b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.image
index 057c0af..176ecc6 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.image
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.image
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.image of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.info b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.info
index 99623b8..379f4c9 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.info
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.info
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.info of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.memory b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.memory
index 224b300..cddbd13 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.memory
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.memory
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.memory of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.multiview b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.multiview
index d00f2fe..32ec3b3 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.multiview
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.multiview
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.multiview of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.pipeline b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.pipeline
index 0748a86..3dbce7d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.pipeline
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.pipeline
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.pipeline of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.protected_memory b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.protected_memory
index c2746fa..1e7dcbd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.protected_memory
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.protected_memory
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.protected_memory of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.query_pool b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.query_pool
index d2a198e..63a1622 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.query_pool
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.query_pool
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.query_pool of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.rasterization b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.rasterization
index 74c16bd..48bfa44 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.rasterization
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.rasterization
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.rasterization of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.renderpass b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.renderpass
index 1f8aacd..7e3f281 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.renderpass
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.renderpass
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.renderpass of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.renderpass2 b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.renderpass2
index 94d12a8..17072eb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.renderpass2
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.renderpass2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.renderpass2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.robustness b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.robustness
index c12662a..94f3183 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.robustness
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.robustness
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.robustness of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.sparse_resources b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.sparse_resources
index f76dd7f..f5d24a4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.sparse_resources
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.sparse_resources
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.sparse_resources of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.spirv_assembly b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.spirv_assembly
index 839d089..6ec8c12 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.spirv_assembly
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.spirv_assembly
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.spirv_assembly of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ssbo b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ssbo
index 2a8ad5c..8fcaf97 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ssbo
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ssbo
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.ssbo of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups
index d9db12f..fabab61 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.32 b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.32
index 660af56..ba29fba 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.32
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.32
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.32 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.64 b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.64
index 0cd717e..a31200d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.64
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.64
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.arithmetic.64 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.b b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.b
index e5ffb17..1d0f964 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.b
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.b
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.b of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.clustered b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.clustered
index 132a669..7b75891 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.clustered
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.clustered
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.clustered of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.quad b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.quad
index 218be41..7170c90 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.quad
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.quad
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.quad of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.s b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.s
index b77f19d..efa72c4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.s
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.s
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.s of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.vote b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.vote
index 706b675..9c1b377 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.vote
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.subgroups.vote
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.subgroups.vote of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.synchronization b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.synchronization
index 49f8a19..0bb21e4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.synchronization
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.synchronization
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.synchronization of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.tessellation b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.tessellation
index 95f7b93..1ecbdf8 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.tessellation
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.tessellation
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.tessellation of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.texture b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.texture
index 1348e95..d8611c2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.texture
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.texture
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.texture of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ubo b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ubo
index 7f29098..4cf46d8 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ubo
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ubo
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.ubo of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.wsi b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.wsi
index a990470..0031c8a 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.wsi
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.wsi
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.wsi of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ycbcr b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ycbcr
index ae0f9ef..ab5aebd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ycbcr
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeqpTestCases.dEQP-VK.ycbcr
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsDeqpTestCases.dEQP-VK.ycbcr of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDeviceIdleHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDeviceIdleHostTestCases
index eae7c37..8cbd326 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDeviceIdleHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDeviceIdleHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDeviceIdleHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDevicePolicyManagerTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDevicePolicyManagerTestCases
index 1714f46..2144edd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDevicePolicyManagerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDevicePolicyManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDexMetadataHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDexMetadataHostTestCases
index 7e49af9..d1c3b8d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDexMetadataHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDexMetadataHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDexMetadataHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDisplayTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDisplayTestCases
index 53507db..318bf62 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDisplayTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDisplayTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDisplayTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDpiTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDpiTestCases
index 586e2b3..af85863 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDpiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDpiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDpiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDpiTestCases2 b/server/site_tests/cheets_CTS_P/control.x86.CtsDpiTestCases2
index eebce1a..30ac9e3 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDpiTestCases2
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDpiTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDpiTestCases2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDreamsTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDreamsTestCases
index 86c6d40..7118dae 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDreamsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDreamsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDreamsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDrmTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDrmTestCases
index 8f3684a..1950578 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDrmTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDrmTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDrmTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDumpsysHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDumpsysHostTestCases
index ed2d6dd..37cbe79 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDumpsysHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDumpsysHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDumpsysHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsDynamicLinkerTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsDynamicLinkerTestCases
index 1a4ef40..8e97baa 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsDynamicLinkerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsDynamicLinkerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDynamicLinkerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsEdiHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsEdiHostTestCases
index 2f0dbec..e7a3244 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsEdiHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsEdiHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsEdiHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsEffectTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsEffectTestCases
index c981ee6..2fbe818 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsEffectTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsEffectTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsEffectTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsExternalServiceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsExternalServiceTestCases
index ab3443e..26ecdbc 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsExternalServiceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsExternalServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExternalServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsExternalSourcesTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsExternalSourcesTestCases
index 8e5719d..a8985b3 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsExternalSourcesTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsExternalSourcesTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExternalSourcesTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsFileSystemTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsFileSystemTestCases
index e5aac8c..a26f4a8 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsFileSystemTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsFileSystemTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsFragmentTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsFragmentTestCases
index 5a05208..10b117c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsFragmentTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsFragmentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsFragmentTestCasesSdk26 b/server/site_tests/cheets_CTS_P/control.x86.CtsFragmentTestCasesSdk26
index 1c758ad..0ab407b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsFragmentTestCasesSdk26
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsFragmentTestCasesSdk26
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCasesSdk26 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsGestureTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsGestureTestCases
index 6e13476..b62d213 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsGestureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsGestureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGestureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsGpuToolsHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsGpuToolsHostTestCases
index eb5cd7f..ace4735 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsGpuToolsHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsGpuToolsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGpuToolsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsGraphicsTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsGraphicsTestCases
index 5ea11f4..39fc3e5 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsGraphicsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsGraphicsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGraphicsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHardwareTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsHardwareTestCases
index 8f3efd1..a3c80d2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHardwareTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHardwareTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHardwareTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHarmfulAppWarningHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsHarmfulAppWarningHostTestCases
index 5f96306..b56ad07 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHarmfulAppWarningHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHarmfulAppWarningHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHarmfulAppWarningHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistApi27TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistApi27TestCases
index 6d19a2e..5a74c53 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistApi27TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistApi27TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistCurrentApiTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistCurrentApiTestCases
index c99ee39..e6a97d7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistCurrentApiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistCurrentApiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistCurrentApiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistDebugClassTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistDebugClassTestCases
index 0c7a30d..6fb53b4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistDebugClassTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiBlacklistDebugClassTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistDebugClassTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchDebugClassTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchDebugClassTestCases
index b1e7527..38e1bb7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchDebugClassTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchDebugClassTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchDebugClassTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchWhitelistTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchWhitelistTestCases
index 08574e9..6c6a513 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchWhitelistTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchWhitelistTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchWhitelistTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchWildcardTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchWildcardTestCases
index be2ad65..42154a2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchWildcardTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHiddenApiKillswitchWildcardTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchWildcardTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHostTzDataTests b/server/site_tests/cheets_CTS_P/control.x86.CtsHostTzDataTests
index 5879984..6c11cc9 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHostTzDataTests
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHostTzDataTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostTzDataTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideNetworkTests b/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideNetworkTests
index c8bab3a..bd92eca 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideNetworkTests
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideNetworkTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideNetworkTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideNumberBlockingTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideNumberBlockingTestCases
index 3a6c123..43b06b4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideNumberBlockingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideNumberBlockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideNumberBlockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideTvTests b/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideTvTests
index 19793d1..d0216e6 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideTvTests
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideTvTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideTvTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideWebViewTests b/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideWebViewTests
index 1091fc2..b3e9130 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideWebViewTests
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsHostsideWebViewTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideWebViewTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsIcuTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsIcuTestCases
index a370161..89ab835 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsIcuTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsIcuTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIcuTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsIncidentHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsIncidentHostTestCases
index bb6695f..506be1b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsIncidentHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsIncidentHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIncidentHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsInlineMockingTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsInlineMockingTestCases
index 4c481df..9e5499b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsInlineMockingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsInlineMockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInlineMockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsInputMethodServiceHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsInputMethodServiceHostTestCases
index 698aeee..256e106 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsInputMethodServiceHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsInputMethodServiceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodServiceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsInputMethodTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsInputMethodTestCases
index bfc0e01..cdb55e6 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsInputMethodTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsInputMethodTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsIntentSignatureTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsIntentSignatureTestCases
index f4e37e9..ae20ea4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsIntentSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsIntentSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIntentSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJankDeviceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJankDeviceTestCases
index a1aae0a..98690c8 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJankDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJankDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJankDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJdwpSecurityHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJdwpSecurityHostTestCases
index 0770881..8ac3712 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJdwpSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJdwpSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJdwpSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJdwpTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJdwpTestCases
index 96c6de8..0ce5088 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJdwpTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJdwpTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJdwpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJniTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJniTestCases
index 85ba664..81af742 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJniTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJniTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJniTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJobSchedulerSharedUidTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJobSchedulerSharedUidTestCases
index 2da40b3..33d8dba 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJobSchedulerSharedUidTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJobSchedulerSharedUidTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJobSchedulerSharedUidTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJobSchedulerTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJobSchedulerTestCases
index df8981a..b2dddfc 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJobSchedulerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJobSchedulerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiAttachingHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiAttachingHostTestCases
index eb25670..b42dd1f 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiAttachingHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiAttachingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiAttachingHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiAttachingTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiAttachingTestCases
index 40b0c21..7b40846 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiAttachingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiAttachingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiAttachingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRedefineClassesHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRedefineClassesHostTestCases
index ddb18ef..43a2785 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRedefineClassesHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRedefineClassesHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRedefineClassesHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1900HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1900HostTestCases
index c4865ea..da8a49d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1900HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1900HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1900HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1901HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1901HostTestCases
index f4a6041..f09633d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1901HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1901HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1901HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1902HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1902HostTestCases
index aca7ad4..a15cde2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1902HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1902HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1902HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1903HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1903HostTestCases
index 97c2ba8..da9ee86 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1903HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1903HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1903HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1904HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1904HostTestCases
index cd65c3b..5b5ec92 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1904HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1904HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1904HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1906HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1906HostTestCases
index 72ba1b0..6ec9fc4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1906HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1906HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1906HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1907HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1907HostTestCases
index e181a07..ebdfc1d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1907HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1907HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1907HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1908HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1908HostTestCases
index 6a22871..c07b2f6 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1908HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1908HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1908HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1909HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1909HostTestCases
index 072d851..1a63148 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1909HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1909HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1909HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1910HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1910HostTestCases
index b428d8a..ffba7d7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1910HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1910HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1910HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1911HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1911HostTestCases
index 7e07522..46c7544 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1911HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1911HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1911HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1912HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1912HostTestCases
index 41a06fd..de41973 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1912HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1912HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1912HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1913HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1913HostTestCases
index 1fe3462..f8f50fd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1913HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1913HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1913HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1914HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1914HostTestCases
index de27ba2..2be1f96 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1914HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1914HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1914HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1915HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1915HostTestCases
index dba939e..bdc75fb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1915HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1915HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1915HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1916HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1916HostTestCases
index 46c7d36..d74dc22 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1916HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1916HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1916HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1917HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1917HostTestCases
index a3f2976..6d7fb49 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1917HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1917HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1917HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1920HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1920HostTestCases
index fcd9146..82a13f7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1920HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1920HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1920HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1921HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1921HostTestCases
index f703ba9..66571ed 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1921HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1921HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1921HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1922HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1922HostTestCases
index ae0af14..f85da63 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1922HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1922HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1922HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1923HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1923HostTestCases
index e98b405..06eff2c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1923HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1923HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1923HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1924HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1924HostTestCases
index 7e6bfd8..4228caa 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1924HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1924HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1924HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1925HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1925HostTestCases
index 074363b..aa10020 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1925HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1925HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1925HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1926HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1926HostTestCases
index 7db5cda..1788620 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1926HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1926HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1926HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1927HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1927HostTestCases
index 90e7bfc..e41b42a 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1927HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1927HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1927HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1928HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1928HostTestCases
index 8611127..0a50fb7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1928HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1928HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1928HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1930HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1930HostTestCases
index da64a22..1e4783f 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1930HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1930HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1930HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1931HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1931HostTestCases
index 33a53e1..ed7c51e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1931HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1931HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1931HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1932HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1932HostTestCases
index 5889d56..07a7ae8 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1932HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1932HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1932HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1933HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1933HostTestCases
index 925fcc0..84bce72 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1933HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1933HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1933HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1934HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1934HostTestCases
index 8754815..646b9d2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1934HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1934HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1934HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1936HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1936HostTestCases
index 9a1f13e..7503677 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1936HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1936HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1936HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1937HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1937HostTestCases
index 4b06e99..89551ea 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1937HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1937HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1937HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1939HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1939HostTestCases
index c00367a..de5d972 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1939HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1939HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1939HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1941HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1941HostTestCases
index fa53cbf..f044ed1 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1941HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1941HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1941HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1942HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1942HostTestCases
index 0e7c905..fdd171f 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1942HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1942HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1942HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1943HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1943HostTestCases
index 69746fa..230adab 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1943HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest1943HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1943HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest902HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest902HostTestCases
index 84c7274..4fca65e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest902HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest902HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest902HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest903HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest903HostTestCases
index dea6a3b..5efeef2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest903HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest903HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest903HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest904HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest904HostTestCases
index 718095a..c6f8294 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest904HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest904HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest904HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest905HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest905HostTestCases
index 9c1f06b..dfa726d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest905HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest905HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest905HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest906HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest906HostTestCases
index 79bdd45..2172bae 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest906HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest906HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest906HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest907HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest907HostTestCases
index 89d985e..2a104ce 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest907HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest907HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest907HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest908HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest908HostTestCases
index b6036b3..3ddac17 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest908HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest908HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest908HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest910HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest910HostTestCases
index fc3c2d8..1b972c0 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest910HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest910HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest910HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest911HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest911HostTestCases
index 5e99414..7c287c5 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest911HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest911HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest911HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest912HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest912HostTestCases
index 5a59fb6..ed778b6 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest912HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest912HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest912HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest913HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest913HostTestCases
index df986c4..94b052e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest913HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest913HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest913HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest914HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest914HostTestCases
index 2f95db1..24b5f09 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest914HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest914HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest914HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest915HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest915HostTestCases
index dfded08..e0589eb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest915HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest915HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest915HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest917HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest917HostTestCases
index f6e54a4..af12778 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest917HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest917HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest917HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest918HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest918HostTestCases
index 1504cec..a440b70 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest918HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest918HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest918HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest919HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest919HostTestCases
index c8ffae3..d15ab31 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest919HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest919HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest919HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest920HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest920HostTestCases
index 9cdcb88..54b042b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest920HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest920HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest920HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest922HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest922HostTestCases
index 12dc046..7b17a54 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest922HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest922HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest922HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest923HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest923HostTestCases
index bacda24..f27ec4d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest923HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest923HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest923HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest924HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest924HostTestCases
index 29388ff..ccd30e3 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest924HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest924HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest924HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest926HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest926HostTestCases
index 682e723..f0b175d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest926HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest926HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest926HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest927HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest927HostTestCases
index 02515ce..821015d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest927HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest927HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest927HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest928HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest928HostTestCases
index 86e1890..bc3a71f 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest928HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest928HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest928HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest930HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest930HostTestCases
index 3c23732..077a4f5 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest930HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest930HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest930HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest931HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest931HostTestCases
index 0f8e50d..46894bb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest931HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest931HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest931HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest932HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest932HostTestCases
index 2d98cda..f1e0313 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest932HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest932HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest932HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest940HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest940HostTestCases
index e4688a0..d75351b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest940HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest940HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest940HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest942HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest942HostTestCases
index dd9376f..58cf05e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest942HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest942HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest942HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest944HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest944HostTestCases
index de46365..ab14a05 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest944HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest944HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest944HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest945HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest945HostTestCases
index 06e9307..dafb9f5 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest945HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest945HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest945HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest947HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest947HostTestCases
index 59bfddf..2082ed4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest947HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest947HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest947HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest951HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest951HostTestCases
index b81d6d8..acd0478 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest951HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest951HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest951HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest982HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest982HostTestCases
index c2c9efb..4f13cb3 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest982HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest982HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest982HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest983HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest983HostTestCases
index 4a2042c..a3a1270 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest983HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest983HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest983HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest984HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest984HostTestCases
index 69a117a..c773cf4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest984HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest984HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest984HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest985HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest985HostTestCases
index 70f2a72..ca4afc1 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest985HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest985HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest985HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest986HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest986HostTestCases
index f841b9b..84f689a 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest986HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest986HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest986HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest988HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest988HostTestCases
index 61f6926..09fffe4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest988HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest988HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest988HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest989HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest989HostTestCases
index 71c3fde..a5843b8 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest989HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest989HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest989HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest990HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest990HostTestCases
index af6df83..6040bb0 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest990HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest990HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest990HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest991HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest991HostTestCases
index 88723a8..557ea13 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest991HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest991HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest991HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest992HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest992HostTestCases
index 6e26082..dca6464 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest992HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest992HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest992HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest993HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest993HostTestCases
index 5dd30d3..3c5d1e7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest993HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest993HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest993HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest994HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest994HostTestCases
index 4bf8b99..f489b2b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest994HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest994HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest994HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest995HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest995HostTestCases
index 7af62a3..8ad957e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest995HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest995HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest995HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest996HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest996HostTestCases
index 7b5898e..5960fa1 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest996HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest996HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest996HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest997HostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest997HostTestCases
index 7bdd660..45b4959 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest997HostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiRunTest997HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest997HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiTaggingHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiTaggingHostTestCases
index 553bbcd..02524c3 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiTaggingHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiTaggingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiTaggingHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiTrackingHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiTrackingHostTestCases
index a5eb086..4cccada 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiTrackingHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsJvmtiTrackingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiTrackingHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsKernelConfigTestCases
index e619c9d..050544c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsKernelConfigTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsKernelConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsKeystoreTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsKeystoreTestCases
index 94ffd46..12ede79 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsKeystoreTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsKeystoreTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsKeystoreTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLeanbackJankTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLeanbackJankTestCases
index 8e9a192..440fcfa 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLeanbackJankTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLeanbackJankTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLeanbackJankTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLegacyNotificationTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLegacyNotificationTestCases
index 97baa9a..0765820 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLegacyNotificationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLegacyNotificationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLegacyNotificationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreFileIOTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreFileIOTestCases
index 14b9ef5..781fa8b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreFileIOTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreFileIOTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreFileIOTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreJsr166TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreJsr166TestCases
index f1abdb2..b98c3f7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreJsr166TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreJsr166TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreJsr166TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreLegacy22TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreLegacy22TestCases
index 73c9334..14375f0 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreLegacy22TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreLegacy22TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreLegacy22TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreOjTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreOjTestCases
index f927be7..0b2f480 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreOjTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreOjTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreOjTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreOkHttpTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreOkHttpTestCases
index 8524ff2..2b21a88 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreOkHttpTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreOkHttpTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreOkHttpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreTestCases
index a9c078d..f7d74f6 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsLibcoreTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreWycheproofBCTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreWycheproofBCTestCases
index eabda56..d052eab 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreWycheproofBCTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreWycheproofBCTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreWycheproofBCTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreWycheproofConscryptTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreWycheproofConscryptTestCases
index 03be32f..2498596 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreWycheproofConscryptTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLibcoreWycheproofConscryptTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreWycheproofConscryptTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLiblogTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLiblogTestCases
index 1678b7d..19fe66b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLiblogTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLiblogTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLiblogTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLocation2TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLocation2TestCases
index 7003342..aedf0ce 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLocation2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLocation2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocation2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLocationTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLocationTestCases
index 61f1f60..23d8fcb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLocationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLocationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsLogdTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsLogdTestCases
index fec7551..17419c7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsLogdTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsLogdTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaBitstreamsTestCases
index 080bc5f..e63d815 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsMediaBitstreamsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaBitstreamsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaHostTestCases
index e394ab3..7e7dca2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsMediaHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaStressTestCases
index 3070eea..6d3206a 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsMediaStressTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaStressTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMediaStressTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaStressTestCases.camera.ctshardware
new file mode 100644
index 0000000..b80c2d9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaStressTestCases.camera.ctshardware
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsMediaStressTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsMediaStressTestCases.camera of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='x86.CtsMediaStressTestCases.camera.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsMediaStressTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases android.mediastress.cts.MediaRecorderStressTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases.camera',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMediaTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaTestCases
index 3d411fd..838e9d6 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsMediaTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMediaTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaTestCases.ctshardware
new file mode 100644
index 0000000..6c3cabf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMediaTestCases.ctshardware
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsMediaTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86, noloopback'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='x86.CtsMediaTestCases.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsMediaTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMidiTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsMidiTestCases
index bc64ef7..111dd2c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsMidiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMidiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMockingDebuggableTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsMockingDebuggableTestCases
index 9e816a8..6837c54 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsMockingDebuggableTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMockingDebuggableTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMockingDebuggableTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMockingTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsMockingTestCases
index 12930f5..1134b5e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsMockingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMonkeyTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsMonkeyTestCases
index 930c272..81ed1eb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsMonkeyTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMonkeyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMonkeyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMultiUserHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsMultiUserHostTestCases
index e5a3117..f6db3a2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsMultiUserHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMultiUserHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMultiUserHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsMultiUserTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsMultiUserTestCases
index 23cacdd..5d9bb1c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsMultiUserTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsMultiUserTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMultiUserTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNNAPITestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNNAPITestCases
index da2eb66..03ddd28 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNNAPITestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNNAPITestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNNAPITestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeHardwareTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeHardwareTestCases
index 9fda64b..a31e944 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeHardwareTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeHardwareTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeHardwareTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaAAudioTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaAAudioTestCases
index f8e8833..3edacdb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaAAudioTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaAAudioTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaAAudioTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaAAudioTestCases.ctshardware
new file mode 100644
index 0000000..cea776e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaAAudioTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsNativeMediaAAudioTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsNativeMediaAAudioTestCases.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsNativeMediaAAudioTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeMediaAAudioTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNativeMediaAAudioTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaSlTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaSlTestCases
index c910770..11931eb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaSlTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaSlTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaSlTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaXaTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaXaTestCases
index 69e160f..ae0862d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaXaTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeMediaXaTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaXaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeNetTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeNetTestCases
index 8c1a934..6f6692bd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNativeNetTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNativeNetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeNetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNdefTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNdefTestCases
index 61315a6..684c46c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNdefTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNdefTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNdefTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigAttributeTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigAttributeTestCases
index f9f8f41..cefbcee 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigAttributeTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigAttributeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigAttributeTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDebugDisabledTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDebugDisabledTestCases
index 9f24fc1..ee3e7bb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDebugDisabledTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDebugDisabledTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDebugDisabledTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDebugEnabledTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDebugEnabledTestCases
index 8710bb3..2cd424b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDebugEnabledTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDebugEnabledTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDebugEnabledTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDomainConfigTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDomainConfigTestCases
index c3d8deb..18def0c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDomainConfigTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigBasicDomainConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDomainConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigCleartextTrafficTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigCleartextTrafficTestCases
index 2b2b90c..96a2f03 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigCleartextTrafficTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigCleartextTrafficTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigCleartextTrafficTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigDownloadManagerTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigDownloadManagerTestCases
index fb58efd..d959ddb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigDownloadManagerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigDownloadManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigDownloadManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigInvalidPinTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigInvalidPinTestCases
index d90f205..b09c705 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigInvalidPinTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigInvalidPinTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigInvalidPinTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigNestedDomainConfigTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigNestedDomainConfigTestCases
index 030aa8f..c700fb7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigNestedDomainConfigTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigNestedDomainConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigNestedDomainConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigPrePCleartextTrafficTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigPrePCleartextTrafficTestCases
index 84a1177..c23eacc 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigPrePCleartextTrafficTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigPrePCleartextTrafficTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigPrePCleartextTrafficTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigResourcesSrcTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigResourcesSrcTestCases
index f475667..a192191 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigResourcesSrcTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecConfigResourcesSrcTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigResourcesSrcTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
index 4fd1a7e..1cf9041 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficFalseTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
index 4e7ce12..f4887cd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficTrueTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
index 1e6eb23..261d225 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCases
index e722125..8a28255 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
@@ -29,7 +30,7 @@
         target_module='CtsNetTestCases',
         target_plan=None,
         bundle='x86',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi', 'android-sh -c \'setprop ctl.start mdnsd\''],
         retry_manual_tests=True,
         warn_on_test_retry=False,
         timeout=3600)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCases.ctshardware
new file mode 100644
index 0000000..e903b5b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsNetTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='x86.CtsNetTestCases.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsNetTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNetTestCases',
+        target_plan=None,
+        bundle='x86',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi', 'android-sh -c \'setprop ctl.start mdnsd\''],
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCasesLegacyApi22 b/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCasesLegacyApi22
index 224aeb4..ef431ce 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCasesLegacyApi22
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCasesLegacyApi22
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesLegacyApi22 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCasesLegacyPermission22 b/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCasesLegacyPermission22
index 35d1717..c1eefbb 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCasesLegacyPermission22
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsNetTestCasesLegacyPermission22
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesLegacyPermission22 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsOmapiTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsOmapiTestCases
index 5e18857..932b35a 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsOmapiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsOmapiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOmapiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGLTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGLTestCases
index 91d5aa3..b1784d4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGLTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGLTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGLTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGlPerf2TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGlPerf2TestCases
index 7d5aeac..fefce8e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGlPerf2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGlPerf2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGlPerf2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGlPerfTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGlPerfTestCases
index 7db60fd..6827c81 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGlPerfTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsOpenGlPerfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsOsHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsOsHostTestCases
index a04f9b7..7afc830 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsOsHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsOsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsOsTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsOsTestCases
index 8dd0135..8fe3ff4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsOsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsOsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsPdfTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsPdfTestCases
index 14f1d99..b678895 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsPdfTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsPdfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPdfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsPerfettoTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsPerfettoTestCases
index 9af493a..e2881a7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsPerfettoTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsPerfettoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsPerfettoTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsPerfettoTestCases.ctshardware
new file mode 100644
index 0000000..961de50
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsPerfettoTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsPerfettoTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsPerfettoTestCases.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsPerfettoTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPerfettoTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsPermission2TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsPermission2TestCases
index 6879ad1..b820b66 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsPermission2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsPermission2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermission2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsPermissionTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsPermissionTestCases
index e96f5c2..6ec8960 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsPermissionTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsPermissionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermissionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsPermissionTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsPermissionTestCases.camera.ctshardware
new file mode 100644
index 0000000..9c9c511
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsPermissionTestCases.camera.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsPermissionTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermissionTestCases.camera of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsPermissionTestCases.camera.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsPermissionTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermissionTestCases android.permission.cts.CameraPermissionTest', '--include-filter', 'CtsPermissionTestCases android.permission.cts.Camera2PermissionTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPermissionTestCases.camera',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsPreference2TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsPreference2TestCases
index e31ed43..a0573d1 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsPreference2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsPreference2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPreference2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsPreferenceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsPreferenceTestCases
index b5221ed..40e153e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsPreferenceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsPreferenceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPreferenceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsPrintTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsPrintTestCases
index e0e0c33..b01cfd2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsPrintTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsPrintTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPrintTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsProtoTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsProtoTestCases
index a5f6300..15b36d6 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsProtoTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsProtoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsProtoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsProviderTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsProviderTestCases
index 4d3e7df..1bc65ac 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsProviderTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsProviderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsRenderscriptLegacyTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsRenderscriptLegacyTestCases
index cfd3270..0c1f7cf 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsRenderscriptLegacyTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsRenderscriptLegacyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRenderscriptLegacyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsRenderscriptTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsRenderscriptTestCases
index 346c951..1e3f28e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsRenderscriptTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsRenderscriptTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRenderscriptTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsRsBlasTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsRsBlasTestCases
index 70ec381..28f5f86 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsRsBlasTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsRsBlasTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRsBlasTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsRsCppTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsRsCppTestCases
index a20877a..3f99c73 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsRsCppTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsRsCppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRsCppTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSampleDeviceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSampleDeviceTestCases
index 7d460dc..461d3c8 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSampleDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSampleDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSampleHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSampleHostTestCases
index a9674e2..fc615ff 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSampleHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSampleHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSaxTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSaxTestCases
index e39d081..fd0a67d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSaxTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSaxTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSaxTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSeccompHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSeccompHostTestCases
index 7110604..5fb83e0 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSeccompHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSeccompHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSeccompHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases1 b/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases1
index 8451197..deac5f4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases1
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases1 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases2 b/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases2
index 12bedfc..4336e98 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases2
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases3 b/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases3
index f0dd6ad..f694b16 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases3
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSecureElementAccessControlTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityBulletinHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityBulletinHostTestCases
index a0ad2c6..ef6a4cd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityBulletinHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityBulletinHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecurityBulletinHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityHostTestCases
index 0d08725..d6103f9 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityTestCases
index 4844e77..1ed05a3 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSecurityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsSecurityTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdk25TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdk25TestCases
index 35cbc3f..38e48c2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdk25TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdk25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk25TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdk27TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdk27TestCases
index 54d7b7f..206ceda 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdk27TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdk27TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk27TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdkCurrentTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdkCurrentTestCases
index 3942432..34eb867 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdkCurrentTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSelinuxTargetSdkCurrentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSensorTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSensorTestCases
index e9ac745..8c472a6 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSensorTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSensorTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSensorTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsSensorTestCases.ctshardware
new file mode 100644
index 0000000..8065329
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSensorTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsSensorTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=12,
+        tag='x86.CtsSensorTestCases.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsSensorTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSensorTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsShortcutHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsShortcutHostTestCases
index b666635..cf95586 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsShortcutHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsShortcutHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsShortcutManagerTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsShortcutManagerTestCases
index 83769b6..457410f 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsShortcutManagerTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsShortcutManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSimRestrictedApisTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSimRestrictedApisTestCases
index ce7a596..bc251a2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSimRestrictedApisTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSimRestrictedApisTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimRestrictedApisTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSimpleCpuTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSimpleCpuTestCases
index 5d163c2..fb0876b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSimpleCpuTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSimpleCpuTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimpleCpuTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSimpleperfTestCases
index 25d48b9..ae35612 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSimpleperfTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSimpleperfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSkQPTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSkQPTestCases
index 119a451..a3ac56d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSkQPTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSkQPTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSkQPTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSliceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSliceTestCases
index 0e2ed08..3a797fd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSliceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSliceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSliceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSpeechTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSpeechTestCases
index e6bcd4d..2b0e3f3 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSpeechTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSpeechTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSpeechTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsStatsdHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsStatsdHostTestCases
index 209e034..361a1ca 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsStatsdHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsStatsdHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsStatsdHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSustainedPerformanceHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSustainedPerformanceHostTestCases
index 59d9e35..0f6575b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSustainedPerformanceHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSustainedPerformanceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSustainedPerformanceHostTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsSustainedPerformanceHostTestCases.ctshardware
new file mode 100644
index 0000000..04e78db
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSustainedPerformanceHostTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsSustainedPerformanceHostTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsSustainedPerformanceHostTestCases.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsSustainedPerformanceHostTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSustainedPerformanceHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSyncAccountAccessOtherCertTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSyncAccountAccessOtherCertTestCases
index 8dc8cc3..2a5dfe0 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSyncAccountAccessOtherCertTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSyncAccountAccessOtherCertTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSyncContentHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSyncContentHostTestCases
index 9b3cebd..20dbaa4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSyncContentHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSyncContentHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncContentHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSyncManagerTestsCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSyncManagerTestsCases
index 9f48d66..2f5e5bd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSyncManagerTestsCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSyncManagerTestsCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncManagerTestsCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSystemApiAnnotationTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSystemApiAnnotationTestCases
index 888e8b2..bfa3821 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSystemApiAnnotationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSystemApiAnnotationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemApiAnnotationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSystemApiSignatureTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSystemApiSignatureTestCases
index c0857dc..06922f7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSystemApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSystemApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSystemIntentTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSystemIntentTestCases
index 04473f3..d05adf1 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSystemIntentTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSystemIntentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemIntentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSystemUiHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSystemUiHostTestCases
index cfe4672..f4c6c46 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSystemUiHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSystemUiHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemUiHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsSystemUiTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsSystemUiTestCases
index fd0de14..d856b3b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsSystemUiTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsSystemUiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemUiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases
index 68183a2..b386155 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases2 b/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases2
index 635cf08..f2777b8 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases2
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases3 b/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases3
index 42e9feb..0c3dba4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases3
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsTelecomTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsTelephony2TestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsTelephony2TestCases
index ad6df3e..0294e93 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsTelephony2TestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsTelephony2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephony2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsTelephonyTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsTelephonyTestCases
index 3f826a9..7b68245 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsTelephonyTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsTelephonyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsTextTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsTextTestCases
index 0bc33e1..f31efa5 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsTextTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsTextTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTextTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsThemeDeviceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsThemeDeviceTestCases
index ed369bf..099b328 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsThemeDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsThemeDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsThemeDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsThemeHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsThemeHostTestCases
index 5be37f0..4fad74e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsThemeHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsThemeHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsThemeHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsToastLegacyTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsToastLegacyTestCases
index b5add10..2d02cc0 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsToastLegacyTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsToastLegacyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsToastLegacyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsToastTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsToastTestCases
index 1ce598a..4219e50 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsToastTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsToastTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsToastTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsTransitionTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsTransitionTestCases
index dd4b2d9..1cb2a7c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsTransitionTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsTransitionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTransitionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsTrustedVoiceHostTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsTrustedVoiceHostTestCases
index b5f09d2..7b048c8 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsTrustedVoiceHostTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsTrustedVoiceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTrustedVoiceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsTvProviderTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsTvProviderTestCases
index 0132313..1e0b1ef 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsTvProviderTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsTvProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTvProviderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsTvTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsTvTestCases
index f915b9a..6608207 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsTvTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsTvTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTvTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsUiAutomationTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsUiAutomationTestCases
index 8e73b9c..b065079 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsUiAutomationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsUiAutomationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiAutomationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsUiDeviceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsUiDeviceTestCases
index e4bfde8..f19f25e 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsUiDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsUiDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsUiRenderingTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsUiRenderingTestCases
index 7900b1f..18af270 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsUiRenderingTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsUiRenderingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiRenderingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsUidIsolationTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsUidIsolationTestCases
index d46d2d5..ba1f8b0 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsUidIsolationTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsUidIsolationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUidIsolationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsUsageStatsTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsUsageStatsTestCases
index 1d01eed..b08d7ca 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsUsageStatsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsUsageStatsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsUsageStatsTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsUsageStatsTestCases.ctshardware
new file mode 100644
index 0000000..365e786
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsUsageStatsTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsUsageStatsTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='x86.CtsUsageStatsTestCases.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsUsageStatsTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsageStatsTestCases',
+        target_plan=None,
+        bundle='x86',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsUsbTests b/server/site_tests/cheets_CTS_P/control.x86.CtsUsbTests
index 5f1a00d..854ab87 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsUsbTests
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsUsbTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsbTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsUtilTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsUtilTestCases
index ee75a48..122bfce 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsUtilTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsUtilTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUtilTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsVideoTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsVideoTestCases
index 974c960..b04e5ec 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsVideoTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsVideoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsViewTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsViewTestCases
index 5b4c3d4..687f43c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsViewTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsViewTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsViewTestCases.ctshardware b/server/site_tests/cheets_CTS_P/control.x86.CtsViewTestCases.ctshardware
new file mode 100644
index 0000000..3af1c28
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsViewTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.CtsViewTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsViewTestCases.ctshardware',
+        test_name='cheets_CTS_P.x86.CtsViewTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='x86',
+        extra_artifacts=['/storage/emulated/0/SurfaceViewSyncTest/'],
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsVmTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsVmTestCases
index a837412..6bd2a5c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsVmTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsVmTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVmTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsVoiceInteractionTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsVoiceInteractionTestCases
index 9a50664..a093e1d 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsVoiceInteractionTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsVoiceInteractionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVoiceInteractionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsVoiceSettingsTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsVoiceSettingsTestCases
index f6f9dcb..d91a130 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsVoiceSettingsTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsVoiceSettingsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVoiceSettingsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsVrTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsVrTestCases
index 56ed2f3..54c7292 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsVrTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsVrTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVrTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsWebkitTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsWebkitTestCases
index 4c11be4..1bb4b56 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsWebkitTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsWebkitTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWebkitTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsWidgetTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsWidgetTestCases
index de61469..b7cbbb9 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsWidgetTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWidgetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -29,6 +30,6 @@
         bundle='x86',
         retry_manual_tests=True,
         warn_on_test_retry=False,
-        timeout=3600)
+        timeout=5400)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsWindowManagerDeviceTestCases
index a9bc33f..24563d7 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsWindowManagerDeviceTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsWindowManagerDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsWrapNoWrapTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsWrapNoWrapTestCases
index 50b5e0f..1aacf58 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsWrapNoWrapTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsWrapNoWrapTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapNoWrapTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapDebugMallocDebugTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapDebugMallocDebugTestCases
index 720b9a0..23327f4 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapDebugMallocDebugTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapDebugMallocDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapDebugMallocDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapDebugTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapDebugTestCases
index dc456b4..5aac5cd 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapDebugTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapNoDebugTestCases b/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapNoDebugTestCases
index 765f345..d9f1941 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapNoDebugTestCases
+++ b/server/site_tests/cheets_CTS_P/control.x86.CtsWrapWrapNoDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapNoDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.cts-system-all.api b/server/site_tests/cheets_CTS_P/control.x86.cts-system-all.api
index 9318094..bbb9dc2 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.cts-system-all.api
+++ b/server/site_tests/cheets_CTS_P/control.x86.cts-system-all.api
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module cts-system-all.api of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases1 b/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases1
index e14d79b..cec324b 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases1
+++ b/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases1 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases2 b/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases2
index 338fe22..c459bc1 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases2
+++ b/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases3 b/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases3
index 9f5fad7..a7ce595 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases3
+++ b/server/site_tests/cheets_CTS_P/control.x86.signed-CtsSecureElementAccessControlTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.tradefed-run-collect-tests-only b/server/site_tests/cheets_CTS_P/control.x86.tradefed-run-collect-tests-only
index 7e773ee..9bffc1c 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.tradefed-run-collect-tests-only
+++ b/server/site_tests/cheets_CTS_P/control.x86.tradefed-run-collect-tests-only
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run all of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_P/control.x86.tradefed-run-collect-tests-only-hardware b/server/site_tests/cheets_CTS_P/control.x86.tradefed-run-collect-tests-only-hardware
new file mode 100644
index 0000000..13fb7f3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/control.x86.tradefed-run-collect-tests-only-hardware
@@ -0,0 +1,35 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_P.x86.tradefed-run-collect-tests-only-hardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module tradefed-run-collect-tests-only-hardware of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_P',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.tradefed-run-collect-tests-only-hardware',
+        test_name='cheets_CTS_P.x86.tradefed-run-collect-tests-only-hardware',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--subplan', 'cts-hardware', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='tradefed-run-collect-tests-only-hardware',
+        target_plan='cts-hardware',
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.vm-tests-tf b/server/site_tests/cheets_CTS_P/control.x86.vm-tests-tf
index a7cea7e..e6f4ac3 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.vm-tests-tf
+++ b/server/site_tests/cheets_CTS_P/control.x86.vm-tests-tf
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module vm-tests-tf of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_P/control.x86.waivers b/server/site_tests/cheets_CTS_P/control.x86.waivers
index c9e9056..a2eec32 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.waivers
+++ b/server/site_tests/cheets_CTS_P/control.x86.waivers
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -6,12 +6,13 @@
 
 AUTHOR = 'ARC++ Team'
 NAME = 'cheets_CTS_P.x86.waivers'
-ATTRIBUTES = 'suite:cts_P, suite:cts'
+ATTRIBUTES = 'suite:cts_P, suite:cts, suite:cts-hardware'
 DEPENDENCIES = 'arc, cts_abi_x86'
 JOB_RETRIES = 1
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -30,7 +31,7 @@
         bundle='x86',
         retry_manual_tests=True,
         warn_on_test_retry=False,
-        uri='gs://chromeos-partner-gts/android-cts-6970114-linux_x86-x86.zip',
+        uri='DEV_MOBLAB',
         timeout=7200)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/control.x86.waivers-collect-tests-only b/server/site_tests/cheets_CTS_P/control.x86.waivers-collect-tests-only
index dac738f..b19e929 100644
--- a/server/site_tests/cheets_CTS_P/control.x86.waivers-collect-tests-only
+++ b/server/site_tests/cheets_CTS_P/control.x86.waivers-collect-tests-only
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -6,12 +6,13 @@
 
 AUTHOR = 'ARC++ Team'
 NAME = 'cheets_CTS_P.x86.waivers-collect-tests-only'
-ATTRIBUTES = 'suite:cts_P, suite:cts'
+ATTRIBUTES = 'suite:cts_P, suite:cts, suite:cts-hardware'
 DEPENDENCIES = 'arc, cts_abi_x86'
 JOB_RETRIES = 1
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
@@ -32,7 +33,7 @@
         bundle='x86',
         retry_manual_tests=True,
         warn_on_test_retry=False,
-        uri='gs://chromeos-partner-gts/android-cts-6970114-linux_x86-x86.zip',
+        uri='DEV_MOBLAB',
         timeout=360)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_P/generate_controlfiles.py b/server/site_tests/cheets_CTS_P/generate_controlfiles.py
index d26aeac..8c34988 100755
--- a/server/site_tests/cheets_CTS_P/generate_controlfiles.py
+++ b/server/site_tests/cheets_CTS_P/generate_controlfiles.py
@@ -1,4 +1,5 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/cheets_CTS_P/manual_tests/camera_illumination.yaml b/server/site_tests/cheets_CTS_P/manual_tests/camera_illumination.yaml
index bde511c..e564a26 100644
--- a/server/site_tests/cheets_CTS_P/manual_tests/camera_illumination.yaml
+++ b/server/site_tests/cheets_CTS_P/manual_tests/camera_illumination.yaml
@@ -1,9 +1,12 @@
 android.hardware.camera2.cts.AllocationTest#testAllocationFromCameraFlexibleYuv: [reef]
+android.hardware.camera2.cts.CaptureRequestTest#testAntiBandingModes: [strongbad, strongbad-kernelnext]
 android.hardware.camera2.cts.CaptureRequestTest#testEdgeModeControl: [nautilus]
 android.hardware.camera2.cts.CaptureRequestTest#testNoiseReductionModeControl: [nautilus]
+android.hardware.camera2.cts.ImageReaderTest#testFlexibleYuv: [strongbad]
+android.hardware.camera2.cts.ImageReaderTest#testRepeatingJpeg: [strongbad]
 android.hardware.camera2.cts.ImageReaderTest#testYuvAndJpeg: [coral]
 android.hardware.camera2.cts.RobustnessTest#testMandatoryOutputCombinations: [coral]
-android.hardware.camera2.cts.StillCaptureTest#testAeCompensation: [kukui]
+android.hardware.camera2.cts.StillCaptureTest#testAeCompensation: [kukui, strongbad, strongbad-kernelnext]
 android.hardware.camera2.cts.StillCaptureTest#testAePrecaptureTriggerCancelJpegCapture: [scarlet]
 android.hardware.camera2.cts.StillCaptureTest#testAllocateBitmap: [scarlet]
 android.hardware.camera2.cts.StillCaptureTest#testJpegExif: [coral]
diff --git a/server/site_tests/cheets_CTS_P/notest_modules/notest_modules.yaml b/server/site_tests/cheets_CTS_P/notest_modules/notest_modules.yaml
index 5cb1ded..2090340 100644
--- a/server/site_tests/cheets_CTS_P/notest_modules/notest_modules.yaml
+++ b/server/site_tests/cheets_CTS_P/notest_modules/notest_modules.yaml
@@ -1,8 +1,10 @@
-CtsJvmtiAttachingTestCases: [nativebridge]
-CtsInlineMockingTestCases: [nativebridge]
-CtsHiddenApiKillswitchDebugClassTestCases: [nativebridge]
-CtsHiddenApiKillswitchWhitelistTestCases: [nativebridge]
-CtsHiddenApiKillswitchWildcardTestCases: [nativebridge]
+CtsJvmtiAttachingTestCases: [binarytranslated]
+CtsInlineMockingTestCases: [binarytranslated]
+CtsHiddenApiKillswitchDebugClassTestCases: [binarytranslated]
+CtsHiddenApiKillswitchWhitelistTestCases: [binarytranslated]
+CtsHiddenApiKillswitchWildcardTestCases: [binarytranslated]
 CtsLibcoreJavaUtilCollectionsTestCases: [all]
-CtsSliceTestCases: [nativebridge]
-CtsSystemApiAnnotationTestCases: [nativebridge]
+CtsSecurityBulletinHostTestCases: [all]
+CtsSliceTestCases: [binarytranslated]
+CtsSyncContentHostTestCases: [all]
+CtsSystemApiAnnotationTestCases: [binarytranslated]
diff --git a/server/site_tests/cheets_CTS_P/subplans/cts-hardware.xml b/server/site_tests/cheets_CTS_P/subplans/cts-hardware.xml
new file mode 100644
index 0000000..1cbb34d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_P/subplans/cts-hardware.xml
@@ -0,0 +1,17 @@
+<?xml version='1.0' encoding='UTF-8' standalone='no' ?>
+<SubPlan version="2.0">
+  <Entry include="CtsAppTestCases android.app.cts.SystemFeaturesTest" />
+  <Entry include="CtsCameraTestCases" />
+  <Entry include="CtsDeqpTestCases dEQP-GLES3.functional.prerequisite#*" />
+  <Entry include="CtsMediaStressTestCases android.mediastress.cts.MediaRecorderStressTest" />
+  <Entry include="CtsMediaTestCases" />
+  <Entry include="CtsNativeMediaAAudioTestCases" />
+  <Entry include="CtsNetTestCases" />
+  <Entry include="CtsPerfettoTestCases" />
+  <Entry include="CtsPermissionTestCases android.permission.cts.CameraPermissionTest" />
+  <Entry include="CtsPermissionTestCases android.permission.cts.Camera2PermissionTest" />
+  <Entry include="CtsSensorTestCases" />
+  <Entry include="CtsSustainedPerformanceHostTestCases" />
+  <Entry include="CtsUsageStatsTestCases" />
+  <Entry include="CtsViewTestCases" />
+</SubPlan>
diff --git a/server/site_tests/cheets_CTS_P/subplans/waivers.xml b/server/site_tests/cheets_CTS_P/subplans/waivers.xml
index 28a037f..38fdc8f 100644
--- a/server/site_tests/cheets_CTS_P/subplans/waivers.xml
+++ b/server/site_tests/cheets_CTS_P/subplans/waivers.xml
@@ -6,7 +6,6 @@
     is incremented. Otherwise the computation for the canonical list
     of test cases is confused. See b/151779432.
   -->
-  <Entry include="CtsCameraTestCases android.hardware.camera2.cts.CaptureRequestTest#testFlashControl" />
-  <Entry include="CtsGraphicsTestCases android.graphics.cts.BitmapTest#testHardwareBitmapNotLeaking" />
-  <Entry include="CtsGraphicsTestCases android.graphics.cts.BitmapTest#testDrawingHardwareBitmapNotLeaking" />
+  <Entry include="CtsBionicTestCases unistd#exec_argv0_null" />
+  <Entry include="CtsBionicTestCases unistd_nofortify#exec_argv0_null" />
 </SubPlan>
diff --git a/server/site_tests/cheets_CTS_R/cheets_CTS_R.py b/server/site_tests/cheets_CTS_R/cheets_CTS_R.py
index 6a39403..7370f81 100644
--- a/server/site_tests/cheets_CTS_R/cheets_CTS_R.py
+++ b/server/site_tests/cheets_CTS_R/cheets_CTS_R.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -16,6 +17,10 @@
 import os
 
 from autotest_lib.server import utils
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import hosts
+from autotest_lib.server import utils
+from autotest_lib.server.cros import camerabox_utils
 from autotest_lib.server.cros.tradefed import tradefed_test
 
 # Maximum default time allowed for each individual CTS module.
@@ -23,25 +28,34 @@
 
 # Public download locations for android cts bundles.
 _PUBLIC_CTS = 'https://dl.google.com/dl/android/cts/'
-_CTS_URI = {
-    'arm': _PUBLIC_CTS + 'android-cts-11_r2-linux_x86-arm.zip',
-    'x86': _PUBLIC_CTS + 'android-cts-11_r2-linux_x86-x86.zip',
+_INTERNAL_CTS = 'gs://chromeos-arc-images/cts/bundle/R/'
+_PARTNER_CTS = 'gs://chromeos-partner-gts/R/'
+_OFFICIAL_ZIP_NAME = 'android-cts-11_r8-linux_x86-%s.zip'
+_PREVIEW_ZIP_NAME = 'android-cts-8654967-linux_x86-%s.zip'
+_BUNDLE_MAP = {
+        (None, 'arm'): _PUBLIC_CTS + _OFFICIAL_ZIP_NAME % 'arm',
+        (None, 'x86'): _PUBLIC_CTS + _OFFICIAL_ZIP_NAME % 'x86',
+        ('DEV_MOBLAB', 'arm'): _PARTNER_CTS + _PREVIEW_ZIP_NAME % 'arm',
+        ('DEV_MOBLAB', 'x86'): _PARTNER_CTS + _PREVIEW_ZIP_NAME % 'x86',
+        ('LATEST', 'arm'): _INTERNAL_CTS + _OFFICIAL_ZIP_NAME % 'arm',
+        ('LATEST', 'x86'): _INTERNAL_CTS + _OFFICIAL_ZIP_NAME % 'x86',
+        ('DEV', 'arm'): _INTERNAL_CTS + _PREVIEW_ZIP_NAME % 'arm',
+        ('DEV', 'x86'): _INTERNAL_CTS + _PREVIEW_ZIP_NAME % 'x86',
+        ('DEV_WAIVER', 'arm'): _INTERNAL_CTS + _PREVIEW_ZIP_NAME % 'arm',
+        ('DEV_WAIVER', 'x86'): _INTERNAL_CTS + _PREVIEW_ZIP_NAME % 'x86',
 }
 _CTS_MEDIA_URI = _PUBLIC_CTS + 'android-cts-media-1.5.zip'
 _CTS_MEDIA_LOCALPATH = '/tmp/android-cts-media'
 
-# Internal uprev for all CTS modules.
-_INTERNAL_CTS = 'gs://chromeos-arc-images/cts/bundle/R/'
-_CTS_LATEST_URI = {
-        'arm': _INTERNAL_CTS + 'android-cts-7050651-linux_x86-arm.zip',
-        'x86': _INTERNAL_CTS + 'android-cts-7050651-linux_x86-x86.zip',
-}
-
 
 class cheets_CTS_R(tradefed_test.TradefedTest):
     """Sets up tradefed to run CTS tests."""
     version = 1
 
+    _SCENE_URI = (
+            'https://storage.googleapis.com/chromiumos-test-assets-public'
+            '/camerabox/cts_portrait_scene.jpg')
+
     def _tradefed_retry_command(self, template, session_id):
         """Build tradefed 'retry' command from template."""
         cmd = []
@@ -58,11 +72,11 @@
             cmd.append('--log-level-display=DEBUG')
         return cmd
 
-    def _get_default_bundle_url(self, bundle):
-        return _CTS_URI[bundle]
-
-    def _get_latest_bundle_url(self, bundle):
-        return _CTS_LATEST_URI[bundle]
+    def _get_bundle_url(self, uri, bundle):
+        if uri and (uri.startswith('http') or uri.startswith('gs')):
+            return uri
+        else:
+            return _BUNDLE_MAP[(uri, bundle)]
 
     def _get_tradefed_base_dir(self):
         return 'android-cts'
@@ -70,6 +84,67 @@
     def _tradefed_cmd_path(self):
         return os.path.join(self._repository, 'tools', 'cts-tradefed')
 
+    def initialize_camerabox(self, camera_facing, cmdline_args):
+        """Configure DUT and chart running in camerabox environment.
+
+        @param camera_facing: facing of the DUT camera ('front' or 'back').
+        @param cmdline_args: autoserv args, used to locate the chart tablet.
+        """
+        chart_address = camerabox_utils.get_chart_address(
+            [h.hostname for h in self._hosts], cmdline_args)
+        if chart_address is None:
+            raise error.TestFail(
+                'Error: missing option --args="chart=<CHART IP>"')
+        chart_hosts = [hosts.create_host(ip) for ip in chart_address]
+
+        self.chart_fixtures = [
+            camerabox_utils.ChartFixture(h, self._SCENE_URI)
+            for h in chart_hosts
+        ]
+        self.dut_fixtures = [
+            camerabox_utils.DUTFixture(self, h, camera_facing)
+            for h in self._hosts
+        ]
+
+        for chart in self.chart_fixtures:
+            chart.initialize()
+
+        for dut in self.dut_fixtures:
+            dut.log_camera_scene()
+            dut.initialize()
+
+        for host in self._hosts:
+            host.run('cras_test_client --mute 1')
+
+    def initialize(self,
+                   camera_facing=None,
+                   bundle=None,
+                   uri=None,
+                   host=None,
+                   hosts=None,
+                   max_retry=None,
+                   load_waivers=True,
+                   retry_manual_tests=False,
+                   warn_on_test_retry=True,
+                   cmdline_args=None,
+                   hard_reboot_on_failure=False,
+                   use_jdk9=False,
+                   use_old_adb=False):
+        super(cheets_CTS_R,
+              self).initialize(bundle=bundle,
+                               uri=uri,
+                               host=host,
+                               hosts=hosts,
+                               max_retry=max_retry,
+                               load_waivers=load_waivers,
+                               retry_manual_tests=retry_manual_tests,
+                               warn_on_test_retry=warn_on_test_retry,
+                               hard_reboot_on_failure=hard_reboot_on_failure,
+                               use_jdk9=use_jdk9,
+                               use_old_adb=use_old_adb)
+        if camera_facing:
+            self.initialize_camerabox(camera_facing, cmdline_args)
+
     def run_once(self,
                  test_name,
                  run_template,
@@ -77,6 +152,7 @@
                  target_module=None,
                  target_plan=None,
                  needs_push_media=False,
+                 use_helpers=False,
                  enable_default_apps=False,
                  executable_test_count=None,
                  bundle=None,
@@ -98,6 +174,7 @@
         @param target_module: the name of test module to run.
         @param target_plan: the name of the test plan to run.
         @param needs_push_media: need to push test media streams.
+        @param use_helpers: copy interaction helpers from the DUT.
         @param executable_test_count: the known number of tests in the run
         @param bundle: the type of the CTS bundle: 'arm' or 'x86'
         @param precondition_commands: a list of scripts to be run on the
@@ -116,9 +193,26 @@
                 media_asset=tradefed_test.MediaAsset(
                         _CTS_MEDIA_URI if needs_push_media else None,
                         _CTS_MEDIA_LOCALPATH),
+                use_helpers=use_helpers,
                 enable_default_apps=enable_default_apps,
                 executable_test_count=executable_test_count,
                 bundle=bundle,
-                cts_uri=_CTS_URI,
                 login_precondition_commands=login_precondition_commands,
                 precondition_commands=precondition_commands)
+
+    def cleanup_camerabox(self):
+        """Cleanup configuration on DUT and chart tablet for running in
+
+        camerabox environment.
+        """
+        for dut in self.dut_fixtures:
+            dut.cleanup()
+
+        for chart in self.chart_fixtures:
+            chart.cleanup()
+
+    def cleanup(self):
+        if hasattr(self, 'dut_fixtures'):
+            self.cleanup_camerabox()
+
+        super(cheets_CTS_R, self).cleanup()
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAbiOverrideHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAbiOverrideHost
deleted file mode 100644
index f9452c3..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAbiOverrideHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAbiOverrideHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAbiOverrideHostTestCases, CtsAbiOverrideHostTestCases[instant], CtsAbiOverrideHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAbiOverrideHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAbiOverrideHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAbiOverrideHostTestCases', '--include-filter', 'CtsAbiOverrideHostTestCases[instant]', '--include-filter', 'CtsAbiOverrideHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAbiOverrideHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAcceleration b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAcceleration
deleted file mode 100644
index 73dfa13..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAcceleration
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAcceleration'
-ATTRIBUTES = 'suite:arc-cts-r, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccelerationTestCases, CtsAccelerationTestCases[instant], CtsAccelerationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='11_r3.arm.CtsAcceleration',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAcceleration',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccelerationTestCases', '--include-filter', 'CtsAccelerationTestCases[instant]', '--include-filter', 'CtsAccelerationTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAcceleration',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAccessibility b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAccessibility
deleted file mode 100644
index cf56278..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAccessibility
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAccessibility'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccessibilityServiceSdk29TestCases, CtsAccessibilityServiceSdk29TestCases[instant], CtsAccessibilityServiceSdk29TestCases[secondary_user], CtsAccessibilityServiceTestCases, CtsAccessibilityServiceTestCases[instant], CtsAccessibilityTestCases, CtsAccessibilityTestCases[instant], CtsAccessibilityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAccessibility',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAccessibility',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[instant]', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[secondary_user]', '--include-filter', 'CtsAccessibilityServiceTestCases', '--include-filter', 'CtsAccessibilityServiceTestCases[instant]', '--include-filter', 'CtsAccessibilityTestCases', '--include-filter', 'CtsAccessibilityTestCases[instant]', '--include-filter', 'CtsAccessibilityTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAccessibility',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAccountManager b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAccountManager
deleted file mode 100644
index d0357ff..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAccountManager
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAccountManager'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccountManagerTestCases, CtsAccountManagerTestCases[instant], CtsAccountManagerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAccountManager',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAccountManager',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccountManagerTestCases', '--include-filter', 'CtsAccountManagerTestCases[instant]', '--include-filter', 'CtsAccountManagerTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAccountManager',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAccountsHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAccountsHost
deleted file mode 100644
index f1ed03b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAccountsHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAccountsHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccountsHostTestCases, CtsAccountsHostTestCases[instant], CtsAccountsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAccountsHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAccountsHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccountsHostTestCases', '--include-filter', 'CtsAccountsHostTestCases[instant]', '--include-filter', 'CtsAccountsHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAccountsHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsActivityManagerBackgroundActivity b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsActivityManagerBackgroundActivity
deleted file mode 100644
index 567fe50..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsActivityManagerBackgroundActivity
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsActivityManagerBackgroundActivity'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsActivityManagerBackgroundActivityTestCases, CtsActivityManagerBackgroundActivityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsActivityManagerBackgroundActivity',
-        test_name='cheets_CTS_R.11_r3.arm.CtsActivityManagerBackgroundActivity',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsActivityManagerBackgroundActivity',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAdb b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAdb
deleted file mode 100644
index b8182e0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAdb
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAdb'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAdbHostTestCases, CtsAdbHostTestCases[secondary_user], CtsAdbManagerHostTestCases, CtsAdbManagerHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAdb',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAdb',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdbHostTestCases', '--include-filter', 'CtsAdbHostTestCases[secondary_user]', '--include-filter', 'CtsAdbManagerHostTestCases', '--include-filter', 'CtsAdbManagerHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAdb',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAdmin b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAdmin
deleted file mode 100644
index 4120f29..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAdmin
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAdmin'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAdmin',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAdmin',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAdmin',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAlarmManager b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAlarmManager
deleted file mode 100644
index 79c9759..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAlarmManager
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAlarmManager'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAlarmManagerTestCases, CtsAlarmManagerTestCases[instant], CtsAlarmManagerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAlarmManager',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAlarmManager',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAlarmManagerTestCases', '--include-filter', 'CtsAlarmManagerTestCases[instant]', '--include-filter', 'CtsAlarmManagerTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAlarmManager',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAndroid b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAndroid
deleted file mode 100644
index 9217fdc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAndroid
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAndroid'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAndroidAppTestCases, CtsAndroidAppTestCases[instant], CtsAndroidAppTestCases[secondary_user], CtsAndroidTestBase28ApiSignatureTestCases, CtsAndroidTestBase28ApiSignatureTestCases[instant], CtsAndroidTestBase28ApiSignatureTestCases[secondary_user], CtsAndroidTestBaseCurrentApiSignatureTestCases, CtsAndroidTestBaseCurrentApiSignatureTestCases[instant], CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases[instant], CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestRunnerCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant], CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAndroid',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAndroid',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidAppTestCases[instant]', '--include-filter', 'CtsAndroidAppTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAndroid',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=28800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAngleIntegrationHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAngleIntegrationHost
deleted file mode 100644
index 45ca2ac..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAngleIntegrationHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAngleIntegrationHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAngleIntegrationHostTestCases, CtsAngleIntegrationHostTestCases[instant], CtsAngleIntegrationHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAngleIntegrationHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAngleIntegrationHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAngleIntegrationHostTestCases', '--include-filter', 'CtsAngleIntegrationHostTestCases[instant]', '--include-filter', 'CtsAngleIntegrationHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAngleIntegrationHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAnimation b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAnimation
deleted file mode 100644
index ca01acc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAnimation
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAnimation'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAnimationTestCases, CtsAnimationTestCases[instant], CtsAnimationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAnimation',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAnimation',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAnimationTestCases', '--include-filter', 'CtsAnimationTestCases[instant]', '--include-filter', 'CtsAnimationTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAnimation',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsApacheHttpLegacy b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsApacheHttpLegacy
deleted file mode 100644
index ada33079..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsApacheHttpLegacy
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsApacheHttpLegacy'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacy27ApiSignatureTestCases[instant], CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant], CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsApacheHttpLegacy',
-        test_name='cheets_CTS_R.11_r3.arm.CtsApacheHttpLegacy',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsApacheHttpLegacy',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsApex b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsApex
deleted file mode 100644
index 001cddc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsApex
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsApex'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsApexTestCases, CtsApexTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsApex',
-        test_name='cheets_CTS_R.11_r3.arm.CtsApex',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApexTestCases', '--include-filter', 'CtsApexTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsApex',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsApp b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsApp
deleted file mode 100644
index b4a52ee..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsApp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsApp'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAppBindingHostTestCases, CtsAppBindingHostTestCases[secondary_user], CtsAppCompatHostTestCases, CtsAppCompatHostTestCases[instant], CtsAppCompatHostTestCases[secondary_user], CtsAppComponentFactoryTestCases, CtsAppComponentFactoryTestCases[instant], CtsAppComponentFactoryTestCases[secondary_user], CtsAppEnumerationTestCases, CtsAppEnumerationTestCases[secondary_user], CtsAppExitTestCases, CtsAppExitTestCases[instant], CtsAppExitTestCases[secondary_user], CtsAppIntegrityDeviceTestCases, CtsAppOpsTestCases, CtsAppOpsTestCases[instant], CtsAppOpsTestCases[secondary_user], CtsAppPredictionServiceTestCases, CtsAppPredictionServiceTestCases[secondary_user], CtsAppSecurityHostTestCases, CtsAppSecurityHostTestCases[secondary_user], CtsAppTestCases, CtsAppTestCases[instant], CtsAppTestCases[secondary_user], CtsAppUsageHostTestCases, CtsAppUsageHostTestCases[instant], CtsAppUsageHostTestCases[secondary_user], CtsAppWidgetTestCases, CtsAppWidgetTestCases[instant], CtsAppWidgetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        enable_default_apps=True,
-        tag='11_r3.arm.CtsApp',
-        test_name='cheets_CTS_R.11_r3.arm.CtsApp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppBindingHostTestCases', '--include-filter', 'CtsAppBindingHostTestCases[secondary_user]', '--include-filter', 'CtsAppCompatHostTestCases', '--include-filter', 'CtsAppCompatHostTestCases[instant]', '--include-filter', 'CtsAppCompatHostTestCases[secondary_user]', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppComponentFactoryTestCases[instant]', '--include-filter', 'CtsAppComponentFactoryTestCases[secondary_user]', '--include-filter', 'CtsAppEnumerationTestCases', '--include-filter', 'CtsAppEnumerationTestCases[secondary_user]', '--include-filter', 'CtsAppExitTestCases', '--include-filter', 'CtsAppExitTestCases[instant]', '--include-filter', 'CtsAppExitTestCases[secondary_user]', '--include-filter', 'CtsAppIntegrityDeviceTestCases', '--include-filter', 'CtsAppOpsTestCases', '--include-filter', 'CtsAppOpsTestCases[instant]', '--include-filter', 'CtsAppOpsTestCases[secondary_user]', '--include-filter', 'CtsAppPredictionServiceTestCases', '--include-filter', 'CtsAppPredictionServiceTestCases[secondary_user]', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppTestCases[instant]', '--include-filter', 'CtsAppTestCases[secondary_user]', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppUsageHostTestCases[instant]', '--include-filter', 'CtsAppUsageHostTestCases[secondary_user]', '--include-filter', 'CtsAppWidgetTestCases', '--include-filter', 'CtsAppWidgetTestCases[instant]', '--include-filter', 'CtsAppWidgetTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsApp',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=55800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAslrMalloc b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAslrMalloc
deleted file mode 100644
index 2d9d3b6..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAslrMalloc
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAslrMalloc'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAslrMallocTestCases, CtsAslrMallocTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAslrMalloc',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAslrMalloc',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAslrMallocTestCases', '--include-filter', 'CtsAslrMallocTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAslrMalloc',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAssist b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAssist
deleted file mode 100644
index 3af23b8..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAssist
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAssist'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAssistTestCases, CtsAssistTestCases[instant], CtsAssistTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAssist',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAssist',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAssistTestCases', '--include-filter', 'CtsAssistTestCases[instant]', '--include-filter', 'CtsAssistTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAssist',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAtomicInstall b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAtomicInstall
deleted file mode 100644
index f82f68b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAtomicInstall
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAtomicInstall'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAtomicInstallTestCases, CtsAtomicInstallTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAtomicInstall',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAtomicInstall',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAtomicInstallTestCases', '--include-filter', 'CtsAtomicInstallTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAtomicInstall',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAtraceHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAtraceHost
deleted file mode 100644
index 1f12509..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAtraceHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAtraceHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAtraceHostTestCases, CtsAtraceHostTestCases[instant], CtsAtraceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAtraceHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAtraceHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAtraceHostTestCases', '--include-filter', 'CtsAtraceHostTestCases[instant]', '--include-filter', 'CtsAtraceHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAtraceHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAttentionServiceDevice b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAttentionServiceDevice
deleted file mode 100644
index 0b50fcd..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAttentionServiceDevice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAttentionServiceDevice'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAttentionServiceDeviceTestCases, CtsAttentionServiceDeviceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAttentionServiceDevice',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAttentionServiceDevice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAttentionServiceDeviceTestCases', '--include-filter', 'CtsAttentionServiceDeviceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAttentionServiceDevice',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAutoFillService b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAutoFillService
deleted file mode 100644
index f9a9e6c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsAutoFillService
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsAutoFillService'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAutoFillServiceTestCases, CtsAutoFillServiceTestCases[instant], CtsAutoFillServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsAutoFillService',
-        test_name='cheets_CTS_R.11_r3.arm.CtsAutoFillService',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAutoFillServiceTestCases', '--include-filter', 'CtsAutoFillServiceTestCases[instant]', '--include-filter', 'CtsAutoFillServiceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAutoFillService',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBackgroundRestrictions b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBackgroundRestrictions
deleted file mode 100644
index dffca18..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBackgroundRestrictions
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsBackgroundRestrictions'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBackgroundRestrictionsTestCases, CtsBackgroundRestrictionsTestCases[instant], CtsBackgroundRestrictionsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsBackgroundRestrictions',
-        test_name='cheets_CTS_R.11_r3.arm.CtsBackgroundRestrictions',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackgroundRestrictionsTestCases', '--include-filter', 'CtsBackgroundRestrictionsTestCases[instant]', '--include-filter', 'CtsBackgroundRestrictionsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBackgroundRestrictions',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBackup b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBackup
deleted file mode 100644
index 1bd440e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBackup
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsBackup'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBackupHostTestCases, CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsBackup',
-        test_name='cheets_CTS_R.11_r3.arm.CtsBackup',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBackup',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBatterySaving b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBatterySaving
deleted file mode 100644
index a8a9cc0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBatterySaving
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsBatterySaving'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBatterySavingTestCases, CtsBatterySavingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsBatterySaving',
-        test_name='cheets_CTS_R.11_r3.arm.CtsBatterySaving',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBatterySavingTestCases', '--include-filter', 'CtsBatterySavingTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBatterySaving',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBionic b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBionic
deleted file mode 100644
index 55c377f..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBionic
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsBionic'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBionicAppTestCases, CtsBionicAppTestCases[instant], CtsBionicAppTestCases[secondary_user], CtsBionicTestCases, CtsBionicTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsBionic',
-        test_name='cheets_CTS_R.11_r3.arm.CtsBionic',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBionicAppTestCases', '--include-filter', 'CtsBionicAppTestCases[instant]', '--include-filter', 'CtsBionicAppTestCases[secondary_user]', '--include-filter', 'CtsBionicTestCases', '--include-filter', 'CtsBionicTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBionic',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBlobStore b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBlobStore
deleted file mode 100644
index c40cac4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBlobStore
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsBlobStore'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBlobStoreHostTestCases, CtsBlobStoreHostTestCases[secondary_user], CtsBlobStoreHostTestHelper, CtsBlobStoreHostTestHelper[secondary_user], CtsBlobStoreTestCases, CtsBlobStoreTestCases[secondary_user], CtsBlobStoreTestHelper, CtsBlobStoreTestHelperDiffSig, CtsBlobStoreTestHelperDiffSig2, CtsBlobStoreTestHelperDiffSig2[secondary_user], CtsBlobStoreTestHelperDiffSig[secondary_user], CtsBlobStoreTestHelper[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsBlobStore',
-        test_name='cheets_CTS_R.11_r3.arm.CtsBlobStore',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBlobStoreHostTestCases', '--include-filter', 'CtsBlobStoreHostTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreHostTestHelper', '--include-filter', 'CtsBlobStoreHostTestHelper[secondary_user]', '--include-filter', 'CtsBlobStoreTestCases', '--include-filter', 'CtsBlobStoreTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper', '--include-filter', 'CtsBlobStoreTestHelperDiffSig', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelperDiffSig[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBlobStore',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=23400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBluetooth b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBluetooth
deleted file mode 100644
index 8e0d9ed..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBluetooth
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsBluetooth'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBluetoothTestCases, CtsBluetoothTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsBluetooth',
-        test_name='cheets_CTS_R.11_r3.arm.CtsBluetooth',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBluetoothTestCases', '--include-filter', 'CtsBluetoothTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBluetooth',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBootStats b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBootStats
deleted file mode 100644
index 50d6fc8..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsBootStats
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsBootStats'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBootStatsTestCases, CtsBootStatsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsBootStats',
-        test_name='cheets_CTS_R.11_r3.arm.CtsBootStats',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBootStatsTestCases', '--include-filter', 'CtsBootStatsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBootStats',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCalendarProvider b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCalendarProvider
deleted file mode 100644
index 4a1f7a7..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCalendarProvider
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsCalendarProvider'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCalendarProviderTestCases, CtsCalendarProviderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsCalendarProvider',
-        test_name='cheets_CTS_R.11_r3.arm.CtsCalendarProvider',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCalendarProviderTestCases', '--include-filter', 'CtsCalendarProviderTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCalendarProvider',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCalendarcommon2 b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCalendarcommon2
deleted file mode 100644
index 677ff06..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCalendarcommon2
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsCalendarcommon2'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCalendarcommon2TestCases, CtsCalendarcommon2TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsCalendarcommon2',
-        test_name='cheets_CTS_R.11_r3.arm.CtsCalendarcommon2',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCalendarcommon2TestCases', '--include-filter', 'CtsCalendarcommon2TestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCalendarcommon2',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCamera b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCamera
deleted file mode 100644
index d18eade..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCamera
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsCamera'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraApi25TestCases, CtsCameraApi25TestCases[secondary_user], CtsCameraTestCases, CtsCameraTestCases[instant], CtsCameraTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsCamera',
-        test_name='cheets_CTS_R.11_r3.arm.CtsCamera',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraApi25TestCases[secondary_user]', '--include-filter', 'CtsCameraTestCases', '--include-filter', 'CtsCameraTestCases[instant]', '--include-filter', 'CtsCameraTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCamera',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCar b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCar
deleted file mode 100644
index 16c2d20..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCar
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsCar'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCarHostTestCases, CtsCarTestCases, CtsCarTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsCar',
-        test_name='cheets_CTS_R.11_r3.arm.CtsCar',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCarHostTestCases', '--include-filter', 'CtsCarTestCases', '--include-filter', 'CtsCarTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCar',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCarrierApiTestCases
deleted file mode 100644
index ac6bc29..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCarrierApiTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsCarrierApiTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsCarrierApiTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsCarrierApiTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarrierApiTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCarrierApiTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsClassLoaderFactory b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsClassLoaderFactory
deleted file mode 100644
index c39f5c9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsClassLoaderFactory
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsClassLoaderFactory'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases, CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant], CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user], CtsClassLoaderFactoryPathClassLoaderTestCases, CtsClassLoaderFactoryPathClassLoaderTestCases[instant], CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsClassLoaderFactory',
-        test_name='cheets_CTS_R.11_r3.arm.CtsClassLoaderFactory',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsClassLoaderFactory',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsClassloaderSplitsHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsClassloaderSplitsHost
deleted file mode 100644
index bd34d0a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsClassloaderSplitsHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsClassloaderSplitsHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsClassloaderSplitsHostTestCases, CtsClassloaderSplitsHostTestCases[instant], CtsClassloaderSplitsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsClassloaderSplitsHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsClassloaderSplitsHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsClassloaderSplitsHostTestCases', '--include-filter', 'CtsClassloaderSplitsHostTestCases[instant]', '--include-filter', 'CtsClassloaderSplitsHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsClassloaderSplitsHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCodePathHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCodePathHost
deleted file mode 100644
index a792789..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCodePathHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsCodePathHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCodePathHostTestCases, CtsCodePathHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsCodePathHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsCodePathHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCodePathHostTestCases', '--include-filter', 'CtsCodePathHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCodePathHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsColorMode b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsColorMode
deleted file mode 100644
index 49702c8..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsColorMode
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsColorMode'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsColorModeTestCases, CtsColorModeTestCases[instant], CtsColorModeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsColorMode',
-        test_name='cheets_CTS_R.11_r3.arm.CtsColorMode',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsColorModeTestCases', '--include-filter', 'CtsColorModeTestCases[instant]', '--include-filter', 'CtsColorModeTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsColorMode',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCompilation b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCompilation
deleted file mode 100644
index 8bc8d2c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCompilation
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsCompilation'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCompilationTestCases, CtsCompilationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsCompilation',
-        test_name='cheets_CTS_R.11_r3.arm.CtsCompilation',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCompilationTestCases', '--include-filter', 'CtsCompilationTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCompilation',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsContactsProvider b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsContactsProvider
deleted file mode 100644
index 427fb36..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsContactsProvider
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsContactsProvider'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsContactsProviderTestCases, CtsContactsProviderTestCases[secondary_user], CtsContactsProviderWipe, CtsContactsProviderWipe[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsContactsProvider',
-        test_name='cheets_CTS_R.11_r3.arm.CtsContactsProvider',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsContactsProviderTestCases', '--include-filter', 'CtsContactsProviderTestCases[secondary_user]', '--include-filter', 'CtsContactsProviderWipe', '--include-filter', 'CtsContactsProviderWipe[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsContactsProvider',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsContent b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsContent
deleted file mode 100644
index 00f8090..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsContent
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsContent'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsContentCaptureServiceTestCases, CtsContentCaptureServiceTestCases[instant], CtsContentCaptureServiceTestCases[secondary_user], CtsContentSuggestionsTestCases, CtsContentSuggestionsTestCases[secondary_user], CtsContentTestCases, CtsContentTestCases[instant], CtsContentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        enable_default_apps=True,
-        tag='11_r3.arm.CtsContent',
-        test_name='cheets_CTS_R.11_r3.arm.CtsContent',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsContentCaptureServiceTestCases', '--include-filter', 'CtsContentCaptureServiceTestCases[instant]', '--include-filter', 'CtsContentCaptureServiceTestCases[secondary_user]', '--include-filter', 'CtsContentSuggestionsTestCases', '--include-filter', 'CtsContentSuggestionsTestCases[secondary_user]', '--include-filter', 'CtsContentTestCases', '--include-filter', 'CtsContentTestCases[instant]', '--include-filter', 'CtsContentTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsContent',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsControlsDevice b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsControlsDevice
deleted file mode 100644
index 2b4de3c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsControlsDevice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsControlsDevice'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsControlsDeviceTestCases, CtsControlsDeviceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsControlsDevice',
-        test_name='cheets_CTS_R.11_r3.arm.CtsControlsDevice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsControlsDeviceTestCases', '--include-filter', 'CtsControlsDeviceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsControlsDevice',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCppTools b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCppTools
deleted file mode 100644
index bfedf09..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCppTools
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsCppTools'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCppToolsTestCases, CtsCppToolsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsCppTools',
-        test_name='cheets_CTS_R.11_r3.arm.CtsCppTools',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCppToolsTestCases', '--include-filter', 'CtsCppToolsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCppTools',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCurrentApiSignature b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCurrentApiSignature
deleted file mode 100644
index 4323817..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsCurrentApiSignature
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsCurrentApiSignature'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCurrentApiSignatureTestCases, CtsCurrentApiSignatureTestCases[instant], CtsCurrentApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsCurrentApiSignature',
-        test_name='cheets_CTS_R.11_r3.arm.CtsCurrentApiSignature',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCurrentApiSignatureTestCases', '--include-filter', 'CtsCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsCurrentApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCurrentApiSignature',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDatabase b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDatabase
deleted file mode 100644
index a993ef0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDatabase
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDatabase'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDatabaseTestCases, CtsDatabaseTestCases[instant], CtsDatabaseTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDatabase',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDatabase',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDatabaseTestCases', '--include-filter', 'CtsDatabaseTestCases[instant]', '--include-filter', 'CtsDatabaseTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDatabase',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqp b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqp
deleted file mode 100644
index 80cb4e4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDeqp'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run module CtsDeqpTestCases, CtsDeqpTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDeqp',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDeqp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--include-filter', 'CtsDeqpTestCases[secondary_user]'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqp',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=111600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-EGL b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-EGL
deleted file mode 100644
index d5ae4af..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-EGL
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDeqpTestCases.dEQP-EGL'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-EGL of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDeqpTestCases.dEQP-EGL',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDeqpTestCases.dEQP-EGL',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-EGL.*'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-GLES2 b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-GLES2
deleted file mode 100644
index e625b1a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-GLES2
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDeqpTestCases.dEQP-GLES2'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDeqpTestCases.dEQP-GLES2',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDeqpTestCases.dEQP-GLES2',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES2.*'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-GLES3 b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-GLES3
deleted file mode 100644
index d752850..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-GLES3
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDeqpTestCases.dEQP-GLES3'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDeqpTestCases.dEQP-GLES3',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDeqpTestCases.dEQP-GLES3',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES3.*'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-GLES31 b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-GLES31
deleted file mode 100644
index 7b7dcaa..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-GLES31
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDeqpTestCases.dEQP-GLES31'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES31 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDeqpTestCases.dEQP-GLES31',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDeqpTestCases.dEQP-GLES31',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES31.*'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-VK b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-VK
deleted file mode 100644
index 83d7877..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDeqpTestCases.dEQP-VK
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDeqpTestCases.dEQP-VK'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-VK of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDeqpTestCases.dEQP-VK',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDeqpTestCases.dEQP-VK',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-VK.*'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=54000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDevice b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDevice
deleted file mode 100644
index a673aa1..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDevice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDevice'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeviceConfigTestCases, CtsDeviceConfigTestCases[instant], CtsDeviceConfigTestCases[secondary_user], CtsDeviceIdleHostTestCases, CtsDeviceIdleHostTestCases[secondary_user], CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDevice',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDevice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceConfigTestCases', '--include-filter', 'CtsDeviceConfigTestCases[instant]', '--include-filter', 'CtsDeviceConfigTestCases[secondary_user]', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDeviceIdleHostTestCases[secondary_user]', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDevice',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDexMetadataHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDexMetadataHost
deleted file mode 100644
index 146710c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDexMetadataHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDexMetadataHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDexMetadataHostTestCases, CtsDexMetadataHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDexMetadataHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDexMetadataHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDexMetadataHostTestCases', '--include-filter', 'CtsDexMetadataHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDexMetadataHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDisplay b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDisplay
deleted file mode 100644
index 3b007f5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDisplay
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDisplay'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDisplayTestCases, CtsDisplayTestCases[instant], CtsDisplayTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDisplay',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDisplay',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDisplayTestCases', '--include-filter', 'CtsDisplayTestCases[instant]', '--include-filter', 'CtsDisplayTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDisplay',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDownloadManager b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDownloadManager
deleted file mode 100644
index de3afd3..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDownloadManager
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDownloadManager'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDownloadManagerApi28, CtsDownloadManagerApi28[secondary_user], CtsDownloadManagerInstaller, CtsDownloadManagerInstaller[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDownloadManager',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDownloadManager',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDownloadManagerApi28', '--include-filter', 'CtsDownloadManagerApi28[secondary_user]', '--include-filter', 'CtsDownloadManagerInstaller', '--include-filter', 'CtsDownloadManagerInstaller[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDownloadManager',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDpi b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDpi
deleted file mode 100644
index 0b3bd68..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDpi
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDpi'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDpiTestCases, CtsDpiTestCases2, CtsDpiTestCases2[secondary_user], CtsDpiTestCases[instant], CtsDpiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDpi',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDpi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--include-filter', 'CtsDpiTestCases2[secondary_user]', '--include-filter', 'CtsDpiTestCases[instant]', '--include-filter', 'CtsDpiTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDpi',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDreams b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDreams
deleted file mode 100644
index a65b4f4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDreams
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDreams'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDreamsTestCases, CtsDreamsTestCases[instant], CtsDreamsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDreams',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDreams',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDreamsTestCases', '--include-filter', 'CtsDreamsTestCases[instant]', '--include-filter', 'CtsDreamsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDreams',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDrm b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDrm
deleted file mode 100644
index cf8e998..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDrm
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDrm'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDrmTestCases, CtsDrmTestCases[instant], CtsDrmTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDrm',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDrm',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDrmTestCases', '--include-filter', 'CtsDrmTestCases[instant]', '--include-filter', 'CtsDrmTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDrm',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDropBoxManagerTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDropBoxManagerTestCases
deleted file mode 100644
index 9f8e481..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDropBoxManagerTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDropBoxManagerTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDropBoxManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDropBoxManagerTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDropBoxManagerTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDropBoxManagerTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDropBoxManagerTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDumpsysHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDumpsysHost
deleted file mode 100644
index af4277d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDumpsysHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDumpsysHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDumpsysHostTestCases, CtsDumpsysHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDumpsysHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDumpsysHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDumpsysHostTestCases', '--include-filter', 'CtsDumpsysHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDumpsysHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDynamic b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDynamic
deleted file mode 100644
index 441119b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsDynamic
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsDynamic'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDynamicLinkerTestCases, CtsDynamicLinkerTestCases[instant], CtsDynamicLinkerTestCases[secondary_user], CtsDynamicMimeHostTestCases, CtsDynamicMimeHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsDynamic',
-        test_name='cheets_CTS_R.11_r3.arm.CtsDynamic',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDynamicLinkerTestCases', '--include-filter', 'CtsDynamicLinkerTestCases[instant]', '--include-filter', 'CtsDynamicLinkerTestCases[secondary_user]', '--include-filter', 'CtsDynamicMimeHostTestCases', '--include-filter', 'CtsDynamicMimeHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDynamic',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsEdiHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsEdiHost
deleted file mode 100644
index f7abb50..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsEdiHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsEdiHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsEdiHostTestCases, CtsEdiHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsEdiHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsEdiHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsEdiHostTestCases', '--include-filter', 'CtsEdiHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsEdiHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsEffect b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsEffect
deleted file mode 100644
index 9332ed1..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsEffect
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsEffect'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsEffectTestCases, CtsEffectTestCases[instant], CtsEffectTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsEffect',
-        test_name='cheets_CTS_R.11_r3.arm.CtsEffect',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsEffectTestCases', '--include-filter', 'CtsEffectTestCases[instant]', '--include-filter', 'CtsEffectTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsEffect',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsExtendedMocking b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsExtendedMocking
deleted file mode 100644
index 724cf70..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsExtendedMocking
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsExtendedMocking'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsExtendedMockingTestCases, CtsExtendedMockingTestCases[instant], CtsExtendedMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsExtendedMocking',
-        test_name='cheets_CTS_R.11_r3.arm.CtsExtendedMocking',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExtendedMockingTestCases', '--include-filter', 'CtsExtendedMockingTestCases[instant]', '--include-filter', 'CtsExtendedMockingTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsExtendedMocking',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsExternalService b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsExternalService
deleted file mode 100644
index 479daf0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsExternalService
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsExternalService'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsExternalServiceTestCases, CtsExternalServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsExternalService',
-        test_name='cheets_CTS_R.11_r3.arm.CtsExternalService',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalServiceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsExternalService',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsExtractNativeLibsHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsExtractNativeLibsHost
deleted file mode 100644
index 4778af0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsExtractNativeLibsHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsExtractNativeLibsHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsExtractNativeLibsHostTestCases, CtsExtractNativeLibsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsExtractNativeLibsHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsExtractNativeLibsHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExtractNativeLibsHostTestCases', '--include-filter', 'CtsExtractNativeLibsHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsExtractNativeLibsHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsFileSystem b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsFileSystem
deleted file mode 100644
index 7e89a0f..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsFileSystem
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsFileSystem'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFileSystemTestCases, CtsFileSystemTestCases[instant], CtsFileSystemTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsFileSystem',
-        test_name='cheets_CTS_R.11_r3.arm.CtsFileSystem',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFileSystemTestCases', '--include-filter', 'CtsFileSystemTestCases[instant]', '--include-filter', 'CtsFileSystemTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsFileSystem',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsFragment b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsFragment
deleted file mode 100644
index 97666d5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsFragment
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsFragment'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsFragmentTestCasesSdk26[instant], CtsFragmentTestCasesSdk26[secondary_user], CtsFragmentTestCases[instant], CtsFragmentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsFragment',
-        test_name='cheets_CTS_R.11_r3.arm.CtsFragment',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--include-filter', 'CtsFragmentTestCasesSdk26[instant]', '--include-filter', 'CtsFragmentTestCasesSdk26[secondary_user]', '--include-filter', 'CtsFragmentTestCases[instant]', '--include-filter', 'CtsFragmentTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsFragment',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsFsMgr b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsFsMgr
deleted file mode 100644
index a891008..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsFsMgr
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsFsMgr'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFsMgrTestCases, CtsFsMgrTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsFsMgr',
-        test_name='cheets_CTS_R.11_r3.arm.CtsFsMgr',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFsMgrTestCases', '--include-filter', 'CtsFsMgrTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsFsMgr',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGesture b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGesture
deleted file mode 100644
index c4b9350..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGesture
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsGesture'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGestureTestCases, CtsGestureTestCases[instant], CtsGestureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsGesture',
-        test_name='cheets_CTS_R.11_r3.arm.CtsGesture',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGestureTestCases', '--include-filter', 'CtsGestureTestCases[instant]', '--include-filter', 'CtsGestureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsGesture',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGpu b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGpu
deleted file mode 100644
index 887ab61..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGpu
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsGpu'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGpuProfilingDataTestCases, CtsGpuProfilingDataTestCases[secondary_user], CtsGpuToolsHostTestCases, CtsGpuToolsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsGpu',
-        test_name='cheets_CTS_R.11_r3.arm.CtsGpu',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGpuProfilingDataTestCases', '--include-filter', 'CtsGpuProfilingDataTestCases[secondary_user]', '--include-filter', 'CtsGpuToolsHostTestCases', '--include-filter', 'CtsGpuToolsHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsGpu',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGraphics b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGraphics
deleted file mode 100644
index 92cd0f4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGraphics
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsGraphics'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGraphicsTestCases, CtsGraphicsTestCases[instant], CtsGraphicsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsGraphics',
-        test_name='cheets_CTS_R.11_r3.arm.CtsGraphics',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGraphicsTestCases', '--include-filter', 'CtsGraphicsTestCases[instant]', '--include-filter', 'CtsGraphicsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsGraphics',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGwpAsan b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGwpAsan
deleted file mode 100644
index 4c8adf8..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsGwpAsan
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsGwpAsan'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGwpAsanTestCases, CtsGwpAsanTestCases[instant], CtsGwpAsanTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsGwpAsan',
-        test_name='cheets_CTS_R.11_r3.arm.CtsGwpAsan',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGwpAsanTestCases', '--include-filter', 'CtsGwpAsanTestCases[instant]', '--include-filter', 'CtsGwpAsanTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsGwpAsan',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHardware b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHardware
deleted file mode 100644
index 5bdcc81..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHardware
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsHardware'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHardwareTestCases, CtsHardwareTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsHardware',
-        test_name='cheets_CTS_R.11_r3.arm.CtsHardware',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHardwareTestCases', '--include-filter', 'CtsHardwareTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHardware',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHarmfulAppWarningHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHarmfulAppWarningHost
deleted file mode 100644
index 50bde25..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHarmfulAppWarningHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsHarmfulAppWarningHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHarmfulAppWarningHostTestCases, CtsHarmfulAppWarningHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsHarmfulAppWarningHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsHarmfulAppWarningHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHarmfulAppWarningHostTestCases', '--include-filter', 'CtsHarmfulAppWarningHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHarmfulAppWarningHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHdmiCecHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHdmiCecHost
deleted file mode 100644
index 15421ed..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHdmiCecHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsHdmiCecHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHdmiCecHostTestCases, CtsHdmiCecHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsHdmiCecHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsHdmiCecHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHdmiCecHostTestCases', '--include-filter', 'CtsHdmiCecHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHdmiCecHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHiddenApi b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHiddenApi
deleted file mode 100644
index 9ffb1dc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHiddenApi
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsHiddenApi'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistApi27TestCases[instant], CtsHiddenApiBlacklistApi27TestCases[secondary_user], CtsHiddenApiBlacklistApi28TestCases, CtsHiddenApiBlacklistApi28TestCases[instant], CtsHiddenApiBlacklistApi28TestCases[secondary_user], CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistCurrentApiTestCases[instant], CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user], CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiBlacklistDebugClassTestCases[instant], CtsHiddenApiBlacklistDebugClassTestCases[secondary_user], CtsHiddenApiBlacklistTestApiTestCases, CtsHiddenApiBlacklistTestApiTestCases[instant], CtsHiddenApiBlacklistTestApiTestCases[secondary_user], CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases[instant], CtsHiddenApiKillswitchDebugClassTestCases[secondary_user], CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWhitelistTestCases[instant], CtsHiddenApiKillswitchWhitelistTestCases[secondary_user], CtsHiddenApiKillswitchWildcardTestCases, CtsHiddenApiKillswitchWildcardTestCases[instant], CtsHiddenApiKillswitchWildcardTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsHiddenApi',
-        test_name='cheets_CTS_R.11_r3.arm.CtsHiddenApi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases[instant]', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases[instant]', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases[instant]', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases[instant]', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases[instant]', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHiddenApi',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=45000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHostTzData b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHostTzData
deleted file mode 100644
index b829b11..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHostTzData
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsHostTzData'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHostTzDataTests, CtsHostTzDataTests[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsHostTzData',
-        test_name='cheets_CTS_R.11_r3.arm.CtsHostTzData',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostTzDataTests', '--include-filter', 'CtsHostTzDataTests[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHostTzData',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHostside b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHostside
deleted file mode 100644
index f1ae9b0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsHostside
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsHostside'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHostsideNetworkTests, CtsHostsideNetworkTests[instant], CtsHostsideNetworkTests[secondary_user], CtsHostsideNumberBlockingTestCases, CtsHostsideNumberBlockingTestCases[secondary_user], CtsHostsideTvTests, CtsHostsideTvTests[secondary_user], CtsHostsideWebViewTests, CtsHostsideWebViewTests[instant], CtsHostsideWebViewTests[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsHostside',
-        test_name='cheets_CTS_R.11_r3.arm.CtsHostside',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNetworkTests[instant]', '--include-filter', 'CtsHostsideNetworkTests[secondary_user]', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideNumberBlockingTestCases[secondary_user]', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideTvTests[secondary_user]', '--include-filter', 'CtsHostsideWebViewTests', '--include-filter', 'CtsHostsideWebViewTests[instant]', '--include-filter', 'CtsHostsideWebViewTests[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHostside',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=19800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIcu b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIcu
deleted file mode 100644
index c389093..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIcu
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsIcu'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIcuTestCases, CtsIcuTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsIcu',
-        test_name='cheets_CTS_R.11_r3.arm.CtsIcu',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIcuTestCases', '--include-filter', 'CtsIcuTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIcu',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIdentity b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIdentity
deleted file mode 100644
index 2ee1e56..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIdentity
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsIdentity'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIdentityTestCases, CtsIdentityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsIdentity',
-        test_name='cheets_CTS_R.11_r3.arm.CtsIdentity',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIdentityTestCases', '--include-filter', 'CtsIdentityTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIdentity',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIke b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIke
deleted file mode 100644
index 91929a9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIke
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsIke'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIkeTestCases, CtsIkeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsIke',
-        test_name='cheets_CTS_R.11_r3.arm.CtsIke',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIkeTestCases', '--include-filter', 'CtsIkeTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIke',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIncidentHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIncidentHost
deleted file mode 100644
index 506cfeb..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIncidentHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsIncidentHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIncidentHostTestCases, CtsIncidentHostTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsIncidentHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsIncidentHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIncidentHostTestCases', '--include-filter', 'CtsIncidentHostTestCases[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIncidentHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIncrementalInstallHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIncrementalInstallHost
deleted file mode 100644
index 95d2d3b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIncrementalInstallHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsIncrementalInstallHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIncrementalInstallHostTestCases, CtsIncrementalInstallHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsIncrementalInstallHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsIncrementalInstallHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIncrementalInstallHostTestCases', '--include-filter', 'CtsIncrementalInstallHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIncrementalInstallHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInit b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInit
deleted file mode 100644
index 1dff965..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInit
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsInit'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInitTestCases, CtsInitTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsInit',
-        test_name='cheets_CTS_R.11_r3.arm.CtsInit',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInitTestCases', '--include-filter', 'CtsInitTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsInit',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInlineMocking b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInlineMocking
deleted file mode 100644
index ef2a81e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInlineMocking
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsInlineMocking'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInlineMockingTestCases, CtsInlineMockingTestCases[instant], CtsInlineMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsInlineMocking',
-        test_name='cheets_CTS_R.11_r3.arm.CtsInlineMocking',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInlineMockingTestCases', '--include-filter', 'CtsInlineMockingTestCases[instant]', '--include-filter', 'CtsInlineMockingTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsInlineMocking',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInputMethod b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInputMethod
deleted file mode 100644
index e5377c0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInputMethod
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsInputMethod'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInputMethodServiceHostTestCases, CtsInputMethodServiceHostTestCases[instant], CtsInputMethodServiceHostTestCases[secondary_user], CtsInputMethodTestCases, CtsInputMethodTestCases[instant], CtsInputMethodTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsInputMethod',
-        test_name='cheets_CTS_R.11_r3.arm.CtsInputMethod',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodServiceHostTestCases[instant]', '--include-filter', 'CtsInputMethodServiceHostTestCases[secondary_user]', '--include-filter', 'CtsInputMethodTestCases', '--include-filter', 'CtsInputMethodTestCases[instant]', '--include-filter', 'CtsInputMethodTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsInputMethod',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInstantApp b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInstantApp
deleted file mode 100644
index 6f6ec8b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsInstantApp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsInstantApp'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInstantAppTests, CtsInstantAppTests[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsInstantApp',
-        test_name='cheets_CTS_R.11_r3.arm.CtsInstantApp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInstantAppTests', '--include-filter', 'CtsInstantAppTests[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsInstantApp',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIntentSignature b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIntentSignature
deleted file mode 100644
index 9823d8f..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsIntentSignature
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsIntentSignature'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIntentSignatureTestCases, CtsIntentSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsIntentSignature',
-        test_name='cheets_CTS_R.11_r3.arm.CtsIntentSignature',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIntentSignatureTestCases', '--include-filter', 'CtsIntentSignatureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIntentSignature',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJdwp b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJdwp
deleted file mode 100644
index f4d9f43..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJdwp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsJdwp'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJdwpSecurityHostTestCases, CtsJdwpSecurityHostTestCases[secondary_user], CtsJdwpTestCases, CtsJdwpTestCases[instant], CtsJdwpTestCases[secondary_user], CtsJdwpTunnelHostTestCases, CtsJdwpTunnelHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsJdwp',
-        test_name='cheets_CTS_R.11_r3.arm.CtsJdwp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsJdwpTestCases', '--include-filter', 'CtsJdwpTestCases[instant]', '--include-filter', 'CtsJdwpTestCases[secondary_user]', '--include-filter', 'CtsJdwpTunnelHostTestCases', '--include-filter', 'CtsJdwpTunnelHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsJdwp',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJni b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJni
deleted file mode 100644
index 25c7829..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJni
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsJni'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJniTestCases, CtsJniTestCases[instant], CtsJniTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsJni',
-        test_name='cheets_CTS_R.11_r3.arm.CtsJni',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJniTestCases', '--include-filter', 'CtsJniTestCases[instant]', '--include-filter', 'CtsJniTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsJni',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJobScheduler b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJobScheduler
deleted file mode 100644
index c7b5a65..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJobScheduler
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsJobScheduler'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerSharedUidTestCases[secondary_user], CtsJobSchedulerTestCases, CtsJobSchedulerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsJobScheduler',
-        test_name='cheets_CTS_R.11_r3.arm.CtsJobScheduler',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerSharedUidTestCases[secondary_user]', '--include-filter', 'CtsJobSchedulerTestCases', '--include-filter', 'CtsJobSchedulerTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsJobScheduler',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJvmti b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJvmti
deleted file mode 100644
index 5cf52f0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsJvmti
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsJvmti'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingHostTestCases[secondary_user], CtsJvmtiAttachingTestCases, CtsJvmtiAttachingTestCases[secondary_user], CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRedefineClassesHostTestCases[secondary_user], CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1900HostTestCases[secondary_user], CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1901HostTestCases[secondary_user], CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1902HostTestCases[secondary_user], CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1903HostTestCases[secondary_user], CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1904HostTestCases[secondary_user], CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1906HostTestCases[secondary_user], CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1907HostTestCases[secondary_user], CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1908HostTestCases[secondary_user], CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1909HostTestCases[secondary_user], CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1910HostTestCases[secondary_user], CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1911HostTestCases[secondary_user], CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1912HostTestCases[secondary_user], CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1913HostTestCases[secondary_user], CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1914HostTestCases[secondary_user], CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1915HostTestCases[secondary_user], CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1916HostTestCases[secondary_user], CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1917HostTestCases[secondary_user], CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1920HostTestCases[secondary_user], CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1921HostTestCases[secondary_user], CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1922HostTestCases[secondary_user], CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1923HostTestCases[secondary_user], CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1924HostTestCases[secondary_user], CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1925HostTestCases[secondary_user], CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1926HostTestCases[secondary_user], CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1927HostTestCases[secondary_user], CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1928HostTestCases[secondary_user], CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1930HostTestCases[secondary_user], CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1931HostTestCases[secondary_user], CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1932HostTestCases[secondary_user], CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1933HostTestCases[secondary_user], CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1934HostTestCases[secondary_user], CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1936HostTestCases[secondary_user], CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1937HostTestCases[secondary_user], CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1939HostTestCases[secondary_user], CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1941HostTestCases[secondary_user], CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1942HostTestCases[secondary_user], CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest1943HostTestCases[secondary_user], CtsJvmtiRunTest1953HostTestCases, CtsJvmtiRunTest1953HostTestCases[secondary_user], CtsJvmtiRunTest1958HostTestCases, CtsJvmtiRunTest1958HostTestCases[secondary_user], CtsJvmtiRunTest1962HostTestCases, CtsJvmtiRunTest1962HostTestCases[secondary_user], CtsJvmtiRunTest1967HostTestCases, CtsJvmtiRunTest1967HostTestCases[secondary_user], CtsJvmtiRunTest1968HostTestCases, CtsJvmtiRunTest1968HostTestCases[secondary_user], CtsJvmtiRunTest1969HostTestCases, CtsJvmtiRunTest1969HostTestCases[secondary_user], CtsJvmtiRunTest1970HostTestCases, CtsJvmtiRunTest1970HostTestCases[secondary_user], CtsJvmtiRunTest1971HostTestCases, CtsJvmtiRunTest1971HostTestCases[secondary_user], CtsJvmtiRunTest1974HostTestCases, CtsJvmtiRunTest1974HostTestCases[secondary_user], CtsJvmtiRunTest1975HostTestCases, CtsJvmtiRunTest1975HostTestCases[secondary_user], CtsJvmtiRunTest1976HostTestCases, CtsJvmtiRunTest1976HostTestCases[secondary_user], CtsJvmtiRunTest1977HostTestCases, CtsJvmtiRunTest1977HostTestCases[secondary_user], CtsJvmtiRunTest1978HostTestCases, CtsJvmtiRunTest1978HostTestCases[secondary_user], CtsJvmtiRunTest1979HostTestCases, CtsJvmtiRunTest1979HostTestCases[secondary_user], CtsJvmtiRunTest1981HostTestCases, CtsJvmtiRunTest1981HostTestCases[secondary_user], CtsJvmtiRunTest1982HostTestCases, CtsJvmtiRunTest1982HostTestCases[secondary_user], CtsJvmtiRunTest1983HostTestCases, CtsJvmtiRunTest1983HostTestCases[secondary_user], CtsJvmtiRunTest1984HostTestCases, CtsJvmtiRunTest1984HostTestCases[secondary_user], CtsJvmtiRunTest1988HostTestCases, CtsJvmtiRunTest1988HostTestCases[secondary_user], CtsJvmtiRunTest1989HostTestCases, CtsJvmtiRunTest1989HostTestCases[secondary_user], CtsJvmtiRunTest1990HostTestCases, CtsJvmtiRunTest1990HostTestCases[secondary_user], CtsJvmtiRunTest1991HostTestCases, CtsJvmtiRunTest1991HostTestCases[secondary_user], CtsJvmtiRunTest1992HostTestCases, CtsJvmtiRunTest1992HostTestCases[secondary_user], CtsJvmtiRunTest1994HostTestCases, CtsJvmtiRunTest1994HostTestCases[secondary_user], CtsJvmtiRunTest1995HostTestCases, CtsJvmtiRunTest1995HostTestCases[secondary_user], CtsJvmtiRunTest1996HostTestCases, CtsJvmtiRunTest1996HostTestCases[secondary_user], CtsJvmtiRunTest1997HostTestCases, CtsJvmtiRunTest1997HostTestCases[secondary_user], CtsJvmtiRunTest1998HostTestCases, CtsJvmtiRunTest1998HostTestCases[secondary_user], CtsJvmtiRunTest1999HostTestCases, CtsJvmtiRunTest1999HostTestCases[secondary_user], CtsJvmtiRunTest2001HostTestCases, CtsJvmtiRunTest2001HostTestCases[secondary_user], CtsJvmtiRunTest2002HostTestCases, CtsJvmtiRunTest2002HostTestCases[secondary_user], CtsJvmtiRunTest2003HostTestCases, CtsJvmtiRunTest2003HostTestCases[secondary_user], CtsJvmtiRunTest2004HostTestCases, CtsJvmtiRunTest2004HostTestCases[secondary_user], CtsJvmtiRunTest2005HostTestCases, CtsJvmtiRunTest2005HostTestCases[secondary_user], CtsJvmtiRunTest2006HostTestCases, CtsJvmtiRunTest2006HostTestCases[secondary_user], CtsJvmtiRunTest2007HostTestCases, CtsJvmtiRunTest2007HostTestCases[secondary_user], CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest902HostTestCases[secondary_user], CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest903HostTestCases[secondary_user], CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest904HostTestCases[secondary_user], CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest905HostTestCases[secondary_user], CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest906HostTestCases[secondary_user], CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest907HostTestCases[secondary_user], CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest908HostTestCases[secondary_user], CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest910HostTestCases[secondary_user], CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest911HostTestCases[secondary_user], CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest912HostTestCases[secondary_user], CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest913HostTestCases[secondary_user], CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest914HostTestCases[secondary_user], CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest915HostTestCases[secondary_user], CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest917HostTestCases[secondary_user], CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest918HostTestCases[secondary_user], CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest919HostTestCases[secondary_user], CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest920HostTestCases[secondary_user], CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest922HostTestCases[secondary_user], CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest923HostTestCases[secondary_user], CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest924HostTestCases[secondary_user], CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest926HostTestCases[secondary_user], CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest927HostTestCases[secondary_user], CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest928HostTestCases[secondary_user], CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest930HostTestCases[secondary_user], CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest931HostTestCases[secondary_user], CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest932HostTestCases[secondary_user], CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest940HostTestCases[secondary_user], CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest942HostTestCases[secondary_user], CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest944HostTestCases[secondary_user], CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest945HostTestCases[secondary_user], CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest947HostTestCases[secondary_user], CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest951HostTestCases[secondary_user], CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest982HostTestCases[secondary_user], CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest983HostTestCases[secondary_user], CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest984HostTestCases[secondary_user], CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest985HostTestCases[secondary_user], CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest986HostTestCases[secondary_user], CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest988HostTestCases[secondary_user], CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest989HostTestCases[secondary_user], CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest990HostTestCases[secondary_user], CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest991HostTestCases[secondary_user], CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest992HostTestCases[secondary_user], CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest993HostTestCases[secondary_user], CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest994HostTestCases[secondary_user], CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest995HostTestCases[secondary_user], CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest996HostTestCases[secondary_user], CtsJvmtiRunTest997HostTestCases, CtsJvmtiRunTest997HostTestCases[secondary_user], CtsJvmtiTaggingHostTestCases, CtsJvmtiTaggingHostTestCases[secondary_user], CtsJvmtiTrackingHostTestCases, CtsJvmtiTrackingHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsJvmti',
-        test_name='cheets_CTS_R.11_r3.arm.CtsJvmti',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1981HostTestCases', '--include-filter', 'CtsJvmtiRunTest1981HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', 
'--include-filter', 'CtsJvmtiRunTest908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 
'CtsJvmtiRunTest984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsJvmti',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=75000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsKernelConfigTestCases
deleted file mode 100644
index b261919..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsKernelConfigTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsKernelConfigTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsKernelConfigTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsKernelConfigTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKernelConfigTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsKernelConfigTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsKeystore b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsKeystore
deleted file mode 100644
index 128a652..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsKeystore
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsKeystore'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsKeystoreTestCases, CtsKeystoreTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsKeystore',
-        test_name='cheets_CTS_R.11_r3.arm.CtsKeystore',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsKeystoreTestCases', '--include-filter', 'CtsKeystoreTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsKeystore',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLeanbackJank b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLeanbackJank
deleted file mode 100644
index 7de6798..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLeanbackJank
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsLeanbackJank'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLeanbackJankTestCases, CtsLeanbackJankTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsLeanbackJank',
-        test_name='cheets_CTS_R.11_r3.arm.CtsLeanbackJank',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLeanbackJankTestCases', '--include-filter', 'CtsLeanbackJankTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLeanbackJank',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLegacyNotification2 b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLegacyNotification2
deleted file mode 100644
index 87e6392..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLegacyNotification2
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsLegacyNotification2'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLegacyNotification20TestCases, CtsLegacyNotification20TestCases[secondary_user], CtsLegacyNotification27TestCases, CtsLegacyNotification27TestCases[secondary_user], CtsLegacyNotification28TestCases, CtsLegacyNotification28TestCases[secondary_user], CtsLegacyNotification29TestCases, CtsLegacyNotification29TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsLegacyNotification2',
-        test_name='cheets_CTS_R.11_r3.arm.CtsLegacyNotification2',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLegacyNotification20TestCases', '--include-filter', 'CtsLegacyNotification20TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification27TestCases', '--include-filter', 'CtsLegacyNotification27TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification28TestCases', '--include-filter', 'CtsLegacyNotification28TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification29TestCases', '--include-filter', 'CtsLegacyNotification29TestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLegacyNotification2',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLibcore b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLibcore
deleted file mode 100644
index c1b9d21..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLibcore
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsLibcore'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLibcoreApiEvolutionTestCases, CtsLibcoreApiEvolutionTestCases[secondary_user], CtsLibcoreFileIOTestCases, CtsLibcoreFileIOTestCases[secondary_user], CtsLibcoreJsr166TestCases, CtsLibcoreJsr166TestCases[secondary_user], CtsLibcoreLegacy22TestCases, CtsLibcoreLegacy22TestCases[secondary_user], CtsLibcoreOjTestCases, CtsLibcoreOjTestCases[secondary_user], CtsLibcoreOkHttpTestCases, CtsLibcoreOkHttpTestCases[secondary_user], CtsLibcoreTestCases, CtsLibcoreTestCases[secondary_user], CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofBCTestCases[secondary_user], CtsLibcoreWycheproofConscryptTestCases, CtsLibcoreWycheproofConscryptTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsLibcore',
-        test_name='cheets_CTS_R.11_r3.arm.CtsLibcore',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLibcoreApiEvolutionTestCases', '--include-filter', 'CtsLibcoreApiEvolutionTestCases[secondary_user]', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreFileIOTestCases[secondary_user]', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreJsr166TestCases[secondary_user]', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases[secondary_user]', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOjTestCases[secondary_user]', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases[secondary_user]', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases[secondary_user]'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLibcore',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=39600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLiblog b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLiblog
deleted file mode 100644
index 6393fcf..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLiblog
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsLiblog'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLiblogTestCases, CtsLiblogTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsLiblog',
-        test_name='cheets_CTS_R.11_r3.arm.CtsLiblog',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLiblogTestCases', '--include-filter', 'CtsLiblogTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLiblog',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLocation b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLocation
deleted file mode 100644
index 18f17f0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLocation
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsLocation'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLocationCoarseTestCases, CtsLocationCoarseTestCases[instant], CtsLocationCoarseTestCases[secondary_user], CtsLocationFineTestCases, CtsLocationFineTestCases[instant], CtsLocationFineTestCases[secondary_user], CtsLocationGnssTestCases, CtsLocationGnssTestCases[instant], CtsLocationGnssTestCases[secondary_user], CtsLocationNoneTestCases, CtsLocationNoneTestCases[instant], CtsLocationNoneTestCases[secondary_user], CtsLocationPrivilegedTestCases, CtsLocationPrivilegedTestCases[instant], CtsLocationPrivilegedTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsLocation',
-        test_name='cheets_CTS_R.11_r3.arm.CtsLocation',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLocationCoarseTestCases', '--include-filter', 'CtsLocationCoarseTestCases[instant]', '--include-filter', 'CtsLocationCoarseTestCases[secondary_user]', '--include-filter', 'CtsLocationFineTestCases', '--include-filter', 'CtsLocationFineTestCases[instant]', '--include-filter', 'CtsLocationFineTestCases[secondary_user]', '--include-filter', 'CtsLocationGnssTestCases', '--include-filter', 'CtsLocationGnssTestCases[instant]', '--include-filter', 'CtsLocationGnssTestCases[secondary_user]', '--include-filter', 'CtsLocationNoneTestCases', '--include-filter', 'CtsLocationNoneTestCases[instant]', '--include-filter', 'CtsLocationNoneTestCases[secondary_user]', '--include-filter', 'CtsLocationPrivilegedTestCases', '--include-filter', 'CtsLocationPrivilegedTestCases[instant]', '--include-filter', 'CtsLocationPrivilegedTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLocation',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=28800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLogd b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLogd
deleted file mode 100644
index 3cb38d7..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsLogd
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsLogd'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLogdTestCases, CtsLogdTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsLogd',
-        test_name='cheets_CTS_R.11_r3.arm.CtsLogd',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLogdTestCases', '--include-filter', 'CtsLogdTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLogd',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMatchFlag b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMatchFlag
deleted file mode 100644
index 656c42b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMatchFlag
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMatchFlag'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMatchFlagTestCases, CtsMatchFlagTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsMatchFlag',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMatchFlag',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMatchFlagTestCases', '--include-filter', 'CtsMatchFlagTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMatchFlag',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaBitstreamsTestCases
deleted file mode 100644
index ae5284a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaBitstreamsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMediaBitstreamsTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaBitstreamsTestCases, CtsMediaBitstreamsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='11_r3.arm.CtsMediaBitstreamsTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMediaBitstreamsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaBitstreamsTestCases', '--include-filter', 'CtsMediaBitstreamsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaBitstreamsTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaHostTestCases
deleted file mode 100644
index cd666b4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMediaHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaHostTestCases, CtsMediaHostTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsMediaHostTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMediaHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaHostTestCases', '--include-filter', 'CtsMediaHostTestCases[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaParserTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaParserTestCases
deleted file mode 100644
index df7f163..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaParserTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMediaParserTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaParserTestCases, CtsMediaParserTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsMediaParserTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMediaParserTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaParserTestCases', '--include-filter', 'CtsMediaParserTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaParserTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaStressTestCases
deleted file mode 100644
index a76cdb7..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaStressTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMediaStressTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaStressTestCases, CtsMediaStressTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='11_r3.arm.CtsMediaStressTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMediaStressTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases', '--include-filter', 'CtsMediaStressTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaStressTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaTestCases
deleted file mode 100644
index 59d84c5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMediaTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases, CtsMediaTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='11_r3.arm.CtsMediaTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMediaTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--include-filter', 'CtsMediaTestCases[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=39600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaTestCases.audio b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaTestCases.audio
deleted file mode 100644
index 027bf27..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaTestCases.audio
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMediaTestCases.audio'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases.audio of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='11_r3.arm.CtsMediaTestCases.audio',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMediaTestCases.audio',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFocusTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFormatTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioManagerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioMetadataTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlayRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackCaptureTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPreProcessingTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPresentationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordAppOpTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecord_BufferSizeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordingConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioSystemTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioSystemUsageTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackLatencyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackOffloadTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackSurroundTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrack_ListenerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolAacTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolHapticTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolMidiTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolOggTest', '--include-filter', 'CtsMediaTestCases android.media.cts.VolumeShaperTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaTestCases.video b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaTestCases.video
deleted file mode 100644
index 0edcafa..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaTestCases.video
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMediaTestCases.video'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases.video of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='11_r3.arm.CtsMediaTestCases.video',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMediaTestCases.video',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AdaptivePlaybackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecodeAccuracyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecodeEditEncodeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecoderConformanceTest', '--include-filter', 'CtsMediaTestCases android.media.cts.EncodeDecodeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.ExtractDecodeEditEncodeMuxTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaCodecPlayerTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=36000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaV2TestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaV2TestCases
deleted file mode 100644
index 5e75b28..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMediaV2TestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMediaV2TestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaV2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsMediaV2TestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMediaV2TestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaV2TestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaV2TestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMidiTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMidiTestCases
deleted file mode 100644
index 74cc6c7..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMidiTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMidiTestCases'
-ATTRIBUTES = 'suite:arc-cts-r, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='11_r3.arm.CtsMidiTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMidiTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMidiTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMidiTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMimeMap b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMimeMap
deleted file mode 100644
index f82a796..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMimeMap
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMimeMap'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMimeMapTestCases, CtsMimeMapTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsMimeMap',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMimeMap',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMimeMapTestCases', '--include-filter', 'CtsMimeMapTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMimeMap',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMocking b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMocking
deleted file mode 100644
index 2c3ef08..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMocking
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMocking'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMockingDebuggableTestCases, CtsMockingDebuggableTestCases[instant], CtsMockingDebuggableTestCases[secondary_user], CtsMockingTestCases, CtsMockingTestCases[instant], CtsMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsMocking',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMocking',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingDebuggableTestCases[instant]', '--include-filter', 'CtsMockingDebuggableTestCases[secondary_user]', '--include-filter', 'CtsMockingTestCases', '--include-filter', 'CtsMockingTestCases[instant]', '--include-filter', 'CtsMockingTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMocking',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMonkey b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMonkey
deleted file mode 100644
index d08cde0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMonkey
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMonkey'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMonkeyTestCases, CtsMonkeyTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsMonkey',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMonkey',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMonkeyTestCases', '--include-filter', 'CtsMonkeyTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMonkey',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMultiUser b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMultiUser
deleted file mode 100644
index 47dbb50..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsMultiUser
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsMultiUser'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMultiUserHostTestCases, CtsMultiUserHostTestCases[instant], CtsMultiUserHostTestCases[secondary_user], CtsMultiUserTestCases, CtsMultiUserTestCases[instant], CtsMultiUserTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsMultiUser',
-        test_name='cheets_CTS_R.11_r3.arm.CtsMultiUser',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserHostTestCases[instant]', '--include-filter', 'CtsMultiUserHostTestCases[secondary_user]', '--include-filter', 'CtsMultiUserTestCases', '--include-filter', 'CtsMultiUserTestCases[instant]', '--include-filter', 'CtsMultiUserTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMultiUser',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNNAPI b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNNAPI
deleted file mode 100644
index 17e54b9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNNAPI
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsNNAPI'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNNAPITestCases, CtsNNAPITestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsNNAPI',
-        test_name='cheets_CTS_R.11_r3.arm.CtsNNAPI',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNNAPITestCases', '--include-filter', 'CtsNNAPITestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNNAPI',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNNAPIBenchmark b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNNAPIBenchmark
deleted file mode 100644
index b1d5b7d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNNAPIBenchmark
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsNNAPIBenchmark'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNNAPIBenchmarkTestCases, CtsNNAPIBenchmarkTestCases[instant], CtsNNAPIBenchmarkTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsNNAPIBenchmark',
-        test_name='cheets_CTS_R.11_r3.arm.CtsNNAPIBenchmark',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNNAPIBenchmarkTestCases', '--include-filter', 'CtsNNAPIBenchmarkTestCases[instant]', '--include-filter', 'CtsNNAPIBenchmarkTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNNAPIBenchmark',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNative b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNative
deleted file mode 100644
index 1552827..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNative
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsNative'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNativeEncryptionTestCases, CtsNativeEncryptionTestCases[instant], CtsNativeEncryptionTestCases[secondary_user], CtsNativeHardwareTestCases, CtsNativeHardwareTestCases[secondary_user], CtsNativeMediaAAudioTestCases, CtsNativeMediaAAudioTestCases[instant], CtsNativeMediaAAudioTestCases[secondary_user], CtsNativeMediaMetricsTestCases, CtsNativeMediaMetricsTestCases[instant], CtsNativeMediaMetricsTestCases[secondary_user], CtsNativeMediaSlTestCases, CtsNativeMediaSlTestCases[instant], CtsNativeMediaSlTestCases[secondary_user], CtsNativeMediaXaTestCases, CtsNativeMediaXaTestCases[instant], CtsNativeMediaXaTestCases[secondary_user], CtsNativeMidiTestCases, CtsNativeMidiTestCases[secondary_user], CtsNativeNetDnsTestCases, CtsNativeNetDnsTestCases[instant], CtsNativeNetDnsTestCases[secondary_user], CtsNativeNetTestCases, CtsNativeNetTestCases[instant], CtsNativeNetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsNative',
-        test_name='cheets_CTS_R.11_r3.arm.CtsNative',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeEncryptionTestCases', '--include-filter', 'CtsNativeEncryptionTestCases[instant]', '--include-filter', 'CtsNativeEncryptionTestCases[secondary_user]', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeHardwareTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases[instant]', '--include-filter', 'CtsNativeMediaAAudioTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaMetricsTestCases', '--include-filter', 'CtsNativeMediaMetricsTestCases[instant]', '--include-filter', 'CtsNativeMediaMetricsTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaSlTestCases[instant]', '--include-filter', 'CtsNativeMediaSlTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeMediaXaTestCases[instant]', '--include-filter', 'CtsNativeMediaXaTestCases[secondary_user]', '--include-filter', 'CtsNativeMidiTestCases', '--include-filter', 'CtsNativeMidiTestCases[secondary_user]', '--include-filter', 'CtsNativeNetDnsTestCases', '--include-filter', 'CtsNativeNetDnsTestCases[instant]', '--include-filter', 'CtsNativeNetDnsTestCases[secondary_user]', '--include-filter', 'CtsNativeNetTestCases', '--include-filter', 'CtsNativeNetTestCases[instant]', '--include-filter', 'CtsNativeNetTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNative',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=46800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNdef b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNdef
deleted file mode 100644
index 5d01efe..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNdef
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsNdef'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNdefTestCases, CtsNdefTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsNdef',
-        test_name='cheets_CTS_R.11_r3.arm.CtsNdef',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNdefTestCases', '--include-filter', 'CtsNdefTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNdef',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNdkBinder b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNdkBinder
deleted file mode 100644
index c68d214..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNdkBinder
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsNdkBinder'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNdkBinderTestCases, CtsNdkBinderTestCases[instant], CtsNdkBinderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsNdkBinder',
-        test_name='cheets_CTS_R.11_r3.arm.CtsNdkBinder',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNdkBinderTestCases', '--include-filter', 'CtsNdkBinderTestCases[instant]', '--include-filter', 'CtsNdkBinderTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNdkBinder',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNet b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNet
deleted file mode 100644
index 545da54..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNet
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsNet'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNetApi23TestCases, CtsNetApi23TestCases[secondary_user], CtsNetSecConfigAttributeTestCases, CtsNetSecConfigAttributeTestCases[instant], CtsNetSecConfigAttributeTestCases[secondary_user], CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugDisabledTestCases[instant], CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user], CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases[instant], CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user], CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigBasicDomainConfigTestCases[instant], CtsNetSecConfigBasicDomainConfigTestCases[secondary_user], CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigCleartextTrafficTestCases[instant], CtsNetSecConfigCleartextTrafficTestCases[secondary_user], CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigDownloadManagerTestCases[secondary_user], CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigInvalidPinTestCases[instant], CtsNetSecConfigInvalidPinTestCases[secondary_user], CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigNestedDomainConfigTestCases[instant], CtsNetSecConfigNestedDomainConfigTestCases[secondary_user], CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user], CtsNetSecConfigResourcesSrcTestCases, CtsNetSecConfigResourcesSrcTestCases[instant], CtsNetSecConfigResourcesSrcTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user], CtsNetTestCases, CtsNetTestCasesInternetPermission, CtsNetTestCasesInternetPermission[instant], CtsNetTestCasesInternetPermission[secondary_user], CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyApi22[secondary_user], CtsNetTestCasesLegacyPermission22, CtsNetTestCasesLegacyPermission22[secondary_user], CtsNetTestCasesUpdateStatsPermission, CtsNetTestCasesUpdateStatsPermission[instant], CtsNetTestCasesUpdateStatsPermission[secondary_user], CtsNetTestCases[instant], CtsNetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsNet',
-        test_name='cheets_CTS_R.11_r3.arm.CtsNet',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetApi23TestCases', '--include-filter', 'CtsNetApi23TestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigAttributeTestCases[instant]', '--include-filter', 'CtsNetSecConfigAttributeTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[instant]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[instant]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[instant]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user]', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesInternetPermission', '--include-filter', 'CtsNetTestCasesInternetPermission[instant]', '--include-filter', 'CtsNetTestCasesInternetPermission[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyApi22[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--include-filter', 'CtsNetTestCasesLegacyPermission22[secondary_user]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[instant]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[secondary_user]', '--include-filter', 'CtsNetTestCases[instant]', '--include-filter', 'CtsNetTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNet',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=90000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNfc b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNfc
deleted file mode 100644
index 6f9c646..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNfc
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsNfc'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNfcTestCases, CtsNfcTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsNfc',
-        test_name='cheets_CTS_R.11_r3.arm.CtsNfc',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNfcTestCases', '--include-filter', 'CtsNfcTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNfc',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNoPermission b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNoPermission
deleted file mode 100644
index 9cfd8f3..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsNoPermission
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsNoPermission'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNoPermissionTestCases, CtsNoPermissionTestCases25, CtsNoPermissionTestCases25[secondary_user], CtsNoPermissionTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsNoPermission',
-        test_name='cheets_CTS_R.11_r3.arm.CtsNoPermission',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNoPermissionTestCases', '--include-filter', 'CtsNoPermissionTestCases25', '--include-filter', 'CtsNoPermissionTestCases25[secondary_user]', '--include-filter', 'CtsNoPermissionTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNoPermission',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsOmapi b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsOmapi
deleted file mode 100644
index 5ee2333..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsOmapi
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsOmapi'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOmapiTestCases, CtsOmapiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsOmapi',
-        test_name='cheets_CTS_R.11_r3.arm.CtsOmapi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOmapiTestCases', '--include-filter', 'CtsOmapiTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsOmapi',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsOpenG b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsOpenG
deleted file mode 100644
index 44ee749..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsOpenG
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsOpenG'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOpenGLTestCases, CtsOpenGLTestCases[secondary_user], CtsOpenGlPerf2TestCases, CtsOpenGlPerf2TestCases[secondary_user], CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsOpenG',
-        test_name='cheets_CTS_R.11_r3.arm.CtsOpenG',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGLTestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerf2TestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerfTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsOpenG',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsOs b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsOs
deleted file mode 100644
index 6697fe4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsOs
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsOs'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOsHostTestCases, CtsOsHostTestCases[instant], CtsOsHostTestCases[secondary_user], CtsOsTestCases, CtsOsTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsOs',
-        test_name='cheets_CTS_R.11_r3.arm.CtsOs',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsHostTestCases[instant]', '--include-filter', 'CtsOsHostTestCases[secondary_user]', '--include-filter', 'CtsOsTestCases', '--include-filter', 'CtsOsTestCases[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsOs',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPackage b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPackage
deleted file mode 100644
index 90edde8..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPackage
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsPackage'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPackageInstallAppOpDefaultTestCases, CtsPackageInstallAppOpDefaultTestCases[instant], CtsPackageInstallAppOpDefaultTestCases[secondary_user], CtsPackageInstallAppOpDeniedTestCases, CtsPackageInstallAppOpDeniedTestCases[instant], CtsPackageInstallAppOpDeniedTestCases[secondary_user], CtsPackageInstallTestCases, CtsPackageInstallTestCases[instant], CtsPackageInstallTestCases[secondary_user], CtsPackageInstallerTapjackingTestCases, CtsPackageInstallerTapjackingTestCases[secondary_user], CtsPackageUninstallTestCases, CtsPackageUninstallTestCases[secondary_user], CtsPackageWatchdogTestCases, CtsPackageWatchdogTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsPackage',
-        test_name='cheets_CTS_R.11_r3.arm.CtsPackage',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallTestCases', '--include-filter', 'CtsPackageInstallTestCases[instant]', '--include-filter', 'CtsPackageInstallTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallerTapjackingTestCases', '--include-filter', 'CtsPackageInstallerTapjackingTestCases[secondary_user]', '--include-filter', 'CtsPackageUninstallTestCases', '--include-filter', 'CtsPackageUninstallTestCases[secondary_user]', '--include-filter', 'CtsPackageWatchdogTestCases', '--include-filter', 'CtsPackageWatchdogTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPackage',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=28800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPdf b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPdf
deleted file mode 100644
index d5e2e32..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPdf
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsPdf'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPdfTestCases, CtsPdfTestCases[instant], CtsPdfTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsPdf',
-        test_name='cheets_CTS_R.11_r3.arm.CtsPdf',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPdfTestCases', '--include-filter', 'CtsPdfTestCases[instant]', '--include-filter', 'CtsPdfTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPdf',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPerfetto b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPerfetto
deleted file mode 100644
index 2b0dca3..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPerfetto
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsPerfetto'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPerfettoTestCases, CtsPerfettoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsPerfetto',
-        test_name='cheets_CTS_R.11_r3.arm.CtsPerfetto',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases', '--include-filter', 'CtsPerfettoTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPerfetto',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPermission b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPermission
deleted file mode 100644
index 9edd6a5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPermission
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsPermission'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPermission2TestCases, CtsPermission2TestCases[instant], CtsPermission3TestCases, CtsPermission3TestCases[secondary_user], CtsPermissionTestCases, CtsPermissionTestCasesSdk28, CtsPermissionTestCasesSdk28[instant], CtsPermissionTestCasesSdk28[secondary_user], CtsPermissionTestCasesTelephony, CtsPermissionTestCasesTelephony[instant], CtsPermissionTestCasesTelephony[secondary_user], CtsPermissionTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsPermission',
-        test_name='cheets_CTS_R.11_r3.arm.CtsPermission',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermission2TestCases[instant]', '--include-filter', 'CtsPermission3TestCases', '--include-filter', 'CtsPermission3TestCases[secondary_user]', '--include-filter', 'CtsPermissionTestCases', '--include-filter', 'CtsPermissionTestCasesSdk28', '--include-filter', 'CtsPermissionTestCasesSdk28[instant]', '--include-filter', 'CtsPermissionTestCasesSdk28[secondary_user]', '--include-filter', 'CtsPermissionTestCasesTelephony', '--include-filter', 'CtsPermissionTestCasesTelephony[instant]', '--include-filter', 'CtsPermissionTestCasesTelephony[secondary_user]', '--include-filter', 'CtsPermissionTestCases[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPermission',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=23400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPreference b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPreference
deleted file mode 100644
index deb629e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPreference
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsPreference'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPreferenceTestCases, CtsPreferenceTestCases[instant], CtsPreferenceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsPreference',
-        test_name='cheets_CTS_R.11_r3.arm.CtsPreference',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPreferenceTestCases', '--include-filter', 'CtsPreferenceTestCases[instant]', '--include-filter', 'CtsPreferenceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPreference',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPrint b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPrint
deleted file mode 100644
index b32a964..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsPrint
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsPrint'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPrintTestCases, CtsPrintTestCases[instant], CtsPrintTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsPrint',
-        test_name='cheets_CTS_R.11_r3.arm.CtsPrint',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPrintTestCases', '--include-filter', 'CtsPrintTestCases[instant]', '--include-filter', 'CtsPrintTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPrint',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsProto b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsProto
deleted file mode 100644
index 14fddfe..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsProto
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsProto'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsProtoTestCases, CtsProtoTestCases[instant], CtsProtoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsProto',
-        test_name='cheets_CTS_R.11_r3.arm.CtsProto',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsProtoTestCases', '--include-filter', 'CtsProtoTestCases[instant]', '--include-filter', 'CtsProtoTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsProto',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsProvider b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsProvider
deleted file mode 100644
index 3b576f5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsProvider
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsProvider'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsProviderTestCases, CtsProviderTestCases[secondary_user], CtsProviderUiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsProvider',
-        test_name='cheets_CTS_R.11_r3.arm.CtsProvider',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsProviderTestCases', '--include-filter', 'CtsProviderTestCases[secondary_user]', '--include-filter', 'CtsProviderUiTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsProvider',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsQuickAccessWallet b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsQuickAccessWallet
deleted file mode 100644
index 4928232..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsQuickAccessWallet
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsQuickAccessWallet'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsQuickAccessWalletTestCases, CtsQuickAccessWalletTestCases[instant], CtsQuickAccessWalletTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsQuickAccessWallet',
-        test_name='cheets_CTS_R.11_r3.arm.CtsQuickAccessWallet',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsQuickAccessWalletTestCases', '--include-filter', 'CtsQuickAccessWalletTestCases[instant]', '--include-filter', 'CtsQuickAccessWalletTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsQuickAccessWallet',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRenderscript b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRenderscript
deleted file mode 100644
index 079d3e7..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRenderscript
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsRenderscript'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRenderscriptLegacyTestCases, CtsRenderscriptLegacyTestCases[secondary_user], CtsRenderscriptTestCases, CtsRenderscriptTestCases[instant], CtsRenderscriptTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsRenderscript',
-        test_name='cheets_CTS_R.11_r3.arm.CtsRenderscript',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptLegacyTestCases[secondary_user]', '--include-filter', 'CtsRenderscriptTestCases', '--include-filter', 'CtsRenderscriptTestCases[instant]', '--include-filter', 'CtsRenderscriptTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsRenderscript',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsResolverService b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsResolverService
deleted file mode 100644
index 629ca61..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsResolverService
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsResolverService'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsResolverServiceTestCases, CtsResolverServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsResolverService',
-        test_name='cheets_CTS_R.11_r3.arm.CtsResolverService',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsResolverServiceTestCases', '--include-filter', 'CtsResolverServiceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsResolverService',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsResourcesLoader b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsResourcesLoader
deleted file mode 100644
index 8b19140..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsResourcesLoader
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsResourcesLoader'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsResourcesLoaderTests, CtsResourcesLoaderTests[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsResourcesLoader',
-        test_name='cheets_CTS_R.11_r3.arm.CtsResourcesLoader',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsResourcesLoaderTests', '--include-filter', 'CtsResourcesLoaderTests[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsResourcesLoader',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRole b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRole
deleted file mode 100644
index da8c0cc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRole
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsRole'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRoleTestCases, CtsRoleTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsRole',
-        test_name='cheets_CTS_R.11_r3.arm.CtsRole',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRoleTestCases', '--include-filter', 'CtsRoleTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsRole',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRollbackManagerHostTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRollbackManagerHostTestCases
deleted file mode 100644
index 09f7cfb..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRollbackManagerHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsRollbackManagerHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRollbackManagerHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsRollbackManagerHostTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsRollbackManagerHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsRollbackManagerHostTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsRollbackManagerHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRs b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRs
deleted file mode 100644
index 0c9132e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsRs
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsRs'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRsBlasTestCases, CtsRsBlasTestCases[secondary_user], CtsRsCppTestCases, CtsRsCppTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsRs',
-        test_name='cheets_CTS_R.11_r3.arm.CtsRs',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsBlasTestCases[secondary_user]', '--include-filter', 'CtsRsCppTestCases', '--include-filter', 'CtsRsCppTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsRs',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSample b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSample
deleted file mode 100644
index 2ddcdb8..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSample
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSample'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleDeviceTestCases[instant], CtsSampleDeviceTestCases[secondary_user], CtsSampleHostTestCases, CtsSampleHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSample',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSample',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleDeviceTestCases[instant]', '--include-filter', 'CtsSampleDeviceTestCases[secondary_user]', '--include-filter', 'CtsSampleHostTestCases', '--include-filter', 'CtsSampleHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSample',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSax b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSax
deleted file mode 100644
index 6d2f573..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSax
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSax'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSaxTestCases, CtsSaxTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSax',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSax',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSaxTestCases', '--include-filter', 'CtsSaxTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSax',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsScopedStorageHostTest b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsScopedStorageHostTest
deleted file mode 100644
index 70cdeea..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsScopedStorageHostTest
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsScopedStorageHostTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsScopedStorageHostTest, CtsScopedStorageHostTest[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsScopedStorageHostTest',
-        test_name='cheets_CTS_R.11_r3.arm.CtsScopedStorageHostTest',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsScopedStorageHostTest', '--include-filter', 'CtsScopedStorageHostTest[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsScopedStorageHostTest',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSdkExtensions b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSdkExtensions
deleted file mode 100644
index 7147d23..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSdkExtensions
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSdkExtensions'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSdkExtensionsTestCases, CtsSdkExtensionsTestCases[instant], CtsSdkExtensionsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSdkExtensions',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSdkExtensions',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSdkExtensionsTestCases', '--include-filter', 'CtsSdkExtensionsTestCases[instant]', '--include-filter', 'CtsSdkExtensionsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSdkExtensions',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSeccompHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSeccompHost
deleted file mode 100644
index 5c9ee68..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSeccompHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSeccompHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSeccompHostTestCases, CtsSeccompHostTestCases[instant], CtsSeccompHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSeccompHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSeccompHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSeccompHostTestCases', '--include-filter', 'CtsSeccompHostTestCases[instant]', '--include-filter', 'CtsSeccompHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSeccompHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSecure b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSecure
deleted file mode 100644
index 59b7605..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSecure
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSecure'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases1[secondary_user], CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases2[secondary_user], CtsSecureElementAccessControlTestCases3, CtsSecureElementAccessControlTestCases3[secondary_user], CtsSecureFrpInstallTestCases, CtsSecureFrpInstallTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSecure',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSecure',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--include-filter', 'CtsSecureElementAccessControlTestCases3[secondary_user]', '--include-filter', 'CtsSecureFrpInstallTestCases', '--include-filter', 'CtsSecureFrpInstallTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSecure',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSecurity b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSecurity
deleted file mode 100644
index d1b861f..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSecurity
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSecurity'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecurityBulletinHostTestCases, CtsSecurityBulletinHostTestCases[secondary_user], CtsSecurityHostTestCases, CtsSecurityHostTestCases[secondary_user], CtsSecurityTestCases, CtsSecurityTestCases[instant], CtsSecurityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSecurity',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSecurity',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--include-filter', 'CtsSecurityBulletinHostTestCases[secondary_user]', '--include-filter', 'CtsSecurityHostTestCases', '--include-filter', 'CtsSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsSecurityTestCases', '--include-filter', 'CtsSecurityTestCases[instant]', '--include-filter', 'CtsSecurityTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSecurity',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=154800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSelinux b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSelinux
deleted file mode 100644
index 58ec251..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSelinux
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSelinux'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSelinuxEphemeralTestCases, CtsSelinuxEphemeralTestCases[instant], CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk25TestCases[secondary_user], CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdk27TestCases[secondary_user], CtsSelinuxTargetSdk28TestCases, CtsSelinuxTargetSdk28TestCases[secondary_user], CtsSelinuxTargetSdk29TestCases, CtsSelinuxTargetSdk29TestCases[secondary_user], CtsSelinuxTargetSdkCurrentTestCases, CtsSelinuxTargetSdkCurrentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSelinux',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSelinux',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSelinuxEphemeralTestCases', '--include-filter', 'CtsSelinuxEphemeralTestCases[instant]', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk25TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk28TestCases', '--include-filter', 'CtsSelinuxTargetSdk28TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk29TestCases', '--include-filter', 'CtsSelinuxTargetSdk29TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSelinux',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=23400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSensor b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSensor
deleted file mode 100644
index bec3ed5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSensor
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSensor'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSensorTestCases, CtsSensorTestCases[instant], CtsSensorTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSensor',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSensor',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases', '--include-filter', 'CtsSensorTestCases[instant]', '--include-filter', 'CtsSensorTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSensor',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSettings b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSettings
deleted file mode 100644
index d9e9f2d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSettings
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSettings'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSettingsHostTestCases, CtsSettingsTestCases, CtsSettingsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSettings',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSettings',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSettingsHostTestCases', '--include-filter', 'CtsSettingsTestCases', '--include-filter', 'CtsSettingsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSettings',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSharedLibsApiSignature b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSharedLibsApiSignature
deleted file mode 100644
index eb01158..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSharedLibsApiSignature
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSharedLibsApiSignature'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSharedLibsApiSignatureTestCases, CtsSharedLibsApiSignatureTestCases[instant], CtsSharedLibsApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSharedLibsApiSignature',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSharedLibsApiSignature',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSharedLibsApiSignatureTestCases', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[instant]', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSharedLibsApiSignature',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSharesheet b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSharesheet
deleted file mode 100644
index 5971bc9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSharesheet
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSharesheet'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSharesheetTestCases, CtsSharesheetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSharesheet',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSharesheet',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSharesheetTestCases', '--include-filter', 'CtsSharesheetTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSharesheet',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsShortcut b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsShortcut
deleted file mode 100644
index 5e95b05..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsShortcut
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsShortcut'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerLauncher1, CtsShortcutManagerLauncher1[secondary_user], CtsShortcutManagerLauncher2, CtsShortcutManagerLauncher2[secondary_user], CtsShortcutManagerLauncher3, CtsShortcutManagerLauncher3[secondary_user], CtsShortcutManagerLauncher4, CtsShortcutManagerLauncher4[secondary_user], CtsShortcutManagerPackage1, CtsShortcutManagerPackage1[secondary_user], CtsShortcutManagerPackage2, CtsShortcutManagerPackage2[secondary_user], CtsShortcutManagerPackage3, CtsShortcutManagerPackage3[secondary_user], CtsShortcutManagerPackage4, CtsShortcutManagerPackage4[secondary_user], CtsShortcutManagerTestCases, CtsShortcutManagerTestCases[secondary_user], CtsShortcutManagerThrottlingTest, CtsShortcutManagerThrottlingTest[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsShortcut',
-        test_name='cheets_CTS_R.11_r3.arm.CtsShortcut',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerLauncher1', '--include-filter', 'CtsShortcutManagerLauncher1[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher2', '--include-filter', 'CtsShortcutManagerLauncher2[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher3', '--include-filter', 'CtsShortcutManagerLauncher3[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher4', '--include-filter', 'CtsShortcutManagerLauncher4[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage1', '--include-filter', 'CtsShortcutManagerPackage1[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage2', '--include-filter', 'CtsShortcutManagerPackage2[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage3', '--include-filter', 'CtsShortcutManagerPackage3[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage4', '--include-filter', 'CtsShortcutManagerPackage4[secondary_user]', '--include-filter', 'CtsShortcutManagerTestCases', '--include-filter', 'CtsShortcutManagerTestCases[secondary_user]', '--include-filter', 'CtsShortcutManagerThrottlingTest', '--include-filter', 'CtsShortcutManagerThrottlingTest[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsShortcut',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=39600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSignedConfigHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSignedConfigHost
deleted file mode 100644
index 1ffe7e4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSignedConfigHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSignedConfigHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSignedConfigHostTestCases, CtsSignedConfigHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSignedConfigHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSignedConfigHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSignedConfigHostTestCases', '--include-filter', 'CtsSignedConfigHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSignedConfigHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSimRestrictedApis b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSimRestrictedApis
deleted file mode 100644
index 6d392e1..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSimRestrictedApis
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSimRestrictedApis'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimRestrictedApisTestCases, CtsSimRestrictedApisTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSimRestrictedApis',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSimRestrictedApis',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSimRestrictedApisTestCases', '--include-filter', 'CtsSimRestrictedApisTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSimRestrictedApis',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSimpleCpu b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSimpleCpu
deleted file mode 100644
index d3fa5c2..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSimpleCpu
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSimpleCpu'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimpleCpuTestCases, CtsSimpleCpuTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSimpleCpu',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSimpleCpu',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSimpleCpuTestCases', '--include-filter', 'CtsSimpleCpuTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSimpleCpu',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSimpleperfTestCases
deleted file mode 100644
index 794501b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSimpleperfTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSimpleperfTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSimpleperfTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSimpleperfTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleperfTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSimpleperfTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSkQP b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSkQP
deleted file mode 100644
index 998a744..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSkQP
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSkQP'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSkQPTestCases, CtsSkQPTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSkQP',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSkQP',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSkQPTestCases', '--include-filter', 'CtsSkQPTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSkQP',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSlice b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSlice
deleted file mode 100644
index ac19edb..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSlice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSlice'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSliceTestCases, CtsSliceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSlice',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSlice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSliceTestCases', '--include-filter', 'CtsSliceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSlice',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSoundTrigger b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSoundTrigger
deleted file mode 100644
index 9c4009c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSoundTrigger
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSoundTrigger'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSoundTriggerTestCases, CtsSoundTriggerTestCases[instant], CtsSoundTriggerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSoundTrigger',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSoundTrigger',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSoundTriggerTestCases', '--include-filter', 'CtsSoundTriggerTestCases[instant]', '--include-filter', 'CtsSoundTriggerTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSoundTrigger',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSpeech b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSpeech
deleted file mode 100644
index 7cd623c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSpeech
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSpeech'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSpeechTestCases, CtsSpeechTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSpeech',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSpeech',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSpeechTestCases', '--include-filter', 'CtsSpeechTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSpeech',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsStagedInstallHostTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsStagedInstallHostTestCases
deleted file mode 100644
index 20b03db..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsStagedInstallHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsStagedInstallHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsStagedInstallHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsStagedInstallHostTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsStagedInstallHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsStagedInstallHostTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsStagedInstallHostTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsStatsdHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsStatsdHost
deleted file mode 100644
index 34107f9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsStatsdHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsStatsdHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsStatsdHostTestCases, CtsStatsdHostTestCases[instant], CtsStatsdHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsStatsdHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsStatsdHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsStatsdHostTestCases', '--include-filter', 'CtsStatsdHostTestCases[instant]', '--include-filter', 'CtsStatsdHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsStatsdHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsStrictJavaPackages b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsStrictJavaPackages
deleted file mode 100644
index 564f23b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsStrictJavaPackages
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsStrictJavaPackages'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsStrictJavaPackagesTestCases, CtsStrictJavaPackagesTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsStrictJavaPackages',
-        test_name='cheets_CTS_R.11_r3.arm.CtsStrictJavaPackages',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsStrictJavaPackagesTestCases', '--include-filter', 'CtsStrictJavaPackagesTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsStrictJavaPackages',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSuspendApps b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSuspendApps
deleted file mode 100644
index e44f734..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSuspendApps
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSuspendApps'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSuspendAppsPermissionTestCases, CtsSuspendAppsPermissionTestCases[secondary_user], CtsSuspendAppsTestCases, CtsSuspendAppsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSuspendApps',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSuspendApps',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSuspendAppsPermissionTestCases', '--include-filter', 'CtsSuspendAppsPermissionTestCases[secondary_user]', '--include-filter', 'CtsSuspendAppsTestCases', '--include-filter', 'CtsSuspendAppsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSuspendApps',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSustainedPerformanceHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSustainedPerformanceHost
deleted file mode 100644
index 0c4d869..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSustainedPerformanceHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSustainedPerformanceHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSustainedPerformanceHostTestCases, CtsSustainedPerformanceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSustainedPerformanceHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSustainedPerformanceHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--include-filter', 'CtsSustainedPerformanceHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSustainedPerformanceHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSync b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSync
deleted file mode 100644
index dffa73d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSync
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSync'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases, CtsSyncAccountAccessOtherCertTestCases[secondary_user], CtsSyncContentHostTestCases, CtsSyncContentHostTestCases[secondary_user], CtsSyncManagerTestsCases, CtsSyncManagerTestsCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSync',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSync',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases[secondary_user]', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncContentHostTestCases[secondary_user]', '--include-filter', 'CtsSyncManagerTestsCases', '--include-filter', 'CtsSyncManagerTestsCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSync',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSystem b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSystem
deleted file mode 100644
index 3b3c39e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsSystem
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsSystem'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSystemApiAnnotationTestCases, CtsSystemApiAnnotationTestCases[instant], CtsSystemApiAnnotationTestCases[secondary_user], CtsSystemApiSignatureTestCases, CtsSystemApiSignatureTestCases[instant], CtsSystemApiSignatureTestCases[secondary_user], CtsSystemIntentTestCases, CtsSystemIntentTestCases[secondary_user], CtsSystemUiHostTestCases, CtsSystemUiHostTestCases[instant], CtsSystemUiHostTestCases[secondary_user], CtsSystemUiTestCases, CtsSystemUiTestCases[instant], CtsSystemUiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsSystem',
-        test_name='cheets_CTS_R.11_r3.arm.CtsSystem',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiAnnotationTestCases[instant]', '--include-filter', 'CtsSystemApiAnnotationTestCases[secondary_user]', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases[instant]', '--include-filter', 'CtsSystemApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemIntentTestCases[secondary_user]', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiHostTestCases[instant]', '--include-filter', 'CtsSystemUiHostTestCases[secondary_user]', '--include-filter', 'CtsSystemUiTestCases', '--include-filter', 'CtsSystemUiTestCases[instant]', '--include-filter', 'CtsSystemUiTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSystem',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=27000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTaggingHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTaggingHost
deleted file mode 100644
index 3111ebf..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTaggingHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsTaggingHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTaggingHostTestCases, CtsTaggingHostTestCases[instant], CtsTaggingHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsTaggingHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsTaggingHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTaggingHostTestCases', '--include-filter', 'CtsTaggingHostTestCases[instant]', '--include-filter', 'CtsTaggingHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTaggingHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTelecom b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTelecom
deleted file mode 100644
index a52938b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTelecom
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsTelecom'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases2[secondary_user], CtsTelecomTestCases3, CtsTelecomTestCases3[secondary_user], CtsTelecomTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsTelecom',
-        test_name='cheets_CTS_R.11_r3.arm.CtsTelecom',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases2[secondary_user]', '--include-filter', 'CtsTelecomTestCases3', '--include-filter', 'CtsTelecomTestCases3[secondary_user]', '--include-filter', 'CtsTelecomTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTelecom',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTelephony b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTelephony
deleted file mode 100644
index ea4dcd3..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTelephony
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsTelephony'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTelephony2TestCases, CtsTelephony2TestCases[instant], CtsTelephony2TestCases[secondary_user], CtsTelephony3TestCases, CtsTelephony3TestCases[secondary_user], CtsTelephonyHostCases, CtsTelephonyHostCases[secondary_user], CtsTelephonyProviderHostCases, CtsTelephonyProviderHostCases[secondary_user], CtsTelephonyProviderTestCases, CtsTelephonyProviderTestCases[secondary_user], CtsTelephonySdk28TestCases, CtsTelephonySdk28TestCases[secondary_user], CtsTelephonyTestCases, CtsTelephonyTestCasesPermissionReadPhoneState, CtsTelephonyTestCasesPermissionReadPhoneState[instant], CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsTelephony',
-        test_name='cheets_CTS_R.11_r3.arm.CtsTelephony',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephony2TestCases[instant]', '--include-filter', 'CtsTelephony2TestCases[secondary_user]', '--include-filter', 'CtsTelephony3TestCases', '--include-filter', 'CtsTelephony3TestCases[secondary_user]', '--include-filter', 'CtsTelephonyHostCases', '--include-filter', 'CtsTelephonyHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderHostCases', '--include-filter', 'CtsTelephonyProviderHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderTestCases', '--include-filter', 'CtsTelephonyProviderTestCases[secondary_user]', '--include-filter', 'CtsTelephonySdk28TestCases', '--include-filter', 'CtsTelephonySdk28TestCases[secondary_user]', '--include-filter', 'CtsTelephonyTestCases', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[instant]', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTelephony',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=32400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTestHarnessMode b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTestHarnessMode
deleted file mode 100644
index 4535667..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTestHarnessMode
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsTestHarnessMode'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTestHarnessModeTestCases, CtsTestHarnessModeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsTestHarnessMode',
-        test_name='cheets_CTS_R.11_r3.arm.CtsTestHarnessMode',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTestHarnessModeTestCases', '--include-filter', 'CtsTestHarnessModeTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTestHarnessMode',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTetheringTest b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTetheringTest
deleted file mode 100644
index 17548e9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTetheringTest
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsTetheringTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTetheringTest, CtsTetheringTest[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsTetheringTest',
-        test_name='cheets_CTS_R.11_r3.arm.CtsTetheringTest',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTetheringTest', '--include-filter', 'CtsTetheringTest[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTetheringTest',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsText b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsText
deleted file mode 100644
index f5f83e5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsText
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsText'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTextClassifierTestCases, CtsTextClassifierTestCases[secondary_user], CtsTextTestCases, CtsTextTestCases[instant], CtsTextTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsText',
-        test_name='cheets_CTS_R.11_r3.arm.CtsText',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTextClassifierTestCases', '--include-filter', 'CtsTextClassifierTestCases[secondary_user]', '--include-filter', 'CtsTextTestCases', '--include-filter', 'CtsTextTestCases[instant]', '--include-filter', 'CtsTextTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsText',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTfliteNnapiDelegate b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTfliteNnapiDelegate
deleted file mode 100644
index dfa712a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTfliteNnapiDelegate
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsTfliteNnapiDelegate'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTfliteNnapiDelegateTestCases, CtsTfliteNnapiDelegateTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsTfliteNnapiDelegate',
-        test_name='cheets_CTS_R.11_r3.arm.CtsTfliteNnapiDelegate',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTfliteNnapiDelegateTestCases', '--include-filter', 'CtsTfliteNnapiDelegateTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTfliteNnapiDelegate',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTheme b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTheme
deleted file mode 100644
index 1fdf690..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTheme
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsTheme'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsThemeDeviceTestCases, CtsThemeDeviceTestCases[secondary_user], CtsThemeHostTestCases, CtsThemeHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsTheme',
-        test_name='cheets_CTS_R.11_r3.arm.CtsTheme',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeDeviceTestCases[secondary_user]', '--include-filter', 'CtsThemeHostTestCases', '--include-filter', 'CtsThemeHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTheme',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsThermal b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsThermal
deleted file mode 100644
index e29342c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsThermal
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsThermal'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsThermalTestCases, CtsThermalTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsThermal',
-        test_name='cheets_CTS_R.11_r3.arm.CtsThermal',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThermalTestCases', '--include-filter', 'CtsThermalTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsThermal',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsToast b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsToast
deleted file mode 100644
index 32cb368..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsToast
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsToast'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsToastLegacyTestCases, CtsToastLegacyTestCases[secondary_user], CtsToastTestCases, CtsToastTestCases[instant], CtsToastTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsToast',
-        test_name='cheets_CTS_R.11_r3.arm.CtsToast',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastLegacyTestCases[secondary_user]', '--include-filter', 'CtsToastTestCases', '--include-filter', 'CtsToastTestCases[instant]', '--include-filter', 'CtsToastTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsToast',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTransition b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTransition
deleted file mode 100644
index 4b75bb7..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTransition
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsTransition'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTransitionTestCases, CtsTransitionTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsTransition',
-        test_name='cheets_CTS_R.11_r3.arm.CtsTransition',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTransitionTestCases', '--include-filter', 'CtsTransitionTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTransition',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTrustedVoiceHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTrustedVoiceHost
deleted file mode 100644
index b7be2cd..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTrustedVoiceHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsTrustedVoiceHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTrustedVoiceHostTestCases, CtsTrustedVoiceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsTrustedVoiceHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsTrustedVoiceHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTrustedVoiceHostTestCases', '--include-filter', 'CtsTrustedVoiceHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTrustedVoiceHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTv b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTv
deleted file mode 100644
index 3f9fc0d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsTv
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsTv'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTvProviderTestCases, CtsTvProviderTestCases[secondary_user], CtsTvTestCases, CtsTvTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsTv',
-        test_name='cheets_CTS_R.11_r3.arm.CtsTv',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvProviderTestCases[secondary_user]', '--include-filter', 'CtsTvTestCases', '--include-filter', 'CtsTvTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTv',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUi b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUi
deleted file mode 100644
index 9d07335..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUi
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsUi'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUiAutomationTestCases, CtsUiAutomationTestCases[instant], CtsUiAutomationTestCases[secondary_user], CtsUiRenderingTestCases, CtsUiRenderingTestCases27, CtsUiRenderingTestCases27[instant], CtsUiRenderingTestCases27[secondary_user], CtsUiRenderingTestCases[instant], CtsUiRenderingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsUi',
-        test_name='cheets_CTS_R.11_r3.arm.CtsUi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiAutomationTestCases[instant]', '--include-filter', 'CtsUiAutomationTestCases[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases', '--include-filter', 'CtsUiRenderingTestCases27', '--include-filter', 'CtsUiRenderingTestCases27[instant]', '--include-filter', 'CtsUiRenderingTestCases27[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases[instant]', '--include-filter', 'CtsUiRenderingTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUi',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUidIsolation b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUidIsolation
deleted file mode 100644
index 63af7cc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUidIsolation
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsUidIsolation'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUidIsolationTestCases, CtsUidIsolationTestCases[instant], CtsUidIsolationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsUidIsolation',
-        test_name='cheets_CTS_R.11_r3.arm.CtsUidIsolation',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUidIsolationTestCases', '--include-filter', 'CtsUidIsolationTestCases[instant]', '--include-filter', 'CtsUidIsolationTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUidIsolation',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUsageStats b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUsageStats
deleted file mode 100644
index a7e7760..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUsageStats
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsUsageStats'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUsageStatsTestCases, CtsUsageStatsTestCases[instant], CtsUsageStatsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsUsageStats',
-        test_name='cheets_CTS_R.11_r3.arm.CtsUsageStats',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases', '--include-filter', 'CtsUsageStatsTestCases[instant]', '--include-filter', 'CtsUsageStatsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUsageStats',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUsb b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUsb
deleted file mode 100644
index a2d6109..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUsb
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsUsb'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUsbManagerTestCases, CtsUsbManagerTestCases[secondary_user], CtsUsbTests, CtsUsbTests[instant], CtsUsbTests[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsUsb',
-        test_name='cheets_CTS_R.11_r3.arm.CtsUsb',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsbManagerTestCases', '--include-filter', 'CtsUsbManagerTestCases[secondary_user]', '--include-filter', 'CtsUsbTests', '--include-filter', 'CtsUsbTests[instant]', '--include-filter', 'CtsUsbTests[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUsb',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUserspaceRebootHostSideTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUserspaceRebootHostSideTestCases
deleted file mode 100644
index 0cd10a8..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUserspaceRebootHostSideTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsUserspaceRebootHostSideTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUserspaceRebootHostSideTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsUserspaceRebootHostSideTestCases',
-        test_name='cheets_CTS_R.11_r3.arm.CtsUserspaceRebootHostSideTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUserspaceRebootHostSideTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUserspaceRebootHostSideTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUsesLibraryHost b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUsesLibraryHost
deleted file mode 100644
index f822f7d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUsesLibraryHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsUsesLibraryHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUsesLibraryHostTestCases, CtsUsesLibraryHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsUsesLibraryHost',
-        test_name='cheets_CTS_R.11_r3.arm.CtsUsesLibraryHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsesLibraryHostTestCases', '--include-filter', 'CtsUsesLibraryHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUsesLibraryHost',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUtil b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUtil
deleted file mode 100644
index a9ba445..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsUtil
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsUtil'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUtilTestCases, CtsUtilTestCases[instant], CtsUtilTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsUtil',
-        test_name='cheets_CTS_R.11_r3.arm.CtsUtil',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUtilTestCases', '--include-filter', 'CtsUtilTestCases[instant]', '--include-filter', 'CtsUtilTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUtil',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsVideo b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsVideo
deleted file mode 100644
index 2020e07..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsVideo
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsVideo'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVideoTestCases, CtsVideoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsVideo',
-        test_name='cheets_CTS_R.11_r3.arm.CtsVideo',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVideoTestCases', '--include-filter', 'CtsVideoTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsVideo',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsView b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsView
deleted file mode 100644
index 10a92ac..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsView
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsView'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsViewInspectorAnnotationProcessorTestCases, CtsViewInspectorAnnotationProcessorTestCases[instant], CtsViewInspectorAnnotationProcessorTestCases[secondary_user], CtsViewTestCases, CtsViewTestCasesSdk28, CtsViewTestCasesSdk28[instant], CtsViewTestCasesSdk28[secondary_user], CtsViewTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsView',
-        test_name='cheets_CTS_R.11_r3.arm.CtsView',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[instant]', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[secondary_user]', '--include-filter', 'CtsViewTestCases', '--include-filter', 'CtsViewTestCasesSdk28', '--include-filter', 'CtsViewTestCasesSdk28[instant]', '--include-filter', 'CtsViewTestCasesSdk28[secondary_user]', '--include-filter', 'CtsViewTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsView',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsVoice b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsVoice
deleted file mode 100644
index 7cd2e7d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsVoice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsVoice'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVoiceInteractionTestCases, CtsVoiceInteractionTestCases[instant], CtsVoiceInteractionTestCases[secondary_user], CtsVoiceSettingsTestCases, CtsVoiceSettingsTestCases[instant], CtsVoiceSettingsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsVoice',
-        test_name='cheets_CTS_R.11_r3.arm.CtsVoice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceInteractionTestCases[instant]', '--include-filter', 'CtsVoiceInteractionTestCases[secondary_user]', '--include-filter', 'CtsVoiceSettingsTestCases', '--include-filter', 'CtsVoiceSettingsTestCases[instant]', '--include-filter', 'CtsVoiceSettingsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsVoice',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsVr b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsVr
deleted file mode 100644
index 29e4871..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsVr
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsVr'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVrTestCases, CtsVrTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsVr',
-        test_name='cheets_CTS_R.11_r3.arm.CtsVr',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVrTestCases', '--include-filter', 'CtsVrTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsVr',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWebkit b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWebkit
deleted file mode 100644
index 9824b79..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWebkit
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWebkit'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWebkitTestCases, CtsWebkitTestCases[instant], CtsWebkitTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWebkit',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWebkit',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWebkitTestCases', '--include-filter', 'CtsWebkitTestCases[instant]', '--include-filter', 'CtsWebkitTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWebkit',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWidget b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWidget
deleted file mode 100644
index 3d3e2b5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWidget
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWidget'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWidgetTestCases, CtsWidgetTestCases29, CtsWidgetTestCases29[instant], CtsWidgetTestCases29[secondary_user], CtsWidgetTestCases[instant], CtsWidgetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWidget',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWidget',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWidgetTestCases', '--include-filter', 'CtsWidgetTestCases29', '--include-filter', 'CtsWidgetTestCases29[instant]', '--include-filter', 'CtsWidgetTestCases29[secondary_user]', '--include-filter', 'CtsWidgetTestCases[instant]', '--include-filter', 'CtsWidgetTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWidget',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWifi b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWifi
deleted file mode 100644
index aa6b55d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWifi
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWifi'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWifiBroadcastsHostTestCases, CtsWifiBroadcastsHostTestCases[instant], CtsWifiBroadcastsHostTestCases[secondary_user], CtsWifiTestCases, CtsWifiTestCases[instant], CtsWifiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWifi',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWifi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWifiBroadcastsHostTestCases', '--include-filter', 'CtsWifiBroadcastsHostTestCases[instant]', '--include-filter', 'CtsWifiBroadcastsHostTestCases[secondary_user]', '--include-filter', 'CtsWifiTestCases', '--include-filter', 'CtsWifiTestCases[instant]', '--include-filter', 'CtsWifiTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWifi',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager
deleted file mode 100644
index 307d14d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManagerDeviceTestCases, CtsWindowManagerDeviceTestCases[secondary_user], CtsWindowManagerJetpackTestCases, CtsWindowManagerJetpackTestCases[secondary_user], CtsWindowManagerSdk25TestCases, CtsWindowManagerSdk25TestCases[secondary_user], CtsWindowManagerSdk28TestCases, CtsWindowManagerSdk28TestCases[secondary_user], CtsWindowManagerSdk29TestCases, CtsWindowManagerSdk29TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--include-filter', 'CtsWindowManagerDeviceTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerJetpackTestCases', '--include-filter', 'CtsWindowManagerJetpackTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk25TestCases', '--include-filter', 'CtsWindowManagerSdk25TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk28TestCases', '--include-filter', 'CtsWindowManagerSdk28TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk29TestCases', '--include-filter', 'CtsWindowManagerSdk29TestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManager',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=19800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.A b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.A
deleted file mode 100644
index 40ebb49..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.A
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.A'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.A of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.A',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.A',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityManagerGetConfigTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityMetricsLoggerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTaskAffinityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTransitionTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityViewTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityVisibilityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AddWindowAsUserTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsAppOpsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsImportanceTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AmProfileTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AmStartOptionsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AnrTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AppConfigurationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AspectRatioTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AssistantStackTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.C b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.C
deleted file mode 100644
index a793fa6..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.C
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.C'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.C of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.C',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.C',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.CloseOnOutsideTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ConfigChangeTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.CrossAppDragAndDropTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.D b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.D
deleted file mode 100644
index 16fb7b4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.D
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.D'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.D of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.D',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.D',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DecorInsetTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DeprecatedTargetSdkTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DialogFrameTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplayCutoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplaySizeTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplayTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DragDropTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DreamManagerServiceTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.Ensure b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.Ensure
deleted file mode 100644
index c93a076..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.Ensure
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.Ensure'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.Ensure of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.Ensure',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.Ensure',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.EnsureBarContrastTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.F b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.F
deleted file mode 100644
index 4c94598..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.F
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.F'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.F of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.F',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.F',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ForceRelayoutTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.FreeformWindowingModeTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.L b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.L
deleted file mode 100644
index caf44eb..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.L
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.L'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.L of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.L',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.L',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LocationInWindowTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LocationOnScreenTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.M b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.M
deleted file mode 100644
index 555a1da..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.M
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.M'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.M of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.M',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.M',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ManifestLayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MinimalPostProcessingTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayActivityLaunchTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayClientTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayLockedKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPolicyTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPrivateDisplayTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySecurityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySystemDecorationTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.Override b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.Override
deleted file mode 100644
index 9d8b023..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.Override
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.Override'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.Override of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.Override',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.Override',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.OverrideConfigTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.P b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.P
deleted file mode 100644
index 021c7a8..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.P
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.P'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.P of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.P',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.P',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PinnedStackTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PrereleaseSdkTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PresentationTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.R b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.R
deleted file mode 100644
index 026744a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.R
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.R'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.R of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.R',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.R',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ReplaceWindowTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.RobustnessTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.S b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.S
deleted file mode 100644
index c695b3a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.S
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.S'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.S of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.S',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.S',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SplashscreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SplitScreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityAsUserTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlViewHostTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewSurfaceValidatorTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.T b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.T
deleted file mode 100644
index b0d72d1..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.T
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.T'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.T of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.T',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.T',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ToastWindowTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.TransitionSelectionTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.Window b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.Window
deleted file mode 100644
index a7c4327..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.Window
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.Window'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.Window of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.Window',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.Window',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextPolicyTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowFocusTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInputTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationCallbackTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationControllerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationImeTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationSynchronicityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsControllerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsLayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsPolicyTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_BadTokenExceptionTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_LayoutParamsTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowMetricsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.intent b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.intent
deleted file mode 100644
index 00cefef..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.intent
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.intent'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.intent of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.intent',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.intent',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentGenerationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.lifecycle b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.lifecycle
deleted file mode 100644
index bb90ecf..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWindowManager.lifecycle
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWindowManager.lifecycle'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.lifecycle of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWindowManager.lifecycle',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWindowManager.lifecycle',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleFreeformTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecyclePipTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleSplitScreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTopResumedStateTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityStarterTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWrap b/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWrap
deleted file mode 100644
index f6af736..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.CtsWrap
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.CtsWrap'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWrapNoWrapTestCases, CtsWrapNoWrapTestCases[secondary_user], CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugMallocDebugTestCases[secondary_user], CtsWrapWrapDebugTestCases, CtsWrapWrapDebugTestCases[secondary_user], CtsWrapWrapNoDebugTestCases, CtsWrapWrapNoDebugTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.CtsWrap',
-        test_name='cheets_CTS_R.11_r3.arm.CtsWrap',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapNoWrapTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWrap',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.LegacyStorageTest b/server/site_tests/cheets_CTS_R/control.11_r3.arm.LegacyStorageTest
deleted file mode 100644
index a8a76ef..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.LegacyStorageTest
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.LegacyStorageTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module LegacyStorageTest, LegacyStorageTest[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.LegacyStorageTest',
-        test_name='cheets_CTS_R.11_r3.arm.LegacyStorageTest',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'LegacyStorageTest', '--include-filter', 'LegacyStorageTest[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='LegacyStorageTest',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.ScopedStorageTest b/server/site_tests/cheets_CTS_R/control.11_r3.arm.ScopedStorageTest
deleted file mode 100644
index 6660386..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.ScopedStorageTest
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.ScopedStorageTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module ScopedStorageTest, ScopedStorageTest[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.ScopedStorageTest',
-        test_name='cheets_CTS_R.11_r3.arm.ScopedStorageTest',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'ScopedStorageTest', '--include-filter', 'ScopedStorageTest[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='ScopedStorageTest',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.signed-Cts b/server/site_tests/cheets_CTS_R/control.11_r3.arm.signed-Cts
deleted file mode 100644
index b490678..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.signed-Cts
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.signed-Cts'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module signed-CtsOmapiTestCases, signed-CtsOmapiTestCases[secondary_user], signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases1[secondary_user], signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases2[secondary_user], signed-CtsSecureElementAccessControlTestCases3, signed-CtsSecureElementAccessControlTestCases3[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.signed-Cts',
-        test_name='cheets_CTS_R.11_r3.arm.signed-Cts',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'signed-CtsOmapiTestCases', '--include-filter', 'signed-CtsOmapiTestCases[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='signed-Cts',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_R/control.11_r3.arm.tradefed-run-collect-tests-only-internal
deleted file mode 100644
index f6a8192..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.tradefed-run-collect-tests-only-internal
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.tradefed-run-collect-tests-only-internal'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'LENGTHY'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run all of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='11_r3.arm.tradefed-run-collect-tests-only-internal',
-        test_name='cheets_CTS_R.11_r3.arm.tradefed-run-collect-tests-only-internal',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
-        retry_template=None,
-        target_module=None,
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.arm.vm-tests-tf b/server/site_tests/cheets_CTS_R/control.11_r3.arm.vm-tests-tf
deleted file mode 100644
index f95b0b6..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.arm.vm-tests-tf
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.arm.vm-tests-tf'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vm-tests-tf, vm-tests-tf[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.arm.vm-tests-tf',
-        test_name='cheets_CTS_R.11_r3.arm.vm-tests-tf',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'vm-tests-tf', '--include-filter', 'vm-tests-tf[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vm-tests-tf',
-        target_plan=None,
-        bundle='arm',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAbiOverrideHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAbiOverrideHost
deleted file mode 100644
index 767859e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAbiOverrideHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAbiOverrideHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAbiOverrideHostTestCases, CtsAbiOverrideHostTestCases[instant], CtsAbiOverrideHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAbiOverrideHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAbiOverrideHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAbiOverrideHostTestCases', '--include-filter', 'CtsAbiOverrideHostTestCases[instant]', '--include-filter', 'CtsAbiOverrideHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAbiOverrideHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAcceleration b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAcceleration
deleted file mode 100644
index 3f8e9be..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAcceleration
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAcceleration'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccelerationTestCases, CtsAccelerationTestCases[instant], CtsAccelerationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAcceleration',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAcceleration',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccelerationTestCases', '--include-filter', 'CtsAccelerationTestCases[instant]', '--include-filter', 'CtsAccelerationTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAcceleration',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAccessibility b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAccessibility
deleted file mode 100644
index eef6487..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAccessibility
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAccessibility'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccessibilityServiceSdk29TestCases, CtsAccessibilityServiceSdk29TestCases[instant], CtsAccessibilityServiceSdk29TestCases[secondary_user], CtsAccessibilityServiceTestCases, CtsAccessibilityServiceTestCases[instant], CtsAccessibilityTestCases, CtsAccessibilityTestCases[instant], CtsAccessibilityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAccessibility',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAccessibility',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[instant]', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[secondary_user]', '--include-filter', 'CtsAccessibilityServiceTestCases', '--include-filter', 'CtsAccessibilityServiceTestCases[instant]', '--include-filter', 'CtsAccessibilityTestCases', '--include-filter', 'CtsAccessibilityTestCases[instant]', '--include-filter', 'CtsAccessibilityTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAccessibility',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAccountManager b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAccountManager
deleted file mode 100644
index 917a224..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAccountManager
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAccountManager'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccountManagerTestCases, CtsAccountManagerTestCases[instant], CtsAccountManagerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAccountManager',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAccountManager',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccountManagerTestCases', '--include-filter', 'CtsAccountManagerTestCases[instant]', '--include-filter', 'CtsAccountManagerTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAccountManager',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAccountsHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAccountsHost
deleted file mode 100644
index e875330..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAccountsHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAccountsHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAccountsHostTestCases, CtsAccountsHostTestCases[instant], CtsAccountsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAccountsHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAccountsHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccountsHostTestCases', '--include-filter', 'CtsAccountsHostTestCases[instant]', '--include-filter', 'CtsAccountsHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAccountsHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsActivityManagerBackgroundActivity b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsActivityManagerBackgroundActivity
deleted file mode 100644
index a25fadc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsActivityManagerBackgroundActivity
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsActivityManagerBackgroundActivity'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsActivityManagerBackgroundActivityTestCases, CtsActivityManagerBackgroundActivityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsActivityManagerBackgroundActivity',
-        test_name='cheets_CTS_R.11_r3.x86.CtsActivityManagerBackgroundActivity',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsActivityManagerBackgroundActivity',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAdb b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAdb
deleted file mode 100644
index a1aba14..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAdb
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAdb'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAdbHostTestCases, CtsAdbHostTestCases[secondary_user], CtsAdbManagerHostTestCases, CtsAdbManagerHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAdb',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAdb',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdbHostTestCases', '--include-filter', 'CtsAdbHostTestCases[secondary_user]', '--include-filter', 'CtsAdbManagerHostTestCases', '--include-filter', 'CtsAdbManagerHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAdb',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAdmin b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAdmin
deleted file mode 100644
index cdbb7fc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAdmin
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAdmin'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAdmin',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAdmin',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAdmin',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAlarmManager b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAlarmManager
deleted file mode 100644
index 9bb4a6e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAlarmManager
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAlarmManager'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAlarmManagerTestCases, CtsAlarmManagerTestCases[instant], CtsAlarmManagerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAlarmManager',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAlarmManager',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAlarmManagerTestCases', '--include-filter', 'CtsAlarmManagerTestCases[instant]', '--include-filter', 'CtsAlarmManagerTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAlarmManager',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAndroid b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAndroid
deleted file mode 100644
index 5e0bc2a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAndroid
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAndroid'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAndroidAppTestCases, CtsAndroidAppTestCases[instant], CtsAndroidAppTestCases[secondary_user], CtsAndroidTestBase28ApiSignatureTestCases, CtsAndroidTestBase28ApiSignatureTestCases[instant], CtsAndroidTestBase28ApiSignatureTestCases[secondary_user], CtsAndroidTestBaseCurrentApiSignatureTestCases, CtsAndroidTestBaseCurrentApiSignatureTestCases[instant], CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases[instant], CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestRunnerCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant], CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAndroid',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAndroid',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidAppTestCases[instant]', '--include-filter', 'CtsAndroidAppTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAndroid',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=28800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAngleIntegrationHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAngleIntegrationHost
deleted file mode 100644
index 702c060..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAngleIntegrationHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAngleIntegrationHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAngleIntegrationHostTestCases, CtsAngleIntegrationHostTestCases[instant], CtsAngleIntegrationHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAngleIntegrationHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAngleIntegrationHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAngleIntegrationHostTestCases', '--include-filter', 'CtsAngleIntegrationHostTestCases[instant]', '--include-filter', 'CtsAngleIntegrationHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAngleIntegrationHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAnimation b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAnimation
deleted file mode 100644
index b7d29cc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAnimation
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAnimation'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAnimationTestCases, CtsAnimationTestCases[instant], CtsAnimationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAnimation',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAnimation',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAnimationTestCases', '--include-filter', 'CtsAnimationTestCases[instant]', '--include-filter', 'CtsAnimationTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAnimation',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsApacheHttpLegacy b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsApacheHttpLegacy
deleted file mode 100644
index ff4f3b2..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsApacheHttpLegacy
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsApacheHttpLegacy'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacy27ApiSignatureTestCases[instant], CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant], CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsApacheHttpLegacy',
-        test_name='cheets_CTS_R.11_r3.x86.CtsApacheHttpLegacy',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsApacheHttpLegacy',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsApex b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsApex
deleted file mode 100644
index d3f8ac9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsApex
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsApex'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsApexTestCases, CtsApexTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsApex',
-        test_name='cheets_CTS_R.11_r3.x86.CtsApex',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApexTestCases', '--include-filter', 'CtsApexTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsApex',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsApp b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsApp
deleted file mode 100644
index a861b97..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsApp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsApp'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAppBindingHostTestCases, CtsAppBindingHostTestCases[secondary_user], CtsAppCompatHostTestCases, CtsAppCompatHostTestCases[instant], CtsAppCompatHostTestCases[secondary_user], CtsAppComponentFactoryTestCases, CtsAppComponentFactoryTestCases[instant], CtsAppComponentFactoryTestCases[secondary_user], CtsAppEnumerationTestCases, CtsAppEnumerationTestCases[secondary_user], CtsAppExitTestCases, CtsAppExitTestCases[instant], CtsAppExitTestCases[secondary_user], CtsAppIntegrityDeviceTestCases, CtsAppOpsTestCases, CtsAppOpsTestCases[instant], CtsAppOpsTestCases[secondary_user], CtsAppPredictionServiceTestCases, CtsAppPredictionServiceTestCases[secondary_user], CtsAppSecurityHostTestCases, CtsAppSecurityHostTestCases[secondary_user], CtsAppTestCases, CtsAppTestCases[instant], CtsAppTestCases[secondary_user], CtsAppUsageHostTestCases, CtsAppUsageHostTestCases[instant], CtsAppUsageHostTestCases[secondary_user], CtsAppWidgetTestCases, CtsAppWidgetTestCases[instant], CtsAppWidgetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        enable_default_apps=True,
-        tag='11_r3.x86.CtsApp',
-        test_name='cheets_CTS_R.11_r3.x86.CtsApp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppBindingHostTestCases', '--include-filter', 'CtsAppBindingHostTestCases[secondary_user]', '--include-filter', 'CtsAppCompatHostTestCases', '--include-filter', 'CtsAppCompatHostTestCases[instant]', '--include-filter', 'CtsAppCompatHostTestCases[secondary_user]', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppComponentFactoryTestCases[instant]', '--include-filter', 'CtsAppComponentFactoryTestCases[secondary_user]', '--include-filter', 'CtsAppEnumerationTestCases', '--include-filter', 'CtsAppEnumerationTestCases[secondary_user]', '--include-filter', 'CtsAppExitTestCases', '--include-filter', 'CtsAppExitTestCases[instant]', '--include-filter', 'CtsAppExitTestCases[secondary_user]', '--include-filter', 'CtsAppIntegrityDeviceTestCases', '--include-filter', 'CtsAppOpsTestCases', '--include-filter', 'CtsAppOpsTestCases[instant]', '--include-filter', 'CtsAppOpsTestCases[secondary_user]', '--include-filter', 'CtsAppPredictionServiceTestCases', '--include-filter', 'CtsAppPredictionServiceTestCases[secondary_user]', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppTestCases[instant]', '--include-filter', 'CtsAppTestCases[secondary_user]', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppUsageHostTestCases[instant]', '--include-filter', 'CtsAppUsageHostTestCases[secondary_user]', '--include-filter', 'CtsAppWidgetTestCases', '--include-filter', 'CtsAppWidgetTestCases[instant]', '--include-filter', 'CtsAppWidgetTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsApp',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=55800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAslrMalloc b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAslrMalloc
deleted file mode 100644
index 8abb782..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAslrMalloc
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAslrMalloc'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAslrMallocTestCases, CtsAslrMallocTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAslrMalloc',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAslrMalloc',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAslrMallocTestCases', '--include-filter', 'CtsAslrMallocTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAslrMalloc',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAssist b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAssist
deleted file mode 100644
index 8a8f7e0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAssist
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAssist'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAssistTestCases, CtsAssistTestCases[instant], CtsAssistTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAssist',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAssist',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAssistTestCases', '--include-filter', 'CtsAssistTestCases[instant]', '--include-filter', 'CtsAssistTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAssist',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAtomicInstall b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAtomicInstall
deleted file mode 100644
index 4fc1d48..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAtomicInstall
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAtomicInstall'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAtomicInstallTestCases, CtsAtomicInstallTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAtomicInstall',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAtomicInstall',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAtomicInstallTestCases', '--include-filter', 'CtsAtomicInstallTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAtomicInstall',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAtraceHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAtraceHost
deleted file mode 100644
index e29bfbc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAtraceHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAtraceHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAtraceHostTestCases, CtsAtraceHostTestCases[instant], CtsAtraceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAtraceHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAtraceHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAtraceHostTestCases', '--include-filter', 'CtsAtraceHostTestCases[instant]', '--include-filter', 'CtsAtraceHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAtraceHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAttentionServiceDevice b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAttentionServiceDevice
deleted file mode 100644
index 11b37d5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAttentionServiceDevice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAttentionServiceDevice'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAttentionServiceDeviceTestCases, CtsAttentionServiceDeviceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAttentionServiceDevice',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAttentionServiceDevice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAttentionServiceDeviceTestCases', '--include-filter', 'CtsAttentionServiceDeviceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAttentionServiceDevice',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAutoFillService b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAutoFillService
deleted file mode 100644
index 72d7e5b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsAutoFillService
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsAutoFillService'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsAutoFillServiceTestCases, CtsAutoFillServiceTestCases[instant], CtsAutoFillServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsAutoFillService',
-        test_name='cheets_CTS_R.11_r3.x86.CtsAutoFillService',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAutoFillServiceTestCases', '--include-filter', 'CtsAutoFillServiceTestCases[instant]', '--include-filter', 'CtsAutoFillServiceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsAutoFillService',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBackgroundRestrictions b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBackgroundRestrictions
deleted file mode 100644
index f19338e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBackgroundRestrictions
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsBackgroundRestrictions'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBackgroundRestrictionsTestCases, CtsBackgroundRestrictionsTestCases[instant], CtsBackgroundRestrictionsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsBackgroundRestrictions',
-        test_name='cheets_CTS_R.11_r3.x86.CtsBackgroundRestrictions',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackgroundRestrictionsTestCases', '--include-filter', 'CtsBackgroundRestrictionsTestCases[instant]', '--include-filter', 'CtsBackgroundRestrictionsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBackgroundRestrictions',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBackup b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBackup
deleted file mode 100644
index 3af3c06..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBackup
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsBackup'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBackupHostTestCases, CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsBackup',
-        test_name='cheets_CTS_R.11_r3.x86.CtsBackup',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBackup',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBatterySaving b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBatterySaving
deleted file mode 100644
index e4aa42d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBatterySaving
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsBatterySaving'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBatterySavingTestCases, CtsBatterySavingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsBatterySaving',
-        test_name='cheets_CTS_R.11_r3.x86.CtsBatterySaving',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBatterySavingTestCases', '--include-filter', 'CtsBatterySavingTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBatterySaving',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBionic b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBionic
deleted file mode 100644
index 4067ecb..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBionic
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsBionic'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBionicAppTestCases, CtsBionicAppTestCases[instant], CtsBionicAppTestCases[secondary_user], CtsBionicTestCases, CtsBionicTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsBionic',
-        test_name='cheets_CTS_R.11_r3.x86.CtsBionic',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBionicAppTestCases', '--include-filter', 'CtsBionicAppTestCases[instant]', '--include-filter', 'CtsBionicAppTestCases[secondary_user]', '--include-filter', 'CtsBionicTestCases', '--include-filter', 'CtsBionicTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBionic',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBlobStore b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBlobStore
deleted file mode 100644
index 64aeafe..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBlobStore
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsBlobStore'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBlobStoreHostTestCases, CtsBlobStoreHostTestCases[secondary_user], CtsBlobStoreHostTestHelper, CtsBlobStoreHostTestHelper[secondary_user], CtsBlobStoreTestCases, CtsBlobStoreTestCases[secondary_user], CtsBlobStoreTestHelper, CtsBlobStoreTestHelperDiffSig, CtsBlobStoreTestHelperDiffSig2, CtsBlobStoreTestHelperDiffSig2[secondary_user], CtsBlobStoreTestHelperDiffSig[secondary_user], CtsBlobStoreTestHelper[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsBlobStore',
-        test_name='cheets_CTS_R.11_r3.x86.CtsBlobStore',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBlobStoreHostTestCases', '--include-filter', 'CtsBlobStoreHostTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreHostTestHelper', '--include-filter', 'CtsBlobStoreHostTestHelper[secondary_user]', '--include-filter', 'CtsBlobStoreTestCases', '--include-filter', 'CtsBlobStoreTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper', '--include-filter', 'CtsBlobStoreTestHelperDiffSig', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelperDiffSig[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBlobStore',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=23400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBluetooth b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBluetooth
deleted file mode 100644
index 9b9c7d9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBluetooth
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsBluetooth'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBluetoothTestCases, CtsBluetoothTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsBluetooth',
-        test_name='cheets_CTS_R.11_r3.x86.CtsBluetooth',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBluetoothTestCases', '--include-filter', 'CtsBluetoothTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBluetooth',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBootStats b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBootStats
deleted file mode 100644
index eca8b19..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsBootStats
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsBootStats'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsBootStatsTestCases, CtsBootStatsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsBootStats',
-        test_name='cheets_CTS_R.11_r3.x86.CtsBootStats',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBootStatsTestCases', '--include-filter', 'CtsBootStatsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsBootStats',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCalendarProvider b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCalendarProvider
deleted file mode 100644
index e1cbd33..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCalendarProvider
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsCalendarProvider'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCalendarProviderTestCases, CtsCalendarProviderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsCalendarProvider',
-        test_name='cheets_CTS_R.11_r3.x86.CtsCalendarProvider',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCalendarProviderTestCases', '--include-filter', 'CtsCalendarProviderTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCalendarProvider',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCalendarcommon2 b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCalendarcommon2
deleted file mode 100644
index 5ae6377..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCalendarcommon2
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsCalendarcommon2'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCalendarcommon2TestCases, CtsCalendarcommon2TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsCalendarcommon2',
-        test_name='cheets_CTS_R.11_r3.x86.CtsCalendarcommon2',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCalendarcommon2TestCases', '--include-filter', 'CtsCalendarcommon2TestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCalendarcommon2',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCamera b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCamera
deleted file mode 100644
index 04853c4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCamera
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsCamera'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCameraApi25TestCases, CtsCameraApi25TestCases[secondary_user], CtsCameraTestCases, CtsCameraTestCases[instant], CtsCameraTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsCamera',
-        test_name='cheets_CTS_R.11_r3.x86.CtsCamera',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraApi25TestCases[secondary_user]', '--include-filter', 'CtsCameraTestCases', '--include-filter', 'CtsCameraTestCases[instant]', '--include-filter', 'CtsCameraTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCamera',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCar b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCar
deleted file mode 100644
index da82bc4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCar
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsCar'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCarHostTestCases, CtsCarTestCases, CtsCarTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsCar',
-        test_name='cheets_CTS_R.11_r3.x86.CtsCar',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCarHostTestCases', '--include-filter', 'CtsCarTestCases', '--include-filter', 'CtsCarTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCar',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCarrierApiTestCases
deleted file mode 100644
index 08fd82a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCarrierApiTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsCarrierApiTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsCarrierApiTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsCarrierApiTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarrierApiTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCarrierApiTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsClassLoaderFactory b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsClassLoaderFactory
deleted file mode 100644
index eb84e7c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsClassLoaderFactory
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsClassLoaderFactory'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases, CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant], CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user], CtsClassLoaderFactoryPathClassLoaderTestCases, CtsClassLoaderFactoryPathClassLoaderTestCases[instant], CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsClassLoaderFactory',
-        test_name='cheets_CTS_R.11_r3.x86.CtsClassLoaderFactory',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsClassLoaderFactory',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsClassloaderSplitsHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsClassloaderSplitsHost
deleted file mode 100644
index 4a48878..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsClassloaderSplitsHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsClassloaderSplitsHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsClassloaderSplitsHostTestCases, CtsClassloaderSplitsHostTestCases[instant], CtsClassloaderSplitsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsClassloaderSplitsHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsClassloaderSplitsHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsClassloaderSplitsHostTestCases', '--include-filter', 'CtsClassloaderSplitsHostTestCases[instant]', '--include-filter', 'CtsClassloaderSplitsHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsClassloaderSplitsHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCodePathHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCodePathHost
deleted file mode 100644
index 942bf66..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCodePathHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsCodePathHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCodePathHostTestCases, CtsCodePathHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsCodePathHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsCodePathHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCodePathHostTestCases', '--include-filter', 'CtsCodePathHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCodePathHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsColorMode b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsColorMode
deleted file mode 100644
index eb8bdfb..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsColorMode
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsColorMode'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsColorModeTestCases, CtsColorModeTestCases[instant], CtsColorModeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsColorMode',
-        test_name='cheets_CTS_R.11_r3.x86.CtsColorMode',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsColorModeTestCases', '--include-filter', 'CtsColorModeTestCases[instant]', '--include-filter', 'CtsColorModeTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsColorMode',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCompilation b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCompilation
deleted file mode 100644
index bbde06a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCompilation
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsCompilation'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCompilationTestCases, CtsCompilationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsCompilation',
-        test_name='cheets_CTS_R.11_r3.x86.CtsCompilation',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCompilationTestCases', '--include-filter', 'CtsCompilationTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCompilation',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsContactsProvider b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsContactsProvider
deleted file mode 100644
index eb55347..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsContactsProvider
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsContactsProvider'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsContactsProviderTestCases, CtsContactsProviderTestCases[secondary_user], CtsContactsProviderWipe, CtsContactsProviderWipe[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsContactsProvider',
-        test_name='cheets_CTS_R.11_r3.x86.CtsContactsProvider',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsContactsProviderTestCases', '--include-filter', 'CtsContactsProviderTestCases[secondary_user]', '--include-filter', 'CtsContactsProviderWipe', '--include-filter', 'CtsContactsProviderWipe[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsContactsProvider',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsContent b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsContent
deleted file mode 100644
index f7d81f0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsContent
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsContent'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsContentCaptureServiceTestCases, CtsContentCaptureServiceTestCases[instant], CtsContentCaptureServiceTestCases[secondary_user], CtsContentSuggestionsTestCases, CtsContentSuggestionsTestCases[secondary_user], CtsContentTestCases, CtsContentTestCases[instant], CtsContentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        enable_default_apps=True,
-        tag='11_r3.x86.CtsContent',
-        test_name='cheets_CTS_R.11_r3.x86.CtsContent',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsContentCaptureServiceTestCases', '--include-filter', 'CtsContentCaptureServiceTestCases[instant]', '--include-filter', 'CtsContentCaptureServiceTestCases[secondary_user]', '--include-filter', 'CtsContentSuggestionsTestCases', '--include-filter', 'CtsContentSuggestionsTestCases[secondary_user]', '--include-filter', 'CtsContentTestCases', '--include-filter', 'CtsContentTestCases[instant]', '--include-filter', 'CtsContentTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsContent',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsControlsDevice b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsControlsDevice
deleted file mode 100644
index 9e359e9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsControlsDevice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsControlsDevice'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsControlsDeviceTestCases, CtsControlsDeviceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsControlsDevice',
-        test_name='cheets_CTS_R.11_r3.x86.CtsControlsDevice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsControlsDeviceTestCases', '--include-filter', 'CtsControlsDeviceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsControlsDevice',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCppTools b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCppTools
deleted file mode 100644
index 8cf7792..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCppTools
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsCppTools'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCppToolsTestCases, CtsCppToolsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsCppTools',
-        test_name='cheets_CTS_R.11_r3.x86.CtsCppTools',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCppToolsTestCases', '--include-filter', 'CtsCppToolsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCppTools',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCurrentApiSignature b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCurrentApiSignature
deleted file mode 100644
index 66e3881..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsCurrentApiSignature
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsCurrentApiSignature'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsCurrentApiSignatureTestCases, CtsCurrentApiSignatureTestCases[instant], CtsCurrentApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsCurrentApiSignature',
-        test_name='cheets_CTS_R.11_r3.x86.CtsCurrentApiSignature',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCurrentApiSignatureTestCases', '--include-filter', 'CtsCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsCurrentApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsCurrentApiSignature',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDatabase b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDatabase
deleted file mode 100644
index cbe450f..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDatabase
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDatabase'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDatabaseTestCases, CtsDatabaseTestCases[instant], CtsDatabaseTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDatabase',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDatabase',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDatabaseTestCases', '--include-filter', 'CtsDatabaseTestCases[instant]', '--include-filter', 'CtsDatabaseTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDatabase',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqp b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqp
deleted file mode 100644
index 9d24cb2..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDeqp'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run module CtsDeqpTestCases, CtsDeqpTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDeqp',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDeqp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--include-filter', 'CtsDeqpTestCases[secondary_user]'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqp',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=111600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-EGL b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-EGL
deleted file mode 100644
index 88fdc45..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-EGL
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDeqpTestCases.dEQP-EGL'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-EGL of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDeqpTestCases.dEQP-EGL',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDeqpTestCases.dEQP-EGL',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-EGL.*'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-GLES2 b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-GLES2
deleted file mode 100644
index d365b16..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-GLES2
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDeqpTestCases.dEQP-GLES2'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDeqpTestCases.dEQP-GLES2',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDeqpTestCases.dEQP-GLES2',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES2.*'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-GLES3 b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-GLES3
deleted file mode 100644
index 670f7eb..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-GLES3
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDeqpTestCases.dEQP-GLES3'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDeqpTestCases.dEQP-GLES3',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDeqpTestCases.dEQP-GLES3',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES3.*'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-GLES31 b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-GLES31
deleted file mode 100644
index 2d11218..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-GLES31
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDeqpTestCases.dEQP-GLES31'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-GLES31 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDeqpTestCases.dEQP-GLES31',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDeqpTestCases.dEQP-GLES31',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES31.*'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-VK b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-VK
deleted file mode 100644
index 8aebbc0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDeqpTestCases.dEQP-VK
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDeqpTestCases.dEQP-VK'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeqpTestCases.dEQP-VK of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDeqpTestCases.dEQP-VK',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDeqpTestCases.dEQP-VK',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-VK.*'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=54000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDevice b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDevice
deleted file mode 100644
index 88dba25..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDevice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDevice'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDeviceConfigTestCases, CtsDeviceConfigTestCases[instant], CtsDeviceConfigTestCases[secondary_user], CtsDeviceIdleHostTestCases, CtsDeviceIdleHostTestCases[secondary_user], CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDevice',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDevice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceConfigTestCases', '--include-filter', 'CtsDeviceConfigTestCases[instant]', '--include-filter', 'CtsDeviceConfigTestCases[secondary_user]', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDeviceIdleHostTestCases[secondary_user]', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDevice',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDexMetadataHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDexMetadataHost
deleted file mode 100644
index 3bad67e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDexMetadataHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDexMetadataHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDexMetadataHostTestCases, CtsDexMetadataHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDexMetadataHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDexMetadataHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDexMetadataHostTestCases', '--include-filter', 'CtsDexMetadataHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDexMetadataHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDisplay b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDisplay
deleted file mode 100644
index ba04ddd..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDisplay
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDisplay'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDisplayTestCases, CtsDisplayTestCases[instant], CtsDisplayTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDisplay',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDisplay',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDisplayTestCases', '--include-filter', 'CtsDisplayTestCases[instant]', '--include-filter', 'CtsDisplayTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDisplay',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDownloadManager b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDownloadManager
deleted file mode 100644
index d082de3..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDownloadManager
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDownloadManager'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDownloadManagerApi28, CtsDownloadManagerApi28[secondary_user], CtsDownloadManagerInstaller, CtsDownloadManagerInstaller[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDownloadManager',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDownloadManager',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDownloadManagerApi28', '--include-filter', 'CtsDownloadManagerApi28[secondary_user]', '--include-filter', 'CtsDownloadManagerInstaller', '--include-filter', 'CtsDownloadManagerInstaller[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDownloadManager',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDpi b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDpi
deleted file mode 100644
index ac8faa2..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDpi
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDpi'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDpiTestCases, CtsDpiTestCases2, CtsDpiTestCases2[secondary_user], CtsDpiTestCases[instant], CtsDpiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDpi',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDpi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--include-filter', 'CtsDpiTestCases2[secondary_user]', '--include-filter', 'CtsDpiTestCases[instant]', '--include-filter', 'CtsDpiTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDpi',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDreams b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDreams
deleted file mode 100644
index ce035a8..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDreams
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDreams'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDreamsTestCases, CtsDreamsTestCases[instant], CtsDreamsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDreams',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDreams',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDreamsTestCases', '--include-filter', 'CtsDreamsTestCases[instant]', '--include-filter', 'CtsDreamsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDreams',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDrm b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDrm
deleted file mode 100644
index bf6fd73..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDrm
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDrm'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDrmTestCases, CtsDrmTestCases[instant], CtsDrmTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDrm',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDrm',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDrmTestCases', '--include-filter', 'CtsDrmTestCases[instant]', '--include-filter', 'CtsDrmTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDrm',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDropBoxManagerTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDropBoxManagerTestCases
deleted file mode 100644
index 3696d42..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDropBoxManagerTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDropBoxManagerTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDropBoxManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDropBoxManagerTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDropBoxManagerTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDropBoxManagerTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDropBoxManagerTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDumpsysHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDumpsysHost
deleted file mode 100644
index 1e5333e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDumpsysHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDumpsysHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDumpsysHostTestCases, CtsDumpsysHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDumpsysHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDumpsysHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDumpsysHostTestCases', '--include-filter', 'CtsDumpsysHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDumpsysHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDynamic b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDynamic
deleted file mode 100644
index 77da4d9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsDynamic
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsDynamic'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsDynamicLinkerTestCases, CtsDynamicLinkerTestCases[instant], CtsDynamicLinkerTestCases[secondary_user], CtsDynamicMimeHostTestCases, CtsDynamicMimeHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsDynamic',
-        test_name='cheets_CTS_R.11_r3.x86.CtsDynamic',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDynamicLinkerTestCases', '--include-filter', 'CtsDynamicLinkerTestCases[instant]', '--include-filter', 'CtsDynamicLinkerTestCases[secondary_user]', '--include-filter', 'CtsDynamicMimeHostTestCases', '--include-filter', 'CtsDynamicMimeHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDynamic',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsEdiHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsEdiHost
deleted file mode 100644
index 2f5e5fd..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsEdiHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsEdiHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsEdiHostTestCases, CtsEdiHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsEdiHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsEdiHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsEdiHostTestCases', '--include-filter', 'CtsEdiHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsEdiHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsEffect b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsEffect
deleted file mode 100644
index 89195c4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsEffect
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsEffect'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsEffectTestCases, CtsEffectTestCases[instant], CtsEffectTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsEffect',
-        test_name='cheets_CTS_R.11_r3.x86.CtsEffect',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsEffectTestCases', '--include-filter', 'CtsEffectTestCases[instant]', '--include-filter', 'CtsEffectTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsEffect',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsExtendedMocking b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsExtendedMocking
deleted file mode 100644
index 017949f..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsExtendedMocking
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsExtendedMocking'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsExtendedMockingTestCases, CtsExtendedMockingTestCases[instant], CtsExtendedMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsExtendedMocking',
-        test_name='cheets_CTS_R.11_r3.x86.CtsExtendedMocking',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExtendedMockingTestCases', '--include-filter', 'CtsExtendedMockingTestCases[instant]', '--include-filter', 'CtsExtendedMockingTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsExtendedMocking',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsExternalService b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsExternalService
deleted file mode 100644
index 4d1fa11..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsExternalService
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsExternalService'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsExternalServiceTestCases, CtsExternalServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsExternalService',
-        test_name='cheets_CTS_R.11_r3.x86.CtsExternalService',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalServiceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsExternalService',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsExtractNativeLibsHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsExtractNativeLibsHost
deleted file mode 100644
index b0b7a49..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsExtractNativeLibsHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsExtractNativeLibsHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsExtractNativeLibsHostTestCases, CtsExtractNativeLibsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsExtractNativeLibsHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsExtractNativeLibsHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExtractNativeLibsHostTestCases', '--include-filter', 'CtsExtractNativeLibsHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsExtractNativeLibsHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsFileSystem b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsFileSystem
deleted file mode 100644
index 56aa431..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsFileSystem
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsFileSystem'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFileSystemTestCases, CtsFileSystemTestCases[instant], CtsFileSystemTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsFileSystem',
-        test_name='cheets_CTS_R.11_r3.x86.CtsFileSystem',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFileSystemTestCases', '--include-filter', 'CtsFileSystemTestCases[instant]', '--include-filter', 'CtsFileSystemTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsFileSystem',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsFragment b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsFragment
deleted file mode 100644
index ffdffef..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsFragment
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsFragment'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsFragmentTestCasesSdk26[instant], CtsFragmentTestCasesSdk26[secondary_user], CtsFragmentTestCases[instant], CtsFragmentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsFragment',
-        test_name='cheets_CTS_R.11_r3.x86.CtsFragment',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--include-filter', 'CtsFragmentTestCasesSdk26[instant]', '--include-filter', 'CtsFragmentTestCasesSdk26[secondary_user]', '--include-filter', 'CtsFragmentTestCases[instant]', '--include-filter', 'CtsFragmentTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsFragment',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsFsMgr b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsFsMgr
deleted file mode 100644
index d3aa907..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsFsMgr
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsFsMgr'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsFsMgrTestCases, CtsFsMgrTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsFsMgr',
-        test_name='cheets_CTS_R.11_r3.x86.CtsFsMgr',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFsMgrTestCases', '--include-filter', 'CtsFsMgrTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsFsMgr',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGesture b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGesture
deleted file mode 100644
index fcfd943..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGesture
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsGesture'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGestureTestCases, CtsGestureTestCases[instant], CtsGestureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsGesture',
-        test_name='cheets_CTS_R.11_r3.x86.CtsGesture',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGestureTestCases', '--include-filter', 'CtsGestureTestCases[instant]', '--include-filter', 'CtsGestureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsGesture',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGpu b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGpu
deleted file mode 100644
index a52239a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGpu
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsGpu'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGpuProfilingDataTestCases, CtsGpuProfilingDataTestCases[secondary_user], CtsGpuToolsHostTestCases, CtsGpuToolsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsGpu',
-        test_name='cheets_CTS_R.11_r3.x86.CtsGpu',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGpuProfilingDataTestCases', '--include-filter', 'CtsGpuProfilingDataTestCases[secondary_user]', '--include-filter', 'CtsGpuToolsHostTestCases', '--include-filter', 'CtsGpuToolsHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsGpu',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGraphics b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGraphics
deleted file mode 100644
index 3291799..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGraphics
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsGraphics'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGraphicsTestCases, CtsGraphicsTestCases[instant], CtsGraphicsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsGraphics',
-        test_name='cheets_CTS_R.11_r3.x86.CtsGraphics',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGraphicsTestCases', '--include-filter', 'CtsGraphicsTestCases[instant]', '--include-filter', 'CtsGraphicsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsGraphics',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGwpAsan b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGwpAsan
deleted file mode 100644
index 1e2a9e9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsGwpAsan
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsGwpAsan'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsGwpAsanTestCases, CtsGwpAsanTestCases[instant], CtsGwpAsanTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsGwpAsan',
-        test_name='cheets_CTS_R.11_r3.x86.CtsGwpAsan',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGwpAsanTestCases', '--include-filter', 'CtsGwpAsanTestCases[instant]', '--include-filter', 'CtsGwpAsanTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsGwpAsan',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHardware b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHardware
deleted file mode 100644
index 34738fe..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHardware
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsHardware'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHardwareTestCases, CtsHardwareTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsHardware',
-        test_name='cheets_CTS_R.11_r3.x86.CtsHardware',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHardwareTestCases', '--include-filter', 'CtsHardwareTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHardware',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHarmfulAppWarningHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHarmfulAppWarningHost
deleted file mode 100644
index b5ba100..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHarmfulAppWarningHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsHarmfulAppWarningHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHarmfulAppWarningHostTestCases, CtsHarmfulAppWarningHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsHarmfulAppWarningHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsHarmfulAppWarningHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHarmfulAppWarningHostTestCases', '--include-filter', 'CtsHarmfulAppWarningHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHarmfulAppWarningHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHdmiCecHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHdmiCecHost
deleted file mode 100644
index d899110..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHdmiCecHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsHdmiCecHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHdmiCecHostTestCases, CtsHdmiCecHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsHdmiCecHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsHdmiCecHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHdmiCecHostTestCases', '--include-filter', 'CtsHdmiCecHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHdmiCecHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHiddenApi b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHiddenApi
deleted file mode 100644
index 2107340..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHiddenApi
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsHiddenApi'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistApi27TestCases[instant], CtsHiddenApiBlacklistApi27TestCases[secondary_user], CtsHiddenApiBlacklistApi28TestCases, CtsHiddenApiBlacklistApi28TestCases[instant], CtsHiddenApiBlacklistApi28TestCases[secondary_user], CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistCurrentApiTestCases[instant], CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user], CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiBlacklistDebugClassTestCases[instant], CtsHiddenApiBlacklistDebugClassTestCases[secondary_user], CtsHiddenApiBlacklistTestApiTestCases, CtsHiddenApiBlacklistTestApiTestCases[instant], CtsHiddenApiBlacklistTestApiTestCases[secondary_user], CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases[instant], CtsHiddenApiKillswitchDebugClassTestCases[secondary_user], CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWhitelistTestCases[instant], CtsHiddenApiKillswitchWhitelistTestCases[secondary_user], CtsHiddenApiKillswitchWildcardTestCases, CtsHiddenApiKillswitchWildcardTestCases[instant], CtsHiddenApiKillswitchWildcardTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsHiddenApi',
-        test_name='cheets_CTS_R.11_r3.x86.CtsHiddenApi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases[instant]', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases[instant]', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases[instant]', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases[instant]', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases[instant]', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHiddenApi',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=45000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHostTzData b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHostTzData
deleted file mode 100644
index 92f5005..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHostTzData
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsHostTzData'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHostTzDataTests, CtsHostTzDataTests[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsHostTzData',
-        test_name='cheets_CTS_R.11_r3.x86.CtsHostTzData',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostTzDataTests', '--include-filter', 'CtsHostTzDataTests[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHostTzData',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHostside b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHostside
deleted file mode 100644
index f2d955c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsHostside
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsHostside'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsHostsideNetworkTests, CtsHostsideNetworkTests[instant], CtsHostsideNetworkTests[secondary_user], CtsHostsideNumberBlockingTestCases, CtsHostsideNumberBlockingTestCases[secondary_user], CtsHostsideTvTests, CtsHostsideTvTests[secondary_user], CtsHostsideWebViewTests, CtsHostsideWebViewTests[instant], CtsHostsideWebViewTests[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsHostside',
-        test_name='cheets_CTS_R.11_r3.x86.CtsHostside',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNetworkTests[instant]', '--include-filter', 'CtsHostsideNetworkTests[secondary_user]', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideNumberBlockingTestCases[secondary_user]', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideTvTests[secondary_user]', '--include-filter', 'CtsHostsideWebViewTests', '--include-filter', 'CtsHostsideWebViewTests[instant]', '--include-filter', 'CtsHostsideWebViewTests[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsHostside',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=19800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIcu b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIcu
deleted file mode 100644
index 55f0c82..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIcu
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsIcu'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIcuTestCases, CtsIcuTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsIcu',
-        test_name='cheets_CTS_R.11_r3.x86.CtsIcu',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIcuTestCases', '--include-filter', 'CtsIcuTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIcu',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIdentity b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIdentity
deleted file mode 100644
index 281de67..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIdentity
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsIdentity'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIdentityTestCases, CtsIdentityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsIdentity',
-        test_name='cheets_CTS_R.11_r3.x86.CtsIdentity',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIdentityTestCases', '--include-filter', 'CtsIdentityTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIdentity',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIke b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIke
deleted file mode 100644
index 62095a1..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIke
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsIke'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIkeTestCases, CtsIkeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsIke',
-        test_name='cheets_CTS_R.11_r3.x86.CtsIke',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIkeTestCases', '--include-filter', 'CtsIkeTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIke',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIncidentHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIncidentHost
deleted file mode 100644
index f668a3d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIncidentHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsIncidentHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIncidentHostTestCases, CtsIncidentHostTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsIncidentHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsIncidentHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIncidentHostTestCases', '--include-filter', 'CtsIncidentHostTestCases[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIncidentHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIncrementalInstallHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIncrementalInstallHost
deleted file mode 100644
index 4c575ca..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIncrementalInstallHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsIncrementalInstallHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIncrementalInstallHostTestCases, CtsIncrementalInstallHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsIncrementalInstallHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsIncrementalInstallHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIncrementalInstallHostTestCases', '--include-filter', 'CtsIncrementalInstallHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIncrementalInstallHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInit b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInit
deleted file mode 100644
index 3460486..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInit
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsInit'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInitTestCases, CtsInitTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsInit',
-        test_name='cheets_CTS_R.11_r3.x86.CtsInit',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInitTestCases', '--include-filter', 'CtsInitTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsInit',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInlineMocking b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInlineMocking
deleted file mode 100644
index 0bb40bd..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInlineMocking
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsInlineMocking'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInlineMockingTestCases, CtsInlineMockingTestCases[instant], CtsInlineMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsInlineMocking',
-        test_name='cheets_CTS_R.11_r3.x86.CtsInlineMocking',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInlineMockingTestCases', '--include-filter', 'CtsInlineMockingTestCases[instant]', '--include-filter', 'CtsInlineMockingTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsInlineMocking',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInputMethod b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInputMethod
deleted file mode 100644
index e185f3a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInputMethod
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsInputMethod'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInputMethodServiceHostTestCases, CtsInputMethodServiceHostTestCases[instant], CtsInputMethodServiceHostTestCases[secondary_user], CtsInputMethodTestCases, CtsInputMethodTestCases[instant], CtsInputMethodTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsInputMethod',
-        test_name='cheets_CTS_R.11_r3.x86.CtsInputMethod',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodServiceHostTestCases[instant]', '--include-filter', 'CtsInputMethodServiceHostTestCases[secondary_user]', '--include-filter', 'CtsInputMethodTestCases', '--include-filter', 'CtsInputMethodTestCases[instant]', '--include-filter', 'CtsInputMethodTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsInputMethod',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInstantApp b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInstantApp
deleted file mode 100644
index 3a01693..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsInstantApp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsInstantApp'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsInstantAppTests, CtsInstantAppTests[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsInstantApp',
-        test_name='cheets_CTS_R.11_r3.x86.CtsInstantApp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInstantAppTests', '--include-filter', 'CtsInstantAppTests[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsInstantApp',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIntentSignature b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIntentSignature
deleted file mode 100644
index 6dcae36..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsIntentSignature
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsIntentSignature'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsIntentSignatureTestCases, CtsIntentSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsIntentSignature',
-        test_name='cheets_CTS_R.11_r3.x86.CtsIntentSignature',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIntentSignatureTestCases', '--include-filter', 'CtsIntentSignatureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsIntentSignature',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJdwp b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJdwp
deleted file mode 100644
index 106e893..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJdwp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsJdwp'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJdwpSecurityHostTestCases, CtsJdwpSecurityHostTestCases[secondary_user], CtsJdwpTestCases, CtsJdwpTestCases[instant], CtsJdwpTestCases[secondary_user], CtsJdwpTunnelHostTestCases, CtsJdwpTunnelHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsJdwp',
-        test_name='cheets_CTS_R.11_r3.x86.CtsJdwp',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsJdwpTestCases', '--include-filter', 'CtsJdwpTestCases[instant]', '--include-filter', 'CtsJdwpTestCases[secondary_user]', '--include-filter', 'CtsJdwpTunnelHostTestCases', '--include-filter', 'CtsJdwpTunnelHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsJdwp',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJni b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJni
deleted file mode 100644
index d005d8c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJni
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsJni'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJniTestCases, CtsJniTestCases[instant], CtsJniTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsJni',
-        test_name='cheets_CTS_R.11_r3.x86.CtsJni',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJniTestCases', '--include-filter', 'CtsJniTestCases[instant]', '--include-filter', 'CtsJniTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsJni',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJobScheduler b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJobScheduler
deleted file mode 100644
index c2b9acc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJobScheduler
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsJobScheduler'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerSharedUidTestCases[secondary_user], CtsJobSchedulerTestCases, CtsJobSchedulerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsJobScheduler',
-        test_name='cheets_CTS_R.11_r3.x86.CtsJobScheduler',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerSharedUidTestCases[secondary_user]', '--include-filter', 'CtsJobSchedulerTestCases', '--include-filter', 'CtsJobSchedulerTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsJobScheduler',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJvmti b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJvmti
deleted file mode 100644
index e9382e1..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsJvmti
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsJvmti'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingHostTestCases[secondary_user], CtsJvmtiAttachingTestCases, CtsJvmtiAttachingTestCases[secondary_user], CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRedefineClassesHostTestCases[secondary_user], CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1900HostTestCases[secondary_user], CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1901HostTestCases[secondary_user], CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1902HostTestCases[secondary_user], CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1903HostTestCases[secondary_user], CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1904HostTestCases[secondary_user], CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1906HostTestCases[secondary_user], CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1907HostTestCases[secondary_user], CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1908HostTestCases[secondary_user], CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1909HostTestCases[secondary_user], CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1910HostTestCases[secondary_user], CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1911HostTestCases[secondary_user], CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1912HostTestCases[secondary_user], CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1913HostTestCases[secondary_user], CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1914HostTestCases[secondary_user], CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1915HostTestCases[secondary_user], CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1916HostTestCases[secondary_user], CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1917HostTestCases[secondary_user], CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1920HostTestCases[secondary_user], CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1921HostTestCases[secondary_user], CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1922HostTestCases[secondary_user], CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1923HostTestCases[secondary_user], CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1924HostTestCases[secondary_user], CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1925HostTestCases[secondary_user], CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1926HostTestCases[secondary_user], CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1927HostTestCases[secondary_user], CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1928HostTestCases[secondary_user], CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1930HostTestCases[secondary_user], CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1931HostTestCases[secondary_user], CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1932HostTestCases[secondary_user], CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1933HostTestCases[secondary_user], CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1934HostTestCases[secondary_user], CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1936HostTestCases[secondary_user], CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1937HostTestCases[secondary_user], CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1939HostTestCases[secondary_user], CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1941HostTestCases[secondary_user], CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1942HostTestCases[secondary_user], CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest1943HostTestCases[secondary_user], CtsJvmtiRunTest1953HostTestCases, CtsJvmtiRunTest1953HostTestCases[secondary_user], CtsJvmtiRunTest1958HostTestCases, CtsJvmtiRunTest1958HostTestCases[secondary_user], CtsJvmtiRunTest1962HostTestCases, CtsJvmtiRunTest1962HostTestCases[secondary_user], CtsJvmtiRunTest1967HostTestCases, CtsJvmtiRunTest1967HostTestCases[secondary_user], CtsJvmtiRunTest1968HostTestCases, CtsJvmtiRunTest1968HostTestCases[secondary_user], CtsJvmtiRunTest1969HostTestCases, CtsJvmtiRunTest1969HostTestCases[secondary_user], CtsJvmtiRunTest1970HostTestCases, CtsJvmtiRunTest1970HostTestCases[secondary_user], CtsJvmtiRunTest1971HostTestCases, CtsJvmtiRunTest1971HostTestCases[secondary_user], CtsJvmtiRunTest1974HostTestCases, CtsJvmtiRunTest1974HostTestCases[secondary_user], CtsJvmtiRunTest1975HostTestCases, CtsJvmtiRunTest1975HostTestCases[secondary_user], CtsJvmtiRunTest1976HostTestCases, CtsJvmtiRunTest1976HostTestCases[secondary_user], CtsJvmtiRunTest1977HostTestCases, CtsJvmtiRunTest1977HostTestCases[secondary_user], CtsJvmtiRunTest1978HostTestCases, CtsJvmtiRunTest1978HostTestCases[secondary_user], CtsJvmtiRunTest1979HostTestCases, CtsJvmtiRunTest1979HostTestCases[secondary_user], CtsJvmtiRunTest1981HostTestCases, CtsJvmtiRunTest1981HostTestCases[secondary_user], CtsJvmtiRunTest1982HostTestCases, CtsJvmtiRunTest1982HostTestCases[secondary_user], CtsJvmtiRunTest1983HostTestCases, CtsJvmtiRunTest1983HostTestCases[secondary_user], CtsJvmtiRunTest1984HostTestCases, CtsJvmtiRunTest1984HostTestCases[secondary_user], CtsJvmtiRunTest1988HostTestCases, CtsJvmtiRunTest1988HostTestCases[secondary_user], CtsJvmtiRunTest1989HostTestCases, CtsJvmtiRunTest1989HostTestCases[secondary_user], CtsJvmtiRunTest1990HostTestCases, CtsJvmtiRunTest1990HostTestCases[secondary_user], CtsJvmtiRunTest1991HostTestCases, CtsJvmtiRunTest1991HostTestCases[secondary_user], CtsJvmtiRunTest1992HostTestCases, CtsJvmtiRunTest1992HostTestCases[secondary_user], CtsJvmtiRunTest1994HostTestCases, CtsJvmtiRunTest1994HostTestCases[secondary_user], CtsJvmtiRunTest1995HostTestCases, CtsJvmtiRunTest1995HostTestCases[secondary_user], CtsJvmtiRunTest1996HostTestCases, CtsJvmtiRunTest1996HostTestCases[secondary_user], CtsJvmtiRunTest1997HostTestCases, CtsJvmtiRunTest1997HostTestCases[secondary_user], CtsJvmtiRunTest1998HostTestCases, CtsJvmtiRunTest1998HostTestCases[secondary_user], CtsJvmtiRunTest1999HostTestCases, CtsJvmtiRunTest1999HostTestCases[secondary_user], CtsJvmtiRunTest2001HostTestCases, CtsJvmtiRunTest2001HostTestCases[secondary_user], CtsJvmtiRunTest2002HostTestCases, CtsJvmtiRunTest2002HostTestCases[secondary_user], CtsJvmtiRunTest2003HostTestCases, CtsJvmtiRunTest2003HostTestCases[secondary_user], CtsJvmtiRunTest2004HostTestCases, CtsJvmtiRunTest2004HostTestCases[secondary_user], CtsJvmtiRunTest2005HostTestCases, CtsJvmtiRunTest2005HostTestCases[secondary_user], CtsJvmtiRunTest2006HostTestCases, CtsJvmtiRunTest2006HostTestCases[secondary_user], CtsJvmtiRunTest2007HostTestCases, CtsJvmtiRunTest2007HostTestCases[secondary_user], CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest902HostTestCases[secondary_user], CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest903HostTestCases[secondary_user], CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest904HostTestCases[secondary_user], CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest905HostTestCases[secondary_user], CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest906HostTestCases[secondary_user], CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest907HostTestCases[secondary_user], CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest908HostTestCases[secondary_user], CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest910HostTestCases[secondary_user], CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest911HostTestCases[secondary_user], CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest912HostTestCases[secondary_user], CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest913HostTestCases[secondary_user], CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest914HostTestCases[secondary_user], CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest915HostTestCases[secondary_user], CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest917HostTestCases[secondary_user], CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest918HostTestCases[secondary_user], CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest919HostTestCases[secondary_user], CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest920HostTestCases[secondary_user], CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest922HostTestCases[secondary_user], CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest923HostTestCases[secondary_user], CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest924HostTestCases[secondary_user], CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest926HostTestCases[secondary_user], CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest927HostTestCases[secondary_user], CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest928HostTestCases[secondary_user], CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest930HostTestCases[secondary_user], CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest931HostTestCases[secondary_user], CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest932HostTestCases[secondary_user], CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest940HostTestCases[secondary_user], CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest942HostTestCases[secondary_user], CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest944HostTestCases[secondary_user], CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest945HostTestCases[secondary_user], CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest947HostTestCases[secondary_user], CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest951HostTestCases[secondary_user], CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest982HostTestCases[secondary_user], CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest983HostTestCases[secondary_user], CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest984HostTestCases[secondary_user], CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest985HostTestCases[secondary_user], CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest986HostTestCases[secondary_user], CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest988HostTestCases[secondary_user], CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest989HostTestCases[secondary_user], CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest990HostTestCases[secondary_user], CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest991HostTestCases[secondary_user], CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest992HostTestCases[secondary_user], CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest993HostTestCases[secondary_user], CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest994HostTestCases[secondary_user], CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest995HostTestCases[secondary_user], CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest996HostTestCases[secondary_user], CtsJvmtiRunTest997HostTestCases, CtsJvmtiRunTest997HostTestCases[secondary_user], CtsJvmtiTaggingHostTestCases, CtsJvmtiTaggingHostTestCases[secondary_user], CtsJvmtiTrackingHostTestCases, CtsJvmtiTrackingHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsJvmti',
-        test_name='cheets_CTS_R.11_r3.x86.CtsJvmti',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1981HostTestCases', '--include-filter', 'CtsJvmtiRunTest1981HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', '--include-filter', 'CtsJvmtiRunTest908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 'CtsJvmtiRunTest984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsJvmti',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=75000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsKernelConfigTestCases
deleted file mode 100644
index 6862048..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsKernelConfigTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsKernelConfigTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsKernelConfigTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsKernelConfigTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKernelConfigTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsKernelConfigTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsKeystore b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsKeystore
deleted file mode 100644
index 06339b9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsKeystore
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsKeystore'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsKeystoreTestCases, CtsKeystoreTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsKeystore',
-        test_name='cheets_CTS_R.11_r3.x86.CtsKeystore',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsKeystoreTestCases', '--include-filter', 'CtsKeystoreTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsKeystore',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLeanbackJank b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLeanbackJank
deleted file mode 100644
index b075577..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLeanbackJank
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsLeanbackJank'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLeanbackJankTestCases, CtsLeanbackJankTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsLeanbackJank',
-        test_name='cheets_CTS_R.11_r3.x86.CtsLeanbackJank',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLeanbackJankTestCases', '--include-filter', 'CtsLeanbackJankTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLeanbackJank',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLegacyNotification2 b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLegacyNotification2
deleted file mode 100644
index 8ec59c3..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLegacyNotification2
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsLegacyNotification2'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLegacyNotification20TestCases, CtsLegacyNotification20TestCases[secondary_user], CtsLegacyNotification27TestCases, CtsLegacyNotification27TestCases[secondary_user], CtsLegacyNotification28TestCases, CtsLegacyNotification28TestCases[secondary_user], CtsLegacyNotification29TestCases, CtsLegacyNotification29TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsLegacyNotification2',
-        test_name='cheets_CTS_R.11_r3.x86.CtsLegacyNotification2',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLegacyNotification20TestCases', '--include-filter', 'CtsLegacyNotification20TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification27TestCases', '--include-filter', 'CtsLegacyNotification27TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification28TestCases', '--include-filter', 'CtsLegacyNotification28TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification29TestCases', '--include-filter', 'CtsLegacyNotification29TestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLegacyNotification2',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLibcore b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLibcore
deleted file mode 100644
index 0e31838..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLibcore
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsLibcore'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLibcoreApiEvolutionTestCases, CtsLibcoreApiEvolutionTestCases[secondary_user], CtsLibcoreFileIOTestCases, CtsLibcoreFileIOTestCases[secondary_user], CtsLibcoreJsr166TestCases, CtsLibcoreJsr166TestCases[secondary_user], CtsLibcoreLegacy22TestCases, CtsLibcoreLegacy22TestCases[secondary_user], CtsLibcoreOjTestCases, CtsLibcoreOjTestCases[secondary_user], CtsLibcoreOkHttpTestCases, CtsLibcoreOkHttpTestCases[secondary_user], CtsLibcoreTestCases, CtsLibcoreTestCases[secondary_user], CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofBCTestCases[secondary_user], CtsLibcoreWycheproofConscryptTestCases, CtsLibcoreWycheproofConscryptTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsLibcore',
-        test_name='cheets_CTS_R.11_r3.x86.CtsLibcore',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLibcoreApiEvolutionTestCases', '--include-filter', 'CtsLibcoreApiEvolutionTestCases[secondary_user]', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreFileIOTestCases[secondary_user]', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreJsr166TestCases[secondary_user]', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases[secondary_user]', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOjTestCases[secondary_user]', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases[secondary_user]', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases[secondary_user]'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLibcore',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=39600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLiblog b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLiblog
deleted file mode 100644
index 526ec69..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLiblog
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsLiblog'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLiblogTestCases, CtsLiblogTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsLiblog',
-        test_name='cheets_CTS_R.11_r3.x86.CtsLiblog',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLiblogTestCases', '--include-filter', 'CtsLiblogTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLiblog',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLocation b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLocation
deleted file mode 100644
index 0619237..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLocation
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsLocation'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLocationCoarseTestCases, CtsLocationCoarseTestCases[instant], CtsLocationCoarseTestCases[secondary_user], CtsLocationFineTestCases, CtsLocationFineTestCases[instant], CtsLocationFineTestCases[secondary_user], CtsLocationGnssTestCases, CtsLocationGnssTestCases[instant], CtsLocationGnssTestCases[secondary_user], CtsLocationNoneTestCases, CtsLocationNoneTestCases[instant], CtsLocationNoneTestCases[secondary_user], CtsLocationPrivilegedTestCases, CtsLocationPrivilegedTestCases[instant], CtsLocationPrivilegedTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsLocation',
-        test_name='cheets_CTS_R.11_r3.x86.CtsLocation',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLocationCoarseTestCases', '--include-filter', 'CtsLocationCoarseTestCases[instant]', '--include-filter', 'CtsLocationCoarseTestCases[secondary_user]', '--include-filter', 'CtsLocationFineTestCases', '--include-filter', 'CtsLocationFineTestCases[instant]', '--include-filter', 'CtsLocationFineTestCases[secondary_user]', '--include-filter', 'CtsLocationGnssTestCases', '--include-filter', 'CtsLocationGnssTestCases[instant]', '--include-filter', 'CtsLocationGnssTestCases[secondary_user]', '--include-filter', 'CtsLocationNoneTestCases', '--include-filter', 'CtsLocationNoneTestCases[instant]', '--include-filter', 'CtsLocationNoneTestCases[secondary_user]', '--include-filter', 'CtsLocationPrivilegedTestCases', '--include-filter', 'CtsLocationPrivilegedTestCases[instant]', '--include-filter', 'CtsLocationPrivilegedTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLocation',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=28800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLogd b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLogd
deleted file mode 100644
index 2726e79..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsLogd
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsLogd'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsLogdTestCases, CtsLogdTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsLogd',
-        test_name='cheets_CTS_R.11_r3.x86.CtsLogd',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLogdTestCases', '--include-filter', 'CtsLogdTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsLogd',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMatchFlag b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMatchFlag
deleted file mode 100644
index 3d4c76c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMatchFlag
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMatchFlag'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMatchFlagTestCases, CtsMatchFlagTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsMatchFlag',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMatchFlag',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMatchFlagTestCases', '--include-filter', 'CtsMatchFlagTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMatchFlag',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaBitstreamsTestCases
deleted file mode 100644
index 54a4cb9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaBitstreamsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMediaBitstreamsTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaBitstreamsTestCases, CtsMediaBitstreamsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='11_r3.x86.CtsMediaBitstreamsTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMediaBitstreamsTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaBitstreamsTestCases', '--include-filter', 'CtsMediaBitstreamsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaBitstreamsTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaHostTestCases
deleted file mode 100644
index 05f1af7..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMediaHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaHostTestCases, CtsMediaHostTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsMediaHostTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMediaHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaHostTestCases', '--include-filter', 'CtsMediaHostTestCases[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaParserTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaParserTestCases
deleted file mode 100644
index e682b29..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaParserTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMediaParserTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaParserTestCases, CtsMediaParserTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsMediaParserTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMediaParserTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaParserTestCases', '--include-filter', 'CtsMediaParserTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaParserTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaStressTestCases
deleted file mode 100644
index 11ab0df..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaStressTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMediaStressTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaStressTestCases, CtsMediaStressTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='11_r3.x86.CtsMediaStressTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMediaStressTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases', '--include-filter', 'CtsMediaStressTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaStressTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=21600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaTestCases
deleted file mode 100644
index 2dde0a0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMediaTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases, CtsMediaTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='11_r3.x86.CtsMediaTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMediaTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--include-filter', 'CtsMediaTestCases[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=39600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaTestCases.audio b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaTestCases.audio
deleted file mode 100644
index f877a54..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaTestCases.audio
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMediaTestCases.audio'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases.audio of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='11_r3.x86.CtsMediaTestCases.audio',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMediaTestCases.audio',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFocusTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFormatTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioManagerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioMetadataTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlayRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackCaptureTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPreProcessingTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPresentationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordAppOpTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecord_BufferSizeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordingConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioSystemTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioSystemUsageTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackLatencyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackOffloadTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackSurroundTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrack_ListenerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolAacTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolHapticTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolMidiTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolOggTest', '--include-filter', 'CtsMediaTestCases android.media.cts.VolumeShaperTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaTestCases.video b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaTestCases.video
deleted file mode 100644
index ab393c3..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaTestCases.video
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMediaTestCases.video'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaTestCases.video of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='11_r3.x86.CtsMediaTestCases.video',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMediaTestCases.video',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AdaptivePlaybackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecodeAccuracyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecodeEditEncodeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecoderConformanceTest', '--include-filter', 'CtsMediaTestCases android.media.cts.EncodeDecodeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.ExtractDecodeEditEncodeMuxTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaCodecPlayerTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=36000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaV2TestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaV2TestCases
deleted file mode 100644
index 5ec1a18..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMediaV2TestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMediaV2TestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMediaV2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsMediaV2TestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMediaV2TestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaV2TestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMediaV2TestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMidiTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMidiTestCases
deleted file mode 100644
index 9a4a9c5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMidiTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMidiTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsMidiTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMidiTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMidiTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMidiTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMimeMap b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMimeMap
deleted file mode 100644
index 7e51747..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMimeMap
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMimeMap'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMimeMapTestCases, CtsMimeMapTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsMimeMap',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMimeMap',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMimeMapTestCases', '--include-filter', 'CtsMimeMapTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMimeMap',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMocking b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMocking
deleted file mode 100644
index b0d4e9f..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMocking
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMocking'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMockingDebuggableTestCases, CtsMockingDebuggableTestCases[instant], CtsMockingDebuggableTestCases[secondary_user], CtsMockingTestCases, CtsMockingTestCases[instant], CtsMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsMocking',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMocking',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingDebuggableTestCases[instant]', '--include-filter', 'CtsMockingDebuggableTestCases[secondary_user]', '--include-filter', 'CtsMockingTestCases', '--include-filter', 'CtsMockingTestCases[instant]', '--include-filter', 'CtsMockingTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMocking',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMonkey b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMonkey
deleted file mode 100644
index 2fae94c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMonkey
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMonkey'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMonkeyTestCases, CtsMonkeyTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsMonkey',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMonkey',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMonkeyTestCases', '--include-filter', 'CtsMonkeyTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMonkey',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMultiUser b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMultiUser
deleted file mode 100644
index 5438a61..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsMultiUser
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsMultiUser'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsMultiUserHostTestCases, CtsMultiUserHostTestCases[instant], CtsMultiUserHostTestCases[secondary_user], CtsMultiUserTestCases, CtsMultiUserTestCases[instant], CtsMultiUserTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsMultiUser',
-        test_name='cheets_CTS_R.11_r3.x86.CtsMultiUser',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserHostTestCases[instant]', '--include-filter', 'CtsMultiUserHostTestCases[secondary_user]', '--include-filter', 'CtsMultiUserTestCases', '--include-filter', 'CtsMultiUserTestCases[instant]', '--include-filter', 'CtsMultiUserTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsMultiUser',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNNAPI b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNNAPI
deleted file mode 100644
index ef6dfa6..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNNAPI
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsNNAPI'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNNAPITestCases, CtsNNAPITestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsNNAPI',
-        test_name='cheets_CTS_R.11_r3.x86.CtsNNAPI',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNNAPITestCases', '--include-filter', 'CtsNNAPITestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNNAPI',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNNAPIBenchmark b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNNAPIBenchmark
deleted file mode 100644
index 91afc96..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNNAPIBenchmark
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsNNAPIBenchmark'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNNAPIBenchmarkTestCases, CtsNNAPIBenchmarkTestCases[instant], CtsNNAPIBenchmarkTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsNNAPIBenchmark',
-        test_name='cheets_CTS_R.11_r3.x86.CtsNNAPIBenchmark',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNNAPIBenchmarkTestCases', '--include-filter', 'CtsNNAPIBenchmarkTestCases[instant]', '--include-filter', 'CtsNNAPIBenchmarkTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNNAPIBenchmark',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNative b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNative
deleted file mode 100644
index 7bb539e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNative
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsNative'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNativeEncryptionTestCases, CtsNativeEncryptionTestCases[instant], CtsNativeEncryptionTestCases[secondary_user], CtsNativeHardwareTestCases, CtsNativeHardwareTestCases[secondary_user], CtsNativeMediaAAudioTestCases, CtsNativeMediaAAudioTestCases[instant], CtsNativeMediaAAudioTestCases[secondary_user], CtsNativeMediaMetricsTestCases, CtsNativeMediaMetricsTestCases[instant], CtsNativeMediaMetricsTestCases[secondary_user], CtsNativeMediaSlTestCases, CtsNativeMediaSlTestCases[instant], CtsNativeMediaSlTestCases[secondary_user], CtsNativeMediaXaTestCases, CtsNativeMediaXaTestCases[instant], CtsNativeMediaXaTestCases[secondary_user], CtsNativeMidiTestCases, CtsNativeMidiTestCases[secondary_user], CtsNativeNetDnsTestCases, CtsNativeNetDnsTestCases[instant], CtsNativeNetDnsTestCases[secondary_user], CtsNativeNetTestCases, CtsNativeNetTestCases[instant], CtsNativeNetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsNative',
-        test_name='cheets_CTS_R.11_r3.x86.CtsNative',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeEncryptionTestCases', '--include-filter', 'CtsNativeEncryptionTestCases[instant]', '--include-filter', 'CtsNativeEncryptionTestCases[secondary_user]', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeHardwareTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases[instant]', '--include-filter', 'CtsNativeMediaAAudioTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaMetricsTestCases', '--include-filter', 'CtsNativeMediaMetricsTestCases[instant]', '--include-filter', 'CtsNativeMediaMetricsTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaSlTestCases[instant]', '--include-filter', 'CtsNativeMediaSlTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeMediaXaTestCases[instant]', '--include-filter', 'CtsNativeMediaXaTestCases[secondary_user]', '--include-filter', 'CtsNativeMidiTestCases', '--include-filter', 'CtsNativeMidiTestCases[secondary_user]', '--include-filter', 'CtsNativeNetDnsTestCases', '--include-filter', 'CtsNativeNetDnsTestCases[instant]', '--include-filter', 'CtsNativeNetDnsTestCases[secondary_user]', '--include-filter', 'CtsNativeNetTestCases', '--include-filter', 'CtsNativeNetTestCases[instant]', '--include-filter', 'CtsNativeNetTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNative',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=46800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNdef b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNdef
deleted file mode 100644
index 67dcdbd..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNdef
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsNdef'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNdefTestCases, CtsNdefTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsNdef',
-        test_name='cheets_CTS_R.11_r3.x86.CtsNdef',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNdefTestCases', '--include-filter', 'CtsNdefTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNdef',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNdkBinder b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNdkBinder
deleted file mode 100644
index cf4e9f7..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNdkBinder
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsNdkBinder'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNdkBinderTestCases, CtsNdkBinderTestCases[instant], CtsNdkBinderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsNdkBinder',
-        test_name='cheets_CTS_R.11_r3.x86.CtsNdkBinder',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNdkBinderTestCases', '--include-filter', 'CtsNdkBinderTestCases[instant]', '--include-filter', 'CtsNdkBinderTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNdkBinder',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNet b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNet
deleted file mode 100644
index 42ccd48..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNet
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsNet'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNetApi23TestCases, CtsNetApi23TestCases[secondary_user], CtsNetSecConfigAttributeTestCases, CtsNetSecConfigAttributeTestCases[instant], CtsNetSecConfigAttributeTestCases[secondary_user], CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugDisabledTestCases[instant], CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user], CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases[instant], CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user], CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigBasicDomainConfigTestCases[instant], CtsNetSecConfigBasicDomainConfigTestCases[secondary_user], CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigCleartextTrafficTestCases[instant], CtsNetSecConfigCleartextTrafficTestCases[secondary_user], CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigDownloadManagerTestCases[secondary_user], CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigInvalidPinTestCases[instant], CtsNetSecConfigInvalidPinTestCases[secondary_user], CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigNestedDomainConfigTestCases[instant], CtsNetSecConfigNestedDomainConfigTestCases[secondary_user], CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user], CtsNetSecConfigResourcesSrcTestCases, CtsNetSecConfigResourcesSrcTestCases[instant], CtsNetSecConfigResourcesSrcTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user], CtsNetTestCases, CtsNetTestCasesInternetPermission, CtsNetTestCasesInternetPermission[instant], CtsNetTestCasesInternetPermission[secondary_user], CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyApi22[secondary_user], CtsNetTestCasesLegacyPermission22, CtsNetTestCasesLegacyPermission22[secondary_user], CtsNetTestCasesUpdateStatsPermission, CtsNetTestCasesUpdateStatsPermission[instant], CtsNetTestCasesUpdateStatsPermission[secondary_user], CtsNetTestCases[instant], CtsNetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsNet',
-        test_name='cheets_CTS_R.11_r3.x86.CtsNet',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetApi23TestCases', '--include-filter', 'CtsNetApi23TestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigAttributeTestCases[instant]', '--include-filter', 'CtsNetSecConfigAttributeTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[instant]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[instant]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[instant]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user]', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesInternetPermission', '--include-filter', 'CtsNetTestCasesInternetPermission[instant]', '--include-filter', 'CtsNetTestCasesInternetPermission[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyApi22[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--include-filter', 'CtsNetTestCasesLegacyPermission22[secondary_user]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[instant]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[secondary_user]', '--include-filter', 'CtsNetTestCases[instant]', '--include-filter', 'CtsNetTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNet',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=90000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNfc b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNfc
deleted file mode 100644
index a0d556e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNfc
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsNfc'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNfcTestCases, CtsNfcTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsNfc',
-        test_name='cheets_CTS_R.11_r3.x86.CtsNfc',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNfcTestCases', '--include-filter', 'CtsNfcTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNfc',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNoPermission b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNoPermission
deleted file mode 100644
index 2065112..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsNoPermission
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsNoPermission'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsNoPermissionTestCases, CtsNoPermissionTestCases25, CtsNoPermissionTestCases25[secondary_user], CtsNoPermissionTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsNoPermission',
-        test_name='cheets_CTS_R.11_r3.x86.CtsNoPermission',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNoPermissionTestCases', '--include-filter', 'CtsNoPermissionTestCases25', '--include-filter', 'CtsNoPermissionTestCases25[secondary_user]', '--include-filter', 'CtsNoPermissionTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsNoPermission',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsOmapi b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsOmapi
deleted file mode 100644
index f349526..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsOmapi
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsOmapi'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOmapiTestCases, CtsOmapiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsOmapi',
-        test_name='cheets_CTS_R.11_r3.x86.CtsOmapi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOmapiTestCases', '--include-filter', 'CtsOmapiTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsOmapi',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsOpenG b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsOpenG
deleted file mode 100644
index f54d5de..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsOpenG
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsOpenG'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOpenGLTestCases, CtsOpenGLTestCases[secondary_user], CtsOpenGlPerf2TestCases, CtsOpenGlPerf2TestCases[secondary_user], CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsOpenG',
-        test_name='cheets_CTS_R.11_r3.x86.CtsOpenG',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGLTestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerf2TestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerfTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsOpenG',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsOs b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsOs
deleted file mode 100644
index 9408393..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsOs
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsOs'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsOsHostTestCases, CtsOsHostTestCases[instant], CtsOsHostTestCases[secondary_user], CtsOsTestCases, CtsOsTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsOs',
-        test_name='cheets_CTS_R.11_r3.x86.CtsOs',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsHostTestCases[instant]', '--include-filter', 'CtsOsHostTestCases[secondary_user]', '--include-filter', 'CtsOsTestCases', '--include-filter', 'CtsOsTestCases[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsOs',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPackage b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPackage
deleted file mode 100644
index 0478703..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPackage
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsPackage'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPackageInstallAppOpDefaultTestCases, CtsPackageInstallAppOpDefaultTestCases[instant], CtsPackageInstallAppOpDefaultTestCases[secondary_user], CtsPackageInstallAppOpDeniedTestCases, CtsPackageInstallAppOpDeniedTestCases[instant], CtsPackageInstallAppOpDeniedTestCases[secondary_user], CtsPackageInstallTestCases, CtsPackageInstallTestCases[instant], CtsPackageInstallTestCases[secondary_user], CtsPackageInstallerTapjackingTestCases, CtsPackageInstallerTapjackingTestCases[secondary_user], CtsPackageUninstallTestCases, CtsPackageUninstallTestCases[secondary_user], CtsPackageWatchdogTestCases, CtsPackageWatchdogTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsPackage',
-        test_name='cheets_CTS_R.11_r3.x86.CtsPackage',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallTestCases', '--include-filter', 'CtsPackageInstallTestCases[instant]', '--include-filter', 'CtsPackageInstallTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallerTapjackingTestCases', '--include-filter', 'CtsPackageInstallerTapjackingTestCases[secondary_user]', '--include-filter', 'CtsPackageUninstallTestCases', '--include-filter', 'CtsPackageUninstallTestCases[secondary_user]', '--include-filter', 'CtsPackageWatchdogTestCases', '--include-filter', 'CtsPackageWatchdogTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPackage',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=28800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPdf b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPdf
deleted file mode 100644
index f3b9c84..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPdf
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsPdf'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPdfTestCases, CtsPdfTestCases[instant], CtsPdfTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsPdf',
-        test_name='cheets_CTS_R.11_r3.x86.CtsPdf',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPdfTestCases', '--include-filter', 'CtsPdfTestCases[instant]', '--include-filter', 'CtsPdfTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPdf',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPerfetto b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPerfetto
deleted file mode 100644
index 7e443e0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPerfetto
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsPerfetto'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPerfettoTestCases, CtsPerfettoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsPerfetto',
-        test_name='cheets_CTS_R.11_r3.x86.CtsPerfetto',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases', '--include-filter', 'CtsPerfettoTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPerfetto',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPermission b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPermission
deleted file mode 100644
index 4a1145a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPermission
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsPermission'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPermission2TestCases, CtsPermission2TestCases[instant], CtsPermission3TestCases, CtsPermission3TestCases[secondary_user], CtsPermissionTestCases, CtsPermissionTestCasesSdk28, CtsPermissionTestCasesSdk28[instant], CtsPermissionTestCasesSdk28[secondary_user], CtsPermissionTestCasesTelephony, CtsPermissionTestCasesTelephony[instant], CtsPermissionTestCasesTelephony[secondary_user], CtsPermissionTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsPermission',
-        test_name='cheets_CTS_R.11_r3.x86.CtsPermission',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermission2TestCases[instant]', '--include-filter', 'CtsPermission3TestCases', '--include-filter', 'CtsPermission3TestCases[secondary_user]', '--include-filter', 'CtsPermissionTestCases', '--include-filter', 'CtsPermissionTestCasesSdk28', '--include-filter', 'CtsPermissionTestCasesSdk28[instant]', '--include-filter', 'CtsPermissionTestCasesSdk28[secondary_user]', '--include-filter', 'CtsPermissionTestCasesTelephony', '--include-filter', 'CtsPermissionTestCasesTelephony[instant]', '--include-filter', 'CtsPermissionTestCasesTelephony[secondary_user]', '--include-filter', 'CtsPermissionTestCases[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPermission',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=23400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPreference b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPreference
deleted file mode 100644
index 00bd55c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPreference
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsPreference'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPreferenceTestCases, CtsPreferenceTestCases[instant], CtsPreferenceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsPreference',
-        test_name='cheets_CTS_R.11_r3.x86.CtsPreference',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPreferenceTestCases', '--include-filter', 'CtsPreferenceTestCases[instant]', '--include-filter', 'CtsPreferenceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPreference',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPrint b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPrint
deleted file mode 100644
index 0533676..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsPrint
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsPrint'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsPrintTestCases, CtsPrintTestCases[instant], CtsPrintTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsPrint',
-        test_name='cheets_CTS_R.11_r3.x86.CtsPrint',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPrintTestCases', '--include-filter', 'CtsPrintTestCases[instant]', '--include-filter', 'CtsPrintTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsPrint',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsProto b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsProto
deleted file mode 100644
index fd30804..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsProto
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsProto'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsProtoTestCases, CtsProtoTestCases[instant], CtsProtoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsProto',
-        test_name='cheets_CTS_R.11_r3.x86.CtsProto',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsProtoTestCases', '--include-filter', 'CtsProtoTestCases[instant]', '--include-filter', 'CtsProtoTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsProto',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsProvider b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsProvider
deleted file mode 100644
index 3bcf035..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsProvider
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsProvider'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsProviderTestCases, CtsProviderTestCases[secondary_user], CtsProviderUiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsProvider',
-        test_name='cheets_CTS_R.11_r3.x86.CtsProvider',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsProviderTestCases', '--include-filter', 'CtsProviderTestCases[secondary_user]', '--include-filter', 'CtsProviderUiTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsProvider',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsQuickAccessWallet b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsQuickAccessWallet
deleted file mode 100644
index 744b5f9..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsQuickAccessWallet
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsQuickAccessWallet'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsQuickAccessWalletTestCases, CtsQuickAccessWalletTestCases[instant], CtsQuickAccessWalletTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsQuickAccessWallet',
-        test_name='cheets_CTS_R.11_r3.x86.CtsQuickAccessWallet',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsQuickAccessWalletTestCases', '--include-filter', 'CtsQuickAccessWalletTestCases[instant]', '--include-filter', 'CtsQuickAccessWalletTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsQuickAccessWallet',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRenderscript b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRenderscript
deleted file mode 100644
index 280e9a3..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRenderscript
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsRenderscript'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRenderscriptLegacyTestCases, CtsRenderscriptLegacyTestCases[secondary_user], CtsRenderscriptTestCases, CtsRenderscriptTestCases[instant], CtsRenderscriptTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsRenderscript',
-        test_name='cheets_CTS_R.11_r3.x86.CtsRenderscript',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptLegacyTestCases[secondary_user]', '--include-filter', 'CtsRenderscriptTestCases', '--include-filter', 'CtsRenderscriptTestCases[instant]', '--include-filter', 'CtsRenderscriptTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsRenderscript',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsResolverService b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsResolverService
deleted file mode 100644
index 5dfbf8d..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsResolverService
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsResolverService'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsResolverServiceTestCases, CtsResolverServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsResolverService',
-        test_name='cheets_CTS_R.11_r3.x86.CtsResolverService',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsResolverServiceTestCases', '--include-filter', 'CtsResolverServiceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsResolverService',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsResourcesLoader b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsResourcesLoader
deleted file mode 100644
index bd76906..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsResourcesLoader
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsResourcesLoader'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsResourcesLoaderTests, CtsResourcesLoaderTests[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsResourcesLoader',
-        test_name='cheets_CTS_R.11_r3.x86.CtsResourcesLoader',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsResourcesLoaderTests', '--include-filter', 'CtsResourcesLoaderTests[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsResourcesLoader',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRole b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRole
deleted file mode 100644
index 70897f6..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRole
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsRole'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRoleTestCases, CtsRoleTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsRole',
-        test_name='cheets_CTS_R.11_r3.x86.CtsRole',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRoleTestCases', '--include-filter', 'CtsRoleTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsRole',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRollbackManagerHostTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRollbackManagerHostTestCases
deleted file mode 100644
index 20bfdaa..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRollbackManagerHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsRollbackManagerHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRollbackManagerHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsRollbackManagerHostTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsRollbackManagerHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsRollbackManagerHostTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsRollbackManagerHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRs b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRs
deleted file mode 100644
index 3108b0e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsRs
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsRs'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsRsBlasTestCases, CtsRsBlasTestCases[secondary_user], CtsRsCppTestCases, CtsRsCppTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsRs',
-        test_name='cheets_CTS_R.11_r3.x86.CtsRs',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsBlasTestCases[secondary_user]', '--include-filter', 'CtsRsCppTestCases', '--include-filter', 'CtsRsCppTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsRs',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSample b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSample
deleted file mode 100644
index 5d4b980..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSample
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSample'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleDeviceTestCases[instant], CtsSampleDeviceTestCases[secondary_user], CtsSampleHostTestCases, CtsSampleHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSample',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSample',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleDeviceTestCases[instant]', '--include-filter', 'CtsSampleDeviceTestCases[secondary_user]', '--include-filter', 'CtsSampleHostTestCases', '--include-filter', 'CtsSampleHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSample',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSax b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSax
deleted file mode 100644
index 85299d6..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSax
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSax'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSaxTestCases, CtsSaxTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSax',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSax',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSaxTestCases', '--include-filter', 'CtsSaxTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSax',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsScopedStorageHostTest b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsScopedStorageHostTest
deleted file mode 100644
index 80d70f4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsScopedStorageHostTest
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsScopedStorageHostTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsScopedStorageHostTest, CtsScopedStorageHostTest[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsScopedStorageHostTest',
-        test_name='cheets_CTS_R.11_r3.x86.CtsScopedStorageHostTest',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsScopedStorageHostTest', '--include-filter', 'CtsScopedStorageHostTest[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsScopedStorageHostTest',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSdkExtensions b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSdkExtensions
deleted file mode 100644
index 4f881c2..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSdkExtensions
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSdkExtensions'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSdkExtensionsTestCases, CtsSdkExtensionsTestCases[instant], CtsSdkExtensionsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSdkExtensions',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSdkExtensions',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSdkExtensionsTestCases', '--include-filter', 'CtsSdkExtensionsTestCases[instant]', '--include-filter', 'CtsSdkExtensionsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSdkExtensions',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSeccompHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSeccompHost
deleted file mode 100644
index cef9a08..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSeccompHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSeccompHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSeccompHostTestCases, CtsSeccompHostTestCases[instant], CtsSeccompHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSeccompHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSeccompHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSeccompHostTestCases', '--include-filter', 'CtsSeccompHostTestCases[instant]', '--include-filter', 'CtsSeccompHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSeccompHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSecure b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSecure
deleted file mode 100644
index 2572a73..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSecure
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSecure'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases1[secondary_user], CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases2[secondary_user], CtsSecureElementAccessControlTestCases3, CtsSecureElementAccessControlTestCases3[secondary_user], CtsSecureFrpInstallTestCases, CtsSecureFrpInstallTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSecure',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSecure',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--include-filter', 'CtsSecureElementAccessControlTestCases3[secondary_user]', '--include-filter', 'CtsSecureFrpInstallTestCases', '--include-filter', 'CtsSecureFrpInstallTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSecure',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSecurity b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSecurity
deleted file mode 100644
index 2f9d4ec..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSecurity
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSecurity'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSecurityBulletinHostTestCases, CtsSecurityBulletinHostTestCases[secondary_user], CtsSecurityHostTestCases, CtsSecurityHostTestCases[secondary_user], CtsSecurityTestCases, CtsSecurityTestCases[instant], CtsSecurityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSecurity',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSecurity',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--include-filter', 'CtsSecurityBulletinHostTestCases[secondary_user]', '--include-filter', 'CtsSecurityHostTestCases', '--include-filter', 'CtsSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsSecurityTestCases', '--include-filter', 'CtsSecurityTestCases[instant]', '--include-filter', 'CtsSecurityTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSecurity',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=154800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSelinux b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSelinux
deleted file mode 100644
index 274cd5c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSelinux
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSelinux'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSelinuxEphemeralTestCases, CtsSelinuxEphemeralTestCases[instant], CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk25TestCases[secondary_user], CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdk27TestCases[secondary_user], CtsSelinuxTargetSdk28TestCases, CtsSelinuxTargetSdk28TestCases[secondary_user], CtsSelinuxTargetSdk29TestCases, CtsSelinuxTargetSdk29TestCases[secondary_user], CtsSelinuxTargetSdkCurrentTestCases, CtsSelinuxTargetSdkCurrentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSelinux',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSelinux',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSelinuxEphemeralTestCases', '--include-filter', 'CtsSelinuxEphemeralTestCases[instant]', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk25TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk28TestCases', '--include-filter', 'CtsSelinuxTargetSdk28TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk29TestCases', '--include-filter', 'CtsSelinuxTargetSdk29TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSelinux',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=23400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSensor b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSensor
deleted file mode 100644
index c1497f0..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSensor
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSensor'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSensorTestCases, CtsSensorTestCases[instant], CtsSensorTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSensor',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSensor',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases', '--include-filter', 'CtsSensorTestCases[instant]', '--include-filter', 'CtsSensorTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSensor',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSettings b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSettings
deleted file mode 100644
index f140692..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSettings
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSettings'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSettingsHostTestCases, CtsSettingsTestCases, CtsSettingsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSettings',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSettings',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSettingsHostTestCases', '--include-filter', 'CtsSettingsTestCases', '--include-filter', 'CtsSettingsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSettings',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSharedLibsApiSignature b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSharedLibsApiSignature
deleted file mode 100644
index fa671bf..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSharedLibsApiSignature
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSharedLibsApiSignature'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSharedLibsApiSignatureTestCases, CtsSharedLibsApiSignatureTestCases[instant], CtsSharedLibsApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSharedLibsApiSignature',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSharedLibsApiSignature',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSharedLibsApiSignatureTestCases', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[instant]', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSharedLibsApiSignature',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSharesheet b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSharesheet
deleted file mode 100644
index 2426887..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSharesheet
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSharesheet'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSharesheetTestCases, CtsSharesheetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSharesheet',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSharesheet',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSharesheetTestCases', '--include-filter', 'CtsSharesheetTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSharesheet',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsShortcut b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsShortcut
deleted file mode 100644
index e2ad5e5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsShortcut
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsShortcut'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerLauncher1, CtsShortcutManagerLauncher1[secondary_user], CtsShortcutManagerLauncher2, CtsShortcutManagerLauncher2[secondary_user], CtsShortcutManagerLauncher3, CtsShortcutManagerLauncher3[secondary_user], CtsShortcutManagerLauncher4, CtsShortcutManagerLauncher4[secondary_user], CtsShortcutManagerPackage1, CtsShortcutManagerPackage1[secondary_user], CtsShortcutManagerPackage2, CtsShortcutManagerPackage2[secondary_user], CtsShortcutManagerPackage3, CtsShortcutManagerPackage3[secondary_user], CtsShortcutManagerPackage4, CtsShortcutManagerPackage4[secondary_user], CtsShortcutManagerTestCases, CtsShortcutManagerTestCases[secondary_user], CtsShortcutManagerThrottlingTest, CtsShortcutManagerThrottlingTest[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsShortcut',
-        test_name='cheets_CTS_R.11_r3.x86.CtsShortcut',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerLauncher1', '--include-filter', 'CtsShortcutManagerLauncher1[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher2', '--include-filter', 'CtsShortcutManagerLauncher2[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher3', '--include-filter', 'CtsShortcutManagerLauncher3[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher4', '--include-filter', 'CtsShortcutManagerLauncher4[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage1', '--include-filter', 'CtsShortcutManagerPackage1[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage2', '--include-filter', 'CtsShortcutManagerPackage2[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage3', '--include-filter', 'CtsShortcutManagerPackage3[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage4', '--include-filter', 'CtsShortcutManagerPackage4[secondary_user]', '--include-filter', 'CtsShortcutManagerTestCases', '--include-filter', 'CtsShortcutManagerTestCases[secondary_user]', '--include-filter', 'CtsShortcutManagerThrottlingTest', '--include-filter', 'CtsShortcutManagerThrottlingTest[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsShortcut',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=39600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSignedConfigHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSignedConfigHost
deleted file mode 100644
index bfb88ec..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSignedConfigHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSignedConfigHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSignedConfigHostTestCases, CtsSignedConfigHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSignedConfigHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSignedConfigHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSignedConfigHostTestCases', '--include-filter', 'CtsSignedConfigHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSignedConfigHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSimRestrictedApis b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSimRestrictedApis
deleted file mode 100644
index 3e770cc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSimRestrictedApis
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSimRestrictedApis'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimRestrictedApisTestCases, CtsSimRestrictedApisTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSimRestrictedApis',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSimRestrictedApis',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSimRestrictedApisTestCases', '--include-filter', 'CtsSimRestrictedApisTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSimRestrictedApis',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSimpleCpu b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSimpleCpu
deleted file mode 100644
index 922d93c..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSimpleCpu
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSimpleCpu'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimpleCpuTestCases, CtsSimpleCpuTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSimpleCpu',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSimpleCpu',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSimpleCpuTestCases', '--include-filter', 'CtsSimpleCpuTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSimpleCpu',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSimpleperfTestCases
deleted file mode 100644
index 6244e72..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSimpleperfTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSimpleperfTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSimpleperfTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSimpleperfTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleperfTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSimpleperfTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSkQP b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSkQP
deleted file mode 100644
index 356efb3..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSkQP
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSkQP'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSkQPTestCases, CtsSkQPTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSkQP',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSkQP',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSkQPTestCases', '--include-filter', 'CtsSkQPTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSkQP',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSlice b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSlice
deleted file mode 100644
index 4eafd1a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSlice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSlice'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSliceTestCases, CtsSliceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSlice',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSlice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSliceTestCases', '--include-filter', 'CtsSliceTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSlice',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSoundTrigger b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSoundTrigger
deleted file mode 100644
index c958a48..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSoundTrigger
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSoundTrigger'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSoundTriggerTestCases, CtsSoundTriggerTestCases[instant], CtsSoundTriggerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSoundTrigger',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSoundTrigger',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSoundTriggerTestCases', '--include-filter', 'CtsSoundTriggerTestCases[instant]', '--include-filter', 'CtsSoundTriggerTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSoundTrigger',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSpeech b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSpeech
deleted file mode 100644
index 0467b0e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSpeech
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSpeech'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSpeechTestCases, CtsSpeechTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSpeech',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSpeech',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSpeechTestCases', '--include-filter', 'CtsSpeechTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSpeech',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsStagedInstallHostTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsStagedInstallHostTestCases
deleted file mode 100644
index 24d61cc..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsStagedInstallHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsStagedInstallHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsStagedInstallHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsStagedInstallHostTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsStagedInstallHostTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsStagedInstallHostTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsStagedInstallHostTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsStatsdHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsStatsdHost
deleted file mode 100644
index e1673f2..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsStatsdHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsStatsdHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsStatsdHostTestCases, CtsStatsdHostTestCases[instant], CtsStatsdHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsStatsdHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsStatsdHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsStatsdHostTestCases', '--include-filter', 'CtsStatsdHostTestCases[instant]', '--include-filter', 'CtsStatsdHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsStatsdHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsStrictJavaPackages b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsStrictJavaPackages
deleted file mode 100644
index b11a42b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsStrictJavaPackages
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsStrictJavaPackages'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsStrictJavaPackagesTestCases, CtsStrictJavaPackagesTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsStrictJavaPackages',
-        test_name='cheets_CTS_R.11_r3.x86.CtsStrictJavaPackages',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsStrictJavaPackagesTestCases', '--include-filter', 'CtsStrictJavaPackagesTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsStrictJavaPackages',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSuspendApps b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSuspendApps
deleted file mode 100644
index 9d252d8..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSuspendApps
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSuspendApps'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSuspendAppsPermissionTestCases, CtsSuspendAppsPermissionTestCases[secondary_user], CtsSuspendAppsTestCases, CtsSuspendAppsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSuspendApps',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSuspendApps',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSuspendAppsPermissionTestCases', '--include-filter', 'CtsSuspendAppsPermissionTestCases[secondary_user]', '--include-filter', 'CtsSuspendAppsTestCases', '--include-filter', 'CtsSuspendAppsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSuspendApps',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSustainedPerformanceHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSustainedPerformanceHost
deleted file mode 100644
index 835b6df..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSustainedPerformanceHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSustainedPerformanceHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSustainedPerformanceHostTestCases, CtsSustainedPerformanceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSustainedPerformanceHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSustainedPerformanceHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--include-filter', 'CtsSustainedPerformanceHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSustainedPerformanceHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSync b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSync
deleted file mode 100644
index 8227ab6..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSync
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSync'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases, CtsSyncAccountAccessOtherCertTestCases[secondary_user], CtsSyncContentHostTestCases, CtsSyncContentHostTestCases[secondary_user], CtsSyncManagerTestsCases, CtsSyncManagerTestsCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSync',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSync',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases[secondary_user]', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncContentHostTestCases[secondary_user]', '--include-filter', 'CtsSyncManagerTestsCases', '--include-filter', 'CtsSyncManagerTestsCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSync',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSystem b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSystem
deleted file mode 100644
index 547198b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsSystem
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsSystem'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsSystemApiAnnotationTestCases, CtsSystemApiAnnotationTestCases[instant], CtsSystemApiAnnotationTestCases[secondary_user], CtsSystemApiSignatureTestCases, CtsSystemApiSignatureTestCases[instant], CtsSystemApiSignatureTestCases[secondary_user], CtsSystemIntentTestCases, CtsSystemIntentTestCases[secondary_user], CtsSystemUiHostTestCases, CtsSystemUiHostTestCases[instant], CtsSystemUiHostTestCases[secondary_user], CtsSystemUiTestCases, CtsSystemUiTestCases[instant], CtsSystemUiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsSystem',
-        test_name='cheets_CTS_R.11_r3.x86.CtsSystem',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiAnnotationTestCases[instant]', '--include-filter', 'CtsSystemApiAnnotationTestCases[secondary_user]', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases[instant]', '--include-filter', 'CtsSystemApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemIntentTestCases[secondary_user]', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiHostTestCases[instant]', '--include-filter', 'CtsSystemUiHostTestCases[secondary_user]', '--include-filter', 'CtsSystemUiTestCases', '--include-filter', 'CtsSystemUiTestCases[instant]', '--include-filter', 'CtsSystemUiTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsSystem',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=27000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTaggingHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTaggingHost
deleted file mode 100644
index e3463c5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTaggingHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsTaggingHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTaggingHostTestCases, CtsTaggingHostTestCases[instant], CtsTaggingHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsTaggingHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsTaggingHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTaggingHostTestCases', '--include-filter', 'CtsTaggingHostTestCases[instant]', '--include-filter', 'CtsTaggingHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTaggingHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTelecom b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTelecom
deleted file mode 100644
index 9072821..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTelecom
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsTelecom'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases2[secondary_user], CtsTelecomTestCases3, CtsTelecomTestCases3[secondary_user], CtsTelecomTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsTelecom',
-        test_name='cheets_CTS_R.11_r3.x86.CtsTelecom',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases2[secondary_user]', '--include-filter', 'CtsTelecomTestCases3', '--include-filter', 'CtsTelecomTestCases3[secondary_user]', '--include-filter', 'CtsTelecomTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTelecom',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTelephony b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTelephony
deleted file mode 100644
index 26b549b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTelephony
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsTelephony'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTelephony2TestCases, CtsTelephony2TestCases[instant], CtsTelephony2TestCases[secondary_user], CtsTelephony3TestCases, CtsTelephony3TestCases[secondary_user], CtsTelephonyHostCases, CtsTelephonyHostCases[secondary_user], CtsTelephonyProviderHostCases, CtsTelephonyProviderHostCases[secondary_user], CtsTelephonyProviderTestCases, CtsTelephonyProviderTestCases[secondary_user], CtsTelephonySdk28TestCases, CtsTelephonySdk28TestCases[secondary_user], CtsTelephonyTestCases, CtsTelephonyTestCasesPermissionReadPhoneState, CtsTelephonyTestCasesPermissionReadPhoneState[instant], CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsTelephony',
-        test_name='cheets_CTS_R.11_r3.x86.CtsTelephony',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephony2TestCases[instant]', '--include-filter', 'CtsTelephony2TestCases[secondary_user]', '--include-filter', 'CtsTelephony3TestCases', '--include-filter', 'CtsTelephony3TestCases[secondary_user]', '--include-filter', 'CtsTelephonyHostCases', '--include-filter', 'CtsTelephonyHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderHostCases', '--include-filter', 'CtsTelephonyProviderHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderTestCases', '--include-filter', 'CtsTelephonyProviderTestCases[secondary_user]', '--include-filter', 'CtsTelephonySdk28TestCases', '--include-filter', 'CtsTelephonySdk28TestCases[secondary_user]', '--include-filter', 'CtsTelephonyTestCases', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[instant]', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTelephony',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=32400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTestHarnessMode b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTestHarnessMode
deleted file mode 100644
index f08cd93..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTestHarnessMode
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsTestHarnessMode'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTestHarnessModeTestCases, CtsTestHarnessModeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsTestHarnessMode',
-        test_name='cheets_CTS_R.11_r3.x86.CtsTestHarnessMode',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTestHarnessModeTestCases', '--include-filter', 'CtsTestHarnessModeTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTestHarnessMode',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTetheringTest b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTetheringTest
deleted file mode 100644
index f4491b4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTetheringTest
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsTetheringTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTetheringTest, CtsTetheringTest[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsTetheringTest',
-        test_name='cheets_CTS_R.11_r3.x86.CtsTetheringTest',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTetheringTest', '--include-filter', 'CtsTetheringTest[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTetheringTest',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsText b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsText
deleted file mode 100644
index 9b60b65..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsText
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsText'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTextClassifierTestCases, CtsTextClassifierTestCases[secondary_user], CtsTextTestCases, CtsTextTestCases[instant], CtsTextTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsText',
-        test_name='cheets_CTS_R.11_r3.x86.CtsText',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTextClassifierTestCases', '--include-filter', 'CtsTextClassifierTestCases[secondary_user]', '--include-filter', 'CtsTextTestCases', '--include-filter', 'CtsTextTestCases[instant]', '--include-filter', 'CtsTextTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsText',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTfliteNnapiDelegate b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTfliteNnapiDelegate
deleted file mode 100644
index 4496cc2..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTfliteNnapiDelegate
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsTfliteNnapiDelegate'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTfliteNnapiDelegateTestCases, CtsTfliteNnapiDelegateTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsTfliteNnapiDelegate',
-        test_name='cheets_CTS_R.11_r3.x86.CtsTfliteNnapiDelegate',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTfliteNnapiDelegateTestCases', '--include-filter', 'CtsTfliteNnapiDelegateTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTfliteNnapiDelegate',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTheme b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTheme
deleted file mode 100644
index 99a85ef..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTheme
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsTheme'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsThemeDeviceTestCases, CtsThemeDeviceTestCases[secondary_user], CtsThemeHostTestCases, CtsThemeHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsTheme',
-        test_name='cheets_CTS_R.11_r3.x86.CtsTheme',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeDeviceTestCases[secondary_user]', '--include-filter', 'CtsThemeHostTestCases', '--include-filter', 'CtsThemeHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTheme',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsThermal b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsThermal
deleted file mode 100644
index be8adf7..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsThermal
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsThermal'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsThermalTestCases, CtsThermalTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsThermal',
-        test_name='cheets_CTS_R.11_r3.x86.CtsThermal',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThermalTestCases', '--include-filter', 'CtsThermalTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsThermal',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsToast b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsToast
deleted file mode 100644
index a559f2f..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsToast
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsToast'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsToastLegacyTestCases, CtsToastLegacyTestCases[secondary_user], CtsToastTestCases, CtsToastTestCases[instant], CtsToastTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsToast',
-        test_name='cheets_CTS_R.11_r3.x86.CtsToast',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastLegacyTestCases[secondary_user]', '--include-filter', 'CtsToastTestCases', '--include-filter', 'CtsToastTestCases[instant]', '--include-filter', 'CtsToastTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsToast',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTransition b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTransition
deleted file mode 100644
index 2cd72c6..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTransition
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsTransition'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTransitionTestCases, CtsTransitionTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsTransition',
-        test_name='cheets_CTS_R.11_r3.x86.CtsTransition',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTransitionTestCases', '--include-filter', 'CtsTransitionTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTransition',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTrustedVoiceHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTrustedVoiceHost
deleted file mode 100644
index a1a847e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTrustedVoiceHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsTrustedVoiceHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTrustedVoiceHostTestCases, CtsTrustedVoiceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsTrustedVoiceHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsTrustedVoiceHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTrustedVoiceHostTestCases', '--include-filter', 'CtsTrustedVoiceHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTrustedVoiceHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTv b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTv
deleted file mode 100644
index 1153b51..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsTv
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsTv'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsTvProviderTestCases, CtsTvProviderTestCases[secondary_user], CtsTvTestCases, CtsTvTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsTv',
-        test_name='cheets_CTS_R.11_r3.x86.CtsTv',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvProviderTestCases[secondary_user]', '--include-filter', 'CtsTvTestCases', '--include-filter', 'CtsTvTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsTv',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUi b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUi
deleted file mode 100644
index 5c03d33..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUi
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsUi'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUiAutomationTestCases, CtsUiAutomationTestCases[instant], CtsUiAutomationTestCases[secondary_user], CtsUiRenderingTestCases, CtsUiRenderingTestCases27, CtsUiRenderingTestCases27[instant], CtsUiRenderingTestCases27[secondary_user], CtsUiRenderingTestCases[instant], CtsUiRenderingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsUi',
-        test_name='cheets_CTS_R.11_r3.x86.CtsUi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiAutomationTestCases[instant]', '--include-filter', 'CtsUiAutomationTestCases[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases', '--include-filter', 'CtsUiRenderingTestCases27', '--include-filter', 'CtsUiRenderingTestCases27[instant]', '--include-filter', 'CtsUiRenderingTestCases27[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases[instant]', '--include-filter', 'CtsUiRenderingTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUi',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUidIsolation b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUidIsolation
deleted file mode 100644
index 45f6a68..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUidIsolation
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsUidIsolation'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUidIsolationTestCases, CtsUidIsolationTestCases[instant], CtsUidIsolationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsUidIsolation',
-        test_name='cheets_CTS_R.11_r3.x86.CtsUidIsolation',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUidIsolationTestCases', '--include-filter', 'CtsUidIsolationTestCases[instant]', '--include-filter', 'CtsUidIsolationTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUidIsolation',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUsageStats b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUsageStats
deleted file mode 100644
index 1bc281a..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUsageStats
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsUsageStats'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUsageStatsTestCases, CtsUsageStatsTestCases[instant], CtsUsageStatsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsUsageStats',
-        test_name='cheets_CTS_R.11_r3.x86.CtsUsageStats',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases', '--include-filter', 'CtsUsageStatsTestCases[instant]', '--include-filter', 'CtsUsageStatsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUsageStats',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUsb b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUsb
deleted file mode 100644
index 1eadf99..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUsb
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsUsb'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUsbManagerTestCases, CtsUsbManagerTestCases[secondary_user], CtsUsbTests, CtsUsbTests[instant], CtsUsbTests[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsUsb',
-        test_name='cheets_CTS_R.11_r3.x86.CtsUsb',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsbManagerTestCases', '--include-filter', 'CtsUsbManagerTestCases[secondary_user]', '--include-filter', 'CtsUsbTests', '--include-filter', 'CtsUsbTests[instant]', '--include-filter', 'CtsUsbTests[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUsb',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=10800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUserspaceRebootHostSideTestCases b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUserspaceRebootHostSideTestCases
deleted file mode 100644
index 91ba35b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUserspaceRebootHostSideTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsUserspaceRebootHostSideTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUserspaceRebootHostSideTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsUserspaceRebootHostSideTestCases',
-        test_name='cheets_CTS_R.11_r3.x86.CtsUserspaceRebootHostSideTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUserspaceRebootHostSideTestCases', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUserspaceRebootHostSideTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUsesLibraryHost b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUsesLibraryHost
deleted file mode 100644
index bc54c73..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUsesLibraryHost
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsUsesLibraryHost'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUsesLibraryHostTestCases, CtsUsesLibraryHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsUsesLibraryHost',
-        test_name='cheets_CTS_R.11_r3.x86.CtsUsesLibraryHost',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsesLibraryHostTestCases', '--include-filter', 'CtsUsesLibraryHostTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUsesLibraryHost',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUtil b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUtil
deleted file mode 100644
index b28e75b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsUtil
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsUtil'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUtilTestCases, CtsUtilTestCases[instant], CtsUtilTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsUtil',
-        test_name='cheets_CTS_R.11_r3.x86.CtsUtil',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUtilTestCases', '--include-filter', 'CtsUtilTestCases[instant]', '--include-filter', 'CtsUtilTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUtil',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsVideo b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsVideo
deleted file mode 100644
index ff7c2af..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsVideo
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsVideo'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVideoTestCases, CtsVideoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsVideo',
-        test_name='cheets_CTS_R.11_r3.x86.CtsVideo',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVideoTestCases', '--include-filter', 'CtsVideoTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsVideo',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsView b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsView
deleted file mode 100644
index 40c1326..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsView
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsView'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsViewInspectorAnnotationProcessorTestCases, CtsViewInspectorAnnotationProcessorTestCases[instant], CtsViewInspectorAnnotationProcessorTestCases[secondary_user], CtsViewTestCases, CtsViewTestCasesSdk28, CtsViewTestCasesSdk28[instant], CtsViewTestCasesSdk28[secondary_user], CtsViewTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsView',
-        test_name='cheets_CTS_R.11_r3.x86.CtsView',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[instant]', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[secondary_user]', '--include-filter', 'CtsViewTestCases', '--include-filter', 'CtsViewTestCasesSdk28', '--include-filter', 'CtsViewTestCasesSdk28[instant]', '--include-filter', 'CtsViewTestCasesSdk28[secondary_user]', '--include-filter', 'CtsViewTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsView',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsVoice b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsVoice
deleted file mode 100644
index 9094e36..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsVoice
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsVoice'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVoiceInteractionTestCases, CtsVoiceInteractionTestCases[instant], CtsVoiceInteractionTestCases[secondary_user], CtsVoiceSettingsTestCases, CtsVoiceSettingsTestCases[instant], CtsVoiceSettingsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsVoice',
-        test_name='cheets_CTS_R.11_r3.x86.CtsVoice',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceInteractionTestCases[instant]', '--include-filter', 'CtsVoiceInteractionTestCases[secondary_user]', '--include-filter', 'CtsVoiceSettingsTestCases', '--include-filter', 'CtsVoiceSettingsTestCases[instant]', '--include-filter', 'CtsVoiceSettingsTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsVoice',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsVr b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsVr
deleted file mode 100644
index 56ff9e5..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsVr
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsVr'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsVrTestCases, CtsVrTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsVr',
-        test_name='cheets_CTS_R.11_r3.x86.CtsVr',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVrTestCases', '--include-filter', 'CtsVrTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsVr',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWebkit b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWebkit
deleted file mode 100644
index f792124..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWebkit
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWebkit'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWebkitTestCases, CtsWebkitTestCases[instant], CtsWebkitTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWebkit',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWebkit',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWebkitTestCases', '--include-filter', 'CtsWebkitTestCases[instant]', '--include-filter', 'CtsWebkitTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWebkit',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=7200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWidget b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWidget
deleted file mode 100644
index 76b41f6..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWidget
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWidget'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWidgetTestCases, CtsWidgetTestCases29, CtsWidgetTestCases29[instant], CtsWidgetTestCases29[secondary_user], CtsWidgetTestCases[instant], CtsWidgetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWidget',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWidget',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWidgetTestCases', '--include-filter', 'CtsWidgetTestCases29', '--include-filter', 'CtsWidgetTestCases29[instant]', '--include-filter', 'CtsWidgetTestCases29[secondary_user]', '--include-filter', 'CtsWidgetTestCases[instant]', '--include-filter', 'CtsWidgetTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWidget',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=18000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWifi b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWifi
deleted file mode 100644
index 04c2b58..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWifi
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWifi'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWifiBroadcastsHostTestCases, CtsWifiBroadcastsHostTestCases[instant], CtsWifiBroadcastsHostTestCases[secondary_user], CtsWifiTestCases, CtsWifiTestCases[instant], CtsWifiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWifi',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWifi',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWifiBroadcastsHostTestCases', '--include-filter', 'CtsWifiBroadcastsHostTestCases[instant]', '--include-filter', 'CtsWifiBroadcastsHostTestCases[secondary_user]', '--include-filter', 'CtsWifiTestCases', '--include-filter', 'CtsWifiTestCases[instant]', '--include-filter', 'CtsWifiTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWifi',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=12600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager
deleted file mode 100644
index 76b839b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManagerDeviceTestCases, CtsWindowManagerDeviceTestCases[secondary_user], CtsWindowManagerJetpackTestCases, CtsWindowManagerJetpackTestCases[secondary_user], CtsWindowManagerSdk25TestCases, CtsWindowManagerSdk25TestCases[secondary_user], CtsWindowManagerSdk28TestCases, CtsWindowManagerSdk28TestCases[secondary_user], CtsWindowManagerSdk29TestCases, CtsWindowManagerSdk29TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--include-filter', 'CtsWindowManagerDeviceTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerJetpackTestCases', '--include-filter', 'CtsWindowManagerJetpackTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk25TestCases', '--include-filter', 'CtsWindowManagerSdk25TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk28TestCases', '--include-filter', 'CtsWindowManagerSdk28TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk29TestCases', '--include-filter', 'CtsWindowManagerSdk29TestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManager',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=19800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.A b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.A
deleted file mode 100644
index c4ec33b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.A
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.A'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.A of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.A',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.A',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityManagerGetConfigTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityMetricsLoggerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTaskAffinityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTransitionTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityViewTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityVisibilityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AddWindowAsUserTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsAppOpsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsImportanceTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AmProfileTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AmStartOptionsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AnrTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AppConfigurationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AspectRatioTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AssistantStackTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.C b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.C
deleted file mode 100644
index 33f707e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.C
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.C'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.C of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.C',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.C',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.CloseOnOutsideTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ConfigChangeTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.CrossAppDragAndDropTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.D b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.D
deleted file mode 100644
index 53dd74e..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.D
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.D'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.D of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.D',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.D',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DecorInsetTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DeprecatedTargetSdkTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DialogFrameTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplayCutoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplaySizeTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplayTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DragDropTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DreamManagerServiceTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.Ensure b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.Ensure
deleted file mode 100644
index b9af01b..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.Ensure
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.Ensure'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.Ensure of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.Ensure',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.Ensure',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.EnsureBarContrastTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.F b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.F
deleted file mode 100644
index 87ed727..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.F
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.F'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.F of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.F',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.F',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ForceRelayoutTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.FreeformWindowingModeTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.L b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.L
deleted file mode 100644
index 06aceaa..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.L
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.L'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.L of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.L',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.L',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LocationInWindowTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LocationOnScreenTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.M b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.M
deleted file mode 100644
index ac81eaa..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.M
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.M'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.M of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.M',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.M',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ManifestLayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MinimalPostProcessingTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayActivityLaunchTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayClientTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayLockedKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPolicyTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPrivateDisplayTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySecurityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySystemDecorationTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.Override b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.Override
deleted file mode 100644
index 8552725..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.Override
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.Override'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.Override of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.Override',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.Override',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.OverrideConfigTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.P b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.P
deleted file mode 100644
index 39a03a2..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.P
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.P'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.P of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.P',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.P',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PinnedStackTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PrereleaseSdkTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PresentationTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.R b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.R
deleted file mode 100644
index f43aa71..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.R
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.R'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.R of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.R',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.R',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ReplaceWindowTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.RobustnessTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.S b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.S
deleted file mode 100644
index 3b5cc87..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.S
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.S'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.S of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.S',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.S',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SplashscreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SplitScreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityAsUserTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlViewHostTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewSurfaceValidatorTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.T b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.T
deleted file mode 100644
index a673018..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.T
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.T'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.T of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.T',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.T',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ToastWindowTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.TransitionSelectionTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.Window b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.Window
deleted file mode 100644
index eda5b64..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.Window
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.Window'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.Window of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.Window',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.Window',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextPolicyTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowFocusTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInputTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationCallbackTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationControllerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationImeTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationSynchronicityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsControllerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsLayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsPolicyTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_BadTokenExceptionTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_LayoutParamsTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowMetricsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.intent b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.intent
deleted file mode 100644
index 0b38d94..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.intent
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.intent'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.intent of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.intent',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.intent',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentGenerationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.lifecycle b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.lifecycle
deleted file mode 100644
index 57a79df..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWindowManager.lifecycle
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWindowManager.lifecycle'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWindowManager.lifecycle of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWindowManager.lifecycle',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWindowManager.lifecycle',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleFreeformTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecyclePipTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleSplitScreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTopResumedStateTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityStarterTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityTests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWindowManagerDeviceTestCases',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWrap b/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWrap
deleted file mode 100644
index d255288..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.CtsWrap
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.CtsWrap'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsWrapNoWrapTestCases, CtsWrapNoWrapTestCases[secondary_user], CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugMallocDebugTestCases[secondary_user], CtsWrapWrapDebugTestCases, CtsWrapWrapDebugTestCases[secondary_user], CtsWrapWrapNoDebugTestCases, CtsWrapWrapNoDebugTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.CtsWrap',
-        test_name='cheets_CTS_R.11_r3.x86.CtsWrap',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapNoWrapTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsWrap',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.LegacyStorageTest b/server/site_tests/cheets_CTS_R/control.11_r3.x86.LegacyStorageTest
deleted file mode 100644
index 4818014..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.LegacyStorageTest
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.LegacyStorageTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module LegacyStorageTest, LegacyStorageTest[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.LegacyStorageTest',
-        test_name='cheets_CTS_R.11_r3.x86.LegacyStorageTest',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'LegacyStorageTest', '--include-filter', 'LegacyStorageTest[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='LegacyStorageTest',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.ScopedStorageTest b/server/site_tests/cheets_CTS_R/control.11_r3.x86.ScopedStorageTest
deleted file mode 100644
index fff19e4..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.ScopedStorageTest
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.ScopedStorageTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module ScopedStorageTest, ScopedStorageTest[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.ScopedStorageTest',
-        test_name='cheets_CTS_R.11_r3.x86.ScopedStorageTest',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'ScopedStorageTest', '--include-filter', 'ScopedStorageTest[instant]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='ScopedStorageTest',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.signed-Cts b/server/site_tests/cheets_CTS_R/control.11_r3.x86.signed-Cts
deleted file mode 100644
index fad9bb6..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.signed-Cts
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.signed-Cts'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module signed-CtsOmapiTestCases, signed-CtsOmapiTestCases[secondary_user], signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases1[secondary_user], signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases2[secondary_user], signed-CtsSecureElementAccessControlTestCases3, signed-CtsSecureElementAccessControlTestCases3[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.signed-Cts',
-        test_name='cheets_CTS_R.11_r3.x86.signed-Cts',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'signed-CtsOmapiTestCases', '--include-filter', 'signed-CtsOmapiTestCases[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='signed-Cts',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=16200)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_R/control.11_r3.x86.tradefed-run-collect-tests-only-internal
deleted file mode 100644
index c090bea..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.tradefed-run-collect-tests-only-internal
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.tradefed-run-collect-tests-only-internal'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'LENGTHY'
-MAX_RESULT_SIZE_KB = 1024000
-DOC = 'Run all of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='11_r3.x86.tradefed-run-collect-tests-only-internal',
-        test_name='cheets_CTS_R.11_r3.x86.tradefed-run-collect-tests-only-internal',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
-        retry_template=None,
-        target_module=None,
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r3.x86.vm-tests-tf b/server/site_tests/cheets_CTS_R/control.11_r3.x86.vm-tests-tf
deleted file mode 100644
index dd68d18..0000000
--- a/server/site_tests/cheets_CTS_R/control.11_r3.x86.vm-tests-tf
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.11_r3.x86.vm-tests-tf'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vm-tests-tf, vm-tests-tf[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r3.x86.vm-tests-tf',
-        test_name='cheets_CTS_R.11_r3.x86.vm-tests-tf',
-        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'vm-tests-tf', '--include-filter', 'vm-tests-tf[secondary_user]', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vm-tests-tf',
-        target_plan=None,
-        bundle='x86',
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r8.arm.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_R/control.11_r8.arm.tradefed-run-collect-tests-only-internal
new file mode 100644
index 0000000..e17974b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.11_r8.arm.tradefed-run-collect-tests-only-internal
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.11_r8.arm.tradefed-run-collect-tests-only-internal'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'LENGTHY'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run all of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=0,
+        tag='11_r8.arm.tradefed-run-collect-tests-only-internal',
+        test_name='cheets_CTS_R.11_r8.arm.tradefed-run-collect-tests-only-internal',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
+        retry_template=None,
+        target_module=None,
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.11_r8.x86.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_CTS_R/control.11_r8.x86.tradefed-run-collect-tests-only-internal
new file mode 100644
index 0000000..f1872a3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.11_r8.x86.tradefed-run-collect-tests-only-internal
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.11_r8.x86.tradefed-run-collect-tests-only-internal'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts, suite:arc-cts-unibuild, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'LENGTHY'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run all of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=0,
+        tag='11_r8.x86.tradefed-run-collect-tests-only-internal',
+        test_name='cheets_CTS_R.11_r8.x86.tradefed-run-collect-tests-only-internal',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
+        retry_template=None,
+        target_module=None,
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAbiOverrideHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAbiOverrideHostTestCases
index 56f943c..a9f961d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAbiOverrideHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAbiOverrideHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAbiOverrideHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAccelerationTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAccelerationTestCases
index 73b4f48..5c0d485 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAccelerationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAccelerationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityServiceSdk29TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityServiceSdk29TestCases
index 0b058c7..36e0878 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityServiceSdk29TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityServiceSdk29TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityServiceSdk29TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityServiceTestCases
index fa4ad00..fc15a65 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityTestCases
index 5e9ce5f..be6056d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAccessibilityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAccountManagerTestCases
index 23c8dae..22b1197 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAccountManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAccountManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAccountsHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAccountsHostTestCases
index 48bfb4c..c7ef651 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAccountsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAccountsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccountsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsActivityManagerBackgroundActivityTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsActivityManagerBackgroundActivityTestCases
index 2e747fd..6cdde42 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsActivityManagerBackgroundActivityTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsActivityManagerBackgroundActivityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsActivityManagerBackgroundActivityTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAdbHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAdbHostTestCases
index 645dbd0..9ccc9a1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAdbHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAdbHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdbHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAdbManagerHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAdbManagerHostTestCases
index 9b1a3e6..d375791 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAdbManagerHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAdbManagerHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdbManagerHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAdminPackageInstallerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAdminPackageInstallerTestCases
index ddf61f7..27052e1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAdminPackageInstallerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAdminPackageInstallerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdminPackageInstallerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAdminTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAdminTestCases
index 045b25b..8860cb9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAdminTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAdminTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAlarmManagerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAlarmManagerTestCases
index 5f53a1a..0403876 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAlarmManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAlarmManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAlarmManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidAppTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidAppTestCases
index 676814d..51f3f8c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidAppTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidAppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidAppTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestBase28ApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestBase28ApiSignatureTestCases
index 66489a1..a00f16c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestBase28ApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestBase28ApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestBase28ApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestBaseCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestBaseCurrentApiSignatureTestCases
index 3b5ad28..228b66d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestBaseCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestBaseCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestBaseCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestMockCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestMockCurrentApiSignatureTestCases
index a6c2d96..1437319 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestMockCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestMockCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestMockCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestRunnerCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestRunnerCurrentApiSignatureTestCases
index 3c5f498..c5f4a42 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestRunnerCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAndroidTestRunnerCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestRunnerCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAngleIntegrationHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAngleIntegrationHostTestCases
index e3632f2..eacc48d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAngleIntegrationHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAngleIntegrationHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAngleIntegrationHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAnimationTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAnimationTestCases
index b793ac6..cff5420 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAnimationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAnimationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAnimationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacy27ApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacy27ApiSignatureTestCases
index e93683d..b20ac9a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacy27ApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacy27ApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacyCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacyCurrentApiSignatureTestCases
index 5c310d0..8ac2b99 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacyCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacyCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacyCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
index 095f240..da80f20 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsApexTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsApexTestCases
index 1bb656d..ef1b805 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsApexTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsApexTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApexTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppBindingHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppBindingHostTestCases
index 30560b1..8d3120b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppBindingHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppBindingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppBindingHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppCompatHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppCompatHostTestCases
index c1f55a3..7628595 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppCompatHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppCompatHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppCompatHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppComponentFactoryTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppComponentFactoryTestCases
index 1ce9ca2..57f4f5c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppComponentFactoryTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppComponentFactoryTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppComponentFactoryTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppEnumerationTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppEnumerationTestCases
index 5038a5b..7b117d3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppEnumerationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppEnumerationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppEnumerationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppExitTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppExitTestCases
index d751e87..4added1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppExitTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppExitTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppExitTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppIntegrityDeviceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppIntegrityDeviceTestCases
index cf26599..d4307d1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppIntegrityDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppIntegrityDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppIntegrityDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppOpsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppOpsTestCases
index e166225..c957dc2 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppOpsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppOpsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppOpsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppPredictionServiceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppPredictionServiceTestCases
index cae66cd..b8b4613 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppPredictionServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppPredictionServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppPredictionServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppSecurityHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppSecurityHostTestCases
index 7899e82..8273849 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -31,6 +32,6 @@
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
-        timeout=3600)
+        timeout=7200)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppTestCases
index 88e7699..0ec66a5 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppTestCases.feature.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsAppTestCases.feature.ctshardware
new file mode 100644
index 0000000..0b19804
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppTestCases.feature.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsAppTestCases.feature.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppTestCases.feature of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsAppTestCases.feature.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsAppTestCases.feature.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppTestCases android.app.cts.SystemFeaturesTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAppTestCases.feature',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppUsageHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppUsageHostTestCases
index b2a421c..c174792 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppUsageHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppUsageHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppUsageHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAppWidgetTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAppWidgetTestCases
index b489c29..b78fa60 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAppWidgetTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAppWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppWidgetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAslrMallocTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAslrMallocTestCases
index 14b184c..f4ed5f1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAslrMallocTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAslrMallocTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAslrMallocTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAssistTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAssistTestCases
index 3229d79..dfbf7fc 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAssistTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAssistTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAssistTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAtomicInstallTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAtomicInstallTestCases
index 016862b..0edb8b0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAtomicInstallTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAtomicInstallTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAtomicInstallTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAtraceHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAtraceHostTestCases
index 7168ca4..4e4e956 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAtraceHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAtraceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAtraceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAttentionServiceDeviceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAttentionServiceDeviceTestCases
index 2cebcd6..f7eeaa2 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAttentionServiceDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAttentionServiceDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAttentionServiceDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsAutoFillServiceTestCases
index 12dc971..b5588a6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsAutoFillServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsAutoFillServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAutoFillServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBackgroundRestrictionsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsBackgroundRestrictionsTestCases
index 1a6a6c5..f9e2250 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBackgroundRestrictionsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBackgroundRestrictionsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackgroundRestrictionsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBackupHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsBackupHostTestCases
index fc74f7e..803e22d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBackupHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBackupHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackupHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBackupTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsBackupTestCases
index 924f9c9..75fceab 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBackupTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBackupTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBatterySavingTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsBatterySavingTestCases
index d663726..8642b85 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBatterySavingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBatterySavingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBatterySavingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBionicAppTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsBionicAppTestCases
index 0cd59f0..7ab60b8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBionicAppTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBionicAppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBionicAppTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBionicTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsBionicTestCases
index b190807..8e83ab2 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBionicTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBionicTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBionicTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreHostTestCases
index bebae43..b42c0ca 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreHostTestHelper b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreHostTestHelper
index a8a8ea92..4bbb481 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreHostTestHelper
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreHostTestHelper
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreHostTestHelper of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestCases
index acabb48..7dcea94 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelper b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelper
index 53069aa..57de1f9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelper
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelper
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreTestHelper of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelperDiffSig b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelperDiffSig
index 93fe67a..1451841 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelperDiffSig
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelperDiffSig
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreTestHelperDiffSig of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelperDiffSig2 b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelperDiffSig2
index 0976ba7..6f60112 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelperDiffSig2
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBlobStoreTestHelperDiffSig2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreTestHelperDiffSig2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBluetoothTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsBluetoothTestCases
index e234de7..f49f64a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBluetoothTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBluetoothTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBluetoothTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsBootStatsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsBootStatsTestCases
index 4d831f7..24aaa53 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsBootStatsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsBootStatsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBootStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCalendarProviderTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCalendarProviderTestCases
index 80f9fb7..41b7ef8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCalendarProviderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCalendarProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCalendarProviderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCalendarcommon2TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCalendarcommon2TestCases
index 1023e69..55dd9a9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCalendarcommon2TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCalendarcommon2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCalendarcommon2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCameraApi25TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCameraApi25TestCases
index c5adb25..638a5c8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCameraApi25TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCameraApi25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCameraApi25TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases
index d47288c..2f18a6d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
@@ -31,6 +32,6 @@
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
-        timeout=3600)
+        timeout=5400)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases.NativeCameraDeviceTest b/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases.NativeCameraDeviceTest
new file mode 100644
index 0000000..130bf86
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases.NativeCameraDeviceTest
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsCameraTestCases.NativeCameraDeviceTest'
+ATTRIBUTES = 'suite:cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsCameraTestCases.NativeCameraDeviceTest of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsCameraTestCases.NativeCameraDeviceTest',
+        test_name='cheets_CTS_R.arm.CtsCameraTestCases.NativeCameraDeviceTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases[instant] android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]', '--include-filter', 'CtsCameraTestCases[instant] android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        precondition_commands=['sleep 20', 'android-sh -c \'am start -a android.intent.action.VIEW -d https://webglsamples.org/electricflower/electricflower.html\''],
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware
new file mode 100644
index 0000000..ed0d0f2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsCameraTestCases.NativeCameraDeviceTest of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        precondition_commands=['sleep 20', 'android-sh -c \'am start -a android.intent.action.VIEW -d https://webglsamples.org/electricflower/electricflower.html\''],
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases.ctshardware
new file mode 100644
index 0000000..1b169b3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCameraTestCases.ctshardware
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsCameraTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, lighting'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 70
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsCameraTestCases.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsCameraTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCarHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCarHostTestCases
index 64f39ee..040c100 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCarHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCarHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCarHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCarTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCarTestCases
index 37a4247..7ddaa32 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCarTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCarTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCarTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCarrierApiTestCases
index fea7db9..a541b68 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCarrierApiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCarrierApiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases
index 256a2e7..c4470d3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsClassLoaderFactoryPathClassLoaderTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsClassLoaderFactoryPathClassLoaderTestCases
index 8ce1152..7f0a9d8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsClassLoaderFactoryPathClassLoaderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsClassLoaderFactoryPathClassLoaderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsClassLoaderFactoryPathClassLoaderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsClassloaderSplitsHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsClassloaderSplitsHostTestCases
index aa1d13a..d24ba04 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsClassloaderSplitsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsClassloaderSplitsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsClassloaderSplitsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCodePathHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCodePathHostTestCases
index 23f5c1c..605080c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCodePathHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCodePathHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCodePathHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsColorModeTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsColorModeTestCases
index 7f63c69..1a22d06 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsColorModeTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsColorModeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsColorModeTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCompilationTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCompilationTestCases
index f68b856..200caf0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCompilationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCompilationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCompilationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsContactsProviderTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsContactsProviderTestCases
index c3b5779..b542841 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsContactsProviderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsContactsProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContactsProviderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsContactsProviderWipe b/server/site_tests/cheets_CTS_R/control.arm.CtsContactsProviderWipe
index d9c5005..7c22cab 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsContactsProviderWipe
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsContactsProviderWipe
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContactsProviderWipe of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsContentCaptureServiceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsContentCaptureServiceTestCases
index 13dfa28..dc04856 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsContentCaptureServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsContentCaptureServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContentCaptureServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsContentSuggestionsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsContentSuggestionsTestCases
index 89ef238..2b86dd2 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsContentSuggestionsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsContentSuggestionsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContentSuggestionsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsContentTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsContentTestCases
index 50c5b9b..9c62e90 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsContentTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsContentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsControlsDeviceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsControlsDeviceTestCases
index e3f7464..9ae691e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsControlsDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsControlsDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsControlsDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCppToolsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCppToolsTestCases
index 29627ac..0f687a4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCppToolsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCppToolsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCppToolsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsCurrentApiSignatureTestCases
index fc12a85..a479ae6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDatabaseTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDatabaseTestCases
index 1437f79..b1c14f5 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDatabaseTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDatabaseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDatabaseTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases
deleted file mode 100644
index aab1bf9..0000000
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.arm.CtsDeqpTestCases'
-ATTRIBUTES = 'suite:cts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 1024000
-PRIORITY = 70
-DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='arm.CtsDeqpTestCases',
-        test_name='cheets_CTS_R.arm.CtsDeqpTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='arm',
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=108000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases.32 b/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases.32
new file mode 100644
index 0000000..ce0bfc8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases.32
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsDeqpTestCases.32'
+ATTRIBUTES = 'suite:cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+PRIORITY = 70
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsDeqpTestCases.32',
+        test_name='cheets_CTS_R.arm.CtsDeqpTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'armeabi-v7a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=108000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases.64 b/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases.64
new file mode 100644
index 0000000..6b3ee1e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases.64
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsDeqpTestCases.64'
+ATTRIBUTES = 'suite:cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+PRIORITY = 70
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsDeqpTestCases.64',
+        test_name='cheets_CTS_R.arm.CtsDeqpTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'arm64-v8a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=108000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
new file mode 100644
index 0000000..16613d6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases dEQP-GLES3.functional.prerequisite#*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDeviceConfigTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDeviceConfigTestCases
index 2424f59..952cf2c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDeviceConfigTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDeviceConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDeviceConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDeviceIdleHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDeviceIdleHostTestCases
index 7100261..c6df25d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDeviceIdleHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDeviceIdleHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDeviceIdleHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDevicePolicyManagerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDevicePolicyManagerTestCases
index 6fa121f..876138b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDevicePolicyManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDevicePolicyManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDexMetadataHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDexMetadataHostTestCases
index 97e1f60..3efb1f8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDexMetadataHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDexMetadataHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDexMetadataHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDisplayTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDisplayTestCases
index 25eee36..997d101 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDisplayTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDisplayTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDisplayTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDownloadManagerApi28 b/server/site_tests/cheets_CTS_R/control.arm.CtsDownloadManagerApi28
index cf8e9ad..0632f01 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDownloadManagerApi28
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDownloadManagerApi28
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDownloadManagerApi28 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDownloadManagerInstaller b/server/site_tests/cheets_CTS_R/control.arm.CtsDownloadManagerInstaller
index 6e0a9b6..c1f087f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDownloadManagerInstaller
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDownloadManagerInstaller
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDownloadManagerInstaller of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDpiTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDpiTestCases
index 7b09449..2356364 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDpiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDpiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDpiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDpiTestCases2 b/server/site_tests/cheets_CTS_R/control.arm.CtsDpiTestCases2
index a2111e8..338d4b0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDpiTestCases2
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDpiTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDpiTestCases2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDreamsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDreamsTestCases
index 14d1e58..c0897e6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDreamsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDreamsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDreamsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDrmTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDrmTestCases
index 4dcb413..1c01240 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDrmTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDrmTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDrmTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDropBoxManagerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDropBoxManagerTestCases
index 6816f99..a6ca6a6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDropBoxManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDropBoxManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDropBoxManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDumpsysHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDumpsysHostTestCases
index f5b0622..bcdbf11 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDumpsysHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDumpsysHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDumpsysHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDynamicLinkerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDynamicLinkerTestCases
index ba4ce5a..3e7567d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDynamicLinkerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDynamicLinkerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDynamicLinkerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsDynamicMimeHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsDynamicMimeHostTestCases
index 65b46d9..31ca8ea 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsDynamicMimeHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsDynamicMimeHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDynamicMimeHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsEdiHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsEdiHostTestCases
index 18aa554..cd9e789 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsEdiHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsEdiHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsEdiHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsEffectTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsEffectTestCases
index 53822ee..1c666ca 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsEffectTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsEffectTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsEffectTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsExtendedMockingTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsExtendedMockingTestCases
index d5457cd..0a2b020 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsExtendedMockingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsExtendedMockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExtendedMockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsExternalServiceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsExternalServiceTestCases
index 9f9f25a..0355880 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsExternalServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsExternalServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExternalServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsExtractNativeLibsHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsExtractNativeLibsHostTestCases
index 4e2306a..a0a0b34 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsExtractNativeLibsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsExtractNativeLibsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExtractNativeLibsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsFileSystemTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsFileSystemTestCases
index 443209e..ef6174e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsFileSystemTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsFileSystemTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsFragmentTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsFragmentTestCases
index f6a4ede..5df0990 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsFragmentTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsFragmentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsFragmentTestCasesSdk26 b/server/site_tests/cheets_CTS_R/control.arm.CtsFragmentTestCasesSdk26
index 435f12c..d7a57e9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsFragmentTestCasesSdk26
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsFragmentTestCasesSdk26
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCasesSdk26 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsFsMgrTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsFsMgrTestCases
index 40e9cd5..9ecd7a6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsFsMgrTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsFsMgrTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFsMgrTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsGestureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsGestureTestCases
index e648dd2..584299b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsGestureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsGestureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGestureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsGpuProfilingDataTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsGpuProfilingDataTestCases
index 1a4bf3a..0a32c85 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsGpuProfilingDataTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsGpuProfilingDataTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGpuProfilingDataTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsGpuToolsHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsGpuToolsHostTestCases
index 9dc6668..c571412 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsGpuToolsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsGpuToolsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGpuToolsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsGraphicsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsGraphicsTestCases
index ddccc74..4e6cd1b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsGraphicsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsGraphicsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGraphicsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsGwpAsanTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsGwpAsanTestCases
index 5abbc85..4a99cf6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsGwpAsanTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsGwpAsanTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGwpAsanTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHardwareTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHardwareTestCases
index 898d563..ebd1e91 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHardwareTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHardwareTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHardwareTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -20,6 +21,7 @@
         'cheets_CTS_R',
         hosts=host_list,
         iterations=1,
+        max_retry=30,
         tag='arm.CtsHardwareTestCases',
         test_name='cheets_CTS_R.arm.CtsHardwareTestCases',
         run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHardwareTestCases'],
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHarmfulAppWarningHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHarmfulAppWarningHostTestCases
index 0bd63ea..f62927d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHarmfulAppWarningHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHarmfulAppWarningHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHarmfulAppWarningHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHdmiCecHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHdmiCecHostTestCases
index 3e19cc6..a82f447 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHdmiCecHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHdmiCecHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHdmiCecHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistApi27TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistApi27TestCases
index 5a675d1..b295ffd 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistApi27TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistApi27TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistApi28TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistApi28TestCases
index 0ca1e23..45e7b4f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistApi28TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistApi28TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistApi28TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistCurrentApiTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistCurrentApiTestCases
index ac82b71..8eed463 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistCurrentApiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistCurrentApiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistCurrentApiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistDebugClassTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistDebugClassTestCases
index f98dfb7..323bff6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistDebugClassTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistDebugClassTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistDebugClassTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistTestApiTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistTestApiTestCases
index 22b95bb..1f9e3de 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistTestApiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiBlacklistTestApiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistTestApiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchDebugClassTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchDebugClassTestCases
index f4ad01e..e3edf12 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchDebugClassTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchDebugClassTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchDebugClassTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchWhitelistTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchWhitelistTestCases
index ad7007e..0839819 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchWhitelistTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchWhitelistTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchWhitelistTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchWildcardTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchWildcardTestCases
index e36a442..1c17494 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchWildcardTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHiddenApiKillswitchWildcardTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchWildcardTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHostTzDataTests b/server/site_tests/cheets_CTS_R/control.arm.CtsHostTzDataTests
index 924e23b..06ac3ad 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHostTzDataTests
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHostTzDataTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostTzDataTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideNetworkTests b/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideNetworkTests
index a6809ca..92be6e8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideNetworkTests
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideNetworkTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsHostsideNetworkTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
@@ -28,7 +29,7 @@
         target_module='CtsHostsideNetworkTests',
         target_plan=None,
         bundle='arm',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideNumberBlockingTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideNumberBlockingTestCases
index a180003..0913fc2 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideNumberBlockingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideNumberBlockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideNumberBlockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideTvTests b/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideTvTests
index 39cdb49..652b770 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideTvTests
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideTvTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideTvTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideWebViewTests b/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideWebViewTests
index 43c4d36..7ee5b3c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideWebViewTests
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsHostsideWebViewTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideWebViewTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsIcuTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsIcuTestCases
index 8609d5d..524ae73 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsIcuTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsIcuTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIcuTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsIdentityTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsIdentityTestCases
index 5260322..01f970b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsIdentityTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsIdentityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIdentityTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsIkeTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsIkeTestCases
index f74f77d..ff867db 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsIkeTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsIkeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIkeTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsIncidentHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsIncidentHostTestCases
index 35985f8..d779bdb 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsIncidentHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsIncidentHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIncidentHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -20,6 +21,7 @@
         'cheets_CTS_R',
         hosts=host_list,
         iterations=1,
+        max_retry=10,
         tag='arm.CtsIncidentHostTestCases',
         test_name='cheets_CTS_R.arm.CtsIncidentHostTestCases',
         run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIncidentHostTestCases'],
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsIncrementalInstallHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsIncrementalInstallHostTestCases
index 09b6b44..9606fbd 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsIncrementalInstallHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsIncrementalInstallHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIncrementalInstallHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsInitTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsInitTestCases
index c814fbb..d55f783 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsInitTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsInitTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInitTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsInlineMockingTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsInlineMockingTestCases
index 9f654e9..0d07bd9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsInlineMockingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsInlineMockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInlineMockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsInputMethodServiceHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsInputMethodServiceHostTestCases
index 5011efd..ec10388 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsInputMethodServiceHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsInputMethodServiceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodServiceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsInputMethodTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsInputMethodTestCases
index 92a0479..0c35307 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsInputMethodTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsInputMethodTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsInstantAppTests b/server/site_tests/cheets_CTS_R/control.arm.CtsInstantAppTests
index 9696c60..ac31600 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsInstantAppTests
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsInstantAppTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInstantAppTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsIntentSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsIntentSignatureTestCases
index fdb1ad8..7974134 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsIntentSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsIntentSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIntentSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpSecurityHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpSecurityHostTestCases
index 3eade11..a19c72a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJdwpSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpTestCases
index 53a0093..fa75c10 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJdwpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpTunnelHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpTunnelHostTestCases
index 6efaf01..2266185 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpTunnelHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJdwpTunnelHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJdwpTunnelHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJniTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJniTestCases
index 4ef39c7..dbcb6bc 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJniTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJniTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJniTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJobSchedulerSharedUidTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJobSchedulerSharedUidTestCases
index 3b1d337..f7555b0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJobSchedulerSharedUidTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJobSchedulerSharedUidTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJobSchedulerSharedUidTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJobSchedulerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJobSchedulerTestCases
index c16bb1b..50f6832 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJobSchedulerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJobSchedulerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
@@ -28,7 +29,7 @@
         target_module='CtsJobSchedulerTestCases',
         target_plan=None,
         bundle='arm',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
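
(For context on the pattern repeated throughout these hunks: each cheets_CTS_R control file is a small Python file whose module-level variables — TEST_TYPE, TIME, MAX_RESULT_SIZE_KB, and now PY_VERSION = 3, marking the test as Python 3 — are read by autotest, and whose run_TS(machine) function passes the listed parameters to the test. The sketch below is illustrative only: the hosts.create_host/job.run_test/parallel_simple wrapper and the source of ssid/wifipass are assumptions about boilerplate the hunks do not show; the parameter values are copied from the CtsJobSchedulerTestCases hunk above.)

    # Illustrative sketch only -- not the literal file contents.
    PY_VERSION = 3  # run this control file and its test under Python 3

    def run_TS(machine):
        # Assumption: standard autotest wrapper around the parameters shown
        # in the hunk above; ssid/wifipass are assumed to come from the
        # control file's argument parsing, which is not part of this diff.
        host_list = [hosts.create_host(machine)]
        job.run_test(
            'cheets_CTS_R',
            hosts=host_list,
            target_module='CtsJobSchedulerTestCases',
            target_plan=None,
            bundle='arm',
            precondition_commands=[
                '/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass),
                "android-sh -c 'dumpsys wifi transports -eth'",
            ],
            retry_manual_tests=True,
            use_jdk9=True,
            warn_on_test_retry=False)

    parallel_simple(run_TS, machines)
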
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiAttachingHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiAttachingHostTestCases
index 8cb941a..e44dcb0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiAttachingHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiAttachingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiAttachingHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiAttachingTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiAttachingTestCases
index 5d2be35..684a4dd 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiAttachingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiAttachingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiAttachingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRedefineClassesHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRedefineClassesHostTestCases
index 969eb8f..28aae42 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRedefineClassesHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRedefineClassesHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRedefineClassesHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1900HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1900HostTestCases
index f58d64e..f5d6b24 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1900HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1900HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1900HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1901HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1901HostTestCases
index a44f4b6..9aa790a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1901HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1901HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1901HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1902HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1902HostTestCases
index 9b66f59..7ac3545 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1902HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1902HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1902HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1903HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1903HostTestCases
index afffe2e..b3d0927 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1903HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1903HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1903HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1904HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1904HostTestCases
index 11e16db..2526533 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1904HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1904HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1904HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1906HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1906HostTestCases
index 8c74a2d..0f7ab40 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1906HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1906HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1906HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1907HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1907HostTestCases
index 5ba3b2b..8e5b897 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1907HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1907HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1907HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1908HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1908HostTestCases
index 3a57bca..f6254fc 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1908HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1908HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1908HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1909HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1909HostTestCases
index de0dfd7..50b0b0c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1909HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1909HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1909HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1910HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1910HostTestCases
index 68ff88a..9b4fc98 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1910HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1910HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1910HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1911HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1911HostTestCases
index 415e1ce..b5cfaa8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1911HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1911HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1911HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1912HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1912HostTestCases
index 446eef8..b28dd46 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1912HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1912HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1912HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1913HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1913HostTestCases
index 42635b5..c5e99db 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1913HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1913HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1913HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1914HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1914HostTestCases
index f9fdb1f..a8fc601 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1914HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1914HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1914HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1915HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1915HostTestCases
index 647f0ec..9ae8998 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1915HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1915HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1915HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1916HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1916HostTestCases
index 1c402e6..e6cd8eb 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1916HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1916HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1916HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1917HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1917HostTestCases
index 35f9a23..2f362dc 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1917HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1917HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1917HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1920HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1920HostTestCases
index 65e9dec..c18965a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1920HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1920HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1920HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1921HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1921HostTestCases
index 9077002..962eb0b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1921HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1921HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1921HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1922HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1922HostTestCases
index a8923ee..432cc88 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1922HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1922HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1922HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1923HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1923HostTestCases
index 185d7ca..02f703f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1923HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1923HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1923HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1924HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1924HostTestCases
index 07d1c50..cece3cb 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1924HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1924HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1924HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1925HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1925HostTestCases
index 829def8..16b69e1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1925HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1925HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1925HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1926HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1926HostTestCases
index 1ed1b28..2d29edd 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1926HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1926HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1926HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1927HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1927HostTestCases
index 4674cdc..90bdfba 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1927HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1927HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1927HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1928HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1928HostTestCases
index 2b5d1ca..8eedf8e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1928HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1928HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1928HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1930HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1930HostTestCases
index 5c255fe..f315e65 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1930HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1930HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1930HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1931HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1931HostTestCases
index d11a97c..d842de8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1931HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1931HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1931HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1932HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1932HostTestCases
index 9d2f3c8..65057b3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1932HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1932HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1932HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1933HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1933HostTestCases
index 7b8d749..28af522 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1933HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1933HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1933HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1934HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1934HostTestCases
index 68cf7a4..e8ade9b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1934HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1934HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1934HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1936HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1936HostTestCases
index 1c53f7c..82f0a0e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1936HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1936HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1936HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1937HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1937HostTestCases
index 2f4a3e1..7c2d62f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1937HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1937HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1937HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1939HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1939HostTestCases
index 6a1fb2c..625e28c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1939HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1939HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1939HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1941HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1941HostTestCases
index 9717c70..1f9271e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1941HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1941HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1941HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1942HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1942HostTestCases
index 16baa09..80d3f20 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1942HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1942HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1942HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1943HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1943HostTestCases
index 41161a1..54b8b00 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1943HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1943HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1943HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1953HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1953HostTestCases
index 38c4893..55ede10 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1953HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1953HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1953HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1958HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1958HostTestCases
index 26f0a76..d085641 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1958HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1958HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1958HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1962HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1962HostTestCases
index c44f2fe..5ebebed 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1962HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1962HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1962HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1967HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1967HostTestCases
index a18a2a9..09815f7 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1967HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1967HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1967HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1968HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1968HostTestCases
index 855de9a..16f4250 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1968HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1968HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1968HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1969HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1969HostTestCases
index 72200ba..9139554 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1969HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1969HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1969HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1970HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1970HostTestCases
index 8ba7486..6d8feb9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1970HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1970HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1970HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1971HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1971HostTestCases
index ff5e66d..8d55c98 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1971HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1971HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1971HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1974HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1974HostTestCases
index 7d2fa70..a5a1961 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1974HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1974HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1974HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1975HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1975HostTestCases
index 68e9e0a..713291b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1975HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1975HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1975HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1976HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1976HostTestCases
index 7727c16..008be81 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1976HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1976HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1976HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1977HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1977HostTestCases
index 289425d..00a1763 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1977HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1977HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1977HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1978HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1978HostTestCases
index 5d90703..2b414be 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1978HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1978HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1978HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1979HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1979HostTestCases
index 0e16852..a5f71f2 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1979HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1979HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1979HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1981HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1981HostTestCases
index 45f78a9..18dd1ea 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1981HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1981HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1981HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1982HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1982HostTestCases
index 26dc3c5..dc2d211 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1982HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1982HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1982HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1983HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1983HostTestCases
index a87ddd2..8159cee 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1983HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1983HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1983HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1984HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1984HostTestCases
index 78f497f..82d59c5 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1984HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1984HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1984HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1988HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1988HostTestCases
index 19a3436..994d244 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1988HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1988HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1988HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1989HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1989HostTestCases
index e01fecb..66c88df 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1989HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1989HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1989HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1990HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1990HostTestCases
index 659b22a..1056595 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1990HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1990HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1990HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1991HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1991HostTestCases
index 2932598..0e4374b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1991HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1991HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1991HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1992HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1992HostTestCases
index b82abbb..97975e0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1992HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1992HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1992HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1994HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1994HostTestCases
index dc1f9ce..7c6ad8e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1994HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1994HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1994HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1995HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1995HostTestCases
index 58b68d1..9f7ae3c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1995HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1995HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1995HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1996HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1996HostTestCases
index 4da7d56..1eaecc4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1996HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1996HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1996HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1997HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1997HostTestCases
index bc6af81..bde0d7a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1997HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1997HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1997HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1998HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1998HostTestCases
index 39fd6fd..90fa5bf 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1998HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1998HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1998HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1999HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1999HostTestCases
index 1cb27eb..45f819f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1999HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest1999HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1999HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2001HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2001HostTestCases
index 2396490..b9815c1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2001HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2001HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2001HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2002HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2002HostTestCases
index b1399e1..d3759db 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2002HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2002HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2002HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2003HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2003HostTestCases
index a4e0bb7..f44bb16 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2003HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2003HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2003HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2004HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2004HostTestCases
index e264171..7277732 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2004HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2004HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2004HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2005HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2005HostTestCases
index 4568b67..5c073a8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2005HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2005HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2005HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2006HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2006HostTestCases
index c35ba1a..6b8f692 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2006HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2006HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2006HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2007HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2007HostTestCases
index bb44566..1fb6b62 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2007HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest2007HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2007HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest902HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest902HostTestCases
index 80ca9c4..5c42328 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest902HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest902HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest902HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest903HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest903HostTestCases
index 390a493..392b68b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest903HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest903HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest903HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest904HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest904HostTestCases
index f8172e3..35aa60f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest904HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest904HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest904HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest905HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest905HostTestCases
index 19cdbd4..f32fbf2 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest905HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest905HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest905HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest906HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest906HostTestCases
index adc4691..17452af 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest906HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest906HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest906HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest907HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest907HostTestCases
index cb1dd1f..789a280 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest907HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest907HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest907HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest908HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest908HostTestCases
index 1cf4541..8774129 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest908HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest908HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest908HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest910HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest910HostTestCases
index 016574f..5a49ca0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest910HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest910HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest910HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest911HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest911HostTestCases
index ed9add5..dea30e8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest911HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest911HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest911HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest912HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest912HostTestCases
index 2e96f26..5f38cf4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest912HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest912HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest912HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest913HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest913HostTestCases
index 55761ac..5137f8e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest913HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest913HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest913HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest914HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest914HostTestCases
index 1c22fbe..d4d5e73 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest914HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest914HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest914HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest915HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest915HostTestCases
index ec05414..6f50536 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest915HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest915HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest915HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest917HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest917HostTestCases
index 17f00e0..0b90f2b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest917HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest917HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest917HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest918HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest918HostTestCases
index edc2e9b..ce5db9c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest918HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest918HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest918HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest919HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest919HostTestCases
index 015be4e..1aa9486 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest919HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest919HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest919HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest920HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest920HostTestCases
index 58422ef..435cbe8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest920HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest920HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest920HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest922HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest922HostTestCases
index 74f235b..bacf4f1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest922HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest922HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest922HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest923HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest923HostTestCases
index c0f0d93..93c9da6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest923HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest923HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest923HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest924HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest924HostTestCases
index 2fcc8ab..0ff36fe 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest924HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest924HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest924HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest926HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest926HostTestCases
index 93d2d10..e7c4f6c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest926HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest926HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest926HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest927HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest927HostTestCases
index a2a9f99..bbed924 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest927HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest927HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest927HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest928HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest928HostTestCases
index 8cc2d4c..c0d4319 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest928HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest928HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest928HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest930HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest930HostTestCases
index 558fe91..8c648ad 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest930HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest930HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest930HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest931HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest931HostTestCases
index 3bed040..b1766de 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest931HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest931HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest931HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest932HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest932HostTestCases
index 5128a15..527229f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest932HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest932HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest932HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest940HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest940HostTestCases
index 15b50b7..188b1d3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest940HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest940HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest940HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest942HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest942HostTestCases
index 16c8b1b..ce57e0a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest942HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest942HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest942HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest944HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest944HostTestCases
index fec476f..8783d24 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest944HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest944HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest944HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest945HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest945HostTestCases
index fd634a7..9355628 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest945HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest945HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest945HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest947HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest947HostTestCases
index 4cbf3ac..6ec5d14 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest947HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest947HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest947HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest951HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest951HostTestCases
index 067818a..eb136bc 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest951HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest951HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest951HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest982HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest982HostTestCases
index cf8715c..b7b89a0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest982HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest982HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest982HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest983HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest983HostTestCases
index 3614bde..5d54644 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest983HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest983HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest983HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest984HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest984HostTestCases
index 59b95e3..4665b0f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest984HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest984HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest984HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest985HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest985HostTestCases
index 6f4b177..1dcb8c9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest985HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest985HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest985HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest986HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest986HostTestCases
index 2952099..84f82f9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest986HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest986HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest986HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest988HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest988HostTestCases
index 7a393c1..38bc2f1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest988HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest988HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest988HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest989HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest989HostTestCases
index 2b47055..ce1c788 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest989HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest989HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest989HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest990HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest990HostTestCases
index aa4f8bd..311f207 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest990HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest990HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest990HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest991HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest991HostTestCases
index f1d2066..74dac0a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest991HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest991HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest991HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest992HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest992HostTestCases
index e0c49d4..5af0b91 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest992HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest992HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest992HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest993HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest993HostTestCases
index 5cb83c4..cb33c82 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest993HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest993HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest993HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest994HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest994HostTestCases
index 1f04516..2c47a9b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest994HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest994HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest994HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest995HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest995HostTestCases
index a1529de..4fafeb3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest995HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest995HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest995HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest996HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest996HostTestCases
index ef9f713..338e48d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest996HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest996HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest996HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest997HostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest997HostTestCases
index 61f130a..0a38061 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest997HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiRunTest997HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest997HostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiTaggingHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiTaggingHostTestCases
index dfcd2e5..c0a17b6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiTaggingHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiTaggingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiTaggingHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiTrackingHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiTrackingHostTestCases
index aaba790..97aca24 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiTrackingHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsJvmtiTrackingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiTrackingHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsKernelConfigTestCases
index d1daf6b..4a29549 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsKernelConfigTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsKernelConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsKeystoreTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsKeystoreTestCases
index 0d1d720..e1b8c20 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsKeystoreTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsKeystoreTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsKeystoreTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -30,6 +31,6 @@
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
-        timeout=3600)
+        timeout=7200)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLeanbackJankTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLeanbackJankTestCases
index c483471..c137f37 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLeanbackJankTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLeanbackJankTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLeanbackJankTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification20TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification20TestCases
index 089ee7b..6777afe 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification20TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification20TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLegacyNotification20TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification27TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification27TestCases
index d12b556..cc80052 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification27TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification27TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLegacyNotification27TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification28TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification28TestCases
index d1319da..baee006 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification28TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification28TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLegacyNotification28TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification29TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification29TestCases
index a5089e4..b54d3fa 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification29TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLegacyNotification29TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLegacyNotification29TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreApiEvolutionTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreApiEvolutionTestCases
index 08da446..583f746 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreApiEvolutionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreApiEvolutionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreApiEvolutionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreFileIOTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreFileIOTestCases
index 0a2b1cd..9084c6d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreFileIOTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreFileIOTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreFileIOTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreJsr166TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreJsr166TestCases
index b5d5035..b861672 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreJsr166TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreJsr166TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreJsr166TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreLegacy22TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreLegacy22TestCases
index e6872ed..837a9a7 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreLegacy22TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreLegacy22TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreLegacy22TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreOjTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreOjTestCases
index e996e52..3a1c04b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreOjTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreOjTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreOjTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreOkHttpTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreOkHttpTestCases
index 2ca2c8d..702e569 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreOkHttpTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreOkHttpTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreOkHttpTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreTestCases
index 315bd08..f70a4c0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsLibcoreTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
@@ -28,7 +29,7 @@
         target_module='CtsLibcoreTestCases',
         target_plan=None,
         bundle='arm',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreWycheproofBCTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreWycheproofBCTestCases
index f77a861..3c7c04c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreWycheproofBCTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreWycheproofBCTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreWycheproofBCTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreWycheproofConscryptTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreWycheproofConscryptTestCases
index e27445f..37cf61a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreWycheproofConscryptTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLibcoreWycheproofConscryptTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreWycheproofConscryptTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLiblogTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLiblogTestCases
index 0bf37bd..12208e4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLiblogTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLiblogTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLiblogTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLocationCoarseTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLocationCoarseTestCases
index b95e651..2ba60e3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLocationCoarseTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLocationCoarseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationCoarseTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLocationFineTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLocationFineTestCases
index 5cb0601..ea7231d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLocationFineTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLocationFineTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationFineTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLocationGnssTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLocationGnssTestCases
index 3b2b968..b17cbf9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLocationGnssTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLocationGnssTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationGnssTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLocationNoneTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLocationNoneTestCases
index e609701..dcf65d8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLocationNoneTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLocationNoneTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationNoneTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLocationPrivilegedTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLocationPrivilegedTestCases
index 894c85b..b0a5936 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLocationPrivilegedTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLocationPrivilegedTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationPrivilegedTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsLogdTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsLogdTestCases
index 84a57ba..42bbab9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsLogdTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsLogdTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMatchFlagTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMatchFlagTestCases
index 8b20e46..6678197 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMatchFlagTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMatchFlagTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMatchFlagTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaBitstreamsTestCases
index 8c07bf8..5f56cbe 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaBitstreamsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaBitstreamsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaHostTestCases
index b40d0f5..0f8dbeb 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaParserTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaParserTestCases
index 108f37a..d256b85 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaParserTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaParserTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaParserTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaPerformanceClassTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaPerformanceClassTestCases
new file mode 100644
index 0000000..1bda98d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaPerformanceClassTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsMediaPerformanceClassTestCases'
+ATTRIBUTES = 'suite:cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaPerformanceClassTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsMediaPerformanceClassTestCases',
+        test_name='cheets_CTS_R.arm.CtsMediaPerformanceClassTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaPerformanceClassTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaPerformanceClassTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaStressTestCases
index 2bb5c1d..9c72d3b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaStressTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaStressTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaStressTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaStressTestCases.camera.ctshardware
new file mode 100644
index 0000000..b767488
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaStressTestCases.camera.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsMediaStressTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsMediaStressTestCases.camera of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='arm.CtsMediaStressTestCases.camera.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsMediaStressTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases android.mediastress.cts.MediaRecorderStressTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases.camera',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaTestCases
index 54d7a48..56c6a0b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaTestCases.ctshardware
new file mode 100644
index 0000000..20c4058
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsMediaTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, noloopback'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 70
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='arm.CtsMediaTestCases.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsMediaTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaV2TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaV2TestCases
index 683ccce..02cdec3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMediaV2TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMediaV2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaV2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMidiTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMidiTestCases
index 2196924..eff8a5d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMidiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMidiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMimeMapTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMimeMapTestCases
index bfdd0e9..43e4711 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMimeMapTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMimeMapTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMimeMapTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMockingDebuggableTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMockingDebuggableTestCases
index 187a024..b020d2d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMockingDebuggableTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMockingDebuggableTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMockingDebuggableTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMockingTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMockingTestCases
index 93f9377..5986649 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMockingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMockingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMonkeyTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMonkeyTestCases
index ae45877..27c8949 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMonkeyTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMonkeyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMonkeyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMultiUserHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMultiUserHostTestCases
index e7a2543..b1fc12e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMultiUserHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMultiUserHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMultiUserHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsMultiUserTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsMultiUserTestCases
index bb00a80..e87870b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsMultiUserTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsMultiUserTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMultiUserTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNNAPIBenchmarkTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNNAPIBenchmarkTestCases
index bc931e4..f8dab09 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNNAPIBenchmarkTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNNAPIBenchmarkTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNNAPIBenchmarkTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNNAPITestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNNAPITestCases
index dab0236..144c0da 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNNAPITestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNNAPITestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNNAPITestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeEncryptionTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeEncryptionTestCases
index 87edf3a..8165db1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeEncryptionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeEncryptionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeEncryptionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeHardwareTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeHardwareTestCases
index 38175fb1..1345716 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeHardwareTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeHardwareTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeHardwareTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaAAudioTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaAAudioTestCases
index 6ad2090..79a0e7e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaAAudioTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaAAudioTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaAAudioTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaAAudioTestCases.ctshardware
new file mode 100644
index 0000000..5561646
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaAAudioTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsNativeMediaAAudioTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsNativeMediaAAudioTestCases.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsNativeMediaAAudioTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeMediaAAudioTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNativeMediaAAudioTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaMetricsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaMetricsTestCases
index ec087c3..818900e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaMetricsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaMetricsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaMetricsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaSlTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaSlTestCases
index ffe7b2c..c4d5b22 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaSlTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaSlTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaSlTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaXaTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaXaTestCases
index da36f97..9a4fd2f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaXaTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMediaXaTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaXaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMidiTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMidiTestCases
index 6f9661e..f52416a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMidiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeMidiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMidiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeNetDnsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeNetDnsTestCases
index b49647c..d9920d6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeNetDnsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeNetDnsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeNetDnsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeNetTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeNetTestCases
index ad298c8..025573d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNativeNetTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNativeNetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeNetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNdefTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNdefTestCases
index 9f6137d..a446151 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNdefTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNdefTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNdefTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNdkBinderTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNdkBinderTestCases
index 71cff86..77feefc 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNdkBinderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNdkBinderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNdkBinderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetApi23TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetApi23TestCases
index 3a4b4fc..c310669 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetApi23TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetApi23TestCases
@@ -12,6 +12,8 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
 DOC = 'Run module CtsNetApi23TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -27,6 +29,7 @@
         target_module='CtsNetApi23TestCases',
         target_plan=None,
         bundle='arm',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigAttributeTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigAttributeTestCases
index 1e49579..ed1e32f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigAttributeTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigAttributeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigAttributeTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDebugDisabledTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDebugDisabledTestCases
index 8a03ae2..ed9a7e1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDebugDisabledTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDebugDisabledTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDebugDisabledTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDebugEnabledTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDebugEnabledTestCases
index 00dc096..625c85d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDebugEnabledTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDebugEnabledTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDebugEnabledTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDomainConfigTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDomainConfigTestCases
index 028adcf..0a18e25 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDomainConfigTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigBasicDomainConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDomainConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigCleartextTrafficTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigCleartextTrafficTestCases
index 304682e..dd436d5 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigCleartextTrafficTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigCleartextTrafficTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigCleartextTrafficTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigDownloadManagerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigDownloadManagerTestCases
index af3db46..26bf4f9 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigDownloadManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigDownloadManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigDownloadManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigInvalidPinTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigInvalidPinTestCases
index d8ded6d..e32c7ca 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigInvalidPinTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigInvalidPinTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigInvalidPinTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigNestedDomainConfigTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigNestedDomainConfigTestCases
index b7010e0..012e249 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigNestedDomainConfigTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigNestedDomainConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigNestedDomainConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigPrePCleartextTrafficTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigPrePCleartextTrafficTestCases
index cecdafd..369872e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigPrePCleartextTrafficTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigPrePCleartextTrafficTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigPrePCleartextTrafficTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigResourcesSrcTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigResourcesSrcTestCases
index 0b24df5..7212fb8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigResourcesSrcTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecConfigResourcesSrcTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigResourcesSrcTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
index 68885ca..97b2276 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficFalseTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
index 2a3162a..ef42bc6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficTrueTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
index 996deb6..6e65f76 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCases
index 2eb10d9..b9b5db4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
@@ -28,7 +29,7 @@
         target_module='CtsNetTestCases',
         target_plan=None,
         bundle='arm',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCases.ctshardware
new file mode 100644
index 0000000..49947c6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsNetTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsNetTestCases.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsNetTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNetTestCases',
+        target_plan=None,
+        bundle='arm',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesInternetPermission b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesInternetPermission
index cf95eb9..68ffac1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesInternetPermission
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesInternetPermission
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesInternetPermission of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesLegacyApi22 b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesLegacyApi22
index bdfd887..9495b17 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesLegacyApi22
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesLegacyApi22
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesLegacyApi22 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesLegacyPermission22 b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesLegacyPermission22
index 9ac8696..66ddbba 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesLegacyPermission22
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesLegacyPermission22
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesLegacyPermission22 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesUpdateStatsPermission b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesUpdateStatsPermission
index a9b8dfc..2373519 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesUpdateStatsPermission
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNetTestCasesUpdateStatsPermission
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesUpdateStatsPermission of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNfcTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNfcTestCases
index 173a6a4..9777521 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNfcTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNfcTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNfcTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNoPermissionTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsNoPermissionTestCases
index 2d06639..498f02b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNoPermissionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNoPermissionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNoPermissionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsNoPermissionTestCases25 b/server/site_tests/cheets_CTS_R/control.arm.CtsNoPermissionTestCases25
index 5ec710d..061b73b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsNoPermissionTestCases25
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsNoPermissionTestCases25
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNoPermissionTestCases25 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsOmapiTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsOmapiTestCases
index 7f2bf40..8ea1950 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsOmapiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsOmapiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOmapiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGLTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGLTestCases
index 92d1509..1b2ec68 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGLTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGLTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGLTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGlPerf2TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGlPerf2TestCases
index ac2a62e..594459e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGlPerf2TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGlPerf2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGlPerf2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGlPerfTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGlPerfTestCases
index 4ccd9d9..cd8bc28 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGlPerfTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsOpenGlPerfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsOsHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsOsHostTestCases
index 7ed3ee8..b4a2455 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsOsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsOsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsOsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsOsTestCases
index 42d77cd..d6f379e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsOsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsOsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallAppOpDefaultTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallAppOpDefaultTestCases
index d0e9eb2..82b2a8e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallAppOpDefaultTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallAppOpDefaultTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageInstallAppOpDefaultTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallAppOpDeniedTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallAppOpDeniedTestCases
index 9d1c532..eb2cc19 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallAppOpDeniedTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallAppOpDeniedTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageInstallAppOpDeniedTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallTestCases
index 9644692..f2e75f3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageInstallTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallerTapjackingTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallerTapjackingTestCases
index 46d4ed9..035b236 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallerTapjackingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageInstallerTapjackingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageInstallerTapjackingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageUninstallTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageUninstallTestCases
index dacfb8a..9912246 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageUninstallTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageUninstallTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageUninstallTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageWatchdogTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageWatchdogTestCases
index be6877d..8d8c996 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPackageWatchdogTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPackageWatchdogTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageWatchdogTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPdfTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPdfTestCases
index 722e896..b69cfad 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPdfTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPdfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPdfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPerfettoTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPerfettoTestCases
index 5eb5533..049824e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPerfettoTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPerfettoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPerfettoTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsPerfettoTestCases.ctshardware
new file mode 100644
index 0000000..469e03f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPerfettoTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsPerfettoTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsPerfettoTestCases.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsPerfettoTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPerfettoTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPermission2TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPermission2TestCases
index 0b134a9..3b26c43 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPermission2TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPermission2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermission2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPermission3TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPermission3TestCases
index b6976c4..4ae21a1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPermission3TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPermission3TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermission3TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCases
index 4a1970f..f4e3bb6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermissionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCases.camera.ctshardware
new file mode 100644
index 0000000..7babe2e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCases.camera.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsPermissionTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermissionTestCases.camera of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsPermissionTestCases.camera.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsPermissionTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermissionTestCases android.permission.cts.CameraPermissionTest', '--include-filter', 'CtsPermissionTestCases android.permission.cts.Camera2PermissionTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPermissionTestCases.camera',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCasesSdk28 b/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCasesSdk28
index 436827a..f849958 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCasesSdk28
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCasesSdk28
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermissionTestCasesSdk28 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCasesTelephony b/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCasesTelephony
index 06e997d..598f8c0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCasesTelephony
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPermissionTestCasesTelephony
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermissionTestCasesTelephony of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPreferenceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPreferenceTestCases
index 72b72ce..1c005c1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPreferenceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPreferenceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPreferenceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsPrintTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsPrintTestCases
index f18fae4..5804414 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsPrintTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsPrintTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPrintTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -20,6 +21,7 @@
         'cheets_CTS_R',
         hosts=host_list,
         iterations=1,
+        use_helpers=True,
         tag='arm.CtsPrintTestCases',
         test_name='cheets_CTS_R.arm.CtsPrintTestCases',
         run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPrintTestCases'],
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsProtoTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsProtoTestCases
index 07896ba..8298172 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsProtoTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsProtoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsProtoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsProviderTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsProviderTestCases
index 6c37ae3..a143f7b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsProviderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsProviderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsProviderUiTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsProviderUiTestCases
index c23f2d8..5906364 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsProviderUiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsProviderUiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsProviderUiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsQuickAccessWalletTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsQuickAccessWalletTestCases
index 4286592..15b6197 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsQuickAccessWalletTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsQuickAccessWalletTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsQuickAccessWalletTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsRenderscriptLegacyTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsRenderscriptLegacyTestCases
index 3af537c..d17e22d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsRenderscriptLegacyTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsRenderscriptLegacyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRenderscriptLegacyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsRenderscriptTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsRenderscriptTestCases
index b28a855..79393ae 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsRenderscriptTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsRenderscriptTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRenderscriptTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsResolverServiceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsResolverServiceTestCases
index 68780a0..6bc679c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsResolverServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsResolverServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsResolverServiceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsResourcesLoaderTests b/server/site_tests/cheets_CTS_R/control.arm.CtsResourcesLoaderTests
index 907eb59..9559ec2 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsResourcesLoaderTests
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsResourcesLoaderTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsResourcesLoaderTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsRoleTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsRoleTestCases
index 36c757e..95636aa 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsRoleTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsRoleTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRoleTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsRollbackManagerHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsRollbackManagerHostTestCases
index eef8b57..3f12ddc 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsRollbackManagerHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsRollbackManagerHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRollbackManagerHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsRsBlasTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsRsBlasTestCases
index a76dfb1..8b2bbb8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsRsBlasTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsRsBlasTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRsBlasTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsRsCppTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsRsCppTestCases
index c6df03b..555487f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsRsCppTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsRsCppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRsCppTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSampleDeviceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSampleDeviceTestCases
index 19d9026..d03daa3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSampleDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSampleDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSampleHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSampleHostTestCases
index 81cdac4..648bac4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSampleHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSampleHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSaxTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSaxTestCases
index 56f8485..f870833 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSaxTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSaxTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSaxTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsScopedStorageHostTest b/server/site_tests/cheets_CTS_R/control.arm.CtsScopedStorageHostTest
index 1e6077a..7b3b7c7 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsScopedStorageHostTest
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsScopedStorageHostTest
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsScopedStorageHostTest of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSdkExtensionsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSdkExtensionsTestCases
index 4a66465..bc2f5d2 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSdkExtensionsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSdkExtensionsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSdkExtensionsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSeccompHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSeccompHostTestCases
index 1ccfb80..09063d3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSeccompHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSeccompHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSeccompHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases1 b/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases1
index 60b35ac..8f1cf7d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases1
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases1 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases2 b/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases2
index a8c1101..733480e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases2
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases3 b/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases3
index 927073e..80dab14 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases3
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSecureElementAccessControlTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSecureFrpInstallTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSecureFrpInstallTestCases
index b5b6fec..8bf6c15 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSecureFrpInstallTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSecureFrpInstallTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureFrpInstallTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityBulletinHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityBulletinHostTestCases
index c32642b..0bf96f0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityBulletinHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityBulletinHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecurityBulletinHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityHostTestCases
index 6022994..40c9aad 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityTestCases
index 9ea5968..febef28 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSecurityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run module CtsSecurityTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxEphemeralTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxEphemeralTestCases
index 6154fe6..1778610 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxEphemeralTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxEphemeralTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxEphemeralTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk25TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk25TestCases
index 123454b..e1186ee 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk25TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk25TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk27TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk27TestCases
index d0d1ddd..9a5590c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk27TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk27TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk27TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk28TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk28TestCases
index d05c3b9..f9e3d03 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk28TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk28TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk28TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk29TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk29TestCases
index 578572d..eaf906f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk29TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdk29TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk29TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdkCurrentTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdkCurrentTestCases
index 2ab5b05..84dc56b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdkCurrentTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSelinuxTargetSdkCurrentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSensorTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSensorTestCases
index 5e7c4d1..7b8082d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSensorTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSensorTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSensorTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsSensorTestCases.ctshardware
new file mode 100644
index 0000000..1be1469
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSensorTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsSensorTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsSensorTestCases.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsSensorTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSensorTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSettingsHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSettingsHostTestCases
index 008b915..756fd2e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSettingsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSettingsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSettingsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSettingsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSettingsTestCases
new file mode 100644
index 0000000..10ea529
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSettingsTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsSettingsTestCases'
+ATTRIBUTES = 'suite:cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSettingsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsSettingsTestCases',
+        test_name='cheets_CTS_R.arm.CtsSettingsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSettingsTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSettingsTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSharedLibsApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSharedLibsApiSignatureTestCases
index d6cfbfb..81f7e80 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSharedLibsApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSharedLibsApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSharedLibsApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSharesheetTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSharesheetTestCases
index 6335fb2..b942eac 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSharesheetTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSharesheetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSharesheetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutHostTestCases
index 0e427ff..0e06c6f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher1 b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher1
index b267f3c..26ce6c1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher1
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerLauncher1 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher2 b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher2
index 3db4778..85ed1cd 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher2
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerLauncher2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher3 b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher3
index df4c5cb..7f57e3a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher3
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerLauncher3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher4 b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher4
index 02bd9b3..a18746f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher4
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerLauncher4
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerLauncher4 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage1 b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage1
index 372f713..cb4b99d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage1
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerPackage1 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage2 b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage2
index 1085fe3..61cf998 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage2
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerPackage2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage3 b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage3
index 975e840..aa58bbe 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage3
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerPackage3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage4 b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage4
index 647b26e..bc9cb5c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage4
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerPackage4
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerPackage4 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerTestCases
index a8e284a..64b797c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerThrottlingTest b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerThrottlingTest
index f6509af..fa74db1 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerThrottlingTest
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsShortcutManagerThrottlingTest
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerThrottlingTest of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSignedConfigHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSignedConfigHostTestCases
index f103a52..ffb8c74 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSignedConfigHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSignedConfigHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSignedConfigHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSimRestrictedApisTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSimRestrictedApisTestCases
index ebef01d..6ac078b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSimRestrictedApisTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSimRestrictedApisTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimRestrictedApisTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSimpleCpuTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSimpleCpuTestCases
index c1eab7c..f0089a4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSimpleCpuTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSimpleCpuTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimpleCpuTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSimpleperfTestCases
index c12434b..e1fee4b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSimpleperfTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSimpleperfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSkQPTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSkQPTestCases
index 94e3791..a1dfc8f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSkQPTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSkQPTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSkQPTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSliceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSliceTestCases
index a68931c..b6a9d9b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSliceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSliceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSliceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSoundTriggerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSoundTriggerTestCases
index 5060d7b..31f5ce8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSoundTriggerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSoundTriggerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSoundTriggerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSpeechTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSpeechTestCases
index b428268..73e4d7e 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSpeechTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSpeechTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSpeechTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsStagedInstallHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsStagedInstallHostTestCases
index 23787b2..675ca31 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsStagedInstallHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsStagedInstallHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsStagedInstallHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsStatsdHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsStatsdHostTestCases
index b30e2e6..5c24bb3 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsStatsdHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsStatsdHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsStatsdHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
@@ -28,9 +29,10 @@
         target_module='CtsStatsdHostTestCases',
         target_plan=None,
         bundle='arm',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
+        use_old_adb=True,
         warn_on_test_retry=False,
         timeout=7200)
 
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsStrictJavaPackagesTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsStrictJavaPackagesTestCases
index ba1f2aa..d836ed8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsStrictJavaPackagesTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsStrictJavaPackagesTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsStrictJavaPackagesTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSuspendAppsPermissionTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSuspendAppsPermissionTestCases
index 2a56e1d..7c7b26b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSuspendAppsPermissionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSuspendAppsPermissionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSuspendAppsPermissionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSuspendAppsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSuspendAppsTestCases
index a3fbc14..dd573a0 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSuspendAppsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSuspendAppsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSuspendAppsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSustainedPerformanceHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSustainedPerformanceHostTestCases
index ad89fd2..06c47bf 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSustainedPerformanceHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSustainedPerformanceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSustainedPerformanceHostTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsSustainedPerformanceHostTestCases.ctshardware
new file mode 100644
index 0000000..b688a9c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSustainedPerformanceHostTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsSustainedPerformanceHostTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsSustainedPerformanceHostTestCases.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsSustainedPerformanceHostTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSustainedPerformanceHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSyncAccountAccessOtherCertTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSyncAccountAccessOtherCertTestCases
index c7763e3..c4c247a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSyncAccountAccessOtherCertTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSyncAccountAccessOtherCertTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSyncContentHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSyncContentHostTestCases
index 1a99e11..b4db458 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSyncContentHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSyncContentHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncContentHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSyncManagerTestsCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSyncManagerTestsCases
index 5413f40..cfdf5ef 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSyncManagerTestsCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSyncManagerTestsCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncManagerTestsCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSystemApiAnnotationTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSystemApiAnnotationTestCases
index e5ab564..afeda13 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSystemApiAnnotationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSystemApiAnnotationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemApiAnnotationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSystemApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSystemApiSignatureTestCases
index fd4ead2..764520b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSystemApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSystemApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSystemIntentTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSystemIntentTestCases
index 6481b68..d4dcc41 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSystemIntentTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSystemIntentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemIntentTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSystemUiHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSystemUiHostTestCases
index 90b2e97..1b0dccd 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSystemUiHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSystemUiHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemUiHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsSystemUiTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsSystemUiTestCases
index f21f784..6a00ab6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsSystemUiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsSystemUiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemUiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTaggingHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTaggingHostTestCases
index 60a3e17..e4e890d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTaggingHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTaggingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTaggingHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases
index 337b823..4b8ddec 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases2 b/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases2
index 14d9132..0e8ac22 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases2
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases3 b/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases3
index 703392e..4156b66 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases3
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelecomTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephony2TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephony2TestCases
index ea40186..bfc47dd 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephony2TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephony2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephony2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephony3TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephony3TestCases
index cf22763..566f484 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephony3TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephony3TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephony3TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyHostCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyHostCases
index 10fa8fd..5449ec4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyHostCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyHostCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyHostCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyProviderHostCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyProviderHostCases
index cadb8b4..ad4f193 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyProviderHostCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyProviderHostCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyProviderHostCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyProviderTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyProviderTestCases
index f1bef99..69a0ee8 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyProviderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyProviderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonySdk28TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonySdk28TestCases
index 0eee39f..6f61c06 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonySdk28TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonySdk28TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonySdk28TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyTestCases
index 0c25a3b..83ab29f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyTestCasesPermissionReadPhoneState b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyTestCasesPermissionReadPhoneState
index 5a09329..fda3aa2 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyTestCasesPermissionReadPhoneState
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTelephonyTestCasesPermissionReadPhoneState
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyTestCasesPermissionReadPhoneState of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTestHarnessModeTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTestHarnessModeTestCases
index 24e1ab3..ca3aa7b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTestHarnessModeTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTestHarnessModeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTestHarnessModeTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTetheringTest b/server/site_tests/cheets_CTS_R/control.arm.CtsTetheringTest
index 235b607..e86c14f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTetheringTest
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTetheringTest
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTetheringTest of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTextClassifierTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTextClassifierTestCases
index d0e75e6..6d7f9ff 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTextClassifierTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTextClassifierTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTextClassifierTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTextTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTextTestCases
index a6f8a93..ec887ce 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTextTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTextTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTextTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTfliteNnapiDelegateTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTfliteNnapiDelegateTestCases
index c94b3c6..1fcb1fd 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTfliteNnapiDelegateTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTfliteNnapiDelegateTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTfliteNnapiDelegateTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsThemeDeviceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsThemeDeviceTestCases
index c56a6e6..14570bc 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsThemeDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsThemeDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsThemeDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsThemeHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsThemeHostTestCases
index 6083a55..466ddd5 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsThemeHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsThemeHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsThemeHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsThermalTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsThermalTestCases
index 40bdfc5..f1bf062 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsThermalTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsThermalTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsThermalTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsToastLegacyTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsToastLegacyTestCases
index a67752d..1ab595c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsToastLegacyTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsToastLegacyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsToastLegacyTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsToastTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsToastTestCases
index be38f22..2475a73 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsToastTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsToastTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsToastTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTransitionTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTransitionTestCases
index f1e9bb5..dccb382 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTransitionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTransitionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTransitionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTrustedVoiceHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTrustedVoiceHostTestCases
index bfc7eae..e0d2f02 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTrustedVoiceHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTrustedVoiceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTrustedVoiceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTvProviderTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTvProviderTestCases
index 230d4b5..5733f87 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTvProviderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTvProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTvProviderTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsTvTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsTvTestCases
index 343fa7f..32c0a4a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsTvTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsTvTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTvTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUiAutomationTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsUiAutomationTestCases
index 7bf3d54..cd2a1fe 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsUiAutomationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsUiAutomationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiAutomationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUiRenderingTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsUiRenderingTestCases
index a0593c2..8cd75b7 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsUiRenderingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsUiRenderingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiRenderingTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUiRenderingTestCases27 b/server/site_tests/cheets_CTS_R/control.arm.CtsUiRenderingTestCases27
index 7309e32..ad29998 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsUiRenderingTestCases27
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsUiRenderingTestCases27
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiRenderingTestCases27 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUidIsolationTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsUidIsolationTestCases
index 2044d86..a905283 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsUidIsolationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsUidIsolationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUidIsolationTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUsageStatsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsUsageStatsTestCases
index a92406d..1af4b9c 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsUsageStatsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsUsageStatsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
@@ -28,7 +29,7 @@
         target_module='CtsUsageStatsTestCases',
         target_plan=None,
         bundle='arm',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUsageStatsTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsUsageStatsTestCases.ctshardware
new file mode 100644
index 0000000..2623483
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsUsageStatsTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsUsageStatsTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsUsageStatsTestCases.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsUsageStatsTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsageStatsTestCases',
+        target_plan=None,
+        bundle='arm',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUsbManagerTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsUsbManagerTestCases
index 61d7fd1..14c1529 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsUsbManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsUsbManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsbManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUsbTests b/server/site_tests/cheets_CTS_R/control.arm.CtsUsbTests
index be69bb2..c8dfaa5 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsUsbTests
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsUsbTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsbTests of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUserspaceRebootHostSideTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsUserspaceRebootHostSideTestCases
deleted file mode 100644
index 61d9c3d..0000000
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsUserspaceRebootHostSideTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.arm.CtsUserspaceRebootHostSideTestCases'
-ATTRIBUTES = 'suite:cts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUserspaceRebootHostSideTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='arm.CtsUserspaceRebootHostSideTestCases',
-        test_name='cheets_CTS_R.arm.CtsUserspaceRebootHostSideTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUserspaceRebootHostSideTestCases'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUserspaceRebootHostSideTestCases',
-        target_plan=None,
-        bundle='arm',
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUsesLibraryHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsUsesLibraryHostTestCases
index 829c67f..6d09154 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsUsesLibraryHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsUsesLibraryHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsesLibraryHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsUtilTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsUtilTestCases
index 1ad4c61..407b6c4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsUtilTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsUtilTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUtilTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsVideoTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsVideoTestCases
index 6162e4d..340d289 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsVideoTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsVideoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsViewInspectorAnnotationProcessorTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsViewInspectorAnnotationProcessorTestCases
index 0a4174c..1c90eaa 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsViewInspectorAnnotationProcessorTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsViewInspectorAnnotationProcessorTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsViewInspectorAnnotationProcessorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCases
index 06595a3..4f03eca 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -30,6 +31,6 @@
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
-        timeout=3600)
+        timeout=9000)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCases.ctshardware
new file mode 100644
index 0000000..6020b7b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsViewTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsViewTestCases.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsViewTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCasesSdk28 b/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCasesSdk28
index be92bb8..1096910 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCasesSdk28
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsViewTestCasesSdk28
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsViewTestCasesSdk28 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsVoiceInteractionTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsVoiceInteractionTestCases
index b489330..740e097 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsVoiceInteractionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsVoiceInteractionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVoiceInteractionTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsVoiceSettingsTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsVoiceSettingsTestCases
index 31874dc..9e08e41 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsVoiceSettingsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsVoiceSettingsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVoiceSettingsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsVrTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsVrTestCases
index 2deb7d4..b65c601 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsVrTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsVrTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVrTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWebkitTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWebkitTestCases
index 8082229..33fd847 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWebkitTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWebkitTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWebkitTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWidgetTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWidgetTestCases
index 1d7c294..fd7d6e4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWidgetTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWidgetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWidgetTestCases29 b/server/site_tests/cheets_CTS_R/control.arm.CtsWidgetTestCases29
index aa0d7aa..19943c4 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWidgetTestCases29
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWidgetTestCases29
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWidgetTestCases29 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWifiBroadcastsHostTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWifiBroadcastsHostTestCases
index 56cc858..88d7724 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWifiBroadcastsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWifiBroadcastsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWifiBroadcastsHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWifiTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWifiTestCases
index 36980be..e64ea63 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWifiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWifiTestCases
@@ -12,6 +12,8 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
 DOC = 'Run module CtsWifiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -27,6 +29,7 @@
         target_module='CtsWifiTestCases',
         target_plan=None,
         bundle='arm',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWifiTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.arm.CtsWifiTestCases.ctshardware
new file mode 100644
index 0000000..be51f8b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWifiTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.CtsWifiTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsWifiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.CtsWifiTestCases.ctshardware',
+        test_name='cheets_CTS_R.arm.CtsWifiTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWifiTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWifiTestCases',
+        target_plan=None,
+        bundle='arm',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerDeviceTestCases
index bf582f4..f4aec4a 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -20,6 +21,7 @@
         'cheets_CTS_R',
         hosts=host_list,
         iterations=1,
+        max_retry=10,
         tag='arm.CtsWindowManagerDeviceTestCases',
         test_name='cheets_CTS_R.arm.CtsWindowManagerDeviceTestCases',
         run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWindowManagerDeviceTestCases'],
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerJetpackTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerJetpackTestCases
index 6848ccf..dcb32b6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerJetpackTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerJetpackTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerJetpackTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk25TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk25TestCases
index dc490a8..4a06a34 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk25TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerSdk25TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk28TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk28TestCases
index 2268c3b..5d88934 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk28TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk28TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerSdk28TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk29TestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk29TestCases
index 2ca6ee4..abbb18d 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk29TestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWindowManagerSdk29TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerSdk29TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWrapNoWrapTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWrapNoWrapTestCases
index 5e383b5..01dc31b 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWrapNoWrapTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWrapNoWrapTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapNoWrapTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapDebugMallocDebugTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapDebugMallocDebugTestCases
index 3a87355..2903e85 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapDebugMallocDebugTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapDebugMallocDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapDebugMallocDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapDebugTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapDebugTestCases
index 6772846..52ed854 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapDebugTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapNoDebugTestCases b/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapNoDebugTestCases
index 177bcd2..96f8a2f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapNoDebugTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.CtsWrapWrapNoDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapNoDebugTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.LegacyStorageTest b/server/site_tests/cheets_CTS_R/control.arm.LegacyStorageTest
index 4e5b30d..53194f6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.LegacyStorageTest
+++ b/server/site_tests/cheets_CTS_R/control.arm.LegacyStorageTest
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module LegacyStorageTest of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.ScopedStorageTest b/server/site_tests/cheets_CTS_R/control.arm.ScopedStorageTest
index 261957f..0db7b18 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.ScopedStorageTest
+++ b/server/site_tests/cheets_CTS_R/control.arm.ScopedStorageTest
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module ScopedStorageTest of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.cts-platform-version-check b/server/site_tests/cheets_CTS_R/control.arm.cts-platform-version-check
deleted file mode 100644
index 929ef3e..0000000
--- a/server/site_tests/cheets_CTS_R/control.arm.cts-platform-version-check
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.arm.cts-platform-version-check'
-ATTRIBUTES = 'suite:cts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module cts-platform-version-check of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='arm.cts-platform-version-check',
-        test_name='cheets_CTS_R.arm.cts-platform-version-check',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'cts-platform-version-check'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='cts-platform-version-check',
-        target_plan=None,
-        bundle='arm',
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.cts-system-all.api b/server/site_tests/cheets_CTS_R/control.arm.cts-system-all.api
deleted file mode 100644
index 708310d..0000000
--- a/server/site_tests/cheets_CTS_R/control.arm.cts-system-all.api
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.arm.cts-system-all.api'
-ATTRIBUTES = 'suite:cts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module cts-system-all.api of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='arm.cts-system-all.api',
-        test_name='cheets_CTS_R.arm.cts-system-all.api',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'cts-system-all.api'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='cts-system-all.api',
-        target_plan=None,
-        bundle='arm',
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.signed-CtsOmapiTestCases b/server/site_tests/cheets_CTS_R/control.arm.signed-CtsOmapiTestCases
index 028a5a1..df48754 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.signed-CtsOmapiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.arm.signed-CtsOmapiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsOmapiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases1 b/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases1
index c9b2bdc..362caaa 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases1
+++ b/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases1 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases2 b/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases2
index 2cbb9b1..8cdd109 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases2
+++ b/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases3 b/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases3
index c466f16..198c846 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases3
+++ b/server/site_tests/cheets_CTS_R/control.arm.signed-CtsSecureElementAccessControlTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.tradefed-run-collect-tests-only b/server/site_tests/cheets_CTS_R/control.arm.tradefed-run-collect-tests-only
index e170847..2debb7f 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.tradefed-run-collect-tests-only
+++ b/server/site_tests/cheets_CTS_R/control.arm.tradefed-run-collect-tests-only
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LENGTHY'
 MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run all of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_R/control.arm.tradefed-run-collect-tests-only-hardware b/server/site_tests/cheets_CTS_R/control.arm.tradefed-run-collect-tests-only-hardware
new file mode 100644
index 0000000..b9cc76f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.tradefed-run-collect-tests-only-hardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.tradefed-run-collect-tests-only-hardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module tradefed-run-collect-tests-only-hardware of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='arm.tradefed-run-collect-tests-only-hardware',
+        test_name='cheets_CTS_R.arm.tradefed-run-collect-tests-only-hardware',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--subplan', 'cts-hardware', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='tradefed-run-collect-tests-only-hardware',
+        target_plan='cts-hardware',
+        bundle='arm',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.vm-tests-tf b/server/site_tests/cheets_CTS_R/control.arm.vm-tests-tf
index 5617ce2..4a546b6 100644
--- a/server/site_tests/cheets_CTS_R/control.arm.vm-tests-tf
+++ b/server/site_tests/cheets_CTS_R/control.arm.vm-tests-tf
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module vm-tests-tf of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.arm.waivers b/server/site_tests/cheets_CTS_R/control.arm.waivers
new file mode 100644
index 0000000..3e4b841
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.waivers
@@ -0,0 +1,39 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is not auto-generated. Don't delete it.
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.waivers'
+ATTRIBUTES = 'suite:cts, suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using arm ABI in ARC.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='arm.waivers',
+        test_name='cheets_CTS_R.arm.waivers',
+        run_template=['run', 'commandAndExit', 'cts', '--subplan', 'waivers'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='cts-dev',
+        target_plan='waivers',
+        load_waivers=False,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        uri='DEV_MOBLAB',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.arm.waivers-collect-tests-only b/server/site_tests/cheets_CTS_R/control.arm.waivers-collect-tests-only
new file mode 100644
index 0000000..d2cdeba
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.arm.waivers-collect-tests-only
@@ -0,0 +1,40 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is not auto-generated. Don't delete it.
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.arm.waivers-collect-tests-only'
+ATTRIBUTES = 'suite:cts, suite:cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+PRIORITY = 70
+DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using arm ABI in ARC.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=0,
+        tag='arm.waivers-collect-tests-only',
+        test_name='cheets_CTS_R.arm.waivers-collect-tests-only',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--subplan', 'waivers', '--disable-reboot', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
+        retry_template=None,
+        target_module=None,
+        target_plan='waivers',
+        load_waivers=False,
+        bundle='arm',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        uri='DEV_MOBLAB',
+        use_jdk9=True,
+        timeout=360)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAbiOverrideHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAbiOverrideHost
new file mode 100644
index 0000000..7665ae7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAbiOverrideHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAbiOverrideHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAbiOverrideHostTestCases, CtsAbiOverrideHostTestCases[instant], CtsAbiOverrideHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAbiOverrideHost',
+        test_name='cheets_CTS_R.internal.arm.CtsAbiOverrideHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAbiOverrideHostTestCases', '--include-filter', 'CtsAbiOverrideHostTestCases[instant]', '--include-filter', 'CtsAbiOverrideHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAbiOverrideHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAcceleration b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAcceleration
new file mode 100644
index 0000000..7f8c236
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAcceleration
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAcceleration'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccelerationTestCases, CtsAccelerationTestCases[instant], CtsAccelerationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsAcceleration',
+        test_name='cheets_CTS_R.internal.arm.CtsAcceleration',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccelerationTestCases', '--include-filter', 'CtsAccelerationTestCases[instant]', '--include-filter', 'CtsAccelerationTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAcceleration',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAccessibility b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAccessibility
new file mode 100644
index 0000000..33ccbd4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAccessibility
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAccessibility'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityServiceSdk29TestCases, CtsAccessibilityServiceSdk29TestCases[instant], CtsAccessibilityServiceSdk29TestCases[secondary_user], CtsAccessibilityServiceTestCases, CtsAccessibilityServiceTestCases[instant], CtsAccessibilityTestCases, CtsAccessibilityTestCases[instant], CtsAccessibilityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAccessibility',
+        test_name='cheets_CTS_R.internal.arm.CtsAccessibility',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[instant]', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[secondary_user]', '--include-filter', 'CtsAccessibilityServiceTestCases', '--include-filter', 'CtsAccessibilityServiceTestCases[instant]', '--include-filter', 'CtsAccessibilityTestCases', '--include-filter', 'CtsAccessibilityTestCases[instant]', '--include-filter', 'CtsAccessibilityTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAccessibility',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAccountManager b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAccountManager
new file mode 100644
index 0000000..01c7b87
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAccountManager
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAccountManager'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccountManagerTestCases, CtsAccountManagerTestCases[instant], CtsAccountManagerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAccountManager',
+        test_name='cheets_CTS_R.internal.arm.CtsAccountManager',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccountManagerTestCases', '--include-filter', 'CtsAccountManagerTestCases[instant]', '--include-filter', 'CtsAccountManagerTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAccountManager',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAccountsHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAccountsHost
new file mode 100644
index 0000000..474c6ac
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAccountsHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAccountsHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccountsHostTestCases, CtsAccountsHostTestCases[instant], CtsAccountsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAccountsHost',
+        test_name='cheets_CTS_R.internal.arm.CtsAccountsHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccountsHostTestCases', '--include-filter', 'CtsAccountsHostTestCases[instant]', '--include-filter', 'CtsAccountsHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAccountsHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsActivityManagerBackgroundActivity b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsActivityManagerBackgroundActivity
new file mode 100644
index 0000000..675b504
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsActivityManagerBackgroundActivity
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsActivityManagerBackgroundActivity'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsActivityManagerBackgroundActivityTestCases, CtsActivityManagerBackgroundActivityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsActivityManagerBackgroundActivity',
+        test_name='cheets_CTS_R.internal.arm.CtsActivityManagerBackgroundActivity',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsActivityManagerBackgroundActivity',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAdb b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAdb
new file mode 100644
index 0000000..4d3753b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAdb
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAdb'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAdbHostTestCases, CtsAdbHostTestCases[secondary_user], CtsAdbManagerHostTestCases, CtsAdbManagerHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAdb',
+        test_name='cheets_CTS_R.internal.arm.CtsAdb',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdbHostTestCases', '--include-filter', 'CtsAdbHostTestCases[secondary_user]', '--include-filter', 'CtsAdbManagerHostTestCases', '--include-filter', 'CtsAdbManagerHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAdb',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAdmin b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAdmin
new file mode 100644
index 0000000..fcf552d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAdmin
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAdmin'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAdmin',
+        test_name='cheets_CTS_R.internal.arm.CtsAdmin',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAdmin',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAlarmManager b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAlarmManager
new file mode 100644
index 0000000..0c68ccb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAlarmManager
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAlarmManager'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAlarmManagerTestCases, CtsAlarmManagerTestCases[instant], CtsAlarmManagerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAlarmManager',
+        test_name='cheets_CTS_R.internal.arm.CtsAlarmManager',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAlarmManagerTestCases', '--include-filter', 'CtsAlarmManagerTestCases[instant]', '--include-filter', 'CtsAlarmManagerTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAlarmManager',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAndroid b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAndroid
new file mode 100644
index 0000000..761efc0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAndroid
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAndroid'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAndroidAppTestCases, CtsAndroidAppTestCases[instant], CtsAndroidAppTestCases[secondary_user], CtsAndroidTestBase28ApiSignatureTestCases, CtsAndroidTestBase28ApiSignatureTestCases[instant], CtsAndroidTestBase28ApiSignatureTestCases[secondary_user], CtsAndroidTestBaseCurrentApiSignatureTestCases, CtsAndroidTestBaseCurrentApiSignatureTestCases[instant], CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases[instant], CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestRunnerCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant], CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAndroid',
+        test_name='cheets_CTS_R.internal.arm.CtsAndroid',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidAppTestCases[instant]', '--include-filter', 'CtsAndroidAppTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAndroid',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=28800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAngleIntegrationHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAngleIntegrationHost
new file mode 100644
index 0000000..53e0935
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAngleIntegrationHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAngleIntegrationHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAngleIntegrationHostTestCases, CtsAngleIntegrationHostTestCases[instant], CtsAngleIntegrationHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAngleIntegrationHost',
+        test_name='cheets_CTS_R.internal.arm.CtsAngleIntegrationHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAngleIntegrationHostTestCases', '--include-filter', 'CtsAngleIntegrationHostTestCases[instant]', '--include-filter', 'CtsAngleIntegrationHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAngleIntegrationHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAnimation b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAnimation
new file mode 100644
index 0000000..eca5ec7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAnimation
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAnimation'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAnimationTestCases, CtsAnimationTestCases[instant], CtsAnimationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAnimation',
+        test_name='cheets_CTS_R.internal.arm.CtsAnimation',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAnimationTestCases', '--include-filter', 'CtsAnimationTestCases[instant]', '--include-filter', 'CtsAnimationTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAnimation',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsApacheHttpLegacy b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsApacheHttpLegacy
new file mode 100644
index 0000000..7028150
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsApacheHttpLegacy
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsApacheHttpLegacy'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacy27ApiSignatureTestCases[instant], CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant], CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsApacheHttpLegacy',
+        test_name='cheets_CTS_R.internal.arm.CtsApacheHttpLegacy',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsApacheHttpLegacy',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsApex b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsApex
new file mode 100644
index 0000000..aa7f2b0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsApex
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsApex'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsApexTestCases, CtsApexTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsApex',
+        test_name='cheets_CTS_R.internal.arm.CtsApex',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApexTestCases', '--include-filter', 'CtsApexTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsApex',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsApp b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsApp
new file mode 100644
index 0000000..4a182ff
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsApp
@@ -0,0 +1,50 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsApp'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppBindingHostTestCases, CtsAppBindingHostTestCases[secondary_user], CtsAppCompatHostTestCases, CtsAppCompatHostTestCases[instant], CtsAppCompatHostTestCases[secondary_user], CtsAppComponentFactoryTestCases, CtsAppComponentFactoryTestCases[instant], CtsAppComponentFactoryTestCases[secondary_user], CtsAppEnumerationTestCases, CtsAppEnumerationTestCases[secondary_user], CtsAppExitTestCases, CtsAppExitTestCases[instant], CtsAppExitTestCases[secondary_user], CtsAppIntegrityDeviceTestCases, CtsAppOpsTestCases, CtsAppOpsTestCases[instant], CtsAppOpsTestCases[secondary_user], CtsAppPredictionServiceTestCases, CtsAppPredictionServiceTestCases[secondary_user], CtsAppSecurityHostTestCases, CtsAppSecurityHostTestCases[secondary_user], CtsAppTestCases, CtsAppTestCases[instant], CtsAppTestCases[secondary_user], CtsAppUsageHostTestCases, CtsAppUsageHostTestCases[instant], CtsAppUsageHostTestCases[secondary_user], CtsAppWidgetTestCases, CtsAppWidgetTestCases[instant], CtsAppWidgetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        enable_default_apps=True,
+        tag='internal.arm.CtsApp',
+        test_name='cheets_CTS_R.internal.arm.CtsApp',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppBindingHostTestCases', '--include-filter', 'CtsAppBindingHostTestCases[secondary_user]', '--include-filter', 'CtsAppCompatHostTestCases', '--include-filter', 'CtsAppCompatHostTestCases[instant]', '--include-filter', 'CtsAppCompatHostTestCases[secondary_user]', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppComponentFactoryTestCases[instant]', '--include-filter', 'CtsAppComponentFactoryTestCases[secondary_user]', '--include-filter', 'CtsAppEnumerationTestCases', '--include-filter', 'CtsAppEnumerationTestCases[secondary_user]', '--include-filter', 'CtsAppExitTestCases', '--include-filter', 'CtsAppExitTestCases[instant]', '--include-filter', 'CtsAppExitTestCases[secondary_user]', '--include-filter', 'CtsAppIntegrityDeviceTestCases', '--include-filter', 'CtsAppOpsTestCases', '--include-filter', 'CtsAppOpsTestCases[instant]', '--include-filter', 'CtsAppOpsTestCases[secondary_user]', '--include-filter', 'CtsAppPredictionServiceTestCases', '--include-filter', 'CtsAppPredictionServiceTestCases[secondary_user]', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppTestCases[instant]', '--include-filter', 'CtsAppTestCases[secondary_user]', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppUsageHostTestCases[instant]', '--include-filter', 'CtsAppUsageHostTestCases[secondary_user]', '--include-filter', 'CtsAppWidgetTestCases', '--include-filter', 'CtsAppWidgetTestCases[instant]', '--include-filter', 'CtsAppWidgetTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsApp',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        hard_reboot_on_failure=True,
+        timeout=61200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAppTestCases.feature.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAppTestCases.feature.ctshardware
new file mode 100644
index 0000000..9bf0212
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAppTestCases.feature.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAppTestCases.feature.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppTestCases.feature of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAppTestCases.feature.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsAppTestCases.feature.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppTestCases android.app.cts.SystemFeaturesTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAppTestCases.feature',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAslrMalloc b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAslrMalloc
new file mode 100644
index 0000000..2351bb2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAslrMalloc
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAslrMalloc'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAslrMallocTestCases, CtsAslrMallocTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAslrMalloc',
+        test_name='cheets_CTS_R.internal.arm.CtsAslrMalloc',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAslrMallocTestCases', '--include-filter', 'CtsAslrMallocTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAslrMalloc',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAssist b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAssist
new file mode 100644
index 0000000..78fad04
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAssist
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAssist'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAssistTestCases, CtsAssistTestCases[instant], CtsAssistTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAssist',
+        test_name='cheets_CTS_R.internal.arm.CtsAssist',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAssistTestCases', '--include-filter', 'CtsAssistTestCases[instant]', '--include-filter', 'CtsAssistTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAssist',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAtomicInstall b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAtomicInstall
new file mode 100644
index 0000000..b0de21a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAtomicInstall
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAtomicInstall'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAtomicInstallTestCases, CtsAtomicInstallTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAtomicInstall',
+        test_name='cheets_CTS_R.internal.arm.CtsAtomicInstall',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAtomicInstallTestCases', '--include-filter', 'CtsAtomicInstallTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAtomicInstall',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAtraceHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAtraceHost
new file mode 100644
index 0000000..1a40069
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAtraceHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAtraceHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAtraceHostTestCases, CtsAtraceHostTestCases[instant], CtsAtraceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAtraceHost',
+        test_name='cheets_CTS_R.internal.arm.CtsAtraceHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAtraceHostTestCases', '--include-filter', 'CtsAtraceHostTestCases[instant]', '--include-filter', 'CtsAtraceHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAtraceHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAttentionServiceDevice b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAttentionServiceDevice
new file mode 100644
index 0000000..8a633e6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAttentionServiceDevice
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAttentionServiceDevice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAttentionServiceDeviceTestCases, CtsAttentionServiceDeviceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAttentionServiceDevice',
+        test_name='cheets_CTS_R.internal.arm.CtsAttentionServiceDevice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAttentionServiceDeviceTestCases', '--include-filter', 'CtsAttentionServiceDeviceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAttentionServiceDevice',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAutoFillService b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAutoFillService
new file mode 100644
index 0000000..ab06830
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsAutoFillService
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsAutoFillService'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAutoFillServiceTestCases, CtsAutoFillServiceTestCases[instant], CtsAutoFillServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsAutoFillService',
+        test_name='cheets_CTS_R.internal.arm.CtsAutoFillService',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAutoFillServiceTestCases', '--include-filter', 'CtsAutoFillServiceTestCases[instant]', '--include-filter', 'CtsAutoFillServiceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAutoFillService',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=14400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBackgroundRestrictions b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBackgroundRestrictions
new file mode 100644
index 0000000..aab8a54
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBackgroundRestrictions
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsBackgroundRestrictions'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBackgroundRestrictionsTestCases, CtsBackgroundRestrictionsTestCases[instant], CtsBackgroundRestrictionsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBackgroundRestrictions',
+        test_name='cheets_CTS_R.internal.arm.CtsBackgroundRestrictions',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackgroundRestrictionsTestCases', '--include-filter', 'CtsBackgroundRestrictionsTestCases[instant]', '--include-filter', 'CtsBackgroundRestrictionsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBackgroundRestrictions',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBackup b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBackup
new file mode 100644
index 0000000..a8444f1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBackup
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsBackup'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBackupHostTestCases, CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBackup',
+        test_name='cheets_CTS_R.internal.arm.CtsBackup',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBackup',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBatterySaving b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBatterySaving
new file mode 100644
index 0000000..22c11e5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBatterySaving
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsBatterySaving'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBatterySavingTestCases, CtsBatterySavingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBatterySaving',
+        test_name='cheets_CTS_R.internal.arm.CtsBatterySaving',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBatterySavingTestCases', '--include-filter', 'CtsBatterySavingTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBatterySaving',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBionic b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBionic
new file mode 100644
index 0000000..b6a65db
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBionic
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsBionic'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBionicAppTestCases, CtsBionicAppTestCases[instant], CtsBionicAppTestCases[secondary_user], CtsBionicTestCases, CtsBionicTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBionic',
+        test_name='cheets_CTS_R.internal.arm.CtsBionic',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBionicAppTestCases', '--include-filter', 'CtsBionicAppTestCases[instant]', '--include-filter', 'CtsBionicAppTestCases[secondary_user]', '--include-filter', 'CtsBionicTestCases', '--include-filter', 'CtsBionicTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBionic',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBlobStore b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBlobStore
new file mode 100644
index 0000000..1b90469
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBlobStore
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsBlobStore'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBlobStoreHostTestCases, CtsBlobStoreHostTestCases[secondary_user], CtsBlobStoreHostTestHelper, CtsBlobStoreHostTestHelper[secondary_user], CtsBlobStoreTestCases, CtsBlobStoreTestCases[secondary_user], CtsBlobStoreTestHelper, CtsBlobStoreTestHelperDiffSig, CtsBlobStoreTestHelperDiffSig2, CtsBlobStoreTestHelperDiffSig2[secondary_user], CtsBlobStoreTestHelperDiffSig[secondary_user], CtsBlobStoreTestHelper[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBlobStore',
+        test_name='cheets_CTS_R.internal.arm.CtsBlobStore',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBlobStoreHostTestCases', '--include-filter', 'CtsBlobStoreHostTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreHostTestHelper', '--include-filter', 'CtsBlobStoreHostTestHelper[secondary_user]', '--include-filter', 'CtsBlobStoreTestCases', '--include-filter', 'CtsBlobStoreTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper', '--include-filter', 'CtsBlobStoreTestHelperDiffSig', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelperDiffSig[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBlobStore',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBluetooth b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBluetooth
new file mode 100644
index 0000000..70a08b6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBluetooth
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsBluetooth'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBluetoothTestCases, CtsBluetoothTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBluetooth',
+        test_name='cheets_CTS_R.internal.arm.CtsBluetooth',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBluetoothTestCases', '--include-filter', 'CtsBluetoothTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBluetooth',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBootStats b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBootStats
new file mode 100644
index 0000000..e6b528a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsBootStats
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsBootStats'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBootStatsTestCases, CtsBootStatsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsBootStats',
+        test_name='cheets_CTS_R.internal.arm.CtsBootStats',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBootStatsTestCases', '--include-filter', 'CtsBootStatsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBootStats',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCalendarProvider b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCalendarProvider
new file mode 100644
index 0000000..d2a2a6b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCalendarProvider
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCalendarProvider'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCalendarProviderTestCases, CtsCalendarProviderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCalendarProvider',
+        test_name='cheets_CTS_R.internal.arm.CtsCalendarProvider',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCalendarProviderTestCases', '--include-filter', 'CtsCalendarProviderTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCalendarProvider',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCalendarcommon2 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCalendarcommon2
new file mode 100644
index 0000000..1c3a22f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCalendarcommon2
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCalendarcommon2'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCalendarcommon2TestCases, CtsCalendarcommon2TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCalendarcommon2',
+        test_name='cheets_CTS_R.internal.arm.CtsCalendarcommon2',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCalendarcommon2TestCases', '--include-filter', 'CtsCalendarcommon2TestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCalendarcommon2',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCamera b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCamera
new file mode 100644
index 0000000..c7d3799
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCamera
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCamera'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraApi25TestCases, CtsCameraApi25TestCases[secondary_user], CtsCameraTestCases, CtsCameraTestCases[instant], CtsCameraTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCamera',
+        test_name='cheets_CTS_R.internal.arm.CtsCamera',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraApi25TestCases[secondary_user]', '--include-filter', 'CtsCameraTestCases', '--include-filter', 'CtsCameraTestCases[instant]', '--include-filter', 'CtsCameraTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCamera',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=14400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.NativeCameraDeviceTest b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.NativeCameraDeviceTest
new file mode 100644
index 0000000..23eceea
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.NativeCameraDeviceTest
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCameraTestCases.NativeCameraDeviceTest'
+ATTRIBUTES = 'suite:arc-cts-qual, suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases.NativeCameraDeviceTest of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.CtsCameraTestCases.NativeCameraDeviceTest',
+        test_name='cheets_CTS_R.internal.arm.CtsCameraTestCases.NativeCameraDeviceTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases[instant] android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]', '--include-filter', 'CtsCameraTestCases[instant] android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        precondition_commands=['sleep 20', 'android-sh -c \'am start -a android.intent.action.VIEW -d https://webglsamples.org/electricflower/electricflower.html\''],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware
new file mode 100644
index 0000000..a7be139
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases.NativeCameraDeviceTest of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        precondition_commands=['sleep 20', 'android-sh -c \'am start -a android.intent.action.VIEW -d https://webglsamples.org/electricflower/electricflower.html\''],
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.ctshardware
new file mode 100644
index 0000000..5afd973
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCameraTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCameraTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsCameraTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.noled.camerabox.back b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.noled.camerabox.back
new file mode 100644
index 0000000..029d2a7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.noled.camerabox.back
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCameraTestCases.noled.camerabox.back'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-r, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, camerabox_light:noled, camerabox_facing:back'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        camera_facing='back',
+        cmdline_args=args,
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCameraTestCases.noled.camerabox.back',
+        test_name='cheets_CTS_R.internal.arm.CtsCameraTestCases.noled.camerabox.back',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        retry_manual_tests=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.noled.camerabox.front b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.noled.camerabox.front
new file mode 100644
index 0000000..fcaced8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCameraTestCases.noled.camerabox.front
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCameraTestCases.noled.camerabox.front'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-r, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, camerabox_light:noled, camerabox_facing:front'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        camera_facing='front',
+        cmdline_args=args,
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCameraTestCases.noled.camerabox.front',
+        test_name='cheets_CTS_R.internal.arm.CtsCameraTestCases.noled.camerabox.front',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        retry_manual_tests=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCar b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCar
new file mode 100644
index 0000000..990bb37
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCar
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCar'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarHostTestCases, CtsCarTestCases, CtsCarTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCar',
+        test_name='cheets_CTS_R.internal.arm.CtsCar',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCarHostTestCases', '--include-filter', 'CtsCarTestCases', '--include-filter', 'CtsCarTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCar',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCarrierApiTestCases
new file mode 100644
index 0000000..834de28
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCarrierApiTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCarrierApiTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCarrierApiTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsCarrierApiTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarrierApiTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCarrierApiTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsClassLoaderFactory b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsClassLoaderFactory
new file mode 100644
index 0000000..fabe6af
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsClassLoaderFactory
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsClassLoaderFactory'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases, CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant], CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user], CtsClassLoaderFactoryPathClassLoaderTestCases, CtsClassLoaderFactoryPathClassLoaderTestCases[instant], CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsClassLoaderFactory',
+        test_name='cheets_CTS_R.internal.arm.CtsClassLoaderFactory',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsClassLoaderFactory',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsClassloaderSplitsHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsClassloaderSplitsHost
new file mode 100644
index 0000000..9436ab3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsClassloaderSplitsHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsClassloaderSplitsHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsClassloaderSplitsHostTestCases, CtsClassloaderSplitsHostTestCases[instant], CtsClassloaderSplitsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsClassloaderSplitsHost',
+        test_name='cheets_CTS_R.internal.arm.CtsClassloaderSplitsHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsClassloaderSplitsHostTestCases', '--include-filter', 'CtsClassloaderSplitsHostTestCases[instant]', '--include-filter', 'CtsClassloaderSplitsHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsClassloaderSplitsHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCodePathHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCodePathHost
new file mode 100644
index 0000000..8dbd04e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCodePathHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCodePathHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCodePathHostTestCases, CtsCodePathHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCodePathHost',
+        test_name='cheets_CTS_R.internal.arm.CtsCodePathHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCodePathHostTestCases', '--include-filter', 'CtsCodePathHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCodePathHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsColorMode b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsColorMode
new file mode 100644
index 0000000..8a6ada8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsColorMode
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsColorMode'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsColorModeTestCases, CtsColorModeTestCases[instant], CtsColorModeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsColorMode',
+        test_name='cheets_CTS_R.internal.arm.CtsColorMode',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsColorModeTestCases', '--include-filter', 'CtsColorModeTestCases[instant]', '--include-filter', 'CtsColorModeTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsColorMode',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCompilation b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCompilation
new file mode 100644
index 0000000..adcb153
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCompilation
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCompilation'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCompilationTestCases, CtsCompilationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCompilation',
+        test_name='cheets_CTS_R.internal.arm.CtsCompilation',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCompilationTestCases', '--include-filter', 'CtsCompilationTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCompilation',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsContactsProvider b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsContactsProvider
new file mode 100644
index 0000000..834bbd6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsContactsProvider
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsContactsProvider'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsContactsProviderTestCases, CtsContactsProviderTestCases[secondary_user], CtsContactsProviderWipe, CtsContactsProviderWipe[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsContactsProvider',
+        test_name='cheets_CTS_R.internal.arm.CtsContactsProvider',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsContactsProviderTestCases', '--include-filter', 'CtsContactsProviderTestCases[secondary_user]', '--include-filter', 'CtsContactsProviderWipe', '--include-filter', 'CtsContactsProviderWipe[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsContactsProvider',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsContent b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsContent
new file mode 100644
index 0000000..b56b3fb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsContent
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsContent'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsContentCaptureServiceTestCases, CtsContentCaptureServiceTestCases[instant], CtsContentCaptureServiceTestCases[secondary_user], CtsContentSuggestionsTestCases, CtsContentSuggestionsTestCases[secondary_user], CtsContentTestCases, CtsContentTestCases[instant], CtsContentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        enable_default_apps=True,
+        tag='internal.arm.CtsContent',
+        test_name='cheets_CTS_R.internal.arm.CtsContent',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsContentCaptureServiceTestCases', '--include-filter', 'CtsContentCaptureServiceTestCases[instant]', '--include-filter', 'CtsContentCaptureServiceTestCases[secondary_user]', '--include-filter', 'CtsContentSuggestionsTestCases', '--include-filter', 'CtsContentSuggestionsTestCases[secondary_user]', '--include-filter', 'CtsContentTestCases', '--include-filter', 'CtsContentTestCases[instant]', '--include-filter', 'CtsContentTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsContent',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsControlsDevice b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsControlsDevice
new file mode 100644
index 0000000..28bd01f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsControlsDevice
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsControlsDevice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsControlsDeviceTestCases, CtsControlsDeviceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsControlsDevice',
+        test_name='cheets_CTS_R.internal.arm.CtsControlsDevice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsControlsDeviceTestCases', '--include-filter', 'CtsControlsDeviceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsControlsDevice',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCppTools b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCppTools
new file mode 100644
index 0000000..fa18d01
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCppTools
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCppTools'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCppToolsTestCases, CtsCppToolsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCppTools',
+        test_name='cheets_CTS_R.internal.arm.CtsCppTools',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCppToolsTestCases', '--include-filter', 'CtsCppToolsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCppTools',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCurrentApiSignature b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCurrentApiSignature
new file mode 100644
index 0000000..f808d52
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsCurrentApiSignature
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsCurrentApiSignature'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCurrentApiSignatureTestCases, CtsCurrentApiSignatureTestCases[instant], CtsCurrentApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsCurrentApiSignature',
+        test_name='cheets_CTS_R.internal.arm.CtsCurrentApiSignature',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCurrentApiSignatureTestCases', '--include-filter', 'CtsCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsCurrentApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCurrentApiSignature',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDatabase b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDatabase
new file mode 100644
index 0000000..6272bdb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDatabase
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDatabase'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDatabaseTestCases, CtsDatabaseTestCases[instant], CtsDatabaseTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDatabase',
+        test_name='cheets_CTS_R.internal.arm.CtsDatabase',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDatabaseTestCases', '--include-filter', 'CtsDatabaseTestCases[instant]', '--include-filter', 'CtsDatabaseTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDatabase',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqp.32 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqp.32
new file mode 100644
index 0000000..22ecfdc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqp.32
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDeqp.32'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases, CtsDeqpTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDeqp.32',
+        test_name='cheets_CTS_R.internal.arm.CtsDeqp.32',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--abi', 'armeabi-v7a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqp',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=111600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqp.64 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqp.64
new file mode 100644
index 0000000..ea13ccf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqp.64
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDeqp.64'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases, CtsDeqpTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDeqp.64',
+        test_name='cheets_CTS_R.internal.arm.CtsDeqp.64',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--include-filter', 'CtsDeqpTestCases[secondary_user]', '--abi', 'arm64-v8a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqp',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=111600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-EGL b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-EGL
new file mode 100644
index 0000000..49a18ee
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-EGL
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-EGL'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-EGL of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-EGL',
+        test_name='cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-EGL',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-EGL.*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES2 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES2
new file mode 100644
index 0000000..8c29305
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES2
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-GLES2'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES2 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-GLES2',
+        test_name='cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-GLES2',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES2.*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3
new file mode 100644
index 0000000..b5f19db
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-GLES3'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-GLES3',
+        test_name='cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-GLES3',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES3.*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
new file mode 100644
index 0000000..7d9c6f9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases dEQP-GLES3.functional.prerequisite#*', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES31 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES31
new file mode 100644
index 0000000..84c30dc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-GLES31
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-GLES31'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES31 of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-GLES31',
+        test_name='cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-GLES31',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES31.*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-VK.32 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-VK.32
new file mode 100644
index 0000000..147cd2b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-VK.32
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-VK.32'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-VK of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-VK.32',
+        test_name='cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-VK.32',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-VK.*', '--abi', 'armeabi-v7a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-VK.64 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-VK.64
new file mode 100644
index 0000000..5d081d7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDeqpTestCases.dEQP-VK.64
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-VK.64'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-VK of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.CtsDeqpTestCases.dEQP-VK.64',
+        test_name='cheets_CTS_R.internal.arm.CtsDeqpTestCases.dEQP-VK.64',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-VK.*', '--abi', 'arm64-v8a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDevice b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDevice
new file mode 100644
index 0000000..9f6ece0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDevice
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDevice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeviceConfigTestCases, CtsDeviceConfigTestCases[instant], CtsDeviceConfigTestCases[secondary_user], CtsDeviceIdleHostTestCases, CtsDeviceIdleHostTestCases[secondary_user], CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDevice',
+        test_name='cheets_CTS_R.internal.arm.CtsDevice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceConfigTestCases', '--include-filter', 'CtsDeviceConfigTestCases[instant]', '--include-filter', 'CtsDeviceConfigTestCases[secondary_user]', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDeviceIdleHostTestCases[secondary_user]', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDevice',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDexMetadataHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDexMetadataHost
new file mode 100644
index 0000000..3dfffb5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDexMetadataHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDexMetadataHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDexMetadataHostTestCases, CtsDexMetadataHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDexMetadataHost',
+        test_name='cheets_CTS_R.internal.arm.CtsDexMetadataHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDexMetadataHostTestCases', '--include-filter', 'CtsDexMetadataHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDexMetadataHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDisplay b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDisplay
new file mode 100644
index 0000000..09a0fd1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDisplay
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDisplay'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDisplayTestCases, CtsDisplayTestCases[instant], CtsDisplayTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDisplay',
+        test_name='cheets_CTS_R.internal.arm.CtsDisplay',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDisplayTestCases', '--include-filter', 'CtsDisplayTestCases[instant]', '--include-filter', 'CtsDisplayTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDisplay',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDownloadManager b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDownloadManager
new file mode 100644
index 0000000..5a6d701
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDownloadManager
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDownloadManager'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDownloadManagerApi28, CtsDownloadManagerApi28[secondary_user], CtsDownloadManagerInstaller, CtsDownloadManagerInstaller[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDownloadManager',
+        test_name='cheets_CTS_R.internal.arm.CtsDownloadManager',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDownloadManagerApi28', '--include-filter', 'CtsDownloadManagerApi28[secondary_user]', '--include-filter', 'CtsDownloadManagerInstaller', '--include-filter', 'CtsDownloadManagerInstaller[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDownloadManager',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDpi b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDpi
new file mode 100644
index 0000000..0cd282b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDpi
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDpi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDpiTestCases, CtsDpiTestCases2, CtsDpiTestCases2[secondary_user], CtsDpiTestCases[instant], CtsDpiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDpi',
+        test_name='cheets_CTS_R.internal.arm.CtsDpi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--include-filter', 'CtsDpiTestCases2[secondary_user]', '--include-filter', 'CtsDpiTestCases[instant]', '--include-filter', 'CtsDpiTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDpi',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDreams b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDreams
new file mode 100644
index 0000000..080a8fa
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDreams
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDreams'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDreamsTestCases, CtsDreamsTestCases[instant], CtsDreamsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDreams',
+        test_name='cheets_CTS_R.internal.arm.CtsDreams',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDreamsTestCases', '--include-filter', 'CtsDreamsTestCases[instant]', '--include-filter', 'CtsDreamsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDreams',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDrm b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDrm
new file mode 100644
index 0000000..2088e86
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDrm
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDrm'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDrmTestCases, CtsDrmTestCases[instant], CtsDrmTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDrm',
+        test_name='cheets_CTS_R.internal.arm.CtsDrm',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDrmTestCases', '--include-filter', 'CtsDrmTestCases[instant]', '--include-filter', 'CtsDrmTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDrm',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDropBoxManagerTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDropBoxManagerTestCases
new file mode 100644
index 0000000..3951839
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDropBoxManagerTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDropBoxManagerTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDropBoxManagerTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDropBoxManagerTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsDropBoxManagerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDropBoxManagerTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDropBoxManagerTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDumpsysHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDumpsysHost
new file mode 100644
index 0000000..4fabed8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDumpsysHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDumpsysHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDumpsysHostTestCases, CtsDumpsysHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDumpsysHost',
+        test_name='cheets_CTS_R.internal.arm.CtsDumpsysHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDumpsysHostTestCases', '--include-filter', 'CtsDumpsysHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDumpsysHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDynamic b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDynamic
new file mode 100644
index 0000000..e236500
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsDynamic
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsDynamic'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDynamicLinkerTestCases, CtsDynamicLinkerTestCases[instant], CtsDynamicLinkerTestCases[secondary_user], CtsDynamicMimeHostTestCases, CtsDynamicMimeHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsDynamic',
+        test_name='cheets_CTS_R.internal.arm.CtsDynamic',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDynamicLinkerTestCases', '--include-filter', 'CtsDynamicLinkerTestCases[instant]', '--include-filter', 'CtsDynamicLinkerTestCases[secondary_user]', '--include-filter', 'CtsDynamicMimeHostTestCases', '--include-filter', 'CtsDynamicMimeHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDynamic',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsEdiHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsEdiHost
new file mode 100644
index 0000000..9b72e3d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsEdiHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsEdiHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsEdiHostTestCases, CtsEdiHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsEdiHost',
+        test_name='cheets_CTS_R.internal.arm.CtsEdiHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsEdiHostTestCases', '--include-filter', 'CtsEdiHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsEdiHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsEffect b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsEffect
new file mode 100644
index 0000000..adda109
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsEffect
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsEffect'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsEffectTestCases, CtsEffectTestCases[instant], CtsEffectTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsEffect',
+        test_name='cheets_CTS_R.internal.arm.CtsEffect',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsEffectTestCases', '--include-filter', 'CtsEffectTestCases[instant]', '--include-filter', 'CtsEffectTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsEffect',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsExtendedMocking b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsExtendedMocking
new file mode 100644
index 0000000..08fd598
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsExtendedMocking
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsExtendedMocking'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsExtendedMockingTestCases, CtsExtendedMockingTestCases[instant], CtsExtendedMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsExtendedMocking',
+        test_name='cheets_CTS_R.internal.arm.CtsExtendedMocking',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExtendedMockingTestCases', '--include-filter', 'CtsExtendedMockingTestCases[instant]', '--include-filter', 'CtsExtendedMockingTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsExtendedMocking',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsExternalService b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsExternalService
new file mode 100644
index 0000000..f240a4a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsExternalService
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsExternalService'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsExternalServiceTestCases, CtsExternalServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsExternalService',
+        test_name='cheets_CTS_R.internal.arm.CtsExternalService',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalServiceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsExternalService',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsExtractNativeLibsHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsExtractNativeLibsHost
new file mode 100644
index 0000000..1118a41
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsExtractNativeLibsHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsExtractNativeLibsHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsExtractNativeLibsHostTestCases, CtsExtractNativeLibsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsExtractNativeLibsHost',
+        test_name='cheets_CTS_R.internal.arm.CtsExtractNativeLibsHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExtractNativeLibsHostTestCases', '--include-filter', 'CtsExtractNativeLibsHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsExtractNativeLibsHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsFileSystem b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsFileSystem
new file mode 100644
index 0000000..556896d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsFileSystem
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsFileSystem'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFileSystemTestCases, CtsFileSystemTestCases[instant], CtsFileSystemTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsFileSystem',
+        test_name='cheets_CTS_R.internal.arm.CtsFileSystem',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFileSystemTestCases', '--include-filter', 'CtsFileSystemTestCases[instant]', '--include-filter', 'CtsFileSystemTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsFileSystem',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsFragment b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsFragment
new file mode 100644
index 0000000..11cb50b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsFragment
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsFragment'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsFragmentTestCasesSdk26[instant], CtsFragmentTestCasesSdk26[secondary_user], CtsFragmentTestCases[instant], CtsFragmentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsFragment',
+        test_name='cheets_CTS_R.internal.arm.CtsFragment',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--include-filter', 'CtsFragmentTestCasesSdk26[instant]', '--include-filter', 'CtsFragmentTestCasesSdk26[secondary_user]', '--include-filter', 'CtsFragmentTestCases[instant]', '--include-filter', 'CtsFragmentTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsFragment',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsFsMgr b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsFsMgr
new file mode 100644
index 0000000..7a57c92
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsFsMgr
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsFsMgr'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFsMgrTestCases, CtsFsMgrTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsFsMgr',
+        test_name='cheets_CTS_R.internal.arm.CtsFsMgr',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFsMgrTestCases', '--include-filter', 'CtsFsMgrTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsFsMgr',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGesture b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGesture
new file mode 100644
index 0000000..1f4b910
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGesture
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsGesture'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGestureTestCases, CtsGestureTestCases[instant], CtsGestureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsGesture',
+        test_name='cheets_CTS_R.internal.arm.CtsGesture',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGestureTestCases', '--include-filter', 'CtsGestureTestCases[instant]', '--include-filter', 'CtsGestureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsGesture',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGpu b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGpu
new file mode 100644
index 0000000..20ad134
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGpu
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsGpu'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGpuProfilingDataTestCases, CtsGpuProfilingDataTestCases[secondary_user], CtsGpuToolsHostTestCases, CtsGpuToolsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsGpu',
+        test_name='cheets_CTS_R.internal.arm.CtsGpu',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGpuProfilingDataTestCases', '--include-filter', 'CtsGpuProfilingDataTestCases[secondary_user]', '--include-filter', 'CtsGpuToolsHostTestCases', '--include-filter', 'CtsGpuToolsHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsGpu',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGraphics b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGraphics
new file mode 100644
index 0000000..f5a11ea
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGraphics
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsGraphics'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGraphicsTestCases, CtsGraphicsTestCases[instant], CtsGraphicsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsGraphics',
+        test_name='cheets_CTS_R.internal.arm.CtsGraphics',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGraphicsTestCases', '--include-filter', 'CtsGraphicsTestCases[instant]', '--include-filter', 'CtsGraphicsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsGraphics',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGwpAsan b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGwpAsan
new file mode 100644
index 0000000..eee9eea
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsGwpAsan
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsGwpAsan'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGwpAsanTestCases, CtsGwpAsanTestCases[instant], CtsGwpAsanTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsGwpAsan',
+        test_name='cheets_CTS_R.internal.arm.CtsGwpAsan',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGwpAsanTestCases', '--include-filter', 'CtsGwpAsanTestCases[instant]', '--include-filter', 'CtsGwpAsanTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsGwpAsan',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHardware
new file mode 100644
index 0000000..48b8547
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsHardware'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHardwareTestCases, CtsHardwareTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=30,
+        tag='internal.arm.CtsHardware',
+        test_name='cheets_CTS_R.internal.arm.CtsHardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHardwareTestCases', '--include-filter', 'CtsHardwareTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHardware',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHarmfulAppWarningHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHarmfulAppWarningHost
new file mode 100644
index 0000000..864edf5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHarmfulAppWarningHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsHarmfulAppWarningHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHarmfulAppWarningHostTestCases, CtsHarmfulAppWarningHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsHarmfulAppWarningHost',
+        test_name='cheets_CTS_R.internal.arm.CtsHarmfulAppWarningHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHarmfulAppWarningHostTestCases', '--include-filter', 'CtsHarmfulAppWarningHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHarmfulAppWarningHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHdmiCecHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHdmiCecHost
new file mode 100644
index 0000000..044632f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHdmiCecHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsHdmiCecHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHdmiCecHostTestCases, CtsHdmiCecHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsHdmiCecHost',
+        test_name='cheets_CTS_R.internal.arm.CtsHdmiCecHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHdmiCecHostTestCases', '--include-filter', 'CtsHdmiCecHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHdmiCecHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHiddenApi b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHiddenApi
new file mode 100644
index 0000000..c6511bf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHiddenApi
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsHiddenApi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistApi27TestCases[secondary_user], CtsHiddenApiBlacklistApi28TestCases, CtsHiddenApiBlacklistApi28TestCases[secondary_user], CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user], CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiBlacklistDebugClassTestCases[secondary_user], CtsHiddenApiBlacklistTestApiTestCases, CtsHiddenApiBlacklistTestApiTestCases[secondary_user], CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases[instant], CtsHiddenApiKillswitchDebugClassTestCases[secondary_user], CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWhitelistTestCases[instant], CtsHiddenApiKillswitchWhitelistTestCases[secondary_user], CtsHiddenApiKillswitchWildcardTestCases, CtsHiddenApiKillswitchWildcardTestCases[instant], CtsHiddenApiKillswitchWildcardTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsHiddenApi',
+        test_name='cheets_CTS_R.internal.arm.CtsHiddenApi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHiddenApi',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHostTzData b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHostTzData
new file mode 100644
index 0000000..7d790a6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHostTzData
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsHostTzData'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHostTzDataTests, CtsHostTzDataTests[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsHostTzData',
+        test_name='cheets_CTS_R.internal.arm.CtsHostTzData',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostTzDataTests', '--include-filter', 'CtsHostTzDataTests[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHostTzData',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHostside b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHostside
new file mode 100644
index 0000000..15ef03e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsHostside
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsHostside'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHostsideNetworkTests, CtsHostsideNetworkTests[instant], CtsHostsideNetworkTests[secondary_user], CtsHostsideNumberBlockingTestCases, CtsHostsideNumberBlockingTestCases[secondary_user], CtsHostsideTvTests, CtsHostsideTvTests[secondary_user], CtsHostsideWebViewTests, CtsHostsideWebViewTests[instant], CtsHostsideWebViewTests[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsHostside',
+        test_name='cheets_CTS_R.internal.arm.CtsHostside',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNetworkTests[instant]', '--include-filter', 'CtsHostsideNetworkTests[secondary_user]', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideNumberBlockingTestCases[secondary_user]', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideTvTests[secondary_user]', '--include-filter', 'CtsHostsideWebViewTests', '--include-filter', 'CtsHostsideWebViewTests[instant]', '--include-filter', 'CtsHostsideWebViewTests[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHostside',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=19800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIcu b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIcu
new file mode 100644
index 0000000..8c1b1df
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIcu
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsIcu'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIcuTestCases, CtsIcuTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsIcu',
+        test_name='cheets_CTS_R.internal.arm.CtsIcu',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIcuTestCases', '--include-filter', 'CtsIcuTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIcu',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIdentity b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIdentity
new file mode 100644
index 0000000..ebc2fb1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIdentity
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsIdentity'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIdentityTestCases, CtsIdentityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsIdentity',
+        test_name='cheets_CTS_R.internal.arm.CtsIdentity',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIdentityTestCases', '--include-filter', 'CtsIdentityTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIdentity',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIke b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIke
new file mode 100644
index 0000000..efc1fb5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIke
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsIke'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIkeTestCases, CtsIkeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsIke',
+        test_name='cheets_CTS_R.internal.arm.CtsIke',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIkeTestCases', '--include-filter', 'CtsIkeTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIke',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIncidentHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIncidentHost
new file mode 100644
index 0000000..dbca139
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIncidentHost
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsIncidentHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIncidentHostTestCases, CtsIncidentHostTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='internal.arm.CtsIncidentHost',
+        test_name='cheets_CTS_R.internal.arm.CtsIncidentHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIncidentHostTestCases', '--include-filter', 'CtsIncidentHostTestCases[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIncidentHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIncrementalInstallHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIncrementalInstallHost
new file mode 100644
index 0000000..0334a5b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIncrementalInstallHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsIncrementalInstallHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIncrementalInstallHostTestCases, CtsIncrementalInstallHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsIncrementalInstallHost',
+        test_name='cheets_CTS_R.internal.arm.CtsIncrementalInstallHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIncrementalInstallHostTestCases', '--include-filter', 'CtsIncrementalInstallHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIncrementalInstallHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInit b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInit
new file mode 100644
index 0000000..d3fe418
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInit
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsInit'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInitTestCases, CtsInitTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsInit',
+        test_name='cheets_CTS_R.internal.arm.CtsInit',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInitTestCases', '--include-filter', 'CtsInitTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsInit',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInlineMocking b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInlineMocking
new file mode 100644
index 0000000..f1be6fe
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInlineMocking
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsInlineMocking'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInlineMockingTestCases, CtsInlineMockingTestCases[instant], CtsInlineMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsInlineMocking',
+        test_name='cheets_CTS_R.internal.arm.CtsInlineMocking',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInlineMockingTestCases', '--include-filter', 'CtsInlineMockingTestCases[instant]', '--include-filter', 'CtsInlineMockingTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsInlineMocking',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInputMethod b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInputMethod
new file mode 100644
index 0000000..b76b351
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInputMethod
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsInputMethod'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInputMethodServiceHostTestCases, CtsInputMethodServiceHostTestCases[instant], CtsInputMethodServiceHostTestCases[secondary_user], CtsInputMethodTestCases, CtsInputMethodTestCases[instant], CtsInputMethodTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsInputMethod',
+        test_name='cheets_CTS_R.internal.arm.CtsInputMethod',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodServiceHostTestCases[instant]', '--include-filter', 'CtsInputMethodServiceHostTestCases[secondary_user]', '--include-filter', 'CtsInputMethodTestCases', '--include-filter', 'CtsInputMethodTestCases[instant]', '--include-filter', 'CtsInputMethodTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsInputMethod',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInstantApp b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInstantApp
new file mode 100644
index 0000000..41f19f8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsInstantApp
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsInstantApp'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInstantAppTests, CtsInstantAppTests[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsInstantApp',
+        test_name='cheets_CTS_R.internal.arm.CtsInstantApp',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInstantAppTests', '--include-filter', 'CtsInstantAppTests[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsInstantApp',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIntentSignature b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIntentSignature
new file mode 100644
index 0000000..5ea1d02
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsIntentSignature
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsIntentSignature'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIntentSignatureTestCases, CtsIntentSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsIntentSignature',
+        test_name='cheets_CTS_R.internal.arm.CtsIntentSignature',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIntentSignatureTestCases', '--include-filter', 'CtsIntentSignatureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIntentSignature',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJdwp b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJdwp
new file mode 100644
index 0000000..934ac2f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJdwp
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsJdwp'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJdwpSecurityHostTestCases, CtsJdwpSecurityHostTestCases[secondary_user], CtsJdwpTestCases, CtsJdwpTestCases[instant], CtsJdwpTestCases[secondary_user], CtsJdwpTunnelHostTestCases, CtsJdwpTunnelHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsJdwp',
+        test_name='cheets_CTS_R.internal.arm.CtsJdwp',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsJdwpTestCases', '--include-filter', 'CtsJdwpTestCases[instant]', '--include-filter', 'CtsJdwpTestCases[secondary_user]', '--include-filter', 'CtsJdwpTunnelHostTestCases', '--include-filter', 'CtsJdwpTunnelHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsJdwp',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=14400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJni b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJni
new file mode 100644
index 0000000..e50d15b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJni
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsJni'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJniTestCases, CtsJniTestCases[instant], CtsJniTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsJni',
+        test_name='cheets_CTS_R.internal.arm.CtsJni',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJniTestCases', '--include-filter', 'CtsJniTestCases[instant]', '--include-filter', 'CtsJniTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsJni',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJobScheduler b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJobScheduler
new file mode 100644
index 0000000..6415307
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJobScheduler
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsJobScheduler'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerSharedUidTestCases[secondary_user], CtsJobSchedulerTestCases, CtsJobSchedulerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsJobScheduler',
+        test_name='cheets_CTS_R.internal.arm.CtsJobScheduler',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerSharedUidTestCases[secondary_user]', '--include-filter', 'CtsJobSchedulerTestCases', '--include-filter', 'CtsJobSchedulerTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsJobScheduler',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJvmti b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJvmti
new file mode 100644
index 0000000..ca2cea8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsJvmti
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsJvmti'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingHostTestCases[secondary_user], CtsJvmtiAttachingTestCases, CtsJvmtiAttachingTestCases[secondary_user], CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRedefineClassesHostTestCases[secondary_user], CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1900HostTestCases[secondary_user], CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1901HostTestCases[secondary_user], CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1902HostTestCases[secondary_user], CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1903HostTestCases[secondary_user], CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1904HostTestCases[secondary_user], CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1906HostTestCases[secondary_user], CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1907HostTestCases[secondary_user], CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1908HostTestCases[secondary_user], CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1909HostTestCases[secondary_user], CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1910HostTestCases[secondary_user], CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1911HostTestCases[secondary_user], CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1912HostTestCases[secondary_user], CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1913HostTestCases[secondary_user], CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1914HostTestCases[secondary_user], CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1915HostTestCases[secondary_user], CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1916HostTestCases[secondary_user], CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1917HostTestCases[secondary_user], CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1920HostTestCases[secondary_user], CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1921HostTestCases[secondary_user], CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1922HostTestCases[secondary_user], CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1923HostTestCases[secondary_user], CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1924HostTestCases[secondary_user], CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1925HostTestCases[secondary_user], CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1926HostTestCases[secondary_user], CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1927HostTestCases[secondary_user], CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1928HostTestCases[secondary_user], CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1930HostTestCases[secondary_user], CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1931HostTestCases[secondary_user], CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1932HostTestCases[secondary_user], CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1933HostTestCases[secondary_user], CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1934HostTestCases[secondary_user], CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1936HostTestCases[secondary_user], CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1937HostTestCases[secondary_user], CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1939HostTestCases[secondary_user], CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1941HostTestCases[secondary_user], CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1942HostTestCases[secondary_user], CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest1943HostTestCases[secondary_user], CtsJvmtiRunTest1953HostTestCases, CtsJvmtiRunTest1953HostTestCases[secondary_user], CtsJvmtiRunTest1958HostTestCases, CtsJvmtiRunTest1958HostTestCases[secondary_user], 
CtsJvmtiRunTest1962HostTestCases, CtsJvmtiRunTest1962HostTestCases[secondary_user], CtsJvmtiRunTest1967HostTestCases, CtsJvmtiRunTest1967HostTestCases[secondary_user], CtsJvmtiRunTest1968HostTestCases, CtsJvmtiRunTest1968HostTestCases[secondary_user], CtsJvmtiRunTest1969HostTestCases, CtsJvmtiRunTest1969HostTestCases[secondary_user], CtsJvmtiRunTest1970HostTestCases, CtsJvmtiRunTest1970HostTestCases[secondary_user], CtsJvmtiRunTest1971HostTestCases, CtsJvmtiRunTest1971HostTestCases[secondary_user], CtsJvmtiRunTest1974HostTestCases, CtsJvmtiRunTest1974HostTestCases[secondary_user], CtsJvmtiRunTest1975HostTestCases, CtsJvmtiRunTest1975HostTestCases[secondary_user], CtsJvmtiRunTest1976HostTestCases, CtsJvmtiRunTest1976HostTestCases[secondary_user], CtsJvmtiRunTest1977HostTestCases, CtsJvmtiRunTest1977HostTestCases[secondary_user], CtsJvmtiRunTest1978HostTestCases, CtsJvmtiRunTest1978HostTestCases[secondary_user], CtsJvmtiRunTest1979HostTestCases, CtsJvmtiRunTest1979HostTestCases[secondary_user], CtsJvmtiRunTest1981HostTestCases, CtsJvmtiRunTest1981HostTestCases[secondary_user], CtsJvmtiRunTest1982HostTestCases, CtsJvmtiRunTest1982HostTestCases[secondary_user], CtsJvmtiRunTest1983HostTestCases, CtsJvmtiRunTest1983HostTestCases[secondary_user], CtsJvmtiRunTest1984HostTestCases, CtsJvmtiRunTest1984HostTestCases[secondary_user], CtsJvmtiRunTest1988HostTestCases, CtsJvmtiRunTest1988HostTestCases[secondary_user], CtsJvmtiRunTest1989HostTestCases, CtsJvmtiRunTest1989HostTestCases[secondary_user], CtsJvmtiRunTest1990HostTestCases, CtsJvmtiRunTest1990HostTestCases[secondary_user], CtsJvmtiRunTest1991HostTestCases, CtsJvmtiRunTest1991HostTestCases[secondary_user], CtsJvmtiRunTest1992HostTestCases, CtsJvmtiRunTest1992HostTestCases[secondary_user], CtsJvmtiRunTest1994HostTestCases, CtsJvmtiRunTest1994HostTestCases[secondary_user], CtsJvmtiRunTest1995HostTestCases, CtsJvmtiRunTest1995HostTestCases[secondary_user], CtsJvmtiRunTest1996HostTestCases, CtsJvmtiRunTest1996HostTestCases[secondary_user], CtsJvmtiRunTest1997HostTestCases, CtsJvmtiRunTest1997HostTestCases[secondary_user], CtsJvmtiRunTest1998HostTestCases, CtsJvmtiRunTest1998HostTestCases[secondary_user], CtsJvmtiRunTest1999HostTestCases, CtsJvmtiRunTest1999HostTestCases[secondary_user], CtsJvmtiRunTest2001HostTestCases, CtsJvmtiRunTest2001HostTestCases[secondary_user], CtsJvmtiRunTest2002HostTestCases, CtsJvmtiRunTest2002HostTestCases[secondary_user], CtsJvmtiRunTest2003HostTestCases, CtsJvmtiRunTest2003HostTestCases[secondary_user], CtsJvmtiRunTest2004HostTestCases, CtsJvmtiRunTest2004HostTestCases[secondary_user], CtsJvmtiRunTest2005HostTestCases, CtsJvmtiRunTest2005HostTestCases[secondary_user], CtsJvmtiRunTest2006HostTestCases, CtsJvmtiRunTest2006HostTestCases[secondary_user], CtsJvmtiRunTest2007HostTestCases, CtsJvmtiRunTest2007HostTestCases[secondary_user], CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest902HostTestCases[secondary_user], CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest903HostTestCases[secondary_user], CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest904HostTestCases[secondary_user], CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest905HostTestCases[secondary_user], CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest906HostTestCases[secondary_user], CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest907HostTestCases[secondary_user], CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest908HostTestCases[secondary_user], CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest910HostTestCases[secondary_user], CtsJvmtiRunTest911HostTestCases, 
CtsJvmtiRunTest911HostTestCases[secondary_user], CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest912HostTestCases[secondary_user], CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest913HostTestCases[secondary_user], CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest914HostTestCases[secondary_user], CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest915HostTestCases[secondary_user], CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest917HostTestCases[secondary_user], CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest918HostTestCases[secondary_user], CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest919HostTestCases[secondary_user], CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest920HostTestCases[secondary_user], CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest922HostTestCases[secondary_user], CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest923HostTestCases[secondary_user], CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest924HostTestCases[secondary_user], CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest926HostTestCases[secondary_user], CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest927HostTestCases[secondary_user], CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest928HostTestCases[secondary_user], CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest930HostTestCases[secondary_user], CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest931HostTestCases[secondary_user], CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest932HostTestCases[secondary_user], CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest940HostTestCases[secondary_user], CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest942HostTestCases[secondary_user], CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest944HostTestCases[secondary_user], CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest945HostTestCases[secondary_user], CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest947HostTestCases[secondary_user], CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest951HostTestCases[secondary_user], CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest982HostTestCases[secondary_user], CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest983HostTestCases[secondary_user], CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest984HostTestCases[secondary_user], CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest985HostTestCases[secondary_user], CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest986HostTestCases[secondary_user], CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest988HostTestCases[secondary_user], CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest989HostTestCases[secondary_user], CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest990HostTestCases[secondary_user], CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest991HostTestCases[secondary_user], CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest992HostTestCases[secondary_user], CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest993HostTestCases[secondary_user], CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest994HostTestCases[secondary_user], CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest995HostTestCases[secondary_user], CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest996HostTestCases[secondary_user], CtsJvmtiRunTest997HostTestCases, CtsJvmtiRunTest997HostTestCases[secondary_user], CtsJvmtiTaggingHostTestCases, CtsJvmtiTaggingHostTestCases[secondary_user], CtsJvmtiTrackingHostTestCases, CtsJvmtiTrackingHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsJvmti',
+        test_name='cheets_CTS_R.internal.arm.CtsJvmti',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases[secondary_user]', '--include-filter', 
'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1981HostTestCases', '--include-filter', 
'CtsJvmtiRunTest1981HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', 
'--include-filter', 'CtsJvmtiRunTest908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 
'CtsJvmtiRunTest984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsJvmti',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=75000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsKernelConfigTestCases
new file mode 100644
index 0000000..62d69dd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsKernelConfigTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsKernelConfigTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsKernelConfigTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsKernelConfigTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKernelConfigTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsKernelConfigTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsKeystore b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsKeystore
new file mode 100644
index 0000000..ac3b725
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsKeystore
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsKeystore'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsKeystoreTestCases, CtsKeystoreTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsKeystore',
+        test_name='cheets_CTS_R.internal.arm.CtsKeystore',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsKeystoreTestCases', '--include-filter', 'CtsKeystoreTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsKeystore',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLeanbackJank b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLeanbackJank
new file mode 100644
index 0000000..63b8187
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLeanbackJank
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsLeanbackJank'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLeanbackJankTestCases, CtsLeanbackJankTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLeanbackJank',
+        test_name='cheets_CTS_R.internal.arm.CtsLeanbackJank',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLeanbackJankTestCases', '--include-filter', 'CtsLeanbackJankTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLeanbackJank',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLegacyNotification2 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLegacyNotification2
new file mode 100644
index 0000000..7370316
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLegacyNotification2
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsLegacyNotification2'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLegacyNotification20TestCases, CtsLegacyNotification20TestCases[secondary_user], CtsLegacyNotification27TestCases, CtsLegacyNotification27TestCases[secondary_user], CtsLegacyNotification28TestCases, CtsLegacyNotification28TestCases[secondary_user], CtsLegacyNotification29TestCases, CtsLegacyNotification29TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLegacyNotification2',
+        test_name='cheets_CTS_R.internal.arm.CtsLegacyNotification2',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLegacyNotification20TestCases', '--include-filter', 'CtsLegacyNotification20TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification27TestCases', '--include-filter', 'CtsLegacyNotification27TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification28TestCases', '--include-filter', 'CtsLegacyNotification28TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification29TestCases', '--include-filter', 'CtsLegacyNotification29TestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLegacyNotification2',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLibcore b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLibcore
new file mode 100644
index 0000000..b1bfe0d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLibcore
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsLibcore'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLibcoreApiEvolutionTestCases, CtsLibcoreApiEvolutionTestCases[secondary_user], CtsLibcoreFileIOTestCases, CtsLibcoreFileIOTestCases[secondary_user], CtsLibcoreJsr166TestCases, CtsLibcoreJsr166TestCases[secondary_user], CtsLibcoreLegacy22TestCases, CtsLibcoreLegacy22TestCases[secondary_user], CtsLibcoreOjTestCases, CtsLibcoreOjTestCases[secondary_user], CtsLibcoreOkHttpTestCases, CtsLibcoreOkHttpTestCases[secondary_user], CtsLibcoreTestCases, CtsLibcoreTestCases[secondary_user], CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofBCTestCases[secondary_user], CtsLibcoreWycheproofConscryptTestCases, CtsLibcoreWycheproofConscryptTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLibcore',
+        test_name='cheets_CTS_R.internal.arm.CtsLibcore',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLibcoreApiEvolutionTestCases', '--include-filter', 'CtsLibcoreApiEvolutionTestCases[secondary_user]', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreFileIOTestCases[secondary_user]', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreJsr166TestCases[secondary_user]', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases[secondary_user]', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOjTestCases[secondary_user]', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases[secondary_user]', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases[secondary_user]'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLibcore',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=39600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLiblog b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLiblog
new file mode 100644
index 0000000..ee36322
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLiblog
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsLiblog'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLiblogTestCases, CtsLiblogTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLiblog',
+        test_name='cheets_CTS_R.internal.arm.CtsLiblog',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLiblogTestCases', '--include-filter', 'CtsLiblogTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLiblog',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLocation b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLocation
new file mode 100644
index 0000000..154ba5b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLocation
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsLocation'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLocationCoarseTestCases, CtsLocationCoarseTestCases[instant], CtsLocationCoarseTestCases[secondary_user], CtsLocationFineTestCases, CtsLocationFineTestCases[instant], CtsLocationFineTestCases[secondary_user], CtsLocationGnssTestCases, CtsLocationGnssTestCases[instant], CtsLocationGnssTestCases[secondary_user], CtsLocationNoneTestCases, CtsLocationNoneTestCases[instant], CtsLocationNoneTestCases[secondary_user], CtsLocationPrivilegedTestCases, CtsLocationPrivilegedTestCases[instant], CtsLocationPrivilegedTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLocation',
+        test_name='cheets_CTS_R.internal.arm.CtsLocation',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLocationCoarseTestCases', '--include-filter', 'CtsLocationCoarseTestCases[instant]', '--include-filter', 'CtsLocationCoarseTestCases[secondary_user]', '--include-filter', 'CtsLocationFineTestCases', '--include-filter', 'CtsLocationFineTestCases[instant]', '--include-filter', 'CtsLocationFineTestCases[secondary_user]', '--include-filter', 'CtsLocationGnssTestCases', '--include-filter', 'CtsLocationGnssTestCases[instant]', '--include-filter', 'CtsLocationGnssTestCases[secondary_user]', '--include-filter', 'CtsLocationNoneTestCases', '--include-filter', 'CtsLocationNoneTestCases[instant]', '--include-filter', 'CtsLocationNoneTestCases[secondary_user]', '--include-filter', 'CtsLocationPrivilegedTestCases', '--include-filter', 'CtsLocationPrivilegedTestCases[instant]', '--include-filter', 'CtsLocationPrivilegedTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLocation',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=28800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLogd b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLogd
new file mode 100644
index 0000000..236c3ea
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsLogd
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsLogd'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLogdTestCases, CtsLogdTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsLogd',
+        test_name='cheets_CTS_R.internal.arm.CtsLogd',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLogdTestCases', '--include-filter', 'CtsLogdTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLogd',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMatchFlag b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMatchFlag
new file mode 100644
index 0000000..9453bba
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMatchFlag
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMatchFlag'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMatchFlagTestCases, CtsMatchFlagTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMatchFlag',
+        test_name='cheets_CTS_R.internal.arm.CtsMatchFlag',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMatchFlagTestCases', '--include-filter', 'CtsMatchFlagTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMatchFlag',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaBitstreamsTestCases
new file mode 100644
index 0000000..f388ac9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaBitstreamsTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaBitstreamsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaBitstreamsTestCases, CtsMediaBitstreamsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaBitstreamsTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaBitstreamsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaBitstreamsTestCases', '--include-filter', 'CtsMediaBitstreamsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaBitstreamsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaHostTestCases
new file mode 100644
index 0000000..a0cafc3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaHostTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaHostTestCases, CtsMediaHostTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMediaHostTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaHostTestCases', '--include-filter', 'CtsMediaHostTestCases[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaParserTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaParserTestCases
new file mode 100644
index 0000000..f21a162
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaParserTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaParserTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaParserTestCases, CtsMediaParserTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMediaParserTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaParserTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaParserTestCases', '--include-filter', 'CtsMediaParserTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaParserTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaPerformanceClassTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaPerformanceClassTestCases
new file mode 100644
index 0000000..b3bc5b3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaPerformanceClassTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaPerformanceClassTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaPerformanceClassTestCases, CtsMediaPerformanceClassTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMediaPerformanceClassTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaPerformanceClassTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaPerformanceClassTestCases', '--include-filter', 'CtsMediaPerformanceClassTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaPerformanceClassTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaStressTestCases
new file mode 100644
index 0000000..b110c6b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaStressTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaStressTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases, CtsMediaStressTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaStressTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaStressTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases', '--include-filter', 'CtsMediaStressTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=21600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaStressTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaStressTestCases.camera.ctshardware
new file mode 100644
index 0000000..aadf36c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaStressTestCases.camera.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaStressTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases.camera of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaStressTestCases.camera.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaStressTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases android.mediastress.cts.MediaRecorderStressTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases.camera',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.32 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.32
new file mode 100644
index 0000000..68969cd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.32
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaTestCases.32'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases, CtsMediaTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaTestCases.32',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'armeabi-v7a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=39600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.64 b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.64
new file mode 100644
index 0000000..13f27e0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.64
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaTestCases.64'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases, CtsMediaTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaTestCases.64',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--include-filter', 'CtsMediaTestCases[instant]', '--logcat-on-failure', '--abi', 'arm64-v8a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=39600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.audio b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.audio
new file mode 100644
index 0000000..2f5a1ae
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.audio
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaTestCases.audio'
+ATTRIBUTES = 'suite:arc-cts-qual, suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases.audio of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaTestCases.audio',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaTestCases.audio',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFocusTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFormatTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioManagerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioMetadataTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlayRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackCaptureTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPreProcessingTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPresentationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordAppOpTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecord_BufferSizeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordingConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioSystemTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioSystemUsageTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackLatencyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackOffloadTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackSurroundTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrack_ListenerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolAacTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolHapticTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolMidiTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolOggTest', '--include-filter', 'CtsMediaTestCases android.media.cts.VolumeShaperTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.ctshardware
new file mode 100644
index 0000000..23cb978
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.ctshardware
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.perf b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.perf
new file mode 100644
index 0000000..af19ac6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.perf
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaTestCases.perf'
+ATTRIBUTES = 'suite:arc-cts-qual, suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases.perf of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaTestCases.perf',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaTestCases.perf',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.VideoDecoderPerfTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.video b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.video
new file mode 100644
index 0000000..39d9980
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaTestCases.video
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaTestCases.video'
+ATTRIBUTES = 'suite:arc-cts-qual, suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases.video of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.CtsMediaTestCases.video',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaTestCases.video',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AdaptivePlaybackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecodeAccuracyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecodeEditEncodeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecoderConformanceTest', '--include-filter', 'CtsMediaTestCases android.media.cts.EncodeDecodeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.ExtractDecodeEditEncodeMuxTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaCodecPlayerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaCodecPlayerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaDrmClearkeyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaRecorderTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaSynctest#testPlayVideo', '--include-filter', 'CtsMediaTestCases android.media.cts.VideoCodecTest', '--include-filter', 'CtsMediaTestCases android.media.cts.VideoEncoderTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaV2TestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaV2TestCases
new file mode 100644
index 0000000..9adde8a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMediaV2TestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMediaV2TestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaV2TestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMediaV2TestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsMediaV2TestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaV2TestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaV2TestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMidiTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMidiTestCases
new file mode 100644
index 0000000..4db4300
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMidiTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMidiTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild, suite:bvt-perbuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=3,
+        tag='internal.arm.CtsMidiTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsMidiTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMidiTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMidiTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMimeMap b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMimeMap
new file mode 100644
index 0000000..de63c0c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMimeMap
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMimeMap'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMimeMapTestCases, CtsMimeMapTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMimeMap',
+        test_name='cheets_CTS_R.internal.arm.CtsMimeMap',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMimeMapTestCases', '--include-filter', 'CtsMimeMapTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMimeMap',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMocking b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMocking
new file mode 100644
index 0000000..5c6c5fb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMocking
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMocking'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMockingDebuggableTestCases, CtsMockingDebuggableTestCases[instant], CtsMockingDebuggableTestCases[secondary_user], CtsMockingTestCases, CtsMockingTestCases[instant], CtsMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMocking',
+        test_name='cheets_CTS_R.internal.arm.CtsMocking',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingDebuggableTestCases[instant]', '--include-filter', 'CtsMockingDebuggableTestCases[secondary_user]', '--include-filter', 'CtsMockingTestCases', '--include-filter', 'CtsMockingTestCases[instant]', '--include-filter', 'CtsMockingTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMocking',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMonkey b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMonkey
new file mode 100644
index 0000000..88a8eb7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMonkey
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMonkey'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMonkeyTestCases, CtsMonkeyTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMonkey',
+        test_name='cheets_CTS_R.internal.arm.CtsMonkey',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMonkeyTestCases', '--include-filter', 'CtsMonkeyTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMonkey',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMultiUser b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMultiUser
new file mode 100644
index 0000000..98353f3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsMultiUser
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsMultiUser'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMultiUserHostTestCases, CtsMultiUserHostTestCases[instant], CtsMultiUserHostTestCases[secondary_user], CtsMultiUserTestCases, CtsMultiUserTestCases[instant], CtsMultiUserTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsMultiUser',
+        test_name='cheets_CTS_R.internal.arm.CtsMultiUser',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserHostTestCases[instant]', '--include-filter', 'CtsMultiUserHostTestCases[secondary_user]', '--include-filter', 'CtsMultiUserTestCases', '--include-filter', 'CtsMultiUserTestCases[instant]', '--include-filter', 'CtsMultiUserTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMultiUser',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNNAPI b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNNAPI
new file mode 100644
index 0000000..28409e5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNNAPI
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsNNAPI'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNNAPITestCases, CtsNNAPITestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNNAPI',
+        test_name='cheets_CTS_R.internal.arm.CtsNNAPI',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNNAPITestCases', '--include-filter', 'CtsNNAPITestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNNAPI',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNNAPIBenchmark b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNNAPIBenchmark
new file mode 100644
index 0000000..0ffadc1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNNAPIBenchmark
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsNNAPIBenchmark'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNNAPIBenchmarkTestCases, CtsNNAPIBenchmarkTestCases[instant], CtsNNAPIBenchmarkTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNNAPIBenchmark',
+        test_name='cheets_CTS_R.internal.arm.CtsNNAPIBenchmark',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNNAPIBenchmarkTestCases', '--include-filter', 'CtsNNAPIBenchmarkTestCases[instant]', '--include-filter', 'CtsNNAPIBenchmarkTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNNAPIBenchmark',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNative b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNative
new file mode 100644
index 0000000..1c7d844
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNative
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsNative'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeEncryptionTestCases, CtsNativeEncryptionTestCases[instant], CtsNativeEncryptionTestCases[secondary_user], CtsNativeHardwareTestCases, CtsNativeHardwareTestCases[secondary_user], CtsNativeMediaAAudioTestCases, CtsNativeMediaAAudioTestCases[instant], CtsNativeMediaAAudioTestCases[secondary_user], CtsNativeMediaMetricsTestCases, CtsNativeMediaMetricsTestCases[instant], CtsNativeMediaMetricsTestCases[secondary_user], CtsNativeMediaSlTestCases, CtsNativeMediaSlTestCases[instant], CtsNativeMediaSlTestCases[secondary_user], CtsNativeMediaXaTestCases, CtsNativeMediaXaTestCases[instant], CtsNativeMediaXaTestCases[secondary_user], CtsNativeMidiTestCases, CtsNativeMidiTestCases[secondary_user], CtsNativeNetDnsTestCases, CtsNativeNetDnsTestCases[instant], CtsNativeNetDnsTestCases[secondary_user], CtsNativeNetTestCases, CtsNativeNetTestCases[instant], CtsNativeNetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNative',
+        test_name='cheets_CTS_R.internal.arm.CtsNative',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeEncryptionTestCases', '--include-filter', 'CtsNativeEncryptionTestCases[instant]', '--include-filter', 'CtsNativeEncryptionTestCases[secondary_user]', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeHardwareTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases[instant]', '--include-filter', 'CtsNativeMediaAAudioTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaMetricsTestCases', '--include-filter', 'CtsNativeMediaMetricsTestCases[instant]', '--include-filter', 'CtsNativeMediaMetricsTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaSlTestCases[instant]', '--include-filter', 'CtsNativeMediaSlTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeMediaXaTestCases[instant]', '--include-filter', 'CtsNativeMediaXaTestCases[secondary_user]', '--include-filter', 'CtsNativeMidiTestCases', '--include-filter', 'CtsNativeMidiTestCases[secondary_user]', '--include-filter', 'CtsNativeNetDnsTestCases', '--include-filter', 'CtsNativeNetDnsTestCases[instant]', '--include-filter', 'CtsNativeNetDnsTestCases[secondary_user]', '--include-filter', 'CtsNativeNetTestCases', '--include-filter', 'CtsNativeNetTestCases[instant]', '--include-filter', 'CtsNativeNetTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNative',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=46800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNativeMediaAAudioTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNativeMediaAAudioTestCases.ctshardware
new file mode 100644
index 0000000..54f29de
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNativeMediaAAudioTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsNativeMediaAAudioTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNativeMediaAAudioTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsNativeMediaAAudioTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNativeMediaAAudioTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNdef b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNdef
new file mode 100644
index 0000000..76412a5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNdef
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsNdef'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNdefTestCases, CtsNdefTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNdef',
+        test_name='cheets_CTS_R.internal.arm.CtsNdef',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNdefTestCases', '--include-filter', 'CtsNdefTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNdef',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNdkBinder b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNdkBinder
new file mode 100644
index 0000000..c38a4d2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNdkBinder
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsNdkBinder'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNdkBinderTestCases, CtsNdkBinderTestCases[instant], CtsNdkBinderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNdkBinder',
+        test_name='cheets_CTS_R.internal.arm.CtsNdkBinder',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNdkBinderTestCases', '--include-filter', 'CtsNdkBinderTestCases[instant]', '--include-filter', 'CtsNdkBinderTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNdkBinder',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNet b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNet
new file mode 100644
index 0000000..580e70b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNet
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsNet'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNetApi23TestCases, CtsNetApi23TestCases[secondary_user], CtsNetSecConfigAttributeTestCases, CtsNetSecConfigAttributeTestCases[instant], CtsNetSecConfigAttributeTestCases[secondary_user], CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugDisabledTestCases[instant], CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user], CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases[instant], CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user], CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigBasicDomainConfigTestCases[instant], CtsNetSecConfigBasicDomainConfigTestCases[secondary_user], CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigCleartextTrafficTestCases[instant], CtsNetSecConfigCleartextTrafficTestCases[secondary_user], CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigDownloadManagerTestCases[secondary_user], CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigInvalidPinTestCases[instant], CtsNetSecConfigInvalidPinTestCases[secondary_user], CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigNestedDomainConfigTestCases[instant], CtsNetSecConfigNestedDomainConfigTestCases[secondary_user], CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user], CtsNetSecConfigResourcesSrcTestCases, CtsNetSecConfigResourcesSrcTestCases[instant], CtsNetSecConfigResourcesSrcTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user], CtsNetTestCases, CtsNetTestCasesInternetPermission, CtsNetTestCasesInternetPermission[instant], CtsNetTestCasesInternetPermission[secondary_user], CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyApi22[secondary_user], CtsNetTestCasesLegacyPermission22, CtsNetTestCasesLegacyPermission22[secondary_user], CtsNetTestCasesUpdateStatsPermission, CtsNetTestCasesUpdateStatsPermission[instant], CtsNetTestCasesUpdateStatsPermission[secondary_user], CtsNetTestCases[instant], CtsNetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNet',
+        test_name='cheets_CTS_R.internal.arm.CtsNet',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetApi23TestCases', '--include-filter', 'CtsNetApi23TestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigAttributeTestCases[instant]', '--include-filter', 'CtsNetSecConfigAttributeTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[instant]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[instant]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[instant]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user]', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesInternetPermission', '--include-filter', 'CtsNetTestCasesInternetPermission[instant]', '--include-filter', 'CtsNetTestCasesInternetPermission[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyApi22[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--include-filter', 'CtsNetTestCasesLegacyPermission22[secondary_user]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[instant]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[secondary_user]', '--include-filter', 'CtsNetTestCases[instant]', '--include-filter', 'CtsNetTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNet',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=90000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNetTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNetTestCases.ctshardware
new file mode 100644
index 0000000..a454565
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNetTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsNetTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNetTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsNetTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNetTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNfc b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNfc
new file mode 100644
index 0000000..10b522a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNfc
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsNfc'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNfcTestCases, CtsNfcTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNfc',
+        test_name='cheets_CTS_R.internal.arm.CtsNfc',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNfcTestCases', '--include-filter', 'CtsNfcTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNfc',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNoPermission b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNoPermission
new file mode 100644
index 0000000..a1a453a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsNoPermission
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsNoPermission'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNoPermissionTestCases, CtsNoPermissionTestCases25, CtsNoPermissionTestCases25[secondary_user], CtsNoPermissionTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsNoPermission',
+        test_name='cheets_CTS_R.internal.arm.CtsNoPermission',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNoPermissionTestCases', '--include-filter', 'CtsNoPermissionTestCases25', '--include-filter', 'CtsNoPermissionTestCases25[secondary_user]', '--include-filter', 'CtsNoPermissionTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNoPermission',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsOmapi b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsOmapi
new file mode 100644
index 0000000..1045b92
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsOmapi
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsOmapi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOmapiTestCases, CtsOmapiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsOmapi',
+        test_name='cheets_CTS_R.internal.arm.CtsOmapi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOmapiTestCases', '--include-filter', 'CtsOmapiTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsOmapi',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsOpenG b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsOpenG
new file mode 100644
index 0000000..4c80fc3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsOpenG
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsOpenG'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOpenGLTestCases, CtsOpenGLTestCases[secondary_user], CtsOpenGlPerf2TestCases, CtsOpenGlPerf2TestCases[secondary_user], CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsOpenG',
+        test_name='cheets_CTS_R.internal.arm.CtsOpenG',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGLTestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerf2TestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerfTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsOpenG',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsOs b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsOs
new file mode 100644
index 0000000..d793db8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsOs
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsOs'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOsHostTestCases, CtsOsHostTestCases[instant], CtsOsHostTestCases[secondary_user], CtsOsTestCases, CtsOsTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsOs',
+        test_name='cheets_CTS_R.internal.arm.CtsOs',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsHostTestCases[instant]', '--include-filter', 'CtsOsHostTestCases[secondary_user]', '--include-filter', 'CtsOsTestCases', '--include-filter', 'CtsOsTestCases[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsOs',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPackage b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPackage
new file mode 100644
index 0000000..d082c50
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPackage
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsPackage'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPackageInstallAppOpDefaultTestCases, CtsPackageInstallAppOpDefaultTestCases[instant], CtsPackageInstallAppOpDefaultTestCases[secondary_user], CtsPackageInstallAppOpDeniedTestCases, CtsPackageInstallAppOpDeniedTestCases[instant], CtsPackageInstallAppOpDeniedTestCases[secondary_user], CtsPackageInstallTestCases, CtsPackageInstallTestCases[instant], CtsPackageInstallTestCases[secondary_user], CtsPackageInstallerTapjackingTestCases, CtsPackageInstallerTapjackingTestCases[secondary_user], CtsPackageUninstallTestCases, CtsPackageUninstallTestCases[secondary_user], CtsPackageWatchdogTestCases, CtsPackageWatchdogTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPackage',
+        test_name='cheets_CTS_R.internal.arm.CtsPackage',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallTestCases', '--include-filter', 'CtsPackageInstallTestCases[instant]', '--include-filter', 'CtsPackageInstallTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallerTapjackingTestCases', '--include-filter', 'CtsPackageInstallerTapjackingTestCases[secondary_user]', '--include-filter', 'CtsPackageUninstallTestCases', '--include-filter', 'CtsPackageUninstallTestCases[secondary_user]', '--include-filter', 'CtsPackageWatchdogTestCases', '--include-filter', 'CtsPackageWatchdogTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPackage',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=28800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPdf b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPdf
new file mode 100644
index 0000000..4f2f2c4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPdf
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsPdf'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPdfTestCases, CtsPdfTestCases[instant], CtsPdfTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPdf',
+        test_name='cheets_CTS_R.internal.arm.CtsPdf',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPdfTestCases', '--include-filter', 'CtsPdfTestCases[instant]', '--include-filter', 'CtsPdfTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPdf',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPerfetto b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPerfetto
new file mode 100644
index 0000000..7f2d394
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPerfetto
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsPerfetto'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases, CtsPerfettoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPerfetto',
+        test_name='cheets_CTS_R.internal.arm.CtsPerfetto',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases', '--include-filter', 'CtsPerfettoTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPerfetto',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPerfettoTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPerfettoTestCases.ctshardware
new file mode 100644
index 0000000..9478c06
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPerfettoTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsPerfettoTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPerfettoTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsPerfettoTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPerfettoTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPermission b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPermission
new file mode 100644
index 0000000..0736f77
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPermission
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsPermission'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermission2TestCases, CtsPermission2TestCases[instant], CtsPermission3TestCases, CtsPermission3TestCases[secondary_user], CtsPermissionTestCases, CtsPermissionTestCasesSdk28, CtsPermissionTestCasesSdk28[instant], CtsPermissionTestCasesSdk28[secondary_user], CtsPermissionTestCasesTelephony, CtsPermissionTestCasesTelephony[instant], CtsPermissionTestCasesTelephony[secondary_user], CtsPermissionTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPermission',
+        test_name='cheets_CTS_R.internal.arm.CtsPermission',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermission2TestCases[instant]', '--include-filter', 'CtsPermission3TestCases', '--include-filter', 'CtsPermission3TestCases[secondary_user]', '--include-filter', 'CtsPermissionTestCases', '--include-filter', 'CtsPermissionTestCasesSdk28', '--include-filter', 'CtsPermissionTestCasesSdk28[instant]', '--include-filter', 'CtsPermissionTestCasesSdk28[secondary_user]', '--include-filter', 'CtsPermissionTestCasesTelephony', '--include-filter', 'CtsPermissionTestCasesTelephony[instant]', '--include-filter', 'CtsPermissionTestCasesTelephony[secondary_user]', '--include-filter', 'CtsPermissionTestCases[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPermission',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPermissionTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPermissionTestCases.camera.ctshardware
new file mode 100644
index 0000000..89d278f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPermissionTestCases.camera.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsPermissionTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermissionTestCases.camera of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPermissionTestCases.camera.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsPermissionTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermissionTestCases android.permission.cts.CameraPermissionTest', '--include-filter', 'CtsPermissionTestCases android.permission.cts.Camera2PermissionTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPermissionTestCases.camera',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPreference b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPreference
new file mode 100644
index 0000000..23db2e4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPreference
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsPreference'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPreferenceTestCases, CtsPreferenceTestCases[instant], CtsPreferenceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsPreference',
+        test_name='cheets_CTS_R.internal.arm.CtsPreference',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPreferenceTestCases', '--include-filter', 'CtsPreferenceTestCases[instant]', '--include-filter', 'CtsPreferenceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPreference',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPrint b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPrint
new file mode 100644
index 0000000..c063ffa
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsPrint
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsPrint'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPrintTestCases, CtsPrintTestCases[instant], CtsPrintTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        use_helpers=True,
+        tag='internal.arm.CtsPrint',
+        test_name='cheets_CTS_R.internal.arm.CtsPrint',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPrintTestCases', '--include-filter', 'CtsPrintTestCases[instant]', '--include-filter', 'CtsPrintTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPrint',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsProto b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsProto
new file mode 100644
index 0000000..10398a2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsProto
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsProto'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsProtoTestCases, CtsProtoTestCases[instant], CtsProtoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsProto',
+        test_name='cheets_CTS_R.internal.arm.CtsProto',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsProtoTestCases', '--include-filter', 'CtsProtoTestCases[instant]', '--include-filter', 'CtsProtoTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsProto',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsProvider b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsProvider
new file mode 100644
index 0000000..e49cc91
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsProvider
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsProvider'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsProviderTestCases, CtsProviderTestCases[secondary_user], CtsProviderUiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsProvider',
+        test_name='cheets_CTS_R.internal.arm.CtsProvider',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsProviderTestCases', '--include-filter', 'CtsProviderTestCases[secondary_user]', '--include-filter', 'CtsProviderUiTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsProvider',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsQuickAccessWallet b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsQuickAccessWallet
new file mode 100644
index 0000000..b98d081
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsQuickAccessWallet
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsQuickAccessWallet'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsQuickAccessWalletTestCases, CtsQuickAccessWalletTestCases[instant], CtsQuickAccessWalletTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsQuickAccessWallet',
+        test_name='cheets_CTS_R.internal.arm.CtsQuickAccessWallet',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsQuickAccessWalletTestCases', '--include-filter', 'CtsQuickAccessWalletTestCases[instant]', '--include-filter', 'CtsQuickAccessWalletTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsQuickAccessWallet',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRenderscript b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRenderscript
new file mode 100644
index 0000000..03d8c07
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRenderscript
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsRenderscript'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRenderscriptLegacyTestCases, CtsRenderscriptLegacyTestCases[secondary_user], CtsRenderscriptTestCases, CtsRenderscriptTestCases[instant], CtsRenderscriptTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsRenderscript',
+        test_name='cheets_CTS_R.internal.arm.CtsRenderscript',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptLegacyTestCases[secondary_user]', '--include-filter', 'CtsRenderscriptTestCases', '--include-filter', 'CtsRenderscriptTestCases[instant]', '--include-filter', 'CtsRenderscriptTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsRenderscript',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsResolverService b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsResolverService
new file mode 100644
index 0000000..fd58bc9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsResolverService
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsResolverService'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsResolverServiceTestCases, CtsResolverServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsResolverService',
+        test_name='cheets_CTS_R.internal.arm.CtsResolverService',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsResolverServiceTestCases', '--include-filter', 'CtsResolverServiceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsResolverService',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsResourcesLoader b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsResourcesLoader
new file mode 100644
index 0000000..eb18a08
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsResourcesLoader
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsResourcesLoader'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsResourcesLoaderTests, CtsResourcesLoaderTests[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsResourcesLoader',
+        test_name='cheets_CTS_R.internal.arm.CtsResourcesLoader',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsResourcesLoaderTests', '--include-filter', 'CtsResourcesLoaderTests[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsResourcesLoader',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRole b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRole
new file mode 100644
index 0000000..06d756a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRole
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsRole'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRoleTestCases, CtsRoleTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsRole',
+        test_name='cheets_CTS_R.internal.arm.CtsRole',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRoleTestCases', '--include-filter', 'CtsRoleTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsRole',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRollbackManagerHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRollbackManagerHostTestCases
new file mode 100644
index 0000000..c3e2da9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRollbackManagerHostTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsRollbackManagerHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRollbackManagerHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsRollbackManagerHostTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsRollbackManagerHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsRollbackManagerHostTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsRollbackManagerHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRs b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRs
new file mode 100644
index 0000000..e0c745c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsRs
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsRs'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRsBlasTestCases, CtsRsBlasTestCases[secondary_user], CtsRsCppTestCases, CtsRsCppTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsRs',
+        test_name='cheets_CTS_R.internal.arm.CtsRs',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsBlasTestCases[secondary_user]', '--include-filter', 'CtsRsCppTestCases', '--include-filter', 'CtsRsCppTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsRs',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSample b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSample
new file mode 100644
index 0000000..17b6839
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSample
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSample'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleDeviceTestCases[instant], CtsSampleDeviceTestCases[secondary_user], CtsSampleHostTestCases, CtsSampleHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSample',
+        test_name='cheets_CTS_R.internal.arm.CtsSample',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleDeviceTestCases[instant]', '--include-filter', 'CtsSampleDeviceTestCases[secondary_user]', '--include-filter', 'CtsSampleHostTestCases', '--include-filter', 'CtsSampleHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSample',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSax b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSax
new file mode 100644
index 0000000..f004721
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSax
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSax'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSaxTestCases, CtsSaxTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSax',
+        test_name='cheets_CTS_R.internal.arm.CtsSax',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSaxTestCases', '--include-filter', 'CtsSaxTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSax',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsScopedStorageHostTest b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsScopedStorageHostTest
new file mode 100644
index 0000000..5ada045
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsScopedStorageHostTest
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsScopedStorageHostTest'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsScopedStorageHostTest, CtsScopedStorageHostTest[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsScopedStorageHostTest',
+        test_name='cheets_CTS_R.internal.arm.CtsScopedStorageHostTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsScopedStorageHostTest', '--include-filter', 'CtsScopedStorageHostTest[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsScopedStorageHostTest',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSdkExtensions b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSdkExtensions
new file mode 100644
index 0000000..bef4977
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSdkExtensions
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSdkExtensions'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSdkExtensionsTestCases, CtsSdkExtensionsTestCases[instant], CtsSdkExtensionsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSdkExtensions',
+        test_name='cheets_CTS_R.internal.arm.CtsSdkExtensions',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSdkExtensionsTestCases', '--include-filter', 'CtsSdkExtensionsTestCases[instant]', '--include-filter', 'CtsSdkExtensionsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSdkExtensions',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSeccompHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSeccompHost
new file mode 100644
index 0000000..c80cc49
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSeccompHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSeccompHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSeccompHostTestCases, CtsSeccompHostTestCases[instant], CtsSeccompHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSeccompHost',
+        test_name='cheets_CTS_R.internal.arm.CtsSeccompHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSeccompHostTestCases', '--include-filter', 'CtsSeccompHostTestCases[instant]', '--include-filter', 'CtsSeccompHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSeccompHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSecure b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSecure
new file mode 100644
index 0000000..720cadd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSecure
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSecure'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases1[secondary_user], CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases2[secondary_user], CtsSecureElementAccessControlTestCases3, CtsSecureElementAccessControlTestCases3[secondary_user], CtsSecureFrpInstallTestCases, CtsSecureFrpInstallTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSecure',
+        test_name='cheets_CTS_R.internal.arm.CtsSecure',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--include-filter', 'CtsSecureElementAccessControlTestCases3[secondary_user]', '--include-filter', 'CtsSecureFrpInstallTestCases', '--include-filter', 'CtsSecureFrpInstallTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSecure',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSecurity b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSecurity
new file mode 100644
index 0000000..89ffddc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSecurity
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSecurity'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityBulletinHostTestCases, CtsSecurityBulletinHostTestCases[secondary_user], CtsSecurityHostTestCases, CtsSecurityHostTestCases[secondary_user], CtsSecurityTestCases, CtsSecurityTestCases[instant], CtsSecurityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSecurity',
+        test_name='cheets_CTS_R.internal.arm.CtsSecurity',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--include-filter', 'CtsSecurityBulletinHostTestCases[secondary_user]', '--include-filter', 'CtsSecurityHostTestCases', '--include-filter', 'CtsSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsSecurityTestCases', '--include-filter', 'CtsSecurityTestCases[instant]', '--include-filter', 'CtsSecurityTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSecurity',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=154800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSelinux b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSelinux
new file mode 100644
index 0000000..e5827d9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSelinux
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSelinux'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSelinuxEphemeralTestCases, CtsSelinuxEphemeralTestCases[instant], CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk25TestCases[secondary_user], CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdk27TestCases[secondary_user], CtsSelinuxTargetSdk28TestCases, CtsSelinuxTargetSdk28TestCases[secondary_user], CtsSelinuxTargetSdk29TestCases, CtsSelinuxTargetSdk29TestCases[secondary_user], CtsSelinuxTargetSdkCurrentTestCases, CtsSelinuxTargetSdkCurrentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSelinux',
+        test_name='cheets_CTS_R.internal.arm.CtsSelinux',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSelinuxEphemeralTestCases', '--include-filter', 'CtsSelinuxEphemeralTestCases[instant]', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk25TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk28TestCases', '--include-filter', 'CtsSelinuxTargetSdk28TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk29TestCases', '--include-filter', 'CtsSelinuxTargetSdk29TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSelinux',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSensor b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSensor
new file mode 100644
index 0000000..15c6c66
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSensor
@@ -0,0 +1,48 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSensor'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases, CtsSensorTestCases[instant], CtsSensorTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except Exception:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSensor',
+        test_name='cheets_CTS_R.internal.arm.CtsSensor',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases', '--include-filter', 'CtsSensorTestCases[instant]', '--include-filter', 'CtsSensorTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSensor',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        hard_reboot_on_failure=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSensorTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSensorTestCases.ctshardware
new file mode 100644
index 0000000..7af2ce9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSensorTestCases.ctshardware
@@ -0,0 +1,48 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSensorTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except Exception:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSensorTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsSensorTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSensorTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        hard_reboot_on_failure=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSettings b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSettings
new file mode 100644
index 0000000..7370743
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSettings
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSettings'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSettingsHostTestCases, CtsSettingsTestCases, CtsSettingsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSettings',
+        test_name='cheets_CTS_R.internal.arm.CtsSettings',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSettingsHostTestCases', '--include-filter', 'CtsSettingsTestCases', '--include-filter', 'CtsSettingsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSettings',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSharedLibsApiSignature b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSharedLibsApiSignature
new file mode 100644
index 0000000..58e28b3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSharedLibsApiSignature
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSharedLibsApiSignature'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSharedLibsApiSignatureTestCases, CtsSharedLibsApiSignatureTestCases[instant], CtsSharedLibsApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSharedLibsApiSignature',
+        test_name='cheets_CTS_R.internal.arm.CtsSharedLibsApiSignature',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSharedLibsApiSignatureTestCases', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[instant]', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSharedLibsApiSignature',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSharesheet b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSharesheet
new file mode 100644
index 0000000..72dfe2f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSharesheet
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSharesheet'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSharesheetTestCases, CtsSharesheetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSharesheet',
+        test_name='cheets_CTS_R.internal.arm.CtsSharesheet',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSharesheetTestCases', '--include-filter', 'CtsSharesheetTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSharesheet',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsShortcut b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsShortcut
new file mode 100644
index 0000000..1f06fe9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsShortcut
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsShortcut'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerLauncher1, CtsShortcutManagerLauncher1[secondary_user], CtsShortcutManagerLauncher2, CtsShortcutManagerLauncher2[secondary_user], CtsShortcutManagerLauncher3, CtsShortcutManagerLauncher3[secondary_user], CtsShortcutManagerLauncher4, CtsShortcutManagerLauncher4[secondary_user], CtsShortcutManagerPackage1, CtsShortcutManagerPackage1[secondary_user], CtsShortcutManagerPackage2, CtsShortcutManagerPackage2[secondary_user], CtsShortcutManagerPackage3, CtsShortcutManagerPackage3[secondary_user], CtsShortcutManagerPackage4, CtsShortcutManagerPackage4[secondary_user], CtsShortcutManagerTestCases, CtsShortcutManagerTestCases[secondary_user], CtsShortcutManagerThrottlingTest, CtsShortcutManagerThrottlingTest[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsShortcut',
+        test_name='cheets_CTS_R.internal.arm.CtsShortcut',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerLauncher1', '--include-filter', 'CtsShortcutManagerLauncher1[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher2', '--include-filter', 'CtsShortcutManagerLauncher2[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher3', '--include-filter', 'CtsShortcutManagerLauncher3[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher4', '--include-filter', 'CtsShortcutManagerLauncher4[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage1', '--include-filter', 'CtsShortcutManagerPackage1[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage2', '--include-filter', 'CtsShortcutManagerPackage2[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage3', '--include-filter', 'CtsShortcutManagerPackage3[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage4', '--include-filter', 'CtsShortcutManagerPackage4[secondary_user]', '--include-filter', 'CtsShortcutManagerTestCases', '--include-filter', 'CtsShortcutManagerTestCases[secondary_user]', '--include-filter', 'CtsShortcutManagerThrottlingTest', '--include-filter', 'CtsShortcutManagerThrottlingTest[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsShortcut',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=39600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSignedConfigHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSignedConfigHost
new file mode 100644
index 0000000..3cf7d5c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSignedConfigHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSignedConfigHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSignedConfigHostTestCases, CtsSignedConfigHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSignedConfigHost',
+        test_name='cheets_CTS_R.internal.arm.CtsSignedConfigHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSignedConfigHostTestCases', '--include-filter', 'CtsSignedConfigHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSignedConfigHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSimRestrictedApis b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSimRestrictedApis
new file mode 100644
index 0000000..4900f39
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSimRestrictedApis
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSimRestrictedApis'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimRestrictedApisTestCases, CtsSimRestrictedApisTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSimRestrictedApis',
+        test_name='cheets_CTS_R.internal.arm.CtsSimRestrictedApis',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSimRestrictedApisTestCases', '--include-filter', 'CtsSimRestrictedApisTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSimRestrictedApis',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSimpleCpu b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSimpleCpu
new file mode 100644
index 0000000..7c9c1a4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSimpleCpu
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSimpleCpu'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimpleCpuTestCases, CtsSimpleCpuTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSimpleCpu',
+        test_name='cheets_CTS_R.internal.arm.CtsSimpleCpu',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSimpleCpuTestCases', '--include-filter', 'CtsSimpleCpuTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSimpleCpu',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSimpleperfTestCases
new file mode 100644
index 0000000..3df63cf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSimpleperfTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSimpleperfTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSimpleperfTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsSimpleperfTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleperfTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSimpleperfTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSkQP b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSkQP
new file mode 100644
index 0000000..42aa03b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSkQP
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSkQP'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSkQPTestCases, CtsSkQPTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSkQP',
+        test_name='cheets_CTS_R.internal.arm.CtsSkQP',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSkQPTestCases', '--include-filter', 'CtsSkQPTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSkQP',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSlice b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSlice
new file mode 100644
index 0000000..ccf7f9f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSlice
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSlice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSliceTestCases, CtsSliceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSlice',
+        test_name='cheets_CTS_R.internal.arm.CtsSlice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSliceTestCases', '--include-filter', 'CtsSliceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSlice',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSoundTrigger b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSoundTrigger
new file mode 100644
index 0000000..a44a475
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSoundTrigger
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSoundTrigger'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSoundTriggerTestCases, CtsSoundTriggerTestCases[instant], CtsSoundTriggerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSoundTrigger',
+        test_name='cheets_CTS_R.internal.arm.CtsSoundTrigger',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSoundTriggerTestCases', '--include-filter', 'CtsSoundTriggerTestCases[instant]', '--include-filter', 'CtsSoundTriggerTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSoundTrigger',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSpeech b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSpeech
new file mode 100644
index 0000000..c02fc1e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSpeech
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSpeech'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSpeechTestCases, CtsSpeechTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSpeech',
+        test_name='cheets_CTS_R.internal.arm.CtsSpeech',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSpeechTestCases', '--include-filter', 'CtsSpeechTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSpeech',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsStagedInstallHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsStagedInstallHostTestCases
new file mode 100644
index 0000000..4de84a9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsStagedInstallHostTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsStagedInstallHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsStagedInstallHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsStagedInstallHostTestCases',
+        test_name='cheets_CTS_R.internal.arm.CtsStagedInstallHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsStagedInstallHostTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsStagedInstallHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsStatsdHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsStatsdHost
new file mode 100644
index 0000000..644301b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsStatsdHost
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsStatsdHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsStatsdHostTestCases, CtsStatsdHostTestCases[instant], CtsStatsdHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsStatsdHost',
+        test_name='cheets_CTS_R.internal.arm.CtsStatsdHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsStatsdHostTestCases', '--include-filter', 'CtsStatsdHostTestCases[instant]', '--include-filter', 'CtsStatsdHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsStatsdHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        use_old_adb=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsStrictJavaPackages b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsStrictJavaPackages
new file mode 100644
index 0000000..c218d6b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsStrictJavaPackages
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsStrictJavaPackages'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsStrictJavaPackagesTestCases, CtsStrictJavaPackagesTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsStrictJavaPackages',
+        test_name='cheets_CTS_R.internal.arm.CtsStrictJavaPackages',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsStrictJavaPackagesTestCases', '--include-filter', 'CtsStrictJavaPackagesTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsStrictJavaPackages',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSuspendApps b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSuspendApps
new file mode 100644
index 0000000..c8e81b2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSuspendApps
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSuspendApps'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSuspendAppsPermissionTestCases, CtsSuspendAppsPermissionTestCases[secondary_user], CtsSuspendAppsTestCases, CtsSuspendAppsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSuspendApps',
+        test_name='cheets_CTS_R.internal.arm.CtsSuspendApps',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSuspendAppsPermissionTestCases', '--include-filter', 'CtsSuspendAppsPermissionTestCases[secondary_user]', '--include-filter', 'CtsSuspendAppsTestCases', '--include-filter', 'CtsSuspendAppsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSuspendApps',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSustainedPerformanceHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSustainedPerformanceHost
new file mode 100644
index 0000000..1e082b6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSustainedPerformanceHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSustainedPerformanceHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases, CtsSustainedPerformanceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSustainedPerformanceHost',
+        test_name='cheets_CTS_R.internal.arm.CtsSustainedPerformanceHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--include-filter', 'CtsSustainedPerformanceHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSustainedPerformanceHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware
new file mode 100644
index 0000000..94ab16c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsSustainedPerformanceHostTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSustainedPerformanceHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSync b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSync
new file mode 100644
index 0000000..d5837b0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSync
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSync'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases, CtsSyncAccountAccessOtherCertTestCases[secondary_user], CtsSyncContentHostTestCases, CtsSyncContentHostTestCases[secondary_user], CtsSyncManagerTestsCases, CtsSyncManagerTestsCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSync',
+        test_name='cheets_CTS_R.internal.arm.CtsSync',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases[secondary_user]', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncContentHostTestCases[secondary_user]', '--include-filter', 'CtsSyncManagerTestsCases', '--include-filter', 'CtsSyncManagerTestsCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSync',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSystem b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSystem
new file mode 100644
index 0000000..21cea72
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsSystem
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsSystem'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSystemApiAnnotationTestCases, CtsSystemApiAnnotationTestCases[secondary_user], CtsSystemApiSignatureTestCases, CtsSystemApiSignatureTestCases[secondary_user], CtsSystemIntentTestCases, CtsSystemIntentTestCases[secondary_user], CtsSystemUiHostTestCases, CtsSystemUiHostTestCases[instant], CtsSystemUiHostTestCases[secondary_user], CtsSystemUiTestCases, CtsSystemUiTestCases[instant], CtsSystemUiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsSystem',
+        test_name='cheets_CTS_R.internal.arm.CtsSystem',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiAnnotationTestCases[secondary_user]', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemIntentTestCases[secondary_user]', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiHostTestCases[instant]', '--include-filter', 'CtsSystemUiHostTestCases[secondary_user]', '--include-filter', 'CtsSystemUiTestCases', '--include-filter', 'CtsSystemUiTestCases[instant]', '--include-filter', 'CtsSystemUiTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSystem',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTaggingHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTaggingHost
new file mode 100644
index 0000000..f6471a0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTaggingHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsTaggingHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTaggingHostTestCases, CtsTaggingHostTestCases[instant], CtsTaggingHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTaggingHost',
+        test_name='cheets_CTS_R.internal.arm.CtsTaggingHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTaggingHostTestCases', '--include-filter', 'CtsTaggingHostTestCases[instant]', '--include-filter', 'CtsTaggingHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTaggingHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTelecom b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTelecom
new file mode 100644
index 0000000..6e3f540
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTelecom
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsTelecom'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases2[secondary_user], CtsTelecomTestCases3, CtsTelecomTestCases3[secondary_user], CtsTelecomTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTelecom',
+        test_name='cheets_CTS_R.internal.arm.CtsTelecom',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases2[secondary_user]', '--include-filter', 'CtsTelecomTestCases3', '--include-filter', 'CtsTelecomTestCases3[secondary_user]', '--include-filter', 'CtsTelecomTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTelecom',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTelephony b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTelephony
new file mode 100644
index 0000000..1b96812
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTelephony
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsTelephony'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTelephony2TestCases, CtsTelephony2TestCases[instant], CtsTelephony2TestCases[secondary_user], CtsTelephony3TestCases, CtsTelephony3TestCases[secondary_user], CtsTelephonyHostCases, CtsTelephonyHostCases[secondary_user], CtsTelephonyProviderHostCases, CtsTelephonyProviderHostCases[secondary_user], CtsTelephonyProviderTestCases, CtsTelephonyProviderTestCases[secondary_user], CtsTelephonySdk28TestCases, CtsTelephonySdk28TestCases[secondary_user], CtsTelephonyTestCases, CtsTelephonyTestCasesPermissionReadPhoneState, CtsTelephonyTestCasesPermissionReadPhoneState[instant], CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTelephony',
+        test_name='cheets_CTS_R.internal.arm.CtsTelephony',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephony2TestCases[instant]', '--include-filter', 'CtsTelephony2TestCases[secondary_user]', '--include-filter', 'CtsTelephony3TestCases', '--include-filter', 'CtsTelephony3TestCases[secondary_user]', '--include-filter', 'CtsTelephonyHostCases', '--include-filter', 'CtsTelephonyHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderHostCases', '--include-filter', 'CtsTelephonyProviderHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderTestCases', '--include-filter', 'CtsTelephonyProviderTestCases[secondary_user]', '--include-filter', 'CtsTelephonySdk28TestCases', '--include-filter', 'CtsTelephonySdk28TestCases[secondary_user]', '--include-filter', 'CtsTelephonyTestCases', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[instant]', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTelephony',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=32400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTestHarnessMode b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTestHarnessMode
new file mode 100644
index 0000000..2910308
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTestHarnessMode
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsTestHarnessMode'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTestHarnessModeTestCases, CtsTestHarnessModeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTestHarnessMode',
+        test_name='cheets_CTS_R.internal.arm.CtsTestHarnessMode',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTestHarnessModeTestCases', '--include-filter', 'CtsTestHarnessModeTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTestHarnessMode',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTetheringTest b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTetheringTest
new file mode 100644
index 0000000..f084a3f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTetheringTest
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsTetheringTest'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTetheringTest, CtsTetheringTest[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTetheringTest',
+        test_name='cheets_CTS_R.internal.arm.CtsTetheringTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTetheringTest', '--include-filter', 'CtsTetheringTest[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTetheringTest',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsText b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsText
new file mode 100644
index 0000000..34079b4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsText
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsText'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTextClassifierTestCases, CtsTextClassifierTestCases[secondary_user], CtsTextTestCases, CtsTextTestCases[instant], CtsTextTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsText',
+        test_name='cheets_CTS_R.internal.arm.CtsText',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTextClassifierTestCases', '--include-filter', 'CtsTextClassifierTestCases[secondary_user]', '--include-filter', 'CtsTextTestCases', '--include-filter', 'CtsTextTestCases[instant]', '--include-filter', 'CtsTextTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsText',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTfliteNnapiDelegate b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTfliteNnapiDelegate
new file mode 100644
index 0000000..7a75b1f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTfliteNnapiDelegate
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsTfliteNnapiDelegate'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTfliteNnapiDelegateTestCases, CtsTfliteNnapiDelegateTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTfliteNnapiDelegate',
+        test_name='cheets_CTS_R.internal.arm.CtsTfliteNnapiDelegate',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTfliteNnapiDelegateTestCases', '--include-filter', 'CtsTfliteNnapiDelegateTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTfliteNnapiDelegate',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTheme b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTheme
new file mode 100644
index 0000000..2c230d8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTheme
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsTheme'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsThemeDeviceTestCases, CtsThemeDeviceTestCases[secondary_user], CtsThemeHostTestCases, CtsThemeHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTheme',
+        test_name='cheets_CTS_R.internal.arm.CtsTheme',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeDeviceTestCases[secondary_user]', '--include-filter', 'CtsThemeHostTestCases', '--include-filter', 'CtsThemeHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTheme',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsThermal b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsThermal
new file mode 100644
index 0000000..ae718a3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsThermal
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsThermal'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsThermalTestCases, CtsThermalTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsThermal',
+        test_name='cheets_CTS_R.internal.arm.CtsThermal',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThermalTestCases', '--include-filter', 'CtsThermalTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsThermal',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsToast b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsToast
new file mode 100644
index 0000000..b675432
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsToast
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsToast'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsToastLegacyTestCases, CtsToastLegacyTestCases[secondary_user], CtsToastTestCases, CtsToastTestCases[instant], CtsToastTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsToast',
+        test_name='cheets_CTS_R.internal.arm.CtsToast',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastLegacyTestCases[secondary_user]', '--include-filter', 'CtsToastTestCases', '--include-filter', 'CtsToastTestCases[instant]', '--include-filter', 'CtsToastTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsToast',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTransition b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTransition
new file mode 100644
index 0000000..350e61f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTransition
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsTransition'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTransitionTestCases, CtsTransitionTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTransition',
+        test_name='cheets_CTS_R.internal.arm.CtsTransition',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTransitionTestCases', '--include-filter', 'CtsTransitionTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTransition',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTrustedVoiceHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTrustedVoiceHost
new file mode 100644
index 0000000..9d0df19
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTrustedVoiceHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsTrustedVoiceHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTrustedVoiceHostTestCases, CtsTrustedVoiceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTrustedVoiceHost',
+        test_name='cheets_CTS_R.internal.arm.CtsTrustedVoiceHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTrustedVoiceHostTestCases', '--include-filter', 'CtsTrustedVoiceHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTrustedVoiceHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTv b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTv
new file mode 100644
index 0000000..78a4a01
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsTv
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsTv'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTvProviderTestCases, CtsTvProviderTestCases[secondary_user], CtsTvTestCases, CtsTvTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsTv',
+        test_name='cheets_CTS_R.internal.arm.CtsTv',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvProviderTestCases[secondary_user]', '--include-filter', 'CtsTvTestCases', '--include-filter', 'CtsTvTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTv',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUi b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUi
new file mode 100644
index 0000000..5997c97
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUi
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsUi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUiAutomationTestCases, CtsUiAutomationTestCases[instant], CtsUiAutomationTestCases[secondary_user], CtsUiRenderingTestCases, CtsUiRenderingTestCases27, CtsUiRenderingTestCases27[instant], CtsUiRenderingTestCases27[secondary_user], CtsUiRenderingTestCases[instant], CtsUiRenderingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUi',
+        test_name='cheets_CTS_R.internal.arm.CtsUi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiAutomationTestCases[instant]', '--include-filter', 'CtsUiAutomationTestCases[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases', '--include-filter', 'CtsUiRenderingTestCases27', '--include-filter', 'CtsUiRenderingTestCases27[instant]', '--include-filter', 'CtsUiRenderingTestCases27[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases[instant]', '--include-filter', 'CtsUiRenderingTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUi',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUidIsolation b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUidIsolation
new file mode 100644
index 0000000..7813994
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUidIsolation
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsUidIsolation'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUidIsolationTestCases, CtsUidIsolationTestCases[instant], CtsUidIsolationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUidIsolation',
+        test_name='cheets_CTS_R.internal.arm.CtsUidIsolation',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUidIsolationTestCases', '--include-filter', 'CtsUidIsolationTestCases[instant]', '--include-filter', 'CtsUidIsolationTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUidIsolation',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsageStats b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsageStats
new file mode 100644
index 0000000..43ce853
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsageStats
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsUsageStats'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsageStatsTestCases, CtsUsageStatsTestCases[instant], CtsUsageStatsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUsageStats',
+        test_name='cheets_CTS_R.internal.arm.CtsUsageStats',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases', '--include-filter', 'CtsUsageStatsTestCases[instant]', '--include-filter', 'CtsUsageStatsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsageStats',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsageStatsTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsageStatsTestCases.ctshardware
new file mode 100644
index 0000000..3aba66b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsageStatsTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsUsageStatsTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUsageStatsTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsUsageStatsTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsageStatsTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsb b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsb
new file mode 100644
index 0000000..9b2e4b7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsb
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsUsb'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsbManagerTestCases, CtsUsbManagerTestCases[secondary_user], CtsUsbTests, CtsUsbTests[instant], CtsUsbTests[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUsb',
+        test_name='cheets_CTS_R.internal.arm.CtsUsb',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsbManagerTestCases', '--include-filter', 'CtsUsbManagerTestCases[secondary_user]', '--include-filter', 'CtsUsbTests', '--include-filter', 'CtsUsbTests[instant]', '--include-filter', 'CtsUsbTests[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsb',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsesLibraryHost b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsesLibraryHost
new file mode 100644
index 0000000..c61d341
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUsesLibraryHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsUsesLibraryHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsesLibraryHostTestCases, CtsUsesLibraryHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUsesLibraryHost',
+        test_name='cheets_CTS_R.internal.arm.CtsUsesLibraryHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsesLibraryHostTestCases', '--include-filter', 'CtsUsesLibraryHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsesLibraryHost',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUtil b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUtil
new file mode 100644
index 0000000..25e9687
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsUtil
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsUtil'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUtilTestCases, CtsUtilTestCases[instant], CtsUtilTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsUtil',
+        test_name='cheets_CTS_R.internal.arm.CtsUtil',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUtilTestCases', '--include-filter', 'CtsUtilTestCases[instant]', '--include-filter', 'CtsUtilTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUtil',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsVideo b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsVideo
new file mode 100644
index 0000000..108c71b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsVideo
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsVideo'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVideoTestCases, CtsVideoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsVideo',
+        test_name='cheets_CTS_R.internal.arm.CtsVideo',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVideoTestCases', '--include-filter', 'CtsVideoTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsVideo',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsView b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsView
new file mode 100644
index 0000000..19f01a8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsView
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsView'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewInspectorAnnotationProcessorTestCases, CtsViewInspectorAnnotationProcessorTestCases[instant], CtsViewInspectorAnnotationProcessorTestCases[secondary_user], CtsViewTestCases, CtsViewTestCasesSdk28, CtsViewTestCasesSdk28[instant], CtsViewTestCasesSdk28[secondary_user], CtsViewTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsView',
+        test_name='cheets_CTS_R.internal.arm.CtsView',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[instant]', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[secondary_user]', '--include-filter', 'CtsViewTestCases', '--include-filter', 'CtsViewTestCasesSdk28', '--include-filter', 'CtsViewTestCasesSdk28[instant]', '--include-filter', 'CtsViewTestCasesSdk28[secondary_user]', '--include-filter', 'CtsViewTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsView',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsViewTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsViewTestCases.ctshardware
new file mode 100644
index 0000000..16a720f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsViewTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsViewTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsViewTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsViewTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsVoice b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsVoice
new file mode 100644
index 0000000..8997395
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsVoice
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsVoice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVoiceInteractionTestCases, CtsVoiceInteractionTestCases[instant], CtsVoiceInteractionTestCases[secondary_user], CtsVoiceSettingsTestCases, CtsVoiceSettingsTestCases[instant], CtsVoiceSettingsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsVoice',
+        test_name='cheets_CTS_R.internal.arm.CtsVoice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceInteractionTestCases[instant]', '--include-filter', 'CtsVoiceInteractionTestCases[secondary_user]', '--include-filter', 'CtsVoiceSettingsTestCases', '--include-filter', 'CtsVoiceSettingsTestCases[instant]', '--include-filter', 'CtsVoiceSettingsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsVoice',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsVr b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsVr
new file mode 100644
index 0000000..3361a36
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsVr
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsVr'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVrTestCases, CtsVrTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsVr',
+        test_name='cheets_CTS_R.internal.arm.CtsVr',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVrTestCases', '--include-filter', 'CtsVrTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsVr',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWebkit b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWebkit
new file mode 100644
index 0000000..d1c2e7d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWebkit
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWebkit'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWebkitTestCases, CtsWebkitTestCases[instant], CtsWebkitTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWebkit',
+        test_name='cheets_CTS_R.internal.arm.CtsWebkit',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWebkitTestCases', '--include-filter', 'CtsWebkitTestCases[instant]', '--include-filter', 'CtsWebkitTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWebkit',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWidget b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWidget
new file mode 100644
index 0000000..98c7765
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWidget
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWidget'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWidgetTestCases, CtsWidgetTestCases29, CtsWidgetTestCases29[instant], CtsWidgetTestCases29[secondary_user], CtsWidgetTestCases[instant], CtsWidgetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWidget',
+        test_name='cheets_CTS_R.internal.arm.CtsWidget',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWidgetTestCases', '--include-filter', 'CtsWidgetTestCases29', '--include-filter', 'CtsWidgetTestCases29[instant]', '--include-filter', 'CtsWidgetTestCases29[secondary_user]', '--include-filter', 'CtsWidgetTestCases[instant]', '--include-filter', 'CtsWidgetTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWidget',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWifi b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWifi
new file mode 100644
index 0000000..dd9fefa
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWifi
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWifi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWifiBroadcastsHostTestCases, CtsWifiBroadcastsHostTestCases[instant], CtsWifiBroadcastsHostTestCases[secondary_user], CtsWifiTestCases, CtsWifiTestCases[instant], CtsWifiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWifi',
+        test_name='cheets_CTS_R.internal.arm.CtsWifi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWifiBroadcastsHostTestCases', '--include-filter', 'CtsWifiBroadcastsHostTestCases[instant]', '--include-filter', 'CtsWifiBroadcastsHostTestCases[secondary_user]', '--include-filter', 'CtsWifiTestCases', '--include-filter', 'CtsWifiTestCases[instant]', '--include-filter', 'CtsWifiTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWifi',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWifiTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWifiTestCases.ctshardware
new file mode 100644
index 0000000..b615263
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWifiTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWifiTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWifiTestCases of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWifiTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.arm.CtsWifiTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWifiTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWifiTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager
new file mode 100644
index 0000000..f2f3d6c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManagerDeviceTestCases, CtsWindowManagerDeviceTestCases[secondary_user], CtsWindowManagerJetpackTestCases, CtsWindowManagerJetpackTestCases[secondary_user], CtsWindowManagerSdk25TestCases, CtsWindowManagerSdk25TestCases[secondary_user], CtsWindowManagerSdk28TestCases, CtsWindowManagerSdk28TestCases[secondary_user], CtsWindowManagerSdk29TestCases, CtsWindowManagerSdk29TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='internal.arm.CtsWindowManager',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--include-filter', 'CtsWindowManagerDeviceTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerJetpackTestCases', '--include-filter', 'CtsWindowManagerJetpackTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk25TestCases', '--include-filter', 'CtsWindowManagerSdk25TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk28TestCases', '--include-filter', 'CtsWindowManagerSdk28TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk29TestCases', '--include-filter', 'CtsWindowManagerSdk29TestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManager',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=19800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.A b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.A
new file mode 100644
index 0000000..9fffea5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.A
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.A'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.A of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.A',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.A',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityManagerGetConfigTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityMetricsLoggerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTaskAffinityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTransitionTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityViewTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityVisibilityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AddWindowAsUserTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsAppOpsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsImportanceTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AmProfileTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AmStartOptionsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AnrTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AppConfigurationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AspectRatioTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AssistantStackTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.C b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.C
new file mode 100644
index 0000000..10f078c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.C
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.C'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.C of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.C',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.C',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.CloseOnOutsideTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ConfigChangeTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.CrossAppDragAndDropTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.D b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.D
new file mode 100644
index 0000000..3f9eaca
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.D
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.D'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.D of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.D',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.D',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DecorInsetTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DeprecatedTargetSdkTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DialogFrameTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplayCutoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplaySizeTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplayTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DragDropTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DreamManagerServiceTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.F b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.F
new file mode 100644
index 0000000..3216d78
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.F
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.F'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.F of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.F',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.F',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ForceRelayoutTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.FreeformWindowingModeTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.L b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.L
new file mode 100644
index 0000000..9e238a3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.L
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.L'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.L of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.L',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.L',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LocationInWindowTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LocationOnScreenTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.M b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.M
new file mode 100644
index 0000000..0a88f7e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.M
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.M'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.M of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.M',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.M',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ManifestLayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MinimalPostProcessingTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayActivityLaunchTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayClientTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayLockedKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPolicyTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPrivateDisplayTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySecurityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySystemDecorationTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.Override b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.Override
new file mode 100644
index 0000000..75684af
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.Override
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.Override'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.Override of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.Override',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.Override',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.OverrideConfigTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.P b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.P
new file mode 100644
index 0000000..399535a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.P
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.P'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.P of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.P',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.P',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PinnedStackTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PrereleaseSdkTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PresentationTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.R b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.R
new file mode 100644
index 0000000..d5116a2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.R
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.R'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.R of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.R',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.R',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ReplaceWindowTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.RobustnessTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.S b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.S
new file mode 100644
index 0000000..89971b2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.S
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.S'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.S of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.S',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.S',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SplashscreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SplitScreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityAsUserTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlViewHostTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewSurfaceValidatorTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.T b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.T
new file mode 100644
index 0000000..304c823
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.T
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.T'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.T of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.T',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.T',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ToastWindowTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.TransitionSelectionTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.Window b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.Window
new file mode 100644
index 0000000..57dd51e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.Window
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.Window'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.Window of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.Window',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.Window',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextPolicyTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowFocusTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInputTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationCallbackTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationControllerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationImeTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationSynchronicityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsControllerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsLayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsPolicyTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_BadTokenExceptionTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_LayoutParamsTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowMetricsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.intent b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.intent
new file mode 100644
index 0000000..a0bdced
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.intent
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.intent'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.intent of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.intent',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.intent',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentGenerationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.lifecycle b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.lifecycle
new file mode 100644
index 0000000..3c017e4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWindowManager.lifecycle
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWindowManager.lifecycle'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.lifecycle of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWindowManager.lifecycle',
+        test_name='cheets_CTS_R.internal.arm.CtsWindowManager.lifecycle',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleFreeformTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecyclePipTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleSplitScreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTopResumedStateTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityStarterTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWrap b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWrap
new file mode 100644
index 0000000..15cea79
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.CtsWrap
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.CtsWrap'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWrapNoWrapTestCases, CtsWrapNoWrapTestCases[secondary_user], CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugMallocDebugTestCases[secondary_user], CtsWrapWrapDebugTestCases, CtsWrapWrapDebugTestCases[secondary_user], CtsWrapWrapNoDebugTestCases, CtsWrapWrapNoDebugTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.CtsWrap',
+        test_name='cheets_CTS_R.internal.arm.CtsWrap',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapNoWrapTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWrap',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.LegacyStorageTest b/server/site_tests/cheets_CTS_R/control.internal.arm.LegacyStorageTest
new file mode 100644
index 0000000..bf7a67e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.LegacyStorageTest
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.LegacyStorageTest'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module LegacyStorageTest, LegacyStorageTest[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.LegacyStorageTest',
+        test_name='cheets_CTS_R.internal.arm.LegacyStorageTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'LegacyStorageTest', '--include-filter', 'LegacyStorageTest[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='LegacyStorageTest',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.ScopedStorageTest b/server/site_tests/cheets_CTS_R/control.internal.arm.ScopedStorageTest
new file mode 100644
index 0000000..ab34ec9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.ScopedStorageTest
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.ScopedStorageTest'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module ScopedStorageTest, ScopedStorageTest[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.ScopedStorageTest',
+        test_name='cheets_CTS_R.internal.arm.ScopedStorageTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'ScopedStorageTest', '--include-filter', 'ScopedStorageTest[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='ScopedStorageTest',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases
new file mode 100644
index 0000000..eabfc5e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAbiOverrideHostTestCases, CtsAbiOverrideHostTestCases[instant], CtsAbiOverrideHostTestCases[secondary_user], CtsAccelerationTestCases, CtsAccelerationTestCases[instant], CtsAccelerationTestCases[secondary_user], CtsAccessibilityServiceSdk29TestCases, CtsAccessibilityServiceSdk29TestCases[instant], CtsAccessibilityServiceSdk29TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAbiOverrideHostTestCases', '--include-filter', 'CtsAbiOverrideHostTestCases[instant]', '--include-filter', 'CtsAbiOverrideHostTestCases[secondary_user]', '--include-filter', 'CtsAccelerationTestCases', '--include-filter', 'CtsAccelerationTestCases[instant]', '--include-filter', 'CtsAccelerationTestCases[secondary_user]', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[instant]', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
new file mode 100644
index 0000000..9d1f5f9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityServiceTestCases, CtsAccessibilityServiceTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityServiceTestCases', '--include-filter', 'CtsAccessibilityServiceTestCases[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases
new file mode 100644
index 0000000..73e3497
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases
@@ -0,0 +1,51 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityTestCases, CtsAccessibilityTestCases[instant], CtsAccessibilityTestCases[secondary_user], CtsAccountManagerTestCases, CtsAccountManagerTestCases[instant], CtsAccountManagerTestCases[secondary_user], CtsAccountsHostTestCases, CtsAccountsHostTestCases[instant], CtsAccountsHostTestCases[secondary_user], CtsActivityManagerBackgroundActivityTestCases, CtsActivityManagerBackgroundActivityTestCases[secondary_user], CtsAdbHostTestCases, CtsAdbHostTestCases[secondary_user], CtsAdbManagerHostTestCases, CtsAdbManagerHostTestCases[secondary_user], CtsAdminPackageInstallerTestCases, CtsAdminTestCases, CtsAlarmManagerTestCases, CtsAlarmManagerTestCases[instant], CtsAlarmManagerTestCases[secondary_user], CtsAndroidAppTestCases, CtsAndroidAppTestCases[instant], CtsAndroidAppTestCases[secondary_user], CtsAndroidTestBase28ApiSignatureTestCases, CtsAndroidTestBase28ApiSignatureTestCases[instant], CtsAndroidTestBase28ApiSignatureTestCases[secondary_user], CtsAndroidTestBaseCurrentApiSignatureTestCases, CtsAndroidTestBaseCurrentApiSignatureTestCases[instant], CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases[instant], CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestRunnerCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant], CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user], CtsAngleIntegrationHostTestCases, CtsAngleIntegrationHostTestCases[instant], CtsAngleIntegrationHostTestCases[secondary_user], CtsAnimationTestCases, CtsAnimationTestCases[instant], CtsAnimationTestCases[secondary_user], CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacy27ApiSignatureTestCases[instant], CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant], CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user], CtsApexTestCases, CtsApexTestCases[secondary_user], CtsAppBindingHostTestCases, CtsAppBindingHostTestCases[secondary_user], CtsAppCompatHostTestCases, CtsAppCompatHostTestCases[instant], CtsAppCompatHostTestCases[secondary_user], CtsAppComponentFactoryTestCases, CtsAppComponentFactoryTestCases[instant], CtsAppComponentFactoryTestCases[secondary_user], CtsAppEnumerationTestCases, CtsAppEnumerationTestCases[secondary_user], CtsAppExitTestCases, CtsAppExitTestCases[instant], CtsAppExitTestCases[secondary_user], CtsAppIntegrityDeviceTestCases, CtsAppOpsTestCases, CtsAppOpsTestCases[instant], CtsAppOpsTestCases[secondary_user], CtsAppPredictionServiceTestCases, CtsAppPredictionServiceTestCases[secondary_user], CtsAppSecurityHostTestCases, CtsAppSecurityHostTestCases[secondary_user], CtsAppTestCases, CtsAppTestCases[instant], CtsAppTestCases[secondary_user], CtsAppUsageHostTestCases, CtsAppUsageHostTestCases[instant], CtsAppUsageHostTestCases[secondary_user], CtsAppWidgetTestCases, CtsAppWidgetTestCases[instant], CtsAppWidgetTestCases[secondary_user], CtsAslrMallocTestCases, CtsAslrMallocTestCases[secondary_user], CtsAssistTestCases, CtsAssistTestCases[instant], CtsAssistTestCases[secondary_user], CtsAtomicInstallTestCases, CtsAtomicInstallTestCases[secondary_user], CtsAtraceHostTestCases, CtsAtraceHostTestCases[instant], CtsAtraceHostTestCases[secondary_user], CtsAttentionServiceDeviceTestCases, CtsAttentionServiceDeviceTestCases[secondary_user], CtsAutoFillServiceTestCases, CtsAutoFillServiceTestCases[instant], CtsAutoFillServiceTestCases[secondary_user], CtsBackgroundRestrictionsTestCases, CtsBackgroundRestrictionsTestCases[instant], CtsBackgroundRestrictionsTestCases[secondary_user], CtsBackupHostTestCases, CtsBackupTestCases, CtsBatterySavingTestCases, CtsBatterySavingTestCases[secondary_user], CtsBionicAppTestCases, CtsBionicAppTestCases[instant], CtsBionicAppTestCases[secondary_user], CtsBionicTestCases, CtsBionicTestCases[secondary_user], CtsBlobStoreHostTestCases, CtsBlobStoreHostTestCases[secondary_user], CtsBlobStoreHostTestHelper, CtsBlobStoreHostTestHelper[secondary_user], CtsBlobStoreTestCases, CtsBlobStoreTestCases[secondary_user], CtsBlobStoreTestHelper, CtsBlobStoreTestHelperDiffSig, CtsBlobStoreTestHelperDiffSig2, CtsBlobStoreTestHelperDiffSig2[secondary_user], CtsBlobStoreTestHelperDiffSig[secondary_user], CtsBlobStoreTestHelper[secondary_user], CtsBluetoothTestCases, CtsBluetoothTestCases[secondary_user], CtsBootStatsTestCases, CtsBootStatsTestCases[secondary_user], CtsCalendarProviderTestCases, CtsCalendarProviderTestCases[secondary_user], CtsCalendarcommon2TestCases, CtsCalendarcommon2TestCases[secondary_user], CtsCameraApi25TestCases, CtsCameraApi25TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        enable_default_apps=True,
+        tag='internal.arm.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityTestCases', '--include-filter', 'CtsAccessibilityTestCases[instant]', '--include-filter', 'CtsAccessibilityTestCases[secondary_user]', '--include-filter', 'CtsAccountManagerTestCases', '--include-filter', 'CtsAccountManagerTestCases[instant]', '--include-filter', 'CtsAccountManagerTestCases[secondary_user]', '--include-filter', 'CtsAccountsHostTestCases', '--include-filter', 'CtsAccountsHostTestCases[instant]', '--include-filter', 'CtsAccountsHostTestCases[secondary_user]', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases[secondary_user]', '--include-filter', 'CtsAdbHostTestCases', '--include-filter', 'CtsAdbHostTestCases[secondary_user]', '--include-filter', 'CtsAdbManagerHostTestCases', '--include-filter', 'CtsAdbManagerHostTestCases[secondary_user]', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--include-filter', 'CtsAlarmManagerTestCases', '--include-filter', 'CtsAlarmManagerTestCases[instant]', '--include-filter', 'CtsAlarmManagerTestCases[secondary_user]', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidAppTestCases[instant]', '--include-filter', 'CtsAndroidAppTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAngleIntegrationHostTestCases', '--include-filter', 'CtsAngleIntegrationHostTestCases[instant]', '--include-filter', 'CtsAngleIntegrationHostTestCases[secondary_user]', '--include-filter', 'CtsAnimationTestCases', '--include-filter', 'CtsAnimationTestCases[instant]', '--include-filter', 'CtsAnimationTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApexTestCases', '--include-filter', 'CtsApexTestCases[secondary_user]', '--include-filter', 'CtsAppBindingHostTestCases', '--include-filter', 'CtsAppBindingHostTestCases[secondary_user]', '--include-filter', 'CtsAppCompatHostTestCases', '--include-filter', 'CtsAppCompatHostTestCases[instant]', '--include-filter', 'CtsAppCompatHostTestCases[secondary_user]', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppComponentFactoryTestCases[instant]', '--include-filter', 'CtsAppComponentFactoryTestCases[secondary_user]', '--include-filter', 'CtsAppEnumerationTestCases', '--include-filter', 'CtsAppEnumerationTestCases[secondary_user]', '--include-filter', 'CtsAppExitTestCases', '--include-filter', 'CtsAppExitTestCases[instant]', '--include-filter', 'CtsAppExitTestCases[secondary_user]', '--include-filter', 'CtsAppIntegrityDeviceTestCases', '--include-filter', 'CtsAppOpsTestCases', '--include-filter', 'CtsAppOpsTestCases[instant]', '--include-filter', 'CtsAppOpsTestCases[secondary_user]', '--include-filter', 'CtsAppPredictionServiceTestCases', '--include-filter', 'CtsAppPredictionServiceTestCases[secondary_user]', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppTestCases[instant]', '--include-filter', 'CtsAppTestCases[secondary_user]', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppUsageHostTestCases[instant]', '--include-filter', 'CtsAppUsageHostTestCases[secondary_user]', '--include-filter', 'CtsAppWidgetTestCases', '--include-filter', 'CtsAppWidgetTestCases[instant]', '--include-filter', 'CtsAppWidgetTestCases[secondary_user]', '--include-filter', 'CtsAslrMallocTestCases', '--include-filter', 'CtsAslrMallocTestCases[secondary_user]', '--include-filter', 'CtsAssistTestCases', '--include-filter', 'CtsAssistTestCases[instant]', '--include-filter', 'CtsAssistTestCases[secondary_user]', '--include-filter', 'CtsAtomicInstallTestCases', '--include-filter', 'CtsAtomicInstallTestCases[secondary_user]', '--include-filter', 'CtsAtraceHostTestCases', '--include-filter', 'CtsAtraceHostTestCases[instant]', '--include-filter', 'CtsAtraceHostTestCases[secondary_user]', '--include-filter', 'CtsAttentionServiceDeviceTestCases', '--include-filter', 'CtsAttentionServiceDeviceTestCases[secondary_user]', '--include-filter', 'CtsAutoFillServiceTestCases', '--include-filter', 'CtsAutoFillServiceTestCases[instant]', '--include-filter', 'CtsAutoFillServiceTestCases[secondary_user]', '--include-filter', 'CtsBackgroundRestrictionsTestCases', '--include-filter', 'CtsBackgroundRestrictionsTestCases[instant]', '--include-filter', 'CtsBackgroundRestrictionsTestCases[secondary_user]', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--include-filter', 'CtsBatterySavingTestCases', '--include-filter', 'CtsBatterySavingTestCases[secondary_user]', '--include-filter', 'CtsBionicAppTestCases', '--include-filter', 'CtsBionicAppTestCases[instant]', '--include-filter', 'CtsBionicAppTestCases[secondary_user]', '--include-filter', 'CtsBionicTestCases', '--include-filter', 'CtsBionicTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreHostTestCases', '--include-filter', 'CtsBlobStoreHostTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreHostTestHelper', '--include-filter', 'CtsBlobStoreHostTestHelper[secondary_user]', '--include-filter', 'CtsBlobStoreTestCases', '--include-filter', 'CtsBlobStoreTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper', '--include-filter', 'CtsBlobStoreTestHelperDiffSig', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelperDiffSig[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper[secondary_user]', '--include-filter', 'CtsBluetoothTestCases', '--include-filter', 'CtsBluetoothTestCases[secondary_user]', '--include-filter', 'CtsBootStatsTestCases', '--include-filter', 'CtsBootStatsTestCases[secondary_user]', '--include-filter', 'CtsCalendarProviderTestCases', '--include-filter', 'CtsCalendarProviderTestCases[secondary_user]', '--include-filter', 'CtsCalendarcommon2TestCases', '--include-filter', 'CtsCalendarcommon2TestCases[secondary_user]', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraApi25TestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        hard_reboot_on_failure=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases
new file mode 100644
index 0000000..594c659
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases, CtsCameraTestCases[instant], CtsCameraTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsCameraTestCases_-_CtsCameraTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases', '--include-filter', 'CtsCameraTestCases[instant]', '--include-filter', 'CtsCameraTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsCameraTestCases_-_CtsCameraTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsCarHostTestCases_-_CtsDatabaseTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsCarHostTestCases_-_CtsDatabaseTestCases
new file mode 100644
index 0000000..3278cec
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsCarHostTestCases_-_CtsDatabaseTestCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsCarHostTestCases_-_CtsDatabaseTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarHostTestCases, CtsCarTestCases, CtsCarTestCases[secondary_user], CtsCarrierApiTestCases, CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases, CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant], CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user], CtsClassLoaderFactoryPathClassLoaderTestCases, CtsClassLoaderFactoryPathClassLoaderTestCases[instant], CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user], CtsClassloaderSplitsHostTestCases, CtsClassloaderSplitsHostTestCases[instant], CtsClassloaderSplitsHostTestCases[secondary_user], CtsCodePathHostTestCases, CtsCodePathHostTestCases[secondary_user], CtsColorModeTestCases, CtsColorModeTestCases[instant], CtsColorModeTestCases[secondary_user], CtsCompilationTestCases, CtsCompilationTestCases[secondary_user], CtsContactsProviderTestCases, CtsContactsProviderTestCases[secondary_user], CtsContactsProviderWipe, CtsContactsProviderWipe[secondary_user], CtsContentCaptureServiceTestCases, CtsContentCaptureServiceTestCases[instant], CtsContentCaptureServiceTestCases[secondary_user], CtsContentSuggestionsTestCases, CtsContentSuggestionsTestCases[secondary_user], CtsContentTestCases, CtsContentTestCases[instant], CtsContentTestCases[secondary_user], CtsControlsDeviceTestCases, CtsControlsDeviceTestCases[secondary_user], CtsCppToolsTestCases, CtsCppToolsTestCases[secondary_user], CtsCurrentApiSignatureTestCases, CtsCurrentApiSignatureTestCases[instant], CtsCurrentApiSignatureTestCases[secondary_user], CtsDatabaseTestCases, CtsDatabaseTestCases[instant], CtsDatabaseTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        enable_default_apps=True,
+        tag='internal.arm.all.CtsCarHostTestCases_-_CtsDatabaseTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsCarHostTestCases_-_CtsDatabaseTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCarHostTestCases', '--include-filter', 'CtsCarTestCases', '--include-filter', 'CtsCarTestCases[secondary_user]', '--include-filter', 'CtsCarrierApiTestCases', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user]', '--include-filter', 'CtsClassloaderSplitsHostTestCases', '--include-filter', 'CtsClassloaderSplitsHostTestCases[instant]', '--include-filter', 'CtsClassloaderSplitsHostTestCases[secondary_user]', '--include-filter', 'CtsCodePathHostTestCases', '--include-filter', 'CtsCodePathHostTestCases[secondary_user]', '--include-filter', 'CtsColorModeTestCases', '--include-filter', 'CtsColorModeTestCases[instant]', '--include-filter', 'CtsColorModeTestCases[secondary_user]', '--include-filter', 'CtsCompilationTestCases', '--include-filter', 'CtsCompilationTestCases[secondary_user]', '--include-filter', 'CtsContactsProviderTestCases', '--include-filter', 'CtsContactsProviderTestCases[secondary_user]', '--include-filter', 'CtsContactsProviderWipe', '--include-filter', 'CtsContactsProviderWipe[secondary_user]', '--include-filter', 'CtsContentCaptureServiceTestCases', '--include-filter', 'CtsContentCaptureServiceTestCases[instant]', '--include-filter', 'CtsContentCaptureServiceTestCases[secondary_user]', '--include-filter', 'CtsContentSuggestionsTestCases', '--include-filter', 'CtsContentSuggestionsTestCases[secondary_user]', '--include-filter', 'CtsContentTestCases', '--include-filter', 'CtsContentTestCases[instant]', '--include-filter', 'CtsContentTestCases[secondary_user]', '--include-filter', 'CtsControlsDeviceTestCases', '--include-filter', 'CtsControlsDeviceTestCases[secondary_user]', '--include-filter', 'CtsCppToolsTestCases', '--include-filter', 'CtsCppToolsTestCases[secondary_user]', '--include-filter', 'CtsCurrentApiSignatureTestCases', '--include-filter', 'CtsCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsDatabaseTestCases', '--include-filter', 'CtsDatabaseTestCases[instant]', '--include-filter', 'CtsDatabaseTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsCarHostTestCases_-_CtsDatabaseTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32 b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32
new file mode 100644
index 0000000..3f8db2d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases, CtsDeqpTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32',
+        test_name='cheets_CTS_R.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--abi', 'armeabi-v7a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsDeqpTestCases_-_CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64 b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64
new file mode 100644
index 0000000..1bebeb5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases, CtsDeqpTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64',
+        test_name='cheets_CTS_R.internal.arm.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--include-filter', 'CtsDeqpTestCases[secondary_user]', '--abi', 'arm64-v8a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsDeqpTestCases_-_CtsDeqpTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases
new file mode 100644
index 0000000..b61b5ad
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeviceConfigTestCases, CtsDeviceConfigTestCases[instant], CtsDeviceConfigTestCases[secondary_user], CtsDeviceIdleHostTestCases, CtsDeviceIdleHostTestCases[secondary_user], CtsDevicePolicyManagerTestCases, CtsDexMetadataHostTestCases, CtsDexMetadataHostTestCases[secondary_user], CtsDisplayTestCases, CtsDisplayTestCases[instant], CtsDisplayTestCases[secondary_user], CtsDownloadManagerApi28, CtsDownloadManagerApi28[secondary_user], CtsDownloadManagerInstaller, CtsDownloadManagerInstaller[secondary_user], CtsDpiTestCases, CtsDpiTestCases2, CtsDpiTestCases2[secondary_user], CtsDpiTestCases[instant], CtsDpiTestCases[secondary_user], CtsDreamsTestCases, CtsDreamsTestCases[instant], CtsDreamsTestCases[secondary_user], CtsDrmTestCases, CtsDrmTestCases[instant], CtsDrmTestCases[secondary_user], CtsDropBoxManagerTestCases, CtsDumpsysHostTestCases, CtsDumpsysHostTestCases[secondary_user], CtsDynamicLinkerTestCases, CtsDynamicLinkerTestCases[instant], CtsDynamicLinkerTestCases[secondary_user], CtsDynamicMimeHostTestCases, CtsDynamicMimeHostTestCases[secondary_user], CtsEdiHostTestCases, CtsEdiHostTestCases[secondary_user], CtsEffectTestCases, CtsEffectTestCases[instant], CtsEffectTestCases[secondary_user], CtsExtendedMockingTestCases, CtsExtendedMockingTestCases[instant], CtsExtendedMockingTestCases[secondary_user], CtsExternalServiceTestCases, CtsExternalServiceTestCases[secondary_user], CtsExtractNativeLibsHostTestCases, CtsExtractNativeLibsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceConfigTestCases', '--include-filter', 'CtsDeviceConfigTestCases[instant]', '--include-filter', 'CtsDeviceConfigTestCases[secondary_user]', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDeviceIdleHostTestCases[secondary_user]', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--include-filter', 'CtsDexMetadataHostTestCases', '--include-filter', 'CtsDexMetadataHostTestCases[secondary_user]', '--include-filter', 'CtsDisplayTestCases', '--include-filter', 'CtsDisplayTestCases[instant]', '--include-filter', 'CtsDisplayTestCases[secondary_user]', '--include-filter', 'CtsDownloadManagerApi28', '--include-filter', 'CtsDownloadManagerApi28[secondary_user]', '--include-filter', 'CtsDownloadManagerInstaller', '--include-filter', 'CtsDownloadManagerInstaller[secondary_user]', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--include-filter', 'CtsDpiTestCases2[secondary_user]', '--include-filter', 'CtsDpiTestCases[instant]', '--include-filter', 'CtsDpiTestCases[secondary_user]', '--include-filter', 'CtsDreamsTestCases', '--include-filter', 'CtsDreamsTestCases[instant]', '--include-filter', 'CtsDreamsTestCases[secondary_user]', '--include-filter', 'CtsDrmTestCases', '--include-filter', 'CtsDrmTestCases[instant]', '--include-filter', 'CtsDrmTestCases[secondary_user]', '--include-filter', 'CtsDropBoxManagerTestCases', '--include-filter', 'CtsDumpsysHostTestCases', '--include-filter', 'CtsDumpsysHostTestCases[secondary_user]', '--include-filter', 'CtsDynamicLinkerTestCases', '--include-filter', 'CtsDynamicLinkerTestCases[instant]', '--include-filter', 'CtsDynamicLinkerTestCases[secondary_user]', '--include-filter', 'CtsDynamicMimeHostTestCases', '--include-filter', 'CtsDynamicMimeHostTestCases[secondary_user]', '--include-filter', 'CtsEdiHostTestCases', '--include-filter', 'CtsEdiHostTestCases[secondary_user]', '--include-filter', 'CtsEffectTestCases', '--include-filter', 'CtsEffectTestCases[instant]', '--include-filter', 'CtsEffectTestCases[secondary_user]', '--include-filter', 'CtsExtendedMockingTestCases', '--include-filter', 'CtsExtendedMockingTestCases[instant]', '--include-filter', 'CtsExtendedMockingTestCases[secondary_user]', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalServiceTestCases[secondary_user]', '--include-filter', 'CtsExtractNativeLibsHostTestCases', '--include-filter', 'CtsExtractNativeLibsHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
new file mode 100644
index 0000000..4381a13
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFileSystemTestCases, CtsFileSystemTestCases[instant], CtsFileSystemTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFileSystemTestCases', '--include-filter', 'CtsFileSystemTestCases[instant]', '--include-filter', 'CtsFileSystemTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
new file mode 100644
index 0000000..5d02e1e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsFragmentTestCasesSdk26[instant], CtsFragmentTestCasesSdk26[secondary_user], CtsFragmentTestCases[instant], CtsFragmentTestCases[secondary_user], CtsFsMgrTestCases, CtsFsMgrTestCases[secondary_user], CtsGestureTestCases, CtsGestureTestCases[instant], CtsGestureTestCases[secondary_user], CtsGpuProfilingDataTestCases, CtsGpuProfilingDataTestCases[secondary_user], CtsGpuToolsHostTestCases, CtsGpuToolsHostTestCases[secondary_user], CtsGraphicsTestCases, CtsGraphicsTestCases[instant], CtsGraphicsTestCases[secondary_user], CtsGwpAsanTestCases, CtsGwpAsanTestCases[instant], CtsGwpAsanTestCases[secondary_user], CtsHardwareTestCases, CtsHardwareTestCases[secondary_user], CtsHarmfulAppWarningHostTestCases, CtsHarmfulAppWarningHostTestCases[secondary_user], CtsHdmiCecHostTestCases, CtsHdmiCecHostTestCases[secondary_user], CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistApi27TestCases[secondary_user], CtsHiddenApiBlacklistApi28TestCases, CtsHiddenApiBlacklistApi28TestCases[secondary_user], CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user], CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiBlacklistDebugClassTestCases[secondary_user], CtsHiddenApiBlacklistTestApiTestCases, CtsHiddenApiBlacklistTestApiTestCases[secondary_user], CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases[instant], CtsHiddenApiKillswitchDebugClassTestCases[secondary_user], CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWhitelistTestCases[instant], CtsHiddenApiKillswitchWhitelistTestCases[secondary_user], CtsHiddenApiKillswitchWildcardTestCases, CtsHiddenApiKillswitchWildcardTestCases[instant], CtsHiddenApiKillswitchWildcardTestCases[secondary_user], CtsHostTzDataTests, CtsHostTzDataTests[secondary_user], CtsHostsideNetworkTests, CtsHostsideNetworkTests[instant], CtsHostsideNetworkTests[secondary_user], CtsHostsideNumberBlockingTestCases, CtsHostsideNumberBlockingTestCases[secondary_user], CtsHostsideTvTests, CtsHostsideTvTests[secondary_user], CtsHostsideWebViewTests, CtsHostsideWebViewTests[instant], CtsHostsideWebViewTests[secondary_user], CtsIcuTestCases, CtsIcuTestCases[secondary_user], CtsIdentityTestCases, CtsIdentityTestCases[secondary_user], CtsIkeTestCases, CtsIkeTestCases[secondary_user], CtsIncidentHostTestCases, CtsIncidentHostTestCases[instant], CtsIncrementalInstallHostTestCases, CtsIncrementalInstallHostTestCases[secondary_user], CtsInitTestCases, CtsInitTestCases[secondary_user], CtsInlineMockingTestCases, CtsInlineMockingTestCases[instant], CtsInlineMockingTestCases[secondary_user], CtsInputMethodServiceHostTestCases, CtsInputMethodServiceHostTestCases[instant], CtsInputMethodServiceHostTestCases[secondary_user], CtsInputMethodTestCases, CtsInputMethodTestCases[instant], CtsInputMethodTestCases[secondary_user], CtsInstantAppTests, CtsInstantAppTests[secondary_user], CtsIntentSignatureTestCases, CtsIntentSignatureTestCases[secondary_user], CtsJdwpSecurityHostTestCases, CtsJdwpSecurityHostTestCases[secondary_user], CtsJdwpTestCases, CtsJdwpTestCases[instant], CtsJdwpTestCases[secondary_user], CtsJdwpTunnelHostTestCases, CtsJdwpTunnelHostTestCases[secondary_user], CtsJniTestCases, CtsJniTestCases[instant], CtsJniTestCases[secondary_user], CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerSharedUidTestCases[secondary_user], CtsJobSchedulerTestCases, CtsJobSchedulerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=30,
+        tag='internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--include-filter', 'CtsFragmentTestCasesSdk26[instant]', '--include-filter', 'CtsFragmentTestCasesSdk26[secondary_user]', '--include-filter', 'CtsFragmentTestCases[instant]', '--include-filter', 'CtsFragmentTestCases[secondary_user]', '--include-filter', 'CtsFsMgrTestCases', '--include-filter', 'CtsFsMgrTestCases[secondary_user]', '--include-filter', 'CtsGestureTestCases', '--include-filter', 'CtsGestureTestCases[instant]', '--include-filter', 'CtsGestureTestCases[secondary_user]', '--include-filter', 'CtsGpuProfilingDataTestCases', '--include-filter', 'CtsGpuProfilingDataTestCases[secondary_user]', '--include-filter', 'CtsGpuToolsHostTestCases', '--include-filter', 'CtsGpuToolsHostTestCases[secondary_user]', '--include-filter', 'CtsGraphicsTestCases', '--include-filter', 'CtsGraphicsTestCases[instant]', '--include-filter', 'CtsGraphicsTestCases[secondary_user]', '--include-filter', 'CtsGwpAsanTestCases', '--include-filter', 'CtsGwpAsanTestCases[instant]', '--include-filter', 'CtsGwpAsanTestCases[secondary_user]', '--include-filter', 'CtsHardwareTestCases', '--include-filter', 'CtsHardwareTestCases[secondary_user]', '--include-filter', 'CtsHarmfulAppWarningHostTestCases', '--include-filter', 'CtsHarmfulAppWarningHostTestCases[secondary_user]', '--include-filter', 'CtsHdmiCecHostTestCases', '--include-filter', 'CtsHdmiCecHostTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[secondary_user]', '--include-filter', 'CtsHostTzDataTests', '--include-filter', 'CtsHostTzDataTests[secondary_user]', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNetworkTests[instant]', '--include-filter', 'CtsHostsideNetworkTests[secondary_user]', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideNumberBlockingTestCases[secondary_user]', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideTvTests[secondary_user]', '--include-filter', 'CtsHostsideWebViewTests', '--include-filter', 'CtsHostsideWebViewTests[instant]', '--include-filter', 'CtsHostsideWebViewTests[secondary_user]', '--include-filter', 'CtsIcuTestCases', '--include-filter', 'CtsIcuTestCases[secondary_user]', '--include-filter', 'CtsIdentityTestCases', '--include-filter', 'CtsIdentityTestCases[secondary_user]', '--include-filter', 'CtsIkeTestCases', '--include-filter', 'CtsIkeTestCases[secondary_user]', '--include-filter', 'CtsIncidentHostTestCases', '--include-filter', 'CtsIncidentHostTestCases[instant]', '--include-filter', 'CtsIncrementalInstallHostTestCases', '--include-filter', 'CtsIncrementalInstallHostTestCases[secondary_user]', '--include-filter', 'CtsInitTestCases', '--include-filter', 'CtsInitTestCases[secondary_user]', '--include-filter', 'CtsInlineMockingTestCases', '--include-filter', 'CtsInlineMockingTestCases[instant]', '--include-filter', 'CtsInlineMockingTestCases[secondary_user]', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodServiceHostTestCases[instant]', '--include-filter', 'CtsInputMethodServiceHostTestCases[secondary_user]', '--include-filter', 'CtsInputMethodTestCases', '--include-filter', 'CtsInputMethodTestCases[instant]', '--include-filter', 'CtsInputMethodTestCases[secondary_user]', '--include-filter', 'CtsInstantAppTests', '--include-filter', 'CtsInstantAppTests[secondary_user]', '--include-filter', 'CtsIntentSignatureTestCases', '--include-filter', 'CtsIntentSignatureTestCases[secondary_user]', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsJdwpTestCases', '--include-filter', 'CtsJdwpTestCases[instant]', '--include-filter', 'CtsJdwpTestCases[secondary_user]', '--include-filter', 'CtsJdwpTunnelHostTestCases', '--include-filter', 'CtsJdwpTunnelHostTestCases[secondary_user]', '--include-filter', 'CtsJniTestCases', '--include-filter', 'CtsJniTestCases[instant]', '--include-filter', 'CtsJniTestCases[secondary_user]', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerSharedUidTestCases[secondary_user]', '--include-filter', 'CtsJobSchedulerTestCases', '--include-filter', 'CtsJobSchedulerTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases
new file mode 100644
index 0000000..bdef5e5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingHostTestCases[secondary_user], CtsJvmtiAttachingTestCases, CtsJvmtiAttachingTestCases[secondary_user], CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRedefineClassesHostTestCases[secondary_user], CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1900HostTestCases[secondary_user], CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1901HostTestCases[secondary_user], CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1902HostTestCases[secondary_user], CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1903HostTestCases[secondary_user], CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1904HostTestCases[secondary_user], CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1906HostTestCases[secondary_user], CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1907HostTestCases[secondary_user], CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1908HostTestCases[secondary_user], CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1909HostTestCases[secondary_user], CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1910HostTestCases[secondary_user], CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1911HostTestCases[secondary_user], CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1912HostTestCases[secondary_user], CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1913HostTestCases[secondary_user], CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1914HostTestCases[secondary_user], CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1915HostTestCases[secondary_user], CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1916HostTestCases[secondary_user], CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1917HostTestCases[secondary_user], CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1920HostTestCases[secondary_user], CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1921HostTestCases[secondary_user], CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1922HostTestCases[secondary_user], CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1923HostTestCases[secondary_user], CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1924HostTestCases[secondary_user], CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1925HostTestCases[secondary_user], CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1926HostTestCases[secondary_user], CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1927HostTestCases[secondary_user], CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1928HostTestCases[secondary_user], CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1930HostTestCases[secondary_user], CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1931HostTestCases[secondary_user], CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1932HostTestCases[secondary_user], CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1933HostTestCases[secondary_user], CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1934HostTestCases[secondary_user], CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1936HostTestCases[secondary_user], CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1937HostTestCases[secondary_user], CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1939HostTestCases[secondary_user], CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1941HostTestCases[secondary_user], CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1942HostTestCases[secondary_user], CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest1943HostTestCases[secondary_user], CtsJvmtiRunTest1953HostTestCases, CtsJvmtiRunTest1953HostTestCases[secondary_user], CtsJvmtiRunTest1958HostTestCases, CtsJvmtiRunTest1958HostTestCases[secondary_user], CtsJvmtiRunTest1962HostTestCases, CtsJvmtiRunTest1962HostTestCases[secondary_user], CtsJvmtiRunTest1967HostTestCases, CtsJvmtiRunTest1967HostTestCases[secondary_user], CtsJvmtiRunTest1968HostTestCases, CtsJvmtiRunTest1968HostTestCases[secondary_user], CtsJvmtiRunTest1969HostTestCases, CtsJvmtiRunTest1969HostTestCases[secondary_user], CtsJvmtiRunTest1970HostTestCases, CtsJvmtiRunTest1970HostTestCases[secondary_user], CtsJvmtiRunTest1971HostTestCases, CtsJvmtiRunTest1971HostTestCases[secondary_user], CtsJvmtiRunTest1974HostTestCases, CtsJvmtiRunTest1974HostTestCases[secondary_user], CtsJvmtiRunTest1975HostTestCases, CtsJvmtiRunTest1975HostTestCases[secondary_user], CtsJvmtiRunTest1976HostTestCases, CtsJvmtiRunTest1976HostTestCases[secondary_user], CtsJvmtiRunTest1977HostTestCases, CtsJvmtiRunTest1977HostTestCases[secondary_user], CtsJvmtiRunTest1978HostTestCases, CtsJvmtiRunTest1978HostTestCases[secondary_user], CtsJvmtiRunTest1979HostTestCases, CtsJvmtiRunTest1979HostTestCases[secondary_user], CtsJvmtiRunTest1981HostTestCases, CtsJvmtiRunTest1981HostTestCases[secondary_user], CtsJvmtiRunTest1982HostTestCases, CtsJvmtiRunTest1982HostTestCases[secondary_user], CtsJvmtiRunTest1983HostTestCases, CtsJvmtiRunTest1983HostTestCases[secondary_user], CtsJvmtiRunTest1984HostTestCases, CtsJvmtiRunTest1984HostTestCases[secondary_user], CtsJvmtiRunTest1988HostTestCases, CtsJvmtiRunTest1988HostTestCases[secondary_user], CtsJvmtiRunTest1989HostTestCases, CtsJvmtiRunTest1989HostTestCases[secondary_user], CtsJvmtiRunTest1990HostTestCases, CtsJvmtiRunTest1990HostTestCases[secondary_user], CtsJvmtiRunTest1991HostTestCases, CtsJvmtiRunTest1991HostTestCases[secondary_user], CtsJvmtiRunTest1992HostTestCases, CtsJvmtiRunTest1992HostTestCases[secondary_user], CtsJvmtiRunTest1994HostTestCases, CtsJvmtiRunTest1994HostTestCases[secondary_user], CtsJvmtiRunTest1995HostTestCases, CtsJvmtiRunTest1995HostTestCases[secondary_user], CtsJvmtiRunTest1996HostTestCases, CtsJvmtiRunTest1996HostTestCases[secondary_user], CtsJvmtiRunTest1997HostTestCases, CtsJvmtiRunTest1997HostTestCases[secondary_user], CtsJvmtiRunTest1998HostTestCases, CtsJvmtiRunTest1998HostTestCases[secondary_user], CtsJvmtiRunTest1999HostTestCases, CtsJvmtiRunTest1999HostTestCases[secondary_user], CtsJvmtiRunTest2001HostTestCases, CtsJvmtiRunTest2001HostTestCases[secondary_user], CtsJvmtiRunTest2002HostTestCases, CtsJvmtiRunTest2002HostTestCases[secondary_user], CtsJvmtiRunTest2003HostTestCases, CtsJvmtiRunTest2003HostTestCases[secondary_user], CtsJvmtiRunTest2004HostTestCases, CtsJvmtiRunTest2004HostTestCases[secondary_user], CtsJvmtiRunTest2005HostTestCases, CtsJvmtiRunTest2005HostTestCases[secondary_user], CtsJvmtiRunTest2006HostTestCases, CtsJvmtiRunTest2006HostTestCases[secondary_user], CtsJvmtiRunTest2007HostTestCases, CtsJvmtiRunTest2007HostTestCases[secondary_user], CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest902HostTestCases[secondary_user], CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest903HostTestCases[secondary_user], CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest904HostTestCases[secondary_user], CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest905HostTestCases[secondary_user], CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest906HostTestCases[secondary_user], CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest907HostTestCases[secondary_user], CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest908HostTestCases[secondary_user], CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest910HostTestCases[secondary_user], CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest911HostTestCases[secondary_user], CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest912HostTestCases[secondary_user], CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest913HostTestCases[secondary_user], CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest914HostTestCases[secondary_user], CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest915HostTestCases[secondary_user], CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest917HostTestCases[secondary_user], CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest918HostTestCases[secondary_user], CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest919HostTestCases[secondary_user], CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest920HostTestCases[secondary_user], CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest922HostTestCases[secondary_user], CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest923HostTestCases[secondary_user], CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest924HostTestCases[secondary_user], CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest926HostTestCases[secondary_user], CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest927HostTestCases[secondary_user], CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest928HostTestCases[secondary_user], CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest930HostTestCases[secondary_user], CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest931HostTestCases[secondary_user], CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest932HostTestCases[secondary_user], CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest940HostTestCases[secondary_user], CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest942HostTestCases[secondary_user], CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest944HostTestCases[secondary_user], CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest945HostTestCases[secondary_user], CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest947HostTestCases[secondary_user], CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest951HostTestCases[secondary_user], CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest982HostTestCases[secondary_user], CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest983HostTestCases[secondary_user], CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest984HostTestCases[secondary_user], CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest985HostTestCases[secondary_user], CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest986HostTestCases[secondary_user], CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest988HostTestCases[secondary_user], CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest989HostTestCases[secondary_user], CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest990HostTestCases[secondary_user], CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest991HostTestCases[secondary_user], CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest992HostTestCases[secondary_user], CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest993HostTestCases[secondary_user], CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest994HostTestCases[secondary_user], CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest995HostTestCases[secondary_user], CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest996HostTestCases[secondary_user], CtsJvmtiRunTest997HostTestCases, CtsJvmtiRunTest997HostTestCases[secondary_user], CtsJvmtiTaggingHostTestCases, CtsJvmtiTaggingHostTestCases[secondary_user], CtsJvmtiTrackingHostTestCases, CtsJvmtiTrackingHostTestCases[secondary_user], CtsKernelConfigTestCases, CtsKeystoreTestCases, CtsKeystoreTestCases[secondary_user], CtsLeanbackJankTestCases, CtsLeanbackJankTestCases[secondary_user], CtsLegacyNotification20TestCases, CtsLegacyNotification20TestCases[secondary_user], CtsLegacyNotification27TestCases, CtsLegacyNotification27TestCases[secondary_user], CtsLegacyNotification28TestCases, CtsLegacyNotification28TestCases[secondary_user], CtsLegacyNotification29TestCases, CtsLegacyNotification29TestCases[secondary_user], CtsLibcoreApiEvolutionTestCases, CtsLibcoreApiEvolutionTestCases[secondary_user], CtsLibcoreFileIOTestCases, CtsLibcoreFileIOTestCases[secondary_user], CtsLibcoreJsr166TestCases, CtsLibcoreJsr166TestCases[secondary_user], CtsLibcoreLegacy22TestCases, CtsLibcoreLegacy22TestCases[secondary_user], CtsLibcoreOjTestCases, CtsLibcoreOjTestCases[secondary_user], CtsLibcoreOkHttpTestCases, CtsLibcoreOkHttpTestCases[secondary_user], CtsLibcoreTestCases, CtsLibcoreTestCases[secondary_user], CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofBCTestCases[secondary_user], CtsLibcoreWycheproofConscryptTestCases, CtsLibcoreWycheproofConscryptTestCases[secondary_user], CtsLiblogTestCases, CtsLiblogTestCases[secondary_user], CtsLocationCoarseTestCases, CtsLocationCoarseTestCases[instant], CtsLocationCoarseTestCases[secondary_user], CtsLocationFineTestCases, CtsLocationFineTestCases[instant], CtsLocationFineTestCases[secondary_user], CtsLocationGnssTestCases, CtsLocationGnssTestCases[instant], CtsLocationGnssTestCases[secondary_user], CtsLocationNoneTestCases, CtsLocationNoneTestCases[instant], CtsLocationNoneTestCases[secondary_user], CtsLocationPrivilegedTestCases, CtsLocationPrivilegedTestCases[instant], CtsLocationPrivilegedTestCases[secondary_user], CtsLogdTestCases, CtsLogdTestCases[secondary_user], CtsMatchFlagTestCases, CtsMatchFlagTestCases[secondary_user], CtsMediaBitstreamsTestCases, CtsMediaBitstreamsTestCases[secondary_user], CtsMediaHostTestCases, CtsMediaHostTestCases[instant], CtsMediaParserTestCases, CtsMediaParserTestCases[secondary_user], CtsMediaPerformanceClassTestCases, CtsMediaPerformanceClassTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases[secondary_user]', '--include-filter', 
'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1981HostTestCases', '--include-filter', 
'CtsJvmtiRunTest1981HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', 
'--include-filter', 'CtsJvmtiRunTest908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 
'CtsJvmtiRunTest984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases[secondary_user]', '--include-filter', 'CtsKernelConfigTestCases', '--include-filter', 'CtsKeystoreTestCases', '--include-filter', 'CtsKeystoreTestCases[secondary_user]', '--include-filter', 'CtsLeanbackJankTestCases', '--include-filter', 'CtsLeanbackJankTestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification20TestCases', '--include-filter', 'CtsLegacyNotification20TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification27TestCases', '--include-filter', 'CtsLegacyNotification27TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification28TestCases', '--include-filter', 'CtsLegacyNotification28TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification29TestCases', '--include-filter', 'CtsLegacyNotification29TestCases[secondary_user]', '--include-filter', 'CtsLibcoreApiEvolutionTestCases', '--include-filter', 'CtsLibcoreApiEvolutionTestCases[secondary_user]', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreFileIOTestCases[secondary_user]', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreJsr166TestCases[secondary_user]', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases[secondary_user]', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOjTestCases[secondary_user]', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases[secondary_user]', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases[secondary_user]', '--include-filter', 
'CtsLibcoreWycheproofConscryptTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases[secondary_user]', '--include-filter', 'CtsLiblogTestCases', '--include-filter', 'CtsLiblogTestCases[secondary_user]', '--include-filter', 'CtsLocationCoarseTestCases', '--include-filter', 'CtsLocationCoarseTestCases[instant]', '--include-filter', 'CtsLocationCoarseTestCases[secondary_user]', '--include-filter', 'CtsLocationFineTestCases', '--include-filter', 'CtsLocationFineTestCases[instant]', '--include-filter', 'CtsLocationFineTestCases[secondary_user]', '--include-filter', 'CtsLocationGnssTestCases', '--include-filter', 'CtsLocationGnssTestCases[instant]', '--include-filter', 'CtsLocationGnssTestCases[secondary_user]', '--include-filter', 'CtsLocationNoneTestCases', '--include-filter', 'CtsLocationNoneTestCases[instant]', '--include-filter', 'CtsLocationNoneTestCases[secondary_user]', '--include-filter', 'CtsLocationPrivilegedTestCases', '--include-filter', 'CtsLocationPrivilegedTestCases[instant]', '--include-filter', 'CtsLocationPrivilegedTestCases[secondary_user]', '--include-filter', 'CtsLogdTestCases', '--include-filter', 'CtsLogdTestCases[secondary_user]', '--include-filter', 'CtsMatchFlagTestCases', '--include-filter', 'CtsMatchFlagTestCases[secondary_user]', '--include-filter', 'CtsMediaBitstreamsTestCases', '--include-filter', 'CtsMediaBitstreamsTestCases[secondary_user]', '--include-filter', 'CtsMediaHostTestCases', '--include-filter', 'CtsMediaHostTestCases[instant]', '--include-filter', 'CtsMediaParserTestCases', '--include-filter', 'CtsMediaParserTestCases[secondary_user]', '--include-filter', 'CtsMediaPerformanceClassTestCases', '--include-filter', 'CtsMediaPerformanceClassTestCases[secondary_user]'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
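
All of the generated cheets_CTS_R controls in this change share the same shape: each CTS module name (optionally parameterized, e.g. 'CtsOsTestCases[instant]' or '...[secondary_user]') becomes a '--include-filter <module>' pair on the tradefed 'run commandAndExit cts' command line, the identical retry_template replays a failed session by its id, and timeout=172800 seconds corresponds to 48 hours. The helper below is only an illustrative sketch (build_cts_run_template is a hypothetical name, not part of autotest or of the generator that produced these files) of how such a run_template could be assembled from a module list.

# Illustrative sketch only; build_cts_run_template is a hypothetical helper,
# not part of autotest or of the generator that produced these control files.
def build_cts_run_template(modules, extra_flags=()):
    """Assemble a tradefed command list with the same shape as run_template."""
    template = ['run', 'commandAndExit', 'cts']
    for module in modules:
        # Parameterized variants such as 'Module[secondary_user]' are passed
        # through verbatim, exactly as they appear in the generated templates.
        template += ['--include-filter', module]
    return template + list(extra_flags)

# Reproduces the shape of the CtsMediaStressTestCases template further below.
print(build_cts_run_template(
        ['CtsMediaStressTestCases', 'CtsMediaStressTestCases[secondary_user]'],
        extra_flags=['--logcat-on-failure']))
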
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
new file mode 100644
index 0000000..94cef6d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases, CtsMediaStressTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases', '--include-filter', 'CtsMediaStressTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32 b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32
new file mode 100644
index 0000000..7e47f91
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases, CtsMediaTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32',
+        test_name='cheets_CTS_R.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'armeabi-v7a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaTestCases_-_CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
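
The .32 and .64 siblings of CtsMediaTestCases differ mainly in the pinned ABI ('--abi armeabi-v7a' above versus 'arm64-v8a' in the next file), and both add a login_precondition_commands shell pipeline that ejects removable block devices before login: lsblk -do NAME,RM lists whole devices with their removable flag, sed -n s/1$//p keeps only the names whose flag is 1 (stripping the flag), and xargs -n1 eject ejects each remaining device. The snippet below is a conceptual re-expression of that one-liner in Python, not code that autotest runs; it assumes the lsblk and eject binaries are available on the device.

# Conceptual sketch only: mirrors the login_precondition_commands pipeline
# ('lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject') in plain Python.
# Assumes the lsblk and eject binaries exist on the device under test.
import subprocess

def eject_removable_block_devices():
    """Eject every removable whole block device reported by lsblk."""
    listing = subprocess.run(['lsblk', '-do', 'NAME,RM'],
                             capture_output=True, text=True, check=True).stdout
    for line in listing.splitlines():
        fields = line.split()
        # lsblk prints a 'NAME RM' header first; data rows end with 0 or 1.
        if len(fields) == 2 and fields[1] == '1':
            subprocess.run(['eject', fields[0]], check=False)
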
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64 b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64
new file mode 100644
index 0000000..f217bd7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases, CtsMediaTestCases[instant] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64',
+        test_name='cheets_CTS_R.internal.arm.all.CtsMediaTestCases_-_CtsMediaTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--include-filter', 'CtsMediaTestCases[instant]', '--logcat-on-failure', '--abi', 'arm64-v8a'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaTestCases_-_CtsMediaTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaV2TestCases_-_CtsProtoTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaV2TestCases_-_CtsProtoTestCases
new file mode 100644
index 0000000..e0144a2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsMediaV2TestCases_-_CtsProtoTestCases
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsMediaV2TestCases_-_CtsProtoTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaV2TestCases, CtsMidiTestCases, CtsMimeMapTestCases, CtsMimeMapTestCases[secondary_user], CtsMockingDebuggableTestCases, CtsMockingDebuggableTestCases[instant], CtsMockingDebuggableTestCases[secondary_user], CtsMockingTestCases, CtsMockingTestCases[instant], CtsMockingTestCases[secondary_user], CtsMonkeyTestCases, CtsMonkeyTestCases[secondary_user], CtsMultiUserHostTestCases, CtsMultiUserHostTestCases[instant], CtsMultiUserHostTestCases[secondary_user], CtsMultiUserTestCases, CtsMultiUserTestCases[instant], CtsMultiUserTestCases[secondary_user], CtsNNAPIBenchmarkTestCases, CtsNNAPIBenchmarkTestCases[instant], CtsNNAPIBenchmarkTestCases[secondary_user], CtsNNAPITestCases, CtsNNAPITestCases[secondary_user], CtsNativeEncryptionTestCases, CtsNativeEncryptionTestCases[instant], CtsNativeEncryptionTestCases[secondary_user], CtsNativeHardwareTestCases, CtsNativeHardwareTestCases[secondary_user], CtsNativeMediaAAudioTestCases, CtsNativeMediaAAudioTestCases[instant], CtsNativeMediaAAudioTestCases[secondary_user], CtsNativeMediaMetricsTestCases, CtsNativeMediaMetricsTestCases[instant], CtsNativeMediaMetricsTestCases[secondary_user], CtsNativeMediaSlTestCases, CtsNativeMediaSlTestCases[instant], CtsNativeMediaSlTestCases[secondary_user], CtsNativeMediaXaTestCases, CtsNativeMediaXaTestCases[instant], CtsNativeMediaXaTestCases[secondary_user], CtsNativeMidiTestCases, CtsNativeMidiTestCases[secondary_user], CtsNativeNetDnsTestCases, CtsNativeNetDnsTestCases[instant], CtsNativeNetDnsTestCases[secondary_user], CtsNativeNetTestCases, CtsNativeNetTestCases[instant], CtsNativeNetTestCases[secondary_user], CtsNdefTestCases, CtsNdefTestCases[secondary_user], CtsNdkBinderTestCases, CtsNdkBinderTestCases[instant], CtsNdkBinderTestCases[secondary_user], CtsNetApi23TestCases, CtsNetApi23TestCases[secondary_user], CtsNetSecConfigAttributeTestCases, CtsNetSecConfigAttributeTestCases[instant], CtsNetSecConfigAttributeTestCases[secondary_user], CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugDisabledTestCases[instant], CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user], CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases[instant], CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user], CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigBasicDomainConfigTestCases[instant], CtsNetSecConfigBasicDomainConfigTestCases[secondary_user], CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigCleartextTrafficTestCases[instant], CtsNetSecConfigCleartextTrafficTestCases[secondary_user], CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigDownloadManagerTestCases[secondary_user], CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigInvalidPinTestCases[instant], CtsNetSecConfigInvalidPinTestCases[secondary_user], CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigNestedDomainConfigTestCases[instant], CtsNetSecConfigNestedDomainConfigTestCases[secondary_user], CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user], CtsNetSecConfigResourcesSrcTestCases, CtsNetSecConfigResourcesSrcTestCases[instant], CtsNetSecConfigResourcesSrcTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, 
CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user], CtsNetTestCases, CtsNetTestCasesInternetPermission, CtsNetTestCasesInternetPermission[instant], CtsNetTestCasesInternetPermission[secondary_user], CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyApi22[secondary_user], CtsNetTestCasesLegacyPermission22, CtsNetTestCasesLegacyPermission22[secondary_user], CtsNetTestCasesUpdateStatsPermission, CtsNetTestCasesUpdateStatsPermission[instant], CtsNetTestCasesUpdateStatsPermission[secondary_user], CtsNetTestCases[instant], CtsNetTestCases[secondary_user], CtsNfcTestCases, CtsNfcTestCases[secondary_user], CtsNoPermissionTestCases, CtsNoPermissionTestCases25, CtsNoPermissionTestCases25[secondary_user], CtsNoPermissionTestCases[secondary_user], CtsOmapiTestCases, CtsOmapiTestCases[secondary_user], CtsOpenGLTestCases, CtsOpenGLTestCases[secondary_user], CtsOpenGlPerf2TestCases, CtsOpenGlPerf2TestCases[secondary_user], CtsOpenGlPerfTestCases, CtsOsHostTestCases, CtsOsHostTestCases[instant], CtsOsHostTestCases[secondary_user], CtsOsTestCases, CtsOsTestCases[instant], CtsPackageInstallAppOpDefaultTestCases, CtsPackageInstallAppOpDefaultTestCases[instant], CtsPackageInstallAppOpDefaultTestCases[secondary_user], CtsPackageInstallAppOpDeniedTestCases, CtsPackageInstallAppOpDeniedTestCases[instant], CtsPackageInstallAppOpDeniedTestCases[secondary_user], CtsPackageInstallTestCases, CtsPackageInstallTestCases[instant], CtsPackageInstallTestCases[secondary_user], CtsPackageInstallerTapjackingTestCases, CtsPackageInstallerTapjackingTestCases[secondary_user], CtsPackageUninstallTestCases, CtsPackageUninstallTestCases[secondary_user], CtsPackageWatchdogTestCases, CtsPackageWatchdogTestCases[secondary_user], CtsPdfTestCases, CtsPdfTestCases[instant], CtsPdfTestCases[secondary_user], CtsPerfettoTestCases, CtsPerfettoTestCases[secondary_user], CtsPermission2TestCases, CtsPermission2TestCases[instant], CtsPermission3TestCases, CtsPermission3TestCases[secondary_user], CtsPermissionTestCases, CtsPermissionTestCasesSdk28, CtsPermissionTestCasesSdk28[instant], CtsPermissionTestCasesSdk28[secondary_user], CtsPermissionTestCasesTelephony, CtsPermissionTestCasesTelephony[instant], CtsPermissionTestCasesTelephony[secondary_user], CtsPermissionTestCases[instant], CtsPreferenceTestCases, CtsPreferenceTestCases[instant], CtsPreferenceTestCases[secondary_user], CtsPrintTestCases, CtsPrintTestCases[instant], CtsPrintTestCases[secondary_user], CtsProtoTestCases, CtsProtoTestCases[instant], CtsProtoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        use_helpers=True,
+        tag='internal.arm.all.CtsMediaV2TestCases_-_CtsProtoTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsMediaV2TestCases_-_CtsProtoTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaV2TestCases', '--include-filter', 'CtsMidiTestCases', '--include-filter', 'CtsMimeMapTestCases', '--include-filter', 'CtsMimeMapTestCases[secondary_user]', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingDebuggableTestCases[instant]', '--include-filter', 'CtsMockingDebuggableTestCases[secondary_user]', '--include-filter', 'CtsMockingTestCases', '--include-filter', 'CtsMockingTestCases[instant]', '--include-filter', 'CtsMockingTestCases[secondary_user]', '--include-filter', 'CtsMonkeyTestCases', '--include-filter', 'CtsMonkeyTestCases[secondary_user]', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserHostTestCases[instant]', '--include-filter', 'CtsMultiUserHostTestCases[secondary_user]', '--include-filter', 'CtsMultiUserTestCases', '--include-filter', 'CtsMultiUserTestCases[instant]', '--include-filter', 'CtsMultiUserTestCases[secondary_user]', '--include-filter', 'CtsNNAPIBenchmarkTestCases', '--include-filter', 'CtsNNAPIBenchmarkTestCases[instant]', '--include-filter', 'CtsNNAPIBenchmarkTestCases[secondary_user]', '--include-filter', 'CtsNNAPITestCases', '--include-filter', 'CtsNNAPITestCases[secondary_user]', '--include-filter', 'CtsNativeEncryptionTestCases', '--include-filter', 'CtsNativeEncryptionTestCases[instant]', '--include-filter', 'CtsNativeEncryptionTestCases[secondary_user]', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeHardwareTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases[instant]', '--include-filter', 'CtsNativeMediaAAudioTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaMetricsTestCases', '--include-filter', 'CtsNativeMediaMetricsTestCases[instant]', '--include-filter', 'CtsNativeMediaMetricsTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaSlTestCases[instant]', '--include-filter', 'CtsNativeMediaSlTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeMediaXaTestCases[instant]', '--include-filter', 'CtsNativeMediaXaTestCases[secondary_user]', '--include-filter', 'CtsNativeMidiTestCases', '--include-filter', 'CtsNativeMidiTestCases[secondary_user]', '--include-filter', 'CtsNativeNetDnsTestCases', '--include-filter', 'CtsNativeNetDnsTestCases[instant]', '--include-filter', 'CtsNativeNetDnsTestCases[secondary_user]', '--include-filter', 'CtsNativeNetTestCases', '--include-filter', 'CtsNativeNetTestCases[instant]', '--include-filter', 'CtsNativeNetTestCases[secondary_user]', '--include-filter', 'CtsNdefTestCases', '--include-filter', 'CtsNdefTestCases[secondary_user]', '--include-filter', 'CtsNdkBinderTestCases', '--include-filter', 'CtsNdkBinderTestCases[instant]', '--include-filter', 'CtsNdkBinderTestCases[secondary_user]', '--include-filter', 'CtsNetApi23TestCases', '--include-filter', 'CtsNetApi23TestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigAttributeTestCases[instant]', '--include-filter', 'CtsNetSecConfigAttributeTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user]', '--include-filter', 
'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[instant]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[instant]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[instant]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user]', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesInternetPermission', '--include-filter', 'CtsNetTestCasesInternetPermission[instant]', '--include-filter', 'CtsNetTestCasesInternetPermission[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyApi22[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--include-filter', 'CtsNetTestCasesLegacyPermission22[secondary_user]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[instant]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[secondary_user]', '--include-filter', 'CtsNetTestCases[instant]', '--include-filter', 'CtsNetTestCases[secondary_user]', '--include-filter', 'CtsNfcTestCases', '--include-filter', 'CtsNfcTestCases[secondary_user]', '--include-filter', 'CtsNoPermissionTestCases', '--include-filter', 'CtsNoPermissionTestCases25', '--include-filter', 'CtsNoPermissionTestCases25[secondary_user]', '--include-filter', 'CtsNoPermissionTestCases[secondary_user]', '--include-filter', 'CtsOmapiTestCases', '--include-filter', 'CtsOmapiTestCases[secondary_user]', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGLTestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerf2TestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerfTestCases', '--include-filter', 
'CtsOsHostTestCases', '--include-filter', 'CtsOsHostTestCases[instant]', '--include-filter', 'CtsOsHostTestCases[secondary_user]', '--include-filter', 'CtsOsTestCases', '--include-filter', 'CtsOsTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallTestCases', '--include-filter', 'CtsPackageInstallTestCases[instant]', '--include-filter', 'CtsPackageInstallTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallerTapjackingTestCases', '--include-filter', 'CtsPackageInstallerTapjackingTestCases[secondary_user]', '--include-filter', 'CtsPackageUninstallTestCases', '--include-filter', 'CtsPackageUninstallTestCases[secondary_user]', '--include-filter', 'CtsPackageWatchdogTestCases', '--include-filter', 'CtsPackageWatchdogTestCases[secondary_user]', '--include-filter', 'CtsPdfTestCases', '--include-filter', 'CtsPdfTestCases[instant]', '--include-filter', 'CtsPdfTestCases[secondary_user]', '--include-filter', 'CtsPerfettoTestCases', '--include-filter', 'CtsPerfettoTestCases[secondary_user]', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermission2TestCases[instant]', '--include-filter', 'CtsPermission3TestCases', '--include-filter', 'CtsPermission3TestCases[secondary_user]', '--include-filter', 'CtsPermissionTestCases', '--include-filter', 'CtsPermissionTestCasesSdk28', '--include-filter', 'CtsPermissionTestCasesSdk28[instant]', '--include-filter', 'CtsPermissionTestCasesSdk28[secondary_user]', '--include-filter', 'CtsPermissionTestCasesTelephony', '--include-filter', 'CtsPermissionTestCasesTelephony[instant]', '--include-filter', 'CtsPermissionTestCasesTelephony[secondary_user]', '--include-filter', 'CtsPermissionTestCases[instant]', '--include-filter', 'CtsPreferenceTestCases', '--include-filter', 'CtsPreferenceTestCases[instant]', '--include-filter', 'CtsPreferenceTestCases[secondary_user]', '--include-filter', 'CtsPrintTestCases', '--include-filter', 'CtsPrintTestCases[instant]', '--include-filter', 'CtsPrintTestCases[secondary_user]', '--include-filter', 'CtsProtoTestCases', '--include-filter', 'CtsProtoTestCases[instant]', '--include-filter', 'CtsProtoTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaV2TestCases_-_CtsProtoTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases
new file mode 100644
index 0000000..b0bf1cf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsProviderTestCases, CtsProviderTestCases[secondary_user], CtsProviderUiTestCases, CtsQuickAccessWalletTestCases, CtsQuickAccessWalletTestCases[instant], CtsQuickAccessWalletTestCases[secondary_user], CtsRenderscriptLegacyTestCases, CtsRenderscriptLegacyTestCases[secondary_user], CtsRenderscriptTestCases, CtsRenderscriptTestCases[instant], CtsRenderscriptTestCases[secondary_user], CtsResolverServiceTestCases, CtsResolverServiceTestCases[secondary_user], CtsResourcesLoaderTests, CtsResourcesLoaderTests[secondary_user], CtsRoleTestCases, CtsRoleTestCases[secondary_user], CtsRollbackManagerHostTestCases, CtsRsBlasTestCases, CtsRsBlasTestCases[secondary_user], CtsRsCppTestCases, CtsRsCppTestCases[secondary_user], CtsSampleDeviceTestCases, CtsSampleDeviceTestCases[instant], CtsSampleDeviceTestCases[secondary_user], CtsSampleHostTestCases, CtsSampleHostTestCases[secondary_user], CtsSaxTestCases, CtsSaxTestCases[secondary_user], CtsScopedStorageHostTest, CtsScopedStorageHostTest[instant], CtsSdkExtensionsTestCases, CtsSdkExtensionsTestCases[instant], CtsSdkExtensionsTestCases[secondary_user], CtsSeccompHostTestCases, CtsSeccompHostTestCases[instant], CtsSeccompHostTestCases[secondary_user], CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases1[secondary_user], CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases2[secondary_user], CtsSecureElementAccessControlTestCases3, CtsSecureElementAccessControlTestCases3[secondary_user], CtsSecureFrpInstallTestCases, CtsSecureFrpInstallTestCases[secondary_user], CtsSecurityBulletinHostTestCases, CtsSecurityBulletinHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsProviderTestCases', '--include-filter', 'CtsProviderTestCases[secondary_user]', '--include-filter', 'CtsProviderUiTestCases', '--include-filter', 'CtsQuickAccessWalletTestCases', '--include-filter', 'CtsQuickAccessWalletTestCases[instant]', '--include-filter', 'CtsQuickAccessWalletTestCases[secondary_user]', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptLegacyTestCases[secondary_user]', '--include-filter', 'CtsRenderscriptTestCases', '--include-filter', 'CtsRenderscriptTestCases[instant]', '--include-filter', 'CtsRenderscriptTestCases[secondary_user]', '--include-filter', 'CtsResolverServiceTestCases', '--include-filter', 'CtsResolverServiceTestCases[secondary_user]', '--include-filter', 'CtsResourcesLoaderTests', '--include-filter', 'CtsResourcesLoaderTests[secondary_user]', '--include-filter', 'CtsRoleTestCases', '--include-filter', 'CtsRoleTestCases[secondary_user]', '--include-filter', 'CtsRollbackManagerHostTestCases', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsBlasTestCases[secondary_user]', '--include-filter', 'CtsRsCppTestCases', '--include-filter', 'CtsRsCppTestCases[secondary_user]', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleDeviceTestCases[instant]', '--include-filter', 'CtsSampleDeviceTestCases[secondary_user]', '--include-filter', 'CtsSampleHostTestCases', '--include-filter', 'CtsSampleHostTestCases[secondary_user]', '--include-filter', 'CtsSaxTestCases', '--include-filter', 'CtsSaxTestCases[secondary_user]', '--include-filter', 'CtsScopedStorageHostTest', '--include-filter', 'CtsScopedStorageHostTest[instant]', '--include-filter', 'CtsSdkExtensionsTestCases', '--include-filter', 'CtsSdkExtensionsTestCases[instant]', '--include-filter', 'CtsSdkExtensionsTestCases[secondary_user]', '--include-filter', 'CtsSeccompHostTestCases', '--include-filter', 'CtsSeccompHostTestCases[instant]', '--include-filter', 'CtsSeccompHostTestCases[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--include-filter', 'CtsSecureElementAccessControlTestCases3[secondary_user]', '--include-filter', 'CtsSecureFrpInstallTestCases', '--include-filter', 'CtsSecureFrpInstallTestCases[secondary_user]', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--include-filter', 'CtsSecurityBulletinHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
new file mode 100644
index 0000000..a7dc725
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityHostTestCases, CtsSecurityHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityHostTestCases', '--include-filter', 'CtsSecurityHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
new file mode 100644
index 0000000..2bcc5b4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityTestCases, CtsSecurityTestCases[instant], CtsSecurityTestCases[secondary_user], CtsSelinuxEphemeralTestCases, CtsSelinuxEphemeralTestCases[instant], CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk25TestCases[secondary_user], CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdk27TestCases[secondary_user], CtsSelinuxTargetSdk28TestCases, CtsSelinuxTargetSdk28TestCases[secondary_user], CtsSelinuxTargetSdk29TestCases, CtsSelinuxTargetSdk29TestCases[secondary_user], CtsSelinuxTargetSdkCurrentTestCases, CtsSelinuxTargetSdkCurrentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityTestCases', '--include-filter', 'CtsSecurityTestCases[instant]', '--include-filter', 'CtsSecurityTestCases[secondary_user]', '--include-filter', 'CtsSelinuxEphemeralTestCases', '--include-filter', 'CtsSelinuxEphemeralTestCases[instant]', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk25TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk28TestCases', '--include-filter', 'CtsSelinuxTargetSdk28TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk29TestCases', '--include-filter', 'CtsSelinuxTargetSdk29TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases
new file mode 100644
index 0000000..c62e510
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases
@@ -0,0 +1,49 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases, CtsSensorTestCases[instant], CtsSensorTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsSensorTestCases_-_CtsSensorTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases', '--include-filter', 'CtsSensorTestCases[instant]', '--include-filter', 'CtsSensorTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSensorTestCases_-_CtsSensorTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        hard_reboot_on_failure=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
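
Unlike the other controls in this group, the CtsSensorTestCases control above imports server_utils, turns any extra command-line arguments into a dict, derives servo connection arguments from it, and falls back to a plain hosts.create_host(machine) when servo setup fails; the hard_reboot_on_failure=True flag, going by its name, lets the harness recover the DUT more aggressively after a failed run. The sketch below is only a conceptual illustration of the 'key=value' parsing that server_utils.args_to_dict performs; the real helper may differ in details, and the servo_host value shown is a made-up placeholder.

# Conceptual sketch only; not the real server_utils.args_to_dict implementation.
# Assumes `args` is a list of 'key=value' strings (for example, supplied to the
# control file via test_that's --args option).
import re

def args_to_dict_sketch(args):
    """Turn ['servo_host=foo', 'servo_port=9999'] into a plain dict."""
    parsed = {}
    for arg in args:
        match = re.match(r'^(\w+)=(.*)$', arg)
        if match:
            parsed[match.group(1).lower()] = match.group(2)
    return parsed

# hosts.CrosHost.get_servo_arguments() then picks the servo_* entries out of
# such a dict; if servo is unreachable, the try/except above simply creates the
# host without servo_args.
print(args_to_dict_sketch(['servo_host=placeholder-labstation',
                           'servo_port=9999']))
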
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases
new file mode 100644
index 0000000..b549ab5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSettingsHostTestCases, CtsSettingsTestCases, CtsSettingsTestCases[secondary_user], CtsSharedLibsApiSignatureTestCases, CtsSharedLibsApiSignatureTestCases[instant], CtsSharedLibsApiSignatureTestCases[secondary_user], CtsSharesheetTestCases, CtsSharesheetTestCases[secondary_user], CtsShortcutHostTestCases, CtsShortcutManagerLauncher1, CtsShortcutManagerLauncher1[secondary_user], CtsShortcutManagerLauncher2, CtsShortcutManagerLauncher2[secondary_user], CtsShortcutManagerLauncher3, CtsShortcutManagerLauncher3[secondary_user], CtsShortcutManagerLauncher4, CtsShortcutManagerLauncher4[secondary_user], CtsShortcutManagerPackage1, CtsShortcutManagerPackage1[secondary_user], CtsShortcutManagerPackage2, CtsShortcutManagerPackage2[secondary_user], CtsShortcutManagerPackage3, CtsShortcutManagerPackage3[secondary_user], CtsShortcutManagerPackage4, CtsShortcutManagerPackage4[secondary_user], CtsShortcutManagerTestCases, CtsShortcutManagerTestCases[secondary_user], CtsShortcutManagerThrottlingTest, CtsShortcutManagerThrottlingTest[secondary_user], CtsSignedConfigHostTestCases, CtsSignedConfigHostTestCases[secondary_user], CtsSimRestrictedApisTestCases, CtsSimRestrictedApisTestCases[secondary_user], CtsSimpleCpuTestCases, CtsSimpleCpuTestCases[secondary_user], CtsSimpleperfTestCases, CtsSkQPTestCases, CtsSkQPTestCases[secondary_user], CtsSliceTestCases, CtsSliceTestCases[secondary_user], CtsSoundTriggerTestCases, CtsSoundTriggerTestCases[instant], CtsSoundTriggerTestCases[secondary_user], CtsSpeechTestCases, CtsSpeechTestCases[secondary_user], CtsStagedInstallHostTestCases, CtsStatsdHostTestCases, CtsStatsdHostTestCases[instant], CtsStatsdHostTestCases[secondary_user], CtsStrictJavaPackagesTestCases, CtsStrictJavaPackagesTestCases[secondary_user], CtsSuspendAppsPermissionTestCases, CtsSuspendAppsPermissionTestCases[secondary_user], CtsSuspendAppsTestCases, CtsSuspendAppsTestCases[secondary_user], CtsSustainedPerformanceHostTestCases, CtsSustainedPerformanceHostTestCases[secondary_user], CtsSyncAccountAccessOtherCertTestCases, CtsSyncAccountAccessOtherCertTestCases[secondary_user], CtsSyncContentHostTestCases, CtsSyncContentHostTestCases[secondary_user], CtsSyncManagerTestsCases, CtsSyncManagerTestsCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSettingsHostTestCases', '--include-filter', 'CtsSettingsTestCases', '--include-filter', 'CtsSettingsTestCases[secondary_user]', '--include-filter', 'CtsSharedLibsApiSignatureTestCases', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[instant]', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsSharesheetTestCases', '--include-filter', 'CtsSharesheetTestCases[secondary_user]', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerLauncher1', '--include-filter', 'CtsShortcutManagerLauncher1[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher2', '--include-filter', 'CtsShortcutManagerLauncher2[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher3', '--include-filter', 'CtsShortcutManagerLauncher3[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher4', '--include-filter', 'CtsShortcutManagerLauncher4[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage1', '--include-filter', 'CtsShortcutManagerPackage1[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage2', '--include-filter', 'CtsShortcutManagerPackage2[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage3', '--include-filter', 'CtsShortcutManagerPackage3[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage4', '--include-filter', 'CtsShortcutManagerPackage4[secondary_user]', '--include-filter', 'CtsShortcutManagerTestCases', '--include-filter', 'CtsShortcutManagerTestCases[secondary_user]', '--include-filter', 'CtsShortcutManagerThrottlingTest', '--include-filter', 'CtsShortcutManagerThrottlingTest[secondary_user]', '--include-filter', 'CtsSignedConfigHostTestCases', '--include-filter', 'CtsSignedConfigHostTestCases[secondary_user]', '--include-filter', 'CtsSimRestrictedApisTestCases', '--include-filter', 'CtsSimRestrictedApisTestCases[secondary_user]', '--include-filter', 'CtsSimpleCpuTestCases', '--include-filter', 'CtsSimpleCpuTestCases[secondary_user]', '--include-filter', 'CtsSimpleperfTestCases', '--include-filter', 'CtsSkQPTestCases', '--include-filter', 'CtsSkQPTestCases[secondary_user]', '--include-filter', 'CtsSliceTestCases', '--include-filter', 'CtsSliceTestCases[secondary_user]', '--include-filter', 'CtsSoundTriggerTestCases', '--include-filter', 'CtsSoundTriggerTestCases[instant]', '--include-filter', 'CtsSoundTriggerTestCases[secondary_user]', '--include-filter', 'CtsSpeechTestCases', '--include-filter', 'CtsSpeechTestCases[secondary_user]', '--include-filter', 'CtsStagedInstallHostTestCases', '--include-filter', 'CtsStatsdHostTestCases', '--include-filter', 'CtsStatsdHostTestCases[instant]', '--include-filter', 'CtsStatsdHostTestCases[secondary_user]', '--include-filter', 'CtsStrictJavaPackagesTestCases', '--include-filter', 'CtsStrictJavaPackagesTestCases[secondary_user]', '--include-filter', 'CtsSuspendAppsPermissionTestCases', '--include-filter', 'CtsSuspendAppsPermissionTestCases[secondary_user]', '--include-filter', 'CtsSuspendAppsTestCases', '--include-filter', 'CtsSuspendAppsTestCases[secondary_user]', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--include-filter', 'CtsSustainedPerformanceHostTestCases[secondary_user]', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases[secondary_user]', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncContentHostTestCases[secondary_user]', '--include-filter', 'CtsSyncManagerTestsCases', '--include-filter', 'CtsSyncManagerTestsCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        use_old_adb=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases
new file mode 100644
index 0000000..94e0658
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSystemApiAnnotationTestCases, CtsSystemApiAnnotationTestCases[secondary_user], CtsSystemApiSignatureTestCases, CtsSystemApiSignatureTestCases[secondary_user], CtsSystemIntentTestCases, CtsSystemIntentTestCases[secondary_user], CtsSystemUiHostTestCases, CtsSystemUiHostTestCases[instant], CtsSystemUiHostTestCases[secondary_user], CtsSystemUiTestCases, CtsSystemUiTestCases[instant], CtsSystemUiTestCases[secondary_user], CtsTaggingHostTestCases, CtsTaggingHostTestCases[instant], CtsTaggingHostTestCases[secondary_user], CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases2[secondary_user], CtsTelecomTestCases3, CtsTelecomTestCases3[secondary_user], CtsTelecomTestCases[secondary_user], CtsTelephony2TestCases, CtsTelephony2TestCases[instant], CtsTelephony2TestCases[secondary_user], CtsTelephony3TestCases, CtsTelephony3TestCases[secondary_user], CtsTelephonyHostCases, CtsTelephonyHostCases[secondary_user], CtsTelephonyProviderHostCases, CtsTelephonyProviderHostCases[secondary_user], CtsTelephonyProviderTestCases, CtsTelephonyProviderTestCases[secondary_user], CtsTelephonySdk28TestCases, CtsTelephonySdk28TestCases[secondary_user], CtsTelephonyTestCases, CtsTelephonyTestCasesPermissionReadPhoneState, CtsTelephonyTestCasesPermissionReadPhoneState[instant], CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user], CtsTestHarnessModeTestCases, CtsTestHarnessModeTestCases[secondary_user], CtsTetheringTest, CtsTetheringTest[secondary_user], CtsTextClassifierTestCases, CtsTextClassifierTestCases[secondary_user], CtsTextTestCases, CtsTextTestCases[instant], CtsTextTestCases[secondary_user], CtsTfliteNnapiDelegateTestCases, CtsTfliteNnapiDelegateTestCases[secondary_user], CtsThemeDeviceTestCases, CtsThemeDeviceTestCases[secondary_user], CtsThemeHostTestCases, CtsThemeHostTestCases[secondary_user], CtsThermalTestCases, CtsThermalTestCases[secondary_user], CtsToastLegacyTestCases, CtsToastLegacyTestCases[secondary_user], CtsToastTestCases, CtsToastTestCases[instant], CtsToastTestCases[secondary_user], CtsTransitionTestCases, CtsTransitionTestCases[secondary_user], CtsTrustedVoiceHostTestCases, CtsTrustedVoiceHostTestCases[secondary_user], CtsTvProviderTestCases, CtsTvProviderTestCases[secondary_user], CtsTvTestCases, CtsTvTestCases[secondary_user], CtsUiAutomationTestCases, CtsUiAutomationTestCases[instant], CtsUiAutomationTestCases[secondary_user], CtsUiRenderingTestCases, CtsUiRenderingTestCases27, CtsUiRenderingTestCases27[instant], CtsUiRenderingTestCases27[secondary_user], CtsUiRenderingTestCases[instant], CtsUiRenderingTestCases[secondary_user], CtsUidIsolationTestCases, CtsUidIsolationTestCases[instant], CtsUidIsolationTestCases[secondary_user], CtsUsageStatsTestCases, CtsUsageStatsTestCases[instant], CtsUsageStatsTestCases[secondary_user], CtsUsbManagerTestCases, CtsUsbManagerTestCases[secondary_user], CtsUsbTests, CtsUsbTests[instant], CtsUsbTests[secondary_user], CtsUsesLibraryHostTestCases, CtsUsesLibraryHostTestCases[secondary_user], CtsUtilTestCases, CtsUtilTestCases[instant], CtsUtilTestCases[secondary_user], CtsVideoTestCases, CtsVideoTestCases[secondary_user], CtsViewInspectorAnnotationProcessorTestCases, CtsViewInspectorAnnotationProcessorTestCases[instant], CtsViewInspectorAnnotationProcessorTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiAnnotationTestCases[secondary_user]', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemIntentTestCases[secondary_user]', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiHostTestCases[instant]', '--include-filter', 'CtsSystemUiHostTestCases[secondary_user]', '--include-filter', 'CtsSystemUiTestCases', '--include-filter', 'CtsSystemUiTestCases[instant]', '--include-filter', 'CtsSystemUiTestCases[secondary_user]', '--include-filter', 'CtsTaggingHostTestCases', '--include-filter', 'CtsTaggingHostTestCases[instant]', '--include-filter', 'CtsTaggingHostTestCases[secondary_user]', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases2[secondary_user]', '--include-filter', 'CtsTelecomTestCases3', '--include-filter', 'CtsTelecomTestCases3[secondary_user]', '--include-filter', 'CtsTelecomTestCases[secondary_user]', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephony2TestCases[instant]', '--include-filter', 'CtsTelephony2TestCases[secondary_user]', '--include-filter', 'CtsTelephony3TestCases', '--include-filter', 'CtsTelephony3TestCases[secondary_user]', '--include-filter', 'CtsTelephonyHostCases', '--include-filter', 'CtsTelephonyHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderHostCases', '--include-filter', 'CtsTelephonyProviderHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderTestCases', '--include-filter', 'CtsTelephonyProviderTestCases[secondary_user]', '--include-filter', 'CtsTelephonySdk28TestCases', '--include-filter', 'CtsTelephonySdk28TestCases[secondary_user]', '--include-filter', 'CtsTelephonyTestCases', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[instant]', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user]', '--include-filter', 'CtsTestHarnessModeTestCases', '--include-filter', 'CtsTestHarnessModeTestCases[secondary_user]', '--include-filter', 'CtsTetheringTest', '--include-filter', 'CtsTetheringTest[secondary_user]', '--include-filter', 'CtsTextClassifierTestCases', '--include-filter', 'CtsTextClassifierTestCases[secondary_user]', '--include-filter', 'CtsTextTestCases', '--include-filter', 'CtsTextTestCases[instant]', '--include-filter', 'CtsTextTestCases[secondary_user]', '--include-filter', 'CtsTfliteNnapiDelegateTestCases', '--include-filter', 'CtsTfliteNnapiDelegateTestCases[secondary_user]', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeDeviceTestCases[secondary_user]', '--include-filter', 'CtsThemeHostTestCases', '--include-filter', 'CtsThemeHostTestCases[secondary_user]', '--include-filter', 'CtsThermalTestCases', '--include-filter', 'CtsThermalTestCases[secondary_user]', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastLegacyTestCases[secondary_user]', '--include-filter', 'CtsToastTestCases', '--include-filter', 'CtsToastTestCases[instant]', '--include-filter', 'CtsToastTestCases[secondary_user]', '--include-filter', 'CtsTransitionTestCases', '--include-filter', 'CtsTransitionTestCases[secondary_user]', '--include-filter', 'CtsTrustedVoiceHostTestCases', '--include-filter', 'CtsTrustedVoiceHostTestCases[secondary_user]', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvProviderTestCases[secondary_user]', '--include-filter', 'CtsTvTestCases', '--include-filter', 'CtsTvTestCases[secondary_user]', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiAutomationTestCases[instant]', '--include-filter', 'CtsUiAutomationTestCases[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases', '--include-filter', 'CtsUiRenderingTestCases27', '--include-filter', 'CtsUiRenderingTestCases27[instant]', '--include-filter', 'CtsUiRenderingTestCases27[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases[instant]', '--include-filter', 'CtsUiRenderingTestCases[secondary_user]', '--include-filter', 'CtsUidIsolationTestCases', '--include-filter', 'CtsUidIsolationTestCases[instant]', '--include-filter', 'CtsUidIsolationTestCases[secondary_user]', '--include-filter', 'CtsUsageStatsTestCases', '--include-filter', 'CtsUsageStatsTestCases[instant]', '--include-filter', 'CtsUsageStatsTestCases[secondary_user]', '--include-filter', 'CtsUsbManagerTestCases', '--include-filter', 'CtsUsbManagerTestCases[secondary_user]', '--include-filter', 'CtsUsbTests', '--include-filter', 'CtsUsbTests[instant]', '--include-filter', 'CtsUsbTests[secondary_user]', '--include-filter', 'CtsUsesLibraryHostTestCases', '--include-filter', 'CtsUsesLibraryHostTestCases[secondary_user]', '--include-filter', 'CtsUtilTestCases', '--include-filter', 'CtsUtilTestCases[instant]', '--include-filter', 'CtsUtilTestCases[secondary_user]', '--include-filter', 'CtsVideoTestCases', '--include-filter', 'CtsVideoTestCases[secondary_user]', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[instant]', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases
new file mode 100644
index 0000000..c56bfd0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases, CtsViewTestCasesSdk28, CtsViewTestCasesSdk28[instant], CtsViewTestCasesSdk28[secondary_user], CtsViewTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.arm.all.CtsViewTestCases_-_CtsViewTestCases',
+        test_name='cheets_CTS_R.internal.arm.all.CtsViewTestCases_-_CtsViewTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewTestCases', '--include-filter', 'CtsViewTestCasesSdk28', '--include-filter', 'CtsViewTestCasesSdk28[instant]', '--include-filter', 'CtsViewTestCasesSdk28[secondary_user]', '--include-filter', 'CtsViewTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsViewTestCases_-_CtsViewTestCases',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsVoiceInteractionTestCases_-_vm-tests-tf b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsVoiceInteractionTestCases_-_vm-tests-tf
new file mode 100644
index 0000000..394c772
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.all.CtsVoiceInteractionTestCases_-_vm-tests-tf
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.all.CtsVoiceInteractionTestCases_-_vm-tests-tf'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVoiceInteractionTestCases, CtsVoiceInteractionTestCases[instant], CtsVoiceInteractionTestCases[secondary_user], CtsVoiceSettingsTestCases, CtsVoiceSettingsTestCases[instant], CtsVoiceSettingsTestCases[secondary_user], CtsVrTestCases, CtsVrTestCases[secondary_user], CtsWebkitTestCases, CtsWebkitTestCases[instant], CtsWebkitTestCases[secondary_user], CtsWidgetTestCases, CtsWidgetTestCases29, CtsWidgetTestCases29[instant], CtsWidgetTestCases29[secondary_user], CtsWidgetTestCases[instant], CtsWidgetTestCases[secondary_user], CtsWifiBroadcastsHostTestCases, CtsWifiBroadcastsHostTestCases[instant], CtsWifiBroadcastsHostTestCases[secondary_user], CtsWifiTestCases, CtsWifiTestCases[instant], CtsWifiTestCases[secondary_user], CtsWindowManagerDeviceTestCases, CtsWindowManagerDeviceTestCases[secondary_user], CtsWindowManagerJetpackTestCases, CtsWindowManagerJetpackTestCases[secondary_user], CtsWindowManagerSdk25TestCases, CtsWindowManagerSdk25TestCases[secondary_user], CtsWindowManagerSdk28TestCases, CtsWindowManagerSdk28TestCases[secondary_user], CtsWindowManagerSdk29TestCases, CtsWindowManagerSdk29TestCases[secondary_user], CtsWrapNoWrapTestCases, CtsWrapNoWrapTestCases[secondary_user], CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugMallocDebugTestCases[secondary_user], CtsWrapWrapDebugTestCases, CtsWrapWrapDebugTestCases[secondary_user], CtsWrapWrapNoDebugTestCases, CtsWrapWrapNoDebugTestCases[secondary_user], LegacyStorageTest, LegacyStorageTest[instant], ScopedStorageTest, ScopedStorageTest[instant], signed-CtsOmapiTestCases, signed-CtsOmapiTestCases[secondary_user], signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases1[secondary_user], signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases2[secondary_user], signed-CtsSecureElementAccessControlTestCases3, signed-CtsSecureElementAccessControlTestCases3[secondary_user], vm-tests-tf, vm-tests-tf[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='internal.arm.all.CtsVoiceInteractionTestCases_-_vm-tests-tf',
+        test_name='cheets_CTS_R.internal.arm.all.CtsVoiceInteractionTestCases_-_vm-tests-tf',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceInteractionTestCases[instant]', '--include-filter', 'CtsVoiceInteractionTestCases[secondary_user]', '--include-filter', 'CtsVoiceSettingsTestCases', '--include-filter', 'CtsVoiceSettingsTestCases[instant]', '--include-filter', 'CtsVoiceSettingsTestCases[secondary_user]', '--include-filter', 'CtsVrTestCases', '--include-filter', 'CtsVrTestCases[secondary_user]', '--include-filter', 'CtsWebkitTestCases', '--include-filter', 'CtsWebkitTestCases[instant]', '--include-filter', 'CtsWebkitTestCases[secondary_user]', '--include-filter', 'CtsWidgetTestCases', '--include-filter', 'CtsWidgetTestCases29', '--include-filter', 'CtsWidgetTestCases29[instant]', '--include-filter', 'CtsWidgetTestCases29[secondary_user]', '--include-filter', 'CtsWidgetTestCases[instant]', '--include-filter', 'CtsWidgetTestCases[secondary_user]', '--include-filter', 'CtsWifiBroadcastsHostTestCases', '--include-filter', 'CtsWifiBroadcastsHostTestCases[instant]', '--include-filter', 'CtsWifiBroadcastsHostTestCases[secondary_user]', '--include-filter', 'CtsWifiTestCases', '--include-filter', 'CtsWifiTestCases[instant]', '--include-filter', 'CtsWifiTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--include-filter', 'CtsWindowManagerDeviceTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerJetpackTestCases', '--include-filter', 'CtsWindowManagerJetpackTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk25TestCases', '--include-filter', 'CtsWindowManagerSdk25TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk28TestCases', '--include-filter', 'CtsWindowManagerSdk28TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk29TestCases', '--include-filter', 'CtsWindowManagerSdk29TestCases[secondary_user]', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapNoWrapTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases[secondary_user]', '--include-filter', 'LegacyStorageTest', '--include-filter', 'LegacyStorageTest[instant]', '--include-filter', 'ScopedStorageTest', '--include-filter', 'ScopedStorageTest[instant]', '--include-filter', 'signed-CtsOmapiTestCases', '--include-filter', 'signed-CtsOmapiTestCases[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3[secondary_user]', '--include-filter', 'vm-tests-tf', '--include-filter', 'vm-tests-tf[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsVoiceInteractionTestCases_-_vm-tests-tf',
+        target_plan=None,
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.signed-Cts b/server/site_tests/cheets_CTS_R/control.internal.arm.signed-Cts
new file mode 100644
index 0000000..76704a1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.signed-Cts
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.signed-Cts'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module signed-CtsOmapiTestCases, signed-CtsOmapiTestCases[secondary_user], signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases1[secondary_user], signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases2[secondary_user], signed-CtsSecureElementAccessControlTestCases3, signed-CtsSecureElementAccessControlTestCases3[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.signed-Cts',
+        test_name='cheets_CTS_R.internal.arm.signed-Cts',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'signed-CtsOmapiTestCases', '--include-filter', 'signed-CtsOmapiTestCases[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='signed-Cts',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.tradefed-run-collect-tests-only-hardware-internal b/server/site_tests/cheets_CTS_R/control.internal.arm.tradefed-run-collect-tests-only-hardware-internal
new file mode 100644
index 0000000..98f7285
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.tradefed-run-collect-tests-only-hardware-internal
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.tradefed-run-collect-tests-only-hardware-internal'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module tradefed-run-collect-tests-only-hardware-internal of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.tradefed-run-collect-tests-only-hardware-internal',
+        test_name='cheets_CTS_R.internal.arm.tradefed-run-collect-tests-only-hardware-internal',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--subplan', 'cts-hardware', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='tradefed-run-collect-tests-only-hardware-internal',
+        target_plan='cts-hardware',
+        bundle='arm',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.vm-tests-tf b/server/site_tests/cheets_CTS_R/control.internal.arm.vm-tests-tf
new file mode 100644
index 0000000..5783938
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.vm-tests-tf
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.vm-tests-tf'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module vm-tests-tf, vm-tests-tf[secondary_user] of the Android Compatibility Test Suite (CTS) using arm ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.arm.vm-tests-tf',
+        test_name='cheets_CTS_R.internal.arm.vm-tests-tf',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'vm-tests-tf', '--include-filter', 'vm-tests-tf[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='vm-tests-tf',
+        target_plan=None,
+        bundle='arm',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.arm.waivers b/server/site_tests/cheets_CTS_R/control.internal.arm.waivers
new file mode 100644
index 0000000..383aa36
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.arm.waivers
@@ -0,0 +1,42 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is not auto-generated. Don't delete it.
+from autotest_lib.client.common_lib import utils, global_config
+import pipes
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.arm.waivers'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts, suite:arc-cts-qual, suite:arc-cts-hardware'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using arm ABI in ARC.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    ssid = utils.get_wireless_ssid(machine['hostname'])
+    wifipass = global_config.global_config.get_config_value('CLIENT',
+                'wireless_password', default=None)
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='internal.arm.waivers',
+        test_name='cheets_CTS_R.internal.arm.waivers',
+        run_template=['run', 'commandAndExit', 'cts', '--subplan', 'waivers'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='cts-dev',
+        target_plan='waivers',
+        load_waivers=False,
+        bundle='arm',
+        uri='DEV_WAIVER',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAbiOverrideHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAbiOverrideHost
new file mode 100644
index 0000000..fb31cc3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAbiOverrideHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAbiOverrideHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAbiOverrideHostTestCases, CtsAbiOverrideHostTestCases[instant], CtsAbiOverrideHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAbiOverrideHost',
+        test_name='cheets_CTS_R.internal.x86.CtsAbiOverrideHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAbiOverrideHostTestCases', '--include-filter', 'CtsAbiOverrideHostTestCases[instant]', '--include-filter', 'CtsAbiOverrideHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAbiOverrideHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAcceleration b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAcceleration
new file mode 100644
index 0000000..adb7477
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAcceleration
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAcceleration'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccelerationTestCases, CtsAccelerationTestCases[instant], CtsAccelerationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAcceleration',
+        test_name='cheets_CTS_R.internal.x86.CtsAcceleration',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccelerationTestCases', '--include-filter', 'CtsAccelerationTestCases[instant]', '--include-filter', 'CtsAccelerationTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAcceleration',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAccessibility b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAccessibility
new file mode 100644
index 0000000..5f050d7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAccessibility
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAccessibility'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityServiceSdk29TestCases, CtsAccessibilityServiceSdk29TestCases[instant], CtsAccessibilityServiceSdk29TestCases[secondary_user], CtsAccessibilityServiceTestCases, CtsAccessibilityServiceTestCases[instant], CtsAccessibilityTestCases, CtsAccessibilityTestCases[instant], CtsAccessibilityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAccessibility',
+        test_name='cheets_CTS_R.internal.x86.CtsAccessibility',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[instant]', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[secondary_user]', '--include-filter', 'CtsAccessibilityServiceTestCases', '--include-filter', 'CtsAccessibilityServiceTestCases[instant]', '--include-filter', 'CtsAccessibilityTestCases', '--include-filter', 'CtsAccessibilityTestCases[instant]', '--include-filter', 'CtsAccessibilityTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAccessibility',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAccountManager b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAccountManager
new file mode 100644
index 0000000..d5ac3d7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAccountManager
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAccountManager'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccountManagerTestCases, CtsAccountManagerTestCases[instant], CtsAccountManagerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAccountManager',
+        test_name='cheets_CTS_R.internal.x86.CtsAccountManager',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccountManagerTestCases', '--include-filter', 'CtsAccountManagerTestCases[instant]', '--include-filter', 'CtsAccountManagerTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAccountManager',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAccountsHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAccountsHost
new file mode 100644
index 0000000..77d868d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAccountsHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAccountsHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccountsHostTestCases, CtsAccountsHostTestCases[instant], CtsAccountsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAccountsHost',
+        test_name='cheets_CTS_R.internal.x86.CtsAccountsHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccountsHostTestCases', '--include-filter', 'CtsAccountsHostTestCases[instant]', '--include-filter', 'CtsAccountsHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAccountsHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsActivityManagerBackgroundActivity b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsActivityManagerBackgroundActivity
new file mode 100644
index 0000000..da282b5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsActivityManagerBackgroundActivity
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsActivityManagerBackgroundActivity'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsActivityManagerBackgroundActivityTestCases, CtsActivityManagerBackgroundActivityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsActivityManagerBackgroundActivity',
+        test_name='cheets_CTS_R.internal.x86.CtsActivityManagerBackgroundActivity',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsActivityManagerBackgroundActivity',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAdb b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAdb
new file mode 100644
index 0000000..5dd0fd1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAdb
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAdb'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAdbHostTestCases, CtsAdbHostTestCases[secondary_user], CtsAdbManagerHostTestCases, CtsAdbManagerHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAdb',
+        test_name='cheets_CTS_R.internal.x86.CtsAdb',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdbHostTestCases', '--include-filter', 'CtsAdbHostTestCases[secondary_user]', '--include-filter', 'CtsAdbManagerHostTestCases', '--include-filter', 'CtsAdbManagerHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAdb',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAdmin b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAdmin
new file mode 100644
index 0000000..044a1d1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAdmin
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAdmin'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAdminPackageInstallerTestCases, CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAdmin',
+        test_name='cheets_CTS_R.internal.x86.CtsAdmin',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAdmin',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAlarmManager b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAlarmManager
new file mode 100644
index 0000000..4e0e649
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAlarmManager
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAlarmManager'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAlarmManagerTestCases, CtsAlarmManagerTestCases[instant], CtsAlarmManagerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAlarmManager',
+        test_name='cheets_CTS_R.internal.x86.CtsAlarmManager',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAlarmManagerTestCases', '--include-filter', 'CtsAlarmManagerTestCases[instant]', '--include-filter', 'CtsAlarmManagerTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAlarmManager',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAndroid b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAndroid
new file mode 100644
index 0000000..6f8d302
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAndroid
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAndroid'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAndroidAppTestCases, CtsAndroidAppTestCases[instant], CtsAndroidAppTestCases[secondary_user], CtsAndroidTestBase28ApiSignatureTestCases, CtsAndroidTestBase28ApiSignatureTestCases[instant], CtsAndroidTestBase28ApiSignatureTestCases[secondary_user], CtsAndroidTestBaseCurrentApiSignatureTestCases, CtsAndroidTestBaseCurrentApiSignatureTestCases[instant], CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases[instant], CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestRunnerCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant], CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAndroid',
+        test_name='cheets_CTS_R.internal.x86.CtsAndroid',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidAppTestCases[instant]', '--include-filter', 'CtsAndroidAppTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAndroid',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=28800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAngleIntegrationHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAngleIntegrationHost
new file mode 100644
index 0000000..96cfb22
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAngleIntegrationHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAngleIntegrationHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAngleIntegrationHostTestCases, CtsAngleIntegrationHostTestCases[instant], CtsAngleIntegrationHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAngleIntegrationHost',
+        test_name='cheets_CTS_R.internal.x86.CtsAngleIntegrationHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAngleIntegrationHostTestCases', '--include-filter', 'CtsAngleIntegrationHostTestCases[instant]', '--include-filter', 'CtsAngleIntegrationHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAngleIntegrationHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAnimation b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAnimation
new file mode 100644
index 0000000..47cce2e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAnimation
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAnimation'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAnimationTestCases, CtsAnimationTestCases[instant], CtsAnimationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAnimation',
+        test_name='cheets_CTS_R.internal.x86.CtsAnimation',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAnimationTestCases', '--include-filter', 'CtsAnimationTestCases[instant]', '--include-filter', 'CtsAnimationTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAnimation',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsApacheHttpLegacy b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsApacheHttpLegacy
new file mode 100644
index 0000000..2950898
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsApacheHttpLegacy
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsApacheHttpLegacy'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacy27ApiSignatureTestCases[instant], CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant], CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsApacheHttpLegacy',
+        test_name='cheets_CTS_R.internal.x86.CtsApacheHttpLegacy',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsApacheHttpLegacy',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsApex b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsApex
new file mode 100644
index 0000000..23360b8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsApex
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsApex'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsApexTestCases, CtsApexTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsApex',
+        test_name='cheets_CTS_R.internal.x86.CtsApex',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsApexTestCases', '--include-filter', 'CtsApexTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsApex',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsApp b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsApp
new file mode 100644
index 0000000..52ba969
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsApp
@@ -0,0 +1,50 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsApp'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppBindingHostTestCases, CtsAppBindingHostTestCases[secondary_user], CtsAppCompatHostTestCases, CtsAppCompatHostTestCases[instant], CtsAppCompatHostTestCases[secondary_user], CtsAppComponentFactoryTestCases, CtsAppComponentFactoryTestCases[instant], CtsAppComponentFactoryTestCases[secondary_user], CtsAppEnumerationTestCases, CtsAppEnumerationTestCases[secondary_user], CtsAppExitTestCases, CtsAppExitTestCases[instant], CtsAppExitTestCases[secondary_user], CtsAppIntegrityDeviceTestCases, CtsAppOpsTestCases, CtsAppOpsTestCases[instant], CtsAppOpsTestCases[secondary_user], CtsAppPredictionServiceTestCases, CtsAppPredictionServiceTestCases[secondary_user], CtsAppSecurityHostTestCases, CtsAppSecurityHostTestCases[secondary_user], CtsAppTestCases, CtsAppTestCases[instant], CtsAppTestCases[secondary_user], CtsAppUsageHostTestCases, CtsAppUsageHostTestCases[instant], CtsAppUsageHostTestCases[secondary_user], CtsAppWidgetTestCases, CtsAppWidgetTestCases[instant], CtsAppWidgetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        enable_default_apps=True,
+        tag='internal.x86.CtsApp',
+        test_name='cheets_CTS_R.internal.x86.CtsApp',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppBindingHostTestCases', '--include-filter', 'CtsAppBindingHostTestCases[secondary_user]', '--include-filter', 'CtsAppCompatHostTestCases', '--include-filter', 'CtsAppCompatHostTestCases[instant]', '--include-filter', 'CtsAppCompatHostTestCases[secondary_user]', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppComponentFactoryTestCases[instant]', '--include-filter', 'CtsAppComponentFactoryTestCases[secondary_user]', '--include-filter', 'CtsAppEnumerationTestCases', '--include-filter', 'CtsAppEnumerationTestCases[secondary_user]', '--include-filter', 'CtsAppExitTestCases', '--include-filter', 'CtsAppExitTestCases[instant]', '--include-filter', 'CtsAppExitTestCases[secondary_user]', '--include-filter', 'CtsAppIntegrityDeviceTestCases', '--include-filter', 'CtsAppOpsTestCases', '--include-filter', 'CtsAppOpsTestCases[instant]', '--include-filter', 'CtsAppOpsTestCases[secondary_user]', '--include-filter', 'CtsAppPredictionServiceTestCases', '--include-filter', 'CtsAppPredictionServiceTestCases[secondary_user]', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppTestCases[instant]', '--include-filter', 'CtsAppTestCases[secondary_user]', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppUsageHostTestCases[instant]', '--include-filter', 'CtsAppUsageHostTestCases[secondary_user]', '--include-filter', 'CtsAppWidgetTestCases', '--include-filter', 'CtsAppWidgetTestCases[instant]', '--include-filter', 'CtsAppWidgetTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsApp',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        hard_reboot_on_failure=True,
+        timeout=61200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAppTestCases.feature.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAppTestCases.feature.ctshardware
new file mode 100644
index 0000000..bb79d6c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAppTestCases.feature.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAppTestCases.feature.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppTestCases.feature of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAppTestCases.feature.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsAppTestCases.feature.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppTestCases android.app.cts.SystemFeaturesTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAppTestCases.feature',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAslrMalloc b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAslrMalloc
new file mode 100644
index 0000000..d13b6ab
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAslrMalloc
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAslrMalloc'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAslrMallocTestCases, CtsAslrMallocTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAslrMalloc',
+        test_name='cheets_CTS_R.internal.x86.CtsAslrMalloc',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAslrMallocTestCases', '--include-filter', 'CtsAslrMallocTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAslrMalloc',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAssist b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAssist
new file mode 100644
index 0000000..de9547e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAssist
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAssist'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAssistTestCases, CtsAssistTestCases[instant], CtsAssistTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAssist',
+        test_name='cheets_CTS_R.internal.x86.CtsAssist',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAssistTestCases', '--include-filter', 'CtsAssistTestCases[instant]', '--include-filter', 'CtsAssistTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAssist',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAtomicInstall b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAtomicInstall
new file mode 100644
index 0000000..2648f89
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAtomicInstall
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAtomicInstall'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAtomicInstallTestCases, CtsAtomicInstallTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAtomicInstall',
+        test_name='cheets_CTS_R.internal.x86.CtsAtomicInstall',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAtomicInstallTestCases', '--include-filter', 'CtsAtomicInstallTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAtomicInstall',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAtraceHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAtraceHost
new file mode 100644
index 0000000..8e08047
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAtraceHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAtraceHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAtraceHostTestCases, CtsAtraceHostTestCases[instant], CtsAtraceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAtraceHost',
+        test_name='cheets_CTS_R.internal.x86.CtsAtraceHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAtraceHostTestCases', '--include-filter', 'CtsAtraceHostTestCases[instant]', '--include-filter', 'CtsAtraceHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAtraceHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAttentionServiceDevice b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAttentionServiceDevice
new file mode 100644
index 0000000..167a0be
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAttentionServiceDevice
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAttentionServiceDevice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAttentionServiceDeviceTestCases, CtsAttentionServiceDeviceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAttentionServiceDevice',
+        test_name='cheets_CTS_R.internal.x86.CtsAttentionServiceDevice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAttentionServiceDeviceTestCases', '--include-filter', 'CtsAttentionServiceDeviceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAttentionServiceDevice',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAutoFillService b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAutoFillService
new file mode 100644
index 0000000..ad798e6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsAutoFillService
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsAutoFillService'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAutoFillServiceTestCases, CtsAutoFillServiceTestCases[instant], CtsAutoFillServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsAutoFillService',
+        test_name='cheets_CTS_R.internal.x86.CtsAutoFillService',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAutoFillServiceTestCases', '--include-filter', 'CtsAutoFillServiceTestCases[instant]', '--include-filter', 'CtsAutoFillServiceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAutoFillService',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=14400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBackgroundRestrictions b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBackgroundRestrictions
new file mode 100644
index 0000000..8ffd036
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBackgroundRestrictions
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsBackgroundRestrictions'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBackgroundRestrictionsTestCases, CtsBackgroundRestrictionsTestCases[instant], CtsBackgroundRestrictionsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBackgroundRestrictions',
+        test_name='cheets_CTS_R.internal.x86.CtsBackgroundRestrictions',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackgroundRestrictionsTestCases', '--include-filter', 'CtsBackgroundRestrictionsTestCases[instant]', '--include-filter', 'CtsBackgroundRestrictionsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBackgroundRestrictions',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBackup b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBackup
new file mode 100644
index 0000000..c80bb01
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBackup
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsBackup'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBackupHostTestCases, CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBackup',
+        test_name='cheets_CTS_R.internal.x86.CtsBackup',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBackup',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBatterySaving b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBatterySaving
new file mode 100644
index 0000000..8aaa63d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBatterySaving
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsBatterySaving'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBatterySavingTestCases, CtsBatterySavingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBatterySaving',
+        test_name='cheets_CTS_R.internal.x86.CtsBatterySaving',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBatterySavingTestCases', '--include-filter', 'CtsBatterySavingTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBatterySaving',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBionic b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBionic
new file mode 100644
index 0000000..c8be494
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBionic
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsBionic'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBionicAppTestCases, CtsBionicAppTestCases[instant], CtsBionicAppTestCases[secondary_user], CtsBionicTestCases, CtsBionicTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBionic',
+        test_name='cheets_CTS_R.internal.x86.CtsBionic',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBionicAppTestCases', '--include-filter', 'CtsBionicAppTestCases[instant]', '--include-filter', 'CtsBionicAppTestCases[secondary_user]', '--include-filter', 'CtsBionicTestCases', '--include-filter', 'CtsBionicTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBionic',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBlobStore b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBlobStore
new file mode 100644
index 0000000..6ce7d11
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBlobStore
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsBlobStore'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBlobStoreHostTestCases, CtsBlobStoreHostTestCases[secondary_user], CtsBlobStoreHostTestHelper, CtsBlobStoreHostTestHelper[secondary_user], CtsBlobStoreTestCases, CtsBlobStoreTestCases[secondary_user], CtsBlobStoreTestHelper, CtsBlobStoreTestHelperDiffSig, CtsBlobStoreTestHelperDiffSig2, CtsBlobStoreTestHelperDiffSig2[secondary_user], CtsBlobStoreTestHelperDiffSig[secondary_user], CtsBlobStoreTestHelper[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBlobStore',
+        test_name='cheets_CTS_R.internal.x86.CtsBlobStore',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBlobStoreHostTestCases', '--include-filter', 'CtsBlobStoreHostTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreHostTestHelper', '--include-filter', 'CtsBlobStoreHostTestHelper[secondary_user]', '--include-filter', 'CtsBlobStoreTestCases', '--include-filter', 'CtsBlobStoreTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper', '--include-filter', 'CtsBlobStoreTestHelperDiffSig', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelperDiffSig[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBlobStore',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBluetooth b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBluetooth
new file mode 100644
index 0000000..a1b3c61
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBluetooth
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsBluetooth'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBluetoothTestCases, CtsBluetoothTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBluetooth',
+        test_name='cheets_CTS_R.internal.x86.CtsBluetooth',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBluetoothTestCases', '--include-filter', 'CtsBluetoothTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBluetooth',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBootStats b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBootStats
new file mode 100644
index 0000000..205ca1a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsBootStats
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsBootStats'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsBootStatsTestCases, CtsBootStatsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsBootStats',
+        test_name='cheets_CTS_R.internal.x86.CtsBootStats',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsBootStatsTestCases', '--include-filter', 'CtsBootStatsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsBootStats',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCalendarProvider b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCalendarProvider
new file mode 100644
index 0000000..01de389
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCalendarProvider
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCalendarProvider'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCalendarProviderTestCases, CtsCalendarProviderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCalendarProvider',
+        test_name='cheets_CTS_R.internal.x86.CtsCalendarProvider',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCalendarProviderTestCases', '--include-filter', 'CtsCalendarProviderTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCalendarProvider',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCalendarcommon2 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCalendarcommon2
new file mode 100644
index 0000000..7b359ad
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCalendarcommon2
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCalendarcommon2'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCalendarcommon2TestCases, CtsCalendarcommon2TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCalendarcommon2',
+        test_name='cheets_CTS_R.internal.x86.CtsCalendarcommon2',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCalendarcommon2TestCases', '--include-filter', 'CtsCalendarcommon2TestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCalendarcommon2',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCamera b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCamera
new file mode 100644
index 0000000..432f642
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCamera
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCamera'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraApi25TestCases, CtsCameraApi25TestCases[secondary_user], CtsCameraTestCases, CtsCameraTestCases[instant], CtsCameraTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCamera',
+        test_name='cheets_CTS_R.internal.x86.CtsCamera',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraApi25TestCases[secondary_user]', '--include-filter', 'CtsCameraTestCases', '--include-filter', 'CtsCameraTestCases[instant]', '--include-filter', 'CtsCameraTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCamera',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=14400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.NativeCameraDeviceTest b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.NativeCameraDeviceTest
new file mode 100644
index 0000000..a3f643d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.NativeCameraDeviceTest
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCameraTestCases.NativeCameraDeviceTest'
+ATTRIBUTES = 'suite:arc-cts-qual, suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases.NativeCameraDeviceTest of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.CtsCameraTestCases.NativeCameraDeviceTest',
+        test_name='cheets_CTS_R.internal.x86.CtsCameraTestCases.NativeCameraDeviceTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases[instant] android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]', '--include-filter', 'CtsCameraTestCases[instant] android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        precondition_commands=['sleep 20', 'android-sh -c \'am start -a android.intent.action.VIEW -d https://webglsamples.org/electricflower/electricflower.html\''],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware
new file mode 100644
index 0000000..9308f46
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases.NativeCameraDeviceTest of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        precondition_commands=['sleep 20', 'android-sh -c \'am start -a android.intent.action.VIEW -d https://webglsamples.org/electricflower/electricflower.html\''],
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.ctshardware
new file mode 100644
index 0000000..958089d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCameraTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCameraTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsCameraTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.noled.camerabox.back b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.noled.camerabox.back
new file mode 100644
index 0000000..98f0a84
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.noled.camerabox.back
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCameraTestCases.noled.camerabox.back'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-r, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, cts_abi_x86, camerabox_light:noled, camerabox_facing:back'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        camera_facing='back',
+        cmdline_args=args,
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCameraTestCases.noled.camerabox.back',
+        test_name='cheets_CTS_R.internal.x86.CtsCameraTestCases.noled.camerabox.back',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        retry_manual_tests=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.noled.camerabox.front b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.noled.camerabox.front
new file mode 100644
index 0000000..d764f8d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCameraTestCases.noled.camerabox.front
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCameraTestCases.noled.camerabox.front'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-camera, suite:arc-cts-r, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, cts_abi_x86, camerabox_light:noled, camerabox_facing:front'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        camera_facing='front',
+        cmdline_args=args,
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCameraTestCases.noled.camerabox.front',
+        test_name='cheets_CTS_R.internal.x86.CtsCameraTestCases.noled.camerabox.front',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCameraTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        retry_manual_tests=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCar b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCar
new file mode 100644
index 0000000..7abcac7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCar
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCar'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarHostTestCases, CtsCarTestCases, CtsCarTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCar',
+        test_name='cheets_CTS_R.internal.x86.CtsCar',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCarHostTestCases', '--include-filter', 'CtsCarTestCases', '--include-filter', 'CtsCarTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCar',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCarrierApiTestCases
new file mode 100644
index 0000000..c10d998
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCarrierApiTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCarrierApiTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCarrierApiTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsCarrierApiTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsCarrierApiTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCarrierApiTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsClassLoaderFactory b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsClassLoaderFactory
new file mode 100644
index 0000000..992912f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsClassLoaderFactory
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsClassLoaderFactory'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases, CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant], CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user], CtsClassLoaderFactoryPathClassLoaderTestCases, CtsClassLoaderFactoryPathClassLoaderTestCases[instant], CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsClassLoaderFactory',
+        test_name='cheets_CTS_R.internal.x86.CtsClassLoaderFactory',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsClassLoaderFactory',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsClassloaderSplitsHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsClassloaderSplitsHost
new file mode 100644
index 0000000..21dcaf7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsClassloaderSplitsHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsClassloaderSplitsHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsClassloaderSplitsHostTestCases, CtsClassloaderSplitsHostTestCases[instant], CtsClassloaderSplitsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsClassloaderSplitsHost',
+        test_name='cheets_CTS_R.internal.x86.CtsClassloaderSplitsHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsClassloaderSplitsHostTestCases', '--include-filter', 'CtsClassloaderSplitsHostTestCases[instant]', '--include-filter', 'CtsClassloaderSplitsHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsClassloaderSplitsHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCodePathHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCodePathHost
new file mode 100644
index 0000000..8615368
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCodePathHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCodePathHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCodePathHostTestCases, CtsCodePathHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCodePathHost',
+        test_name='cheets_CTS_R.internal.x86.CtsCodePathHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCodePathHostTestCases', '--include-filter', 'CtsCodePathHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCodePathHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsColorMode b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsColorMode
new file mode 100644
index 0000000..f43b990
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsColorMode
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsColorMode'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsColorModeTestCases, CtsColorModeTestCases[instant], CtsColorModeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsColorMode',
+        test_name='cheets_CTS_R.internal.x86.CtsColorMode',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsColorModeTestCases', '--include-filter', 'CtsColorModeTestCases[instant]', '--include-filter', 'CtsColorModeTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsColorMode',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCompilation b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCompilation
new file mode 100644
index 0000000..09e906e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCompilation
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCompilation'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCompilationTestCases, CtsCompilationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCompilation',
+        test_name='cheets_CTS_R.internal.x86.CtsCompilation',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCompilationTestCases', '--include-filter', 'CtsCompilationTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCompilation',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsContactsProvider b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsContactsProvider
new file mode 100644
index 0000000..628b5fc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsContactsProvider
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsContactsProvider'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsContactsProviderTestCases, CtsContactsProviderTestCases[secondary_user], CtsContactsProviderWipe, CtsContactsProviderWipe[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsContactsProvider',
+        test_name='cheets_CTS_R.internal.x86.CtsContactsProvider',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsContactsProviderTestCases', '--include-filter', 'CtsContactsProviderTestCases[secondary_user]', '--include-filter', 'CtsContactsProviderWipe', '--include-filter', 'CtsContactsProviderWipe[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsContactsProvider',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsContent b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsContent
new file mode 100644
index 0000000..0332183
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsContent
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsContent'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsContentCaptureServiceTestCases, CtsContentCaptureServiceTestCases[instant], CtsContentCaptureServiceTestCases[secondary_user], CtsContentSuggestionsTestCases, CtsContentSuggestionsTestCases[secondary_user], CtsContentTestCases, CtsContentTestCases[instant], CtsContentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        enable_default_apps=True,
+        tag='internal.x86.CtsContent',
+        test_name='cheets_CTS_R.internal.x86.CtsContent',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsContentCaptureServiceTestCases', '--include-filter', 'CtsContentCaptureServiceTestCases[instant]', '--include-filter', 'CtsContentCaptureServiceTestCases[secondary_user]', '--include-filter', 'CtsContentSuggestionsTestCases', '--include-filter', 'CtsContentSuggestionsTestCases[secondary_user]', '--include-filter', 'CtsContentTestCases', '--include-filter', 'CtsContentTestCases[instant]', '--include-filter', 'CtsContentTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsContent',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsControlsDevice b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsControlsDevice
new file mode 100644
index 0000000..a0ae53b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsControlsDevice
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsControlsDevice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsControlsDeviceTestCases, CtsControlsDeviceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsControlsDevice',
+        test_name='cheets_CTS_R.internal.x86.CtsControlsDevice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsControlsDeviceTestCases', '--include-filter', 'CtsControlsDeviceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsControlsDevice',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCppTools b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCppTools
new file mode 100644
index 0000000..c9470c7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCppTools
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCppTools'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCppToolsTestCases, CtsCppToolsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCppTools',
+        test_name='cheets_CTS_R.internal.x86.CtsCppTools',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCppToolsTestCases', '--include-filter', 'CtsCppToolsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCppTools',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCurrentApiSignature b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCurrentApiSignature
new file mode 100644
index 0000000..49afcd3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsCurrentApiSignature
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsCurrentApiSignature'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCurrentApiSignatureTestCases, CtsCurrentApiSignatureTestCases[instant], CtsCurrentApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsCurrentApiSignature',
+        test_name='cheets_CTS_R.internal.x86.CtsCurrentApiSignature',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCurrentApiSignatureTestCases', '--include-filter', 'CtsCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsCurrentApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCurrentApiSignature',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDatabase b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDatabase
new file mode 100644
index 0000000..c838cfa
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDatabase
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDatabase'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDatabaseTestCases, CtsDatabaseTestCases[instant], CtsDatabaseTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDatabase',
+        test_name='cheets_CTS_R.internal.x86.CtsDatabase',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDatabaseTestCases', '--include-filter', 'CtsDatabaseTestCases[instant]', '--include-filter', 'CtsDatabaseTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDatabase',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqp.32 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqp.32
new file mode 100644
index 0000000..da79a40
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqp.32
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDeqp.32'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases, CtsDeqpTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDeqp.32',
+        test_name='cheets_CTS_R.internal.x86.CtsDeqp.32',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--abi', 'x86'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqp',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=111600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqp.64 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqp.64
new file mode 100644
index 0000000..f8ee9e2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqp.64
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDeqp.64'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases, CtsDeqpTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDeqp.64',
+        test_name='cheets_CTS_R.internal.x86.CtsDeqp.64',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--include-filter', 'CtsDeqpTestCases[secondary_user]', '--abi', 'x86_64'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqp',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=111600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-EGL b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-EGL
new file mode 100644
index 0000000..40cf818
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-EGL
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-EGL'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-EGL of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-EGL',
+        test_name='cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-EGL',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-EGL.*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES2 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES2
new file mode 100644
index 0000000..f4508cf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES2
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-GLES2'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-GLES2',
+        test_name='cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-GLES2',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES2.*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3
new file mode 100644
index 0000000..0b98724
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-GLES3'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-GLES3',
+        test_name='cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-GLES3',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES3.*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
new file mode 100644
index 0000000..869e4e3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases dEQP-GLES3.functional.prerequisite#*', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES31 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES31
new file mode 100644
index 0000000..ca06367
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-GLES31
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-GLES31'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES31 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-GLES31',
+        test_name='cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-GLES31',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-GLES31.*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-VK.32 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-VK.32
new file mode 100644
index 0000000..8bce81d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-VK.32
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-VK.32'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-VK of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-VK.32',
+        test_name='cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-VK.32',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-VK.*', '--abi', 'x86'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-VK.64 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-VK.64
new file mode 100644
index 0000000..01e5753
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDeqpTestCases.dEQP-VK.64
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-VK.64'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 0
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases.dEQP-VK of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.CtsDeqpTestCases.dEQP-VK.64',
+        test_name='cheets_CTS_R.internal.x86.CtsDeqpTestCases.dEQP-VK.64',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--module', 'CtsDeqpTestCases', '--test', 'dEQP-VK.*', '--abi', 'x86_64'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDevice b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDevice
new file mode 100644
index 0000000..7428257
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDevice
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDevice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeviceConfigTestCases, CtsDeviceConfigTestCases[instant], CtsDeviceConfigTestCases[secondary_user], CtsDeviceIdleHostTestCases, CtsDeviceIdleHostTestCases[secondary_user], CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDevice',
+        test_name='cheets_CTS_R.internal.x86.CtsDevice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceConfigTestCases', '--include-filter', 'CtsDeviceConfigTestCases[instant]', '--include-filter', 'CtsDeviceConfigTestCases[secondary_user]', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDeviceIdleHostTestCases[secondary_user]', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDevice',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDexMetadataHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDexMetadataHost
new file mode 100644
index 0000000..544698d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDexMetadataHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDexMetadataHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDexMetadataHostTestCases, CtsDexMetadataHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDexMetadataHost',
+        test_name='cheets_CTS_R.internal.x86.CtsDexMetadataHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDexMetadataHostTestCases', '--include-filter', 'CtsDexMetadataHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDexMetadataHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDisplay b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDisplay
new file mode 100644
index 0000000..44f5740
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDisplay
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDisplay'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDisplayTestCases, CtsDisplayTestCases[instant], CtsDisplayTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDisplay',
+        test_name='cheets_CTS_R.internal.x86.CtsDisplay',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDisplayTestCases', '--include-filter', 'CtsDisplayTestCases[instant]', '--include-filter', 'CtsDisplayTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDisplay',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDownloadManager b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDownloadManager
new file mode 100644
index 0000000..6711fdd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDownloadManager
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDownloadManager'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDownloadManagerApi28, CtsDownloadManagerApi28[secondary_user], CtsDownloadManagerInstaller, CtsDownloadManagerInstaller[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDownloadManager',
+        test_name='cheets_CTS_R.internal.x86.CtsDownloadManager',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDownloadManagerApi28', '--include-filter', 'CtsDownloadManagerApi28[secondary_user]', '--include-filter', 'CtsDownloadManagerInstaller', '--include-filter', 'CtsDownloadManagerInstaller[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDownloadManager',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDpi b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDpi
new file mode 100644
index 0000000..ad80ddb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDpi
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDpi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDpiTestCases, CtsDpiTestCases2, CtsDpiTestCases2[secondary_user], CtsDpiTestCases[instant], CtsDpiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDpi',
+        test_name='cheets_CTS_R.internal.x86.CtsDpi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--include-filter', 'CtsDpiTestCases2[secondary_user]', '--include-filter', 'CtsDpiTestCases[instant]', '--include-filter', 'CtsDpiTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDpi',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDreams b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDreams
new file mode 100644
index 0000000..31adc36
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDreams
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDreams'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDreamsTestCases, CtsDreamsTestCases[instant], CtsDreamsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDreams',
+        test_name='cheets_CTS_R.internal.x86.CtsDreams',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDreamsTestCases', '--include-filter', 'CtsDreamsTestCases[instant]', '--include-filter', 'CtsDreamsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDreams',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDrm b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDrm
new file mode 100644
index 0000000..946583e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDrm
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDrm'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDrmTestCases, CtsDrmTestCases[instant], CtsDrmTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDrm',
+        test_name='cheets_CTS_R.internal.x86.CtsDrm',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDrmTestCases', '--include-filter', 'CtsDrmTestCases[instant]', '--include-filter', 'CtsDrmTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDrm',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDropBoxManagerTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDropBoxManagerTestCases
new file mode 100644
index 0000000..60cfa28
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDropBoxManagerTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDropBoxManagerTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDropBoxManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDropBoxManagerTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsDropBoxManagerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDropBoxManagerTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDropBoxManagerTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDumpsysHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDumpsysHost
new file mode 100644
index 0000000..059f35b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDumpsysHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDumpsysHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDumpsysHostTestCases, CtsDumpsysHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDumpsysHost',
+        test_name='cheets_CTS_R.internal.x86.CtsDumpsysHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDumpsysHostTestCases', '--include-filter', 'CtsDumpsysHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDumpsysHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDynamic b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDynamic
new file mode 100644
index 0000000..d15b361
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsDynamic
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsDynamic'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDynamicLinkerTestCases, CtsDynamicLinkerTestCases[instant], CtsDynamicLinkerTestCases[secondary_user], CtsDynamicMimeHostTestCases, CtsDynamicMimeHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsDynamic',
+        test_name='cheets_CTS_R.internal.x86.CtsDynamic',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDynamicLinkerTestCases', '--include-filter', 'CtsDynamicLinkerTestCases[instant]', '--include-filter', 'CtsDynamicLinkerTestCases[secondary_user]', '--include-filter', 'CtsDynamicMimeHostTestCases', '--include-filter', 'CtsDynamicMimeHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDynamic',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsEdiHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsEdiHost
new file mode 100644
index 0000000..fae4621
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsEdiHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsEdiHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsEdiHostTestCases, CtsEdiHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsEdiHost',
+        test_name='cheets_CTS_R.internal.x86.CtsEdiHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsEdiHostTestCases', '--include-filter', 'CtsEdiHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsEdiHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsEffect b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsEffect
new file mode 100644
index 0000000..13a86c3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsEffect
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsEffect'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsEffectTestCases, CtsEffectTestCases[instant], CtsEffectTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsEffect',
+        test_name='cheets_CTS_R.internal.x86.CtsEffect',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsEffectTestCases', '--include-filter', 'CtsEffectTestCases[instant]', '--include-filter', 'CtsEffectTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsEffect',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsExtendedMocking b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsExtendedMocking
new file mode 100644
index 0000000..7f0a3a4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsExtendedMocking
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsExtendedMocking'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsExtendedMockingTestCases, CtsExtendedMockingTestCases[instant], CtsExtendedMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsExtendedMocking',
+        test_name='cheets_CTS_R.internal.x86.CtsExtendedMocking',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExtendedMockingTestCases', '--include-filter', 'CtsExtendedMockingTestCases[instant]', '--include-filter', 'CtsExtendedMockingTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsExtendedMocking',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsExternalService b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsExternalService
new file mode 100644
index 0000000..0531d16
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsExternalService
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsExternalService'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsExternalServiceTestCases, CtsExternalServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsExternalService',
+        test_name='cheets_CTS_R.internal.x86.CtsExternalService',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalServiceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsExternalService',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsExtractNativeLibsHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsExtractNativeLibsHost
new file mode 100644
index 0000000..2e8bc26
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsExtractNativeLibsHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsExtractNativeLibsHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsExtractNativeLibsHostTestCases, CtsExtractNativeLibsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsExtractNativeLibsHost',
+        test_name='cheets_CTS_R.internal.x86.CtsExtractNativeLibsHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsExtractNativeLibsHostTestCases', '--include-filter', 'CtsExtractNativeLibsHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsExtractNativeLibsHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsFileSystem b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsFileSystem
new file mode 100644
index 0000000..7262c88
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsFileSystem
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsFileSystem'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFileSystemTestCases, CtsFileSystemTestCases[instant], CtsFileSystemTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsFileSystem',
+        test_name='cheets_CTS_R.internal.x86.CtsFileSystem',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFileSystemTestCases', '--include-filter', 'CtsFileSystemTestCases[instant]', '--include-filter', 'CtsFileSystemTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsFileSystem',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsFragment b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsFragment
new file mode 100644
index 0000000..1ae9624
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsFragment
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsFragment'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsFragmentTestCasesSdk26[instant], CtsFragmentTestCasesSdk26[secondary_user], CtsFragmentTestCases[instant], CtsFragmentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsFragment',
+        test_name='cheets_CTS_R.internal.x86.CtsFragment',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--include-filter', 'CtsFragmentTestCasesSdk26[instant]', '--include-filter', 'CtsFragmentTestCasesSdk26[secondary_user]', '--include-filter', 'CtsFragmentTestCases[instant]', '--include-filter', 'CtsFragmentTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsFragment',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsFsMgr b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsFsMgr
new file mode 100644
index 0000000..94d8350
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsFsMgr
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsFsMgr'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFsMgrTestCases, CtsFsMgrTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsFsMgr',
+        test_name='cheets_CTS_R.internal.x86.CtsFsMgr',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFsMgrTestCases', '--include-filter', 'CtsFsMgrTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsFsMgr',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGesture b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGesture
new file mode 100644
index 0000000..eb1a12b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGesture
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsGesture'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGestureTestCases, CtsGestureTestCases[instant], CtsGestureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsGesture',
+        test_name='cheets_CTS_R.internal.x86.CtsGesture',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGestureTestCases', '--include-filter', 'CtsGestureTestCases[instant]', '--include-filter', 'CtsGestureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsGesture',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGpu b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGpu
new file mode 100644
index 0000000..eacb9af
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGpu
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsGpu'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGpuProfilingDataTestCases, CtsGpuProfilingDataTestCases[secondary_user], CtsGpuToolsHostTestCases, CtsGpuToolsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsGpu',
+        test_name='cheets_CTS_R.internal.x86.CtsGpu',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGpuProfilingDataTestCases', '--include-filter', 'CtsGpuProfilingDataTestCases[secondary_user]', '--include-filter', 'CtsGpuToolsHostTestCases', '--include-filter', 'CtsGpuToolsHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsGpu',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGraphics b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGraphics
new file mode 100644
index 0000000..f990f7e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGraphics
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsGraphics'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGraphicsTestCases, CtsGraphicsTestCases[instant], CtsGraphicsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsGraphics',
+        test_name='cheets_CTS_R.internal.x86.CtsGraphics',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGraphicsTestCases', '--include-filter', 'CtsGraphicsTestCases[instant]', '--include-filter', 'CtsGraphicsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsGraphics',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGwpAsan b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGwpAsan
new file mode 100644
index 0000000..a3d5919
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsGwpAsan
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsGwpAsan'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsGwpAsanTestCases, CtsGwpAsanTestCases[instant], CtsGwpAsanTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsGwpAsan',
+        test_name='cheets_CTS_R.internal.x86.CtsGwpAsan',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsGwpAsanTestCases', '--include-filter', 'CtsGwpAsanTestCases[instant]', '--include-filter', 'CtsGwpAsanTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsGwpAsan',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHardware
new file mode 100644
index 0000000..4802b03
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsHardware'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHardwareTestCases, CtsHardwareTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=30,
+        tag='internal.x86.CtsHardware',
+        test_name='cheets_CTS_R.internal.x86.CtsHardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHardwareTestCases', '--include-filter', 'CtsHardwareTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHardware',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHarmfulAppWarningHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHarmfulAppWarningHost
new file mode 100644
index 0000000..21a1e82
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHarmfulAppWarningHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsHarmfulAppWarningHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHarmfulAppWarningHostTestCases, CtsHarmfulAppWarningHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsHarmfulAppWarningHost',
+        test_name='cheets_CTS_R.internal.x86.CtsHarmfulAppWarningHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHarmfulAppWarningHostTestCases', '--include-filter', 'CtsHarmfulAppWarningHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHarmfulAppWarningHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHdmiCecHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHdmiCecHost
new file mode 100644
index 0000000..99955dd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHdmiCecHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsHdmiCecHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHdmiCecHostTestCases, CtsHdmiCecHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsHdmiCecHost',
+        test_name='cheets_CTS_R.internal.x86.CtsHdmiCecHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHdmiCecHostTestCases', '--include-filter', 'CtsHdmiCecHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHdmiCecHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHiddenApi b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHiddenApi
new file mode 100644
index 0000000..6fbbdcc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHiddenApi
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsHiddenApi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistApi27TestCases[secondary_user], CtsHiddenApiBlacklistApi28TestCases, CtsHiddenApiBlacklistApi28TestCases[secondary_user], CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user], CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiBlacklistDebugClassTestCases[secondary_user], CtsHiddenApiBlacklistTestApiTestCases, CtsHiddenApiBlacklistTestApiTestCases[secondary_user], CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases[instant], CtsHiddenApiKillswitchDebugClassTestCases[secondary_user], CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWhitelistTestCases[instant], CtsHiddenApiKillswitchWhitelistTestCases[secondary_user], CtsHiddenApiKillswitchWildcardTestCases, CtsHiddenApiKillswitchWildcardTestCases[instant], CtsHiddenApiKillswitchWildcardTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsHiddenApi',
+        test_name='cheets_CTS_R.internal.x86.CtsHiddenApi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHiddenApi',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHostTzData b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHostTzData
new file mode 100644
index 0000000..3bd1643
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHostTzData
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsHostTzData'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHostTzDataTests, CtsHostTzDataTests[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsHostTzData',
+        test_name='cheets_CTS_R.internal.x86.CtsHostTzData',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostTzDataTests', '--include-filter', 'CtsHostTzDataTests[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHostTzData',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHostside b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHostside
new file mode 100644
index 0000000..cd98143
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsHostside
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsHostside'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsHostsideNetworkTests, CtsHostsideNetworkTests[instant], CtsHostsideNetworkTests[secondary_user], CtsHostsideNumberBlockingTestCases, CtsHostsideNumberBlockingTestCases[secondary_user], CtsHostsideTvTests, CtsHostsideTvTests[secondary_user], CtsHostsideWebViewTests, CtsHostsideWebViewTests[instant], CtsHostsideWebViewTests[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsHostside',
+        test_name='cheets_CTS_R.internal.x86.CtsHostside',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNetworkTests[instant]', '--include-filter', 'CtsHostsideNetworkTests[secondary_user]', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideNumberBlockingTestCases[secondary_user]', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideTvTests[secondary_user]', '--include-filter', 'CtsHostsideWebViewTests', '--include-filter', 'CtsHostsideWebViewTests[instant]', '--include-filter', 'CtsHostsideWebViewTests[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsHostside',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=19800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIcu b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIcu
new file mode 100644
index 0000000..8229b53
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIcu
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsIcu'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIcuTestCases, CtsIcuTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsIcu',
+        test_name='cheets_CTS_R.internal.x86.CtsIcu',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIcuTestCases', '--include-filter', 'CtsIcuTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIcu',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIdentity b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIdentity
new file mode 100644
index 0000000..c771c41
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIdentity
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsIdentity'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIdentityTestCases, CtsIdentityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsIdentity',
+        test_name='cheets_CTS_R.internal.x86.CtsIdentity',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIdentityTestCases', '--include-filter', 'CtsIdentityTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIdentity',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIke b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIke
new file mode 100644
index 0000000..715c894
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIke
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsIke'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIkeTestCases, CtsIkeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsIke',
+        test_name='cheets_CTS_R.internal.x86.CtsIke',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIkeTestCases', '--include-filter', 'CtsIkeTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIke',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIncidentHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIncidentHost
new file mode 100644
index 0000000..d041b35
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIncidentHost
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsIncidentHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIncidentHostTestCases, CtsIncidentHostTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='internal.x86.CtsIncidentHost',
+        test_name='cheets_CTS_R.internal.x86.CtsIncidentHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIncidentHostTestCases', '--include-filter', 'CtsIncidentHostTestCases[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIncidentHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIncrementalInstallHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIncrementalInstallHost
new file mode 100644
index 0000000..1217666
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIncrementalInstallHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsIncrementalInstallHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIncrementalInstallHostTestCases, CtsIncrementalInstallHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsIncrementalInstallHost',
+        test_name='cheets_CTS_R.internal.x86.CtsIncrementalInstallHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIncrementalInstallHostTestCases', '--include-filter', 'CtsIncrementalInstallHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIncrementalInstallHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInit b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInit
new file mode 100644
index 0000000..3960e12
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInit
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsInit'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInitTestCases, CtsInitTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsInit',
+        test_name='cheets_CTS_R.internal.x86.CtsInit',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInitTestCases', '--include-filter', 'CtsInitTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsInit',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInlineMocking b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInlineMocking
new file mode 100644
index 0000000..0d5c9d0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInlineMocking
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsInlineMocking'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInlineMockingTestCases, CtsInlineMockingTestCases[instant], CtsInlineMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsInlineMocking',
+        test_name='cheets_CTS_R.internal.x86.CtsInlineMocking',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInlineMockingTestCases', '--include-filter', 'CtsInlineMockingTestCases[instant]', '--include-filter', 'CtsInlineMockingTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsInlineMocking',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInputMethod b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInputMethod
new file mode 100644
index 0000000..338d0fe
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInputMethod
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsInputMethod'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInputMethodServiceHostTestCases, CtsInputMethodServiceHostTestCases[instant], CtsInputMethodServiceHostTestCases[secondary_user], CtsInputMethodTestCases, CtsInputMethodTestCases[instant], CtsInputMethodTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsInputMethod',
+        test_name='cheets_CTS_R.internal.x86.CtsInputMethod',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodServiceHostTestCases[instant]', '--include-filter', 'CtsInputMethodServiceHostTestCases[secondary_user]', '--include-filter', 'CtsInputMethodTestCases', '--include-filter', 'CtsInputMethodTestCases[instant]', '--include-filter', 'CtsInputMethodTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsInputMethod',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInstantApp b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInstantApp
new file mode 100644
index 0000000..72a0b8e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsInstantApp
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsInstantApp'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsInstantAppTests, CtsInstantAppTests[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsInstantApp',
+        test_name='cheets_CTS_R.internal.x86.CtsInstantApp',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsInstantAppTests', '--include-filter', 'CtsInstantAppTests[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsInstantApp',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIntentSignature b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIntentSignature
new file mode 100644
index 0000000..8e26900
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsIntentSignature
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsIntentSignature'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsIntentSignatureTestCases, CtsIntentSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsIntentSignature',
+        test_name='cheets_CTS_R.internal.x86.CtsIntentSignature',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsIntentSignatureTestCases', '--include-filter', 'CtsIntentSignatureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsIntentSignature',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJdwp b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJdwp
new file mode 100644
index 0000000..0b13bbf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJdwp
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsJdwp'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJdwpSecurityHostTestCases, CtsJdwpSecurityHostTestCases[secondary_user], CtsJdwpTestCases, CtsJdwpTestCases[instant], CtsJdwpTestCases[secondary_user], CtsJdwpTunnelHostTestCases, CtsJdwpTunnelHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsJdwp',
+        test_name='cheets_CTS_R.internal.x86.CtsJdwp',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsJdwpTestCases', '--include-filter', 'CtsJdwpTestCases[instant]', '--include-filter', 'CtsJdwpTestCases[secondary_user]', '--include-filter', 'CtsJdwpTunnelHostTestCases', '--include-filter', 'CtsJdwpTunnelHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsJdwp',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=14400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJni b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJni
new file mode 100644
index 0000000..94f4da0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJni
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsJni'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJniTestCases, CtsJniTestCases[instant], CtsJniTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsJni',
+        test_name='cheets_CTS_R.internal.x86.CtsJni',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJniTestCases', '--include-filter', 'CtsJniTestCases[instant]', '--include-filter', 'CtsJniTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsJni',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJobScheduler b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJobScheduler
new file mode 100644
index 0000000..d0461c6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJobScheduler
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsJobScheduler'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerSharedUidTestCases[secondary_user], CtsJobSchedulerTestCases, CtsJobSchedulerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsJobScheduler',
+        test_name='cheets_CTS_R.internal.x86.CtsJobScheduler',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerSharedUidTestCases[secondary_user]', '--include-filter', 'CtsJobSchedulerTestCases', '--include-filter', 'CtsJobSchedulerTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsJobScheduler',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJvmti b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJvmti
new file mode 100644
index 0000000..6789dda
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsJvmti
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsJvmti'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingHostTestCases[secondary_user], CtsJvmtiAttachingTestCases, CtsJvmtiAttachingTestCases[secondary_user], CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRedefineClassesHostTestCases[secondary_user], CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1900HostTestCases[secondary_user], CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1901HostTestCases[secondary_user], CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1902HostTestCases[secondary_user], CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1903HostTestCases[secondary_user], CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1904HostTestCases[secondary_user], CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1906HostTestCases[secondary_user], CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1907HostTestCases[secondary_user], CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1908HostTestCases[secondary_user], CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1909HostTestCases[secondary_user], CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1910HostTestCases[secondary_user], CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1911HostTestCases[secondary_user], CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1912HostTestCases[secondary_user], CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1913HostTestCases[secondary_user], CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1914HostTestCases[secondary_user], CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1915HostTestCases[secondary_user], CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1916HostTestCases[secondary_user], CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1917HostTestCases[secondary_user], CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1920HostTestCases[secondary_user], CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1921HostTestCases[secondary_user], CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1922HostTestCases[secondary_user], CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1923HostTestCases[secondary_user], CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1924HostTestCases[secondary_user], CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1925HostTestCases[secondary_user], CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1926HostTestCases[secondary_user], CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1927HostTestCases[secondary_user], CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1928HostTestCases[secondary_user], CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1930HostTestCases[secondary_user], CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1931HostTestCases[secondary_user], CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1932HostTestCases[secondary_user], CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1933HostTestCases[secondary_user], CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1934HostTestCases[secondary_user], CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1936HostTestCases[secondary_user], CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1937HostTestCases[secondary_user], CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1939HostTestCases[secondary_user], CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1941HostTestCases[secondary_user], CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1942HostTestCases[secondary_user], CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest1943HostTestCases[secondary_user], CtsJvmtiRunTest1953HostTestCases, CtsJvmtiRunTest1953HostTestCases[secondary_user], CtsJvmtiRunTest1958HostTestCases, CtsJvmtiRunTest1958HostTestCases[secondary_user], CtsJvmtiRunTest1962HostTestCases, CtsJvmtiRunTest1962HostTestCases[secondary_user], CtsJvmtiRunTest1967HostTestCases, CtsJvmtiRunTest1967HostTestCases[secondary_user], CtsJvmtiRunTest1968HostTestCases, CtsJvmtiRunTest1968HostTestCases[secondary_user], CtsJvmtiRunTest1969HostTestCases, CtsJvmtiRunTest1969HostTestCases[secondary_user], CtsJvmtiRunTest1970HostTestCases, CtsJvmtiRunTest1970HostTestCases[secondary_user], CtsJvmtiRunTest1971HostTestCases, CtsJvmtiRunTest1971HostTestCases[secondary_user], CtsJvmtiRunTest1974HostTestCases, CtsJvmtiRunTest1974HostTestCases[secondary_user], CtsJvmtiRunTest1975HostTestCases, CtsJvmtiRunTest1975HostTestCases[secondary_user], CtsJvmtiRunTest1976HostTestCases, CtsJvmtiRunTest1976HostTestCases[secondary_user], CtsJvmtiRunTest1977HostTestCases, CtsJvmtiRunTest1977HostTestCases[secondary_user], CtsJvmtiRunTest1978HostTestCases, CtsJvmtiRunTest1978HostTestCases[secondary_user], CtsJvmtiRunTest1979HostTestCases, CtsJvmtiRunTest1979HostTestCases[secondary_user], CtsJvmtiRunTest1981HostTestCases, CtsJvmtiRunTest1981HostTestCases[secondary_user], CtsJvmtiRunTest1982HostTestCases, CtsJvmtiRunTest1982HostTestCases[secondary_user], CtsJvmtiRunTest1983HostTestCases, CtsJvmtiRunTest1983HostTestCases[secondary_user], CtsJvmtiRunTest1984HostTestCases, CtsJvmtiRunTest1984HostTestCases[secondary_user], CtsJvmtiRunTest1988HostTestCases, CtsJvmtiRunTest1988HostTestCases[secondary_user], CtsJvmtiRunTest1989HostTestCases, CtsJvmtiRunTest1989HostTestCases[secondary_user], CtsJvmtiRunTest1990HostTestCases, CtsJvmtiRunTest1990HostTestCases[secondary_user], CtsJvmtiRunTest1991HostTestCases, CtsJvmtiRunTest1991HostTestCases[secondary_user], CtsJvmtiRunTest1992HostTestCases, CtsJvmtiRunTest1992HostTestCases[secondary_user], CtsJvmtiRunTest1994HostTestCases, CtsJvmtiRunTest1994HostTestCases[secondary_user], CtsJvmtiRunTest1995HostTestCases, CtsJvmtiRunTest1995HostTestCases[secondary_user], CtsJvmtiRunTest1996HostTestCases, CtsJvmtiRunTest1996HostTestCases[secondary_user], CtsJvmtiRunTest1997HostTestCases, CtsJvmtiRunTest1997HostTestCases[secondary_user], CtsJvmtiRunTest1998HostTestCases, CtsJvmtiRunTest1998HostTestCases[secondary_user], CtsJvmtiRunTest1999HostTestCases, CtsJvmtiRunTest1999HostTestCases[secondary_user], CtsJvmtiRunTest2001HostTestCases, CtsJvmtiRunTest2001HostTestCases[secondary_user], CtsJvmtiRunTest2002HostTestCases, CtsJvmtiRunTest2002HostTestCases[secondary_user], CtsJvmtiRunTest2003HostTestCases, CtsJvmtiRunTest2003HostTestCases[secondary_user], CtsJvmtiRunTest2004HostTestCases, CtsJvmtiRunTest2004HostTestCases[secondary_user], CtsJvmtiRunTest2005HostTestCases, CtsJvmtiRunTest2005HostTestCases[secondary_user], CtsJvmtiRunTest2006HostTestCases, CtsJvmtiRunTest2006HostTestCases[secondary_user], CtsJvmtiRunTest2007HostTestCases, CtsJvmtiRunTest2007HostTestCases[secondary_user], CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest902HostTestCases[secondary_user], CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest903HostTestCases[secondary_user], CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest904HostTestCases[secondary_user], CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest905HostTestCases[secondary_user], CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest906HostTestCases[secondary_user], CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest907HostTestCases[secondary_user], CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest908HostTestCases[secondary_user], CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest910HostTestCases[secondary_user], CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest911HostTestCases[secondary_user], CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest912HostTestCases[secondary_user], CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest913HostTestCases[secondary_user], CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest914HostTestCases[secondary_user], CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest915HostTestCases[secondary_user], CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest917HostTestCases[secondary_user], CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest918HostTestCases[secondary_user], CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest919HostTestCases[secondary_user], CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest920HostTestCases[secondary_user], CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest922HostTestCases[secondary_user], CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest923HostTestCases[secondary_user], CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest924HostTestCases[secondary_user], CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest926HostTestCases[secondary_user], CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest927HostTestCases[secondary_user], CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest928HostTestCases[secondary_user], CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest930HostTestCases[secondary_user], CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest931HostTestCases[secondary_user], CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest932HostTestCases[secondary_user], CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest940HostTestCases[secondary_user], CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest942HostTestCases[secondary_user], CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest944HostTestCases[secondary_user], CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest945HostTestCases[secondary_user], CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest947HostTestCases[secondary_user], CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest951HostTestCases[secondary_user], CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest982HostTestCases[secondary_user], CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest983HostTestCases[secondary_user], CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest984HostTestCases[secondary_user], CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest985HostTestCases[secondary_user], CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest986HostTestCases[secondary_user], CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest988HostTestCases[secondary_user], CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest989HostTestCases[secondary_user], CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest990HostTestCases[secondary_user], CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest991HostTestCases[secondary_user], CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest992HostTestCases[secondary_user], CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest993HostTestCases[secondary_user], CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest994HostTestCases[secondary_user], CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest995HostTestCases[secondary_user], CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest996HostTestCases[secondary_user], CtsJvmtiRunTest997HostTestCases, CtsJvmtiRunTest997HostTestCases[secondary_user], CtsJvmtiTaggingHostTestCases, CtsJvmtiTaggingHostTestCases[secondary_user], CtsJvmtiTrackingHostTestCases, CtsJvmtiTrackingHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsJvmti',
+        test_name='cheets_CTS_R.internal.x86.CtsJvmti',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases[secondary_user]', '--include-filter', 
'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1981HostTestCases', '--include-filter', 
'CtsJvmtiRunTest1981HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', 
'--include-filter', 'CtsJvmtiRunTest908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 
'CtsJvmtiRunTest984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsJvmti',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=75000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsKernelConfigTestCases
new file mode 100644
index 0000000..51d2b94
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsKernelConfigTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsKernelConfigTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsKernelConfigTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsKernelConfigTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsKernelConfigTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsKernelConfigTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsKeystore b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsKeystore
new file mode 100644
index 0000000..35c2c60
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsKeystore
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsKeystore'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsKeystoreTestCases, CtsKeystoreTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsKeystore',
+        test_name='cheets_CTS_R.internal.x86.CtsKeystore',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsKeystoreTestCases', '--include-filter', 'CtsKeystoreTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsKeystore',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLeanbackJank b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLeanbackJank
new file mode 100644
index 0000000..3345725
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLeanbackJank
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsLeanbackJank'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLeanbackJankTestCases, CtsLeanbackJankTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLeanbackJank',
+        test_name='cheets_CTS_R.internal.x86.CtsLeanbackJank',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLeanbackJankTestCases', '--include-filter', 'CtsLeanbackJankTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLeanbackJank',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLegacyNotification2 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLegacyNotification2
new file mode 100644
index 0000000..a6eca8f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLegacyNotification2
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsLegacyNotification2'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLegacyNotification20TestCases, CtsLegacyNotification20TestCases[secondary_user], CtsLegacyNotification27TestCases, CtsLegacyNotification27TestCases[secondary_user], CtsLegacyNotification28TestCases, CtsLegacyNotification28TestCases[secondary_user], CtsLegacyNotification29TestCases, CtsLegacyNotification29TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLegacyNotification2',
+        test_name='cheets_CTS_R.internal.x86.CtsLegacyNotification2',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLegacyNotification20TestCases', '--include-filter', 'CtsLegacyNotification20TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification27TestCases', '--include-filter', 'CtsLegacyNotification27TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification28TestCases', '--include-filter', 'CtsLegacyNotification28TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification29TestCases', '--include-filter', 'CtsLegacyNotification29TestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLegacyNotification2',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLibcore b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLibcore
new file mode 100644
index 0000000..947eeaa
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLibcore
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsLibcore'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLibcoreApiEvolutionTestCases, CtsLibcoreApiEvolutionTestCases[secondary_user], CtsLibcoreFileIOTestCases, CtsLibcoreFileIOTestCases[secondary_user], CtsLibcoreJsr166TestCases, CtsLibcoreJsr166TestCases[secondary_user], CtsLibcoreLegacy22TestCases, CtsLibcoreLegacy22TestCases[secondary_user], CtsLibcoreOjTestCases, CtsLibcoreOjTestCases[secondary_user], CtsLibcoreOkHttpTestCases, CtsLibcoreOkHttpTestCases[secondary_user], CtsLibcoreTestCases, CtsLibcoreTestCases[secondary_user], CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofBCTestCases[secondary_user], CtsLibcoreWycheproofConscryptTestCases, CtsLibcoreWycheproofConscryptTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLibcore',
+        test_name='cheets_CTS_R.internal.x86.CtsLibcore',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLibcoreApiEvolutionTestCases', '--include-filter', 'CtsLibcoreApiEvolutionTestCases[secondary_user]', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreFileIOTestCases[secondary_user]', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreJsr166TestCases[secondary_user]', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases[secondary_user]', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOjTestCases[secondary_user]', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases[secondary_user]', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases[secondary_user]'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLibcore',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=39600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLiblog b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLiblog
new file mode 100644
index 0000000..558fbd9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLiblog
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsLiblog'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLiblogTestCases, CtsLiblogTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLiblog',
+        test_name='cheets_CTS_R.internal.x86.CtsLiblog',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLiblogTestCases', '--include-filter', 'CtsLiblogTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLiblog',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLocation b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLocation
new file mode 100644
index 0000000..fac39a7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLocation
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsLocation'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLocationCoarseTestCases, CtsLocationCoarseTestCases[instant], CtsLocationCoarseTestCases[secondary_user], CtsLocationFineTestCases, CtsLocationFineTestCases[instant], CtsLocationFineTestCases[secondary_user], CtsLocationGnssTestCases, CtsLocationGnssTestCases[instant], CtsLocationGnssTestCases[secondary_user], CtsLocationNoneTestCases, CtsLocationNoneTestCases[instant], CtsLocationNoneTestCases[secondary_user], CtsLocationPrivilegedTestCases, CtsLocationPrivilegedTestCases[instant], CtsLocationPrivilegedTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLocation',
+        test_name='cheets_CTS_R.internal.x86.CtsLocation',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLocationCoarseTestCases', '--include-filter', 'CtsLocationCoarseTestCases[instant]', '--include-filter', 'CtsLocationCoarseTestCases[secondary_user]', '--include-filter', 'CtsLocationFineTestCases', '--include-filter', 'CtsLocationFineTestCases[instant]', '--include-filter', 'CtsLocationFineTestCases[secondary_user]', '--include-filter', 'CtsLocationGnssTestCases', '--include-filter', 'CtsLocationGnssTestCases[instant]', '--include-filter', 'CtsLocationGnssTestCases[secondary_user]', '--include-filter', 'CtsLocationNoneTestCases', '--include-filter', 'CtsLocationNoneTestCases[instant]', '--include-filter', 'CtsLocationNoneTestCases[secondary_user]', '--include-filter', 'CtsLocationPrivilegedTestCases', '--include-filter', 'CtsLocationPrivilegedTestCases[instant]', '--include-filter', 'CtsLocationPrivilegedTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLocation',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=28800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLogd b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLogd
new file mode 100644
index 0000000..9cf39f2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsLogd
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsLogd'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsLogdTestCases, CtsLogdTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsLogd',
+        test_name='cheets_CTS_R.internal.x86.CtsLogd',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsLogdTestCases', '--include-filter', 'CtsLogdTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsLogd',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMatchFlag b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMatchFlag
new file mode 100644
index 0000000..30f6c79
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMatchFlag
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMatchFlag'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMatchFlagTestCases, CtsMatchFlagTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMatchFlag',
+        test_name='cheets_CTS_R.internal.x86.CtsMatchFlag',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMatchFlagTestCases', '--include-filter', 'CtsMatchFlagTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMatchFlag',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaBitstreamsTestCases
new file mode 100644
index 0000000..7803e9a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaBitstreamsTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaBitstreamsTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaBitstreamsTestCases, CtsMediaBitstreamsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaBitstreamsTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaBitstreamsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaBitstreamsTestCases', '--include-filter', 'CtsMediaBitstreamsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaBitstreamsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaHostTestCases
new file mode 100644
index 0000000..9719b35
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaHostTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaHostTestCases, CtsMediaHostTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMediaHostTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaHostTestCases', '--include-filter', 'CtsMediaHostTestCases[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaParserTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaParserTestCases
new file mode 100644
index 0000000..9cc855b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaParserTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaParserTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaParserTestCases, CtsMediaParserTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMediaParserTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaParserTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaParserTestCases', '--include-filter', 'CtsMediaParserTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaParserTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaPerformanceClassTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaPerformanceClassTestCases
new file mode 100644
index 0000000..f790387
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaPerformanceClassTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaPerformanceClassTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaPerformanceClassTestCases, CtsMediaPerformanceClassTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMediaPerformanceClassTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaPerformanceClassTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaPerformanceClassTestCases', '--include-filter', 'CtsMediaPerformanceClassTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaPerformanceClassTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaStressTestCases
new file mode 100644
index 0000000..0b3eb20
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaStressTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaStressTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases, CtsMediaStressTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaStressTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaStressTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases', '--include-filter', 'CtsMediaStressTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=21600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaStressTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaStressTestCases.camera.ctshardware
new file mode 100644
index 0000000..79bca50
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaStressTestCases.camera.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaStressTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases.camera of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaStressTestCases.camera.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaStressTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases android.mediastress.cts.MediaRecorderStressTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases.camera',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.32 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.32
new file mode 100644
index 0000000..b87c5c9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.32
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaTestCases.32'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases, CtsMediaTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaTestCases.32',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'x86'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=39600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.64 b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.64
new file mode 100644
index 0000000..45ab5b7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.64
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaTestCases.64'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases, CtsMediaTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaTestCases.64',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--include-filter', 'CtsMediaTestCases[instant]', '--logcat-on-failure', '--abi', 'x86_64'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=39600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.audio b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.audio
new file mode 100644
index 0000000..fbbf628
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.audio
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaTestCases.audio'
+ATTRIBUTES = 'suite:arc-cts-qual, suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases.audio of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaTestCases.audio',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaTestCases.audio',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioAttributesTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioEffectTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFocusTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioFormatTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioManagerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioMetadataTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioNativeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlayRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackCaptureTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPlaybackConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPreProcessingTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioPresentationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordAppOpTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordRoutingNative', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecord_BufferSizeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioRecordingConfigurationTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioSystemTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioSystemUsageTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackLatencyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackOffloadTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackSurroundTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.AudioTrack_ListenerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolAacTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolHapticTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolMidiTest', '--include-filter', 'CtsMediaTestCases android.media.cts.SoundPoolOggTest', '--include-filter', 'CtsMediaTestCases android.media.cts.VolumeShaperTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.ctshardware
new file mode 100644
index 0000000..ccb9685
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.ctshardware
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.perf b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.perf
new file mode 100644
index 0000000..615b446
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.perf
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaTestCases.perf'
+ATTRIBUTES = 'suite:arc-cts-qual, suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases.perf of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaTestCases.perf',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaTestCases.perf',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.VideoDecoderPerfTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.video b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.video
new file mode 100644
index 0000000..58782db
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaTestCases.video
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaTestCases.video'
+ATTRIBUTES = 'suite:arc-cts-qual, suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases.video of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.CtsMediaTestCases.video',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaTestCases.video',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases android.media.cts.AdaptivePlaybackTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecodeAccuracyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecodeEditEncodeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.DecoderConformanceTest', '--include-filter', 'CtsMediaTestCases android.media.cts.EncodeDecodeTest', '--include-filter', 'CtsMediaTestCases android.media.cts.ExtractDecodeEditEncodeMuxTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaCodecPlayerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaCodecPlayerTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaDrmClearkeyTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaRecorderTest', '--include-filter', 'CtsMediaTestCases android.media.cts.MediaSyncTest#testPlayVideo', '--include-filter', 'CtsMediaTestCases android.media.cts.VideoCodecTest', '--include-filter', 'CtsMediaTestCases android.media.cts.VideoEncoderTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaV2TestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaV2TestCases
new file mode 100644
index 0000000..9d5d186
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMediaV2TestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMediaV2TestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaV2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMediaV2TestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsMediaV2TestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaV2TestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaV2TestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMidiTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMidiTestCases
new file mode 100644
index 0000000..fc4749c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMidiTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMidiTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMidiTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsMidiTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMidiTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMidiTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMimeMap b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMimeMap
new file mode 100644
index 0000000..8d6e47d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMimeMap
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMimeMap'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMimeMapTestCases, CtsMimeMapTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMimeMap',
+        test_name='cheets_CTS_R.internal.x86.CtsMimeMap',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMimeMapTestCases', '--include-filter', 'CtsMimeMapTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMimeMap',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMocking b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMocking
new file mode 100644
index 0000000..4dacf2f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMocking
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMocking'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMockingDebuggableTestCases, CtsMockingDebuggableTestCases[instant], CtsMockingDebuggableTestCases[secondary_user], CtsMockingTestCases, CtsMockingTestCases[instant], CtsMockingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMocking',
+        test_name='cheets_CTS_R.internal.x86.CtsMocking',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingDebuggableTestCases[instant]', '--include-filter', 'CtsMockingDebuggableTestCases[secondary_user]', '--include-filter', 'CtsMockingTestCases', '--include-filter', 'CtsMockingTestCases[instant]', '--include-filter', 'CtsMockingTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMocking',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMonkey b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMonkey
new file mode 100644
index 0000000..105d1ef
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMonkey
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMonkey'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMonkeyTestCases, CtsMonkeyTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMonkey',
+        test_name='cheets_CTS_R.internal.x86.CtsMonkey',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMonkeyTestCases', '--include-filter', 'CtsMonkeyTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMonkey',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMultiUser b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMultiUser
new file mode 100644
index 0000000..2f4dde1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsMultiUser
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsMultiUser'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMultiUserHostTestCases, CtsMultiUserHostTestCases[instant], CtsMultiUserHostTestCases[secondary_user], CtsMultiUserTestCases, CtsMultiUserTestCases[instant], CtsMultiUserTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsMultiUser',
+        test_name='cheets_CTS_R.internal.x86.CtsMultiUser',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserHostTestCases[instant]', '--include-filter', 'CtsMultiUserHostTestCases[secondary_user]', '--include-filter', 'CtsMultiUserTestCases', '--include-filter', 'CtsMultiUserTestCases[instant]', '--include-filter', 'CtsMultiUserTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMultiUser',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNNAPI b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNNAPI
new file mode 100644
index 0000000..7c4daff
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNNAPI
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsNNAPI'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNNAPITestCases, CtsNNAPITestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNNAPI',
+        test_name='cheets_CTS_R.internal.x86.CtsNNAPI',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNNAPITestCases', '--include-filter', 'CtsNNAPITestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNNAPI',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNNAPIBenchmark b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNNAPIBenchmark
new file mode 100644
index 0000000..1b30952
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNNAPIBenchmark
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsNNAPIBenchmark'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNNAPIBenchmarkTestCases, CtsNNAPIBenchmarkTestCases[instant], CtsNNAPIBenchmarkTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNNAPIBenchmark',
+        test_name='cheets_CTS_R.internal.x86.CtsNNAPIBenchmark',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNNAPIBenchmarkTestCases', '--include-filter', 'CtsNNAPIBenchmarkTestCases[instant]', '--include-filter', 'CtsNNAPIBenchmarkTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNNAPIBenchmark',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNative b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNative
new file mode 100644
index 0000000..f0c262b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNative
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsNative'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeEncryptionTestCases, CtsNativeEncryptionTestCases[instant], CtsNativeEncryptionTestCases[secondary_user], CtsNativeHardwareTestCases, CtsNativeHardwareTestCases[secondary_user], CtsNativeMediaAAudioTestCases, CtsNativeMediaAAudioTestCases[instant], CtsNativeMediaAAudioTestCases[secondary_user], CtsNativeMediaMetricsTestCases, CtsNativeMediaMetricsTestCases[instant], CtsNativeMediaMetricsTestCases[secondary_user], CtsNativeMediaSlTestCases, CtsNativeMediaSlTestCases[instant], CtsNativeMediaSlTestCases[secondary_user], CtsNativeMediaXaTestCases, CtsNativeMediaXaTestCases[instant], CtsNativeMediaXaTestCases[secondary_user], CtsNativeMidiTestCases, CtsNativeMidiTestCases[secondary_user], CtsNativeNetDnsTestCases, CtsNativeNetDnsTestCases[instant], CtsNativeNetDnsTestCases[secondary_user], CtsNativeNetTestCases, CtsNativeNetTestCases[instant], CtsNativeNetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNative',
+        test_name='cheets_CTS_R.internal.x86.CtsNative',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeEncryptionTestCases', '--include-filter', 'CtsNativeEncryptionTestCases[instant]', '--include-filter', 'CtsNativeEncryptionTestCases[secondary_user]', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeHardwareTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases[instant]', '--include-filter', 'CtsNativeMediaAAudioTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaMetricsTestCases', '--include-filter', 'CtsNativeMediaMetricsTestCases[instant]', '--include-filter', 'CtsNativeMediaMetricsTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaSlTestCases[instant]', '--include-filter', 'CtsNativeMediaSlTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeMediaXaTestCases[instant]', '--include-filter', 'CtsNativeMediaXaTestCases[secondary_user]', '--include-filter', 'CtsNativeMidiTestCases', '--include-filter', 'CtsNativeMidiTestCases[secondary_user]', '--include-filter', 'CtsNativeNetDnsTestCases', '--include-filter', 'CtsNativeNetDnsTestCases[instant]', '--include-filter', 'CtsNativeNetDnsTestCases[secondary_user]', '--include-filter', 'CtsNativeNetTestCases', '--include-filter', 'CtsNativeNetTestCases[instant]', '--include-filter', 'CtsNativeNetTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNative',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=46800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNativeMediaAAudioTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNativeMediaAAudioTestCases.ctshardware
new file mode 100644
index 0000000..fd88db6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNativeMediaAAudioTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsNativeMediaAAudioTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNativeMediaAAudioTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsNativeMediaAAudioTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNativeMediaAAudioTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNdef b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNdef
new file mode 100644
index 0000000..398c4a6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNdef
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsNdef'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNdefTestCases, CtsNdefTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNdef',
+        test_name='cheets_CTS_R.internal.x86.CtsNdef',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNdefTestCases', '--include-filter', 'CtsNdefTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNdef',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNdkBinder b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNdkBinder
new file mode 100644
index 0000000..c050eb9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNdkBinder
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsNdkBinder'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNdkBinderTestCases, CtsNdkBinderTestCases[instant], CtsNdkBinderTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNdkBinder',
+        test_name='cheets_CTS_R.internal.x86.CtsNdkBinder',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNdkBinderTestCases', '--include-filter', 'CtsNdkBinderTestCases[instant]', '--include-filter', 'CtsNdkBinderTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNdkBinder',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNet b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNet
new file mode 100644
index 0000000..ae2df6e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNet
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsNet'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNetApi23TestCases, CtsNetApi23TestCases[secondary_user], CtsNetSecConfigAttributeTestCases, CtsNetSecConfigAttributeTestCases[instant], CtsNetSecConfigAttributeTestCases[secondary_user], CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugDisabledTestCases[instant], CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user], CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases[instant], CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user], CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigBasicDomainConfigTestCases[instant], CtsNetSecConfigBasicDomainConfigTestCases[secondary_user], CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigCleartextTrafficTestCases[instant], CtsNetSecConfigCleartextTrafficTestCases[secondary_user], CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigDownloadManagerTestCases[secondary_user], CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigInvalidPinTestCases[instant], CtsNetSecConfigInvalidPinTestCases[secondary_user], CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigNestedDomainConfigTestCases[instant], CtsNetSecConfigNestedDomainConfigTestCases[secondary_user], CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user], CtsNetSecConfigResourcesSrcTestCases, CtsNetSecConfigResourcesSrcTestCases[instant], CtsNetSecConfigResourcesSrcTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user], CtsNetTestCases, CtsNetTestCasesInternetPermission, CtsNetTestCasesInternetPermission[instant], CtsNetTestCasesInternetPermission[secondary_user], CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyApi22[secondary_user], CtsNetTestCasesLegacyPermission22, CtsNetTestCasesLegacyPermission22[secondary_user], CtsNetTestCasesUpdateStatsPermission, CtsNetTestCasesUpdateStatsPermission[instant], CtsNetTestCasesUpdateStatsPermission[secondary_user], CtsNetTestCases[instant], CtsNetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNet',
+        test_name='cheets_CTS_R.internal.x86.CtsNet',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetApi23TestCases', '--include-filter', 'CtsNetApi23TestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigAttributeTestCases[instant]', '--include-filter', 'CtsNetSecConfigAttributeTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[instant]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[instant]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[instant]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user]', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesInternetPermission', '--include-filter', 'CtsNetTestCasesInternetPermission[instant]', '--include-filter', 'CtsNetTestCasesInternetPermission[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyApi22[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--include-filter', 'CtsNetTestCasesLegacyPermission22[secondary_user]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[instant]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[secondary_user]', '--include-filter', 'CtsNetTestCases[instant]', '--include-filter', 'CtsNetTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNet',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=90000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNetTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNetTestCases.ctshardware
new file mode 100644
index 0000000..f64590d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNetTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsNetTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNetTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsNetTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNetTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNfc b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNfc
new file mode 100644
index 0000000..0625bb5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNfc
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsNfc'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNfcTestCases, CtsNfcTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNfc',
+        test_name='cheets_CTS_R.internal.x86.CtsNfc',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNfcTestCases', '--include-filter', 'CtsNfcTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNfc',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNoPermission b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNoPermission
new file mode 100644
index 0000000..554fcd7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsNoPermission
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsNoPermission'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNoPermissionTestCases, CtsNoPermissionTestCases25, CtsNoPermissionTestCases25[secondary_user], CtsNoPermissionTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsNoPermission',
+        test_name='cheets_CTS_R.internal.x86.CtsNoPermission',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNoPermissionTestCases', '--include-filter', 'CtsNoPermissionTestCases25', '--include-filter', 'CtsNoPermissionTestCases25[secondary_user]', '--include-filter', 'CtsNoPermissionTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNoPermission',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsOmapi b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsOmapi
new file mode 100644
index 0000000..9e3508c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsOmapi
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsOmapi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOmapiTestCases, CtsOmapiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsOmapi',
+        test_name='cheets_CTS_R.internal.x86.CtsOmapi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOmapiTestCases', '--include-filter', 'CtsOmapiTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsOmapi',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsOpenG b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsOpenG
new file mode 100644
index 0000000..f4befae
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsOpenG
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsOpenG'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOpenGLTestCases, CtsOpenGLTestCases[secondary_user], CtsOpenGlPerf2TestCases, CtsOpenGlPerf2TestCases[secondary_user], CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsOpenG',
+        test_name='cheets_CTS_R.internal.x86.CtsOpenG',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGLTestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerf2TestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerfTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsOpenG',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsOs b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsOs
new file mode 100644
index 0000000..b616a25
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsOs
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsOs'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsOsHostTestCases, CtsOsHostTestCases[instant], CtsOsHostTestCases[secondary_user], CtsOsTestCases, CtsOsTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsOs',
+        test_name='cheets_CTS_R.internal.x86.CtsOs',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsHostTestCases[instant]', '--include-filter', 'CtsOsHostTestCases[secondary_user]', '--include-filter', 'CtsOsTestCases', '--include-filter', 'CtsOsTestCases[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsOs',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPackage b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPackage
new file mode 100644
index 0000000..eca7aa6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPackage
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsPackage'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPackageInstallAppOpDefaultTestCases, CtsPackageInstallAppOpDefaultTestCases[instant], CtsPackageInstallAppOpDefaultTestCases[secondary_user], CtsPackageInstallAppOpDeniedTestCases, CtsPackageInstallAppOpDeniedTestCases[instant], CtsPackageInstallAppOpDeniedTestCases[secondary_user], CtsPackageInstallTestCases, CtsPackageInstallTestCases[instant], CtsPackageInstallTestCases[secondary_user], CtsPackageInstallerTapjackingTestCases, CtsPackageInstallerTapjackingTestCases[secondary_user], CtsPackageUninstallTestCases, CtsPackageUninstallTestCases[secondary_user], CtsPackageWatchdogTestCases, CtsPackageWatchdogTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPackage',
+        test_name='cheets_CTS_R.internal.x86.CtsPackage',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallTestCases', '--include-filter', 'CtsPackageInstallTestCases[instant]', '--include-filter', 'CtsPackageInstallTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallerTapjackingTestCases', '--include-filter', 'CtsPackageInstallerTapjackingTestCases[secondary_user]', '--include-filter', 'CtsPackageUninstallTestCases', '--include-filter', 'CtsPackageUninstallTestCases[secondary_user]', '--include-filter', 'CtsPackageWatchdogTestCases', '--include-filter', 'CtsPackageWatchdogTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPackage',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=28800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPdf b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPdf
new file mode 100644
index 0000000..3296b87
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPdf
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsPdf'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPdfTestCases, CtsPdfTestCases[instant], CtsPdfTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPdf',
+        test_name='cheets_CTS_R.internal.x86.CtsPdf',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPdfTestCases', '--include-filter', 'CtsPdfTestCases[instant]', '--include-filter', 'CtsPdfTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPdf',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPerfetto b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPerfetto
new file mode 100644
index 0000000..d900b16
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPerfetto
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsPerfetto'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases, CtsPerfettoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPerfetto',
+        test_name='cheets_CTS_R.internal.x86.CtsPerfetto',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases', '--include-filter', 'CtsPerfettoTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPerfetto',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPerfettoTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPerfettoTestCases.ctshardware
new file mode 100644
index 0000000..4c476cb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPerfettoTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsPerfettoTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPerfettoTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsPerfettoTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPerfettoTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPermission b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPermission
new file mode 100644
index 0000000..24c49e3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPermission
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsPermission'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermission2TestCases, CtsPermission2TestCases[instant], CtsPermission3TestCases, CtsPermission3TestCases[secondary_user], CtsPermissionTestCases, CtsPermissionTestCasesSdk28, CtsPermissionTestCasesSdk28[instant], CtsPermissionTestCasesSdk28[secondary_user], CtsPermissionTestCasesTelephony, CtsPermissionTestCasesTelephony[instant], CtsPermissionTestCasesTelephony[secondary_user], CtsPermissionTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPermission',
+        test_name='cheets_CTS_R.internal.x86.CtsPermission',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermission2TestCases[instant]', '--include-filter', 'CtsPermission3TestCases', '--include-filter', 'CtsPermission3TestCases[secondary_user]', '--include-filter', 'CtsPermissionTestCases', '--include-filter', 'CtsPermissionTestCasesSdk28', '--include-filter', 'CtsPermissionTestCasesSdk28[instant]', '--include-filter', 'CtsPermissionTestCasesSdk28[secondary_user]', '--include-filter', 'CtsPermissionTestCasesTelephony', '--include-filter', 'CtsPermissionTestCasesTelephony[instant]', '--include-filter', 'CtsPermissionTestCasesTelephony[secondary_user]', '--include-filter', 'CtsPermissionTestCases[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPermission',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPermissionTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPermissionTestCases.camera.ctshardware
new file mode 100644
index 0000000..67a0764
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPermissionTestCases.camera.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsPermissionTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermissionTestCases.camera of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPermissionTestCases.camera.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsPermissionTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermissionTestCases android.permission.cts.CameraPermissionTest', '--include-filter', 'CtsPermissionTestCases android.permission.cts.Camera2PermissionTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPermissionTestCases.camera',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPreference b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPreference
new file mode 100644
index 0000000..0913dc2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPreference
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsPreference'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPreferenceTestCases, CtsPreferenceTestCases[instant], CtsPreferenceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsPreference',
+        test_name='cheets_CTS_R.internal.x86.CtsPreference',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPreferenceTestCases', '--include-filter', 'CtsPreferenceTestCases[instant]', '--include-filter', 'CtsPreferenceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPreference',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPrint b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPrint
new file mode 100644
index 0000000..d40bf6f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsPrint
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsPrint'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPrintTestCases, CtsPrintTestCases[instant], CtsPrintTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        use_helpers=True,
+        tag='internal.x86.CtsPrint',
+        test_name='cheets_CTS_R.internal.x86.CtsPrint',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPrintTestCases', '--include-filter', 'CtsPrintTestCases[instant]', '--include-filter', 'CtsPrintTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPrint',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsProto b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsProto
new file mode 100644
index 0000000..d19f913
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsProto
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsProto'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsProtoTestCases, CtsProtoTestCases[instant], CtsProtoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsProto',
+        test_name='cheets_CTS_R.internal.x86.CtsProto',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsProtoTestCases', '--include-filter', 'CtsProtoTestCases[instant]', '--include-filter', 'CtsProtoTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsProto',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsProvider b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsProvider
new file mode 100644
index 0000000..7adb01b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsProvider
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsProvider'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsProviderTestCases, CtsProviderTestCases[secondary_user], CtsProviderUiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsProvider',
+        test_name='cheets_CTS_R.internal.x86.CtsProvider',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsProviderTestCases', '--include-filter', 'CtsProviderTestCases[secondary_user]', '--include-filter', 'CtsProviderUiTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsProvider',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsQuickAccessWallet b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsQuickAccessWallet
new file mode 100644
index 0000000..3b90e53
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsQuickAccessWallet
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsQuickAccessWallet'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsQuickAccessWalletTestCases, CtsQuickAccessWalletTestCases[instant], CtsQuickAccessWalletTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsQuickAccessWallet',
+        test_name='cheets_CTS_R.internal.x86.CtsQuickAccessWallet',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsQuickAccessWalletTestCases', '--include-filter', 'CtsQuickAccessWalletTestCases[instant]', '--include-filter', 'CtsQuickAccessWalletTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsQuickAccessWallet',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRenderscript b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRenderscript
new file mode 100644
index 0000000..48ab27b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRenderscript
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsRenderscript'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRenderscriptLegacyTestCases, CtsRenderscriptLegacyTestCases[secondary_user], CtsRenderscriptTestCases, CtsRenderscriptTestCases[instant], CtsRenderscriptTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsRenderscript',
+        test_name='cheets_CTS_R.internal.x86.CtsRenderscript',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptLegacyTestCases[secondary_user]', '--include-filter', 'CtsRenderscriptTestCases', '--include-filter', 'CtsRenderscriptTestCases[instant]', '--include-filter', 'CtsRenderscriptTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsRenderscript',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsResolverService b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsResolverService
new file mode 100644
index 0000000..d1f6674
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsResolverService
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsResolverService'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsResolverServiceTestCases, CtsResolverServiceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsResolverService',
+        test_name='cheets_CTS_R.internal.x86.CtsResolverService',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsResolverServiceTestCases', '--include-filter', 'CtsResolverServiceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsResolverService',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsResourcesLoader b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsResourcesLoader
new file mode 100644
index 0000000..7721840
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsResourcesLoader
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsResourcesLoader'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsResourcesLoaderTests, CtsResourcesLoaderTests[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsResourcesLoader',
+        test_name='cheets_CTS_R.internal.x86.CtsResourcesLoader',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsResourcesLoaderTests', '--include-filter', 'CtsResourcesLoaderTests[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsResourcesLoader',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRole b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRole
new file mode 100644
index 0000000..ca5d5c1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRole
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsRole'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRoleTestCases, CtsRoleTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsRole',
+        test_name='cheets_CTS_R.internal.x86.CtsRole',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRoleTestCases', '--include-filter', 'CtsRoleTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsRole',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRollbackManagerHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRollbackManagerHostTestCases
new file mode 100644
index 0000000..3d375cc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRollbackManagerHostTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsRollbackManagerHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRollbackManagerHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsRollbackManagerHostTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsRollbackManagerHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsRollbackManagerHostTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsRollbackManagerHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRs b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRs
new file mode 100644
index 0000000..8ce397a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsRs
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsRs'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsRsBlasTestCases, CtsRsBlasTestCases[secondary_user], CtsRsCppTestCases, CtsRsCppTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsRs',
+        test_name='cheets_CTS_R.internal.x86.CtsRs',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsBlasTestCases[secondary_user]', '--include-filter', 'CtsRsCppTestCases', '--include-filter', 'CtsRsCppTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsRs',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSample b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSample
new file mode 100644
index 0000000..8bf877f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSample
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSample'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSampleDeviceTestCases, CtsSampleDeviceTestCases[instant], CtsSampleDeviceTestCases[secondary_user], CtsSampleHostTestCases, CtsSampleHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSample',
+        test_name='cheets_CTS_R.internal.x86.CtsSample',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleDeviceTestCases[instant]', '--include-filter', 'CtsSampleDeviceTestCases[secondary_user]', '--include-filter', 'CtsSampleHostTestCases', '--include-filter', 'CtsSampleHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSample',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSax b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSax
new file mode 100644
index 0000000..7ff1843
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSax
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSax'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSaxTestCases, CtsSaxTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSax',
+        test_name='cheets_CTS_R.internal.x86.CtsSax',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSaxTestCases', '--include-filter', 'CtsSaxTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSax',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsScopedStorageHostTest b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsScopedStorageHostTest
new file mode 100644
index 0000000..1cf54ad
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsScopedStorageHostTest
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsScopedStorageHostTest'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsScopedStorageHostTest, CtsScopedStorageHostTest[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsScopedStorageHostTest',
+        test_name='cheets_CTS_R.internal.x86.CtsScopedStorageHostTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsScopedStorageHostTest', '--include-filter', 'CtsScopedStorageHostTest[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsScopedStorageHostTest',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSdkExtensions b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSdkExtensions
new file mode 100644
index 0000000..677bd49
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSdkExtensions
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSdkExtensions'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSdkExtensionsTestCases, CtsSdkExtensionsTestCases[instant], CtsSdkExtensionsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSdkExtensions',
+        test_name='cheets_CTS_R.internal.x86.CtsSdkExtensions',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSdkExtensionsTestCases', '--include-filter', 'CtsSdkExtensionsTestCases[instant]', '--include-filter', 'CtsSdkExtensionsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSdkExtensions',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSeccompHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSeccompHost
new file mode 100644
index 0000000..e4bc700
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSeccompHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSeccompHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSeccompHostTestCases, CtsSeccompHostTestCases[instant], CtsSeccompHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSeccompHost',
+        test_name='cheets_CTS_R.internal.x86.CtsSeccompHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSeccompHostTestCases', '--include-filter', 'CtsSeccompHostTestCases[instant]', '--include-filter', 'CtsSeccompHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSeccompHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSecure b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSecure
new file mode 100644
index 0000000..1072cf0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSecure
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSecure'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases1[secondary_user], CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases2[secondary_user], CtsSecureElementAccessControlTestCases3, CtsSecureElementAccessControlTestCases3[secondary_user], CtsSecureFrpInstallTestCases, CtsSecureFrpInstallTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSecure',
+        test_name='cheets_CTS_R.internal.x86.CtsSecure',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--include-filter', 'CtsSecureElementAccessControlTestCases3[secondary_user]', '--include-filter', 'CtsSecureFrpInstallTestCases', '--include-filter', 'CtsSecureFrpInstallTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSecure',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSecurity b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSecurity
new file mode 100644
index 0000000..0f15271
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSecurity
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSecurity'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityBulletinHostTestCases, CtsSecurityBulletinHostTestCases[secondary_user], CtsSecurityHostTestCases, CtsSecurityHostTestCases[secondary_user], CtsSecurityTestCases, CtsSecurityTestCases[instant], CtsSecurityTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSecurity',
+        test_name='cheets_CTS_R.internal.x86.CtsSecurity',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--include-filter', 'CtsSecurityBulletinHostTestCases[secondary_user]', '--include-filter', 'CtsSecurityHostTestCases', '--include-filter', 'CtsSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsSecurityTestCases', '--include-filter', 'CtsSecurityTestCases[instant]', '--include-filter', 'CtsSecurityTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSecurity',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=154800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSelinux b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSelinux
new file mode 100644
index 0000000..b41e6d2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSelinux
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSelinux'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSelinuxEphemeralTestCases, CtsSelinuxEphemeralTestCases[instant], CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk25TestCases[secondary_user], CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdk27TestCases[secondary_user], CtsSelinuxTargetSdk28TestCases, CtsSelinuxTargetSdk28TestCases[secondary_user], CtsSelinuxTargetSdk29TestCases, CtsSelinuxTargetSdk29TestCases[secondary_user], CtsSelinuxTargetSdkCurrentTestCases, CtsSelinuxTargetSdkCurrentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSelinux',
+        test_name='cheets_CTS_R.internal.x86.CtsSelinux',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSelinuxEphemeralTestCases', '--include-filter', 'CtsSelinuxEphemeralTestCases[instant]', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk25TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk28TestCases', '--include-filter', 'CtsSelinuxTargetSdk28TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk29TestCases', '--include-filter', 'CtsSelinuxTargetSdk29TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSelinux',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSensor b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSensor
new file mode 100644
index 0000000..f845c4c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSensor
@@ -0,0 +1,48 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSensor'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild, suite:arc-cts-unibuild-hw'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases, CtsSensorTestCases[instant], CtsSensorTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except Exception:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSensor',
+        test_name='cheets_CTS_R.internal.x86.CtsSensor',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases', '--include-filter', 'CtsSensorTestCases[instant]', '--include-filter', 'CtsSensorTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSensor',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        hard_reboot_on_failure=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSensorTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSensorTestCases.ctshardware
new file mode 100644
index 0000000..1cc977a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSensorTestCases.ctshardware
@@ -0,0 +1,48 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSensorTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except Exception:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSensorTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsSensorTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSensorTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        hard_reboot_on_failure=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSettings b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSettings
new file mode 100644
index 0000000..a0e14ed
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSettings
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSettings'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSettingsHostTestCases, CtsSettingsTestCases, CtsSettingsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSettings',
+        test_name='cheets_CTS_R.internal.x86.CtsSettings',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSettingsHostTestCases', '--include-filter', 'CtsSettingsTestCases', '--include-filter', 'CtsSettingsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSettings',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSharedLibsApiSignature b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSharedLibsApiSignature
new file mode 100644
index 0000000..b9405e9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSharedLibsApiSignature
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSharedLibsApiSignature'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSharedLibsApiSignatureTestCases, CtsSharedLibsApiSignatureTestCases[instant], CtsSharedLibsApiSignatureTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSharedLibsApiSignature',
+        test_name='cheets_CTS_R.internal.x86.CtsSharedLibsApiSignature',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSharedLibsApiSignatureTestCases', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[instant]', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSharedLibsApiSignature',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSharesheet b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSharesheet
new file mode 100644
index 0000000..0206818
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSharesheet
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSharesheet'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSharesheetTestCases, CtsSharesheetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSharesheet',
+        test_name='cheets_CTS_R.internal.x86.CtsSharesheet',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSharesheetTestCases', '--include-filter', 'CtsSharesheetTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSharesheet',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsShortcut b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsShortcut
new file mode 100644
index 0000000..1f3daa1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsShortcut
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsShortcut'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsShortcutHostTestCases, CtsShortcutManagerLauncher1, CtsShortcutManagerLauncher1[secondary_user], CtsShortcutManagerLauncher2, CtsShortcutManagerLauncher2[secondary_user], CtsShortcutManagerLauncher3, CtsShortcutManagerLauncher3[secondary_user], CtsShortcutManagerLauncher4, CtsShortcutManagerLauncher4[secondary_user], CtsShortcutManagerPackage1, CtsShortcutManagerPackage1[secondary_user], CtsShortcutManagerPackage2, CtsShortcutManagerPackage2[secondary_user], CtsShortcutManagerPackage3, CtsShortcutManagerPackage3[secondary_user], CtsShortcutManagerPackage4, CtsShortcutManagerPackage4[secondary_user], CtsShortcutManagerTestCases, CtsShortcutManagerTestCases[secondary_user], CtsShortcutManagerThrottlingTest, CtsShortcutManagerThrottlingTest[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsShortcut',
+        test_name='cheets_CTS_R.internal.x86.CtsShortcut',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerLauncher1', '--include-filter', 'CtsShortcutManagerLauncher1[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher2', '--include-filter', 'CtsShortcutManagerLauncher2[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher3', '--include-filter', 'CtsShortcutManagerLauncher3[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher4', '--include-filter', 'CtsShortcutManagerLauncher4[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage1', '--include-filter', 'CtsShortcutManagerPackage1[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage2', '--include-filter', 'CtsShortcutManagerPackage2[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage3', '--include-filter', 'CtsShortcutManagerPackage3[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage4', '--include-filter', 'CtsShortcutManagerPackage4[secondary_user]', '--include-filter', 'CtsShortcutManagerTestCases', '--include-filter', 'CtsShortcutManagerTestCases[secondary_user]', '--include-filter', 'CtsShortcutManagerThrottlingTest', '--include-filter', 'CtsShortcutManagerThrottlingTest[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsShortcut',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=39600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSignedConfigHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSignedConfigHost
new file mode 100644
index 0000000..9cd4968
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSignedConfigHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSignedConfigHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSignedConfigHostTestCases, CtsSignedConfigHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSignedConfigHost',
+        test_name='cheets_CTS_R.internal.x86.CtsSignedConfigHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSignedConfigHostTestCases', '--include-filter', 'CtsSignedConfigHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSignedConfigHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSimRestrictedApis b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSimRestrictedApis
new file mode 100644
index 0000000..899a215
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSimRestrictedApis
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSimRestrictedApis'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimRestrictedApisTestCases, CtsSimRestrictedApisTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSimRestrictedApis',
+        test_name='cheets_CTS_R.internal.x86.CtsSimRestrictedApis',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSimRestrictedApisTestCases', '--include-filter', 'CtsSimRestrictedApisTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSimRestrictedApis',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSimpleCpu b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSimpleCpu
new file mode 100644
index 0000000..7c61527
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSimpleCpu
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSimpleCpu'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimpleCpuTestCases, CtsSimpleCpuTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSimpleCpu',
+        test_name='cheets_CTS_R.internal.x86.CtsSimpleCpu',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSimpleCpuTestCases', '--include-filter', 'CtsSimpleCpuTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSimpleCpu',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSimpleperfTestCases
new file mode 100644
index 0000000..ab8a4df
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSimpleperfTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSimpleperfTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSimpleperfTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsSimpleperfTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSimpleperfTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSimpleperfTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSkQP b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSkQP
new file mode 100644
index 0000000..f35fcc5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSkQP
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSkQP'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSkQPTestCases, CtsSkQPTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSkQP',
+        test_name='cheets_CTS_R.internal.x86.CtsSkQP',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSkQPTestCases', '--include-filter', 'CtsSkQPTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSkQP',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSlice b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSlice
new file mode 100644
index 0000000..d5abaad
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSlice
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSlice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSliceTestCases, CtsSliceTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSlice',
+        test_name='cheets_CTS_R.internal.x86.CtsSlice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSliceTestCases', '--include-filter', 'CtsSliceTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSlice',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSoundTrigger b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSoundTrigger
new file mode 100644
index 0000000..437de9a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSoundTrigger
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSoundTrigger'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSoundTriggerTestCases, CtsSoundTriggerTestCases[instant], CtsSoundTriggerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSoundTrigger',
+        test_name='cheets_CTS_R.internal.x86.CtsSoundTrigger',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSoundTriggerTestCases', '--include-filter', 'CtsSoundTriggerTestCases[instant]', '--include-filter', 'CtsSoundTriggerTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSoundTrigger',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSpeech b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSpeech
new file mode 100644
index 0000000..8966899
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSpeech
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSpeech'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSpeechTestCases, CtsSpeechTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSpeech',
+        test_name='cheets_CTS_R.internal.x86.CtsSpeech',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSpeechTestCases', '--include-filter', 'CtsSpeechTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSpeech',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsStagedInstallHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsStagedInstallHostTestCases
new file mode 100644
index 0000000..eea5424
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsStagedInstallHostTestCases
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsStagedInstallHostTestCases'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsStagedInstallHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsStagedInstallHostTestCases',
+        test_name='cheets_CTS_R.internal.x86.CtsStagedInstallHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsStagedInstallHostTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsStagedInstallHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsStatsdHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsStatsdHost
new file mode 100644
index 0000000..4588718
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsStatsdHost
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsStatsdHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsStatsdHostTestCases, CtsStatsdHostTestCases[instant], CtsStatsdHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsStatsdHost',
+        test_name='cheets_CTS_R.internal.x86.CtsStatsdHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsStatsdHostTestCases', '--include-filter', 'CtsStatsdHostTestCases[instant]', '--include-filter', 'CtsStatsdHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsStatsdHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        use_old_adb=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsStrictJavaPackages b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsStrictJavaPackages
new file mode 100644
index 0000000..9b6c755
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsStrictJavaPackages
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsStrictJavaPackages'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsStrictJavaPackagesTestCases, CtsStrictJavaPackagesTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsStrictJavaPackages',
+        test_name='cheets_CTS_R.internal.x86.CtsStrictJavaPackages',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsStrictJavaPackagesTestCases', '--include-filter', 'CtsStrictJavaPackagesTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsStrictJavaPackages',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSuspendApps b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSuspendApps
new file mode 100644
index 0000000..30c5f45
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSuspendApps
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSuspendApps'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSuspendAppsPermissionTestCases, CtsSuspendAppsPermissionTestCases[secondary_user], CtsSuspendAppsTestCases, CtsSuspendAppsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSuspendApps',
+        test_name='cheets_CTS_R.internal.x86.CtsSuspendApps',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSuspendAppsPermissionTestCases', '--include-filter', 'CtsSuspendAppsPermissionTestCases[secondary_user]', '--include-filter', 'CtsSuspendAppsTestCases', '--include-filter', 'CtsSuspendAppsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSuspendApps',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSustainedPerformanceHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSustainedPerformanceHost
new file mode 100644
index 0000000..fcecc72
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSustainedPerformanceHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSustainedPerformanceHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases, CtsSustainedPerformanceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSustainedPerformanceHost',
+        test_name='cheets_CTS_R.internal.x86.CtsSustainedPerformanceHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--include-filter', 'CtsSustainedPerformanceHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSustainedPerformanceHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware
new file mode 100644
index 0000000..403768f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsSustainedPerformanceHostTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSustainedPerformanceHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSync b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSync
new file mode 100644
index 0000000..1cba475
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSync
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSync'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases, CtsSyncAccountAccessOtherCertTestCases[secondary_user], CtsSyncContentHostTestCases, CtsSyncContentHostTestCases[secondary_user], CtsSyncManagerTestsCases, CtsSyncManagerTestsCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSync',
+        test_name='cheets_CTS_R.internal.x86.CtsSync',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases[secondary_user]', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 'CtsSyncContentHostTestCases[secondary_user]', '--include-filter', 'CtsSyncManagerTestsCases', '--include-filter', 'CtsSyncManagerTestsCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSync',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSystem b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSystem
new file mode 100644
index 0000000..6ec59bb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsSystem
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsSystem'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSystemApiAnnotationTestCases, CtsSystemApiAnnotationTestCases[secondary_user], CtsSystemApiSignatureTestCases, CtsSystemApiSignatureTestCases[secondary_user], CtsSystemIntentTestCases, CtsSystemIntentTestCases[secondary_user], CtsSystemUiHostTestCases, CtsSystemUiHostTestCases[instant], CtsSystemUiHostTestCases[secondary_user], CtsSystemUiTestCases, CtsSystemUiTestCases[instant], CtsSystemUiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsSystem',
+        test_name='cheets_CTS_R.internal.x86.CtsSystem',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiAnnotationTestCases[secondary_user]', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemIntentTestCases[secondary_user]', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiHostTestCases[instant]', '--include-filter', 'CtsSystemUiHostTestCases[secondary_user]', '--include-filter', 'CtsSystemUiTestCases', '--include-filter', 'CtsSystemUiTestCases[instant]', '--include-filter', 'CtsSystemUiTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSystem',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTaggingHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTaggingHost
new file mode 100644
index 0000000..9f7aa74
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTaggingHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsTaggingHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTaggingHostTestCases, CtsTaggingHostTestCases[instant], CtsTaggingHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTaggingHost',
+        test_name='cheets_CTS_R.internal.x86.CtsTaggingHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTaggingHostTestCases', '--include-filter', 'CtsTaggingHostTestCases[instant]', '--include-filter', 'CtsTaggingHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTaggingHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTelecom b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTelecom
new file mode 100644
index 0000000..ce75672
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTelecom
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsTelecom'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases2[secondary_user], CtsTelecomTestCases3, CtsTelecomTestCases3[secondary_user], CtsTelecomTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTelecom',
+        test_name='cheets_CTS_R.internal.x86.CtsTelecom',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases2[secondary_user]', '--include-filter', 'CtsTelecomTestCases3', '--include-filter', 'CtsTelecomTestCases3[secondary_user]', '--include-filter', 'CtsTelecomTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTelecom',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTelephony b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTelephony
new file mode 100644
index 0000000..7ab13d9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTelephony
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsTelephony'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTelephony2TestCases, CtsTelephony2TestCases[instant], CtsTelephony2TestCases[secondary_user], CtsTelephony3TestCases, CtsTelephony3TestCases[secondary_user], CtsTelephonyHostCases, CtsTelephonyHostCases[secondary_user], CtsTelephonyProviderHostCases, CtsTelephonyProviderHostCases[secondary_user], CtsTelephonyProviderTestCases, CtsTelephonyProviderTestCases[secondary_user], CtsTelephonySdk28TestCases, CtsTelephonySdk28TestCases[secondary_user], CtsTelephonyTestCases, CtsTelephonyTestCasesPermissionReadPhoneState, CtsTelephonyTestCasesPermissionReadPhoneState[instant], CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTelephony',
+        test_name='cheets_CTS_R.internal.x86.CtsTelephony',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephony2TestCases[instant]', '--include-filter', 'CtsTelephony2TestCases[secondary_user]', '--include-filter', 'CtsTelephony3TestCases', '--include-filter', 'CtsTelephony3TestCases[secondary_user]', '--include-filter', 'CtsTelephonyHostCases', '--include-filter', 'CtsTelephonyHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderHostCases', '--include-filter', 'CtsTelephonyProviderHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderTestCases', '--include-filter', 'CtsTelephonyProviderTestCases[secondary_user]', '--include-filter', 'CtsTelephonySdk28TestCases', '--include-filter', 'CtsTelephonySdk28TestCases[secondary_user]', '--include-filter', 'CtsTelephonyTestCases', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[instant]', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTelephony',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=32400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTestHarnessMode b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTestHarnessMode
new file mode 100644
index 0000000..f1ed5e4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTestHarnessMode
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsTestHarnessMode'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTestHarnessModeTestCases, CtsTestHarnessModeTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTestHarnessMode',
+        test_name='cheets_CTS_R.internal.x86.CtsTestHarnessMode',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTestHarnessModeTestCases', '--include-filter', 'CtsTestHarnessModeTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTestHarnessMode',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTetheringTest b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTetheringTest
new file mode 100644
index 0000000..62992f8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTetheringTest
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsTetheringTest'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTetheringTest, CtsTetheringTest[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTetheringTest',
+        test_name='cheets_CTS_R.internal.x86.CtsTetheringTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTetheringTest', '--include-filter', 'CtsTetheringTest[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTetheringTest',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsText b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsText
new file mode 100644
index 0000000..c0132b6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsText
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsText'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTextClassifierTestCases, CtsTextClassifierTestCases[secondary_user], CtsTextTestCases, CtsTextTestCases[instant], CtsTextTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsText',
+        test_name='cheets_CTS_R.internal.x86.CtsText',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTextClassifierTestCases', '--include-filter', 'CtsTextClassifierTestCases[secondary_user]', '--include-filter', 'CtsTextTestCases', '--include-filter', 'CtsTextTestCases[instant]', '--include-filter', 'CtsTextTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsText',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTfliteNnapiDelegate b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTfliteNnapiDelegate
new file mode 100644
index 0000000..813b4d3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTfliteNnapiDelegate
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsTfliteNnapiDelegate'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTfliteNnapiDelegateTestCases, CtsTfliteNnapiDelegateTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTfliteNnapiDelegate',
+        test_name='cheets_CTS_R.internal.x86.CtsTfliteNnapiDelegate',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTfliteNnapiDelegateTestCases', '--include-filter', 'CtsTfliteNnapiDelegateTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTfliteNnapiDelegate',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTheme b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTheme
new file mode 100644
index 0000000..36df3dc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTheme
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsTheme'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsThemeDeviceTestCases, CtsThemeDeviceTestCases[secondary_user], CtsThemeHostTestCases, CtsThemeHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTheme',
+        test_name='cheets_CTS_R.internal.x86.CtsTheme',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeDeviceTestCases[secondary_user]', '--include-filter', 'CtsThemeHostTestCases', '--include-filter', 'CtsThemeHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTheme',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsThermal b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsThermal
new file mode 100644
index 0000000..ac92055
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsThermal
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsThermal'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsThermalTestCases, CtsThermalTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsThermal',
+        test_name='cheets_CTS_R.internal.x86.CtsThermal',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsThermalTestCases', '--include-filter', 'CtsThermalTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsThermal',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsToast b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsToast
new file mode 100644
index 0000000..03c31df
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsToast
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsToast'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsToastLegacyTestCases, CtsToastLegacyTestCases[secondary_user], CtsToastTestCases, CtsToastTestCases[instant], CtsToastTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsToast',
+        test_name='cheets_CTS_R.internal.x86.CtsToast',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastLegacyTestCases[secondary_user]', '--include-filter', 'CtsToastTestCases', '--include-filter', 'CtsToastTestCases[instant]', '--include-filter', 'CtsToastTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsToast',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTransition b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTransition
new file mode 100644
index 0000000..6f683d4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTransition
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsTransition'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTransitionTestCases, CtsTransitionTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTransition',
+        test_name='cheets_CTS_R.internal.x86.CtsTransition',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTransitionTestCases', '--include-filter', 'CtsTransitionTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTransition',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTrustedVoiceHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTrustedVoiceHost
new file mode 100644
index 0000000..f65d63a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTrustedVoiceHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsTrustedVoiceHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTrustedVoiceHostTestCases, CtsTrustedVoiceHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTrustedVoiceHost',
+        test_name='cheets_CTS_R.internal.x86.CtsTrustedVoiceHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTrustedVoiceHostTestCases', '--include-filter', 'CtsTrustedVoiceHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTrustedVoiceHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTv b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTv
new file mode 100644
index 0000000..9061726
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsTv
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsTv'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsTvProviderTestCases, CtsTvProviderTestCases[secondary_user], CtsTvTestCases, CtsTvTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsTv',
+        test_name='cheets_CTS_R.internal.x86.CtsTv',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvProviderTestCases[secondary_user]', '--include-filter', 'CtsTvTestCases', '--include-filter', 'CtsTvTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsTv',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUi b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUi
new file mode 100644
index 0000000..71bf151
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUi
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsUi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUiAutomationTestCases, CtsUiAutomationTestCases[instant], CtsUiAutomationTestCases[secondary_user], CtsUiRenderingTestCases, CtsUiRenderingTestCases27, CtsUiRenderingTestCases27[instant], CtsUiRenderingTestCases27[secondary_user], CtsUiRenderingTestCases[instant], CtsUiRenderingTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUi',
+        test_name='cheets_CTS_R.internal.x86.CtsUi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiAutomationTestCases[instant]', '--include-filter', 'CtsUiAutomationTestCases[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases', '--include-filter', 'CtsUiRenderingTestCases27', '--include-filter', 'CtsUiRenderingTestCases27[instant]', '--include-filter', 'CtsUiRenderingTestCases27[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases[instant]', '--include-filter', 'CtsUiRenderingTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUi',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUidIsolation b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUidIsolation
new file mode 100644
index 0000000..892a22e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUidIsolation
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsUidIsolation'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUidIsolationTestCases, CtsUidIsolationTestCases[instant], CtsUidIsolationTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUidIsolation',
+        test_name='cheets_CTS_R.internal.x86.CtsUidIsolation',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUidIsolationTestCases', '--include-filter', 'CtsUidIsolationTestCases[instant]', '--include-filter', 'CtsUidIsolationTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUidIsolation',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsageStats b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsageStats
new file mode 100644
index 0000000..0e06717
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsageStats
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsUsageStats'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsageStatsTestCases, CtsUsageStatsTestCases[instant], CtsUsageStatsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUsageStats',
+        test_name='cheets_CTS_R.internal.x86.CtsUsageStats',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases', '--include-filter', 'CtsUsageStatsTestCases[instant]', '--include-filter', 'CtsUsageStatsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsageStats',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsageStatsTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsageStatsTestCases.ctshardware
new file mode 100644
index 0000000..a01f085
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsageStatsTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsUsageStatsTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUsageStatsTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsUsageStatsTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsageStatsTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsb b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsb
new file mode 100644
index 0000000..efbdcb7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsb
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsUsb'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsbManagerTestCases, CtsUsbManagerTestCases[secondary_user], CtsUsbTests, CtsUsbTests[instant], CtsUsbTests[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUsb',
+        test_name='cheets_CTS_R.internal.x86.CtsUsb',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsbManagerTestCases', '--include-filter', 'CtsUsbManagerTestCases[secondary_user]', '--include-filter', 'CtsUsbTests', '--include-filter', 'CtsUsbTests[instant]', '--include-filter', 'CtsUsbTests[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsb',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=10800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsesLibraryHost b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsesLibraryHost
new file mode 100644
index 0000000..d6e7572
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUsesLibraryHost
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsUsesLibraryHost'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUsesLibraryHostTestCases, CtsUsesLibraryHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUsesLibraryHost',
+        test_name='cheets_CTS_R.internal.x86.CtsUsesLibraryHost',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsesLibraryHostTestCases', '--include-filter', 'CtsUsesLibraryHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsesLibraryHost',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUtil b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUtil
new file mode 100644
index 0000000..d31641d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsUtil
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsUtil'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsUtilTestCases, CtsUtilTestCases[instant], CtsUtilTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsUtil',
+        test_name='cheets_CTS_R.internal.x86.CtsUtil',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUtilTestCases', '--include-filter', 'CtsUtilTestCases[instant]', '--include-filter', 'CtsUtilTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUtil',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsVideo b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsVideo
new file mode 100644
index 0000000..a09b044
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsVideo
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsVideo'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVideoTestCases, CtsVideoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsVideo',
+        test_name='cheets_CTS_R.internal.x86.CtsVideo',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVideoTestCases', '--include-filter', 'CtsVideoTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsVideo',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsView b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsView
new file mode 100644
index 0000000..7f562fb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsView
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsView'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewInspectorAnnotationProcessorTestCases, CtsViewInspectorAnnotationProcessorTestCases[instant], CtsViewInspectorAnnotationProcessorTestCases[secondary_user], CtsViewTestCases, CtsViewTestCasesSdk28, CtsViewTestCasesSdk28[instant], CtsViewTestCasesSdk28[secondary_user], CtsViewTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsView',
+        test_name='cheets_CTS_R.internal.x86.CtsView',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[instant]', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[secondary_user]', '--include-filter', 'CtsViewTestCases', '--include-filter', 'CtsViewTestCasesSdk28', '--include-filter', 'CtsViewTestCasesSdk28[instant]', '--include-filter', 'CtsViewTestCasesSdk28[secondary_user]', '--include-filter', 'CtsViewTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsView',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=23400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsViewTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsViewTestCases.ctshardware
new file mode 100644
index 0000000..d6f5bf2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsViewTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsViewTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsViewTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsViewTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsVoice b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsVoice
new file mode 100644
index 0000000..9209766
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsVoice
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsVoice'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVoiceInteractionTestCases, CtsVoiceInteractionTestCases[instant], CtsVoiceInteractionTestCases[secondary_user], CtsVoiceSettingsTestCases, CtsVoiceSettingsTestCases[instant], CtsVoiceSettingsTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsVoice',
+        test_name='cheets_CTS_R.internal.x86.CtsVoice',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceInteractionTestCases[instant]', '--include-filter', 'CtsVoiceInteractionTestCases[secondary_user]', '--include-filter', 'CtsVoiceSettingsTestCases', '--include-filter', 'CtsVoiceSettingsTestCases[instant]', '--include-filter', 'CtsVoiceSettingsTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsVoice',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsVr b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsVr
new file mode 100644
index 0000000..8433ce9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsVr
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsVr'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVrTestCases, CtsVrTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsVr',
+        test_name='cheets_CTS_R.internal.x86.CtsVr',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVrTestCases', '--include-filter', 'CtsVrTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsVr',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWebkit b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWebkit
new file mode 100644
index 0000000..a0a0707
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWebkit
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWebkit'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWebkitTestCases, CtsWebkitTestCases[instant], CtsWebkitTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWebkit',
+        test_name='cheets_CTS_R.internal.x86.CtsWebkit',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWebkitTestCases', '--include-filter', 'CtsWebkitTestCases[instant]', '--include-filter', 'CtsWebkitTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWebkit',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWidget b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWidget
new file mode 100644
index 0000000..22f0f2c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWidget
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWidget'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWidgetTestCases, CtsWidgetTestCases29, CtsWidgetTestCases29[instant], CtsWidgetTestCases29[secondary_user], CtsWidgetTestCases[instant], CtsWidgetTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWidget',
+        test_name='cheets_CTS_R.internal.x86.CtsWidget',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWidgetTestCases', '--include-filter', 'CtsWidgetTestCases29', '--include-filter', 'CtsWidgetTestCases29[instant]', '--include-filter', 'CtsWidgetTestCases29[secondary_user]', '--include-filter', 'CtsWidgetTestCases[instant]', '--include-filter', 'CtsWidgetTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWidget',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=18000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWifi b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWifi
new file mode 100644
index 0000000..2093e3d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWifi
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWifi'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWifiBroadcastsHostTestCases, CtsWifiBroadcastsHostTestCases[instant], CtsWifiBroadcastsHostTestCases[secondary_user], CtsWifiTestCases, CtsWifiTestCases[instant], CtsWifiTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWifi',
+        test_name='cheets_CTS_R.internal.x86.CtsWifi',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWifiBroadcastsHostTestCases', '--include-filter', 'CtsWifiBroadcastsHostTestCases[instant]', '--include-filter', 'CtsWifiBroadcastsHostTestCases[secondary_user]', '--include-filter', 'CtsWifiTestCases', '--include-filter', 'CtsWifiTestCases[instant]', '--include-filter', 'CtsWifiTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWifi',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=12600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWifiTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWifiTestCases.ctshardware
new file mode 100644
index 0000000..491d774
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWifiTestCases.ctshardware
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWifiTestCases.ctshardware'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWifiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWifiTestCases.ctshardware',
+        test_name='cheets_CTS_R.internal.x86.CtsWifiTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWifiTestCases', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWifiTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager
new file mode 100644
index 0000000..91a61ff
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManagerDeviceTestCases, CtsWindowManagerDeviceTestCases[secondary_user], CtsWindowManagerJetpackTestCases, CtsWindowManagerJetpackTestCases[secondary_user], CtsWindowManagerSdk25TestCases, CtsWindowManagerSdk25TestCases[secondary_user], CtsWindowManagerSdk28TestCases, CtsWindowManagerSdk28TestCases[secondary_user], CtsWindowManagerSdk29TestCases, CtsWindowManagerSdk29TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='internal.x86.CtsWindowManager',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--include-filter', 'CtsWindowManagerDeviceTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerJetpackTestCases', '--include-filter', 'CtsWindowManagerJetpackTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk25TestCases', '--include-filter', 'CtsWindowManagerSdk25TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk28TestCases', '--include-filter', 'CtsWindowManagerSdk28TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk29TestCases', '--include-filter', 'CtsWindowManagerSdk29TestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManager',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=19800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.A b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.A
new file mode 100644
index 0000000..d9ac840
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.A
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.A'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.A of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.A',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.A',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityManagerGetConfigTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityMetricsLoggerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTaskAffinityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityTransitionTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityViewTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ActivityVisibilityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AddWindowAsUserTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsAppOpsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsImportanceTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AlertWindowsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AmProfileTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AmStartOptionsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AnrTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AppConfigurationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AspectRatioTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.AssistantStackTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.C b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.C
new file mode 100644
index 0000000..f5f3ace
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.C
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.C'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.C of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.C',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.C',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.CloseOnOutsideTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ConfigChangeTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.CrossAppDragAndDropTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.D b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.D
new file mode 100644
index 0000000..c7d5aaf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.D
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.D'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.D of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.D',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.D',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DecorInsetTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DeprecatedTargetSdkTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DialogFrameTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplayCutoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplaySizeTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DisplayTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DragDropTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.DreamManagerServiceTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.F b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.F
new file mode 100644
index 0000000..572211e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.F
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.F'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.F of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.F',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.F',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ForceRelayoutTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.FreeformWindowingModeTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.L b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.L
new file mode 100644
index 0000000..57ff51d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.L
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.L'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.L of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.L',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.L',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LocationInWindowTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.LocationOnScreenTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.M b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.M
new file mode 100644
index 0000000..55654fa
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.M
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.M'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.M of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.M',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.M',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ManifestLayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MinimalPostProcessingTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayActivityLaunchTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayClientTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayLockedKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPolicyTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplayPrivateDisplayTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySecurityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.MultiDisplaySystemDecorationTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.Override b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.Override
new file mode 100644
index 0000000..ae76c1a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.Override
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.Override'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.Override of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.Override',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.Override',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.OverrideConfigTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.P b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.P
new file mode 100644
index 0000000..5fcfedb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.P
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.P'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.P of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.P',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.P',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PinnedStackTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PrereleaseSdkTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.PresentationTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.R b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.R
new file mode 100644
index 0000000..c8ba40d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.R
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.R'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.R of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.R',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.R',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ReplaceWindowTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.RobustnessTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.S b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.S
new file mode 100644
index 0000000..bb7472f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.S
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.S'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.S of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.S',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.S',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SplashscreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SplitScreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityAsUserTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.StartActivityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceControlViewHostTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewSurfaceValidatorTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.SurfaceViewTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.T b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.T
new file mode 100644
index 0000000..677a56b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.T
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.T'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.T of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.T',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.T',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.ToastWindowTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.TransitionSelectionTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.Window b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.Window
new file mode 100644
index 0000000..ccc5b23
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.Window
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.Window'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.Window of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.Window',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.Window',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextPolicyTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowContextTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowFocusTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInputTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationCallbackTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationControllerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationImeTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationSynchronicityTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsAnimationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsControllerTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsLayoutTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsPolicyTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowInsetsTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_BadTokenExceptionTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowManager_LayoutParamsTest', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowMetricsTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.WindowTest', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.intent b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.intent
new file mode 100644
index 0000000..dfc1709
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.intent
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.intent'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.intent of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.intent',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.intent',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentGenerationTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.intent.IntentTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.lifecycle b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.lifecycle
new file mode 100644
index 0000000..9142513
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWindowManager.lifecycle
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWindowManager.lifecycle'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWindowManager.lifecycle of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWindowManager.lifecycle',
+        test_name='cheets_CTS_R.internal.x86.CtsWindowManager.lifecycle',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleFreeformTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleKeyguardTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecyclePipTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleSplitScreenTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityLifecycleTopResumedStateTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityStarterTests', '--include-filter', 'CtsWindowManagerDeviceTestCases android.server.wm.lifecycle.ActivityTests', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWindowManagerDeviceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWrap b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWrap
new file mode 100644
index 0000000..3dffc87
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.CtsWrap
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.CtsWrap'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsWrapNoWrapTestCases, CtsWrapNoWrapTestCases[secondary_user], CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugMallocDebugTestCases[secondary_user], CtsWrapWrapDebugTestCases, CtsWrapWrapDebugTestCases[secondary_user], CtsWrapWrapNoDebugTestCases, CtsWrapWrapNoDebugTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.CtsWrap',
+        test_name='cheets_CTS_R.internal.x86.CtsWrap',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapNoWrapTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWrap',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.LegacyStorageTest b/server/site_tests/cheets_CTS_R/control.internal.x86.LegacyStorageTest
new file mode 100644
index 0000000..a0fe742
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.LegacyStorageTest
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.LegacyStorageTest'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module LegacyStorageTest, LegacyStorageTest[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.LegacyStorageTest',
+        test_name='cheets_CTS_R.internal.x86.LegacyStorageTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'LegacyStorageTest', '--include-filter', 'LegacyStorageTest[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='LegacyStorageTest',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.ScopedStorageTest b/server/site_tests/cheets_CTS_R/control.internal.x86.ScopedStorageTest
new file mode 100644
index 0000000..a1b21da
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.ScopedStorageTest
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.ScopedStorageTest'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module ScopedStorageTest, ScopedStorageTest[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.ScopedStorageTest',
+        test_name='cheets_CTS_R.internal.x86.ScopedStorageTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'ScopedStorageTest', '--include-filter', 'ScopedStorageTest[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='ScopedStorageTest',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases
new file mode 100644
index 0000000..271de6b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAbiOverrideHostTestCases, CtsAbiOverrideHostTestCases[instant], CtsAbiOverrideHostTestCases[secondary_user], CtsAccelerationTestCases, CtsAccelerationTestCases[instant], CtsAccelerationTestCases[secondary_user], CtsAccessibilityServiceSdk29TestCases, CtsAccessibilityServiceSdk29TestCases[instant], CtsAccessibilityServiceSdk29TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAbiOverrideHostTestCases', '--include-filter', 'CtsAbiOverrideHostTestCases[instant]', '--include-filter', 'CtsAbiOverrideHostTestCases[secondary_user]', '--include-filter', 'CtsAccelerationTestCases', '--include-filter', 'CtsAccelerationTestCases[instant]', '--include-filter', 'CtsAccelerationTestCases[secondary_user]', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[instant]', '--include-filter', 'CtsAccessibilityServiceSdk29TestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsAbiOverrideHostTestCases_-_CtsAccessibilityServiceSdk29TestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
new file mode 100644
index 0000000..99880d1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityServiceTestCases, CtsAccessibilityServiceTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityServiceTestCases', '--include-filter', 'CtsAccessibilityServiceTestCases[instant]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsAccessibilityServiceTestCases_-_CtsAccessibilityServiceTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases
new file mode 100644
index 0000000..9c69bd5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases
@@ -0,0 +1,51 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAccessibilityTestCases, CtsAccessibilityTestCases[instant], CtsAccessibilityTestCases[secondary_user], CtsAccountManagerTestCases, CtsAccountManagerTestCases[instant], CtsAccountManagerTestCases[secondary_user], CtsAccountsHostTestCases, CtsAccountsHostTestCases[instant], CtsAccountsHostTestCases[secondary_user], CtsActivityManagerBackgroundActivityTestCases, CtsActivityManagerBackgroundActivityTestCases[secondary_user], CtsAdbHostTestCases, CtsAdbHostTestCases[secondary_user], CtsAdbManagerHostTestCases, CtsAdbManagerHostTestCases[secondary_user], CtsAdminPackageInstallerTestCases, CtsAdminTestCases, CtsAlarmManagerTestCases, CtsAlarmManagerTestCases[instant], CtsAlarmManagerTestCases[secondary_user], CtsAndroidAppTestCases, CtsAndroidAppTestCases[instant], CtsAndroidAppTestCases[secondary_user], CtsAndroidTestBase28ApiSignatureTestCases, CtsAndroidTestBase28ApiSignatureTestCases[instant], CtsAndroidTestBase28ApiSignatureTestCases[secondary_user], CtsAndroidTestBaseCurrentApiSignatureTestCases, CtsAndroidTestBaseCurrentApiSignatureTestCases[instant], CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestMockCurrentApiSignatureTestCases, CtsAndroidTestMockCurrentApiSignatureTestCases[instant], CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user], CtsAndroidTestRunnerCurrentApiSignatureTestCases, CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant], CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user], CtsAngleIntegrationHostTestCases, CtsAngleIntegrationHostTestCases[instant], CtsAngleIntegrationHostTestCases[secondary_user], CtsAnimationTestCases, CtsAnimationTestCases[instant], CtsAnimationTestCases[secondary_user], CtsApacheHttpLegacy27ApiSignatureTestCases, CtsApacheHttpLegacy27ApiSignatureTestCases[instant], CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyCurrentApiSignatureTestCases, CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant], CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases, CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant], CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user], CtsApexTestCases, CtsApexTestCases[secondary_user], CtsAppBindingHostTestCases, CtsAppBindingHostTestCases[secondary_user], CtsAppCompatHostTestCases, CtsAppCompatHostTestCases[instant], CtsAppCompatHostTestCases[secondary_user], CtsAppComponentFactoryTestCases, CtsAppComponentFactoryTestCases[instant], CtsAppComponentFactoryTestCases[secondary_user], CtsAppEnumerationTestCases, CtsAppEnumerationTestCases[secondary_user], CtsAppExitTestCases, CtsAppExitTestCases[instant], CtsAppExitTestCases[secondary_user], CtsAppIntegrityDeviceTestCases, CtsAppOpsTestCases, CtsAppOpsTestCases[instant], CtsAppOpsTestCases[secondary_user], CtsAppPredictionServiceTestCases, CtsAppPredictionServiceTestCases[secondary_user], CtsAppSecurityHostTestCases, CtsAppSecurityHostTestCases[secondary_user], CtsAppTestCases, CtsAppTestCases[instant], CtsAppTestCases[secondary_user], CtsAppUsageHostTestCases, CtsAppUsageHostTestCases[instant], CtsAppUsageHostTestCases[secondary_user], CtsAppWidgetTestCases, CtsAppWidgetTestCases[instant], CtsAppWidgetTestCases[secondary_user], CtsAslrMallocTestCases, CtsAslrMallocTestCases[secondary_user], CtsAssistTestCases, CtsAssistTestCases[instant], CtsAssistTestCases[secondary_user], CtsAtomicInstallTestCases, CtsAtomicInstallTestCases[secondary_user], CtsAtraceHostTestCases, CtsAtraceHostTestCases[instant], CtsAtraceHostTestCases[secondary_user], CtsAttentionServiceDeviceTestCases, CtsAttentionServiceDeviceTestCases[secondary_user], CtsAutoFillServiceTestCases, CtsAutoFillServiceTestCases[instant], CtsAutoFillServiceTestCases[secondary_user], CtsBackgroundRestrictionsTestCases, CtsBackgroundRestrictionsTestCases[instant], CtsBackgroundRestrictionsTestCases[secondary_user], CtsBackupHostTestCases, CtsBackupTestCases, CtsBatterySavingTestCases, CtsBatterySavingTestCases[secondary_user], CtsBionicAppTestCases, CtsBionicAppTestCases[instant], CtsBionicAppTestCases[secondary_user], CtsBionicTestCases, CtsBionicTestCases[secondary_user], CtsBlobStoreHostTestCases, CtsBlobStoreHostTestCases[secondary_user], CtsBlobStoreHostTestHelper, CtsBlobStoreHostTestHelper[secondary_user], CtsBlobStoreTestCases, CtsBlobStoreTestCases[secondary_user], CtsBlobStoreTestHelper, CtsBlobStoreTestHelperDiffSig, CtsBlobStoreTestHelperDiffSig2, CtsBlobStoreTestHelperDiffSig2[secondary_user], CtsBlobStoreTestHelperDiffSig[secondary_user], CtsBlobStoreTestHelper[secondary_user], CtsBluetoothTestCases, CtsBluetoothTestCases[secondary_user], CtsBootStatsTestCases, CtsBootStatsTestCases[secondary_user], CtsCalendarProviderTestCases, CtsCalendarProviderTestCases[secondary_user], CtsCalendarcommon2TestCases, CtsCalendarcommon2TestCases[secondary_user], CtsCameraApi25TestCases, CtsCameraApi25TestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        enable_default_apps=True,
+        tag='internal.x86.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAccessibilityTestCases', '--include-filter', 'CtsAccessibilityTestCases[instant]', '--include-filter', 'CtsAccessibilityTestCases[secondary_user]', '--include-filter', 'CtsAccountManagerTestCases', '--include-filter', 'CtsAccountManagerTestCases[instant]', '--include-filter', 'CtsAccountManagerTestCases[secondary_user]', '--include-filter', 'CtsAccountsHostTestCases', '--include-filter', 'CtsAccountsHostTestCases[instant]', '--include-filter', 'CtsAccountsHostTestCases[secondary_user]', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases', '--include-filter', 'CtsActivityManagerBackgroundActivityTestCases[secondary_user]', '--include-filter', 'CtsAdbHostTestCases', '--include-filter', 'CtsAdbHostTestCases[secondary_user]', '--include-filter', 'CtsAdbManagerHostTestCases', '--include-filter', 'CtsAdbManagerHostTestCases[secondary_user]', '--include-filter', 'CtsAdminPackageInstallerTestCases', '--include-filter', 'CtsAdminTestCases', '--include-filter', 'CtsAlarmManagerTestCases', '--include-filter', 'CtsAlarmManagerTestCases[instant]', '--include-filter', 'CtsAlarmManagerTestCases[secondary_user]', '--include-filter', 'CtsAndroidAppTestCases', '--include-filter', 'CtsAndroidAppTestCases[instant]', '--include-filter', 'CtsAndroidAppTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBase28ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestBaseCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestMockCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsAndroidTestRunnerCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsAngleIntegrationHostTestCases', '--include-filter', 'CtsAngleIntegrationHostTestCases[instant]', '--include-filter', 'CtsAngleIntegrationHostTestCases[secondary_user]', '--include-filter', 'CtsAnimationTestCases', '--include-filter', 'CtsAnimationTestCases[instant]', '--include-filter', 'CtsAnimationTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacy27ApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[instant]', '--include-filter', 'CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsApexTestCases', '--include-filter', 'CtsApexTestCases[secondary_user]', '--include-filter', 'CtsAppBindingHostTestCases', '--include-filter', 'CtsAppBindingHostTestCases[secondary_user]', '--include-filter', 'CtsAppCompatHostTestCases', '--include-filter', 'CtsAppCompatHostTestCases[instant]', '--include-filter', 'CtsAppCompatHostTestCases[secondary_user]', '--include-filter', 'CtsAppComponentFactoryTestCases', '--include-filter', 'CtsAppComponentFactoryTestCases[instant]', '--include-filter', 'CtsAppComponentFactoryTestCases[secondary_user]', '--include-filter', 'CtsAppEnumerationTestCases', '--include-filter', 'CtsAppEnumerationTestCases[secondary_user]', '--include-filter', 'CtsAppExitTestCases', '--include-filter', 'CtsAppExitTestCases[instant]', '--include-filter', 'CtsAppExitTestCases[secondary_user]', '--include-filter', 'CtsAppIntegrityDeviceTestCases', '--include-filter', 'CtsAppOpsTestCases', '--include-filter', 'CtsAppOpsTestCases[instant]', '--include-filter', 'CtsAppOpsTestCases[secondary_user]', '--include-filter', 'CtsAppPredictionServiceTestCases', '--include-filter', 'CtsAppPredictionServiceTestCases[secondary_user]', '--include-filter', 'CtsAppSecurityHostTestCases', '--include-filter', 'CtsAppSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsAppTestCases', '--include-filter', 'CtsAppTestCases[instant]', '--include-filter', 'CtsAppTestCases[secondary_user]', '--include-filter', 'CtsAppUsageHostTestCases', '--include-filter', 'CtsAppUsageHostTestCases[instant]', '--include-filter', 'CtsAppUsageHostTestCases[secondary_user]', '--include-filter', 'CtsAppWidgetTestCases', '--include-filter', 'CtsAppWidgetTestCases[instant]', '--include-filter', 'CtsAppWidgetTestCases[secondary_user]', '--include-filter', 'CtsAslrMallocTestCases', '--include-filter', 'CtsAslrMallocTestCases[secondary_user]', '--include-filter', 'CtsAssistTestCases', '--include-filter', 'CtsAssistTestCases[instant]', '--include-filter', 'CtsAssistTestCases[secondary_user]', '--include-filter', 'CtsAtomicInstallTestCases', '--include-filter', 'CtsAtomicInstallTestCases[secondary_user]', '--include-filter', 'CtsAtraceHostTestCases', '--include-filter', 'CtsAtraceHostTestCases[instant]', '--include-filter', 'CtsAtraceHostTestCases[secondary_user]', '--include-filter', 'CtsAttentionServiceDeviceTestCases', '--include-filter', 'CtsAttentionServiceDeviceTestCases[secondary_user]', '--include-filter', 'CtsAutoFillServiceTestCases', '--include-filter', 'CtsAutoFillServiceTestCases[instant]', '--include-filter', 'CtsAutoFillServiceTestCases[secondary_user]', '--include-filter', 'CtsBackgroundRestrictionsTestCases', '--include-filter', 'CtsBackgroundRestrictionsTestCases[instant]', '--include-filter', 'CtsBackgroundRestrictionsTestCases[secondary_user]', '--include-filter', 'CtsBackupHostTestCases', '--include-filter', 'CtsBackupTestCases', '--include-filter', 'CtsBatterySavingTestCases', '--include-filter', 'CtsBatterySavingTestCases[secondary_user]', '--include-filter', 'CtsBionicAppTestCases', '--include-filter', 'CtsBionicAppTestCases[instant]', '--include-filter', 'CtsBionicAppTestCases[secondary_user]', '--include-filter', 'CtsBionicTestCases', '--include-filter', 'CtsBionicTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreHostTestCases', '--include-filter', 'CtsBlobStoreHostTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreHostTestHelper', '--include-filter', 'CtsBlobStoreHostTestHelper[secondary_user]', '--include-filter', 'CtsBlobStoreTestCases', '--include-filter', 'CtsBlobStoreTestCases[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper', '--include-filter', 'CtsBlobStoreTestHelperDiffSig', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2', '--include-filter', 'CtsBlobStoreTestHelperDiffSig2[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelperDiffSig[secondary_user]', '--include-filter', 'CtsBlobStoreTestHelper[secondary_user]', '--include-filter', 'CtsBluetoothTestCases', '--include-filter', 'CtsBluetoothTestCases[secondary_user]', '--include-filter', 'CtsBootStatsTestCases', '--include-filter', 'CtsBootStatsTestCases[secondary_user]', '--include-filter', 'CtsCalendarProviderTestCases', '--include-filter', 'CtsCalendarProviderTestCases[secondary_user]', '--include-filter', 'CtsCalendarcommon2TestCases', '--include-filter', 'CtsCalendarcommon2TestCases[secondary_user]', '--include-filter', 'CtsCameraApi25TestCases', '--include-filter', 'CtsCameraApi25TestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        hard_reboot_on_failure=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
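The 'all.*' collection file above differs from the per-module files in three ways: it opportunistically attaches a servo and falls back to a plain host when servo setup fails, it ejects any removable drives before login (the lsblk | sed | eject precondition command), and it asks the harness for a hard reboot if the run fails (hard_reboot_on_failure=True). A minimal sketch of how the servo arguments are expected to reach the control file is below, assuming the usual test_that --args route; the hostname, port, and DUT placeholder are made up, 'hosts' is a control-file global, and only functions already used in the file above appear.

    from autotest_lib.server import utils as server_utils  # same import as in the file above

    # Hypothetical invocation (not part of this change):
    #   test_that --args "servo_host=labstation1 servo_port=9901" <DUT_IP> \
    #       cheets_CTS_R.internal.x86.all.CtsAccessibilityTestCases_-_CtsCameraApi25TestCases
    # inside the control file, --args arrives as a list of strings named 'args'.
    args = ['servo_host=labstation1 servo_port=9901']            # hypothetical value
    args_dict = server_utils.args_to_dict(args)                  # key=value tokens -> dict
    servo_args = hosts.CrosHost.get_servo_arguments(args_dict)   # keep only servo-related keys
    # hosts.create_host(machine, servo_args=servo_args) then attaches the servo; the bare
    # except in the generated file drops back to a servo-less host if that setup fails.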
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases
new file mode 100644
index 0000000..ae40bd6
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCameraTestCases, CtsCameraTestCases[instant], CtsCameraTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsCameraTestCases_-_CtsCameraTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases', '--include-filter', 'CtsCameraTestCases[instant]', '--include-filter', 'CtsCameraTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsCameraTestCases_-_CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsCarHostTestCases_-_CtsDatabaseTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsCarHostTestCases_-_CtsDatabaseTestCases
new file mode 100644
index 0000000..0d693d5
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsCarHostTestCases_-_CtsDatabaseTestCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsCarHostTestCases_-_CtsDatabaseTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsCarHostTestCases, CtsCarTestCases, CtsCarTestCases[secondary_user], CtsCarrierApiTestCases, CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases, CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant], CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user], CtsClassLoaderFactoryPathClassLoaderTestCases, CtsClassLoaderFactoryPathClassLoaderTestCases[instant], CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user], CtsClassloaderSplitsHostTestCases, CtsClassloaderSplitsHostTestCases[instant], CtsClassloaderSplitsHostTestCases[secondary_user], CtsCodePathHostTestCases, CtsCodePathHostTestCases[secondary_user], CtsColorModeTestCases, CtsColorModeTestCases[instant], CtsColorModeTestCases[secondary_user], CtsCompilationTestCases, CtsCompilationTestCases[secondary_user], CtsContactsProviderTestCases, CtsContactsProviderTestCases[secondary_user], CtsContactsProviderWipe, CtsContactsProviderWipe[secondary_user], CtsContentCaptureServiceTestCases, CtsContentCaptureServiceTestCases[instant], CtsContentCaptureServiceTestCases[secondary_user], CtsContentSuggestionsTestCases, CtsContentSuggestionsTestCases[secondary_user], CtsContentTestCases, CtsContentTestCases[instant], CtsContentTestCases[secondary_user], CtsControlsDeviceTestCases, CtsControlsDeviceTestCases[secondary_user], CtsCppToolsTestCases, CtsCppToolsTestCases[secondary_user], CtsCurrentApiSignatureTestCases, CtsCurrentApiSignatureTestCases[instant], CtsCurrentApiSignatureTestCases[secondary_user], CtsDatabaseTestCases, CtsDatabaseTestCases[instant], CtsDatabaseTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        enable_default_apps=True,
+        tag='internal.x86.all.CtsCarHostTestCases_-_CtsDatabaseTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsCarHostTestCases_-_CtsDatabaseTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCarHostTestCases', '--include-filter', 'CtsCarTestCases', '--include-filter', 'CtsCarTestCases[secondary_user]', '--include-filter', 'CtsCarrierApiTestCases', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases[secondary_user]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[instant]', '--include-filter', 'CtsClassLoaderFactoryPathClassLoaderTestCases[secondary_user]', '--include-filter', 'CtsClassloaderSplitsHostTestCases', '--include-filter', 'CtsClassloaderSplitsHostTestCases[instant]', '--include-filter', 'CtsClassloaderSplitsHostTestCases[secondary_user]', '--include-filter', 'CtsCodePathHostTestCases', '--include-filter', 'CtsCodePathHostTestCases[secondary_user]', '--include-filter', 'CtsColorModeTestCases', '--include-filter', 'CtsColorModeTestCases[instant]', '--include-filter', 'CtsColorModeTestCases[secondary_user]', '--include-filter', 'CtsCompilationTestCases', '--include-filter', 'CtsCompilationTestCases[secondary_user]', '--include-filter', 'CtsContactsProviderTestCases', '--include-filter', 'CtsContactsProviderTestCases[secondary_user]', '--include-filter', 'CtsContactsProviderWipe', '--include-filter', 'CtsContactsProviderWipe[secondary_user]', '--include-filter', 'CtsContentCaptureServiceTestCases', '--include-filter', 'CtsContentCaptureServiceTestCases[instant]', '--include-filter', 'CtsContentCaptureServiceTestCases[secondary_user]', '--include-filter', 'CtsContentSuggestionsTestCases', '--include-filter', 'CtsContentSuggestionsTestCases[secondary_user]', '--include-filter', 'CtsContentTestCases', '--include-filter', 'CtsContentTestCases[instant]', '--include-filter', 'CtsContentTestCases[secondary_user]', '--include-filter', 'CtsControlsDeviceTestCases', '--include-filter', 'CtsControlsDeviceTestCases[secondary_user]', '--include-filter', 'CtsCppToolsTestCases', '--include-filter', 'CtsCppToolsTestCases[secondary_user]', '--include-filter', 'CtsCurrentApiSignatureTestCases', '--include-filter', 'CtsCurrentApiSignatureTestCases[instant]', '--include-filter', 'CtsCurrentApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsDatabaseTestCases', '--include-filter', 'CtsDatabaseTestCases[instant]', '--include-filter', 'CtsDatabaseTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsCarHostTestCases_-_CtsDatabaseTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32 b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32
new file mode 100644
index 0000000..313784f
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases, CtsDeqpTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32',
+        test_name='cheets_CTS_R.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--abi', 'x86'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsDeqpTestCases_-_CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64 b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64
new file mode 100644
index 0000000..8310f3d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+DOC = 'Run module CtsDeqpTestCases, CtsDeqpTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64',
+        test_name='cheets_CTS_R.internal.x86.all.CtsDeqpTestCases_-_CtsDeqpTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases', '--include-filter', 'CtsDeqpTestCases[secondary_user]', '--abi', 'x86_64'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsDeqpTestCases_-_CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
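The two CtsDeqpTestCases collections immediately above split the same dEQP module by ABI rather than by test list: apart from the .32/.64 suffix in the name, tag, and test_name, the files differ only in their run_template. Both fetch the x86 bundle, but the .32 variant pins '--abi x86' and, as generated, carries only the base include filter, while the .64 variant pins '--abi x86_64' and also includes the [secondary_user] parameterization. The small sketch below only restates that difference; the dict and its name are illustrative, with values copied from the two files.

    # ABI split for the dEQP collections (values copied from the two control files above).
    deqp_runs = {
        '32': ['run', 'commandAndExit', 'cts',
               '--include-filter', 'CtsDeqpTestCases',
               '--abi', 'x86'],
        '64': ['run', 'commandAndExit', 'cts',
               '--include-filter', 'CtsDeqpTestCases',
               '--include-filter', 'CtsDeqpTestCases[secondary_user]',
               '--abi', 'x86_64'],
    }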
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases
new file mode 100644
index 0000000..ebf7dd8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsDeviceConfigTestCases, CtsDeviceConfigTestCases[instant], CtsDeviceConfigTestCases[secondary_user], CtsDeviceIdleHostTestCases, CtsDeviceIdleHostTestCases[secondary_user], CtsDevicePolicyManagerTestCases, CtsDexMetadataHostTestCases, CtsDexMetadataHostTestCases[secondary_user], CtsDisplayTestCases, CtsDisplayTestCases[instant], CtsDisplayTestCases[secondary_user], CtsDownloadManagerApi28, CtsDownloadManagerApi28[secondary_user], CtsDownloadManagerInstaller, CtsDownloadManagerInstaller[secondary_user], CtsDpiTestCases, CtsDpiTestCases2, CtsDpiTestCases2[secondary_user], CtsDpiTestCases[instant], CtsDpiTestCases[secondary_user], CtsDreamsTestCases, CtsDreamsTestCases[instant], CtsDreamsTestCases[secondary_user], CtsDrmTestCases, CtsDrmTestCases[instant], CtsDrmTestCases[secondary_user], CtsDropBoxManagerTestCases, CtsDumpsysHostTestCases, CtsDumpsysHostTestCases[secondary_user], CtsDynamicLinkerTestCases, CtsDynamicLinkerTestCases[instant], CtsDynamicLinkerTestCases[secondary_user], CtsDynamicMimeHostTestCases, CtsDynamicMimeHostTestCases[secondary_user], CtsEdiHostTestCases, CtsEdiHostTestCases[secondary_user], CtsEffectTestCases, CtsEffectTestCases[instant], CtsEffectTestCases[secondary_user], CtsExtendedMockingTestCases, CtsExtendedMockingTestCases[instant], CtsExtendedMockingTestCases[secondary_user], CtsExternalServiceTestCases, CtsExternalServiceTestCases[secondary_user], CtsExtractNativeLibsHostTestCases, CtsExtractNativeLibsHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeviceConfigTestCases', '--include-filter', 'CtsDeviceConfigTestCases[instant]', '--include-filter', 'CtsDeviceConfigTestCases[secondary_user]', '--include-filter', 'CtsDeviceIdleHostTestCases', '--include-filter', 'CtsDeviceIdleHostTestCases[secondary_user]', '--include-filter', 'CtsDevicePolicyManagerTestCases', '--include-filter', 'CtsDexMetadataHostTestCases', '--include-filter', 'CtsDexMetadataHostTestCases[secondary_user]', '--include-filter', 'CtsDisplayTestCases', '--include-filter', 'CtsDisplayTestCases[instant]', '--include-filter', 'CtsDisplayTestCases[secondary_user]', '--include-filter', 'CtsDownloadManagerApi28', '--include-filter', 'CtsDownloadManagerApi28[secondary_user]', '--include-filter', 'CtsDownloadManagerInstaller', '--include-filter', 'CtsDownloadManagerInstaller[secondary_user]', '--include-filter', 'CtsDpiTestCases', '--include-filter', 'CtsDpiTestCases2', '--include-filter', 'CtsDpiTestCases2[secondary_user]', '--include-filter', 'CtsDpiTestCases[instant]', '--include-filter', 'CtsDpiTestCases[secondary_user]', '--include-filter', 'CtsDreamsTestCases', '--include-filter', 'CtsDreamsTestCases[instant]', '--include-filter', 'CtsDreamsTestCases[secondary_user]', '--include-filter', 'CtsDrmTestCases', '--include-filter', 'CtsDrmTestCases[instant]', '--include-filter', 'CtsDrmTestCases[secondary_user]', '--include-filter', 'CtsDropBoxManagerTestCases', '--include-filter', 'CtsDumpsysHostTestCases', '--include-filter', 'CtsDumpsysHostTestCases[secondary_user]', '--include-filter', 'CtsDynamicLinkerTestCases', '--include-filter', 'CtsDynamicLinkerTestCases[instant]', '--include-filter', 'CtsDynamicLinkerTestCases[secondary_user]', '--include-filter', 'CtsDynamicMimeHostTestCases', '--include-filter', 'CtsDynamicMimeHostTestCases[secondary_user]', '--include-filter', 'CtsEdiHostTestCases', '--include-filter', 'CtsEdiHostTestCases[secondary_user]', '--include-filter', 'CtsEffectTestCases', '--include-filter', 'CtsEffectTestCases[instant]', '--include-filter', 'CtsEffectTestCases[secondary_user]', '--include-filter', 'CtsExtendedMockingTestCases', '--include-filter', 'CtsExtendedMockingTestCases[instant]', '--include-filter', 'CtsExtendedMockingTestCases[secondary_user]', '--include-filter', 'CtsExternalServiceTestCases', '--include-filter', 'CtsExternalServiceTestCases[secondary_user]', '--include-filter', 'CtsExtractNativeLibsHostTestCases', '--include-filter', 'CtsExtractNativeLibsHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsDeviceConfigTestCases_-_CtsExtractNativeLibsHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
new file mode 100644
index 0000000..bbb000c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFileSystemTestCases, CtsFileSystemTestCases[instant], CtsFileSystemTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFileSystemTestCases', '--include-filter', 'CtsFileSystemTestCases[instant]', '--include-filter', 'CtsFileSystemTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsFileSystemTestCases_-_CtsFileSystemTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
new file mode 100644
index 0000000..710fcc4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsFragmentTestCases, CtsFragmentTestCasesSdk26, CtsFragmentTestCasesSdk26[instant], CtsFragmentTestCasesSdk26[secondary_user], CtsFragmentTestCases[instant], CtsFragmentTestCases[secondary_user], CtsFsMgrTestCases, CtsFsMgrTestCases[secondary_user], CtsGestureTestCases, CtsGestureTestCases[instant], CtsGestureTestCases[secondary_user], CtsGpuProfilingDataTestCases, CtsGpuProfilingDataTestCases[secondary_user], CtsGpuToolsHostTestCases, CtsGpuToolsHostTestCases[secondary_user], CtsGraphicsTestCases, CtsGraphicsTestCases[instant], CtsGraphicsTestCases[secondary_user], CtsGwpAsanTestCases, CtsGwpAsanTestCases[instant], CtsGwpAsanTestCases[secondary_user], CtsHardwareTestCases, CtsHardwareTestCases[secondary_user], CtsHarmfulAppWarningHostTestCases, CtsHarmfulAppWarningHostTestCases[secondary_user], CtsHdmiCecHostTestCases, CtsHdmiCecHostTestCases[secondary_user], CtsHiddenApiBlacklistApi27TestCases, CtsHiddenApiBlacklistApi27TestCases[secondary_user], CtsHiddenApiBlacklistApi28TestCases, CtsHiddenApiBlacklistApi28TestCases[secondary_user], CtsHiddenApiBlacklistCurrentApiTestCases, CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user], CtsHiddenApiBlacklistDebugClassTestCases, CtsHiddenApiBlacklistDebugClassTestCases[secondary_user], CtsHiddenApiBlacklistTestApiTestCases, CtsHiddenApiBlacklistTestApiTestCases[secondary_user], CtsHiddenApiKillswitchDebugClassTestCases, CtsHiddenApiKillswitchDebugClassTestCases[instant], CtsHiddenApiKillswitchDebugClassTestCases[secondary_user], CtsHiddenApiKillswitchWhitelistTestCases, CtsHiddenApiKillswitchWhitelistTestCases[instant], CtsHiddenApiKillswitchWhitelistTestCases[secondary_user], CtsHiddenApiKillswitchWildcardTestCases, CtsHiddenApiKillswitchWildcardTestCases[instant], CtsHiddenApiKillswitchWildcardTestCases[secondary_user], CtsHostTzDataTests, CtsHostTzDataTests[secondary_user], CtsHostsideNetworkTests, CtsHostsideNetworkTests[instant], CtsHostsideNetworkTests[secondary_user], CtsHostsideNumberBlockingTestCases, CtsHostsideNumberBlockingTestCases[secondary_user], CtsHostsideTvTests, CtsHostsideTvTests[secondary_user], CtsHostsideWebViewTests, CtsHostsideWebViewTests[instant], CtsHostsideWebViewTests[secondary_user], CtsIcuTestCases, CtsIcuTestCases[secondary_user], CtsIdentityTestCases, CtsIdentityTestCases[secondary_user], CtsIkeTestCases, CtsIkeTestCases[secondary_user], CtsIncidentHostTestCases, CtsIncidentHostTestCases[instant], CtsIncrementalInstallHostTestCases, CtsIncrementalInstallHostTestCases[secondary_user], CtsInitTestCases, CtsInitTestCases[secondary_user], CtsInlineMockingTestCases, CtsInlineMockingTestCases[instant], CtsInlineMockingTestCases[secondary_user], CtsInputMethodServiceHostTestCases, CtsInputMethodServiceHostTestCases[instant], CtsInputMethodServiceHostTestCases[secondary_user], CtsInputMethodTestCases, CtsInputMethodTestCases[instant], CtsInputMethodTestCases[secondary_user], CtsInstantAppTests, CtsInstantAppTests[secondary_user], CtsIntentSignatureTestCases, CtsIntentSignatureTestCases[secondary_user], CtsJdwpSecurityHostTestCases, CtsJdwpSecurityHostTestCases[secondary_user], CtsJdwpTestCases, CtsJdwpTestCases[instant], CtsJdwpTestCases[secondary_user], CtsJdwpTunnelHostTestCases, CtsJdwpTunnelHostTestCases[secondary_user], CtsJniTestCases, CtsJniTestCases[instant], CtsJniTestCases[secondary_user], CtsJobSchedulerSharedUidTestCases, CtsJobSchedulerSharedUidTestCases[secondary_user], CtsJobSchedulerTestCases, CtsJobSchedulerTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=30,
+        tag='internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsFragmentTestCases', '--include-filter', 'CtsFragmentTestCasesSdk26', '--include-filter', 'CtsFragmentTestCasesSdk26[instant]', '--include-filter', 'CtsFragmentTestCasesSdk26[secondary_user]', '--include-filter', 'CtsFragmentTestCases[instant]', '--include-filter', 'CtsFragmentTestCases[secondary_user]', '--include-filter', 'CtsFsMgrTestCases', '--include-filter', 'CtsFsMgrTestCases[secondary_user]', '--include-filter', 'CtsGestureTestCases', '--include-filter', 'CtsGestureTestCases[instant]', '--include-filter', 'CtsGestureTestCases[secondary_user]', '--include-filter', 'CtsGpuProfilingDataTestCases', '--include-filter', 'CtsGpuProfilingDataTestCases[secondary_user]', '--include-filter', 'CtsGpuToolsHostTestCases', '--include-filter', 'CtsGpuToolsHostTestCases[secondary_user]', '--include-filter', 'CtsGraphicsTestCases', '--include-filter', 'CtsGraphicsTestCases[instant]', '--include-filter', 'CtsGraphicsTestCases[secondary_user]', '--include-filter', 'CtsGwpAsanTestCases', '--include-filter', 'CtsGwpAsanTestCases[instant]', '--include-filter', 'CtsGwpAsanTestCases[secondary_user]', '--include-filter', 'CtsHardwareTestCases', '--include-filter', 'CtsHardwareTestCases[secondary_user]', '--include-filter', 'CtsHarmfulAppWarningHostTestCases', '--include-filter', 'CtsHarmfulAppWarningHostTestCases[secondary_user]', '--include-filter', 'CtsHdmiCecHostTestCases', '--include-filter', 'CtsHdmiCecHostTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi27TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases', '--include-filter', 'CtsHiddenApiBlacklistApi28TestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistCurrentApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases', '--include-filter', 'CtsHiddenApiBlacklistDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases', '--include-filter', 'CtsHiddenApiBlacklistTestApiTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchDebugClassTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWhitelistTestCases[secondary_user]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[instant]', '--include-filter', 'CtsHiddenApiKillswitchWildcardTestCases[secondary_user]', '--include-filter', 'CtsHostTzDataTests', '--include-filter', 'CtsHostTzDataTests[secondary_user]', '--include-filter', 'CtsHostsideNetworkTests', '--include-filter', 'CtsHostsideNetworkTests[instant]', '--include-filter', 'CtsHostsideNetworkTests[secondary_user]', '--include-filter', 'CtsHostsideNumberBlockingTestCases', '--include-filter', 'CtsHostsideNumberBlockingTestCases[secondary_user]', '--include-filter', 'CtsHostsideTvTests', '--include-filter', 'CtsHostsideTvTests[secondary_user]', '--include-filter', 'CtsHostsideWebViewTests', '--include-filter', 'CtsHostsideWebViewTests[instant]', '--include-filter', 'CtsHostsideWebViewTests[secondary_user]', '--include-filter', 'CtsIcuTestCases', '--include-filter', 'CtsIcuTestCases[secondary_user]', '--include-filter', 'CtsIdentityTestCases', '--include-filter', 'CtsIdentityTestCases[secondary_user]', '--include-filter', 'CtsIkeTestCases', '--include-filter', 'CtsIkeTestCases[secondary_user]', '--include-filter', 'CtsIncidentHostTestCases', '--include-filter', 'CtsIncidentHostTestCases[instant]', '--include-filter', 'CtsIncrementalInstallHostTestCases', '--include-filter', 'CtsIncrementalInstallHostTestCases[secondary_user]', '--include-filter', 'CtsInitTestCases', '--include-filter', 'CtsInitTestCases[secondary_user]', '--include-filter', 'CtsInlineMockingTestCases', '--include-filter', 'CtsInlineMockingTestCases[instant]', '--include-filter', 'CtsInlineMockingTestCases[secondary_user]', '--include-filter', 'CtsInputMethodServiceHostTestCases', '--include-filter', 'CtsInputMethodServiceHostTestCases[instant]', '--include-filter', 'CtsInputMethodServiceHostTestCases[secondary_user]', '--include-filter', 'CtsInputMethodTestCases', '--include-filter', 'CtsInputMethodTestCases[instant]', '--include-filter', 'CtsInputMethodTestCases[secondary_user]', '--include-filter', 'CtsInstantAppTests', '--include-filter', 'CtsInstantAppTests[secondary_user]', '--include-filter', 'CtsIntentSignatureTestCases', '--include-filter', 'CtsIntentSignatureTestCases[secondary_user]', '--include-filter', 'CtsJdwpSecurityHostTestCases', '--include-filter', 'CtsJdwpSecurityHostTestCases[secondary_user]', '--include-filter', 'CtsJdwpTestCases', '--include-filter', 'CtsJdwpTestCases[instant]', '--include-filter', 'CtsJdwpTestCases[secondary_user]', '--include-filter', 'CtsJdwpTunnelHostTestCases', '--include-filter', 'CtsJdwpTunnelHostTestCases[secondary_user]', '--include-filter', 'CtsJniTestCases', '--include-filter', 'CtsJniTestCases[instant]', '--include-filter', 'CtsJniTestCases[secondary_user]', '--include-filter', 'CtsJobSchedulerSharedUidTestCases', '--include-filter', 'CtsJobSchedulerSharedUidTestCases[secondary_user]', '--include-filter', 'CtsJobSchedulerTestCases', '--include-filter', 'CtsJobSchedulerTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsFragmentTestCases_-_CtsJobSchedulerTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases
new file mode 100644
index 0000000..a8a8ea9b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsJvmtiAttachingHostTestCases, CtsJvmtiAttachingHostTestCases[secondary_user], CtsJvmtiAttachingTestCases, CtsJvmtiAttachingTestCases[secondary_user], CtsJvmtiRedefineClassesHostTestCases, CtsJvmtiRedefineClassesHostTestCases[secondary_user], CtsJvmtiRunTest1900HostTestCases, CtsJvmtiRunTest1900HostTestCases[secondary_user], CtsJvmtiRunTest1901HostTestCases, CtsJvmtiRunTest1901HostTestCases[secondary_user], CtsJvmtiRunTest1902HostTestCases, CtsJvmtiRunTest1902HostTestCases[secondary_user], CtsJvmtiRunTest1903HostTestCases, CtsJvmtiRunTest1903HostTestCases[secondary_user], CtsJvmtiRunTest1904HostTestCases, CtsJvmtiRunTest1904HostTestCases[secondary_user], CtsJvmtiRunTest1906HostTestCases, CtsJvmtiRunTest1906HostTestCases[secondary_user], CtsJvmtiRunTest1907HostTestCases, CtsJvmtiRunTest1907HostTestCases[secondary_user], CtsJvmtiRunTest1908HostTestCases, CtsJvmtiRunTest1908HostTestCases[secondary_user], CtsJvmtiRunTest1909HostTestCases, CtsJvmtiRunTest1909HostTestCases[secondary_user], CtsJvmtiRunTest1910HostTestCases, CtsJvmtiRunTest1910HostTestCases[secondary_user], CtsJvmtiRunTest1911HostTestCases, CtsJvmtiRunTest1911HostTestCases[secondary_user], CtsJvmtiRunTest1912HostTestCases, CtsJvmtiRunTest1912HostTestCases[secondary_user], CtsJvmtiRunTest1913HostTestCases, CtsJvmtiRunTest1913HostTestCases[secondary_user], CtsJvmtiRunTest1914HostTestCases, CtsJvmtiRunTest1914HostTestCases[secondary_user], CtsJvmtiRunTest1915HostTestCases, CtsJvmtiRunTest1915HostTestCases[secondary_user], CtsJvmtiRunTest1916HostTestCases, CtsJvmtiRunTest1916HostTestCases[secondary_user], CtsJvmtiRunTest1917HostTestCases, CtsJvmtiRunTest1917HostTestCases[secondary_user], CtsJvmtiRunTest1920HostTestCases, CtsJvmtiRunTest1920HostTestCases[secondary_user], CtsJvmtiRunTest1921HostTestCases, CtsJvmtiRunTest1921HostTestCases[secondary_user], CtsJvmtiRunTest1922HostTestCases, CtsJvmtiRunTest1922HostTestCases[secondary_user], CtsJvmtiRunTest1923HostTestCases, CtsJvmtiRunTest1923HostTestCases[secondary_user], CtsJvmtiRunTest1924HostTestCases, CtsJvmtiRunTest1924HostTestCases[secondary_user], CtsJvmtiRunTest1925HostTestCases, CtsJvmtiRunTest1925HostTestCases[secondary_user], CtsJvmtiRunTest1926HostTestCases, CtsJvmtiRunTest1926HostTestCases[secondary_user], CtsJvmtiRunTest1927HostTestCases, CtsJvmtiRunTest1927HostTestCases[secondary_user], CtsJvmtiRunTest1928HostTestCases, CtsJvmtiRunTest1928HostTestCases[secondary_user], CtsJvmtiRunTest1930HostTestCases, CtsJvmtiRunTest1930HostTestCases[secondary_user], CtsJvmtiRunTest1931HostTestCases, CtsJvmtiRunTest1931HostTestCases[secondary_user], CtsJvmtiRunTest1932HostTestCases, CtsJvmtiRunTest1932HostTestCases[secondary_user], CtsJvmtiRunTest1933HostTestCases, CtsJvmtiRunTest1933HostTestCases[secondary_user], CtsJvmtiRunTest1934HostTestCases, CtsJvmtiRunTest1934HostTestCases[secondary_user], CtsJvmtiRunTest1936HostTestCases, CtsJvmtiRunTest1936HostTestCases[secondary_user], CtsJvmtiRunTest1937HostTestCases, CtsJvmtiRunTest1937HostTestCases[secondary_user], CtsJvmtiRunTest1939HostTestCases, CtsJvmtiRunTest1939HostTestCases[secondary_user], CtsJvmtiRunTest1941HostTestCases, CtsJvmtiRunTest1941HostTestCases[secondary_user], CtsJvmtiRunTest1942HostTestCases, CtsJvmtiRunTest1942HostTestCases[secondary_user], CtsJvmtiRunTest1943HostTestCases, CtsJvmtiRunTest1943HostTestCases[secondary_user], CtsJvmtiRunTest1953HostTestCases, CtsJvmtiRunTest1953HostTestCases[secondary_user], CtsJvmtiRunTest1958HostTestCases, CtsJvmtiRunTest1958HostTestCases[secondary_user], CtsJvmtiRunTest1962HostTestCases, CtsJvmtiRunTest1962HostTestCases[secondary_user], CtsJvmtiRunTest1967HostTestCases, CtsJvmtiRunTest1967HostTestCases[secondary_user], CtsJvmtiRunTest1968HostTestCases, CtsJvmtiRunTest1968HostTestCases[secondary_user], CtsJvmtiRunTest1969HostTestCases, CtsJvmtiRunTest1969HostTestCases[secondary_user], CtsJvmtiRunTest1970HostTestCases, CtsJvmtiRunTest1970HostTestCases[secondary_user], CtsJvmtiRunTest1971HostTestCases, CtsJvmtiRunTest1971HostTestCases[secondary_user], CtsJvmtiRunTest1974HostTestCases, CtsJvmtiRunTest1974HostTestCases[secondary_user], CtsJvmtiRunTest1975HostTestCases, CtsJvmtiRunTest1975HostTestCases[secondary_user], CtsJvmtiRunTest1976HostTestCases, CtsJvmtiRunTest1976HostTestCases[secondary_user], CtsJvmtiRunTest1977HostTestCases, CtsJvmtiRunTest1977HostTestCases[secondary_user], CtsJvmtiRunTest1978HostTestCases, CtsJvmtiRunTest1978HostTestCases[secondary_user], CtsJvmtiRunTest1979HostTestCases, CtsJvmtiRunTest1979HostTestCases[secondary_user], CtsJvmtiRunTest1981HostTestCases, CtsJvmtiRunTest1981HostTestCases[secondary_user], CtsJvmtiRunTest1982HostTestCases, CtsJvmtiRunTest1982HostTestCases[secondary_user], CtsJvmtiRunTest1983HostTestCases, CtsJvmtiRunTest1983HostTestCases[secondary_user], CtsJvmtiRunTest1984HostTestCases, CtsJvmtiRunTest1984HostTestCases[secondary_user], CtsJvmtiRunTest1988HostTestCases, CtsJvmtiRunTest1988HostTestCases[secondary_user], CtsJvmtiRunTest1989HostTestCases, CtsJvmtiRunTest1989HostTestCases[secondary_user], CtsJvmtiRunTest1990HostTestCases, CtsJvmtiRunTest1990HostTestCases[secondary_user], CtsJvmtiRunTest1991HostTestCases, CtsJvmtiRunTest1991HostTestCases[secondary_user], CtsJvmtiRunTest1992HostTestCases, CtsJvmtiRunTest1992HostTestCases[secondary_user], CtsJvmtiRunTest1994HostTestCases, CtsJvmtiRunTest1994HostTestCases[secondary_user], CtsJvmtiRunTest1995HostTestCases, CtsJvmtiRunTest1995HostTestCases[secondary_user], CtsJvmtiRunTest1996HostTestCases, CtsJvmtiRunTest1996HostTestCases[secondary_user], CtsJvmtiRunTest1997HostTestCases, CtsJvmtiRunTest1997HostTestCases[secondary_user], CtsJvmtiRunTest1998HostTestCases, CtsJvmtiRunTest1998HostTestCases[secondary_user], CtsJvmtiRunTest1999HostTestCases, CtsJvmtiRunTest1999HostTestCases[secondary_user], CtsJvmtiRunTest2001HostTestCases, CtsJvmtiRunTest2001HostTestCases[secondary_user], CtsJvmtiRunTest2002HostTestCases, CtsJvmtiRunTest2002HostTestCases[secondary_user], CtsJvmtiRunTest2003HostTestCases, CtsJvmtiRunTest2003HostTestCases[secondary_user], CtsJvmtiRunTest2004HostTestCases, CtsJvmtiRunTest2004HostTestCases[secondary_user], CtsJvmtiRunTest2005HostTestCases, CtsJvmtiRunTest2005HostTestCases[secondary_user], CtsJvmtiRunTest2006HostTestCases, CtsJvmtiRunTest2006HostTestCases[secondary_user], CtsJvmtiRunTest2007HostTestCases, CtsJvmtiRunTest2007HostTestCases[secondary_user], CtsJvmtiRunTest902HostTestCases, CtsJvmtiRunTest902HostTestCases[secondary_user], CtsJvmtiRunTest903HostTestCases, CtsJvmtiRunTest903HostTestCases[secondary_user], CtsJvmtiRunTest904HostTestCases, CtsJvmtiRunTest904HostTestCases[secondary_user], CtsJvmtiRunTest905HostTestCases, CtsJvmtiRunTest905HostTestCases[secondary_user], CtsJvmtiRunTest906HostTestCases, CtsJvmtiRunTest906HostTestCases[secondary_user], CtsJvmtiRunTest907HostTestCases, CtsJvmtiRunTest907HostTestCases[secondary_user], CtsJvmtiRunTest908HostTestCases, CtsJvmtiRunTest908HostTestCases[secondary_user], CtsJvmtiRunTest910HostTestCases, CtsJvmtiRunTest910HostTestCases[secondary_user], CtsJvmtiRunTest911HostTestCases, CtsJvmtiRunTest911HostTestCases[secondary_user], CtsJvmtiRunTest912HostTestCases, CtsJvmtiRunTest912HostTestCases[secondary_user], CtsJvmtiRunTest913HostTestCases, CtsJvmtiRunTest913HostTestCases[secondary_user], CtsJvmtiRunTest914HostTestCases, CtsJvmtiRunTest914HostTestCases[secondary_user], CtsJvmtiRunTest915HostTestCases, CtsJvmtiRunTest915HostTestCases[secondary_user], CtsJvmtiRunTest917HostTestCases, CtsJvmtiRunTest917HostTestCases[secondary_user], CtsJvmtiRunTest918HostTestCases, CtsJvmtiRunTest918HostTestCases[secondary_user], CtsJvmtiRunTest919HostTestCases, CtsJvmtiRunTest919HostTestCases[secondary_user], CtsJvmtiRunTest920HostTestCases, CtsJvmtiRunTest920HostTestCases[secondary_user], CtsJvmtiRunTest922HostTestCases, CtsJvmtiRunTest922HostTestCases[secondary_user], CtsJvmtiRunTest923HostTestCases, CtsJvmtiRunTest923HostTestCases[secondary_user], CtsJvmtiRunTest924HostTestCases, CtsJvmtiRunTest924HostTestCases[secondary_user], CtsJvmtiRunTest926HostTestCases, CtsJvmtiRunTest926HostTestCases[secondary_user], CtsJvmtiRunTest927HostTestCases, CtsJvmtiRunTest927HostTestCases[secondary_user], CtsJvmtiRunTest928HostTestCases, CtsJvmtiRunTest928HostTestCases[secondary_user], CtsJvmtiRunTest930HostTestCases, CtsJvmtiRunTest930HostTestCases[secondary_user], CtsJvmtiRunTest931HostTestCases, CtsJvmtiRunTest931HostTestCases[secondary_user], CtsJvmtiRunTest932HostTestCases, CtsJvmtiRunTest932HostTestCases[secondary_user], CtsJvmtiRunTest940HostTestCases, CtsJvmtiRunTest940HostTestCases[secondary_user], CtsJvmtiRunTest942HostTestCases, CtsJvmtiRunTest942HostTestCases[secondary_user], CtsJvmtiRunTest944HostTestCases, CtsJvmtiRunTest944HostTestCases[secondary_user], CtsJvmtiRunTest945HostTestCases, CtsJvmtiRunTest945HostTestCases[secondary_user], CtsJvmtiRunTest947HostTestCases, CtsJvmtiRunTest947HostTestCases[secondary_user], CtsJvmtiRunTest951HostTestCases, CtsJvmtiRunTest951HostTestCases[secondary_user], CtsJvmtiRunTest982HostTestCases, CtsJvmtiRunTest982HostTestCases[secondary_user], CtsJvmtiRunTest983HostTestCases, CtsJvmtiRunTest983HostTestCases[secondary_user], CtsJvmtiRunTest984HostTestCases, CtsJvmtiRunTest984HostTestCases[secondary_user], CtsJvmtiRunTest985HostTestCases, CtsJvmtiRunTest985HostTestCases[secondary_user], CtsJvmtiRunTest986HostTestCases, CtsJvmtiRunTest986HostTestCases[secondary_user], CtsJvmtiRunTest988HostTestCases, CtsJvmtiRunTest988HostTestCases[secondary_user], CtsJvmtiRunTest989HostTestCases, CtsJvmtiRunTest989HostTestCases[secondary_user], CtsJvmtiRunTest990HostTestCases, CtsJvmtiRunTest990HostTestCases[secondary_user], CtsJvmtiRunTest991HostTestCases, CtsJvmtiRunTest991HostTestCases[secondary_user], CtsJvmtiRunTest992HostTestCases, CtsJvmtiRunTest992HostTestCases[secondary_user], CtsJvmtiRunTest993HostTestCases, CtsJvmtiRunTest993HostTestCases[secondary_user], CtsJvmtiRunTest994HostTestCases, CtsJvmtiRunTest994HostTestCases[secondary_user], CtsJvmtiRunTest995HostTestCases, CtsJvmtiRunTest995HostTestCases[secondary_user], CtsJvmtiRunTest996HostTestCases, CtsJvmtiRunTest996HostTestCases[secondary_user], CtsJvmtiRunTest997HostTestCases, CtsJvmtiRunTest997HostTestCases[secondary_user], CtsJvmtiTaggingHostTestCases, CtsJvmtiTaggingHostTestCases[secondary_user], CtsJvmtiTrackingHostTestCases, CtsJvmtiTrackingHostTestCases[secondary_user], CtsKernelConfigTestCases, CtsKeystoreTestCases, CtsKeystoreTestCases[secondary_user], CtsLeanbackJankTestCases, CtsLeanbackJankTestCases[secondary_user], CtsLegacyNotification20TestCases, CtsLegacyNotification20TestCases[secondary_user], CtsLegacyNotification27TestCases, CtsLegacyNotification27TestCases[secondary_user], CtsLegacyNotification28TestCases, CtsLegacyNotification28TestCases[secondary_user], CtsLegacyNotification29TestCases, CtsLegacyNotification29TestCases[secondary_user], CtsLibcoreApiEvolutionTestCases, CtsLibcoreApiEvolutionTestCases[secondary_user], CtsLibcoreFileIOTestCases, CtsLibcoreFileIOTestCases[secondary_user], CtsLibcoreJsr166TestCases, CtsLibcoreJsr166TestCases[secondary_user], CtsLibcoreLegacy22TestCases, CtsLibcoreLegacy22TestCases[secondary_user], CtsLibcoreOjTestCases, CtsLibcoreOjTestCases[secondary_user], CtsLibcoreOkHttpTestCases, CtsLibcoreOkHttpTestCases[secondary_user], CtsLibcoreTestCases, CtsLibcoreTestCases[secondary_user], CtsLibcoreWycheproofBCTestCases, CtsLibcoreWycheproofBCTestCases[secondary_user], CtsLibcoreWycheproofConscryptTestCases, CtsLibcoreWycheproofConscryptTestCases[secondary_user], CtsLiblogTestCases, CtsLiblogTestCases[secondary_user], CtsLocationCoarseTestCases, CtsLocationCoarseTestCases[instant], CtsLocationCoarseTestCases[secondary_user], CtsLocationFineTestCases, CtsLocationFineTestCases[instant], CtsLocationFineTestCases[secondary_user], CtsLocationGnssTestCases, CtsLocationGnssTestCases[instant], CtsLocationGnssTestCases[secondary_user], CtsLocationNoneTestCases, CtsLocationNoneTestCases[instant], CtsLocationNoneTestCases[secondary_user], CtsLocationPrivilegedTestCases, CtsLocationPrivilegedTestCases[instant], CtsLocationPrivilegedTestCases[secondary_user], CtsLogdTestCases, CtsLogdTestCases[secondary_user], CtsMatchFlagTestCases, CtsMatchFlagTestCases[secondary_user], CtsMediaBitstreamsTestCases, CtsMediaBitstreamsTestCases[secondary_user], CtsMediaHostTestCases, CtsMediaHostTestCases[instant], CtsMediaParserTestCases, CtsMediaParserTestCases[secondary_user], CtsMediaPerformanceClassTestCases, CtsMediaPerformanceClassTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsJvmtiAttachingHostTestCases', '--include-filter', 'CtsJvmtiAttachingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiAttachingTestCases', '--include-filter', 'CtsJvmtiAttachingTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases', '--include-filter', 'CtsJvmtiRedefineClassesHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases', '--include-filter', 'CtsJvmtiRunTest1900HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases', '--include-filter', 'CtsJvmtiRunTest1901HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases', '--include-filter', 'CtsJvmtiRunTest1902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases', '--include-filter', 'CtsJvmtiRunTest1903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases', '--include-filter', 'CtsJvmtiRunTest1904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases', '--include-filter', 'CtsJvmtiRunTest1906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases', '--include-filter', 'CtsJvmtiRunTest1907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases', '--include-filter', 'CtsJvmtiRunTest1908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases', '--include-filter', 'CtsJvmtiRunTest1909HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases', '--include-filter', 'CtsJvmtiRunTest1910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases', '--include-filter', 'CtsJvmtiRunTest1911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases', '--include-filter', 'CtsJvmtiRunTest1912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases', '--include-filter', 'CtsJvmtiRunTest1913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases', '--include-filter', 'CtsJvmtiRunTest1914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases', '--include-filter', 'CtsJvmtiRunTest1915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases', '--include-filter', 'CtsJvmtiRunTest1916HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases', '--include-filter', 'CtsJvmtiRunTest1917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases', '--include-filter', 'CtsJvmtiRunTest1920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases', '--include-filter', 'CtsJvmtiRunTest1921HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases', '--include-filter', 'CtsJvmtiRunTest1922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases', '--include-filter', 'CtsJvmtiRunTest1923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases', '--include-filter', 'CtsJvmtiRunTest1924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases', '--include-filter', 'CtsJvmtiRunTest1925HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases', '--include-filter', 'CtsJvmtiRunTest1926HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases', '--include-filter', 'CtsJvmtiRunTest1927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases', '--include-filter', 'CtsJvmtiRunTest1928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases', '--include-filter', 'CtsJvmtiRunTest1930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases', '--include-filter', 'CtsJvmtiRunTest1931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases', '--include-filter', 'CtsJvmtiRunTest1932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases', '--include-filter', 'CtsJvmtiRunTest1933HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases', '--include-filter', 'CtsJvmtiRunTest1934HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases', '--include-filter', 'CtsJvmtiRunTest1936HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases', '--include-filter', 'CtsJvmtiRunTest1937HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases', '--include-filter', 'CtsJvmtiRunTest1939HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases', '--include-filter', 'CtsJvmtiRunTest1941HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases', '--include-filter', 'CtsJvmtiRunTest1942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases', '--include-filter', 'CtsJvmtiRunTest1943HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases', '--include-filter', 'CtsJvmtiRunTest1953HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases', '--include-filter', 'CtsJvmtiRunTest1958HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases', '--include-filter', 'CtsJvmtiRunTest1962HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases', '--include-filter', 'CtsJvmtiRunTest1967HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases', '--include-filter', 'CtsJvmtiRunTest1968HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases', '--include-filter', 'CtsJvmtiRunTest1969HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases', '--include-filter', 'CtsJvmtiRunTest1970HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases', '--include-filter', 'CtsJvmtiRunTest1971HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases', '--include-filter', 'CtsJvmtiRunTest1974HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases', '--include-filter', 'CtsJvmtiRunTest1975HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases', '--include-filter', 'CtsJvmtiRunTest1976HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases', '--include-filter', 'CtsJvmtiRunTest1977HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases', '--include-filter', 'CtsJvmtiRunTest1978HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases', '--include-filter', 'CtsJvmtiRunTest1979HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1981HostTestCases', '--include-filter', 'CtsJvmtiRunTest1981HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases', '--include-filter', 'CtsJvmtiRunTest1982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases', '--include-filter', 'CtsJvmtiRunTest1983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases', '--include-filter', 'CtsJvmtiRunTest1984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases', '--include-filter', 'CtsJvmtiRunTest1988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases', '--include-filter', 'CtsJvmtiRunTest1989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases', '--include-filter', 'CtsJvmtiRunTest1990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases', '--include-filter', 'CtsJvmtiRunTest1991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases', '--include-filter', 'CtsJvmtiRunTest1992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases', '--include-filter', 'CtsJvmtiRunTest1994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases', '--include-filter', 'CtsJvmtiRunTest1995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases', '--include-filter', 'CtsJvmtiRunTest1996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases', '--include-filter', 'CtsJvmtiRunTest1997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases', '--include-filter', 'CtsJvmtiRunTest1998HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases', '--include-filter', 'CtsJvmtiRunTest1999HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases', '--include-filter', 'CtsJvmtiRunTest2001HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases', '--include-filter', 'CtsJvmtiRunTest2002HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases', '--include-filter', 'CtsJvmtiRunTest2003HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases', '--include-filter', 'CtsJvmtiRunTest2004HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases', '--include-filter', 'CtsJvmtiRunTest2005HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases', '--include-filter', 'CtsJvmtiRunTest2006HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases', '--include-filter', 'CtsJvmtiRunTest2007HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest902HostTestCases', '--include-filter', 'CtsJvmtiRunTest902HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest903HostTestCases', '--include-filter', 'CtsJvmtiRunTest903HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest904HostTestCases', '--include-filter', 'CtsJvmtiRunTest904HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest905HostTestCases', '--include-filter', 'CtsJvmtiRunTest905HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest906HostTestCases', '--include-filter', 'CtsJvmtiRunTest906HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest907HostTestCases', '--include-filter', 'CtsJvmtiRunTest907HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest908HostTestCases', '--include-filter', 'CtsJvmtiRunTest908HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest910HostTestCases', '--include-filter', 'CtsJvmtiRunTest910HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest911HostTestCases', '--include-filter', 'CtsJvmtiRunTest911HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest912HostTestCases', '--include-filter', 'CtsJvmtiRunTest912HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest913HostTestCases', '--include-filter', 'CtsJvmtiRunTest913HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest914HostTestCases', '--include-filter', 'CtsJvmtiRunTest914HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest915HostTestCases', '--include-filter', 'CtsJvmtiRunTest915HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest917HostTestCases', '--include-filter', 'CtsJvmtiRunTest917HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest918HostTestCases', '--include-filter', 'CtsJvmtiRunTest918HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest919HostTestCases', '--include-filter', 'CtsJvmtiRunTest919HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest920HostTestCases', '--include-filter', 'CtsJvmtiRunTest920HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest922HostTestCases', '--include-filter', 'CtsJvmtiRunTest922HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest923HostTestCases', '--include-filter', 'CtsJvmtiRunTest923HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest924HostTestCases', '--include-filter', 'CtsJvmtiRunTest924HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest926HostTestCases', '--include-filter', 'CtsJvmtiRunTest926HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest927HostTestCases', '--include-filter', 'CtsJvmtiRunTest927HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest928HostTestCases', '--include-filter', 'CtsJvmtiRunTest928HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest930HostTestCases', '--include-filter', 'CtsJvmtiRunTest930HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest931HostTestCases', '--include-filter', 'CtsJvmtiRunTest931HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest932HostTestCases', '--include-filter', 'CtsJvmtiRunTest932HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest940HostTestCases', '--include-filter', 'CtsJvmtiRunTest940HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest942HostTestCases', '--include-filter', 'CtsJvmtiRunTest942HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest944HostTestCases', '--include-filter', 'CtsJvmtiRunTest944HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest945HostTestCases', '--include-filter', 'CtsJvmtiRunTest945HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest947HostTestCases', '--include-filter', 'CtsJvmtiRunTest947HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest951HostTestCases', '--include-filter', 'CtsJvmtiRunTest951HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest982HostTestCases', '--include-filter', 'CtsJvmtiRunTest982HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest983HostTestCases', '--include-filter', 'CtsJvmtiRunTest983HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest984HostTestCases', '--include-filter', 'CtsJvmtiRunTest984HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest985HostTestCases', '--include-filter', 'CtsJvmtiRunTest985HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest986HostTestCases', '--include-filter', 'CtsJvmtiRunTest986HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest988HostTestCases', '--include-filter', 'CtsJvmtiRunTest988HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest989HostTestCases', '--include-filter', 'CtsJvmtiRunTest989HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest990HostTestCases', '--include-filter', 'CtsJvmtiRunTest990HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest991HostTestCases', '--include-filter', 'CtsJvmtiRunTest991HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest992HostTestCases', '--include-filter', 'CtsJvmtiRunTest992HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest993HostTestCases', '--include-filter', 'CtsJvmtiRunTest993HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest994HostTestCases', '--include-filter', 'CtsJvmtiRunTest994HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest995HostTestCases', '--include-filter', 'CtsJvmtiRunTest995HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest996HostTestCases', '--include-filter', 'CtsJvmtiRunTest996HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiRunTest997HostTestCases', '--include-filter', 'CtsJvmtiRunTest997HostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTaggingHostTestCases', '--include-filter', 'CtsJvmtiTaggingHostTestCases[secondary_user]', '--include-filter', 'CtsJvmtiTrackingHostTestCases', '--include-filter', 'CtsJvmtiTrackingHostTestCases[secondary_user]', '--include-filter', 'CtsKernelConfigTestCases', '--include-filter', 'CtsKeystoreTestCases', '--include-filter', 'CtsKeystoreTestCases[secondary_user]', '--include-filter', 'CtsLeanbackJankTestCases', '--include-filter', 'CtsLeanbackJankTestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification20TestCases', '--include-filter', 'CtsLegacyNotification20TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification27TestCases', '--include-filter', 'CtsLegacyNotification27TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification28TestCases', '--include-filter', 'CtsLegacyNotification28TestCases[secondary_user]', '--include-filter', 'CtsLegacyNotification29TestCases', '--include-filter', 'CtsLegacyNotification29TestCases[secondary_user]', '--include-filter', 'CtsLibcoreApiEvolutionTestCases', '--include-filter', 'CtsLibcoreApiEvolutionTestCases[secondary_user]', '--include-filter', 'CtsLibcoreFileIOTestCases', '--include-filter', 'CtsLibcoreFileIOTestCases[secondary_user]', '--include-filter', 'CtsLibcoreJsr166TestCases', '--include-filter', 'CtsLibcoreJsr166TestCases[secondary_user]', '--include-filter', 'CtsLibcoreLegacy22TestCases', '--include-filter', 'CtsLibcoreLegacy22TestCases[secondary_user]', '--include-filter', 'CtsLibcoreOjTestCases', '--include-filter', 'CtsLibcoreOjTestCases[secondary_user]', '--include-filter', 'CtsLibcoreOkHttpTestCases', '--include-filter', 'CtsLibcoreOkHttpTestCases[secondary_user]', '--include-filter', 'CtsLibcoreTestCases', '--include-filter', 'CtsLibcoreTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofBCTestCases', '--include-filter', 'CtsLibcoreWycheproofBCTestCases[secondary_user]', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases', '--include-filter', 'CtsLibcoreWycheproofConscryptTestCases[secondary_user]', '--include-filter', 'CtsLiblogTestCases', '--include-filter', 'CtsLiblogTestCases[secondary_user]', '--include-filter', 'CtsLocationCoarseTestCases', '--include-filter', 'CtsLocationCoarseTestCases[instant]', '--include-filter', 'CtsLocationCoarseTestCases[secondary_user]', '--include-filter', 'CtsLocationFineTestCases', '--include-filter', 'CtsLocationFineTestCases[instant]', '--include-filter', 'CtsLocationFineTestCases[secondary_user]', '--include-filter', 'CtsLocationGnssTestCases', '--include-filter', 'CtsLocationGnssTestCases[instant]', '--include-filter', 'CtsLocationGnssTestCases[secondary_user]', '--include-filter', 'CtsLocationNoneTestCases', '--include-filter', 'CtsLocationNoneTestCases[instant]', '--include-filter', 'CtsLocationNoneTestCases[secondary_user]', '--include-filter', 'CtsLocationPrivilegedTestCases', '--include-filter', 'CtsLocationPrivilegedTestCases[instant]', '--include-filter', 'CtsLocationPrivilegedTestCases[secondary_user]', '--include-filter', 'CtsLogdTestCases', '--include-filter', 'CtsLogdTestCases[secondary_user]', '--include-filter', 'CtsMatchFlagTestCases', '--include-filter', 'CtsMatchFlagTestCases[secondary_user]', '--include-filter', 'CtsMediaBitstreamsTestCases', '--include-filter', 'CtsMediaBitstreamsTestCases[secondary_user]', '--include-filter', 'CtsMediaHostTestCases', '--include-filter', 'CtsMediaHostTestCases[instant]', '--include-filter', 'CtsMediaParserTestCases', '--include-filter', 'CtsMediaParserTestCases[secondary_user]', '--include-filter', 'CtsMediaPerformanceClassTestCases', '--include-filter', 'CtsMediaPerformanceClassTestCases[secondary_user]'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsJvmtiAttachingHostTestCases_-_CtsMediaPerformanceClassTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
new file mode 100644
index 0000000..d9a3376
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaStressTestCases, CtsMediaStressTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases', '--include-filter', 'CtsMediaStressTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaStressTestCases_-_CtsMediaStressTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32 b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32
new file mode 100644
index 0000000..928934a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases, CtsMediaTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32',
+        test_name='cheets_CTS_R.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--logcat-on-failure', '--abi', 'x86'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaTestCases_-_CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64 b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64
new file mode 100644
index 0000000..1ea9449
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaTestCases, CtsMediaTestCases[instant] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        needs_push_media=True,
+        tag='internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64',
+        test_name='cheets_CTS_R.internal.x86.all.CtsMediaTestCases_-_CtsMediaTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases', '--include-filter', 'CtsMediaTestCases[instant]', '--logcat-on-failure', '--abi', 'x86_64'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaTestCases_-_CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaV2TestCases_-_CtsProtoTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaV2TestCases_-_CtsProtoTestCases
new file mode 100644
index 0000000..1eb6e06
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsMediaV2TestCases_-_CtsProtoTestCases
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsMediaV2TestCases_-_CtsProtoTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaV2TestCases, CtsMidiTestCases, CtsMimeMapTestCases, CtsMimeMapTestCases[secondary_user], CtsMockingDebuggableTestCases, CtsMockingDebuggableTestCases[instant], CtsMockingDebuggableTestCases[secondary_user], CtsMockingTestCases, CtsMockingTestCases[instant], CtsMockingTestCases[secondary_user], CtsMonkeyTestCases, CtsMonkeyTestCases[secondary_user], CtsMultiUserHostTestCases, CtsMultiUserHostTestCases[instant], CtsMultiUserHostTestCases[secondary_user], CtsMultiUserTestCases, CtsMultiUserTestCases[instant], CtsMultiUserTestCases[secondary_user], CtsNNAPIBenchmarkTestCases, CtsNNAPIBenchmarkTestCases[instant], CtsNNAPIBenchmarkTestCases[secondary_user], CtsNNAPITestCases, CtsNNAPITestCases[secondary_user], CtsNativeEncryptionTestCases, CtsNativeEncryptionTestCases[instant], CtsNativeEncryptionTestCases[secondary_user], CtsNativeHardwareTestCases, CtsNativeHardwareTestCases[secondary_user], CtsNativeMediaAAudioTestCases, CtsNativeMediaAAudioTestCases[instant], CtsNativeMediaAAudioTestCases[secondary_user], CtsNativeMediaMetricsTestCases, CtsNativeMediaMetricsTestCases[instant], CtsNativeMediaMetricsTestCases[secondary_user], CtsNativeMediaSlTestCases, CtsNativeMediaSlTestCases[instant], CtsNativeMediaSlTestCases[secondary_user], CtsNativeMediaXaTestCases, CtsNativeMediaXaTestCases[instant], CtsNativeMediaXaTestCases[secondary_user], CtsNativeMidiTestCases, CtsNativeMidiTestCases[secondary_user], CtsNativeNetDnsTestCases, CtsNativeNetDnsTestCases[instant], CtsNativeNetDnsTestCases[secondary_user], CtsNativeNetTestCases, CtsNativeNetTestCases[instant], CtsNativeNetTestCases[secondary_user], CtsNdefTestCases, CtsNdefTestCases[secondary_user], CtsNdkBinderTestCases, CtsNdkBinderTestCases[instant], CtsNdkBinderTestCases[secondary_user], CtsNetApi23TestCases, CtsNetApi23TestCases[secondary_user], CtsNetSecConfigAttributeTestCases, CtsNetSecConfigAttributeTestCases[instant], CtsNetSecConfigAttributeTestCases[secondary_user], CtsNetSecConfigBasicDebugDisabledTestCases, CtsNetSecConfigBasicDebugDisabledTestCases[instant], CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user], CtsNetSecConfigBasicDebugEnabledTestCases, CtsNetSecConfigBasicDebugEnabledTestCases[instant], CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user], CtsNetSecConfigBasicDomainConfigTestCases, CtsNetSecConfigBasicDomainConfigTestCases[instant], CtsNetSecConfigBasicDomainConfigTestCases[secondary_user], CtsNetSecConfigCleartextTrafficTestCases, CtsNetSecConfigCleartextTrafficTestCases[instant], CtsNetSecConfigCleartextTrafficTestCases[secondary_user], CtsNetSecConfigDownloadManagerTestCases, CtsNetSecConfigDownloadManagerTestCases[secondary_user], CtsNetSecConfigInvalidPinTestCases, CtsNetSecConfigInvalidPinTestCases[instant], CtsNetSecConfigInvalidPinTestCases[secondary_user], CtsNetSecConfigNestedDomainConfigTestCases, CtsNetSecConfigNestedDomainConfigTestCases[instant], CtsNetSecConfigNestedDomainConfigTestCases[secondary_user], CtsNetSecConfigPrePCleartextTrafficTestCases, CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user], CtsNetSecConfigResourcesSrcTestCases, CtsNetSecConfigResourcesSrcTestCases[instant], CtsNetSecConfigResourcesSrcTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficFalseTestCases, CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficTrueTestCases, CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user], CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases, CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user], CtsNetTestCases, CtsNetTestCasesInternetPermission, CtsNetTestCasesInternetPermission[instant], CtsNetTestCasesInternetPermission[secondary_user], CtsNetTestCasesLegacyApi22, CtsNetTestCasesLegacyApi22[secondary_user], CtsNetTestCasesLegacyPermission22, CtsNetTestCasesLegacyPermission22[secondary_user], CtsNetTestCasesUpdateStatsPermission, CtsNetTestCasesUpdateStatsPermission[instant], CtsNetTestCasesUpdateStatsPermission[secondary_user], CtsNetTestCases[instant], CtsNetTestCases[secondary_user], CtsNfcTestCases, CtsNfcTestCases[secondary_user], CtsNoPermissionTestCases, CtsNoPermissionTestCases25, CtsNoPermissionTestCases25[secondary_user], CtsNoPermissionTestCases[secondary_user], CtsOmapiTestCases, CtsOmapiTestCases[secondary_user], CtsOpenGLTestCases, CtsOpenGLTestCases[secondary_user], CtsOpenGlPerf2TestCases, CtsOpenGlPerf2TestCases[secondary_user], CtsOpenGlPerfTestCases, CtsOsHostTestCases, CtsOsHostTestCases[instant], CtsOsHostTestCases[secondary_user], CtsOsTestCases, CtsOsTestCases[instant], CtsPackageInstallAppOpDefaultTestCases, CtsPackageInstallAppOpDefaultTestCases[instant], CtsPackageInstallAppOpDefaultTestCases[secondary_user], CtsPackageInstallAppOpDeniedTestCases, CtsPackageInstallAppOpDeniedTestCases[instant], CtsPackageInstallAppOpDeniedTestCases[secondary_user], CtsPackageInstallTestCases, CtsPackageInstallTestCases[instant], CtsPackageInstallTestCases[secondary_user], CtsPackageInstallerTapjackingTestCases, CtsPackageInstallerTapjackingTestCases[secondary_user], CtsPackageUninstallTestCases, CtsPackageUninstallTestCases[secondary_user], CtsPackageWatchdogTestCases, CtsPackageWatchdogTestCases[secondary_user], CtsPdfTestCases, CtsPdfTestCases[instant], CtsPdfTestCases[secondary_user], CtsPerfettoTestCases, CtsPerfettoTestCases[secondary_user], CtsPermission2TestCases, CtsPermission2TestCases[instant], CtsPermission3TestCases, CtsPermission3TestCases[secondary_user], CtsPermissionTestCases, CtsPermissionTestCasesSdk28, CtsPermissionTestCasesSdk28[instant], CtsPermissionTestCasesSdk28[secondary_user], CtsPermissionTestCasesTelephony, CtsPermissionTestCasesTelephony[instant], CtsPermissionTestCasesTelephony[secondary_user], CtsPermissionTestCases[instant], CtsPreferenceTestCases, CtsPreferenceTestCases[instant], CtsPreferenceTestCases[secondary_user], CtsPrintTestCases, CtsPrintTestCases[instant], CtsPrintTestCases[secondary_user], CtsProtoTestCases, CtsProtoTestCases[instant], CtsProtoTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        use_helpers=True,
+        tag='internal.x86.all.CtsMediaV2TestCases_-_CtsProtoTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsMediaV2TestCases_-_CtsProtoTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaV2TestCases', '--include-filter', 'CtsMidiTestCases', '--include-filter', 'CtsMimeMapTestCases', '--include-filter', 'CtsMimeMapTestCases[secondary_user]', '--include-filter', 'CtsMockingDebuggableTestCases', '--include-filter', 'CtsMockingDebuggableTestCases[instant]', '--include-filter', 'CtsMockingDebuggableTestCases[secondary_user]', '--include-filter', 'CtsMockingTestCases', '--include-filter', 'CtsMockingTestCases[instant]', '--include-filter', 'CtsMockingTestCases[secondary_user]', '--include-filter', 'CtsMonkeyTestCases', '--include-filter', 'CtsMonkeyTestCases[secondary_user]', '--include-filter', 'CtsMultiUserHostTestCases', '--include-filter', 'CtsMultiUserHostTestCases[instant]', '--include-filter', 'CtsMultiUserHostTestCases[secondary_user]', '--include-filter', 'CtsMultiUserTestCases', '--include-filter', 'CtsMultiUserTestCases[instant]', '--include-filter', 'CtsMultiUserTestCases[secondary_user]', '--include-filter', 'CtsNNAPIBenchmarkTestCases', '--include-filter', 'CtsNNAPIBenchmarkTestCases[instant]', '--include-filter', 'CtsNNAPIBenchmarkTestCases[secondary_user]', '--include-filter', 'CtsNNAPITestCases', '--include-filter', 'CtsNNAPITestCases[secondary_user]', '--include-filter', 'CtsNativeEncryptionTestCases', '--include-filter', 'CtsNativeEncryptionTestCases[instant]', '--include-filter', 'CtsNativeEncryptionTestCases[secondary_user]', '--include-filter', 'CtsNativeHardwareTestCases', '--include-filter', 'CtsNativeHardwareTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaAAudioTestCases', '--include-filter', 'CtsNativeMediaAAudioTestCases[instant]', '--include-filter', 'CtsNativeMediaAAudioTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaMetricsTestCases', '--include-filter', 'CtsNativeMediaMetricsTestCases[instant]', '--include-filter', 'CtsNativeMediaMetricsTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaSlTestCases', '--include-filter', 'CtsNativeMediaSlTestCases[instant]', '--include-filter', 'CtsNativeMediaSlTestCases[secondary_user]', '--include-filter', 'CtsNativeMediaXaTestCases', '--include-filter', 'CtsNativeMediaXaTestCases[instant]', '--include-filter', 'CtsNativeMediaXaTestCases[secondary_user]', '--include-filter', 'CtsNativeMidiTestCases', '--include-filter', 'CtsNativeMidiTestCases[secondary_user]', '--include-filter', 'CtsNativeNetDnsTestCases', '--include-filter', 'CtsNativeNetDnsTestCases[instant]', '--include-filter', 'CtsNativeNetDnsTestCases[secondary_user]', '--include-filter', 'CtsNativeNetTestCases', '--include-filter', 'CtsNativeNetTestCases[instant]', '--include-filter', 'CtsNativeNetTestCases[secondary_user]', '--include-filter', 'CtsNdefTestCases', '--include-filter', 'CtsNdefTestCases[secondary_user]', '--include-filter', 'CtsNdkBinderTestCases', '--include-filter', 'CtsNdkBinderTestCases[instant]', '--include-filter', 'CtsNdkBinderTestCases[secondary_user]', '--include-filter', 'CtsNetApi23TestCases', '--include-filter', 'CtsNetApi23TestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigAttributeTestCases', '--include-filter', 'CtsNetSecConfigAttributeTestCases[instant]', '--include-filter', 'CtsNetSecConfigAttributeTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugDisabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDebugEnabledTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigBasicDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[instant]', '--include-filter', 'CtsNetSecConfigCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases', '--include-filter', 'CtsNetSecConfigDownloadManagerTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[instant]', '--include-filter', 'CtsNetSecConfigInvalidPinTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[instant]', '--include-filter', 'CtsNetSecConfigNestedDomainConfigTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases', '--include-filter', 'CtsNetSecConfigPrePCleartextTrafficTestCases[secondary_user]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[instant]', '--include-filter', 'CtsNetSecConfigResourcesSrcTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficFalseTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficTrueTestCases[secondary_user]', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases', '--include-filter', 'CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases[secondary_user]', '--include-filter', 'CtsNetTestCases', '--include-filter', 'CtsNetTestCasesInternetPermission', '--include-filter', 'CtsNetTestCasesInternetPermission[instant]', '--include-filter', 'CtsNetTestCasesInternetPermission[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyApi22', '--include-filter', 'CtsNetTestCasesLegacyApi22[secondary_user]', '--include-filter', 'CtsNetTestCasesLegacyPermission22', '--include-filter', 'CtsNetTestCasesLegacyPermission22[secondary_user]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[instant]', '--include-filter', 'CtsNetTestCasesUpdateStatsPermission[secondary_user]', '--include-filter', 'CtsNetTestCases[instant]', '--include-filter', 'CtsNetTestCases[secondary_user]', '--include-filter', 'CtsNfcTestCases', '--include-filter', 'CtsNfcTestCases[secondary_user]', '--include-filter', 'CtsNoPermissionTestCases', '--include-filter', 'CtsNoPermissionTestCases25', '--include-filter', 'CtsNoPermissionTestCases25[secondary_user]', '--include-filter', 'CtsNoPermissionTestCases[secondary_user]', '--include-filter', 'CtsOmapiTestCases', '--include-filter', 'CtsOmapiTestCases[secondary_user]', '--include-filter', 'CtsOpenGLTestCases', '--include-filter', 'CtsOpenGLTestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerf2TestCases', '--include-filter', 'CtsOpenGlPerf2TestCases[secondary_user]', '--include-filter', 'CtsOpenGlPerfTestCases', '--include-filter', 'CtsOsHostTestCases', '--include-filter', 'CtsOsHostTestCases[instant]', '--include-filter', 'CtsOsHostTestCases[secondary_user]', '--include-filter', 'CtsOsTestCases', '--include-filter', 'CtsOsTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDefaultTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[instant]', '--include-filter', 'CtsPackageInstallAppOpDeniedTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallTestCases', '--include-filter', 'CtsPackageInstallTestCases[instant]', '--include-filter', 'CtsPackageInstallTestCases[secondary_user]', '--include-filter', 'CtsPackageInstallerTapjackingTestCases', '--include-filter', 'CtsPackageInstallerTapjackingTestCases[secondary_user]', '--include-filter', 'CtsPackageUninstallTestCases', '--include-filter', 'CtsPackageUninstallTestCases[secondary_user]', '--include-filter', 'CtsPackageWatchdogTestCases', '--include-filter', 'CtsPackageWatchdogTestCases[secondary_user]', '--include-filter', 'CtsPdfTestCases', '--include-filter', 'CtsPdfTestCases[instant]', '--include-filter', 'CtsPdfTestCases[secondary_user]', '--include-filter', 'CtsPerfettoTestCases', '--include-filter', 'CtsPerfettoTestCases[secondary_user]', '--include-filter', 'CtsPermission2TestCases', '--include-filter', 'CtsPermission2TestCases[instant]', '--include-filter', 'CtsPermission3TestCases', '--include-filter', 'CtsPermission3TestCases[secondary_user]', '--include-filter', 'CtsPermissionTestCases', '--include-filter', 'CtsPermissionTestCasesSdk28', '--include-filter', 'CtsPermissionTestCasesSdk28[instant]', '--include-filter', 'CtsPermissionTestCasesSdk28[secondary_user]', '--include-filter', 'CtsPermissionTestCasesTelephony', '--include-filter', 'CtsPermissionTestCasesTelephony[instant]', '--include-filter', 'CtsPermissionTestCasesTelephony[secondary_user]', '--include-filter', 'CtsPermissionTestCases[instant]', '--include-filter', 'CtsPreferenceTestCases', '--include-filter', 'CtsPreferenceTestCases[instant]', '--include-filter', 'CtsPreferenceTestCases[secondary_user]', '--include-filter', 'CtsPrintTestCases', '--include-filter', 'CtsPrintTestCases[instant]', '--include-filter', 'CtsPrintTestCases[secondary_user]', '--include-filter', 'CtsProtoTestCases', '--include-filter', 'CtsProtoTestCases[instant]', '--include-filter', 'CtsProtoTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsMediaV2TestCases_-_CtsProtoTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases
new file mode 100644
index 0000000..7d50bac
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsProviderTestCases, CtsProviderTestCases[secondary_user], CtsProviderUiTestCases, CtsQuickAccessWalletTestCases, CtsQuickAccessWalletTestCases[instant], CtsQuickAccessWalletTestCases[secondary_user], CtsRenderscriptLegacyTestCases, CtsRenderscriptLegacyTestCases[secondary_user], CtsRenderscriptTestCases, CtsRenderscriptTestCases[instant], CtsRenderscriptTestCases[secondary_user], CtsResolverServiceTestCases, CtsResolverServiceTestCases[secondary_user], CtsResourcesLoaderTests, CtsResourcesLoaderTests[secondary_user], CtsRoleTestCases, CtsRoleTestCases[secondary_user], CtsRollbackManagerHostTestCases, CtsRsBlasTestCases, CtsRsBlasTestCases[secondary_user], CtsRsCppTestCases, CtsRsCppTestCases[secondary_user], CtsSampleDeviceTestCases, CtsSampleDeviceTestCases[instant], CtsSampleDeviceTestCases[secondary_user], CtsSampleHostTestCases, CtsSampleHostTestCases[secondary_user], CtsSaxTestCases, CtsSaxTestCases[secondary_user], CtsScopedStorageHostTest, CtsScopedStorageHostTest[instant], CtsSdkExtensionsTestCases, CtsSdkExtensionsTestCases[instant], CtsSdkExtensionsTestCases[secondary_user], CtsSeccompHostTestCases, CtsSeccompHostTestCases[instant], CtsSeccompHostTestCases[secondary_user], CtsSecureElementAccessControlTestCases1, CtsSecureElementAccessControlTestCases1[secondary_user], CtsSecureElementAccessControlTestCases2, CtsSecureElementAccessControlTestCases2[secondary_user], CtsSecureElementAccessControlTestCases3, CtsSecureElementAccessControlTestCases3[secondary_user], CtsSecureFrpInstallTestCases, CtsSecureFrpInstallTestCases[secondary_user], CtsSecurityBulletinHostTestCases, CtsSecurityBulletinHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsProviderTestCases', '--include-filter', 'CtsProviderTestCases[secondary_user]', '--include-filter', 'CtsProviderUiTestCases', '--include-filter', 'CtsQuickAccessWalletTestCases', '--include-filter', 'CtsQuickAccessWalletTestCases[instant]', '--include-filter', 'CtsQuickAccessWalletTestCases[secondary_user]', '--include-filter', 'CtsRenderscriptLegacyTestCases', '--include-filter', 'CtsRenderscriptLegacyTestCases[secondary_user]', '--include-filter', 'CtsRenderscriptTestCases', '--include-filter', 'CtsRenderscriptTestCases[instant]', '--include-filter', 'CtsRenderscriptTestCases[secondary_user]', '--include-filter', 'CtsResolverServiceTestCases', '--include-filter', 'CtsResolverServiceTestCases[secondary_user]', '--include-filter', 'CtsResourcesLoaderTests', '--include-filter', 'CtsResourcesLoaderTests[secondary_user]', '--include-filter', 'CtsRoleTestCases', '--include-filter', 'CtsRoleTestCases[secondary_user]', '--include-filter', 'CtsRollbackManagerHostTestCases', '--include-filter', 'CtsRsBlasTestCases', '--include-filter', 'CtsRsBlasTestCases[secondary_user]', '--include-filter', 'CtsRsCppTestCases', '--include-filter', 'CtsRsCppTestCases[secondary_user]', '--include-filter', 'CtsSampleDeviceTestCases', '--include-filter', 'CtsSampleDeviceTestCases[instant]', '--include-filter', 'CtsSampleDeviceTestCases[secondary_user]', '--include-filter', 'CtsSampleHostTestCases', '--include-filter', 'CtsSampleHostTestCases[secondary_user]', '--include-filter', 'CtsSaxTestCases', '--include-filter', 'CtsSaxTestCases[secondary_user]', '--include-filter', 'CtsScopedStorageHostTest', '--include-filter', 'CtsScopedStorageHostTest[instant]', '--include-filter', 'CtsSdkExtensionsTestCases', '--include-filter', 'CtsSdkExtensionsTestCases[instant]', '--include-filter', 'CtsSdkExtensionsTestCases[secondary_user]', '--include-filter', 'CtsSeccompHostTestCases', '--include-filter', 'CtsSeccompHostTestCases[instant]', '--include-filter', 'CtsSeccompHostTestCases[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases1', '--include-filter', 'CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases2', '--include-filter', 'CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'CtsSecureElementAccessControlTestCases3', '--include-filter', 'CtsSecureElementAccessControlTestCases3[secondary_user]', '--include-filter', 'CtsSecureFrpInstallTestCases', '--include-filter', 'CtsSecureFrpInstallTestCases[secondary_user]', '--include-filter', 'CtsSecurityBulletinHostTestCases', '--include-filter', 'CtsSecurityBulletinHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsProviderTestCases_-_CtsSecurityBulletinHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        login_precondition_commands=['lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'],
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
new file mode 100644
index 0000000..0212f34
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityHostTestCases, CtsSecurityHostTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityHostTestCases', '--include-filter', 'CtsSecurityHostTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSecurityHostTestCases_-_CtsSecurityHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
new file mode 100644
index 0000000..5a7c248
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSecurityTestCases, CtsSecurityTestCases[instant], CtsSecurityTestCases[secondary_user], CtsSelinuxEphemeralTestCases, CtsSelinuxEphemeralTestCases[instant], CtsSelinuxTargetSdk25TestCases, CtsSelinuxTargetSdk25TestCases[secondary_user], CtsSelinuxTargetSdk27TestCases, CtsSelinuxTargetSdk27TestCases[secondary_user], CtsSelinuxTargetSdk28TestCases, CtsSelinuxTargetSdk28TestCases[secondary_user], CtsSelinuxTargetSdk29TestCases, CtsSelinuxTargetSdk29TestCases[secondary_user], CtsSelinuxTargetSdkCurrentTestCases, CtsSelinuxTargetSdkCurrentTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSecurityTestCases', '--include-filter', 'CtsSecurityTestCases[instant]', '--include-filter', 'CtsSecurityTestCases[secondary_user]', '--include-filter', 'CtsSelinuxEphemeralTestCases', '--include-filter', 'CtsSelinuxEphemeralTestCases[instant]', '--include-filter', 'CtsSelinuxTargetSdk25TestCases', '--include-filter', 'CtsSelinuxTargetSdk25TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk27TestCases', '--include-filter', 'CtsSelinuxTargetSdk27TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk28TestCases', '--include-filter', 'CtsSelinuxTargetSdk28TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdk29TestCases', '--include-filter', 'CtsSelinuxTargetSdk29TestCases[secondary_user]', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases', '--include-filter', 'CtsSelinuxTargetSdkCurrentTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSecurityTestCases_-_CtsSelinuxTargetSdkCurrentTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases
new file mode 100644
index 0000000..a1cd80a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases
@@ -0,0 +1,49 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+from autotest_lib.server import utils as server_utils
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases, CtsSensorTestCases[instant], CtsSensorTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+# For local debugging, if your test setup doesn't have servo, REMOVE these
+# two lines.
+args_dict = server_utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_TS(machine):
+    # REMOVE 'servo_args=servo_args' arg for local debugging if your test
+    # setup doesn't have servo.
+    try:
+        host_list = [hosts.create_host(machine, servo_args=servo_args)]
+    except:
+        # Just ignore any servo setup flakiness.
+        host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsSensorTestCases_-_CtsSensorTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases', '--include-filter', 'CtsSensorTestCases[instant]', '--include-filter', 'CtsSensorTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSensorTestCases_-_CtsSensorTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        hard_reboot_on_failure=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases
new file mode 100644
index 0000000..4027bd1
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSettingsHostTestCases, CtsSettingsTestCases, CtsSettingsTestCases[secondary_user], CtsSharedLibsApiSignatureTestCases, CtsSharedLibsApiSignatureTestCases[instant], CtsSharedLibsApiSignatureTestCases[secondary_user], CtsSharesheetTestCases, CtsSharesheetTestCases[secondary_user], CtsShortcutHostTestCases, CtsShortcutManagerLauncher1, CtsShortcutManagerLauncher1[secondary_user], CtsShortcutManagerLauncher2, CtsShortcutManagerLauncher2[secondary_user], CtsShortcutManagerLauncher3, CtsShortcutManagerLauncher3[secondary_user], CtsShortcutManagerLauncher4, CtsShortcutManagerLauncher4[secondary_user], CtsShortcutManagerPackage1, CtsShortcutManagerPackage1[secondary_user], CtsShortcutManagerPackage2, CtsShortcutManagerPackage2[secondary_user], CtsShortcutManagerPackage3, CtsShortcutManagerPackage3[secondary_user], CtsShortcutManagerPackage4, CtsShortcutManagerPackage4[secondary_user], CtsShortcutManagerTestCases, CtsShortcutManagerTestCases[secondary_user], CtsShortcutManagerThrottlingTest, CtsShortcutManagerThrottlingTest[secondary_user], CtsSignedConfigHostTestCases, CtsSignedConfigHostTestCases[secondary_user], CtsSimRestrictedApisTestCases, CtsSimRestrictedApisTestCases[secondary_user], CtsSimpleCpuTestCases, CtsSimpleCpuTestCases[secondary_user], CtsSimpleperfTestCases, CtsSkQPTestCases, CtsSkQPTestCases[secondary_user], CtsSliceTestCases, CtsSliceTestCases[secondary_user], CtsSoundTriggerTestCases, CtsSoundTriggerTestCases[instant], CtsSoundTriggerTestCases[secondary_user], CtsSpeechTestCases, CtsSpeechTestCases[secondary_user], CtsStagedInstallHostTestCases, CtsStatsdHostTestCases, CtsStatsdHostTestCases[instant], CtsStatsdHostTestCases[secondary_user], CtsStrictJavaPackagesTestCases, CtsStrictJavaPackagesTestCases[secondary_user], CtsSuspendAppsPermissionTestCases, CtsSuspendAppsPermissionTestCases[secondary_user], CtsSuspendAppsTestCases, CtsSuspendAppsTestCases[secondary_user], CtsSustainedPerformanceHostTestCases, CtsSustainedPerformanceHostTestCases[secondary_user], CtsSyncAccountAccessOtherCertTestCases, CtsSyncAccountAccessOtherCertTestCases[secondary_user], CtsSyncContentHostTestCases, CtsSyncContentHostTestCases[secondary_user], CtsSyncManagerTestsCases, CtsSyncManagerTestsCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSettingsHostTestCases', '--include-filter', 'CtsSettingsTestCases', '--include-filter', 'CtsSettingsTestCases[secondary_user]', '--include-filter', 'CtsSharedLibsApiSignatureTestCases', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[instant]', '--include-filter', 'CtsSharedLibsApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsSharesheetTestCases', '--include-filter', 'CtsSharesheetTestCases[secondary_user]', '--include-filter', 'CtsShortcutHostTestCases', '--include-filter', 'CtsShortcutManagerLauncher1', '--include-filter', 'CtsShortcutManagerLauncher1[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher2', '--include-filter', 'CtsShortcutManagerLauncher2[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher3', '--include-filter', 'CtsShortcutManagerLauncher3[secondary_user]', '--include-filter', 'CtsShortcutManagerLauncher4', '--include-filter', 'CtsShortcutManagerLauncher4[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage1', '--include-filter', 'CtsShortcutManagerPackage1[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage2', '--include-filter', 'CtsShortcutManagerPackage2[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage3', '--include-filter', 'CtsShortcutManagerPackage3[secondary_user]', '--include-filter', 'CtsShortcutManagerPackage4', '--include-filter', 'CtsShortcutManagerPackage4[secondary_user]', '--include-filter', 'CtsShortcutManagerTestCases', '--include-filter', 'CtsShortcutManagerTestCases[secondary_user]', '--include-filter', 'CtsShortcutManagerThrottlingTest', '--include-filter', 'CtsShortcutManagerThrottlingTest[secondary_user]', '--include-filter', 'CtsSignedConfigHostTestCases', '--include-filter', 'CtsSignedConfigHostTestCases[secondary_user]', '--include-filter', 'CtsSimRestrictedApisTestCases', '--include-filter', 'CtsSimRestrictedApisTestCases[secondary_user]', '--include-filter', 'CtsSimpleCpuTestCases', '--include-filter', 'CtsSimpleCpuTestCases[secondary_user]', '--include-filter', 'CtsSimpleperfTestCases', '--include-filter', 'CtsSkQPTestCases', '--include-filter', 'CtsSkQPTestCases[secondary_user]', '--include-filter', 'CtsSliceTestCases', '--include-filter', 'CtsSliceTestCases[secondary_user]', '--include-filter', 'CtsSoundTriggerTestCases', '--include-filter', 'CtsSoundTriggerTestCases[instant]', '--include-filter', 'CtsSoundTriggerTestCases[secondary_user]', '--include-filter', 'CtsSpeechTestCases', '--include-filter', 'CtsSpeechTestCases[secondary_user]', '--include-filter', 'CtsStagedInstallHostTestCases', '--include-filter', 'CtsStatsdHostTestCases', '--include-filter', 'CtsStatsdHostTestCases[instant]', '--include-filter', 'CtsStatsdHostTestCases[secondary_user]', '--include-filter', 'CtsStrictJavaPackagesTestCases', '--include-filter', 'CtsStrictJavaPackagesTestCases[secondary_user]', '--include-filter', 'CtsSuspendAppsPermissionTestCases', '--include-filter', 'CtsSuspendAppsPermissionTestCases[secondary_user]', '--include-filter', 'CtsSuspendAppsTestCases', '--include-filter', 'CtsSuspendAppsTestCases[secondary_user]', '--include-filter', 'CtsSustainedPerformanceHostTestCases', '--include-filter', 'CtsSustainedPerformanceHostTestCases[secondary_user]', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases', '--include-filter', 'CtsSyncAccountAccessOtherCertTestCases[secondary_user]', '--include-filter', 'CtsSyncContentHostTestCases', '--include-filter', 
'CtsSyncContentHostTestCases[secondary_user]', '--include-filter', 'CtsSyncManagerTestsCases', '--include-filter', 'CtsSyncManagerTestsCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSettingsHostTestCases_-_CtsSyncManagerTestsCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        use_old_adb=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases
new file mode 100644
index 0000000..1648783
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSystemApiAnnotationTestCases, CtsSystemApiAnnotationTestCases[secondary_user], CtsSystemApiSignatureTestCases, CtsSystemApiSignatureTestCases[secondary_user], CtsSystemIntentTestCases, CtsSystemIntentTestCases[secondary_user], CtsSystemUiHostTestCases, CtsSystemUiHostTestCases[instant], CtsSystemUiHostTestCases[secondary_user], CtsSystemUiTestCases, CtsSystemUiTestCases[instant], CtsSystemUiTestCases[secondary_user], CtsTaggingHostTestCases, CtsTaggingHostTestCases[instant], CtsTaggingHostTestCases[secondary_user], CtsTelecomTestCases, CtsTelecomTestCases2, CtsTelecomTestCases2[secondary_user], CtsTelecomTestCases3, CtsTelecomTestCases3[secondary_user], CtsTelecomTestCases[secondary_user], CtsTelephony2TestCases, CtsTelephony2TestCases[instant], CtsTelephony2TestCases[secondary_user], CtsTelephony3TestCases, CtsTelephony3TestCases[secondary_user], CtsTelephonyHostCases, CtsTelephonyHostCases[secondary_user], CtsTelephonyProviderHostCases, CtsTelephonyProviderHostCases[secondary_user], CtsTelephonyProviderTestCases, CtsTelephonyProviderTestCases[secondary_user], CtsTelephonySdk28TestCases, CtsTelephonySdk28TestCases[secondary_user], CtsTelephonyTestCases, CtsTelephonyTestCasesPermissionReadPhoneState, CtsTelephonyTestCasesPermissionReadPhoneState[instant], CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user], CtsTestHarnessModeTestCases, CtsTestHarnessModeTestCases[secondary_user], CtsTetheringTest, CtsTetheringTest[secondary_user], CtsTextClassifierTestCases, CtsTextClassifierTestCases[secondary_user], CtsTextTestCases, CtsTextTestCases[instant], CtsTextTestCases[secondary_user], CtsTfliteNnapiDelegateTestCases, CtsTfliteNnapiDelegateTestCases[secondary_user], CtsThemeDeviceTestCases, CtsThemeDeviceTestCases[secondary_user], CtsThemeHostTestCases, CtsThemeHostTestCases[secondary_user], CtsThermalTestCases, CtsThermalTestCases[secondary_user], CtsToastLegacyTestCases, CtsToastLegacyTestCases[secondary_user], CtsToastTestCases, CtsToastTestCases[instant], CtsToastTestCases[secondary_user], CtsTransitionTestCases, CtsTransitionTestCases[secondary_user], CtsTrustedVoiceHostTestCases, CtsTrustedVoiceHostTestCases[secondary_user], CtsTvProviderTestCases, CtsTvProviderTestCases[secondary_user], CtsTvTestCases, CtsTvTestCases[secondary_user], CtsUiAutomationTestCases, CtsUiAutomationTestCases[instant], CtsUiAutomationTestCases[secondary_user], CtsUiRenderingTestCases, CtsUiRenderingTestCases27, CtsUiRenderingTestCases27[instant], CtsUiRenderingTestCases27[secondary_user], CtsUiRenderingTestCases[instant], CtsUiRenderingTestCases[secondary_user], CtsUidIsolationTestCases, CtsUidIsolationTestCases[instant], CtsUidIsolationTestCases[secondary_user], CtsUsageStatsTestCases, CtsUsageStatsTestCases[instant], CtsUsageStatsTestCases[secondary_user], CtsUsbManagerTestCases, CtsUsbManagerTestCases[secondary_user], CtsUsbTests, CtsUsbTests[instant], CtsUsbTests[secondary_user], CtsUsesLibraryHostTestCases, CtsUsesLibraryHostTestCases[secondary_user], CtsUtilTestCases, CtsUtilTestCases[instant], CtsUtilTestCases[secondary_user], CtsVideoTestCases, CtsVideoTestCases[secondary_user], CtsViewInspectorAnnotationProcessorTestCases, CtsViewInspectorAnnotationProcessorTestCases[instant], CtsViewInspectorAnnotationProcessorTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSystemApiAnnotationTestCases', '--include-filter', 'CtsSystemApiAnnotationTestCases[secondary_user]', '--include-filter', 'CtsSystemApiSignatureTestCases', '--include-filter', 'CtsSystemApiSignatureTestCases[secondary_user]', '--include-filter', 'CtsSystemIntentTestCases', '--include-filter', 'CtsSystemIntentTestCases[secondary_user]', '--include-filter', 'CtsSystemUiHostTestCases', '--include-filter', 'CtsSystemUiHostTestCases[instant]', '--include-filter', 'CtsSystemUiHostTestCases[secondary_user]', '--include-filter', 'CtsSystemUiTestCases', '--include-filter', 'CtsSystemUiTestCases[instant]', '--include-filter', 'CtsSystemUiTestCases[secondary_user]', '--include-filter', 'CtsTaggingHostTestCases', '--include-filter', 'CtsTaggingHostTestCases[instant]', '--include-filter', 'CtsTaggingHostTestCases[secondary_user]', '--include-filter', 'CtsTelecomTestCases', '--include-filter', 'CtsTelecomTestCases2', '--include-filter', 'CtsTelecomTestCases2[secondary_user]', '--include-filter', 'CtsTelecomTestCases3', '--include-filter', 'CtsTelecomTestCases3[secondary_user]', '--include-filter', 'CtsTelecomTestCases[secondary_user]', '--include-filter', 'CtsTelephony2TestCases', '--include-filter', 'CtsTelephony2TestCases[instant]', '--include-filter', 'CtsTelephony2TestCases[secondary_user]', '--include-filter', 'CtsTelephony3TestCases', '--include-filter', 'CtsTelephony3TestCases[secondary_user]', '--include-filter', 'CtsTelephonyHostCases', '--include-filter', 'CtsTelephonyHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderHostCases', '--include-filter', 'CtsTelephonyProviderHostCases[secondary_user]', '--include-filter', 'CtsTelephonyProviderTestCases', '--include-filter', 'CtsTelephonyProviderTestCases[secondary_user]', '--include-filter', 'CtsTelephonySdk28TestCases', '--include-filter', 'CtsTelephonySdk28TestCases[secondary_user]', '--include-filter', 'CtsTelephonyTestCases', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[instant]', '--include-filter', 'CtsTelephonyTestCasesPermissionReadPhoneState[secondary_user]', '--include-filter', 'CtsTestHarnessModeTestCases', '--include-filter', 'CtsTestHarnessModeTestCases[secondary_user]', '--include-filter', 'CtsTetheringTest', '--include-filter', 'CtsTetheringTest[secondary_user]', '--include-filter', 'CtsTextClassifierTestCases', '--include-filter', 'CtsTextClassifierTestCases[secondary_user]', '--include-filter', 'CtsTextTestCases', '--include-filter', 'CtsTextTestCases[instant]', '--include-filter', 'CtsTextTestCases[secondary_user]', '--include-filter', 'CtsTfliteNnapiDelegateTestCases', '--include-filter', 'CtsTfliteNnapiDelegateTestCases[secondary_user]', '--include-filter', 'CtsThemeDeviceTestCases', '--include-filter', 'CtsThemeDeviceTestCases[secondary_user]', '--include-filter', 'CtsThemeHostTestCases', '--include-filter', 'CtsThemeHostTestCases[secondary_user]', '--include-filter', 'CtsThermalTestCases', '--include-filter', 'CtsThermalTestCases[secondary_user]', '--include-filter', 'CtsToastLegacyTestCases', '--include-filter', 'CtsToastLegacyTestCases[secondary_user]', '--include-filter', 'CtsToastTestCases', '--include-filter', 'CtsToastTestCases[instant]', '--include-filter', 'CtsToastTestCases[secondary_user]', '--include-filter', 'CtsTransitionTestCases', '--include-filter', 'CtsTransitionTestCases[secondary_user]', '--include-filter', 
'CtsTrustedVoiceHostTestCases', '--include-filter', 'CtsTrustedVoiceHostTestCases[secondary_user]', '--include-filter', 'CtsTvProviderTestCases', '--include-filter', 'CtsTvProviderTestCases[secondary_user]', '--include-filter', 'CtsTvTestCases', '--include-filter', 'CtsTvTestCases[secondary_user]', '--include-filter', 'CtsUiAutomationTestCases', '--include-filter', 'CtsUiAutomationTestCases[instant]', '--include-filter', 'CtsUiAutomationTestCases[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases', '--include-filter', 'CtsUiRenderingTestCases27', '--include-filter', 'CtsUiRenderingTestCases27[instant]', '--include-filter', 'CtsUiRenderingTestCases27[secondary_user]', '--include-filter', 'CtsUiRenderingTestCases[instant]', '--include-filter', 'CtsUiRenderingTestCases[secondary_user]', '--include-filter', 'CtsUidIsolationTestCases', '--include-filter', 'CtsUidIsolationTestCases[instant]', '--include-filter', 'CtsUidIsolationTestCases[secondary_user]', '--include-filter', 'CtsUsageStatsTestCases', '--include-filter', 'CtsUsageStatsTestCases[instant]', '--include-filter', 'CtsUsageStatsTestCases[secondary_user]', '--include-filter', 'CtsUsbManagerTestCases', '--include-filter', 'CtsUsbManagerTestCases[secondary_user]', '--include-filter', 'CtsUsbTests', '--include-filter', 'CtsUsbTests[instant]', '--include-filter', 'CtsUsbTests[secondary_user]', '--include-filter', 'CtsUsesLibraryHostTestCases', '--include-filter', 'CtsUsesLibraryHostTestCases[secondary_user]', '--include-filter', 'CtsUtilTestCases', '--include-filter', 'CtsUtilTestCases[instant]', '--include-filter', 'CtsUtilTestCases[secondary_user]', '--include-filter', 'CtsVideoTestCases', '--include-filter', 'CtsVideoTestCases[secondary_user]', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[instant]', '--include-filter', 'CtsViewInspectorAnnotationProcessorTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsSystemApiAnnotationTestCases_-_CtsViewInspectorAnnotationProcessorTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases
new file mode 100644
index 0000000..d05032c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases, CtsViewTestCasesSdk28, CtsViewTestCasesSdk28[instant], CtsViewTestCasesSdk28[secondary_user], CtsViewTestCases[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=9,
+        tag='internal.x86.all.CtsViewTestCases_-_CtsViewTestCases',
+        test_name='cheets_CTS_R.internal.x86.all.CtsViewTestCases_-_CtsViewTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewTestCases', '--include-filter', 'CtsViewTestCasesSdk28', '--include-filter', 'CtsViewTestCasesSdk28[instant]', '--include-filter', 'CtsViewTestCasesSdk28[secondary_user]', '--include-filter', 'CtsViewTestCases[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsViewTestCases_-_CtsViewTestCases',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsVoiceInteractionTestCases_-_vm-tests-tf b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsVoiceInteractionTestCases_-_vm-tests-tf
new file mode 100644
index 0000000..3047cd0
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.all.CtsVoiceInteractionTestCases_-_vm-tests-tf
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.all.CtsVoiceInteractionTestCases_-_vm-tests-tf'
+ATTRIBUTES = 'suite:arc-cts-qual'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsVoiceInteractionTestCases, CtsVoiceInteractionTestCases[instant], CtsVoiceInteractionTestCases[secondary_user], CtsVoiceSettingsTestCases, CtsVoiceSettingsTestCases[instant], CtsVoiceSettingsTestCases[secondary_user], CtsVrTestCases, CtsVrTestCases[secondary_user], CtsWebkitTestCases, CtsWebkitTestCases[instant], CtsWebkitTestCases[secondary_user], CtsWidgetTestCases, CtsWidgetTestCases29, CtsWidgetTestCases29[instant], CtsWidgetTestCases29[secondary_user], CtsWidgetTestCases[instant], CtsWidgetTestCases[secondary_user], CtsWifiBroadcastsHostTestCases, CtsWifiBroadcastsHostTestCases[instant], CtsWifiBroadcastsHostTestCases[secondary_user], CtsWifiTestCases, CtsWifiTestCases[instant], CtsWifiTestCases[secondary_user], CtsWindowManagerDeviceTestCases, CtsWindowManagerDeviceTestCases[secondary_user], CtsWindowManagerJetpackTestCases, CtsWindowManagerJetpackTestCases[secondary_user], CtsWindowManagerSdk25TestCases, CtsWindowManagerSdk25TestCases[secondary_user], CtsWindowManagerSdk28TestCases, CtsWindowManagerSdk28TestCases[secondary_user], CtsWindowManagerSdk29TestCases, CtsWindowManagerSdk29TestCases[secondary_user], CtsWrapNoWrapTestCases, CtsWrapNoWrapTestCases[secondary_user], CtsWrapWrapDebugMallocDebugTestCases, CtsWrapWrapDebugMallocDebugTestCases[secondary_user], CtsWrapWrapDebugTestCases, CtsWrapWrapDebugTestCases[secondary_user], CtsWrapWrapNoDebugTestCases, CtsWrapWrapNoDebugTestCases[secondary_user], LegacyStorageTest, LegacyStorageTest[instant], ScopedStorageTest, ScopedStorageTest[instant], signed-CtsOmapiTestCases, signed-CtsOmapiTestCases[secondary_user], signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases1[secondary_user], signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases2[secondary_user], signed-CtsSecureElementAccessControlTestCases3, signed-CtsSecureElementAccessControlTestCases3[secondary_user], vm-tests-tf, vm-tests-tf[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='internal.x86.all.CtsVoiceInteractionTestCases_-_vm-tests-tf',
+        test_name='cheets_CTS_R.internal.x86.all.CtsVoiceInteractionTestCases_-_vm-tests-tf',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsVoiceInteractionTestCases', '--include-filter', 'CtsVoiceInteractionTestCases[instant]', '--include-filter', 'CtsVoiceInteractionTestCases[secondary_user]', '--include-filter', 'CtsVoiceSettingsTestCases', '--include-filter', 'CtsVoiceSettingsTestCases[instant]', '--include-filter', 'CtsVoiceSettingsTestCases[secondary_user]', '--include-filter', 'CtsVrTestCases', '--include-filter', 'CtsVrTestCases[secondary_user]', '--include-filter', 'CtsWebkitTestCases', '--include-filter', 'CtsWebkitTestCases[instant]', '--include-filter', 'CtsWebkitTestCases[secondary_user]', '--include-filter', 'CtsWidgetTestCases', '--include-filter', 'CtsWidgetTestCases29', '--include-filter', 'CtsWidgetTestCases29[instant]', '--include-filter', 'CtsWidgetTestCases29[secondary_user]', '--include-filter', 'CtsWidgetTestCases[instant]', '--include-filter', 'CtsWidgetTestCases[secondary_user]', '--include-filter', 'CtsWifiBroadcastsHostTestCases', '--include-filter', 'CtsWifiBroadcastsHostTestCases[instant]', '--include-filter', 'CtsWifiBroadcastsHostTestCases[secondary_user]', '--include-filter', 'CtsWifiTestCases', '--include-filter', 'CtsWifiTestCases[instant]', '--include-filter', 'CtsWifiTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerDeviceTestCases', '--include-filter', 'CtsWindowManagerDeviceTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerJetpackTestCases', '--include-filter', 'CtsWindowManagerJetpackTestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk25TestCases', '--include-filter', 'CtsWindowManagerSdk25TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk28TestCases', '--include-filter', 'CtsWindowManagerSdk28TestCases[secondary_user]', '--include-filter', 'CtsWindowManagerSdk29TestCases', '--include-filter', 'CtsWindowManagerSdk29TestCases[secondary_user]', '--include-filter', 'CtsWrapNoWrapTestCases', '--include-filter', 'CtsWrapNoWrapTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases', '--include-filter', 'CtsWrapWrapDebugMallocDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapDebugTestCases', '--include-filter', 'CtsWrapWrapDebugTestCases[secondary_user]', '--include-filter', 'CtsWrapWrapNoDebugTestCases', '--include-filter', 'CtsWrapWrapNoDebugTestCases[secondary_user]', '--include-filter', 'LegacyStorageTest', '--include-filter', 'LegacyStorageTest[instant]', '--include-filter', 'ScopedStorageTest', '--include-filter', 'ScopedStorageTest[instant]', '--include-filter', 'signed-CtsOmapiTestCases', '--include-filter', 'signed-CtsOmapiTestCases[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3[secondary_user]', '--include-filter', 'vm-tests-tf', '--include-filter', 'vm-tests-tf[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='all.CtsVoiceInteractionTestCases_-_vm-tests-tf',
+        target_plan=None,
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=172800)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.signed-Cts b/server/site_tests/cheets_CTS_R/control.internal.x86.signed-Cts
new file mode 100644
index 0000000..f2c8e32
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.signed-Cts
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.signed-Cts'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module signed-CtsOmapiTestCases, signed-CtsOmapiTestCases[secondary_user], signed-CtsSecureElementAccessControlTestCases1, signed-CtsSecureElementAccessControlTestCases1[secondary_user], signed-CtsSecureElementAccessControlTestCases2, signed-CtsSecureElementAccessControlTestCases2[secondary_user], signed-CtsSecureElementAccessControlTestCases3, signed-CtsSecureElementAccessControlTestCases3[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.signed-Cts',
+        test_name='cheets_CTS_R.internal.x86.signed-Cts',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'signed-CtsOmapiTestCases', '--include-filter', 'signed-CtsOmapiTestCases[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases1[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases2[secondary_user]', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3', '--include-filter', 'signed-CtsSecureElementAccessControlTestCases3[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='signed-Cts',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=16200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.tradefed-run-collect-tests-only-hardware-internal b/server/site_tests/cheets_CTS_R/control.internal.x86.tradefed-run-collect-tests-only-hardware-internal
new file mode 100644
index 0000000..259f00c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.tradefed-run-collect-tests-only-hardware-internal
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.tradefed-run-collect-tests-only-hardware-internal'
+ATTRIBUTES = 'suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module tradefed-run-collect-tests-only-hardware-internal of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.tradefed-run-collect-tests-only-hardware-internal',
+        test_name='cheets_CTS_R.internal.x86.tradefed-run-collect-tests-only-hardware-internal',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--subplan', 'cts-hardware', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='tradefed-run-collect-tests-only-hardware-internal',
+        target_plan='cts-hardware',
+        bundle='x86',
+        uri='LATEST',
+        use_jdk9=True,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.vm-tests-tf b/server/site_tests/cheets_CTS_R/control.internal.x86.vm-tests-tf
new file mode 100644
index 0000000..7723e1a
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.vm-tests-tf
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.vm-tests-tf'
+ATTRIBUTES = 'suite:arc-cts, suite:arc-cts-r, suite:arc-cts-unibuild'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module vm-tests-tf, vm-tests-tf[secondary_user] of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='internal.x86.vm-tests-tf',
+        test_name='cheets_CTS_R.internal.x86.vm-tests-tf',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'vm-tests-tf', '--include-filter', 'vm-tests-tf[secondary_user]', '--logcat-on-failure'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='vm-tests-tf',
+        target_plan=None,
+        bundle='x86',
+        uri='DEV',
+        use_jdk9=True,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.internal.x86.waivers b/server/site_tests/cheets_CTS_R/control.internal.x86.waivers
new file mode 100644
index 0000000..fffcfe4
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.internal.x86.waivers
@@ -0,0 +1,42 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is not auto-generated. Don't delete it.
+from autotest_lib.client.common_lib import utils, global_config
+import pipes
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.internal.x86.waivers'
+ATTRIBUTES = 'suite:arc-cts-r, suite:arc-cts-qual, suite:arc-cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using x86 ABI in ARC.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    ssid = utils.get_wireless_ssid(machine['hostname'])
+    wifipass = global_config.global_config.get_config_value('CLIENT',
+                'wireless_password', default=None)
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='internal.x86.waivers',
+        test_name='cheets_CTS_R.internal.x86.waivers',
+        run_template=['run', 'commandAndExit', 'cts', '--subplan', 'waivers'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='cts-dev',
+        target_plan='waivers',
+        load_waivers=False,
+        bundle='x86',
+        uri='DEV_WAIVER',
+        use_jdk9=True,
+        timeout=14400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.tradefed-run-test b/server/site_tests/cheets_CTS_R/control.tradefed-run-test
index 31f1556..7d4540a 100644
--- a/server/site_tests/cheets_CTS_R/control.tradefed-run-test
+++ b/server/site_tests/cheets_CTS_R/control.tradefed-run-test
@@ -33,7 +33,7 @@
 cts_retry = 5
 cts_revision = None
 cts_test = ''
-cts_timeout = 600
+cts_timeout = 3600
 
 # Pull parameters either from run_suite or test_that.
 if 'args_dict' in vars():
@@ -64,6 +64,7 @@
        '-linux_x86-' + cts_abi + '.zip') if cts_revision else 'LATEST'
 run_template = ['run', 'commandAndExit', 'cts',
                 '--include-filter', cts_module + ' ' + cts_test,
+                '--skip-device-info',
                 '--logcat-on-failure']
 retry_template = ['run', 'commandAndExit', 'retry',
                   '--retry', '{session_id}']
@@ -86,6 +87,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 256000
+PY_VERSION = 3
 DOC = 'Run a test of the Android Compatibility Test Suite (CTS) in ARC++'
 
 # And launch.
@@ -95,6 +97,7 @@
         'cheets_CTS_R',
         hosts=host_list,
         iterations=1,
+        enable_default_apps=True,
         max_retry=cts_retry,
         needs_push_media=True,
         tag=tag,
@@ -106,6 +109,11 @@
         bundle=cts_abi,
         use_jdk9=True,
         uri=uri,
+        load_waivers=('#' not in cts_test),  # No waivers for single-test runs
+        retry_manual_tests=('#' in cts_test),  # No waivers for single-test runs
+        login_precondition_commands=[
+            'lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'
+        ],
         timeout=cts_timeout)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAbiOverrideHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAbiOverrideHostTestCases
index 025bc39..3d2d201 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAbiOverrideHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAbiOverrideHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAbiOverrideHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAccelerationTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAccelerationTestCases
index 860d718..45d5731 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAccelerationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAccelerationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccelerationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityServiceSdk29TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityServiceSdk29TestCases
index bee7abb..d3ff293 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityServiceSdk29TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityServiceSdk29TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityServiceSdk29TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityServiceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityServiceTestCases
index cfe6bfa..45f9b3e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityTestCases
index debece3..5d25deb 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAccessibilityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccessibilityTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAccountManagerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAccountManagerTestCases
index 341f581..66660b2 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAccountManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAccountManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccountManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAccountsHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAccountsHostTestCases
index ed303a9..2925253 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAccountsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAccountsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAccountsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsActivityManagerBackgroundActivityTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsActivityManagerBackgroundActivityTestCases
index 02152aa..ca9bdc0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsActivityManagerBackgroundActivityTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsActivityManagerBackgroundActivityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsActivityManagerBackgroundActivityTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAdbHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAdbHostTestCases
index 4750d1d..a3c9e6d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAdbHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAdbHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdbHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAdbManagerHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAdbManagerHostTestCases
index d448389..65e432d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAdbManagerHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAdbManagerHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdbManagerHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAdminPackageInstallerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAdminPackageInstallerTestCases
index 79f2cb4..e89b135 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAdminPackageInstallerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAdminPackageInstallerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdminPackageInstallerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAdminTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAdminTestCases
index 49f34eb..88edd8d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAdminTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAdminTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAdminTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAlarmManagerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAlarmManagerTestCases
index e3ec814..e329056 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAlarmManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAlarmManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAlarmManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidAppTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidAppTestCases
index c329b42..ca7a867 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidAppTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidAppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidAppTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestBase28ApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestBase28ApiSignatureTestCases
index fe518a8..9f4ee2b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestBase28ApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestBase28ApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestBase28ApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestBaseCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestBaseCurrentApiSignatureTestCases
index 93a4543..7ff8ad0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestBaseCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestBaseCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestBaseCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestMockCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestMockCurrentApiSignatureTestCases
index a120e63..2232086 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestMockCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestMockCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestMockCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestRunnerCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestRunnerCurrentApiSignatureTestCases
index 48f9248..6666c83 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestRunnerCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAndroidTestRunnerCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAndroidTestRunnerCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAngleIntegrationHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAngleIntegrationHostTestCases
index 7acb664..cfe4d22 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAngleIntegrationHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAngleIntegrationHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAngleIntegrationHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAnimationTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAnimationTestCases
index 2a92923..f6e66cf 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAnimationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAnimationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAnimationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacy27ApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacy27ApiSignatureTestCases
index d82b084..728a418 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacy27ApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacy27ApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacy27ApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacyCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacyCurrentApiSignatureTestCases
index 3b4fe59..de6659d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacyCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacyCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacyCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
index ec4f254..6890222 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApacheHttpLegacyUsesLibraryApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsApexTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsApexTestCases
index 1943150..fcfab04 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsApexTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsApexTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsApexTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppBindingHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppBindingHostTestCases
index b600524..8e58459 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppBindingHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppBindingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppBindingHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppCompatHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppCompatHostTestCases
index 9c91310..c313158 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppCompatHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppCompatHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppCompatHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppComponentFactoryTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppComponentFactoryTestCases
index 6148044..935b2b6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppComponentFactoryTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppComponentFactoryTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppComponentFactoryTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppEnumerationTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppEnumerationTestCases
index f0e30ee..63ef20a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppEnumerationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppEnumerationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppEnumerationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppExitTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppExitTestCases
index c60c4f0..b34fa30 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppExitTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppExitTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppExitTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppIntegrityDeviceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppIntegrityDeviceTestCases
index f259ce7..850f02b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppIntegrityDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppIntegrityDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppIntegrityDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppOpsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppOpsTestCases
index b005ce7..65a8c48 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppOpsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppOpsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppOpsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppPredictionServiceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppPredictionServiceTestCases
index 4fe1687..dca1f5b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppPredictionServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppPredictionServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppPredictionServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppSecurityHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppSecurityHostTestCases
index 5a2d481..f8d9db2 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -31,6 +32,6 @@
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
-        timeout=3600)
+        timeout=7200)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppTestCases
index fa4d23d..9ce6773 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppTestCases.feature.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsAppTestCases.feature.ctshardware
new file mode 100644
index 0000000..279965e
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppTestCases.feature.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsAppTestCases.feature.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsAppTestCases.feature of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsAppTestCases.feature.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsAppTestCases.feature.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsAppTestCases android.app.cts.SystemFeaturesTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsAppTestCases.feature',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppUsageHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppUsageHostTestCases
index e74eaab..4517add 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppUsageHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppUsageHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppUsageHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAppWidgetTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAppWidgetTestCases
index acfac31..9a953a7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAppWidgetTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAppWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAppWidgetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAslrMallocTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAslrMallocTestCases
index 8386d78..9bbd22c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAslrMallocTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAslrMallocTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAslrMallocTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAssistTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAssistTestCases
index 47fa287..d5696cf 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAssistTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAssistTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAssistTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAtomicInstallTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAtomicInstallTestCases
index 3fdc93a..35edb7a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAtomicInstallTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAtomicInstallTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAtomicInstallTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAtraceHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAtraceHostTestCases
index 0875f1d..cb6e638 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAtraceHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAtraceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAtraceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAttentionServiceDeviceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAttentionServiceDeviceTestCases
index 008f59c..c92171d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAttentionServiceDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAttentionServiceDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAttentionServiceDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsAutoFillServiceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsAutoFillServiceTestCases
index 680c5cc..11e35d8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsAutoFillServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsAutoFillServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsAutoFillServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBackgroundRestrictionsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsBackgroundRestrictionsTestCases
index 147a7f2..1b8944e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBackgroundRestrictionsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBackgroundRestrictionsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackgroundRestrictionsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBackupHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsBackupHostTestCases
index 5b62102..ec3974f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBackupHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBackupHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackupHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBackupTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsBackupTestCases
index a5bb713..6de23c1 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBackupTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBackupTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBackupTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBatterySavingTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsBatterySavingTestCases
index 43cd965..50cd256 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBatterySavingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBatterySavingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBatterySavingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBionicAppTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsBionicAppTestCases
index d30bee4..543b151 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBionicAppTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBionicAppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBionicAppTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBionicTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsBionicTestCases
index 0186982..ac3db10 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBionicTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBionicTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBionicTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreHostTestCases
index ff431c0..8c4ef88 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreHostTestHelper b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreHostTestHelper
index cda580a..b13f021 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreHostTestHelper
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreHostTestHelper
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreHostTestHelper of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestCases
index c095bc7..ae498be 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelper b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelper
index 80c071b..d25b6d1 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelper
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelper
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreTestHelper of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelperDiffSig b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelperDiffSig
index 52629d0..d471be5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelperDiffSig
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelperDiffSig
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreTestHelperDiffSig of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelperDiffSig2 b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelperDiffSig2
index 0129cb9..16c6d35 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelperDiffSig2
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBlobStoreTestHelperDiffSig2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBlobStoreTestHelperDiffSig2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBluetoothTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsBluetoothTestCases
index 4207274..2675ccd 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBluetoothTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBluetoothTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBluetoothTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsBootStatsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsBootStatsTestCases
index 47f0852..ba7567b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsBootStatsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsBootStatsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsBootStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCalendarProviderTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCalendarProviderTestCases
index f0b0bbb..4ab3585 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCalendarProviderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCalendarProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCalendarProviderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCalendarcommon2TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCalendarcommon2TestCases
index 4daf32a..78f7eb0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCalendarcommon2TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCalendarcommon2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCalendarcommon2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCameraApi25TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCameraApi25TestCases
index 7fc17d0..2a3305d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCameraApi25TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCameraApi25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCameraApi25TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases
index 8725320..8ef7ab6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
@@ -31,6 +32,6 @@
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
-        timeout=3600)
+        timeout=5400)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases.NativeCameraDeviceTest b/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases.NativeCameraDeviceTest
new file mode 100644
index 0000000..4d470f9
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases.NativeCameraDeviceTest
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsCameraTestCases.NativeCameraDeviceTest'
+ATTRIBUTES = 'suite:cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsCameraTestCases.NativeCameraDeviceTest of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsCameraTestCases.NativeCameraDeviceTest',
+        test_name='cheets_CTS_R.x86.CtsCameraTestCases.NativeCameraDeviceTest',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases[instant] android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]', '--include-filter', 'CtsCameraTestCases[instant] android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        precondition_commands=['sleep 20', 'android-sh -c \'am start -a android.intent.action.VIEW -d https://webglsamples.org/electricflower/electricflower.html\''],
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware
new file mode 100644
index 0000000..afa0268
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsCameraTestCases.NativeCameraDeviceTest of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsCameraTestCases.NativeCameraDeviceTest.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.NativeCameraDeviceTest', '--include-filter', 'CtsCameraTestCases android.hardware.camera2.cts.RecordingTest#testVideoPreviewSurfaceSharing[1]'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        precondition_commands=['sleep 20', 'android-sh -c \'am start -a android.intent.action.VIEW -d https://webglsamples.org/electricflower/electricflower.html\''],
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases.ctshardware
new file mode 100644
index 0000000..30be6aa
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCameraTestCases.ctshardware
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsCameraTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86, lighting'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 70
+DOC = 'Run module CtsCameraTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsCameraTestCases.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsCameraTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsCameraTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsCameraTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=5400)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCarHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCarHostTestCases
index 5b63924..a3fec44 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCarHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCarHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCarHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCarTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCarTestCases
index 1e32878..7cabe33 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCarTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCarTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCarTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCarrierApiTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCarrierApiTestCases
index 29e3f84..4e41d8d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCarrierApiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCarrierApiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCarrierApiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases
index b886285..1bac7f4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsClassLoaderFactoryInMemoryDexClassLoaderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsClassLoaderFactoryPathClassLoaderTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsClassLoaderFactoryPathClassLoaderTestCases
index a67f49b..a827ed8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsClassLoaderFactoryPathClassLoaderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsClassLoaderFactoryPathClassLoaderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsClassLoaderFactoryPathClassLoaderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsClassloaderSplitsHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsClassloaderSplitsHostTestCases
index aef6eb4..9e3b4a9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsClassloaderSplitsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsClassloaderSplitsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsClassloaderSplitsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCodePathHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCodePathHostTestCases
index 116edd8..7e85ba9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCodePathHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCodePathHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCodePathHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsColorModeTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsColorModeTestCases
index fe329fd..0872305 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsColorModeTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsColorModeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsColorModeTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCompilationTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCompilationTestCases
index ac0bd84..4ebd8fb 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCompilationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCompilationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCompilationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsContactsProviderTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsContactsProviderTestCases
index 604d8ab..3e4ba97 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsContactsProviderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsContactsProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContactsProviderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsContactsProviderWipe b/server/site_tests/cheets_CTS_R/control.x86.CtsContactsProviderWipe
index 823c54f..7e9837a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsContactsProviderWipe
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsContactsProviderWipe
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContactsProviderWipe of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsContentCaptureServiceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsContentCaptureServiceTestCases
index 3d6e18d..f28671c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsContentCaptureServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsContentCaptureServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContentCaptureServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsContentSuggestionsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsContentSuggestionsTestCases
index 82f6f17..7a7c376 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsContentSuggestionsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsContentSuggestionsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContentSuggestionsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsContentTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsContentTestCases
index 17e201f..c367fe9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsContentTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsContentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsContentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsControlsDeviceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsControlsDeviceTestCases
index 5602170..abf06fc 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsControlsDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsControlsDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsControlsDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCppToolsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCppToolsTestCases
index 5096562..a9dcfef 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCppToolsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCppToolsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCppToolsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsCurrentApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsCurrentApiSignatureTestCases
index 4f714c7..0998329 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsCurrentApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsCurrentApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsCurrentApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDatabaseTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDatabaseTestCases
index dccd5ea..1231b16 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDatabaseTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDatabaseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDatabaseTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases
deleted file mode 100644
index 2c0e47b..0000000
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.x86.CtsDeqpTestCases'
-ATTRIBUTES = 'suite:cts'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 1024000
-PRIORITY = 70
-DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='x86.CtsDeqpTestCases',
-        test_name='cheets_CTS_R.x86.CtsDeqpTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsDeqpTestCases',
-        target_plan=None,
-        bundle='x86',
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=108000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases.32 b/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases.32
new file mode 100644
index 0000000..c06fa11
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases.32
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsDeqpTestCases.32'
+ATTRIBUTES = 'suite:cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+PRIORITY = 70
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsDeqpTestCases.32',
+        test_name='cheets_CTS_R.x86.CtsDeqpTestCases.32',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'x86'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=108000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases.64 b/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases.64
new file mode 100644
index 0000000..1fc5ccd
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases.64
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsDeqpTestCases.64'
+ATTRIBUTES = 'suite:cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+PRIORITY = 70
+DOC = 'Run module CtsDeqpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsDeqpTestCases.64',
+        test_name='cheets_CTS_R.x86.CtsDeqpTestCases.64',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsDeqpTestCases', '--abi', 'x86_64'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=108000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
new file mode 100644
index 0000000..4164a29
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsDeqpTestCases dEQP-GLES3.functional.prerequisite#*'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsDeqpTestCases.dEQP-GLES3.functional.prerequisite',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDeviceConfigTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDeviceConfigTestCases
index 552ded4..5455077 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDeviceConfigTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDeviceConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDeviceConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDeviceIdleHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDeviceIdleHostTestCases
index 727fe33..c579cda 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDeviceIdleHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDeviceIdleHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDeviceIdleHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDevicePolicyManagerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDevicePolicyManagerTestCases
index 3b361f0..1d5e948 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDevicePolicyManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDevicePolicyManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDevicePolicyManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDexMetadataHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDexMetadataHostTestCases
index de76a85..4d38738 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDexMetadataHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDexMetadataHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDexMetadataHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDisplayTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDisplayTestCases
index e03f926..9a790fe 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDisplayTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDisplayTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDisplayTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDownloadManagerApi28 b/server/site_tests/cheets_CTS_R/control.x86.CtsDownloadManagerApi28
index 4f5dcc6..27456f2 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDownloadManagerApi28
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDownloadManagerApi28
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDownloadManagerApi28 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDownloadManagerInstaller b/server/site_tests/cheets_CTS_R/control.x86.CtsDownloadManagerInstaller
index f85e33f..e317098 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDownloadManagerInstaller
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDownloadManagerInstaller
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDownloadManagerInstaller of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDpiTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDpiTestCases
index 51b30e1..2b6eb95 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDpiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDpiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDpiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDpiTestCases2 b/server/site_tests/cheets_CTS_R/control.x86.CtsDpiTestCases2
index 630ff9d..344a339 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDpiTestCases2
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDpiTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDpiTestCases2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDreamsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDreamsTestCases
index e371fc2..001ac50 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDreamsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDreamsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDreamsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDrmTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDrmTestCases
index c422abc..8d29542 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDrmTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDrmTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDrmTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDropBoxManagerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDropBoxManagerTestCases
index 8ee0f1b..6939c30 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDropBoxManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDropBoxManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDropBoxManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDumpsysHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDumpsysHostTestCases
index 3700d53..3dd3f94 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDumpsysHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDumpsysHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDumpsysHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDynamicLinkerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDynamicLinkerTestCases
index 9da2b8a..32887ea 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDynamicLinkerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDynamicLinkerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDynamicLinkerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsDynamicMimeHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsDynamicMimeHostTestCases
index e9f20d4..43c31ed 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsDynamicMimeHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsDynamicMimeHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsDynamicMimeHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsEdiHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsEdiHostTestCases
index d26df96..f774381 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsEdiHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsEdiHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsEdiHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsEffectTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsEffectTestCases
index b541305..1d37667 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsEffectTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsEffectTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsEffectTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsExtendedMockingTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsExtendedMockingTestCases
index b072954..c4b2f37 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsExtendedMockingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsExtendedMockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExtendedMockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsExternalServiceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsExternalServiceTestCases
index fdec6c7..ca0ee5a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsExternalServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsExternalServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExternalServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsExtractNativeLibsHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsExtractNativeLibsHostTestCases
index e154b6e..8a075cd 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsExtractNativeLibsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsExtractNativeLibsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsExtractNativeLibsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsFileSystemTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsFileSystemTestCases
index f9e38d5..ab32e70 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsFileSystemTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsFileSystemTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFileSystemTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsFragmentTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsFragmentTestCases
index a0b2cd2..e58cae4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsFragmentTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsFragmentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsFragmentTestCasesSdk26 b/server/site_tests/cheets_CTS_R/control.x86.CtsFragmentTestCasesSdk26
index 9315872..c46debf 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsFragmentTestCasesSdk26
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsFragmentTestCasesSdk26
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFragmentTestCasesSdk26 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsFsMgrTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsFsMgrTestCases
index 4131674..6819c2c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsFsMgrTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsFsMgrTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsFsMgrTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsGestureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsGestureTestCases
index 992f861..8017a6f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsGestureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsGestureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGestureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsGpuProfilingDataTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsGpuProfilingDataTestCases
index 41bdbb7..5621cfa 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsGpuProfilingDataTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsGpuProfilingDataTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGpuProfilingDataTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsGpuToolsHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsGpuToolsHostTestCases
index 89c9b5d..dbc944b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsGpuToolsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsGpuToolsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGpuToolsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsGraphicsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsGraphicsTestCases
index dee9d66..98c91b1 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsGraphicsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsGraphicsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGraphicsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsGwpAsanTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsGwpAsanTestCases
index 9e3152d..f0a317c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsGwpAsanTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsGwpAsanTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsGwpAsanTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHardwareTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHardwareTestCases
index cad3b9e..0899bc2 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHardwareTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHardwareTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHardwareTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -20,6 +21,7 @@
         'cheets_CTS_R',
         hosts=host_list,
         iterations=1,
+        max_retry=30,
         tag='x86.CtsHardwareTestCases',
         test_name='cheets_CTS_R.x86.CtsHardwareTestCases',
         run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsHardwareTestCases'],
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHarmfulAppWarningHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHarmfulAppWarningHostTestCases
index 5363f84..0d5330c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHarmfulAppWarningHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHarmfulAppWarningHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHarmfulAppWarningHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHdmiCecHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHdmiCecHostTestCases
index 1652286..c4b00cc 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHdmiCecHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHdmiCecHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHdmiCecHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistApi27TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistApi27TestCases
index bee2d3f..4a9073c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistApi27TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistApi27TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistApi27TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistApi28TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistApi28TestCases
index 8c352e2..1682e78 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistApi28TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistApi28TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistApi28TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistCurrentApiTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistCurrentApiTestCases
index 3c4bbb8..3745fb7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistCurrentApiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistCurrentApiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistCurrentApiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistDebugClassTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistDebugClassTestCases
index b82b432..0c6afaf 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistDebugClassTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistDebugClassTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistDebugClassTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistTestApiTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistTestApiTestCases
index 1110d9c..43850e7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistTestApiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiBlacklistTestApiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiBlacklistTestApiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchDebugClassTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchDebugClassTestCases
index 63df9f8..df4e650 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchDebugClassTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchDebugClassTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchDebugClassTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchWhitelistTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchWhitelistTestCases
index bca40d9..08f6342 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchWhitelistTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchWhitelistTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchWhitelistTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchWildcardTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchWildcardTestCases
index f42ad13..c6ab171 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchWildcardTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHiddenApiKillswitchWildcardTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHiddenApiKillswitchWildcardTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHostTzDataTests b/server/site_tests/cheets_CTS_R/control.x86.CtsHostTzDataTests
index f9e7500..9f80288 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHostTzDataTests
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHostTzDataTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostTzDataTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideNetworkTests b/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideNetworkTests
index 03874b6..e27f7e5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideNetworkTests
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideNetworkTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsHostsideNetworkTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
@@ -28,7 +29,7 @@
         target_module='CtsHostsideNetworkTests',
         target_plan=None,
         bundle='x86',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideNumberBlockingTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideNumberBlockingTestCases
index 482f689..ff7c264 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideNumberBlockingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideNumberBlockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideNumberBlockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideTvTests b/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideTvTests
index ecf3583..6e3fdbd 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideTvTests
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideTvTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideTvTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideWebViewTests b/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideWebViewTests
index 5cf0265..af7d6a6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideWebViewTests
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsHostsideWebViewTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsHostsideWebViewTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsIcuTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsIcuTestCases
index c351a17..69d831f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsIcuTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsIcuTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIcuTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsIdentityTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsIdentityTestCases
index 5869db1..96074d4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsIdentityTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsIdentityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIdentityTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsIkeTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsIkeTestCases
index 4c8f0a8..c0636ad 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsIkeTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsIkeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIkeTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsIncidentHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsIncidentHostTestCases
index e82c696..2127f09 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsIncidentHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsIncidentHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIncidentHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -20,6 +21,7 @@
         'cheets_CTS_R',
         hosts=host_list,
         iterations=1,
+        max_retry=10,
         tag='x86.CtsIncidentHostTestCases',
         test_name='cheets_CTS_R.x86.CtsIncidentHostTestCases',
         run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsIncidentHostTestCases'],
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsIncrementalInstallHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsIncrementalInstallHostTestCases
index efadd26..c38fd55 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsIncrementalInstallHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsIncrementalInstallHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIncrementalInstallHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsInitTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsInitTestCases
index 403f60e..495d759 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsInitTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsInitTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInitTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsInlineMockingTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsInlineMockingTestCases
index 522208c..a222fc2 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsInlineMockingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsInlineMockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInlineMockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsInputMethodServiceHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsInputMethodServiceHostTestCases
index 1e1835e..0b25132 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsInputMethodServiceHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsInputMethodServiceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodServiceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsInputMethodTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsInputMethodTestCases
index b62935a..4bc9588 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsInputMethodTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsInputMethodTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInputMethodTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsInstantAppTests b/server/site_tests/cheets_CTS_R/control.x86.CtsInstantAppTests
index 7aec8b6..3828a02 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsInstantAppTests
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsInstantAppTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsInstantAppTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsIntentSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsIntentSignatureTestCases
index 56af702..791a95f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsIntentSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsIntentSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsIntentSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpSecurityHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpSecurityHostTestCases
index 7bf2d83..cb318ac 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJdwpSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpTestCases
index e62434f..75b7c6d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJdwpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpTunnelHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpTunnelHostTestCases
index fe00435..be338ef7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpTunnelHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJdwpTunnelHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJdwpTunnelHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJniTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJniTestCases
index 7072406..e46e164 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJniTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJniTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJniTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJobSchedulerSharedUidTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJobSchedulerSharedUidTestCases
index 6abd029..4afde61 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJobSchedulerSharedUidTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJobSchedulerSharedUidTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJobSchedulerSharedUidTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJobSchedulerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJobSchedulerTestCases
index b4aa099..8804fb9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJobSchedulerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJobSchedulerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsJobSchedulerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
@@ -28,7 +29,7 @@
         target_module='CtsJobSchedulerTestCases',
         target_plan=None,
         bundle='x86',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiAttachingHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiAttachingHostTestCases
index 919bdd2..1bdb1f9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiAttachingHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiAttachingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiAttachingHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiAttachingTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiAttachingTestCases
index 2866e28..44da759 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiAttachingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiAttachingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiAttachingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRedefineClassesHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRedefineClassesHostTestCases
index 69eec92..c2dc3f5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRedefineClassesHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRedefineClassesHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRedefineClassesHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1900HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1900HostTestCases
index 2ce79e4..e0d199e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1900HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1900HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1900HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1901HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1901HostTestCases
index ccea630..0cf4259 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1901HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1901HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1901HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1902HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1902HostTestCases
index 6288031..9daa589 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1902HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1902HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1902HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1903HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1903HostTestCases
index 8438f0c..8cd0ade 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1903HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1903HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1903HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1904HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1904HostTestCases
index 253e3c7..8c708c8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1904HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1904HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1904HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1906HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1906HostTestCases
index fe8ba21..8395239a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1906HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1906HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1906HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1907HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1907HostTestCases
index b2ce68c..2a49e24 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1907HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1907HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1907HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1908HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1908HostTestCases
index 09969cc..0ea19e3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1908HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1908HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1908HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1909HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1909HostTestCases
index 46e8917..df2bc53 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1909HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1909HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1909HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1910HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1910HostTestCases
index ea5c50f..1daac86 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1910HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1910HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1910HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1911HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1911HostTestCases
index 4b4af14..843a2ae 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1911HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1911HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1911HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1912HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1912HostTestCases
index a0ed2da..a7481cc 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1912HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1912HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1912HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1913HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1913HostTestCases
index 2f22ff7..6bf9220 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1913HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1913HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1913HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1914HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1914HostTestCases
index 44e83b2..3f85271 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1914HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1914HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1914HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1915HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1915HostTestCases
index 28c6a45..8399cde 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1915HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1915HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1915HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1916HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1916HostTestCases
index 169abf5..31800c9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1916HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1916HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1916HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1917HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1917HostTestCases
index 291ec2b..cf65626 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1917HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1917HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1917HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1920HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1920HostTestCases
index 484235a..7b0fa39 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1920HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1920HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1920HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1921HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1921HostTestCases
index 502ddeb..972ad1c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1921HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1921HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1921HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1922HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1922HostTestCases
index 5427f21..94a0537 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1922HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1922HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1922HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1923HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1923HostTestCases
index 31508e0..1018e90 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1923HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1923HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1923HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1924HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1924HostTestCases
index 988a4c1..722a424 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1924HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1924HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1924HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1925HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1925HostTestCases
index 2e63639..dfd21dd 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1925HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1925HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1925HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1926HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1926HostTestCases
index c19e9a6..41d8afa 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1926HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1926HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1926HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1927HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1927HostTestCases
index d3dee83..16debd9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1927HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1927HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1927HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1928HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1928HostTestCases
index a5eae7f..182168d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1928HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1928HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1928HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1930HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1930HostTestCases
index 50972a1..c8659f7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1930HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1930HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1930HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1931HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1931HostTestCases
index cdf1e5f..82849b3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1931HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1931HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1931HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1932HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1932HostTestCases
index c0906c2..a9327eb 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1932HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1932HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1932HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1933HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1933HostTestCases
index 0157afd..8998b3c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1933HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1933HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1933HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1934HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1934HostTestCases
index 2d65644..df941c0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1934HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1934HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1934HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1936HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1936HostTestCases
index 9a1104f..94b8050 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1936HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1936HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1936HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1937HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1937HostTestCases
index bb33803..a87c13b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1937HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1937HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1937HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1939HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1939HostTestCases
index fada16a..ffa3f07 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1939HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1939HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1939HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1941HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1941HostTestCases
index fbc78de..076c08e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1941HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1941HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1941HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1942HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1942HostTestCases
index 5daa29f..ba12b3d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1942HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1942HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1942HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1943HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1943HostTestCases
index 1beb8a9..9396a20 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1943HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1943HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1943HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1953HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1953HostTestCases
index 76b3ac0..2527cda 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1953HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1953HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1953HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1958HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1958HostTestCases
index fedbdfe..7cfd532 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1958HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1958HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1958HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1962HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1962HostTestCases
index 8d25d15..7eeec50 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1962HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1962HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1962HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1967HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1967HostTestCases
index f0bbb6a..53fc8d9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1967HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1967HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1967HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1968HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1968HostTestCases
index d944532..63c24ae 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1968HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1968HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1968HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1969HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1969HostTestCases
index 13f5e1a..39431a0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1969HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1969HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1969HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1970HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1970HostTestCases
index d5fcf4d..f105e72 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1970HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1970HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1970HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1971HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1971HostTestCases
index 9db7541..f3b75b4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1971HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1971HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1971HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1974HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1974HostTestCases
index d63804e..842196b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1974HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1974HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1974HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1975HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1975HostTestCases
index 7706d1a..9d97fdc 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1975HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1975HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1975HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1976HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1976HostTestCases
index fc65458..09555ce 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1976HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1976HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1976HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1977HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1977HostTestCases
index 4b448a4..8919ea1 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1977HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1977HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1977HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1978HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1978HostTestCases
index c97b343..bac1a8f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1978HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1978HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1978HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1979HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1979HostTestCases
index 41543c5..e792346 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1979HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1979HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1979HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1981HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1981HostTestCases
index 98057d4..76290fd 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1981HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1981HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1981HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1982HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1982HostTestCases
index 7a64c05..747cf75 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1982HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1982HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1982HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1983HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1983HostTestCases
index b47acaf..79cbb2c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1983HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1983HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1983HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1984HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1984HostTestCases
index 61f92b8..d5034ba 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1984HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1984HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1984HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1988HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1988HostTestCases
index 03c08c4..4f26351 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1988HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1988HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1988HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1989HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1989HostTestCases
index 786e735..f212e9f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1989HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1989HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1989HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1990HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1990HostTestCases
index e9afded..4552e5e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1990HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1990HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1990HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1991HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1991HostTestCases
index 9aab8b0..e887c16 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1991HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1991HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1991HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1992HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1992HostTestCases
index 582c18e..76fa97e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1992HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1992HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1992HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1994HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1994HostTestCases
index b110e6d..ad77545 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1994HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1994HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1994HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1995HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1995HostTestCases
index 75842bf..b28e96a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1995HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1995HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1995HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1996HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1996HostTestCases
index 60c4d47..bca6773 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1996HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1996HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1996HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1997HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1997HostTestCases
index 337e865..bd6c6d1 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1997HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1997HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1997HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1998HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1998HostTestCases
index 5cb5a58..2d4d6ba 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1998HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1998HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1998HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1999HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1999HostTestCases
index 8ebbce7..fa765e9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1999HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest1999HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest1999HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2001HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2001HostTestCases
index 871c1f9..5ed8371 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2001HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2001HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2001HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2002HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2002HostTestCases
index e3ffc21..98cd6c2 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2002HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2002HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2002HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2003HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2003HostTestCases
index 238ea97..b811104 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2003HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2003HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2003HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2004HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2004HostTestCases
index d77dcf9..2972126 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2004HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2004HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2004HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2005HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2005HostTestCases
index 16236bf..57f7b20 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2005HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2005HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2005HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2006HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2006HostTestCases
index 76edced..83ff5a7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2006HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2006HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2006HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2007HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2007HostTestCases
index b52f8d3..9184bc7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2007HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest2007HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest2007HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest902HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest902HostTestCases
index c6fc610..7f9c4ec 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest902HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest902HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest902HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest903HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest903HostTestCases
index a28b3d6..f3986a3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest903HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest903HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest903HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest904HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest904HostTestCases
index 1b72222..517e23e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest904HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest904HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest904HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest905HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest905HostTestCases
index 7185b73..f587455 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest905HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest905HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest905HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest906HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest906HostTestCases
index 2a15342..c72bf02 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest906HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest906HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest906HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest907HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest907HostTestCases
index 20dafb2..a631beb 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest907HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest907HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest907HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest908HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest908HostTestCases
index 6e63245..52c794d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest908HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest908HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest908HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest910HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest910HostTestCases
index 6c9a33c..1b5b765 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest910HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest910HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest910HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest911HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest911HostTestCases
index 321567a..03f53ee 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest911HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest911HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest911HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest912HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest912HostTestCases
index 3fac3c3..099126b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest912HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest912HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest912HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest913HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest913HostTestCases
index cea8449..ab80abe 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest913HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest913HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest913HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest914HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest914HostTestCases
index 48eaf59..520bf6e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest914HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest914HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest914HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest915HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest915HostTestCases
index fd30e01..ec11b8b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest915HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest915HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest915HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest917HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest917HostTestCases
index d2eafaf..8735437 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest917HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest917HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest917HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest918HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest918HostTestCases
index 114ea9b..3eb7965 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest918HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest918HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest918HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest919HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest919HostTestCases
index 6749a03..07acfa7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest919HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest919HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest919HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest920HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest920HostTestCases
index b57fe53..de1e6ed 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest920HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest920HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest920HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest922HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest922HostTestCases
index 0fdce44..c66015e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest922HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest922HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest922HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest923HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest923HostTestCases
index 1cc7ae1..8e972ac 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest923HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest923HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest923HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest924HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest924HostTestCases
index 22d95b9..e1cebdf 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest924HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest924HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest924HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest926HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest926HostTestCases
index 93f1c78..9535e6e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest926HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest926HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest926HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest927HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest927HostTestCases
index 1e0d632..cabe337 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest927HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest927HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest927HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest928HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest928HostTestCases
index ab384ea..1bed039 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest928HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest928HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest928HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest930HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest930HostTestCases
index f324659..0573294 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest930HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest930HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest930HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest931HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest931HostTestCases
index 24f819e..d72ae5f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest931HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest931HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest931HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest932HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest932HostTestCases
index 4e07275..7db663b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest932HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest932HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest932HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest940HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest940HostTestCases
index a309ee0..574afae 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest940HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest940HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest940HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest942HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest942HostTestCases
index 4a79b87..d977885 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest942HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest942HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest942HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest944HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest944HostTestCases
index a2c4e9b..6467e94 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest944HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest944HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest944HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest945HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest945HostTestCases
index b3073af..a491889 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest945HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest945HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest945HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest947HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest947HostTestCases
index 70d8fef..7cb5cc4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest947HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest947HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest947HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest951HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest951HostTestCases
index 23e0c66..04b864d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest951HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest951HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest951HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest982HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest982HostTestCases
index 23a95a0..ad0b246 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest982HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest982HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest982HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest983HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest983HostTestCases
index 08c5993..03d7840 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest983HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest983HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest983HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest984HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest984HostTestCases
index 993ae7f..2b02437 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest984HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest984HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest984HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest985HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest985HostTestCases
index f3f3061..19e2a82 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest985HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest985HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest985HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest986HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest986HostTestCases
index 7e9bd88..b22b9a1 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest986HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest986HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest986HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest988HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest988HostTestCases
index 7b19384..651a8a5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest988HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest988HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest988HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest989HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest989HostTestCases
index 8eeeef5..ffb5fd8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest989HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest989HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest989HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest990HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest990HostTestCases
index c789647..5412a51 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest990HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest990HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest990HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest991HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest991HostTestCases
index 251fdcb..c2e1adc 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest991HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest991HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest991HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest992HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest992HostTestCases
index c062ca8..7327abe 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest992HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest992HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest992HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest993HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest993HostTestCases
index 24473d0..82a88c6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest993HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest993HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest993HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest994HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest994HostTestCases
index 92f21e1..dea7d21 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest994HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest994HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest994HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest995HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest995HostTestCases
index 1072457..916ba5f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest995HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest995HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest995HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest996HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest996HostTestCases
index e8db52d..179f6a7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest996HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest996HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest996HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest997HostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest997HostTestCases
index d9d9f7b..40bb3a1 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest997HostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiRunTest997HostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiRunTest997HostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiTaggingHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiTaggingHostTestCases
index 8008070..6aa8264 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiTaggingHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiTaggingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiTaggingHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiTrackingHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiTrackingHostTestCases
index 373c5e9..397801d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiTrackingHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsJvmtiTrackingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsJvmtiTrackingHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsKernelConfigTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsKernelConfigTestCases
index d5170c7..6178840 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsKernelConfigTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsKernelConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsKernelConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsKeystoreTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsKeystoreTestCases
index 44893f2..db1a700 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsKeystoreTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsKeystoreTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsKeystoreTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -30,6 +31,6 @@
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
-        timeout=3600)
+        timeout=7200)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLeanbackJankTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLeanbackJankTestCases
index 31e5af6..39aee91 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLeanbackJankTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLeanbackJankTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLeanbackJankTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification20TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification20TestCases
index 34cd103..76e0726 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification20TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification20TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLegacyNotification20TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification27TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification27TestCases
index 39957e2..11172fb 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification27TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification27TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLegacyNotification27TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification28TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification28TestCases
index 94ad027..235be78 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification28TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification28TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLegacyNotification28TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification29TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification29TestCases
index 921350f..32aaa7e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification29TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLegacyNotification29TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLegacyNotification29TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreApiEvolutionTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreApiEvolutionTestCases
index ac7cafe..5f26f0d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreApiEvolutionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreApiEvolutionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreApiEvolutionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreFileIOTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreFileIOTestCases
index 40f5e55..76e746c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreFileIOTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreFileIOTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreFileIOTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreJsr166TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreJsr166TestCases
index d19a58e..a75d6a3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreJsr166TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreJsr166TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreJsr166TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreLegacy22TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreLegacy22TestCases
index 678b2c3..fb2ef63 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreLegacy22TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreLegacy22TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreLegacy22TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreOjTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreOjTestCases
index 44a32c4..b9e3ad0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreOjTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreOjTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreOjTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreOkHttpTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreOkHttpTestCases
index 4c5febc..c4514b6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreOkHttpTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreOkHttpTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreOkHttpTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreTestCases
index 72ffd41..6152eed 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsLibcoreTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
@@ -28,7 +29,7 @@
         target_module='CtsLibcoreTestCases',
         target_plan=None,
         bundle='x86',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreWycheproofBCTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreWycheproofBCTestCases
index cf55f38..8af2868 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreWycheproofBCTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreWycheproofBCTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreWycheproofBCTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreWycheproofConscryptTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreWycheproofConscryptTestCases
index bba38fb..3c30ac4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreWycheproofConscryptTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLibcoreWycheproofConscryptTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLibcoreWycheproofConscryptTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLiblogTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLiblogTestCases
index e4af1bd..2bf71fa 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLiblogTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLiblogTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLiblogTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLocationCoarseTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLocationCoarseTestCases
index ab7e578..a9a79a3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLocationCoarseTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLocationCoarseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationCoarseTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLocationFineTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLocationFineTestCases
index c044e85..107f82f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLocationFineTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLocationFineTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationFineTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLocationGnssTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLocationGnssTestCases
index a5e40a0..03318e9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLocationGnssTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLocationGnssTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationGnssTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLocationNoneTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLocationNoneTestCases
index 5ee92d0..9c85cb3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLocationNoneTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLocationNoneTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationNoneTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLocationPrivilegedTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLocationPrivilegedTestCases
index 75dae4a..6055f48 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLocationPrivilegedTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLocationPrivilegedTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLocationPrivilegedTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsLogdTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsLogdTestCases
index 0861c59..81e041f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsLogdTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsLogdTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsLogdTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMatchFlagTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMatchFlagTestCases
index 59faeb6..b6e0357 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMatchFlagTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMatchFlagTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMatchFlagTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaBitstreamsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaBitstreamsTestCases
index 2c32bb1..5b8e7a3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaBitstreamsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaBitstreamsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaBitstreamsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaHostTestCases
index 533ab97..63eb579 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaParserTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaParserTestCases
index 6411b08..5ee9695 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaParserTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaParserTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaParserTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaPerformanceClassTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaPerformanceClassTestCases
new file mode 100644
index 0000000..daddff2
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaPerformanceClassTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsMediaPerformanceClassTestCases'
+ATTRIBUTES = 'suite:cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsMediaPerformanceClassTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsMediaPerformanceClassTestCases',
+        test_name='cheets_CTS_R.x86.CtsMediaPerformanceClassTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsMediaPerformanceClassTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaPerformanceClassTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaStressTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaStressTestCases
index e6de871..413a803 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaStressTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaStressTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run module CtsMediaStressTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaStressTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaStressTestCases.camera.ctshardware
new file mode 100644
index 0000000..f40485c
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaStressTestCases.camera.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsMediaStressTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsMediaStressTestCases.camera of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='x86.CtsMediaStressTestCases.camera.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsMediaStressTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaStressTestCases android.mediastress.cts.MediaRecorderStressTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaStressTestCases.camera',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaTestCases
index 906479d..5d24ad4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaTestCases.ctshardware
new file mode 100644
index 0000000..fa375bf
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsMediaTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86, noloopback'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 70
+DOC = 'Run module CtsMediaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        needs_push_media=True,
+        tag='x86.CtsMediaTestCases.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsMediaTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsMediaTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsMediaTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=36000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaV2TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaV2TestCases
index f877532..de40cb6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMediaV2TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMediaV2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMediaV2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMidiTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMidiTestCases
index 443a9e8..a57ff5a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMidiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMidiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMidiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMimeMapTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMimeMapTestCases
index 9de0afa..c42f8c0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMimeMapTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMimeMapTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMimeMapTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMockingDebuggableTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMockingDebuggableTestCases
index f8184d8..bec14f4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMockingDebuggableTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMockingDebuggableTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMockingDebuggableTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMockingTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMockingTestCases
index cb4cc7b..9cdcf6b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMockingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMockingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMockingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMonkeyTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMonkeyTestCases
index 7e70428..c783fd5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMonkeyTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMonkeyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMonkeyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMultiUserHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMultiUserHostTestCases
index 8def576..92ef1aa 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMultiUserHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMultiUserHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMultiUserHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsMultiUserTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsMultiUserTestCases
index b563355..927a88a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsMultiUserTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsMultiUserTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsMultiUserTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNNAPIBenchmarkTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNNAPIBenchmarkTestCases
index 371f08b..8b36239 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNNAPIBenchmarkTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNNAPIBenchmarkTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNNAPIBenchmarkTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNNAPITestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNNAPITestCases
index 3f066ec..9b2443a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNNAPITestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNNAPITestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNNAPITestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeEncryptionTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeEncryptionTestCases
index e8ffb85..28ef6a3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeEncryptionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeEncryptionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeEncryptionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeHardwareTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeHardwareTestCases
index c445856..e76075b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeHardwareTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeHardwareTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeHardwareTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaAAudioTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaAAudioTestCases
index c4f18a7..692f369 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaAAudioTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaAAudioTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaAAudioTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaAAudioTestCases.ctshardware
new file mode 100644
index 0000000..e0e4829
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaAAudioTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsNativeMediaAAudioTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsNativeMediaAAudioTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsNativeMediaAAudioTestCases.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsNativeMediaAAudioTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNativeMediaAAudioTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNativeMediaAAudioTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaMetricsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaMetricsTestCases
index e52b5e1..d2a7d15 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaMetricsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaMetricsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaMetricsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaSlTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaSlTestCases
index 77ca559..461de83 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaSlTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaSlTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaSlTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaXaTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaXaTestCases
index dcceb2a..7eb7320 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaXaTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMediaXaTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMediaXaTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMidiTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMidiTestCases
index 3818beb..7e0e9ea 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMidiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeMidiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeMidiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeNetDnsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeNetDnsTestCases
index ab33a29..ef2e9c0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeNetDnsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeNetDnsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeNetDnsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeNetTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeNetTestCases
index 6f12691..5bfeb25 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNativeNetTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNativeNetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNativeNetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNdefTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNdefTestCases
index 90e14fa..9d6bcf7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNdefTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNdefTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNdefTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNdkBinderTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNdkBinderTestCases
index 1df35df..8d44386 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNdkBinderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNdkBinderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNdkBinderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetApi23TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetApi23TestCases
index b4dbfc8..ed9dedf 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetApi23TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetApi23TestCases
@@ -12,6 +12,8 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
 DOC = 'Run module CtsNetApi23TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -27,6 +29,7 @@
         target_module='CtsNetApi23TestCases',
         target_plan=None,
         bundle='x86',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigAttributeTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigAttributeTestCases
index 8238bee..ff79629 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigAttributeTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigAttributeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigAttributeTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDebugDisabledTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDebugDisabledTestCases
index 3a81bfb..1bceb90 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDebugDisabledTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDebugDisabledTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDebugDisabledTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDebugEnabledTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDebugEnabledTestCases
index de902bc..eae4848 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDebugEnabledTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDebugEnabledTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDebugEnabledTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDomainConfigTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDomainConfigTestCases
index 1ff4bf5..0e5eaf4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDomainConfigTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigBasicDomainConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigBasicDomainConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigCleartextTrafficTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigCleartextTrafficTestCases
index 5f2a614..e8c1a38 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigCleartextTrafficTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigCleartextTrafficTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigCleartextTrafficTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigDownloadManagerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigDownloadManagerTestCases
index 6d9ac2f..117bccb 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigDownloadManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigDownloadManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigDownloadManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigInvalidPinTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigInvalidPinTestCases
index e5f53c4..9eeda3c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigInvalidPinTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigInvalidPinTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigInvalidPinTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigNestedDomainConfigTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigNestedDomainConfigTestCases
index dba930f..c43b73a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigNestedDomainConfigTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigNestedDomainConfigTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigNestedDomainConfigTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigPrePCleartextTrafficTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigPrePCleartextTrafficTestCases
index e6a9c87..02fd61c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigPrePCleartextTrafficTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigPrePCleartextTrafficTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigPrePCleartextTrafficTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigResourcesSrcTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigResourcesSrcTestCases
index 08d8a45..09cc5a6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigResourcesSrcTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecConfigResourcesSrcTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecConfigResourcesSrcTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
index 72412bd..f082861 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficFalseTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficFalseTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
index 0f1b512..e328724 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficTrueTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficTrueTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
index 88c5d89..0dde326 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetSecPolicyUsesCleartextTrafficUnspecifiedTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCases
index defc971..7d865a6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
@@ -28,7 +29,7 @@
         target_module='CtsNetTestCases',
         target_plan=None,
         bundle='x86',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCases.ctshardware
new file mode 100644
index 0000000..c3c807d
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsNetTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsNetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsNetTestCases.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsNetTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsNetTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsNetTestCases',
+        target_plan=None,
+        bundle='x86',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesInternetPermission b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesInternetPermission
index 4799fca..d282a1c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesInternetPermission
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesInternetPermission
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesInternetPermission of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesLegacyApi22 b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesLegacyApi22
index e67b39c..db710e4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesLegacyApi22
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesLegacyApi22
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesLegacyApi22 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesLegacyPermission22 b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesLegacyPermission22
index a29c11f..9074dd7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesLegacyPermission22
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesLegacyPermission22
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesLegacyPermission22 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesUpdateStatsPermission b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesUpdateStatsPermission
index 0e03bdb..785087c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesUpdateStatsPermission
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNetTestCasesUpdateStatsPermission
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNetTestCasesUpdateStatsPermission of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNfcTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNfcTestCases
index 1db1c4c..948fadf 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNfcTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNfcTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNfcTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNoPermissionTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsNoPermissionTestCases
index 7d4dc8b..a746b0e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNoPermissionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNoPermissionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNoPermissionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsNoPermissionTestCases25 b/server/site_tests/cheets_CTS_R/control.x86.CtsNoPermissionTestCases25
index 2d8ea5a..e05388b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsNoPermissionTestCases25
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsNoPermissionTestCases25
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsNoPermissionTestCases25 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsOmapiTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsOmapiTestCases
index 3587bf3..a1326d7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsOmapiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsOmapiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOmapiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGLTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGLTestCases
index a290654..a64c5d0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGLTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGLTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGLTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGlPerf2TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGlPerf2TestCases
index 7896eeb..837d6f7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGlPerf2TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGlPerf2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGlPerf2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGlPerfTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGlPerfTestCases
index 1413ddf..aaf20cb 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGlPerfTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsOpenGlPerfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOpenGlPerfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsOsHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsOsHostTestCases
index c5c4b3e..b76f8b6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsOsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsOsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsOsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsOsTestCases
index cc6d0d6..4503131 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsOsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsOsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsOsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallAppOpDefaultTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallAppOpDefaultTestCases
index 5d88688..813e217 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallAppOpDefaultTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallAppOpDefaultTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageInstallAppOpDefaultTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallAppOpDeniedTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallAppOpDeniedTestCases
index 463f766..9a302353 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallAppOpDeniedTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallAppOpDeniedTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageInstallAppOpDeniedTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallTestCases
index b64c4cb..23e8a2d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageInstallTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallerTapjackingTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallerTapjackingTestCases
index c551c91..24576fb 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallerTapjackingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageInstallerTapjackingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageInstallerTapjackingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageUninstallTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageUninstallTestCases
index 766697c..b61ea1b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageUninstallTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageUninstallTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageUninstallTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageWatchdogTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageWatchdogTestCases
index cacb5f5..1c211c2 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPackageWatchdogTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPackageWatchdogTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPackageWatchdogTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPdfTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPdfTestCases
index 1d8b01a..73f132c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPdfTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPdfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPdfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPerfettoTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPerfettoTestCases
index 88e70e3..94e9d83 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPerfettoTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPerfettoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPerfettoTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsPerfettoTestCases.ctshardware
new file mode 100644
index 0000000..e74c9fb
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPerfettoTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsPerfettoTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPerfettoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsPerfettoTestCases.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsPerfettoTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPerfettoTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPerfettoTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPermission2TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPermission2TestCases
index 1bed521..2883988 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPermission2TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPermission2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermission2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPermission3TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPermission3TestCases
index 333a26b..232487d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPermission3TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPermission3TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermission3TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCases
index 4063bad..9cc88ac 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermissionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCases.camera.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCases.camera.ctshardware
new file mode 100644
index 0000000..de758b8
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCases.camera.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsPermissionTestCases.camera.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsPermissionTestCases.camera of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsPermissionTestCases.camera.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsPermissionTestCases.camera.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsPermissionTestCases android.permission.cts.CameraPermissionTest', '--include-filter', 'CtsPermissionTestCases android.permission.cts.Camera2PermissionTest'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsPermissionTestCases.camera',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCasesSdk28 b/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCasesSdk28
index 4e2a92f..5cf6a06 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCasesSdk28
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCasesSdk28
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermissionTestCasesSdk28 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCasesTelephony b/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCasesTelephony
index b5083d5..2e683aa 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCasesTelephony
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPermissionTestCasesTelephony
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPermissionTestCasesTelephony of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPreferenceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPreferenceTestCases
index 0a1db05..21d94e4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPreferenceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPreferenceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPreferenceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsPrintTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsPrintTestCases
index 54e9f92..d66efc6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsPrintTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsPrintTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsPrintTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -20,6 +21,7 @@
         'cheets_CTS_R',
         hosts=host_list,
         iterations=1,
+        use_helpers=True,
         tag='x86.CtsPrintTestCases',
         test_name='cheets_CTS_R.x86.CtsPrintTestCases',
         run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsPrintTestCases'],
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsProtoTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsProtoTestCases
index 5465072..87add72 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsProtoTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsProtoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsProtoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsProviderTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsProviderTestCases
index 7ddf04f..5df8f8c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsProviderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsProviderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsProviderUiTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsProviderUiTestCases
index a558895..12af5d5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsProviderUiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsProviderUiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsProviderUiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsQuickAccessWalletTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsQuickAccessWalletTestCases
index 380f8da..ca1397d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsQuickAccessWalletTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsQuickAccessWalletTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsQuickAccessWalletTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsRenderscriptLegacyTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsRenderscriptLegacyTestCases
index 3e57946..2178217 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsRenderscriptLegacyTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsRenderscriptLegacyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRenderscriptLegacyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsRenderscriptTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsRenderscriptTestCases
index d9146cc..a4f3a65 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsRenderscriptTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsRenderscriptTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRenderscriptTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsResolverServiceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsResolverServiceTestCases
index 745f7b0..cf1a636 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsResolverServiceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsResolverServiceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsResolverServiceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsResourcesLoaderTests b/server/site_tests/cheets_CTS_R/control.x86.CtsResourcesLoaderTests
index 623772e..8742ad8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsResourcesLoaderTests
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsResourcesLoaderTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsResourcesLoaderTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsRoleTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsRoleTestCases
index 3031933..0deeba0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsRoleTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsRoleTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRoleTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsRollbackManagerHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsRollbackManagerHostTestCases
index ea7e44c..5e89965 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsRollbackManagerHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsRollbackManagerHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRollbackManagerHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsRsBlasTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsRsBlasTestCases
index e17893a..e24a8b8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsRsBlasTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsRsBlasTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRsBlasTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsRsCppTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsRsCppTestCases
index 1280986..4601cc5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsRsCppTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsRsCppTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsRsCppTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSampleDeviceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSampleDeviceTestCases
index 3bc2c99..bd368ed 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSampleDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSampleDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSampleHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSampleHostTestCases
index 6f6cc8a..487bfc8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSampleHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSampleHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSampleHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSaxTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSaxTestCases
index 8b91976..a15d481 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSaxTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSaxTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSaxTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsScopedStorageHostTest b/server/site_tests/cheets_CTS_R/control.x86.CtsScopedStorageHostTest
index c124443..f48408e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsScopedStorageHostTest
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsScopedStorageHostTest
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsScopedStorageHostTest of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSdkExtensionsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSdkExtensionsTestCases
index 39f7e0d..2adefff 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSdkExtensionsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSdkExtensionsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSdkExtensionsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSeccompHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSeccompHostTestCases
index 66e569d..ebc41c3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSeccompHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSeccompHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSeccompHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases1 b/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases1
index dedfead..42636db 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases1
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases1 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases2 b/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases2
index e395a76..7128a07 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases2
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases3 b/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases3
index 5659a58..53f10d0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases3
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSecureElementAccessControlTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSecureFrpInstallTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSecureFrpInstallTestCases
index 6ac64f0..80a3351 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSecureFrpInstallTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSecureFrpInstallTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecureFrpInstallTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityBulletinHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityBulletinHostTestCases
index 376e0de..a4b5ca1 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityBulletinHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityBulletinHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecurityBulletinHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityHostTestCases
index a9cefbd..756e81e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSecurityHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityTestCases
index a58f011..906d4d6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSecurityTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LONG'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run module CtsSecurityTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxEphemeralTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxEphemeralTestCases
index e3a5c08..c978f11 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxEphemeralTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxEphemeralTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxEphemeralTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk25TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk25TestCases
index d2fbc42..431a774 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk25TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk25TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk27TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk27TestCases
index da85afe..fdb6331 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk27TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk27TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk27TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk28TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk28TestCases
index 6f9aa87..c4dd06c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk28TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk28TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk28TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk29TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk29TestCases
index 5223e33..6214399 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk29TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdk29TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdk29TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdkCurrentTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdkCurrentTestCases
index 64a8844..9c24cea 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdkCurrentTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSelinuxTargetSdkCurrentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSelinuxTargetSdkCurrentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSensorTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSensorTestCases
index b89b5ef..a08ffb9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSensorTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSensorTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSensorTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsSensorTestCases.ctshardware
new file mode 100644
index 0000000..6f8ec34
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSensorTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsSensorTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSensorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsSensorTestCases.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsSensorTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSensorTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSensorTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSettingsHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSettingsHostTestCases
index 7996d9b..23c0001 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSettingsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSettingsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSettingsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSettingsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSettingsTestCases
new file mode 100644
index 0000000..b90c7c3
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSettingsTestCases
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsSettingsTestCases'
+ATTRIBUTES = 'suite:cts'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSettingsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsSettingsTestCases',
+        test_name='cheets_CTS_R.x86.CtsSettingsTestCases',
+        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsSettingsTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSettingsTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSharedLibsApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSharedLibsApiSignatureTestCases
index 7366ac4..8e492e9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSharedLibsApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSharedLibsApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSharedLibsApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSharesheetTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSharesheetTestCases
index 98b65ea..f0f0b8f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSharesheetTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSharesheetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSharesheetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutHostTestCases
index 9b9c335..5df9dc8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher1 b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher1
index 082d0a3..4a7c2d1 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher1
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerLauncher1 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher2 b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher2
index 2bd19f0..2605ba7 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher2
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerLauncher2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher3 b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher3
index 5935b12..91a162a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher3
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerLauncher3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher4 b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher4
index a427d9c..432d120 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher4
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerLauncher4
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerLauncher4 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage1 b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage1
index 61b6e9b..226a87a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage1
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerPackage1 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage2 b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage2
index 49d325d..f523a30 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage2
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerPackage2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage3 b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage3
index 37be526..c8d5833 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage3
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerPackage3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage4 b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage4
index efbefd7..8c3aa38 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage4
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerPackage4
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerPackage4 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerTestCases
index 745c525..ab96e2f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerThrottlingTest b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerThrottlingTest
index c1c86d0..16665c1 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerThrottlingTest
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsShortcutManagerThrottlingTest
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsShortcutManagerThrottlingTest of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSignedConfigHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSignedConfigHostTestCases
index eeaa479..dd0448f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSignedConfigHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSignedConfigHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSignedConfigHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSimRestrictedApisTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSimRestrictedApisTestCases
index e1521a6..f68ae2d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSimRestrictedApisTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSimRestrictedApisTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimRestrictedApisTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSimpleCpuTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSimpleCpuTestCases
index a01133e..8666766 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSimpleCpuTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSimpleCpuTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimpleCpuTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSimpleperfTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSimpleperfTestCases
index e75b735..9aced0f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSimpleperfTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSimpleperfTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSimpleperfTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSkQPTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSkQPTestCases
index 2d6f6b8..d59559d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSkQPTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSkQPTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSkQPTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSliceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSliceTestCases
index d75ea2d..53e6467 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSliceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSliceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSliceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSoundTriggerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSoundTriggerTestCases
index f889538..9dbcaab 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSoundTriggerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSoundTriggerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSoundTriggerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSpeechTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSpeechTestCases
index 21e6c8b..11e4c94 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSpeechTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSpeechTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSpeechTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsStagedInstallHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsStagedInstallHostTestCases
index d9374fd..98e8c7c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsStagedInstallHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsStagedInstallHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsStagedInstallHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsStatsdHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsStatsdHostTestCases
index 39a2766..2605d12 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsStatsdHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsStatsdHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsStatsdHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
@@ -28,9 +29,10 @@
         target_module='CtsStatsdHostTestCases',
         target_plan=None,
         bundle='x86',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
+        use_old_adb=True,
         warn_on_test_retry=False,
         timeout=7200)
 
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsStrictJavaPackagesTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsStrictJavaPackagesTestCases
index aeb0c9d..5b144db 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsStrictJavaPackagesTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsStrictJavaPackagesTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsStrictJavaPackagesTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSuspendAppsPermissionTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSuspendAppsPermissionTestCases
index 2dedfce..89a5580 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSuspendAppsPermissionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSuspendAppsPermissionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSuspendAppsPermissionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSuspendAppsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSuspendAppsTestCases
index d264dc0..9924609 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSuspendAppsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSuspendAppsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSuspendAppsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSustainedPerformanceHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSustainedPerformanceHostTestCases
index 1a4be7a..ec9f041 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSustainedPerformanceHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSustainedPerformanceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSustainedPerformanceHostTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsSustainedPerformanceHostTestCases.ctshardware
new file mode 100644
index 0000000..aadc69b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSustainedPerformanceHostTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsSustainedPerformanceHostTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsSustainedPerformanceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsSustainedPerformanceHostTestCases.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsSustainedPerformanceHostTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsSustainedPerformanceHostTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsSustainedPerformanceHostTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSyncAccountAccessOtherCertTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSyncAccountAccessOtherCertTestCases
index 7b134eb..3ba61f6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSyncAccountAccessOtherCertTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSyncAccountAccessOtherCertTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncAccountAccessOtherCertTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSyncContentHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSyncContentHostTestCases
index 4a4167a..a938974 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSyncContentHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSyncContentHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncContentHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSyncManagerTestsCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSyncManagerTestsCases
index 5c0a5a2..91a08e9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSyncManagerTestsCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSyncManagerTestsCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSyncManagerTestsCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSystemApiAnnotationTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSystemApiAnnotationTestCases
index f7bb896..818d5b8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSystemApiAnnotationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSystemApiAnnotationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemApiAnnotationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSystemApiSignatureTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSystemApiSignatureTestCases
index 6e195ae..34deee8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSystemApiSignatureTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSystemApiSignatureTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemApiSignatureTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSystemIntentTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSystemIntentTestCases
index ce0eb0f..c52b396 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSystemIntentTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSystemIntentTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemIntentTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSystemUiHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSystemUiHostTestCases
index d7f1981..530f92d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSystemUiHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSystemUiHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemUiHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsSystemUiTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsSystemUiTestCases
index 9db82e0..518a51f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsSystemUiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsSystemUiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsSystemUiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTaggingHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTaggingHostTestCases
index 0e45043..2eee308 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTaggingHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTaggingHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTaggingHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases
index aae1582..520c394 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases2 b/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases2
index 39cdaf0..de94a35 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases2
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases3 b/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases3
index dd8430e..084ab33 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases3
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelecomTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelecomTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephony2TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephony2TestCases
index d6e3dfd..87abf3e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephony2TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephony2TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephony2TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephony3TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephony3TestCases
index ee60698..7812edf 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephony3TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephony3TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephony3TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyHostCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyHostCases
index 8a3f099..e930a68 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyHostCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyHostCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyHostCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyProviderHostCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyProviderHostCases
index 5e8a52e..07f5cf0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyProviderHostCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyProviderHostCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyProviderHostCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyProviderTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyProviderTestCases
index f69d34a..4c33a6d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyProviderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyProviderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonySdk28TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonySdk28TestCases
index ce6b589..11b1da4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonySdk28TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonySdk28TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonySdk28TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyTestCases
index 3bd2668..fb30866 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyTestCasesPermissionReadPhoneState b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyTestCasesPermissionReadPhoneState
index f5f2327..3e5457e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyTestCasesPermissionReadPhoneState
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTelephonyTestCasesPermissionReadPhoneState
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTelephonyTestCasesPermissionReadPhoneState of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTestHarnessModeTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTestHarnessModeTestCases
index 2ae174c..b480a9f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTestHarnessModeTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTestHarnessModeTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTestHarnessModeTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTetheringTest b/server/site_tests/cheets_CTS_R/control.x86.CtsTetheringTest
index 3d85bc8..91691c4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTetheringTest
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTetheringTest
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTetheringTest of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTextClassifierTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTextClassifierTestCases
index 102297b..df3622c 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTextClassifierTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTextClassifierTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTextClassifierTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTextTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTextTestCases
index d82166e..524394e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTextTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTextTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTextTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTfliteNnapiDelegateTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTfliteNnapiDelegateTestCases
index 270809e..02d43b0 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTfliteNnapiDelegateTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTfliteNnapiDelegateTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTfliteNnapiDelegateTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsThemeDeviceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsThemeDeviceTestCases
index 28abecb..c105f4b 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsThemeDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsThemeDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsThemeDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsThemeHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsThemeHostTestCases
index e7981b0..66eea6d 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsThemeHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsThemeHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsThemeHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsThermalTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsThermalTestCases
index e6c99ce..689be87 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsThermalTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsThermalTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsThermalTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsToastLegacyTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsToastLegacyTestCases
index af7b6dd..1ba798f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsToastLegacyTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsToastLegacyTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsToastLegacyTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsToastTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsToastTestCases
index d74b04d..d8f8ba8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsToastTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsToastTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsToastTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTransitionTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTransitionTestCases
index bea1fb5..1ec5dde 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTransitionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTransitionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTransitionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTrustedVoiceHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTrustedVoiceHostTestCases
index d610725..bd579b3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTrustedVoiceHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTrustedVoiceHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTrustedVoiceHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTvProviderTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTvProviderTestCases
index 16f21b3..3b71da9 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTvProviderTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTvProviderTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTvProviderTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsTvTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsTvTestCases
index 79a3b50..a23dcf3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsTvTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsTvTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsTvTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUiAutomationTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsUiAutomationTestCases
index c4620e6..95f0a48 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsUiAutomationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsUiAutomationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiAutomationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUiRenderingTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsUiRenderingTestCases
index d1eab83..387660e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsUiRenderingTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsUiRenderingTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiRenderingTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUiRenderingTestCases27 b/server/site_tests/cheets_CTS_R/control.x86.CtsUiRenderingTestCases27
index 48849fb..7afda4e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsUiRenderingTestCases27
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsUiRenderingTestCases27
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUiRenderingTestCases27 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUidIsolationTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsUidIsolationTestCases
index 2103ef6..13bf8d4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsUidIsolationTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsUidIsolationTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUidIsolationTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUsageStatsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsUsageStatsTestCases
index 632da4e..eff81ab 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsUsageStatsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsUsageStatsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 PRIORITY = 50
 DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
@@ -28,7 +29,7 @@
         target_module='CtsUsageStatsTestCases',
         target_plan=None,
         bundle='x86',
-        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), '/usr/local/autotest/cros/scripts/reorder-services-moblab.sh wifi'],
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUsageStatsTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsUsageStatsTestCases.ctshardware
new file mode 100644
index 0000000..02510e7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsUsageStatsTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsUsageStatsTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsUsageStatsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsUsageStatsTestCases.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsUsageStatsTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsUsageStatsTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsUsageStatsTestCases',
+        target_plan=None,
+        bundle='x86',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUsbManagerTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsUsbManagerTestCases
index ea1e84c..66ea558 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsUsbManagerTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsUsbManagerTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsbManagerTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUsbTests b/server/site_tests/cheets_CTS_R/control.x86.CtsUsbTests
index 1c6fb38..4fc6b77 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsUsbTests
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsUsbTests
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsbTests of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUserspaceRebootHostSideTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsUserspaceRebootHostSideTestCases
deleted file mode 100644
index ec95f4e..0000000
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsUserspaceRebootHostSideTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.x86.CtsUserspaceRebootHostSideTestCases'
-ATTRIBUTES = 'suite:cts'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module CtsUserspaceRebootHostSideTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='x86.CtsUserspaceRebootHostSideTestCases',
-        test_name='cheets_CTS_R.x86.CtsUserspaceRebootHostSideTestCases',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsUserspaceRebootHostSideTestCases'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='CtsUserspaceRebootHostSideTestCases',
-        target_plan=None,
-        bundle='x86',
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUsesLibraryHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsUsesLibraryHostTestCases
index d3931c4..18a1bc6 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsUsesLibraryHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsUsesLibraryHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUsesLibraryHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsUtilTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsUtilTestCases
index 6285c5c..323b234 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsUtilTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsUtilTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsUtilTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsVideoTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsVideoTestCases
index f8a7fd8..4cdec70 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsVideoTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsVideoTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVideoTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsViewInspectorAnnotationProcessorTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsViewInspectorAnnotationProcessorTestCases
index 771c47c..cc4a201 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsViewInspectorAnnotationProcessorTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsViewInspectorAnnotationProcessorTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsViewInspectorAnnotationProcessorTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCases
index 5b9a414..983c266 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -30,6 +31,6 @@
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
-        timeout=3600)
+        timeout=9000)
 
 parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCases.ctshardware
new file mode 100644
index 0000000..8a8c539
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCases.ctshardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsViewTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module CtsViewTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsViewTestCases.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsViewTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsViewTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsViewTestCases',
+        target_plan=None,
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=9000)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCasesSdk28 b/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCasesSdk28
index 24e1436..8b865fb 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCasesSdk28
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsViewTestCasesSdk28
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsViewTestCasesSdk28 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsVoiceInteractionTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsVoiceInteractionTestCases
index 2f31ca9..29e3034 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsVoiceInteractionTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsVoiceInteractionTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVoiceInteractionTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsVoiceSettingsTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsVoiceSettingsTestCases
index 2395dec..db4206a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsVoiceSettingsTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsVoiceSettingsTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVoiceSettingsTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsVrTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsVrTestCases
index 9261e30..d58d56a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsVrTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsVrTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsVrTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWebkitTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWebkitTestCases
index 2c92752..dc48fb3 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWebkitTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWebkitTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWebkitTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWidgetTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWidgetTestCases
index 887b7e3..99906ca 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWidgetTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWidgetTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWidgetTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWidgetTestCases29 b/server/site_tests/cheets_CTS_R/control.x86.CtsWidgetTestCases29
index 87a33ce..e3cf6d2 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWidgetTestCases29
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWidgetTestCases29
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWidgetTestCases29 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWifiBroadcastsHostTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWifiBroadcastsHostTestCases
index 2dac2c1..fdafe18 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWifiBroadcastsHostTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWifiBroadcastsHostTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWifiBroadcastsHostTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWifiTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWifiTestCases
index 9962384..ad74997 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWifiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWifiTestCases
@@ -12,6 +12,8 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
 DOC = 'Run module CtsWifiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -27,6 +29,7 @@
         target_module='CtsWifiTestCases',
         target_plan=None,
         bundle='x86',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
         retry_manual_tests=True,
         use_jdk9=True,
         warn_on_test_retry=False,
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWifiTestCases.ctshardware b/server/site_tests/cheets_CTS_R/control.x86.CtsWifiTestCases.ctshardware
new file mode 100644
index 0000000..b6f3170
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWifiTestCases.ctshardware
@@ -0,0 +1,38 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.CtsWifiTestCases.ctshardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+PRIORITY = 50
+DOC = 'Run module CtsWifiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.CtsWifiTestCases.ctshardware',
+        test_name='cheets_CTS_R.x86.CtsWifiTestCases.ctshardware',
+        run_template=['run', 'commandAndExit', 'cts', '--include-filter', 'CtsWifiTestCases'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='CtsWifiTestCases',
+        target_plan=None,
+        bundle='x86',
+        precondition_commands=['/usr/local/autotest/cros/scripts/wifi connect %s %s' % (ssid, wifipass), 'android-sh -c \'dumpsys wifi transports -eth\''],
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerDeviceTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerDeviceTestCases
index 785f5a8..1a2817e 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerDeviceTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerDeviceTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerDeviceTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
@@ -20,6 +21,7 @@
         'cheets_CTS_R',
         hosts=host_list,
         iterations=1,
+        max_retry=10,
         tag='x86.CtsWindowManagerDeviceTestCases',
         test_name='cheets_CTS_R.x86.CtsWindowManagerDeviceTestCases',
         run_template=['run', 'commandAndExit', 'cts', '--module', 'CtsWindowManagerDeviceTestCases'],
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerJetpackTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerJetpackTestCases
index bc8fb5e..c137ccd 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerJetpackTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerJetpackTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerJetpackTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk25TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk25TestCases
index e8d8f9a..808632f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk25TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk25TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerSdk25TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk28TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk28TestCases
index 8ed43c7..abc29c5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk28TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk28TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerSdk28TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk29TestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk29TestCases
index 38283f1..a3a09bf 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk29TestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWindowManagerSdk29TestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWindowManagerSdk29TestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWrapNoWrapTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWrapNoWrapTestCases
index b985334..361820a 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWrapNoWrapTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWrapNoWrapTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapNoWrapTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapDebugMallocDebugTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapDebugMallocDebugTestCases
index baa287e..c85ea47 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapDebugMallocDebugTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapDebugMallocDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapDebugMallocDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapDebugTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapDebugTestCases
index 2571546..5a95fcf 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapDebugTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapNoDebugTestCases b/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapNoDebugTestCases
index 685fe75..c866af8 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapNoDebugTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.CtsWrapWrapNoDebugTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module CtsWrapWrapNoDebugTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.LegacyStorageTest b/server/site_tests/cheets_CTS_R/control.x86.LegacyStorageTest
index 41f929e..064c7f5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.LegacyStorageTest
+++ b/server/site_tests/cheets_CTS_R/control.x86.LegacyStorageTest
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module LegacyStorageTest of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.ScopedStorageTest b/server/site_tests/cheets_CTS_R/control.x86.ScopedStorageTest
index 042b5a0..aa36d38 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.ScopedStorageTest
+++ b/server/site_tests/cheets_CTS_R/control.x86.ScopedStorageTest
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module ScopedStorageTest of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.cts-platform-version-check b/server/site_tests/cheets_CTS_R/control.x86.cts-platform-version-check
deleted file mode 100644
index f70a74c..0000000
--- a/server/site_tests/cheets_CTS_R/control.x86.cts-platform-version-check
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.x86.cts-platform-version-check'
-ATTRIBUTES = 'suite:cts'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module cts-platform-version-check of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='x86.cts-platform-version-check',
-        test_name='cheets_CTS_R.x86.cts-platform-version-check',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'cts-platform-version-check'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='cts-platform-version-check',
-        target_plan=None,
-        bundle='x86',
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.cts-system-all.api b/server/site_tests/cheets_CTS_R/control.x86.cts-system-all.api
deleted file mode 100644
index e68e56a..0000000
--- a/server/site_tests/cheets_CTS_R/control.x86.cts-system-all.api
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_CTS_R.x86.cts-system-all.api'
-ATTRIBUTES = 'suite:cts'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module cts-system-all.api of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_CTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='x86.cts-system-all.api',
-        test_name='cheets_CTS_R.x86.cts-system-all.api',
-        run_template=['run', 'commandAndExit', 'cts', '--module', 'cts-system-all.api'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='cts-system-all.api',
-        target_plan=None,
-        bundle='x86',
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.signed-CtsOmapiTestCases b/server/site_tests/cheets_CTS_R/control.x86.signed-CtsOmapiTestCases
index 77f590a..ee3be19 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.signed-CtsOmapiTestCases
+++ b/server/site_tests/cheets_CTS_R/control.x86.signed-CtsOmapiTestCases
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsOmapiTestCases of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases1 b/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases1
index 8fee0d2..1f51f52 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases1
+++ b/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases1
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases1 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases2 b/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases2
index 0e2411a..1b0b7d5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases2
+++ b/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases2
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases2 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases3 b/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases3
index 80d63b8..4dfb3d5 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases3
+++ b/server/site_tests/cheets_CTS_R/control.x86.signed-CtsSecureElementAccessControlTestCases3
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module signed-CtsSecureElementAccessControlTestCases3 of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.tradefed-run-collect-tests-only b/server/site_tests/cheets_CTS_R/control.x86.tradefed-run-collect-tests-only
index abff915..a5fbb7f 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.tradefed-run-collect-tests-only
+++ b/server/site_tests/cheets_CTS_R/control.x86.tradefed-run-collect-tests-only
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'LENGTHY'
 MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
 PRIORITY = 70
 DOC = 'Run all of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
diff --git a/server/site_tests/cheets_CTS_R/control.x86.tradefed-run-collect-tests-only-hardware b/server/site_tests/cheets_CTS_R/control.x86.tradefed-run-collect-tests-only-hardware
new file mode 100644
index 0000000..a158fca
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.tradefed-run-collect-tests-only-hardware
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.tradefed-run-collect-tests-only-hardware'
+ATTRIBUTES = 'suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run module tradefed-run-collect-tests-only-hardware of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        tag='x86.tradefed-run-collect-tests-only-hardware',
+        test_name='cheets_CTS_R.x86.tradefed-run-collect-tests-only-hardware',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--subplan', 'cts-hardware', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='tradefed-run-collect-tests-only-hardware',
+        target_plan='cts-hardware',
+        bundle='x86',
+        retry_manual_tests=True,
+        use_jdk9=True,
+        warn_on_test_retry=False,
+        timeout=3600)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.vm-tests-tf b/server/site_tests/cheets_CTS_R/control.x86.vm-tests-tf
index 580e361..a79a6e4 100644
--- a/server/site_tests/cheets_CTS_R/control.x86.vm-tests-tf
+++ b/server/site_tests/cheets_CTS_R/control.x86.vm-tests-tf
@@ -12,6 +12,7 @@
 TEST_TYPE = 'server'
 TIME = 'MEDIUM'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 DOC = 'Run module vm-tests-tf of the Android Compatibility Test Suite (CTS) using x86 ABI in the ARC++ container.'
 
 def run_TS(machine):
diff --git a/server/site_tests/cheets_CTS_R/control.x86.waivers b/server/site_tests/cheets_CTS_R/control.x86.waivers
new file mode 100644
index 0000000..d5e03b7
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.waivers
@@ -0,0 +1,39 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is not auto-generated. Don't delete it.
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.waivers'
+ATTRIBUTES = 'suite:cts, suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
+DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using x86 ABI in ARC.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=10,
+        tag='x86.waivers',
+        test_name='cheets_CTS_R.x86.waivers',
+        run_template=['run', 'commandAndExit', 'cts', '--subplan', 'waivers'],
+        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+        target_module='cts-dev',
+        target_plan='waivers',
+        load_waivers=False,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        uri='DEV_MOBLAB',
+        use_jdk9=True,
+        timeout=7200)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/control.x86.waivers-collect-tests-only b/server/site_tests/cheets_CTS_R/control.x86.waivers-collect-tests-only
new file mode 100644
index 0000000..e83bf25
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/control.x86.waivers-collect-tests-only
@@ -0,0 +1,40 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is not auto-generated. Don't delete it.
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_CTS_R.x86.waivers-collect-tests-only'
+ATTRIBUTES = 'suite:cts, suite:cts-hardware'
+DEPENDENCIES = 'arc, cts_abi_x86'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'MEDIUM'
+MAX_RESULT_SIZE_KB = 1024000
+PY_VERSION = 3
+PRIORITY = 70
+DOC = 'Run waived tests of the Android Compatibility Test Suite (CTS) using x86 ABI in ARC.'
+
+def run_TS(machine):
+    host_list = [hosts.create_host(machine)]
+    job.run_test(
+        'cheets_CTS_R',
+        hosts=host_list,
+        iterations=1,
+        max_retry=0,
+        tag='x86.waivers-collect-tests-only',
+        test_name='cheets_CTS_R.x86.waivers-collect-tests-only',
+        run_template=['run', 'commandAndExit', 'collect-tests-only', '--subplan', 'waivers', '--disable-reboot', '--module-arg', 'CtsMediaTestCases:skip-media-download:true', '--module-arg', 'CtsMediaStressTestCases:skip-media-download:true', '--module-arg', 'CtsMediaBitstreamsTestCases:skip-media-download:true'],
+        retry_template=None,
+        target_module=None,
+        target_plan='waivers',
+        load_waivers=False,
+        bundle='x86',
+        retry_manual_tests=True,
+        warn_on_test_retry=False,
+        uri='DEV_MOBLAB',
+        use_jdk9=True,
+        timeout=360)
+
+parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_CTS_R/generate_controlfiles.py b/server/site_tests/cheets_CTS_R/generate_controlfiles.py
index ca9ea03..917554c 100755
--- a/server/site_tests/cheets_CTS_R/generate_controlfiles.py
+++ b/server/site_tests/cheets_CTS_R/generate_controlfiles.py
@@ -1,4 +1,5 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/cheets_CTS_R/manual_tests/camera_illumination.yaml b/server/site_tests/cheets_CTS_R/manual_tests/camera_illumination.yaml
new file mode 100644
index 0000000..6857047
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/manual_tests/camera_illumination.yaml
@@ -0,0 +1,5 @@
+android.hardware.camera2.cts.CaptureRequestTest#testAntiBandingModes[1]: [drawcia, maglia, magolor, storo360, lalala]
+android.hardware.camera2.cts.CaptureRequestTest#testNoiseReductionModeControl[1]: [nipperkin]
+android.hardware.camera2.cts.MultiViewTest#testTextureImageWriterReaderOperation[1]: [sparky]
+android.hardware.camera2.cts.StillCaptureTest#testAeCompensation[1]: [drawcia, maglia, magolor, storo360, lalala]
+android.hardware.cts.CameraGLTest#testCameraToSurfaceTextureMetadata: [nipperkin]
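
[Editorial note] The new camera_illumination.yaml above maps each CTS test id to the list of boards it concerns. As an illustration only (this is not autotest's own loader), PyYAML reads the file as a plain dict of test id to board list; the helper name below is an assumption:

# Illustration only: look up the board list recorded for a given test id.
import yaml  # PyYAML, assumed available

def boards_for_test(path, test_id):
    """Return the board list for test_id, or [] if it is not listed."""
    with open(path) as f:
        mapping = yaml.safe_load(f) or {}
    return mapping.get(test_id, [])

# e.g. boards_for_test('manual_tests/camera_illumination.yaml',
#                      'android.hardware.cts.CameraGLTest#testCameraToSurfaceTextureMetadata')
# would return ['nipperkin'] for the file added above.
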
diff --git a/server/site_tests/cheets_CTS_R/notest_modules/notest_combined_modules.yaml b/server/site_tests/cheets_CTS_R/notest_modules/notest_combined_modules.yaml
index 5952f24..57ebdcb 100644
--- a/server/site_tests/cheets_CTS_R/notest_modules/notest_combined_modules.yaml
+++ b/server/site_tests/cheets_CTS_R/notest_modules/notest_combined_modules.yaml
@@ -3,10 +3,12 @@
 CtsAngleIntegrationHost: [all]
 CtsAttentionServiceDevice: [all]
 CtsCar: [all]
-CtsExtendedMocking: [nativebridge]
-CtsIncrementalInstallHost: [shipatN, shipatP]
-CtsInit: [nativebridge]
-CtsInlineMocking: [nativebridge]
+CtsExtendedMocking: [binarytranslated]
+CtsInit: [binarytranslated]
+CtsInlineMocking: [binarytranslated]
 CtsNfc: [all]
-CtsSlice: [nativebridge]
+CtsOmapi: [shipatN]
+CtsPrint: [all]
+CtsSlice: [binarytranslated]
 CtsTestHarnessMode: [all]
+signed-Cts: [shipatN]
diff --git a/server/site_tests/cheets_CTS_R/notest_modules/notest_modules.yaml b/server/site_tests/cheets_CTS_R/notest_modules/notest_modules.yaml
index ebeabf0..f30f49e 100644
--- a/server/site_tests/cheets_CTS_R/notest_modules/notest_modules.yaml
+++ b/server/site_tests/cheets_CTS_R/notest_modules/notest_modules.yaml
@@ -1,20 +1,37 @@
 CtsAccountsHostTestCases: [all]
-CtsAdbHostTestCases: [all]
-CtsAdbManagerHostTestCases: [all]
+CtsAdbHostTestCases: [shipatN, shipatP]
 CtsAngleIntegrationHostTestCases: [all]
+CtsAppPredictionServiceTestCases: [all]
+CtsAppWidgetTestCases: [all]
 CtsAttentionServiceDeviceTestCases: [all]
 CtsCarHostTestCases: [all]
 CtsCarTestCases: [all]
-CtsExtendedMockingTestCases: [nativebridge]
-CtsIncrementalInstallHostTestCases: [shipatN, shipatP]
-CtsInitTestCases: [nativebridge]
-CtsInlineMockingTestCases: [nativebridge]
+CtsContentSuggestionsTestCases: [all]
+CtsExtendedMockingTestCases: [binarytranslated]
+CtsHiddenApiKillswitchDebugClassTestCases: [binarytranslated]
+CtsHiddenApiKillswitchWhitelistTestCases: [binarytranslated]
+CtsHiddenApiKillswitchWildcardTestCases: [binarytranslated]
+CtsInitTestCases: [binarytranslated]
+CtsInlineMockingTestCases: [binarytranslated]
+CtsJvmtiAttachingTestCases: [binarytranslated]
 CtsMediaBitstreamsTestCases: [all]
+CtsNativeMediaMetricsTestCases: [binarytranslated]
 CtsNfcTestCases: [all]
-CtsRollbackManagerHostTestCases: [nativebridge]
-CtsSliceTestCases: [nativebridge]
-CtsStagedInstallHostTestCases: [nativebridge]
+CtsOmapiTestCases: [shipatN]
+CtsPrintTestCases: [all]
+CtsRollbackManagerHostTestCases: [binarytranslated]
+CtsSecurityBulletinHostTestCases: [all]
+CtsSliceTestCases: [binarytranslated]
+CtsStagedInstallHostTestCases: [binarytranslated]
+CtsSecureElementAccessControlTestCases1: [all]
+CtsSecureElementAccessControlTestCases2: [all]
+CtsSecureElementAccessControlTestCases3: [all]
+CtsSyncContentHostTestCases: [all]
+CtsSystemApiAnnotationTestCases: [binarytranslated]
 CtsTestHarnessModeTestCases: [all]
+CtsWindowManagerJetpackTestCases: [all]
+CtsWindowManagerSdk28TestCases: [all]
+signed-CtsOmapiTestCases: [shipatN]
 signed-CtsSecureElementAccessControlTestCases1: [all]
 signed-CtsSecureElementAccessControlTestCases2: [all]
 signed-CtsSecureElementAccessControlTestCases3: [all]
diff --git a/server/site_tests/cheets_CTS_R/subplans/cts-hardware.xml b/server/site_tests/cheets_CTS_R/subplans/cts-hardware.xml
new file mode 100644
index 0000000..228bedc
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/subplans/cts-hardware.xml
@@ -0,0 +1,18 @@
+<?xml version='1.0' encoding='UTF-8' standalone='no' ?>
+<SubPlan version="2.0">
+  <Entry include="CtsAppTestCases android.app.cts.SystemFeaturesTest" />
+  <Entry include="CtsCameraTestCases" />
+  <Entry include="CtsDeqpTestCases dEQP-GLES3.functional.prerequisite#*" />
+  <Entry include="CtsMediaStressTestCases android.mediastress.cts.MediaRecorderStressTest" />
+  <Entry include="CtsMediaTestCases" />
+  <Entry include="CtsNativeMediaAAudioTestCases" />
+  <Entry include="CtsNetTestCases" />
+  <Entry include="CtsPerfettoTestCases" />
+  <Entry include="CtsPermissionTestCases android.permission.cts.CameraPermissionTest" />
+  <Entry include="CtsPermissionTestCases android.permission.cts.Camera2PermissionTest" />
+  <Entry include="CtsSensorTestCases" />
+  <Entry include="CtsSustainedPerformanceHostTestCases" />
+  <Entry include="CtsUsageStatsTestCases" />
+  <Entry include="CtsViewTestCases" />
+  <Entry include="CtsWifiTestCases" />
+</SubPlan>
diff --git a/server/site_tests/cheets_CTS_R/subplans/waivers.xml b/server/site_tests/cheets_CTS_R/subplans/waivers.xml
new file mode 100644
index 0000000..3d5d99b
--- /dev/null
+++ b/server/site_tests/cheets_CTS_R/subplans/waivers.xml
@@ -0,0 +1,18 @@
+<?xml version='1.0' encoding='UTF-8' standalone='no' ?>
+<SubPlan version="2.0">
+  <!--
+    Unless there's special reason, the entries can be removed only
+    when the DEV version bundle's provisional version number (like 9.0r13)
+    is incremented. Otherwise the computation for the canonical list
+    of test cases is confused. See b/151779432.
+  -->
+  <Entry include="CtsAppTestCases android.app.cts.DisplayTest#testRotation" />
+  <Entry include="CtsAppTestCases[instant] android.app.cts.DisplayTest#testRotation" />
+  <Entry include="CtsAutoFillServiceTestCases android.autofillservice.cts.MultiWindowLoginActivityTest#testSplitWindow" />
+  <Entry include="CtsIcuTestCases android.icu.dev.test.timezone.TimeZoneTest#TestCanonicalID" />
+  <Entry include="CtsStatsdHostTestCases android.cts.statsd.alert.AnomalyDetectionTests#testPulledAnomalyDetection" />
+  <Entry include="CtsStatsdHostTestCases[instant] android.cts.statsd.alert.AnomalyDetectionTests#testPulledAnomalyDetection" />
+  <Entry include="CtsStatsdHostTestCases android.cts.statsd.metric.MetricActivationTests#testMultipleActivations" />
+  <Entry include="CtsStatsdHostTestCases[instant] android.cts.statsd.metric.MetricActivationTests#testMultipleActivations" />
+  <Entry include="CtsWindowManagerDeviceTestCases android.server.wm.AppConfigurationTests#testSameConfigurationSplitFullSplitRelaunch" />
+</SubPlan>
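
[Editorial note] The two SubPlan files added above (cts-hardware.xml and waivers.xml) share the same simple structure: a <SubPlan> root whose <Entry> elements carry an include attribute naming a module or test. A minimal standard-library sketch of reading those entries, shown only to document the format (this is not autotest's own subplan parser):

# Illustration only: list the include entries of a SubPlan XML file.
import xml.etree.ElementTree as ET

def subplan_includes(path):
    """Return the 'include' attribute of every <Entry> in a SubPlan file."""
    root = ET.parse(path).getroot()
    return [entry.get('include') for entry in root.findall('Entry')]

# Example, assuming the file path relative to the test directory:
# for include in subplan_includes('subplans/waivers.xml'):
#     print(include)
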
diff --git a/server/site_tests/cheets_GTS/OWNERS b/server/site_tests/cheets_GTS/OWNERS
deleted file mode 100644
index 3895a7b..0000000
--- a/server/site_tests/cheets_GTS/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-include /CTS_OWNERS
diff --git a/server/site_tests/cheets_GTS/cheets_GTS.py b/server/site_tests/cheets_GTS/cheets_GTS.py
deleted file mode 100644
index 9b7ded1..0000000
--- a/server/site_tests/cheets_GTS/cheets_GTS.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# repohooks/pre-upload.py currently does not run pylint. But for developers who
-# want to check their code manually we disable several harmless pylint warnings
-# which just distract from more serious remaining issues.
-#
-# The instance variable _android_gts is not defined in __init__().
-# pylint: disable=attribute-defined-outside-init
-#
-# Many short variable names don't follow the naming convention.
-# pylint: disable=invalid-name
-
-import logging
-import os
-import shutil
-import tempfile
-
-from autotest_lib.server import utils
-from autotest_lib.server.cros.tradefed import tradefed_test
-
-# Maximum default time allowed for each individual GTS module.
-_GTS_TIMEOUT_SECONDS = 3600
-_PARTNER_GTS_BUCKET = 'gs://chromeos-partner-gts/'
-_PARTNER_GTS_LOCATION = _PARTNER_GTS_BUCKET + 'gts-8-R2-6955212.zip'
-_PARTNER_GTS_AUTHKEY = _PARTNER_GTS_BUCKET + 'gts-arc.json'
-_GTS_MEDIA_URI = ('https://storage.googleapis.com/youtube-test-media/gts/' +
-                  'GtsYouTubeTestCases-media-1.2.zip')
-_GTS_MEDIA_LOCALPATH = '/tmp/android-gts-media/GtsYouTubeTestCases'
-
-
-class cheets_GTS(tradefed_test.TradefedTest):
-    """Sets up tradefed to run GTS tests."""
-    version = 1
-
-    _SHARD_CMD = '--shard-count'
-
-    def _tradefed_retry_command(self, template, session_id):
-        """Build tradefed 'retry' command from template."""
-        cmd = []
-        for arg in template:
-            cmd.append(arg.format(session_id=session_id))
-        return cmd
-
-    def _tradefed_run_command(self, template):
-        """Build tradefed 'run' command from template."""
-        cmd = template[:]
-        # If we are running outside of the lab we can collect more data.
-        if not utils.is_in_container():
-            logging.info('Running outside of lab, adding extra debug options.')
-            cmd.append('--log-level-display=DEBUG')
-
-        return cmd
-
-    def _get_default_bundle_url(self, bundle):
-        return _PARTNER_GTS_LOCATION
-
-    def _get_default_authkey(self):
-        return _PARTNER_GTS_AUTHKEY
-
-    def _get_tradefed_base_dir(self):
-        return 'android-gts'
-
-    def _tradefed_cmd_path(self):
-        return os.path.join(self._repository, 'tools', 'gts-tradefed')
-
-    def _tradefed_env(self):
-        if self._authkey:
-            return dict(os.environ, APE_API_KEY=self._authkey)
-        return None
-
-    def run_once(self,
-                 test_name,
-                 run_template,
-                 retry_template=None,
-                 target_module=None,
-                 target_plan=None,
-                 needs_push_media=False,
-                 enable_default_apps=False,
-                 executable_test_count=None,
-                 precondition_commands=[],
-                 login_precondition_commands=[],
-                 authkey=None,
-                 prerequisites=[],
-                 timeout=_GTS_TIMEOUT_SECONDS):
-        """Runs the specified GTS once, but with several retries.
-
-        Run an arbitrary tradefed command.
-
-        @param test_name: the name of test. Used for logging.
-        @param run_template: the template to construct the run command.
-                             Example: ['run', 'commandAndExit', 'cts',
-                                       '--skip-media-download']
-        @param retry_template: the template to construct the retry command.
-                               Example: ['run', 'commandAndExit', 'retry',
-                                         '--skip-media-download', '--retry',
-                                         '{session_id}']
-        @param target_module: the name of test module to run.
-        @param target_plan: the name of the test plan to run.
-        @param needs_push_media: need to push test media streams.
-        @param executable_test_count: the known number of tests in the run.
-        @param timeout: time after which tradefed can be interrupted.
-        @param precondition_commands: a list of scripts to be run on the
-        dut before the test is run, the scripts must already be installed.
-        @param login_precondition_commands: a list of scripts to be run on the
-        dut before the log-in for the test is performed.
-        @param prerequisites: a list of prerequisites that identify rogue DUTs.
-        """
-        # Download the GTS auth key to the local temp directory.
-        tmpdir = tempfile.mkdtemp()
-        try:
-            self._authkey = self._download_to_dir(
-                authkey or self._get_default_authkey(), tmpdir)
-
-            self._run_tradefed_with_retries(
-                test_name=test_name,
-                run_template=run_template,
-                retry_template=retry_template,
-                timeout=timeout,
-                target_module=target_module,
-                target_plan=target_plan,
-                media_asset=tradefed_test.MediaAsset(
-                    _GTS_MEDIA_URI if needs_push_media else None,
-                    _GTS_MEDIA_LOCALPATH),
-                enable_default_apps=enable_default_apps,
-                executable_test_count=executable_test_count,
-                login_precondition_commands=login_precondition_commands,
-                precondition_commands=precondition_commands,
-                prerequisites=prerequisites)
-        finally:
-            shutil.rmtree(tmpdir)
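
[Editorial note] For readers skimming the deleted cheets_GTS.py above: the retry_template values used throughout these control files (['run', 'commandAndExit', 'retry', '--retry', '{session_id}']) are plain str.format templates, expanded argument by argument by _tradefed_retry_command. A standalone sketch of that substitution, mirroring the removed method; the session id value is an arbitrary example:

# Minimal sketch of the retry-template expansion performed by the removed
# cheets_GTS._tradefed_retry_command.
retry_template = ['run', 'commandAndExit', 'retry', '--retry', '{session_id}']

def expand_retry_template(template, session_id):
    """Substitute the tradefed session id into each template argument."""
    return [arg.format(session_id=session_id) for arg in template]

print(expand_retry_template(retry_template, session_id=3))
# ['run', 'commandAndExit', 'retry', '--retry', '3']
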
diff --git a/server/site_tests/cheets_GTS/control.8.0_dev.waivers b/server/site_tests/cheets_GTS/control.8.0_dev.waivers
deleted file mode 100644
index dfecbcc..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_dev.waivers
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_dev.waivers'
-ATTRIBUTES = 'suite:arc-gts, suite:arc-gts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run waived testcases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_dev.waivers',
-        test_name='cheets_GTS.8.0_dev.waivers',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--subplan', 'waivers', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='gts-dev',
-        target_plan='waivers',
-        load_waivers=False,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R3-P-Preview3-7012566.zip',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAccountsHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsAccountsHostTestCases
deleted file mode 100644
index bfdc794..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAccountsHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsAccountsHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAccountsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsAccountsHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsAccountsHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAccountsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAccountsHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAdminTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsAdminTestCases
deleted file mode 100644
index bc22cc8..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAdminTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsAdminTestCases'
-ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAdminTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='8.0_r2.GtsAdminTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsAdminTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAdminTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAdminTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAfwTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsAfwTestCases
deleted file mode 100644
index 0001db9..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAfwTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsAfwTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAfwTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsAfwTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsAfwTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAfwTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAfwTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAndroidAutoDeviceTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsAndroidAutoDeviceTestCases
deleted file mode 100644
index 31bf0c1..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAndroidAutoDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsAndroidAutoDeviceTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAndroidAutoDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsAndroidAutoDeviceTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsAndroidAutoDeviceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAndroidAutoDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAndroidAutoDeviceTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsApp b/server/site_tests/cheets_GTS/control.8.0_r2.GtsApp
deleted file mode 100644
index bce4224..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsApp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsApp'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAppBlacklistDeviceTestCases, GtsAppTestCases, GtsAppVisibilityDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsApp',
-        test_name='cheets_GTS.8.0_r2.GtsApp',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsAppBlacklistDeviceTestCases', '--include-filter', 'GtsAppTestCases', '--include-filter', 'GtsAppVisibilityDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsApp',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsArtManagerHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsArtManagerHostTestCases
deleted file mode 100644
index f5185b0..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsArtManagerHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsArtManagerHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsArtManagerHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsArtManagerHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsArtManagerHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsArtManagerHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsArtManagerHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAssistIntentTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsAssistIntentTestCases
deleted file mode 100644
index b36df8cb..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAssistIntentTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsAssistIntentTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistIntentTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsAssistIntentTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsAssistIntentTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistIntentTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistIntentTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAssistant b/server/site_tests/cheets_GTS/control.8.0_r2.GtsAssistant
deleted file mode 100644
index c93efe8..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAssistant
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsAssistant'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistantHostTestCases, GtsAssistantMicHostTestCases, GtsAssistantWorkProfileHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsAssistant',
-        test_name='cheets_GTS.8.0_r2.GtsAssistant',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsAssistantHostTestCases', '--include-filter', 'GtsAssistantMicHostTestCases', '--include-filter', 'GtsAssistantWorkProfileHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistant',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        # This module has a known waived abort bug (b/173331969). As long as the
-        # executed test count matches the known number, assume all tests ran.
-        executable_test_count=[1, 2, 3, 4],
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAudioTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsAudioTestCases
deleted file mode 100644
index e6b5b3b..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsAudioTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsAudioTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAudioTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsAudioTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsAudioTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAudioTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAudioTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsBackup b/server/site_tests/cheets_GTS/control.8.0_r2.GtsBackup
deleted file mode 100644
index b147fba..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsBackup
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsBackup'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBackupHostTestCases, GtsBackupTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsBackup',
-        test_name='cheets_GTS.8.0_r2.GtsBackup',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsBackupHostTestCases', '--include-filter', 'GtsBackupTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBackup',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsBoot b/server/site_tests/cheets_GTS/control.8.0_r2.GtsBoot
deleted file mode 100644
index 8010b56..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsBoot
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsBoot'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBootHealthHostTestCases, GtsBootStatsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsBoot',
-        test_name='cheets_GTS.8.0_r2.GtsBoot',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsBootHealthHostTestCases', '--include-filter', 'GtsBootStatsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBoot',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsCallLogTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsCallLogTestCases
deleted file mode 100644
index fcdfaf8..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsCallLogTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsCallLogTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCallLogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsCallLogTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsCallLogTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCallLogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCallLogTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsCameraTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsCameraTestCases
deleted file mode 100644
index d4f9d99..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsCameraTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsCameraTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCameraTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsCameraTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsCameraTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCameraTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCameraTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsCastHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsCastHostTestCases
deleted file mode 100644
index 230e03f..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsCastHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsCastHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCastHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsCastHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsCastHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCastHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCastHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsContacts b/server/site_tests/cheets_GTS/control.8.0_r2.GtsContacts
deleted file mode 100644
index d54d442..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsContacts
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsContacts'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContactsAppDeviceTestCases, GtsContactsTest of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsContacts',
-        test_name='cheets_GTS.8.0_r2.GtsContacts',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsContactsAppDeviceTestCases', '--include-filter', 'GtsContactsTest', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContacts',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsContent b/server/site_tests/cheets_GTS/control.8.0_r2.GtsContent
deleted file mode 100644
index 1724fbd..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsContent
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsContent'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContentHostTestCases, GtsContentTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsContent',
-        test_name='cheets_GTS.8.0_r2.GtsContent',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsContentHostTestCases', '--include-filter', 'GtsContentTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContent',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsContextHubPermissionDeviceTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsContextHubPermissionDeviceTestCases
deleted file mode 100644
index ca2ed91..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsContextHubPermissionDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsContextHubPermissionDeviceTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContextHubPermissionDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsContextHubPermissionDeviceTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsContextHubPermissionDeviceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContextHubPermissionDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContextHubPermissionDeviceTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDebugfsMountTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsDebugfsMountTestCases
deleted file mode 100644
index 855c752..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDebugfsMountTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsDebugfsMountTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDebugfsMountTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsDebugfsMountTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsDebugfsMountTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDebugfsMountTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDebugfsMountTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDeviceConfigTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsDeviceConfigTestCases
deleted file mode 100644
index c73c347..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDeviceConfigTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsDeviceConfigTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDeviceConfigTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsDeviceConfigTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsDeviceConfigTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDeviceConfigTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDeviceConfigTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDexModuleRegistrationTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsDexModuleRegistrationTestCases
deleted file mode 100644
index dd7663a..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDexModuleRegistrationTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsDexModuleRegistrationTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDexModuleRegistrationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsDexModuleRegistrationTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsDexModuleRegistrationTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDexModuleRegistrationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDexModuleRegistrationTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDialer b/server/site_tests/cheets_GTS/control.8.0_r2.GtsDialer
deleted file mode 100644
index 2c4da68..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDialer
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsDialer'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDialerAudioTestCases, GtsDialerDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsDialer',
-        test_name='cheets_GTS.8.0_r2.GtsDialer',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsDialerAudioTestCases', '--include-filter', 'GtsDialerDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDialer',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDoze b/server/site_tests/cheets_GTS/control.8.0_r2.GtsDoze
deleted file mode 100644
index e6cd9a8..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDoze
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsDoze'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDozeDeviceTestCases, GtsDozeHostSideTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsDoze',
-        test_name='cheets_GTS.8.0_r2.GtsDoze',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsDozeDeviceTestCases', '--include-filter', 'GtsDozeHostSideTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDoze',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDuoReadyTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsDuoReadyTestCases
deleted file mode 100644
index 6e57fcc..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsDuoReadyTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsDuoReadyTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDuoReadyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsDuoReadyTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsDuoReadyTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDuoReadyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDuoReadyTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsEdiHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsEdiHostTestCases
deleted file mode 100644
index e286eb1..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsEdiHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsEdiHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsEdiHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsEdiHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsEdiHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsEdiHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsEdiHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsExoPlayerTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsExoPlayerTestCases
deleted file mode 100644
index cfc6607..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsExoPlayerTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsExoPlayerTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsExoPlayerTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsExoPlayerTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsExoPlayerTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsExoPlayerTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsExoPlayerTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsFeaturesTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsFeaturesTestCases
deleted file mode 100644
index ed21ab1..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsFeaturesTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsFeaturesTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsFeaturesTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsFeaturesTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsFeaturesTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsFeaturesTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsFeaturesTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsGmscoreHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsGmscoreHostTestCases
deleted file mode 100644
index bda2ab9..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsGmscoreHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsGmscoreHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsGmscoreHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsGmscoreHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsGmscoreHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGmscoreHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsGmscoreHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        prerequisites=['bluetooth'],
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsGraphicsHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsGraphicsHostTestCases
deleted file mode 100644
index 7c7283b..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsGraphicsHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsGraphicsHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsGraphicsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsGraphicsHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsGraphicsHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGraphicsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsGraphicsHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsHomeHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsHomeHostTestCases
deleted file mode 100644
index 9b818aa..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsHomeHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsHomeHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsHomeHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsHomeHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsHomeHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsHomeHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsHomeHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsIncident b/server/site_tests/cheets_GTS/control.8.0_r2.GtsIncident
deleted file mode 100644
index 4a7ecb0..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsIncident
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsIncident'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncidentConfirmationTestCases, GtsIncidentManagerTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsIncident',
-        test_name='cheets_GTS.8.0_r2.GtsIncident',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsIncidentConfirmationTestCases', '--include-filter', 'GtsIncidentManagerTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncident',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsIncrementalInstall b/server/site_tests/cheets_GTS/control.8.0_r2.GtsIncrementalInstall
deleted file mode 100644
index 20c0b4f..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsIncrementalInstall
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsIncrementalInstall'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncrementalInstallProxyHostTestCases, GtsIncrementalInstallTestCases, GtsIncrementalInstallTriggerApp of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsIncrementalInstall',
-        test_name='cheets_GTS.8.0_r2.GtsIncrementalInstall',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsIncrementalInstallProxyHostTestCases', '--include-filter', 'GtsIncrementalInstallTestCases', '--include-filter', 'GtsIncrementalInstallTriggerApp', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncrementalInstall',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsInstallPackagesWhitelistDeviceTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsInstallPackagesWhitelistDeviceTestCases
deleted file mode 100644
index 1b3fbb4..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsInstallPackagesWhitelistDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsInstallPackagesWhitelistDeviceTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstallPackagesWhitelistDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsInstallPackagesWhitelistDeviceTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsInstallPackagesWhitelistDeviceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstallPackagesWhitelistDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstallPackagesWhitelistDeviceTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsInstallerV2TestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsInstallerV2TestCases
deleted file mode 100644
index bb90aee..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsInstallerV2TestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsInstallerV2TestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstallerV2TestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsInstallerV2TestCases',
-        test_name='cheets_GTS.8.0_r2.GtsInstallerV2TestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstallerV2TestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstallerV2TestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsInstantAppsHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsInstantAppsHostTestCases
deleted file mode 100644
index 78cd213..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsInstantAppsHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsInstantAppsHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstantAppsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsInstantAppsHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsInstantAppsHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstantAppsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstantAppsHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsLargeApkHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsLargeApkHostTestCases
deleted file mode 100644
index 61ae1fc..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsLargeApkHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsLargeApkHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLargeApkHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsLargeApkHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsLargeApkHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLargeApkHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLargeApkHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsLensTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsLensTestCases
deleted file mode 100644
index c404fbd..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsLensTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsLensTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLensTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsLensTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsLensTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLensTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLensTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsLinkerConfig b/server/site_tests/cheets_GTS/control.8.0_r2.GtsLinkerConfig
deleted file mode 100644
index 08c8457..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsLinkerConfig
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsLinkerConfig'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLinkerConfigTestCases, GtsLinkerConfigTestCases[secondary_user] of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsLinkerConfig',
-        test_name='cheets_GTS.8.0_r2.GtsLinkerConfig',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsLinkerConfigTestCases', '--include-filter', 'GtsLinkerConfigTestCases[secondary_user]', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLinkerConfig',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsLocation b/server/site_tests/cheets_GTS/control.8.0_r2.GtsLocation
deleted file mode 100644
index 2d2bb10..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsLocation
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsLocation'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLocationHostTestCases, GtsLocationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsLocation',
-        test_name='cheets_GTS.8.0_r2.GtsLocation',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsLocationHostTestCases', '--include-filter', 'GtsLocationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLocation',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsMediaTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsMediaTestCases
deleted file mode 100644
index c5de66b..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsMediaTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsMediaTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsMediaTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsMediaTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsMediaTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsMediaTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsMediaTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsMemory b/server/site_tests/cheets_GTS/control.8.0_r2.GtsMemory
deleted file mode 100644
index fbd8303..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsMemory
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsMemory'
-ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsMemoryHostTestCases, GtsMemoryTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='8.0_r2.GtsMemory',
-        test_name='cheets_GTS.8.0_r2.GtsMemory',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsMemoryHostTestCases', '--include-filter', 'GtsMemoryTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsMemory',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsModuleMetadataTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsModuleMetadataTestCases
deleted file mode 100644
index efdf36e..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsModuleMetadataTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsModuleMetadataTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsModuleMetadataTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsModuleMetadataTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsModuleMetadataTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsModuleMetadataTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsModuleMetadataTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsNet b/server/site_tests/cheets_GTS/control.8.0_r2.GtsNet
deleted file mode 100644
index 0ebce70..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsNet
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsNet'
-ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetStatsHostTestCases, GtsNetTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='8.0_r2.GtsNet',
-        test_name='cheets_GTS.8.0_r2.GtsNet',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsNetStatsHostTestCases', '--include-filter', 'GtsNetTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNet',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsNetwork b/server/site_tests/cheets_GTS/control.8.0_r2.GtsNetwork
deleted file mode 100644
index 6445ca2..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsNetwork
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsNetwork'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetworkStackHostTestCases, GtsNetworkWatchlistTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsNetwork',
-        test_name='cheets_GTS.8.0_r2.GtsNetwork',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsNetworkStackHostTestCases', '--include-filter', 'GtsNetworkWatchlistTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNetwork',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=4320)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsNmgiarcTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsNmgiarcTestCases
deleted file mode 100644
index 6bedc36..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsNmgiarcTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsNmgiarcTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNmgiarcTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsNmgiarcTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsNmgiarcTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNmgiarcTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNmgiarcTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsNoPermission b/server/site_tests/cheets_GTS/control.8.0_r2.GtsNoPermission
deleted file mode 100644
index ff82744..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsNoPermission
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsNoPermission'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNoPermissionTestCases, GtsNoPermissionTestCases25 of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsNoPermission',
-        test_name='cheets_GTS.8.0_r2.GtsNoPermission',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsNoPermissionTestCases', '--include-filter', 'GtsNoPermissionTestCases25', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNoPermission',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsNotificationTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsNotificationTestCases
deleted file mode 100644
index e66c2e6..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsNotificationTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsNotificationTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNotificationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsNotificationTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsNotificationTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNotificationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNotificationTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsOemLockServiceTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsOemLockServiceTestCases
deleted file mode 100644
index 42652d1..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsOemLockServiceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsOemLockServiceTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsOemLockServiceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsOemLockServiceTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsOemLockServiceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsOemLockServiceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsOemLockServiceTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsOsTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsOsTestCases
deleted file mode 100644
index dddce8e..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsOsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsOsTestCases'
-ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsOsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='8.0_r2.GtsOsTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsOsTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsOsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsOsTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPackage b/server/site_tests/cheets_GTS/control.8.0_r2.GtsPackage
deleted file mode 100644
index decff01..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPackage
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsPackage'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageInstallTestCases, GtsPackageInstallerTapjackingTestCases, GtsPackageManagerHostTestCases, GtsPackageNameCertPairsDeviceTestCases, GtsPackageUninstallTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsPackage',
-        test_name='cheets_GTS.8.0_r2.GtsPackage',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsPackageInstallTestCases', '--include-filter', 'GtsPackageInstallerTapjackingTestCases', '--include-filter', 'GtsPackageManagerHostTestCases', '--include-filter', 'GtsPackageNameCertPairsDeviceTestCases', '--include-filter', 'GtsPackageUninstallTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackage',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=2160)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPartnerBookmarksTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsPartnerBookmarksTestCases
deleted file mode 100644
index 4f94659..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPartnerBookmarksTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsPartnerBookmarksTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPartnerBookmarksTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsPartnerBookmarksTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsPartnerBookmarksTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPartnerBookmarksTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPartnerBookmarksTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPermission b/server/site_tests/cheets_GTS/control.8.0_r2.GtsPermission
deleted file mode 100644
index 90cfe55..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPermission
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsPermission'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPermissionControllerHostTestCases, GtsPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsPermission',
-        test_name='cheets_GTS.8.0_r2.GtsPermission',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsPermissionControllerHostTestCases', '--include-filter', 'GtsPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPermission',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPlacementTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsPlacementTestCases
deleted file mode 100644
index ab19aca..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPlacementTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsPlacementTestCases'
-ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlacementTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='8.0_r2.GtsPlacementTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsPlacementTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlacementTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlacementTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPlay b/server/site_tests/cheets_GTS/control.8.0_r2.GtsPlay
deleted file mode 100644
index c7d039b..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPlay
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsPlay'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlayAutoInstallTestCases, GtsPlayFsiHostTestCases, GtsPlayFsiTestCases, GtsPlayStoreHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsPlay',
-        test_name='cheets_GTS.8.0_r2.GtsPlay',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsPlayAutoInstallTestCases', '--include-filter', 'GtsPlayFsiHostTestCases', '--include-filter', 'GtsPlayFsiTestCases', '--include-filter', 'GtsPlayStoreHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlay',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPrintTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsPrintTestCases
deleted file mode 100644
index 1c51026..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPrintTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsPrintTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPrintTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsPrintTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsPrintTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrintTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPrintTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPrivacyTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsPrivacyTestCases
deleted file mode 100644
index ecca9e7..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPrivacyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsPrivacyTestCases'
-ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPrivacyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=3,
-        tag='8.0_r2.GtsPrivacyTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsPrivacyTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrivacyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPrivacyTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPropertiesTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsPropertiesTestCases
deleted file mode 100644
index df7c978..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsPropertiesTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsPropertiesTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPropertiesTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsPropertiesTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsPropertiesTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPropertiesTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPropertiesTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsRegulationComplianceTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsRegulationComplianceTestCases
deleted file mode 100644
index 10ad46b..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsRegulationComplianceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsRegulationComplianceTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsRegulationComplianceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsRegulationComplianceTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsRegulationComplianceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsRegulationComplianceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsRegulationComplianceTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsRlzTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsRlzTestCases
deleted file mode 100644
index 3318e86..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsRlzTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsRlzTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsRlzTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsRlzTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsRlzTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsRlzTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsRlzTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSample b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSample
deleted file mode 100644
index 5f3a9a9..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSample
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSample'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSampleDeviceTestCases, GtsSampleDynamicConfigTestCases, GtsSampleHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSample',
-        test_name='cheets_GTS.8.0_r2.GtsSample',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSampleDeviceTestCases', '--include-filter', 'GtsSampleDynamicConfigTestCases', '--include-filter', 'GtsSampleHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSample',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsScreenshotHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsScreenshotHostTestCases
deleted file mode 100644
index 3df0343..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsScreenshotHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsScreenshotHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsScreenshotHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsScreenshotHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsScreenshotHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsScreenshotHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsScreenshotHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSearchHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSearchHostTestCases
deleted file mode 100644
index bf1c868..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSearchHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSearchHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSearchHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSearchHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsSearchHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSearchHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSearchHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSecurityHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSecurityHostTestCases
deleted file mode 100644
index ed65f25..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSecurityHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSecurityHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSecurityHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSecurityHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsSecurityHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSecurityHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSecurityHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSensorHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSensorHostTestCases
deleted file mode 100644
index eeeeae1..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSensorHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSensorHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSensorHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSensorHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsSensorHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSensorHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSensorHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSettings b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSettings
deleted file mode 100644
index 74a95b9..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSettings
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSettings'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSettingsHostTestCases, GtsSettingsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSettings',
-        test_name='cheets_GTS.8.0_r2.GtsSettings',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSettingsHostTestCases', '--include-filter', 'GtsSettingsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSettings',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSetupWizard b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSetupWizard
deleted file mode 100644
index 6fe728c..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSetupWizard
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSetupWizard'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSetupWizardHostTestCases, GtsSetupWizardNoPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSetupWizard',
-        test_name='cheets_GTS.8.0_r2.GtsSetupWizard',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSetupWizardHostTestCases', '--include-filter', 'GtsSetupWizardNoPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSetupWizard',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSimAppDialogTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSimAppDialogTestCases
deleted file mode 100644
index a0ebd16..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSimAppDialogTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSimAppDialogTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSimAppDialogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSimAppDialogTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsSimAppDialogTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSimAppDialogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSimAppDialogTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSmartBatteryDeviceTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSmartBatteryDeviceTestCases
deleted file mode 100644
index c06238b..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSmartBatteryDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSmartBatteryDeviceTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSmartBatteryDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSmartBatteryDeviceTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsSmartBatteryDeviceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSmartBatteryDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSmartBatteryDeviceTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSmsCallLogTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSmsCallLogTestCases
deleted file mode 100644
index ba1c5d9..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSmsCallLogTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSmsCallLogTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSmsCallLogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSmsCallLogTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsSmsCallLogTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSmsCallLogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSmsCallLogTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSsaidHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSsaidHostTestCases
deleted file mode 100644
index 429a9a2..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSsaidHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSsaidHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSsaidHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSsaidHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsSsaidHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSsaidHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSsaidHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsStagedInstallHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsStagedInstallHostTestCases
deleted file mode 100644
index b56c326..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsStagedInstallHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsStagedInstallHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStagedInstallHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsStagedInstallHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsStagedInstallHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStagedInstallHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStagedInstallHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsStatsdHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsStatsdHostTestCases
deleted file mode 100644
index 6bd6583..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsStatsdHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsStatsdHostTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStatsdHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsStatsdHostTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsStatsdHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStatsdHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStatsdHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsStorageTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsStorageTestCases
deleted file mode 100644
index 79315f7..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsStorageTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsStorageTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStorageTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsStorageTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsStorageTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStorageTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStorageTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSupervisionTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSupervisionTestCases
deleted file mode 100644
index 303708c..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSupervisionTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSupervisionTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSupervisionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSupervisionTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsSupervisionTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSupervisionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSupervisionTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSuspendApps b/server/site_tests/cheets_GTS/control.8.0_r2.GtsSuspendApps
deleted file mode 100644
index 67c1825..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsSuspendApps
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsSuspendApps'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSuspendAppsPermissionTestCases, GtsSuspendAppsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsSuspendApps',
-        test_name='cheets_GTS.8.0_r2.GtsSuspendApps',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSuspendAppsPermissionTestCases', '--include-filter', 'GtsSuspendAppsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSuspendApps',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsTelecomManagerTests b/server/site_tests/cheets_GTS/control.8.0_r2.GtsTelecomManagerTests
deleted file mode 100644
index c9539b7..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsTelecomManagerTests
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsTelecomManagerTests'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTelecomManagerTests of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsTelecomManagerTests',
-        test_name='cheets_GTS.8.0_r2.GtsTelecomManagerTests',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTelecomManagerTests', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTelecomManagerTests',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsTelephony b/server/site_tests/cheets_GTS/control.8.0_r2.GtsTelephony
deleted file mode 100644
index 8df2932..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsTelephony
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsTelephony'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTelephonyNumberVerificationHostCases, GtsTelephonyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsTelephony',
-        test_name='cheets_GTS.8.0_r2.GtsTelephony',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsTelephonyNumberVerificationHostCases', '--include-filter', 'GtsTelephonyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTelephony',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsTestHarnessModeTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsTestHarnessModeTestCases
deleted file mode 100644
index b8cab17..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsTestHarnessModeTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsTestHarnessModeTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTestHarnessModeTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsTestHarnessModeTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsTestHarnessModeTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTestHarnessModeTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTestHarnessModeTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsTetheringTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsTetheringTestCases
deleted file mode 100644
index df5b7b0..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsTetheringTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsTetheringTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTetheringTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsTetheringTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsTetheringTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTetheringTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTetheringTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsTv b/server/site_tests/cheets_GTS/control.8.0_r2.GtsTv
deleted file mode 100644
index 2fc88b8..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsTv
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsTv'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTvBugReportTestCases, GtsTvHostTestCases, GtsTvTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsTv',
-        test_name='cheets_GTS.8.0_r2.GtsTv',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsTvBugReportTestCases', '--include-filter', 'GtsTvHostTestCases', '--include-filter', 'GtsTvTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTv',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsUnofficialApisUsageTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsUnofficialApisUsageTestCases
deleted file mode 100644
index 8db5e60..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsUnofficialApisUsageTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsUnofficialApisUsageTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUnofficialApisUsageTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsUnofficialApisUsageTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsUnofficialApisUsageTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUnofficialApisUsageTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUnofficialApisUsageTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsUsageStatsTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsUsageStatsTestCases
deleted file mode 100644
index cb1d5a8..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsUsageStatsTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsUsageStatsTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUsageStatsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsUsageStatsTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsUsageStatsTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUsageStatsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUsageStatsTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsUserspaceRebootHostSideTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsUserspaceRebootHostSideTestCases
deleted file mode 100644
index 4dff604..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsUserspaceRebootHostSideTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsUserspaceRebootHostSideTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUserspaceRebootHostSideTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsUserspaceRebootHostSideTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsUserspaceRebootHostSideTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUserspaceRebootHostSideTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUserspaceRebootHostSideTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsViewTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsViewTestCases
deleted file mode 100644
index 7759252..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsViewTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsViewTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsViewTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsViewTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsViewTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsViewTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsViewTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsVndkDependencyTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsVndkDependencyTestCases
deleted file mode 100644
index 6e97d06..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsVndkDependencyTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsVndkDependencyTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsVndkDependencyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsVndkDependencyTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsVndkDependencyTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsVndkDependencyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsVndkDependencyTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsWebView b/server/site_tests/cheets_GTS/control.8.0_r2.GtsWebView
deleted file mode 100644
index 71e4550..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsWebView
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsWebView'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWebViewHostTestCases, GtsWebViewTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsWebView',
-        test_name='cheets_GTS.8.0_r2.GtsWebView',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsWebViewHostTestCases', '--include-filter', 'GtsWebViewTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWebView',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsWellbeing b/server/site_tests/cheets_GTS/control.8.0_r2.GtsWellbeing
deleted file mode 100644
index 333c48f..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsWellbeing
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsWellbeing'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWellbeingHostTestCases, GtsWellbeingPermissionPolicyTestCases, GtsWellbeingTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r2.GtsWellbeing',
-        test_name='cheets_GTS.8.0_r2.GtsWellbeing',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsWellbeingHostTestCases', '--include-filter', 'GtsWellbeingPermissionPolicyTestCases', '--include-filter', 'GtsWellbeingTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWellbeing',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.GtsYouTubeTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.GtsYouTubeTestCases
deleted file mode 100644
index f1bcda4..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.GtsYouTubeTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.GtsYouTubeTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsYouTubeTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='8.0_r2.GtsYouTubeTestCases',
-        test_name='cheets_GTS.8.0_r2.GtsYouTubeTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsYouTubeTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsYouTubeTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsAccountsHostTestCases_-_GtsEdiHostTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsAccountsHostTestCases_-_GtsEdiHostTestCases
deleted file mode 100644
index 5cf1f90..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsAccountsHostTestCases_-_GtsEdiHostTestCases
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.all.GtsAccountsHostTestCases_-_GtsEdiHostTestCases'
-ATTRIBUTES = 'suite:arc-gts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAccountsHostTestCases, GtsAdminTestCases, GtsAfwTestCases, GtsAndroidAutoDeviceTestCases, GtsAppBlacklistDeviceTestCases, GtsAppTestCases, GtsAppVisibilityDeviceTestCases, GtsArtManagerHostTestCases, GtsAssistIntentTestCases, GtsAssistantHostTestCases, GtsAssistantMicHostTestCases, GtsAssistantWorkProfileHostTestCases, GtsAudioTestCases, GtsBackupHostTestCases, GtsBackupTestCases, GtsBootHealthHostTestCases, GtsBootStatsTestCases, GtsCallLogTestCases, GtsCameraTestCases, GtsCastHostTestCases, GtsContactsAppDeviceTestCases, GtsContactsTest, GtsContentHostTestCases, GtsContentTestCases, GtsContextHubPermissionDeviceTestCases, GtsDebugfsMountTestCases, GtsDeviceConfigTestCases, GtsDexModuleRegistrationTestCases, GtsDialerAudioTestCases, GtsDialerDeviceTestCases, GtsDozeDeviceTestCases, GtsDozeHostSideTestCases, GtsDuoReadyTestCases, GtsEdiHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='8.0_r2.all.GtsAccountsHostTestCases_-_GtsEdiHostTestCases',
-        test_name='cheets_GTS.8.0_r2.all.GtsAccountsHostTestCases_-_GtsEdiHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsAccountsHostTestCases', '--include-filter', 'GtsAdminTestCases', '--include-filter', 'GtsAfwTestCases', '--include-filter', 'GtsAndroidAutoDeviceTestCases', '--include-filter', 'GtsAppBlacklistDeviceTestCases', '--include-filter', 'GtsAppTestCases', '--include-filter', 'GtsAppVisibilityDeviceTestCases', '--include-filter', 'GtsArtManagerHostTestCases', '--include-filter', 'GtsAssistIntentTestCases', '--include-filter', 'GtsAssistantHostTestCases', '--include-filter', 'GtsAssistantMicHostTestCases', '--include-filter', 'GtsAssistantWorkProfileHostTestCases', '--include-filter', 'GtsAudioTestCases', '--include-filter', 'GtsBackupHostTestCases', '--include-filter', 'GtsBackupTestCases', '--include-filter', 'GtsBootHealthHostTestCases', '--include-filter', 'GtsBootStatsTestCases', '--include-filter', 'GtsCallLogTestCases', '--include-filter', 'GtsCameraTestCases', '--include-filter', 'GtsCastHostTestCases', '--include-filter', 'GtsContactsAppDeviceTestCases', '--include-filter', 'GtsContactsTest', '--include-filter', 'GtsContentHostTestCases', '--include-filter', 'GtsContentTestCases', '--include-filter', 'GtsContextHubPermissionDeviceTestCases', '--include-filter', 'GtsDebugfsMountTestCases', '--include-filter', 'GtsDeviceConfigTestCases', '--include-filter', 'GtsDexModuleRegistrationTestCases', '--include-filter', 'GtsDialerAudioTestCases', '--include-filter', 'GtsDialerDeviceTestCases', '--include-filter', 'GtsDozeDeviceTestCases', '--include-filter', 'GtsDozeHostSideTestCases', '--include-filter', 'GtsDuoReadyTestCases', '--include-filter', 'GtsEdiHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.GtsAccountsHostTestCases_-_GtsEdiHostTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        # This module has a known waived abort bug (b/173331969). As long as the
-        # executed test count matches the known number, assume all tests ran.
-        executable_test_count=[161, 322, 483, 644],
-        timeout=86400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsExoPlayerTestCases_-_GtsLocationTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsExoPlayerTestCases_-_GtsLocationTestCases
deleted file mode 100644
index 5370e01..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsExoPlayerTestCases_-_GtsLocationTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.all.GtsExoPlayerTestCases_-_GtsLocationTestCases'
-ATTRIBUTES = 'suite:arc-gts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsExoPlayerTestCases, GtsFeaturesTestCases, GtsGmscoreHostTestCases, GtsGraphicsHostTestCases, GtsHomeHostTestCases, GtsIncidentConfirmationTestCases, GtsIncidentManagerTestCases, GtsIncrementalInstallProxyHostTestCases, GtsIncrementalInstallTestCases, GtsIncrementalInstallTriggerApp, GtsInstallPackagesWhitelistDeviceTestCases, GtsInstallerV2TestCases, GtsInstantAppsHostTestCases, GtsLargeApkHostTestCases, GtsLensTestCases, GtsLinkerConfigTestCases, GtsLinkerConfigTestCases[secondary_user], GtsLocationHostTestCases, GtsLocationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='8.0_r2.all.GtsExoPlayerTestCases_-_GtsLocationTestCases',
-        test_name='cheets_GTS.8.0_r2.all.GtsExoPlayerTestCases_-_GtsLocationTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsExoPlayerTestCases', '--include-filter', 'GtsFeaturesTestCases', '--include-filter', 'GtsGmscoreHostTestCases', '--include-filter', 'GtsGraphicsHostTestCases', '--include-filter', 'GtsHomeHostTestCases', '--include-filter', 'GtsIncidentConfirmationTestCases', '--include-filter', 'GtsIncidentManagerTestCases', '--include-filter', 'GtsIncrementalInstallProxyHostTestCases', '--include-filter', 'GtsIncrementalInstallTestCases', '--include-filter', 'GtsIncrementalInstallTriggerApp', '--include-filter', 'GtsInstallPackagesWhitelistDeviceTestCases', '--include-filter', 'GtsInstallerV2TestCases', '--include-filter', 'GtsInstantAppsHostTestCases', '--include-filter', 'GtsLargeApkHostTestCases', '--include-filter', 'GtsLensTestCases', '--include-filter', 'GtsLinkerConfigTestCases', '--include-filter', 'GtsLinkerConfigTestCases[secondary_user]', '--include-filter', 'GtsLocationHostTestCases', '--include-filter', 'GtsLocationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.GtsExoPlayerTestCases_-_GtsLocationTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        prerequisites=['bluetooth'],
-        use_jdk9=True,
-        timeout=86400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsMediaTestCases_-_GtsMediaTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsMediaTestCases_-_GtsMediaTestCases
deleted file mode 100644
index 8be7154..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsMediaTestCases_-_GtsMediaTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.all.GtsMediaTestCases_-_GtsMediaTestCases'
-ATTRIBUTES = 'suite:arc-gts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsMediaTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        tag='8.0_r2.all.GtsMediaTestCases_-_GtsMediaTestCases',
-        test_name='cheets_GTS.8.0_r2.all.GtsMediaTestCases_-_GtsMediaTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsMediaTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.GtsMediaTestCases_-_GtsMediaTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=86400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsMemoryHostTestCases_-_GtsYouTubeTestCases b/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsMemoryHostTestCases_-_GtsYouTubeTestCases
deleted file mode 100644
index 4ad3a90..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.all.GtsMemoryHostTestCases_-_GtsYouTubeTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.all.GtsMemoryHostTestCases_-_GtsYouTubeTestCases'
-ATTRIBUTES = 'suite:arc-gts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsMemoryHostTestCases, GtsMemoryTestCases, GtsModuleMetadataTestCases, GtsNetStatsHostTestCases, GtsNetTestCases, GtsNetworkStackHostTestCases, GtsNetworkWatchlistTestCases, GtsNmgiarcTestCases, GtsNoPermissionTestCases, GtsNoPermissionTestCases25, GtsNotificationTestCases, GtsOemLockServiceTestCases, GtsOsTestCases, GtsPackageInstallTestCases, GtsPackageInstallerTapjackingTestCases, GtsPackageManagerHostTestCases, GtsPackageNameCertPairsDeviceTestCases, GtsPackageUninstallTestCases, GtsPartnerBookmarksTestCases, GtsPermissionControllerHostTestCases, GtsPermissionTestCases, GtsPlacementTestCases, GtsPlayAutoInstallTestCases, GtsPlayFsiHostTestCases, GtsPlayFsiTestCases, GtsPlayStoreHostTestCases, GtsPrintTestCases, GtsPrivacyTestCases, GtsPropertiesTestCases, GtsRegulationComplianceTestCases, GtsRlzTestCases, GtsSampleDeviceTestCases, GtsSampleDynamicConfigTestCases, GtsSampleHostTestCases, GtsScreenshotHostTestCases, GtsSearchHostTestCases, GtsSecurityHostTestCases, GtsSensorHostTestCases, GtsSettingsHostTestCases, GtsSettingsTestCases, GtsSetupWizardHostTestCases, GtsSetupWizardNoPermissionTestCases, GtsSimAppDialogTestCases, GtsSmartBatteryDeviceTestCases, GtsSmsCallLogTestCases, GtsSsaidHostTestCases, GtsStagedInstallHostTestCases, GtsStatsdHostTestCases, GtsStorageTestCases, GtsSupervisionTestCases, GtsSuspendAppsPermissionTestCases, GtsSuspendAppsTestCases, GtsTelecomManagerTests, GtsTelephonyNumberVerificationHostCases, GtsTelephonyTestCases, GtsTestHarnessModeTestCases, GtsTetheringTestCases, GtsTvBugReportTestCases, GtsTvHostTestCases, GtsTvTestCases, GtsUnofficialApisUsageTestCases, GtsUsageStatsTestCases, GtsUserspaceRebootHostSideTestCases, GtsViewTestCases, GtsVndkDependencyTestCases, GtsWebViewHostTestCases, GtsWebViewTestCases, GtsWellbeingHostTestCases, GtsWellbeingPermissionPolicyTestCases, GtsWellbeingTestCases, GtsYouTubeTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=9,
-        needs_push_media=True,
-        tag='8.0_r2.all.GtsMemoryHostTestCases_-_GtsYouTubeTestCases',
-        test_name='cheets_GTS.8.0_r2.all.GtsMemoryHostTestCases_-_GtsYouTubeTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsMemoryHostTestCases', '--include-filter', 'GtsMemoryTestCases', '--include-filter', 'GtsModuleMetadataTestCases', '--include-filter', 'GtsNetStatsHostTestCases', '--include-filter', 'GtsNetTestCases', '--include-filter', 'GtsNetworkStackHostTestCases', '--include-filter', 'GtsNetworkWatchlistTestCases', '--include-filter', 'GtsNmgiarcTestCases', '--include-filter', 'GtsNoPermissionTestCases', '--include-filter', 'GtsNoPermissionTestCases25', '--include-filter', 'GtsNotificationTestCases', '--include-filter', 'GtsOemLockServiceTestCases', '--include-filter', 'GtsOsTestCases', '--include-filter', 'GtsPackageInstallTestCases', '--include-filter', 'GtsPackageInstallerTapjackingTestCases', '--include-filter', 'GtsPackageManagerHostTestCases', '--include-filter', 'GtsPackageNameCertPairsDeviceTestCases', '--include-filter', 'GtsPackageUninstallTestCases', '--include-filter', 'GtsPartnerBookmarksTestCases', '--include-filter', 'GtsPermissionControllerHostTestCases', '--include-filter', 'GtsPermissionTestCases', '--include-filter', 'GtsPlacementTestCases', '--include-filter', 'GtsPlayAutoInstallTestCases', '--include-filter', 'GtsPlayFsiHostTestCases', '--include-filter', 'GtsPlayFsiTestCases', '--include-filter', 'GtsPlayStoreHostTestCases', '--include-filter', 'GtsPrintTestCases', '--include-filter', 'GtsPrivacyTestCases', '--include-filter', 'GtsPropertiesTestCases', '--include-filter', 'GtsRegulationComplianceTestCases', '--include-filter', 'GtsRlzTestCases', '--include-filter', 'GtsSampleDeviceTestCases', '--include-filter', 'GtsSampleDynamicConfigTestCases', '--include-filter', 'GtsSampleHostTestCases', '--include-filter', 'GtsScreenshotHostTestCases', '--include-filter', 'GtsSearchHostTestCases', '--include-filter', 'GtsSecurityHostTestCases', '--include-filter', 'GtsSensorHostTestCases', '--include-filter', 'GtsSettingsHostTestCases', '--include-filter', 'GtsSettingsTestCases', '--include-filter', 'GtsSetupWizardHostTestCases', '--include-filter', 'GtsSetupWizardNoPermissionTestCases', '--include-filter', 'GtsSimAppDialogTestCases', '--include-filter', 'GtsSmartBatteryDeviceTestCases', '--include-filter', 'GtsSmsCallLogTestCases', '--include-filter', 'GtsSsaidHostTestCases', '--include-filter', 'GtsStagedInstallHostTestCases', '--include-filter', 'GtsStatsdHostTestCases', '--include-filter', 'GtsStorageTestCases', '--include-filter', 'GtsSupervisionTestCases', '--include-filter', 'GtsSuspendAppsPermissionTestCases', '--include-filter', 'GtsSuspendAppsTestCases', '--include-filter', 'GtsTelecomManagerTests', '--include-filter', 'GtsTelephonyNumberVerificationHostCases', '--include-filter', 'GtsTelephonyTestCases', '--include-filter', 'GtsTestHarnessModeTestCases', '--include-filter', 'GtsTetheringTestCases', '--include-filter', 'GtsTvBugReportTestCases', '--include-filter', 'GtsTvHostTestCases', '--include-filter', 'GtsTvTestCases', '--include-filter', 'GtsUnofficialApisUsageTestCases', '--include-filter', 'GtsUsageStatsTestCases', '--include-filter', 'GtsUserspaceRebootHostSideTestCases', '--include-filter', 'GtsViewTestCases', '--include-filter', 'GtsVndkDependencyTestCases', '--include-filter', 'GtsWebViewHostTestCases', '--include-filter', 'GtsWebViewTestCases', '--include-filter', 'GtsWellbeingHostTestCases', '--include-filter', 'GtsWellbeingPermissionPolicyTestCases', '--include-filter', 'GtsWellbeingTestCases', '--include-filter', 'GtsYouTubeTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='all.GtsMemoryHostTestCases_-_GtsYouTubeTestCases',
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=86400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.8.0_r2.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_GTS/control.8.0_r2.tradefed-run-collect-tests-only-internal
deleted file mode 100644
index f5b0129..0000000
--- a/server/site_tests/cheets_GTS/control.8.0_r2.tradefed-run-collect-tests-only-internal
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.8.0_r2.tradefed-run-collect-tests-only-internal'
-ATTRIBUTES = 'suite:arc-gts, suite:arc-gts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'LENGTHY'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run all of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='8.0_r2.tradefed-run-collect-tests-only-internal',
-        test_name='cheets_GTS.8.0_r2.tradefed-run-collect-tests-only-internal',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'GtsYouTubeTestCases:skip-media-download:true'],
-        retry_template=None,
-        target_module=None,
-        target_plan=None,
-        uri='gs://chromeos-arc-images/cts/bundle/android-gts-8-R2-P-6955212.zip',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAccountsHostTestCases b/server/site_tests/cheets_GTS/control.GtsAccountsHostTestCases
deleted file mode 100644
index 5ccbabb..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAccountsHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAccountsHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAccountsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAccountsHostTestCases',
-        test_name='cheets_GTS.GtsAccountsHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAccountsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAccountsHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAdminTestCases b/server/site_tests/cheets_GTS/control.GtsAdminTestCases
deleted file mode 100644
index 94fc116..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAdminTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAdminTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAdminTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAdminTestCases',
-        test_name='cheets_GTS.GtsAdminTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAdminTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAdminTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAfwTestCases b/server/site_tests/cheets_GTS/control.GtsAfwTestCases
deleted file mode 100644
index 5334947..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAfwTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAfwTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAfwTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAfwTestCases',
-        test_name='cheets_GTS.GtsAfwTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAfwTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAfwTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAndroidAutoDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsAndroidAutoDeviceTestCases
deleted file mode 100644
index 2c23763..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAndroidAutoDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAndroidAutoDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAndroidAutoDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAndroidAutoDeviceTestCases',
-        test_name='cheets_GTS.GtsAndroidAutoDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAndroidAutoDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAndroidAutoDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAppBlacklistDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsAppBlacklistDeviceTestCases
deleted file mode 100644
index 788a4b7..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAppBlacklistDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAppBlacklistDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAppBlacklistDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAppBlacklistDeviceTestCases',
-        test_name='cheets_GTS.GtsAppBlacklistDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAppBlacklistDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAppBlacklistDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAppTestCases b/server/site_tests/cheets_GTS/control.GtsAppTestCases
deleted file mode 100644
index dab852a..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAppTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAppTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAppTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAppTestCases',
-        test_name='cheets_GTS.GtsAppTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAppTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAppTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAppVisibilityDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsAppVisibilityDeviceTestCases
deleted file mode 100644
index dd9c886..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAppVisibilityDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAppVisibilityDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAppVisibilityDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAppVisibilityDeviceTestCases',
-        test_name='cheets_GTS.GtsAppVisibilityDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAppVisibilityDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAppVisibilityDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsArtManagerHostTestCases b/server/site_tests/cheets_GTS/control.GtsArtManagerHostTestCases
deleted file mode 100644
index 94fdb5e..0000000
--- a/server/site_tests/cheets_GTS/control.GtsArtManagerHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsArtManagerHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsArtManagerHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsArtManagerHostTestCases',
-        test_name='cheets_GTS.GtsArtManagerHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsArtManagerHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsArtManagerHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAssistIntentTestCases b/server/site_tests/cheets_GTS/control.GtsAssistIntentTestCases
deleted file mode 100644
index f2996c7..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAssistIntentTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAssistIntentTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistIntentTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAssistIntentTestCases',
-        test_name='cheets_GTS.GtsAssistIntentTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistIntentTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistIntentTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAssistantHostTestCases b/server/site_tests/cheets_GTS/control.GtsAssistantHostTestCases
deleted file mode 100644
index 04d9e0f..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAssistantHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAssistantHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistantHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAssistantHostTestCases',
-        test_name='cheets_GTS.GtsAssistantHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistantHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistantHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAssistantMicHostTestCases b/server/site_tests/cheets_GTS/control.GtsAssistantMicHostTestCases
deleted file mode 100644
index e684d2d..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAssistantMicHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAssistantMicHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistantMicHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAssistantMicHostTestCases',
-        test_name='cheets_GTS.GtsAssistantMicHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistantMicHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistantMicHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=1800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAssistantWorkProfileHostTestCases b/server/site_tests/cheets_GTS/control.GtsAssistantWorkProfileHostTestCases
deleted file mode 100644
index b90da3e..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAssistantWorkProfileHostTestCases
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAssistantWorkProfileHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistantWorkProfileHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAssistantWorkProfileHostTestCases',
-        test_name='cheets_GTS.GtsAssistantWorkProfileHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistantWorkProfileHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistantWorkProfileHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        # This module has a known waived abort bug (b/173331969). As long as the
-        # executed test count matches the known number, assume all tests ran.
-        executable_test_count=[0],
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsAudioTestCases b/server/site_tests/cheets_GTS/control.GtsAudioTestCases
deleted file mode 100644
index 5c973b3..0000000
--- a/server/site_tests/cheets_GTS/control.GtsAudioTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsAudioTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAudioTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAudioTestCases',
-        test_name='cheets_GTS.GtsAudioTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAudioTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAudioTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsBackupHostTestCases b/server/site_tests/cheets_GTS/control.GtsBackupHostTestCases
deleted file mode 100644
index ab18fb5..0000000
--- a/server/site_tests/cheets_GTS/control.GtsBackupHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsBackupHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBackupHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsBackupHostTestCases',
-        test_name='cheets_GTS.GtsBackupHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsBackupHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBackupHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsBackupTestCases b/server/site_tests/cheets_GTS/control.GtsBackupTestCases
deleted file mode 100644
index ddbda21..0000000
--- a/server/site_tests/cheets_GTS/control.GtsBackupTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsBackupTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBackupTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsBackupTestCases',
-        test_name='cheets_GTS.GtsBackupTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsBackupTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBackupTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsBootHealthHostTestCases b/server/site_tests/cheets_GTS/control.GtsBootHealthHostTestCases
deleted file mode 100644
index 5d7c657..0000000
--- a/server/site_tests/cheets_GTS/control.GtsBootHealthHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsBootHealthHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBootHealthHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsBootHealthHostTestCases',
-        test_name='cheets_GTS.GtsBootHealthHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsBootHealthHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBootHealthHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsBootStatsTestCases b/server/site_tests/cheets_GTS/control.GtsBootStatsTestCases
deleted file mode 100644
index 3b490d2..0000000
--- a/server/site_tests/cheets_GTS/control.GtsBootStatsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsBootStatsTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBootStatsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsBootStatsTestCases',
-        test_name='cheets_GTS.GtsBootStatsTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsBootStatsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBootStatsTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsCallLogTestCases b/server/site_tests/cheets_GTS/control.GtsCallLogTestCases
deleted file mode 100644
index 50aef7b..0000000
--- a/server/site_tests/cheets_GTS/control.GtsCallLogTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsCallLogTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCallLogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsCallLogTestCases',
-        test_name='cheets_GTS.GtsCallLogTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCallLogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCallLogTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsCameraTestCases b/server/site_tests/cheets_GTS/control.GtsCameraTestCases
deleted file mode 100644
index e02ad83..0000000
--- a/server/site_tests/cheets_GTS/control.GtsCameraTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsCameraTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCameraTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsCameraTestCases',
-        test_name='cheets_GTS.GtsCameraTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCameraTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCameraTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsCastHostTestCases b/server/site_tests/cheets_GTS/control.GtsCastHostTestCases
deleted file mode 100644
index a4aa291..0000000
--- a/server/site_tests/cheets_GTS/control.GtsCastHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsCastHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCastHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsCastHostTestCases',
-        test_name='cheets_GTS.GtsCastHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCastHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCastHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsContactsAppDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsContactsAppDeviceTestCases
deleted file mode 100644
index 5f17a8a..0000000
--- a/server/site_tests/cheets_GTS/control.GtsContactsAppDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsContactsAppDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContactsAppDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsContactsAppDeviceTestCases',
-        test_name='cheets_GTS.GtsContactsAppDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContactsAppDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContactsAppDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsContactsTest b/server/site_tests/cheets_GTS/control.GtsContactsTest
deleted file mode 100644
index e25dea3..0000000
--- a/server/site_tests/cheets_GTS/control.GtsContactsTest
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsContactsTest'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContactsTest of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsContactsTest',
-        test_name='cheets_GTS.GtsContactsTest',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContactsTest', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContactsTest',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsContentHostTestCases b/server/site_tests/cheets_GTS/control.GtsContentHostTestCases
deleted file mode 100644
index e20e8d9..0000000
--- a/server/site_tests/cheets_GTS/control.GtsContentHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsContentHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContentHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsContentHostTestCases',
-        test_name='cheets_GTS.GtsContentHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContentHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContentHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsContentTestCases b/server/site_tests/cheets_GTS/control.GtsContentTestCases
deleted file mode 100644
index 5e2a1ea..0000000
--- a/server/site_tests/cheets_GTS/control.GtsContentTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsContentTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContentTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsContentTestCases',
-        test_name='cheets_GTS.GtsContentTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContentTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContentTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsContextHubPermissionDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsContextHubPermissionDeviceTestCases
deleted file mode 100644
index 7793f3b..0000000
--- a/server/site_tests/cheets_GTS/control.GtsContextHubPermissionDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsContextHubPermissionDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContextHubPermissionDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsContextHubPermissionDeviceTestCases',
-        test_name='cheets_GTS.GtsContextHubPermissionDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContextHubPermissionDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContextHubPermissionDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsDebugfsMountTestCases b/server/site_tests/cheets_GTS/control.GtsDebugfsMountTestCases
deleted file mode 100644
index cc7710a..0000000
--- a/server/site_tests/cheets_GTS/control.GtsDebugfsMountTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsDebugfsMountTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDebugfsMountTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDebugfsMountTestCases',
-        test_name='cheets_GTS.GtsDebugfsMountTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDebugfsMountTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDebugfsMountTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsDeviceConfigTestCases b/server/site_tests/cheets_GTS/control.GtsDeviceConfigTestCases
deleted file mode 100644
index 1247ce7..0000000
--- a/server/site_tests/cheets_GTS/control.GtsDeviceConfigTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsDeviceConfigTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDeviceConfigTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDeviceConfigTestCases',
-        test_name='cheets_GTS.GtsDeviceConfigTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDeviceConfigTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDeviceConfigTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsDexModuleRegistrationTestCases b/server/site_tests/cheets_GTS/control.GtsDexModuleRegistrationTestCases
deleted file mode 100644
index db57da4..0000000
--- a/server/site_tests/cheets_GTS/control.GtsDexModuleRegistrationTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsDexModuleRegistrationTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDexModuleRegistrationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDexModuleRegistrationTestCases',
-        test_name='cheets_GTS.GtsDexModuleRegistrationTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDexModuleRegistrationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDexModuleRegistrationTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsDialerAudioTestCases b/server/site_tests/cheets_GTS/control.GtsDialerAudioTestCases
deleted file mode 100644
index 379a86b..0000000
--- a/server/site_tests/cheets_GTS/control.GtsDialerAudioTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsDialerAudioTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDialerAudioTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDialerAudioTestCases',
-        test_name='cheets_GTS.GtsDialerAudioTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDialerAudioTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDialerAudioTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsDialerDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsDialerDeviceTestCases
deleted file mode 100644
index b0a9479..0000000
--- a/server/site_tests/cheets_GTS/control.GtsDialerDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsDialerDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDialerDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDialerDeviceTestCases',
-        test_name='cheets_GTS.GtsDialerDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDialerDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDialerDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsDozeDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsDozeDeviceTestCases
deleted file mode 100644
index 7ab4be7..0000000
--- a/server/site_tests/cheets_GTS/control.GtsDozeDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsDozeDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDozeDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDozeDeviceTestCases',
-        test_name='cheets_GTS.GtsDozeDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDozeDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDozeDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsDozeHostSideTestCases b/server/site_tests/cheets_GTS/control.GtsDozeHostSideTestCases
deleted file mode 100644
index e43f8b9..0000000
--- a/server/site_tests/cheets_GTS/control.GtsDozeHostSideTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsDozeHostSideTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDozeHostSideTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDozeHostSideTestCases',
-        test_name='cheets_GTS.GtsDozeHostSideTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDozeHostSideTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDozeHostSideTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsDuoReadyTestCases b/server/site_tests/cheets_GTS/control.GtsDuoReadyTestCases
deleted file mode 100644
index e765df1..0000000
--- a/server/site_tests/cheets_GTS/control.GtsDuoReadyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsDuoReadyTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDuoReadyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDuoReadyTestCases',
-        test_name='cheets_GTS.GtsDuoReadyTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDuoReadyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDuoReadyTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsEdiHostTestCases b/server/site_tests/cheets_GTS/control.GtsEdiHostTestCases
deleted file mode 100644
index 4dcca56..0000000
--- a/server/site_tests/cheets_GTS/control.GtsEdiHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsEdiHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsEdiHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsEdiHostTestCases',
-        test_name='cheets_GTS.GtsEdiHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsEdiHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsEdiHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsExoPlayerTestCases b/server/site_tests/cheets_GTS/control.GtsExoPlayerTestCases
deleted file mode 100644
index d2db0cc..0000000
--- a/server/site_tests/cheets_GTS/control.GtsExoPlayerTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsExoPlayerTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsExoPlayerTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=5,
-        tag='GtsExoPlayerTestCases',
-        test_name='cheets_GTS.GtsExoPlayerTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsExoPlayerTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsExoPlayerTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsFeaturesTestCases b/server/site_tests/cheets_GTS/control.GtsFeaturesTestCases
deleted file mode 100644
index 55e8c9a..0000000
--- a/server/site_tests/cheets_GTS/control.GtsFeaturesTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsFeaturesTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsFeaturesTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsFeaturesTestCases',
-        test_name='cheets_GTS.GtsFeaturesTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsFeaturesTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsFeaturesTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsGmscoreHostTestCases b/server/site_tests/cheets_GTS/control.GtsGmscoreHostTestCases
deleted file mode 100644
index 8626c73..0000000
--- a/server/site_tests/cheets_GTS/control.GtsGmscoreHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsGmscoreHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsGmscoreHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsGmscoreHostTestCases',
-        test_name='cheets_GTS.GtsGmscoreHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGmscoreHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsGmscoreHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsGraphicsHostTestCases b/server/site_tests/cheets_GTS/control.GtsGraphicsHostTestCases
deleted file mode 100644
index 69a03f9..0000000
--- a/server/site_tests/cheets_GTS/control.GtsGraphicsHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsGraphicsHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsGraphicsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsGraphicsHostTestCases',
-        test_name='cheets_GTS.GtsGraphicsHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGraphicsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsGraphicsHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsHomeHostTestCases b/server/site_tests/cheets_GTS/control.GtsHomeHostTestCases
deleted file mode 100644
index e9e6c0b..0000000
--- a/server/site_tests/cheets_GTS/control.GtsHomeHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsHomeHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsHomeHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsHomeHostTestCases',
-        test_name='cheets_GTS.GtsHomeHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsHomeHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsHomeHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsIncidentConfirmationTestCases b/server/site_tests/cheets_GTS/control.GtsIncidentConfirmationTestCases
deleted file mode 100644
index 99ad250..0000000
--- a/server/site_tests/cheets_GTS/control.GtsIncidentConfirmationTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsIncidentConfirmationTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncidentConfirmationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsIncidentConfirmationTestCases',
-        test_name='cheets_GTS.GtsIncidentConfirmationTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsIncidentConfirmationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncidentConfirmationTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsIncidentManagerTestCases b/server/site_tests/cheets_GTS/control.GtsIncidentManagerTestCases
deleted file mode 100644
index b630b98..0000000
--- a/server/site_tests/cheets_GTS/control.GtsIncidentManagerTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsIncidentManagerTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncidentManagerTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsIncidentManagerTestCases',
-        test_name='cheets_GTS.GtsIncidentManagerTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsIncidentManagerTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncidentManagerTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsIncrementalInstallProxyHostTestCases b/server/site_tests/cheets_GTS/control.GtsIncrementalInstallProxyHostTestCases
deleted file mode 100644
index a9c2ce4..0000000
--- a/server/site_tests/cheets_GTS/control.GtsIncrementalInstallProxyHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsIncrementalInstallProxyHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncrementalInstallProxyHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsIncrementalInstallProxyHostTestCases',
-        test_name='cheets_GTS.GtsIncrementalInstallProxyHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsIncrementalInstallProxyHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncrementalInstallProxyHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsIncrementalInstallTestCases b/server/site_tests/cheets_GTS/control.GtsIncrementalInstallTestCases
deleted file mode 100644
index 7ae546b..0000000
--- a/server/site_tests/cheets_GTS/control.GtsIncrementalInstallTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsIncrementalInstallTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncrementalInstallTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsIncrementalInstallTestCases',
-        test_name='cheets_GTS.GtsIncrementalInstallTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsIncrementalInstallTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncrementalInstallTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsIncrementalInstallTriggerApp b/server/site_tests/cheets_GTS/control.GtsIncrementalInstallTriggerApp
deleted file mode 100644
index 3b43def..0000000
--- a/server/site_tests/cheets_GTS/control.GtsIncrementalInstallTriggerApp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsIncrementalInstallTriggerApp'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncrementalInstallTriggerApp of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsIncrementalInstallTriggerApp',
-        test_name='cheets_GTS.GtsIncrementalInstallTriggerApp',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsIncrementalInstallTriggerApp', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncrementalInstallTriggerApp',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsInstallPackagesWhitelistDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsInstallPackagesWhitelistDeviceTestCases
deleted file mode 100644
index c707d1f..0000000
--- a/server/site_tests/cheets_GTS/control.GtsInstallPackagesWhitelistDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsInstallPackagesWhitelistDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstallPackagesWhitelistDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsInstallPackagesWhitelistDeviceTestCases',
-        test_name='cheets_GTS.GtsInstallPackagesWhitelistDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstallPackagesWhitelistDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstallPackagesWhitelistDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsInstallerV2TestCases b/server/site_tests/cheets_GTS/control.GtsInstallerV2TestCases
deleted file mode 100644
index a509225..0000000
--- a/server/site_tests/cheets_GTS/control.GtsInstallerV2TestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsInstallerV2TestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstallerV2TestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsInstallerV2TestCases',
-        test_name='cheets_GTS.GtsInstallerV2TestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstallerV2TestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstallerV2TestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsInstantAppsHostTestCases b/server/site_tests/cheets_GTS/control.GtsInstantAppsHostTestCases
deleted file mode 100644
index 5f6c7d5..0000000
--- a/server/site_tests/cheets_GTS/control.GtsInstantAppsHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsInstantAppsHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstantAppsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsInstantAppsHostTestCases',
-        test_name='cheets_GTS.GtsInstantAppsHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstantAppsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstantAppsHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsLargeApkHostTestCases b/server/site_tests/cheets_GTS/control.GtsLargeApkHostTestCases
deleted file mode 100644
index c3a3172..0000000
--- a/server/site_tests/cheets_GTS/control.GtsLargeApkHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsLargeApkHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLargeApkHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsLargeApkHostTestCases',
-        test_name='cheets_GTS.GtsLargeApkHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLargeApkHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLargeApkHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsLensTestCases b/server/site_tests/cheets_GTS/control.GtsLensTestCases
deleted file mode 100644
index 4a4be49..0000000
--- a/server/site_tests/cheets_GTS/control.GtsLensTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsLensTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLensTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsLensTestCases',
-        test_name='cheets_GTS.GtsLensTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLensTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLensTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsLinkerConfigTestCases b/server/site_tests/cheets_GTS/control.GtsLinkerConfigTestCases
deleted file mode 100644
index 1eadd96..0000000
--- a/server/site_tests/cheets_GTS/control.GtsLinkerConfigTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsLinkerConfigTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLinkerConfigTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsLinkerConfigTestCases',
-        test_name='cheets_GTS.GtsLinkerConfigTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLinkerConfigTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLinkerConfigTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsLocationHostTestCases b/server/site_tests/cheets_GTS/control.GtsLocationHostTestCases
deleted file mode 100644
index c8d5757..0000000
--- a/server/site_tests/cheets_GTS/control.GtsLocationHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsLocationHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLocationHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsLocationHostTestCases',
-        test_name='cheets_GTS.GtsLocationHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLocationHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLocationHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsLocationTestCases b/server/site_tests/cheets_GTS/control.GtsLocationTestCases
deleted file mode 100644
index 2b922a1..0000000
--- a/server/site_tests/cheets_GTS/control.GtsLocationTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsLocationTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLocationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsLocationTestCases',
-        test_name='cheets_GTS.GtsLocationTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLocationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLocationTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsMediaTestCases b/server/site_tests/cheets_GTS/control.GtsMediaTestCases
deleted file mode 100644
index 6ab6379..0000000
--- a/server/site_tests/cheets_GTS/control.GtsMediaTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsMediaTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 307200
-PRIORITY = 50
-DOC = 'Run module GtsMediaTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=5,
-        tag='GtsMediaTestCases',
-        test_name='cheets_GTS.GtsMediaTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsMediaTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsMediaTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsMemoryHostTestCases b/server/site_tests/cheets_GTS/control.GtsMemoryHostTestCases
deleted file mode 100644
index 35dee583..0000000
--- a/server/site_tests/cheets_GTS/control.GtsMemoryHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsMemoryHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsMemoryHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsMemoryHostTestCases',
-        test_name='cheets_GTS.GtsMemoryHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsMemoryHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsMemoryHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsMemoryTestCases b/server/site_tests/cheets_GTS/control.GtsMemoryTestCases
deleted file mode 100644
index fc4c437..0000000
--- a/server/site_tests/cheets_GTS/control.GtsMemoryTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsMemoryTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsMemoryTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsMemoryTestCases',
-        test_name='cheets_GTS.GtsMemoryTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsMemoryTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsMemoryTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsModuleMetadataTestCases b/server/site_tests/cheets_GTS/control.GtsModuleMetadataTestCases
deleted file mode 100644
index 3493dab..0000000
--- a/server/site_tests/cheets_GTS/control.GtsModuleMetadataTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsModuleMetadataTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsModuleMetadataTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsModuleMetadataTestCases',
-        test_name='cheets_GTS.GtsModuleMetadataTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsModuleMetadataTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsModuleMetadataTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsNetStatsHostTestCases b/server/site_tests/cheets_GTS/control.GtsNetStatsHostTestCases
deleted file mode 100644
index 4da51f0..0000000
--- a/server/site_tests/cheets_GTS/control.GtsNetStatsHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsNetStatsHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetStatsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNetStatsHostTestCases',
-        test_name='cheets_GTS.GtsNetStatsHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNetStatsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNetStatsHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsNetTestCases b/server/site_tests/cheets_GTS/control.GtsNetTestCases
deleted file mode 100644
index 3b2bdd5..0000000
--- a/server/site_tests/cheets_GTS/control.GtsNetTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsNetTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNetTestCases',
-        test_name='cheets_GTS.GtsNetTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNetTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNetTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsNetworkStackHostTestCases b/server/site_tests/cheets_GTS/control.GtsNetworkStackHostTestCases
deleted file mode 100644
index ca74a4a..0000000
--- a/server/site_tests/cheets_GTS/control.GtsNetworkStackHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsNetworkStackHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetworkStackHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNetworkStackHostTestCases',
-        test_name='cheets_GTS.GtsNetworkStackHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNetworkStackHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNetworkStackHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsNetworkWatchlistTestCases b/server/site_tests/cheets_GTS/control.GtsNetworkWatchlistTestCases
deleted file mode 100644
index ab25633..0000000
--- a/server/site_tests/cheets_GTS/control.GtsNetworkWatchlistTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsNetworkWatchlistTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetworkWatchlistTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNetworkWatchlistTestCases',
-        test_name='cheets_GTS.GtsNetworkWatchlistTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNetworkWatchlistTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNetworkWatchlistTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsNmgiarcTestCases b/server/site_tests/cheets_GTS/control.GtsNmgiarcTestCases
deleted file mode 100644
index b61c34c..0000000
--- a/server/site_tests/cheets_GTS/control.GtsNmgiarcTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsNmgiarcTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNmgiarcTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNmgiarcTestCases',
-        test_name='cheets_GTS.GtsNmgiarcTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNmgiarcTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNmgiarcTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsNoPermissionTestCases b/server/site_tests/cheets_GTS/control.GtsNoPermissionTestCases
deleted file mode 100644
index ce5060d..0000000
--- a/server/site_tests/cheets_GTS/control.GtsNoPermissionTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsNoPermissionTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNoPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNoPermissionTestCases',
-        test_name='cheets_GTS.GtsNoPermissionTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNoPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNoPermissionTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsNoPermissionTestCases25 b/server/site_tests/cheets_GTS/control.GtsNoPermissionTestCases25
deleted file mode 100644
index 7b6ec57..0000000
--- a/server/site_tests/cheets_GTS/control.GtsNoPermissionTestCases25
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsNoPermissionTestCases25'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNoPermissionTestCases25 of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNoPermissionTestCases25',
-        test_name='cheets_GTS.GtsNoPermissionTestCases25',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNoPermissionTestCases25', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNoPermissionTestCases25',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsNotificationTestCases b/server/site_tests/cheets_GTS/control.GtsNotificationTestCases
deleted file mode 100644
index 1b663b0..0000000
--- a/server/site_tests/cheets_GTS/control.GtsNotificationTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsNotificationTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNotificationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNotificationTestCases',
-        test_name='cheets_GTS.GtsNotificationTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNotificationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNotificationTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsOemLockServiceTestCases b/server/site_tests/cheets_GTS/control.GtsOemLockServiceTestCases
deleted file mode 100644
index 0e115a8..0000000
--- a/server/site_tests/cheets_GTS/control.GtsOemLockServiceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsOemLockServiceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsOemLockServiceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsOemLockServiceTestCases',
-        test_name='cheets_GTS.GtsOemLockServiceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsOemLockServiceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsOemLockServiceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsOsTestCases b/server/site_tests/cheets_GTS/control.GtsOsTestCases
deleted file mode 100644
index 8147c4a..0000000
--- a/server/site_tests/cheets_GTS/control.GtsOsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsOsTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsOsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsOsTestCases',
-        test_name='cheets_GTS.GtsOsTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsOsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsOsTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPackageInstallTestCases b/server/site_tests/cheets_GTS/control.GtsPackageInstallTestCases
deleted file mode 100644
index b0eb161..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPackageInstallTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPackageInstallTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageInstallTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPackageInstallTestCases',
-        test_name='cheets_GTS.GtsPackageInstallTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPackageInstallTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackageInstallTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPackageInstallerTapjackingTestCases b/server/site_tests/cheets_GTS/control.GtsPackageInstallerTapjackingTestCases
deleted file mode 100644
index c127601..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPackageInstallerTapjackingTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPackageInstallerTapjackingTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageInstallerTapjackingTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPackageInstallerTapjackingTestCases',
-        test_name='cheets_GTS.GtsPackageInstallerTapjackingTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPackageInstallerTapjackingTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackageInstallerTapjackingTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPackageManagerHostTestCases b/server/site_tests/cheets_GTS/control.GtsPackageManagerHostTestCases
deleted file mode 100644
index fcc34d5..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPackageManagerHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPackageManagerHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageManagerHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPackageManagerHostTestCases',
-        test_name='cheets_GTS.GtsPackageManagerHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPackageManagerHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackageManagerHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPackageNameCertPairsDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsPackageNameCertPairsDeviceTestCases
deleted file mode 100644
index 0be8d14..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPackageNameCertPairsDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPackageNameCertPairsDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageNameCertPairsDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPackageNameCertPairsDeviceTestCases',
-        test_name='cheets_GTS.GtsPackageNameCertPairsDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPackageNameCertPairsDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackageNameCertPairsDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPackageUninstallTestCases b/server/site_tests/cheets_GTS/control.GtsPackageUninstallTestCases
deleted file mode 100644
index e7b43bc..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPackageUninstallTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPackageUninstallTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageUninstallTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPackageUninstallTestCases',
-        test_name='cheets_GTS.GtsPackageUninstallTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPackageUninstallTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackageUninstallTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPartnerBookmarksTestCases b/server/site_tests/cheets_GTS/control.GtsPartnerBookmarksTestCases
deleted file mode 100644
index e43c32a..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPartnerBookmarksTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPartnerBookmarksTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPartnerBookmarksTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPartnerBookmarksTestCases',
-        test_name='cheets_GTS.GtsPartnerBookmarksTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPartnerBookmarksTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPartnerBookmarksTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPermissionControllerHostTestCases b/server/site_tests/cheets_GTS/control.GtsPermissionControllerHostTestCases
deleted file mode 100644
index aa331eb..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPermissionControllerHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPermissionControllerHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPermissionControllerHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPermissionControllerHostTestCases',
-        test_name='cheets_GTS.GtsPermissionControllerHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPermissionControllerHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPermissionControllerHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPermissionTestCases b/server/site_tests/cheets_GTS/control.GtsPermissionTestCases
deleted file mode 100644
index a968b5d..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPermissionTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPermissionTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPermissionTestCases',
-        test_name='cheets_GTS.GtsPermissionTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPermissionTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPlacementTestCases b/server/site_tests/cheets_GTS/control.GtsPlacementTestCases
deleted file mode 100644
index 3ad25d0..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPlacementTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPlacementTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlacementTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPlacementTestCases',
-        test_name='cheets_GTS.GtsPlacementTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlacementTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlacementTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPlayAutoInstallTestCases b/server/site_tests/cheets_GTS/control.GtsPlayAutoInstallTestCases
deleted file mode 100644
index 9e150fc..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPlayAutoInstallTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPlayAutoInstallTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlayAutoInstallTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPlayAutoInstallTestCases',
-        test_name='cheets_GTS.GtsPlayAutoInstallTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlayAutoInstallTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlayAutoInstallTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPlayFsiHostTestCases b/server/site_tests/cheets_GTS/control.GtsPlayFsiHostTestCases
deleted file mode 100644
index f1650fb..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPlayFsiHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPlayFsiHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlayFsiHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPlayFsiHostTestCases',
-        test_name='cheets_GTS.GtsPlayFsiHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlayFsiHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlayFsiHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPlayFsiTestCases b/server/site_tests/cheets_GTS/control.GtsPlayFsiTestCases
deleted file mode 100644
index e303914..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPlayFsiTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPlayFsiTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlayFsiTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPlayFsiTestCases',
-        test_name='cheets_GTS.GtsPlayFsiTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlayFsiTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlayFsiTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPlayStoreHostTestCases b/server/site_tests/cheets_GTS/control.GtsPlayStoreHostTestCases
deleted file mode 100644
index 50d388d..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPlayStoreHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPlayStoreHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlayStoreHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPlayStoreHostTestCases',
-        test_name='cheets_GTS.GtsPlayStoreHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlayStoreHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlayStoreHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPrintTestCases b/server/site_tests/cheets_GTS/control.GtsPrintTestCases
deleted file mode 100644
index cdca45a..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPrintTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPrintTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPrintTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPrintTestCases',
-        test_name='cheets_GTS.GtsPrintTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrintTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPrintTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPrivacyTestCases b/server/site_tests/cheets_GTS/control.GtsPrivacyTestCases
deleted file mode 100644
index b312800..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPrivacyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPrivacyTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPrivacyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPrivacyTestCases',
-        test_name='cheets_GTS.GtsPrivacyTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrivacyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPrivacyTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsPropertiesTestCases b/server/site_tests/cheets_GTS/control.GtsPropertiesTestCases
deleted file mode 100644
index fd8e01b..0000000
--- a/server/site_tests/cheets_GTS/control.GtsPropertiesTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsPropertiesTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPropertiesTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPropertiesTestCases',
-        test_name='cheets_GTS.GtsPropertiesTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPropertiesTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPropertiesTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsRegulationComplianceTestCases b/server/site_tests/cheets_GTS/control.GtsRegulationComplianceTestCases
deleted file mode 100644
index 5be3653..0000000
--- a/server/site_tests/cheets_GTS/control.GtsRegulationComplianceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsRegulationComplianceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsRegulationComplianceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsRegulationComplianceTestCases',
-        test_name='cheets_GTS.GtsRegulationComplianceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsRegulationComplianceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsRegulationComplianceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsRlzTestCases b/server/site_tests/cheets_GTS/control.GtsRlzTestCases
deleted file mode 100644
index 63fa1e6..0000000
--- a/server/site_tests/cheets_GTS/control.GtsRlzTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsRlzTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsRlzTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsRlzTestCases',
-        test_name='cheets_GTS.GtsRlzTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsRlzTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsRlzTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSampleDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsSampleDeviceTestCases
deleted file mode 100644
index 564cf58..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSampleDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSampleDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSampleDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSampleDeviceTestCases',
-        test_name='cheets_GTS.GtsSampleDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSampleDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSampleDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSampleDynamicConfigTestCases b/server/site_tests/cheets_GTS/control.GtsSampleDynamicConfigTestCases
deleted file mode 100644
index 0cb0c07..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSampleDynamicConfigTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSampleDynamicConfigTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSampleDynamicConfigTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSampleDynamicConfigTestCases',
-        test_name='cheets_GTS.GtsSampleDynamicConfigTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSampleDynamicConfigTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSampleDynamicConfigTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSampleHostTestCases b/server/site_tests/cheets_GTS/control.GtsSampleHostTestCases
deleted file mode 100644
index 64adaea..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSampleHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSampleHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSampleHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSampleHostTestCases',
-        test_name='cheets_GTS.GtsSampleHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSampleHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSampleHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsScreenshotHostTestCases b/server/site_tests/cheets_GTS/control.GtsScreenshotHostTestCases
deleted file mode 100644
index 42d797f..0000000
--- a/server/site_tests/cheets_GTS/control.GtsScreenshotHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsScreenshotHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsScreenshotHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsScreenshotHostTestCases',
-        test_name='cheets_GTS.GtsScreenshotHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsScreenshotHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsScreenshotHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSearchHostTestCases b/server/site_tests/cheets_GTS/control.GtsSearchHostTestCases
deleted file mode 100644
index 1df012f..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSearchHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSearchHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSearchHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSearchHostTestCases',
-        test_name='cheets_GTS.GtsSearchHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSearchHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSearchHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSecurityHostTestCases b/server/site_tests/cheets_GTS/control.GtsSecurityHostTestCases
deleted file mode 100644
index 2565f74..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSecurityHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSecurityHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSecurityHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSecurityHostTestCases',
-        test_name='cheets_GTS.GtsSecurityHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSecurityHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSecurityHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSensorHostTestCases b/server/site_tests/cheets_GTS/control.GtsSensorHostTestCases
deleted file mode 100644
index 46cb513..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSensorHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSensorHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSensorHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSensorHostTestCases',
-        test_name='cheets_GTS.GtsSensorHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSensorHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSensorHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSettingsHostTestCases b/server/site_tests/cheets_GTS/control.GtsSettingsHostTestCases
deleted file mode 100644
index ab2dff8..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSettingsHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSettingsHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSettingsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSettingsHostTestCases',
-        test_name='cheets_GTS.GtsSettingsHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSettingsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSettingsHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSettingsTestCases b/server/site_tests/cheets_GTS/control.GtsSettingsTestCases
deleted file mode 100644
index a19b65c..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSettingsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSettingsTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSettingsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSettingsTestCases',
-        test_name='cheets_GTS.GtsSettingsTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSettingsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSettingsTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSetupWizardHostTestCases b/server/site_tests/cheets_GTS/control.GtsSetupWizardHostTestCases
deleted file mode 100644
index 446e483..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSetupWizardHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSetupWizardHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSetupWizardHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSetupWizardHostTestCases',
-        test_name='cheets_GTS.GtsSetupWizardHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSetupWizardHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSetupWizardHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSetupWizardNoPermissionTestCases b/server/site_tests/cheets_GTS/control.GtsSetupWizardNoPermissionTestCases
deleted file mode 100644
index afb3882..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSetupWizardNoPermissionTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSetupWizardNoPermissionTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSetupWizardNoPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSetupWizardNoPermissionTestCases',
-        test_name='cheets_GTS.GtsSetupWizardNoPermissionTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSetupWizardNoPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSetupWizardNoPermissionTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSimAppDialogTestCases b/server/site_tests/cheets_GTS/control.GtsSimAppDialogTestCases
deleted file mode 100644
index 4d68e3c..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSimAppDialogTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSimAppDialogTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSimAppDialogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSimAppDialogTestCases',
-        test_name='cheets_GTS.GtsSimAppDialogTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSimAppDialogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSimAppDialogTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSmartBatteryDeviceTestCases b/server/site_tests/cheets_GTS/control.GtsSmartBatteryDeviceTestCases
deleted file mode 100644
index b8f60bf..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSmartBatteryDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSmartBatteryDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSmartBatteryDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSmartBatteryDeviceTestCases',
-        test_name='cheets_GTS.GtsSmartBatteryDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSmartBatteryDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSmartBatteryDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSmsCallLogTestCases b/server/site_tests/cheets_GTS/control.GtsSmsCallLogTestCases
deleted file mode 100644
index 8d389d5..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSmsCallLogTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSmsCallLogTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSmsCallLogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSmsCallLogTestCases',
-        test_name='cheets_GTS.GtsSmsCallLogTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSmsCallLogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSmsCallLogTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSsaidHostTestCases b/server/site_tests/cheets_GTS/control.GtsSsaidHostTestCases
deleted file mode 100644
index dfcae6c..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSsaidHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSsaidHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSsaidHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSsaidHostTestCases',
-        test_name='cheets_GTS.GtsSsaidHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSsaidHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSsaidHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsStagedInstallHostTestCases b/server/site_tests/cheets_GTS/control.GtsStagedInstallHostTestCases
deleted file mode 100644
index d2e088b..0000000
--- a/server/site_tests/cheets_GTS/control.GtsStagedInstallHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsStagedInstallHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStagedInstallHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsStagedInstallHostTestCases',
-        test_name='cheets_GTS.GtsStagedInstallHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStagedInstallHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStagedInstallHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsStatsdHostTestCases b/server/site_tests/cheets_GTS/control.GtsStatsdHostTestCases
deleted file mode 100644
index 084ae65..0000000
--- a/server/site_tests/cheets_GTS/control.GtsStatsdHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsStatsdHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStatsdHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsStatsdHostTestCases',
-        test_name='cheets_GTS.GtsStatsdHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStatsdHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStatsdHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsStorageTestCases b/server/site_tests/cheets_GTS/control.GtsStorageTestCases
deleted file mode 100644
index 386cea8..0000000
--- a/server/site_tests/cheets_GTS/control.GtsStorageTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsStorageTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStorageTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsStorageTestCases',
-        test_name='cheets_GTS.GtsStorageTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStorageTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStorageTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSupervisionTestCases b/server/site_tests/cheets_GTS/control.GtsSupervisionTestCases
deleted file mode 100644
index f71479e..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSupervisionTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSupervisionTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSupervisionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSupervisionTestCases',
-        test_name='cheets_GTS.GtsSupervisionTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSupervisionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSupervisionTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSuspendAppsPermissionTestCases b/server/site_tests/cheets_GTS/control.GtsSuspendAppsPermissionTestCases
deleted file mode 100644
index c3419ca..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSuspendAppsPermissionTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSuspendAppsPermissionTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSuspendAppsPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSuspendAppsPermissionTestCases',
-        test_name='cheets_GTS.GtsSuspendAppsPermissionTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSuspendAppsPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSuspendAppsPermissionTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsSuspendAppsTestCases b/server/site_tests/cheets_GTS/control.GtsSuspendAppsTestCases
deleted file mode 100644
index 39820d25..0000000
--- a/server/site_tests/cheets_GTS/control.GtsSuspendAppsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsSuspendAppsTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSuspendAppsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSuspendAppsTestCases',
-        test_name='cheets_GTS.GtsSuspendAppsTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSuspendAppsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSuspendAppsTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsTelecomManagerTests b/server/site_tests/cheets_GTS/control.GtsTelecomManagerTests
deleted file mode 100644
index 58763db..0000000
--- a/server/site_tests/cheets_GTS/control.GtsTelecomManagerTests
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsTelecomManagerTests'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTelecomManagerTests of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTelecomManagerTests',
-        test_name='cheets_GTS.GtsTelecomManagerTests',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTelecomManagerTests', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTelecomManagerTests',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsTelephonyNumberVerificationHostCases b/server/site_tests/cheets_GTS/control.GtsTelephonyNumberVerificationHostCases
deleted file mode 100644
index c6cf032..0000000
--- a/server/site_tests/cheets_GTS/control.GtsTelephonyNumberVerificationHostCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsTelephonyNumberVerificationHostCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTelephonyNumberVerificationHostCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTelephonyNumberVerificationHostCases',
-        test_name='cheets_GTS.GtsTelephonyNumberVerificationHostCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTelephonyNumberVerificationHostCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTelephonyNumberVerificationHostCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsTelephonyTestCases b/server/site_tests/cheets_GTS/control.GtsTelephonyTestCases
deleted file mode 100644
index 47c5e27..0000000
--- a/server/site_tests/cheets_GTS/control.GtsTelephonyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsTelephonyTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTelephonyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTelephonyTestCases',
-        test_name='cheets_GTS.GtsTelephonyTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTelephonyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTelephonyTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsTestHarnessModeTestCases b/server/site_tests/cheets_GTS/control.GtsTestHarnessModeTestCases
deleted file mode 100644
index 2f2c8eb..0000000
--- a/server/site_tests/cheets_GTS/control.GtsTestHarnessModeTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsTestHarnessModeTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTestHarnessModeTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTestHarnessModeTestCases',
-        test_name='cheets_GTS.GtsTestHarnessModeTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTestHarnessModeTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTestHarnessModeTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsTetheringTestCases b/server/site_tests/cheets_GTS/control.GtsTetheringTestCases
deleted file mode 100644
index 858a5ef..0000000
--- a/server/site_tests/cheets_GTS/control.GtsTetheringTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsTetheringTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTetheringTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTetheringTestCases',
-        test_name='cheets_GTS.GtsTetheringTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTetheringTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTetheringTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsTvBugReportTestCases b/server/site_tests/cheets_GTS/control.GtsTvBugReportTestCases
deleted file mode 100644
index caa8c1f..0000000
--- a/server/site_tests/cheets_GTS/control.GtsTvBugReportTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsTvBugReportTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTvBugReportTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTvBugReportTestCases',
-        test_name='cheets_GTS.GtsTvBugReportTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTvBugReportTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTvBugReportTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsTvHostTestCases b/server/site_tests/cheets_GTS/control.GtsTvHostTestCases
deleted file mode 100644
index dedf9f7..0000000
--- a/server/site_tests/cheets_GTS/control.GtsTvHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsTvHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTvHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTvHostTestCases',
-        test_name='cheets_GTS.GtsTvHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTvHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTvHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsTvTestCases b/server/site_tests/cheets_GTS/control.GtsTvTestCases
deleted file mode 100644
index 17bfb48..0000000
--- a/server/site_tests/cheets_GTS/control.GtsTvTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsTvTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTvTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTvTestCases',
-        test_name='cheets_GTS.GtsTvTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTvTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTvTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsUnofficialApisUsageTestCases b/server/site_tests/cheets_GTS/control.GtsUnofficialApisUsageTestCases
deleted file mode 100644
index 4c30484..0000000
--- a/server/site_tests/cheets_GTS/control.GtsUnofficialApisUsageTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsUnofficialApisUsageTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUnofficialApisUsageTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsUnofficialApisUsageTestCases',
-        test_name='cheets_GTS.GtsUnofficialApisUsageTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUnofficialApisUsageTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUnofficialApisUsageTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsUsageStatsTestCases b/server/site_tests/cheets_GTS/control.GtsUsageStatsTestCases
deleted file mode 100644
index cd95f0e..0000000
--- a/server/site_tests/cheets_GTS/control.GtsUsageStatsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsUsageStatsTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUsageStatsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsUsageStatsTestCases',
-        test_name='cheets_GTS.GtsUsageStatsTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUsageStatsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUsageStatsTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsUserspaceRebootHostSideTestCases b/server/site_tests/cheets_GTS/control.GtsUserspaceRebootHostSideTestCases
deleted file mode 100644
index c653bb2..0000000
--- a/server/site_tests/cheets_GTS/control.GtsUserspaceRebootHostSideTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsUserspaceRebootHostSideTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUserspaceRebootHostSideTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsUserspaceRebootHostSideTestCases',
-        test_name='cheets_GTS.GtsUserspaceRebootHostSideTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUserspaceRebootHostSideTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUserspaceRebootHostSideTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsViewTestCases b/server/site_tests/cheets_GTS/control.GtsViewTestCases
deleted file mode 100644
index 7d32d46..0000000
--- a/server/site_tests/cheets_GTS/control.GtsViewTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsViewTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsViewTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsViewTestCases',
-        test_name='cheets_GTS.GtsViewTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsViewTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsViewTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsVndkDependencyTestCases b/server/site_tests/cheets_GTS/control.GtsVndkDependencyTestCases
deleted file mode 100644
index b9080c5..0000000
--- a/server/site_tests/cheets_GTS/control.GtsVndkDependencyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsVndkDependencyTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsVndkDependencyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsVndkDependencyTestCases',
-        test_name='cheets_GTS.GtsVndkDependencyTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsVndkDependencyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsVndkDependencyTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsWebViewHostTestCases b/server/site_tests/cheets_GTS/control.GtsWebViewHostTestCases
deleted file mode 100644
index 6546112..0000000
--- a/server/site_tests/cheets_GTS/control.GtsWebViewHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsWebViewHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWebViewHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsWebViewHostTestCases',
-        test_name='cheets_GTS.GtsWebViewHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsWebViewHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWebViewHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsWebViewTestCases b/server/site_tests/cheets_GTS/control.GtsWebViewTestCases
deleted file mode 100644
index 8582559..0000000
--- a/server/site_tests/cheets_GTS/control.GtsWebViewTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsWebViewTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWebViewTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsWebViewTestCases',
-        test_name='cheets_GTS.GtsWebViewTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsWebViewTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWebViewTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsWellbeingHostTestCases b/server/site_tests/cheets_GTS/control.GtsWellbeingHostTestCases
deleted file mode 100644
index 9ae65dd..0000000
--- a/server/site_tests/cheets_GTS/control.GtsWellbeingHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsWellbeingHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWellbeingHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsWellbeingHostTestCases',
-        test_name='cheets_GTS.GtsWellbeingHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsWellbeingHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWellbeingHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsWellbeingPermissionPolicyTestCases b/server/site_tests/cheets_GTS/control.GtsWellbeingPermissionPolicyTestCases
deleted file mode 100644
index 3283fa1..0000000
--- a/server/site_tests/cheets_GTS/control.GtsWellbeingPermissionPolicyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsWellbeingPermissionPolicyTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWellbeingPermissionPolicyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsWellbeingPermissionPolicyTestCases',
-        test_name='cheets_GTS.GtsWellbeingPermissionPolicyTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsWellbeingPermissionPolicyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWellbeingPermissionPolicyTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsWellbeingTestCases b/server/site_tests/cheets_GTS/control.GtsWellbeingTestCases
deleted file mode 100644
index 0a35c4a..0000000
--- a/server/site_tests/cheets_GTS/control.GtsWellbeingTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsWellbeingTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWellbeingTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsWellbeingTestCases',
-        test_name='cheets_GTS.GtsWellbeingTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsWellbeingTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWellbeingTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsYouTubeTestCases b/server/site_tests/cheets_GTS/control.GtsYouTubeTestCases
deleted file mode 100644
index 35642fe..0000000
--- a/server/site_tests/cheets_GTS/control.GtsYouTubeTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.GtsYouTubeTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsYouTubeTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=5,
-        needs_push_media=True,
-        tag='GtsYouTubeTestCases',
-        test_name='cheets_GTS.GtsYouTubeTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsYouTubeTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsYouTubeTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.tradefed-run-collect-tests-only b/server/site_tests/cheets_GTS/control.tradefed-run-collect-tests-only
deleted file mode 100644
index 3cfa012..0000000
--- a/server/site_tests/cheets_GTS/control.tradefed-run-collect-tests-only
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.tradefed-run-collect-tests-only'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-PRIORITY = 70
-DOC = 'Run all of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='tradefed-run-collect-tests-only',
-        test_name='cheets_GTS.tradefed-run-collect-tests-only',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'GtsYouTubeTestCases:skip-media-download:true'],
-        retry_template=None,
-        target_module=None,
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.tradefed-run-test b/server/site_tests/cheets_GTS/control.tradefed-run-test
deleted file mode 100644
index d2c6fa9..0000000
--- a/server/site_tests/cheets_GTS/control.tradefed-run-test
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file is not auto-generated. Don't delete it.
-
-# Boring.
-import logging
-import pprint
-from autotest_lib.client.bin import utils
-
-usage = """
-1) To run against a particular $DUT use
-   test_that --args="module=GtsMediaTestCases test=com.google.android.media.gts.MediaCodecStressTest#testDecodeDecodeCompositeDisplay1080p" $DUT cheets_GTS.tradefed-run-test
-
-2) To run against a lab pool use
-    run_suite.py --board=eve --build=$TRYJOB_BUILD --suite_name arc-gts-test --pool cts --no_wait True --priority CQ --timeout_mins 6160 --retry False --num 1 --suite_min_duts 1 --test_args="{'module' : 'GtsMediaTestCases', 'test' : 'com.google.android.media.gts.MediaCodecStressTest#testDecodeDecodeCompositeDisplay1080p'}"
-"""
-
-def usage_error():
-    logging.info('Example usage:')
-    logging.info(usage)
-    raise SystemExit
-
-pp = pprint.PrettyPrinter()
-logging.info(
-    '***********************************************************************')
-
-# Define the variables that we are going to use and set sensible defaults.
-gts_module = ''
-gts_retry = 5
-gts_revision = '8-R2-P-6955212'  # TODO(ihf): Set this default value from generator.
-gts_test = ''
-gts_timeout = 600
-
-# Pull parameters either from run_suite or test_that.
-if 'args_dict' in vars():
-    logging.info('Raw test options from run_suite:')
-    pp.pprint(args_dict)
-elif args:
-    logging.info('Raw test options from test_that:')
-    pp.pprint(args)
-    args_dict = utils.args_to_dict(args)
-else:
-    usage_error()
-
-gts_module = args_dict.get('module', gts_module)
-gts_revision = args_dict.get('revision', gts_revision)
-gts_test = args_dict.get('test', gts_test)
-gts_timeout = float(args_dict.get('timeout', gts_timeout))
-gts_retry = int(args_dict.get('max_retry', gts_retry))
-
-# Basic checks for option validity.
-logging.error('Running module %s with test %s on revision %s',
-              gts_module, gts_test, gts_revision)
-if not gts_module or not gts_revision or not gts_test:
-    usage_error()
-
-# And we are getting ready for tradefed.
-uri = ('gs://chromeos-arc-images/cts/bundle/android-gts-' + gts_revision + '.zip')
-run_template = ['run', 'commandAndExit', 'gts',
-                '--include-filter', gts_module + ' ' + gts_test,
-                '--ignore-business-logic-failure']
-retry_template = ['run', 'commandAndExit', 'retry',
-                  '--retry', '{session_id}']
-# Unfortunately super long test names can cause problems. Try to get the
-# rightmost element and use that as a simplified name.
-# TODO(ihf): fix pipeline so it works with super long names.
-simplified_test = gts_test
-if '#' in gts_test:
-    simplified_test = gts_test.split('#')[-1]
-elif '.' in gts_test:
-    simplified_test = gts_test.split('.')[-1]
-tag = 'tradefed-run-test.%s.%s' % (gts_module, simplified_test)
-
-# The usual testing stanza. We are suppressing some DEPENDENCIES on purpose.
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.tradefed-run-test'
-ATTRIBUTES = ''
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 256000
-DOC = ('Run a test of the Android Google Test Suite (GTS) in the ARC++ '
-       'container.')
-
-# And launch.
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=gts_retry,
-        needs_push_media=True,
-        tag=tag,
-        test_name=NAME,
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=run_template,
-        retry_template=retry_template,
-        target_module=None,
-        target_plan=None,
-        uri=uri,
-        login_precondition_commands=[
-            'lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'
-        ],
-        precondition_commands=[
-            'echo $(({0} % 2 * 2 + 1)) > /proc/sys/kernel/perf_event_paranoid',
-            'modprobe configs'
-        ],
-        timeout=gts_timeout)
-
-parallel_simple(run_TS, machines)
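The deleted control.tradefed-run-test above assembles its tradefed invocation from --args key/value pairs. Below is a minimal standalone sketch of that mapping, assuming a plain 'key=value key=value' argument string; the helper names are illustrative only, since the real control file relies on utils.args_to_dict() and on globals (args, args_dict, job, hosts, machines) injected by the Autotest control-file environment.

# Minimal standalone sketch (not the Autotest implementation) of how the
# deleted control file turns "module=... test=..." arguments into the
# tradefed 'run' command it passes to cheets_GTS.
def parse_test_args(args_string):
    """Parse 'key=value key=value' pairs, loosely mirroring utils.args_to_dict()."""
    result = {}
    for token in args_string.split():
        key, _, value = token.partition('=')
        if key and value:
            result[key] = value
    return result

def build_run_template(module, test):
    """Build the include-filter run template used by the deleted control file."""
    return ['run', 'commandAndExit', 'gts',
            '--include-filter', module + ' ' + test,
            '--ignore-business-logic-failure']

if __name__ == '__main__':
    parsed = parse_test_args(
        'module=GtsMediaTestCases '
        'test=com.google.android.media.gts.MediaCodecStressTest#testDecodeDecodeCompositeDisplay1080p')
    print(build_run_template(parsed['module'], parsed['test']))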
diff --git a/server/site_tests/cheets_GTS/control.waivers b/server/site_tests/cheets_GTS/control.waivers
deleted file mode 100644
index fb96823..0000000
--- a/server/site_tests/cheets_GTS/control.waivers
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.waivers'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run preview version of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='waivers',
-        test_name='cheets_GTS.waivers',
-        run_template=['run', 'commandAndExit', 'gts', '--subplan', 'waivers', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='gts-dev',
-        target_plan='waivers',
-        load_waivers=False,
-        uri='gs://chromeos-partner-gts/android-gts-8-R3-P-Preview3-7012566.zip',
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.waivers-collect-tests-only b/server/site_tests/cheets_GTS/control.waivers-collect-tests-only
deleted file mode 100644
index e3ee3ad..0000000
--- a/server/site_tests/cheets_GTS/control.waivers-collect-tests-only
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.waivers-collect-tests-only'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run preview version of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='waivers-collect-tests-only',
-        test_name='cheets_GTS.waivers-collect-tests-only',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--subplan', 'waivers', '--ignore-business-logic-failure', '--disable-reboot'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='gts-dev-collect',
-        target_plan='waivers',
-        load_waivers=False,
-        uri='gs://chromeos-partner-gts/android-gts-8-R3-P-Preview3-7012566.zip',
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/generate_controlfiles.py b/server/site_tests/cheets_GTS/generate_controlfiles.py
deleted file mode 100755
index 1d3e413..0000000
--- a/server/site_tests/cheets_GTS/generate_controlfiles.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This is a trampoline script to invoke the actual generator script.
-
-import os
-import sys
-
-target_script_name = 'generate_controlfiles_GTS.py'
-target_script_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
-    '..', '..', 'cros', 'tradefed', target_script_name))
-os.execv(target_script_path, sys.argv)
diff --git a/server/site_tests/cheets_GTS/notest_modules/notest_combined_modules.yaml b/server/site_tests/cheets_GTS/notest_modules/notest_combined_modules.yaml
deleted file mode 100644
index 3f8e252..0000000
--- a/server/site_tests/cheets_GTS/notest_modules/notest_combined_modules.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-GtsIncident: [all]
-GtsIncrementalInstall: [all]
-GtsMemory: [all]
-GtsSample: [all]
-gts-dev: [all]
diff --git a/server/site_tests/cheets_GTS/notest_modules/notest_modules.yaml b/server/site_tests/cheets_GTS/notest_modules/notest_modules.yaml
deleted file mode 100644
index b9e34a2..0000000
--- a/server/site_tests/cheets_GTS/notest_modules/notest_modules.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
-GtsAccountsHostTestCases: [all]
-GtsAssistantHostTestCases: [all]
-GtsAudioTestCases: [all]
-GtsBootHealthHostTestCases: [all]
-GtsContextHubPermissionDeviceTestCases: [all]
-GtsDebugfsMountTestCases: [all]
-GtsDeviceConfigTestCases: [all]
-GtsDexModuleRegistrationTestCases: [all]
-GtsDozeHostSideTestCases: [all]
-GtsGraphicsHostTestCases: [all]
-GtsIncidentConfirmationTestCases: [all]
-GtsIncidentManagerTestCases: [all]
-GtsIncrementalInstallProxyHostTestCases: [all]
-GtsIncrementalInstallTestCases: [all]
-GtsIncrementalInstallTriggerApp: [all]
-GtsInstallerV2TestCases: [all]
-GtsLensTestCases: [all]
-GtsMemoryHostTestCases: [all]
-GtsMemoryTestCases: [all]
-GtsModuleMetadataTestCases: [all]
-GtsNetworkStackHostTestCases: [all]
-GtsOemLockServiceTestCases: [all]
-GtsPermissionControllerHostTestCases: [all]
-GtsPlayFsiHostTestCases: [all]
-GtsPlayFsiTestCases: [all]
-GtsSampleDeviceTestCases: [all]
-GtsSampleDynamicConfigTestCases: [all]
-GtsSampleHostTestCases: [all]
-GtsSetupWizardNoPermissionTestCases: [all]
-GtsSmartBatteryDeviceTestCases: [all]
-GtsSmsCallLogTestCases: [all]
-GtsStagedInstallHostTestCases: [all]
-GtsStorageTestCases: [all]
-GtsSupervisionTestCases: [all]
-GtsSuspendAppsTestCases: [all]
-GtsTelephonyNumberVerificationHostCases: [all]
-GtsTestHarnessModeTestCases: [all]
-GtsTvTestCases: [all]
-GtsUserspaceRebootHostSideTestCases: [all]
-GtsWellbeingHostTestCases: [all]
-GtsWellbeingTestCases: [all]
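The deleted notest_modules.yaml above maps GTS module names to the variants for which no per-module control file should be generated ('[all]' disables a module everywhere). The sketch below shows one way such a file could be consumed; it is an assumption for illustration only, since the actual consumer, generate_controlfiles_GTS.py under server/cros/tradefed/, is not part of this diff.

# Hypothetical sketch of a consumer for the deleted notest_modules.yaml.
# The real generator's logic may differ; this only documents the file shape.
import yaml  # assumes PyYAML is available

def load_notest_modules(path):
    """Return a dict mapping module name -> list of excluded variants."""
    with open(path) as f:
        return yaml.safe_load(f) or {}

def should_generate_control(module, variant, notest_modules):
    """Skip a module when the YAML lists it for 'all' or for this variant."""
    excluded = notest_modules.get(module, [])
    return not ('all' in excluded or variant in excluded)

# Example: GtsTvTestCases is listed with [all], so no control file is emitted.
notest = {'GtsTvTestCases': ['all']}
print(should_generate_control('GtsTvTestCases', 'arc', notest))    # False
print(should_generate_control('GtsViewTestCases', 'arc', notest))  # True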
diff --git a/server/site_tests/cheets_GTS/subplans/waivers.xml b/server/site_tests/cheets_GTS/subplans/waivers.xml
deleted file mode 100644
index f29acb9..0000000
--- a/server/site_tests/cheets_GTS/subplans/waivers.xml
+++ /dev/null
@@ -1,10 +0,0 @@
-<?xml version='1.0' encoding='UTF-8' standalone='no' ?>
-<SubPlan version="2.0">
-  <!--
-    Unless there's special reason, the entries can be removed only
-    when the DEV version bundle's provisional version number (like 9.0r13)
-    is incremented. Otherwise the computation for the canonical list
-    of test cases is confused. See b/151779432.
-  -->
-  <Entry include="GtsAssistantWorkProfileHostTestCases" />
-</SubPlan>
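The deleted subplans/waivers.xml above is the subplan that control.waivers points tradefed at via '--subplan waivers'; each Entry element names a module to include in that run. A tiny reader like the one below (illustrative only, no such helper exists in the tree) shows the shape of the file:

# Illustrative only: parse the deleted waivers.xml format and list the
# modules a '--subplan waivers' run would include.
import xml.etree.ElementTree as ET

SUBPLAN_XML = b"""<?xml version='1.0' encoding='UTF-8' standalone='no' ?>
<SubPlan version="2.0">
  <Entry include="GtsAssistantWorkProfileHostTestCases" />
</SubPlan>"""

root = ET.fromstring(SUBPLAN_XML)
includes = [entry.get('include') for entry in root.findall('Entry')]
print(includes)  # ['GtsAssistantWorkProfileHostTestCases']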
diff --git a/server/site_tests/cheets_GTS_R/OWNERS b/server/site_tests/cheets_GTS_R/OWNERS
deleted file mode 100644
index 3895a7b..0000000
--- a/server/site_tests/cheets_GTS_R/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-include /CTS_OWNERS
diff --git a/server/site_tests/cheets_GTS_R/cheets_GTS_R.py b/server/site_tests/cheets_GTS_R/cheets_GTS_R.py
deleted file mode 100644
index 5b0dc67..0000000
--- a/server/site_tests/cheets_GTS_R/cheets_GTS_R.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# repohooks/pre-upload.py currently does not run pylint. But for developers who
-# want to check their code manually we disable several harmless pylint warnings
-# which just distract from more serious remaining issues.
-#
-# The instance variable _android_gts is not defined in __init__().
-# pylint: disable=attribute-defined-outside-init
-#
-# Many short variable names don't follow the naming convention.
-# pylint: disable=invalid-name
-
-import logging
-import os
-import shutil
-import tempfile
-
-from autotest_lib.server import utils
-from autotest_lib.server.cros.tradefed import tradefed_test
-
-# Maximum default time allowed for each individual GTS module.
-_GTS_TIMEOUT_SECONDS = 3600
-# TODO: fix it when we prepare the public control files.
-_PARTNER_GTS_BUCKET = 'gs://chromeos-partner-gts/'
-_PARTNER_GTS_LOCATION = _PARTNER_GTS_BUCKET + 'gts-8-R2-R-6955212.zip'
-_PARTNER_GTS_AUTHKEY = _PARTNER_GTS_BUCKET + 'gts-arc.json'
-_GTS_MEDIA_URI = ('https://storage.googleapis.com/youtube-test-media/gts/' +
-                  'GtsYouTubeTestCases-media-1.2.zip')
-_GTS_MEDIA_LOCALPATH = '/tmp/android-gts-media/GtsYouTubeTestCases'
-
-# Internal uprev for all GTS modules.
-_GTS_LATEST_URI = 'gs://chromeos-arc-images/cts/bundle/android-gts-8-R3-R-Preview3-7012566.zip'
-
-
-class cheets_GTS_R(tradefed_test.TradefedTest):
-    """Sets up tradefed to run GTS tests."""
-    version = 1
-
-    _SHARD_CMD = '--shard-count'
-
-    def _tradefed_retry_command(self, template, session_id):
-        """Build tradefed 'retry' command from template."""
-        cmd = []
-        for arg in template:
-            cmd.append(arg.format(session_id=session_id))
-        return cmd
-
-    def _tradefed_run_command(self, template):
-        """Build tradefed 'run' command from template."""
-        cmd = template[:]
-        # If we are running outside of the lab we can collect more data.
-        if not utils.is_in_container():
-            logging.info('Running outside of lab, adding extra debug options.')
-            cmd.append('--log-level-display=DEBUG')
-
-        return cmd
-
-    def _get_default_bundle_url(self, bundle):
-        return _PARTNER_GTS_LOCATION
-
-    def _get_latest_bundle_url(self, bundle):
-        return _GTS_LATEST_URI
-
-    def _get_default_authkey(self):
-        return _PARTNER_GTS_AUTHKEY
-
-    def _get_tradefed_base_dir(self):
-        return 'android-gts'
-
-    def _tradefed_cmd_path(self):
-        return os.path.join(self._repository, 'tools', 'gts-tradefed')
-
-    def _tradefed_env(self):
-        if self._authkey:
-            return dict(os.environ, APE_API_KEY=self._authkey)
-        return None
-
-    def run_once(self,
-                 test_name,
-                 run_template,
-                 retry_template=None,
-                 target_module=None,
-                 target_plan=None,
-                 needs_push_media=False,
-                 enable_default_apps=False,
-                 precondition_commands=[],
-                 login_precondition_commands=[],
-                 authkey=None,
-                 prerequisites=[],
-                 timeout=_GTS_TIMEOUT_SECONDS):
-        """Runs the specified GTS once, but with several retries.
-
-        Run an arbitrary tradefed command.
-
-        @param test_name: the name of test. Used for logging.
-        @param run_template: the template to construct the run command.
-                             Example: ['run', 'commandAndExit', 'cts',
-                                       '--skip-media-download']
-        @param retry_template: the template to construct the retry command.
-                               Example: ['run', 'commandAndExit', 'retry',
-                                         '--skip-media-download', '--retry',
-                                         '{session_id}']
-        @param target_module: the name of test module to run.
-        @param target_plan: the name of the test plan to run.
-        @param needs_push_media: need to push test media streams.
-        @param timeout: time after which tradefed can be interrupted.
-        @param precondition_commands: a list of scripts to be run on the
-        dut before the test is run, the scripts must already be installed.
-        @param login_precondition_commands: a list of scripts to be run on the
-        dut before the log-in for the test is performed.
-        @param prerequisites: a list of prerequisites that identify rogue DUTs.
-        """
-        # Download the GTS auth key to the local temp directory.
-        tmpdir = tempfile.mkdtemp()
-        try:
-            self._authkey = self._download_to_dir(
-                authkey or self._get_default_authkey(), tmpdir)
-
-            self._run_tradefed_with_retries(
-                test_name=test_name,
-                run_template=run_template,
-                retry_template=retry_template,
-                timeout=timeout,
-                target_module=target_module,
-                target_plan=target_plan,
-                media_asset=tradefed_test.MediaAsset(
-                    _GTS_MEDIA_URI if needs_push_media else None,
-                    _GTS_MEDIA_LOCALPATH),
-                enable_default_apps=enable_default_apps,
-                login_precondition_commands=login_precondition_commands,
-                precondition_commands=precondition_commands,
-                prerequisites=prerequisites)
-        finally:
-            shutil.rmtree(tmpdir)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAccountsHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAccountsHostTestCases
deleted file mode 100644
index 8b30a84..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAccountsHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsAccountsHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAccountsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsAccountsHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsAccountsHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAccountsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAccountsHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAdminTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAdminTestCases
deleted file mode 100644
index 91e63cd..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAdminTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsAdminTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAdminTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsAdminTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsAdminTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAdminTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAdminTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAfwTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAfwTestCases
deleted file mode 100644
index 7bb8688..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAfwTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsAfwTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAfwTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsAfwTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsAfwTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAfwTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAfwTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAndroidAutoDeviceTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAndroidAutoDeviceTestCases
deleted file mode 100644
index 6f64b75..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAndroidAutoDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsAndroidAutoDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAndroidAutoDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsAndroidAutoDeviceTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsAndroidAutoDeviceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAndroidAutoDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAndroidAutoDeviceTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsApp b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsApp
deleted file mode 100644
index 6bd7c73..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsApp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsApp'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAppBlacklistDeviceTestCases, GtsAppTestCases, GtsAppVisibilityDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsApp',
-        test_name='cheets_GTS_R.8.0_r3.GtsApp',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsAppBlacklistDeviceTestCases', '--include-filter', 'GtsAppTestCases', '--include-filter', 'GtsAppVisibilityDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsApp',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsArtManagerHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsArtManagerHostTestCases
deleted file mode 100644
index 2d90e31..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsArtManagerHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsArtManagerHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsArtManagerHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsArtManagerHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsArtManagerHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsArtManagerHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsArtManagerHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAssistIntentTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAssistIntentTestCases
deleted file mode 100644
index 1d2ba3d..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAssistIntentTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsAssistIntentTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistIntentTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsAssistIntentTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsAssistIntentTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistIntentTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistIntentTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAssistant b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAssistant
deleted file mode 100644
index 41d7e30..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAssistant
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsAssistant'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistantHostTestCases, GtsAssistantMicHostTestCases, GtsAssistantWorkProfileHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsAssistant',
-        test_name='cheets_GTS_R.8.0_r3.GtsAssistant',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsAssistantHostTestCases', '--include-filter', 'GtsAssistantMicHostTestCases', '--include-filter', 'GtsAssistantWorkProfileHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistant',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAudioTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAudioTestCases
deleted file mode 100644
index 38345f9..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsAudioTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsAudioTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAudioTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsAudioTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsAudioTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAudioTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAudioTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsBackup b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsBackup
deleted file mode 100644
index 2c983bd..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsBackup
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsBackup'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBackupHostTestCases, GtsBackupTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsBackup',
-        test_name='cheets_GTS_R.8.0_r3.GtsBackup',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsBackupHostTestCases', '--include-filter', 'GtsBackupTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBackup',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsBoot b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsBoot
deleted file mode 100644
index 5781db0..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsBoot
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsBoot'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBootHealthHostTestCases, GtsBootStatsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsBoot',
-        test_name='cheets_GTS_R.8.0_r3.GtsBoot',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsBootHealthHostTestCases', '--include-filter', 'GtsBootStatsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBoot',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsCallLogTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsCallLogTestCases
deleted file mode 100644
index 9946b43..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsCallLogTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsCallLogTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCallLogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsCallLogTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsCallLogTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCallLogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCallLogTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsCameraTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsCameraTestCases
deleted file mode 100644
index 2d3ca7b..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsCameraTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsCameraTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCameraTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsCameraTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsCameraTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCameraTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCameraTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsCastHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsCastHostTestCases
deleted file mode 100644
index 32a3591..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsCastHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsCastHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCastHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsCastHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsCastHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCastHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCastHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsContacts b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsContacts
deleted file mode 100644
index 31f3be2..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsContacts
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsContacts'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContactsAppDeviceTestCases, GtsContactsTest of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsContacts',
-        test_name='cheets_GTS_R.8.0_r3.GtsContacts',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsContactsAppDeviceTestCases', '--include-filter', 'GtsContactsTest', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContacts',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsContent b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsContent
deleted file mode 100644
index 4586acf..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsContent
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsContent'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContentHostTestCases, GtsContentTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsContent',
-        test_name='cheets_GTS_R.8.0_r3.GtsContent',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsContentHostTestCases', '--include-filter', 'GtsContentTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContent',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsContextHubPermissionDeviceTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsContextHubPermissionDeviceTestCases
deleted file mode 100644
index 4d917f0..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsContextHubPermissionDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsContextHubPermissionDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContextHubPermissionDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsContextHubPermissionDeviceTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsContextHubPermissionDeviceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContextHubPermissionDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContextHubPermissionDeviceTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDebugfsMountTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDebugfsMountTestCases
deleted file mode 100644
index aa949d7..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDebugfsMountTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsDebugfsMountTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDebugfsMountTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsDebugfsMountTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsDebugfsMountTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDebugfsMountTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDebugfsMountTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDeviceConfigTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDeviceConfigTestCases
deleted file mode 100644
index 74e9e90..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDeviceConfigTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsDeviceConfigTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDeviceConfigTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsDeviceConfigTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsDeviceConfigTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDeviceConfigTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDeviceConfigTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDexModuleRegistrationTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDexModuleRegistrationTestCases
deleted file mode 100644
index a22a8a7..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDexModuleRegistrationTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsDexModuleRegistrationTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDexModuleRegistrationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsDexModuleRegistrationTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsDexModuleRegistrationTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDexModuleRegistrationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDexModuleRegistrationTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDialer b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDialer
deleted file mode 100644
index a769d89..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDialer
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsDialer'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDialerAudioTestCases, GtsDialerDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsDialer',
-        test_name='cheets_GTS_R.8.0_r3.GtsDialer',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsDialerAudioTestCases', '--include-filter', 'GtsDialerDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDialer',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDoze b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDoze
deleted file mode 100644
index 7490fe6..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDoze
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsDoze'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDozeDeviceTestCases, GtsDozeHostSideTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsDoze',
-        test_name='cheets_GTS_R.8.0_r3.GtsDoze',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsDozeDeviceTestCases', '--include-filter', 'GtsDozeHostSideTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDoze',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDuoReadyTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDuoReadyTestCases
deleted file mode 100644
index 195b26b..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsDuoReadyTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsDuoReadyTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDuoReadyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsDuoReadyTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsDuoReadyTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDuoReadyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDuoReadyTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsEdiHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsEdiHostTestCases
deleted file mode 100644
index 47cbb91..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsEdiHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsEdiHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsEdiHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsEdiHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsEdiHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsEdiHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsEdiHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsExoPlayerTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsExoPlayerTestCases
deleted file mode 100644
index 86373dd..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsExoPlayerTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsExoPlayerTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsExoPlayerTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsExoPlayerTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsExoPlayerTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsExoPlayerTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsExoPlayerTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsFeaturesTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsFeaturesTestCases
deleted file mode 100644
index 87ec94a..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsFeaturesTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsFeaturesTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsFeaturesTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsFeaturesTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsFeaturesTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsFeaturesTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsFeaturesTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsGmscoreHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsGmscoreHostTestCases
deleted file mode 100644
index f724dd9..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsGmscoreHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsGmscoreHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsGmscoreHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsGmscoreHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsGmscoreHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGmscoreHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsGmscoreHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        prerequisites=['bluetooth'],
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsGraphicsHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsGraphicsHostTestCases
deleted file mode 100644
index 22d112a..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsGraphicsHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsGraphicsHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsGraphicsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsGraphicsHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsGraphicsHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGraphicsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsGraphicsHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsHomeHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsHomeHostTestCases
deleted file mode 100644
index 37a107e..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsHomeHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsHomeHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsHomeHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsHomeHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsHomeHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsHomeHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsHomeHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsIncident b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsIncident
deleted file mode 100644
index eb340b8..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsIncident
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsIncident'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncidentConfirmationTestCases, GtsIncidentManagerTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsIncident',
-        test_name='cheets_GTS_R.8.0_r3.GtsIncident',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsIncidentConfirmationTestCases', '--include-filter', 'GtsIncidentManagerTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncident',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsIncrementalInstall b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsIncrementalInstall
deleted file mode 100644
index 205249a..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsIncrementalInstall
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsIncrementalInstall'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncrementalInstallProxyHostTestCases, GtsIncrementalInstallTestCases, GtsIncrementalInstallTriggerApp of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsIncrementalInstall',
-        test_name='cheets_GTS_R.8.0_r3.GtsIncrementalInstall',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsIncrementalInstallProxyHostTestCases', '--include-filter', 'GtsIncrementalInstallTestCases', '--include-filter', 'GtsIncrementalInstallTriggerApp', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncrementalInstall',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsInstallPackagesWhitelistDeviceTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsInstallPackagesWhitelistDeviceTestCases
deleted file mode 100644
index 2df8ecd..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsInstallPackagesWhitelistDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsInstallPackagesWhitelistDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstallPackagesWhitelistDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsInstallPackagesWhitelistDeviceTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsInstallPackagesWhitelistDeviceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstallPackagesWhitelistDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstallPackagesWhitelistDeviceTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsInstallerV2TestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsInstallerV2TestCases
deleted file mode 100644
index e621584..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsInstallerV2TestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsInstallerV2TestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstallerV2TestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsInstallerV2TestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsInstallerV2TestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstallerV2TestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstallerV2TestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsInstantAppsHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsInstantAppsHostTestCases
deleted file mode 100644
index d893dd1..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsInstantAppsHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsInstantAppsHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstantAppsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsInstantAppsHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsInstantAppsHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstantAppsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstantAppsHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLargeApkHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLargeApkHostTestCases
deleted file mode 100644
index cfb4679..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLargeApkHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsLargeApkHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLargeApkHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsLargeApkHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsLargeApkHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLargeApkHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLargeApkHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLensTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLensTestCases
deleted file mode 100644
index 7d39706..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLensTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsLensTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLensTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsLensTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsLensTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLensTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLensTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLinkerConfig b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLinkerConfig
deleted file mode 100644
index 5204c06..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLinkerConfig
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsLinkerConfig'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLinkerConfigTestCases, GtsLinkerConfigTestCases[secondary_user] of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsLinkerConfig',
-        test_name='cheets_GTS_R.8.0_r3.GtsLinkerConfig',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsLinkerConfigTestCases', '--include-filter', 'GtsLinkerConfigTestCases[secondary_user]', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLinkerConfig',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLocation b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLocation
deleted file mode 100644
index f096fdb..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsLocation
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsLocation'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLocationHostTestCases, GtsLocationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsLocation',
-        test_name='cheets_GTS_R.8.0_r3.GtsLocation',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsLocationHostTestCases', '--include-filter', 'GtsLocationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLocation',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsMediaTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsMediaTestCases
deleted file mode 100644
index 0a106a6..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsMediaTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsMediaTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsMediaTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsMediaTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsMediaTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsMediaTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsMediaTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsMemory b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsMemory
deleted file mode 100644
index 5212515..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsMemory
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsMemory'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsMemoryHostTestCases, GtsMemoryTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsMemory',
-        test_name='cheets_GTS_R.8.0_r3.GtsMemory',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsMemoryHostTestCases', '--include-filter', 'GtsMemoryTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsMemory',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsModuleMetadataTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsModuleMetadataTestCases
deleted file mode 100644
index 91ae017..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsModuleMetadataTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsModuleMetadataTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsModuleMetadataTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsModuleMetadataTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsModuleMetadataTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsModuleMetadataTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsModuleMetadataTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNet b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNet
deleted file mode 100644
index 6ffff37..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNet
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsNet'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetStatsHostTestCases, GtsNetTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsNet',
-        test_name='cheets_GTS_R.8.0_r3.GtsNet',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsNetStatsHostTestCases', '--include-filter', 'GtsNetTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNet',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNetwork b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNetwork
deleted file mode 100644
index 7cb7695..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNetwork
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsNetwork'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetworkStackHostTestCases, GtsNetworkWatchlistTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsNetwork',
-        test_name='cheets_GTS_R.8.0_r3.GtsNetwork',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsNetworkStackHostTestCases', '--include-filter', 'GtsNetworkWatchlistTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNetwork',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNmgiarcTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNmgiarcTestCases
deleted file mode 100644
index 9a898bc..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNmgiarcTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsNmgiarcTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNmgiarcTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsNmgiarcTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsNmgiarcTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNmgiarcTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNmgiarcTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNoPermission b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNoPermission
deleted file mode 100644
index 02004c5..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNoPermission
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsNoPermission'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNoPermissionTestCases, GtsNoPermissionTestCases25 of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsNoPermission',
-        test_name='cheets_GTS_R.8.0_r3.GtsNoPermission',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsNoPermissionTestCases', '--include-filter', 'GtsNoPermissionTestCases25', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNoPermission',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNotificationTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNotificationTestCases
deleted file mode 100644
index 81c78c7..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsNotificationTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsNotificationTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNotificationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsNotificationTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsNotificationTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNotificationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNotificationTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsOemLockServiceTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsOemLockServiceTestCases
deleted file mode 100644
index bdb4d51..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsOemLockServiceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsOemLockServiceTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsOemLockServiceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsOemLockServiceTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsOemLockServiceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsOemLockServiceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsOemLockServiceTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsOsTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsOsTestCases
deleted file mode 100644
index 0ff1e54..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsOsTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsOsTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsOsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsOsTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsOsTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsOsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsOsTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPackage b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPackage
deleted file mode 100644
index 0fb7823..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPackage
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsPackage'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageInstallTestCases, GtsPackageInstallerTapjackingTestCases, GtsPackageManagerHostTestCases, GtsPackageNameCertPairsDeviceTestCases, GtsPackageUninstallTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsPackage',
-        test_name='cheets_GTS_R.8.0_r3.GtsPackage',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsPackageInstallTestCases', '--include-filter', 'GtsPackageInstallerTapjackingTestCases', '--include-filter', 'GtsPackageManagerHostTestCases', '--include-filter', 'GtsPackageNameCertPairsDeviceTestCases', '--include-filter', 'GtsPackageUninstallTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackage',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=2160)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPartnerBookmarksTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPartnerBookmarksTestCases
deleted file mode 100644
index 91f66ac..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPartnerBookmarksTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsPartnerBookmarksTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPartnerBookmarksTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsPartnerBookmarksTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsPartnerBookmarksTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPartnerBookmarksTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPartnerBookmarksTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPermission b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPermission
deleted file mode 100644
index 71e80f7..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPermission
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsPermission'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPermissionControllerHostTestCases, GtsPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsPermission',
-        test_name='cheets_GTS_R.8.0_r3.GtsPermission',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsPermissionControllerHostTestCases', '--include-filter', 'GtsPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPermission',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPlacementTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPlacementTestCases
deleted file mode 100644
index 44cb5ec..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPlacementTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsPlacementTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlacementTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsPlacementTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsPlacementTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlacementTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlacementTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPlay b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPlay
deleted file mode 100644
index 2a5b672..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPlay
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsPlay'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlayAutoInstallTestCases, GtsPlayFsiHostTestCases, GtsPlayFsiTestCases, GtsPlayStoreHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsPlay',
-        test_name='cheets_GTS_R.8.0_r3.GtsPlay',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsPlayAutoInstallTestCases', '--include-filter', 'GtsPlayFsiHostTestCases', '--include-filter', 'GtsPlayFsiTestCases', '--include-filter', 'GtsPlayStoreHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlay',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPrintTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPrintTestCases
deleted file mode 100644
index 2fd18c4..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPrintTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsPrintTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPrintTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsPrintTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsPrintTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrintTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPrintTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPrivacyTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPrivacyTestCases
deleted file mode 100644
index a79ef8e..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPrivacyTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsPrivacyTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPrivacyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsPrivacyTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsPrivacyTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrivacyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPrivacyTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPropertiesTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPropertiesTestCases
deleted file mode 100644
index 9ff138e..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsPropertiesTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsPropertiesTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPropertiesTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsPropertiesTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsPropertiesTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPropertiesTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPropertiesTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsRegulationComplianceTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsRegulationComplianceTestCases
deleted file mode 100644
index 829a539..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsRegulationComplianceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsRegulationComplianceTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsRegulationComplianceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsRegulationComplianceTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsRegulationComplianceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsRegulationComplianceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsRegulationComplianceTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsRlzTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsRlzTestCases
deleted file mode 100644
index b5c95b4..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsRlzTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsRlzTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsRlzTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsRlzTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsRlzTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsRlzTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsRlzTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSample b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSample
deleted file mode 100644
index 0f10f95..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSample
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSample'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSampleDeviceTestCases, GtsSampleDynamicConfigTestCases, GtsSampleHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSample',
-        test_name='cheets_GTS_R.8.0_r3.GtsSample',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSampleDeviceTestCases', '--include-filter', 'GtsSampleDynamicConfigTestCases', '--include-filter', 'GtsSampleHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSample',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsScreenshotHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsScreenshotHostTestCases
deleted file mode 100644
index 9bc9936..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsScreenshotHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsScreenshotHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsScreenshotHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsScreenshotHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsScreenshotHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsScreenshotHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsScreenshotHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSearchHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSearchHostTestCases
deleted file mode 100644
index 3151771..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSearchHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSearchHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSearchHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSearchHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsSearchHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSearchHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSearchHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSecurityHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSecurityHostTestCases
deleted file mode 100644
index 310283e..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSecurityHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSecurityHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSecurityHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSecurityHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsSecurityHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSecurityHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSecurityHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSensorHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSensorHostTestCases
deleted file mode 100644
index 1f9a5b8..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSensorHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSensorHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSensorHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSensorHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsSensorHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSensorHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSensorHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSettings b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSettings
deleted file mode 100644
index 0b19fb0..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSettings
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSettings'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSettingsHostTestCases, GtsSettingsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSettings',
-        test_name='cheets_GTS_R.8.0_r3.GtsSettings',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSettingsHostTestCases', '--include-filter', 'GtsSettingsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSettings',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSetupWizard b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSetupWizard
deleted file mode 100644
index 81e474e..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSetupWizard
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSetupWizard'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSetupWizardHostTestCases, GtsSetupWizardNoPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSetupWizard',
-        test_name='cheets_GTS_R.8.0_r3.GtsSetupWizard',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSetupWizardHostTestCases', '--include-filter', 'GtsSetupWizardNoPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSetupWizard',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSimAppDialogTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSimAppDialogTestCases
deleted file mode 100644
index 29c3795..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSimAppDialogTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSimAppDialogTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSimAppDialogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSimAppDialogTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsSimAppDialogTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSimAppDialogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSimAppDialogTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSmartBatteryDeviceTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSmartBatteryDeviceTestCases
deleted file mode 100644
index f0ca993..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSmartBatteryDeviceTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSmartBatteryDeviceTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSmartBatteryDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSmartBatteryDeviceTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsSmartBatteryDeviceTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSmartBatteryDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSmartBatteryDeviceTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSmsCallLogTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSmsCallLogTestCases
deleted file mode 100644
index 9114951..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSmsCallLogTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSmsCallLogTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSmsCallLogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSmsCallLogTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsSmsCallLogTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSmsCallLogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSmsCallLogTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSsaidHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSsaidHostTestCases
deleted file mode 100644
index e45a1d5..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSsaidHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSsaidHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSsaidHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSsaidHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsSsaidHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSsaidHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSsaidHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsStagedInstallHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsStagedInstallHostTestCases
deleted file mode 100644
index 4e2c637..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsStagedInstallHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsStagedInstallHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStagedInstallHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsStagedInstallHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsStagedInstallHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStagedInstallHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStagedInstallHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsStatsdHostTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsStatsdHostTestCases
deleted file mode 100644
index b573025..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsStatsdHostTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsStatsdHostTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStatsdHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsStatsdHostTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsStatsdHostTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStatsdHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStatsdHostTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsStorageTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsStorageTestCases
deleted file mode 100644
index 4842a08..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsStorageTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsStorageTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStorageTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsStorageTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsStorageTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStorageTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStorageTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSupervisionTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSupervisionTestCases
deleted file mode 100644
index 9bf5ce9..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSupervisionTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSupervisionTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSupervisionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSupervisionTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsSupervisionTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSupervisionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSupervisionTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSuspendApps b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSuspendApps
deleted file mode 100644
index 5618728..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsSuspendApps
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsSuspendApps'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSuspendAppsPermissionTestCases, GtsSuspendAppsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsSuspendApps',
-        test_name='cheets_GTS_R.8.0_r3.GtsSuspendApps',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSuspendAppsPermissionTestCases', '--include-filter', 'GtsSuspendAppsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSuspendApps',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTelecomManagerTests b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTelecomManagerTests
deleted file mode 100644
index 11d2eca..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTelecomManagerTests
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsTelecomManagerTests'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTelecomManagerTests of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsTelecomManagerTests',
-        test_name='cheets_GTS_R.8.0_r3.GtsTelecomManagerTests',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTelecomManagerTests', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTelecomManagerTests',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTelephony b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTelephony
deleted file mode 100644
index ce85f66..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTelephony
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsTelephony'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTelephonyNumberVerificationHostCases, GtsTelephonyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsTelephony',
-        test_name='cheets_GTS_R.8.0_r3.GtsTelephony',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsTelephonyNumberVerificationHostCases', '--include-filter', 'GtsTelephonyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTelephony',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTestHarnessModeTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTestHarnessModeTestCases
deleted file mode 100644
index 8540681..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTestHarnessModeTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsTestHarnessModeTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTestHarnessModeTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsTestHarnessModeTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsTestHarnessModeTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTestHarnessModeTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTestHarnessModeTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTetheringTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTetheringTestCases
deleted file mode 100644
index b2d30b5..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTetheringTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsTetheringTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTetheringTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsTetheringTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsTetheringTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTetheringTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTetheringTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTv b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTv
deleted file mode 100644
index 5385333..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsTv
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsTv'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTvBugReportTestCases, GtsTvHostTestCases, GtsTvTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsTv',
-        test_name='cheets_GTS_R.8.0_r3.GtsTv',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsTvBugReportTestCases', '--include-filter', 'GtsTvHostTestCases', '--include-filter', 'GtsTvTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTv',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsUnofficialApisUsageTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsUnofficialApisUsageTestCases
deleted file mode 100644
index c6c3fc5..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsUnofficialApisUsageTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsUnofficialApisUsageTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUnofficialApisUsageTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsUnofficialApisUsageTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsUnofficialApisUsageTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUnofficialApisUsageTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUnofficialApisUsageTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsUsageStatsTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsUsageStatsTestCases
deleted file mode 100644
index f471eb9..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsUsageStatsTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsUsageStatsTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUsageStatsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsUsageStatsTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsUsageStatsTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUsageStatsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUsageStatsTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsUserspaceRebootHostSideTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsUserspaceRebootHostSideTestCases
deleted file mode 100644
index f118b6c..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsUserspaceRebootHostSideTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsUserspaceRebootHostSideTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUserspaceRebootHostSideTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsUserspaceRebootHostSideTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsUserspaceRebootHostSideTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUserspaceRebootHostSideTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUserspaceRebootHostSideTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsViewTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsViewTestCases
deleted file mode 100644
index 79d7b2b..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsViewTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsViewTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsViewTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsViewTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsViewTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsViewTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsViewTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsVndkDependencyTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsVndkDependencyTestCases
deleted file mode 100644
index dcebe10..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsVndkDependencyTestCases
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsVndkDependencyTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsVndkDependencyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsVndkDependencyTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsVndkDependencyTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsVndkDependencyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsVndkDependencyTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsWebView b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsWebView
deleted file mode 100644
index ee1d44b..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsWebView
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsWebView'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWebViewHostTestCases, GtsWebViewTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsWebView',
-        test_name='cheets_GTS_R.8.0_r3.GtsWebView',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsWebViewHostTestCases', '--include-filter', 'GtsWebViewTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWebView',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1080)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsWellbeing b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsWellbeing
deleted file mode 100644
index 37446818..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsWellbeing
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsWellbeing'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWellbeingHostTestCases, GtsWellbeingPermissionPolicyTestCases, GtsWellbeingTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='8.0_r3.GtsWellbeing',
-        test_name='cheets_GTS_R.8.0_r3.GtsWellbeing',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsWellbeingHostTestCases', '--include-filter', 'GtsWellbeingPermissionPolicyTestCases', '--include-filter', 'GtsWellbeingTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWellbeing',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1440)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsYouTubeTestCases b/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsYouTubeTestCases
deleted file mode 100644
index 001e00b..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.GtsYouTubeTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.GtsYouTubeTestCases'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsYouTubeTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        needs_push_media=True,
-        tag='8.0_r3.GtsYouTubeTestCases',
-        test_name='cheets_GTS_R.8.0_r3.GtsYouTubeTestCases',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsYouTubeTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsYouTubeTestCases',
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.8.0_r3.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_GTS_R/control.8.0_r3.tradefed-run-collect-tests-only-internal
deleted file mode 100644
index 9efdc64..0000000
--- a/server/site_tests/cheets_GTS_R/control.8.0_r3.tradefed-run-collect-tests-only-internal
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.8.0_r3.tradefed-run-collect-tests-only-internal'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'LENGTHY'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run all of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='8.0_r3.tradefed-run-collect-tests-only-internal',
-        test_name='cheets_GTS_R.8.0_r3.tradefed-run-collect-tests-only-internal',
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'GtsYouTubeTestCases:skip-media-download:true'],
-        retry_template=None,
-        target_module=None,
-        target_plan=None,
-        uri='LATEST',
-        use_jdk9=True,
-        timeout=1800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAccountsHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsAccountsHostTestCases
deleted file mode 100644
index 7fd7160..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAccountsHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAccountsHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAccountsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAccountsHostTestCases',
-        test_name='cheets_GTS_R.GtsAccountsHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAccountsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAccountsHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAdminTestCases b/server/site_tests/cheets_GTS_R/control.GtsAdminTestCases
deleted file mode 100644
index b65ebc3..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAdminTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAdminTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAdminTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAdminTestCases',
-        test_name='cheets_GTS_R.GtsAdminTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAdminTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAdminTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAfwTestCases b/server/site_tests/cheets_GTS_R/control.GtsAfwTestCases
deleted file mode 100644
index 62be533..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAfwTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAfwTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAfwTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAfwTestCases',
-        test_name='cheets_GTS_R.GtsAfwTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAfwTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAfwTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAndroidAutoDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsAndroidAutoDeviceTestCases
deleted file mode 100644
index 0afaac6..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAndroidAutoDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAndroidAutoDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAndroidAutoDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAndroidAutoDeviceTestCases',
-        test_name='cheets_GTS_R.GtsAndroidAutoDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAndroidAutoDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAndroidAutoDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAppBlacklistDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsAppBlacklistDeviceTestCases
deleted file mode 100644
index 0d80230..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAppBlacklistDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAppBlacklistDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAppBlacklistDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAppBlacklistDeviceTestCases',
-        test_name='cheets_GTS_R.GtsAppBlacklistDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAppBlacklistDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAppBlacklistDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAppTestCases b/server/site_tests/cheets_GTS_R/control.GtsAppTestCases
deleted file mode 100644
index 6126780..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAppTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAppTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAppTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAppTestCases',
-        test_name='cheets_GTS_R.GtsAppTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAppTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAppTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAppVisibilityDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsAppVisibilityDeviceTestCases
deleted file mode 100644
index eebf911..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAppVisibilityDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAppVisibilityDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAppVisibilityDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAppVisibilityDeviceTestCases',
-        test_name='cheets_GTS_R.GtsAppVisibilityDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAppVisibilityDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAppVisibilityDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsArtManagerHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsArtManagerHostTestCases
deleted file mode 100644
index 753f7ae..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsArtManagerHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsArtManagerHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsArtManagerHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsArtManagerHostTestCases',
-        test_name='cheets_GTS_R.GtsArtManagerHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsArtManagerHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsArtManagerHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAssistIntentTestCases b/server/site_tests/cheets_GTS_R/control.GtsAssistIntentTestCases
deleted file mode 100644
index 9fd0ba0..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAssistIntentTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAssistIntentTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistIntentTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAssistIntentTestCases',
-        test_name='cheets_GTS_R.GtsAssistIntentTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistIntentTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistIntentTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAssistantHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsAssistantHostTestCases
deleted file mode 100644
index 99b60f2..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAssistantHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAssistantHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistantHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAssistantHostTestCases',
-        test_name='cheets_GTS_R.GtsAssistantHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistantHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistantHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAssistantMicHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsAssistantMicHostTestCases
deleted file mode 100644
index 7add8cf..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAssistantMicHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAssistantMicHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistantMicHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAssistantMicHostTestCases',
-        test_name='cheets_GTS_R.GtsAssistantMicHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistantMicHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistantMicHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAssistantWorkProfileHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsAssistantWorkProfileHostTestCases
deleted file mode 100644
index d181e89..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAssistantWorkProfileHostTestCases
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAssistantWorkProfileHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistantWorkProfileHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAssistantWorkProfileHostTestCases',
-        test_name='cheets_GTS_R.GtsAssistantWorkProfileHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistantWorkProfileHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAssistantWorkProfileHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        # This module has a known waived abort bug (b/173331969). As long as the
-        # executed test count matches the known number, assume all tests ran.
-        executable_test_count=[0],
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsAudioTestCases b/server/site_tests/cheets_GTS_R/control.GtsAudioTestCases
deleted file mode 100644
index 29c6bb6..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsAudioTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsAudioTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAudioTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsAudioTestCases',
-        test_name='cheets_GTS_R.GtsAudioTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAudioTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsAudioTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsBackupHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsBackupHostTestCases
deleted file mode 100644
index 88eae51..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsBackupHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsBackupHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBackupHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsBackupHostTestCases',
-        test_name='cheets_GTS_R.GtsBackupHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsBackupHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBackupHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsBackupTestCases b/server/site_tests/cheets_GTS_R/control.GtsBackupTestCases
deleted file mode 100644
index 7485772..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsBackupTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsBackupTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBackupTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsBackupTestCases',
-        test_name='cheets_GTS_R.GtsBackupTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsBackupTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBackupTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsBootHealthHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsBootHealthHostTestCases
deleted file mode 100644
index 34ce297..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsBootHealthHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsBootHealthHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBootHealthHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsBootHealthHostTestCases',
-        test_name='cheets_GTS_R.GtsBootHealthHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsBootHealthHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBootHealthHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsBootStatsTestCases b/server/site_tests/cheets_GTS_R/control.GtsBootStatsTestCases
deleted file mode 100644
index 93bd3b5..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsBootStatsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsBootStatsTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsBootStatsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsBootStatsTestCases',
-        test_name='cheets_GTS_R.GtsBootStatsTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsBootStatsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsBootStatsTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsCallLogTestCases b/server/site_tests/cheets_GTS_R/control.GtsCallLogTestCases
deleted file mode 100644
index 5b85e59..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsCallLogTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsCallLogTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCallLogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsCallLogTestCases',
-        test_name='cheets_GTS_R.GtsCallLogTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCallLogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCallLogTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsCameraTestCases b/server/site_tests/cheets_GTS_R/control.GtsCameraTestCases
deleted file mode 100644
index e6591ec..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsCameraTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsCameraTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCameraTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsCameraTestCases',
-        test_name='cheets_GTS_R.GtsCameraTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCameraTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCameraTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsCastHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsCastHostTestCases
deleted file mode 100644
index 580cf8e..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsCastHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsCastHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCastHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsCastHostTestCases',
-        test_name='cheets_GTS_R.GtsCastHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCastHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsCastHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsContactsAppDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsContactsAppDeviceTestCases
deleted file mode 100644
index 6bf13fe..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsContactsAppDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsContactsAppDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContactsAppDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsContactsAppDeviceTestCases',
-        test_name='cheets_GTS_R.GtsContactsAppDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContactsAppDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContactsAppDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsContactsTest b/server/site_tests/cheets_GTS_R/control.GtsContactsTest
deleted file mode 100644
index c1e5905..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsContactsTest
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsContactsTest'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContactsTest of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsContactsTest',
-        test_name='cheets_GTS_R.GtsContactsTest',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContactsTest', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContactsTest',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsContentHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsContentHostTestCases
deleted file mode 100644
index ac8452e..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsContentHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsContentHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContentHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsContentHostTestCases',
-        test_name='cheets_GTS_R.GtsContentHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContentHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContentHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsContentTestCases b/server/site_tests/cheets_GTS_R/control.GtsContentTestCases
deleted file mode 100644
index a0c55ef..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsContentTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsContentTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContentTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsContentTestCases',
-        test_name='cheets_GTS_R.GtsContentTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContentTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContentTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsContextHubPermissionDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsContextHubPermissionDeviceTestCases
deleted file mode 100644
index c4155ef..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsContextHubPermissionDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsContextHubPermissionDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsContextHubPermissionDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsContextHubPermissionDeviceTestCases',
-        test_name='cheets_GTS_R.GtsContextHubPermissionDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContextHubPermissionDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsContextHubPermissionDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsDebugfsMountTestCases b/server/site_tests/cheets_GTS_R/control.GtsDebugfsMountTestCases
deleted file mode 100644
index f5c094b..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsDebugfsMountTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsDebugfsMountTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDebugfsMountTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDebugfsMountTestCases',
-        test_name='cheets_GTS_R.GtsDebugfsMountTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDebugfsMountTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDebugfsMountTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsDeviceConfigTestCases b/server/site_tests/cheets_GTS_R/control.GtsDeviceConfigTestCases
deleted file mode 100644
index bd0208f..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsDeviceConfigTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsDeviceConfigTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDeviceConfigTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDeviceConfigTestCases',
-        test_name='cheets_GTS_R.GtsDeviceConfigTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDeviceConfigTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDeviceConfigTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsDexModuleRegistrationTestCases b/server/site_tests/cheets_GTS_R/control.GtsDexModuleRegistrationTestCases
deleted file mode 100644
index c125464..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsDexModuleRegistrationTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsDexModuleRegistrationTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDexModuleRegistrationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDexModuleRegistrationTestCases',
-        test_name='cheets_GTS_R.GtsDexModuleRegistrationTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDexModuleRegistrationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDexModuleRegistrationTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsDialerAudioTestCases b/server/site_tests/cheets_GTS_R/control.GtsDialerAudioTestCases
deleted file mode 100644
index 2b62dcd..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsDialerAudioTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsDialerAudioTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDialerAudioTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDialerAudioTestCases',
-        test_name='cheets_GTS_R.GtsDialerAudioTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDialerAudioTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDialerAudioTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsDialerDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsDialerDeviceTestCases
deleted file mode 100644
index 4884d13..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsDialerDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsDialerDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDialerDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDialerDeviceTestCases',
-        test_name='cheets_GTS_R.GtsDialerDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDialerDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDialerDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsDozeDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsDozeDeviceTestCases
deleted file mode 100644
index 4ab15f9..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsDozeDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsDozeDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDozeDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDozeDeviceTestCases',
-        test_name='cheets_GTS_R.GtsDozeDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDozeDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDozeDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsDozeHostSideTestCases b/server/site_tests/cheets_GTS_R/control.GtsDozeHostSideTestCases
deleted file mode 100644
index 3d466d5..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsDozeHostSideTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsDozeHostSideTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDozeHostSideTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDozeHostSideTestCases',
-        test_name='cheets_GTS_R.GtsDozeHostSideTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDozeHostSideTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDozeHostSideTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsDuoReadyTestCases b/server/site_tests/cheets_GTS_R/control.GtsDuoReadyTestCases
deleted file mode 100644
index c7307b2..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsDuoReadyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsDuoReadyTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsDuoReadyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsDuoReadyTestCases',
-        test_name='cheets_GTS_R.GtsDuoReadyTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDuoReadyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsDuoReadyTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsEdiHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsEdiHostTestCases
deleted file mode 100644
index cd4c1c3..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsEdiHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsEdiHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsEdiHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsEdiHostTestCases',
-        test_name='cheets_GTS_R.GtsEdiHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsEdiHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsEdiHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsExoPlayerTestCases b/server/site_tests/cheets_GTS_R/control.GtsExoPlayerTestCases
deleted file mode 100644
index c22222807..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsExoPlayerTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsExoPlayerTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsExoPlayerTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsExoPlayerTestCases',
-        test_name='cheets_GTS_R.GtsExoPlayerTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsExoPlayerTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsExoPlayerTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsFeaturesTestCases b/server/site_tests/cheets_GTS_R/control.GtsFeaturesTestCases
deleted file mode 100644
index fcb6b09..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsFeaturesTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsFeaturesTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsFeaturesTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsFeaturesTestCases',
-        test_name='cheets_GTS_R.GtsFeaturesTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsFeaturesTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsFeaturesTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsGmscoreHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsGmscoreHostTestCases
deleted file mode 100644
index a9cddd2..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsGmscoreHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsGmscoreHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsGmscoreHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsGmscoreHostTestCases',
-        test_name='cheets_GTS_R.GtsGmscoreHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGmscoreHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsGmscoreHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsGraphicsHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsGraphicsHostTestCases
deleted file mode 100644
index 2261b8c..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsGraphicsHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsGraphicsHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsGraphicsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsGraphicsHostTestCases',
-        test_name='cheets_GTS_R.GtsGraphicsHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGraphicsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsGraphicsHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsHomeHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsHomeHostTestCases
deleted file mode 100644
index fa571d8..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsHomeHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsHomeHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsHomeHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsHomeHostTestCases',
-        test_name='cheets_GTS_R.GtsHomeHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsHomeHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsHomeHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsIncidentConfirmationTestCases b/server/site_tests/cheets_GTS_R/control.GtsIncidentConfirmationTestCases
deleted file mode 100644
index f8b0b20..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsIncidentConfirmationTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsIncidentConfirmationTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncidentConfirmationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsIncidentConfirmationTestCases',
-        test_name='cheets_GTS_R.GtsIncidentConfirmationTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsIncidentConfirmationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncidentConfirmationTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsIncidentManagerTestCases b/server/site_tests/cheets_GTS_R/control.GtsIncidentManagerTestCases
deleted file mode 100644
index 715fddb..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsIncidentManagerTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsIncidentManagerTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncidentManagerTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsIncidentManagerTestCases',
-        test_name='cheets_GTS_R.GtsIncidentManagerTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsIncidentManagerTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncidentManagerTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsIncrementalInstallProxyHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsIncrementalInstallProxyHostTestCases
deleted file mode 100644
index ce6d9f0..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsIncrementalInstallProxyHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsIncrementalInstallProxyHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncrementalInstallProxyHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsIncrementalInstallProxyHostTestCases',
-        test_name='cheets_GTS_R.GtsIncrementalInstallProxyHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsIncrementalInstallProxyHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncrementalInstallProxyHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsIncrementalInstallTestCases b/server/site_tests/cheets_GTS_R/control.GtsIncrementalInstallTestCases
deleted file mode 100644
index 92e1430..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsIncrementalInstallTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsIncrementalInstallTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncrementalInstallTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsIncrementalInstallTestCases',
-        test_name='cheets_GTS_R.GtsIncrementalInstallTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsIncrementalInstallTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncrementalInstallTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsIncrementalInstallTriggerApp b/server/site_tests/cheets_GTS_R/control.GtsIncrementalInstallTriggerApp
deleted file mode 100644
index cb626d5..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsIncrementalInstallTriggerApp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsIncrementalInstallTriggerApp'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsIncrementalInstallTriggerApp of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsIncrementalInstallTriggerApp',
-        test_name='cheets_GTS_R.GtsIncrementalInstallTriggerApp',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsIncrementalInstallTriggerApp', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsIncrementalInstallTriggerApp',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsInstallPackagesWhitelistDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsInstallPackagesWhitelistDeviceTestCases
deleted file mode 100644
index f405802..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsInstallPackagesWhitelistDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsInstallPackagesWhitelistDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstallPackagesWhitelistDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsInstallPackagesWhitelistDeviceTestCases',
-        test_name='cheets_GTS_R.GtsInstallPackagesWhitelistDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstallPackagesWhitelistDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstallPackagesWhitelistDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsInstallerV2TestCases b/server/site_tests/cheets_GTS_R/control.GtsInstallerV2TestCases
deleted file mode 100644
index c71c5f5..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsInstallerV2TestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsInstallerV2TestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstallerV2TestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsInstallerV2TestCases',
-        test_name='cheets_GTS_R.GtsInstallerV2TestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstallerV2TestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstallerV2TestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsInstantAppsHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsInstantAppsHostTestCases
deleted file mode 100644
index 15afe7c6..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsInstantAppsHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsInstantAppsHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsInstantAppsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsInstantAppsHostTestCases',
-        test_name='cheets_GTS_R.GtsInstantAppsHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstantAppsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsInstantAppsHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsLargeApkHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsLargeApkHostTestCases
deleted file mode 100644
index 0a79d5e..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsLargeApkHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsLargeApkHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLargeApkHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsLargeApkHostTestCases',
-        test_name='cheets_GTS_R.GtsLargeApkHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLargeApkHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLargeApkHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsLensTestCases b/server/site_tests/cheets_GTS_R/control.GtsLensTestCases
deleted file mode 100644
index 9c0634c..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsLensTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsLensTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLensTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsLensTestCases',
-        test_name='cheets_GTS_R.GtsLensTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLensTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLensTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsLinkerConfigTestCases b/server/site_tests/cheets_GTS_R/control.GtsLinkerConfigTestCases
deleted file mode 100644
index ddbf8e4..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsLinkerConfigTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsLinkerConfigTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLinkerConfigTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsLinkerConfigTestCases',
-        test_name='cheets_GTS_R.GtsLinkerConfigTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLinkerConfigTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLinkerConfigTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsLocationHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsLocationHostTestCases
deleted file mode 100644
index a39e5c7..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsLocationHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsLocationHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLocationHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsLocationHostTestCases',
-        test_name='cheets_GTS_R.GtsLocationHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLocationHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLocationHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsLocationTestCases b/server/site_tests/cheets_GTS_R/control.GtsLocationTestCases
deleted file mode 100644
index c585a3c..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsLocationTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsLocationTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLocationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsLocationTestCases',
-        test_name='cheets_GTS_R.GtsLocationTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLocationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsLocationTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsMediaTestCases b/server/site_tests/cheets_GTS_R/control.GtsMediaTestCases
deleted file mode 100644
index 37c5b3c..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsMediaTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsMediaTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 307200
-PRIORITY = 50
-DOC = 'Run module GtsMediaTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=5,
-        tag='GtsMediaTestCases',
-        test_name='cheets_GTS_R.GtsMediaTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsMediaTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsMediaTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=14400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsMemoryHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsMemoryHostTestCases
deleted file mode 100644
index 186e03c..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsMemoryHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsMemoryHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsMemoryHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsMemoryHostTestCases',
-        test_name='cheets_GTS_R.GtsMemoryHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsMemoryHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsMemoryHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsMemoryTestCases b/server/site_tests/cheets_GTS_R/control.GtsMemoryTestCases
deleted file mode 100644
index 64d9e4a..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsMemoryTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsMemoryTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsMemoryTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsMemoryTestCases',
-        test_name='cheets_GTS_R.GtsMemoryTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsMemoryTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsMemoryTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsModuleMetadataTestCases b/server/site_tests/cheets_GTS_R/control.GtsModuleMetadataTestCases
deleted file mode 100644
index 9a726f6..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsModuleMetadataTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsModuleMetadataTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsModuleMetadataTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsModuleMetadataTestCases',
-        test_name='cheets_GTS_R.GtsModuleMetadataTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsModuleMetadataTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsModuleMetadataTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsNetStatsHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsNetStatsHostTestCases
deleted file mode 100644
index 5d0d5ae..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsNetStatsHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsNetStatsHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetStatsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNetStatsHostTestCases',
-        test_name='cheets_GTS_R.GtsNetStatsHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNetStatsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNetStatsHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsNetTestCases b/server/site_tests/cheets_GTS_R/control.GtsNetTestCases
deleted file mode 100644
index 7226fe5..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsNetTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsNetTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNetTestCases',
-        test_name='cheets_GTS_R.GtsNetTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNetTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNetTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsNetworkStackHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsNetworkStackHostTestCases
deleted file mode 100644
index d67ad27..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsNetworkStackHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsNetworkStackHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetworkStackHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNetworkStackHostTestCases',
-        test_name='cheets_GTS_R.GtsNetworkStackHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNetworkStackHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNetworkStackHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsNetworkWatchlistTestCases b/server/site_tests/cheets_GTS_R/control.GtsNetworkWatchlistTestCases
deleted file mode 100644
index 2121b0f..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsNetworkWatchlistTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsNetworkWatchlistTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNetworkWatchlistTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNetworkWatchlistTestCases',
-        test_name='cheets_GTS_R.GtsNetworkWatchlistTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNetworkWatchlistTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNetworkWatchlistTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsNmgiarcTestCases b/server/site_tests/cheets_GTS_R/control.GtsNmgiarcTestCases
deleted file mode 100644
index 0dd9383..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsNmgiarcTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsNmgiarcTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNmgiarcTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNmgiarcTestCases',
-        test_name='cheets_GTS_R.GtsNmgiarcTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNmgiarcTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNmgiarcTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsNoPermissionTestCases b/server/site_tests/cheets_GTS_R/control.GtsNoPermissionTestCases
deleted file mode 100644
index 359fd13..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsNoPermissionTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsNoPermissionTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNoPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNoPermissionTestCases',
-        test_name='cheets_GTS_R.GtsNoPermissionTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNoPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNoPermissionTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsNoPermissionTestCases25 b/server/site_tests/cheets_GTS_R/control.GtsNoPermissionTestCases25
deleted file mode 100644
index b014075..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsNoPermissionTestCases25
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsNoPermissionTestCases25'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNoPermissionTestCases25 of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNoPermissionTestCases25',
-        test_name='cheets_GTS_R.GtsNoPermissionTestCases25',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNoPermissionTestCases25', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNoPermissionTestCases25',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsNotificationTestCases b/server/site_tests/cheets_GTS_R/control.GtsNotificationTestCases
deleted file mode 100644
index 7e06b6a..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsNotificationTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsNotificationTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsNotificationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsNotificationTestCases',
-        test_name='cheets_GTS_R.GtsNotificationTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNotificationTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsNotificationTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsOemLockServiceTestCases b/server/site_tests/cheets_GTS_R/control.GtsOemLockServiceTestCases
deleted file mode 100644
index bae1d3e..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsOemLockServiceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsOemLockServiceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsOemLockServiceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsOemLockServiceTestCases',
-        test_name='cheets_GTS_R.GtsOemLockServiceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsOemLockServiceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsOemLockServiceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsOsTestCases b/server/site_tests/cheets_GTS_R/control.GtsOsTestCases
deleted file mode 100644
index 5072a01..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsOsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsOsTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsOsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsOsTestCases',
-        test_name='cheets_GTS_R.GtsOsTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsOsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsOsTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPackageInstallTestCases b/server/site_tests/cheets_GTS_R/control.GtsPackageInstallTestCases
deleted file mode 100644
index d73ae27..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPackageInstallTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPackageInstallTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageInstallTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPackageInstallTestCases',
-        test_name='cheets_GTS_R.GtsPackageInstallTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPackageInstallTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackageInstallTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPackageInstallerTapjackingTestCases b/server/site_tests/cheets_GTS_R/control.GtsPackageInstallerTapjackingTestCases
deleted file mode 100644
index 5f352b7..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPackageInstallerTapjackingTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPackageInstallerTapjackingTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageInstallerTapjackingTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPackageInstallerTapjackingTestCases',
-        test_name='cheets_GTS_R.GtsPackageInstallerTapjackingTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPackageInstallerTapjackingTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackageInstallerTapjackingTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPackageManagerHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsPackageManagerHostTestCases
deleted file mode 100644
index 372b377..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPackageManagerHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPackageManagerHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageManagerHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPackageManagerHostTestCases',
-        test_name='cheets_GTS_R.GtsPackageManagerHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPackageManagerHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackageManagerHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPackageNameCertPairsDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsPackageNameCertPairsDeviceTestCases
deleted file mode 100644
index 1474e31..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPackageNameCertPairsDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPackageNameCertPairsDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageNameCertPairsDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPackageNameCertPairsDeviceTestCases',
-        test_name='cheets_GTS_R.GtsPackageNameCertPairsDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPackageNameCertPairsDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackageNameCertPairsDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPackageUninstallTestCases b/server/site_tests/cheets_GTS_R/control.GtsPackageUninstallTestCases
deleted file mode 100644
index 240a70a..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPackageUninstallTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPackageUninstallTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPackageUninstallTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPackageUninstallTestCases',
-        test_name='cheets_GTS_R.GtsPackageUninstallTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPackageUninstallTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPackageUninstallTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPartnerBookmarksTestCases b/server/site_tests/cheets_GTS_R/control.GtsPartnerBookmarksTestCases
deleted file mode 100644
index 45493e9..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPartnerBookmarksTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPartnerBookmarksTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPartnerBookmarksTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPartnerBookmarksTestCases',
-        test_name='cheets_GTS_R.GtsPartnerBookmarksTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPartnerBookmarksTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPartnerBookmarksTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPermissionControllerHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsPermissionControllerHostTestCases
deleted file mode 100644
index 72532f8..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPermissionControllerHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPermissionControllerHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPermissionControllerHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPermissionControllerHostTestCases',
-        test_name='cheets_GTS_R.GtsPermissionControllerHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPermissionControllerHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPermissionControllerHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPermissionTestCases b/server/site_tests/cheets_GTS_R/control.GtsPermissionTestCases
deleted file mode 100644
index 77df72f..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPermissionTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPermissionTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPermissionTestCases',
-        test_name='cheets_GTS_R.GtsPermissionTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPermissionTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPlacementTestCases b/server/site_tests/cheets_GTS_R/control.GtsPlacementTestCases
deleted file mode 100644
index deea03c..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPlacementTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPlacementTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlacementTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPlacementTestCases',
-        test_name='cheets_GTS_R.GtsPlacementTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlacementTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlacementTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPlayAutoInstallTestCases b/server/site_tests/cheets_GTS_R/control.GtsPlayAutoInstallTestCases
deleted file mode 100644
index 0efdfc8..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPlayAutoInstallTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPlayAutoInstallTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlayAutoInstallTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPlayAutoInstallTestCases',
-        test_name='cheets_GTS_R.GtsPlayAutoInstallTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlayAutoInstallTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlayAutoInstallTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPlayFsiHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsPlayFsiHostTestCases
deleted file mode 100644
index c688dc5..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPlayFsiHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPlayFsiHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlayFsiHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPlayFsiHostTestCases',
-        test_name='cheets_GTS_R.GtsPlayFsiHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlayFsiHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlayFsiHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPlayFsiTestCases b/server/site_tests/cheets_GTS_R/control.GtsPlayFsiTestCases
deleted file mode 100644
index 0b8a997..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPlayFsiTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPlayFsiTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlayFsiTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPlayFsiTestCases',
-        test_name='cheets_GTS_R.GtsPlayFsiTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlayFsiTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlayFsiTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPlayStoreHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsPlayStoreHostTestCases
deleted file mode 100644
index af0d7fc..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPlayStoreHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPlayStoreHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPlayStoreHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPlayStoreHostTestCases',
-        test_name='cheets_GTS_R.GtsPlayStoreHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlayStoreHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPlayStoreHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPrintTestCases b/server/site_tests/cheets_GTS_R/control.GtsPrintTestCases
deleted file mode 100644
index 00a1b58..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPrintTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPrintTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPrintTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPrintTestCases',
-        test_name='cheets_GTS_R.GtsPrintTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrintTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPrintTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPrivacyTestCases b/server/site_tests/cheets_GTS_R/control.GtsPrivacyTestCases
deleted file mode 100644
index ec9fe41..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPrivacyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPrivacyTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPrivacyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPrivacyTestCases',
-        test_name='cheets_GTS_R.GtsPrivacyTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrivacyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPrivacyTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsPropertiesTestCases b/server/site_tests/cheets_GTS_R/control.GtsPropertiesTestCases
deleted file mode 100644
index 7d973a9..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsPropertiesTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsPropertiesTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPropertiesTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsPropertiesTestCases',
-        test_name='cheets_GTS_R.GtsPropertiesTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPropertiesTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsPropertiesTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsRegulationComplianceTestCases b/server/site_tests/cheets_GTS_R/control.GtsRegulationComplianceTestCases
deleted file mode 100644
index 61641f0..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsRegulationComplianceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsRegulationComplianceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsRegulationComplianceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsRegulationComplianceTestCases',
-        test_name='cheets_GTS_R.GtsRegulationComplianceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsRegulationComplianceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsRegulationComplianceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsRlzTestCases b/server/site_tests/cheets_GTS_R/control.GtsRlzTestCases
deleted file mode 100644
index 6cdc906..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsRlzTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsRlzTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsRlzTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsRlzTestCases',
-        test_name='cheets_GTS_R.GtsRlzTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsRlzTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsRlzTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSampleDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsSampleDeviceTestCases
deleted file mode 100644
index 683c421..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSampleDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSampleDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSampleDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSampleDeviceTestCases',
-        test_name='cheets_GTS_R.GtsSampleDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSampleDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSampleDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSampleDynamicConfigTestCases b/server/site_tests/cheets_GTS_R/control.GtsSampleDynamicConfigTestCases
deleted file mode 100644
index 3f8c0e1..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSampleDynamicConfigTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSampleDynamicConfigTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSampleDynamicConfigTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSampleDynamicConfigTestCases',
-        test_name='cheets_GTS_R.GtsSampleDynamicConfigTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSampleDynamicConfigTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSampleDynamicConfigTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSampleHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsSampleHostTestCases
deleted file mode 100644
index 9245ada..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSampleHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSampleHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSampleHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSampleHostTestCases',
-        test_name='cheets_GTS_R.GtsSampleHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSampleHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSampleHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsScreenshotHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsScreenshotHostTestCases
deleted file mode 100644
index a6d9e66..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsScreenshotHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsScreenshotHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsScreenshotHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsScreenshotHostTestCases',
-        test_name='cheets_GTS_R.GtsScreenshotHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsScreenshotHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsScreenshotHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSearchHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsSearchHostTestCases
deleted file mode 100644
index 7679d7f..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSearchHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSearchHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSearchHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSearchHostTestCases',
-        test_name='cheets_GTS_R.GtsSearchHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSearchHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSearchHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSecurityHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsSecurityHostTestCases
deleted file mode 100644
index 355e181..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSecurityHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSecurityHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSecurityHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSecurityHostTestCases',
-        test_name='cheets_GTS_R.GtsSecurityHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSecurityHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSecurityHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSensorHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsSensorHostTestCases
deleted file mode 100644
index 4f1fd79..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSensorHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSensorHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSensorHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSensorHostTestCases',
-        test_name='cheets_GTS_R.GtsSensorHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSensorHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSensorHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSettingsHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsSettingsHostTestCases
deleted file mode 100644
index 52df101..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSettingsHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSettingsHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSettingsHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSettingsHostTestCases',
-        test_name='cheets_GTS_R.GtsSettingsHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSettingsHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSettingsHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSettingsTestCases b/server/site_tests/cheets_GTS_R/control.GtsSettingsTestCases
deleted file mode 100644
index a75a90d..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSettingsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSettingsTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSettingsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSettingsTestCases',
-        test_name='cheets_GTS_R.GtsSettingsTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSettingsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSettingsTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSetupWizardHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsSetupWizardHostTestCases
deleted file mode 100644
index a1f604e..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSetupWizardHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSetupWizardHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSetupWizardHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSetupWizardHostTestCases',
-        test_name='cheets_GTS_R.GtsSetupWizardHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSetupWizardHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSetupWizardHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSetupWizardNoPermissionTestCases b/server/site_tests/cheets_GTS_R/control.GtsSetupWizardNoPermissionTestCases
deleted file mode 100644
index 8395c01..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSetupWizardNoPermissionTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSetupWizardNoPermissionTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSetupWizardNoPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSetupWizardNoPermissionTestCases',
-        test_name='cheets_GTS_R.GtsSetupWizardNoPermissionTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSetupWizardNoPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSetupWizardNoPermissionTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSimAppDialogTestCases b/server/site_tests/cheets_GTS_R/control.GtsSimAppDialogTestCases
deleted file mode 100644
index 4bb0d55..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSimAppDialogTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSimAppDialogTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSimAppDialogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSimAppDialogTestCases',
-        test_name='cheets_GTS_R.GtsSimAppDialogTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSimAppDialogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSimAppDialogTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSmartBatteryDeviceTestCases b/server/site_tests/cheets_GTS_R/control.GtsSmartBatteryDeviceTestCases
deleted file mode 100644
index 8415b14..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSmartBatteryDeviceTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSmartBatteryDeviceTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSmartBatteryDeviceTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSmartBatteryDeviceTestCases',
-        test_name='cheets_GTS_R.GtsSmartBatteryDeviceTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSmartBatteryDeviceTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSmartBatteryDeviceTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSmsCallLogTestCases b/server/site_tests/cheets_GTS_R/control.GtsSmsCallLogTestCases
deleted file mode 100644
index 37ab598..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSmsCallLogTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSmsCallLogTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSmsCallLogTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSmsCallLogTestCases',
-        test_name='cheets_GTS_R.GtsSmsCallLogTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSmsCallLogTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSmsCallLogTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSsaidHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsSsaidHostTestCases
deleted file mode 100644
index d28f265..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSsaidHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSsaidHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSsaidHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSsaidHostTestCases',
-        test_name='cheets_GTS_R.GtsSsaidHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSsaidHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSsaidHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsStagedInstallHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsStagedInstallHostTestCases
deleted file mode 100644
index 752939e..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsStagedInstallHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsStagedInstallHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStagedInstallHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsStagedInstallHostTestCases',
-        test_name='cheets_GTS_R.GtsStagedInstallHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStagedInstallHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStagedInstallHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsStatsdHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsStatsdHostTestCases
deleted file mode 100644
index d7f7824..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsStatsdHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsStatsdHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStatsdHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsStatsdHostTestCases',
-        test_name='cheets_GTS_R.GtsStatsdHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStatsdHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStatsdHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsStorageTestCases b/server/site_tests/cheets_GTS_R/control.GtsStorageTestCases
deleted file mode 100644
index 0f8fb9f..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsStorageTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsStorageTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsStorageTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsStorageTestCases',
-        test_name='cheets_GTS_R.GtsStorageTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStorageTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsStorageTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSupervisionTestCases b/server/site_tests/cheets_GTS_R/control.GtsSupervisionTestCases
deleted file mode 100644
index f46929e..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSupervisionTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSupervisionTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSupervisionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSupervisionTestCases',
-        test_name='cheets_GTS_R.GtsSupervisionTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSupervisionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSupervisionTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSuspendAppsPermissionTestCases b/server/site_tests/cheets_GTS_R/control.GtsSuspendAppsPermissionTestCases
deleted file mode 100644
index 061cfc4..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSuspendAppsPermissionTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSuspendAppsPermissionTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSuspendAppsPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSuspendAppsPermissionTestCases',
-        test_name='cheets_GTS_R.GtsSuspendAppsPermissionTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSuspendAppsPermissionTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSuspendAppsPermissionTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsSuspendAppsTestCases b/server/site_tests/cheets_GTS_R/control.GtsSuspendAppsTestCases
deleted file mode 100644
index 882d1f8..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsSuspendAppsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsSuspendAppsTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsSuspendAppsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsSuspendAppsTestCases',
-        test_name='cheets_GTS_R.GtsSuspendAppsTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSuspendAppsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsSuspendAppsTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsTelecomManagerTests b/server/site_tests/cheets_GTS_R/control.GtsTelecomManagerTests
deleted file mode 100644
index 737cfbc..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsTelecomManagerTests
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsTelecomManagerTests'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTelecomManagerTests of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTelecomManagerTests',
-        test_name='cheets_GTS_R.GtsTelecomManagerTests',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTelecomManagerTests', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTelecomManagerTests',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsTelephonyNumberVerificationHostCases b/server/site_tests/cheets_GTS_R/control.GtsTelephonyNumberVerificationHostCases
deleted file mode 100644
index 4b2f565..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsTelephonyNumberVerificationHostCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsTelephonyNumberVerificationHostCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTelephonyNumberVerificationHostCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTelephonyNumberVerificationHostCases',
-        test_name='cheets_GTS_R.GtsTelephonyNumberVerificationHostCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTelephonyNumberVerificationHostCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTelephonyNumberVerificationHostCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsTelephonyTestCases b/server/site_tests/cheets_GTS_R/control.GtsTelephonyTestCases
deleted file mode 100644
index 12f3c7c..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsTelephonyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsTelephonyTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTelephonyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTelephonyTestCases',
-        test_name='cheets_GTS_R.GtsTelephonyTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTelephonyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTelephonyTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsTestHarnessModeTestCases b/server/site_tests/cheets_GTS_R/control.GtsTestHarnessModeTestCases
deleted file mode 100644
index 1556c46..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsTestHarnessModeTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsTestHarnessModeTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTestHarnessModeTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTestHarnessModeTestCases',
-        test_name='cheets_GTS_R.GtsTestHarnessModeTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTestHarnessModeTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTestHarnessModeTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsTetheringTestCases b/server/site_tests/cheets_GTS_R/control.GtsTetheringTestCases
deleted file mode 100644
index c4af1c5..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsTetheringTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsTetheringTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTetheringTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTetheringTestCases',
-        test_name='cheets_GTS_R.GtsTetheringTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTetheringTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTetheringTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsTvBugReportTestCases b/server/site_tests/cheets_GTS_R/control.GtsTvBugReportTestCases
deleted file mode 100644
index a1277d3..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsTvBugReportTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsTvBugReportTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTvBugReportTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTvBugReportTestCases',
-        test_name='cheets_GTS_R.GtsTvBugReportTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTvBugReportTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTvBugReportTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsTvHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsTvHostTestCases
deleted file mode 100644
index a407c97..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsTvHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsTvHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTvHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTvHostTestCases',
-        test_name='cheets_GTS_R.GtsTvHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTvHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTvHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsTvTestCases b/server/site_tests/cheets_GTS_R/control.GtsTvTestCases
deleted file mode 100644
index 8df35f4..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsTvTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsTvTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsTvTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsTvTestCases',
-        test_name='cheets_GTS_R.GtsTvTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTvTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsTvTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsUnofficialApisUsageTestCases b/server/site_tests/cheets_GTS_R/control.GtsUnofficialApisUsageTestCases
deleted file mode 100644
index cb18bfe..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsUnofficialApisUsageTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsUnofficialApisUsageTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUnofficialApisUsageTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsUnofficialApisUsageTestCases',
-        test_name='cheets_GTS_R.GtsUnofficialApisUsageTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUnofficialApisUsageTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUnofficialApisUsageTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsUsageStatsTestCases b/server/site_tests/cheets_GTS_R/control.GtsUsageStatsTestCases
deleted file mode 100644
index 4d3ccd4..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsUsageStatsTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsUsageStatsTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUsageStatsTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsUsageStatsTestCases',
-        test_name='cheets_GTS_R.GtsUsageStatsTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUsageStatsTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUsageStatsTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsUserspaceRebootHostSideTestCases b/server/site_tests/cheets_GTS_R/control.GtsUserspaceRebootHostSideTestCases
deleted file mode 100644
index 70bd3aa..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsUserspaceRebootHostSideTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsUserspaceRebootHostSideTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsUserspaceRebootHostSideTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsUserspaceRebootHostSideTestCases',
-        test_name='cheets_GTS_R.GtsUserspaceRebootHostSideTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUserspaceRebootHostSideTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsUserspaceRebootHostSideTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsViewTestCases b/server/site_tests/cheets_GTS_R/control.GtsViewTestCases
deleted file mode 100644
index c9464bb..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsViewTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsViewTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsViewTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsViewTestCases',
-        test_name='cheets_GTS_R.GtsViewTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsViewTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsViewTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsVndkDependencyTestCases b/server/site_tests/cheets_GTS_R/control.GtsVndkDependencyTestCases
deleted file mode 100644
index 6754c79..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsVndkDependencyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsVndkDependencyTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsVndkDependencyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsVndkDependencyTestCases',
-        test_name='cheets_GTS_R.GtsVndkDependencyTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsVndkDependencyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsVndkDependencyTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsWebViewHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsWebViewHostTestCases
deleted file mode 100644
index 5fd0a07..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsWebViewHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsWebViewHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWebViewHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsWebViewHostTestCases',
-        test_name='cheets_GTS_R.GtsWebViewHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsWebViewHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWebViewHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsWebViewTestCases b/server/site_tests/cheets_GTS_R/control.GtsWebViewTestCases
deleted file mode 100644
index f6c8262..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsWebViewTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsWebViewTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWebViewTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsWebViewTestCases',
-        test_name='cheets_GTS_R.GtsWebViewTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsWebViewTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWebViewTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsWellbeingHostTestCases b/server/site_tests/cheets_GTS_R/control.GtsWellbeingHostTestCases
deleted file mode 100644
index bf3350f..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsWellbeingHostTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsWellbeingHostTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWellbeingHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsWellbeingHostTestCases',
-        test_name='cheets_GTS_R.GtsWellbeingHostTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsWellbeingHostTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWellbeingHostTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsWellbeingPermissionPolicyTestCases b/server/site_tests/cheets_GTS_R/control.GtsWellbeingPermissionPolicyTestCases
deleted file mode 100644
index cab0b5d..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsWellbeingPermissionPolicyTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsWellbeingPermissionPolicyTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWellbeingPermissionPolicyTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsWellbeingPermissionPolicyTestCases',
-        test_name='cheets_GTS_R.GtsWellbeingPermissionPolicyTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsWellbeingPermissionPolicyTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWellbeingPermissionPolicyTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsWellbeingTestCases b/server/site_tests/cheets_GTS_R/control.GtsWellbeingTestCases
deleted file mode 100644
index 6c0104d..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsWellbeingTestCases
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsWellbeingTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsWellbeingTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=2,
-        tag='GtsWellbeingTestCases',
-        test_name='cheets_GTS_R.GtsWellbeingTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsWellbeingTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsWellbeingTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.GtsYouTubeTestCases b/server/site_tests/cheets_GTS_R/control.GtsYouTubeTestCases
deleted file mode 100644
index 642aa96..0000000
--- a/server/site_tests/cheets_GTS_R/control.GtsYouTubeTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.GtsYouTubeTestCases'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsYouTubeTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=5,
-        needs_push_media=True,
-        tag='GtsYouTubeTestCases',
-        test_name='cheets_GTS_R.GtsYouTubeTestCases',
-        run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsYouTubeTestCases', '--ignore-business-logic-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='GtsYouTubeTestCases',
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.tradefed-run-collect-tests-only b/server/site_tests/cheets_GTS_R/control.tradefed-run-collect-tests-only
deleted file mode 100644
index 3e51dfa..0000000
--- a/server/site_tests/cheets_GTS_R/control.tradefed-run-collect-tests-only
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.tradefed-run-collect-tests-only'
-ATTRIBUTES = 'suite:gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 2
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-PRIORITY = 70
-DOC = 'Run all of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=0,
-        tag='tradefed-run-collect-tests-only',
-        test_name='cheets_GTS_R.tradefed-run-collect-tests-only',
-        run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'GtsYouTubeTestCases:skip-media-download:true'],
-        retry_template=None,
-        target_module=None,
-        target_plan=None,
-        retry_manual_tests=True,
-        use_jdk9=True,
-        warn_on_test_retry=False,
-        timeout=1800)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS_R/control.tradefed-run-test b/server/site_tests/cheets_GTS_R/control.tradefed-run-test
deleted file mode 100644
index e214921..0000000
--- a/server/site_tests/cheets_GTS_R/control.tradefed-run-test
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file is not auto-generated. Don't delete it.
-
-# Boring.
-import logging
-import pprint
-from autotest_lib.client.bin import utils
-
-usage = """
-1) To run against a particular $DUT use
-   test_that --args="module=GtsMediaTestCases test=com.google.android.media.gts.MediaCodecStressTest#testDecodeDecodeCompositeDisplay1080p" $DUT cheets_GTS_R.tradefed-run-test
-
-2) To run against a lab pool use
-    run_suite.py --board=eve --build=$TRYJOB_BUILD --suite_name arc-gts-test --pool cts --no_wait True --priority CQ --timeout_mins 6160 --retry False --num 1 --suite_min_duts 1 --test_args="{'module' : 'GtsMediaTestCases', 'test' : 'com.google.android.media.gts.MediaCodecStressTest#testDecodeDecodeCompositeDisplay1080p'}"
-"""
-
-def usage_error():
-    logging.info('Example usage:')
-    logging.info(usage)
-    raise SystemExit
-
-pp = pprint.PrettyPrinter()
-logging.info(
-    '***********************************************************************')
-
-# Define the variables that we are going to use and set sensible defaults.
-gts_module = ''
-gts_retry = 5
-gts_revision = None
-gts_test = ''
-gts_timeout = 600
-
-# Pull parameters either from run_suite or test_that.
-if 'args_dict' in vars():
-    logging.info('Raw test options from run_suite:')
-    pp.pprint(args_dict)
-elif args:
-    logging.info('Raw test options from test_that:')
-    pp.pprint(args)
-    args_dict = utils.args_to_dict(args)
-else:
-    usage_error()
-
-gts_module = args_dict.get('module', gts_module)
-gts_revision = args_dict.get('revision', gts_revision)
-gts_test = args_dict.get('test', gts_test)
-gts_timeout = float(args_dict.get('timeout', gts_timeout))
-gts_retry = int(args_dict.get('max_retry', gts_retry))
-
-# Basic checks for option validity.
-logging.error('Running module %s with test %s on revision %s',
-              gts_module, gts_test, gts_revision)
-if not gts_module or not gts_test:
-    usage_error()
-
-# And we are getting ready for tradefed.
-uri = ('gs://chromeos-arc-images/cts/bundle/android-gts-' + gts_revision +
-       '.zip') if gts_revision else 'LATEST'
-run_template = ['run', 'commandAndExit', 'gts',
-                '--include-filter', gts_module + ' ' + gts_test,
-                '--ignore-business-logic-failure']
-retry_template = ['run', 'commandAndExit', 'retry',
-                  '--retry', '{session_id}']
-# Unfortunately super long test names can cause problems. Try to get the
-# rightmost element and use that as a simplified name.
-# TODO(ihf): fix pipeline so it works with super long names.
-simplified_test = gts_test
-if '#' in gts_test:
-    simplified_test = gts_test.split('#')[-1]
-elif '.' in gts_test:
-    simplified_test = gts_test.split('.')[-1]
-tag = 'tradefed-run-test.%s.%s' % (gts_module, simplified_test)
-
-# The usual testing stanza. We are suppressing some DEPENDENCIES on purpose.
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS_R.tradefed-run-test'
-ATTRIBUTES = ''
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 0
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 256000
-DOC = ('Run a test of the Android Google Test Suite (GTS) in the ARC++ '
-       'container.')
-
-# And launch.
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_GTS_R',
-        hosts=host_list,
-        iterations=1,
-        max_retry=gts_retry,
-        needs_push_media=True,
-        tag=tag,
-        test_name=NAME,
-        authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
-        run_template=run_template,
-        retry_template=retry_template,
-        target_module=None,
-        target_plan=None,
-        uri=uri,
-        login_precondition_commands=[
-            'lsblk -do NAME,RM | sed -n s/1$//p | xargs -n1 eject'
-        ],
-        precondition_commands=[
-            'echo $(({0} % 2 * 2 + 1)) > /proc/sys/kernel/perf_event_paranoid',
-            'modprobe configs'
-        ],
-        timeout=gts_timeout)
-
-parallel_simple(run_TS, machines)
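Editorial note: the deleted control.tradefed-run-test wrapper above turns a module/test pair passed via --args into a tradefed include-filter, a retry template, a bundle URI and a shortened tag. The following is a minimal standalone sketch of that string handling, with no Autotest imports; build_run_config and simplify_test_name are illustrative names, not part of Autotest, and the example module/test strings are taken from the usage text above.

# Standalone sketch (no Autotest imports) of the include-filter, tag and
# retry-template construction performed by the deleted control file above.
def simplify_test_name(test):
    """Mirror the control file's shortening of very long test names."""
    if '#' in test:
        return test.split('#')[-1]
    if '.' in test:
        return test.split('.')[-1]
    return test

def build_run_config(module, test, revision=None):
    # Same shape as the run/retry templates in the deleted control file.
    run_template = ['run', 'commandAndExit', 'gts',
                    '--include-filter', module + ' ' + test,
                    '--ignore-business-logic-failure']
    retry_template = ['run', 'commandAndExit', 'retry',
                      '--retry', '{session_id}']
    uri = ('gs://chromeos-arc-images/cts/bundle/android-gts-' + revision +
           '.zip') if revision else 'LATEST'
    tag = 'tradefed-run-test.%s.%s' % (module, simplify_test_name(test))
    return run_template, retry_template, uri, tag

if __name__ == '__main__':
    cfg = build_run_config(
        'GtsMediaTestCases',
        'com.google.android.media.gts.MediaCodecStressTest#'
        'testDecodeDecodeCompositeDisplay1080p')
    for part in cfg:
        print(part)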
diff --git a/server/site_tests/cheets_GTS_R/generate_controlfiles.py b/server/site_tests/cheets_GTS_R/generate_controlfiles.py
deleted file mode 100755
index 0f18408..0000000
--- a/server/site_tests/cheets_GTS_R/generate_controlfiles.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This is a trampoline script to invoke the actual generator script.
-
-import os
-import sys
-
-target_script_name = 'generate_controlfiles_GTS_R.py'
-target_script_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
-    '..', '..', 'cros', 'tradefed', target_script_name))
-os.execv(target_script_path, sys.argv)
diff --git a/server/site_tests/cheets_GTS_R/notest_modules/notest_combined_modules.yaml b/server/site_tests/cheets_GTS_R/notest_modules/notest_combined_modules.yaml
deleted file mode 100644
index ff7bc86..0000000
--- a/server/site_tests/cheets_GTS_R/notest_modules/notest_combined_modules.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-GtsIncrementalInstall: [all]
-GtsMemory: [all]
-GtsNoPermission: [all]
-GtsSample: [all]
diff --git a/server/site_tests/cheets_GTS_R/notest_modules/notest_modules.yaml b/server/site_tests/cheets_GTS_R/notest_modules/notest_modules.yaml
deleted file mode 100644
index 59c9c41..0000000
--- a/server/site_tests/cheets_GTS_R/notest_modules/notest_modules.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-GtsAccountsHostTestCases: [all]
-GtsAssistantWorkProfileHostTestCases: [all]
-GtsBootHealthHostTestCases: [all]
-GtsDebugfsMountTestCases: [shipatN, shipatP]
-GtsDexModuleRegistrationTestCases: [all]
-GtsDozeHostSideTestCases: [all]
-GtsGraphicsHostTestCases: [shipatN, shipatP]
-GtsIncrementalInstallProxyHostTestCases: [all]
-GtsIncrementalInstallTestCases: [all]
-GtsIncrementalInstallTriggerApp: [all]
-GtsInstallerV2TestCases: [all]
-GtsLensTestCases: [all]
-GtsMemoryHostTestCases: [all]
-GtsMemoryTestCases: [all]
-GtsModuleMetadataTestCases: [all]
-GtsNoPermissionTestCases: [all]
-GtsNoPermissionTestCases25: [all]
-GtsOemLockServiceTestCases: [all]
-GtsSampleDeviceTestCases: [all]
-GtsSampleDynamicConfigTestCases: [all]
-GtsSampleHostTestCases: [all]
-GtsSmartBatteryDeviceTestCases: [all]
-GtsStagedInstallHostTestCases: [all]
-GtsStorageTestCases: [all]
-GtsSuspendAppsTestCases: [all]
-GtsTestHarnessModeTestCases: [all]
-GtsUsageStatsTestCases: [all]
-GtsUserspaceRebootHostSideTestCases: [all]
-GtsWellbeingHostTestCases: [all]
-GtsWellbeingTestCases: [all]
diff --git a/server/site_tests/cheets_LabDependencies/cheets_LabDependencies.py b/server/site_tests/cheets_LabDependencies/cheets_LabDependencies.py
deleted file mode 100644
index 4294877..0000000
--- a/server/site_tests/cheets_LabDependencies/cheets_LabDependencies.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This test only checks whether the directory of the dependency package exists.
-# It is expected that the necessary path configuration will be done in the test.
-
-# IMPORTANT: This test should be updated whenever the dependencies specified change
-# for any of the environments.
-
-import logging
-import os
-import subprocess
-
-from autotest_lib.client.bin import utils as client_utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test, utils
-
-
-class cheets_LabDependencies(test.test):
-    """Check basic dependency requirements for running CTS tests."""
-    version = 1
-    """
-    Context: xTS P requires JDK 8 or 9, and xTS R requires JDK 9 or above.
-
-    LXC (container for running server-side autotest)
-    Runs xTS P and R. Uses JDK 8 as default, and JDK 9 on xTS R.
-    JDK 9 path is hardcoded in cheets_CTS_R.
-
-    moblab
-    Only runs CTS P. Uses JDK 9 (single JDK) on CTS P.
-    Currently this test does not run on moblab.
-    Because is_moblab does not work, test ignores the moblab component.
-    Need to move JDK's to /java. TODO(haddowk)
-    Will run when new distribution is released.
-
-    chroot (PFQ environment)
-    Runs CTS P and R. Uses JDK 11 as default, and JDK 8 on CTS P.
-    JDK 8 Path is hardcoded in cheets_CTS_P.
-    """
-
-    ENV_EXPECTED = {
-            'LXC': {
-                    'JDK': [
-                            '/usr/lib/jvm/jdk-9.0.4',
-                            '/usr/lib/jvm/java-8-openjdk-amd64',
-                    ],
-                    'DIR': '/usr/lib/jvm'
-            },
-            'moblab': {
-                    'JDK': [
-                            '/java/jdk-9.0.4',
-                    ],
-                    'DIR': '/java'
-            },
-            'chroot': {
-                    'JDK': [
-                            '/opt/icedtea-bin-3.4.0',
-                            '/opt/openjdk-bin-11',
-                    ],
-                    'DIR': '/opt'
-            }
-    }
-
-    def check_JDK(self, env):
-        """Check whether required JDK directories exist for the environment."""
-        for dep_path in self.ENV_EXPECTED[env]['JDK']:
-            """Raise TestFail if specified JDK directories do not exist."""
-            if not os.path.isdir(dep_path):
-                java_path = subprocess.check_output([
-                                'find', self.ENV_EXPECTED[env]['DIR'], '-path',
-                                '*bin/java'], stderr=subprocess.STDOUT)
-                if java_path:
-                    java_path = ', '.join(java_path.split('\n')[:-1])
-                raise error.TestFail(
-                        'Missing required JDK dependency %s for %s. '
-                        'Quick search shows currently installed versions are %s.'
-                        % (dep_path, env, java_path))
-
-    def run_once(self, host=None):
-        """Check the type of environment, and see if all dependencies are satisfied."""
-        if utils.is_in_container():
-            logging.info('[ENV] Running inside the LXC container')
-            env = 'LXC'
-        # is_moblab does not work. Run in moblab when new distribution is released.
-        elif client_utils.is_moblab():
-            logging.info('[ENV] Running inside moblab')
-            env = 'moblab'
-        else:
-            logging.info('[ENV] Running inside chroot environment')
-            env = 'chroot'
-        self.host = host
-        self.check_JDK(env)
diff --git a/server/site_tests/cheets_LabDependencies/control b/server/site_tests/cheets_LabDependencies/control
deleted file mode 100644
index d780595..0000000
--- a/server/site_tests/cheets_LabDependencies/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'jiyounha'
-NAME = 'cheets_LabDependencies'
-TIME = 'FAST'
-TEST_CATEGORY = ""
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'arc'
-ATTRIBUTES = ('suite:bvt-perbuild')
-JOB_RETRIES = 0
-
-DOC = """
-This test will fail if correct dependencies are not installed in the testing environment.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('cheets_LabDependencies', host=host)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/cheets_VTS_R/cheets_VTS_R.py b/server/site_tests/cheets_VTS_R/cheets_VTS_R.py
deleted file mode 100644
index 00ace18..0000000
--- a/server/site_tests/cheets_VTS_R/cheets_VTS_R.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# repohooks/pre-upload.py currently does not run pylint. But for developers who
-# want to check their code manually we disable several harmless pylint warnings
-# which just distract from more serious remaining issues.
-#
-# The instance variable _android_vts is not defined in __init__().
-# pylint: disable=attribute-defined-outside-init
-#
-# Many short variable names don't follow the naming convention.
-# pylint: disable=invalid-name
-
-import logging
-import os
-
-from autotest_lib.server import utils
-from autotest_lib.server.cros.tradefed import tradefed_test
-
-# Maximum default time allowed for each individual CTS module.
-_CTS_TIMEOUT_SECONDS = 3600
-
-# Internal download locations for android vts bundles.
-_INTERNAL_VTS = 'gs://chromeos-arc-images/vts/'
-_VTS_URI = {
-        'arm': _INTERNAL_VTS + 'android-vts-6722941-linux_x86-arm.zip',
-        'x86': _INTERNAL_VTS + 'android-vts-6722941-linux_x86-x86.zip',
-}
-
-
-class cheets_VTS_R(tradefed_test.TradefedTest):
-    """Sets up tradefed to run VTS tests."""
-    version = 1
-
-    _SHARD_CMD = '--shard-count'
-
-    def _tradefed_retry_command(self, template, session_id):
-        """Build tradefed 'retry' command from template."""
-        cmd = []
-        for arg in template:
-            cmd.append(arg.format(session_id=session_id))
-        return cmd
-
-    def _tradefed_run_command(self, template):
-        """Build tradefed 'run' command from template."""
-        cmd = template[:]
-        # If we are running outside of the lab we can collect more data.
-        if not utils.is_in_container():
-            logging.info('Running outside of lab, adding extra debug options.')
-            cmd.append('--log-level-display=DEBUG')
-        return cmd
-
-    def _get_default_bundle_url(self, bundle):
-        return _VTS_URI[bundle]
-
-    def _get_tradefed_base_dir(self):
-        return 'android-vts'
-
-    def _tradefed_cmd_path(self):
-        return os.path.join(self._repository, 'tools', 'vts-tradefed')
-
-    def _should_skip_test(self, bundle):
-        """Some tests are expected to fail and are skipped."""
-        # novato* are x86 VMs without binary translation. Skip the ARM tests.
-        no_ARM_ABI_test_boards = ('novato', 'novato-arc64', 'novato-arcnext')
-        if self._get_board_name(
-        ) in no_ARM_ABI_test_boards and bundle == 'arm':
-            return True
-        return False
-
-    def run_once(self,
-                 test_name,
-                 run_template,
-                 retry_template=None,
-                 target_module=None,
-                 target_plan=None,
-                 needs_push_media=False,
-                 bundle=None,
-                 precondition_commands=[],
-                 login_precondition_commands=[],
-                 timeout=_CTS_TIMEOUT_SECONDS):
-        """Runs the specified CTS once, but with several retries.
-
-        Run an arbitrary tradefed command.
-
-        @param test_name: the name of test. Used for logging.
-        @param run_template: the template to construct the run command.
-                             Example: ['run', 'commandAndExit', 'cts',
-                                       '--skip-media-download']
-        @param retry_template: the template to construct the retry command.
-                               Example: ['run', 'commandAndExit', 'retry',
-                                         '--skip-media-download', '--retry',
-                                         '{session_id}']
-        @param target_module: the name of test module to run.
-        @param target_plan: the name of the test plan to run.
-        @param needs_push_media: need to push test media streams.
-        @param bundle: the type of the CTS bundle: 'arm' or 'x86'
-        @param precondition_commands: a list of scripts to be run on the
-        dut before the test is run, the scripts must already be installed.
-        @param login_precondition_commands: a list of scripts to be run on the
-        dut before the log-in for the test is performed.
-        @param timeout: time after which tradefed can be interrupted.
-        """
-        self._run_tradefed_with_retries(
-                test_name=test_name,
-                run_template=run_template,
-                retry_template=retry_template,
-                timeout=timeout,
-                target_module=target_module,
-                target_plan=target_plan,
-                bundle=bundle,
-                cts_uri=_VTS_URI,
-                login_precondition_commands=login_precondition_commands,
-                precondition_commands=precondition_commands)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.ApkVerityTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.ApkVerityTest
deleted file mode 100644
index c7465fd..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.ApkVerityTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.ApkVerityTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module ApkVerityTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.ApkVerityTest',
-        test_name='cheets_VTS_R.11_r1.arm.ApkVerityTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'ApkVerityTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='ApkVerityTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.FastbootGetvarUserspaceTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.FastbootGetvarUserspaceTest
deleted file mode 100644
index e4564e1..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.FastbootGetvarUserspaceTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.FastbootGetvarUserspaceTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module FastbootGetvarUserspaceTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.FastbootGetvarUserspaceTest',
-        test_name='cheets_VTS_R.11_r1.arm.FastbootGetvarUserspaceTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'FastbootGetvarUserspaceTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='FastbootGetvarUserspaceTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.FastbootVerifyUserspaceTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.FastbootVerifyUserspaceTest
deleted file mode 100644
index 6e85862..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.FastbootVerifyUserspaceTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.FastbootVerifyUserspaceTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module FastbootVerifyUserspaceTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.FastbootVerifyUserspaceTest',
-        test_name='cheets_VTS_R.11_r1.arm.FastbootVerifyUserspaceTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'FastbootVerifyUserspaceTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='FastbootVerifyUserspaceTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.FirmwareBootHeaderVerification b/server/site_tests/cheets_VTS_R/control.11_r1.arm.FirmwareBootHeaderVerification
deleted file mode 100644
index adb2c94..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.FirmwareBootHeaderVerification
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.FirmwareBootHeaderVerification'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module FirmwareBootHeaderVerification of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.FirmwareBootHeaderVerification',
-        test_name='cheets_VTS_R.11_r1.arm.FirmwareBootHeaderVerification',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'FirmwareBootHeaderVerification', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='FirmwareBootHeaderVerification',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.FirmwareDtboVerification b/server/site_tests/cheets_VTS_R/control.11_r1.arm.FirmwareDtboVerification
deleted file mode 100644
index d220237..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.FirmwareDtboVerification
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.FirmwareDtboVerification'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module FirmwareDtboVerification of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.FirmwareDtboVerification',
-        test_name='cheets_VTS_R.11_r1.arm.FirmwareDtboVerification',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'FirmwareDtboVerification', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='FirmwareDtboVerification',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.HalUsbGadgetV1_0HostTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.HalUsbGadgetV1_0HostTest
deleted file mode 100644
index 96e67a7..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.HalUsbGadgetV1_0HostTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.HalUsbGadgetV1_0HostTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module HalUsbGadgetV1_0HostTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.HalUsbGadgetV1_0HostTest',
-        test_name='cheets_VTS_R.11_r1.arm.HalUsbGadgetV1_0HostTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'HalUsbGadgetV1_0HostTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='HalUsbGadgetV1_0HostTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelApiSysfsTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelApiSysfsTest
deleted file mode 100644
index 7a1376b..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelApiSysfsTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.KernelApiSysfsTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module KernelApiSysfsTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.KernelApiSysfsTest',
-        test_name='cheets_VTS_R.11_r1.arm.KernelApiSysfsTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'KernelApiSysfsTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='KernelApiSysfsTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelDynamicPartitionsTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelDynamicPartitionsTest
deleted file mode 100644
index 9694698..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelDynamicPartitionsTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.KernelDynamicPartitionsTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module KernelDynamicPartitionsTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.KernelDynamicPartitionsTest',
-        test_name='cheets_VTS_R.11_r1.arm.KernelDynamicPartitionsTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'KernelDynamicPartitionsTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='KernelDynamicPartitionsTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelLibcutilsTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelLibcutilsTest
deleted file mode 100644
index 460026f..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelLibcutilsTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.KernelLibcutilsTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module KernelLibcutilsTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.KernelLibcutilsTest',
-        test_name='cheets_VTS_R.11_r1.arm.KernelLibcutilsTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'KernelLibcutilsTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='KernelLibcutilsTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelSelinuxFileApiTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelSelinuxFileApiTest
deleted file mode 100644
index c031c1d..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.KernelSelinuxFileApiTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.KernelSelinuxFileApiTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module KernelSelinuxFileApiTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.KernelSelinuxFileApiTest',
-        test_name='cheets_VTS_R.11_r1.arm.KernelSelinuxFileApiTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'KernelSelinuxFileApiTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='KernelSelinuxFileApiTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.SdcardfsTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.SdcardfsTest
deleted file mode 100644
index 540c4cc..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.SdcardfsTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.SdcardfsTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module SdcardfsTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.SdcardfsTest',
-        test_name='cheets_VTS_R.11_r1.arm.SdcardfsTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'SdcardfsTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='SdcardfsTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.VtsFwkDisplayServiceV1_0TargetTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.VtsFwkDisplayServiceV1_0TargetTest
deleted file mode 100644
index 6a489ad..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.VtsFwkDisplayServiceV1_0TargetTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.VtsFwkDisplayServiceV1_0TargetTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module VtsFwkDisplayServiceV1_0TargetTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.VtsFwkDisplayServiceV1_0TargetTest',
-        test_name='cheets_VTS_R.11_r1.arm.VtsFwkDisplayServiceV1_0TargetTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'VtsFwkDisplayServiceV1_0TargetTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='VtsFwkDisplayServiceV1_0TargetTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.VtsHal b/server/site_tests/cheets_VTS_R/control.11_r1.arm.VtsHal
deleted file mode 100644
index 9772e3a..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.VtsHal
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.VtsHal'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module VtsHalAtraceV1_0TargetTest, VtsHalAudioControlV1_0TargetTest, VtsHalAudioControlV2_0TargetTest, VtsHalAudioEffectV2_0TargetTest, VtsHalAudioEffectV4_0TargetTest, VtsHalAudioEffectV5_0TargetTest, VtsHalAudioEffectV6_0TargetTest, VtsHalAudioPolicyV1_0TargetTest, VtsHalAudioV2_0TargetTest, VtsHalAudioV4_0TargetTest, VtsHalAudioV5_0TargetTest, VtsHalAudioV6_0TargetTest, VtsHalAuthSecretV1_0TargetTest, VtsHalAutomotiveDisplayV1_0TargetTest, VtsHalBiometricsFaceV1_0TargetTest, VtsHalBiometricsFingerprintV2_1TargetTest, VtsHalBiometricsFingerprintV2_2TargetTest, VtsHalBluetoothA2dpV1_0TargetTest, VtsHalBluetoothAudioV2_0TargetTest, VtsHalBluetoothV1_0TargetTest, VtsHalBluetoothV1_1TargetTest, VtsHalBootV1_0TargetTest, VtsHalBootV1_1TargetTest, VtsHalBroadcastradioV1_0TargetTest, VtsHalBroadcastradioV1_1TargetTest, VtsHalBroadcastradioV2_0TargetTest, VtsHalCameraProviderV2_4TargetTest, VtsHalCameraServiceV2_0TargetTest, VtsHalCanBusV1_0TargetTest, VtsHalCanBusVirtualV1_0TargetTest, VtsHalCanControllerV1_0TargetTest, VtsHalCasV1_0TargetTest, VtsHalCasV1_1TargetTest, VtsHalCasV1_2TargetTest, VtsHalConfigstoreV1_0TargetTest, VtsHalConfirmationUIV1_0TargetTest, VtsHalContexthubV1_0TargetTest, VtsHalContexthubV1_1TargetTest, VtsHalDrmV1_0TargetTest, VtsHalDrmV1_1TargetTest, VtsHalDrmV1_2TargetTest, VtsHalDrmV1_3TargetTest, VtsHalDumpstateV1_0TargetTest, VtsHalDumpstateV1_1TargetTest, VtsHalEvsV1_0TargetTest, VtsHalEvsV1_1TargetTest, VtsHalGatekeeperV1_0TargetTest, VtsHalGnssV1_0TargetTest, VtsHalGnssV1_1TargetTest, VtsHalGnssV2_0TargetTest, VtsHalGnssV2_1TargetTest, VtsHalGraphicsComposerV2_1TargetTest, VtsHalGraphicsComposerV2_2TargetTest, VtsHalGraphicsComposerV2_3TargetTest, VtsHalGraphicsComposerV2_4TargetTest, VtsHalGraphicsMapperV2_0TargetTest, VtsHalGraphicsMapperV2_1TargetTest, VtsHalGraphicsMapperV3_0TargetTest, VtsHalGraphicsMapperV4_0TargetTest, VtsHalHealthStorageV1_0TargetTest, VtsHalHealthV1_0TargetTest, VtsHalHealthV2_0TargetTest, VtsHalHealthV2_1TargetTest, VtsHalIdentityTargetTest, VtsHalInputClassifierV1_0TargetTest, VtsHalIrV1_0TargetTest, VtsHalKeymasterV3_0TargetTest, VtsHalKeymasterV4_0TargetTest, VtsHalKeymasterV4_1TargetTest, VtsHalLightTargetTest, VtsHalLightV2_0TargetTest, VtsHalMediaC2V1_0TargetAudioDecTest, VtsHalMediaC2V1_0TargetAudioEncTest, VtsHalMediaC2V1_0TargetComponentTest, VtsHalMediaC2V1_0TargetMasterTest, VtsHalMediaC2V1_0TargetVideoDecTest, VtsHalMediaC2V1_0TargetVideoEncTest, VtsHalMediaOmxV1_0TargetAudioDecTest, VtsHalMediaOmxV1_0TargetAudioEncTest, VtsHalMediaOmxV1_0TargetComponentTest, VtsHalMediaOmxV1_0TargetMasterTest, VtsHalMediaOmxV1_0TargetVideoDecTest, VtsHalMediaOmxV1_0TargetVideoEncTest, VtsHalMemtrackV1_0TargetTest, VtsHalNetNetdV1_0TargetTest, VtsHalNetNetdV1_1TargetTest, VtsHalNeuralnetworksV1_0TargetTest, VtsHalNeuralnetworksV1_1TargetTest, VtsHalNeuralnetworksV1_2BenchmarkTestCases, VtsHalNeuralnetworksV1_2TargetTest, VtsHalNeuralnetworksV1_3BenchmarkTestCases, VtsHalNeuralnetworksV1_3TargetTest, VtsHalNfcV1_0TargetTest, VtsHalNfcV1_1TargetTest, VtsHalNfcV1_2TargetTest, VtsHalOccupantAwarenessV1_0TargetTest, VtsHalOemLockV1_0TargetTest, VtsHalPowerStatsV1_0TargetTest, VtsHalPowerTargetTest, VtsHalPowerV1_0TargetTest, VtsHalPowerV1_1TargetTest, VtsHalPowerV1_2TargetTest, VtsHalPowerV1_3TargetTest, VtsHalRadioConfigV1_0TargetTest, VtsHalRadioConfigV1_1TargetTest, VtsHalRadioConfigV1_2TargetTest, VtsHalRadioV1_0TargetTest, VtsHalRadioV1_1TargetTest, VtsHalRadioV1_2TargetTest, VtsHalRadioV1_3TargetTest, VtsHalRadioV1_4TargetTest, VtsHalRadioV1_5TargetTest, VtsHalRebootEscrowTargetTest, VtsHalRenderscriptV1_0TargetTest, VtsHalSapV1_0TargetTest, VtsHalSecureElementV1_0TargetTest, VtsHalSecureElementV1_1TargetTest, VtsHalSecureElementV1_2TargetTest, VtsHalSensorManagerV1_0TargetTest, VtsHalSensorsV1_0TargetTest, VtsHalSensorsV2_0TargetTest, VtsHalSensorsV2_1TargetTest, VtsHalSoundtriggerV2_0TargetTest, VtsHalSoundtriggerV2_1TargetTest, VtsHalSoundtriggerV2_2TargetTest, VtsHalSoundtriggerV2_3TargetTest, VtsHalStatsV1_0TargetTest, VtsHalSurroundViewV1_0TargetTest, VtsHalTetheroffloadConfigV1_0TargetTest, VtsHalTetheroffloadControlV1_0TargetTest, VtsHalThermalV1_0TargetTest, VtsHalThermalV1_1TargetTest, VtsHalThermalV2_0TargetTest, VtsHalTvInputV1_0TargetTest, VtsHalTvTunerV1_0TargetTest, VtsHalUsbGadgetV1_1HostTest, VtsHalUsbV1_0TargetTest, VtsHalUsbV1_1TargetTest, VtsHalUsbV1_2TargetTest, VtsHalVibratorTargetTest, VtsHalVibratorV1_0TargetTest, VtsHalVibratorV1_1TargetTest, VtsHalVibratorV1_2TargetTest, VtsHalVibratorV1_3TargetTest, VtsHalVrV1_0TargetTest, VtsHalWeaverV1_0TargetTest, VtsHalWifiApV1_0TargetTest, VtsHalWifiApV1_4TargetTest, VtsHalWifiHostapdV1_0TargetTest, VtsHalWifiHostapdV1_1TargetTest, VtsHalWifiHostapdV1_2TargetTest, VtsHalWifiKeystoreV1_0TargetTest, VtsHalWifiNanV1_0TargetTest, VtsHalWifiNanV1_2TargetTest, VtsHalWifiNanV1_4TargetTest, VtsHalWifiOffloadV1_0TargetTest, VtsHalWifiRttV1_4TargetTest, VtsHalWifiSupplicantP2pV1_0TargetTest, VtsHalWifiSupplicantP2pV1_2TargetTest, VtsHalWifiSupplicantV1_0TargetTest, VtsHalWifiSupplicantV1_1TargetTest, VtsHalWifiSupplicantV1_2TargetTest, VtsHalWifiSupplicantV1_3TargetTest, VtsHalWifiV1_0TargetTest, VtsHalWifiV1_1TargetTest, VtsHalWifiV1_2TargetTest, VtsHalWifiV1_3TargetTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.VtsHal',
-        test_name='cheets_VTS_R.11_r1.arm.VtsHal',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'VtsHalAtraceV1_0TargetTest', '--include-filter', 'VtsHalAudioControlV1_0TargetTest', '--include-filter', 'VtsHalAudioControlV2_0TargetTest', '--include-filter', 'VtsHalAudioEffectV2_0TargetTest', '--include-filter', 'VtsHalAudioEffectV4_0TargetTest', '--include-filter', 'VtsHalAudioEffectV5_0TargetTest', '--include-filter', 'VtsHalAudioEffectV6_0TargetTest', '--include-filter', 'VtsHalAudioPolicyV1_0TargetTest', '--include-filter', 'VtsHalAudioV2_0TargetTest', '--include-filter', 'VtsHalAudioV4_0TargetTest', '--include-filter', 'VtsHalAudioV5_0TargetTest', '--include-filter', 'VtsHalAudioV6_0TargetTest', '--include-filter', 'VtsHalAuthSecretV1_0TargetTest', '--include-filter', 'VtsHalAutomotiveDisplayV1_0TargetTest', '--include-filter', 'VtsHalBiometricsFaceV1_0TargetTest', '--include-filter', 'VtsHalBiometricsFingerprintV2_1TargetTest', '--include-filter', 'VtsHalBiometricsFingerprintV2_2TargetTest', '--include-filter', 'VtsHalBluetoothA2dpV1_0TargetTest', '--include-filter', 'VtsHalBluetoothAudioV2_0TargetTest', '--include-filter', 'VtsHalBluetoothV1_0TargetTest', '--include-filter', 'VtsHalBluetoothV1_1TargetTest', '--include-filter', 'VtsHalBootV1_0TargetTest', '--include-filter', 'VtsHalBootV1_1TargetTest', '--include-filter', 'VtsHalBroadcastradioV1_0TargetTest', '--include-filter', 'VtsHalBroadcastradioV1_1TargetTest', '--include-filter', 'VtsHalBroadcastradioV2_0TargetTest', '--include-filter', 'VtsHalCameraProviderV2_4TargetTest', '--include-filter', 'VtsHalCameraServiceV2_0TargetTest', '--include-filter', 'VtsHalCanBusV1_0TargetTest', '--include-filter', 'VtsHalCanBusVirtualV1_0TargetTest', '--include-filter', 'VtsHalCanControllerV1_0TargetTest', '--include-filter', 'VtsHalCasV1_0TargetTest', '--include-filter', 'VtsHalCasV1_1TargetTest', '--include-filter', 'VtsHalCasV1_2TargetTest', '--include-filter', 'VtsHalConfigstoreV1_0TargetTest', '--include-filter', 'VtsHalConfirmationUIV1_0TargetTest', '--include-filter', 'VtsHalContexthubV1_0TargetTest', '--include-filter', 'VtsHalContexthubV1_1TargetTest', '--include-filter', 'VtsHalDrmV1_0TargetTest', '--include-filter', 'VtsHalDrmV1_1TargetTest', '--include-filter', 'VtsHalDrmV1_2TargetTest', '--include-filter', 'VtsHalDrmV1_3TargetTest', '--include-filter', 'VtsHalDumpstateV1_0TargetTest', '--include-filter', 'VtsHalDumpstateV1_1TargetTest', '--include-filter', 'VtsHalEvsV1_0TargetTest', '--include-filter', 'VtsHalEvsV1_1TargetTest', '--include-filter', 'VtsHalGatekeeperV1_0TargetTest', '--include-filter', 'VtsHalGnssV1_0TargetTest', '--include-filter', 'VtsHalGnssV1_1TargetTest', '--include-filter', 'VtsHalGnssV2_0TargetTest', '--include-filter', 'VtsHalGnssV2_1TargetTest', '--include-filter', 'VtsHalGraphicsComposerV2_1TargetTest', '--include-filter', 'VtsHalGraphicsComposerV2_2TargetTest', '--include-filter', 'VtsHalGraphicsComposerV2_3TargetTest', '--include-filter', 'VtsHalGraphicsComposerV2_4TargetTest', '--include-filter', 'VtsHalGraphicsMapperV2_0TargetTest', '--include-filter', 'VtsHalGraphicsMapperV2_1TargetTest', '--include-filter', 'VtsHalGraphicsMapperV3_0TargetTest', '--include-filter', 'VtsHalGraphicsMapperV4_0TargetTest', '--include-filter', 'VtsHalHealthStorageV1_0TargetTest', '--include-filter', 'VtsHalHealthV1_0TargetTest', '--include-filter', 'VtsHalHealthV2_0TargetTest', '--include-filter', 'VtsHalHealthV2_1TargetTest', '--include-filter', 'VtsHalIdentityTargetTest', '--include-filter', 'VtsHalInputClassifierV1_0TargetTest', '--include-filter', 'VtsHalIrV1_0TargetTest', '--include-filter', 'VtsHalKeymasterV3_0TargetTest', '--include-filter', 'VtsHalKeymasterV4_0TargetTest', '--include-filter', 'VtsHalKeymasterV4_1TargetTest', '--include-filter', 'VtsHalLightTargetTest', '--include-filter', 'VtsHalLightV2_0TargetTest', '--include-filter', 'VtsHalMediaC2V1_0TargetAudioDecTest', '--include-filter', 'VtsHalMediaC2V1_0TargetAudioEncTest', '--include-filter', 'VtsHalMediaC2V1_0TargetComponentTest', '--include-filter', 'VtsHalMediaC2V1_0TargetMasterTest', '--include-filter', 'VtsHalMediaC2V1_0TargetVideoDecTest', '--include-filter', 'VtsHalMediaC2V1_0TargetVideoEncTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetAudioDecTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetAudioEncTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetComponentTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetMasterTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetVideoDecTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetVideoEncTest', '--include-filter', 'VtsHalMemtrackV1_0TargetTest', '--include-filter', 'VtsHalNetNetdV1_0TargetTest', '--include-filter', 'VtsHalNetNetdV1_1TargetTest', '--include-filter', 'VtsHalNeuralnetworksV1_0TargetTest', '--include-filter', 'VtsHalNeuralnetworksV1_1TargetTest', '--include-filter', 'VtsHalNeuralnetworksV1_2BenchmarkTestCases', '--include-filter', 'VtsHalNeuralnetworksV1_2TargetTest', '--include-filter', 'VtsHalNeuralnetworksV1_3BenchmarkTestCases', '--include-filter', 'VtsHalNeuralnetworksV1_3TargetTest', '--include-filter', 'VtsHalNfcV1_0TargetTest', '--include-filter', 'VtsHalNfcV1_1TargetTest', '--include-filter', 'VtsHalNfcV1_2TargetTest', '--include-filter', 'VtsHalOccupantAwarenessV1_0TargetTest', '--include-filter', 'VtsHalOemLockV1_0TargetTest', '--include-filter', 'VtsHalPowerStatsV1_0TargetTest', '--include-filter', 'VtsHalPowerTargetTest', '--include-filter', 'VtsHalPowerV1_0TargetTest', '--include-filter', 'VtsHalPowerV1_1TargetTest', '--include-filter', 'VtsHalPowerV1_2TargetTest', '--include-filter', 'VtsHalPowerV1_3TargetTest', '--include-filter', 'VtsHalRadioConfigV1_0TargetTest', '--include-filter', 'VtsHalRadioConfigV1_1TargetTest', '--include-filter', 'VtsHalRadioConfigV1_2TargetTest', '--include-filter', 'VtsHalRadioV1_0TargetTest', '--include-filter', 'VtsHalRadioV1_1TargetTest', '--include-filter', 'VtsHalRadioV1_2TargetTest', '--include-filter', 'VtsHalRadioV1_3TargetTest', '--include-filter', 'VtsHalRadioV1_4TargetTest', '--include-filter', 'VtsHalRadioV1_5TargetTest', '--include-filter', 'VtsHalRebootEscrowTargetTest', '--include-filter', 'VtsHalRenderscriptV1_0TargetTest', '--include-filter', 'VtsHalSapV1_0TargetTest', '--include-filter', 'VtsHalSecureElementV1_0TargetTest', '--include-filter', 'VtsHalSecureElementV1_1TargetTest', '--include-filter', 'VtsHalSecureElementV1_2TargetTest', '--include-filter', 'VtsHalSensorManagerV1_0TargetTest', '--include-filter', 'VtsHalSensorsV1_0TargetTest', '--include-filter', 'VtsHalSensorsV2_0TargetTest', '--include-filter', 'VtsHalSensorsV2_1TargetTest', '--include-filter', 'VtsHalSoundtriggerV2_0TargetTest', '--include-filter', 'VtsHalSoundtriggerV2_1TargetTest', '--include-filter', 'VtsHalSoundtriggerV2_2TargetTest', '--include-filter', 'VtsHalSoundtriggerV2_3TargetTest', '--include-filter', 'VtsHalStatsV1_0TargetTest', '--include-filter', 'VtsHalSurroundViewV1_0TargetTest', '--include-filter', 'VtsHalTetheroffloadConfigV1_0TargetTest', '--include-filter', 'VtsHalTetheroffloadControlV1_0TargetTest', '--include-filter', 'VtsHalThermalV1_0TargetTest', '--include-filter', 'VtsHalThermalV1_1TargetTest', '--include-filter', 'VtsHalThermalV2_0TargetTest', '--include-filter', 'VtsHalTvInputV1_0TargetTest', '--include-filter', 'VtsHalTvTunerV1_0TargetTest', '--include-filter', 'VtsHalUsbGadgetV1_1HostTest', '--include-filter', 'VtsHalUsbV1_0TargetTest', '--include-filter', 'VtsHalUsbV1_1TargetTest', '--include-filter', 'VtsHalUsbV1_2TargetTest', '--include-filter', 'VtsHalVibratorTargetTest', '--include-filter', 'VtsHalVibratorV1_0TargetTest', '--include-filter', 'VtsHalVibratorV1_1TargetTest', '--include-filter', 'VtsHalVibratorV1_2TargetTest', '--include-filter', 'VtsHalVibratorV1_3TargetTest', '--include-filter', 'VtsHalVrV1_0TargetTest', '--include-filter', 'VtsHalWeaverV1_0TargetTest', '--include-filter', 'VtsHalWifiApV1_0TargetTest', '--include-filter', 'VtsHalWifiApV1_4TargetTest', '--include-filter', 'VtsHalWifiHostapdV1_0TargetTest', '--include-filter', 'VtsHalWifiHostapdV1_1TargetTest', '--include-filter', 'VtsHalWifiHostapdV1_2TargetTest', '--include-filter', 'VtsHalWifiKeystoreV1_0TargetTest', '--include-filter', 'VtsHalWifiNanV1_0TargetTest', '--include-filter', 'VtsHalWifiNanV1_2TargetTest', '--include-filter', 'VtsHalWifiNanV1_4TargetTest', '--include-filter', 'VtsHalWifiOffloadV1_0TargetTest', '--include-filter', 'VtsHalWifiRttV1_4TargetTest', '--include-filter', 'VtsHalWifiSupplicantP2pV1_0TargetTest', '--include-filter', 'VtsHalWifiSupplicantP2pV1_2TargetTest', '--include-filter', 'VtsHalWifiSupplicantV1_0TargetTest', '--include-filter', 'VtsHalWifiSupplicantV1_1TargetTest', '--include-filter', 'VtsHalWifiSupplicantV1_2TargetTest', '--include-filter', 'VtsHalWifiSupplicantV1_3TargetTest', '--include-filter', 'VtsHalWifiV1_0TargetTest', '--include-filter', 'VtsHalWifiV1_1TargetTest', '--include-filter', 'VtsHalWifiV1_2TargetTest', '--include-filter', 'VtsHalWifiV1_3TargetTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='VtsHal',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=302400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.VtsHidlAllocatorV1_0TargetTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.VtsHidlAllocatorV1_0TargetTest
deleted file mode 100644
index 7f0be02..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.VtsHidlAllocatorV1_0TargetTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.VtsHidlAllocatorV1_0TargetTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module VtsHidlAllocatorV1_0TargetTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.VtsHidlAllocatorV1_0TargetTest',
-        test_name='cheets_VTS_R.11_r1.arm.VtsHidlAllocatorV1_0TargetTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'VtsHidlAllocatorV1_0TargetTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='VtsHidlAllocatorV1_0TargetTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.binderDriverInterfaceTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.binderDriverInterfaceTest
deleted file mode 100644
index 55df7c1..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.binderDriverInterfaceTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.binderDriverInterfaceTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module binderDriverInterfaceTest, binderDriverInterfaceTest_IPC_32 of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.binderDriverInterfaceTest',
-        test_name='cheets_VTS_R.11_r1.arm.binderDriverInterfaceTest',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'binderDriverInterfaceTest', '--include-filter', 'binderDriverInterfaceTest_IPC_32', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='binderDriverInterfaceTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.binderLibTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.binderLibTest
deleted file mode 100644
index 8054d27..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.binderLibTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.binderLibTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module binderLibTest, binderLibTest_IPC_32 of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.binderLibTest',
-        test_name='cheets_VTS_R.11_r1.arm.binderLibTest',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'binderLibTest', '--include-filter', 'binderLibTest_IPC_32', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='binderLibTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.binderSafeInterfaceTest b/server/site_tests/cheets_VTS_R/control.11_r1.arm.binderSafeInterfaceTest
deleted file mode 100644
index 7a0a787..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.binderSafeInterfaceTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.binderSafeInterfaceTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module binderSafeInterfaceTest of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.binderSafeInterfaceTest',
-        test_name='cheets_VTS_R.11_r1.arm.binderSafeInterfaceTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'binderSafeInterfaceTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='binderSafeInterfaceTest',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.bpf_module_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.bpf_module_test
deleted file mode 100644
index 93de8f5..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.bpf_module_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.bpf_module_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module bpf_module_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.bpf_module_test',
-        test_name='cheets_VTS_R.11_r1.arm.bpf_module_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'bpf_module_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='bpf_module_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.fiemap_writer_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.fiemap_writer_test
deleted file mode 100644
index 2b75ad7..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.fiemap_writer_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.fiemap_writer_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module fiemap_writer_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.fiemap_writer_test',
-        test_name='cheets_VTS_R.11_r1.arm.fiemap_writer_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'fiemap_writer_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='fiemap_writer_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.memunreachable_binder_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.memunreachable_binder_test
deleted file mode 100644
index b3190d2..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.memunreachable_binder_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.memunreachable_binder_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module memunreachable_binder_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.memunreachable_binder_test',
-        test_name='cheets_VTS_R.11_r1.arm.memunreachable_binder_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'memunreachable_binder_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='memunreachable_binder_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.netd_integration_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.netd_integration_test
deleted file mode 100644
index e3ead46..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.netd_integration_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.netd_integration_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module netd_integration_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.netd_integration_test',
-        test_name='cheets_VTS_R.11_r1.arm.netd_integration_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'netd_integration_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='netd_integration_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_compatibilityMatrix_validate_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_compatibilityMatrix_validate_test
deleted file mode 100644
index 4d292dc..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_compatibilityMatrix_validate_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_compatibilityMatrix_validate_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_compatibilityMatrix_validate_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_compatibilityMatrix_validate_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_compatibilityMatrix_validate_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_compatibilityMatrix_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_compatibilityMatrix_validate_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_core_liblp_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_core_liblp_test
deleted file mode 100644
index bde8770..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_core_liblp_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_core_liblp_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_core_liblp_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_core_liblp_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_core_liblp_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_core_liblp_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_core_liblp_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_core_meminfo_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_core_meminfo_test
deleted file mode 100644
index 8389857..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_core_meminfo_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_core_meminfo_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_core_meminfo_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_core_meminfo_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_core_meminfo_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_core_meminfo_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_core_meminfo_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_core_test_binary_qtaguid_module b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_core_test_binary_qtaguid_module
deleted file mode 100644
index 070ca7b..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_core_test_binary_qtaguid_module
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_core_test_binary_qtaguid_module'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_core_test_binary_qtaguid_module of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_core_test_binary_qtaguid_module',
-        test_name='cheets_VTS_R.11_r1.arm.vts_core_test_binary_qtaguid_module',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_core_test_binary_qtaguid_module', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_core_test_binary_qtaguid_module',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_defaultPermissions_validate_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_defaultPermissions_validate_test
deleted file mode 100644
index ba9d03f..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_defaultPermissions_validate_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_defaultPermissions_validate_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_defaultPermissions_validate_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_defaultPermissions_validate_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_defaultPermissions_validate_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_defaultPermissions_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_defaultPermissions_validate_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_gsi_boot_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_gsi_boot_test
deleted file mode 100644
index c1930c2..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_gsi_boot_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_gsi_boot_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_gsi_boot_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_gsi_boot_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_gsi_boot_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_gsi_boot_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_gsi_boot_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_halManifest_validate_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_halManifest_validate_test
deleted file mode 100644
index f5c7c10..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_halManifest_validate_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_halManifest_validate_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_halManifest_validate_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_halManifest_validate_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_halManifest_validate_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_halManifest_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_halManifest_validate_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ibase_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ibase_test
deleted file mode 100644
index 20d6c6c..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ibase_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_ibase_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_ibase_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_ibase_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_ibase_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_ibase_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_ibase_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_checkpoint_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_checkpoint_test
deleted file mode 100644
index 90035b4..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_checkpoint_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_kernel_checkpoint_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_checkpoint_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_kernel_checkpoint_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_kernel_checkpoint_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_checkpoint_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_checkpoint_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_encryption_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_encryption_test
deleted file mode 100644
index 19f70f9..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_encryption_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_kernel_encryption_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_encryption_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_kernel_encryption_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_kernel_encryption_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_encryption_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_encryption_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_kheaders b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_kheaders
deleted file mode 100644
index 3d16d55..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_kheaders
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_kernel_kheaders'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_kheaders of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_kernel_kheaders',
-        test_name='cheets_VTS_R.11_r1.arm.vts_kernel_kheaders',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_kheaders', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_kheaders',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_loopconfig_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_loopconfig_test
deleted file mode 100644
index cd1c250..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_loopconfig_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_kernel_loopconfig_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_loopconfig_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_kernel_loopconfig_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_kernel_loopconfig_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_loopconfig_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_loopconfig_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_net_tests b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_net_tests
deleted file mode 100644
index 58f3421..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_net_tests
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_kernel_net_tests'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_net_tests of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_kernel_net_tests',
-        test_name='cheets_VTS_R.11_r1.arm.vts_kernel_net_tests',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_net_tests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_net_tests',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_proc_file_api_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_proc_file_api_test
deleted file mode 100644
index 62f8e4e..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_proc_file_api_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_kernel_proc_file_api_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_proc_file_api_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_kernel_proc_file_api_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_kernel_proc_file_api_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_proc_file_api_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_proc_file_api_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_toolchain b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_toolchain
deleted file mode 100644
index f718dc9..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_toolchain
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_kernel_toolchain'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_toolchain of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_kernel_toolchain',
-        test_name='cheets_VTS_R.11_r1.arm.vts_kernel_toolchain',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_toolchain', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_toolchain',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_tun_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_tun_test
deleted file mode 100644
index f4dc103..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_kernel_tun_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_kernel_tun_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_tun_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_kernel_tun_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_kernel_tun_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_tun_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_tun_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_libdm_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_libdm_test
deleted file mode 100644
index 3de9d7e..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_libdm_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_libdm_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_libdm_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_libdm_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_libdm_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_libdm_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_libdm_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_libsnapshot_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_libsnapshot_test
deleted file mode 100644
index d47a97d..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_libsnapshot_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_libsnapshot_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_libsnapshot_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_libsnapshot_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_libsnapshot_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_libsnapshot_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_libsnapshot_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_linux_kselftest_arm_ b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_linux_kselftest_arm_
deleted file mode 100644
index e16dfc9..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_linux_kselftest_arm_
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_linux_kselftest_arm_'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_linux_kselftest_arm_32, vts_linux_kselftest_arm_64 of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_linux_kselftest_arm_',
-        test_name='cheets_VTS_R.11_r1.arm.vts_linux_kselftest_arm_',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'vts_linux_kselftest_arm_32', '--include-filter', 'vts_linux_kselftest_arm_64', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_linux_kselftest_arm_',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_linux_kselftest_x86_ b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_linux_kselftest_x86_
deleted file mode 100644
index 62425e0..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_linux_kselftest_x86_
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_linux_kselftest_x86_'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_linux_kselftest_x86_32, vts_linux_kselftest_x86_64 of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_linux_kselftest_x86_',
-        test_name='cheets_VTS_R.11_r1.arm.vts_linux_kselftest_x86_',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'vts_linux_kselftest_x86_32', '--include-filter', 'vts_linux_kselftest_x86_64', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_linux_kselftest_x86_',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_arm b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_arm
deleted file mode 100644
index 4523968..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_arm
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_ltp_test_arm'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_ltp_test_arm of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_ltp_test_arm',
-        test_name='cheets_VTS_R.11_r1.arm.vts_ltp_test_arm',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_ltp_test_arm', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_ltp_test_arm',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_arm_64 b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_arm_64
deleted file mode 100644
index e9077b9..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_arm_64
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_ltp_test_arm_64'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_ltp_test_arm_64, vts_ltp_test_arm_64_hwasan, vts_ltp_test_arm_64_lowmem, vts_ltp_test_arm_64_lowmem_hwasan of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_ltp_test_arm_64',
-        test_name='cheets_VTS_R.11_r1.arm.vts_ltp_test_arm_64',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'vts_ltp_test_arm_64', '--include-filter', 'vts_ltp_test_arm_64_hwasan', '--include-filter', 'vts_ltp_test_arm_64_lowmem', '--include-filter', 'vts_ltp_test_arm_64_lowmem_hwasan', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_ltp_test_arm_64',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_arm_lowmem b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_arm_lowmem
deleted file mode 100644
index 27264bc..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_arm_lowmem
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_ltp_test_arm_lowmem'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_ltp_test_arm_lowmem of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_ltp_test_arm_lowmem',
-        test_name='cheets_VTS_R.11_r1.arm.vts_ltp_test_arm_lowmem',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_ltp_test_arm_lowmem', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_ltp_test_arm_lowmem',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_x86 b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_x86
deleted file mode 100644
index 4ec3f3c..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_ltp_test_x86
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_ltp_test_x86'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_ltp_test_x86, vts_ltp_test_x86_64 of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_ltp_test_x86',
-        test_name='cheets_VTS_R.11_r1.arm.vts_ltp_test_x86',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'vts_ltp_test_x86', '--include-filter', 'vts_ltp_test_x86_64', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_ltp_test_x86',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_media b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_media
deleted file mode 100644
index f3091dc..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_media
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_media'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_mediaCodecs_validate_test, vts_mediaProfiles_validate_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_media',
-        test_name='cheets_VTS_R.11_r1.arm.vts_media',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'vts_mediaCodecs_validate_test', '--include-filter', 'vts_mediaProfiles_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_media',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_permission_validate_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_permission_validate_test
deleted file mode 100644
index d0e520a..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_permission_validate_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_permission_validate_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_permission_validate_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_permission_validate_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_permission_validate_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_permission_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_permission_validate_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_processgroup_validate_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_processgroup_validate_test
deleted file mode 100644
index 7057d45..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_processgroup_validate_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_processgroup_validate_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_processgroup_validate_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_processgroup_validate_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_processgroup_validate_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_processgroup_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_processgroup_validate_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_security_avb_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_security_avb_test
deleted file mode 100644
index 081a0e4..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_security_avb_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_security_avb_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_security_avb_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_security_avb_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_security_avb_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_security_avb_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_security_avb_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_test_binary_bow_module b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_test_binary_bow_module
deleted file mode 100644
index 3f009a2..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_test_binary_bow_module
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_test_binary_bow_module'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_test_binary_bow_module of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_test_binary_bow_module',
-        test_name='cheets_VTS_R.11_r1.arm.vts_test_binary_bow_module',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_test_binary_bow_module', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_test_binary_bow_module',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_platform_version_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_platform_version_test
deleted file mode 100644
index c69f489..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_platform_version_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_treble_platform_version_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_treble_platform_version_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_treble_platform_version_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_treble_platform_version_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_treble_platform_version_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_treble_platform_version_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_sys_prop_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_sys_prop_test
deleted file mode 100644
index 9d3237e..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_sys_prop_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_treble_sys_prop_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_treble_sys_prop_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_treble_sys_prop_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_treble_sys_prop_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_treble_sys_prop_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_treble_sys_prop_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_vintf_framework_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_vintf_framework_test
deleted file mode 100644
index b1369b1..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_vintf_framework_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_treble_vintf_framework_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_treble_vintf_framework_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_treble_vintf_framework_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_treble_vintf_framework_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_treble_vintf_framework_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_treble_vintf_framework_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_vintf_vendor_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_vintf_vendor_test
deleted file mode 100644
index 8833e27..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_treble_vintf_vendor_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_treble_vintf_vendor_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_treble_vintf_vendor_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_treble_vintf_vendor_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_treble_vintf_vendor_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_treble_vintf_vendor_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_treble_vintf_vendor_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_virtual_ab_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_virtual_ab_test
deleted file mode 100644
index 60e5a6a..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_virtual_ab_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_virtual_ab_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_virtual_ab_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_virtual_ab_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_virtual_ab_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_virtual_ab_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_virtual_ab_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_vndk_abi_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_vndk_abi_test
deleted file mode 100644
index 4a38213..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_vndk_abi_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_vndk_abi_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_vndk_abi_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_vndk_abi_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_vndk_abi_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_vndk_abi_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_vndk_abi_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_vndk_dependency_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_vndk_dependency_test
deleted file mode 100644
index 8b455c1..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_vndk_dependency_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_vndk_dependency_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_vndk_dependency_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_vndk_dependency_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_vndk_dependency_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_vndk_dependency_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_vndk_dependency_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_vndk_files_test b/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_vndk_files_test
deleted file mode 100644
index 93c2f63..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.arm.vts_vndk_files_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.arm.vts_vndk_files_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_arm'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_vndk_files_test of the Vendor Test Suite (VTS) using arm ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.arm.vts_vndk_files_test',
-        test_name='cheets_VTS_R.11_r1.arm.vts_vndk_files_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_vndk_files_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_vndk_files_test',
-        target_plan=None,
-        bundle='arm',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.ApkVerityTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.ApkVerityTest
deleted file mode 100644
index 98049ec..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.ApkVerityTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.ApkVerityTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module ApkVerityTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.ApkVerityTest',
-        test_name='cheets_VTS_R.11_r1.x86.ApkVerityTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'ApkVerityTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='ApkVerityTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.FastbootGetvarUserspaceTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.FastbootGetvarUserspaceTest
deleted file mode 100644
index d66c482..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.FastbootGetvarUserspaceTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.FastbootGetvarUserspaceTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module FastbootGetvarUserspaceTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.FastbootGetvarUserspaceTest',
-        test_name='cheets_VTS_R.11_r1.x86.FastbootGetvarUserspaceTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'FastbootGetvarUserspaceTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='FastbootGetvarUserspaceTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.FastbootVerifyUserspaceTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.FastbootVerifyUserspaceTest
deleted file mode 100644
index c2bbc22..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.FastbootVerifyUserspaceTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.FastbootVerifyUserspaceTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module FastbootVerifyUserspaceTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.FastbootVerifyUserspaceTest',
-        test_name='cheets_VTS_R.11_r1.x86.FastbootVerifyUserspaceTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'FastbootVerifyUserspaceTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='FastbootVerifyUserspaceTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.FirmwareBootHeaderVerification b/server/site_tests/cheets_VTS_R/control.11_r1.x86.FirmwareBootHeaderVerification
deleted file mode 100644
index d159dfd..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.FirmwareBootHeaderVerification
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.FirmwareBootHeaderVerification'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module FirmwareBootHeaderVerification of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.FirmwareBootHeaderVerification',
-        test_name='cheets_VTS_R.11_r1.x86.FirmwareBootHeaderVerification',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'FirmwareBootHeaderVerification', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='FirmwareBootHeaderVerification',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.FirmwareDtboVerification b/server/site_tests/cheets_VTS_R/control.11_r1.x86.FirmwareDtboVerification
deleted file mode 100644
index 38a9b63..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.FirmwareDtboVerification
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.FirmwareDtboVerification'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module FirmwareDtboVerification of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.FirmwareDtboVerification',
-        test_name='cheets_VTS_R.11_r1.x86.FirmwareDtboVerification',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'FirmwareDtboVerification', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='FirmwareDtboVerification',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.HalUsbGadgetV1_0HostTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.HalUsbGadgetV1_0HostTest
deleted file mode 100644
index 2d39371..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.HalUsbGadgetV1_0HostTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.HalUsbGadgetV1_0HostTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module HalUsbGadgetV1_0HostTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.HalUsbGadgetV1_0HostTest',
-        test_name='cheets_VTS_R.11_r1.x86.HalUsbGadgetV1_0HostTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'HalUsbGadgetV1_0HostTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='HalUsbGadgetV1_0HostTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelApiSysfsTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelApiSysfsTest
deleted file mode 100644
index 2f01711..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelApiSysfsTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.KernelApiSysfsTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module KernelApiSysfsTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.KernelApiSysfsTest',
-        test_name='cheets_VTS_R.11_r1.x86.KernelApiSysfsTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'KernelApiSysfsTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='KernelApiSysfsTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelDynamicPartitionsTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelDynamicPartitionsTest
deleted file mode 100644
index 87e0b09..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelDynamicPartitionsTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.KernelDynamicPartitionsTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module KernelDynamicPartitionsTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.KernelDynamicPartitionsTest',
-        test_name='cheets_VTS_R.11_r1.x86.KernelDynamicPartitionsTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'KernelDynamicPartitionsTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='KernelDynamicPartitionsTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelLibcutilsTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelLibcutilsTest
deleted file mode 100644
index 4433cf7..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelLibcutilsTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.KernelLibcutilsTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module KernelLibcutilsTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.KernelLibcutilsTest',
-        test_name='cheets_VTS_R.11_r1.x86.KernelLibcutilsTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'KernelLibcutilsTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='KernelLibcutilsTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelSelinuxFileApiTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelSelinuxFileApiTest
deleted file mode 100644
index 79277b5..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.KernelSelinuxFileApiTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.KernelSelinuxFileApiTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module KernelSelinuxFileApiTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.KernelSelinuxFileApiTest',
-        test_name='cheets_VTS_R.11_r1.x86.KernelSelinuxFileApiTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'KernelSelinuxFileApiTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='KernelSelinuxFileApiTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.SdcardfsTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.SdcardfsTest
deleted file mode 100644
index 0516009..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.SdcardfsTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.SdcardfsTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module SdcardfsTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.SdcardfsTest',
-        test_name='cheets_VTS_R.11_r1.x86.SdcardfsTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'SdcardfsTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='SdcardfsTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.VtsFwkDisplayServiceV1_0TargetTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.VtsFwkDisplayServiceV1_0TargetTest
deleted file mode 100644
index f831394..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.VtsFwkDisplayServiceV1_0TargetTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.VtsFwkDisplayServiceV1_0TargetTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module VtsFwkDisplayServiceV1_0TargetTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.VtsFwkDisplayServiceV1_0TargetTest',
-        test_name='cheets_VTS_R.11_r1.x86.VtsFwkDisplayServiceV1_0TargetTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'VtsFwkDisplayServiceV1_0TargetTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='VtsFwkDisplayServiceV1_0TargetTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.VtsHal b/server/site_tests/cheets_VTS_R/control.11_r1.x86.VtsHal
deleted file mode 100644
index c4ff035..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.VtsHal
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.VtsHal'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module VtsHalAtraceV1_0TargetTest, VtsHalAudioControlV1_0TargetTest, VtsHalAudioControlV2_0TargetTest, VtsHalAudioEffectV2_0TargetTest, VtsHalAudioEffectV4_0TargetTest, VtsHalAudioEffectV5_0TargetTest, VtsHalAudioEffectV6_0TargetTest, VtsHalAudioPolicyV1_0TargetTest, VtsHalAudioV2_0TargetTest, VtsHalAudioV4_0TargetTest, VtsHalAudioV5_0TargetTest, VtsHalAudioV6_0TargetTest, VtsHalAuthSecretV1_0TargetTest, VtsHalAutomotiveDisplayV1_0TargetTest, VtsHalBiometricsFaceV1_0TargetTest, VtsHalBiometricsFingerprintV2_1TargetTest, VtsHalBiometricsFingerprintV2_2TargetTest, VtsHalBluetoothA2dpV1_0TargetTest, VtsHalBluetoothAudioV2_0TargetTest, VtsHalBluetoothV1_0TargetTest, VtsHalBluetoothV1_1TargetTest, VtsHalBootV1_0TargetTest, VtsHalBootV1_1TargetTest, VtsHalBroadcastradioV1_0TargetTest, VtsHalBroadcastradioV1_1TargetTest, VtsHalBroadcastradioV2_0TargetTest, VtsHalCameraProviderV2_4TargetTest, VtsHalCameraServiceV2_0TargetTest, VtsHalCanBusV1_0TargetTest, VtsHalCanBusVirtualV1_0TargetTest, VtsHalCanControllerV1_0TargetTest, VtsHalCasV1_0TargetTest, VtsHalCasV1_1TargetTest, VtsHalCasV1_2TargetTest, VtsHalConfigstoreV1_0TargetTest, VtsHalConfirmationUIV1_0TargetTest, VtsHalContexthubV1_0TargetTest, VtsHalContexthubV1_1TargetTest, VtsHalDrmV1_0TargetTest, VtsHalDrmV1_1TargetTest, VtsHalDrmV1_2TargetTest, VtsHalDrmV1_3TargetTest, VtsHalDumpstateV1_0TargetTest, VtsHalDumpstateV1_1TargetTest, VtsHalEvsV1_0TargetTest, VtsHalEvsV1_1TargetTest, VtsHalGatekeeperV1_0TargetTest, VtsHalGnssV1_0TargetTest, VtsHalGnssV1_1TargetTest, VtsHalGnssV2_0TargetTest, VtsHalGnssV2_1TargetTest, VtsHalGraphicsComposerV2_1TargetTest, VtsHalGraphicsComposerV2_2TargetTest, VtsHalGraphicsComposerV2_3TargetTest, VtsHalGraphicsComposerV2_4TargetTest, VtsHalGraphicsMapperV2_0TargetTest, VtsHalGraphicsMapperV2_1TargetTest, VtsHalGraphicsMapperV3_0TargetTest, VtsHalGraphicsMapperV4_0TargetTest, VtsHalHealthStorageV1_0TargetTest, VtsHalHealthV1_0TargetTest, VtsHalHealthV2_0TargetTest, VtsHalHealthV2_1TargetTest, VtsHalIdentityTargetTest, VtsHalInputClassifierV1_0TargetTest, VtsHalIrV1_0TargetTest, VtsHalKeymasterV3_0TargetTest, VtsHalKeymasterV4_0TargetTest, VtsHalKeymasterV4_1TargetTest, VtsHalLightTargetTest, VtsHalLightV2_0TargetTest, VtsHalMediaC2V1_0TargetAudioDecTest, VtsHalMediaC2V1_0TargetAudioEncTest, VtsHalMediaC2V1_0TargetComponentTest, VtsHalMediaC2V1_0TargetMasterTest, VtsHalMediaC2V1_0TargetVideoDecTest, VtsHalMediaC2V1_0TargetVideoEncTest, VtsHalMediaOmxV1_0TargetAudioDecTest, VtsHalMediaOmxV1_0TargetAudioEncTest, VtsHalMediaOmxV1_0TargetComponentTest, VtsHalMediaOmxV1_0TargetMasterTest, VtsHalMediaOmxV1_0TargetVideoDecTest, VtsHalMediaOmxV1_0TargetVideoEncTest, VtsHalMemtrackV1_0TargetTest, VtsHalNetNetdV1_0TargetTest, VtsHalNetNetdV1_1TargetTest, VtsHalNeuralnetworksV1_0TargetTest, VtsHalNeuralnetworksV1_1TargetTest, VtsHalNeuralnetworksV1_2BenchmarkTestCases, VtsHalNeuralnetworksV1_2TargetTest, VtsHalNeuralnetworksV1_3BenchmarkTestCases, VtsHalNeuralnetworksV1_3TargetTest, VtsHalNfcV1_0TargetTest, VtsHalNfcV1_1TargetTest, VtsHalNfcV1_2TargetTest, VtsHalOccupantAwarenessV1_0TargetTest, VtsHalOemLockV1_0TargetTest, VtsHalPowerStatsV1_0TargetTest, VtsHalPowerTargetTest, VtsHalPowerV1_0TargetTest, VtsHalPowerV1_1TargetTest, VtsHalPowerV1_2TargetTest, VtsHalPowerV1_3TargetTest, VtsHalRadioConfigV1_0TargetTest, VtsHalRadioConfigV1_1TargetTest, VtsHalRadioConfigV1_2TargetTest, VtsHalRadioV1_0TargetTest, VtsHalRadioV1_1TargetTest, VtsHalRadioV1_2TargetTest, VtsHalRadioV1_3TargetTest, VtsHalRadioV1_4TargetTest, VtsHalRadioV1_5TargetTest, VtsHalRebootEscrowTargetTest, VtsHalRenderscriptV1_0TargetTest, VtsHalSapV1_0TargetTest, VtsHalSecureElementV1_0TargetTest, VtsHalSecureElementV1_1TargetTest, VtsHalSecureElementV1_2TargetTest, VtsHalSensorManagerV1_0TargetTest, VtsHalSensorsV1_0TargetTest, VtsHalSensorsV2_0TargetTest, VtsHalSensorsV2_1TargetTest, VtsHalSoundtriggerV2_0TargetTest, VtsHalSoundtriggerV2_1TargetTest, VtsHalSoundtriggerV2_2TargetTest, VtsHalSoundtriggerV2_3TargetTest, VtsHalStatsV1_0TargetTest, VtsHalSurroundViewV1_0TargetTest, VtsHalTetheroffloadConfigV1_0TargetTest, VtsHalTetheroffloadControlV1_0TargetTest, VtsHalThermalV1_0TargetTest, VtsHalThermalV1_1TargetTest, VtsHalThermalV2_0TargetTest, VtsHalTvInputV1_0TargetTest, VtsHalTvTunerV1_0TargetTest, VtsHalUsbGadgetV1_1HostTest, VtsHalUsbV1_0TargetTest, VtsHalUsbV1_1TargetTest, VtsHalUsbV1_2TargetTest, VtsHalVibratorTargetTest, VtsHalVibratorV1_0TargetTest, VtsHalVibratorV1_1TargetTest, VtsHalVibratorV1_2TargetTest, VtsHalVibratorV1_3TargetTest, VtsHalVrV1_0TargetTest, VtsHalWeaverV1_0TargetTest, VtsHalWifiApV1_0TargetTest, VtsHalWifiApV1_4TargetTest, VtsHalWifiHostapdV1_0TargetTest, VtsHalWifiHostapdV1_1TargetTest, VtsHalWifiHostapdV1_2TargetTest, VtsHalWifiKeystoreV1_0TargetTest, VtsHalWifiNanV1_0TargetTest, VtsHalWifiNanV1_2TargetTest, VtsHalWifiNanV1_4TargetTest, VtsHalWifiOffloadV1_0TargetTest, VtsHalWifiRttV1_4TargetTest, VtsHalWifiSupplicantP2pV1_0TargetTest, VtsHalWifiSupplicantP2pV1_2TargetTest, VtsHalWifiSupplicantV1_0TargetTest, VtsHalWifiSupplicantV1_1TargetTest, VtsHalWifiSupplicantV1_2TargetTest, VtsHalWifiSupplicantV1_3TargetTest, VtsHalWifiV1_0TargetTest, VtsHalWifiV1_1TargetTest, VtsHalWifiV1_2TargetTest, VtsHalWifiV1_3TargetTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.VtsHal',
-        test_name='cheets_VTS_R.11_r1.x86.VtsHal',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'VtsHalAtraceV1_0TargetTest', '--include-filter', 'VtsHalAudioControlV1_0TargetTest', '--include-filter', 'VtsHalAudioControlV2_0TargetTest', '--include-filter', 'VtsHalAudioEffectV2_0TargetTest', '--include-filter', 'VtsHalAudioEffectV4_0TargetTest', '--include-filter', 'VtsHalAudioEffectV5_0TargetTest', '--include-filter', 'VtsHalAudioEffectV6_0TargetTest', '--include-filter', 'VtsHalAudioPolicyV1_0TargetTest', '--include-filter', 'VtsHalAudioV2_0TargetTest', '--include-filter', 'VtsHalAudioV4_0TargetTest', '--include-filter', 'VtsHalAudioV5_0TargetTest', '--include-filter', 'VtsHalAudioV6_0TargetTest', '--include-filter', 'VtsHalAuthSecretV1_0TargetTest', '--include-filter', 'VtsHalAutomotiveDisplayV1_0TargetTest', '--include-filter', 'VtsHalBiometricsFaceV1_0TargetTest', '--include-filter', 'VtsHalBiometricsFingerprintV2_1TargetTest', '--include-filter', 'VtsHalBiometricsFingerprintV2_2TargetTest', '--include-filter', 'VtsHalBluetoothA2dpV1_0TargetTest', '--include-filter', 'VtsHalBluetoothAudioV2_0TargetTest', '--include-filter', 'VtsHalBluetoothV1_0TargetTest', '--include-filter', 'VtsHalBluetoothV1_1TargetTest', '--include-filter', 'VtsHalBootV1_0TargetTest', '--include-filter', 'VtsHalBootV1_1TargetTest', '--include-filter', 'VtsHalBroadcastradioV1_0TargetTest', '--include-filter', 'VtsHalBroadcastradioV1_1TargetTest', '--include-filter', 'VtsHalBroadcastradioV2_0TargetTest', '--include-filter', 'VtsHalCameraProviderV2_4TargetTest', '--include-filter', 'VtsHalCameraServiceV2_0TargetTest', '--include-filter', 'VtsHalCanBusV1_0TargetTest', '--include-filter', 'VtsHalCanBusVirtualV1_0TargetTest', '--include-filter', 'VtsHalCanControllerV1_0TargetTest', '--include-filter', 'VtsHalCasV1_0TargetTest', '--include-filter', 'VtsHalCasV1_1TargetTest', '--include-filter', 'VtsHalCasV1_2TargetTest', '--include-filter', 'VtsHalConfigstoreV1_0TargetTest', '--include-filter', 'VtsHalConfirmationUIV1_0TargetTest', '--include-filter', 'VtsHalContexthubV1_0TargetTest', '--include-filter', 'VtsHalContexthubV1_1TargetTest', '--include-filter', 'VtsHalDrmV1_0TargetTest', '--include-filter', 'VtsHalDrmV1_1TargetTest', '--include-filter', 'VtsHalDrmV1_2TargetTest', '--include-filter', 'VtsHalDrmV1_3TargetTest', '--include-filter', 'VtsHalDumpstateV1_0TargetTest', '--include-filter', 'VtsHalDumpstateV1_1TargetTest', '--include-filter', 'VtsHalEvsV1_0TargetTest', '--include-filter', 'VtsHalEvsV1_1TargetTest', '--include-filter', 'VtsHalGatekeeperV1_0TargetTest', '--include-filter', 'VtsHalGnssV1_0TargetTest', '--include-filter', 'VtsHalGnssV1_1TargetTest', '--include-filter', 'VtsHalGnssV2_0TargetTest', '--include-filter', 'VtsHalGnssV2_1TargetTest', '--include-filter', 'VtsHalGraphicsComposerV2_1TargetTest', '--include-filter', 'VtsHalGraphicsComposerV2_2TargetTest', '--include-filter', 'VtsHalGraphicsComposerV2_3TargetTest', '--include-filter', 'VtsHalGraphicsComposerV2_4TargetTest', '--include-filter', 'VtsHalGraphicsMapperV2_0TargetTest', '--include-filter', 'VtsHalGraphicsMapperV2_1TargetTest', '--include-filter', 'VtsHalGraphicsMapperV3_0TargetTest', '--include-filter', 'VtsHalGraphicsMapperV4_0TargetTest', '--include-filter', 'VtsHalHealthStorageV1_0TargetTest', '--include-filter', 'VtsHalHealthV1_0TargetTest', '--include-filter', 'VtsHalHealthV2_0TargetTest', '--include-filter', 'VtsHalHealthV2_1TargetTest', '--include-filter', 'VtsHalIdentityTargetTest', '--include-filter', 'VtsHalInputClassifierV1_0TargetTest', '--include-filter', 'VtsHalIrV1_0TargetTest', '--include-filter', 'VtsHalKeymasterV3_0TargetTest', '--include-filter', 'VtsHalKeymasterV4_0TargetTest', '--include-filter', 'VtsHalKeymasterV4_1TargetTest', '--include-filter', 'VtsHalLightTargetTest', '--include-filter', 'VtsHalLightV2_0TargetTest', '--include-filter', 'VtsHalMediaC2V1_0TargetAudioDecTest', '--include-filter', 'VtsHalMediaC2V1_0TargetAudioEncTest', '--include-filter', 'VtsHalMediaC2V1_0TargetComponentTest', '--include-filter', 'VtsHalMediaC2V1_0TargetMasterTest', '--include-filter', 'VtsHalMediaC2V1_0TargetVideoDecTest', '--include-filter', 'VtsHalMediaC2V1_0TargetVideoEncTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetAudioDecTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetAudioEncTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetComponentTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetMasterTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetVideoDecTest', '--include-filter', 'VtsHalMediaOmxV1_0TargetVideoEncTest', '--include-filter', 'VtsHalMemtrackV1_0TargetTest', '--include-filter', 'VtsHalNetNetdV1_0TargetTest', '--include-filter', 'VtsHalNetNetdV1_1TargetTest', '--include-filter', 'VtsHalNeuralnetworksV1_0TargetTest', '--include-filter', 'VtsHalNeuralnetworksV1_1TargetTest', '--include-filter', 'VtsHalNeuralnetworksV1_2BenchmarkTestCases', '--include-filter', 'VtsHalNeuralnetworksV1_2TargetTest', '--include-filter', 'VtsHalNeuralnetworksV1_3BenchmarkTestCases', '--include-filter', 'VtsHalNeuralnetworksV1_3TargetTest', '--include-filter', 'VtsHalNfcV1_0TargetTest', '--include-filter', 'VtsHalNfcV1_1TargetTest', '--include-filter', 'VtsHalNfcV1_2TargetTest', '--include-filter', 'VtsHalOccupantAwarenessV1_0TargetTest', '--include-filter', 'VtsHalOemLockV1_0TargetTest', '--include-filter', 'VtsHalPowerStatsV1_0TargetTest', '--include-filter', 'VtsHalPowerTargetTest', '--include-filter', 'VtsHalPowerV1_0TargetTest', '--include-filter', 'VtsHalPowerV1_1TargetTest', '--include-filter', 'VtsHalPowerV1_2TargetTest', '--include-filter', 'VtsHalPowerV1_3TargetTest', '--include-filter', 'VtsHalRadioConfigV1_0TargetTest', '--include-filter', 'VtsHalRadioConfigV1_1TargetTest', '--include-filter', 'VtsHalRadioConfigV1_2TargetTest', '--include-filter', 'VtsHalRadioV1_0TargetTest', '--include-filter', 'VtsHalRadioV1_1TargetTest', '--include-filter', 'VtsHalRadioV1_2TargetTest', '--include-filter', 'VtsHalRadioV1_3TargetTest', '--include-filter', 'VtsHalRadioV1_4TargetTest', '--include-filter', 'VtsHalRadioV1_5TargetTest', '--include-filter', 'VtsHalRebootEscrowTargetTest', '--include-filter', 'VtsHalRenderscriptV1_0TargetTest', '--include-filter', 'VtsHalSapV1_0TargetTest', '--include-filter', 'VtsHalSecureElementV1_0TargetTest', '--include-filter', 'VtsHalSecureElementV1_1TargetTest', '--include-filter', 'VtsHalSecureElementV1_2TargetTest', '--include-filter', 'VtsHalSensorManagerV1_0TargetTest', '--include-filter', 'VtsHalSensorsV1_0TargetTest', '--include-filter', 'VtsHalSensorsV2_0TargetTest', '--include-filter', 'VtsHalSensorsV2_1TargetTest', '--include-filter', 'VtsHalSoundtriggerV2_0TargetTest', '--include-filter', 'VtsHalSoundtriggerV2_1TargetTest', '--include-filter', 'VtsHalSoundtriggerV2_2TargetTest', '--include-filter', 'VtsHalSoundtriggerV2_3TargetTest', '--include-filter', 'VtsHalStatsV1_0TargetTest', '--include-filter', 'VtsHalSurroundViewV1_0TargetTest', '--include-filter', 'VtsHalTetheroffloadConfigV1_0TargetTest', '--include-filter', 'VtsHalTetheroffloadControlV1_0TargetTest', '--include-filter', 'VtsHalThermalV1_0TargetTest', '--include-filter', 'VtsHalThermalV1_1TargetTest', '--include-filter', 'VtsHalThermalV2_0TargetTest', '--include-filter', 'VtsHalTvInputV1_0TargetTest', '--include-filter', 'VtsHalTvTunerV1_0TargetTest', '--include-filter', 'VtsHalUsbGadgetV1_1HostTest', '--include-filter', 'VtsHalUsbV1_0TargetTest', '--include-filter', 'VtsHalUsbV1_1TargetTest', '--include-filter', 'VtsHalUsbV1_2TargetTest', '--include-filter', 'VtsHalVibratorTargetTest', '--include-filter', 'VtsHalVibratorV1_0TargetTest', '--include-filter', 'VtsHalVibratorV1_1TargetTest', '--include-filter', 'VtsHalVibratorV1_2TargetTest', '--include-filter', 'VtsHalVibratorV1_3TargetTest', '--include-filter', 'VtsHalVrV1_0TargetTest', '--include-filter', 'VtsHalWeaverV1_0TargetTest', '--include-filter', 'VtsHalWifiApV1_0TargetTest', '--include-filter', 'VtsHalWifiApV1_4TargetTest', '--include-filter', 'VtsHalWifiHostapdV1_0TargetTest', '--include-filter', 'VtsHalWifiHostapdV1_1TargetTest', '--include-filter', 'VtsHalWifiHostapdV1_2TargetTest', '--include-filter', 'VtsHalWifiKeystoreV1_0TargetTest', '--include-filter', 'VtsHalWifiNanV1_0TargetTest', '--include-filter', 'VtsHalWifiNanV1_2TargetTest', '--include-filter', 'VtsHalWifiNanV1_4TargetTest', '--include-filter', 'VtsHalWifiOffloadV1_0TargetTest', '--include-filter', 'VtsHalWifiRttV1_4TargetTest', '--include-filter', 'VtsHalWifiSupplicantP2pV1_0TargetTest', '--include-filter', 'VtsHalWifiSupplicantP2pV1_2TargetTest', '--include-filter', 'VtsHalWifiSupplicantV1_0TargetTest', '--include-filter', 'VtsHalWifiSupplicantV1_1TargetTest', '--include-filter', 'VtsHalWifiSupplicantV1_2TargetTest', '--include-filter', 'VtsHalWifiSupplicantV1_3TargetTest', '--include-filter', 'VtsHalWifiV1_0TargetTest', '--include-filter', 'VtsHalWifiV1_1TargetTest', '--include-filter', 'VtsHalWifiV1_2TargetTest', '--include-filter', 'VtsHalWifiV1_3TargetTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='VtsHal',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=302400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.VtsHidlAllocatorV1_0TargetTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.VtsHidlAllocatorV1_0TargetTest
deleted file mode 100644
index 303a216..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.VtsHidlAllocatorV1_0TargetTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.VtsHidlAllocatorV1_0TargetTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module VtsHidlAllocatorV1_0TargetTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.VtsHidlAllocatorV1_0TargetTest',
-        test_name='cheets_VTS_R.11_r1.x86.VtsHidlAllocatorV1_0TargetTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'VtsHidlAllocatorV1_0TargetTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='VtsHidlAllocatorV1_0TargetTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.binderDriverInterfaceTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.binderDriverInterfaceTest
deleted file mode 100644
index 8b705d0..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.binderDriverInterfaceTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.binderDriverInterfaceTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module binderDriverInterfaceTest, binderDriverInterfaceTest_IPC_32 of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.binderDriverInterfaceTest',
-        test_name='cheets_VTS_R.11_r1.x86.binderDriverInterfaceTest',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'binderDriverInterfaceTest', '--include-filter', 'binderDriverInterfaceTest_IPC_32', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='binderDriverInterfaceTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.binderLibTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.binderLibTest
deleted file mode 100644
index 457eea5..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.binderLibTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.binderLibTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module binderLibTest, binderLibTest_IPC_32 of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.binderLibTest',
-        test_name='cheets_VTS_R.11_r1.x86.binderLibTest',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'binderLibTest', '--include-filter', 'binderLibTest_IPC_32', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='binderLibTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.binderSafeInterfaceTest b/server/site_tests/cheets_VTS_R/control.11_r1.x86.binderSafeInterfaceTest
deleted file mode 100644
index d84de2d..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.binderSafeInterfaceTest
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.binderSafeInterfaceTest'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module binderSafeInterfaceTest of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.binderSafeInterfaceTest',
-        test_name='cheets_VTS_R.11_r1.x86.binderSafeInterfaceTest',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'binderSafeInterfaceTest', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='binderSafeInterfaceTest',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.bpf_module_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.bpf_module_test
deleted file mode 100644
index f829167..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.bpf_module_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.bpf_module_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module bpf_module_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.bpf_module_test',
-        test_name='cheets_VTS_R.11_r1.x86.bpf_module_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'bpf_module_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='bpf_module_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.fiemap_writer_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.fiemap_writer_test
deleted file mode 100644
index da2b232..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.fiemap_writer_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.fiemap_writer_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module fiemap_writer_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.fiemap_writer_test',
-        test_name='cheets_VTS_R.11_r1.x86.fiemap_writer_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'fiemap_writer_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='fiemap_writer_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.memunreachable_binder_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.memunreachable_binder_test
deleted file mode 100644
index 954cc88..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.memunreachable_binder_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.memunreachable_binder_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module memunreachable_binder_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.memunreachable_binder_test',
-        test_name='cheets_VTS_R.11_r1.x86.memunreachable_binder_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'memunreachable_binder_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='memunreachable_binder_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.netd_integration_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.netd_integration_test
deleted file mode 100644
index 6592281..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.netd_integration_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.netd_integration_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module netd_integration_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.netd_integration_test',
-        test_name='cheets_VTS_R.11_r1.x86.netd_integration_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'netd_integration_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='netd_integration_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_compatibilityMatrix_validate_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_compatibilityMatrix_validate_test
deleted file mode 100644
index 9f1ebdd..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_compatibilityMatrix_validate_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_compatibilityMatrix_validate_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_compatibilityMatrix_validate_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_compatibilityMatrix_validate_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_compatibilityMatrix_validate_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_compatibilityMatrix_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_compatibilityMatrix_validate_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_core_liblp_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_core_liblp_test
deleted file mode 100644
index faf7565..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_core_liblp_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_core_liblp_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_core_liblp_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_core_liblp_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_core_liblp_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_core_liblp_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_core_liblp_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_core_meminfo_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_core_meminfo_test
deleted file mode 100644
index b508261..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_core_meminfo_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_core_meminfo_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_core_meminfo_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_core_meminfo_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_core_meminfo_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_core_meminfo_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_core_meminfo_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_core_test_binary_qtaguid_module b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_core_test_binary_qtaguid_module
deleted file mode 100644
index 5fd3601..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_core_test_binary_qtaguid_module
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_core_test_binary_qtaguid_module'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_core_test_binary_qtaguid_module of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_core_test_binary_qtaguid_module',
-        test_name='cheets_VTS_R.11_r1.x86.vts_core_test_binary_qtaguid_module',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_core_test_binary_qtaguid_module', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_core_test_binary_qtaguid_module',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_defaultPermissions_validate_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_defaultPermissions_validate_test
deleted file mode 100644
index 56a7c0c..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_defaultPermissions_validate_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_defaultPermissions_validate_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_defaultPermissions_validate_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_defaultPermissions_validate_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_defaultPermissions_validate_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_defaultPermissions_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_defaultPermissions_validate_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_gsi_boot_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_gsi_boot_test
deleted file mode 100644
index 16c1337..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_gsi_boot_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_gsi_boot_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_gsi_boot_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_gsi_boot_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_gsi_boot_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_gsi_boot_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_gsi_boot_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_halManifest_validate_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_halManifest_validate_test
deleted file mode 100644
index 70b60fb..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_halManifest_validate_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_halManifest_validate_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_halManifest_validate_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_halManifest_validate_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_halManifest_validate_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_halManifest_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_halManifest_validate_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ibase_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ibase_test
deleted file mode 100644
index 57ed8c0..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ibase_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_ibase_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_ibase_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_ibase_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_ibase_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_ibase_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_ibase_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_checkpoint_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_checkpoint_test
deleted file mode 100644
index b38b5eb..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_checkpoint_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_kernel_checkpoint_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_checkpoint_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_kernel_checkpoint_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_kernel_checkpoint_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_checkpoint_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_checkpoint_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_encryption_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_encryption_test
deleted file mode 100644
index ca5c453..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_encryption_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_kernel_encryption_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_encryption_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_kernel_encryption_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_kernel_encryption_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_encryption_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_encryption_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_kheaders b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_kheaders
deleted file mode 100644
index 7372f77..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_kheaders
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_kernel_kheaders'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_kheaders of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_kernel_kheaders',
-        test_name='cheets_VTS_R.11_r1.x86.vts_kernel_kheaders',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_kheaders', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_kheaders',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_loopconfig_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_loopconfig_test
deleted file mode 100644
index f6d829f..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_loopconfig_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_kernel_loopconfig_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_loopconfig_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_kernel_loopconfig_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_kernel_loopconfig_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_loopconfig_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_loopconfig_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_net_tests b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_net_tests
deleted file mode 100644
index 7a13b44..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_net_tests
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_kernel_net_tests'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_net_tests of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_kernel_net_tests',
-        test_name='cheets_VTS_R.11_r1.x86.vts_kernel_net_tests',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_net_tests', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_net_tests',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_proc_file_api_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_proc_file_api_test
deleted file mode 100644
index e6fadfc..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_proc_file_api_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_kernel_proc_file_api_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_proc_file_api_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_kernel_proc_file_api_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_kernel_proc_file_api_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_proc_file_api_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_proc_file_api_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_toolchain b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_toolchain
deleted file mode 100644
index 20336d0..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_toolchain
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_kernel_toolchain'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_toolchain of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_kernel_toolchain',
-        test_name='cheets_VTS_R.11_r1.x86.vts_kernel_toolchain',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_toolchain', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_toolchain',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_tun_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_tun_test
deleted file mode 100644
index 6a4c075..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_kernel_tun_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_kernel_tun_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_kernel_tun_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_kernel_tun_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_kernel_tun_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_kernel_tun_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_kernel_tun_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_libdm_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_libdm_test
deleted file mode 100644
index c790832..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_libdm_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_libdm_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_libdm_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_libdm_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_libdm_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_libdm_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_libdm_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_libsnapshot_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_libsnapshot_test
deleted file mode 100644
index 36a00cc..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_libsnapshot_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_libsnapshot_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_libsnapshot_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_libsnapshot_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_libsnapshot_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_libsnapshot_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_libsnapshot_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_linux_kselftest_arm_ b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_linux_kselftest_arm_
deleted file mode 100644
index 3eb0640..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_linux_kselftest_arm_
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_linux_kselftest_arm_'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_linux_kselftest_arm_32, vts_linux_kselftest_arm_64 of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_linux_kselftest_arm_',
-        test_name='cheets_VTS_R.11_r1.x86.vts_linux_kselftest_arm_',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'vts_linux_kselftest_arm_32', '--include-filter', 'vts_linux_kselftest_arm_64', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_linux_kselftest_arm_',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_linux_kselftest_x86_ b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_linux_kselftest_x86_
deleted file mode 100644
index b1b680f..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_linux_kselftest_x86_
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_linux_kselftest_x86_'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_linux_kselftest_x86_32, vts_linux_kselftest_x86_64 of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_linux_kselftest_x86_',
-        test_name='cheets_VTS_R.11_r1.x86.vts_linux_kselftest_x86_',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'vts_linux_kselftest_x86_32', '--include-filter', 'vts_linux_kselftest_x86_64', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_linux_kselftest_x86_',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_arm b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_arm
deleted file mode 100644
index fbe22ca..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_arm
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_ltp_test_arm'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_ltp_test_arm of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_ltp_test_arm',
-        test_name='cheets_VTS_R.11_r1.x86.vts_ltp_test_arm',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_ltp_test_arm', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_ltp_test_arm',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_arm_64 b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_arm_64
deleted file mode 100644
index e41721e..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_arm_64
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_ltp_test_arm_64'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_ltp_test_arm_64, vts_ltp_test_arm_64_hwasan, vts_ltp_test_arm_64_lowmem, vts_ltp_test_arm_64_lowmem_hwasan of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_ltp_test_arm_64',
-        test_name='cheets_VTS_R.11_r1.x86.vts_ltp_test_arm_64',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'vts_ltp_test_arm_64', '--include-filter', 'vts_ltp_test_arm_64_hwasan', '--include-filter', 'vts_ltp_test_arm_64_lowmem', '--include-filter', 'vts_ltp_test_arm_64_lowmem_hwasan', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_ltp_test_arm_64',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=9000)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_arm_lowmem b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_arm_lowmem
deleted file mode 100644
index dc24df7..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_arm_lowmem
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_ltp_test_arm_lowmem'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_ltp_test_arm_lowmem of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_ltp_test_arm_lowmem',
-        test_name='cheets_VTS_R.11_r1.x86.vts_ltp_test_arm_lowmem',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_ltp_test_arm_lowmem', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_ltp_test_arm_lowmem',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_x86 b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_x86
deleted file mode 100644
index 4d7877e..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_ltp_test_x86
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_ltp_test_x86'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_ltp_test_x86, vts_ltp_test_x86_64 of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_ltp_test_x86',
-        test_name='cheets_VTS_R.11_r1.x86.vts_ltp_test_x86',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'vts_ltp_test_x86', '--include-filter', 'vts_ltp_test_x86_64', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_ltp_test_x86',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_media b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_media
deleted file mode 100644
index 87f2231..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_media
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_media'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_mediaCodecs_validate_test, vts_mediaProfiles_validate_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_media',
-        test_name='cheets_VTS_R.11_r1.x86.vts_media',
-        run_template=['run', 'commandAndExit', 'vts', '--include-filter', 'vts_mediaCodecs_validate_test', '--include-filter', 'vts_mediaProfiles_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_media',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=5400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_permission_validate_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_permission_validate_test
deleted file mode 100644
index a79a04c..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_permission_validate_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_permission_validate_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_permission_validate_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_permission_validate_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_permission_validate_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_permission_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_permission_validate_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_processgroup_validate_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_processgroup_validate_test
deleted file mode 100644
index 30f6f8e..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_processgroup_validate_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_processgroup_validate_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_processgroup_validate_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_processgroup_validate_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_processgroup_validate_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_processgroup_validate_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_processgroup_validate_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_security_avb_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_security_avb_test
deleted file mode 100644
index 8f821c7..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_security_avb_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_security_avb_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_security_avb_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_security_avb_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_security_avb_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_security_avb_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_security_avb_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_test_binary_bow_module b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_test_binary_bow_module
deleted file mode 100644
index 071ce9b..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_test_binary_bow_module
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_test_binary_bow_module'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_test_binary_bow_module of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_test_binary_bow_module',
-        test_name='cheets_VTS_R.11_r1.x86.vts_test_binary_bow_module',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_test_binary_bow_module', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_test_binary_bow_module',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_platform_version_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_platform_version_test
deleted file mode 100644
index 24ee854..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_platform_version_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_treble_platform_version_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_treble_platform_version_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_treble_platform_version_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_treble_platform_version_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_treble_platform_version_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_treble_platform_version_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_sys_prop_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_sys_prop_test
deleted file mode 100644
index 6cf0ffd..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_sys_prop_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_treble_sys_prop_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_treble_sys_prop_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_treble_sys_prop_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_treble_sys_prop_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_treble_sys_prop_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_treble_sys_prop_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_vintf_framework_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_vintf_framework_test
deleted file mode 100644
index 59a4ab6..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_vintf_framework_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_treble_vintf_framework_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_treble_vintf_framework_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_treble_vintf_framework_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_treble_vintf_framework_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_treble_vintf_framework_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_treble_vintf_framework_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_vintf_vendor_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_vintf_vendor_test
deleted file mode 100644
index 17ef763..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_treble_vintf_vendor_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_treble_vintf_vendor_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_treble_vintf_vendor_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_treble_vintf_vendor_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_treble_vintf_vendor_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_treble_vintf_vendor_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_treble_vintf_vendor_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_virtual_ab_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_virtual_ab_test
deleted file mode 100644
index 1b83d40..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_virtual_ab_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_virtual_ab_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_virtual_ab_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_virtual_ab_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_virtual_ab_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_virtual_ab_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_virtual_ab_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_vndk_abi_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_vndk_abi_test
deleted file mode 100644
index 438c14a..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_vndk_abi_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_vndk_abi_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_vndk_abi_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_vndk_abi_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_vndk_abi_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_vndk_abi_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_vndk_abi_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_vndk_dependency_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_vndk_dependency_test
deleted file mode 100644
index 5f048ee..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_vndk_dependency_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_vndk_dependency_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_vndk_dependency_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_vndk_dependency_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_vndk_dependency_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_vndk_dependency_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_vndk_dependency_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_vndk_files_test b/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_vndk_files_test
deleted file mode 100644
index 5886246..0000000
--- a/server/site_tests/cheets_VTS_R/control.11_r1.x86.vts_vndk_files_test
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_VTS_R.11_r1.x86.vts_vndk_files_test'
-ATTRIBUTES = 'suite:arc-cts-r'
-DEPENDENCIES = 'arc, cts_abi_x86'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-DOC = 'Run module vts_vndk_files_test of the Vendor Test Suite (VTS) using x86 ABI in the ARC++ container.'
-
-def run_TS(machine):
-    host_list = [hosts.create_host(machine)]
-    job.run_test(
-        'cheets_VTS_R',
-        hosts=host_list,
-        iterations=1,
-        tag='11_r1.x86.vts_vndk_files_test',
-        test_name='cheets_VTS_R.11_r1.x86.vts_vndk_files_test',
-        run_template=['run', 'commandAndExit', 'vts', '--module', 'vts_vndk_files_test', '--logcat-on-failure'],
-        retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
-        target_module='vts_vndk_files_test',
-        target_plan=None,
-        bundle='x86',
-        use_jdk9=True,
-        timeout=3600)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_VTS_R/generate_controlfiles.py b/server/site_tests/cheets_VTS_R/generate_controlfiles.py
deleted file mode 100755
index db0631b..0000000
--- a/server/site_tests/cheets_VTS_R/generate_controlfiles.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This is a trampoline script to invoke the actual generator script.
-
-import os
-import sys
-
-target_script_name = 'generate_controlfiles_VTS_R.py'
-target_script_path = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), '..', '..', 'cros', 'tradefed',
-                     target_script_name))
-sys.path.append('../../../../autotest/files/server/cros/tradefed/')
-
-os.execv(target_script_path, sys.argv)
diff --git a/server/site_tests/chromium/chromium.py b/server/site_tests/chromium/chromium.py
new file mode 100644
index 0000000..1751094
--- /dev/null
+++ b/server/site_tests/chromium/chromium.py
@@ -0,0 +1,86 @@
+# Lint as: python2, python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import logging
+import os
+
+from autotest_lib.client.common_lib.error import TestFail
+from autotest_lib.server import test
+from autotest_lib.server import utils
+
+
+class chromium(test.test):
+    """Run Chromium tests built on a Skylab DUT."""
+
+    version = 1
+
+    PROVISION_POINT = '/var/lib/imageloader/lacros'
+    MOUNT_POINT = '/usr/local/tmp/chromium'
+    CHRONOS_RES_DIR = '/home/chronos/user/results'
+
+    def initialize(self, host=None, args=None):
+        self.host = host
+        assert host.path_exists(self.PROVISION_POINT), (
+                'chromium test artifact is not provisioned by CTP. '
+                'Please check the CTP request.')
+        self._mount_runtime()
+        args_dict = utils.args_to_dict(args)
+        self.exe_rel_path = args_dict.get('exe_rel_path', '')
+        path_to_executable = os.path.join(self.MOUNT_POINT, self.exe_rel_path)
+        assert self.host.path_exists(path_to_executable), (
+                'chromium test executable is not mounted at the '
+                'expected path, %s' % path_to_executable)
+
+        test_args = args_dict.get('test_args')
+        if not test_args:
+            test_args_b64 = args_dict.get('test_args_b64')
+            if test_args_b64:
+                test_args = base64.b64decode(test_args_b64)
+        if isinstance(test_args, bytes):
+            test_args = test_args.decode()
+        self.test_args = test_args
+
+        self.shard_number = args_dict.get('shard_number', 1)
+        self.shard_index = args_dict.get('shard_index', 0)
+
+    def _mount_runtime(self):
+        try:
+            self.host.run(
+                    'mkdir -p {mount} && '
+                    'imageloader --mount --mount_component=lacros'
+                    ' --mount_point={mount}'.format(mount=self.MOUNT_POINT))
+        except Exception as e:
+            raise TestFail('Exception while mounting test artifact: %s' % e)
+
+    def cleanup(self):
+        try:
+            self.host.run('imageloader --unmount --mount_point={mount};'
+                          'rm -rf {mount} {chronos_res}'.format(
+                                  chronos_res=self.CHRONOS_RES_DIR,
+                                  mount=self.MOUNT_POINT))
+        except Exception as e:
+            logging.exception('Exception while clearing test files: %s', e)
+
+    def run_once(self):
+        cmd = ('{mount}/{exe} '
+               '--test-launcher-summary-output {chronos_res}/output.json '
+               '--test-launcher-shard-index {idx} '
+               '--test-launcher-total-shards {num} '.format(
+                       mount=self.MOUNT_POINT,
+                       exe=self.exe_rel_path,
+                       chronos_res=self.CHRONOS_RES_DIR,
+                       idx=self.shard_index,
+                       num=self.shard_number))
+        if self.test_args:
+            cmd += '-- %s' % self.test_args
+        try:
+            self.host.run('su chronos -c -- "%s"' % cmd)
+        except Exception as e:
+            raise TestFail('Exception while executing test: %s' % e)
+        finally:
+            self.host.get_file('%s/*' % self.CHRONOS_RES_DIR,
+                               self.resultsdir,
+                               delete_dest=True)
diff --git a/server/site_tests/chromium/control b/server/site_tests/chromium/control
new file mode 100644
index 0000000..b6cff34
--- /dev/null
+++ b/server/site_tests/chromium/control
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to: go/tauto-py3-migration
+# To re-enable migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = 'Chrome browser infra team'
+NAME = 'chromium'
+TIME = 'MEDIUM'
+TEST_TYPE = "Server"
+PY_VERSION = 3
+
+MAX_RESULT_SIZE_KB = 256 * 1024
+
+REQUIRE_SSP = True
+
+DOC = '''
+Invoke the test executable built by chromium builders.
+
+This is a generic wrapper for chromium tests. The test executable and runtime
+deps are provisioned by TLS. The wrapper mounts the provisioned artifact, kicks
+off the test executable and copies the results to autotest/results.
+'''
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('chromium', host=host, args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/chromium_Telemetry/chromium_Telemetry.py b/server/site_tests/chromium_Telemetry/chromium_Telemetry.py
new file mode 100644
index 0000000..aa1d200
--- /dev/null
+++ b/server/site_tests/chromium_Telemetry/chromium_Telemetry.py
@@ -0,0 +1,82 @@
+# Lint as: python2, python3
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import tpm_utils
+from autotest_lib.server import test
+from autotest_lib.server.cros import chrome_sideloader
+from autotest_lib.server.cros import telemetry_runner
+from autotest_lib.server.cros.crosperf import device_setup_utils
+
+
+class chromium_Telemetry(test.test):
+    """Run a telemetry benchmark on browser infra built Chrome."""
+    version = 1
+
+    # The path where TLS provisioned the lacros image.
+    CHROME_PROVISION = '/var/lib/imageloader/lacros'
+
+    # The path where we install chromium/src. In this experimental
+    # stage, we may use an existing lacros image built at src.
+    CHROME_BUILD = '/usr/local/lacros-build'
+
+    # The path where the telemetry backend can find chrome.
+    # See go/lacros_browser_backend.
+    CHROME_DIR = '/usr/local/lacros-chrome'
+
+    def initialize(self, host=None, args=None):
+        self.host = host
+        assert self.host.path_exists(self.CHROME_PROVISION), (
+                'lacros artifact '
+                'is not provisioned by CTP. Please check the CTP request.')
+
+        chrome_sideloader.setup_host(self.host, self.CHROME_BUILD, None)
+
+        self.args_dict = utils.args_to_dict(args)
+        path_to_chrome = os.path.join(
+                self.CHROME_BUILD, self.args_dict.get('exe_rel_path',
+                                                      'chrome'))
+        logging.info('provisioned lacros to %s', path_to_chrome)
+
+        self.host.run(['rm', '-rf', self.CHROME_DIR])
+        self.host.run(['mkdir', '-p', '--mode', '0755', self.CHROME_DIR])
+        self.host.run([
+                'mv',
+                '%s/*' % os.path.dirname(path_to_chrome),
+                '%s/' % self.CHROME_DIR
+        ])
+
+        tpm_utils.ClearTPMOwnerRequest(self.host, wait_for_ready=True)
+
+        # TODO(crbug/1233676): Read benchmark and filters from test_args.
+        self.benchmark = 'speedometer2'
+        self.telemetry_args = '--story-filter=Speedometer2'.split()
+        repeat = self.args_dict.get('pageset_repeat')
+        if repeat is not None:
+            self.telemetry_args.append('--pageset-repeat=%s' % repeat)
+
+    def run_once(self):
+        """Run a telemetry benchmark."""
+
+        dut_config_str = self.args_dict.get('dut_config', '{}')
+        dut_config = json.loads(dut_config_str)
+        if dut_config:
+            device_setup_utils.setup_device(self.host, dut_config)
+
+        with telemetry_runner.TelemetryRunnerFactory().get_runner(
+                self.host, telemetry_on_dut=False,
+                is_lacros=True) as telemetry:
+            perf_value_writer = self
+            telemetry.run_telemetry_benchmark(self.benchmark,
+                                              perf_value_writer,
+                                              *self.telemetry_args)
+
+    def cleanup(self):
+        chrome_sideloader.cleanup_host(self.host, self.CHROME_BUILD, None)
+        chrome_sideloader.cleanup_host(self.host, self.CHROME_DIR, None)
diff --git a/server/site_tests/chromium_Telemetry/control b/server/site_tests/chromium_Telemetry/control
new file mode 100644
index 0000000..3d3f87a
--- /dev/null
+++ b/server/site_tests/chromium_Telemetry/control
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chrome Browser Infra Team'
+NAME = 'chromium_Telemetry'
+TIME = 'LONG'
+TEST_CATEGORY = 'Benchmark'
+TEST_CLASS = 'performance'
+TEST_TYPE = 'server'
+PY_VERSION = 3
+
+DOC = '''
+This server-side test executes the Telemetry benchmark:
+loading.desktop.
+
+This benchmark will run on lacros built by browser infra.
+
+This control file is still in an experimental stage.
+'''
+
+def run_benchmark(machine):
+    host = hosts.create_host(machine)
+    job.run_test('chromium_Telemetry', host=host, args=args)
+
+parallel_simple(run_benchmark, machines)
\ No newline at end of file
diff --git a/server/site_tests/crosperf_Wrapper/OWNERS b/server/site_tests/crosperf_Wrapper/OWNERS
deleted file mode 100644
index 31243ec..0000000
--- a/server/site_tests/crosperf_Wrapper/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-include chromiumos/third_party/toolchain-utils:/OWNERS.toolchain
diff --git a/server/site_tests/crosperf_Wrapper/control b/server/site_tests/crosperf_Wrapper/control
deleted file mode 100644
index 211cdb1..0000000
--- a/server/site_tests/crosperf_Wrapper/control
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "zhizhouy@chromium.org, chromeos-toolchain@google.com"
-NAME = "crosperf_Wrapper"
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "performance"
-TEST_TYPE = "server"
-JOB_RETRIES = 0
-
-DOC = """
-This wraps up various performance client tests under the crosperf script.
-This is part of Chrome OS Toolchain testing platform.
-This is only used by crosperf to run a client test on certain device.
-"""
-
-# Put the args into the args_dict.
-args_dict = utils.args_to_dict(args)
-
-def run_crosperf_Wrapper(machine):
-    dut = hosts.create_host(machine)
-    job.run_test('crosperf_Wrapper',
-                 test_name=args_dict.get('test', ''),
-                 test_args=args_dict.get('test_args', ''),
-                 dut_config_str=args_dict.get('dut_config', ''),
-                 dut=dut)
-
-# run the test in multiple machines
-
-job.parallel_simple(run_crosperf_Wrapper, machines)
-
diff --git a/server/site_tests/crosperf_Wrapper/crosperf_Wrapper.py b/server/site_tests/crosperf_Wrapper/crosperf_Wrapper.py
deleted file mode 100644
index 94661aa..0000000
--- a/server/site_tests/crosperf_Wrapper/crosperf_Wrapper.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-from autotest_lib.server.cros.crosperf import device_setup_utils
-
-WAIT_TIME_LOG = 'wait_time.log'
-
-class crosperf_Wrapper(test.test):
-    """
-    Client test wrapper for crosperf.
-
-    This is a class to run client tests under the crosperf script.
-
-    """
-    version = 1
-
-    def run_once(self, test_name, test_args, dut_config_str, dut=None):
-        """
-        Run a single telemetry test.
-
-        @param test_name: Name of the client test.
-        @param test_args: Arguments need to be passed to test.
-        @param dut_config_str: A string dumped from json representing DUT
-                               configurations.
-        @param dut: The autotest host object representing DUT.
-
-        @returns A result of this execution.
-
-        """
-        if not test_name:
-            raise RuntimeError('Missing client test name to run.')
-
-        if dut_config_str:
-            dut_config = json.loads(dut_config_str)
-            # Setup device with dut_config arguments before running test.
-            wait_time = device_setup_utils.setup_device(dut, dut_config)
-            # Wait time can be used to accumulate cooldown time in Crosperf.
-            with open(os.path.join(self.resultsdir, WAIT_TIME_LOG), 'w') as f:
-                f.write(str(wait_time))
-
-        try:
-            # Execute the client side test.
-            client_at = autotest.Autotest(dut)
-            result = client_at.run_test(test_name, args=test_args)
-        except (error.TestFail, error.TestWarn):
-            logging.debug('Test did not succeed while executing client test.')
-            raise
-        except:
-            logging.debug('Unexpected failure on client test %s.', test_name)
-            raise
-
-        return result
diff --git a/server/site_tests/debugd_DevTools/control b/server/site_tests/debugd_DevTools/control
deleted file mode 100644
index 41ed6e8..0000000
--- a/server/site_tests/debugd_DevTools/control
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "dpursell"
-NAME = "debugd_DevTools"
-PURPOSE = "Debug daemon dev tools test"
-CRITERIA = "This test will fail if any tool or query fails to respond properly"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test check debugd dev tools functionality. Currently this test must be run
-manually on a machine that is already in dev mode and does not yet have an
-owner, otherwise the test will throw a TestNAError and stop. Additionally, if
-the device rootfs is verified, this test will disable verification and reboot.
-
-To automate this test for use in the lab, the following should be implemented:
-  - Dev mode control
-    - Verify tools are unavailable when not in dev mode.
-    - Transition to dev mode before running tests on the tools.
-    - Return to original dev mode state after the test completes.
-  - Login control
-    - Verify tools are unavailable after user login.
-    - Wipe the login record before running tests on the tools.
-    - Return to original login state after the test completes.
-  - Restore rootfs verification
-    - May require installing a fresh test image.
-TODO(dpursell): Implement the remaining features, see crbug.com/428971.
-"""
-
-job.run_test("debugd_DevTools", host=hosts.create_host(machines[0]))
diff --git a/server/site_tests/debugd_DevTools/debugd_DevTools.py b/server/site_tests/debugd_DevTools/debugd_DevTools.py
deleted file mode 100644
index 1e3308d..0000000
--- a/server/site_tests/debugd_DevTools/debugd_DevTools.py
+++ /dev/null
@@ -1,110 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-import common
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-from autotest_lib.server.cros import debugd_dev_tools
-
-
-class debugd_DevTools(test.test):
-    """
-    Debugd dev tools test. See control file for details.
-    """
-    version = 1
-
-
-    def create_tools(self, host):
-        """
-        Creates and initializes the tools needed for the test.
-
-        Saves a RootfsVerificationTool to self.rootfs_tool and the rest
-        to self.tools. The RootfsVerificationTool is handled separately
-        because it can't be disabled and is required first for the
-        other tools to function properly.
-
-        @param host: Host device.
-
-        @throw error.TestNAError: Dev tools are unavailable.
-        """
-        if not debugd_dev_tools.are_dev_tools_available(host):
-            raise error.TestNAError('Cannot access dev tools. Make sure the '
-                                    'device is in dev mode with no owner and '
-                                    'the boot lockbox is not finalized.')
-
-        logging.debug('Creating dev tools.')
-        self.rootfs_tool = debugd_dev_tools.RootfsVerificationTool()
-        self.tools = (debugd_dev_tools.BootFromUsbTool(),
-                      debugd_dev_tools.SshServerTool(),
-                      debugd_dev_tools.SystemPasswordTool())
-
-        logging.debug('Initializing dev tools.')
-        self.rootfs_tool.initialize(host)
-        for tool in self.tools:
-            tool.initialize(host, save_initial_state=True)
-
-
-    def cleanup_tools(self, host):
-        """
-        Performs cleanup to return the device to its initial state.
-
-        Any tools that fail to clean up will print a warning but will
-        not register a test failure.
-
-        @param host: Host device.
-        """
-        logging.debug('Cleaning up tools.')
-        for tool in self.tools:
-            try:
-                tool.restore_state()
-            except debugd_dev_tools.FeatureUnavailableError as e:
-                logging.warning('Could not restore %s - device state may be '
-                                'altered by test (%s).', tool, e)
-        debugd_dev_tools.remove_temp_files(host)
-
-
-    def test_tool(self, tool):
-        """
-        Tests an individual tool by disabling, enabling, then disabling again.
-
-        @param tool: Tool object to test.
-
-        @throw debugd_dev_tools.AccessError: Dev tool access failed.
-        @throw error.TestFail: A tool failed to affect device state.
-        """
-        # Start by disabling the tool. If disable fails we may still be
-        # able to test enabling the tool.
-        logging.debug('Disabling %s.', tool)
-        tool.disable()
-        if tool.is_enabled():
-            raise error.TestFail('%s did not disable correctly.' % tool)
-
-        # Now enable the tool and make sure it worked.
-        logging.debug('Enabling %s.', tool)
-        tool.enable()
-        if not tool.is_enabled():
-            raise error.TestFail('%s did not enable correctly.' % tool)
-
-        # Disable one more time to confirm our disable routine works.
-        logging.debug('Disabling %s.', tool)
-        tool.disable()
-        if tool.is_enabled():
-            raise error.TestFail('%s did not disable correctly.' % tool)
-
-
-    def run_once(self, host=None):
-        """Main test function."""
-        self.create_tools(host)
-        try:
-            # First remove rootfs verification if it's not already.
-            if not self.rootfs_tool.is_enabled():
-                logging.debug('Removing rootfs verification.')
-                self.rootfs_tool.enable()
-
-            for tool in self.tools:
-                self.test_tool(tool)
-        finally:
-            self.cleanup_tools(host)
diff --git a/server/site_tests/display_CheckModesAfterSignOutSignIn/control.mirrored b/server/site_tests/display_CheckModesAfterSignOutSignIn/control.mirrored
index 12e4286..b1099e0 100644
--- a/server/site_tests/display_CheckModesAfterSignOutSignIn/control.mirrored
+++ b/server/site_tests/display_CheckModesAfterSignOutSignIn/control.mirrored
@@ -14,7 +14,9 @@
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:display, suite:chameleon_hdmi"
 DEPENDENCIES = "chameleon, servo_state:WORKING"
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
+
 DOC = """
 1. Boot the Chromebook and login
 2. Set Mirrored mode
diff --git a/server/site_tests/display_CheckModesAfterSignOutSignIn/display_CheckModesAfterSignOutSignIn.py b/server/site_tests/display_CheckModesAfterSignOutSignIn/display_CheckModesAfterSignOutSignIn.py
index 124e35a..dd213b4 100644
--- a/server/site_tests/display_CheckModesAfterSignOutSignIn/display_CheckModesAfterSignOutSignIn.py
+++ b/server/site_tests/display_CheckModesAfterSignOutSignIn/display_CheckModesAfterSignOutSignIn.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/display_EdidStress/control b/server/site_tests/display_EdidStress/control
index dd213e6..9e8feaa 100644
--- a/server/site_tests/display_EdidStress/control
+++ b/server/site_tests/display_EdidStress/control
@@ -14,6 +14,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
+PY_VERSION = 3
 
 DOC = """
 This test switches Chameleon EDID from among a large pool of EDIDs, tests
diff --git a/server/site_tests/display_EdidStress/control.weekly b/server/site_tests/display_EdidStress/control.weekly
index 084699c..5b82cab 100644
--- a/server/site_tests/display_EdidStress/control.weekly
+++ b/server/site_tests/display_EdidStress/control.weekly
@@ -17,6 +17,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
+PY_VERSION = 3
 
 DOC = """
 This test switches Chameleon EDID from among a large pool of EDIDs, tests
diff --git a/server/site_tests/display_EdidStress/display_EdidStress.py b/server/site_tests/display_EdidStress/display_EdidStress.py
index 9995811..d6c9c4c 100644
--- a/server/site_tests/display_EdidStress/display_EdidStress.py
+++ b/server/site_tests/display_EdidStress/display_EdidStress.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,7 +8,6 @@
 import glob
 import logging
 import os
-import xmlrpclib
 
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
@@ -78,7 +78,7 @@
                             raise error.TestFail('No external display detected on DUT')
                         if screen_test.test_resolution(resolution):
                             raise error.TestFail('Resolution test failed')
-                except (error.TestFail, xmlrpclib.Fault) as e:
+                except error.TestFail as e:
                     logging.warning(e)
                     logging.error('EDID not supported: %s', filename)
                     failed_edids.append(filename)
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1006_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1006_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1006_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1006_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1062_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1062_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1062_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1062_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1316_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1316_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1316_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1316_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1375_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1375_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1375_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1375_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1382_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1382_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1382_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1382_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1413_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1413_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1413_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1413_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1415_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1415_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1415_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1415_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1441_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1441_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1441_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1441_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1477_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1477_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1477_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1477_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1555_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1555_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1555_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1555_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_157_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_157_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_157_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_157_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1623_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1623_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1623_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1623_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1769_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1769_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1769_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_1769_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_187_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_187_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_187_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_187_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2112_SyncMaster_DP.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2112_SyncM_DP.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2112_SyncMaster_DP.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2112_SyncM_DP.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2414_SyncMaster_DP.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2414_SyncM_DP.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2414_SyncMaster_DP.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2414_SyncM_DP.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_267_SyncMaster_VGA.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_267_SyncM_VGA.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_267_SyncMaster_VGA.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_267_SyncM_VGA.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2894_SyncMaster_DP.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2894_SyncM_DP.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2894_SyncMaster_DP.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2894_SyncM_DP.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2894_SyncMaster_DVI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2894_SyncM_DVI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2894_SyncMaster_DVI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2894_SyncM_DVI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2895_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2895_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2895_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2895_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2966_SyncMaster_DVI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2966_SyncM_DVI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2966_SyncMaster_DVI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2966_SyncM_DVI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2967_SyncMaster_DP.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2967_SyncM_DP.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2967_SyncMaster_DP.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_2967_SyncM_DP.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_404_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_404_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_404_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_404_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_463_SyncMaster_VGA.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_463_SyncM_VGA.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_463_SyncMaster_VGA.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_463_SyncM_VGA.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_532_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_532_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_532_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_532_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_542_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_542_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_542_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_542_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_639_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_639_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_639_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_639_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_800_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_800_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_800_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_800_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_890_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_890_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_890_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_890_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_892_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_892_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_892_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/rest_edids/SAM_892_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1087_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1087_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1087_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1087_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1242_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1242_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1242_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1242_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1520_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1520_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1520_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1520_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1777_SyncMaster_DVI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1777_SyncM_DVI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1777_SyncMaster_DVI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_1777_SyncM_DVI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2114_SyncMaster_DP.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2114_SyncM_DP.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2114_SyncMaster_DP.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2114_SyncM_DP.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncMaster_DP.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncM_DP.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncMaster_DP.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncM_DP.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncMaster_DVI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncM_DVI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncMaster_DVI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncM_DVI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2412_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2413_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2413_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2413_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2413_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncMaster_DP.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncM_DP.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncMaster_DP.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncM_DP.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncMaster_DVI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncM_DVI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncMaster_DVI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncM_DVI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2763_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2764_SyncMaster_DP.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2764_SyncM_DP.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2764_SyncMaster_DP.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2764_SyncM_DP.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2765_SyncMaster_DP.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2765_SyncM_DP.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2765_SyncMaster_DP.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2765_SyncM_DP.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2765_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2765_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2765_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2765_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2894_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2894_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2894_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2894_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2965_SyncMaster_DP.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2965_SyncM_DP.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2965_SyncMaster_DP.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2965_SyncM_DP.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2965_SyncMaster_DVI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2965_SyncM_DVI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2965_SyncMaster_DVI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2965_SyncM_DVI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2966_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2966_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2966_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2966_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2978_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2978_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2978_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2978_SyncM_HDMI.txt
diff --git a/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2979_SyncMaster_HDMI.txt b/server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2979_SyncM_HDMI.txt
similarity index 100%
rename from server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2979_SyncMaster_HDMI.txt
rename to server/site_tests/display_EdidStress/test_data/edids/weekly/SAM_2979_SyncM_HDMI.txt
diff --git a/server/site_tests/display_HDCPScreen/control.extended b/server/site_tests/display_HDCPScreen/control.extended
deleted file mode 100644
index ff78de6..0000000
--- a/server/site_tests/display_HDCPScreen/control.extended
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_HDCPScreen.extended"
-PURPOSE = "Remotely controlled display to enable HDCP and check screen."
-CRITERIA = "This test will fail if Chameleon sees a wrong screen."
-# HDCP control/query is broken on DUT. Disable the test until it is fixed.
-#ATTRIBUTES = "suite:chameleon_hdmi"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = "chameleon"
-
-DOC = """
-This test forces CrOS to enable HDCP and compares screens between CrOS
-and Chameleon.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test('display_HDCPScreen', host=host, tag='extended')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_HDCPScreen/control.mirrored b/server/site_tests/display_HDCPScreen/control.mirrored
deleted file mode 100644
index c973be8..0000000
--- a/server/site_tests/display_HDCPScreen/control.mirrored
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_HDCPScreen.mirrored"
-PURPOSE = "Remotely controlled display to enable HDCP and check screen."
-CRITERIA = "This test will fail if Chameleon sees a wrong screen."
-# HDCP control/query is broken on DUT. Disable the test until it is fixed.
-#ATTRIBUTES = "suite:chameleon_hdmi"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = "chameleon"
-
-DOC = """
-This test forces CrOS to enable HDCP and compares screens between CrOS
-and Chameleon.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test('display_HDCPScreen', host=host, test_mirrored=True,
-                 tag='mirrored')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_HDCPScreen/display_HDCPScreen.py b/server/site_tests/display_HDCPScreen/display_HDCPScreen.py
deleted file mode 100644
index f1abfbc..0000000
--- a/server/site_tests/display_HDCPScreen/display_HDCPScreen.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This is a server side to enable HDCP and verify screen."""
-
-import logging
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.chameleon import chameleon_port_finder
-from autotest_lib.client.cros.chameleon import chameleon_screen_test
-from autotest_lib.server import test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-
-class display_HDCPScreen(test.test):
-    """Server side test to enable HDCP and verify screen.
-
-    This test forces CrOS to enable HDCP and compares screens between CrOS
-    and Chameleon.
-    """
-    version = 1
-
-    TEST_CONFIGS = [
-        # (enable_chameleon, request_cros, expected_cros_state,
-        #  expected_chameleon_state)
-        (True, 'Desired', 'Enabled', True),
-        (False, 'Desired', 'Desired', False),
-        # TODO: Investigate the case below which was disabled as it failed.
-        # Check http://crbug.com/447493
-        #(True, 'Undesired', 'Undesired', False),
-        (False, 'Undesired', 'Undesired', False),
-    ]
-
-    DURATION_UNPLUG_FOR_HDCP = 1
-    TIMEOUT_HDCP_SWITCH = 10
-
-    def run_once(self, host, test_mirrored=False):
-        if host.get_architecture() != 'arm':
-            raise error.TestNAError('HDCP is not supported on a non-ARM device')
-
-        factory = remote_facade_factory.RemoteFacadeFactory(host)
-        display_facade = factory.create_display_facade()
-        chameleon_board = host.chameleon
-
-        chameleon_board.setup_and_reset(self.outputdir)
-        finder = chameleon_port_finder.ChameleonVideoInputFinder(
-                chameleon_board, display_facade)
-
-        errors = []
-        for chameleon_port in finder.iterate_all_ports():
-            screen_test = chameleon_screen_test.ChameleonScreenTest(
-                    host, chameleon_port, display_facade, self.outputdir)
-
-            logging.info('See the display on Chameleon: port %d (%s)',
-                         chameleon_port.get_connector_id(),
-                         chameleon_port.get_connector_type())
-
-            logging.info('Set mirrored: %s', test_mirrored)
-            display_facade.set_mirrored(test_mirrored)
-
-            resolution = display_facade.get_external_resolution()
-            logging.info('Detected resolution on CrOS: %r', resolution)
-
-            original_cros_state = display_facade.get_content_protection()
-            was_chameleon_enabled = (
-                    chameleon_port.is_content_protection_enabled())
-            try:
-                for (enable_chameleon, request_cros, expected_cros_state,
-                     expected_chameleon_state) in self.TEST_CONFIGS:
-                    # Do unplug and plug to emulate switching to a different
-                    # display with a different content protection state.
-                    chameleon_port.unplug()
-                    logging.info('Set Chameleon HDCP: %r', enable_chameleon)
-                    chameleon_port.set_content_protection(enable_chameleon)
-                    time.sleep(self.DURATION_UNPLUG_FOR_HDCP)
-                    chameleon_port.plug()
-                    chameleon_port.wait_video_input_stable()
-
-                    logging.info('Request CrOS HDCP: %s', request_cros)
-                    display_facade.set_content_protection(request_cros)
-
-                    state = utils.wait_for_value(
-                            display_facade.get_content_protection, 'Enabled',
-                            timeout_sec=self.TIMEOUT_HDCP_SWITCH)
-                    logging.info('Got CrOS state: %s', state)
-                    if state != expected_cros_state:
-                        error_message = ('Failed to enable HDCP, state: %r' %
-                                         state)
-                        logging.error(error_message)
-                        errors.append(error_message)
-
-                    encrypted = chameleon_port.is_video_input_encrypted()
-                    logging.info('Got Chameleon state: %r', encrypted)
-                    if encrypted != expected_chameleon_state:
-                        error_message = ('Chameleon found HDCP in wrong state: '
-                                         'expected %r but got %r' %
-                                         (expected_chameleon_state, encrypted))
-                        logging.error(error_message)
-                        errors.append(error_message)
-
-                    logging.info('Test screen under HDCP %s...',
-                                 'enabled' if encrypted else 'disabled')
-                    screen_test.test_screen_with_image(
-                            resolution, test_mirrored, errors)
-            finally:
-                display_facade.set_content_protection(original_cros_state)
-                chameleon_port.set_content_protection(was_chameleon_enabled)
-
-        if errors:
-            raise error.TestFail('; '.join(set(errors)))
diff --git a/server/site_tests/display_HotPlugAtBoot/control.extended b/server/site_tests/display_HotPlugAtBoot/control.extended
index 80a7291..13e56fd 100644
--- a/server/site_tests/display_HotPlugAtBoot/control.extended
+++ b/server/site_tests/display_HotPlugAtBoot/control.extended
@@ -8,13 +8,14 @@
 NAME = "display_HotPlugAtBoot.extended"
 PURPOSE = "Remotely controlled display hot-plug and reboot test."
 CRITERIA = "This test will fail if DUT doesn't see the display after boot."
-ATTRIBUTES = "suite:chameleon_dp, suite:chameleon_dp_hdmi, suite:chameleon_hdmi_perbuild, suite:chameleon_hdmi, suite:chameleon_vga"
+ATTRIBUTES = "suite:chameleon_hdmi_perbuild"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display hot-plug and reboot.
diff --git a/server/site_tests/display_HotPlugAtBoot/control.extended.unplug b/server/site_tests/display_HotPlugAtBoot/control.extended.unplug
index 60723f4..d0b79a5 100644
--- a/server/site_tests/display_HotPlugAtBoot/control.extended.unplug
+++ b/server/site_tests/display_HotPlugAtBoot/control.extended.unplug
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display hot-plug and reboot.
diff --git a/server/site_tests/display_HotPlugAtBoot/control.mirrored b/server/site_tests/display_HotPlugAtBoot/control.mirrored
index 88d368a..07c75c0 100644
--- a/server/site_tests/display_HotPlugAtBoot/control.mirrored
+++ b/server/site_tests/display_HotPlugAtBoot/control.mirrored
@@ -8,13 +8,14 @@
 NAME = "display_HotPlugAtBoot.mirrored"
 PURPOSE = "Remotely controlled display hot-plug and reboot test."
 CRITERIA = "This test will fail if DUT doesn't see the display after boot."
-ATTRIBUTES = "suite:chameleon_dp, suite:chameleon_dp_hdmi, suite:chameleon_hdmi_perbuild, suite:chameleon_hdmi"
+#ATTRIBUTES = "suite:chameleon_hdmi_perbuild"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display hot-plug and reboot.
diff --git a/server/site_tests/display_HotPlugAtBoot/control.mirrored.unplug b/server/site_tests/display_HotPlugAtBoot/control.mirrored.unplug
index 675a5e3..c4dca62 100644
--- a/server/site_tests/display_HotPlugAtBoot/control.mirrored.unplug
+++ b/server/site_tests/display_HotPlugAtBoot/control.mirrored.unplug
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display hot-plug and reboot.
diff --git a/server/site_tests/display_HotPlugAtBoot/display_HotPlugAtBoot.py b/server/site_tests/display_HotPlugAtBoot/display_HotPlugAtBoot.py
index 5a3845b..bfad585 100644
--- a/server/site_tests/display_HotPlugAtBoot/display_HotPlugAtBoot.py
+++ b/server/site_tests/display_HotPlugAtBoot/display_HotPlugAtBoot.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/display_HotPlugAtSuspend/control.extended b/server/site_tests/display_HotPlugAtSuspend/control.extended
index c3c279a..6f17d33 100644
--- a/server/site_tests/display_HotPlugAtSuspend/control.extended
+++ b/server/site_tests/display_HotPlugAtSuspend/control.extended
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display hot-plug and suspend/resume.
diff --git a/server/site_tests/display_HotPlugAtSuspend/control.extended_unplugged b/server/site_tests/display_HotPlugAtSuspend/control.extended_unplugged
index 20e4ef1..bde3958 100644
--- a/server/site_tests/display_HotPlugAtSuspend/control.extended_unplugged
+++ b/server/site_tests/display_HotPlugAtSuspend/control.extended_unplugged
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display hot-plug and suspend/resume.
diff --git a/server/site_tests/display_HotPlugAtSuspend/control.mirrored b/server/site_tests/display_HotPlugAtSuspend/control.mirrored
index 8e47ffd..f92ba5b 100644
--- a/server/site_tests/display_HotPlugAtSuspend/control.mirrored
+++ b/server/site_tests/display_HotPlugAtSuspend/control.mirrored
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display hot-plug and suspend/resume.
diff --git a/server/site_tests/display_HotPlugAtSuspend/control.mirrored_unplugged b/server/site_tests/display_HotPlugAtSuspend/control.mirrored_unplugged
index 113215f..0b028eb 100644
--- a/server/site_tests/display_HotPlugAtSuspend/control.mirrored_unplugged
+++ b/server/site_tests/display_HotPlugAtSuspend/control.mirrored_unplugged
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display hot-plug and suspend/resume.
diff --git a/server/site_tests/display_HotPlugAtSuspend/control.suspend_only b/server/site_tests/display_HotPlugAtSuspend/control.suspend_only
index 0865948..4a63187 100644
--- a/server/site_tests/display_HotPlugAtSuspend/control.suspend_only
+++ b/server/site_tests/display_HotPlugAtSuspend/control.suspend_only
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This is a control group of only doing suspend/resume without display hot-plug.
diff --git a/server/site_tests/display_HotPlugAtSuspend/display_HotPlugAtSuspend.py b/server/site_tests/display_HotPlugAtSuspend/display_HotPlugAtSuspend.py
index e7cadf9..389e436 100644
--- a/server/site_tests/display_HotPlugAtSuspend/display_HotPlugAtSuspend.py
+++ b/server/site_tests/display_HotPlugAtSuspend/display_HotPlugAtSuspend.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/display_HotPlugNoisy/control.extended b/server/site_tests/display_HotPlugNoisy/control.extended
deleted file mode 100644
index 78756f0..0000000
--- a/server/site_tests/display_HotPlugNoisy/control.extended
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_HotPlugNoisy.extended"
-PURPOSE = "Remotely controlled noisy display HPD test."
-CRITERIA = "This test will fail if screen pixels do not match."
-ATTRIBUTES = "suite:chameleon_hdmi_unstable"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon'
-
-DOC = """
-This test remotely emulates noisy HPD line when connecting to an external
-display in extended mode using the Chameleon board.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("display_HotPlugNoisy", host=host, tag="extended")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_HotPlugNoisy/control.mirrored b/server/site_tests/display_HotPlugNoisy/control.mirrored
deleted file mode 100644
index 7cda5a8..0000000
--- a/server/site_tests/display_HotPlugNoisy/control.mirrored
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_HotPlugNoisy.mirrored"
-PURPOSE = "Remotely controlled noisy display HPD test."
-CRITERIA = "This test will fail if screen pixels do not match."
-ATTRIBUTES = "suite:chameleon_hdmi_unstable"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon'
-
-DOC = """
-This test remotely emulates noisy HPD line when connecting to an external
-display in mirrored mode using the Chameleon board.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("display_HotPlugNoisy", host=host, test_mirrored=True,
-                 tag="mirrored")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_HotPlugNoisy/display_HotPlugNoisy.py b/server/site_tests/display_HotPlugNoisy/display_HotPlugNoisy.py
deleted file mode 100644
index d11ff00..0000000
--- a/server/site_tests/display_HotPlugNoisy/display_HotPlugNoisy.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This test remotely emulates noisy HPD line when connecting to an external
-display in extended mode using the Chameleon board."""
-
-import logging
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.chameleon import chameleon_port_finder
-from autotest_lib.client.cros.chameleon import chameleon_screen_test
-from autotest_lib.server import test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-
-class display_HotPlugNoisy(test.test):
-    """Noisy display HPD test.
-
-    This test talks to a Chameleon board and a DUT to set up, run, and verify
-    DUT behavior in response to noisy HPD line.
-    """
-    version = 1
-    PLUG_CONFIGS = [
-        # (plugged_before_noise, plugged_after_noise)
-
-        (False, False),
-        (False, True),
-        (True, False),
-        (True, True),
-    ]
-
-    # pulse segments in msec that end with plugged state
-    PULSES_PLUGGED = [1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024]
-    # pulse segments in msec that end with unplugged state
-    PULSES_UNPLUGGED = PULSES_PLUGGED + [2048]
-
-    REPLUG_DELAY_SEC = 1
-
-
-    def run_once(self, host, test_mirrored=False):
-        if test_mirrored and not host.get_board_type() == 'CHROMEBOOK':
-            raise error.TestNAError('DUT is not Chromebook. Test Skipped')
-
-        factory = remote_facade_factory.RemoteFacadeFactory(host)
-        display_facade = factory.create_display_facade()
-        chameleon_board = host.chameleon
-
-        chameleon_board.setup_and_reset(self.outputdir)
-        finder = chameleon_port_finder.ChameleonVideoInputFinder(
-                chameleon_board, display_facade)
-
-        errors = []
-        warns = []
-        for chameleon_port in finder.iterate_all_ports():
-            screen_test = chameleon_screen_test.ChameleonScreenTest(
-                    host, chameleon_port, display_facade, self.outputdir)
-
-            logging.info('See the display on Chameleon: port %d (%s)',
-                         chameleon_port.get_connector_id(),
-                         chameleon_port.get_connector_type())
-
-            logging.info('Set mirrored: %s', test_mirrored)
-            display_facade.set_mirrored(test_mirrored)
-
-            # Keep the original connector name, for later comparison.
-            expected_connector = display_facade.get_external_connector_name()
-            resolution = display_facade.get_external_resolution()
-            logging.info('See the display on DUT: %s %r',
-                         expected_connector, resolution)
-
-            for (plugged_before_noise,
-                 plugged_after_noise) in self.PLUG_CONFIGS:
-                logging.info('TESTING THE CASE: %s > noise > %s',
-                             'plug' if plugged_before_noise else 'unplug',
-                             'plug' if plugged_after_noise else 'unplug')
-
-                chameleon_port.set_plug(plugged_before_noise)
-
-                if screen_test.check_external_display_connected(
-                        expected_connector if plugged_before_noise else False,
-                        errors):
-                    # Skip the following test if an unexpected display detected.
-                    continue
-
-                chameleon_port.fire_mixed_hpd_pulses(
-                        self.PULSES_PLUGGED if plugged_after_noise
-                                            else self.PULSES_UNPLUGGED)
-
-                if plugged_after_noise:
-                    chameleon_port.wait_video_input_stable()
-                    if test_mirrored:
-                        # Wait for resolution change to make sure the resolution
-                        # is stable before moving on. This is to deal with the
-                        # case where DUT may respond slowly after the noise.
-                        # If the resolution doesn't change, then we are
-                        # confident that it is stable. Otherwise, a slow
-                        # response is caught.
-                        r = display_facade.get_internal_resolution()
-                        utils.wait_for_value_changed(
-                                display_facade.get_internal_resolution,
-                                old_value=r)
-
-                    err = screen_test.check_external_display_connected(
-                            expected_connector)
-
-                    if not err:
-                        err = screen_test.test_screen_with_image(
-                                resolution, test_mirrored)
-                    if err:
-                        # When something goes wrong after the noise, a normal
-                        # user would try to re-plug the cable to recover.
-                        # We emulate this behavior below and report error if
-                        # the problem persists.
-                        logging.warn('Possibly flaky: %s', err)
-                        warns.append('Possibly flaky: %s' % err)
-                        logging.info('Replug and retry the screen test...')
-                        chameleon_port.unplug()
-                        time.sleep(self.REPLUG_DELAY_SEC)
-                        chameleon_port.plug()
-                        chameleon_port.wait_video_input_stable()
-                        screen_test.test_screen_with_image(
-                                resolution, test_mirrored, errors)
-                else:
-                    screen_test.check_external_display_connected(False, errors)
-                    time.sleep(1)
-
-        if errors:
-            raise error.TestFail('; '.join(set(errors)))
-        elif warns:
-            raise error.TestWarn('; '.join(set(warns)))
diff --git a/server/site_tests/display_LidCloseOpen/control.extended b/server/site_tests/display_LidCloseOpen/control.extended
index 2065416..a51707b 100644
--- a/server/site_tests/display_LidCloseOpen/control.extended
+++ b/server/site_tests/display_LidCloseOpen/control.extended
@@ -8,13 +8,14 @@
 NAME = "display_LidCloseOpen.extended"
 PURPOSE = "Remotely controlled close and open lid test."
 CRITERIA = "This test will fail if checks mentioned below fail."
-ATTRIBUTES = "suite:chameleon_dp, suite:chameleon_dp_hdmi, suite:chameleon_hdmi, suite:chameleon_hdmi_perbuild"
+ATTRIBUTES = "suite:chameleon_hdmi_perbuild"
 TIME = "LONG"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/control.extended_plug_close_unplug_open_plug b/server/site_tests/display_LidCloseOpen/control.extended_plug_close_unplug_open_plug
index 3a463d1..be1a34c 100644
--- a/server/site_tests/display_LidCloseOpen/control.extended_plug_close_unplug_open_plug
+++ b/server/site_tests/display_LidCloseOpen/control.extended_plug_close_unplug_open_plug
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/control.extended_plug_close_unplug_plug_open b/server/site_tests/display_LidCloseOpen/control.extended_plug_close_unplug_plug_open
index 74cc49d..ff27d49 100644
--- a/server/site_tests/display_LidCloseOpen/control.extended_plug_close_unplug_plug_open
+++ b/server/site_tests/display_LidCloseOpen/control.extended_plug_close_unplug_plug_open
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/control.extended_unplug_close_plug_open b/server/site_tests/display_LidCloseOpen/control.extended_unplug_close_plug_open
deleted file mode 100644
index 7e701df..0000000
--- a/server/site_tests/display_LidCloseOpen/control.extended_unplug_close_plug_open
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_LidCloseOpen.extended_uplug_close_plug_open"
-PURPOSE = "Remotely controlled close and open lid test."
-CRITERIA = "This test will fail if checks mentioned below fail."
-#ATTRIBUTES = "suite:chameleon_hdmi_unstable"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
-
-DOC = """
-This test remotely emulates external display lid close/open scenario.
-This test will fail if DUT doesn't
- - go to docked mode
- - return to the original mode
- - suspend after unplug in docked mode
- - resume after open
- - pass image test
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-# Plugged status (before_close, after_close, before_open)
-plug_status=[(False, True, True)]
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args,
-                             servo_args=servo_args)
-    job.run_test("display_LidCloseOpen", host=host, plug_status=plug_status,
-                 tag="extended_uplug_close_plug_open")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_LidCloseOpen/control.extended_unplug_close_plug_unplug_open_plug b/server/site_tests/display_LidCloseOpen/control.extended_unplug_close_plug_unplug_open_plug
index 52bd8ca..78c2ddf 100644
--- a/server/site_tests/display_LidCloseOpen/control.extended_unplug_close_plug_unplug_open_plug
+++ b/server/site_tests/display_LidCloseOpen/control.extended_unplug_close_plug_unplug_open_plug
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/control.extended_unplugged_combined b/server/site_tests/display_LidCloseOpen/control.extended_unplugged_combined
index db0fd7b..d65a088 100644
--- a/server/site_tests/display_LidCloseOpen/control.extended_unplugged_combined
+++ b/server/site_tests/display_LidCloseOpen/control.extended_unplugged_combined
@@ -13,6 +13,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/control.mirrored b/server/site_tests/display_LidCloseOpen/control.mirrored
index a4b1126..79ed041 100644
--- a/server/site_tests/display_LidCloseOpen/control.mirrored
+++ b/server/site_tests/display_LidCloseOpen/control.mirrored
@@ -8,13 +8,14 @@
 NAME = "display_LidCloseOpen.mirrored"
 PURPOSE = "Remotely controlled close and open lid test."
 CRITERIA = "This test will fail if checks mentioned below fail."
-ATTRIBUTES = "suite:chameleon_dp, suite:chameleon_dp_hdmi, suite:chameleon_hdmi, suite:chameleon_hdmi_perbuild"
+#ATTRIBUTES = "suite:chameleon_hdmi_perbuild"
 TIME = "LONG"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/control.mirrored_plug_close_unplug_open_plug b/server/site_tests/display_LidCloseOpen/control.mirrored_plug_close_unplug_open_plug
index 0bfba79..8a31db0 100644
--- a/server/site_tests/display_LidCloseOpen/control.mirrored_plug_close_unplug_open_plug
+++ b/server/site_tests/display_LidCloseOpen/control.mirrored_plug_close_unplug_open_plug
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/control.mirrored_plug_close_unplug_plug_open b/server/site_tests/display_LidCloseOpen/control.mirrored_plug_close_unplug_plug_open
index 0bd6022..e1881ec 100644
--- a/server/site_tests/display_LidCloseOpen/control.mirrored_plug_close_unplug_plug_open
+++ b/server/site_tests/display_LidCloseOpen/control.mirrored_plug_close_unplug_plug_open
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/control.mirrored_unplug_close_plug_open b/server/site_tests/display_LidCloseOpen/control.mirrored_unplug_close_plug_open
index 67a7e44..5da986f 100644
--- a/server/site_tests/display_LidCloseOpen/control.mirrored_unplug_close_plug_open
+++ b/server/site_tests/display_LidCloseOpen/control.mirrored_unplug_close_plug_open
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/control.mirrored_unplug_close_plug_unplug_open_plug b/server/site_tests/display_LidCloseOpen/control.mirrored_unplug_close_plug_unplug_open_plug
index 6bf1a0d..a4ca52b 100644
--- a/server/site_tests/display_LidCloseOpen/control.mirrored_unplug_close_plug_unplug_open_plug
+++ b/server/site_tests/display_LidCloseOpen/control.mirrored_unplug_close_plug_unplug_open_plug
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/control.mirrored_unplugged_combined b/server/site_tests/display_LidCloseOpen/control.mirrored_unplugged_combined
index 89a530c..c4a378f 100644
--- a/server/site_tests/display_LidCloseOpen/control.mirrored_unplugged_combined
+++ b/server/site_tests/display_LidCloseOpen/control.mirrored_unplugged_combined
@@ -13,6 +13,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates external display lid close/open scenario.
diff --git a/server/site_tests/display_LidCloseOpen/display_LidCloseOpen.py b/server/site_tests/display_LidCloseOpen/display_LidCloseOpen.py
index ea4754e..d8bb300 100644
--- a/server/site_tests/display_LidCloseOpen/display_LidCloseOpen.py
+++ b/server/site_tests/display_LidCloseOpen/display_LidCloseOpen.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/display_NoEdid/control.extended b/server/site_tests/display_NoEdid/control.extended
deleted file mode 100644
index 43baad5..0000000
--- a/server/site_tests/display_NoEdid/control.extended
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_NoEdid.extended"
-PURPOSE = "Remotely controlled display with no EDID test."
-CRITERIA = "This test will fail if DUT fails to switch to a standard mode."
-ATTRIBUTES = "suite:chameleon_vga"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = "chameleon"
-
-DOC = """
-This test remotely controls the Chameleon to not respond to an EDID request.
-It then tests whether the DUT switches to a standard mode when no EDID is found.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test('display_NoEdid', host=host, tag='extended')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_NoEdid/control.mirrored b/server/site_tests/display_NoEdid/control.mirrored
deleted file mode 100644
index 2c33a2a..0000000
--- a/server/site_tests/display_NoEdid/control.mirrored
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_NoEdid.mirrored"
-PURPOSE = "Remotely controlled display with no EDID test."
-CRITERIA = "This test will fail if DUT fails to switch to a standard mode."
-ATTRIBUTES = "suite:chameleon_vga"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = "chameleon"
-
-DOC = """
-This test remotely controls the Chameleon to not respond to an EDID request.
-It then tests whether the DUT switches to a standard mode when no EDID is found.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test('display_NoEdid', host=host, test_mirrored=True,
-                 tag='mirrored')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_NoEdid/display_NoEdid.py b/server/site_tests/display_NoEdid/display_NoEdid.py
deleted file mode 100644
index 5a8415f..0000000
--- a/server/site_tests/display_NoEdid/display_NoEdid.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This is a server side test to check no EDID on external display."""
-
-import logging
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.chameleon import chameleon_port_finder
-from autotest_lib.client.cros.chameleon import chameleon_screen_test
-from autotest_lib.client.cros.chameleon import edid
-from autotest_lib.server import test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-
-class display_NoEdid(test.test):
-    """Server side test to check no EDID on external display.
-
-    This test talks to a Chameleon board and a DUT to set up, run, and verify
-    the case that no EDID on the external display.
-    """
-    version = 1
-
-    STANDARD_MODE_RESOLUTIONS = [(1024, 768), (1024, 720), (800, 600)]
-
-    def run_once(self, host, test_mirrored=False):
-        factory = remote_facade_factory.RemoteFacadeFactory(host)
-        display_facade = factory.create_display_facade()
-        chameleon_board = host.chameleon
-
-        chameleon_board.setup_and_reset(self.outputdir)
-        finder = chameleon_port_finder.ChameleonVideoInputFinder(
-                chameleon_board, display_facade)
-
-        errors = []
-        for chameleon_port in finder.iterate_all_ports():
-            screen_test = chameleon_screen_test.ChameleonScreenTest(
-                    host, chameleon_port, display_facade, self.outputdir)
-
-            with chameleon_port.use_edid(edid.NO_EDID):
-                connector_name = utils.wait_for_value_changed(
-                            display_facade.get_external_connector_name,
-                            old_value=False)
-                if not connector_name:
-                    error_message = 'Failed to detect display without an EDID'
-                    logging.error(error_message)
-                    errors.append(error_message)
-                    continue
-
-                logging.info('Set mirrored: %s', test_mirrored)
-                display_facade.set_mirrored(test_mirrored)
-
-                resolution = display_facade.get_external_resolution()
-                if resolution not in self.STANDARD_MODE_RESOLUTIONS:
-                    error_message = ('Switched to a non-standard mode: %r' %
-                                     resolution)
-                    logging.error(error_message)
-                    errors.append(error_message)
-                    continue
-
-                screen_test.test_screen_with_image(
-                        resolution, test_mirrored, errors)
-
-        if errors:
-            raise error.TestFail('; '.join(set(errors)))
diff --git a/server/site_tests/display_Resolution/control.4K.extended b/server/site_tests/display_Resolution/control.4K.extended
index b3e91f4..a7b5858 100644
--- a/server/site_tests/display_Resolution/control.4K.extended
+++ b/server/site_tests/display_Resolution/control.4K.extended
@@ -14,6 +14,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon, 4k_resolution'
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests display resolution functions.
diff --git a/server/site_tests/display_Resolution/control.4K.mirrored b/server/site_tests/display_Resolution/control.4K.mirrored
index a84f7f3..ec1639f 100644
--- a/server/site_tests/display_Resolution/control.4K.mirrored
+++ b/server/site_tests/display_Resolution/control.4K.mirrored
@@ -14,6 +14,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon, 4k_resolution'
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests display resolution functions.
diff --git a/server/site_tests/display_Resolution/control.extended b/server/site_tests/display_Resolution/control.extended
index fad7b4a..93d8453 100644
--- a/server/site_tests/display_Resolution/control.extended
+++ b/server/site_tests/display_Resolution/control.extended
@@ -8,13 +8,14 @@
 NAME = "display_Resolution.extended"
 PURPOSE = "Remotely controlled display resolution test."
 CRITERIA = "This test will fail if the captured display checksum mismatches."
-ATTRIBUTES = "suite:chameleon_dp, suite:chameleon_dp_hdmi, suite:chameleon_hdmi_perbuild, suite:chameleon_hdmi"
+ATTRIBUTES = "suite:chameleon_hdmi_perbuild"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests display resolution functions.
diff --git a/server/site_tests/display_Resolution/control.mirrored b/server/site_tests/display_Resolution/control.mirrored
index 25a1931..d36e303 100644
--- a/server/site_tests/display_Resolution/control.mirrored
+++ b/server/site_tests/display_Resolution/control.mirrored
@@ -8,13 +8,14 @@
 NAME = "display_Resolution.mirrored"
 PURPOSE = "Remotely controlled display resolution test."
 CRITERIA = "This test will fail if the captured display checksum mismatches."
-ATTRIBUTES = "suite:chameleon_dp, suite:chameleon_dp_hdmi, suite:chameleon_hdmi_perbuild, suite:chameleon_hdmi"
+#ATTRIBUTES = "suite:chameleon_hdmi_perbuild"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests display resolution functions.
diff --git a/server/site_tests/display_Resolution/control.reboot b/server/site_tests/display_Resolution/control.reboot
index 73c2bff..fc1b232 100644
--- a/server/site_tests/display_Resolution/control.reboot
+++ b/server/site_tests/display_Resolution/control.reboot
@@ -14,6 +14,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests display resolution functions, by switching EDIDs
diff --git a/server/site_tests/display_Resolution/control.relid_extended b/server/site_tests/display_Resolution/control.relid_extended
index 9701373..f19c44c 100644
--- a/server/site_tests/display_Resolution/control.relid_extended
+++ b/server/site_tests/display_Resolution/control.relid_extended
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests display resolution functions, by switching EDIDs
diff --git a/server/site_tests/display_Resolution/control.relid_mirrored b/server/site_tests/display_Resolution/control.relid_mirrored
index a5bf5f9..7641c9b 100644
--- a/server/site_tests/display_Resolution/control.relid_mirrored
+++ b/server/site_tests/display_Resolution/control.relid_mirrored
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon,servo_state:WORKING'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests display resolution functions, by switching EDIDs
diff --git a/server/site_tests/display_Resolution/control.suspend_resume b/server/site_tests/display_Resolution/control.suspend_resume
index 6c1063f..15a8404 100644
--- a/server/site_tests/display_Resolution/control.suspend_resume
+++ b/server/site_tests/display_Resolution/control.suspend_resume
@@ -14,7 +14,8 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
+JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests display resolution functions.
diff --git a/server/site_tests/display_Resolution/display_Resolution.py b/server/site_tests/display_Resolution/display_Resolution.py
index 0abf9ac..4648683 100644
--- a/server/site_tests/display_Resolution/display_Resolution.py
+++ b/server/site_tests/display_Resolution/display_Resolution.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/display_ResolutionList/control.4K.extended b/server/site_tests/display_ResolutionList/control.4K.extended
index 7b30118..a8bb9c6 100644
--- a/server/site_tests/display_ResolutionList/control.4K.extended
+++ b/server/site_tests/display_ResolutionList/control.4K.extended
@@ -14,6 +14,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon, 4k_resolution'
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests all display resolutions available from the display
diff --git a/server/site_tests/display_ResolutionList/control.4K.mirrored b/server/site_tests/display_ResolutionList/control.4K.mirrored
index f1a4add..5e0dde6 100644
--- a/server/site_tests/display_ResolutionList/control.4K.mirrored
+++ b/server/site_tests/display_ResolutionList/control.4K.mirrored
@@ -14,6 +14,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon, 4k_resolution'
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests all display resolutions available from the display
diff --git a/server/site_tests/display_ResolutionList/control.extended b/server/site_tests/display_ResolutionList/control.extended
index fd8b2a0..f0abf30 100644
--- a/server/site_tests/display_ResolutionList/control.extended
+++ b/server/site_tests/display_ResolutionList/control.extended
@@ -14,6 +14,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests all display resolutions available from the display
diff --git a/server/site_tests/display_ResolutionList/control.mirrored b/server/site_tests/display_ResolutionList/control.mirrored
index 320c8a3..fbb9e35 100644
--- a/server/site_tests/display_ResolutionList/control.mirrored
+++ b/server/site_tests/display_ResolutionList/control.mirrored
@@ -14,6 +14,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
+PY_VERSION = 3
 
 DOC = """
 This test remotely tests all display resolutions available from the display
diff --git a/server/site_tests/display_ResolutionList/display_ResolutionList.py b/server/site_tests/display_ResolutionList/display_ResolutionList.py
index 713def7..6958ad2 100644
--- a/server/site_tests/display_ResolutionList/display_ResolutionList.py
+++ b/server/site_tests/display_ResolutionList/display_ResolutionList.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -65,7 +66,7 @@
                     display_id = utils.wait_for_value_changed(
                             display_facade.get_first_external_display_id,
                             old_value=False)
-                    if display_id < 0:
+                    if int(display_id) < 0:
                         raise error.TestFail("No external display is found.")
 
                     # In mirror mode only display id is '0', as external
diff --git a/server/site_tests/display_ServerChameleonConnection/control b/server/site_tests/display_ServerChameleonConnection/control
index 1af8830..e90d96d 100644
--- a/server/site_tests/display_ServerChameleonConnection/control
+++ b/server/site_tests/display_ServerChameleonConnection/control
@@ -8,12 +8,13 @@
 NAME = "display_ServerChameleonConnection"
 PURPOSE = "Chameleon connection test from server-side."
 CRITERIA = 'This test fails if DUT and Chameleon are not connected properly.'
-ATTRIBUTES = "suite:chameleon_hdmi_unstable"
+ATTRIBUTES = "suite:chameleon_hdmi_perbuild"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
+PY_VERSION = 3
 
 DOC = """
 This test checks the connection between DUT and Chameleon.
diff --git a/server/site_tests/display_ServerChameleonConnection/display_ServerChameleonConnection.py b/server/site_tests/display_ServerChameleonConnection/display_ServerChameleonConnection.py
index 506667c..3c37add 100644
--- a/server/site_tests/display_ServerChameleonConnection/display_ServerChameleonConnection.py
+++ b/server/site_tests/display_ServerChameleonConnection/display_ServerChameleonConnection.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/display_SuspendStress/control.extended b/server/site_tests/display_SuspendStress/control.extended
deleted file mode 100644
index f6764e3..0000000
--- a/server/site_tests/display_SuspendStress/control.extended
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_SuspendStress.extended"
-PURPOSE = "Remotely controlled suspend/resume-stressed display test."
-CRITERIA = "This test will fail if the captured screen pixels mismatch."
-ATTRIBUTES = "suite:chameleon_hdmi_unstable"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
-
-DOC = """
-This test remotely tests external display function with DUT being
-repeatedly suspended and resumed in extended mode.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("display_SuspendStress", host=host, tag="extended")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_SuspendStress/control.extended_10min b/server/site_tests/display_SuspendStress/control.extended_10min
deleted file mode 100644
index be860b6..0000000
--- a/server/site_tests/display_SuspendStress/control.extended_10min
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_SuspendStress.extended_10min"
-PURPOSE = "Remotely controlled suspend/resume-stressed display test."
-CRITERIA = "This test will fail if the captured screen pixels mismatch."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon'
-
-DOC = """
-This test remotely tests external display function with DUT being
-repeatedly suspended and resumed in extended mode.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("display_SuspendStress", host=host, tag="extended",
-                 repeat_count=20)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_SuspendStress/control.extended_30min b/server/site_tests/display_SuspendStress/control.extended_30min
deleted file mode 100644
index 6d493a9..0000000
--- a/server/site_tests/display_SuspendStress/control.extended_30min
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_SuspendStress.extended_30min"
-PURPOSE = "Remotely controlled suspend/resume-stressed display test."
-CRITERIA = "This test will fail if the captured screen pixels mismatch."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon'
-
-DOC = """
-This test remotely tests external display function with DUT being
-repeatedly suspended and resumed in extended mode.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("display_SuspendStress", host=host, tag="extended",
-                 repeat_count=60)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_SuspendStress/control.mirrored b/server/site_tests/display_SuspendStress/control.mirrored
deleted file mode 100644
index 8866247..0000000
--- a/server/site_tests/display_SuspendStress/control.mirrored
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_SuspendStress.mirrored"
-PURPOSE = "Remotely controlled suspend/resume-stressed display test."
-CRITERIA = "This test will fail if the captured screen pixels mismatch."
-ATTRIBUTES = "suite:chameleon_hdmi_unstable"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
-
-DOC = """
-This test remotely tests external display function with DUT being
-repeatedly suspended and resumed in mirrored mode.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("display_SuspendStress", host=host, test_mirrored=True,
-                 tag="mirrored")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_SuspendStress/control.mirrored_10min b/server/site_tests/display_SuspendStress/control.mirrored_10min
deleted file mode 100644
index 62f95ca..0000000
--- a/server/site_tests/display_SuspendStress/control.mirrored_10min
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_SuspendStress.mirrored_10min"
-PURPOSE = "Remotely controlled suspend/resume-stressed display test."
-CRITERIA = "This test will fail if the captured screen pixels mismatch."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon'
-
-DOC = """
-This test remotely tests external display function with DUT being
-repeatedly suspended and resumed in mirrored mode.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("display_SuspendStress", host=host, test_mirrored=True,
-                 repeat_count=20, tag="mirrored")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_SuspendStress/control.mirrored_30min b/server/site_tests/display_SuspendStress/control.mirrored_30min
deleted file mode 100644
index 3eb14c8..0000000
--- a/server/site_tests/display_SuspendStress/control.mirrored_30min
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "display_SuspendStress.mirrored_30min"
-PURPOSE = "Remotely controlled suspend/resume-stressed display test."
-CRITERIA = "This test will fail if the captured screen pixels mismatch."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "display"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon'
-
-DOC = """
-This test remotely tests external display function with DUT being
-repeatedly suspended and resumed in mirrored mode.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test("display_SuspendStress", host=host, test_mirrored=True,
-                 repeat_count=60, tag="mirrored")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/display_SuspendStress/display_SuspendStress.py b/server/site_tests/display_SuspendStress/display_SuspendStress.py
deleted file mode 100644
index 0b96259..0000000
--- a/server/site_tests/display_SuspendStress/display_SuspendStress.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This is a server side external display test using the Chameleon board."""
-
-import logging
-import os
-import random
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.chameleon import chameleon_port_finder
-from autotest_lib.client.cros.chameleon import chameleon_screen_test
-from autotest_lib.client.cros.chameleon import edid
-from autotest_lib.server import test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-
-class display_SuspendStress(test.test):
-    """Server side external display test.
-
-    This test talks to a Chameleon board and a DUT to set up, run, and verify
-    external display function of the DUT with DUT being repeatedly
-    suspended and resumed.
-    """
-    version = 1
-    DEFAULT_TESTCASE_SPEC = ('HDMI', 1920, 1080)
-
-    # TODO: Allow reading testcase_spec from command line.
-    def run_once(self, host, test_mirrored=False, testcase_spec=None,
-                 repeat_count=3, suspend_time_range=(5,7)):
-        if test_mirrored and not host.get_board_type() == 'CHROMEBOOK':
-            raise error.TestNAError('DUT is not Chromebook. Test Skipped')
-
-        if testcase_spec is None:
-            testcase_spec = self.DEFAULT_TESTCASE_SPEC
-
-        test_name = "%s_%dx%d" % testcase_spec
-        _, width, height = testcase_spec
-        test_resolution = (width, height)
-
-        if not edid.is_edid_supported(host, testcase_spec[1], testcase_spec[2]):
-            raise error.TestFail('Error: EDID is not supported by the platform'
-                    ': %s', test_name)
-
-        edid_path = os.path.join(self.bindir, 'test_data', 'edids', test_name)
-
-        factory = remote_facade_factory.RemoteFacadeFactory(host)
-        display_facade = factory.create_display_facade()
-        chameleon_board = host.chameleon
-
-        chameleon_board.setup_and_reset(self.outputdir)
-        finder = chameleon_port_finder.ChameleonVideoInputFinder(
-                chameleon_board, display_facade)
-        for chameleon_port in finder.iterate_all_ports():
-            screen_test = chameleon_screen_test.ChameleonScreenTest(
-                    host, chameleon_port, display_facade, self.outputdir)
-
-            logging.info('Use EDID: %s', test_name)
-            with chameleon_port.use_edid_file(edid_path):
-                # Keep the original connector name, for later comparison.
-                expected_connector = utils.wait_for_value_changed(
-                        display_facade.get_external_connector_name,
-                        old_value=False)
-                logging.info('See the display on DUT: %s', expected_connector)
-
-                if not expected_connector:
-                    raise error.TestFail('Error: Failed to see external display'
-                            ' (chameleon) from DUT: %s', test_name)
-
-                logging.info('Set mirrored: %s', test_mirrored)
-                display_facade.set_mirrored(test_mirrored)
-                logging.info('Repeat %d times Suspend and resume', repeat_count)
-
-                count = repeat_count
-                while count > 0:
-                    count -= 1
-                    if test_mirrored:
-                        # magic sleep to make nyan_big wake up in mirrored mode
-                        # TODO: find root cause
-                        time.sleep(6)
-                    suspend_time = random.randint(*suspend_time_range)
-                    logging.info('Going to suspend, for %d seconds...',
-                                 suspend_time)
-                    display_facade.suspend_resume(suspend_time)
-                    logging.info('Resumed back')
-
-                    message = screen_test.check_external_display_connected(
-                            expected_connector)
-                    if not message:
-                        message = screen_test.test_screen_with_image(
-                                test_resolution, test_mirrored)
-                    if message:
-                        raise error.TestFail(message)
diff --git a/server/site_tests/display_SuspendStress/test_data/edids/HDMI_1920x1080 b/server/site_tests/display_SuspendStress/test_data/edids/HDMI_1920x1080
deleted file mode 100644
index 64036f0..0000000
--- a/server/site_tests/display_SuspendStress/test_data/edids/HDMI_1920x1080
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/display_SwitchMode/control b/server/site_tests/display_SwitchMode/control
index d1701d8..820c6fc 100644
--- a/server/site_tests/display_SwitchMode/control
+++ b/server/site_tests/display_SwitchMode/control
@@ -8,12 +8,13 @@
 NAME = "display_SwitchMode"
 PURPOSE = "Switch external display mode in a loop."
 CRITERIA = "This test will fail if status checks fail after mode change."
-ATTRIBUTES = "suite:chameleon_dp, suite:chameleon_dp_hdmi, suite:chameleon_hdmi, suite:chameleon_hdmi_perbuild"
+#ATTRIBUTES = "suite:chameleon_hdmi_perbuild"
 TIME = "LONG"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
+PY_VERSION = 3
 
 DOC = """
 This test switches display modes (extended and mirrored).
diff --git a/server/site_tests/display_SwitchMode/control.fast_switch b/server/site_tests/display_SwitchMode/control.fast_switch
index b4b15e6..89cfeb8 100644
--- a/server/site_tests/display_SwitchMode/control.fast_switch
+++ b/server/site_tests/display_SwitchMode/control.fast_switch
@@ -8,12 +8,13 @@
 NAME = "display_SwitchMode.fast_switch"
 PURPOSE = "Switch external display mode in a loop."
 CRITERIA = "This test will fail if status checks fail after mode change."
-ATTRIBUTES = "suite:chameleon_dp, suite:chameleon_dp_hdmi, suite:chameleon_hdmi, suite:chameleon_hdmi_perbuild"
+#ATTRIBUTES = "suite:chameleon_hdmi_perbuild"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
+PY_VERSION = 3
 
 DOC = """
 This test switches display modes (extended and mirrored).
diff --git a/server/site_tests/display_SwitchMode/display_SwitchMode.py b/server/site_tests/display_SwitchMode/display_SwitchMode.py
index c1df6e8..44d2a4d 100644
--- a/server/site_tests/display_SwitchMode/display_SwitchMode.py
+++ b/server/site_tests/display_SwitchMode/display_SwitchMode.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -77,7 +78,7 @@
             self.connector_used = (
                     self.display_facade.get_external_connector_name())
 
-            for i in xrange(repeat):
+            for i in range(repeat):
                 logging.info("Iteration %d", (i + 1))
                 self.set_mode_and_check(True, no_check)
                 self.set_mode_and_check(False, no_check)
diff --git a/server/site_tests/display_Tearing/control.extended b/server/site_tests/display_Tearing/control.extended
index f32bdfa..940b011 100644
--- a/server/site_tests/display_Tearing/control.extended
+++ b/server/site_tests/display_Tearing/control.extended
@@ -14,6 +14,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
+PY_VERSION = 3
 
 DOC = """
 This is a test for screen tearing using the Chameleon board.
diff --git a/server/site_tests/display_Tearing/control.mirrored b/server/site_tests/display_Tearing/control.mirrored
index 8f43841..9869abf 100644
--- a/server/site_tests/display_Tearing/control.mirrored
+++ b/server/site_tests/display_Tearing/control.mirrored
@@ -14,6 +14,7 @@
 TEST_CLASS = "display"
 TEST_TYPE = "server"
 DEPENDENCIES = 'chameleon'
+PY_VERSION = 3
 
 DOC = """
 This is a test for screen tearing using the Chameleon board.
diff --git a/server/site_tests/display_Tearing/display_Tearing.py b/server/site_tests/display_Tearing/display_Tearing.py
index d468766..e8e8596 100644
--- a/server/site_tests/display_Tearing/display_Tearing.py
+++ b/server/site_tests/display_Tearing/display_Tearing.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -165,7 +166,7 @@
         """
         def _discard_delayed_frames(sequence):
             return [sequence[i]
-                    for i in xrange(len(sequence))
+                    for i in range(len(sequence))
                     if i == 0 or sequence[i] != sequence[i-1]]
 
         captured_color_sequence = _discard_delayed_frames(
@@ -193,7 +194,7 @@
                 chameleon_port, self.TEST_COLOR_SEQUENCE)
         self._display_facade.close_tab(self._test_tab_descriptor)
         delay_time = [timestamp_list[i] - timestamp_list[i-1]
-                      for i in xrange(1, len(timestamp_list))]
+                      for i in range(1, len(timestamp_list))]
         logging.info('Captured %d frames\n'
                      'Checksum_table: %s\n'
                      'Captured_checksums: %s\n'
diff --git a/server/site_tests/dummy_FailServer/control b/server/site_tests/dummy_FailServer/control
deleted file mode 100644
index 55f48a9..0000000
--- a/server/site_tests/dummy_FailServer/control
+++ /dev/null
@@ -1,17 +0,0 @@
-NAME = 'dummy_FailServer'
-AUTHOR = 'scottz'
-TIME = 'SHORT'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'General'
-TEST_TYPE = 'server'
-
-
-DOC = """
-Test that always fails.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('dummy_FailServer', disable_sysinfo=False)
-
-job.parallel_simple(run, machines)
diff --git a/server/site_tests/dummy_FailServer/dummy_FailServer.py b/server/site_tests/dummy_FailServer/dummy_FailServer.py
deleted file mode 100644
index 738b9d4..0000000
--- a/server/site_tests/dummy_FailServer/dummy_FailServer.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import test
-from autotest_lib.client.common_lib import error
-
-class dummy_FailServer(test.test):
-    """A test that always fails."""
-    version = 1
-
-    def run_once(self):
-        """Run the test that always fails, once"""
-        raise error.TestFail('Test always fails intentionally.')
\ No newline at end of file
diff --git a/server/site_tests/dummy_PassServer/control b/server/site_tests/dummy_PassServer/control
deleted file mode 100644
index 045263b..0000000
--- a/server/site_tests/dummy_PassServer/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "dummy_PassServer"
-PURPOSE = "Demonstrate success methods of autotests."
-CRITERIA = "This test will always succeed."
-ATTRIBUTES = "suite:dummy_server"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "server"
-MAX_RESULT_SIZE_KB = 6000
-
-DOC = """
-This is a helper test that will succeed.
-"""
-
-def run(machine):
-    job.run_test('dummy_PassServer',
-                 host=hosts.create_host(machine))
-
-m = 'chromeos/autotest/infra_benchmark/dummy_pass_server/duration'
-parallel_simple(run, machines)
diff --git a/server/site_tests/dummy_PassServer/control.nossp b/server/site_tests/dummy_PassServer/control.nossp
deleted file mode 100644
index faa3e2b..0000000
--- a/server/site_tests/dummy_PassServer/control.nossp
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "dummy_PassServer_nossp"
-PURPOSE = "Demonstrate success methods of autotests."
-CRITERIA = "This test will always succeed."
-ATTRIBUTES = (
-        "suite:dummy_server,"
-        " suite:dummy_server_nossp,"
-        " suite:skylab_staging_test,"
-        " suite:dev_drone_image_test"
-)
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "server"
-# Force not to use server side package for this test.
-REQUIRE_SSP = False
-
-DOC = """
-This is a helper test that will succeed and is forced not to use server-side
-packaging.
-"""
-
-def run(machine):
-    job.run_test('dummy_PassServer', host=hosts.create_host(machine),
-                 expect_ssp=False)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/dummy_PassServer/control.sanity b/server/site_tests/dummy_PassServer/control.sanity
deleted file mode 100644
index 718fd66..0000000
--- a/server/site_tests/dummy_PassServer/control.sanity
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "fdeng, chromeos-lab-infrastructure"
-NAME = "dummy_PassServer.sanity"
-PURPOSE = "An empty control file that does nothing."
-CRITERIA = "This test will always succeed."
-ATTRIBUTES = "suite:sanity"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "server"
-
-DOC = """
-It belongs to sanity suite, which verifies provisioning.
-The only purpose of this test is to trigger a provision task.
-We explicitly make it not call job.runtest to avoid overhead of
-installing/uninstalling autotest packages.
-"""
-
-# We need to record the state, so that the tko parser can still parse it
-# as a test.
-job.record('START', None, NAME)
-job.record('END GOOD', None, NAME)
diff --git a/server/site_tests/dummy_PassServer/control.ssp b/server/site_tests/dummy_PassServer/control.ssp
deleted file mode 100644
index 7344251..0000000
--- a/server/site_tests/dummy_PassServer/control.ssp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "dummy_PassServer.ssp"
-PURPOSE = "Demonstrate success methods of autotests."
-CRITERIA = "This test will always succeed when running in a container."
-ATTRIBUTES = "suite:dummy_server, suite:push_to_prod, suite:skylab_staging_test, suite:dev_drone_image_test"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "server"
-
-DOC = """
-This is a helper test that will succeed and force to use server-side packaging.
-"""
-
-def run(machine):
-    job.run_test('dummy_PassServer', host=hosts.create_host(machine),
-                 expect_ssp=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/dummy_PassServer/dummy_PassServer.py b/server/site_tests/dummy_PassServer/dummy_PassServer.py
deleted file mode 100644
index 05b7fe6..0000000
--- a/server/site_tests/dummy_PassServer/dummy_PassServer.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.server import test
-
-class dummy_PassServer(test.test):
-    """Tests that server tests can pass."""
-    version = 1
-
-    def run_once(self, expect_ssp=None):
-        """There is no body for this test.
-
-        @param expect_ssp: If True, ensure test is running inside a container.
-                If False, ensure test is not running inside a container.
-                If None (default), do nothing.
-        """
-        if expect_ssp is not None:
-            if expect_ssp and not utils.is_in_container():
-                raise error.TestFail('The test is not running inside container')
-            if not expect_ssp and utils.is_in_container():
-                raise error.TestFail('Test is unexpectedly running inside a container')
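For context: utils.is_in_container(), referenced above, is not part of this diff. A minimal sketch of one cgroup-based heuristic for such a check (an assumption for illustration, not autotest's actual implementation):

    def looks_like_container():
        """Heuristic container check (illustrative only)."""
        try:
            with open('/proc/1/cgroup') as f:
                lines = f.read().splitlines()
        except IOError:
            return False
        # Outside a container, PID 1's cgroup paths are typically all '/'.
        return any(line.rsplit(':', 1)[-1] not in ('/', '') for line in lines)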
diff --git a/server/site_tests/dummy_RepeatArgs/control b/server/site_tests/dummy_RepeatArgs/control
deleted file mode 100644
index a39706d..0000000
--- a/server/site_tests/dummy_RepeatArgs/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "dummy_RepeatArgs"
-PURPOSE = "Log the control file args."
-CRITERIA = "This test will always succeed."
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "server"
-
-DOC = """
-This is a helper test that will succeed.
-"""
-
-logging.info(args)
-
diff --git a/server/site_tests/dummy_SynchronousOffloadServer/control b/server/site_tests/dummy_SynchronousOffloadServer/control
deleted file mode 100644
index 13eaeba..0000000
--- a/server/site_tests/dummy_SynchronousOffloadServer/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "jkop@google.com"
-NAME = "dummy_SynchronousOffloadServer"
-PURPOSE = "Verify tests can offload output from servers."
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "server"
-# Disable this test until it can be fixed: http://b/171572182
-# ATTRIBUTES = "suite:offloads"
-
-DOC = """
-This test creates a file in $SYNCHRONOUS_OFFLOAD_DIR on the server and succeeds.
-The task will fail only if the creation or offload of that file fails.
-"""
-
-def run(machine):
-    job.run_test('dummy_SynchronousOffloadServer',
-                 host=hosts.create_host(machine))
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/dummy_SynchronousOffloadServer/dummy_SynchronousOffloadServer.py b/server/site_tests/dummy_SynchronousOffloadServer/dummy_SynchronousOffloadServer.py
deleted file mode 100644
index 8d440cc..0000000
--- a/server/site_tests/dummy_SynchronousOffloadServer/dummy_SynchronousOffloadServer.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-class dummy_SynchronousOffloadServer(test.test):
-  version = 1
-
-  def run_once(self):
-    DIR = os.getenv('SYNCHRONOUS_OFFLOAD_DIR', "")
-    if DIR == "":
-      raise error.TestFail("Did not find value for SYNCHRONOUS_OFFLOAD_DIR")
-    if not os.path.isdir(DIR):
-      raise error.TestFail("$SYNCHRONOUS_OFFLOAD_DIR=%s, which is not "
-                           "a valid directory." % DIR)
-    logging.debug("Writing to directory %s", DIR)
-    with open(os.path.join(DIR,"test_file"), "w") as f:
-      f.write("Test string which should be offloaded")
-      logging.debug("Wrote string to test file.")
diff --git a/server/site_tests/enterprise_CFM_AutoZoomSanity/control b/server/site_tests/enterprise_CFM_AutoZoomSanity/control
deleted file mode 100644
index 74cfedf..0000000
--- a/server/site_tests/enterprise_CFM_AutoZoomSanity/control
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "harpreet@chromium.org"
-NAME = "enterprise_CFM_AutoZoomSanity"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-# TODO(https://crbug.com/887848): Disabled since it is constantly failing.
-# ATTRIBUTES = "suite:hotrod"
-DEPENDENCIES = "meet_app, huddly, mimo"
-JOB_RETRIES = 3
-
-DOC = """
-Sanity test for AutoZoom feature available with Huddly and Mimo on bluestreak.
-"""
-
-def run_test(machine):
-    peripheral_dict = {'Huddly GO': '2bd9:0011',
-                       'Hangouts Meet speakermic': '18d1:8001',
-                       'MIMO VUE HD': '17e9:016b'}
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_CFM_AutoZoomSanity', host=host,
-                 session_length=100, peripheral_dict=peripheral_dict)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_AutoZoomSanity/control.partners b/server/site_tests/enterprise_CFM_AutoZoomSanity/control.partners
deleted file mode 100644
index 8f8b1a4..0000000
--- a/server/site_tests/enterprise_CFM_AutoZoomSanity/control.partners
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "denniswu@chromium.org"
-NAME = "enterprise_CFM_AutoZoomSanity.partners"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:bluestreak-partners"
-DEPENDENCIES = "autozoom"
-JOB_RETRIES = 3
-
-DOC = """
-Sanity test for AutoZoom feature available with Huddly and Mimo on bluestreak.
-This test bypasses servo check and is used by third party vendor PAL Acoustics.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-
-def run_test(machine):
-    peripheral_dict = {'Huddly GO': '2bd9:0011',
-                       'Hangouts Meet speakermic': '18d1:8001',
-                       'MIMO VUE HD': '17e9:016b'}
-    host = hosts.create_host(machine, servo_args=None)
-    job.run_test('enterprise_CFM_AutoZoomSanity', host=host,
-                 session_length=100, peripheral_dict=peripheral_dict)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_AutoZoomSanity/enterprise_CFM_AutoZoomSanity.py b/server/site_tests/enterprise_CFM_AutoZoomSanity/enterprise_CFM_AutoZoomSanity.py
deleted file mode 100644
index c788597..0000000
--- a/server/site_tests/enterprise_CFM_AutoZoomSanity/enterprise_CFM_AutoZoomSanity.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import logging
-import os
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import perf_stat_lib
-from autotest_lib.server.cros.cfm import cfm_base_test
-from autotest_lib.server.cros import cfm_jmidata_log_collector
-
-_BASE_DIR = '/home/chronos/user/Storage/ext/'
-_EXT_ID = 'ikfcpmgefdpheiiomgmhlmmkihchmdlj'
-_JMI_DIR = '/0*/File\ System/000/t/00/*'
-_JMI_SOURCE_DIR = _BASE_DIR + _EXT_ID + _JMI_DIR
-_PA_LOGS_PATTERN = _BASE_DIR + _EXT_ID + '/def/File\ System/primary/p/00/0*'
-
-_USB_DIR = '/sys/bus/usb/devices'
-_AUTOZOOM_IS_RUNNING_STRING = 'AutoZoom running successfully.'
-
-_LONG_TIMEOUT = 15
-
-class enterprise_CFM_AutoZoomSanity(cfm_base_test.CfmBaseTest):
-    """Auto Zoom Sanity test."""
-    version = 1
-
-    def get_data_from_jmifile(self, data_type, jmidata):
-        """
-        Gets data from jmidata log for given data type.
-
-        @param data_type: Type of data to be retrieved from jmi data log.
-        @param jmidata: Raw jmi data log to parse.
-        @returns Data for given data type from jmidata log.
-        """
-        return cfm_jmidata_log_collector.GetDataFromLogs(
-                self, data_type, jmidata)
-
-
-    def get_file_to_parse(self):
-        """
-        Copy jmi logs from client to test's results directory.
-
-        @returns The newest jmi log file.
-        """
-        self._host.get_file(_JMI_SOURCE_DIR, self.resultsdir)
-        source_jmi_files = glob.glob(self.resultsdir + '/0*')
-        if not source_jmi_files:
-            raise error.TestNAError('JMI data file not found.')
-        newest_file = max(source_jmi_files, key=os.path.getctime)
-        return newest_file
-
-
-    def verify_cfm_sent_resolution(self):
-        """Check / verify CFM sent video resolution data from JMI logs."""
-        jmi_file = self.get_file_to_parse()
-        jmifile_to_parse = open(jmi_file, 'r')
-        jmidata = jmifile_to_parse.read()
-
-        cfm_sent_res_list = self.get_data_from_jmifile(
-                'video_sent_frame_height', jmidata)
-        percentile_95 = perf_stat_lib.get_kth_percentile(
-                cfm_sent_res_list, 0.95)
-
-        self.output_perf_value(description='video_sent_frame_height',
-                               value=cfm_sent_res_list,
-                               units='resolution',
-                               higher_is_better=True)
-        self.output_perf_value(description='95th percentile res sent',
-                               value=percentile_95,
-                               units='resolution',
-                               higher_is_better=True)
-
-        # TODO(dkaeding): Add logic to examine the cfm sent resolution and
-        # take appropriate action.
-        logging.info('95th percentile of outgoing video resolution: %s',
-                     percentile_95)
-
-
-    def check_verify_callgrok_logs(self):
-        """Verify needed information in callgrok logs."""
-        # TODO(dkaeding): Implement this method.
-        return NotImplemented
-
-
-    def verify_autozoom_running_in_packaged_app_logs(self):
-        """Checks logs from the device to verify that AutoZoom is running."""
-        self.save_all_packaged_app_logs()
-        pa_log_files = glob.glob(os.path.join(self.debugdir,
-                                              'packaged_app_log*.txt'))
-        for log_file in pa_log_files:
-            with open(log_file, 'r') as fhandle:
-                if _AUTOZOOM_IS_RUNNING_STRING in fhandle.read():
-                    return
-        raise error.TestFail('AutoZoom not running on device.')
-
-    def get_usb_device_dirs(self):
-        """Gets usb device dirs from _USB_DIR path.
-
-        @returns list of usb device directory paths.
-        """
-        usb_dir_list = list()
-        cmd = 'ls %s' % _USB_DIR
-        cmd_output = self._host.run(cmd).stdout.strip().split('\n')
-        for d in cmd_output:
-            usb_dir_list.append(os.path.join(_USB_DIR, d))
-        return usb_dir_list
-
-
-    def file_exists_on_host(self, path):
-        """
-        Checks if file exists on host.
-
-        @param path: File path
-        @returns True or False
-        """
-        return self._host.run('ls %s' % path,
-                              ignore_status=True).exit_status == 0
-
-
-    def check_peripherals(self, peripheral_dict):
-        """
-        Check and verify correct peripherals are attached.
-
-        @param peripheral_dict: dict of peripherals that should be connected
-        """
-        usb_dir_list = self.get_usb_device_dirs()
-        peripherals_found = list()
-        for d_path in usb_dir_list:
-            file_name = os.path.join(d_path, 'product')
-            if self.file_exists_on_host(file_name):
-                peripherals_found.append(self._host.run(
-                        'cat %s' % file_name).stdout.strip())
-
-        logging.info('Attached peripherals: %s', peripherals_found)
-
-        for peripheral in peripheral_dict:
-            if peripheral not in peripherals_found:
-                raise error.TestFail('%s not found.' % peripheral)
-
-
-    def run_once(self, session_length, peripheral_dict):
-        """Runs the sanity test."""
-        self.cfm_facade.wait_for_telemetry_commands()
-        self.check_peripherals(peripheral_dict)
-        self.cfm_facade.start_meeting_session()
-        time.sleep(_LONG_TIMEOUT)
-        self.cfm_facade.end_meeting_session()
-        self.verify_autozoom_running_in_packaged_app_logs()
-
diff --git a/server/site_tests/enterprise_CFM_AutotestSmokeTest/control b/server/site_tests/enterprise_CFM_AutotestSmokeTest/control
deleted file mode 100644
index 0ce9665..0000000
--- a/server/site_tests/enterprise_CFM_AutotestSmokeTest/control
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "dtosic@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_AutotestSmokeTest"
-PURPOSE = ("Server-side smoke test used for tracking the stability of the "
-           "Autotest framework on CFM platforms.")
-CRITERIA = "Fails if the Autotest framework doesn't work as expected."
-ATTRIBUTES = "suite:hotrod,suite:bluestreak-pre-cq"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "server"
-BUG_TEMPLATE = {
-    "labels": ["OS-Chrome"],
-}
-
-DOC = """
-This test tracks the stability of the Autotest framework for server tests.
-The test opens a web browser, loads 'chrome://version' and verifies that nothing
-crashes.
-"""
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_CFM_AutotestSmokeTest', host=host)
-
-
-parallel_simple(run_test, machines)
-
diff --git a/server/site_tests/enterprise_CFM_AutotestSmokeTest/enterprise_CFM_AutotestSmokeTest.py b/server/site_tests/enterprise_CFM_AutotestSmokeTest/enterprise_CFM_AutotestSmokeTest.py
deleted file mode 100644
index 4ff19bf..0000000
--- a/server/site_tests/enterprise_CFM_AutotestSmokeTest/enterprise_CFM_AutotestSmokeTest.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-
-class enterprise_CFM_AutotestSmokeTest(test.test):
-    """
-    Server side autotest smoke test for CfM platforms.
-    Starts the Chrome browser, opens 'chrome://version' and then closes the tab.
-    """
-    version = 1
-
-
-    def run_once(self, host):
-        """Runs the smoke test."""
-        factory = remote_facade_factory.RemoteFacadeFactory(host)
-        browser_facade = factory.create_browser_facade()
-        tab_descriptor = browser_facade.new_tab('chrome://version')
-        browser_facade.close_tab(tab_descriptor)
-
diff --git a/server/site_tests/enterprise_CFM_Aver520Updater/aver_520_18.02/0.0.0018.02.dat b/server/site_tests/enterprise_CFM_Aver520Updater/aver_520_18.02/0.0.0018.02.dat
deleted file mode 100644
index 086e7b3..0000000
--- a/server/site_tests/enterprise_CFM_Aver520Updater/aver_520_18.02/0.0.0018.02.dat
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_Aver520Updater/control.cam520 b/server/site_tests/enterprise_CFM_Aver520Updater/control.cam520
deleted file mode 100644
index 6ff1028..0000000
--- a/server/site_tests/enterprise_CFM_Aver520Updater/control.cam520
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros.cfm.usb import cfm_usb_devices
-from autotest_lib.server import utils
-
-AUTHOR = "shijinabraham@chromium.org"
-NAME = "enterprise_CFM_Aver520Updater.cam520"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-ATTRIBUTES = "suite:hotrod"
-TEST_TYPE = "server"
-DEPENDENCIES = "aver-cam520"
-
-
-DOC = """
-This test verifies that the Aver CAM520 camera firmware updater is working
-as intended. This test performs the following:
-- Make the rootfs writable.
-- Backup the original firmware.
-- Copy older firmware bundled with test.
-- Force upgrade the Aver device to older firmware.
-- Powercycle the usb port to trigger the firmware updater.
-- Confirm firmware has been updated.
-- Cleanup
-This test will work on guado and fizz Chromeboxes with an Aver CAM520 camera
-connected.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_test(machine):
-    test_name = "enterprise_CFM_Aver520Updater"
-    host = hosts.create_host(machine, servo_args=None)
-    job.run_test(test_name, host=host, camera=cfm_usb_devices.AVER_CAM520_CAMERA)
-
-parallel_simple(run_test, machines)
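For context: utils.args_to_dict(args), called in this and several other control files in this diff, is not included here. A minimal sketch of an equivalent helper, assumed from how `args` is used (not the real autotest implementation):

    def args_to_dict_sketch(args_list):
        """Parse a list of 'key=value' strings into a dict (illustrative only)."""
        parsed = {}
        for item in args_list:
            if '=' in item:
                key, value = item.split('=', 1)
                parsed[key.strip()] = value.strip()
        return parsed

    # Example: args_to_dict_sketch(['servo_host=labstation1', 'servo_port=9999'])
    # -> {'servo_host': 'labstation1', 'servo_port': '9999'}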
diff --git a/server/site_tests/enterprise_CFM_Aver520Updater/control.vc520 b/server/site_tests/enterprise_CFM_Aver520Updater/control.vc520
deleted file mode 100644
index ad8ac79..0000000
--- a/server/site_tests/enterprise_CFM_Aver520Updater/control.vc520
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros.cfm.usb import cfm_usb_devices
-from autotest_lib.server import utils
-
-
-AUTHOR = "shijinabraham@chromium.org"
-NAME = "enterprise_CFM_Aver520Updater.vc520"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-ATTRIBUTES = "suite:hotrod"
-TEST_TYPE = "server"
-DEPENDENCIES = "aver-vc520"
-
-
-DOC = """
-This test verifies that the Aver VC520 camera firmware updater is working
-as intended. This test performs the following:
-- Make the rootfs writable.
-- Backup the original firmware.
-- Copy older firmware bundled with test.
-- Force upgrade the Aver device to older firmware.
-- Powercycle the usb port to trigger the firmware updater.
-- Confirm firmware has been updated.
-- Cleanup
-This test will work on guado and fizz Chromeboxes with an Aver VC520 camera
-connected.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_test(machine):
-    test_name = "enterprise_CFM_Aver520Updater"
-    host = hosts.create_host(machine, servo_args=None)
-    job.run_test(test_name, host=host, camera=cfm_usb_devices.AVER_VC520_CAMERA)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_Aver520Updater/enterprise_CFM_Aver520Updater.py b/server/site_tests/enterprise_CFM_Aver520Updater/enterprise_CFM_Aver520Updater.py
deleted file mode 100644
index 8a7b012..0000000
--- a/server/site_tests/enterprise_CFM_Aver520Updater/enterprise_CFM_Aver520Updater.py
+++ /dev/null
@@ -1,403 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Autotest for Aver VC520/CAM520 camera firmware updater."""
-
-from __future__ import print_function
-
-import logging
-import os
-import re
-import time
-
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import power_cycle_usb_util
-from autotest_lib.client.common_lib.cros.cfm.usb import usb_device_collector
-from autotest_lib.server import test
-from autotest_lib.server.cros import filesystem_util
-
-
-FW_PATH_BASE = '/lib/firmware'
-FW_PKG_ORIGIN = 'aver'
-FW_PKG_BACKUP = 'aver_backup'
-FW_PKG_TEST = 'aver_520_18.02'
-LOG_FILE = '/tmp/aver-updater.log'
-POWER_CYCLE_WAIT_TIME_SEC = 240
-
-
-class enterprise_CFM_Aver520Updater(test.test):
-    """
-    Aver camera firmware test on Chrome For Meeting devices
-    This test works for both Aver VC520 and CAM520.
-    The test follows the following steps
-        1) Check if the filesystem is writable
-           If not make the filesystem writable and reboot
-        2) Backup the existing firmware file on DUT
-        3) Copy the older firmware files to DUT
-        4) Force update older firmware on Aver Camera
-        5) Restore the original firmware files on DUT
-        4) Power cycle usb port to simulate unplug/replug of device which
-           should initiate a firmware update
-        5) Wait for firmware update to finish and check firmware version
-        6) Cleanup
-
-    """
-
-    version = 1
-
-    def initialize(self, host, camera):
-        """
-        Initializes the class.
-
-        Stores the firmware file path.
-        Gets the board type.
-        Reads the current firmware versions.
-        """
-
-        self.host = host
-        self.camera = camera
-        self.fw_path_test = os.path.join(FW_PATH_BASE,
-                                         FW_PKG_TEST)
-        self.fw_path_origin = os.path.join(FW_PATH_BASE,
-                                           FW_PKG_ORIGIN)
-        self.fw_path_backup = os.path.join(FW_PATH_BASE,
-                                           FW_PKG_BACKUP)
-        self.board = self.host.get_board().split(':')[1]
-
-        self.device_collector = usb_device_collector.UsbDeviceCollector(
-            self.host)
-
-        self.vid_pid = self.camera.vid_pid
-        self.usb_spec = self.camera.get_usb_device_spec(self.vid_pid)
-
-        self.org_fw_ver = self.get_image_fw_ver()
-
-    def cleanup(self):
-        """
-        Cleanups after tests.
-
-        Removes the test firmware.
-        Restores the original firmware files.
-        Flashes the camera to original firmware if needed.
-        """
-
-        # Delete test firmware package.
-        cmd = 'rm -rf {}'.format(self.fw_path_test)
-        self.host.run(cmd)
-
-        # Delete the symlink created.
-        cmd = 'rm {}'.format(self.fw_path_origin)
-        self.host.run(cmd)
-
-        # Move the backup package back.
-        cmd = 'mv {} {}'.format(self.fw_path_backup, self.fw_path_origin)
-        self.host.run(cmd)
-
-        # Do not leave the camera with test (older) firmware.
-        if not self.is_device_firmware_equal_to(self.org_fw_ver):
-            logging.debug('Aver 520 camera has old firmware after the test. '
-                          'Flashing new firmware.')
-            self.flash_fw()
-
-        super(enterprise_CFM_Aver520Updater, self).cleanup()
-
-    def _run_cmd(self, command):
-        """
-        Runs command line on DUT, wait for completion and return the output.
-
-        @param command: command line to run in dut.
-
-        @returns the command output
-        """
-
-        logging.debug('Execute: %s', command)
-
-        result = self.host.run(command, ignore_status=True)
-        output = result.stderr if result.stderr else result.stdout
-        logging.debug('Output: %s', output)
-        return output
-
-    def fw_ver_from_output_str(self, cmd_output):
-        """
-        Parse firmware version of aver-updater output.
-
-        aver-updater output differs slightly for image_version and
-        device_version.
-        For image_version strip ".dat" from output
-        This function will fail if version is not in the format
-        x.x.xxxx.xx where x is in  [0-9]
-
-        The actual output is given below.
-
-        aver-updater --image_version
-        [INFO:main.cc(79)] image_version: 0.0.0018.07.dat
-
-        aver-updater --device_version
-        [INFO:main.cc(71)] device_version: 0.0.0018.08
-
-        """
-
-        logging.debug('Parsing output from updater %s', cmd_output)
-        if 'Error(2) opening /lib/firmware/aver/' in cmd_output:
-            raise error.TestFail('Aver firmware image not found on DUT')
-
-        if ('device_version' not in cmd_output and
-            'image_version' not in cmd_output):
-            raise error.TestFail('Parsing aver firmware version output failed')
-
-        version = ''
-        output = cmd_output.split('\n')
-        for line in output:
-            logging.debug('parsing line %s from output', line)
-            if 'device_version' not in line and 'image_version' not in line:
-                continue
-            parts = line.split(' ')
-
-            if parts[1] == 'device_version:':
-                version = parts[2]
-            elif parts[1] == 'image_version:':
-                version = parts[2].strip('.dat')
-            else:
-                raise error.TestFail('Unexpected output from updater %s'
-                                     % parts)
-
-            version = version.strip('\0')  #  Remove null characters
-            logging.debug('Version parsed from output is %s', version)
-
-            if not bool(re.match(r'^\d\.\d\.\d\d\d\d\.\d\d$', version)):
-                logging.debug('parsed version is %s ', version)
-                raise error.TestFail('Version %s is not in the '
-                                     'expected format' % version)
-
-            logging.debug('Version is %s', str(version))
-            return version
-
-    def get_updater_output(self, cmd):
-        """Get updater output while avoiding transient failures."""
-
-        NUM_RETRIES = 5
-        WAIT_TIME_SEC = 20
-        for _ in range(NUM_RETRIES):
-            output = self._run_cmd(cmd)
-            if ('Open hid fd fail' in output or
-                'query data size fail' in output or
-                'There is another aver-updater running.' in output or
-                'Failed to open the device' in output):
-                time.sleep(WAIT_TIME_SEC)
-                continue
-            return output
-
-    def get_image_fw_ver(self):
-        """Get the version of firmware on DUT."""
-
-        output = self.get_updater_output('aver-updater --image_version'
-                                         ' --log_to=stdout --lock')
-        return self.fw_ver_from_output_str(output)
-
-    def get_device_fw_ver(self):
-        """Get the version of firmware on Aver 520 camera."""
-
-        output = self.get_updater_output('aver-updater --device_version'
-                                         ' --log_to=stdout --lock')
-        return self.fw_ver_from_output_str(output)
-
-    def copy_test_firmware(self):
-        """Copy test firmware from server to DUT."""
-
-        current_dir = os.path.dirname(os.path.realpath(__file__))
-        src_firmware_path = os.path.join(current_dir, FW_PKG_TEST)
-        dst_firmware_path = FW_PATH_BASE
-        logging.info('Copy firmware from (%s) to (%s).', src_firmware_path,
-                     dst_firmware_path)
-        self.host.send_file(src_firmware_path, dst_firmware_path,
-                            delete_dest=True)
-
-    def trigger_updater(self):
-        """Trigger udev rule to run fw updater by power cycling the usb."""
-
-        try:
-            vid = self.camera.vendor_id
-            pid = self.camera.product_id
-            power_cycle_usb_util.power_cycle_usb_vidpid(self.host, self.board,
-                                                        vid, pid)
-        except KeyError:
-            raise error.TestFail('Could not find target device: '
-                                 '{}'.format(self.camera))
-
-    def wait_for_aver_camera(self, wait_time=30):
-        """
-        Wait for Aver 520 camera to be enumerated.
-
-        Check if a device with given (vid,pid) is present.
-        Timeout after wait_time seconds. Default 30 seconds
-        """
-
-        TIME_SLEEP = 10
-        NUM_ITERATIONS = max(wait_time / TIME_SLEEP, 1)
-
-        logging.debug('Waiting for Aver 520 camera')
-        for _ in range(NUM_ITERATIONS):
-            if self.device_collector.get_devices_by_spec(self.usb_spec):
-                logging.debug('Aver 520 camera detected')
-                return
-            else:
-                logging.debug('Aver 520 camera not detected. '
-                              'Waiting for (%s) seconds', TIME_SLEEP)
-                time.sleep(TIME_SLEEP)
-
-        logging.error('Unable to detect the device after (%s) seconds. '
-                      'Timing out...\n Target device %s not detected',
-                      wait_time, self.camera)
-        raise error.TestFail('Target device not detected')
-
-    def setup_fw(self, firmware_package):
-        """Setup firmware package that is going to be used for updating."""
-
-        firmware_path = os.path.join(FW_PATH_BASE, firmware_package)
-        cmd = 'ln -sfn {} {}'.format(firmware_path, self.fw_path_origin)
-        logging.debug('executing cmd %s ', cmd)
-        self.host.run(cmd)
-
-    def flash_fw(self, force=False):
-        """Flash certain firmware to device.
-
-        Run logitech firmware updater on DUT to flash the firmware setuped
-        to target device (PTZ Pro 2).
-
-        @param force: run with force update, will bypass fw version check.
-
-        """
-
-        cmd = ('/usr/sbin/aver-updater --log_to=stdout --update'
-               ' --lock')
-        if force:
-            cmd += ' --force'
-        output = self.get_updater_output(cmd)
-        return output
-
-    def print_fw_version(self, version, info_str=''):
-        """Pretty print Aver 520 camera firmware version."""
-
-        if info_str:
-            print(info_str, end="")
-        print(' Firmware version:', version)
-
-    def is_device_firmware_equal_to(self, expected_ver):
-        """Check that the device fw version is equal to given version."""
-
-        device_fw_version = self.get_device_fw_ver()
-        if  device_fw_version != expected_ver:
-            logging.error('Device firmware version is not the expected version')
-            self.print_fw_version(device_fw_version, 'Device firmware version')
-            self.print_fw_version(expected_ver, 'Expected firmware version')
-            return False
-
-        return True
-
-    def flash_old_firmware(self):
-        """Flash old (test) version of firmware on the device."""
-
-        # Flash old FW to device.
-        self.setup_fw(FW_PKG_TEST)
-        test_fw_ver = self.get_image_fw_ver()
-        self.print_fw_version(test_fw_ver, 'Test firmware version')
-        logging.debug('flashing test firmware on the device')
-        output = self.flash_fw(force=True)
-        time.sleep(POWER_CYCLE_WAIT_TIME_SEC)
-        with open(LOG_FILE, 'w') as f:
-            delim = '-' * 8
-            f.write('{}Log info for writing old firmware{}'
-                    '\n'.format(delim, delim))
-            f.write(output)
-        if not self.is_device_firmware_equal_to(test_fw_ver):
-            raise error.TestFail('Flashing old firmware failed')
-        logging.info('Device flashed with test firmware')
-
-    def backup_original_firmware(self):
-        """Backup existing firmware on DUT."""
-        # Copy old FW to device.
-        cmd = 'mv {} {}'.format(self.fw_path_origin, self.fw_path_backup)
-        self.host.run(cmd)
-
-    def is_updater_running(self):
-        """Checks if the aver-updater is running."""
-
-        cmd = 'aver-updater --lock --device_version --log_to=stdout'
-        output = self._run_cmd(cmd)
-        return 'There is another aver-updater running. Exiting now...' in output
-
-    def wait_for_updater(self):
-        """Wait aver-updater to stop or timeout after 6 minutes."""
-
-        NUM_ITERATION = 12
-        WAIT_TIME_SEC = 30
-        logging.debug('Wait for any currently running updater to finish')
-        for _ in range(NUM_ITERATION):
-            if self.is_updater_running():
-                logging.debug('aver-updater is running. '
-                              'Waiting for %s seconds', WAIT_TIME_SEC)
-                time.sleep(WAIT_TIME_SEC)
-            else:
-                logging.debug('aver-updater not running')
-                return
-        logging.error('aver-updater is still running after 6 minutes')
-
-    def test_firmware_update(self):
-        """Trigger firmware updater and check device firmware version."""
-
-        # Simulate hotplug to run FW updater.
-        logging.info('Setup original firmware')
-        self.setup_fw(FW_PKG_BACKUP)
-        self.print_fw_version(self.get_image_fw_ver(), 'Firmware on disk')
-        logging.info('Simulate hot plugging the device')
-        self.trigger_updater()
-        self.wait_for_aver_camera()
-
-        # The firmware check will fail if the check runs in a short window
-        # between the device being detected and the firmware updater starting.
-        # Adding a delay to reduce the chance of that scenario.
-        logging.info('Waiting for the updater to update the firmware')
-        time.sleep(POWER_CYCLE_WAIT_TIME_SEC)
-
-        self.wait_for_updater()
-
-        if not self.is_device_firmware_equal_to(self.org_fw_ver):
-            raise error.TestFail('Camera not updated to new firmware')
-        logging.info('Firmware update was completed successfully')
-
-    def run_once(self):
-        """
-        Entry point for test.
-
-        The following actions are performed in this test.
-        - Device is flashed with older firmware.
-        - Power cycle the usb port to simulate hotplug and start the updater.
-        - Check that the device is updated with newer firmware.
-        """
-
-        # Check if updater is already running
-        logging.info('Testing firmware update on Aver %s camera',
-                     self.camera)
-        logging.info('Confirm that camera is present')
-
-        self.wait_for_aver_camera(wait_time=0)
-
-        self.wait_for_updater()
-
-        self.print_fw_version(self.org_fw_ver,
-                              'Original firmware version on DUT')
-        self.print_fw_version(self.get_device_fw_ver(),
-                              'Firmware version on device')
-
-        filesystem_util.make_rootfs_writable(self.host)
-        self.backup_original_firmware()
-
-        # Flash test firmware version
-        self.copy_test_firmware()
-        self.flash_old_firmware()
-
-        # Test firmware update
-        self.test_firmware_update()
-        logging.info('Aver %s camera firmware updater '
-                     'test was successful', self.camera)
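For context: power_cycle_usb_util.power_cycle_usb_vidpid(), used by trigger_updater() above, is not part of this diff. One way to force re-enumeration of a USB device by vid:pid from the server side is sketched below; this is an assumption for illustration, not necessarily how the real helper (which may toggle hub port power) works. The `host` argument is assumed to be the autotest host object with a .run() method, as used throughout this test.

    import time

    def force_usb_reenumeration(host, vid, pid, settle_secs=5):
        """Toggle the sysfs 'authorized' flag so the kernel re-enumerates the device."""
        find_cmd = 'grep -l -i %s /sys/bus/usb/devices/*/idVendor' % vid
        result = host.run(find_cmd, ignore_status=True)
        for vendor_file in result.stdout.split():
            dev_dir = vendor_file.rsplit('/', 1)[0]
            product = host.run('cat %s/idProduct' % dev_dir).stdout.strip()
            if product.lower() != pid.lower():
                continue
            host.run('echo 0 > %s/authorized' % dev_dir)
            time.sleep(settle_secs)
            host.run('echo 1 > %s/authorized' % dev_dir)
            return True
        return False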
diff --git a/server/site_tests/enterprise_CFM_BizlinkUpdater/control b/server/site_tests/enterprise_CFM_BizlinkUpdater/control
deleted file mode 100644
index 9612c68..0000000
--- a/server/site_tests/enterprise_CFM_BizlinkUpdater/control
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "frankhu@chromium.org"
-NAME = "enterprise_CFM_BizlinkUpdater"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-JOB_RETRIES = 3
-
-DOC = """
-Tests the Bizlink FW updater. The test does the following:
-1) flashes an older version megachips firmware to the Bizlink dongle
-2) reboots the device to trigger the firmware updater to flash the new firmware
-3) verifies udev triggered the Bizlink FW updater to flash the latest FW to the
- dongle by checking the FW version on the dongle
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_test(machine):
-    host = hosts.create_host(machine, servo_args=None)
-    job.run_test(NAME, host=host)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_BizlinkUpdater/enterprise_CFM_BizlinkUpdater.py b/server/site_tests/enterprise_CFM_BizlinkUpdater/enterprise_CFM_BizlinkUpdater.py
deleted file mode 100644
index 55c79b8..0000000
--- a/server/site_tests/enterprise_CFM_BizlinkUpdater/enterprise_CFM_BizlinkUpdater.py
+++ /dev/null
@@ -1,169 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Auto test for Bizlink firmware updater functionality and udev rule."""
-
-from __future__ import print_function
-import logging
-import os
-import re
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-UPDATER_WAIT_TIME = 180     # seconds
-CMD_CHECK_FW_UPDATE_OUTPUT = 'grep bizlink-updater /var/log/messages'
-FW_PATH = '/lib/firmware/bizlink/'
-OLD_FW_NAME = 'megachips-firmware-old.bin'
-NEW_FW_NAME = 'megachips-firmware.bin'
-
-
-class enterprise_CFM_BizlinkUpdater(test.test):
-    """
-    Bizlink dongle firmware updater functionality test on a Chromebox.
-
-    The procedure of the test is:
-    1. Flash the old FW version to the device.
-    2. Reboot the device, which should trigger the udev rule and run the
-         updater.
-    3. Wait for the updater to finish.
-    4. Run the fw updater again and verify that the FW on the device matches
-         the latest FW on the system by checking the output.
-    """
-
-    version = 1
-
-
-    def initialize(self, host):
-        self.host = host
-        self.old_fw_path = os.path.join(FW_PATH, OLD_FW_NAME)
-        self.new_fw_path = os.path.join(FW_PATH, NEW_FW_NAME)
-
-    def cleanup(self):
-        cmd = 'rm -f {}'.format(self.old_fw_path)
-        self.host.run(cmd)
-        super(enterprise_CFM_BizlinkUpdater, self).cleanup()
-
-    def check_update_result(self, expected_output=''):
-        """
-        Checks FW update result.
-
-        Queries the syslog and checks if expected_output occurs in it.
-
-        @param expected_output: the string to query syslog for.
-
-        @returns True if expected_output is in syslog. Otherwise return false.
-        """
-        result = self.host.run(CMD_CHECK_FW_UPDATE_OUTPUT)
-        # Only check last 5 logs for the most recent run.
-        messages = result.stdout.strip().split('\n')
-        if len(messages) < 5:
-            return False
-        messages = ''.join(messages[-5:])
-        if expected_output in messages:
-            return True
-        else:
-            return False
-
-    def convert_rootfs_writable(self):
-        """
-        Removes rootfs verification on DUT and reboots.
-        """
-        logging.info('Disabling rootfs verification...')
-        self.remove_rootfs_verification()
-
-        logging.info('Rebooting...')
-        self.host.reboot()
-
-    def remove_rootfs_verification(self):
-        """Removes rootfs verification."""
-        # 2 & 4 are default partitions, and the system boots from one of them.
-        # Code from chromite/scripts/deploy_chrome.py
-        KERNEL_A_PARTITION = 2
-        KERNEL_B_PARTITION = 4
-
-        cmd_template = ('/usr/share/vboot/bin/make_dev_ssd.sh --partitions %d '
-                        '--remove_rootfs_verification --force')
-        for partition in (KERNEL_A_PARTITION, KERNEL_B_PARTITION):
-            cmd = cmd_template % partition
-            self.host.run(cmd)
-
-    def is_filesystem_readwrite(self):
-        """Checks if the root file system is read-writable.
-
-        Queries the DUT's filesystem /dev/root, checks for keyword 'rw'.
-
-        @returns True if /dev/root is read-writable. False otherwise.
-        """
-        cmd = 'cat /proc/mounts | grep "/dev/root"'
-        result = self.host.run(cmd)
-        if result.stderr:
-            output = result.stderr
-        else:
-            output = result.stdout
-        fields = re.split(' |,', output)
-        return len(fields) >= 4 and fields[3] == 'rw'
-
-    def copy_firmware(self):
-        """Copies test firmware from server to DUT."""
-        current_dir = os.path.dirname(os.path.realpath(__file__))
-        src_firmware_path = os.path.join(current_dir, OLD_FW_NAME)
-        dst_firmware_path = FW_PATH
-        logging.info('Copy firmware from {} to {}.'.format(src_firmware_path,
-                                                           dst_firmware_path))
-        self.host.send_file(src_firmware_path, dst_firmware_path,
-                            delete_dest=True)
-
-    def triger_updater(self):
-        """Trigers udev rule to run fw updater."""
-        self.host.reboot()
-
-    def flash_fw(self, fw_path):
-        """
-        Flashes certain firmware to device.
-
-        Runs the Bizlink firmware updater on the DUT to flash the firmware given
-        by fw_path to target device.
-
-        @param fw_path: the path to the firmware to flash.
-
-        """
-        cmd_run_updater = ('/usr/sbin/bizlink-updater --update=true '
-                           '--fw_path={}'.format(fw_path))
-        logging.info('executing {}'.format(cmd_run_updater))
-        self.host.run(cmd_run_updater)
-
-    def run_once(self):
-        # Make the DUT filesystem writable.
-        if not self.is_filesystem_readwrite():
-            logging.info('DUT root file system is not read-writable. '
-                         'Converting it read-writable...')
-            self.convert_rootfs_writable()
-        else:
-            logging.info('DUT is read-writable.')
-
-        # Copy old FW to device.
-        self.copy_firmware()
-
-        # Flash old FW to device.
-        self.flash_fw(self.old_fw_path)
-        expect_output = 'FW update succeed.'
-        succeed = self.check_update_result(expected_output=expect_output)
-        if not succeed:
-            raise error.TestFail('Expect \'{}\' in output, '
-                                 'but didn\'t find it.'.format(expect_output))
-
-        self.triger_updater()
-
-        # Wait for fw updater to finish.
-        time.sleep(UPDATER_WAIT_TIME)
-
-        # Try flash the new firmware, should detect same fw version.
-        expect_output = 'Same FW version, no update required.'
-        self.flash_fw(self.new_fw_path)
-        succeed = self.check_update_result(expected_output=expect_output)
-        if not succeed:
-            raise error.TestFail('Expect {} in output '
-                                 'but didn\'t find it.'.format(expect_output))
-
diff --git a/server/site_tests/enterprise_CFM_BizlinkUpdater/megachips-firmware-old.bin b/server/site_tests/enterprise_CFM_BizlinkUpdater/megachips-firmware-old.bin
deleted file mode 100644
index aceaec5..0000000
--- a/server/site_tests/enterprise_CFM_BizlinkUpdater/megachips-firmware-old.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_CEC/chameleon_cecservice/cec_service b/server/site_tests/enterprise_CFM_CEC/chameleon_cecservice/cec_service
deleted file mode 100755
index faafc71..0000000
--- a/server/site_tests/enterprise_CFM_CEC/chameleon_cecservice/cec_service
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/python2
-
-import sys
-import time
-from time import sleep
-import util
-import it6803
-
-usage = """\
-Usage:
-  cec_service               -- print command usage
-  cec_service start         -- run cec service
-"""
-
-powerOn = True
-
-def main(cmdline):
-    args = [''] * 4
-    for i, x in enumerate(cmdline):
-        args[i] = x
-    cmd = args[1]
-
-    if cmd == '': cmd = 'help'
-    fname = 'cmd_' + cmd
-
-    if fname in globals():
-        if args[2] == '':
-            globals()[fname]()
-        else:
-            globals()[fname](args[2])
-    else:
-        print 'Unknown command', cmd
-
-def cmd_start():
-    it6803.cec_open()
-    it6803.cec_init()
-    while True:
-        code = it6803.cec_msg_receive()
-        if code is not None:
-            if code == 0x36:
-                handle_standBy()
-            elif code == 0x04:
-                handle_imageOn()
-            elif code == 0x8F:
-                handle_powerStatus()
-            else:
-                print 'Unknown command'
-    it6803.cec_close()
-    return
-
-def handle_standBy():
-    global powerOn
-    powerOn = False
-    return
-
-def handle_imageOn():
-    global powerOn
-    powerOn = True
-    return
-
-def handle_powerStatus():
-    global powerOn
-    print 'power status: {}'.format(powerOn)
-    if powerOn:
-        it6803.cec_msg_poweron()
-    else:
-        it6803.cec_msg_poweroff()
-    it6803.cec_transmit()
-    return
-
-
-if __name__ == '__main__':
-    main(sys.argv)
diff --git a/server/site_tests/enterprise_CFM_CEC/chameleon_cecservice/it6803.py b/server/site_tests/enterprise_CFM_CEC/chameleon_cecservice/it6803.py
deleted file mode 100755
index c44e41b..0000000
--- a/server/site_tests/enterprise_CFM_CEC/chameleon_cecservice/it6803.py
+++ /dev/null
@@ -1,287 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Reference[1]: IT680x example code:
-# https://drive.google.com/corp/drive/u/0/folders/0B8Lcp5hqbjaqaE5WdDA5alVWOXc
-
-# Reference[2]: IT6803 Programming Guide:
-# https://docs.google.com/viewer?a=v&pid=sites&srcid=\
-# Y2hyb21pdW0ub3JnfGRldnxneDoyNGVmNGFiMDE4ZWJiZDM2
-
-# This code is a library for using IT680X chip in chameleon.
-
-from __future__ import print_function
-
-import sys
-import util
-from time import sleep
-
-usage = """\
-Usage:
-  it6803                        -- print command usage
-  it6803 cec_reg_print          -- print all cec registers value
-  it6803 cec_msg_receive        -- print receiving cec message
-  it6803 cec_msg {cmd}          -- send cec message
-"""
-
-QUEUE_SIZE = 3
-q_head = 0
-q_tail = 0
-regTxOutState = 3
-
-logicalAddr = 0
-initiatorAddr = 0x0F
-cecTxState = 0
-
-txCmdBuf = [0x00] * 19
-rxCecBuf = [0x00] * 19
-queue = [[0x00 for i in range(19)] for j in range(QUEUE_SIZE)]
-
-# Chameleon register address
-I2C_HDMI = 0x48
-I2C_CEC = 0x4A
-
-# Chameleon CEC control registers
-# (name starts with REG is register addr, followings are values for this reg)
-REG06         = 0x06
-REG_EMPTY     = 0x00
-REG07         = 0x07
-ENABLE_CEC_INTERRUPT_PIN = 0x40
-
-REG08         = 0x08
-FIRE_FRAME          = 0x80
-DEBUG_CEC_CLEAR     = 0x40
-CEC_SCHMITT_TRIGGER = 0x08
-CEC_INTERRUPT       = 0x01
-
-REG09         = 0x09
-REGION_SELECT    = 0x40
-INITAITOR_RX_CEC = 0x20
-ACKNOWLEDGE      = 0x01
-
-REG_MIN_BIT   = 0x0B
-REG_TIME_UNIT = 0x0C
-
-REG0F         = 0x0F
-IO_PULL_UP    = 0x50
-
-REG_TARG_ADDR = 0x22
-REG_MSCOUNT_L = 0x45
-REG_MSCOUNT_M = 0x46
-REG_MSCOUNT_H = 0x47
-REF_INT_STATUS = 0x4C
-
-def main(cmdline):
-    """ Main function. """
-    args = [''] * 4
-    for i, x in enumerate(cmdline):
-        args[i] = x
-    cmd = args[1]
-
-    if cmd == '': cmd = 'help'
-    fname = 'cmd_' + cmd
-
-    cec_open()
-    if fname in globals():
-        if args[2] == '':
-            globals()[fname]()
-        else:
-            globals()[fname](args[2])
-    else:
-        print('Unknown command', cmd)
-    cec_close()
-
-
-def cmd_help():
-    """ Print help message. """
-    print(usage)
-
-
-def cec_open():
-    """ Enable cec port. """
-    # enable IT6803 CEC port: enable cec clock and assign slave addr
-    i2cset(I2C_HDMI, 0x0E, 0xFF)
-    i2cset(I2C_HDMI, 0x86, 0x95)
-
-def cec_close():
-    """ Close cec port. """
-    # disable cec slave addr
-    i2cset(I2C_HDMI, 0x86, 0x94)
-
-
-def cec_init():
-    """ Initialize cec port in chameleon. """
-    # initial CEC register. From reference[1] Ln480
-
-    # enable it680x cec
-    i2cset(I2C_CEC, 0xF8, 0xC3)
-    i2cset(I2C_CEC, 0xF8, 0xA5)
-    q_head = 0
-    q_tail = 0
-    regTxOutState = 3
-
-    # get 100ms timer, according to ref [1,2]
-    i2cset(I2C_CEC, REG09, ACKNOWLEDGE)
-    sleep(0.099)
-    i2cset(I2C_CEC, REG09, REG_EMPTY)
-    high  = util.i2c_read(0, I2C_CEC, REG_MSCOUNT_H, 1)[0] * 0x10000
-    mid   = util.i2c_read(0, I2C_CEC, REG_MSCOUNT_M, 1)[0] * 0x100
-    low   = util.i2c_read(0, I2C_CEC, REG_MSCOUNT_L, 1)[0]
-    tus = (high + mid + low) / 1000
-    # print tus
-
-    # CEC configuration
-    i2cset(I2C_CEC, REG09, INITAITOR_RX_CEC | REGION_SELECT)
-    i2cset(I2C_CEC, REG_MIN_BIT, 0x14)
-    i2cset(I2C_CEC, REG_TIME_UNIT, tus)
-    i2cset(I2C_CEC, REG_TARG_ADDR, logicalAddr)
-    i2cset(I2C_CEC, REG08, CEC_SCHMITT_TRIGGER)
-    uc = util.i2c_read(0, I2C_CEC, REG09, 1)[0]
-    # i2cset(I2C_CEC, REG09, uc|0x02)
-    # cec_clr_int
-    i2cset(I2C_CEC, REG08, CEC_INTERRUPT|DEBUG_CEC_CLEAR|CEC_SCHMITT_TRIGGER)
-    i2cset(I2C_CEC, REG08, CEC_SCHMITT_TRIGGER|DEBUG_CEC_CLEAR)
-    # print 'logicalAddr: {}, TimeUnit: {}'.format(logicalAddr,tus)
-
-    # Enable CEC interrupt pin
-    reg07_val = util.i2c_read(0, I2C_CEC, REG07, 1)[0]
-    i2cset(I2C_CEC, REG07, reg07_val | ENABLE_CEC_INTERRUPT_PIN)
-
-    # Enable ALL interrupt mask
-    i2cset(I2C_CEC, REG06, REG_EMPTY)
-
-    # IO pull up enable
-    i2cset(I2C_CEC, REG0F, IO_PULL_UP)
-
-def cec_msg_receive():
-    """ Read message received. """
-    # 0x3F means all interrupts are on
-    cecInt = cec_reg_read(REF_INT_STATUS) & 0x3F
-    if 0 != (cecInt & 0x10):
-        if not cec_msg_read():
-            raise Exception('Queue is full!')
-    ## TODO check interrupt register Status
-    i2c_cec_set(REF_INT_STATUS, cecInt)
-    # Decode received message
-    return cec_decode()
-
-
-def cmd_cec_msg(message):
-    """ parent function for a cec message. """
-    cec_init()
-    fname = 'cec_msg_' + message
-    globals()[fname]()
-    cec_transmit()
-
-def cec_msg_standby():
-    """ Send a stand by message. """
-    # F = broadcast, 0x36 = standby message
-    cec_cmd_set(0xF, 0x36, None, None)
-    # other operations need more assignments
-
-def cec_msg_viewon():
-    """ Send a view on message. """
-    # 0 = TV, 0x04 = image on
-    cec_cmd_set(0x0, 0x04, None, None)
-
-def cec_msg_poweron():
-    """ Make a power on cec message. """
-    global initiatorAddr
-    # 0x90 = power status message
-    cec_cmd_set(initiatorAddr, 0x90, 0x00, None)
-
-def cec_msg_poweroff():
-    """ Make a power off cec message. """
-    global initiatorAddr
-    # 0x90 = power status message
-    cec_cmd_set(initiatorAddr, 0x90, 0x01, None)
-
-def cec_reg_read(offset):
-    """ read it6803's register value from i2c line. """
-    return util.i2c_read(0, I2C_CEC, offset, 1)[0]
-
-def cec_cmd_set(follower, txCmd, operand1, operand2):
-    """ Compose a cec message. """
-    # print 'follower: {}, cmd: {}'.format(follower, txCmd)
-    # TODO set variables
-    txCmdBuf[0] = 2
-    txCmdBuf[1] = (logicalAddr<<4) + follower
-    txCmdBuf[2] = txCmd
-    txCmdBuf[3] = 0
-    txCmdBuf[4] = 0
-    if operand1 is not None:
-        txCmdBuf[3] = operand1
-        txCmdBuf[0] = 3
-    if operand2 is not None:
-        txCmdBuf[4] = operand2
-        txCmdBuf[0] = 4
-    # print txCmdBuf
-    return
-
-def cec_transmit():
-    """ File a cec message out. """
-    # Assume the state is cecTransfer
-    # Set values from 0x10 to 0x23
-    i2c_cec_set(0x23, txCmdBuf[0])
-    for i in range (0, txCmdBuf[0]):
-        i2c_cec_set(0x10+i, txCmdBuf[i+1])
-
-    # Fire command
-    i2c_cec_set(REG08, FIRE_FRAME | CEC_SCHMITT_TRIGGER | DEBUG_CEC_CLEAR)
-    i2c_cec_set(REG08, CEC_SCHMITT_TRIGGER | DEBUG_CEC_CLEAR)
-    return
-
-def cec_msg_read():
-    """ Read incoming cec messages from memory. """
-    global q_head, q_tail
-    if (q_head % QUEUE_SIZE) != (q_tail % QUEUE_SIZE):
-        return False
-    q_tail += 1
-    i = q_tail % QUEUE_SIZE
-    # 0x30 is starting point for receiving message
-    data = util.i2c_read(0, I2C_CEC, 0x30, 19)
-    for j in range(1, 19):
-        queue[i][j] = data[j-1]
-    queue[i][0] = data[18]
-    return True
-
-def cec_decode():
-    """ Process incoming cec message. """
-    global q_head, q_tail, initiatorAddr
-    if (q_head % QUEUE_SIZE) == (q_tail % QUEUE_SIZE):
-        # Queue is empty
-        return
-    q_head += 1
-    rxCecBuf = queue[q_head % QUEUE_SIZE]
-    #print rxCecBuf
-
-    if (rxCecBuf[0] == 1):
-        if logicalAddr == (rxCecBuf[1] & 0x0F):
-            # eReportPhysicalAddress
-            return
-    # Validate message
-    initiatorAddr = (rxCecBuf[1] >> 4) & 0x0F
-    followerAddr = rxCecBuf[1] & 0x0F
-    print('Initiator: {} Follower: {}'.format(initiatorAddr, followerAddr))
-
-    if (rxCecBuf[2] == 0x04):
-        print('received image-view-on')
-    elif (rxCecBuf[2] == 0x36):
-        print('received standby')
-    else:
-        print('other command: {}'.format(rxCecBuf[2]))
-    return rxCecBuf[2]
-
-def i2cset(addr, offset, value):
-    """ set some register value via i2c line. """
-    util.i2c_write(0, addr, offset, [value])
-
-def i2c_cec_set(offset, value):
-    """ set it6803's register value via i2c line. """
-    i2cset(I2C_CEC, offset, value)
-
-if __name__ == '__main__':
-    main(sys.argv)
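For context: the 'util' module imported above (providing util.i2c_read and util.i2c_write on the chameleon) is not part of this diff. Below is a minimal sketch of equivalent helpers built on the i2c-tools CLI, with signatures inferred from the call sites; this is an assumption for illustration, not the chameleon's real util module.

    import subprocess

    def i2c_read(bus_id, slave_addr, offset, length):
        """Read `length` bytes starting at register `offset` (illustrative only)."""
        data = []
        for i in range(length):
            out = subprocess.check_output(
                    ['i2cget', '-y', str(bus_id), hex(slave_addr), hex(offset + i)])
            data.append(int(out.strip(), 16))
        return data

    def i2c_write(bus_id, slave_addr, offset, values):
        """Write the given byte values starting at register `offset`."""
        for i, value in enumerate(values):
            subprocess.check_call(
                    ['i2cset', '-y', str(bus_id), hex(slave_addr),
                     hex(offset + i), hex(value)])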
diff --git a/server/site_tests/enterprise_CFM_CEC/control b/server/site_tests/enterprise_CFM_CEC/control
deleted file mode 100644
index 8d61fff..0000000
--- a/server/site_tests/enterprise_CFM_CEC/control
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "dianwa@google.com"
-NAME = "enterprise_CFM_CEC"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-
-JOB_RETRIES = 0
-DEPENDENCIES = ""
-
-DOC = """
-This test exercises the CEC feature over the HDMI cable on Teemo.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run_test(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test(NAME, host=host)
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_CEC/enterprise_CFM_CEC.py b/server/site_tests/enterprise_CFM_CEC/enterprise_CFM_CEC.py
deleted file mode 100644
index d8d691c..0000000
--- a/server/site_tests/enterprise_CFM_CEC/enterprise_CFM_CEC.py
+++ /dev/null
@@ -1,226 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import re
-import os
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-CEC_COMMAND = "sudo -u chronos dbus-send --system --print-reply \
---type=method_call --dest=org.chromium.CecService   \
-/org/chromium/CecService org.chromium.CecService."
-CEC_CMD_STANDBY = "SendStandByToAllDevices"
-CEC_CMD_IMAGEON = "SendWakeUpToAllDevices"
-CEC_CMD_DISP_STATUS = "GetTvsPowerStatus"
-
-CHAMELEON_ROOT = '/home/root/'
-
-STATUS_ERROR = 0
-STATUS_ON = 3
-STATUS_OFF = 4
-STATUS_TO_ON = 5
-STATUS_TO_OFF = 6
-
-class enterprise_CFM_CEC(test.test):
-    """
-    Test CEC feature for display power control.
-    """
-
-    version = 1
-    # TODO: how to get whether it connects to chameleon board
-    chameleon_mode = True
-
-    def initialize(self):
-        """ initialize is a stub function."""
-        pass
-
-    # Run a command on the machine and return the result.
-    def _shcmd(self, cmd):
-        """ A simple wrapper for remote shell command execution.
-        @param cmd: shell command for Fizz
-        """
-        logging.info('CMD: [%s]', cmd)
-        try:
-            result = self._client.run(cmd)
-            if result is None:
-                return result
-            if result.stderr:
-                logging.info('CMD ERR:\n' + result.stderr)
-            logging.info('CMD OUT:\n' + result.stdout)
-            return result
-        except Exception as e:
-            logging.info('run command failed. ' + str(e))
-
-    def run_once(self, host=None):
-        """
-        Test scenario:
-
-            If the system does not support a CEC port, we simply raise an
-        exception.
-
-            Generally we use the built-in cecservice for this test. This
-        service supports multiple features, including turning the TV (display)
-        power on/off and monitoring its power status.
-
-            The test plan is as follows:
-            0.0 Copy two python files to chameleon
-            0.1 enable chameleon as hdmi mode
-            0.2 run cec service on chameleon
-
-            0.3 Make sure chrome box is running cecservice
-            0.4 Make sure chrome box's /dev/cecX is open
-            0.5 Run TV power status to check configuration correct
-            (end of step 0)
-
-
-            1.0 Send TV turn-off command
-            1.1 Check TV power status to check whether it is off.
-            (end of step 1)
-
-            2.0 Send TV turn-on command
-            2.1 Check TV power status to check whether it is on.
-            (end of step 2)
-            At the end of every step, we decide whether to stop the test
-        right away or continue with the next ones.
-            Note that we may turn the TV on first and then turn it off,
-        depending on its initial power status.
-
-            3.0 Stop cec service on chameleon
-            3.1 Remove python files from chameleon
-
-        @param host: the host object for the DUT.
-        """
-
-        self._client = host
-        self.chameleon = host.chameleon
-
-        ## TODO check same port
-        #Step 0.0 - 0.2
-        self.copy_cecservice()
-        self.cec_service_init()
-
-
-        # Step 0.3 - 0.5
-        if not self.is_cecservice_running():
-            self.cec_cleanup()
-            raise error.TestFail("CEC service is not running.")
-        if not self.is_cec_available():
-            self.cec_cleanup()
-            raise error.TestFail("/dev/cecX port is not open.")
-        status = self.check_display_status()
-        if STATUS_ERROR == status:
-            self.cec_cleanup()
-            raise error.TestFail("CEC communication is not good.")
-
-        # Step 1 & 2
-        if STATUS_ON == status:
-            self.test_turn_off()
-            if not self.chameleon_mode:
-                time.sleep(5)
-            self.test_turn_on()
-        else:
-            self.test_turn_on()
-            if not self.chameleon_mode:
-                time.sleep(5)
-            self.test_turn_off()
-
-        # Step 3
-        self.cec_cleanup()
-
-    # Chameleon
-    def copy_cecservice(self):
-        """ copy python files under ./chameleon_cecservice to chameleon.
-            In that folder, we have two files for cecservice.
-        """
-        current_dir = os.path.dirname(os.path.realpath(__file__))
-        base_dir = current_dir + '/chameleon_cecservice/'
-        self.chameleon.host.send_file(base_dir + 'cec_service', CHAMELEON_ROOT)
-        self.chameleon.host.send_file(base_dir + 'it6803.py', CHAMELEON_ROOT)
-
-    def cec_service_init(self):
-        """ Setup chameleon board as a hdmi mode.
-            Run cec service on chameleon
-        """
-        self.chameleon.host.run('/home/root/setup hdmi')
-        self.chameleon.host.run('(/home/root/cec_service start) '\
-            '</dev/null >/dev/null 2>&1 & echo -n $!')
-
-    def cec_cleanup(self):
-        """ Stop cec service on chameleon.
-            Delete files new coming on chameleon.
-        """
-        if self.chameleon_mode:
-            stop_cmd = 'kill $(ps | grep \'cec_service\' | awk \'{print $1}\')'
-            self.chameleon.host.run(stop_cmd)
-            cleanup_cmd = 'rm /home/root/cec_service /home/root/it6803*'
-            self.chameleon.host.run(cleanup_cmd)
-
-    # Fizz
-    def is_cecservice_running(self):
-        """ try to confirm that current box is running cecservice.
-        @return: whether cecservice is running in Fizz.
-        """
-        cmd = 'initctl list | grep cecservice'
-        result = self._shcmd(cmd)
-        if result is None:
-            return False
-        if "running" not in result.stdout:
-            return False
-        return True
-
-    def is_cec_available(self):
-        """ try to get whether the system makes /dev/cecX open.
-        @return: whether /dev/cecX is open in Fizz
-        """
-        cmd = "ls /dev/cec*"
-        result = self._shcmd(cmd)
-        if result is None:
-            return False
-        return True
-
-    def check_display_status(self):
-        """ try to confirm that current box connected to a display
-        which supports cec feature.
-        @return: current display power status
-        """
-        cmd = CEC_COMMAND + CEC_CMD_DISP_STATUS
-        result = self._shcmd(cmd).stdout
-        status = re.findall('int32 \\d', result)
-        for s in status:
-            code = int(s[-1])
-            if code == STATUS_ON or code == STATUS_TO_ON:
-                return STATUS_ON
-            if code == STATUS_OFF or code == STATUS_TO_OFF:
-                return STATUS_OFF
-        return STATUS_ERROR
-
-    def display_on(self):
-        """ send a power turn on cec message """
-        self._shcmd(CEC_COMMAND + CEC_CMD_IMAGEON)
-
-    def display_off(self):
-        """ send a power turn off cec message"""
-        self._shcmd(CEC_COMMAND + CEC_CMD_STANDBY)
-
-    def test_turn_on(self):
-        """ test sending turn_on cec message process. """
-        self.display_on()
-        if not self.chameleon_mode:
-            time.sleep(10)
-        status = self.check_display_status()
-        if STATUS_ON != status:
-            self.cec_cleanup()
-            raise error.TestFail("CEC display image on does not work.")
-
-    def test_turn_off(self):
-        """ test sending turn_off cec message process. """
-        self.display_off()
-        if not self.chameleon_mode:
-            time.sleep(1)
-        status = self.check_display_status()
-        if STATUS_OFF != status:
-            self.cec_cleanup()
-            raise error.TestFail("CEC display standby does not work.")
diff --git a/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/control.join_leave b/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/control.join_leave
deleted file mode 100644
index f4b8fe5..0000000
--- a/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/control.join_leave
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.server.cros.cfm.configurable_test.dsl import *
-from autotest_lib.server import utils
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_ConfigurableCfmTestSanity.join_leave"
-PURPOSE = "Verifies the configurable CfM test infra with a simple scenario"
-CRITERIA = "No errors occur"
-ATTRIBUTES = "suite:hotrod-remora, suite:bluestreak-pre-cq"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "server"
-
-DOC = """
-Verifies that we can specify and run a configurable cfm test.
-"""
-
-cfm_test = CfmTest(
-    scenario=Scenario(
-        RebootDut(restart_chrome_for_cfm=True),
-        CreateMeeting(),
-        RepeatTimes(5, Scenario(
-            MuteMicrophone(),
-            UnmuteMicrophone()
-        )),
-        LeaveMeeting()
-    ),
-    configuration=Configuration(
-        run_test_only = False
-    )
-)
-
-def run_test(machine):
-    job.run_test("enterprise_CFM_ConfigurableCfmTestSanity",
-                 cfm_test = cfm_test,
-                 tag = 'join_leave',
-                 host = hosts.create_host(machine))
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/control.verifications b/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/control.verifications
deleted file mode 100644
index b2c5e29..0000000
--- a/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/control.verifications
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.server.cros.cfm.configurable_test.dsl import *
-from autotest_lib.server import utils
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_ConfigurableCfmTestSanity.verifications"
-PURPOSE = "Verifies the configurable CfM test infra with a simple scenario"
-CRITERIA = "No errors occur"
-ATTRIBUTES = "suite:hotrod-remora, suite:bluestreak-pre-cq"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "server"
-
-DOC = """
-Verifies that we can specify and run verification and assertion steps in a
-configurable CFM test.
-"""
-
-cfm_test = CfmTest(
-    scenario=Scenario(
-        AssertUsbDevices([ATRUS], lambda devices: True),
-        RetryAssertAction(
-            AssertFileDoesNotContain('/var/log/messages', ['FOOERRORBAR']),
-            5,
-            0.1),
-        Sleep(0.1),
-        AssertFileDoesNotContain('/var/log/eventlog.txt', ['FOOERRORBAR']),
-        # Create some silly scenarios to be selected at random. The purpose
-        # of this is only to test the SelectScenarioAtRandom action.
-        # Since the predicate always returns true for the assert actions
-        # we do not actually verify that the devices exist.
-        SelectScenarioAtRandom(scenarios=[
-                Scenario(AssertUsbDevices([ATRUS], lambda devices: True),
-                    AssertFileDoesNotContain('/var/log/messages',
-                                             ['FOOERRORBAR'])),
-                Scenario(AssertUsbDevices([HUDDLY_GO], lambda devices: True)),
-                Scenario(AssertUsbDevices([ATRUS], lambda devices: True))],
-            run_times=3),
-        AssertNoNewCrashes()
-    ),
-
-    configuration=Configuration(
-        run_test_only = True
-    )
-)
-
-def run_test(machine):
-    job.run_test("enterprise_CFM_ConfigurableCfmTestSanity",
-                 cfm_test = cfm_test,
-                 tag = 'verifications',
-                 host = hosts.create_host(machine))
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/enterprise_CFM_ConfigurableCfmTestSanity.py b/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/enterprise_CFM_ConfigurableCfmTestSanity.py
deleted file mode 100644
index c4ca7c9..0000000
--- a/server/site_tests/enterprise_CFM_ConfigurableCfmTestSanity/enterprise_CFM_ConfigurableCfmTestSanity.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from autotest_lib.server.cros.cfm.configurable_test import configurable_cfm_test
-
-class enterprise_CFM_ConfigurableCfmTestSanity(
-        configurable_cfm_test.ConfigurableCfmTest):
-    """
-    Sanity test that verifies configurable CFM tests can run.
-    """
-    pass
diff --git a/server/site_tests/enterprise_CFM_HuddlyMonitor/control b/server/site_tests/enterprise_CFM_HuddlyMonitor/control
deleted file mode 100644
index 5b20147..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyMonitor/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "egemih@chromium.org"
-NAME = "enterprise_CFM_HuddlyMonitor"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-# TODO(egemih): enable once crbug.com/781734 is fixed.
-# ATTRIBUTES = "suite:hotrod"
-JOB_RETRIES = 3
-
-DOC = """
-This test performs Huddly error detection and subsequent revival.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test(NAME, host=host)
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_HuddlyMonitor/enterprise_CFM_HuddlyMonitor.py b/server/site_tests/enterprise_CFM_HuddlyMonitor/enterprise_CFM_HuddlyMonitor.py
deleted file mode 100644
index 66905be..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyMonitor/enterprise_CFM_HuddlyMonitor.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import dbus_send
-from autotest_lib.server import test
-from autotest_lib.server.cros.cfm import cfm_base_test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-LONG_TIMEOUT = 20
-SHORT_TIMEOUT = 1
-OBJ_INTERFACE = "org.chromium.huddlymonitor"
-
-class enterprise_CFM_HuddlyMonitor(cfm_base_test.CfmBaseTest):
-    """ Autotests for huddly-monitor, within cfm-device-monitor.
-
-    All autotests involve being in a cfm meeting, without loss of generality.
-
-    All test scenarios are in a function of their own, with the explanation
-    as docstring.
-    """
-    version = 1
-
-    def is_monitor_alive(self):
-        """Check if huddly-monitor is alive and registered on Dbus."""
-        result = dbus_send.dbus_send("org.freedesktop.DBus",
-                  "org.freedesktop.DBus",
-                  "/org/freedesktop/DBus",
-                  "ListNames",
-                  None,
-                  self._host,
-                  2,
-                  False,
-                  "cfm-monitor")
-        return OBJ_INTERFACE in result.response
-
-    def fake_error_should_remediate(self):
-        """ Enter an error message in kernel log buffer. Wait to see if
-        monitor detects it and remediates the camera accordingly """
-        err_msg = "sudo echo \"<3>uvcvideo: Failed AUTOTEST\" >> /dev/kmsg"
-
-        self._host.run(err_msg)
-
-        # Wait till camera reboots
-        time.sleep(LONG_TIMEOUT)
-        # Make sure camera is turned on
-        self.cfm_facade.unmute_camera()
-
-        # Check if camera operational
-        if self.cfm_facade.is_camera_muted():
-            raise error.TestFail("Camera still not functional.")
-
-    def fake_error_monitor_sleeping_no_action(self):
-        """Enter an error message in kernel log buffer when monitor is
-        sleeping. Make sure it does not detect it."""
-        err_msg = "sudo echo \"<3>uvcvideo: Failed AUTOTEST\" >> /dev/kmsg"
-
-        # Force-sleep monitor
-        self._host.run("/usr/bin/huddlymonitor_update false")
-
-        # Fake error
-        self._host.run(err_msg)
-
-        # Check camera is not hotplugged, since monitor asleep
-        if self.cfm_facade.is_camera_muted():
-            raise error.TestFail("Should not have hotplug.")
-
-
-    def monitor_woke_detect_earlier_error(self):
-        """Wake up monitor. Check to see if it detects an error message
-        that was entered earlier. This test assumes there already was an
-        error message in the kernel log buffer. Typically used after
-        fake_error_monitor_sleeping_no_action."""
-        # Wake up monitor
-        self._host.run("/usr/bin/huddlymonitor_update true")
-
-        # Wait till camera reboots, takes some time for monitor to wake up.
-        time.sleep(LONG_TIMEOUT)
-
-        self.cfm_facade.unmute_camera()
-
-        # Check if camera operational
-        if self.cfm_facade.is_camera_muted():
-            raise error.TestFail("Camera still not functional.")
-
-    def monitor_skip_unrelated_error(self):
-        """Enter a bogus kernel message. Check to see if the monitor detects
-        it. """
-        # Send error message not intended for monitor
-        self._host.run("sudo echo \"<4>uvcvideo: Failed \" >> /dev/kmsg")
-
-        # Make sure no action was taken
-        if self.cfm_facade.is_camera_muted():
-            raise error.TestFail("Should not have hotplug")
-
-    def run_once(self):
-        """Run the autotest suite once."""
-
-        self.cfm_facade.wait_for_meetings_telemetry_commands()
-        self.cfm_facade.start_meeting_session()
-        # Quit early if monitor is not alive.
-        result = self.is_monitor_alive()
-        if not result:
-            raise error.TestFail("Monitor not alive")
-
-        # Enough time to enter meeting
-        time.sleep(SHORT_TIMEOUT)
-
-        self.fake_error_should_remediate()
-        self.fake_error_monitor_sleeping_no_action()
-        self.monitor_woke_detect_earlier_error()
-        self.monitor_skip_unrelated_error()
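
The liveness check above goes through autotest's dbus_send helper, but conceptually it only asks
the system bus for its registered names and looks for org.chromium.huddlymonitor. A rough sketch
of the same idea, assuming it runs directly on the DUT instead of through the remote host object
(the helper below is ours, not part of autotest):

import subprocess

OBJ_INTERFACE = 'org.chromium.huddlymonitor'

def is_monitor_alive_local():
    """Return True if huddly-monitor has registered its name on the system bus."""
    reply = subprocess.check_output(
        ['dbus-send', '--system', '--print-reply',
         '--dest=org.freedesktop.DBus', '/org/freedesktop/DBus',
         'org.freedesktop.DBus.ListNames'],
        universal_newlines=True)
    return OBJ_INTERFACE in reply
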
diff --git a/server/site_tests/enterprise_CFM_HuddlyUpdater/control b/server/site_tests/enterprise_CFM_HuddlyUpdater/control
deleted file mode 100644
index ba1c17e..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyUpdater/control
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "porce@chromium.org"
-NAME = "enterprise_CFM_HuddlyUpdater"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-# TODO(crbug/763474) Disabled since it is currently failing constantly.
-# ATTRIBUTES = "suite:hotrod"
-JOB_RETRIES = 3
-DEPENDENCIES = "huddly"
-
-DOC = """
-This test performs the firmware update of HuddlyGo camera.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = None
-
-def run_test(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test(NAME, host=host)
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_HuddlyUpdater/enterprise_CFM_HuddlyUpdater.py b/server/site_tests/enterprise_CFM_HuddlyUpdater/enterprise_CFM_HuddlyUpdater.py
deleted file mode 100644
index bbbadf1..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyUpdater/enterprise_CFM_HuddlyUpdater.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import re
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import power_cycle_usb_util
-from autotest_lib.server import test
-import parse
-
-
-class enterprise_CFM_HuddlyUpdater(test.test):
-    """Tests the firmware updatability of HuddlyGo camera.
-
-    An event that triggers the firmware update is a power cycle of the USB
-    port to which the HuddlyGo camera is attached. The power cycle emulates
-    a power cycle of the ChromeBox or a reconnection of the peripheral
-    to the ChromeBox.
-
-    The test scenario involves power cycling a specific USB port of the
-    Guado ChromeBox: the front-left one. This imposes a restriction on the
-    testbed setup. This limitation is to be removed once full-fledged USB
-    power-cycle code is developed. TODO(frankhu).
-    """
-
-    version = 1
-    _failed_test_list = []
-
-    UPDATER_WAIT_TIME = 60  # sec
-
-    FIRMWARE_PKG_ORG = 'huddly'
-    FIRMWARE_PKG_TO_TEST = 'huddly052'
-    FIRMWARE_PKG_BACKUP = 'huddly.backup'
-
-    DUT_FIRMWARE_BASE = '/lib/firmware/'
-    DUT_FIRMWARE_SRC = os.path.join(DUT_FIRMWARE_BASE, FIRMWARE_PKG_ORG)
-    DUT_FIRMWARE_SRC_BACKUP = os.path.join(DUT_FIRMWARE_BASE,
-                                           FIRMWARE_PKG_BACKUP)
-    DUT_FIRMWARE_SRC_TEST = os.path.join(DUT_FIRMWARE_BASE,
-                                         FIRMWARE_PKG_TO_TEST)
-
-    def initialize(self):
-        """initialize is a stub function."""
-        # Placeholder.
-        pass
-
-    def ls(self):
-        """ls tracks the directories of interest."""
-        cmd = 'ls -l /lib/firmware/ | grep huddly'
-        result = self._shcmd(cmd)
-
-    def cleanup(self):
-        """Bring the originally bundled firmware package back."""
-        cmd = '[ -f {} ] && rm -rf {}'.format(self.DUT_FIRMWARE_SRC,
-                                              self.DUT_FIRMWARE_SRC)
-        self._shcmd(cmd)
-
-        cmd = 'mv {} {} && rm -rf {}'.format(self.DUT_FIRMWARE_SRC_BACKUP,
-                                             self.DUT_FIRMWARE_SRC,
-                                             self.DUT_FIRMWARE_SRC_TEST)
-        self._shcmd(cmd)
-
-    def _shcmd(self, cmd):
-        """A simple wrapper for remote shell command execution."""
-        logging.info('CMD: [%s]', cmd)
-        # result is a CmdResult-like object whose useful attributes include
-        # 'command', 'duration', 'exit_status', 'stderr' and 'stdout'.
-        try:
-            result = self._client.run(cmd)
-            if result.stderr:
-                logging.info('CMD ERR:\n' + result.stderr)
-            logging.info('CMD OUT:\n' + result.stdout)
-            return result
-        except Exception as e:
-            logging.error('Command failed: %s', e)
-
-    def copy_firmware(self):
-        """Copy test firmware package from server to the DUT."""
-        current_dir = os.path.dirname(os.path.realpath(__file__))
-        src_firmware_path = os.path.join(current_dir, self.FIRMWARE_PKG_TO_TEST)
-        dst_firmware_path = self.DUT_FIRMWARE_BASE
-
-        msg = 'copy firmware from {} to {}'.format(src_firmware_path,
-                                                   dst_firmware_path)
-        logging.info(msg)
-        self._client.send_file(
-            src_firmware_path, dst_firmware_path, delete_dest=True)
-
-    def update_firmware(self, firmware_pkg):
-        """Update the peripheral's firmware with the specified package.
-
-        @param firmware_pkg: A string of package name specified by the leaf
-                directory name in /lib/firmware/. See class constants
-                DUT_FIRMWARE_SRC*.
-        """
-        # Set up the firmware package to test with
-        firmware_path = os.path.join(self.DUT_FIRMWARE_BASE, firmware_pkg)
-        cmd = 'ln -sfn {} {}'.format(firmware_path, self.DUT_FIRMWARE_SRC)
-        self._shcmd(cmd)
-
-        ver_dic = self.get_fw_vers()
-        had = ver_dic.get('peripheral', {}).get('app', '')
-        want = ver_dic.get('package', {}).get('app', '')
-
-        msg = 'Update plan: from {} to {} with package: {}'.format(
-            had, want, firmware_pkg)
-        logging.info(msg)
-
-        logging.info('Recycle the power to the USB port '
-                     'to which HuddlyGo is attached.')
-        self.usb_power_recycle()
-        time.sleep(self.UPDATER_WAIT_TIME)
-
-        got = self.get_fw_vers().get('peripheral', {}).get('app', '')
-
-        msg = 'Update result: had {} want {} got {}'.format(
-            had, want, got)
-        logging.info(msg)
-
-        if want != got:
-            self._failed_test_list.append(
-                'update_firmware({})'.format(firmware_pkg))
-
-    def run_once(self, host=None):
-        """Update two times. First with test package, second with the original.
-
-        Test scenario:
-          1. Copy test firmware from the server to the DUT.
-          2. Update with the test package. Wait about 50 sec till completion.
-             Confirm if the peripheral is updated with the test version.
-          3. Update with the original package. Wait about 50 sec.
-             Confirm if the peripheral is updated with the original version.
-        """
-        self._client = host
-
-        if not self.is_filesystem_readwrite():
-            # Make the file system read-writable, reboot, and continue the test
-            logging.info('DUT root file system is not read-writable. '
-                         'Converting it to read-writable...')
-            self.convert_rootfs_writable()
-        else:
-            logging.info('DUT is read-writable')
-
-
-        try:
-            self.ls()
-            cmd = 'mv {} {}'.format(self.DUT_FIRMWARE_SRC,
-                                    self.DUT_FIRMWARE_SRC_BACKUP)
-            self._shcmd(cmd)
-
-            self.ls()
-            self.copy_firmware()
-            self.ls()
-            self.update_firmware(self.FIRMWARE_PKG_TO_TEST)
-            self.ls()
-            self.update_firmware(self.FIRMWARE_PKG_BACKUP)
-
-            if self._failed_test_list:
-                msg = 'Test failed in {}'.format(
-                    ', '.join(map(str, self._failed_test_list)))
-                raise error.TestFail(msg)
-        finally:
-            self.cleanup()
-
-    def convert_rootfs_writable(self):
-        """Remove rootfs verification on DUT, reboot,
-        and remount the filesystem read-writable"""
-
-        logging.info('Disabling rootfs verification...')
-        self.remove_rootfs_verification()
-
-        logging.info('Rebooting...')
-        self.reboot()
-
-        logging.info('Remounting..')
-        cmd = 'mount -o remount,rw /'
-        self._shcmd(cmd)
-
-    def remove_rootfs_verification(self):
-        """Remove rootfs verification."""
-        # 2 & 4 are default partitions, and the system boots from one of them.
-        # Code from chromite/scripts/deploy_chrome.py
-        KERNEL_A_PARTITION = 2
-        KERNEL_B_PARTITION = 4
-
-        cmd_template = ('/usr/share/vboot/bin/make_dev_ssd.sh --partitions %d '
-               '--remove_rootfs_verification --force')
-        for partition in (KERNEL_A_PARTITION, KERNEL_B_PARTITION):
-            cmd = cmd_template % partition
-            self._client.run(cmd)
-
-    def reboot(self):
-        """Reboots the DUT."""
-        self._client.reboot()
-
-    def get_fw_vers(self):
-        """Queries the firmware versions.
-
-        Utilizes the output of the command 'huddly-updater --info'.
-        It queries and parses the firmware versions of app and bootloader of
-        firmware package and the peripheral's running firmwares, respectively.
-
-        @returns a dictionary hierarchically storing the firmware versions.
-        """
-
-        # TODO(porce): The updater writes its output to stdout, but the
-        # autotest command result returns it on stderr. Investigate.
-        cmd = 'huddly-updater --info --log_to=stdout'
-        result = self._shcmd(cmd).stderr
-        ver_dic = parse.parse_fw_vers(result)
-        return ver_dic
-
-    def usb_power_recycle(self):
-        """Recycle the power to a USB port.
-
-        # Use Power cycle usb util to recycle power.
-        """
-
-        try:
-            power_cycle_usb_util.power_cycle_usb_vidpid(self.host,
-                                    self.board, self.vid, self.pid)
-        except KeyError:
-            raise error.TestFail('Couldn\'t find target device: '
-                                 'vid:pid {}:{}'.format(self.vid, self.pid))
-
-
-    def is_filesystem_readwrite(self):
-        """Check if the root file system is read-writable.
-
-        Query whether the DUT's filesystem /dev/root (often manifested as
-        /dev/dm-0) is mounted read-only or read-write.
-
-        @returns True if the /dev/root is read-writable. False otherwise.
-        """
-
-        cmd = 'cat /proc/mounts | grep "/dev/root"'
-        result = self._shcmd(cmd).stdout
-        fields = re.split(' |,', result)
-        return len(fields) >= 4 and fields[3] == 'rw'
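
Both this test and enterprise_CFM_LogitechMeetupUpdater below decide whether the rootfs is
writable by looking at the /dev/root entry in /proc/mounts and checking whether its first mount
option is 'rw'. A condensed standalone sketch of that check, assuming it runs on the DUT itself:

import re

def is_rootfs_readwrite(mounts_text):
    """Return True if the /dev/root entry in /proc/mounts lists 'rw' first."""
    for line in mounts_text.splitlines():
        if line.startswith('/dev/root'):
            # e.g. "/dev/root / ext2 rw,seclabel 0 0" -> first option is 'rw'
            fields = re.split(' |,', line)
            return len(fields) >= 4 and fields[3] == 'rw'
    return False

if __name__ == '__main__':
    with open('/proc/mounts') as mounts:
        print(is_rootfs_readwrite(mounts.read()))
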
diff --git a/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/bin/huddly.bin b/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/bin/huddly.bin
deleted file mode 100644
index 13250aa..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/bin/huddly.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/bin/huddly_boot.bin b/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/bin/huddly_boot.bin
deleted file mode 100644
index 2681f29..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/bin/huddly_boot.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/manifest.json b/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/manifest.json
deleted file mode 100644
index dd8ecae..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/manifest.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
-    "manifest_version": 2,
-    "compatible_hw": [
-        {
-            "pids": [
-                16,
-                17
-            ],
-            "vid": 11225,
-            "hwrevs": [
-                6
-            ]
-        }
-    ],
-    "files": [
-        {
-            "type": "mv2_app",
-            "version": {
-                "numerical": [
-                    0,
-                    5,
-                    2
-                ],
-                "git-descr": "huddly-0.5.2-release"
-            },
-            "name": "bin/huddly.bin",
-            "sha256": "7ab084861bee29b713a76290a897255463fdfd8673b7fddf0f26a9d5e9614ba3"
-        },
-        {
-            "type": "mv2_boot",
-            "version": {
-                "numerical": [
-                    0,
-                    2,
-                    2
-                ],
-                "git-descr": "huddly-bootloader-0.2.2-release"
-            },
-            "name": "bin/huddly_boot.bin",
-            "sha256": "d1163c7505e5a222c648b40a1eb4b5549dd352de96fc64ffcbee48fc1e206555"
-        }
-    ]
-}
\ No newline at end of file
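
The manifest.json removed above records, for each firmware blob (mv2_app, mv2_boot), its path
inside the package and a sha256 digest. A small hedged sketch, not part of huddly-updater, that
checks those digests against the files in a package directory:

import hashlib
import json
import os

def verify_manifest(package_dir):
    """Return a {blob name: True/False} map of sha256 matches for a firmware package."""
    with open(os.path.join(package_dir, 'manifest.json')) as fhandle:
        manifest = json.load(fhandle)
    results = {}
    for entry in manifest['files']:
        with open(os.path.join(package_dir, entry['name']), 'rb') as blob:
            digest = hashlib.sha256(blob.read()).hexdigest()
        results[entry['name']] = (digest == entry['sha256'])
    return results

# Example (hypothetical local checkout path):
# print(verify_manifest('server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052'))
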
diff --git a/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/manifest.txt b/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/manifest.txt
deleted file mode 100644
index 394b0e4..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyUpdater/huddly052/manifest.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-mv2_app.version:0.5.2
-hw_rev:6
-mv2_boot.version:0.2.2
diff --git a/server/site_tests/enterprise_CFM_HuddlyUpdater/parse.py b/server/site_tests/enterprise_CFM_HuddlyUpdater/parse.py
deleted file mode 100755
index d2dcd6e..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyUpdater/parse.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Parse the output of 'huddly-updater --info --log_to=stdout'.
-"""
-
-from __future__ import print_function
-
-TOKEN_FW_CHUNK_HEADER = 'Firmware package:'
-TOKEN_PERIPHERAL_CHUNK_HEADER = 'Camera Peripheral:'
-TOKEN_BOOT = 'bootloader:'
-TOKEN_APP = 'app:'
-TOKEN_REV = 'hw_rev:'
-
-
-def parse_fw_vers(chunk):
-    """Parse huddly-updater command output.
-
-    The parser logic heavily depends on the output format.
-
-    @param chunk: The huddly-updater output. See
-            samples/huddly-updater-info.log for an example.
-
-    @returns a dictionary containing the version strings
-            for the firmware package and for the peripheral.
-    """
-    dic = {}
-    target = ''
-    for line in chunk.split('\n'):
-        if TOKEN_FW_CHUNK_HEADER in line:
-            target = 'package'
-            dic[target] = {}
-            continue
-        elif TOKEN_PERIPHERAL_CHUNK_HEADER in line:
-            target = 'peripheral'
-            dic[target] = {}
-            continue
-
-        if not target:
-            continue
-
-        fields = line.split(':')
-        if len(fields) < 2:
-            continue
-
-        val = fields[1].strip()
-
-        if TOKEN_BOOT in line:
-            dic[target]['boot'] = val
-        elif TOKEN_APP in line:
-            dic[target]['app'] = val
-        elif TOKEN_REV in line:
-            dic[target]['hw_rev'] = val
-        else:
-            continue
-
-    return dic
diff --git a/server/site_tests/enterprise_CFM_HuddlyUpdater/parse_unittest.py b/server/site_tests/enterprise_CFM_HuddlyUpdater/parse_unittest.py
deleted file mode 100644
index 64ba0c7..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyUpdater/parse_unittest.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import unittest
-
-import parse
-
-
-class ParseHuddlyInfoTest(unittest.TestCase):
-    """Tests the output of huddly-updater --info."""
-
-    CHUNK_FILENAME = './samples/huddly-updater-info.log'
-
-    def test_parser(self):
-        want = {
-            'package': {
-                'app': '0.5.1',
-                'boot': '0.2.1',
-                'hw_rev': '6'
-            },
-            'peripheral': {
-                'app': '0.5.1',
-                'boot': '0.2.1',
-                'hw_rev': '6'
-            }
-        }
-
-        with open(self.CHUNK_FILENAME, 'r') as fhandle:
-            chunk = fhandle.read()
-
-        got = parse.parse_fw_vers(chunk)
-        self.assertDictEqual(want, got)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/server/site_tests/enterprise_CFM_HuddlyUpdater/samples/huddly-updater-info.log b/server/site_tests/enterprise_CFM_HuddlyUpdater/samples/huddly-updater-info.log
deleted file mode 100644
index 1c79524..0000000
--- a/server/site_tests/enterprise_CFM_HuddlyUpdater/samples/huddly-updater-info.log
+++ /dev/null
@@ -1,13 +0,0 @@
-[0530/130919:INFO:main.cc(78)] Starting Huddly Package Updater ..
-[0530/130919:INFO:main.cc(81)] Show info..
-[0530/130919:INFO:firmware.cc(171)]
-Firmware package:
-  dir:           /lib/firmware/huddly/
-  bootloader:    0.2.1
-  app:           0.5.1
-  hw_rev:        6
-[0530/130920:INFO:minicam_device.cc(142)]
-Camera Peripheral:
-  bootloader:  0.2.1
-  app:         0.5.1
-  hw_rev:      6
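
Taken together, parse.py and the sample log above show the expected behaviour: feeding the log
into parse_fw_vers() yields a dict keyed by 'package' and 'peripheral', each holding 'boot',
'app' and 'hw_rev' strings. A short usage sketch (assumes it is run from the test directory so
the deleted module and sample file resolve):

from __future__ import print_function

import parse  # the parse.py removed above

with open('samples/huddly-updater-info.log') as fhandle:
    chunk = fhandle.read()

versions = parse.parse_fw_vers(chunk)
# With the sample log above, both sections report app 0.5.1 and boot 0.2.1.
print('package:   ', versions['package'])
print('peripheral:', versions['peripheral'])
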
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/control b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/control
deleted file mode 100644
index 7637b7c..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/control
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "shijinabraham@chromium.org"
-NAME = "enterprise_CFM_LogitechMeetupUpdater"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-ATTRIBUTES = "suite:hotrod"
-TEST_TYPE = "server"
-JOB_RETRIES = 0
-DEPENDENCIES = "meetup"
-
-
-DOC = """
-This test verifies that the Logitech Meetup firmware updater is working
-as intended. This test performs the following
-- Make the rootfs writable.
-- Backup the original firmware.
-- Copy older firmware bundled with test.
-- Force upgrade the Meetup device to older firmware.
-- Powercycle the usb port to trigger the firmware updater.
-- Confirm firmware has been updated.
-- Cleanup
-This test works on Guado and Fizz Chromeboxes with a Logitech Meetup
-device connected.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_test(machine):
-    host = hosts.create_host(machine, servo_args=None)
-    job.run_test(NAME, host=host)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/enterprise_CFM_LogitechMeetupUpdater.py b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/enterprise_CFM_LogitechMeetupUpdater.py
deleted file mode 100644
index c7e17f6..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/enterprise_CFM_LogitechMeetupUpdater.py
+++ /dev/null
@@ -1,472 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Autotest for Logitech Meetup firmware updater."""
-
-from __future__ import print_function
-
-import logging
-import os
-import re
-import time
-
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import power_cycle_usb_util
-from autotest_lib.client.common_lib.cros.cfm.usb import cfm_usb_devices
-from autotest_lib.server import test
-
-
-POWER_CYCLE_WAIT_TIME_SEC = 20
-
-
-class enterprise_CFM_LogitechMeetupUpdater(test.test):
-    """
-    Logitech MeetUp firmware test on Chromebox for Meetings (CfM) devices.
-    The test performs the following steps:
-        1) Check if the filesystem is writable.
-           If not, make the filesystem writable and reboot.
-        2) Back up the existing firmware files on the DUT.
-        3) Copy the older firmware files to the DUT.
-        4) Force update the older firmware onto the MeetUp camera.
-        5) Restore the original firmware files on the DUT.
-        6) Power cycle the usb port to simulate unplug/replug of the device,
-           which should initiate a firmware update.
-        7) Wait for the firmware update to finish and check firmware version.
-        8) Cleanup.
-
-    """
-
-    version = 1
-
-    def initialize(self, host):
-        """
-        Initializes the class.
-
-        Stores the firmware file path.
-        Gets the board type.
-        Reads the current firmware versions.
-        """
-
-        self.host = host
-        self.log_file = '/tmp/logitech-updater.log'
-        self.fw_path_base = '/lib/firmware/logitech'
-        self.fw_pkg_origin = 'meetup'
-        self.fw_pkg_backup = 'meetup_backup'
-        self.fw_pkg_test = 'meetup_184'
-        self.fw_pkg_files = ['meetup_audio.bin',
-                             'meetup_audio_logicool.bin',
-                             'meetup_ble.bin',
-                             'meetup_codec.bin',
-                             'meetup_eeprom_logicool.s19',
-                             'meetup_eeprom.s19',
-                             'meetup_video.bin',
-                             'meetup_audio.bin.sig',
-                             'meetup_audio_logicool.bin.sig',
-                             'meetup_ble.bin.sig',
-                             'meetup_codec.bin.sig',
-                             'meetup_eeprom_logicool.s19.sig',
-                             'meetup_eeprom.s19.sig',
-                             'meetup_video.bin.sig']
-        self.fw_path_test = os.path.join(self.fw_path_base,
-                                         self.fw_pkg_test)
-        self.fw_path_origin = os.path.join(self.fw_path_base,
-                                           self.fw_pkg_origin)
-        self.fw_path_backup = os.path.join(self.fw_path_base,
-                                           self.fw_pkg_backup)
-        self.board = self.host.get_board().split(':')[1]
-        self.vid = cfm_usb_devices.LOGITECH_MEETUP.vendor_id
-        self.pid = cfm_usb_devices.LOGITECH_MEETUP.product_id
-        self.org_fw_ver = self.get_image_fw_ver()
-
-    def cleanup(self):
-        """
-        Cleanups after tests.
-
-        Removes the test firmware.
-        Restores the original firmware files.
-        Flashes the camera to original firmware if needed.
-        """
-
-        # Delete test firmware package.
-        cmd = 'rm -rf {}'.format(self.fw_path_test)
-        self.host.run(cmd)
-
-        # Delete the symlink created.
-        cmd = 'rm {}'.format(self.fw_path_origin)
-        self.host.run(cmd)
-
-        # Move the backup package back.
-        cmd = 'mv {} {}'.format(self.fw_path_backup, self.fw_path_origin)
-        self.host.run(cmd)
-
-        # Do not leave the camera with test (older) firmware.
-        if not self.is_device_firmware_equal_to(self.org_fw_ver):
-            logging.debug('Meetup device has old firmware after test. '
-                          'Flashing new firmware.')
-            self.flash_fw()
-
-        super(enterprise_CFM_LogitechMeetupUpdater, self).cleanup()
-
-    def _run_cmd(self, command, ignore_status=True):
-        """
-        Runs command line on DUT, wait for completion and return the output.
-
-        @param command: command line to run in dut.
-        @param ignore_status: if true, ignore the status returned by the command
-
-        @returns the command output
-
-        """
-
-        logging.debug('Execute: %s', command)
-
-        result = self.host.run(command, ignore_status=ignore_status)
-        if result.stderr:
-            output = result.stderr
-        else:
-            output = result.stdout
-        logging.debug('Output: %s', output)
-        return output
-
-    def make_rootfs_writable(self):
-        """Checks and makes root filesystem writable."""
-
-        if not self.is_filesystem_readwrite():
-            logging.info('DUT root file system is not writable. '
-                         'Converting it to writable...')
-            self.convert_rootfs_writable()
-        else:
-            logging.info('DUT root file system is writable.')
-
-    def convert_rootfs_writable(self):
-        """Makes DUT rootfs writable."""
-
-        logging.info('Disabling rootfs verification...')
-        self.remove_rootfs_verification()
-
-        logging.info('Rebooting...')
-        self.host.reboot()
-
-        logging.info('Remounting..')
-        cmd = 'mount -o remount,rw /'
-        self.host.run(cmd)
-
-    def remove_rootfs_verification(self):
-        """Removes rootfs verification."""
-
-        # 2 & 4 are default partitions, and the system boots from one of them.
-        # Code from chromite/scripts/deploy_chrome.py
-        KERNEL_A_PARTITION = 2
-        KERNEL_B_PARTITION = 4
-
-        cmd_template = ('/usr/share/vboot/bin/make_dev_ssd.sh'
-                        ' --partitions "%d %d"'
-                        ' --remove_rootfs_verification --force')
-        cmd = cmd_template % (KERNEL_A_PARTITION, KERNEL_B_PARTITION)
-        self.host.run(cmd)
-
-    def is_filesystem_readwrite(self):
-        """Checks if the root file system is writable."""
-
-        # Query the DUT's filesystem /dev/root and check whether it is rw
-
-        cmd = 'cat /proc/mounts | grep "/dev/root"'
-        result = self._run_cmd(cmd)
-        fields = re.split(' |,', result)
-
-        # Result of grep will be of the following format
-        # /dev/root / ext2 ro,seclabel <....truncated...> => readonly
-        # /dev/root / ext2 rw,seclabel <....truncated...> => readwrite
-        is_writable = fields.__len__() >= 4 and fields[3] == 'rw'
-        return is_writable
-
-    def fw_ver_from_output_str(self, cmd_output):
-        """
-        Parse firmware versions from logitech-updater output.
-
-        The logitech-updater output differs between --image_version and
-        --device_version. This function finds the line which contains the
-        string "MeetUp" and parses the succeeding lines. Each line is split
-        on spaces (after collapsing spaces); index 1 gives the component name
-        (e.g. Eeprom) and index 3 gives the firmware version (e.g. 1.14).
-        The actual output is given below.
-
-        logitech-updater --image_version
-
-        [INFO:main.cc(105)] PTZ Pro 2 Versions:
-        [INFO:main.cc(59)] Video version:  2.0.175
-        [INFO:main.cc(61)] Eeprom version: 1.6
-        [INFO:main.cc(63)] Mcu2 version:   3.9
-
-        [INFO:main.cc(105)] MeetUp Versions:
-        [INFO:main.cc(59)] Video version:  1.0.197
-        [INFO:main.cc(61)] Eeprom version: 1.14
-        [INFO:main.cc(65)] Audio version:  1.0.239
-        [INFO:main.cc(67)] Codec version:  8.0.216
-        [INFO:main.cc(69)] BLE version:    1.0.121
-
-        logitech-updater  --device_version
-
-        [INFO:main.cc(88)] Device name:    Logitech MeetUp
-        [INFO:main.cc(59)] Video version:  1.0.197
-        [INFO:main.cc(61)] Eeprom version: 1.14
-        [INFO:main.cc(65)] Audio version:  1.0.239
-        [INFO:main.cc(67)] Codec version:  8.0.216
-        [INFO:main.cc(69)] BLE version:    1.0.121
-
-
-        """
-
-        logging.debug('Parsing output from updater %s', cmd_output)
-        if 'MeetUp image not found' in cmd_output or 'MeetUp' not in cmd_output:
-            raise error.TestFail('MeetUp image not found on DUT')
-        try:
-            version = {}
-            output = cmd_output.split('\n')
-            start_line = -1
-
-            # Find the first output line that contains the string "MeetUp".
-            for i, l in enumerate(output):
-                if 'MeetUp' in l:
-                    start_line = i
-                    break
-
-            if start_line == -1:
-                raise error.TestFail('Meetup version not found'
-                                     ' in updater output')
-
-            output = output[start_line+1:start_line+6]
-            logging.debug('Parsing Meetup firmware info %s', str(output))
-            for l in output:
-
-                # Output lines are of the format
-                # [INFO:main.cc(59)] Video version:  1.0.197
-                l = ' '.join(l.split())  # Collapse multiple spaces to one space
-                parts = l.split(' ')  # parts[1] is "Video" parts[3] is 1.0.197
-                version[parts[1]] = parts[3]
-            logging.debug('Version is %s', str(version))
-            return version
-        except:
-            logging.error('Error while parsing logitech-updater output')
-            raise
-
-    def get_updater_output(self, cmd):
-        """Get updater output while avoiding transient failures."""
-
-        NUM_RETRIES = 3
-        WAIT_TIME = 5
-        for _ in range(NUM_RETRIES):
-            output = self._run_cmd(cmd)
-            if 'Failed to read' in output:
-                time.sleep(WAIT_TIME)
-                continue
-            return output
-
-    def get_image_fw_ver(self):
-        """Get the version of firmware on DUT."""
-
-        output = self.get_updater_output('logitech-updater --image_version'
-                                         ' --log_to=stdout')
-        return self.fw_ver_from_output_str(output)
-
-    def get_device_fw_ver(self):
-        """Get the version of firmware on Meetup device."""
-
-        output = self.get_updater_output('logitech-updater --device_version'
-                                         ' --log_to=stdout')
-        return self.fw_ver_from_output_str(output)
-
-    def copy_test_firmware(self):
-        """Copy test firmware from server to DUT."""
-
-        current_dir = os.path.dirname(os.path.realpath(__file__))
-        src_firmware_path = os.path.join(current_dir, self.fw_pkg_test)
-        dst_firmware_path = self.fw_path_base
-        logging.info('Copy firmware from (%s) to (%s).', src_firmware_path,
-                     dst_firmware_path)
-        self.host.send_file(src_firmware_path, dst_firmware_path,
-                            delete_dest=True)
-
-    def trigger_updater(self):
-        """Trigger udev rule to run fw updater by power cycling the usb."""
-
-        try:
-            power_cycle_usb_util.power_cycle_usb_vidpid(self.host, self.board,
-                                                        self.vid, self.pid)
-        except KeyError:
-            raise error.TestFail('Couldn\'t find target device: '
-                                 'vid:pid {}:{}'.format(self.vid, self.pid))
-
-    def wait_for_meetup_device(self):
-        """
-        Wait for the MeetUp device to be enumerated.
-
-        Check if a device with the given (vid, pid) is present.
-        Time out after WAIT_TIME seconds (30 seconds by default).
-        """
-
-        TIME_SLEEP = 10
-        NUM_ITERATIONS = 3
-        WAIT_TIME = TIME_SLEEP * NUM_ITERATIONS
-
-        logging.debug('Waiting for Meetup device')
-        for _ in range(NUM_ITERATIONS):
-            res = power_cycle_usb_util.get_port_number_from_vidpid(
-                self.host, self.vid, self.pid)
-            (bus_num, port_num) = res
-            if bus_num is not None and port_num is not None:
-                logging.debug('Meetup device detected')
-                return
-            else:
-                logging.debug('Meetup device not detected. '
-                              'Waiting for (%s) seconds', TIME_SLEEP)
-                time.sleep(TIME_SLEEP)
-
-        logging.error('Unable to detect the device after (%s) seconds. '
-                      'Timing out...', WAIT_TIME)
-        raise error.TestFail('Target device not detected.')
-
-    def setup_fw(self, firmware_package):
-        """Setup firmware package that is going to be used for updating."""
-
-        firmware_path = os.path.join(self.fw_path_base, firmware_package)
-        cmd = 'ln -sfn {} {}'.format(firmware_path, self.fw_path_origin)
-        self.host.run(cmd)
-
-    def flash_fw(self, force=False):
-        """Flash certain firmware to device.
-
-        Run the logitech firmware updater on the DUT to flash the currently
-        set up firmware package to the target device (Logitech MeetUp).
-
-        @param force: run with force update, will bypass fw version check.
-
-        """
-
-        cmd = ('/usr/sbin/logitech-updater --log_to=stdout --update_components'
-               ' --lock')
-        if force:
-            cmd += ' --force'
-        output = self._run_cmd(cmd)
-        return output
-
-    def print_fw_version(self, version, info_str=''):
-        """Pretty print Meetup firmware version."""
-
-        if info_str:
-            print(info_str)
-        print('Video version: ', version['Video'])
-        print('Eeprom version: ', version['Eeprom'])
-        print('Audio version: ', version['Audio'])
-        print('Codec version: ', version['Codec'])
-        print('BLE version: ', version['BLE'])
-
-    def is_device_firmware_equal_to(self, expected_ver):
-        """Check that the device fw version is equal to given version."""
-
-        device_fw_version = self.get_device_fw_ver()
-        if device_fw_version != expected_ver:
-            logging.error('Device firmware version is not the expected version')
-            self.print_fw_version(device_fw_version, 'Device firmware version')
-            self.print_fw_version(expected_ver, 'Expected firmware version')
-            return False
-        else:
-            return True
-
-    def flash_old_firmware(self):
-        """Flash old (test) version of firmware on the device."""
-
-        # Flash old FW to device.
-        self.setup_fw(self.fw_pkg_test)
-        test_fw_ver = self.get_image_fw_ver()
-        self.print_fw_version(test_fw_ver, 'Test firmware version')
-        output = self.flash_fw(force=True)
-        time.sleep(POWER_CYCLE_WAIT_TIME_SEC)
-        with open(self.log_file, 'w') as f:
-            delim = '-' * 8
-            f.write('{}Log info for writing old firmware{}'
-                    '\n'.format(delim, delim))
-            f.write(output)
-        if not self.is_device_firmware_equal_to(test_fw_ver):
-            raise error.TestFail('Flashing old firmware failed')
-        logging.info('Device flashed with test firmware')
-
-    def backup_original_firmware(self):
-        """Backup existing firmware on DUT."""
-        # Move the existing firmware package aside as a backup.
-        cmd = 'mv {} {}'.format(self.fw_path_origin, self.fw_path_backup)
-        self.host.run(cmd)
-
-    def is_updater_running(self):
-        """Checks if the logitech-updater is running."""
-
-        cmd = 'logitech-updater --lock --device_version --log_to=stdout'
-        output = self._run_cmd(cmd)
-        return 'There is another logitech-updater running' in output
-
-    def wait_for_updater(self):
-        """Wait logitech-updater to stop or timeout after 6 minutes."""
-
-        NUM_ITERATION = 12
-        WAIT_TIME = 30  # seconds
-        logging.debug('Wait for any currently running updater to finish')
-        for _ in range(NUM_ITERATION):
-            if self.is_updater_running():
-                logging.debug('logitech-updater is running. '
-                              'Waiting for 30 seconds')
-                time.sleep(WAIT_TIME)
-            else:
-                logging.debug('logitech-updater not running')
-                return
-        logging.error('logitech-updater is still running after 6 minutes')
-
-    def test_firmware_update(self):
-        """Trigger firmware updater and check device firmware version."""
-
-        # Simulate hotplug to run FW updater.
-        logging.info('Setup original firmware')
-        self.setup_fw(self.fw_pkg_backup)
-        logging.info('Simulate hot plugging the device')
-        self.trigger_updater()
-        self.wait_for_meetup_device()
-
-        # The firmware check will fail if the check runs in a short window
-        # between the device being detected and the firmware updater starting.
-        # Adding a delay to reduce the chance of that scenario.
-        time.sleep(POWER_CYCLE_WAIT_TIME_SEC)
-
-        self.wait_for_updater()
-
-        if not self.is_device_firmware_equal_to(self.org_fw_ver):
-            raise error.TestFail('Camera not updated to new firmware')
-        logging.info('Firmware update was completed successfully')
-
-    def run_once(self):
-        """
-        Entry point for test.
-
-        The following actions are performed in this test.
-        - Device is flashed with older firmware.
-        - Power cycle the usb port to simulate a hotplug in order to start the updater.
-        - Check that the device is updated with newer firmware.
-        """
-
-        # Check if updater is already running
-        self.wait_for_updater()
-
-        self.print_fw_version(self.org_fw_ver,
-                              'Original firmware version on DUT')
-        self.print_fw_version(self.get_device_fw_ver(),
-                              'Firmware version on Meetup device')
-
-        self.make_rootfs_writable()
-        self.backup_original_firmware()
-
-        # Flash test firmware version
-        self.copy_test_firmware()
-        self.flash_old_firmware()
-
-        # Test firmware update
-        self.test_firmware_update()
-        logging.info('Logitech Meetup firmware updater test was successful')
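
The core of fw_ver_from_output_str() above is a plain line split: find the header line containing
'MeetUp', then for each of the next five lines take token 1 as the component name and token 3 as
the version. A condensed standalone sketch, exercised with the sample updater output quoted in the
docstring:

def parse_meetup_versions(updater_output):
    """Return {component: version} parsed from logitech-updater output."""
    lines = updater_output.split('\n')
    start = next(i for i, line in enumerate(lines) if 'MeetUp' in line)
    versions = {}
    for line in lines[start + 1:start + 6]:
        parts = ' '.join(line.split()).split(' ')  # collapse runs of spaces
        versions[parts[1]] = parts[3]
    return versions

SAMPLE = """\
[INFO:main.cc(105)] MeetUp Versions:
[INFO:main.cc(59)] Video version:  1.0.197
[INFO:main.cc(61)] Eeprom version: 1.14
[INFO:main.cc(65)] Audio version:  1.0.239
[INFO:main.cc(67)] Codec version:  8.0.216
[INFO:main.cc(69)] BLE version:    1.0.121"""

assert parse_meetup_versions(SAMPLE)['Eeprom'] == '1.14'
assert parse_meetup_versions(SAMPLE)['Video'] == '1.0.197'
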
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio.bin b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio.bin
deleted file mode 100644
index 2487efd..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio.bin.sig b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio.bin.sig
deleted file mode 100644
index ae1d6ee..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio.bin.sig
+++ /dev/null
@@ -1 +0,0 @@
-23b2d0a67479b5786344dbb9e93cd5d4238dc5d592bf475b454c9c719381bd779b66e6a8f8c555693b9f8686b492634a7d5996316ee5c18ea87883cdadb64c17
\ No newline at end of file
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio_logicool.bin b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio_logicool.bin
deleted file mode 100644
index 54f7167..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio_logicool.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio_logicool.bin.sig b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio_logicool.bin.sig
deleted file mode 100644
index f4d2ab5..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_audio_logicool.bin.sig
+++ /dev/null
@@ -1 +0,0 @@
-1e0f6548b221b40940561b43296b03eeaecbbcd124213fe1710568b886ef61b7dff65bcccbc1caa87b5506dcb20e20efae8cf75902849bd4e6d58f9e35d4ec5c
\ No newline at end of file
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_ble.bin b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_ble.bin
deleted file mode 100644
index 8c1ed24..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_ble.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_ble.bin.sig b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_ble.bin.sig
deleted file mode 100644
index 459ad64..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_ble.bin.sig
+++ /dev/null
@@ -1 +0,0 @@
-1ff03a7db79d0b7fd94a669181846609ad08da11f065a28e1f7267ad00c3a6655b59926723bf8ffb4b99e945b8e9cee5a4e51debded77f4b67b520c2a1575416
\ No newline at end of file
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_codec.bin b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_codec.bin
deleted file mode 100644
index 54836fb4..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_codec.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_codec.bin.sig b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_codec.bin.sig
deleted file mode 100644
index db79ce6..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_codec.bin.sig
+++ /dev/null
@@ -1 +0,0 @@
-1a3ab248fca83ff9bba8bb412913e23516e8739372d0a4f83b03f91d0c352104e9220c19f18eeb50dd6897ef1a891a2b5160f2d2b760e749c5d5ca98c68ec08c
\ No newline at end of file
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom.s19 b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom.s19
deleted file mode 100644
index dfcf0a6..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom.s19
+++ /dev/null
@@ -1,258 +0,0 @@
-S00600004844521B
-S1130000AA550000006D0466081700123456780DD6
-S11300100101011E4C006F00670069007400650057
-S11300206300680020004D00650065007400550001
-S113003070000000000000000000000000008003C9
-S1130040050A010AB42A143202000000000000006C
-S1130050003D3000000000000000000064000000CB
-S11300600000000012120000000000000000000068
-S1130070000000000000000000000000000000007C
-S1130080000000000000000000000000000000006C
-S1130090000000000000000000000000000004C098
-S11300A00A1014000000000000000100000000001D
-S11300B00000006400640000000000000000000074
-S11300C0000000000000000000000000000000AA82
-S11300D055000080008000028000800000000000C5
-S11300E0000000000000000000000000000000000C
-S11300F000000000000000000000000000000000FC
-S113010000000000000000000000000000000000EB
-S113011000000000000000000000000000000000DB
-S113012000000000000000000000000000000000CB
-S113013000000000640000000000640000000000F3
-S1130140640000000000640000000000640000007F
-S11301500000640000000000000000000000000037
-S1130160000000000000000000000000000000008B
-S1130170000000000000000000000000000000007B
-S1130180000000000000000000000000000000006B
-S1130190000000000000000000000000000000005B
-S11301A0000000000000000000000000000000004B
-S11301B0000000000000000000000000000000003B
-S11301C0000000000000000000000000000000002B
-S11301D0000000000000000000000000000000001B
-S11301E0000000000000000000000000000000000B
-S11301F000000000000000000000000000000000FB
-S113020000000000000000000000000000000000EA
-S113021000000000000000000000000000000000DA
-S113022000000000000000000000000000000000CA
-S113023000000000000000000000000000000000BA
-S113024000000000000000000000000000000000AA
-S1130250000000000000000000000000000000009A
-S1130260000000000000000000000000000000008A
-S1130270000000000000000000000000000000007A
-S1130280000000000000000000000000000000006A
-S1130290000000000000000000000000000000005A
-S11302A0000000000000000000000000000000004A
-S11302B0000000000000000000000000000000003A
-S11302C0000000000000000000000000000000002A
-S11302D0000000000000000000000000000000001A
-S11302E0000000000000000000000000000000000A
-S11302F000000000000000000000000000000000FA
-S113030000000000000000000000000000000000E9
-S113031000000000000000000000000000000000D9
-S113032000000000000000000000000000000000C9
-S113033000000000000000000000000000000000B9
-S113034000000000000000000000000000000000A9
-S11303500000000000000000000000000000000099
-S11303600000000000000000000000000000000089
-S11303700000000000000000000000000000000079
-S11303800000000000000000000000000000000069
-S11303900000000000000000000000000000000059
-S11303A00000000000000000000000000000000049
-S11303B00000000000000000000000000000000039
-S11303C00000000000000000000000000000000029
-S11303D00000000000000000000000000000000019
-S11303E00000000000000000000000000000000009
-S11303F000000000000000000000000000000000F9
-S113040000000000000000000000000000000000E8
-S113041000000000000000000000000000000000D8
-S113042000000000000000000000000000000000C8
-S113043000000000000000000000000000000000B8
-S113044000000000000000000000000000000000A8
-S11304500000000000000000000000000000000098
-S11304600000000000000000000000000000000088
-S11304700000000000000000000000000000000078
-S11304800000000000000000000000000000000068
-S11304900000000000000000000000000000000058
-S11304A00000000000000000000000000000000048
-S11304B00000000000000000000000000000000038
-S11304C00000000000000000000000000000000028
-S11304D00000000000000000000000000000000018
-S11304E00000000000000000000000000000000008
-S11304F000000000000000000000000000000000F8
-S113050000000000000000000000000000000000E7
-S113051000000000000000000000000000000000D7
-S113052000000000000000000000000000000000C7
-S113053000000000000000000000000000000000B7
-S113054000000000000000000000000000000000A7
-S11305500000000000000000000000000000000097
-S11305600000000000000000000000000000000087
-S11305700000000000000000000000000000000077
-S11305800000000000000000000000000000000067
-S11305900000000000000000000000000000000057
-S11305A00000000000000000000000000000000047
-S11305B00000000000000000000000000000000037
-S11305C00000000000000000000000000000000027
-S11305D00000000000000000000000000000000017
-S11305E00000000000000000000000000000000007

-S11305F000000000000000000000000000000000F7

-S113060000000000000000000000000000000000E6

-S113061000000000000000000000000000000000D6

-S113062000000000000000000000000000000000C6

-S113063000000000000000000000000000000000B6

-S113064000000000000000000000000000000000A6

-S11306500000000000000000000000000000000096

-S11306600000000000000000000000000000000086

-S11306700000000000000000000000000000000076

-S11306800000000000000000000000000000000066

-S11306900000000000000000000000000000000056

-S11306A00000000000000000000000000000000046

-S11306B00000000000000000000000000000000036

-S11306C00000000000000000000000000000000026

-S11306D00000000000000000000000000000000016

-S11306E00000000000000000000000000000000006

-S11306F000000000000000000000000000000000F6

-S113070000000000000000000000000000000000E5

-S113071000000000000000000000000000000000D5

-S113072000000000000000000000000000000000C5

-S113073000000000000000000000000000000000B5

-S113074000000000000000000000000000000000A5

-S11307500000000000000000000000000000000095

-S11307600000000000000000000000000000000085

-S11307700000000000000000000000000000000075

-S11307800000000000000000000000000000000065

-S11307900000000000000000000000000000000055

-S11307A00000000000000000000000000000000045

-S11307B00000000000000000000000000000000035

-S11307C00000000000000000000000000000000025

-S11307D00000000000000000000000000000000015

-S11307E00000000000000000000000000000000005

-S11307F000000000000000000000000000000000F5

-S113080000000000000000000000000000000000E4

-S113081000000000000000000000000000000000D4

-S113082000000000000000000000000000000000C4

-S113083000000000000000000000000000000000B4

-S113084000000000000000000000000000000000A4

-S11308500000000000000000000000000000000094

-S11308600000000000000000000000000000000084

-S11308700000000000000000000000000000000074

-S11308800000000000000000000000000000000064

-S11308900000000000000000000000000000000054

-S11308A00000000000000000000000000000000044

-S11308B00000000000000000000000000000000034

-S11308C00000000000000000000000000000000024

-S11308D00000000000000000000000000000000014

-S11308E00000000000000000000000000000000004

-S11308F000000000000000000000000000000000F4

-S113090000000000000000000000000000000000E3

-S113091000000000000000000000000000000000D3

-S113092000000000000000000000000000000000C3

-S113093000000000000000000000000000000000B3

-S113094000000000000000000000000000000000A3

-S11309500000000000000000000000000000000093

-S11309600000000000000000000000000000000083

-S11309700000000000000000000000000000000073

-S11309800000000000000000000000000000000063

-S11309900000000000000000000000000000000053

-S11309A00000000000000000000000000000000043

-S11309B00000000000000000000000000000000033

-S11309C00000000000000000000000000000000023

-S11309D00000000000000000000000000000000013

-S11309E00000000000000000000000000000000003

-S11309F000000000000000000000000000000000F3

-S1130A0000000000000000000000000000000000E2

-S1130A1000000000000000000000000000000000D2

-S1130A2000000000000000000000000000000000C2

-S1130A3000000000000000000000000000000000B2

-S1130A4000000000000000000000000000000000A2

-S1130A500000000000000000000000000000000092

-S1130A600000000000000000000000000000000082

-S1130A700000000000000000000000000000000072

-S1130A800000000000000000000000000000000062

-S1130A900000000000000000000000000000000052

-S1130AA00000000000000000000000000000000042

-S1130AB00000000000000000000000000000000032

-S1130AC00000000000000000000000000000000022

-S1130AD00000000000000000000000000000000012

-S1130AE00000000000000000000000000000000002

-S1130AF000000000000000000000000000000000F2

-S1130B0000000000000000000000000000000000E1

-S1130B1000000000000000000000000000000000D1

-S1130B2000000000000000000000000000000000C1

-S1130B3000000000000000000000000000000000B1

-S1130B4000000000000000000000000000000000A1

-S1130B500000000000000000000000000000000091

-S1130B600000000000000000000000000000000081

-S1130B700000000000000000000000000000000071

-S1130B800000000000000000000000000000000061

-S1130B900000000000000000000000000000000051

-S1130BA00000000000000000000000000000000041

-S1130BB00000000000000000000000000000000031

-S1130BC00000000000000000000000000000000021

-S1130BD00000000000000000000000000000000011

-S1130BE00000000000000000000000000000000001

-S1130BF000000000000000000000000000000000F1

-S1130C0000000000000000000000000000000000E0

-S1130C1000000000000000000000000000000000D0

-S1130C2000000000000000000000000000000000C0

-S1130C3000000000000000000000000000000000B0

-S1130C4000000000000000000000000000000000A0

-S1130C500000000000000000000000000000000090

-S1130C600000000000000000000000000000000080

-S1130C700000000000000000000000000000000070

-S1130C800000000000000000000000000000000060

-S1130C900000000000000000000000000000000050

-S1130CA00000000000000000000000000000000040

-S1130CB00000000000000000000000000000000030

-S1130CC00000000000000000000000000000000020

-S1130CD00000000000000000000000000000000010

-S1130CE00000000000000000000000000000000000

-S1130CF000000000000000000000000000000000F0

-S1130D0000000000000000000000000000000000DF

-S1130D1000000000000000000000000000000000CF

-S1130D2000000000000000000000000000000000BF

-S1130D3000000000000000000000000000000000AF

-S1130D40000000000000000000000000000000009F

-S1130D50000000000000000000000000000000008F

-S1130D60000000000000000000000000000000007F

-S1130D70000000000000000000000000000000006F

-S1130D80000000000000000000000000000000005F

-S1130D90000000000000000000000000000000004F

-S1130DA0000000000000000000000000000000003F

-S1130DB0000000000000000000000000000000002F

-S1130DC0000000000000000000000000000000001F

-S1130DD0000000000000000000000000000000000F

-S1130DE000000000000000000000000000000000FF

-S1130DF000000000000000000000000000000000EF

-S1130E0000000000000000000000000000000000DE

-S1130E1000000000000000000000000000000000CE

-S1130E2000000000000000000000000000000000BE

-S1130E3000000000000000000000000000000000AE

-S1130E40000000000000000000000000000000009E

-S1130E50000000000000000000000000000000008E

-S1130E60000000000000000000000000000000007E

-S1130E70000000000000000000000000000000006E

-S1130E80000000000000000000000000000000005E

-S1130E90000000000000000000000000000000004E

-S1130EA0000000000000000000000000000000003E

-S1130EB0000000000000000000000000000000002E

-S1130EC0000000000000000000000000000000001E

-S1130ED0000000000000000000000000000000000E

-S1130EE000000000000000000000000000000000FE

-S1130EF000000000000000000000000000000000EE

-S1130F0000000000000000000000000000000000DD

-S1130F1000000000000000000000000000000000CD

-S1130F2000000000000000000000000000000000BD

-S1130F3000000000000000000000000000000000AD

-S1130F40000000000000000000000000000000009D

-S1130F50000000000000000000000000000000008D

-S1130F60000000000000000000000000000000007D

-S1130F70000000000000000000000000000000006D

-S1130F80000000000000000000000000000000005D

-S1130F90000000000000000000000000000000004D

-S1130FA0000000000000000000000000000000003D

-S1130FB0000000000000000000000000000000002D

-S1130FC0000000000000000000000000000000001D

-S1130FD0000000000000000000000000000000000D

-S1130FE000000000000000000000000000000000FD

-S1130FF000000000000000000000000000000000ED

-S9030000FC

diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom.s19.sig b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom.s19.sig
deleted file mode 100644
index 03c13b9..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom.s19.sig
+++ /dev/null
@@ -1 +0,0 @@
-6c37cc3599cdda1b7df6aa714ccf0fa90d7592a9963a8d0ead97be3286ad0cd768d2b4f975fbc01a7678234afc47c6b345aca3f6d78441cfdcdba4fb534c3903
\ No newline at end of file
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom_logicool.s19 b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom_logicool.s19
deleted file mode 100644
index f3d8462..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom_logicool.s19
+++ /dev/null
@@ -1,258 +0,0 @@
-S00600004844521B

-S1130000AA550000006D0466081700123456780DD6

-S11300100201011E4C006F006700690063006f005D

-S11300206f006C0020004D006500650074005500F1

-S113003070000000000000000000000000008003C9

-S1130040050A010AB42A143202000000000000006C

-S1130050003D3000000000000000000064000000CB

-S11300600000000012120000000000000000000068

-S1130070000000000000000000000000000000007C

-S1130080000000000000000000000000000000006C

-S1130090000000000000000000000000000004C098

-S11300A00A1014000000000000000100000000001D

-S11300B00000006400640000000000000000000074

-S11300C0000000000000000000000000000000AA82

-S11300D055000080008000028000800000000000C5

-S11300E0000000000000000000000000000000000C

-S11300F000000000000000000000000000000000FC

-S113010000000000000000000000000000000000EB

-S113011000000000000000000000000000000000DB

-S113012000000000000000000000000000000000CB

-S113013000000000640000000000640000000000F3

-S1130140640000000000640000000000640000007F

-S11301500000640000000000000000000000000037

-S1130160000000000000000000000000000000008B

-S1130170000000000000000000000000000000007B

-S1130180000000000000000000000000000000006B

-S1130190000000000000000000000000000000005B

-S11301A0000000000000000000000000000000004B

-S11301B0000000000000000000000000000000003B

-S11301C0000000000000000000000000000000002B

-S11301D0000000000000000000000000000000001B

-S11301E0000000000000000000000000000000000B

-S11301F000000000000000000000000000000000FB

-S113020000000000000000000000000000000000EA

-S113021000000000000000000000000000000000DA

-S113022000000000000000000000000000000000CA

-S113023000000000000000000000000000000000BA

-S113024000000000000000000000000000000000AA

-S1130250000000000000000000000000000000009A

-S1130260000000000000000000000000000000008A

-S1130270000000000000000000000000000000007A

-S1130280000000000000000000000000000000006A

-S1130290000000000000000000000000000000005A

-S11302A0000000000000000000000000000000004A

-S11302B0000000000000000000000000000000003A

-S11302C0000000000000000000000000000000002A

-S11302D0000000000000000000000000000000001A

-S11302E0000000000000000000000000000000000A

-S11302F000000000000000000000000000000000FA

-S113030000000000000000000000000000000000E9

-S113031000000000000000000000000000000000D9

-S113032000000000000000000000000000000000C9

-S113033000000000000000000000000000000000B9

-S113034000000000000000000000000000000000A9

-S11303500000000000000000000000000000000099

-S11303600000000000000000000000000000000089

-S11303700000000000000000000000000000000079

-S11303800000000000000000000000000000000069

-S11303900000000000000000000000000000000059

-S11303A00000000000000000000000000000000049

-S11303B00000000000000000000000000000000039

-S11303C00000000000000000000000000000000029

-S11303D00000000000000000000000000000000019

-S11303E00000000000000000000000000000000009

-S11303F000000000000000000000000000000000F9

-S113040000000000000000000000000000000000E8

-S113041000000000000000000000000000000000D8

-S113042000000000000000000000000000000000C8

-S113043000000000000000000000000000000000B8

-S113044000000000000000000000000000000000A8

-S11304500000000000000000000000000000000098

-S11304600000000000000000000000000000000088

-S11304700000000000000000000000000000000078

-S11304800000000000000000000000000000000068

-S11304900000000000000000000000000000000058

-S11304A00000000000000000000000000000000048

-S11304B00000000000000000000000000000000038

-S11304C00000000000000000000000000000000028

-S11304D00000000000000000000000000000000018

-S11304E00000000000000000000000000000000008

-S11304F000000000000000000000000000000000F8

-S113050000000000000000000000000000000000E7

-S113051000000000000000000000000000000000D7

-S113052000000000000000000000000000000000C7

-S113053000000000000000000000000000000000B7

-S113054000000000000000000000000000000000A7

-S11305500000000000000000000000000000000097

-S11305600000000000000000000000000000000087

-S11305700000000000000000000000000000000077

-S11305800000000000000000000000000000000067

-S11305900000000000000000000000000000000057

-S11305A00000000000000000000000000000000047

-S11305B00000000000000000000000000000000037

-S11305C00000000000000000000000000000000027

-S11305D00000000000000000000000000000000017

-S11305E00000000000000000000000000000000007

-S11305F000000000000000000000000000000000F7

-S113060000000000000000000000000000000000E6

-S113061000000000000000000000000000000000D6

-S113062000000000000000000000000000000000C6

-S113063000000000000000000000000000000000B6

-S113064000000000000000000000000000000000A6

-S11306500000000000000000000000000000000096

-S11306600000000000000000000000000000000086

-S11306700000000000000000000000000000000076

-S11306800000000000000000000000000000000066

-S11306900000000000000000000000000000000056

-S11306A00000000000000000000000000000000046

-S11306B00000000000000000000000000000000036

-S11306C00000000000000000000000000000000026

-S11306D00000000000000000000000000000000016

-S11306E00000000000000000000000000000000006

-S11306F000000000000000000000000000000000F6

-S113070000000000000000000000000000000000E5

-S113071000000000000000000000000000000000D5

-S113072000000000000000000000000000000000C5

-S113073000000000000000000000000000000000B5

-S113074000000000000000000000000000000000A5

-S11307500000000000000000000000000000000095

-S11307600000000000000000000000000000000085

-S11307700000000000000000000000000000000075

-S11307800000000000000000000000000000000065

-S11307900000000000000000000000000000000055

-S11307A00000000000000000000000000000000045

-S11307B00000000000000000000000000000000035

-S11307C00000000000000000000000000000000025

-S11307D00000000000000000000000000000000015

-S11307E00000000000000000000000000000000005

-S11307F000000000000000000000000000000000F5

-S113080000000000000000000000000000000000E4

-S113081000000000000000000000000000000000D4

-S113082000000000000000000000000000000000C4

-S113083000000000000000000000000000000000B4

-S113084000000000000000000000000000000000A4

-S11308500000000000000000000000000000000094

-S11308600000000000000000000000000000000084

-S11308700000000000000000000000000000000074

-S11308800000000000000000000000000000000064

-S11308900000000000000000000000000000000054

-S11308A00000000000000000000000000000000044

-S11308B00000000000000000000000000000000034

-S11308C00000000000000000000000000000000024

-S11308D00000000000000000000000000000000014

-S11308E00000000000000000000000000000000004

-S11308F000000000000000000000000000000000F4

-S113090000000000000000000000000000000000E3

-S113091000000000000000000000000000000000D3

-S113092000000000000000000000000000000000C3

-S113093000000000000000000000000000000000B3

-S113094000000000000000000000000000000000A3

-S11309500000000000000000000000000000000093

-S11309600000000000000000000000000000000083

-S11309700000000000000000000000000000000073

-S11309800000000000000000000000000000000063

-S11309900000000000000000000000000000000053

-S11309A00000000000000000000000000000000043

-S11309B00000000000000000000000000000000033

-S11309C00000000000000000000000000000000023

-S11309D00000000000000000000000000000000013

-S11309E00000000000000000000000000000000003

-S11309F000000000000000000000000000000000F3

-S1130A0000000000000000000000000000000000E2

-S1130A1000000000000000000000000000000000D2

-S1130A2000000000000000000000000000000000C2

-S1130A3000000000000000000000000000000000B2

-S1130A4000000000000000000000000000000000A2

-S1130A500000000000000000000000000000000092

-S1130A600000000000000000000000000000000082

-S1130A700000000000000000000000000000000072

-S1130A800000000000000000000000000000000062

-S1130A900000000000000000000000000000000052

-S1130AA00000000000000000000000000000000042

-S1130AB00000000000000000000000000000000032

-S1130AC00000000000000000000000000000000022

-S1130AD00000000000000000000000000000000012

-S1130AE00000000000000000000000000000000002

-S1130AF000000000000000000000000000000000F2

-S1130B0000000000000000000000000000000000E1

-S1130B1000000000000000000000000000000000D1

-S1130B2000000000000000000000000000000000C1

-S1130B3000000000000000000000000000000000B1

-S1130B4000000000000000000000000000000000A1

-S1130B500000000000000000000000000000000091

-S1130B600000000000000000000000000000000081

-S1130B700000000000000000000000000000000071

-S1130B800000000000000000000000000000000061

-S1130B900000000000000000000000000000000051

-S1130BA00000000000000000000000000000000041

-S1130BB00000000000000000000000000000000031

-S1130BC00000000000000000000000000000000021

-S1130BD00000000000000000000000000000000011

-S1130BE00000000000000000000000000000000001

-S1130BF000000000000000000000000000000000F1

-S1130C0000000000000000000000000000000000E0

-S1130C1000000000000000000000000000000000D0

-S1130C2000000000000000000000000000000000C0

-S1130C3000000000000000000000000000000000B0

-S1130C4000000000000000000000000000000000A0

-S1130C500000000000000000000000000000000090

-S1130C600000000000000000000000000000000080

-S1130C700000000000000000000000000000000070

-S1130C800000000000000000000000000000000060

-S1130C900000000000000000000000000000000050

-S1130CA00000000000000000000000000000000040

-S1130CB00000000000000000000000000000000030

-S1130CC00000000000000000000000000000000020

-S1130CD00000000000000000000000000000000010

-S1130CE00000000000000000000000000000000000

-S1130CF000000000000000000000000000000000F0

-S1130D0000000000000000000000000000000000DF

-S1130D1000000000000000000000000000000000CF

-S1130D2000000000000000000000000000000000BF

-S1130D3000000000000000000000000000000000AF

-S1130D40000000000000000000000000000000009F

-S1130D50000000000000000000000000000000008F

-S1130D60000000000000000000000000000000007F

-S1130D70000000000000000000000000000000006F

-S1130D80000000000000000000000000000000005F

-S1130D90000000000000000000000000000000004F

-S1130DA0000000000000000000000000000000003F

-S1130DB0000000000000000000000000000000002F

-S1130DC0000000000000000000000000000000001F

-S1130DD0000000000000000000000000000000000F

-S1130DE000000000000000000000000000000000FF

-S1130DF000000000000000000000000000000000EF

-S1130E0000000000000000000000000000000000DE

-S1130E1000000000000000000000000000000000CE

-S1130E2000000000000000000000000000000000BE

-S1130E3000000000000000000000000000000000AE

-S1130E40000000000000000000000000000000009E

-S1130E50000000000000000000000000000000008E

-S1130E60000000000000000000000000000000007E

-S1130E70000000000000000000000000000000006E

-S1130E80000000000000000000000000000000005E

-S1130E90000000000000000000000000000000004E

-S1130EA0000000000000000000000000000000003E

-S1130EB0000000000000000000000000000000002E

-S1130EC0000000000000000000000000000000001E

-S1130ED0000000000000000000000000000000000E

-S1130EE000000000000000000000000000000000FE

-S1130EF000000000000000000000000000000000EE

-S1130F0000000000000000000000000000000000DD

-S1130F1000000000000000000000000000000000CD

-S1130F2000000000000000000000000000000000BD

-S1130F3000000000000000000000000000000000AD

-S1130F40000000000000000000000000000000009D

-S1130F50000000000000000000000000000000008D

-S1130F60000000000000000000000000000000007D

-S1130F70000000000000000000000000000000006D

-S1130F80000000000000000000000000000000005D

-S1130F90000000000000000000000000000000004D

-S1130FA0000000000000000000000000000000003D

-S1130FB0000000000000000000000000000000002D

-S1130FC0000000000000000000000000000000001D

-S1130FD0000000000000000000000000000000000D

-S1130FE000000000000000000000000000000000FD

-S1130FF000000000000000000000000000000000ED

-S9030000FC

diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom_logicool.s19.sig b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom_logicool.s19.sig
deleted file mode 100644
index fbe6d0c..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_eeprom_logicool.s19.sig
+++ /dev/null
@@ -1 +0,0 @@
-82744d2b2a0cefdd04594ef593624bae953ce49d540ff26b20048cb1c62f71373aff6fd25d97d3542488d567c5c9c924a7856fd67c7209af7903921589a21144
\ No newline at end of file
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_video.bin b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_video.bin
deleted file mode 100644
index 2d4253d..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_video.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_video.bin.sig b/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_video.bin.sig
deleted file mode 100644
index 3353bf5..0000000
--- a/server/site_tests/enterprise_CFM_LogitechMeetupUpdater/meetup_184/meetup_video.bin.sig
+++ /dev/null
@@ -1 +0,0 @@
-88c9e4f08824db43f37ea4841823df41a63a5ac33a1f0b06e39e29999af1ee126d79ccea277f7f0806a3958c5a71f21b43e67163369e8541bf1a0bf9e5187798
\ No newline at end of file
diff --git a/server/site_tests/enterprise_CFM_LogitechPtzUpdater/control b/server/site_tests/enterprise_CFM_LogitechPtzUpdater/control
deleted file mode 100644
index a8136e2..0000000
--- a/server/site_tests/enterprise_CFM_LogitechPtzUpdater/control
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "frankhu@chromium.org"
-NAME = "enterprise_CFM_LogitechPtzUpdater"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-ATTRIBUTES = "suite:hotrod"
-TEST_TYPE = "server"
-JOB_RETRIES = 3
-DEPENDENCIES = "ptzpro2"
-
-DOC = """
-This test first flashes an older Logitech PTZ Pro 2 firmware to the camera and
-then power cycles the corresponding USB port. This should trigger the udev
-rule that invokes the Logitech FW updater to flash the latest FW to the PTZ
-Pro 2. This is validated by running the updater again and checking the log.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_test(machine):
-    host = hosts.create_host(machine, servo_args=None)
-    job.run_test(NAME, host=host)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_LogitechPtzUpdater/enterprise_CFM_LogitechPtzUpdater.py b/server/site_tests/enterprise_CFM_LogitechPtzUpdater/enterprise_CFM_LogitechPtzUpdater.py
deleted file mode 100644
index 77c3da2..0000000
--- a/server/site_tests/enterprise_CFM_LogitechPtzUpdater/enterprise_CFM_LogitechPtzUpdater.py
+++ /dev/null
@@ -1,239 +0,0 @@
-# Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Autotest for Logitech PTZPro firmware updater functionality and udev rule."""
-
-from __future__ import print_function
-import logging
-import os
-import re
-import time
-
-from autotest_lib.client.common_lib.cros import power_cycle_usb_util
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-POWER_CYCLE_WAIT_TIME = 1  # seconds
-UPDATER_WAIT_TIME = 100  # seconds
-
-
-class enterprise_CFM_LogitechPtzUpdater(test.test):
-    """Logitech Ptz Pro firmware updater functionality test in Chrome Box.
-
-    The procedure of the test is:
-    1. setup old firmware as dufault firmware on DUT
-    2. flash old version FW to device,
-    3. setup new firmware as default firmware on DUT
-    4. power cycle usb port to simulate unplug and replug of device, which
-       should be able to trigger udev rule and run the updater,
-    5. wait for the updater to finish,
-    6. run fw updater again and verify that the FW in device is consistent
-       with latest FW within system by checking the output.
-    """
-
-    version = 1
-
-    _LOG_FILE_PATH = '/tmp/logitech-updater.log'
-    _FW_PATH_BASE = '/lib/firmware/logitech'
-    _FW_PKG_ORIGIN = 'ptzpro2'
-    _FW_PKG_BACKUP = 'ptzpro2_backup'
-    _FW_PKG_TEST = 'ptzpro2_154'
-    _FW_PATH_ORIGIN = os.path.join(_FW_PATH_BASE, _FW_PKG_ORIGIN)
-    _FW_PATH_BACKUP = os.path.join(_FW_PATH_BASE, _FW_PKG_BACKUP)
-    _FW_PATH_TEST = os.path.join(_FW_PATH_BASE, _FW_PKG_TEST)
-    _DUT_BOARD = 'guado'
-    _SIS_VID = '046d'
-    _SIS_PID = '085f'
-
-    def initialize(self, host):
-        self.host = host
-        self.log_file = self._LOG_FILE_PATH
-        self.board = self._DUT_BOARD
-        self.vid = self._SIS_VID
-        self.pid = self._SIS_PID
-        # Open log file object.
-        self.log_file_obj = open(self.log_file, 'w')
-
-    def cleanup(self):
-        self.log_file_obj.close()
-        test.test.cleanup(self)
-
-        # Delete test firmware package.
-        cmd = 'rm -rf {}'.format(self._FW_PATH_TEST)
-        self._run_cmd(cmd)
-
-        # Delete the symlink created.
-        cmd = 'rm {}'.format(self._FW_PATH_ORIGIN)
-        self._run_cmd(cmd)
-
-        # Move the backup package back.
-        cmd = 'mv {} {}'.format(self._FW_PATH_BACKUP, self._FW_PATH_ORIGIN)
-        self._run_cmd(cmd)
-
-    def _run_cmd(self, command, str_compare='', print_output=False):
-        """Run command line on DUT.
-
-        Run a command on the DUT, wait for it to complete, then check the
-        output for the expected string.
-
-        @param command: command line to run on the DUT.
-        @param str_compare: a substring we expect to see in the output of
-                the command.
-        @param print_output: if True, print the command output in the log.
-
-        @returns the command output and a bool: True if str_compare is in
-                the command output, False otherwise.
-
-        """
-
-        logging.info('Execute: %s', command)
-        result = self.host.run(command, ignore_status=True)
-        if result.stderr:
-            output = result.stderr
-        else:
-            output = result.stdout
-        if print_output:
-            logging.info('Output: %s', output.split('\n'))
-        if str_compare and str_compare not in ''.join(output):
-            return output, False
-        else:
-            return output, True
-
-    def convert_rootfs_writable(self):
-        """Remove rootfs verification on DUT, reboot,
-        and remount the filesystem read-writable"""
-
-        logging.info('Disabling rootfs verification...')
-        self.remove_rootfs_verification()
-
-        logging.info('Rebooting...')
-        self.reboot()
-
-        logging.info('Remounting..')
-        cmd = 'mount -o remount,rw /'
-        self._run_cmd(cmd)
-
-    def remove_rootfs_verification(self):
-        """Remove rootfs verification."""
-
-        # 2 & 4 are default partitions, and the system boots from one of them.
-        # Code from chromite/scripts/deploy_chrome.py
-        KERNEL_A_PARTITION = 2
-        KERNEL_B_PARTITION = 4
-
-        cmd_template = ('/usr/share/vboot/bin/make_dev_ssd.sh --partitions %d '
-                        '--remove_rootfs_verification --force')
-        for partition in (KERNEL_A_PARTITION, KERNEL_B_PARTITION):
-            cmd = cmd_template % partition
-            self._run_cmd(cmd)
-
-    def reboot(self):
-        """Reboots the DUT."""
-
-        self.host.reboot()
-
-    def is_filesystem_readwrite(self):
-        """Check if the root file system is read-writable.
-
-        Query whether the DUT's root filesystem /dev/root (often manifested
-        as /dev/dm-0) is mounted read-only or read-write.
-
-        @returns True if /dev/root is read-writable, False otherwise.
-        """
-
-        cmd = 'cat /proc/mounts | grep "/dev/root"'
-        result, _ = self._run_cmd(cmd)
-        fields = re.split(' |,', result)
-        return len(fields) >= 4 and fields[3] == 'rw'
-
-    def copy_firmware(self):
-        """Copy test firmware from server to DUT."""
-
-        current_dir = os.path.dirname(os.path.realpath(__file__))
-        src_firmware_path = os.path.join(current_dir, self._FW_PKG_TEST)
-        dst_firmware_path = self._FW_PATH_BASE
-        logging.info('Copy firmware from {} to {}.'.format(src_firmware_path,
-                                                           dst_firmware_path))
-        self.host.send_file(src_firmware_path, dst_firmware_path, delete_dest=True)
-
-    def triger_updater(self):
-        """Triger udev rule to run fw updater by power cycling the usb."""
-
-        try:
-            power_cycle_usb_util.power_cycle_usb_vidpid(self.host, self.board,
-                                                        self.vid, self.pid)
-        except KeyError:
-            raise error.TestFail('Couldn\'t find target device: '
-                                 'vid:pid {}:{}'.format(self.vid, self.pid))
-
-    def setup_fw(self, firmware_package):
-        """Setup firmware package that is going to be used for updating."""
-
-        firmware_path = os.path.join(self._FW_PATH_BASE, firmware_package)
-        cmd = 'ln -sfn {} {}'.format(firmware_path, self._FW_PATH_ORIGIN)
-        self._run_cmd(cmd)
-
-    def flash_fw(self, str_compare='', print_output=False, force=False):
-        """Flash certain firmware to device.
-
-        Run the Logitech firmware updater on the DUT to flash the currently
-        configured firmware package to the target device (PTZ Pro 2).
-
-        @param force: if True, force the update, bypassing the FW version check.
-        @param str_compare, print_output: same as in _run_cmd.
-
-        """
-
-        if force:
-            cmd_run_updater = ('/usr/sbin/logitech-updater'
-                               ' --log_to=stdout --update --force')
-        else:
-            cmd_run_updater = ('/usr/sbin/logitech-updater --log_to=stdout --update')
-        output, succeed = self._run_cmd(
-            cmd_run_updater, str_compare=str_compare, print_output=print_output)
-        return output, succeed
-
-    def run_once(self):
-        """Main test procedure."""
-
-        # Make the DUT filesystem writable.
-        if not self.is_filesystem_readwrite():
-            logging.info('DUT root file system is not read-writable. '
-                         'Converting it to read-writable...')
-            self.convert_rootfs_writable()
-        else:
-            logging.info('DUT is read-writable.')
-
-        # Copy old FW to device.
-        cmd = 'mv {} {}'.format(self._FW_PATH_ORIGIN, self._FW_PATH_BACKUP)
-        self._run_cmd(cmd)
-        self.copy_firmware()
-
-        # Flash old FW to device.
-        self.setup_fw(self._FW_PKG_TEST)
-        expect_output = 'Done. Updated firmwares successfully.'
-        output, succeed = self.flash_fw(str_compare=expect_output, force=True)
-        self.log_file_obj.write('{}Log info for writing '
-                                'old firmware{}\n'.format('-' * 8, '-' * 8))
-        self.log_file_obj.write(output)
-        if not succeed:
-            raise error.TestFail('Expect \'{}\' in output, '
-                                 'but didn\'t find it.'.format(expect_output))
-
-        # Trigger udev to run the FW updater.
-        self.setup_fw(self._FW_PKG_BACKUP)
-        self.triger_updater()
-
-        # Wait for fw updater to finish.
-        time.sleep(UPDATER_WAIT_TIME)
-
-        # Try flashing the new firmware; it should detect the same FW version.
-        expect_output = 'Firmware is up to date.'
-        output, succeed = self.flash_fw(str_compare=expect_output)
-        self.log_file_obj.write('{}Log info for writing '
-                                'new firmware{}\n'.format('-' * 8, '-' * 8))
-        self.log_file_obj.write(output)
-        if not succeed:
-            raise error.TestFail('Expect {} in output '
-                                 'but didn\'t find it.'.format(expect_output))
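For reference, the trigger mechanism removed above (re-pointing the firmware symlink, then power cycling the camera's USB port so the udev rule re-runs the updater) can be sketched as a standalone server-side helper. This is a minimal sketch, not the original test: the helper name and its arguments are hypothetical, while the VID/PID constants, the imports, and the power_cycle_usb_vidpid signature are taken from the deleted code above.

    import logging
    import time

    from autotest_lib.client.common_lib import error
    from autotest_lib.client.common_lib.cros import power_cycle_usb_util

    PTZPRO2_VID = '046d'     # Logitech vendor ID, from the deleted test constants
    PTZPRO2_PID = '085f'     # PTZ Pro 2 product ID, from the deleted test constants
    UPDATER_WAIT_TIME = 100  # seconds to give the udev-triggered updater

    def swap_firmware_and_trigger(host, board, fw_package, fw_symlink):
        """Point fw_symlink at fw_package, then power cycle the camera.

        Power cycling the USB port simulates unplug/replug, which fires the
        udev rule that launches /usr/sbin/logitech-updater against whatever
        fw_symlink currently points to.
        """
        # Re-point the symlink that the updater reads its firmware from.
        host.run('ln -sfn {} {}'.format(fw_package, fw_symlink))
        try:
            power_cycle_usb_util.power_cycle_usb_vidpid(
                    host, board, PTZPRO2_VID, PTZPRO2_PID)
        except KeyError:
            raise error.TestFail('Could not find target device vid:pid '
                                 '{}:{}'.format(PTZPRO2_VID, PTZPRO2_PID))
        logging.info('Waiting %ds for the udev-triggered updater to finish.',
                     UPDATER_WAIT_TIME)
        time.sleep(UPDATER_WAIT_TIME)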
diff --git a/server/site_tests/enterprise_CFM_LogitechPtzUpdater/ptzpro2_154/ptzpro2_eeprom.s19 b/server/site_tests/enterprise_CFM_LogitechPtzUpdater/ptzpro2_154/ptzpro2_eeprom.s19
deleted file mode 100644
index a34828c..0000000
--- a/server/site_tests/enterprise_CFM_LogitechPtzUpdater/ptzpro2_154/ptzpro2_eeprom.s19
+++ /dev/null
@@ -1,258 +0,0 @@
-S00600004844521B

-S1130000AA550000006D045F0817000000000005F9

-S113001001010112500054005A00200050007200E7

-S11300206F002000320020000000000000000000EB

-S11300300000000000000000000000000000800339

-S1130040030A010AB42A143202000000000000006E

-S1130050003D3000000000000000000064000000CB

-S1130060000000000000000000000000000000008C

-S1130070000000000000000000000000000000007C

-S1130080000000000000000000000000000000006C

-S1130090000000000000000000000000000004C098

-S11300A00A1014B004010000000000000000000069

-S11300B0000000000000000000000000000000003C

-S11300C0000000000000000000000000000000002C

-S11300D00000000000000000000000000000AA551D

-S11300E0000080008000008000800000000000000C

-S11300F064006400640064006400640064006400DC

-S113010000000000000000000000000000000000EB

-S113011000000000000000000000000000000000DB

-S113012000000000000000000000000000000000CB

-S113013000000000000000000000000000000000BB

-S113014000000000000000000000000000000000AB

-S1130150000000000000000000000000000000009B

-S1130160000000000000000000000000000000008B

-S1130170000000000000000000000000000000007B

-S1130180000000000000000000000000000000006B

-S1130190000000000000000000000000000000005B

-S11301A0000000000000000000000000000000004B

-S11301B0000000000000000000000000000000003B

-S11301C0000000000000000000000000000000002B

-S11301D0000000000000000000000000000000001B

-S11301E0000000000000000000000000000000000B

-S11301F000000000000000000000000000000000FB

-S113020000000000000000000000000000000000EA

-S113021000000000000000000000000000000000DA

-S113022000000000000000000000000000000000CA

-S113023000000000000000000000000000000000BA

-S113024000000000000000000000000000000000AA

-S1130250000000000000000000000000000000009A

-S1130260000000000000000000000000000000008A

-S1130270000000000000000000000000000000007A

-S1130280000000000000000000000000000000006A

-S1130290000000000000000000000000000000005A

-S11302A0000000000000000000000000000000004A

-S11302B0000000000000000000000000000000003A

-S11302C0000000000000000000000000000000002A

-S11302D0000000000000000000000000000000001A

-S11302E0000000000000000000000000000000000A

-S11302F000000000000000000000000000000000FA

-S113030000000000000000000000000000000000E9

-S113031000000000000000000000000000000000D9

-S113032000000000000000000000000000000000C9

-S113033000000000000000000000000000000000B9

-S113034000000000000000000000000000000000A9

-S11303500000000000000000000000000000000099

-S11303600000000000000000000000000000000089

-S11303700000000000000000000000000000000079

-S11303800000000000000000000000000000000069

-S11303900000000000000000000000000000000059

-S11303A00000000000000000000000000000000049

-S11303B00000000000000000000000000000000039

-S11303C00000000000000000000000000000000029

-S11303D00000000000000000000000000000000019

-S11303E00000000000000000000000000000000009

-S11303F000000000000000000000000000000000F9

-S113040000000000000000000000000000000000E8

-S113041000000000000000000000000000000000D8

-S113042000000000000000000000000000000000C8

-S113043000000000000000000000000000000000B8

-S113044000000000000000000000000000000000A8

-S11304500000000000000000000000000000000098

-S11304600000000000000000000000000000000088

-S11304700000000000000000000000000000000078

-S11304800000000000000000000000000000000068

-S11304900000000000000000000000000000000058

-S11304A00000000000000000000000000000000048

-S11304B00000000000000000000000000000000038

-S11304C00000000000000000000000000000000028

-S11304D00000000000000000000000000000000018

-S11304E00000000000000000000000000000000008

-S11304F000000000000000000000000000000000F8

-S113050000000000000000000000000000000000E7

-S113051000000000000000000000000000000000D7

-S113052000000000000000000000000000000000C7

-S113053000000000000000000000000000000000B7

-S113054000000000000000000000000000000000A7

-S11305500000000000000000000000000000000097

-S11305600000000000000000000000000000000087

-S11305700000000000000000000000000000000077

-S11305800000000000000000000000000000000067

-S11305900000000000000000000000000000000057

-S11305A00000000000000000000000000000000047

-S11305B00000000000000000000000000000000037

-S11305C00000000000000000000000000000000027

-S11305D00000000000000000000000000000000017

-S11305E00000000000000000000000000000000007

-S11305F000000000000000000000000000000000F7

-S113060000000000000000000000000000000000E6

-S113061000000000000000000000000000000000D6

-S113062000000000000000000000000000000000C6

-S113063000000000000000000000000000000000B6

-S113064000000000000000000000000000000000A6

-S11306500000000000000000000000000000000096

-S11306600000000000000000000000000000000086

-S11306700000000000000000000000000000000076

-S11306800000000000000000000000000000000066

-S11306900000000000000000000000000000000056

-S11306A00000000000000000000000000000000046

-S11306B00000000000000000000000000000000036

-S11306C00000000000000000000000000000000026

-S11306D00000000000000000000000000000000016

-S11306E00000000000000000000000000000000006

-S11306F000000000000000000000000000000000F6

-S113070000000000000000000000000000000000E5

-S113071000000000000000000000000000000000D5

-S113072000000000000000000000000000000000C5

-S113073000000000000000000000000000000000B5

-S113074000000000000000000000000000000000A5

-S11307500000000000000000000000000000000095

-S11307600000000000000000000000000000000085

-S11307700000000000000000000000000000000075

-S11307800000000000000000000000000000000065

-S11307900000000000000000000000000000000055

-S11307A00000000000000000000000000000000045

-S11307B00000000000000000000000000000000035

-S11307C00000000000000000000000000000000025

-S11307D00000000000000000000000000000000015

-S11307E00000000000000000000000000000000005

-S11307F000000000000000000000000000000000F5

-S113080000000000000000000000000000000000E4

-S113081000000000000000000000000000000000D4

-S113082000000000000000000000000000000000C4

-S113083000000000000000000000000000000000B4

-S113084000000000000000000000000000000000A4

-S11308500000000000000000000000000000000094

-S11308600000000000000000000000000000000084

-S11308700000000000000000000000000000000074

-S11308800000000000000000000000000000000064

-S11308900000000000000000000000000000000054

-S11308A00000000000000000000000000000000044

-S11308B00000000000000000000000000000000034

-S11308C00000000000000000000000000000000024

-S11308D00000000000000000000000000000000014

-S11308E00000000000000000000000000000000004

-S11308F000000000000000000000000000000000F4

-S113090000000000000000000000000000000000E3

-S113091000000000000000000000000000000000D3

-S113092000000000000000000000000000000000C3

-S113093000000000000000000000000000000000B3

-S113094000000000000000000000000000000000A3

-S11309500000000000000000000000000000000093

-S11309600000000000000000000000000000000083

-S11309700000000000000000000000000000000073

-S11309800000000000000000000000000000000063

-S11309900000000000000000000000000000000053

-S11309A00000000000000000000000000000000043

-S11309B00000000000000000000000000000000033

-S11309C00000000000000000000000000000000023

-S11309D00000000000000000000000000000000013

-S11309E00000000000000000000000000000000003

-S11309F000000000000000000000000000000000F3

-S1130A0000000000000000000000000000000000E2

-S1130A1000000000000000000000000000000000D2

-S1130A2000000000000000000000000000000000C2

-S1130A3000000000000000000000000000000000B2

-S1130A4000000000000000000000000000000000A2

-S1130A500000000000000000000000000000000092

-S1130A600000000000000000000000000000000082

-S1130A700000000000000000000000000000000072

-S1130A800000000000000000000000000000000062

-S1130A900000000000000000000000000000000052

-S1130AA00000000000000000000000000000000042

-S1130AB00000000000000000000000000000000032

-S1130AC00000000000000000000000000000000022

-S1130AD00000000000000000000000000000000012

-S1130AE00000000000000000000000000000000002

-S1130AF000000000000000000000000000000000F2

-S1130B0000000000000000000000000000000000E1

-S1130B1000000000000000000000000000000000D1

-S1130B2000000000000000000000000000000000C1

-S1130B3000000000000000000000000000000000B1

-S1130B4000000000000000000000000000000000A1

-S1130B500000000000000000000000000000000091

-S1130B600000000000000000000000000000000081

-S1130B700000000000000000000000000000000071

-S1130B800000000000000000000000000000000061

-S1130B900000000000000000000000000000000051

-S1130BA00000000000000000000000000000000041

-S1130BB00000000000000000000000000000000031

-S1130BC00000000000000000000000000000000021

-S1130BD00000000000000000000000000000000011

-S1130BE00000000000000000000000000000000001

-S1130BF000000000000000000000000000000000F1

-S1130C0000000000000000000000000000000000E0

-S1130C1000000000000000000000000000000000D0

-S1130C2000000000000000000000000000000000C0

-S1130C3000000000000000000000000000000000B0

-S1130C4000000000000000000000000000000000A0

-S1130C500000000000000000000000000000000090

-S1130C600000000000000000000000000000000080

-S1130C700000000000000000000000000000000070

-S1130C800000000000000000000000000000000060

-S1130C900000000000000000000000000000000050

-S1130CA00000000000000000000000000000000040

-S1130CB00000000000000000000000000000000030

-S1130CC00000000000000000000000000000000020

-S1130CD00000000000000000000000000000000010

-S1130CE00000000000000000000000000000000000

-S1130CF000000000000000000000000000000000F0

-S1130D0000000000000000000000000000000000DF

-S1130D1000000000000000000000000000000000CF

-S1130D2000000000000000000000000000000000BF

-S1130D3000000000000000000000000000000000AF

-S1130D40000000000000000000000000000000009F

-S1130D50000000000000000000000000000000008F

-S1130D60000000000000000000000000000000007F

-S1130D70000000000000000000000000000000006F

-S1130D80000000000000000000000000000000005F

-S1130D90000000000000000000000000000000004F

-S1130DA0000000000000000000000000000000003F

-S1130DB0000000000000000000000000000000002F

-S1130DC0000000000000000000000000000000001F

-S1130DD0000000000000000000000000000000000F

-S1130DE000000000000000000000000000000000FF

-S1130DF000000000000000000000000000000000EF

-S1130E0000000000000000000000000000000000DE

-S1130E1000000000000000000000000000000000CE

-S1130E2000000000000000000000000000000000BE

-S1130E3000000000000000000000000000000000AE

-S1130E40000000000000000000000000000000009E

-S1130E50000000000000000000000000000000008E

-S1130E60000000000000000000000000000000007E

-S1130E70000000000000000000000000000000006E

-S1130E80000000000000000000000000000000005E

-S1130E90000000000000000000000000000000004E

-S1130EA0000000000000000000000000000000003E

-S1130EB0000000000000000000000000000000002E

-S1130EC0000000000000000000000000000000001E

-S1130ED0000000000000000000000000000000000E

-S1130EE000000000000000000000000000000000FE

-S1130EF000000000000000000000000000000000EE

-S1130F0000000000000000000000000000000000DD

-S1130F1000000000000000000000000000000000CD

-S1130F2000000000000000000000000000000000BD

-S1130F3000000000000000000000000000000000AD

-S1130F40000000000000000000000000000000009D

-S1130F50000000000000000000000000000000008D

-S1130F60000000000000000000000000000000007D

-S1130F70000000000000000000000000000000006D

-S1130F80000000000000000000000000000000005D

-S1130F90000000000000000000000000000000004D

-S1130FA0000000000000000000000000000000003D

-S1130FB0000000000000000000000000000000002D

-S1130FC0000000000000000000000000000000001D

-S1130FD0000000000000000000000000000000000D

-S1130FE000000000000000000000000000000000FD

-S1130FF000000000000000000000000000000000ED

-S9030000FC

diff --git a/server/site_tests/enterprise_CFM_LogitechPtzUpdater/ptzpro2_154/ptzpro2_mcu2.bin b/server/site_tests/enterprise_CFM_LogitechPtzUpdater/ptzpro2_154/ptzpro2_mcu2.bin
deleted file mode 100644
index 731065e..0000000
--- a/server/site_tests/enterprise_CFM_LogitechPtzUpdater/ptzpro2_154/ptzpro2_mcu2.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_LogitechPtzUpdater/ptzpro2_154/ptzpro2_video.bin b/server/site_tests/enterprise_CFM_LogitechPtzUpdater/ptzpro2_154/ptzpro2_video.bin
deleted file mode 100644
index 245de6d..0000000
--- a/server/site_tests/enterprise_CFM_LogitechPtzUpdater/ptzpro2_154/ptzpro2_video.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/README.md b/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/README.md
deleted file mode 100644
index c1ab06f..0000000
--- a/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-This directory contains low-level tests for peripherals. The tests are
-implemented as ConfigurableCfmTests, i.e. the scenarios are defined in the
-control files. "Low level" means that neither Chrome nor the Meet app is
-involved; the devices are tested closer to the operating system.
diff --git a/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/control.atrus_power_cycle_stress b/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/control.atrus_power_cycle_stress
deleted file mode 100644
index ddefcb0..0000000
--- a/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/control.atrus_power_cycle_stress
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.server.cros.cfm.configurable_test.dsl import *
-from autotest_lib.server import utils
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_LowLevelPeripheralTest.atrus_power_cycle_stress"
-PURPOSE = "Stresses an Atrus device by repeatedly power cycling its USB port."
-CRITERIA = ("Atrus detectable as a USB device after each cycle "
-            "and no crash files appear")
-ATTRIBUTES = "suite:hotrod-remora"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Stress"
-TEST_TYPE = "server"
-DEPENDENCIES="atrus"
-
-DOC = """
-Repeatedly power cycle the connected Atrus and verify it appears
-as a USB device after each cycle. Verifies that no new crash files
-appear.
-"""
-
-cfm_test = CfmTest(
-    configuration=Configuration(skip_enrollment=True),
-    scenario=Scenario(
-        AssertUsbDevices([ATRUS]),
-        RepeatTimes(10, Scenario(
-            # Atruses take a while to boot, often close to 10 seconds.
-            # Increase the timeout to 30 to ensure we give it enough time.
-            PowerCycleUsbPort([ATRUS], wait_for_change_timeout=30),
-            AssertNoNewCrashes()
-        ))
-    ),
-)
-
-def run_test(machine):
-    job.run_test("enterprise_CFM_LowLevelPeripheralTest",
-                 cfm_test = cfm_test,
-                 tag = 'atrus_power_cycle_stress',
-                 host = hosts.create_host(machine))
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/control.huddly_power_cycle_stress b/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/control.huddly_power_cycle_stress
deleted file mode 100644
index f93ca5f..0000000
--- a/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/control.huddly_power_cycle_stress
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.server.cros.cfm.configurable_test.dsl import *
-from autotest_lib.server import utils
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_LowLevelPeripheralTest.huddly_power_cycle_stress"
-PURPOSE = "Stresses an Huddly device by repeatedly power cycling its USB port."
-CRITERIA = ("Huddly detectable as a USB device after each cycle "
-            "and no crash files appear")
-ATTRIBUTES = "suite:hotrod-remora"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Stress"
-TEST_TYPE = "server"
-DEPENDENCIES="huddly"
-
-DOC = """
-Repeatedly power cycle the connected Huddly and verify it appears
-as a USB device after each cycle. Verifies that no new crash files
-appear.
-"""
-
-cfm_test = CfmTest(
-    configuration=Configuration(skip_enrollment=True),
-    scenario=Scenario(
-        AssertUsbDevices([HUDDLY_GO]),
-        RepeatTimes(10, Scenario(
-            PowerCycleUsbPort([HUDDLY_GO]),
-            AssertNoNewCrashes()
-        ))
-    ),
-)
-
-def run_test(machine):
-    job.run_test("enterprise_CFM_LowLevelPeripheralTest",
-                 cfm_test = cfm_test,
-                 tag = 'huddly_power_cycle_stress',
-                 host = hosts.create_host(machine))
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/control.reboot_stress b/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/control.reboot_stress
deleted file mode 100644
index fbe0d78..0000000
--- a/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/control.reboot_stress
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.server.cros.cfm.configurable_test.dsl import *
-from autotest_lib.server import utils
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_LowLevelPeripheralTest.reboot_stress"
-PURPOSE = "Stresses peripheral devices by repeatedly rebooting the DUT."
-CRITERIA = ("Atrus,Mimo, and Huddly detectable as USB devices after reboot "
-            "and no crash files appear")
-ATTRIBUTES = "suite:hotrod-remora"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Stress"
-TEST_TYPE = "server"
-DEPENDENCIES="atrus,mimo,huddly"
-
-DOC = """
-Repeatedly reboots the DUT and verifies that the Atrus, Mimo, and Huddly
-devices can be enumerated after each reboot.
-"""
-
-cfm_test = CfmTest(
-    configuration=Configuration(skip_enrollment=True),
-    scenario=Scenario(
-        AssertUsbDevices([ATRUS]),
-        AssertUsbDevices(ALL_MIMO_DISPLAYS),
-        AssertUsbDevices([HUDDLY_GO]),
-        RepeatTimes(10, Scenario(
-            RebootDut(),
-            AssertUsbDevices([ATRUS]),
-            AssertUsbDevices(ALL_MIMO_DISPLAYS),
-            AssertUsbDevices([HUDDLY_GO]),
-            # TODO(crbug.com/814775): mosys-info always crashes on reboot,
-            # which is why we always have new crash files. Enable this check
-            # when that is fixed.
-            # AssertNoNewCrashes()
-        ))
-    ),
-)
-
-def run_test(machine):
-    job.run_test("enterprise_CFM_LowLevelPeripheralTest",
-                 cfm_test = cfm_test,
-                 tag = 'reboot_stress',
-                 host = hosts.create_host(machine))
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/enterprise_CFM_LowLevelPeripheralTest.py b/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/enterprise_CFM_LowLevelPeripheralTest.py
deleted file mode 100644
index 2ab493a..0000000
--- a/server/site_tests/enterprise_CFM_LowLevelPeripheralTest/enterprise_CFM_LowLevelPeripheralTest.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from autotest_lib.server.cros.cfm.configurable_test import configurable_cfm_test
-
-class enterprise_CFM_LowLevelPeripheralTest(
-        configurable_cfm_test.ConfigurableCfmTest):
-    """
-    Low level (Chrome OS level) tests of peripherals.
-    """
-    pass  # Base class contains all necessary code.
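The deleted class above is intentionally empty; all behaviour comes from the scenario DSL used in the control files. As an illustration of how another low-level scenario could be assembled from the same DSL verbs (AssertUsbDevices, RepeatTimes, PowerCycleUsbPort, AssertNoNewCrashes), here is a hypothetical Mimo-focused control-file fragment. Whether PowerCycleUsbPort accepts the full ALL_MIMO_DISPLAYS list is an assumption; the deleted controls only pass it single-element lists.

    from autotest_lib.server.cros.cfm.configurable_test.dsl import *

    # Hypothetical scenario: stress the Mimo displays the same way the deleted
    # atrus/huddly controls stress their devices.
    cfm_test = CfmTest(
        configuration=Configuration(skip_enrollment=True),
        scenario=Scenario(
            AssertUsbDevices(ALL_MIMO_DISPLAYS),
            RepeatTimes(10, Scenario(
                PowerCycleUsbPort(ALL_MIMO_DISPLAYS),
                AssertNoNewCrashes()
            ))
        ),
    )

    def run_test(machine):
        job.run_test("enterprise_CFM_LowLevelPeripheralTest",
                     cfm_test=cfm_test,
                     tag='mimo_power_cycle_stress',
                     host=hosts.create_host(machine))

    parallel_simple(run_test, machines)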
diff --git a/server/site_tests/enterprise_CFM_MeetAppSanity/control b/server/site_tests/enterprise_CFM_MeetAppSanity/control
deleted file mode 100644
index ab2cf97..0000000
--- a/server/site_tests/enterprise_CFM_MeetAppSanity/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "harpreet@chromium.org"
-NAME = "enterprise_CFM_MeetAppSanity"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:hotrod-remora"
-JOB_RETRIES = 3
-
-DOC = """
-This test clears the TPM if necessary, enrolls the device into CFM, and runs a
-series of sanity test actions in the Meet app.
-It clears the TPM at the end of the test run. Every time the TPM is cleared,
-the system is rebooted.
-"""
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_CFM_MeetAppSanity', host=host)
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_MeetAppSanity/control.partners b/server/site_tests/enterprise_CFM_MeetAppSanity/control.partners
deleted file mode 100644
index a25cfba..0000000
--- a/server/site_tests/enterprise_CFM_MeetAppSanity/control.partners
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "denniswu@chromium.org"
-NAME = "enterprise_CFM_MeetAppSanity.partners"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:bluestreak-partners"
-JOB_RETRIES = 3
-
-DOC = """
-This test clears the TPM if necessary, enrolls the device into CFM and runs a
-series of sanity test actions in the meet app.
-It clears the TPM at the end of the test run. Every time the TPM is cleared,
-the system is rebooted.
-This test bypasses servo check and is used by third party vendor PAL Acoustics.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-
-def run_test(machine):
-    host = hosts.create_host(machine, servo_args=None)
-    job.run_test('enterprise_CFM_MeetAppSanity', host=host, tag='partners')
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_MeetAppSanity/enterprise_CFM_MeetAppSanity.py b/server/site_tests/enterprise_CFM_MeetAppSanity/enterprise_CFM_MeetAppSanity.py
deleted file mode 100644
index eb9cb04..0000000
--- a/server/site_tests/enterprise_CFM_MeetAppSanity/enterprise_CFM_MeetAppSanity.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-
-from autotest_lib.server.cros.cfm import cfm_base_test
-
-LONG_TIMEOUT = 10
-
-
-class enterprise_CFM_MeetAppSanity(cfm_base_test.CfmBaseTest):
-    """
-    Basic sanity test for the Meet app, to be expanded to cover more cases
-    like the enterprise_CFM_Sanity test.
-    """
-    version = 1
-
-
-    def run_once(self):
-        """Runs the test."""
-        # Following triggers new Thor/Meetings APIs.
-        self.cfm_facade.wait_for_telemetry_commands()
-        self.cfm_facade.start_meeting_session()
-        time.sleep(LONG_TIMEOUT)
-        self.cfm_facade.end_meeting_session()
-
diff --git a/server/site_tests/enterprise_CFM_MeetingRoomScenario/README.md b/server/site_tests/enterprise_CFM_MeetingRoomScenario/README.md
deleted file mode 100644
index 40cd5da..0000000
--- a/server/site_tests/enterprise_CFM_MeetingRoomScenario/README.md
+++ /dev/null
@@ -1 +0,0 @@
-This directory contains automated tests covering typical meeting room scenarios.
diff --git a/server/site_tests/enterprise_CFM_MeetingRoomScenario/control.idle_in_meeting_with_bots b/server/site_tests/enterprise_CFM_MeetingRoomScenario/control.idle_in_meeting_with_bots
deleted file mode 100644
index 2c9598c..0000000
--- a/server/site_tests/enterprise_CFM_MeetingRoomScenario/control.idle_in_meeting_with_bots
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.server.cros.cfm.configurable_test.dsl import *
-
-AUTHOR = "dtosic@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_MeetingRoomScenario.idle_in_meeting_with_bots"
-PURPOSE = "Tests what happens when CfM stays idle in a meeting."
-CRITERIA = "No unexpected resource leaks."
-TIME = "LONG"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-
-DOC = """
-Stay idle in a meeting for a long time.
-"""
-
-DURATION_MIN = 20
-BOT_COUNT = 7
-
-cfm_test = CfmTest(
-    scenario=Scenario(
-        WaitForMeetingsLandingPage(),
-        CreateMeetingWithBots(BOT_COUNT, DURATION_MIN, muted=True),
-        JoinMeetingWithBots(),
-        StartPerfMetricsCollection(),
-        Sleep(DURATION_MIN * 60),
-        StopPerfMetricsCollection(),
-        UploadPerfMetrics(),
-    ),
-)
-
-def run_test(machine):
-    job.run_test("enterprise_CFM_MeetingRoomScenario",
-                 cfm_test = cfm_test,
-                 tag = 'idle_in_meeting_with_bots',
-                 host = hosts.create_host(machine))
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_MeetingRoomScenario/control.idle_on_landing_page b/server/site_tests/enterprise_CFM_MeetingRoomScenario/control.idle_on_landing_page
deleted file mode 100644
index 6cfd422..0000000
--- a/server/site_tests/enterprise_CFM_MeetingRoomScenario/control.idle_on_landing_page
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.server.cros.cfm.configurable_test.dsl import *
-from autotest_lib.server import utils
-
-AUTHOR = "malmnas@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_MeetingRoomScenario.idle_on_landing_page"
-PURPOSE = "Tests what happens when CfM stays idle on landing page."
-CRITERIA = "No unexpected resource leaks."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "server"
-
-DOC = """
-Stay idle on the landing page for a long time.
-"""
-
-cfm_test = CfmTest(
-    scenario=Scenario(
-        RebootDut(restart_chrome_for_cfm=True),
-        WaitForMeetingsLandingPage(),
-        StartPerfMetricsCollection(),
-        Sleep(15 * 60),
-        StopPerfMetricsCollection(),
-        UploadPerfMetrics(),
-    ),
-)
-
-def run_test(machine):
-    job.run_test("enterprise_CFM_MeetingRoomScenario",
-                 cfm_test = cfm_test,
-                 tag = 'idle_on_landing_page',
-                 host = hosts.create_host(machine))
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_MeetingRoomScenario/control.join_leave_meeting_with_bots b/server/site_tests/enterprise_CFM_MeetingRoomScenario/control.join_leave_meeting_with_bots
deleted file mode 100644
index f771c2a..0000000
--- a/server/site_tests/enterprise_CFM_MeetingRoomScenario/control.join_leave_meeting_with_bots
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.server.cros.cfm.configurable_test.dsl import *
-
-AUTHOR = "dtosic@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_MeetingRoomScenario.join_leave_meeting_with_bots"
-PURPOSE = "Tests what happens when CfM repeatedly joins and leaves a meeting."
-CRITERIA = "No unexpected resource leaks."
-TIME = "LONG"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-
-DOC = """
-Repeat for 6 cycles:
-    1) Join an existing meeting with 7 speaking bot participants.
-    2) Idle in the meeting for 5 minutes.
-    3) Leave the meeting.
-
-The total test duration is ~30 minutes.
-"""
-
-IN_CALL_IDLE_DURATION_SEC = 5 * 60
-IDLE_ON_LANDING_PAGE_SEC = 30
-REPETITION_COUNT = 6
-BOT_COUNT = 7
-BOT_TTL_MIN = ((IN_CALL_IDLE_DURATION_SEC + IDLE_ON_LANDING_PAGE_SEC) * REPETITION_COUNT + 60) / 60
-
-cfm_test = CfmTest(
-    scenario=Scenario(
-        CreateMeetingWithBots(BOT_COUNT, BOT_TTL_MIN, muted=False),
-        WaitForMeetingsLandingPage(),
-        StartPerfMetricsCollection(),
-        RepeatTimes(REPETITION_COUNT,
-            Scenario(
-                JoinMeetingWithBots(),
-                Sleep(IN_CALL_IDLE_DURATION_SEC),
-                LeaveMeeting(),
-                # Idle a bit after leaving a call.
-                # This is closer to real-user behaviour.
-                Sleep(IDLE_ON_LANDING_PAGE_SEC),
-        )),
-        StopPerfMetricsCollection(),
-        UploadPerfMetrics(),
-    ),
-)
-
-def run_test(machine):
-    job.run_test("enterprise_CFM_MeetingRoomScenario",
-                 cfm_test = cfm_test,
-                 tag = 'join_leave_meeting_with_bots',
-                 host = hosts.create_host(machine))
-
-parallel_simple(run_test, machines)
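As a quick check of the "~30 minutes" figure and the BOT_TTL_MIN expression in the
control file above, the bot time-to-live evaluates as follows (a standalone sketch;
floor division mirrors the Python 2 integer division used above):

    IN_CALL_IDLE_DURATION_SEC = 5 * 60   # 300 s in-call per cycle
    IDLE_ON_LANDING_PAGE_SEC = 30        # idle on the landing page between cycles
    REPETITION_COUNT = 6

    # (300 + 30) * 6 + 60 = 2040 seconds -> 34 minutes, so the bots outlive the
    # join/idle/leave cycles (~33 minutes of scripted activity) with a small margin.
    BOT_TTL_MIN = ((IN_CALL_IDLE_DURATION_SEC + IDLE_ON_LANDING_PAGE_SEC)
                   * REPETITION_COUNT + 60) // 60
    assert BOT_TTL_MIN == 34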
diff --git a/server/site_tests/enterprise_CFM_MeetingRoomScenario/enterprise_CFM_MeetingRoomScenario.py b/server/site_tests/enterprise_CFM_MeetingRoomScenario/enterprise_CFM_MeetingRoomScenario.py
deleted file mode 100644
index 4faa92d..0000000
--- a/server/site_tests/enterprise_CFM_MeetingRoomScenario/enterprise_CFM_MeetingRoomScenario.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from autotest_lib.server.cros.cfm.configurable_test import configurable_cfm_test
-
-class enterprise_CFM_MeetingRoomScenario(
-        configurable_cfm_test.ConfigurableCfmTest):
-    """
-    Meeting Room Scenario Test.
-    """
-    pass
diff --git a/server/site_tests/enterprise_CFM_MimoSanity/control.meet_app b/server/site_tests/enterprise_CFM_MimoSanity/control.meet_app
deleted file mode 100644
index 4f12faa..0000000
--- a/server/site_tests/enterprise_CFM_MimoSanity/control.meet_app
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mzhuo@chromium.org"
-NAME = "enterprise_CFM_MimoSanity.meet_app"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:hotrod-remora"
-DEPENDENCIES = "mimo, atrus, huddly"
-JOB_RETRIES = 3
-
-DOC = """
-This test consists of 3 steps that verify the MIMO is always available to
-the CFM. It verifies:
-  1.) that the MIMO is available after a CFM reboot
-  2.) that the MIMO is available after joining and leaving a meeting
-  3.) that the MIMO is available after being power cycled from the USB level
-
-"Available" means that the MIMO is visible at the system/USB level.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_test(machine):
-    repetitions = int(args_dict.get('repetitions', 3))
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_CFM_MimoSanity', host=host,
-                 repetitions=repetitions, is_meeting=True, tag='meet_app')
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_MimoSanity/enterprise_CFM_MimoSanity.py b/server/site_tests/enterprise_CFM_MimoSanity/enterprise_CFM_MimoSanity.py
deleted file mode 100644
index 2a579ff..0000000
--- a/server/site_tests/enterprise_CFM_MimoSanity/enterprise_CFM_MimoSanity.py
+++ /dev/null
@@ -1,164 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-import random
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.cfm import cfm_base_test
-from autotest_lib.client.common_lib.cros import power_cycle_usb_util
-from autotest_lib.client.common_lib.cros.cfm.usb import cfm_usb_devices
-from autotest_lib.client.common_lib.cros.cfm.usb import usb_device_collector
-
-
-LONG_TIMEOUT = 20
-SHORT_TIMEOUT = 5
-
-class enterprise_CFM_MimoSanity(cfm_base_test.CfmBaseTest):
-    """Tests that the following functionality works on CFM enrolled devices:
-           1. Verify the CfM has a camera, speaker and Mimo connected.
-           2. Verify all peripherals have the expected USB interfaces.
-           3. Verify the Mimo is present after rebooting the CfM.
-           4. Verify the Mimo comes back after power cycling it.
-    """
-    version = 1
-
-
-    def _power_cycle_mimo_device(self):
-        """Power Cycle Mimo device"""
-        logging.info('Plan to power cycle Mimo')
-        try:
-            power_cycle_usb_util.power_cycle_usb_vidpid(
-                self._host, self._board,
-                self._mimo.vendor_id, self._mimo.product_id)
-        except KeyError:
-            raise error.TestFail('Could not find target device: %s'
-                                 % self._mimo.product)
-
-
-    def _test_power_cycle_mimo(self):
-        """Power Cycle Mimo device for multiple times"""
-        self._power_cycle_mimo_device()
-        logging.info('Powercycle done for %s (%s)',
-                     self._mimo.product, self._mimo.vid_pid)
-        time.sleep(LONG_TIMEOUT)
-        self._kernel_usb_sanity_test()
-
-
-    def _check_peripherals(self):
-        """
-        Checks that the CfM has a camera, speaker and MiMO connected and
-        stores the corresponding device objects on the test instance.
-        """
-        atruses = self.device_collector.get_devices_by_spec(
-                cfm_usb_devices.ATRUS)
-        if not atruses:
-            raise error.TestFail('Expected to find connected speakers.')
-        self._atrus = atruses[0]
-
-        huddlys = self.device_collector.get_devices_by_spec(
-                cfm_usb_devices.HUDDLY_GO)
-        if not huddlys:
-            raise error.TestFail('Expected to find a connected camera.')
-        self._huddly = huddlys[0]
-
-
-        displays = self.device_collector.get_devices_by_spec(
-                *cfm_usb_devices.ALL_MIMO_DISPLAYS)
-        if not displays:
-            raise error.TestFail('Expected a MiMO display to be connected.')
-        if len(displays) != 1:
-            raise error.TestFail('Expected exactly one MiMO display to be '
-                                 'connected. Found %d' % len(displays))
-        self._mimo = displays[0]
-
-
-        controllers = self.device_collector.get_devices_by_spec(
-            cfm_usb_devices.MIMO_VUE_HID_TOUCH_CONTROLLER)
-        if not controllers:
-            raise error.TestFail('Expected a MiMO controller to be connected.')
-        if len(controllers) != 1:
-            raise error.TestFail('Expected exactly one MiMO controller to be '
-                                 'connected. Found %d' % len(controllers))
-        self._touch_controller = controllers[0]
-
-    def _check_device_interfaces_match_spec(self, spec):
-        for device in self.device_collector.get_devices_by_spec(spec):
-            if not device.interfaces_match_spec(spec):
-                raise error.TestFail(
-                    'Device %s has unexpected interfaces. '
-                    'Expected: %s. Actual: %s' % (device, spec.interfaces,
-                                                  device.interfaces))
-
-    def _kernel_usb_sanity_test(self):
-        """
-        Check connected camera, speaker and Mimo have expected usb interfaces.
-        """
-        self._check_device_interfaces_match_spec(self._atrus)
-        self._check_device_interfaces_match_spec(self._huddly)
-        self._check_device_interfaces_match_spec(self._mimo)
-        self._check_device_interfaces_match_spec(self._touch_controller)
-
-    def _test_reboot(self):
-        """Reboot testing for Mimo."""
-
-        boot_id = self._host.get_boot_id()
-        self._host.reboot()
-        self._host.wait_for_restart(old_boot_id=boot_id)
-        self.cfm_facade.restart_chrome_for_cfm()
-        time.sleep(SHORT_TIMEOUT)
-        self.cfm_facade.wait_for_telemetry_commands()
-        self._kernel_usb_sanity_test()
-
-
-    def _test_mimo_in_call(self):
-        """
-        Start a hangout session and end it after a random amount of time.
-
-        @raises error.TestFail if any of the checks fail.
-        """
-        logging.info('Joining meeting...')
-        if self._is_meeting:
-            self.cfm_facade.start_meeting_session()
-        else:
-            self.cfm_facade.start_new_hangout_session('mimo-sanity-test')
-        time.sleep(random.randrange(SHORT_TIMEOUT, LONG_TIMEOUT))
-
-        # Verify USB data in-call.
-        self._kernel_usb_sanity_test()
-
-        if self._is_meeting:
-            self.cfm_facade.end_meeting_session()
-        else:
-            self.cfm_facade.end_hangout_session()
-        logging.info('Session has ended.')
-
-        # Verify USB devices after leaving the call.
-        self._kernel_usb_sanity_test()
-        time.sleep(SHORT_TIMEOUT)
-
-
-    def run_once(self, repetitions, is_meeting):
-        """
-        Runs the test.
-
-        @param repetitions: number of test cycles to perform.
-        @param is_meeting: True for a Meet session, False for classic Hangouts.
-        """
-        # Remove 'board:' prefix.
-        self._board = self._host.get_board().split(':')[1]
-        self._is_meeting = is_meeting
-
-        self.device_collector = usb_device_collector.UsbDeviceCollector(
-            self._host)
-        self._check_peripherals()
-        self._kernel_usb_sanity_test()
-
-        self.cfm_facade.wait_for_telemetry_commands()
-
-        for i in xrange(1, repetitions + 1):
-            logging.info('Running test cycle %d/%d', i, repetitions)
-            self._test_reboot()
-            self._test_mimo_in_call()
-            self._test_power_cycle_mimo()
diff --git a/server/site_tests/enterprise_CFM_MimoUpdater/control b/server/site_tests/enterprise_CFM_MimoUpdater/control
deleted file mode 100644
index 11afe09..0000000
--- a/server/site_tests/enterprise_CFM_MimoUpdater/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "yrizk@chromium.org"
-NAME = "enterprise_CFM_MimoUpdater"
-TEST_TYPE = "server"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-PURPOSE = "Server-side test for sanity check on mimo-updater."
-CRITERIA = "Fails if the Autotest framework doesn't work as expected."
-ATTRIBUTES = "suite:hotrod-remora"
-
-DOC = """
-This test verifies that the major pieces of the MiMO DisplayLink firmware
-updater work correctly: namely, that the updater correctly determines
-whether an update is needed and then performs the update.
-"""
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test("enterprise_CFM_MimoUpdater", host=host)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_MimoUpdater/enterprise_CFM_MimoUpdater.py b/server/site_tests/enterprise_CFM_MimoUpdater/enterprise_CFM_MimoUpdater.py
deleted file mode 100644
index dddd33a..0000000
--- a/server/site_tests/enterprise_CFM_MimoUpdater/enterprise_CFM_MimoUpdater.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Autotest for mimo-updater."""
-
-from autotest_lib.server import test
-from autotest_lib.client.common_lib import error
-
-class enterprise_CFM_MimoUpdater(test.test):
-    """
-    Tests that mimo-updater runs and performs a firmware version check.
-    mimo-updater is also responsible for determining whether a firmware
-    update is needed; however, those parts are currently untestable.
-    """
-    version = 1
-
-    def run_once(self, host):
-        """Top level function that is called by autoserv."""
-        host.run("rm --force /var/log/messages")
-        host.reboot()
-        host.wait_up()
-        # grep's exit status is 0 if the pattern matched, 1 otherwise.
-        # utils.grep() doesn't use extended regexps, so run grep -E on the DUT.
-        cmd = 'grep -E "Firmware Version: 0x[0-9]+" /var/log/messages'
-        output = host.run(cmd, ignore_status=True)
-        if output.stderr:
-            raise error.TestFail(output.stderr)
-
-
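The comment in run_once() above notes that grep exits 0 on a match and 1 otherwise,
yet only stderr is inspected, so a missing "Firmware Version" line would not fail the
test. A minimal sketch of a stricter variant, assuming the usual autotest CmdResult
fields (exit_status, stdout, stderr); the helper name is hypothetical:

    from autotest_lib.client.common_lib import error

    # Hypothetical stricter check: fail on any stderr output and also when grep
    # exits non-zero, i.e. when no firmware version line was logged after reboot.
    def check_firmware_version_logged(host):
        cmd = 'grep -E "Firmware Version: 0x[0-9]+" /var/log/messages'
        result = host.run(cmd, ignore_status=True)
        if result.stderr:
            raise error.TestFail(result.stderr)
        if result.exit_status != 0:
            raise error.TestFail('No "Firmware Version" line in /var/log/messages')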
diff --git a/server/site_tests/enterprise_CFM_PTZStress/control.PTZPro2 b/server/site_tests/enterprise_CFM_PTZStress/control.PTZPro2
deleted file mode 100644
index 7202536..0000000
--- a/server/site_tests/enterprise_CFM_PTZStress/control.PTZPro2
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-from autotest_lib.client.common_lib.cros.cfm.usb import cfm_usb_devices
-
-AUTHOR = "denniswu@chromium.org"
-NAME = "enterprise_CFM_PTZStress.PTZPro2"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:hotrod"
-DEPENDENCIES = "meet_app, ptzpro2"
-JOB_RETRIES = 1
-
-DOC = """
-Test scenario:
-    1. Enroll the device and start a meeting.
-    2. During the meeting, PTZ the camera according to the control file.
-Verify the following functionalities:
-    1. Camera is enumerated.
-    2. Verify PTZ signals are sent to the camera.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-test_config = {
-    'camera': cfm_usb_devices.PTZ_PRO_2_CAMERA,
-    'repeat': 10,
-    'motion_duration': 2,
-    'usb_trace_path': '/tmp/camera.mon.out'
-}
-
-ptz_motion_sequence = ['panLeft', 'panStop', 'tiltUp', 'tiltStop', 'zoomIn', 'resetPosition']
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_CFM_PTZStress',
-        host=host,
-        test_config=test_config,
-        ptz_motion_sequence=ptz_motion_sequence)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_PTZStress/enterprise_CFM_PTZStress.py b/server/site_tests/enterprise_CFM_PTZStress/enterprise_CFM_PTZStress.py
deleted file mode 100644
index 1d8f334..0000000
--- a/server/site_tests/enterprise_CFM_PTZStress/enterprise_CFM_PTZStress.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-import re
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.cfm.usb import usb_device_collector
-from autotest_lib.server.cros.cfm import cfm_base_test
-
-class enterprise_CFM_PTZStress(cfm_base_test.CfmBaseTest):
-    """
-    Executes the following tests on CFM devices:
-
-       1. Enroll the device and join a meeting.
-       2. During the meeting, PTZ the camera according to the control file.
-    Verify the following functionalities:
-
-       1. Camera is enumerated.
-       2. Verify PTZ signal is sent to the camera.
-    """
-    version = 1
-
-    def check_camera_enumeration(self, camera_name):
-        """
-        Checks that exactly one camera matching the given spec is connected
-        to the DUT and returns the USB bus number it is on.
-        @param camera_name: the USB device spec of the camera under test.
-        @returns The USB bus number the camera is on.
-        @raises error.TestFail if the camera fails to enumerate.
-        """
-        collector = usb_device_collector.UsbDeviceCollector(self._host)
-        camera = collector.get_devices_by_spec(camera_name)
-        if len(camera) == 1:
-            bus_number = camera[0].bus
-            logging.info('Camera enumerated: {} on bus {}'.
-                format(camera_name,bus_number))
-            return bus_number
-        raise error.TestFail('Camera failed to enumerate')
-
-
-    def dump_usbmon_traffic(self, bus, usb_trace_path):
-        """
-        Start usbmon with specified bus and dump the traffic to file
-        @param bus bus number the camera is on
-        @param usb_trace_path the USB traces file path
-        """
-        cmd = ('cat /sys/kernel/debug/usb/usbmon/{}u > {} &'.
-            format(bus, usb_trace_path))
-        try:
-            self._host.run(cmd, ignore_status = True)
-        except Exception as e:
-            logging.info('Failed to run cmd {}. Error: {}'.
-                format(cmd, str(e)))
-        logging.info('Usbmon traffic dumped to {}'.format(usb_trace_path))
-
-
-    def check_usbmon_traffic(self, usb_trace_path):
-        """
-        Check traces
-        @param usb_trace_path the USB traces file path
-        """
-        cmd = 'cat {}'.format(usb_trace_path)
-        try:
-            traces = self._host.run_output(cmd, ignore_status = True)
-            if re.search('C Ii', traces) and re.search('S Ii', traces):
-                logging.info('PTZ signal verified')
-            else:
-                raise error.TestFail('PTZ signal did not go through')
-        except Exception as e:
-            logging.info('Failed to run cmd {}. Error: {}'.format(cmd, str(e)))
-
-
-    def clean_usb_traces_file(self, usb_trace_path):
-        """
-        Clean traces file
-        @param usb_trace_path the USB traces file path
-        """
-        cmd = ('rm {}'.format(usb_trace_path))
-        try:
-            self._host.run(cmd, ignore_status = True)
-        except Exception as e:
-            raise error.TestFail('Failed to run cmd {}. Error: {}'.format(cmd, str(e)))
-        logging.info('Cleaned up traces in {}'.format(usb_trace_path))
-
-
-    def run_once(self, host, test_config, ptz_motion_sequence):
-        """Runs the test."""
-        self.cfm_facade.wait_for_telemetry_commands()
-        for loop_no in xrange(1, test_config['repeat'] + 1):
-            logging.info('Test Loop : {}'.format(loop_no))
-            bus = self.check_camera_enumeration(test_config['camera'])
-            self.cfm_facade.start_meeting_session()
-            self.dump_usbmon_traffic(bus, test_config['usb_trace_path'])
-            for motion in ptz_motion_sequence:
-                self.cfm_facade.move_camera(motion)
-                time.sleep(test_config['motion_duration'])
-            self.check_usbmon_traffic(test_config['usb_trace_path'])
-            self.cfm_facade.end_meeting_session()
-            self.clean_usb_traces_file(test_config['usb_trace_path'])
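For reference on the 'S Ii' / 'C Ii' patterns checked above: in usbmon's text format
each line records a URB event, where 'S' marks a submission, 'C' a completion
("callback"), and 'Ii' an interrupt-IN transfer, so the check passes when both
submissions and completions of interrupt-IN traffic appear in the captured trace.
A small standalone sketch of the same pattern check (the helper name is illustrative
only):

    import re

    # Returns True when a usbmon text trace contains both submitted and completed
    # interrupt-IN URBs, which is what check_usbmon_traffic() above looks for.
    def has_interrupt_in_traffic(trace_text):
        return (re.search(r'S Ii', trace_text) is not None
                and re.search(r'C Ii', trace_text) is not None)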
diff --git a/server/site_tests/enterprise_CFM_Perf/control.meet_app b/server/site_tests/enterprise_CFM_Perf/control.meet_app
deleted file mode 100644
index 6c4cd2d..0000000
--- a/server/site_tests/enterprise_CFM_Perf/control.meet_app
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "dtosic@chromium.org"
-NAME = "enterprise_CFM_Perf.meet_app"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-JOB_RETRIES = 3
-
-DOC = """
-This test enrolls a ChromeOS device into hotrod mode running the Meet app
-and captures device usage data including cpu, memory, temperature and JMI
-logs and uploads it to Google Cloud Storage as part of the test logs.
-"""
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_CFM_Perf', host=host, tag='meet_app')
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_Perf/enterprise_CFM_Perf.py b/server/site_tests/enterprise_CFM_Perf/enterprise_CFM_Perf.py
deleted file mode 100644
index 22f9361..0000000
--- a/server/site_tests/enterprise_CFM_Perf/enterprise_CFM_Perf.py
+++ /dev/null
@@ -1,166 +0,0 @@
-# Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import file_utils
-from autotest_lib.client.common_lib.cros import system_metrics_collector
-from autotest_lib.server.cros.cfm import cfm_base_test
-from autotest_lib.server.cros.cfm.utils import bond_http_api
-from autotest_lib.server.cros.cfm.utils import perf_metrics_collector
-
-
-_BOT_PARTICIPANTS_COUNT = 10
-_TOTAL_TEST_DURATION_SECONDS = 15 * 60 # 15 minutes
-
-_DOWNLOAD_BASE = ('http://commondatastorage.googleapis.com/'
-                  'chromiumos-test-assets-public/crowd/')
-_VIDEO_NAME = 'crowd720_25frames.y4m'
-
-
-class ParticipantCountMetric(system_metrics_collector.Metric):
-    """
-    Metric for getting the current participant count in a call.
-    """
-    def __init__(self, cfm_facade):
-        """
-        Initializes with a cfm_facade.
-
-        @param cfm_facade object having a get_participant_count() method.
-        """
-        super(ParticipantCountMetric, self).__init__(
-                'participant_count',
-                'participants',
-                higher_is_better=True)
-        self.cfm_facade = cfm_facade
-
-    def collect_metric(self):
-        """
-        Collects one metric value.
-        """
-        self.values.append(self.cfm_facade.get_participant_count())
-
-class enterprise_CFM_Perf(cfm_base_test.CfmBaseTest):
-    """This is a server test which clears the device TPM and runs the
-    enterprise_RemoraRequisition client test to enroll the device into hotrod
-    mode. After enrollment is successful, it collects and logs cpu, memory and
-    temperature data from the device under test."""
-    version = 1
-
-    def _download_test_video(self):
-        """
-        Downloads the test video to a temporary directory on host.
-
-        @return the remote path of the downloaded video.
-        """
-        url = _DOWNLOAD_BASE + _VIDEO_NAME
-        local_path = os.path.join(self.tmpdir, _VIDEO_NAME)
-        logging.info('Downloading %s to %s', url, local_path)
-        file_utils.download_file(url, local_path)
-        # The directory returned by get_tmp_dir() is automatically deleted.
-        tmp_dir = self._host.get_tmp_dir()
-        remote_path = os.path.join(tmp_dir, _VIDEO_NAME)
-        # The temporary directory has mode 700 by default. Chrome runs with a
-        # different user so cannot access it unless we change the permissions.
-        logging.info('chmodding tmpdir %s to 755', tmp_dir)
-        self._host.run('chmod 755 %s' % tmp_dir)
-        logging.info('Sending %s to %s on DUT', local_path, remote_path)
-        self._host.send_file(local_path, remote_path)
-        os.remove(local_path)
-        return remote_path
-
-    def initialize(self, host, run_test_only=False, use_bond=True):
-        """
-        Initializes common test properties.
-
-        @param host: a host object representing the DUT.
-        @param run_test_only: Whether to run only the test or to also perform
-            deprovisioning, enrollment and system reboot. See cfm_base_test.
-        @param use_bond: Whether to use BonD to add bots to the meeting. Useful
-            for local testing.
-        """
-        super(enterprise_CFM_Perf, self).initialize(host, run_test_only)
-        self._host = host
-        self._use_bond = use_bond
-        system_facade = self._facade_factory.create_system_facade()
-        self._perf_metrics_collector = (
-            perf_metrics_collector.PerfMetricsCollector(
-                system_facade,
-                self.cfm_facade,
-                self.output_perf_value,
-                additional_system_metrics=[
-                    ParticipantCountMetric(self.cfm_facade),
-                ]))
-
-    def setup(self):
-        """
-        Download video for fake media and restart Chrome with fake media flags.
-
-        This runs after initialize().
-        """
-        super(enterprise_CFM_Perf, self).setup()
-        remote_video_path = self._download_test_video()
-        # Restart chrome with fake media flags.
-        extra_chrome_args=[
-                '--use-fake-device-for-media-stream',
-                '--use-file-for-fake-video-capture=%s' % remote_video_path
-        ]
-        self.cfm_facade.restart_chrome_for_cfm(extra_chrome_args)
-        if self._use_bond:
-            self.bond = bond_http_api.BondHttpApi()
-
-    def run_once(self):
-        """Joins a meeting and collects perf data."""
-        self.cfm_facade.wait_for_meetings_landing_page()
-
-        if self._use_bond:
-            meeting_code = self.bond.CreateConference()
-            logging.info('Started meeting "%s"', meeting_code)
-            self._add_bots(_BOT_PARTICIPANTS_COUNT, meeting_code)
-            self.cfm_facade.join_meeting_session(meeting_code)
-        else:
-            self.cfm_facade.start_meeting_session()
-
-        self.cfm_facade.unmute_mic()
-
-        self._perf_metrics_collector.start()
-        time.sleep(_TOTAL_TEST_DURATION_SECONDS)
-        self._perf_metrics_collector.stop()
-
-        self.cfm_facade.end_meeting_session()
-        self._perf_metrics_collector.upload_metrics()
-
-    def _add_bots(self, bot_count, meeting_code):
-        """Adds bots to a meeting and configures audio and pinning settings.
-
-        If we were not able to start enough bots, end the test run.
-        """
-        botIds = self.bond.AddBotsRequest(
-            meeting_code,
-            bot_count,
-            _TOTAL_TEST_DURATION_SECONDS + 30)
-
-        if len(botIds) < bot_count:
-            # If we did not manage to start enough bots, free up the
-            # resources and end the test run.
-            self.bond.ExecuteScript('@all leave', meeting_code)
-            raise error.TestNAError("Not enough bot resources.\n"
-                "Wanted: %d. Started: %d" % (bot_count, len(botIds)))
-
-        # Configure philosopher audio for one bot.
-        self._start_philosopher_audio(botIds[0], meeting_code)
-
-        # Pin the CfM from one bot so the device always sends HD.
-        self.bond.ExecuteScript(
-            '@b%d pin_participant_by_name "Unknown"' % botIds[0], meeting_code)
-        # Explicitly request HD video from the CfM.
-        self.bond.ExecuteScript(
-            '@b%d set_resolution 1280 720' % botIds[0], meeting_code)
-
-    def _start_philosopher_audio(self, bot_id, meeting_code):
-        self.bond.ExecuteScript(
-            '@b%d start_philosopher_audio' % bot_id, meeting_code)
diff --git a/server/site_tests/enterprise_CFM_PeripheralQualification/control.power_cycle b/server/site_tests/enterprise_CFM_PeripheralQualification/control.power_cycle
deleted file mode 100644
index fff088c..0000000
--- a/server/site_tests/enterprise_CFM_PeripheralQualification/control.power_cycle
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib.cros.cfm.usb import usb_device_spec
-from autotest_lib.server.cros.cfm.configurable_test.dsl import *
-from autotest_lib.server import utils
-
-AUTHOR = "kerl@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_PeripheralQualification.power_cycle"
-PURPOSE = "Stresses a peripheral device by repeatedly power cycling its USB port."
-CRITERIA = ("The device is detectable as a USB device after each cycle "
-            "and no new crash files appear")
-TIME = "MEDIUM"
-TEST_CATEGORY = "Stress"
-TEST_TYPE = "server"
-
-DOC = """
-Repeatedly power cycle the connected device and verify it appears
-as a USB device after each cycle. Verifies that no new crash files
-appear.
-
-The test requires a vid_pid argument (e.g. 18d1:8001) that determines which
-device to power cycle. This enables testing custom devices from Moblab or from
-a local workstation.
-
-In Moblab, add vid_pid=<vid:pid> under Advanced Options -> Args.
-
-Locally, add arguments with --args when running test_that. Example:
-test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ \
-    --board=guado --args 'servo_host= vid_pid=18d1:8001' \
-    chromeos6-row22-rack13-host7 enterprise_CFM_PeripheralQualification.power_cycle
-"""
-
-args_dict = utils.args_to_dict(args)
-vid, pid = args_dict['vid_pid'].split(':')
-# The product name is only informational; any value works.
-product = args_dict.get('product', 'customProduct')
-# Interfaces are only needed when they are verified; leave the list empty here.
-interfaces = []
-device = usb_device_spec.UsbDeviceSpec(vid, pid, product, interfaces)
-repeat = int(args_dict.get('repeat', 10))
-
-cfm_test = CfmTest(
-    configuration=Configuration(skip_enrollment=True),
-    scenario=Scenario(
-        AssertUsbDevices([device]),
-        RepeatTimes(repeat, Scenario(
-            PowerCycleUsbPort([device]),
-            AssertNoNewCrashes()
-        ))
-    ),
-)
-
-def run_test(machine):
-    job.run_test("enterprise_CFM_PeripheralQualification",
-                 cfm_test = cfm_test,
-                 tag = 'power_cycle',
-                 host = hosts.create_host(machine))
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_PeripheralQualification/control.reboot_stress b/server/site_tests/enterprise_CFM_PeripheralQualification/control.reboot_stress
deleted file mode 100644
index 9507cc5..0000000
--- a/server/site_tests/enterprise_CFM_PeripheralQualification/control.reboot_stress
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib.cros.cfm.usb import usb_device_spec
-from autotest_lib.server.cros.cfm.configurable_test.dsl import *
-from autotest_lib.server import utils
-
-AUTHOR = "wilhelmsson@google.com, kerl@google.com, chromeos-meetings@google.com"
-NAME = "enterprise_CFM_PeripheralQualification.reboot_stress"
-PURPOSE = "Stresses a peripheral device by repeatedly rebooting the CfM."
-CRITERIA = "The device is detectable as a USB device after reboot"
-TIME = "LONG"
-TEST_CATEGORY = "Stress"
-TEST_TYPE = "server"
-
-DOC = """
-Repeatedly reboots the CfM and verifies that the device can be enumerated
-after each reboot.
-
-The test requires a vid_pid argument (e.g. 18d1:8001) that determines which
-device to detect after reboot. This enables testing custom devices from
-Moblab or from a local workstation.
-"""
-
-args_dict = utils.args_to_dict(args)
-vid, pid = args_dict['vid_pid'].split(':')
-# The product name is only informational; any value works.
-product = args_dict.get('product', 'customProduct')
-# Interfaces are only needed when they are verified; leave the list empty here.
-interfaces = []
-device = usb_device_spec.UsbDeviceSpec(vid, pid, product, interfaces)
-repeat = int(args_dict.get('repeat', 10))
-
-cfm_test = CfmTest(
-    configuration=Configuration(skip_enrollment=True),
-    scenario=Scenario(
-        AssertUsbDevices([device]),
-        RepeatTimes(repeat, Scenario(
-            RebootDut(),
-            AssertUsbDevices([device])
-            # TODO(crbug.com/814775): mosys-info always crashes on reboot,
-            # which is why we always have new crash files. Enable this check
-            # when that is fixed.
-            # AssertNoNewCrashes()
-        ))
-    ),
-)
-
-def run_test(machine):
-    job.run_test("enterprise_CFM_PeripheralQualification",
-                 cfm_test = cfm_test,
-                 tag = 'reboot_stress',
-                 host = hosts.create_host(machine))
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_PeripheralQualification/enterprise_CFM_PeripheralQualification.py b/server/site_tests/enterprise_CFM_PeripheralQualification/enterprise_CFM_PeripheralQualification.py
deleted file mode 100644
index c0f755d..0000000
--- a/server/site_tests/enterprise_CFM_PeripheralQualification/enterprise_CFM_PeripheralQualification.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from autotest_lib.server.cros.cfm.configurable_test import configurable_cfm_test
-
-class enterprise_CFM_PeripheralQualification(
-        configurable_cfm_test.ConfigurableCfmTest):
-    """
-    Qualification tests for peripherals.
-    """
-    pass  # Base class contains all necessary code.
diff --git a/server/site_tests/enterprise_CFM_RebootStress/control.meet_app b/server/site_tests/enterprise_CFM_RebootStress/control.meet_app
deleted file mode 100644
index 8e83b0c..0000000
--- a/server/site_tests/enterprise_CFM_RebootStress/control.meet_app
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "harpreet@chromium.org"
-NAME = "enterprise_CFM_RebootStress.meet_app"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:hotrod-remora"
-JOB_RETRIES = 3
-
-DOC = """
-This test clears the TPM, enrolls the device into CFM, launches the Meet app and
-stress tests it by rebooting the device multiple times using the Chrome runtime
-restart() API. It clears the TPM at the end of the test to reset the device.
-"""
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_CFM_RebootStress',
-                 host=host,
-                 reboot_cycles=25,
-                 is_meeting=True,
-                 tag='meet_app')
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_RebootStress/control.partners b/server/site_tests/enterprise_CFM_RebootStress/control.partners
deleted file mode 100644
index dea3c91..0000000
--- a/server/site_tests/enterprise_CFM_RebootStress/control.partners
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "denniswu@chromium.org"
-NAME = "enterprise_CFM_RebootStress.partners"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:bluestreak-partners"
-JOB_RETRIES = 3
-
-DOC = """
-This test clears the TPM, enrolls the device into CFM, launches the Hangout app
-and stress tests it by rebooting the device multiple times using the Chrome runtime
-restart() API. It clears the TPM at the end of the test run to reset the device.
-This test bypasses servo check and is used by third party vendor PAL Acoustics.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-
-def run_test(machine):
-    host = hosts.create_host(machine, servo_args=None)
-    job.run_test('enterprise_CFM_RebootStress',
-                 host=host,
-                 reboot_cycles=100,
-                 is_meeting=False,
-                 tag='partners')
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_RebootStress/enterprise_CFM_RebootStress.py b/server/site_tests/enterprise_CFM_RebootStress/enterprise_CFM_RebootStress.py
deleted file mode 100644
index 8d6dd4f..0000000
--- a/server/site_tests/enterprise_CFM_RebootStress/enterprise_CFM_RebootStress.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.server.cros.cfm import cfm_base_test
-
-
-class enterprise_CFM_RebootStress(cfm_base_test.CfmBaseTest):
-    """
-    Stress tests the CFM enrolled device by rebooting it multiple times using
-    Chrome runtime restart() API and ensuring the packaged app launches as
-    expected after every reboot.
-    """
-    version = 1
-
-
-    def run_once(self, reboot_cycles, is_meeting):
-        """
-        Runs the test.
-
-        @param reboot_cycles: The number of times to reboot the DUT.
-        @param is_meeting: True for Hangouts Meet, False for classic Hangouts.
-        """
-        logging.info("Performing %d reboot cycles in total...", reboot_cycles)
-        for cycle in range(reboot_cycles):
-            logging.info("Started reboot cycle %d.", cycle)
-            boot_id = self._host.get_boot_id()
-            self.cfm_facade.wait_for_telemetry_commands()
-            self.cfm_facade.reboot_device_with_chrome_api()
-            self._host.wait_for_restart(old_boot_id=boot_id)
-            self.cfm_facade.restart_chrome_for_cfm()
-
diff --git a/server/site_tests/enterprise_CFM_Sanity/control.partners b/server/site_tests/enterprise_CFM_Sanity/control.partners
deleted file mode 100644
index 94fc2c2..0000000
--- a/server/site_tests/enterprise_CFM_Sanity/control.partners
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "denniswu@chromium.org"
-NAME = "enterprise_CFM_Sanity.partners"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-JOB_RETRIES = 3
-
-DOC = """
-This test clears the TPM if necessary, enrolls the device as a remora device
-and runs a series of test actions and performs verifications to make sure CFM
-enrolled devices behave as expected. It clears the TPM at the end of the test
-run. Every time the TPM is cleared, the system is rebooted.
-This test bypasses servo check and is used by third party vendor PAL Acoustics.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-
-def run_test(machine):
-    host = hosts.create_host(machine, servo_args=None)
-    job.run_test('enterprise_CFM_Sanity', host=host, tag='partners')
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_Sanity/enterprise_CFM_Sanity.py b/server/site_tests/enterprise_CFM_Sanity/enterprise_CFM_Sanity.py
deleted file mode 100644
index 70b7720..0000000
--- a/server/site_tests/enterprise_CFM_Sanity/enterprise_CFM_Sanity.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import datetime
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.cfm import cfm_base_test
-
-
-LONG_TIMEOUT = 10
-SHORT_TIMEOUT = 5
-FAILED_TEST_LIST = list()
-
-
-class enterprise_CFM_Sanity(cfm_base_test.CfmBaseTest):
-    """Tests that the following functionality works on CFM enrolled devices:
-           1. Is able to reach the oobe screen
-           2. Is able to start a hangout session
-           3. Should not be able to start a hangout session if already in a
-              session.
-           4. Exits hangout session successfully.
-           5. Should be able to start a hangout session if currently not in
-              a session.
-           6. Is able to detect attached peripherals: mic, speaker, camera.
-           7. Is able to run hotrod diagnostics.
-    """
-    version = 1
-
-    def _hangouts_sanity_test(self):
-        """Execute a series of test actions and perform verifications.
-
-        @raises error.TestFail if any of the checks fail.
-        """
-        current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
-        hangout_name = 'auto-hangout-' + current_time
-
-        if self.cfm_facade.is_ready_to_start_hangout_session():
-            self.cfm_facade.start_new_hangout_session(hangout_name)
-
-        if not self.cfm_facade.is_in_hangout_session():
-            raise error.TestFail('CFM was not able to start hangout session.')
-
-        time.sleep(LONG_TIMEOUT)
-        self.cfm_facade.unmute_mic()
-
-        if self.cfm_facade.is_ready_to_start_hangout_session():
-            raise error.TestFail('Is already in hangout session and should not '
-                                 'be able to start another session.')
-
-        if self.cfm_facade.is_oobe_start_page():
-            raise error.TestFail('CFM should be in hangout session and not on '
-                                 'oobe screen.')
-
-        time.sleep(SHORT_TIMEOUT)
-        self.cfm_facade.mute_mic()
-        time.sleep(SHORT_TIMEOUT)
-        self.cfm_facade.end_hangout_session()
-
-        if self.cfm_facade.is_in_hangout_session():
-            raise error.TestFail('CFM should not be in hangout session.')
-
-        if self.cfm_facade.is_oobe_start_page():
-            raise error.TestFail('CFM should not be on oobe screen.')
-
-        if not self.cfm_facade.is_ready_to_start_hangout_session():
-            raise error.TestFail('CFM should be in a ready state to start a '
-                                 'hangout session.')
-
-
-    def _peripherals_sanity_test(self):
-        """Checks for connected peripherals."""
-        self.cfm_facade.wait_for_telemetry_commands()
-
-        time.sleep(SHORT_TIMEOUT)
-
-        if not self.cfm_facade.get_mic_devices():
-            FAILED_TEST_LIST.append('No mic detected')
-
-        if not self.cfm_facade.get_speaker_devices():
-            FAILED_TEST_LIST.append('No speaker detected')
-
-        if not self.cfm_facade.get_camera_devices():
-            FAILED_TEST_LIST.append('No camera detected')
-
-        if not self.cfm_facade.get_preferred_mic():
-            FAILED_TEST_LIST.append('No preferred mic')
-
-        if not self.cfm_facade.get_preferred_speaker():
-            FAILED_TEST_LIST.append('No preferred speaker')
-
-        if not self.cfm_facade.get_preferred_camera():
-            FAILED_TEST_LIST.append('No preferred camera')
-
-
-    def _diagnostics_sanity_test(self):
-        """Runs hotrod diagnostics and checks status.
-
-        @raise error.TestFail if diagnostic checks fail.
-        """
-        self.cfm_facade.wait_for_telemetry_commands()
-
-        if self.cfm_facade.is_diagnostic_run_in_progress():
-            raise error.TestFail('Diagnostics should not be running.')
-
-        self.cfm_facade.run_diagnostics()
-
-        if not self.cfm_facade.is_diagnostic_run_in_progress():
-            raise error.TestFail('Diagnostics should be running.')
-
-        diag_results = self.cfm_facade.get_last_diagnostics_results()
-
-        if diag_results['status'] != 'success':
-            logging.debug(diag_results['childrens'])
-            FAILED_TEST_LIST.append('Diagnostics failed')
-
-
-    def run_once(self):
-        """Runs the test."""
-        self.cfm_facade.wait_for_telemetry_commands()
-        self._hangouts_sanity_test()
-        self._peripherals_sanity_test()
-        self._diagnostics_sanity_test()
-
-        if FAILED_TEST_LIST:
-            raise error.TestFail('Test failed because of following reasons: %s'
-                                 % ', '.join(map(str, FAILED_TEST_LIST)))
diff --git a/server/site_tests/enterprise_CFM_SiSFwUpdater/FW_Watchdog_0110.bin b/server/site_tests/enterprise_CFM_SiSFwUpdater/FW_Watchdog_0110.bin
deleted file mode 100644
index 2c9a44f..0000000
--- a/server/site_tests/enterprise_CFM_SiSFwUpdater/FW_Watchdog_0110.bin
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/enterprise_CFM_SiSFwUpdater/control b/server/site_tests/enterprise_CFM_SiSFwUpdater/control
deleted file mode 100644
index d086eee..0000000
--- a/server/site_tests/enterprise_CFM_SiSFwUpdater/control
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "frankhu@chromium.org"
-NAME = "enterprise_CFM_SiSFwUpdater"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-JOB_RETRIES = 3
-DEPENDENCIES = "mimo"
-
-DOC = """
-This test first flashes the older version of the SiS firmware to the Mimo and
-then power cycles the corresponding USB port. This should trigger the udev rule
-that invokes the SiS FW updater to flash the latest FW to the Mimo. This is
-validated by running the updater again and checking the log.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_test(machine):
-    host = hosts.create_host(machine, servo_args=None)
-    job.run_test(NAME, host=host)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_SiSFwUpdater/enterprise_CFM_SiSFwUpdater.py b/server/site_tests/enterprise_CFM_SiSFwUpdater/enterprise_CFM_SiSFwUpdater.py
deleted file mode 100644
index f32d97f..0000000
--- a/server/site_tests/enterprise_CFM_SiSFwUpdater/enterprise_CFM_SiSFwUpdater.py
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Auto test for SiS firmware updater functionality and udev rule."""
-
-from __future__ import print_function
-import logging
-import os
-import re
-import time
-
-from autotest_lib.client.common_lib.cros import power_cycle_usb_util
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-POWER_CYCLE_WAIT_TIME = 1   # seconds
-UPDATER_WAIT_TIME = 80      # seconds
-# This is the GPIO on guado.
-FRONT_LEFT_USB_GPIO = 218
-
-
-class enterprise_CFM_SiSFwUpdater(test.test):
-    """
-    SiS firmware updater functionality test in Chrome Box.
-
-    The procedure of the test is:
-    1. Flash the old FW version to the device.
-    2. Power cycle the USB port to simulate unplugging and replugging the
-         device, which should trigger the udev rule and run the updater.
-    3. Wait for the updater to finish.
-    4. Run the FW updater again and verify, by checking the output, that the
-         FW on the device matches the latest FW on the system.
-    """
-
-    version = 1
-
-    _LOG_FILE_PATH = '/tmp/sis-updater.log'
-    _FW_PATH = '/lib/firmware/sis/'
-    _OLD_FW_NAME = 'FW_Watchdog_0110.bin'
-    _NEW_FW_NAME = 'WYD_101_WYD_9255_A353_V04.bin'
-    _DUT_BOARD = 'guado'
-    _SIS_VID = '266e'
-    _SIS_PID = '0110'
-
-    def initialize(self, host):
-        self.host = host
-        self.log_file = self._LOG_FILE_PATH
-        self.old_fw_path = os.path.join(self._FW_PATH, self._OLD_FW_NAME)
-        self.new_fw_path = os.path.join(self._FW_PATH, self._NEW_FW_NAME)
-        self.usb_port_gpio_number = FRONT_LEFT_USB_GPIO
-        self.board = self._DUT_BOARD
-        self.vid = self._SIS_VID
-        self.pid = self._SIS_PID
-        # Open log file object.
-        self.log_file_obj = open(self.log_file, 'w')
-
-    def cleanup(self):
-        self.log_file_obj.close()
-        test.test.cleanup(self)
-        cmd = 'rm -f {}'.format(self.old_fw_path)
-        self._run_cmd(cmd)
-
-    def _run_cmd(self, command, str_compare='', print_output=False):
-        """
-        Run command line on DUT.
-
-        Run commands on DUT. Wait for command to complete, then check the
-        output for expected string.
-
-        @param command: command line to run on the DUT.
-        @param str_compare: a piece of string we want to see in the output of
-                running the command.
-        @param print_output: if true, print command output in log.
-
-        @returns the command output and a bool value. If str_compare is in
-              command output, return true. Otherwise return false.
-
-        """
-
-        logging.info('Execute: %s', command)
-        result = self.host.run(command, ignore_status=True)
-        if result.stderr:
-            output = result.stderr
-        else:
-            output = result.stdout
-        if print_output:
-            logging.info('Output: %s', ''.join(output))
-        if str_compare and str_compare not in ''.join(output):
-            return output, False
-        else:
-            return output, True
-
-    def convert_rootfs_writable(self):
-        """Remove rootfs verification on DUT, reboot,
-        and remount the filesystem read-writable"""
-
-        logging.info('Disabling rootfs verification...')
-        self.remove_rootfs_verification()
-
-        logging.info('Rebooting...')
-        self.reboot()
-
-        logging.info('Remounting..')
-        cmd = 'mount -o remount,rw /'
-        self._run_cmd(cmd)
-
-    def remove_rootfs_verification(self):
-        """Remove rootfs verification."""
-
-        # 2 & 4 are default partitions, and the system boots from one of them.
-        # Code from chromite/scripts/deploy_chrome.py
-        KERNEL_A_PARTITION = 2
-        KERNEL_B_PARTITION = 4
-
-        cmd_template = ('/usr/share/vboot/bin/make_dev_ssd.sh --partitions %d '
-                        '--remove_rootfs_verification --force')
-        for partition in (KERNEL_A_PARTITION, KERNEL_B_PARTITION):
-            cmd = cmd_template % partition
-            self._run_cmd(cmd)
-
-    def reboot(self):
-        """Reboots the DUT."""
-
-        self.host.reboot()
-
-    def is_filesystem_readwrite(self):
-        """Check if the root file system is read-writable.
-
-        Queries whether the DUT's root filesystem /dev/root (often manifested
-        as /dev/dm-0) is mounted read-only or read-write.
-
-        @returns True if the /dev/root is read-writable. False otherwise.
-        """
-
-        cmd = 'cat /proc/mounts | grep "/dev/root"'
-        result, _ = self._run_cmd(cmd)
-        fields = re.split(' |,', result)
-        return len(fields) >= 4 and fields[3] == 'rw'
-
-    def copy_firmware(self):
-        """Copy test firmware from server to DUT."""
-
-        current_dir = os.path.dirname(os.path.realpath(__file__))
-        src_firmware_path = os.path.join(current_dir, self._OLD_FW_NAME)
-        dst_firmware_path = self._FW_PATH
-        logging.info('Copy firmware from {} to {}.'.format(src_firmware_path,
-                                                           dst_firmware_path))
-        self.host.send_file(src_firmware_path, dst_firmware_path,
-                            delete_dest=True)
-
-    def trigger_updater(self):
-        """Trigger the udev rule to run the fw updater."""
-
-        try:
-            power_cycle_usb_util.power_cycle_usb_vidpid(self.host, self.board,
-                                                        self.vid, self.pid)
-        except KeyError:
-            raise error.TestFail('Couldn\'t find target device: '
-                                 'vid:pid {}:{}'.format(self.vid, self.pid))
-
-    def flash_fw(self, fw_path, str_compare='', print_output=False):
-        """
-        Flash the given firmware to the device.
-
-        Run the SiS firmware updater on the DUT to flash the firmware given
-        by fw_path to the target device (Mimo).
-
-        @param fw_path: the path to the firmware to flash.
-        @param str_compare, print_output: the same as for _run_cmd.
-
-        """
-        cmd_run_updater = ('/usr/sbin/sis-updater '
-                           '-ba -log_to=stdout {}'.format(fw_path))
-        output, succeed = self._run_cmd(
-            cmd_run_updater, str_compare=str_compare, print_output=print_output)
-        return output, succeed
-
-    def run_once(self):
-        """Main test procedure."""
-
-        # Make the DUT filesystem writable.
-        if not self.is_filesystem_readwrite():
-            logging.info('DUT root file system is not read-writable. '
-                         'Converting it to read-writable...')
-            self.convert_rootfs_writable()
-        else:
-            logging.info('DUT is read-writable.')
-
-        # Copy old FW to device.
-        self.copy_firmware()
-
-        # Flash old FW to device.
-        expect_output = 'update firmware complete'
-        output, succeed = self.flash_fw(self.old_fw_path,
-                                        str_compare=expect_output)
-        self.log_file_obj.write('{}Log info for writing '
-                                'old firmware{}\n'.format('-'*8, '-'*8))
-        self.log_file_obj.write(output)
-        if not succeed:
-            raise error.TestFail('Expect \'{}\' in output, '
-                                 'but didn\'t find it.'.format(expect_output))
-
-        # No need to manually trigger udev to run the FW updater here.
-        # The previous FW updating process resets the SiS after it finishes.
-
-        # Wait for fw updater to finish.
-        time.sleep(UPDATER_WAIT_TIME)
-
-        # Try flashing the new firmware; the updater should detect the same FW version.
-        expect_output = 'The device has the same FW as system'
-        output, succeed = self.flash_fw(self.new_fw_path,
-                                        str_compare=expect_output)
-        self.log_file_obj.write('{}Log info for writing '
-                                'new firmware{}\n'.format('-'*8, '-'*8))
-        self.log_file_obj.write(output)
-        if not succeed:
-            raise error.TestFail('Expect {} in output '
-                                 'but didn\'t find it.'.format(expect_output))
-
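
For reference, the read-write check in is_filesystem_readwrite above boils down to parsing the mount options of /dev/root in /proc/mounts. A minimal standalone sketch of the same parsing, runnable locally rather than through the autotest host object (the sample line in the docstring is illustrative):

import re

def rootfs_is_readwrite(mounts_text):
    """Return True if the /dev/root entry in /proc/mounts lists 'rw'.

    mounts_text is the content of /proc/mounts; a typical line looks like:
    /dev/root / ext2 rw,seclabel,relatime 0 0
    """
    for line in mounts_text.splitlines():
        if line.startswith('/dev/root'):
            # Fields are space separated; the fourth field holds the mount
            # options, the first of which is 'rw' or 'ro'.
            fields = re.split(r'[ ,]', line)
            return len(fields) >= 4 and fields[3] == 'rw'
    return False

if __name__ == '__main__':
    with open('/proc/mounts') as mounts:
        print(rootfs_is_readwrite(mounts.read()))
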
diff --git a/server/site_tests/enterprise_CFM_Test/control.atrus b/server/site_tests/enterprise_CFM_Test/control.atrus
deleted file mode 100644
index 39888c5..0000000
--- a/server/site_tests/enterprise_CFM_Test/control.atrus
+++ /dev/null
@@ -1,250 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mzhuo@chromium.org"
-NAME = "enterprise_CFM_Test.atrus"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = ""
-JOB_RETRIES = 1
-
-DOC = """
-This test verifies peripherals for Bluestreak.
-Tests and verifications can be selected in control file.
-action_list defines the list of all available tests; its keys name the tests,
-and each key's value defines how many times that test will be run
-in one loop.
-
-verification_list defines the list of all available verifications,
-which are keys for dict verification_list. True means the verification specified
-by that key will be performed.
-
-Testing flow can be set in control file.
-test_flow_control defines:
-1. runtestonly: if True, no cleartpm and enrollment are done before the test,
-                and no cleartpm is done after the test. Before running the test
-                on the CfM we assume the CfM is enrolled;
-                else, cleartpm and enrollment will be done before the test,
-                and cleartpm is done afterwards.
-2. setupcleanup: (TODO) if True, cleanup is done before exiting the test,
-                 else, no cleanup before exit.
-                 This provides a way to keep the setup in its original state
-                 when the test is aborted.
-3. abort_on_failure: if True, the test is aborted if a test or verification fails,
-                     else, the test continues.
-4. random_mode: if True, the sequence of tests in one loop is randomized every time,
-                else, the sequence is the same for all loops.
-5. recovery_on_fatal_failure: if True, reboot CfM when fatal failure occurs,
-                              else, no reboot.
-6. skip_cfm_check: every working CfM should have a speaker and camera connected;
-             for Bluestreak, a Mimo should be connected in addition to speaker/camera.
-             If True, the script checks the above before starting the test,
-             else, no check will be done before the test starts.
-7. debug: if True, the script prints more information, for example, cli output and
-          the names of tests and verifications. This is intended to give the user
-          more output for informational purposes. This is different from logging.debug,
-          which is turned on by "--debug" when kickstarting the script,
-          else, less output.
-8. report: if True, after each loop the script prints a summary of tests and verifications.
-
-Dict error_key_words provides the list of error messages the script checks for.
-If the script finds any of them in the log, it claims a test failure. This list
-can be updated on the fly. Each key's value is the list of error-log strings the
-script looks for when scanning the log files.
-
-How to run it:
-1. Run cros_sdk
-2. Go to ~/chromiumos/src/third_party/autotest/files/server/site_tests/enterprise_CFM_Test
-3. Edit the control file. Replace meeting_code with one that works for the CfM under test.
-4. Update test_config, action_config, verification_config, test_flow_control, etc. properly.
-5. Issue command line to kickstart autotest:
-test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=guado
-100.123.174.2 enterprise_CFM_Test.demo
-
-"""
-
-args_dict = utils.args_to_dict(args)
-
-error_key_words = {'usb': [
-                   'Setup ERROR: setup context command',
-                   'URB transfer length is wrong',
-                   'device descriptor read',
-                   'unable to enumerate USB device',
-                   'hub_port_status failed',
-                   'cannot reset port',
-                   'nonzero write bulk status received',
-                   'Device not responding to setup address',
-                   'device not accepting address',
-                   'Set SEL for device-initiated U2 failed',
-                   'Set SEL for device-initiated U1 failed',
-                   'Disable of device-initiated U1 failed',
-                   'Disable of device-initiated U2 failed',
-                   'usb_set_interface failed',
-                   'MIMO has wedged; issuing hub reset'
-                  ],
-                   'kernel': [
-                   'ERROR Transfer event TRB DMA ptr',
-                   'crashes with segfault',
-                   'segfault at',
-                   'cut here',
-                   'end trace',
-                   'Failed to resubmit video URB',
-                   'go2001_watchdog:',
-                   'go2001_ctx_error_locked:'
-                  ],
-                   'video': [
-                   'uvcvideo: Failed to query',
-                   'VIDIOC_DQBUF failed',
-                   'uvcvideo: Non-zero status',
-                   'uvcvideo: Failed to set UVC commit control',
-                   'uvcvideo: UVC non compliance',
-                   'Failed to resubmit video URB',
-                   'No streaming interface found',
-                   'Dequeued v4l2 buffer contains corrupted data'
-                  ],
-                   'audio': [
-                   'hw_params: Input/output error: rate:',
-                   'Init device retry failed',
-                   'Init Hangouts Meet speakermic: USB Audio',
-                   'usb_set_interface failed',
-                   'hw_params: Input/output error: rate:',
-                   'Init device retry failed',
-                  ],
-                   'chrome': [
-                   '#No match for getting seqNum',
-                   'Cannot get RenderProcess',
-                   'segfault at',
-                   'ERR crash_reporter',
-                   'Watchdog resetting firmware',
-                   'Failed to create scanout buffer',
-                   'Failed to enable controller',
-                   'Failed to configure: device',
-                   'Failed to export buffer to dma_buf',
-                   'Failed to take control of the display',
-                   'WARNING: texture bound to texture unit 0 is not renderable',
-                   'Failed to modeset controller'
-                  ],
-                   'atrus': [
-                   '#Internal TrueVoice parameters',
-                   'write: Connection timed out',
-                   'Error: report failed',
-                   'write: Broken pipe',
-                   'protocol error'
-                  ],
-                   'usb_stability': [
-                   '#autotest',
-                   'USB disconnect',
-                   'New USB device found',
-                  ]
-}
-
-
-# !!! Please change the meeting code to one that works for the CfM.
-# !!! If a meeting_code for a different domain is used, the meeting might time out.
-# vol_change_mode: if set to 1, make one call to set the volume to the target volume,
-#                  else, make multiple calls to update the volume until it
-#                  equals the target volume.
-test_config = {
-               'gpio_list': ['218','219', '209'],
-               'gpiopause': 8,
-               'puts': "18d1:8001",
-               'is_meeting': 1,
-               'meeting_code': 'otg-dkps-ovj', # this works for crosprq4.com
-               'repeat': 200,
-               'reboot_timeout': 30,
-               'loop_timeout': 60,
-               'action_timeout': 10,
-               'min_timeout': 5,
-               'debug_timeout': 9999,
-               'vol_change_step': 6,
-               'vol_change_mode': 1,
-               'reboot_after_min_meets': 5,
-               'gpio_after_min_meets': 10
-}
-
-#action_config['meeting_test'] should be 0 or 1.
-#  0: no meeting test to be done
-#  1: In each loop the CfM first joins a meeting; after all tests are done,
-#     the CfM leaves the meeting.
-#The values of 'mute_unmute_camera_test', 'mute_unmute_mic_test',
-#'speaker_volume_test', etc. are the number of times each test is run in
-#one meeting.
-action_config = {
-                 'meeting_test': 1,
-                 'mute_unmute_camera_test': 0,
-                 'mute_unmute_mic_test': 3,
-                 'speaker_volume_test': 3,
-                 'gpio_test': 1,
-                 'reboot_test': 1,
-                 'reset_usb_test': 0,
-                 'flap_monitor_test': 0
-}
-
-
-verification_config = {
-                      'check_usb_enumeration':True,
-                      'check_usb_inf_init': True,
-                      'check_v4l2_interface': False,
-                      'check_audio_card':  True,
-                      'check_cras_speaker': True,
-                      'check_cras_mic': True,
-                      'check_cras_pspeaker': True,
-                      'check_cras_pmic': True,
-                      'check_cras_speaker_vol': True,
-                      'check_cras_mic_mute': True,
-                      'check_prefer_camera': False,
-                      'check_camera_mute': False,
-                      'check_audio_stream': True,
-                      'check_video_stream': False,
-                      'check_hotrod_speaker': False,
-                      'check_hotrod_mic': False,
-                      'check_hotrod_camera': False,
-                      'check_hotrod_pspeaker': False,
-                      'check_hotrod_pmic': False,
-                      'check_hotrod_pcamera': False,
-                      'check_hotrod_speaker_vol': False,
-                      'check_hotrod_mic_state': False,
-                      'check_hotrod_camera_state': False,
-                      'check_usb_errorlog': True,
-                      'check_kernel_errorlog': True,
-                      'check_video_errorlog': True,
-                      'check_audio_errorlog': True,
-                      'check_chrome_errorlog': True,
-                      'check_atrus_errorlog': True,
-                      'check_usb_stability': True,
-                      'check_process_crash': False,
-                      'check_kernel_panic': False
-}
-
-
-test_flow_control = {
-                     'reboot_before_start': False,
-                     'runtestonly': True,
-                     'setupcleanup': True,
-                     'abort_on_failure': True,
-                     'random_mode': True,
-                     'recovery_on_fatal_failure': False,
-                     'skip_cfm_check': True,
-                     'debug': True,
-                     'report': True
-}
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    run_test_only = test_flow_control['runtestonly']
-    job.run_test('enterprise_CFM_Test', host=host,
-                  run_test_only=run_test_only,
-                  test_config=test_config, action_config=action_config,
-                  verification_config=verification_config,
-                  error_key_words=error_key_words,
-                  test_flow_control=test_flow_control,
-                  tag='atrus')
-
-
-parallel_simple(run_test, machines)
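
The enterprise_CFM_Test implementation itself is not part of this diff; as a rough illustration of how a control file's action_config counts and test_flow_control['random_mode'] could drive one loop of actions, a hypothetical sketch (build_action_sequence is not a real autotest helper):

import random

def build_action_sequence(action_config, random_mode):
    """Expand {'test_name': count} into a flat list of actions for one loop."""
    sequence = []
    for name, count in action_config.items():
        if name == 'meeting_test':
            # Joining/leaving the meeting brackets the loop rather than being
            # interleaved with the other actions.
            continue
        sequence.extend([name] * count)
    if random_mode:
        random.shuffle(sequence)  # different order on every loop
    return sequence

# Example using the counts from control.atrus above:
actions = {'meeting_test': 1, 'mute_unmute_mic_test': 3,
           'speaker_volume_test': 3, 'gpio_test': 1, 'reboot_test': 1}
print(build_action_sequence(actions, random_mode=True))
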
diff --git a/server/site_tests/enterprise_CFM_Test/control.demo b/server/site_tests/enterprise_CFM_Test/control.demo
deleted file mode 100644
index 108210f..0000000
--- a/server/site_tests/enterprise_CFM_Test/control.demo
+++ /dev/null
@@ -1,189 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mzhuo@chromium.org"
-NAME = "enterprise_CFM_Test.demo"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = ""
-JOB_RETRIES = 1
-
-DOC = """
-This test runs various tests and verifications for CfM devices.
-Tests and verifications can be selected in control file.
-action_list defines the list of all available tests; its keys name the tests,
-and each key's value defines how many times that test will be run
-in each loop.
-verification_list defines the list of all available verifications,
-which are keys for dict verification_list. True means the verification specified
-by that key will be performed.
-Testing flow can be set in control file.
-
-Example to run it:
-test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=guado
-100.123.174.2 enterprise_CFM_Test.demo
-
-Note:
-1. Modify dict error_key_words to meet each test's need.
-2. Modify the meeting code to one that works for the domain the device is enrolled in.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-error_key_words = {'usb': [
-                   'unable to enumerate USB device',
-                   'hub_port_status failed',
-                   'Device not responding to setup address',
-                   'device not accepting address'
-                  ],
-                   'kernel': [
-                   'stuck on bsd ring',
-                   'stuck on render ring',
-                   'GPU crash dump saved to',
-                   'ERROR Transfer event TRB DMA ptr',
-                   'crashes with segfault',
-                   'segfault at',
-                   'Out of memory: Kill process',
-                   'Killed process'
-                  ],
-                   'video': [
-                   'uvcvideo: Failed to set UVC commit control',
-                   'uvcvideo: UVC non compliance',
-                   'No streaming interface found'
-                  ],
-                   'audio': [
-                   'hw_params: Input/output error: rate:',
-                   'usb_set_interface failed',
-                  ],
-                   'chrome': [
-                   'segfault at',
-                   'Watchdog resetting firmware',
-                   'Failed to create scanout buffer',
-                   'Failed to enable controller',
-                   'Failed to export buffer to dma_buf',
-                   'Failed to take control of the display',
-                   'Failed to modeset controller',
-                   'Failed to encode frame.'
-                  ],
-                   'atrus': [
-                   'write: Connection timed out',
-                   'Error: report failed',
-                   'write: Broken pipe',
-                   'protocol error'
-                  ],
-                   'usb_stability': [
-                   'USB disconnect',
-                   'New USB device found'
-                  ]
-}
-
-
-# !!! Please change the meeting code to one that works for the CfM.
-# !!! If a meeting_code for a different domain is used, the meeting might time out.
-# vol_change_mode: if set to 1, make one call to set the volume to the target volume,
-#                  else, make multiple calls to update the volume until it
-#                  equals the target volume.
-test_config = {
-               'gpio_list': ['218','219', '209'],
-               'gpiopause': 8,
-               'puts': "",
-               'is_meeting': 1,
-               'meeting_code': 'otg-dkps-ovj', # this works for crosprq4.com
-               'repeat': 2,
-               'reboot_timeout': 60,
-               'loop_timeout': 30,
-               'action_timeout': 20,
-               'min_timeout': 10,
-               'debug_timeout': 5,
-               'vol_change_step': 6,
-               'vol_change_mode': 1,
-               'reboot_after_min_meets': 1,
-               'gpio_after_min_meets': 1
-}
-
-#action_config['meeting_test'] should be 0 or 1.
-#  0: no meeting test to be done
-#  1: In each loop the CfM first joins a meeting; after all tests are done,
-#     the CfM leaves the meeting.
-#The values of 'mute_unmute_camera_test', 'mute_unmute_mic_test',
-#'speaker_volume_test', etc. are the number of times each test is run in
-#one meeting.
-action_config = {
-                 'meeting_test': 1,
-                 'mute_unmute_camera_test': 1,
-                 'mute_unmute_mic_test': 1,
-                 'speaker_volume_test': 1,
-                 'gpio_test': 0,
-                 'reboot_test': 0,
-                 'reset_usb_test': 0,
-                 'flap_monitor_test': 0
-}
-
-
-verification_config = {
-                      'check_usb_enumeration':True,
-                      'check_usb_inf_init': True,
-                      'check_v4l2_interface': True,
-                      'check_audio_card':  False,
-                      'check_cras_speaker': True,
-                      'check_cras_mic': True,
-                      'check_cras_pspeaker': True,
-                      'check_cras_pmic': True,
-                      'check_cras_speaker_vol': True,
-                      'check_cras_mic_mute': True,
-                      'check_prefer_camera': False,
-                      'check_camera_mute': False,
-                      'check_audio_stream': True,
-                      'check_video_stream': True,
-                      'check_hotrod_speaker': False,
-                      'check_hotrod_mic': False,
-                      'check_hotrod_camera': False,
-                      'check_hotrod_pspeaker': False,
-                      'check_hotrod_pmic': False,
-                      'check_hotrod_pcamera': False,
-                      'check_hotrod_speaker_vol': False,
-                      'check_hotrod_mic_state': False,
-                      'check_hotrod_camera_state': False,
-                      'check_usb_errorlog': False,
-                      'check_kernel_errorlog': False,
-                      'check_video_errorlog': False,
-                      'check_audio_errorlog': False,
-                      'check_chrome_errorlog': False,
-                      'check_atrus_errorlog': False,
-                      'check_usb_stability': False,
-                      'check_process_crash': True,
-                      'check_kernel_panic': False,
-                      'check_chrome_restarted': True,
-                      'check_cfm_rebooted': True
-}
-
-
-test_flow_control = {
-                     'reboot_before_start': False,
-                     'run_test_only': True,
-                     'setup_cleanup': True,
-                     'abort_on_failure': True,
-                     'random_mode': True,
-                     'recovery_on_fatal_failure': True,
-                     'skip_cfm_check': True,
-                     'report': True
-}
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    run_test_only = test_flow_control['run_test_only']
-    job.run_test('enterprise_CFM_Test', host=host,
-                  run_test_only=run_test_only,
-                  test_config=test_config, action_config=action_config,
-                  verification_config=verification_config,
-                  error_key_words=error_key_words,
-                  test_flow_control=test_flow_control,
-                  tag='demo')
-
-
-parallel_simple(run_test, machines)
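
The error_key_words dict above feeds the log-scanning verifications (check_usb_errorlog, check_kernel_errorlog, and so on). A hypothetical sketch of that keyword scan, not the actual enterprise_CFM_Test code, which reads DUT log files such as /var/log/messages:

def scan_log(log_text, error_key_words):
    """Return {category: [matched keywords]} for keywords found in log_text."""
    hits = {}
    for category, keywords in error_key_words.items():
        found = [kw for kw in keywords if kw and kw in log_text]
        if found:
            hits[category] = found
    return hits

sample_log = 'usb 1-2: device not accepting address 5, error -71'
print(scan_log(sample_log, {'usb': ['device not accepting address'],
                            'kernel': ['segfault at']}))
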
diff --git a/server/site_tests/enterprise_CFM_Test/control.huddly b/server/site_tests/enterprise_CFM_Test/control.huddly
deleted file mode 100644
index b5091ff..0000000
--- a/server/site_tests/enterprise_CFM_Test/control.huddly
+++ /dev/null
@@ -1,248 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mzhuo@chromium.org"
-NAME = "enterprise_CFM_Test.huddly"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = ""
-JOB_RETRIES = 1
-
-DOC = """
-This test verifies peripherals for Bluestreak.
-Tests and verifications can be selected in control file.
-action_list defines the list of all available tests; its keys name the tests,
-and each key's value defines how many times that test will be run
-in one loop.
-
-verification_list defines the list of all available verifications,
-which are keys for dict verification_list. True means the verification specified
-by that key will be performed.
-
-Testing flow can be set in control file.
-test_flow_control defines:
-1. runtestonly: if True, no cleartpm and enrollment are done before the test,
-                and no cleartpm is done after the test. Before running the test
-                on the CfM we assume the CfM is enrolled;
-                else, cleartpm and enrollment will be done before the test,
-                and cleartpm is done afterwards.
-2. setupcleanup: (TODO) if True, cleanup is done before exiting the test,
-                 else, no cleanup before exit.
-                 This provides a way to keep the setup in its original state
-                 when the test is aborted.
-3. abort_on_failure: if True, the test is aborted if a test or verification fails,
-                     else, the test continues.
-4. random_mode: if True, the sequence of tests in one loop is randomized every time,
-                else, the sequence is the same for all loops.
-5. recovery_on_fatal_failure: if True, reboot CfM when fatal failure occurs,
-                              else, no reboot.
-6. skip_cfm_check: every working CfM should have a speaker and camera connected;
-             for Bluestreak, a Mimo should be connected in addition to speaker/camera.
-             If True, the script checks the above before starting the test,
-             else, no check will be done before the test starts.
-7. debug: if True, the script prints more information, for example, cli output and
-          the names of tests and verifications. This is intended to give the user
-          more output for informational purposes. This is different from logging.debug,
-          which is turned on by "--debug" when kickstarting the script,
-          else, less output.
-8. report: if True, after each loop the script prints a summary of tests and verifications.
-
-Dict error_key_words provides the list of error messages the script checks for.
-If the script finds any of them in the log, it claims a test failure. This list
-can be updated on the fly. Each key's value is the list of error-log strings the
-script looks for when scanning the log files.
-
-How to run it:
-1. Run cros_sdk
-2. Go to ~/chromiumos/src/third_party/autotest/files/server/site_tests/enterprise_CFM_Test
-3. Edit the control file. Replace meeting_code with one that works for the CfM under test.
-4. Update test_config, action_config, verification_config, test_flow_control, etc. properly.
-5. Issue command line to kickstart autotest:
-test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=guado
-100.123.174.2 enterprise_CFM_Test.demo
-
-"""
-
-args_dict = utils.args_to_dict(args)
-
-error_key_words = {'usb': [
-                   'Setup ERROR: setup context command',
-                   'URB transfer length is wrong',
-                   'device descriptor read',
-                   'unable to enumerate USB device',
-                   'hub_port_status failed',
-                   'cannot reset port',
-                   'nonzero write bulk status received',
-                   'Device not responding to setup address',
-                   'device not accepting address',
-                   'Set SEL for device-initiated U2 failed',
-                   'Set SEL for device-initiated U1 failed',
-                   'Disable of device-initiated U1 failed',
-                   'Disable of device-initiated U2 failed',
-                   'usb_set_interface failed',
-                   'MIMO has wedged; issuing hub reset'
-                  ],
-                   'kernel': [
-                   'ERROR Transfer event TRB DMA ptr',
-                   'crashes with segfault',
-                   'segfault at',
-                   'cut here',
-                   'end trace',
-                   'Failed to resubmit video URB',
-                   'go2001_watchdog:',
-                   'go2001_ctx_error_locked:'
-                  ],
-                   'video': [
-                   'uvcvideo: Failed to query',
-                   'VIDIOC_DQBUF failed',
-                   'uvcvideo: Non-zero status',
-                   'uvcvideo: Failed to set UVC commit control',
-                   'uvcvideo: UVC non compliance',
-                   'Failed to resubmit video URB',
-                   'No streaming interface found',
-                   'Dequeued v4l2 buffer contains corrupted data'
-                  ],
-                   'audio': [
-                   'hw_params: Input/output error: rate:',
-                   'Init device retry failed',
-                   'Init Hangouts Meet speakermic: USB Audio',
-                   'usb_set_interface failed',
-                   'hw_params: Input/output error: rate:',
-                   'Init device retry failed',
-                  ],
-                   'chrome': [
-                   '#No match for getting seqNum',
-                   'Cannot get RenderProcess',
-                   'segfault at',
-                   'ERR crash_reporter',
-                   'Watchdog resetting firmware',
-                   'Failed to create scanout buffer',
-                   'Failed to enable controller',
-                   'Failed to configure: device',
-                   'Failed to export buffer to dma_buf',
-                   'Failed to take control of the display',
-                   'WARNING: texture bound to texture unit 0 is not renderable',
-                   'Failed to modeset controller'
-                  ],
-                   'atrus': [
-                   '#Internal TrueVoice parameters',
-                   'write: Connection timed out',
-                   'Error: report failed',
-                   'write: Broken pipe',
-                   'protocol error'
-                  ],
-                   'usb_stability': [
-                   '#autotest',
-                   'USB disconnect',
-                   'New USB device found',
-                  ]
-}
-
-
-# !!! Please change the meeting code to one that works for the CfM.
-# !!! If a meeting_code for a different domain is used, the meeting might time out.
-# vol_change_mode: if set to 1, make one call to set the volume to the target volume,
-#                  else, make multiple calls to update the volume until it
-#                  equals the target volume.
-test_config = {
-               'gpio_list': ['218','219', '209'],
-               'gpiopause': 8,
-               'puts': "2bd9:0011",
-               'is_meeting': 1,
-               'meeting_code': 'otg-dkps-ovj', # this works for crosprq4.com
-               'repeat': 200,
-               'reboot_timeout': 30,
-               'loop_timeout': 60,
-               'action_timeout': 10,
-               'min_timeout': 5,
-               'debug_timeout': 9999,
-               'vol_change_step': 6,
-               'vol_change_mode': 1,
-               'reboot_after_min_meets': 5,
-               'gpio_after_min_meets': 10
-}
-
-#action_config['meeting_test'] should be 0 or 1.
-#  0: no meeting test to be done
-#  1: In each loop the CfM first joins a meeting; after all tests are done,
-#     the CfM leaves the meeting.
-#The values of 'mute_unmute_camera_test', 'mute_unmute_mic_test',
-#'speaker_volume_test', etc. are the number of times each test is run in
-#one meeting.
-action_config = {
-                 'meeting_test': 1,
-                 'mute_unmute_camera_test': 3,
-                 'mute_unmute_mic_test': 0,
-                 'speaker_volume_test': 0,
-                 'gpio_test': 1,
-                 'reboot_test': 1,
-                 'reset_usb_test': 0,
-                 'flap_monitor_test': 0
-}
-
-
-verification_config = {
-                      'check_usb_enumeration':True,
-                      'check_usb_inf_init': True,
-                      'check_v4l2_interface': True,
-                      'check_audio_card':  False,
-                      'check_cras_speaker': False,
-                      'check_cras_mic': False,
-                      'check_cras_pspeaker': False,
-                      'check_cras_pmic': False,
-                      'check_cras_speaker_vol': False,
-                      'check_cras_mic_mute': False,
-                      'check_prefer_camera': False,
-                      'check_camera_mute': False,
-                      'check_audio_stream': False,
-                      'check_video_stream': True,
-                      'check_hotrod_speaker': False,
-                      'check_hotrod_mic': False,
-                      'check_hotrod_camera': False,
-                      'check_hotrod_pspeaker': False,
-                      'check_hotrod_pmic': False,
-                      'check_hotrod_pcamera': False,
-                      'check_hotrod_speaker_vol': False,
-                      'check_hotrod_mic_state': False,
-                      'check_hotrod_camera_state': False,
-                      'check_usb_errorlog': True,
-                      'check_kernel_errorlog': True,
-                      'check_video_errorlog': True,
-                      'check_audio_errorlog': True,
-                      'check_chrome_errorlog': True,
-                      'check_atrus_errorlog': False,
-                      'check_usb_stability': True,
-                      'check_process_crash': False,
-                      'check_kernel_panic': False
-}
-
-
-test_flow_control = {
-                     'runtestonly': True,
-                     'setupcleanup': True,
-                     'abort_on_failure': True,
-                     'random_mode': True,
-                     'recovery_on_fatal_failure': False,
-                     'skip_cfm_check': True,
-                     'debug': True,
-                     'report': True
-}
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    run_test_only = test_flow_control['runtestonly']
-    job.run_test('enterprise_CFM_Test', host=host,
-                  run_test_only=run_test_only,
-                  test_config=test_config, action_config=action_config,
-                  verification_config=verification_config,
-                  error_key_words=error_key_words,
-                  test_flow_control=test_flow_control,
-                  tag='huddly')
-
-
-parallel_simple(run_test, machines)
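
check_usb_enumeration verifies that the VID:PID pairs listed in test_config['puts'] (for example 2bd9:0011 in control.huddly above) are enumerated. A hypothetical local sketch of such a check against lsusb output; the real verification runs on the DUT through the autotest host object:

import subprocess

def usb_devices_present(vid_pid_csv):
    """Check each 'vid:pid' in a comma-separated list against lsusb output."""
    if not vid_pid_csv:
        return True  # an empty 'puts' value means nothing specific to check
    output = subprocess.check_output(['lsusb']).decode().lower()
    return all(pair.strip().lower() in output
               for pair in vid_pid_csv.split(','))

print(usb_devices_present('2bd9:0011'))
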
diff --git a/server/site_tests/enterprise_CFM_Test/control.mimo b/server/site_tests/enterprise_CFM_Test/control.mimo
deleted file mode 100644
index 7196983..0000000
--- a/server/site_tests/enterprise_CFM_Test/control.mimo
+++ /dev/null
@@ -1,249 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mzhuo@chromium.org"
-NAME = "enterprise_CFM_Test.mimo"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = ""
-JOB_RETRIES = 1
-
-DOC = """
-This test verifies peripherals for Bluestreak.
-Tests and verifications can be selected in control file.
-action_list defines the list of all available tests; its keys name the tests,
-and each key's value defines how many times that test will be run
-in one loop.
-
-verification_list defines the list of all available verifications,
-which are keys for dict verification_list. True means the verification specified
-by that key will be performed.
-
-Testing flow can be set in control file.
-test_flow_control defines:
-1. runtestonly: if True, no cleartpm and enrollment are done before the test,
-                and no cleartpm is done after the test. Before running the test
-                on the CfM we assume the CfM is enrolled;
-                else, cleartpm and enrollment will be done before the test,
-                and cleartpm is done afterwards.
-2. setupcleanup: (TODO) if True, cleanup is done before exiting the test,
-                 else, no cleanup before exit.
-                 This provides a way to keep the setup in its original state
-                 when the test is aborted.
-3. abort_on_failure: if True, the test is aborted if a test or verification fails,
-                     else, the test continues.
-4. random_mode: if True, the sequence of tests in one loop is randomized every time,
-                else, the sequence is the same for all loops.
-5. recovery_on_fatal_failure: if True, reboot CfM when fatal failure occurs,
-                              else, no reboot.
-6. skip_cfm_check: every working CfM should have a speaker and camera connected;
-             for Bluestreak, a Mimo should be connected in addition to speaker/camera.
-             If True, the script checks the above before starting the test,
-             else, no check will be done before the test starts.
-7. debug: if True, the script prints more information, for example, cli output and
-          the names of tests and verifications. This is intended to give the user
-          more output for informational purposes. This is different from logging.debug,
-          which is turned on by "--debug" when kickstarting the script,
-          else, less output.
-8. report: if True, after each loop the script prints a summary of tests and verifications.
-
-Dict error_key_words provides the list of error messages the script checks for.
-If the script finds any of them in the log, it claims a test failure. This list
-can be updated on the fly. Each key's value is the list of error-log strings the
-script looks for when scanning the log files.
-
-How to run it:
-1. Run cros_sdk
-2. Go to ~/chromiumos/src/third_party/autotest/files/server/site_tests/enterprise_CFM_Test
-3. Edit the control file. Replace meeting_code with one that works for the CfM under test.
-4. Update test_config, action_config, verification_config, test_flow_control, etc. properly.
-5. Issue command line to kickstart autotest:
-test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=guado
-100.123.174.2 enterprise_CFM_Test.demo
-
-"""
-
-args_dict = utils.args_to_dict(args)
-
-error_key_words = {'usb': [
-                   'Setup ERROR: setup context command',
-                   'URB transfer length is wrong',
-                   'device descriptor read',
-                   'unable to enumerate USB device',
-                   'hub_port_status failed',
-                   'cannot reset port',
-                   'nonzero write bulk status received',
-                   'Device not responding to setup address',
-                   'device not accepting address',
-                   'Set SEL for device-initiated U2 failed',
-                   'Set SEL for device-initiated U1 failed',
-                   'Disable of device-initiated U1 failed',
-                   'Disable of device-initiated U2 failed',
-                   'usb_set_interface failed',
-                   'MIMO has wedged; issuing hub reset'
-                  ],
-                   'kernel': [
-                   'ERROR Transfer event TRB DMA ptr',
-                   'crashes with segfault',
-                   'segfault at',
-                   'cut here',
-                   'end trace',
-                   'Failed to resubmit video URB',
-                   'go2001_watchdog:',
-                   'go2001_ctx_error_locked:'
-                  ],
-                   'video': [
-                   'uvcvideo: Failed to query',
-                   'VIDIOC_DQBUF failed',
-                   'uvcvideo: Non-zero status',
-                   'uvcvideo: Failed to set UVC commit control',
-                   'uvcvideo: UVC non compliance',
-                   'Failed to resubmit video URB',
-                   'No streaming interface found',
-                   'Dequeued v4l2 buffer contains corrupted data'
-                  ],
-                   'audio': [
-                   'hw_params: Input/output error: rate:',
-                   'Init device retry failed',
-                   'Init Hangouts Meet speakermic: USB Audio',
-                   'usb_set_interface failed',
-                   'hw_params: Input/output error: rate:',
-                   'Init device retry failed',
-                  ],
-                   'chrome': [
-                   '#No match for getting seqNum',
-                   'Cannot get RenderProcess',
-                   'segfault at',
-                   'ERR crash_reporter',
-                   'Watchdog resetting firmware',
-                   'Failed to create scanout buffer',
-                   'Failed to enable controller',
-                   'Failed to configure: device',
-                   'Failed to export buffer to dma_buf',
-                   'Failed to take control of the display',
-                   'WARNING: texture bound to texture unit 0 is not renderable',
-                   'Failed to modeset controller'
-                  ],
-                   'atrus': [
-                   '#Internal TrueVoice parameters',
-                   'write: Connection timed out',
-                   'Error: report failed',
-                   'write: Broken pipe',
-                   'protocol error'
-                  ],
-                   'usb_stability': [
-                   '#autotest',
-                   'USB disconnect',
-                   'New USB device found',
-                  ]
-}
-
-
-# !!! Please change the meeting code to one that works for the CfM.
-# !!! If a meeting_code for a different domain is used, the meeting might time out.
-# vol_change_mode: if set to 1, make one call to set the volume to the target volume,
-#                  else, make multiple calls to update the volume until it
-#                  equals the target volume.
-test_config = {
-               'gpio_list': ['218','219', '209'],
-               'gpiopause': 8,
-               'puts': "17e9:016b,266e:0110",
-               'is_meeting': 1,
-               'meeting_code': 'otg-dkps-ovj', # this works for crosprq4.com
-               'repeat': 200,
-               'reboot_timeout': 30,
-               'loop_timeout': 60,
-               'action_timeout': 10,
-               'min_timeout': 5,
-               'debug_timeout': 9999,
-               'vol_change_step': 6,
-               'vol_change_mode': 1,
-               'reboot_after_min_meets': 5,
-               'gpio_after_min_meets': 10
-}
-
-#action_config['meeting_test'] should be 0 or 1.
-#  0: no meeting test to be done
-#  1: In each loop the CfM first joins a meeting; after all tests are done,
-#     the CfM leaves the meeting.
-#The values of 'mute_unmute_camera_test', 'mute_unmute_mic_test',
-#'speaker_volume_test', etc. are the number of times each test is run in
-#one meeting.
-action_config = {
-                 'meeting_test': 1,
-                 'mute_unmute_camera_test': 0,
-                 'mute_unmute_mic_test': 0,
-                 'speaker_volume_test': 0,
-                 'gpio_test': 1,
-                 'reboot_test': 1,
-                 'reset_usb_test': 0,
-                 'flap_monitor_test': 0
-}
-
-
-verification_config = {
-                      'check_usb_enumeration':True,
-                      'check_usb_inf_init': True,
-                      'check_v4l2_interface': True,
-                      'check_audio_card':  True,
-                      'check_cras_speaker': False,
-                      'check_cras_mic': False,
-                      'check_cras_pspeaker': False,
-                      'check_cras_pmic': False,
-                      'check_cras_speaker_vol': False,
-                      'check_cras_mic_mute': False,
-                      'check_prefer_camera': False,
-                      'check_camera_mute': False,
-                      'check_audio_stream': False,
-                      'check_video_stream': False,
-                      'check_hotrod_speaker': False,
-                      'check_hotrod_mic': False,
-                      'check_hotrod_camera': False,
-                      'check_hotrod_pspeaker': False,
-                      'check_hotrod_pmic': False,
-                      'check_hotrod_pcamera': False,
-                      'check_hotrod_speaker_vol': False,
-                      'check_hotrod_mic_state': False,
-                      'check_hotrod_camera_state': False,
-                      'check_usb_errorlog': True,
-                      'check_kernel_errorlog': True,
-                      'check_video_errorlog': False,
-                      'check_audio_errorlog': True,
-                      'check_chrome_errorlog': True,
-                      'check_atrus_errorlog': True,
-                      'check_usb_stability': True,
-                      'check_process_crash': False,
-                      'check_kernel_panic': False
-}
-
-
-test_flow_control = {
-                     'runtestonly': True,
-                     'setupcleanup': True,
-                     'abort_on_failure': True,
-                     'random_mode': True,
-                     'recovery_on_fatal_failure': False,
-                     'skip_cfm_check': True,
-                     'debug': True,
-                     'report': True
-}
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    run_test_only = test_flow_control['runtestonly']
-    job.run_test('enterprise_CFM_Test', host=host,
-                  run_test_only=run_test_only,
-                  test_config=test_config, action_config=action_config,
-                  verification_config=verification_config,
-                  error_key_words=error_key_words,
-                  test_flow_control=test_flow_control,
-                  tag='mimo')
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_Test/control.qual b/server/site_tests/enterprise_CFM_Test/control.qual
deleted file mode 100644
index fa3ed73..0000000
--- a/server/site_tests/enterprise_CFM_Test/control.qual
+++ /dev/null
@@ -1,218 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mzhuo@chromium.org"
-NAME = "enterprise_CFM_Test.qual"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = ""
-JOB_RETRIES = 1
-
-DOC = """
-This test runs various tests and verifications for CfM devices.
-Tests and verifications can be selected in control file.
-action_list defines the list of all available tests; its keys name the tests,
-and each key's value defines how many times that test will be run
-in each loop.
-verification_list defines the list of all available verifications,
-which are keys for dict verification_list. True means the verification specified
-by that key will be performed.
-Testing flow can be set in control file.
-
-Example to run it:
-test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=guado
-100.123.174.2 enterprise_CFM_Test.sanity
-
-Note:
-1. Modify dict error_key_words to meet each test's need.
-2. Modify the meeting code to one that works for the domain the device is enrolled in.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-error_key_words = {'usb': [
-                   'Setup ERROR: setup context command',
-                   'URB transfer length is wrong',
-                   'device descriptor read',
-                   'unable to enumerate USB device',
-                   'hub_port_status failed',
-                   'cannot reset port',
-                   'nonzero write bulk status received',
-                   'Device not responding to setup address',
-                   'device not accepting address',
-                   'Set SEL for device-initiated U2 failed',
-                   'Set SEL for device-initiated U1 failed',
-                   'Disable of device-initiated U1 failed',
-                   'Disable of device-initiated U2 failed',
-                   'usb_set_interface failed',
-                   'MIMO has wedged; issuing hub reset'
-                  ],
-                   'kernel': [
-                   'Error reading udev log info crash_reporter-udev-collection',
-                   'stuck on bsd ring',
-                   'stuck on render ring',
-                   'GPU hangs can indicate a bug anywhere in the entire gfx stack, including userspace',
-                   'GPU crash dump saved to',
-                   'ERROR Transfer event TRB DMA ptr',
-                   'crashes with segfault',
-                   'segfault at',
-                   'Stored minidump to',
-                   'Leaving core file',
-                   'cut here',
-                   'end trace',
-                   'Failed to resubmit video URB',
-                   'go2001_watchdog:',
-                   'go2001_ctx_error_locked:',
-                   'Out of memory: Kill process',
-                   'Killed process'
-                  ],
-                   'video': [
-                   'uvcvideo: Failed to query',
-                   'uvcvideo: Non-zero status',
-                   'uvcvideo: Failed to set UVC commit control',
-                   'uvcvideo: UVC non compliance',
-                   'Failed to resubmit video URB',
-                   'No streaming interface found',
-                   'Dequeued v4l2 buffer contains corrupted data'
-                  ],
-                   'audio': [
-                   'hw_params: Input/output error: rate:',
-                   'usb_set_interface failed',
-                  ],
-                   'chrome': [
-                   'Cannot get RenderProcess',
-                   'segfault at',
-                   'ERR crash_reporter',
-                   'Watchdog resetting firmware',
-                   'Failed to create scanout buffer',
-                   'Failed to enable controller',
-                   'Failed to configure: device',
-                   'Failed to export buffer to dma_buf',
-                   'Failed to take control of the display',
-                   'Failed to modeset controller',
-                   'Failed to encode frame.'
-                  ],
-                   'atrus': [
-                   'write: Connection timed out',
-                   'Error: report failed',
-                   'write: Broken pipe',
-                   'protocol error'
-                  ],
-                   'usb_stability': [
-                   'USB disconnect',
-                   'New USB device found'
-                  ]
-}
-
-
-# !!! Please change the meeting code to one that works for the CfM.
-# !!! If a meeting_code for a different domain is used, the meeting might time out.
-# vol_change_mode: if set to 1, make one call to set the volume to the target volume,
-#                  else, make multiple calls to update the volume until it
-#                  equals the target volume.
-test_config = {
-               'gpio_list': ['218','219', '209'],
-               'gpiopause': 8,
-               'puts': "",
-               'is_meeting': 1,
-               'meeting_code': 'otg-dkps-ovj', # this works for crosprq4.com
-               'repeat': 50,
-               'reboot_timeout': 60,
-               'loop_timeout': 30,
-               'action_timeout': 20,
-               'min_timeout': 10,
-               'debug_timeout': 5,
-               'vol_change_step': 6,
-               'vol_change_mode': 1,
-               'reboot_after_min_meets': 1,
-               'gpio_after_min_meets': 1
-}
-
-#action_config['meeting_test'] should be 0 or 1.
-#  0: no meeting test to be done
-#  1: In each loop the CfM first joins a meeting; after all tests are done,
-#     the CfM leaves the meeting.
-#The values of 'mute_unmute_camera_test', 'mute_unmute_mic_test',
-#'speaker_volume_test', etc. are the number of times each test is run in
-#one meeting.
-action_config = {
-                 'meeting_test': 1,
-                 'mute_unmute_camera_test': 4,
-                 'mute_unmute_mic_test': 4,
-                 'speaker_volume_test': 4,
-                 'gpio_test': 2,
-                 'reboot_test': 2,
-                 'reset_usb_test': 0,
-                 'flap_monitor_test': 0
-}
-
-
-verification_config = {
-                      'check_usb_enumeration':True,
-                      'check_usb_inf_init': True,
-                      'check_v4l2_interface': True,
-                      'check_audio_card':  False,
-                      'check_cras_speaker': True,
-                      'check_cras_mic': True,
-                      'check_cras_pspeaker': True,
-                      'check_cras_pmic': True,
-                      'check_cras_speaker_vol': True,
-                      'check_cras_mic_mute': True,
-                      'check_prefer_camera': False,
-                      'check_camera_mute': False,
-                      'check_audio_stream': True,
-                      'check_video_stream': True,
-                      'check_hotrod_speaker': False,
-                      'check_hotrod_mic': False,
-                      'check_hotrod_camera': False,
-                      'check_hotrod_pspeaker': False,
-                      'check_hotrod_pmic': False,
-                      'check_hotrod_pcamera': False,
-                      'check_hotrod_speaker_vol': False,
-                      'check_hotrod_mic_state': False,
-                      'check_hotrod_camera_state': False,
-                      'check_usb_errorlog': False,
-                      'check_kernel_errorlog': False,
-                      'check_video_errorlog': False,
-                      'check_audio_errorlog': False,
-                      'check_chrome_errorlog': False,
-                      'check_atrus_errorlog': False,
-                      'check_usb_stability': False,
-                      'check_process_crash': True,
-                      'check_kernel_panic': False,
-                      'check_chrome_restarted': True,
-                      'check_cfm_rebooted': True
-}
-
-
-test_flow_control = {
-                     'reboot_before_start': True,
-                     'run_test_only': True,
-                     'setup_cleanup': True,
-                     'abort_on_failure': False,
-                     'random_mode': True,
-                     'recovery_on_fatal_failure': True,
-                     'skip_cfm_check': True,
-                     'report': True
-}
-
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    run_test_only = test_flow_control['run_test_only']
-    job.run_test('enterprise_CFM_Test', host=host,
-                  run_test_only=run_test_only,
-                  test_config=test_config, action_config=action_config,
-                  verification_config=verification_config,
-                  error_key_words=error_key_words,
-                  test_flow_control=test_flow_control,
-                  tag='qual')
-
-
-parallel_simple(run_test, machines)
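
vol_change_mode in test_config selects between a single call that sets the target volume directly and repeated calls that step toward it by vol_change_step. A hypothetical sketch of the step-wise mode; get_volume/set_volume stand in for whatever CfM volume accessors the test uses:

def step_volume(get_volume, set_volume, target, step):
    """Move the speaker volume toward target in increments of at most step."""
    current = get_volume()
    while current != target:
        delta = max(-step, min(step, target - current))
        current += delta
        set_volume(current)
    return current

# Example with in-memory state instead of a real CfM:
state = {'vol': 40}
step_volume(lambda: state['vol'], lambda v: state.update(vol=v),
            target=10, step=6)
print(state['vol'])  # 10
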
diff --git a/server/site_tests/enterprise_CFM_Test/control.sanity b/server/site_tests/enterprise_CFM_Test/control.sanity
deleted file mode 100644
index 7594610..0000000
--- a/server/site_tests/enterprise_CFM_Test/control.sanity
+++ /dev/null
@@ -1,216 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mzhuo@chromium.org"
-NAME = "enterprise_CFM_Test.sanity"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = ""
-JOB_RETRIES = 1
-
-DOC = """
-This test runs various tests and verifications for CfM devices.
-Tests and verifications can be selected in control file.
-action_list defines the list of all available tests; its keys name the tests,
-and each key's value defines how many times that test will be run
-in each loop.
-verification_list defines the list of all available verifications,
-which are keys for dict verification_list. True means the verification specified
-by that key will be performed.
-Testing flow can be set in control file.
-
-Example to run it:
-test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=guado
-100.123.174.2 enterprise_CFM_Test.sanity
-
-Note:
-1. Modify dict error_key_words to meet each test's need.
-2. Modify the meeting code to one that works for the domain the device is enrolled in.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-error_key_words = {'usb': [
-                   'Setup ERROR: setup context command',
-                   'URB transfer length is wrong',
-                   'device descriptor read',
-                   'unable to enumerate USB device',
-                   'hub_port_status failed',
-                   'cannot reset port',
-                   'nonzero write bulk status received',
-                   'Device not responding to setup address',
-                   'device not accepting address',
-                   'Set SEL for device-initiated U2 failed',
-                   'Set SEL for device-initiated U1 failed',
-                   'Disable of device-initiated U1 failed',
-                   'Disable of device-initiated U2 failed',
-                   'usb_set_interface failed',
-                   'MIMO has wedged; issuing hub reset'
-                  ],
-                   'kernel': [
-                   'Error reading udev log info crash_reporter-udev-collection',
-                   'stuck on bsd ring',
-                   'stuck on render ring',
-                   'GPU hangs can indicate a bug anywhere in the entire gfx stack, including userspace',
-                   'GPU crash dump saved to',
-                   'ERROR Transfer event TRB DMA ptr',
-                   'crashes with segfault',
-                   'segfault at',
-                   'Stored minidump to',
-                   'Leaving core file',
-                   'cut here',
-                   'end trace',
-                   'Failed to resubmit video URB',
-                   'go2001_watchdog:',
-                   'go2001_ctx_error_locked:',
-                   'Out of memory: Kill process',
-                   'Killed process'
-                  ],
-                   'video': [
-                   'uvcvideo: Failed to query',
-                   'uvcvideo: Non-zero status',
-                   'uvcvideo: Failed to set UVC commit control',
-                   'uvcvideo: UVC non compliance',
-                   'Failed to resubmit video URB',
-                   'No streaming interface found',
-                   'Dequeued v4l2 buffer contains corrupted data'
-                  ],
-                   'audio': [
-                   'hw_params: Input/output error: rate:',
-                   'usb_set_interface failed',
-                  ],
-                   'chrome': [
-                   'Cannot get RenderProcess',
-                   'segfault at',
-                   'ERR crash_reporter',
-                   'Watchdog resetting firmware',
-                   'Failed to create scanout buffer',
-                   'Failed to enable controller',
-                   'Failed to configure: device',
-                   'Failed to export buffer to dma_buf',
-                   'Failed to take control of the display',
-                   'Failed to modeset controller',
-                   'Failed to encode frame.'
-                  ],
-                   'atrus': [
-                   'write: Connection timed out',
-                   'Error: report failed',
-                   'write: Broken pipe',
-                   'protocol error'
-                  ],
-                   'usb_stability': [
-                   'USB disconnect',
-                   'New USB device found'
-                  ]
-}
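
These keyword lists are plain substrings that enterprise_CFM_Test.py later scans device logs for (via cfm_helper.check_log). A minimal sketch of that style of check, assuming a pre-fetched list of log lines rather than the real autotest helpers:

def scan_log_for_errors(log_lines, error_key_words, category):
    """Return (ok, errmsg), mirroring the (status, message) convention used here."""
    for line in log_lines:
        for keyword in error_key_words.get(category, []):
            if keyword in line:
                return False, 'Found "%s" in: %s' % (keyword, line.strip())
    return True, None
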
-
-
-# !!! Please change meeting_code to one that works for this CfM.
-# !!! If a meeting_code for a different domain is used, the meeting may time out.
-# vol_change_mode: if set to 1, make one call to set the volume to the target
-#                  volume; otherwise, make multiple calls to step the volume
-#                  until it equals the target volume (see the sketch below).
-test_config = {
-               'gpio_list': ['218','219', '209'],
-               'gpiopause': 8,
-               'puts': "",
-               'is_meeting': 1,
-               'meeting_code': 'otg-dkps-ovj', # this works for crosprq4.com
-               'repeat': 5,
-               'reboot_timeout': 60,
-               'loop_timeout': 30,
-               'action_timeout': 20,
-               'min_timeout': 10,
-               'debug_timeout': 5,
-               'vol_change_step': 6,
-               'vol_change_mode': 1,
-               'reboot_after_min_meets': 1,
-               'gpio_after_min_meets': 1
-}
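
A minimal sketch of the two vol_change_mode behaviours described above; set_volume() and get_volume() are hypothetical stand-ins for the real telemetry calls:

def change_volume(target, step, mode, set_volume, get_volume):
    # Mode 1: a single call jumps straight to the target volume.
    if mode == 1:
        set_volume(target)
        return
    # Otherwise: step the volume repeatedly until it reaches the target.
    current = get_volume()
    while current != target:
        if current < target:
            current = min(current + step, target)
        else:
            current = max(current - step, target)
        set_volume(current)
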
-
-# action_config['meeting_test'] should be 0 or 1.
-#   0: no meeting test is done.
-#   1: in each loop the CfM first joins a meeting; after all tests are done,
-#      the CfM leaves the meeting.
-# The value of 'mute_unmute_camera_test', 'mute_unmute_mic_test',
-# 'speaker_volume_test', etc. is the number of times that test is run in
-# one meeting (see the expansion sketch after this dict).
-action_config = {
-                 'meeting_test': 1,
-                 'mute_unmute_camera_test': 4,
-                 'mute_unmute_mic_test': 4,
-                 'speaker_volume_test': 4,
-                 'gpio_test': 2,
-                 'reboot_test': 2,
-                 'reset_usb_test': 0,
-                 'flap_monitor_test': 0
-}
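
A minimal sketch of how these counts could expand into one loop's flat action sequence; the real expansion (including random ordering and the gpio/reboot special-casing) lives in initialize_action_check_config of enterprise_CFM_Test.py:

def expand_actions(action_config):
    actions = []
    if action_config.get('meeting_test') == 1:
        actions.append('meeting_test')      # join the meeting first
    for name, count in sorted(action_config.items()):
        if name != 'meeting_test':
            actions.extend([name] * count)  # run each test 'count' times
    if action_config.get('meeting_test') == 1:
        actions.append('meeting_test')      # leave the meeting at the end
    return actions
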
-
-
-verification_config = {
-                      'check_usb_enumeration':True,
-                      'check_usb_inf_init': True,
-                      'check_v4l2_interface': True,
-                      'check_audio_card':  False,
-                      'check_cras_speaker': True,
-                      'check_cras_mic': True,
-                      'check_cras_pspeaker': True,
-                      'check_cras_pmic': True,
-                      'check_cras_speaker_vol': True,
-                      'check_cras_mic_mute': True,
-                      'check_prefer_camera': False,
-                      'check_camera_mute': False,
-                      'check_audio_stream': True,
-                      'check_video_stream': True,
-                      'check_hotrod_speaker': False,
-                      'check_hotrod_mic': False,
-                      'check_hotrod_camera': False,
-                      'check_hotrod_pspeaker': False,
-                      'check_hotrod_pmic': False,
-                      'check_hotrod_pcamera': False,
-                      'check_hotrod_speaker_vol': False,
-                      'check_hotrod_mic_state': False,
-                      'check_hotrod_camera_state': False,
-                      'check_usb_errorlog': False,
-                      'check_kernel_errorlog': False,
-                      'check_video_errorlog': False,
-                      'check_audio_errorlog': False,
-                      'check_chrome_errorlog': False,
-                      'check_atrus_errorlog': False,
-                      'check_usb_stability': False,
-                      'check_process_crash': True,
-                      'check_kernel_panic': False,
-                      'check_chrome_restarted': True,
-                      'check_cfm_rebooted': True
-}
-
-
-test_flow_control = {
-                     'reboot_before_start': True,
-                     'run_test_only': True,
-                     'setup_cleanup': True,
-                     'abort_on_failure': True,
-                     'random_mode': True,
-                     'recovery_on_fatal_failure': True,
-                     'skip_cfm_check': False,
-                     'report': True
-}
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    run_test_only = test_flow_control['run_test_only']
-    job.run_test('enterprise_CFM_Test', host=host,
-                  run_test_only=run_test_only,
-                  test_config=test_config, action_config=action_config,
-                  verification_config=verification_config,
-                  error_key_words=error_key_words,
-                  test_flow_control=test_flow_control,
-                  tag='sanity')
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_Test/control.test b/server/site_tests/enterprise_CFM_Test/control.test
deleted file mode 100644
index efaec85..0000000
--- a/server/site_tests/enterprise_CFM_Test/control.test
+++ /dev/null
@@ -1,216 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mzhuo@chromium.org"
-NAME = "enterprise_CFM_Test.test"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = ""
-JOB_RETRIES = 1
-
-DOC = """
-This test runs various tests and verifications for CfM devices.
-Tests and verifications can be selected in the control file.
-action_config defines the list of all available tests; each key is a test name
-and its value is how many times that test will be run in each loop.
-verification_config defines the list of all available verifications; each key
-is a verification name, and a value of True means that verification will be
-performed.
-The testing flow can be set in the control file.
-
-Example to run it:
-test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --board=guado
-100.123.174.2 enterprise_CFM_Test.test
-
-Note:
-1. Modify dict error_key_words to meet each test's need.
-2. Modify the meeting code to one that works for the domain the device is enrolled in.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-error_key_words = {'usb': [
-                   'Setup ERROR: setup context command',
-                   'URB transfer length is wrong',
-                   'device descriptor read',
-                   'unable to enumerate USB device',
-                   'hub_port_status failed',
-                   'cannot reset port',
-                   'nonzero write bulk status received',
-                   'Device not responding to setup address',
-                   'device not accepting address',
-                   'Set SEL for device-initiated U2 failed',
-                   'Set SEL for device-initiated U1 failed',
-                   'Disable of device-initiated U1 failed',
-                   'Disable of device-initiated U2 failed',
-                   'usb_set_interface failed',
-                   'MIMO has wedged; issuing hub reset'
-                  ],
-                   'kernel': [
-                   'Error reading udev log info crash_reporter-udev-collection',
-                   'stuck on bsd ring',
-                   'stuck on render ring',
-                   'GPU hangs can indicate a bug anywhere in the entire gfx stack, including userspace',
-                   'GPU crash dump saved to',
-                   'ERROR Transfer event TRB DMA ptr',
-                   'crashes with segfault',
-                   'segfault at',
-                   'Stored minidump to',
-                   'Leaving core file',
-                   'cut here',
-                   'end trace',
-                   'Failed to resubmit video URB',
-                   'go2001_watchdog:',
-                   'go2001_ctx_error_locked:',
-                   'Out of memory: Kill process',
-                   'Killed process'
-                  ],
-                   'video': [
-                   'uvcvideo: Failed to query',
-                   'uvcvideo: Non-zero status',
-                   'uvcvideo: Failed to set UVC commit control',
-                   'uvcvideo: UVC non compliance',
-                   'Failed to resubmit video URB',
-                   'No streaming interface found',
-                   'Dequeued v4l2 buffer contains corrupted data'
-                  ],
-                   'audio': [
-                   'hw_params: Input/output error: rate:',
-                   'usb_set_interface failed',
-                  ],
-                   'chrome': [
-                   'Cannot get RenderProcess',
-                   'segfault at',
-                   'ERR crash_reporter',
-                   'Watchdog resetting firmware',
-                   'Failed to create scanout buffer',
-                   'Failed to enable controller',
-                   'Failed to configure: device',
-                   'Failed to export buffer to dma_buf',
-                   'Failed to take control of the display',
-                   'Failed to modeset controller',
-                   'Failed to encode frame.'
-                  ],
-                   'atrus': [
-                   'write: Connection timed out',
-                   'Error: report failed',
-                   'write: Broken pipe',
-                   'protocol error'
-                  ],
-                   'usb_stability': [
-                   'USB disconnect',
-                   'New USB device found'
-                  ]
-}
-
-
-# !!! Please change meeting_code to one that works for this CfM.
-# !!! If a meeting_code for a different domain is used, the meeting may time out.
-# vol_change_mode: if set to 1, make one call to set the volume to the target
-#                  volume; otherwise, make multiple calls to step the volume
-#                  until it equals the target volume.
-test_config = {
-               'gpio_list': ['218','219', '209'],
-               'gpiopause': 8,
-               'puts': "",
-               'is_meeting': 1,
-               'meeting_code': 'otg-dkps-ovj', # this works for crosprq4.com
-               'repeat': 10,
-               'reboot_timeout': 60,
-               'loop_timeout': 30,
-               'action_timeout': 20,
-               'min_timeout': 10,
-               'debug_timeout': 9999,
-               'vol_change_step': 6,
-               'vol_change_mode': 1,
-               'reboot_after_min_meets': 1,
-               'gpio_after_min_meets': 1
-}
-
-# action_config['meeting_test'] should be 0 or 1.
-#   0: no meeting test is done.
-#   1: in each loop the CfM first joins a meeting; after all tests are done,
-#      the CfM leaves the meeting.
-# The value of 'mute_unmute_camera_test', 'mute_unmute_mic_test',
-# 'speaker_volume_test', etc. is the number of times that test is run in
-# one meeting.
-action_config = {
-                 'meeting_test': 1,
-                 'mute_unmute_camera_test': 10,
-                 'mute_unmute_mic_test': 10,
-                 'speaker_volume_test': 10,
-                 'gpio_test': 5,
-                 'reboot_test': 5,
-                 'reset_usb_test': 0,
-                 'flap_monitor_test': 0
-}
-
-
-verification_config = {
-                      'check_usb_enumeration':True,
-                      'check_usb_inf_init': True,
-                      'check_v4l2_interface': True,
-                      'check_audio_card':  True,
-                      'check_cras_speaker': True,
-                      'check_cras_mic': True,
-                      'check_cras_pspeaker': True,
-                      'check_cras_pmic': True,
-                      'check_cras_speaker_vol': True,
-                      'check_cras_mic_mute': True,
-                      'check_prefer_camera': False,
-                      'check_camera_mute': False,
-                      'check_audio_stream': True,
-                      'check_video_stream': True,
-                      'check_hotrod_speaker': False,
-                      'check_hotrod_mic': False,
-                      'check_hotrod_camera': False,
-                      'check_hotrod_pspeaker': False,
-                      'check_hotrod_pmic': False,
-                      'check_hotrod_pcamera': False,
-                      'check_hotrod_speaker_vol': False,
-                      'check_hotrod_mic_state': False,
-                      'check_hotrod_camera_state': False,
-                      'check_usb_errorlog':  True,
-                      'check_kernel_errorlog': True,
-                      'check_video_errorlog': True,
-                      'check_audio_errorlog': True,
-                      'check_chrome_errorlog': True,
-                      'check_atrus_errorlog': True,
-                      'check_usb_stability': True,
-                      'check_process_crash': True,
-                      'check_kernel_panic': False,
-                      'check_chrome_restarted': True,
-                      'check_cfm_rebooted': True
-}
-
-
-test_flow_control = {
-                     'reboot_before_start': False,
-                     'run_test_only': True,
-                     'setup_cleanup': True,
-                     'abort_on_failure': True,
-                     'random_mode': True,
-                     'recovery_on_fatal_failure': True,
-                     'skip_cfm_check': True,
-                     'report': True
-}
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    run_test_only = test_flow_control['run_test_only']
-    job.run_test('enterprise_CFM_Test', host=host,
-                  run_test_only=run_test_only,
-                  test_config=test_config, action_config=action_config,
-                  verification_config=verification_config,
-                  error_key_words=error_key_words,
-                  test_flow_control=test_flow_control,
-                  tag='test')
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_Test/enterprise_CFM_Test.py b/server/site_tests/enterprise_CFM_Test/enterprise_CFM_Test.py
deleted file mode 100644
index 3987169..0000000
--- a/server/site_tests/enterprise_CFM_Test/enterprise_CFM_Test.py
+++ /dev/null
@@ -1,981 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import random
-import logging
-import time
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.cfm import cfm_base_test
-from autotest_lib.client.common_lib.cros.manual import audio_helper
-from autotest_lib.client.common_lib.cros.manual import cfm_helper
-from autotest_lib.client.common_lib.cros.manual import meet_helper
-from autotest_lib.client.common_lib.cros.manual import video_helper
-from autotest_lib.client.common_lib.cros.manual import get_usb_devices
-
-ATRUS = "Hangouts Meet speakermic"
-CORE_DIR_LINES = 5
-FAILURE_REASON = {
-                 'telemetry':
-                     ['No hangouts or meet telemetry API available',
-                      'telemetry api',
-                      'RPC: cfm_main_screen.',
-                      'Failed RPC',
-                      'cfm_main_screen',
-                      'Webview with screen param',
-                      'autotest_lib.client.common_lib.error.TestFail'],
-                 'chrome':[],
-                 'kernel':[],
-                 'atrus':[]
-}
-NUM_AUDIO_STREAM_IN_MEETING = 3
-LOG_CHECK_LIST = ['check_kernel_errorlog',
-                  'check_video_errorlog',
-                  'check_audio_errorlog',
-                  'check_chrome_errorlog',
-                  'check_atrus_errorlog',
-                  'check_usb_stability']
-TEST_DELAY = 0
-
-class enterprise_CFM_Test(cfm_base_test.CfmBaseTest):
-    """Executes multiple tests on CFM device based on control file,
-    after each test, perform mulitple verifications based on control
-    file. Test flow can be controlled by control file, such as abort
-    on failure, or continue on failure.
-    """
-    version = 1
-
-    def gpio_test(self):
-        """
-        Powercycle USB port on Guado.
-        """
-        if self.run_meeting_test:
-            if self.random:
-                min_meetings = random.randrange(-1, self.gpio_min_meets)
-            else:
-                min_meetings = self.gpio_min_meets
-            if self.meets_last_gpio <=  min_meetings:
-                logging.debug('Skip gpio test.')
-                return True, None
-        if cfm_helper.check_is_platform(self.client, 'guado'):
-            status, errmsg =  cfm_helper.gpio_usb_test(self.client,
-                              self.gpio_list,
-                              self.puts, self.gpio_pause,
-                              'guado')
-            self.gpio_no += 1
-            self.meets_last_gpio = 0
-        else:
-            logging.info('Skip gpio_test for non-guado CfM.')
-            return True, None
-
-        ## workaround for bug b/69261543
-        if self.is_in_meet:
-            self.is_camera_muted = self.cfm_facade.is_camera_muted()
-        return status, errmsg
-
-
-    def meeting_test(self):
-        """
-        Join/leave meeting.
-        """
-        if self.is_in_meet:
-            status, errmsg = meet_helper.leave_meeting(self.cfm_facade,
-                             self.is_meeting)
-            if status:
-                self.is_camera_muted = True
-                self.is_in_meet = False
-                return True, None
-            else:
-                if self.recover_on_failure or self.meets_last_reboot == 0:
-                    self.is_in_meet = False
-                else:
-                    self.is_in_meet = True
-                return False, errmsg
-        else:
-            status, errmsg = meet_helper.join_meeting(self.cfm_facade,
-                             self.is_meeting, self.meeting_code)
-            if status:
-                self.is_camera_muted = False
-                self.is_in_meet = True
-                self.meet_no += 1
-                self.meets_last_reboot += 1
-                self.meets_last_gpio += 1
-                return True, None
-            else:
-                if self.recover_on_failure or self.meets_last_reboot == 0:
-                    self.is_in_meet = True
-                else:
-                    self.is_in_meet = False
-                return False, errmsg
-
-    def reboot_test(self):
-        """
-        Reboot CfM.
-        """
-        if self.run_meeting_test:
-            if self.random:
-                min_meetings = random.randrange(-1, self.reboot_min_meets)
-            else:
-                min_meetings = self.reboot_min_meets
-            if self.meets_last_reboot <  min_meetings:
-                logging.info('Skip reboot CfM test')
-                return True, None
-        try:
-            self.client.reboot()
-            time.sleep(self.reboot_timeout)
-        except Exception as e:
-            errmsg = 'Reboot test fails for %s' % self.ip_addr
-            logging.exception(errmsg)
-            self.reboot_log = cfm_helper.check_last_reboot(self.client)
-            self.chrome_file = cfm_helper.check_chrome_logfile(self.client)
-            return False, errmsg
-        self.reboot_no += 1
-        self.meets_last_reboot = 0
-        if 'meeting_test' in self.action_config:
-            self.reboot_log = cfm_helper.check_last_reboot(self.client)
-            if list(set(self.verification_config) & set(LOG_CHECK_LIST)):
-                self.log_checking_point = cfm_helper.find_last_log(self.client,
-                                                                   self.speaker)
-            return self.restart_chrome_and_meeting(True)
-        self.reboot_log = cfm_helper.check_last_reboot(self.client)
-        self.chrome_file = cfm_helper.check_chrome_logfile(self.client)
-        if list(set(self.verification_config) & set(LOG_CHECK_LIST)):
-            self.log_checking_point = cfm_helper.find_last_log(self.client,
-                                                               self.speaker)
-        return True, None
-
-    def restart_chrome_and_meeting(self, recovery):
-        """
-        Restart Chrome, and join/start the meeting if the device was
-        previously in a meeting.
-        """
-        result, errmsg = meet_helper.restart_chrome(self.cfm_facade,
-                                                    self.is_meeting,
-                                                    recovery)
-        logging.info('restart chrome result:%s, msg = %s', result,errmsg)
-        if not result:
-            logging.info('restart chrome result: False, msg = %s', errmsg)
-            self.chrome_file = cfm_helper.check_chrome_logfile(self.client)
-            return False, errmsg
-        if self.is_in_meet:
-            logging.info('start meeting if needed')
-            self.is_in_meet = False
-            self.chrome_file = cfm_helper.check_chrome_logfile(self.client)
-            test_result, ret_msg =  self.meeting_test()
-            if test_result:
-                try:
-                    self.is_camera_muted =  self.cfm_facade.is_camera_muted()
-                    self.is_mic_muted = self.cfm_facade.is_mic_muted()
-                except Exception as e:
-                    errmsg = 'Fail to run telemetry api to check camera..'
-                    logging.exception(errmsg)
-                    return False, errmsg
-        self.chrome_file = cfm_helper.check_chrome_logfile(self.client)
-        return True, None
-
-    # TODO(mzhuo): Adding resetusb test.
-    def reset_usb_test(self):
-        """
-        Reset USB port
-        """
-        return True, None
-
-    def mute_unmute_camera_test(self):
-        """
-        Mute or unmute camera.
-        """
-        if not self.camera:
-            logging.info('Skip mute/unmute camera testing.')
-            return True, None
-        if self.is_in_meet:
-            if self.is_camera_muted:
-                status, errmsg = meet_helper.mute_unmute_camera(
-                                 self.cfm_facade, True)
-                if status:
-                    self.is_camera_muted = False
-                else:
-                    return False, errmsg
-            else:
-                status, errmsg =  meet_helper.mute_unmute_camera(
-                                  self.cfm_facade, False)
-                if status:
-                    self.is_camera_muted = True
-                else:
-                    return False, errmsg
-        return True, None
-
-    def mute_unmute_mic_test(self):
-        """
-        Mute or unmute microphone.
-        """
-        if not self.speaker and not cfm_helper.check_is_platform(self.client,
-                                                                 'buddy'):
-            logging.info('Skip mute/unmute microphone testing.')
-            return True, None
-        if self.is_in_meet:
-            if self.is_mic_muted:
-                status, errmsg =  meet_helper.mute_unmute_mic(self.cfm_facade,
-                                  True)
-                if status:
-                    self.is_mic_muted = False
-                else:
-                    return False, errmsg
-            else:
-                status, errmsg =  meet_helper.mute_unmute_mic(self.cfm_facade,
-                                  False)
-                if status:
-                    self.is_mic_muted = True
-                else:
-                    return False, errmsg
-        return True, None
-
-
-    def speaker_volume_test(self):
-        """
-        Update speaker volume.
-        """
-        if not self.speaker and not cfm_helper.check_is_platform(self.client,
-                                                                 'buddy'):
-            logging.info('Skip update volume of speaker testing.')
-            return True, None
-        if self.is_in_meet:
-            test_result, ret_msg =  meet_helper.speaker_volume_test(
-                                   self.cfm_facade,
-                                   self.vol_change_step,
-                                   self.vol_change_mode, self.random)
-            if test_result:
-                self.speaker_volume = int(ret_msg)
-                return True, None
-            else:
-                return False, ret_msg
-        else:
-            return True, None
-
-    # TODO(mzhuo): Adding test to turn on/off monitor.
-    def flap_monitor_test(self):
-        """
-        Connect or disconnect monitor.
-        """
-        return True, None
-
-    def check_usb_enumeration(self):
-        """
-        Verify all usb devices which were enumerated originally are enumerated.
-        """
-        return cfm_helper.check_usb_enumeration(self.client,
-                                                self.puts)
-
-    def check_usb_inf_init(self):
-        """
-        Verify all usb devices which were enumerated originally have
-        valid interfaces: video interface, audio interface or touch
-        interface.
-        """
-        return cfm_helper.check_usb_interface_initializion(self.client,
-               self.puts)
-
-    def check_v4l2_interface(self):
-        """
-        Verify camera has v4l2 file handler created.
-        """
-        if not self.camera:
-            return True, None
-        return video_helper.check_v4l2_interface(self.client,
-               self.camera, self.name_camera)
-
-    def check_audio_card(self):
-        """
-        Verify speaker/microphone has audio file handler created.
-        """
-        if not self.speaker:
-            return True, None
-        return audio_helper.check_soundcard_by_name(self.client,
-               self.name_speaker)
-
-    def check_cras_speaker(self):
-        """
-        Verify cras server detects speaker.
-        """
-        if not self.speaker:
-            return True, None
-        return audio_helper.check_speaker_exist_cras(self.client,
-               self.name_speaker)
-
-    def check_cras_mic(self):
-        """
-        Verify cras server detects microphone.
-        """
-        if not self.speaker:
-            return True, None
-        return audio_helper.check_microphone_exist_cras(self.client,
-               self.name_speaker)
-
-    def check_cras_mic_mute(self):
-        """
-        Verify cras shows mic muted or unmuted as expected.
-        """
-        if not self.speaker or not self.is_in_meet:
-            return True, None
-        return audio_helper.check_cras_mic_mute(self.client, self.cfm_facade)
-
-    def check_cras_pspeaker(self):
-        """
-        Verify cras shows correct preferred speaker.
-        """
-        if not self.speaker:
-            return True, None
-        return  audio_helper.check_is_preferred_speaker(self.client,
-                self.name_speaker)
-
-    def check_cras_speaker_vol(self):
-        """
-        Verify cras shows correct volume for speaker.
-        """
-        if not self.speaker or not self.is_in_meet:
-            return True, None
-        return audio_helper.check_default_speaker_volume(self.client,
-               self.cfm_facade)
-
-    def check_cras_pmic(self):
-        """
-        Verify cras shows correct preferred microphone.
-        """
-        if not self.speaker:
-            return True, None
-        return  audio_helper.check_is_preferred_mic(self.client,
-                self.name_speaker)
-
-    # TODO(mzhuo): add verification for preferred camera
-    def check_prefer_camera(self):
-        """
-        Verify preferred camera is correct.
-        """
-        return True, None
-
-    #TODO(mzhuo): add verification to verify camera is muted or unmuted
-    #in video stack in kernel space.
-    def check_camera_mute(self):
-        """
-        Verify camera is muted or unmuted as expected.
-        """
-        return True, None
-
-    def check_video_stream(self):
-        """
-        Verify camera is streaming or not streaming as expected.
-        """
-        if not self.camera:
-            return True, None
-        return video_helper.check_video_stream(self.client,
-               self.is_camera_muted, self.camera, self.name_camera)
-
-    def check_audio_stream(self):
-        """
-        Verify speaker is streaming or not streaming as expected.
-        """
-        if not self.speaker:
-            return True, None
-        return audio_helper.check_audio_stream(self.client,
-               self.is_in_meet)
-
-    # TODO(mzhuo): Adding verification for speaker in Hotrod App
-    def check_hotrod_speaker(self):
-        """
-        Verify hotrod shows all speakers.
-        """
-        return True, None
-
-    # TODO(mzhuo): Adding verification for mic in Hotrod App
-    def check_hotrod_mic(self):
-        """
-        Verify hotrod shows all microphones.
-        """
-        return True, None
-
-    # TODO(mzhuo): Adding verification for camera in Hotrod App
-    def check_hotrod_camera(self):
-        """
-        Verify hotrod shows all cameras.
-        """
-        return True, None
-
-    # TODO(mzhuo): Adding verification for speaker in Hotrod App
-    def check_hotrod_pspeaker(self):
-        """
-        Verify hotrod selects correct preferred speaker.
-        """
-        return True, None
-
-    # TODO(mzhuo): Adding verification for mic in Hotrod App
-    def check_hotrod_pmic(self):
-        """
-        Verify hotrod selects correct preferred microphone.
-        """
-        return True, None
-
-
-    # TODO(mzhuo): Adding verification for camera in Hotrod App
-    def check_hotrod_pcamera(self):
-        """
-        Verify hotrod selects correct preferred camera.
-        """
-        return True, None
-
-    #TODO(mzhuo): Adding verification in hotrod layer for speaker volume
-    def check_hotrod_speaker_vol(self):
-        """
-        Verify hotrod can set volume for speaker.
-        """
-        return True, None
-
-    #TODO(mzhuo): Adding verification in hotrod layer for mic mute status
-    def check_hotrod_mic_state(self):
-        """
-        Verify hotrod can mute or unmute microphone.
-        """
-        return True, None
-
-    #TODO(mzhuo): Adding verification in hotrod layer for camera status
-    def check_hotrod_camera_state(self):
-        """
-        Verify hotrod can mute or unmute camera.
-        """
-        return True, None
-
-    def check_kernel_errorlog(self):
-        """
-        Check /var/log/messages does not contain any element in
-        error_key_words['kernel'].
-        """
-        return cfm_helper.check_log(self.client, self.log_checking_point,
-                                    self.error_key_words, 'kernel',
-                                    'messages')
-
-    def check_chrome_errorlog(self):
-        """
-        Check /var/log/chrome/chrome does not contain any element in
-        error_key_words['chrome'].
-        """
-        return cfm_helper.check_log(self.client, self.log_checking_point,
-                                    self.error_key_words, 'chrome',
-                                    'chrome')
-
-    def check_atrus_errorlog(self):
-        """
-        Check /var/log/atrus.log does not contain any element in
-        error_key_words['atrus'].
-        """
-        if self.current_test in ['gpio_test', 'reset_usb_test']:
-            return True, None
-        if not self.name_speaker:
-            return True, None
-        if self.name_speaker not in ATRUS:
-            logging.info('Speaker %s', self.name_speaker)
-            return True, None
-        if cfm_helper.check_is_platform(self.client, 'guado'):
-            return cfm_helper.check_log(self.client, self.log_checking_point,
-                                        self.error_key_words, 'atrus', 'atrus')
-        else:
-            return True, None
-
-    def check_video_errorlog(self):
-        """
-        Check /var/log/messages does not contain any element in
-        error_key_words['video'].
-        """
-        return cfm_helper.check_log(self.client, self.log_checking_point,
-                                    self.error_key_words, 'video',
-                                    'messages')
-
-    def check_audio_errorlog(self):
-        """
-        Check /var/log/messages does not contain any element in
-        error_key_words['audio'].
-        """
-        return cfm_helper.check_log(self.client, self.log_checking_point,
-                                    self.error_key_words, 'audio',
-                                    'messages')
-
-    def check_usb_errorlog(self):
-        """
-        Check /var/log/messages does not contain any element in
-        error_key_words['usb'].
-        """
-        return cfm_helper.check_log(self.client, self.log_checking_point,
-               self.error_key_words, 'usb', 'messages')
-
-    def check_usb_stability(self):
-        """
-        Check that, if no disruptive test was run, no USB device went offline.
-        """
-        if self.current_test in ['gpio_test', 'reboot_test', 'reset_usb_test']:
-            return True, None
-        return cfm_helper.check_log(self.client, self.log_checking_point,
-                                    self.error_key_words,
-                                    'usb_stability', 'messages')
-
-    def check_process_crash(self):
-        """
-        Check that no process has crashed.
-        """
-        test_result, self.cdlines = cfm_helper.check_process_crash(self.client,
-                                self.cdlines)
-        return test_result, str(self.cdlines)
-
-    #TODO(mzhuo): Adding verification to check whether there is a kernel panic
-    def check_kernel_panic(self):
-        """
-        Check no kernel panic reported.
-        """
-        return True, None
-
-
-    def check_chrome_restarted(self):
-        """
-        Check whether chrome is killed and restarted.
-        """
-        if self.chrome_file == cfm_helper.check_chrome_logfile(self.client):
-            return True, None
-        else:
-            self.chrome_file = cfm_helper.check_chrome_logfile(self.client)
-            return False, 'Chrome was restarted unexpectedly.'
-
-    def check_cfm_rebooted(self):
-        """
-        Check whether CfM is rebooted.
-        """
-        logging.info('Last reboot: %s', self.reboot_log)
-        if self.reboot_log == cfm_helper.check_last_reboot(self.client):
-            return True, None
-        else:
-            self.reboot_log = cfm_helper.check_last_reboot(self.client)
-            return False, 'CfM was rebooted unexpectedly.'
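
check_chrome_restarted and check_cfm_rebooted share the same cache-and-compare pattern: remember a marker (the newest chrome log name, or the last reboot record) and flag a change as an unexpected restart. A condensed, standalone sketch with a hypothetical read_marker callable in place of the cfm_helper calls:

class RestartDetector(object):
    """Illustrative only; not part of the original test."""

    def __init__(self, read_marker):
        self._read_marker = read_marker   # e.g. returns newest chrome log name
        self._last = read_marker()

    def check(self, what):
        current = self._read_marker()
        if current == self._last:
            return True, None
        self._last = current              # resync so later checks can pass
        return False, '%s was restarted unexpectedly.' % what
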
-
-    def initialize_action_check_config(self, action_config, verification_config,
-                                       fixedmode):
-        """
-        Initialize action list based on control file.
-        @param action_config: dict that defines how many times each test
-                              should be run.
-        @param verification_config: dict that defines which verifications
-                                    are enabled.
-        @param fixedmode: if True all tests are executed in a fixed order;
-                          if False all tests are executed in a random order.
-        """
-        self.action_config =  []
-        if action_config['meeting_test'] == 1:
-            self.action_config = ['meeting_test']
-        if not self.camera:
-            action_config['mute_unmute_camera_test'] = 0
-            verification_config['check_v4l2_interface'] = False
-            verification_config['check_video_stream'] = False
-            verification_config['check_video_errorlog'] = False
-        if not self.speaker:
-            if not cfm_helper.check_is_platform(self.client, 'buddy'):
-                action_config['mute_unmute_mic_test'] = 0
-                action_config['speaker_volume_test']  = 0
-            verification_config['check_audio_card'] = False
-            verification_config['check_cras_speaker'] = False
-            verification_config['check_cras_mic'] = False
-            verification_config['check_cras_pspeaker'] = False
-            verification_config['check_cras_pmic'] = False
-            verification_config['check_audio_stream'] = False
-            verification_config['check_audio_errorlog'] = False
-            verification_config['check_cras_speaker_vol'] = False
-            verification_config['check_cras_mic_mute'] = False
-        not_in_meeting_action = ['meeting_test', 'gpio_test', 'reboot_test']
-
-        if fixedmode:
-            for action, nof_times in action_config.iteritems():
-                if not action in not_in_meeting_action:
-                    self.action_config.extend(nof_times * [action])
-        else:
-            for action, nof_times in action_config.iteritems():
-                if not action in not_in_meeting_action and nof_times  > 0:
-                    dup_test = max(1, random.randrange(0, nof_times))
-                    for _ in range(dup_test):
-                        self.action_config.insert(max(1, random.randrange(-1,
-                             len(self.action_config))), action)
-
-        if action_config['meeting_test'] == 1:
-            self.action_config.append('meeting_test')
-
-        for action, nof_times in action_config.iteritems():
-            if action == 'meeting_test':
-                continue
-            if action in not_in_meeting_action and nof_times  > 0:
-                dup_test = max(1, random.randrange(0, nof_times))
-                for _ in range(dup_test):
-                    self.action_config.insert(max(0, random.randrange(-1,
-                         len(self.action_config))), action)
-
-        logging.info('Test list = %s', self.action_config)
-        self.verification_config = [v for v in verification_config.keys()
-                                    if verification_config[v]]
-        logging.info('Verification list = %s', self.verification_config)
-
-
-    def initialize_test(self, test_config, action_config, verification_config,
-                        error_key_words, test_flow_control):
-        """
-        Initialize the list of tests and verifications,
-        and populate data needed for the test:
-            USB devices: which can be retrieved from the control file or
-            automatically detected by the script;
-            test loop, meeting mode, meeting code and test flow control
-            variables.
-        """
-        self.gpio_pause = test_config['gpiopause']
-        self.reboot_timeout =  test_config['reboot_timeout']
-        self.vol_change_step = test_config['vol_change_step']
-        self.vol_change_mode = test_config['vol_change_mode']
-        self.gpio_list = test_config['gpio_list']
-        self.is_meeting = test_config['is_meeting']
-        self.meeting_code = test_config['meeting_code']
-        self.reboot_min_meets = test_config['reboot_after_min_meets']
-        self.gpio_min_meets = test_config['gpio_after_min_meets']
-        self.run_meeting_test = action_config['meeting_test']
-        self.random = test_flow_control['random_mode']
-        self.recover_on_failure = test_flow_control['recovery_on_fatal_failure']
-        self.error_key_words = error_key_words
-        if test_config['puts']:
-            self.puts = test_config['puts'].split(',')
-        else:
-            self.puts = None
-
-        if verification_config['check_process_crash']:
-            cfm_helper.clear_core_file(self.client)
-
-        self.action_fun = {
-            'gpio_test': self.gpio_test,
-            'meeting_test': self.meeting_test,
-            'reboot_test': self.reboot_test,
-            'reset_usb_test': self.reset_usb_test,
-            'mute_unmute_camera_test': self.mute_unmute_camera_test,
-            'mute_unmute_mic_test': self.mute_unmute_mic_test,
-            'speaker_volume_test': self.speaker_volume_test,
-            'flap_monitor_test': self.flap_monitor_test
-            }
-        self.veri_fun = {
-            'check_usb_enumeration': self.check_usb_enumeration,
-            'check_usb_inf_init': self.check_usb_inf_init,
-            'check_v4l2_interface': self.check_v4l2_interface,
-            'check_audio_card': self.check_audio_card,
-            'check_cras_speaker': self.check_cras_speaker,
-            'check_cras_mic': self.check_cras_mic,
-            'check_cras_pspeaker': self.check_cras_pspeaker,
-            'check_cras_pmic': self.check_cras_pmic,
-            'check_cras_speaker_vol': self.check_cras_speaker_vol,
-            'check_cras_mic_mute': self.check_cras_mic_mute,
-            'check_prefer_camera': self.check_prefer_camera,
-            'check_camera_mute': self.check_camera_mute,
-            'check_audio_stream': self.check_audio_stream,
-            'check_video_stream': self.check_video_stream,
-            'check_hotrod_speaker': self.check_hotrod_speaker,
-            'check_hotrod_mic': self.check_hotrod_mic,
-            'check_hotrod_camera': self.check_hotrod_camera,
-            'check_hotrod_pspeaker': self.check_hotrod_pspeaker,
-            'check_hotrod_pmic': self.check_hotrod_pmic,
-            'check_hotrod_pcamera': self.check_hotrod_pcamera,
-            'check_hotrod_speaker_vol': self.check_hotrod_speaker_vol,
-            'check_hotrod_mic_state': self.check_hotrod_mic_state,
-            'check_hotrod_camera_state': self.check_hotrod_camera_state,
-            'check_usb_errorlog': self.check_usb_errorlog,
-            'check_kernel_errorlog': self.check_kernel_errorlog,
-            'check_video_errorlog': self.check_video_errorlog,
-            'check_audio_errorlog': self.check_audio_errorlog,
-            'check_chrome_errorlog': self.check_chrome_errorlog,
-            'check_atrus_errorlog': self.check_atrus_errorlog,
-            'check_usb_stability': self.check_usb_stability,
-            'check_process_crash': self.check_process_crash,
-            'check_kernel_panic': self.check_kernel_panic,
-            'check_cfm_rebooted':self.check_cfm_rebooted,
-            'check_chrome_restarted':self.check_chrome_restarted
-             }
-
-        self.usb_data = []
-        self.speaker = None
-        self.speaker_volume = None
-        self.camera = None
-        self.name_speaker = None
-        self.mimo_sis = None
-        self.mimo_display = None
-        self.is_in_meet = False
-        self.is_camera_muted = True
-        self.is_mic_muted = False
-        self.meets_last_reboot = 0
-        self.meets_last_gpio = 0
-        self.meet_no = 0
-        self.reboot_no = 0
-        self.gpio_no = 0
-        self.cdlines = CORE_DIR_LINES
-
-        usb_data = cfm_helper.retrieve_usb_devices(self.client)
-        if not usb_data:
-            logging.info('\n\nEnterprise_CFM_Test_Failed.')
-            raise error.TestFail('Fails to find any usb devices on CfM.')
-        peripherals = cfm_helper.extract_peripherals_for_cfm(usb_data)
-        if not peripherals:
-            logging.info('\n\nEnterprise_CFM_Test_Failed.')
-            raise error.TestFail('Failed to find any peripheral on CfM.')
-        if not self.puts:
-            self.puts = peripherals.keys()
-        else:
-            missing_puts = [put for put in self.puts
-                            if put not in peripherals.keys()]
-            if missing_puts:
-                logging.info('Failed to find target devices %s', missing_puts)
-                logging.info('\nEnterprise_CFM_Test_Failed.')
-                raise error.TestFail('Failed to find device')
-        for _put in self.puts:
-            if _put in get_usb_devices.CAMERA_MAP.keys():
-                self.camera = _put
-            if _put in get_usb_devices.SPEAKER_MAP.keys():
-                self.speaker = _put
-        if self.camera:
-            self.name_camera = get_usb_devices.get_device_prod(self.camera)
-            logging.info('Camera under test: %s %s',
-                          self.camera, self.name_camera)
-        if self.speaker:
-            self.name_speaker = get_usb_devices.get_device_prod(self.speaker)
-            logging.info('Speaker under test: %s %s',
-                          self.speaker, self.name_speaker)
-        if not test_flow_control['skip_cfm_check']:
-            if cfm_helper.check_is_platform(self.client, 'guado'):
-                if not cfm_helper.check_peripherals_for_cfm(peripherals):
-                    logging.info('Sanity Check on CfM fails.')
-                    logging.info('\n\nEnterprise_CFM_Test_Failed.')
-                    raise error.TestFail('Sanity Check on CfM fails.')
-        self.ip_addr = cfm_helper.get_mgmt_ipv4(self.client)
-        logging.info('CfM %s passes sanity check, will start test.',
-                      self.ip_addr)
-
-        self.initialize_action_check_config(action_config,
-                                            verification_config, True)
-
-        if list(set(self.verification_config) & set(LOG_CHECK_LIST)):
-            self.log_checking_point = cfm_helper.find_last_log(self.client,
-                                      self.speaker)
-        self.chrome_file = cfm_helper.check_chrome_logfile(self.client)
-        self.reboot_log = cfm_helper.check_last_reboot(self.client)
-
-
-    def recovery_routine(self, failure_msg):
-        """
-        The telemetry api often returns an API timeout or a javascript timeout
-        for various reasons. To work around the problem until a fix is ready,
-        this first checks whether Chrome or the CfM was already restarted
-        unexpectedly; if so, no recovery is attempted, to preserve the failure
-        state. Otherwise, if the errmsg from the test or verification failure
-        contains any keyword defined in FAILURE_REASON['telemetry'], Chrome is
-        restarted to recover.
-        @param failure_msg: failure message returned from a test failure or
-                            verification failure.
-        @return True if recovery was successfully done,
-                False if recovery was not done or failed.
-        """
-        loop_result = False
-        to_be_recovered = False
-        chrome_ok, _ = self.check_chrome_restarted()
-        if not chrome_ok:
-            self.chrome_file = cfm_helper.check_chrome_logfile(self.client)
-            return False
-        cfm_ok, _ = self.check_cfm_rebooted()
-        if not cfm_ok:
-            self.reboot_log = cfm_helper.check_last_reboot(self.client)
-            return False
-        for _err in FAILURE_REASON['telemetry']:
-            if _err in failure_msg:
-                to_be_recovered = True
-                break
-        if to_be_recovered:
-            logging.info('Restart Chrome to recover......')
-            result, emsg = self.restart_chrome_and_meeting(True)
-            if result:
-                loop_result = True
-                if list(set(self.verification_config) & set(LOG_CHECK_LIST)):
-                    self.log_checking_point = cfm_helper.find_last_log(
-                                              self.client,
-                                              self.speaker)
-        return loop_result
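
A condensed sketch of the decision recovery_routine makes, with the restart checks and keyword list passed in rather than read from self:

def should_attempt_recovery(failure_msg, chrome_restarted, cfm_rebooted,
                            telemetry_keywords):
    # Never recover over an unexpected restart; keep that failure state.
    if chrome_restarted or cfm_rebooted:
        return False
    # Only known telemetry flakiness is worth a Chrome restart.
    return any(keyword in failure_msg for keyword in telemetry_keywords)
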
-
-
-    def process_test_result(self, loop_result, loop_no, test_no,
-                            failed_tests, failed_verifications,
-                            failed_tests_loop,
-                            failed_verifications_loop, test_flow_control,
-                            test_config, finished_tests_verifications,
-                            test_done):
-        """
-        Process test result data, and print out the test report.
-        @param loop_result: True when all tests and verifications pass,
-                            False if any test or verification fails.
-        @param loop_no: sequence number of the loop.
-        @param test_no: sequence number of the test.
-        @param failed_tests: failed tests.
-        @param failed_verifications: failed verifications.
-        @param failed_tests_loop: failed tests in the loop.
-        @param failed_verifications_loop: failed verifications in the loop.
-        @param test_flow_control: variable of flow control defined in
-                                  control file
-        @param test_config: variable of test config defined in control file
-        @param finished_tests_verifications: dict tracking how many times
-               each test and verification has been performed.
-        @param test_done: True if all loops are done; False otherwise.
-        """
-        if 'reboot_test' in finished_tests_verifications.keys():
-            finished_tests_verifications['reboot_test'] = self.reboot_no
-        if not loop_result and not test_done:
-            logging.info('\n\nVerification_or_Test_Fail on %s for loop NO:'
-                         ' %d, Test: %d', self.ip_addr, loop_no, test_no)
-            if failed_tests_loop:
-                logging.info('----- Failed_Tests: %s', failed_tests_loop)
-            if failed_verifications_loop:
-                logging.info('----- Failed_Verifications: %s',
-                             failed_verifications_loop)
-        if test_flow_control['report']:
-            logging.info('\n\n\n----------------Summary---------------')
-            logging.info('---Loop %d, Test: %d, Meet: %d, Reboot: %d, Gpio: %s',
-                         loop_no, test_no, self.meet_no, self.reboot_no,
-                         self.gpio_no)
-            for testname, counter in failed_tests.iteritems():
-                logging.info('----Test: %s, Failed times: %d, Total Run: %d',
-                           testname, counter,
-                           finished_tests_verifications[testname])
-            for veriname, counter in failed_verifications.iteritems():
-                logging.info('----Verification: %s, Failed times: %d,'
-                             'Total Run: %d',
-                             veriname, counter,
-                             finished_tests_verifications[veriname])
-            if self.random:
-                time.sleep(random.randrange(0, test_config['loop_timeout']))
-            else:
-                time.sleep(test_config['loop_timeout'])
-        if not test_done:
-            if list(set(self.verification_config) & set(LOG_CHECK_LIST)):
-                self.log_checking_point = cfm_helper.find_last_log(self.client,
-                                          self.speaker)
-
-    def run_once(self, host, run_test_only, test_config, action_config,
-                 verification_config,
-                 error_key_words, test_flow_control):
-        """Runs the test."""
-        logging.info('Start_Test_Script:Enterprise_CFM_Test')
-        self.client = host
-        if test_flow_control['reboot_before_start']:
-            try:
-                self.client.reboot()
-            except Exception as e:
-                errmsg = ('Reboot test fails for %s.'
-                          % self.ip_addr)
-                logging.exception(errmsg)
-                raise error.TestFail(errmsg)
-            if action_config['meeting_test'] > 0:
-                result, errmsg = meet_helper.restart_chrome(self.cfm_facade,
-                                 test_config['is_meeting'], True)
-                if not result:
-                    logging.info('Restart chrome fails, msg = %s', errmsg)
-                    raise error.TestFail(errmsg)
-
-        self.initialize_test(test_config, action_config, verification_config,
-                              error_key_words, test_flow_control)
-        test_no = 0
-        failed_tests = {}
-        failed_verifications = {}
-        finished_tests_verifications = {}
-        test_failure_reason = []
-        verification_failure_reason = []
-
-
-        for loop_no in xrange(1, test_config['repeat'] + 1):
-            logging.info('=============%s:Test_Loop_No:%d=============',
-                         self.ip_addr, loop_no)
-            logging.info('Action list: %s', self.action_config)
-            failed_tests_loop = []
-            failed_verifications_loop = []
-            for test in self.action_config:
-                loop_result = True
-                if not test in finished_tests_verifications.keys():
-                    finished_tests_verifications[test] = 1
-                else:
-                    finished_tests_verifications[test] += 1
-                self.current_test = test
-                logging.info('\nStart_test:%s', test)
-                test_result, test_msg = self.action_fun[test]()
-                test_no += 1
-                if test_result:
-                    logging.info('Test_Result:%s:SUCCESS', test)
-                else:
-                    logging.info('Test_Result:%s:FAILURE:%s', test, test_msg)
-                    test_failure_reason.append(test_msg)
-                    failed_tests_loop.append(test)
-                    loop_result = False
-                    if not test in failed_tests.keys():
-                        failed_tests[test] = 1
-                    else:
-                        failed_tests[test] += 1
-                    logging.info('\n%s:Test_failure:%s:%s', self.ip_addr,
-                                 test, test_msg)
-                    if self.recover_on_failure or self.meets_last_reboot < 1:
-                        loop_result = self.recovery_routine(test_msg)
-                if self.random:
-                    time.sleep(random.randrange(test_config['min_timeout'],
-                                                test_config['action_timeout']))
-                else:
-                    time.sleep(test_config['min_timeout'])
-
-                for verification in self.verification_config:
-                    if not verification in finished_tests_verifications.keys():
-                        finished_tests_verifications[verification] = 1
-                    else:
-                        finished_tests_verifications[verification] += 1
-
-                    logging.info('\nStart_verification:%s', verification)
-                    veri_result, veri_msg = self.veri_fun[verification]()
-                    if veri_result:
-                        logging.info('Verification_Result:%s:SUCCESS',
-                                     verification)
-                    else:
-                        logging.info('Verification_Result:%s:FAILURE:%s',
-                                     verification, veri_msg)
-                        verification_failure_reason.append(veri_msg)
-                        failed_verifications_loop.append(verification)
-                        if not verification in failed_verifications.keys():
-                            failed_verifications[verification] = 1
-                        else:
-                            failed_verifications[verification] += 1
-                        logging.info('%s:Verification_fail:%s:%s',
-                                     self.ip_addr, verification, veri_msg)
-                        loop_result = False
-                        if self.recover_on_failure:
-                            loop_result = self.recovery_routine(veri_msg)
-
-                self.process_test_result(loop_result, loop_no, test_no,
-                                         failed_tests,
-                                         failed_verifications,
-                                         failed_tests_loop,
-                                         failed_verifications_loop,
-                                         test_flow_control,
-                                         test_config,
-                                         finished_tests_verifications, False)
-                if not loop_result:
-                    if test_flow_control['abort_on_failure']:
-                        logging.info('Enterprise_CFM_Test_Failed.')
-                        time.sleep(test_config['debug_timeout'])
-                        logging.info('Enterprise_CFM_Test_Finished.')
-                        raise error.TestFail(
-                            'Test_or_Verification_fails after {}.'.format(test))
-                    else:
-                        logging.info('Enterprise_CFM_Test_Failure_Detected.')
-
-            if self.random:
-                self.initialize_action_check_config(action_config,
-                                                    verification_config, False)
-
-        logging.info('Enterprise_CFM_Test_Finished.')
-        self.process_test_result(loop_result, loop_no, test_no,
-                                 failed_tests,
-                                 failed_verifications,
-                                 failed_tests_loop,
-                                 failed_verifications_loop,
-                                 test_flow_control,
-                                 test_config,
-                                 finished_tests_verifications, True)
-        if test_failure_reason:
-            logging.debug('Test failure reason %s', test_failure_reason)
-        if verification_failure_reason:
-            logging.debug('Verification failure reason %s',
-                         verification_failure_reason)
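As an aside, the failure bookkeeping in the deleted loop above (counting how many times each action and verification ran and failed) can be written more compactly with collections.Counter. The sketch below is illustrative only and is not part of autotest; the names are made up, and each action is assumed to be a callable returning an (ok, message) pair like the action_fun/veri_fun entries above.

import collections

def run_actions(actions):
    """Run each (name, action) pair once and tally runs and failures.

    @param actions: iterable of (name, callable) pairs; each callable is
                    expected to return an (ok, message) tuple.
    @returns (finished, failed, failure_reasons) where the first two are
             collections.Counter objects keyed by action name.
    """
    finished = collections.Counter()
    failed = collections.Counter()
    failure_reasons = []
    for name, action in actions:
        finished[name] += 1
        ok, msg = action()
        if not ok:
            failed[name] += 1
            failure_reasons.append(msg)
    return finished, failed, failure_reasons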
diff --git a/server/site_tests/enterprise_CFM_VolumeChange/control.internal_speaker b/server/site_tests/enterprise_CFM_VolumeChange/control.internal_speaker
deleted file mode 100644
index 22e3b6e..0000000
--- a/server/site_tests/enterprise_CFM_VolumeChange/control.internal_speaker
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "harpreet@chromium.org"
-NAME = "enterprise_CFM_VolumeChange.internal_speaker"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:hotrod-remora"
-DEPENDENCIES = "board:buddy, hangout_app"
-JOB_RETRIES = 3
-
-DOC = """
-This test clears the TPM and enables the appropriate usb port on the servo
-before kicking off a client side test that enrolls the device into CFM. Once
-the device is enrolled, a different client test is kicked off to change the
-hangouts volume using hotrod kiosk app JS hooks. These changes are then
-validated against the cras_test_client output to make sure the volume matches.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    repeat = int(args_dict.get('repeat', 10))
-    cmd = "cras_test_client --dump_server_info | awk '/Output Nodes:/," \
-          "/Input Devices:/' | grep -E 'INTERNAL_*' | awk -v N=3 '{print $N}'"
-
-    job.run_test('enterprise_CFM_VolumeChange', host=host, repeat=repeat,
-            cmd=cmd, tag="internal_speaker")
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_CFM_VolumeChange/control.usb_speaker b/server/site_tests/enterprise_CFM_VolumeChange/control.usb_speaker
deleted file mode 100644
index d34ab28..0000000
--- a/server/site_tests/enterprise_CFM_VolumeChange/control.usb_speaker
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "harpreet@chromium.org"
-NAME = "enterprise_CFM_VolumeChange.usb_speaker"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:hotrod-remora"
-DEPENDENCIES = "atrus"
-JOB_RETRIES = 3
-
-DOC = """
-This test clears the TPM and enables the appropriate usb port on the servo
-before kicking off a client side test that enrolls the device into CFM. Once
-the device is enrolled, a different client test is kicked off to change the
-hangouts volume using hotrod kiosk app JS hooks. These changes are then
-validated against the cras_test_client output to make sure the volume matches.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    repeat = int(args_dict.get('repeat', 10))
-    cmd = "cras_test_client --dump_server_info | awk '/Output Nodes:/," \
-          "/Input Devices:/' | grep -E 'USB' | awk -v N=3 '{print $N}'"
-
-    job.run_test('enterprise_CFM_VolumeChange', host=host, repeat=repeat,
-            cmd=cmd, tag="usb_speaker")
-
-
-parallel_simple(run_test, machines)
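The cmd strings in the two control files above pipe `cras_test_client --dump_server_info` through awk and grep to pull the third column of the matching output-node line (the usb_speaker variant greps for 'USB', the internal_speaker variant for 'INTERNAL_*'). Purely as an illustration, and assuming the dump output has already been captured as a string, the same extraction could be sketched in Python; this helper is hypothetical and not part of autotest.

import re

def extract_node_volume(dump_text, node_pattern):
    """Return the third field of the first line matching |node_pattern|
    between the 'Output Nodes:' and 'Input Devices:' sections, which the
    control files above treat as the node volume."""
    in_output_nodes = False
    for line in dump_text.splitlines():
        if 'Output Nodes:' in line:
            in_output_nodes = True
            continue
        if 'Input Devices:' in line:
            break
        if in_output_nodes and re.search(node_pattern, line):
            fields = line.split()
            if len(fields) >= 3:
                return int(fields[2])
    return None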
diff --git a/server/site_tests/enterprise_CFM_VolumeChange/enterprise_CFM_VolumeChange.py b/server/site_tests/enterprise_CFM_VolumeChange/enterprise_CFM_VolumeChange.py
deleted file mode 100644
index 07831cc..0000000
--- a/server/site_tests/enterprise_CFM_VolumeChange/enterprise_CFM_VolumeChange.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import random
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.cfm import cfm_base_test
-
-_SHORT_TIMEOUT = 2
-
-
-class enterprise_CFM_VolumeChange(cfm_base_test.CfmBaseTest):
-    """
-    Volume changes made in the CFM / hotrod app should be accurately reflected
-    in CrOS.
-    """
-    version = 1
-
-
-    def _change_volume(self, repeat, cmd):
-        """
-        Change volume using CFM api and cross check with cras_test_client
-        output.
-
-        @param repeat: Number of times the volume should be changed.
-        @param cmd: cras_test_client command to run.
-        @raises error.TestFail if cras volume does not match volume set by CFM.
-        """
-        # This is used to trigger crbug.com/614885
-        for volume in range(55, 85):
-            self.cfm_facade.set_speaker_volume(volume)
-            time.sleep(random.uniform(0.01, 0.05))
-
-        for _ in xrange(repeat):
-            # There is a minimal volume threshold so we can't start at 0%.
-            # See crbug.com/633809 for more info.
-            cfm_volume = random.randrange(2, 100, 1)
-            self.cfm_facade.set_speaker_volume(cfm_volume)
-            time.sleep(_SHORT_TIMEOUT)
-
-            # Get the volume report from cras_test_client
-            cras_volume = int(
-                self._host.run_output(cmd).splitlines()[0].strip())
-
-            if cras_volume != cfm_volume:
-                raise error.TestFail('Cras volume (%d) does not match '
-                                     'volume set by CFM (%d).' %
-                                     (cras_volume, cfm_volume))
-            else:
-                logging.info('Cras volume (%d) matches volume set by CFM (%d)',
-                             cras_volume, cfm_volume)
-
-    def run_once(self, repeat, cmd):
-        """Runs the test."""
-        self.cfm_facade.wait_for_telemetry_commands()
-        self.cfm_facade.start_meeting_session()
-        if self.cfm_facade.is_mic_muted():
-            self.cfm_facade.unmute_mic()
-        self._change_volume(repeat, cmd)
-        self.cfm_facade.end_meeting_session()
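_change_volume above sleeps a fixed _SHORT_TIMEOUT and then reads the cras volume once. A hedged sketch of a polling variant that would tolerate slower volume propagation is shown below; get_cras_volume is a hypothetical stand-in for running the cras_test_client pipeline on the DUT, not an autotest API.

import time

def wait_for_volume(get_cras_volume, expected, timeout=10.0, interval=0.5):
    """Poll |get_cras_volume| until it returns |expected| or |timeout| expires.

    @returns True if the reported volume matched within the timeout,
             False otherwise.
    """
    deadline = time.time() + timeout
    while time.time() < deadline:
        if get_cras_volume() == expected:
            return True
        time.sleep(interval)
    return False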
diff --git a/server/site_tests/enterprise_ClearTPM/control b/server/site_tests/enterprise_ClearTPM/control
deleted file mode 100644
index b8b2383..0000000
--- a/server/site_tests/enterprise_ClearTPM/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "kathrelkeld"
-NAME = "enterprise_ClearTPM"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-
-DOC = """
-Not meant to be run in the lab.  A convenient way to clear the TPM on
-a test image while running other tests, if needed.
-"""
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_ClearTPM', host=host)
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_ClearTPM/enterprise_ClearTPM.py b/server/site_tests/enterprise_ClearTPM/enterprise_ClearTPM.py
deleted file mode 100644
index e4814ef..0000000
--- a/server/site_tests/enterprise_ClearTPM/enterprise_ClearTPM.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import test
-
-
-class enterprise_ClearTPM(test.test):
-    """A utility test that clears the TPM."""
-    version = 1
-
-    def run_once(self, host):
-        """Entry point of this test."""
-        tpm_utils.ClearTPMOwnerRequest(host)
diff --git a/server/site_tests/enterprise_KioskEnrollmentServer/control b/server/site_tests/enterprise_KioskEnrollmentServer/control
deleted file mode 100644
index 2c05a06..0000000
--- a/server/site_tests/enterprise_KioskEnrollmentServer/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "harpreet"
-NAME = "enterprise_KioskEnrollmentServer"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:enterprise"
-DEPENDENCIES = "chromesign"
-
-DOC = """
-This test clears the TPM if necessary, kicks off a client side test that enrolls
-a device in enterprise and clears the TPM again. Every time the TPM is cleared,
-the system is rebooted.
-"""
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_KioskEnrollmentServer', host=host)
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_KioskEnrollmentServer/enterprise_KioskEnrollmentServer.py b/server/site_tests/enterprise_KioskEnrollmentServer/enterprise_KioskEnrollmentServer.py
deleted file mode 100644
index 02fcefb..0000000
--- a/server/site_tests/enterprise_KioskEnrollmentServer/enterprise_KioskEnrollmentServer.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import test, autotest
-
-
-class enterprise_KioskEnrollmentServer(test.test):
-    """A test that runs enterprise_KioskEnrollment and clears the TPM as
-    necessary."""
-    version = 1
-
-    def run_once(self, host=None, kiosk_app_attributes=None):
-        self.client = host
-
-        tpm_utils.ClearTPMOwnerRequest(self.client)
-        autotest.Autotest(self.client).run_test('enterprise_KioskEnrollment',
-                kiosk_app_attributes=kiosk_app_attributes,
-                check_client_result=True)
-        tpm_utils.ClearTPMOwnerRequest(self.client)
diff --git a/server/site_tests/enterprise_LongevityTrackerServer/control.chromesign_20min b/server/site_tests/enterprise_LongevityTrackerServer/control.chromesign_20min
deleted file mode 100644
index 86d8217..0000000
--- a/server/site_tests/enterprise_LongevityTrackerServer/control.chromesign_20min
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "krishnargv"
-NAME = "enterprise_LongevityTrackerServer.chromesign_20min"
-PURPOSE = "Tracks Chrome OS performance over a 20min period."
-TIME = "LONG"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "performance"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:longevity"
-DEPENDENCIES = "chromesign"
-JOB_RETRIES = 3
-
-DOC = """
-This test enrolls a ChromeOS device into kiosk mode and captures device resource
-data including cpu and memory usage, and temperature data after the kiosk app
-auto launches. This test runs for 20 minutes; the perf metrics are captured every 60 seconds.
-"""
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    # Kiosk app attributes include app name, extension id and extension page
-    # See README.txt for perf_params format
-    perf_params = {
-        'perf_capture_iterations': 1,
-        'perf_capture_duration': 1200,
-        'sample_interval': 60,
-        'metric_interval': 300,
-        'test_type': 'single_sample',
-        'kiosk_app_attributes': ('chromesign:'
-                            'odjaaghiehpobimgdjjfofmablbaleem:'
-                            'viewer.html')
-    }
-
-    job.run_test('enterprise_LongevityTrackerServer',
-            host=host,
-            perf_params=perf_params,
-            tag='chromesign_20min')
-
-parallel_simple(run_test, machines)
\ No newline at end of file
diff --git a/server/site_tests/enterprise_LongevityTrackerServer/control.chromesign_3hr b/server/site_tests/enterprise_LongevityTrackerServer/control.chromesign_3hr
deleted file mode 100644
index 1fb0194..0000000
--- a/server/site_tests/enterprise_LongevityTrackerServer/control.chromesign_3hr
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "krishnargv"
-NAME = "enterprise_LongevityTrackerServer.chromesign_3hr"
-PURPOSE = "Tracks Chrome OS performance over a 3hour period."
-TIME = "LONG"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "performance"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:kiosk_longevity"
-JOB_RETRIES = 3
-
-DOC = """
-This test enrolls a ChromeOS device into kiosk mode and captures device resource
-data including cpu and memory usage, and temperature data after the kiosk app
-auto launches. This test runs for 3 hours; the perf metrics are captured every 2 minutes.
-"""
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    # Kiosk app attributes include app name, extension id and extension page
-    # See README.txt for perf_params format
-    perf_params = {
-        'perf_capture_iterations': 1,
-        'perf_capture_duration': 10800,
-        'sample_interval': 120,
-        'metric_interval': 1800,
-        'test_type': 'single_sample',
-        'kiosk_app_attributes': ('chromesign:'
-                            'odjaaghiehpobimgdjjfofmablbaleem:'
-                            'viewer.html')
-    }
-
-    job.run_test('enterprise_LongevityTrackerServer',
-            host=host,
-            perf_params=perf_params,
-            tag='chromesign_3hr')
-
-parallel_simple(run_test, machines)
\ No newline at end of file
diff --git a/server/site_tests/enterprise_LongevityTrackerServer/control.riseplayer b/server/site_tests/enterprise_LongevityTrackerServer/control.riseplayer
deleted file mode 100644
index eebc562..0000000
--- a/server/site_tests/enterprise_LongevityTrackerServer/control.riseplayer
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "harpreet"
-NAME = "enterprise_LongevityTrackerServer.riseplayer"
-PURPOSE = "Tracks Chrome OS performance over long periods."
-TIME = "LONG"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "performance"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:kiosk_longevity"
-
-DOC = """
-This test enrolls a ChromeOS device into kiosk mode and captures device resource
-data including cpu and memory usage, and temperature data after the kiosk app
-auto launches.
-"""
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    # Kiosk app attributes include app name, extension id and extension page
-    # See README.txt for perf_params format
-    perf_params = {
-        'perf_capture_iterations': 1,
-        'perf_capture_duration': 1200,
-        'sample_interval': 60,
-        'metric_interval': 300,
-        'test_type': 'single_sample',
-        'kiosk_app_attributes': ('riseplayer:'
-                            'mfpgpdablffhbfofnhlpgmokokbahooi:'
-                            'index.html')
-    }
-
-    job.run_test('enterprise_LongevityTrackerServer',
-            host=host,
-            perf_params=perf_params,
-            tag='riseplayer')
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_LongevityTrackerServer/control.stratosmedia b/server/site_tests/enterprise_LongevityTrackerServer/control.stratosmedia
deleted file mode 100644
index 1a874de..0000000
--- a/server/site_tests/enterprise_LongevityTrackerServer/control.stratosmedia
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "scunningham"
-NAME = "enterprise_LongevityTrackerServer.stratosmedia"
-PURPOSE = "Tracks Chrome OS performance over long periods."
-TIME = "LONG"
-TEST_CATEGORY = "Performance"
-TEST_CLASS = "performance"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:longevity"
-DEPENDENCIES = "stratosmedia"
-
-DOC = """
-This test enrolls a ChromeOS device into kiosk mode and captures device resource
-data including cpu and memory usage, and temperature data after the kiosk app
-auto launches.
-"""
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    # Kiosk app attributes include app name, extension id and extension page
-    kiosk_app_attributes = \
-            'stratosmedia:alhlkpgheiefedomljbenmkpconkffhk:index.html'
-    job.run_test('enterprise_LongevityTrackerServer', host=host,
-            kiosk_app_attributes=kiosk_app_attributes, tag='stratosmedia')
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_LongevityTrackerServer/enterprise_LongevityTrackerServer.py b/server/site_tests/enterprise_LongevityTrackerServer/enterprise_LongevityTrackerServer.py
deleted file mode 100644
index 9e4cf06..0000000
--- a/server/site_tests/enterprise_LongevityTrackerServer/enterprise_LongevityTrackerServer.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import csv
-import json
-import time
-import urllib
-import urllib2
-import logging
-import httplib
-
-import enterprise_longevity_helper
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-
-STABILIZATION_DURATION = 60
-MEASUREMENT_DURATION_SECONDS = 10
-TMP_DIRECTORY = '/tmp/'
-PERF_FILE_NAME_PREFIX = 'perf'
-VERSION_PATTERN = r'^(\d+)\.(\d+)\.(\d+)$'
-DASHBOARD_UPLOAD_URL = 'https://chromeperf.appspot.com/add_point'
-EXPECTED_PARAMS = ['perf_capture_iterations',  'perf_capture_duration',
-                   'sample_interval', 'metric_interval', 'test_type',
-                   'kiosk_app_attributes']
-
-
-class PerfUploadingError(Exception):
-    """Exception raised in perf_uploader."""
-    pass
-
-
-class enterprise_LongevityTrackerServer(test.test):
-    """
-    Run Longevity Test: Collect performance data over a long duration.
-
-    Run enterprise_KioskEnrollment and clear the TPM as necessary. After
-    enterprise enrollment is successful, collect and log cpu, memory, and
-    temperature data from the device under test.
-
-    """
-    version = 1
-
-
-    def initialize(self):
-        self.temp_dir = os.path.split(self.tmpdir)[0]
-
-
-    #TODO(krishnargv@): Add a method to retrieve the version of the
-    #                   Kiosk app from its manifest.
-    def _initialize_test_variables(self):
-        """Initialize test variables that will be uploaded to the dashboard."""
-        self.board_name = self.system_facade.get_current_board()
-        self.chromeos_version = self.system_facade.get_chromeos_release_version()
-        epoch_minutes = str(int(time.time() / 60))
-        self.point_id = enterprise_longevity_helper.get_point_id(
-                self.chromeos_version, epoch_minutes, VERSION_PATTERN)
-        self.test_suite_name = self.tagged_testname
-        self.perf_capture_duration = self.perf_params['perf_capture_duration']
-        self.sample_interval = self.perf_params['sample_interval']
-        self.metric_interval = self.perf_params['metric_interval']
-        self.perf_results = {'cpu': '0', 'mem': '0', 'temp': '0'}
-
-
-    def elapsed_time(self, mark_time):
-        """
-        Get time elapsed since |mark_time|.
-
-        @param mark_time: point in time from which elapsed time is measured.
-
-        @returns time elapsed since the marked time.
-
-        """
-        return time.time() - mark_time
-
-
-    #TODO(krishnargv):  Replace _format_data_for_upload with a call to the
-    #                   _format_for_upload method of the perf_uploader.py
-    def _format_data_for_upload(self, chart_data):
-        """
-        Collect chart data into an uploadable data JSON object.
-
-        @param chart_data: performance results formatted as chart data.
-
-        """
-        perf_values = {
-            'format_version': '1.0',
-            'benchmark_name': self.test_suite_name,
-            'charts': chart_data,
-        }
-        #TODO(krishnargv): Add a method to capture the chrome_version.
-        dash_entry = {
-            'master': 'ChromeOS_Enterprise',
-            'bot': 'cros-%s' % self.board_name,
-            'point_id': self.point_id,
-            'versions': {
-                'cros_version': self.chromeos_version,
-
-            },
-            'supplemental': {
-                'default_rev': 'r_cros_version',
-                'kiosk_app_name': 'a_' + self.kiosk_app_name,
-
-            },
-            'chart_data': perf_values
-        }
-        return {'data': json.dumps(dash_entry)}
-
-
-    #TODO(krishnargv):  Replace _send_to_dashboard with a call to the
-    #                   _send_to_dashboard method of the perf_uploader.py
-    def _send_to_dashboard(self, data_obj):
-        """
-        Send formatted perf data to the perf dashboard.
-
-        @param data_obj: data object as returned by _format_data_for_upload().
-
-        @raises PerfUploadingError if an exception was raised when uploading.
-
-        """
-        logging.debug('Data_obj to be uploaded: %s', data_obj)
-        encoded = urllib.urlencode(data_obj)
-        req = urllib2.Request(DASHBOARD_UPLOAD_URL, encoded)
-        try:
-            urllib2.urlopen(req)
-        except urllib2.HTTPError as e:
-            raise PerfUploadingError('HTTPError: %d %s for JSON %s\n' %
-                                     (e.code, e.msg, data_obj['data']))
-        except urllib2.URLError as e:
-            raise PerfUploadingError('URLError: %s for JSON %s\n' %
-                                     (str(e.reason), data_obj['data']))
-        except httplib.HTTPException:
-            raise PerfUploadingError('HTTPException for JSON %s\n' %
-                                     data_obj['data'])
-
-
-    def _write_perf_keyvals(self, perf_results):
-        """
-        Write perf results to keyval file for AutoTest results.
-
-        @param perf_results: dict of attribute performance metrics.
-
-        """
-        perf_keyval = {}
-        perf_keyval['cpu_usage'] = perf_results['cpu']
-        perf_keyval['memory_usage'] = perf_results['mem']
-        perf_keyval['temperature'] = perf_results['temp']
-        self.write_perf_keyval(perf_keyval)
-
-
-    def _write_perf_results(self, perf_results):
-        """
-        Write perf results to results-chart.json file for Perf Dashboard.
-
-        @param perf_results: dict of attribute performance metrics.
-
-        """
-        cpu_metric = perf_results['cpu']
-        mem_metric = perf_results['mem']
-        ec_metric = perf_results['temp']
-        self.output_perf_value(description='cpu_usage', value=cpu_metric,
-                               units='percent', higher_is_better=False)
-        self.output_perf_value(description='mem_usage', value=mem_metric,
-                               units='percent', higher_is_better=False)
-        self.output_perf_value(description='max_temp', value=ec_metric,
-                               units='Celsius', higher_is_better=False)
-
-
-    def _record_perf_measurements(self, perf_values, perf_writer):
-        """
-        Record attribute performance measurements, and write to file.
-
-        @param perf_values: dict of attribute performance values.
-        @param perf_writer: file to write performance measurements.
-
-        """
-        # Get performance measurements.
-        cpu_usage = '%.3f' % enterprise_longevity_helper.get_cpu_usage(
-                self.system_facade, MEASUREMENT_DURATION_SECONDS)
-        mem_usage = '%.3f' % enterprise_longevity_helper.get_memory_usage(
-                    self.system_facade)
-        max_temp = '%.3f' % enterprise_longevity_helper.get_temperature_data(
-                self.client, self.system_facade)
-
-        # Append measurements to attribute lists in perf values dictionary.
-        perf_values['cpu'].append(float(cpu_usage))
-        perf_values['mem'].append(float(mem_usage))
-        perf_values['temp'].append(float(max_temp))
-
-        # Write performance measurements to perf timestamped file.
-        time_stamp = time.strftime('%Y/%m/%d %H:%M:%S')
-        perf_writer.writerow([time_stamp, cpu_usage, mem_usage, max_temp])
-        logging.info('Time: %s, CPU: %r, Mem: %r, Temp: %r',
-                     time_stamp, cpu_usage, mem_usage, max_temp)
-
-
-    def _setup_kiosk_app_on_dut(self, kiosk_app_attributes=None):
-        """Enroll the DUT and setup a Kiosk app."""
-        info = self.client.host_info_store.get()
-        app_config_id = info.get_label_value('app_config_id')
-        if app_config_id and app_config_id.startswith(':'):
-            app_config_id = app_config_id[1:]
-        if kiosk_app_attributes:
-            kiosk_app_attributes = kiosk_app_attributes.rstrip()
-            self.kiosk_app_name, ext_id = kiosk_app_attributes.split(':')[:2]
-
-        tpm_utils.ClearTPMOwnerRequest(self.client)
-        logging.info("Enrolling the DUT to Kiosk mode")
-        autotest.Autotest(self.client).run_test(
-                'enterprise_KioskEnrollment',
-                kiosk_app_attributes=kiosk_app_attributes,
-                check_client_result=True)
-
-        #if self.kiosk_app_name == 'riseplayer':
-        #    self.kiosk_facade.config_rise_player(ext_id, app_config_id)
-
-
-    def _run_perf_capture_cycle(self):
-        """
-        Track performance of Chrome OS over a long period of time.
-
-        This method collects performance measurements, and calculates metrics
-        to upload to the performance dashboard. It creates two files to
-        collect and store performance values and results: perf_<timestamp>.csv
-        and perf_aggregated.csv.
-
-        At the start, it creates a unique perf timestamped file in the test's
-        temp_dir. As the cycle runs, it saves a time-stamped performance
-        value after each sample interval. Periodically, it calculates
-        the 90th percentile performance metrics from these values.
-
-        The perf_<timestamp> files on the device survive multiple runs of
-        longevity_Tracker triggered by the server-side test, and also survive
-        multiple runs of the server-side test itself.
-
-        At the end, it opens the perf aggregated file in the test's temp_dir,
-        and appends the contents of the perf timestamped file. It then
-        copies the perf aggregated file to the results directory as perf.csv.
-        This perf.csv file will be consumed by the AutoTest backend when the
-        server-side test ends.
-
-        Note that the perf_aggregated.csv file will grow larger with each run
-        of longevity_Tracker on the device by the server-side test.
-
-        This method will capture perf metrics every SAMPLE_INTERVAL secs; at
-        each METRIC_INTERVAL the 90th percentile of the collected metrics is
-        calculated and saved. The perf capture runs for PERF_CAPTURE_DURATION
-        secs. At the end of the PERF_CAPTURE_DURATION time interval the median
-        value of all 90th percentile metrics is returned.
-
-        @returns list of median performance metrics.
-
-        """
-        test_start_time = time.time()
-
-        perf_values = {'cpu': [], 'mem': [], 'temp': []}
-        perf_metrics = {'cpu': [], 'mem': [], 'temp': []}
-
-        # Create perf_<timestamp> file and writer.
-        timestamp_fname = (PERF_FILE_NAME_PREFIX +
-                           time.strftime('_%Y-%m-%d_%H-%M') + '.csv')
-        timestamp_fpath = os.path.join(self.temp_dir, timestamp_fname)
-        timestamp_file = enterprise_longevity_helper.open_perf_file(
-                timestamp_fpath)
-        timestamp_writer = csv.writer(timestamp_file)
-
-        # Align time of loop start with the sample interval.
-        test_elapsed_time = self.elapsed_time(test_start_time)
-        time.sleep(enterprise_longevity_helper.syncup_time(
-                test_elapsed_time, self.sample_interval))
-        test_elapsed_time = self.elapsed_time(test_start_time)
-
-        metric_start_time = time.time()
-        metric_prev_time = metric_start_time
-
-        metric_elapsed_prev_time = self.elapsed_time(metric_prev_time)
-        offset = enterprise_longevity_helper.modulo_time(
-                metric_elapsed_prev_time, self.metric_interval)
-        metric_timer = metric_elapsed_prev_time + offset
-
-        while self.elapsed_time(test_start_time) <= self.perf_capture_duration:
-            self._record_perf_measurements(perf_values, timestamp_writer)
-
-            # Periodically calculate and record 90th percentile metrics.
-            metric_elapsed_prev_time = self.elapsed_time(metric_prev_time)
-            metric_timer = metric_elapsed_prev_time + offset
-            if metric_timer >= self.metric_interval:
-                enterprise_longevity_helper.record_90th_metrics(
-                        perf_values, perf_metrics)
-                perf_values = {'cpu': [], 'mem': [], 'temp': []}
-
-                # Set previous time to current time.
-                metric_prev_time = time.time()
-                metric_elapsed_prev_time = self.elapsed_time(metric_prev_time)
-
-                metric_elapsed_time = self.elapsed_time(metric_start_time)
-                offset = enterprise_longevity_helper.modulo_time(
-                    metric_elapsed_time, self.metric_interval)
-
-                # Set the timer to time elapsed plus offset to next interval.
-                metric_timer = metric_elapsed_prev_time + offset
-
-            # Sync the loop time to the sample interval.
-            test_elapsed_time = self.elapsed_time(test_start_time)
-            time.sleep(enterprise_longevity_helper.syncup_time(
-                    test_elapsed_time, self.sample_interval))
-
-        # Close perf timestamp file.
-        timestamp_file.close()
-
-        # Open perf timestamp file to read, and aggregated file to append.
-        timestamp_file = open(timestamp_fpath, 'r')
-        aggregated_fname = (PERF_FILE_NAME_PREFIX + '_aggregated.csv')
-        aggregated_fpath = os.path.join(self.temp_dir, aggregated_fname)
-        aggregated_file = enterprise_longevity_helper.open_perf_file(
-                aggregated_fpath)
-
-        # Append contents of perf timestamp file to perf aggregated file.
-        enterprise_longevity_helper.append_to_aggregated_file(
-                timestamp_file, aggregated_file)
-        timestamp_file.close()
-        aggregated_file.close()
-
-        # Copy perf aggregated file to test results directory.
-        enterprise_longevity_helper.copy_aggregated_to_resultsdir(
-                self.resultsdir, aggregated_fpath, 'perf.csv')
-
-        # Return median of each attribute performance metric.
-        logging.info("Perf_metrics: %r ", perf_metrics)
-        return enterprise_longevity_helper.get_median_metrics(perf_metrics)
-
-
-    def run_once(self, host=None, perf_params=None):
-        self.client = host
-        self.kiosk_app_name = None
-        self.perf_params = perf_params
-        logging.info('Perf params: %r', self.perf_params)
-
-        if not enterprise_longevity_helper.verify_perf_params(
-                EXPECTED_PARAMS, self.perf_params):
-            raise error.TestFail('Missing or incorrect perf_params in the'
-                                 ' control file. Refer to the README.txt for'
-                                 ' info on perf params: %r'
-                                 % (self.perf_params))
-
-        factory = remote_facade_factory.RemoteFacadeFactory(
-                host, no_chrome=True)
-        self.system_facade = factory.create_system_facade()
-        self.kiosk_facade = factory.create_kiosk_facade()
-
-        self._setup_kiosk_app_on_dut(self.perf_params['kiosk_app_attributes'])
-        time.sleep(STABILIZATION_DURATION)
-
-        self._initialize_test_variables()
-        for iteration in range(self.perf_params['perf_capture_iterations']):
-            #TODO(krishnargv@): Add a method to verify that the Kiosk app is
-            #                   active and is running on the DUT.
-            logging.info("Running perf_capture Iteration: %d", iteration+1)
-            self.perf_results = self._run_perf_capture_cycle()
-            self._write_perf_keyvals(self.perf_results)
-            self._write_perf_results(self.perf_results)
-
-            # Post perf results directly to performance dashboard. You may view
-            # uploaded data at https://chromeperf.appspot.com/new_points,
-            # with test path pattern=ChromeOS_Enterprise/cros-*/longevity*/*
-            if perf_params['test_type'] == 'multiple_samples':
-                chart_data = enterprise_longevity_helper.read_perf_results(
-                        self.resultsdir, 'results-chart.json')
-                data_obj = self._format_data_for_upload(chart_data)
-                self._send_to_dashboard(data_obj)
-        tpm_utils.ClearTPMOwnerRequest(self.client)
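The upload path above (_format_data_for_upload plus _send_to_dashboard) targets Python 2's urllib/urllib2. A rough Python 3 sketch of the same POST, for illustration only; dash_entry is assumed to be the dict built by _format_data_for_upload.

import json
import urllib.parse
import urllib.request

DASHBOARD_UPLOAD_URL = 'https://chromeperf.appspot.com/add_point'

def send_to_dashboard(dash_entry):
    """POST one chromeperf data point and return the HTTP status code."""
    payload = urllib.parse.urlencode({'data': json.dumps(dash_entry)}).encode()
    request = urllib.request.Request(DASHBOARD_UPLOAD_URL, data=payload)
    with urllib.request.urlopen(request) as response:
        return response.status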
diff --git a/server/site_tests/enterprise_LongevityTrackerServer/enterprise_longevity_helper.py b/server/site_tests/enterprise_LongevityTrackerServer/enterprise_longevity_helper.py
deleted file mode 100644
index c5341f2..0000000
--- a/server/site_tests/enterprise_LongevityTrackerServer/enterprise_longevity_helper.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import re
-import json
-import time
-import shutil
-import logging
-
-from autotest_lib.client.common_lib.cros import perf_stat_lib
-
-
-def get_cpu_usage(system_facade, measurement_duration_seconds):
-    """
-    Returns cpu usage in %.
-
-    @param system_facade: A SystemFacadeRemoteAdapter to access
-                          the CPU capture functionality from the DUT.
-    @param measurement_duration_seconds: CPU metric capture duration.
-
-    @returns current CPU usage percentage.
-
-    """
-    cpu_usage_start = system_facade.get_cpu_usage()
-    time.sleep(measurement_duration_seconds)
-    cpu_usage_end = system_facade.get_cpu_usage()
-    return system_facade.compute_active_cpu_time(
-            cpu_usage_start, cpu_usage_end) * 100
-
-
-def get_memory_usage(system_facade):
-    """
-    Returns total used memory in %.
-
-    @param system_facade: A SystemFacadeRemoteAdapter to access
-                          the memory capture functionality from the DUT.
-
-    @returns current memory used.
-
-    """
-    total_memory = system_facade.get_mem_total()
-    return ((total_memory - system_facade.get_mem_free())
-            * 100 / total_memory)
-
-
-def get_temperature_data(client, system_facade):
-    """
-    Returns temperature sensor data in Celsius.
-
-    @param client: host object used to run ectool/mosys commands on the DUT.
-    @param system_facade: A SystemFacadeRemoteAdapter to access the temperature
-                          capture functionality from the DUT.
-
-    @returns current CPU temperature.
-
-    """
-    ectool = client.run('ectool version', ignore_status=True)
-    if not ectool.exit_status:
-        ec_temp = system_facade.get_ec_temperatures()
-        return ec_temp[1]
-    else:
-        temp_sensor_name = 'temp0'
-        if not temp_sensor_name:
-            return 0
-        MOSYS_OUTPUT_RE = re.compile('(\w+)="(.*?)"')
-        values = {}
-        cmd = 'mosys -k sensor print thermal %s' % temp_sensor_name
-        for kv in MOSYS_OUTPUT_RE.finditer(client.run_output(cmd)):
-            key, value = kv.groups()
-            if key == 'reading':
-                value = int(value)
-            values[key] = value
-        return values['reading']
-
-
-#TODO(krishnargv): Replace _get_point_id with a call to the
-#                  _get_id_from_version method of the perf_uploader.py.
-def get_point_id(cros_version, epoch_minutes, version_pattern):
-    """
-    Compute point ID from ChromeOS version number and epoch minutes.
-
-    @param cros_version: String of ChromeOS version number.
-    @param epoch_minutes: String of minutes since 1970.
-    @param version_pattern: Regex pattern the version string must match.
-
-    @returns unique integer ID computed from given version and epoch.
-
-    """
-    # Number of digits from each part of the Chrome OS version string.
-    cros_version_col_widths = [0, 4, 3, 2]
-
-    def get_digits(version_num, column_widths):
-        if re.match(version_pattern, version_num):
-            computed_string = ''
-            version_parts = version_num.split('.')
-            for i, version_part in enumerate(version_parts):
-                if column_widths[i]:
-                    computed_string += version_part.zfill(column_widths[i])
-            return computed_string
-        else:
-            return None
-
-    cros_digits = get_digits(cros_version, cros_version_col_widths)
-    epoch_digits = epoch_minutes[-8:]
-    if not cros_digits:
-        return None
-    return int(epoch_digits + cros_digits)
-
-
-def open_perf_file(file_path):
-    """
-    Open a perf file. Write header line if new. Return file object.
-
-    If the file on |file_path| already exists, then open file for
-    appending only. Otherwise open for writing only.
-
-    @param file_path: file path for perf file.
-
-    @returns file object for the perf file.
-
-    """
-    if os.path.isfile(file_path):
-        perf_file = open(file_path, 'a+')
-    else:
-        perf_file = open(file_path, 'w')
-        perf_file.write('Time,CPU,Memory,Temperature (C)\r\n')
-    return perf_file
-
-
-def modulo_time(timer, interval):
-    """
-    Get time elapsed on |timer| for the |interval| modulus.
-
-    Value returned is used to adjust the timer so that it is
-    synchronized with the current interval.
-
-    @param timer: time on timer, in seconds.
-    @param interval: period of time in seconds.
-
-    @returns time elapsed from the start of the current interval.
-
-    """
-    return timer % int(interval)
-
-
-def syncup_time(timer, interval):
-    """
-    Get time remaining on |timer| for the |interval| modulus.
-
-    Value returned is used to induce sleep just long enough to put the
-    process back in sync with the timer.
-
-    @param timer: time on timer, in seconds.
-    @param interval: period of time in seconds.
-
-    @returns time remaining till the end of the current interval.
-
-    """
-    return interval - (timer % int(interval))
-
-
-def append_to_aggregated_file(ts_file, ag_file):
-    """
-    Append contents of perf timestamp file to perf aggregated file.
-
-    @param ts_file: file handle for performance timestamped file.
-    @param ag_file: file handle for performance aggregated file.
-
-    """
-    next(ts_file)  # Skip first line (the header) of timestamped file.
-    for line in ts_file:
-        ag_file.write(line)
-
-
-def copy_aggregated_to_resultsdir(resultsdir, aggregated_fpath, f_name):
-    """Copy perf aggregated file to results dir for AutoTest results.
-
-    Note: The AutoTest results default directory is located at /usr/local/
-    autotest/results/default/longevity_Tracker/results
-
-    @param resultsdir: Directory name where the perf results are stored.
-    @param aggregated_fpath: file path to Aggregated performance values.
-    @param f_name: Name of the perf file.
-    """
-    results_fpath = os.path.join(resultsdir, f_name)
-    shutil.copy(aggregated_fpath, results_fpath)
-    logging.info('Copied %s to %s', aggregated_fpath, results_fpath)
-
-
-def record_90th_metrics(perf_values, perf_metrics):
-    """Record 90th percentile metric of attribute performance values.
-
-    @param perf_values: dict of attribute performance values.
-    @param perf_metrics: dict of attribute 90th-percentile performance metrics.
-    """
-    # Calculate 90th percentile for each attribute.
-    cpu_values = perf_values['cpu']
-    mem_values = perf_values['mem']
-    temp_values = perf_values['temp']
-    cpu_metric = perf_stat_lib.get_kth_percentile(cpu_values, .90)
-    mem_metric = perf_stat_lib.get_kth_percentile(mem_values, .90)
-    temp_metric = perf_stat_lib.get_kth_percentile(temp_values, .90)
-
-    logging.info('Performance values: %s', perf_values)
-    logging.info('90th percentile: cpu: %s, mem: %s, temp: %s',
-                 cpu_metric, mem_metric, temp_metric)
-
-    # Append 90th percentile to each attribute performance metric.
-    perf_metrics['cpu'].append(cpu_metric)
-    perf_metrics['mem'].append(mem_metric)
-    perf_metrics['temp'].append(temp_metric)
-
-
-def get_median_metrics(metrics):
-    """
-    Returns median of each attribute performance metric.
-
-    If no metric values were recorded, return 0 for each metric.
-
-    @param metrics: dict of attribute performance metric lists.
-
-    @returns dict of attribute performance metric medians.
-
-    """
-    if len(metrics['cpu']):
-        cpu_metric = perf_stat_lib.get_median(metrics['cpu'])
-        mem_metric = perf_stat_lib.get_median(metrics['mem'])
-        temp_metric = perf_stat_lib.get_median(metrics['temp'])
-    else:
-        cpu_metric = 0
-        mem_metric = 0
-        temp_metric = 0
-    logging.info('Median of 90th percentile: cpu: %s, mem: %s, temp: %s',
-                 cpu_metric, mem_metric, temp_metric)
-    return {'cpu': cpu_metric, 'mem': mem_metric, 'temp': temp_metric}
-
-
-def read_perf_results(resultsdir, resultsfile):
-    """
-    Read perf results from results-chart.json file for Perf Dashboard.
-
-    @param resultsdir: directory containing the results file.
-    @param resultsfile: name of the results file (e.g. results-chart.json).
-
-    @returns dict of perf results, formatted as JSON chart data.
-
-    """
-    results_file = os.path.join(resultsdir, resultsfile)
-    with open(results_file, 'r') as fp:
-        contents = fp.read()
-        chart_data = json.loads(contents)
-    # TODO(krishnargv): refactor this with a better method to delete.
-    open(results_file, 'w').close()
-    return chart_data
-
-
-def verify_perf_params(expected_params, perf_params):
-    """
-    Verify that all the expected parameters were passed to the test.
-
-    Return True if the perf_params dict passed via the control file
-    has all of the expected parameters and they have valid values.
-
-    @param expected_params: list of expected parameters.
-    @param perf_params: dict of the parameters passed via the control file.
-
-    @returns True if the perf_params dict is valid, else returns False.
-
-    """
-    for param in expected_params:
-        if param not in perf_params or not perf_params[param]:
-            return False
-    return True
\ No newline at end of file
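A small worked example of the interval arithmetic used by modulo_time() and syncup_time() above; the two helpers are restated so the snippet runs standalone, and the numbers are made up.

def modulo_time(timer, interval):
    """Seconds elapsed since the start of the current interval."""
    return timer % int(interval)

def syncup_time(timer, interval):
    """Seconds remaining until the end of the current interval."""
    return interval - (timer % int(interval))

# 130s into a run with a 60s sample interval: 10s past the last tick,
# so the capture loop sleeps 50s to land exactly on the next one.
assert modulo_time(130, 60) == 10
assert syncup_time(130, 60) == 50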
diff --git a/server/site_tests/enterprise_OnlineDemoMode/control b/server/site_tests/enterprise_OnlineDemoMode/control
deleted file mode 100644
index f7936b4..0000000
--- a/server/site_tests/enterprise_OnlineDemoMode/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "wzang"
-NAME = "enterprise_OnlineDemoMode"
-# stop test from running until it can be fixed: http://go/crb/1104747
-# ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "Enterprise"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-JOB_RETRIES = 2
-
-DOC = """
-This test enrolls a Chrome OS device to online demo mode.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_OnlineDemoMode', host=host)
-
-job.parallel_simple(run, machines)
diff --git a/server/site_tests/enterprise_OnlineDemoMode/enterprise_OnlineDemoMode.py b/server/site_tests/enterprise_OnlineDemoMode/enterprise_OnlineDemoMode.py
deleted file mode 100644
index 7143f04..0000000
--- a/server/site_tests/enterprise_OnlineDemoMode/enterprise_OnlineDemoMode.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-_CLIENT_TEST = 'enterprise_OnlineDemoModeEnrollment'
-
-class enterprise_OnlineDemoMode(test.test):
-    """Enrolls to online demo mode."""
-    version = 1
-
-
-    def run_once(self, host):
-        """Runs the client test and clears TPM owner on DUT after it's done."""
-
-        client_at = autotest.Autotest(host)
-        client_at.run_test(_CLIENT_TEST)
-        client_at._check_client_test_result(host, _CLIENT_TEST)
-
-        tpm_utils.ClearTPMOwnerRequest(host)
\ No newline at end of file
diff --git a/server/site_tests/enterprise_RemoraRequisitionDisplayUsageServer/control b/server/site_tests/enterprise_RemoraRequisitionDisplayUsageServer/control
deleted file mode 100644
index 5f880ea..0000000
--- a/server/site_tests/enterprise_RemoraRequisitionDisplayUsageServer/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "felixe"
-NAME = "enterprise_RemoraRequisitionDisplayUsageServer"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = 'suite:hotrod-remora'
-DEPENDENCIES = 'mimo'
-JOB_RETRIES = 3
-
-DOC = """
-This test clears the TPM if necessary, kicks off a client side test that verifies
-that the enrollment screen is shown on the Mimo display. Every time the TPM is
-cleared, the system is rebooted.
-"""
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_RemoraRequisitionDisplayUsageServer', host=host)
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_RemoraRequisitionDisplayUsageServer/enterprise_RemoraRequisitionDisplayUsageServer.py b/server/site_tests/enterprise_RemoraRequisitionDisplayUsageServer/enterprise_RemoraRequisitionDisplayUsageServer.py
deleted file mode 100644
index ff3a4fa..0000000
--- a/server/site_tests/enterprise_RemoraRequisitionDisplayUsageServer/enterprise_RemoraRequisitionDisplayUsageServer.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import test, autotest
-
-
-class enterprise_RemoraRequisitionDisplayUsageServer(test.test):
-    """A test that runs enterprise_RemoraRequisitionDisplayUsage and clears
-    the TPM as necessary."""
-    version = 1
-
-    def run_once(self, host=None):
-        self.client = host
-
-        tpm_utils.ClearTPMOwnerRequest(self.client)
-        autotest.Autotest(self.client).run_test(
-            'enterprise_RemoraRequisitionDisplayUsage',
-             check_client_result=True)
-        tpm_utils.ClearTPMOwnerRequest(self.client)
diff --git a/server/site_tests/enterprise_RemoraRequisitionServer/control b/server/site_tests/enterprise_RemoraRequisitionServer/control
deleted file mode 100644
index 41caf11..0000000
--- a/server/site_tests/enterprise_RemoraRequisitionServer/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith, zelidrag"
-NAME = "enterprise_RemoraRequisitionServer"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-# stop test from running until it can be fixed: https://crbug.com/1116132
-# ATTRIBUTES = "suite:hotrod-remora"
-JOB_RETRIES = 3
-
-DOC = """
-This test clears the TPM if necessary, kicks off a client side test that enrolls
-a device as a remora device and clears the TPM again. Every time the TPM is
-cleared, the system is rebooted.
-"""
-
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test('enterprise_RemoraRequisitionServer', host=host)
-
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/enterprise_RemoraRequisitionServer/enterprise_RemoraRequisitionServer.py b/server/site_tests/enterprise_RemoraRequisitionServer/enterprise_RemoraRequisitionServer.py
deleted file mode 100644
index 10316cb..0000000
--- a/server/site_tests/enterprise_RemoraRequisitionServer/enterprise_RemoraRequisitionServer.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import test, autotest
-
-
-class enterprise_RemoraRequisitionServer(test.test):
-    """A test that runs enterprise_RemoraRequisition and clears the TPM as
-    necessary."""
-    version = 1
-
-    def run_once(self, host=None):
-        self.client = host
-
-        tpm_utils.ClearTPMOwnerRequest(self.client)
-        autotest.Autotest(self.client).run_test('enterprise_RemoraRequisition',
-                                                check_client_result=True)
-        tpm_utils.ClearTPMOwnerRequest(self.client)
diff --git a/server/site_tests/example_UnitTestServer/control b/server/site_tests/example_UnitTestServer/control
deleted file mode 100644
index 153742c..0000000
--- a/server/site_tests/example_UnitTestServer/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "example_UnitTestServer"
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "example"
-TEST_TYPE = "server"
-
-DOC = """
-This test executes an example unit test and then scrapes the code coverage
-information for processing on the server.
-"""
-
-def run_unit_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test("example_UnitTestServer", host=host)
-
-parallel_simple(run_unit_test, machines)
diff --git a/server/site_tests/example_UnitTestServer/example_UnitTestServer.py b/server/site_tests/example_UnitTestServer/example_UnitTestServer.py
deleted file mode 100644
index 6de7810..0000000
--- a/server/site_tests/example_UnitTestServer/example_UnitTestServer.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server.bin import unit_test_server
-
-class example_UnitTestServer(unit_test_server.unit_test_server):
-    version = 1
-    client_test = 'example_UnitTest'
-    test_files = ['main.cc']
diff --git a/server/site_tests/factory_Basic/control b/server/site_tests/factory_Basic/control
deleted file mode 100644
index f48ad8a..0000000
--- a/server/site_tests/factory_Basic/control
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-
-NAME = "factory_Basic"
-AUTHOR = "beeps@google.com, chromeos-test@google.com"
-ATTRIBUTES = "suite:factory"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_TYPE = "server"
-
-DOC = """Basic factory tests."""
-
-# Set these variables to the path of a test list reachable within the chroot
-# and the name of the test list respectively. If the test list is already on
-# the DUT, you don't need a test_list_path. E.g., a local test_list_path: /home/\
-# <username>/trunk/src/third_party/autotest/files/server/site_tests/\
-# factory_Basic/test_list.suspend_basic, for which the test_list_name
-# is suspend_basic.
-args_dict = utils.args_to_dict(args)
-test_list_path = args_dict.get('test_list_path', '')
-test_list_name = args_dict.get('test_list_name')
-
-def run(machine):
-    if test_list_name:
-        host = hosts.create_host(machine)
-        job.run_test("factory_Basic", host=host, disable_sysinfo=True,
-                     test_list_name=test_list_name,
-                     test_list_path=test_list_path)
-    else:
-        raise error.AutoservError('Please specify --args="test_list_path=<path '
-                        'to test list> test_list_name=<name of test list>" '
-                        'when invoking test_that. If your test_list is already '
-                        'on the DUT, just the test_list_name will suffice.')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/factory_Basic/factory_Basic.py b/server/site_tests/factory_Basic/factory_Basic.py
deleted file mode 100644
index 1203973..0000000
--- a/server/site_tests/factory_Basic/factory_Basic.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-
-from autotest_lib.server import test
-from autotest_lib.server.cros import goofy_client
-
-
-class factory_Basic(test.test):
-    """Basic factory wrapper."""
-    version = 1
-    REMOTE_TEST_LIST_DIR = '/usr/local/factory/test_lists'
-
-    def initialize(self, host, test_list_path, test_list_name):
-        """Initialize a goofy proxy and copy over the test lists.
-
-        @param host: The host to run this test on.
-        @param test_list_path: The local path of the test_list to copy
-                               over to the DUT.
-        @param test_list_name: The name of the test list.
-        """
-        self._goofy_client = goofy_client.GoofyProxy(host)
-        if test_list_path:
-            host.send_file(test_list_path,
-                           os.path.join(self.REMOTE_TEST_LIST_DIR,
-                                        'test_list.%s' % test_list_name))
-
-            # For goofy to load any new test lists we need a factory restart.
-            host.run('factory_restart -a')
-
-
-    def run_once(self, host, test_list_name):
-        """Wait on all the tests in a test_list to finish.
-
-        @param host: The host to run this test on.
-        @param test_list_name: The name of the test list to wait on.
-        """
-        self._goofy_client.monitor_tests(test_list_name)
-        self._goofy_client.get_results(self.resultsdir)
diff --git a/server/site_tests/factory_Basic/test_list.fwteam_small b/server/site_tests/factory_Basic/test_list.fwteam_small
deleted file mode 100644
index 28f37d3..0000000
--- a/server/site_tests/factory_Basic/test_list.fwteam_small
+++ /dev/null
@@ -1,437 +0,0 @@
-# -*- mode: python; coding: utf-8 -*-
-#
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# DOCUMENTATION:
-#
-# Define below an ordered list if tests for execution on factory
-# assembly lines.  This list can be customized for specific factory
-# needs, within the limits set by the below comments -- specifically,
-# ODMs should not remove any Google required tests.
-#
-# This file is loaded via a call to execfile() in the primary factory
-# autotest control file.
-
-# This particular test list is meant to represent the RunIn portion of the
-# normal factory test_list for internal testing purposes.
-
-TEST_LIST_NAME = 'Firmware Team Stress Tests'
-
-# -- CUSTOMIZABLE SETTINGS -------------------------------------------
-# The number of suspend/resume tests you want during run-in.
-_RUNIN_RESUME_ITERATIONS = 1
-
-# The duration of stress test + repeated suspend/resume during run-in.
-# This may detect bit flips between suspend/resume.
-# Running for 5 hours to start.
-_RUNIN_DOZING_SAT_DURATION_SECS = int(1 * 60 * 60)
-
-# The number of reboots you want during run-in.
-_RUNIN_REBOOT_SEQ_ITERATIONS = 1
-
-# The duration of stress test during run-in (suggested 10+ mins).
-# Running for 24 hours to start.
-_RUNIN_SAT_DURATION_SECS = 1 * 60 * 60
-
-# The interval of logging events in seconds during run-in.
-_RUNIN_LOG_INTERVAL_SECS = 10
-
-# Enable requirement for RunIn finish test.
-_REQUIRE_RUN_FOR_RUNIN_FINISH = False
-
-# Enable options that apply only in a real factory environment.
-_FACTORY_ENVIRONMENT = False
-
-# Enable options that only apply in a full factory image (i.e. an image that
-# contains a finalize-able image).
-_FULL_FACTORY_IMAGE = False
-
-# We use the default values for global options unless
-# _FACTORY_ENVIRONMENT is specified, but if desired, you may also
-# specify options outside the if block.
-if _FACTORY_ENVIRONMENT:
-  # echo -n 'passwordgoeshere' | sha1sum
-  # - Use operator mode by default and require a password to enable
-  #   engineering mode
-  options.engineering_password_sha1 = '6848bf3617a2cf82bd5260a146d4a3236a20badc'
-  # - Default to Chinese language
-  options.ui_lang = 'zh'
-  options.wlans = [WLAN(ssid='selshopfloor',
-                        security='psk', passphrase='ganondorf')]
-
-  # Choose another access point as a contingency plan in case the main
-  # access point gets overloaded.
-  try:
-    _AP_COUNT = 2
-    import hashlib
-    mac_address = open('/sys/class/net/mlan0/address').read().strip()
-    # Choose based on a hash of the MAC address.  (Don't use the MAC
-    # address directly since it may have certain bit patterns.)
-    ap_number = int(hashlib.md5(mac_address).hexdigest(), 16) % _AP_COUNT
-    options.wlans.append(WLAN(ssid=('selshopfloor%d' % (ap_number + 1)),
-                              security='psk', passphrase='ganondorf'))
-    import logging
-    logging.info('Set wlans to %s', ', '.join([x.ssid for x in options.wlans]))
-  except:
-    # This shouldn't happen, but let's not prevent Goofy from starting up.
-    import logging
-    logging.exception('Unable to choose random WLAN access point')
-
-  # - Enable background event log syncing
-  options.sync_event_log_period_secs = 300
-  options.update_period_secs = 300
-  # - Enable clock syncing with shopfloor server
-  options.sync_time_period_secs = None
-  options.shopfloor_server_url = 'http://10.3.0.11:8082/'
-  # - Disable ChromeOS keys.
-  options.disable_cros_shortcut_keys = True
-
-  #TODO (cychiang) open these options after cherry-picking the changes from
-  # factory branch.
-  #options.sync_log_period_secs = None
-  options.core_dump_watchlist = ['*glmark2*']
-  #options.log_disk_space_period_secs = 120
-  #options.stateful_usage_threshold = 90
-
-options.min_charge_pct = 87
-options.max_charge_pct = 88
-
-#
-# For details on available options, see the Options class in
-# py/test/factory.py.
-
-# Change this to enable using shop floor system.
-_ENABLE_SHOP_FLOOR = False
-
-# Change this to match your local report upload site.
-# Default is to use shopfloor if available, otherwise discard reports.
-# WARNING: 'none' is ONLY ALLOWED FOR DEBUGGING.
-_REPORT_UPLOAD_METHOD = 'shopfloor' if _ENABLE_SHOP_FLOOR else 'none'
-#
-# * For LAN environments, setup a FTP server and build FTP URL.
-#   Example: ftp://user:pass@host:port/directory/
-#
-#_REPORT_UPLOAD_METHOD = 'ftp://LOCAL_FTP_HOST/'
-#
-# * If you have access to public internet, use Google CPFE URL.
-#   Example: ("cpfe:https://www.google.com/chromeos/partner/fe/report_upload?"
-#             "device_name=mario&report_type=rma")
-#
-#_REPORT_UPLOAD_METHOD = (
-#  "cpfe:https://www.google.com/chromeos/partner/fe/report_upload?"
-#  "device_name=PLATFORM_NAME&report_type=REPORT_TYPE")
-#
-# REPORT UPLOADING IS REQUIRED TO BE SET AT BUILD TIME.
-# REPORTS MUST BE PROVIDED TO GOOGLE, MACHINES WITHOUT LOGS CANNOT BE SUPPORTED.
-# -- END OF CUSTOMIZABLE SETTINGS ------------------------------------
-
-def SyncShopFloor(id_suffix=None):
-  """Creates a step to sync with the shopfloor server.
-
-  If _FACTORY_ENVIRONMENT is False, None is returned (since there is no
-  shopfloor server to sync to).
-
-  Args:
-    id_suffix: An optional suffix in case multiple SyncShopFloor steps
-      are needed in the same group (since they cannot have the same ID).
-  """
-  if _FACTORY_ENVIRONMENT:
-    suffix_str = '_' + str(id_suffix) if id_suffix else ''
-    return OperatorTest(
-        id='SyncShopFloor' + suffix_str,
-        pytest_name='flush_event_logs',
-        label_zh=u'同步事件记录 ' + suffix_str)
-  else:
-    return None
-
-def Barrier(id_suffix):
-  return  OperatorTest(
-      id='Barrier_' + str(id_suffix),
-      label_zh=u'检查关卡' + str(id_suffix),
-      pytest_name='summary',
-      never_fails=True,
-      disable_abort=True,
-      dargs={'disable_input_on_fail':True})
-
-# Tests in the test_list will be run in the order below, unless the
-# operator interrupts the flow.
-
-TEST_LIST = [
-
-TestGroup(
-    id='RunIn',
-    subtests=[
-
-    OperatorTest(
-        id='ShopFloor1',
-        label_zh=u'ShopFloor1',
-        subtests=[
-            SyncShopFloor(),
-
-            # Read device data from VPD (most importantly,
-            # 'mlb_serial_number' and 'smt_complete').  If SMT is
-            # already complete, we need not (and cannot!) run the
-            # shopfloor steps again.
-            OperatorTest(
-                label_en='Read Device Data from VPD',
-                label_zh='从 VPD 读机器资料',
-                pytest_name='read_device_data_from_vpd'),
-
-            OperatorTest(
-                label_en='Call ShopFloor (GetDeviceInfo)',
-                label_zh=u'连到 ShopFloor (GetDeviceInfo)',
-                pytest_name='call_shopfloor',
-                dargs={'method': 'GetDeviceInfo',
-                       'args': lambda env: [
-                           env.GetDeviceData()['mlb_serial_number'],
-                           ],
-                       'action': 'update_device_data'}),
-
-            OperatorTest(
-                id='VPD',
-                label_zh=u'产品资讯 (VPD)',
-                pytest_name='vpd',
-                dargs={'use_shopfloor_device_data': True}),
-
-            OperatorTest(
-                id='WriteHWIDv3',
-                label_en='Write HWID (v3)',
-                label_zh=u'硬体代号 (HWID v3)',
-                pytest_name='hwid_v3')
-            ])
-    if _FACTORY_ENVIRONMENT else None,
-
-    # TODO(bhthompson): add in video and audio tests
-    FactoryTest(
-        id='Stress',
-        label_zh=u'集合压力测试',
-        subtests=[
-            SyncShopFloor(),
-
-            FactoryTest(
-                id='ThermalSensor',
-                label_zh=u'温度感应器',
-                pytest_name='i2c_probe',
-                backgroundable=True,
-                dargs={'bus': 7,
-                       'addr': 0x4c}),
-
-            FactoryTest(
-                label_en='VerifyRootPartition',
-                label_zh=u'验证根磁區',
-                pytest_name='verify_root_partition',
-                backgroundable=True,
-                dargs={'kern_a_device': 'mmcblk0p4',
-                       'root_device': 'mmcblk0p5'})
-            if _FULL_FACTORY_IMAGE else None,
-
-            FactoryTest(
-                id='BadBlocks',
-                label_zh=u'毁损扇區',
-                backgroundable=True,
-                pytest_name='bad_blocks',
-                # It takes ~3200s per gigabyte.  Target ~75% of RunIn.
-                # NOTE: as Spring uses eMMC, we should be careful to limit the
-                # number of erase/write cycles; eMMCs typically have much lower
-                # endurance than SSDs, so a particular block may wear out
-                # after a few thousand write/erase cycles.
-                dargs={'timeout_secs': 120,
-                       'log_threshold_secs': 10,
-                       'max_bytes': int(1024 * 1024 * 1024 * 0.75 *
-                                        _RUNIN_SAT_DURATION_SECS / 3200.0) })
-            if _FULL_FACTORY_IMAGE else
-            FactoryTest(
-                label_en='BadBlocks',
-                label_zh=u'毁损扇區',
-                backgroundable=True,
-                pytest_name='bad_blocks',
-                # It takes ~1700s per round @1G space, we can stress
-                # ~75% of RunIn.
-                iterations=int((_RUNIN_SAT_DURATION_SECS / 1700.0) * 0.75),
-                dargs={'mode': 'file',
-                       'timeout_secs': 120,
-                       'log_threshold_secs': 10,
-                       'max_bytes': 1 * 1024 * 1024 * 1024}),
-
-            FactoryTest(
-                id='StressAppTest',
-                label_zh=u'压力测试',
-                autotest_name='hardware_SAT',
-                backgroundable=True,
-                dargs={'drop_caches': True,
-                       'free_memory_fraction': 0.85,
-                       'seconds': _RUNIN_SAT_DURATION_SECS}),
-
-            OperatorTest(
-                id='Graphics',
-                label_zh=u'图像',
-                pytest_name='webgl_aquarium',
-                backgroundable=True,
-                dargs={'duration_secs': _RUNIN_SAT_DURATION_SECS}),
-
-            FactoryTest(
-                label_en='Camera',
-                label_zh=u'相机',
-                backgroundable=True,
-                pytest_name='camera',
-                dargs={'face_recognition': False,
-                       'timeout_secs': _RUNIN_SAT_DURATION_SECS,
-                       'show_image': False,
-                       'timeout_run': True}),
-
-            FactoryTest(
-                id='RandomNumberGen',
-                label_zh=u'乱数产生',
-                backgroundable=True,
-                pytest_name='urandom',
-                dargs={'duration_secs': _RUNIN_SAT_DURATION_SECS}),
-
-            OperatorTest(
-                id='Countdown',
-                label_zh=u'倒数计时',
-                backgroundable=True,
-                pytest_name='countdown',
-                dargs={'title_en': 'Run-In Tests',
-                       'title_zh': '烧机测试',
-                       'duration_secs': _RUNIN_SAT_DURATION_SECS,
-                       'log_interval': _RUNIN_LOG_INTERVAL_SECS,
-                       'grace_secs': 8*60,
-                       'temp_max_delta': 10,
-                       'temp_criteria': [
-                         # name, temp_index, warning_temp, critical_temp.
-                         ('CPU', 0, 95, 105)]}),
-    ]),
-
-    RebootStep(
-        label_en='Reboot (%s %s)' % (
-            _RUNIN_REBOOT_SEQ_ITERATIONS,
-            'time' if _RUNIN_REBOOT_SEQ_ITERATIONS == 1 else 'times'),
-        label_zh=u'重新开机 (%s 次)' % _RUNIN_REBOOT_SEQ_ITERATIONS,
-        iterations=_RUNIN_REBOOT_SEQ_ITERATIONS),
-
-    OperatorTest(
-        label_en='Suspend/Resume (%s %s)' % (
-            _RUNIN_RESUME_ITERATIONS,
-            'time' if _RUNIN_RESUME_ITERATIONS == 1 else 'times'),
-        label_zh=u'睡眠、唤醒 (%s 次)' % _RUNIN_RESUME_ITERATIONS,
-        pytest_name='suspend_resume',
-        dargs={'cycles': _RUNIN_RESUME_ITERATIONS,
-               'suspend_delay_min_secs': 10,
-               'suspend_delay_max_secs': 15,
-               'suspend_worst_case_secs': 120}),
-
-    FactoryTest(
-        id='DozingStress',
-        label_zh=u'睡眠内存压力测试',
-        subtests=[
-            SyncShopFloor(),
-
-            # if StressAppTest fails here, it's likely memory issue.
-            FactoryTest(
-                id='StressAppTest',
-                label_zh=u'压力测试',
-                autotest_name='hardware_SAT',
-                backgroundable=True,
-                dargs={'drop_caches': True,
-                       'free_memory_fraction': 0.85,
-                       'seconds': _RUNIN_DOZING_SAT_DURATION_SECS}),
-
-            FactoryTest(
-                label_en='Suspend/Resume (%d %s)' % (
-                    _RUNIN_RESUME_ITERATIONS,
-                    'time' if _RUNIN_RESUME_ITERATIONS == 1 else 'times'),
-                label_zh=u'睡眠、唤醒 (%s 次)' % _RUNIN_RESUME_ITERATIONS,
-                pytest_name='suspend_resume',
-                backgroundable=True,
-                dargs={'cycles': _RUNIN_RESUME_ITERATIONS,
-                       'suspend_delay_min_secs': 28,
-                       'suspend_delay_max_secs': 30,
-                       'resume_early_margin_secs': 1,
-                       'suspend_worst_case_secs': 120}),
-    ]),
-
-    OperatorTest(
-        id='ShopFloor2',
-        label_zh=u'ShopFloor2',
-        subtests=[
-            SyncShopFloor(),
-
-            OperatorTest(
-                label_en='Call ShopFloor (FinishRunIn)',
-                label_zh=u'连到 ShopFloor (FinishRunIn)',
-                pytest_name='call_shopfloor',
-                dargs={'method': 'FinishRunIn',
-                       'args': lambda env: [
-                           env.GetDeviceData()['mlb_serial_number'],
-                           ]}),
-            ])
-    if _FACTORY_ENVIRONMENT else None,
-
-    # 87% =~ 2450 mAh. Typical charging current under load 0 is 500-600 mA. Give
-    # it 7hr to regulate battery to *_starting_charge_pct if starting with empty
-    # battery.
-    # Discharging current under 2 load is about -700mA.  Empirically, it takes
-    # ~150 seconds to discharge 30mAh.
-    # Charging current under 1 load is about 60-150mA. Empirically, it takes
-    # ~800 seconds to charge 30mAh.
-    # Note: If a SW bug causes Python task to occupy 100% CPU, this test may
-    # fail due to lower-than-expected charging current.
-    OperatorTest(
-        id='Charger',
-        label_zh=u'充电器',
-        exclusive=['CHARGER'],
-        pytest_name='charger',
-        dargs={'min_starting_charge_pct': 87,
-               'max_starting_charge_pct': 88,
-               'starting_timeout_secs': 25000,
-               'check_battery_current': False,
-               'use_percentage': False,
-               'spec_list': [(30, 1600, 1), (-30, 600, 2)]}),
-
-    RebootStep(
-        id='RebootAfterCharger',
-        label_en='Reboot',
-        label_zh=u'重新开机'),
-
-    OperatorTest(
-        id='Charge',
-        label_zh=u'充电',
-        pytest_name='blocking_charge',
-        exclusive=['CHARGER'],
-        dargs={'timeout_secs': 7200,
-               'target_charge_pct': 87}),
-
-    OperatorTest(
-        id='ShopFloor3',
-        label_zh=u'ShopFloor3',
-        subtests=[
-            SyncShopFloor(),
-
-            OperatorTest(
-                label_en='Call ShopFloor (FinishBattery)',
-                label_zh=u'连到 ShopFloor (FinishBattery)',
-                pytest_name='call_shopfloor',
-                dargs={'method': 'FinishBattery',
-                       'args': lambda env: [
-                           env.GetDeviceData()['mlb_serial_number'],
-                           ]}),
-            ])
-    if _FACTORY_ENVIRONMENT else None,
-
-    Barrier('RUNIN'),
-
-    OperatorTest(
-        label_en='Finish',
-        label_zh=u'结束',
-        pytest_name='message',
-        require_run=(Passed('RunIn.Barrier_RUNIN')
-                     if _REQUIRE_RUN_FOR_RUNIN_FINISH else None),
-        never_fails=True,
-        dargs={'html_en': 'RunIn tests finished, press SPACE to continue.\n',
-               'html_zh': 'RunIn 测试结束,按下空白键继续\n'}),
-
-]),  # End of RUNIN test group.
-]
-
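
The deleted test_list.fwteam_small above picks a backup Wi-Fi access point by hashing the DUT's MAC address rather than using the address bits directly, so devices spread evenly across the available APs. A minimal standalone sketch of that selection step, assuming the same mlan0 interface and selshopfloor SSID naming used by the removed list (the encode() call is an addition for Python 3 str/bytes handling):

import hashlib
import logging

_AP_COUNT = 2  # number of alternate access points, as in the removed list

def pick_wlan_ssid(iface='mlan0', ssid_prefix='selshopfloor'):
    """Deterministically pick an alternate AP based on the MAC address.

    The MAC is hashed (MD5, as in the removed test list) so the choice is
    spread evenly instead of following raw MAC bit patterns.
    """
    with open('/sys/class/net/%s/address' % iface) as f:
        mac_address = f.read().strip()
    ap_number = int(hashlib.md5(mac_address.encode()).hexdigest(), 16) % _AP_COUNT
    ssid = '%s%d' % (ssid_prefix, ap_number + 1)
    logging.info('Selected alternate WLAN %s for MAC %s', ssid, mac_address)
    return ssid
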
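The BadBlocks entry in the same removed list sizes its workload from the stress duration, using the list's own estimate of roughly 3200 s per GiB and targeting about 75% of the run-in window. A quick worked check of that expression with the values configured above:

# Reproduces the max_bytes expression from the removed test list.
_RUNIN_SAT_DURATION_SECS = 1 * 60 * 60   # 1 hour, as configured above
_SECS_PER_GIB = 3200.0                   # ~3200 s to test 1 GiB (the list's own estimate)

max_bytes = int(1024 * 1024 * 1024 * 0.75 * _RUNIN_SAT_DURATION_SECS / _SECS_PER_GIB)
# 0.75 * 3600 / 3200 = 0.84375, so a 1-hour run tests about 0.84 GiB.
print(max_bytes)  # 905969664 bytes (864 MiB)
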
diff --git a/server/site_tests/factory_Basic/test_list.suspend_basic b/server/site_tests/factory_Basic/test_list.suspend_basic
deleted file mode 100755
index 819b69d..0000000
--- a/server/site_tests/factory_Basic/test_list.suspend_basic
+++ /dev/null
@@ -1,97 +0,0 @@
-# -*- mode: python; coding: utf-8 -*-
-#
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-TEST_LIST_NAME = 'Dozing Basic'
-
-options.auto_run_on_start = False
-
-# -- CUSTOMIZABLE SETTINGS -------------------------------------------
-# The number of suspend/resume tests you want during run-in.
-_RUNIN_RESUME_ITERATIONS = 1
-
-# The duration of stress test + repeated suspend/resume during run-in.
-_RUNIN_DOZING_SAT_DURATION_SECS = int(24 * 60 * 60)
-
-# The interval of logging events in seconds during run-in.
-_RUNIN_LOG_INTERVAL_SECS = 10
-
-_TEST_ARGS = {'cycles': _RUNIN_RESUME_ITERATIONS,
-              'suspend_delay_min_secs': 28,
-              'suspend_delay_max_secs': 30,
-              'resume_early_margin_secs': 1,
-              'suspend_worst_case_secs': 120}
-
-_NATIVE_TEST_LABEL = u'睡眠/唤醒 (%s次)' % _RUNIN_RESUME_ITERATIONS
-
-_ENGLISH_TEST_LABEL = 'Suspend/Resume (%s %s)' % (
-                         _RUNIN_RESUME_ITERATIONS,
-                         'time' if _RUNIN_RESUME_ITERATIONS == 1 else 'times')
-
-_TEST_NAME = 'suspend_resume'
-
-def Barrier(id_suffix):
-  return OperatorTest(
-      id='Barrier_' + str(id_suffix),
-      label_zh=u'检查关卡' + str(id_suffix),
-      pytest_name='summary',
-      never_fails=True,
-      disable_abort=True,
-      dargs={'disable_input_on_fail':True})
-
-# Tests in the test_list will be run in the order below, unless the
-# operator interrupts the flow.
-
-TEST_LIST = [
-TestGroup(
-    id='RunIn',
-    subtests=[
-    FactoryTest(
-        id='DozingBasic',
-        label_zh=u'Label',
-        subtests=[
-
-            FactoryTest(
-                label_en=_ENGLISH_TEST_LABEL,
-                label_zh=_NATIVE_TEST_LABEL,
-                pytest_name=_TEST_NAME,
-                backgroundable=True,
-                dargs=_TEST_ARGS),
-    ]),
-]),  # End of RUNIN test group.
-
-TestGroup(
-    id='RunIn2',
-    subtests=[
-
-    FactoryTest(
-        id='DozingBasic2',
-        label_zh=u'Label2',
-        subtests=[
-
-            FactoryTest(
-                label_en=_ENGLISH_TEST_LABEL,
-                label_zh=_NATIVE_TEST_LABEL,
-                pytest_name=_TEST_NAME,
-                backgroundable=False,
-                dargs=_TEST_ARGS),
-    ]),
-    FactoryTest(
-        id='DozingBasic3',
-        label_zh=u'Label3',
-        subtests=[
-
-            FactoryTest(
-                label_en=_ENGLISH_TEST_LABEL,
-                label_zh=_NATIVE_TEST_LABEL,
-                pytest_name=_TEST_NAME,
-                backgroundable=False,
-                dargs=_TEST_ARGS),
-    ]),
-    Barrier('RUNIN'),
-]),  # End of RUNIN2 test group.
-
-]
-
diff --git a/server/site_tests/factory_InstallServo/control b/server/site_tests/factory_InstallServo/control
deleted file mode 100644
index 9d2b04d..0000000
--- a/server/site_tests/factory_InstallServo/control
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "factory_InstallServo"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "factory"
-TEST_TYPE = "server"
-
-DOC = """
-This test creates and runs a mini-Omaha server, images a ChromeOS
-factory install shim onto a USB image using the servo, installs ChromeOS on a
-real device, runs a subset of factory tests, and finally boots into ChromeOS.
-"""
-
-
-job.run_test('factory_InstallServo', **utils.args_to_dict(args))
diff --git a/server/site_tests/factory_InstallServo/factory_InstallServo.py b/server/site_tests/factory_InstallServo/factory_InstallServo.py
deleted file mode 100644
index 3574d94..0000000
--- a/server/site_tests/factory_InstallServo/factory_InstallServo.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Factory install servo tests.
-
-This test supports the flags documented in FactoryInstallTest, plus:
-
-    servo_host: the host running the servod (defaults to localhost)
-    servo_port: the port on which to run servod (defaults to an unused
-        port)
-    debug_image_usb: whether to image the USB disk in servo mode (defaults to
-        true, may be set to false for debugging only if the USB disk is
-        already imaged)
-"""
-
-
-import glob, logging, os, re, time
-
-from autotest_lib.client.bin import utils as client_utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import hosts
-from autotest_lib.server import utils
-from autotest_lib.server.cros.factory_install_test import FactoryInstallTest
-from autotest_lib.server.cros.servo import servo
-
-
-class factory_InstallServo(FactoryInstallTest):
-    """
-    Factory install servo tests.
-
-    See file-level docstring for more information.
-    """
-
-    def _create_servo(self, servo_host, servo_port):
-        self.servo = servo.Servo(
-                hosts.ServoHost(servo_host=servo_host, servo_port=servo_port))
-        def kill_servo():
-            del self.servo
-        self.cleanup_tasks.append(kill_servo)
-        self.servo.initialize_dut(cold_reset=True)
-
-        self.servo.enable_usb_hub()
-        self.servo_usb_disk = self.servo.probe_host_usb_dev()
-        if not self.servo_usb_disk:
-            raise error.TestError("Unable to find USB disk")
-        logging.info("Servo USB device detected at %s", self.servo_usb_disk)
-
-    def get_hwid_cfg(self):
-        """
-        Overridden from superclass.
-        """
-        return "servo"
-
-    def get_dut_client(self):
-        """
-        Overridden from superclass.
-        """
-        return hosts.SSHHost(self.dut_ip)
-
-    def run_factory_install(self, shim_image):
-        """
-        Overridden from superclass.
-        """
-        self.servo.install_recovery_image(image_path=shim_image)
-
-        # Wait for the IP address of the DUT to appear in the mini-Omaha
-        # server logs.
-        def get_dut_ip():
-            match = re.search(r"(\d+\.\d+\.\d+\.\d+) - -.*htpdate",
-                              open(self.miniomaha_output).read())
-            return match.group(1) if match else None
-
-        self.dut_ip = client_utils.poll_for_condition(
-            get_dut_ip, timeout=FactoryInstallTest.FACTORY_INSTALL_TIMEOUT_SEC,
-            desc="Get DUT IP")
-
-        logging.debug("DUT IP is %s", self.dut_ip)
-
-        if not self.get_dut_client().wait_up(
-            FactoryInstallTest.FACTORY_INSTALL_TIMEOUT_SEC):
-            raise error.TestFail("DUT never came up at %s" % self.dut_ip)
-
-    def reboot_for_wipe(self):
-        """
-        Overridden from superclass.
-        """
-        self.get_dut_client().reboot(
-            timeout=FactoryInstallTest.FIRST_BOOT_TIMEOUT_SEC)
-
-    def run_once(self, servo_host="localhost", servo_port=None,
-                 debug_image_usb=True,
-                 **args):
-        self.image_usb = self.parse_boolean(debug_image_usb)
-        self._create_servo(
-            servo_host,
-            int(servo_port) if servo_port else utils.get_unused_port())
-        super(factory_InstallServo, self).run_once(**args)
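
The deleted factory_InstallServo.py above discovers the DUT by polling the mini-Omaha server log until an htpdate request appears and extracting the source IP with a regex. A dependency-free sketch of that polling loop; the log path argument is hypothetical, and the original code goes through autotest's client_utils.poll_for_condition rather than a hand-rolled loop:

import re
import time

def wait_for_dut_ip(log_path, timeout_secs=300, poll_secs=5):
    """Poll a mini-Omaha log until an htpdate request reveals the DUT IP.

    Mirrors the regex used by the removed test; log_path is hypothetical.
    """
    deadline = time.time() + timeout_secs
    pattern = re.compile(r"(\d+\.\d+\.\d+\.\d+) - -.*htpdate")
    while time.time() < deadline:
        try:
            with open(log_path) as f:
                match = pattern.search(f.read())
            if match:
                return match.group(1)
        except FileNotFoundError:
            pass  # the server may not have created the log yet
        time.sleep(poll_secs)
    raise TimeoutError('DUT IP never appeared in %s' % log_path)
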
diff --git a/server/site_tests/factory_InstallVM/control b/server/site_tests/factory_InstallVM/control
deleted file mode 100644
index 834b086..0000000
--- a/server/site_tests/factory_InstallVM/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "factory_InstallVM"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "factory"
-TEST_TYPE = "server"
-
-DOC = """
-This test creates and runs a mini-Omaha server, boots a VM from the
-factory install shim, installs ChromeOS, runs a subset of factory
-tests, and finally boots into ChromeOS.
-"""
-
-
-parallel_simple(
-    lambda machine:
-        job.run_test('factory_InstallVM',
-                     host=hosts.create_host(machine),
-                     **utils.args_to_dict(args)),
-    machines)
diff --git a/server/site_tests/factory_InstallVM/factory_InstallVM.py b/server/site_tests/factory_InstallVM/factory_InstallVM.py
deleted file mode 100644
index d2517fe..0000000
--- a/server/site_tests/factory_InstallVM/factory_InstallVM.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Factory install VM tests.
-
-This test supports the flags documented in FactoryInstallTest, plus:
-
-    debug_vnc: whether to run VNC on the KVM (for debugging only)
-    debug_save_hda: if specified, path to save the hda.bin to after
-        running the factory install shim (for debugging only)
-    debug_reuse_hda: if specified, path to an existing hda.bin image
-        to reuse (for debugging only)
-"""
-
-
-import os, re
-
-from autotest_lib.client.bin import utils as client_utils
-from autotest_lib.server.cros.factory_install_test import FactoryInstallTest
-from autotest_lib.server.hosts import ssh_host
-
-
-# How long to wait after killing KVMs.
-_KILL_KVM_WAIT_SEC = 5
-
-# The size of the "SSD" in the KVM.
-_HDA_SIZE_MB = 8192
-
-# The name of the image used for hda.  This should be unique to this test,
-# since we will kill all stray KVM processes using this disk image.
-_HDA_FILENAME = "factory_InstallVM_hda.bin"
-
-
-class factory_InstallVM(FactoryInstallTest):
-    """
-    Factory install VM tests.
-
-    See file-level docstring for more information.
-    """
-
-    def _get_kvm_command(self, kvm_args=[]):
-        """
-        Returns the command to run KVM.
-
-        @param kvm_args: A list of extra args to pass to KVM.
-        """
-        kvm_base_args = [
-            "kvm",
-            "-m", "2048",
-            "-net", "nic,model=virtio",
-            "-net", "user,hostfwd=tcp::%d-:22" % self.ssh_tunnel_port,
-            "-vga", "vmware",  # Because -vga std is slow
-            ]
-
-        if self.vnc:
-            # Without nographic, we need to explicitly add "-serial stdio"
-            # (or output will go to vc).  Use 127.0.0.1 to ensure that kvm
-            # listens with IPv4.
-            kvm_base_args.extend(["-serial", "stdio", "-vnc", "127.0.0.1:1"])
-        else:
-            kvm_base_args.append("-nographic")
-
-        return " ".join(kvm_base_args + kvm_args)
-
-    def _kill_kvm(self):
-        """
-        Kills the KVM on the client machine.
-
-        This will kill any KVM whose command line contains _HDA_FILENAME
-        (which is specific to this test).
-        """
-        def try_kill_kvm():
-            pattern = "^kvm.*%s" % _HDA_FILENAME,
-            if (self.client.run("pgrep -f '%s'" % pattern, ignore_status=True)
-                .exit_status == 1):
-                return True
-            self.client.run("pkill -f '%s'" % (pattern))
-            return False
-
-        client_utils.poll_for_condition(
-            try_kill_kvm, timeout=_KILL_KVM_WAIT_SEC, desc="Kill KVM")
-
-    def get_hwid_cfg(self):
-        """
-        Overridden from superclass.
-        """
-        return "vm"
-
-    def get_dut_client(self):
-        """
-        Overridden from superclass.
-        """
-        return ssh_host.SSHHost("localhost", port=self.ssh_tunnel_port)
-
-    def run_factory_install(self, local_hdb):
-        """
-        Overridden from superclass.
-        """
-        self.hda = os.path.join(self.client.get_tmp_dir(), _HDA_FILENAME)
-
-        if self.reuse_hda is not None:
-            self.client.run("cp %s %s" % (self.reuse_hda, self.hda))
-        else:
-            # Mount partition 12 of the image and modify it to enable serial
-            # logging.
-            mount = self._mount_partition(local_hdb, 12)
-            self._modify_file(
-                os.path.join(mount, "syslinux/usb.A.cfg"),
-                lambda contents: re.sub(r"console=\w+", "console=ttyS0",
-                                        contents))
-            self._umount_partition(mount)
-
-            # On the client, create a nice big sparse file for hda
-            # (a.k.a. the SSD).
-            self.client.run("truncate -s %dM %s" % (_HDA_SIZE_MB, self.hda))
-            hdb = os.path.join(self.client.get_tmp_dir(), "hdb.bin")
-            self.client.send_file(local_hdb, hdb)
-
-            # Fire up the KVM and wait for the factory install to complete.
-            self._kill_kvm()  # Just in case
-            self.client.run_grep(
-                self._get_kvm_command(
-                    ["-drive", "file=%s,boot=off" % self.hda,
-                     "-drive", "file=%s,boot=on" % hdb,
-                     "-no-reboot"]),
-                timeout=FactoryInstallTest.FACTORY_INSTALL_TIMEOUT_SEC,
-                stdout_ok_regexp="Factory Installer Complete")
-            self._kill_kvm()
-
-            if self.save_hda is not None:
-                self.client.run("cp %s %s" % (self.hda, self.save_hda))
-
-        # Run KVM again (the factory tests should now come up).
-        kvm = self.client.run(self._get_kvm_command([
-                    "-hda", self.hda,
-                    "-daemonize",
-                    "-no-reboot"]))
-
-    def reboot_for_wipe(self):
-        """
-        Overridden from superclass.
-        """
-        # Use halt instead of reboot; reboot doesn't work consistently in KVM.
-        self.get_dut_client().halt()
-        self._kill_kvm()
-
-        # Start KVM again.  The ChromeOS test image should now come up.
-        kvm = self.client.run(
-            self._get_kvm_command(["-hda", self.hda, "-daemonize"]))
-
-    def run_once(self, host, debug_reuse_hda=None, debug_save_hda=None,
-                 debug_vnc=False, **args):
-        self.client = host
-        self.reuse_hda = debug_reuse_hda
-        self.save_hda = debug_save_hda
-        self.vnc = self.parse_boolean(debug_vnc)
-
-        self.cleanup_tasks.append(self._kill_kvm)
-
-        super(factory_InstallVM, self).run_once(**args)
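
The deleted factory_InstallVM.py above shuts its emulator down by matching the unique hda filename with pgrep/pkill and polling until pgrep reports no matches (exit status 1). A standalone sketch of that pattern using only the standard library; the original issues the same commands through host.run and client_utils.poll_for_condition:

import subprocess
import time

def kill_matching_processes(pattern, timeout_secs=5, poll_secs=0.5):
    """pkill -f processes matching `pattern` until pgrep finds none.

    pgrep exits 1 when nothing matches, which is the "all gone" signal
    the removed test relied on.
    """
    deadline = time.time() + timeout_secs
    while time.time() < deadline:
        if subprocess.run(['pgrep', '-f', pattern],
                          stdout=subprocess.DEVNULL).returncode == 1:
            return
        subprocess.run(['pkill', '-f', pattern])
        time.sleep(poll_secs)
    raise TimeoutError('processes matching %r did not exit' % pattern)

# Example (hypothetical): kill_matching_processes('^kvm.*factory_InstallVM_hda.bin')
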
diff --git a/server/site_tests/firmware_BaseECKeyboard/control b/server/site_tests/firmware_BaseECKeyboard/control
index 08382cb..46fdba9 100644
--- a/server/site_tests/firmware_BaseECKeyboard/control
+++ b/server/site_tests/firmware_BaseECKeyboard/control
@@ -4,14 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_BaseECKeyboard"
 PURPOSE = "Servo-based BaseEC keyboard test"
 CRITERIA = "This test will fail if BaseEC keyboard misbehaved."
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 The base should be connected to the servo v4 board through an extra
diff --git a/server/site_tests/firmware_Bmpblk/control b/server/site_tests/firmware_Bmpblk/control
index ebd0ce1..ef130ca 100644
--- a/server/site_tests/firmware_Bmpblk/control
+++ b/server/site_tests/firmware_Bmpblk/control
@@ -6,13 +6,14 @@
 NAME = "firmware_Bmpblk"
 PURPOSE = "Check that the firmware screens have been correctly configured"
 CRITERIA = "This test will fail if firmware screens have not been explicitly scaled for this device"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv4, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv4, suite:faft_bios_tot, suite:distributed_lab_qual_faft"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks whether the BIOS was built with a correctly configured bmpblk
diff --git a/server/site_tests/firmware_Bmpblk/firmware_Bmpblk.py b/server/site_tests/firmware_Bmpblk/firmware_Bmpblk.py
index c97844b..3b21652 100644
--- a/server/site_tests/firmware_Bmpblk/firmware_Bmpblk.py
+++ b/server/site_tests/firmware_Bmpblk/firmware_Bmpblk.py
@@ -30,14 +30,14 @@
         logging.debug('cbfstool layout output:\n\n%s', layout)
         print_cbfs_cmd_options=''
         if 'BOOT_STUB' in layout:
-          print_cbfs_cmd_options=' -r BOOT_STUB'
+            print_cbfs_cmd_options=' -r BOOT_STUB'
         try:
             files = self.faft_client.system.run_shell_command_get_output(
                     PRINT_CBFS_CMD + print_cbfs_cmd_options)
             files = '\n'.join(files)
             logging.debug('cbfstool print output:\n\n%s', files)
             if 'romstage' not in files:
-                raise error.TestError("Sanity check failed: Can't read CBFS")
+                raise error.TestError("Quick check failed: Can't read CBFS")
             if 'vbgfx.bin' not in files:
                 raise error.TestNAError('This board has no firmware screens')
             if 'vbgfx_not_scaled' in files:
diff --git a/server/site_tests/firmware_CgptStress/control b/server/site_tests/firmware_CgptStress/control
index 8bd2184..0bc7736 100644
--- a/server/site_tests/firmware_CgptStress/control
+++ b/server/site_tests/firmware_CgptStress/control
@@ -4,15 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CgptStress"
 PURPOSE = "Servo based cgpt stress test"
 CRITERIA = "This test will fail if changing cgpt fails to switch kernel"
 ATTRIBUTES = "suite:faft_normal, suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that changing
diff --git a/server/site_tests/firmware_CgptStress/control.dev b/server/site_tests/firmware_CgptStress/control.dev
index 147dba7..7fec8d7 100644
--- a/server/site_tests/firmware_CgptStress/control.dev
+++ b/server/site_tests/firmware_CgptStress/control.dev
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CgptStress.dev"
 PURPOSE = "Servo based cgpt stress test"
 CRITERIA = "This test will fail if changing cgpt fails to switch kernel"
 ATTRIBUTES = "suite:faft_dev, suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that changing
diff --git a/server/site_tests/firmware_CgptStress/firmware_CgptStress.py b/server/site_tests/firmware_CgptStress/firmware_CgptStress.py
index 652e60a..82a0b5b 100644
--- a/server/site_tests/firmware_CgptStress/firmware_CgptStress.py
+++ b/server/site_tests/firmware_CgptStress/firmware_CgptStress.py
@@ -21,7 +21,8 @@
         self.faft_iterations = int(dict_args.get('faft_iterations', 1))
         super(firmware_CgptStress, self).initialize(host, cmdline_args)
         self.backup_cgpt_attributes()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
         self.setup_kernel('a')
 
@@ -34,7 +35,7 @@
 
     def run_once(self):
         """Runs a single iteration of the test."""
-        for i in xrange(self.faft_iterations):
+        for i in range(self.faft_iterations):
             logging.info('======== Running FAFT ITERATION %d/%s ========',
                          i + 1, self.faft_iterations)
             logging.info("Expected kernel A boot and prioritize kernel B.")
diff --git a/server/site_tests/firmware_ChipFwUpdate/control b/server/site_tests/firmware_ChipFwUpdate/control
index 64dbf85..756ea52 100644
--- a/server/site_tests/firmware_ChipFwUpdate/control
+++ b/server/site_tests/firmware_ChipFwUpdate/control
@@ -2,14 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ChipFwUpdate"
 PURPOSE = "TCPC firmware update test."
 CRITERIA = "This test will pass if the updated bios.bin booted successfully."
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test replaces chip (i.e. TCPC) firmware on the DUT's bios.bin
diff --git a/server/site_tests/firmware_ChipFwUpdate/firmware_ChipFwUpdate.py b/server/site_tests/firmware_ChipFwUpdate/firmware_ChipFwUpdate.py
index be1b433..04dc42a 100644
--- a/server/site_tests/firmware_ChipFwUpdate/firmware_ChipFwUpdate.py
+++ b/server/site_tests/firmware_ChipFwUpdate/firmware_ChipFwUpdate.py
@@ -73,7 +73,7 @@
 
         self.clear_set_gbb_flags(
             vboot.GBB_FLAG_DISABLE_EC_SOFTWARE_SYNC |
-            vboot.GBB_FLAG_DISABLE_PD_SOFTWARE_SYNC, 0)
+            vboot.GBB_FLAG_DISABLE_AUXFW_SOFTWARE_SYNC, 0)
 
         self.dut_bios_path = None
         self.cbfs_work_dir = None
@@ -126,7 +126,7 @@
         cbfs_work_dir.
         """
 
-        for chip in self.req_chip_updates.itervalues():
+        for chip in self.req_chip_updates.values():
             logging.info('checking for %s firmware in CBFS', chip.chip_name)
 
             if not self.faft_client.updater.cbfs_extract_chip(
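
Several firmware hunks above carry the same Python 3 migration that the new PY_VERSION = 3 control entries declare: xrange() becomes range() and dict.itervalues() becomes dict.values(). A one-glance illustration of the before/after idioms (illustrative data only, not taken from any file in this change):

chips = {'port0': 'ps8751', 'port1': 'anx3429'}  # hypothetical chip map

# Python 2 idioms removed by these hunks:
#     for i in xrange(self.faft_iterations): ...
#     for chip in self.req_chip_updates.itervalues(): ...

# Python 3 replacements, as used in the updated tests:
for i in range(len(chips)):      # xrange() -> range()
    pass
for chip in chips.values():      # itervalues() -> values()
    pass
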
diff --git a/server/site_tests/firmware_ClearTPMOwnerAndReset/control b/server/site_tests/firmware_ClearTPMOwnerAndReset/control
index 259778f..afd64ce 100644
--- a/server/site_tests/firmware_ClearTPMOwnerAndReset/control
+++ b/server/site_tests/firmware_ClearTPMOwnerAndReset/control
@@ -7,7 +7,9 @@
 PURPOSE = "Verify normal boot after clearing the tpm owner and rebooting the ec"
 TIME = "SHORT"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test verifies the device doesn't get into a bad state when the EC is reset
diff --git a/server/site_tests/firmware_ClearTPMOwnerAndReset/control.stress b/server/site_tests/firmware_ClearTPMOwnerAndReset/control.stress
index de80a4c..8506b20 100644
--- a/server/site_tests/firmware_ClearTPMOwnerAndReset/control.stress
+++ b/server/site_tests/firmware_ClearTPMOwnerAndReset/control.stress
@@ -9,6 +9,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test verifies the device doesn't get into a bad state when the EC is reset
diff --git a/server/site_tests/firmware_CompareChipFwToShellBall/control b/server/site_tests/firmware_CompareChipFwToShellBall/control
index 6a2a3e0..d6cb4c0 100644
--- a/server/site_tests/firmware_CompareChipFwToShellBall/control
+++ b/server/site_tests/firmware_CompareChipFwToShellBall/control
@@ -2,14 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CompareChipFwToShellBall"
 PURPOSE = "Compare the chip firmware versions to those in the shellball"
 CRITERIA = "This test will pass if installed and available firmware match."
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test compares the installed chip firmware versions to those in the
diff --git a/server/site_tests/firmware_CompareChipFwToShellBall/firmware_CompareChipFwToShellBall.py b/server/site_tests/firmware_CompareChipFwToShellBall/firmware_CompareChipFwToShellBall.py
index 6177d3c..0a3f6bd 100644
--- a/server/site_tests/firmware_CompareChipFwToShellBall/firmware_CompareChipFwToShellBall.py
+++ b/server/site_tests/firmware_CompareChipFwToShellBall/firmware_CompareChipFwToShellBall.py
@@ -94,7 +94,7 @@
 
         chip_types = set()
         port2chip = []
-        for port in xrange(self.MAXPORTS):
+        for port in range(self.MAXPORTS):
             chip = self.dut_get_chip(port)
             if not chip:
                 return (chip_types, port2chip)
@@ -125,7 +125,7 @@
 
         Finds bios.bin on the DUT and sets up a temp dir to operate on
         bios.bin.  If a bios.bin was specified, it is copied to the DUT
-        and used instead of the native bios.bin.
+        and used instead of the builtin bios.bin.
         """
 
         cbfs_path = self.faft_client.updater.cbfs_setup_work_dir()
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control b/server/site_tests/firmware_ConsecutiveBoot/control
index 0ea700f..fff5b74 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/control
+++ b/server/site_tests/firmware_ConsecutiveBoot/control
@@ -4,19 +4,22 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBoot"
 PURPOSE = "Servo based consecutive boot test"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later.
+does boot into ChromeOS and then does power off later.
 
 The iteration should be specified by the parameter -a "faft_iterations=10".
 """
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control.100 b/server/site_tests/firmware_ConsecutiveBoot/control.100
index 78c06f3..93dd0f5 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/control.100
+++ b/server/site_tests/firmware_ConsecutiveBoot/control.100
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBoot.100"
 PURPOSE = "Servo based consecutive boot test (100 iterations)"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "LONG"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later.
+does boot into ChromeOS and then does power off later.
 
 Runs 100 boot iterations.
 """
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control.1000 b/server/site_tests/firmware_ConsecutiveBoot/control.1000
index a00694f..5850e12 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/control.1000
+++ b/server/site_tests/firmware_ConsecutiveBoot/control.1000
@@ -4,19 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBoot.1000"
 PURPOSE = "Servo based consecutive boot test (1000 iterations)"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "LONG"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later.
+does boot into ChromeOS and then does power off later.
 
 Runs 1000 iterations.
 """
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control.2500 b/server/site_tests/firmware_ConsecutiveBoot/control.2500
index 4e0ba62..b877ef1 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/control.2500
+++ b/server/site_tests/firmware_ConsecutiveBoot/control.2500
@@ -4,19 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBoot.2500"
 PURPOSE = "Servo based consecutive boot test (2500 iterations)"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "LONG"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later.
+does boot into ChromeOS and then does power off later.
 
 Runs 2500 iterations.
 """
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control.500 b/server/site_tests/firmware_ConsecutiveBoot/control.500
index e3b39ff..b5f2d03 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/control.500
+++ b/server/site_tests/firmware_ConsecutiveBoot/control.500
@@ -4,19 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBoot.500"
 PURPOSE = "Servo based consecutive boot test (500 iterations)"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later.
+does boot into ChromeOS and then does power off later.
 
 Runs 500 iterations.
 """
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control.5000 b/server/site_tests/firmware_ConsecutiveBoot/control.5000
index 9dbbe6a..1150bd6 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/control.5000
+++ b/server/site_tests/firmware_ConsecutiveBoot/control.5000
@@ -4,19 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBoot.5000"
 PURPOSE = "Servo based consecutive boot test (5000 iterations)"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "LONG"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later.
+does boot into ChromeOS and then does power off later.
 
 Runs 5000 iterations.
 """
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control.dev b/server/site_tests/firmware_ConsecutiveBoot/control.dev
index 3c9f791..5e84771 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/control.dev
+++ b/server/site_tests/firmware_ConsecutiveBoot/control.dev
@@ -4,19 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBoot.dev"
 PURPOSE = "Servo based consecutive boot test"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_dev, suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later in developer mode.
+does boot into ChromeOS and then does power off later in developer mode.
 
 The iteration should be specified by the parameter -a "faft_iterations=10".
 """
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control.dev.100 b/server/site_tests/firmware_ConsecutiveBoot/control.dev.100
index 9dbf999..9914e01 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/control.dev.100
+++ b/server/site_tests/firmware_ConsecutiveBoot/control.dev.100
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBoot.dev.100"
 PURPOSE = "Servo based consecutive boot test (100 iterations)"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
@@ -14,10 +14,11 @@
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later in developer mode.
+does boot into ChromeOS and then does power off later in developer mode.
 
 Runs 100 boot iterations.
 """
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control.dev.1000 b/server/site_tests/firmware_ConsecutiveBoot/control.dev.1000
index d47f70d..f5b1554 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/control.dev.1000
+++ b/server/site_tests/firmware_ConsecutiveBoot/control.dev.1000
@@ -4,19 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBoot.dev.1000"
 PURPOSE = "Servo based consecutive boot test"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_dev, suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "LONG"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later in developer mode.
+does boot into ChromeOS and then does power off later in developer mode.
 
 Runs 1000 iterations in dev mode.
 """
diff --git a/server/site_tests/firmware_ConsecutiveBoot/control.dev.500 b/server/site_tests/firmware_ConsecutiveBoot/control.dev.500
index 908791b..d649047 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/control.dev.500
+++ b/server/site_tests/firmware_ConsecutiveBoot/control.dev.500
@@ -4,19 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBoot.dev.500"
 PURPOSE = "Servo based consecutive boot test"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_dev, suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later in developer mode.
+does boot into ChromeOS and then does power off later in developer mode.
 
 Runs 500 iterations in dev mode.
 """
diff --git a/server/site_tests/firmware_ConsecutiveBoot/firmware_ConsecutiveBoot.py b/server/site_tests/firmware_ConsecutiveBoot/firmware_ConsecutiveBoot.py
index 83c4cf3..4c001fd 100644
--- a/server/site_tests/firmware_ConsecutiveBoot/firmware_ConsecutiveBoot.py
+++ b/server/site_tests/firmware_ConsecutiveBoot/firmware_ConsecutiveBoot.py
@@ -5,6 +5,7 @@
 import logging
 import time
 
+from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
 from autotest_lib.server.cros import vboot_constants as vboot
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
@@ -17,7 +18,7 @@
     /sbin/shutdown command to turn off DUT.
 
     This test is intended to be run with many iterations to ensure that the DUT
-    does boot into Chrome OS and then does power off later.
+    does boot into ChromeOS and then does power off later.
 
     The iteration should be specified by the parameter -a "faft_iterations=10".
     """
@@ -33,11 +34,28 @@
         self.faft_waitup_time = int(dict_args.get('faft_waitup_time', 0))
         self.faft_localrun = int(dict_args.get('faft_localrun', 0))
         super(firmware_ConsecutiveBoot, self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.console_checker()
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         if dev_mode:
-          self.clear_set_gbb_flags(0, vboot.GBB_FLAG_DEV_SCREEN_SHORT_DELAY)
+            self.clear_set_gbb_flags(0, vboot.GBB_FLAG_DEV_SCREEN_SHORT_DELAY)
         self.setup_usbkey(usbkey=False)
 
+    def console_checker(self):
+        """Verify EC console is available if using Chrome EC."""
+        if not self.check_ec_capability(suppress_warning=True):
+            # Not Chrome EC. Nothing to check.
+            return True
+        try:
+            if self.ec.get_version():
+                return True
+        except:
+            pass
+
+        raise error.TestFail(
+                "Failed EC console check. Maybe CCD close. Please check ccd open state."
+        )
+
     def wait_for_client_aux(self):
         """Use test specific timeout to wait for system to come up,
            otherwise use default (180s).
@@ -67,7 +85,7 @@
                     self.POWER_STATE_G3, pwr_retries=13, orig_boot_id=boot_id)
 
         # Retry in case power_short_press was not registered.
-        for i in xrange(self.POWER_ON_RETRY):
+        for i in range(self.POWER_ON_RETRY):
             logging.info("sleep %d, tap power key to boot.",
                          self.faft_config.powerup_ready)
             time.sleep(self.faft_config.powerup_ready)
@@ -91,7 +109,7 @@
 
     def run_once(self, host, dev_mode=False):
         """Runs a single iteration of the test."""
-        for i in xrange(self.faft_iterations):
+        for i in range(self.faft_iterations):
             logging.info('======== Running FAFT ITERATION %d/%s ========',
                          i+1, self.faft_iterations)
             logging.info("Expected boot fine, full power off DUT and on.")
diff --git a/server/site_tests/firmware_ConsecutiveBootPowerButton/control b/server/site_tests/firmware_ConsecutiveBootPowerButton/control
index 7b9eedc..350faf4 100644
--- a/server/site_tests/firmware_ConsecutiveBootPowerButton/control
+++ b/server/site_tests/firmware_ConsecutiveBootPowerButton/control
@@ -4,19 +4,22 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBootPowerButton"
 PURPOSE = "Servo based consecutive boot test"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later.
+does boot into ChromeOS and then does power off later.
 
 The iteration should be specified by the parameter -a "faft_iterations=10".
 """
diff --git a/server/site_tests/firmware_ConsecutiveBootPowerButton/control.100 b/server/site_tests/firmware_ConsecutiveBootPowerButton/control.100
index 52ad5ee..2e801d1 100644
--- a/server/site_tests/firmware_ConsecutiveBootPowerButton/control.100
+++ b/server/site_tests/firmware_ConsecutiveBootPowerButton/control.100
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBootPowerButton.100"
 PURPOSE = "Servo based consecutive boot test (100 iterations)"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "LONG"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later.
+does boot into ChromeOS and then does power off later.
 
 Runs 100 boot iterations.
 """
diff --git a/server/site_tests/firmware_ConsecutiveBootPowerButton/control.dev b/server/site_tests/firmware_ConsecutiveBootPowerButton/control.dev
index 39d598d..fe3485a 100644
--- a/server/site_tests/firmware_ConsecutiveBootPowerButton/control.dev
+++ b/server/site_tests/firmware_ConsecutiveBootPowerButton/control.dev
@@ -4,19 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBootPowerButton.dev"
 PURPOSE = "Servo based consecutive boot test"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_stress,suite:faft_dev"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later in developer mode.
+does boot into ChromeOS and then does power off later in developer mode.
 
 The iteration should be specified by the parameter -a "faft_iterations=10".
 """
diff --git a/server/site_tests/firmware_ConsecutiveBootPowerButton/control.dev.100 b/server/site_tests/firmware_ConsecutiveBootPowerButton/control.dev.100
index 31f6c0b..bbad912 100644
--- a/server/site_tests/firmware_ConsecutiveBootPowerButton/control.dev.100
+++ b/server/site_tests/firmware_ConsecutiveBootPowerButton/control.dev.100
@@ -4,19 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveBootPowerButton.dev.100"
 PURPOSE = "Servo based consecutive boot test"
 CRITERIA = "This test will fail if DUT fails to boot from power-off"
 ATTRIBUTES = "suite:faft_dev, suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "LONG"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the DUT
-does boot into Chrome OS and then does power off later.
+does boot into ChromeOS and then does power off later.
 
 The iteration should be specified by the parameter -a "faft_iterations=10".
 """
diff --git a/server/site_tests/firmware_ConsecutiveBootPowerButton/firmware_ConsecutiveBootPowerButton.py b/server/site_tests/firmware_ConsecutiveBootPowerButton/firmware_ConsecutiveBootPowerButton.py
index e826e49..2d7bc03 100644
--- a/server/site_tests/firmware_ConsecutiveBootPowerButton/firmware_ConsecutiveBootPowerButton.py
+++ b/server/site_tests/firmware_ConsecutiveBootPowerButton/firmware_ConsecutiveBootPowerButton.py
@@ -13,7 +13,7 @@
     Servo based consecutive boot test via power button for both on and off.
 
     This test is intended to be run with many iterations to ensure that the DUT
-    does boot into Chrome OS and then does power off later.
+    does boot into ChromeOS and then does power off later.
 
     The iteration should be specified by the parameter -a "faft_iterations=10".
     """
@@ -26,13 +26,14 @@
         self.faft_iterations = int(dict_args.get('faft_iterations', 1))
         super(firmware_ConsecutiveBootPowerButton,
               self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
 
 
     def run_once(self, dev_mode=False):
         """Runs a single iteration of the test."""
-        for i in xrange(self.faft_iterations):
+        for i in range(self.faft_iterations):
             logging.info('======== Running FAFT ITERATION %d/%s ========',
                          i+1, self.faft_iterations)
             logging.info("Expected boot fine, full power off DUT and on.")
diff --git a/server/site_tests/firmware_ConsecutiveLidSwitch/control b/server/site_tests/firmware_ConsecutiveLidSwitch/control
index a5b9768..9b814dc 100644
--- a/server/site_tests/firmware_ConsecutiveLidSwitch/control
+++ b/server/site_tests/firmware_ConsecutiveLidSwitch/control
@@ -4,15 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveLidSwitch"
 PURPOSE = "Servo based consecutive lid switch test"
 CRITERIA = "This test will fail if DUT fails to suspend/resume"
 ATTRIBUTES = "suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that closing
diff --git a/server/site_tests/firmware_ConsecutiveLidSwitch/control.100 b/server/site_tests/firmware_ConsecutiveLidSwitch/control.100
index b501d76..5175e96 100644
--- a/server/site_tests/firmware_ConsecutiveLidSwitch/control.100
+++ b/server/site_tests/firmware_ConsecutiveLidSwitch/control.100
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ConsecutiveLidSwitch.100"
 PURPOSE = "Servo based consecutive lid switch test"
 CRITERIA = "This test will fail if DUT fails to suspend/resume"
 ATTRIBUTES = "suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that closing
diff --git a/server/site_tests/firmware_ConsecutiveLidSwitch/firmware_ConsecutiveLidSwitch.py b/server/site_tests/firmware_ConsecutiveLidSwitch/firmware_ConsecutiveLidSwitch.py
index e241582..02042c8 100644
--- a/server/site_tests/firmware_ConsecutiveLidSwitch/firmware_ConsecutiveLidSwitch.py
+++ b/server/site_tests/firmware_ConsecutiveLidSwitch/firmware_ConsecutiveLidSwitch.py
@@ -54,7 +54,7 @@
 
         original_boot_id = host.get_boot_id()
 
-        for i in xrange(self.faft_iterations):
+        for i in range(self.faft_iterations):
             logging.info('======== Running FAFT ITERATION %d/%d ========',
                          i + 1, self.faft_iterations)
 
diff --git a/server/site_tests/firmware_CorruptBothFwBodyAB/control b/server/site_tests/firmware_CorruptBothFwBodyAB/control
index 5afef9c..8b00195 100644
--- a/server/site_tests/firmware_CorruptBothFwBodyAB/control
+++ b/server/site_tests/firmware_CorruptBothFwBodyAB/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptBothFwBodyAB"
 PURPOSE = "Servo based both firmware body A and B corruption test"
 CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test corrupts both firmware body A and B and checks the next boot results.
@@ -25,7 +26,7 @@
 not load and verify the RW firmware body. In the case USE_RO_NORMAL OFF,
 the firmware verification fails on loading RW firmware and enters recovery
 mode. In this case, it requires a USB disk plugged-in, which contains a
-Chrome OS test image (built by "build_image --test").
+ChromeOS test image (built by "build_image --test").
 """
 
 args_dict = utils.args_to_dict(args)
diff --git a/server/site_tests/firmware_CorruptBothFwBodyAB/control.dev b/server/site_tests/firmware_CorruptBothFwBodyAB/control.dev
index 3c00d9a..deaa6f7 100644
--- a/server/site_tests/firmware_CorruptBothFwBodyAB/control.dev
+++ b/server/site_tests/firmware_CorruptBothFwBodyAB/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptBothFwBodyAB.dev"
 PURPOSE = "Servo based both firmware body A and B corruption test"
 CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test corrupts both firmware body A and B and checks the next boot results.
@@ -25,7 +25,7 @@
 not load and verify the RW firmware body. In the case USE_RO_NORMAL OFF,
 the firmware verification fails on loading RW firmware and enters recovery
 mode. In this case, it requires a USB disk plugged-in, which contains a
-Chrome OS test image (built by "build_image --test").
+ChromeOS test image (built by "build_image --test").
 """
 
 args_dict = utils.args_to_dict(args)
diff --git a/server/site_tests/firmware_CorruptBothFwBodyAB/firmware_CorruptBothFwBodyAB.py b/server/site_tests/firmware_CorruptBothFwBodyAB/firmware_CorruptBothFwBodyAB.py
index fcf6362..f99b57f 100644
--- a/server/site_tests/firmware_CorruptBothFwBodyAB/firmware_CorruptBothFwBodyAB.py
+++ b/server/site_tests/firmware_CorruptBothFwBodyAB/firmware_CorruptBothFwBodyAB.py
@@ -14,7 +14,7 @@
 
     The firmware verification fails on loading RW firmware and enters recovery
     mode. It requires a USB disk plugged-in, which contains a
-    Chrome OS test image (built by "build_image --test").
+    ChromeOS test image (built by "build_image --test").
     """
     version = 1
     NEEDS_SERVO_USB = True
@@ -41,8 +41,10 @@
         self.check_state((self.checkers.crossystem_checker, {
                     'mainfw_type': 'developer' if dev_mode else 'normal',
                     }))
-        self.faft_client.bios.corrupt_body('a')
-        self.faft_client.bios.corrupt_body('b')
+        offset_a, byte_a = self.faft_client.bios.get_body_one_byte('a')
+        offset_b, byte_b = self.faft_client.bios.get_body_one_byte('b')
+        self.faft_client.bios.modify_body('a', offset_a, byte_a + 1)
+        self.faft_client.bios.modify_body('b', offset_b, byte_b + 1)
 
         # Older devices (without BROKEN screen) didn't wait for removal in
         # dev mode. Make sure the USB key is not plugged in so they won't
@@ -59,8 +61,8 @@
                               (vboot.RECOVERY_REASON['RO_INVALID_RW'],
                               vboot.RECOVERY_REASON['RW_VERIFY_BODY']),
                               }))
-        self.faft_client.bios.restore_body('a')
-        self.faft_client.bios.restore_body('b')
+        self.faft_client.bios.modify_body('a', offset_a, byte_a)
+        self.faft_client.bios.modify_body('b', offset_b, byte_b)
         self.switcher.mode_aware_reboot()
 
         logging.info("Expected normal boot, done.")
diff --git a/server/site_tests/firmware_CorruptBothFwSigAB/control b/server/site_tests/firmware_CorruptBothFwSigAB/control
index 61b83b0..d7539d8 100644
--- a/server/site_tests/firmware_CorruptBothFwSigAB/control
+++ b/server/site_tests/firmware_CorruptBothFwSigAB/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptBothFwSigAB"
 PURPOSE = "Servo based both firmware signature A and B corruption test"
 CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test corrupts
 both firmware signature A and B. On next reboot, the firmware verification
 fails and enters recovery mode. This test then checks the success of the
diff --git a/server/site_tests/firmware_CorruptBothFwSigAB/control.dev b/server/site_tests/firmware_CorruptBothFwSigAB/control.dev
index d423511..46b06db 100644
--- a/server/site_tests/firmware_CorruptBothFwSigAB/control.dev
+++ b/server/site_tests/firmware_CorruptBothFwSigAB/control.dev
@@ -4,20 +4,20 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptBothFwSigAB.dev"
 PURPOSE = "Servo based both firmware signature A and B corruption test"
 CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test corrupts
 both firmware signature A and B. On next reboot, the firmware verification
 fails and enters recovery mode. This test then checks the success of the
diff --git a/server/site_tests/firmware_CorruptBothFwSigAB/firmware_CorruptBothFwSigAB.py b/server/site_tests/firmware_CorruptBothFwSigAB/firmware_CorruptBothFwSigAB.py
index 5b3bed7..1f66fce 100644
--- a/server/site_tests/firmware_CorruptBothFwSigAB/firmware_CorruptBothFwSigAB.py
+++ b/server/site_tests/firmware_CorruptBothFwSigAB/firmware_CorruptBothFwSigAB.py
@@ -12,7 +12,7 @@
     """
     Servo based both firmware signature A and B corruption test.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image --test"). On runtime, this test corrupts
     both firmware signature A and B. On next reboot, the firmware verification
     fails and enters recovery mode. This test then checks the success of the
@@ -43,8 +43,10 @@
         self.check_state((self.checkers.crossystem_checker, {
                           'mainfw_type': 'developer' if dev_mode else 'normal',
                           }))
-        self.faft_client.bios.corrupt_sig('a')
-        self.faft_client.bios.corrupt_sig('b')
+        offset_a, byte_a = self.faft_client.bios.get_sig_one_byte('a')
+        offset_b, byte_b = self.faft_client.bios.get_sig_one_byte('b')
+        self.faft_client.bios.modify_sig('a', offset_a, byte_a + 1)
+        self.faft_client.bios.modify_sig('b', offset_b, byte_b + 1)
 
         # Older devices (without BROKEN screen) didn't wait for removal in
         # dev mode. Make sure the USB key is not plugged in so they won't
@@ -75,8 +77,8 @@
                               vboot.RECOVERY_REASON['RO_INVALID_RW'],
                               vboot.RECOVERY_REASON['RW_VERIFY_KEYBLOCK']),
                           }))
-        self.faft_client.bios.restore_sig('a')
-        self.faft_client.bios.restore_sig('b')
+        self.faft_client.bios.modify_sig('a', offset_a, byte_a)
+        self.faft_client.bios.modify_sig('b', offset_b, byte_b)
         self.switcher.mode_aware_reboot()
 
         logging.info("Expected normal boot, done.")
diff --git a/server/site_tests/firmware_CorruptBothKernelAB/control b/server/site_tests/firmware_CorruptBothKernelAB/control
index 9c2f4cf..d6b1614 100644
--- a/server/site_tests/firmware_CorruptBothKernelAB/control
+++ b/server/site_tests/firmware_CorruptBothKernelAB/control
@@ -4,22 +4,23 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptBothKernelAB"
 PURPOSE = "Servo based both kernel A and B corruption test"
 CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test corrupts both kernel A and B and checks for recovery boot.
 
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test corrupts
 both kernel A and B. On next reboot, the kernel verification fails
 and enters recovery mode. This test then checks the success of the
diff --git a/server/site_tests/firmware_CorruptBothKernelAB/control.dev b/server/site_tests/firmware_CorruptBothKernelAB/control.dev
deleted file mode 100644
index 8cfcaca..0000000
--- a/server/site_tests/firmware_CorruptBothKernelAB/control.dev
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "firmware_CorruptBothKernelAB.dev"
-PURPOSE = "Servo based both kernel A and B corruption test"
-CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "firmware"
-TEST_TYPE = "server"
-JOB_RETRIES = 4
-
-DOC = """
-This test corrupts both kernel A and B and and checks for recovery boot.
-
-This test requires a USB disk plugged-in, which contains a Chrome OS test
-image (built by "build_image --test"). On runtime, this test corrupts
-both kernel A and B. On next reboot, the kernel verification fails
-and enters recovery mode. This test then checks the success of the
-recovery boot.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_corruptbothkernelab(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("firmware_CorruptBothKernelAB", host=host, cmdline_args=args,
-                 disable_sysinfo=True, dev_mode=True, tag="dev")
-
-parallel_simple(run_corruptbothkernelab, machines)
diff --git a/server/site_tests/firmware_CorruptBothKernelAB/firmware_CorruptBothKernelAB.py b/server/site_tests/firmware_CorruptBothKernelAB/firmware_CorruptBothKernelAB.py
index 855597c..92f9e24 100644
--- a/server/site_tests/firmware_CorruptBothKernelAB/firmware_CorruptBothKernelAB.py
+++ b/server/site_tests/firmware_CorruptBothKernelAB/firmware_CorruptBothKernelAB.py
@@ -12,7 +12,7 @@
     """
     Servo based both kernel A and B corruption test.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image --test"). On runtime, this test corrupts
     both kernel A and B. On next reboot, the kernel verification fails
     and enters recovery mode. This test then checks the success of the
diff --git a/server/site_tests/firmware_CorruptBothMiniosAB/control b/server/site_tests/firmware_CorruptBothMiniosAB/control
new file mode 100644
index 0000000..b07f7a5
--- /dev/null
+++ b/server/site_tests/firmware_CorruptBothMiniosAB/control
@@ -0,0 +1,34 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_CorruptBothMiniosAB"
+PURPOSE = "Corrupt both MiniOS kernels, and ensure that we can not boot MiniOS"
+CRITERIA = "This test will fail if the device boots to MiniOS when both MiniOS kernels are corrupted"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv4, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+DOC = """
+This test requires the device to support MiniOS. On runtime, this test uses
+the KernelHandler to corrupt both MiniOS partitions, tries to boot MiniOS from
+the firmware manual recovery screen, and expects a failed boot.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_CorruptBothMiniosAB", host=host, cmdline_args=args,
+                 disable_sysinfo=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_CorruptBothMiniosAB/firmware_CorruptBothMiniosAB.py b/server/site_tests/firmware_CorruptBothMiniosAB/firmware_CorruptBothMiniosAB.py
new file mode 100644
index 0000000..6f54d08
--- /dev/null
+++ b/server/site_tests/firmware_CorruptBothMiniosAB/firmware_CorruptBothMiniosAB.py
@@ -0,0 +1,72 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+
+
+class firmware_CorruptBothMiniosAB(FirmwareTest):
+    """
+    Servo based corrupt minios test.
+
+    This test requires the device to support MiniOS. On runtime, this test
+    uses the KernelHandler to corrupt both MiniOS partitions, tries to boot
+    MiniOS from the firmware manual recovery screen, and expects a failed boot.
+    """
+    version = 1
+
+    def initialize(self, host, cmdline_args):
+        super(firmware_CorruptBothMiniosAB,
+              self).initialize(host, cmdline_args)
+
+        self.test_skipped = True
+        if not self.menu_switcher:
+            raise error.TestNAError('Test skipped for menuless UI')
+        if not self.faft_config.chrome_ec:
+            raise error.TestNAError('Cannot check power state without EC')
+        if not self.faft_config.minios_enabled:
+            raise error.TestNAError('MiniOS is not enabled for this board')
+        self.test_skipped = False
+
+        self.backup_kernel(kernel_type='MINIOS')
+
+        self.host = host
+        self.switcher.setup_mode('normal')
+        self.setup_usbkey(usbkey=True, host=True, used_for_recovery=True)
+
+    def cleanup(self):
+        if not self.test_skipped:
+            try:
+                self.switcher.leave_minios()
+                self.restore_kernel(kernel_type='MINIOS')
+            except Exception as e:
+                logging.error('Caught exception: %s', str(e))
+        super(firmware_CorruptBothMiniosAB, self).cleanup()
+
+    def run_once(self):
+        """Run a single iteration of the test."""
+        logging.info('Corrupt both MiniOS sections')
+        self.faft_client.minios.corrupt_sig('a')
+        self.faft_client.minios.corrupt_sig('b')
+
+        # Try to boot to MiniOS and expect a failed boot
+        self.switcher.launch_minios()
+        logging.info('DUT should fail to boot MiniOS, verifying...')
+        if self.host.ping_wait_up(
+                timeout=self.faft_config.delay_reboot_to_ping):
+            raise error.TestFail('DUT should not come back up!')
+
+        # Verify that DUT stayed in recovery screen by trying a USB boot
+        logging.info('Boot from USB to verify that DUT stayed in recovery')
+        self.servo.switch_usbkey('dut')
+        self.switcher.wait_for_client()
+        self.check_state((self.checkers.mode_checker, 'rec',
+                          'Device didn\'t boot from USB in recovery screen'))
+        self.switcher.mode_aware_reboot()
+
+        logging.info('Restore both MiniOS sections')
+        self.faft_client.minios.restore_sig('a')
+        self.faft_client.minios.restore_sig('b')
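run_once() above proves a negative: after launching the corrupted MiniOS, the DUT must stay unreachable for the whole ping window, and a follow-up USB boot confirms it is still parked on the recovery screen rather than hung. A sketch of that first check, assuming the same autotest host interface used in the diff:

from autotest_lib.client.common_lib import error

def assert_minios_did_not_boot(host, ping_timeout_secs):
    """Fail if the DUT answers pings after launching a corrupted MiniOS.

    host.ping_wait_up() is the call used in the diff; it returns True as
    soon as the DUT responds, and False once the timeout expires.
    """
    if host.ping_wait_up(timeout=ping_timeout_secs):
        raise error.TestFail('DUT should not come back up!')
    # The test above then switches the USB key to the DUT and checks
    # mode_checker == 'rec' to confirm it stayed on the recovery screen.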
diff --git a/server/site_tests/firmware_CorruptFwBodyA/control b/server/site_tests/firmware_CorruptFwBodyA/control
index af8c657..d5ff4b3 100644
--- a/server/site_tests/firmware_CorruptFwBodyA/control
+++ b/server/site_tests/firmware_CorruptFwBodyA/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptFwBodyA"
 PURPOSE = "Servo based firmware A body corruption test"
 CRITERIA = "This test will fail if firmware verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test corrupts firmware body A and checks the next boot results.
diff --git a/server/site_tests/firmware_CorruptFwBodyA/control.dev b/server/site_tests/firmware_CorruptFwBodyA/control.dev
index 0502166..e0ebc65 100644
--- a/server/site_tests/firmware_CorruptFwBodyA/control.dev
+++ b/server/site_tests/firmware_CorruptFwBodyA/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptFwBodyA.dev"
 PURPOSE = "Servo based firmware A body corruption test"
 CRITERIA = "This test will fail if firmware verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_tot, suite:distributed_lab_qual_faft"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test corrupts firmware body A and checks the next boot results.
diff --git a/server/site_tests/firmware_CorruptFwBodyA/firmware_CorruptFwBodyA.py b/server/site_tests/firmware_CorruptFwBodyA/firmware_CorruptFwBodyA.py
index af6e70e..d7c0fa0 100644
--- a/server/site_tests/firmware_CorruptFwBodyA/firmware_CorruptFwBodyA.py
+++ b/server/site_tests/firmware_CorruptFwBodyA/firmware_CorruptFwBodyA.py
@@ -19,7 +19,8 @@
     def initialize(self, host, cmdline_args, dev_mode=False):
         super(firmware_CorruptFwBodyA, self).initialize(host, cmdline_args)
         self.backup_firmware()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
 
     def cleanup(self):
@@ -34,12 +35,13 @@
         """Runs a single iteration of the test."""
         logging.info("Corrupt firmware body A.")
         self.check_state((self.checkers.fw_tries_checker, 'A'))
-        self.faft_client.bios.corrupt_body('a')
+        offset_a, byte_a = self.faft_client.bios.get_body_one_byte('a')
+        self.faft_client.bios.modify_body('a', offset_a, byte_a + 1)
         self.switcher.mode_aware_reboot()
 
         logging.info("Expected firmware B boot and restore firmware A.")
         self.check_state((self.checkers.fw_tries_checker, ('B', False)))
-        self.faft_client.bios.restore_body('a')
+        self.faft_client.bios.modify_body('a', offset_a, byte_a)
         self.switcher.mode_aware_reboot()
 
         expected_slot = 'B' if self.fw_vboot2 else 'A'
diff --git a/server/site_tests/firmware_CorruptFwBodyB/control b/server/site_tests/firmware_CorruptFwBodyB/control
index ebdb621..3e9a0f1 100644
--- a/server/site_tests/firmware_CorruptFwBodyB/control
+++ b/server/site_tests/firmware_CorruptFwBodyB/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptFwBodyB"
 PURPOSE = "Servo based firmware B body corruption test"
 CRITERIA = "This test will fail if firmware verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test corrupts firmware body B and checks the next boot results.
diff --git a/server/site_tests/firmware_CorruptFwBodyB/control.dev b/server/site_tests/firmware_CorruptFwBodyB/control.dev
index 2f7de8e..38279ad 100644
--- a/server/site_tests/firmware_CorruptFwBodyB/control.dev
+++ b/server/site_tests/firmware_CorruptFwBodyB/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptFwBodyB.dev"
 PURPOSE = "Servo based firmware B body corruption test"
 CRITERIA = "This test will fail if firmware verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test corrupts firmware body B and checks the next boot results.
diff --git a/server/site_tests/firmware_CorruptFwBodyB/firmware_CorruptFwBodyB.py b/server/site_tests/firmware_CorruptFwBodyB/firmware_CorruptFwBodyB.py
index 2de2de4..9a9c08d 100644
--- a/server/site_tests/firmware_CorruptFwBodyB/firmware_CorruptFwBodyB.py
+++ b/server/site_tests/firmware_CorruptFwBodyB/firmware_CorruptFwBodyB.py
@@ -24,7 +24,8 @@
     def initialize(self, host, cmdline_args, dev_mode=False):
         super(firmware_CorruptFwBodyB, self).initialize(host, cmdline_args)
         self.backup_firmware()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
 
     def cleanup(self):
@@ -41,7 +42,8 @@
                       vboot.PREAMBLE_USE_RO_NORMAL)
         logging.info("Corrupt firmware body B.")
         self.check_state((self.checkers.fw_tries_checker, 'A'))
-        self.faft_client.bios.corrupt_body('b')
+        offset_b, byte_b = self.faft_client.bios.get_body_one_byte('b')
+        self.faft_client.bios.modify_body('b', offset_b, byte_b + 1)
         self.switcher.mode_aware_reboot()
 
         logging.info("Expected firmware A boot and set try_fwb flag.")
@@ -55,7 +57,7 @@
             self.check_state((self.checkers.fw_tries_checker, 'B'))
         else:
             self.check_state((self.checkers.fw_tries_checker, ('A', False)))
-        self.faft_client.bios.restore_body('b')
+        self.faft_client.bios.modify_body('b', offset_b, byte_b)
         self.switcher.mode_aware_reboot()
 
         logging.info("Final check and done.")
diff --git a/server/site_tests/firmware_CorruptFwSigA/control b/server/site_tests/firmware_CorruptFwSigA/control
index 25a2eda..dccfffd 100644
--- a/server/site_tests/firmware_CorruptFwSigA/control
+++ b/server/site_tests/firmware_CorruptFwSigA/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptFwSigA"
 PURPOSE = "Servo based firmware A signature corruption test"
 CRITERIA = "This test will fail if firmware verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test corrupts firmware signature A and checks the next boot results.
diff --git a/server/site_tests/firmware_CorruptFwSigA/control.dev b/server/site_tests/firmware_CorruptFwSigA/control.dev
index 01e935b..9d9f441 100644
--- a/server/site_tests/firmware_CorruptFwSigA/control.dev
+++ b/server/site_tests/firmware_CorruptFwSigA/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptFwSigA.dev"
 PURPOSE = "Servo based firmware A signature corruption test"
 CRITERIA = "This test will fail if firmware verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test corrupts firmware signature A and checks the next boot results.
diff --git a/server/site_tests/firmware_CorruptFwSigA/firmware_CorruptFwSigA.py b/server/site_tests/firmware_CorruptFwSigA/firmware_CorruptFwSigA.py
index 1443431..3dbb6ad 100644
--- a/server/site_tests/firmware_CorruptFwSigA/firmware_CorruptFwSigA.py
+++ b/server/site_tests/firmware_CorruptFwSigA/firmware_CorruptFwSigA.py
@@ -17,7 +17,8 @@
     def initialize(self, host, cmdline_args, dev_mode=False):
         super(firmware_CorruptFwSigA, self).initialize(host, cmdline_args)
         self.backup_firmware()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
 
     def cleanup(self):
@@ -32,7 +33,8 @@
         """Runs a single iteration of the test."""
         logging.info("Corrupt firmware signature A.")
         self.check_state((self.checkers.fw_tries_checker, 'A'))
-        self.faft_client.bios.corrupt_sig('a')
+        offset_a, byte_a = self.faft_client.bios.get_sig_one_byte('a')
+        self.faft_client.bios.modify_sig('a', offset_a, byte_a + 1)
         self.switcher.mode_aware_reboot()
 
         logging.info("Expected firmware B boot and set fwb_tries flag.")
@@ -43,7 +45,7 @@
 
         logging.info("Still expected firmware B boot and restore firmware A.")
         self.check_state((self.checkers.fw_tries_checker, 'B'))
-        self.faft_client.bios.restore_sig('a')
+        self.faft_client.bios.modify_sig('a', offset_a, byte_a)
         self.switcher.mode_aware_reboot()
 
         expected_slot = 'B' if self.fw_vboot2 else 'A'
diff --git a/server/site_tests/firmware_CorruptFwSigB/control b/server/site_tests/firmware_CorruptFwSigB/control
index f2571b0..a2ede48 100644
--- a/server/site_tests/firmware_CorruptFwSigB/control
+++ b/server/site_tests/firmware_CorruptFwSigB/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptFwSigB"
 PURPOSE = "Servo based firmware signature B corruption test"
 CRITERIA = "This test will fail if firmware verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test corrupts firmware signature B and checks the next boot results.
diff --git a/server/site_tests/firmware_CorruptFwSigB/control.dev b/server/site_tests/firmware_CorruptFwSigB/control.dev
index 7688810..a0d2eca 100644
--- a/server/site_tests/firmware_CorruptFwSigB/control.dev
+++ b/server/site_tests/firmware_CorruptFwSigB/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptFwSigB.dev"
 PURPOSE = "Servo based firmware signature B corruption test"
 CRITERIA = "This test will fail if firmware verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test corrupts firmware signature B and checks the next boot results.
diff --git a/server/site_tests/firmware_CorruptFwSigB/firmware_CorruptFwSigB.py b/server/site_tests/firmware_CorruptFwSigB/firmware_CorruptFwSigB.py
index a84f088..402aae1 100644
--- a/server/site_tests/firmware_CorruptFwSigB/firmware_CorruptFwSigB.py
+++ b/server/site_tests/firmware_CorruptFwSigB/firmware_CorruptFwSigB.py
@@ -17,7 +17,8 @@
     def initialize(self, host, cmdline_args, dev_mode=False):
         super(firmware_CorruptFwSigB, self).initialize(host, cmdline_args)
         self.backup_firmware()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
 
     def cleanup(self):
@@ -33,7 +34,8 @@
         logging.info("Expected firmware A boot and corrupt "
                      "firmware signature B.")
         self.check_state((self.checkers.fw_tries_checker, 'A'))
-        self.faft_client.bios.corrupt_sig('b')
+        offset_b, byte_b = self.faft_client.bios.get_sig_one_byte('b')
+        self.faft_client.bios.modify_sig('b', offset_b, byte_b + 1)
         self.switcher.mode_aware_reboot()
 
         logging.info("Expected firmware A boot and set try_fwb flag.")
@@ -43,7 +45,7 @@
 
         logging.info("Expected firmware A boot and restore firmware B.")
         self.check_state((self.checkers.fw_tries_checker, ('A', False)))
-        self.faft_client.bios.restore_sig('b')
+        self.faft_client.bios.modify_sig('b', offset_b, byte_b)
         self.switcher.mode_aware_reboot()
 
         logging.info("Final check and done.")
diff --git a/server/site_tests/firmware_CorruptKernelA/control b/server/site_tests/firmware_CorruptKernelA/control
index d7e5b3f..3a7969f 100644
--- a/server/site_tests/firmware_CorruptKernelA/control
+++ b/server/site_tests/firmware_CorruptKernelA/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptKernelA"
 PURPOSE = "Servo based kernel A corruption test"
 CRITERIA = "This test will fail if kernel verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test corrupts kernel A and checks for kernel B on the next boot.
diff --git a/server/site_tests/firmware_CorruptKernelA/control.dev b/server/site_tests/firmware_CorruptKernelA/control.dev
index 4c65dc1..fe7e527 100644
--- a/server/site_tests/firmware_CorruptKernelA/control.dev
+++ b/server/site_tests/firmware_CorruptKernelA/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptKernelA.dev"
 PURPOSE = "Servo based kernel A corruption test"
 CRITERIA = "This test will fail if kernel verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test corrupts kernel A and checks for kernel B on the next boot.
diff --git a/server/site_tests/firmware_CorruptKernelA/firmware_CorruptKernelA.py b/server/site_tests/firmware_CorruptKernelA/firmware_CorruptKernelA.py
index 4034494..c1b014a 100644
--- a/server/site_tests/firmware_CorruptKernelA/firmware_CorruptKernelA.py
+++ b/server/site_tests/firmware_CorruptKernelA/firmware_CorruptKernelA.py
@@ -20,7 +20,8 @@
         super(firmware_CorruptKernelA, self).initialize(host, cmdline_args)
         self.backup_kernel()
         self.backup_cgpt_attributes()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
         self.setup_kernel('a')
 
diff --git a/server/site_tests/firmware_CorruptKernelB/control b/server/site_tests/firmware_CorruptKernelB/control
index f8d4dc7..d1a33af 100644
--- a/server/site_tests/firmware_CorruptKernelB/control
+++ b/server/site_tests/firmware_CorruptKernelB/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptKernelB"
 PURPOSE = "Servo based kernel B corruption test"
 CRITERIA = "This test will fail if kernel verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test sets kernel B boot and then corrupts kernel B. The firmware
diff --git a/server/site_tests/firmware_CorruptKernelB/control.dev b/server/site_tests/firmware_CorruptKernelB/control.dev
index f1a2a72..7b42583 100644
--- a/server/site_tests/firmware_CorruptKernelB/control.dev
+++ b/server/site_tests/firmware_CorruptKernelB/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptKernelB.dev"
 PURPOSE = "Servo based kernel B corruption test"
 CRITERIA = "This test will fail if kernel verification mis-behaved"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test sets kernel B boot and then corrupts kernel B. The firmware
diff --git a/server/site_tests/firmware_CorruptKernelB/firmware_CorruptKernelB.py b/server/site_tests/firmware_CorruptKernelB/firmware_CorruptKernelB.py
index 25fef85..0e05356 100644
--- a/server/site_tests/firmware_CorruptKernelB/firmware_CorruptKernelB.py
+++ b/server/site_tests/firmware_CorruptKernelB/firmware_CorruptKernelB.py
@@ -21,7 +21,8 @@
         super(firmware_CorruptKernelB, self).initialize(host, cmdline_args)
         self.backup_kernel()
         self.backup_cgpt_attributes()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
         self.setup_kernel('a')
 
diff --git a/server/site_tests/firmware_CorruptMinios/control.minios_a b/server/site_tests/firmware_CorruptMinios/control.minios_a
new file mode 100644
index 0000000..52bfe4e
--- /dev/null
+++ b/server/site_tests/firmware_CorruptMinios/control.minios_a
@@ -0,0 +1,34 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_CorruptMinios.minios_a"
+PURPOSE = "Corrupt one MiniOS kernel, set the priority to that, and ensure that both priority can boot"
+CRITERIA = "This test will fail if the device fail to boot to MiniOS when one MiniOS kernel is corrupted"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv4, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+DOC = """
+This test requires the device to support MiniOS. On runtime, this test uses
+the dd tool to corrupt the MiniOS partition and tries to boot MiniOS from
+the firmware manual recovery screen.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_CorruptMinios", host=host, cmdline_args=args,
+                 disable_sysinfo=True, minios_section='a')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_CorruptMinios/control.minios_b b/server/site_tests/firmware_CorruptMinios/control.minios_b
new file mode 100644
index 0000000..20d4f37
--- /dev/null
+++ b/server/site_tests/firmware_CorruptMinios/control.minios_b
@@ -0,0 +1,34 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_CorruptMinios.minios_b"
+PURPOSE = "Corrupt one MiniOS kernel, set the priority to that, and ensure that both priority can boot"
+CRITERIA = "This test will fail if the device fail to boot to MiniOS when one MiniOS kernel is corrupted"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv4, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+DOC = """
+This test requires the device to support MiniOS. On runtime, this test uses
+the dd tool to corrupt the MiniOS partition and tries to boot MiniOS from
+the firmware manual recovery screen.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_CorruptMinios", host=host, cmdline_args=args,
+                 disable_sysinfo=True, minios_section='b')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_CorruptMinios/firmware_CorruptMinios.py b/server/site_tests/firmware_CorruptMinios/firmware_CorruptMinios.py
new file mode 100644
index 0000000..0aa32d1
--- /dev/null
+++ b/server/site_tests/firmware_CorruptMinios/firmware_CorruptMinios.py
@@ -0,0 +1,63 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+
+
+class firmware_CorruptMinios(FirmwareTest):
+    """
+    Servo based corrupt minios test.
+
+    This test requires the device to support MiniOS. On runtime, this test uses
+    the dd tool to corrupt the MiniOS partition and tries to boot MiniOS from
+    the firmware manual recovery screen.
+    """
+    version = 1
+
+    def initialize(self, host, cmdline_args, minios_section):
+        super(firmware_CorruptMinios, self).initialize(host, cmdline_args)
+
+        self.test_skipped = True
+        if not self.menu_switcher:
+            raise error.TestNAError('Test skipped for menuless UI')
+        if not self.faft_config.chrome_ec:
+            raise error.TestNAError('Cannot check power state without EC')
+        if not self.faft_config.minios_enabled:
+            raise error.TestNAError('MiniOS is not enabled for this board')
+        self.test_skipped = False
+
+        self.backup_kernel(kernel_type='MINIOS')
+
+        self.host = host
+        self.switcher.setup_mode('normal')
+        self.setup_usbkey(usbkey=False)
+        self.minios_section = minios_section
+        self.restored_priority = self.faft_client.system.get_minios_priority()
+
+    def cleanup(self):
+        if not self.test_skipped:
+            try:
+                self.switcher.leave_minios()
+                self.restore_kernel(kernel_type='MINIOS')
+                self.faft_client.system.set_minios_priority(
+                        self.restored_priority)
+            except Exception as e:
+                logging.error('Caught exception: %s', str(e))
+        super(firmware_CorruptMinios, self).cleanup()
+
+    def run_once(self):
+        """Run a single iteration of the test."""
+        logging.info('Corrupt MiniOS section: %r', self.minios_section)
+        self.faft_client.minios.corrupt_sig(self.minios_section)
+
+        logging.info('Try to boot with the corrupted section prioritized')
+        self.switcher.launch_minios(self.minios_section)
+        self.check_state(self.checkers.minios_checker)
+        self.switcher.leave_minios()
+
+        logging.info('Restore MiniOS section: %r', self.minios_section)
+        self.faft_client.minios.restore_sig(self.minios_section)
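Both new MiniOS tests share the same initialize/cleanup shape: raise TestNAError before touching the DUT when a precondition is missing, clear a test_skipped flag only once setup really started, snapshot mutable state (here the MiniOS priority), and restore it in cleanup inside a try/except that logs rather than masks failures. A hedged sketch of that skeleton; the callables stand in for the FAFT RPCs and are assumptions:

import logging

from autotest_lib.client.common_lib import error

class MiniosTestSkeleton(object):
    """Illustrative guard/restore skeleton mirroring the tests above."""

    def initialize(self, minios_enabled, get_priority):
        self.test_skipped = True
        if not minios_enabled:
            # Raising here leaves test_skipped set, so cleanup() will not
            # try to undo work that never happened.
            raise error.TestNAError('MiniOS is not enabled for this board')
        self.test_skipped = False
        # Snapshot state before the test mutates it.
        self.restored_priority = get_priority()

    def cleanup(self, set_priority):
        if not self.test_skipped:
            try:
                set_priority(self.restored_priority)
            except Exception as e:
                # Log, but never let cleanup failures hide the test result.
                logging.error('Caught exception: %s', str(e))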
diff --git a/server/site_tests/firmware_CorruptRecoveryCache/control b/server/site_tests/firmware_CorruptRecoveryCache/control
index e38632a..6ba11c8 100644
--- a/server/site_tests/firmware_CorruptRecoveryCache/control
+++ b/server/site_tests/firmware_CorruptRecoveryCache/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptRecoveryCache"
 PURPOSE = "Servo based RECOVERY_MRC_CACHE corruption test"
 CRITERIA = "This test will fail if the cache doesn't retrain and boot into recovery"
 ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv3"
-DEPENDENCIES = "servo_state:WORKING"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 1
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test corrupts RECOVERY_MRC_CACHE and makes sure the DUT recreates the
diff --git a/server/site_tests/firmware_CorruptRecoveryCache/control.dev b/server/site_tests/firmware_CorruptRecoveryCache/control.dev
index b2a90b3..1c830ad 100644
--- a/server/site_tests/firmware_CorruptRecoveryCache/control.dev
+++ b/server/site_tests/firmware_CorruptRecoveryCache/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_CorruptRecoveryCache.dev"
 PURPOSE = "Servo based RECOVERY_MRC_CACHE corruption test"
 CRITERIA = "This test will fail if the cache doesn't retrain and boot into recovery"
 ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv3"
-DEPENDENCIES = "servo_state:WORKING"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test corrupts RECOVERY_MRC_CACHE and makes sure the DUT recreates the
diff --git a/server/site_tests/firmware_CorruptRecoveryCache/firmware_CorruptRecoveryCache.py b/server/site_tests/firmware_CorruptRecoveryCache/firmware_CorruptRecoveryCache.py
index ec0f8fa..251ea60 100644
--- a/server/site_tests/firmware_CorruptRecoveryCache/firmware_CorruptRecoveryCache.py
+++ b/server/site_tests/firmware_CorruptRecoveryCache/firmware_CorruptRecoveryCache.py
@@ -28,7 +28,8 @@
         super(firmware_CorruptRecoveryCache, self).initialize(
                 host, cmdline_args)
         self.backup_firmware()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=True, host=False)
 
     def cleanup(self):
@@ -76,7 +77,7 @@
         if not self.cache_exist():
             raise error.TestNAError('No RECOVERY_MRC_CACHE was found on DUT.')
 
-        self.faft_client.bios.corrupt_body('rec', True)
+        self.faft_client.bios.corrupt_mrc_cache()
         self.boot_to_recovery()
 
         if not self.check_cache_rebuilt():
diff --git a/server/site_tests/firmware_Cr50BID/control b/server/site_tests/firmware_Cr50BID/control
index d64c4ec..dc8a64b 100644
--- a/server/site_tests/firmware_Cr50BID/control
+++ b/server/site_tests/firmware_Cr50BID/control
@@ -11,6 +11,7 @@
 TIME = "LONG"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify cr50 board id behavior on a board id locked image.
diff --git a/server/site_tests/firmware_Cr50BID/firmware_Cr50BID.py b/server/site_tests/firmware_Cr50BID/firmware_Cr50BID.py
index 55a6ff4..88f27fc 100644
--- a/server/site_tests/firmware_Cr50BID/firmware_Cr50BID.py
+++ b/server/site_tests/firmware_Cr50BID/firmware_Cr50BID.py
@@ -8,7 +8,6 @@
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import cr50_utils
-from autotest_lib.server.cros import filesystem_util
 from autotest_lib.server.cros.faft.cr50_test import Cr50Test
 
 
@@ -37,9 +36,9 @@
     # - Complete support for SPI PLT_RST straps was added in 0.3.18
     # - 4us INT_AP_L pulse was added in 0.3.25
     # - EC-EFS2 support was added in 0.5.4
-    # - 100us INT_AP_L pulse was added in 0.5.5 (Planned)
-    # TODO: use 5.5, so boards that require a 100us pulse can boot.
-    BID_SUPPORT = '0.5.4'
+    # - 100us INT_AP_L pulse was added in 0.5.5
+    # - third rollback bit blown in 0.5.20
+    BID_SUPPORT = '0.5.20'
 
     BID_MISMATCH = ['Board ID mismatched, but can not reboot.']
     BID_ERROR = 5
@@ -254,7 +253,7 @@
 
         if install_image:
             # Disable rootfs verification so we can copy the image to the DUT
-            filesystem_util.make_rootfs_writable(self.host)
+            self.make_rootfs_writable()
             # Copy the universal image onto the DUT.
             dest, ver = cr50_utils.InstallImage(self.host, self.universal_path,
                     path)
diff --git a/server/site_tests/firmware_Cr50CCDFirmwareUpdate/control b/server/site_tests/firmware_Cr50CCDFirmwareUpdate/control
index 9c68cd9..8708560 100644
--- a/server/site_tests/firmware_Cr50CCDFirmwareUpdate/control
+++ b/server/site_tests/firmware_Cr50CCDFirmwareUpdate/control
@@ -7,7 +7,7 @@
 AUTHOR = "Cr50 FW team"
 NAME = "firmware_Cr50CCDFirmwareUpdate"
 PURPOSE = "Verify flashing firmware through ccd_cr50"
-ATTRIBUTES = "suite:faft_cr50_prepvt, suite:faft_cr50_pvt"
+ATTRIBUTES = "suite:faft_ccd, suite:faft_cr50_prepvt, suite:faft_cr50_pvt"
 # We mark TIME as 'FAST' intentionally to have this test scheduled later than
 # other tests (having longer TIME attribute) among the test suite.
 # This is to minimize a side-effect of this test failures on the rest of
@@ -15,6 +15,7 @@
 TIME = "FAST"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This is a test to program both EC and AP firmware through ccd_cr50 device.
@@ -31,7 +32,8 @@
 
 def run(machine):
     host = hosts.create_host(machine, servo_args=servo_args)
+    fw_path = args_dict.get("fw_path", None)
     job.run_test('firmware_Cr50CCDFirmwareUpdate', host=host, cmdline_args=args,
-                 full_args=args_dict, rw_only=False)
+                 full_args=args_dict, rw_only=False, fw_path=fw_path)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_Cr50CCDFirmwareUpdate/control.faft_cr50_tot b/server/site_tests/firmware_Cr50CCDFirmwareUpdate/control.faft_cr50_tot
index b2671e0..ec0d4ed 100644
--- a/server/site_tests/firmware_Cr50CCDFirmwareUpdate/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50CCDFirmwareUpdate/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "FAST"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This is a test to program both EC and AP firmware through ccd_cr50 device.
diff --git a/server/site_tests/firmware_Cr50CCDFirmwareUpdate/firmware_Cr50CCDFirmwareUpdate.py b/server/site_tests/firmware_Cr50CCDFirmwareUpdate/firmware_Cr50CCDFirmwareUpdate.py
index 6bd70a4..b279684 100644
--- a/server/site_tests/firmware_Cr50CCDFirmwareUpdate/firmware_Cr50CCDFirmwareUpdate.py
+++ b/server/site_tests/firmware_Cr50CCDFirmwareUpdate/firmware_Cr50CCDFirmwareUpdate.py
@@ -19,7 +19,7 @@
     version = 1
     should_restore_fw = False
 
-    def initialize(self, host, cmdline_args, full_args):
+    def initialize(self, host, cmdline_args, full_args, fw_path=None):
         """Initialize the test and check if cr50 exists.
 
         Raises:
@@ -33,8 +33,10 @@
         if not self.check_ec_capability():
             raise error.TestNAError('Nothing needs to be tested on this device')
 
+        self.fw_path = fw_path
+        self.b_ver = ''
         servo_type = self.servo.get_servo_version()
-        if 'ccd_cr50' not in servo_type:
+        if 'ccd' not in servo_type:
             raise error.TestNAError('unsupported servo type: %s' % servo_type)
 
         if eval(full_args.get('backup_fw', 'False')):
@@ -95,6 +97,7 @@
 
             try:
                 self.cros_host.firmware_install(build=self.b_ver,
+                                                local_tarball=self.fw_path,
                                                 install_bios=False)
             except Exception as e:
                 logging.error('firmware_install failed: %s', str(e))
@@ -121,20 +124,26 @@
         # have its own release directory, but its parent, gru does.
         parent = getattr(self.faft_config, 'parent', None)
 
-        self.b_ver = host.get_latest_release_version(self.faft_config.platform,
-                                                     parent)
-        if not self.b_ver:
-            raise error.TestError('Cannot locate the latest release for %s' %
-                                  self.faft_config.platform)
+        if not self.fw_path:
+            self.b_ver = host.get_latest_release_version(
+                    self.faft_config.platform, parent)
+            if not self.b_ver:
+                raise error.TestError(
+                        'Cannot locate the latest release for %s' %
+                        self.faft_config.platform)
 
         # Fast open cr50 and check if testlab is enabled.
         self.fast_ccd_open(enable_testlab=True)
-        if self.servo.has_control('active_v4_device'):
-            try:
-                self.servo.set('active_v4_device', 'ccd_cr50')
-            except error.TestFail as e:
-                raise error.TestNAError('cannot change active_v4_device: %s' %
-                                        str(e))
+        if not self.servo.enable_ccd_servo_device():
+            raise error.TestNAError('Cannot make ccd active')
+        # TODO(b/196824029): remove when servod supports using the power state
+        # controller with the ccd device.
+        try:
+            self.host.servo.get_power_state_controller().reset()
+        except Exception as e:
+            logging.info(e)
+            raise error.TestNAError('Unable to do power state reset with '
+                                    'active ccd device')
 
         # If it is ITE EC, then ccd reset factory.
         if self.servo.get('ec_chip') == 'it83xx':
@@ -142,7 +151,9 @@
 
         self.should_restore_fw = True
         try:
-            self.cros_host.firmware_install(build=self.b_ver, rw_only=rw_only,
+            self.cros_host.firmware_install(build=self.b_ver,
+                                            rw_only=rw_only,
+                                            local_tarball=self.fw_path,
                                             dest=self.resultsdir,
                                             verify_version=True)
         except Exception as e:
diff --git a/server/site_tests/firmware_Cr50CCDServoCap/control b/server/site_tests/firmware_Cr50CCDServoCap/control
index 157a820..fc3ae22 100644
--- a/server/site_tests/firmware_Cr50CCDServoCap/control
+++ b/server/site_tests/firmware_Cr50CCDServoCap/control
@@ -7,10 +7,11 @@
 AUTHOR = "mruthven"
 NAME = "firmware_Cr50CCDServoCap"
 PURPOSE = "Verify uart control"
-ATTRIBUTES = "suite:faft_cr50_pvt, suite:faft_cr50_prepvt, suite:infra_qual"
+ATTRIBUTES = "suite:faft_ccd, suite:faft_cr50_pvt, suite:faft_cr50_prepvt"
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify cr50 handles enabling/disabling uart, i2c, and spi if servo is
diff --git a/server/site_tests/firmware_Cr50CCDServoCap/control.faft_cr50_tot b/server/site_tests/firmware_Cr50CCDServoCap/control.faft_cr50_tot
index 56ce366..22d51c2 100644
--- a/server/site_tests/firmware_Cr50CCDServoCap/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50CCDServoCap/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify cr50 handles enabling/disabling uart, i2c, and spi if servo is
diff --git a/server/site_tests/firmware_Cr50CCDServoCap/firmware_Cr50CCDServoCap.py b/server/site_tests/firmware_Cr50CCDServoCap/firmware_Cr50CCDServoCap.py
index 7ebc0a9..be9577e 100644
--- a/server/site_tests/firmware_Cr50CCDServoCap/firmware_Cr50CCDServoCap.py
+++ b/server/site_tests/firmware_Cr50CCDServoCap/firmware_Cr50CCDServoCap.py
@@ -7,6 +7,7 @@
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.cr50_test import Cr50Test
+from autotest_lib.server.cros.servo import chrome_ti50
 
 
 class firmware_Cr50CCDServoCap(Cr50Test):
@@ -41,17 +42,23 @@
     ON_MAP = [ 'on', 'off', '' ]
     ENABLED_MAP = [ 'enabled', 'disabled', '' ]
     CONNECTED_MAP = [ 'connected', 'disconnected', 'undetectable' ]
+    ASSERTED_MAP = ['asserted', 'deasserted', '']
     VALID_STATES = {
-        'AP' : ON_MAP,
-        'EC' : ON_MAP,
-        'AP UART' : ON_MAP,
-        'Rdd' : CONNECTED_MAP,
-        'Servo' : CONNECTED_MAP,
-        'CCD EXT' : ENABLED_MAP,
+            'AP': ON_MAP,
+            'EC': ON_MAP,
+            'AP UART': ON_MAP,
+            'Rdd': CONNECTED_MAP,
+            'Servo': CONNECTED_MAP,
+            'CCD EXT': ENABLED_MAP,
+            'CCD_MODE': ASSERTED_MAP,
     }
+    # TODO(mruthven): remove CCD_ENABLED_KEYS and mentions of 'CCD EXT' once
+    # prepvt and mp images use CCD_MODE.
+    # Old ccdstate uses CCD EXT. The new ccdstate output uses CCD_MODE.
+    CCD_ENABLED_KEYS = ['CCD EXT', 'CCD_MODE']
     # RESULT_ORDER is a list of the CCD state strings. The order corresponds
     # with the order of the key states in EXPECTED_RESULTS.
-    RESULT_ORDER = ['Rdd', 'CCD EXT', 'Servo']
+    RESULT_ORDER = ['Rdd', 'CCD_MODE', 'Servo']
     # A dictionary containing an order of steps to verify and the expected ccd
     # states as the value.
     #
@@ -105,9 +112,10 @@
             raise error.TestNAError('Test can only be run on devices with '
                                     'access to the Cr50 console')
 
-        if (self.servo.get_servo_version(active=True) !=
-            'servo_v4_with_servo_micro'):
-            raise error.TestNAError('Must use servo v4 with servo micro')
+        if ('servo_v4' not in self.servo.get_servo_type()
+                    or not self.servo.main_device_is_flex()):
+            raise error.TestNAError('Must use servo v4 with flex(c2d2 or '
+                                    'servo_micro)')
 
         if not self.cr50.servo_dts_mode_is_valid():
             raise error.TestNAError('Need working servo v4 DTS control')
@@ -120,13 +128,16 @@
         self.fast_ccd_open(enable_testlab=True)
         if not self.cr50.testlab_is_on():
             raise error.TestNAError('Cr50 testlab mode needs to be enabled')
-        logging.info('Cr50 is %s', self.servo.get('cr50_ccd_level'))
+        logging.info('Cr50 is %s', self.servo.get('gsc_ccd_level'))
         self.cr50.set_cap('UartGscTxECRx', 'Always')
         self.ec_efs_support = (
                 self.cr50.uses_board_property('BOARD_EC_CR50_COMM_SUPPORT'))
+        self._ccd_prefix = ('' if self.servo.main_device_is_ccd() else
+                            self.servo.get_ccd_servo_device())
         # Check EC uart if servo has ccd controls and the board has an EC.
-        self.check_ec_uart = (self.servo.has_control('ccd_cr50.ec_board') and
-                              self.check_ec_capability(suppress_warning=True))
+        self.check_ec_uart = (
+                self.servo.has_control('ec_board', prefix=self._ccd_prefix)
+                and self.check_ec_capability(suppress_warning=True))
 
 
     def cleanup(self):
@@ -141,6 +152,12 @@
 
     def state_matches(self, state_dict, state_name, expected_value):
         """Check the current state. Make sure it matches expected value"""
+        if state_name in self.CCD_ENABLED_KEYS:
+            for state_name in self.CCD_ENABLED_KEYS:
+                if state_name in state_dict:
+                    logging.info('Using %r for ccd enabled key', state_name)
+                    break
+
         valid_state = self.VALID_STATES[state_name][expected_value]
         # I2C isn't a reliable flag, because the hardware often doesn't support
         # it. Remove any I2C flags from the ccdstate output.
@@ -158,7 +175,7 @@
     def ccd_ec_uart_works(self):
         """Returns True if the CCD ec uart works."""
         try:
-            self.servo.get('ccd_cr50.ec_board')
+            self.servo.get('ec_board', prefix=self._ccd_prefix)
             logging.info('ccd ec console is responsive')
             return True
         except:
@@ -186,18 +203,19 @@
             output_enabled |= ec_uart_tx_enabled
             ccd_enabled |= ec_uart_enabled
 
-        ccd_ext_is_enabled = ccdstate['CCD EXT'] == 'enabled'
+        ccd_mode_is_asserted = self.state_is_on(ccdstate, 'CCD_MODE')
         mismatch = []
         logging.info('checking state flags')
-        if ccd_enabled and not ccd_ext_is_enabled:
-            mismatch.append('CCD functionality enabled without CCD EXT')
-        if ccd_ext_is_enabled:
+        if ccd_enabled and not ccd_mode_is_asserted:
+            mismatch.append('CCD functionality enabled without CCD_MODE asserted')
+        if ccd_mode_is_asserted:
             if output_enabled and self.state_is_on(ccdstate, 'Servo'):
                 mismatch.append('CCD output is enabled with servo attached')
-            if ap_uart_enabled != self.state_is_on(ccdstate, 'AP UART'):
-                mismatch.append('AP UART enabled without AP UART on')
-            if ec_uart_enabled != self.state_is_on(ccdstate, 'EC'):
-                mismatch.append('EC UART enabled without EC on')
+            if not isinstance(self.cr50, chrome_ti50.ChromeTi50):
+                if ap_uart_enabled != self.state_is_on(ccdstate, 'AP UART'):
+                    mismatch.append('AP UART enabled without AP UART on')
+                if ec_uart_enabled != self.state_is_on(ccdstate, 'EC'):
+                    mismatch.append('EC UART enabled without EC on')
             if self.check_ec_uart:
                 ccd_ec_uart_works = self.ccd_ec_uart_works()
                 if (self.servo.get('ec_uart_en') == 'off'
diff --git a/server/site_tests/firmware_Cr50CCDUartStress/control b/server/site_tests/firmware_Cr50CCDUartStress/control
index 9334c9d..185e7f3 100644
--- a/server/site_tests/firmware_Cr50CCDUartStress/control
+++ b/server/site_tests/firmware_Cr50CCDUartStress/control
@@ -7,10 +7,11 @@
 AUTHOR = "Cr50 FW team"
 NAME = "firmware_Cr50CCDUartStress"
 PURPOSE = "Uart Stress Test in ccd mode"
-ATTRIBUTES = "suite:faft_cr50_prepvt, suite:faft_cr50_pvt"
+ATTRIBUTES = "suite:faft_ccd, suite:faft_cr50_prepvt, suite:faft_cr50_pvt"
 TIME = "MEDIUM"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This is a test for Uart-USB bridging qualification.
diff --git a/server/site_tests/firmware_Cr50CCDUartStress/control.faft_cr50_tot b/server/site_tests/firmware_Cr50CCDUartStress/control.faft_cr50_tot
index fdf26b1..8d761b3 100644
--- a/server/site_tests/firmware_Cr50CCDUartStress/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50CCDUartStress/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "MEDIUM"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This is a test for Uart-USB bridging qualification.
diff --git a/server/site_tests/firmware_Cr50CCDUartStress/firmware_Cr50CCDUartStress.py b/server/site_tests/firmware_Cr50CCDUartStress/firmware_Cr50CCDUartStress.py
index 1ae8494..4aacdbe 100644
--- a/server/site_tests/firmware_Cr50CCDUartStress/firmware_Cr50CCDUartStress.py
+++ b/server/site_tests/firmware_Cr50CCDUartStress/firmware_Cr50CCDUartStress.py
@@ -47,7 +47,7 @@
 
         # Check CCD is in servo_type.
         servo_type = self.servo.get_servo_version()
-        if 'ccd_cr50' not in servo_type:
+        if 'ccd' not in servo_type:
             raise error.TestNAError('unsupported servo type: %s' % servo_type)
         logging.info('Checked the servo type is %r.', servo_type)
 
@@ -55,17 +55,10 @@
         self.fast_ccd_open(enable_testlab=True)
         logging.info('CCD opened.')
 
-        # Change active device as ccd_cr50.
-        if self.servo.has_control('active_v4_device'):
-            try:
-                self.active_dev = 'ccd_cr50'
-                self.servo.set('active_v4_device', self.active_dev)
-            except error.TestFail as e:
-                raise error.TestNAError('cannot change active_v4_device: %s' %
-                                        str(e))
-            logging.info('Set the active v4 device as %r.', self.active_dev)
-        else:
-            self.active_dev = ''
+        # Change active device to the ccd device
+        if not self.servo.enable_ccd_servo_device():
+            raise error.TestNAError('Cannot make ccd active')
+        self.active_dev = self.servo.get_active_device_prefix()
 
         # Store the original status of EC ec3po_interp_connect.
         self.ec_ec3po_connect = self.servo.get('ec_ec3po_interp_connect',
diff --git a/server/site_tests/firmware_Cr50CheckCap/control.ccd_open_restricted b/server/site_tests/firmware_Cr50CheckCap/control.ccd_open_restricted
index 2fcff94..8902653 100644
--- a/server/site_tests/firmware_Cr50CheckCap/control.ccd_open_restricted
+++ b/server/site_tests/firmware_Cr50CheckCap/control.ccd_open_restricted
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify cr50 ccd capabilities.
 
diff --git a/server/site_tests/firmware_Cr50CheckCap/control.ccd_open_unrestricted b/server/site_tests/firmware_Cr50CheckCap/control.ccd_open_unrestricted
index 0c46f9b..5b6b927 100644
--- a/server/site_tests/firmware_Cr50CheckCap/control.ccd_open_unrestricted
+++ b/server/site_tests/firmware_Cr50CheckCap/control.ccd_open_unrestricted
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify cr50 ccd capabilities.
 
diff --git a/server/site_tests/firmware_Cr50CheckCap/control.faft_cr50_tot b/server/site_tests/firmware_Cr50CheckCap/control.faft_cr50_tot
index ca32b4f..4cdd8c6 100644
--- a/server/site_tests/firmware_Cr50CheckCap/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50CheckCap/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify cr50 ccd capabilities.
 
diff --git a/server/site_tests/firmware_Cr50CheckCap/firmware_Cr50CheckCap.py b/server/site_tests/firmware_Cr50CheckCap/firmware_Cr50CheckCap.py
index 4a79242..88a3c00 100644
--- a/server/site_tests/firmware_Cr50CheckCap/firmware_Cr50CheckCap.py
+++ b/server/site_tests/firmware_Cr50CheckCap/firmware_Cr50CheckCap.py
@@ -2,8 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import pprint
 import logging
+import pprint
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.cr50_test import Cr50Test
@@ -19,16 +19,21 @@
     EXPECTED_REQ_PREPVT = 'Always'
     EXPECTED_REQ_PROD = 'IfOpened'
 
-    def check_cap_command(self, command, enable_factory, reset_caps):
+    def check_cap_command(self, enable_factory, reset_caps):
         """Verify the cr50 cap response after running the given command"""
-        self.cr50.send_command(command)
+        if enable_factory:
+            self.cr50.ccd_reset_factory()
+        else:
+            # Testlab mode is enabled, so it's ok to reset ccd without enabling
+            # capabilities necessary for ccd.
+            self.cr50.ccd_reset(servo_en=False)
         caps = self.cr50.get_cap_dict()
         logging.info(caps)
         in_factory_mode, is_reset = self.cr50.get_cap_overview(caps)
         if reset_caps and not is_reset:
-            raise error.TestFail('%r did not reset capabilities' % command)
+            raise error.TestFail('did not reset capabilities')
         if enable_factory and not in_factory_mode:
-            raise error.TestFail('%r did not enable factory mode' % command)
+            raise error.TestFail('did not enable factory mode')
 
 
     def check_cap_req(self, cap_dict, cap, expected_req):
@@ -66,6 +71,11 @@
         Raises:
             TestFail if expect_accessible doesn't match the accessibility state.
         """
+
+        if (ccd_level == 'unlock' or cap_setting == 'UnlessLocked') \
+            and not self.cr50.unlock_is_supported():
+            return
+
         # Run testlab open, so we won't have to do physical presence stuff.
         self.cr50.send_command('ccd testlab open')
 
@@ -83,7 +93,7 @@
                      cap_setting, pprint.pformat(cap_dict))
 
         # Check the accessiblity
-        for cap, cap_info in cap_dict.iteritems():
+        for cap, cap_info in cap_dict.items():
             if cap_info[self.cr50.CAP_IS_ACCESSIBLE] != expect_accessible:
                 raise error.TestFail('%r is %raccessible' % (cap,
                                      'not ' if expect_accessible else ''))
@@ -99,24 +109,27 @@
         """Check cr50 capabilities work correctly."""
         self.fast_ccd_open(enable_testlab=True)
 
-        self._ec_prefix = '' if self.servo.main_device_is_ccd() else 'ccd_cr50'
+
+        # Check servo monitoring before changing the active device. There's no
+        # need for servo detection if ccd is the only device.
+        servo_detect_ok = (self.servo.main_device_is_ccd()
+                           or self.cr50.check_servo_monitor())
+
+        set_ccd = self.servo.enable_ccd_servo_device()
+        self._ec_prefix = self.servo.get_active_device_prefix()
         # Only check EC uart if the board has a working EC and cr50 can detect
         # servo connect/disconnect.
         self.check_ec_uart = (
-                self.check_ec_capability(suppress_warning=True) and
-                self.cr50.check_servo_monitor() and
-                self.servo.has_control('ec_board', self._ec_prefix))
-        if self.check_ec_uart and self._ec_prefix:
-            try:
-                self.servo.set('active_v4_device', self._ec_prefix)
-            except:
-                self.check_ec_uart = False
+                set_ccd and servo_detect_ok
+                and self.check_ec_capability(suppress_warning=True)
+                and self.servo.active_device_is_ccd()
+                and self.servo.has_control('ec_board', self._ec_prefix))
 
         # Make sure factory reset sets all capabilities to Always
-        self.check_cap_command('ccd reset factory', True, False)
+        self.check_cap_command(True, False)
 
-        # Make sure ccd reset sets all capabilites to Default
-        self.check_cap_command('ccd reset', False, True)
+        # Make sure ccd reset sets all capabilities to Default
+        self.check_cap_command(False, True)
 
         expected_req = (self.EXPECTED_REQ_PROD if ccd_open_restricted else
                         self.EXPECTED_REQ_PREPVT)
@@ -127,7 +140,9 @@
 
         # Set the password so we can change the ccd level from the console
         self.cr50.send_command('ccd testlab open')
-        self.cr50.send_command('ccd reset')
+        # Testlab mode is enabled, so it's ok to reset ccd without enabling
+        # capabilities necessary for ccd.
+        self.cr50.ccd_reset(servo_en=False)
         self.set_ccd_password(self.CCD_PASSWORD)
 
         # Make sure ccd accessiblity behaves as expected based on the cap
diff --git a/server/site_tests/firmware_Cr50ConsoleCommands/control b/server/site_tests/firmware_Cr50ConsoleCommands/control
index 316ac65..d1f44d4 100644
--- a/server/site_tests/firmware_Cr50ConsoleCommands/control
+++ b/server/site_tests/firmware_Cr50ConsoleCommands/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test verifies only expected console commands are in the Cr50 image and
diff --git a/server/site_tests/firmware_Cr50ConsoleCommands/control.faft_cr50_tot b/server/site_tests/firmware_Cr50ConsoleCommands/control.faft_cr50_tot
index 591e403..da0770f 100644
--- a/server/site_tests/firmware_Cr50ConsoleCommands/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50ConsoleCommands/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test verifies only expected console commands are in the Cr50 image and
diff --git a/server/site_tests/firmware_Cr50ConsoleCommands/firmware_Cr50ConsoleCommands.py b/server/site_tests/firmware_Cr50ConsoleCommands/firmware_Cr50ConsoleCommands.py
index b2c698d..4c19c1f 100644
--- a/server/site_tests/firmware_Cr50ConsoleCommands/firmware_Cr50ConsoleCommands.py
+++ b/server/site_tests/firmware_Cr50ConsoleCommands/firmware_Cr50ConsoleCommands.py
@@ -43,21 +43,30 @@
     # exclude can be none if there is no label that shoud be excluded based on
     # the property.
     BOARD_PROPERTIES = [
-        ['BOARD_SLAVE_CONFIG_SPI', 'sps', 'i2cs'],
-        ['BOARD_SLAVE_CONFIG_I2C', 'i2cs', 'sps,sps_ds_resume'],
-        ['BOARD_USE_PLT_RESET', 'plt_rst', 'sys_rst'],
-        ['BOARD_CLOSED_SOURCE_SET1', 'closed_source_set1', 'open_source_set'],
-        ['BOARD_EC_CR50_COMM_SUPPORT', 'ec_comm', 'no_ec_comm'],
-        ['BOARD_CCD_REC_LID_PIN_DIOA1', 'rec_lid_a1',
-         'rec_lid_a9,rec_lid_a12, i2cs,sps_ds_resume'],
-        ['BOARD_CCD_REC_LID_PIN_DIOA9', 'rec_lid_a9',
-         'rec_lid_a1,rec_lid_a12,i2cs'],
-        ['BOARD_CCD_REC_LID_PIN_DIOA12', 'rec_lid_a12',
-         'rec_lid_a1,rec_lid_a9,sps'],
+            ['BOARD_PERIPH_CONFIG_SPI', 'sps', 'i2cs'],
+            ['BOARD_PERIPH_CONFIG_I2C', 'i2cs', 'sps,sps_ds_resume'],
+            ['BOARD_USE_PLT_RESET', 'plt_rst', 'sys_rst'],
+            [
+                    'BOARD_CLOSED_SOURCE_SET1', 'closed_source_set1',
+                    'open_source_set'
+            ],
+            ['BOARD_EC_CR50_COMM_SUPPORT', 'ec_comm', 'no_ec_comm'],
+            [
+                    'BOARD_CCD_REC_LID_PIN_DIOA1', 'rec_lid_a1',
+                    'rec_lid_a9,rec_lid_a12, i2cs,sps_ds_resume'
+            ],
+            [
+                    'BOARD_CCD_REC_LID_PIN_DIOA9', 'rec_lid_a9',
+                    'rec_lid_a1,rec_lid_a12,i2cs'
+            ],
+            [
+                    'BOARD_CCD_REC_LID_PIN_DIOA12', 'rec_lid_a12',
+                    'rec_lid_a1,rec_lid_a9,sps'
+            ],
     ]
     GUC_BRANCH_STR = 'cr50_v1.9308_26_0.'
-    MP_BRANCH_STR = 'cr50_v1.9308_87_mp.'
-    PREPVT_BRANCH_STR = 'cr50_v1.9308_B.'
+    MP_BRANCH_STR = 'cr50_v2.94_mp'
+    PREPVT_BRANCH_STR = 'cr50_v3.94_pp'
     TOT_STR = 'cr50_v2.0.'
     OPTIONAL_EXT = '_optional'
 
@@ -144,7 +153,7 @@
                 #
                 # Make sure if matches for any keys existed before, they exist
                 # now and if they didn't exist, they don't exist now.
-                for k, v in match.groupdict().iteritems():
+                for k, v in match.groupdict().items():
                     old_val = self.past_matches.get(k, [v, v])[0]
 
                     # If there's an optional key, then the value may or may not
diff --git a/server/site_tests/firmware_Cr50ConsoleCommands/gpiocfg b/server/site_tests/firmware_Cr50ConsoleCommands/gpiocfg
index 251acda..9457762 100644
--- a/server/site_tests/firmware_Cr50ConsoleCommands/gpiocfg
+++ b/server/site_tests/firmware_Cr50ConsoleCommands/gpiocfg
@@ -11,3 +11,4 @@
 GPIO1_GPIO5:    read 0 drive 1
 (?P<ec_comm>GPIO1_GPIO7: read 0 INT_RISING)?
 (?P<ec_comm>GPIO1_GPIO8: read 0 INT_FALLING)?
+gpio sleepmask: 00000000
diff --git a/server/site_tests/firmware_Cr50ConsoleCommands/help b/server/site_tests/firmware_Cr50ConsoleCommands/help
index ae515f2..feb9a6e 100644
--- a/server/site_tests/firmware_Cr50ConsoleCommands/help
+++ b/server/site_tests/firmware_Cr50ConsoleCommands/help
@@ -1,5 +1,4 @@
 ap_ro_info
-ap_ro_verstate
 bid
 -bitbang
 bpforce
@@ -12,6 +11,7 @@
 ec_comm
 ecrst
 (?P<guc>eraseflashinfo)?
+fips
 gettime
 gpiocfg
 gpioget
@@ -19,17 +19,16 @@
 help
 history
 -i2cscan
-i2cstpm
+i2cptpm
 -i2cxfer
 idle
 -panicinfo
 pinmux
 -powerbtn
--rddkeepalive
+rddkeepalive
 -reboot
 recbtnforce
 rma_auth
--serialno
 shmem
 sleepmask
 sn
diff --git a/server/site_tests/firmware_Cr50ConsoleCommands/pinmux b/server/site_tests/firmware_Cr50ConsoleCommands/pinmux
index e9d14a1..824fc7a 100644
--- a/server/site_tests/firmware_Cr50ConsoleCommands/pinmux
+++ b/server/site_tests/firmware_Cr50ConsoleCommands/pinmux
@@ -1,7 +1,6 @@
 40060000: DIOM0    5  IN (?P<plt_rst>GPIO0_GPIO4)?(?P<sys_rst>PU  GPIO0_GPIO4  WAKE_LOW)?
 40060008: DIOM1    6  IN PU GPIO0_GPIO5
 40060010: DIOM2    0  IN
-40060020: DIOM4    4  IN GPIO0_GPIO3
 (?P<plt_rst>40060018: DIOM3    0  IN  WAKE_LOW)?
 40060028: DIOA0   70   UART0_TX
 (?P<sps_ds_resume>40060030: DIOA1    0  IN)?
@@ -24,7 +23,7 @@
 400600a0: DIOB0   (?P<open_source_set>0 IN)?(?P<closed_source_set1>13 IN GPIO0_GPIO12)?
 400600a8: DIOB1   (?P<open_source_set>0 IN)?(?P<closed_source_set1>14 IN GPIO0_GPIO13)?
 400600b0: DIOB2    2  IN  GPIO0_GPIO1
-400600b8: DIOB3    (?P<no_ec_comm>3 IN GPIO0_GPIO2)?(?P<ec_comm>0 IN)?
+400600b8: DIOB3    (?P<no_ec_comm>3 IN GPIO0_GPIO2)?(?P<ec_comm>0 IN WAKE_HIGH)?
 400600c0: DIOB4    (?P<no_ec_comm>0 IN PD)?(?P<ec_comm>3 IN PD GPIO0_GPIO2)?
 400600c8: DIOB5    0  IN PD
 400600d0: DIOB6   16  IN  GPIO0_GPIO15
diff --git a/server/site_tests/firmware_Cr50DeepSleepStress/control b/server/site_tests/firmware_Cr50DeepSleepStress/control
index 8e62144..64f86d5 100644
--- a/server/site_tests/firmware_Cr50DeepSleepStress/control
+++ b/server/site_tests/firmware_Cr50DeepSleepStress/control
@@ -11,6 +11,7 @@
 TIME = "LONG"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Cr50 should enter deep sleep every time the system goes into S3. Run
diff --git a/server/site_tests/firmware_Cr50DeepSleepStress/control.faft_cr50_tot b/server/site_tests/firmware_Cr50DeepSleepStress/control.faft_cr50_tot
index 0cd5938..b783960 100644
--- a/server/site_tests/firmware_Cr50DeepSleepStress/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50DeepSleepStress/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "LONG"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Cr50 should enter deep sleep every time the system goes into S3. Run
diff --git a/server/site_tests/firmware_Cr50DeepSleepStress/control.reboot b/server/site_tests/firmware_Cr50DeepSleepStress/control.reboot
index 7306d72..730f7ca 100644
--- a/server/site_tests/firmware_Cr50DeepSleepStress/control.reboot
+++ b/server/site_tests/firmware_Cr50DeepSleepStress/control.reboot
@@ -11,6 +11,7 @@
 TIME = "LONG"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Cr50 should enter deep sleep every time the system goes into S5. Run
diff --git a/server/site_tests/firmware_Cr50DeepSleepStress/control.reboot_faft_cr50_tot b/server/site_tests/firmware_Cr50DeepSleepStress/control.reboot_faft_cr50_tot
index 27a19eb..b802dea 100644
--- a/server/site_tests/firmware_Cr50DeepSleepStress/control.reboot_faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50DeepSleepStress/control.reboot_faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "LONG"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Cr50 should enter deep sleep every time the system goes into S5. Run
diff --git a/server/site_tests/firmware_Cr50DeepSleepStress/firmware_Cr50DeepSleepStress.py b/server/site_tests/firmware_Cr50DeepSleepStress/firmware_Cr50DeepSleepStress.py
index 4090395..c650909 100644
--- a/server/site_tests/firmware_Cr50DeepSleepStress/firmware_Cr50DeepSleepStress.py
+++ b/server/site_tests/firmware_Cr50DeepSleepStress/firmware_Cr50DeepSleepStress.py
@@ -16,20 +16,19 @@
 
 
 class firmware_Cr50DeepSleepStress(FirmwareTest):
-    """Verify cr50 deep sleep after running power_SuspendStress.
+    """Verify Cr50 deep sleep after running power_SuspendStress.
 
     Cr50 should enter deep sleep every suspend. Verify that by checking the
     idle deep sleep count.
 
     @param suspend_count: The number of times to reboot or suspend the device.
-    @param reset_type: a str with the cycle type: 'mem' or 'reboot'
+    @param reset_type: a str with the cycle type: 'freeze', 'mem', or 'reboot'
     """
     version = 1
 
     SLEEP_DELAY = 20
     MIN_RESUME = 15
     MIN_SUSPEND = 15
-    MEM = 'mem'
     # Initialize the FWMP with a non-zero value. Use 100, because it's an
     # unused flag and it wont do anything like lock out dev mode or ccd.
     FWMP_FLAGS = '0x100'
@@ -39,7 +38,7 @@
     TOLERATED_ERROR = 0.05
 
     def initialize(self, host, cmdline_args, suspend_count, reset_type):
-        """Make sure the test is running with access to the cr50 console"""
+        """Make sure the test is running with access to the Cr50 console"""
         self.host = host
         super(firmware_Cr50DeepSleepStress, self).initialize(host, cmdline_args)
         if not hasattr(self, 'cr50'):
@@ -53,10 +52,16 @@
         # Reset the device
         self.host.reset_via_servo()
 
-        # Save the original version, so we can make sure cr50 doesn't rollback.
+        # Save the original version, so we can make sure Cr50 doesn't rollback.
         self.original_cr50_version = self.cr50.get_active_version_info()
         self._suspend_diff = 0
 
+        # TODO(b/218492933): find better way to disable rddkeepalive
+        # Disable rddkeepalive, so the test can disable ccd.
+        self.cr50.send_command('ccd testlab open')
+        self.cr50.send_command('rddkeepalive disable')
+        # Lock cr50 so the console will be restricted
+        self.cr50.set_ccd_level('lock')
 
     def cleanup(self):
         """Clear the fwmp."""
@@ -123,7 +128,7 @@
             time.sleep(self.MIN_SUSPEND)
 
             # Power on the device
-            self.servo.power_short_press()
+            self.servo.power_normal_press()
             time.sleep(self.MIN_RESUME)
 
             rv = self.check_cr50_deep_sleep(i + 1)
@@ -149,6 +154,9 @@
         @param enable: True to enable ccd. False to disable it.
         @returns an error message
         """
+        start_msg = ('' if self._dut_is_responsive() else
+                     'DUT unresponsive after suspend/resume')
+        logging.info('SSH state after suspend/resume %r', start_msg or 'ok')
         if enable:
             self.cr50.ccd_enable()
         else:
@@ -165,14 +173,19 @@
         # TODO(b/135147658): Raise an error once CCD disable is fixed.
         logging.info('Resetting DUT')
         self.host.reset_via_servo()
-        if not self._dut_is_responsive():
-            return msg
+
+        is_sshable = self._dut_is_responsive()
+
+        rv = start_msg or ('' if is_sshable else msg)
+        logging.info('ssh state: %r', rv or 'ok')
+        return rv
 
 
-    def run_suspend_resume(self, suspend_count):
+    def run_suspend_resume(self, suspend_count, suspend_type):
         """Suspend the device the requested number of times
 
         @param suspend_count: the number of times to suspend the device.
+        @param suspend_type: the type of suspend to issue ("mem" or "freeze")
         """
         # Disable CCD so Cr50 can enter deep sleep
         rv = self.wait_for_client_after_changing_ccd(False)
@@ -185,24 +198,25 @@
         client_at = autotest.Autotest(self.host)
         # Duration is set to 0, because it is required but unused when
         # iterations is given.
-        client_at.run_test('power_SuspendStress', tag='idle',
+        client_at.run_test('power_SuspendStress',
+                           tag='idle',
                            duration=0,
                            min_suspend=self.MIN_SUSPEND,
                            min_resume=self.MIN_RESUME,
                            check_connection=False,
                            suspend_iterations=suspend_count,
-                           suspend_state=self.MEM,
+                           suspend_state=suspend_type,
                            check_client_result=True)
 
 
     def check_cr50_deep_sleep(self, suspend_count):
-        """Verify cr50 has entered deep sleep the correct number of times.
+        """Verify Cr50 has entered deep sleep the correct number of times.
 
         Also print ccdstate and sleepmask output to get some basic information
-        about the cr50 state.
-        - sleepmask will show what may be preventing cr50 from entering sleep.
-        - ccdstate will show what cr50 thinks the AP state is. If the AP is 'on'
-          cr50 won't enter deep sleep.
+        about the Cr50 state.
+        - sleepmask will show what may be preventing Cr50 from entering sleep.
+        - ccdstate will show what Cr50 thinks the AP state is. If the AP is 'on'
+          Cr50 won't enter deep sleep.
         All of these functions log the state, so no need to log the return
         values.
 
@@ -233,7 +247,7 @@
         if exp_count and not hibernate:
             errors.append('reset during suspend')
 
-        # Use the absolute value, because cr50 shouldn't suspend more or less
+        # Use the absolute value, because Cr50 shouldn't suspend more or less
         # than expected.
         if abs(act_diff) > tolerated_diff:
             errors.append('count mismatch expected %d got %d' % (exp_count,
@@ -263,20 +277,22 @@
     def run_once(self, host, suspend_count, reset_type):
         """Verify deep sleep after suspending for the given number of cycles
 
-        The test either suspends to s3 or reboots the device depending on
-        reset_type. There are two valid reset types: mem and reboot. The test
-        will make sure that the device is off or in s3 long enough to ensure
-        Cr50 should be able to enter deep sleep. At the end of the test, it
-        checks that Cr50 entered deep sleep the same number of times it
-        suspended.
+        The test either suspends to s0i3/s3 or reboots the device depending on
+        reset_type. There are three valid reset types: freeze, mem, and reboot.
+        The test will make sure that the device is off or in s0i3/s3 long enough
+        to ensure Cr50 should be able to enter the corresponding suspend state.
+        At the end of the test, it checks that Cr50 entered the suspend state
+        the same number of times the DUT suspended.
 
         @param host: the host object representing the DUT.
         @param suspend_count: The number of cycles to suspend or reboot the
                 device.
-        @param reset_type: a str with the cycle type: 'mem' or 'reboot'
+        @param reset_type: a str with the cycle type: 'freeze', 'mem' or
+                'reboot'
         """
-        if reset_type not in ['reboot', 'mem']:
-            raise error.TestNAError('Invalid reset_type. Use "mem" or "reboot"')
+        if reset_type not in ['reboot', 'freeze', 'mem']:
+            raise error.TestNAError('Invalid reset_type. Use "freeze", "mem" '
+                                    'or "reboot"')
         if self.MIN_SUSPEND + self.MIN_RESUME < self.SLEEP_DELAY:
             logging.info('Minimum suspend-resume cycle is %ds. This is '
                          'shorter than the Cr50 idle timeout. Cr50 may not '
@@ -287,13 +303,50 @@
         original_flog = cr50_utils.DumpFlog(self.host).strip()
         logging.debug('Initial FLOG output:\n%s', original_flog)
 
+        suspend_type = reset_type
+
         # x86 devices should suspend once per reset. ARM will only suspend
         # if the device enters s5.
         if reset_type == 'reboot':
             self._enters_deep_sleep = True
         else:
             is_arm = self.check_ec_capability(['arm'], suppress_warning=True)
-            self._enters_deep_sleep = not is_arm
+
+            # Check if the device supports S0ix.
+            self.s0ix_supported = not self.host.run(
+                    'check_powerd_config --suspend_to_idle',
+                    ignore_status=True).exit_status
+
+            # Check if the device supports S3.
+            self.s3_supported = not self.host.run(
+                    'grep -q deep /sys/power/mem_sleep',
+                    ignore_status=True).exit_status
+
+            if not self.s0ix_supported and not self.s3_supported:
+                raise error.TestError(
+                        'S3 and S0ix unsupported, can not run test')
+
+            if not self.s0ix_supported and \
+               self.check_cr50_capability(['deep_sleep_in_s0i3']):
+                raise error.TestError(
+                        'Invalid configuration, S0ix not supported, but '
+                        'deep_sleep_in_s0i3 is true')
+
+            if self.check_cr50_capability(['deep_sleep_in_s0i3']) and \
+               self.s0ix_supported and not self.s3_supported:
+                logging.info('Switching suspend type from "mem" to "freeze" '
+                             'to support s0ix(S3 unsupported)')
+                suspend_type = 'freeze'
+
+            # Check if the Cr50 enters deep sleep on this device.
+            # This variable is used to determine error checks to be performed
+            # at the end of testing (Suspend/Resume count vs Cr50 Deep Sleep)
+            # Cr50 does not deep sleep on ARM
+            # Cr50 does deep sleep in S3
+            # Cr50 will only deep sleep in S0i3 on select systems.
+            self._enters_deep_sleep = not is_arm and \
+                ((suspend_type != 'freeze' or \
+                self.check_cr50_capability(['deep_sleep_in_s0i3'])))
 
         self.create_fwmp()
 
@@ -301,33 +354,38 @@
         try:
             if reset_type == 'reboot':
                 self.run_reboots(suspend_count)
-            elif reset_type == 'mem':
-                self.run_suspend_resume(suspend_count)
+            elif reset_type == 'mem' or reset_type == 'freeze':
+                self.run_suspend_resume(suspend_count, suspend_type)
+            else:
+                raise error.TestError('Test can only be run with reset types:'
+                                      'reboot, mem, or freeze')
         except Exception as e:
             main_error = e
 
         errors = []
-        # Collect logs for debugging
         # Autotest has some stages in between run_once and cleanup that may
         # be run if the test succeeds. Do this here to make sure this is
         # always run immediately after the suspend/resume cycles.
+        # Collect logs for debugging
+        # Console information
         self.cr50.dump_nvmem()
-        # Reenable CCD. Reestablish network connection.
-        rv = self.wait_for_client_after_changing_ccd(True)
-        if rv:
-            errors.append(rv)
-        rv = self.check_flog_output(original_flog)
-        if rv:
-            errors.append(rv)
-        rv = self.check_fwmp()
-        if rv:
-            errors.append(rv)
         rv = self.check_cr50_deep_sleep(suspend_count)
         if rv:
             errors.append(rv)
         rv = self.check_cr50_version(self.original_cr50_version)
         if rv:
             errors.append(rv)
+        # Reenable CCD. Reestablish network connection.
+        rv = self.wait_for_client_after_changing_ccd(True)
+        if rv:
+            errors.append(rv)
+        # Information that requires ssh
+        rv = self.check_fwmp()
+        if rv:
+            errors.append(rv)
+        rv = self.check_flog_output(original_flog)
+        if rv:
+            errors.append(rv)
         secondary_error = 'Suspend issues: %s' % ', '.join(errors)
         if main_error:
             logging.info(secondary_error)
diff --git a/server/site_tests/firmware_Cr50DeferredECReset/control b/server/site_tests/firmware_Cr50DeferredECReset/control
index 05ae612..e8b06c3 100644
--- a/server/site_tests/firmware_Cr50DeferredECReset/control
+++ b/server/site_tests/firmware_Cr50DeferredECReset/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Cr50DeferredECReset"
 PURPOSE = "Verify Deferred EC Reset."
 # TODO(b/139537748): should verify this test running on varius platforms
@@ -15,6 +15,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify EC_RST_L stays asserted while RDD is plugged in until the power button
diff --git a/server/site_tests/firmware_Cr50DeferredECReset/control.faft_cr50_tot b/server/site_tests/firmware_Cr50DeferredECReset/control.faft_cr50_tot
index 0d22ed9..d180cd2 100644
--- a/server/site_tests/firmware_Cr50DeferredECReset/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50DeferredECReset/control.faft_cr50_tot
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Cr50DeferredECReset.faft_cr50_tot"
 PURPOSE = "Verify Deferred EC Reset."
 # TODO(b/139537748): should verify this test running on varius platforms
@@ -17,6 +17,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify EC_RST_L stays asserted while RDD is plugged in until the power button
diff --git a/server/site_tests/firmware_Cr50DeferredECReset/firmware_Cr50DeferredECReset.py b/server/site_tests/firmware_Cr50DeferredECReset/firmware_Cr50DeferredECReset.py
index 88128b4..1078902 100644
--- a/server/site_tests/firmware_Cr50DeferredECReset/firmware_Cr50DeferredECReset.py
+++ b/server/site_tests/firmware_Cr50DeferredECReset/firmware_Cr50DeferredECReset.py
@@ -28,6 +28,7 @@
     version = 1
     CUTOFF_DELAY = 10
     PD_SETTLE_TIME = 3
+    WAIT_DUT_UP = 5
     HAS_CR50_RESET_ODL = False
 
     def cr50_power_on_reset(self):
@@ -44,7 +45,7 @@
         else:
             # Stop power delivery to dut
             logging.info('Stop charging')
-            self.servo.set('servo_v4_role', 'snk')
+            self.servo.set('servo_pd_role', 'snk')
 
             # Battery Cutoff
             logging.info('Cut battery off')
@@ -54,7 +55,7 @@
 
             # Enable power delivery to dut
             logging.info('Start charging')
-            self.servo.set('servo_v4_role', 'src')
+            self.servo.set('servo_pd_role', 'src')
 
         time.sleep(self.PD_SETTLE_TIME)
 
@@ -69,6 +70,15 @@
                                              [r'ac\s*=\s*(0|1)\s*'])[0][1]
         return rv == '1'
 
+    def cleanup(self):
+        """Restore dts mode."""
+        try:
+            if hasattr(self, 'HAS_CR50_RESET_ODL'):
+                self.restore_dut(self.HAS_CR50_RESET_ODL)
+                self.servo.set_dts_mode(self.dts_restore)
+        finally:
+            super(firmware_Cr50DeferredECReset, self).cleanup()
+
     def initialize(self, host, cmdline_args, full_args):
         """Initialize the test and check if cr50 exists, DTS is controllable,
            and power delivery mode and power button is adjustable.
@@ -95,7 +105,7 @@
         # Check 'rdd_leakage' is marked in cr50 capability.
         if self.check_cr50_capability(['rdd_leakage']):
             self.rdd_leakage = True
-            logging.warn('RDD leakage is marked in cr50 cap config')
+            logging.warning('RDD leakage is marked in cr50 cap config')
         else:
             self.rdd_leakage = False
 
@@ -111,14 +121,14 @@
             self.HAS_CR50_RESET_ODL = False
 
             # Test the external power delivery
-            self.servo.set('servo_v4_role', 'snk')
+            self.servo.set('servo_pd_role', 'snk')
             time.sleep(self.PD_SETTLE_TIME)
 
             if self.ac_is_plugged_in():
                 raise error.TestFail('Failed to set servo_v4_role sink')
 
             # Test stopping the external power delivery
-            self.servo.set('servo_v4_role', 'src')
+            self.servo.set('servo_pd_role', 'src')
             time.sleep(self.PD_SETTLE_TIME)
 
             if not self.ac_is_plugged_in():
@@ -139,14 +149,38 @@
                 raise error.TestError('RDD leakage does not match capability'
                                       ' configuration.')
         finally:
-            self.servo.set_dts_mode(self.dts_restore)
-            self.servo.set_nocheck('pwr_button', 'release')
-            time.sleep(self.PD_SETTLE_TIME)
-
-            self.servo.power_short_press()            # Wake up AP
+            self.restore_dut(False)
 
         logging.info('Initialization is done')
 
+    def restore_dut(self, use_cr50_reset):
+        """Restore the dut state."""
+        logging.info('Restore the dut')
+        self.servo.set('pwr_button', 'release')
+
+        if use_cr50_reset:
+            self.servo.set_nocheck('cr50_reset_odl', 'off')
+        else:
+            time.sleep(self.PD_SETTLE_TIME)
+            self.servo.set_nocheck('servo_pd_role', 'snk')
+            time.sleep(self.PD_SETTLE_TIME)
+            self.servo.set_nocheck('servo_pd_role', 'src')
+
+        # Give the EC some time to come up before resetting cr50.
+        time.sleep(self.WAIT_DUT_UP)
+
+        # Reboot cr50 to ensure EC_RST_L is deasserted.
+        self.fast_ccd_open(enable_testlab=True)
+        self.cr50.reboot()
+
+        time.sleep(self.WAIT_DUT_UP)
+
+        # Press the power button to wake up the AP, and release it soon
+        # in any case.
+        if not self.cr50.ap_is_on():
+            self.servo.power_short_press()
+        logging.info('Restoration done')
+
     def check_ecrst_asserted(self, expect_assert):
         """Ask CR50 whether EC_RST_L is asserted or deasserted.
 
@@ -163,11 +197,16 @@
         logging.info('Checking if ecrst is %s', expected_txt)
 
         try:
-            rv = self.cr50.send_command_get_output('ecrst',
-                                        [r'EC_RST_L is (%s)' % expected_txt])
+            rv = self.cr50.send_command_retry_get_output(
+                    'ecrst', [r'EC_RST_L is ((de)?asserted)'], safe=True)
             logging.info(rv)
         except error.TestError as e:
             raise error.TestFail(str(e))
+        actual_txt = rv[0][1]
+        logging.info('ecrst is %s', actual_txt)
+        if actual_txt != expected_txt:
+            raise error.TestFail('EC_RST_L mismatch: expected %r got %r' %
+                                 (expected_txt, actual_txt))
 
     def ping_ec(self, expect_response):
         """Check if EC is running and responding.
@@ -243,17 +282,7 @@
             self.ping_ec(True)
 
         finally:
-            if self.HAS_CR50_RESET_ODL:
-                self.servo.set_nocheck('cr50_reset_odl', 'off')
-            else:
-                self.servo.set_nocheck('servo_v4_role', 'src')
-
-            self.servo.set_dts_mode(self.dts_restore)
-            time.sleep(1)
-
-            # Press power button to wake up AP, and releases it soon
-            # in any cases.
-            self.servo.power_short_press()
+            self.restore_dut(self.HAS_CR50_RESET_ODL)
 
     def run_once(self):
         """Test deferred EC reset feature. """
diff --git a/server/site_tests/firmware_Cr50DevMode/control b/server/site_tests/firmware_Cr50DevMode/control
index 417e965..4c5df32 100644
--- a/server/site_tests/firmware_Cr50DevMode/control
+++ b/server/site_tests/firmware_Cr50DevMode/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify cr50 can tell the state of the dev mode switch."""
 
diff --git a/server/site_tests/firmware_Cr50DevMode/control.faft_cr50_tot b/server/site_tests/firmware_Cr50DevMode/control.faft_cr50_tot
index 1238d2b..e383625 100644
--- a/server/site_tests/firmware_Cr50DevMode/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50DevMode/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify cr50 can tell the state of the dev mode switch.
diff --git a/server/site_tests/firmware_Cr50DeviceState/control b/server/site_tests/firmware_Cr50DeviceState/control
index 3e8ec14..e002767 100644
--- a/server/site_tests/firmware_Cr50DeviceState/control
+++ b/server/site_tests/firmware_Cr50DeviceState/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify Cr50 tracks the EC and AP state correctly.
diff --git a/server/site_tests/firmware_Cr50DeviceState/control.faft_cr50_tot b/server/site_tests/firmware_Cr50DeviceState/control.faft_cr50_tot
index eebf684..addfc59 100644
--- a/server/site_tests/firmware_Cr50DeviceState/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50DeviceState/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify Cr50 tracks the EC and AP state correctly.
diff --git a/server/site_tests/firmware_Cr50DeviceState/firmware_Cr50DeviceState.py b/server/site_tests/firmware_Cr50DeviceState/firmware_Cr50DeviceState.py
index 25585e3..5665f54 100644
--- a/server/site_tests/firmware_Cr50DeviceState/firmware_Cr50DeviceState.py
+++ b/server/site_tests/firmware_Cr50DeviceState/firmware_Cr50DeviceState.py
@@ -103,12 +103,18 @@
     INCREASE = '+'
     DS_RESUME = 'DS'
 
-    MEM_SLEEP_PATH = '/sys/power/mem_sleep'
-    MEM_SLEEP_S0IX = 'echo %s > %s ; sleep 1' % ('s2idle', MEM_SLEEP_PATH)
-    MEM_SLEEP_S3 = 'echo %s > %s ; sleep 1' % ('deep', MEM_SLEEP_PATH)
-    POWER_STATE_PATH = '/sys/power/state'
-    POWER_STATE_S0IX = 'echo %s > %s' % ('freeze', POWER_STATE_PATH)
-    POWER_STATE_S3 = 'echo %s > %s' % ('mem', POWER_STATE_PATH)
+    TMP_POWER_MANAGER_PATH = '/tmp/power_manager'
+    POWER_MANAGER_PATH = '/var/lib/power_manager'
+    # TODO(mruthven): remove ec chan restriction once soraka stops spamming host
+    # command output. The extra activity makes it look like an interrupt storm on
+    # the EC uart.
+    CHAN_ALL = 0xffffffff
+    CHAN_EVENTS = 0x20
+    CHAN_ACPI = 0x400
+    CHAN_HOSTCMD = 0x80
+    CHAN_USBCHARGE = 0x200000
+    CHAN_RESTRICTED = CHAN_ALL ^ (CHAN_EVENTS | CHAN_ACPI | CHAN_HOSTCMD
+                                  | CHAN_USBCHARGE)
 
 
     def initialize(self, host, cmdline_args, full_args):
@@ -118,22 +124,36 @@
         if not self.check_ec_capability():
             raise error.TestNAError("Nothing needs to be tested on this device")
 
-        self.generate_suspend_commands()
+        # If the TPM is reset in S0i3, the CR50 may enter deep sleep during S0i3.
+        # Cr50 may enter deep sleep an extra time, because of how the test
+        # collects taskinfo counts. So the range is set conservatively to 0-2.
+        if self.check_cr50_capability(['deep_sleep_in_s0i3']):
+            irq_s0ix_deep_sleep_key = 'S0ix' + self.DEEP_SLEEP_STEP_SUFFIX
+            self.EXPECTED_IRQ_COUNT_RANGE[irq_s0ix_deep_sleep_key] = [0, 2]
 
+    def mount_power_config(self):
+        """Mounts power_manager settings to tmp,
+        ensuring that any changes do not persist across reboots
+        """
+        self.faft_client.system.run_shell_command(
+                'mkdir -p %s && \
+            echo 0 > %s/suspend_to_idle && \
+            mount --bind %s %s && \
+            restart powerd' %
+                (self.TMP_POWER_MANAGER_PATH, self.TMP_POWER_MANAGER_PATH,
+                 self.TMP_POWER_MANAGER_PATH, self.POWER_MANAGER_PATH), True)
 
-    def generate_suspend_commands(self):
-        """Generate the S3 and S0ix suspend commands"""
-        s0ix_cmds = []
-        s3_cmds = []
-        if self.host.path_exists(self.MEM_SLEEP_PATH):
-            s0ix_cmds.append(self.MEM_SLEEP_S0IX)
-            s3_cmds.append(self.MEM_SLEEP_S3)
-        s0ix_cmds.append(self.POWER_STATE_S0IX)
-        s3_cmds.append(self.POWER_STATE_S3)
-        self._s0ix_cmds = '; '.join(s0ix_cmds)
-        self._s3_cmds = '; '.join(s3_cmds)
-        logging.info('S0ix cmd: %r', self._s0ix_cmds)
-        logging.info('S3 cmd: %r', self._s3_cmds)
+    def umount_power_config(self):
+        """Unmounts power_manager settings"""
+        self.faft_client.system.run_shell_command(
+                'umount %s && restart powerd' % self.POWER_MANAGER_PATH, True)
+
+    def set_suspend_to_idle(self, value):
+        """Set suspend_to_idle by writing to power_manager settings"""
+        # Suspend to idle expects 0/1 so %d is used
+        self.faft_client.system.run_shell_command(
+                'echo %d > %s/suspend_to_idle' %
+                (value, self.TMP_POWER_MANAGER_PATH), True)
 
 
     def log_sleep_debug_information(self):
@@ -375,9 +395,7 @@
 
     def ap_is_on_after_power_button_press(self):
         """Returns True if the AP is on after pressing the power button"""
-        # TODO (mruthven): use self.servo.power_short_press() once kukui power
-        # button issues are figured out.
-        self.servo.power_key(1)
+        self.servo.power_normal_press()
         # Give the AP some time to turn on
         time.sleep(self.cr50.SHORT_WAIT)
         return self.cr50.ap_is_on()
@@ -398,18 +416,23 @@
         block = True
         if state == 'S0':
             self.trigger_s0()
+            # Suppress host command output, so it doesn't look like an interrupt
+            # storm. Set it whenever the system enters S0 to ensure the setting
+            # is restored if the EC enters hibernate.
+            time.sleep(2)
+            logging.info('Setting EC chan %x', self.CHAN_RESTRICTED)
+            self.ec.send_command('chan 0x%x' % self.CHAN_RESTRICTED)
         else:
             if state == 'S0ix':
-                full_command = self._s0ix_cmds
-                block = False
+                self.set_suspend_to_idle(True)
+                self.suspend()
             elif state == 'S3':
-                full_command = self._s3_cmds
-                block = False
+                self.set_suspend_to_idle(False)
+                self.suspend()
             elif state == 'G3':
-                full_command = 'poweroff'
-            self.faft_client.system.run_shell_command(full_command, block)
+                self.faft_client.system.run_shell_command('poweroff', True)
 
-        time.sleep(self.SHORT_WAIT);
+        time.sleep(self.SHORT_WAIT)
         # check state transition
         if not self.wait_power_state(state, self.SHORT_WAIT):
             raise error.TestFail('Platform failed to reach %s state.' % state)
@@ -469,6 +492,10 @@
         finally:
             # reset the system to S0 no matter what happens
             self.trigger_s0()
+            # Reenable EC chan output.
+            time.sleep(2)
+            logging.info('Setting EC chan %x', self.CHAN_ALL)
+            self.ec.send_command('chan 0x%x' % self.CHAN_ALL)
 
         # Check that the progress of the irq counts seems reasonable
         self.check_for_errors(state)
@@ -497,15 +524,15 @@
         client_at = autotest.Autotest(self.host)
         client_at.run_test('login_LoginSuccess')
 
-        # Check if the device supports S0ix. The exit status will be 0 if it
-        # does 1 if it doesn't.
-        result = self.host.run('check_powerd_config --suspend_to_idle',
-                ignore_status=True)
-        if not result.exit_status:
-            self.verify_state('S0ix')
+        self.mount_power_config()
+        try:
+            if self.s0ix_supported:
+                self.verify_state('S0ix')
 
-        # Enter S3
-        self.verify_state('S3')
+            if self.s3_supported:
+                self.verify_state('S3')
+        finally:
+            self.umount_power_config()
 
         # Enter G3
         self.verify_state('G3')
@@ -524,6 +551,15 @@
             self.cr50.ccd_disable(raise_error=True)
 
         self.ccd_enabled = self.cr50.ccd_is_enabled()
+        # Check if the device supports S0ix.
+        self.s0ix_supported = not self.host.run(
+                'check_powerd_config --suspend_to_idle',
+                ignore_status=True).exit_status
+        # Check if the device supports S3.
+        self.s3_supported = not self.host.run(
+                'grep -q deep /sys/power/mem_sleep',
+                ignore_status=True).exit_status
+
         self.run_through_power_states()
 
         if supports_dts_control:
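
The channel mask constants and the 'chan 0x%x' console commands in the hunks above quiet the EC console while the DUT is in S0: the events, ACPI, host-command, and USB-charge channels are cleared from the all-channels mask so host-command traffic does not look like an interrupt storm, and the full mask is restored during cleanup. A minimal standalone sketch of that mask arithmetic, assuming the EC 'chan' command takes a mask of channels to keep enabled:

# Constants copied from the hunk above. XOR with the all-ones mask clears
# the listed channel bits, so CHAN_RESTRICTED disables console output from
# the events, ACPI, host-command, and USB-charge channels while leaving
# everything else enabled.
CHAN_ALL = 0xffffffff
CHAN_EVENTS = 0x20
CHAN_ACPI = 0x400
CHAN_HOSTCMD = 0x80
CHAN_USBCHARGE = 0x200000
CHAN_RESTRICTED = CHAN_ALL ^ (CHAN_EVENTS | CHAN_ACPI | CHAN_HOSTCMD
                              | CHAN_USBCHARGE)

# The test sends these over the EC console: the restricted mask on entry to
# S0 and the full mask again during cleanup.
restrict_cmd = 'chan 0x%x' % CHAN_RESTRICTED    # 'chan 0xffdffb5f'
restore_cmd = 'chan 0x%x' % CHAN_ALL            # 'chan 0xffffffff'
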
diff --git a/server/site_tests/firmware_Cr50ECReset/control b/server/site_tests/firmware_Cr50ECReset/control
index eecf337..ea0651a 100644
--- a/server/site_tests/firmware_Cr50ECReset/control
+++ b/server/site_tests/firmware_Cr50ECReset/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Make sure 'cr50 ecrst' works as intended
 
diff --git a/server/site_tests/firmware_Cr50ECReset/control.faft_cr50_tot b/server/site_tests/firmware_Cr50ECReset/control.faft_cr50_tot
index 5700a12..56b2671 100644
--- a/server/site_tests/firmware_Cr50ECReset/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50ECReset/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Make sure 'cr50 ecrst' works as intended
 
diff --git a/server/site_tests/firmware_Cr50ECReset/firmware_Cr50ECReset.py b/server/site_tests/firmware_Cr50ECReset/firmware_Cr50ECReset.py
index 2c2f11e..8529883 100644
--- a/server/site_tests/firmware_Cr50ECReset/firmware_Cr50ECReset.py
+++ b/server/site_tests/firmware_Cr50ECReset/firmware_Cr50ECReset.py
@@ -34,17 +34,35 @@
             raise error.TestNAError("This DUT has a hardware limitation that "
                                     "prevents cr50 from waking the EC with "
                                     "EC_RST_L.")
+
+        # TODO(b/186535695): EC hibernate puts cr50 into reset, so the test
+        # can't verify cr50 behavior while the EC is in hibernate.
+        if 'c2d2' in self.servo.get_servo_type():
+            raise error.TestNAError('Cannot run test with c2d2')
+
         # Don't bother if there is no Chrome EC or if EC hibernate doesn't work.
         if not self.check_ec_capability():
             raise error.TestNAError("Nothing needs to be tested on this device")
-        self.check_ec_hibernate()
+
+        # Verify the EC can wake from hibernate with a power button press. If it
+        # can't, it's a device or servo issue.
+        try:
+            self.check_ec_hibernate()
+        except error.TestError as e:
+            raise error.TestNAError('Unsupported setup: %s' % str(e))
 
 
     def cleanup(self):
         """Make sure the EC is on, if there is a Chrome EC."""
-        if self.check_ec_capability():
-            self.guarantee_ec_is_up()
-        super(firmware_Cr50ECReset, self).cleanup()
+        try:
+            if self.check_ec_capability():
+                self.guarantee_ec_is_up()
+        except Exception as e:
+            logging.info('Issue recovering EC: %r', e)
+            logging.info('Trying power state reset')
+            self.host.servo.get_power_state_controller().reset()
+        finally:
+            super(firmware_Cr50ECReset, self).cleanup()
 
 
     def ec_is_up(self):
@@ -104,7 +122,7 @@
         time.sleep(self.RELEASE_RESET_DELAY)
         self.wake_ec(self.power_button)
         if not self.ec_is_up():
-            raise error.TestError('Could not recover EC')
+            raise error.TestError('Could not recover EC with power button')
 
 
     def can_wake_ec(self, wake_method):
diff --git a/server/site_tests/firmware_Cr50FIPSDS/control b/server/site_tests/firmware_Cr50FIPSDS/control
new file mode 100644
index 0000000..bf7272b
--- /dev/null
+++ b/server/site_tests/firmware_Cr50FIPSDS/control
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "mruthven"
+NAME = "firmware_Cr50FIPSDS"
+PURPOSE = "Verify FIPS works after deep sleep"
+ATTRIBUTES = "suite:faft_cr50_pvt, suite:faft_cr50_prepvt"
+TIME = "SHORT"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
+
+DOC = """
+Verify FIPS works after deep sleep resume.
+"""
+
+if 'args_dict' not in locals():
+    args_dict = {}
+
+args_dict.update(utils.args_to_dict(args))
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+
+    job.run_test("firmware_Cr50FIPSDS", host=host,
+                 cmdline_args=args, full_args=args_dict)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_Cr50FIPSDS/firmware_Cr50FIPSDS.py b/server/site_tests/firmware_Cr50FIPSDS/firmware_Cr50FIPSDS.py
new file mode 100644
index 0000000..b3626f9
--- /dev/null
+++ b/server/site_tests/firmware_Cr50FIPSDS/firmware_Cr50FIPSDS.py
@@ -0,0 +1,54 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import cr50_utils
+from autotest_lib.server.cros.faft.cr50_test import Cr50Test
+
+
+class firmware_Cr50FIPSDS(Cr50Test):
+    """
+    Verify cr50 fips works after coming out of deep sleep.
+    """
+    version = 1
+
+    def apshutdown(self):
+        """Shutdown the AP and give cr50 enough time to enter deep sleep."""
+        self.cr50.ccd_disable()
+        self.set_ap_off_power_mode('shutdown')
+        self.cr50.clear_deep_sleep_count()
+        time.sleep(30)
+
+    def check_ds_resume(self):
+        """Check the system resumed ok."""
+
+        if not self.cr50.fips_crypto_allowed():
+            raise error.TestFail('Crypto not allowed after deep sleep')
+        # Make sure the EC jumped to RW. This could catch ec-efs issues.
+        logging.info(
+                self.ec.send_command_get_output('sysinfo', ['Jumped: yes']))
+        if not self.cr50.get_deep_sleep_count():
+            raise error.TestError('Cr50 did not enter deep sleep')
+        # Make sure the DUT fully booted and is sshable.
+        logging.info('Running %r', cr50_utils.GetRunningVersion(self.host))
+
+    def run_once(self, host):
+        """Verify FIPS after deep sleep."""
+        if not self.cr50.has_command('fips'):
+            raise error.TestNAError('Cr50 does not support fips')
+
+        # Verify EC sysjump works on deep sleep resume.
+        self.apshutdown()
+        self.ec.reboot()
+        time.sleep(7)
+        self.check_ds_resume()
+
+        # Verify the AP can boot after resume without EC reset.
+        self.apshutdown()
+        self.servo.power_normal_press()
+        time.sleep(7)
+        self.check_ds_resume()
diff --git a/server/site_tests/firmware_Cr50FactoryResetVC/control b/server/site_tests/firmware_Cr50FactoryResetVC/control
index 0463322..96d56a9 100644
--- a/server/site_tests/firmware_Cr50FactoryResetVC/control
+++ b/server/site_tests/firmware_Cr50FactoryResetVC/control
@@ -11,6 +11,7 @@
 TIME = "MEDIUM"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test will make sure the FWMP, write protect, and ccd password can disable
diff --git a/server/site_tests/firmware_Cr50FactoryResetVC/control.faft_cr50_tot b/server/site_tests/firmware_Cr50FactoryResetVC/control.faft_cr50_tot
index edb09e7..4900b1b 100644
--- a/server/site_tests/firmware_Cr50FactoryResetVC/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50FactoryResetVC/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "MEDIUM"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test will make sure the FWMP, write protect, and ccd password can disable
diff --git a/server/site_tests/firmware_Cr50FactoryResetVC/firmware_Cr50FactoryResetVC.py b/server/site_tests/firmware_Cr50FactoryResetVC/firmware_Cr50FactoryResetVC.py
index 4a63f86..627cf1e 100644
--- a/server/site_tests/firmware_Cr50FactoryResetVC/firmware_Cr50FactoryResetVC.py
+++ b/server/site_tests/firmware_Cr50FactoryResetVC/firmware_Cr50FactoryResetVC.py
@@ -21,6 +21,7 @@
     # Short wait to make sure cr50 has had enough time to update the ccd state
     SLEEP = 2
     BOOL_VALUES = (True, False)
+    TPM_ERR = 'Problems reading from TPM'
 
     def initialize(self, host, cmdline_args, full_args):
         """Initialize servo check if cr50 exists."""
@@ -30,7 +31,7 @@
             raise error.TestNAError('Cannot run test without bpforce')
         self.fast_ccd_open(enable_testlab=True)
         # Reset ccd completely.
-        self.cr50.send_command('ccd reset')
+        self.cr50.ccd_reset()
 
         # If we can fake battery connect/disconnect, then we can test the vendor
         # command.
@@ -148,29 +149,34 @@
             logging.info('EXPECT: %s', 'failure' if enable_fail else 'success')
         cmd = 'enable' if enable else 'disable'
 
-        result = self.host.run('gsctool -a -F %s' % cmd,
-                ignore_status=(enable_fail or not enable))
-        logging.debug(result)
+        self.host.run('gsctool -a -F %s' % cmd,
+                      ignore_status=(enable_fail or not enable))
         expect_enabled = enable and not enable_fail
 
+        # Wait long enough for cr50 to update the ccd state.
+        time.sleep(self.SLEEP)
         if expect_enabled:
-            # Cr50 will reboot after it enables factory mode.
-            self.cr50.wait_for_reboot(timeout=10)
-        else:
-            # Wait long enoug for cr50 to udpate the ccd state.
-            time.sleep(self.SLEEP)
+            # Verify the tpm is disabled.
+            result = self.host.run('gsctool -af', ignore_status=True)
+            if result.exit_status != 3 or self.TPM_ERR not in result.stderr:
+                raise error.TestFail('TPM enabled after entering factory mode')
+            # Reboot the DUT to reenable TPM communications.
+            self.host.reboot()
+
         if self.factory_mode_enabled() != expect_enabled:
             raise error.TestFail('Unexpected factory mode %s result' % cmd)
 
 
     def clear_state(self):
         """Clear the FWMP and reset CCD"""
+        self.host.reboot()
+        self._try_to_bring_dut_up()
         # Clear the FWMP
         self.clear_fwmp()
         # make sure all of the ccd stuff is reset
         self.cr50.send_command('ccd testlab open')
         # Run ccd reset to make sure all ccd state is cleared
-        self.cr50.send_command('ccd reset')
+        self.cr50.ccd_reset()
         # Clear the TPM owner, so we can set the ccd password and
         # create the FWMP
         tpm_utils.ClearTPMOwnerRequest(self.host, wait_for_ready=True)
diff --git a/server/site_tests/firmware_Cr50GetName/control b/server/site_tests/firmware_Cr50GetName/control
index 27eb4ce..42177cc 100644
--- a/server/site_tests/firmware_Cr50GetName/control
+++ b/server/site_tests/firmware_Cr50GetName/control
@@ -11,6 +11,7 @@
 TIME = "MEDIUM"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify cr50-get-name.sh returns the right cr50 image based on the board id.
diff --git a/server/site_tests/firmware_Cr50GetName/firmware_Cr50GetName.py b/server/site_tests/firmware_Cr50GetName/firmware_Cr50GetName.py
index 59877dd..be5e115 100644
--- a/server/site_tests/firmware_Cr50GetName/firmware_Cr50GetName.py
+++ b/server/site_tests/firmware_Cr50GetName/firmware_Cr50GetName.py
@@ -8,7 +8,6 @@
 
 from autotest_lib.client.common_lib import error, utils
 from autotest_lib.client.common_lib.cros import cr50_utils
-from autotest_lib.server.cros import filesystem_util
 from autotest_lib.server.cros.faft.cr50_test import Cr50Test
 
 
@@ -36,7 +35,7 @@
 
         efi_path = self.get_saved_eraseflashinfo_image_path()
 
-        filesystem_util.make_rootfs_writable(self.host)
+        self.make_rootfs_writable()
         cr50_utils.InstallImage(self.host, efi_path, cr50_utils.CR50_PROD)
         cr50_utils.InstallImage(self.host, efi_path, cr50_utils.CR50_PREPVT)
 
diff --git a/server/site_tests/firmware_Cr50InvalidateRW/control b/server/site_tests/firmware_Cr50InvalidateRW/control
index 9cdcec5..e501828 100644
--- a/server/site_tests/firmware_Cr50InvalidateRW/control
+++ b/server/site_tests/firmware_Cr50InvalidateRW/control
@@ -10,6 +10,7 @@
 ATTRIBUTES = "suite:faft_cr50_pvt, suite:faft_cr50_prepvt"
 TIME = "MEDIUM"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Verify the inactive Cr50 header on the first login after cryptohome restarts.
diff --git a/server/site_tests/firmware_Cr50InvalidateRW/control.faft_cr50_tot b/server/site_tests/firmware_Cr50InvalidateRW/control.faft_cr50_tot
index dea0522..7835fa2 100644
--- a/server/site_tests/firmware_Cr50InvalidateRW/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50InvalidateRW/control.faft_cr50_tot
@@ -10,6 +10,7 @@
 ATTRIBUTES = "suite:faft_cr50_tot"
 TIME = "MEDIUM"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Verify the inactive Cr50 header on the first login after cryptohome restarts.
diff --git a/server/site_tests/firmware_Cr50InvalidateRW/firmware_Cr50InvalidateRW.py b/server/site_tests/firmware_Cr50InvalidateRW/firmware_Cr50InvalidateRW.py
index 11164ba..7bba490 100644
--- a/server/site_tests/firmware_Cr50InvalidateRW/firmware_Cr50InvalidateRW.py
+++ b/server/site_tests/firmware_Cr50InvalidateRW/firmware_Cr50InvalidateRW.py
@@ -12,13 +12,13 @@
 class firmware_Cr50InvalidateRW(test.test):
     """
     Verify the inactive Cr50 header on the first login after cryptohome
-    restarts.
+    starts.
 
     There are two special cases this test covers: logging in after the TPM
     owner is cleared and logging in as guest.
 
     After the tpm owner is cleared, corrupting the header will be done on
-    the second login. During guest login the owner wont be cleared.
+    the first non-guest login. During guest login the owner won't be cleared.
     """
     version = 1
 
@@ -94,7 +94,7 @@
             that isn't corrupt_login or if an attepmt to corrupt the header
             fails.
         """
-        for i in xrange(self.LOGIN_ATTEMPTS):
+        for i in range(self.LOGIN_ATTEMPTS):
             attempt = i + 1
 
             self.login(use_guest)
@@ -129,6 +129,10 @@
         logging.info('Clearing the TPM owner')
         tpm_utils.ClearTPMOwnerRequest(self.host, wait_for_ready=True)
 
+    def take_tpm_owner(self):
+        """Take the tpm owner."""
+        logging.info('Taking the TPM owner')
+        self.host.run('tpm_manager_client take_ownership')
 
     def after_run_once(self):
         """Print the run information after each successful run"""
@@ -137,21 +141,10 @@
 
     def run_once(self, host):
         """Login to validate ChromeOS corrupts the inactive header"""
-        # After clearing the tpm owner the header will be corrupted on the
-        # second login
+        # The header is corrupted on the first non-guest login after clearing
+        # the tpm owner
         self.clear_tpm_owner()
-        self.login_and_verify(corrupt_login=2)
+        self.take_tpm_owner()
 
-        # The header is corrupted on the first login after cryptohome is reset
-        self.restart_cryptohome()
-        self.login_and_verify(corrupt_login=1)
-
-        # Cryptohome is reset after reboot
-        self.host.reboot()
-        self.login_and_verify(corrupt_login=1)
-
-        # The header is not corrupted after guest login, but will be corrupted
-        # on the first login after that.
-        self.restart_cryptohome()
         self.login_and_verify(use_guest=True)
         self.login_and_verify(corrupt_login=1)
diff --git a/server/site_tests/firmware_Cr50Keygen/control.ecc b/server/site_tests/firmware_Cr50Keygen/control.ecc
new file mode 100644
index 0000000..735468c
--- /dev/null
+++ b/server/site_tests/firmware_Cr50Keygen/control.ecc
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "mruthven"
+NAME = "firmware_Cr50Keygen.ecc"
+PURPOSE = "Verify ecc key generation."
+ATTRIBUTES = "suite:faft_cr50_prepvt, suite:faft_cr50_pvt"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "SHORT"
+TEST_TYPE = "server"
+PY_VERSION = 3
+
+DOC = """Verify ecc keygen."""
+
+if 'args_dict' not in locals():
+    args_dict = {}
+
+args_dict.update(utils.args_to_dict(args))
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+
+    iterations = int(args_dict.get("iterations", 1))
+
+    job.run_test("firmware_Cr50Keygen", host=host, cmdline_args=args,
+                 key_type="ecc", iterations=iterations)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_Cr50Keygen/control.rsa b/server/site_tests/firmware_Cr50Keygen/control.rsa
new file mode 100644
index 0000000..bfc2aa7
--- /dev/null
+++ b/server/site_tests/firmware_Cr50Keygen/control.rsa
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "mruthven"
+NAME = "firmware_Cr50Keygen.rsa"
+PURPOSE = "Verify cr50 rsa key generation."
+ATTRIBUTES = "suite:faft_cr50_prepvt, suite:faft_cr50_pvt"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "SHORT"
+TEST_TYPE = "server"
+PY_VERSION = 3
+
+DOC = """Verify cr50 rsa keygen."""
+
+if 'args_dict' not in locals():
+    args_dict = {}
+
+args_dict.update(utils.args_to_dict(args))
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+
+    iterations = int(args_dict.get("iterations", 1))
+
+    job.run_test("firmware_Cr50Keygen", host=host, cmdline_args=args,
+                 key_type="rsa", iterations=iterations)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_Cr50Keygen/firmware_Cr50Keygen.py b/server/site_tests/firmware_Cr50Keygen/firmware_Cr50Keygen.py
new file mode 100644
index 0000000..9dfd602
--- /dev/null
+++ b/server/site_tests/firmware_Cr50Keygen/firmware_Cr50Keygen.py
@@ -0,0 +1,113 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+
+
+class firmware_Cr50Keygen(FirmwareTest):
+    """Verify cr50 can tell the state of the dev mode switch."""
+    version = 1
+
+    RUNS = 20
+    TIME_RE = r'KeyPair took (\d+) '
+    TRUNKS_BASE = (
+            'trunks_client --key_create --key_blob=/tmp/key --print_time '
+            '--usage=decrypt ')
+    RSA_CMD_ARGS = '--rsa=2048'
+    # TODO(mruthven): look at results to see if 5000 is a reasonable average
+    # and 30s is a reasonable max across the test devices. Start with a low
+    # threshold to get an idea of how the lab devices are operating.
+    # Raise an error if the average RSA key generation time takes longer than
+    # this threshold in ms.
+    RSA_AVG_THRESHOLD = 8000
+    # Raise an error if the max RSA key generation time takes longer than this
+    # threshold in ms.
+    RSA_MAX_THRESHOLD = 30000
+    ECC_CMD_ARGS = '--ecc'
+    # TODO(mruthven): look at results to see if 150 is a reasonable average
+    # and 500 is a reasonable max across the test devices. Start with a low
+    # threshold to get an idea of how the lab devices are operating.
+    # Raise an error if the average ECC key generation time takes longer than
+    # this threshold in ms.
+    ECC_AVG_THRESHOLD = 150
+    # Raise an error if the max ECC key generation time takes longer than this
+    # threshold in ms.
+    ECC_MAX_THRESHOLD = 500
+
+    def wait_for_client_after_changing_ccd(self, enable):
+        """Change CCD and wait for client.
+
+        @param enable: True to enable ccd. False to disable it.
+        @raises TestError if the DUT isn't pingable after changing ccd.
+        """
+        if not hasattr(self, 'cr50') or not self.cr50:
+            return
+
+        if enable:
+            self.cr50.ccd_enable()
+        else:
+            self.cr50.ccd_disable()
+
+        time.sleep(5)
+
+        if self.host.ping_wait_up(180):
+            return
+        msg = ('DUT is not pingable after %sabling ccd' %
+               ('en' if enable else 'dis'))
+        logging.info(msg)
+        logging.info('Resetting DUT')
+        self.host.reset_via_servo()
+        if not self.host.ping_wait_up(180):
+            raise error.TestError(msg)
+
+    def get_key_attr(self, attr):
+        """Get the attribute for the type of key the test is generating."""
+        return getattr(self, self.key_type + '_' + attr)
+
+    def get_keygen_cmd(self):
+        """Generate the trunks_client key_create command."""
+        return self.TRUNKS_BASE + self.get_key_attr('CMD_ARGS')
+
+    def run_once(self, host, key_type='RSA'):
+        """Check ECC and RSA Keygen times."""
+        self.host = host
+        self.key_type = key_type.upper()
+
+        # TODO(b/218492933) : find better way to disable rddkeepalive
+        # Disable rddkeepalive, so the test can disable ccd.
+        self.cr50.send_command('ccd testlab open')
+        self.cr50.send_command('rddkeepalive disable')
+        # Lock cr50 so the console will be restricted
+        self.cr50.set_ccd_level('lock')
+
+        self.wait_for_client_after_changing_ccd(False)
+
+        cmd = self.get_keygen_cmd()
+        logging.info(cmd)
+        full_cmd = ('for i in {1..%d} ; do echo $i ; %s || break; done' %
+                    (self.RUNS, cmd))
+        response = host.run(full_cmd)
+        logging.debug(response.stdout)
+        times = [int(t) for t in re.findall(self.TIME_RE, response.stdout)]
+        logging.info(times)
+        avg_time = sum(times) / len(times)
+        max_time = max(times)
+        logging.info('Average time: %s', avg_time)
+        logging.info('Max time: %s', max_time)
+        self.wait_for_client_after_changing_ccd(True)
+        if len(times) != self.RUNS:
+            raise error.TestFail('did not generate %d keys' % self.RUNS)
+        max_threshold = self.get_key_attr('MAX_THRESHOLD')
+        if max_time > max_threshold:
+            raise error.TestFail('MAX time %r is over the acceptable '
+                                 'threshold(%dms)' % (max_time, max_threshold))
+        avg_threshold = self.get_key_attr('AVG_THRESHOLD')
+        if avg_time > avg_threshold:
+            raise error.TestFail('Average time %r is over the acceptable '
+                                 'threshold(%dms)' % (avg_time, avg_threshold))
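
firmware_Cr50Keygen above runs trunks_client in a shell loop and scrapes the per-key generation time out of the console output with TIME_RE before comparing the average and maximum against the per-key-type thresholds. A small sketch of just that parsing and threshold logic; the sample output below is invented for illustration (only the 'KeyPair took <ms> ' fragment is implied by the regex), and the thresholds are the RSA values from the test:

import re

TIME_RE = r'KeyPair took (\d+) '
RSA_AVG_THRESHOLD = 8000    # ms, from the test above
RSA_MAX_THRESHOLD = 30000   # ms

# Hypothetical stdout from the 'for i in {1..N} ; do ... ; done' loop; the
# real trunks_client wording may differ.
sample_stdout = """\
1
KeyPair took 4210 ms
2
KeyPair took 5134 ms
3
KeyPair took 6950 ms
"""

times = [int(t) for t in re.findall(TIME_RE, sample_stdout)]
avg_time = sum(times) / len(times)
max_time = max(times)
assert max_time <= RSA_MAX_THRESHOLD, 'max keygen time over threshold'
assert avg_time <= RSA_AVG_THRESHOLD, 'average keygen time over threshold'
print(times, avg_time, max_time)
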
diff --git a/server/site_tests/firmware_Cr50Open/control.ccd_open_restricted b/server/site_tests/firmware_Cr50Open/control.ccd_open_restricted
index c635440..3e03219 100644
--- a/server/site_tests/firmware_Cr50Open/control.ccd_open_restricted
+++ b/server/site_tests/firmware_Cr50Open/control.ccd_open_restricted
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify the console can be opened by pressing the power button after the device
diff --git a/server/site_tests/firmware_Cr50Open/control.ccd_open_unrestricted b/server/site_tests/firmware_Cr50Open/control.ccd_open_unrestricted
index 3be9ab0..a093a1a 100644
--- a/server/site_tests/firmware_Cr50Open/control.ccd_open_unrestricted
+++ b/server/site_tests/firmware_Cr50Open/control.ccd_open_unrestricted
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify the console can be opened by pressing the power button after the device
diff --git a/server/site_tests/firmware_Cr50Open/control.faft_cr50_tot b/server/site_tests/firmware_Cr50Open/control.faft_cr50_tot
index 5928707..11e8a62 100644
--- a/server/site_tests/firmware_Cr50Open/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50Open/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify the console can be opened by pressing the power button after the device
diff --git a/server/site_tests/firmware_Cr50Open/firmware_Cr50Open.py b/server/site_tests/firmware_Cr50Open/firmware_Cr50Open.py
index cb114e3..b76bb7e 100644
--- a/server/site_tests/firmware_Cr50Open/firmware_Cr50Open.py
+++ b/server/site_tests/firmware_Cr50Open/firmware_Cr50Open.py
@@ -26,7 +26,7 @@
 
         self.ccd_open_restricted = ccd_open_restricted
         self.fast_ccd_open(enable_testlab=True)
-        self.cr50.send_command('ccd reset')
+        self.cr50.ccd_reset()
         self.cr50.set_ccd_level('lock')
 
 
@@ -51,6 +51,7 @@
         try:
             self.cr50.set_ccd_level('open')
         except error.TestFail as e:
+            self.cr50.check_for_console_errors('ccd open from console')
             if not batt_pres:
                 raise error.TestFail('Unable to open cr50 from console with '
                                      'batt disconnected: %s' % str(e))
@@ -68,7 +69,8 @@
         self.cr50.set_ccd_level('lock')
 
         if not batt_pres:
-            cr50_utils.GSCTool(self.host, ['-a', '-o'])
+            cr50_utils.GSCTool(self.host, ['-a', '-o'],
+                               expect_reboot=not batt_pres)
             # Wait long enough for cr50 to open ccd and wipe the tpm.
             time.sleep(10)
             if self.cr50.OPEN != self.cr50.get_ccd_level():
@@ -80,6 +82,7 @@
             self.ccd_open_from_ap()
         except error.TestFail as e:
             logging.info(e)
+            self.cr50.check_for_console_errors('ccd open from ap')
             # ccd open should work if the device is in dev mode or ccd open
             # isn't restricted. If open failed for some reason raise the error.
             if dev_mode or not self.ccd_open_restricted:
diff --git a/server/site_tests/firmware_Cr50OpenWhileAPOff/control b/server/site_tests/firmware_Cr50OpenWhileAPOff/control
index 808c2ee..674b67a 100644
--- a/server/site_tests/firmware_Cr50OpenWhileAPOff/control
+++ b/server/site_tests/firmware_Cr50OpenWhileAPOff/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify the console can be opened while the AP is off.
diff --git a/server/site_tests/firmware_Cr50OpenWhileAPOff/control.faft_cr50_tot b/server/site_tests/firmware_Cr50OpenWhileAPOff/control.faft_cr50_tot
index 932c174..55eda46 100644
--- a/server/site_tests/firmware_Cr50OpenWhileAPOff/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50OpenWhileAPOff/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify the console can be opened while the AP is off.
diff --git a/server/site_tests/firmware_Cr50OpenWhileAPOff/firmware_Cr50OpenWhileAPOff.py b/server/site_tests/firmware_Cr50OpenWhileAPOff/firmware_Cr50OpenWhileAPOff.py
index c3b0194..2ff9891 100644
--- a/server/site_tests/firmware_Cr50OpenWhileAPOff/firmware_Cr50OpenWhileAPOff.py
+++ b/server/site_tests/firmware_Cr50OpenWhileAPOff/firmware_Cr50OpenWhileAPOff.py
@@ -35,11 +35,16 @@
             raise error.TestNAError('Test can only be run on devices with '
                                     'access to the Cr50 console')
 
+        # c2d2 uses cr50 for ec reset. The setting doesn't survive deep sleep.
+        # This test needs ec reset to survive deep sleep to keep the AP off.
+        if 'c2d2' in self.servo.get_servo_type():
+            raise error.TestNAError('Cannot rely on ecrst with c2d2')
+
         # TODO(mruthven): replace with dependency on servo v4 with servo micro
         # and type c cable.
-        if (self.servo.get_servo_version(active=True) !=
-            'servo_v4_with_servo_micro'):
-            raise error.TestNAError('Run using servo v4 with servo micro')
+        if ('servo_v4' not in self.servo.get_servo_type()
+                    or not self.servo.main_device_is_flex()):
+            raise error.TestNAError('Must use servo v4 with servo_micro')
 
         if not self.cr50.servo_dts_mode_is_valid():
             raise error.TestNAError('Plug in servo v4 type c cable into ccd '
@@ -47,7 +52,7 @@
 
         self.fast_ccd_open(enable_testlab=True)
         # make sure password is cleared.
-        self.cr50.send_command('ccd reset')
+        self.cr50.ccd_reset()
         # Set GscFullConsole to Always, so we can always use gpioset.
         self.cr50.set_cap('GscFullConsole', 'Always')
         # You can only open cr50 from the console if a password is set. Set
@@ -141,7 +146,7 @@
         # ccdstate which is useful for debugging. Do that first, so it always
         # happens.
         if not self.cr50.ap_is_on() and state == 'on':
-            self.servo.power_short_press()
+            self.servo.power_normal_press()
             time.sleep(self.SHORT_DELAY)
 
 
diff --git a/server/site_tests/firmware_Cr50PartialBoardId/control b/server/site_tests/firmware_Cr50PartialBoardId/control
index d145c4f..97de5ab 100644
--- a/server/site_tests/firmware_Cr50PartialBoardId/control
+++ b/server/site_tests/firmware_Cr50PartialBoardId/control
@@ -11,6 +11,7 @@
 TIME = "MEDIUM"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify Cr50 handles partial board ids correctly. If only the flags are set, the
diff --git a/server/site_tests/firmware_Cr50PartialBoardId/firmware_Cr50PartialBoardId.py b/server/site_tests/firmware_Cr50PartialBoardId/firmware_Cr50PartialBoardId.py
index bb8529b..f9b291b 100644
--- a/server/site_tests/firmware_Cr50PartialBoardId/firmware_Cr50PartialBoardId.py
+++ b/server/site_tests/firmware_Cr50PartialBoardId/firmware_Cr50PartialBoardId.py
@@ -3,6 +3,7 @@
 # found in the LICENSE file.
 
 import logging
+import six
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import cr50_utils
@@ -20,7 +21,7 @@
     # Brand used for testing. It doesn't matter what this is.
     DEFAULT_BRAND = 'ZZAF'
 
-    WHITELABEL_FLAGS = 0x3f80
+    ALLOW_FLAGS = 0x3f80
     OTHER_FLAGS = 0x7f7f
 
     SUCCESS = ''
@@ -45,9 +46,9 @@
         logging.info('Test Brand: %r', self.test_brand)
         self.image_flags = int(bid.rsplit(':', 1)[-1], 16) if bid else 0
         # The image may have non-zero flags. Use test flags as close to the
-        # whitelabel flags as possible, but make sure they can be used with
+        # allowed flags as possible, but make sure they can be used with
         # the running image.
-        self.test_flags = self.WHITELABEL_FLAGS | self.image_flags
+        self.test_flags = self.ALLOW_FLAGS | self.image_flags
         self.other_flags = self.OTHER_FLAGS | self.image_flags
 
 
@@ -71,7 +72,7 @@
         """Returns a string representation of the board id tuple."""
         bid_str_fields = []
         for field in bid:
-            if isinstance(field, str):
+            if isinstance(field, six.string_types):
                 bid_str_fields.append(field)
             elif isinstance(field, int):
                 bid_str_fields.append(hex(field))
@@ -159,7 +160,7 @@
 
         self.eraseflashinfo()
         # Plain whitelabel flags will run on any board id locked image.
-        bid = (cr50_utils.ERASED_BID_STR, None, self.WHITELABEL_FLAGS)
+        bid = (cr50_utils.ERASED_BID_STR, None, self.ALLOW_FLAGS)
         self.set_board_id_check_response(bid, cr50_utils.ERASED_CHIP_BID,
                                          self.ERR_BID_MISMATCH)
         # Previous board id was rejected. The board id can still be set.
diff --git a/server/site_tests/firmware_Cr50Password/control b/server/site_tests/firmware_Cr50Password/control
index a39d3a3..d6a15c1 100644
--- a/server/site_tests/firmware_Cr50Password/control
+++ b/server/site_tests/firmware_Cr50Password/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify the password can be set and changed."""
 
diff --git a/server/site_tests/firmware_Cr50Password/control.faft_cr50_tot b/server/site_tests/firmware_Cr50Password/control.faft_cr50_tot
index 10bb8f9..9ce0ba5 100644
--- a/server/site_tests/firmware_Cr50Password/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50Password/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify the password can be set and changed.
diff --git a/server/site_tests/firmware_Cr50Password/firmware_Cr50Password.py b/server/site_tests/firmware_Cr50Password/firmware_Cr50Password.py
index 1bfe59e..28756e2 100644
--- a/server/site_tests/firmware_Cr50Password/firmware_Cr50Password.py
+++ b/server/site_tests/firmware_Cr50Password/firmware_Cr50Password.py
@@ -2,6 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import logging
+
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.cr50_test import Cr50Test
 
@@ -17,7 +19,7 @@
         # Make sure to enable testlab mode, so we can guarantee the password
         # can be cleared.
         self.fast_ccd_open(enable_testlab=True)
-        self.cr50.send_command('ccd reset')
+        self.cr50.ccd_reset()
 
         # Set the password.
         self.set_ccd_password(self.CCD_PASSWORD)
@@ -25,9 +27,13 @@
             raise error.TestFail('Failed to set password')
 
         # Test 'ccd reset' clears the password.
-        self.cr50.send_command('ccd reset')
+        self.cr50.ccd_reset()
         if not self.cr50.password_is_reset():
             raise error.TestFail('ccd reset did not clear the password')
+        # Set OpenFromUSB to IfOpened, so the test will only be able to open
+        # ccd with a console command if the password is set. This is cleared
+        # in Cr50Test cleanup.
+        self.cr50.set_cap('OpenFromUSB', 'IfOpened')
 
         # Set the password again while cr50 is open.
         self.set_ccd_password(self.CCD_PASSWORD)
@@ -38,17 +44,35 @@
         # It needs to be cleared first.
         self.set_ccd_password(self.NEW_PASSWORD, expect_error=True)
 
+        self.cr50.reboot()
+        if self.cr50.password_is_reset():
+            raise error.TestFail('Password cleared after reboot')
+
+        # Verify ccd can't be opened with the wrong password.
+        try:
+            self.cr50.set_ccd_level('open', self.NEW_PASSWORD)
+        except error.TestFail as e:
+            logging.info('Cr50 successfully rejected ccd open: %s', e)
+        else:
+            raise error.TestFail('Opened ccd with incorrect password')
+
+        # Verify ccd can be opened with the correct password.
+        self.cr50.set_ccd_level('open', self.CCD_PASSWORD)
+
         self.cr50.set_ccd_level('lock')
         # The password can't be cleared while the console is locked.
         self.set_ccd_password('clear:' + self.CCD_PASSWORD, expect_error=True)
 
-        self.cr50.send_command('ccd unlock ' + self.CCD_PASSWORD)
-        # The password can be cleared while the console is unlocked.
-        self.set_ccd_password('clear:' + self.CCD_PASSWORD)
+        if self.cr50.unlock_is_supported():
+            self.cr50.send_command('ccd unlock ' + self.CCD_PASSWORD)
+            # The password can be cleared while the console is unlocked.
+            self.set_ccd_password('clear:' + self.CCD_PASSWORD)
 
-        # Set the password again and lock the console.
-        self.cr50.send_command('ccd testlab open')
-        self.set_ccd_password(self.CCD_PASSWORD)
+            # Open the console, set the password again.
+            self.cr50.send_command('ccd testlab open')
+            self.set_ccd_password(self.CCD_PASSWORD)
+        else:
+            # Open the console.
+            self.cr50.send_command('ccd testlab open')
 
         # The password can't be cleared using the wrong password.
         self.set_ccd_password('clear:' + self.CCD_PASSWORD.lower(),
@@ -61,16 +85,20 @@
         # The password can be set to anything when there isn't one set.
         self.set_ccd_password(self.NEW_PASSWORD)
         if self.cr50.password_is_reset():
-            raise error.TestFail('Failed to clear password')
+            raise error.TestFail('Failed to set password')
 
 
         self.cr50.send_command('ccd testlab open')
-        self.cr50.send_command('ccd reset')
-        self.host.run('gsctool -a -U')
+        self.cr50.ccd_reset()
+
+        if not self.cr50.unlock_is_supported():
+            return
 
         # Run through the same steps when the password was set with the console
         # unlocked.
 
+        self.host.run('gsctool -a -U')
+
         # Set the password when the console is unlocked.
         self.set_ccd_password(self.CCD_PASSWORD)
 
diff --git a/server/site_tests/firmware_Cr50PinWeaverServer/control b/server/site_tests/firmware_Cr50PinWeaverServer/control
index 2280176..9220afe 100644
--- a/server/site_tests/firmware_Cr50PinWeaverServer/control
+++ b/server/site_tests/firmware_Cr50PinWeaverServer/control
@@ -7,9 +7,10 @@
 AUTHOR = "allenwebb"
 NAME = "firmware_Cr50PinWeaverServer"
 PURPOSE = "Validate PinWeaver functionality on Cr50"
-ATTRIBUTES = "suite:experimental, suite:faft_cr50_pvt, suite:faft_cr50_prepvt"
+ATTRIBUTES = "suite:experimental, suite:faft_cr50_pvt, suite:faft_cr50_prepvt, suite:distributed_lab_qual_faft"
 TIME = "SHORT"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Check that PinWeaver functionality on Cr50 works as intended across hard
diff --git a/server/site_tests/firmware_Cr50PinWeaverServer/control.faft_cr50_tot b/server/site_tests/firmware_Cr50PinWeaverServer/control.faft_cr50_tot
index a217ced..cc3d35d 100644
--- a/server/site_tests/firmware_Cr50PinWeaverServer/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50PinWeaverServer/control.faft_cr50_tot
@@ -10,6 +10,7 @@
 ATTRIBUTES = "suite:faft_cr50_tot"
 TIME = "SHORT"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Check that PinWeaver functionality on Cr50 works as intended across hard
diff --git a/server/site_tests/firmware_Cr50PinWeaverServer/firmware_Cr50PinWeaverServer.py b/server/site_tests/firmware_Cr50PinWeaverServer/firmware_Cr50PinWeaverServer.py
index 63990e9..b59b796 100644
--- a/server/site_tests/firmware_Cr50PinWeaverServer/firmware_Cr50PinWeaverServer.py
+++ b/server/site_tests/firmware_Cr50PinWeaverServer/firmware_Cr50PinWeaverServer.py
@@ -16,8 +16,8 @@
     empty path in a Merkle tree with the specified parameters.
     """
     num_siblings = 2 ^ bits_per_level - 1
-    child = '\0' * 32
-    result = ''
+    child = b'\0' * 32
+    result = b''
     for _ in range(height):
         part = child * num_siblings
         child = sha256(part + child).digest()
@@ -56,10 +56,13 @@
         # Label 0 is guaranteed to be empty because the self test above resets
         # the tree and removes the leaf it adds.
         label = 0
-        h_aux = compute_empty_tree_auxilary_hashes().encode('hex')
-        le_secret = sha256('1234').hexdigest()
-        he_secret = sha256('ag3#l4Z9').hexdigest()
-        reset_secret = sha256('W8oE@Ja2mq.R1').hexdigest()
+        hashes = compute_empty_tree_auxilary_hashes()
+        # TODO(mruthven): always use hashes.hex() after python3 migration.
+        h_aux = hashes.hex() if hasattr(hashes,
+                                        'hex') else hashes.encode('hex')
+        le_secret = sha256(b'1234').hexdigest()
+        he_secret = sha256(b'ag3#l4Z9').hexdigest()
+        reset_secret = sha256(b'W8oE@Ja2mq.R1').hexdigest()
         delay_schedule = '5 %d' % 0x00ffffffff
         result = pinweaver_client.InsertLeaf(host, label, h_aux, le_secret,
                                              he_secret, reset_secret,
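
The Python 3 conversion of firmware_Cr50PinWeaverServer above moves the helper to byte strings and hex-encodes the auxiliary hashes with a small compatibility shim, since bytes.hex() only exists on Python 3. A minimal sketch of that encoding step, using a single all-zero node digest as a stand-in input rather than the full auxiliary-hash computation:

from hashlib import sha256

# One 32-byte all-zero node, hashed the way the helper hashes sibling groups;
# this stands in for the real auxiliary-hash bytes.
digest = sha256(b'\0' * 32).digest()

# Compatibility shim mirrored from the diff: bytes.hex() on Python 3, with a
# fallback to encode('hex') on Python 2.
h_aux = digest.hex() if hasattr(digest, 'hex') else digest.encode('hex')

# Secrets are hex digests of byte strings, as in the updated test.
le_secret = sha256(b'1234').hexdigest()
he_secret = sha256(b'ag3#l4Z9').hexdigest()
print(h_aux, le_secret, he_secret)
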
diff --git a/server/site_tests/firmware_Cr50RMAOpen/control b/server/site_tests/firmware_Cr50RMAOpen/control
index 455b576..08372c5 100644
--- a/server/site_tests/firmware_Cr50RMAOpen/control
+++ b/server/site_tests/firmware_Cr50RMAOpen/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify Cr50 RMA behavoior
diff --git a/server/site_tests/firmware_Cr50RMAOpen/control.faft_cr50_tot b/server/site_tests/firmware_Cr50RMAOpen/control.faft_cr50_tot
index c3ec345..54a8135 100644
--- a/server/site_tests/firmware_Cr50RMAOpen/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50RMAOpen/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify Cr50 RMA behavoior
diff --git a/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py b/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py
index 03f4b3d..3bad2af 100644
--- a/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py
+++ b/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py
@@ -40,8 +40,6 @@
     LIMIT_CLI = '(RMA Auth error 0x504|rma_auth\s+>)'
     LIMIT_AP = 'error 4'
     ERR_DISABLE_AP = 'error 7'
-    DISABLE_WARNING = ('mux_client_request_session: read from master failed: '
-            'Broken pipe')
     # GSCTool exit statuses
     UPDATE_ERROR = 3
     SUCCESS = 0
@@ -75,7 +73,7 @@
         # Disable all capabilities at the start of the test. Go ahead and enable
         # testlab mode if it isn't enabled.
         self.fast_ccd_open(enable_testlab=True)
-        self.cr50.send_command('ccd reset')
+        self.cr50.ccd_reset(servo_en=False)
         self.cr50.set_ccd_level('lock')
         # Make sure all capabilities are set to default.
         try:
@@ -197,7 +195,7 @@
     def fake_rma_open(self):
         """Use individual commands to enter the same state as factory mode"""
         self.cr50.send_command('ccd testlab open')
-        self.cr50.send_command('ccd reset factory')
+        self.cr50.ccd_reset_factory()
         self.cr50.send_command('wp disable atboot')
 
 
diff --git a/server/site_tests/firmware_Cr50RddG3/control b/server/site_tests/firmware_Cr50RddG3/control
index 7214599..ea6836b 100644
--- a/server/site_tests/firmware_Cr50RddG3/control
+++ b/server/site_tests/firmware_Cr50RddG3/control
@@ -7,10 +7,11 @@
 AUTHOR = "mruthven"
 NAME = "firmware_Cr50RddG3"
 PURPOSE = "Verify Rdd in G3."
-ATTRIBUTES = "suite:faft_cr50_prepvt, suite:faft_cr50_pvt"
+ATTRIBUTES = "suite:faft_ccd, suite:faft_cr50_prepvt, suite:faft_cr50_pvt"
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify Rdd connect and disconnect in G3.
 
diff --git a/server/site_tests/firmware_Cr50RddG3/control.faft_cr50_tot b/server/site_tests/firmware_Cr50RddG3/control.faft_cr50_tot
index 613ef3e..d4461d1 100644
--- a/server/site_tests/firmware_Cr50RddG3/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50RddG3/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify Rdd in G3.
 
diff --git a/server/site_tests/firmware_Cr50RddG3/firmware_Cr50RddG3.py b/server/site_tests/firmware_Cr50RddG3/firmware_Cr50RddG3.py
index c953478..ef980b6 100644
--- a/server/site_tests/firmware_Cr50RddG3/firmware_Cr50RddG3.py
+++ b/server/site_tests/firmware_Cr50RddG3/firmware_Cr50RddG3.py
@@ -17,6 +17,16 @@
     # Cr50 debounces disconnects. We need to wait before checking Rdd state
     RDD_DEBOUNCE = 3
 
+    def initialize(self, host, cmdline_args, full_args):
+        """Initialize the test"""
+        super(firmware_Cr50RddG3, self).initialize(host, cmdline_args,
+                                                   full_args)
+
+        # TODO(b/186535695): EC hibernate puts cr50 into reset, so the test
+        # can't verify cr50 behavior while the EC is in hibernate.
+        if 'c2d2' in self.servo.get_servo_type():
+            raise error.TestNAError('Cannot run test with c2d2')
+
     def rdd_is_connected(self):
         """Return True if Cr50 detects Rdd."""
         time.sleep(2)
@@ -32,8 +42,7 @@
         return None
 
 
-    def check_rdd_status(self, dts_mode, err_desc, capabilities=None,
-                         irregular_cap=False):
+    def check_rdd_status(self, dts_mode, err_desc, capabilities=None):
         """Check the rdd state.
 
         @param dts_mode: 'on' if Rdd should be connected. 'off' if it should be
@@ -41,8 +50,6 @@
         @param err_desc: Description of the rdd error.
         @param capabilities: ignore err_desc if any of the capabilities from
                              this list are found in the faft board config.
-        @param irregular_cap: If True, don't fail when the behavior doesn't
-                              happen and the board capability is present.
         @param raises TestFail if rdd state doesn't match the expected rdd state
                       or if it does and the board has the capability set.
         """
@@ -59,11 +66,12 @@
             else:
                 err_msg = err_desc
         elif board_cap:
-            err_msg = 'Board has %r, but %r did not occur.' % (board_cap,
-                                                               err_desc)
-            if irregular_cap:
-                logging.info('Irregular Cap behavior %s', err_msg)
-                err_msg = None
+            # Log a warning if the board has an Rdd issue, but it didn't show up
+            # during this test run.
+            logging.warning(
+                    'Irregular Cap behavior: Board has %r, but %r did '
+                    'not occur.', board_cap, err_desc)
+            err_msg = None
         if err_msg:
             logging.warning(err_msg)
             self.rdd_failures.append(err_msg)
@@ -112,8 +120,9 @@
 
         logging.info('Checking Rdd can be disconnected in G3.')
         self.servo.set_dts_mode('off')
-        self.check_rdd_status('off', 'Cr50 did not detect Rdd disconnect in G3',
-                              ['rdd_leakage'], irregular_cap=True)
+        self.check_rdd_status('off',
+                              'Cr50 did not detect Rdd disconnect in G3',
+                              ['rdd_leakage'])
         self._try_to_bring_dut_up()
         if self.rdd_failures:
             raise error.TestFail('Found Rdd issues: %s' % (self.rdd_failures))
diff --git a/server/site_tests/firmware_Cr50RejectUpdate/control b/server/site_tests/firmware_Cr50RejectUpdate/control
index 5be56bd..57d0536 100644
--- a/server/site_tests/firmware_Cr50RejectUpdate/control
+++ b/server/site_tests/firmware_Cr50RejectUpdate/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify cr50 rejects images before trying to update."""
 
diff --git a/server/site_tests/firmware_Cr50RejectUpdate/control.faft_cr50_tot b/server/site_tests/firmware_Cr50RejectUpdate/control.faft_cr50_tot
index c25647e..1c698cf 100644
--- a/server/site_tests/firmware_Cr50RejectUpdate/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50RejectUpdate/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify cr50 rejects images before trying to update.
diff --git a/server/site_tests/firmware_Cr50SetBoardId/control b/server/site_tests/firmware_Cr50SetBoardId/control
index e920c3e..d49d501 100644
--- a/server/site_tests/firmware_Cr50SetBoardId/control
+++ b/server/site_tests/firmware_Cr50SetBoardId/control
@@ -11,6 +11,7 @@
 TIME = "MEDIUM"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify cr50-set-board-id.sh sets the right board id and flags with the given
diff --git a/server/site_tests/firmware_Cr50SetBoardId/firmware_Cr50SetBoardId.py b/server/site_tests/firmware_Cr50SetBoardId/firmware_Cr50SetBoardId.py
index 7741ea5..9783ebd 100644
--- a/server/site_tests/firmware_Cr50SetBoardId/firmware_Cr50SetBoardId.py
+++ b/server/site_tests/firmware_Cr50SetBoardId/firmware_Cr50SetBoardId.py
@@ -6,7 +6,6 @@
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import cr50_utils
-from autotest_lib.server.cros import filesystem_util
 from autotest_lib.server.cros.faft.cr50_test import Cr50Test
 
 
@@ -70,7 +69,7 @@
         if self._bid_flags == self.TEST_MP_FLAGS:
             raise error.TestNAError('cr50-set-board-id cannot be used with '
                                     'test mp images.')
-        filesystem_util.make_rootfs_writable(self.host)
+        self.make_rootfs_writable()
         self.host.run('rm %s' % cr50_utils.CR50_PREPVT, ignore_status=True)
         self.host.run('rm %s' % cr50_utils.CR50_PROD, ignore_status=True)
 
@@ -142,7 +141,7 @@
         self.run_script(self.ERROR_ALREADY_SET, 'dvt', 'TEST')
 
         # Verify each stage sets the right flags
-        for phase, flags in self.PHASE_FLAGS_DICT.iteritems():
+        for phase, flags in self.PHASE_FLAGS_DICT.items():
             self.eraseflashinfo()
 
             expected_response = self.SUCCESS
diff --git a/server/site_tests/firmware_Cr50ShortECC/control b/server/site_tests/firmware_Cr50ShortECC/control
index bff26d7..2bc6bdc 100644
--- a/server/site_tests/firmware_Cr50ShortECC/control
+++ b/server/site_tests/firmware_Cr50ShortECC/control
@@ -11,7 +11,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test uses trunks_client to verify that Cr50 can handle short ECC keys.
diff --git a/server/site_tests/firmware_Cr50Testlab/control b/server/site_tests/firmware_Cr50Testlab/control
index bbdb21d..6fe3c36 100644
--- a/server/site_tests/firmware_Cr50Testlab/control
+++ b/server/site_tests/firmware_Cr50Testlab/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify the testlab mode can be changed when ccd is open.
diff --git a/server/site_tests/firmware_Cr50Testlab/control.faft_cr50_tot b/server/site_tests/firmware_Cr50Testlab/control.faft_cr50_tot
index 799c419..7760287 100644
--- a/server/site_tests/firmware_Cr50Testlab/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50Testlab/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify the testlab mode can be changed when ccd is open.
diff --git a/server/site_tests/firmware_Cr50Testlab/firmware_Cr50Testlab.py b/server/site_tests/firmware_Cr50Testlab/firmware_Cr50Testlab.py
index 6ee0abf..ebf5a28 100644
--- a/server/site_tests/firmware_Cr50Testlab/firmware_Cr50Testlab.py
+++ b/server/site_tests/firmware_Cr50Testlab/firmware_Cr50Testlab.py
@@ -6,6 +6,7 @@
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.cr50_test import Cr50Test
+from autotest_lib.server.cros.servo import chrome_ti50
 
 
 class firmware_Cr50Testlab(Cr50Test):
@@ -25,11 +26,24 @@
                                     'access to the Cr50 console')
         if self.servo.main_device_is_ccd():
             raise error.TestNAError('Use a flex cable instead of CCD cable.')
+        if self.servo.main_device_uses_gsc_drv():
+            raise error.TestNAError('Cannot run with c2d2 until cold_reset '
+                                    'issue is resolved')
+
+        if isinstance(self.cr50, chrome_ti50.ChromeTi50):
+            self.BASIC_ERROR = 'Command \'ccd\' failed'
+            self.INVALID_PARAM = 'Param2'
 
         # Get the current reset count, so we can check that there haven't been
         # any cr50 resets at any point during the test.
         self.start_reset_count = self.cr50.get_reset_count()
 
+    def cleanup(self):
+        """Reenable testlab mode."""
+        try:
+            self.fast_ccd_open(enable_testlab=True)
+        finally:
+            super(firmware_Cr50Testlab, self).cleanup()
 
     def try_testlab(self, mode, err=''):
         """Try to modify ccd testlab mode.
diff --git a/server/site_tests/firmware_Cr50TpmManufactured/control b/server/site_tests/firmware_Cr50TpmManufactured/control
index ebe37ff..4ff4431 100644
--- a/server/site_tests/firmware_Cr50TpmManufactured/control
+++ b/server/site_tests/firmware_Cr50TpmManufactured/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Check if the TPM is manufactured.
 
diff --git a/server/site_tests/firmware_Cr50TpmMode/control b/server/site_tests/firmware_Cr50TpmMode/control
index 96ba46e..8a4ee50 100644
--- a/server/site_tests/firmware_Cr50TpmMode/control
+++ b/server/site_tests/firmware_Cr50TpmMode/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify TPM disabling and getting back enabled after reset."""
 
diff --git a/server/site_tests/firmware_Cr50TpmMode/control.faft_cr50_tot b/server/site_tests/firmware_Cr50TpmMode/control.faft_cr50_tot
index 2b1f13a..386cf64 100644
--- a/server/site_tests/firmware_Cr50TpmMode/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50TpmMode/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify TPM disabling and getting back enabled after reset.
diff --git a/server/site_tests/firmware_Cr50TpmMode/firmware_Cr50TpmMode.py b/server/site_tests/firmware_Cr50TpmMode/firmware_Cr50TpmMode.py
index b2b67d9..047f74d 100644
--- a/server/site_tests/firmware_Cr50TpmMode/firmware_Cr50TpmMode.py
+++ b/server/site_tests/firmware_Cr50TpmMode/firmware_Cr50TpmMode.py
@@ -9,27 +9,14 @@
 
 
 class firmware_Cr50TpmMode(Cr50Test):
-    """Verify TPM disabling and getting back enabled after reset.
-
-    Attributes:
-        can_set_tpm: True if board property has 'BOARD_ALLOW_CHANGE_TPM_MODE'.
-                     False, otherwise.
-    """
+    """Verify TPM disabling and getting back enabled after reset."""
     version = 1
 
-    def initialize(self, host, cmdline_args, full_args):
-        super(firmware_Cr50TpmMode, self).initialize(host, cmdline_args,
-                full_args)
-
-        self.can_set_tpm = self.cr50.uses_board_property(
-                                                  'BOARD_ALLOW_CHANGE_TPM_MODE')
-
     def init_tpm_mode(self):
         """Reset the device."""
-        if self.can_set_tpm:
-            logging.info('Reset')
-            self.host.reset_via_servo()
-            self.switcher.wait_for_client()
+        logging.info('Reset')
+        self.host.reset_via_servo()
+        self.switcher.wait_for_client()
 
     def cleanup(self):
         """Initialize TPM mode by resetting CR50"""
@@ -60,8 +47,10 @@
         """
         mode_param = 'disable' if disable_tpm else 'enable'
         opt_text = '--tpm_mode' if long_opt else '-m'
-        return cr50_utils.GSCTool(self.host,
-                 ['-a', opt_text, mode_param]).stdout.strip()
+        result = cr50_utils.GSCTool(
+                self.host, ['-a', opt_text, mode_param]).stdout.strip()
+        logging.info('TPM Mode: %r', result)
+        return result
 
     def run_test_tpm_mode(self, disable_tpm, long_opt):
         """Run a test for the case of either disabling TPM or enabling.
@@ -100,20 +89,8 @@
         # Change TPM Mode
         logging.info('Set TPM Mode')
 
-        if self.can_set_tpm:
-            output_log = self.set_tpm_mode(disable_tpm, long_opt)
-            logging.info(output_log)
-        else:
-            try:
-                output_log = self.set_tpm_mode(disable_tpm, long_opt)
-            except error.AutoservRunError as e:
-                logging.info('Failed to set TPM mode as expected')
-                logging.info(str(e))
-            else:
-                raise error.TestFail('Setting TPM mode should not be allowed')
-            finally:
-                logging.info(output_log)
-            return
+        output_log = self.set_tpm_mode(disable_tpm, long_opt)
+        logging.info(output_log)
 
         # Check the result of TPM Mode.
         if disable_tpm:
diff --git a/server/site_tests/firmware_Cr50U2fCommands/control b/server/site_tests/firmware_Cr50U2fCommands/control
index c7a1ccc..694b38d 100644
--- a/server/site_tests/firmware_Cr50U2fCommands/control
+++ b/server/site_tests/firmware_Cr50U2fCommands/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Tests basic key generation and signing U2F commands.
diff --git a/server/site_tests/firmware_Cr50U2fCommands/control.faft_cr50_tot b/server/site_tests/firmware_Cr50U2fCommands/control.faft_cr50_tot
index 317e249..50340da 100644
--- a/server/site_tests/firmware_Cr50U2fCommands/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50U2fCommands/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Tests basic key generation and signing U2F commands.
diff --git a/server/site_tests/firmware_Cr50U2fCommands/firmware_Cr50U2fCommands.py b/server/site_tests/firmware_Cr50U2fCommands/firmware_Cr50U2fCommands.py
index a9b02b9..81ae62f 100644
--- a/server/site_tests/firmware_Cr50U2fCommands/firmware_Cr50U2fCommands.py
+++ b/server/site_tests/firmware_Cr50U2fCommands/firmware_Cr50U2fCommands.py
@@ -74,540 +74,555 @@
 UNUSED_AUTH_TIME_SECRET_HASH = '00' * 32
 
 def get_bytes(tpm_str, start, length):
-  return tpm_str[(start * 2):(start * 2 + length * 2)]
+    return tpm_str[(start * 2):(start * 2 + length * 2)]
 
 
 def assert_byte_length(str, len_bytes):
-  """Assert str represents a byte sequence len_bytes long"""
-  assert (len(str) / 2) == len_bytes
+    """Assert str represents a byte sequence len_bytes long"""
+    assert (len(str) / 2) == len_bytes
 
 
 def get_str_length_as_hex(str, additional_len=0):
-  """Get the length of str plus any additional_len as a hex string."""
-  assert (len(str) % 2) == 0
-  length_bytes = len(str) / 2
-  # hex() returns strings with a '0x' prefix, which we remove.
-  return hex(length_bytes + additional_len)[2:]
+    """Get the length of str plus any additional_len as a hex string."""
+    assert (len(str) % 2) == 0
+    length_bytes = len(str) // 2
+    # hex() returns strings with a '0x' prefix, which we remove.
+    return hex(length_bytes + additional_len)[2:]
 
 
 def check_response_size(response, expected_response, success_size):
-  """If the response is expected to be success, check it's size is as expected,
+    """If the response is expected to be success, check it's size is as expected,
 
      otherwise, check it is 0.
   """
-  response_size = response['length']
-  if expected_response == VENDOR_CMD_RESPONSE_SUCCESS:
-    if response_size != success_size:
-      raise error.TestFail(
-          'Invalid successful response size: {}'.format(response_size))
-  elif response_size != 0:
-    raise error.TestFail(
-        'Non-zero response size on failure: {}'.format(response_size))
+    response_size = response['length']
+    if expected_response == VENDOR_CMD_RESPONSE_SUCCESS:
+        if response_size != success_size:
+            raise error.TestFail('Invalid successful response size: {}'.format(
+                    response_size))
+    elif response_size != 0:
+        raise error.TestFail(
+                'Non-zero response size on failure: {}'.format(response_size))
 
 
 class firmware_Cr50U2fCommands(FirmwareTest):
-  """Tests the custom U2F commands in cr50"""
+    """Tests the custom U2F commands in cr50"""
 
-  version = 1
+    version = 1
 
-  def __send_vendor_cmd(self,
-                        vendor_cc,
-                        cmd_body,
-                        expected_response_code=VENDOR_CMD_RESPONSE_SUCCESS):
-    assert_byte_length(vendor_cc, VENDOR_CC_SIZE_BYTES)
+    def __send_vendor_cmd(self,
+                          vendor_cc,
+                          cmd_body,
+                          expected_response_code=VENDOR_CMD_RESPONSE_SUCCESS):
+        assert_byte_length(vendor_cc, VENDOR_CC_SIZE_BYTES)
 
-    cmd_size_str = get_str_length_as_hex(cmd_body, VENDOR_CMD_HEADER_SIZE_BYTES)
+        cmd_size_str = get_str_length_as_hex(cmd_body,
+                                             VENDOR_CMD_HEADER_SIZE_BYTES)
 
-    cmd = (
-        '8001'  # TPM_ST_NO_SESSIONS
-        '{:0>8}'  # Command Size (UINT32)
-        '20000000'  # CR50 Vendor Command (TPM CC)
-        '{}'  # Vendor Command Code (Subcommand Code, UINT16)
-        '{}'  # Command Body
-    ).format(cmd_size_str, vendor_cc, cmd_body)
+        cmd = (
+                '8001'  # TPM_ST_NO_SESSIONS
+                '{:0>8}'  # Command Size (UINT32)
+                '20000000'  # CR50 Vendor Command (TPM CC)
+                '{}'  # Vendor Command Code (Subcommand Code, UINT16)
+                '{}'  # Command Body
+        ).format(cmd_size_str, vendor_cc, cmd_body)
 
-    result = self.client.run('trunks_send --raw {}'.format(cmd)).stdout.strip()
+        result = self.client.run(
+                'trunks_send --raw {}'.format(cmd)).stdout.strip()
 
-    if get_bytes(result, 0, TPM_TAG_SIZE_BYTES) != '8001':
-      raise error.TestFail(
-          'Unexpected response tag from vendor command: {}'.format(result))
+        if get_bytes(result, 0, TPM_TAG_SIZE_BYTES) != '8001':
+            raise error.TestFail(
+                    'Unexpected response tag from vendor command: {}'.format(
+                            result))
 
-    response_size_bytes = int(
-        get_bytes(result, VENDOR_CMD_RESPONSE_SIZE_OFFSET,
-                  VENDOR_CMD_RESPONSE_SIZE_BYTES), 16)
+        response_size_bytes = int(
+                get_bytes(result, VENDOR_CMD_RESPONSE_SIZE_OFFSET,
+                          VENDOR_CMD_RESPONSE_SIZE_BYTES), 16)
 
-    if response_size_bytes < VENDOR_CMD_HEADER_SIZE_BYTES:
-      raise error.TestFail(
-          'Unexpected response length from vendor command: {}'.format(result))
+        if response_size_bytes < VENDOR_CMD_HEADER_SIZE_BYTES:
+            raise error.TestFail(
+                    'Unexpected response length from vendor command: {}'.
+                    format(result))
 
-    response_code = get_bytes(result, VENDOR_CMD_RESPONSE_CODE_OFFSET,
-                              VENDOR_CMD_RESPONSE_CODE_SIZE_BYTES)
+        response_code = get_bytes(result, VENDOR_CMD_RESPONSE_CODE_OFFSET,
+                                  VENDOR_CMD_RESPONSE_CODE_SIZE_BYTES)
 
-    if response_code != expected_response_code:
-      raise error.TestFail(
-          'Unexpected response received from vendor command: {}'.format(
-              response_code))
+        if response_code != expected_response_code:
+            raise error.TestFail(
+                    'Unexpected response received from vendor command: {}'.
+                    format(response_code))
 
-    response_vendor_cc = get_bytes(result, VENDOR_CMD_RESPONSE_CC_OFFSET,
-                                   VENDOR_CC_SIZE_BYTES)
+        response_vendor_cc = get_bytes(result, VENDOR_CMD_RESPONSE_CC_OFFSET,
+                                       VENDOR_CC_SIZE_BYTES)
 
-    if response_vendor_cc != vendor_cc:
-      raise error.TestFail(
-          'Received response for unexpected vendor command code: {}'.format(
-              response_vendor_cc))
+        if response_vendor_cc != vendor_cc:
+            raise error.TestFail(
+                    'Received response for unexpected vendor command code: {}'.
+                    format(response_vendor_cc))
 
-    response_body_size_bytes = (
-        response_size_bytes - VENDOR_CMD_HEADER_SIZE_BYTES)
+        response_body_size_bytes = (response_size_bytes -
+                                    VENDOR_CMD_HEADER_SIZE_BYTES)
 
-    return {
-        'length':
-            response_body_size_bytes,
-        'value':
-            get_bytes(result, VENDOR_CMD_HEADER_SIZE_BYTES,
-                      response_body_size_bytes)
-    }
+        return {
+                'length':
+                response_body_size_bytes,
+                'value':
+                get_bytes(result, VENDOR_CMD_HEADER_SIZE_BYTES,
+                          response_body_size_bytes)
+        }
 
-  def __u2f_sign(self, app_id, user_secret, key_handle, hash, flags,
-                 expected_response):
-    assert_byte_length(app_id, 32)
-    assert_byte_length(user_secret, 32)
-    assert_byte_length(key_handle, 64)
-    assert_byte_length(flags, 1)
+    def __u2f_sign(self, app_id, user_secret, key_handle, hash, flags,
+                   expected_response):
+        assert_byte_length(app_id, 32)
+        assert_byte_length(user_secret, 32)
+        assert_byte_length(key_handle, 64)
+        assert_byte_length(flags, 1)
 
-    response = self.__send_vendor_cmd(
-        VENDOR_CC_U2F_SIGN, '{}{}{}{}{}'.format(app_id, user_secret, key_handle,
-                                                hash, flags), expected_response)
+        response = self.__send_vendor_cmd(
+                VENDOR_CC_U2F_SIGN,
+                '{}{}{}{}{}'.format(app_id, user_secret, key_handle, hash,
+                                    flags), expected_response)
 
-    expected_response_size = VENDOR_CC_U2F_SIGN_RESPONSE_SIZE_BYTES
-    # 'check-only' requests don't have a response body.
-    if flags == '07':
-      expected_response_size = 0
+        expected_response_size = VENDOR_CC_U2F_SIGN_RESPONSE_SIZE_BYTES
+        # 'check-only' requests don't have a response body.
+        if flags == '07':
+            expected_response_size = 0
 
-    check_response_size(response, expected_response, expected_response_size)
+        check_response_size(response, expected_response,
+                            expected_response_size)
 
-  def __u2f_generate(self,
-                     app_id,
+    def __u2f_generate(self,
+                       app_id,
+                       user_secret,
+                       flags,
+                       expected_response=VENDOR_CMD_RESPONSE_SUCCESS):
+        assert_byte_length(app_id, 32)
+        assert_byte_length(user_secret, 32)
+        assert_byte_length(flags, 1)
+
+        response = self.__send_vendor_cmd(
+                VENDOR_CC_U2F_GENERATE,
+                '{}{}{}{}'.format(app_id, user_secret, flags,
+                                  UNUSED_AUTH_TIME_SECRET_HASH),
+                expected_response)
+
+        check_response_size(response, expected_response,
+                            VENDOR_CC_U2F_GENERATE_RESPONSE_SIZE_BYTES)
+
+        return {
+                'pubKey': response['value'][0:130],
+                'keyHandle': response['value'][130:258]
+        }
+
+    def __u2f_attest(self,
                      user_secret,
-                     flags,
-                     expected_response=VENDOR_CMD_RESPONSE_SUCCESS):
-    assert_byte_length(app_id, 32)
-    assert_byte_length(user_secret, 32)
-    assert_byte_length(flags, 1)
+                     format,
+                     data,
+                     expected_response=VENDOR_CMD_RESPONSE_SUCCESS,
+                     pad=False,
+                     truncated=False):
+        assert_byte_length(user_secret, 32)
+        assert_byte_length(format, 1)
 
-    response = self.__send_vendor_cmd(
-        VENDOR_CC_U2F_GENERATE, '{}{}{}{}'.format(app_id, user_secret, flags,
-                                                  UNUSED_AUTH_TIME_SECRET_HASH),
-        expected_response)
+        data_len_str = get_str_length_as_hex(data)
 
-    check_response_size(response, expected_response,
-                        VENDOR_CC_U2F_GENERATE_RESPONSE_SIZE_BYTES)
+        if truncated:
+            # Send one byte less than advertised in the data_len field
+            assert pad == False
+            assert len(data) >= 2
+            data = data[:len(data) - 2]
 
-    return {
-        'pubKey': response['value'][0:130],
-        'keyHandle': response['value'][130:258]
-    }
+        if pad:
+            # Max data size is 256 bytes
+            data = data + '0' * (512 - len(data))
 
-  def __u2f_attest(self,
-                   user_secret,
-                   format,
-                   data,
-                   expected_response=VENDOR_CMD_RESPONSE_SUCCESS,
-                   pad=False,
-                   truncated=False):
-    assert_byte_length(user_secret, 32)
-    assert_byte_length(format, 1)
+        response = self.__send_vendor_cmd(
+                VENDOR_CC_U2F_ATTEST,
+                '{}{}{}{}'.format(user_secret, format, data_len_str,
+                                  data), expected_response)
 
-    data_len_str = get_str_length_as_hex(data)
+        check_response_size(response, expected_response,
+                            VENDOR_CC_U2F_ATTEST_RESPONSE_SIZE_BYTES)
 
-    if truncated:
-      # Send 1 less byte of data than will be advertised in data_len field
-      assert pad == False
-      assert len(data) >= 2
-      data = data[:len(data) - 2]
+    def __test_generate_unique(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+        registration_2 = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    if pad:
-      # Max data size is 256 bytes
-      data = data + '0' * (512 - len(data))
+        if registration['pubKey'] == registration_2['pubKey']:
+            raise error.TestFail('Public keys not unique')
 
-    response = self.__send_vendor_cmd(
-        VENDOR_CC_U2F_ATTEST, '{}{}{}{}'.format(
-            user_secret, format, data_len_str, data), expected_response)
+        if registration['keyHandle'] == registration_2['keyHandle']:
+            raise error.TestFail('Key handles not unique')
 
-    check_response_size(response, expected_response,
-                        VENDOR_CC_U2F_ATTEST_RESPONSE_SIZE_BYTES)
+    def _safe_power_short_press(self):
+        """Stop powerd before pressing the power button."""
+        # Validating U2F requires pressing the power button. If those power button
+        # presses power off the AP, stop powerd before the test to ignore them.
+        if self.faft_config.ec_forwards_short_pp_press:
+            self.stop_powerd()
+        self.servo.power_short_press()
 
-  def __test_generate_unique(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
-    registration_2 = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_generate_sign_simple(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    if registration['pubKey'] == registration_2['pubKey']:
-      raise error.TestFail('Public keys not unique')
+        self._safe_power_short_press()
 
-    if registration['keyHandle'] == registration_2['keyHandle']:
-      raise error.TestFail('Key handles not unique')
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
 
-  def _safe_power_short_press(self):
-    """Stop powerd before pressing the power button."""
-    # Validating U2F requires pressing the power button. If those power button
-    # presses power off the AP, stop powerd before the test to ignore them.
-    if self.faft_config.ec_forwards_short_pp_press:
-        self.stop_powerd()
-    self.servo.power_short_press()
+    def __test_generate_with_presence(self):
+        # Wait 11 seconds to ensure no presence.
 
-  def __test_generate_sign_simple(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+        time.sleep(11)
 
-    self._safe_power_short_press()
+        self.__u2f_generate(
+                APP_ID,
+                USER_SECRET_1,
+                '01',  # U2F_AUTH_FLAG_TUP
+                VENDOR_CMD_RESPONSE_NOT_ALLOWED)
 
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
+        self._safe_power_short_press()
 
-  def __test_generate_with_presence(self):
-    # Wait 11 seconds to ensure no presence.
+        self.__u2f_generate(
+                APP_ID,
+                USER_SECRET_1,
+                '01',  # U2F_AUTH_FLAG_TUP
+                VENDOR_CMD_RESPONSE_SUCCESS)
 
-    time.sleep(11)
+    def __test_generate_consume_presence(self):
+        self._safe_power_short_press()
 
-    self.__u2f_generate(
-        APP_ID,
-        USER_SECRET_1,
-        '01',  # U2F_AUTH_FLAG_TUP
-        VENDOR_CMD_RESPONSE_NOT_ALLOWED)
+        self.__u2f_generate(
+                APP_ID,
+                USER_SECRET_1,
+                '03',  # U2F_AUTH_FLAG_TUP | G2F_CONSUME
+                VENDOR_CMD_RESPONSE_SUCCESS)
 
-    self._safe_power_short_press()
+        self.__u2f_generate(
+                APP_ID,
+                USER_SECRET_1,
+                '01',  # U2F_AUTH_FLAG_TUP
+                VENDOR_CMD_RESPONSE_NOT_ALLOWED)
 
-    self.__u2f_generate(
-        APP_ID,
-        USER_SECRET_1,
-        '01',  # U2F_AUTH_FLAG_TUP
-        VENDOR_CMD_RESPONSE_SUCCESS)
+    def __test_sign_requires_presence(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-  def __test_generate_consume_presence(self):
-    self._safe_power_short_press()
+        # U2F asserts presence by checking for a power button press within the
+        # last 10 seconds, sleep so that we are sure there was not one.
 
-    self.__u2f_generate(
-        APP_ID,
-        USER_SECRET_1,
-        '03',  # U2F_AUTH_FLAG_TUP | G2F_CONSUME
-        VENDOR_CMD_RESPONSE_SUCCESS)
+        time.sleep(11)
 
-    self.__u2f_generate(
-        APP_ID,
-        USER_SECRET_1,
-        '01',  # U2F_AUTH_FLAG_TUP
-        VENDOR_CMD_RESPONSE_NOT_ALLOWED)
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_NOT_ALLOWED)
 
-  def __test_sign_requires_presence(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_sign_multiple_no_consume(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    # U2F asserts presence by checking for a power button press within the
-    # last 10 seconds, sleep so that we are sure there was not one.
+        self._safe_power_short_press()
 
-    time.sleep(11)
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
 
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_NOT_ALLOWED)
+        # We should be able to sign again, as this will happen within 10
+        # seconds of the power button press, and we did not consume.
 
-  def __test_sign_multiple_no_consume(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
 
-    self._safe_power_short_press()
+    def __test_sign_consume(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
+        self._safe_power_short_press()
 
-    # We should be able to sign again, as this will happen within 10
-    # seconds of the power button press, and we did not consume.
+        self.__u2f_sign(
+                APP_ID,
+                USER_SECRET_1,
+                registration['keyHandle'],
+                HASH_TO_SIGN,
+                '02',  # G2F_CONSUME
+                VENDOR_CMD_RESPONSE_SUCCESS)
 
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
+        # We should have consumed the power button press, so we should not be
+        # able to sign again.
 
-  def __test_sign_consume(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_NOT_ALLOWED)
 
-    self._safe_power_short_press()
+    def __test_sign_wrong_user_secret(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self.__u2f_sign(
-        APP_ID,
-        USER_SECRET_1,
-        registration['keyHandle'],
-        HASH_TO_SIGN,
-        '02',  # G2F_CONSUME
-        VENDOR_CMD_RESPONSE_SUCCESS)
+        self._safe_power_short_press()
 
-    # We should have consumed the power button press, so we should not be
-    # able to sign again.
+        # Check.
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
 
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_NOT_ALLOWED)
+        self.__u2f_sign(APP_ID, USER_SECRET_2, registration['keyHandle'],
+                        HASH_TO_SIGN, '00',
+                        VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
 
-  def __test_sign_wrong_user_secret(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_sign_wrong_app_id(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self._safe_power_short_press()
+        self._safe_power_short_press()
 
-    # Sanity check.
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
+        # Check.
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
 
-    self.__u2f_sign(APP_ID, USER_SECRET_2, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
+        self.__u2f_sign(APP_ID_2, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00',
+                        VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
 
-  def __test_sign_wrong_app_id(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_sign_invalid_kh(self):
+        # U2F asserts presence by checking for a power button press within the
+        # last 10 seconds, sleep so that we are sure there was not one.
 
-    self._safe_power_short_press()
+        time.sleep(11)
 
-    # Sanity check.
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
+        self.__u2f_sign(
+                APP_ID,
+                USER_SECRET_1,
+                RANDOM_32 + RANDOM_32,  # KH is 64 bytes long
+                HASH_TO_SIGN,
+                '00',
+                VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
 
-    self.__u2f_sign(APP_ID_2, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
+        self.__u2f_sign(
+                APP_ID,
+                USER_SECRET_1,
+                RANDOM_32 + RANDOM_32,  # KH is 64 bytes long
+                HASH_TO_SIGN,
+                '02',  # G2F_CONSUME
+                VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
 
-  def __test_sign_invalid_kh(self):
-    # U2F asserts presence by checking for a power button press within the
-    # last 10 seconds, sleep so that we are sure there was not one.
+    def __test_sign_invalid_kh_with_presence(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    time.sleep(11)
+        self._safe_power_short_press()
 
-    self.__u2f_sign(
-        APP_ID,
-        USER_SECRET_1,
-        RANDOM_32 + RANDOM_32,  # KH is 64 bytes long
-        HASH_TO_SIGN,
-        '00',
-        VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
+        # Should return invalid KH error, without consuming presence.
+        self.__u2f_sign(
+                APP_ID,
+                USER_SECRET_1,
+                RANDOM_32 + RANDOM_32,  # KH is 64 bytes long
+                HASH_TO_SIGN,
+                '02',  # G2F_CONSUME
+                VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
 
-    self.__u2f_sign(
-        APP_ID,
-        USER_SECRET_1,
-        RANDOM_32 + RANDOM_32,  # KH is 64 bytes long
-        HASH_TO_SIGN,
-        '02',  # G2F_CONSUME
-        VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
+        # Check presence was not consumed.
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
 
-  def __test_sign_invalid_kh_with_presence(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_sign_check_only(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self._safe_power_short_press()
+        # U2F asserts presence by checking for a power button press within the
+        # last 10 seconds, sleep so that we are sure there was not one.
 
-    # Should return invalid KH error, without consuming presence.
-    self.__u2f_sign(
-        APP_ID,
-        USER_SECRET_1,
-        RANDOM_32 + RANDOM_32,  # KH is 64 bytes long
-        HASH_TO_SIGN,
-        '02',  # G2F_CONSUME
-        VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
+        time.sleep(11)
 
-    # Check presence was not consumed.
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '07', VENDOR_CMD_RESPONSE_SUCCESS)
 
-  def __test_sign_check_only(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
-
-    # U2F asserts presence by checking for a power button press within the
-    # last 10 seconds, sleep so that we are sure there was not one.
-
-    time.sleep(11)
-
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '07', VENDOR_CMD_RESPONSE_SUCCESS)
-
-  def __test_sign_check_only_with_presence(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_sign_check_only_with_presence(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self._safe_power_short_press()
+        self._safe_power_short_press()
 
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '07', VENDOR_CMD_RESPONSE_SUCCESS)
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '07', VENDOR_CMD_RESPONSE_SUCCESS)
 
-  def __test_sign_check_only_invalid_kh(self):
-    # U2F asserts presence by checking for a power button press within the
-    # last 10 seconds, sleep so that we are sure there was not one.
+    def __test_sign_check_only_invalid_kh(self):
+        # U2F asserts presence by checking for a power button press within the
+        # last 10 seconds, sleep so that we are sure there was not one.
 
-    time.sleep(11)
+        time.sleep(11)
 
-    self.__u2f_sign(APP_ID,
-                    USER_SECRET_1,
-                    RANDOM_32 + RANDOM_32,  # KH is 64 bytes long
-                    HASH_TO_SIGN,
-                    '07',
-                    VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
+        self.__u2f_sign(
+                APP_ID,
+                USER_SECRET_1,
+                RANDOM_32 + RANDOM_32,  # KH is 64 bytes long
+                HASH_TO_SIGN,
+                '07',
+                VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
 
-  def __test_sign_check_only_invalid_kh_with_presence(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_sign_check_only_invalid_kh_with_presence(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self._safe_power_short_press()
+        self._safe_power_short_press()
 
-    self.__u2f_sign(APP_ID,
-                    USER_SECRET_1,
-                    RANDOM_32 + RANDOM_32,  # KH is 64 bytes long
-                    HASH_TO_SIGN,
-                    '07',
-                    VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
+        self.__u2f_sign(
+                APP_ID,
+                USER_SECRET_1,
+                RANDOM_32 + RANDOM_32,  # KH is 64 bytes long
+                HASH_TO_SIGN,
+                '07',
+                VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
 
-  def __check_attest_reg_resp(self,
-                              app_id,
-                              key_handle,
-                              pub_key,
-                              user_secret,
-                              expected_response,
-                              pad=False):
-    register_resp = '00{}{}{}{}'.format(
-        app_id,
-        RANDOM_32,  # challenge
-        key_handle,
-        pub_key)
+    def __check_attest_reg_resp(self,
+                                app_id,
+                                key_handle,
+                                pub_key,
+                                user_secret,
+                                expected_response,
+                                pad=False):
+        register_resp = '00{}{}{}{}'.format(
+                app_id,
+                RANDOM_32,  # challenge
+                key_handle,
+                pub_key)
 
-    self.__u2f_attest(user_secret, U2F_ATTEST_FORMAT_REG_RESP, register_resp,
-                      expected_response, pad)
+        self.__u2f_attest(user_secret, U2F_ATTEST_FORMAT_REG_RESP,
+                          register_resp, expected_response, pad)
 
-  def __test_attest_simple(self):
-    # Attest does not require user presence
-    time.sleep(11)
+    def __test_attest_simple(self):
+        # Attest does not require user presence
+        time.sleep(11)
 
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self.__check_attest_reg_resp(APP_ID, registration['keyHandle'],
-                                 registration['pubKey'], USER_SECRET_1,
-                                 VENDOR_CMD_RESPONSE_SUCCESS)
+        self.__check_attest_reg_resp(APP_ID, registration['keyHandle'],
+                                     registration['pubKey'], USER_SECRET_1,
+                                     VENDOR_CMD_RESPONSE_SUCCESS)
 
-  def __test_attest_simple_padded(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_attest_simple_padded(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self.__check_attest_reg_resp(
-        APP_ID,
-        registration['keyHandle'],
-        registration['pubKey'],
-        USER_SECRET_1,
-        VENDOR_CMD_RESPONSE_SUCCESS,
-        pad=True)
+        self.__check_attest_reg_resp(APP_ID,
+                                     registration['keyHandle'],
+                                     registration['pubKey'],
+                                     USER_SECRET_1,
+                                     VENDOR_CMD_RESPONSE_SUCCESS,
+                                     pad=True)
 
-  def __test_attest_wrong_user(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_attest_wrong_user(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self.__check_attest_reg_resp(APP_ID, registration['keyHandle'],
-                                 registration['pubKey'], USER_SECRET_2,
-                                 VENDOR_CMD_RESPONSE_NOT_ALLOWED)
+        self.__check_attest_reg_resp(APP_ID, registration['keyHandle'],
+                                     registration['pubKey'], USER_SECRET_2,
+                                     VENDOR_CMD_RESPONSE_NOT_ALLOWED)
 
-  def __test_attest_wrong_app_id(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_attest_wrong_app_id(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self.__check_attest_reg_resp(APP_ID_2, registration['keyHandle'],
-                                 registration['pubKey'], USER_SECRET_1,
-                                 VENDOR_CMD_RESPONSE_NOT_ALLOWED)
+        self.__check_attest_reg_resp(APP_ID_2, registration['keyHandle'],
+                                     registration['pubKey'], USER_SECRET_1,
+                                     VENDOR_CMD_RESPONSE_NOT_ALLOWED)
 
-  def __test_attest_wrong_pub_key(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_attest_wrong_pub_key(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self.__check_attest_reg_resp(APP_ID, registration['keyHandle'],
-                                 'FF' * 65, USER_SECRET_1,
-                                 VENDOR_CMD_RESPONSE_NOT_ALLOWED)
+        self.__check_attest_reg_resp(APP_ID, registration['keyHandle'],
+                                     'FF' * 65, USER_SECRET_1,
+                                     VENDOR_CMD_RESPONSE_NOT_ALLOWED)
 
-  def __test_attest_garbage_data(self):
-    self.__u2f_attest(USER_SECRET_1, U2F_ATTEST_FORMAT_REG_RESP,
-                      'ff' * U2F_ATTEST_REG_RESP_SIZE_BYTES,
-                      VENDOR_CMD_RESPONSE_NOT_ALLOWED)
+    def __test_attest_garbage_data(self):
+        self.__u2f_attest(USER_SECRET_1, U2F_ATTEST_FORMAT_REG_RESP,
+                          'ff' * U2F_ATTEST_REG_RESP_SIZE_BYTES,
+                          VENDOR_CMD_RESPONSE_NOT_ALLOWED)
 
-  def __test_attest_truncated_data(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_attest_truncated_data(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    register_resp = '00{}{}{}{}'.format(
-        APP_ID,
-        RANDOM_32,  # challenge
-        registration['keyHandle'],
-        registration['pubKey'])
+        register_resp = '00{}{}{}{}'.format(
+                APP_ID,
+                RANDOM_32,  # challenge
+                registration['keyHandle'],
+                registration['pubKey'])
 
-    # Attempt to attest to valid data with invalid format.
-    self.__u2f_attest(USER_SECRET_1, U2F_ATTEST_FORMAT_REG_RESP, register_resp,
-                      VENDOR_CMD_RESPONSE_BOGUS_ARGS, truncated=True)
+        # Attempt to attest to valid data with invalid format.
+        self.__u2f_attest(USER_SECRET_1,
+                          U2F_ATTEST_FORMAT_REG_RESP,
+                          register_resp,
+                          VENDOR_CMD_RESPONSE_BOGUS_ARGS,
+                          truncated=True)
 
-  def __test_attest_invalid_format(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_attest_invalid_format(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    register_resp = '00{}{}{}{}'.format(
-        APP_ID,
-        RANDOM_32,  # challenge
-        registration['keyHandle'],
-        registration['pubKey'])
+        register_resp = '00{}{}{}{}'.format(
+                APP_ID,
+                RANDOM_32,  # challenge
+                registration['keyHandle'],
+                registration['pubKey'])
 
-    # Attempt to attest to valid data with invalid format.
-    self.__u2f_attest(USER_SECRET_1, 'ff', register_resp,
-                      VENDOR_CMD_RESPONSE_NOT_ALLOWED)
+        # Attempt to attest to valid data with invalid format.
+        self.__u2f_attest(USER_SECRET_1, 'ff', register_resp,
+                          VENDOR_CMD_RESPONSE_NOT_ALLOWED)
 
-  def __test_attest_invalid_reserved_byte(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_attest_invalid_reserved_byte(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    register_resp = '{}{}{}{}{}'.format(
-        '01', # unexpected reserved byte
-        APP_ID,
-        RANDOM_32,  # challenge
-        registration['keyHandle'],
-        registration['pubKey'])
+        register_resp = '{}{}{}{}{}'.format(
+                '01',  # unexpected reserved byte
+                APP_ID,
+                RANDOM_32,  # challenge
+                registration['keyHandle'],
+                registration['pubKey'])
 
-    # Attempt to attest to valid data with invalid format.
-    self.__u2f_attest(USER_SECRET_1, U2F_ATTEST_FORMAT_REG_RESP, register_resp,
-                      VENDOR_CMD_RESPONSE_NOT_ALLOWED)
+        # Attempt to attest to valid data with invalid format.
+        self.__u2f_attest(USER_SECRET_1, U2F_ATTEST_FORMAT_REG_RESP,
+                          register_resp, VENDOR_CMD_RESPONSE_NOT_ALLOWED)
 
-  def __test_kh_invalidated_by_powerwash(self):
-    registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
+    def __test_kh_invalidated_by_powerwash(self):
+        registration = self.__u2f_generate(APP_ID, USER_SECRET_1, '00')
 
-    self._safe_power_short_press()
+        self._safe_power_short_press()
 
-    # Sanity check
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
+        # Check
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_SUCCESS)
 
-    # Clear TPM. We should no longer be able to authenticate with the
-    # key handle after this.
-    tpm_utils.ClearTPMOwnerRequest(self.client, wait_for_ready=True)
+        # Clear TPM. We should no longer be able to authenticate with the
+        # key handle after this.
+        tpm_utils.ClearTPMOwnerRequest(self.client, wait_for_ready=True)
 
-    self._safe_power_short_press()
+        self._safe_power_short_press()
 
-    self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
-                    HASH_TO_SIGN, '00', VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
+        self.__u2f_sign(APP_ID, USER_SECRET_1, registration['keyHandle'],
+                        HASH_TO_SIGN, '00',
+                        VENDOR_CMD_RESPONSE_PASSWORD_REQUIRED)
 
-  def run_once(self, host=None):
-    """Run the tests."""
+    def run_once(self, host=None):
+        """Run the tests."""
 
-    self.client = host
+        self.client = host
 
-    # Basic functionality
-    self.__test_generate_unique()
-    self.__test_generate_sign_simple()
+        # Basic functionality
+        self.__test_generate_unique()
+        self.__test_generate_sign_simple()
 
-    # Generate - presence
-    self.__test_generate_with_presence()
-    self.__test_generate_consume_presence()
+        # Generate - presence
+        self.__test_generate_with_presence()
+        self.__test_generate_consume_presence()
 
-    # Sign - presence
-    self.__test_sign_requires_presence()
-    self.__test_sign_multiple_no_consume()
-    self.__test_sign_consume()
+        # Sign - presence
+        self.__test_sign_requires_presence()
+        self.__test_sign_multiple_no_consume()
+        self.__test_sign_consume()
 
-    # Sign - key handle
-    self.__test_sign_wrong_user_secret()
-    self.__test_sign_wrong_app_id()
-    self.__test_sign_invalid_kh()
+        # Sign - key handle
+        self.__test_sign_wrong_user_secret()
+        self.__test_sign_wrong_app_id()
+        self.__test_sign_invalid_kh()
 
-    # Sign - check only
-    self.__test_sign_check_only()
-    self.__test_sign_check_only_with_presence()
-    self.__test_sign_check_only_invalid_kh()
-    self.__test_sign_check_only_invalid_kh_with_presence()
+        # Sign - check only
+        self.__test_sign_check_only()
+        self.__test_sign_check_only_with_presence()
+        self.__test_sign_check_only_invalid_kh()
+        self.__test_sign_check_only_invalid_kh_with_presence()
 
-    # Attest
-    self.__test_attest_simple()
-    self.__test_attest_simple_padded()
-    self.__test_attest_wrong_user()
-    self.__test_attest_wrong_app_id()
-    self.__test_attest_wrong_pub_key()
-    self.__test_attest_garbage_data()
-    self.__test_attest_truncated_data()
-    self.__test_attest_invalid_format()
-    self.__test_attest_invalid_reserved_byte()
+        # Attest
+        self.__test_attest_simple()
+        self.__test_attest_simple_padded()
+        self.__test_attest_wrong_user()
+        self.__test_attest_wrong_app_id()
+        self.__test_attest_wrong_pub_key()
+        self.__test_attest_garbage_data()
+        self.__test_attest_truncated_data()
+        self.__test_attest_invalid_format()
+        self.__test_attest_invalid_reserved_byte()
 
-    # Powerwash
-    self.__test_kh_invalidated_by_powerwash()
+        # Powerwash
+        self.__test_kh_invalidated_by_powerwash()
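
For reference, the hex-string helpers at the top of the hunk above drive all of the vendor-command framing in this test: get_bytes() slices a byte range out of a hex dump, get_str_length_as_hex() converts a hex string's byte length to hex (now with Python 3 integer division), and the command header zero-pads the size to a 32-bit field. The following is a minimal sketch of that behaviour only; the helpers are copied from the hunk with the str parameter renamed for clarity, and the response string and the size value 12 are illustrative examples, not the real Cr50 constants.

def get_bytes(tpm_str, start, length):
    # Slice `length` bytes starting at byte offset `start` out of a hex string.
    return tpm_str[(start * 2):(start * 2 + length * 2)]

def get_str_length_as_hex(hex_str, additional_len=0):
    # Byte length of hex_str (plus additional_len) as a bare hex string.
    assert (len(hex_str) % 2) == 0
    return hex(len(hex_str) // 2 + additional_len)[2:]

# Illustrative values only; not a real Cr50 response.
resp = '8001' + '0000000c' + '00000000' + '0000'
assert get_bytes(resp, 0, 2) == '8001'           # TPM_ST_NO_SESSIONS tag
assert get_str_length_as_hex('20000000') == '4'  # 4-byte vendor command code
assert '{:0>8}'.format(get_str_length_as_hex('', 12)) == '0000000c'  # UINT32 size field
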
diff --git a/server/site_tests/firmware_Cr50U2fPowerwash/control b/server/site_tests/firmware_Cr50U2fPowerwash/control
index 311ccb1..7ba58f7 100644
--- a/server/site_tests/firmware_Cr50U2fPowerwash/control
+++ b/server/site_tests/firmware_Cr50U2fPowerwash/control
@@ -12,6 +12,7 @@
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
 DEPENDENCIES="servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test issues a U2F_REGISTER command, clears the TPM, and checks that
diff --git a/server/site_tests/firmware_Cr50U2fPowerwash/firmware_Cr50U2fPowerwash.py b/server/site_tests/firmware_Cr50U2fPowerwash/firmware_Cr50U2fPowerwash.py
index 844d098..a6b035b 100644
--- a/server/site_tests/firmware_Cr50U2fPowerwash/firmware_Cr50U2fPowerwash.py
+++ b/server/site_tests/firmware_Cr50U2fPowerwash/firmware_Cr50U2fPowerwash.py
@@ -12,11 +12,14 @@
 
 class firmware_Cr50U2fPowerwash(FirmwareTest):
     """
-    A test that runs sanity checks for U2F register and authenticate functions,
-    and checks that key handles are invalidated after TPM clear.
+    A test that runs confidence checks for U2F register and authenticate
+    functions, and checks that key handles are invalidated after TPM clear.
     """
     version = 1
 
+    TEST_CHALLENGE_DIGEST = hashlib.sha256(b'test_challenge').hexdigest()
+    TEST_APPLICATION_DIGEST = hashlib.sha256(b'test_application').hexdigest()
+
     def _safe_power_short_press(self):
         """Stop powerd before pressing the power button."""
         # Validating U2F requires pressing the power button. If those power button
@@ -52,16 +55,14 @@
         self._safe_power_short_press()
 
         # Register to create a new key handle.
-        g2f_reg = g2f_utils.G2fRegister(
-            self.client,
-            cr50_dev,
-            hashlib.sha256('test_challenge').hexdigest(),
-            hashlib.sha256('test_application').hexdigest(),
-            U2F_AUTH_ENFORCE)
+        g2f_reg = g2f_utils.G2fRegister(self.client, cr50_dev,
+                                        self.TEST_CHALLENGE_DIGEST,
+                                        self.TEST_APPLICATION_DIGEST,
+                                        U2F_AUTH_ENFORCE)
 
-        # Sanity check that we managed to register.
+        # Check that we managed to register.
         if not g2f_reg.exit_status == 0:
-          raise error.TestError('Register failed.')
+            raise error.TestError('Register failed.')
 
         # Extract newly created key handle.
         key_handle = self.parse_g2ftool_output(g2f_reg.stdout)['key_handle']
@@ -69,17 +70,14 @@
         # Auth requires physical presence.
         self._safe_power_short_press()
 
-        # Sanity check that we can authenticate with the new key handle.
-        g2f_auth = g2f_utils.G2fAuth(
-            self.client,
-            cr50_dev,
-            hashlib.sha256('test_challenge').hexdigest(),
-            hashlib.sha256('test_application').hexdigest(),
-            key_handle,
-            U2F_AUTH_ENFORCE)
+        # Check that we can authenticate with the new key handle.
+        g2f_auth = g2f_utils.G2fAuth(self.client, cr50_dev,
+                                     self.TEST_CHALLENGE_DIGEST,
+                                     self.TEST_APPLICATION_DIGEST, key_handle,
+                                     U2F_AUTH_ENFORCE)
 
         if not g2f_auth.exit_status == 0:
-          raise error.TestError('Authenticate failed.')
+            raise error.TestError('Authenticate failed.')
 
         # Clear TPM. We should no longer be able to authenticate with the
         # key handle after this.
@@ -94,16 +92,13 @@
 
         # Check the key handle is no longer valid.
         self._safe_power_short_press()
-        g2f_auth_clear = g2f_utils.G2fAuth(
-            self.client,
-            cr50_dev,
-            hashlib.sha256('test_challenge').hexdigest(),
-            hashlib.sha256('test_application').hexdigest(),
-            key_handle,
-            U2F_AUTH_ENFORCE)
+        g2f_auth_clear = g2f_utils.G2fAuth(self.client, cr50_dev,
+                                           self.TEST_CHALLENGE_DIGEST,
+                                           self.TEST_APPLICATION_DIGEST,
+                                           key_handle, U2F_AUTH_ENFORCE)
 
         if g2f_auth_clear.exit_status == 0:
-          raise error.TestError('Authenticate succeeded; should have failed')
+            raise error.TestError('Authenticate succeeded; should have failed')
 
 
     def cleanup(self):
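
One detail behind the digest constants introduced above: Python 3's hashlib only hashes bytes, which is why the challenge and application strings gained a b'' prefix when they became class attributes. A minimal sketch of the same pattern:

import hashlib

# Python 3: sha256() accepts bytes, not str, hence the b'' prefix above.
challenge_digest = hashlib.sha256(b'test_challenge').hexdigest()
assert len(challenge_digest) == 64  # 32-byte SHA-256 digest rendered as hex
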
diff --git a/server/site_tests/firmware_Cr50USB/control b/server/site_tests/firmware_Cr50USB/control
index 5bb8552..e2d69e1 100644
--- a/server/site_tests/firmware_Cr50USB/control
+++ b/server/site_tests/firmware_Cr50USB/control
@@ -10,6 +10,7 @@
 TIME = "MEDIUM"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test runs 'usb_updater -f' many times to verify the cr50 to ap usb
diff --git a/server/site_tests/firmware_Cr50USB/firmware_Cr50USB.py b/server/site_tests/firmware_Cr50USB/firmware_Cr50USB.py
index 1c77f50..672bc74 100644
--- a/server/site_tests/firmware_Cr50USB/firmware_Cr50USB.py
+++ b/server/site_tests/firmware_Cr50USB/firmware_Cr50USB.py
@@ -40,6 +40,13 @@
         self.host = host
         # Disable CCD so it doesn't interfere with the Cr50 AP usb connection.
         if hasattr(self, "cr50"):
+            # TODO(b/218492933) : find better way to disable rddkeepalive
+            # Disable rddkeepalive, so the test can disable ccd.
+            self.cr50.send_command('ccd testlab open')
+            self.cr50.send_command('rddkeepalive disable')
+            # Lock cr50 so the console will be restricted
+            self.cr50.set_ccd_level('lock')
+
             self.cr50.ccd_disable()
 
         # Make sure the device is logged in so TPM activity doesn't keep it
@@ -52,7 +59,7 @@
         logging.info("Running Cr50 USB stress test for %d iterations",
                      num_iterations)
 
-        for iteration in xrange(num_iterations):
+        for iteration in range(num_iterations):
             if iteration:
                 time.sleep(self.SLEEP_DELAY)
 
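
The xrange() change above is part of the same Python 3 migration that adds PY_VERSION = 3 to these control files; the other recurring adjustment is integer division (see the // change in firmware_Cr50U2fCommands). A short reminder of the two behaviours, runnable under Python 3:

# xrange() is gone in Python 3; range() is already lazy.
for iteration in range(3):
    pass

# '/' is true division in Python 3, so byte counts use '//' to stay integers.
assert 7 / 2 == 3.5
assert 7 // 2 == 3
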
diff --git a/server/site_tests/firmware_Cr50Unlock/control b/server/site_tests/firmware_Cr50Unlock/control
index a644f29..143af5e 100644
--- a/server/site_tests/firmware_Cr50Unlock/control
+++ b/server/site_tests/firmware_Cr50Unlock/control
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify cr50 can be unlocked with a password"""
 
diff --git a/server/site_tests/firmware_Cr50Unlock/control.faft_cr50_tot b/server/site_tests/firmware_Cr50Unlock/control.faft_cr50_tot
index 313bfd1..bf6dc5a 100644
--- a/server/site_tests/firmware_Cr50Unlock/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50Unlock/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify cr50 can be unlocked with a password
diff --git a/server/site_tests/firmware_Cr50Unlock/firmware_Cr50Unlock.py b/server/site_tests/firmware_Cr50Unlock/firmware_Cr50Unlock.py
index 2e31fd2..20448a9 100644
--- a/server/site_tests/firmware_Cr50Unlock/firmware_Cr50Unlock.py
+++ b/server/site_tests/firmware_Cr50Unlock/firmware_Cr50Unlock.py
@@ -21,14 +21,17 @@
 
     def run_once(self):
         """Check cr50 can see dev mode open works correctly"""
-        # Make sure testlab mode is enabled, so we can guarantee the password
-        # can be cleared.
+
+        if not self.cr50.unlock_is_supported():
+            raise error.TestNAError('Unlock not supported')
+
         if not self.faft_config.has_powerbutton:
             raise error.TestNAError('Can not run test without power button')
-            return
 
+        # Make sure testlab mode is enabled, so we can guarantee the password
+        # can be cleared.
         self.fast_ccd_open(enable_testlab=True)
-        self.cr50.send_command('ccd reset')
+        self.cr50.ccd_reset()
         # Set the password
         self.set_ccd_password(self.CCD_PASSWORD)
         if self.cr50.password_is_reset():
@@ -78,4 +81,4 @@
         # Clear the password which has set at the beginning of this test.
         self.set_ccd_password('clear:' + self.CCD_PASSWORD)
         if not self.cr50.password_is_reset():
-           raise error.TestFail('Unable to clear password')
+            raise error.TestFail('Unable to clear password')
diff --git a/server/site_tests/firmware_Cr50Update/control b/server/site_tests/firmware_Cr50Update/control
index 11faa42..609189b 100644
--- a/server/site_tests/firmware_Cr50Update/control
+++ b/server/site_tests/firmware_Cr50Update/control
@@ -9,6 +9,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test verifies Cr50 update works or recovery from erased nvmem.
diff --git a/server/site_tests/firmware_Cr50Update/control.post_install b/server/site_tests/firmware_Cr50Update/control.post_install
index e970fae..8c2f8f0 100644
--- a/server/site_tests/firmware_Cr50Update/control.post_install
+++ b/server/site_tests/firmware_Cr50Update/control.post_install
@@ -9,6 +9,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test verifies Cr50 update works or recovery from erased nvmem.
diff --git a/server/site_tests/firmware_Cr50Update/firmware_Cr50Update.py b/server/site_tests/firmware_Cr50Update/firmware_Cr50Update.py
index c00548d..b1b14b9 100644
--- a/server/site_tests/firmware_Cr50Update/firmware_Cr50Update.py
+++ b/server/site_tests/firmware_Cr50Update/firmware_Cr50Update.py
@@ -7,7 +7,6 @@
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import cr50_utils
-from autotest_lib.server.cros import filesystem_util
 from autotest_lib.server.cros.faft.cr50_test import Cr50Test
 
 
@@ -50,7 +49,7 @@
         # Make sure ccd is disabled so it won't interfere with the update
         self.cr50.ccd_disable()
 
-        filesystem_util.make_rootfs_writable(self.host)
+        self.make_rootfs_writable()
 
         self.host = host
 
diff --git a/server/site_tests/firmware_Cr50UpdateScriptStress/control b/server/site_tests/firmware_Cr50UpdateScriptStress/control
index 34871dc..2e0c468 100644
--- a/server/site_tests/firmware_Cr50UpdateScriptStress/control
+++ b/server/site_tests/firmware_Cr50UpdateScriptStress/control
@@ -11,6 +11,7 @@
 TIME = "LONG"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test clears the cr50 update state and reboots the device many times
diff --git a/server/site_tests/firmware_Cr50UpdateScriptStress/control.faft_cr50_tot b/server/site_tests/firmware_Cr50UpdateScriptStress/control.faft_cr50_tot
index 9776cd9..7f57e5c 100644
--- a/server/site_tests/firmware_Cr50UpdateScriptStress/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50UpdateScriptStress/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "LONG"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test clears the cr50 update state and reboots the device many times
diff --git a/server/site_tests/firmware_Cr50VerifyEK/control b/server/site_tests/firmware_Cr50VerifyEK/control
new file mode 100644
index 0000000..f4391a2
--- /dev/null
+++ b/server/site_tests/firmware_Cr50VerifyEK/control
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "mruthven"
+NAME = "firmware_Cr50VerifyEK"
+PURPOSE = "Run 'TPM Verify EK'."
+ATTRIBUTES = "suite:faft_cr50_prepvt, suite:faft_cr50_pvt"
+TIME = "SHORT"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
+
+DOC = """Run 'TPM Verify EK' test steps."""
+
+if 'args_dict' not in locals():
+    args_dict = {}
+
+args_dict.update(utils.args_to_dict(args))
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+
+    iterations = int(args_dict.get("iterations", 1))
+
+    job.run_test("firmware_Cr50VerifyEK", host=host, cmdline_args=args,
+                 full_args=args_dict, iterations=iterations)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_Cr50VerifyEK/firmware_Cr50VerifyEK.py b/server/site_tests/firmware_Cr50VerifyEK/firmware_Cr50VerifyEK.py
new file mode 100644
index 0000000..b86f307
--- /dev/null
+++ b/server/site_tests/firmware_Cr50VerifyEK/firmware_Cr50VerifyEK.py
@@ -0,0 +1,15 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server.cros.faft.cr50_test import Cr50Test
+
+
+class firmware_Cr50VerifyEK(Cr50Test):
+    """Verify tpm verify ek."""
+    version = 1
+
+    def run_once(self, host):
+        """Run tpm verify ek."""
+        host.run('tpm-manager initialize')
+        host.run('tpm-manager verify_endorsement')
diff --git a/server/site_tests/firmware_Cr50VirtualNVRamServer/control b/server/site_tests/firmware_Cr50VirtualNVRamServer/control
index 032cf8a..030556f 100644
--- a/server/site_tests/firmware_Cr50VirtualNVRamServer/control
+++ b/server/site_tests/firmware_Cr50VirtualNVRamServer/control
@@ -11,7 +11,7 @@
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:faft_cr50_prepvt"
-JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test clears the TPM if necessary, then runs a client side test that checks
diff --git a/server/site_tests/firmware_Cr50VirtualNVRamServer/control.faft_cr50_tot b/server/site_tests/firmware_Cr50VirtualNVRamServer/control.faft_cr50_tot
index 9041f4b..a4108e9 100644
--- a/server/site_tests/firmware_Cr50VirtualNVRamServer/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50VirtualNVRamServer/control.faft_cr50_tot
@@ -11,7 +11,7 @@
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:faft_cr50_tot"
-JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test clears the TPM if necessary, then runs a client side test that checks
diff --git a/server/site_tests/firmware_Cr50WPG3/control b/server/site_tests/firmware_Cr50WPG3/control
index 5aaee34..ea8df87 100644
--- a/server/site_tests/firmware_Cr50WPG3/control
+++ b/server/site_tests/firmware_Cr50WPG3/control
@@ -12,6 +12,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify WP in G3.
 
diff --git a/server/site_tests/firmware_Cr50WPG3/control.faft_cr50_tot b/server/site_tests/firmware_Cr50WPG3/control.faft_cr50_tot
index 8755b3e..79ae48b 100644
--- a/server/site_tests/firmware_Cr50WPG3/control.faft_cr50_tot
+++ b/server/site_tests/firmware_Cr50WPG3/control.faft_cr50_tot
@@ -10,6 +10,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Verify WP in G3.
 
diff --git a/server/site_tests/firmware_Cr50WilcoEcrst/control b/server/site_tests/firmware_Cr50WilcoEcrst/control
index 987d681..175a58d 100644
--- a/server/site_tests/firmware_Cr50WilcoEcrst/control
+++ b/server/site_tests/firmware_Cr50WilcoEcrst/control
@@ -12,6 +12,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, board:sarien"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """Make sure Cr50's ecrst command works as intended on Wilco.
 
diff --git a/server/site_tests/firmware_Cr50WilcoRmaFactoryMode/control b/server/site_tests/firmware_Cr50WilcoRmaFactoryMode/control
index 62a4fb0..30b5eff 100644
--- a/server/site_tests/firmware_Cr50WilcoRmaFactoryMode/control
+++ b/server/site_tests/firmware_Cr50WilcoRmaFactoryMode/control
@@ -12,6 +12,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, board:sarien"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """Make sure Cr50's factory mode sets GPIOs correctly.
 
diff --git a/server/site_tests/firmware_Cr50WilcoRmaFactoryMode/firmware_Cr50WilcoRmaFactoryMode.py b/server/site_tests/firmware_Cr50WilcoRmaFactoryMode/firmware_Cr50WilcoRmaFactoryMode.py
index dff350c..6014c18 100644
--- a/server/site_tests/firmware_Cr50WilcoRmaFactoryMode/firmware_Cr50WilcoRmaFactoryMode.py
+++ b/server/site_tests/firmware_Cr50WilcoRmaFactoryMode/firmware_Cr50WilcoRmaFactoryMode.py
@@ -38,14 +38,11 @@
         if not self.cr50.has_command('bpforce'):
             raise error.TestNAError('Cannot run test without bpforce')
 
-        # Switch to dev mode and open CCD, so the test has access to gsctool
-        # and bpforce.
-        self.fast_ccd_open(enable_testlab=True)
-        self.switcher.setup_mode('dev')
-
         # Keep track of whether Cr50 is in factory mode to minimize cleanup.
         self._in_factory_mode = False
 
+        # Open CCD, so the test has access to bpforce
+        self.fast_ccd_open(enable_testlab=True)
 
     def cleanup(self):
         try:
diff --git a/server/site_tests/firmware_CsmeFwUpdate/control b/server/site_tests/firmware_CsmeFwUpdate/control
index 360989a..26a3a29 100644
--- a/server/site_tests/firmware_CsmeFwUpdate/control
+++ b/server/site_tests/firmware_CsmeFwUpdate/control
@@ -4,14 +4,17 @@
 from autotest_lib.server import utils
 
 NAME = "firmware_CsmeFwUpdate"
-AUTHOR = "Chrome OS Team"
-SUITE = "suite:faft_bios"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:faft_bios"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 JOB_RETRIES = 0
 TEST_CASE_ID = "CSME_UPDATE"
+PY_VERSION = 3
+
 DOC = """
 This test updates the RW section of the current bios with another image which contains
 a different ME RW version and verifies that the ME firmware update happens successfully.
@@ -23,4 +26,3 @@
     host = hosts.create_host(machine, servo_args=servo_args)
     job.run_test("firmware_CsmeFwUpdate", host=host, cmdline_args=args, dev_mode = False)
 parallel_simple(run, machines)
-
diff --git a/server/site_tests/firmware_CsmeFwUpdate/control.dev b/server/site_tests/firmware_CsmeFwUpdate/control.dev
index d49e190..27fbc29 100644
--- a/server/site_tests/firmware_CsmeFwUpdate/control.dev
+++ b/server/site_tests/firmware_CsmeFwUpdate/control.dev
@@ -3,15 +3,18 @@
 # found in the LICENSE file.
 from autotest_lib.server import utils
 
-NAME = "firmware_CsmeFwUpdate"
-AUTHOR = "Chrome OS Team"
-SUITE = "suite:faft_bios"
+NAME = "firmware_CsmeFwUpdate.dev"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:faft_bios"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 JOB_RETRIES = 0
 TEST_CASE_ID = "CSME_UPDATE"
+PY_VERSION = 3
+
 DOC = """
 This test updates the RW section of the current bios with another image which contains
 a different ME RW version and verifies that the ME firmware update happens successfully.
diff --git a/server/site_tests/firmware_CsmeFwUpdate/firmware_CsmeFwUpdate.py b/server/site_tests/firmware_CsmeFwUpdate/firmware_CsmeFwUpdate.py
index a2a3fc3..0c8845b 100644
--- a/server/site_tests/firmware_CsmeFwUpdate/firmware_CsmeFwUpdate.py
+++ b/server/site_tests/firmware_CsmeFwUpdate/firmware_CsmeFwUpdate.py
@@ -4,7 +4,7 @@
 
 import logging, re
 import os
-import xmlrpclib
+import six
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
@@ -27,6 +27,8 @@
     # Region to use for flashrom wp-region commands
     WP_REGION = 'WP_RO'
     MODE = 'recovery'
+    CBFSTOOL = 'cbfstool'
+    CMPTOOL = 'cmp'
 
     def initialize(self, host, cmdline_args, dev_mode = False):
         # Parse arguments from command line
@@ -54,7 +56,8 @@
                         "shellball bios for downgrade")
 
         self.backup_firmware()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
 
         # Save write protect configuration and enable it
         logging.info("Enabling Write protection")
@@ -80,7 +83,7 @@
             if self.is_firmware_saved() and self.restore_required:
                 logging.info("Restoring Original Image")
                 self.restore_firmware()
-        except (EnvironmentError, xmlrpclib.Fault,
+        except (EnvironmentError, six.moves.xmlrpc_client.Fault,
                 error.AutoservError, error.TestBaseException):
             logging.error("Problem restoring firmware:", exc_info=True)
 
@@ -92,7 +95,7 @@
                         self._orig_sw_wp['start'],
                         self._orig_sw_wp['length'],
                         self._orig_sw_wp['enabled'])
-        except (EnvironmentError, xmlrpclib.Fault,
+        except (EnvironmentError, six.moves.xmlrpc_client.Fault,
                 error.AutoservError, error.TestBaseException):
             logging.error("Problem restoring software write-protect:",
                           exc_info = True)
@@ -113,18 +116,18 @@
         # Dump the current spi bios to file
         self.spi_bios = self.ORIGINAL_BIOS
         logging.info("Copying current bios image to %s for upgrade " \
-                     "test" % self.spi_bios)
+                     "test", self.spi_bios)
         self.faft_client.bios.dump_whole(self.spi_bios)
 
         # Get the downgrade bios image from user or from shellball
         self.downgrade_bios = self.DOWNGRADE_BIOS
         if self.bios_input:
             logging.info("Copying user given bios image to %s for downgrade " \
-                    "test" % self.downgrade_bios)
+                    "test", self.downgrade_bios)
             self._client.send_file(self.bios_input, self.downgrade_bios)
         else:
             logging.info("Copying bios image from update shellball to %s " \
-                    "for downgrade test" % self.downgrade_bios)
+                    "for downgrade test", self.downgrade_bios)
             self.faft_client.updater.extract_shellball()
             cbfs_work_dir = self.faft_client.updater.cbfs_setup_work_dir()
             shellball_bios = os.path.join(cbfs_work_dir,
@@ -142,7 +145,7 @@
         """
         # Check if ME_RW_A is present in the image
         logging.info("Checking if seperate CBFS is used for CSE RW in " \
-                     "image : %s" % image_path)
+                     "image : %s", image_path)
         command = "futility dump_fmap -F %s | grep ME_RW_A" % image_path
         output = self.faft_client.system.run_shell_command_get_output(
                     command, True)
@@ -160,19 +163,19 @@
         @returns True if present else False
 
         """
-        # Check if me_rw.metadata present FW_MAIN region
-        logging.info("Checking if me_rw.metadata file " \
-                     "present in image : %s" % image_path )
+        # Check if me_rw.version is present in the FW_MAIN region
+        logging.info("Checking if me_rw.version file is " \
+                     "present in image : %s", image_path)
         command = "cbfstool %s print -r FW_MAIN_A " \
-                            "| grep me_rw.metadata" % image_path
+                            "| grep me_rw.version" % image_path
         output = self.faft_client.system.run_shell_command_get_output(
                     command, True)
         if output:
             available = True
-            logging.info("me_rw.metadata present in image")
+            logging.info("me_rw.version present in image")
         else:
             available = False
-            logging.info("me_rw.metadata not present in image")
+            logging.info("me_rw.version not present in image")
 
         return available
 
@@ -181,23 +184,19 @@
         Extract me_rw version from given me_rw blob. Version is first 8
         bytes in the blob
 
-        @param me_blob: me_rw blob (old fmap) or me_rw_metadata blob
+        @param me_blob: me_rw blob (old fmap) or me_rw.version blob
         @param version_offset: version field offset in the blob
         @returns the CSME RW version string
 
         """
         ver_res = ""
         logging.info("Extracting version field from ME blob")
-        command = ("hexdump -n 8 -s %s %s | cut -c 9- |sed 's/ //g' |" \
-                   "sed 's/.\{4\}/&./g;s/ $//' | head -c19" % ( \
-                    str(int(version_offset)), me_blob))
+
+        command = ("hexdump -C %s |  cut -c 9- | cut -d'|' -f 2"%me_blob )
         output = self.faft_client.system.run_shell_command_get_output(
                     command, True)
-        for each_word in output[0].split("."):
-            version = (int(each_word, 16))
-            ver_res = "".join((ver_res, "".join((str(version),"."))))
-        ver_res = ver_res[:-1]
-        logging.info("Version : %s" % ver_res)
+        ver_res = output[0].strip(".")
+        logging.info("Version : %s", ver_res)
         return ver_res
 
     def get_image_fwmain_me_rw_version(self,
@@ -212,8 +211,8 @@
         @returns the CSME RW version string
 
         """
-        # Extract me_rw.metadata and check version.
-        cbfs_name = "me_rw.metadata"
+        # Extract me_rw.version and check version.
+        cbfs_name = "me_rw.version"
         temp_dir = self.faft_client.system.create_temp_dir()
         me_blob = os.path.join(temp_dir, cbfs_name)
 
@@ -271,9 +270,8 @@
         logging.info("Expected mainfw_act    : %s\n" \
                      "Current mainfw_act     : %s\n" \
                      "Expected ME RW Version : %s\n" \
-                     "Current ME RW Version  : %s\n" % (
-                          expected_slot, main_fw_act,
-                          expected_version, me_version))
+                     "Current ME RW Version  : %s\n",
+                      expected_slot, main_fw_act, expected_version, me_version)
 
         if (expected_version not in me_version) or \
                  (expected_slot not in main_fw_act):
@@ -281,13 +279,140 @@
         else:
             return True
 
+    def cmp_local_files(self,
+                        local_filename_1,
+                        local_filename_2):
+        """
+        Compare two local files
+
+        @param local_filename_1: Path to first local file to compare
+        @param local_filename_2: Path to second local file to compare
+
+        @returns "None" if files are identical, or
+                 string response from "cmp" command if files differ.
+        """
+        compare_cmd = ('%s %s %s' %
+                       (self.CMPTOOL, local_filename_1, local_filename_2))
+        try:
+            return self.faft_client.system.run_shell_command_get_output(
+                        compare_cmd, True)
+        except error.CmdError:
+            # already logged by run_shell_command()
+            return None
+
+    def cbfs_read(self,
+                  filename,
+                  extension,
+                  region='ME_RW_A',
+                  local_filename=None,
+                  arch=None,
+                  bios=None):
+        """
+        Reads an arbitrary file from cbfs.
+
+        @param filename: Filename in cbfs, including extension
+        @param extension: Extension of the file, including '.'
+        @param region: region (the default is just 'ME_RW_A')
+        @param local_filename: Path to use on the DUT, overriding the default in
+                           the cbfs work dir.
+        @param arch: Specific machine architecture to extract (default unset)
+        @param bios: Image from which the cbfs file is read
+        @return: The full path of the read file, or None
+        """
+        if bios is None:
+            bios = os.path.join(self._cbfs_work_path, self._bios_path)
+
+        cbfs_filename = filename + extension
+        if local_filename is None:
+            local_filename = os.path.join(self._cbfs_work_path,
+                                          filename + extension)
+
+        extract_cmd = ('%s %s extract -r %s -n %s%s -f %s' %
+                       (self.CBFSTOOL, bios, region, filename,
+                        extension, local_filename))
+        if arch:
+            extract_cmd += ' -m %s' % arch
+
+        try:
+            self.faft_client.system.run_shell_command(extract_cmd)
+            return os.path.abspath(local_filename)
+        except error.CmdError:
+            # already logged by run_shell_command()
+            return None
+
+    def abort_if_me_rw_blobs_identical(self,
+                                       downgrade_bios,
+                                       spi_bios):
+        """
+        Determine if the CSME RW blob in the downgrade bios image is
+        different from the CSME RW blob in the spi bios image.
+
+        @param downgrade_bios: Downgrade bios path
+        @param spi_bios: Bios from spi flash path
+        @returns "None" if CSME RW blobs are identical, or
+                 string response from "diff" command if blobs differ.
+        """
+        # Extract me_rw blobs
+        cbfs_name = "me_rw"
+        downgrade_name = "me_rw"
+        spi_me_a_name = "me_rw_spi_a"
+        spi_me_b_name = "me_rw_spi_b"
+        temp_dir = self.faft_client.system.create_temp_dir()
+        downgrade_me_blob = os.path.join(temp_dir, downgrade_name)
+        spi_me_a_blob = os.path.join(temp_dir, spi_me_a_name)
+        spi_me_b_blob = os.path.join(temp_dir, spi_me_b_name)
+
+        downgrade_rw_path = self.cbfs_read(cbfs_name, '','ME_RW_A',
+                                           downgrade_me_blob, 'x86',
+                                           downgrade_bios)
+        if downgrade_rw_path is None:
+            self.faft_client.system.remove_dir(temp_dir)
+            raise error.TestError("Failed to read %s me_rw blob from " \
+                                  "the downgrade bios %s" % (downgrade_me_blob,
+                                                             downgrade_bios))
+
+        spi_rw_a_path = self.cbfs_read(cbfs_name, '','ME_RW_A', spi_me_a_blob,
+                                       'x86', spi_bios)
+        if spi_rw_a_path is None:
+            self.faft_client.system.remove_dir(temp_dir)
+            raise error.TestError("Failed to read %s me_rw_a blob from " \
+                                  "the downgrade bios %s" % (spi_me_a_blob,
+                                                             spi_bios))
+
+        spi_rw_b_path = self.cbfs_read(cbfs_name, '','ME_RW_B', spi_me_b_blob,
+                                       'x86', spi_bios)
+        if spi_rw_b_path is None:
+            self.faft_client.system.remove_dir(temp_dir)
+            raise error.TestError("Failed to read %s me_rw_b blob from " \
+                                  "the spi bios %s" % (spi_me_b_blob, spi_bios))
+
+        # Are the blobs different?
+        diff_a = self.cmp_local_files(downgrade_rw_path, spi_rw_a_path)
+        diff_b = self.cmp_local_files(downgrade_rw_path, spi_rw_b_path)
+        if diff_a and diff_b:
+            logging.info("CSME RW version is same, but downgrade me_rw " \
+                         "differs from both me_rw blobs in spi flash.")
+        elif diff_a:
+            logging.info("CSME RW version is same, but downgrade me_rw and " \
+                         "FW_MAIN_A me_rw differ.")
+        elif diff_b:
+            logging.info("CSME RW version is same, but downgrade me_rw and " \
+                         "FW_MAIN_B me_rw differ.")
+        else:
+            # Blobs are the same
+            self.faft_client.system.remove_dir(temp_dir)
+            raise error.TestNAError("CSME RW blobs are the same in downgrade " \
+                                    "and spi bios. Test skipped.")
+
+        self.faft_client.system.remove_dir(temp_dir)
+
     def prepare_shellball(self, bios_image, append = None):
         """Prepare a shellball with the given bios image.
 
         @param bios_image: bios image with shellball to be created
         @param append: string to be updated with shellball name
         """
-        logging.info("Preparing shellball with %s" % bios_image)
+        logging.info("Preparing shellball with %s", bios_image)
         self.faft_client.updater.reset_shellball()
         # Copy the given bios to the shellball
         extract_dir = self.faft_client.updater.get_work_path()
@@ -315,8 +440,9 @@
         options = ['--host_only', '--wp=1']
         logging.info("Updating RW firmware using " \
                      "chromeos_firmwareupdate")
-        logging.info("Update command : chromeos_firmwareupdate-%s --mode=%s "
-                     " %s" % (append,self.MODE,' '.join(options)))
+        logging.info(
+                "Update command : chromeos_firmwareupdate-%s --mode=%s "
+                " %s", append, self.MODE, ' '.join(options))
         result = self.run_chromeos_firmwareupdate(
                 self.MODE, append, options, ignore_status = True)
 
@@ -330,26 +456,27 @@
                                       "failed (rc=%s)" % result.exit_status)
 
     def run_once(self):
-        if not self.faft_config.intel_cse_lite:
-            raise error.TestNAError("CSELite feature not supported " \
-                                    "on this device. Test Skipped")
+        if not ('x86' in self.faft_config.ec_capability):
+            raise error.TestNAError("The firmware_CsmeFwUpdate test is only " \
+                                    "applicable to Intel platforms. Skipping " \
+                                    "test.")
 
         # Read current bios from SPI and create a backup copy
         self.read_current_bios_and_save()
 
+        if not self.check_if_me_blob_exist_in_image(self.spi_bios):
+            raise error.TestNAError("The me_rw blob is not present in the " \
+                                    "current bios.  Skipping test.")
+
         # Check fmap scheme of the bios read from SPI
         spi_bios_fmap_ver = self.check_fmap_format(self.spi_bios)
 
-        if not self.check_if_me_blob_exist_in_image(self.spi_bios):
-            raise error.TestError("Test setup issue : me_rw blob is not " \
-                                "present in the current bios.!")
-
         # Check fmap scheme of the default bios in shellball
         downgrade_bios_fmap = self.check_fmap_format(self.downgrade_bios)
 
         # Check if me_rw blob is present in FW_MAIN
         if not self.check_if_me_blob_exist_in_image(self.downgrade_bios):
-            raise error.TestError("Test setup issue : me_rw blob is not " \
+            raise error.TestNAError("Test setup issue : me_rw blob is not " \
                                     "present in downgrade bios.")
 
         # Check if both of the bios versions use same fmap structure for me_rw
@@ -369,14 +496,15 @@
 
         logging.info("Active CSME RW Version                 : %s\n" \
                      "FW main CSME RW Version SPI Image      : %s\n" \
-                     "FW main CSME RW Version downgrade Image: %s\n" % (
+                     "FW main CSME RW Version downgrade Image: %s\n",
                      active_csme_rw_version, spi_me_version,
-                     downgrade_me_version ))
+                     downgrade_me_version)
 
         # Abort if downgrade me_rw version is same as spi me_rw version
         if (spi_me_version in downgrade_me_version):
-            raise error.TestError("Test setup issue : CSME RW version is " \
-                                    "same in both of the images.")
+            # Version is the same, abort test if blob content is the same
+            self.abort_if_me_rw_blobs_identical(self.downgrade_bios,
+                                                self.spi_bios)
 
         for slot in ["A", "B"]:
             operation = "downgrade"
@@ -384,12 +512,12 @@
             self.prepare_shellball(self.downgrade_bios, operation)
 
             logging.info("Downgrading RW section. Downgrade ME " \
-                        "Version: %s" % downgrade_me_version)
+                        "Version: %s", downgrade_me_version)
             # Run firmware updater downgrade the bios RW
             self.run_shellball(operation)
 
             # Set fw_try_next to slot and reboot to trigger csme update
-            logging.info("Setting fw_try_next to %s: " % slot)
+            logging.info("Setting fw_try_next to %s: ", slot)
             self.faft_client.system.set_fw_try_next(slot)
             self.switcher.mode_aware_reboot(reboot_type = 'cold')
 
@@ -398,19 +526,19 @@
                 raise error.TestError("CSME RW Downgrade using "
                                     "FW_MAIN_%s is Failed!" % slot)
             logging.info("CSME RW Downgrade using FW_MAIN_%s is "
-                        "successful" % slot)
+                         "successful", slot)
 
             operation = "upgrade"
             # Create a shellball with the original spi bios
             self.prepare_shellball(self.spi_bios, operation)
 
             logging.info("Upgrading RW Section. Upgrade ME " \
-                        "Version: %s" % spi_me_version)
+                        "Version: %s", spi_me_version)
             # Run firmware updater and update RW section with shellball
             self.run_shellball(operation)
 
             # Set fw_try_next to slot and reboot to trigger csme update
-            logging.info("Setting fw_try_next to %s: " % slot)
+            logging.info("Setting fw_try_next to %s: ", slot)
             self.faft_client.system.set_fw_try_next(slot)
             self.switcher.mode_aware_reboot(reboot_type = 'cold')
 
@@ -419,4 +547,4 @@
                 raise error.TestError("CSME RW Upgrade using "
                                     "FW_MAIN_%s is Failed!" % slot)
             logging.info("CSME RW Upgrade using FW_MAIN_%s is "
-                        "successful" % slot)
+                         "successful", slot)
diff --git a/server/site_tests/firmware_DevBootUSB/control b/server/site_tests/firmware_DevBootUSB/control
index fbda3f9..40cdcea 100644
--- a/server/site_tests/firmware_DevBootUSB/control
+++ b/server/site_tests/firmware_DevBootUSB/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_DevBootUSB"
 PURPOSE = "Press Ctrl-U on developer screen to boot USB disk"
 CRITERIA = "This test will fail if firmware does not boot USB disk"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv2, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv2, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image test"). On runtime, this test first switches
 DUT to developer mode. When dev_boot_usb=0, pressing Ctrl-U on developer
 screen should not boot the USB disk. When dev_boot_usb=1, pressing Ctrl-U
diff --git a/server/site_tests/firmware_DevBootUSB/firmware_DevBootUSB.py b/server/site_tests/firmware_DevBootUSB/firmware_DevBootUSB.py
index 775fc60..498eaef 100644
--- a/server/site_tests/firmware_DevBootUSB/firmware_DevBootUSB.py
+++ b/server/site_tests/firmware_DevBootUSB/firmware_DevBootUSB.py
@@ -12,7 +12,7 @@
     """
     Servo based Ctrl-U developer USB boot test.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image test"). On runtime, this test first switches
     DUT to developer mode. When dev_boot_usb=0, pressing Ctrl-U on developer
     screen should not boot the USB disk. When dev_boot_usb=1, pressing Ctrl-U
diff --git a/server/site_tests/firmware_DevDefaultBoot/control b/server/site_tests/firmware_DevDefaultBoot/control
index e5eda46..fafde3e 100644
--- a/server/site_tests/firmware_DevDefaultBoot/control
+++ b/server/site_tests/firmware_DevDefaultBoot/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_DevDefaultBoot"
 PURPOSE = "Ensure that dev_default_boot=usb actually boots usb by default"
 CRITERIA = "This test will fail if firmware does not boot USB disk by default"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv2, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv2, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged in, containing a Chrome OS test image.
+This test requires a USB disk plugged in, containing a ChromeOS test image.
 On runtime, this test first switches DUT to developer mode, and modifies
 the dev_default_boot crossystem value.
 After waiting at the dev warning, or pressing enter in the detachable UI dev
diff --git a/server/site_tests/firmware_DevDefaultBoot/firmware_DevDefaultBoot.py b/server/site_tests/firmware_DevDefaultBoot/firmware_DevDefaultBoot.py
index fb55b6c..4765ced 100644
--- a/server/site_tests/firmware_DevDefaultBoot/firmware_DevDefaultBoot.py
+++ b/server/site_tests/firmware_DevDefaultBoot/firmware_DevDefaultBoot.py
@@ -10,7 +10,7 @@
 
 class firmware_DevDefaultBoot(FirmwareTest):
     """
-    This test requires a USB disk plugged in, containing a Chrome OS test image.
+    This test requires a USB disk plugged in, containing a ChromeOS test image.
     On runtime, this test first switches DUT to developer mode, and modifies
     the dev_default_boot crossystem value.
     After waiting at the dev warning, or pressing enter in the detachable UI
diff --git a/server/site_tests/firmware_DevMode/control b/server/site_tests/firmware_DevMode/control
index c7cf0a6..a7610d4 100644
--- a/server/site_tests/firmware_DevMode/control
+++ b/server/site_tests/firmware_DevMode/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_DevMode"
 PURPOSE = "Servo based developer firmware boot test"
 CRITERIA = "This test will fail if firmware does not switch to developer mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv1, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot, suite:faft_smoke, suite:bvt-faft, suite:labqual"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv1, suite:faft_normal, suite:faft_bios_tot, suite:faft_smoke, suite:labqual"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test changes dev switch and checks the developer firmware boot.
diff --git a/server/site_tests/firmware_DevMode/firmware_DevMode.py b/server/site_tests/firmware_DevMode/firmware_DevMode.py
index 345e37d..a2b69fe 100644
--- a/server/site_tests/firmware_DevMode/firmware_DevMode.py
+++ b/server/site_tests/firmware_DevMode/firmware_DevMode.py
@@ -29,8 +29,7 @@
         }))
 
         logging.info("Enable dev mode.")
-        self.switcher.reboot_to_mode(
-                'dev', from_mode='normal', sync_before_boot=False)
+        self.switcher.reboot_to_mode("dev", sync_before_boot=False)
 
         logging.info("Expected developer mode boot and enable normal mode.")
         self.check_state((self.checkers.crossystem_checker, {
@@ -45,10 +44,9 @@
                 'mainfw_type': 'normal',
         }))
 
-        if (
-                self.check_ec_capability() and
-                self.faft_config.mode_switcher_type == 'jetstream_switcher'):
-            if self.gbb_flags & vboot.GBB_FLAG_DISABLE_EC_SOFTWARE_SYNC:
+        if self.check_ec_capability():
+            gbb = self.faft_client.bios.get_gbb_flags()
+            if gbb & vboot.GBB_FLAG_DISABLE_EC_SOFTWARE_SYNC:
                 # In order to test that entering dev mode does not work when
                 # EC_IN_RW=1, EC software sync must be enabled.  If EC software
                 # sync is disabled, then we must skip this portion of the test.
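
The rewritten check above gates the EC_IN_RW portion of the test on GBB flags read at runtime rather than on a cached attribute. Purely as a sketch, the guard could be factored into a helper as below; the helper name is hypothetical and the import path is an assumption based on how other FAFT tests reference vboot constants.

from autotest_lib.server.cros import vboot_constants as vboot


def ec_software_sync_enabled(faft_client):
    """Return True when EC software sync is not disabled via GBB flags."""
    gbb = faft_client.bios.get_gbb_flags()
    return not (gbb & vboot.GBB_FLAG_DISABLE_EC_SOFTWARE_SYNC)
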
diff --git a/server/site_tests/firmware_DevModeStress/control b/server/site_tests/firmware_DevModeStress/control
index a82ccd5..d2dde34 100644
--- a/server/site_tests/firmware_DevModeStress/control
+++ b/server/site_tests/firmware_DevModeStress/control
@@ -4,15 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_DevModeStress"
 PURPOSE = "Servo based developer firmware boot test"
 CRITERIA = "This test will fail if firmware does not remain in developer mode"
 ATTRIBUTES = "suite:faft_dev, suite:faft_stress"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test is intended to be run with many iterations to ensure that the
diff --git a/server/site_tests/firmware_DevModeStress/firmware_DevModeStress.py b/server/site_tests/firmware_DevModeStress/firmware_DevModeStress.py
index 30af41a..d0188c8 100644
--- a/server/site_tests/firmware_DevModeStress/firmware_DevModeStress.py
+++ b/server/site_tests/firmware_DevModeStress/firmware_DevModeStress.py
@@ -20,12 +20,12 @@
         dict_args = utils.args_to_dict(cmdline_args)
         self.faft_iterations = int(dict_args.get('faft_iterations', 1))
         super(firmware_DevModeStress, self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('dev')
+        self.switcher.setup_mode('dev', allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
 
     def run_once(self):
         """Runs a single iteration of the test."""
-        for i in xrange(self.faft_iterations):
+        for i in range(self.faft_iterations):
             logging.info('======== Running FAFT ITERATION %d/%s ========',
                          i + 1, self.faft_iterations)
             logging.info("Verify dev mode.")
diff --git a/server/site_tests/firmware_DevScreenTimeout/control b/server/site_tests/firmware_DevScreenTimeout/control
index 9c33b9b..1c6856f 100644
--- a/server/site_tests/firmware_DevScreenTimeout/control
+++ b/server/site_tests/firmware_DevScreenTimeout/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_DevScreenTimeout"
 PURPOSE = "Servo based developer firmware screen timeout test"
 CRITERIA = "This test will fail if the timeout period does not match our spec."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv4, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv4, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 When booting in developer mode, the firmware shows a screen to warn user
diff --git a/server/site_tests/firmware_DevScreenTimeout/firmware_DevScreenTimeout.py b/server/site_tests/firmware_DevScreenTimeout/firmware_DevScreenTimeout.py
index b4d679b..12e10c8 100644
--- a/server/site_tests/firmware_DevScreenTimeout/firmware_DevScreenTimeout.py
+++ b/server/site_tests/firmware_DevScreenTimeout/firmware_DevScreenTimeout.py
@@ -29,7 +29,8 @@
     # If the margin is too small and firmware initialization is too fast,
     # the test will fail incorrectly.
     TIMEOUT_MARGIN = 5
-    RUN_SHELL_READY_TIME_MARGIN = 5
+    # Time for the concerned files to be added to the filesystem.
+    RUN_SHELL_READY_TIME_MARGIN = 10
 
     fw_time_record = {}
 
@@ -61,11 +62,23 @@
         @param tag: A tag about this boot.
         @raise TestError: If the firmware-boot-time file does not exist.
         """
-        time.sleep(self.RUN_SHELL_READY_TIME_MARGIN)
-        [fw_time] = self.faft_client.system.run_shell_command_get_output(
-                'cat /tmp/firmware-boot-time')
-        logging.info('Got firmware boot time [%s]: %s', tag, fw_time)
-        if fw_time:
+        elapsed_time = 0
+        fw_time = 0
+        # Try getting the firmware boot time for 10 seconds at 1 second retry
+        # interval. This is required to allow the send_boot_metrics upstart job
+        # to create that file.
+        while elapsed_time < self.RUN_SHELL_READY_TIME_MARGIN:
+            time.sleep(1)
+            status = self.faft_client.system.run_shell_command_get_status(
+                    '[ -s /tmp/firmware-boot-time ]')
+            if status == 0:
+                [fw_time] = self.faft_client.system.run_shell_command_get_output(
+                        'cat /tmp/firmware-boot-time')
+                break
+            elapsed_time += 1
+
+        if fw_time and elapsed_time < self.RUN_SHELL_READY_TIME_MARGIN:
+            logging.info('Got firmware boot time [%s]: %s', tag, fw_time)
             self.fw_time_record[tag] = float(fw_time)
         else:
             raise error.TestError('Failed to get the firmware boot time.')
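
The retry loop added above waits for the send_boot_metrics upstart job to populate /tmp/firmware-boot-time before reading it. As an aside, a similar wait could be expressed with autotest's poll_for_condition() helper; the sketch below is illustrative only, assumes poll_for_condition from client.common_lib.utils, and mirrors the names used in the test rather than replacing its code.

from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import utils


def read_firmware_boot_time(faft_client, timeout=10):
    """Wait for /tmp/firmware-boot-time to be non-empty, then return it."""
    utils.poll_for_condition(
            lambda: faft_client.system.run_shell_command_get_status(
                    '[ -s /tmp/firmware-boot-time ]') == 0,
            exception=error.TestError('firmware-boot-time was never written'),
            timeout=timeout,
            sleep_interval=1)
    [fw_time] = faft_client.system.run_shell_command_get_output(
            'cat /tmp/firmware-boot-time')
    return float(fw_time)
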
diff --git a/server/site_tests/firmware_ECAdc/control b/server/site_tests/firmware_ECAdc/control
index 0632ad3..033c3c2 100644
--- a/server/site_tests/firmware_ECAdc/control
+++ b/server/site_tests/firmware_ECAdc/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECAdc"
 PURPOSE = "Servo based EC ADC test"
 CRITERIA = "This test will fail if EC ADC misbehalved."
@@ -12,7 +12,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-DEPENDENCIES = "ec:cros"
+JOB_RETRIES = 0
+DEPENDENCIES = "ec:cros, servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This is a stress test for EC ADC. We keep polling EC internal temperature
diff --git a/server/site_tests/firmware_ECAdc/firmware_ECAdc.py b/server/site_tests/firmware_ECAdc/firmware_ECAdc.py
index e3bd107..36f89ca 100644
--- a/server/site_tests/firmware_ECAdc/firmware_ECAdc.py
+++ b/server/site_tests/firmware_ECAdc/firmware_ECAdc.py
@@ -34,5 +34,5 @@
             raise error.TestNAError("Nothing needs to be tested on this device")
         logging.info("Reading EC internal temperature for %d times.",
                      self.READ_COUNT)
-        for _ in xrange(self.READ_COUNT):
+        for _ in range(self.READ_COUNT):
             self._check_read()
diff --git a/server/site_tests/firmware_ECBattery/control b/server/site_tests/firmware_ECBattery/control
index 2b84479..dbf7dbf 100644
--- a/server/site_tests/firmware_ECBattery/control
+++ b/server/site_tests/firmware_ECBattery/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECBattery"
 PURPOSE = "Servo based EC battery status report test"
 CRITERIA = "This test will fail if EC battery status report misbehalved."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot, suite:bvt-faft, suite:faft_smoke, suite:labqual"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot, suite:faft_smoke, suite:labqual, suite:distributed_lab_qual_faft"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks that the battery status read from the EC console and kernel sysfs match.
diff --git a/server/site_tests/firmware_ECBootTime/control b/server/site_tests/firmware_ECBootTime/control
deleted file mode 100644
index 43c92fb..0000000
--- a/server/site_tests/firmware_ECBootTime/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "firmware_ECBootTime"
-PURPOSE = "Servo based EC boot time test"
-CRITERIA = "This test will fail if EC did not meet boot time requirement."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot, suite:faft_smoke, suite:bvt-faft"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "firmware"
-TEST_TYPE = "server"
-DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
-
-DOC = """
-This test measures EC boot time.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_ecboottime(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("firmware_ECBootTime", host=host, cmdline_args=args,
-                 disable_sysinfo=True)
-
-parallel_simple(run_ecboottime, machines)
diff --git a/server/site_tests/firmware_ECBootTime/firmware_ECBootTime.py b/server/site_tests/firmware_ECBootTime/firmware_ECBootTime.py
deleted file mode 100644
index 81bed35..0000000
--- a/server/site_tests/firmware_ECBootTime/firmware_ECBootTime.py
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
-
-
-class firmware_ECBootTime(FirmwareTest):
-    """
-    Servo based EC boot time test.
-    """
-    version = 1
-
-    def initialize(self, host, cmdline_args):
-        super(firmware_ECBootTime, self).initialize(host, cmdline_args)
-        # Don't bother if there is no Chrome EC.
-        if not self.check_ec_capability():
-            raise error.TestNAError("Nothing needs to be tested on this device")
-        # Only run in normal mode
-        self.switcher.setup_mode('normal')
-        self.host = host
-
-    def check_boot_time(self):
-        """Check EC and AP boot times"""
-        # Initialize a list of two strings, one printed by the EC when the AP
-        # is taken out of reset, and another one printed when the EC observes
-        # the AP running. These strings are used as for console output anchors
-        # when calculating the AP boot time.
-        #
-        # This is very approximate, a better long term solution would be to
-        # have the EC print the same fixed strings for these two events on all
-        # platforms. http://crosbug.com/p/21628 has been opened to track this
-        # issue.
-        if self._x86:
-            boot_anchors = ["\[([0-9\.]+) PB",
-                            "\[([0-9\.]+) [^\r\n]*(HC 0x|Port 80|ACPI query)"]
-        elif self._arm_legacy:
-            boot_anchors = ["\[([0-9\.]+) AP running ...",
-                            "\[([0-9\.]+) XPSHOLD seen"]
-        else:
-            boot_anchors = ["\[([0-9\.]+) power state 1 = S5",
-                            "\[([0-9\.]+) power state 3 = S0"]
-
-        # regular expression to say that EC is ready. For systems that
-        # run out of ram there is a second boot where the PMIC is
-        # asked to power cycle the EC to be 100% sure (I wish) that
-        # the code is clean. Looking for the "Inits done" generates a
-        # match after the first boot, and introduces a race between
-        # the EC booting the second time and the test sending the
-        # power_cmd.
-        if self._doubleboot:
-            ec_ready = ["(?ms)UART.*UART.*?\[([0-9.]+) "]
-        else:
-            ec_ready = ["([0-9.]+) Inits done"]
-
-        if self.faft_config.ec_has_powerbtn_cmd:
-            # powerbtn takes ms while hold_pwr_button_powero is seconds.
-            hold_ms = int(1000 * self.faft_config.hold_pwr_button_poweron)
-            power_cmd = 'powerbtn %s' % hold_ms
-        else:
-            power_cmd = 'power on'
-
-        # Try the EC reboot command several times in case the console
-        # output is not clean enough for the full string to be found.
-        retry = 10
-        while retry > 0:
-            retry = retry - 1
-            try:
-                reboot = self.ec.send_command_get_output(
-                    "reboot ap-off", ec_ready)
-                break
-            except error.TestFail:
-                logging.info("Unable to parse EC console output, "
-                             "%d more attempts", retry)
-        if retry == 0:
-            raise error.TestFail("Unable to reboot EC cleanly, " +
-                                 "Please try removing AC power")
-        logging.debug("reboot: %r", reboot)
-
-        # The EC console must be available 1 second after startup
-        time.sleep(1)
-
-        version = self.ec.get_version()
-
-        if not version:
-            raise error.TestFail("Unable to get EC console.")
-
-        # Wait until the ap enter the G3
-        time.sleep(self.faft_config.ec_reboot_to_g3_delay)
-
-        # Enable printing host commands. Some boards have it disabled by default
-        # After reboot it will be restored to default
-        self.ec.send_command("hcdebug normal")
-
-        # Enable port80 output for x86 devices so there is an early signal from
-        # the host that it is booting, instead of relying on an EC transaction.
-        if self._x86:
-            self.ec.send_command("port80 intprint")
-
-        # Switch on the AP
-        power_press = self.ec.send_command_get_output(
-            power_cmd, boot_anchors)
-
-        # TODO(crbug.com/847289): reboot_time only measures the time spent in
-        # EC's main function, which is not a good measure of "EC cold boot time"
-        reboot_time = float(reboot[0][1])
-        power_press_time = float(power_press[0][1])
-        firmware_resp_time = float(power_press[1][1])
-        boot_time = firmware_resp_time - power_press_time
-        logging.info("EC cold boot time: %f s", reboot_time)
-        if reboot_time > 1.0:
-            raise error.TestFail("EC cold boot time longer than 1 second.")
-        logging.info("EC boot time: %f s", boot_time)
-        if boot_time > 1.0:
-            raise error.TestFail("Boot time longer than 1 second.")
-
-    def is_arm_legacy_board(self):
-        """Detect whether the board is a legacy ARM board.
-
-        This group of boards prints specific strings on the EC console when the
-        EC and AP come out of reset.
-        """
-
-        arm_legacy = ('snow', 'spring', 'pit', 'pi', 'big', 'blaze', 'kitty')
-        output = self.faft_client.system.get_platform_name()
-        return output.lower() in arm_legacy
-
-    def run_once(self):
-        """Execute the main body of the test.
-        """
-
-        self._x86 = ('x86' in self.faft_config.ec_capability)
-        self._doubleboot = ('doubleboot' in self.faft_config.ec_capability)
-        self._arm_legacy = self.is_arm_legacy_board()
-        logging.info("Reboot and check EC cold boot time and host boot time.")
-        self.switcher.mode_aware_reboot('custom', self.check_boot_time)
-
-    def cleanup(self):
-        try:
-            # Restore the ec_uart_regexp to None
-            self.ec.set_uart_regexp('None')
-
-            # Reboot the EC and wait for the host to come up so it is ready for
-            # the next test.
-            self.ec.reboot()
-            self.host.wait_up(timeout=30)
-        except Exception as e:
-            logging.error("Caught exception: %s", str(e))
-        super(firmware_ECBootTime, self).cleanup()
diff --git a/server/site_tests/firmware_ECCbiEeprom/control b/server/site_tests/firmware_ECCbiEeprom/control
index 22a6d5c..f079019 100644
--- a/server/site_tests/firmware_ECCbiEeprom/control
+++ b/server/site_tests/firmware_ECCbiEeprom/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECCbiEeprom"
 PURPOSE = "Servo-based CBI EEPRROM functionality test"
 CRITERIA = "Tests minimum requirements of CBI EEPROM, e.g. write protect."
@@ -15,6 +15,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 Ensure that the EEPROM for Cros Board Info meets minimum hardware requirements.
diff --git a/server/site_tests/firmware_ECCbiEeprom/firmware_ECCbiEeprom.py b/server/site_tests/firmware_ECCbiEeprom/firmware_ECCbiEeprom.py
index 051ca56..ec89a5c 100644
--- a/server/site_tests/firmware_ECCbiEeprom/firmware_ECCbiEeprom.py
+++ b/server/site_tests/firmware_ECCbiEeprom/firmware_ECCbiEeprom.py
@@ -33,6 +33,7 @@
         # Don't bother if CBI isn't on this device.
         if not self.check_ec_capability(['cbi']):
             raise error.TestNAError("Nothing needs to be tested on this device")
+        self.host = host
         cmd = 'ectool locatechip %d %d' % (self.EEPROM_LOCATE_TYPE,
                                            self.EEPROM_LOCATE_INDEX)
         cmd_out = self.faft_client.system.run_shell_command_get_output(
@@ -63,6 +64,12 @@
         except servo.ControlUnavailableError:
             logging.info("i2c_mux_en does not exist. Ignoring.")
 
+        # Check to see if the CBI WP is decoupled.  If it's decoupled, the EC
+        # will have its own signal to control the CBI WP called `EC_CBI_WP`.
+        cmd = 'ectool gpioget ec_cbi_wp'
+        cmd_status = self.faft_client.system.run_shell_command_get_status(cmd)
+        self._wp_is_decoupled = (cmd_status == 0)
+
     def _gen_write_command(self, offset, data):
         return ('ectool i2cxfer %d %d %d %d %s' %
                (self.i2c_port, self.i2c_addr, self.NO_READ, offset, data))
@@ -102,11 +109,31 @@
 
         return before, write_data, after
 
+    def _reset_ec_and_wait_up(self):
+        self.servo.set('cold_reset', 'on')
+        self.servo.set('cold_reset', 'off')
+        self.host.wait_up(timeout=30)
+
     def check_eeprom_write_protected(self):
         """Checks that CBI EEPROM cannot be written to when WP is asserted"""
         self.set_hardware_write_protect(True)
         offset = 0
 
+        if self._wp_is_decoupled:
+            # When the CBI WP is decoupled from the main system write protect,
+            # the EC drives a latch which sets the CBI WP.  This latch is only
+            # reset when EC_RST_ODL is asserted.  Since the WP has changed
+            # above, toggle EC_RST_ODL in order to clear this latch.
+            logging.info(
+                    "CBI WP is EC driven, resetting EC before continuing...")
+            self._reset_ec_and_wait_up()
+
+            # Additionally, EC SW WP must be set in order for the system to be
+            # locked, which is the criteria that the EC uses to assert CBI
+            # EEPROM WP or not.
+            cmd = 'flashrom -p ec --wp-enable'
+            self.faft_client.system.run_shell_command(cmd)
+
         for offset in range(0, self.MAX_BYTES, self.PAGE_SIZE):
             before, write_data, after = self._read_write_data(offset)
 
@@ -124,6 +151,15 @@
         self.set_hardware_write_protect(False)
         offset = 0
 
+        if self._wp_is_decoupled:
+            # When the CBI WP is decoupled from the main system write protect,
+            # the EC drives a latch which sets the CBI WP.  This latch is only
+            # reset when EC_RST_ODL is asserted.  Since the WP has changed
+            # above, toggle EC_RST_ODL in order to clear this latch.
+            logging.info(
+                    "CBI WP is EC driven, resetting EC before continuing...")
+            self._reset_ec_and_wait_up()
+
         for offset in range(0, self.MAX_BYTES, self.PAGE_SIZE):
             before, write_data, after = self._read_write_data(offset)
 
@@ -136,6 +172,16 @@
 
         return True
 
+    def cleanup(self):
+        # Make sure to remove EC SW WP since we enabled it when testing
+        if self._wp_is_decoupled:
+            logging.debug("Disabling EC HW & SW WP...")
+            self.set_hardware_write_protect(False)
+            self._reset_ec_and_wait_up()
+            cmd = 'flashrom -p ec --wp-disable'
+            self.faft_client.system.run_shell_command(cmd)
+        return super(firmware_ECCbiEeprom, self).cleanup()
+
     def run_once(self):
         """Execute the main body of the test."""
 
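
For readers following the decoupled-WP handling above: the EC exposes an EC_CBI_WP GPIO only on boards where CBI write protect is driven by an EC latch, so the probe in initialize() keys off the exit status of `ectool gpioget ec_cbi_wp`. A minimal sketch of that probe as a standalone helper is shown below; the helper name is hypothetical and not part of the change.

def cbi_wp_is_decoupled(faft_client):
    """Return True if the EC drives CBI write protect via EC_CBI_WP."""
    # The gpioget command exits 0 only when the signal exists on this board.
    status = faft_client.system.run_shell_command_get_status(
            'ectool gpioget ec_cbi_wp')
    return status == 0
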
diff --git a/server/site_tests/firmware_ECCharging/control b/server/site_tests/firmware_ECCharging/control
index 4ad60cc..b84f945a 100644
--- a/server/site_tests/firmware_ECCharging/control
+++ b/server/site_tests/firmware_ECCharging/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECCharging"
 PURPOSE = "Servo based EC charging control test"
 CRITERIA = "This test will fail if EC charging control misbehaved."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_ECCharging/firmware_ECCharging.py b/server/site_tests/firmware_ECCharging/firmware_ECCharging.py
index acc53ae..8223004 100644
--- a/server/site_tests/firmware_ECCharging/firmware_ECCharging.py
+++ b/server/site_tests/firmware_ECCharging/firmware_ECCharging.py
@@ -3,9 +3,13 @@
 # found in the LICENSE file.
 
 import logging
+import time
+from xml.parsers import expat
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+from autotest_lib.server.cros.servo import servo
+
 
 class firmware_ECCharging(FirmwareTest):
     """
@@ -20,19 +24,36 @@
     # Threshold of trickle charging current in mA
     TRICKLE_CHARGE_THRESHOLD = 100
 
-    # The dict to cache the battery information
-    BATTERY_INFO = {}
+    # We wait for up to 60 minutes for the battery to allow charging.
+    # kodama in particular takes a long time to discharge
+    DISCHARGE_TIMEOUT = 60 * 60
+
+    # The period to check battery state while discharging.
+    CHECK_BATT_STATE_WAIT = 60
+
+    # The delay to wait for the AC state to update.
+    AC_STATE_UPDATE_DELAY = 3
+
+    # Wait a few seconds after discharging for voltage to stabilize
+    BEGIN_CHARGING_TIMEOUT = 120
+
+    # Sleep for a second between retries when waiting for voltage to stabilize
+    BEGIN_CHARGING_RETRY_TIME = 1
+
+    # After the battery reports it is not full, keep discharging for this long.
+    # This should be >= BEGIN_CHARGING_TIMEOUT
+    EXTRA_DISCHARGE_TIME = BEGIN_CHARGING_TIMEOUT + 30
 
     def initialize(self, host, cmdline_args):
         super(firmware_ECCharging, self).initialize(host, cmdline_args)
         # Don't bother if there is no Chrome EC.
         if not self.check_ec_capability():
-            raise error.TestNAError("Nothing needs to be tested on this device")
+            raise error.TestNAError(
+                    "Nothing needs to be tested on this device")
         # Only run in normal mode
         self.switcher.setup_mode('normal')
         self.ec.send_command("chan 0")
 
-
     def cleanup(self):
         try:
             self.ec.send_command("chan 0xffffffff")
@@ -40,121 +61,90 @@
             logging.error("Caught exception: %s", str(e))
         super(firmware_ECCharging, self).cleanup()
 
-    def _update_battery_info(self):
-        """Get the battery info we care for this test."""
-        # The battery parameters we care for this test. The order must match
-        # the output of EC battery command.
-        battery_params = ['V', 'V-desired', 'I', 'I-desired', 'Charge']
-        regex_str_list = []
+    def _retry_send_cmd(self, command, regex_list):
+        """Send an EC command, and retry if it fails."""
+        retries = 3
+        while retries > 0:
+            retries -= 1
+            try:
+                return self.ec.send_command_get_output(command, regex_list)
+            except (servo.UnresponsiveConsoleError,
+                    servo.ResponsiveConsoleError, expat.ExpatError) as e:
+                if retries <= 0:
+                    raise
+                logging.warning('Failed to send EC cmd. %s', e)
 
-        for p in battery_params:
-            if p == 'Charge':
-                regex_str_list.append(p + ':\s+(\d+)\s+')
-            else:
-                regex_str_list.append(p + ':\s+0x[0-9a-f]*\s+=\s+([0-9-]+)\s+')
-
-        battery_regex_match = self.ec.send_command_get_output('battery',
-                                                              regex_str_list)
-        for i in range(len(battery_params)):
-            self.BATTERY_INFO[battery_params[i]] = int(
-                    battery_regex_match[i][1])
-
-
-    def _get_battery_desired_voltage(self):
-        """Get battery desired voltage value."""
-        if not self.BATTERY_INFO:
-            self._update_battery_info()
-        logging.info('Battery desired voltage = %d mV',
-                     self.BATTERY_INFO['V-desired'])
-        return self.BATTERY_INFO['V-desired']
-
-
-    def _get_battery_desired_current(self):
-        """Get battery desired current value."""
-        if not self.BATTERY_INFO:
-            self._update_battery_info()
-        logging.info('Battery desired current = %d mA',
-                     self.BATTERY_INFO['I-desired'])
-        return self.BATTERY_INFO['I-desired']
-
-
-    def _get_battery_actual_voltage(self):
-        """Get the actual voltage from charger to battery."""
-        if not self.BATTERY_INFO:
-            self._update_battery_info()
-        logging.info('Battery actual voltage = %d mV', self.BATTERY_INFO['V'])
-        return self.BATTERY_INFO['V']
-
-
-    def _get_battery_actual_current(self):
-        """Get the actual current from charger to battery."""
-        if not self.BATTERY_INFO:
-            self._update_battery_info()
-        logging.info('Battery actual current = %d mA', self.BATTERY_INFO['I'])
-        return self.BATTERY_INFO['I']
-
-
-    def _get_battery_charge(self):
-        """Get battery charge state."""
-        if not self.BATTERY_INFO:
-            self._update_battery_info()
-        logging.info("Battery charge = %d %%", self.BATTERY_INFO['Charge'])
-        return self.BATTERY_INFO['Charge']
-
-
-    def _get_charger_target_voltage(self):
-        """Get target charging voltage set in charger."""
-        voltage = int(self.ec.send_command_get_output("charger",
-                ["V_batt:\s+(\d+)\s"])[0][1])
-        logging.info("Charger target voltage = %d mV", voltage)
-        return voltage
-
-
-    def _get_charger_target_current(self):
-        """Get target charging current set in charger."""
-        current = int(self.ec.send_command_get_output("charger",
-                ["I_batt:\s+(\d+)\s"])[0][1])
-        logging.info("Charger target current = %d mA", current)
-        return current
-
+    def _get_charge_state(self):
+        """Get charger and battery information in a single call."""
+        output = self._retry_send_cmd("chgstate", [
+                r"chg\.\*:",
+                r"voltage = (-?\d+)mV",
+                r"current = (-?\d+)mA",
+                r"batt\.\*:",
+                r"voltage = (-?\d+)mV",
+                r"current = (-?\d+)mA",
+                r"desired_voltage = (-?\d+)mV",
+                r"desired_current = (-?\d+)mA",
+        ])
+        result = {
+                "charger_target_voltage": int(output[1][1]),
+                "charger_target_current": int(output[2][1]),
+                "battery_actual_voltage": int(output[4][1]),
+                "battery_actual_current": int(output[5][1]),
+                "battery_desired_voltage": int(output[6][1]),
+                "battery_desired_current": int(output[7][1]),
+        }
+        logging.info("Charger & battery info: %s", result)
+        return result
 
     def _get_trickle_charging(self):
         """Check if we are trickle charging battery."""
-        return (self._get_battery_desired_current() <
+        return (self.ec.get_battery_desired_current() <
                 self.TRICKLE_CHARGE_THRESHOLD)
 
-
-    def _check_target_value(self):
-        """Check charger target values are correct.
+    def _check_voltages_and_currents(self):
+        """Check that the battery and charger voltages and currents are within
+        acceptable limits.
 
         Raise:
           error.TestFail: Raised when check fails.
         """
-        if (self._get_charger_target_voltage() >=
-                1.05 * self._get_battery_desired_voltage()):
-            raise error.TestFail("Charger target voltage is too high.")
-        if (self._get_charger_target_current() >=
-                1.05 * self._get_battery_desired_current()):
-            raise error.TestFail("Charger target current is too high.")
+        state = self._get_charge_state()
+        target_voltage = state['charger_target_voltage']
+        desired_voltage = state['battery_desired_voltage']
+        target_current = state['charger_target_current']
+        desired_current = state['battery_desired_current']
+        actual_voltage = state['battery_actual_voltage']
+        actual_current = state['battery_actual_current']
+        logging.info("Checking charger target values...")
+        if (target_voltage >= 1.05 * desired_voltage):
+            raise error.TestFail(
+                    "Charger target voltage is too high. %d/%d=%f" %
+                    (target_voltage, desired_voltage,
+                     float(target_voltage) / desired_voltage))
+        if (target_current >= 1.05 * desired_current):
+            raise error.TestFail(
+                    "Charger target current is too high. %d/%d=%f" %
+                    (target_current, desired_current,
+                     float(target_current) / desired_current))
 
-
-    def _check_actual_value(self):
-        """Check actual voltage/current values are correct.
-
-        Raise:
-          error.TestFail: Raised when check fails.
-        """
-        if (self._get_battery_actual_voltage() >=
-                1.05 * self._get_charger_target_voltage()):
-            raise error.TestFail("Battery actual voltage is too high.")
-        if (self._get_battery_actual_current() >=
-                1.05 * self._get_charger_target_current()):
-            raise error.TestFail("Battery actual current is too high.")
+        logging.info("Checking battery actual values...")
+        if (actual_voltage >= 1.05 * target_voltage):
+            raise error.TestFail(
+                    "Battery actual voltage is too high. %d/%d=%f" %
+                    (actual_voltage, target_voltage,
+                     float(actual_voltage) / target_voltage))
+        if (actual_current >= 1.05 * target_current):
+            raise error.TestFail(
+                    "Battery actual current is too high. %d/%d=%f" %
+                    (actual_current, target_current,
+                     float(actual_current) / target_current))
 
     def _check_if_discharge_on_ac(self):
         """Check if DUT is performing discharge on AC"""
-        match = self.ec.send_command_get_output("battery",
-                ["Status:\s*(0x[0-9a-f]+)\s", "Param flags:\s*([0-9a-f]+)\s"])
+        match = self._retry_send_cmd("battery", [
+                r"Status:\s*(0x[0-9a-f]+)\s", r"Param flags:\s*([0-9a-f]+)\s"
+        ])
         status = int(match[0][1], 16)
         params = int(match[1][1], 16)
 
@@ -164,25 +154,131 @@
 
         return False
 
+    def _check_battery_discharging(self):
+        """Check if AC is attached and if charge control is normal."""
+        # chg_ctl_mode may look like: chg_ctl_mode = 2
+        # or: chg_ctl_mode = DISCHARGE (2)
+        # The regex needs to match either one.
+        output = self._retry_send_cmd("chgstate", [
+                r"ac\s*=\s*(\d)\s*",
+                r"chg_ctl_mode\s*=\s*(\S* \(\d+\)|\d+)\r\n"
+        ])
+        ac_state = int(output[0][1])
+        chg_ctl_mode = output[1][1]
+        if ac_state == 0:
+            return True
+        if chg_ctl_mode == "2" or chg_ctl_mode == "DISCHARGE (2)":
+            return True
+        return False
+
+    def _set_battery_discharge(self):
+        """Instruct the EC to drain the battery."""
+        # Ask EC to drain the battery
+        output = self._retry_send_cmd("chgstate discharge on", [
+                r"state =|Parameter 1 invalid",
+        ])
+        logging.debug("chgstate returned %s", output)
+        if output[0] == 'Parameter 1 invalid':
+            raise error.TestNAError(
+                    "Device doesn't support CHARGER_DISCHARGE_ON_AC, "
+                    "please drain battery below full and run the test again.")
+        time.sleep(self.AC_STATE_UPDATE_DELAY)
+
+        # Verify discharging. Either AC off or charge control discharge is
+        # good.
+        if not self._check_battery_discharging():
+            raise error.TestFail("Battery is not discharging.")
+
+    def _set_battery_normal(self):
+        """Instruct the EC to charge the battery as normal."""
+        self.ec.send_command("chgstate discharge off")
+        time.sleep(self.AC_STATE_UPDATE_DELAY)
+
+        # Verify AC is on and charge control is normal.
+        if self._check_battery_discharging():
+            raise error.TestFail("Fail to plug AC and enable charging.")
+        self.ec.update_battery_info()
+
+    def _consume_battery(self, deadline):
+        """Perform battery intensive operation to make the battery discharge
+        faster."""
+        # Switch to servo drain after b/140965614.
+        stress_time = deadline - time.time()
+        if stress_time > self.CHECK_BATT_STATE_WAIT:
+            stress_time = self.CHECK_BATT_STATE_WAIT
+        self._client.run("stressapptest -s %d " % stress_time,
+                         ignore_status=True)
+
+    def _discharge_below_100(self):
+        """Remove AC power until the battery is not full."""
+        self._set_battery_discharge()
+        logging.info(
+                "Keep discharging until the battery reports charging allowed.")
+
+        try:
+            # Wait until DISCHARGE_TIMEOUT or charging allowed
+            deadline = time.time() + self.DISCHARGE_TIMEOUT
+            while time.time() < deadline:
+                self.ec.update_battery_info()
+                if self.ec.get_battery_charging_allowed():
+                    break
+                logging.info("Wait for the battery to discharge (%d mAh).",
+                             self.ec.get_battery_remaining())
+                self._consume_battery(deadline)
+            else:
+                raise error.TestFail(
+                        "The battery does not report charging allowed "
+                        "before timeout is reached.")
+
+            # Wait another EXTRA_DISCHARGE_TIME just to be sure
+            deadline = time.time() + self.EXTRA_DISCHARGE_TIME
+            while time.time() < deadline:
+                self.ec.update_battery_info()
+                logging.info(
+                        "Wait for the battery to discharge even more (%d mAh).",
+                        self.ec.get_battery_remaining())
+                self._consume_battery(deadline)
+        finally:
+            self._set_battery_normal()
+
+        # For many devices, it takes some time after discharging for the
+        # battery to actually start charging.
+        deadline = time.time() + self.BEGIN_CHARGING_TIMEOUT
+        while time.time() < deadline:
+            self.ec.update_battery_info()
+            if self.ec.get_battery_actual_current() >= 0:
+                break
+            logging.info(
+                    'Battery actual current (%d) too low, wait a bit. (%d mAh)',
+                    self.ec.get_battery_actual_current(),
+                    self.ec.get_battery_remaining())
+            self._consume_battery(deadline)
+
     def run_once(self):
         """Execute the main body of the test.
         """
         if not self.check_ec_capability(['battery', 'charging']):
-            raise error.TestNAError("Nothing needs to be tested on this device")
-        if self._get_battery_charge() == 100:
-            logging.info("Battery is full. Unable to test.")
-            return
+            raise error.TestNAError(
+                    "Nothing needs to be tested on this device")
+        if (not self.ec.get_battery_charging_allowed()
+                    or self.ec.get_battery_actual_current() < 0):
+            logging.info(
+                    "Battery is full or discharging. Forcing battery discharge "
+                    "to test charging.")
+            self._discharge_below_100()
+            if not self.ec.get_battery_charging_allowed():
+                raise error.TestFail(
+                        "Battery reports charging is not allowed, even after "
+                        "discharging.")
         if self._check_if_discharge_on_ac():
-            logging.info("DUT is performing discharge on AC. Unable to test.")
-            return
+            raise error.TestNAError(
+                    "DUT is performing discharge on AC. Unable to test.")
         if self._get_trickle_charging():
-            logging.info("Trickling charging battery. Unable to test.")
-            return
-        if self._get_battery_actual_current() < 0:
-            raise error.TestFail("This test must be run with AC power.")
+            raise error.TestNAError(
+                    "Trickle charging the battery. Unable to test.")
+        if self.ec.get_battery_actual_current() < 0:
+            raise error.TestFail(
+                    "The device is not charging. Is the test running with "
+                    "AC plugged in?")
 
-        logging.info("Checking charger target values...")
-        self._check_target_value()
-
-        logging.info("Checking battery actual values...")
-        self._check_actual_value()
+        self._check_voltages_and_currents()
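
Both charging tests in this change add a _retry_send_cmd wrapper around send_command_get_output, retrying on servo.UnresponsiveConsoleError, servo.ResponsiveConsoleError, and expat.ExpatError. As a standalone sketch of that retry pattern (not part of the patch), the snippet below uses a placeholder exception type and a plain callable instead of the servo console API; the optional delay between attempts is an illustrative addition, the real helper retries immediately.

import logging
import time


class ConsoleError(Exception):
    """Placeholder for the transient console errors the tests retry on."""


def retry_console_call(func, retries=3, delay=0.0):
    """Call func(), retrying up to `retries` times on transient errors.

    The last failure is re-raised so callers still see a hard error.
    """
    for attempt in range(1, retries + 1):
        try:
            return func()
        except ConsoleError as e:
            if attempt == retries:
                raise
            logging.warning('Console call failed (attempt %d/%d): %s',
                            attempt, retries, e)
            time.sleep(delay)


if __name__ == '__main__':
    calls = {'count': 0}

    def flaky():
        # Fail twice, then succeed, to exercise the retry loop.
        calls['count'] += 1
        if calls['count'] < 3:
            raise ConsoleError('transient EC console hiccup')
        return 'battery: OK'

    print(retry_console_call(flaky))
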
diff --git a/server/site_tests/firmware_ECChargingState/control b/server/site_tests/firmware_ECChargingState/control
index 9580a69..b5f6a97 100644
--- a/server/site_tests/firmware_ECChargingState/control
+++ b/server/site_tests/firmware_ECChargingState/control
@@ -8,13 +8,14 @@
 NAME = "firmware_ECChargingState"
 PURPOSE = "Servo based EC charging state test"
 CRITERIA = "This test will fail if EC charging state is incorrect."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test checks if the power_supply_info command reports the correct charging
diff --git a/server/site_tests/firmware_ECChargingState/firmware_ECChargingState.py b/server/site_tests/firmware_ECChargingState/firmware_ECChargingState.py
index db9afb8..7ac964f 100644
--- a/server/site_tests/firmware_ECChargingState/firmware_ECChargingState.py
+++ b/server/site_tests/firmware_ECChargingState/firmware_ECChargingState.py
@@ -4,8 +4,10 @@
 
 import logging
 import time
+from xml.parsers import expat
 
 from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.servo import servo
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
 
 
@@ -24,14 +26,28 @@
     # The period to check battery state while charging.
     CHECK_BATT_STATE_WAIT = 60
 
+    # The min battery charged percentage that can be considered "full" by
+    # powerd. Should be kPowerSupplyFullFactorPref, which defaults to 98%, but
+    # that is a pref so set it a little lower to be safe.
+    FULL_BATTERY_PERCENT = 95
+
+    # Battery status
+    STATUS_FULLY_CHARGED = 0x20
+    STATUS_DISCHARGING = 0x40
+    STATUS_TERMINATE_CHARGE_ALARM = 0x4000
+    STATUS_OVER_CHARGED_ALARM = 0x8000
+    # TERMINATE_CHARGE_ALARM and OVER_CHARGED_ALARM are alarms that show up
+    # during normal use. Other alarms should not appear during testing.
+    STATUS_ALARM_MASK = (0xFF00 & ~STATUS_TERMINATE_CHARGE_ALARM
+                         & ~STATUS_OVER_CHARGED_ALARM)
+
     def initialize(self, host, cmdline_args):
         super(firmware_ECChargingState, self).initialize(host, cmdline_args)
         if not self.check_ec_capability(['battery', 'charging']):
             raise error.TestNAError("Nothing needs to be tested on this DUT")
-        if self.servo.get_servo_version() != 'servo_v4_with_ccd_cr50':
-            raise error.TestNAError("This test can only be run with servo-v4 "
-                    "+ CCD. If you don't have a Type-C servo-v4, please run "
-                    "the test manually.")
+        if not self.servo.is_servo_v4_type_c():
+            raise error.TestNAError(
+                    "This test can only be run with servo-v4 Type-C.")
         if host.is_ac_connected() != True:
             raise error.TestFail("This test must be run with AC power.")
         self.switcher.setup_mode('normal')
@@ -49,8 +65,9 @@
 
     def check_ac_state(self):
         """Check if AC is plugged."""
-        ac_state = int(self.ec.send_command_get_output("chgstate",
-            ["ac\s*=\s*(0|1)\s*"])[0][1])
+        ac_state = int(
+                self.ec.send_command_get_output("chgstate",
+                                                ["ac\s*=\s*(0|1)\s*"])[0][1])
         if ac_state == 1:
             return 'on'
         elif ac_state == 0:
@@ -58,11 +75,84 @@
         else:
             return 'unknown'
 
-    def get_battery_level(self):
-        """Get battery charge percentage."""
-        batt_level = int(self.ec.send_command_get_output("battery",
-                ["Charge:\s+(\d+)\s+"])[0][1])
-        return batt_level
+    def _retry_send_cmd(self, command, regex_list):
+        """Send an EC command, and retry if it fails."""
+        retries = 3
+        while retries > 0:
+            retries -= 1
+            try:
+                return self.ec.send_command_get_output(command, regex_list)
+            except (servo.UnresponsiveConsoleError,
+                    servo.ResponsiveConsoleError, expat.ExpatError) as e:
+                if retries <= 0:
+                    raise
+                logging.warning('Failed to send EC cmd. %s', e)
+
+    def _get_battery_info(self):
+        """Return information about the battery in a dict."""
+        match = self._retry_send_cmd("battery", [
+                r"Status:\s*(0x[0-9a-f]+)\s",
+                r"Param flags:\s*([0-9a-f]+)\s",
+                r"Charge:\s+(\d+)\s+",
+        ])
+        status = int(match[0][1], 16)
+        params = int(match[1][1], 16)
+        level = int(match[2][1])
+
+        result = {
+                "status": status,
+                "flags": params,
+                "level": level,
+        }
+
+        if status & self.STATUS_ALARM_MASK != 0:
+            raise error.TestFail("Battery should not throw alarms: %s" %
+                                 result)
+
+        # The battery may raise a TERMINATE_CHARGE alarm transiently as
+        # it becomes fully charged. Exempt that case, but catch cases where
+        # it's yelling to stop for something like invalid charge parameters.
+        if (status & (self.STATUS_TERMINATE_CHARGE_ALARM
+                      | self.STATUS_FULLY_CHARGED)
+                == self.STATUS_TERMINATE_CHARGE_ALARM):
+            raise error.TestFail(
+                    "Battery raising TERMINATE_CHARGE alarm non-full: %s" %
+                    result)
+        return result
+
+    def _check_kernel_battery_state(
+            self,
+            sysfs_battery_state,
+            ec_battery_info,
+    ):
+        if sysfs_battery_state == 'Charging':
+            # Charging is just not-discharging. There is no EC battery status
+            # for charging.
+            if ec_battery_info['status'] & self.STATUS_DISCHARGING != 0:
+                raise error.TestFail(
+                        'Kernel reports battery %s, but actual state is %s' %
+                        (sysfs_battery_state, ec_battery_info))
+        elif sysfs_battery_state == 'Fully charged':
+            # Powerd has its own way of determining "full"; it doesn't use
+            # the status from the EC. So consider it acceptable if the EC
+            # reports fully charged or the level is at or above
+            # FULL_BATTERY_PERCENT.
+            if (
+                    ec_battery_info['status'] & self.STATUS_FULLY_CHARGED == 0
+                    and ec_battery_info['level'] < self.FULL_BATTERY_PERCENT):
+                raise error.TestFail(
+                        'Kernel reports battery %s, but actual state is %s' %
+                        (sysfs_battery_state, ec_battery_info))
+        elif (sysfs_battery_state == 'Not charging'
+              or sysfs_battery_state == 'Discharging'):
+            if ec_battery_info['status'] & self.STATUS_DISCHARGING == 0:
+                raise error.TestFail(
+                        'Kernel reports battery %s, but actual state is %s' %
+                        (sysfs_battery_state, ec_battery_info))
+        else:
+            raise error.TestFail(
+                    'Kernel reports battery %s, but actual state is %s' %
+                    (sysfs_battery_state, ec_battery_info))
 
     def run_once(self, host):
         """Execute the main body of the test."""
@@ -87,10 +177,12 @@
         self.servo.power_normal_press()
         self.switcher.wait_for_client()
 
-        batt_state = host.get_battery_state()
-        if batt_state != 'Discharging':
-            raise error.TestFail("Wrong battery state. Expected: "
-                    "Discharging, got: %s." % batt_state)
+        battery = self._get_battery_info()
+        sysfs_battery_state = host.get_battery_state()
+        if battery['status'] & self.STATUS_DISCHARGING == 0:
+            raise error.TestFail("Wrong battery status. Expected: "
+                                 "Discharging, got: %s." % battery)
+        self._check_kernel_battery_state(sysfs_battery_state, battery)
 
         logging.info("Suspend, plug AC, and then wake up the device.")
         self.suspend()
@@ -105,27 +197,33 @@
         self.servo.power_normal_press()
         self.switcher.wait_for_client()
 
-        batt_state = host.get_battery_state()
-        if batt_state != 'Charging' and batt_state != 'Fully charged':
+        battery = self._get_battery_info()
+        sysfs_battery_state = host.get_battery_state()
+        if (battery['status'] & self.STATUS_FULLY_CHARGED == 0
+                    and battery['status'] & self.STATUS_DISCHARGING != 0):
             raise error.TestFail("Wrong battery state. Expected: "
-                    "Charging/Fully charged, got: %s." % batt_state)
-
+                                 "Charging/Fully charged, got: %s." % battery)
+        self._check_kernel_battery_state(sysfs_battery_state, battery)
         logging.info("Keep charging until the battery reports fully charged.")
         deadline = time.time() + self.FULL_CHARGE_TIMEOUT
         while time.time() < deadline:
-            batt_state = host.get_battery_state()
-            if batt_state == 'Fully charged':
+            battery = self._get_battery_info()
+            if battery['status'] & self.STATUS_FULLY_CHARGED != 0:
                 logging.info("The battery reports fully charged.")
+                self._check_kernel_battery_state(host.get_battery_state(),
+                                                 battery)
                 return
-            elif batt_state == 'Charging':
-                logging.info("Wait for the battery to be fully charged. "
-                        "The current battery level is %d%%.",
-                        self.get_battery_level())
+            elif battery['status'] & self.STATUS_DISCHARGING == 0:
+                logging.info(
+                        "Wait for the battery to be fully charged. "
+                        "The current battery level is %d%%.", battery['level'])
             else:
-                raise error.TestFail("The battery state is %s. "
-                        "Is AC unplugged?", batt_state)
+                raise error.TestFail("Wrong battery state. Expected: "
+                                     "Charging/Fully charged, got: %s." %
+                                     battery)
             time.sleep(self.CHECK_BATT_STATE_WAIT)
 
-        raise error.TestFail("The battery does not report fully charged "
-                "before timeout is reached. The final battery level is %d%%.",
-                self.get_battery_level())
+        raise error.TestFail(
+                "The battery does not report fully charged "
+                "before the timeout is reached. The final battery "
+                "level is %d%%." % battery['level'])
diff --git a/server/site_tests/firmware_ECHash/control b/server/site_tests/firmware_ECHash/control
index 89977ed..796b65d 100644
--- a/server/site_tests/firmware_ECHash/control
+++ b/server/site_tests/firmware_ECHash/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECHash"
 PURPOSE = "Servo based EC hash recompute test"
 CRITERIA = "This test will fail if EC failed to recompute its hash."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test ensures that the AP will ask the EC to recompute the hash if
diff --git a/server/site_tests/firmware_ECHash/firmware_ECHash.py b/server/site_tests/firmware_ECHash/firmware_ECHash.py
index bebfadd..a69e0e8 100644
--- a/server/site_tests/firmware_ECHash/firmware_ECHash.py
+++ b/server/site_tests/firmware_ECHash/firmware_ECHash.py
@@ -23,6 +23,9 @@
 
     def initialize(self, host, cmdline_args):
         super(firmware_ECHash, self).initialize(host, cmdline_args)
+        if self._no_ec_sync:
+            raise error.TestNAError(
+                    "User selected to disable EC software sync")
         self.backup_firmware()
         self.switcher.setup_mode('normal')
         self.setup_usbkey(usbkey=False)
diff --git a/server/site_tests/firmware_ECKeyboard/control b/server/site_tests/firmware_ECKeyboard/control
index c69d112..80909cf 100644
--- a/server/site_tests/firmware_ECKeyboard/control
+++ b/server/site_tests/firmware_ECKeyboard/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECKeyboard"
 PURPOSE = "Servo based EC keyboard test"
 CRITERIA = "This test will fail if EC keyboard misbehalved."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test check if EC can correctly send keyboard event to host.
diff --git a/server/site_tests/firmware_ECKeyboard/firmware_ECKeyboard.py b/server/site_tests/firmware_ECKeyboard/firmware_ECKeyboard.py
index 5c52953..abc415b 100644
--- a/server/site_tests/firmware_ECKeyboard/firmware_ECKeyboard.py
+++ b/server/site_tests/firmware_ECKeyboard/firmware_ECKeyboard.py
@@ -3,8 +3,9 @@
 # found in the LICENSE file.
 
 import logging
-import time
+from threading import Timer
 
+from autotest_lib.client.bin.input import linux_input
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
 
@@ -15,43 +16,60 @@
     """
     version = 1
 
-    # Delay between commands
-    CMD_DELAY = 1
+    # Delay to ensure client is ready to read the key press.
+    KEY_PRESS_DELAY = 2
 
-    # Delay to wait until developer console is open.
-    DEV_CONSOLE_DELAY = 2
+    # Map the to-be-tested keys to the expected linux keycodes.
+    TEST_KEY_MAP = {
+            '0': linux_input.KEY_0,
+            'b': linux_input.KEY_B,
+            'e': linux_input.KEY_E,
+            'o': linux_input.KEY_O,
+            'r': linux_input.KEY_R,
+            's': linux_input.KEY_S,
+            't': linux_input.KEY_T,
+            '<enter>': linux_input.KEY_ENTER,
+            '<ctrl_l>': linux_input.KEY_LEFTCTRL,
+            '<alt_l>': linux_input.KEY_LEFTALT
+    }
 
     def initialize(self, host, cmdline_args):
         super(firmware_ECKeyboard, self).initialize(host, cmdline_args)
         # Only run in normal mode
         self.switcher.setup_mode('normal')
 
-    def switch_tty2(self):
-        """Switch to tty2 console."""
-        self.ec.key_down('<ctrl_l>')
-        self.ec.key_down('<alt_l>')
-        self.ec.key_down('<f2>')
-        self.ec.key_up('<f2>')
-        self.ec.key_up('<alt_l>')
-        self.ec.key_up('<ctrl_l>')
-        time.sleep(self.DEV_CONSOLE_DELAY)
+    def cleanup(self):
+        self.faft_client.system.run_shell_command('start ui')
+        super(firmware_ECKeyboard, self).cleanup()
 
-    def reboot_by_keyboard(self):
-        """
-        Simulate key press sequence to log into console and then issue reboot
-        command.
-        """
-        self.switch_tty2()
-        self.ec.send_key_string('root<enter>')
-        time.sleep(self.CMD_DELAY)
-        self.ec.send_key_string('test0000<enter>')
-        time.sleep(self.CMD_DELAY)
-        self.ec.send_key_string('reboot<enter>')
+    def send_string(self, keys):
+        """Send a string over a servo"""
+        for key in keys:
+            self.servo.set_nocheck('arb_key_config', key)
+            self.servo.set_nocheck('arb_key', 'tab')
 
     def run_once(self):
         """Runs a single iteration of the test."""
         if not self.check_ec_capability(['keyboard']):
             raise error.TestNAError("Nothing needs to be tested on this device")
 
-        logging.info("Use key press simulation to issue reboot command.")
-        self.switcher.mode_aware_reboot('custom', self.reboot_by_keyboard)
+        test_keys = []
+        expected_keycodes = []
+
+        for key in self.TEST_KEY_MAP:
+            test_keys.append(key)
+            expected_keycodes.append(self.TEST_KEY_MAP[key])
+
+        # Stop UI so that key presses don't go to Chrome.
+        self.faft_client.system.run_shell_command('stop ui')
+
+        if self.servo.has_control('init_usb_keyboard'):
+            logging.debug('Turning off HID keyboard emulator.')
+            self.servo.set_nocheck('init_usb_keyboard', 'off')
+
+        Timer(self.KEY_PRESS_DELAY,
+              lambda: self.send_string(test_keys)).start()
+        keys_matched = self.faft_client.system.check_keys(expected_keycodes)
+        logging.debug("Matched %d keys", keys_matched)
+        if keys_matched < 0:
+            raise error.TestFail("Some test keys were not captured.")
diff --git a/server/site_tests/firmware_ECKeyboardReboot/control b/server/site_tests/firmware_ECKeyboardReboot/control
index b770296..66bc7c6 100644
--- a/server/site_tests/firmware_ECKeyboardReboot/control
+++ b/server/site_tests/firmware_ECKeyboardReboot/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECKeyboardReboot"
 PURPOSE = "Servo based EC reboot test"
 CRITERIA = "This test will fail if failed to reboot via dut-control ec_uart_cmd:reboot."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test runs dut-control ec_uart_cmd:reboot command.
diff --git a/server/site_tests/firmware_ECLidShutdown/control b/server/site_tests/firmware_ECLidShutdown/control
index 61f8925..9c1e982 100644
--- a/server/site_tests/firmware_ECLidShutdown/control
+++ b/server/site_tests/firmware_ECLidShutdown/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECLidShutdown"
 PURPOSE = "Verify functionality of DUT with GBB_FLAG_DISABLE_LID_SHUTDOWN"
 CRITERIA = "This test will ensure disable lid shutdown GBB flag working"
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks the functionality of the disable lid shutdown GBB flag.
diff --git a/server/site_tests/firmware_ECLidShutdown/firmware_ECLidShutdown.py b/server/site_tests/firmware_ECLidShutdown/firmware_ECLidShutdown.py
index 68fa591..ad7ff8e 100644
--- a/server/site_tests/firmware_ECLidShutdown/firmware_ECLidShutdown.py
+++ b/server/site_tests/firmware_ECLidShutdown/firmware_ECLidShutdown.py
@@ -37,7 +37,7 @@
             self._reset_ec_regexp()
             logging.info('The screen should turn back on now, during cleanup.')
             self.servo.set_nocheck('lid_open', 'yes')
-            time.sleep(self.LID_POWER_STATE_DELAY)
+            time.sleep(self.faft_config.firmware_screen)
             if self.servo.get('lid_open') != 'yes':
                 raise error.TestFail('The device did not stay in a mechanical'
                                      'on state after a lid open.')
diff --git a/server/site_tests/firmware_ECLidSwitch/control b/server/site_tests/firmware_ECLidSwitch/control
index c2077ee..3f8f711 100644
--- a/server/site_tests/firmware_ECLidSwitch/control
+++ b/server/site_tests/firmware_ECLidSwitch/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECLidSwitch"
 PURPOSE = "Servo based EC lid switch functional test"
 CRITERIA = "This test will fail if EC lid switch misbehalved."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test check the functionality of EC lid switch handling. This includes:
diff --git a/server/site_tests/firmware_ECLidSwitch/firmware_ECLidSwitch.py b/server/site_tests/firmware_ECLidSwitch/firmware_ECLidSwitch.py
index 7850f60..48f4f97 100644
--- a/server/site_tests/firmware_ECLidSwitch/firmware_ECLidSwitch.py
+++ b/server/site_tests/firmware_ECLidSwitch/firmware_ECLidSwitch.py
@@ -35,11 +35,23 @@
     # Delay between shutdown and wakeup by lid switch
     WAKE_DELAY = 10
 
+    # Number of tries when checking power state
+    POWER_STATE_CHECK_TRIES = 50
+
+    # Delay between checking power state
+    POWER_STATE_CHECK_DELAY = 0.5
+
     def initialize(self, host, cmdline_args):
         super(firmware_ECLidSwitch, self).initialize(host, cmdline_args)
         # Only run in normal mode
         self.switcher.setup_mode('normal')
 
+    def cleanup(self):
+        self.faft_client.system.run_shell_command_get_status(
+                "rm -rf /tmp/power_manager")
+
+        return super().cleanup()
+
     def _open_lid(self):
         """Open lid by servo."""
         self.servo.set('lid_open', 'yes')
@@ -66,28 +78,56 @@
 
     def delayed_wake(self):
         """
-        Confirm the device is in G3, wait for WAKE_DELAY, and then wake DUT
-        with lid switch.
+        Wait for WAKE_DELAY, and then wake DUT with lid switch.
         """
-        self.check_shutdown_power_state(self.POWER_STATE_G3, pwr_retries=10)
         time.sleep(self.WAKE_DELAY)
         self._wake_by_lid_switch()
 
     def immediate_wake(self):
-        """Confirm the device is in G3 and then wake DUT with lid switch."""
-        self.check_shutdown_power_state(self.POWER_STATE_G3, pwr_retries=10)
+        """Wake DUT with lid switch."""
         self._wake_by_lid_switch()
 
+    def shutdown_cmd(self):
+        """Shut down the DUT but don't wait for ping failures."""
+        self.run_shutdown_cmd(wait_for_offline=False)
+
     def shutdown_and_wake(self, shutdown_func, wake_func):
-        """Software shutdown and wake.
+        """Software shutdown and wake with check for power state
 
         Args:
           shutdown_func: Function to shut down DUT.
           wake_func: Delayed function to wake DUT.
         """
+
+        # Call shutdown function to power down device
+        logging.debug('calling shutdown_func')
         shutdown_func()
+
+        # Check device shutdown to correct power state
+        shutdown_power_states = '|'.join(
+                [self.POWER_STATE_S5, self.POWER_STATE_G3])
+        if not self.wait_power_state(shutdown_power_states,
+                                     self.POWER_STATE_CHECK_TRIES,
+                                     self.POWER_STATE_CHECK_DELAY):
+            raise error.TestFail(
+                    'The device failed to reach %s after calling the '
+                    'shutdown function.' % shutdown_power_states)
+
+        # Call wake function to wake up device
+        logging.debug('calling wake_func')
         wake_func()
 
+        # Check power state to verify device woke up to S0
+        wake_power_state = self.POWER_STATE_S0
+        if not self.wait_power_state(wake_power_state,
+                                     self.POWER_STATE_CHECK_TRIES,
+                                     self.POWER_STATE_CHECK_DELAY):
+            raise error.TestFail(
+                    'The device failed to reach %s after calling the wake '
+                    'function.' % wake_power_state)
+        # Wait for the DUT to boot and respond to ssh before we move on.
+        self.switcher.wait_for_client()
+
     def _get_keyboard_backlight(self):
         """Get keyboard backlight brightness.
 
@@ -168,8 +208,12 @@
         if lid switch event controls keycode and backlight as we expected.
         """
         ok = True
-        logging.info("Stopping powerd")
-        self.faft_client.system.run_shell_command('stop powerd')
+        logging.info("Disable use_lid in powerd")
+        self.faft_client.system.run_shell_command(
+                "mkdir -p /tmp/power_manager && "
+                "echo 0 > /tmp/power_manager/use_lid && "
+                "mount --bind /tmp/power_manager /var/lib/power_manager && "
+                "restart powerd")
         if not self.check_keycode():
             logging.error("check_keycode failed.")
             ok = False
@@ -177,7 +221,8 @@
             logging.error("check_backlight failed.")
             ok = False
         logging.info("Restarting powerd")
-        self.faft_client.system.run_shell_command('start powerd')
+        self.faft_client.system.run_shell_command(
+                'umount /var/lib/power_manager && restart powerd')
         return ok
 
     def run_once(self):
@@ -186,22 +231,16 @@
             raise error.TestNAError("Nothing needs to be tested on this device")
 
         logging.info("Shut down and then wake up DUT after a delay.")
-        self.switcher.mode_aware_reboot(
-                'custom',
-                lambda:self.shutdown_and_wake(
-                        shutdown_func=self.run_shutdown_cmd,
-                        wake_func=self.delayed_wake))
+        self.shutdown_and_wake(shutdown_func=self.shutdown_cmd,
+                               wake_func=self.delayed_wake)
+
         logging.info("Shut down and then wake up DUT immediately.")
-        self.switcher.mode_aware_reboot(
-                'custom',
-                lambda:self.shutdown_and_wake(
-                        shutdown_func=self.run_shutdown_cmd,
-                        wake_func=self.immediate_wake))
+        self.shutdown_and_wake(shutdown_func=self.shutdown_cmd,
+                               wake_func=self.immediate_wake)
+
         logging.info("Close and then open the lid when not logged in.")
-        self.switcher.mode_aware_reboot(
-                'custom',
-                lambda:self.shutdown_and_wake(
-                        shutdown_func=self._close_lid,
-                        wake_func=self.immediate_wake))
+        self.shutdown_and_wake(shutdown_func=self._close_lid,
+                               wake_func=self.immediate_wake)
+
         logging.info("Check keycode and backlight.")
         self.check_state(self.check_keycode_and_backlight)
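
firmware_ECLidSwitch now verifies each shutdown and wake by polling the power state, with POWER_STATE_CHECK_TRIES attempts spaced POWER_STATE_CHECK_DELAY seconds apart, accepting either S5 or G3 (joined as 'S5|G3') after a shutdown. Below is a generic sketch of that poll-with-retries loop; get_state is a placeholder for the EC power-state query, and the accepted states are passed as a plain collection rather than the pattern string the real wait_power_state helper takes.

import time


def wait_for_state(get_state, wanted_states, tries, delay):
    """Poll get_state() until it returns one of wanted_states.

    Returns True on success, False once all tries are exhausted.
    """
    for _ in range(tries):
        if get_state() in wanted_states:
            return True
        time.sleep(delay)
    return False


# Example: accept either S5 or G3 as a valid shutdown state.
states = iter(['S0', 'S3', 'G3'])
ok = wait_for_state(lambda: next(states, 'G3'), ('S5', 'G3'),
                    tries=50, delay=0.01)
print('reached shutdown state:', ok)
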
diff --git a/server/site_tests/firmware_ECPowerButton/control b/server/site_tests/firmware_ECPowerButton/control
index 425ed6e..5ec4aa1 100644
--- a/server/site_tests/firmware_ECPowerButton/control
+++ b/server/site_tests/firmware_ECPowerButton/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECPowerButton"
 PURPOSE = "Servo based EC power button functional test"
 CRITERIA = "This test will fail if EC power button misbehalved."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot, suite:bvt-faft, suite:labqual"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot, suite:labqual, suite:faft_cr50_pvt, suite:faft_cr50_prepvt"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test check the functionality of EC power button handling. This includes:
diff --git a/server/site_tests/firmware_ECPowerButton/firmware_ECPowerButton.py b/server/site_tests/firmware_ECPowerButton/firmware_ECPowerButton.py
index c3adec8..7c9323a 100644
--- a/server/site_tests/firmware_ECPowerButton/firmware_ECPowerButton.py
+++ b/server/site_tests/firmware_ECPowerButton/firmware_ECPowerButton.py
@@ -16,25 +16,28 @@
     """
     version = 1
 
-    # Delay between shutdown and wake by power button
-    LONG_WAKE_DELAY = 13
-    SHORT_WAKE_DELAY = 7
-
     # Delay between recovery screen and shutdown by power button
     RECOVERY_SCREEN_SHUTDOWN_DELAY = 3
 
-    # Duration of holding down power button to shut down with powerd
-    POWER_BUTTON_POWERD_DURATION = 6
-
     # Duration of holding down power button to test ignoring power button press
     POWER_BUTTON_IGNORE_PRESS_DURATION = 0.2
 
     # Delay after pressing power button to check power state
     POWER_BUTTON_IGNORE_PRESS_DELAY = 10
 
+    # Number of tries when checking power state
+    POWER_STATE_CHECK_TRIES = 20
+
+    # After the device has reached the wanted shutdown power states (S5 or G3),
+    # wait for a short time before executing a power button wakeup.
+    SHUTDOWN_STABLE_DELAY = 1
+
     def initialize(self, host, cmdline_args):
         super(firmware_ECPowerButton, self).initialize(host, cmdline_args)
 
+        # Duration of holding down power button to shut down with powerd
+        self.POWER_BUTTON_POWERD_DURATION = (
+                self.faft_config.hold_pwr_button_poweroff)
         # Duration of holding down power button to shut down without powerd
         self.POWER_BUTTON_NO_POWERD_DURATION = max(
                 self.faft_config.hold_pwr_button_nopowerd_shutdown, 11)
@@ -46,7 +49,8 @@
                 self.faft_config.hold_pwr_button_poweron, 1)
         # Only run in normal mode
         self.switcher.setup_mode('normal')
-        self.has_internal_display = host.has_internal_display()
+        self.has_display = (host.has_internal_display()
+                            or host.has_external_display())
 
     def kill_powerd(self):
         """Stop powerd on client."""
@@ -64,52 +68,67 @@
         Timer(3, self.servo.power_key, [0.001]).start()
         return self.faft_client.system.check_keys([116])
 
-    def shutdown_and_wake(self,
-                          shutdown_powerkey_duration,
-                          wake_delay,
+    def shutdown_and_wake(self, shutdown_powerkey_duration, power_state,
                           wake_powerkey_duration):
         """
-        Shutdown the system by power button, delay, and then power on
-        by power button again.
+        Shut down the system by power button, wait for the requested power
+        states, and then power it on by power button again.
         """
+
+        # Shutdown the system by pressing the power button
         self.servo.power_key(shutdown_powerkey_duration)
 
+        # Wait for the system to enter the requested power mode
+        if not self.wait_power_state(power_state,
+                                     self.POWER_STATE_CHECK_TRIES):
+            raise error.TestFail('The device failed to reach %s.' %
+                                 power_state)
+
+        # Add a delay to confirm the system is stably shut down
+        time.sleep(self.SHUTDOWN_STABLE_DELAY)
+
         # Send a new line to wakeup EC from deepsleep,
         # it can happen if the EC console is not used for some time.
-        # Offset the wake_delay time by the delay (if any) waiting for the AP
-        # to start, so that the wake_delay time is the time to wait after the
-        # AP is actually up and running.
-        wake_delay += self.faft_config.delay_powerinfo_stable
-        if wake_delay > 2:
-            Timer(wake_delay - 1, self.ec.send_command, [""]).start()
+        self.ec.send_command("")
 
-        Timer(wake_delay,
-              self.servo.power_key,
-              [wake_powerkey_duration]).start()
+        # Power on the system by pressing the power button
+        self.servo.power_key(wake_powerkey_duration)
+
+        # Some platforms undergo extra power state transitions during power-on.
+        # We need to wait longer for the power state to become stable.
+        time.sleep(self.faft_config.delay_powerinfo_stable)
 
     def run_once(self):
         """Runs a single iteration of the test."""
         if not self.check_ec_capability():
             raise error.TestNAError("Nothing needs to be tested on this device")
 
-        logging.info("Boot to recovery screen.")
-        self.switcher.enable_rec_mode_and_reboot(usb_state='host')
-        time.sleep(self.faft_config.firmware_screen)
-        if self.get_power_state() != self.POWER_STATE_S0:
-            raise error.TestFail("DUT didn't boot to recovery screen")
+        # Ensure that a detachable is in the off state for the following test
+        if self.faft_config.is_detachable:
+            # Skip this test step for detachables; set the power state to off
+            # before moving on to the next step.
+            logging.info("Setting Power Off")
+            self.servo.get_power_state_controller().power_off()
+        else:
+            # Run these test steps for non-detachable devices
+            logging.info("Boot to recovery screen.")
+            self.switcher.enable_rec_mode_and_reboot(usb_state='host')
+            time.sleep(self.faft_config.firmware_screen)
+            if self.get_power_state() != self.POWER_STATE_S0:
+                raise error.TestFail("DUT didn't boot to recovery screen")
 
-        logging.info("Shutdown by short power button press.")
-        self.servo.power_key(self.faft_config.hold_pwr_button_poweron)
-        time.sleep(self.RECOVERY_SCREEN_SHUTDOWN_DELAY)
-        power_state = self.get_power_state()
-        if (power_state != self.POWER_STATE_S5 and
-            power_state != self.POWER_STATE_G3):
-            raise error.TestFail("DUT didn't shutdown by "
-                                 "short power button press")
-        if self.ec.check_feature('EC_FEATURE_EFS2'):
-            logging.info("Check if EC jumped to RW.")
-            if not self.ec.check_ro_rw('RW'):
-                raise error.TestFail("EC didn't jump to RW")
+            logging.info("Shutdown by short power button press.")
+            self.servo.power_key(self.faft_config.hold_pwr_button_poweron)
+            time.sleep(self.RECOVERY_SCREEN_SHUTDOWN_DELAY)
+            power_state = self.get_power_state()
+            if (power_state != self.POWER_STATE_S5
+                        and power_state != self.POWER_STATE_G3):
+                raise error.TestFail("DUT didn't shutdown by "
+                                     "short power button press")
+            if self.ec.check_feature('EC_FEATURE_EFS2'):
+                logging.info("Check if EC jumped to RW.")
+                if not self.ec.check_ro_rw('RW'):
+                    raise error.TestFail("EC didn't jump to RW")
 
         logging.info("Boot by short power button press.")
         self.servo.power_key(self.faft_config.hold_pwr_button_poweron)
@@ -117,7 +136,7 @@
         if self.get_power_state() != self.POWER_STATE_S0:
             raise error.TestFail("DUT didn't boot by short power button press")
 
-        if self.has_internal_display:
+        if self.has_display:
             logging.info("Display connected, check system ignores short 200ms "
                          "power button press.")
             old_boot_id = self.get_bootid(retry=1)
@@ -147,42 +166,39 @@
                 self._reset_client()
                 raise error.TestFail("DUT didn't boot by short power button press")
 
-        logging.info("Shutdown when powerd is still running and wake from S5 "
-                     "with short power button press.")
-        if self.servo.is_localhost() and self.has_internal_display:
+        logging.info(
+                "Shutdown when powerd is still running and wake from S5/G3 "
+                "with short power button press.")
+        if self.servo.is_localhost() and self.has_display:
             self.check_state(self.debounce_power_button)
         self.switcher.mode_aware_reboot(
-                'custom',
-                lambda:self.shutdown_and_wake(
+                'custom', lambda: self.shutdown_and_wake(
                         self.POWER_BUTTON_POWERD_DURATION,
-                        self.SHORT_WAKE_DELAY,
+                        self.POWER_STATE_S5 + '|' + self.POWER_STATE_G3,
                         self.POWER_BUTTON_SHORT_POWER_ON_DURATION))
 
         logging.info("Shutdown when powerd is stopped and wake from G3 "
                           "with short power button press.")
         self.kill_powerd()
         self.switcher.mode_aware_reboot(
-                'custom',
-                lambda:self.shutdown_and_wake(
-                        self.POWER_BUTTON_NO_POWERD_DURATION,
-                        self.LONG_WAKE_DELAY,
+                'custom', lambda: self.shutdown_and_wake(
+                        self.POWER_BUTTON_NO_POWERD_DURATION,
+                        self.POWER_STATE_G3,
                         self.POWER_BUTTON_SHORT_POWER_ON_DURATION))
 
         logging.info("Shutdown when powerd is still running and wake from G3 "
                      "with long power button press.")
         self.switcher.mode_aware_reboot(
-                'custom',
-                lambda:self.shutdown_and_wake(
+                'custom', lambda: self.shutdown_and_wake(
                         self.POWER_BUTTON_POWERD_DURATION,
-                        self.LONG_WAKE_DELAY,
+                        self.POWER_STATE_G3,
                         self.POWER_BUTTON_LONG_POWER_ON_DURATION))
 
-        logging.info("Shutdown when powerd is stopped and wake from S5 "
+        logging.info("Shutdown when powerd is stopped and wake from S5/G3 "
                      "with long power button press.")
         self.kill_powerd()
         self.switcher.mode_aware_reboot(
-                'custom',
-                lambda:self.shutdown_and_wake(
+                'custom', lambda: self.shutdown_and_wake(
                         self.POWER_BUTTON_NO_POWERD_DURATION,
-                        self.SHORT_WAKE_DELAY,
+                        self.POWER_STATE_S5 + '|' + self.POWER_STATE_G3,
                         self.POWER_BUTTON_LONG_POWER_ON_DURATION))
diff --git a/server/site_tests/firmware_ECPowerG3/control b/server/site_tests/firmware_ECPowerG3/control
index 68cf6cc..492ce77 100644
--- a/server/site_tests/firmware_ECPowerG3/control
+++ b/server/site_tests/firmware_ECPowerG3/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECPowerG3"
 PURPOSE = "Servo based EC X86 power G3 drop test"
 CRITERIA = "This test will fail if EC fails to drop to G3 correctly."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test check EC drop X86 into G3 after S5 for 10 seconds.
diff --git a/server/site_tests/firmware_ECRestoreFW/control b/server/site_tests/firmware_ECRestoreFW/control
index 13a47ab..0b10e3c 100644
--- a/server/site_tests/firmware_ECRestoreFW/control
+++ b/server/site_tests/firmware_ECRestoreFW/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECRestoreFW"
 PURPOSE = "Verify the FW restoration capability even from a bad state."
 ATTRIBUTES = ""
@@ -12,13 +12,15 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-DEPENDENCIES = "ec:cros"
+JOB_RETRIES = 0
+DEPENDENCIES = "ec:cros, servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test flashes a wrong EC RW firmware and checks FW gets restored.
 
-If you want to have any specific fake board name (e.g. coral) to test against,
-add --args="board_as=coral" to test_that command line.
+If you want to use a local tarball file for the test,
+add --args="local_tarball=<path>" to the test_that command line.
 """
 
 if 'args_dict' not in locals():
diff --git a/server/site_tests/firmware_ECRestoreFW/firmware_ECRestoreFW.py b/server/site_tests/firmware_ECRestoreFW/firmware_ECRestoreFW.py
index b4e3aba..7790e8c 100644
--- a/server/site_tests/firmware_ECRestoreFW/firmware_ECRestoreFW.py
+++ b/server/site_tests/firmware_ECRestoreFW/firmware_ECRestoreFW.py
@@ -16,11 +16,6 @@
 
     version = 1
 
-    # A set of fake board candidates per ec type.
-    FAKE_BOARD_DICT = {'npcx':['coral', 'reef'],
-                       'stm32':['samus', 'nami'],
-                       'it83':['dragonegg', 'waddledee']}
-
     def initialize(self, host, cmdline_args, full_args):
         """Initialize the test and pick a fake board to use for corruption. """
         super(firmware_ECRestoreFW, self).initialize(host, cmdline_args,
@@ -30,33 +25,33 @@
         if not self.check_ec_capability():
             raise error.TestNAError('Nothing needs to be tested on this device')
 
-        self.board_as = None
-        # find if "board_as" was given in the command line arguments.
+        self.local_tarball = None
+        self.build = None
+        # find if "local_tarball" was given in the command line arguments.
         for arg in cmdline_args:
-            match = re.search(r'^board_as=(.+)', arg)
+            match = re.search(r'^local_tarball=(.+)', arg)
             if match:
-                self.board_as = match.group(1)
+                self.local_tarball = match.group(1)
+                logging.info('Use local tarball %s', self.local_tarball)
                 break
         else:
-            # if "board_as" was not given, then pick one from FAKE_BOARD_DICT.
-            ec_chip = self.servo.get('ec_chip')
-            if 'stm32' in ec_chip:
-                ec_type = 'stm32'
-            elif 'it83' in ec_chip:
-                ec_type = 'it83'
-            else:
-                ec_type = 'npcx'
+            # Get the latest firmware release from the server.
+            # Even this test uses a fake EC image, it needs to download
+            # the release to get some subsidiary binary (like npcx_monitor.bin).
+            platform = self.faft_config.platform
 
-            for board in self.FAKE_BOARD_DICT[ec_type]:
-                if board not in self.faft_config.platform:
-                    self.board_as = board
-                    break
+            # Get the parent (a.k.a. reference board or baseboard), and hand it
+            # to get_latest_release_version so that it can be used in the
+            # search as a secondary candidate. For example, bob doesn't have
+            # its own release directory, but its parent, gru, does.
+            parent = getattr(self.faft_config, 'parent', None)
 
-        if not self.board_as:
-            raise error.TestError('fake board is not selected.')
+            self.build = host.get_latest_release_version(platform, parent)
 
-        logging.info('A fake board to use for corruption: %s', self.board_as)
-
+            if not self.build:
+                raise error.TestError(
+                        'Cannot locate the latest release for %s' % platform)
+            logging.info('Will use the build %s', self.build)
         self.backup_firmware()
 
     def cleanup(self):
@@ -80,16 +75,13 @@
           host:  a CrosHost object of the machine to update.
         """
 
-        logging.info('Downloading a firmware of %s', self.board_as)
-        value = host.get_latest_release_version(self.board_as)
-        if not value:
-            raise error.TestError('Cannot locate the latest release for %s' %
-                                  self.board_as)
-
         try:
-            host.firmware_install(build=value, dest=self.resultsdir,
-                                  install_ec=True, install_bios=False,
-                                  board_as=self.board_as)
+            host.firmware_install(build=self.build,
+                                  dest=self.resultsdir,
+                                  local_tarball=self.local_tarball,
+                                  install_ec=True,
+                                  install_bios=False,
+                                  corrupt_ec=True)
         except error.TestError as e:
             # It failed before the test attempts to install firmware.
             # It could be either devserver timeout or servo device error.
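The rewritten initialize() above resolves the EC build one of two ways: a
local_tarball passed on the command line, or the latest release reported by the
host, trying the board's own name first and falling back to its parent
(reference board / baseboard). A minimal sketch of that fallback lookup, using
a hypothetical resolver callable rather than the real
host.get_latest_release_version() API:

def resolve_build(lookup, platform, parent=None):
    """Return the first release found for platform, falling back to parent.

    lookup is a hypothetical callable mapping a board name to a build string
    (e.g. 'gru-release/R99-1234.0.0') or None when no release exists.
    """
    for candidate in (platform, parent):
        if candidate:
            build = lookup(candidate)
            if build:
                return build
    return None

# Example: 'bob' has no release directory of its own, but its parent 'gru' does.
releases = {'gru': 'gru-release/R99-1234.0.0'}
assert resolve_build(releases.get, 'bob', 'gru') == 'gru-release/R99-1234.0.0'
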
diff --git a/server/site_tests/firmware_ECSharedMem/control b/server/site_tests/firmware_ECSharedMem/control
index 564d60c..e04bb33 100644
--- a/server/site_tests/firmware_ECSharedMem/control
+++ b/server/site_tests/firmware_ECSharedMem/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECSharedMem"
 PURPOSE = "Servo based EC shared memory test"
 CRITERIA = "This test will fail if EC shared memory misbehaved."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks that the EC has enough shared memory under the following conditions:
diff --git a/server/site_tests/firmware_ECSharedMem/firmware_ECSharedMem.py b/server/site_tests/firmware_ECSharedMem/firmware_ECSharedMem.py
index ed29906..4f2e7e7 100644
--- a/server/site_tests/firmware_ECSharedMem/firmware_ECSharedMem.py
+++ b/server/site_tests/firmware_ECSharedMem/firmware_ECSharedMem.py
@@ -37,7 +37,7 @@
         match = self.ec.send_command_get_output("shmem",
                                                 ["Size:\s*([0-9-]+)\r"])[0]
         shmem_size = int(match[1])
-        logging.info("EC shared memory size if %d bytes", shmem_size)
+        logging.info("EC shared memory size is %d bytes", shmem_size)
         if shmem_size <= 0:
             return False
         elif shmem_size <= 256:
@@ -45,10 +45,20 @@
         return True
 
     def jump_checker(self):
-        """Check for available EC shared memory after jumping to RW image.
+        """Check for available EC shared memory after jumping to RW image, if
+        necessary.
+
+        Does not jump to RW if the EC is already in RW or RW_B.
         """
-        self.ec.send_command("sysjump RW")
-        time.sleep(self.faft_config.ec_boot_to_console)
+        ec_image = self.servo.get_ec_active_copy()
+        # If we are not currently in RW, switch there first before testing.
+        if ec_image != 'RW' and ec_image != 'RW_B':
+            self.ec.send_command("sysjump RW")
+            time.sleep(self.faft_config.ec_boot_to_console)
+            ec_image = self.servo.get_ec_active_copy()
+            if ec_image != 'RW':
+                raise error.TestFail('Expected EC to be in RW, but was ' +
+                                     ec_image)
         return self.shared_mem_checker()
 
     def run_once(self):
diff --git a/server/site_tests/firmware_ECSystemLocked/control b/server/site_tests/firmware_ECSystemLocked/control
new file mode 100644
index 0000000..18c7184
--- /dev/null
+++ b/server/site_tests/firmware_ECSystemLocked/control
@@ -0,0 +1,33 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_ECSystemLocked"
+PURPOSE = "Ensure that CONFIG_SYSTEM_UNLOCKED is unset."
+CRITERIA = "This test will fail if CONFIG_SYSTEM_UNLOCKED is set."
+ATTRIBUTES = "suite:faft_ec_fw_qual"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+DEPENDENCIES = "ec:cros, servo_state:WORKING"
+PY_VERSION = 3
+
+DOC = """
+This test ensures that the EC can be locked by trying to lock it and
+then querying its lock state through the 'sysinfo' command.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run_ecsystemlocked(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_ECSystemLocked", host=host, cmdline_args=args,
+                 disable_sysinfo=True)
+
+parallel_simple(run_ecsystemlocked, machines)
diff --git a/server/site_tests/firmware_ECSystemLocked/firmware_ECSystemLocked.py b/server/site_tests/firmware_ECSystemLocked/firmware_ECSystemLocked.py
new file mode 100644
index 0000000..f145e89
--- /dev/null
+++ b/server/site_tests/firmware_ECSystemLocked/firmware_ECSystemLocked.py
@@ -0,0 +1,35 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+
+
+class firmware_ECSystemLocked(FirmwareTest):
+    """
+    Ensure that CONFIG_SYSTEM_UNLOCKED is not set.
+    """
+    version = 1
+
+    def run_once(self):
+        """Runs a single iteration of the test."""
+        if not self.check_ec_capability():
+            raise error.TestNAError(
+                    "Nothing needs to be tested on this device")
+
+        self.set_ec_write_protect_and_reboot(True)
+
+        logging.info("Querying sysinfo.")
+        verdict = self.ec.send_command_get_output("sysinfo",
+                                                  ["Flags:\s+([^\s]+)\s*.*$"])
+
+        if len(verdict) > 0 and len(verdict[0]) > 1:
+            if verdict[0][1] != 'locked':
+                raise error.TestFail(
+                        "Device is not locked, sysinfo returned %s" %
+                        verdict[0][0])
+        else:
+            raise error.TestFail("Could not parse sysinfo")
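The regex above pulls the first token after 'Flags:' out of the EC's sysinfo
output, and the test passes only when that token is 'locked'. A self-contained
check of that parse against an illustrative console line (the sample text is an
assumption, not output captured from a real device):

import re

SAMPLE_SYSINFO_LINE = "Flags: locked wp_gpio_asserted"  # illustrative only

match = re.search(r"Flags:\s+([^\s]+)\s*.*$", SAMPLE_SYSINFO_LINE)
assert match is not None
assert match.group(1) == "locked"  # any other value would raise TestFail above
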
diff --git a/server/site_tests/firmware_ECThermal/control b/server/site_tests/firmware_ECThermal/control
index cb9f6ca..9bda43f 100644
--- a/server/site_tests/firmware_ECThermal/control
+++ b/server/site_tests/firmware_ECThermal/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECThermal"
 PURPOSE = "Servo based EC thermal engine test"
 CRITERIA = "This test will fail if EC thermal engine misbehaved."
@@ -12,8 +12,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks the functionality of the EC thermal engine. Host temperature
diff --git a/server/site_tests/firmware_ECThermal/firmware_ECThermal.py b/server/site_tests/firmware_ECThermal/firmware_ECThermal.py
index 521c7ad..fc8e8e3 100644
--- a/server/site_tests/firmware_ECThermal/firmware_ECThermal.py
+++ b/server/site_tests/firmware_ECThermal/firmware_ECThermal.py
@@ -4,11 +4,13 @@
 
 import logging
 import re
+import six
 import time
-import xmlrpclib
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+from functools import reduce
+
 
 class firmware_ECThermal(FirmwareTest):
     """
@@ -79,7 +81,7 @@
             try:
                 lines = self.faft_client.system.run_shell_command_get_output(
                         'ectool thermalget %d %d' % (type_id, current_id))
-            except xmlrpclib.Fault:
+            except six.moves.xmlrpc_client.Fault:
                 break
             pattern = re.compile('Threshold \d* [a-z ]* \d* is (\d*) K.')
             for line in lines:
@@ -105,7 +107,7 @@
             self._fan_steps.append(int(m[1]))
 
         # Get the actual value of each fan step
-        for i in xrange(num_steps + 1):
+        for i in range(num_steps + 1):
             if self._fan_steps[i] == 0:
                 continue
             self.servo.set_nocheck('fan_target_rpm', "%d" % self._fan_steps[i])
@@ -133,7 +135,7 @@
                 self.faft_client.system.run_shell_command('ectool temps %d' %
                                                    self._num_temp_sensor)
                 self._num_temp_sensor = self._num_temp_sensor + 1
-            except xmlrpclib.Fault:
+            except six.moves.xmlrpc_client.Fault:
                 break
         logging.info("Number of temperature sensor: %d", self._num_temp_sensor)
 
@@ -146,7 +148,7 @@
         self.ec.send_command("chan 0")
         try:
             self.faft_client.system.run_shell_command('stop temp_metrics')
-        except xmlrpclib.Fault:
+        except six.moves.xmlrpc_client.Fault:
             self._has_temp_metrics = False
         else:
             logging.info('Stopped temp_metrics')
@@ -202,7 +204,8 @@
           Temperature reading in degree C.
 
         Raises:
-          xmlrpclib.Fault: Raised when we fail to read temperature.
+          six.moves.xmlrpc_client.Fault: Raised when we fail to read
+          temperature.
           error.TestError: Raised if ectool doesn't behave as we expected.
         """
         assert sensor_id < self._num_temp_sensor
@@ -263,7 +266,7 @@
         pid_cmd = "ps -ef | grep '[d]d if=/dev/urandom' | awk '{print $2}'"
         block = False
         self._stress_pid = list()
-        for _ in xrange(threads):
+        for _ in range(threads):
             self.faft_client.system.run_shell_command(stress_cmd, block)
         lines = self.faft_client.system.run_shell_command_get_output(
                     pid_cmd)
diff --git a/server/site_tests/firmware_ECUpdateId/control b/server/site_tests/firmware_ECUpdateId/control
index ec7ba8c..e438b77 100644
--- a/server/site_tests/firmware_ECUpdateId/control
+++ b/server/site_tests/firmware_ECUpdateId/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECUpdateId"
 PURPOSE = "Servo based EC test for updating EC ID for verifying EC EFS"
 CRITERIA = "This test will fail if EC EFS misbehaves"
@@ -13,8 +13,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test modifies the EC ID in AP firmware, reboots EC, and checks the next
diff --git a/server/site_tests/firmware_ECUpdateId/control.dev b/server/site_tests/firmware_ECUpdateId/control.dev
index d72eb39..b758abf 100644
--- a/server/site_tests/firmware_ECUpdateId/control.dev
+++ b/server/site_tests/firmware_ECUpdateId/control.dev
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECUpdateId.dev"
 PURPOSE = "Servo based EC test for updating EC ID for verifying EC EFS"
 CRITERIA = "This test will fail if EC EFS misbehaves"
@@ -14,7 +14,7 @@
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test modifies the EC ID in AP firmware, reboots EC, and checks the next
diff --git a/server/site_tests/firmware_ECUpdateId/firmware_ECUpdateId.py b/server/site_tests/firmware_ECUpdateId/firmware_ECUpdateId.py
index 71e4fb1..66fd8a4 100644
--- a/server/site_tests/firmware_ECUpdateId/firmware_ECUpdateId.py
+++ b/server/site_tests/firmware_ECUpdateId/firmware_ECUpdateId.py
@@ -17,18 +17,22 @@
     version = 1
 
     def initialize(self, host, cmdline_args, dev_mode=False):
-        # If EC isn't write-protected, it won't do EFS. Should enable WP.
-        super(firmware_ECUpdateId, self).initialize(host, cmdline_args,
-                                                    ec_wp=True)
+        super(firmware_ECUpdateId, self).initialize(host, cmdline_args)
         # Don't bother if there is no Chrome EC or if the EC is non-EFS.
         if not self.check_ec_capability():
             raise error.TestNAError("Nothing needs to be tested on this device")
         if not self.faft_client.ec.is_efs():
             raise error.TestNAError("Nothing needs to be tested for non-EFS")
+        if self._no_ec_sync:
+            raise error.TestNAError(
+                    "User selected to disable EC software sync")
+        # If EC isn't write-protected, it won't do EFS. Should enable WP.
+        self._setup_ec_write_protect(True)
         # In order to test software sync, it must be enabled.
         self.clear_set_gbb_flags(vboot.GBB_FLAG_DISABLE_EC_SOFTWARE_SYNC, 0)
         self.backup_firmware()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         # It makes updater-related RPCs to use the active AP/EC firmware,
         # instead of the firmware in the shellball.
         self.setup_firmwareupdate_shellball()
@@ -96,15 +100,6 @@
         logging.info("Corrupt the EC section: %s", section)
         self.faft_client.ec.corrupt_body(section)
 
-    def wait_software_sync_and_boot(self):
-        """Wait for software sync to update EC."""
-        if self.dev_mode:
-            time.sleep(self.faft_config.software_sync_update +
-                       self.faft_config.firmware_screen)
-            self.servo.ctrl_d()
-        else:
-            time.sleep(self.faft_config.software_sync_update)
-
     def run_once(self):
         """Execute the main body of the test.
         """
@@ -118,7 +113,7 @@
 
         logging.info("Reboot EC. Verify if EFS works as intended.")
         self.sync_and_ec_reboot('hard')
-        self.wait_software_sync_and_boot()
+        time.sleep(self.faft_config.software_sync_update)
         self.switcher.wait_for_client()
 
         logging.info("Expect EC in another RW slot (the modified hash).")
diff --git a/server/site_tests/firmware_ECUsbPorts/control b/server/site_tests/firmware_ECUsbPorts/control
index b77fc2d..3eee7df 100644
--- a/server/site_tests/firmware_ECUsbPorts/control
+++ b/server/site_tests/firmware_ECUsbPorts/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECUsbPorts"
 PURPOSE = "Servo based EC USB ports test"
 CRITERIA = "This test will fail if EC USB port control misbehaved."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks if EC can correctly control USB port mode.
diff --git a/server/site_tests/firmware_ECUsbPorts/firmware_ECUsbPorts.py b/server/site_tests/firmware_ECUsbPorts/firmware_ECUsbPorts.py
index 66e034a..d7a6dc2 100644
--- a/server/site_tests/firmware_ECUsbPorts/firmware_ECUsbPorts.py
+++ b/server/site_tests/firmware_ECUsbPorts/firmware_ECUsbPorts.py
@@ -79,30 +79,67 @@
             is_ioex = self.faft_config.custom_usb_enable_pins[idx].get(
                     'ioex', False)
             gpio_name = self.faft_config.custom_usb_enable_pins[idx]['name']
+            # change the unicode to ascii
+            gpio_name = str(gpio_name)
         _, val = self.ec.send_command_get_output(
                 '%sget %s' % (('gpio', 'ioex')[is_ioex], gpio_name),
-                ['([01])[^\n\r]*\s%s' % gpio_name])[0]
+                ['(?i)([01])[^\n\r]*\s%s' % gpio_name])[0]
         return val == '1'
 
-    def get_port_count(self):
-        """Get the number of USB ports."""
-        for cnt in xrange(10):
+    def probe_port_count(self):
+        """Probe the EC's gpio pins to determine the number of USB-A ports"""
+        for cnt in range(10):
             try:
                 self.__check_usb_enabled(cnt)
             except error.TestFail:
-                logging.info("Found %d USB ports", cnt)
+                # Enforce that zero ports are specified explicitly. Without
+                # this, the TEST_NA result tends to get ignored until final
+                # FSI signoff, at which point it becomes an emergency when
+                # someone notices this device does indeed have USB-A ports.
+                if cnt == 0:
+                    raise error.TestFail(
+                            "No USB-A ports could be found. If this device has "
+                            "no USB-A ports, specify usb_a_port_count: 0 in your "
+                            "fw-testing-configs")
+
+                logging.info("Found %d USB ports via probe", cnt)
                 return cnt
         # Limit reached. Probably something went wrong.
         raise error.TestFail("Unexpected error while trying to determine " +
                              "number of USB ports")
 
+    def get_port_count(self):
+        """Get the number of USB ports."""
+
+        # Prefer an explicit count.
+        count = self.faft_config.usb_a_port_count
+        if count is not None:
+            logging.info("%d USB ports specified via config", count)
+            # Allow -1 as an escape hatch back to dynamic probing for
+            # devices whose USB-A port counts may vary by SKU.
+            if count == -1:
+                try:
+                    count = self.probe_port_count()
+                except error.TestFail:
+                    count = 0
+
+            return count
+
+        # Next, use the custom enable pins as a proxy, if any are defined.
+        if self.faft_config.custom_usb_enable_pins:
+            count = len(self.faft_config.custom_usb_enable_pins)
+            logging.info("%d USB ports counted by pins", count)
+            return count
+
+        # Finally, fall back to probing if unspecified.
+        return self.probe_port_count()
 
     def check_power_off_mode(self):
         """Shutdown the system and check USB ports are disabled."""
         self.run_shutdown_cmd()
         self.wait_for('shutdown', 'Checking that all USB-A ports are disabled')
         # Check that all USB-A ports are disabled
-        for idx in xrange(self._port_count):
+        for idx in range(self._port_count):
             if self.__check_usb_enabled(idx):
                 raise error.TestFail(
                         'Not all USB-A ports are disabled after shutdown')
@@ -121,11 +158,9 @@
 
         if self.servo.main_device_is_ccd():
             logging.info("Using CCD, ignore checking USB port connection.")
-        elif (self.servo.has_control('servo_v4_type') and
-              self.servo.get('servo_v4_type') == 'type-c'):
+        elif self.servo.is_servo_v4_type_c():
             logging.info("Using type-c servo, ignore checking USB port connection.")
-        elif (self.servo.has_control('servo_v4_type') and
-              self.servo.get('servo_v4_type') != 'type-c'):
+        elif self.servo.get_servo_v4_type() is not None:
             # When only one USB-A port control is available, turning off the
             # USB-A port disconnects the network connection from the DUT.
             raise error.TestNAError("Only one USB-A port control; servo v4 type-C required")
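The new get_port_count() above encodes a priority order: an explicit
usb_a_port_count from fw-testing-configs wins, -1 acts as an escape hatch back
to probing for SKUs with varying port counts, the custom enable pins are used
as a proxy next, and GPIO probing is the last resort. A condensed sketch of
that ordering, with plain stand-ins for the config fields and the probe:

def port_count(config_count, custom_pins, probe):
    """config_count and custom_pins stand in for the faft_config fields;
    probe() stands in for the GPIO probe and raises RuntimeError here when
    it finds no ports."""
    if config_count is not None:
        if config_count == -1:       # escape hatch: probe dynamically
            try:
                return probe()
            except RuntimeError:
                return 0
        return config_count          # explicit count wins
    if custom_pins:                  # one enable pin per USB-A port
        return len(custom_pins)
    return probe()                   # unspecified: fall back to probing

assert port_count(2, None, None) == 2
assert port_count(None, [{'name': 'USB_A0_EN'}, {'name': 'USB_A1_EN'}], None) == 2
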
diff --git a/server/site_tests/firmware_ECWakeFromULP/control b/server/site_tests/firmware_ECWakeFromULP/control
new file mode 100644
index 0000000..6c39bac
--- /dev/null
+++ b/server/site_tests/firmware_ECWakeFromULP/control
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_ECWakeFromULP"
+PURPOSE = "Servo based EC wake from ULP test"
+CRITERIA = "This test will fail if EC wake source misbehaved."
+ATTRIBUTES = "suite:faft_pd"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+DEPENDENCIES = "ec:cros, servo_state:WORKING"
+PY_VERSION = 3
+
+DOC = """
+This test checks the functionality of the EC waking the host from ULP
+hibernate. This includes power button, AC on, and lid open (if supported).
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+pdtester_args = hosts.CrosHost.get_pdtester_arguments(args_dict)
+
+def run_ecwakesource(machine):
+    host = hosts.create_host(machine, servo_args=servo_args,
+                             pdtester_args=pdtester_args)
+    job.run_test("firmware_ECWakeFromULP", host=host, cmdline_args=args,
+                 disable_sysinfo=True)
+
+parallel_simple(run_ecwakesource, machines)
diff --git a/server/site_tests/firmware_ECWakeFromULP/firmware_ECWakeFromULP.py b/server/site_tests/firmware_ECWakeFromULP/firmware_ECWakeFromULP.py
new file mode 100644
index 0000000..7fb685b
--- /dev/null
+++ b/server/site_tests/firmware_ECWakeFromULP/firmware_ECWakeFromULP.py
@@ -0,0 +1,145 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+from autotest_lib.server.cros.power import servo_charger
+from autotest_lib.server.cros.servo import servo
+
+
+class firmware_ECWakeFromULP(FirmwareTest):
+    """
+    Servo based EC wake from ULP test.
+    """
+    version = 1
+
+    # Retries allowed for reaching designed states.
+    POWER_STATE_RETRY_COUNT = 10
+
+    def initialize(self, host, cmdline_args):
+        super(firmware_ECWakeFromULP, self).initialize(host, cmdline_args)
+        self.setup_pdtester(min_batt_level=10)
+        # Only run in normal mode
+        self.switcher.setup_mode('normal')
+        self.charge_manager = servo_charger.ServoV4ChargeManager(
+                host, host.servo)
+        # stop charging to test hibernate
+        self.charge_manager.stop_charging()
+
+    def cleanup(self):
+        # The DUT might be still hibernated. Force the reboot.
+        if not self.is_ec_console_responsive():
+            logging.info('System is still hibernated; reboot.')
+            self.switcher.simple_reboot('cold', sync_before_boot=False)
+
+        if not self.wait_power_state(self.POWER_STATE_S0,
+                                     self.POWER_STATE_RETRY_COUNT):
+            logging.info('System is S5/G3; press pwrbtn to boot to S0.')
+            self.servo.power_short_press()
+
+        # Restore the lid_open switch in case the test failed in the middle.
+        if self.check_ec_capability(['lid']):
+            self.servo.set('lid_open', 'yes')
+
+        self.charge_manager.start_charging()
+
+        super(firmware_ECWakeFromULP, self).cleanup()
+
+    def hibernate_and_wake(self, host, wake_func, wake_state):
+        """Shutdown to G3/S5, hibernate EC, and then wake it via wake_func."""
+        self.run_shutdown_cmd()
+        if not self.wait_power_state(self.POWER_STATE_G3,
+                                     self.POWER_STATE_RETRY_COUNT):
+            raise error.TestFail('Platform failed to reach G3 state.')
+
+        self.ec.send_command('hibernate')
+        time.sleep(self.WAKE_DELAY)
+
+        if self.is_ec_console_responsive():
+            raise error.TestFail('The DUT is not in hibernate mode.')
+        else:
+            logging.info('Hibernated. EC console is not responsive.')
+
+        # wake system
+        wake_func()
+        if not self.wait_power_state(wake_state, self.POWER_STATE_RETRY_COUNT):
+            raise error.TestFail('Platform failed to reach %s state.' %
+                                 wake_state)
+        if wake_state == self.POWER_STATE_S0:
+            self.switcher.wait_for_client()
+
+    def is_ec_console_responsive(self):
+        """Test if EC console is responsive."""
+        try:
+            self.ec.send_command_get_output('help', ['.*>'])
+            return True
+        except servo.UnresponsiveConsoleError:
+            return False
+
+    def wake_by_lid_switch(self):
+        """Wake up the device by lid switch."""
+        self.servo.set('lid_open', 'no')
+        time.sleep(self.LID_DELAY)
+        self.servo.set('lid_open', 'yes')
+
+    def run_once(self, host):
+        """Runs a single iteration of the test."""
+        if not self.check_ec_capability():
+            raise error.TestNAError(
+                    "Nothing needs to be tested on this device")
+
+        if self.servo.main_device_is_ccd():
+            raise error.TestNAError(
+                    'With CCD, we can\'t wake up the DUT from '
+                    'hibernate by power button. Skip hibernate '
+                    'test.')
+        elif not self.faft_config.hibernate:
+            raise error.TestNAError('The device does not support hibernate. '
+                                    'Skip hibernate test.')
+        elif not self._client.has_battery():
+            raise error.TestNAError(
+                    'The device claims to have hibernate support, but does not '
+                    'have a battery. It probably does not actually have '
+                    'hibernate support; edit the device.json file in '
+                    'fw-testing-configs. Skip hibernate test.')
+
+        # Test hibernate and wake by power button
+        wake_src = 'power button'
+        logging.info('EC hibernate and wake by power button.')
+        self.hibernate_and_wake(host, self.servo.power_short_press,
+                                self.POWER_STATE_S0)
+
+        # Test hibernate and wake by lid switch
+        wake_src = 'lid switch'
+        if not self.check_ec_capability(['lid']):
+            logging.info(
+                    'The device has no lid. '
+                    'Skip testing hibernate/wake by %s.', wake_src)
+        elif 'c2d2' in self.servo.get_servo_type():
+            logging.info('The servo is c2d2. We can\'t wake up the DUT from '
+                         'hibernate by lid open. Skip hibernate test')
+        else:
+            logging.info('Hibernate and wake by %s.', wake_src)
+            self.hibernate_and_wake(host, self.wake_by_lid_switch,
+                                    self.POWER_STATE_S0)
+
+        # Test hibernate and wake by AC on
+        wake_src = 'AC on'
+        self.charge_manager.stop_charging()
+        logging.info('Hibernate and wake by %s.', wake_src)
+        if self.faft_config.ac_on_can_wake_ap_from_ulp:
+            logging.info('AC on event can wake AP from ULP.')
+            wake_state = self.POWER_STATE_S0
+        else:
+            logging.info('AC on event cannot wake AP from ULP.')
+            wake_state = self.POWER_STATE_G3
+        self.hibernate_and_wake(host, self.charge_manager.start_charging,
+                                wake_state)
+
+        if not self.faft_config.ac_on_can_wake_ap_from_ulp:
+            # Put AP back to S0
+            self.servo.power_short_press()
diff --git a/server/site_tests/firmware_ECWakeSource/control b/server/site_tests/firmware_ECWakeSource/control
index 9e803a6..8ea83c4 100644
--- a/server/site_tests/firmware_ECWakeSource/control
+++ b/server/site_tests/firmware_ECWakeSource/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECWakeSource"
 PURPOSE = "Servo based EC wake source test"
 CRITERIA = "This test will fail if EC wake source misbehaved."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks the functionality of the EC waking the host from suspend. This includes
diff --git a/server/site_tests/firmware_ECWakeSource/firmware_ECWakeSource.py b/server/site_tests/firmware_ECWakeSource/firmware_ECWakeSource.py
index 28f0307..bc0d862 100644
--- a/server/site_tests/firmware_ECWakeSource/firmware_ECWakeSource.py
+++ b/server/site_tests/firmware_ECWakeSource/firmware_ECWakeSource.py
@@ -40,25 +40,6 @@
             self.servo.set('lid_open', 'yes')
         super(firmware_ECWakeSource, self).cleanup()
 
-    def hibernate_and_wake_by_power_button(self, host):
-        """Shutdown to G3/S5, hibernate EC, and then wake by power button."""
-        is_ac = host.is_ac_connected()
-        self.run_shutdown_cmd()
-        if not self.wait_power_state(self.POWER_STATE_G3,
-                                     self.POWER_STATE_RETRY_COUNT):
-            raise error.TestFail('Platform failed to reach G3 state.')
-
-        self.ec.send_command('hibernate')
-        time.sleep(self.WAKE_DELAY)
-
-        # If AC is plugged during the test, the DUT would wake up right after
-        # entering hibernate mode. So skip the verification for EC console
-        # responsiveness.
-        if is_ac != True and self.is_ec_console_responsive():
-            raise error.TestFail('The DUT is not in hibernate mode.')
-        self.servo.power_short_press()
-        self.switcher.wait_for_client()
-
     def is_ec_console_responsive(self):
         """Test if EC console is responsive."""
         try:
@@ -85,7 +66,7 @@
         if not self.wait_power_state(self.POWER_STATE_SUSPEND,
                                      self.POWER_STATE_RETRY_COUNT):
             raise error.TestFail('Platform failed to reach S0ix or S3 state.')
-        time.sleep(self.SUSPEND_WAIT_TIME_SECONDS);
+        time.sleep(self.SUSPEND_WAIT_TIME_SECONDS)
         wake_func()
         if not self.wait_power_state(self.POWER_STATE_S0,
                                      self.POWER_STATE_RETRY_COUNT):
@@ -116,8 +97,25 @@
                 raise error.TestFail('Platform failed to reach S0 state.')
         self.switcher.wait_for_client(timeout=self.RESUME_TIMEOUT)
 
+    def check_boot_id(self, host, orig_boot_id, wake_method):
+        """Check current boot id matches original boot id.
+
+        Args:
+            host: test host object
+            orig_boot_id: original boot_id to compare against
+            wake_method: string indicating the method used to wake the device
+        """
+        boot_id = host.get_boot_id()
+        if boot_id != orig_boot_id:
+            raise error.TestFail('Unexpected reboot by suspend and wake: ' +
+                                 wake_method)
+
     def run_once(self, host):
         """Runs a single iteration of the test."""
+        if not self.check_ec_capability():
+            raise error.TestNAError(
+                    "Nothing needs to be tested on this device")
+
         # Login as a normal user and stay there, such that closing lid triggers
         # suspend, instead of shutdown.
         autotest_client = autotest.Autotest(host)
@@ -125,38 +123,54 @@
                                  exit_without_logout=True)
         original_boot_id = host.get_boot_id()
 
-        # With no display connected, pressing the power button in suspend mode
-        # would lead to shutdown.
-        if self.has_internal_display:
-            logging.info('Suspend and wake by power button.')
+        # Test suspend and wake by power button
+        wake_src = 'power button'
+        if not self.has_internal_display:
+            # With no display connected, pressing the power button in suspend mode
+            # would lead to shutdown.
+            logging.info(
+                    'The device has no internal display. '
+                    'Skip testing suspend/resume by %s.', wake_src)
+        else:
+            logging.info('Suspend and wake by %s.', wake_src)
             self.suspend_and_wake(self.suspend, self.servo.power_normal_press)
+            self.check_boot_id(host, original_boot_id, wake_src)
 
+        # Test suspend and wake by internal key press
+        wake_src = 'internal key press'
         if not self.check_ec_capability(['keyboard']):
-            logging.info('The device has no internal keyboard. '
-                         'Skip testing suspend/resume by internal keyboard.')
+            logging.info(
+                    'The device has no internal keyboard. '
+                    'Skip testing suspend/resume by %s.', wake_src)
         elif not self.ec.has_command('ksstate'):
-            logging.info('The device does not support the ksstate command. '
-                         'Skip testing suspend/resume by internal keyboard.')
+            logging.info(
+                    'The device does not support the ksstate command. '
+                    'Skip testing suspend/resume by %s.', wake_src)
         else:
             result = self.ec.send_command_get_output(
                     'ksstate',
                     ['Keyboard scan disable mask: 0x([0-9a-fA-F]{8})'])
             kb_scan_disable_mask = int(result[0][1], 16)
             if kb_scan_disable_mask == 0:
-                logging.info('Suspend and wake by internal key press.')
+                logging.info('Suspend and wake by %s.', wake_src)
                 self.suspend_and_wake(self.suspend,
                                       lambda: self.ec.key_press('<enter>'))
             else:
-                logging.info('Tablet mode enabled; suspend and check device '
-                             'does not wake by internal key press.')
+                logging.info(
+                        'Tablet mode enabled; suspend and check device '
+                        'does not wake by %s.', wake_src)
                 self.suspend_and_dont_wake(
                         self.suspend, lambda: self.ec.key_press('<enter>'))
+            self.check_boot_id(host, original_boot_id, wake_src)
 
+        # Test suspend and wake by USB HID key press
+        wake_src = 'USB HID key press'
         if not self.faft_config.usb_hid_wake_enabled:
-            logging.info('Device does not support wake by USB HID. '
-                         'Skip suspend and wake by USB HID key press.')
+            logging.info(
+                    'Device does not support wake by USB HID. '
+                    'Skip suspend and wake by %s.', wake_src)
         else:
-            logging.info('Suspend and wake by USB HID key press.')
+            logging.info('Suspend and wake by %s.', wake_src)
 
             logging.debug('Initializing HID keyboard emulator.')
             self.servo.set_nocheck('init_usb_keyboard', 'on')
@@ -172,32 +186,21 @@
                         'update firmware for Atmel USB KB emulator by running '
                         'firmware_FlashServoKeyboardMap test and then try again?'
                 )
+            self.check_boot_id(host, original_boot_id, wake_src)
 
             logging.debug('Turning off HID keyboard emulator.')
             self.servo.set_nocheck('init_usb_keyboard', 'off')
 
+        # Test suspend and wake by lid switch
+        wake_src = 'lid switch'
         if not self.check_ec_capability(['lid']):
-            logging.info('The device has no lid. '
-                         'Skip testing suspend/resume by lid switch.')
+            logging.info(
+                    'The device has no lid. '
+                    'Skip testing suspend/resume by %s.', wake_src)
         else:
-            logging.info('Suspend and wake by lid switch.')
+            logging.info('Suspend and wake by %s.', wake_src)
             self.suspend_and_wake(self.suspend, self.wake_by_lid_switch)
-            logging.info('Close lid to suspend and wake by lid switch.')
+            logging.info('Close lid to suspend and wake by %s.', wake_src)
             self.suspend_and_wake(lambda:self.servo.set('lid_open', 'no'),
                                   self.wake_by_lid_switch)
-
-        boot_id = host.get_boot_id()
-        if boot_id != original_boot_id:
-            raise error.TestFail('Different boot_id. Unexpected reboot.')
-
-        if self.servo.main_device_is_ccd():
-            logging.info('With CCD, we can\'t wake up the DUT from hibernate '
-                         'by power button. Skip hibernate test.')
-        elif not self.faft_config.ec_has_hibernate_cmd:
-            logging.info('EC does not support hibernate. Skip hibernate test.')
-        elif not self.has_internal_display:
-            logging.info('For the form factors without internal display, '
-                         'hibernate is not useful. Skip hibernate test.')
-        else:
-            logging.info('EC hibernate and wake by power button.')
-            self.hibernate_and_wake_by_power_button(host)
+            self.check_boot_id(host, original_boot_id, wake_src)
diff --git a/server/site_tests/firmware_ECWatchdog/control b/server/site_tests/firmware_ECWatchdog/control
index 841ffe8..d0fea30 100644
--- a/server/site_tests/firmware_ECWatchdog/control
+++ b/server/site_tests/firmware_ECWatchdog/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_ECWatchdog"
 PURPOSE = "Servo based EC watchdog test"
 CRITERIA = "This test will fail if EC watchdog misbehaved."
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks if the EC watchdog is functioning correctly.
diff --git a/server/site_tests/firmware_ECWatchdog/firmware_ECWatchdog.py b/server/site_tests/firmware_ECWatchdog/firmware_ECWatchdog.py
index 0817b5c..ef259b8 100644
--- a/server/site_tests/firmware_ECWatchdog/firmware_ECWatchdog.py
+++ b/server/site_tests/firmware_ECWatchdog/firmware_ECWatchdog.py
@@ -15,8 +15,12 @@
     version = 1
 
 
-    # Delay of spin-wait in ms. Should be long enough to trigger watchdog reset.
-    WATCHDOG_DELAY = 3000
+    # Delay of spin-wait in ms. Nuvoton boards set the hardware watchdog to
+    # 3187.5ms and also set a timer to 2200ms. Set the timeout long enough to
+    # exceed the hardware watchdog timer because the timer isn't 100% reliable.
+    # If there are other platforms that use a longer watchdog timeout, this
+    # may need to be adjusted.
+    WATCHDOG_DELAY = 3700  # 3187.5ms + 500ms safety margin, rounded up.
 
     # Delay of EC power on.
     EC_BOOT_DELAY = 1000
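The comment above arrives at 3700 ms by adding a safety margin to the Nuvoton
hardware watchdog period and rounding up; a quick check of that arithmetic (the
100 ms rounding granularity is an assumption inferred from the final value):

import math

hw_watchdog_ms = 3187.5   # Nuvoton hardware watchdog period
margin_ms = 500           # safety margin from the comment
delay_ms = int(math.ceil((hw_watchdog_ms + margin_ms) / 100.0) * 100)
assert delay_ms == 3700   # matches WATCHDOG_DELAY
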
diff --git a/server/site_tests/firmware_EmmcWriteLoad/control b/server/site_tests/firmware_EmmcWriteLoad/control
deleted file mode 100644
index d5d365a..0000000
--- a/server/site_tests/firmware_EmmcWriteLoad/control
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "firmware_EmmcWriteLoad"
-PURPOSE = "To ensure eMMC functions properly during heavy loads."
-CRITERIA = "This test will fail if eMMC timesout and chromeos install fails."
-ATTRIBUTES = "suite:faft_stress"
-TIME = "LONG"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "firmware"
-TEST_TYPE = "server"
-
-
-DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
-image (built by "build_image test"). On runtime, this test first switches
-DUT to developer mode. When dev_boot_usb=0, pressing Ctrl-U on developer
-screen should not boot the USB disk. When dev_boot_usb=1, pressing Ctrl-U
-should boot the USB disk. It will then continually install chromeos while
-monitoring dmesg for errors.
-
-The length of time in minutes should be specified by the parameter
--a 'minutes_to_run=240'
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_emmcwriteload(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("firmware_EmmcWriteLoad", host=host, cmdline_args=args,
-                 disable_sysinfo=True, dev_mode=True, tag="dev")
-
-parallel_simple(run_emmcwriteload, machines)
diff --git a/server/site_tests/firmware_EmmcWriteLoad/control.4hours b/server/site_tests/firmware_EmmcWriteLoad/control.4hours
deleted file mode 100644
index 541a71d..0000000
--- a/server/site_tests/firmware_EmmcWriteLoad/control.4hours
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "firmware_EmmcWriteLoad"
-PURPOSE = "To ensure eMMC functions properly during heavy loads."
-CRITERIA = "This test will fail if eMMC timesout and chromeos install fails."
-ATTRIBUTES = "suite:faft_stress"
-TIME = "LONG"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "firmware"
-TEST_TYPE = "server"
-
-
-DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
-image (built by "build_image test"). On runtime, this test first switches
-DUT to developer mode. When dev_boot_usb=0, pressing Ctrl-U on developer
-screen should not boot the USB disk. When dev_boot_usb=1, pressing Ctrl-U
-should boot the USB disk. It will then continually install chromeos while
-monitoring dmesg for errors.
-
-The length of time in minutes should be specified by the parameter
--a 'minutes_to_run=240'
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-args.append('minutes_to_run=240')
-
-def run_emmcwriteload(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("firmware_EmmcWriteLoad", host=host, cmdline_args=args,
-                 disable_sysinfo=True, dev_mode=True, tag="dev")
-
-parallel_simple(run_emmcwriteload, machines)
diff --git a/server/site_tests/firmware_EmmcWriteLoad/firmware_EmmcWriteLoad.py b/server/site_tests/firmware_EmmcWriteLoad/firmware_EmmcWriteLoad.py
deleted file mode 100644
index f6daae8..0000000
--- a/server/site_tests/firmware_EmmcWriteLoad/firmware_EmmcWriteLoad.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import datetime
-import logging
-import os
-import re
-import time
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros import stress
-from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
-
-class firmware_EmmcWriteLoad(FirmwareTest):
-    """
-    Runs chromeos-install repeatedly while monitoring dmesg output for EMMC
-    timeout errors.
-
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
-    image (built by "build_image test"). On runtime, this test first switches
-    DUT to developer mode. When dev_boot_usb=0, pressing Ctrl-U on developer
-    screen should not boot the USB disk. When dev_boot_usb=1, pressing Ctrl-U
-    should boot the USB disk.
-
-    The length of time in minutes should be specified by the parameter
-    -a 'minutes_to_run=240'
-    """
-    version = 1
-    NEEDS_SERVO_USB = True
-
-    INSTALL_COMMAND = '/usr/sbin/chromeos-install --yes'
-    ERROR_MESSAGE_REGEX = re.compile(
-            r'mmc[0-9]+: Timeout waiting for hardware interrupt', re.MULTILINE)
-
-    def initialize(self, host, cmdline_args, ec_wp=None):
-        """Initialize the test"""
-        dict_args = utils.args_to_dict(cmdline_args)
-        self.minutes_to_run = int(dict_args.get('minutes_to_run', 5))
-        super(firmware_EmmcWriteLoad, self).initialize(
-            host, cmdline_args, ec_wp=ec_wp)
-
-        self.switcher.setup_mode('dev')
-        # Use the USB key for Ctrl-U dev boot, not recovery.
-        self.setup_usbkey(usbkey=True, host=False, used_for_recovery=False)
-
-        self.original_dev_boot_usb = self.faft_client.system.get_dev_boot_usb()
-        logging.info('Original dev_boot_usb value: %s',
-                     str(self.original_dev_boot_usb))
-
-
-    def read_dmesg(self, filename):
-        """Put the contents of 'dmesg -cT' into the given file.
-
-        @param filename: The file to write 'dmesg -cT' into.
-        """
-        with open(filename, 'w') as f:
-            self._client.run('dmesg -cT', stdout_tee=f)
-
-        return utils.read_file(filename)
-
-    def check_for_emmc_error(self, dmesg):
-        """Check the current dmesg output for the specified error message regex.
-
-        @param dmesg: Contents of the dmesg buffer.
-
-        @return True if error found.
-        """
-        for line in dmesg.splitlines():
-            if self.ERROR_MESSAGE_REGEX.search(line):
-                return True
-
-        return False
-
-    def install_chrome_os(self):
-        """Runs the install command. """
-        self.faft_client.system.run_shell_command(self.INSTALL_COMMAND)
-
-    def poll_for_emmc_error(self, dmesg_file, poll_seconds=20):
-        """Continuously polls the contents of dmesg for the emmc failure message
-
-        @param dmesg_file: Contents of the dmesg buffer.
-        @param poll_seconds: Time to wait before checking dmesg again.
-
-        @return True if error found.
-        """
-        end_time = datetime.datetime.now() + \
-                   datetime.timedelta(minutes=self.minutes_to_run)
-
-        while datetime.datetime.now() <= end_time:
-            dmesg = self.read_dmesg(dmesg_file)
-            contains_error = self.check_for_emmc_error(dmesg)
-
-            if contains_error:
-                raise error.TestFail('eMMC error found. Dmesg output: %s' %
-                                     dmesg)
-            time.sleep(poll_seconds)
-
-    def cleanup(self):
-        """Cleanup the test"""
-        try:
-            self.ensure_dev_internal_boot(self.original_dev_boot_usb)
-        except Exception as e:
-            logging.error("Caught exception: %s", str(e))
-        super(firmware_EmmcWriteLoad, self).cleanup()
-
-    def run_once(self):
-        """Main test logic"""
-        self.faft_client.system.set_dev_boot_usb(1)
-        self.switcher.simple_reboot()
-        self.switcher.bypass_dev_boot_usb()
-        self.switcher.wait_for_client()
-
-        logging.info('Expected USB boot, set dev_boot_usb to the original.')
-        self.check_state((self.checkers.dev_boot_usb_checker, (True, True),
-                          'Device not booted from USB image properly.'))
-        stressor = stress.ControlledStressor(self.install_chrome_os)
-
-        dmesg_filename = os.path.join(self.resultsdir, 'dmesg')
-
-        logging.info('===== Starting OS install loop. =====')
-        logging.info('===== Running install for %s minutes. =====',
-                     self.minutes_to_run)
-        stressor.start()
-
-        self.poll_for_emmc_error(dmesg_file=dmesg_filename)
-
-        logging.info('Stopping install loop.')
-        # Usually takes a little over 3 minutes to install so make sure we
-        # wait long enough for a install iteration to complete.
-        stressor.stop(timeout=300)
-
-        logging.info("Installing OS one more time.")
-        # Installing OS one more time to ensure DUT is left in a good state
-        self.install_chrome_os()
diff --git a/server/site_tests/firmware_EventLog/control b/server/site_tests/firmware_EventLog/control
index 51ad342..6dd1937 100644
--- a/server/site_tests/firmware_EventLog/control
+++ b/server/site_tests/firmware_EventLog/control
@@ -8,13 +8,14 @@
 NAME = "firmware_EventLog"
 PURPOSE = "Ensure eventlog is written correctly on boot and suspend/resume."
 CRITERIA = "This test will fail if entries are missing or have a bad timestamp."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv4, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv4, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test ensures that correct event log entries are written after a boot and
diff --git a/server/site_tests/firmware_EventLog/firmware_EventLog.py b/server/site_tests/firmware_EventLog/firmware_EventLog.py
index b7c6aaf..58708ee 100644
--- a/server/site_tests/firmware_EventLog/firmware_EventLog.py
+++ b/server/site_tests/firmware_EventLog/firmware_EventLog.py
@@ -12,6 +12,8 @@
 POWER_DIR = '/var/lib/power_manager'
 TMP_POWER_DIR = '/tmp/power_manager'
 
+
+# CAUTION: This test is being migrated to TAST: b/174800291
 class firmware_EventLog(FirmwareTest):
     """
     Test to ensure eventlog is written on boot and suspend/resume.
@@ -28,11 +30,12 @@
         self.setup_usbkey(usbkey=True, host=False)
 
     def _has_event(self, pattern):
-        return bool(filter(re.compile(pattern).search, self._events))
+        # Wrap in list(): in py3, bool() of a filter object is always True.
+        return bool(list(filter(re.compile(pattern).search, self._events)))
 
     def _gather_events(self):
         entries = self.faft_client.system.run_shell_command_get_output(
-                'mosys eventlog list')
+                'elogtool list')
         now = self._now()
         self._events = []
         for line in reversed(entries):
@@ -47,8 +50,8 @@
             logging.info('Found event: "%s"', line)
             self._events.append(event)
 
-    # This assumes that Linux and the firmware use the same RTC. mosys converts
-    # timestamps to localtime, and so do we (by calling date without --utc).
+    # This assumes that Linux and the firmware use the same RTC. elogtool uses
+    # timestamps in localtime, and so do we (by calling date without --utc).
     def _now(self):
         time_string = self.faft_client.system.run_shell_command_get_output(
                 'date +"%s"' % self._TIME_FORMAT)[0]
@@ -72,6 +75,12 @@
 
     def run_once(self):
         """Runs a single iteration of the test."""
+        model_name = self.faft_client.system.get_model_name()
+
+        def _leona_bug(event):
+            """Return whether this event should be allowed per b/184778308."""
+            return model_name == "leona" and event == "ACPI Wake | Deep S5"
+
         if not self.faft_config.has_eventlog:
             raise error.TestNAError('This board has no eventlog support.')
 
@@ -86,11 +95,15 @@
         if not self._has_event(r'System boot'):
             raise error.TestFail('No "System boot" event on normal boot.')
         # ' Wake' to match 'FW Wake' and 'ACPI Wake' but not 'Wake Source'
-        if self._has_event(r'Developer Mode|Recovery Mode|Sleep| Wake'):
-            raise error.TestFail('Incorrect event logged on normal boot.')
+        disallowedEvents = re.compile(
+                r'Developer Mode|Recovery Mode|Sleep| Wake')
+        for event in self._events:
+            if disallowedEvents.search(event) and not _leona_bug(event):
+                raise error.TestFail(
+                        'Incorrect event logged on normal boot: ' + event)
 
         logging.debug('Transitioning to dev mode for next test')
-        self.switcher.reboot_to_mode(to_mode='dev')
+        self.switcher.reboot_to_mode(to_mode='dev', allow_gbb_force=True)
 
         logging.info('Verifying eventlog behavior on developer mode boot')
         self._cutoff_time = self._now()
@@ -103,8 +116,11 @@
         if (not self._has_event(r'System boot') or
             not self._has_event(r'Chrome OS Developer Mode')):
             raise error.TestFail('Missing required event on dev mode boot.')
-        if self._has_event(r'Recovery Mode|Sleep| Wake'):
-            raise error.TestFail('Incorrect event logged on dev mode boot.')
+        disallowedEvents = re.compile(r'Recovery Mode|Sleep| Wake')
+        for event in self._events:
+            if disallowedEvents.search(event) and not _leona_bug(event):
+                raise error.TestFail(
+                        'Incorrect event logged on dev mode boot: ' + event)
 
         logging.debug('Transitioning back to normal mode for final tests')
         self.switcher.reboot_to_mode(to_mode='normal')
@@ -132,7 +148,7 @@
         logging.info('Verifying eventlog behavior on suspend/resume')
         self._cutoff_time = self._now()
         self.faft_client.system.run_shell_command(
-                'powerd_dbus_suspend -wakeup_timeout=10')
+                'powerd_dbus_suspend --wakeup_timeout=10')
         time.sleep(5)   # a little slack time for powerd to write the 'Wake'
         self._gather_events()
 
@@ -151,7 +167,7 @@
             self.disable_suspend_to_idle()
             self._cutoff_time = self._now()
             self.faft_client.system.run_shell_command(
-                'powerd_dbus_suspend -wakeup_timeout=10')
+                    'powerd_dbus_suspend --wakeup_timeout=10')
             time.sleep(5)   # a little slack time for powerd to write the 'Wake'
             self.teardown_powerd_prefs()
             self._gather_events()
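The list() wrapper added to _has_event() matters because filter() in Python 3
returns a lazy iterator, and such an object is truthy whether or not it would
yield any matches; materializing it first restores the intended emptiness
check. A short illustration:

import re

events = ['123 | 2021-01-01 | System boot']   # contains no 'Recovery Mode'
pattern = re.compile(r'Recovery Mode')

assert bool(filter(pattern.search, events)) is True         # py3: always truthy
assert bool(list(filter(pattern.search, events))) is False  # the correct check
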
diff --git a/server/site_tests/firmware_FAFTModeTransitions/control.all_single b/server/site_tests/firmware_FAFTModeTransitions/control.all_single
index 88cb1ff..b3f2f5b 100644
--- a/server/site_tests/firmware_FAFTModeTransitions/control.all_single
+++ b/server/site_tests/firmware_FAFTModeTransitions/control.all_single
@@ -9,11 +9,12 @@
 PURPOSE = "Test FAFT ability to switch between different modes"
 CRITERIA = "This test will fail if FAFT can not switch between any two modes."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks the following mode transitions:
@@ -32,9 +33,7 @@
             host=host,
             cmdline_args=args,
             disable_sysinfo=True,
-            mode_seq=[
-                    "normal", "dev", "rec", "normal", "rec", "dev", "normal"
-            ])
+            mode_seq=["normal", "dev", "rec", "normal", "rec", "dev", "normal"])
 
 
 parallel_simple(run_faftmodetransitions, machines)
diff --git a/server/site_tests/firmware_FAFTModeTransitions/control.normal_u b/server/site_tests/firmware_FAFTModeTransitions/control.normal_u
index 46ba314..c8d3898 100644
--- a/server/site_tests/firmware_FAFTModeTransitions/control.normal_u
+++ b/server/site_tests/firmware_FAFTModeTransitions/control.normal_u
@@ -9,11 +9,12 @@
 PURPOSE = "Test FAFT ability to switch from normal to another mode and back"
 CRITERIA = "This test will fail if FAFT can not switch from normal and back again."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks the following mode transitions:
diff --git a/server/site_tests/firmware_FAFTModeTransitions/control.rec_u b/server/site_tests/firmware_FAFTModeTransitions/control.rec_u
index 1d31408..37496a4 100644
--- a/server/site_tests/firmware_FAFTModeTransitions/control.rec_u
+++ b/server/site_tests/firmware_FAFTModeTransitions/control.rec_u
@@ -9,11 +9,12 @@
 PURPOSE = "Test FAFT ability to switch from rec to another mode and back"
 CRITERIA = "This test will fail if FAFT can not switch from rec and back again."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks the following mode transitions:
diff --git a/server/site_tests/firmware_FAFTModeTransitions/firmware_FAFTModeTransitions.py b/server/site_tests/firmware_FAFTModeTransitions/firmware_FAFTModeTransitions.py
index 65c8ed2..2d07803 100644
--- a/server/site_tests/firmware_FAFTModeTransitions/firmware_FAFTModeTransitions.py
+++ b/server/site_tests/firmware_FAFTModeTransitions/firmware_FAFTModeTransitions.py
@@ -4,17 +4,16 @@
 
 import logging
 
-from autotest_lib.client.common_lib import common
+from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
 
-
 class firmware_FAFTModeTransitions(FirmwareTest):
     """This test checks FAFT mode transitions work."""
     version = 1
     NEEDS_SERVO_USB = True
 
     def _checked_reboot(self, to_mode):
-        """Reboots DUT to mode and sanity checks that it has done so.
+        """Reboots DUT to mode and checks that it has done so.
 
         @param to_mode: mode_switcher mode to reboot into
         @type to_mode: string
@@ -22,7 +21,10 @@
         @see: autotest_lib.server.cros.faft.utils.mode_switcher
         """
         self.switcher.reboot_to_mode(to_mode)
-        self.check_state((self.checkers.mode_checker, to_mode))
+        boot_mode = self.faft_client.system.get_boot_mode()
+        if boot_mode != to_mode:
+            raise error.TestFail("Expected boot mode %s, got %s" %
+                                 (to_mode, boot_mode))
 
     def run_once(self, mode_seq=[]):
         """Main test logic.
diff --git a/server/site_tests/firmware_FAFTRPC/control.all b/server/site_tests/firmware_FAFTRPC/control.all
index 6255972..7ba32aa 100644
--- a/server/site_tests/firmware_FAFTRPC/control.all
+++ b/server/site_tests/firmware_FAFTRPC/control.all
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "gredelston, kmshelton, waihong"
+AUTHOR = "kmshelton, waihong"
 NAME = "firmware_FAFTRPC.all"
 PURPOSE = "Verify that the RPC server, and all RPC functions, work as expected."
 CRITERIA = "This test will fail if the FAFT RPC system is not set up correctly."
+ATTRIBUTES = "suite:infra_qual, suite:py3-beta"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test checks that all RPC functions on all subsystems are connected,
diff --git a/server/site_tests/firmware_FAFTRPC/control.bios b/server/site_tests/firmware_FAFTRPC/control.bios
index 0581700..622b229 100644
--- a/server/site_tests/firmware_FAFTRPC/control.bios
+++ b/server/site_tests/firmware_FAFTRPC/control.bios
@@ -4,16 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "gredelston, kmshelton, waihong"
+AUTHOR = "kmshelton, waihong"
 NAME = "firmware_FAFTRPC.bios"
 PURPOSE = "Verify that the RPC system, and all BIOS RPCs, work as expected."
 CRITERIA = "This test will fail if the BIOS system is not set up correctly."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test checks that all RPC functions on the BIOS subsystem are connected,
diff --git a/server/site_tests/firmware_FAFTRPC/control.cgpt b/server/site_tests/firmware_FAFTRPC/control.cgpt
index 04589f5..70af7e0 100644
--- a/server/site_tests/firmware_FAFTRPC/control.cgpt
+++ b/server/site_tests/firmware_FAFTRPC/control.cgpt
@@ -4,16 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "gredelston, kmshelton, waihong"
+AUTHOR = "kmshelton, waihong"
 NAME = "firmware_FAFTRPC.cgpt"
 PURPOSE = "Verify that the RPC system, and all CGPT RPCs, work as expected."
 CRITERIA = "This test will fail if the CGPT system is not set up correctly."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test checks that all RPC functions on the CGPT subsystem are connected,
diff --git a/server/site_tests/firmware_FAFTRPC/control.client b/server/site_tests/firmware_FAFTRPC/control.client
index dc5dba1..625b43c 100644
--- a/server/site_tests/firmware_FAFTRPC/control.client
+++ b/server/site_tests/firmware_FAFTRPC/control.client
@@ -4,16 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "gredelston, kmshelton, waihong"
+AUTHOR = "kmshelton, waihong"
 NAME = "firmware_FAFTRPC.client"
 PURPOSE = "Verify that the faft rpc client side methods work as expected."
 CRITERIA = "This test will fail if the faft rpc client code is broken."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test checks that the client-side methods function properly:
diff --git a/server/site_tests/firmware_FAFTRPC/control.ec b/server/site_tests/firmware_FAFTRPC/control.ec
index 3efb880..294342b 100644
--- a/server/site_tests/firmware_FAFTRPC/control.ec
+++ b/server/site_tests/firmware_FAFTRPC/control.ec
@@ -4,16 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "gredelston, kmshelton, waihong"
+AUTHOR = "kmshelton, waihong"
 NAME = "firmware_FAFTRPC.ec"
 PURPOSE = "Verify that the RPC system, and all EC RPCs, work as expected."
 CRITERIA = "This test will fail if the EC system is not set up correctly."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test checks that all RPC functions on the EC subsystem are connected,
diff --git a/server/site_tests/firmware_FAFTRPC/control.kernel b/server/site_tests/firmware_FAFTRPC/control.kernel
index aa1a4a1..16832fb 100644
--- a/server/site_tests/firmware_FAFTRPC/control.kernel
+++ b/server/site_tests/firmware_FAFTRPC/control.kernel
@@ -4,16 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "gredelston, kmshelton, waihong"
+AUTHOR = "kmshelton, waihong"
 NAME = "firmware_FAFTRPC.kernel"
 PURPOSE = "Verify that the RPC system, and all kernel RPCs, work as expected."
 CRITERIA = "This test will fail if the kernel system is not set up correctly."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test checks that all RPC functions on the kernel subsystem are connected,
diff --git a/server/site_tests/firmware_FAFTRPC/control.rootfs b/server/site_tests/firmware_FAFTRPC/control.rootfs
index 7308297..8b149bd 100644
--- a/server/site_tests/firmware_FAFTRPC/control.rootfs
+++ b/server/site_tests/firmware_FAFTRPC/control.rootfs
@@ -4,16 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "gredelston, kmshelton, waihong"
+AUTHOR = "kmshelton, waihong"
 NAME = "firmware_FAFTRPC.rootfs"
 PURPOSE = "Verify that the RPC system, and all rootfs RPCs, work as expected."
 CRITERIA = "This test will fail if the rootfs system is not set up correctly."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test checks that all RPC functions on the rootfs subsystem are connected,
diff --git a/server/site_tests/firmware_FAFTRPC/control.system b/server/site_tests/firmware_FAFTRPC/control.system
index 954394b..d604339 100644
--- a/server/site_tests/firmware_FAFTRPC/control.system
+++ b/server/site_tests/firmware_FAFTRPC/control.system
@@ -4,16 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "gredelston, kmshelton, waihong"
+AUTHOR = "kmshelton, waihong"
 NAME = "firmware_FAFTRPC.system"
 PURPOSE = "Verify that the RPC system, and all system RPCs, work as expected."
 CRITERIA = "This test will fail if the `system` system is not set up correctly."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test checks that all RPC functions on the `system` subsystem are connected,
diff --git a/server/site_tests/firmware_FAFTRPC/control.tpm b/server/site_tests/firmware_FAFTRPC/control.tpm
index b6edbbd..baaa74c 100644
--- a/server/site_tests/firmware_FAFTRPC/control.tpm
+++ b/server/site_tests/firmware_FAFTRPC/control.tpm
@@ -4,16 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "gredelston, kmshelton, waihong"
+AUTHOR = "kmshelton, waihong"
 NAME = "firmware_FAFTRPC.tpm"
 PURPOSE = "Verify that the RPC system, and all TPM RPCs, work as expected."
 CRITERIA = "This test will fail if the TPM system is not set up correctly."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test checks that all RPC functions on the TPM subsystem are connected,
diff --git a/server/site_tests/firmware_FAFTRPC/control.updater b/server/site_tests/firmware_FAFTRPC/control.updater
index 7f3db59..a814357 100644
--- a/server/site_tests/firmware_FAFTRPC/control.updater
+++ b/server/site_tests/firmware_FAFTRPC/control.updater
@@ -4,16 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "gredelston, kmshelton, waihong"
+AUTHOR = "kmshelton, waihong"
 NAME = "firmware_FAFTRPC.updater"
 PURPOSE = "Verify that the RPC system, and all updater RPCs, work as expected."
 CRITERIA = "This test will fail if the updater system is not set up correctly."
 ATTRIBUTES = "suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 This test checks that all RPC functions on the updater subsystem are connected,
diff --git a/server/site_tests/firmware_FAFTRPC/firmware_FAFTRPC.py b/server/site_tests/firmware_FAFTRPC/firmware_FAFTRPC.py
index e1c05d4..5cffca9 100644
--- a/server/site_tests/firmware_FAFTRPC/firmware_FAFTRPC.py
+++ b/server/site_tests/firmware_FAFTRPC/firmware_FAFTRPC.py
@@ -5,8 +5,8 @@
 import logging
 import operator
 import re
+import six
 import sys
-import xmlrpclib
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros import chip_utils
@@ -17,7 +17,7 @@
 ONE_INT_ARG = (1, )
 ONE_STR_ARG = ("foo", )
 SAMPLE_FILE = "/tmp/foo"
-CHIP_FW_NAMES = (chip.fw_name for chip in chip_utils.chip_id_map.itervalues())
+CHIP_FW_NAMES = (chip.fw_name for chip in chip_utils.chip_id_map.values())
 SAMPLE_CGPT_A = {
     "UUID": "93EF7B23-606B-014B-A10C-E9D7CF53DFD3",
     "successful": 1,
@@ -178,7 +178,7 @@
             rpc_name = method
         try:
             result = rpc_function(*params)
-        except xmlrpclib.Fault as e:
+        except six.moves.xmlrpc_client.Fault as e:
             if allow_error_msg is not None and \
                     re.search(allow_error_msg, str(e)):
                 success_msg = "raised an acceptable error during RPC handling"
@@ -223,7 +223,7 @@
         @param params: A tuple containing params to pass into the RPC function
 
         @raise error.TestFail: If the RPC raises no error, or if it raises any
-                               error other than xmlrpclib.Fault
+                               error other than six.moves.xmlrpc_client.Fault
 
         @return: Not meaningful.
 
@@ -235,7 +235,7 @@
             rpc_name = method
         try:
             result = rpc_function(*params)
-        except xmlrpclib.Fault as e:
+        except six.moves.xmlrpc_client.Fault as e:
             self._log_success(rpc_name, params, "raised RPC error")
         except:
             error_msg = "Unexpected misc error: %s" % sys.exc_info()[0]
@@ -317,7 +317,8 @@
         self.reboot_after_completion = reboot_after_completion
         for rpc_category in rpc_categories_to_test:
             category_name = rpc_category["category_name"]
-            if category_name == "ec" and not self.check_ec_capability():
+            if category_name == "ec" and not self.check_ec_capability(
+                    suppress_warning=True):
                 logging.info("No EC found on DUT. Skipping EC category.")
                 continue
 
@@ -330,6 +331,7 @@
             for test_case in test_cases:
                 method_names = get_rpc_method_names_from_test_case(test_case)
                 passing_args = test_case.get("passing_args", [])
+                ec_passing_args = test_case.get("ec_passing_args", [])
                 failing_args = test_case.get("failing_args", [])
                 allow_error_msg = test_case.get("allow_error_msg", None)
                 expected_return_type = test_case.get("expected_return_type",
@@ -352,6 +354,18 @@
                                 failing_arg_tuple)
                         self._assert_fails(category_name, method_name,
                                            failing_arg_tuple)
+                    for arg_tuple in ec_passing_args:
+                        arg_tuple = self._retrieve_stored_values(arg_tuple)
+                        if self.check_ec_capability(suppress_warning=True):
+                            result = self._assert_passes(
+                                    category_name, method_name, arg_tuple,
+                                    allow_error_msg, expected_return_type,
+                                    silence_result)
+                            if store_result_as is not None:
+                                self._stored_values[store_result_as] = result
+                        else:
+                            self._assert_fails(category_name, method_name,
+                                               arg_tuple)
 
 
 """
@@ -408,8 +422,10 @@
                                         "get_platform_name",
                                         "get_model_name",
                                         "dev_tpm_present",
+                                        "get_boot_mode",
                                         "get_root_dev",
                                         "get_root_part",
+                                        "get_minios_priority",
                                         "get_fw_vboot2",
                                         "request_recovery_boot",
                                         "is_removable_device_boot",
@@ -419,19 +435,6 @@
                                 "failing_args": [ONE_INT_ARG, ONE_STR_ARG],
                         },
                         {
-                                "method_name": "dump_log",
-                                "passing_args": [
-                                        NO_ARGS,
-                                        (True, ),
-                                        (False, ),
-                                ],
-                                "failing_args": [
-                                        (True, False),
-                                ],
-                                "expected_return_type": str,
-                                "silence_result": True,
-                        },
-                        {
                                 "method_name":
                                 "run_shell_command",
                                 "passing_args": [("ls -l", ), ("ls -l", False),
@@ -511,6 +514,17 @@
                                 ],
                         },
                         {
+                                "method_name": "set_minios_priority",
+                                "passing_args": [
+                                        ("A", ),
+                                        ("B", ),
+                                ],
+                                "failing_args": [
+                                        NO_ARGS,
+                                        ("A", 1),
+                                ],
+                        },
+                        {
                                 "method_name": "get_dev_boot_usb",
                                 "passing_args": [NO_ARGS],
                                 "failing_args": [ONE_INT_ARG, ONE_STR_ARG],
@@ -651,13 +665,21 @@
                                 ]
                         },
                         {
+                                "method_name": "set_version",
+                                "passing_args": [
+                                        ("a", 0),
+                                        ("b", 1),
+                                ],
+                                "failing_args": [
+                                        NO_ARGS,
+                                        ("a", ),
+                                        ("b", -1),
+                                ],
+                        },
+                        {
                                 "method_names": [
-                                        "corrupt_sig",
-                                        "restore_sig",
-                                        "corrupt_body",
-                                        "restore_body",
-                                        "move_version_backward",
-                                        "move_version_forward",
+                                        "get_sig_one_byte",
+                                        "get_body_one_byte",
                                 ],
                                 "passing_args": [
                                         ("a", ),
@@ -671,6 +693,20 @@
                         },
                         {
                                 "method_names": [
+                                        "modify_sig",
+                                        "modify_body",
+                                ],
+                                "passing_args": [
+                                        ("a", 0, 0xff),
+                                        ("b", 1, 0xff),
+                                ],
+                                "failing_args": [
+                                        NO_ARGS,
+                                        ONE_INT_ARG,
+                                ],
+                        },
+                        {
+                                "method_names": [
                                         "dump_whole",
                                         "write_whole",
                                 ],
@@ -723,7 +759,6 @@
                         {
                                 "method_names": [
                                         "reload",
-                                        "get_version",
                                         "get_active_hash",
                                         "is_efs",
                                 ],
@@ -733,6 +768,16 @@
                                 "list index out of range",
                         },
                         {
+                                "method_name":
+                                "get_version",
+                                "passing_args": [
+                                        NO_ARGS,
+                                        ("ro", ),
+                                        ("RW", ),
+                                        (None, ),
+                                ],
+                        },
+                        {
                                 "method_names":
                                 ["dump_whole", "write_whole", "dump_firmware"],
                                 "passing_args": [
@@ -921,9 +966,6 @@
                 "category_name":
                 "updater",
                 "test_cases": [
-                        # TODO (gredelston):
-                        # Uncomment the methods which write to flash memory,
-                        # once we are able to set the firmware_updater to "emulate" mode.
                         {
                                 "method_names": [
                                         "cleanup",
@@ -984,6 +1026,8 @@
                                 "passing_args": [
                                         NO_ARGS,
                                         ("bios", ),
+                                ],
+                                "ec_passing_args": [
                                         ("ec", ),
                                 ],
                                 "failing_args": [
diff --git a/server/site_tests/firmware_FAFTSetup/control b/server/site_tests/firmware_FAFTSetup/control
index d9aff05..23a3a6f 100644
--- a/server/site_tests/firmware_FAFTSetup/control
+++ b/server/site_tests/firmware_FAFTSetup/control
@@ -4,17 +4,20 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FAFTSetup"
 PURPOSE = "Servo based diagnose of FAFT hardware setup"
 CRITERIA = "This test will fail if FAFT hardware is not set up correctly."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lab, suite:faft_lv2, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot, suite:faft_setup, suite:bvt-faft, suite:labqual, suite:infra_qual"
-DEPENDENCIES = "servo_state:WORKING"
+# This test cannot be added to suite:infra_qual because interaction with USB
+# sticks is unreliable in the ChromeOS CQ device environment.
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lab, suite:faft_lv2, suite:faft_normal, suite:faft_bios_tot, suite:faft_setup, suite:labqual, suite:servo_lab"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks the following FAFT hardware requirement:
diff --git a/server/site_tests/firmware_FAFTSetup/firmware_FAFTSetup.py b/server/site_tests/firmware_FAFTSetup/firmware_FAFTSetup.py
index 188eaeb..684d053 100644
--- a/server/site_tests/firmware_FAFTSetup/firmware_FAFTSetup.py
+++ b/server/site_tests/firmware_FAFTSetup/firmware_FAFTSetup.py
@@ -5,6 +5,7 @@
 import logging
 from threading import Timer
 
+from autotest_lib.client.bin.input import linux_input
 from autotest_lib.client.common_lib import common
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
@@ -55,7 +56,10 @@
         Timer(self.KEY_PRESS_DELAY, press_action).start()
 
         # Invoke client side script to monitor keystrokes
-        if not self.faft_client.system.check_keys([28, 29, 32]):
+        if self.faft_client.system.check_keys([
+                linux_input.KEY_LEFTCTRL, linux_input.KEY_D,
+                linux_input.KEY_ENTER
+        ]) < 0:
             result = False
 
         # Turn UI back on
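The keycodes the old code hard-wired as [28, 29, 32] are the Linux input event codes for Enter, Left Ctrl, and 'd'; a sketch of the symbolic form this hunk adopts, with the wrapper name invented for illustration (per the hunk, check_keys reports a negative value when the expected keys are not observed).

    from autotest_lib.client.bin.input import linux_input

    # KEY_ENTER == 28, KEY_LEFTCTRL == 29, KEY_D == 32 in the Linux input API.
    EXPECTED_KEYS = [
            linux_input.KEY_LEFTCTRL,
            linux_input.KEY_D,
            linux_input.KEY_ENTER,
    ]

    def keys_detected(faft_client):
        """Return True if the DUT-side monitor saw the expected keystrokes."""
        return faft_client.system.check_keys(EXPECTED_KEYS) >= 0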
@@ -63,10 +67,10 @@
         return result
 
     def keyboard_checker(self):
-        """Press 'd', Ctrl, ENTER by servo and check from DUT."""
+        """Press '<ctrl_l>', 'd', '<enter>' by servo and check from DUT."""
 
         def keypress():
-            """Press 'd', Ctrl, ENTER"""
+            """Press <ctrl_l>, 'd', '<enter>'"""
             self.servo.ctrl_d()
             self.servo.enter_key()
 
@@ -91,7 +95,7 @@
 
         if self.faft_config.mode_switcher_type in (
                 'menu_switcher',
-                'keyboard_dev_switcher'):
+                'keyboard_dev_switcher') and not self.faft_config.is_detachable:
             logging.info("Check keyboard simulation")
             self.check_state(self.keyboard_checker)
         else:
diff --git a/server/site_tests/firmware_FMap/control b/server/site_tests/firmware_FMap/control
index 4f8b9d9..4199b95 100644
--- a/server/site_tests/firmware_FMap/control
+++ b/server/site_tests/firmware_FMap/control
@@ -4,17 +4,18 @@
 
 #from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FMap"
 PURPOSE = "Check the existence of BIOS and EC FMap and the required FMap areas"
 CRITERIA = "This test will fail if the required FMap areas not existed"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv1, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv1, suite:faft_bios_tot, suite:distributed_lab_qual_faft"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks the active BIOS and EC firmware contains the required
diff --git a/server/site_tests/firmware_FMap/control.dev b/server/site_tests/firmware_FMap/control.dev
index 5f16d2d..5d7990d 100644
--- a/server/site_tests/firmware_FMap/control.dev
+++ b/server/site_tests/firmware_FMap/control.dev
@@ -4,17 +4,17 @@
 
 #from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FMap.dev"
 PURPOSE = "Check the existence of BIOS and EC FMap and the required FMap areas"
 CRITERIA = "This test will fail if the required FMap areas not existed"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv1, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv1, suite:faft_bios_tot, suite:distributed_lab_qual_faft"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks the active BIOS and EC firmware contains the required
diff --git a/server/site_tests/firmware_FMap/firmware_FMap.py b/server/site_tests/firmware_FMap/firmware_FMap.py
index 7f14f82..2235bbe 100644
--- a/server/site_tests/firmware_FMap/firmware_FMap.py
+++ b/server/site_tests/firmware_FMap/firmware_FMap.py
@@ -82,7 +82,8 @@
 
     def initialize(self, host, cmdline_args, dev_mode=False):
         super(firmware_FMap, self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
 
     def run_cmd(self, command):
         """
@@ -160,7 +161,7 @@
         # Parse map into dictionary.
         bios = {}
         for e in self._TARGET_AREA[TARGET_BIOS]:
-           bios[e['name']] = {'offset': e['offset'], 'size': e['size']}
+            bios[e['name']] = {'offset': e['offset'], 'size': e['size']}
         succeed = True
         # Check RW_SECTION_[AB] sections.
         if 'RW_SECTION_A' not in bios:
diff --git a/server/site_tests/firmware_FWMPDisableCCD/control b/server/site_tests/firmware_FWMPDisableCCD/control
index 7948ae2..1b88843 100644
--- a/server/site_tests/firmware_FWMPDisableCCD/control
+++ b/server/site_tests/firmware_FWMPDisableCCD/control
@@ -10,7 +10,9 @@
 TIME = "MEDIUM"
 ATTRIBUTES = "suite:faft_cr50_pvt, suite:faft_cr50_prepvt"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test will set FWMP flags. If the test has access to the cr50 console, it
diff --git a/server/site_tests/firmware_FWMPDisableCCD/control.faft_cr50_tot b/server/site_tests/firmware_FWMPDisableCCD/control.faft_cr50_tot
index 1874ff0..01a733b 100644
--- a/server/site_tests/firmware_FWMPDisableCCD/control.faft_cr50_tot
+++ b/server/site_tests/firmware_FWMPDisableCCD/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 ATTRIBUTES = "suite:faft_cr50_tot"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test will set FWMP flags. If the test has access to the cr50 console, it
diff --git a/server/site_tests/firmware_FWMPDisableCCD/firmware_FWMPDisableCCD.py b/server/site_tests/firmware_FWMPDisableCCD/firmware_FWMPDisableCCD.py
index 103f279..851cf58 100644
--- a/server/site_tests/firmware_FWMPDisableCCD/firmware_FWMPDisableCCD.py
+++ b/server/site_tests/firmware_FWMPDisableCCD/firmware_FWMPDisableCCD.py
@@ -66,7 +66,7 @@
         """
         # Clear the password and relock the console
         self.cr50.send_command('ccd testlab open')
-        self.cr50.send_command('ccd reset')
+        self.cr50.ccd_reset()
         # Set this so when we run the open test, it won't clear the FWMP
         self.cr50.set_cap('OpenNoTPMWipe', 'Always')
 
@@ -104,7 +104,8 @@
         # run ccd commands with the password. ccd open and unlock should fail
         # when the FWMP has disabled ccd.
         self.try_set_ccd_level('open', fwmp_disabled_ccd)
-        self.try_set_ccd_level('unlock', fwmp_disabled_ccd)
+        if self.cr50.unlock_is_supported():
+            self.try_set_ccd_level('unlock', fwmp_disabled_ccd)
 
         # Clear the password.
         self.open_cr50_and_setup_ccd()
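A small sketch of the gating added above: 'ccd unlock' is only attempted on cr50 images that still support the command, so the FWMP check is skipped rather than failed elsewhere. try_set_ccd_level and unlock_is_supported are the methods used in the hunk; the wrapper is illustrative.

    def try_unlock_if_supported(test, fwmp_disabled_ccd):
        """Attempt 'ccd unlock' only when the cr50 image supports it."""
        if test.cr50.unlock_is_supported():
            test.try_set_ccd_level('unlock', fwmp_disabled_ccd)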
diff --git a/server/site_tests/firmware_FWtries/control b/server/site_tests/firmware_FWtries/control
deleted file mode 100644
index dba5484..0000000
--- a/server/site_tests/firmware_FWtries/control
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "firmware_FWtries"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv1, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "firmware"
-TEST_TYPE = "server"
-JOB_RETRIES = 4
-
-DOC = """
-Boot with firmware B until fwb_tries count down to 0
-
-After each reboot check the value of mainfw_act, mainfw_type, fwb_tries,
-tried_fwb
-"""
-
-from autotest_lib.client.common_lib import utils
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_fwtries(machine):
-    # Setup the client machine.
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('firmware_FWtries', host=host, cmdline_args=args,
-                 dev_mode=False, tag="normal")
-
-parallel_simple(run_fwtries, machines)
diff --git a/server/site_tests/firmware_FWtries/control.dev b/server/site_tests/firmware_FWtries/control.dev
deleted file mode 100644
index 865b4c3..0000000
--- a/server/site_tests/firmware_FWtries/control.dev
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "firmware_FWtries.dev"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv1, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "firmware"
-TEST_TYPE = "server"
-JOB_RETRIES = 4
-
-DOC = """
-Boot with firmware B until fwb_tries count down to 0
-
-After each reboot check the value of mainfw_act, mainfw_type, fwb_tries,
-tried_fwb
-"""
-
-from autotest_lib.client.common_lib import utils
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_fwtries(machine):
-    # Setup the client machine.
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('firmware_FWtries', host=host, cmdline_args=args,
-                 dev_mode=True, tag="dev")
-
-parallel_simple(run_fwtries, machines)
diff --git a/server/site_tests/firmware_FWtries/firmware_FWtries.py b/server/site_tests/firmware_FWtries/firmware_FWtries.py
deleted file mode 100644
index 8d1a69f..0000000
--- a/server/site_tests/firmware_FWtries/firmware_FWtries.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
-
-
-class firmware_FWtries(FirmwareTest):
-    """
-    Boot with firmware B until fwb_tries/fw_try_count counts down to
-    0.  vboot1 only needs to set fwb_tries in order to boot into FWB,
-    but vboot2 needs to set two fields: fw_try_next and fw_try_count
-    in order to do so.
-
-    Setup Steps:
-    1. Make the device in normal/dev mode.
-
-    Test Steps:
-    2. Set # of tries to 2 (through try_fwb)
-      a.  For vboot1:
-        set fwb_tries=2
-        [fwb_tries can be > 0 and <= 15. Value will be auto reset to 15 If
-        the value is < 0 or > 15
-      b.  For vboot2:
-        set fw_try_next=B fw_try_count=2
-    3. Reboot 1
-    4. Reboot 2
-    5. Reboot 3
-
-    Verification Steps:
-    1. After reboot 1, fw_tries_checker checks that
-    mainfw_act = B
-    fwb_tries/fw_try_count = 1
-
-    2. After reboot 2, fw_tries_checker checks that
-    mainfw_act = B
-    fwb_tries/fw_try_count = 0
-
-    3. After reboot 3, fw_tries_checker
-    mainfw_act = A
-    fwb_tries/fw_try_count = 0
-    """
-
-    version = 1
-
-    def initialize(self, host, cmdline_args, dev_mode=False):
-        super(firmware_FWtries, self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
-
-    def run_once(self, host):
-        """Runs a single iteration of the test."""
-        self.check_state((self.checkers.fw_tries_checker, ('A', True, 0)))
-
-        self.try_fwb(2);
-
-        self.check_state((self.checkers.fw_tries_checker, ('A', True, 2)))
-        self.switcher.mode_aware_reboot()
-
-        # ChromeOS: Blocks init file on bootup from setting try_count to 0
-        # Thus, each reboot is never successful, thus when try_count
-        # decrements to 0, will reboot into FW A due to failure
-        self.check_state((self.checkers.fw_tries_checker, ('B', True, 1)))
-        self.switcher.mode_aware_reboot()
-        self.check_state((self.checkers.fw_tries_checker, ('B', True, 0)))
-        self.switcher.mode_aware_reboot()
-        self.check_state((self.checkers.fw_tries_checker, ('A', True, 0)))
diff --git a/server/site_tests/firmware_FWupdate/control b/server/site_tests/firmware_FWupdate/control
index 15cc4e4..b651e63 100644
--- a/server/site_tests/firmware_FWupdate/control
+++ b/server/site_tests/firmware_FWupdate/control
@@ -2,18 +2,24 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FWupdate"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 RO+RW firmware update using chromeos-firmwareupdate --mode=recovery
 
 Required arguments:
-    test_that ... --args "old_bios=/path/to/old.bin new_bios=/path/to/new.bin"
+    test_that ... --args "old_bios=/path/to/old.bin \
+                          new_bios=/path/to/new.bin \
+                          servo_host=my_servo_host \
+                          servo_port=my_servo_port"
 
 The *_bios arguments can also be split into separate _bios_ro and _bios_rw args.
 """
diff --git a/server/site_tests/firmware_FWupdate/control.downgrade_rw b/server/site_tests/firmware_FWupdate/control.downgrade_rw
index 7ad3fc4..0ea7a8f 100644
--- a/server/site_tests/firmware_FWupdate/control.downgrade_rw
+++ b/server/site_tests/firmware_FWupdate/control.downgrade_rw
@@ -2,19 +2,24 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FWupdate.downgrade_rw"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 RO+RW firmware update using chromeos-firmwareupdate --mode=recovery
 This variant is RO=old, RW=old->new->old.
 
 Required arguments:
-    test_that ... --args "old_bios=/path/to/old.bin new_bios=/path/to/new.bin"
+    test_that ... --args "old_bios=/path/to/old.bin \
+                          new_bios=/path/to/new.bin \
+                          servo_host=my_servo_host \
+                          servo_port=my_servo_port"
 
 The *_bios arguments can also be split into separate _bios_ro and _bios_rw args.
 """
diff --git a/server/site_tests/firmware_FWupdate/control.new b/server/site_tests/firmware_FWupdate/control.new
index 0df8a8d..5f118ce 100644
--- a/server/site_tests/firmware_FWupdate/control.new
+++ b/server/site_tests/firmware_FWupdate/control.new
@@ -2,19 +2,23 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FWupdate.new"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 RO+RW firmware update using chromeos-firmwareupdate --mode=recovery
 This variant is RO=new, RW=new.
 
 Required arguments:
-    test_that ... --args "new_bios=/path/to/new.bin"
+    test_that ... --args "new_bios=/path/to/new.bin \
+                          servo_host=my_servo_host \
+                          servo_port=my_servo_port"
 
 The *_bios arguments can also be split into separate _bios_ro and _bios_rw args.
 """
diff --git a/server/site_tests/firmware_FWupdate/control.old b/server/site_tests/firmware_FWupdate/control.old
index e7e7da4..120e1c3 100644
--- a/server/site_tests/firmware_FWupdate/control.old
+++ b/server/site_tests/firmware_FWupdate/control.old
@@ -2,19 +2,23 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FWupdate.old"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 RO+RW firmware update using chromeos-firmwareupdate --mode=recovery
 This variant is RO=old, RW=old (useful for comparing without changing args).
 
 Required arguments:
-    test_that ... --args "new_bios=/path/to/new.bin"
+    test_that ... --args "new_bios=/path/to/new.bin \
+                          servo_host=my_servo_host \
+                          servo_port=my_servo_port"
 
 The *_bios arguments can also be split into separate _bios_ro and _bios_rw args.
 """
diff --git a/server/site_tests/firmware_FWupdate/control.upgrade_rw b/server/site_tests/firmware_FWupdate/control.upgrade_rw
index 0a5cfeb..3eb133a 100644
--- a/server/site_tests/firmware_FWupdate/control.upgrade_rw
+++ b/server/site_tests/firmware_FWupdate/control.upgrade_rw
@@ -2,19 +2,24 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FWupdate.upgrade_rw"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 RO+RW firmware update using chromeos-firmwareupdate --mode=recovery
 This variant is RO=old, RW=new.
 
 Required arguments:
-    test_that ... --args "old_bios=/path/to/old.bin new_bios=/path/to/new.bin"
+    test_that ... --args "old_bios=/path/to/old.bin \
+                          new_bios=/path/to/new.bin \
+                          servo_host=my_servo_host \
+                          servo_port=my_servo_port"
 
 The *_bios arguments can also be split into separate _bios_ro and _bios_rw args.
 """
diff --git a/server/site_tests/firmware_FWupdate/firmware_FWupdate.py b/server/site_tests/firmware_FWupdate/firmware_FWupdate.py
index 3084c04..08df46e 100644
--- a/server/site_tests/firmware_FWupdate/firmware_FWupdate.py
+++ b/server/site_tests/firmware_FWupdate/firmware_FWupdate.py
@@ -4,7 +4,7 @@
 
 import logging
 import os
-import xmlrpclib
+import six
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
@@ -107,7 +107,7 @@
         try:
             if self.flashed and self._want_restore and self.is_firmware_saved():
                 self.restore_firmware()
-        except (EnvironmentError, xmlrpclib.Fault,
+        except (EnvironmentError, six.moves.xmlrpc_client.Fault,
                 error.AutoservError, error.TestBaseException):
             logging.error("Problem restoring firmware:", exc_info=True)
 
@@ -118,7 +118,7 @@
                         self._orig_sw_wp['start'],
                         self._orig_sw_wp['length'],
                         self._orig_sw_wp['enabled'])
-        except (EnvironmentError, xmlrpclib.Fault,
+        except (EnvironmentError, six.moves.xmlrpc_client.Fault,
                 error.AutoservError, error.TestBaseException):
             logging.error("Problem restoring software write-protect:",
                           exc_info=True)
@@ -268,7 +268,7 @@
                 self.MODE, append, options, ignore_status=True)
 
         if result.exit_status == 255:
-            logging.warn("DUT network dropped during update.")
+            logging.warning("DUT network dropped during update.")
         elif result.exit_status != 0:
             if (image_fwids == before_fwids and
                     'Good. It seems nothing was changed.' in result.stdout):
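logging.warn() is only a deprecated alias kept for backward compatibility, which is why these hunks switch to the supported spelling; a one-line sketch:

    import logging

    # The supported spelling; logging.warn() is a deprecated alias.
    logging.warning("DUT network dropped during update.")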
@@ -409,7 +409,7 @@
         if self.new_bios or (self.new_bios_ro and self.new_bios_rw):
             errors += self.test_new(raise_error=False)
         else:
-            logging.warn("No 'new_bios_ro' given, skipping: %s",
+            logging.warning("No 'new_bios_ro' given, skipping: %s",
                          self.test_new.__doc__)
         if errors:
             if len(errors) > 1:
diff --git a/server/site_tests/firmware_FWupdateThenSleep/control b/server/site_tests/firmware_FWupdateThenSleep/control
index d6953c0..83cbb51 100644
--- a/server/site_tests/firmware_FWupdateThenSleep/control
+++ b/server/site_tests/firmware_FWupdateThenSleep/control
@@ -2,13 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FWupdateThenSleep"
 ATTRIBUTES = "suite:faft_bios_ro_qual, suite:faft_bios_rw_qual"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 RO+RW firmware update using chromeos-firmwareupdate --mode=recovery --wp=0, then
diff --git a/server/site_tests/firmware_FWupdateThenSleep/control.batteryonly b/server/site_tests/firmware_FWupdateThenSleep/control.batteryonly
index 91a774d..3e35419 100644
--- a/server/site_tests/firmware_FWupdateThenSleep/control.batteryonly
+++ b/server/site_tests/firmware_FWupdateThenSleep/control.batteryonly
@@ -2,13 +2,15 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FWupdateThenSleep.batteryonly"
 ATTRIBUTES = "suite:faft_bios_ro_qual, suite:faft_bios_rw_qual"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Run RW firmware update using chromeos-firmwareupdate --mode=recovery --wp=1
diff --git a/server/site_tests/firmware_FWupdateThenSleep/control.wp b/server/site_tests/firmware_FWupdateThenSleep/control.wp
index 1c432f0..93524a7 100644
--- a/server/site_tests/firmware_FWupdateThenSleep/control.wp
+++ b/server/site_tests/firmware_FWupdateThenSleep/control.wp
@@ -2,13 +2,15 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FWupdateThenSleep.wp"
 ATTRIBUTES = "suite:faft_bios_ro_qual, suite:faft_bios_rw_qual"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 RW firmware update using chromeos-firmwareupdate --mode=recovery --wp=1, then
diff --git a/server/site_tests/firmware_FWupdateThenSleep/firmware_FWupdateThenSleep.py b/server/site_tests/firmware_FWupdateThenSleep/firmware_FWupdateThenSleep.py
index 176b28b..e0aef69 100644
--- a/server/site_tests/firmware_FWupdateThenSleep/firmware_FWupdateThenSleep.py
+++ b/server/site_tests/firmware_FWupdateThenSleep/firmware_FWupdateThenSleep.py
@@ -3,7 +3,7 @@
 # found in the LICENSE file.
 
 import logging
-import xmlrpclib
+import six
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
@@ -74,7 +74,7 @@
         try:
             if self.flashed and self.is_firmware_saved():
                 self.restore_firmware()
-        except (EnvironmentError, xmlrpclib.Fault,
+        except (EnvironmentError, six.moves.xmlrpc_client.Fault,
                 error.AutoservError, error.TestBaseException):
             logging.error("Problem restoring firmware:", exc_info=True)
 
@@ -85,7 +85,7 @@
                         self._original_sw_wp['start'],
                         self._original_sw_wp['length'],
                         self._original_sw_wp['enabled'])
-        except (EnvironmentError, xmlrpclib.Fault,
+        except (EnvironmentError, six.moves.xmlrpc_client.Fault,
                 error.AutoservError, error.TestBaseException):
             logging.error("Problem restoring SW write-protect:", exc_info=True)
 
@@ -151,7 +151,7 @@
                 self.MODE, append, options, ignore_status=True)
 
         if result.exit_status == 255:
-            logging.warn("DUT network dropped during update.")
+            logging.warning("DUT network dropped during update.")
         elif result.exit_status != 0:
             if (image_fwids == before_fwids and
                     'Good. It seems nothing was changed.' in result.stdout):
diff --git a/server/site_tests/firmware_FWupdateWP/control b/server/site_tests/firmware_FWupdateWP/control
index 57114cc..5012e64 100644
--- a/server/site_tests/firmware_FWupdateWP/control
+++ b/server/site_tests/firmware_FWupdateWP/control
@@ -2,12 +2,15 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FWupdateWP"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 RO+RW firmware update using chromeos-firmwareupdate --mode=recovery
diff --git a/server/site_tests/firmware_FWupdateWP/firmware_FWupdateWP.py b/server/site_tests/firmware_FWupdateWP/firmware_FWupdateWP.py
index 50baa87..e29f10c 100644
--- a/server/site_tests/firmware_FWupdateWP/firmware_FWupdateWP.py
+++ b/server/site_tests/firmware_FWupdateWP/firmware_FWupdateWP.py
@@ -28,14 +28,14 @@
 
         stripped_bios = self.faft_client.bios.strip_modified_fwids()
         if stripped_bios:
-            logging.warn(
+            logging.warning(
                     "Fixed the previously modified BIOS FWID(s): %s",
                     stripped_bios)
 
         if self.faft_config.chrome_ec:
             stripped_ec = self.faft_client.ec.strip_modified_fwids()
             if stripped_ec:
-                logging.warn(
+                logging.warning(
                         "Fixed the previously modified EC FWID(s): %s",
                         stripped_ec)
 
diff --git a/server/site_tests/firmware_Fingerprint/control.add_entropy b/server/site_tests/firmware_Fingerprint/control.add_entropy
index f76442a..a2ee9d5 100644
--- a/server/site_tests/firmware_Fingerprint/control.add_entropy
+++ b/server/site_tests/firmware_Fingerprint/control.add_entropy
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.AddEntropy"
 PURPOSE = """
 Verify that the add_entropy behavior works correctly.
@@ -19,6 +19,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 Attempts to add entropy while running RW firmware and verifies that it fails.
diff --git a/server/site_tests/firmware_Fingerprint/control.obeys_rollback b/server/site_tests/firmware_Fingerprint/control.obeys_rollback
index d5dc4c4..b1eeeb9 100644
--- a/server/site_tests/firmware_Fingerprint/control.obeys_rollback
+++ b/server/site_tests/firmware_Fingerprint/control.obeys_rollback
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.ObeysRollback"
 PURPOSE = """
 Verify that fingerprint firmware rollback functions correctly.
@@ -20,6 +20,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 # This test uses futility and dev keys from autotest/files/server/cros/faft.
 REQUIRE_SSP = True
diff --git a/server/site_tests/firmware_Fingerprint/control.rdp0 b/server/site_tests/firmware_Fingerprint/control.rdp0
index 8ace462..a311453 100644
--- a/server/site_tests/firmware_Fingerprint/control.rdp0
+++ b/server/site_tests/firmware_Fingerprint/control.rdp0
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.RDP0"
 PURPOSE = """
 Verify that we can read flash when RDP (readout protection) is set to level 0.
@@ -20,6 +20,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 The test setup ensures that neither hardware nor software write protect is
diff --git a/server/site_tests/firmware_Fingerprint/control.rdp1 b/server/site_tests/firmware_Fingerprint/control.rdp1
index 1527127..4fbeb88 100644
--- a/server/site_tests/firmware_Fingerprint/control.rdp1
+++ b/server/site_tests/firmware_Fingerprint/control.rdp1
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.RDP1"
 PURPOSE = """
 Verify that when RDP (readout protection) is set to level 1 and then changed to
@@ -21,6 +21,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 The test setup enables RDP level 1 by enabling hardware write protect and then
diff --git a/server/site_tests/firmware_Fingerprint/control.read_flash b/server/site_tests/firmware_Fingerprint/control.read_flash
index 703333a..090c336 100644
--- a/server/site_tests/firmware_Fingerprint/control.read_flash
+++ b/server/site_tests/firmware_Fingerprint/control.read_flash
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.ReadFlash"
 PURPOSE = """
 Verify that fingerprint flash cannot be read.
@@ -19,6 +19,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 Attempts to read flash from RW firmware and validates that it fails. Then
diff --git a/server/site_tests/firmware_Fingerprint/control.reboot_to_ro b/server/site_tests/firmware_Fingerprint/control.reboot_to_ro
index b1cb4a4..1507ca6 100644
--- a/server/site_tests/firmware_Fingerprint/control.reboot_to_ro
+++ b/server/site_tests/firmware_Fingerprint/control.reboot_to_ro
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.RebootToRO"
 PURPOSE = """
 Validates that booting into RO fingerprint firmware succeeds.
@@ -19,6 +19,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 Attempts to reboot into RO firmware and validates that it succeeds. Then
diff --git a/server/site_tests/firmware_Fingerprint/control.ro_can_update_rw b/server/site_tests/firmware_Fingerprint/control.ro_can_update_rw
index d99e3d8..079db18 100644
--- a/server/site_tests/firmware_Fingerprint/control.ro_can_update_rw
+++ b/server/site_tests/firmware_Fingerprint/control.ro_can_update_rw
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.ROCanUpdateRW"
 PURPOSE = """
 Validates that the RO fingerprint firmware can update the RW firmware.
@@ -19,6 +19,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 # This test uses futility and dev keys from autotest/files/server/cros/faft.
 REQUIRE_SSP = True
diff --git a/server/site_tests/firmware_Fingerprint/control.ro_only_boots_valid_rw b/server/site_tests/firmware_Fingerprint/control.ro_only_boots_valid_rw
index 3dba66a..df4de13 100644
--- a/server/site_tests/firmware_Fingerprint/control.ro_only_boots_valid_rw
+++ b/server/site_tests/firmware_Fingerprint/control.ro_only_boots_valid_rw
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.ROOnlyBootsValidRW"
 PURPOSE = """
 Verify the RO fingerprint firmware only boots valid RW firmware.
@@ -19,6 +19,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 # This test uses futility and dev keys from autotest/files/server/cros/faft.
 REQUIRE_SSP = True
diff --git a/server/site_tests/firmware_Fingerprint/control.rw_no_update_ro b/server/site_tests/firmware_Fingerprint/control.rw_no_update_ro
index 23335f2..46ba34c 100644
--- a/server/site_tests/firmware_Fingerprint/control.rw_no_update_ro
+++ b/server/site_tests/firmware_Fingerprint/control.rw_no_update_ro
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.RWNoUpdateRO"
 PURPOSE = """
 Verify HW write protect prevents RO fingerprint firmware modification.
@@ -19,6 +19,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 # This test uses futility and dev keys from autotest/files/server/cros/faft.
 REQUIRE_SSP = True
diff --git a/server/site_tests/firmware_Fingerprint/control.sw_write_protect b/server/site_tests/firmware_Fingerprint/control.sw_write_protect
index 6356d2f..45e40fc 100644
--- a/server/site_tests/firmware_Fingerprint/control.sw_write_protect
+++ b/server/site_tests/firmware_Fingerprint/control.sw_write_protect
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.SoftwareWriteProtect"
 PURPOSE = """
 Verify that software write protect cannot be disabled when hardware write
@@ -20,6 +20,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 Reboots to RO, attempts to disable software write protect while hardware write
diff --git a/server/site_tests/firmware_Fingerprint/control.system_is_locked b/server/site_tests/firmware_Fingerprint/control.system_is_locked
index 4f19eb8..0dc6db4 100644
--- a/server/site_tests/firmware_Fingerprint/control.system_is_locked
+++ b/server/site_tests/firmware_Fingerprint/control.system_is_locked
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Fingerprint.SystemIsLocked"
 PURPOSE = """
 Verify that system_is_locked() is true in the firmware (i.e.,
@@ -20,6 +20,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, fingerprint"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test enables hardware and software write protection when setting up the
diff --git a/server/site_tests/firmware_Fingerprint/firmware_Fingerprint.py b/server/site_tests/firmware_Fingerprint/firmware_Fingerprint.py
index 15f506d..89e5fd6 100644
--- a/server/site_tests/firmware_Fingerprint/firmware_Fingerprint.py
+++ b/server/site_tests/firmware_Fingerprint/firmware_Fingerprint.py
@@ -34,6 +34,8 @@
             enable_software_write_protect, force_firmware_flashing,
             init_entropy)
 
+        # Flag whether the FPMCU firmware needs to be re-flashed during cleanup
+        self._need_fw_restore = True
         self._test_exe = test_exe
 
         # Convert the arguments (test image names) to the actual filenames of
@@ -44,11 +46,10 @@
                 image_args.append(getattr(self, arg))
         self._test_exe_args = image_args
 
-        if self.get_host_board() == 'zork':
+        if self.is_uart_device():
             # TODO(b/170770251): Move the rdp1 and rdp0 tests to separate files
             #
-            # Zork's RDP1 and RDP0 tests requires an AP reboot, so do it in
-            # this class
+            # On devices with UART, RDP1 and RDP0 tests require an AP reboot.
             if self._test_exe == 'rdp1.sh':
                 self.test_rdp1()
             elif self._test_exe == 'rdp0.sh':
@@ -66,12 +67,12 @@
         if self.get_fp_board() == 'bloonchipper':
             _HW_WP_OFF_AND_SW_WP_ON = (
                     'Flash protect flags: 0x00000407 ro_at_boot ro_now rollback_now all_now\n'
-                    'Valid flags:         0x0000003f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT\n'
+                    'Valid flags:         0x0000083f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT UNKNOWN_ERROR\n'
                     'Writable flags:      0x00000000\n')
         else:
             _HW_WP_OFF_AND_SW_WP_ON = (
                     'Flash protect flags: 0x00000003 ro_at_boot ro_now\n'
-                    'Valid flags:         0x0000003f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT\n'
+                    'Valid flags:         0x0000083f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT UNKNOWN_ERROR\n'
                     'Writable flags:      0x00000000\n')
 
         logging.info('Running test to validate RDP level 1')
@@ -103,7 +104,7 @@
         """
         _HW_AND_SW_WP_OFF = (
                 'Flash protect flags: 0x00000000\n'
-                'Valid flags:         0x0000003f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT\n'
+                'Valid flags:         0x0000083f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT UNKNOWN_ERROR\n'
                 'Writable flags:      0x00000001 ro_at_boot\n')
 
         logging.info('Running test to validate RDP level 0')
@@ -147,7 +148,8 @@
         # This should fail and the file should be empty
         file_read_from_flash = os.path.join(self._dut_working_dir,
                                             'test_keep_rdp.bin')
-        cmd = 'flash_fp_mcu --read --noremove_flash_read_protect %s' % file_read_from_flash
+        cmd = 'flash_fp_mcu --noservices --read' + \
+            ' --noremove_flash_read_protect %s' % file_read_from_flash
         result = self.run_cmd(cmd)
         if result.exit_status == 0:
             raise error.TestFail('Should not be able to read from flash')
@@ -156,8 +158,8 @@
         if self.get_file_size(file_read_from_flash) != 0:
             raise error.TestFail('File read from flash is not empty')
 
-        # On zork, an AP reboot is needed after using flash_fp_mcu.
-        if self.get_host_board() == 'zork':
+        # On devices with UART, an AP reboot is needed after using flash_fp_mcu.
+        if self.is_uart_device():
             self.host.reboot()
 
         self.check_firmware_is_functional()
@@ -185,7 +187,7 @@
 
         file_read_from_flash = os.path.join(self._dut_working_dir,
                                             'test_change_rdp.bin')
-        cmd = 'flash_fp_mcu --read %s' % file_read_from_flash
+        cmd = 'flash_fp_mcu --noservices --read %s' % file_read_from_flash
         self.run_cmd(cmd)
 
         logging.info(
@@ -197,8 +199,8 @@
                     'Flash read output size doesn\'t match original fw size')
         self.check_file_contains_all_0xFF_bytes(file_read_from_flash)
 
-        # On zork, an AP reboot is needed after using flash_fp_mcu.
-        if self.get_host_board() == 'zork':
+        # On devices with UART, an AP reboot is needed after using flash_fp_mcu.
+        if self.is_uart_device():
             self.host.reboot()
 
         logging.info('Checking that firmware is non-functional')
@@ -224,7 +226,8 @@
 
         file_read_from_flash = os.path.join(self._dut_working_dir,
                                             'test_keep_rdp.bin')
-        cmd = 'flash_fp_mcu --read --noremove_flash_read_protect %s' % file_read_from_flash
+        cmd = 'flash_fp_mcu --noservices --read' + \
+            ' --noremove_flash_read_protect %s' % file_read_from_flash
         result = self.run_cmd(cmd)
         if result.exit_status != 0:
             raise error.TestFail('Failed to read from flash')
@@ -235,8 +238,8 @@
             raise error.TestFail(
                     'File read from flash does not match original fw file')
 
-        # On zork, an AP reboot is needed after using flash_fp_mcu.
-        if self.get_host_board() == 'zork':
+        # On devices with UART, an AP reboot is needed after using flash_fp_mcu.
+        if self.is_uart_device():
             self.host.reboot()
 
         self.check_firmware_is_functional()
@@ -258,7 +261,7 @@
 
         file_read_from_flash = os.path.join(self._dut_working_dir,
                                             'test_change_rdp.bin')
-        cmd = 'flash_fp_mcu --read %s' % file_read_from_flash
+        cmd = 'flash_fp_mcu --noservices --read %s' % file_read_from_flash
         result = self.run_cmd(cmd)
         if result.exit_status != 0:
             raise error.TestFail('Failed to read from flash')
@@ -269,8 +272,8 @@
             raise error.TestFail(
                     'File read from flash does not match original fw file')
 
-        # On zork, an AP reboot is needed after using flash_fp_mcu.
-        if self.get_host_board() == 'zork':
+        # On devices with UART, an AP reboot is needed after using flash_fp_mcu.
+        if self.is_uart_device():
             self.host.reboot()
 
         self.check_firmware_is_functional()
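
A note on the firmware_Fingerprint.py hunks above: the zork-only board checks are replaced by an is_uart_device() helper (defined in the test's base class, not shown in this patch), and flash_fp_mcu is now invoked with --noservices, presumably so the script skips stopping and restarting system services around the flash operation. A minimal sketch of what such a helper could look like, assuming the set of UART-attached FPMCU boards is known; the tuple below is illustrative, not the real list:

    # Sketch only -- the real helper lives in the FingerprintTest base class,
    # and the board tuple here is an assumption made for the example.
    _UART_FP_BOARDS = ('bloonchipper',)

    def is_uart_device(self):
        """Return True if the FPMCU is attached over UART, in which case an
        AP reboot is needed after using flash_fp_mcu."""
        return self.get_fp_board() in _UART_FP_BOARDS
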
diff --git a/server/site_tests/firmware_Fingerprint/tests/common.sh b/server/site_tests/firmware_Fingerprint/tests/common.sh
index 3461669..fc3b057 100644
--- a/server/site_tests/firmware_Fingerprint/tests/common.sh
+++ b/server/site_tests/firmware_Fingerprint/tests/common.sh
@@ -14,33 +14,29 @@
 if [[ "${_BOARD}" == "bloonchipper" ]]; then
   readonly _FLASHPROTECT_OUTPUT_HW_AND_SW_WRITE_PROTECT_ENABLED="$(cat <<SETVAR
 Flash protect flags: 0x0000040f wp_gpio_asserted ro_at_boot ro_now rollback_now all_now
-Valid flags:         0x0000003f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT
+Valid flags:         0x0000083f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT UNKNOWN_ERROR
 Writable flags:      0x00000000
 SETVAR
   )"
 else
   readonly _FLASHPROTECT_OUTPUT_HW_AND_SW_WRITE_PROTECT_ENABLED="$(cat <<SETVAR
 Flash protect flags: 0x0000000b wp_gpio_asserted ro_at_boot ro_now
-Valid flags:         0x0000003f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT
+Valid flags:         0x0000083f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT UNKNOWN_ERROR
 Writable flags:      0x00000004 all_now
 SETVAR
   )"
 fi
 
-if [[ "${_BOARD}" == "bloonchipper" ]]; then
-  readonly _FLASHPROTECT_OUTPUT_HW_AND_SW_WRITE_PROTECT_ENABLED_RO="$(cat <<SETVAR
+readonly _FLASHPROTECT_OUTPUT_HW_AND_SW_WRITE_PROTECT_ENABLED_RO="$(cat <<SETVAR
 Flash protect flags: 0x0000000b wp_gpio_asserted ro_at_boot ro_now
 Valid flags:         0x0000003f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT
 Writable flags:      0x00000004 all_now
 SETVAR
-  )"
-else
-  readonly _FLASHPROTECT_OUTPUT_HW_AND_SW_WRITE_PROTECT_ENABLED_RO="${_FLASHPROTECT_OUTPUT_HW_AND_SW_WRITE_PROTECT_ENABLED}"
-fi
+)"
 
 readonly _FLASHPROTECT_OUTPUT_HW_AND_SW_WRITE_PROTECT_DISABLED="$(cat <<SETVAR
 Flash protect flags: 0x00000000
-Valid flags:         0x0000003f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT
+Valid flags:         0x0000083f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT UNKNOWN_ERROR
 Writable flags:      0x00000001 ro_at_boot
 SETVAR
 )"
@@ -49,14 +45,14 @@
 if [[ "${_BOARD}" == "bloonchipper" ]]; then
   readonly _FLASHPROTECT_OUTPUT_HW_WRITE_PROTECT_DISABLED_AND_SW_WRITE_PROTECT_ENABLED="$(cat <<SETVAR
 Flash protect flags: 0x00000407 ro_at_boot ro_now rollback_now all_now
-Valid flags:         0x0000003f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT
+Valid flags:         0x0000083f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT UNKNOWN_ERROR
 Writable flags:      0x00000000
 SETVAR
 )"
 else
   readonly _FLASHPROTECT_OUTPUT_HW_WRITE_PROTECT_DISABLED_AND_SW_WRITE_PROTECT_ENABLED="$(cat <<SETVAR
 Flash protect flags: 0x00000003 ro_at_boot ro_now
-Valid flags:         0x0000003f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT
+Valid flags:         0x0000083f wp_gpio_asserted ro_at_boot ro_now all_now STUCK INCONSISTENT UNKNOWN_ERROR
 Writable flags:      0x00000000
 SETVAR
   )"
@@ -80,13 +76,21 @@
 SETVAR
 )"
 
+readonly _FP_FRAME_RAW_ACCESS_DENIED_ERROR2="$(cat <<SETVAR
+ioctl -1, errno 13 (Permission denied), EC result 255 (<unknown>)
+ioctl -1, errno 13 (Permission denied), EC result 255 (<unknown>)
+ioctl -1, errno 13 (Permission denied), EC result 255 (<unknown>)
+Failed to get FP sensor frame
+SETVAR
+)"
+
 readonly _FW_NAMES="rb0 rb1 rb9 dev"
 readonly _FW_TYPES="ro rw"
 
 flash_rw_firmware() {
   local fw_file="${1}"
   check_file_exists "${fw_file}"
-  flashrom --fast-verify -V -p ec:type=fp -i EC_RW -w "${fw_file}"
+  flashrom --noverify-all -V -p ec:type=fp -i EC_RW -w "${fw_file}"
 }
 
 get_ectool_output_val() {
@@ -140,7 +144,8 @@
   fi
 
   local stderr_output="$(cat "${stderr_output_file}")"
-  if [[ "${stderr_output}" != "${_FP_FRAME_RAW_ACCESS_DENIED_ERROR}" ]]; then
+  if [[ "${stderr_output}" != "${_FP_FRAME_RAW_ACCESS_DENIED_ERROR}" && \
+    "${stderr_output}" != "${_FP_FRAME_RAW_ACCESS_DENIED_ERROR2}" ]]; then
     echo "raw fpframe command returned unexpected value"
     echo "stderr_output: ${stderr_output}"
     exit 1
@@ -154,12 +159,13 @@
 
 read_from_flash_in_bootloader_mode_without_modifying_RDP_level() {
   local output_file="${1}"
-  flash_fp_mcu --read --noremove_flash_read_protect "${output_file}"
+  flash_fp_mcu --noservices --read --noremove_flash_read_protect \
+    "${output_file}"
 }
 
 read_from_flash_in_bootloader_mode_while_setting_RDP_to_level_0() {
   local output_file="${1}"
-  flash_fp_mcu --read "${output_file}"
+  flash_fp_mcu --noservices --read "${output_file}"
 }
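
Throughout these fingerprint hunks the expected flashprotect output strings gain an UNKNOWN_ERROR flag name and the Valid flags mask changes from 0x0000003f to 0x0000083f. The two masks differ by exactly one bit, presumably the bit backing the new flag; a quick sketch of that arithmetic:

    # The new valid-flags mask adds a single bit on top of the old one.
    OLD_VALID_FLAGS = 0x0000003f
    NEW_VALID_FLAGS = 0x0000083f
    assert NEW_VALID_FLAGS ^ OLD_VALID_FLAGS == 0x0800           # presumably UNKNOWN_ERROR
    assert NEW_VALID_FLAGS & OLD_VALID_FLAGS == OLD_VALID_FLAGS  # old bits kept
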
 
 
diff --git a/server/site_tests/firmware_Fingerprint/tests/flash_fp_rw.sh b/server/site_tests/firmware_Fingerprint/tests/flash_fp_rw.sh
index 7dbca58..dc40efd 100755
--- a/server/site_tests/firmware_Fingerprint/tests/flash_fp_rw.sh
+++ b/server/site_tests/firmware_Fingerprint/tests/flash_fp_rw.sh
@@ -11,4 +11,4 @@
   exit 1
 fi
 
-flashrom --fast-verify -V -p ec:type=fp -i EC_RW -w "${FW_FILE}"
+flashrom --noverify-all -V -p ec:type=fp -i EC_RW -w "${FW_FILE}"
diff --git a/server/site_tests/firmware_Fingerprint/tests/rw_no_update_ro.sh b/server/site_tests/firmware_Fingerprint/tests/rw_no_update_ro.sh
index 0eb0d36..483b68a 100755
--- a/server/site_tests/firmware_Fingerprint/tests/rw_no_update_ro.sh
+++ b/server/site_tests/firmware_Fingerprint/tests/rw_no_update_ro.sh
@@ -25,7 +25,7 @@
 check_is_rollback_set_to_initial_val
 
 echo "Flashing RO firmware (expected to fail)"
-flash_ro_cmd="flashrom --fast-verify -V -p ec:type=fp -i EC_RO -w ${fw_file}"
+flash_ro_cmd="flashrom --noverify-all -V -p ec:type=fp -i EC_RO -w ${fw_file}"
 if ${flash_ro_cmd}; then
   echo "Expected flashing of read-only firmware to fail"
   exit 1
diff --git a/server/site_tests/firmware_FingerprintCrosConfig/control.cros_config b/server/site_tests/firmware_FingerprintCrosConfig/control.cros_config
new file mode 100644
index 0000000..7485b98
--- /dev/null
+++ b/server/site_tests/firmware_FingerprintCrosConfig/control.cros_config
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_Fingerprint.CrosConfig"
+PURPOSE = """
+Debug cros_config failures. See http://b/160271883.
+"""
+CRITERIA = """
+Fails if the cros_config call fails.
+"""
+ATTRIBUTES = "suite:fingerprint"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING, fingerprint"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+DOC = """
+Attempts to call cros_config.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_FingerprintCrosConfig", host=host)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_FingerprintCrosConfig/firmware_FingerprintCrosConfig.py b/server/site_tests/firmware_FingerprintCrosConfig/firmware_FingerprintCrosConfig.py
new file mode 100644
index 0000000..de4bc26
--- /dev/null
+++ b/server/site_tests/firmware_FingerprintCrosConfig/firmware_FingerprintCrosConfig.py
@@ -0,0 +1,38 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import test
+
+
+class firmware_FingerprintCrosConfig(test.test):
+    """Test ChromeOS config behavior for http://b/160271883."""
+    version = 1
+
+    def initialize(self, host):
+        self.host = host
+
+    def run_cmd(self, command, timeout=300):
+        """Runs command on the DUT; return result with output and exit code."""
+        logging.debug('DUT Execute: %s', command)
+        result = self.host.run(command, timeout=timeout, ignore_status=True)
+        logging.info('exit_code: %d', result.exit_status)
+        logging.info('stdout:\n%s', result.stdout)
+        logging.info('stderr:\n%s', result.stderr)
+        return result
+
+    def _run_cros_config_cmd_cat(self, command):
+        """Runs cat /run/chromeos-config/v1 on DUT; return result."""
+        cmd = "cat /run/chromeos-config/v1/{}".format(command)
+        return self.run_cmd(cmd)
+
+    def run_once(self):
+        """Run the test."""
+        result = self._run_cros_config_cmd_cat('fingerprint/board')
+        if result.exit_status != 0:
+            raise error.TestFail(
+                'Unable to get fingerprint board with cros_config')
+        logging.info('fingerprint board: %s\n', result.stdout.rstrip())
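
The new firmware_FingerprintCrosConfig test reads the fingerprint board name directly from the /run/chromeos-config/v1 filesystem rather than invoking the cros_config binary. A hypothetical extension of run_once(), not part of this patch, that cross-checks the file against the CLI (assuming the cros_config command is available on the DUT and accepts a "/fingerprint board" query):

    # Hypothetical cross-check, not in the test above.
    file_val = self._run_cros_config_cmd_cat('fingerprint/board').stdout.strip()
    cli_result = self.run_cmd('cros_config /fingerprint board')
    if file_val != cli_result.stdout.strip():
        raise error.TestFail('cros_config mismatch: file=%r cli=%r' %
                             (file_val, cli_result.stdout.strip()))
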
diff --git a/server/site_tests/firmware_FingerprintSigner/control b/server/site_tests/firmware_FingerprintSigner/control
index c013a26..8f3516e 100644
--- a/server/site_tests/firmware_FingerprintSigner/control
+++ b/server/site_tests/firmware_FingerprintSigner/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FingerprintSigner"
 PURPOSE = """
 Verify that the signer ID is correct
@@ -17,8 +17,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-DEPENDENCIES = "fingerprint"
+DEPENDENCIES = "fingerprint, servo_state:WORKING"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This variant of firmware_FingerprintSigner requires the on-disk firmware image
diff --git a/server/site_tests/firmware_FingerprintSigner/control.fsi b/server/site_tests/firmware_FingerprintSigner/control.fsi
index fd65e52..0f5ab7c 100644
--- a/server/site_tests/firmware_FingerprintSigner/control.fsi
+++ b/server/site_tests/firmware_FingerprintSigner/control.fsi
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FingerprintSigner.fsi"
 PURPOSE = """
 Verify that the signer ID is correct
@@ -16,8 +16,9 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-DEPENDENCIES = "fingerprint"
+DEPENDENCIES = "fingerprint, servo_state:WORKING"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test makes sure the firmware stored in the OS image is signed with MP keys,
diff --git a/server/site_tests/firmware_FlashServoKeyboardMap/control b/server/site_tests/firmware_FlashServoKeyboardMap/control
index e2c5275..c70c6ee 100644
--- a/server/site_tests/firmware_FlashServoKeyboardMap/control
+++ b/server/site_tests/firmware_FlashServoKeyboardMap/control
@@ -4,14 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FlashServoKeyboardMap"
 PURPOSE = "Flash the servo v3 keyboard map"
 CRITERIA = "This test will fail if the servo keyboard map can't be updated"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 For using the keyboard emulator atmega chip on the servo v3, the keyboard
diff --git a/server/site_tests/firmware_FwScreenCloseLid/control b/server/site_tests/firmware_FwScreenCloseLid/control
index e83f297..780d1a7 100644
--- a/server/site_tests/firmware_FwScreenCloseLid/control
+++ b/server/site_tests/firmware_FwScreenCloseLid/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FwScreenCloseLid"
 PURPOSE = "Servo based lid close triggered shutdown during firmware screens."
 CRITERIA = "This test will fail if DUT doesn't shutdown"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv4, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv4, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test triggers
 firmware screens (developer, remove, insert, yuck, to_norm screens),
 and then closes the lid in order to power the machine down.
diff --git a/server/site_tests/firmware_FwScreenCloseLid/firmware_FwScreenCloseLid.py b/server/site_tests/firmware_FwScreenCloseLid/firmware_FwScreenCloseLid.py
index 1f3724b..e05608d 100644
--- a/server/site_tests/firmware_FwScreenCloseLid/firmware_FwScreenCloseLid.py
+++ b/server/site_tests/firmware_FwScreenCloseLid/firmware_FwScreenCloseLid.py
@@ -13,7 +13,7 @@
     """
     Servo based lid close triggered shutdown test during firmware screens.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image --test"). On runtime, this test triggers
     firmware screens (developer, remove, insert, yuck, to_norm screens),
     and then closes the lid in order to power the machine down.
diff --git a/server/site_tests/firmware_FwScreenPressPower/control b/server/site_tests/firmware_FwScreenPressPower/control
index 017f56f..8f64f4d 100644
--- a/server/site_tests/firmware_FwScreenPressPower/control
+++ b/server/site_tests/firmware_FwScreenPressPower/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_FwScreenPressPower"
 PURPOSE = "Servo based power button triggered shutdown during firmware screens."
 CRITERIA = "This test will fail if DUT doesn't shutdown"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv4, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv4, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test triggers
 firmware screens (developer, remove, insert, yuck, and to_norm screens),
 and then presses the power button in order to power the machine down.
diff --git a/server/site_tests/firmware_FwScreenPressPower/firmware_FwScreenPressPower.py b/server/site_tests/firmware_FwScreenPressPower/firmware_FwScreenPressPower.py
index 48b8107..362b7da 100644
--- a/server/site_tests/firmware_FwScreenPressPower/firmware_FwScreenPressPower.py
+++ b/server/site_tests/firmware_FwScreenPressPower/firmware_FwScreenPressPower.py
@@ -13,7 +13,7 @@
     """
     Servo based power button triggered shutdown test during firmware screens.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image --test"). On runtime, this test triggers
     firmware screens (developer, remove, insert, yuck, and to_norm screens),
     and then presses the power button in order to power the machine down.
@@ -29,7 +29,17 @@
         # While the firmware screen, the power button probing loop sleeps
         # 0.25 second on every scan. Use the normal delay (1.2 second) for
         # power press.
-        self.servo.power_normal_press()
+
+        if self.faft_config.is_detachable and self.faft_config.mode_switcher_type == 'menu_switcher':
+            # Since power button has been overridden as a select button in the
+            # fw screens for detachables, we can just skip this part of the test
+            # and shut down the DUT using the power state controller instead.
+            logging.info("Setting Power Off")
+            self.servo.get_power_state_controller().power_off()
+        else:
+            # Otherwise use the power button
+            logging.info("Pressing Power Button")
+            self.servo.power_normal_press()
 
     def wait_longer_fw_screen_and_press_power(self):
         """Wait for firmware screen without timeout and press power button."""
@@ -95,26 +105,24 @@
         self.switcher.wait_for_client()
 
         if self.faft_config.power_button_dev_switch:
-                logging.info(
-                        "Skipping TO_NORM screen test. The power button is "
-                        "used to confirm DEV mode to NORM mode.")
+            logging.info("Skipping TO_NORM screen test. The power button is "
+                         "used to confirm DEV mode to NORM mode.")
         else:
-                logging.info(
-                        "Reboot. When the developer screen shown, press "
-                        "enter key to trigger either TO_NORM screen (new) or "
-                        "RECOVERY INSERT screen (old). Then press power button "
-                        "to make DUT shutdown.")
-                self.check_state((self.checkers.crossystem_checker, {
-                        'devsw_boot': '1',
-                        'mainfw_type': 'developer',
-                }))
-                self.switcher.simple_reboot()
-                self.run_shutdown_process(
-                        self.wait_second_screen_and_press_power,
-                        post_power_action=self.switcher.bypass_dev_mode,
-                        shutdown_timeout=self.SHORT_SHUTDOWN_CONFIRMATION_PERIOD
-                )
-                self.switcher.wait_for_client()
+            logging.info(
+                    "Reboot. When the developer screen is shown, press "
+                    "enter key to trigger either TO_NORM screen (new) or "
+                    "RECOVERY INSERT screen (old). Then press power button "
+                    "to make DUT shutdown.")
+            self.check_state((self.checkers.crossystem_checker, {
+                    'devsw_boot': '1',
+                    'mainfw_type': 'developer',
+            }))
+            self.switcher.simple_reboot()
+            self.run_shutdown_process(
+                    self.wait_second_screen_and_press_power,
+                    post_power_action=self.switcher.bypass_dev_mode,
+                    shutdown_timeout=self.SHORT_SHUTDOWN_CONFIRMATION_PERIOD)
+            self.switcher.wait_for_client()
 
         logging.info("Request recovery boot. When the RECOVERY INSERT "
                      "screen shows, press power button to make DUT shutdown.")
@@ -122,8 +130,7 @@
                 'devsw_boot': '1',
                 'mainfw_type': 'developer',
         }))
-        self.faft_client.system.request_recovery_boot()
-        self.switcher.simple_reboot('cold')
+        self.servo.set_nocheck('power_state', 'rec')
         self.run_shutdown_process(
                 self.wait_longer_fw_screen_and_press_power,
                 post_power_action=self.switcher.bypass_dev_mode,
@@ -138,8 +145,7 @@
                 'devsw_boot': '1',
                 'mainfw_type': 'developer',
         }))
-        self.faft_client.system.request_recovery_boot()
-        self.switcher.simple_reboot('cold')
+        self.servo.set_nocheck('power_state', 'rec')
         self.run_shutdown_process(
                 self.wait_yuck_screen_and_press_power,
                 post_power_action=self.switcher.bypass_dev_mode,
@@ -160,8 +166,7 @@
                 'devsw_boot': '0',
                 'mainfw_type': 'normal',
         }))
-        self.faft_client.system.request_recovery_boot()
-        self.switcher.simple_reboot('cold')
+        self.servo.set_nocheck('power_state', 'rec')
         self.run_shutdown_process(
                 self.wait_longer_fw_screen_and_press_power,
                 shutdown_timeout=self.SHORT_SHUTDOWN_CONFIRMATION_PERIOD)
diff --git a/server/site_tests/firmware_GSCAPROV1Trigger/control b/server/site_tests/firmware_GSCAPROV1Trigger/control
new file mode 100644
index 0000000..b38b606
--- /dev/null
+++ b/server/site_tests/firmware_GSCAPROV1Trigger/control
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "mruthven"
+NAME = "firmware_GSCAPROV1Trigger"
+PURPOSE = "Test triggering AP RO verification."
+ATTRIBUTES = "suite:faft_cr50_experimental"
+TIME = "SHORT"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
+
+DOC = """
+Verify triggering AP RO verification on GSC.
+"""
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import utils
+
+if 'args_dict' not in locals():
+    args_dict = {}
+
+args_dict.update(utils.args_to_dict(args))
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+
+    job.run_test("firmware_GSCAPROV1Trigger", host=host, cmdline_args=args,
+                 full_args=args_dict)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_GSCAPROV1Trigger/firmware_GSCAPROV1Trigger.py b/server/site_tests/firmware_GSCAPROV1Trigger/firmware_GSCAPROV1Trigger.py
new file mode 100644
index 0000000..c7dc408
--- /dev/null
+++ b/server/site_tests/firmware_GSCAPROV1Trigger/firmware_GSCAPROV1Trigger.py
@@ -0,0 +1,183 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import cr50_utils
+from autotest_lib.server.cros.faft.cr50_test import Cr50Test
+
+
+class firmware_GSCAPROV1Trigger(Cr50Test):
+    """Verify GSC response after triggering AP RO V1 verification."""
+    version = 1
+
+    # This only verifies V1 output right now.
+    TEST_AP_RO_VER = 1
+
+    # DBG image has to be able to set the AP RO hash with the board id set.
+    MIN_DBG_VER = '1.6.100'
+
+    VERIFICATION_PASSED = 1
+    VERIFICATION_FAILED = 2
+
+    DIGEST_RE = r' digest ([0-9a-f]{64})'
+    CALCULATED_DIGEST_RE = 'Calculated' + DIGEST_RE
+    STORED_DIGEST_RE = 'Stored' + DIGEST_RE
+
+    def initialize(self, host, cmdline_args, full_args={}):
+        """Initialize servo"""
+        self.ran_test = False
+        super(firmware_GSCAPROV1Trigger,
+              self).initialize(host,
+                               cmdline_args,
+                               full_args,
+                               restore_cr50_image=True)
+        if not self.cr50.ap_ro_version_is_supported(self.TEST_AP_RO_VER):
+            raise error.TestNAError('GSC does not support AP RO v%s' %
+                                    self.TEST_AP_RO_VER)
+
+        dbg_ver = cr50_utils.InstallImage(self.host,
+                                          self.get_saved_dbg_image_path(),
+                                          '/tmp/cr50.bin')[1][1]
+        if cr50_utils.GetNewestVersion(dbg_ver,
+                                       self.MIN_DBG_VER) == self.MIN_DBG_VER:
+            raise error.TestNAError('Update DBG image to 1.6.100 or newer.')
+
+    def update_to_dbg_and_clear_hash(self):
+        """Clear the Hash."""
+        # Make sure the AP is up before trying to update.
+        self.recover_dut()
+        self._retry_cr50_update(self._dbg_image_path, 3, False)
+        self.cr50.send_command('ap_ro_info erase')
+        time.sleep(3)
+        ap_ro_info = self.cr50.get_ap_ro_info()
+        logging.info(ap_ro_info)
+        if ap_ro_info['hash']:
+            raise error.TestError('Could not erase hash')
+
+    def after_run_once(self):
+        """Reboot cr50 to recover the dut."""
+        try:
+            self.recover_dut()
+        finally:
+            super(firmware_GSCAPROV1Trigger, self).after_run_once()
+
+    def set_hash(self):
+        """Set the Hash."""
+        self.recover_dut()
+        result = self.host.run('ap_ro_hash.py -v True GBB')
+        logging.info(result)
+        time.sleep(3)
+        ap_ro_info = self.cr50.get_ap_ro_info()
+        logging.info(ap_ro_info)
+        if not ap_ro_info['hash']:
+            raise error.TestError('Could not set hash %r' % result)
+
+    def rollback_to_release_image(self):
+        """Update to the release image."""
+        self._retry_cr50_update(self.get_saved_cr50_original_path(),
+                                3,
+                                rollback=True)
+        logging.info(self.cr50.get_ap_ro_info())
+
+    def cleanup(self):
+        """Clear the AP RO hash."""
+        try:
+            if not self.ran_test:
+                return
+            logging.info('Cleanup')
+            self.recover_dut()
+            self.update_to_dbg_and_clear_hash()
+            self.rollback_to_release_image()
+        finally:
+            super(firmware_GSCAPROV1Trigger, self).cleanup()
+
+    def recover_dut(self):
+        """Reboot gsc to recover the dut."""
+        logging.info('Recover DUT')
+        ap_ro_info = self.cr50.get_ap_ro_info()
+        logging.info(ap_ro_info)
+        if ap_ro_info['result'] != self.VERIFICATION_FAILED:
+            self._try_to_bring_dut_up()
+            return
+        time.sleep(3)
+        self.cr50.send_command('ccd testlab open')
+        time.sleep(3)
+        self.cr50.reboot()
+        time.sleep(self.faft_config.delay_reboot_to_ping)
+        logging.info(self.cr50.get_ap_ro_info())
+        self._try_to_bring_dut_up()
+        self.cr50.send_command('ccd testlab open')
+
+    def trigger_verification(self):
+        """Trigger verification."""
+        try:
+            self.recover_dut()
+            result = self.host.run('gsctool -aB start',
+                                   ignore_timeout=True,
+                                   ignore_status=True,
+                                   timeout=20)
+            logging.info(result)
+        finally:
+            time.sleep(5)
+            ap_ro_info = self.cr50.get_ap_ro_info()
+            logging.info(ap_ro_info)
+            self.hash_results.append(ap_ro_info['result'])
+            self.servo.record_uart_capture()
+
+    def run_once(self):
+        """Save hash and trigger verification"""
+        self.ran_test = True
+        self.hash_results = []
+        # The DBG image can set the hash when the board id is saved. The release
+        # image can't. Set the hash with the DBG image, so the test doesn't need
+        # to erase the board id. This test verifies triggering AP RO
+        # verification. It's not about saving the hash.
+        self.update_to_dbg_and_clear_hash()
+        self.set_hash()
+        self.rollback_to_release_image()
+        # CCD has to be open to trigger verification.
+        self.fast_ccd_open(True)
+
+        # Trigger verification multiple times. Make sure it doesn't fail or
+        # change.
+        self.trigger_verification()
+        self.trigger_verification()
+        self.trigger_verification()
+        self.trigger_verification()
+
+        self.servo.record_uart_capture()
+        cr50_uart_file = self.servo.get_uart_logfile('cr50')
+        if not cr50_uart_file:
+            logging.info('No cr50 uart file')
+            return
+        with open(cr50_uart_file, 'r') as f:
+            contents = f.read()
+
+        self.recover_dut()
+
+        # GSC only prints calculated and stored hashes after AP RO verification
+        # fails. These sets will be empty if verification passed every time.
+        calculated = set(re.findall(self.CALCULATED_DIGEST_RE, contents))
+        stored = set(re.findall(self.STORED_DIGEST_RE, contents))
+        logging.info('Stored: %r', stored)
+        logging.info('Calculated: %r', calculated)
+        logging.info('Results: %r', self.hash_results)
+
+        if self.VERIFICATION_FAILED in self.hash_results:
+            raise error.TestFail(
+                    'Verification failed -- stored: %r calculated: %r' %
+                    (stored, calculated))
+        if len(calculated) > 1:
+            raise error.TestFail('Multiple calculated digests %r' % calculated)
+        # This shouldn't happen. Raise TestNA, so it's easy to see.
+        if self.VERIFICATION_PASSED not in self.hash_results:
+            raise error.TestNAError(
+                    'Verification Not Run -- stored: %r calculated: %r' %
+                    (stored, calculated))
+
+        # TODO(b/218705748): change the hash and verify verification fails.
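
firmware_GSCAPROV1Trigger scrapes the cr50 uart capture for the calculated and stored digests, which GSC only prints when AP RO verification fails. A small self-contained illustration of how the digest regexes above behave, using a fabricated console line purely for the example:

    # Illustration only; the 64-character digest below is made up.
    import re

    DIGEST_RE = r' digest ([0-9a-f]{64})'
    CALCULATED_DIGEST_RE = 'Calculated' + DIGEST_RE

    line = 'Calculated digest ' + 'ab' * 32
    assert re.findall(CALCULATED_DIGEST_RE, line) == ['ab' * 32]
    assert re.findall(CALCULATED_DIGEST_RE, 'verification passed') == []
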
diff --git a/server/site_tests/firmware_GSCSetAPROV1/control b/server/site_tests/firmware_GSCSetAPROV1/control
new file mode 100644
index 0000000..d17342a
--- /dev/null
+++ b/server/site_tests/firmware_GSCSetAPROV1/control
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "mruthven"
+NAME = "firmware_GSCSetAPROV1"
+PURPOSE = "Verify GSC can set the AP RO hash"
+ATTRIBUTES = "suite:faft_cr50_experimental"
+TIME = "SHORT"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
+
+DOC = """
+This test verifies GSC can set and clear the AP RO hash when the board id type
+is erased.
+"""
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import utils
+
+if 'args_dict' not in locals():
+    args_dict = {}
+
+args_dict.update(utils.args_to_dict(args))
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+
+    job.run_test("firmware_GSCSetAPROV1", host=host, cmdline_args=args,
+                 full_args=args_dict)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_GSCSetAPROV1/firmware_GSCSetAPROV1.py b/server/site_tests/firmware_GSCSetAPROV1/firmware_GSCSetAPROV1.py
new file mode 100644
index 0000000..71e27cb
--- /dev/null
+++ b/server/site_tests/firmware_GSCSetAPROV1/firmware_GSCSetAPROV1.py
@@ -0,0 +1,99 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import cr50_utils
+from autotest_lib.server.cros.faft.cr50_test import Cr50Test
+
+
+class firmware_GSCSetAPROV1(Cr50Test):
+    """
+    Verify a DUT can set the AP RO hash when the board id type is erased.
+    """
+    version = 1
+
+    TEST_AP_RO_VER = 1
+
+    # gsctool errors.
+    ERR_UNPROGRAMMED = 'AP RO hash unprogrammed'
+    ERR_BID_PROGRAMMED = 'BID already programmed'
+
+    # ap_ro_hash.py errors.
+    AP_RO_ERR_ALREADY_PROGRAMMED = 'Already programmed'
+    AP_RO_ERR_BID_PROGRAMMED = 'BID programmed'
+
+    def initialize(self, host, cmdline_args, full_args={}):
+        """Initialize servo"""
+        super(firmware_GSCSetAPROV1,
+              self).initialize(host,
+                               cmdline_args,
+                               full_args,
+                               restore_cr50_image=True,
+                               restore_cr50_board_id=True)
+
+        if not self.cr50.ap_ro_version_is_supported(self.TEST_AP_RO_VER):
+            raise error.TestNAError('GSC does not support AP RO v%s' %
+                                    self.TEST_AP_RO_VER)
+
+    def get_hash(self):
+        """Get the hash."""
+        time.sleep(10)
+        result = cr50_utils.GSCTool(self.host, ['-a', '-A'])
+        saved_hash = result.stdout.split(':')[-1].strip()
+        logging.info('hash: %s', saved_hash)
+        return None if self.ERR_UNPROGRAMMED in saved_hash else saved_hash
+
+    def clear_hash(self, expect_error=False):
+        """Clear the Hash."""
+        result = cr50_utils.GSCTool(self.host, ['-a', '-H'],
+                                    ignore_status=expect_error)
+        if expect_error and (result.exit_status != 3
+                             or self.ERR_BID_PROGRAMMED not in result.stderr):
+            raise error.TestFail('Unexpected error clearing hash %r' %
+                                 result.stderr)
+        self.get_hash()
+
+    def set_hash(self, expected_error=None):
+        """Set the Hash."""
+        result = self.host.run('ap_ro_hash.py -v True GBB',
+                               ignore_status=bool(expected_error))
+        if expected_error:
+            if expected_error not in result.stderr:
+                raise error.TestFail('Did not find %r in error' %
+                                     expected_error)
+        elif result.exit_status:
+            raise error.TestFail('Error saving hash')
+        return self.get_hash()
+
+    def run_once(self):
+        """Verify the AP RO hash can be updated when the BID type isn't set"""
+        brand = self.get_device_brand()
+        if not brand:
+            raise error.TestNAError('Cannot run without brand')
+
+        # Erase the board id if it's set.
+        if not self.cr50.get_board_id()[1]:
+            logging.info('Erasing BID')
+            self.eraseflashinfo_and_restore_image()
+        bid = self.get_saved_cr50_original_version()[2]
+        flags = int(bid.split(':')[-1] if bid else '0', 16)
+
+        self.clear_hash()
+        self.set_hash()
+        self.set_hash(expected_error=self.AP_RO_ERR_ALREADY_PROGRAMMED)
+
+        cr50_utils.SetChipBoardId(self.host, '0xffffffff', flags)
+
+        self.clear_hash()
+        self.set_hash()
+        self.set_hash(expected_error=self.AP_RO_ERR_ALREADY_PROGRAMMED)
+        self.clear_hash()
+
+        cr50_utils.SetChipBoardId(self.host, brand, flags)
+
+        self.clear_hash(expect_error=True)
+        self.set_hash(expected_error=self.AP_RO_ERR_BID_PROGRAMMED)
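
run_once() above extracts the board-id flags from the saved cr50 version tuple by splitting the colon-separated board-id string and interpreting its last field as hex. A tiny sketch of that parsing; the board-id string is hypothetical, the real one comes from get_saved_cr50_original_version():

    # Illustration of the flag parsing above; the bid value is made up.
    bid = 'TEST:0xffffffff:0x00007f80'
    flags = int(bid.split(':')[-1] if bid else '0', 16)
    assert flags == 0x7f80

    bid = ''  # with no saved board id, the flags default to 0
    assert int(bid.split(':')[-1] if bid else '0', 16) == 0
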
diff --git a/server/site_tests/firmware_IntegratedU2F/control b/server/site_tests/firmware_IntegratedU2F/control
index 74de01e..aac7f8a 100644
--- a/server/site_tests/firmware_IntegratedU2F/control
+++ b/server/site_tests/firmware_IntegratedU2F/control
@@ -10,7 +10,9 @@
 ATTRIBUTES = "suite:faft_cr50_pvt, suite:faft_cr50_prepvt"
 TIME = "SHORT"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify second-factor authentication (U2F) using the on-board cr50 firmware
diff --git a/server/site_tests/firmware_IntegratedU2F/control.faft_cr50_tot b/server/site_tests/firmware_IntegratedU2F/control.faft_cr50_tot
index 5626999..6f84421 100644
--- a/server/site_tests/firmware_IntegratedU2F/control.faft_cr50_tot
+++ b/server/site_tests/firmware_IntegratedU2F/control.faft_cr50_tot
@@ -11,6 +11,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify second-factor authentication (U2F) using the on-board cr50 firmware
diff --git a/server/site_tests/firmware_IntegratedU2F/firmware_IntegratedU2F.py b/server/site_tests/firmware_IntegratedU2F/firmware_IntegratedU2F.py
index e0e67e5..3857555 100644
--- a/server/site_tests/firmware_IntegratedU2F/firmware_IntegratedU2F.py
+++ b/server/site_tests/firmware_IntegratedU2F/firmware_IntegratedU2F.py
@@ -6,7 +6,7 @@
 
 import logging
 import time
-import StringIO
+import six
 import subprocess
 
 from autotest_lib.client.common_lib import error, utils
@@ -161,7 +161,8 @@
             logging.info('pressed power button')
             time.sleep(self.SHORT_WAIT)
             # send enter to the test process
-            self.u2ftest_job.sp.stdin.write('\n')
+            self.u2ftest_job.sp.stdin.write(b'\n')
+            self.u2ftest_job.sp.stdin.flush()
             logging.info('hit enter')
             self.output = ''
         return self.u2ftest_job.sp.poll() is not None
@@ -170,10 +171,10 @@
     def get_u2ftest_output(self):
         """Read the new output"""
         self.u2ftest_job.process_output()
+        output = self.stdout.getvalue()
         self.stdout.seek(self.last_len)
-        output = self.stdout.read().strip()
-        self.last_len = self.stdout.len
-        return output
+        self.last_len = len(output)
+        return self.stdout.read().strip()
 
     def run_u2ftest(self):
         """Run U2FTest with the U2F device"""
@@ -184,7 +185,7 @@
                                                  self.dev_path))
         full_ssh_command = '%s "%s"' % (self.host.ssh_command(options='-tt'),
             u2ftest_cmd)
-        self.stdout = StringIO.StringIO()
+        self.stdout = six.StringIO()
         # Start running U2FTest in the background.
         self.u2ftest_job = utils.BgJob(full_ssh_command,
                                        nickname='u2ftest',
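
The firmware_IntegratedU2F changes above are a Python 3 migration: StringIO.StringIO becomes six.StringIO, a bytes newline is written to the test process's stdin, and get_u2ftest_output() is reworked because the .len attribute of the old StringIO objects does not exist on six.StringIO under Python 3. The new code tracks a high-water mark itself and returns only the text appended since the previous call. A standalone sketch of that incremental-read pattern, assuming the buffer is append-only (as BgJob's captured stdout is):

    # Minimal sketch of the incremental-read pattern in get_u2ftest_output().
    import six

    class IncrementalReader(object):
        """Return only the output appended since the previous read."""

        def __init__(self):
            self.stdout = six.StringIO()
            self.last_len = 0

        def read_new(self):
            output = self.stdout.getvalue()   # everything written so far
            self.stdout.seek(self.last_len)   # jump to the unread portion
            self.last_len = len(output)       # remember the new high-water mark
            return self.stdout.read().strip()

    reader = IncrementalReader()
    reader.stdout.write('line one\n')
    assert reader.read_new() == 'line one'
    reader.stdout.write('line two\n')
    assert reader.read_new() == 'line two'
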
diff --git a/server/site_tests/firmware_InvalidUSB/control b/server/site_tests/firmware_InvalidUSB/control
index d5b1911..5d7fef6 100644
--- a/server/site_tests/firmware_InvalidUSB/control
+++ b/server/site_tests/firmware_InvalidUSB/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_InvalidUSB"
 PURPOSE = "Servo based booting an invalid USB image test"
 CRITERIA = "This test will fail if the invalid USB boots successfully"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv4, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv4, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test corrupts the
 USB image and tries to boot into it. A failure is expected. It then
 restores the USB image and boots into it again.
diff --git a/server/site_tests/firmware_InvalidUSB/firmware_InvalidUSB.py b/server/site_tests/firmware_InvalidUSB/firmware_InvalidUSB.py
index 55d9650..46a2543 100644
--- a/server/site_tests/firmware_InvalidUSB/firmware_InvalidUSB.py
+++ b/server/site_tests/firmware_InvalidUSB/firmware_InvalidUSB.py
@@ -14,7 +14,7 @@
     """
     Servo based booting an invalid USB image test.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image --test"). On runtime, this test corrupts the
     USB image and tries to boot into it. A failure is expected. It then
     restores the USB image and boots into it again.
diff --git a/server/site_tests/firmware_LegacyRecovery/control b/server/site_tests/firmware_LegacyRecovery/control
index 04e04d7..78c8336 100644
--- a/server/site_tests/firmware_LegacyRecovery/control
+++ b/server/site_tests/firmware_LegacyRecovery/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_LegacyRecovery"
 PURPOSE = "Servo based test to Verify recovery request at Remove Screen."
 CRITERIA = "This test will fail if the recovery at Remove screen is success."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv4, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv4, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test image
+This test requires a USB disk plugged-in, which contains a ChromeOS test image
 (built by "build_image --test"). It recovery boots to the USB image and sets
 recovery_request=1 and do a reboot. A failure is expected.
 """
diff --git a/server/site_tests/firmware_LegacyRecovery/firmware_LegacyRecovery.py b/server/site_tests/firmware_LegacyRecovery/firmware_LegacyRecovery.py
index e91eb08..1835fd5 100644
--- a/server/site_tests/firmware_LegacyRecovery/firmware_LegacyRecovery.py
+++ b/server/site_tests/firmware_LegacyRecovery/firmware_LegacyRecovery.py
@@ -14,7 +14,7 @@
     """
     Servo based test to Verify recovery request at Remove Screen.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image --test"). It recovery boots to the USB image
     and sets recovery_request=1 and do a reboot. A failure is expected.
     """
diff --git a/server/site_tests/firmware_MenuDevBootUSB/control b/server/site_tests/firmware_MenuDevBootUSB/control
new file mode 100644
index 0000000..4b280f2
--- /dev/null
+++ b/server/site_tests/firmware_MenuDevBootUSB/control
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_MenuDevBootUSB"
+PURPOSE = "Select external boot menu item on developer screen"
+CRITERIA = "This test will fail if the external boot menu item doesn't have any effect"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+DOC = """
+This test boots from USB disk by menu navigation in developer UI.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_MenuDevBootUSB", host=host, cmdline_args=args,
+                 disable_sysinfo=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_MenuDevBootUSB/firmware_MenuDevBootUSB.py b/server/site_tests/firmware_MenuDevBootUSB/firmware_MenuDevBootUSB.py
new file mode 100644
index 0000000..f22e1b3
--- /dev/null
+++ b/server/site_tests/firmware_MenuDevBootUSB/firmware_MenuDevBootUSB.py
@@ -0,0 +1,99 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+
+
+class firmware_MenuDevBootUSB(FirmwareTest):
+    """
+    Servo based test for USB boot in developer mode through the UI menu.
+    """
+    version = 1
+
+    def initialize(self, host, cmdline_args):
+        super(firmware_MenuDevBootUSB, self).initialize(host, cmdline_args)
+        if not self.menu_switcher:
+            raise error.TestNAError('Test skipped for menuless UI')
+        if not self.faft_config.chrome_ec:
+            raise error.TestNAError('Cannot check power state without EC')
+        self.switcher.setup_mode('dev')
+        self.setup_usbkey(usbkey=True, host=True, used_for_recovery=False)
+
+    def cleanup(self):
+        """Clean up the test."""
+        try:
+            self.faft_client.system.set_dev_boot_usb(0)
+            self.servo.switch_usbkey('host')
+        except Exception as e:
+            logging.error("Caught exception: %s", str(e))
+        super(firmware_MenuDevBootUSB, self).cleanup()
+
+    def _dev_reboot_and_unplug_usb(self):
+        """Reboot from internal disk and unplug USB disk."""
+        # Device must be in dev mode
+        logging.info('Reboot to dev mode and unplug USB')
+        self.switcher.mode_aware_reboot()
+        self.servo.switch_usbkey('host')
+
+    def run_once(self):
+        """Method which actually runs the test."""
+        self.check_state((self.checkers.mode_checker, 'dev'))
+        self.servo.switch_usbkey('dut')
+        self.faft_client.system.set_dev_boot_usb(1)
+        self.faft_client.system.set_dev_default_boot('disk')
+
+        # Now the device should be in dev screen
+        logging.info('Boot from USB in developer screen')
+        self.switcher.simple_reboot()
+        self.menu_switcher.dev_boot_from_external()
+        self.switcher.wait_for_client()
+        self.check_state((self.checkers.dev_boot_usb_checker, (True, True),
+                          'Device not booted from USB'))
+
+        # Reboot from internal disk in order to unplug USB
+        self._dev_reboot_and_unplug_usb()
+
+        # For menu UI, boot from USB in external boot screen, a polling screen
+        # that repeatedly checks for USB disks
+        if self.faft_config.mode_switcher_type != 'tablet_detachable_switcher':
+            logging.info('Boot from USB in external boot screen')
+            self.switcher.simple_reboot()
+            self.menu_switcher.dev_boot_from_external()
+            self.switcher.wait_for_client_offline()
+
+            # Since there is no USB plugged-in, now the device should be in
+            # external boot screen
+            self.servo.switch_usbkey('dut')
+            self.switcher.wait_for_client()
+            self.check_state((self.checkers.dev_boot_usb_checker, (True, True),
+                              'Device not booted from USB properly'))
+            self._dev_reboot_and_unplug_usb()
+        else:
+            logging.info('Skipped polling screen test for switcher type %s',
+                         self.faft_config.mode_switcher_type)
+
+        # After selecting "Boot from external disk" while no USB is plugged-in,
+        # the UI should still work
+        logging.info('Try to boot from USB without USB plugged-in')
+        self.switcher.simple_reboot()
+        self.menu_switcher.dev_boot_from_external()
+        self.wait_for('keypress_delay')
+        if self.faft_config.mode_switcher_type == 'tablet_detachable_switcher':
+            # In legacy menu UI, the device should still be in the developer
+            # boot options screen
+            self.menu_switcher.menu.down()  # Boot From Internal Disk
+        else:
+            # In menu UI, the device should have changed to external boot screen
+            self.menu_switcher.menu.select('Going back to dev screen...')
+            self.wait_for('keypress_delay')
+            self.menu_switcher.menu.up()  # Boot from internal disk
+        self.wait_for('keypress_delay')
+        self.menu_switcher.menu.select(
+                'Selecting "Boot from internal disk"...')
+        self.switcher.wait_for_client()
+        self.check_state((self.checkers.dev_boot_usb_checker, False,
+                          'Device not booted from internal disk properly'))
diff --git a/server/site_tests/firmware_MenuModeTransition/control b/server/site_tests/firmware_MenuModeTransition/control
index 8255478..2a2d08c 100644
--- a/server/site_tests/firmware_MenuModeTransition/control
+++ b/server/site_tests/firmware_MenuModeTransition/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_MenuModeTransition"
-PURPOSE = "Servo based developer firmware boot test"
+PURPOSE = "Perform mode transitions through menu navigation"
 CRITERIA = "This test will fail if mode transition does not work"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv1, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot, suite:faft_smoke, suite:bvt-faft"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
-TIME = "SHORT"
+TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test triggers to-dev transition by menu and boots from developer mode.
@@ -23,9 +24,9 @@
 args_dict = utils.args_to_dict(args)
 servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
 
-def run_devmode(machine):
+def run(machine):
     host = hosts.create_host(machine, servo_args=servo_args)
     job.run_test("firmware_MenuModeTransition", host=host, cmdline_args=args,
-                 disable_sysinfo=True, dev_mode=False, tag="normal")
+                 disable_sysinfo=True)
 
-parallel_simple(run_devmode, machines)
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_MenuModeTransition/firmware_MenuModeTransition.py b/server/site_tests/firmware_MenuModeTransition/firmware_MenuModeTransition.py
index 77b3516..6261b28 100644
--- a/server/site_tests/firmware_MenuModeTransition/firmware_MenuModeTransition.py
+++ b/server/site_tests/firmware_MenuModeTransition/firmware_MenuModeTransition.py
@@ -8,216 +8,16 @@
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
 
 
-class BaseMenuNavigator:
-    """Base class for menu navigator."""
-
-    def __init__(self, test):
-        self.test = test
-        self.faft_config = self.test.faft_config
-        self.servo = self.test.servo
-
-    def menu_up(self):
-        """Navigate up in the menu."""
-        if self.faft_config.is_detachable:
-            self.servo.set_nocheck('volume_up_hold', 100)
-        else:
-            self.servo.arrow_up()
-
-    def menu_down(self):
-        """Navigate down in the menu."""
-        if self.faft_config.is_detachable:
-            self.servo.set_nocheck('volume_down_hold', 100)
-        else:
-            self.servo.arrow_down()
-
-    def menu_select(self, msg=None):
-        """Select a menu item."""
-        if msg:
-            logging.info(msg)
-        if self.faft_config.is_detachable:
-            self.servo.power_short_press()
-        else:
-            self.servo.enter_key()
-
-
-class LegacyMenuNavigator(BaseMenuNavigator):
-    """Menu navigator for legacy menu UI.
-
-    The "legacy menu UI" is an old menu-based UI, which has been replaced
-    by the new one, called "menu UI".
-    """
-
-    def trigger_rec_to_dev(self):
-        """Trigger to-dev transition."""
-        self.test.switcher.trigger_rec_to_dev()
-
-    def dev_boot_from_internal(self):
-        """Boot from internal disk in developer mode.
-
-        Menu items in developer warning screen:
-            0. Developer Options
-            1. Show Debug Info
-            2. Enable OS Verification
-           *3. Power Off
-            4. Language
-
-        (*) is the default selection.
-        """
-        self.test.wait_for('firmware_screen')
-        for _ in range(3, 0, -1):
-            self.menu_up()
-            self.test.wait_for('confirm_screen')
-        self.menu_select('Selecting "Developer Options"...')
-        self.test.wait_for('confirm_screen')
-        self.menu_select('Selecting "Boot From Internal Disk"...')
-
-    def trigger_dev_to_normal(self):
-        """Trigger dev-to-norm transition.
-
-        Menu items in developer warning screen:
-            0. Developer Options
-            1. Show Debug Info
-            2. Enable OS Verification
-           *3. Power Off
-            4. Language
-
-        Menu items in to-norm confirmation screen:
-           *0. Confirm Enabling OS Verification
-            1. Cancel
-            2. Power Off
-            3. Language
-
-        (*) is the default selection.
-        """
-        self.test.wait_for('firmware_screen')
-        for _ in range(3, 2, -1):
-            self.menu_up()
-            self.test.wait_for('confirm_screen')
-        self.menu_select('Selecting "Enable OS Verification"...')
-        self.test.wait_for('confirm_screen')
-        self.menu_select('Selecing "Confirm Enabling OS Verification"...')
-
-
-class MenuNavigator(BaseMenuNavigator):
-    """Menu navigator for menu UI.
-
-    The "menu UI" aims to replace both "legacy clamshell UI" and "legacy
-    menu UI". See chromium:1033815 for the discussion about the naming.
-    """
-
-    def _confirm_to_dev(self):
-        if self.faft_config.rec_button_dev_switch:
-            logging.info('Confirm to-dev by RECOVERY button')
-            self.servo.toggle_recovery_switch()
-        elif self.faft_config.power_button_dev_switch:
-            logging.info('Confirm to-dev by POWER button')
-            self.servo.power_normal_press()
-        else:
-            self.menu_select('Confirm to-dev by menu selection')
-
-    def trigger_rec_to_dev(self):
-        """Trigger to-dev transition.
-
-        Menu items in recovery select screen:
-            0. Language
-            1. Recovery using phone
-            2. Recovery using external disk
-            3. Launch diagnostics
-            4. Advanced options
-            5. Power off
-
-        Menu items in advanced options screen:
-            0. Language
-           *1. Enable developer mode
-            2. Back
-            3. Power off
-
-        Menu items in to-dev screen:
-            0. Language
-           *1. Confirm
-            2. Cancel
-            3. Power off
-
-        (*) is the default selection.
-        """
-        self.test.wait_for('firmware_screen')
-        # Since the default selection is unknown, navigate to item 5 first
-        for _ in range(0, 5):
-            self.menu_down()
-            self.test.wait_for('confirm_screen')
-        # Navigate to "Advanced options"
-        self.menu_up()
-        self.test.wait_for('confirm_screen')
-        self.menu_select('Selecting "Advanced options"...')
-        self.test.wait_for('confirm_screen')
-        self.menu_select('Selecting "Enable developer mode"...')
-        self.test.wait_for('confirm_screen')
-        # Confirm to-dev transition
-        self._confirm_to_dev()
-
-    def dev_boot_from_internal(self):
-        """Boot from internal disk in developer mode.
-
-        Menu items in developer mode screen:
-            0. Language
-            1. Return to secure mode
-            2. Boot from internal disk
-            3. Boot from external disk
-            4. Advanced options
-            5. Power off
-        """
-        self.test.wait_for('firmware_screen')
-        # Since the default selection is unknown, navigate to item 0 first
-        for _ in range(5, 0, -1):
-            self.menu_up()
-            self.test.wait_for('confirm_screen')
-        # Navigate to "Boot from internal disk"
-        for _ in range(0, 2):
-            self.menu_down()
-            self.test.wait_for('confirm_screen')
-        self.menu_select('Selecting "Boot from internal disk"...')
-
-    def trigger_dev_to_normal(self):
-        """Trigger dev-to-norm transition.
-
-        Menu items in developer mode screen:
-            0. Language
-            1. Return to secure mode
-            2. Boot from internal disk
-            3. Boot from external disk
-            4. Advanced options
-            5. Power off
-
-        Menu items in to-norm screen:
-            0. Language
-           *1. Confirm
-            2. Cancel
-            3. Power off
-
-        (*) is the default selection.
-        """
-        self.test.wait_for('firmware_screen')
-        # Since the default selection is unknown, navigate to item 0 first
-        for _ in range(5, 0, -1):
-            self.menu_up()
-            self.test.wait_for('confirm_screen')
-        # Navigate to "Return to secure mode"
-        self.menu_down()
-        self.test.wait_for('confirm_screen')
-        self.menu_select('Selecting "Return to secure mode"...')
-        self.test.wait_for('confirm_screen')
-        self.menu_select('Selecing "Confirm"...')
-
-
 class firmware_MenuModeTransition(FirmwareTest):
     """
     Servo based test for manual mode transitions through the UI menu.
     """
     version = 1
 
-    def initialize(self, host, cmdline_args, ec_wp=None):
-        super(firmware_MenuModeTransition, self).initialize(
-                host, cmdline_args, ec_wp=ec_wp)
+    def initialize(self, host, cmdline_args):
+        super(firmware_MenuModeTransition, self).initialize(host, cmdline_args)
+        if not self.menu_switcher:
+            raise error.TestNAError('Test skipped for menuless UI')
         self.switcher.setup_mode('normal')
         self.setup_usbkey(usbkey=False)
 
@@ -225,22 +25,14 @@
         """Method which actually runs the test."""
         self.check_state((self.checkers.mode_checker, 'normal'))
 
-        if self.faft_config.mode_switcher_type == 'menu_switcher':
-            navigator = MenuNavigator(self)
-        elif (self.faft_config.mode_switcher_type ==
-              'tablet_detachable_switcher'):
-            navigator = LegacyMenuNavigator(self)
-        else:
-            raise error.TestNAError('Test skipped for menuless UI')
-
         # Trigger to-dev by menu navigation
         logging.info('Trigger to-dev by menu navigation.')
         self.switcher.enable_rec_mode_and_reboot(usb_state='host')
         self.switcher.wait_for_client_offline()
-        navigator.trigger_rec_to_dev()
+        self.menu_switcher.trigger_rec_to_dev()
 
         # Now the device should be in dev mode screen
-        navigator.dev_boot_from_internal()
+        self.menu_switcher.dev_boot_from_internal()
         self.switcher.wait_for_client()
 
         logging.info('Expected dev mode boot.')
@@ -248,9 +40,9 @@
 
         # Trigger to-norm by menu navigation
         logging.info('Trigger to-norm by menu navigation.')
-        self.switcher.disable_rec_mode_and_reboot()
+        self.switcher.simple_reboot()
         self.switcher.wait_for_client_offline()
-        navigator.trigger_dev_to_normal()
+        self.menu_switcher.trigger_dev_to_normal()
         self.switcher.wait_for_client()
 
         logging.info('Expected normal mode boot, done.')
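
For reference, the refactor above drops the per-test LegacyMenuNavigator/MenuNavigator classes in favor of the shared menu_switcher object that FirmwareTest exposes. A minimal sketch of the resulting end-to-end flow, collected without the diff markers and using only calls exercised in this change; the class name firmware_ExampleMenuFlow is hypothetical:

from autotest_lib.client.common_lib import error
from autotest_lib.server.cros.faft.firmware_test import FirmwareTest


class firmware_ExampleMenuFlow(FirmwareTest):
    """Hypothetical sketch: normal -> dev -> normal via menu_switcher."""
    version = 1

    def initialize(self, host, cmdline_args):
        super(firmware_ExampleMenuFlow, self).initialize(host, cmdline_args)
        if not self.menu_switcher:
            raise error.TestNAError('Test skipped for menuless UI')
        self.switcher.setup_mode('normal')
        self.setup_usbkey(usbkey=False)

    def run_once(self):
        """Walk the same transitions as firmware_MenuModeTransition."""
        self.check_state((self.checkers.mode_checker, 'normal'))
        # To-dev: reboot into recovery, then drive the firmware menus.
        self.switcher.enable_rec_mode_and_reboot(usb_state='host')
        self.switcher.wait_for_client_offline()
        self.menu_switcher.trigger_rec_to_dev()
        self.menu_switcher.dev_boot_from_internal()
        self.switcher.wait_for_client()
        self.check_state((self.checkers.mode_checker, 'dev'))
        # To-norm: a plain reboot lands on the developer screen.
        self.switcher.simple_reboot()
        self.switcher.wait_for_client_offline()
        self.menu_switcher.trigger_dev_to_normal()
        self.switcher.wait_for_client()
        self.check_state((self.checkers.mode_checker, 'normal'))
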
diff --git a/server/site_tests/firmware_MenuPowerOff/control b/server/site_tests/firmware_MenuPowerOff/control
new file mode 100644
index 0000000..bae6623
--- /dev/null
+++ b/server/site_tests/firmware_MenuPowerOff/control
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_MenuPowerOff"
+PURPOSE = "Select power off menu item on firmware screens"
+CRITERIA = "This test will fail the power off menu item doesn't have any effect"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "Server"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+DOC = """
+This test selects "Power off" menu item in firmware UI.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_MenuPowerOff", host=host, cmdline_args=args,
+                 disable_sysinfo=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_MenuPowerOff/firmware_MenuPowerOff.py b/server/site_tests/firmware_MenuPowerOff/firmware_MenuPowerOff.py
new file mode 100644
index 0000000..a1fda85
--- /dev/null
+++ b/server/site_tests/firmware_MenuPowerOff/firmware_MenuPowerOff.py
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+
+
+class firmware_MenuPowerOff(FirmwareTest):
+    """
+    Servo based test for powering off the device through the UI menu.
+    """
+    version = 1
+
+    # Timeout of confirming DUT shutdown
+    POWER_OFF_TIMEOUT = 20
+
+    def initialize(self, host, cmdline_args):
+        super(firmware_MenuPowerOff, self).initialize(host, cmdline_args)
+        if not self.menu_switcher:
+            raise error.TestNAError('Test skipped for menuless UI')
+        if not self.faft_config.chrome_ec:
+            raise error.TestNAError('Cannot check power state without EC')
+        self.switcher.setup_mode('dev')
+        self.setup_usbkey(usbkey=False)
+
+    def run_once(self):
+        """Method which actually runs the test."""
+        self.check_state((self.checkers.mode_checker, 'dev'))
+        self.switcher.simple_reboot()
+
+        # Now the device should be in dev screen
+        logging.info('Power off device in developer screen')
+        self.run_shutdown_process(self.menu_switcher.power_off,
+                                  run_power_action=False,
+                                  shutdown_timeout=self.POWER_OFF_TIMEOUT)
+
+        # Reboot to rec screen
+        self.switcher.enable_rec_mode_and_reboot(usb_state='host')
+
+        # Now the device should be in rec screen
+        logging.info('Power off device in recovery screen')
+        self.run_shutdown_process(
+                self.menu_switcher.power_off,
+                post_power_action=self.switcher.bypass_dev_mode,
+                shutdown_timeout=self.POWER_OFF_TIMEOUT)
+        self.switcher.wait_for_client()
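
firmware_MenuPowerOff above, and the firmware_MiniDiag, firmware_MiniosMenu and firmware_MiniosPriority tests added below, all open initialize() with the same TestNAError guards. A small helper could factor that out; the function below is a hypothetical sketch, not part of this change:

from autotest_lib.client.common_lib import error


def require_menu_ui(test, need_ec=False):
    """Raise TestNAError unless the DUT has the menu UI (and optionally an EC).

    Hypothetical helper: 'test' is any FirmwareTest instance, which provides
    the menu_switcher and faft_config attributes used in this change.
    """
    if not test.menu_switcher:
        raise error.TestNAError('Test skipped for menuless UI')
    if need_ec and not test.faft_config.chrome_ec:
        raise error.TestNAError('Cannot check power state without EC')

In initialize(), require_menu_ui(self, need_ec=True) would replace the two explicit checks.
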
diff --git a/server/site_tests/firmware_MiniDiag/control b/server/site_tests/firmware_MiniDiag/control
new file mode 100644
index 0000000..6062af9
--- /dev/null
+++ b/server/site_tests/firmware_MiniDiag/control
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_MiniDiag"
+PURPOSE = "Servo based MiniDiag firmware boot test"
+CRITERIA = "This test will fail if MiniDiag does not work"
+ATTRIBUTES = ""
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv4, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 4
+PY_VERSION = 3
+
+DOC = """
+This test checks the MiniDiag (pre-boot diagnostics) firmware boot.
+This test only runs on devices that use the menu UI and have MiniDiag enabled.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_MiniDiag", host=host, cmdline_args=args,
+                 disable_sysinfo=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_MiniDiag/firmware_MiniDiag.py b/server/site_tests/firmware_MiniDiag/firmware_MiniDiag.py
new file mode 100644
index 0000000..540b01c
--- /dev/null
+++ b/server/site_tests/firmware_MiniDiag/firmware_MiniDiag.py
@@ -0,0 +1,48 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+
+
+class firmware_MiniDiag(FirmwareTest):
+    """
+    Servo based MiniDiag firmware boot test.
+    """
+    version = 1
+
+    def initialize(self, host, cmdline_args):
+        super(firmware_MiniDiag, self).initialize(host, cmdline_args)
+
+        if not self.menu_switcher:
+            raise error.TestNAError('Test skipped for menuless UI')
+        if not self.faft_config.minidiag_enabled:
+            raise error.TestNAError('MiniDiag is not enabled for this board')
+        # Need apreset to leave MiniDiag
+        if not self.ec.has_command('apreset'):
+            raise error.TestNAError('EC command apreset is not supported')
+
+        self.switcher.setup_mode('normal')
+        self.setup_usbkey(usbkey=False)
+
+    def run_once(self):
+        """Method which actually runs the test."""
+        # Trigger MiniDiag by menu navigation
+        logging.info('Trigger MiniDiag by menu navigation')
+        self.switcher.enable_rec_mode_and_reboot(usb_state='host')
+        self.switcher.wait_for_client_offline()
+        self.menu_switcher.trigger_rec_to_minidiag()
+
+        # Navigate MiniDiag screens
+        logging.info('Navigate among MiniDiag screens')
+        self.menu_switcher.navigate_minidiag_storage()
+        self.menu_switcher.navigate_minidiag_quick_memory_check()
+
+        # Leave MiniDiag and reboot
+        logging.info('Leave MiniDiag and reboot')
+        self.menu_switcher.reset_and_leave_minidiag()
+        logging.info('Expect normal mode boot, done')
+        self.switcher.wait_for_client()
diff --git a/server/site_tests/firmware_MiniosMenu/control b/server/site_tests/firmware_MiniosMenu/control
new file mode 100644
index 0000000..81442b6
--- /dev/null
+++ b/server/site_tests/firmware_MiniosMenu/control
@@ -0,0 +1,33 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_MiniosMenu"
+PURPOSE = "Navigate to the MiniOS by menu navigation"
+CRITERIA = "This test will fail if the device fail to boot to MiniOS"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv4, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+DOC = """
+This test requires the device to support MiniOS. This test will boot to the
+manual recovery screen and try to boot MiniOS through the UI menu.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_MiniosMenu", host=host, cmdline_args=args,
+                 disable_sysinfo=True, older_version=False)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_MiniosMenu/control.old b/server/site_tests/firmware_MiniosMenu/control.old
new file mode 100644
index 0000000..931cff3
--- /dev/null
+++ b/server/site_tests/firmware_MiniosMenu/control.old
@@ -0,0 +1,33 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_MiniosMenu.old"
+PURPOSE = "Navigate to the MiniOS by menu navigation (older version)"
+CRITERIA = "This test will fail if the device fail to boot to MiniOS"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv4, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+DOC = """
+This test requires the device to support MiniOS. This test will boot to the
+manual recovery screen and try to boot MiniOS through the UI menu.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_MiniosMenu", host=host, cmdline_args=args,
+                 disable_sysinfo=True, older_version=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_MiniosMenu/firmware_MiniosMenu.py b/server/site_tests/firmware_MiniosMenu/firmware_MiniosMenu.py
new file mode 100644
index 0000000..b184435
--- /dev/null
+++ b/server/site_tests/firmware_MiniosMenu/firmware_MiniosMenu.py
@@ -0,0 +1,52 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+
+
+class firmware_MiniosMenu(FirmwareTest):
+    """
+    Servo based test for MiniOS boot through the UI menu.
+
+    This test requires the device to support MiniOS. This test will boot to the
+    manual recovery screen and try to boot MiniOS through the UI menu.
+    """
+    version = 1
+
+    def initialize(self, host, cmdline_args, older_version):
+        super(firmware_MiniosMenu, self).initialize(host, cmdline_args)
+
+        self.test_skipped = True
+        if not self.menu_switcher:
+            raise error.TestNAError('Test skipped for menuless UI')
+        if not self.faft_config.chrome_ec:
+            raise error.TestNAError('Cannot check power state without EC')
+        if not self.faft_config.minios_enabled:
+            raise error.TestNAError('MiniOS is not enabled for this board')
+        self.test_skipped = False
+
+        self.host = host
+        self.switcher.setup_mode('normal')
+        self.setup_usbkey(usbkey=False)
+        self.older_version = older_version
+
+    def cleanup(self):
+        if not self.test_skipped:
+            try:
+                self.switcher.leave_minios()
+            except Exception as e:
+                logging.error('Caught exception: %s', str(e))
+        super(firmware_MiniosMenu, self).cleanup()
+
+    def run_once(self):
+        """Run a single iteration of the test."""
+        logging.info('Boot into recovery mode, older_version: %s',
+                     self.older_version)
+        self.switcher.reboot_to_mode(to_mode="rec", wait_for_dut_up=False)
+        self.wait_for('firmware_screen')
+        self.menu_switcher.trigger_rec_to_minios(self.older_version)
+        self.check_state(self.checkers.minios_checker)
diff --git a/server/site_tests/firmware_MiniosPriority/control.minios_a b/server/site_tests/firmware_MiniosPriority/control.minios_a
new file mode 100644
index 0000000..fb91dd6
--- /dev/null
+++ b/server/site_tests/firmware_MiniosPriority/control.minios_a
@@ -0,0 +1,36 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_MiniosPriority.minios_a"
+PURPOSE = "Set the MiniOS priority and verify the device can boot to MiniOS in any priority setting"
+CRITERIA = "This test will fail if the device fail to boot to MiniOS or boot to the wrong partition"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv4, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+DOC = """
+This test requires the device to support MiniOS. At runtime, this test uses
+the crossystem tool to modify the MiniOS priority and tries to boot MiniOS
+from the firmware manual recovery screen. After booting, this test verifies
+that the device successfully booted to MiniOS. This test does not cover
+verifying that the device booted to the specified partition.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_MiniosPriority", host=host, cmdline_args=args,
+                 disable_sysinfo=True, minios_priority='a')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_MiniosPriority/control.minios_b b/server/site_tests/firmware_MiniosPriority/control.minios_b
new file mode 100644
index 0000000..ca677bb
--- /dev/null
+++ b/server/site_tests/firmware_MiniosPriority/control.minios_b
@@ -0,0 +1,36 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "ChromeOS Team"
+NAME = "firmware_MiniosPriority.minios_b"
+PURPOSE = "Set the MiniOS priority and verify the device can boot to MiniOS in any priority setting"
+CRITERIA = "This test will fail if the device fail to boot to MiniOS or boot to the wrong partition"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv4, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+DOC = """
+This test requires the device to support MiniOS. At runtime, this test uses
+the crossystem tool to modify the MiniOS priority and tries to boot MiniOS
+from the firmware manual recovery screen. After booting, this test verifies
+that the device successfully booted to MiniOS. This test does not cover
+verifying that the device booted to the specified partition.
+"""
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("firmware_MiniosPriority", host=host, cmdline_args=args,
+                 disable_sysinfo=True, minios_priority='b')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/firmware_MiniosPriority/firmware_MiniosPriority.py b/server/site_tests/firmware_MiniosPriority/firmware_MiniosPriority.py
new file mode 100644
index 0000000..da51a94
--- /dev/null
+++ b/server/site_tests/firmware_MiniosPriority/firmware_MiniosPriority.py
@@ -0,0 +1,54 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
+
+
+class firmware_MiniosPriority(FirmwareTest):
+    """
+    Servo based MiniOS boot priority test.
+
+    This test requires the device to support MiniOS. At runtime, this test
+    uses the crossystem tool to modify the MiniOS priority and tries to boot
+    MiniOS from the firmware manual recovery screen. After booting, this test
+    verifies that the device successfully booted to MiniOS. This test does not
+    cover verifying that the device booted to the specified partition.
+    """
+    version = 1
+
+    def initialize(self, host, cmdline_args, minios_priority):
+        super(firmware_MiniosPriority, self).initialize(host, cmdline_args)
+
+        self.test_skipped = True
+        if not self.menu_switcher:
+            raise error.TestNAError('Test skipped for menuless UI')
+        if not self.faft_config.chrome_ec:
+            raise error.TestNAError('Cannot check power state without EC')
+        if not self.faft_config.minios_enabled:
+            raise error.TestNAError('MiniOS is not enabled for this board')
+        self.test_skipped = False
+
+        self.host = host
+        self.switcher.setup_mode('normal')
+        self.setup_usbkey(usbkey=False)
+        self.minios_priority = minios_priority
+        self.restored_priority = self.faft_client.system.get_minios_priority()
+
+    def cleanup(self):
+        if not self.test_skipped:
+            try:
+                self.switcher.leave_minios()
+                self.faft_client.system.set_minios_priority(
+                        self.restored_priority)
+            except Exception as e:
+                logging.error('Caught exception: %s', str(e))
+        super(firmware_MiniosPriority, self).cleanup()
+
+    def run_once(self):
+        """Run a single iteration of the test."""
+        self.switcher.launch_minios(self.minios_priority)
+        self.check_state(self.checkers.minios_checker)
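
The DOC above says the priority is flipped with the crossystem tool; in the test this goes through the faft_client.system RPCs, and cleanup() restores the saved value. A rough DUT-side equivalent, assuming the crossystem key is named minios_priority (an assumption, not confirmed by this change); 'host' is any autotest host object with .run():

def get_minios_priority(host):
    # Read the current MiniOS A/B priority from the DUT.
    # Assumes a 'minios_priority' crossystem key (not confirmed by this diff).
    return host.run('crossystem minios_priority').stdout.strip()


def set_minios_priority(host, priority):
    # 'priority' is 'a' or 'b', matching the control.minios_a and
    # control.minios_b variants above.
    host.run('crossystem minios_priority=%s' % priority)
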
diff --git a/server/site_tests/firmware_Mosys/control b/server/site_tests/firmware_Mosys/control
index 3f53c2f..e01b69c 100644
--- a/server/site_tests/firmware_Mosys/control
+++ b/server/site_tests/firmware_Mosys/control
@@ -4,19 +4,20 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_Mosys"
 PURPOSE = "Execute firmware Mosys commands and check return value."
 CRITERIA = """
 This test will fail if mosys return unexpected output or incorrect value.
 """
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv1, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot, suite:bvt-faft, suite:labqual"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv1, suite:faft_normal, suite:faft_bios_tot, suite:labqual"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 Exercise Mosys command and verify the output.
diff --git a/server/site_tests/firmware_Mosys/firmware_Mosys.py b/server/site_tests/firmware_Mosys/firmware_Mosys.py
index 80877a6..5264144 100644
--- a/server/site_tests/firmware_Mosys/firmware_Mosys.py
+++ b/server/site_tests/firmware_Mosys/firmware_Mosys.py
@@ -27,7 +27,8 @@
         # Parse arguments from command line
         dict_args = utils.args_to_dict(cmdline_args)
         super(firmware_Mosys, self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         # a list contain failed execution.
         self.failed_command = []
         # Get a list of available mosys commands.
@@ -96,7 +97,7 @@
             logging.info('Expected ec version %s actual_version %s',
                          exp_ec_version, actual_version)
             if exp_ec_version != actual_version:
-               self._tag_failure(command)
+                self._tag_failure(command)
         else:
             self._tag_failure(command)
             logging.error('Failed to locate version from ectool')
@@ -126,7 +127,7 @@
             logging.info('Expected pd version %s actual_version %s',
                          exp_pd_version, actual_version)
             if exp_pd_version != actual_version:
-               self._tag_failure(command)
+                self._tag_failure(command)
         else:
             self._tag_failure(command)
             logging.error('Failed to locate version from ectool')
@@ -140,18 +141,18 @@
         # mosys -k ec info
         command = 'mosys -k ec info'
         if self.faft_config.chrome_ec:
-          output = self.run_cmd(command)
-          self.check_for_errors(output, command)
-          p = re.compile(
-            'vendor="[A-Z]?[a-z]+" name="[ -~]+" fw_version="(.*)"')
-          v = p.match(output[0])
-          if v:
-             version = v.group(1)
-             self.check_ec_version(command, version)
-          else:
-            self._tag_failure(command)
+            output = self.run_cmd(command)
+            self.check_for_errors(output, command)
+            p = re.compile(
+                    'vendor="[A-Z]?[a-z]+" name="[ -~]+" fw_version="(.*)"')
+            v = p.match(output[0])
+            if v:
+                version = v.group(1)
+                self.check_ec_version(command, version)
+            else:
+                self._tag_failure(command)
         else:
-          logging.info('Skip "%s", command not available.', command)
+            logging.info('Skip "%s", command not available.', command)
 
         # mosys platform name
         command = 'mosys platform name'
@@ -161,17 +162,17 @@
         # mosys -k pd info
         command = 'mosys -k pd info'
         if self.faft_config.chrome_usbpd and 'pd' in self.command_list:
-          output = self.run_cmd(command)
-          self.check_for_errors(output, command)
-          p = re.compile('vendor="[a-z]+" name="[ -~]+" fw_version="(.*)"')
-          v = p.match(output[0])
-          if v:
-             version = v.group(1)
-             self.check_pd_version(command, version)
-          else:
-             self._tag_failure(command)
+            output = self.run_cmd(command)
+            self.check_for_errors(output, command)
+            p = re.compile('vendor="[a-z]+" name="[ -~]+" fw_version="(.*)"')
+            v = p.match(output[0])
+            if v:
+                version = v.group(1)
+                self.check_pd_version(command, version)
+            else:
+                self._tag_failure(command)
         else:
-          logging.info('Skip "%s", command not available.', command)
+            logging.info('Skip "%s", command not available.', command)
 
         # mosys -k memory spd print all (check no error output)
         command = 'mosys -k memory spd print all'
@@ -188,7 +189,7 @@
         # Add any other mosys commands or tests before this section.
         # empty failed_command indicate all passed.
         if self.failed_command:
-          raise error.TestFail('%d commands failed, detail above.  '
-                               'Failed commands are "%s"' %
-                               (len(self.failed_command),
-                               ','.join(self.failed_command)))
+            raise error.TestFail(
+                    '%d commands failed, detail above.  '
+                    'Failed commands are "%s"' %
+                    (len(self.failed_command), ','.join(self.failed_command)))
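
The re-indented 'mosys -k ec info' block above hangs on one regex to extract fw_version. A standalone illustration of that match; the sample output line is made up:

import re

# Same pattern as in the 'mosys -k ec info' block above.
EC_INFO_RE = re.compile(
        'vendor="[A-Z]?[a-z]+" name="[ -~]+" fw_version="(.*)"')

# Hypothetical sample line; real output comes from 'mosys -k ec info'.
sample = 'vendor="Nuvoton" name="npcx_uut" fw_version="board_v2.0.1234-abcdef"'

match = EC_INFO_RE.match(sample)
if match:
    print('EC firmware version: %s' % match.group(1))
else:
    print('Output format not recognized; the command output may have changed.')
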
diff --git a/server/site_tests/firmware_PDConnect/control b/server/site_tests/firmware_PDConnect/control
index 54e9ee5..12c2f26 100644
--- a/server/site_tests/firmware_PDConnect/control
+++ b/server/site_tests/firmware_PDConnect/control
@@ -4,15 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDConnect"
 PURPOSE = "Servo based PD Connect/Disconnect test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if PD connection is not successful each attempt"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDConnect/control.dts b/server/site_tests/firmware_PDConnect/control.dts
index 45bcfef..1279def 100644
--- a/server/site_tests/firmware_PDConnect/control.dts
+++ b/server/site_tests/firmware_PDConnect/control.dts
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDConnect.dts"
 PURPOSE = "Servo based PD Connect/Disconnect test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if PD connection is not successful each attempt"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDConnect/control.dts_flip b/server/site_tests/firmware_PDConnect/control.dts_flip
index 193131b..f5c5e8a 100644
--- a/server/site_tests/firmware_PDConnect/control.dts_flip
+++ b/server/site_tests/firmware_PDConnect/control.dts_flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDConnect.dts_flip"
 PURPOSE = "Servo based PD Connect/Disconnect test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if PD connection is not successful each attempt"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDConnect/control.flip b/server/site_tests/firmware_PDConnect/control.flip
index f21076a..8dbe61d 100644
--- a/server/site_tests/firmware_PDConnect/control.flip
+++ b/server/site_tests/firmware_PDConnect/control.flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDConnect.flip"
 PURPOSE = "Servo based PD Connect/Disconnect test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if PD connection is not successful each attempt"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDConnect/firmware_PDConnect.py b/server/site_tests/firmware_PDConnect/firmware_PDConnect.py
index 068a537..2cda0c7 100644
--- a/server/site_tests/firmware_PDConnect/firmware_PDConnect.py
+++ b/server/site_tests/firmware_PDConnect/firmware_PDConnect.py
@@ -35,13 +35,13 @@
                              'Unable to set it disconnected; skip this item.')
                 continue
 
-            for attempt in xrange(self.CONNECT_ITERATIONS):
+            for attempt in range(self.CONNECT_ITERATIONS):
                 logging.info('Disconnect/Connect iteration %d', attempt)
                 try:
                     if dev.drp_disconnect_connect(RECONNECT_DELAY) == False:
                         raise error.TestFail('Disconnect/Connect Failed')
                 except NotImplementedError:
-                    logging.warn('Device does not support disconnect/connect')
+                    logging.warning('Device does not support disconnect/connect')
                     break
 
     def initialize(self, host, cmdline_args, flip_cc=False, dts_mode=False):
@@ -84,7 +84,7 @@
                     swappable_dev = dev
                     break
             except NotImplementedError:
-                logging.warn('Power role swap not supported on the device')
+                logging.warning('Power role swap not supported on the device')
 
         if swappable_dev:
             try:
@@ -95,5 +95,5 @@
                 if not swappable_dev.pr_swap():
                     logging.error('Failed to swap power role to the original')
         else:
-            logging.warn('Device pair could not perform power role swap, '
+            logging.warning('Device pair could not perform power role swap, '
                          'ending test')
diff --git a/server/site_tests/firmware_PDDataSwap/control b/server/site_tests/firmware_PDDataSwap/control
index 83eda74..3422177 100644
--- a/server/site_tests/firmware_PDDataSwap/control
+++ b/server/site_tests/firmware_PDDataSwap/control
@@ -4,15 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDDataSwap"
 PURPOSE = "Servo based PD data role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a data swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDDataSwap/control.dts b/server/site_tests/firmware_PDDataSwap/control.dts
index 1fa6a18..3b6fb64 100644
--- a/server/site_tests/firmware_PDDataSwap/control.dts
+++ b/server/site_tests/firmware_PDDataSwap/control.dts
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDDataSwap.dts"
 PURPOSE = "Servo based PD data role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a data swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDDataSwap/control.dts_flip b/server/site_tests/firmware_PDDataSwap/control.dts_flip
index 39d99aa..1055f5e 100644
--- a/server/site_tests/firmware_PDDataSwap/control.dts_flip
+++ b/server/site_tests/firmware_PDDataSwap/control.dts_flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDDataSwap.dts_flip"
 PURPOSE = "Servo based PD data role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a data swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDDataSwap/control.flip b/server/site_tests/firmware_PDDataSwap/control.flip
index da00c13..56cd282 100644
--- a/server/site_tests/firmware_PDDataSwap/control.flip
+++ b/server/site_tests/firmware_PDDataSwap/control.flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDDataSwap.flip"
 PURPOSE = "Servo based PD data role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a data swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDDataSwap/control.shutdown b/server/site_tests/firmware_PDDataSwap/control.shutdown
index be7a8e1..42282b5 100644
--- a/server/site_tests/firmware_PDDataSwap/control.shutdown
+++ b/server/site_tests/firmware_PDDataSwap/control.shutdown
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDDataSwap.shutdown"
 PURPOSE = "Servo based PD data role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a data swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDDataSwap/firmware_PDDataSwap.py b/server/site_tests/firmware_PDDataSwap/firmware_PDDataSwap.py
index 763f5e1..68c57ee 100644
--- a/server/site_tests/firmware_PDDataSwap/firmware_PDDataSwap.py
+++ b/server/site_tests/firmware_PDDataSwap/firmware_PDDataSwap.py
@@ -164,7 +164,7 @@
 
         @param pd_port: port number of DUT PD connection
         """
-        for attempt in xrange(self.DATA_SWAP_ITERATIONS):
+        for attempt in range(self.DATA_SWAP_ITERATIONS):
             # Use the same direction for every 2 loop iterations
             if attempt & 2:
                 direction = 'tx'
@@ -282,8 +282,8 @@
                         # Swap power role, back to the original
                         self._change_dut_power_role()
                 else:
-                    logging.warn('Power swap not successful!')
-                    logging.warn('Only tested with DUT in %s state',
+                    logging.warning('Power swap not successful!')
+                    logging.warning('Only tested with DUT in %s state',
                                  dut_connect_state)
             else:
                 logging.info('DUT does not advertise power swap support')
@@ -291,7 +291,7 @@
             logging.info('***************** Swap Results ********************')
             total_attempts = 0
             total_failures = 0
-            for direction, role in self.swap_attempt.iterkeys():
+            for direction, role in self.swap_attempt.keys():
                 logging.info('%s %s swap attempts = %d, failures = %d',
                              direction, role,
                              self.swap_attempt[(direction, role)],
diff --git a/server/site_tests/firmware_PDPowerSwap/control b/server/site_tests/firmware_PDPowerSwap/control
index 6afb2da..03bdf66 100644
--- a/server/site_tests/firmware_PDPowerSwap/control
+++ b/server/site_tests/firmware_PDPowerSwap/control
@@ -4,15 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDPowerSwap"
 PURPOSE = "Servo based PD power role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a power swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDPowerSwap/control.dts b/server/site_tests/firmware_PDPowerSwap/control.dts
index d568111..1f227c9 100644
--- a/server/site_tests/firmware_PDPowerSwap/control.dts
+++ b/server/site_tests/firmware_PDPowerSwap/control.dts
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDPowerSwap.dts"
 PURPOSE = "Servo based PD power role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a power swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDPowerSwap/control.dts_flip b/server/site_tests/firmware_PDPowerSwap/control.dts_flip
index b8ea628..b86739b 100644
--- a/server/site_tests/firmware_PDPowerSwap/control.dts_flip
+++ b/server/site_tests/firmware_PDPowerSwap/control.dts_flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDPowerSwap.dts_flip"
 PURPOSE = "Servo based PD power role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a power swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDPowerSwap/control.flip b/server/site_tests/firmware_PDPowerSwap/control.flip
index f8dc6d1..813da66 100644
--- a/server/site_tests/firmware_PDPowerSwap/control.flip
+++ b/server/site_tests/firmware_PDPowerSwap/control.flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDPowerSwap.flip"
 PURPOSE = "Servo based PD power role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a power swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDPowerSwap/control.shutdown b/server/site_tests/firmware_PDPowerSwap/control.shutdown
index eb55fc5..33612fe 100644
--- a/server/site_tests/firmware_PDPowerSwap/control.shutdown
+++ b/server/site_tests/firmware_PDPowerSwap/control.shutdown
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDPowerSwap.shutdown"
 PURPOSE = "Servo based PD power role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a power swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDPowerSwap/control.suspend b/server/site_tests/firmware_PDPowerSwap/control.suspend
index 9956bb9..51d0de9 100644
--- a/server/site_tests/firmware_PDPowerSwap/control.suspend
+++ b/server/site_tests/firmware_PDPowerSwap/control.suspend
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDPowerSwap.suspend"
 PURPOSE = "Servo based PD power role swap test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if a power swap gives unexpected results"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDPowerSwap/firmware_PDPowerSwap.py b/server/site_tests/firmware_PDPowerSwap/firmware_PDPowerSwap.py
index 93460d6..d024e90 100644
--- a/server/site_tests/firmware_PDPowerSwap/firmware_PDPowerSwap.py
+++ b/server/site_tests/firmware_PDPowerSwap/firmware_PDPowerSwap.py
@@ -77,15 +77,18 @@
             port = self.dut_port
         # Send power swap request
         self._send_power_swap_get_reply(port)
-        time.sleep(self.PD_CONNECT_DELAY)
-        # Get PDTester power role
-        pdtester_pr = self.pdtester_port.get_pd_state()
-        if self.dut_port.is_src(dut_pr) and self.pdtester_port.is_src(pdtester_pr):
-            return True
-        elif self.dut_port.is_snk(dut_pr) and self.pdtester_port.is_snk(pdtester_pr):
-            return True
-        else:
-            return False
+        for _ in range(self.PD_CONNECT_DELAY):
+            time.sleep(1)
+            # Get PDTester power role
+            pdtester_pr = self.pdtester_port.get_pd_state()
+            if self.dut_port.is_src(dut_pr) and self.pdtester_port.is_src(
+                    pdtester_pr):
+                return True
+            elif self.dut_port.is_snk(dut_pr) and self.pdtester_port.is_snk(
+                    pdtester_pr):
+                return True
+
+        return False
 
     def _test_power_swap_reject(self):
         """Verify that a power swap request is rejected
@@ -115,7 +118,7 @@
         Set the DUT power role to source and then suspend the DUT.
         Verify SRC-to-SNK power role request from the PD tester works,
         while SNK-to-SRC power role request fails. Note that this is
-        Chrome OS policy decision, not part of the PD spec.
+        a ChromeOS policy decision, not part of the PD spec.
 
         When DUT doesn't provide power in suspend, set DUT power role
         to sink, supend DUT and check if SNK-to-SRC power role request fails.
@@ -247,7 +250,7 @@
             # DUT is dualrole in dual role mode. Test power role swap
             # operation intiated both by the DUT and PDTester.
             success = 0
-            for attempt in xrange(self.POWER_SWAP_ITERATIONS):
+            for attempt in range(self.POWER_SWAP_ITERATIONS):
                 if attempt & 1:
                     direction = 'rx'
                 else:
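
The first hunk above replaces a single PD_CONNECT_DELAY sleep with a one-second poll that returns as soon as both ports report matching roles, so a fast swap no longer pays the full delay. The same idea as a generic helper (hypothetical, not an autotest API):

import time


def poll_until(condition, timeout_secs, interval_secs=1):
    """Call condition() every interval_secs; True as soon as it holds.

    Hypothetical helper mirroring the loop above: the worst case still waits
    timeout_secs, but a swap that completes early is detected on the next tick.
    """
    deadline = time.time() + timeout_secs
    while time.time() < deadline:
        if condition():
            return True
        time.sleep(interval_secs)
    return False

In the swap check above, condition() would compare the DUT and PDTester roles on each tick instead of once after the full delay.
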
diff --git a/server/site_tests/firmware_PDProtocol/control b/server/site_tests/firmware_PDProtocol/control
index 92d99f0..2b2d68c 100644
--- a/server/site_tests/firmware_PDProtocol/control
+++ b/server/site_tests/firmware_PDProtocol/control
@@ -4,14 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDProtocol"
 PURPOSE = "Verify PD protocol negotiation."
 CRITERIA = "This test will fail if PD is negotiated when running from USB image."
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDProtocol/control.ec_wp b/server/site_tests/firmware_PDProtocol/control.ec_wp
index 2a2117f..dec7336 100644
--- a/server/site_tests/firmware_PDProtocol/control.ec_wp
+++ b/server/site_tests/firmware_PDProtocol/control.ec_wp
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDProtocol.ec_wp"
 PURPOSE = "Verify PD protocol negotiation."
 CRITERIA = "This test will fail if PD is negotiated when running from USB image."
 ATTRIBUTES = "suite:faft_ec_fw_qual, suite:faft_ec_wp"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDProtocol/firmware_PDProtocol.py b/server/site_tests/firmware_PDProtocol/firmware_PDProtocol.py
index 7ae36c3..f6d8323 100644
--- a/server/site_tests/firmware_PDProtocol/firmware_PDProtocol.py
+++ b/server/site_tests/firmware_PDProtocol/firmware_PDProtocol.py
@@ -63,7 +63,8 @@
 
     def cleanup(self):
         """Cleanup the test"""
-        self.ensure_dev_internal_boot(self.original_dev_boot_usb)
+        if hasattr(self, 'original_dev_boot_usb'):
+            self.ensure_dev_internal_boot(self.original_dev_boot_usb)
         super(firmware_PDProtocol, self).cleanup()
 
     def check_if_pd_supported(self):
@@ -115,7 +116,7 @@
 
         return False
 
-    def run_once(self):
+    def run_once(self, host):
         """Main test logic"""
         # TODO(b/35573842): Refactor to use PDPortPartner to probe the port
         self.pdtester_port = 1 if 'servo_v4' in self.pdtester.servo_type else 0
@@ -126,7 +127,7 @@
 
         # Check servo_v4 is negotiated
         if self.pdtester_pd_utils.is_disconnected(self.pdtester_port):
-            raise error.TestFail('PD not connected')
+            raise error.TestNAError('PD not connected')
 
         # TODO(b:152148025): Directly set role as pdsnkdts might fail the
         # PD communication. In short term, we could use PR SWAP instead, and
@@ -135,11 +136,14 @@
         self.boot_to_recovery()
 
         # Check PD is not negotiated
-        if (not
-            self.pdtester_pd_utils.is_snk_discovery_state(self.pdtester_port)):
-            raise error.TestFail(
-                'Expect PD to be disabled, WP (HW/SW) %s/%s',
-                   self.hw_wp, self.sw_wp)
+        # We allow the chromebox/chromebase to enable PD in
+        # recovery mode.
+        if (host.get_board_type() != 'CHROMEBOX'
+                    and host.get_board_type() != 'CHROMEBASE'
+                    and not self.pdtester_pd_utils.is_snk_discovery_state(
+                            self.pdtester_port)):
+            raise error.TestFail('Expect PD to be disabled, WP (HW/SW) %s/%s' %
+                                 (self.hw_wp, self.sw_wp))
 
         # Check WP status. Only both SW/HW WP on should pass the test.
         if (not self.sw_wp) or ('off' in self.hw_wp):
diff --git a/server/site_tests/firmware_PDResetHard/control b/server/site_tests/firmware_PDResetHard/control
index d804238..0005899 100644
--- a/server/site_tests/firmware_PDResetHard/control
+++ b/server/site_tests/firmware_PDResetHard/control
@@ -4,15 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDResetHard"
 PURPOSE = "Servo based PD hard reset test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if hard reset does not complete"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDResetHard/control.dts b/server/site_tests/firmware_PDResetHard/control.dts
index 9262e56..d90685f 100644
--- a/server/site_tests/firmware_PDResetHard/control.dts
+++ b/server/site_tests/firmware_PDResetHard/control.dts
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDResetHard.dts"
 PURPOSE = "Servo based PD hard reset test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if hard reset does not complete"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDResetHard/control.dts_flip b/server/site_tests/firmware_PDResetHard/control.dts_flip
index 79983ce..ada3f85 100644
--- a/server/site_tests/firmware_PDResetHard/control.dts_flip
+++ b/server/site_tests/firmware_PDResetHard/control.dts_flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDResetHard.dts_flip"
 PURPOSE = "Servo based PD hard reset test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if hard reset does not complete"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDResetHard/control.flip b/server/site_tests/firmware_PDResetHard/control.flip
index 268356b..4dde1ca 100644
--- a/server/site_tests/firmware_PDResetHard/control.flip
+++ b/server/site_tests/firmware_PDResetHard/control.flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDResetHard.flip"
 PURPOSE = "Servo based PD hard reset test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if hard reset does not complete"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDResetHard/control.shutdown b/server/site_tests/firmware_PDResetHard/control.shutdown
index a468b91..dc75737 100644
--- a/server/site_tests/firmware_PDResetHard/control.shutdown
+++ b/server/site_tests/firmware_PDResetHard/control.shutdown
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDResetHard.shutdown"
 PURPOSE = "Servo based PD hard reset test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if hard reset does not complete"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDResetHard/firmware_PDResetHard.py b/server/site_tests/firmware_PDResetHard/firmware_PDResetHard.py
index d4ba44b..7e91c28 100644
--- a/server/site_tests/firmware_PDResetHard/firmware_PDResetHard.py
+++ b/server/site_tests/firmware_PDResetHard/firmware_PDResetHard.py
@@ -31,14 +31,14 @@
         @param port_pair: list of 2 connected PD devices
         """
         for dev in port_pair:
-            for _ in xrange(self.RESET_ITERATIONS):
+            for _ in range(self.RESET_ITERATIONS):
                 try:
                     time.sleep(self.PD_CONNECT_DELAY)
                     if dev.hard_reset() == False:
                         raise error.TestFail('Hard Reset Failed')
                     time.sleep(self.DELAY_BETWEEN_ITERATIONS)
                 except NotImplementedError:
-                    logging.warn('Device cant hard reset ... skipping')
+                    logging.warning('Device cant hard reset ... skipping')
                     break
 
     def initialize(self, host, cmdline_args, flip_cc=False, dts_mode=False,
@@ -92,7 +92,7 @@
                     swappable_dev = dev
                     break
             except NotImplementedError:
-                logging.warn('Power role swap not supported on the device')
+                logging.warning('Power role swap not supported on the device')
 
         if swappable_dev:
             try:
@@ -103,5 +103,5 @@
                 if not swappable_dev.pr_swap():
                     logging.error('Failed to swap power role to the original')
         else:
-            logging.warn('Device pair could not perform power role swap, '
+            logging.warning('Device pair could not perform power role swap, '
                          'ending test')
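
The firmware_PDResetHard hunk above is a straight Python 3 port: xrange becomes range and the removed logging.warn alias becomes logging.warning (the same change recurs in firmware_PDResetSoft below). A standalone sketch of the retry loop it updates, with reset_fn and the delay arguments as illustrative stand-ins for hard_reset()/soft_reset(), RESET_ITERATIONS and the test's timing constants (the real loop raises error.TestFail rather than RuntimeError):

    import logging
    import time

    def reset_with_retries(reset_fn, iterations, connect_delay_s, between_s=0):
        # Standalone sketch of the retry loop ported to Python 3 in the hunk
        # above; reset_fn and the delays stand in for hard_reset()/soft_reset()
        # and the test's timing constants.
        for _ in range(iterations):
            try:
                time.sleep(connect_delay_s)
                if reset_fn() is False:
                    raise RuntimeError('Reset failed')
                time.sleep(between_s)
            except NotImplementedError:
                logging.warning('Device cannot reset ... skipping')
                break

    # Example: a device that reports the capability is missing.
    def unsupported_reset():
        raise NotImplementedError
    reset_with_retries(unsupported_reset, iterations=3, connect_delay_s=0)
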
diff --git a/server/site_tests/firmware_PDResetSoft/control b/server/site_tests/firmware_PDResetSoft/control
index 1c0cf0f..811af4c 100644
--- a/server/site_tests/firmware_PDResetSoft/control
+++ b/server/site_tests/firmware_PDResetSoft/control
@@ -4,15 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDResetSoft"
 PURPOSE = "Servo based PD soft reset test"
 ATTRIBUTES = "suite:faft_pd"
+DEPENDENCIES = "servo_state:WORKING"
 CRITERIA = "This test will fail if soft reset does not complete"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDResetSoft/control.dts b/server/site_tests/firmware_PDResetSoft/control.dts
index 8b3d1f3..2ae5c3c 100644
--- a/server/site_tests/firmware_PDResetSoft/control.dts
+++ b/server/site_tests/firmware_PDResetSoft/control.dts
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDResetSoft.dts"
 PURPOSE = "Servo based PD soft reset test"
 ATTRIBUTES = "suite:faft_pd"
+DEPENDENCIES = "servo_state:WORKING"
 CRITERIA = "This test will fail if soft reset does not complete"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDResetSoft/control.dts_flip b/server/site_tests/firmware_PDResetSoft/control.dts_flip
index 82bc5d8..4987763 100644
--- a/server/site_tests/firmware_PDResetSoft/control.dts_flip
+++ b/server/site_tests/firmware_PDResetSoft/control.dts_flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDResetSoft.dts_flip"
 PURPOSE = "Servo based PD soft reset test"
 ATTRIBUTES = "suite:faft_pd"
+DEPENDENCIES = "servo_state:WORKING"
 CRITERIA = "This test will fail if soft reset does not complete"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDResetSoft/control.flip b/server/site_tests/firmware_PDResetSoft/control.flip
index 3669bcc..f2df5f1 100644
--- a/server/site_tests/firmware_PDResetSoft/control.flip
+++ b/server/site_tests/firmware_PDResetSoft/control.flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDResetSoft.flip"
 PURPOSE = "Servo based PD soft reset test"
 ATTRIBUTES = "suite:faft_pd"
+DEPENDENCIES = "servo_state:WORKING"
 CRITERIA = "This test will fail if soft reset does not complete"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDResetSoft/control.shutdown b/server/site_tests/firmware_PDResetSoft/control.shutdown
index 1c74fdc..85742ff 100644
--- a/server/site_tests/firmware_PDResetSoft/control.shutdown
+++ b/server/site_tests/firmware_PDResetSoft/control.shutdown
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDResetSoft.shutdown"
 PURPOSE = "Servo based PD soft reset test"
 ATTRIBUTES = "suite:faft_pd"
+DEPENDENCIES = "servo_state:WORKING"
 CRITERIA = "This test will fail if soft reset does not complete"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDResetSoft/firmware_PDResetSoft.py b/server/site_tests/firmware_PDResetSoft/firmware_PDResetSoft.py
index e524638..f6e61d4 100644
--- a/server/site_tests/firmware_PDResetSoft/firmware_PDResetSoft.py
+++ b/server/site_tests/firmware_PDResetSoft/firmware_PDResetSoft.py
@@ -30,13 +30,13 @@
         @param port_pair: list of 2 connected PD devices
         """
         for dev in port_pair:
-            for _ in xrange(self.RESET_ITERATIONS):
+            for _ in range(self.RESET_ITERATIONS):
                 try:
                     time.sleep(self.PD_CONNECT_DELAY)
                     if dev.soft_reset() == False:
                         raise error.TestFail('Soft Reset Failed')
                 except NotImplementedError:
-                    logging.warn('Device cant soft reset ... skipping')
+                    logging.warning('Device cant soft reset ... skipping')
                     break
 
     def initialize(self, host, cmdline_args, flip_cc=False, dts_mode=False,
@@ -91,7 +91,7 @@
                     swappable_dev = dev
                     break
             except NotImplementedError:
-                logging.warn('Power role swap not supported on the device')
+                logging.warning('Power role swap not supported on the device')
 
         if swappable_dev:
             try:
@@ -102,5 +102,5 @@
                 if not swappable_dev.pr_swap():
                     logging.error('Failed to swap power role to the original')
         else:
-            logging.warn('Device pair could not perform power role swap, '
+            logging.warning('Device pair could not perform power role swap, '
                          'ending test')
diff --git a/server/site_tests/firmware_PDTrySrc/control b/server/site_tests/firmware_PDTrySrc/control
index 60ba8b3..485f281 100644
--- a/server/site_tests/firmware_PDTrySrc/control
+++ b/server/site_tests/firmware_PDTrySrc/control
@@ -4,15 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDTrySrc"
 PURPOSE = "Servo based PD Try.SRC protocol test"
 ATTRIBUTES = "suite:faft_pd"
+DEPENDENCIES = "servo_state:WORKING"
 CRITERIA = "This test will fail if a power swap gives unexpected results"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDTrySrc/control.flip b/server/site_tests/firmware_PDTrySrc/control.flip
index 9e9ce5d..9595d08 100644
--- a/server/site_tests/firmware_PDTrySrc/control.flip
+++ b/server/site_tests/firmware_PDTrySrc/control.flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDTrySrc.flip"
 PURPOSE = "Servo based PD Try.SRC protocol test"
 ATTRIBUTES = "suite:faft_pd"
+DEPENDENCIES = "servo_state:WORKING"
 CRITERIA = "This test will fail if a power swap gives unexpected results"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDTrySrc/firmware_PDTrySrc.py b/server/site_tests/firmware_PDTrySrc/firmware_PDTrySrc.py
index bc87299..870dedf 100644
--- a/server/site_tests/firmware_PDTrySrc/firmware_PDTrySrc.py
+++ b/server/site_tests/firmware_PDTrySrc/firmware_PDTrySrc.py
@@ -50,7 +50,7 @@
         stats = [0, 0]
         random.seed()
         # Try N disconnect/connects
-        for attempt in xrange(self.CONNECT_ITERATIONS):
+        for attempt in range(self.CONNECT_ITERATIONS):
             try:
                 # Disconnect time from 1 to 1.5 seconds
                 disc_time = self.PD_DISCONNECT_TIME + random.random() / 2
@@ -114,7 +114,7 @@
         # sequence does not affect the SRC/SNK connection. PDTester provides
         # a 'fakedisconnect' feature which more closely resembles unplugging
         # and replugging a Type C cable.
-        for side in xrange(len(port_pair)):
+        for side in range(len(port_pair)):
             original_drp[side] = port_pair[side].drp_get()
             if port_pair[side].is_pdtester:
                 # Identify PDTester and DUT device
@@ -130,11 +130,15 @@
                 except NotImplementedError:
                     raise error.TestFail('Both devices must support DRP')
 
+            # Setting DRP on ServoV4 ('usbc_action drp') triggers reconnect
+            # Wait some time to ensure that no operation will occur during test
+            time.sleep(port_pair[p_idx].utils.CONNECT_TIME)
+
             # Check to see if DUT supports Try.SRC mode
             try_src_supported = port_pair[d_idx].try_src(True)
 
             if not try_src_supported:
-                logging.warn('DUT does not support Try.SRC feature. '
+                logging.warning('DUT does not support Try.SRC feature. '
                              'Skip running Try.SRC-enabled test case.')
             else:
                 # Run disconnect/connect sequence with Try.SRC enabled
@@ -179,5 +183,5 @@
             # Reenable Try.SRC mode
             port_pair[d_idx].try_src(True)
             # Restore the original dualrole settings
-            for side in xrange(len(port_pair)):
+            for side in range(len(port_pair)):
                 port_pair[side].drp_set(original_drp[side])
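
The firmware_PDTrySrc hunk above inserts a settle delay because setting dual-role ('usbc_action drp') on servo_v4 drops and re-negotiates the PD connection; starting the Try.SRC checks immediately would race that reconnect. A minimal sketch of the set-then-settle pattern, where set_drp_fn, mode and settle_s are illustrative stand-ins for drp_set() and the CONNECT_TIME constant used by the test:

    import time

    def set_drp_and_settle(set_drp_fn, mode, settle_s):
        # Apply a dual-role setting, then wait out the reconnect it triggers.
        set_drp_fn(mode)
        # Changing DRP on servo_v4 drops and re-negotiates the PD contract,
        # so let the link settle before sending further PD commands.
        time.sleep(settle_s)

    set_drp_and_settle(lambda mode: print('drp ->', mode), 'on', settle_s=0.1)
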
diff --git a/server/site_tests/firmware_PDVbusRequest/control b/server/site_tests/firmware_PDVbusRequest/control
index 640ab89..d53d3ec 100644
--- a/server/site_tests/firmware_PDVbusRequest/control
+++ b/server/site_tests/firmware_PDVbusRequest/control
@@ -4,15 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDVbusRequest"
 PURPOSE = "Servo based PD VBUS voltage level test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if actual voltage does not meet expected"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDVbusRequest/control.dts b/server/site_tests/firmware_PDVbusRequest/control.dts
index 01b112d..2779629 100644
--- a/server/site_tests/firmware_PDVbusRequest/control.dts
+++ b/server/site_tests/firmware_PDVbusRequest/control.dts
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDVbusRequest.dts"
 PURPOSE = "Servo based PD VBUS voltage level test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if actual voltage does not meet expected"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDVbusRequest/control.dts_flip b/server/site_tests/firmware_PDVbusRequest/control.dts_flip
index 7953e81..7a38cdf 100644
--- a/server/site_tests/firmware_PDVbusRequest/control.dts_flip
+++ b/server/site_tests/firmware_PDVbusRequest/control.dts_flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDVbusRequest.dts_flip"
 PURPOSE = "Servo based PD VBUS voltage level test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if actual voltage does not meet expected"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDVbusRequest/control.flip b/server/site_tests/firmware_PDVbusRequest/control.flip
index 9c8d286..db537fe9 100644
--- a/server/site_tests/firmware_PDVbusRequest/control.flip
+++ b/server/site_tests/firmware_PDVbusRequest/control.flip
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDVbusRequest.flip"
 PURPOSE = "Servo based PD VBUS voltage level test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if actual voltage does not meet expected"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDVbusRequest/control.shutdown b/server/site_tests/firmware_PDVbusRequest/control.shutdown
index 5948bb2..7f22a5d 100644
--- a/server/site_tests/firmware_PDVbusRequest/control.shutdown
+++ b/server/site_tests/firmware_PDVbusRequest/control.shutdown
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDVbusRequest.shutdown"
 PURPOSE = "Servo based PD VBUS voltage level test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if actual voltage does not meet expected"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDVbusRequest/control.suspend b/server/site_tests/firmware_PDVbusRequest/control.suspend
index 6c3b5fe..48e3cb3 100644
--- a/server/site_tests/firmware_PDVbusRequest/control.suspend
+++ b/server/site_tests/firmware_PDVbusRequest/control.suspend
@@ -4,15 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_PDVbusRequest.suspend"
 PURPOSE = "Servo based PD VBUS voltage level test"
 ATTRIBUTES = "suite:faft_pd"
 CRITERIA = "This test will fail if actual voltage does not meet expected"
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test checks:
diff --git a/server/site_tests/firmware_PDVbusRequest/firmware_PDVbusRequest.py b/server/site_tests/firmware_PDVbusRequest/firmware_PDVbusRequest.py
index 5a7fd16..9dff7a1 100644
--- a/server/site_tests/firmware_PDVbusRequest/firmware_PDVbusRequest.py
+++ b/server/site_tests/firmware_PDVbusRequest/firmware_PDVbusRequest.py
@@ -64,6 +64,22 @@
             result = 'PASS'
         return result, result_str
 
+    def _is_batt_full(self):
+        """Check if battery is full
+
+        @returns: True if battery is full, False otherwise
+        """
+        self.ec.update_battery_info()
+        return not self.ec.get_battery_charging_allowed(print_result=False)
+
+    def _enable_dps(self, en):
+        """Enable/disable Dynamic PDO Selection
+
+        @param en: a bool, True for enable, disable otherwise.
+
+        """
+        self.usbpd.send_command('dps %s' % ('en' if en else 'dis'))
+
     def initialize(self, host, cmdline_args, flip_cc=False, dts_mode=False,
                    init_power_mode=None):
         super(firmware_PDVbusRequest, self).initialize(host, cmdline_args)
@@ -73,12 +89,25 @@
         self.setup_pdtester(flip_cc, dts_mode)
         # Only run in normal mode
         self.switcher.setup_mode('normal')
+
+        self.shutdown_power_mode = False
         if init_power_mode:
             # Set the DUT to suspend or shutdown mode
             self.set_ap_off_power_mode(init_power_mode)
+            if init_power_mode == "shutdown":
+                self.shutdown_power_mode = True
+
         self.usbpd.send_command('chan 0')
+        logging.info('Disallow PR_SWAP request from DUT')
+        self.pdtester.allow_pr_swap(False)
+        # Disable dynamic PDO selection for voltage testing
+        self._enable_dps(False)
 
     def cleanup(self):
+        logging.info('Allow PR_SWAP request from DUT')
+        self.pdtester.allow_pr_swap(True)
+        # Re-enable DPS
+        self._enable_dps(True)
         # Set back to the max 20V SRC mode at the end.
         self.pdtester.charge(self.pdtester.USBC_MAX_VOLTAGE)
 
@@ -111,11 +140,65 @@
             raise error.TestFail("pd connection not found")
 
         dut_voltage_limit = self.faft_config.usbc_input_voltage_limit
+        dut_power_voltage_limit = dut_voltage_limit
+        dut_shutdown_and_full_batt_voltage_limit = (
+                self.faft_config.usbc_voltage_on_shutdown_and_full_batt)
+
         is_override = self.faft_config.charger_profile_override
         if is_override:
             logging.info('*** Custom charger profile takes over, which may '
                          'cause voltage-not-matched. It is OK to fail. *** ')
 
+        # Test will expect reduced voltage when battery is full and...:
+        # 1. We are running 'shutdown' variant of PDVbusRequest test (indicated
+        #    by self.shutdown_power_mode)
+        # 2. EC has battery capability
+        # 3. 'dut_shutdown_and_full_batt_voltage_limit' value will be less than
+        #    'dut_voltage_limit'. By default reduced voltage is set to maximum
+        #    voltage which means that no limit applies. Every board needs to
+        #    override this to correct value (most likely 5 or 9 volts)
+        is_voltage_reduced_if_batt_full = (
+                self.shutdown_power_mode
+                and self.check_ec_capability(['battery']) and
+                dut_shutdown_and_full_batt_voltage_limit < dut_voltage_limit)
+        if is_voltage_reduced_if_batt_full:
+            logging.info(
+                    '*** This DUT may reduce input voltage to %d volts '
+                    'when battery is full. ***',
+                    dut_shutdown_and_full_batt_voltage_limit)
+
+        # Obtain voltage limit due to maximum charging power. Note that this
+        # voltage limit applies only when EC follows the default policy. There
+        # are other policies like PREFER_LOW_VOLTAGE or PREFER_HIGH_VOLTAGE but
+        # they are not implemented in this test.
+        try:
+            srccaps = self.pdtester.get_adapter_source_caps()
+            dut_max_charging_power = self.faft_config.max_charging_power
+            selected_voltage = 0
+            selected_power = 0
+            for (mv, ma) in srccaps:
+                voltage = mv / 1000.0
+                current = ma / 1000.0
+                power = voltage * current
+
+                if (voltage > dut_voltage_limit or power <= selected_power
+                            or power > dut_max_charging_power):
+                    continue
+                selected_voltage = voltage
+                selected_power = power
+
+            if selected_voltage < dut_power_voltage_limit:
+                dut_power_voltage_limit = selected_voltage
+                logging.info(
+                        'EC may request maximum %dV due to adapter\'s max '
+                        'supported power and DUT\'s power constraints. DUT\'s '
+                        'max charging power %dW. Selected charging power %dW',
+                        dut_power_voltage_limit, dut_max_charging_power,
+                        selected_power)
+        except self.pdtester.PDTesterError:
+            logging.warning('Unable to get charging voltages and currents. '
+                         'Test may fail on high voltages.')
+
         pdtester_failures = []
         logging.info('Start PDTester initiated tests')
         charging_voltages = self.pdtester.get_charging_voltages()
@@ -141,9 +224,13 @@
                 expected_vbus_voltage = (self.USBC_SINK_VOLTAGE
                         if self.get_power_state() == 'S0' else 0)
                 ok_to_fail = False
+            elif (is_voltage_reduced_if_batt_full and self._is_batt_full()):
+                expected_vbus_voltage = min(
+                        voltage, dut_shutdown_and_full_batt_voltage_limit)
+                ok_to_fail = False
             else:
                 expected_vbus_voltage = min(voltage, dut_voltage_limit)
-                ok_to_fail = is_override
+                ok_to_fail = is_override or voltage > dut_power_voltage_limit
 
             result, result_str = self._compare_vbus(expected_vbus_voltage,
                                                     ok_to_fail)
@@ -162,6 +249,13 @@
             number = len(pdtester_failures)
             raise error.TestFail('PDTester failed %d times' % number)
 
+        if (is_voltage_reduced_if_batt_full and self._is_batt_full()):
+            logging.warning('This DUT reduces input voltage when chipset is in '
+                         'G3/S5 and battery is full. DUT initiated tests '
+                         'will be skipped. Please discharge battery to level '
+                         'that allows charging and run this test again')
+            return
+
         # The DUT must be in SNK mode for the pd <port> dev <voltage>
         # command to have an effect.
         if not self.dut_port.is_snk():
@@ -179,14 +273,17 @@
                              v, dut_voltage_limit)
                 continue
             if v not in charging_voltages:
-                logging.info('Target = %02dV: skipped, voltage unsupported, '
-                             'update hdctools and servo_v4 firmware', v)
+                logging.info(
+                        'Target = %02dV: skipped, voltage unsupported, '
+                        'update hdctools and servo_v4 firmware '
+                        'or attach a different charger', v)
                 continue
             # Build 'pd <port> dev <voltage> command
             cmd = 'pd %d dev %d' % (self.dut_port.port, v)
             self.dut_port.utils.send_pd_command(cmd)
             time.sleep(self.PD_SETTLE_DELAY)
-            result, result_str = self._compare_vbus(v, ok_to_fail=is_override)
+            ok_to_fail = is_override or v > dut_power_voltage_limit
+            result, result_str = self._compare_vbus(v, ok_to_fail)
             logging.info('%s, %s', result_str, result)
             if result == 'FAIL':
                 dut_failures.append(result_str)
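
The firmware_PDVbusRequest hunks above derive a second voltage ceiling from the adapter's source capabilities: among PDOs that fit the DUT's input-voltage limit and maximum charging power, the EC's default policy is expected to pick the highest-power one, so requests above that voltage are allowed to fail. A standalone sketch of that selection loop, with illustrative names (select_charging_voltage and the example srccaps list are not part of the test):

    def select_charging_voltage(srccaps, voltage_limit_v, max_power_w):
        # srccaps is a list of (millivolts, milliamps) pairs as returned by
        # the PD tester; mirror the selection loop added in the hunk above.
        selected_voltage = 0.0
        selected_power = 0.0
        for mv, ma in srccaps:
            voltage = mv / 1000.0
            power = voltage * (ma / 1000.0)
            # Skip PDOs the DUT cannot take: too high a voltage, more power
            # than it may draw, or no improvement over the current pick.
            if (voltage > voltage_limit_v or power > max_power_w
                        or power <= selected_power):
                continue
            selected_voltage = voltage
            selected_power = power
        return selected_voltage, selected_power

    # Example: a 5V/3A, 9V/3A, 15V/3A, 20V/5A adapter against a 20V, 60W DUT
    # picks 15V/45W, so requests above 15V become ok_to_fail.
    print(select_charging_voltage([(5000, 3000), (9000, 3000),
                                   (15000, 3000), (20000, 5000)], 20, 60))
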
diff --git a/server/site_tests/firmware_RecoveryButton/control b/server/site_tests/firmware_RecoveryButton/control
index 6d01f55..c4dd63e 100644
--- a/server/site_tests/firmware_RecoveryButton/control
+++ b/server/site_tests/firmware_RecoveryButton/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_RecoveryButton"
 PURPOSE = "Press recovery button and check for recovery boot"
 CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv2, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv2, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test emulates
 recovery button pressed and reboots. It then triggers recovery mode by
 unplugging and plugging in the USB disk and checks success of it.
diff --git a/server/site_tests/firmware_RecoveryButton/control.dev b/server/site_tests/firmware_RecoveryButton/control.dev
index 977ac40..2a4e199 100644
--- a/server/site_tests/firmware_RecoveryButton/control.dev
+++ b/server/site_tests/firmware_RecoveryButton/control.dev
@@ -4,20 +4,20 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_RecoveryButton.dev"
 PURPOSE = "Press recovery button and check for recovery boot"
 CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_dev, suite:faft_lv2, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_dev, suite:faft_lv2, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test emulates
 recovery button pressed and reboots. It then triggers recovery mode by
 unplugging and plugging in the USB disk and checks success of it.
diff --git a/server/site_tests/firmware_RecoveryButton/firmware_RecoveryButton.py b/server/site_tests/firmware_RecoveryButton/firmware_RecoveryButton.py
index ca4c635..7476762 100644
--- a/server/site_tests/firmware_RecoveryButton/firmware_RecoveryButton.py
+++ b/server/site_tests/firmware_RecoveryButton/firmware_RecoveryButton.py
@@ -13,7 +13,7 @@
     """
     Servo based recovery button test.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image --test"). On runtime, this test emulates
     recovery button pressed and reboots. It then triggers recovery mode in
     two cases: (1) plug in the USB disk before power-on (2) plug in the USB
@@ -63,8 +63,7 @@
         self.check_state((self.checkers.crossystem_checker, {
                 'mainfw_type': 'developer' if dev_mode else 'normal',
         }))
-        self.switcher.reboot_to_mode(
-                to_mode='rec', from_mode='dev' if dev_mode else 'normal')
+        self.switcher.reboot_to_mode(to_mode="rec")
 
         logging.info("Expect a recovery boot from the USB stick.")
         self.check_recovery_state()
diff --git a/server/site_tests/firmware_RecoveryCacheBootKeys/control b/server/site_tests/firmware_RecoveryCacheBootKeys/control
index 78995b1..93b4450 100644
--- a/server/site_tests/firmware_RecoveryCacheBootKeys/control
+++ b/server/site_tests/firmware_RecoveryCacheBootKeys/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_RecoveryCacheBootKeys"
 PURPOSE = "Servo based RECOVERY_MRC_CACHE boot tests"
 CRITERIA = "This test will fail if the DUT doesn't use the cache during recovery boot."
 ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv2"
-DEPENDENCIES = "servo_state:WORKING"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 1
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test ensures that when booting to recovery mode the device will use the
diff --git a/server/site_tests/firmware_RecoveryCacheBootKeys/control.dev b/server/site_tests/firmware_RecoveryCacheBootKeys/control.dev
index 0488c08..01f4aa9 100644
--- a/server/site_tests/firmware_RecoveryCacheBootKeys/control.dev
+++ b/server/site_tests/firmware_RecoveryCacheBootKeys/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_RecoveryCacheBootKeys.dev"
 PURPOSE = "Servo based RECOVERY_MRC_CACHE boot tests"
 CRITERIA = "This test will fail if the DUT doesn't use the cache during recovery boot."
 ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_lv2"
-DEPENDENCIES = "servo_state:WORKING"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 1
+PY_VERSION = 3
 
 DOC = """
 This test ensures that when booting to recovery mode the device will use the
diff --git a/server/site_tests/firmware_RecoveryCacheBootKeys/firmware_RecoveryCacheBootKeys.py b/server/site_tests/firmware_RecoveryCacheBootKeys/firmware_RecoveryCacheBootKeys.py
index 82783e1..ad832ab 100644
--- a/server/site_tests/firmware_RecoveryCacheBootKeys/firmware_RecoveryCacheBootKeys.py
+++ b/server/site_tests/firmware_RecoveryCacheBootKeys/firmware_RecoveryCacheBootKeys.py
@@ -35,7 +35,8 @@
 
         self.client = host
         self.dev_mode = dev_mode
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=True, host=False)
 
     def cleanup(self):
diff --git a/server/site_tests/firmware_RecoveryStress/control.rec b/server/site_tests/firmware_RecoveryStress/control.rec
index f9efd77..80504eb 100644
--- a/server/site_tests/firmware_RecoveryStress/control.rec
+++ b/server/site_tests/firmware_RecoveryStress/control.rec
@@ -8,11 +8,12 @@
 NAME = "firmware_RecoveryStress.rec"
 PURPOSE = "Servo based recovery stress test"
 CRITERIA = "This test will fail if DUT failed to boot into USB OS"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks the following FAFT hardware requirement:
diff --git a/server/site_tests/firmware_RecoveryStress/control.rec_force_mrc b/server/site_tests/firmware_RecoveryStress/control.rec_force_mrc
index 370ee43..bd856db 100644
--- a/server/site_tests/firmware_RecoveryStress/control.rec_force_mrc
+++ b/server/site_tests/firmware_RecoveryStress/control.rec_force_mrc
@@ -8,11 +8,12 @@
 NAME = "firmware_RecoveryStress.rec_force_mrc"
 PURPOSE = "Servo based recovery stress test"
 CRITERIA = "This test will fail if DUT failed to boot into USB OS"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test checks the following FAFT hardware requirement:
diff --git a/server/site_tests/firmware_RecoveryStress/firmware_RecoveryStress.py b/server/site_tests/firmware_RecoveryStress/firmware_RecoveryStress.py
index abfb527..abd61d0 100644
--- a/server/site_tests/firmware_RecoveryStress/firmware_RecoveryStress.py
+++ b/server/site_tests/firmware_RecoveryStress/firmware_RecoveryStress.py
@@ -67,7 +67,7 @@
             elif not self.cache_exist():
                 raise error.TestNAError('No RECOVERY_MRC_CACHE was found on DUT.')
 
-        for i in xrange(reboot_iterations):
+        for i in range(reboot_iterations):
             logging.info('======== RUNNING RECOVERY BOOT ITERATION %d/%d '
                     '========', i+1, reboot_iterations)
             self.switcher.reboot_to_mode(to_mode=self.mode)
diff --git a/server/site_tests/firmware_RollbackFirmware/control b/server/site_tests/firmware_RollbackFirmware/control
index 930a21f..b546f14 100644
--- a/server/site_tests/firmware_RollbackFirmware/control
+++ b/server/site_tests/firmware_RollbackFirmware/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_RollbackFirmware"
 PURPOSE = "Servo based firmware rollback test."
 CRITERIA = "This test will fail if firmware rollback doesn't fail verification."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test rollbacks
 firmware A and results firmware B boot. It then rollbacks firmware B and
 results recovery boot.
diff --git a/server/site_tests/firmware_RollbackFirmware/control.dev b/server/site_tests/firmware_RollbackFirmware/control.dev
index 5bf30e4..cefe2de 100644
--- a/server/site_tests/firmware_RollbackFirmware/control.dev
+++ b/server/site_tests/firmware_RollbackFirmware/control.dev
@@ -4,20 +4,20 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_RollbackFirmware.dev"
 PURPOSE = "Servo based firmware rollback test."
 CRITERIA = "This test will fail if firmware rollback doesn't fail verification."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test rollbacks
 firmware A and results firmware B boot. It then rollbacks firmware B and
 results recovery boot.
diff --git a/server/site_tests/firmware_RollbackFirmware/firmware_RollbackFirmware.py b/server/site_tests/firmware_RollbackFirmware/firmware_RollbackFirmware.py
index 81be8f7..703b3df 100644
--- a/server/site_tests/firmware_RollbackFirmware/firmware_RollbackFirmware.py
+++ b/server/site_tests/firmware_RollbackFirmware/firmware_RollbackFirmware.py
@@ -12,7 +12,7 @@
     """
     Servo based firmware rollback test.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image --test"). On runtime, this test rollbacks
     firmware A and results firmware B boot. It then rollbacks firmware B and
     results recovery boot.
@@ -40,12 +40,18 @@
         """Runs a single iteration of the test."""
         logging.info("Rollback firmware A.")
         self.check_state((self.checkers.fw_tries_checker, 'A'))
-        self.faft_client.bios.move_version_backward('a')
+        version_a = self.faft_client.bios.get_version('a')
+        logging.info("Change A version from %d to %d.", version_a,
+                     version_a - 1)
+        self.faft_client.bios.set_version('a', version_a - 1)
         self.switcher.mode_aware_reboot()
 
         logging.info("Expected firmware B boot and rollback firmware B.")
         self.check_state((self.checkers.fw_tries_checker, ('B', False)))
-        self.faft_client.bios.move_version_backward('b')
+        version_b = self.faft_client.bios.get_version('b')
+        logging.info("Change B version from %d to %d.", version_b,
+                     version_b - 1)
+        self.faft_client.bios.set_version('b', version_b - 1)
 
         # Older devices (without BROKEN screen) didn't wait for removal in
         # dev mode. Make sure the USB key is not plugged in so they won't
@@ -62,8 +68,10 @@
                                 vboot.RECOVERY_REASON['RO_INVALID_RW'],
                                 vboot.RECOVERY_REASON['RW_FW_ROLLBACK']),
                            }))
-        self.faft_client.bios.move_version_forward('a')
-        self.faft_client.bios.move_version_forward('b')
+        logging.info("Restore version of firmware A/B to %d/%d.", version_a,
+                     version_b)
+        self.faft_client.bios.set_version('a', version_a)
+        self.faft_client.bios.set_version('b', version_b)
         self.switcher.mode_aware_reboot()
 
         expected_slot = 'B' if self.fw_vboot2 else 'A'
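
The firmware_RollbackFirmware hunks above replace the opaque move_version_backward/forward RPCs with explicit get_version/set_version calls, so the log records the exact version transitions and the original values can be restored afterwards. A standalone sketch of that bookkeeping, where rollback_and_restore and the dict-backed example are illustrative and the real test reboots between the steps:

    import logging

    def rollback_and_restore(get_version, set_version, section):
        # get_version/set_version stand in for the faft_client.bios RPCs
        # used in the hunk above; this only sketches the bookkeeping.
        original = get_version(section)
        logging.info('Change %s version from %d to %d', section, original,
                     original - 1)
        set_version(section, original - 1)
        return lambda: set_version(section, original)

    # Example with a dict standing in for the BIOS state:
    versions = {'a': 5, 'b': 5}
    restore_a = rollback_and_restore(versions.get, versions.__setitem__, 'a')
    assert versions['a'] == 4
    restore_a()
    assert versions['a'] == 5
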
diff --git a/server/site_tests/firmware_RollbackKernel/control b/server/site_tests/firmware_RollbackKernel/control
index da91329..d19e1d6 100644
--- a/server/site_tests/firmware_RollbackKernel/control
+++ b/server/site_tests/firmware_RollbackKernel/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_RollbackKernel"
 PURPOSE = "Servo based kernel rollback test."
 CRITERIA = "This test will fail if kernel rollback doesn't fail verification."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). In normal mode, this test rollbacks
 kernel A and results kernel B boot. It then rollbacks kernel B and
 results recovery boot. In developer mode, the firmware ignores kernel
diff --git a/server/site_tests/firmware_RollbackKernel/control.dev b/server/site_tests/firmware_RollbackKernel/control.dev
index af8e5dd..d03b698 100644
--- a/server/site_tests/firmware_RollbackKernel/control.dev
+++ b/server/site_tests/firmware_RollbackKernel/control.dev
@@ -4,20 +4,20 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_RollbackKernel.dev"
 PURPOSE = "Servo based kernel rollback test."
 CRITERIA = "This test will fail if kernel rollback doesn't fail verification."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv3, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). In normal mode, this test rollbacks
 kernel A and results kernel B boot. It then rollbacks kernel B and
 results recovery boot. In developer mode, the firmware ignores kernel
diff --git a/server/site_tests/firmware_RollbackKernel/firmware_RollbackKernel.py b/server/site_tests/firmware_RollbackKernel/firmware_RollbackKernel.py
index 085f016..e016349 100644
--- a/server/site_tests/firmware_RollbackKernel/firmware_RollbackKernel.py
+++ b/server/site_tests/firmware_RollbackKernel/firmware_RollbackKernel.py
@@ -12,7 +12,7 @@
     """
     Servo based kernel rollback test.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image --test"). In normal mode, this test rollbacks
     kernel A and results kernel B boot. It then rollbacks kernel B and
     results recovery boot. In developer mode, the firmware ignores kernel
diff --git a/server/site_tests/firmware_SelfSignedBoot/control b/server/site_tests/firmware_SelfSignedBoot/control
index 37001ae..977de3c 100644
--- a/server/site_tests/firmware_SelfSignedBoot/control
+++ b/server/site_tests/firmware_SelfSignedBoot/control
@@ -4,30 +4,31 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_SelfSignedBoot"
 PURPOSE = "Servo based developer mode boot only test to Self signed Kernels."
 CRITERIA = """
 Prerequirement is as follow:
 1. This test should run in Dev mode.
 2. Enable dev_boot_usb and dev_boot_signed_only.
-3. A USB disk should be plugged-in, which contains a Chrome OS Test Image.
+3. A USB disk should be plugged-in, which contains a ChromeOS test image.
 
 This test will fail if one of the following conditions is met:
 1. Ctrl-U boots to USB image with recovery keys.
 2. Enabling rec mode, if it doesn't boot to USB image.
 3. Ctrl-U doesn't boot to USB image after it is resigned with SSD keys.

 """
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv2, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv2, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image test"). On runtime, this test first switches
 DUT to developer mode. When dev_boot_signed_only=1 and dev_boot_usb=1 and,
 pressing Ctrl-U on developer screen should not boot the USB disk. On resigning
diff --git a/server/site_tests/firmware_SelfSignedBoot/firmware_SelfSignedBoot.py b/server/site_tests/firmware_SelfSignedBoot/firmware_SelfSignedBoot.py
index 59bc4f6..a2b11bf 100644
--- a/server/site_tests/firmware_SelfSignedBoot/firmware_SelfSignedBoot.py
+++ b/server/site_tests/firmware_SelfSignedBoot/firmware_SelfSignedBoot.py
@@ -13,7 +13,7 @@
     """
     Servo based developer mode boot only test to Self signed Kernels.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by 'build_image test'). On runtime, this test first switches
     DUT to dev mode. When dev_boot_usb=1 and dev_boot_signed_only=1, pressing
     Ctrl-U on developer screen should not boot the USB disk(recovery mode boot
diff --git a/server/site_tests/firmware_SetSerialNumber/control b/server/site_tests/firmware_SetSerialNumber/control
index 5f8170d..2e61a9e 100644
--- a/server/site_tests/firmware_SetSerialNumber/control
+++ b/server/site_tests/firmware_SetSerialNumber/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_SetSerialNumber"
 PURPOSE = "Servo based test to set serial number in firmware."
 CRITERIA = """
@@ -14,11 +14,13 @@
 * Flash write protect is not enabled
 """
 ATTRIBUTES = ""
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 1
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 Test that setting the serial number in firmware during an onsite RMA works.
diff --git a/server/site_tests/firmware_SetSerialNumber/firmware_SetSerialNumber.py b/server/site_tests/firmware_SetSerialNumber/firmware_SetSerialNumber.py
index 2d6d338..343cd66 100644
--- a/server/site_tests/firmware_SetSerialNumber/firmware_SetSerialNumber.py
+++ b/server/site_tests/firmware_SetSerialNumber/firmware_SetSerialNumber.py
@@ -85,7 +85,7 @@
         self.switcher.wait_for_client()
 
         # Check that device is no longer in dev mode
-        self.checkers.mode_checker('normal')
+        self.check_state((self.checkers.mode_checker, 'normal'))
 
         # Check that serial_number is correctly set
         result = self.faft_client.system.run_shell_command_get_output(
@@ -104,8 +104,8 @@
         if ('is disabled' in result or
                 'start=0x00000000' in result or
                 'len=0x00000000' in result):
-           raise error.TestFail('Expected write protection to be enabled '
-                                'but output was:\n\n%s' % result)
+            raise error.TestFail('Expected write protection to be enabled '
+                                 'but output was:\n\n%s' % result)
 
     def cleanup(self):
         self.servo.set_nocheck('fw_wp_state', 'reset')
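
The firmware_SetSerialNumber hunk above replaces a bare mode_checker('normal') call, whose boolean result was silently discarded, with self.check_state((checker, arg)), which fails the test when the checker returns False. A minimal standalone sketch of that pattern (check_state here is a simplified stand-in for FirmwareTest's method, not its real implementation):

    def check_state(checker_and_args):
        # Run the checker and fail loudly instead of silently discarding
        # its boolean result.
        checker, arg = checker_and_args
        if not checker(arg):
            raise AssertionError('state check failed: %r(%r)' % (checker, arg))

    # Wrapping the checker turns a mode mismatch into a test failure.
    check_state((lambda mode: mode == 'normal', 'normal'))
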
diff --git a/server/site_tests/firmware_SoftwareSync/control b/server/site_tests/firmware_SoftwareSync/control
index 59f53ad..40450f8 100644
--- a/server/site_tests/firmware_SoftwareSync/control
+++ b/server/site_tests/firmware_SoftwareSync/control
@@ -4,18 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_SoftwareSync"
 PURPOSE = "Servo based EC software sync test"
 CRITERIA = "This test will fail if EC software sync misbehalved"
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot," \
-             "suite:faft_cr50_prepvt, suite:faft_cr50_pvt, suite:faft_cr50_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot, suite:faft_cr50_prepvt, suite:faft_cr50_pvt, suite:faft_cr50_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test corrupts EC RW firmware body and checks software sync restores it.
diff --git a/server/site_tests/firmware_SoftwareSync/control.dev b/server/site_tests/firmware_SoftwareSync/control.dev
index f533f96..9fcd7a1 100644
--- a/server/site_tests/firmware_SoftwareSync/control.dev
+++ b/server/site_tests/firmware_SoftwareSync/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_SoftwareSync.dev"
 PURPOSE = "Servo based EC software sync test"
 CRITERIA = "This test will fail if EC software sync misbehalved"
-ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec3po, suite:faft_ec_tot"
+ATTRIBUTES = "suite:faft_ec, suite:faft_ec_fw_qual, suite:faft_ec_tot"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
 DEPENDENCIES = "ec:cros, servo_state:WORKING"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test corrupts EC RW firmware body and checks software sync restores it.
diff --git a/server/site_tests/firmware_SoftwareSync/firmware_SoftwareSync.py b/server/site_tests/firmware_SoftwareSync/firmware_SoftwareSync.py
index 65dd06e..94e5956 100644
--- a/server/site_tests/firmware_SoftwareSync/firmware_SoftwareSync.py
+++ b/server/site_tests/firmware_SoftwareSync/firmware_SoftwareSync.py
@@ -26,6 +26,10 @@
         if not self.check_ec_capability():
             raise error.TestNAError("Nothing needs to be tested on this device")
 
+        if self._no_ec_sync:
+            raise error.TestNAError(
+                    "User selected to disable EC software sync")
+
         # In order to test software sync, it must be enabled.
         self.clear_set_gbb_flags(vboot.GBB_FLAG_DISABLE_EC_SOFTWARE_SYNC, 0)
         self.backup_firmware()
@@ -159,7 +163,7 @@
         # The boot mode should be "NORMAL".
         logging.info('Check the boot mode is NORMAL mode.')
         if not self.cr50.check_boot_mode('NORMAL'):
-            logging.warn('You may want to run %r in cr50 console to uncorrupt'
+            logging.warning('You may want to run %r in cr50 console to uncorrupt'
                          ' EC hash.', ec_corrupt_cmd)
             raise error.TestFail('Boot mode is not NORMAL.')
 
diff --git a/server/site_tests/firmware_StandbyPowerConsumption/control.30min b/server/site_tests/firmware_StandbyPowerConsumption/control.30min
index fba1604..cf268ae 100644
--- a/server/site_tests/firmware_StandbyPowerConsumption/control.30min
+++ b/server/site_tests/firmware_StandbyPowerConsumption/control.30min
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_StandbyPowerConsumption.30min"
 PURPOSE = "Collect power consumption during hibernate."
 TIME = "LONG"
@@ -13,6 +13,7 @@
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:powerplay"
 DEPENDENCIES = "servo_state:WORKING, powerplay"
+PY_VERSION = 3
 
 DOC = """
 This test collects and reports the power consumption data during hibernate state.
diff --git a/server/site_tests/firmware_StandbyPowerConsumption/control.4hr b/server/site_tests/firmware_StandbyPowerConsumption/control.4hr
index 6f2eb2e..c6d7aab 100644
--- a/server/site_tests/firmware_StandbyPowerConsumption/control.4hr
+++ b/server/site_tests/firmware_StandbyPowerConsumption/control.4hr
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_StandbyPowerConsumption.4hr"
 PURPOSE = "Collect power consumption during hibernate."
 TIME = "LONG"
@@ -13,6 +13,7 @@
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:powerplay"
 DEPENDENCIES = "servo_state:WORKING, powerplay"
+PY_VERSION = 3
 
 DOC = """
 This test collects and reports the power consumption data during hibernate state.
diff --git a/server/site_tests/firmware_SysfsVPD/control b/server/site_tests/firmware_SysfsVPD/control
index f77c1c4..d50b7cb 100644
--- a/server/site_tests/firmware_SysfsVPD/control
+++ b/server/site_tests/firmware_SysfsVPD/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_SysfsVPD"
 PURPOSE = "Servo based test for reading VPD data through sysfs"
 CRITERIA = "This test will fail if the VPD test string is not read back correctly"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_normal, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test writes random test strings to the RO and RW sections of VPD data
diff --git a/server/site_tests/firmware_SysfsVPD/firmware_SysfsVPD.py b/server/site_tests/firmware_SysfsVPD/firmware_SysfsVPD.py
index 9d6e0f5..996ba9c 100644
--- a/server/site_tests/firmware_SysfsVPD/firmware_SysfsVPD.py
+++ b/server/site_tests/firmware_SysfsVPD/firmware_SysfsVPD.py
@@ -65,7 +65,8 @@
 
         self.host = host
         self.backup_firmware()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
 
     def cleanup(self):
         """Cleanup the test"""
diff --git a/server/site_tests/firmware_TPMExtend/control b/server/site_tests/firmware_TPMExtend/control
index d221e10..7365e9e 100644
--- a/server/site_tests/firmware_TPMExtend/control
+++ b/server/site_tests/firmware_TPMExtend/control
@@ -8,13 +8,14 @@
 NAME = "firmware_TPMExtend"
 PURPOSE = "Ensure TPM PCRs have been correctly extended with boot mode and HWID"
 CRITERIA = "This test will fail if TPM PCR 0 or 1 has an incorrect value"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test ensures that the TPM PCRs 0 and 1 have been extended with a hash that
diff --git a/server/site_tests/firmware_TPMExtend/firmware_TPMExtend.py b/server/site_tests/firmware_TPMExtend/firmware_TPMExtend.py
index 2a98e23..7f1e01d 100644
--- a/server/site_tests/firmware_TPMExtend/firmware_TPMExtend.py
+++ b/server/site_tests/firmware_TPMExtend/firmware_TPMExtend.py
@@ -7,6 +7,8 @@
 from autotest_lib.client.common_lib import error
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
 
+def _encode_text(text):
+    return text.encode('utf-8')
 
 class firmware_TPMExtend(FirmwareTest):
     """Test to ensure TPM PCRs are extended correctly."""
@@ -31,7 +33,8 @@
                         'cat %s' % pcrs_file))
         logging.debug('Dumping PCRs read from device: \n%s', pcrs)
         extended = hashlib.sha1(b'\0' * 20 + hash_obj.digest()[:20]).hexdigest()
-        spaced = ' '.join(extended[i:i+2] for i in xrange(0, len(extended), 2))
+        spaced = ' '.join(extended[i:i + 2]
+                          for i in range(0, len(extended), 2))
         logging.debug('PCR %d should contain hash: %s', num, spaced)
         return ('PCR-%.2d: %s' % (num, spaced.upper())) in pcrs
 
@@ -51,6 +54,15 @@
         else:
             return self._tpm2_check_pcr(num, hash_obj)
 
+    def _check_pcr_bootmode(self, dev_mode, rec_mode, keyblock_flags):
+        bootmode = _encode_text(chr(dev_mode) +
+                                chr(rec_mode) +
+                                chr(keyblock_flags))
+        if not self._check_pcr(0, hashlib.sha1(bootmode)):
+            msg = 'PCR0 was not extended with bootmode %d|%d|%d!' % (
+                    dev_mode, rec_mode, keyblock_flags)
+            raise error.TestFail(msg)
+
     def run_once(self):
         """Runs a single iteration of the test."""
         if self.disable_hwid_check:
@@ -61,7 +73,7 @@
             hwid = self.faft_client.system.run_shell_command_get_output(
                     'crossystem hwid')[0]
             logging.debug('HWID reported by device is: %s', hwid)
-            if not self._check_pcr(1, hashlib.sha256(hwid)):
+            if not self._check_pcr(1, hashlib.sha256(_encode_text(hwid))):
                 raise error.TestFail(
                     'PCR1 was not extended with SHA256 of HWID!')
 
@@ -71,8 +83,7 @@
                             'mainfw_type': 'normal'
                             }))
         # dev_mode: 0, rec_mode: 0, keyblock_flags: "normal" (1)
-        if not self._check_pcr(0, hashlib.sha1(chr(0) + chr(0) + chr(1))):
-            raise error.TestFail('PCR0 was not extended with bootmode 0|0|1!')
+        self._check_pcr_bootmode(0, 0, 1)
 
         logging.info('Verifying bootmode digest in PCR0 in recovery mode')
         self.switcher.reboot_to_mode(to_mode='rec')
@@ -81,8 +92,7 @@
                             'mainfw_type': 'recovery'
                             }))
         # dev_mode: 0, rec_mode: 1, keyblock_flags: "unknown" (0)
-        if not self._check_pcr(0, hashlib.sha1(chr(0) + chr(1) + chr(0))):
-            raise error.TestFail('PCR0 was not extended with bootmode 0|1|0!')
+        self._check_pcr_bootmode(0, 1, 0)
 
         logging.info('Transitioning to dev mode for next test')
         self.switcher.reboot_to_mode(to_mode='dev')
@@ -93,8 +103,7 @@
                             'mainfw_type': 'developer'
                             }))
         # dev_mode: 1, rec_mode: 0, keyblock_flags: "normal" (1)
-        if not self._check_pcr(0, hashlib.sha1(chr(1) + chr(0) + chr(1))):
-            raise error.TestFail('PCR0 was not extended with bootmode 1|0|1!')
+        self._check_pcr_bootmode(1, 0, 1)
 
         logging.info('Verifying bootmode digest in PCR0 in dev-recovery mode')
         self.switcher.reboot_to_mode(to_mode='rec')
@@ -103,8 +112,7 @@
                             'mainfw_type': 'recovery'
                             }))
         # dev_mode: 1, rec_mode: 1, keyblock_flags: "unknown" (0)
-        if not self._check_pcr(0, hashlib.sha1(chr(1) + chr(1) + chr(0))):
-            raise error.TestFail('PCR0 was not extended with bootmode 1|1|0!')
+        self._check_pcr_bootmode(1, 1, 0)
 
         logging.info('All done, returning to normal mode')
         self.switcher.reboot_to_mode(to_mode='normal')
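
For reference, the expected PCR0 value that the new _check_pcr_bootmode() helper verifies can be reproduced standalone. A minimal sketch (illustrative names, not part of this change; for bootmode byte values below 128 it matches the test's UTF-8 encoding of chr() characters):

    import hashlib

    def expected_pcr0(dev_mode, rec_mode, keyblock_flags):
        # PCR0 starts as 20 zero bytes and is extended with the SHA-1 of the
        # 3-byte bootmode (dev_mode, rec_mode, keyblock_flags).
        bootmode = bytes([dev_mode, rec_mode, keyblock_flags])
        digest = hashlib.sha1(bootmode).digest()[:20]
        return hashlib.sha1(b'\0' * 20 + digest).hexdigest()

    # Normal mode: dev_mode=0, rec_mode=0, keyblock_flags=1
    print(expected_pcr0(0, 0, 1))
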
diff --git a/server/site_tests/firmware_TPMKernelVersion/control b/server/site_tests/firmware_TPMKernelVersion/control
index 6b8145b..739b071 100644
--- a/server/site_tests/firmware_TPMKernelVersion/control
+++ b/server/site_tests/firmware_TPMKernelVersion/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_TPMKernelVersion"
 PURPOSE = "Check kernel version in TPM is not corrupted."
 CRITERIA = "This test will fail if kernel version in TPM is corrupted."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv3, suite:faft_normal, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 Booting in dev mode will not corrupt kernel and firmware version stored
diff --git a/server/site_tests/firmware_TPMNotCorruptedDevMode/control b/server/site_tests/firmware_TPMNotCorruptedDevMode/control
index ea631bd..1966bdd 100644
--- a/server/site_tests/firmware_TPMNotCorruptedDevMode/control
+++ b/server/site_tests/firmware_TPMNotCorruptedDevMode/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_TPMNotCorruptedDevMode"
 PURPOSE = "Ensure kernel and fw version in TPM isn't corrupted in dev mode."
 CRITERIA = "This test will fail if dev mode corrupts the kernel or fw version."
 ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2"
-DEPENDENCIES = "servo_state:WORKING"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image test"). On runtime, this test first switches
 DUT to developer mode. It then checks the kernel and firmware version stored in
 the TPM. It then boots into normal mode and checks the kernel and firmware
diff --git a/server/site_tests/firmware_TPMNotCorruptedDevMode/firmware_TPMNotCorruptedDevMode.py b/server/site_tests/firmware_TPMNotCorruptedDevMode/firmware_TPMNotCorruptedDevMode.py
index a82bdb3..8f1fa31 100644
--- a/server/site_tests/firmware_TPMNotCorruptedDevMode/firmware_TPMNotCorruptedDevMode.py
+++ b/server/site_tests/firmware_TPMNotCorruptedDevMode/firmware_TPMNotCorruptedDevMode.py
@@ -16,7 +16,7 @@
     to USB and checks the firmware version and kernel version via crossystem for
     corruption.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image test").
     """
     version = 1
diff --git a/server/site_tests/firmware_TPMVersionCheck/control b/server/site_tests/firmware_TPMVersionCheck/control
index dcb4a45..d662c5e 100644
--- a/server/site_tests/firmware_TPMVersionCheck/control
+++ b/server/site_tests/firmware_TPMVersionCheck/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_TPMVersionCheck"
 PURPOSE = "Crossystem reports tpm versions correctly"
 CRITERIA = "This test will fail if crossystem does not report correctly."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv1, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv1, suite:faft_normal, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test will fail if crossystem does not report correctly.
diff --git a/server/site_tests/firmware_TPMVersionCheck/control.dev b/server/site_tests/firmware_TPMVersionCheck/control.dev
index 8687da1..8764914 100644
--- a/server/site_tests/firmware_TPMVersionCheck/control.dev
+++ b/server/site_tests/firmware_TPMVersionCheck/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_TPMVersionCheck.dev"
 PURPOSE = "Crossystem reports the tpm versions correctly"
 CRITERIA = "This test will fail if crossystem reports tpm version incorrectly"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv1, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv1, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test will fail if crossystem does not report correctly.
diff --git a/server/site_tests/firmware_TPMVersionCheck/firmware_TPMVersionCheck.py b/server/site_tests/firmware_TPMVersionCheck/firmware_TPMVersionCheck.py
index 2d09a74..8b57d0e 100644
--- a/server/site_tests/firmware_TPMVersionCheck/firmware_TPMVersionCheck.py
+++ b/server/site_tests/firmware_TPMVersionCheck/firmware_TPMVersionCheck.py
@@ -17,7 +17,8 @@
     def initialize(self, host, cmdline_args, dev_mode=False, ec_wp=None):
         super(firmware_TPMVersionCheck, self).initialize(host, cmdline_args,
                                                          ec_wp=ec_wp)
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
 
     def run_once(self):
diff --git a/server/site_tests/firmware_TryFwB/control b/server/site_tests/firmware_TryFwB/control
index ce2f1af..2ac0cc7 100644
--- a/server/site_tests/firmware_TryFwB/control
+++ b/server/site_tests/firmware_TryFwB/control
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_TryFwB"
 PURPOSE = "Servo based RW firmware B boot test"
 CRITERIA = "This test will fail if firmware does not switch to firmware B"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv1, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv1, suite:faft_normal, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test sets fwb_tries flag and boots firmware B.
diff --git a/server/site_tests/firmware_TryFwB/control.dev b/server/site_tests/firmware_TryFwB/control.dev
index 1e35d53..f76b862 100644
--- a/server/site_tests/firmware_TryFwB/control.dev
+++ b/server/site_tests/firmware_TryFwB/control.dev
@@ -4,17 +4,17 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_TryFwB.dev"
 PURPOSE = "Servo based RW firmware B boot test"
 CRITERIA = "This test will fail if firmware does not switch to firmware B"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv1, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv1, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This test sets fwb_tries flag and boots firmware B.
diff --git a/server/site_tests/firmware_TryFwB/firmware_TryFwB.py b/server/site_tests/firmware_TryFwB/firmware_TryFwB.py
index 8a65d77..01a1934 100644
--- a/server/site_tests/firmware_TryFwB/firmware_TryFwB.py
+++ b/server/site_tests/firmware_TryFwB/firmware_TryFwB.py
@@ -14,7 +14,8 @@
 
     def initialize(self, host, cmdline_args, dev_mode=False, ec_wp=None):
         super(firmware_TryFwB, self).initialize(host, cmdline_args, ec_wp=ec_wp)
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_usbkey(usbkey=False)
         if not self.fw_vboot2:
             self.setup_tried_fwb(tried_fwb=False)
@@ -38,5 +39,5 @@
         self.switcher.mode_aware_reboot()
 
         expected_slot = 'B' if self.fw_vboot2 else 'A'
-        logging.info("Expected firmware " + expected_slot + " boot, done.")
+        logging.info("Expected firmware %s boot, done", expected_slot)
         self.check_state((self.checkers.fw_tries_checker, expected_slot))
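
The logging change above switches from string concatenation to logging's deferred %-style formatting. A tiny standalone illustration of the idiom (not part of this change):

    import logging

    logging.basicConfig(level=logging.INFO)
    slot = 'B'
    # The argument is only interpolated if the record is actually emitted,
    # and it is kept as a separate arg for handlers/aggregators.
    logging.info("Expected firmware %s boot, done", slot)
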
diff --git a/server/site_tests/firmware_TypeCCharging/control b/server/site_tests/firmware_TypeCCharging/control
index 0324f23..5995627 100644
--- a/server/site_tests/firmware_TypeCCharging/control
+++ b/server/site_tests/firmware_TypeCCharging/control
@@ -4,13 +4,16 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_TypeCCharging"
 PURPOSE = "Remotely controlled USB type C charging test."
 CRITERIA = "This test will fail if Plankton type C VBUS voltage not in range."
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "FAST"
 TEST_CATEGORY = "Functional"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test remotely emulates type C charging adapter with different source
diff --git a/server/site_tests/firmware_TypeCProbeUSB3/control b/server/site_tests/firmware_TypeCProbeUSB3/control
index a36a094..ed7eb86 100644
--- a/server/site_tests/firmware_TypeCProbeUSB3/control
+++ b/server/site_tests/firmware_TypeCProbeUSB3/control
@@ -4,13 +4,16 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_TypeCProbeUSB3"
 PURPOSE = "Remotely controlled USB type C super speed device probing test."
 CRITERIA = "This test will fail if DUT can't probe USB3 device on type C port."
+DEPENDENCIES = "servo_state:WORKING"
 TIME = "FAST"
 TEST_CATEGORY = "Functional"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test remotely switches type C port to USB3 device mode. It fails if
diff --git a/server/site_tests/firmware_UpdateFirmwareDataKeyVersion/control b/server/site_tests/firmware_UpdateFirmwareDataKeyVersion/control
index dec0d87..aaa42dd 100644
--- a/server/site_tests/firmware_UpdateFirmwareDataKeyVersion/control
+++ b/server/site_tests/firmware_UpdateFirmwareDataKeyVersion/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_UpdateFirmwareDataKeyVersion"
 PURPOSE = """
 Servo based firmware update test which check firmware datakey version.
@@ -13,18 +13,20 @@
 Prerequisites are as follows:
 1. The fwid should match shellball's (/usr/sbin/chromeos-firmwareupdate) fwid,
    unless this test uses a given shellball.
-2. A USB disk should be plugged-in, which contains a Chrome OS test image.
+2. A USB disk should be plugged-in, which contains a ChromeOS test image.
 
 This test will fail if one of the following conditions is met:
 1. fwid does not match shellball's (/usr/sbin/chromeos-firmwareupdate) fwid
 2. Firmware datakey version does not match original datakey version.
 """
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv5, suite:faft_bios_ec3po, suite:faft_bios_tot"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv5, suite:faft_bios_tot"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test requires a USB test image plugged in. The firmware id
diff --git a/server/site_tests/firmware_UpdateFirmwareVersion/control b/server/site_tests/firmware_UpdateFirmwareVersion/control
index 6d45cf8..0782072 100644
--- a/server/site_tests/firmware_UpdateFirmwareVersion/control
+++ b/server/site_tests/firmware_UpdateFirmwareVersion/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_UpdateFirmwareVersion"
 PURPOSE = "Servo based firmware update test which checks the firmware version."
 CRITERIA = """
@@ -12,7 +12,7 @@
 1. This test should run in normal mode.
 2. Fwid should match shellball's (/usr/sbin/chromeos-firmwareupdate) fwid,
    unless this test uses a given shellball.
-3. A USB disk should be plugged-in, which contains a Chrome OS test image.
+3. A USB disk should be plugged-in, which contains a ChromeOS test image.
 
 This test will fail if one of the following conditions is met:
 1. Firmware update fails.
@@ -20,12 +20,14 @@
 3. Firmware version does not increase after firmware update.
 4. Firmware version does not recover to original version after recovery.
 """
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv5, suite:faft_bios_ec3po, suite:faft_bios_tot, suite:faft_smoke"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv5, suite:faft_bios_tot, suite:faft_smoke"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test requires a USB test image plugged in. The firmware id
diff --git a/server/site_tests/firmware_UpdateKernelDataKeyVersion/control b/server/site_tests/firmware_UpdateKernelDataKeyVersion/control
index f995d72..e00b86a 100644
--- a/server/site_tests/firmware_UpdateKernelDataKeyVersion/control
+++ b/server/site_tests/firmware_UpdateKernelDataKeyVersion/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_UpdateKernelDataKeyVersion"
 PURPOSE = """
 Servo based kernel update test which checks the kernel data key version.
@@ -17,13 +17,14 @@
 4. After recovery, device can't successfully restart.
 5. Kernel datakey version does not recover to original version after recovery.
 """
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv5, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv5, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test should run in developer mode. On runtime, this test modifies the
diff --git a/server/site_tests/firmware_UpdateKernelDataKeyVersion/firmware_UpdateKernelDataKeyVersion.py b/server/site_tests/firmware_UpdateKernelDataKeyVersion/firmware_UpdateKernelDataKeyVersion.py
index c163e5c..6da5cc0 100644
--- a/server/site_tests/firmware_UpdateKernelDataKeyVersion/firmware_UpdateKernelDataKeyVersion.py
+++ b/server/site_tests/firmware_UpdateKernelDataKeyVersion/firmware_UpdateKernelDataKeyVersion.py
@@ -64,7 +64,8 @@
         super(firmware_UpdateKernelDataKeyVersion, self).initialize(host,
                                                                 cmdline_args)
 
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
 
         actual_ver = self.faft_client.kernel.get_datakey_version('b')
         logging.info('Original Kernel Version of KERN-B is %s', actual_ver)
diff --git a/server/site_tests/firmware_UpdateKernelSubkeyVersion/control b/server/site_tests/firmware_UpdateKernelSubkeyVersion/control
index 0d8fb54..4c7b3e3 100644
--- a/server/site_tests/firmware_UpdateKernelSubkeyVersion/control
+++ b/server/site_tests/firmware_UpdateKernelSubkeyVersion/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_UpdateKernelSubkeyVersion"
 PURPOSE = """
 Servo based firmware update test, and check kernel subkey version.
@@ -14,13 +14,14 @@
 1. fwid does not match shellball's (/usr/sbin/chromeos-firmwareupdate) fwid
 2. Kernel subkey version does not match original kernel subkey version.
 """
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv5, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv5, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test requires firmware id matches fwid of shellball
diff --git a/server/site_tests/firmware_UpdateKernelSubkeyVersion/firmware_UpdateKernelSubkeyVersion.py b/server/site_tests/firmware_UpdateKernelSubkeyVersion/firmware_UpdateKernelSubkeyVersion.py
index fedcd24..34fa5ed 100644
--- a/server/site_tests/firmware_UpdateKernelSubkeyVersion/firmware_UpdateKernelSubkeyVersion.py
+++ b/server/site_tests/firmware_UpdateKernelSubkeyVersion/firmware_UpdateKernelSubkeyVersion.py
@@ -57,7 +57,8 @@
         super(firmware_UpdateKernelSubkeyVersion, self).initialize(
             host, cmdline_args)
         self.backup_firmware()
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self.setup_firmwareupdate_shellball(shellball_path)
 
         # Update firmware if needed
diff --git a/server/site_tests/firmware_UpdateKernelVersion/control b/server/site_tests/firmware_UpdateKernelVersion/control
index dab5673..3135c10 100644
--- a/server/site_tests/firmware_UpdateKernelVersion/control
+++ b/server/site_tests/firmware_UpdateKernelVersion/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_UpdateKernelVersion"
 PURPOSE = "Servo based kernel update test which checks the kernel version."
 CRITERIA = """
@@ -15,13 +15,14 @@
 4. After recovery, device can't successfully restart.
 5. Kernel version does not recover to original version after recovery.
 """
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv5, suite:faft_bios_ec3po, suite:faft_bios_tot, suite:faft_smoke"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv5, suite:faft_bios_tot, suite:faft_smoke"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test should run in developer mode. On runtime, this test modifies the
diff --git a/server/site_tests/firmware_UpdateKernelVersion/firmware_UpdateKernelVersion.py b/server/site_tests/firmware_UpdateKernelVersion/firmware_UpdateKernelVersion.py
index 0c50372..10c6181 100644
--- a/server/site_tests/firmware_UpdateKernelVersion/firmware_UpdateKernelVersion.py
+++ b/server/site_tests/firmware_UpdateKernelVersion/firmware_UpdateKernelVersion.py
@@ -46,7 +46,8 @@
         """Initialize the test"""
         super(firmware_UpdateKernelVersion, self).initialize(host, cmdline_args)
 
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
 
         actual_ver = self.faft_client.kernel.get_version('b')
         logging.info('Original Kernel Version of KERN-B is %s', actual_ver)
diff --git a/server/site_tests/firmware_UpdaterModes/control b/server/site_tests/firmware_UpdaterModes/control
index c30f641..c9574e4 100644
--- a/server/site_tests/firmware_UpdaterModes/control
+++ b/server/site_tests/firmware_UpdaterModes/control
@@ -2,14 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_UpdaterModes"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv5, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv5, suite:faft_bios_tot, suite:distributed_lab_qual_faft"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 Test chromeos-firmwareupdate modes via --emulate, to avoid wearing out flash
diff --git a/server/site_tests/firmware_UserRequestRecovery/control b/server/site_tests/firmware_UserRequestRecovery/control
index 5a0aa32..1bdf2a7 100644
--- a/server/site_tests/firmware_UserRequestRecovery/control
+++ b/server/site_tests/firmware_UserRequestRecovery/control
@@ -4,20 +4,21 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_UserRequestRecovery"
 PURPOSE = "Request recovery mode and check it next reboot."
 CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot, suite:bvt-faft, suite:labqual"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_normal, suite:faft_bios_tot, suite:labqual"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test first requests
 a recovery mode on next boot by setting the crossystem recovery_request
 flag. It then triggers recovery mode by unplugging and plugging in the USB
diff --git a/server/site_tests/firmware_UserRequestRecovery/control.dev b/server/site_tests/firmware_UserRequestRecovery/control.dev
index f786f56..705a988 100644
--- a/server/site_tests/firmware_UserRequestRecovery/control.dev
+++ b/server/site_tests/firmware_UserRequestRecovery/control.dev
@@ -4,20 +4,20 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "firmware_UserRequestRecovery.dev"
 PURPOSE = "Request recovery mode and check it next reboot."
 CRITERIA = "This test will fail if firmware does not enter recovery mode"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv2, suite:faft_bios_ec3po, suite:faft_bios_tot, suite:labqual"
-DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv2, suite:faft_bios_tot, suite:labqual"
+DEPENDENCIES = "servo_state:WORKING, servo_usb_state:NORMAL"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
-This test requires a USB disk plugged-in, which contains a Chrome OS test
+This test requires a USB disk plugged-in, which contains a ChromeOS test
 image (built by "build_image --test"). On runtime, this test first requests
 a recovery mode on next boot by setting the crossystem recovery_request
 flag. It then triggers recovery mode by unplugging and plugging in the USB
diff --git a/server/site_tests/firmware_UserRequestRecovery/firmware_UserRequestRecovery.py b/server/site_tests/firmware_UserRequestRecovery/firmware_UserRequestRecovery.py
index da49d6b..cae086c 100644
--- a/server/site_tests/firmware_UserRequestRecovery/firmware_UserRequestRecovery.py
+++ b/server/site_tests/firmware_UserRequestRecovery/firmware_UserRequestRecovery.py
@@ -12,7 +12,7 @@
     """
     Servo based user request recovery boot test.
 
-    This test requires a USB disk plugged-in, which contains a Chrome OS test
+    This test requires a USB disk plugged-in, which contains a ChromeOS test
     image (built by "build_image --test"). On runtime, this test first requests
     a recovery mode on next boot by setting the crossystem recovery_request
     flag. It then triggers recovery mode by unplugging and plugging in the USB
diff --git a/server/site_tests/firmware_WilcoDiagnosticsMode/control b/server/site_tests/firmware_WilcoDiagnosticsMode/control
index 89969d2..ad2af22 100644
--- a/server/site_tests/firmware_WilcoDiagnosticsMode/control
+++ b/server/site_tests/firmware_WilcoDiagnosticsMode/control
@@ -8,11 +8,11 @@
 NAME = "firmware_WilcoDiagnosticsMode"
 PURPOSE = "Verify that AP firmware handles a corrupt Wilco diagnostics binary and applies updates to it"
 ATTRIBUTES = "suite:faft_wilco, suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual"
-DEPENDENCIES = "servo_state:WORKING"
 TIME = "MEDIUM"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING, board:sarien"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """Corrupt the Wilco diagnostics image and then reinstall it.
 
diff --git a/server/site_tests/firmware_WilcoDiagnosticsMode/firmware_WilcoDiagnosticsMode.py b/server/site_tests/firmware_WilcoDiagnosticsMode/firmware_WilcoDiagnosticsMode.py
index f6aa33e..6fb0547 100644
--- a/server/site_tests/firmware_WilcoDiagnosticsMode/firmware_WilcoDiagnosticsMode.py
+++ b/server/site_tests/firmware_WilcoDiagnosticsMode/firmware_WilcoDiagnosticsMode.py
@@ -22,7 +22,7 @@
 
     # The delay between pressing <F12> to enter diagnostics mode and reaching
     # the confirmation screen; typically about 10 seconds; overshoot to be safe.
-    DIAGNOSTICS_CONFIRM_SCREEN_DELAY_SECONDS = 15
+    DIAGNOSTICS_CONFIRM_SCREEN_DELAY_SECONDS = 20
     # The delay between pressing <Power> to confirm entry to diagnostics mode
     # and rebooting into diagnostics mode.
     DIAGNOSTICS_CONFIRM_REBOOT_DELAY_SECONDS = 8
@@ -81,27 +81,59 @@
     def _enter_diagnostics_mode(self):
         # Reboot to the recovery screen, press <F12>, and press power to
         # confirm.
-        logging.info('Rebooting to recovery screen')
-        self.switcher.enable_rec_mode_and_reboot(usb_state='host')
+        self.servo.switch_usbkey('host')
+        psc = self.servo.get_power_state_controller()
+        logging.info('Powering off')
+        if self.cr50.ap_is_on():
+            self.servo.power_key(self.faft_config.hold_pwr_button_poweroff)
+            logging.info('Waiting for power off')
+            time.sleep(1)
+            self._client.close_main_ssh()
+        logging.info('Booting to recovery screen')
+        psc.power_on(psc.REC_ON)
+
+        logging.info('Sleeping %s seconds (firmware_screen)',
+                     self.faft_config.firmware_screen)
         time.sleep(self.faft_config.firmware_screen)
+        if not self.cr50.ap_is_on():
+            raise error.TestFail('Expected AP on when booting to recovery')
         logging.info('Pressing <F12>')
         self._press_f12()
+        logging.info(
+                'Sleeping %s seconds (DIAGNOSTICS_CONFIRM_SCREEN_DELAY_SECONDS)',
+                self.DIAGNOSTICS_CONFIRM_SCREEN_DELAY_SECONDS)
         time.sleep(self.DIAGNOSTICS_CONFIRM_SCREEN_DELAY_SECONDS)
         logging.info('Pressing <Power> to confirm')
         self.servo.power_short_press()
         # At this point, the DUT will try to reboot into diagnostics mode.
 
+    def _verify_diagnostics_mode(self):
+        """Checks that the AP is on and ssh fails.
+
+        This does not prove that we are in diagnostics mode, but it gives some
+        confidence.
+        """
+        # Wait long enough that DUT would have rebooted to normal mode if
+        # diagnostics mode failed.
+        logging.info(
+                'Sleeping %s seconds (DIAGNOSTICS_FAIL_REBOOT_DELAY_SECONDS)',
+                self.DIAGNOSTICS_FAIL_REBOOT_DELAY_SECONDS)
+        time.sleep(self.DIAGNOSTICS_FAIL_REBOOT_DELAY_SECONDS)
+        if not self.cr50.ap_is_on():
+            raise error.TestFail(
+                    'AP is off, expected diagnostics mode. Is the diagnostics '
+                    'image corrupted? Run chromeos-firmwareupdate --mode=recovery')
+        logging.info('Sleeping %s seconds (delay_reboot_to_ping)',
+                     self.faft_config.delay_reboot_to_ping)
+        time.sleep(self.faft_config.delay_reboot_to_ping)
+        self.switcher.wait_for_client_offline(timeout=5)
+        logging.info('DUT offline after entering diagnostics mode')
+
     def run_once(self):
         """Run the body of the test."""
         logging.info('Attempting to enter diagnostics mode')
         self._enter_diagnostics_mode()
-        # Wait long enough that DUT would have rebooted to normal mode if
-        # diagnostics mode failed.
-        time.sleep(self.DIAGNOSTICS_CONFIRM_REBOOT_DELAY_SECONDS +
-                self.DIAGNOSTICS_FAIL_REBOOT_DELAY_SECONDS +
-                self.faft_config.delay_reboot_to_ping)
-        self.switcher.wait_for_client_offline(timeout=5)
-        logging.info('DUT offline after entering diagnostics mode')
+        self._verify_diagnostics_mode()
         self._client.reset_via_servo()
         self.switcher.wait_for_client()
 
@@ -110,8 +142,16 @@
         # enter diagnostics mode).
         self._corrupt_diagnostics_image()
         self._enter_diagnostics_mode()
+        logging.info(
+                'Sleeping %s seconds (DIAGNOSTICS_FAIL_REBOOT_DELAY_SECONDS)',
+                self.DIAGNOSTICS_FAIL_REBOOT_DELAY_SECONDS)
+        time.sleep(self.DIAGNOSTICS_FAIL_REBOOT_DELAY_SECONDS)
+        # If diagnostics mode fails, the DUT might just power off.
+        if not self.cr50.ap_is_on():
+            logging.info('AP off, pressing <Power> to boot to normal mode')
+            self.servo.power_short_press()
         self.switcher.wait_for_client()
-        self.checkers.mode_checker('normal')
+        self.check_state((self.checkers.mode_checker, 'normal'))
 
         # Update the firmware to restore the diagnostics image, reboot into
         # diagnostics mode, and verify that the DUT goes down (indicating
@@ -123,10 +163,4 @@
 
         logging.info('Attempting to enter diagnostics mode')
         self._enter_diagnostics_mode()
-        # Wait long enough that DUT would have rebooted if diagnostics mode
-        # failed.
-        time.sleep(self.DIAGNOSTICS_CONFIRM_REBOOT_DELAY_SECONDS +
-                self.DIAGNOSTICS_FAIL_REBOOT_DELAY_SECONDS +
-                self.faft_config.delay_reboot_to_ping)
-        self.switcher.wait_for_client_offline(timeout=5)
-        logging.info('DUT offline after entering diagnostics mode')
+        self._verify_diagnostics_mode()
diff --git a/server/site_tests/firmware_WriteProtect/control b/server/site_tests/firmware_WriteProtect/control
index 9868574..9c376dc 100644
--- a/server/site_tests/firmware_WriteProtect/control
+++ b/server/site_tests/firmware_WriteProtect/control
@@ -8,13 +8,14 @@
 NAME = "firmware_WriteProtect"
 PURPOSE = "Servo based hardware write protect test"
 CRITERIA = "This test will fail if SPI hardware write-protect cannot be read correctly"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv1, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv1, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This is a simple test which toggles the hardware write-protect line via Servo
diff --git a/server/site_tests/firmware_WriteProtect/control.dev b/server/site_tests/firmware_WriteProtect/control.dev
index c85cce2..1e0e180 100644
--- a/server/site_tests/firmware_WriteProtect/control.dev
+++ b/server/site_tests/firmware_WriteProtect/control.dev
@@ -8,13 +8,13 @@
 NAME = "firmware_WriteProtect.dev"
 PURPOSE = "Servo based hardware write protect test"
 CRITERIA = "This test will fail if SPI hardware write-protect cannot be read correctly"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv1, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_dev, suite:faft_lv1, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 4
+PY_VERSION = 3
 
 DOC = """
 This is a simple test which toggles the hardware write-protect line via Servo
diff --git a/server/site_tests/firmware_WriteProtect/firmware_WriteProtect.py b/server/site_tests/firmware_WriteProtect/firmware_WriteProtect.py
index e373ef2..e267c14 100644
--- a/server/site_tests/firmware_WriteProtect/firmware_WriteProtect.py
+++ b/server/site_tests/firmware_WriteProtect/firmware_WriteProtect.py
@@ -17,7 +17,8 @@
     def initialize(self, host, cmdline_args, dev_mode=False):
         """Initialize the test"""
         super(firmware_WriteProtect, self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         self._original_wp = 'on' in self.servo.get('fw_wp_state')
 
     def cleanup(self):
diff --git a/server/site_tests/firmware_WriteProtectFunc/control b/server/site_tests/firmware_WriteProtectFunc/control
index d3837f9..e937382 100644
--- a/server/site_tests/firmware_WriteProtectFunc/control
+++ b/server/site_tests/firmware_WriteProtectFunc/control
@@ -8,13 +8,14 @@
 NAME = "firmware_WriteProtectFunc"
 PURPOSE = "Servo based SPI flash write protect functional test"
 CRITERIA = "This test will fail if SPI flash write-protection does not work correctly"
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv2, suite:faft_bios_ec3po, suite:faft_bios_tot"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_normal, suite:faft_lv2, suite:faft_bios_tot"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "firmware"
 TEST_TYPE = "server"
-JOB_RETRIES = 1
+JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This test verifies the function of SPI flash write-protection.
diff --git a/server/site_tests/firmware_WriteProtectFunc/firmware_WriteProtectFunc.py b/server/site_tests/firmware_WriteProtectFunc/firmware_WriteProtectFunc.py
index 144785d..8bb1c6d 100644
--- a/server/site_tests/firmware_WriteProtectFunc/firmware_WriteProtectFunc.py
+++ b/server/site_tests/firmware_WriteProtectFunc/firmware_WriteProtectFunc.py
@@ -23,7 +23,8 @@
     def initialize(self, host, cmdline_args, dev_mode=False):
         """Initialize the test"""
         super(firmware_WriteProtectFunc, self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('dev' if dev_mode else 'normal')
+        self.switcher.setup_mode('dev' if dev_mode else 'normal',
+                                 allow_gbb_force=True)
         if self.faft_config.chrome_ec:
             self._targets = (BIOS, EC)
         else:
@@ -34,6 +35,8 @@
         self._original_sw_wps = {}
         for target in self._targets:
             sw_wp_dict = self._rpcs[target].get_write_protect_status()
+            logging.debug("self._rpcs[%s].get_write_protect_status() = %s",
+                          target, sw_wp_dict)
             self._original_sw_wps[target] = sw_wp_dict['enabled']
         self._original_hw_wp = 'on' in self.servo.get('fw_wp_state')
         self.backup_firmware()
@@ -148,6 +151,8 @@
         # Check WP is properly enabled at the start
         for target in self._targets:
             sw_wp_dict = self._rpcs[target].get_write_protect_status()
+            logging.debug("self._rpcs[%s].get_write_protect_status() = %s",
+                          target, sw_wp_dict)
             if not sw_wp_dict['enabled']:
                 raise error.TestFail('Failed to enable %s SW WP at '
                                      'test start' % target.upper())
diff --git a/server/site_tests/fleet_FirmwareUpdate/control.rw b/server/site_tests/fleet_FirmwareUpdate/control.rw
new file mode 100644
index 0000000..7deea7c
--- /dev/null
+++ b/server/site_tests/fleet_FirmwareUpdate/control.rw
@@ -0,0 +1,35 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "Chrome Fleet Software team"
+NAME = "fleet_FirmwareUpdate"
+PURPOSE = "Update OS bundled firmware and verify DUT is good"
+ATTRIBUTES = "suite:fleet_firmware_update"
+CRITERIA = "This test will fail if the device failed to boot after update firmware"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "firmware"
+TEST_TYPE = "server"
+JOB_RETRIES = 0
+PY_VERSION = 3
+
+
+DOC = """
+This test is designed to simulate the firmware update behavior that a ChromeOS
+user would see. An OS update is not needed, since provisioning (the prejob)
+already handles it. This test updates the OS-bundled firmware (RW only) on the
+DUT and performs some post-update validation. The test includes the steps below:
+
+1. Pre-update validation.
+2. Update the DUT to the OS-bundled firmware (RW only) via /usr/sbin/chromeos-firmwareupdate.
+3. Reboot.
+4. Post-update validation.
+"""
+
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('fleet_FirmwareUpdate', host=host)
+
+job.parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/fleet_FirmwareUpdate/fleet_FirmwareUpdate.py b/server/site_tests/fleet_FirmwareUpdate/fleet_FirmwareUpdate.py
new file mode 100644
index 0000000..cc82fab
--- /dev/null
+++ b/server/site_tests/fleet_FirmwareUpdate/fleet_FirmwareUpdate.py
@@ -0,0 +1,109 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+from autotest_lib.server import test
+from autotest_lib.server.hosts import cros_firmware
+from autotest_lib.client.common_lib import error
+
+
+class fleet_FirmwareUpdate(test.test):
+    """Test to update OS bundled firmware and validate DUT is in good state."""
+    version = 1
+
+    UPDATE_CMD = "/usr/sbin/chromeos-firmwareupdate --wp=1 --mode=autoupdate"
+
+    def update_os_bundled_firmware(self, host):
+        """Update OS bundled firmware, RW only.
+
+        Args:
+          host: Target host machine to update firmware.
+
+        Raises:
+          error.TestFail if the firmware update command fails.
+        """
+        logging.info("Starting update firmware on %s.", host.hostname)
+        try:
+            res = host.run(self.UPDATE_CMD)
+        except:
+            raise error.TestFail("Failed to update firmware.")
+
+    def pre_update_validation(self, host):
+        """Validate DUT is in good state before firmware update.
+
+        Args:
+          host: Target host machine to do the validation.
+
+        Raises:
+          error.TestNAError if the DUT is not sshable or does not come back
+                            after reboot.
+        """
+        # Ensure the DUT is sshable before firmware update.
+        if not host.is_up():
+            raise error.TestNAError("DUT is down before firmware update.")
+
+        # Ensure the DUT can reboot normally before firmware update.
+        logging.info("Rebooting %s prior firmware update", host.hostname)
+        try:
+            host.reboot(timeout=host.BOOT_TIMEOUT, wait=True)
+        except Exception as e:
+            logging.error(e)
+            raise error.TestNAError("DUT failed to reboot before firmware"
+                                    " update.")
+
+    def is_firmware_updated(self, host):
+        """Check whether the DUT is updated to OS bundled firmware.
+
+        Args:
+          host: Target host machine to check.
+        """
+        model = host.get_platform()
+        expected = cros_firmware._get_available_firmware(host, model)
+        if not expected:
+            logging.info("Couldn't get expected version based on model"
+                         " info, skip firmware version check.")
+        actual = host.run("crossystem fwid").stdout
+        logging.debug("Expected firmware: %s, actual firmware on DUT: %s.",
+                      expected, actual)
+        return expected == actual
+
+    def post_update_validation(self, host):
+        """Validate DUT is good after firmware update.
+
+        Args:
+          host: Target host machine to do the validation.
+
+        Raises:
+          error.TestFail if the DUT fails to pass validation.
+        """
+        try:
+            host.reboot(timeout=host.BOOT_TIMEOUT, wait=True)
+        except Exception as e:
+            logging.error(e)
+            raise error.TestFail("DUT didn't come back from reboot after"
+                                 " firmware update.")
+        if not self.is_firmware_updated(host):
+            raise error.TestFail("Firmware on DUT mismatch with OS bundled"
+                                 " firmware after update.")
+
+    def run_once(self, host):
+        """Main control of test steps:
+
+        1. Pre-update validation: ensure the DUT is in a good state before the
+           actual test, to reduce flakiness.
+        2. Firmware update: update the OS-bundled firmware (RW portion only).
+        3. Post-update validation: check that the DUT is still in a good state
+           after receiving the firmware update.
+        """
+        self.pre_update_validation(host)
+        # Need to wait for machine fully ready for firmware update to reduce
+        # flakiness.
+        time.sleep(60)
+        if self.is_firmware_updated(host):
+            raise error.TestNAError("Firmware version on the DUT is already"
+                                    " up-to-date.")
+        self.update_os_bundled_firmware(host)
+        self.post_update_validation(host)
diff --git a/server/site_tests/graphics_MultipleDisplays/control.aquarium_blob b/server/site_tests/graphics_MultipleDisplays/control.aquarium_blob
index e3e7945..c2709df 100644
--- a/server/site_tests/graphics_MultipleDisplays/control.aquarium_blob
+++ b/server/site_tests/graphics_MultipleDisplays/control.aquarium_blob
@@ -4,6 +4,7 @@
 
 from autotest_lib.server import utils
 
+PY_VERSION = 3
 AUTHOR = "chromeos-chameleon"
 NAME = "graphics_MultipleDisplays.aquarium_blob"
 PURPOSE = "Test multiple WebGL windows on internal and external displays."
diff --git a/server/site_tests/graphics_MultipleDisplays/control.aquarium_vp9_blob_h264 b/server/site_tests/graphics_MultipleDisplays/control.aquarium_vp9_blob_h264
index e870189..b7c0b4b 100644
--- a/server/site_tests/graphics_MultipleDisplays/control.aquarium_vp9_blob_h264
+++ b/server/site_tests/graphics_MultipleDisplays/control.aquarium_vp9_blob_h264
@@ -4,6 +4,7 @@
 
 from autotest_lib.server import utils
 
+PY_VERSION = 3
 AUTHOR = "chromeos-chameleon"
 NAME = "graphics_MultipleDisplays.aquarium_vp9_blob_h264"
 PURPOSE = "Test WebGL and video playback on internal and external displays."
diff --git a/server/site_tests/graphics_MultipleDisplays/graphics_MultipleDisplays.py b/server/site_tests/graphics_MultipleDisplays/graphics_MultipleDisplays.py
index 21a4d58..49ad538 100644
--- a/server/site_tests/graphics_MultipleDisplays/graphics_MultipleDisplays.py
+++ b/server/site_tests/graphics_MultipleDisplays/graphics_MultipleDisplays.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -107,9 +108,7 @@
                 window boundaries.
         """
         new_bounds = {'top': 0, 'left': 0, 'width': 0, 'height': 0}
-        display_info = filter(
-            lambda d: d.is_internal == config.internal_display,
-            self._display_facade.get_display_info())
+        display_info = [d for d in self._display_facade.get_display_info()
+                        if d.is_internal == config.internal_display]
         display_info = display_info[0]
 
         # Since we are "snapping" windows left and right, set the width to half
@@ -143,8 +142,7 @@
         # FPS information for saving later
         self._fps_list = chameleon_port.get_captured_fps_list()
 
-        stuck_fps_list = filter(lambda fps: fps < self.STUCK_FPS_THRESHOLD,
-                                self._fps_list)
+        stuck_fps_list = [fps for fps in self._fps_list
+                          if fps < self.STUCK_FPS_THRESHOLD]
         if len(stuck_fps_list) > self.MAXIMUM_STUCK_MEASUREMENTS:
             msg = 'Too many measurements {} are < {} FPS. GPU hang?'.format(
                 self._fps_list, self.STUCK_FPS_THRESHOLD)
@@ -159,7 +157,7 @@
 
         if self._subtest not in self.WINDOW_CONFIGS:
             msg = '{} is not a valid subtest. Choices are {}.'.format(
-                self._subtest, self.WINDOW_CONFIGS.keys())
+                self._subtest, list(self.WINDOW_CONFIGS.keys()))
             raise ValueError(msg)
 
         for window_config in self.WINDOW_CONFIGS[self._subtest]:
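
The filter()-to-list-comprehension conversions above matter for the Python 3 migration: filter() now returns a lazy iterator, so indexing it or taking len() fails. A small standalone illustration (not part of this change):

    values = [3, 60, 59, 2]
    threshold = 5

    lazy = filter(lambda v: v < threshold, values)
    # lazy[0] or len(lazy) would raise TypeError on Python 3.

    low = [v for v in values if v < threshold]
    print(low[0], len(low))  # -> 3 2
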
diff --git a/server/site_tests/graphics_PowerConsumption/control b/server/site_tests/graphics_PowerConsumption/control
index 30b04f1..1e3ad27 100644
--- a/server/site_tests/graphics_PowerConsumption/control
+++ b/server/site_tests/graphics_PowerConsumption/control
@@ -2,7 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+PY_VERSION = 3
+AUTHOR = "ChromeOS Team"
 NAME = "graphics_PowerConsumption"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Performance"
diff --git a/server/site_tests/graphics_PowerConsumption/graphics_PowerConsumption.py b/server/site_tests/graphics_PowerConsumption/graphics_PowerConsumption.py
index 207413b..c56b39b 100644
--- a/server/site_tests/graphics_PowerConsumption/graphics_PowerConsumption.py
+++ b/server/site_tests/graphics_PowerConsumption/graphics_PowerConsumption.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/graphics_TraceReplayExtended/control.glxgears_1minute b/server/site_tests/graphics_TraceReplayExtended/control.glxgears_1minute
index 0d2abb7..c4a4c6c 100644
--- a/server/site_tests/graphics_TraceReplayExtended/control.glxgears_1minute
+++ b/server/site_tests/graphics_TraceReplayExtended/control.glxgears_1minute
@@ -2,7 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+PY_VERSION = 3
+AUTHOR = "ChromeOS Team"
 NAME = "graphics_TraceReplayExtended.glxgears_1minute"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Performance"
@@ -20,9 +21,9 @@
 def run(machine):
     host = hosts.create_host(machine)
     job.run_test("graphics_TraceReplayExtended", host=host,
-                 client_tast_test='graphics.TraceReplayExtended.glxgears_1minute_amd64',
+                 client_tast_test='glxgears_1minute',
                  tast_build_bundle='cros',
-                 tast_command_arg=args,
+                 tast_command_args=args,
                  max_duration_minutes=10)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/graphics_TraceReplayExtended/graphics_TraceReplayExtended.py b/server/site_tests/graphics_TraceReplayExtended/graphics_TraceReplayExtended.py
index f29b072..887eb3e 100644
--- a/server/site_tests/graphics_TraceReplayExtended/graphics_TraceReplayExtended.py
+++ b/server/site_tests/graphics_TraceReplayExtended/graphics_TraceReplayExtended.py
@@ -1,238 +1,19 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 """Implementation of the graphics_TraceReplayExtended server test."""
 
-import logging
-import os
-import threading
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-from autotest_lib.server.cros.graphics import graphics_power
-from autotest_lib.server.site_tests.tast import tast
+from autotest_lib.server.cros.graphics.graphics_tracereplayextended import (
+    GraphicsTraceReplayExtendedBase)
 
 
-class TastManagerThread(threading.Thread):
-    """Thread for running a local tast test from an autotest server test."""
-
-    def __init__(self,
-                 host,
-                 tast_instance,
-                 client_test,
-                 max_duration_minutes,
-                 build_bundle,
-                 varslist=None,
-                 command_args=None):
-        """Initializes the thread.
-
-        Args:
-            host: An autotest host instance.
-            tast_instance: An instance of the tast.tast() class.
-            client_test: String identifying which tast test to run.
-            max_duration_minutes: Float defining the maximum running time of the
-                managed sub-test.
-            build_bundle: String defining which tast test bundle to build and
-                query for the client_test.
-            varslist: list of strings that define dynamic variables made
-                available to tast tests at runtime via `tast run -var=name=value
-                ...`. Each string should be formatted as 'name=value'.
-            command_args: list of strings that are passed as args to the `tast
-                run` command.
-        """
-        super(TastManagerThread, self).__init__(name=__name__)
-        self.tast = tast_instance
-        self.tast.initialize(
-            host=host,
-            test_exprs=[client_test],
-            ignore_test_failures=True,
-            max_run_sec=max_duration_minutes * 60,
-            command_args=command_args if command_args else [],
-            build_bundle=build_bundle,
-            varslist=varslist)
-
-    def run(self):
-        logging.info('Started thread: %s', self.__class__.__name__)
-        self.tast.run_once()
-
-
-class graphics_TraceReplayExtended(test.test):
-    """Autotest server test for running repeated trace replays.
-
-    This test simultaneously initiates system performance logging and extended
-    trace replay processes on a target host, and parses their test results for
-    combined analysis and reporting.
-    """
+class graphics_TraceReplayExtended(GraphicsTraceReplayExtendedBase):
+    """Autotest server test for running repeated trace replays in Crostini."""
     version = 1
 
-    @staticmethod
-    def _initialize_dir_on_host(host, directory):
-        """Initialize a directory to a consistent (empty) state on the host.
-
-        Args:
-            host: An autotest host instance.
-            directory: String defining the location of the directory to
-                initialize.
-
-        Raises:
-            TestFail: If the directory cannot be initialized.
-        """
-        try:
-            host.run('rm -r %(0)s 2>/dev/null || true; ! test -d %(0)s' %
-                     {'0': directory})
-            host.run('mkdir -p %s' % directory)
-        except (error.AutotestHostRunCmdError, error.AutoservRunError) as err:
-            logging.exception(err)
-            raise error.TestFail(
-                'Failed to initialize directory "%s" on the test host' %
-                directory)
-
-    @staticmethod
-    def _cleanup_dir_on_host(host, directory):
-        """Ensure that a directory and its contents are deleted on the host.
-
-        Args:
-            host: An autotest host instance.
-            directory: String defining the location of the directory to delete.
-
-        Raises:
-            TestFail: If the directory remains on the host.
-        """
-        try:
-            host.run('rm -r %(0)s || true; ! test -d %(0)s' % {'0': directory})
-        except (error.AutotestHostRunCmdError, error.AutoservRunError) as err:
-            logging.exception(err)
-            raise error.TestFail(
-                'Failed to cleanup directory "%s" on the test host' % directory)
-
-    def run_once(self,
-                 host,
-                 client_tast_test,
-                 max_duration_minutes,
-                 tast_build_bundle='cros',
-                 tast_varslist=None,
-                 tast_command_args=None):
-        """Runs the test.
-
-        Args:
-            host: An autotest host instance.
-            client_tast_test: String defining which tast test to run.
-            max_duration_minutes: Float defining the maximum running time of the
-                managed sub-test.
-            tast_build_bundle: String defining which tast test bundle to build
-                and query for the client_test.
-            tast_varslist: list of strings that define dynamic variables made
-                available to tast tests at runtime via `tast run -var=name=value
-                ...`. Each string should be formatted as 'name=value'.
-            tast_command_args: list of strings that are passed as args to the
-                `tast run` command.
-        """
-        # Construct a suffix tag indicating which managing test is using logged
-        # data from the graphics_Power subtest.
-        trace_name = client_tast_test.split('.')[-1]
-
-        # workaround for running test locally since crrev/c/2374267 and
-        # crrev/i/2374267
-        if not tast_command_args:
-            tast_command_args = []
-        tast_command_args.extend([
-                'extraallowedbuckets=termina-component-testing,cros-containers-staging'
-        ])
-
-        # Define paths of signal files for basic RPC/IPC between sub-tests.
-        temp_io_root = '/tmp/%s/' % self.__class__.__name__
-        result_dir = os.path.join(temp_io_root, 'results')
-        signal_running_file = os.path.join(temp_io_root, 'signal_running')
-        signal_checkpoint_file = os.path.join(temp_io_root, 'signal_checkpoint')
-
-        # This test is responsible for creating/deleting root and resultdir.
-        logging.debug('Creating temporary IPC/RPC dir: %s', temp_io_root)
-        self._initialize_dir_on_host(host, temp_io_root)
-        self._initialize_dir_on_host(host, result_dir)
-
-        # Start background system performance monitoring process on the test
-        # target (via an autotest client 'power_Test').
-        logging.debug('Connecting to autotest client on host')
-        graphics_power_thread = graphics_power.GraphicsPowerThread(
-            host=host,
-            max_duration_minutes=max_duration_minutes,
-            test_tag='Trace' + '.' + trace_name,
-            pdash_note='',
-            result_dir=result_dir,
-            signal_running_file=signal_running_file,
-            signal_checkpoint_file=signal_checkpoint_file)
-        graphics_power_thread.start()
-
-        logging.info('Waiting for graphics_Power subtest to initialize...')
-        try:
-            graphics_power_thread.wait_until_running(timeout=120)
-        except Exception as err:
-            logging.exception(err)
-            raise error.TestFail(
-                'An error occured during graphics_Power subtest initialization')
-        logging.info('The graphics_Power subtest was properly initialized')
-
-        # Start repeated trace replay process on the test target (via a tast
-        # local test).
-        logging.info('Running Tast test: %s', client_tast_test)
-        tast_outputdir = os.path.join(self.outputdir, 'tast')
-        if not os.path.exists(tast_outputdir):
-            logging.debug('Creating tast outputdir: %s', tast_outputdir)
-            os.makedirs(tast_outputdir)
-
-        if not tast_varslist:
-            tast_varslist = []
-        tast_varslist.extend([
-            'graphics.TraceReplayExtended.resultDir=' + result_dir,
-            'graphics.TraceReplayExtended.signalRunningFile=' +
-            signal_running_file,
-            'graphics.TraceReplayExtended.signalCheckpointFile=' +
-            signal_checkpoint_file,
-        ])
-
-        tast_instance = tast.tast(
-            job=self.job, bindir=self.bindir, outputdir=tast_outputdir)
-        tast_manager_thread = TastManagerThread(
-            host,
-            tast_instance,
-            client_tast_test,
-            max_duration_minutes,
-            tast_build_bundle,
-            varslist=tast_varslist,
-            command_args=tast_command_args)
-        tast_manager_thread.start()
-
-        # Block until both subtests finish.
-        threads = [graphics_power_thread, tast_manager_thread]
-        stop_attempts = 0
-        while threads:
-            # TODO(ryanneph): Move stop signal emission to tast test instance.
-            if (not tast_manager_thread.is_alive() and
-                    graphics_power_thread.is_alive() and stop_attempts < 1):
-                logging.info('Attempting to stop graphics_Power thread')
-                graphics_power_thread.stop(timeout=0)
-                stop_attempts += 1
-
-            # Raise test failure if graphics_Power thread ends before tast test.
-            if (not graphics_power_thread.is_alive() and
-                    tast_manager_thread.is_alive()):
-                raise error.TestFail(
-                    'The graphics_Power subtest ended too soon.')
-
-            for thread in list(threads):
-                if not thread.is_alive():
-                    logging.info('Thread "%s" has ended',
-                                 thread.__class__.__name__)
-                    threads.remove(thread)
-            time.sleep(1)
-
-        client_result_dir = os.path.join(self.outputdir, 'client_results')
-        logging.info('Saving client results to %s', client_result_dir)
-        host.get_file(result_dir, client_result_dir)
-
-        # Ensure the host filesystem is clean for the next test.
-        self._cleanup_dir_on_host(host, result_dir)
-        self._cleanup_dir_on_host(host, temp_io_root)
-
-        # TODO(ryanneph): Implement results parsing/analysis/reporting
+    def run_once(self, *args, **kwargs):
+        kwargs['client_tast_test'] = 'graphics.TraceReplayExtended.' + kwargs[
+            'client_tast_test']
+        kwargs.setdefault('pdash_note', 'vm:crostini')
+        super(graphics_TraceReplayExtended, self).run_once(*args, **kwargs)
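The rewritten run_once above is now a thin wrapper: it expands the short trace name passed by the control file into the full Tast test path and tags power-dashboard uploads as Crostini runs before deferring to a shared base implementation. A minimal sketch of that delegation pattern follows; GraphicsTraceReplayServerBase is a placeholder name, since the actual parent class is defined elsewhere in this change.

# Sketch only: GraphicsTraceReplayServerBase stands in for the shared base
# test that actually implements run_once in this change.
class graphics_TraceReplayExtended(GraphicsTraceReplayServerBase):
    """Autotest server test for running repeated trace replays in Crostini."""
    version = 1

    def run_once(self, *args, **kwargs):
        # Qualify the short name from the control file into the full Tast
        # test path: '<name>' -> 'graphics.TraceReplayExtended.<name>'.
        kwargs['client_tast_test'] = ('graphics.TraceReplayExtended.' +
                                      kwargs['client_tast_test'])
        # Mark dashboard uploads as Crostini runs unless the caller already
        # chose a note.
        kwargs.setdefault('pdash_note', 'vm:crostini')
        super(graphics_TraceReplayExtended, self).run_once(*args, **kwargs)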
diff --git a/server/site_tests/hardware_DiskFirmwareUpgrade/control b/server/site_tests/hardware_DiskFirmwareUpgrade/control
index 1e2adde..c155435 100644
--- a/server/site_tests/hardware_DiskFirmwareUpgrade/control
+++ b/server/site_tests/hardware_DiskFirmwareUpgrade/control
@@ -10,6 +10,7 @@
 TEST_TYPE = 'server'
 DEPENDENCIES = 'storage:ssd'
 ATTRIBUTES = "suite:experimental"
+PY_VERSION = 3
 
 DOC = """
 The test uses a list of firmware packages and applies them to the DUT.
diff --git a/server/site_tests/hardware_DiskFirmwareUpgrade/hardware_DiskFirmwareUpgrade.py b/server/site_tests/hardware_DiskFirmwareUpgrade/hardware_DiskFirmwareUpgrade.py
index 2724b57..7f058ef 100644
--- a/server/site_tests/hardware_DiskFirmwareUpgrade/hardware_DiskFirmwareUpgrade.py
+++ b/server/site_tests/hardware_DiskFirmwareUpgrade/hardware_DiskFirmwareUpgrade.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -61,10 +62,10 @@
             model = self._get_device_name()
 
         i = 0
-        for model_re, package_desc in disk_fw_packages.iteritems():
+        for model_re, package_desc in list(disk_fw_packages.items()):
             if not re.match(model_re, model):
                 continue
-            for p, results in package_desc.iteritems():
+            for p, results in list(package_desc.items()):
                 result_dir = '-'.join([self.TEST_NAME, str(i), p])
                 if p.startswith('test_'):
                     self._client_at.run_test(
@@ -92,4 +93,3 @@
                         disk_firmware_package=self.DEFAULT_LOCATION,
                         upgrade_required=results[1])
                 i += 1
-
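The substantive change in this file is the dictionary-iteration update: dict.iteritems() does not exist on Python 3, so the loops now use list(dict.items()), which works on both interpreters and snapshots the items up front. A small self-contained illustration of the same pattern (the firmware-package data below is invented purely for the example):

# Python 2/3-portable iteration over a nested dict, mirroring the change above.
disk_fw_packages = {
    'example-model.*': {
        'fw_package_a': ('0001', True),
        'test_fw_package_b': ('0002', False),
    },
}

for model_re, package_desc in list(disk_fw_packages.items()):
    # Previously disk_fw_packages.iteritems(), which is Python 2 only.
    for package, results in list(package_desc.items()):
        print('%s %s %s' % (model_re, package, results))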
diff --git a/server/site_tests/hardware_MemoryIntegrity/control.idle b/server/site_tests/hardware_MemoryIntegrity/control.idle
index 301df07..83d99de 100644
--- a/server/site_tests/hardware_MemoryIntegrity/control.idle
+++ b/server/site_tests/hardware_MemoryIntegrity/control.idle
@@ -8,6 +8,7 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This test calls hardware_StorageFio to write data once to the ramfs and
diff --git a/server/site_tests/hardware_MemoryIntegrity/control.memory_qual b/server/site_tests/hardware_MemoryIntegrity/control.memory_qual
index 89e8b3a..cfc15bc 100644
--- a/server/site_tests/hardware_MemoryIntegrity/control.memory_qual
+++ b/server/site_tests/hardware_MemoryIntegrity/control.memory_qual
@@ -8,6 +8,9 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+ATTRIBUTES = "suite:memory_qual2"
+EXTENDED_TIMEOUT = 9000 # 2.5 hours
+PY_VERSION = 3
 
 DOC = """
 This test calls hardware_StorageFio to write data once to the ramfs and
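Besides the Python 3 marker, this control file now opts the test into the memory_qual2 suite and declares an extended timeout. The added metadata, annotated with what each field appears to control based on how it is used throughout this change:

# Control-file metadata added above, annotated (values copied from the hunk).
ATTRIBUTES = "suite:memory_qual2"  # schedule the test as part of the memory_qual2 suite
EXTENDED_TIMEOUT = 9000            # seconds; 9000 s == 2.5 hours
PY_VERSION = 3                     # run the test under Python 3 instead of Python 2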
diff --git a/server/site_tests/hardware_MemoryIntegrity/control.quick b/server/site_tests/hardware_MemoryIntegrity/control.quick
index 0700a5c..b39b0e1 100644
--- a/server/site_tests/hardware_MemoryIntegrity/control.quick
+++ b/server/site_tests/hardware_MemoryIntegrity/control.quick
@@ -9,6 +9,7 @@
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
 ATTRIBUTES = "suite:experimental"
+PY_VERSION = 3
 
 DOC = """
 This test calls hardware_StorageFio to write data once to the ramfs and
diff --git a/server/site_tests/hardware_MemoryIntegrity/control.suspend b/server/site_tests/hardware_MemoryIntegrity/control.suspend
index 8bc083e..bc5316f 100644
--- a/server/site_tests/hardware_MemoryIntegrity/control.suspend
+++ b/server/site_tests/hardware_MemoryIntegrity/control.suspend
@@ -8,6 +8,7 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This test calls hardware_StorageFio to write data once to the ramfs and
diff --git a/server/site_tests/hardware_MemoryIntegrity/hardware_MemoryIntegrity.py b/server/site_tests/hardware_MemoryIntegrity/hardware_MemoryIntegrity.py
index 3c7fdfe..67a448c 100644
--- a/server/site_tests/hardware_MemoryIntegrity/hardware_MemoryIntegrity.py
+++ b/server/site_tests/hardware_MemoryIntegrity/hardware_MemoryIntegrity.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_1_after b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_1_after
index f84a35f..807daa9 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_1_after
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_1_after
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_storage_qual_cq_1_after"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageQualBase_storage_qual_cq_1_after"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 70
 DEPENDENCIES = "storage_qual_cq_1"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_storage_qual_cq_1_after"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='after', tag='after', cq=True)
+            client_ip=machine, tag='after', client_tag='after', cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_1_before b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_1_before
index a8bb211..07d9eb6 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_1_before
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_1_before
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_storage_qual_cq_1_before"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageQualBase_storage_qual_cq_1_before"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 100
 DEPENDENCIES = "storage_qual_cq_1"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_storage_qual_cq_1_before"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='before', tag='before', cq=True)
+            client_ip=machine, tag='before', client_tag='before', cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_2_after b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_2_after
index c2b8134..0041583 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_2_after
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_2_after
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_storage_qual_cq_2_after"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageQualBase_storage_qual_cq_2_after"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 70
 DEPENDENCIES = "storage_qual_cq_2"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_storage_qual_cq_2_after"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='after', tag='after', cq=True)
+            client_ip=machine, tag='after', client_tag='after', cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_2_before b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_2_before
index 43fb1ed..e25e1ed 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_2_before
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualBase_storage_qual_cq_2_before
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_storage_qual_cq_2_before"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageQualBase_storage_qual_cq_2_before"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 100
 DEPENDENCIES = "storage_qual_cq_2"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_storage_qual_cq_2_before"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='before', tag='before', cq=True)
+            client_ip=machine, tag='before', client_tag='before', cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualTrimStress_storage_qual_cq_2_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualTrimStress_storage_qual_cq_2_0
index 666c9fd..80b9c42 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualTrimStress_storage_qual_cq_2_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualTrimStress_storage_qual_cq_2_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_storage_qual_cq_2_0"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageQualTrimStress_storage_qual_cq_2_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 80
 DEPENDENCIES = "storage_qual_cq_2"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_storage_qual_cq_2_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualTrimStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=1800, cq=True)
+            client_ip=machine, duration=1800.0, cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualTrimStress_storage_qual_cq_2_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualTrimStress_storage_qual_cq_2_1
index 566a2b3..c58d551 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualTrimStress_storage_qual_cq_2_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageQualTrimStress_storage_qual_cq_2_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_storage_qual_cq_2_1"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageQualTrimStress_storage_qual_cq_2_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 80
 DEPENDENCIES = "storage_qual_cq_2"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_storage_qual_cq_2_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualTrimStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=1800, cq=True)
+            client_ip=machine, duration=1800.0, cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_soak_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_soak_0
index fadf017..59ad12b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_soak_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_soak_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_storage_qual_cq_1_soak_0"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageStress_storage_qual_cq_1_soak_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "storage_qual_cq_1"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_storage_qual_cq_1_soak_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', suspend_duration=300, tag='soak', duration=14400, cq=True, power_command='wait')
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400, cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_soak_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_soak_1
index 6754193..4ecc35b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_soak_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_soak_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_storage_qual_cq_1_soak_1"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageStress_storage_qual_cq_1_soak_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "storage_qual_cq_1"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_storage_qual_cq_1_soak_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', suspend_duration=300, tag='soak', duration=14400, cq=True, power_command='wait')
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400, cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_suspend b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_suspend
index 950ff1b..2eff9de 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_suspend
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_1_suspend
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_storage_qual_cq_1_suspend"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageStress_storage_qual_cq_1_suspend"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 80
 DEPENDENCIES = "storage_qual_cq_1"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_storage_qual_cq_1_suspend"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', suspend_duration=120, tag='suspend', duration=1800, cq=True, power_command='suspend')
+            client_ip=machine, tag='suspend', power_command='suspend', storage_test_command='full_write', suspend_duration=120, duration=1800.0, cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_2_soak_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_2_soak_0
index deacfce..00cb03d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_2_soak_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_2_soak_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_storage_qual_cq_2_soak_0"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageStress_storage_qual_cq_2_soak_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "storage_qual_cq_2"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_storage_qual_cq_2_soak_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', suspend_duration=300, tag='soak', duration=14400, cq=True, power_command='wait')
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400, cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_2_soak_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_2_soak_1
index 4a89bd2..245d577 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_2_soak_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_cq_hardware_StorageStress_storage_qual_cq_2_soak_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_storage_qual_cq_2_soak_1"
 ATTRIBUTES = "suite:storage_qual_cq"
 PURPOSE = "hardware_StorageStress_storage_qual_cq_2_soak_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "storage_qual_cq_2"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_storage_qual_cq_2_soak_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', suspend_duration=300, tag='soak', duration=14400, cq=True, power_command='wait')
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400, cq=True)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualBase_storage_qual_external_after b/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualBase_storage_qual_external_after
index fe65079..7ad7313 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualBase_storage_qual_external_after
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualBase_storage_qual_external_after
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 70
 DEPENDENCIES = "storage_qual_external"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_storage_qual_external_after"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualBase_storage_qual_external_before b/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualBase_storage_qual_external_before
index 6248ffb..f7b4ac4 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualBase_storage_qual_external_before
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualBase_storage_qual_external_before
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 100
 DEPENDENCIES = "storage_qual_external"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_storage_qual_external_before"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualSuspendStress_storage_qual_external_suspend_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualSuspendStress_storage_qual_external_suspend_0
index 3f42c5c..93c7629 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualSuspendStress_storage_qual_external_suspend_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualSuspendStress_storage_qual_external_suspend_0
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 80
 DEPENDENCIES = "storage_qual_external"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_storage_qual_external_suspend_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualSuspendStress_storage_qual_external_suspend_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualSuspendStress_storage_qual_external_suspend_1
index 2d45716..68dc577 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualSuspendStress_storage_qual_external_suspend_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_external_hardware_StorageQualSuspendStress_storage_qual_external_suspend_1
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 80
 DEPENDENCIES = "storage_qual_external"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_storage_qual_external_suspend_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_retention_after b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_retention_after
index 21f66d0..47e03b50 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_retention_after
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_retention_after
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_retention_after"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualBase_retention_after"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 70
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_retention_after"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='after', tag='after')
+            client_ip=machine, tag='after', client_tag='after')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_retention_before b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_retention_before
index 6c63622..c96152f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_retention_before
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_retention_before
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_retention_before"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualBase_retention_before"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 100
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_retention_before"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='before', tag='before')
+            client_ip=machine, tag='before', client_tag='before')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_suspend_after b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_suspend_after
index c51af48..e10bcef 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_suspend_after
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_suspend_after
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_suspend_after"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualBase_suspend_after"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 70
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_suspend_after"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='after', tag='after')
+            client_ip=machine, tag='after', client_tag='after')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_suspend_before b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_suspend_before
index 2f7a453..22a078e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_suspend_before
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_suspend_before
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_suspend_before"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualBase_suspend_before"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 100
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_suspend_before"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='before', tag='before')
+            client_ip=machine, tag='before', client_tag='before')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_trim_after b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_trim_after
index 037d79d..5bebb5e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_trim_after
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_trim_after
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_trim_after"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualBase_trim_after"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 70
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_trim_after"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='after', tag='after')
+            client_ip=machine, tag='after', client_tag='after')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_trim_before b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_trim_before
index 1d62417..654149c 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_trim_before
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualBase_trim_before
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_trim_before"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualBase_trim_before"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 100
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_trim_before"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='before', tag='before')
+            client_ip=machine, tag='before', client_tag='before')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualCheckSetup_retention b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualCheckSetup_retention
index 914986f..f388011 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualCheckSetup_retention
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualCheckSetup_retention
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualCheckSetup_retention"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualCheckSetup_retention"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 REQUIRE_SSP = False
 PRIORITY = 110
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualCheckSetup_retention"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_0
index 062006a..b3cf502 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_0"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_1
index 3a37c6e..33784ea 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_1"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_10 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_10
index 365189c..078689f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_10
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_10
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_10"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_10"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_10"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_11 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_11
index d494742..1ce643d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_11
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_11
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_11"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_11"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_11"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_12 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_12
index 61541c3..c146692 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_12
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_12
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_12"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_12"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_12"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_13 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_13
index 3b6eceb..835c537 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_13
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_13
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_13"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_13"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_13"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_14 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_14
index c312344..b853d6f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_14
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_14
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_14"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_14"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_14"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_15 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_15
index 6bf15a7..c6ee311 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_15
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_15
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_15"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_15"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_15"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_16 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_16
index 2e72656..55622d7 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_16
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_16
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_16"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_16"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_16"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_17 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_17
index 7d5d036..b00a439 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_17
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_17
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_17"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_17"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_17"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_18 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_18
index 706168e..1d91089 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_18
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_18
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_18"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_18"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_18"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_19 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_19
index f5d2178..b5e083a 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_19
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_19
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_19"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_19"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_19"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_2 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_2
index 0cc5be8..6cee10f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_2
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_2
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_2"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_2"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_2"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_20 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_20
index 321f751..8056020 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_20
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_20
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_20"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_20"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_20"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_21 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_21
index e8fd631..50f9b30 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_21
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_21
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_21"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_21"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_21"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_22 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_22
index a93b478..b61f3b7 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_22
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_22
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_22"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_22"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_22"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_23 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_23
index 9e147b1..637c100 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_23
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_23
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_23"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_23"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_23"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_24 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_24
index b7353ef..6165ced 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_24
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_24
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_24"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_24"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_24"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_25 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_25
index c3feeb3..f6b4f3e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_25
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_25
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_25"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_25"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_25"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_26 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_26
index abc8519..55a3469 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_26
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_26
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_26"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_26"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_26"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_27 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_27
index a87ee68..b9c35a7 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_27
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_27
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_27"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_27"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_27"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_28 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_28
index d0d1aac..f0f9fd7 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_28
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_28
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_28"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_28"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_28"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_29 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_29
index 74d0954..19b29b8 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_29
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_29
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_29"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_29"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_29"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_3 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_3
index 141752c..09255e0 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_3
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_3
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_3"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_3"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_3"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_30 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_30
index 98c5031..ca6471b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_30
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_30
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_30"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_30"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_30"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_31 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_31
index 34db32e..d52956d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_31
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_31
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_31"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_31"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_31"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_32 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_32
index e1a81e1..9ce7001 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_32
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_32
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_32"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_32"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_32"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_33 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_33
index b851472..c7c7833 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_33
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_33
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_33"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_33"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_33"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_34 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_34
index 3f7f151..67f02db 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_34
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_34
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_34"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_34"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_34"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_35 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_35
index 2ee2079..bd21697 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_35
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_35
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_35"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_35"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_35"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_36 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_36
index 6392659..ed7d546 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_36
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_36
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_36"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_36"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_36"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_37 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_37
index e136587..ee45c0e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_37
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_37
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_37"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_37"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_37"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_38 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_38
index 4dbd1ae..3fa3572 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_38
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_38
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_38"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_38"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_38"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_39 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_39
index 3d4941b..6fd3a6e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_39
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_39
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_39"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_39"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_39"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_4 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_4
index 33f7821..962146b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_4
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_4
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_4"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_4"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_4"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_40 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_40
index 4c892b0..0048900 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_40
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_40
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_40"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_40"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_40"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_41 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_41
index f0d9a0e..6b933a1 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_41
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_41
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_41"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_41"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_41"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_5 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_5
index c4389dd..52479a2 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_5
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_5
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_5"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_5"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_5"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_6 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_6
index 7eddf92..2f10728 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_6
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_6
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_6"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_6"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_6"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_7 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_7
index ba9e42a..bce00ce 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_7
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_7
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_7"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_7"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_7"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_8 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_8
index 3da9a85..b04ec74 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_8
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_8
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_8"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_8"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_8"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_9 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_9
index 5f4ae9a..1a5c229 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_9
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualSuspendStress_suspend_suspend_9
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_9"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_9"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_9"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=14400, tag='suspend')
+            client_ip=machine, tag='suspend', duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_0
index 2527a38..e37128e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_0"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_0"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
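
Condensed for readability: every control file in this series, suspend and trim variants alike, receives the same header update alongside the AUTHOR and copyright changes shown in each hunk; only NAME, PURPOSE, DOC and DEPENDENCIES ("trim" here, "suspend" above) differ per file. An illustrative consolidation of the removed and added header lines:

# removed in each hunk
REQUIRE_SSP = False
PRIORITY = 80

# added in each hunk
PY_VERSION = 3
REQUIRE_SSP = True
PRIORITY = 60
FAST = False
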
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_1
index 3494407..a9481c8 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_1"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_1"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_10 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_10
index f320807..4a5c1e2 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_10
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_10
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_10"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_10"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_10"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_11 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_11
index d1fd08f..46e41df 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_11
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_11
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_11"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_11"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_11"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_12 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_12
index 3f5e406..3bcab74 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_12
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_12
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_12"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_12"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_12"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_13 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_13
index 4ce513a..deed705 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_13
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_13
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_13"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_13"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_13"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_14 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_14
index 16cfa23..1369259 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_14
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_14
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_14"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_14"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_14"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_15 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_15
index d925c94..a7e0ae5 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_15
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_15
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_15"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_15"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_15"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_16 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_16
index b8604ee..500e601 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_16
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_16
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_16"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_16"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_16"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_17 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_17
index c24350e..84b5721 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_17
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_17
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_17"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_17"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_17"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_18 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_18
index 77706be..ee04d05 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_18
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_18
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_18"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_18"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_18"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_19 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_19
index 5446aff..e60c22c 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_19
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_19
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_19"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_19"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_19"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_2 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_2
index 43de760..ced9e49 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_2
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_2
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_2"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_2"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_2"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_20 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_20
index 68be124..bbda1ae 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_20
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_20
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_20"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_20"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_20"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_21 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_21
index c12c4d0..511491d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_21
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_21
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_21"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_21"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_21"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_22 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_22
index a15f4f3..d37b3d2 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_22
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_22
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_22"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_22"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_22"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_23 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_23
index ca650d9..5c424db 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_23
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_23
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_23"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_23"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_23"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_24 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_24
index 0374b47..8636701 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_24
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_24
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_24"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_24"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_24"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_25 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_25
index 727b115..aa5537c 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_25
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_25
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_25"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_25"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_25"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_26 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_26
index f0aa0a3..bbade62 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_26
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_26
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_26"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_26"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_26"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_27 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_27
index 2dc579d..fa17035 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_27
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_27
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_27"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_27"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_27"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_28 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_28
index ab31786..7c22824 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_28
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_28
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_28"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_28"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_28"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_29 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_29
index 81afb56..bd5c4ec 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_29
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_29
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_29"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_29"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_29"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_3 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_3
index fce4cd4..d4d57c6 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_3
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_3
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_3"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_3"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_3"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_30 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_30
index e9ca56e..de65e96 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_30
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_30
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_30"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_30"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_30"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_31 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_31
index 08cf3f2..ff44c77 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_31
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_31
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_31"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_31"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_31"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_32 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_32
index 95073e2..9634d68 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_32
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_32
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_32"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_32"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_32"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_33 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_33
index 02d2e88..2744c19 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_33
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_33
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_33"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_33"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_33"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_34 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_34
index 4ba38f8..29e326b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_34
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_34
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_34"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_34"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_34"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_35 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_35
index 9d7230f..ec4f9c8 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_35
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_35
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_35"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_35"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_35"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_36 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_36
index 68b9699..b0fd73b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_36
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_36
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_36"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_36"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_36"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_37 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_37
index 1f15ca7..ecb0291 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_37
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_37
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_37"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_37"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_37"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_38 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_38
index 3f0aa43..cb4864b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_38
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_38
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_38"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_38"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_38"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_39 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_39
index 4b529b3..4803f08 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_39
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_39
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_39"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_39"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_39"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_4 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_4
index 5d1bac3..30a2ece 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_4
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_4
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_4"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_4"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_4"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_40 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_40
index 0aa68e8..a21e130 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_40
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_40
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_40"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_40"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_40"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_41 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_41
index fe8234b..c9cada7 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_41
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_41
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_41"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_41"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_41"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_5 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_5
index e14f4ab..1d0f9fd 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_5
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_5
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_5"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_5"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_5"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_6 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_6
index c83eb3e..e62f80a 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_6
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_6
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_6"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_6"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_6"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_7 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_7
index 1803d7f..55c22d9 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_7
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_7
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_7"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_7"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_7"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_8 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_8
index 180ebf3..2ca4bce 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_8
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_8
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_8"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_8"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_8"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_9 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_9
index f59fec7..128435f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_9
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageQualTrimStress_trim_9
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_9"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageQualTrimStress_trim_9"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_9"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_0
index 6444ba5..b958849 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_0"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
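
# Editorial sketch (not part of the patch): the hunks above and below repeat one
# mechanical update per control file -- the copyright line and AUTHOR are refreshed,
# PY_VERSION / REQUIRE_SSP / FAST metadata is added, and the unconditional
# bug_id/part_id keyvals are wrapped in a NameError guard. The retention_soak hunks
# additionally reorder the job.run_test() keyword arguments without changing their
# values. A consolidated sketch of the guarded keyval write, as it appears in each
# patched control file, is reproduced here for readability; that bug_id and part_id
# are optionally injected into the control namespace by the storage-qual harness is
# an assumption inferred from the guard itself.
#
#     keyval = dict()
#     keyval['storage_qual_version'] = 1
#     try:
#         keyval['bug_id'] = bug_id
#         keyval['part_id'] = part_id
#     except NameError:
#         # bug_id and/or part_id were not defined for this run; record
#         # only the version keyval.
#         pass
#     utils.write_keyval(job.resultdir, keyval)
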
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_1
index ec6316c..38890bd 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_1"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_10 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_10
index 941489b..d566905 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_10
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_10
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_10"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_10"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_10"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_11 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_11
index fd1f6d2..820adbd 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_11
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_11
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_11"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_11"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_11"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_12 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_12
index d7630d7..e71926e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_12
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_12
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_12"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_12"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_12"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_13 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_13
index b34880f..6910b60 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_13
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_13
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_13"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_13"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_13"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_14 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_14
index abadb47..00da6c6 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_14
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_14
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_14"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_14"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_14"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_15 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_15
index f2bd224..ecfa7a2 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_15
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_15
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_15"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_15"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_15"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_16 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_16
index b56e11b..19046b6 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_16
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_16
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_16"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_16"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_16"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_17 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_17
index 4e052aa..e8ba61f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_17
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_17
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_17"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_17"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_17"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_18 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_18
index e8abdc2..b203566 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_18
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_18
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_18"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_18"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_18"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_19 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_19
index a5f4298..62686f6 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_19
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_19
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_19"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_19"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_19"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_2 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_2
index ffa461c..3e0123d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_2
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_2
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_2"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_2"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_2"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_20 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_20
index 31a29cc..98ae44a 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_20
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_20
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_20"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_20"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_20"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_21 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_21
index 3fd2abe..6ccb29e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_21
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_21
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_21"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_21"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_21"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_22 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_22
index e545dab..3369603 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_22
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_22
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_22"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_22"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_22"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_23 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_23
index 25c33e4..a1cb4a9 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_23
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_23
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_23"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_23"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_23"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_24 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_24
index b16425c..65c73b6 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_24
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_24
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_24"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_24"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_24"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_25 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_25
index fc7625f..c97b711 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_25
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_25
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_25"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_25"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_25"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_26 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_26
index 1d0966d..7da4e4f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_26
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_26
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_26"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_26"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_26"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_27 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_27
index 309724f..f355ad0 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_27
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_27
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_27"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_27"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_27"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_28 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_28
index 881573c..96f4be5 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_28
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_28
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_28"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_28"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_28"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_29 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_29
index 197b451..423da5d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_29
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_29
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_29"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_29"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_29"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_3 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_3
index f29f3df..66ac3f8 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_3
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_3
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_3"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_3"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_3"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_30 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_30
index fd084a7..8ca3d56 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_30
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_30
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_30"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_30"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_30"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_31 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_31
index dd29e19..aa18ef7 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_31
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_31
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_31"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_31"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_31"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_32 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_32
index 3b383b0..e84ccc6 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_32
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_32
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_32"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_32"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_32"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_33 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_33
index e4aab34..3aa9d65 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_33
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_33
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_33"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_33"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_33"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_34 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_34
index 667f690..5fc27e2 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_34
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_34
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_34"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_34"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_34"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_35 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_35
index bf1d37b..a54c145 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_35
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_35
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_35"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_35"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_35"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_36 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_36
index dd69393..9178722 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_36
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_36
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_36"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_36"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_36"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_37 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_37
index 8c4a0ad..e393b49 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_37
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_37
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_37"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_37"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_37"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_38 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_38
index bd11f84..d04fc82 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_38
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_38
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_38"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_38"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_38"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_39 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_39
index 97c4dc6..29cc8c0 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_39
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_39
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_39"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_39"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_39"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_4 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_4
index deafa70..9db8949 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_4
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_4
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_4"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_4"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_4"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_40 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_40
index 4b50662..92f7fba 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_40
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_40
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_40"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_40"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_40"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_41 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_41
index aff350c..a81dbfe 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_41
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_41
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_41"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_41"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_41"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_5 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_5
index ffeb820..6aee024 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_5
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_5
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_5"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_5"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_5"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_6 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_6
index a958d7d..85e4b64 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_6
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_6
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_6"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_6"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_6"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_7 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_7
index d51d367..e9888b7 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_7
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_7
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_7"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_7"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_7"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_8 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_8
index 91b6ea8..80ada06 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_8
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_8
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_8"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_8"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_8"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_9 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_9
index 592000e..42c6981 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_9
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_soak_9
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_9"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_soak_9"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_9"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
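
The numbered retention_soak control files above are identical except for the numeric suffix in NAME, PURPOSE and DOC, which suggests they could be stamped out from a single template. A hedged sketch of such a generator follows; the template text mirrors the post-patch header fields (PY_VERSION = 3, REQUIRE_SSP = True, FAST = False), but the generator itself, the output directory and the file count are assumptions for illustration, not a tool that exists in this tree.

# Hypothetical generator for the numbered retention_soak control files.
# The keyval and run() sections are omitted for brevity; see the hunks above.
import os

TEMPLATE = '''\
# Copyright 2020 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

AUTHOR = "chromeos-storage"
NAME = "hardware_StorageStress_retention_soak_{idx}"
ATTRIBUTES = "suite:storage_qual"
PURPOSE = "hardware_StorageStress_retention_soak_{idx}"
TEST_CATEGORY = "Stress"
TEST_CLASS = "Hardware"
TEST_TYPE = "server"
PY_VERSION = 3
REQUIRE_SSP = True
PRIORITY = 90
DEPENDENCIES = "retention"
JOB_RETRIES = 0
FAST = False

DOC = "hardware_StorageStress_retention_soak_{idx}"
'''

def generate(outdir, count=42):
    # Writes one control file per soak iteration, numbered 0..count-1.
    os.makedirs(outdir, exist_ok=True)
    for idx in range(count):
        name = ('control.storage_qual_hardware_StorageStress_'
                'retention_soak_%d' % idx)
        with open(os.path.join(outdir, name), 'w') as f:
            f.write(TEMPLATE.format(idx=idx))

if __name__ == '__main__':
    generate('/tmp/storage_qual_controls')
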
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_suspend b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_suspend
index acd6126..67130d9 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_suspend
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_retention_suspend
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_suspend"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_retention_suspend"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_suspend"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=604800, tag='suspend', power_command='suspend', suspend_duration=43200)
+            client_ip=machine, tag='suspend', power_command='suspend', storage_test_command='full_write', suspend_duration=43200, duration=604800)
 
 parallel_simple(run, machines)
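
Compared with the soak control files, the retention_suspend variant above is where the parameters diverge: power_command switches from 'wait' to 'suspend', suspend_duration is 43200 s (12 h) instead of 300 s, duration is 604800 s (7 days) instead of 14400 s (4 h), and this patch also lowers its PRIORITY from 80 to 60. The sketch below lays the two keyword sets side by side; the values are read off the hunks, while run_test_stub and the 'dut.example' address are assumptions so the snippet runs standalone (a real control file calls job.run_test, which dispatches the server-side test).

# Side-by-side of the run_test keyword sets used by the soak and suspend
# variants of hardware_StorageStress in these control files.
VARIANTS = {
    'soak': dict(tag='soak', power_command='wait',
                 storage_test_command='full_write',
                 suspend_duration=300, duration=14400),
    'suspend': dict(tag='suspend', power_command='suspend',
                    storage_test_command='full_write',
                    suspend_duration=43200, duration=604800),
}

def run_test_stub(test, **kwargs):
    # Stand-in for job.run_test: just prints the arguments it would pass.
    print(test, ' '.join('%s=%s' % kv for kv in sorted(kwargs.items())))

for name, kwargs in sorted(VARIANTS.items()):
    # client_ip would be the DUT address supplied by the scheduler.
    run_test_stub('hardware_StorageStress', client_ip='dut.example', **kwargs)
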
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_0
index 11b1375..0dbb31e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_0"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_1
index d8fe95c..783ad7b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_1"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_10 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_10
index 5bda065..3b4b0bd 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_10
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_10
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_10"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_10"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_10"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_11 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_11
index 08a4536..9bb262f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_11
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_11
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_11"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_11"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_11"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_12 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_12
index 164c0bf..1abdac3 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_12
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_12
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_12"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_12"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_12"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_13 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_13
index b936699..88946fe 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_13
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_13
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_13"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_13"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_13"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_14 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_14
index 710fdfc..d94870c 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_14
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_14
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_14"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_14"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_14"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_15 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_15
index 24031d1..cd29594 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_15
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_15
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_15"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_15"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_15"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_16 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_16
index 308651f..3d4a65d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_16
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_16
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_16"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_16"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_16"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_17 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_17
index f70631a..c116455 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_17
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_17
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_17"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_17"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_17"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_18 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_18
index 4f4b157..296c362 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_18
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_18
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_18"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_18"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_18"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_19 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_19
index 1b8b87e..6c02192 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_19
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_19
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_19"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_19"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_19"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_2 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_2
index a3b4cfe..3645e61 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_2
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_2
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_2"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_2"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_2"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_20 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_20
index 9ca119e..f67d4bd 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_20
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_20
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_20"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_20"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_20"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_21 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_21
index 5ce700f..c722232 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_21
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_21
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_21"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_21"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_21"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_22 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_22
index bf22c47..5ea3074 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_22
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_22
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_22"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_22"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_22"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_23 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_23
index 2306842..0477eda 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_23
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_23
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_23"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_23"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_23"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_24 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_24
index 7abd10a..de4c61a 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_24
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_24
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_24"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_24"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_24"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_25 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_25
index 1dda139..27dc1f7 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_25
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_25
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_25"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_25"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_25"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_26 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_26
index 4a3e4f2..7ef9563 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_26
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_26
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_26"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_26"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_26"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_27 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_27
index 10a49d7..8ec3f86 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_27
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_27
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_27"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_27"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_27"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_28 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_28
index 1b4b6ef..d2fd533 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_28
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_28
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_28"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_28"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_28"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_29 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_29
index a6b4370..7f7124f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_29
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_29
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_29"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_29"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_29"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_3 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_3
index facc56d..84be859 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_3
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_3
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_3"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_3"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_3"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_30 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_30
index 68e3b6f..fe94b10 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_30
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_30
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_30"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_30"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_30"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_31 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_31
index 0a46109..b0e8e3d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_31
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_31
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_31"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_31"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_31"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_32 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_32
index 5036377..143cf2f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_32
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_32
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_32"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_32"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_32"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_33 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_33
index 88f71e2..7158817 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_33
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_33
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_33"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_33"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_33"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_34 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_34
index 16f32d0..cd19c24 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_34
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_34
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_34"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_34"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_34"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_35 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_35
index b1614db..df8513d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_35
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_35
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_35"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_35"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_35"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_36 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_36
index 2a368bd..ab5d9d4 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_36
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_36
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_36"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_36"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_36"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_37 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_37
index 08118d8..57b6944 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_37
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_37
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_37"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_37"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_37"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_38 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_38
index 42d9987..5dbbc2f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_38
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_38
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_38"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_38"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_38"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_39 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_39
index f28e5d0..1316dbe 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_39
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_39
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_39"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_39"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_39"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_4 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_4
index c82b998..d5f5f1b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_4
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_4
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_4"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_4"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_4"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_40 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_40
index ea5cd75..d8ed557 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_40
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_40
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_40"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_40"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_40"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_41 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_41
index 97442c0..330cd63 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_41
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_41
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_41"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_41"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_41"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_5 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_5
index 3b559a7..ad6b207 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_5
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_5
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_5"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_5"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_5"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_6 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_6
index 5ce3437..dba23e7 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_6
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_6
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_6"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_6"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_6"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_7 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_7
index 387fc79..c4b97c0 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_7
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_7
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_7"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_7"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_7"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_8 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_8
index e5011be..7f015d2 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_8
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_8
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_8"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_8"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_8"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_9 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_9
index 2fdeb15..90a3759 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_9
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_suspend_soak_9
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_9"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_suspend_soak_9"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_9"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_0
index c9cd2bc..4d6f658 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_0"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_1
index e4ff1c0..828c252 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_1"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_10 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_10
index a0da3d6..bb5ae88 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_10
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_10
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_10"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_10"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_10"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_11 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_11
index 6048dab..093b121 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_11
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_11
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_11"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_11"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_11"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_12 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_12
index 93655ea..28c8b92 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_12
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_12
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_12"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_12"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_12"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_13 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_13
index 52f6f4d..9e98f9d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_13
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_13
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_13"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_13"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_13"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_14 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_14
index ae04759..88f1a52 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_14
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_14
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_14"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_14"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_14"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_15 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_15
index 7966d20..648b47d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_15
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_15
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_15"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_15"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_15"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_16 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_16
index b7772fc..7cc7f72 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_16
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_16
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_16"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_16"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_16"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_17 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_17
index 7f92977..af12f9e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_17
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_17
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_17"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_17"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_17"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_18 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_18
index e6cb318..1a96e77 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_18
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_18
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_18"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_18"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_18"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_19 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_19
index 5b5434b..592c04f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_19
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_19
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_19"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_19"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_19"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_2 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_2
index 6f86410..51d5da8 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_2
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_2
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_2"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_2"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_2"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_20 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_20
index b2127b9..d518273 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_20
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_20
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_20"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_20"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_20"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_21 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_21
index 6c52471..326f848 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_21
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_21
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_21"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_21"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_21"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_22 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_22
index 82ce8f9..442fa3d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_22
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_22
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_22"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_22"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_22"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_23 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_23
index 1636d07..c36c5f7 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_23
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_23
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_23"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_23"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_23"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_24 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_24
index c7b2907..30fb736 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_24
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_24
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_24"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_24"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_24"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_25 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_25
index 39938be..5ca1571 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_25
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_25
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_25"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_25"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_25"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_26 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_26
index 50e3f7f..7d2f034 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_26
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_26
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_26"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_26"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_26"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_27 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_27
index 94eaea8..3d00845 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_27
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_27
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_27"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_27"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_27"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_28 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_28
index 7587c60..e7e7799 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_28
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_28
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_28"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_28"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_28"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_29 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_29
index 9aad41e..30a8c9a 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_29
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_29
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_29"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_29"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_29"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_3 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_3
index 6620897..2f5d39b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_3
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_3
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_3"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_3"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_3"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_30 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_30
index bdaee7d..f0ae1de 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_30
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_30
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_30"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_30"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_30"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_31 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_31
index 9d90058..38e5d59 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_31
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_31
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_31"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_31"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_31"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_32 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_32
index 7372a13..577773e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_32
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_32
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_32"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_32"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_32"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_33 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_33
index ac387b1..9c50e17 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_33
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_33
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_33"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_33"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_33"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_34 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_34
index 854d216..1b0a9f9 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_34
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_34
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_34"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_34"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_34"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_35 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_35
index affe08d..d2ef24b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_35
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_35
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_35"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_35"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_35"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_36 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_36
index f0d777d..3186e10 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_36
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_36
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_36"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_36"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_36"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_37 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_37
index 0536e30..b61a9f6 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_37
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_37
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_37"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_37"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_37"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_38 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_38
index b78abee..67c6749 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_38
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_38
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_38"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_38"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_38"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_39 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_39
index dc5fab0..addad29 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_39
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_39
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_39"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_39"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_39"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_4 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_4
index fffd914..29cf64e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_4
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_4
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_4"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_4"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_4"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_40 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_40
index 8420b6a..f4255a5 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_40
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_40
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_40"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_40"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_40"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_41 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_41
index 7834d46..6edcfa5 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_41
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_41
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_41"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_41"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_41"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_5 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_5
index dd1d530..847bdfa 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_5
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_5
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_5"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_5"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_5"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_6 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_6
index be7a4d3..ab09640 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_6
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_6
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_6"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_6"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_6"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_7 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_7
index 6afd00e..be6da77 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_7
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_7
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_7"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_7"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_7"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_8 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_8
index 5203576..292db9d 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_8
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_8
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_8"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_8"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_8"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_9 b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_9
index 8d82e66..922ca23 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_9
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_hardware_StorageStress_trim_soak_9
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_9"
 ATTRIBUTES = "suite:storage_qual"
 PURPOSE = "hardware_StorageStress_trim_soak_9"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_9"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=14400, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=14400)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_mini_soak_hardware_StorageQualBase_storage_qual_mini_soak_after b/server/site_tests/hardware_StorageQual/control.storage_qual_mini_soak_hardware_StorageQualBase_storage_qual_mini_soak_after
new file mode 100644
index 0000000..97c67f7
--- /dev/null
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_mini_soak_hardware_StorageQualBase_storage_qual_mini_soak_after
@@ -0,0 +1,42 @@
+
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This control file was auto-generated by generate_storage_qual_control_files.py
+# Do not edit this file!
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = "chromeos-storage"
+NAME = "hardware_StorageQualBase_storage_qual_mini_soak_after"
+ATTRIBUTES = "suite:storage_qual_mini_soak"
+PURPOSE = "hardware_StorageQualBase_storage_qual_mini_soak_after"
+TIME = "long"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "Hardware"
+TEST_TYPE = "server"
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 70
+DEPENDENCIES = "storage_qual_mini_soak"
+JOB_RETRIES = 0
+FAST = False
+
+DOC = "hardware_StorageQualBase_storage_qual_mini_soak_after"
+
+keyval = dict()
+keyval['storage_qual_version'] = 1
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
+            client_ip=machine, tag='after', client_tag='after', skip_crypto=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_mini_soak_hardware_StorageQualBase_storage_qual_mini_soak_before b/server/site_tests/hardware_StorageQual/control.storage_qual_mini_soak_hardware_StorageQualBase_storage_qual_mini_soak_before
new file mode 100644
index 0000000..4cf11fe
--- /dev/null
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_mini_soak_hardware_StorageQualBase_storage_qual_mini_soak_before
@@ -0,0 +1,42 @@
+
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This control file was auto-generated by generate_storage_qual_control_files.py
+# Do not edit this file!
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = "chromeos-storage"
+NAME = "hardware_StorageQualBase_storage_qual_mini_soak_before"
+ATTRIBUTES = "suite:storage_qual_mini_soak"
+PURPOSE = "hardware_StorageQualBase_storage_qual_mini_soak_before"
+TIME = "lengthy"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "Hardware"
+TEST_TYPE = "server"
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 100
+DEPENDENCIES = "storage_qual_mini_soak"
+JOB_RETRIES = 0
+FAST = False
+
+DOC = "hardware_StorageQualBase_storage_qual_mini_soak_before"
+
+keyval = dict()
+keyval['storage_qual_version'] = 1
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
+            client_ip=machine, tag='before', client_tag='before', skip_crypto=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_mini_soak_hardware_StorageStress_storage_qual_mini_soak_soak_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_mini_soak_hardware_StorageStress_storage_qual_mini_soak_soak_0
new file mode 100644
index 0000000..3cd5f63
--- /dev/null
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_mini_soak_hardware_StorageStress_storage_qual_mini_soak_soak_0
@@ -0,0 +1,42 @@
+
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This control file was auto-generated by generate_storage_qual_control_files.py
+# Do not edit this file!
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = "chromeos-storage"
+NAME = "hardware_StorageStress_storage_qual_mini_soak_soak_0"
+ATTRIBUTES = "suite:storage_qual_mini_soak"
+PURPOSE = "hardware_StorageStress_storage_qual_mini_soak_soak_0"
+TIME = "lengthy"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "Hardware"
+TEST_TYPE = "server"
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 90
+DEPENDENCIES = "storage_qual_mini_soak"
+JOB_RETRIES = 0
+FAST = False
+
+DOC = "hardware_StorageStress_storage_qual_mini_soak_soak_0"
+
+keyval = dict()
+keyval['storage_qual_version'] = 1
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
+            client_ip=machine, tag='soak', power_command='nothing', storage_test_command='full_write', duration=7200)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_retention_after b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_retention_after
index e610130..753ed46 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_retention_after
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_retention_after
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_retention_after"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualBase_retention_after"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 70
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_retention_after"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='after', tag='after')
+            client_ip=machine, tag='after', client_tag='after')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_retention_before b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_retention_before
index 554a021..3a7970e 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_retention_before
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_retention_before
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_retention_before"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualBase_retention_before"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 100
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_retention_before"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='before', tag='before')
+            client_ip=machine, tag='before', client_tag='before')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_suspend_after b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_suspend_after
index fe45e3d..e4f368b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_suspend_after
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_suspend_after
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_suspend_after"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualBase_suspend_after"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 70
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_suspend_after"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='after', tag='after')
+            client_ip=machine, tag='after', client_tag='after')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_suspend_before b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_suspend_before
index ee27c73..d688e14 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_suspend_before
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_suspend_before
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_suspend_before"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualBase_suspend_before"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 100
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_suspend_before"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='before', tag='before')
+            client_ip=machine, tag='before', client_tag='before')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_trim_after b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_trim_after
index e6ea1c0..52d8e77 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_trim_after
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_trim_after
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_trim_after"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualBase_trim_after"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 70
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_trim_after"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='after', tag='after')
+            client_ip=machine, tag='after', client_tag='after')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_trim_before b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_trim_before
index 0d286dc..ce28b7b 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_trim_before
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualBase_trim_before
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualBase_trim_before"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualBase_trim_before"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 100
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualBase_trim_before"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualBase", host=hosts.create_host(machine),
-            client_ip=machine, client_tag='before', tag='before')
+            client_ip=machine, tag='before', client_tag='before')
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualCheckSetup_retention b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualCheckSetup_retention
index 2649c53..49918fb 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualCheckSetup_retention
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualCheckSetup_retention
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualCheckSetup_retention"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualCheckSetup_retention"
@@ -16,17 +16,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
+PY_VERSION = 3
 REQUIRE_SSP = False
 PRIORITY = 110
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualCheckSetup_retention"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualSuspendStress_suspend_suspend_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualSuspendStress_suspend_suspend_0
index 67c2ab2..4ab7388 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualSuspendStress_suspend_suspend_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualSuspendStress_suspend_suspend_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_0"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=1800, tag='suspend')
+            client_ip=machine, tag='suspend', duration=1800.0)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualSuspendStress_suspend_suspend_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualSuspendStress_suspend_suspend_1
index b348eec..daf363f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualSuspendStress_suspend_suspend_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualSuspendStress_suspend_suspend_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualSuspendStress_suspend_suspend_1"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualSuspendStress_suspend_suspend_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualSuspendStress_suspend_suspend_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualSuspendStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=1800, tag='suspend')
+            client_ip=machine, tag='suspend', duration=1800.0)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualTrimStress_trim_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualTrimStress_trim_0
index 4061fdd..e1cc649 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualTrimStress_trim_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualTrimStress_trim_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_0"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualTrimStress_trim_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualTrimStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=1800)
+            client_ip=machine, duration=1800.0)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualTrimStress_trim_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualTrimStress_trim_1
index dff0749..5ed39c8 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualTrimStress_trim_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageQualTrimStress_trim_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageQualTrimStress_trim_1"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageQualTrimStress_trim_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageQualTrimStress_trim_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageQualTrimStress", host=hosts.create_host(machine),
-            client_ip=machine, duration=1800)
+            client_ip=machine, duration=1800.0)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_soak_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_soak_0
index 4a7e95a..b34305a 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_soak_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_soak_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_0"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageStress_retention_soak_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=3600, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=3600)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_soak_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_soak_1
index 6ca8fa0..8ea5e85 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_soak_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_soak_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_soak_1"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageStress_retention_soak_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_soak_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=3600, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=3600)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_suspend b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_suspend
index 867dfad..c636d9f 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_suspend
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_retention_suspend
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_retention_suspend"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageStress_retention_suspend"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
-PRIORITY = 80
+PY_VERSION = 3
+REQUIRE_SSP = True
+PRIORITY = 60
 DEPENDENCIES = "retention"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_retention_suspend"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=1800, tag='suspend', power_command='suspend', suspend_duration=120)
+            client_ip=machine, tag='suspend', power_command='suspend', storage_test_command='full_write', suspend_duration=120, duration=1800.0)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_suspend_soak_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_suspend_soak_0
index b4adc90..bd8c118 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_suspend_soak_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_suspend_soak_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_0"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageStress_suspend_soak_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=3600, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=3600)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_suspend_soak_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_suspend_soak_1
index ba94d0a..f228e70 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_suspend_soak_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_suspend_soak_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_suspend_soak_1"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageStress_suspend_soak_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "suspend"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_suspend_soak_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=3600, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=3600)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_trim_soak_0 b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_trim_soak_0
index 904df18..93719c4 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_trim_soak_0
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_trim_soak_0
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_0"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageStress_trim_soak_0"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_0"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=3600, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=3600)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_trim_soak_1 b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_trim_soak_1
index cd65eb9..cedfa92 100644
--- a/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_trim_soak_1
+++ b/server/site_tests/hardware_StorageQual/control.storage_qual_quick_hardware_StorageStress_trim_soak_1
@@ -1,5 +1,5 @@
 
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -8,7 +8,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "chromeos-storage"
 NAME = "hardware_StorageStress_trim_soak_1"
 ATTRIBUTES = "suite:storage_qual_quick"
 PURPOSE = "hardware_StorageStress_trim_soak_1"
@@ -16,21 +16,27 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = True
 PRIORITY = 90
 DEPENDENCIES = "trim"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "hardware_StorageStress_trim_soak_1"
 
 keyval = dict()
 keyval['storage_qual_version'] = 1
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
     job.run_test("hardware_StorageStress", host=hosts.create_host(machine),
-            client_ip=machine, storage_test_command='full_write', duration=3600, tag='soak', power_command='wait', suspend_duration=300)
+            client_ip=machine, tag='soak', power_command='wait', storage_test_command='full_write', suspend_duration=300, duration=3600)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQual/generate_storage_qual_control_files.py b/server/site_tests/hardware_StorageQual/generate_storage_qual_control_files.py
index 090b87f..f2f239e 100644
--- a/server/site_tests/hardware_StorageQual/generate_storage_qual_control_files.py
+++ b/server/site_tests/hardware_StorageQual/generate_storage_qual_control_files.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -36,36 +37,48 @@
 DAY_IN_SECS = HOUR_IN_SECS * DAY_IN_HOURS
 
 CHECK_SETUP = {
-    'test': 'hardware_StorageQualCheckSetup',
-    'args': {},
-    'priority': 110,
-    'length': 'lengthy'
+        'test': 'hardware_StorageQualCheckSetup',
+        'args': {},
+        'priority': 110,
+        'length': 'lengthy',
+        'ssp': False
 }
 
 BASE_BEFORE = {
-    'test': 'hardware_StorageQualBase',
-    'args': {'tag': 'before', 'client_tag': 'before'},
-    'priority': 100,
-    'length': 'lengthy'
+        'test': 'hardware_StorageQualBase',
+        'args': {
+                'tag': 'before',
+                'client_tag': 'before'
+        },
+        'priority': 100,
+        'length': 'lengthy',
+        'ssp': True
 }
 
 SOAK = {
-    'test': 'hardware_StorageStress',
-    'args': {'tag': 'soak', 'power_command': 'wait',
-        'storage_test_command': 'full_write',
-        'suspend_duration': 5 * MINUTE_IN_SECS,
-        'duration': 4 * HOUR_IN_SECS
-    },
-    'iterations': 7 * DAY_IN_HOURS / 4,
-    'priority': 90,
-    'length': 'long'
+        'test': 'hardware_StorageStress',
+        'args': {
+                'tag': 'soak',
+                'power_command': 'wait',
+                'storage_test_command': 'full_write',
+                'suspend_duration': 5 * MINUTE_IN_SECS,
+                'duration': 4 * HOUR_IN_SECS
+        },
+        'iterations': 7 * DAY_IN_HOURS / 4,
+        'priority': 90,
+        'length': 'long',
+        'ssp': True
 }
 
 BASE_AFTER = {
-    'test': 'hardware_StorageQualBase',
-    'args': {'tag': 'after', 'client_tag': 'after'},
-    'priority': 70,
-    'length': 'long'
+        'test': 'hardware_StorageQualBase',
+        'args': {
+                'tag': 'after',
+                'client_tag': 'after'
+        },
+        'priority': 70,
+        'length': 'long',
+        'ssp': True
 }
 
 
@@ -74,6 +87,11 @@
 BASE_NONROOT_AFTER = copy.deepcopy(BASE_AFTER)
 BASE_NONROOT_AFTER['args']['nonroot'] = True
 
+BASE_MINI_SOAK_BEFORE = copy.deepcopy(BASE_BEFORE)
+BASE_MINI_SOAK_BEFORE['args']['skip_crypto'] = True
+BASE_MINI_SOAK_AFTER = copy.deepcopy(BASE_AFTER)
+BASE_MINI_SOAK_AFTER['args']['skip_crypto'] = True
+
 SOAK_QUICK = copy.deepcopy(SOAK)
 SOAK_QUICK['iterations'] = 2
 SOAK_QUICK['args']['duration'] = HOUR_IN_SECS
@@ -87,175 +105,187 @@
 BASE_AFTER_CQ['args']['cq'] = True
 
 SUITES = {
-    'storage_qual': [
-        {
-            'label': 'retention',
-            'tests': [
-                CHECK_SETUP,
-                BASE_BEFORE,
-                SOAK,
-                {
-                    'test': 'hardware_StorageStress',
-                    'args': {'tag': 'suspend', 'power_command': 'suspend',
-                        'storage_test_command': 'full_write',
-                        'suspend_duration': 12 * HOUR_IN_SECS,
-                        'duration': 7 * DAY_IN_SECS
-                    },
-                    'priority': 80,
-                    'length': 'long'
-                },
-                BASE_AFTER
-            ]
-        },
-
-        {
-            'label': 'suspend',
-            'tests': [
-                BASE_BEFORE,
-                SOAK,
-                {
-                    'test': 'hardware_StorageQualSuspendStress',
-                    'args': {'tag': 'suspend', 'duration': 4 * HOUR_IN_SECS},
-                    'iterations': 7 * DAY_IN_HOURS / 4,
-                    'priority': 80,
-                    'length': 'long'
-                },
-                BASE_AFTER
-            ]
-        },
-
-        {
-            'label': 'trim',
-            'tests': [
-                BASE_BEFORE,
-                SOAK,
-                {
-                    'test': 'hardware_StorageQualTrimStress',
-                    'args': {'duration': 4 * HOUR_IN_SECS},
-                    'iterations': 7 * DAY_IN_HOURS / 4,
-                    'priority': 80,
-                    'length': 'long'
-                },
-                BASE_AFTER
-            ]
-        }
-
-    ],
-    'storage_qual_quick': [
-        {
-            'label': 'retention',
-            'tests': [
-                CHECK_SETUP,
-                BASE_BEFORE,
-                SOAK_QUICK,
-                {
-                    'test': 'hardware_StorageStress',
-                    'args': {'tag': 'suspend', 'power_command': 'suspend',
-                        'storage_test_command': 'full_write',
-                        'suspend_duration': 120,
-                        'duration': HOUR_IN_SECS / 2
-                    },
-                    'priority': 80,
-                    'length': 'long'
-                },
-                BASE_AFTER
-            ]
-        },
-
-        {
-            'label': 'suspend',
-            'tests': [
-                BASE_BEFORE,
-                SOAK_QUICK,
-                {
-                    'test': 'hardware_StorageQualSuspendStress',
-                    'args': {'tag': 'suspend', 'duration': HOUR_IN_SECS / 2},
-                    'iterations': 2,
-                    'priority': 80,
-                    'length': 'long'
-                },
-                BASE_AFTER
-            ]
-        },
-
-        {
-            'label': 'trim',
-            'tests': [
-                BASE_BEFORE,
-                SOAK_QUICK,
-                {
-                    'test': 'hardware_StorageQualTrimStress',
-                    'args': {'duration': HOUR_IN_SECS / 2},
-                    'iterations': 2,
-                    'priority': 80,
-                    'length': 'long'
-                },
-                BASE_AFTER
-            ]
-        }
-    ],
-    'storage_qual_external': [
-        {
-            'label': 'storage_qual_external',
-            'tests': [
-                BASE_NONROOT_BEFORE,
-                {
-                    'test': 'hardware_StorageQualSuspendStress',
-                    'args': {'tag': 'suspend', 'duration': 4 * HOUR_IN_SECS,
-                        'other_dev': True
-                    },
-                    'iterations': 2,
-                    'priority': 80,
-                    'length': 'long'
-                },
-                BASE_NONROOT_AFTER
-            ]
-        }
-    ],
-    'storage_qual_cq': [
-        {
-            'label': 'storage_qual_cq_1',
-            'tests': [
-                BASE_BEFORE_CQ,
-                SOAK_CQ,
-                {
-                    'test': 'hardware_StorageStress',
-                    'args': {'tag': 'suspend', 'power_command': 'suspend',
-                        'storage_test_command': 'full_write',
-                        'suspend_duration': 120,
-                        'duration': HOUR_IN_SECS / 2,
-                        'cq': True
-                    },
-                    'priority': 80,
-                    'length': 'long'
-                },
-                BASE_AFTER_CQ
-            ]
-        },
-
-        {
-            'label': 'storage_qual_cq_2',
-            'tests': [
-                BASE_BEFORE_CQ,
-                SOAK_CQ,
-                {
-                    'test': 'hardware_StorageQualTrimStress',
-                    'args': {'duration': HOUR_IN_SECS / 2, 'cq': True},
-                    'iterations': 2,
-                    'priority': 80,
-                    'length': 'long'
-                },
-                BASE_AFTER_CQ
-            ]
-        }
-    ]
+        'storage_qual': [{
+                'label':
+                'retention',
+                'tests': [
+                        CHECK_SETUP, BASE_BEFORE, SOAK, BASE_AFTER, {
+                                'test': 'hardware_StorageStress',
+                                'args': {
+                                        'tag': 'suspend',
+                                        'power_command': 'suspend',
+                                        'storage_test_command': 'full_write',
+                                        'suspend_duration': 12 * HOUR_IN_SECS,
+                                        'duration': 7 * DAY_IN_SECS
+                                },
+                                'priority': 60,
+                                'length': 'long',
+                                'ssp': True
+                        }
+                ]
+        }, {
+                'label':
+                'suspend',
+                'tests': [
+                        BASE_BEFORE, SOAK, BASE_AFTER, {
+                                'test': 'hardware_StorageQualSuspendStress',
+                                'args': {
+                                        'tag': 'suspend',
+                                        'duration': 4 * HOUR_IN_SECS
+                                },
+                                'iterations': 7 * DAY_IN_HOURS / 4,
+                                'priority': 60,
+                                'length': 'long',
+                                'ssp': True
+                        }
+                ]
+        }, {
+                'label':
+                'trim',
+                'tests': [
+                        BASE_BEFORE, SOAK, BASE_AFTER, {
+                                'test': 'hardware_StorageQualTrimStress',
+                                'args': {
+                                        'duration': 4 * HOUR_IN_SECS
+                                },
+                                'iterations': 7 * DAY_IN_HOURS / 4,
+                                'priority': 60,
+                                'length': 'long',
+                                'ssp': True
+                        }
+                ]
+        }],
+        'storage_qual_quick': [{
+                'label':
+                'retention',
+                'tests': [
+                        CHECK_SETUP, BASE_BEFORE, SOAK_QUICK, BASE_AFTER, {
+                                'test': 'hardware_StorageStress',
+                                'args': {
+                                        'tag': 'suspend',
+                                        'power_command': 'suspend',
+                                        'storage_test_command': 'full_write',
+                                        'suspend_duration': 120,
+                                        'duration': HOUR_IN_SECS / 2
+                                },
+                                'priority': 60,
+                                'length': 'long',
+                                'ssp': True
+                        }
+                ]
+        }, {
+                'label':
+                'suspend',
+                'tests': [
+                        BASE_BEFORE, SOAK_QUICK, BASE_AFTER, {
+                                'test': 'hardware_StorageQualSuspendStress',
+                                'args': {
+                                        'tag': 'suspend',
+                                        'duration': HOUR_IN_SECS / 2
+                                },
+                                'iterations': 2,
+                                'priority': 60,
+                                'length': 'long',
+                                'ssp': True
+                        }
+                ]
+        }, {
+                'label':
+                'trim',
+                'tests': [
+                        BASE_BEFORE, SOAK_QUICK, BASE_AFTER, {
+                                'test': 'hardware_StorageQualTrimStress',
+                                'args': {
+                                        'duration': HOUR_IN_SECS / 2
+                                },
+                                'iterations': 2,
+                                'priority': 60,
+                                'length': 'long',
+                                'ssp': True
+                        }
+                ]
+        }],
+        'storage_qual_external': [{
+                'label':
+                'storage_qual_external',
+                'tests': [
+                        BASE_NONROOT_BEFORE, {
+                                'test': 'hardware_StorageQualSuspendStress',
+                                'args': {
+                                        'tag': 'suspend',
+                                        'duration': 4 * HOUR_IN_SECS,
+                                        'other_dev': True
+                                },
+                                'iterations': 2,
+                                'priority': 80,
+                                'length': 'long',
+                                'ssp': True
+                        }, BASE_NONROOT_AFTER
+                ]
+        }],
+        'storage_qual_mini_soak': [{
+                'label':
+                'storage_qual_mini_soak',
+                'tests': [
+                        BASE_MINI_SOAK_BEFORE, {
+                                'test': 'hardware_StorageStress',
+                                'args': {
+                                        'tag': 'soak',
+                                        'power_command': 'nothing',
+                                        'storage_test_command': 'full_write',
+                                        'duration': 2 * HOUR_IN_SECS
+                                },
+                                'iterations': 1,
+                                'priority': 90,
+                                'length': 'lengthy',
+                                'ssp': True
+                        }, BASE_MINI_SOAK_AFTER
+                ]
+        }],
+        'storage_qual_cq': [{
+                'label':
+                'storage_qual_cq_1',
+                'tests': [
+                        BASE_BEFORE_CQ, SOAK_CQ, {
+                                'test': 'hardware_StorageStress',
+                                'args': {
+                                        'tag': 'suspend',
+                                        'power_command': 'suspend',
+                                        'storage_test_command': 'full_write',
+                                        'suspend_duration': 120,
+                                        'duration': HOUR_IN_SECS / 2,
+                                        'cq': True
+                                },
+                                'priority': 80,
+                                'length': 'long',
+                                'ssp': True
+                        }, BASE_AFTER_CQ
+                ]
+        }, {
+                'label':
+                'storage_qual_cq_2',
+                'tests': [
+                        BASE_BEFORE_CQ, SOAK_CQ, {
+                                'test': 'hardware_StorageQualTrimStress',
+                                'args': {
+                                        'duration': HOUR_IN_SECS / 2,
+                                        'cq': True
+                                },
+                                'iterations': 2,
+                                'priority': 80,
+                                'length': 'long',
+                                'ssp': True
+                        }, BASE_AFTER_CQ
+                ]
+        }]
 }
 
 SUITE_ATTRIBUTES = {
     'storage_qual': 'suite:storage_qual',
     'storage_qual_quick': 'suite:storage_qual_quick',
     'storage_qual_cq': 'suite:storage_qual_cq',
-    'storage_qual_external': 'suite:storage_qual_external'
+    'storage_qual_external': 'suite:storage_qual_external',
+    'storage_qual_mini_soak': 'suite:storage_qual_mini_soak'
 }
 
 TEMPLATE = """
@@ -276,17 +306,23 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "Hardware"
 TEST_TYPE = "server"
-REQUIRE_SSP = False
+PY_VERSION = 3
+REQUIRE_SSP = {ssp}
 PRIORITY = {priority}
 DEPENDENCIES = "{label}"
 JOB_RETRIES = 0
+FAST = False
 
 DOC = "{name}"
 
 keyval = dict()
 keyval['storage_qual_version'] = {version}
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
+try:
+    keyval['bug_id'] = bug_id
+    keyval['part_id'] = part_id
+except NameError:
+    # bug_id and/or part_id variables not defined
+    pass
 utils.write_keyval(job.resultdir, keyval)
 
 def run(machine):
@@ -312,7 +348,7 @@
 
 def _get_args(test):
     args = []
-    for key, value in test['args'].items():
+    for key, value in list(test['args'].items()):
         args.append('%s=%s' % (key, repr(value)))
     return ', '.join(args)
 
@@ -330,29 +366,29 @@
             if 'iterations' in test:
                 for i in range(int(test['iterations'])):
                     control_file = TEMPLATE.format(
-                        label = label,
-                        name = _get_name(label, test, i),
-                        args = _get_args(test),
-                        priority = test['priority'],
-                        test = test['test'],
-                        length = test['length'],
-                        attributes = SUITE_ATTRIBUTES[suite],
-                        version = STORAGE_QUAL_VERSION,
+                            label=label,
+                            name=_get_name(label, test, i),
+                            args=_get_args(test),
+                            priority=test['priority'],
+                            ssp=test['ssp'],
+                            test=test['test'],
+                            length=test['length'],
+                            attributes=SUITE_ATTRIBUTES[suite],
+                            version=STORAGE_QUAL_VERSION,
                     )
                     _write_control_file(_get_control_file_name(
                         suite, label, test, i), control_file)
 
             else:
                 control_file = TEMPLATE.format(
-                    label = label,
-                    name = _get_name(label, test),
-                    args = _get_args(test),
-                    priority = test['priority'],
-                    test = test['test'],
-                    length = test['length'],
-                    attributes = SUITE_ATTRIBUTES[suite],
-                    version = STORAGE_QUAL_VERSION
-                )
+                        label=label,
+                        name=_get_name(label, test),
+                        args=_get_args(test),
+                        priority=test['priority'],
+                        test=test['test'],
+                        length=test['length'],
+                        ssp=test['ssp'],
+                        attributes=SUITE_ATTRIBUTES[suite],
+                        version=STORAGE_QUAL_VERSION)
                 _write_control_file(_get_control_file_name(suite, label, test),
                         control_file)
-
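
A simplified sketch of how generate_storage_qual_control_files.py expands each SUITES entry: every test dict is formatted into TEMPLATE, and the new 'ssp' key now drives REQUIRE_SSP. TEMPLATE below is a cut-down stand-in, not the full control-file template; the example values mirror the generated suspend/before control file shown earlier.

TEMPLATE = ('NAME = "{name}"\n'
            'ATTRIBUTES = "{attributes}"\n'
            'PY_VERSION = 3\n'
            'REQUIRE_SSP = {ssp}\n'
            'PRIORITY = {priority}\n'
            'DEPENDENCIES = "{label}"\n')

test = {'test': 'hardware_StorageQualBase',
        'args': {'tag': 'before', 'client_tag': 'before'},
        'priority': 100, 'length': 'lengthy', 'ssp': True}

print(TEMPLATE.format(name='hardware_StorageQualBase_suspend_before',
                      attributes='suite:storage_qual_quick',
                      ssp=test['ssp'],
                      priority=test['priority'],
                      label='suspend'))
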
diff --git a/server/site_tests/hardware_StorageQualBase/control.test b/server/site_tests/hardware_StorageQualBase/control.test
index 2f1d888..daad63b 100644
--- a/server/site_tests/hardware_StorageQualBase/control.test
+++ b/server/site_tests/hardware_StorageQualBase/control.test
@@ -8,6 +8,8 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+PY_VERSION = 3
+FAST = False
 
 DOC = """
 Check the test hardware_StorageQualBase is working as expected.
diff --git a/server/site_tests/hardware_StorageQualBase/control.test_nonroot b/server/site_tests/hardware_StorageQualBase/control.test_nonroot
index d136ae8..fb19e88 100644
--- a/server/site_tests/hardware_StorageQualBase/control.test_nonroot
+++ b/server/site_tests/hardware_StorageQualBase/control.test_nonroot
@@ -8,6 +8,8 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+PY_VERSION = 3
+FAST = False
 
 DOC = """
 Check the test hardware_StorageQualBase is working as expected for
diff --git a/server/site_tests/hardware_StorageQualBase/hardware_StorageQualBase.py b/server/site_tests/hardware_StorageQualBase/hardware_StorageQualBase.py
index e32ad71..1ccd893 100644
--- a/server/site_tests/hardware_StorageQualBase/hardware_StorageQualBase.py
+++ b/server/site_tests/hardware_StorageQualBase/hardware_StorageQualBase.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -54,8 +55,13 @@
     ]
 
 
-    def run_once(self, client_ip, client_tag='', crypto_runtime=CRYPTO_RUNTIME,
-                 cq=False, nonroot=False):
+    def run_once(self,
+                 client_ip,
+                 client_tag='',
+                 crypto_runtime=CRYPTO_RUNTIME,
+                 cq=False,
+                 nonroot=False,
+                 skip_crypto=False):
         """
         Runs simple tests to ensure the device meets basic criteria.
 
@@ -63,6 +69,7 @@
         @param client_tag: client tag for keyval label
         @param crypto_runtime: runtime for platform.CryptohomeFio tests
         @param cq: part of a cq run
+        @param skip_crypto: skip running cryptohome tests
 
         """
 
@@ -85,12 +92,13 @@
                 client_at.run_test(test_name, disable_sysinfo=True,
                                    tag=client_tag, **argv)
 
-            # Test real life performance
-            for script in self.CRYPTO_TESTS:
-                client_at.run_test('platform_CryptohomeFio',
-                    disable_sysinfo=True,
-                    from_internal_disk_only=True,
-                    script=script,
-                    tag='_'.join([client_tag, script]),
-                    runtime=crypto_runtime,
-                    disk_configs=['crypto', 'plain'])
+            if not skip_crypto:
+                # Test real life performance
+                for script in self.CRYPTO_TESTS:
+                    client_at.run_test('platform_CryptohomeFio',
+                                       disable_sysinfo=True,
+                                       from_internal_disk_only=True,
+                                       script=script,
+                                       tag='_'.join([client_tag, script]),
+                                       runtime=crypto_runtime,
+                                       disk_configs=['crypto', 'plain'])
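
The new skip_crypto flag reaches run_once() through the generator's argument rendering. As a quick check, this is the string the generator's _get_args() helper would emit for the mini-soak "before" entry (BASE_MINI_SOAK_BEFORE above), assuming Python 3 insertion-ordered dicts:

args = {'tag': 'before', 'client_tag': 'before', 'skip_crypto': True}
print(', '.join('%s=%s' % (key, repr(value)) for key, value in args.items()))
# -> tag='before', client_tag='before', skip_crypto=True
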
diff --git a/server/site_tests/hardware_StorageQualCheckSetup/control b/server/site_tests/hardware_StorageQualCheckSetup/control
index 3d701ff..428a9fc 100644
--- a/server/site_tests/hardware_StorageQualCheckSetup/control
+++ b/server/site_tests/hardware_StorageQualCheckSetup/control
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "hardware_StorageQualCheckSetup"
 PURPOSE = "Test that the moblab has the correct setup for storage_qual suite"
 ATTRIBUTES = "suite:check_setup_storage_qual"
@@ -10,7 +10,9 @@
 TEST_CATEGORY = "General"
 TEST_CLASS = "dummy"
 TEST_TYPE = "server"
+PY_VERSION = 3
 REQUIRE_SSP = False
+FAST = False
 
 DOC = """
 
diff --git a/server/site_tests/hardware_StorageQualCheckSetup/hardware_StorageQualCheckSetup.py b/server/site_tests/hardware_StorageQualCheckSetup/hardware_StorageQualCheckSetup.py
index 27f82c9..17e888c 100644
--- a/server/site_tests/hardware_StorageQualCheckSetup/hardware_StorageQualCheckSetup.py
+++ b/server/site_tests/hardware_StorageQualCheckSetup/hardware_StorageQualCheckSetup.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/hardware_StorageQualSuspendStress/control.test b/server/site_tests/hardware_StorageQualSuspendStress/control.test
index 327159d..a09c4b4 100644
--- a/server/site_tests/hardware_StorageQualSuspendStress/control.test
+++ b/server/site_tests/hardware_StorageQualSuspendStress/control.test
@@ -2,15 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-NAME = 'hardware_StorageSuspendStress.test'
+NAME = 'hardware_StorageQualSuspendStress.test'
 AUTHOR = 'grundler, gwendal, puthik'
 PURPOSE = 'Test StorageSuspendStress code'
 TIME = 'MEDIUM'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
+FAST = False
 
 DOC = """
-Test hardware_StorageSuspendStress section.
+Test hardware_StorageQualSuspendStress section.
 """
 
 def run_hardware_storage_stress(machine):
diff --git a/server/site_tests/hardware_StorageQualSuspendStress/control.test_other_device b/server/site_tests/hardware_StorageQualSuspendStress/control.test_other_device
index 3e138dd..2884ab8 100644
--- a/server/site_tests/hardware_StorageQualSuspendStress/control.test_other_device
+++ b/server/site_tests/hardware_StorageQualSuspendStress/control.test_other_device
@@ -8,6 +8,8 @@
 TIME = 'MEDIUM'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
+FAST = False
 
 DOC = """
 Test hardware_StorageSuspendStress section.
diff --git a/server/site_tests/hardware_StorageQualSuspendStress/hardware_StorageQualSuspendStress.py b/server/site_tests/hardware_StorageQualSuspendStress/hardware_StorageQualSuspendStress.py
index df7a9a8..3c8b5f5 100644
--- a/server/site_tests/hardware_StorageQualSuspendStress/hardware_StorageQualSuspendStress.py
+++ b/server/site_tests/hardware_StorageQualSuspendStress/hardware_StorageQualSuspendStress.py
@@ -1,9 +1,8 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
 from autotest_lib.server import autotest
 from autotest_lib.server import hosts
 from autotest_lib.server import test
@@ -21,7 +20,7 @@
         fio_test = "hardware_StorageFio"
         if other_dev:
             fio_test = "hardware_StorageFioOther"
-        control = """job.parallel(
+        control = """REQUIRE_SSP = True \n\njob.parallel(
             [lambda: job.run_test('power_SuspendStress', tag='disk',
                 duration=%d, init_delay=10, min_suspend=7, min_resume=30,
                 check_connection=True)],
@@ -34,4 +33,3 @@
                            blkdiscard=False,
                            requirements=[('write_stress', ['v'])],
                            tag='qual_verify')
-
diff --git a/server/site_tests/hardware_StorageQualTrimStress/control.test b/server/site_tests/hardware_StorageQualTrimStress/control.test
index c95a4d2..e918d53 100644
--- a/server/site_tests/hardware_StorageQualTrimStress/control.test
+++ b/server/site_tests/hardware_StorageQualTrimStress/control.test
@@ -8,6 +8,8 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+PY_VERSION = 3
+FAST = False
 
 DOC = """
 Suite for testing the Storage Qual Trim test running from the fixed devices.
diff --git a/server/site_tests/hardware_StorageQualTrimStress/hardware_StorageQualTrimStress.py b/server/site_tests/hardware_StorageQualTrimStress/hardware_StorageQualTrimStress.py
index 3b76450..8d0b771 100644
--- a/server/site_tests/hardware_StorageQualTrimStress/hardware_StorageQualTrimStress.py
+++ b/server/site_tests/hardware_StorageQualTrimStress/hardware_StorageQualTrimStress.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -22,7 +23,7 @@
 
         client = hosts.create_host(client_ip)
         client_at = autotest.Autotest(client)
-        control = """job.parallel(
+        control = """REQUIRE_SSP = True \n\njob.parallel(
             [lambda: job.run_test('power_SuspendStress', tag='disk',
                 duration=%d, init_delay=10, min_suspend=7, min_resume=30,
                 check_connection=True)],
@@ -30,4 +31,3 @@
                 disable_sysinfo=True,
                 tag='qual_trim')])""" % (duration, duration)
         client_at.run(control, '.', None)
-
diff --git a/server/site_tests/hardware_StorageQualV2/control.short_functional b/server/site_tests/hardware_StorageQualV2/control.short_functional
new file mode 100644
index 0000000..a506851
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.short_functional
@@ -0,0 +1,50 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'dlunev, abergman, chromeos-engprod-platform-syd, chromeos-storage'
+NAME = 'hardware_StorageQualV2.short_functional'
+ATTRIBUTES = ''
+TIME = 'LENGTHY'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:storage_qual_bringup'
+PY_VERSION = 3
+PRIORITY = 200
+
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+def run(machine):
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        test_args['tast_skip_setup_check'] = 'true'
+        test_args['tast_suspend_block_timeout'] = '10m'
+        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
+
+        job.run_test('tast',
+                     host=hosts.create_host(machine),
+                     test_exprs=['storage.FullQualificationStress.functional'],
+                     ignore_test_failures=False,
+                     max_run_sec=7200,
+                     command_args=args,
+                     varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
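
The new control above hands its two tast_ settings to the Tast wrapper through a YAML varsfile: the arguments are dumped into a named temporary file and that file's path is passed via varsfiles. A standalone sketch of just that handoff, assuming PyYAML is installed; the keys match the control, and the final print exists only to show the YAML the wrapper would load:

import tempfile
import yaml

# Same tast_ arguments the control writes; the file is opened in text
# mode ('w+') so yaml.dump() can write its str output directly.
test_args = {
    'tast_skip_setup_check': 'true',
    'tast_suspend_block_timeout': '10m',
}

with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+') as temp_file:
    yaml.dump(test_args, stream=temp_file, default_flow_style=False)
    temp_file.seek(0)
    print(temp_file.read())  # contents the tast wrapper reads from varsfiles
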
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup
deleted file mode 100644
index 8251285..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_setup'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
-TIME = 'lengthy'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 200
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.setup_benchmarks'],
-                    ignore_test_failures=False,
-                    max_run_sec=3600,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_1
new file mode 100644
index 0000000..ca3651e
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_setup'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'lengthy'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 200
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.setup_benchmarks'],
+                    ignore_test_failures=False,
+                    max_run_sec=3600,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
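
Relative to the deleted control, the replacement switches the temp-file handling for Python 3: the file is opened in text mode with an explicit UTF-8 encoding, and yaml.safe_dump with allow_unicode=True replaces yaml.dump. A small sketch, assuming PyYAML is installed, of why the old combination breaks under Python 3 and what the new one writes; tast_example_key is a made-up argument name:

import tempfile
import yaml

test_args = {'tast_example_key': 'value'}  # hypothetical tast_ argument

# Old pattern: the default NamedTemporaryFile is opened 'w+b', but
# yaml.dump() writes str, so Python 3 rejects the write with TypeError.
try:
    with tempfile.NamedTemporaryFile(suffix='.yaml') as f:
        yaml.dump(test_args, stream=f, default_flow_style=False)
except TypeError as exc:
    print('old pattern fails on Python 3:', exc)

# New pattern: text mode, explicit UTF-8, safe_dump with unicode allowed.
with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+',
                                 encoding='utf-8') as f:
    yaml.safe_dump(test_args, stream=f, default_flow_style=False,
                   allow_unicode=True)
    f.seek(0)
    print(f.read())
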
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_2
new file mode 100644
index 0000000..1c639b8
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_setup'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'lengthy'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 200
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.setup_benchmarks'],
+                    ignore_test_failures=False,
+                    max_run_sec=3600,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_3
new file mode 100644
index 0000000..bcc9ff0
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_setup'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'lengthy'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 200
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.setup_benchmarks'],
+                    ignore_test_failures=False,
+                    max_run_sec=3600,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_4
new file mode 100644
index 0000000..80c13b4
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_setup'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'lengthy'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 200
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.setup_benchmarks'],
+                    ignore_test_failures=False,
+                    max_run_sec=3600,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_satlab1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_satlab1
new file mode 100644
index 0000000..e98d62e
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_setup_satlab1
@@ -0,0 +1,64 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'varunth, chromeos-fleet-software'
+NAME = 'storage_qual_v2_setup_satlab'
+ATTRIBUTES = 'suite:storage_qual_v2_xs_satlab'
+TIME = 'lengthy'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 200
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+global args_dict
+try:
+    args_dict
+except NameError:
+    args_dict = utils.args_to_dict(args)
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = args_dict.get("bug_id", "")
+keyval['part_id'] = args_dict.get("part_id", "")
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.setup_benchmarks'],
+                    ignore_test_failures=False,
+                    max_run_sec=3600,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
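
Unlike the lab controls above, this satlab variant does not rely on bug_id and part_id being injected into the control-file globals: it recovers them from the key=value strings in args using utils.args_to_dict() (guarded by the NameError fallback) and defaults them to empty strings. A rough standalone sketch of that step, with a hand-rolled stand-in for args_to_dict; the real helper in autotest_lib.client.common_lib.utils may differ in detail, and the sample args list is made up:

# Simplified stand-in for utils.args_to_dict(): split each 'key=value'
# argument and drop anything without an '='.
def args_to_dict(args):
    parsed = {}
    for item in args:
        key, sep, value = item.partition('=')
        if sep:
            parsed[key.strip()] = value.strip()
    return parsed

args = ['bug_id=b/123456', 'part_id=SAMPLE-SSD', 'tast_extra=1']  # example only
args_dict = args_to_dict(args)

keyval = {
    'storage_qual_version': 2,
    'bug_id': args_dict.get('bug_id', ''),
    'part_id': args_dict.get('part_id', ''),
}
print(keyval)  # mirrors what the control passes to utils.write_keyval()
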
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01
deleted file mode 100644
index ee3004a..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_01'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 100
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_1
new file mode 100644
index 0000000..8d28b03
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_01'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 100
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_2
new file mode 100644
index 0000000..e43f981
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_01'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 100
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_3
new file mode 100644
index 0000000..5cad4fe
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_01'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 100
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_4
new file mode 100644
index 0000000..fc7e50b
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_01'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 100
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_satlab1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_satlab1
new file mode 100644
index 0000000..636a048
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_01_satlab1
@@ -0,0 +1,64 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'varunth, chromeos-fleet-software'
+NAME = 'storage_qual_v2_stress_01_satlab'
+ATTRIBUTES = 'suite:storage_qual_v2_xs_satlab'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 100
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+global args_dict
+try:
+    args_dict
+except NameError:
+    args_dict = utils.args_to_dict(args)
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = args_dict.get("bug_id", "")
+keyval['part_id'] = args_dict.get("part_id", "")
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02
deleted file mode 100644
index 721d1ce..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_02'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 99
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_1
new file mode 100644
index 0000000..c73713b
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_02'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 99
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_2
new file mode 100644
index 0000000..5ff61b6
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_02'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 99
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_3
new file mode 100644
index 0000000..faf9754
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_02'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 99
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_4
new file mode 100644
index 0000000..6c5c753
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_02_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_02'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 99
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03
deleted file mode 100644
index 2e42bcc..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_03'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 98
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_1
new file mode 100644
index 0000000..7aa6d4f
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_03'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 98
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_2
new file mode 100644
index 0000000..1a151bd
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_03'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 98
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_3
new file mode 100644
index 0000000..439467a
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_03'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 98
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_4
new file mode 100644
index 0000000..7f92ffc
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_03_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_03'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 98
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04
deleted file mode 100644
index 7378307..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_04'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 97
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_1
new file mode 100644
index 0000000..f9d097e
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_04'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 97
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
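The four new files control.storage_qual_v2_stress_04_1 through _04_4 (and likewise the _05_*, _06_*, _07_* and _08_* families below) are identical apart from the file name and the DEPENDENCIES = 'dut:N' line, which appears to pin each copy to a differently labelled DUT; the doubled max_run_sec (18000 -> 28800) and the PY_VERSION/FAST fields are the only other changes from the deleted single-file versions. A purely hypothetical sketch of that per-DUT naming pattern, not code from the repository:

    # Illustrative helper only -- prints the lines that differ between the
    # four per-DUT copies of a given stress control file.
    def emit_controls(idx, priority):
        for dut in range(1, 5):
            print("control.storage_qual_v2_stress_%02d_%d:" % (idx, dut))
            print("  NAME = 'storage_qual_v2_stress_%02d'" % idx)
            print("  PRIORITY = %d" % priority)
            print("  DEPENDENCIES = 'dut:%d'" % dut)

    emit_controls(4, 97)  # matches stress_04_1 .. stress_04_4 above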
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_2
new file mode 100644
index 0000000..71002fa
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_04'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 97
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_3
new file mode 100644
index 0000000..f4c1e2e
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_04'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 97
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_4
new file mode 100644
index 0000000..dfd38cb
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_04_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_04'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 97
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05
deleted file mode 100644
index aa21557..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_05'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 96
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_1
new file mode 100644
index 0000000..a2ad6b8
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_05'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 96
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_2
new file mode 100644
index 0000000..e9555b4
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_05'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 96
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_3
new file mode 100644
index 0000000..ab126f8
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_05'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 96
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_4
new file mode 100644
index 0000000..f828724
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_05_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_05'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 96
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06
deleted file mode 100644
index c0479a8..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_06'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 95
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_1
new file mode 100644
index 0000000..a44a166
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_06'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 95
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_2
new file mode 100644
index 0000000..737d369
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_06'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 95
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_3
new file mode 100644
index 0000000..74d1ee3
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_06'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 95
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_4
new file mode 100644
index 0000000..7bb89a3
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_06_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_06'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 95
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07
deleted file mode 100644
index 3f42e0e..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_07'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 94
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_1
new file mode 100644
index 0000000..5b20e5a
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_07'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 94
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_2
new file mode 100644
index 0000000..b521c28
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_07'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 94
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_3
new file mode 100644
index 0000000..7caf76a
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_07'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 94
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_4
new file mode 100644
index 0000000..2e6ad73
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_07_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_07'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 94
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08
deleted file mode 100644
index 417db3d..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_08'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 93
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_1
new file mode 100644
index 0000000..03fed61
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_08'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 93
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_2
new file mode 100644
index 0000000..a63112e
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_08'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 93
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_3
new file mode 100644
index 0000000..9597216
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_08'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 93
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_4
new file mode 100644
index 0000000..b85d39e
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_08_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_08'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 93
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09
deleted file mode 100644
index ec92655..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_09'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 92
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_1
new file mode 100644
index 0000000..bb4fa9a
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_09'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 92
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_2
new file mode 100644
index 0000000..e6cde7f
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_09'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 92
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_3
new file mode 100644
index 0000000..0834385
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_09'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 92
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_4
new file mode 100644
index 0000000..d97116a
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_09_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_09'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 92
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10
deleted file mode 100644
index 1366a15..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_10'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 91
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
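
Relative to the single-DUT control file deleted above, the per-DUT variants that follow set PY_VERSION = 3, add FAST and DEPENDENCIES = 'dut:N', raise max_run_sec from 18000 to 28800, and switch from yaml.dump on a default (binary-mode) temporary file to yaml.safe_dump on a text-mode UTF-8 file. The mode change is what keeps the YAML write working once the control file runs under Python 3; under Python 2 the old pattern got by because str and bytes coincide. A minimal sketch of the difference (requires PyYAML; the key name is illustrative):

import tempfile
import yaml

data = {'tast_example_var': 'value'}

# Old pattern: NamedTemporaryFile defaults to binary mode ('w+b'), and
# yaml.dump emits str, so the write raises TypeError on Python 3.
with tempfile.NamedTemporaryFile(suffix='.yaml') as f:
    try:
        yaml.dump(data, stream=f, default_flow_style=False)
    except TypeError as err:
        print('binary-mode stream rejected:', err)

# New pattern: explicit text mode and encoding, plus safe_dump/allow_unicode.
with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as f:
    yaml.safe_dump(data, stream=f, default_flow_style=False, allow_unicode=True)
    f.seek(0)
    print(f.read())
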
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_1
new file mode 100644
index 0000000..cf13a38
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_10'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 91
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_2
new file mode 100644
index 0000000..af2279c
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_10'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 91
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_3
new file mode 100644
index 0000000..889d9d0
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_10'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 91
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_4
new file mode 100644
index 0000000..3f81c89
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_10_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_10'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 91
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11
deleted file mode 100644
index f17482a..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_11'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 90
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_1
new file mode 100644
index 0000000..456ee0c
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_11'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 90
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_2
new file mode 100644
index 0000000..da46efd
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_11'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 90
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_3
new file mode 100644
index 0000000..318af47
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_11'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 90
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_4
new file mode 100644
index 0000000..4d11290
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_11_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_11'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 90
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12
deleted file mode 100644
index 6fb8bb4..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_12'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 89
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_1
new file mode 100644
index 0000000..0d361c0
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_12'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 89
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_2
new file mode 100644
index 0000000..e5fb0d8
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_12'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 89
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_3
new file mode 100644
index 0000000..7489eae
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_12'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 89
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_4
new file mode 100644
index 0000000..1388e56
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_12_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_12'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 89
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13
deleted file mode 100644
index b5f3353..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_13'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 88
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_1
new file mode 100644
index 0000000..e8e8b05
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_13'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 88
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_2
new file mode 100644
index 0000000..787d681
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_13'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 88
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_3
new file mode 100644
index 0000000..ea98408
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_13'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 88
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_4
new file mode 100644
index 0000000..0bd228b
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_13_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_13'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 88
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14
deleted file mode 100644
index 804a819..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_14'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 87
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_1
new file mode 100644
index 0000000..e70737e
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_14'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 87
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_2
new file mode 100644
index 0000000..9007e72
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_14'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 87
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_3
new file mode 100644
index 0000000..72cd095
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_14'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 87
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_4
new file mode 100644
index 0000000..79fce19
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_14_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_14'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 87
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15
deleted file mode 100644
index f5d8fa7..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_15'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 86
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_1
new file mode 100644
index 0000000..a66da1d
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_15'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 86
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_2
new file mode 100644
index 0000000..6477b56
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_15'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 86
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_3
new file mode 100644
index 0000000..6a487f2
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_15'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 86
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_4
new file mode 100644
index 0000000..e9c7b4d
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_15_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_15'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 86
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16
deleted file mode 100644
index c66e684..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_16'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 85
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_1
new file mode 100644
index 0000000..7550cd6
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_16'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 85
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_2
new file mode 100644
index 0000000..81d9a8b
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_16'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 85
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_3
new file mode 100644
index 0000000..11a8d95
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_16'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 85
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_4
new file mode 100644
index 0000000..3110d36
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_16_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_16'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 85
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17
deleted file mode 100644
index 84849c9..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_17'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 84
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_1
new file mode 100644
index 0000000..ca28eca
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_17'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 84
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_2
new file mode 100644
index 0000000..3d68d3a
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_17'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 84
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_3
new file mode 100644
index 0000000..190bec3
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_17'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 84
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_4
new file mode 100644
index 0000000..d2a0f29
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_17_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_17'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 84
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18
deleted file mode 100644
index 9b87573..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_18'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 83
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_1
new file mode 100644
index 0000000..a03c5a7
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_18'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 83
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_2
new file mode 100644
index 0000000..8da83d0
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_18'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 83
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_3
new file mode 100644
index 0000000..65095df
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_18'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 83
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_4
new file mode 100644
index 0000000..dc1bd02
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_18_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_18'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 83
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19
deleted file mode 100644
index b357264..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_19'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 82
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
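Compared with the deleted control file above, the replacements below split the job into four per-DUT variants that differ only in DEPENDENCIES ('dut:1' through 'dut:4'), raise max_run_sec from 18000 to 28800 seconds, and open the varsfile in text mode with an explicit encoding. The last change matters for the PY_VERSION = 3 migration: tempfile.NamedTemporaryFile defaults to binary mode ('w+b'), and yaml.dump writes str, which a binary-mode file rejects under Python 3. A small sketch of the failure and the fix, for illustration only:

    import tempfile
    import yaml

    data = {'tast_example_key': 'value'}  # hypothetical content

    # Old pattern: the default binary mode breaks under Python 3.
    with tempfile.NamedTemporaryFile(suffix='.yaml') as f:
        try:
            yaml.dump(data, stream=f, default_flow_style=False)
        except TypeError as e:
            print('binary-mode temp file rejects str output:', e)

    # New pattern: explicit text mode plus safe_dump, as in the new files.
    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+',
                                     encoding='utf-8') as f:
        yaml.safe_dump(data, stream=f, default_flow_style=False,
                       allow_unicode=True)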
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_1
new file mode 100644
index 0000000..bef9347
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_19'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 82
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_2
new file mode 100644
index 0000000..ae69328
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_19'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 82
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_3
new file mode 100644
index 0000000..2a45bca
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_19'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 82
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_4
new file mode 100644
index 0000000..9214b19
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_19_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_19'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 82
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20
deleted file mode 100644
index d886dca..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_20'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 81
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
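Each of these control files also records a few job-level keyvals before the Tast wrapper runs; bug_id and part_id are assumed to be defined elsewhere in the control-file namespace. utils.write_keyval(job.resultdir, keyval) appends the dictionary to the keyval file in the job's result directory, one key=value line per entry. A rough approximation of that behaviour, with made-up values (the real helper lives in autotest_lib.client.common_lib.utils):

    import os

    def write_keyval_sketch(resultdir, keyval):
        # Approximation only: append one key=value line per entry to
        # <resultdir>/keyval, mirroring what the Autotest helper produces.
        with open(os.path.join(resultdir, 'keyval'), 'a') as f:
            for key, value in keyval.items():
                f.write('%s=%s\n' % (key, value))

    keyval = {
        'storage_qual_version': 2,
        'bug_id': 123456789,   # hypothetical value
        'part_id': 'ABC123',   # hypothetical value
    }
    write_keyval_sketch('/tmp', keyval)
    # /tmp/keyval now contains:
    #   storage_qual_version=2
    #   bug_id=123456789
    #   part_id=ABC123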
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_1
new file mode 100644
index 0000000..3babf40
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_20'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 81
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_2
new file mode 100644
index 0000000..1a79ef1
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_20'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 81
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_3
new file mode 100644
index 0000000..b80cdc3
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_20'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 81
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_4
new file mode 100644
index 0000000..d38fa17
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_20_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_20'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 81
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21
deleted file mode 100644
index defe575..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_21'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 80
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_1
new file mode 100644
index 0000000..fcf4d66
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_21'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 80
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_2
new file mode 100644
index 0000000..ba6c5b7
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_21'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 80
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_3
new file mode 100644
index 0000000..87fe197
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_21'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 80
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_4
new file mode 100644
index 0000000..1f83947
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_21_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_21'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 80
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22
deleted file mode 100644
index 960bc62..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_22'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 79
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_1
new file mode 100644
index 0000000..1c0720b
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_22'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 79
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_2
new file mode 100644
index 0000000..7141a2d
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_22'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 79
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_3
new file mode 100644
index 0000000..9843ec2
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_22'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 79
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_4
new file mode 100644
index 0000000..3ad19d1
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_22_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_22'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 79
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23
deleted file mode 100644
index f06228f..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_23'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 78
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_1
new file mode 100644
index 0000000..6b21a8d
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_23'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 78
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification stress test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
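
The new control files above all share the same wrapper pattern: every key in args_dict prefixed with 'tast_' is copied into a temporary YAML file, which is then handed to the 'tast' wrapper test through varsfiles. A minimal standalone sketch of that step follows; the helper name write_tast_varsfile and the example keys are my own illustration, not part of the diff, and the only dependency is PyYAML, which the control files already use.

import tempfile

import yaml


def write_tast_varsfile(args_dict, prefix='tast_'):
    """Write prefix-matching args to a YAML varsfile and return its path."""
    test_args = {key: value for key, value in args_dict.items()
                 if key.startswith(prefix)}
    # delete=False so the file outlives this helper; the caller is
    # responsible for removing it once the wrapper test has consumed it.
    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w',
                                     encoding='utf-8',
                                     delete=False) as temp_file:
        yaml.safe_dump(test_args, stream=temp_file,
                       default_flow_style=False, allow_unicode=True)
        return temp_file.name


# Only the tast_* keys land in the varsfile; wrapper-level keys are dropped.
print(write_tast_varsfile({'tast_disk_size_gb': '128', 'servo_host': 'x'}))
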
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_2
new file mode 100644
index 0000000..1130cf1
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_23'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 78
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_3
new file mode 100644
index 0000000..7cf8037
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_23'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 78
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_4
new file mode 100644
index 0000000..fe2dbe1
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_23_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_23'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 78
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
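
Each replaced control file is split above into four otherwise identical copies whose filenames end in _1 through _4 and which differ only in the DEPENDENCIES = 'dut:N' line; NAME and PRIORITY vary per test, not per copy. The generator sketch below is a hypothetical illustration of that layout, not the repository's actual tooling, and it renders only the fields that vary.

TEMPLATE = """\
NAME = '{name}'
PRIORITY = {priority}
DEPENDENCIES = 'dut:{dut}'
"""


def render_variants(name, priority, duts=4):
    """Return {filename: control-file fragment} for one test split over N DUTs."""
    return {'control.{}_{}'.format(name, dut):
                TEMPLATE.format(name=name, priority=priority, dut=dut)
            for dut in range(1, duts + 1)}


# Mirrors the four control.storage_qual_v2_stress_23_* files added above.
for path, text in sorted(render_variants('storage_qual_v2_stress_23', 78).items()):
    print(path)
    print(text)
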
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24
deleted file mode 100644
index 03884f8..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_24'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 77
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_1
new file mode 100644
index 0000000..fc64a09
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_24'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 77
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
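
Besides the dut:N split, the replacement files open the temp file with mode='w+' and an explicit UTF-8 encoding and switch yaml.dump to yaml.safe_dump. My reading, not stated in the diff, is that this follows from the new PY_VERSION = 3 marker: under Python 3, NamedTemporaryFile() defaults to a binary 'w+b' handle, and safe_dump with its default encoding=None emits str, so dumping into the binary handle fails. A short sketch of both paths, using a placeholder key:

import tempfile

import yaml

data = {'tast_example_key': 'value'}  # placeholder args, not from the diff

# Default (binary) temp file: safe_dump writes str, so Python 3 rejects it.
with tempfile.NamedTemporaryFile(suffix='.yaml') as binary_file:
    try:
        yaml.safe_dump(data, stream=binary_file)
    except TypeError as err:
        print('binary handle rejected str output:', err)

# Text-mode handle with an explicit encoding, as in the new control files.
with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+',
                                 encoding='utf-8') as text_file:
    yaml.safe_dump(data, stream=text_file, default_flow_style=False,
                   allow_unicode=True)
    text_file.seek(0)
    print(text_file.read())
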
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_2
new file mode 100644
index 0000000..30e147b
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_24'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 77
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_3
new file mode 100644
index 0000000..13eaa02
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_24'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 77
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_4
new file mode 100644
index 0000000..ff15857
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_24_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_24'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 77
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25
deleted file mode 100644
index 24c8d35..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_25'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 76
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_1
new file mode 100644
index 0000000..6f590c6
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_25'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 76
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
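
Each control file also records storage_qual_version, bug_id, and part_id through utils.write_keyval before the test runs, with bug_id and part_id supplied to the control file by the scheduling suite. As a rough illustration only, and assuming autotest's usual keyval layout of one key=value line appended to a file named keyval under the results directory, the recorded data looks roughly like this:

import os


def write_keyval_sketch(resultdir, keyval):
    """Append key=value lines, mimicking (not reusing) utils.write_keyval."""
    with open(os.path.join(resultdir, 'keyval'), 'a', encoding='utf-8') as f:
        for key, value in keyval.items():
            f.write('{}={}\n'.format(key, value))


# Placeholder values; in the control files bug_id and part_id come from the
# suite that schedules the job, not from constants.
write_keyval_sketch('.', {'storage_qual_version': 2,
                          'bug_id': 'example-bug',
                          'part_id': 'example-part'})
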
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_2
new file mode 100644
index 0000000..7ba77da
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_25'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 76
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_3
new file mode 100644
index 0000000..b881ec4
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_25'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 76
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_4
new file mode 100644
index 0000000..58300fc
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_25_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_25'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 76
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26
deleted file mode 100644
index db639d5..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_26'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 75
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_1
new file mode 100644
index 0000000..acb97d8
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_26'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 75
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_2
new file mode 100644
index 0000000..bd29ddb
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_26'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 75
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_3
new file mode 100644
index 0000000..4dac523
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_26'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 75
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_4
new file mode 100644
index 0000000..4c5a346
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_26_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_26'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 75
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27
deleted file mode 100644
index bf2a1b0..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_27'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 74
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_1
new file mode 100644
index 0000000..f128bb7
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_27'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 74
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_2
new file mode 100644
index 0000000..249e9d7
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_27'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 74
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_3
new file mode 100644
index 0000000..3366ca4
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_27'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 74
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_4
new file mode 100644
index 0000000..4431bd4
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_27_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_27'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 74
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28
deleted file mode 100644
index 72d630e..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_28'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 73
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
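
The pattern of this change is visible in the hunks above and below: each single stress step (here stress_28, with an 18000-second budget and no DUT pinning) is deleted and replaced by four per-DUT shards (_1 through _4) that keep the same NAME, add PY_VERSION = 3, FAST = False and DEPENDENCIES = 'dut:N', and raise max_run_sec to 28800. The YAML write also changes for Python 3: the old files call yaml.dump on a NamedTemporaryFile opened in its default binary mode, which Python 3 rejects because PyYAML writes str to the stream, while the new files open the file in text mode with an explicit UTF-8 encoding and use yaml.safe_dump with allow_unicode=True. A small sketch contrasting the two, with a hypothetical payload (the TypeError is ordinary Python 3 behavior, not something this patch documents):

    import tempfile
    import yaml

    data = {'tast_example_key': 'value'}  # hypothetical payload

    # Old pattern: default binary-mode temp file. On Python 3, yaml.dump emits str,
    # so writing it to a binary file object raises TypeError.
    try:
        with tempfile.NamedTemporaryFile(suffix='.yaml') as f:
            yaml.dump(data, stream=f, default_flow_style=False)
    except TypeError as err:
        print('old pattern fails on Python 3:', err)

    # New pattern: text-mode file with explicit encoding, safe_dump, unicode allowed.
    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as f:
        yaml.safe_dump(data, stream=f, default_flow_style=False, allow_unicode=True)
        f.seek(0)
        print('new pattern writes:', f.read())
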
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_1
new file mode 100644
index 0000000..5646052
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_28'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 73
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_2
new file mode 100644
index 0000000..0f99d2e
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_28'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 73
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_3
new file mode 100644
index 0000000..07148ce
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_28'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 73
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_4
new file mode 100644
index 0000000..103847f
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_28_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_28'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 73
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29
deleted file mode 100644
index 7a9dbff..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_29'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 72
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_1
new file mode 100644
index 0000000..cef07a3
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_29'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 72
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_2
new file mode 100644
index 0000000..4af3080
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_29'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 72
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_3
new file mode 100644
index 0000000..ea95cde
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_29'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 72
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_4
new file mode 100644
index 0000000..e0f8ded
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_29_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_29'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 72
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30
deleted file mode 100644
index 168a02a..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_30'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 71
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_1
new file mode 100644
index 0000000..6ddfbfa
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_30'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 71
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_2
new file mode 100644
index 0000000..01e7c0c
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_30'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 71
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_3
new file mode 100644
index 0000000..34df372
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_30'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 71
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_4
new file mode 100644
index 0000000..b6a56b5
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_30_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_30'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 71
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31
deleted file mode 100644
index d3fb9ad..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_31'
-ATTRIBUTES = 'suite:storage_qual_v2_xl'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 70
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_1
new file mode 100644
index 0000000..015bf5a
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_31'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 70
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_2
new file mode 100644
index 0000000..d12b2e2
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_31'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 70
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_3
new file mode 100644
index 0000000..a5a8304
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_31'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 70
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_4
new file mode 100644
index 0000000..6552602
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_31_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_31'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 70
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
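
From stress_31 onward the shards carry ATTRIBUTES = 'suite:storage_qual_v2_xl' only (the earlier steps also belong to suite:storage_qual_v2_l), and PRIORITY keeps stepping down by one per stress step (74, 73, 72, ...) so the suite scheduler runs the steps in order. The four shard files of a step differ only in DEPENDENCIES = 'dut:1' through 'dut:4'; they read as if stamped out from a template. A purely hypothetical sketch of such a generator, shown only to summarize the visible pattern (the repository may not produce these files this way at all):

    # Hypothetical generator for the four per-DUT shards of one stress step.
    STEP = 31
    PRIORITY = 70                        # drops by one for each later stress step
    SUITES = 'suite:storage_qual_v2_xl'  # steps 31+ no longer list storage_qual_v2_l

    for dut in range(1, 5):
        path = 'control.storage_qual_v2_stress_%d_%d' % (STEP, dut)
        with open(path, 'w', encoding='utf-8') as ctrl:
            ctrl.write("NAME = 'storage_qual_v2_stress_%d'\n" % STEP)
            ctrl.write("ATTRIBUTES = '%s'\n" % SUITES)
            ctrl.write("PRIORITY = %d\n" % PRIORITY)
            ctrl.write("DEPENDENCIES = 'dut:%d'\n" % dut)
            # The rest of the file (DOC, keyvals, run(), parallel_simple) is the
            # shared boilerplate shown in the hunks above, identical across shards.
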
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32
deleted file mode 100644
index 3bb90fb..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_32'
-ATTRIBUTES = 'suite:storage_qual_v2_xl'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 69
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_1
new file mode 100644
index 0000000..389a31d
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_32'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 69
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_2
new file mode 100644
index 0000000..ee023bc
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_32'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 69
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_3
new file mode 100644
index 0000000..3475afc
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_32'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 69
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_4
new file mode 100644
index 0000000..c860288
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_32_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_32'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 69
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33
deleted file mode 100644
index f2eccca..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_33'
-ATTRIBUTES = 'suite:storage_qual_v2_xl'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 68
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
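
The removed control above still targets Python 2: it dumps with yaml.dump into a NamedTemporaryFile left in its default binary mode and caps the run at 18000 seconds. Its replacements below declare PY_VERSION = 3, split the work across four per-DUT controls (DEPENDENCIES 'dut:1' through 'dut:4'), raise max_run_sec to 28800, and switch to yaml.safe_dump on an explicitly text-mode, UTF-8 temp file. The sketch below, assuming only the standard library and PyYAML with an illustrative key name, shows why the text-mode change is needed once the control runs under Python 3.

    import tempfile
    import yaml

    data = {'tast_example_key': 'value'}  # illustrative key, not from the suite

    # Old pattern: the default NamedTemporaryFile mode is 'w+b', and yaml.dump
    # writes str, so under Python 3 this write raises TypeError.
    try:
        with tempfile.NamedTemporaryFile(suffix='.yaml') as binary_file:
            yaml.dump(data, stream=binary_file, default_flow_style=False)
    except TypeError as err:
        print('binary temp file rejected str output:', err)

    # New pattern used by the replacement controls: explicit text mode and UTF-8.
    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+',
                                     encoding='utf-8') as text_file:
        yaml.safe_dump(data, stream=text_file, default_flow_style=False,
                       allow_unicode=True)
        text_file.seek(0)
        print(text_file.read())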
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_1
new file mode 100644
index 0000000..fd26980
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_33'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 68
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_2
new file mode 100644
index 0000000..9360219
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_33'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 68
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_3
new file mode 100644
index 0000000..25ca930
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_33'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 68
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_4
new file mode 100644
index 0000000..651ea22
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_33_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_33'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 68
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34
deleted file mode 100644
index e9a4319..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_34'
-ATTRIBUTES = 'suite:storage_qual_v2_xl'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 67
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_1
new file mode 100644
index 0000000..8462fbd
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_34'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 67
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_2
new file mode 100644
index 0000000..fe92cbe
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_34'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 67
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_3
new file mode 100644
index 0000000..0c82638
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_34'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 67
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_4
new file mode 100644
index 0000000..3bec983
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_34_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_34'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 67
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35
deleted file mode 100644
index 1a80a44..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_35'
-ATTRIBUTES = 'suite:storage_qual_v2_xl'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 66
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_1
new file mode 100644
index 0000000..c17ed67
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_35'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 66
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_2
new file mode 100644
index 0000000..2ae6b73
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_35'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 66
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_3
new file mode 100644
index 0000000..c6cc2f6
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_35'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 66
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_4
new file mode 100644
index 0000000..ec75029
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_35_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_35'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 66
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36
deleted file mode 100644
index 560a5c5..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_36'
-ATTRIBUTES = 'suite:storage_qual_v2_xl'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 65
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_1
new file mode 100644
index 0000000..ca0681f
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_36'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 65
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_2
new file mode 100644
index 0000000..3de992a
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_36'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 65
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_3
new file mode 100644
index 0000000..5a3d75e
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_36'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 65
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_4
new file mode 100644
index 0000000..c0f3efa
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_36_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_36'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 65
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37
deleted file mode 100644
index 32ca8e4..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_37'
-ATTRIBUTES = 'suite:storage_qual_v2_xl'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 64
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_1
new file mode 100644
index 0000000..68d1c1f
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_37'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 64
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_2
new file mode 100644
index 0000000..3bb0d95
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_37'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 64
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_3
new file mode 100644
index 0000000..f9d6cd1
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_37'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 64
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_4
new file mode 100644
index 0000000..350f996
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_37_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_37'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 64
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38
deleted file mode 100644
index d03867c..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_38'
-ATTRIBUTES = 'suite:storage_qual_v2_xl'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 63
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_1
new file mode 100644
index 0000000..4e6f389
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_38'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 63
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_2
new file mode 100644
index 0000000..9107161
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_38'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 63
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_3
new file mode 100644
index 0000000..fdc5ba5
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_38'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 63
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_4
new file mode 100644
index 0000000..a1ba31d
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_38_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_38'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 63
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39
deleted file mode 100644
index 7276c7d..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_39'
-ATTRIBUTES = 'suite:storage_qual_v2_xl'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 62
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_1
new file mode 100644
index 0000000..29d08e5
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_39'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 62
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_2
new file mode 100644
index 0000000..860b379
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_39'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 62
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_3
new file mode 100644
index 0000000..65de5e8
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_39'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 62
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_4
new file mode 100644
index 0000000..271f044
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_39_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_39'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 62
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40
deleted file mode 100644
index 12106f3..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_stress_40'
-ATTRIBUTES = 'suite:storage_qual_v2_xl'
-TIME = 'long'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 61
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.stress'],
-                    ignore_test_failures=False,
-                    max_run_sec=18000,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_1
new file mode 100644
index 0000000..0614233
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_40'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 61
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_2
new file mode 100644
index 0000000..3fb1a00
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_40'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 61
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_3
new file mode 100644
index 0000000..3cef30e
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_40'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 61
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_4
new file mode 100644
index 0000000..d5228c6
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_stress_40_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_stress_40'
+ATTRIBUTES = 'suite:storage_qual_v2_xl'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 61
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.stress'],
+                    ignore_test_failures=False,
+                    max_run_sec=28800,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown
deleted file mode 100644
index ca751a8..0000000
--- a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'abergman, chromeos-engprod-platform-syd'
-NAME = 'storage_qual_v2_teardown'
-ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
-TIME = 'lengthy'
-TEST_CATEGORY = 'Stress'
-TEST_CLASS = 'Hardware'
-TEST_TYPE = 'Server'
-PRIORITY = 50
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast-based storage qualification quick test.
-
-Tast is an integration-testing framework analogous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-import tempfile
-import yaml
-
-keyval = dict()
-keyval['storage_qual_version'] = 2
-keyval['bug_id'] = bug_id
-keyval['part_id'] = part_id
-utils.write_keyval(job.resultdir, keyval)
-
-def run(machine):
-    args_dict = globals().get('args_dict', dict())
-
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
-        # Writing test arguments to yaml file except for wrapper-related arguments.
-        test_args = dict()
-        for key in args_dict:
-            if key.startswith('tast_'):
-                test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
-
-        job.run_test('tast',
-                    host=hosts.create_host(machine),
-                    test_exprs=['storage.FullQualificationStress.teardown_benchmarks'],
-                    ignore_test_failures=False,
-                    max_run_sec=3600,
-                    command_args=args,
-                    varsfiles=[temp_file.name])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_1
new file mode 100644
index 0000000..ccc50f1
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_1
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_teardown'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'lengthy'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 50
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:1'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.teardown_benchmarks'],
+                    ignore_test_failures=False,
+                    max_run_sec=3600,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_2 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_2
new file mode 100644
index 0000000..57b7f79
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_2
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_teardown'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'lengthy'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 50
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:2'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.teardown_benchmarks'],
+                    ignore_test_failures=False,
+                    max_run_sec=3600,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_3 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_3
new file mode 100644
index 0000000..db04d23
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_3
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_teardown'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'lengthy'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 50
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:3'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.teardown_benchmarks'],
+                    ignore_test_failures=False,
+                    max_run_sec=3600,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_4 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_4
new file mode 100644
index 0000000..1fca50c
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_4
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'storage_qual_v2_teardown'
+ATTRIBUTES = 'suite:storage_qual_v2_xl, suite:storage_qual_v2_l, suite:storage_qual_v2_m, suite:storage_qual_v2_s, suite:storage_qual_v2_xs'
+TIME = 'lengthy'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 50
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = 'dut:4'
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = bug_id
+keyval['part_id'] = part_id
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.teardown_benchmarks'],
+                    ignore_test_failures=False,
+                    max_run_sec=3600,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_satlab1 b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_satlab1
new file mode 100644
index 0000000..49396cd
--- /dev/null
+++ b/server/site_tests/hardware_StorageQualV2/control.storage_qual_v2_teardown_satlab1
@@ -0,0 +1,64 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'varunth, chromeos-fleet-software'
+NAME = 'storage_qual_v2_teardown_satlab'
+ATTRIBUTES = 'suite:storage_qual_v2_xs_satlab'
+TIME = 'lengthy'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PY_VERSION = 3
+PRIORITY = 50
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 0
+REQUIRE_SSP = True
+FAST = False
+
+DOC = '''
+Run the Tast-based storage qualification quick test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import tempfile
+import yaml
+
+global args_dict
+try:
+    args_dict
+except NameError:
+    args_dict = utils.args_to_dict(args)
+
+keyval = dict()
+keyval['storage_qual_version'] = 2
+keyval['bug_id'] = args_dict.get("bug_id", "")
+keyval['part_id'] = args_dict.get("part_id", "")
+utils.write_keyval(job.resultdir, keyval)
+
+def run(machine):
+    args_dict = globals().get('args_dict', dict())
+
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
+        # Writing test arguments to yaml file except for wrapper-related arguments.
+        test_args = dict()
+        for key in args_dict:
+            if key.startswith('tast_'):
+                test_args[key] = args_dict[key]
+        yaml.safe_dump(test_args, stream=temp_file, default_flow_style=False, allow_unicode=True)
+
+        job.run_test('tast',
+                    host=hosts.create_host(machine),
+                    test_exprs=['storage.FullQualificationStress.teardown_benchmarks'],
+                    ignore_test_failures=False,
+                    max_run_sec=3600,
+                    command_args=args,
+                    varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
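Unlike the generated per-DUT files, this new satlab control file cannot assume bug_id and part_id are predefined, so it rebuilds args_dict from the raw args list when the name is missing and defaults the keyvals to empty strings. A rough standalone sketch of that guard, with a simplified, hypothetical stand-in for utils.args_to_dict and made-up values:

# Simplified stand-in for autotest_lib.client.common_lib.utils.args_to_dict;
# the real helper's parsing rules differ slightly.
def args_to_dict(args):
    return dict(item.split('=', 1) for item in args if '=' in item)

args = ['bug_id=b:123456', 'part_id=SAMPLE-SSD-512G']  # hypothetical values

try:
    args_dict  # normally injected into the control file's globals
except NameError:
    args_dict = args_to_dict(args)

keyval = {
    'storage_qual_version': 2,
    'bug_id': args_dict.get('bug_id', ''),
    'part_id': args_dict.get('part_id', ''),
}
print(keyval)
# {'storage_qual_version': 2, 'bug_id': 'b:123456', 'part_id': 'SAMPLE-SSD-512G'}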
diff --git a/server/site_tests/hardware_StorageQualV2/generate_control_files.py b/server/site_tests/hardware_StorageQualV2/generate_control_files.py
index 8fdc87f..bb6b8b3 100644
--- a/server/site_tests/hardware_StorageQualV2/generate_control_files.py
+++ b/server/site_tests/hardware_StorageQualV2/generate_control_files.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -22,6 +23,7 @@
 SUITE = 'storage_qual_v2'
 TEST_PREFIX = 'storage.FullQualificationStress.'
 TEMPLATE_FILE = 'template.control.storage_qual'
+MAX_DUTS = 4
 
 TESTS = [{
         'test': 'setup',
@@ -34,7 +36,7 @@
         'test': 'stress_{index:02n}',
         'tast_name': 'stress',
         'iterations': 40,
-        'duration': 5 * HOUR_IN_SECS,
+        'duration': 8 * HOUR_IN_SECS,
         'priority': 100,
         'length': 'long'
 }, {
@@ -79,15 +81,18 @@
 
 for test in TESTS:
     for i in range(int(test['iterations'])):
-        test_name = test['test'].format(index=i + 1)
-        control_file = template.format(
-                name='_'.join([SUITE, test_name]),
-                priority=int(test['priority'] - i),
-                duration=int(test['duration']),
-                test_exprs=TEST_PREFIX + test['tast_name'],
-                length=test['length'],
-                version=STORAGE_QUAL_VERSION,
-                attributes=", ".join(_get_suite_attributes(i)),
-        )
-        control_file_name = 'control.' + '_'.join([SUITE, test_name])
-        _write_control_file(control_file_name, control_file)
+        for d in range(1, MAX_DUTS + 1):
+            test_name = test['test'].format(index=i + 1)
+            control_file = template.format(
+                    name='_'.join([SUITE, test_name]),
+                    priority=int(test['priority'] - i),
+                    duration=int(test['duration']),
+                    test_exprs=TEST_PREFIX + test['tast_name'],
+                    length=test['length'],
+                    version=STORAGE_QUAL_VERSION,
+                    attributes=", ".join(_get_suite_attributes(i)),
+                    dependency="dut:%d" % d,
+            )
+            control_file_name = 'control.' + '_'.join(
+                    [SUITE, test_name, str(d)])
+            _write_control_file(control_file_name, control_file)
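This generator change is what produced the per-DUT control files earlier in the patch: every (test, iteration) pair now expands into MAX_DUTS files, one per 'dut:<n>' dependency, with the DUT index appended to the control file name. A rough sketch of that expansion under a toy template (TEMPLATE below is a hypothetical miniature of template.control.storage_qual, not the real file):

MAX_DUTS = 4
SUITE = 'storage_qual_v2'
# Hypothetical miniature template standing in for template.control.storage_qual.
TEMPLATE = ("NAME = '{name}'\n"
            "PRIORITY = {priority}\n"
            "DEPENDENCIES = '{dependency}'\n")

def expand(test_name, priority):
    """Yield (filename, contents) for one test across all DUT slots."""
    for d in range(1, MAX_DUTS + 1):
        contents = TEMPLATE.format(name='_'.join([SUITE, test_name]),
                                   priority=priority,
                                   dependency='dut:%d' % d)
        filename = 'control.' + '_'.join([SUITE, test_name, str(d)])
        yield filename, contents

for filename, _ in expand('stress_37', 64):
    print(filename)
# control.storage_qual_v2_stress_37_1 ... control.storage_qual_v2_stress_37_4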
diff --git a/server/site_tests/hardware_StorageQualV2/template.control.storage_qual b/server/site_tests/hardware_StorageQualV2/template.control.storage_qual
index 81bcb79..5c8dda9 100644
--- a/server/site_tests/hardware_StorageQualV2/template.control.storage_qual
+++ b/server/site_tests/hardware_StorageQualV2/template.control.storage_qual
@@ -11,10 +11,13 @@
 TEST_CATEGORY = 'Stress'
 TEST_CLASS = 'Hardware'
 TEST_TYPE = 'Server'
+PY_VERSION = 3
 PRIORITY = {priority}
 MAX_RESULT_SIZE_KB = 1024 * 1024
 JOB_RETRIES = 0
 REQUIRE_SSP = True
+FAST = False
+DEPENDENCIES = '{dependency}'
 
 DOC = '''
 Run the Tast-based storage qualification quick test.
@@ -37,13 +40,16 @@
 def run(machine):
     args_dict = globals().get('args_dict', dict())
 
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+', encoding='utf-8') as temp_file:
         # Writing test arguments to yaml file except for wrapper-related arguments.
         test_args = dict()
         for key in args_dict:
             if key.startswith('tast_'):
                 test_args[key] = args_dict[key]
-        yaml.dump(test_args, stream=temp_file, default_flow_style=False)
+        yaml.safe_dump(test_args,
+                      stream=temp_file,
+                      default_flow_style=False,
+                      allow_unicode=True)
 
         job.run_test('tast',
                     host=hosts.create_host(machine),
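The template's switch from yaml.dump to yaml.safe_dump with allow_unicode=True keeps the varsfile readable when argument values contain non-ASCII text, and safe_dump additionally refuses to serialize arbitrary Python objects, which is the safer choice for a file consumed by another process. A small illustration of the difference (the mapping below is hypothetical):

import yaml

test_args = {'tast_label': 'büild-42'}  # hypothetical non-ASCII value

print(yaml.dump(test_args, default_flow_style=False), end='')
# tast_label: "b\xFCild-42"   (non-ASCII escaped without allow_unicode)

print(yaml.safe_dump(test_args, default_flow_style=False,
                     allow_unicode=True), end='')
# tast_label: büild-42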
diff --git a/server/site_tests/hardware_StorageStress/control b/server/site_tests/hardware_StorageStress/control
index 5e7f3cd..95e9e37 100644
--- a/server/site_tests/hardware_StorageStress/control
+++ b/server/site_tests/hardware_StorageStress/control
@@ -8,6 +8,7 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This test calls hardware_StorageFio to write data once and repeatedly verifies
@@ -18,4 +19,3 @@
     job.run_test('hardware_StorageStress', client_ip=machine)
 
 job.parallel_simple(run_hardware_storage_stress, machines)
-
diff --git a/server/site_tests/hardware_StorageStress/control.other b/server/site_tests/hardware_StorageStress/control.other
index 69bd0f4..7bc4d1c 100644
--- a/server/site_tests/hardware_StorageStress/control.other
+++ b/server/site_tests/hardware_StorageStress/control.other
@@ -8,6 +8,7 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This test calls hardware_StorageFio to write data once and repeatedly verifies
diff --git a/server/site_tests/hardware_StorageStress/control.quick b/server/site_tests/hardware_StorageStress/control.quick
index 50b755c..2f90c17 100644
--- a/server/site_tests/hardware_StorageStress/control.quick
+++ b/server/site_tests/hardware_StorageStress/control.quick
@@ -8,6 +8,7 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This test calls hardware_StorageFio to write data once and repeatedly verifies
diff --git a/server/site_tests/hardware_StorageStress/control.soak b/server/site_tests/hardware_StorageStress/control.soak
index 83eedf7..dd4255e 100644
--- a/server/site_tests/hardware_StorageStress/control.soak
+++ b/server/site_tests/hardware_StorageStress/control.soak
@@ -8,6 +8,7 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 Run intensive write/read test at different size for days.
diff --git a/server/site_tests/hardware_StorageStress/control.soak.test b/server/site_tests/hardware_StorageStress/control.soak.test
index de86230..2596bc9 100644
--- a/server/site_tests/hardware_StorageStress/control.soak.test
+++ b/server/site_tests/hardware_StorageStress/control.soak.test
@@ -8,6 +8,7 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 Shorter version of control.soak, lasts only 1 hour:
diff --git a/server/site_tests/hardware_StorageStress/control.stress b/server/site_tests/hardware_StorageStress/control.stress
index 7e81e7e..d990ce8 100644
--- a/server/site_tests/hardware_StorageStress/control.stress
+++ b/server/site_tests/hardware_StorageStress/control.stress
@@ -9,6 +9,7 @@
 TIME = 'LONG'
 TEST_CLASS = 'stress'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 Variants of the test are run to simulate a number of scenarios while exercising
diff --git a/server/site_tests/hardware_StorageStress/control.suspend b/server/site_tests/hardware_StorageStress/control.suspend
index 54bc7a8..e84dd9c 100644
--- a/server/site_tests/hardware_StorageStress/control.suspend
+++ b/server/site_tests/hardware_StorageStress/control.suspend
@@ -8,6 +8,7 @@
 TIME = 'LENGTHY'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This test calls hardware_StorageFio to write data once and repeatedly verifies
diff --git a/server/site_tests/hardware_StorageStress/control.test b/server/site_tests/hardware_StorageStress/control.test
index d151f0a..2582841 100644
--- a/server/site_tests/hardware_StorageStress/control.test
+++ b/server/site_tests/hardware_StorageStress/control.test
@@ -8,6 +8,7 @@
 TIME = 'MEDIUM'
 TEST_CLASS = 'hardware'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This test calls hardware_StorageFio to write data once and repeatedly verifies
diff --git a/server/site_tests/hardware_StorageStress/hardware_StorageStress.py b/server/site_tests/hardware_StorageStress/hardware_StorageStress.py
index 422b2f5..e14f821 100644
--- a/server/site_tests/hardware_StorageStress/hardware_StorageStress.py
+++ b/server/site_tests/hardware_StorageStress/hardware_StorageStress.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/infra_CompanionDuts/control b/server/site_tests/infra_CompanionDuts/control
new file mode 100644
index 0000000..c06b813
--- /dev/null
+++ b/server/site_tests/infra_CompanionDuts/control
@@ -0,0 +1,22 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'dbeckett'
+NAME = 'infra_CompanionDuts'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'stub'
+TEST_TYPE = 'server'
+PY_VERSION = 3
+DOC = """
+Verify the companion dut flag reaches a test.
+"""
+
+
+def run(machine):
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    job.run_test('infra_CompanionDuts', host=host, companions=companions)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/infra_CompanionDuts/infra_CompanionDuts.py b/server/site_tests/infra_CompanionDuts/infra_CompanionDuts.py
new file mode 100644
index 0000000..aa03c09
--- /dev/null
+++ b/server/site_tests/infra_CompanionDuts/infra_CompanionDuts.py
@@ -0,0 +1,28 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import test
+
+
+class infra_CompanionDuts(test.test):
+    """
+    Verify the companion dut flag reaches a test.
+
+    """
+    version = 1
+
+    def run_once(self, host, companions):
+        """
+        Starting point of this test.
+
+        Note: base class sets host as self._host.
+
+        """
+        self.host = host
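+        # Run a trivial command on each companion host to prove the
+        # companion_hosts flag produced reachable DUTs.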
+        for c in companions:
+            dut_out = c.run('echo True').stdout.strip()
+            if dut_out != 'True':
+                raise error.TestError("Companion DUT stdout != True (got: %s)",
+                                      dut_out)
diff --git a/server/site_tests/infra_DutServers/control b/server/site_tests/infra_DutServers/control
new file mode 100644
index 0000000..ce1d13b
--- /dev/null
+++ b/server/site_tests/infra_DutServers/control
@@ -0,0 +1,22 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'seewaifu'
+NAME = 'infra_DutServers'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'stub'
+TEST_TYPE = 'server'
+PY_VERSION = 3
+DOC = """
+Verify the dut server flag (--dut_servers) reaches a test.
+The following command should pass:
+  test_that --board=<board> <dut> infra_DutServers --dut_servers=<addr>
+"""
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('infra_DutServers', host=host, dut_servers=dut_servers)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/infra_DutServers/infra_DutServers.py b/server/site_tests/infra_DutServers/infra_DutServers.py
new file mode 100644
index 0000000..6165166
--- /dev/null
+++ b/server/site_tests/infra_DutServers/infra_DutServers.py
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import test
+
+
+class infra_DutServers(test.test):
+    """
+    Verify the dut server flag (--dut_servers) reaches a test.
+    """
+    version = 1
+
+    def run_once(self, host, dut_servers):
+        """
+        Starting point of this test.
+        Note: base class sets host as self._host.
+
+        @param host:        The host address of the DUT
+        @param dut_servers: A list of servers from the --dut_servers flag.
+
+        @returns: Nothing, but raises an error if dut_servers is empty.
+        """
+        self.host = host
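+        # An empty list or empty entries mean the --dut_servers flag did not
+        # propagate to the test intact.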
+        if not dut_servers:
+            raise error.TestError("DUT Server list is empty")
+        for c in dut_servers:
+            if not c:
+                raise error.TestError("DUT Server list %s has empty elements",
+                                      dut_servers)
diff --git a/server/site_tests/infra_FirmwareAutoupdate/control b/server/site_tests/infra_FirmwareAutoupdate/control
index 66a0849..60c7820 100644
--- a/server/site_tests/infra_FirmwareAutoupdate/control
+++ b/server/site_tests/infra_FirmwareAutoupdate/control
@@ -2,10 +2,11 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "infra_FirmwareAutoupdate"
 TIME = "SHORT"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This is an administrative procedure used to update the RW firmware
diff --git a/server/site_tests/infra_FirmwareAutoupdate/infra_FirmwareAutoupdate.py b/server/site_tests/infra_FirmwareAutoupdate/infra_FirmwareAutoupdate.py
index 85c5967..2d5283b 100644
--- a/server/site_tests/infra_FirmwareAutoupdate/infra_FirmwareAutoupdate.py
+++ b/server/site_tests/infra_FirmwareAutoupdate/infra_FirmwareAutoupdate.py
@@ -16,7 +16,7 @@
 
     In the test lab, this step is suppressed, to prevent devices from
     inadvertently updating to a new firmware version as a consequence of
-    installing a new Chrome OS build for testing.  In particular,
+    installing a new ChromeOS build for testing.  In particular,
     because the firmware is updated whenever the bundled firmware is
     _different_, and not merely _more recent_, suppressing the update
     prevents unexpectedly downgrading the firmware.
diff --git a/server/site_tests/infra_MultiDutsWithAndroid/control b/server/site_tests/infra_MultiDutsWithAndroid/control
new file mode 100644
index 0000000..044a756
--- /dev/null
+++ b/server/site_tests/infra_MultiDutsWithAndroid/control
@@ -0,0 +1,24 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'xianuowang'
+ATTRIBUTES = "suite:infra_multi_duts_with_android"
+NAME = 'infra_MultiDutsWithAndroid'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'stub'
+TEST_TYPE = 'server'
+PY_VERSION = 3
+DOC = """
+Verify the test can create the correct host type for Android devices and
+perform basic adb actions.
+"""
+
+
+def run(machine):
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    job.run_test('infra_MultiDutsWithAndroid', host=host, companions=companions)
+
+parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/infra_MultiDutsWithAndroid/control.local b/server/site_tests/infra_MultiDutsWithAndroid/control.local
new file mode 100644
index 0000000..830cec1
--- /dev/null
+++ b/server/site_tests/infra_MultiDutsWithAndroid/control.local
@@ -0,0 +1,36 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = 'xianuowang'
+NAME = 'infra_MultiDutsWithAndroid.local'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'stub'
+TEST_TYPE = 'server'
+PY_VERSION = 3
+DOC = """
+The local version of infra_MultiDutsWithAndroid. Local in this context means
+run from a local autotest checkout (e.g. test_that), where autoserv doesn't
+have access to host_info_store data read from the fleet inventory.
+
+For more details, please see the DOC section in the file 'control'.
+
+The args below are expected to be added to the test run:
+--args="phone_station=$PHONE_HOST android_serial=$ANDROID_SERIAL_NUMBER"
+
+When using port forwarding to localhost, the expected args are:
+--args="phone_station=localhost android_station_ssh_port=$FORWARDED_PORT android_serial=$ANDROID_SERIAL_NUMBER"
+"""
+
+args_dict = utils.args_to_dict(args)
+android_args = hosts.AndroidHost.get_android_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine)
+    companions = [hosts.AndroidHost('local_phone', android_args=android_args)]
+    job.run_test('infra_MultiDutsWithAndroid', host=host, companions=companions)
+
+parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/infra_MultiDutsWithAndroid/infra_MultiDutsWithAndroid.py b/server/site_tests/infra_MultiDutsWithAndroid/infra_MultiDutsWithAndroid.py
new file mode 100644
index 0000000..b424f56
--- /dev/null
+++ b/server/site_tests/infra_MultiDutsWithAndroid/infra_MultiDutsWithAndroid.py
@@ -0,0 +1,43 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import test
+
+
+class infra_MultiDutsWithAndroid(test.test):
+    """
+    Verify the test can create the correct host type for Android devices.
+
+    """
+    version = 1
+
+    def _verify_adb(self, dut):
+        logging.info("Starting to verify basic adb actions.")
+        dut.restart_adb_server()
+        dut.ensure_device_connectivity()
+        ip_address = dut.get_wifi_ip_address()
+        logging.info("Ip address from Android device: %s", ip_address)
+
+    def run_once(self, host, companions):
+        """
+        Starting point of this test.
+
+        Note: base class sets host as self._host.
+
+        """
+        self.host = host
+        android_device_tested = False
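+        # Only Android companion hosts expose a phone_station attribute;
+        # other companion host types are skipped.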
+        for dut in companions:
+            if hasattr(dut, 'phone_station'):
+                dut_out = dut.phone_station.run('echo True').stdout.strip()
+                if dut_out != 'True':
+                    raise error.TestError(
+                            'phone station stdout != True (got: %s)' % dut_out)
+                self._verify_adb(dut)
+                android_device_tested = True
+        if not android_device_tested:
+            raise error.TestError(
+                    'No Android host detected from companion duts.')
diff --git a/server/site_tests/infra_ServerPythonVersion/control.3 b/server/site_tests/infra_ServerPythonVersion/control.3
new file mode 100644
index 0000000..c5b6173
--- /dev/null
+++ b/server/site_tests/infra_ServerPythonVersion/control.3
@@ -0,0 +1,18 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dbeckett"
+NAME = "infra_ServerPythonVersion.3"
+TIME = "SHORT"
+TEST_TYPE = "server"
+DOC = """Verify the Sever & Client Python Version."""
+PY_VERSION = 3
+ATTRIBUTES = "suite:py3-beta"
+
+def run(machine):
+    job.run_test('infra_ServerPythonVersion',
+                 host=hosts.create_host(machine),
+                 case=3)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/infra_ServerPythonVersion/infra_ServerPythonVersion.py b/server/site_tests/infra_ServerPythonVersion/infra_ServerPythonVersion.py
new file mode 100644
index 0000000..42100ae
--- /dev/null
+++ b/server/site_tests/infra_ServerPythonVersion/infra_ServerPythonVersion.py
@@ -0,0 +1,31 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import autotest
+from autotest_lib.server import test
+
+
+class infra_ServerPythonVersion(test.test):
+    """Checks the version on the server, then client."""
+    version = 1
+
+    def run_once(self, host, case):
+        """
+        Starting point of this test.
+
+        Note: base class sets host as self._host.
+
+        """
+        self.host = host
+
+        self.autotest_client = autotest.Autotest(self.host)
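+        # Check the server-side interpreter first, then run the matching
+        # client-side check on the DUT.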
+        if sys.version_info.major != case:
+            raise error.TestFail("Not running in python version %s" % case)
+
+        self.autotest_client.run_test('infra_PythonVersion',
+                                      case=case,
+                                      check_client_result=True)
diff --git a/server/site_tests/infra_TLSExecDUTCommand/control b/server/site_tests/infra_TLSExecDUTCommand/control
index dc95dbe..a5cd700 100644
--- a/server/site_tests/infra_TLSExecDUTCommand/control
+++ b/server/site_tests/infra_TLSExecDUTCommand/control
@@ -6,11 +6,10 @@
 NAME = "infra_TLSExecDUTCommand"
 TIME = "SHORT"
 TEST_TYPE = "server"
-ATTRIBUTES = 'suite:ent-nightly'
-
 DOC = """
 Test the TLS ExecDUTCommand API
 """
+PY_VERSION = 3
 
 def run(machine):
     job.run_test('infra_TLSExecDUTCommand',
diff --git a/server/site_tests/infra_TLSExecDUTCommand/control.basic b/server/site_tests/infra_TLSExecDUTCommand/control.basic
new file mode 100644
index 0000000..1633b1c
--- /dev/null
+++ b/server/site_tests/infra_TLSExecDUTCommand/control.basic
@@ -0,0 +1,21 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dbeckett"
+NAME = "infra_TLSExecDUTCommand.basic"
+TIME = "SHORT"
+TEST_TYPE = "server"
+ATTRIBUTES = 'suite:ent-nightly'
+PY_VERSION = 3
+
+DOC = """
+Test the TLS ExecDUTCommand API
+"""
+
+def run(machine):
+    job.run_test('infra_TLSExecDUTCommand',
+                 host=hosts.create_host(machine),
+                 case='basic')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/infra_TLSExecDUTCommand/control.stress b/server/site_tests/infra_TLSExecDUTCommand/control.stress
new file mode 100644
index 0000000..a6e6a1b
--- /dev/null
+++ b/server/site_tests/infra_TLSExecDUTCommand/control.stress
@@ -0,0 +1,21 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dbeckett"
+NAME = "infra_TLSExecDUTCommand.stress"
+TIME = "SHORT"
+TEST_TYPE = "server"
+ATTRIBUTES = 'suite:ent-nightly'
+PY_VERSION = 3
+
+DOC = """
+Test the TLS ExecDUTCommand API
+"""
+
+def run(machine):
+    job.run_test('infra_TLSExecDUTCommand',
+                 host=hosts.create_host(machine),
+                 case='stress')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/infra_TLSExecDUTCommand/control.stress_fail b/server/site_tests/infra_TLSExecDUTCommand/control.stress_fail
new file mode 100644
index 0000000..aba91d9
--- /dev/null
+++ b/server/site_tests/infra_TLSExecDUTCommand/control.stress_fail
@@ -0,0 +1,21 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dbeckett"
+NAME = "infra_TLSExecDUTCommand.stress_fail"
+TIME = "SHORT"
+TEST_TYPE = "server"
+ATTRIBUTES = 'suite:ent-nightly'
+PY_VERSION = 3
+
+DOC = """
+Test the TLS ExecDUTCommand API
+"""
+
+def run(machine):
+    job.run_test('infra_TLSExecDUTCommand',
+                 host=hosts.create_host(machine),
+                 case='stress_fail')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/infra_TLSExecDUTCommand/control.timeout b/server/site_tests/infra_TLSExecDUTCommand/control.timeout
new file mode 100644
index 0000000..c34506e
--- /dev/null
+++ b/server/site_tests/infra_TLSExecDUTCommand/control.timeout
@@ -0,0 +1,21 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dbeckett"
+NAME = "infra_TLSExecDUTCommand.timeout"
+TIME = "SHORT"
+TEST_TYPE = "server"
+ATTRIBUTES = 'suite:ent-nightly'
+PY_VERSION = 3
+
+DOC = """
+Test the TLS ExecDUTCommand API
+"""
+
+def run(machine):
+    job.run_test('infra_TLSExecDUTCommand',
+                 host=hosts.create_host(machine),
+                 case='timeout')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/infra_TLSExecDUTCommand/infra_TLSExecDUTCommand.py b/server/site_tests/infra_TLSExecDUTCommand/infra_TLSExecDUTCommand.py
index 84cce5a..812504d 100644
--- a/server/site_tests/infra_TLSExecDUTCommand/infra_TLSExecDUTCommand.py
+++ b/server/site_tests/infra_TLSExecDUTCommand/infra_TLSExecDUTCommand.py
@@ -7,35 +7,75 @@
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
 from autotest_lib.server import test
-from autotest_lib.server.hosts.drone_api_client.client import TLSClient
+from autotest_lib.server.hosts.tls_client import connection
+from autotest_lib.server.hosts.tls_client import exec_dut_command
 
 
 class infra_TLSExecDUTCommand(test.test):
     """
     Run a command on the host via the TLS API (ExecDutCommand) and ensure the
-    output is as expected.
+    behavior matches what the selected case expects.
 
     """
 
     version = 1
 
-    def run_once(self, host):
+    def run_once(self, host, case):
         """
         Run the test.
 
         @param host: A host object representing the DUT.
+        @param case: The case to run.
 
         """
-        tlsclient = TLSClient(hostname=host.hostname)
-        res = tlsclient.run_cmd("echo success")
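+        # One TLS connection is opened up front and reused by every
+        # ExecDutCommand call the selected case makes.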
+        tlsconn = connection.TLSConnection()
+        self.tlsclient = exec_dut_command.TLSExecDutCommandClient(
+                tlsconn, host.hostname)
+        if case == "basic":
+            self.basic()
+        elif case == "stress":
+            self.stress()
+        elif case == "stress_fail":
+            self.stress_fail()
+        elif case == "timeout":
+            self.timeout()
+        else:
+            raise error.TestError("Case {} does not exist".format(case))
+
+    def timeout(self):
+        """Test that the timeout is respected."""
+        try:
+            self.tlsclient.run_cmd("sleep 10", timeout=5)
+        except error.CmdTimeoutError:
+            return
+        raise error.TestError("Command did not timeout.")
+
+    def stress(self):
+        """Basic command 500 times in a row."""
+        for i in range(500):
+            self.basic()
+
+    def stress_fail(self):
+        """Test a cmd that should return exit_status of 1 does so, reliably."""
+        for i in range(500):
+            res = self.tlsclient.run_cmd("NonExistingCommand")
+            if res.exit_status == 0:
+                raise error.TestError(
+                        "TLS SSH exit status was: '{}'. Expected != 0".format(
+                                res.exit_status))
+
+    def basic(self):
+        """Run a command over the TLS ExecDutCommand API. Verify output."""
+        res = self.tlsclient.run_cmd("echo success")
         if not isinstance(res, utils.CmdResult):
             raise error.TestError(
                 "Client returned type: '{}'. Expected type: 'utils.CmdResult'"
                 .format(type(res)))
         if res.exit_status != 0:
             logging.info("STD_ERR of res {}".format(res.stderr))
-            raise error.TestError("TLS CMD exit status was: '{}'. Expected: '0'"
-                                  .format(res.exit_status))
+            raise error.TestError(
+                    "TLS SSH exit status was: '{}'. Expected: '0'".format(
+                            res.exit_status))
         if res.stdout != "success\n":
-            raise error.TestError("TLS returned: '{}'. Expected: '{}'"
-                                  .format(res.stdout, "success\n"))
+            raise error.TestError("TLS returned: '{}'. Expected: '{}'".format(
+                    res.stdout, "success\n"))
diff --git a/server/site_tests/infra_TLSFakeOmaha/control b/server/site_tests/infra_TLSFakeOmaha/control
new file mode 100644
index 0000000..de7e39b
--- /dev/null
+++ b/server/site_tests/infra_TLSFakeOmaha/control
@@ -0,0 +1,19 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dbeckett"
+NAME = "infra_TLSFakeOmaha"
+TIME = "SHORT"
+TEST_TYPE = "server"
+PY_VERSION = 3
+
+DOC = """
+Test the TLS FakeOmaha API
+"""
+
+def run(machine):
+    job.run_test('infra_TLSFakeOmaha',
+                 host=hosts.create_host(machine))
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/infra_TLSFakeOmaha/control.basic b/server/site_tests/infra_TLSFakeOmaha/control.basic
new file mode 100644
index 0000000..57edaac
--- /dev/null
+++ b/server/site_tests/infra_TLSFakeOmaha/control.basic
@@ -0,0 +1,20 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dbeckett"
+NAME = "infra_TLSFakeOmaha.basic"
+TIME = "SHORT"
+TEST_TYPE = "server"
+PY_VERSION = 3
+
+DOC = """
+Test the TLS FakeOmaha API
+"""
+
+def run(machine):
+    job.run_test('infra_TLSFakeOmaha',
+                 host=hosts.create_host(machine),
+                 case='basic')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/infra_TLSFakeOmaha/control.full b/server/site_tests/infra_TLSFakeOmaha/control.full
new file mode 100644
index 0000000..30308f1
--- /dev/null
+++ b/server/site_tests/infra_TLSFakeOmaha/control.full
@@ -0,0 +1,20 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dbeckett"
+NAME = "infra_TLSFakeOmaha.full"
+TIME = "SHORT"
+TEST_TYPE = "server"
+PY_VERSION = 3
+
+DOC = """
+Test the TLS FakeOmaha API
+"""
+
+def run(machine):
+    job.run_test('infra_TLSFakeOmaha',
+                 host=hosts.create_host(machine),
+                 case='full')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/infra_TLSFakeOmaha/infra_TLSFakeOmaha.py b/server/site_tests/infra_TLSFakeOmaha/infra_TLSFakeOmaha.py
new file mode 100644
index 0000000..fcf992c
--- /dev/null
+++ b/server/site_tests/infra_TLSFakeOmaha/infra_TLSFakeOmaha.py
@@ -0,0 +1,80 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.server import test
+from autotest_lib.server.hosts.tls_client import connection
+from autotest_lib.server.hosts.tls_client import fake_omaha
+
+
+class infra_TLSFakeOmaha(test.test):
+    """
+    Start the TLS FakeOmaha service and ensure a URL is returned.
+
+    """
+
+    version = 1
+
+    def run_once(self, host, case):
+        """
+        Run the test.
+
+        @param host: A host object representing the DUT.
+        @param case: The case to run.
+
+        """
+        tlsconn = connection.TLSConnection()
+        self.fake_omaha = fake_omaha.TLSFakeOmaha(tlsconn)
+        self.host = host
+
+        # Run the case
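+        # e.g. case='basic' dispatches to self._basic().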
+        eval("self._%s()" % case)
+
+    def _basic(self):
+        """Run the test with the minimum number of flags."""
+        fake_omaha_url = self.fake_omaha.start_omaha(
+                self.host.hostname,
+                target_build=
+                'gs://chromeos-image-archive/eve-release/R87-13457.0.0',
+                payloads=[{
+                        'payload_id': 'ROOTFS',
+                        'payload_type': 'FULL'
+                }])
+        if fake_omaha_url is None or fake_omaha_url == '':
+            raise error.TestFail("No url returned from fake_omaha")
+        if 'http://' not in fake_omaha_url:
+            raise error.TestFail("fake_omaha returned invalid update url: %s" %
+                                 fake_omaha_url)
+
+    def _full(self):
+        """Run the test with the none-default flags."""
+        fake_omaha_url = self.fake_omaha.start_omaha(
+                self.host.hostname,
+                target_build=
+                'gs://chromeos-image-archive/eve-release/R87-13457.0.0',
+                payloads=[{
+                        'payload_id': 'ROOTFS',
+                        'payload_type': 'FULL'
+                }],
+                exposed_via_proxy=True,
+                critical_update=True,
+                return_noupdate_starting=1)
+
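+        # With exposed_via_proxy set, the returned URL must not point at the
+        # local loopback address, and the critical/noupdate flags must appear
+        # in the URL.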
+        critical_tag = 'critical_update=True'
+        no_update_tag = '&no_update=True'
+        none_proxy_url = 'http://127.0.0.1'
+        if critical_tag not in fake_omaha_url:
+            raise error.TestFail("fake_omaha returned invalid update url: %s"
+                                 " Expected %s in url." %
+                                 (fake_omaha_url, critical_tag))
+
+        if no_update_tag not in fake_omaha_url:
+            raise error.TestFail("fake_omaha returned invalid update url: %s"
+                                 " Expected %s in url." %
+                                 (fake_omaha_url, no_update_tag))
+
+        if none_proxy_url in fake_omaha_url:
+            raise error.TestFail("fake_omaha returned invalid update url: %s"
+                                 " Expected %s NOT in url." %
+                                 (fake_omaha_url, none_proxy_url))
diff --git a/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/control b/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/control
deleted file mode 100644
index 7406bfb..0000000
--- a/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/control
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "jimishs@google.com"
-NAME = "kernel_ExternalUsbPeripheralsDetectionTest"
-PURPOSE = "Kernel USB detection test"
-CRITERIA = "This test will fail if any of the actions or checks fail."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform, kernel"
-TEST_TYPE = "server"
-#ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo_state:WORKING, kernel_usb"
-
-DOC = """
-This test uses servo to connect USB devices.
-This test verifies if drivers are created for each USB device or not.
-
-The test fails if
-- if USB device is not detected in lsusb command
-- if driver for USB device is not created
-- USB detected peripherals are different than expected
-- there is no servo board attached
-
-Set1 is set of four USB peripherals plugged
-- LG Android phone
-- USB 3G dongle
-- USB HD Webcam - should be Logitech HD Pro Webcam C920
-- USB3.0 Card Reader
-- USB Headphone
-- USB Mouse
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    usb_checks = {
-        # Microsoft Headphone
-        # str("lsusb -v -d 045e:070f") :
-        #    ["idVendor\s+0x045e\s+Microsoft", "Audio"],
-        # Webcam
-        str("lsusb -v -d 058f:") :
-            ["idVendor\s+0x058f\s+Alcor", "iProduct.*(TeckNet|2.0 PC Camera)"],
-
-        }
-
-    vendor_id_dict_control_file = {
-                                    # '045e': 'Microsoft Headphone',
-
-                                   '058f': 'Webcam',
-                                  }
-
-    job.run_test("kernel_ExternalUsbPeripheralsDetectionTest", host=host,
-                 disable_sysinfo=True, usb_checks=usb_checks,
-                 vendor_id_dict_control_file=vendor_id_dict_control_file)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/control.printer_epson b/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/control.printer_epson
deleted file mode 100644
index 463d060..0000000
--- a/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/control.printer_epson
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "kernel_ExternalUsbPeripheralsDetectionTest.printer_epson"
-PURPOSE = "Kernel USB detection test"
-CRITERIA = "This test will fail if any of the actions or checks fail."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform, kernel"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING, usb_printer_epson"
-
-DOC = """
-This test uses servo to connect USB devices.
-This test verifies if drivers are created for each USB device or not.
-
-The test fails if
-- if USB device is not detected in lsusb command
-- if driver for USB device is not created
-- USB detected peripherals are different than expected
-- there is no servo board attached
-
-USB peripherals plugged
-- USB Epson XP-610 Series printer
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    usb_checks = {
-        # Epson printer
-        str("lsusb") :
-            ["04b8:089e Seiko Epson Corp."],
-        str("lsusb -v -d 04b8:089e") :
-            ["iProduct.*EPSON XP-610 Series",
-             "bInterfaceClass.*Printer"],
-        }
-
-    vendor_id_dict_control_file = {'04b8': 'Printer'}
-
-    job.run_test("kernel_ExternalUsbPeripheralsDetectionTest", host=host,
-                 disable_sysinfo=True, usb_checks=usb_checks, tag="printer_epson",
-                 vendor_id_dict_control_file=vendor_id_dict_control_file)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/control.printer_hp b/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/control.printer_hp
deleted file mode 100644
index 9e178d2..0000000
--- a/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/control.printer_hp
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "kernel_ExternalUsbPeripheralsDetectionTest.printer_hp"
-PURPOSE = "Kernel USB detection test"
-CRITERIA = "This test will fail if any of the actions or checks fail."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform, kernel"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING, usb_printer_hp"
-
-DOC = """
-This test uses servo to connect USB devices.
-This test verifies if drivers are created for each USB device or not.
-
-The test fails if
-- if USB device is not detected in lsusb command
-- if driver for USB device is not created
-- USB detected peripherals are different than expected
-- there is no servo board attached
-
-USB peripherals plugged
-- USB printer
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    usb_checks = {
-        # Epson or HP USB printer
-        str("lsusb") :
-            ["03f0:d911 Hewlett-Packard"],
-        str("lsusb -v -d 03f0:d911") :
-            ["iProduct.*OfficeJet 4650 series",
-             "bInterfaceClass.*Printer"],
-        }
-
-    vendor_id_dict_control_file = {'03f0': 'Printer'}
-
-    job.run_test("kernel_ExternalUsbPeripheralsDetectionTest", host=host,
-                 disable_sysinfo=True, usb_checks=usb_checks, tag="printer_hp",
-                 vendor_id_dict_control_file=vendor_id_dict_control_file)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/kernel_ExternalUsbPeripheralsDetectionTest.py b/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/kernel_ExternalUsbPeripheralsDetectionTest.py
deleted file mode 100644
index a1ca7bc..0000000
--- a/server/site_tests/kernel_ExternalUsbPeripheralsDetectionTest/kernel_ExternalUsbPeripheralsDetectionTest.py
+++ /dev/null
@@ -1,163 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os, re, time
-
-from autotest_lib.server import test
-from autotest_lib.client.common_lib import error
-
-_WAIT_DELAY = 25
-_USB_DIR = '/sys/bus/usb/devices'
-
-class kernel_ExternalUsbPeripheralsDetectionTest(test.test):
-    """Uses servo to repeatedly connect/remove USB devices during boot."""
-    version = 1
-
-
-    def set_hub_power(self, on=True):
-        """Setting USB hub power status
-
-        @param: on To power on the servo-usb hub or not
-
-        """
-        reset = 'off'
-        if not on:
-            reset = 'on'
-        self.host.servo.set('dut_hub1_rst1', reset)
-        self.pluged_status = on
-        time.sleep(_WAIT_DELAY)
-
-
-    def check_usb_peripherals_details(self):
-        """Checks the effect from plugged in USB peripherals.
-
-        @returns True if command line output is matched successfuly; Else False
-        """
-        failed = list()
-        for cmd in self.usb_checks.keys():
-            out_match_list = self.usb_checks.get(cmd)
-            logging.info('Running %s',  cmd)
-
-            # Run the usb check command
-            cmd_out_lines = (self.host.run(cmd, ignore_status=True).
-                             stdout.strip().split('\n'))
-            for out_match in out_match_list:
-                match_result = False
-                for cmd_out_line in cmd_out_lines:
-                    match_result = (match_result or
-                        re.search(out_match, cmd_out_line) != None)
-                if not match_result:
-                    failed.append((cmd,out_match))
-        return failed
-
-
-    def get_usb_device_dirs(self):
-        """Gets the usb device dirs from _USB_DIR path.
-
-        @returns list with number of device dirs else None
-        """
-        usb_dir_list = []
-        cmd = 'ls -1 %s' % _USB_DIR
-        tmp = self.host.run(cmd).stdout.strip().split('\n')
-        for d in tmp:
-            usb_dir_list.append(os.path.join(_USB_DIR, d))
-        return usb_dir_list
-
-
-    def get_vendor_id_dict_from_dut(self, dir_list):
-        """Finds the vendor id from provided dir list.
-
-        @param dir_list: full path of directories
-        @returns dict of all vendor ids vs file path
-        """
-        vendor_id_dict = dict()
-        for d in dir_list:
-            file_name = os.path.join(d, 'idVendor')
-            if self._exists_on(file_name):
-                vendor_id = self.host.run('cat %s' % file_name).stdout.strip()
-                if vendor_id:
-                    vendor_id_dict[vendor_id] = d
-        logging.info('%s', vendor_id_dict)
-        return vendor_id_dict
-
-
-    def _exists_on(self, path):
-        """Checks if file exists on host or not.
-
-        @returns True or False
-        """
-        return self.host.run('ls %s' % path,
-                             ignore_status=True).exit_status == 0
-
-
-
-    def run_once(self, host, usb_checks=None,
-                 vendor_id_dict_control_file=None):
-        """Main function to run the autotest.
-
-        @param host: name of the host
-        @param usb_checks: dictionary defined in control file
-        @param vendor_id_list: dictionary defined in control file
-        """
-        self.host = host
-        self.usb_checks = usb_checks
-
-        self.host.servo.switch_usbkey('dut')
-        self.host.servo.set('usb_mux_sel3', 'dut_sees_usbkey')
-        time.sleep(_WAIT_DELAY)
-
-        self.set_hub_power(False)
-        # Collect the USB devices directories before switching on hub
-        usb_list_dir_off = self.get_usb_device_dirs()
-
-        self.set_hub_power(True)
-        # Collect the USB devices directories after switching on hub
-        usb_list_dir_on = self.get_usb_device_dirs()
-
-        diff_list = list(set(usb_list_dir_on).difference(set(usb_list_dir_off)))
-        if len(diff_list) == 0:
-            # Fail if no devices detected after
-            raise error.TestError('No connected devices were detected. Make '
-                                  'sure the devices are connected to USB_KEY '
-                                  'and DUT_HUB1_USB on the servo board.')
-        logging.debug('Connected devices list: %s', diff_list)
-
-        # Test 1: check USB peripherals info in detail
-        failed = self.check_usb_peripherals_details()
-        if len(failed)> 0:
-            raise error.TestError('USB device not detected %s', str(failed))
-
-        # Test 2: check USB device dir under /sys/bus/usb/devices
-        vendor_ids = {}
-        # Gets a dict idVendor: dir_path
-        vendor_ids = self.get_vendor_id_dict_from_dut(diff_list)
-        for vid in vendor_id_dict_control_file.keys():
-            peripheral = vendor_id_dict_control_file[vid]
-            if vid not in vendor_ids.keys():
-                raise error.TestFail('%s is not detected at %s dir'
-                                     % (peripheral, _USB_DIR))
-            else:
-            # Test 3: check driver symlink and dir for each USB device
-                tmp_list = [device_dir for device_dir in
-                            self.host.run('ls -1 %s' % vendor_ids[vid],
-                            ignore_status=True).stdout.split('\n')
-                            if re.match(r'\d-\d.*:\d\.\d', device_dir)]
-                if not tmp_list:
-                    raise error.TestFail('No driver created/loaded for %s'
-                                         % peripheral)
-                logging.info('---- Drivers for %s ----', peripheral)
-                flag = False
-                for device_dir in tmp_list:
-                    driver_path = os.path.join(vendor_ids[vid],
-                                               '%s/driver' % device_dir)
-                    if self._exists_on(driver_path):
-                        flag = True
-                        link = (self.host.run('ls -l %s | grep ^l'
-                                              '| grep driver'
-                                              % driver_path, ignore_status=True)
-                                              .stdout.strip())
-                        logging.info('%s', link)
-                if not flag:
-                    raise error.TestFail('Driver for %s is not loaded - %s'
-                                         % (peripheral, driver_path))
diff --git a/server/site_tests/kernel_IdlePerf/control b/server/site_tests/kernel_IdlePerf/control
deleted file mode 100644
index 04eff6e..0000000
--- a/server/site_tests/kernel_IdlePerf/control
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'dave.rodgman@arm.com'
-NAME = 'kernel_IdlePerf'
-PURPOSE = 'Test performance impact of idle'
-CRITERIA = 'This test will fail if performance drops when CPU idle is enabled'
-# Disable this test until it can be fixed: http://b/154426893
-# ATTRIBUTES = 'suite:crosbolt_perf_weekly'
-TIME = 'MEDIUM'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = 'kernel'
-TEST_TYPE = 'server'
-
-DOC = '''
-This server side test suite tests for performance regressions where enabling
-CPU idle hurts latency-sensitive workloads (e.g., smooth scrolling).
-
-This is done by running smoothness.top_25_smooth and comparing results for
-idle enabled vs. disabled: ideally, there should be only a very small impact.
-
-This test currently only supports Arm aarch64.
-
-Pass local=True to run with local telemetry and no AFE server.
-'''
-
-def run_benchmark(machine):
-    host = hosts.create_host(machine)
-    job.run_test('kernel_IdlePerf', host=host,
-                 args=utils.args_to_dict(args))
-
-parallel_simple(run_benchmark, machines)
diff --git a/server/site_tests/kernel_IdlePerf/kernel_IdlePerf.py b/server/site_tests/kernel_IdlePerf/kernel_IdlePerf.py
deleted file mode 100755
index b555155..0000000
--- a/server/site_tests/kernel_IdlePerf/kernel_IdlePerf.py
+++ /dev/null
@@ -1,240 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import json
-import math
-import re
-import numpy
-
-from autotest_lib.server import test
-from autotest_lib.server.cros import telemetry_runner
-from autotest_lib.client.common_lib import error
-
-# This test detects issues with low-throughput latency-sensitive workloads
-# caused by entering idle state.
-#
-# Such loads sleep regularly but also need to wake up and hit deadlines. We've
-# observed on some systems that if idle-state is enabled, we miss a lot of
-# deadlines (even though the compute capacity is sufficient).
-#
-# This test runs top_25_smooth with idle-state both enabled and disabled, and
-# looks for a discrepancy in the results. This workload is quite noisy, so
-# we run multiple times and take N * stdev as the threshold for flagging an
-# issue.
-#
-# In testing, this approach seemed quite robust, if the parameters (repetitions
-# and threshold) are set appropriately. Increasing page-set repetitions helped a
-# lot (reduces noise), as did selecting a good value for N (which trades off
-# false positives vs. false negatives).
-#
-# Based on testing, we found good results by using 5 indicative pages, setting
-# pageset-repetitions to 7, and taking the mean - 2 * stddev as the estimate
-# for "we can be confident that the true regression is not worse than this".
-#
-# This results in under-estimating the regression (typically by around 2 with
-# a healthy system), so false alarms should be rare or non-existent. In testing
-# 50 iterations with a good and bad system, this identified 100% of regressions
-# and non-regressions correctly (in fact mean - 1 * stddev would also have done
-# so, but this seems a bit marginal).
-
-# Repeat each page given number of times
-PAGESET_REPEAT = 7
-
-# PAGES can be set to a subset of pages to run for a shorter test, or None to
-# run all pages in rendering.desktop.
-# Simpler pages emphasise the issue more, as the system is more likely to enter
-# idle state.
-#
-# These were selected by running all pages many times (on a system which
-# exhibits the issue), and choosing pages which have a high value
-# for mean_regression - 2 * stddev - i.e. give the clearest indication of a
-# regression.
-# The exact page set selected is a mix of real pages (e.g. blogspot_2018) and
-# synthetic (e.g. transform_transitions_js_block)
-# For a longer test,'twitter_2018', 'wikipedia_2018' can be added to PAGES.
-PAGES = ['blogspot_2018', 'transform_transitions_js_block', 'throughput_scrolling_passive_handler']
-
-# Benchmark to run
-BENCHMARK = 'rendering.desktop'
-
-# Path to sysfs control file for disabling idle state
-DISABLE_PATH = '/sys/devices/system/cpu/cpu{}/cpuidle/state{}/disable'
-
-class kernel_IdlePerf(test.test):
-    """
-    Server side regression test for performance impact of idle-state.
-
-    This test runs some smoothness tests with and without sleep enabled, to
-    check that the impact of enabling sleep is not significant.
-
-    """
-    version = 1
-    _cleanup_required = False
-
-    def _check_sysfs(self, host):
-        # First check that we are on a suitable DUT which offers the ability to
-        # disable the idle state
-        arch = host.run_output('uname -m')
-        if arch != 'aarch64':
-            # Idle states differ between CPU architectures, so this test would
-            # need further development to support other platforms.
-            raise error.TestNAError('Test only supports Arm aarch64 CPUs')
-        if not host.path_exists(DISABLE_PATH.format(0, 1)):
-            logging.error('sysfs path absent: cannot disable idle state')
-            raise error.TestError('Cannot disable idle state')
-
-        # Identify available idle states. state0 is running state; other states
-        # should be disabled when disabling idle.
-        self.states = []
-        state_dirs = host.run_output(
-            'ls -1 /sys/devices/system/cpu/cpu0/cpuidle/')
-        for state in state_dirs.split('\n'):
-            if re.match('state[1-9][0-9]*$', state):
-                # Look for dirnames like 'state1' (but exclude 'state0')
-                self.states.append(int(state[5:]))
-        logging.info('Found idle states: {}'.format(self.states))
-
-        self.cpu_count = int(host.run_output('nproc --all'))
-        logging.info('Found {} cpus'.format(self.cpu_count))
-        logging.info('Idle enabled = {}'.format(self._is_idle_enabled(host)))
-
-        # From this point on we expect the test to be able to run, so we will
-        # need to ensure that the idle state is restored when the test exits
-        self._cleanup_required = True
-        self._enable_idle(host, False)
-        if self._is_idle_enabled(host):
-            logging.error('Failed to disable idle state')
-            raise error.TestError('Cannot disable idle state')
-        self._enable_idle(host, True)
-        if not self._is_idle_enabled(host):
-            logging.error('Failed to re-enable idle state')
-            raise error.TestError('Cannot disable idle state')
-
-    def _is_idle_enabled(self, host):
-        return host.run_output('cat ' + DISABLE_PATH.format(0, 1)) == '0'
-
-    def _enable_idle(self, host, enable):
-        logging.info('Setting idle enabled to {}'.format(enable))
-        x = '0' if enable else '1'
-        for cpu in range(0, self.cpu_count):
-            for state in self.states:
-                path = DISABLE_PATH.format(cpu, state)
-                host.run_output('echo {} > {}'.format(x, path))
-
-    def _parse_results_file(self, path):
-        with open(path) as fp:
-            histogram_json = json.load(fp)
-
-        guids = {x["guid"]: x["values"][0] for x in histogram_json
-                    if "guid" in x and "values" in x and len(x["values"]) > 0}
-
-        scores = {}
-        for e in histogram_json:
-            if "name" in e and e["name"] == "exp_percentage_smooth":
-                story_guid = e["diagnostics"]["stories"]
-                story = guids[story_guid]
-                if story not in scores: scores[story] = []
-                scores[story] += [e["sampleValues"][0]]
-
-        for story in scores:
-            scores[story] = {
-                'raw_exp_percentage_smooth_scores': scores[story],
-                'exp_percentage_smooth': numpy.mean(scores[story]),
-                'std': numpy.std(scores[story])
-            }
-
-        return scores
-
-    def _compare_results(self, idle_enabled, idle_disabled):
-        results = {
-            'passed': True
-        }
-        for page in idle_enabled:
-            diff = (idle_disabled[page]['exp_percentage_smooth']
-                   - idle_enabled[page]['exp_percentage_smooth'])
-            diff_std = (math.sqrt(idle_enabled[page]['std'] ** 2
-                       + idle_disabled[page]['std'] ** 2))
-            passed = (idle_enabled[page]['exp_percentage_smooth'] >=
-                     (idle_disabled[page]['exp_percentage_smooth'] - diff_std * 2))
-            key = re.sub('\W', '_', page)
-            results[key] = {
-                'idle_enabled': idle_enabled[page],
-                'idle_disabled': idle_disabled[page],
-                'difference': diff,
-                'difference_std': diff_std,
-                'passed': passed
-                }
-            results['passed'] = results['passed'] and passed
-        return results
-
-    def _run_telemetry(self, host, telemetry, enable):
-        logging.info('Running telemetry with idle enabled = {}'.format(enable))
-        self._enable_idle(host, enable)
-
-        args = ['--pageset-repeat={}'.format(PAGESET_REPEAT)]
-        if PAGES:
-            stories = r'\|'.join(r'\(^' + p + r'$\)' for p in PAGES)
-            story_filter = '--story-filter={}'.format(stories)
-            args.append(story_filter)
-
-        logging.info('Running telemetry with args: {}'.format(args))
-        result = telemetry.run_telemetry_benchmark(
-            BENCHMARK, self, *args)
-        if result.status != telemetry_runner.SUCCESS_STATUS:
-            raise error.TestFail('Failed to run benchmark')
-
-        # ensure first run doesn't get overwritten by second run
-        default_path = os.path.join(self.resultsdir, 'histograms.json')
-        if enable:
-            unique_path = os.path.join(self.resultsdir,
-                                       'results-histograms-idle-enabled.json')
-        else:
-            unique_path = os.path.join(self.resultsdir,
-                                       'results-histograms-idle-disabled.json')
-        os.rename(default_path, unique_path)
-
-        return self._parse_results_file(unique_path)
-
-    def run_once(self, host=None, args={}):
-        """Run the telemetry scrolling benchmark.
-
-        @param host: host we are running telemetry on.
-
-        """
-
-        logging.info('Checking sysfs')
-        self._check_sysfs(host)
-
-        local = args.get('local') == 'True'
-        telemetry = telemetry_runner.TelemetryRunner(
-                        host, local, telemetry_on_dut=False)
-
-        logging.info('Starting test')
-        results_idle   = self._run_telemetry(host, telemetry, True)
-        results_noidle = self._run_telemetry(host, telemetry, False)
-
-        # Score is the regression in percentage of smooth frames caused by
-        # enabling CPU idle.
-        logging.info('Processing results')
-        results = self._compare_results(results_idle, results_noidle)
-
-        self.write_perf_keyval(results)
-
-        if not results['passed']:
-            raise error.TestFail('enabling CPU idle significantly '
-                                 'regresses scrolling performance')
-
-    def cleanup(self, host):
-        """Cleanup of the test.
-
-        @param host: host we are running telemetry on.
-
-        """
-        if self._cleanup_required:
-            logging.info('Restoring idle to enabled')
-            self._enable_idle(host, True)
diff --git a/server/site_tests/kernel_MemoryRamoop/control b/server/site_tests/kernel_MemoryRamoop/control
deleted file mode 100644
index 6058595..0000000
--- a/server/site_tests/kernel_MemoryRamoop/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = 'kernel_MemoryRamoop'
-AUTHOR = 'puthik'
-PURPOSE = 'Check that kernel log preserve correctly in RAM'
-TIME = 'SHORT'
-TEST_CLASS = 'kernel'
-TEST_TYPE = 'server'
-ATTRIBUTES = "suite:experimental"
-
-DOC = """
-This test verify that /sys/fs/pstore/console-ramoops is preserved correctly
-after system reboot/kernel crash and also verify integrity of that log.
-"""
-
-def run_kernel_MemoryRamoop(machine):
-    job.run_test('kernel_MemoryRamoop', client_ip=machine)
-
-job.parallel_simple(run_kernel_MemoryRamoop, machines)
-
diff --git a/server/site_tests/kernel_MemoryRamoop/kernel_MemoryRamoop.py b/server/site_tests/kernel_MemoryRamoop/kernel_MemoryRamoop.py
deleted file mode 100644
index a329d1e..0000000
--- a/server/site_tests/kernel_MemoryRamoop/kernel_MemoryRamoop.py
+++ /dev/null
@@ -1,205 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, random, re, string, traceback
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import autotest
-from autotest_lib.server import hosts
-from autotest_lib.server import test
-
-class kernel_MemoryRamoop(test.test):
-    """
-    This test verifies that /sys/fs/pstore/console-ramoops is preserved
-    after system reboot/kernel crash and also verifies that there is no memory
-    corruption in that log.
-
-    There is also platform_KernelErrorPaths test that tests kernel crashes. But
-    this test focuses on verifying that the kernel creates the console-ramoops
-    file correctly and its content is not corrupt. Contrary to the other test
-    that tests a bigger scope, i.e. the whole error reporting mechanism.
-    """
-    version = 1
-
-    # The name of this file has changed starting with linux-3.19.
-    # Use a glob to match all existing records.
-    _RAMOOP_PATH_GLOB = '/sys/fs/pstore/console-ramoops*'
-    _KMSG_PATH = '/dev/kmsg'
-    _LKDTM_PATH = '/sys/kernel/debug/provoke-crash/DIRECT'
-
-    # ramoops have a max size of 128K, so we will generate about 100K of random
-    # messages.
-    _MSG_LINE_COUNT = 1000
-    _MSG_LINE_LENGTH = 80
-    _MSG_MAGIC = 'ramoop_test'
-
-    def run_once(self, client_ip):
-        """
-        Run the test.
-        """
-        if not client_ip:
-            error.TestError("Must provide client's IP address to test")
-
-        self._client = hosts.create_host(client_ip)
-        self._client_at = autotest.Autotest(self._client)
-
-        self._run_test(self._do_reboot, '.*Restarting system.*$')
-
-        if self._client.check_for_lkdtm():
-            self._run_test(self._do_kernel_panic, '.*lkdtm:.*PANIC$')
-            self._run_test(self._do_kernel_bug, '.*lkdtm:.*BUG$')
-        else:
-            logging.warn('DUT did not have kernel dump test module')
-
-        self._run_test(self._do_reboot_with_suspend, '.*Restarting system.*$')
-
-    def _run_test(self, test_function, sig_pattern):
-        """
-        Run the test by writing random messages to the kernel log, then
-        rebooting/crashing the kernel and verifying the integrity of
-        console-ramoops.
-
-        @param test_function: function to call to reboot / crash the DUT
-        @param sig_pattern: regex of the kernel log message generated when
-                            rebooting or crashing via test_function
-        """
-
-        msg = self._generate_random_msg()
-
-        for line in msg:
-            cmd = 'echo "%s" > %s' % (line, self._KMSG_PATH)
-            self._client.run(cmd)
-
-        test_function()
-
-        cmd = 'cat %s' % self._RAMOOP_PATH_GLOB
-        ramoop = self._client.run(cmd).stdout
-
-        self._verify_random_msg(ramoop, msg, sig_pattern)
-
-    def _do_reboot(self):
-        """
-        Reboot host machine
-        """
-        logging.info('Server: reboot client')
-        try:
-            self._client.reboot()
-        except error.AutoservRebootError as err:
-            raise error.TestFail('%s.\nTest failed with error %s' % (
-                    traceback.format_exc(), str(err)))
-
-    def _do_reboot_with_suspend(self):
-        """
-        Reboot host machine after suspend once
-        """
-        self._client.suspend(suspend_time=15)
-
-        logging.info('Server: reboot client')
-        try:
-            self._client.reboot()
-        except error.AutoservRebootError as err:
-            raise error.TestFail('%s.\nTest failed with error %s' % (
-                    traceback.format_exc(), str(err)))
-
-    def _do_kernel_panic(self):
-        """
-        Cause kernel panic using kernel dump test module
-        """
-        logging.info('Server: make client kernel panic')
-
-        cmd = 'echo PANIC > %s' % self._LKDTM_PATH
-        boot_id = self._client.get_boot_id()
-        self._client.run(cmd, ignore_status=True)
-        self._client.wait_for_restart(old_boot_id=boot_id)
-
-    def _do_kernel_bug(self):
-        """
-        Cause kernel bug using kernel dump test module
-        """
-        logging.info('Server: make client kernel bug')
-
-        cmd = 'echo BUG > %s' % self._LKDTM_PATH
-        boot_id = self._client.get_boot_id()
-        self._client.run(cmd, ignore_status=True)
-        self._client.wait_for_restart(old_boot_id=boot_id)
-
-    def _generate_random_msg(self):
-        """
-        Generate random messages to put in the kernel log.
-        The message format is [magic string]: [3 digit id] [random char/digit]
-        """
-        valid_char = string.letters + string.digits
-        ret = []
-        for i in range(self._MSG_LINE_COUNT):
-            line = '%s: %03d ' % (self._MSG_MAGIC, i)
-            for _ in range(self._MSG_LINE_LENGTH):
-                line += random.choice(valid_char)
-            ret += [line]
-        return ret
-
-    def _verify_random_msg(self, ramoop, src_msg, sig_pattern):
-        """
-        Verify the random messages generated by _generate_random_msg.
-
-        There are 3 things to verify:
-        1. At least one random message exists (earlier random messages may be
-           cut off because console-ramoops has a limited size).
-        2. Integrity of the random messages.
-        3. Signature of the reboot / kernel crash.
-
-        @param ramoop: content of the console-ramoops file on the DUT
-        @param src_msg: messages written to the kernel log
-        @param sig_pattern: regex of the kernel log signature to verify
-        """
-        #                   time stamp     magic   id      random
-        pattern = str("\\[ *(\\d+\\.\\d+)\\].*(%s: (\\d{3}) \\w{%d})" %
-            (self._MSG_MAGIC, self._MSG_LINE_LENGTH))
-        matcher = re.compile(pattern)
-
-        logging.info('%s', pattern)
-
-        state = 'find_rand_msg'
-
-        last_timestamp = 0
-        for line in ramoop.split('\n'):
-            if state == 'find_rand_msg':
-                if not matcher.match(line):
-                    continue
-                last_id = int(matcher.split(line)[3]) - 1
-                state = 'match_rand_pattern'
-                logging.info("%s: %s", state, line)
-
-            if state == 'match_rand_pattern':
-                if not matcher.match(line):
-                    continue
-                components = matcher.split(line)
-                timestamp = float(components[1])
-                msg = components[2]
-                id = int(components[3])
-
-                if timestamp < last_timestamp:
-                    logging.info("last_timestamp: %f, timestamp: %d",
-                                 last_timestamp, timestamp)
-                    raise error.TestFail('Found reverse time stamp.')
-                last_timestamp = timestamp
-
-                if id != last_id + 1:
-                    logging.info("last_id: %d, id: %d", last_id, id)
-                    raise error.TestFail('Found missing message.')
-                last_id = id
-
-                if msg != src_msg[id]:
-                    logging.info("cur_msg: '%s'", msg)
-                    logging.info("src_msg: '%s'", src_msg[id])
-                    raise error.TestFail('Found corrupt message.')
-
-                if id == self._MSG_LINE_COUNT - 1:
-                    state = 'find_signature'
-
-            if state == 'find_signature':
-                if re.match(sig_pattern, line):
-                    logging.info("%s: %s", state, line)
-                    break
-
-        # error case: a successful run must break in the find_signature state
-        else:
-            raise error.TestFail('Verification failed in state %s' % state)
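A minimal, self-contained sketch of the round trip the deleted test performs: tagged random lines go into /dev/kmsg, and after a reboot or LKDTM-induced crash the surviving console-ramoops records are matched with the same timestamp/id regex. Nothing below uses the autotest APIs, and the fake log is purely illustrative.

    import random
    import re
    import string

    MSG_MAGIC = 'ramoop_test'
    MSG_LINE_LENGTH = 80

    def make_lines(count):
        # Same shape as _generate_random_msg(): "ramoop_test: 007 <80 random chars>"
        alphabet = string.ascii_letters + string.digits
        return ['%s: %03d %s' % (MSG_MAGIC, i,
                ''.join(random.choice(alphabet) for _ in range(MSG_LINE_LENGTH)))
                for i in range(count)]

    def find_tagged(ramoops_text):
        # "[   12.345678] ramoop_test: 007 <payload>" -> (timestamp, id, message)
        pattern = re.compile(r'\[ *(\d+\.\d+)\].*(%s: (\d{3}) \w{%d})'
                             % (MSG_MAGIC, MSG_LINE_LENGTH))
        hits = []
        for line in ramoops_text.splitlines():
            match = pattern.search(line)
            if match:
                hits.append((float(match.group(1)), int(match.group(3)),
                             match.group(2)))
        return hits

    lines = make_lines(3)
    fake_ramoops = '\n'.join('[   42.%06d] %s' % (i, line)
                             for i, line in enumerate(lines))
    for timestamp, msg_id, message in find_tagged(fake_ramoops):
        assert message == lines[msg_id], 'corrupt message %d' % msg_id
        print(timestamp, msg_id)

On a real DUT the writes go through host.run('echo "<line>" > /dev/kmsg') and the reads through 'cat /sys/fs/pstore/console-ramoops*', exactly as the deleted code does.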
diff --git a/server/site_tests/moblab_RunSuite/control.bvt-inline b/server/site_tests/moblab_RunSuite/control.bvt-inline
deleted file mode 100644
index 32eab7f..0000000
--- a/server/site_tests/moblab_RunSuite/control.bvt-inline
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "chromeos-moblab@google.com"
-NAME = "moblab_BVTInline"
-PURPOSE = "Test that Moblab can run the bvt-inline suite."
-ATTRIBUTES = "suite:moblab"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "moblab"
-TEST_TYPE = "server"
-MAX_RESULT_SIZE_KB = 5120000
-
-DOC = """
-Kicks off the bvt-inline suite on a Moblab host against the DUTs on its subnet
-and ensures the suite completes successfully.
-
-To invoke this test locally:
-  test_that -b guado_moblab <remote> moblab_BVTInline --args="<ARGLIST>"
-
-where ARGLIST is a whitespace separated list of the following key=value pairs.
-Values pertaining to the test case include:
-
-  boto_path=<boto_path>                path to the boto file to be installed on
-                                       the Moblab DUT. If not specified, the
-                                       boto file in the current home directory
-                                       will be installed if it exists.
-  image_storage_server=<server_name>   Google Storage Bucket from which to
-                                       fetch test images. If not
-                                       specified, the value will be fetched
-                                       from global_config.
-  service_init_timeout_m=<int>         Timeout (in minutes) to wait for upstart
-                                       services to start on the moblab host.
-                                       This can take ~5 minutes on a physical
-                                       device and ~10 minutes on a VM.
-  test_timeout_hint_m=<int>            The overall timeout to expect for the
-                                       test run. For this test, it is very
-                                       important to collect post failure data
-                                       from the moblab device. If the overall
-                                       timeout is provided, the test will try to
-                                       fail early to save some time for log
-                                       collection from the DUT.
-  clear_devserver_cache=<boolean>      If True, image cache of the devserver
-                                       running on moblab is cleared before
-                                       running the test to validate devserver
-                                       imaging staging flow.
-"""
-from autotest_lib.client.bin import sysinfo
-from autotest_lib.client.common_lib import utils
-
-MOBLAB_AUTOTEST_FOLDERS = ['/usr/local/autotest/logs']
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    args_dict = utils.args_to_dict(args)
-
-    logging.info('Logs from moblab\'s instance of autotest will be collected '
-                 'under the sysinfo/ folder in results.')
-    for folder in MOBLAB_AUTOTEST_FOLDERS:
-        logging.info('  Will collect %s', folder)
-        job.sysinfo.add_logdir(sysinfo.logdir(folder, excludes=()))
-
-    job.run_test('moblab_RunSuite', host=host, suite_name='bvt-inline',
-                 moblab_suite_max_retries=1, **args_dict)
-
-parallel_simple(run, machines)
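All of the moblab control files in this patch consume the `test_that --args` string through `utils.args_to_dict(args)` before forwarding the result to `job.run_test` as keyword arguments. A simplified stand-in (not the real autotest utility) shows what that conversion looks like; the example values are invented:

    def args_to_dict(args):
        # `args` is the list of whitespace-separated tokens from --args="...".
        result = {}
        for token in args:
            if '=' in token:
                key, value = token.split('=', 1)
                result[key] = value
        return result

    # test_that ... --args="service_init_timeout_m=10 clear_devserver_cache=false"
    args = ['service_init_timeout_m=10', 'clear_devserver_cache=false']
    print(args_to_dict(args))
    # {'service_init_timeout_m': '10', 'clear_devserver_cache': 'false'}

Every value arrives as a str, which is why these tests coerce booleans and integers themselves (for example, clear_devserver_cache.lower() == 'true').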
diff --git a/server/site_tests/moblab_RunSuite/control.dummyServer b/server/site_tests/moblab_RunSuite/control.dummyServer
deleted file mode 100644
index 56f4360..0000000
--- a/server/site_tests/moblab_RunSuite/control.dummyServer
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "chromeos-moblab@google.com"
-NAME = "moblab_DummyServerSuite"
-PURPOSE = "Test that Moblab can run the Dummy Server suite."
-ATTRIBUTES = "suite:moblab_quick"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "moblab"
-TEST_TYPE = "server"
-
-DOC = """
-Kicks off the Dummy Server suite on a Moblab host against the DUTs on its
-subnet and ensures the suite completes successfully.
-
-To invoke this test locally:
-  test_that -b stumpy_moblab <remote> moblab_DummyServerSuite
-  --args="<ARGLIST>"
-
-where ARGLIST is a whitespace separated list of the following key=value pairs.
-Values pertaining to the test case include:
-
-  boto_path=<boto_path>                path to the boto file to be installed on
-                                       the Moblab DUT. If not specified, the
-                                       boto file in the current home directory
-                                       will be installed if it exists.
-  image_storage_server=<server_name>   Google Storage Bucket from which to
-                                       fetch test images. If not
-                                       specified, the value will be fetched
-                                       from global_config.
-  service_init_timeout_m=<int>         Timeout (in minutes) to wait for upstart
-                                       services to start on the moblab host.
-                                       This can take ~5 minutes on a physical
-                                       device and ~10 minutes on a VM.
-  test_timeout_hint_m=<int>            The overall timeout to expect for the
-                                       test run. For this test, it is very
-                                       important to collect post failure data
-                                       from the moblab device. If the overall
-                                       timeout is provided, the test will try to
-                                       fail early to save some time for log
-                                       collection from the DUT.
-  clear_devserver_cache=<boolean>      If True, image cache of the devserver
-                                       running on moblab is cleared before
-                                       running the test to validate devserver
-                                       imaging staging flow.
-"""
-from autotest_lib.client.bin import sysinfo
-from autotest_lib.client.common_lib import utils
-
-MOBLAB_AUTOTEST_FOLDERS = ['/usr/local/autotest/results',
-                           '/usr/local/autotest/logs']
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    args_dict = utils.args_to_dict(args)
-
-    logging.info('Logs from moblab\'s instance of autotest will be collected '
-                 'under the sysinfo/ folder in results.')
-    for folder in MOBLAB_AUTOTEST_FOLDERS:
-        logging.info('  Will collect %s', folder)
-        job.sysinfo.add_logdir(sysinfo.logdir(folder, excludes=()))
-
-    job.run_test('moblab_RunSuite', host=host, suite_name='dummy_server',
-                 moblab_suite_max_retries=1, **args_dict)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/moblab_RunSuite/control.dummyServerNoSsp b/server/site_tests/moblab_RunSuite/control.dummyServerNoSsp
deleted file mode 100644
index 23f0cd1..0000000
--- a/server/site_tests/moblab_RunSuite/control.dummyServerNoSsp
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "moblab_DummyServerNoSspSuite"
-PURPOSE = "Test that Moblab can run the Dummy Server suite without SSP."
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "moblab"
-TEST_TYPE = "server"
-
-DOC = """
-Kicks off the Dummy Server NoSsp suite on a Moblab host against the DUTs on its
-subnet and ensures the suite completes successfully.
-
-To invoke this test locally:
-  test_that -b stumpy_moblab <remote> moblab_DummyServerSuiteNoSsp
-  --args="<ARGLIST>"
-
-where ARGLIST is a whitespace separated list of the following key=value pairs.
-Values pertaining to the test case include:
-
-  boto_path=<boto_path>                path to the boto file to be installed on
-                                       the Moblab DUT. If not specified, the
-                                       boto file in the current home directory
-                                       will be installed if it exists.
-  image_storage_server=<server_name>   Google Storage Bucket from which to
-                                       fetch test images. If not
-                                       specified, the value will be fetched
-                                       from global_config.
-  service_init_timeout_m=<int>         Timeout (in minutes) to wait for upstart
-                                       services to start on the moblab host.
-                                       This can take ~5 minutes on a physical
-                                       device and ~10 minutes on a VM.
-  test_timeout_hint_m=<int>            The overall timeout to expect for the
-                                       test run. For this test, it is very
-                                       important to collect post failure data
-                                       from the moblab device. If the overall
-                                       timeout is provided, the test will try to
-                                       fail early to save some time for log
-                                       collection from the DUT.
-  clear_devserver_cache=<boolean>      If True, image cache of the devserver
-                                       running on moblab is cleared before
-                                       running the test to validate devserver
-                                       imaging staging flow.
-"""
-from autotest_lib.client.bin import sysinfo
-from autotest_lib.client.common_lib import utils
-
-MOBLAB_AUTOTEST_FOLDERS = ['/usr/local/autotest/results',
-                           '/usr/local/autotest/logs']
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    args_dict = utils.args_to_dict(args)
-
-    logging.info('Logs from moblab\'s instance of autotest will be collected '
-                 'under the sysinfo/ folder in results.')
-    for folder in MOBLAB_AUTOTEST_FOLDERS:
-        logging.info('  Will collect %s', folder)
-        job.sysinfo.add_logdir(sysinfo.logdir(folder, excludes=()))
-
-    job.run_test('moblab_RunSuite', host=host, suite_name='dummy_server_nossp',
-                 moblab_suite_max_retries=1, **args_dict)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/moblab_RunSuite/control.smoke b/server/site_tests/moblab_RunSuite/control.smoke
deleted file mode 100644
index 1dbcdf3..0000000
--- a/server/site_tests/moblab_RunSuite/control.smoke
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "chromeos-moblab@google.com"
-NAME = "moblab_SmokeSuite"
-PURPOSE = "Test that Moblab can run the smoke suite."
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "moblab"
-TEST_TYPE = "server"
-MAX_RESULT_SIZE_KB = 5120000
-
-DOC = """
-Kicks off the smoke suite on a Moblab host against the DUTs on its subnet
-and ensures the suite completes successfully.
-
-To invoke this test locally:
-  test_that -b stumpy_moblab <remote> moblab_SmokeSuite --args="<ARGLIST>"
-
-where ARGLIST is a whitespace separated list of the following key=value pairs.
-Values pertaining to the test case include:
-
-  boto_path=<boto_path>                path to the boto file to be installed on
-                                       the Moblab DUT. If not specified, the
-                                       boto file in the current home directory
-                                       will be installed if it exists.
-  image_storage_server=<server_name>   Google Storage Bucket from which to
-                                       fetch test images. If not
-                                       specified, the value will be fetched
-                                       from global_config.
-  service_init_timeout_m=<int>         Timeout (in minutes) to wait for upstart
-                                       services to start on the moblab host.
-                                       This can take ~5 minutes on a physical
-                                       device and ~10 minutes on a VM.
-  test_timeout_hint_m=<int>            The overall timeout to expect for the
-                                       test run. For this test, it is very
-                                       important to collect post failure data
-                                       from the moblab device. If the overall
-                                       timeout is provided, the test will try to
-                                       fail early to save some time for log
-                                       collection from the DUT.
-  clear_devserver_cache=<boolean>      If True, image cache of the devserver
-                                       running on moblab is cleared before
-                                       running the test to validate devserver
-                                       imaging staging flow.
-"""
-from autotest_lib.client.bin import sysinfo
-from autotest_lib.client.common_lib import utils
-
-MOBLAB_AUTOTEST_FOLDERS = ['/usr/local/autotest/logs']
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    args_dict = utils.args_to_dict(args)
-
-    logging.info('Logs from moblab\'s instance of autotest will be collected '
-                 'under the sysinfo/ folder in results.')
-    for folder in MOBLAB_AUTOTEST_FOLDERS:
-        logging.info('  Will collect %s', folder)
-        job.sysinfo.add_logdir(sysinfo.logdir(folder, excludes=()))
-
-    job.run_test('moblab_RunSuite', host=host, suite_name='smoke',
-                 moblab_suite_max_retries=1, **args_dict)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/moblab_RunSuite/moblab_RunSuite.py b/server/site_tests/moblab_RunSuite/moblab_RunSuite.py
deleted file mode 100644
index 411f951..0000000
--- a/server/site_tests/moblab_RunSuite/moblab_RunSuite.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros import moblab_test
-from autotest_lib.server.hosts import moblab_host
-from autotest_lib.utils import labellib
-
-
-_CLEANUP_TIME_M = 5
-_MOBLAB_IMAGE_STORAGE = '/mnt/moblab/static'
-
-class moblab_RunSuite(moblab_test.MoblabTest):
-    """
-    Moblab run suite test. Ensures that a Moblab can run a suite from start
-    to finish by kicking off a suite which will have the Moblab stage an
-    image, provision its DUTs and run the tests.
-    """
-    version = 1
-
-
-    def run_once(self, host, suite_name, moblab_suite_max_retries,
-                 target_build='', clear_devserver_cache=True,
-                 test_timeout_hint_m=None):
-        """Runs a suite on a Moblab Host against its test DUTS.
-
-        @param host: Moblab Host that will run the suite.
-        @param suite_name: Name of the suite to run.
-        @param moblab_suite_max_retries: The maximum number of test retries
-                allowed within the suite launched on moblab.
-        @param target_build: Optional build to be used in the run_suite
-                call on moblab. This argument is passed as is to run_suite. It
-                must be a sensible build target for the board of the sub-DUTs
-                attached to the moblab.
-        @param clear_devserver_cache: If True, image cache of the devserver
-                running on moblab is cleared before running the test to validate
-                devserver imaging staging flow.
-        @param test_timeout_hint_m: (int) Optional overall timeout for the test.
-                For this test, it is very important to collect post failure data
-                from the moblab device. If the overall timeout is provided, the
-                test will try to fail early to save some time for log collection
-                from the DUT.
-
-        @raises AutoservRunError if the suite does not complete successfully.
-        """
-        self._host = host
-
-        self._maybe_clear_devserver_cache(clear_devserver_cache)
-        # Fetch the board of the DUTs assigned to this Moblab. There should
-        # only be one type.
-        try:
-            dut = host.afe.get_hosts()[0]
-        except IndexError:
-            raise error.TestFail('All hosts for this MobLab are down. Please '
-                                 'request the lab admins to take a look.')
-
-        labels = labellib.LabelsMapping(dut.labels)
-        board = labels['board']
-
-        if not target_build:
-            stable_version_map = host.afe.get_stable_version_map(
-                    host.afe.CROS_IMAGE_TYPE)
-            target_build = stable_version_map.get_image_name(board)
-
-        logging.info('Running suite: %s.', suite_name)
-        cmd = ("%s/site_utils/run_suite.py --pool='' --board=%s --build=%s "
-               "--suite_name=%s --retry=True " "--max_retries=%d" %
-               (moblab_host.AUTOTEST_INSTALL_DIR, board, target_build,
-                suite_name, moblab_suite_max_retries))
-        cmd, run_suite_timeout_s = self._append_run_suite_timeout(
-                cmd,
-                test_timeout_hint_m,
-        )
-
-        logging.debug('Run suite command: %s', cmd)
-        try:
-            result = host.run_as_moblab(cmd, timeout=run_suite_timeout_s)
-        except error.AutoservRunError as e:
-            if _is_run_suite_error_critical(e.result_obj.exit_status):
-                raise
-        else:
-            logging.debug('Suite Run Output:\n%s', result.stdout)
-            # Cache directory can contain large binaries like CTS/CTS zip files
-            # no need to offload those in the results.
-            # The cache is owned by root user
-            host.run('rm -fR /mnt/moblab/results/shared/cache',
-                      timeout=600)
-
-    def _append_run_suite_timeout(self, cmd, test_timeout_hint_m):
-        """Modify given run_suite command with timeout.
-
-        @param cmd: run_suite command str.
-        @param test_timeout_hint_m: (int) timeout for the test, or None.
-        @return cmd, run_suite_timeout_s: cmd is the updated command str,
-                run_suite_timeout_s is the timeout to use for the run_suite
-                call, in seconds.
-        """
-        if test_timeout_hint_m is None:
-            return cmd, 10800
-
-        # Arguments passed in via test_args may be all str, depending on how
-        # they're passed in.
-        test_timeout_hint_m = int(test_timeout_hint_m)
-        elapsed_m = self.elapsed.total_seconds() / 60
-        run_suite_timeout_m = (
-                test_timeout_hint_m - elapsed_m - _CLEANUP_TIME_M)
-        logging.info('Overall test timeout hint provided (%d minutes)',
-                     test_timeout_hint_m)
-        logging.info('%d minutes have already elapsed', elapsed_m)
-        logging.info(
-                'Keeping %d minutes for cleanup, will allow %d minutes for '
-                'the suite to run.', _CLEANUP_TIME_M, run_suite_timeout_m)
-        cmd += ' --timeout_mins %d' % run_suite_timeout_m
-        return cmd, run_suite_timeout_m * 60
-
-    def _maybe_clear_devserver_cache(self, clear_devserver_cache):
-        # When passed in via test_args, all arguments are str
-        if not isinstance(clear_devserver_cache, bool):
-            clear_devserver_cache = (clear_devserver_cache.lower() == 'true')
-        if clear_devserver_cache:
-            self._host.run('rm -rf %s/*' % _MOBLAB_IMAGE_STORAGE)
-
-
-def _is_run_suite_error_critical(return_code):
-    # We can't actually import run_suite here because importing run_suite pulls
-    # in certain MySQLdb dependencies that fail to load in the context of a
-    # test.
-    # OTOH, these return codes are unlikely to change because external users /
-    # builders depend on them.
-    return return_code not in (
-            0,  # run_suite.RETURN_CODES.OK
-            2,  # run_suite.RETURN_CODES.WARNING
-    )
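The _append_run_suite_timeout helper above is plain budget arithmetic: remaining time equals the overall hint minus the minutes already elapsed minus a fixed cleanup reserve, expressed once in minutes for --timeout_mins and once in seconds for the remote call. A standalone sketch with made-up numbers:

    _CLEANUP_TIME_M = 5   # minutes reserved for post-run log collection

    def append_run_suite_timeout(cmd, test_timeout_hint_m, elapsed_m):
        """Return (cmd with --timeout_mins appended, timeout in seconds)."""
        if test_timeout_hint_m is None:
            return cmd, 10800                 # default: 3 hours
        remaining_m = int(test_timeout_hint_m) - elapsed_m - _CLEANUP_TIME_M
        return cmd + ' --timeout_mins %d' % remaining_m, remaining_m * 60

    cmd, timeout_s = append_run_suite_timeout(
            'run_suite.py --suite_name=bvt-inline',
            test_timeout_hint_m=90, elapsed_m=12)
    print(cmd)        # run_suite.py --suite_name=bvt-inline --timeout_mins 73
    print(timeout_s)  # 4380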
diff --git a/server/site_tests/moblab_Setup/control.cts_N b/server/site_tests/moblab_Setup/control.cts_N
deleted file mode 100644
index a81ea6c..0000000
--- a/server/site_tests/moblab_Setup/control.cts_N
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "cts_N"
-PURPOSE = "Test that the moblab has the correct setup for cts_N suite"
-ATTRIBUTES = "suite:check_setup_cts_N"
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "dummy"
-TEST_TYPE = "server"
-REQUIRE_SSP = False
-
-DOC = """
-Test that the moblab has 5 live DUTs connected, and that at least one of those
-DUTs has each required label
-"""
-
-def run(machine):
-    job.run_test('moblab_Setup', host=hosts.create_host(machine),
-      required_duts=5, required_labels=['lighting', 'noloopback'])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/moblab_Setup/moblab_Setup.py b/server/site_tests/moblab_Setup/moblab_Setup.py
deleted file mode 100644
index 1a475a2..0000000
--- a/server/site_tests/moblab_Setup/moblab_Setup.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import subprocess
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.server import test
-from autotest_lib.server import frontend
-
-class moblab_Setup(test.test):
-    """ Moblab server test that checks for a specified number of
-    connected DUTs and that those DUTs have specified labels. Used to
-    verify the setup before kicking off a long running test suite.
-    """
-    version = 1
-
-    def run_once(self, required_duts=1, required_labels=[]):
-        """ Tests the moblab's connected DUTs to see if the current
-        configuration is valid for a specific test.
-
-        @param required_duts [int] number of _live_ DUTs required to run
-            the test in question. A DUT is not live if it is in a failed
-            repair state
-        @param required_labels [list<string>] list of labels that are
-            required to be on at least one _live_ DUT for this test.
-        """
-        logging.info('required_duts=%d required_labels=%s' %
-                (required_duts, str(required_labels)))
-
-        # creating a client to connect to autotest rpc interface
-        # all available rpc calls are defined in
-        # src/third_party/autotest/files/server/frontend.py
-        afe = frontend.AFE(server='localhost', user='moblab')
-
-        # get autotest statuses that indicate a live host
-        live_statuses = afe.host_statuses(live=True)
-        hosts = []
-        # get the hosts connected to autotest, find the live ones
-        for host in afe.get_hosts():
-            if host.status in live_statuses:
-                logging.info('Host %s is live, status %s' %
-                        (host.hostname, host.status))
-                hosts.append(host)
-            else:
-                logging.info('Host %s is not live, status %s' %
-                        (host.hostname, host.status))
-
-        # check that we have the required number of live duts
-        if len(hosts) < required_duts:
-            raise error.TestFail(('Suite requires %d DUTs, only %d connected' %
-                    (required_duts, len(hosts))))
-
-        required_labels_found = {}
-        for label in required_labels:
-            required_labels_found[label] = False
-
-        # check that at least one DUT has each required label
-        for host in hosts:
-            for label in host.get_labels():
-                if label.name in required_labels_found:
-                    required_labels_found[label.name] = True
-        # note: pools are stored as specially formatted labels
-        # to find if a DUT is in a pool,
-        # check if it has the label pool:mypoolname
-        for key in required_labels_found:
-            if not required_labels_found[key]:
-                raise error.TestFail('No DUT with required label %s' % key)
-
-        return
-
-        # to have autotest reverify that hosts are live, use the reverify_hosts
-        # rpc call
-        # note: this schedules a background asynchronous job, and
-        # logic to check back in on hosts would need to be built
-        # reverify_hostnames = [host.hostname for host in hosts]
-        # afe.reverify_hosts(hostnames=reverify_hostnames)
-
-        # example of running a command on the dut and getting the output back
-        # def run_ssh_command_on_dut(hostname, cmd):
-        #     """ Run a command on a DUT via ssh
-        #
-        #     @return output of the command
-        #     @raises subprocess.CalledProcessError if the ssh command fails,
-        #         such as a connection couldn't be established
-        #     """
-        #     ssh_cmd = ('ssh -o ConnectTimeout=2 -o StrictHostKeyChecking=no '
-        #             "root@%s '%s'") % (hostname, cmd)
-        #     return subprocess.check_output(ssh_cmd, shell=True)
-        # for host in hosts:
-        #     logging.info(run_ssh_command_on_dut(
-        #             host.hostname, 'cat /etc/lsb-release'))
diff --git a/server/site_tests/moblab_StorageQual/control b/server/site_tests/moblab_StorageQual/control
deleted file mode 100644
index 15949bb..0000000
--- a/server/site_tests/moblab_StorageQual/control
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "chromeos-moblab@google.com"
-NAME = "moblab_StorageQual"
-PURPOSE = "Test that Moblab can run the Storage Qual suite."
-ATTRIBUTES = "suite:moblab_storage_qual"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "moblab"
-TEST_TYPE = "server"
-
-DOC = """
-Kicks off the storage qual suite on a Moblab host against the DUTs on its
-subnet and ensures the suite completes successfully. The suite tests that
-moblab correctly provisions and runs the storage qual suite, but does not
-perform any disk operations.
-
-To invoke this test locally:
-  test_that -b stumpy_moblab <remote> moblab_StorageQual
-  --args="<ARGLIST>"
-
-where ARGLIST is a whitespace separated list of the following key=value pairs.
-Values pertaining to the test case include:
-
-  boto_path=<boto_path>                path to the boto file to be installed on
-                                       the Moblab DUT. If not specified, the
-                                       boto file in the current home directory
-                                       will be installed if it exists.
-  image_storage_server=<server_name>   Google Storage Bucket from which to
-                                       fetch test images. If not
-                                       specified, the value will be fetched
-                                       from global_config.
-  service_init_timeout_m=<int>         Timeout (in minutes) to wait for upstart
-                                       services to start on the moblab host.
-                                       This can take ~5 minutes on a physical
-                                       device and ~10 minutes on a VM.
-  test_timeout_hint_m=<int>            The overall timeout to expect for the
-                                       test run. For this test, it is very
-                                       important to collect post failure data
-                                       from the moblab device. If the overall
-                                       timeout is provided, the test will try to
-                                       fail early to save some time for log
-                                       collection from the DUT.
-  clear_devserver_cache=<boolean>      If True, image cache of the devserver
-                                       running on moblab is cleared before
-                                       running the test to validate devserver
-                                       imaging staging flow.
-"""
-from autotest_lib.client.bin import sysinfo
-from autotest_lib.client.common_lib import utils
-
-MOBLAB_AUTOTEST_FOLDERS = ['/usr/local/autotest/results',
-                           '/usr/local/autotest/logs']
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    args_dict = utils.args_to_dict(args)
-
-    logging.info('Logs from moblab\'s instance of autotest will be collected '
-                 'under the sysinfo/ folder in results.')
-    for folder in MOBLAB_AUTOTEST_FOLDERS:
-        logging.info('  Will collect %s', folder)
-        job.sysinfo.add_logdir(sysinfo.logdir(folder, excludes=()))
-
-    job.run_test('moblab_StorageQual', host=host,
-                 moblab_suite_max_retries=1, **args_dict)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/moblab_StorageQual/moblab_StorageQual.py b/server/site_tests/moblab_StorageQual/moblab_StorageQual.py
deleted file mode 100644
index 8aac8d3..0000000
--- a/server/site_tests/moblab_StorageQual/moblab_StorageQual.py
+++ /dev/null
@@ -1,272 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import re
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros import moblab_test
-from autotest_lib.server.hosts import moblab_host
-from autotest_lib.utils import labellib
-
-
-_CLEANUP_TIME_M = 5
-_MOBLAB_IMAGE_STORAGE = '/mnt/moblab/static'
-
-class moblab_StorageQual(moblab_test.MoblabTest):
-    """
-    Moblab storage qual suite test. Ensures that moblab can run the storage
-    qual tests on the correct DUTs in the correct order. This test does not
-    perform any destructive disk operations.
-
-    The test requires 2 duts, labeled 'storage_qual_cq_1', 'storage_qual_cq_2'.
-    Each DUT will run a sequence of tests, and the test will then verify
-    that the correct tests ran on the correctly labeled DUT, in the correct
-    order.
-    """
-    version = 1
-
-    # Moblab expects to have 1 dut with each of these labels
-    REQUIRED_LABELS = {'storage_qual_cq_1', 'storage_qual_cq_2'}
-
-    EXPECTED_RESULTS = {
-        'storage_qual_cq_1': [
-            'hardware_StorageQualBase_before',
-            'hardware_StorageStress_soak',
-            'hardware_StorageStress_soak',
-            'hardware_StorageStress_suspend',
-            'hardware_StorageQualBase_after'
-        ],
-        'storage_qual_cq_2': [
-            'hardware_StorageQualBase_before',
-            'hardware_StorageStress_soak',
-            'hardware_StorageStress_soak',
-            'hardware_StorageQualTrimStress',
-            'hardware_StorageQualTrimStress',
-            'hardware_StorageQualBase_after'
-        ]
-    }
-
-    def run_once(self, host, moblab_suite_max_retries,
-                 target_build='', clear_devserver_cache=True,
-                 test_timeout_hint_m=None):
-        """Runs a suite on a Moblab Host against its test DUTS.
-
-        @param host: Moblab Host that will run the suite.
-        @param moblab_suite_max_retries: The maximum number of test retries
-                allowed within the suite launched on moblab.
-        @param target_build: Optional build to be used in the run_suite
-                call on moblab. This argument is passed as is to run_suite. It
-                must be a sensible build target for the board of the sub-DUTs
-                attached to the moblab.
-        @param clear_devserver_cache: If True, image cache of the devserver
-                running on moblab is cleared before running the test to validate
-                devserver imaging staging flow.
-        @param test_timeout_hint_m: (int) Optional overall timeout for the test.
-                For this test, it is very important to collect post failure data
-                from the moblab device. If the overall timeout is provided, the
-                test will try to fail early to save some time for log collection
-                from the DUT.
-
-        @raises AutoservRunError if the suite does not complete successfully.
-        """
-        self._host = host
-        self._maybe_clear_devserver_cache(clear_devserver_cache)
-
-        duts = host.afe.get_hosts()
-        if len(duts) == 0:
-            raise error.TestFail('All hosts for this MobLab are down. Please '
-                                 'request the lab admins to take a look.')
-
-        board = None
-        dut_to_label = {}
-        for dut in duts:
-            # Fetch the board of the DUTs assigned to this Moblab. There should
-            # only be one type.
-            board = labellib.LabelsMapping(dut.labels)['board']
-            for label in dut.labels:
-                if label in self.REQUIRED_LABELS:
-                    dut_to_label[dut.hostname] = label
-
-        if set(dut_to_label.values()) != self.REQUIRED_LABELS:
-            raise error.TestFail(
-                'Missing required labels on hosts %s, are some hosts down?'
-                    % (self.REQUIRED_LABELS - set(dut_to_label.values())))
-
-        if not board:
-            raise error.TestFail('Could not determine board from hosts.')
-
-        if not target_build:
-            stable_version_map = host.afe.get_stable_version_map(
-                    host.afe.CROS_IMAGE_TYPE)
-            target_build = stable_version_map.get_image_name(board)
-
-        logging.info('Running suite: hardware_storagequal_cq')
-        cmd = ("%s/site_utils/run_suite.py --pool='' --board=%s --build=%s "
-               "--suite_name=hardware_storagequal_cq --retry=True "
-               "--max_retries=%d" %
-               (moblab_host.AUTOTEST_INSTALL_DIR, board, target_build,
-               moblab_suite_max_retries))
-        cmd, run_suite_timeout_s = self._append_run_suite_timeout(
-                cmd,
-                test_timeout_hint_m,
-        )
-
-        logging.debug('Run suite command: %s', cmd)
-        try:
-            result = host.run_as_moblab(cmd, timeout=run_suite_timeout_s)
-        except error.AutoservRunError as e:
-            if _is_run_suite_error_critical(e.result_obj.exit_status):
-                raise
-            # Non-critical failure: keep the result so its output can still
-            # be parsed below.
-            result = e.result_obj
-
-        logging.debug('Suite Run Output:\n%s', result.stderr)
-
-        job_ids = self._get_job_ids_from_suite_output(result.stderr)
-
-        logging.debug('Suite job ids %s', job_ids)
-
-        keyvals_per_host = self._get_keyval_files_per_host(host, job_ids)
-
-        logging.debug('Keyvals grouped by host %s', keyvals_per_host)
-
-        failed_test = False
-        for hostname in keyvals_per_host:
-            label = dut_to_label[hostname]
-            expected = self.EXPECTED_RESULTS[label]
-            actual = self._get_test_execution_order(
-                host, keyvals_per_host[hostname])
-
-            logging.info('Comparing test order for %s from host %s',
-                label, hostname)
-            logging.info('%-37s %s', 'Expected', 'Actual')
-            for i in range(max(len(expected), len(actual))):
-                expected_i = expected[i] if i < len(expected) else None
-                actual_i = actual[i] if i < len(actual) else None
-                check_fail = expected_i != actual_i
-                check_text = 'X' if check_fail else ' '
-                logging.info('%s %-35s %s', check_text, expected_i, actual_i)
-                failed_test = failed_test or check_fail
-
-        # Cache directory can contain large binaries like CTS/CTS zip files
-        # no need to offload those in the results.
-        # The cache is owned by root user
-        host.run('rm -fR /mnt/moblab/results/shared/cache',
-                    timeout=600)
-
-        if failed_test:
-            raise error.TestFail(
-                'Actual test execution order did not match expected')
-
-    def _append_run_suite_timeout(self, cmd, test_timeout_hint_m):
-        """Modify given run_suite command with timeout.
-
-        @param cmd: run_suite command str.
-        @param test_timeout_hint_m: (int) timeout for the test, or None.
-        @return cmd, run_suite_timeout_s: cmd is the updated command str,
-                run_suite_timeout_s is the timeout to use for the run_suite
-                call, in seconds.
-        """
-        if test_timeout_hint_m is None:
-            return cmd, 10800
-
-        # Arguments passed in via test_args may be all str, depending on how
-        # they're passed in.
-        test_timeout_hint_m = int(test_timeout_hint_m)
-        elapsed_m = self.elapsed.total_seconds() / 60
-        run_suite_timeout_m = (
-                test_timeout_hint_m - elapsed_m - _CLEANUP_TIME_M)
-        logging.info('Overall test timeout hint provided (%d minutes)',
-                     test_timeout_hint_m)
-        logging.info('%d minutes have already elapsed', elapsed_m)
-        logging.info(
-                'Keeping %d minutes for cleanup, will allow %d minutes for '
-                'the suite to run.', _CLEANUP_TIME_M, run_suite_timeout_m)
-        cmd += ' --timeout_mins %d' % run_suite_timeout_m
-        return cmd, run_suite_timeout_m * 60
-
-    def _maybe_clear_devserver_cache(self, clear_devserver_cache):
-        # When passed in via test_args, all arguments are str
-        if not isinstance(clear_devserver_cache, bool):
-            clear_devserver_cache = (clear_devserver_cache.lower() == 'true')
-        if clear_devserver_cache:
-            self._host.run('rm -rf %s/*' % _MOBLAB_IMAGE_STORAGE)
-
-    def _get_job_ids_from_suite_output(self, suite_output):
-        """Parse the set of job ids from run_suite output
-
-        @param suite_output (str) output from run_suite command
-        @return (set<int>) job ids contained in the suite
-        """
-        job_ids = set()
-        job_id_pattern = re.compile('(\d+)-moblab')
-        for line in suite_output.splitlines():
-            match = job_id_pattern.search(line)
-            logging.debug('suite line %s match %s', line, match)
-            if match is None:
-                continue
-            job_ids.add(int(match.groups()[0]))
-        return job_ids
-
-    def _get_keyval_files_per_host(self, host, job_ids):
-        """Find the result keyval files for the given job ids and
-        group them by host
-
-        @param host (moblab_host)
-        @param job_ids (set<int>) set of job ids to find keyvals for
-        @return (dict<str, list<str>>) map of hosts and the keyval
-            file locations
-        @throws AutoservRunError if the command fails to run on moblab
-        """
-        keyvals_per_host = {}
-        keyvals = host.run_as_moblab(
-            'find /mnt/moblab/results '
-            '-wholename *-moblab/192.168*/hardware_Storage*/keyval')
-        pattern = re.compile('(\d+)-moblab/(192.168.\d+.\d+)')
-        for line in keyvals.stdout.splitlines():
-            match = pattern.search(line)
-            if match is None:
-                continue
-            job_id, dut = match.groups()
-            if int(job_id) not in job_ids:
-                continue
-            if dut not in keyvals_per_host:
-                keyvals_per_host[dut] = []
-            keyvals_per_host[dut].append(line)
-
-        return keyvals_per_host
-
-    def _get_test_execution_order(self, host, keyvals):
-        """Determines the test execution order for the given list
-        of storage qual test result keyvals
-
-        @param host (moblab_host)
-        @param keyvals (list<str>) location of keyval files to order
-        @return (list<str>) test names in the order they executed
-        @throws AutoservRunError if the command fails to run on moblab
-        """
-        tests = host.run_as_moblab(
-            'FILES=(%s); for FILE in ${FILES[@]}; do cat $FILE '
-            '| grep storage_qual_cq; done '
-            '| sort | cut -d " " -f 2'
-            % ' '.join(keyvals)
-        )
-        test_execution_order = []
-        pattern = re.compile('hardware_\w+')
-        logging.debug(tests.stdout)
-        for line in tests.stdout.splitlines():
-            match = pattern.search(line)
-            if match:
-                test_execution_order.append(match.group(0))
-        return test_execution_order
-
-def _is_run_suite_error_critical(return_code):
-    # We can't actually import run_suite here because importing run_suite pulls
-    # in certain MySQLdb dependencies that fail to load in the context of a
-    # test.
-    # OTOH, these return codes are unlikely to change because external users /
-    # builders depend on them.
-    return return_code not in (
-            0,  # run_suite.RETURN_CODES.OK
-            2,  # run_suite.RETURN_CODES.WARNING
-    )
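The suite-output and keyval parsing in moblab_StorageQual above is regex bookkeeping: pull the numeric job ids out of run_suite's output, then group keyval paths by the DUT IP embedded in each result directory. A worked example on fabricated output (job ids, IPs, and paths are invented for illustration):

    import re

    suite_lines = [
        '28-moblab/192.168.231.100  hardware_StorageQualBase_before  GOOD',
        '29-moblab/192.168.231.101  hardware_StorageStress_soak      GOOD',
    ]
    job_ids = set()
    for line in suite_lines:
        match = re.search(r'(\d+)-moblab', line)
        if match:
            job_ids.add(int(match.group(1)))
    print(sorted(job_ids))                    # [28, 29]

    keyval_paths = [
        '/mnt/moblab/results/28-moblab/192.168.231.100/hardware_StorageQualBase_before/keyval',
        '/mnt/moblab/results/29-moblab/192.168.231.101/hardware_StorageStress_soak/keyval',
    ]
    per_host = {}
    for path in keyval_paths:
        match = re.search(r'(\d+)-moblab/(192\.168\.\d+\.\d+)', path)
        if match and int(match.group(1)) in job_ids:
            per_host.setdefault(match.group(2), []).append(path)
    print(sorted(per_host))                   # ['192.168.231.100', '192.168.231.101']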
diff --git a/server/site_tests/native_Benchmarks/control.octane b/server/site_tests/native_Benchmarks/control.octane
deleted file mode 100644
index dec5892..0000000
--- a/server/site_tests/native_Benchmarks/control.octane
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "c-compiler-chrome@google.com"
-NAME = "native_Benchmarks.octane"
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "performance"
-TEST_TYPE = "server"
-
-DOC = """
-Build v8 and run octane.
-"""
-
-profiler = None
-p_args = []
-
-# Put the args into the args_dict.
-args_dict = utils.args_to_dict(args)
-
-if 'profiler' in args_dict:
-    profiler = args_dict['profiler']
-    if 'profiler_args' in args_dict:
-        p_args = args_dict['profiler_args']
-
-if profiler:
-    job.default_profile_only = True
-    job.profilers.add(profiler, p_args)
-
-def run_native_Benchmarks(machine):
-    client = hosts.create_host(machine)
-    job.run_test('native_Benchmarks', client=client, name='octane', args=args)
-
-# run the test in multiple machines
-
-job.parallel_simple(run_native_Benchmarks, machines)
-
-if profiler:
-    job.profilers.delete(profiler)
diff --git a/server/site_tests/native_Benchmarks/control.vp8 b/server/site_tests/native_Benchmarks/control.vp8
deleted file mode 100644
index 4fb08ab..0000000
--- a/server/site_tests/native_Benchmarks/control.vp8
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "c-compiler-chrome@google.com"
-NAME = "native_Benchmarks.vp8"
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "performance"
-TEST_TYPE = "server"
-
-DOC = """
-Run vp8 encoder and decoder as benchmarks.
-"""
-
-profiler = None
-p_args = []
-
-# Put the args into the args_dict.
-args_dict = utils.args_to_dict(args)
-
-if 'profiler' in args_dict:
-    profiler = args_dict['profiler']
-    if 'profiler_args' in args_dict:
-        p_args = args_dict['profiler_args']
-
-if profiler:
-    job.default_profile_only = True
-    job.profilers.add(profiler, p_args)
-
-def run_native_Benchmarks(machine):
-    client = hosts.create_host(machine)
-    job.run_test('native_Benchmarks', client=client, name='vp8', args=args)
-
-# run the test in multiple machines
-
-job.parallel_simple(run_native_Benchmarks, machines)
-
-if profiler:
-    job.profilers.delete(profiler)
diff --git a/server/site_tests/native_Benchmarks/native_Benchmarks.py b/server/site_tests/native_Benchmarks/native_Benchmarks.py
deleted file mode 100644
index ed783dc..0000000
--- a/server/site_tests/native_Benchmarks/native_Benchmarks.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import shutil
-import tempfile
-
-from autotest_lib.server import test
-
-from native_Benchmarks_common import CLIENT_TEST_ROOT
-from native_Benchmarks_common import run_check
-
-from octane import octane
-from vp8 import vp8
-
-# Benchmark suites
-suites = {
-    'octane': octane,
-    'vp8': vp8,
-}
-
-class native_Benchmarks(test.test):
-    """Build and run native benchmarks"""
-    version = 1
-
-    def run_once(self, client, name, args):
-        """
-        Build benchmark on the invoking machine and run it on client.
-
-        @param client: The autotest host object representing the client.
-        @param name: The name of the benchmark suite to run.
-        @param args: List of 'key=value' flag strings forwarded to the suite.
-        """
-
-        # scratch directory on server.
-        scratch_srv = tempfile.mkdtemp()
-        try:
-            # scratch directory on client.
-            cmd = 'mkdir -p %s' % CLIENT_TEST_ROOT
-            err_msg = 'Unable to create %s' % CLIENT_TEST_ROOT
-            run_check(client, cmd, err_msg)
-            scratch_cli = CLIENT_TEST_ROOT
-
-            flags = dict(i.split('=') for i in args)
-            results = suites[name](scratch_srv, scratch_cli, client, flags).run()
-            for r in results:
-                self.output_perf_value(**r)
-        finally:
-            if scratch_srv and os.path.isdir(scratch_srv):
-                shutil.rmtree(scratch_srv)
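The one-liner `flags = dict(i.split('=') for i in args)` in run_once above turns the control file's extra key=value arguments into build flags for the benchmark suites (which then feed them to def_flag). A tiny illustration with invented flag values:

    args = ['CXX=clang++', 'LDFLAGS=-static']
    flags = dict(i.split('=') for i in args)
    print(flags)          # {'CXX': 'clang++', 'LDFLAGS': '-static'}

Note that split('=') without a maxsplit would mishandle a value that itself contains '=', which is acceptable for the simple compiler and linker flags used here.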
diff --git a/server/site_tests/native_Benchmarks/native_Benchmarks_common.py b/server/site_tests/native_Benchmarks/native_Benchmarks_common.py
deleted file mode 100644
index 1b296c0..0000000
--- a/server/site_tests/native_Benchmarks/native_Benchmarks_common.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import StringIO
-
-SERVER_TEST_ROOT = os.path.dirname(__file__)
-CLIENT_TEST_ROOT = '/usr/local/autotest/tests/native_Benchmarks'
-
-def run_check(host, cmd, err_msg):
-    """Run command on a host object.
-    It checks and logs if error occurred.
-
-    @param host: the host object
-    @param cmd: the command to run
-    @param err_msg: what to print when error occurred.
-    @return: stdout of the cmd.
-    """
-    logging.info('(%s) Running: %s', host, cmd)
-    stdout = StringIO.StringIO()
-    stderr = StringIO.StringIO()
-    try:
-        result = host.run(cmd, stdout_tee=stdout, stderr_tee=stderr)
-    except:
-        logging.info('%s:\n%s\n%s\n', err_msg,
-                                      stdout.getvalue(),
-                                      stderr.getvalue())
-        raise
-    finally:
-        stdout_str = stdout.getvalue()
-        stdout.close()
-        stderr.close()
-    return stdout_str
-
-def rcp_check(client, src, dst, err_msg):
-    """Copy src on the running machine to dst on client.
-    It checks and logs if error occurred.
-
-    @param client: a host object representing client.
-    @param src: path on the running machine.
-    @param dst: path on client.
-    @param err_msg: what to print when error occurred.
-    """
-    logging.info('Copying: %s -> %s', src, dst)
-    try:
-        client.send_file(src, dst)
-    except:
-        logging.info('%s: %s %s', err_msg, src, dst)
-        raise
-
-def def_flag(d, k, v):
-    """Define a flag: k=v in d
-    Warn if k is already in d.
-
-    @param d: the flag dictionary
-    @param k: key
-    @param v: value
-    """
-    if k in d:
-        logging.info('WARNING: Overriding flag %s: from %s to %s', k, d[k], v)
-    d[k] = v
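run_check above tees the remote command's stdout and stderr into StringIO buffers so both streams can be logged if the command fails, then returns stdout on success. A minimal Python 3 sketch of the same fail-loudly idea, using subprocess in place of the autotest host object (this is not the autotest API):

    import logging
    import subprocess

    def run_check(cmd, err_msg):
        # Run `cmd`, return its stdout; log both streams and raise on failure.
        proc = subprocess.run(cmd, shell=True, capture_output=True, text=True)
        if proc.returncode != 0:
            logging.error('%s:\n%s\n%s', err_msg, proc.stdout, proc.stderr)
            raise subprocess.CalledProcessError(proc.returncode, cmd,
                                                proc.stdout, proc.stderr)
        return proc.stdout

    print(run_check('echo hello', 'echo unexpectedly failed'), end='')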
diff --git a/server/site_tests/native_Benchmarks/octane.py b/server/site_tests/native_Benchmarks/octane.py
deleted file mode 100644
index e70f36b..0000000
--- a/server/site_tests/native_Benchmarks/octane.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from native_Benchmarks_common import *
-from v8 import v8
-
-class octane(object):
-    """Build v8 and run octane with it on client"""
-
-    def __init__(self, scratch_srv, scratch_cli, client, args):
-        # Instantiating v8 builds the v8 engine.
-        self.v8 = v8(scratch_srv, scratch_cli, client, args)
-        self.client = client
-        self.scratch_cli = scratch_cli
-
-        # download octane to client
-        src = '%s/octane.tar.bz2' % SERVER_TEST_ROOT
-        dst = '%s/octane.tar.bz2' % scratch_cli
-        rcp_check(client, src, dst,
-                  'Error occurred while sending octane to client.\n')
-
-        # unpack octane
-        cmd = 'tar jxf %s -C %s' % (dst, scratch_cli)
-        run_check(client, cmd, 'Error occurred while unpacking octane')
-
-    def run(self):
-        """Returns perf_value tuples"""
-        # Octane needs to run in PATH_TO/octane.
-        wd = '%s/octane' % self.scratch_cli
-        cmd = 'cd %s && %s run_all.js' % (wd, self.v8.executable)
-        log = run_check(self.client, cmd, "Error occurred while running v8")
-        return self.parse(log)
-
-    def parse(self, log):
-        """Translate logs into perf_values tuples.
-        @param log: the log to parse
-        """
-        pairs = [line.split(': ') for line in log.splitlines()]
-        del pairs[-2]
-        pairs[-1][0] = 'Total'
-        return [{'description': 'Octane V2',
-                 'graph': p[0],
-                 'value': p[1],
-                 'units': 'score'} for p in pairs]
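octane.parse above relies on d8's run_all.js printing one 'Benchmark: score' pair per line, with a separator as the second-to-last line and the overall score last; the sample output below is fabricated from that assumption rather than copied from a real run:

    log = '\n'.join([
        'Richards: 26353',
        'DeltaBlue: 29045',
        'Crypto: 31231',
        '----',
        'Score (version 9): 28768',
    ])

    pairs = [line.split(': ') for line in log.splitlines()]
    del pairs[-2]              # drop the separator line
    pairs[-1][0] = 'Total'     # rename the overall score row
    results = [{'description': 'Octane V2',
                'graph': p[0],
                'value': p[1],
                'units': 'score'} for p in pairs]
    for r in results:
        print(r['graph'], r['value'])
    # Richards 26353 ... Total 28768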
diff --git a/server/site_tests/native_Benchmarks/octane.tar.bz2 b/server/site_tests/native_Benchmarks/octane.tar.bz2
deleted file mode 100644
index bc84609..0000000
--- a/server/site_tests/native_Benchmarks/octane.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/native_Benchmarks/v8.py b/server/site_tests/native_Benchmarks/v8.py
deleted file mode 100644
index bb56650..0000000
--- a/server/site_tests/native_Benchmarks/v8.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import shutil
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import utils
-
-from native_Benchmarks_common import *
-
-class v8(object):
-    """Build and copy the v8 engine to client."""
-
-    def __init__(self, scratch_srv, scratch_cli, client, flags_additional):
-        self.src = "%s/v8" % scratch_srv
-
-        # unpack
-        cmd = 'tar jxf %s/v8.tar.bz2 -C %s' % (SERVER_TEST_ROOT, scratch_srv)
-        run_check(utils, cmd, 'Error occurred while unpacking v8')
-
-        # build
-        arch = client.get_arch()
-        flags = {}
-        def_flag(flags, 'LDFLAGS', '-static')
-        options = '-C %s i18nsupport=off snapshot=off -j40' % self.src
-        if arch == 'armv7l':
-            def_flag(flags, 'CXX', 'armv7a-cros-linux-gnueabihf-g++')
-            def_flag(flags, 'LINK', 'armv7a-cros-linux-gnueabihf-g++')
-            options += ' arm.release'
-            d8src = '%s/out/arm.release/d8' % self.src
-        elif arch == 'x86_64':
-            def_flag(flags, 'CXX', 'x86_64-cros-linux-gnu-g++')
-            def_flag(flags, 'LINK', 'x86_64-cros-linux-gnu-g++')
-            options += ' x64.release'
-            d8src = '%s/out/x64.release/d8' % self.src
-        else:
-            raise error.TestFail('Unknown cpu architecture: %s' % arch)
-        for f, v in flags_additional.iteritems():
-            def_flag(flags, f, v)
-        envs = ' '.join('%s=%s' % (k, v) for k, v in flags.iteritems())
-        cmd = '%s make %s' % (envs, options)
-
-        run_check(utils, cmd, 'Error occurred building v8')
-        if not os.path.isfile(d8src):
-            raise error.TestFail('Unknown error when building v8')
-
-        # copy
-        d8dst = '%s/d8' % scratch_cli
-        rcp_check(client, d8src, d8dst,
-                  'Error occurred while sending d8 to client.\n')
-        self.executable = d8dst
-
-    def __del__(self):
-        if os.path.isdir(self.src):
-            shutil.rmtree(self.src)
diff --git a/server/site_tests/native_Benchmarks/v8.tar.bz2 b/server/site_tests/native_Benchmarks/v8.tar.bz2
deleted file mode 100644
index 7573164..0000000
--- a/server/site_tests/native_Benchmarks/v8.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/native_Benchmarks/vp8.py b/server/site_tests/native_Benchmarks/vp8.py
deleted file mode 100644
index 1b1a3fc..0000000
--- a/server/site_tests/native_Benchmarks/vp8.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from native_Benchmarks_common import *
-from webm import webm
-
-class vp8(object):
-    """Build webm codec (vpxenc/vpxdec) and run them on client"""
-    def __init__(self, scratch_srv, scratch_cli, client, args):
-        # Instantiating webm builds the codec.
-        self.webm = webm(scratch_srv, scratch_cli, client, args)
-        self.client = client
-        self.scratch_cli = scratch_cli
-
-        # download
-        src = '%s/vp8.webm' % SERVER_TEST_ROOT
-        dst = '%s/vp8.webm' % scratch_cli
-        rcp_check(client, src, dst,
-                  'Error occurred while sending vp8.webm to client.\n')
-
-    def run(self):
-        """Returns perf_value tuples"""
-        # run decoder
-        cmd = ('%s --summary %s/vp8.webm -o %s/vp8.yuv 2>&1' %
-               (self.webm.vpxdec, self.scratch_cli, self.scratch_cli))
-        declog = run_check(self.client, cmd, "Error occurred while running vp8")
-        # run encoder
-        cmd = (('%s %s/vp8.yuv -o /dev/null --codec=vp8 --i420 -w 1280' +
-                ' -h 720 --good --cpu-used=0 --target-bitrate=2000 2>&1') %
-               (self.webm.vpxenc, self.scratch_cli))
-        enclog = run_check(self.client, cmd,
-                           "Error occurred while running vp8enc")
-        return self.parse(declog, enclog)
-
-    def parse(self, dec, enc):
-        """Translate logs into perf_values tuples.
-        @param dec: logs from decoder
-        @param enc: logs from encoder
-        """
-        return [{'description': 'VP8',
-                 'graph': 'decode',
-                 'value': dec.split()[-2][1:],
-                 'units': 'fps'},
-                {'description': 'VP8',
-                 'graph': 'encode',
-                 'value': enc.split()[-2][1:],
-                 'units': 'fps'}]
-
-    def __del__(self):
-        run_check(self.client, 'rm -f %s/vp8.yuv' % self.scratch_cli,
-                  "Error occurred while cleaning up")
diff --git a/server/site_tests/native_Benchmarks/vp8.webm b/server/site_tests/native_Benchmarks/vp8.webm
deleted file mode 100644
index 9f861ae..0000000
--- a/server/site_tests/native_Benchmarks/vp8.webm
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/native_Benchmarks/webm.py b/server/site_tests/native_Benchmarks/webm.py
deleted file mode 100644
index a5ed3e2..0000000
--- a/server/site_tests/native_Benchmarks/webm.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import shutil
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import utils
-
-from native_Benchmarks_common import *
-
-class webm(object):
-    """Build and copy the codec to client."""
-
-    def __init__(self, scratch_srv, scratch_cli, client, flags_additional):
-        self.src = "%s/webm" % scratch_srv
-
-        # unpack
-        cmd = 'tar jxf %s/webm.tar.bz2 -C %s' % (SERVER_TEST_ROOT, scratch_srv)
-        run_check(utils, cmd, 'Error occurred while unpacking webm')
-
-        # build
-        arch = client.get_arch()
-        flags = {}
-        def_flag(flags, 'LDFLAGS', '-static')
-        options =  ' --disable-unit_tests'
-        options += ' --disable-docs'
-        options += ' --disable-runtime-cpu-detect'
-        if arch == 'armv7l':
-            def_flag(flags, 'CC', 'armv7a-cros-linux-gnueabihf-gcc')
-            def_flag(flags, 'CXX', 'armv7a-cros-linux-gnueabihf-g++')
-            def_flag(flags, 'LD', 'armv7a-cros-linux-gnueabihf-g++')
-            def_flag(flags, 'AR', 'armv7a-cros-linux-gnueabihf-ar')
-            def_flag(flags, 'AS', 'armv7a-cros-linux-gnueabihf-as')
-            options += ' --target=armv7-linux-gcc'
-        elif arch == 'x86_64':
-            def_flag(flags, 'CC', 'x86_64-cros-linux-gnu-gcc')
-            def_flag(flags, 'CXX', 'x86_64-cros-linux-gnu-g++')
-            def_flag(flags, 'LD', 'x86_64-cros-linux-gnu-g++')
-            def_flag(flags, 'AR', 'x86_64-cros-linux-gnu-ar')
-            options += ' --target=x86_64-linux-gcc'
-        else:
-            raise error.TestFail('Unknown cpu architecture: %s' % arch)
-        for f, v in flags_additional.iteritems():
-            def_flag(flags, f, v)
-        envs = ' '.join('%s=%s' % (k, v) for k, v in flags.iteritems())
-        cmd =  'mkdir -p %s/webm/out && ' % scratch_srv
-        cmd += 'cd %s/webm/out && ' % scratch_srv
-        cmd += ' %s ../configure %s && ' % (envs, options)
-        cmd += 'make -j 40'
-
-        run_check(utils, cmd, 'Error occurred building vpxenc')
-
-        files = ['vpxenc', 'vpxdec']
-        for v in files:
-            if not os.path.isfile('%s/out/%s' % (self.src, v)):
-                raise error.TestFail('Unknown error when building %s' % v)
-
-        # copy
-        for v in files:
-            rcp_check(client, '%s/out/%s' % (self.src, v),
-                      '%s/%s' % (scratch_cli, v),
-                      'Error occurred while sending %s to client.' % v)
-        self.vpxenc = '%s/vpxenc' % scratch_cli
-        self.vpxdec = '%s/vpxdec' % scratch_cli
-
-    def __del__(self):
-        if os.path.isdir(self.src):
-            shutil.rmtree(self.src)
diff --git a/server/site_tests/native_Benchmarks/webm.tar.bz2 b/server/site_tests/native_Benchmarks/webm.tar.bz2
deleted file mode 100644
index 66213b2..0000000
--- a/server/site_tests/native_Benchmarks/webm.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/nbr_EndToEndTest/control.basic b/server/site_tests/nbr_EndToEndTest/control.basic
new file mode 100644
index 0000000..51c31c3
--- /dev/null
+++ b/server/site_tests/nbr_EndToEndTest/control.basic
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "hbarnor, Chromium OS"
+NAME = "nbr_EndToEndTest.basic"
+TIME = "MEDIUM"
+PURPOSE = "Test an NBR N-to-M recovery with Nebraska."
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+ATTRIBUTES = "suite:nbr"
+PY_VERSION = 3
+
+DOC = """
+This test performs an N-to-M recovery: it recovers the DUT from a ToT install
+back to the current stable version. It is intended to run in the CQ to ensure
+that changes don't break NBR.
+
+When running locally, supply a job_repo_url to the test; in the lab it is passed
+in directly. The job_repo_url points to the autotest packages on a devserver,
+and the test uses it to locate the correct payload.
+
+Example usage:
+test_that nbr_EndToEndTest.basic <DUT> --board=<board> --args="job_repo_url='http://<devserver IP>:8082/static/<board>-release/RXX-XXXXX.X.X/autotest/packages', running_at_desk=True"
+
+"""
+
+from autotest_lib.client.common_lib import utils
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
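+# Note added for clarity (not in the original control file): args_to_dict()
+# parses the space/comma separated key=value pairs from the --args string shown
+# in the DOC example above into a plain dict of strings, which run() below
+# forwards to the test via **args_dict.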
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args, **args_dict)
+    # TODO(b/221263849): n2m is disabled until we have stable versions for
+    # guybrush and/or brya.
+    job.run_test("nbr_EndToEndTest", host=host, n2m=False, **args_dict)
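+    # Illustrative only: once stable versions exist for guybrush/brya, the
+    # N-to-M path can be exercised by passing n2m=True instead, e.g.
+    #   job.run_test("nbr_EndToEndTest", host=host, n2m=True, **args_dict)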
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/nbr_EndToEndTest/nbr_EndToEndTest.py b/server/site_tests/nbr_EndToEndTest/nbr_EndToEndTest.py
new file mode 100644
index 0000000..0b8f5f2
--- /dev/null
+++ b/server/site_tests/nbr_EndToEndTest/nbr_EndToEndTest.py
@@ -0,0 +1,82 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import kernel_utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.server.cros.minios import minios_test
+
+
+class nbr_EndToEndTest(minios_test.MiniOsTest):
+    """Test network based recovery of a DUT."""
+    version = 1
+
+    def run_once(self,
+                 job_repo_url=None,
+                 n2m=True,
+                 corrupt_partitions=False,
+                 network_name='Ethernet',
+                 network_password=None,
+                 running_at_desk=False):
+        """
+        Validates the network based recovery flow.
+
+        @param job_repo_url: A url pointing to the devserver where the autotest
+            package for this build should be staged.
+        @param n2m: Perform recovery from ToT to current stable version.
+        @param corrupt_partitions: Corrupt the kernel and rootfs partitions
+            before attempting recovery.
+        @param network_name: The name of the network to connect to for recovery.
+        @param network_password: Optional password for the network.
+        @param running_at_desk: Indicates the test is run locally from a
+            workstation.
+
+        """
+        update_url = job_repo_url
+        if n2m:
+            build_name = self._get_latest_serving_stable_build()
+            logging.debug('stable build name is %s', build_name)
+
+            # Determine the URL for the stable build.
+            autotest_devserver = dev_server.ImageServer.resolve(
+                    build_name, self._host.hostname)
+            update_url = autotest_devserver.get_update_url(build_name)
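+            # Illustrative only: the resulting update_url is expected to look
+            # something like
+            #   http://<devserver IP>:8082/update/<board>-release/RXX-XXXXX.X.X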
+
+        logging.info('Performing recovery with update url: %s', update_url)
+        payload_url = self.get_payload_for_nebraska(
+                update_url, full_payload=True, public_bucket=running_at_desk)
+
+        logging.info("Booting into MiniOS")
+        self._boot_minios()
+
+        logging.info("Successfully booted into MiniOS.")
+
+        # Install testing dependencies into MiniOS.
+        self._install_test_dependencies(public_bucket=running_at_desk)
+
+        old_boot_id = self._host.get_boot_id()
+        self._start_nebraska(payload_url=payload_url)
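+        # Nebraska is ChromeOS's fake Omaha update server; starting it with the
+        # staged payload lets the recovery flow below fetch the image from it
+        # (comment added for clarity; see minios_test.MiniOsTest for details).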
+        cmd = [
+                self._MINIOS_CLIENT_CMD, '--start_recovery',
+                f'--network_name={network_name}', '--watch'
+        ]
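+        # Illustrative only (the actual binary comes from
+        # self._MINIOS_CLIENT_CMD): with the default network_name this expands
+        # to roughly: <minios_client> --start_recovery --network_name=Ethernet --watch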
+        if network_password:
+            cmd += [f'--network_password={network_password}']
+        logging.info('Performing network based recovery with cmd: %s.', cmd)
+        self._run(cmd)
+        logging.info('Recovery complete. Grabbing logs.')
+
+        # Generate host log.
+        minios_hostlog = self._create_minios_hostlog()
+        self._verify_reboot(old_boot_id)
+
+        # NBR always recovers into partition A.
+        kernel_utils.verify_boot_expectations(kernel_utils._KERNEL_A,
+                                              host=self._host)
+        # Verify the update engine events that happened during the recovery.
+        self.verify_update_events(self._RECOVERY_VERSION, minios_hostlog)
+
+        # Restore the stateful partition.
+        logging.info('Verification complete. Restoring stateful.')
+        self._restore_stateful(public_bucket=running_at_desk)
diff --git a/server/site_tests/network_DiskFull/control b/server/site_tests/network_DiskFull/control
deleted file mode 100644
index 54c76c2..0000000
--- a/server/site_tests/network_DiskFull/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "quiche@chromium.org"
-NAME = "network_DiskFull"
-PURPOSE = "Test that the connection manager survives on a full disk."
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "server"
-
-DOC = """
-This test fills the /var partition, and sets up a process to keep
-the disk full (in case, e.g. an old log file is deleted). It then
-tests how various bits of network machinery (e.g. shill, dhcpcd)
-cope with a full disk.
-"""
-
-def run(machine):
-    job.run_test("network_DiskFull", client_addr=machine)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_DiskFull/hog_disk.sh b/server/site_tests/network_DiskFull/hog_disk.sh
deleted file mode 100755
index f2a9725..0000000
--- a/server/site_tests/network_DiskFull/hog_disk.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/sh
-
-set -e
-
-MAX_TIMEOUT_SECONDS=300
-
-usage() {
-    echo "$0 <mount point> <timeout seconds>"
-    exit 1
-}
-
-# Get the size of the filesystem mounted at $1, in bytes.
-get_mount_size_bytes() {
-    local mount_point="$1"
-
-    # Filesystem              1024-blocks  Used Available Capacity Mounted on
-    # /dev/mapper/encstateful      290968 47492    243476      17% /var
-    #
-    # awk uses double-representation internally; we'll hit problems if
-    # the filesystem has more than 2^53 bytes (8 petabytes).
-    df -P "$mount_point" |
-    awk '($6 == "'"$mount_point"'") { printf "%.0f", $2*1024; exit }'
-}
-
-if [ $# -ne 2 ]; then
-    usage
-fi
-
-mount_point="$1"
-timeout_seconds="$2"
-
-if [ "$timeout_seconds" -gt $MAX_TIMEOUT_SECONDS ]; then
-    echo "max timeout is "$MAX_TIMEOUT_SECONDS" seconds";
-    exit 1
-fi
-
-mount_size_bytes=$(get_mount_size_bytes /var)
-temp_file=$(mktemp --tmpdir="$mount_point" hog_disk.XXXXXXXXXX)
-trap 'rm -f "$temp_file"' EXIT
-trap 'exit' HUP INT QUIT TERM
-
-for i in $(seq 1 $(( timeout_seconds * 10 ))); do
-    fallocate --length "$mount_size_bytes" "$temp_file" 2>/dev/null || true
-    sleep 0.1
-done
diff --git a/server/site_tests/network_DiskFull/network_DiskFull.py b/server/site_tests/network_DiskFull/network_DiskFull.py
deleted file mode 100644
index 8d277e79..0000000
--- a/server/site_tests/network_DiskFull/network_DiskFull.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import autotest
-from autotest_lib.server import hosts
-from autotest_lib.server import test
-from autotest_lib.server.cros import remote_command
-
-class network_DiskFull(test.test):
-    """Test networking daemons when /var is full."""
-
-    version = 1
-    CLIENT_TEST_LIST = [
-        ('network_DhcpNegotiationSuccess', {}),
-        ('network_DhcpRenew', {}),
-        ('network_RestartShill', {
-                'tag': 'profile_exists',
-                'remove_profile': False}),
-        ('network_RestartShill', {
-                'tag': 'profile_missing',
-                'remove_profile': True}),
-        ]
-    CLIENT_TMP_DIR = '/tmp'
-    DISK_FILL_SCRIPT = 'hog_disk.sh'
-    FILL_TIMEOUT_SECONDS = 5
-    MAX_FREE_KB = 1024
-    STATEFUL_PATH = '/var'
-    TEST_TIMEOUT_SECONDS = 180
-
-    def get_free_kilobytes(self, mount_point):
-        """
-        Get the size of free space on the filesystem mounted at |mount_point|,
-        in kilobytes.
-
-        @return Kilobytes free, as an integer.
-        """
-        # Filesystem              1024-blocks  Used Available Capacity Mount...
-        # /dev/mapper/encstateful      290968 47492    243476      17% /var
-        output = self._client.run('df -P %s' % mount_point).stdout
-        lines = output.splitlines()
-        if len(lines) != 2:
-            raise error.TestFail('Unexpected df output: %s' % lines)
-        _, _, _, free_kb, _, df_mount_point = lines[1].split(None, 5)
-        if df_mount_point != mount_point:
-            raise error.TestFail('Failed to find %s, got %s instead.' %
-                                 (mount_point, df_mount_point))
-        return int(free_kb)
-
-
-    def wait_until_full(self, mount_point, max_free_kilobytes):
-        """
-        Wait until |mount_point| has no more than |max_free_kilobytes| free.
-
-        @param mount_point The path at which the filesystem is mounted.
-        @param max_free_kilobytes Maximum free space permitted, in kilobytes.
-        @return True if the disk is full, else False
-        """
-        start_time = time.time()
-        while time.time() - start_time < self.FILL_TIMEOUT_SECONDS:
-            if (self.get_free_kilobytes(mount_point) <= max_free_kilobytes):
-                return True
-            else:
-                time.sleep(1)
-        return False
-
-
-    def run_once(self, client_addr):
-        """
-        Test main loop.
-
-        @param client_addr DUT hostname or IP address.
-        """
-        self._client = hosts.create_host(client_addr)
-        client_autotest = autotest.Autotest(self._client)
-
-        disk_filler_src = os.path.join(self.bindir, self.DISK_FILL_SCRIPT)
-        disk_filler_dst = os.path.join(self.CLIENT_TMP_DIR,
-                                       os.path.basename(self.DISK_FILL_SCRIPT))
-        self._client.send_file(disk_filler_src, disk_filler_dst)
-
-        disk_filler_command = '%s %s %d' % (
-            disk_filler_dst, self.STATEFUL_PATH, self.TEST_TIMEOUT_SECONDS)
-
-        with remote_command.Command(self._client, disk_filler_command) \
-                as disk_filler_process:
-            if not self.wait_until_full(self.STATEFUL_PATH, self.MAX_FREE_KB):
-                logging.debug(disk_filler_process.result)
-                raise error.TestFail(
-                    'did not fill %s within %d seconds' % (
-                        self.STATEFUL_PATH, self.FILL_TIMEOUT_SECONDS))
-
-            client_autotest.run_test('network_CheckCriticalProcesses',
-                                     tag='before_client_tests')
-            passed_with_failsafe = []
-
-            for name, kwargs in self.CLIENT_TEST_LIST:
-                # Autotest goes to /mnt/stateful_partition/dev_image,
-                # while /var is on /mnt/stateful_partition/encrypted.
-                #
-                # These are separate partitions, so we can copy
-                # the tests onto the DUT even when /var is full.
-                client_autotest.run_test(name, **kwargs)
-
-                if 'tag' in kwargs:
-                    full_test_name = '%s.%s' % (name, kwargs['tag'])
-                else:
-                    full_test_name = name
-
-                # To avoid leaving the system in a bad state, the disk
-                # filler times out eventually. This means a test can
-                # "pass" due to the failsafe. Check if the failsafe
-                # kicked in, by checking if the disk is still full.
-                if (self.get_free_kilobytes(self.STATEFUL_PATH) >
-                    self.MAX_FREE_KB):
-                    passed_with_failsafe.append(full_test_name)
-
-                client_autotest.run_test('network_CheckCriticalProcesses',
-                                         tag='after_%s' % full_test_name)
-
-            if len(passed_with_failsafe):
-                raise error.TestFail(
-                    '%d test(s) triggered the fail-safe: %s. '
-                    'They may be incorrectly listed as passing.' % (
-                        len(passed_with_failsafe),
-                        ', '.join(passed_with_failsafe)))
diff --git a/server/site_tests/network_EthCapsServer/control b/server/site_tests/network_EthCapsServer/control
deleted file mode 100644
index 55fbd38..0000000
--- a/server/site_tests/network_EthCapsServer/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Team"
-NAME = "network_EthCapsServer"
-PURPOSE = 'Verify that LAN devices have the required capabilities.'
-CRITERIA = """
-Test will pass if the following requirements are met, otherwise should fail:
-1. Has Wake-on-LAN magic packet capabilities and successfully wakes from
-   suspend.
-   - subtest should take <20 seconds to pass and <60seconds to fail
-"""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "server"
-
-DOC = """
-This test checks ethernet capabilities of a device.  See CRITERIA for more
-details.
-"""
-
-def run_ethcaps(machine):
-    job.run_test('network_EthCapsServer', client_ip=machine)
-
-job.parallel_simple(run_ethcaps, machines, timeout=60)
diff --git a/server/site_tests/network_EthCapsServer/network_EthCapsServer.py b/server/site_tests/network_EthCapsServer/network_EthCapsServer.py
deleted file mode 100644
index ad1cdcb..0000000
--- a/server/site_tests/network_EthCapsServer/network_EthCapsServer.py
+++ /dev/null
@@ -1,168 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, os, re, socket, subprocess, tempfile, threading, time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import autotest, hosts, test
-
-
-class WolWake(threading.Thread):
-    """Class to allow waking of DUT via Wake-on-LAN capabilities (WOL)."""
-
-
-    def __init__(self, hostname, mac_addr, sleep_secs):
-        """Constructor for waking DUT.
-
-        Args:
-          mac_addr: string of mac address tuple
-          sleep_secs: seconds to sleep prior to attempting WOL
-        """
-        threading.Thread.__init__(self)
-        self._hostname = hostname
-        self._mac_addr = mac_addr
-        self._sleep_secs = sleep_secs
-
-
-    # TODO(tbroch) Borrowed from class ServoTest.  Refactor for code re-use
-    def _ping_test(self, hostname, timeout=5):
-        """Verify whether a host responds to a ping.
-
-        Args:
-          hostname: Hostname to ping.
-          timeout: Time in seconds to wait for a response.
-
-        Returns: True if success False otherwise
-        """
-        with open(os.devnull, 'w') as fnull:
-            ping_good = False
-            elapsed_time = 0
-            while not ping_good and elapsed_time < timeout:
-                ping_good = subprocess.call(
-                    ['ping', '-c', '1', '-W', str(timeout), str(hostname)],
-                    stdout=fnull, stderr=fnull) == 0
-                time.sleep(1)
-                elapsed_time += 1
-            return ping_good
-
-
-    def _send_wol_magic_packet(self):
-        """Perform Wake-on-LAN magic wake.
-
-        WOL magic packet consists of:
-          0xff repeated for 6 bytes
-          <mac addr> repeated 16 times
-
-        Sent as a broadcast packet.
-        """
-        mac_tuple = self._mac_addr.split(':')
-        assert len(mac_tuple) == 6
-        magic = '\xff' * 6
-        submagic = ''.join("%c" % int(value, 16) for value in mac_tuple)
-        magic += submagic * 16
-        assert len(magic) == 102
-
-        sock=socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
-        sock.sendto(magic, ('<broadcast>', 7))
-        sock.close()
-        logging.info("Wake thread sent WOL wakeup")
-
-
-    def run(self):
-        # ping device to make sure its network is off presumably from suspend
-        # not another malfunction.
-        ping_secs = 0
-        while self._ping_test(self._hostname, timeout=2) and \
-                ping_secs < self._sleep_secs:
-            time.sleep(1)
-            ping_secs += 1
-
-        self._send_wol_magic_packet()
-
-
-class network_EthCapsServer(test.test):
-    """test class"""
-    version = 1
-
-    def _parse_ifconfig(self, filename):
-        """Retrieve ifconfig information.
-
-        Raises
-          error.TestError if unable to parse mac address
-        """
-        self._mac_addr = None
-
-        fd = open(filename)
-        re_mac = re.compile(r'.*(HWaddr|ether)\s+(\S+:\S+:\S+:\S+:\S+:\S+).*')
-        for ln in fd.readlines():
-            logging.debug(ln)
-            mat = re.match(re_mac, ln)
-            if mat:
-                self._mac_addr = mat.group(2)
-                logging.info("mac addr = %s", self._mac_addr)
-                break
-        fd.close()
-
-        if not self._mac_addr:
-            raise error.TestError("Unable to find mac addresss")
-
-
-    def _client_cmd(self, cmd, results=None):
-        """Execute a command on the client.
-
-        Args:
-          results: string of filename to save results on client.
-
-        Returns:
-          string of filename on server side with stdout results of command
-        """
-        if results:
-            client_tmpdir = self._client.get_tmp_dir()
-            client_results = os.path.join(client_tmpdir, "%s" % results)
-            cmd = "%s > %s 2>&1" % (cmd, client_results)
-
-        logging.info("Client cmd = %s", cmd)
-        self._client.run(cmd)
-
-        if results:
-            server_tmpfile = tempfile.NamedTemporaryFile(delete=False)
-            server_tmpfile.close()
-            self._client.get_file(client_results, server_tmpfile.name)
-            return server_tmpfile.name
-
-        return None
-
-
-    def run_once(self, client_ip=None, ethname='eth0'):
-        """Run the test.
-
-        Args:
-          client_ip: string of client's ip address
-          ethname: string of ethernet device under test
-        """
-        if not client_ip:
-            raise error.TestError("Must provide client's IP address to test")
-
-        sleep_secs = 20
-
-        self._ethname = ethname
-        self._client_ip = client_ip
-        self._client = hosts.create_host(client_ip)
-        client_at = autotest.Autotest(self._client)
-
-        # retrieve ifconfig info for mac address of client
-        cmd = "ifconfig %s" % self._ethname
-        ifconfig_filename = self._client_cmd(cmd, results="ifconfig.log")
-        self._parse_ifconfig(ifconfig_filename)
-
-        # thread to wake the device using WOL
-        wol_wake = WolWake(self._client_ip, self._mac_addr, sleep_secs)
-        wol_wake.start()
-
-        # create and run client test to prepare and suspend device
-        client_at.run_test("network_EthCaps", ethname=ethname,
-                           threshold_secs=sleep_secs * 2)
-
-        wol_wake.join()
diff --git a/server/site_tests/network_FirewallHolePunchServer/control b/server/site_tests/network_FirewallHolePunchServer/control
deleted file mode 100644
index 552ea8d..0000000
--- a/server/site_tests/network_FirewallHolePunchServer/control
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "krisr"
-NAME = "network_FirewallHolePunchServer"
-PURPOSE = "Verify Chrome apps can open holes in the firewall"
-CRITERIA = """
-Test will pass if Chrome apps can open holes in the firewall, that those
-holes are closed when the app is reloaded or closed"""
-ATTRIBUTES = "suite:network_nightly"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "server"
-
-DOC = """
-This test checks network firewall manipulations.  See CRITERIA for more
-details
-"""
-
-from autotest_lib.server import utils
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_FirewallHolePunchServer', host=host)
-
-parallel_simple(run, machines)
-
diff --git a/server/site_tests/network_FirewallHolePunchServer/network_FirewallHolePunchServer.py b/server/site_tests/network_FirewallHolePunchServer/network_FirewallHolePunchServer.py
deleted file mode 100644
index 76a85c6..0000000
--- a/server/site_tests/network_FirewallHolePunchServer/network_FirewallHolePunchServer.py
+++ /dev/null
@@ -1,198 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import pprint
-import socket
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros import stress
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-_CLIENT_COMPLETE_FLAG = '/tmp/network_FirewallHolePunch'
-
-class network_FirewallHolePunchServer(test.test):
-    """Server test half of the FirewallHolePunch test."""
-    version = 1
-
-
-    def connect_to_dut(self):
-        """Attempts to connect to the DUT
-
-        @returns True if connection was successful; False otherwise.
-
-        """
-        clientsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        clientsocket.settimeout(5)
-
-        connected = False
-        try:
-            clientsocket.connect((self.hostname, self.port))
-            connected = True
-            logging.debug('Connected to client')
-        except socket.timeout:
-            logging.debug('Socket connection to DUT failed.')
-
-        return connected
-
-
-    def wait_for_client_test(self):
-        """Waits for the client test to complete it's task.
-
-        @returns True if the client responds to the request; False otherwise.
-
-        """
-
-        for i in range(30):
-            result = self.client.run('ls %s' %  _CLIENT_COMPLETE_FLAG,
-                                     ignore_status=True)
-            if result.exit_status == 0:
-                return True
-            time.sleep(1)
-        return False
-
-
-    def functional_test(self, test_error, test_fail, connected):
-        """Performs a functional testing of the firewall.
-
-        This performs a single test while coordinating with the client test.
-
-        @param test_error: string of the test error message
-        @param test_fail: string of the test fail message
-        @param connected: boolean test if the connection attempt should have
-                          passed or failed.
-
-        @raises: TestError if the client flag was not updated
-        @raises: TestFail if the connection expection is not met
-
-        """
-
-        self.client.run('rm %s' %  _CLIENT_COMPLETE_FLAG, ignore_status=True)
-        if self.wait_for_client_test() is False:
-            raise error.TestError(test_error)
-        if self.connect_to_dut() is connected:
-            raise error.TestFail(test_fail)
-
-
-    def perform_tests(self):
-        """Performs all of the tests in the script."""
-
-        for test in self.tests:
-            logging.debug('Performing...')
-            logging.debug(pprint.pprint(test))
-
-            self.functional_test(test['server_error'],
-                                 test['server_fail'],
-                                 test['server_connected'])
-
-
-    def run_once(self, host, port=8888):
-        """Run the test.
-
-        @param host: the host object
-        @param port: integer value for the port the client to listen on
-
-        """
-
-        # Strict ordering matters here.  If an invalid order is given
-        # below an exception will be thrown in the client test.
-        self.tests = [# Login, fail to connect
-            {'server_error': 'The client test did not login',
-             'server_fail' : 'Server was able to connect (login).',
-             'server_connected' : True,
-             'client_command' : 'login',
-             'client_error': 'Did not receive command to login (login)'
-            },
-            # Launch App, fail to connect
-            {'server_error': 'The client test did not launch the app',
-             'server_fail' : 'Server was able to connect (setup).',
-             'server_connected' : True,
-             'client_command' : 'launch app',
-             'client_error': 'Did not receive command to launch app (setup)'
-            },
-            # Start server, connect
-            {'server_error': 'The client test did not open the port. (1)',
-             'server_fail' : 'Server was unable to connect (1).',
-             'server_connected' : False,
-             'client_command' : 'start server',
-             'client_error': 'Did not receive command to start server (1)'
-            },
-            # Stop server, fail to connect
-            {'server_error' : 'The client test did not close the port',
-             'server_fail' : str('Server was able to connect to the port. (1) '
-                                '(It should not have been able to do so.)'),
-             'server_connected' : True,
-             'client_command' : 'stop server',
-             'client_error' : 'Did not receive command to stop server'
-            },
-            # Start server, connect
-            {'server_error' : 'The client test did not open the port. (2)',
-             'server_fail'  : 'Server was unable to connect (2).',
-             'server_connected'  : False,
-             'client_command' : 'start server',
-             'client_error' : 'Did not receive command to start server (2)'
-            },
-            # Quit app, fail to connect
-            {'server_error' : 'The client test did not close the app.',
-             'server_fail'  : str('Server was able to connect to the port (2). '
-                                '(It should not have been able to do so.)'),
-             'server_connected'  : True,
-             'client_command' : 'exit app',
-             'client_error' : 'Did not receive command to close app.'
-            },
-            # Telemetry cannot relaunch a closed extension; logout and back in.
-            # Logout, fail to connect
-            {'server_error' : 'The client test did not quit',
-             'server_fail' : str('Server was able to connect to the port (3). '
-                                '(It should not have been able to do so.)'),
-             'server_connected' : True,
-             'client_command' : 'logout',
-             'client_error': 'Did not receive command to exit.'
-            },
-            # Login, fail to connect
-            {'server_error': 'The client test did not login',
-             'server_fail' : 'Server was able to connect (login).',
-             'server_connected' : True,
-             'client_command' : 'login',
-             'client_error': 'Did not receive command to login (login)'
-            },
-            # Launch app, fail to connect
-            {'server_error': 'The client test did not launch the app',
-             'server_fail' : 'Server was able to connect (setup2).',
-             'server_connected' : True,
-             'client_command' : 'launch app',
-             'client_error': 'Did not receive command to launch app (setup2)'
-            },
-            # Start server, connect
-            {'server_error': 'The client test did not open the port. (1)',
-             'server_fail' : 'Server was unable to connect (1).',
-             'server_connected' : False,
-             'client_command' : 'start server',
-             'client_error': 'Did not receive command to start server (1)'
-            },
-            # Logout, fail to connect
-            {'server_error' : 'The client test did not quit',
-             'server_fail' : str('Server was able to connect to the port (3). '
-                                '(It should not have been able to do so.)'),
-             'server_connected' : True,
-             'client_command' : 'logout',
-             'client_error': 'Did not receive command to exit.'
-            }
-            ]
-
-        self.client = host
-        self.hostname = self.client.hostname
-        self.port = port
-        client_at = autotest.Autotest(self.client)
-
-        self.client.run('rm %s' %  _CLIENT_COMPLETE_FLAG, ignore_status=True)
-
-        stressor = stress.CountedStressor(self.perform_tests)
-        stressor.start(1)
-        client_at.run_test('network_FirewallHolePunch',
-                           test_sequence=self.tests,
-                           port=self.port)
-
diff --git a/server/site_tests/network_StressServoEthernetPlug/control b/server/site_tests/network_StressServoEthernetPlug/control
deleted file mode 100644
index 701a63d..0000000
--- a/server/site_tests/network_StressServoEthernetPlug/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# Test expects to be run on a jailbroken device in developer mode.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "network_StressServoEthernetPlug"
-PURPOSE = "Servo based ChromeOS functional tests."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "network"
-TEST_TYPE = "server"
-
-DOC = """
-This test uses servo to repeatedly plug and unplug the ethernet device,
-then validate appropriate behavior.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-num_iterations = int(args_dict.get('num_iterations', 10000))
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("network_StressServoEthernetPlug", host=host,
-                 disable_sysinfo=True, num_iterations=num_iterations)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_StressServoEthernetPlug/network_StressServoEthernetPlug.py b/server/site_tests/network_StressServoEthernetPlug/network_StressServoEthernetPlug.py
deleted file mode 100644
index 6588c1e..0000000
--- a/server/site_tests/network_StressServoEthernetPlug/network_StressServoEthernetPlug.py
+++ /dev/null
@@ -1,120 +0,0 @@
-import logging, re, time
-
-from autotest_lib.server import autotest, test, hosts
-from autotest_lib.server.cros import stress
-from autotest_lib.client.common_lib import error
-
-# Timeout duration to wait for a DHCP response.
-# It usually doesn't take this long, but just in case.
-TIMEOUT = 60
-
-class network_StressServoEthernetPlug(test.test):
-
-    ETH_MAC = 'mac'
-    ETH_IP = 'ipaddress'
-
-    version = 1
-
-
-    def initialize(self, host):
-        self.host = host
-        self.host_iface = None
-        self.servo_iface = None
-        self.servo_eth_up()
-
-        end_time = time.time() + TIMEOUT
-        while time.time() < end_time:
-            self.eth_interfaces = self.get_eth_interfaces()
-            if len(self.eth_interfaces) >= 2:
-                break
-            time.sleep(1)
-
-        # Assuming 2 ethernet interfaces, the interface not for host
-        # is that associated with servo.
-        for iface, eth_dict in self.eth_interfaces.iteritems():
-            if eth_dict[self.ETH_IP] == self.host.hostname:
-                self.host_iface = iface
-            else:
-                self.servo_iface = iface
-
-        if not self.servo_iface:
-            raise error.TestError('Cannot find servo ethernet interface')
-
-        logging.info('Servo eth: %s', self.servo_iface)
-        logging.info('Host eth: %s', self.host_iface)
-
-
-    def servo_eth_up(self):
-        logging.info('Bringing up ethernet')
-        self.host.servo.set('dut_hub_on', 'yes')
-
-
-    def servo_eth_down(self):
-        logging.info('Bringing down ethernet')
-        self.host.servo.set('dut_hub_on', 'no')
-
-
-    def get_eth_interfaces(self):
-        """ Gets the ethernet device object.
-
-        Returns:
-            A dictionary of ethernet devices.
-            {
-                'eth<x>':
-                {
-                    'mac': <mac address>,
-                    'ipaddress': <ipaddress>,
-                }
-            }
-        """
-        results = self.host.run('ifconfig').stdout.split('\n')
-        eth_dict = {}
-
-        iterator = results.__iter__()
-        for line in iterator:
-            # Search for the beginning of an interface section.
-            iface_start = re.search('^(eth\S+)\s+Link encap:Ethernet\s+HWaddr'
-                                    '\s+(\S+)', line)
-            if iface_start:
-                (iface, hwaddr) = iface_start.groups()
-                line = iterator.next()
-                result = re.search('^\s+inet addr:(\S+)\s+', line)
-                ipaddress = None
-                if result:
-                    ipaddress = result.groups()[0]
-                eth_dict[iface] = {self.ETH_MAC: hwaddr, self.ETH_IP: ipaddress}
-        return eth_dict
-
-
-    def verify_eth_status(self, up_list, timeout=TIMEOUT):
-        """ Verify the up_list ifaces are up (and its contrapositive). """
-        end_time = time.time() + timeout
-        interfaces = {}
-        while time.time() < end_time:
-            interfaces = self.get_eth_interfaces()
-            error_message = ('Expected eth status %s but instead got %s' %
-                             (up_list, interfaces.keys()))
-            if set(interfaces.keys()) == set(up_list):
-                # Check to make sure all the interfaces are up.
-                for iface, eth_dict in interfaces.iteritems():
-                    if not eth_dict[self.ETH_IP]:
-                        error_message = ('Ethernet interface %s did not '
-                                         'receive address.' % iface)
-                        break
-                else:
-                    # All desired interfaces are up, and they have ip addresses.
-                    break
-            time.sleep(1)
-        else:
-            # If the while loop terminates without interruption, we've timed out
-            # waiting for the interface.
-            raise error.TestFail(error_message)
-
-
-    def run_once(self, num_iterations=1):
-        for iteration in range(num_iterations):
-            logging.info('Executing iteration %d', iteration)
-            self.servo_eth_down()
-            self.verify_eth_status([self.host_iface])
-            self.servo_eth_up()
-            self.verify_eth_status([self.host_iface, self.servo_iface])
diff --git a/server/site_tests/network_WiFi_APSupportedRates/control.11ac b/server/site_tests/network_WiFi_APSupportedRates/control.11ac
deleted file mode 100644
index 8045635..0000000
--- a/server/site_tests/network_WiFi_APSupportedRates/control.11ac
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'briannorris'
-NAME = 'network_WiFi_APSupportedRates.11ac'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-# TODO(crbug.com/953702): move to wifi_matfunc once stable.
-ATTRIBUTES = ('suite:wifi_flaky')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-Verify that we respond sanely to APs that disable certain legacy bitrates.
-"""
-
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    ap_config = hostap_config.HostapConfig(
-        channel=157, mode=hostap_config.HostapConfig.MODE_11AC_MIXED,
-        vht_center_channel=155,
-        vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80,
-        supported_rates=[36, 48, 54],
-        basic_rates=[36],
-        n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS])
-
-    job.run_test('network_WiFi_APSupportedRates',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args,
-                 additional_params=ap_config)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_APSupportedRates/control.11g b/server/site_tests/network_WiFi_APSupportedRates/control.11g
deleted file mode 100644
index 494195b..0000000
--- a/server/site_tests/network_WiFi_APSupportedRates/control.11g
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'briannorris'
-NAME = 'network_WiFi_APSupportedRates.11g'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-# TODO(crbug.com/953702): move to wifi_matfunc once stable.
-ATTRIBUTES = ('suite:wifi_flaky')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-Verify that we respond sanely to APs that disable certain legacy bitrates.
-"""
-
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    ap_config = hostap_config.HostapConfig(channel=6,
-        mode=hostap_config.HostapConfig.MODE_11G,
-        supported_rates=[24, 36, 48, 54],
-        basic_rates=[24])
-
-    job.run_test('network_WiFi_APSupportedRates',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args,
-                 additional_params=ap_config)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_APSupportedRates/network_WiFi_APSupportedRates.py b/server/site_tests/network_WiFi_APSupportedRates/network_WiFi_APSupportedRates.py
deleted file mode 100644
index ac45406..0000000
--- a/server/site_tests/network_WiFi_APSupportedRates/network_WiFi_APSupportedRates.py
+++ /dev/null
@@ -1,110 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import tcpdump_analyzer
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_APSupportedRates(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that the WiFi chip honors the SupportedRates IEs sent by the AP."""
-    version = 1
-
-    def check_bitrates_in_capture(self, pcap_result, supported_rates):
-        """
-        Check that bitrates look like we expect in a packet capture.
-
-        The DUT should not send packets at bitrates that were disabled by the
-        AP.
-
-        @param pcap_result: RemoteCaptureResult tuple.
-        @param supported_rates: List of upported legacy bitrates (Mbps).
-
-        """
-        # Filter notes:
-        # (a) Some chips use self-addressed frames to tune channel performance.
-        #     They don't carry host-generated traffic, so filter them out.
-        # (b) Use TA (not SA), because multicast may retransmit our
-        #     "Source-Addressed" frames at rates we don't control.
-        # (c) BSSID filter: non-BSSID frames include Ack and BlockAck frames;
-        #     these rates tend to match the frames to which they're responding
-        #     (i.e., not under DUT's control).
-        # (d) QoS null filter: these frames are short (no data payload), and
-        #     it's more important that they be reliable (e.g., for PS
-        #     transitions) than fast. See b/132825853#comment40,
-        #     for example.
-        # Items (b) and (c) wouldn't be much problem if our APs actually
-        # respected the Supported Rates IEs that we're configuring, but current
-        # (2019-06-28) test AP builds do not appear to.
-        frame_filter = ('wlan.ta==%s and (not wlan.da==%s) and wlan.bssid==%s'
-                        ' and wlan.fc.type_subtype!=%s' %
-                        (self.context.client.wifi_mac,
-                         self.context.client.wifi_mac,
-                         self.context.router.get_hostapd_mac(0),
-                         tcpdump_analyzer.WLAN_QOS_NULL_TYPE))
-        frames = tcpdump_analyzer.get_frames(pcap_result.local_pcap_path,
-                                             frame_filter, reject_bad_fcs=False)
-        if not frames:
-            raise error.TestError('Failed to capture any relevant frames')
-
-        # Some frames don't have bitrate fields -- for example, if they are
-        # using MCS rates (not legacy rates). For MCS rates, that's OK, since
-        # that satisfies this test requirement (not using "unsupported legacy
-        # rates"). So ignore them.
-        bad_frames = [f for f in frames
-                      if f.bit_rate is not None and
-                         f.bit_rate not in supported_rates]
-        if bad_frames:
-            # Remove duplicates.
-            bad_rates = list(set(f.bit_rate for f in bad_frames))
-            logging.error('Unexpected rate for frames:')
-            for f in bad_frames:
-                logging.error('%s', f)
-            raise error.TestFail('Saw frames at rates %r (expected %r).' %
-                                 (bad_rates, supported_rates))
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """Hook into super class to take control files parameters.
-
-        @param commandline_args dict of parsed parameters from the autotest.
-        @param additional_params HostapConfig object.
-
-        """
-        self._ap_config = additional_params
-
-    def run_once(self):
-        """Verify that we respond sanely to APs that disable legacy bitrates.
-        """
-        # See b/138406224. ath10k only supports this on CrOS kernels >=4.14.
-        if self.context.client.host.get_board().split(':')[1] == 'scarlet':
-            raise error.TestNAError('Scarlet does not support this feature on '
-                                    'kernel 4.4')
-
-        ap_config = self._ap_config
-
-        # TODO(b/160281713): remove this if we cherry-pick the bitrate fixes
-        # and deploy an OS update.
-        if (ap_config.frequency < 5000 and
-            self.context.capture_host.board == 'whirlwind'):
-            raise error.TestNAError('Whirlwind does not capture 802.11g/OFDM '
-                                    'rates correctly (b/160281713)')
-
-        self.context.configure(ap_config)
-        self.context.capture_host.start_capture(
-                ap_config.frequency,
-                width_type=ap_config.packet_capture_mode)
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid())
-        self.context.assert_connect_wifi(assoc_params)
-        self.context.assert_ping_from_dut()
-        results = self.context.capture_host.stop_capture()
-        if len(results) != 1:
-            raise error.TestError('Expected to generate one packet '
-                                  'capture but got %d captures instead.' %
-                                  len(results))
-        self.check_bitrates_in_capture(results[0],
-                                       ap_config.supported_rates)
diff --git a/server/site_tests/network_WiFi_AssocConfigPerformance/control b/server/site_tests/network_WiFi_AssocConfigPerformance/control
index abbf21e..ee38dbf 100644
--- a/server/site_tests/network_WiFi_AssocConfigPerformance/control
+++ b/server/site_tests/network_WiFi_AssocConfigPerformance/control
@@ -10,6 +10,7 @@
 # metrics, and move this back to suite:wifi_perf.
 ATTRIBUTES = ('suite:wifi_flaky')
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 Tests the time taken to authenticate, associate, and get an IP.
diff --git a/server/site_tests/network_WiFi_AssocConfigPerformance/network_WiFi_AssocConfigPerformance.py b/server/site_tests/network_WiFi_AssocConfigPerformance/network_WiFi_AssocConfigPerformance.py
index c2db5ca..9933803 100644
--- a/server/site_tests/network_WiFi_AssocConfigPerformance/network_WiFi_AssocConfigPerformance.py
+++ b/server/site_tests/network_WiFi_AssocConfigPerformance/network_WiFi_AssocConfigPerformance.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -170,5 +171,3 @@
                     descript=prefix + '_total',
                     perf_times=total_times,
                     graph_description=graph_descript)
-
-
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch001 b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch001
index 95e60d4..2ee88b1 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch001
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch001
@@ -8,6 +8,7 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch006 b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch006
index 6001511..d2f3324 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch006
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch006
@@ -1,13 +1,14 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = 'wiley, pstew, quiche'
+AUTHOR = 'arowa'
 NAME = 'network_WiFi_AttenuatedPerf.ht40_ch006'
 ATTRIBUTES = "suite:wifi_atten_perf"
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch011 b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch011
deleted file mode 100644
index 8e673d9..0000000
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch011
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_AttenuatedPerf.ht40_ch011'
-ATTRIBUTES = "suite:wifi_atten_perf"
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an open HT40 802.11n network across multiple
-attenuation levels.
-"""
-
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    ap_config = hostap_config.HostapConfig(
-            channel=11,
-            n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40_MINUS],
-            mode=hostap_config.HostapConfig.MODE_11N_PURE)
-    attenuation_increment = 4
-    final_attenuation = 100
-    job.run_test('network_WiFi_AttenuatedPerf',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=(ap_config, attenuation_increment, final_attenuation))
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch044 b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch044
index 243834a..d98545b 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch044
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch044
@@ -8,6 +8,7 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch153 b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch153
deleted file mode 100644
index bd389fc..0000000
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch153
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_AttenuatedPerf.ht40_ch153'
-ATTRIBUTES = "suite:wifi_atten_perf"
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an open HT40 802.11n network across multiple
-attenuation levels.
-"""
-
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    ap_config = hostap_config.HostapConfig(
-            channel=153,
-            n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40_MINUS],
-            mode=hostap_config.HostapConfig.MODE_11N_PURE)
-    attenuation_increment = 4
-    final_attenuation = 100
-    job.run_test('network_WiFi_AttenuatedPerf',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=(ap_config, attenuation_increment, final_attenuation))
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch036 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch036
index 6cdb8e5..8ac6bb3 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch036
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch036
@@ -8,6 +8,7 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
@@ -25,6 +26,7 @@
             channel=36,
             mode=hostap_config.HostapConfig.MODE_11AC_PURE,
             n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40],
+            ac_capabilities=[hostap_config.HostapConfig.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7],
             vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40)
     attenuation_increment = 6
     final_attenuation = 100
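For context on the ac_capabilities entry added above: in 802.11ac the advertised maximum A-MPDU length is 2**(13 + exponent) - 1 bytes, so MAX_A_MPDU_LEN_EXP7 corresponds to exponent 7 and lets the AP accept aggregates of roughly 1 MB, which is what a peak-throughput measurement wants. A quick worked check of that arithmetic (illustration only, not part of the control file):

# 802.11ac rule: maximum A-MPDU length = 2**(13 + exponent) - 1 bytes.
def max_ampdu_bytes(exponent):
    return 2 ** (13 + exponent) - 1

print(max_ampdu_bytes(7))  # -> 1048575 bytes, the largest aggregate EXP7 advertises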
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch060 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch060
deleted file mode 100644
index f7e3ec4..0000000
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch060
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_AttenuatedPerf.vht40_ch060'
-ATTRIBUTES = "suite:wifi_atten_perf"
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an open VHT40 802.11ac network across multiple
-attenuation levels.
-"""
-
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    ap_config = hostap_config.HostapConfig(
-            channel=60,
-            mode=hostap_config.HostapConfig.MODE_11AC_PURE,
-            n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40],
-            vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40)
-    attenuation_increment = 6
-    final_attenuation = 100
-    job.run_test('network_WiFi_AttenuatedPerf',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=(ap_config, attenuation_increment, final_attenuation))
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch149 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch149
index ee874d2..1f52d9e 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch149
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch149
@@ -8,6 +8,7 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
@@ -25,6 +26,7 @@
             channel=149,
             mode=hostap_config.HostapConfig.MODE_11AC_PURE,
             n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40],
+            ac_capabilities=[hostap_config.HostapConfig.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7],
             vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40)
     attenuation_increment = 6
     final_attenuation = 100
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch157 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch157
deleted file mode 100644
index e45afcd..0000000
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch157
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_AttenuatedPerf.vht40_ch157'
-ATTRIBUTES = "suite:wifi_atten_perf"
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an open VHT40 802.11ac network across multiple
-attenuation levels.
-"""
-
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    ap_config = hostap_config.HostapConfig(
-            channel=157,
-            mode=hostap_config.HostapConfig.MODE_11AC_PURE,
-            n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40],
-            vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40)
-    attenuation_increment = 6
-    final_attenuation = 100
-    job.run_test('network_WiFi_AttenuatedPerf',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=(ap_config, attenuation_increment, final_attenuation))
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch042 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch042
index 42ff259..ee3ef02 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch042
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch042
@@ -8,6 +8,7 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
@@ -22,7 +23,8 @@
 def run(machine):
     host = hosts.create_host(machine)
     n_caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]
-    ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80]
+    ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80,
+               hostap_config.HostapConfig.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7]
     ac_mode = hostap_config.HostapConfig.MODE_11AC_PURE
     channel_width_80_mhz = hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80
     ap_config = hostap_config.HostapConfig(
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch155 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch155
index c3c465a..110dc46 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch155
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch155
@@ -8,6 +8,7 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
@@ -22,7 +23,8 @@
 def run(machine):
     host = hosts.create_host(machine)
     n_caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]
-    ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80]
+    ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80,
+               hostap_config.HostapConfig.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7]
     ac_mode = hostap_config.HostapConfig.MODE_11AC_PURE
     channel_width_80_mhz = hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80
     ap_config = hostap_config.HostapConfig(
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/network_WiFi_AttenuatedPerf.py b/server/site_tests/network_WiFi_AttenuatedPerf/network_WiFi_AttenuatedPerf.py
index 1bfbd6f..db0c207 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/network_WiFi_AttenuatedPerf.py
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/network_WiFi_AttenuatedPerf.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -87,7 +88,7 @@
                     atten, self._ap_config.frequency)
             logging.info('RvR test: current attenuation = %d dB', atten)
 
-            # Give this attenuation level a quick sanity test. If we can't stay
+            # Give this attenuation level a quick check. If we can't stay
             # associated and handle a few pings, we probably won't get
             # meaningful results out of netperf.
             try:
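The reworded comment above describes the guard the rate-vs-range loop applies at every attenuation step: confirm the DUT can stay associated and answer a few pings before spending time on netperf. A stripped-down, hypothetical sketch of that pattern (the helper callables are placeholders, not the real autotest attenuator or ping APIs):

# Hypothetical sketch of the per-attenuation guard; helpers are placeholders.
def measure_at_attenuation(atten, set_attenuation, ping_ok, run_netperf):
    set_attenuation(atten)
    if not ping_ok():
        # Cannot stay associated or pass a few pings, so netperf output
        # at this attenuation would not be meaningful.
        return None
    return run_netperf()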
diff --git a/server/site_tests/network_WiFi_BSSTMReq/control b/server/site_tests/network_WiFi_BSSTMReq/control
deleted file mode 100644
index 6fa2d80..0000000
--- a/server/site_tests/network_WiFi_BSSTMReq/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'matthewmwang'
-TIME = 'SHORT'
-NAME = 'network_WiFi_BSSTMReq'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release,')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-WiFi_BSSTMReq test configures two APs with the same ssid and runs the
-network_WiFi_BSSTMReq test which uses these APs to test a BSS Transition
-Management Request.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_BSSTMReq',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_BSSTMReq/network_WiFi_BSSTMReq.py b/server/site_tests/network_WiFi_BSSTMReq/network_WiFi_BSSTMReq.py
deleted file mode 100644
index a49c868..0000000
--- a/server/site_tests/network_WiFi_BSSTMReq/network_WiFi_BSSTMReq.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_BSSTMReq(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests a BSS Transition Management Request sent from the AP
-
-    This test seeks to associate the DUT with an AP with a set of
-    association parameters, create a second AP with a second set of
-    parameters but the same SSID, and send a BSS Transition Management Request
-    to the client. After that, the client will send a BSS Transition Management
-    Response back to the first AP. We seek to observe that the DUT successfully
-    connects to the second AP in a reasonable amount of time.
-    """
-
-    version = 1
-    TIMEOUT_SECONDS = 15
-
-    def _run_test(self, wait_for_scan=True):
-        """Send BSS TM Requests and verify the behaviour.
-
-        Setup an AP and make the DUT connect to that AP. Setup a second AP with
-        the same SSID. Then send a BSS TM Request from the first AP to the
-        second AP and verify that the DUT connects to the second AP.
-
-        Args:
-            wait_for_scan: when True, trigger a scan on the DUT and ensure that
-                it has seen AP#2 before AP#1 sends a BSS TM Request.
-        """
-        router0_conf = hostap_config.HostapConfig(channel=1)
-        router1_conf = hostap_config.HostapConfig(channel=48,
-                mode=hostap_config.HostapConfig.MODE_11A)
-        client_conf = xmlrpc_datatypes.AssociationParameters()
-
-        # Capture the detection and connection to the second AP.
-        self.context.capture_host.start_capture(router1_conf.frequency)
-
-        # Configure the initial AP.
-        self.context.configure(router0_conf)
-        router_ssid = self.context.router.get_ssid()
-
-        # Connect to the initial AP.
-        client_conf.ssid = router_ssid
-        self.context.assert_connect_wifi(client_conf)
-
-        # Setup a second AP with the same SSID.
-        router1_conf.ssid = router_ssid
-        self.context.configure(router1_conf, multi_interface=True)
-
-        # BSSID of the second AP. The DUT will have to roam to that second AP.
-        bssid_roam = self.context.router.get_hostapd_mac(1)
-
-        # Flush all scanned BSS from supplicant. Otherwise the DUT may have
-        # noticed them in a scan before, which would change the behaviour of the
-        # test.
-        self.context.client.flush_bss(age=0)
-
-        if wait_for_scan:
-            # Wait for DUT to see the second AP.
-            logging.info('Scanning to find BSS "%s"', bssid_roam)
-            self.context.client.wait_for_bss(bssid_roam)
-
-        # Send BSS Transition Management Request to client.
-        reply = self.context.router.send_bss_tm_req(
-            self.context.client.wifi_mac,
-            [bssid_roam])
-        if reply == 'OK':
-            pass
-        elif reply.startswith('Unknown command'):
-            raise error.TestNAError('AP does not support BSS Transition '
-                                    'Management')
-        else:
-            raise error.TestFail('Failed to send BSS TM Request: %s' % reply)
-
-        # Expect that the DUT will re-connect to the new AP.
-        # In some cases, the DUT may have roamed to the second AP on its own,
-        # without waiting for the BSS TM Request. In those particular runs, the
-        # test would not test the BSS TM feature, but the result would be
-        # identical. And in most runs the roam will be caused by the BSS TM
-        # Request anyway, so it's still a valid way to test the feature.
-        # TODO: Enforce correct roaming behaviour when porting the test to Tast.
-        if not self.context.client.wait_for_roam(
-                bssid_roam, timeout_seconds=self.TIMEOUT_SECONDS):
-            raise error.TestFail('Failed to roam after%s scanning.' %
-                    ('' if wait_for_scan else ' not'))
-
-        # Tear down.
-        self.context.client.shill.disconnect(router_ssid)
-        self.context.router.deconfig_aps()
-        self.context.capture_host.stop_capture()
-
-    def run_once(self):
-        """Test body."""
-        # Before sending the BSS TM Request, run a scan and make sure the DUT
-        # has seen the second AP. In that case, the DUT will typically re-use
-        # the result of the scan when receiving the request instead of probing
-        # the second AP.
-        self._run_test(wait_for_scan=True)
-
-        # After setting up both APs, immediately send the BSS TM Request before
-        # the DUT has scanned and noticed the second AP (at least in the
-        # majority of test runs). Instead of relying on the result of a previous
-        # scan, the DUT will probe for the second AP when receiving the
-        # transition request.
-        self._run_test(wait_for_scan=False)
-
-    def cleanup(self):
-        """Cleanup function."""
-        super(network_WiFi_BSSTMReq, self).cleanup()
diff --git a/server/site_tests/network_WiFi_BT_AntennaCoex/control b/server/site_tests/network_WiFi_BT_AntennaCoex/control
deleted file mode 100644
index 8dc2b9a..0000000
--- a/server/site_tests/network_WiFi_BT_AntennaCoex/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'kirtika'
-NAME = 'network_WiFi_BT_AntennaCoex'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell, bluetooth'
-
-DOC = """
-WiFi and BT share an antenna on most wifi modules. This test runs a sequence
-of turning wifi off/on mixed with turning BT off/on to check that, at the end
-of the sequence, both wifi and BT are still functional. This protects against
-firmware bugs where one subsystem won't release the antenna and leaves the
-other subsystem deadlocked. See b/79233533.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_BT_AntennaCoex', host=host,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_BT_AntennaCoex/network_WiFi_BT_AntennaCoex.py b/server/site_tests/network_WiFi_BT_AntennaCoex/network_WiFi_BT_AntennaCoex.py
deleted file mode 100644
index e98136a..0000000
--- a/server/site_tests/network_WiFi_BT_AntennaCoex/network_WiFi_BT_AntennaCoex.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.bluetooth import bluetooth_adapter_tests
-from autotest_lib.server.cros.multimedia import bluetooth_facade_adapter
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_test_context_manager
-
-class network_WiFi_BT_AntennaCoex(
-        bluetooth_adapter_tests.BluetoothAdapterTests):
-    """Test various sequences that mix wifi off/on, BT off/on
-    and ensure that wifi/BT functionality is intact during
-    and after these sequences.
-
-    """
-
-    # Sequence being tested:
-    # 1. wifi goes down
-    #    - verify bt discovery works
-    # 2. bt goes down
-    # 3. wifi comes up (test scan and connect)
-    #    - b/79233533 will cause step 3 to fail.
-    # 4. bt comes up (verify bt discovery again)
-    def _test_bringup_wifi_with_bt_off(self):
-        # Turn off wifi
-        client = self.wifi_context.client
-        client.set_device_enabled(client.wifi_if, False)
-
-        # Check that BT is up
-        self.test_power_on_adapter()
-        self.test_bluetoothd_running()
-
-        # Run a BT scan.
-        self.test_start_discovery()
-
-        utils.poll_for_condition(self.bluetooth_facade.is_discovering,
-                                 timeout=5,
-                                 sleep_interval=1,
-                                 desc="Checking BT scan in progress")
-        self.test_stop_discovery()
-
-        # Turn off BT
-        self.test_power_off_adapter()
-        self.test_bluetoothd_running()
-
-        # Turn wifi back on
-        client.set_device_enabled(client.wifi_if, True)
-
-        # Expect that the DUT will scan, find the AP again.
-        client.wait_for_bsses(self.ssid, 1)
-        self.wifi_context.wait_for_connection(self.ssid)
-
-        # Bring BT back up
-        self.test_power_on_adapter()
-        self.test_bluetoothd_running()
-
-    # Sequence being tested:
-    # 1. bt goes down
-    #    - verify wifi scanning works
-    # 2. wifi goes down
-    # 3. bt comes up (test discovery works)
-    # 4. wifi comes up (verify by connecting to AP)
-    def _test_bringup_bt_with_wifi_off(self):
-        # Turn off BT
-        self.test_power_off_adapter()
-        self.test_bluetoothd_running()
-
-        # Test that we can scan on wifi
-        client = self.wifi_context.client
-        client.wait_for_bsses(self.ssid, 1, timeout_seconds=10)
-
-        # Turn off wifi
-        client = self.wifi_context.client
-        client.set_device_enabled(client.wifi_if, False)
-
-        # Turn on BT
-        self.test_power_on_adapter()
-        self.test_bluetoothd_running()
-
-        # Run a BT scan.
-        self.test_start_discovery()
-
-        utils.poll_for_condition(self.bluetooth_facade.is_discovering,
-                                 timeout=5,
-                                 sleep_interval=1,
-                                 desc="Checking BT scan in progress")
-        self.test_stop_discovery()
-
-        # Turn wifi back on
-        client.set_device_enabled(client.wifi_if, True)
-
-        # Expect that the DUT will scan, find the AP again.
-        client.wait_for_bsses(self.ssid, 1)
-        self.wifi_context.wait_for_connection(self.ssid)
-
-    def warmup(self, host, raw_cmdline_args):
-        """Stashes away parameters for use by run_once().
-
-        @param host Host object representing the client DUT.
-        @param raw_cmdline_args Raw input from autotest.
-        @param additional_params One item from CONFIGS in control file.
-
-        """
-        # pylint: disable=attribute-defined-outside-init
-        self.host = host
-        self.cmdline_args = utils.args_to_dict(raw_cmdline_args)
-
-        # Initialize BT.
-        self.ble_adapter = \
-                bluetooth_facade_adapter.BluetoothFacadeRemoteAdapter
-        self.bluetooth_facade = self.ble_adapter(self.host, None)
-
-        # Initialize wifi
-        self.wifi_context = wifi_test_context_manager.WiFiTestContextManager(
-                self.__class__.__name__, self.host, self.cmdline_args,
-                self.debugdir)
-        self.wifi_context.setup()
-
-    def run_once(self):
-        """Run a series of WiFi on/off, BT on/off sequences."""
-
-        # Try the co-ex tests on both 2.4 and 5 GHz bands.
-        # Run 5 GHz first so that if the test throws an
-        # exception in the flow with 2.4 GHz and terminates abruptly,
-        # we have the 5 GHz data-point to compare against. WiFi is
-        # alone on 5 GHz, 2.4 GHz is the band with wifi-bt co-ex.
-        frequencies = [5180, 5745, 2412, 2462]
-
-        for freq in frequencies:
-            mode_n = hostap_config.HostapConfig.MODE_11N_PURE
-            self.wifi_context.configure(
-                    hostap_config.HostapConfig(frequency=freq, mode=mode_n))
-            # pylint: disable=attribute-defined-outside-init
-            self.ssid = self.wifi_context.router.get_ssid()
-            self.wifi_context.assert_connect_wifi(
-                    xmlrpc_datatypes.AssociationParameters(ssid=self.ssid))
-
-            self._test_bringup_wifi_with_bt_off()
-            self._test_bringup_bt_with_wifi_off()
-
-            self.wifi_context.client.shill.disconnect(self.ssid)
-            self.wifi_context.router.deconfig()
-
-    def cleanup(self):
-        super(network_WiFi_BT_AntennaCoex, self).cleanup()
-        if hasattr(self, 'wifi_context'):
-            self.wifi_context.teardown()
diff --git a/server/site_tests/network_WiFi_BeaconInterval/control.wifi_bintval b/server/site_tests/network_WiFi_BeaconInterval/control.wifi_bintval
deleted file mode 100644
index db02123..0000000
--- a/server/site_tests/network_WiFi_BeaconInterval/control.wifi_bintval
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_BeaconInterval.wifi_bintval'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that the beacon interval set on the AP was successfully
-adopted by the DUT.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_BeaconInterval',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_BeaconInterval/network_WiFi_BeaconInterval.py b/server/site_tests/network_WiFi_BeaconInterval/network_WiFi_BeaconInterval.py
deleted file mode 100644
index e891e21..0000000
--- a/server/site_tests/network_WiFi_BeaconInterval/network_WiFi_BeaconInterval.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros.network import iw_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_BeaconInterval(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that we understand the routers negotiated beacon interval."""
-    version = 1
-
-
-    def run_once(self):
-        """Body of the test."""
-        bint_val = 200
-        configuration = hostap_config.HostapConfig(
-                channel=6,
-                mode=hostap_config.HostapConfig.MODE_11B,
-                beacon_interval=bint_val)
-        self.context.configure(configuration)
-        assoc_params = xmlrpc_datatypes.AssociationParameters()
-        assoc_params.ssid = self.context.router.get_ssid()
-        self.context.assert_connect_wifi(assoc_params)
-        self.context.client.check_iw_link_value(
-                iw_runner.IW_LINK_KEY_BEACON_INTERVAL,
-                bint_val)
-        self.context.assert_ping_from_dut()
-        self.context.client.shill.disconnect(assoc_params.ssid)
-        self.context.router.deconfig()
diff --git a/server/site_tests/network_WiFi_BgscanBackoff/control.5760noise_check b/server/site_tests/network_WiFi_BgscanBackoff/control.5760noise_check
deleted file mode 100644
index a6814b3..0000000
--- a/server/site_tests/network_WiFi_BgscanBackoff/control.5760noise_check
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'kirtika'
-NAME = 'network_WiFi_BgscanBackoff.5760_noise_check'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-MAX_RESULT_SIZE_KB = 512000
-
-DOC = """
-This test case verifies that bgscan aborts and/or backs off when
-there is consistent outgoing traffic. This is a fork of the legacy test
-that runs the test on channels 1 and 153 to serve two purposes:
-(a) provide more 5 GHz coverage.
-(b) help (a wee bit) to catch noise concerns around 5760 MHz as seen on
-certain Intel SoCs.
-This test can be compared with the '.wifi_bgscan_backoff' variant, to see
-whether channel 153 behaves worse than other 5GHz channels.
-"""
-
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    host = hosts.create_host(machine)
-    caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40]
-    mode = hostap_config.HostapConfig.MODE_11N_MIXED
-    config_first_ap = (hostap_config.HostapConfig(channel=1,
-            mode=mode, n_capabilities=caps))
-    config_second_ap = (hostap_config.HostapConfig(channel=153,
-            mode=mode, n_capabilities=caps))
-    params = [config_first_ap, config_second_ap]
-    job.run_test('network_WiFi_BgscanBackoff',
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=params)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_BgscanBackoff/control.wifi_bgscan_backoff b/server/site_tests/network_WiFi_BgscanBackoff/control.wifi_bgscan_backoff
deleted file mode 100644
index d93b23d..0000000
--- a/server/site_tests/network_WiFi_BgscanBackoff/control.wifi_bgscan_backoff
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_BgscanBackoff.wifi_bgscan_backoff'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-MAX_RESULT_SIZE_KB = 512000
-
-DOC = """
-This test case verifies that bgscan aborts and/or backs off when
-there is consistent outgoing traffic.
-"""
-
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    host = hosts.create_host(machine)
-    mode = hostap_config.HostapConfig.MODE_11N_MIXED
-    config_first_ap = (hostap_config.HostapConfig(channel=1, mode=mode))
-    config_second_ap = (hostap_config.HostapConfig(channel=36, mode=mode))
-    params = [config_first_ap, config_second_ap]
-    job.run_test('network_WiFi_BgscanBackoff',
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=params)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_BgscanBackoff/network_WiFi_BgscanBackoff.py b/server/site_tests/network_WiFi_BgscanBackoff/network_WiFi_BgscanBackoff.py
deleted file mode 100644
index 8975129..0000000
--- a/server/site_tests/network_WiFi_BgscanBackoff/network_WiFi_BgscanBackoff.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.cros.network import iw_runner
-from autotest_lib.client.common_lib.cros.network import ping_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_BgscanBackoff(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that background scan backs off when there is foreground traffic."""
-    version = 1
-
-    BGSCAN_SAMPLE_PERIOD_SECONDS = 100
-    NO_BGSCAN_SAMPLE_PERIOD_SECONDS = 10
-    CONFIGURED_BGSCAN_INTERVAL_SECONDS = 7
-    PING_INTERVAL_SECONDS = 0.1
-
-    # Dwell time for scanning is usually configured to be around 100 ms (some
-    # are higher, around 150 ms), since this is also the standard beacon
-    # interval.  Tolerate spikes in latency up to 250 ms as a way of asking
-    # that our PHY be servicing foreground traffic regularly during background
-    # scans.
-    # See also network_WiFi_OverlappingBSSScan for similar parameters.
-    LATENCY_MARGIN_MS = 250
-    THRESHOLD_BASELINE_LATENCY_MS = 100
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """Hook into super class to take control files parameters.
-
-        @param commandline_args dict of parsed parameters from the autotest.
-        @param additional_params list of HostapConfig objects and LinuxSystem
-           capabilities.
-
-        """
-        self._config_first_ap = additional_params[0]
-        self._config_second_ap = additional_params[1]
-
-    def _find_bss_matching_mac_addr(self, iw, mac_addr):
-        """ Scan and look for a BSS in the scan results with given mac address.
-
-        @param iw iw_runner instantiated on the client
-        @param mac_addr the address to look for
-        """
-        bss_list = iw.scan_dump(self.context.client.wifi_if)
-        logging.debug('Found BSSes: %r', bss_list)
-        return filter(lambda bss: bss.bss == mac_addr, bss_list)
-
-    def run_once(self):
-        """Body of the test."""
-
-        get_assoc_params = lambda conf: xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid(instance=0),
-                bgscan_config=conf)
-        get_ping_config = lambda period: ping_runner.PingConfig(
-                self.context.get_wifi_addr(),
-                interval=self.PING_INTERVAL_SECONDS,
-                count=int(period / self.PING_INTERVAL_SECONDS))
-        self.context.configure(self._config_first_ap)
-        bgscan_config = xmlrpc_datatypes.BgscanConfiguration(
-                short_interval=self.CONFIGURED_BGSCAN_INTERVAL_SECONDS,
-                long_interval=self.CONFIGURED_BGSCAN_INTERVAL_SECONDS,
-                method=xmlrpc_datatypes.BgscanConfiguration.SCAN_METHOD_SIMPLE)
-        self.context.assert_connect_wifi(get_assoc_params(bgscan_config))
-        logging.info('Pinging router with background scans for %d seconds.',
-                     self.BGSCAN_SAMPLE_PERIOD_SECONDS)
-        result_bgscan = self.context.client.ping(
-                get_ping_config(self.BGSCAN_SAMPLE_PERIOD_SECONDS))
-        logging.info('Ping statistics with bgscan: %r', result_bgscan)
-        # Bring up a second AP, make sure that it shows up in bgscans.
-        self._config_second_ap.ssid = self.context.router.get_ssid()
-        self.context.configure(self._config_second_ap,multi_interface=True)
-        logging.info('Without a ping running, ensure that bgscans succeed.')
-        ap_mac = self.context.router.get_hostapd_mac(ap_num=1)
-        logging.debug('Looking for BSS %s', ap_mac)
-        iw = iw_runner.IwRunner(remote_host=self.context.client.host)
-
-        utils.poll_for_condition(
-            condition=lambda: self._find_bss_matching_mac_addr(iw, ap_mac),
-            exception=error.TestFail('Background scans should detect new BSSes '
-                                     'within an associated ESS.'),
-            timeout=self.BGSCAN_SAMPLE_PERIOD_SECONDS,
-            sleep_interval=1)
-
-        self.context.router.deconfig_aps(instance=1)
-        self.context.client.shill.disconnect(
-                self.context.router.get_ssid(instance=0))
-        # Reconfigure AP, so the new bgscan setting can be correctly applied.
-        self.context.configure(self._config_first_ap)
-        # Gather some statistics about ping latencies without scanning going on.
-        self.context.assert_connect_wifi(get_assoc_params(None))
-        logging.info('Pinging router without background scans for %d seconds.',
-                     self.NO_BGSCAN_SAMPLE_PERIOD_SECONDS)
-        result_no_bgscan = self.context.client.ping(
-                get_ping_config(self.NO_BGSCAN_SAMPLE_PERIOD_SECONDS))
-        logging.info('Ping statistics without bgscan: %r', result_no_bgscan)
-        if result_no_bgscan.max_latency > self.THRESHOLD_BASELINE_LATENCY_MS:
-            raise error.TestFail('RTT latency is too high even without '
-                                 'background scans: %f' %
-                                 result_no_bgscan.max_latency)
-
-        if (result_bgscan.max_latency >
-                self.LATENCY_MARGIN_MS + result_no_bgscan.avg_latency):
-            raise error.TestFail('Significant difference in rtt due to bgscan: '
-                                 '%.1f > %.1f + %d' %
-                                 (result_bgscan.max_latency,
-                                  result_no_bgscan.avg_latency,
-                                  self.LATENCY_MARGIN_MS))
diff --git a/server/site_tests/network_WiFi_BluetoothScanPerf/control.11a b/server/site_tests/network_WiFi_BluetoothScanPerf/control.11a
index 1648351..92e88ab 100644
--- a/server/site_tests/network_WiFi_BluetoothScanPerf/control.11a
+++ b/server/site_tests/network_WiFi_BluetoothScanPerf/control.11a
@@ -8,6 +8,7 @@
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
diff --git a/server/site_tests/network_WiFi_BluetoothScanPerf/control.11b b/server/site_tests/network_WiFi_BluetoothScanPerf/control.11b
index 894ddf7..da6792f 100644
--- a/server/site_tests/network_WiFi_BluetoothScanPerf/control.11b
+++ b/server/site_tests/network_WiFi_BluetoothScanPerf/control.11b
@@ -8,6 +8,7 @@
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
diff --git a/server/site_tests/network_WiFi_BluetoothScanPerf/network_WiFi_BluetoothScanPerf.py b/server/site_tests/network_WiFi_BluetoothScanPerf/network_WiFi_BluetoothScanPerf.py
index 630c500..a56604b 100644
--- a/server/site_tests/network_WiFi_BluetoothScanPerf/network_WiFi_BluetoothScanPerf.py
+++ b/server/site_tests/network_WiFi_BluetoothScanPerf/network_WiFi_BluetoothScanPerf.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -57,7 +58,10 @@
 
         """
         get_ping_config = lambda period: ping_runner.PingConfig(
-                self.context.get_wifi_addr(), interval=1, count=period)
+                self.context.get_wifi_addr(),
+                interval=1,
+                count=period,
+                source_iface=self.context.client.wifi_if)
 
         logging.info('testing config %s, ap_config %s, BT:%s',
                      config.tag, ap_config_tag, bt_tag)
@@ -131,4 +135,3 @@
 
         end_time = time.time()
         logging.info('Running time %0.1f seconds.', end_time - start_time)
-
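The hunk above pins the latency probe to the DUT's WLAN interface so the ping cannot silently leave over another route. A minimal sketch of the same PingConfig call, using only the arguments visible in this diff (the address and interface values below are placeholders for what the cell context provides):

from autotest_lib.client.common_lib.cros.network import ping_runner

wifi_addr = '192.168.87.1'   # placeholder: AP-side address from the cell context
wifi_if = 'wlan0'            # placeholder: the client's WiFi interface name

ping_config = ping_runner.PingConfig(wifi_addr,
                                     interval=1,
                                     count=10,
                                     source_iface=wifi_if)
# In the test this config is then handed to self.context.client.ping(...).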
diff --git a/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11a b/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11a
index 0d75419..92b1bea 100644
--- a/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11a
+++ b/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11a
@@ -4,11 +4,11 @@
 
 AUTHOR = 'bfreed'
 NAME = 'network_WiFi_BluetoothStreamPerf.11a'
-# TODO(b/168842922) Schedule the test when it is fixed
 ATTRIBUTES = ''
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
@@ -20,18 +20,15 @@
 from autotest_lib.server.cros.network import hostap_config
 
 args_dict = utils.args_to_dict(args)
-btpeer_args = hosts.CrosHost.get_btpeer_arguments(args_dict)
-
 
 def run(machine):
     host = hosts.create_host(machine)
-    host.initialize_btpeer(btpeer_args)
     mode = hostap_config.HostapConfig.MODE_11A
     configs = [hostap_config.HostapConfig(channel=channel, mode=mode)
                for channel in (44,)]
     job.run_test('network_WiFi_BluetoothStreamPerf', tag=NAME.split('.')[1],
-                 host=host, raw_cmdline_args=args,
-                 additional_params=configs)
+                 host=host, raw_cmdline_args=args, args_dict=args_dict,
+                 additional_params=configs, test_name='coex_test')
 
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11ac b/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11ac
new file mode 100644
index 0000000..2042bf2
--- /dev/null
+++ b/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11ac
@@ -0,0 +1,46 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'bfreed'
+NAME = 'network_WiFi_BluetoothStreamPerf.11ac'
+ATTRIBUTES = 'suite:bluetooth_wifi_coex'
+# This test is flaky, but adding it to 'suite:bluetooth_flaky' would use up
+# a lot of lab capacity.
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+This test uses netperf to measure the maximal receiving and transmitting
+throughput on a DUT with an open 802.11ac network while operating bluetooth.
+"""
+
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.network import hostap_config
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+
+    n_caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]
+    ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80]
+    ac_mode = hostap_config.HostapConfig.MODE_11AC_MIXED
+    channel_width_80_mhz = hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80
+    configs = [hostap_config.HostapConfig(
+                    channel=channel,
+                    mode=ac_mode,
+                    n_capabilities=n_caps,
+                    vht_channel_width=channel_width_80_mhz,
+                    vht_center_channel=vht_center_channel,
+                    ac_capabilities=ac_caps)
+               for channel, vht_center_channel in [(44, 42), (157, 155)]]
+
+    job.run_test('network_WiFi_BluetoothStreamPerf', tag=NAME.split('.')[1],
+                 host=host, raw_cmdline_args=args, args_dict=args_dict,
+                 additional_params=configs, test_name='coex_test')
+
+parallel_simple(run, machines)
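The (channel, vht_center_channel) pairs used above follow 5 GHz VHT80 channelization: each 80 MHz block spans four 20 MHz channels and is named by its center channel, so primary channel 44 (block 36-48) pairs with center 42 and channel 157 (block 149-161) with 155. A small illustrative lookup, not part of the control file:

# Illustrative 5 GHz primary-channel -> VHT80 center-channel mapping.
VHT80_BLOCKS = {(36, 48): 42, (52, 64): 58, (100, 112): 106,
                (116, 128): 122, (132, 144): 138, (149, 161): 155}

def vht80_center(primary):
    for (low, high), center in VHT80_BLOCKS.items():
        if low <= primary <= high:
            return center
    raise ValueError('channel %d is not in a known VHT80 block' % primary)

assert vht80_center(44) == 42 and vht80_center(157) == 155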
diff --git a/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11b b/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11b
index b3950f6..655c436 100644
--- a/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11b
+++ b/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11b
@@ -4,11 +4,11 @@
 
 AUTHOR = 'bfreed'
 NAME = 'network_WiFi_BluetoothStreamPerf.11b'
-# TODO(b/168842922) Schedule the test when it is fixed
 ATTRIBUTES = ''
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell, working_bluetooth_btpeer:1'
+PY_VERSION = 3
 
 DOC = """
 This test uses netperf to measure the maximal receiving and transmitting
@@ -20,17 +20,15 @@
 from autotest_lib.server.cros.network import hostap_config
 
 args_dict = utils.args_to_dict(args)
-btpeer_args = hosts.CrosHost.get_btpeer_arguments(args_dict)
 
 def run(machine):
     host = hosts.create_host(machine)
-    host.initialize_btpeer(btpeer_args)
     mode = hostap_config.HostapConfig.MODE_11B
     configs = [hostap_config.HostapConfig(channel=channel, mode=mode)
                for channel in (6,)]
     job.run_test('network_WiFi_BluetoothStreamPerf', tag=NAME.split('.')[1],
-                 host=host, raw_cmdline_args=args,
-                 additional_params=configs)
+                 host=host, raw_cmdline_args=args, args_dict=args_dict,
+                 additional_params=configs, test_name='coex_test')
 
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11g b/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11g
new file mode 100644
index 0000000..e210ad1
--- /dev/null
+++ b/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11g
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'bfreed'
+NAME = 'network_WiFi_BluetoothStreamPerf.11g'
+ATTRIBUTES = 'suite:bluetooth_wifi_coex'
+# This test is flaky, but adding it to 'suite:bluetooth_flaky' would use up
+# a lot of lab capacity.
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+This test uses netperf to measure the maximal receiving and transmitting
+throughput on a DUT with an open 802.11g network while operating bluetooth.
+"""
+
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.network import hostap_config
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+
+    mode = hostap_config.HostapConfig.MODE_11G
+    configs = [hostap_config.HostapConfig(channel=channel, mode=mode)
+               for channel in (6,)]
+
+    job.run_test('network_WiFi_BluetoothStreamPerf', tag=NAME.split('.')[1],
+                 host=host, raw_cmdline_args=args, args_dict=args_dict,
+                 additional_params=configs, test_name='coex_test')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11n b/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11n
new file mode 100644
index 0000000..8b1ad9a
--- /dev/null
+++ b/server/site_tests/network_WiFi_BluetoothStreamPerf/control.11n
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'bfreed'
+NAME = 'network_WiFi_BluetoothStreamPerf.11n'
+ATTRIBUTES = 'suite:bluetooth_wifi_coex'
+# This test is flaky, but adding it to 'suite:bluetooth_flaky' would use up
+# a lot of lab capacity.
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell, working_bluetooth_btpeer:1'
+PY_VERSION = 3
+
+DOC = """
+This test uses netperf to measure the maximal receiving and transmitting
+throughput on a DUT with an open 802.11n network while operating bluetooth.
+"""
+
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.network import hostap_config
+
+args_dict = utils.args_to_dict(args)
+
+def run(machine):
+    host = hosts.create_host(machine)
+
+    all_caps = ([hostap_config.HostapConfig.N_CAPABILITY_HT20],
+                [hostap_config.HostapConfig.N_CAPABILITY_HT40])
+    mode = hostap_config.HostapConfig.MODE_11N_PURE
+    configs = [hostap_config.HostapConfig(n_capabilities=caps,
+                                          channel=channel, mode=mode)
+               for caps in all_caps
+               for channel in (1, 157)]
+
+    job.run_test('network_WiFi_BluetoothStreamPerf', tag=NAME.split('.')[1],
+                 host=host, raw_cmdline_args=args, args_dict=args_dict,
+                 additional_params=configs, test_name='coex_test')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_BluetoothStreamPerf/network_WiFi_BluetoothStreamPerf.py b/server/site_tests/network_WiFi_BluetoothStreamPerf/network_WiFi_BluetoothStreamPerf.py
index 694fc52..3d9504e 100644
--- a/server/site_tests/network_WiFi_BluetoothStreamPerf/network_WiFi_BluetoothStreamPerf.py
+++ b/server/site_tests/network_WiFi_BluetoothStreamPerf/network_WiFi_BluetoothStreamPerf.py
@@ -1,23 +1,27 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 import logging
+import threading
 import time
 
-
-from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib.cros.network import ping_runner
 from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.cros.chameleon import chameleon_audio_helper
-from autotest_lib.client.cros.chameleon import chameleon_audio_ids
-from autotest_lib.server.cros.audio import audio_test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
 from autotest_lib.server.cros.network import netperf_runner
 from autotest_lib.server.cros.network import netperf_session
 from autotest_lib.server.cros.network import wifi_cell_test_base
 
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import \
+     BluetoothAdapterQuickTests
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_audio_tests import (
+        BluetoothAdapterAudioTests)
+from autotest_lib.client.cros.bluetooth.bluetooth_audio_test_data import A2DP
 
-class network_WiFi_BluetoothStreamPerf(wifi_cell_test_base.WiFiCellTestBase):
+
+class network_WiFi_BluetoothStreamPerf(wifi_cell_test_base.WiFiCellTestBase,
+                                       BluetoothAdapterQuickTests,
+                                       BluetoothAdapterAudioTests):
     """Test maximal achievable bandwidth on several channels per band.
 
     Conducts a performance test for a set of specified router configurations
@@ -25,6 +29,9 @@
 
     """
 
+    test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
+    batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
+
     version = 1
 
     NETPERF_CONFIGS = [
@@ -59,7 +66,10 @@
 
         """
         get_ping_config = lambda period: ping_runner.PingConfig(
-                self.context.get_wifi_addr(), interval=1, count=period)
+                self.context.get_wifi_addr(),
+                interval=1,
+                count=period,
+                source_iface=self.context.client.wifi_if)
 
         logging.info('testing config %s, ap_config %s, BT:%s',
                      config.tag, ap_config_tag, bt_tag)
@@ -107,33 +117,26 @@
             { '_'.join(['ping', test_str]): result_ping.avg_latency })
         logging.info('Ping statistics with %s: %r', bt_tag, result_ping)
 
+    def pair_audio_device(self, device):
+        """Pair an audio device pre-test to simplify later re-connection"""
+        self.test_device_set_discoverable(device, True)
+        self.test_discover_device(device.address)
+        self.test_pairing(device.address, device.pin, trusted=True)
+        device.SetTrustedByRemoteAddress(self.bluetooth_facade.address)
+        self.test_disconnection_by_adapter(device.address)
 
+    def do_audio_test(self, device):
+        """Run the body of the audio test"""
+        self.test_a2dp_sinewaves(device, A2DP, 60)
 
-    def run_once(self, host):
+    @test_wrapper('Coex tests', devices={'BLUETOOTH_AUDIO': 1})
+    def coex_test(self):
         """Test body."""
         start_time = time.time()
 
-        # Setup Bluetooth widgets and their binder, but do not yet connect.
-        audio_test.audio_test_requirement()
-        factory = remote_facade_factory.RemoteFacadeFactory(
-                host, results_dir=self.resultsdir)
-        chameleon_board = host.btpeer
-        if chameleon_board is None:
-            raise error.TestNAError("Bluetooth peer is not present")
-
-        chameleon_board.setup_and_reset(self.outputdir)
-        widget_factory = chameleon_audio_helper.AudioWidgetFactory(
-                factory, host)
-        source = widget_factory.create_widget(
-            chameleon_audio_ids.CrosIds.BLUETOOTH_HEADPHONE)
-        bluetooth_widget = widget_factory.create_widget(
-            chameleon_audio_ids.PeripheralIds.BLUETOOTH_DATA_RX)
-        binder = widget_factory.create_binder(
-                source, bluetooth_widget)
-        audio_test_file = 'http://commondatastorage.googleapis.com/' \
-                          'chromiumos-test-assets-public/audio_test/' \
-                          'chameleon/Headphone/test_256_16.mp3'
-
+        device = self.devices['BLUETOOTH_AUDIO'][0]
+        self.initialize_bluetooth_audio(device, A2DP)
+        self.pair_audio_device(device)
 
         for ap_config in self._ap_configs:
             # Set up the router and associate the client with it.
@@ -154,14 +157,21 @@
             for config in self.NETPERF_CONFIGS:
                 self.base_through = 0
                 self.test_one(session, config, ap_config_tag, 'BT_disconnected')
-                with chameleon_audio_helper.bind_widgets(binder):
-                    self.test_one(session, config, ap_config_tag,
-                                  'BT_connected_but_not_streaming')
-                    logging.info('Playing an audio test file')
-                    browser_facade = factory.create_browser_facade()
-                    browser_facade.new_tab(audio_test_file)
-                    self.test_one(session, config, ap_config_tag,
-                                  'BT_streaming_audiofile')
+
+                self.test_connection_by_device(device)
+                self.test_one(session, config, ap_config_tag,
+                              'BT_connected_but_not_streaming')
+
+                # Start playing audio in background
+                audio_thread = threading.Thread(target=self.do_audio_test,
+                                                args=(device, ))
+                audio_thread.start()
+                self.test_one(session, config, ap_config_tag,
+                              'BT_streaming_audiofile')
+
+                # Wait for audio thread to complete
+                audio_thread.join()
+                self.test_disconnection_by_adapter(device.address)
                 self.test_one(session, config, ap_config_tag,
                               'BT_disconnected_again')
 
@@ -169,6 +179,23 @@
             self.context.client.shill.disconnect(self.context.router.get_ssid())
             self.context.router.deconfig()
 
+        self.cleanup_bluetooth_audio(device, A2DP)
         end_time = time.time()
         logging.info('Running time %0.1f seconds.', end_time - start_time)
 
+    @batch_wrapper('Coex batch')
+    def coex_health_batch_run(self, num_iterations=1, test_name=None):
+        """Run the bluetooth coex health test batch or a specific given test.
+
+        @param num_iterations: how many iterations to run
+        @param test_name: specific test to run otherwise None to run the
+                whole batch
+        """
+        self.coex_test()
+
+    def run_once(self, host, test_name=None):
+        self.host = host
+
+        self.quick_test_init(host, use_btpeer=True)
+        self.coex_health_batch_run(test_name=test_name)
+        self.quick_test_cleanup()
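The reworked flow above overlaps A2DP streaming with the netperf measurement by pushing audio playback into a background thread. A stripped-down sketch of that pattern with placeholder work functions (only the threading usage mirrors the test):

import threading
import time

def stream_audio():        # stands in for self.do_audio_test(device)
    time.sleep(1)          # pretend to stream a sine wave over A2DP

def run_netperf_sample():  # stands in for self.test_one(...)
    time.sleep(1)          # pretend to measure throughput

audio_thread = threading.Thread(target=stream_audio)
audio_thread.start()       # audio streams in the background...
run_netperf_sample()       # ...while throughput is measured concurrently
audio_thread.join()        # wait for the audio run to finish before moving on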
diff --git a/server/site_tests/network_WiFi_CSA/control b/server/site_tests/network_WiFi_CSA/control
deleted file mode 100644
index 5987031..0000000
--- a/server/site_tests/network_WiFi_CSA/control
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, wiley, quiche'
-NAME = 'network_WiFi_CSA'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that the DUT will move off-channel if it is sent a
-Spectrum Management action frame that contains a Channel Switch
-Announcement element.  Such frames are sent on a DFS network to vacate the
-channel if radar is detected.
-Note that not all clients support CSA, but they generally should at least try
-to disconnect from the AP.
-"""
-
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    a_mode = hostap_config.HostapConfig.MODE_11A
-    configurations = [
-        (hostap_config.HostapConfig(
-            channel=64, mode=a_mode, spectrum_mgmt_required=True), 36)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_CSA',
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_CSA/network_WiFi_CSA.py b/server/site_tests/network_WiFi_CSA/network_WiFi_CSA.py
deleted file mode 100644
index 9f3f63d..0000000
--- a/server/site_tests/network_WiFi_CSA/network_WiFi_CSA.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import ping_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server import site_linux_system
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_CSA(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that the client vacates the channel after notification
-    from the AP that it should switch channels. Note that not all clients
-    support CSA (Channel Switch Announcement), but they generally should at
-    least try to disconnect from the AP."""
-    version = 1
-
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """Hook into super class to take control files parameters.
-
-        @param commandline_args dict of parsed parameters from the autotest.
-        @param additional_params list of dicts describing router configs.
-
-        """
-        self._configurations = additional_params
-
-
-    def run_once(self):
-        """Sets up a router, connects to it, then tests a channel switch."""
-        for router_conf, alternate_channel in self._configurations:
-            self.context.router.require_capabilities(
-                  [site_linux_system.LinuxSystem.
-                          CAPABILITY_SEND_MANAGEMENT_FRAME])
-            self.context.configure(router_conf)
-            assoc_params = xmlrpc_datatypes.AssociationParameters()
-            assoc_params.ssid = self.context.router.get_ssid()
-            self.context.assert_connect_wifi(assoc_params)
-            ping_config = ping_runner.PingConfig(
-                    self.context.get_wifi_addr(ap_num=0))
-            client_mac = self.context.client.wifi_mac
-            for _ in range(10):
-                # Since the client might be in power-save, we are not
-                # guaranteed it will hear this message the first time around.
-                self.context.router.send_management_frame_on_ap(
-                        'channel_switch', alternate_channel)
-
-                # Test to see if the router received a deauth message from
-                # the client.
-                # TODO (b/154879577): 'detect_client_deauth' may trigger based
-                # on a DEAUTH that happens during initial authentication.
-                # (e.g., clients may AUTH, retry (DEAUTH+AUTH), etc.) That may
-                # cause us to 'break' out here before the client _really_ sees
-                # the CSA.
-                if self.context.router.detect_client_deauth(client_mac):
-                    break
-
-                # Otherwise detect the client leaving indirectly by measuring
-                # client pings.  This should fail at some point.
-                ping_config = ping_runner.PingConfig(
-                        self.context.get_wifi_addr(ap_num=0),
-                        count=3, ignore_status=True,
-                        ignore_result=True)
-                result = self.context.client.ping(ping_config)
-                if result.loss > 60:
-                    break
-            else:
-                raise error.TestFail('Client never lost connectivity')
-            self.context.client.shill.disconnect(assoc_params.ssid)
-            self.context.router.deconfig()
diff --git a/server/site_tests/network_WiFi_CSADisconnect/control b/server/site_tests/network_WiFi_CSADisconnect/control
deleted file mode 100644
index 55c6586..0000000
--- a/server/site_tests/network_WiFi_CSADisconnect/control
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'zqiu, pstew, wiley, quiche'
-NAME = 'network_WiFi_CSADisconnect'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that the DUT can still connect to the AP when it is
-disconnected right after receiving a CSA message. This is to make sure the MAC
-80211 queues are not stuck after those two events.
-"""
-
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    configurations = [(64, 36)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_CSADisconnect',
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
-
diff --git a/server/site_tests/network_WiFi_CSADisconnect/network_WiFi_CSADisconnect.py b/server/site_tests/network_WiFi_CSADisconnect/network_WiFi_CSADisconnect.py
deleted file mode 100644
index 0a4b307..0000000
--- a/server/site_tests/network_WiFi_CSADisconnect/network_WiFi_CSADisconnect.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server import site_linux_system
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_CSADisconnect(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that verifies the client's MAC 80211 queues are not stuck when
-    disconnecting immediately after receiving a CSA (Channel Switch
-    Announcement) message. Refer to "crbug.com/408370" for more information."""
-    version = 1
-
-
-    def _connect_to_ap(self, channel):
-        """Configure an AP and instruct client to connect to it with
-        autoconnect disabled.
-
-        @param channel int Channel to configure AP in.
-
-        """
-        self.context.configure(hostap_config.HostapConfig(
-                channel=channel,
-                mode=hostap_config.HostapConfig.MODE_11N_MIXED))
-        assoc_params = xmlrpc_datatypes.AssociationParameters()
-        assoc_params.ssid = self.context.router.get_ssid()
-        assoc_params.autoconnect = False
-        self.context.client.shill.configure_wifi_service(assoc_params)
-        self.context.assert_connect_wifi(assoc_params)
-
-
-    def _csa_test(self, router_initiated_disconnect):
-        """Perform channel switch, and initiate disconnect immediately, then
-        verify wifi connection still works, hence the 80211 queues are not
-        stuck.
-
-        @param router_initiated_disconnected bool indicating the initiator of
-            the disconnect.
-
-        """
-        # Run it multiple times since the client might be in power-save;
-        # we are not guaranteed it will hear this message the first time
-        # around. Alternate the AP channel with the CSA-announced channel to
-        # work around drivers (Marvell 8897) that disallow reconnecting
-        # immediately to the same AP on the same channel after CSA to a
-        # different channel.
-        for _ in range(5):
-            self._connect_to_ap(self._primary_channel)
-            self.context.router.send_management_frame_on_ap(
-                'channel_switch', self._alternate_channel)
-            if router_initiated_disconnect:
-                self.context.router.deauth_client(self.context.client.wifi_mac)
-            else:
-                self.context.client.shill.disconnect(
-                        self.context.router.get_ssid())
-
-            # Wait for client to be disconnected.
-            self.context.client.wait_for_service_states(
-                    self.context.router.get_ssid(), ('idle'), 30)
-
-            # Swap primary_channel with alternate channel so we don't configure
-            # AP using same channel in back-to-back runs.
-            tmp = self._alternate_channel
-            self._alternate_channel = self._primary_channel
-            self._primary_channel = tmp
-
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """Hook into super class to take control files parameters.
-
-        @param commandline_args dict of parsed parameters from the autotest.
-        @param additional_params list of dicts describing router configs.
-
-        """
-        self._configurations = additional_params
-
-
-    def run_once(self):
-        """Verify that wifi connectivity still works when disconnecting
-        right after channel switch."""
-
-        for self._primary_channel, self._alternate_channel in \
-                self._configurations:
-            self.context.router.require_capabilities(
-                  [site_linux_system.LinuxSystem.
-                          CAPABILITY_SEND_MANAGEMENT_FRAME])
-            # Test both router initiated and client initiated disconnect after
-            # channel switch announcement.
-            self._csa_test(True)
-            self._csa_test(False)
-
-            self.context.router.deconfig()
diff --git a/server/site_tests/network_WiFi_ChannelHop/control b/server/site_tests/network_WiFi_ChannelHop/control
deleted file mode 100644
index 727e7ea..0000000
--- a/server/site_tests/network_WiFi_ChannelHop/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-TIME = 'SHORT'
-NAME = 'network_WiFi_ChannelHop'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-The channel hop test verifies that the DUT, connected to a BSS on one
-channel, will successfully re-connect when the AP changes channels on
-the BSS.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_ChannelHop',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_ChannelHop/network_WiFi_ChannelHop.py b/server/site_tests/network_WiFi_ChannelHop/network_WiFi_ChannelHop.py
deleted file mode 100644
index c23cb44..0000000
--- a/server/site_tests/network_WiFi_ChannelHop/network_WiFi_ChannelHop.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import iw_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_ChannelHop(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests roaming when an AP changes channels on an SSID."""
-
-    version = 1
-    ORIGINAL_FREQUENCY = 2412
-    ORIGINAL_BSSID = "00:01:02:03:04:05"
-
-    def run_once(self):
-        """Test body."""
-        freq = network_WiFi_ChannelHop.ORIGINAL_FREQUENCY
-        ssid = self.context.router.build_unique_ssid()
-        ap_config = hostap_config.HostapConfig(
-                ssid=ssid,
-                frequency=freq,
-                mode=hostap_config.HostapConfig.MODE_11B,
-                bssid=network_WiFi_ChannelHop.ORIGINAL_BSSID)
-        self.context.configure(ap_config)
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid())
-        self.context.assert_connect_wifi(assoc_params)
-
-        self.context.assert_ping_from_dut()
-        self.context.client.check_iw_link_value(
-                iw_runner.IW_LINK_KEY_FREQUENCY,
-                freq)
-        self.context.router.deconfig()
-
-        # This checks both channel jumping on the same BSSID and channel
-        # jumping between BSSIDs, all inside the same SSID.
-        for freq, bssid in ((2437, network_WiFi_ChannelHop.ORIGINAL_BSSID),
-                            (2462, network_WiFi_ChannelHop.ORIGINAL_BSSID),
-                            (2422, "06:07:08:09:0a:0b"),
-                            (2447, "0c:0d:0e:0f:10:11")):
-            # Wait for the disconnect to happen.
-            success, state, elapsed_seconds = \
-                    self.context.client.wait_for_service_states(ssid,
-                            ['idle'], 30)
-            if not success:
-                raise error.TestFail(
-                        'Failed to disconnect from "%s" in %f seconds (state=%s)' %
-                        (ssid, elapsed_seconds, state))
-
-            # Change channels on the AP.  This happens in full view of the DUT
-            # and the AP deauths everyone as it exits.
-            ap_config = hostap_config.HostapConfig(
-                    ssid=ssid,
-                    frequency=freq,
-                    mode=hostap_config.HostapConfig.MODE_11B,
-                    bssid=bssid)
-            self.context.configure(ap_config)
-
-            # Wait for the DUT to scan and acquire the AP at the new
-            # frequency.
-            success, state, elapsed_seconds = \
-                    self.context.client.wait_for_service_states(ssid,
-                            self.context.client.CONNECTED_STATES, 30)
-            if not success:
-                raise error.TestFail(
-                        'Failed to connect to "%s" in %f seconds (state=%s)' %
-                        (ssid, elapsed_seconds, state))
-
-            # Verify that we're connected.
-            self.context.assert_ping_from_dut()
-
-            # Verify that the client switched to new frequency
-            self.context.client.check_iw_link_value(
-                    iw_runner.IW_LINK_KEY_FREQUENCY,
-                    freq)
-            self.context.router.deconfig()
diff --git a/server/site_tests/network_WiFi_ChannelScanDwellTime/control b/server/site_tests/network_WiFi_ChannelScanDwellTime/control
deleted file mode 100644
index f466c00..0000000
--- a/server/site_tests/network_WiFi_ChannelScanDwellTime/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'zqiu, wiley, pstew, quiche'
-NAME = 'network_WiFi_ChannelScanDwellTime'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_perf')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test is designed to determine the channel scan dwell time.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_ChannelScanDwellTime',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_ChannelScanDwellTime/network_WiFi_ChannelScanDwellTime.py b/server/site_tests/network_WiFi_ChannelScanDwellTime/network_WiFi_ChannelScanDwellTime.py
deleted file mode 100644
index 054659c..0000000
--- a/server/site_tests/network_WiFi_ChannelScanDwellTime/network_WiFi_ChannelScanDwellTime.py
+++ /dev/null
@@ -1,215 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import division
-
-import logging
-import random
-import string
-import time
-
-from autotest_lib.server.cros.network import frame_sender
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server import site_linux_system
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.cros.network import tcpdump_analyzer
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_ChannelScanDwellTime(wifi_cell_test_base.WiFiCellTestBase):
-    """Test for determine channel scan dwell time."""
-    version = 1
-
-    KNOWN_TEST_PREFIX = 'network_WiFi'
-    SUFFIX_LETTERS = string.ascii_lowercase + string.digits
-    DELAY_INTERVAL_MILLISECONDS = 1
-    SCAN_RETRY_TIMEOUT_SECONDS = 10
-    NUM_BSS = 1024
-    MISSING_BEACON_THRESHOLD = 2
-    MAX_DWELL_TIME_MS = 250
-    MIN_DWELL_TIME_MS = 5
-    FREQUENCY_MHZ = 2412
-    MSEC_PER_SEC = 1000
-    SCAN_START_DELAY_MS = 200
-
-    def _build_ssid_prefix(self):
-        """Build ssid prefix."""
-        unique_salt = ''.join([random.choice(self.SUFFIX_LETTERS)
-                               for _ in range(5)])
-        prefix = self.__class__.__name__[len(self.KNOWN_TEST_PREFIX):]
-        prefix = prefix.lstrip('_')
-        prefix += '_' + unique_salt + '_'
-        return prefix[-23:]
-
-
-    def _get_ssid_index(self, ssid):
-        """Return the SSID index from an SSID string.
-
-        Given an SSID of the form [testName]_[salt]_[index], returns |index|.
-
-        @param ssid: full SSID, as received in scan results.
-        @return int SSID index.
-        """
-        return int(ssid.split('_')[-1], 16)
-
-
-    def _get_beacon_timestamp(self, beacon_frames, ssid_num):
-        """Return the time at which the beacon with |ssid_num| was transmitted.
-
-        If multiple beacons match |ssid_num|, return the time of the first
-        matching beacon.
-
-        @param beacon_frames: List of Frames.
-        @param ssid_num: int SSID number to match.
-        @return datetime time at which beacon was transmitted.
-        """
-        for frame in beacon_frames:
-            if self._get_ssid_index(frame.ssid) == ssid_num:
-                return frame.time_datetime
-        else:
-            raise error.TestFail('Failed to find SSID %d in pcap.' % ssid_num)
-
-
-    def _get_dwell_time(self, bss_list, sent_beacon_frames):
-        """Parse scan result to get dwell time.
-
-        Calculate dwell time based on the SSIDs in the scan result.
-
-        @param bss_list: List of BSSs.
-        @param sent_beacon_frames: List of Frames, as captured on sender.
-
-        @return int dwell time in ms.
-        """
-        ssid_index = [self._get_ssid_index(bss) for bss in bss_list]
-        # Calculate dwell time based on the start ssid index and end ssid index.
-        ssid_index.sort()
-        index_diff = ssid_index[-1] - ssid_index[0]
-
-        # Check if number of missed beacon frames exceed the test threshold.
-        missed_beacons = index_diff - (len(ssid_index) - 1)
-        if missed_beacons > self.MISSING_BEACON_THRESHOLD:
-            logging.info('Missed %d beacon frames, SSID Index: %r',
-                         missed_beacons, ssid_index)
-            raise error.TestFail('DUT missed more than %d beacon frames' %
-                                 missed_beacons)
-
-        first_ssid_tstamp = self._get_beacon_timestamp(
-            sent_beacon_frames, ssid_index[0])
-        last_ssid_tstamp = self._get_beacon_timestamp(
-            sent_beacon_frames, ssid_index[-1])
-        return int(round(
-            (last_ssid_tstamp - first_ssid_tstamp).total_seconds() *
-            self.MSEC_PER_SEC))
-
-    def _scan_frequencies(self, frequencies):
-        """Scan for BSSs on the provided frequencies.
-
-        The result of the scan is stored in self._bss_list.
-
-        @return True if scan was successfully triggered, even
-                if no BSS was found. False otherwise.
-        """
-        self._bss_list = self.context.client.iw_runner.scan(
-                self.context.client.wifi_if,
-                frequencies=frequencies)
-
-        return self._bss_list is not None
-
-    def _channel_dwell_time_test(self, single_channel):
-        """Perform test to determine channel dwell time.
-
-        This function invokes FrameSender to continuously send beacon frames
-        for a specific number of BSSs with a specific delay; the SSIDs of the
-        BSSs are in hex numerical order. At the same time, a wifi scan is
-        performed on the DUT. The indices in the SSIDs of the scan result are
-        used to determine the relative start and end times of the channel scan.
-
-        @param single_channel: bool perform single channel scan if true.
-
-        @return int dwell time in ms.
-
-        """
-        channel = hostap_config.HostapConfig.get_channel_for_frequency(
-            self.FREQUENCY_MHZ)
-        # Configure an AP to inject beacons.
-        self.context.configure(hostap_config.HostapConfig(channel=channel))
-        self.context.capture_host.start_capture(self.FREQUENCY_MHZ)
-        ssid_prefix = self._build_ssid_prefix()
-
-        with frame_sender.FrameSender(self.context.router, 'beacon', channel,
-                                      ssid_prefix=ssid_prefix,
-                                      num_bss=self.NUM_BSS,
-                                      frame_count=0,
-                                      delay=self.DELAY_INTERVAL_MILLISECONDS):
-            if single_channel:
-                frequencies = [self.FREQUENCY_MHZ]
-            else:
-                frequencies = []
-            # Don't immediately start the scan, wait a bit so the AP has enough
-            # time to start actually sending beacon frames before the scan
-            # starts.
-            time.sleep(self.SCAN_START_DELAY_MS / self.MSEC_PER_SEC)
-            # Perform scan
-            try:
-                utils.poll_for_condition(
-                        condition=lambda: self._scan_frequencies(frequencies),
-                        timeout=self.SCAN_RETRY_TIMEOUT_SECONDS,
-                        sleep_interval=0.5)
-            except utils.TimeoutError:
-                raise error.TestFail('Unable to trigger scan on client.')
-            if not self._bss_list:
-                raise error.TestFail('Failed to find any BSS')
-
-            # Remaining work is done outside the FrameSender
-            # context. This is to ensure that no additional frames are
-            # transmitted while we're waiting for the packet capture
-            # to complete.
-        pcap_path = self.context.capture_host.stop_capture()[0].local_pcap_path
-
-        # Filter scan result based on ssid prefix to remove any cached
-        # BSSs from previous run.
-        result_list = [bss.ssid for bss in self._bss_list if
-                       bss.ssid and bss.ssid.startswith(ssid_prefix)]
-        if not result_list:
-            raise error.TestFail('Failed to find any BSS for this test')
-
-        beacon_frames = tcpdump_analyzer.get_frames(
-            pcap_path, tcpdump_analyzer.WLAN_BEACON_ACCEPTOR,
-            reject_bad_fcs=False)
-        # Filter beacon frames based on ssid prefix.
-        result_beacon_frames = [frame for frame in beacon_frames if frame.ssid
-                                and frame.ssid.startswith(ssid_prefix)]
-        if not result_beacon_frames:
-            raise error.TestFail('Failed to find any beacons for this test')
-        return self._get_dwell_time(result_list, result_beacon_frames)
-
-
-    def run_once(self):
-        """Measure channel dwell time for single-channel scan"""
-        self.context.router.require_capabilities(
-            [site_linux_system.LinuxSystem.CAPABILITY_SEND_MANAGEMENT_FRAME])
-        # Claim control over the wifi interface from WiFiClient, which
-        # prevents shill and wpa_supplicant from managing that interface,
-        # so this test has sole ownership of the interface and can
-        # perform scans without interference from shill and wpa_supplicant.
-        self.context.client.claim_wifi_if()
-        try:
-            # Get channel dwell time for single-channel scan
-            dwell_time = self._channel_dwell_time_test(True)
-            # Ensure that the measured value is sane, so a glitch doesn't
-            # pollute the perf dataset.
-            if (dwell_time < self.MIN_DWELL_TIME_MS or
-                    dwell_time > self.MAX_DWELL_TIME_MS):
-                raise error.TestFail(
-                        'Dwell time %d ms is not within range [%dms,%dms]' %
-                        (dwell_time, self.MIN_DWELL_TIME_MS,
-                            self.MAX_DWELL_TIME_MS))
-            logging.info('Channel dwell time for single-channel scan: %d ms',
-                         dwell_time)
-            self.output_perf_value(
-                    'dwell_time_single_channel_scan', dwell_time, units='ms',
-                    higher_is_better=False)
-        finally:
-            self.context.client.release_wifi_if()
diff --git a/server/site_tests/network_WiFi_ChaosConfigFailure/control b/server/site_tests/network_WiFi_ChaosConfigFailure/control
index 2d50ee1..ba96f92 100644
--- a/server/site_tests/network_WiFi_ChaosConfigFailure/control
+++ b/server/site_tests/network_WiFi_ChaosConfigFailure/control
@@ -2,6 +2,11 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 import traceback
 
 from autotest_lib.client.common_lib.cros.network import ap_constants
@@ -11,6 +16,7 @@
 NAME = 'network_WiFi_ChaosConfigFailure'
 TIME = 'SHORT'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This test is run when a chaos APConfigurator fails to successfully configure
diff --git a/server/site_tests/network_WiFi_ChaosConfigFailure/network_WiFi_ChaosConfigFailure.py b/server/site_tests/network_WiFi_ChaosConfigFailure/network_WiFi_ChaosConfigFailure.py
index c6a103c..400d53a 100644
--- a/server/site_tests/network_WiFi_ChaosConfigFailure/network_WiFi_ChaosConfigFailure.py
+++ b/server/site_tests/network_WiFi_ChaosConfigFailure/network_WiFi_ChaosConfigFailure.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/network_WiFi_ChaosConfigSniffer/control b/server/site_tests/network_WiFi_ChaosConfigSniffer/control
index b54f760..260eb7b 100644
--- a/server/site_tests/network_WiFi_ChaosConfigSniffer/control
+++ b/server/site_tests/network_WiFi_ChaosConfigSniffer/control
@@ -2,6 +2,11 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 from autotest_lib.server import utils
 
 AUTHOR = 'krisr@chromium.org'
@@ -10,6 +15,7 @@
 TEST_CATEGORY = 'Functional'
 TEST_CLASS = 'network'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 Searches for particular SSIDs and generates the config block to be added
diff --git a/server/site_tests/network_WiFi_ChaosConfigSniffer/network_WiFi_ChaosConfigSniffer.py b/server/site_tests/network_WiFi_ChaosConfigSniffer/network_WiFi_ChaosConfigSniffer.py
index 32fe2bc..6297640 100644
--- a/server/site_tests/network_WiFi_ChaosConfigSniffer/network_WiFi_ChaosConfigSniffer.py
+++ b/server/site_tests/network_WiFi_ChaosConfigSniffer/network_WiFi_ChaosConfigSniffer.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.debug b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.debug
deleted file mode 100644
index db41e27..0000000
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.debug
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, wiley, jabele'
-NAME = 'network_WiFi_ChaosConnectDisconnect.debug'
-TIME = 'LONG'
-TEST_TYPE = 'server'
-
-DOC = """
-This script iterates through all of the access points in the AP compatibility
-lab and has a chrome device connect to each in series. This test must be
-performed in the AP compatibility lab.
-"""
-
-from autotest_lib.server.cros.ap_configurators import ap_spec
-from autotest_lib.server.cros.chaos_lib import chaos_runner
-
-def run_chaos_debug(machine):
-    host = hosts.create_host(machine)
-    # Test with these two APs
-    debug_aps = ['chromeos3-row2-rack1-host2', 'chromeos3-row2-rack1-host3']
-    ap_specs = [ap_spec.APSpec(band=ap_spec.BAND_2GHZ, hostnames=debug_aps),
-                ap_spec.APSpec(band=ap_spec.BAND_5GHZ, hostnames=debug_aps)]
-    for spec in ap_specs:
-        runner = chaos_runner.ChaosRunner(
-                'network_WiFi_ChaosConnectDisconnect', host, spec)
-        runner.run(job, batch_size=15, tries=2)
-
-
-parallel_simple(run_chaos_debug, machines)
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.jetstream b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.jetstream
deleted file mode 100644
index c6887d8..0000000
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.jetstream
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, wiley'
-NAME = 'network_WiFi_ChaosConnectDisconnect.jetstream'
-TIME = 'LONG'
-TEST_TYPE = 'server'
-
-DOC = """
-This script iterates through all jetstream access points in the AP compatibility
-lab and has a chrome device connect to each in series. This test must be
-performed in the AP compatibility lab.
-"""
-
-from autotest_lib.server.cros.ap_configurators import ap_spec
-from autotest_lib.server.cros.chaos_lib import chaos_runner
-
-def run_chaos_jetstream(machine):
-    host = hosts.create_host(machine)
-    # Pick up just Jetstream APs on both 2.4 and 5 GHz bands
-    jetstream_ap = ['chromeos3-row2-rack3-host9']
-    ap_specs = [ap_spec.APSpec(channel=5, hostnames=jetstream_ap,
-                               security=ap_spec.SECURITY_TYPE_WPA2PSK),
-                ap_spec.APSpec(channel=48, hostnames=jetstream_ap,
-                               security=ap_spec.SECURITY_TYPE_WPA2PSK)]
-    for spec in ap_specs:
-        runner = chaos_runner.ChaosRunner(
-                'network_WiFi_ChaosConnectDisconnect', host, spec)
-        runner.run(job, tries=10)
-
-
-parallel_simple(run_chaos_jetstream, machines)
-
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.local b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.local
deleted file mode 100644
index 925d3f9..0000000
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.local
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, wiley, jabele'
-NAME = 'network_WiFi_ChaosConnectDisconnect.local'
-TIME = 'LONG'
-TEST_TYPE = 'server'
-
-DOC = """
-This script iterates through all of the access points in the AP compatibility
-lab and has a chrome device connect to each in series. This test must be
-performed in the AP compatibility lab.
-"""
-
-from autotest_lib.server.cros.ap_configurators import ap_spec
-from autotest_lib.server.cros.chaos_lib import chaos_runner
-
-def run_chaos_debug(machine):
-    host = hosts.create_host(machine)
-    # List of specific APs to debug. Please verify the APs indeed support PSK.
-    # Add your APs here.
-    debug_aps = ['chromeos3-row7-rack1-host2',]
-    # IP or DNS name of host to use as a packet capturing device.
-    capturer_hostname = 'chromeos3-row7-rack1-host1.cros'
-    ap_specs = [ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPA2PSK,
-                               hostnames=debug_aps,
-                               band=ap_spec.BAND_2GHZ,
-                               lab_ap=False)]
-    for spec in ap_specs:
-        runner = chaos_runner.ChaosRunner(
-                'network_WiFi_ChaosConnectDisconnect', host, spec)
-        runner.run(job, batch_size=2, tries=2,
-                   capturer_hostname=capturer_hostname)
-
-
-parallel_simple(run_chaos_debug, machines)
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.open b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.open
deleted file mode 100644
index 6fe97cd..0000000
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.open
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, wiley, jabele'
-NAME = 'network_WiFi_ChaosConnectDisconnect.open'
-TIME = 'LONG'
-TEST_TYPE = 'server'
-
-DOC = """
-This script iterates through all of the access points in the AP compatibility
-lab and has a chrome device connect to each in series. This test must be
-performed in the AP compatibility lab.
-"""
-
-from autotest_lib.server.cros.ap_configurators import ap_spec
-from autotest_lib.server.cros.chaos_lib import chaos_runner
-
-def run_chaos_open(machine):
-    host = hosts.create_host(machine)
-    # Test with no security on both 2.4 and 5 GHz bands
-    ap_specs = [ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_DISABLED,
-                               band=ap_spec.BAND_2GHZ),
-                ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_DISABLED,
-                               band=ap_spec.BAND_5GHZ)]
-    for spec in ap_specs:
-        runner = chaos_runner.ChaosRunner(
-                'network_WiFi_ChaosConnectDisconnect', host, spec)
-        runner.run(job)
-
-
-parallel_simple(run_chaos_open, machines)
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.open_n b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.open_n
deleted file mode 100644
index 223f679..0000000
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.open_n
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, wiley, jabele'
-NAME = 'network_WiFi_ChaosConnectDisconnect.open_n'
-TIME = 'LONG'
-TEST_TYPE = 'server'
-ATTRIBUTES = 'suite:wifi_interop, suite:android_wifi_interop'
-DEPENDENCIES = 'chaos_nightly, chaos_dut'
-
-DOC = """
-This script iterates through all of the access points in the AP compatibility
-lab and has a chrome device connect to each in series. This test must be
-performed in the AP compatibility lab.
-"""
-
-from autotest_lib.server.cros.ap_configurators import ap_spec
-from autotest_lib.server.cros.chaos_lib import chaos_runner
-
-def run_chaos_open(machine):
-    host = hosts.create_host(machine)
-    # Test with no security, mode N and on both 2.4 and 5 GHz bands
-    ap_specs = [ap_spec.APSpec(mode=ap_spec.MODE_N,
-                               security=ap_spec.SECURITY_TYPE_DISABLED,
-                               band=ap_spec.BAND_2GHZ),
-                ap_spec.APSpec(mode=ap_spec.MODE_N,
-                               security=ap_spec.SECURITY_TYPE_DISABLED,
-                               band=ap_spec.BAND_5GHZ)]
-    for spec in ap_specs:
-        runner = chaos_runner.ChaosRunner(
-                'network_WiFi_ChaosConnectDisconnect', host, spec)
-        runner.run(job)
-
-
-parallel_simple(run_chaos_open, machines)
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static
deleted file mode 100644
index f67b5b7..0000000
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'tienchang'
-NAME = 'network_WiFi_ChaosConnectDisconnect.static'
-TIME = 'LONG'
-TEST_TYPE = 'server'
-ATTRIBUTES = 'suite:wifi_interop_static'
-DEPENDENCIES = 'casey_dut'
-
-DOC = """
-This script iterates through all of the access points configured as static, both
-open and wpa2psk. This test will run in Casey labs.
-"""
-
-from autotest_lib.server.cros.ap_configurators import ap_spec
-from autotest_lib.server.cros.chaos_lib import static_runner
-
-def run_chaos_static(machine):
-    host = hosts.create_host(machine)
-    # Test all static APs
-    ap_specs = [ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_DISABLED,
-                               band=ap_spec.BAND_2GHZ,
-                               configurator_type=ap_spec.CONFIGURATOR_STATIC),
-                ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_DISABLED,
-                               band=ap_spec.BAND_5GHZ,
-                               configurator_type=ap_spec.CONFIGURATOR_STATIC),
-                ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPA2PSK,
-                               band=ap_spec.BAND_2GHZ,
-                               configurator_type=ap_spec.CONFIGURATOR_STATIC),
-                ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPA2PSK,
-                               band=ap_spec.BAND_5GHZ,
-                               configurator_type=ap_spec.CONFIGURATOR_STATIC)]
-
-    for spec in ap_specs:
-        runner = static_runner.StaticRunner(
-                'network_WiFi_ChaosConnectDisconnect', host, spec)
-        runner.run(job)
-
-parallel_simple(run_chaos_static, machines)
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static_2Ghz b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static_2Ghz
index 3b2779d..e4b5e6f 100644
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static_2Ghz
+++ b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static_2Ghz
@@ -9,6 +9,7 @@
 ATTRIBUTES = 'suite:wifi_interop'
 DEPENDENCIES = 'chaos_dut'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 
 DOC = """
 This script iterates through all 2GHz access points in the Chaos chamber.
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static_5Ghz b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static_5Ghz
index a7d79f8..0a48605 100644
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static_5Ghz
+++ b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.static_5Ghz
@@ -9,6 +9,7 @@
 ATTRIBUTES = 'suite:wifi_interop'
 DEPENDENCIES = 'chaos_dut'
 MAX_RESULT_SIZE_KB = 512000
+PY_VERSION = 3
 
 DOC = """
 This script iterates through all 5GHz access points in the Chaos chamber.
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.wpa2psk b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.wpa2psk
deleted file mode 100644
index 83d740a..0000000
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.wpa2psk
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, wiley, jabele'
-NAME = 'network_WiFi_ChaosConnectDisconnect.wpa2psk'
-TIME = 'LONG'
-TEST_TYPE = 'server'
-ATTRIBUTES = ('suite:wifi_interop, suite:android_wifi_interop, '
-              'suite:wifi_interop_wpa2')
-DEPENDENCIES = 'chaos_nightly, chaos_dut'
-
-DOC = """
-This script iterates through all of the access points in the AP compatibility
-lab and has a chrome device connect to each in series. This test must be
-performed in the AP compatibility lab.
-"""
-
-from autotest_lib.server.cros.ap_configurators import ap_spec
-from autotest_lib.server.cros.chaos_lib import chaos_runner
-
-def run_chaos_wpa2psk(machine):
-    host = hosts.create_host(machine)
-    # Test with WPA2PSK on both 2.4 and 5 GHz bands
-    ap_specs = [ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPA2PSK,
-                               band=ap_spec.BAND_2GHZ),
-                ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPA2PSK,
-                               band=ap_spec.BAND_5GHZ),
-                # Non-US models, which are all static
-                ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPA2PSK,
-                               band=ap_spec.BAND_5GHZ,
-                               channel=48,
-                               configurator_type=ap_spec.CONFIGURATOR_STATIC)]
-    for spec in ap_specs:
-        runner = chaos_runner.ChaosRunner(
-                'network_WiFi_ChaosConnectDisconnect', host, spec)
-        runner.run(job)
-
-
-parallel_simple(run_chaos_wpa2psk, machines)
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.wpapsk b/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.wpapsk
deleted file mode 100644
index a5031a7..0000000
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/control.wpapsk
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, wiley, jabele'
-NAME = 'network_WiFi_ChaosConnectDisconnect.wpapsk'
-TIME = 'LONG'
-TEST_TYPE = 'server'
-
-DOC = """
-This script iterates through all of the access points in the AP compatibility
-lab and has a chrome device connect to each in series. This test must be
-performed in the AP compatibility lab.
-"""
-
-from autotest_lib.server.cros.ap_configurators import ap_spec
-from autotest_lib.server.cros.chaos_lib import chaos_runner
-
-def run_chaos_wpapsk(machine):
-    host = hosts.create_host(machine)
-    # Test with WPAPSK on both 2.4 and 5 GHz bands
-    ap_specs = [ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPAPSK,
-                               band=ap_spec.BAND_2GHZ),
-                ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPAPSK,
-                               band=ap_spec.BAND_5GHZ),
-                # Non-US models, which are all static.
-                ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPAPSK,
-                               band=ap_spec.BAND_5GHZ,
-                               channel=48,
-                               configurator_type=ap_spec.CONFIGURATOR_STATIC)]
-    for spec in ap_specs:
-        runner = chaos_runner.ChaosRunner(
-                'network_WiFi_ChaosConnectDisconnect', host, spec)
-        runner.run(job)
-
-
-parallel_simple(run_chaos_wpapsk, machines)
diff --git a/server/site_tests/network_WiFi_ChaosConnectDisconnect/network_WiFi_ChaosConnectDisconnect.py b/server/site_tests/network_WiFi_ChaosConnectDisconnect/network_WiFi_ChaosConnectDisconnect.py
index 7543aab..cea0d5e 100644
--- a/server/site_tests/network_WiFi_ChaosConnectDisconnect/network_WiFi_ChaosConnectDisconnect.py
+++ b/server/site_tests/network_WiFi_ChaosConnectDisconnect/network_WiFi_ChaosConnectDisconnect.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/network_WiFi_ChaosLongConnect/control.debug b/server/site_tests/network_WiFi_ChaosLongConnect/control.debug
index d6f128d..dd3da47 100644
--- a/server/site_tests/network_WiFi_ChaosLongConnect/control.debug
+++ b/server/site_tests/network_WiFi_ChaosLongConnect/control.debug
@@ -2,10 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 AUTHOR = 'asnagarajan, wiley, jabele'
 NAME = 'network_WiFi_ChaosLongConnect.debug'
 TIME = 'LONG'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This script iterates through all of the access points in the AP compatibility
diff --git a/server/site_tests/network_WiFi_ChaosLongConnect/control.long b/server/site_tests/network_WiFi_ChaosLongConnect/control.long
index 6928acf..247ac55 100644
--- a/server/site_tests/network_WiFi_ChaosLongConnect/control.long
+++ b/server/site_tests/network_WiFi_ChaosLongConnect/control.long
@@ -2,10 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 AUTHOR = 'asnagarajan, wiley, jabele'
 NAME = 'network_WiFi_ChaosLongConnect.long'
 TIME = 'LONG'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This script iterates through all of the access points in the AP compatibility
diff --git a/server/site_tests/network_WiFi_ChaosLongConnect/control.netperf_udp_downstream b/server/site_tests/network_WiFi_ChaosLongConnect/control.netperf_udp_downstream
index 373a33c..bed6c23 100644
--- a/server/site_tests/network_WiFi_ChaosLongConnect/control.netperf_udp_downstream
+++ b/server/site_tests/network_WiFi_ChaosLongConnect/control.netperf_udp_downstream
@@ -2,10 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 AUTHOR = 'asnagarajan, wiley, jabele'
 NAME = 'network_WiFi_ChaosLongConnect.netperf_udp_downstream'
 TIME = 'LONG'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This script iterates through all of the access points in the AP compatibility
diff --git a/server/site_tests/network_WiFi_ChaosLongConnect/control.netperf_udp_upstream b/server/site_tests/network_WiFi_ChaosLongConnect/control.netperf_udp_upstream
index d12f5a3..63429a4 100644
--- a/server/site_tests/network_WiFi_ChaosLongConnect/control.netperf_udp_upstream
+++ b/server/site_tests/network_WiFi_ChaosLongConnect/control.netperf_udp_upstream
@@ -2,10 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 AUTHOR = 'asnagarajan, wiley, jabele'
 NAME = 'network_WiFi_ChaosLongConnect.netperf_udp_upstream'
 TIME = 'LONG'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This script iterates through all of the access points in the AP compatibility
diff --git a/server/site_tests/network_WiFi_ChaosLongConnect/control.suspend b/server/site_tests/network_WiFi_ChaosLongConnect/control.suspend
index 8992e76..0d65140 100644
--- a/server/site_tests/network_WiFi_ChaosLongConnect/control.suspend
+++ b/server/site_tests/network_WiFi_ChaosLongConnect/control.suspend
@@ -2,10 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to migrate: go/tauto-py3-migration
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
 AUTHOR = 'asnagarajan, wiley, jabele'
 NAME = 'network_WiFi_ChaosLongConnect.suspend'
 TIME = 'LONG'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = """
 This script iterates through all of the access points in the AP compatibility
diff --git a/server/site_tests/network_WiFi_ChaosLongConnect/network_WiFi_ChaosLongConnect.py b/server/site_tests/network_WiFi_ChaosLongConnect/network_WiFi_ChaosLongConnect.py
index ef759fe..5949610 100644
--- a/server/site_tests/network_WiFi_ChaosLongConnect/network_WiFi_ChaosLongConnect.py
+++ b/server/site_tests/network_WiFi_ChaosLongConnect/network_WiFi_ChaosLongConnect.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/network_WiFi_ChromeEndToEnd/control b/server/site_tests/network_WiFi_ChromeEndToEnd/control
index 51f8f79..1688ef6 100644
--- a/server/site_tests/network_WiFi_ChromeEndToEnd/control
+++ b/server/site_tests/network_WiFi_ChromeEndToEnd/control
@@ -7,6 +7,7 @@
 NAME = 'network_WiFi_ChromeEndToEnd'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 WiFi_ChromeEndToEnd test configures two APs and runs the client side
diff --git a/server/site_tests/network_WiFi_ChromeEndToEnd/control.autoconnectWiFi b/server/site_tests/network_WiFi_ChromeEndToEnd/control.autoconnectWiFi
index c02c436..2557cd4 100644
--- a/server/site_tests/network_WiFi_ChromeEndToEnd/control.autoconnectWiFi
+++ b/server/site_tests/network_WiFi_ChromeEndToEnd/control.autoconnectWiFi
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = ('suite:wifi_endtoend, suite:wifi_release')
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 Configures wifi APs and tests that the DUT autoconnects to a previously
diff --git a/server/site_tests/network_WiFi_ChromeEndToEnd/control.enableDisableWiFi b/server/site_tests/network_WiFi_ChromeEndToEnd/control.enableDisableWiFi
index 9933bec..7ae68af 100644
--- a/server/site_tests/network_WiFi_ChromeEndToEnd/control.enableDisableWiFi
+++ b/server/site_tests/network_WiFi_ChromeEndToEnd/control.enableDisableWiFi
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = ('suite:wifi_endtoend, suite:wifi_release')
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 WiFi_ChromeEndToEnd test configures two APs and runs the client side
diff --git a/server/site_tests/network_WiFi_ChromeEndToEnd/control.findVerifyWiFiNetworks b/server/site_tests/network_WiFi_ChromeEndToEnd/control.findVerifyWiFiNetworks
index be1c583..e3d1d8a 100644
--- a/server/site_tests/network_WiFi_ChromeEndToEnd/control.findVerifyWiFiNetworks
+++ b/server/site_tests/network_WiFi_ChromeEndToEnd/control.findVerifyWiFiNetworks
@@ -6,8 +6,9 @@
 TIME = 'SHORT'
 NAME = 'network_WiFi_ChromeEndToEnd.findVerifyWiFiNetworks'
 TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_endtoend, suite:wifi_release')
+ATTRIBUTES = ('suite:wifi_endtoend, suite:wifi_release, suite:infra_qual_wifi')
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 WiFi_ChromeEndToEnd test configures two APs and runs the client side
diff --git a/server/site_tests/network_WiFi_ChromeEndToEnd/control.transitionWiFiNetworks b/server/site_tests/network_WiFi_ChromeEndToEnd/control.transitionWiFiNetworks
index c4914e1..a2b2a5e 100644
--- a/server/site_tests/network_WiFi_ChromeEndToEnd/control.transitionWiFiNetworks
+++ b/server/site_tests/network_WiFi_ChromeEndToEnd/control.transitionWiFiNetworks
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = ('suite:wifi_endtoend, suite:wifi_release')
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 WiFi_ChromeEndToEnd test configures two APs and runs the client side
diff --git a/server/site_tests/network_WiFi_ChromeEndToEnd/network_WiFi_ChromeEndToEnd.py b/server/site_tests/network_WiFi_ChromeEndToEnd/network_WiFi_ChromeEndToEnd.py
index 9b07556..f0ff4d9 100644
--- a/server/site_tests/network_WiFi_ChromeEndToEnd/network_WiFi_ChromeEndToEnd.py
+++ b/server/site_tests/network_WiFi_ChromeEndToEnd/network_WiFi_ChromeEndToEnd.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/network_WiFi_CliqueConnectDisconnect/control.debug b/server/site_tests/network_WiFi_CliqueConnectDisconnect/control.debug
deleted file mode 100644
index 09305f8..0000000
--- a/server/site_tests/network_WiFi_CliqueConnectDisconnect/control.debug
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rpius'
-NAME = 'network_WiFi_CliqueConnectDisconnect.debug'
-TIME = 'SHORT'
-TEST_TYPE = 'server'
-
-DOC = """
-This test makes 2 sets of DUTs repeatedly (2 runs) connect/disconnect
-simultaneously to 2 APs.
-"""
-
-from autotest_lib.server.cros.ap_configurators import ap_spec
-from autotest_lib.server.cros.clique_lib import clique_dut_locker
-from autotest_lib.server.cros.clique_lib import clique_runner
-
-def run_clique_debug(machine):
-    host = hosts.create_host(machine)
-
-    # Test with these two APs
-    debug_aps = ['chromeos3-row2-rack1-host3', 'chromeos3-row2-rack1-host7']
-    ap_specs = [ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPA2PSK,
-                               band=ap_spec.BAND_2GHZ,
-                               hostnames=debug_aps),
-                ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPA2PSK,
-                               band=ap_spec.BAND_5GHZ,
-                               hostnames=debug_aps)]
-
-    # Test with 2 DUT's in a set
-    dut_pool_spec = clique_dut_locker.DUTPoolSpec()
-    dut_set_spec1 = clique_dut_locker.DUTSetSpec()
-    dut_set_spec2 = clique_dut_locker.DUTSetSpec()
-    dut_spec1 = clique_dut_locker.DUTSpec(
-            host_name='chromeos1-row1-rack3-host4.cros')
-    dut_spec2 = clique_dut_locker.DUTSpec(
-            host_name='chromeos1-row1-rack10-host2.cros')
-    dut_set_spec1.append(dut_spec1)
-    dut_set_spec2.append(dut_spec2)
-    dut_pool_spec.extend([dut_set_spec1, dut_set_spec2])
-
-    runner = clique_runner.CliqueRunner(
-            'network_WiFi_CliqueConnectDisconnect', dut_pool_spec, ap_specs)
-    runner.run(job, tries=2)
-
-
-parallel_simple(run_clique_debug, machines)
diff --git a/server/site_tests/network_WiFi_CliqueConnectDisconnect/network_WiFi_CliqueConnectDisconnect.py b/server/site_tests/network_WiFi_CliqueConnectDisconnect/network_WiFi_CliqueConnectDisconnect.py
deleted file mode 100644
index 1736dd0..0000000
--- a/server/site_tests/network_WiFi_CliqueConnectDisconnect/network_WiFi_CliqueConnectDisconnect.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import logging
-
-import common
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-from autotest_lib.server.cros.clique_lib import clique_dut_control
-
-
-class network_WiFi_CliqueConnectDisconnect(test.test):
-    """ Dynamic Clique test to connect and disconnect to an AP. """
-
-    version = 1
-
-
-    def run_once(self, capturer, capturer_frequency, capturer_ht_type,
-                 dut_pool, assoc_params_list, tries, debug_info,
-                 conn_workers):
-        """ Main entry function for autotest.
-
-        @param capturer: a packet capture device
-        @param capturer_frequency: integer channel frequency in MHz.
-        @param capturer_ht_type: string specifier of channel HT type.
-        @param dut_pool: the DUT pool to be used for the test. It is a 2D list
-                         of DUTObjects.
-        @param assoc_params_list: a list of AssociationParameters objects.
-        @param tries: an integer, number of connection attempts.
-        @param debug_info: a string of additional info to display on failure
-        @param conn_workers: List of ConnectionWorkerAbstract objects, to
-                             run extra work after successful connection.
-        """
-        # We need 2 sets in the pool for this test.
-        if len(dut_pool) != 2:
-            raise error.TestFail("Incorrect DUT pool configuration.")
-        # We need 2 AP's in the pool for this test.
-        if len(assoc_params_list) != 2:
-            raise error.TestFail("Incorrect AP pool configuration.")
-        # No connection workers expected for this test.
-        if conn_workers:
-            raise error.TestFail("Incorrect connection worker configuration.")
-        else:
-            conn_workers = [None, None]
-
-        # Both DUT sets are performing connect/disconnect.
-        dut_role_classes = [clique_dut_control.DUTRoleConnectDisconnect,
-                            clique_dut_control.DUTRoleConnectDisconnect]
-
-        test_params = { 'capturer': capturer,
-                        'capturer_frequency': capturer_frequency,
-                        'capturer_ht_type': capturer_ht_type,
-                        'debug_info': debug_info }
-        error_results = clique_dut_control.execute_dut_pool(
-                dut_pool, dut_role_classes, assoc_params_list, conn_workers,
-                test_params)
-        if error_results:
-            logging.debug('Debug info: %s', debug_info)
-            raise error.TestFail("Failed test. Error Results: %s" %
-                                 str(error_results))
diff --git a/server/site_tests/network_WiFi_CliqueLongConnect/control.debug b/server/site_tests/network_WiFi_CliqueLongConnect/control.debug
deleted file mode 100644
index 032ea77..0000000
--- a/server/site_tests/network_WiFi_CliqueLongConnect/control.debug
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rpius'
-NAME = 'network_WiFi_CliqueLongConnect.debug'
-TIME = 'SHORT'
-TEST_TYPE = 'server'
-
-DOC = """
-This test makes 2 sets of DUTs repeatedly (2 runs) connect simultaneously to
-2 different APs and keep each connection alive for 30 seconds.
-"""
-
-from autotest_lib.server.cros.ap_configurators import ap_spec
-from autotest_lib.server.cros.clique_lib import clique_dut_locker
-from autotest_lib.server.cros.clique_lib import clique_runner
-
-def run_clique_debug(machine):
-    host = hosts.create_host(machine)
-
-    # Test with these two APs
-    debug_aps = ['chromeos3-row2-rack1-host3', 'chromeos3-row2-rack1-host7']
-    ap_specs = [ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPA2PSK,
-                               band=ap_spec.BAND_2GHZ,
-                               hostnames=debug_aps),
-                ap_spec.APSpec(security=ap_spec.SECURITY_TYPE_WPA2PSK,
-                               band=ap_spec.BAND_5GHZ,
-                               hostnames=debug_aps)]
-
-    # Test with 2 DUTs in a set
-    dut_pool_spec = clique_dut_locker.DUTPoolSpec()
-    dut_set_spec1 = clique_dut_locker.DUTSetSpec()
-    dut_set_spec2 = clique_dut_locker.DUTSetSpec()
-    dut_spec1 = clique_dut_locker.DUTSpec(
-            host_name='chromeos1-row1-rack3-host4.cros')
-    dut_spec2 = clique_dut_locker.DUTSpec(
-            host_name='chromeos1-row1-rack10-host2.cros')
-    dut_set_spec1.append(dut_spec1)
-    dut_set_spec2.append(dut_spec2)
-    dut_pool_spec.extend([dut_set_spec1, dut_set_spec2])
-
-    # Let's acquire 2 random connection worker devices.
-    conn_worker_hostnames = [None, None]
-
-    runner = clique_runner.CliqueRunner(
-            'network_WiFi_CliqueLongConnect', dut_pool_spec, ap_specs)
-    runner.run(job, tries=2, conn_worker_hostnames=conn_worker_hostnames)
-
-
-parallel_simple(run_clique_debug, machines)
diff --git a/server/site_tests/network_WiFi_CliqueLongConnect/network_WiFi_CliqueLongConnect.py b/server/site_tests/network_WiFi_CliqueLongConnect/network_WiFi_CliqueLongConnect.py
deleted file mode 100644
index 05d8e40..0000000
--- a/server/site_tests/network_WiFi_CliqueLongConnect/network_WiFi_CliqueLongConnect.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import logging
-
-import common
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-from autotest_lib.server.cros.clique_lib import clique_dut_control
-
-
-class network_WiFi_CliqueLongConnect(test.test):
-    """ Dynamic Clique test to connect and disconnect to an AP. """
-
-    version = 1
-
-
-    def run_once(self, capturer, capturer_frequency, capturer_ht_type,
-                 dut_pool, assoc_params_list, tries, debug_info,
-                 conn_workers):
-        """ Main entry function for autotest.
-
-        @param capturer: a packet capture device
-        @param capturer_frequency: integer channel frequency in MHz.
-        @param capturer_ht_type: string specifier of channel HT type.
-        @param dut_pool: the DUT pool to be used for the test. It is a 2D list
-                         of DUTObjects.
-        @param assoc_params_list: a list of AssociationParameters objects.
-        @param tries: an integer, number of connection attempts.
-        @param debug_info: a string of additional info to display on failure
-        @param conn_workers: List of ConnectionWorkerAbstract objects, to
-                             run extra work after successful connection.
-        """
-        # We need 2 sets in the pool for this test.
-        if len(dut_pool) != 2:
-            raise error.TestFail("Incorrect DUT pool configuration.")
-        # We need 2 APs in the pool for this test.
-        if len(assoc_params_list) != 2:
-            raise error.TestFail("Incorrect AP pool configuration.")
-        # We need 2 connection workers in the pool for this test.
-        if len(conn_workers) != 2:
-            raise error.TestFail("Incorrect connection worker configuration.")
-
-        # Both DUT sets are performing long connects.
-        dut_role_classes = [clique_dut_control.DUTRoleConnectDuration,
-                            clique_dut_control.DUTRoleConnectDuration]
-
-        test_params = { 'capturer': capturer,
-                        'capturer_frequency': capturer_frequency,
-                        'capturer_ht_type': capturer_ht_type,
-                        'debug_info': debug_info }
-        error_results = clique_dut_control.execute_dut_pool(
-                dut_pool, dut_role_classes, assoc_params_list, conn_workers,
-                test_params)
-        if error_results:
-            logging.debug('Debug info: %s', debug_info)
-            raise error.TestFail("Failed test. Error Results: %s" %
-                                 str(error_results))
diff --git a/server/site_tests/network_WiFi_ConnectionIdentifier/control b/server/site_tests/network_WiFi_ConnectionIdentifier/control
deleted file mode 100644
index 9b618d9..0000000
--- a/server/site_tests/network_WiFi_ConnectionIdentifier/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'zqiu, wiley, pstew, quiche'
-NAME = 'network_WiFi_ConnectionIdentifier'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test is designed to verify the correctness of the connection identifier
-generated by shill: the connection identifier should be the same when
-connecting to the same network, and different when connecting to a different
-network.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_ConnectionIdentifier',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_ConnectionIdentifier/network_WiFi_ConnectionIdentifier.py b/server/site_tests/network_WiFi_ConnectionIdentifier/network_WiFi_ConnectionIdentifier.py
deleted file mode 100644
index 2353096..0000000
--- a/server/site_tests/network_WiFi_ConnectionIdentifier/network_WiFi_ConnectionIdentifier.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_ConnectionIdentifier(wifi_cell_test_base.WiFiCellTestBase):
-    """Test for verifying connection identifier."""
-    version = 1
-
-    CONNECTION_ID_TIMEOUT_SECS = 10
-    SERVICE_PROPERTY_CONNECTION_ID = 'ConnectionId'
-
-    def _attempt_get_service_id(self, ssid):
-        properties = self.context.client.shill.get_service_properties(ssid)
-        logging.debug('Service properties are: %s', properties)
-        self._connection_id = properties[self.SERVICE_PROPERTY_CONNECTION_ID]
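-        # A ConnectionId of zero means shill has not assigned one yet; the
-        # caller polls this helper until the value becomes non-zero.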
-        return self._connection_id != 0
-
-
-    def _get_service_connection_id(self, ssid):
-        """Get the connection ID for a service.
-
-        Polls a service's properties until ConnectionId becomes non-zero,
-        or a timeout occurs.
-
-        @param ssid: SSID of the service of interest.
-        @raise TestFail if a timeout occurs.
-        @return ConnectionId of the current service.
-        """
-        utils.poll_for_condition(
-                condition=lambda: self._attempt_get_service_id(ssid),
-                exception=error.TestFail('ConnectionId remained zero'),
-                timeout=self.CONNECTION_ID_TIMEOUT_SECS,
-                sleep_interval=1)
-        return self._connection_id
-
-
-    def _connect(self, ssid, expected_connection_id=None):
-        """Connect to an AP, and verify connection ID if it is specified.
-
-        @param ssid: SSID of the AP.
-        @param expected_connection_id: Expected connection ID.
-        @return ConnectionId of the new connection.
-        """
-        client_conf = xmlrpc_datatypes.AssociationParameters(ssid)
-        self.context.assert_connect_wifi(client_conf)
-        connection_id = self._get_service_connection_id(ssid)
-        if (expected_connection_id is not None and
-                expected_connection_id != connection_id):
-            raise error.TestFail(
-              'Expected connection ID %s, but got %s' % (
-                expected_connection_id, connection_id))
-        return connection_id
-
-
-    def run_once(self):
-        """Test to verify connection id, which depends only on the network
-        (gateway) that the AP is connected to."""
-
-        # Configure two APs which will be automatically assigned different
-        # SSIDs. Each AP instance is connected to specific gateway.
-        router_conf = hostap_config.HostapConfig(channel=6)
-        self.context.configure(router_conf)
-        self.context.configure(router_conf, multi_interface=True)
-        ssid0 = self.context.router.get_ssid(instance=0)
-        ssid1 = self.context.router.get_ssid(instance=1)
-
-        # Connect to both APs and save the connection ID for both connections.
-        # Verify the connection ID is different for the two connections.
-        connection_id0 = self._connect(ssid0)
-        connection_id1 = self._connect(ssid1)
-        if connection_id0 == connection_id1:
-            raise error.TestFail('Connection ID should be different for two '
-                                 'different networks')
-        self.context.router.deconfig()
-
-        # Reconfigure the router with different SSIDs, and verify the
-        # connection ID sticks with the AP instance regardless of the SSID.
-        self.context.configure(router_conf)
-        self.context.configure(router_conf, multi_interface=True)
-
-        # Verify SSID is different
-        if (self.context.router.get_ssid(instance=0) == ssid0 or
-            self.context.router.get_ssid(instance=1) == ssid1):
-            raise error.TestError('SSID should be different from previous '
-                                  'configuration')
-
-        # Connect and verify connection ID stays the same for the same
-        # AP instance.
-        self._connect(self.context.router.get_ssid(instance=0),
-                      expected_connection_id=connection_id0)
-        self._connect(self.context.router.get_ssid(instance=1),
-                      expected_connection_id=connection_id1)
diff --git a/server/site_tests/network_WiFi_DTIMPeriod/control.wifi_DTIM_period b/server/site_tests/network_WiFi_DTIMPeriod/control.wifi_DTIM_period
deleted file mode 100644
index 0e3c87f..0000000
--- a/server/site_tests/network_WiFi_DTIMPeriod/control.wifi_DTIM_period
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_DTIMPeriod.wifi_DTIM_period'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ''
-DEPENDENCIES = 'wificell'
-
-DOC = """
-Purpose: This test verifies that the DTIM period set on the AP was
-successfully adopted by the DUT.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_DTIMPeriod',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_DTIMPeriod/network_WiFi_DTIMPeriod.py b/server/site_tests/network_WiFi_DTIMPeriod/network_WiFi_DTIMPeriod.py
deleted file mode 100644
index a2f0793..0000000
--- a/server/site_tests/network_WiFi_DTIMPeriod/network_WiFi_DTIMPeriod.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros.network import iw_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_DTIMPeriod(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that we understand the routers negotiated DTIM period."""
-    version = 1
-
-
-    def run_once(self):
-        """DTIM period test.
-
-        DTIM stands for delivery traffic indication message; the DTIM period
-        is the number of beacons between DTIMs.  For instance, a DTIM period
-        of 1 would indicate that every beacon should have a DTIM element.
-        The default DTIM period value is 2.
-
-        This flag is used in combination with powersave mode as follows:
-        1) A client goes into powersave mode and notifies the router.
-        2) While in powersave mode, the client turns off as much as possible;
-           the AP is supposed to buffer unicast traffic.
-        3) The client wakes up to receive beacons, which may include a DTIM
-           notification.
-        4) On receiving such a notification, the client should
-           stay up to receive the pending frames.
-
-        """
-        dtim_val = 5
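-        # Use a non-default DTIM period (the default is 2) so the check below
-        # confirms the DUT adopted the value advertised by the AP.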
-        configuration = hostap_config.HostapConfig(
-                frequency=2437,
-                mode=hostap_config.HostapConfig.MODE_11G,
-                dtim_period=dtim_val)
-        self.context.configure(configuration)
-        assoc_params = xmlrpc_datatypes.AssociationParameters()
-        assoc_params.ssid = self.context.router.get_ssid()
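-        # DTIM delivery only matters while the client is in powersave mode
-        # (see the docstring above), so enable powersave before associating.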
-        self.context.client.powersave_switch(True)
-        self.context.assert_connect_wifi(assoc_params)
-        self.context.client.check_iw_link_value(
-                iw_runner.IW_LINK_KEY_DTIM_PERIOD,
-                dtim_val)
-        self.context.assert_ping_from_dut()
-        self.context.client.shill.disconnect(assoc_params.ssid)
-        self.context.client.powersave_switch(False)
-        self.context.router.deconfig()
diff --git a/server/site_tests/network_WiFi_DarkResumeActiveScans/control b/server/site_tests/network_WiFi_DarkResumeActiveScans/control
deleted file mode 100644
index 5ba4121..0000000
--- a/server/site_tests/network_WiFi_DarkResumeActiveScans/control
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'samueltan, ejcaruso'
-NAME = 'network_WiFi_DarkResumeActiveScans'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell, servo_state:WORKING, lucidsleep'
-ATTRIBUTES = 'suite:wifi_lucidsleep'
-
-DOC = """
-When wake on WiFi is supported and enabled, our system could wake up from
-suspend in dark resume for several reasons. Due to privacy concerns, we do not
-want active scans to be launched on certain types of dark resume wakes where the
-user does not expect information that can uniquely identify their system
-(e.g. MAC addresses) to be broadcast.
-
-We permit active scans to be launched when the system wakes up due to an SSID
-match or disconnect, since we might try to connect or reconnect to known
-networks during these periods, and the connection/authentication process
-inevitably broadcasts MAC addresses.
-
-However, when the system wakes up due to an RTC timer (e.g. the periodic scan
-timer or DHCP lease renewal timer) or a packet pattern match, we expect the
-system to carry out its tasks and re-suspend without active scanning and
-broadcasting MAC addresses.
-
-This test attempts to verify that no active scans are started in dark resumes
-that are triggered by RTC timers or packet pattern matches. It verifies this by
-triggering these wake events and analyzing packet captures to ensure that the
-DUT does not launch any probe requests during these dark resumes.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_DarkResumeActiveScans',
-                 host=host,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_DarkResumeActiveScans/network_WiFi_DarkResumeActiveScans.py b/server/site_tests/network_WiFi_DarkResumeActiveScans/network_WiFi_DarkResumeActiveScans.py
deleted file mode 100644
index a46b94b..0000000
--- a/server/site_tests/network_WiFi_DarkResumeActiveScans/network_WiFi_DarkResumeActiveScans.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import contextlib
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import tcpdump_analyzer
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import lucid_sleep_test_base
-from autotest_lib.server.cros.network import wifi_client
-
-class network_WiFi_DarkResumeActiveScans(
-        lucid_sleep_test_base.LucidSleepTestBase):
-    """
-    Test that no active scans are launched when the system wakes on dark resumes
-    triggered by RTC timers and packet pattern matches.
-    """
-
-    version = 1
-
-    def stop_capture_and_check_for_probe_requests(self, mac):
-        """
-        Stop packet capture and check that no probe requests launched by the DUT
-        with MAC address |mac| are found in the packet capture.  Fails the
-        test if any probe request frames are found.
-
-        @param mac: MAC address of the DUT.
-        """
-        logging.info('Stopping packet capture')
-        results = self.context.capture_host.stop_capture()
-        if len(results) != 1:
-            raise error.TestError('Expected to generate one packet '
-                                  'capture but got %d captures instead.' %
-                                  len(results))
-
-        logging.info('Analyzing packet capture...')
-        probe_req_pcap_filter = '%s and wlan.sa==%s' % (
-                tcpdump_analyzer.WLAN_PROBE_REQ_ACCEPTOR, mac)
-        # Get all the frames in chronological order.
-        frames = tcpdump_analyzer.get_frames(results[0].local_pcap_path,
-                probe_req_pcap_filter, reject_bad_fcs=False)
-        if len(frames) > 0:
-            raise error.TestFail('Packet capture contained probe requests!')
-
-        logging.info('Packet capture contained no probe requests')
-
-
-    def run_once(self):
-        """Body of the test."""
-        ap_config = hostap_config.HostapConfig(channel=1)
-        self.configure_and_connect_to_ap(ap_config)
-        self.context.assert_ping_from_dut()
-
-        client = self.context.client
-        router = self.context.router
-        dut_mac = client.wifi_mac
-        dut_ip = client.wifi_ip
-        prev_num_dark_resumes = 0
-
-        logging.info('DUT WiFi MAC = %s, IPv4 = %s', dut_mac, dut_ip)
-        logging.info('Router WiFi IPv4 = %s', router.wifi_ip)
-
-        # Trigger a wake on packet dark resume, and make sure that no probe
-        # requests were launched during this dark resume.
-        with client.wake_on_wifi_features(wifi_client.WAKE_ON_WIFI_PACKET):
-            logging.info('Set up WoWLAN')
-
-            # Wake on packets from the router.
-            client.add_wake_packet_source(self.context.router.wifi_ip)
-
-            with self.dr_utils.suspend():
-                time.sleep(wifi_client.SUSPEND_WAIT_TIME_SECONDS)
-
-                # Start capture after suspend concludes in case probe requests
-                # are launched on the way to suspend.
-                self.context.capture_host.start_capture(
-                        ap_config.frequency,
-                        width_type=ap_config.packet_capture_mode)
-
-                # Send the DUT a packet from the router to wake it up.
-                router.send_magic_packet(dut_ip, dut_mac)
-
-                # Wait for the DUT to wake up in dark resume and suspend again.
-                time.sleep(wifi_client.RECEIVE_PACKET_WAIT_TIME_SECONDS +
-                           wifi_client.DARK_RESUME_WAIT_TIME_SECONDS)
-
-                # Check for packet capture before waking the DUT with
-                # |count_dark_resumes| because probe requests might be launched
-                # during the wake.
-                self.stop_capture_and_check_for_probe_requests(mac=dut_mac)
-
-                prev_num_dark_resumes = self.dr_utils.count_dark_resumes()
-                if prev_num_dark_resumes < 1:
-                    raise error.TestFail('Client failed to wake on packet.')
-                logging.info('Client woke up on packet successfully.')
-
-        # Trigger a wake to scan RTC timer dark resume, and make sure that no
-        # probe requests were launched during this dark resume.
-        with contextlib.nested(
-                client.wake_on_wifi_features(
-                        wifi_client.WAKE_ON_WIFI_DARKCONNECT),
-                client.wake_to_scan_period_seconds(
-                        wifi_client.WAKE_TO_SCAN_PERIOD_SECONDS),
-                client.force_wake_to_scan_timer(True)):
-
-            # Bring the AP down so the DUT suspends disconnected.
-            router.deconfig_aps()
-            time.sleep(wifi_client.DISCONNECT_WAIT_TIME_SECONDS)
-
-            with self.dr_utils.suspend():
-                time.sleep(wifi_client.SUSPEND_WAIT_TIME_SECONDS)
-
-                # Start capture after suspend concludes in case probe requests
-                # are launched on the way to suspend.
-                self.context.capture_host.start_capture(
-                        ap_config.frequency,
-                        width_type=ap_config.packet_capture_mode)
-
-                # Wait for the DUT to wake to scan and suspend again.
-                time.sleep(wifi_client.WAKE_TO_SCAN_PERIOD_SECONDS +
-                           wifi_client.DARK_RESUME_WAIT_TIME_SECONDS)
-
-                # Check for packet capture before waking the DUT with
-                # |count_dark_resumes| because probe requests might be launched
-                # during the wake.
-                self.stop_capture_and_check_for_probe_requests(mac=dut_mac)
-
-                if (self.dr_utils.count_dark_resumes() -
-                    prev_num_dark_resumes) < 1:
-                    raise error.TestFail('Client failed to wake up to scan.')
-                logging.info('Client woke up to scan successfully.')
diff --git a/server/site_tests/network_WiFi_DisableEnable/control b/server/site_tests/network_WiFi_DisableEnable/control
deleted file mode 100644
index 254ccc6..0000000
--- a/server/site_tests/network_WiFi_DisableEnable/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, wiley, quiche'
-NAME = 'network_WiFi_DisableEnable'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ''
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can disable and enable the WiFi
-device and it will re-connect to the AP it was previously connected to.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_DisableEnable', host=host, raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_DisableEnable/network_WiFi_DisableEnable.py b/server/site_tests/network_WiFi_DisableEnable/network_WiFi_DisableEnable.py
deleted file mode 100644
index 572d71d..0000000
--- a/server/site_tests/network_WiFi_DisableEnable/network_WiFi_DisableEnable.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_DisableEnable(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests that disabling an enabling WiFi re-connects the system.
-
-    This test associates the DUT with an AP, then toggles the "enable"
-    flag on the WiFi device.  This should disconnect and then
-    reconnect the device.
-
-    """
-
-    version = 1
-
-    def run_once(self):
-        """Test body."""
-        # Configure the AP.
-        frequency = 2412
-        self.context.configure(hostap_config.HostapConfig(frequency=frequency))
-        router_ssid = self.context.router.get_ssid()
-
-        # Connect to the AP.
-        self.context.assert_connect_wifi(
-                xmlrpc_datatypes.AssociationParameters(ssid=router_ssid))
-
-        # Disable the interface only long enough that we're sure we have
-        # disconnected.
-        interface = self.context.client.wifi_if
-        client = self.context.client
-        with InterfaceDisableContext(client, interface):
-            success, state, elapsed_seconds = client.wait_for_service_states(
-                    router_ssid, ('idle',), 3)
-            # We should either be in the 'idle' state or not even know about
-            # this service state anymore.  The latter is more likely since
-            # the AP's service should lose visibility when the device is
-            # disabled.
-            if not success and state != 'unknown':
-                raise error.TestFail(
-                        'Failed to disconnect from "%s" after interface was '
-                        'disabled for %f seconds (state=%s)' %
-                        (router_ssid, elapsed_seconds, state))
-
-        # Expect that the DUT will re-connect to the AP.
-        self.context.wait_for_connection(router_ssid, frequency)
-        self.context.router.deconfig()
-
-
-class InterfaceDisableContext(object):
-    """Context that encapsulates disabling of a device.
-
-    This context ensures that if the test fails while the device is disabled
-    we will attempt to re-enable it before our test exits.
-
-    """
-
-    def __init__(self, client, interface):
-        self._client = client
-        self._interface = interface
-
-
-    def __enter__(self):
-        self._client.set_device_enabled(self._interface, False)
-
-
-    def __exit__(self, exception, value, traceback):
-        self._client.set_device_enabled(self._interface, True)
diff --git a/server/site_tests/network_WiFi_DisableRandomMACAddress/control b/server/site_tests/network_WiFi_DisableRandomMACAddress/control
deleted file mode 100644
index dc8c23d..0000000
--- a/server/site_tests/network_WiFi_DisableRandomMACAddress/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'ejcaruso, snanda'
-NAME = 'network_WiFi_DisableRandomMACAddress'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = ''
-
-DOC = """
-This test verifies that MAC address randomization can be
-turned on and then off, and that scans launched afterwards
-don't continue to randomize the MAC address.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_DisableRandomMACAddress',
-                 host=host,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_DisableRandomMACAddress/network_WiFi_DisableRandomMACAddress.py b/server/site_tests/network_WiFi_DisableRandomMACAddress/network_WiFi_DisableRandomMACAddress.py
deleted file mode 100644
index b41af12..0000000
--- a/server/site_tests/network_WiFi_DisableRandomMACAddress/network_WiFi_DisableRandomMACAddress.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.network import random_mac_address_test_base
-
-class network_WiFi_DisableRandomMACAddress(
-        random_mac_address_test_base.RandomMACAddressTestBase):
-    """
-    Test that the MAC address is no longer randomized during scans
-    after we toggle randomization on and then off.
-    """
-
-    version = 1
-
-    def run_once(self):
-        """Body of the test."""
-        client = self.context.client
-        dut_hw_mac = client.wifi_mac
-
-        # Enable and run a single scan to make sure the flag has been
-        # propagated to the NIC.
-        with client.mac_address_randomization(True):
-            self.request_scans(num_scans=1)
-
-        # Turn MAC address randomization off to capture probe requests.
-        with client.mac_address_randomization(False):
-            self.start_capture()
-            self.request_scans()
-            frames = self.stop_capture_and_get_probe_requests()
-
-        if not frames:
-            raise error.TestFail('No probe requests were found!')
-        elif any(frame.source_addr != dut_hw_mac for frame in frames):
-            raise error.TestFail('Found probe requests with non-hardware MAC!')
diff --git a/server/site_tests/network_WiFi_DisconnectReason/control.ap_gone b/server/site_tests/network_WiFi_DisconnectReason/control.ap_gone
deleted file mode 100644
index 4d7fb29..0000000
--- a/server/site_tests/network_WiFi_DisconnectReason/control.ap_gone
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'silberst, pstew, quiche'
-NAME = 'network_WiFi_DisconnectReason.ap_gone'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test reads (but does not verify) the DisconnectReason property
-reported by supplicant when the AP is abruptly disabled.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_DisconnectReason',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 disconnect_trigger='AP gone',
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_DisconnectReason/control.ap_send_chan_switch b/server/site_tests/network_WiFi_DisconnectReason/control.ap_send_chan_switch
deleted file mode 100644
index 80f2ad2..0000000
--- a/server/site_tests/network_WiFi_DisconnectReason/control.ap_send_chan_switch
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'silberst, pstew, quiche'
-NAME = 'network_WiFi_DisconnectReason.ap_send_chan_switch'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-
-# TODO: This test seems broken, because it passes even though DUTs with Intel
-#       WiFi drivers don't support CSA: Full discussion at
-#       https://chromium-review.googlesource.com/c/604753/.
-#       Removing from all test suites for now.
-
-ATTRIBUTES = (
-#             'suite:wifi_matfunc'
-)
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test reads (but does not verify) the DisconnectReason property
-reported by supplicant when the AP sends a channel switch message.
-"""
-
-
-from autotest_lib.server import site_linux_system
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    capabilities = [site_linux_system.LinuxSystem.CAPABILITY_SEND_MANAGEMENT_FRAME]
-    job.run_test('network_WiFi_DisconnectReason',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 disconnect_trigger='AP send channel switch',
-                 req_caps=capabilities, raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_DisconnectReason/control.deauth_client b/server/site_tests/network_WiFi_DisconnectReason/control.deauth_client
deleted file mode 100644
index 09d06f8..0000000
--- a/server/site_tests/network_WiFi_DisconnectReason/control.deauth_client
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'silberst, pstew, quiche'
-NAME = 'network_WiFi_DisconnectReason.deauth_client'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test reads (but does not verify) the DisconnectReason property
-reported by supplicant when the AP deauthenticates the client.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_DisconnectReason',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 disconnect_trigger='deauth client',
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_DisconnectReason/control.disable_client_wifi b/server/site_tests/network_WiFi_DisconnectReason/control.disable_client_wifi
deleted file mode 100644
index 062bf64..0000000
--- a/server/site_tests/network_WiFi_DisconnectReason/control.disable_client_wifi
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'silberst, pstew, quiche'
-NAME = 'network_WiFi_DisconnectReason.disable_client_wifi'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test reads (but does not verify) the DisconnectReason property
-reported by supplicant when the client disables WiFi.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_DisconnectReason',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 disconnect_trigger='disable client wifi',
-                 raw_cmdline_args=args,
-                 test='all')
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_DisconnectReason/control.switch_ap b/server/site_tests/network_WiFi_DisconnectReason/control.switch_ap
deleted file mode 100644
index bd83c3e..0000000
--- a/server/site_tests/network_WiFi_DisconnectReason/control.switch_ap
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'silberst, pstew, quiche'
-NAME = 'network_WiFi_DisconnectReason.switch_ap'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test reads (but does not verify) the DisconnectReason property
-reported by supplicant when the client switches from one AP to another.
-"""
-
-
-from autotest_lib.server import site_linux_system
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    capabilities = [site_linux_system.LinuxSystem.CAPABILITY_MULTI_AP]
-    job.run_test('network_WiFi_DisconnectReason',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 disconnect_trigger='switch AP',
-                 req_caps=capabilities, raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_DisconnectReason/network_WiFi_DisconnectReason.py b/server/site_tests/network_WiFi_DisconnectReason/network_WiFi_DisconnectReason.py
deleted file mode 100644
index 35f27e9..0000000
--- a/server/site_tests/network_WiFi_DisconnectReason/network_WiFi_DisconnectReason.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server import site_linux_system
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-from autotest_lib.server.cros.network import wifi_client
-
-
-class network_WiFi_DisconnectReason(wifi_cell_test_base.WiFiCellTestBase):
-    """Verify the client disconnects from an AP and read (but not verify)
-    the supplicant DisconnectReason for various scenarios."""
-    version = 1
-
-    INITIAL_CHANNEL = 64
-    ALT_CHANNEL = 6
-    CHANNEL_SWITCH_ATTEMPTS = 5
-    CHANNEL_SWITCH_WAIT_TIME_SEC = 3
-
-    def run_once(self, disconnect_trigger, req_caps=None):
-        """Sets up a router, connects to it, pings it and disables it to trigger
-        disconnect."""
-        configuration = hostap_config.HostapConfig(
-                channel=self.INITIAL_CHANNEL,
-                mode=hostap_config.HostapConfig.MODE_11A,
-                spectrum_mgmt_required=True)
-        if req_caps is None:
-            req_caps = []
-        self.context.router.require_capabilities(req_caps)
-        self.context.configure(configuration)
-
-        if site_linux_system.LinuxSystem.CAPABILITY_MULTI_AP in req_caps:
-            # prep alternate Access Point
-            alt_ap_config = hostap_config.HostapConfig(
-                    channel=self.ALT_CHANNEL,
-                    mode=hostap_config.HostapConfig.MODE_11N_MIXED)
-            self.context.configure(alt_ap_config, multi_interface=True)
-            alt_assoc_params = xmlrpc_datatypes.AssociationParameters()
-            alt_assoc_params.ssid = self.context.router.get_ssid(instance=1)
-
-        assoc_params = xmlrpc_datatypes.AssociationParameters()
-        assoc_params.ssid = self.context.router.get_ssid(instance=0)
-        self.context.assert_connect_wifi(assoc_params)
-        self.context.assert_ping_from_dut()
-
-        with self.context.client.assert_disconnect_event():
-            if disconnect_trigger == 'AP gone':
-                self.context.router.deconfig()
-            elif disconnect_trigger == 'deauth client':
-                self.context.router.deauth_client(self.context.client.wifi_mac)
-            elif disconnect_trigger == 'AP send channel switch':
-                for _ in range(self.CHANNEL_SWITCH_ATTEMPTS):
-                    self.context.router.send_management_frame_on_ap(
-                            'channel_switch',
-                            self.ALT_CHANNEL)
-                    time.sleep(self.CHANNEL_SWITCH_WAIT_TIME_SEC)
-            elif disconnect_trigger == 'switch AP':
-                self.context.assert_connect_wifi(alt_assoc_params)
-            elif disconnect_trigger == 'disable client wifi':
-                self.context.client.set_device_enabled(
-                        self.context.client.wifi_if, False)
-            else:
-                raise error.TestError('unknown test mode: %s' %
-                                      disconnect_trigger)
-            time.sleep(wifi_client.DISCONNECT_WAIT_TIME_SECONDS)
-
-        disconnect_reasons = self.context.client.get_disconnect_reasons()
-        if disconnect_reasons is None or len(disconnect_reasons) == 0:
-            raise error.TestFail('supplicant DisconnectReason not logged')
-        for entry in disconnect_reasons:
-            logging.info("DisconnectReason: %s", entry);
diff --git a/server/site_tests/network_WiFi_FastReconnectInDarkResume/control b/server/site_tests/network_WiFi_FastReconnectInDarkResume/control
deleted file mode 100644
index 6254535..0000000
--- a/server/site_tests/network_WiFi_FastReconnectInDarkResume/control
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'samueltan, ejcaruso'
-NAME = 'network_WiFi_FastReconnectInDarkResume'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'servo_state:WORKING, wificell, lucidsleep'
-ATTRIBUTES = 'suite:wifi_lucidsleep'
-
-DOC = """
-This test verifies that, during suspend, when a DUT is momentarily disconnected
-from an AP that is still up, the DUT will reconnect to that AP during the same
-dark resume that was triggered by the disconnect.
-
-We verify the connectivity status of the DUT on resume by parsing shill logs,
-since the delays involved in waking a DUT from suspend using the autotest
-framework make real-time checks inaccurate.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server import utils
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_FastReconnectInDarkResume',
-                 host=host,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_FastReconnectInDarkResume/network_WiFi_FastReconnectInDarkResume.py b/server/site_tests/network_WiFi_FastReconnectInDarkResume/network_WiFi_FastReconnectInDarkResume.py
deleted file mode 100644
index fdf4185..0000000
--- a/server/site_tests/network_WiFi_FastReconnectInDarkResume/network_WiFi_FastReconnectInDarkResume.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import lucid_sleep_test_base
-from autotest_lib.server.cros.network import wifi_client
-
-class network_WiFi_FastReconnectInDarkResume(
-        lucid_sleep_test_base.LucidSleepTestBase):
-    """
-    Test that we can reconnect quickly (within the span of one dark resume)
-    if we are disconnected during suspend but the AP is still up.
-    """
-
-    version = 1
-
-    def run_once(self):
-        """Body of the test"""
-        self.configure_and_connect_to_ap(hostap_config.HostapConfig(channel=1))
-        client = self.context.client
-        client_mac = client.wifi_mac
-        router = self.context.router
-
-        # Enable the dark connect feature in shill.
-        with client.wake_on_wifi_features(wifi_client.WAKE_ON_WIFI_DARKCONNECT):
-            logging.info('Set up WoWLAN')
-            prev_dark_resume_count = self.dr_utils.count_dark_resumes()
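-            # Record the baseline count so that the check after resume can
-            # verify exactly one dark resume occurred across the suspend.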
-
-            with self.dr_utils.suspend():
-                # Wait for suspend actions to finish.
-                time.sleep(wifi_client.SUSPEND_WAIT_TIME_SECONDS)
-
-                logging.info('Deauthenticating the DUT')
-                # A deauth packet should instantaneously disconnect the DUT
-                # from the AP without bringing the AP down.
-                router.deauth_client(client_mac)
-
-                # Wait for the DUT to receive the disconnect, wake in
-                # dark resume, reconnect, then suspend again.
-                time.sleep(wifi_client.DISCONNECT_WAIT_TIME_SECONDS +
-                           wifi_client.DARK_RESUME_WAIT_TIME_SECONDS)
-
-            client.check_connected_on_last_resume()
-            dark_resume_count = (self.dr_utils.count_dark_resumes() -
-                                 prev_dark_resume_count)
-            if dark_resume_count != 1:
-                # If there was more than 1 dark resume, the DUT might not have
-                # reconnected on the dark resume triggered by the disconnect.
-                raise error.TestFail('Expected exactly one dark resume')
diff --git a/server/site_tests/network_WiFi_GTK/control b/server/site_tests/network_WiFi_GTK/control
deleted file mode 100644
index 1398a91..0000000
--- a/server/site_tests/network_WiFi_GTK/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_GTK'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that we can continue to decrypt broadcast traffic while
-going through group temporal key (GTK) rekeys.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_GTK',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_GTK/network_WiFi_GTK.py b/server/site_tests/network_WiFi_GTK/network_WiFi_GTK.py
deleted file mode 100644
index 2d3040d..0000000
--- a/server/site_tests/network_WiFi_GTK/network_WiFi_GTK.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import arping_runner
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_GTK(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests that a DUT can continue receiving and sending broadcast traffic.
-
-    This test sets up an AP with artificially small GTK and GMK rekey
-    periods, so that we can test our ability to receive and correctly interpret
-    rekeys.
-
-    """
-    version = 1
-    ARPING_COUNT = 20
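-    # Deliberately short rekey intervals so that several GTK/GMK rekeys occur
-    # while the arping checks below are running.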
-    GTK_REKEY_PERIOD = 5
-    GMK_REKEY_PERIOD = 7
-
-
-    def run_once(self):
-        """Test body."""
-        wpa_config = xmlrpc_security_types.WPAConfig(
-                psk='chromeos',
-                wpa_mode=xmlrpc_security_types.WPAConfig.MODE_MIXED_WPA,
-                wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP,
-                             xmlrpc_security_types.WPAConfig.CIPHER_CCMP],
-                wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP],
-                use_strict_rekey=True,
-                wpa_gtk_rekey_period=self.GTK_REKEY_PERIOD,
-                wpa_gmk_rekey_period=self.GMK_REKEY_PERIOD)
-        ap_config = hostap_config.HostapConfig(
-                frequency=2412,
-                mode=hostap_config.HostapConfig.MODE_11G,
-                security_config=wpa_config)
-        client_conf = xmlrpc_datatypes.AssociationParameters(
-                security_config=wpa_config)
-        self.context.configure(ap_config)
-        client_conf.ssid = self.context.router.get_ssid()
-        self.context.assert_connect_wifi(client_conf)
-        # Sanity check ourselves with some unicast pings.
-        self.context.assert_ping_from_dut()
-        # Now check that network traffic goes through.
-        if (not self.check_client_can_recv_broadcast_traffic() or
-                not self.check_client_can_send_broadcast_traffic()):
-            raise error.TestFail('Not all arping passes were successful.')
-
-        self.context.client.shill.disconnect(client_conf.ssid)
-        self.context.router.deconfig()
-
-
-    def check_client_can_recv_broadcast_traffic(self):
-        """@return True iff the client can receive server broadcast packets."""
-        logging.info('Checking that broadcast traffic is received by the DUT.')
-        runner = arping_runner.ArpingRunner(self.context.get_wifi_host(),
-                                            self.context.get_wifi_if())
-        if not self.context.client.wifi_ip:
-            raise error.TestFail('Tried to arping client, but client has no '
-                                 'suitable IP address')
-
-        arping_result = runner.arping(self.context.client.wifi_ip,
-                                      count=self.ARPING_COUNT)
-        if not arping_result.was_successful():
-            logging.error('arping from server failed: %r', arping_result)
-            return False
-
-        logging.info('arping from server passed: %r', arping_result)
-        return True
-
-
-    def check_client_can_send_broadcast_traffic(self):
-        """@return True iff the server can receive client broadcast packets."""
-        logging.info('Checking that broadcast traffic may be sent by the DUT.')
-        runner = arping_runner.ArpingRunner(self.context.client.host,
-                                            self.context.client.wifi_if)
-        arping_result = runner.arping(self.context.get_wifi_addr(),
-                                      count=self.ARPING_COUNT)
-        if not arping_result.was_successful():
-            logging.error('arping from client failed: %r', arping_result)
-            return False
-
-        logging.info('arping from client passed: %r', arping_result)
-        return True
diff --git a/server/site_tests/network_WiFi_HiddenRemains/control b/server/site_tests/network_WiFi_HiddenRemains/control
deleted file mode 100644
index 4ae5d6f..0000000
--- a/server/site_tests/network_WiFi_HiddenRemains/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_HiddenRemains'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that shill continues to remember that a network is
-hidden after associating with it.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_HiddenRemains',
-                 tag=NAME,
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_HiddenRemains/network_WiFi_HiddenRemains.py b/server/site_tests/network_WiFi_HiddenRemains/network_WiFi_HiddenRemains.py
deleted file mode 100644
index 259942e..0000000
--- a/server/site_tests/network_WiFi_HiddenRemains/network_WiFi_HiddenRemains.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_HiddenRemains(wifi_cell_test_base.WiFiCellTestBase):
-    """Check that shill preserves hidden network settings after connect."""
-
-    version = 1
-
-    SERVICE_PROPERTY_HIDDEN_SSID = 'WiFi.HiddenSSID'
-
-
-    def check_hidden(self, ssid, should_be_hidden):
-        """Asserts that the network with |ssid| is a hidden network in shill.
-
-        Implicitly, we assert that shill already has an entry for the given
-        network.
-
-        @param ssid string name of network to make assertions about.
-        @param should_be_hidden bool True iff service should be marked as
-                a hidden SSID.
-
-        """
-        logging.info('Checking that %s has hidden=%r.', ssid, should_be_hidden)
-        service_properties = self.context.client.shill.get_service_properties(
-                ssid)
-        if service_properties is None:
-            raise error.TestFail('Unable to retrieve properties for service '
-                                 '%s.' % ssid)
-
-        logging.debug(service_properties)
-        is_hidden = service_properties[self.SERVICE_PROPERTY_HIDDEN_SSID]
-        if is_hidden != should_be_hidden:
-            raise error.TestFail('Expected hidden=%r, but found hidden=%r.' %
-                                 (should_be_hidden, is_hidden))
-
-        logging.info('Service had the expected hidden value.')
-
-
-    def run_once(self):
-        """Test body."""
-        ap_configs = [hostap_config.HostapConfig(
-                              ssid='a visible network',
-                              frequency=2437,
-                              mode=hostap_config.HostapConfig.MODE_11G),
-                      hostap_config.HostapConfig(
-                              hide_ssid=True,
-                              ssid='a hidden network',
-                              frequency=2437,
-                              mode=hostap_config.HostapConfig.MODE_11G)]
-        for ap_config in ap_configs:
-            self.context.configure(ap_config)
-            client_config = xmlrpc_datatypes.AssociationParameters(
-                    ssid=self.context.router.get_ssid(),
-                    is_hidden=ap_config.hide_ssid)
-            self.context.assert_connect_wifi(client_config)
-            self.context.assert_ping_from_dut()
-            # Check that shill's opinion of our hidden-ness is correct.
-            self.check_hidden(self.context.router.get_ssid(),
-                              ap_config.hide_ssid is True)
-            self.context.client.shill.disconnect(self.context.router.get_ssid())
diff --git a/server/site_tests/network_WiFi_HiddenScan/control b/server/site_tests/network_WiFi_HiddenScan/control
deleted file mode 100644
index fa78fd4..0000000
--- a/server/site_tests/network_WiFi_HiddenScan/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'quiche, wiley, pstew'
-NAME = 'network_WiFi_HiddenScan'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ''
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test scans for hidden networks. The test verifies that the 802.11
-probe frames are seen over-the-air, and that the probes include both
-a) the specific hidden SSID, and b) the broadcast SSID.
-
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_HiddenScan',
-                 host=host, raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_HiddenScan/network_WiFi_HiddenScan.py b/server/site_tests/network_WiFi_HiddenScan/network_WiFi_HiddenScan.py
deleted file mode 100644
index d01478d..0000000
--- a/server/site_tests/network_WiFi_HiddenScan/network_WiFi_HiddenScan.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import tcpdump_analyzer
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import packet_capturer
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_HiddenScan(wifi_cell_test_base.WiFiCellTestBase):
-    """Test scanning behavior when a hidden SSID is configured."""
-
-    version = 1
-
-    BROADCAST_SSID = ''
-
-    def run_once(self):
-        """Test body."""
-        ap_config = hostap_config.HostapConfig(channel=1, hide_ssid=True)
-
-        # Start capture before starting anything else.
-        self.context.capture_host.start_capture(
-                ap_config.frequency,
-                width_type=ap_config.packet_capture_mode,
-                snaplen=packet_capturer.SNAPLEN_WIFI_PROBE_REQUEST)
-
-        # We're looking for the MAC address, so disable randomization.
-        with self.context.client.mac_address_randomization(False):
-            # Set up the router and associate the client with it.
-            self.context.configure(ap_config)
-            test_ssid = self.context.router.get_ssid()
-            assoc_params = xmlrpc_datatypes.AssociationParameters(
-                    ssid=test_ssid, is_hidden=True)
-
-            self.context.assert_connect_wifi(assoc_params)
-            results = self.context.capture_host.stop_capture()
-
-        if len(results) != 1:
-            raise error.TestError('Expected to generate one packet '
-                                  'capture but got %d instead.' %
-                                  len(results))
-        probe_ssids = tcpdump_analyzer.get_probe_ssids(
-                results[0].local_pcap_path,
-                probe_sender=self.context.client.wifi_mac)
-        if len(probe_ssids) != 2:
-            raise error.TestError('Expected exactly two SSIDs, but got %s' %
-                                  probe_ssids)
-        if probe_ssids - {self.BROADCAST_SSID, test_ssid}:
-            raise error.TestError('Unexpected probe SSIDs: %s' % probe_ssids)
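
The probe-SSID check above relies on tcpdump_analyzer.get_probe_ssids() to extract probe-request SSIDs from the capture. As a rough standalone illustration of that idea (not the autotest implementation), a sketch using the third-party scapy library might look like this; the pcap path and client MAC are placeholders, and the MAC is assumed to be in lowercase colon-separated form.

from scapy.all import rdpcap, Dot11Elt, Dot11ProbeReq

def probe_ssids(pcap_path, sender_mac):
    """Return the set of SSIDs probed by sender_mac in the given capture."""
    ssids = set()
    for pkt in rdpcap(pcap_path):
        # Only consider probe requests transmitted by the client under test.
        if not pkt.haslayer(Dot11ProbeReq) or pkt.addr2 != sender_mac:
            continue
        elt = pkt.getlayer(Dot11Elt)
        while elt is not None:
            if elt.ID == 0:  # SSID element; empty info is the broadcast SSID
                ssids.add(elt.info.decode('utf-8', errors='replace'))
            elt = elt.payload.getlayer(Dot11Elt)
    return ssids

# For a hidden network the test expects exactly two entries: the broadcast
# SSID ('') and the specific hidden SSID.
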
diff --git a/server/site_tests/network_WiFi_LinkMonitorFailure/control b/server/site_tests/network_WiFi_LinkMonitorFailure/control
deleted file mode 100644
index 4e58c5a..0000000
--- a/server/site_tests/network_WiFi_LinkMonitorFailure/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'zqiu, wiley, pstew, quiche'
-NAME = 'network_WiFi_LinkMonitorFailure'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ''
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test checks how fast the DUT detects the link failure when an AP changes
-its DHCP configuration, and how fast the DUT reconnects after the failure.
-"""
-
-
-def run(machine):
-    job.run_test('network_WiFi_LinkMonitorFailure',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_LinkMonitorFailure/network_WiFi_LinkMonitorFailure.py b/server/site_tests/network_WiFi_LinkMonitorFailure/network_WiFi_LinkMonitorFailure.py
deleted file mode 100644
index 69dc13b..0000000
--- a/server/site_tests/network_WiFi_LinkMonitorFailure/network_WiFi_LinkMonitorFailure.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_LinkMonitorFailure(wifi_cell_test_base.WiFiCellTestBase):
-    """Test how a DUT behaves when the network link disappears.
-
-    Connects a DUT to an AP, then silently changes the gateway IP on the AP
-    to simulate network link disappearance. Determines the time the DUT takes
-    to detect link failure and the time for the subsequent reassociation
-    request.
-
-    """
-
-    version = 1
-
-    # Passive link monitor takes 25 seconds to fail, active link monitor
-    # takes up to 50 seconds to fail (unicast ARP failures don't count since
-    # unicast ARP gateway support is not established).
-    LINK_FAILURE_MAX_SECONDS = 80
-    REASSOCIATE_TIMEOUT_SECONDS = 10
-
-    def run_once(self):
-        """Body of the test."""
-        # Establish a connection with an AP.
-        ap_config = hostap_config.HostapConfig(channel=1)
-        self.context.configure(ap_config)
-        ssid = self.context.router.get_ssid()
-        client_config = xmlrpc_datatypes.AssociationParameters(ssid=ssid)
-        self.context.assert_connect_wifi(client_config)
-        self.context.assert_ping_from_dut()
-
-        with self.context.client.iw_runner.get_event_logger() as logger:
-            logger.start()
-
-            # Restart local server with a different address index. This will
-            # simulate the disappearance of the network link from the client's
-            # point of view.
-            logging.info("Restart local server with different address")
-            self.context.router.change_server_address_index()
-
-            # Wait long enough for link failure detection and reassociation
-            # to complete.
-            time.sleep(self.LINK_FAILURE_MAX_SECONDS +
-                       self.REASSOCIATE_TIMEOUT_SECONDS)
-            logger.stop()
-
-            # Link failure detection time.
-            link_failure_time = logger.get_time_to_disconnected()
-            if link_failure_time is None:
-                # Some drivers perform a true Reassociation, without disconnect.
-                # See also https://crbug.com/990012.
-                logging.info('Failed to disconnect within timeout; '
-                             'this is expected for some drivers')
-            elif link_failure_time > self.LINK_FAILURE_MAX_SECONDS:
-                raise error.TestFail(
-                        'Failed to detect link failure within given timeout')
-            else:
-                logging.info('Link failure detection time: %.2f seconds',
-                             link_failure_time)
-
-            # Reassociation time.
-            reassociate_time = logger.get_reassociation_time()
-            if (reassociate_time is None or
-                reassociate_time > self.REASSOCIATE_TIMEOUT_SECONDS):
-                raise error.TestFail(
-                        'Failed to reassociate within given timeout')
-            logging.info('Reassociate time: %.2f seconds', reassociate_time)
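
The pass/fail logic above hinges on two durations taken from the iw event logger: the time to the first disconnect and the time to reassociate. A rough sketch of that timing arithmetic (not the iw_runner implementation), assuming a list of (timestamp, event_name) tuples with illustrative event names:

def time_to_disconnected(events):
    """Seconds from the first logged event to the first disconnect, or None."""
    if not events:
        return None
    start = events[0][0]
    for timestamp, name in events:
        if name == 'disconnected':
            return timestamp - start
    return None

def reassociation_time(events):
    """Seconds between the first disconnect and the next connect, or None."""
    disconnect_ts = None
    for timestamp, name in events:
        if name == 'disconnected' and disconnect_ts is None:
            disconnect_ts = timestamp
        elif name == 'connected' and disconnect_ts is not None:
            return timestamp - disconnect_ts
    return None

# Example: events at 0 s (start), 30 s (disconnected) and 36 s (connected)
# give a 30 s failure-detection time (within the 80 s budget) and a 6 s
# reassociation time (within the 10 s budget).
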
diff --git a/server/site_tests/network_WiFi_MalformedProbeResp/control b/server/site_tests/network_WiFi_MalformedProbeResp/control
deleted file mode 100644
index 65e10ce..0000000
--- a/server/site_tests/network_WiFi_MalformedProbeResp/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'silberst, pstew, quiche'
-NAME = 'network_WiFi_MalformedProbeResp'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can stay connected to a router even
-if we receive malformed probe responses.  In this particular case, the
-probe response data has a tag with an incorrect length.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_MalformedProbeResp',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_MalformedProbeResp/network_WiFi_MalformedProbeResp.py b/server/site_tests/network_WiFi_MalformedProbeResp/network_WiFi_MalformedProbeResp.py
deleted file mode 100644
index c5d6cbb..0000000
--- a/server/site_tests/network_WiFi_MalformedProbeResp/network_WiFi_MalformedProbeResp.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server import site_linux_system
-from autotest_lib.server.cros.network import frame_sender
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_MalformedProbeResp(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that we can stay connected to the configured AP when receiving
-    malformed probe responses from an AP that we are not connected to."""
-    version = 1
-
-    PROBE_RESPONSE_DELAY_MSEC = 50
-    SCAN_LOOP_SEC = 60
-    SCAN_LOOP_SLEEP_SEC = 10
-    PROBE_RESPONSE_TEST_CHANNEL = 1
-
-    def run_once(self):
-        """Sets up a router, connects to it, pings it, and repeats."""
-        configuration = hostap_config.HostapConfig(
-                channel=self.PROBE_RESPONSE_TEST_CHANNEL,
-                mode=hostap_config.HostapConfig.MODE_11B)
-        self.context.router.require_capabilities(
-            [site_linux_system.LinuxSystem.CAPABILITY_SEND_MANAGEMENT_FRAME])
-
-        self.context.configure(configuration)
-        # Configure 2nd AP to inject the malformed probe responses.
-        self.context.configure(configuration, multi_interface=True)
-        client_mac = self.context.client.wifi_mac
-
-        pretest_reset_count = self.context.client.get_num_card_resets()
-        logging.debug('pretest_reset_count=%d', pretest_reset_count)
-        self.context.capture_host.start_capture(
-                configuration.frequency,
-                width_type=configuration.packet_capture_mode)
-        assoc_params = xmlrpc_datatypes.AssociationParameters()
-        assoc_params.ssid = self.context.router.get_ssid(instance=0)
-        self.context.assert_connect_wifi(assoc_params)
-        start_time = time.time()
-        rx_probe_resp_count = 0
-        with self.context.client.assert_no_disconnects():
-            with frame_sender.FrameSender(
-                    self.context.router,
-                    'probe_response',
-                    self.PROBE_RESPONSE_TEST_CHANNEL,
-                    ssid_prefix='TestingProbes',
-                    num_bss=1,
-                    frame_count=0,
-                    delay=self.PROBE_RESPONSE_DELAY_MSEC,
-                    dest_addr=client_mac,
-                    probe_resp_footer='\xdd\xb7\x00\x1a\x11\x01\x01\x02\x03',
-                    instance=1):
-                while time.time() - start_time < self.SCAN_LOOP_SEC:
-                    bss_list = self.context.client.iw_runner.scan(
-                            self.context.client.wifi_if, [2412]) or []
-                    for bss in bss_list:
-                        logging.debug('found bss: %s', bss.ssid)
-                        if bss.ssid == 'TestingProbes00000000':
-                            rx_probe_resp_count += 1
-                    time.sleep(self.SCAN_LOOP_SLEEP_SEC)
-                else:
-                    logging.debug('done scanning for networks')
-
-        logging.debug('received %s probe_responses', rx_probe_resp_count)
-        if rx_probe_resp_count == 0:
-            raise error.TestFail('Client failed to receive probe responses')
-
-        reset_count = self.context.client.get_num_card_resets()
-        logging.debug('reset count = %s', reset_count)
-        test_resets = reset_count - pretest_reset_count
-        if test_resets < 0:
-            logging.debug('logs rotated during test')
-            if reset_count > 0:
-                test_resets = reset_count
-
-        if test_resets > 0:
-            raise error.TestFail('Client reset card')
-        self.context.client.shill.disconnect(assoc_params.ssid)
-        self.context.router.deconfig()
-        self.context.capture_host.stop_capture()
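
The "tag with an incorrect length" mentioned in the control file lives in the probe_resp_footer bytes above. A small standalone sketch (plain Python, not part of the test) of why that footer is malformed:

footer = b'\xdd\xb7\x00\x1a\x11\x01\x01\x02\x03'

tag_id = footer[0]            # 0xdd: vendor-specific information element
declared_len = footer[1]      # 0xb7 == 183 bytes claimed by the element header
actual_len = len(footer) - 2  # only 7 bytes of payload actually follow

print('IE 0x%02x declares %d bytes but carries %d'
      % (tag_id, declared_len, actual_len))
# A robust client should discard the bogus element (or the whole frame)
# rather than resetting its WiFi card or dropping the current connection,
# which is exactly what the no-disconnect and reset-count checks enforce.
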
diff --git a/server/site_tests/network_WiFi_Manual/control b/server/site_tests/network_WiFi_Manual/control
deleted file mode 100644
index e6a6052..0000000
--- a/server/site_tests/network_WiFi_Manual/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_Manual'
-TIME = 'LONG'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-Start up an AP for manual testing.
-"""
-
-
-def run(machine):
-    job.run_test('network_WiFi_Manual',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Manual/network_WiFi_Manual.py b/server/site_tests/network_WiFi_Manual/network_WiFi_Manual.py
deleted file mode 100644
index 86f9211..0000000
--- a/server/site_tests/network_WiFi_Manual/network_WiFi_Manual.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import signal
-
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_Manual(wifi_cell_test_base.WiFiCellTestBase):
-    """Set up an AP, so that we can test things manually."""
-
-    version = 1
-
-
-    def run_once(self):
-        """Body of the test."""
-        self.context.configure(hostap_config.HostapConfig(
-            channel=1, ssid='manual_test',
-            mode=hostap_config.HostapConfig.MODE_11N_MIXED,
-            n_capabilities=
-            [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]))
-        signal.pause()
diff --git a/server/site_tests/network_WiFi_MultiAuth/control b/server/site_tests/network_WiFi_MultiAuth/control
deleted file mode 100644
index 4ebc5bb..0000000
--- a/server/site_tests/network_WiFi_MultiAuth/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_MultiAuth'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test is designed to check that we can successfully select
-between two networks with identical SSIDs that have different
-security parameters.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_MultiAuth',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_MultiAuth/network_WiFi_MultiAuth.py b/server/site_tests/network_WiFi_MultiAuth/network_WiFi_MultiAuth.py
deleted file mode 100644
index c40fc37..0000000
--- a/server/site_tests/network_WiFi_MultiAuth/network_WiFi_MultiAuth.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_MultiAuth(wifi_cell_test_base.WiFiCellTestBase):
-    """Test our ability to disambiguate similar networks.
-
-    Test that we can distinguish between networks with different
-    security and identical SSIDs.
-
-    """
-    version = 1
-
-    TEST_SSID = 'an ssid'
-
-
-    def run_once(self):
-        """Test body."""
-        wpa_config = xmlrpc_security_types.WPAConfig(
-                psk='chromeos',
-                wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-                wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-        ap_config0 = hostap_config.HostapConfig(
-                ssid=self.TEST_SSID,
-                frequency=2412,
-                mode=hostap_config.HostapConfig.MODE_11G,
-                scenario_name='open_network')
-        client_config0 = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.TEST_SSID)
-        ap_config1 = hostap_config.HostapConfig(
-                ssid=self.TEST_SSID,
-                frequency=2412,
-                mode=hostap_config.HostapConfig.MODE_11G,
-                security_config=wpa_config,
-                scenario_name='wpa_network')
-        client_config1 = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.TEST_SSID,
-                security_config=wpa_config)
-        self.context.configure(ap_config0)
-        self.context.configure(ap_config1, multi_interface=True)
-        self.context.assert_connect_wifi(client_config0)
-        self.context.assert_ping_from_dut(ap_num=0)
-        self.context.assert_connect_wifi(client_config1)
-        self.context.assert_ping_from_dut(ap_num=1)
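
The MultiAuth test depends on the connection manager treating two networks with the same SSID but different security settings as distinct services. A conceptual sketch of that disambiguation follows; the tuple layout is illustrative and is not shill's actual service identifier.

def service_key(ssid, security, mode='managed'):
    # Services are keyed on more than the SSID, so identically named
    # networks with different security settings never collide.
    return (ssid, security, mode)

open_service = service_key('an ssid', 'none')
wpa_service = service_key('an ssid', 'psk')
assert open_service != wpa_service  # same SSID, two distinct services
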
diff --git a/server/site_tests/network_WiFi_OverlappingBSSScan/control b/server/site_tests/network_WiFi_OverlappingBSSScan/control
deleted file mode 100644
index 252b211..0000000
--- a/server/site_tests/network_WiFi_OverlappingBSSScan/control
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pstew, wiley, quiche'
-NAME = 'network_WiFi_OverlappingBSSScan'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that OBSS scans abort and/or back off when there
-is consistent outgoing traffic.  It is similar to the BgscanBackoff
-test, except that scans are triggered by 802.11n Overlapping BSS
-detection rather than by wpa_supplicant's background scan mechanism.
-To do so, we configure the AP to request that associated clients
-perform OBSS scans, then check both that the client performs the scans
-(by looking for its 20/40 coexistence reports) and that the scans do
-not disrupt foreground latency.
-
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_OverlappingBSSScan',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_OverlappingBSSScan/network_WiFi_OverlappingBSSScan.py b/server/site_tests/network_WiFi_OverlappingBSSScan/network_WiFi_OverlappingBSSScan.py
deleted file mode 100644
index b44bf0b..0000000
--- a/server/site_tests/network_WiFi_OverlappingBSSScan/network_WiFi_OverlappingBSSScan.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import ping_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_OverlappingBSSScan(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that background scan backs off when there is foreground traffic."""
-    version = 1
-
-    OBSS_SCAN_SAMPLE_PERIOD_SECONDS = 100
-    NO_OBSS_SCAN_SAMPLE_PERIOD_SECONDS = 10
-    PING_INTERVAL_SECONDS = 0.1
-
-    # Dwell time for scanning is usually configured to be around 100 ms (some
-    # are higher, around 150 ms), since this is also the standard beacon
-    # interval. Tolerate spikes in latency up to 250 ms as a way of asking that
-    # our PHY be servicing foreground traffic regularly during background
-    # scans.
-    # See also network_WiFi_BgscanBackoff for similar parameters.
-    LATENCY_MARGIN_MS = 250
-    THRESHOLD_BASELINE_LATENCY_MS = 100
-
-    WIFI_FREQUENCY = 2437
-
-
-    @classmethod
-    def get_ap_config(cls, scenario_name, use_obss):
-        """Returns a HostapConfig object based on the given parameters.
-
-        @param scenario_name: string describing a portion of this test.
-        @param use_obss: bool indicating if the AP should ask clients to
-            perform OBSS scans.
-        @return HostapConfig which incorporates the given parameters.
-
-        """
-        return hostap_config.HostapConfig(
-            frequency=cls.WIFI_FREQUENCY,
-            mode=hostap_config.HostapConfig.MODE_11N_PURE,
-            n_capabilities=[
-                hostap_config.HostapConfig.N_CAPABILITY_GREENFIELD,
-                hostap_config.HostapConfig.N_CAPABILITY_HT40
-            ],
-            obss_interval=10 if use_obss else None,
-            scenario_name=scenario_name)
-
-
-    def run_once(self):
-        """Body of the test."""
-        get_assoc_params = lambda: xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid())
-        get_ping_config = lambda period: ping_runner.PingConfig(
-                self.context.get_wifi_addr(),
-                interval=self.PING_INTERVAL_SECONDS,
-                count=int(period / self.PING_INTERVAL_SECONDS))
-        # Gather some statistics about ping latencies without scanning going on.
-        self.context.configure(self.get_ap_config('obss_disabled', False))
-        self.context.assert_connect_wifi(get_assoc_params())
-        logging.info('Pinging router without OBSS scans for %d seconds.',
-                     self.NO_OBSS_SCAN_SAMPLE_PERIOD_SECONDS)
-        result_no_obss_scan = self.context.client.ping(
-                get_ping_config(self.NO_OBSS_SCAN_SAMPLE_PERIOD_SECONDS))
-        logging.info('Ping statistics without OBSS scans: %r',
-                     result_no_obss_scan)
-        if result_no_obss_scan.max_latency > self.THRESHOLD_BASELINE_LATENCY_MS:
-            raise error.TestFail('RTT latency is too high even without '
-                                 'OBSS scans: %f' %
-                                 result_no_obss_scan.max_latency)
-
-        self.context.client.shill.disconnect(self.context.router.get_ssid())
-
-        # Re-configure the AP for OBSS and repeat the ping test.
-        self.context.configure(self.get_ap_config('obss_enabled', True))
-        self.context.capture_host.start_capture(
-                self.WIFI_FREQUENCY, filename='obss_enabled.pcap')
-        self.context.assert_connect_wifi(get_assoc_params())
-        logging.info('Pinging router with OBSS scans for %d seconds.',
-                     self.OBSS_SCAN_SAMPLE_PERIOD_SECONDS)
-        result_obss_scan = self.context.client.ping(
-                get_ping_config(self.OBSS_SCAN_SAMPLE_PERIOD_SECONDS))
-        logging.info('Ping statistics with OBSS scans: %r', result_obss_scan)
-        self.context.capture_host.stop_capture()
-
-        if not self.context.router.detect_client_coexistence_report(
-                self.context.client.wifi_mac):
-            raise error.TestFail('No coexistence action frames detected '
-                                 'from the client.')
-
-        self.context.client.shill.disconnect(self.context.router.get_ssid())
-        self.context.router.deconfig()
-        if (result_obss_scan.max_latency >
-                self.LATENCY_MARGIN_MS + result_no_obss_scan.avg_latency):
-            raise error.TestFail('Significant difference in rtt due to OBSS: '
-                                 '%.1f > %.1f + %d' %
-                                 (result_obss_scan.max_latency,
-                                  result_no_obss_scan.avg_latency,
-                                  self.LATENCY_MARGIN_MS))
diff --git a/server/site_tests/network_WiFi_PMKSACaching/control b/server/site_tests/network_WiFi_PMKSACaching/control
deleted file mode 100644
index 20e5540..0000000
--- a/server/site_tests/network_WiFi_PMKSACaching/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_PMKSACaching'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-Test that full 802.1X authentication is bypassed in favor of PMKSA
-caching when a cached entry is available.
-
-"""
-
-
-def run(machine):
-    job.run_test('network_WiFi_PMKSACaching',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_PMKSACaching/network_WiFi_PMKSACaching.py b/server/site_tests/network_WiFi_PMKSACaching/network_WiFi_PMKSACaching.py
deleted file mode 100644
index 6e98571..0000000
--- a/server/site_tests/network_WiFi_PMKSACaching/network_WiFi_PMKSACaching.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros.network import ping_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_PMKSACaching(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that we use PMKSA caching where appropriate."""
-    version = 1
-    AP0_FREQUENCY = 2412
-    AP1_FREQUENCY = 5220
-    TIMEOUT_SECONDS = 15
-
-
-    def run_once(self):
-        """Body of the test."""
-        mode_n = hostap_config.HostapConfig.MODE_11N_PURE
-        eap_config = xmlrpc_security_types.WPAEAPConfig(
-                server_ca_cert=site_eap_certs.ca_cert_1,
-                server_cert=site_eap_certs.server_cert_1,
-                server_key=site_eap_certs.server_private_key_1,
-                client_ca_cert=site_eap_certs.ca_cert_1,
-                client_cert=site_eap_certs.client_cert_1,
-                client_key=site_eap_certs.client_private_key_1,
-                # PMKSA caching is only defined for WPA2.
-                wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2)
-        ap_config0 = hostap_config.HostapConfig(
-                mode=mode_n, frequency=self.AP0_FREQUENCY,
-                security_config=eap_config)
-        self.context.configure(ap_config0)
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid(),
-                security_config=eap_config)
-        self.context.assert_connect_wifi(assoc_params)
-        # Add another AP with identical configuration except in 5 GHz.
-        ap_config1 = hostap_config.HostapConfig(
-                mode=mode_n, ssid=self.context.router.get_ssid(),
-                frequency=self.AP1_FREQUENCY, security_config=eap_config)
-        self.context.configure(ap_config1, multi_interface=True)
-        bssid0 = self.context.router.get_hostapd_mac(0)
-        bssid1 = self.context.router.get_hostapd_mac(1)
-        self.context.client.wait_for_bss(bssid1)
-        self.context.client.request_roam(bssid1)
-        if not self.context.client.wait_for_roam(
-                bssid1, timeout_seconds=self.TIMEOUT_SECONDS):
-            raise error.TestFail('Failed to roam to second BSS.')
-
-        self.context.router.deconfig_aps(instance=1, silent=True)
-        if not self.context.client.wait_for_roam(
-                bssid0, timeout_seconds=self.TIMEOUT_SECONDS):
-            raise error.TestFail('Failed to fall back to first BSS.')
-
-        pinger = ping_runner.PingRunner(host=self.context.client.host)
-        utils.poll_for_condition(
-                condition=lambda: pinger.simple_ping(
-                        self.context.router.get_wifi_ip(0)),
-                exception=error.TestFail(
-                        'Timed out waiting for DUT to be able to '
-                        'ping first BSS after fallback'),
-                timeout=self.TIMEOUT_SECONDS,
-                sleep_interval=1)
-        self.context.router.confirm_pmksa_cache_use(instance=0)
diff --git a/server/site_tests/network_WiFi_PTK/control b/server/site_tests/network_WiFi_PTK/control
deleted file mode 100644
index c6db9c5..0000000
--- a/server/site_tests/network_WiFi_PTK/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_PTK'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test checks that we can continue to communicate with a WPA network
-despite several pairwise transient key (PTK) rekeys.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_PTK',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_PTK/network_WiFi_PTK.py b/server/site_tests/network_WiFi_PTK/network_WiFi_PTK.py
deleted file mode 100644
index 5f7adc5..0000000
--- a/server/site_tests/network_WiFi_PTK/network_WiFi_PTK.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import ping_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network  import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_PTK(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that pairwise temporal key rotations work as expected."""
-    version = 1
-
-    # These settings combine to give us around 75 seconds of ping time,
-    # which should be around 15 rekeys.
-    PING_COUNT = 150
-    PING_INTERVAL = 0.5
-    REKEY_PERIOD = 5
-    PING_LOSS_THRESHOLD = 20
-
-    def run_once(self):
-        """Test body."""
-        wpa_config = xmlrpc_security_types.WPAConfig(
-                psk='chromeos',
-                wpa_mode=xmlrpc_security_types.WPAConfig.MODE_MIXED_WPA,
-                wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP,
-                             xmlrpc_security_types.WPAConfig.CIPHER_CCMP],
-                wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP],
-                wpa_ptk_rekey_period=self.REKEY_PERIOD)
-        ap_config = hostap_config.HostapConfig(
-                    frequency=2412,
-                    mode=hostap_config.HostapConfig.MODE_11N_PURE,
-                    security_config=wpa_config)
-        # TODO(wiley) This is just until we find the source of these
-        #             test failures.
-        self.context.capture_host.start_capture(ap_config.frequency)
-        self.context.configure(ap_config)
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid(),
-                security_config=wpa_config)
-        self.context.assert_connect_wifi(assoc_params)
-        ping_config = ping_runner.PingConfig(self.context.get_wifi_addr(),
-                                             count=self.PING_COUNT,
-                                             interval=self.PING_INTERVAL,
-                                             ignore_result=True)
-        logging.info('Pinging DUT for %d seconds and rekeying '
-                     'every %d seconds.',
-                     self.PING_COUNT * self.PING_INTERVAL,
-                     self.REKEY_PERIOD)
-        ping_result = self.context.client.ping(ping_config=ping_config)
-        logging.info('Ping loss percentage: %r.', ping_result.loss)
-        self.output_perf_value(description='Network_wifi_PTK_PingLoss',
-                value=ping_result.loss, units='percent', higher_is_better=False)
-        if ping_result.loss > self.PING_LOSS_THRESHOLD:
-            raise error.TestFail('Lost %r percent of ping packets.' %
-                                 ping_result.loss)
-        self.context.client.shill.disconnect(assoc_params.ssid)
-        self.context.router.deconfig()
-        self.context.capture_host.stop_capture()
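
A quick check of the "around 75 seconds ... around 15 rekeys" comment near the top of the test, using the constants it defines:

PING_COUNT, PING_INTERVAL, REKEY_PERIOD = 150, 0.5, 5
ping_seconds = PING_COUNT * PING_INTERVAL   # 150 * 0.5 = 75.0 seconds of traffic
rekeys = ping_seconds / REKEY_PERIOD        # 75.0 / 5 = 15.0 PTK rekeys
assert ping_seconds == 75.0 and rekeys == 15.0
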
diff --git a/server/site_tests/network_WiFi_Perf/control.11g b/server/site_tests/network_WiFi_Perf/control.11g
deleted file mode 100644
index 3b0e5ce..0000000
--- a/server/site_tests/network_WiFi_Perf/control.11g
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_Perf.11g'
-ATTRIBUTES = 'suite:wifi_perf'
-TIME = 'SHORT'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an open 802.11g network.
-"""
-
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    mode = hostap_config.HostapConfig.MODE_11G
-    configs = [hostap_config.HostapConfig(channel=channel, mode=mode)
-               for channel in (6,)]
-    job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
-                 host=host, raw_cmdline_args=args,
-                 additional_params=configs)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.11g_aes b/server/site_tests/network_WiFi_Perf/control.11g_aes
deleted file mode 100644
index 9440f4d..0000000
--- a/server/site_tests/network_WiFi_Perf/control.11g_aes
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_Perf.11g_aes'
-ATTRIBUTES = 'suite:wifi_perf'
-TIME = 'SHORT'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an 802.11g network with AES encryption.
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    mode = hostap_config.HostapConfig.MODE_11G
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    configs = [hostap_config.HostapConfig(channel=channel, mode=mode,
-                                          security_config=wpa_config)
-               for channel in (6,)]
-    job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
-                 host=host, raw_cmdline_args=args,
-                 additional_params=configs)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.11g_tkip b/server/site_tests/network_WiFi_Perf/control.11g_tkip
deleted file mode 100644
index 79b2022..0000000
--- a/server/site_tests/network_WiFi_Perf/control.11g_tkip
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_Perf.11g_tkip'
-ATTRIBUTES = 'suite:wifi_perf'
-TIME = 'SHORT'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an 802.11g network with TKIP encryption.
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    mode = hostap_config.HostapConfig.MODE_11G
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP])
-    configs = [hostap_config.HostapConfig(channel=channel, mode=mode,
-                                          security_config=wpa_config)
-               for channel in (6,)]
-    job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
-                 host=host, raw_cmdline_args=args,
-                 additional_params=configs)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.ht20 b/server/site_tests/network_WiFi_Perf/control.ht20
index 2437840..94d9de4 100644
--- a/server/site_tests/network_WiFi_Perf/control.ht20
+++ b/server/site_tests/network_WiFi_Perf/control.ht20
@@ -9,10 +9,11 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an open HT20 802.11n network.
+This test uses netperf to measure and verify the maximal receiving and
+transmitting throughput on a DUT with an open HT20 802.11n network.
 """
 
 
@@ -26,9 +27,10 @@
     configs = [hostap_config.HostapConfig(n_capabilities=caps,
                                           channel=channel, mode=mode)
                for channel in (1, 157)]
+    use_iperf = True
     job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
                  host=host, raw_cmdline_args=args,
-                 additional_params=configs)
+                 additional_params=(configs, use_iperf))
 
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.ht20_aes b/server/site_tests/network_WiFi_Perf/control.ht20_aes
index 0b54385..2c42aee 100644
--- a/server/site_tests/network_WiFi_Perf/control.ht20_aes
+++ b/server/site_tests/network_WiFi_Perf/control.ht20_aes
@@ -9,10 +9,11 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an AES encrypted HT20 802.11n network.
+This test uses netperf to measure and verify the maximal receiving and
+transmitting throughput on a DUT with an AES encrypted HT20 802.11n network.
 """
 
 
@@ -32,9 +33,10 @@
                                           channel=channel, mode=mode,
                                           security_config=wpa_config)
                for channel in (1, 157)]
+    use_iperf = True
     job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
                  host=host, raw_cmdline_args=args,
-                 additional_params=configs)
+                 additional_params=(configs, use_iperf))
 
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.ht40 b/server/site_tests/network_WiFi_Perf/control.ht40
index 477ee83..2d685a9 100644
--- a/server/site_tests/network_WiFi_Perf/control.ht40
+++ b/server/site_tests/network_WiFi_Perf/control.ht40
@@ -9,10 +9,11 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an open HT40 802.11n network.
+This test uses netperf to measure and verify the maximal receiving and
+transmitting throughput on a DUT with an open HT40 802.11n network.
 """
 
 
@@ -26,9 +27,10 @@
     configs = [hostap_config.HostapConfig(n_capabilities=caps,
                                           channel=channel, mode=mode)
                for channel in (1, 157)]
+    use_iperf = True
     job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
                  host=host, raw_cmdline_args=args,
-                 additional_params=configs)
+                 additional_params=(configs, use_iperf))
 
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.ht40_aes b/server/site_tests/network_WiFi_Perf/control.ht40_aes
index ef5e536..8972701 100644
--- a/server/site_tests/network_WiFi_Perf/control.ht40_aes
+++ b/server/site_tests/network_WiFi_Perf/control.ht40_aes
@@ -9,10 +9,11 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an AES encrypted HT40 802.11n network.
+This test uses netperf to measure and verify the maximal receiving and
+transmitting throughput on a DUT with an AES encrypted HT40 802.11n network.
 """
 
 
@@ -32,9 +33,10 @@
                                           channel=channel, mode=mode,
                                           security_config=wpa_config)
                for channel in (1, 157)]
+    use_iperf = True
     job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
                  host=host, raw_cmdline_args=args,
-                 additional_params=configs)
+                 additional_params=(configs, use_iperf))
 
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.vht20 b/server/site_tests/network_WiFi_Perf/control.vht20
new file mode 100644
index 0000000..eac0ce4
--- /dev/null
+++ b/server/site_tests/network_WiFi_Perf/control.vht20
@@ -0,0 +1,37 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'wiley, pstew, quiche'
+NAME = 'network_WiFi_Perf.vht20'
+ATTRIBUTES = 'suite:wifi_perf'
+TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+PY_VERSION = 3
+
+DOC = """
+This test uses netperf to measure and verify the maximal receiving and
+transmitting throughput on a DUT with an open VHT20 802.11ac network.
+"""
+
+
+from autotest_lib.server.cros.network import hostap_config
+
+
+def run(machine):
+    host = hosts.create_host(machine)
+    configs = [hostap_config.HostapConfig(
+                        channel=36,
+                        mode=hostap_config.HostapConfig.MODE_11AC_PURE,
+                        n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT20],
+                        ac_capabilities=[hostap_config.HostapConfig.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7],
+                        vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_20)]
+    use_iperf = True
+    job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
+                 host=host, raw_cmdline_args=args,
+                 additional_params=(configs, use_iperf))
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.vht20_aes b/server/site_tests/network_WiFi_Perf/control.vht20_aes
new file mode 100644
index 0000000..9928921
--- /dev/null
+++ b/server/site_tests/network_WiFi_Perf/control.vht20_aes
@@ -0,0 +1,43 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'wiley, pstew, quiche'
+NAME = 'network_WiFi_Perf.vht20_aes'
+ATTRIBUTES = 'suite:wifi_perf'
+TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+PY_VERSION = 3
+
+DOC = """
+This test uses netperf to measure and verify the maximal receiving and
+transmitting throughput on a DUT with an AES encrypted VHT20 802.11ac network.
+"""
+
+
+from autotest_lib.server.cros.network import hostap_config
+from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
+
+
+def run(machine):
+    host = hosts.create_host(machine)
+    wpa_config = xmlrpc_security_types.WPAConfig(
+            psk='chromeos',
+            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
+            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
+    configs = [hostap_config.HostapConfig(
+                        channel=36,
+                        mode=hostap_config.HostapConfig.MODE_11AC_PURE,
+                        n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT20],
+                        ac_capabilities=[hostap_config.HostapConfig.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7],
+                        vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_20,
+                        security_config=wpa_config)]
+    use_iperf = True
+    job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
+                 host=host, raw_cmdline_args=args,
+                 additional_params=(configs, use_iperf))
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.vht40 b/server/site_tests/network_WiFi_Perf/control.vht40
new file mode 100644
index 0000000..100237b
--- /dev/null
+++ b/server/site_tests/network_WiFi_Perf/control.vht40
@@ -0,0 +1,37 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'wiley, pstew, quiche'
+NAME = 'network_WiFi_Perf.vht40'
+ATTRIBUTES = 'suite:wifi_perf'
+TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+PY_VERSION = 3
+
+DOC = """
+This test uses netperf to measure and verify the maximal receiving and
+transmitting throughput on a DUT with an open VHT40 802.11ac network.
+"""
+
+
+from autotest_lib.server.cros.network import hostap_config
+
+
+def run(machine):
+    host = hosts.create_host(machine)
+    configs = [hostap_config.HostapConfig(
+                        channel=36,
+                        mode=hostap_config.HostapConfig.MODE_11AC_PURE,
+                        n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40],
+                        ac_capabilities=[hostap_config.HostapConfig.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7],
+                        vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40)]
+    use_iperf = True
+    job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
+                 host=host, raw_cmdline_args=args,
+                 additional_params=(configs, use_iperf))
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.vht40_aes b/server/site_tests/network_WiFi_Perf/control.vht40_aes
new file mode 100644
index 0000000..1a152e9
--- /dev/null
+++ b/server/site_tests/network_WiFi_Perf/control.vht40_aes
@@ -0,0 +1,43 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'wiley, pstew, quiche'
+NAME = 'network_WiFi_Perf.vht40_aes'
+ATTRIBUTES = 'suite:wifi_perf'
+TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+PY_VERSION = 3
+
+DOC = """
+This test uses netperf to measure and verify the maximal receiving and
+transmitting throughput on a DUT with an AES encrypted VHT40 802.11ac network.
+"""
+
+
+from autotest_lib.server.cros.network import hostap_config
+from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
+
+
+def run(machine):
+    host = hosts.create_host(machine)
+    wpa_config = xmlrpc_security_types.WPAConfig(
+            psk='chromeos',
+            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
+            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
+    configs = [hostap_config.HostapConfig(
+                        channel=36,
+                        mode=hostap_config.HostapConfig.MODE_11AC_PURE,
+                        n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40],
+                        ac_capabilities=[hostap_config.HostapConfig.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7],
+                        vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40,
+                        security_config=wpa_config)]
+    use_iperf = True
+    job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
+                 host=host, raw_cmdline_args=args,
+                 additional_params=(configs, use_iperf))
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.vht80 b/server/site_tests/network_WiFi_Perf/control.vht80
index e5773a7..1170cee 100644
--- a/server/site_tests/network_WiFi_Perf/control.vht80
+++ b/server/site_tests/network_WiFi_Perf/control.vht80
@@ -9,10 +9,11 @@
 MAX_RESULT_SIZE_KB = 512000
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
-This test uses netperf to measure the maximal receiving and transmitting
-throughput on a DUT with an open HT80 802.11ac network.
+This test uses netperf to measure and verify the maximal receiving and
+transmitting throughput on a DUT with an open VHT80 802.11ac network.
 """
 
 
@@ -21,7 +22,8 @@
 
 def run(machine):
     n_caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]
-    ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80]
+    ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80,
+               hostap_config.HostapConfig.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7]
     ac_mode = hostap_config.HostapConfig.MODE_11AC_MIXED
     channel_width_80_mhz = hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80
     configs = [hostap_config.HostapConfig(
@@ -33,9 +35,10 @@
                     ac_capabilities=ac_caps)
                for channel, vht_center_channel in [(44, 42), (157, 155)]]
     host = hosts.create_host(machine)
+    use_iperf = True
     job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
                  host=host, raw_cmdline_args=args,
-                 additional_params=configs)
+                 additional_params=(configs, use_iperf))
 
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Perf/control.vht80_pmf b/server/site_tests/network_WiFi_Perf/control.vht80_pmf
new file mode 100644
index 0000000..fa0ec25
--- /dev/null
+++ b/server/site_tests/network_WiFi_Perf/control.vht80_pmf
@@ -0,0 +1,52 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'wiley, pstew, quiche, kuabhs'
+NAME = 'network_WiFi_Perf.vht80_pmf'
+ATTRIBUTES = 'suite:wifi_perf'
+TIME = 'SHORT'
+MAX_RESULT_SIZE_KB = 512000
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+PY_VERSION = 3
+
+DOC = """
+This test uses netperf to measure and verify the maximal receiving and
+transmitting throughput on a DUT connected to a WPA2/WPA3 mixed-mode network
+with PMF enabled.
+"""
+
+from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
+from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
+from autotest_lib.server.cros.network import hostap_config
+
+def run(machine):
+    n_caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]
+    ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80,
+               hostap_config.HostapConfig.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7]
+    ac_mode = hostap_config.HostapConfig.MODE_11AC_MIXED
+    channel_width_80_mhz = hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80
+    wpa_config = xmlrpc_security_types.WPAConfig(
+            psk='chromeos',
+            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_MIXED_WPA3,
+            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
+
+    configs = [hostap_config.HostapConfig(
+                    channel=channel,
+                    mode=ac_mode,
+                    n_capabilities=n_caps,
+                    vht_channel_width=channel_width_80_mhz,
+                    vht_center_channel=vht_center_channel,
+                    pmf_support=hostap_config.HostapConfig.PMF_SUPPORT_ENABLED,
+                    ac_capabilities=ac_caps,
+                    security_config=wpa_config)
+               for channel, vht_center_channel in [(44, 42), (157, 155)]]
+    host = hosts.create_host(machine)
+    use_iperf = True
+    job.run_test('network_WiFi_Perf', tag=NAME.split('.')[1],
+                 host=host, raw_cmdline_args=args,
+                 additional_params=(configs, use_iperf))
+
+
+parallel_simple(run, machines)
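
For readers less familiar with the security mode exercised here, MODE_MIXED_WPA3 together with PMF_SUPPORT_ENABLED corresponds roughly to a WPA3 transition-mode BSS. A rough sketch of the hostapd options this typically maps to (standard hostapd.conf keys shown as a Python dict; this is not the HostapConfig implementation):

wpa3_transition_opts = {
    'wpa': '2',                     # RSN only, no legacy WPA1
    'wpa_key_mgmt': 'WPA-PSK SAE',  # accept WPA2-PSK and WPA3-SAE clients
    'rsn_pairwise': 'CCMP',         # matches CIPHER_CCMP in this control file
    'wpa_passphrase': 'chromeos',   # matches the control file's psk
    'ieee80211w': '1',              # PMF optional, i.e. PMF_SUPPORT_ENABLED
}
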
diff --git a/server/site_tests/network_WiFi_Perf/network_WiFi_Perf.py b/server/site_tests/network_WiFi_Perf/network_WiFi_Perf.py
index 2700f1d..5bc9582 100644
--- a/server/site_tests/network_WiFi_Perf/network_WiFi_Perf.py
+++ b/server/site_tests/network_WiFi_Perf/network_WiFi_Perf.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,13 +8,14 @@
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import netperf_runner
-from autotest_lib.server.cros.network import netperf_session
-from autotest_lib.server.cros.network import wifi_cell_test_base
+from autotest_lib.client.common_lib.cros.network import interface
+from autotest_lib.server.cros.network import expected_performance_results
+from autotest_lib.server.cros.network import ip_config_context_manager
+from autotest_lib.server.cros.network import perf_test_manager as perf_manager
+from autotest_lib.server.cros.network import wifi_cell_perf_test_base
 
 
-class network_WiFi_Perf(wifi_cell_test_base.WiFiCellTestBase):
+class network_WiFi_Perf(wifi_cell_perf_test_base.WiFiCellPerfTestBase):
     """Test maximal achievable bandwidth on several channels per band.
 
     Conducts a performance test for a set of specified router configurations
@@ -23,25 +25,40 @@
 
     version = 1
 
-    NETPERF_CONFIGS = [
-            netperf_runner.NetperfConfig(
-                       netperf_runner.NetperfConfig.TEST_TYPE_TCP_STREAM),
-            netperf_runner.NetperfConfig(
-                       netperf_runner.NetperfConfig.TEST_TYPE_TCP_MAERTS),
-            netperf_runner.NetperfConfig(
-                       netperf_runner.NetperfConfig.TEST_TYPE_UDP_STREAM),
-            netperf_runner.NetperfConfig(
-                       netperf_runner.NetperfConfig.TEST_TYPE_UDP_MAERTS),
+    PERF_TEST_TYPES = [
+            perf_manager.PerfTestTypes.TEST_TYPE_TCP_TX,
+            perf_manager.PerfTestTypes.TEST_TYPE_TCP_RX,
+            perf_manager.PerfTestTypes.TEST_TYPE_TCP_BIDIRECTIONAL,
+            perf_manager.PerfTestTypes.TEST_TYPE_UDP_TX,
+            perf_manager.PerfTestTypes.TEST_TYPE_UDP_RX,
+            perf_manager.PerfTestTypes.TEST_TYPE_UDP_BIDIRECTIONAL,
     ]
 
+    DEFAULT_ROUTER_LAN_IP_ADDRESS = "192.168.1.50"
+    DEFAULT_PCAP_LAN_IP_ADDRESS = "192.168.1.51"
+    DEFAULT_ROUTER_LAN_IFACE_NAME = "eth1"
+    DEFAULT_PCAP_LAN_IFACE_NAME = "eth1"
 
     def parse_additional_arguments(self, commandline_args, additional_params):
         """Hook into super class to take control files parameters.
 
         @param commandline_args dict of parsed parameters from the autotest.
         @param additional_params list of HostapConfig objects.
-
         """
+        self._should_required = 'should' in commandline_args
+        self._power_save_off = 'power_save_off' in commandline_args
+
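+        # The control file or command line may override the default wired LAN
+        # interface names and IP addresses used on the router and pcap host.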
+        get_arg_value_or_default = lambda attr, default: commandline_args.get(
+                attr, default)
+        self._router_lan_ip_addr = get_arg_value_or_default(
+                'router_lan_ip_addr', self.DEFAULT_ROUTER_LAN_IP_ADDRESS)
+        self._router_lan_iface_name = get_arg_value_or_default(
+                'router_lan_iface_name', self.DEFAULT_ROUTER_LAN_IFACE_NAME)
+        self._pcap_lan_ip_addr = get_arg_value_or_default(
+                'pcap_lan_ip_addr', self.DEFAULT_PCAP_LAN_IP_ADDRESS)
+        self._pcap_lan_iface_name = get_arg_value_or_default(
+                'pcap_lan_iface_name', self.DEFAULT_PCAP_LAN_IFACE_NAME)
+
         if 'governor' in commandline_args:
             self._governor = commandline_args['governor']
             # validate governor string. Not all machines will support all of
@@ -49,22 +66,80 @@
             # valid governor was passed in
             if self._governor not in ('performance', 'powersave', 'userspace',
                                       'ondemand', 'conservative', 'schedutil'):
-                logging.warning('Unrecognized CPU governor "%s". Running test '
-                        'without setting CPU governor...' % self._governor)
+                logging.warning(
+                        'Unrecognized CPU governor %s. Running test '
+                        'without setting CPU governor...', self._governor)
                 self._governor = None
         else:
             self._governor = None
-        self._ap_configs = additional_params
+        self._ap_configs, self._use_iperf = additional_params
 
+    def verify_result(self, result, must_expected_throughput,
+                      should_expected_throughput, test_type, failed_test_types,
+                      power_save, ap_config):
+        """Verify that the performance test result passes the must and should
+        throughput requirements.
 
-    def do_run(self, ap_config, session, power_save, governor):
+        @param result: the throughput result object
+        @param must_expected_throughput: the min must expected throughput
+        @param should_expected_throughput: the min should expected throughput
+        @param test_type: the PerfTestTypes test type
+        @param failed_test_types: a set of failed test types
+        @param power_save: whether power-save mode is enabled on the DUT
+        @param ap_config: the AP configuration
+        """
+        must_tput_failed = False
+        should_tput_failed = False
+
+        # If the must requirement is greater than our maximum expectation for a
+        # board, use the maximum expectation instead of the must requirement.
+        board_max_expectation = expected_performance_results.get_board_max_expectation(
+                test_type, self.context.client.board)
+        if board_max_expectation and board_max_expectation < must_expected_throughput:
+            must_expected_throughput = board_max_expectation
+
+        if result.throughput < must_expected_throughput:
+            logging.error(
+                    'Throughput is too low for %s. Expected (must) %0.2f Mbps, got %0.2f.',
+                    test_type, must_expected_throughput, result.throughput)
+            must_tput_failed = True
+        if result.throughput < should_expected_throughput:
+            if self._should_required:
+                logging.error(
+                        'Throughput is too low for %s. Expected (should) %0.2f Mbps, got %0.2f.',
+                        test_type, should_expected_throughput,
+                        result.throughput)
+                should_tput_failed = True
+            else:
+                logging.info(
+                        'Throughput is below (should) expectation for %s. Expected (should) %0.2f Mbps, got %0.2f.',
+                        test_type, should_expected_throughput,
+                        result.throughput)
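+        # Record a descriptive tag for every failed requirement so that
+        # run_once can report all low-throughput configurations at the end.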
+        if must_tput_failed or should_tput_failed:
+            failed_test_type_list = [
+                    '[test_type=%s' % test_type,
+                    'channel=%d' % ap_config.channel,
+                    'power_save_on=%r' % power_save,
+                    'measured_Tput=%0.2f' % result.throughput
+            ]
+            if must_tput_failed:
+                failed_test_type_list.append(
+                        'must_expected_Tput_failed=%0.2f' %
+                        must_expected_throughput)
+            elif should_tput_failed:
+                failed_test_type_list.append(
+                        'should_expected_Tput_failed=%0.2f' %
+                        should_expected_throughput)
+            failed_test_types.add(', '.join(failed_test_type_list) + ']')
+
+    def do_run(self, ap_config, manager, power_save, governor):
         """Run a single set of perf tests, for a given AP and DUT config.
 
         @param ap_config: the AP configuration that is being used
-        @param session: a netperf session instance
+        @param manager: a PerfTestManager instance
         @param power_save: whether or not to use power-save mode on the DUT
                            (boolean)
-
+        @return set of failed test types
         """
         def get_current_governor(host):
             """
@@ -98,8 +173,8 @@
                 # perform the run twice
                 return
 
+        failed_test_types = set()
         self.context.client.powersave_switch(power_save)
-        session.warmup_stations()
         ps_tag = 'PS%s' % ('on' if power_save else 'off')
         governor_tag = 'governor-%s' % governor_name
         ap_config_tag = '_'.join([ap_config.perf_loggable_description,
@@ -107,56 +182,96 @@
         signal_level = self.context.client.wifi_signal_level
         signal_description = '_'.join([ap_config_tag, 'signal'])
         self.write_perf_keyval({signal_description: signal_level})
-        for config in self.NETPERF_CONFIGS:
+        for test_type in self.PERF_TEST_TYPES:
+            config = manager.get_config(test_type)
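+            # Traffic runs between the DUT and the pcap host's wired LAN
+            # interface, which acts as the remote endpoint for this session.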
+            pcap_lan_iface = interface.Interface(self._pcap_lan_iface_name,
+                                                 self.context.pcap_host.host)
+            session = manager.get_session(test_type,
+                                          self.context.client,
+                                          self.context.pcap_host,
+                                          peer_device_interface=pcap_lan_iface)
+            ch_width = ap_config.channel_width
+            if ch_width is None:
+                raise error.TestFail(
+                        'Failed to get the channel width used by the AP and client'
+                )
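+            # Look up the must/should throughput expectations for this test
+            # type, 802.11 mode and channel width.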
+            expected_throughput = expected_performance_results.get_expected_throughput_wifi(
+                    test_type, ap_config.mode, ch_width)
             results = session.run(config)
             if not results:
-                logging.error('Failed to take measurement for %s',
-                              config.tag)
+                logging.error('Failed to take measurement for %s', test_type)
                 continue
-            values = [result.throughput for result in results]
-            self.output_perf_value(config.tag, values, units='Mbps',
+
+            values = [sample.throughput for sample in results]
+            self.output_perf_value(test_type,
+                                   values,
+                                   units='Mbps',
                                    higher_is_better=True,
                                    graph=ap_config_tag)
-            result = netperf_runner.NetperfResult.from_samples(results)
-            self.write_perf_keyval(result.get_keyval(
-                prefix='_'.join([ap_config_tag, config.tag])))
+            result = manager.get_result(results)
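+            # expected_throughput is a (must, should) pair of thresholds in Mbps.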
+            self.verify_result(result, expected_throughput[0],
+                               expected_throughput[1], test_type,
+                               failed_test_types, power_save, ap_config)
+            self.write_perf_keyval(
+                    result.get_keyval(
+                            prefix='_'.join([ap_config_tag, test_type])))
         if governor:
             utils.restore_scaling_governor_states(client_governor,
                     self.context.client.host)
             utils.restore_scaling_governor_states(router_governor,
                     self.context.router.host)
+        return failed_test_types
 
 
     def run_once(self):
         """Test body."""
         start_time = time.time()
+        low_throughput_tests = set()
+        logging.info('Running test on board: %s', self.context.client.board)
+
         for ap_config in self._ap_configs:
             # Set up the router and associate the client with it.
-            self.context.configure(ap_config)
-            # self.context.configure has a similar check - but that one only
-            # errors out if the AP *requires* VHT i.e. AP is requesting
-            # MODE_11AC_PURE and the client does not support it.
-            # For wifi_perf, we don't want to run MODE_11AC_MIXED on the AP if
-            # the client does not support VHT, as we are guaranteed to get the
-            # same results at 802.11n/HT40 in that case.
-            if ap_config.is_11ac and not self.context.client.is_vht_supported():
-                raise error.TestNAError('Client does not have AC support')
-            assoc_params = xmlrpc_datatypes.AssociationParameters(
-                    ssid=self.context.router.get_ssid(),
-                    security_config=ap_config.security_config)
-            self.context.assert_connect_wifi(assoc_params)
-            session = netperf_session.NetperfSession(self.context.client,
-                                                     self.context.router)
+            self.configure_and_connect_to_ap(ap_config)
+            with ip_config_context_manager.IpConfigContextManager(
+            ) as ip_context:
 
-            # Flag a test error if we disconnect for any reason.
-            with self.context.client.assert_no_disconnects():
-                # Conduct the performance tests while toggling powersave mode.
-                for power_save in (True, False):
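+                # Bring up the wired LAN link between the router and the pcap
+                # host and add routes so that traffic between the DUT and the
+                # pcap host's LAN address is forwarded through the AP.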
+                ip_context.bring_interface_up(self.context.router.host,
+                                              self._router_lan_iface_name)
+                ip_context.bring_interface_up(self.context.pcap_host.host,
+                                              self._pcap_lan_iface_name)
+                ip_context.assign_ip_addr_to_iface(self.context.router.host,
+                                                   self._router_lan_ip_addr,
+                                                   self._router_lan_iface_name)
+                ip_context.assign_ip_addr_to_iface(self.context.pcap_host.host,
+                                                   self._pcap_lan_ip_addr,
+                                                   self._pcap_lan_iface_name)
+                ip_context.add_ip_route(self.context.client.host,
+                                        self._pcap_lan_ip_addr,
+                                        self.context.client.wifi_if,
+                                        self.context.router.wifi_ip)
+                ip_context.add_ip_route(self.context.pcap_host.host,
+                                        self.context.client.wifi_ip,
+                                        self._router_lan_iface_name,
+                                        self._router_lan_ip_addr)
+
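+                # The PerfTestManager builds the traffic sessions; _use_iperf
+                # was supplied by the control file via additional_params.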
+                manager = perf_manager.PerfTestManager(self._use_iperf)
+                # Flag a test error if we disconnect for any reason.
+                with self.context.client.assert_no_disconnects():
                     for governor in sorted(set([None, self._governor])):
-                        self.do_run(ap_config, session, power_save, governor)
+                        # Run the performance test and record the test types
+                        # which failed due to low throughput.
+                        low_throughput_tests.update(
+                                self.do_run(ap_config, manager,
+                                            not (self._power_save_off),
+                                            governor))
 
             # Clean up router and client state for the next run.
             self.context.client.shill.disconnect(self.context.router.get_ssid())
             self.context.router.deconfig()
         end_time = time.time()
         logging.info('Running time %0.1f seconds.', end_time - start_time)
+        if len(low_throughput_tests) != 0:
+            low_throughput_tags = list(low_throughput_tests)
+            raise error.TestFail(
+                    'Throughput performance too low for test type(s): %s' %
+                    ', '.join(low_throughput_tags))
diff --git a/server/site_tests/network_WiFi_Prefer5Ghz/control b/server/site_tests/network_WiFi_Prefer5Ghz/control
deleted file mode 100644
index 0e139ef..0000000
--- a/server/site_tests/network_WiFi_Prefer5Ghz/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_Prefer5Ghz'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test case verifies that we prefer 5Ghz APs over 2.4Ghz APs
-when everything else is equal.
-
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_Prefer5Ghz',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Prefer5Ghz/network_WiFi_Prefer5Ghz.py b/server/site_tests/network_WiFi_Prefer5Ghz/network_WiFi_Prefer5Ghz.py
deleted file mode 100644
index 5c7801a..0000000
--- a/server/site_tests/network_WiFi_Prefer5Ghz/network_WiFi_Prefer5Ghz.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import iw_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_Prefer5Ghz(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that if we see two APs in the same network, we take the 5Ghz one."""
-    version = 1
-
-
-    def run_once(self):
-        """Body of the test."""
-        client = self.context.client
-        router = self.context.router
-        mode_n = hostap_config.HostapConfig.MODE_11N_PURE
-        ssid = router.build_unique_ssid()
-        ap_config_2G = hostap_config.HostapConfig(channel=1, ssid=ssid,
-                mode=mode_n)
-        ap_config_5G = hostap_config.HostapConfig(ssid=ssid, channel=36,
-                mode=mode_n)
-
-        # Make sure neither the 2.4 GHz nor the 5 GHz BSS is blacklisted.
-        client.clear_supplicant_blacklist()
-
-        # Bring up a 2.4 GHz and 5 GHz BSS, both with the same SSID.
-        self.context.configure(ap_config_2G)
-        self.context.configure(ap_config_5G, multi_interface=True)
-
-        # Uncomment the below for testing/debugging: Lowers the tx power of the
-        # 5 GHz AP to make its signal weaker, so the DUT is more likely to
-        # choose 2.4 GHz. 100 mBm or 1 dbM is the most we can lower it to.
-        # _5G_interface = self.context.router.get_hostapd_interface(1)
-        # self.context.router.iw_runner.set_tx_power(_5G_interface, 'fixed 100')
-
-        # Wait for both BSSes to be discovered - if the DUT doesn't discover
-        # both BSSes in the scan, it may end up connecting to the wrong BSS.
-        client.wait_for_bsses(ssid, 2)
-        self.context.assert_connect_wifi(
-                xmlrpc_datatypes.AssociationParameters(ssid=ssid))
-
-        # Cache the frequency to signal strength mapping for debugging purposes.
-        wanted_freq = ap_config_5G.frequency
-        actual_freq = int(client.get_iw_link_value(\
-            iw_runner.IW_LINK_KEY_FREQUENCY))
-        signal_strengths = {}
-
-        bss_list_full = client.iw_runner.scan_dump(client.wifi_if)
-        bss_list = filter(lambda bss: bss.ssid == ssid, bss_list_full)
-        for bss in bss_list:
-            signal_strengths[bss.frequency] = bss.signal
-            logging.info('Freq: %d Signal: %d' , bss.frequency, bss.signal)
-
-        if actual_freq != wanted_freq:
-            raise error.TestFail('Connected to 2.4GHz at %r dBm, wanted 5 GHz '
-                                  ' at %r dBm' %
-                                  (signal_strengths.get(actual_freq),
-                                  signal_strengths.get(wanted_freq)))
diff --git a/server/site_tests/network_WiFi_ProfileBasic/control b/server/site_tests/network_WiFi_ProfileBasic/control
deleted file mode 100644
index ae46fc8..0000000
--- a/server/site_tests/network_WiFi_ProfileBasic/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_ProfileBasic'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-Tests basic operations on profiles and profile entries.  Tests that we
-autoconnect to remembered services when a profile pop or entry delete
-causes us to lose credentials to our currently connected service.  Tests
-that we auto-connect to a WiFi network when credentials become available
-and we are not already connected.
-
-"""
-
-
-def run(machine):
-    job.run_test('network_WiFi_ProfileBasic',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_ProfileBasic/network_WiFi_ProfileBasic.py b/server/site_tests/network_WiFi_ProfileBasic/network_WiFi_ProfileBasic.py
deleted file mode 100644
index d7ec579..0000000
--- a/server/site_tests/network_WiFi_ProfileBasic/network_WiFi_ProfileBasic.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class ProfileRemovingContext(object):
-    """Creates and pushes a profile that is guaranteed to be removed."""
-
-    @property
-    def profile_name(self):
-        """@return string: name of profile created and pushed."""
-        return self._profile_name
-
-
-    def __init__(self, wifi_client, profile_name='always_removed'):
-        self._wifi_client = wifi_client
-        self._profile_name = profile_name
-
-
-    def __enter__(self):
-        if not all([self._wifi_client.shill.create_profile(self.profile_name),
-                    self._wifi_client.shill.push_profile(self.profile_name)]):
-            raise error.TestFail('Failed to create/push profile %s' %
-                                 self.profile_name)
-        return self
-
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        # Ignore pop errors in case the test popped it on its own
-        self._wifi_client.shill.pop_profile(self.profile_name)
-        if not self._wifi_client.shill.remove_profile(self.profile_name):
-            raise error.TestFail('Failed to remove profile %s.' %
-                                 self.profile_name)
-
-
-class network_WiFi_ProfileBasic(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests that credentials are stored in profiles."""
-
-    version = 1
-
-    CHANNEL_NUMBER = 1
-    STATE_TRANSITION_TIMEOUT_SECONDS = 20
-
-
-    def _assert_state_transition(self, ssid, states):
-        """Raise an error if a WiFi service doesn't transition to |states|.
-
-        @param ssid: string ssid of service.
-        @param states: list of string states to wait for.
-
-        """
-        result = self.context.client.wait_for_service_states(
-                ssid, states,
-                timeout_seconds=self.STATE_TRANSITION_TIMEOUT_SECONDS)
-
-        success, state, duration_seconds = result
-        if not success:
-            raise error.TestFail('Timed out waiting for states: %r in %f '
-                                 'seconds.  Ended in %s' %
-                                 (states, duration_seconds, state))
-
-
-    def run_once(self):
-        """Body of the test."""
-        self.context.client.shill.clean_profiles()
-        wep_config = xmlrpc_security_types.WEPConfig(
-                wep_keys=['abcde', 'fghij', 'klmno', 'pqrst'])
-        ap_config0 = hostap_config.HostapConfig(
-                channel=self.CHANNEL_NUMBER, security_config=wep_config)
-        ap_config1 = hostap_config.HostapConfig(
-                channel=self.CHANNEL_NUMBER, security_config=wep_config)
-        with ProfileRemovingContext(self.context.client,
-                                    profile_name='bottom') as bottom:
-            self.context.configure(ap_config0)
-            client_config0 = xmlrpc_datatypes.AssociationParameters(
-                    security_config=ap_config0.security_config,
-                    ssid=self.context.router.get_ssid())
-            self.context.assert_connect_wifi(client_config0)
-            self.context.assert_ping_from_dut(ap_num=0)
-            # Check that popping a profile causes a loss of credentials and a
-            # disconnect.
-            if not self.context.client.shill.pop_profile(bottom.profile_name):
-                raise error.TestFail('Failed to pop profile %s.' %
-                                      bottom.profile_name)
-
-            self._assert_state_transition(client_config0.ssid, ['idle'])
-            # Check that pushing a profile causes credentials to reappear.
-            if not self.context.client.shill.push_profile(bottom.profile_name):
-                raise error.TestFail('Failed to push profile %s.' %
-                                      bottom.profile_name)
-
-            self._assert_state_transition(client_config0.ssid,
-                                          self.context.client.CONNECTED_STATES)
-
-            # Explicitly disconnect from the AP.
-            self.context.client.shill.disconnect(client_config0.ssid)
-            self._assert_state_transition(client_config0.ssid, ['idle'])
-
-            with ProfileRemovingContext(self.context.client,
-                                        profile_name='top') as top:
-                # Changes to the profile stack should clear the "explicitly
-                # disconnected" flag on all services.  This should cause shill
-                # to re-connect to the AP.
-                self._assert_state_transition(client_config0.ssid,
-                                              self.context.client.CONNECTED_STATES)
-
-                self.context.configure(ap_config1, multi_interface=True)
-                client_config1 = xmlrpc_datatypes.AssociationParameters(
-                        security_config=ap_config1.security_config,
-                        ssid=self.context.router.get_ssid(instance=1))
-                self.context.assert_connect_wifi(client_config1)
-                self.context.assert_ping_from_dut(ap_num=1)
-                # Check that deleting an entry also causes a disconnect and
-                # autoconect to a previously remembered service.
-                if not self.context.client.shill.delete_entries_for_ssid(
-                        client_config1.ssid):
-                    raise error.TestFail('Failed to delete profile entry for '
-                                         '%s' % client_config1.ssid)
-
-                self._assert_state_transition(client_config1.ssid, ['idle'])
-                self._assert_state_transition(client_config0.ssid,
-                                              self.context.client.CONNECTED_STATES)
-                # Verify that the same sort of thing happens when we pop
-                # a profile on top of another one.
-                self.context.assert_connect_wifi(client_config1)
-                self.context.assert_ping_from_dut(ap_num=1)
-                if not self.context.client.shill.pop_profile(top.profile_name):
-                    raise error.TestFail('Failed to pop profile %s.' %
-                                          top.profile_name)
-                self._assert_state_transition(client_config1.ssid, ['idle'])
-                self._assert_state_transition(client_config0.ssid,
-                                              self.context.client.CONNECTED_STATES)
-
-                # Re-push the top profile.
-                if not self.context.client.shill.push_profile(top.profile_name):
-                    raise error.TestFail('Failed to push profile %s.' %
-                                          top.profile_name)
-
-                # Explicitly disconnect from the AP.
-                self.context.client.shill.disconnect(client_config0.ssid)
-                self._assert_state_transition(client_config0.ssid, ['idle'])
-
-                # Verify that popping a profile -- even one which does not
-                # affect the service profile -- returns explicitly disconnected
-                # services back into the pool of connectable services.
-                if not self.context.client.shill.pop_profile(top.profile_name):
-                    raise error.TestFail('Failed to pop profile %s.' %
-                                          top.profile_name)
-
-                # A change to the profile stack should have caused us to
-                # reconnect to the service, since the "explicitly disconnected"
-                # flag will be removed.
-                self._assert_state_transition(client_config0.ssid,
-                                              self.context.client.CONNECTED_STATES)
diff --git a/server/site_tests/network_WiFi_ProfileGUID/control b/server/site_tests/network_WiFi_ProfileGUID/control
deleted file mode 100644
index 79e5449..0000000
--- a/server/site_tests/network_WiFi_ProfileGUID/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_ProfileGUID'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ''
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that shill correctly handles GUIDs in the context
-of WiFi services.
-"""
-
-
-def run(machine):
-    job.run_test('network_WiFi_ProfileGUID',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_ProfileGUID/network_WiFi_ProfileGUID.py b/server/site_tests/network_WiFi_ProfileGUID/network_WiFi_ProfileGUID.py
deleted file mode 100644
index 52299b7..0000000
--- a/server/site_tests/network_WiFi_ProfileGUID/network_WiFi_ProfileGUID.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_ProfileGUID(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that we can connect to router configured in various ways."""
-    version = 1
-
-    TEST_GUID = '01234'
-    TEST_PASSWORD0 = 'chromeos0'
-    TEST_PASSWORD1 = 'chromeos1'
-    TEST_PASSWORD2 = 'chromeos2'
-    SERVICE_PROPERTY_GUID = 'GUID'
-    STATE_TRANSITION_TIMEOUT_SECONDS = 15
-
-
-    def _assert_connection(self, ssid):
-        """Assert that shill connects to |ssid| after a scan.
-
-        @param ssid: string name of network we expect to connect to.
-
-        """
-        # Request a scan, this should goad shill into action.
-        self.context.client.scan(frequencies=[], ssids=[])
-        result = self.context.client.wait_for_service_states(
-                ssid, self.context.client.CONNECTED_STATES,
-                self.STATE_TRANSITION_TIMEOUT_SECONDS)
-        success, state, time = result
-        if not success:
-            logging.error('ERROR!')
-            raise error.TestFail('Failed to connect to %s in %f seconds (%r).' %
-                                 (ssid, time, state))
-
-
-    def _assert_guid_value(self, ssid, expected_guid, expect_missing=False):
-        """Assert that a service's GUID field has a particular value.
-
-        @param ssid: string name of WiFi network corresponding to the service.
-        @param expected_guid: string expected value of the GUID on the service.
-        @param expect_missing: boolean True if we expect an empty GUID value.
-
-        """
-        properties = self.context.client.shill.get_service_properties(ssid)
-        real_guid = properties.get(self.SERVICE_PROPERTY_GUID, '')
-        logging.debug('Got service properties: %r.', properties)
-        if expect_missing and real_guid:
-            raise error.TestFail('Expected GUID property to be missing.')
-
-        if not expect_missing and real_guid != expected_guid:
-            raise error.TestFail('Expected GUID value of %r, but got %r.' %
-                                 (expected_guid, real_guid))
-
-
-    def run_once(self):
-        """Sets up a router, connects to it, pings it, and repeats."""
-        CIPHER_CCMP = xmlrpc_security_types.WPAConfig.CIPHER_CCMP
-        WPA_MODE = xmlrpc_security_types.WPAConfig.MODE_PURE_WPA
-        get_ap_config = lambda ssid, password: hostap_config.HostapConfig(
-                ssid=ssid, channel=1,
-                security_config=xmlrpc_security_types.WPAConfig(
-                        psk=password,
-                        wpa_mode=WPA_MODE,
-                        wpa_ciphers=[CIPHER_CCMP]))
-        get_client_config = lambda ap_config: \
-                xmlrpc_datatypes.AssociationParameters(
-                        ssid=self.context.router.get_ssid(),
-                        security_config=ap_config.security_config,
-                        guid=self.TEST_GUID,
-                        autoconnect=True)
-        ap_config = get_ap_config(None, self.TEST_PASSWORD0)
-        self.context.configure(ap_config)
-        assoc_params = get_client_config(ap_config)
-        self.context.client.shill.configure_wifi_service(assoc_params)
-        self._assert_connection(assoc_params.ssid)
-        # Expect the GUID property to be set.
-        self._assert_guid_value(assoc_params.ssid, assoc_params.guid)
-        if not self.context.client.shill.delete_entries_for_ssid(
-                assoc_params.ssid):
-            raise error.TestFail('Failed to delete profile entry for %s' %
-                                 assoc_params.ssid)
-
-        # GUID property should be missing, since we don't have an entry.
-        self._assert_guid_value(assoc_params.ssid, assoc_params.guid,
-                                expect_missing=True)
-
-        # Change the password on the AP, do everything again.
-        ap_config = get_ap_config(assoc_params.ssid, self.TEST_PASSWORD1)
-        self.context.configure(ap_config)
-        assoc_params = get_client_config(ap_config)
-        self.context.client.shill.configure_wifi_service(assoc_params)
-        self._assert_connection(assoc_params.ssid)
-        # Expect the GUID property to be set.
-        self._assert_guid_value(assoc_params.ssid, assoc_params.guid)
-        # Change the security configuration again.
-        ap_config = get_ap_config(assoc_params.ssid, self.TEST_PASSWORD2)
-        self.context.configure(ap_config)
-        # Connect again, but do so by configuring the existing entry.
-        # We'll address it by its GUID here.
-        if not self.context.client.shill.configure_service_by_guid(
-                xmlrpc_datatypes.ConfigureServiceParameters(
-                        assoc_params.guid, autoconnect=True,
-                        passphrase=self.TEST_PASSWORD2)):
-            raise error.TestFail('Failed to configure service by GUID.')
-
-        self._assert_connection(assoc_params.ssid)
diff --git a/server/site_tests/network_WiFi_RandomMACAddress/control b/server/site_tests/network_WiFi_RandomMACAddress/control
deleted file mode 100644
index 3f595fc..0000000
--- a/server/site_tests/network_WiFi_RandomMACAddress/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'ejcaruso, snanda'
-NAME = 'network_WiFi_RandomMACAddress'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = ''
-
-DOC = """
-This test verifies that MAC address randomization can be
-turned on and that scans launched with the feature do not use
-the hardware MAC address.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_RandomMACAddress',
-                 host=host,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RandomMACAddress/network_WiFi_RandomMACAddress.py b/server/site_tests/network_WiFi_RandomMACAddress/network_WiFi_RandomMACAddress.py
deleted file mode 100644
index d0bcc46..0000000
--- a/server/site_tests/network_WiFi_RandomMACAddress/network_WiFi_RandomMACAddress.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.network import random_mac_address_test_base
-
-class network_WiFi_RandomMACAddress(
-        random_mac_address_test_base.RandomMACAddressTestBase):
-    """
-    Test that the MAC address is randomized during scans when we
-    are not connected to an AP already.
-    """
-
-    version = 1
-
-    def run_once(self):
-        """Body of the test."""
-        client = self.context.client
-        dut_hw_mac = client.wifi_mac
-
-        # Enable MAC address randomization in shill.
-        with client.mac_address_randomization(True):
-            self.start_capture()
-            self.request_scans()
-            frames = self.stop_capture_and_get_probe_requests()
-
-        if not frames:
-            raise error.TestFail('No probe requests were found!')
-        elif any(frame.source_addr == dut_hw_mac for frame in frames):
-            raise error.TestFail('Found probe requests with hardware MAC!')
diff --git a/server/site_tests/network_WiFi_RateControl/control b/server/site_tests/network_WiFi_RateControl/control
deleted file mode 100644
index 2b663e9..0000000
--- a/server/site_tests/network_WiFi_RateControl/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_RateControl'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_flaky')
-DEPENDENCIES = 'wificell, conductive:True'
-
-DOC = """
-This test associates a DUT with several APs serving an open HT40 network.  The
-test then conducts a packet capture of some IP traffic, and checks that the DUT
-is transmitting at the highest possible 2x2 MCS rate (MCS index 15).  This
-check assumes that the test is being conducted under relatively good RF
-conditions.
-
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_RateControl',
-                 host=host, raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RateControl/network_WiFi_RateControl.py b/server/site_tests/network_WiFi_RateControl/network_WiFi_RateControl.py
deleted file mode 100644
index 46e84bb..0000000
--- a/server/site_tests/network_WiFi_RateControl/network_WiFi_RateControl.py
+++ /dev/null
@@ -1,156 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import iw_runner
-from autotest_lib.client.common_lib.cros.network import tcpdump_analyzer
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import netperf_runner
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_RateControl(wifi_cell_test_base.WiFiCellTestBase):
-    """
-    Test maximal achievable bandwidth on several channels per band.
-
-    Conducts a performance test for a set of specified router configurations
-    and reports results as keyval pairs.
-
-    """
-
-    version = 1
-
-    # In case of aggregated frames, we need to capture whole packet to be able
-    # to parse the A-MSDU subframe and extract the IP/UDP content.
-    # Per spec the max AMSDU size for 11n is 7935bytes, let use it here.
-    TEST_SNAPLEN = 7935
-
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """
-        Hook into super class to take control files parameters.
-
-        @param commandline_args: dict of parsed parameters from the autotest.
-        @param additional_params: list of HostapConfig objects.
-
-        """
-        self._ap_configs = additional_params
-
-
-    def get_highest_mcs_rate(self, frequency):
-        """
-        Get the highest MCS index supported by the DUT on |frequency|.
-
-        @param frequency: int frequency to look for supported MCS rates.
-        @return int highest rate supported.
-
-        """
-        # Figure out the highest MCS index supported by this hardware.
-        phys = iw_runner.IwRunner(
-                remote_host=self.context.client.host).list_phys()
-        if len(phys) != 1:
-            raise error.TestFail('Test expects a single PHY, but we got %d' %
-                                 len(phys))
-
-        phy = phys[0]
-        bands = [band for band in phy.bands if frequency in band.frequencies]
-        if len(bands) != 1:
-            raise error.TestFail('Test expects a single possible band for a '
-                                 'given frequency, but this device has %d '
-                                 'such bands.' % len(bands))
-
-        # 32 is a special low throughput, high resilience mode.  Ignore it.
-        possible_indices = filter(lambda x: x != 32, bands[0].mcs_indices)
-
-        if not possible_indices:
-            raise error.TestFail('No possible MCS indices on frequency %d' %
-                                 frequency)
-
-        return max(possible_indices)
-
-
-    def check_bitrates_in_capture(self, pcap_result, max_mcs_index):
-        """
-        Check that frames in a packet capture have expected MCS indices.
-
-        @param pcap_result: RemoteCaptureResult tuple.
-        @param max_mcs_index: int MCS index representing the highest possible
-                bitrate on this device.
-
-        """
-        logging.info('Analyzing packet capture...')
-        display_filter = 'udp and ip.src==%s' % self.context.client.wifi_ip
-        frames = tcpdump_analyzer.get_frames(pcap_result.local_pcap_path,
-                display_filter, reject_bad_fcs=False)
-
-        logging.info('Grouping frames by MCS index')
-        counts = {}
-        for frame in frames:
-            counts[frame.mcs_index] = counts.get(frame.mcs_index, 0) + 1
-        logging.info('Saw WiFi frames with MCS indices: %r', counts)
-
-        # Now figure out the index which the device sent the most packets with.
-        dominant_index = None
-        num_packets_sent = -1
-        for index, num_packets in counts.iteritems():
-            if num_packets > num_packets_sent:
-                dominant_index = index
-                num_packets_sent = num_packets
-
-        # We should see that the device sent more frames with the maximal index
-        # than anything else.  This checks that the rate controller is fairly
-        # aggressive and using all of the device's capabilities.
-        if dominant_index != max_mcs_index:
-            raise error.TestFail('Failed to use best possible MCS '
-                                 'index %d in a clean RF environment: %r' %
-                                 (max_mcs_index, counts))
-
-
-    def run_once(self):
-        """Test body."""
-        # Just abort the test if we are not on a machine known to be conducted.
-        # The performance requirements of this test are hard to meet, without
-        # strong multi-path effects. (Our conducted setups are designed to
-        # provide strong multi-path.)
-        if not self.context.client.conductive:
-            raise error.TestNAError(
-                'This test requires a great RF environment.')
-
-        caps = [hostap_config.HostapConfig.N_CAPABILITY_GREENFIELD,
-                hostap_config.HostapConfig.N_CAPABILITY_HT40]
-        mode_11n = hostap_config.HostapConfig.MODE_11N_PURE
-        get_config = lambda channel: hostap_config.HostapConfig(
-                channel=channel, mode=mode_11n, n_capabilities=caps)
-        netperf_config = netperf_runner.NetperfConfig(
-                netperf_runner.NetperfConfig.TEST_TYPE_UDP_STREAM)
-        for _, ap_config in enumerate([get_config(1), get_config(157)]):
-            # Set up the router and associate the client with it.
-            self.context.configure(ap_config)
-            self.context.capture_host.start_capture(
-                    ap_config.frequency,
-                    width_type=ap_config.packet_capture_mode,
-                    snaplen=self.TEST_SNAPLEN)
-            assoc_params = xmlrpc_datatypes.AssociationParameters(
-                    ssid=self.context.router.get_ssid())
-            self.context.assert_connect_wifi(assoc_params)
-            with netperf_runner.NetperfRunner(self.context.client,
-                                              self.context.router,
-                                              netperf_config) as runner:
-                runner.run()
-            results = self.context.capture_host.stop_capture()
-            if len(results) != 1:
-                raise error.TestError('Expected to generate one packet '
-                                      'capture but got %d instead.' %
-                                      len(results))
-
-            # The device should sense that it is in a clean RF environment and
-            # use the highest index to achieve maximal throughput.
-            max_mcs_index = self.get_highest_mcs_rate(ap_config.frequency)
-            self.check_bitrates_in_capture(results[0], max_mcs_index)
-            # Clean up router and client state for the next run.
-            self.context.client.shill.disconnect(self.context.router.get_ssid())
-            self.context.router.deconfig()
diff --git a/server/site_tests/network_WiFi_Reassociate/control b/server/site_tests/network_WiFi_Reassociate/control
deleted file mode 100644
index 3e948da..0000000
--- a/server/site_tests/network_WiFi_Reassociate/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'zqiu, wiley, pstew, quiche'
-NAME = 'network_WiFi_Reassociate'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test is designed to perform timing test for wpa_supplicant reassociate
-command.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_Reassociate',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Reassociate/network_WiFi_Reassociate.py b/server/site_tests/network_WiFi_Reassociate/network_WiFi_Reassociate.py
deleted file mode 100644
index a0d942f..0000000
--- a/server/site_tests/network_WiFi_Reassociate/network_WiFi_Reassociate.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_Reassociate(wifi_cell_test_base.WiFiCellTestBase):
-    """Timing test for wpa_supplicant reassociate operation."""
-    version = 1
-
-    def run_once(self):
-        """Setup and connect to an AP, then perform reassociate test."""
-        ap_config = hostap_config.HostapConfig(channel=6)
-        self.context.configure(ap_config)
-        client_conf = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid())
-        self.context.assert_connect_wifi(client_conf)
-        self.context.client.reassociate(timeout_seconds=10)
diff --git a/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectAfterSuspendDiffAP b/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectAfterSuspendDiffAP
deleted file mode 100644
index 609fddd..0000000
--- a/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectAfterSuspendDiffAP
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'samueltan, ejcaruso'
-NAME = 'network_WiFi_ReconnectInDarkResume.DisconnectAfterSuspendDiffAP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell, servo_state:WORKING, lucidsleep'
-ATTRIBUTES = 'suite:wifi_lucidsleep'
-
-DOC = """
-This test verifies that the DUT that suspends connected, then subsequently
-disconnects while suspended, successfully reconnects to a preferred network
-that later appears while it is still suspended. In this test, the preferred
-network that appears during suspend is different from the network that the DUT
-was last connected to before suspend.
-
-The test is conducted as follows:
-
-1) AP ("AP 1") is brought up, and DUT connects to it
-2) AP 1 is brought down, disconnecting the DUT from it
-3) Another AP ("AP 2") is brought up, and DUT connects to it
-4) DUT suspends while connected to AP 2
-5) AP 2 is brought down, disconnecting the DUT from it while suspended
-6) AP 1 is brought up again
-7) The DUT is woken from suspend
-8) Verify that the DUT is connected to AP 1 upon resuming from suspend
-
-We verify the connectivity status of the DUT on resume by parsing shill logs,
-since the delays involved in waking a DUT from suspend using autotest framework
-make real-time checks inaccurate.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_ReconnectInDarkResume',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 disconnect_before_suspend=False,
-                 reconnect_to_same_ap=False,
-                 num_iterations=1,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectAfterSuspendSameAP b/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectAfterSuspendSameAP
deleted file mode 100644
index afaa85c..0000000
--- a/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectAfterSuspendSameAP
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'samueltan, ejcaruso'
-NAME = 'network_WiFi_ReconnectInDarkResume.DisconnectAfterSuspendSameAP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell, servo_state:WORKING, lucidsleep'
-ATTRIBUTES = 'suite:wifi_lucidsleep'
-
-DOC = """
-This test verifies that the DUT that suspends connected, then subsequently
-disconnects while suspended, successfully reconnects to a preferred network
-that later appears while it is still suspended. In this test, the preferred
-network that appears during suspend is the same network that the DUT was last
-connected to before suspend.
-
-The test is conducted as follows:
-
-1) AP ("AP 1") is brought up, and DUT connects to it
-2) AP 1 is brought down, disconnecting the DUT from it
-3) Another AP ("AP 2") is brought up, and DUT connects to it
-4) DUT suspends while connected to AP 2
-5) AP 2 is brought down, disconnecting the DUT from it while suspended
-6) AP 2 is brought up again
-7) The DUT is woken from suspend
-8) Verify that the DUT is connected to AP 2 upon resuming from suspend
-
-We verify the connectivity status of the DUT on resume by parsing shill logs,
-since the delays involved in waking a DUT from suspend using autotest framework
-make real-time checks inaccurate.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_ReconnectInDarkResume',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 disconnect_before_suspend=False,
-                 reconnect_to_same_ap=True,
-                 num_iterations=1,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectBeforeSuspendDiffAP b/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectBeforeSuspendDiffAP
deleted file mode 100644
index d9c4236..0000000
--- a/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectBeforeSuspendDiffAP
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'samueltan, ejcaruso'
-NAME = 'network_WiFi_ReconnectInDarkResume.DisconnectBeforeSuspendDiffAP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'servo_state:WORKING, wificell, lucidsleep'
-ATTRIBUTES = 'suite:wifi_lucidsleep'
-
-DOC = """
-This test verifies that the DUT that suspends disconnected successfully
-reconnects to a preferred network that appears while it is suspended. In this
-test, the preferred network that appears during suspend is different from the
-one that the DUT was last connected to before suspend.
-
-The test is conducted as follows:
-
-1) AP ("AP 1") is brought up, and DUT connects to it
-2) AP 1 is brought down, disconnecting the DUT from it
-3) Another AP ("AP 2") is brought up, and DUT connects to it
-4) AP 2 is brought down, disconnecting the DUT from it
-5) DUT suspends while disconnected
-6) AP 1 is brought up again
-7) The DUT is woken from suspend
-8) Verify that the DUT is connected to AP 1 upon resuming from suspend
-
-We verify the connectivity status of the DUT on resume by parsing shill logs,
-since the delays involved in waking a DUT from suspend using autotest framework
-make real-time checks inaccurate.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server import utils
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_ReconnectInDarkResume',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 disconnect_before_suspend=True,
-                 reconnect_to_same_ap=False,
-                 num_iterations=1,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectBeforeSuspendSameAP b/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectBeforeSuspendSameAP
deleted file mode 100644
index c48216b..0000000
--- a/server/site_tests/network_WiFi_ReconnectInDarkResume/control.DisconnectBeforeSuspendSameAP
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'samueltan, ejcaruso'
-NAME = 'network_WiFi_ReconnectInDarkResume.DisconnectBeforeSuspendSameAP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'servo_state:WORKING, wificell, lucidsleep'
-ATTRIBUTES = 'suite:wifi_lucidsleep'
-
-DOC = """
-This test verifies that the DUT that suspends disconnected successfully
-reconnects to a preferred network that appears while it is suspended. In this
-test, the preferred network that appears during suspend is the same network that
-the DUT was last connected to before suspend.
-
-The test is conducted as follows:
-
-1) AP ("AP 1") is brought up, and DUT connects to it
-2) AP 1 is brought down, disconnecting the DUT from it
-3) Another AP ("AP 2") is brought up, and DUT connects to it
-4) AP 2 is brought down, disconnecting the DUT from it
-5) DUT suspends while disconnected
-6) AP 2 is brought up again
-7) The DUT is woken from suspend
-8) Verify that the DUT is connected to AP 2 upon resuming from suspend
-
-We verify the connectivity status of the DUT on resume by parsing shill logs,
-since the delays involved in waking a DUT from suspend using autotest framework
-make real-time checks inaccurate.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server import utils
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_ReconnectInDarkResume',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 disconnect_before_suspend=True,
-                 reconnect_to_same_ap=True,
-                 num_iterations=1,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_ReconnectInDarkResume/control.Stress b/server/site_tests/network_WiFi_ReconnectInDarkResume/control.Stress
deleted file mode 100644
index 24a1105..0000000
--- a/server/site_tests/network_WiFi_ReconnectInDarkResume/control.Stress
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'samueltan, ejcaruso'
-NAME = 'network_WiFi_ReconnectInDarkResume.Stress'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'servo_state:WORKING, wificell, lucidsleep'
-ATTRIBUTES = 'suite:wifi_lucidsleep'
-
-DOC = """
-This test verifies that the DUT successfully reconnects to a network in dark
-resume after getting disconnected from that same network while suspended.
-This test simulates the cases where multiple disconnect and SSID appearance
-events take place over a single period of suspension. The test is conducted as
-follows:
-
-1) AP ("AP 1") is brought up, and DUT connects to it
-2) AP 1 is brought down, disconnecting the DUT from it
-3) Another AP ("AP 2") is brought up, and DUT connects to it
-4) DUT suspends while connected to AP 2
-5) AP 2 is brought down, disconnecting the DUT from it while suspended
-6) AP 2 is brought up again
-7) Repeat steps 5-6 another 4 times (for 5 total iterations)
-8) The DUT is woken from suspend
-9) Verify that the DUT is connected to AP 2 upon resuming from suspend
-
-We verify the connectivity status of the DUT on resume by parsing shill logs,
-since the delays involved in waking a DUT from suspend using the autotest
-framework make real-time checks inaccurate.
-
-Note: this test suspends the DUT for long periods of time, which may lead to
-flakiness if the ssh connection from the autotest host to the DUT has a
-ServerAliveInterval setting that is too low (i.e. less than the total suspend
-time). This test was verified stable with the ServerAliveInterval set to 900.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server import utils
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_ReconnectInDarkResume',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 disconnect_before_suspend=False,
-                 reconnect_to_same_ap=True,
-                 num_iterations=5,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_ReconnectInDarkResume/network_WiFi_ReconnectInDarkResume.py b/server/site_tests/network_WiFi_ReconnectInDarkResume/network_WiFi_ReconnectInDarkResume.py
deleted file mode 100644
index ddc0026..0000000
--- a/server/site_tests/network_WiFi_ReconnectInDarkResume/network_WiFi_ReconnectInDarkResume.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import contextlib
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import lucid_sleep_test_base
-from autotest_lib.server.cros.network import wifi_client
-
-class network_WiFi_ReconnectInDarkResume(
-        lucid_sleep_test_base.LucidSleepTestBase):
-    """Test that known WiFi access points wake up the system."""
-
-    version = 1
-
-    def run_once(self,
-                 disconnect_before_suspend=False,
-                 reconnect_to_same_ap=True,
-                 num_iterations=1):
-        """Body of the test
-
-        @param disconnect_before_suspend: whether we disconnect the DUT before
-        or after first suspending it.
-        @param reconnect_to_same_ap: if true, the AP brought up during suspend
-        (for the DUT to reconnect to) is the same AP that the DUT was last
-        connected to before the first suspend.
-        @param num_iterations: number of times to bring the AP down and up
-        during dark resume. In each iteration, we bring the AP down once, and
-        bring it up again once.
-
-        """
-        client = self.context.client
-        router = self.context.router
-
-        # We configure and connect to two APs (i.e. same AP configured with two
-        # different SSIDs) so that the DUT has two preferred networks.
-        first_ap_ssid = self.configure_and_connect_to_ap(
-                hostap_config.HostapConfig(channel=1))
-        router.deconfig_aps()
-        second_ap_ssid = self.configure_and_connect_to_ap(
-                hostap_config.HostapConfig(channel=1))
-
-        if reconnect_to_same_ap:
-            reconnect_ap_ssid = second_ap_ssid
-        else:
-            reconnect_ap_ssid = first_ap_ssid
-
-        # Enable the dark connect feature in shill, and set the scan period.
-        with contextlib.nested(
-                client.wake_on_wifi_features(
-                        wifi_client.WAKE_ON_WIFI_DARKCONNECT),
-                client.net_detect_scan_period_seconds(
-                        wifi_client.NET_DETECT_SCAN_WAIT_TIME_SECONDS)):
-            logging.info('Set up WoWLAN')
-
-            bring_ap_down_in_suspend = True
-            if disconnect_before_suspend:
-                # If we disconnect before suspend, we do not need to bring the
-                # AP down again on the first suspend.
-                bring_ap_down_in_suspend = False
-                logging.info('Bringing AP %s down.' % router.get_ssid())
-                router.deconfig_aps()
-                time.sleep(wifi_client.DISCONNECT_WAIT_TIME_SECONDS)
-
-            with self.dr_utils.suspend():
-                for iter_num in xrange(1, num_iterations+1):
-                    logging.info('Iteration %d of %d' %
-                            (iter_num, num_iterations))
-                    # Wait for suspend actions to finish.
-                    time.sleep(wifi_client.SUSPEND_WAIT_TIME_SECONDS)
-
-                    if bring_ap_down_in_suspend:
-                        logging.info('Bringing AP %s down.' % router.get_ssid())
-                        router.deconfig_aps()
-                        # Wait for the DUT to receive the disconnect, wake in
-                        # dark resume, then suspend again. Wait a little more
-                        # after that so we don't trigger the next dark resume
-                        # too soon and set off the throttling mechanism.
-                        time.sleep(wifi_client.DISCONNECT_WAIT_TIME_SECONDS +
-                                   wifi_client.DARK_RESUME_WAIT_TIME_SECONDS +
-                                   60)
-                    else:
-                        # We will bring the AP back up after this, so we
-                        # will need to bring the AP down on any subsequent
-                        # iterations to test wake on disconnect.
-                        bring_ap_down_in_suspend = True
-
-                    # Bring the AP back up to wake up the DUT.
-                    logging.info('Bringing AP %s up.' % reconnect_ap_ssid)
-                    self.context.configure(hostap_config.HostapConfig(
-                            ssid=reconnect_ap_ssid, channel=1))
-
-                    # Wait long enough for the NIC on the DUT to perform a net
-                    # detect scan, discover the AP with the allowlisted SSID,
-                    # wake up in dark resume, connect, then suspend again.
-                    time.sleep(wifi_client.NET_DETECT_SCAN_WAIT_TIME_SECONDS +
-                               wifi_client.DARK_RESUME_WAIT_TIME_SECONDS)
-
-            client.check_connected_on_last_resume()
-
-            num_dark_resumes = self.dr_utils.count_dark_resumes()
-            if disconnect_before_suspend and num_iterations == 1:
-                # Only expect a single wake on SSID dark resume in this case
-                # since no wake on disconnect would have been triggered.
-                expected_num_dark_resumes = 1
-            else:
-                # Expect at least one disconnect dark resume and one SSID dark
-                # resume per iteration.
-                # Note: this is not foolproof; excess wakes on some iteration
-                # can make up for a shortfall in dark resumes in another
-                # iteration.
-                expected_num_dark_resumes = 2 * num_iterations
-            if num_dark_resumes < expected_num_dark_resumes:
-                raise error.TestFail('Client only came up in %d dark resumes '
-                                     'during the test (expected: at least %d)' %
-                                     (num_dark_resumes,
-                                      expected_num_dark_resumes))
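
For reference, the wake-count accounting that the removed network_WiFi_ReconnectInDarkResume test applied can be summarized as a small standalone sketch. This is plain illustrative Python, not part of the autotest API; it only mirrors the expectation stated above (one disconnect wake plus one SSID wake per iteration, except for the single-iteration disconnect-before-suspend case).

def minimum_expected_dark_resumes(disconnect_before_suspend, num_iterations):
    """Return the minimum number of dark resumes the removed test accepted."""
    if disconnect_before_suspend and num_iterations == 1:
        # Only a wake on SSID is expected: the DUT suspended while already
        # disconnected, so no wake on disconnect can occur.
        return 1
    # Otherwise expect at least one disconnect wake and one SSID wake per
    # iteration. As the original comment notes, this is not foolproof, since
    # excess wakes in one iteration can mask a shortfall in another.
    return 2 * num_iterations

def check_dark_resumes(observed, disconnect_before_suspend, num_iterations):
    """Raise if fewer dark resumes were observed than the test tolerates."""
    expected = minimum_expected_dark_resumes(disconnect_before_suspend,
                                             num_iterations)
    if observed < expected:
        raise AssertionError('Client only came up in %d dark resumes '
                             '(expected: at least %d)' % (observed, expected))
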
diff --git a/server/site_tests/network_WiFi_Reset/control b/server/site_tests/network_WiFi_Reset/control
deleted file mode 100644
index e0a44e8..0000000
--- a/server/site_tests/network_WiFi_Reset/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'briannorris'
-NAME = 'network_WiFi_Reset'
-TIME = 'SHORT'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc')
-
-DOC = """
-This test verifies that if a network device can be reset from user space, it
-will connect to the network correctly after being reset a few times. We also
-run through a few system suspend/resume cycles in between, for completeness.
-Only supports select drivers at the moment, but will report TestNA for
-unsupported devices.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_Reset',
-                 host=host,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Reset/network_WiFi_Reset.py b/server/site_tests/network_WiFi_Reset/network_WiFi_Reset.py
deleted file mode 100644
index ae2bb2a..0000000
--- a/server/site_tests/network_WiFi_Reset/network_WiFi_Reset.py
+++ /dev/null
@@ -1,202 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_Reset(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that the WiFi interface can be reset successfully, and that WiFi
-    comes back up properly. Also run a few suspend/resume cycles along the way.
-    Supports Marvell (mwifiex), Qualcomm/Atheros (ath10k) and Intel (iwlwifi).
-    """
-
-    version = 1
-
-    _MWIFIEX_RESET_PATH = "/sys/kernel/debug/mwifiex/%s/reset"
-    _MWIFIEX_RESET_TIMEOUT = 20
-    _MWIFIEX_RESET_INTERVAL = 0.5
-
-    _ATH10K_RESET_PATH = \
-        '/sys/kernel/debug/ieee80211/%s/ath10k/simulate_fw_crash'
-
-    # Possible reset paths for Intel wireless NICs are:
-    # 1. '/sys/kernel/debug/iwlwifi/*/iwlmvm/fw_restart'
-    # Logs look like: iwlwifi 0000:00:0c.0: 0x00000038 | BAD_COMMAND
-    # This also triggers a register dump after the restart.
-    # 2. '/sys/kernel/debug/iwlwifi/\*/iwlmvm/fw_nmi'
-    # Logs look like: iwlwifi 0000:00:0c.0: 0x00000084 | NMI_INTERRUPT_UNKNOWN
-    # This triggers a "hardware restart" once the NMI is processed
-    # 3. '/sys/kernel/debug/iwlwifi/\*/iwlmvm/fw_dbg_collect'
-    # The third one is a mechanism to collect firmware debug dumps that
-    # effectively causes a restart, but we'll leave it aside for now.
-    _IWLWIFI_RESET_PATH = '/sys/kernel/debug/iwlwifi/%s/iwlmvm/fw_restart'
-
-    _NUM_RESETS = 15
-    _NUM_SUSPENDS = 5
-    _SUSPEND_DELAY = 10
-
-    # Implement these two functions for each driver reset mechanism we want to
-    # support:
-    #  @supported: return True if this driver is supported (e.g., check for
-    #    available debugfs/sysfs triggers)
-    #  @do_reset: perform a WiFi reset to simulate a firmware crash/restart.
-    #    Different drivers may handle restart in different ways, but this
-    #    method should at least ensure the network device is available before
-    #    returning.
-    DriverReset = collections.namedtuple('DriverReset', ['supported',
-                                                         'do_reset'])
-
-
-    @property
-    def mwifiex_reset_path(self):
-        """Get path to the Wifi interface's reset file."""
-        return self._MWIFIEX_RESET_PATH % self.context.client.wifi_if
-
-    def mwifiex_reset_exists(self):
-        """Check if the mwifiex reset file is present (i.e., a mwifiex
-        interface is present).
-        """
-        return self.context.client.host.path_exists(self.mwifiex_reset_path)
-
-    def mwifiex_reset(self):
-        """Perform mwifiex reset and wait for the interface to come back up."""
-
-        ssid = self.context.router.get_ssid()
-
-        # Adapter will asynchronously reset.
-        self.context.client.host.run('echo 1 > ' + self.mwifiex_reset_path)
-
-        # Wait for disconnect. We aren't guaranteed to receive a disconnect
-        # event, but shill will at least notice the adapter went away.
-        self.context.client.wait_for_service_states(ssid, ['idle'],
-                timeout_seconds=20)
-
-        # Now wait for the reset interface file to come back.
-        utils.poll_for_condition(
-                condition=self.mwifiex_reset_exists,
-                exception=error.TestFail(
-                        'Failed to reset device %s' %
-                        self.context.client.wifi_if),
-                timeout=self._MWIFIEX_RESET_TIMEOUT,
-                sleep_interval=self._MWIFIEX_RESET_INTERVAL)
-
-    @property
-    def ath10k_reset_path(self):
-        """Get path to ath10k debugfs reset file"""
-        phy_name = self.context.client.wifi_phy_name
-        return self._ATH10K_RESET_PATH % phy_name
-
-    def ath10k_reset_exists(self):
-        """@return True if ath10k debugfs reset file exists"""
-        return self.context.client.host.path_exists(self.ath10k_reset_path)
-
-    def ath10k_reset(self):
-        """
-        Simulate ath10k firmware crash. mac80211 handles firmware crashes
-        transparently, so we don't expect a full disconnect/reconnect event.
-
-        From ath10k debugfs:
-        To simulate firmware crash write one of the keywords to this file:
-        `soft` - this will send WMI_FORCE_FW_HANG_ASSERT to firmware if FW
-            supports that command.
-        `hard` - this will send to firmware command with illegal parameters
-            causing firmware crash.
-        `assert` - this will send special illegal parameter to firmware to
-            cause assert failure and crash.
-        `hw-restart` - this will simply queue hw restart without fw/hw actually
-            crashing.
-        """
-        self.context.client.host.run('echo soft > ' + self.ath10k_reset_path)
-
-    def iwlwifi_reset_path(self):
-        """Get path to iwlwifi debugfs reset file"""
-        pci_dev_name = self.context.client.parent_device_name
-        return self._IWLWIFI_RESET_PATH % pci_dev_name
-
-    def iwlwifi_reset_exists(self):
-        """@return True if iwlwifi debugfs reset file exists"""
-        return self.context.client.host.path_exists(self.iwlwifi_reset_path())
-
-    def iwlwifi_reset(self):
-        """
-        Simulate iwlwifi firmware crash.
-        """
-        self.context.client.host.run('echo 1 > ' + self.iwlwifi_reset_path())
-
-    def get_reset_driver(self):
-        DRIVER_LIST = [
-            self.DriverReset(
-                supported=self.mwifiex_reset_exists,
-                do_reset=self.mwifiex_reset,
-            ),
-            self.DriverReset(
-                supported=self.ath10k_reset_exists,
-                do_reset=self.ath10k_reset,
-            ),
-            self.DriverReset(
-                supported=self.iwlwifi_reset_exists,
-                do_reset=self.iwlwifi_reset,
-            ),
-        ]
-
-        for driver in DRIVER_LIST:
-            if driver.supported():
-                return driver
-        else:
-            raise error.TestNAError('DUT does not support device reset')
-
-    def run_once(self):
-        """Body of the test."""
-        self._passed = False
-
-        client = self.context.client
-
-        self.boot_id = client.host.get_boot_id()
-        self.reset_driver = self.get_reset_driver()
-
-        ap_config = hostap_config.HostapConfig(channel=1)
-        ssid = self.configure_and_connect_to_ap(ap_config)
-
-        self.context.assert_ping_from_dut()
-
-        router = self.context.router
-        ssid = router.get_ssid()
-
-        logging.info("Running %d suspends", self._NUM_SUSPENDS)
-        for _ in range(self._NUM_SUSPENDS):
-            logging.info("Running %d resets", self._NUM_RESETS)
-            for __ in range(self._NUM_RESETS):
-                self.reset_driver.do_reset()
-                client.wait_for_connection(ssid)
-                self.context.assert_ping_from_dut()
-
-            client.do_suspend(self._SUSPEND_DELAY)
-            client.host.test_wait_for_resume(self.boot_id)
-            client.wait_for_connection(ssid)
-
-        self._passed = True
-
-    def cleanup(self):
-        """Performs cleanup at exit. May reboot the DUT, to keep the system
-        functioning for the next test.
-        """
-        # TODO: Technically, we should be able to handle both
-        # super(...).cleanup() and arbitrary reboots (either driver crashes or
-        # forced reboot). This would require fixing up some of WiFiCellTestBase
-        # (e.g., not to assume a persistent xmlrpc connection in cleanup()).
-        # But cleanup() is not absolutely critical -- subsequent tests should
-        # handle re-initializing state.
-        if not self._passed:
-            logging.info('Test failed: may have left DUT in bad state; '
-                         'rebooting')
-            self.context.client.reboot(timeout=60)
-        elif self.context.client.host.get_boot_id() == self.boot_id:
-            super(network_WiFi_Reset, self).cleanup()
-        else:
-            logging.info('May have rebooted during test; skipping cleanup')
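
The DriverReset table above is the interesting part of the removed network_WiFi_Reset test: each entry pairs a "supported" probe with a reset hook, the first supported entry wins, and the no-driver case becomes a test-NA result. A standalone sketch of that dispatch, with hypothetical stand-in probes instead of the real debugfs checks:

import collections

DriverReset = collections.namedtuple('DriverReset', ['supported', 'do_reset'])

class TestNA(Exception):
    """Stand-in for autotest's error.TestNAError in this sketch."""

def pick_reset_driver(drivers):
    """Return the first driver whose probe reports support."""
    for driver in drivers:
        if driver.supported():
            return driver
    raise TestNA('DUT does not support device reset')

def fake_reset():
    # Placeholder for writing to a driver-specific debugfs trigger.
    print('simulated firmware reset')

# Hypothetical wiring: the first probe fails, the second succeeds.
drivers = [
    DriverReset(supported=lambda: False, do_reset=lambda: None),
    DriverReset(supported=lambda: True, do_reset=fake_reset),
]
pick_reset_driver(drivers).do_reset()
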
diff --git a/server/site_tests/network_WiFi_RetryConnectHidden/control b/server/site_tests/network_WiFi_RetryConnectHidden/control
deleted file mode 100644
index 3422d6f..0000000
--- a/server/site_tests/network_WiFi_RetryConnectHidden/control
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-TIME = 'SHORT'
-NAME = 'network_WiFi_RetryConnectHidden'
-TEST_CATEGORY = 'Functional'
-TEST_CLASS = 'network'
-TEST_TYPE = 'Server'
-DOC = """
-This test run is designed to check that the connection manager
-re-scans a few times before quiescing after losing a connection.
-We simulate this by shutting off an AP in full sight of the
-DUT, waiting for a bit, and then reinstating the AP.  We choose
-a hidden AP since this requires the connection manager to trigger
-the scans (only the connection manager, not wpa_supplicant, knows
-to scan for hidden SSIDs).
-
-"""
-
-
-def run(machine):
-    job.run_test('network_WiFi_RetryConnectHidden',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RetryConnectHidden/network_WiFi_RetryConnectHidden.py b/server/site_tests/network_WiFi_RetryConnectHidden/network_WiFi_RetryConnectHidden.py
deleted file mode 100644
index 8bed056..0000000
--- a/server/site_tests/network_WiFi_RetryConnectHidden/network_WiFi_RetryConnectHidden.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_RetryConnectHidden(wifi_cell_test_base.WiFiCellTestBase):
-    """
-    Test that we retry to connect after an AP disappears.
-
-    This test:
-        1) Sets up a network with a single hidden BSS.
-        2) Connects the DUT to that network and that particular BSS.
-        3) Takes down the BSS in view of the DUT.
-        4) Waits for scan cache results from the device to expire.
-        5) Waits an additional few seconds to get past any immediate
-            reactions from the connection manager.
-        6) Brings the same BSS back up.
-        7) Watches to make sure the DUT connects to this BSS.
-
-    Note that since the BSS is hidden, and wpa_supplicant does not
-    know to explicitly scan for hidden BSS's, this means that shill
-    must be triggering the scans.
-
-    """
-
-    version = 1
-
-
-    def run_once(self):
-        """Test body."""
-        self.context.configure(
-                hostap_config.HostapConfig(channel=1, hide_ssid=True))
-        router_ssid = self.context.router.get_ssid()
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                ssid=router_ssid, is_hidden=True)
-        self.context.assert_connect_wifi(assoc_params)
-        self.context.router.deconfig()
-        self.context.client.wait_for_ssid_vanish(router_ssid)
-        # Don't let any of shill's short term actions affect our test.
-        time.sleep(20)
-        self.context.configure(hostap_config.HostapConfig(
-            channel=11, ssid=router_ssid, hide_ssid=True))
-        # But shill should continue to probe around for the network.
-        self.context.wait_for_connection(router_ssid)
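
The essential detail in the removed network_WiFi_RetryConnectHidden test is that the network is marked hidden on both ends, which is what forces shill (rather than wpa_supplicant) to keep scanning for it. A short sketch of that pairing, reusing the same calls as the removed test; it is only meaningful inside the autotest tree, and the SSID value is illustrative:

from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
from autotest_lib.server.cros.network import hostap_config

# AP side: do not advertise the SSID in beacons.
hidden_ap = hostap_config.HostapConfig(channel=1, hide_ssid=True)

# Client side: tell shill the network is hidden so it actively scans for it.
assoc_params = xmlrpc_datatypes.AssociationParameters(
        ssid='example_hidden_ssid', is_hidden=True)
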
diff --git a/server/site_tests/network_WiFi_Roam/control.wifi_roam1xTLS b/server/site_tests/network_WiFi_Roam/control.wifi_roam1xTLS
deleted file mode 100644
index afeea01..0000000
--- a/server/site_tests/network_WiFi_Roam/control.wifi_roam1xTLS
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_Roam.wifi_roam1xTLS'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can roam between two 802.1x EAP-TLS APs
-in full view of the DUT.
-"""
-
-
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    eap_config = xmlrpc_security_types.WPAEAPConfig(
-            server_ca_cert=site_eap_certs.ca_cert_1,
-            server_cert=site_eap_certs.server_cert_1,
-            server_key=site_eap_certs.server_private_key_1,
-            client_ca_cert=site_eap_certs.ca_cert_1,
-            client_cert=site_eap_certs.client_cert_1,
-            client_key=site_eap_certs.client_private_key_1)
-    configuration = (hostap_config.HostapConfig(
-                             channel=1, security_config=eap_config),
-                     hostap_config.HostapConfig(
-                             channel=48,
-                             mode=hostap_config.HostapConfig.MODE_11A,
-                             security_config=eap_config),
-                     xmlrpc_datatypes.AssociationParameters(
-                             security_config=eap_config))
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_Roam',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configuration)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Roam/control.wifi_roamNone b/server/site_tests/network_WiFi_Roam/control.wifi_roamNone
deleted file mode 100644
index ffbf858..0000000
--- a/server/site_tests/network_WiFi_Roam/control.wifi_roamNone
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_Roam.wifi_roamNone'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can roam between two APs in full
-view of the DUT.
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    configuration = (hostap_config.HostapConfig(channel=1),
-                     hostap_config.HostapConfig(
-                             channel=48,
-                             mode=hostap_config.HostapConfig.MODE_11A),
-                     xmlrpc_datatypes.AssociationParameters())
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_Roam',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configuration)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Roam/control.wifi_roamWEP b/server/site_tests/network_WiFi_Roam/control.wifi_roamWEP
deleted file mode 100644
index 26369f7..0000000
--- a/server/site_tests/network_WiFi_Roam/control.wifi_roamWEP
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_Roam.wifi_roamWEP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can roam between two WEP APs
-in full view of the DUT.
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    wep_config = xmlrpc_security_types.WEPConfig(
-            [ 'fedcba9876' ],
-            wep_default_key=0,
-            auth_algorithm=xmlrpc_security_types.WEPConfig.AUTH_ALGORITHM_OPEN)
-    configuration = (hostap_config.HostapConfig(
-                             channel=1, security_config=wep_config),
-                     hostap_config.HostapConfig(
-                             channel=48,
-                             mode=hostap_config.HostapConfig.MODE_11A,
-                             security_config=wep_config),
-                     xmlrpc_datatypes.AssociationParameters(
-                             security_config=wep_config))
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_Roam',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configuration)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Roam/control.wifi_roamWPA b/server/site_tests/network_WiFi_Roam/control.wifi_roamWPA
deleted file mode 100644
index d428a3f..0000000
--- a/server/site_tests/network_WiFi_Roam/control.wifi_roamWPA
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_Roam.wifi_roamWPA'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can roam between two WPA APs
-in full view of the DUT.
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    configuration = (hostap_config.HostapConfig(
-                             channel=1, security_config=wpa_config),
-                     hostap_config.HostapConfig(
-                             channel=48,
-                             mode=hostap_config.HostapConfig.MODE_11A,
-                             security_config=wpa_config),
-                     xmlrpc_datatypes.AssociationParameters(
-                             security_config=wpa_config))
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_Roam',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configuration)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_Roam/network_WiFi_Roam.py b/server/site_tests/network_WiFi_Roam/network_WiFi_Roam.py
deleted file mode 100644
index 3381877..0000000
--- a/server/site_tests/network_WiFi_Roam/network_WiFi_Roam.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_Roam(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests roaming to an AP that changes while the client is awake
-
-    This test run seeks to associate the DUT with an AP with a set of
-    association parameters, create a second AP with a second set of
-    parameters but the same SSID, and shut down the first AP.  We
-    seek to observe that the DUT successfully connects to the second
-    AP in a reasonable amount of time.
-    """
-
-    version = 1
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """Hook into super class to take control files parameters.
-
-        @param commandline_args dict of parsed parameters from the autotest.
-        @param additional_params tuple of (HostapConfig,
-                                           AssociationParameters).
-
-        """
-        self._router0_conf, self._router1_conf, self._client_conf = (
-                additional_params)
-
-
-    def run_once(self):
-        """Test body."""
-        # Configure the initial AP.
-        self.context.configure(self._router0_conf)
-        router_ssid = self.context.router.get_ssid()
-
-        # Connect to the initial AP.
-        self._client_conf.ssid = router_ssid
-        self.context.assert_connect_wifi(self._client_conf)
-
-        # Set up a second AP with the same SSID.
-        self._router1_conf.ssid = router_ssid
-        self.context.configure(self._router1_conf, multi_interface=True)
-
-        # Tear down the AP instance that the DUT is currently connected to.
-        self.context.router.deconfig_aps(instance=0)
-
-        # Expect that the DUT will re-connect to the new AP.
-        self.context.wait_for_connection(router_ssid,
-                                         self._router1_conf.frequency)
-        self.context.router.deconfig()
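
Each of the removed network_WiFi_Roam control files hands the test a three-element additional_params tuple of (initial AP config, roam-target AP config, association parameters), which parse_additional_arguments above unpacks. A minimal sketch of that handoff for the open-system case, mirroring control.wifi_roamNone; it is only meaningful inside the autotest tree:

from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
from autotest_lib.server.cros.network import hostap_config

# Control-file side: build the (router0, router1, client) tuple.
configuration = (hostap_config.HostapConfig(channel=1),
                 hostap_config.HostapConfig(
                         channel=48,
                         mode=hostap_config.HostapConfig.MODE_11A),
                 xmlrpc_datatypes.AssociationParameters())

# Test side: parse_additional_arguments unpacks the same tuple.
router0_conf, router1_conf, client_conf = configuration
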
diff --git a/server/site_tests/network_WiFi_RoamDbus/control b/server/site_tests/network_WiFi_RoamDbus/control
deleted file mode 100644
index 9c6e867..0000000
--- a/server/site_tests/network_WiFi_RoamDbus/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche, nywang'
-NAME = 'network_WiFi_RoamDbus'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can roam between two APs in full
-view of the DUT.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_RoamDbus',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RoamDbus/network_WiFi_RoamDbus.py b/server/site_tests/network_WiFi_RoamDbus/network_WiFi_RoamDbus.py
deleted file mode 100644
index 2a03793..0000000
--- a/server/site_tests/network_WiFi_RoamDbus/network_WiFi_RoamDbus.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-import logging
-
-class network_WiFi_RoamDbus(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests an intentional client-driven roam between APs
-
-    This test seeks to associate the DUT with an AP with a set of
-    association parameters, create a second AP with a second set of
-    parameters but the same SSID, and send a roam command to shill. Shill
-    then sends a D-Bus roam command to wpa_supplicant. We seek
-    to observe that the DUT successfully connects to the second AP in
-    a reasonable amount of time.
-    """
-
-    version = 1
-    TIMEOUT_SECONDS = 15
-
-    def run_once(self,host):
-        """Test body."""
-        self._router0_conf = hostap_config.HostapConfig(channel=48,
-                             mode=hostap_config.HostapConfig.MODE_11A)
-        self._router1_conf = hostap_config.HostapConfig(channel=1)
-        self._client_conf = xmlrpc_datatypes.AssociationParameters()
-
-        # Configure the initial AP.
-        self.context.configure(self._router0_conf)
-        router_ssid = self.context.router.get_ssid()
-
-        # Connect to the initial AP.
-        self._client_conf.ssid = router_ssid
-        self.context.assert_connect_wifi(self._client_conf)
-
-        # Set up a second AP with the same SSID.
-        self._router1_conf.ssid = router_ssid
-        self.context.configure(self._router1_conf, multi_interface=True)
-
-        # Get BSSIDs of the two APs
-        bssid0 = self.context.router.get_hostapd_mac(0)
-        bssid1 = self.context.router.get_hostapd_mac(1)
-
-        # Wait for DUT to see the second AP
-        self.context.client.wait_for_bss(bssid1)
-
-        # Check which AP we are currently connected to.
-        # This covers the case where wpa_supplicant
-        # automatically roams to AP2 during the scan.
-        interface = self.context.client.wifi_if
-        current_bssid = self.context.client.iw_runner.get_current_bssid(interface)
-        if current_bssid == bssid0:
-            roam_to_bssid = bssid1
-        else:
-            roam_to_bssid = bssid0
-
-        logging.info('Requesting roam from %s to %s', current_bssid, roam_to_bssid)
-        # Send a roam command to shill, which will then send a D-Bus
-        # roam command to wpa_supplicant.
-        if not self.context.client.request_roam_dbus(roam_to_bssid, interface):
-            raise error.TestFail('Failed to send roam command')
-
-        # Expect that the DUT will re-connect to the new AP.
-        if not self.context.client.wait_for_roam(
-               roam_to_bssid, timeout_seconds=self.TIMEOUT_SECONDS):
-            raise error.TestFail('Failed to roam.')
-        self.context.router.deconfig()
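
The target-selection step in the removed network_WiFi_RoamDbus test reduces to a small pure function: roam to whichever of the two BSSIDs the DUT is not currently associated with, so the test still works if wpa_supplicant already roamed on its own while scanning. An illustrative standalone sketch (the BSSID values are examples):

def pick_roam_target(current_bssid, bssid0, bssid1):
    """Return the BSSID the DUT should be asked to roam to."""
    return bssid1 if current_bssid == bssid0 else bssid0

assert pick_roam_target('02:00:00:00:03:00',
                        '02:00:00:00:03:00',
                        '02:00:00:00:04:00') == '02:00:00:00:04:00'
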
diff --git a/server/site_tests/network_WiFi_RoamDiagnostics/control b/server/site_tests/network_WiFi_RoamDiagnostics/control
deleted file mode 100644
index 77064f4..0000000
--- a/server/site_tests/network_WiFi_RoamDiagnostics/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'matthewmwang'
-NAME = 'network_WiFi_RoamDiagnostics'
-TIME = 'LONG'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-Bring up two APs and attenuate them around several values to observe and assess
-roam stickiness.
-"""
-
-
-def run(machine):
-    job.run_test('network_WiFi_RoamDiagnostics',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args,
-                 pcap_as_router=True)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RoamDiagnostics/network_WiFi_RoamDiagnostics.py b/server/site_tests/network_WiFi_RoamDiagnostics/network_WiFi_RoamDiagnostics.py
deleted file mode 100644
index a307348..0000000
--- a/server/site_tests/network_WiFi_RoamDiagnostics/network_WiFi_RoamDiagnostics.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from autotest_lib.server import site_linux_system
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-from autotest_lib.server.cros.network import wpa_mon
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-
-class network_WiFi_RoamDiagnostics(wifi_cell_test_base.WiFiCellTestBase):
-    """Bring an AP up, connect to it, set the attenuation, and vary a second AP
-    around the same RSSI as the first AP. Perform a scan after every change in
-    attenuation and observe when the device roams between APs. Record all roam
-    events in a file for analysis.
-
-    The purpose of this diagnostic is to capture the stickiness of the device's
-    roam algorithm. For example, the stickier the algorithm, the more skewed
-    toward higher RSSI differentials (between current and target AP) the
-    distribution of roams in the output files will be. This is not necessarily
-    a good thing as it's important for a device to be able to move between APs
-    when it needs to. Therefore, we use network_WiFi_RoamNatural in conjunction
-    with this test to ensure that normal roam behavior is not broken."""
-
-    version = 1
-    MAX_ATTEN = 96
-    MIN_ATTEN = 56
-    ATTEN_STEP = 4
-    ATTEN_RANGE = 12
-    ROAM_BUCKETS = 7
-
-    def test_body(self, pair_num, ap_pair, logger, roam_stats):
-        """
-        Execute the test on the given APs.
-
-        @param pair_num int: the nth time this function is called (for logging
-        purposes).
-        @param ap_pair tuple of HostapConfig objects: the APs.
-        @param logger WpaMon object: used for event monitoring.
-        """
-        self.context.configure(ap_pair[0])
-        ssid = self.context.router.get_ssid()
-        bgscan_none = xmlrpc_datatypes.BgscanConfiguration(
-            method=xmlrpc_datatypes.BgscanConfiguration.SCAN_METHOD_NONE)
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                ssid=ssid,
-                bgscan_config=bgscan_none)
-        self.context.assert_connect_wifi(assoc_params)
-        ap_pair[1].ssid = ssid
-        self.context.configure(ap_pair[1], configure_pcap=True)
-
-        roam_log = open(
-            os.path.join(self.resultsdir, str(pair_num) + "_roam.txt"), 'w')
-        for atten0 in range(self.MIN_ATTEN, self.MAX_ATTEN, self.ATTEN_STEP):
-            self.context.attenuator.set_total_attenuation(
-                atten0, ap_pair[0].frequency, 0)
-            self.context.attenuator.set_total_attenuation(
-                atten0, ap_pair[0].frequency, 1)
-
-            # Vary the RSSI of the second AP around that of the first AP.
-            min_atten = max(
-                atten0 - self.ATTEN_RANGE,
-                self.context.attenuator.get_minimal_total_attenuation())
-            max_atten = atten0 + self.ATTEN_RANGE
-            for _ in range(2):
-                for atten1 in range(max_atten, min_atten, -self.ATTEN_STEP) + \
-                              range(min_atten, max_atten, self.ATTEN_STEP):
-                    self.context.attenuator.set_total_attenuation(
-                        atten1, ap_pair[1].frequency, 2)
-                    self.context.attenuator.set_total_attenuation(
-                        atten1, ap_pair[1].frequency, 3)
-
-                    scan_success = False
-                    logger.start_event_capture()
-                    for i in range(2):
-                        # Explicitly ask shill to perform a scan. This
-                        # should induce a roam if the RSSI difference is
-                        # large enough.
-                        self.context.client.shill.request_scan()
-                        if logger.wait_for_event(wpa_mon.WpaMon.\
-                                                 CTRL_EVENT_SCAN_RESULTS):
-                            scan_success = True
-                            break
-                        logging.info("Scan failed %d time(s)", i + 1)
-                    if not scan_success:
-                        logging.error("Unable to get scan results")
-                        continue
-
-                    # Wait for roam.
-                    roams = logger.wait_for_event(
-                        wpa_mon.WpaMon.CTRL_EVENT_DO_ROAM, timeout=5)
-                    for roam in roams:
-                        logging.info(roam)
-                        roam_log.write(str(roam) + '\n')
-                        freq_pair = (int(roam.cur_freq) / 1000,
-                                     int(roam.sel_freq) / 1000)
-                        diff = max(min(int(roam.sel_level) - \
-                                       int(roam.cur_level),
-                                       (self.ROAM_BUCKETS - 1) * 2), 0)
-                        roam_stats[freq_pair][diff / 2] += 1
-
-        roam_log.close()
-        self.context.client.shill.disconnect(ssid)
-        self.context.router.deconfig()
-        self.context.pcap_host.deconfig()
-
-    def output_roam_stats(self, roam_stats):
-        """Output roam stats."""
-        for pair, stats in roam_stats.items():
-            total = sum(stats)
-            logging.info('Roams from %d GHz to %d GHz',
-                         pair[0], pair[1])
-            self.output_perf_value('roam_diagnostics_%d_%d' % \
-                                   (pair[0], pair[1]),
-                                   stats, units='roams',
-                                   higher_is_better=False)
-            for i, roams in enumerate(stats):
-                logging.info('%d roams out of %d with diff >= %d', roams,
-                             total, i * 2)
-
-    def run_once(self):
-        """Body of the test."""
-        self.context.client.require_capabilities(
-            [site_linux_system.LinuxSystem.CAPABILITY_SUPPLICANT_ROAMING])
-
-        mode = hostap_config.HostapConfig.MODE_11N_PURE
-        ap1 = hostap_config.HostapConfig(channel=1, mode=mode)
-        ap2 = hostap_config.HostapConfig(channel=2, mode=mode)
-        ap3 = hostap_config.HostapConfig(channel=36, mode=mode)
-        ap_configs = [(ap1, ap2), (ap1, ap3)]
-
-        # roam_stats records the number of roams that occurred in each roam
-        # bucket for each frequency type (2.4 GHz to 2.4 GHz, 2.4 GHz to 5 GHz,
-        # and 5 GHz to 2.4 GHz; we don't test 5 GHz to 5 GHz because the logic
-        # should be the same as 2.4 GHz to 2.4 GHz). Each bucket holds a 2 dBm
-        # range of RSSI differences at which a roam occurred. For example,
-        # roam_stats[(2, 5)][5] represents the number of roams from 2.4 GHz to
-        # 5 GHz that happened at an RSSI difference of 10-11 dBm.
-        roam_stats = {(2, 2): [0] * self.ROAM_BUCKETS,
-                      (2, 5): [0] * self.ROAM_BUCKETS,
-                      (5, 2): [0] * self.ROAM_BUCKETS}
-
-        with self.context.client._wpa_mon as logger:
-            for pair_num, ap_pair in enumerate(ap_configs):
-                self.test_body(pair_num, ap_pair, logger, roam_stats)
-
-        # Log roam distribution and output perf values
-        self.output_roam_stats(roam_stats)
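
The roam_stats bookkeeping in the removed network_WiFi_RoamDiagnostics test is compact but easy to misread. This standalone sketch (plain Python, with integer division written explicitly) shows how a single roam event lands in a band-pair bucket: each bucket covers a 2 dBm slice of RSSI difference, and the last bucket absorbs everything at or above (ROAM_BUCKETS - 1) * 2 dBm.

ROAM_BUCKETS = 7

def record_roam(roam_stats, cur_freq_mhz, sel_freq_mhz, cur_level, sel_level):
    """Bucket one roam by band pair (in GHz) and RSSI difference (in dBm)."""
    freq_pair = (cur_freq_mhz // 1000, sel_freq_mhz // 1000)
    diff = max(min(sel_level - cur_level, (ROAM_BUCKETS - 1) * 2), 0)
    roam_stats[freq_pair][diff // 2] += 1

stats = {(2, 2): [0] * ROAM_BUCKETS,
         (2, 5): [0] * ROAM_BUCKETS,
         (5, 2): [0] * ROAM_BUCKETS}

# A roam from a 2437 MHz BSS at -70 dBm to a 5745 MHz BSS at -60 dBm is a
# 10 dBm improvement, so it lands in bucket 5 (the 10-11 dBm slice).
record_roam(stats, 2437, 5745, -70, -60)
assert stats[(2, 5)][5] == 1
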
diff --git a/server/site_tests/network_WiFi_RoamEndToEnd/control b/server/site_tests/network_WiFi_RoamEndToEnd/control
index 57d3120..f938ad7 100755
--- a/server/site_tests/network_WiFi_RoamEndToEnd/control
+++ b/server/site_tests/network_WiFi_RoamEndToEnd/control
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = ('suite:wifi_endtoend, suite:wifi_release')
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 WiFi_RoamEndToEnd test configures two APs with the same ssid and runs the
diff --git a/server/site_tests/network_WiFi_RoamEndToEnd/network_WiFi_RoamEndToEnd.py b/server/site_tests/network_WiFi_RoamEndToEnd/network_WiFi_RoamEndToEnd.py
index 6d287f7..6dc581e 100755
--- a/server/site_tests/network_WiFi_RoamEndToEnd/network_WiFi_RoamEndToEnd.py
+++ b/server/site_tests/network_WiFi_RoamEndToEnd/network_WiFi_RoamEndToEnd.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/network_WiFi_RoamFT/control.EAP b/server/site_tests/network_WiFi_RoamFT/control.EAP
deleted file mode 100644
index 83daa4e..0000000
--- a/server/site_tests/network_WiFi_RoamFT/control.EAP
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'matthewmwang'
-TIME = 'SHORT'
-NAME = 'network_WiFi_RoamFT.EAP'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can roam between two APs using the FT-EAP
-key management suite (i.e. WPA-EAP with Fast BSS Transition) in full view of the
-DUT. Fast BSS Transition is part of the 802.11r protocol, which describes a
-procedure for fast roaming from one AP to another within an SSID.
-"""
-
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-
-def run(machine):
-    ft_eap_config = xmlrpc_security_types.WPAEAPConfig(
-        server_ca_cert=site_eap_certs.ca_cert_1,
-        server_cert=site_eap_certs.server_cert_1,
-        server_key=site_eap_certs.server_private_key_1,
-        client_ca_cert=site_eap_certs.ca_cert_1,
-        client_cert=site_eap_certs.client_cert_1,
-        client_key=site_eap_certs.client_private_key_1,
-        wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-        ft_mode=xmlrpc_security_types.WPAConfig.FT_MODE_PURE)
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_RoamFT',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=[ft_eap_config])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RoamFT/control.PSK b/server/site_tests/network_WiFi_RoamFT/control.PSK
deleted file mode 100644
index 029eaf1..0000000
--- a/server/site_tests/network_WiFi_RoamFT/control.PSK
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'matthewmwang'
-TIME = 'SHORT'
-NAME = 'network_WiFi_RoamFT.PSK'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can roam between two APs using the FT-PSK
-key management suite (i.e. WPA-PSK with Fast BSS Transition) in full view of the
-DUT. Fast BSS Transition is part of the 802.11r protocol, which describes a
-procedure for fast roaming from one AP to another within an SSID.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-
-def run(machine):
-    ft_psk_config = xmlrpc_security_types.WPAConfig(
-        psk='chromeos',
-        wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-        wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP],
-        ft_mode=xmlrpc_security_types.WPAConfig.FT_MODE_PURE)
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_RoamFT',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=[ft_psk_config])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RoamFT/control.mixedEAP b/server/site_tests/network_WiFi_RoamFT/control.mixedEAP
deleted file mode 100644
index fef3452..0000000
--- a/server/site_tests/network_WiFi_RoamFT/control.mixedEAP
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'matthewmwang'
-TIME = 'SHORT'
-NAME = 'network_WiFi_RoamFT.mixedEAP'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can connect to and roam to and from APs
-that support FT mixed mode for EAP in full view of the DUT. This ensures that
-devices that don't support FT are still compatible with APs that do. Fast BSS
-Transition is part of the 802.11r protocol, which describes a procedure for fast
-roaming from one AP to another within an SSID.
-"""
-
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-
-def run(machine):
-    # TODO(crbug.com/977722): we shouldn't need different certs. Remove this
-    # once the bug is resolved.
-    ft_eap_config1 = xmlrpc_security_types.WPAEAPConfig(
-        server_ca_cert=site_eap_certs.ca_cert_1,
-        server_cert=site_eap_certs.server_cert_1,
-        server_key=site_eap_certs.server_private_key_1,
-        client_ca_cert=site_eap_certs.ca_cert_1,
-        client_cert=site_eap_certs.client_cert_1,
-        client_key=site_eap_certs.client_private_key_1,
-        wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-        ft_mode=xmlrpc_security_types.WPAConfig.FT_MODE_MIXED)
-    ft_eap_config2 = xmlrpc_security_types.WPAEAPConfig(
-        server_ca_cert=site_eap_certs.ca_cert_2,
-        server_cert=site_eap_certs.server_cert_2,
-        server_key=site_eap_certs.server_private_key_2,
-        client_ca_cert=site_eap_certs.ca_cert_2,
-        client_cert=site_eap_certs.client_cert_2,
-        client_key=site_eap_certs.client_private_key_2,
-        wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-        ft_mode=xmlrpc_security_types.WPAConfig.FT_MODE_MIXED)
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_RoamFT',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=[ft_eap_config1, ft_eap_config2])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RoamFT/control.mixedPSK b/server/site_tests/network_WiFi_RoamFT/control.mixedPSK
deleted file mode 100644
index 89bfd0b..0000000
--- a/server/site_tests/network_WiFi_RoamFT/control.mixedPSK
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'matthewmwang'
-TIME = 'SHORT'
-NAME = 'network_WiFi_RoamFT.mixedPSK'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can connect to and roam to and from APs
-that support FT mixed mode for PSK in full view of the DUT. This ensures that
-devices that don't support FT are still compatible with APs that do. Fast BSS
-Transition is part of the 802.11r protocol, which describes a procedure for fast
-roaming from one AP to another within an SSID.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-
-def run(machine):
-    ft_psk_config = xmlrpc_security_types.WPAConfig(
-        psk='chromeos',
-        wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-        wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP],
-        ft_mode=xmlrpc_security_types.WPAConfig.FT_MODE_MIXED)
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_RoamFT',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=[ft_psk_config, ft_psk_config])
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RoamFT/network_WiFi_RoamFT.py b/server/site_tests/network_WiFi_RoamFT/network_WiFi_RoamFT.py
deleted file mode 100644
index c806db5..0000000
--- a/server/site_tests/network_WiFi_RoamFT/network_WiFi_RoamFT.py
+++ /dev/null
@@ -1,224 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-from autotest_lib.server import site_linux_system
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import wifi_cell_test_base
-from autotest_lib.server.cros.network import hostap_config
-
-class network_WiFi_RoamFT(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests roams on ROAM D-Bus command using FT auth suites
-
-    This test seeks to associate the DUT with an AP with a set of
-    association parameters, create a second AP with a second set of
-    parameters but the same SSID, and ask, via D-Bus, the DUT to roam
-    to the second AP. We seek to observe that the DUT successfully
-    connects to the second AP in a reasonable amount of time.
-
-    Roaming using FT is different from standard roaming in that there
-    is a special key exchange protocol that needs to occur between the
-    APs prior to a successful roam. In order for this communication to
-    work, we need to construct a specific interface architecture as
-    shown below:
-                 _________                       _________
-                |         |                     |         |
-                |   br0   |                     |   br1   |
-                |_________|                     |_________|
-                   |   |                           |   |
-               ____|   |____                   ____|   |____
-         _____|____     ____|____         ____|____     ____|_____
-        |          |   |         |       |         |   |          |
-        | managed0 |   |  veth0  | <---> |  veth1  |   | managed1 |
-        |__________|   |_________|       |_________|   |__________|
-
-    The managed0 and managed1 interfaces cannot communicate with each
-    other without a bridge. However, the same bridge cannot be used
-    to bridge the two interfaces either (you can't read from a bridge
-    that you write to as well without putting the bridge in
-    promiscuous mode). Thus, we create a virtual ethernet interface
-    with one peer on either bridge to allow the bridges to forward
-    traffic between managed0 and managed1.
-    """
-
-    version = 1
-    TIMEOUT_SECONDS = 15
-    GLOBAL_FT_PROPERTY = 'WiFi.GlobalFTEnabled'
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """Hook into super class to take control files parameters.
-
-        @param commandline_args dict of parsed parameters from the autotest.
-        @param additional_params list of xmlrpc_security_types security config.
-
-        """
-        self._security_configs = additional_params
-
-    def test_body(self, config):
-        """Test body.
-
-        @param config xmlrpc_security_types security config to use in the APs
-                      and DUT.
-        """
-
-        if config.ft_mode == xmlrpc_security_types.WPAConfig.FT_MODE_PURE:
-            self.context.client.require_capabilities(
-                [site_linux_system.LinuxSystem.CAPABILITY_SME])
-
-        # Manually create bridges for the APs; we want them ready before the
-        # second AP is up, so we can link it up before clients try to roam to
-        # it.
-        br0 = self.context.router.create_brif()
-        br1 = self.context.router.create_brif()
-
-        self.veth0 = 'veth0'
-        self.veth1 = 'veth1'
-
-        # Cleanup veth interfaces from previous runs
-        self.context.router.delete_link(self.veth0)
-        self.context.router.delete_link(self.veth1)
-
-        # Set up virtual ethernet interface so APs can talk to each other
-        try:
-            self.context.router.router.run('ip link add %s type veth peer name '
-                                           '%s' % (self.veth0, self.veth1))
-            self.context.router.router.run('ifconfig %s up' % self.veth0)
-            self.context.router.router.run('ifconfig %s up' % self.veth1)
-            self.context.router.router.run('ip link set %s master %s' %
-                                           (self.veth0, br0))
-            self.context.router.router.run('ip link set %s master %s' %
-                                           (self.veth1, br1))
-        except Exception as e:
-            raise error.TestFail('veth configuration failed: %s' % e)
-
-        mac0 = '02:00:00:00:03:00'
-        mac1 = '02:00:00:00:04:00'
-        id0 = '020000000300'
-        id1 = '020000000400'
-        key0 = '0f0e0d0c0b0a09080706050403020100'
-        key1 = '000102030405060708090a0b0c0d0e0f'
-        mdid = 'a1b2'
-        router0_conf = hostap_config.HostapConfig(channel=1,
-                       mode=hostap_config.HostapConfig.MODE_11G,
-                       security_config=config,
-                       bssid=mac0,
-                       mdid=mdid,
-                       nas_id=id0,
-                       r1kh_id=id0,
-                       r0kh='%s %s %s' % (mac1, id1, key0),
-                       r1kh='%s %s %s' % (mac1, mac1, key1),
-                       bridge=br0)
-        n_caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]
-        ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80]
-        channel_width_80_mhz = hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80
-        router1_conf = hostap_config.HostapConfig(channel=157,
-                       mode=hostap_config.HostapConfig.MODE_11AC_PURE,
-                       n_capabilities=n_caps,
-                       ac_capabilities=ac_caps,
-                       vht_channel_width=channel_width_80_mhz,
-                       vht_center_channel=155,
-                       security_config=config,
-                       bssid=mac1,
-                       mdid=mdid,
-                       nas_id=id1,
-                       r1kh_id=id1,
-                       r0kh='%s %s %s' % (mac0, id0, key1),
-                       r1kh='%s %s %s' % (mac0, mac0, key0),
-                       bridge=br1)
-        bgscan_none = xmlrpc_datatypes.BgscanConfiguration(
-            method=xmlrpc_datatypes.BgscanConfiguration.SCAN_METHOD_NONE)
-        client_conf = xmlrpc_datatypes.AssociationParameters(
-                      security_config=config,
-                      bgscan_config=bgscan_none)
-
-        # Configure the initial AP.
-        logging.info('Bringing up first AP')
-        self.context.configure(router0_conf)
-        router_ssid = self.context.router.get_ssid()
-
-        # Connect to the initial AP.
-        client_conf.ssid = router_ssid
-        self.context.assert_connect_wifi(client_conf)
-
-        # Note that we assume that only one roam happens here. It's possible,
-        # despite bgscan being turned off, that shill kicks off a scan and we
-        # roam before the ROAM D-Bus command is issued, in which case we will
-        # end up with more than one disconnect event. This will cause the test
-        # to fail, but as it is, we don't have a great solution for this. Table
-        # this until we port this test to Tast.
-        with self.context.client.assert_disconnect_count(1):
-            # Setup a second AP with the same SSID.
-            logging.info('Bringing up second AP')
-            router1_conf.ssid = router_ssid
-            self.context.configure(router1_conf, multi_interface=True)
-
-            # Get BSSIDs of the two APs.
-            bssid0 = self.context.router.get_hostapd_mac(0)
-            bssid1 = self.context.router.get_hostapd_mac(1)
-            curr_ap_if = self.context.router.get_hostapd_interface(0)
-
-            interface = self.context.client.wifi_if
-
-            # Wait for DUT to see the second AP
-            # TODO(matthewmwang): wait_for_bss uses iw to check whether or not
-            # the BSS appears in the scan results, but when we request a roam
-            # with wpa_supplicant afterward, we race with wpa_supplicant
-            # receiving the updated scan results. When migrating the test to
-            # Tast, poll wpa_supplicant for scan results instead.
-            self.context.client.wait_for_bss(bssid1)
-
-            logging.info('Requesting roam from %s to %s', bssid0, bssid1)
-            # Ask shill to request a roam from wpa_supplicant via D-Bus.
-            if not self.context.client.request_roam_dbus(bssid1, interface):
-                raise error.TestFail('Failed to send roam command')
-
-            # Expect that the DUT will re-connect to the new AP.
-            if not self.context.client.wait_for_roam(
-                    bssid1, timeout_seconds=self.TIMEOUT_SECONDS):
-                raise error.TestFail('Failed to roam')
-
-            # We've roamed at the 802.11 layer, but make sure Shill brings the
-            # connection up completely (DHCP).
-            # TODO(https://crbug.com/1070321): Note that we don't run any ping
-            # test.
-            # Check that we don't disconnect along the way here, in case we're
-            # ping-ponging around APs -- and after the first (failed) roam, the
-            # second re-connection will not be testing FT at all.
-            self.context.client.wait_for_connection(router_ssid)
-            curr = self.context.client.iw_runner.get_current_bssid(interface)
-            if curr != bssid1:
-                raise error.TestFail(
-                    'Unexpectedly roamed back: current BSS %s, expected %s' %
-                        (curr, bssid1))
-
-        self.context.client.shill.disconnect(router_ssid)
-        self.context.router.deconfig()
-
-    def run_once(self, host):
-        """
-        Set global FT switch and call test_body.
-        """
-        self.context.client.require_capabilities(
-            [site_linux_system.LinuxSystem.CAPABILITY_SUPPLICANT_ROAMING])
-
-        assert len(self._security_configs) == 1 or \
-                len(self._security_configs) == 2
-
-        with self.context.client.set_manager_property(
-                self.GLOBAL_FT_PROPERTY, True):
-            self.test_body(self._security_configs[0])
-        if len(self._security_configs) > 1:
-            logging.info('Disabling FT and trying again')
-            with self.context.client.set_manager_property(
-                    self.GLOBAL_FT_PROPERTY, False):
-                self.test_body(self._security_configs[1])
-
-    def cleanup(self):
-        """Cleanup function."""
-
-        if hasattr(self, 'veth0'):
-            self.context.router.delete_link(self.veth0)
-        super(network_WiFi_RoamFT, self).cleanup()
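The mdid/nas_id/r1kh_id/r0kh/r1kh arguments passed to HostapConfig in the deleted test above describe the 802.11r key-holder relationship between the two APs: each AP names the peer as its R0KH/R1KH and shares the two 128-bit keys that protect the inter-AP key distribution. As a rough sketch, router0's entries amount to the following; the dict keys are standard hostapd option names, but the exact way hostap_config.py emits them is an assumption, not something this patch shows:

    # Sketch only: hostapd-style FT fields implied by router0_conf above.
    ROUTER0_FT_FIELDS = {
        'mobility_domain': 'a1b2',           # mdid, shared by both APs
        'nas_identifier': '020000000300',    # this AP's NAS ID / R0KH-ID
        'r1_key_holder': '020000000300',     # this AP's R1KH-ID
        # Trust the peer AP (mac1/id1) as R0KH and R1KH, with the shared keys:
        'r0kh': '02:00:00:00:04:00 020000000400 '
                '0f0e0d0c0b0a09080706050403020100',
        'r1kh': '02:00:00:00:04:00 02:00:00:00:04:00 '
                '000102030405060708090a0b0c0d0e0f',
    }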
diff --git a/server/site_tests/network_WiFi_RoamNatural/control b/server/site_tests/network_WiFi_RoamNatural/control
deleted file mode 100644
index 490c6d2..0000000
--- a/server/site_tests/network_WiFi_RoamNatural/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'matthewmwang'
-NAME = 'network_WiFi_RoamNatural'
-TIME = 'LONG'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-Bring up two APs and attenuate them around several values to observe and assess
-roam behavior.
-"""
-
-
-def run(machine):
-    job.run_test('network_WiFi_RoamNatural',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args,
-                 pcap_as_router=True)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RoamNatural/network_WiFi_RoamNatural.py b/server/site_tests/network_WiFi_RoamNatural/network_WiFi_RoamNatural.py
deleted file mode 100644
index a24daaa..0000000
--- a/server/site_tests/network_WiFi_RoamNatural/network_WiFi_RoamNatural.py
+++ /dev/null
@@ -1,197 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from datetime import datetime
-import logging
-import time
-import os
-
-from autotest_lib.server import site_linux_system
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-from autotest_lib.server.cros.network import wpa_mon
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-
-class network_WiFi_RoamNatural(wifi_cell_test_base.WiFiCellTestBase):
-    """Bring up two APs, connect, vary attenuation as if the device is moving
-    between the two APs (i.e. the signal gets weaker on one and stronger on the
-    other until the first one cannot be seen anymore). At some point before the
-    first AP is torn down, the device should have roamed to the second AP. If it
-    doesn't, there will be an association failure, which we can then log and
-    write to a file. Ideally, there would be no association failures and a roam
-    every time we expected one. Realistically, RSSI can vary quite widely, and
-    we can't expect to see a good roam signal on every scan even where there
-    should be one.
-
-    This test is used to sanity-check that "normal" roaming behavior is not
-    broken by any roaming algorithm changes. A couple of failed associations
-    are acceptable, but any more than that is a good indication that roaming
-    has become too sticky."""
-
-    version = 1
-
-    MAX_CENTER = 100
-    MIN_CENTER = 84
-    MAX_ATTEN = 106
-    ATTEN_STEP = 2
-
-    def test_body(self, pair_num, ap_pair, logger, roam_stats, failure_stats):
-        """
-        Execute the test with the given APs and record stats.
-
-        @param pair_num int: the nth time this function is called (for results
-        logging purposes).
-        @param ap_pair tuple of HostapConfig objects: the APs
-        @param logger WpaMon object: used for event monitoring
-        @param roam_stats dict of tuple to int: used to log roam stats.
-        @param failure_stats int list with a single int element: used to log
-        assoc failure stats.
-        """
-        # Reset the attenuation here since it won't have been reset after
-        # previous iterations of this function.
-        self.context.attenuator.set_variable_attenuation(0)
-
-        min_atten = self.context.attenuator.get_minimal_total_attenuation()
-        ap_pair[0].ssid = None
-        self.context.configure(ap_pair[0])
-        ssid = self.context.router.get_ssid()
-        self.context.assert_connect_wifi(
-            xmlrpc_datatypes.AssociationParameters(ssid=ssid))
-        ap_pair[0].ssid = ssid
-        ap_pair[1].ssid = ssid
-        self.context.configure(ap_pair[1], configure_pcap=True)
-        self.context.client.wait_for_bss(self.context.pcap_host.get_hostapd_mac(0))
-        skip_roam_log = open(
-            os.path.join(self.resultsdir,
-                         str(pair_num) + '_skip_roam.txt'), 'w')
-        assoc_failure_log = open(
-            os.path.join(self.resultsdir,
-                         str(pair_num) + '_failure.txt'), 'w')
-        for center in range(self.MIN_CENTER, self.MAX_CENTER,
-                            2 * self.ATTEN_STEP):
-            # The attenuation should [con,di]verge around center. We move
-            # the attenuation out 2dBm at a time until self.MAX_ATTEN is hit
-            # on one AP, at which point we tear that AP down to simulate it
-            # disappearing from the DUT's view. This should trigger a deauth
-            # if the DUT is still associated.
-            max_offset = self.MAX_ATTEN - center
-            for _ in range(2):
-                ranges = [range(0, max_offset, self.ATTEN_STEP),
-                          range(max_offset, 0, -self.ATTEN_STEP),
-                          range(0, -max_offset, -self.ATTEN_STEP),
-                          range(-max_offset, 0, self.ATTEN_STEP)]
-                for r, _ in enumerate(ranges):
-                    self.context.client.clear_supplicant_blacklist()
-                    logger.start_event_capture()
-                    for offset in ranges[r]:
-                        ap1_atten = max(center + offset, min_atten)
-                        ap2_atten = max(center - offset, min_atten)
-                        self.context.attenuator.set_total_attenuation(
-                            ap1_atten, ap_pair[0].frequency, 0)
-                        self.context.attenuator.set_total_attenuation(
-                            ap1_atten, ap_pair[0].frequency, 1)
-                        self.context.attenuator.set_total_attenuation(
-                            ap2_atten, ap_pair[1].frequency, 2)
-                        self.context.attenuator.set_total_attenuation(
-                            ap2_atten, ap_pair[1].frequency, 3)
-                        time.sleep(2)
-                    if r % 2 == 1:
-                        # The APs' RSSIs should have converged. No reason to
-                        # check for disconnects/roams here.
-                        continue
-
-                    if r == 0:
-                        # First AP is no longer in view
-                        self.context.router.deconfig()
-                    elif r == 2:
-                        # Second AP is no longer in view
-                        self.context.pcap_host.deconfig()
-
-                    dc_events = logger.wait_for_event(
-                        wpa_mon.WpaMon.CTRL_EVENT_DISCONNECTED, timeout=5)
-                    if dc_events:
-                        # Association failure happened, check if this
-                        # was because a roam was skipped.
-                        skip_roams = logger.get_events(
-                            wpa_mon.WpaMon.CTRL_EVENT_SKIP_ROAM, True)
-                        if skip_roams:
-                            # Skipped roam caused association failure, log this
-                            # so we can re-examine the roam decision.
-                            for roam in skip_roams:
-                                logging.info(roam)
-                                skip_roam_log.write(str(roam) + '\n')
-                                freq_pair = (int(roam.cur_freq) / 1000,
-                                             int(roam.sel_freq) / 1000)
-                                roam_stats[freq_pair] += 1
-                        else:
-                            # Association failure happened for some other reason
-                            # (likely because AP disappeared before scan
-                            # results returned). Log the failure for the
-                            # timestamp in case we'd like to take a closer look.
-                            for event in dc_events:
-                                dc = str(datetime.now()) + ' ' +  str(event)
-                                logging.info(dc)
-                                assoc_failure_log.write(dc + '\n')
-                                failure_stats[0] += 1
-
-                    # Reset the attenuation here. In some groamer cells, the
-                    # attenuation for 5GHz channels is miscalibrated such that
-                    # the RSSI is lower than expected. If we bring the AP back
-                    # up while it's still maximally attenuated, it may not be
-                    # visible to the DUT (the test was written deliberately so
-                    # that it wouldn't happen even at full attenuation for
-                    # properly calibrated cells, but this is apparently not
-                    # always a good assumption).
-                    self.context.attenuator.set_variable_attenuation(0)
-
-                    self.context.configure(ap_pair[r / 2],
-                                           configure_pcap=(r == 2))
-                    host = self.context.router if r == 0 else \
-                        self.context.pcap_host
-                    self.context.client.wait_for_bss(host.get_hostapd_mac(0))
-
-        skip_roam_log.close()
-        assoc_failure_log.close()
-        self.context.client.shill.disconnect(ssid)
-        self.context.router.deconfig()
-        self.context.pcap_host.deconfig()
-
-    def output_roam_stats(self, roam_skip_stats, failure_stats):
-        """Output roam stats."""
-        for pair, skips in roam_skip_stats.items():
-            logging.info('%d association failures caused by skipped roams ' \
-                         'from %s GHz to %s GHz', skips, pair[0], pair[1])
-            self.output_perf_value('roam_natural_%s_%s' % (pair[0], pair[1]),
-                                   skips, units='roams skipped',
-                                   higher_is_better=False)
-        logging.info('%d association failures unrelated to skipped roams',
-                     failure_stats)
-        self.output_perf_value('roam_natural_assoc_failures', failure_stats,
-                               units='association failures',
-                               higher_is_better=False)
-
-    def run_once(self):
-        """Body of the test."""
-        self.context.client.require_capabilities(
-            [site_linux_system.LinuxSystem.CAPABILITY_SUPPLICANT_ROAMING])
-
-        mode = hostap_config.HostapConfig.MODE_11N_PURE
-        ap1 = hostap_config.HostapConfig(channel=1, mode=mode)
-        ap2 = hostap_config.HostapConfig(channel=2, mode=mode)
-        ap3 = hostap_config.HostapConfig(channel=36, mode=mode)
-        ap_configs = [(ap1, ap2), (ap1, ap3)]
-
-        # Dictionary of roams skipped keyed by a pair of ints representing the
-        # current AP's frequency band and the target AP's frequency band.
-        roam_stats = {(2, 2): 0,
-                      (2, 5): 0,
-                      (5, 2): 0}
-        failure_stats = [0]
-
-        with self.context.client._wpa_mon as logger:
-            for pair_num, ap_pair in enumerate(ap_configs):
-                self.test_body(pair_num, ap_pair, logger, roam_stats, failure_stats)
-
-        # Log roam skips and assoc failures and output perf values
-        self.output_roam_stats(roam_stats, failure_stats[0])
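The nested range() arithmetic in test_body above is the core of the deleted test: for each center value, the two APs' attenuations are walked apart 2 dB at a time until one side reaches MAX_ATTEN (that AP is then torn down), then walked back together, in both directions. A standalone restatement of the sweep, for readability only; min_atten is a placeholder here, whereas the test reads it from the attenuator:

    def attenuation_sweep(center, max_atten=106, step=2, min_atten=60):
        """Yield (ap1_atten, ap2_atten) dB pairs for one center value."""
        max_offset = max_atten - center
        phases = [range(0, max_offset, step),    # AP1 fades, AP2 strengthens
                  range(max_offset, 0, -step),   # converge back toward center
                  range(0, -max_offset, -step),  # AP2 fades, AP1 strengthens
                  range(-max_offset, 0, step)]   # converge back again
        for phase in phases:
            for offset in phase:
                yield (max(center + offset, min_atten),
                       max(center - offset, min_atten))

Only the two diverging phases are followed by an AP teardown and a check for disconnects and skipped roams; the converging phases just bring the RSSIs back together before the next pass.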
diff --git a/server/site_tests/network_WiFi_RoamSuspend/control b/server/site_tests/network_WiFi_RoamSuspend/control
deleted file mode 100644
index 72bc6cc..0000000
--- a/server/site_tests/network_WiFi_RoamSuspend/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-TIME = 'SHORT'
-NAME = 'network_WiFi_RoamSuspend'
-TEST_TYPE = 'Server'
-DOC = """
-The RoamSuspend test verifies that the connection manager is able
-to join a new AP (new frequency, new BSSID, but same SSID as previously
-connected AP) when the previous AP goes away while the DUT is suspended.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_RoamSuspend',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RoamSuspend/network_WiFi_RoamSuspend.py b/server/site_tests/network_WiFi_RoamSuspend/network_WiFi_RoamSuspend.py
deleted file mode 100644
index 20ab18f..0000000
--- a/server/site_tests/network_WiFi_RoamSuspend/network_WiFi_RoamSuspend.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_RoamSuspend(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests roaming to an AP that changes while we're suspended.
-
-    This test:
-    1) Sets up a network with a single BSS.
-    2) Connects the DUT to that network and that particular BSS.
-    3) Places the DUT in suspend-to-RAM.
-    4) Replaces the BSS with another BSS on the same SSID.
-    5) Watches to make sure the DUT connects to this BSS on resume.
-
-    """
-
-    version = 1
-
-    FREQUENCY_1 = 2412
-    FREQUENCY_2 = 5240
-    BSSID_1 = "00:01:02:03:04:05"
-    BSSID_2 = "06:07:08:09:0a:0b"
-
-
-    def run_once(self):
-        """Test body."""
-        logging.info("- Set up AP, connect.")
-        self.context.configure(hostap_config.HostapConfig(
-                frequency=network_WiFi_RoamSuspend.FREQUENCY_1,
-                mode=hostap_config.HostapConfig.MODE_11B,
-                bssid=network_WiFi_RoamSuspend.BSSID_1))
-        router_ssid = self.context.router.get_ssid()
-        self.context.assert_connect_wifi(xmlrpc_datatypes.AssociationParameters(
-                ssid=router_ssid))
-
-        # For this short a duration, the DUT should either still consider
-        # itself connected to the AP and simply resume without re-associating,
-        # or reconnect quickly enough (without intervention from the
-        # connection manager) that it appears to remain connected.
-        logging.info("- Short suspend, verify we're still connected.")
-        self.context.client.do_suspend(10)
-        self.context.assert_ping_from_dut()
-
-        logging.info("- Reconfigure the AP during longer suspend.")
-        self.context.client.do_suspend_bg(20)
-        # Locally, let's wait 15 seconds to make sure the DUT is really asleep
-        # before we proceed.
-        time.sleep(15)
-        self.context.configure(hostap_config.HostapConfig(
-                ssid=router_ssid,
-                frequency=network_WiFi_RoamSuspend.FREQUENCY_2,
-                mode=hostap_config.HostapConfig.MODE_11A,
-                bssid=network_WiFi_RoamSuspend.BSSID_2))
-
-        logging.info("- Verify that we roam to same network w/new parameters.")
-        self.context.wait_for_connection(router_ssid,
-                                         network_WiFi_RoamSuspend.FREQUENCY_2)
-        self.context.router.deconfig()
diff --git a/server/site_tests/network_WiFi_RoamSuspendEndToEnd/control b/server/site_tests/network_WiFi_RoamSuspendEndToEnd/control
index cfb72e4..c9adec9 100755
--- a/server/site_tests/network_WiFi_RoamSuspendEndToEnd/control
+++ b/server/site_tests/network_WiFi_RoamSuspendEndToEnd/control
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:wifi_endtoend'
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 WiFi_RoamSuspendEndToEnd test configures two APs with the same ssid and runs the
diff --git a/server/site_tests/network_WiFi_RoamSuspendEndToEnd/network_WiFi_RoamSuspendEndToEnd.py b/server/site_tests/network_WiFi_RoamSuspendEndToEnd/network_WiFi_RoamSuspendEndToEnd.py
index 02cd479..54dfe05 100755
--- a/server/site_tests/network_WiFi_RoamSuspendEndToEnd/network_WiFi_RoamSuspendEndToEnd.py
+++ b/server/site_tests/network_WiFi_RoamSuspendEndToEnd/network_WiFi_RoamSuspendEndToEnd.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/network_WiFi_RoamSuspendSSID/control.none b/server/site_tests/network_WiFi_RoamSuspendSSID/control.none
deleted file mode 100644
index 483a377..0000000
--- a/server/site_tests/network_WiFi_RoamSuspendSSID/control.none
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-TIME = 'SHORT'
-NAME = 'network_WiFi_RoamSuspendSSID.none'
-TEST_TYPE = 'Server'
-DOC = """
-This test verifies that the connection manager connects to a
-previously connected network on resume from suspend when an
-SSID disappears during suspend.
-
-"""
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    mode = hostap_config.HostapConfig.MODE_11N_MIXED
-    ap_config0 = hostap_config.HostapConfig(channel=1, mode=mode)
-    ap_config1 = hostap_config.HostapConfig(channel=36, mode=mode)
-    job.run_test('network_WiFi_RoamSuspendSSID',
-                 tag=NAME.split('.')[1],
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args,
-                 additional_params=(ap_config0, ap_config1))
-
-
-parallel_simple(run, machines)
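The tag passed to job.run_test in these control variants is derived from NAME, so each variant's results are reported under its own suffix. For this file the idiom evaluates as follows:

    NAME = 'network_WiFi_RoamSuspendSSID.none'
    tag = NAME.split('.')[1]    # -> 'none'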
diff --git a/server/site_tests/network_WiFi_RoamSuspendSSID/control.rsn b/server/site_tests/network_WiFi_RoamSuspendSSID/control.rsn
deleted file mode 100644
index b533407..0000000
--- a/server/site_tests/network_WiFi_RoamSuspendSSID/control.rsn
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-TIME = 'SHORT'
-NAME = 'network_WiFi_RoamSuspendSSID.rsn'
-TEST_TYPE = 'Server'
-DOC = """
-This test verifies that the connection manager connects to a
-previously connected network on resume from suspend when an
-SSID disappears during suspend.
-
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    mode = hostap_config.HostapConfig.MODE_11N_MIXED
-    wpa_config0 = xmlrpc_security_types.WPAConfig(
-            psk='chromeos0',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    wpa_config1 = xmlrpc_security_types.WPAConfig(
-            psk='chromeos1',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    ap_config0 = hostap_config.HostapConfig(channel=1,
-                                            security_config=wpa_config0,
-                                            mode=mode)
-    ap_config1 = hostap_config.HostapConfig(channel=44,
-                                            security_config=wpa_config1,
-                                            mode=mode)
-    job.run_test('network_WiFi_RoamSuspendSSID',
-                 tag=NAME.split('.')[1],
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args,
-                 additional_params=(ap_config0, ap_config1))
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RoamSuspendSSID/control.wpa b/server/site_tests/network_WiFi_RoamSuspendSSID/control.wpa
deleted file mode 100644
index c82f3b0..0000000
--- a/server/site_tests/network_WiFi_RoamSuspendSSID/control.wpa
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-TIME = 'SHORT'
-NAME = 'network_WiFi_RoamSuspendSSID.wpa'
-TEST_TYPE = 'Server'
-DOC = """
-This test verifies that the connection manager connects to a
-previously connected network on resume from suspend when an
-SSID disappears during suspend.
-
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    mode = hostap_config.HostapConfig.MODE_11N_MIXED
-    wpa_config0 = xmlrpc_security_types.WPAConfig(
-            psk='chromeos0',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP])
-    wpa_config1 = xmlrpc_security_types.WPAConfig(
-            psk='chromeos1',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP])
-    ap_config0 = hostap_config.HostapConfig(channel=44,
-                                            security_config=wpa_config0,
-                                            mode=mode)
-    ap_config1 = hostap_config.HostapConfig(channel=11,
-                                            security_config=wpa_config1,
-                                            mode=mode)
-    job.run_test('network_WiFi_RoamSuspendSSID',
-                 tag=NAME.split('.')[1],
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args,
-                 additional_params=(ap_config0, ap_config1))
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_RoamSuspendSSID/network_WiFi_RoamSuspendSSID.py b/server/site_tests/network_WiFi_RoamSuspendSSID/network_WiFi_RoamSuspendSSID.py
deleted file mode 100644
index 6200466..0000000
--- a/server/site_tests/network_WiFi_RoamSuspendSSID/network_WiFi_RoamSuspendSSID.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_RoamSuspendSSID(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests roaming to a new SSID when a previous SSID disappears in suspend.
-
-    This test:
-    1) Connects the DUT to a network A.
-    2) Connects the DUT to a network B while keeping network A around.
-    3) Suspends the DUT (while connected to network B).
-    4) Deconfigures (takes down) network B.
-    5) Asserts that the DUT automatically connects to network A on resume.
-
-    """
-
-    version = 1
-
-    SUSPEND_TIME_SECONDS = 15
-
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """Hook into super class to take control files parameters.
-
-        @param commandline_args: dict of parsed parameters from the autotest.
-        @param additional_params: tuple(HostapConfig, HostapConfig) used as
-                networks A and B from the test description.
-
-        """
-        self._ap_config0 = additional_params[0]
-        self._ap_config1 = additional_params[1]
-
-
-    def run_once(self):
-        """Test body."""
-        get_client_config = lambda ssid, ap_config: \
-                xmlrpc_datatypes.AssociationParameters(
-                        ssid=ssid,
-                        security_config=ap_config.security_config)
-        self.context.configure(self._ap_config0)
-        self.context.configure(self._ap_config1, multi_interface=True)
-        self.context.assert_connect_wifi(
-                get_client_config(self.context.router.get_ssid(instance=0),
-                                  self._ap_config0))
-        self.context.assert_connect_wifi(
-                get_client_config(self.context.router.get_ssid(instance=1),
-                                  self._ap_config1))
-        self.context.client.do_suspend_bg(self.SUSPEND_TIME_SECONDS + 5)
-        logging.info('Waiting %d seconds for DUT to be fully suspended.',
-                     self.SUSPEND_TIME_SECONDS)
-        time.sleep(self.SUSPEND_TIME_SECONDS)
-        logging.info('Tearing down the most recently connected AP.')
-        self.context.router.deconfig_aps(instance=1)
-        logging.info('Expect that we connect to our previously connected AP '
-                     'on resume.')
-        self.context.wait_for_connection(
-                self.context.router.get_ssid(instance=0),
-                self._ap_config0.frequency)
diff --git a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11a b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11a
index 7447241..c48836e 100644
--- a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11a
+++ b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11a
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = "suite:wifi_flaky"
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test verifies that the DUT can reconnect to an open 802.11a network
diff --git a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11b b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11b
index 6a5f20d..e514a66 100644
--- a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11b
+++ b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11b
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = "suite:wifi_flaky"
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test attempts to verify that we can reconnect to a router over 802.11b
diff --git a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11g b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11g
index 41ef5f3..45b85a1 100644
--- a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11g
+++ b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.11g
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = "suite:wifi_flaky"
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test attempts to verify that we can reconnect to a router over 802.11g
diff --git a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wep104 b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wep104
index 9b19100..d8108df 100644
--- a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wep104
+++ b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wep104
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = "suite:wifi_flaky"
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test case verifies that the DUT can reconnect to an AP using WEP
diff --git a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wep40 b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wep40
index 546ae14..bbe451c 100644
--- a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wep40
+++ b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wep40
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = "suite:wifi_flaky"
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test case verifies that the DUT can reconnect to an AP using
diff --git a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wpa b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wpa
index 6a381fc..b1c6aa8 100644
--- a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wpa
+++ b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wpa
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = "suite:wifi_flaky"
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test verifies that we can reconnect to an AP broadcasting a WPA network
diff --git a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wpa2 b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wpa2
index 38710a9..eb9783a 100644
--- a/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wpa2
+++ b/server/site_tests/network_WiFi_RoamSuspendTimeout/control.wpa2
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = "suite:wifi_matfunc"
 DEPENDENCIES = 'wificell'
+PY_VERSION = 3
 
 DOC = """
 This test verifies that we can reconnect to an AP broadcasting a WPA2 (aka
diff --git a/server/site_tests/network_WiFi_RoamSuspendTimeout/network_WiFi_RoamSuspendTimeout.py b/server/site_tests/network_WiFi_RoamSuspendTimeout/network_WiFi_RoamSuspendTimeout.py
index 9c8a01c..b3e5b13 100644
--- a/server/site_tests/network_WiFi_RoamSuspendTimeout/network_WiFi_RoamSuspendTimeout.py
+++ b/server/site_tests/network_WiFi_RoamSuspendTimeout/network_WiFi_RoamSuspendTimeout.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/network_WiFi_SSIDSwitchBack/control b/server/site_tests/network_WiFi_SSIDSwitchBack/control
deleted file mode 100644
index 1700d34..0000000
--- a/server/site_tests/network_WiFi_SSIDSwitchBack/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-TIME = 'SHORT'
-NAME = 'network_WiFi_SSIDSwitchBack'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-The SSIDSwitchBack test verifies that the connection manager is able
-to rejoin a previously connected AP when it loses connectivity to its
-current AP.
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SSIDSwitchBack',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SSIDSwitchBack/network_WiFi_SSIDSwitchBack.py b/server/site_tests/network_WiFi_SSIDSwitchBack/network_WiFi_SSIDSwitchBack.py
deleted file mode 100644
index cfc6bfd..0000000
--- a/server/site_tests/network_WiFi_SSIDSwitchBack/network_WiFi_SSIDSwitchBack.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import iw_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_SSIDSwitchBack(wifi_cell_test_base.WiFiCellTestBase):
-    """Tests roaming to a previous AP when current AP disappears."""
-
-    version = 1
-
-    FREQUENCY_1 = 2412
-    FREQUENCY_2 = 2437
-    BSSID_1 = "00:01:02:03:04:05"
-    BSSID_2 = "06:07:08:09:0a:0b"
-    SSID_1 = "InsideADogItsTooDarkToRead"
-    SSID_2 = "HeReallyIsAnIdiot"
-
-
-    def configure_connect_verify_deconfig_wait(self, ssid, freq, mode, bssid):
-        """Configure an AP, connect to it, then tear it all down, again.
-
-        This method does the following: configures the AP, connects to it and
-        verifies the connection, deconfigures the AP and waits for the
-        disconnect to complete.
-
-        @param ssid string SSID for the new connection.
-        @param freq int Frequency which the AP is to support.
-        @param mode string AP mode from hostap_config.HostapConfig.MODE_*.
-        @param bssid string BSSID for the new connection.
-
-        """
-        # Change channels on the AP.  This happens in full view of the DUT
-        # and the AP deauths everyone as it exits.
-        ap_config = hostap_config.HostapConfig(ssid=ssid, frequency=freq,
-                                               mode=mode, bssid=bssid)
-        self.context.configure(ap_config)
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid())
-        self.context.assert_connect_wifi(assoc_params)
-
-        self.context.assert_ping_from_dut()  # Verify that we're connected.
-        self.context.client.check_iw_link_value(
-                iw_runner.IW_LINK_KEY_FREQUENCY,
-                freq)  # Verify that the client switched to new frequency
-
-        # Deconfig and wait for the DUT to disconnect and end up at 'idle'.
-        self.context.router.deconfig()
-        self.context.client.wait_for_service_states(
-                        network_WiFi_SSIDSwitchBack.SSID_1, ['idle'], 30)
-
-
-    def run_once(self):
-        """Test body."""
-        # Connect to the first AP.  This just guarantees that this AP has
-        # been placed in the connection manager profile.  Then deconfig.
-        self.configure_connect_verify_deconfig_wait(
-                network_WiFi_SSIDSwitchBack.SSID_1,
-                network_WiFi_SSIDSwitchBack.FREQUENCY_1,
-                hostap_config.HostapConfig.MODE_11B,
-                network_WiFi_SSIDSwitchBack.BSSID_1)
-
-        # Configure and connect to the second AP.  Then deconfig.
-        self.configure_connect_verify_deconfig_wait(
-                network_WiFi_SSIDSwitchBack.SSID_2,
-                network_WiFi_SSIDSwitchBack.FREQUENCY_2,
-                hostap_config.HostapConfig.MODE_11G,
-                network_WiFi_SSIDSwitchBack.BSSID_2)
-
-        # Bring the first AP back up.
-        ap_config = hostap_config.HostapConfig(
-                ssid=network_WiFi_SSIDSwitchBack.SSID_1,
-                frequency=network_WiFi_SSIDSwitchBack.FREQUENCY_1,
-                mode=hostap_config.HostapConfig.MODE_11B,
-                bssid=network_WiFi_SSIDSwitchBack.BSSID_1)
-        self.context.configure(ap_config)
-
-        # Instead of explicitly connecting, just wait to see if the DUT
-        # re-connects by itself
-        success, state, elapsed_seconds = \
-                self.context.client.wait_for_service_states(
-                        network_WiFi_SSIDSwitchBack.SSID_1,
-                        self.context.client.CONNECTED_STATES, 30)
-        if (not success or
-            state not in self.context.client.CONNECTED_STATES):
-            raise error.TestFail(
-                    'Failed to connect to "%s" in %f seconds (state=%s)' %
-                    (network_WiFi_SSIDSwitchBack.SSID_1, elapsed_seconds,
-                     state))
-
-        # Verify that we're connected.
-        self.context.assert_ping_from_dut()
-
-        # Verify that the client switched to the original frequency
-        self.context.client.check_iw_link_value(
-                iw_runner.IW_LINK_KEY_FREQUENCY,
-                network_WiFi_SSIDSwitchBack.FREQUENCY_1)
-        self.context.router.deconfig()
diff --git a/server/site_tests/network_WiFi_SecChange/control b/server/site_tests/network_WiFi_SecChange/control
deleted file mode 100644
index 12c7e6c..0000000
--- a/server/site_tests/network_WiFi_SecChange/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SecChange'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test checks that we can connect to a BSS after it changes security modes.
-In particular, we change the security of a BSS from WPA to open and assert that
-we can continue to connect.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SecChange',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SecChange/network_WiFi_SecChange.py b/server/site_tests/network_WiFi_SecChange/network_WiFi_SecChange.py
deleted file mode 100644
index 5de39d2..0000000
--- a/server/site_tests/network_WiFi_SecChange/network_WiFi_SecChange.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_SecChange(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that we can connect to a BSS despite security changes."""
-
-    version = 1
-
-    TEST_SSID = 'My_security_changes'
-
-
-    def run_once(self):
-        """Test body."""
-        wpa_config = xmlrpc_security_types.WPAConfig(
-                psk='chromeos',
-                wpa_mode=xmlrpc_security_types.WPAConfig.MODE_MIXED_WPA,
-                wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP,
-                             xmlrpc_security_types.WPAConfig.CIPHER_CCMP],
-                wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-        ap_config = hostap_config.HostapConfig(
-                    ssid=self.TEST_SSID,
-                    frequency=2412,
-                    mode=hostap_config.HostapConfig.MODE_11B,
-                    security_config=wpa_config)
-        self.context.configure(ap_config)
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid(),
-                security_config=wpa_config)
-        self.context.assert_connect_wifi(assoc_params)
-        self.context.assert_ping_from_dut()
-        self.context.client.shill.disconnect(assoc_params.ssid)
-        # This deconfig erases the state stored in the router around WPA.
-        self.context.router.deconfig()
-        # Now we change the same SSID to be an open network.
-        ap_config = hostap_config.HostapConfig(
-                    ssid=self.TEST_SSID,
-                    frequency=2412,
-                    mode=hostap_config.HostapConfig.MODE_11B)
-        self.context.configure(ap_config)
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid())
-        self.context.assert_connect_wifi(assoc_params)
-        self.context.assert_ping_from_dut()
-        self.context.client.shill.disconnect(assoc_params.ssid)
-        self.context.router.deconfig()
diff --git a/server/site_tests/network_WiFi_SetOptionalDhcpProperties/control b/server/site_tests/network_WiFi_SetOptionalDhcpProperties/control
deleted file mode 100644
index c22405c..0000000
--- a/server/site_tests/network_WiFi_SetOptionalDhcpProperties/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'silberst, pstew, quiche'
-NAME = 'network_WiFi_SetOptionalDhcpProperties'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wificell-cq')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that optional DHCP properties set on the DUT are used as
-parameters in DHCP Request messages.
-
-"""
-
-def run(machine):
-    job.run_test('network_WiFi_SetOptionalDhcpProperties',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SetOptionalDhcpProperties/network_WiFi_SetOptionalDhcpProperties.py b/server/site_tests/network_WiFi_SetOptionalDhcpProperties/network_WiFi_SetOptionalDhcpProperties.py
deleted file mode 100644
index 2bad9e2..0000000
--- a/server/site_tests/network_WiFi_SetOptionalDhcpProperties/network_WiFi_SetOptionalDhcpProperties.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import tcpdump_analyzer
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_SetOptionalDhcpProperties(
-        wifi_cell_test_base.WiFiCellTestBase):
-    """Test that the optional DHCP properties are used in DHCP Requests."""
-    version = 1
-
-    FREQUENCY_MHZ = 2412
-    HOSTNAME_PROPERTY = 'DHCPProperty.Hostname'
-    VENDORCLASS_PROPERTY = 'DHCPProperty.VendorClass'
-    HOSTNAME_VALUE = 'testHostName'
-    VENDORCLASS_VALUE = 'testVendorClass'
-
-    def assert_hostname_and_vendorclass_are_present(self, pcap_result):
-        """
-        Confirm DHCP Request contains HostName and VendorClass properties.
-
-        @param pcap_result: RemoteCaptureResult tuple.
-
-        """
-
-        logging.info('Analyzing packet capture...')
-        dhcp_filter = ('(bootp.option.dhcp == 3) '
-                       'and (bootp.option.vendor_class_id == %s) '
-                       'and (bootp.option.hostname == %s)'
-                       % (self.VENDORCLASS_VALUE, self.HOSTNAME_VALUE))
-        dhcp_frames = tcpdump_analyzer.get_frames(pcap_result.local_pcap_path,
-                                                  dhcp_filter,
-                                                  reject_bad_fcs=False)
-        if not dhcp_frames:
-            raise error.TestFail('Packet capture did not contain a DHCP '
-                                 'negotiation!')
-
-
-    def run_once(self):
-        """
-        Test that optional DHCP properties are used in DHCP Request packets.
-
-        The test will temporarily set the DHCPProperty.VendorClass and
-        DHCPProperty.Hostname DBus properties for the Manager.  The test
-        resets the properties to their original values before completion.
-        During the test, the DUT should send packets with the optional DHCP
-        property settings in DHCP Requests.  If they are not in the packet,
-        the test will fail.
-
-        """
-
-        configuration = hostap_config.HostapConfig(frequency=self.FREQUENCY_MHZ)
-        self.context.configure(configuration)
-
-        # set hostname and vendorclass for this test
-        client = self.context.client
-        with client.set_manager_property(self.HOSTNAME_PROPERTY,
-                                         self.HOSTNAME_VALUE):
-            with client.set_manager_property(self.VENDORCLASS_PROPERTY,
-                                             self.VENDORCLASS_VALUE):
-                self.context.capture_host.start_capture(
-                        configuration.frequency,
-                        width_type=configuration.packet_capture_mode)
-                assoc_params = xmlrpc_datatypes.AssociationParameters()
-                assoc_params.ssid = self.context.router.get_ssid(instance=0)
-                self.context.assert_connect_wifi(assoc_params)
-                self.context.assert_ping_from_dut()
-                results = self.context.capture_host.stop_capture()
-        if len(results) != 1:
-            raise error.TestError('Expected to generate one packet '
-                                  'capture but got %d captures instead.' %
-                                  len(results))
-        self.assert_hostname_and_vendorclass_are_present(results[0])
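For reference, with the class constants above, the display filter handed to tcpdump_analyzer.get_frames() expands to the string below (DHCP message type 3 is DHCPREQUEST; the compared values are left unquoted exactly as in the original code):

    dhcp_filter = ('(bootp.option.dhcp == 3) '
                   'and (bootp.option.vendor_class_id == testVendorClass) '
                   'and (bootp.option.hostname == testHostName)')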
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11a b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11a
deleted file mode 100644
index ec45585..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check11a
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_check11a'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that the DUT can connect to an open 802.11a network
-on channels 48 and 64.
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    a_mode = hostap_config.HostapConfig.MODE_11A
-    configurations = [(hostap_config.HostapConfig(channel=48, mode=a_mode),
-                       xmlrpc_datatypes.AssociationParameters()),
-                      (hostap_config.HostapConfig(channel=64, mode=a_mode),
-                       xmlrpc_datatypes.AssociationParameters())]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_PEAP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_PEAP
deleted file mode 100644
index 319efaf..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_PEAP
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_check1x_PEAP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can connect to a router while using
-PEAP authentication with tunneled MSCHAPv2, MD5, and GTC.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import tunneled_1x_tests
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    # Negative tests check the parts of the state machine that handle failures.
-    # We do these tests for only one inner authentication protocol because
-    # we presume that supplicant reuses this code between inner authentication
-    # types.
-    configurations = tunneled_1x_tests.get_negative_8021x_test_cases(
-            xmlrpc_security_types.Tunneled1xConfig.LAYER1_TYPE_PEAP,
-            xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_MSCHAPV2)
-    for inner in (xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_MSCHAPV2,
-                  xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_MD5,
-                  xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_GTC):
-        configurations += tunneled_1x_tests.get_positive_8021x_test_cases(
-                xmlrpc_security_types.Tunneled1xConfig.LAYER1_TYPE_PEAP,
-                inner)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_TTLS b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_TTLS
deleted file mode 100644
index 64a7b17..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_TTLS
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_check1x_TTLS'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can connect to a router while using
-TTLS authentication with several tunneled protocols.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import tunneled_1x_tests
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    # Negative tests check the parts of the state machine that handle failures.
-    # We do these tests for only one inner authentication protocol because
-    # we presume that supplicant reuses this code between inner authentication
-    # types.
-    configurations = tunneled_1x_tests.get_negative_8021x_test_cases(
-            xmlrpc_security_types.Tunneled1xConfig.LAYER1_TYPE_TTLS,
-            xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_MD5)
-    inners = (xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_MSCHAPV2,
-              xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_MD5,
-              xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_TTLS_MSCHAPV2,
-              xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_TTLS_MSCHAP,
-              xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_TTLS_PAP,
-              xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_GTC)
-    for inner in inners:
-        configurations += tunneled_1x_tests.get_positive_8021x_test_cases(
-                xmlrpc_security_types.Tunneled1xConfig.LAYER1_TYPE_TTLS,
-                inner)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WEP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WEP
deleted file mode 100644
index 61e4425..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WEP
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_check1x_WEP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can connect to a router while using
-dynamic WEP encryption.
-"""
-
-
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    sec_config = xmlrpc_security_types.DynamicWEPConfig(
-            wep_rekey_period=20,
-            server_ca_cert=site_eap_certs.ca_cert_1,
-            server_cert=site_eap_certs.server_cert_1,
-            server_key=site_eap_certs.server_private_key_1,
-            client_ca_cert=site_eap_certs.ca_cert_1,
-            client_cert=site_eap_certs.client_cert_1,
-            client_key=site_eap_certs.client_private_key_1)
-    assoc_params = xmlrpc_datatypes.AssociationParameters()
-    assoc_params.security_config = sec_config
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=sec_config)
-    configurations = [(ap_config, assoc_params)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WPA b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WPA
deleted file mode 100644
index 26184b0..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check1x_WPA
+++ /dev/null
@@ -1,133 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_check1x_WPA'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_matfunc, suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test attempts to verify that we can connect to a router while using
-vanilla 802.1x authentication to set up a WPA pipe.
-"""
-
-
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def get_configurations():
-    configurations = []
-    # Make sure we can connect when everything is set up correctly.
-    eap_config = xmlrpc_security_types.WPAEAPConfig(
-            server_ca_cert=site_eap_certs.ca_cert_1,
-            server_cert=site_eap_certs.server_cert_1,
-            server_key=site_eap_certs.server_private_key_1,
-            client_ca_cert=site_eap_certs.ca_cert_1,
-            client_cert=site_eap_certs.client_cert_1,
-            client_key=site_eap_certs.client_private_key_1)
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=eap_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters(
-            security_config=eap_config)
-    configurations.append((ap_config, assoc_params))
-    # But if we have no CA cert to check the router against, we fail to connect.
-    eap_config = xmlrpc_security_types.WPAEAPConfig(
-            server_ca_cert=site_eap_certs.ca_cert_1,
-            server_cert=site_eap_certs.server_cert_1,
-            server_key=site_eap_certs.server_private_key_1,
-            client_ca_cert=None,
-            client_cert=site_eap_certs.client_cert_1,
-            client_key=site_eap_certs.client_private_key_1)
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=eap_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters(
-            security_config=eap_config,
-            expect_failure=True)
-    configurations.append((ap_config, assoc_params))
-    # And if we have a CA cert, but it doesn't match, that should fail.
-    eap_config = xmlrpc_security_types.WPAEAPConfig(
-            server_ca_cert=site_eap_certs.ca_cert_1,
-            server_cert=site_eap_certs.server_cert_1,
-            server_key=site_eap_certs.server_private_key_1,
-            client_ca_cert=site_eap_certs.ca_cert_2,
-            client_cert=site_eap_certs.client_cert_1,
-            client_key=site_eap_certs.client_private_key_1)
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=eap_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters(
-            security_config=eap_config,
-            expect_failure=True)
-    configurations.append((ap_config, assoc_params))
-    # But if we specify that we have no CA certs, then again, we connect.
-    eap_config = xmlrpc_security_types.WPAEAPConfig(
-            server_ca_cert=site_eap_certs.ca_cert_1,
-            server_cert=site_eap_certs.server_cert_1,
-            server_key=site_eap_certs.server_private_key_1,
-            client_ca_cert=None,
-            client_cert=site_eap_certs.client_cert_1,
-            client_key=site_eap_certs.client_private_key_1,
-            use_system_cas=False)
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=eap_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters(
-            security_config=eap_config)
-    configurations.append((ap_config, assoc_params))
-    # The server will reject us if we use the wrong certificate chain.
-    eap_config = xmlrpc_security_types.WPAEAPConfig(
-            server_ca_cert=site_eap_certs.ca_cert_1,
-            server_cert=site_eap_certs.server_cert_1,
-            server_key=site_eap_certs.server_private_key_1,
-            client_ca_cert=site_eap_certs.ca_cert_1,
-            client_cert=site_eap_certs.client_cert_2,
-            client_key=site_eap_certs.client_private_key_2)
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=eap_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters(
-            security_config=eap_config,
-            expect_failure=True)
-    configurations.append((ap_config, assoc_params))
-    # We will reject the server if its certificate is expired.
-    eap_config = xmlrpc_security_types.WPAEAPConfig(
-            server_ca_cert=site_eap_certs.ca_cert_1,
-            server_cert=site_eap_certs.server_expired_cert,
-            server_key=site_eap_certs.server_expired_key,
-            client_ca_cert=site_eap_certs.ca_cert_1,
-            client_cert=site_eap_certs.client_cert_1,
-            client_key=site_eap_certs.client_private_key_1)
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=eap_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters(
-            security_config=eap_config,
-            expect_failure=True)
-    configurations.append((ap_config, assoc_params))
-    return configurations
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=get_configurations())
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT20 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT20
deleted file mode 100644
index a69097d..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT20
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_check24HT20'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that the DUT can connect to an open 802.11n network
-on 2.4 GHz channels (20 MHz channels only).
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    caps = [hostap_config.HostapConfig.N_CAPABILITY_GREENFIELD,
-            hostap_config.HostapConfig.N_CAPABILITY_HT20]
-    n = hostap_config.HostapConfig.MODE_11N_PURE
-    configurations = [
-            (hostap_config.HostapConfig(channel=1, mode=n, n_capabilities=caps),
-             xmlrpc_datatypes.AssociationParameters()),
-            (hostap_config.HostapConfig(channel=6, mode=n, n_capabilities=caps),
-             xmlrpc_datatypes.AssociationParameters()),
-            (hostap_config.HostapConfig(channel=11, mode=n, n_capabilities=caps),
-             xmlrpc_datatypes.AssociationParameters())]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT40 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT40
deleted file mode 100644
index 37f6fad..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_check24HT40
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_check24HT40'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that the DUT can connect to an open 802.11n network
-on a 2.4 GHz channel (40 MHz channel width).
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    caps = [hostap_config.HostapConfig.N_CAPABILITY_GREENFIELD,
-            hostap_config.HostapConfig.N_CAPABILITY_HT40]
-    n = hostap_config.HostapConfig.MODE_11N_PURE
-    configurations = [(hostap_config.HostapConfig(frequency=2437, mode=n,
-                                                  n_capabilities=caps),
-                       xmlrpc_datatypes.AssociationParameters())]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWEP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWEP
deleted file mode 100644
index 5aa57e7..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWEP
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkHiddenWEP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test case verifies that the DUT can connect to a hidden SSID
-using all combinations of 40/104-bit keys with open and shared-key WEP authentication.
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def get_configurations():
-    wep40_keys = ['0123456789', '89abcdef01', '9876543210', 'fedcba9876']
-    wep104_keys = ['0123456789abcdef0123456789', '89abcdef0123456789abcdef01',
-                   'fedcba9876543210fedcba9876', '109fedcba987654321fedcba98']
-    auth_algs = (xmlrpc_security_types.WEPConfig.AUTH_ALGORITHM_OPEN,
-                 xmlrpc_security_types.WEPConfig.AUTH_ALGORITHM_SHARED)
-    configurations = []
-    for wep_keys in (wep40_keys, wep104_keys):
-        for auth_alg in auth_algs:
-            wep_config = xmlrpc_security_types.WEPConfig(
-                    wep_keys=wep_keys,
-                    auth_algorithm=auth_alg)
-            ap_config = hostap_config.HostapConfig(
-                    hide_ssid=True,
-                    frequency=2412,
-                    mode=hostap_config.HostapConfig.MODE_11G,
-                    security_config=wep_config)
-            assoc_params = xmlrpc_datatypes.AssociationParameters(
-                    is_hidden=True,
-                    security_config=wep_config)
-            configurations.append((ap_config, assoc_params))
-    return configurations
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=get_configurations())
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWPA b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWPA
deleted file mode 100644
index 497f515..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkHiddenWPA
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkHiddenWPA'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that we can connect to hidden WPA networks.  It checks
-several common configurations of hidden networks such as pure WPA/WPA2 and
-mixed WPA/WPA2 networks.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def get_configurations():
-    wpa_configs = [
-            xmlrpc_security_types.WPAConfig(
-                    psk='chromeos',
-                    wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-                    wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP]),
-            xmlrpc_security_types.WPAConfig(
-                    psk='chromeos',
-                    wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-                    wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP,
-                                 xmlrpc_security_types.WPAConfig.CIPHER_CCMP]),
-            xmlrpc_security_types.WPAConfig(
-                    psk='chromeos',
-                    wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-                    wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP]),
-            xmlrpc_security_types.WPAConfig(
-                    psk='chromeos',
-                    wpa_mode=xmlrpc_security_types.WPAConfig.MODE_MIXED_WPA,
-                    wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP,
-                                 xmlrpc_security_types.WPAConfig.CIPHER_CCMP],
-                    wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP]),
-    ]
-    configurations = []
-    for wpa_config in wpa_configs:
-        ap_config = hostap_config.HostapConfig(
-                hide_ssid=True,
-                frequency=2412,
-                mode=hostap_config.HostapConfig.MODE_11G,
-                security_config=wpa_config)
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                is_hidden=True,
-                security_config=wpa_config)
-        configurations.append((ap_config, assoc_params))
-    return configurations
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=get_configurations())
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkMixedWPA b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkMixedWPA
deleted file mode 100644
index c2ee6b8..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkMixedWPA
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkMixedWPA'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that we can connect to an AP advertising support for both
-WPA and WPA2 with TKIP/AES supported for WPA and AES supported for WPA2.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_MIXED_WPA,
-            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP,
-                         xmlrpc_security_types.WPAConfig.CIPHER_TKIP],
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=wpa_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters()
-    assoc_params.security_config = wpa_config
-    configurations = [(ap_config, assoc_params)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkOddWPAPassphrases b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkOddWPAPassphrases
deleted file mode 100644
index f459e60..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkOddWPAPassphrases
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkOddWPAPassphrases'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that we can connect to an AP broadcasting a WPA network
-with a variety of unusual passphrases.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def get_configurations():
-  wpa_configs = []
-  psks = ['\xe4\xb8\x80\xe4\xba\x8c\xe4\xb8\x89',  # Pure unicode.
-          'abcdef\xc2\xa2',  # Mixed unicode and ASCII.
-          ' !"#$%&\'()>*+,-./:;<=>?@[\\]^_{|}~']  # All punctuation.
-  for psk in psks:
-      wpa_configs.append(xmlrpc_security_types.WPAConfig(
-              psk=psk,
-              wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-              wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP]))
-      wpa_configs.append(xmlrpc_security_types.WPAConfig(
-              psk=psk,
-              wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-              wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP]))
-  return [(hostap_config.HostapConfig(
-                   frequency=2412,
-                   mode=hostap_config.HostapConfig.MODE_11G,
-                   security_config=wpa_config),
-           xmlrpc_datatypes.AssociationParameters(security_config=wpa_config))
-          for wpa_config in wpa_configs]
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=get_configurations())
-
-
-parallel_simple(run, machines)
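Editor's note: the escaped psk byte strings in the deleted control file above are UTF-8 byte sequences. A minimal illustrative snippet (not part of the control file) decoding them, so the "pure unicode" and "mixed" cases are readable:

    # Illustrative only: decode the escaped byte strings used as PSKs above.
    for psk in (b'\xe4\xb8\x80\xe4\xba\x8c\xe4\xb8\x89',  # pure Unicode: the characters for one, two, three
                b'abcdef\xc2\xa2'):                       # mixed ASCII and Unicode
        print(psk.decode('utf-8'))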
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkRawPMK b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkRawPMK
deleted file mode 100644
index 108bd14..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkRawPMK
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkRawPMK'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that the DUT can connect to a WPA network using
-a raw PMK value instead of an ASCII passphrase.
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='0123456789abcdef' * 4,
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP])
-    ap_config = hostap_config.HostapConfig(
-            channel=1,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=wpa_config)
-    client_config = xmlrpc_datatypes.AssociationParameters(
-            security_config=wpa_config)
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=[(ap_config, client_config)])
-
-
-parallel_simple(run, machines)
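Editor's note on the "raw PMK" wording in the deleted control file above: WPA/WPA2-Personal normally derives the 256-bit PMK from an ASCII passphrase and the SSID with PBKDF2-HMAC-SHA1 (4096 iterations), while a 64-hex-digit PSK, like the one used above, is taken as the PMK directly. A minimal sketch of that convention; this is not autotest code and the helper name is hypothetical:

    import binascii
    import hashlib

    def derive_pmk(psk, ssid):
        """Return the 32-byte PMK for a WPA/WPA2-Personal network."""
        if len(psk) == 64:
            # 64 hex digits: treat the value as a raw PMK, no derivation.
            return binascii.unhexlify(psk)
        # Standard passphrase-to-PMK derivation (IEEE 802.11i).
        return hashlib.pbkdf2_hmac('sha1', psk.encode(), ssid.encode(), 4096, 32)

    print(derive_pmk('0123456789abcdef' * 4, 'example-ssid').hex())
    print(derive_pmk('chromeos', 'example-ssid').hex())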
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP104 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP104
deleted file mode 100644
index fb1925f..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP104
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkWEP104'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test case verifies that the DUT can connect to an AP using WEP with both
-open and shared-key authentication and 104-bit pre-shared keys.
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-
-def run(machine):
-    # FYI: D-Bus requires that string parameters be valid UTF-8.
-    wep_keys = ['0123456789abcdef0123456789',
-                'mlk:ihgfedcba',
-                # Mix ASCII characters with 3-byte UTF-8 characters.
-                'd\xe4\xb8\x80\xe4\xba\x8c\xe4\xb8\x89\xe5\x9b\x9b',
-                # Mix different byte length UTF-8 characters.
-                '\xe4\xb8\x80\xe4\xba\x8c\xe4\xb8\x89\xc2\xa2\xc2\xa3']
-    auth_algs = (xmlrpc_security_types.WEPConfig.AUTH_ALGORITHM_OPEN,
-                 xmlrpc_security_types.WEPConfig.AUTH_ALGORITHM_SHARED)
-    configurations = []
-    for auth_algorithm in auth_algs:
-        for i in range(len(wep_keys)):
-            wep_config = xmlrpc_security_types.WEPConfig(
-                    wep_keys,
-                    wep_default_key=i,
-                    auth_algorithm=auth_algorithm)
-            ap_config = hostap_config.HostapConfig(
-                    frequency=2412,
-                    mode=hostap_config.HostapConfig.MODE_11G,
-                    security_config=wep_config)
-            assoc_params = xmlrpc_datatypes.AssociationParameters(
-                    security_config=wep_config)
-            configurations.append((ap_config, assoc_params))
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP40 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP40
deleted file mode 100644
index 4133982..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWEP40
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkWEP40'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test case verifies that the DUT can connect to an AP using
-WEP with both open and shared-key authentication and 40-bit
-pre-shared keys.
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    # FYI: D-Bus requires that string parameters be valid UTF-8.
-    wep_keys = ['abcde', # Interpreted as ASCII.
-                'fedcba9876',  # Interpreted as hex.
-                # Mix ASCII characters with a 3-byte UTF-8 character.
-                'ab\xe4\xb8\x89',
-                # Mix different byte length UTF-8 characters.
-                '\xe4\xb8\x89\xc2\xa2']
-    auth_algs = (xmlrpc_security_types.WEPConfig.AUTH_ALGORITHM_OPEN,
-                 xmlrpc_security_types.WEPConfig.AUTH_ALGORITHM_SHARED)
-    configurations = []
-    for auth_algorithm in auth_algs:
-        for i in range(len(wep_keys)):
-            wep_config = xmlrpc_security_types.WEPConfig(
-                    wep_keys,
-                    wep_default_key=i,
-                    auth_algorithm=auth_algorithm)
-            ap_config = hostap_config.HostapConfig(
-                    frequency=2412,
-                    mode=hostap_config.HostapConfig.MODE_11G,
-                    security_config=wep_config)
-            assoc_params = xmlrpc_datatypes.AssociationParameters(
-                    security_config=wep_config)
-            configurations.append((ap_config, assoc_params))
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
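Editor's note on the "interpreted as ASCII / hex" comments in the deleted control file above: WEP key size is conventionally inferred from key length, with 5 ASCII or 10 hex characters giving a 40-bit key and 13 ASCII or 26 hex characters giving a 104-bit key. A hypothetical helper (not from autotest) illustrating that convention:

    import string

    def classify_wep_key(key):
        """Classify a WEP key string by the usual length convention."""
        if len(key) in (10, 26) and all(c in string.hexdigits for c in key):
            return '%d-bit hex key' % (len(key) * 4)
        if len(key) in (5, 13):
            return '%d-bit ASCII key' % (len(key) * 8)
        return 'unrecognized WEP key length'

    print(classify_wep_key('abcde'))       # 40-bit ASCII key
    print(classify_wep_key('fedcba9876'))  # 40-bit hex key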
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2 b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2
deleted file mode 100644
index ae7d7a0..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA2'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that we can connect to an AP broadcasting a WPA2 (aka RSN)
-network.  By definition, traffic on this network is encrypted under AES.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=wpa_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters()
-    assoc_params.security_config = wpa_config
-    configurations = [(ap_config, assoc_params)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_PMF b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_PMF
deleted file mode 100644
index 36daf6a..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_PMF
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA2_CCMP_PMF'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that we can connect to an AP broadcasting a WPA2 network
-using AES-based CCMP.  In addition, the client must support 802.11w
-protected management frames.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            pmf_support=hostap_config.HostapConfig.PMF_SUPPORT_REQUIRED,
-            security_config=wpa_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters()
-    assoc_params.security_config = wpa_config
-    configurations = [(ap_config, assoc_params)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_TKIP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_TKIP
deleted file mode 100644
index 78f5b1d..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA2_TKIP
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA2_TKIP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that we can connect to an AP broadcasting a WPA2 (aka RSN)
-network.  Some APs still use TKIP in this mode.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP])
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=wpa_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters(
-            security_config = wpa_config)
-    configurations = [(ap_config, assoc_params)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_CCMP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_CCMP
deleted file mode 100644
index da6f95b..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_CCMP
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA_CCMP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that we can connect to an AP broadcasting a WPA network
-using AES-based CCMP.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=wpa_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters()
-    assoc_params.security_config = wpa_config
-    configurations = [(ap_config, assoc_params)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_TKIP b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_TKIP
deleted file mode 100644
index ac4d0ba..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_TKIP
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA_TKIP'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that we can connect to an AP broadcasting a WPA network
-using TKIP.  TKIP is deprecated because of known security weaknesses, and WPA
-is deprecated in favor of WPA2, but routers of a certain age support only
-this mode.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_TKIP])
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=wpa_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters()
-    assoc_params.security_config = wpa_config
-    configurations = [(ap_config, assoc_params)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_multi b/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_multi
deleted file mode 100644
index a928b40..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/control.wifi_checkWPA_multi
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SimpleConnect.wifi_checkWPA_multi'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ('suite:wifi_release')
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that we can connect to an AP broadcasting a WPA network
-supporting both AES-based CCMP and TKIP.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA,
-            wpa_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP,
-                         xmlrpc_security_types.WPAConfig.CIPHER_TKIP])
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=wpa_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters()
-    assoc_params.security_config = wpa_config
-    configurations = [(ap_config, assoc_params)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_SimpleConnect',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SimpleConnect/network_WiFi_SimpleConnect.py b/server/site_tests/network_WiFi_SimpleConnect/network_WiFi_SimpleConnect.py
deleted file mode 100644
index 43ee21d..0000000
--- a/server/site_tests/network_WiFi_SimpleConnect/network_WiFi_SimpleConnect.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-class network_WiFi_SimpleConnect(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that we can connect to router configured in various ways."""
-    version = 1
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """Hook into super class to take control files parameters.
-
-        @param commandline_args dict of parsed parameters from the autotest.
-        @param additional_params list of tuple(HostapConfig,
-                                               AssociationParameters).
-
-        """
-        self._configurations = additional_params
-
-
-    def run_once(self):
-        """Sets up a router, connects to it, pings it, and repeats."""
-        client_mac = self.context.client.wifi_mac
-        for router_conf, client_conf in self._configurations:
-            self.context.configure(router_conf)
-            self.context.capture_host.start_capture(router_conf.frequency,
-                    width_type=router_conf.packet_capture_mode)
-            client_conf.ssid = self.context.router.get_ssid()
-            assoc_result = self.context.assert_connect_wifi(client_conf)
-            if client_conf.expect_failure:
-                logging.info('Skipping ping because we expected this '
-                             'attempt to fail.')
-            else:
-                with self.context.client.assert_no_disconnects():
-                    self.context.assert_ping_from_dut()
-                self.context.client.shill.disconnect(client_conf.ssid)
-                times_dict = {
-                    'Discovery': assoc_result.discovery_time,
-                    'Association': assoc_result.association_time,
-                    'Configuration': assoc_result.configuration_time}
-                for key in times_dict.keys():
-                    self.output_perf_value(
-                        description=key,
-                        value=times_dict[key],
-                        units='seconds',
-                        higher_is_better=False,
-                        graph=router_conf.perf_loggable_description)
-            self.context.client.shill.delete_entries_for_ssid(client_conf.ssid)
-            self.context.router.deconfig()
-            self.context.capture_host.stop_capture()
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.11g b/server/site_tests/network_WiFi_SuspendStress/control.11g
deleted file mode 100644
index e128657..0000000
--- a/server/site_tests/network_WiFi_SuspendStress/control.11g
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, tienchang, bmahadev'
-NAME = 'network_WiFi_SuspendStress.11g'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc')
-
-DOC = """
-This test uses powerd_dbus to suspend and resume and checks that the
-wifi adapter is brought back up and connects to an 802.11g network on
-channels 1, 6 and 11.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    host = hosts.create_host(machine)
-    g_mode = hostap_config.HostapConfig.MODE_11G
-    configurations = [(hostap_config.HostapConfig(channel=1, mode=g_mode),
-                       xmlrpc_datatypes.AssociationParameters()),
-                      (hostap_config.HostapConfig(channel=6, mode=g_mode),
-                       xmlrpc_datatypes.AssociationParameters()),
-                      (hostap_config.HostapConfig(channel=11, mode=g_mode),
-                       xmlrpc_datatypes.AssociationParameters())]
-    job.run_test('network_WiFi_SuspendStress',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 suspends=5,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.24HT40 b/server/site_tests/network_WiFi_SuspendStress/control.24HT40
deleted file mode 100644
index acbc7a69..0000000
--- a/server/site_tests/network_WiFi_SuspendStress/control.24HT40
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, tienchang, bmahadev'
-NAME = 'network_WiFi_SuspendStress.24HT40'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc')
-
-DOC = """
-This test uses powerd_dbus to suspend and resume and checks that the
-wifi adapter is brought back up and connects to an 802.11n network on 2.4 GHz.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    host = hosts.create_host(machine)
-    caps = [hostap_config.HostapConfig.N_CAPABILITY_GREENFIELD,
-            hostap_config.HostapConfig.N_CAPABILITY_HT40]
-    n = hostap_config.HostapConfig.MODE_11N_PURE
-    configurations = [(hostap_config.HostapConfig(frequency=2437, mode=n,
-                                                  n_capabilities=caps),
-                       xmlrpc_datatypes.AssociationParameters())]
-    job.run_test('network_WiFi_SuspendStress',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 suspends=5,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.5HT40 b/server/site_tests/network_WiFi_SuspendStress/control.5HT40
deleted file mode 100644
index f882e41..0000000
--- a/server/site_tests/network_WiFi_SuspendStress/control.5HT40
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, tienchang, bmahadev'
-NAME = 'network_WiFi_SuspendStress.5HT40'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc')
-
-DOC = """
-This test uses powerd_dbus to suspend and resume and checks that the
-wifi adapter is brought back up and connects to an 802.11n network on 5 GHz.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    host = hosts.create_host(machine)
-    caps = [hostap_config.HostapConfig.N_CAPABILITY_GREENFIELD,
-            hostap_config.HostapConfig.N_CAPABILITY_HT40_MINUS]
-    n = hostap_config.HostapConfig.MODE_11N_PURE
-    configurations = [(hostap_config.HostapConfig(frequency=5240,
-                                                  mode=n,
-                                                  n_capabilities=caps),
-                       xmlrpc_datatypes.AssociationParameters())]
-    job.run_test('network_WiFi_SuspendStress',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 suspends=5,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.Hidden b/server/site_tests/network_WiFi_SuspendStress/control.Hidden
deleted file mode 100644
index d6af8a9..0000000
--- a/server/site_tests/network_WiFi_SuspendStress/control.Hidden
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, tienchang, bmahadev'
-NAME = 'network_WiFi_SuspendStress.Hidden'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc')
-
-DOC = """
-This test uses powerd_dbus to suspend and resume and checks that the
-wifi adapter is brought back up and connects to a hidden network.
-"""
-
-import copy
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    host = hosts.create_host(machine)
-    g = hostap_config.HostapConfig.MODE_11G
-    n = hostap_config.HostapConfig.MODE_11N_PURE
-    assoc_params = xmlrpc_datatypes.AssociationParameters()
-    assoc_params.is_hidden = True
-    configurations = [
-            (hostap_config.HostapConfig(channel=6, mode=g, hide_ssid=True),
-             copy.copy(assoc_params)),
-            (hostap_config.HostapConfig(channel=36, mode=n, hide_ssid=True),
-             copy.copy(assoc_params)),
-            (hostap_config.HostapConfig(channel=48, mode=n, hide_ssid=True),
-             copy.copy(assoc_params))]
-
-    job.run_test('network_WiFi_SuspendStress',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 suspends=5,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.VHT80 b/server/site_tests/network_WiFi_SuspendStress/control.VHT80
deleted file mode 100644
index 034463d..0000000
--- a/server/site_tests/network_WiFi_SuspendStress/control.VHT80
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'kirtika'
-NAME = 'network_WiFi_SuspendStress.VHT80'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc')
-
-DOC = """
-This test uses powerd_dbus to suspend and resume and checks that the
-wifi adapter is brought back up and connects to an 802.11ac network with VHT80
-rates.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    host = hosts.create_host(machine)
-    n_caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]
-    ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80]
-    ac_mode = hostap_config.HostapConfig.MODE_11AC_PURE
-    channel_width_80_mhz = hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80
-    configs = [(hostap_config.HostapConfig(
-                    channel=157,
-                    mode=ac_mode,
-                    n_capabilities=n_caps,
-                    vht_channel_width=channel_width_80_mhz,
-                    vht_center_channel=155,
-                    ac_capabilities=ac_caps),
-               xmlrpc_datatypes.AssociationParameters())]
-    job.run_test('network_WiFi_SuspendStress',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 suspends=5,
-                 raw_cmdline_args=args,
-                 additional_params=configs)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.WPA2 b/server/site_tests/network_WiFi_SuspendStress/control.WPA2
deleted file mode 100644
index b7f97d7..0000000
--- a/server/site_tests/network_WiFi_SuspendStress/control.WPA2
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, tienchang, bmahadev'
-NAME = 'network_WiFi_SuspendStress.WPA2'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = ('suite:wifi_matfunc')
-
-DOC = """
-This test uses powerd_dbus to suspend and resume and checks that the
-wifi adapter is brought back up and connects to a WPA2 network.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    host = hosts.create_host(machine)
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=wpa_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters()
-    assoc_params.security_config = wpa_config
-    configurations = [(ap_config, assoc_params)]
-    job.run_test('network_WiFi_SuspendStress',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 suspends=5,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.stress_24HT40 b/server/site_tests/network_WiFi_SuspendStress/control.stress_24HT40
deleted file mode 100644
index 636060f..0000000
--- a/server/site_tests/network_WiFi_SuspendStress/control.stress_24HT40
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, tienchang, bmahadev'
-NAME = 'network_WiFi_SuspendStress.stress_24HT40'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = 'suite:wifi_stress'
-
-DOC = """
-This test uses powerd_dbus to suspend and resume and checks that the
-wifi adapter is brought back up and connects to an 802.11n network on 2.4 GHz.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    host = hosts.create_host(machine)
-    caps = [hostap_config.HostapConfig.N_CAPABILITY_GREENFIELD,
-            hostap_config.HostapConfig.N_CAPABILITY_HT40]
-    n = hostap_config.HostapConfig.MODE_11N_PURE
-    configurations = [(hostap_config.HostapConfig(frequency=2437, mode=n,
-                                                  n_capabilities=caps),
-                       xmlrpc_datatypes.AssociationParameters())]
-    job.run_test('network_WiFi_SuspendStress',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 suspends=690,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SuspendStress/control.stress_WPA2 b/server/site_tests/network_WiFi_SuspendStress/control.stress_WPA2
deleted file mode 100644
index 83f3315..0000000
--- a/server/site_tests/network_WiFi_SuspendStress/control.stress_WPA2
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'krisr, tienchang, bmahadev'
-NAME = 'network_WiFi_SuspendStress.stress_WPA2'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 512000
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-ATTRIBUTES = 'suite:wifi_stress'
-
-DOC = """
-This test uses powerd_dbus to suspend and resume and checks that the
-wifi adapter is brought back up and connects to a WPA2 network.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    host = hosts.create_host(machine)
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk='chromeos',
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11N_PURE,
-            security_config=wpa_config)
-    assoc_params = xmlrpc_datatypes.AssociationParameters()
-    assoc_params.security_config = wpa_config
-    configurations = [(ap_config, assoc_params)]
-    job.run_test('network_WiFi_SuspendStress',
-                 host=host,
-                 tag=NAME.split('.')[1],
-                 suspends=690,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SuspendStress/network_WiFi_SuspendStress.py b/server/site_tests/network_WiFi_SuspendStress/network_WiFi_SuspendStress.py
deleted file mode 100644
index b529503..0000000
--- a/server/site_tests/network_WiFi_SuspendStress/network_WiFi_SuspendStress.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros import stress
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-_DELAY = 10
-_START_TIMEOUT_SECONDS = 20
-
-
-class network_WiFi_SuspendStress(wifi_cell_test_base.WiFiCellTestBase):
-    """Test suspend and resume with powerd_dbus_suspend command and assert wifi
-    connectivity to router.
-    """
-    version = 1
-
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """Hook into super class to take control files parameters.
-
-        @param commandline_args dict of parsed parameters from the autotest.
-        @param additional_params list of tuple(HostapConfig,
-                                               AssociationParameters).
-        """
-        self._configurations = additional_params
-
-
-    def stress_wifi_suspend(self):
-        """ Perform the suspend stress."""
-
-        boot_id = self._host.get_boot_id()
-        self.context.client.do_suspend(_DELAY)
-        self._host.test_wait_for_resume(boot_id)
-        state_info = self.context.wait_for_connection(
-                self.context.router.get_ssid())
-        self._timings.append(state_info.time)
-
-
-    def run_once(self, suspends=5):
-        """Run suspend stress test.
-
-        @param suspends: Number of suspend iterations
-
-        """
-        self._host = self.context.client.host
-
-        for router_conf, client_conf in self._configurations:
-            self.context.configure(ap_config=router_conf)
-            assoc_params = xmlrpc_datatypes.AssociationParameters(
-                is_hidden=client_conf.is_hidden,
-                security_config=client_conf.security_config,
-                ssid=self.context.router.get_ssid())
-            self.context.assert_connect_wifi(assoc_params)
-            self._timings = list()
-
-            stressor = stress.CountedStressor(self.stress_wifi_suspend)
-
-            stressor.start(suspends)
-            stressor.wait()
-
-            perf_dict = {'fastest': max(self._timings),
-                         'slowest': min(self._timings),
-                         'average': (float(sum(self._timings)) /
-                                     len(self._timings))}
-            for key in perf_dict:
-                self.output_perf_value(description=key,
-                    value=perf_dict[key],
-                    units='seconds',
-                    higher_is_better=False,
-                    graph=router_conf.perf_loggable_description)
-
-            # Explicitly disconnect and clear the shill profile, in case
-            # we're running another configuration after this. Especially on
-            # Hidden tests, the DUT may still think it can connect to
-            # previously-discovered networks, causing extra connection failures
-            # and delays along the way.
-            self.context.client.shill.disconnect(client_conf.ssid)
-            if not self.context.client.shill.init_test_network_state():
-                raise error.TestError('Failed to set up shill profile.')
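For reference, the reconnect-timing summary that the removed suspend-stress test reported can be expressed as a small standalone helper. This is only an illustrative sketch, not autotest code; the key names mirror the perf_dict above, and note that the fastest reconnect corresponds to the minimum time.

# Sketch: summarize WiFi reconnect times (in seconds) after repeated
# suspend/resume cycles, as the removed test's perf reporting did.
def summarize_reconnect_times(timings):
    """Return fastest/slowest/average reconnect times from a list of floats."""
    if not timings:
        raise ValueError('no reconnect timings recorded')
    return {
        'fastest': min(timings),
        'slowest': max(timings),
        'average': sum(timings) / float(len(timings)),
    }

# Example: summarize_reconnect_times([3.2, 4.1, 2.8])
# -> {'fastest': 2.8, 'slowest': 4.1, 'average': 3.366...}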
diff --git a/server/site_tests/network_WiFi_SuspendTwice/control b/server/site_tests/network_WiFi_SuspendTwice/control
deleted file mode 100644
index cfdeaa4..0000000
--- a/server/site_tests/network_WiFi_SuspendTwice/control
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_SuspendTwice'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-# TODO(wiley) When we have servos on our test machines, put this in
-# wifi_matfunc
-# Currently, it kills too many devices on broken suspends.
-DEPENDENCIES = 'wificell'
-
-DOC = """
-The ath9k driver had a bug where the WiFi hardware would hang
-if we suspended and resumed twice while WiFi was disabled.  Run
-this test to make sure we never regress.  Try to run this late
-in the cycle so if this test fails, we don't take other tests with
-us.
-
-"""
-
-
-def run(machine):
-    job.run_test('network_WiFi_SuspendTwice',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_SuspendTwice/network_WiFi_SuspendTwice.py b/server/site_tests/network_WiFi_SuspendTwice/network_WiFi_SuspendTwice.py
deleted file mode 100644
index 7708f2b..0000000
--- a/server/site_tests/network_WiFi_SuspendTwice/network_WiFi_SuspendTwice.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_SuspendTwice(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that WiFi works after suspending twice with the device disabled."""
-
-    version = 1
-
-
-    def run_once(self):
-        """Body of the test."""
-        self.context.configure(hostap_config.HostapConfig(channel=1))
-        assoc_params = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid())
-        self.context.assert_connect_wifi(assoc_params)
-        self.context.client.set_device_enabled(
-                self.context.client.wifi_if, False, fail_on_unsupported=True)
-        self.context.client.do_suspend(3)
-        self.context.client.do_suspend(3)
-        self.context.client.set_device_enabled(
-                self.context.client.wifi_if, True, fail_on_unsupported=True)
-        self.context.wait_for_connection(self.context.router.get_ssid())
diff --git a/server/site_tests/network_WiFi_UpdateRouter/control b/server/site_tests/network_WiFi_UpdateRouter/control
index c29cc3a..8de7fba 100644
--- a/server/site_tests/network_WiFi_UpdateRouter/control
+++ b/server/site_tests/network_WiFi_UpdateRouter/control
@@ -7,7 +7,8 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = 'suite:wifi_update_router'
+ATTRIBUTES = 'suite:bluetooth_wifi_testbed_update'
+PY_VERSION = 3
 
 DOC = """
 This test updates the router in a WiFi cell to the latest stable version.
diff --git a/server/site_tests/network_WiFi_UpdateRouter/control.pcap b/server/site_tests/network_WiFi_UpdateRouter/control.pcap
index fc896ef..f2d9105 100644
--- a/server/site_tests/network_WiFi_UpdateRouter/control.pcap
+++ b/server/site_tests/network_WiFi_UpdateRouter/control.pcap
@@ -7,7 +7,8 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = 'suite:wifi_update_router'
+ATTRIBUTES = 'suite:bluetooth_wifi_testbed_update'
+PY_VERSION = 3
 
 DOC = """
 This test updates the packet capture in a WiFi cell to the latest stable
diff --git a/server/site_tests/network_WiFi_UpdateRouter/network_WiFi_UpdateRouter.py b/server/site_tests/network_WiFi_UpdateRouter/network_WiFi_UpdateRouter.py
index e592e39..c30f749 100644
--- a/server/site_tests/network_WiFi_UpdateRouter/network_WiFi_UpdateRouter.py
+++ b/server/site_tests/network_WiFi_UpdateRouter/network_WiFi_UpdateRouter.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -38,13 +39,15 @@
     version = 1
 
     STABLE_VERSIONS = {
-        ## crbug.com/1098024: these are left here as documentation of what the
-        # last stable version is, but the current updater code does not support
-        # them.
-        'whirlwind': StableVersion('whirlwind-test-ap-tryjob/R85-13310.60.0-b4641849',
-                                   '13310.60.2020_08_25_0212'),
-        'gale': StableVersion('gale-test-ap-tryjob/R85-13310.54.0-b4637444',
-                              '13310.54.2020_08_19_1536'),
+            ## crbug.com/1098024: these are left here as documentation of what the
+            # last stable version is, but the current updater code does not support
+            # them.
+            'whirlwind':
+            StableVersion('whirlwind-test-ap-tryjob/R85-13310.60.0-b4641849',
+                          '13310.60.2020_08_25_0212'),
+            'gale':
+            StableVersion('gale-test-ap-tryjob/R92-13982.81.0-b4959409',
+                          '13982.81.2021_08_11_1044'),
     }
 
     # List of files to remove.
@@ -89,6 +92,18 @@
         for path in self.FILES_TO_REMOVE:
             device_host.run('rm -rf %s' % path, ignore_status=True)
 
+    def stop_recover_duts(self, device_host):
+        """Stop running recover_duts on the host.
+
+        b/177380545: recover_duts currently provides negative value on
+        routers. TBD: decide whether to re-enable this once router images
+        are updated to fix the hang issues.
+
+        @param device_host: router / pcap host object
+        """
+        device_host.run('rm -f %s' % provisioner.LAB_MACHINE_FILE,
+                        ignore_status=True)
+        device_host.run('stop recover_duts', ignore_status=True)
 
     def run_once(self, host, is_pcap=False):
         """Update router / packet capture associated with host.
@@ -119,10 +134,17 @@
         device_host = hosts.create_host(device_hostname,
                                         host_class=hosts.CrosHost,
                                         allow_failure=True)
+
+        # Stop recover_duts now, for cases where we don't go through a full
+        # update below.
+        self.stop_recover_duts(device_host)
+
         # Remove unwanted files to free up disk space before starting the update.
         self.freeup_disk_space(device_host)
         self.update_device(device_host)
 
+        # Stop recover_duts again, in case provisioning re-enabled it.
+        self.stop_recover_duts(device_host)
 
     def update_device(self, device_host):
         """Update router and pcap associated with host.
diff --git a/server/site_tests/network_WiFi_VerifyAttenuator/control b/server/site_tests/network_WiFi_VerifyAttenuator/control
deleted file mode 100644
index 46cb002..0000000
--- a/server/site_tests/network_WiFi_VerifyAttenuator/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_VerifyAttenuator'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test is designed to check as quickly as possible that a dual radio
-router is working correctly in a setup with variable attenuators.
-"""
-
-
-def run(machine):
-    job.run_test('network_WiFi_VerifyAttenuator',
-                 host=hosts.create_host(machine),
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_VerifyAttenuator/network_WiFi_VerifyAttenuator.py b/server/site_tests/network_WiFi_VerifyAttenuator/network_WiFi_VerifyAttenuator.py
deleted file mode 100644
index 16ea340..0000000
--- a/server/site_tests/network_WiFi_VerifyAttenuator/network_WiFi_VerifyAttenuator.py
+++ /dev/null
@@ -1,312 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import attenuator_controller
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-ATTENUATION_STEP = 4
-FINAL_ATTENUATION = 90
-ATTENUATORS_PER_PHY = 2
-
-LOW_POWER_SIGNAL = -75
-
-class AttenuatorInfo(object):
-    """Contains debug information about an attenuator."""
-
-    def __init__(self):
-        self.attenuator_zeros = False
-        self.zeroed_scan_signal = None
-        self.allows_connection = False
-        self._zeroed_linked_signal = None
-
-
-    @property
-    def zeroed_linked_signal(self):
-        """Returns the linked signal as a float."""
-        return self._zeroed_linked_signal
-
-
-    @zeroed_linked_signal.setter
-    def zeroed_linked_signal(self, value):
-        """Sets the linked signal to a float.
-
-        @param value: the linked signal as a float
-
-        """
-        if (self._zeroed_linked_signal is None or
-            value > self._zeroed_linked_signal):
-            self._zeroed_linked_signal = value
-
-
-    def healthy_attenuator(self):
-        """Returns True if the attenuator looks good; False otherwise."""
-        if (not self.allows_connection and
-            self.zeroed_scan_signal is None):
-            return False
-        elif not self.attenuator_zeros:
-            return False
-        if (self.zeroed_scan_signal < LOW_POWER_SIGNAL and
-            self.zeroed_linked_signal < LOW_POWER_SIGNAL):
-            return False
-        return True
-
-
-class network_WiFi_VerifyAttenuator(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that all connected attenuators are functioning correctly."""
-    version = 1
-
-
-    def _refresh_ap_ssids(self, frequency):
-        """Start up new APs, with unique SSIDs.
-
-        Doing this before each connection attempt in the test prevents
-        spillover from previous connection attempts interfering with
-        our intentions.
-
-        @param frequency: int WiFi frequency to configure the APs on.
-
-        """
-        ap_config = hostap_config.HostapConfig(
-                frequency=frequency,
-                mode=hostap_config.HostapConfig.MODE_11N_PURE)
-        self.context.router.deconfig_aps()
-        self._all_ssids = list()
-        for i in range(self.num_phys):
-            self.context.configure(ap_config, multi_interface=True)
-            self._all_ssids.append(self.context.router.get_ssid(instance=i))
-
-
-    def _get_phy_num_for_instance(self, instance):
-        """Get the phy number corresponding to a hostapd instance.
-
-        @param instance: int hostapd instance to test against.
-        @return int phy number corresponding to that AP (e.g.
-                for phy0 return 0).
-
-        """
-        phy = self.context.router.get_hostapd_phy(instance)
-        if not phy.startswith('phy'):
-            raise error.TestError('Unexpected phy name %s' % phy)
-
-        return int(phy[3:])
-
-
-    def _wait_for_good_signal_levels(self, ssid, attenuator_info):
-        """Verify the desired SSID is available with a good signal.
-
-        @param ssid: the ssid as a string
-        @param attenuator_info: dictionary with information about the
-                                current attenuator
-
-        @returns an updated attenuator_info dictionary
-
-        """
-        # In practice it has been observed that going from max attenuation
-        # to 0 attenuation may take several scans until the signal reaches
-        # the desired level.
-        for _ in range(5):
-            scan_result = self._client_iw_runner.wait_for_scan_result(
-                self._client_if, ssids=[ssid], timeout_seconds=10)
-            if scan_result is None or len(scan_result) == 0:
-                # Device is busy or no results at this time; try again
-                continue
-            for network in scan_result:
-                if network.ssid == ssid and network.signal < LOW_POWER_SIGNAL:
-                    logging.info('WARNING: Signal strength is less than '
-                                 'optimal (%f) consider re-calibrating or '
-                                 'check the conductive cabling.',
-                                 network.signal)
-                    attenuator_info.zeroed_scan_signal = network.signal
-                    return attenuator_info
-                elif network.ssid == ssid and network.signal > LOW_POWER_SIGNAL:
-                    logging.info('Scan found an acceptable signal strength %f',
-                                 network.signal)
-                    attenuator_info.zeroed_scan_signal = network.signal
-                    return attenuator_info
-        raise error.TestError('The desired SSID is not visible, the '
-                              'attenuator may be stuck or broken. '
-                              'OR the AP is in a bad state or is '
-                              'bad, try swapping.')
-
-
-    def _verify_attenuator(self, ap_num, frequency_mhz, attenuator_num):
-        """Verify that each phy has two attenuators controlling its signal.
-
-        @param ap_num: int hostapd instance to test against.
-        @param frequency_mhz: int frequency of the AP.
-        @param attenuator_num: int attenuator num controlling one antenna on
-                the AP.
-
-        @return AttenuatorInfo object
-
-        """
-        logging.info('Verifying attenuator functionality')
-        ai = AttenuatorInfo()
-        # Remove knowledge of previous networks from shill.
-        self.context.client.shill.init_test_network_state()
-        # Isolate the client entirely.
-        self.context.attenuator.set_variable_attenuation(
-                attenuator_controller.MAX_VARIABLE_ATTENUATION)
-        logging.info('Removing variable attenuation for attenuator=%d',
-                     attenuator_num)
-        # But allow one antenna on this phy.
-        self.context.attenuator.set_variable_attenuation(
-                0, attenuator_num=attenuator_num)
-        client_conf = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid(instance=ap_num))
-
-        logging.info('Waiting for client signal levels to settle.')
-        ai = self._wait_for_good_signal_levels(client_conf.ssid, ai)
-        logging.info('Connecting to %s', client_conf.ssid)
-        assoc_result = xmlrpc_datatypes.deserialize(
-                self.context.client.shill.connect_wifi(client_conf))
-        if not assoc_result.success:
-            logging.error('Failed to connect to AP %d on attenuator %d',
-                          ap_num, attenuator_num)
-            return ai
-        ai.allows_connection = True
-        ai.zeroed_linked_signal = self.context.client.wifi_signal_level
-        logging.info('Connected successfully')
-        start_atten = self.context.attenuator.get_minimal_total_attenuation()
-        for atten in range(start_atten,
-                           min(start_atten + 20, FINAL_ATTENUATION),
-                           ATTENUATION_STEP):
-            self.context.attenuator.set_total_attenuation(
-                    atten, frequency_mhz, attenuator_num=attenuator_num)
-            time.sleep(2)
-            logging.info('Attenuator %d signal at attenuation=%d is %d dBm.',
-                         attenuator_num, atten,
-                         self.context.client.wifi_signal_level)
-        return ai
-
-
-    def _debug_phy_attenuator_correspondence(self, visible_ssid, hidden_ssid):
-        """Verify that the non-attenuated SSID is the only one that is visble.
-
-        If everything is working correctly then all the DUT should see is one
-        SSID that is not the one which is attenuated.  Here are the different
-        possible failure scenarios:
-            - Two network_<blah> SSIDs are visible, both with a strong signal
-              (something greater than -80 dBm) means the rainbow cables on the
-              attenuation rig are backwards.
-            - Two network_<blah> SSIDs are visible, the one which should be
-              hidden is visible with something less than -80 dBm means one
-              of the attenuators is broken.
-            - The attenuated SSID is the only visible one, means that rainbow
-              cables are in the wrong order.
-            - The visible SSID is not seen, means that both attenuators are
-              stuck at max attenuation or there is a cabling problem.
-
-        @param visible_ssid: string of the SSID that should be visible.
-        @param hidden_ssid: string of the SSID that should be hidden
-
-        """
-        scan_result = self._client_iw_runner.wait_for_scan_result(
-                self._client_if, ssids=[visible_ssid, hidden_ssid])
-        if scan_result is None or len(scan_result) == 0:
-            raise error.TestFail('No visible SSIDs. Check cables, the '
-                                 'attenuators may be stuck')
-        elif (len(scan_result) == 1 and scan_result[0].ssid == hidden_ssid):
-            raise error.TestFail('The wrong network is visible, the rainbow '
-                                 'cables are in the wrong order.')
-        elif len(scan_result) > 1:
-            for network in scan_result:
-                if (network.ssid == hidden_ssid):
-                    # The SSID that should be hidden from the DUT is not,
-                    # along with what is presumably the network that should
-                    # be visible. Check the signal strength.
-                    if network.signal > LOW_POWER_SIGNAL:
-                        raise error.TestFail('Two SSIDs are visible, the '
-                                             'rainbow cables may be '
-                                             'connected backwards.')
-                    else:
-                        logging.warning('The attenuated SSID is visible with '
-                                        'very low power (%f), the attenuator '
-                                        'may be broken, or this is ghost '
-                                        'signal; will attempt to connect',
-                                        network.signal)
-
-
-    def _verify_phy_attenuator_correspondence(self, instance):
-        """Verify that we cannot connect to a phy when it is attenuated.
-
-        Check that putting maximum attenuation on the attenuators expected
-        to gate a particular phy produces the expected result.  We should
-        be unable to connect to the corresponding SSID.
-
-        @param instance: int hostapd instance to verify corresponds to
-                a particular 2 attenuators.
-
-        """
-        logging.info('Verifying attenuator correspondence')
-        # Turn up all attenuation.
-        self.context.attenuator.set_variable_attenuation(
-                attenuator_controller.MAX_VARIABLE_ATTENUATION)
-        # Turn down attenuation for phys other than the instance we're
-        # interested in.
-        for other_instance in [x for x in range(self.num_phys)
-                                 if x != instance]:
-            other_phy_num = self._get_phy_num_for_instance(other_instance)
-            for attenuator_offset in range(ATTENUATORS_PER_PHY):
-                attenuator_num = (other_phy_num * ATTENUATORS_PER_PHY +
-                                  attenuator_offset)
-                self.context.attenuator.set_variable_attenuation(
-                        0, attenuator_num=attenuator_num)
-                # The other SSID should be available.
-                self._debug_phy_attenuator_correspondence(
-                    self.context.router.get_ssid(instance=other_instance),
-                    self.context.router.get_ssid(instance=instance))
-        # We should be unable to connect.
-        client_conf = xmlrpc_datatypes.AssociationParameters(
-                ssid=self.context.router.get_ssid(instance=instance),
-                expect_failure=True)
-        self.context.assert_connect_wifi(client_conf)
-
-
-    def run_once(self):
-        """For each PHY on a router, for 2 and 5 Ghz bands on a PHY:
-
-        1) Set up an AP on the PHY.
-        2) Walk the attenuators from low to high attenuations.
-        3) Measure AP signal as attenuation increases.
-        4) Tester should manually inspect that signal levels decrease linearly
-           and are consistent from attenuator to attenuator.
-
-        """
-        # Create some re-usable client objects
-        self._client_iw_runner = self.context.client.iw_runner
-        self._client_if = self.context.client.wifi_if
-
-        # Verify the client cell is clean
-        scan_result = self._client_iw_runner.scan(self._client_if)
-        if scan_result and len(scan_result) > 0:
-            raise error.TestError('SSIDs found, the cell is not closed or '
-                                  'is not cabled correctly.')
-
-        attenuators_info = list()
-        self.num_phys = len(self.context.router.iw_runner.list_phys())
-        # Pick channels other than the calibrated ones.
-        for frequency in (2447, 5660):
-            for instance in range(self.num_phys):
-                if self.num_phys > 1:
-                    self._refresh_ap_ssids(frequency)
-                    self._verify_phy_attenuator_correspondence(instance)
-                phy_num = self._get_phy_num_for_instance(instance)
-                for attenuator_offset in range(ATTENUATORS_PER_PHY):
-                    attenuator_num = (phy_num * ATTENUATORS_PER_PHY +
-                                      attenuator_offset)
-                    self._refresh_ap_ssids(frequency)
-                    attenuator_info = self._verify_attenuator(
-                            instance, frequency, attenuator_num)
-                    attenuators_info.append(attenuator_info)
-
-        for info in attenuators_info:
-            if info.healthy_attenuator is False:
-                raise error.TestFail('One or more attenuators are broken!')
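The attenuator bookkeeping in the removed test is mostly arithmetic: two attenuators per phy, numbered consecutively, plus a health check over the signal levels observed at zero attenuation. A self-contained sketch of that logic follows; it is illustrative only, with simplified arguments rather than the original AttenuatorInfo object.

# Sketch: attenuator numbering and the zero-attenuation health check from
# the removed network_WiFi_VerifyAttenuator test. Signal levels are in dBm.
ATTENUATORS_PER_PHY = 2
LOW_POWER_SIGNAL = -75

def attenuator_num(phy_num, attenuator_offset):
    """Map (phy, offset) to a global attenuator index, e.g. phy1 -> 2, 3."""
    return phy_num * ATTENUATORS_PER_PHY + attenuator_offset

def healthy_attenuator(allows_connection, zeroed_scan_signal,
                       zeroed_linked_signal, attenuator_zeros=True):
    """Return True if the attenuator looks healthy at zero attenuation."""
    if not allows_connection and zeroed_scan_signal is None:
        return False
    if not attenuator_zeros:
        return False
    # Both the scanned and the linked signal were weak: likely stuck/broken.
    if (zeroed_scan_signal is not None and zeroed_linked_signal is not None
            and zeroed_scan_signal < LOW_POWER_SIGNAL
            and zeroed_linked_signal < LOW_POWER_SIGNAL):
        return False
    return True

# Example: attenuator_num(1, 1) == 3; a zeroed scan signal of -55 dBm with a
# successful connection is reported healthy.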
diff --git a/server/site_tests/network_WiFi_VerifyRouter/control b/server/site_tests/network_WiFi_VerifyRouter/control
index 3f25b67..f4dadcb 100644
--- a/server/site_tests/network_WiFi_VerifyRouter/control
+++ b/server/site_tests/network_WiFi_VerifyRouter/control
@@ -7,7 +7,8 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = 'suite:wifi_update_router'
+ATTRIBUTES = 'suite:bluetooth_wifi_testbed_update, suite:wificell_dut_validation'
+PY_VERSION = 3
 
 DOC = """
 This test is designed to check as quickly as possible that a dual radio
diff --git a/server/site_tests/network_WiFi_VerifyRouter/control.pcap b/server/site_tests/network_WiFi_VerifyRouter/control.pcap
index cf8be2b..6911814 100644
--- a/server/site_tests/network_WiFi_VerifyRouter/control.pcap
+++ b/server/site_tests/network_WiFi_VerifyRouter/control.pcap
@@ -7,7 +7,8 @@
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'wificell'
-ATTRIBUTES = 'suite:wifi_update_router'
+ATTRIBUTES = 'suite:bluetooth_wifi_testbed_update, suite:wificell_dut_validation'
+PY_VERSION = 3
 
 DOC = """
 This test checks that a dual radio router is working correctly.
diff --git a/server/site_tests/network_WiFi_VerifyRouter/network_WiFi_VerifyRouter.py b/server/site_tests/network_WiFi_VerifyRouter/network_WiFi_VerifyRouter.py
index 385326e..e6885e8 100644
--- a/server/site_tests/network_WiFi_VerifyRouter/network_WiFi_VerifyRouter.py
+++ b/server/site_tests/network_WiFi_VerifyRouter/network_WiFi_VerifyRouter.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/network_WiFi_VisibleScan/control.11b b/server/site_tests/network_WiFi_VisibleScan/control.11b
deleted file mode 100644
index b2b4657..0000000
--- a/server/site_tests/network_WiFi_VisibleScan/control.11b
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'quiche, wiley, pstew'
-NAME = 'network_WiFi_VisibleScan.11b'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ''
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test scans for networks, with a shill profile that does not
-contain any hidden networks. The test verifies that 802.11 probe
-frames are seen over-the-air, and that the probe frames specify
-a broadcast scan.
-
-"""
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    # This configuration uses as many defaults as possible
-    configurations = [hostap_config.HostapConfig(channel=1,
-            mode=hostap_config.HostapConfig.MODE_11B)]
-    job.run_test('network_WiFi_VisibleScan',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_VisibleScan/control.5VHT80 b/server/site_tests/network_WiFi_VisibleScan/control.5VHT80
deleted file mode 100644
index fbdbc93..0000000
--- a/server/site_tests/network_WiFi_VisibleScan/control.5VHT80
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pauletti, kirtika'
-NAME = 'network_WiFi_VisibleScan.5VHT80'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = ''
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test scans for networks, with a shill profile that does not
-contain any hidden networks. The test verifies that 802.11 probe
-frames are seen over-the-air, and that the probe frames specify
-a broadcast scan. This test has the AP use an 80 MHz channel.
-
-"""
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    n_caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]
-    ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80]
-    ac_mode = hostap_config.HostapConfig.MODE_11AC_PURE
-    channel_width_80_mhz = hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80
-    configurations = [hostap_config.HostapConfig(
-                                  channel=149,
-                                  mode=ac_mode,
-                                  n_capabilities=n_caps,
-                                  vht_channel_width=channel_width_80_mhz,
-                                  vht_center_channel=155,
-                                  ac_capabilities=ac_caps)]
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_VisibleScan',
-                 tag=NAME.split('.')[1],
-                 host=host,
-                 raw_cmdline_args=args,
-                 additional_params=configurations)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_VisibleScan/network_WiFi_VisibleScan.py b/server/site_tests/network_WiFi_VisibleScan/network_WiFi_VisibleScan.py
deleted file mode 100644
index 4f741d0..0000000
--- a/server/site_tests/network_WiFi_VisibleScan/network_WiFi_VisibleScan.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros.network import tcpdump_analyzer
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import packet_capturer
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_VisibleScan(wifi_cell_test_base.WiFiCellTestBase):
-    """Test scanning behavior when no hidden SSIDs are configured."""
-
-    version = 1
-
-    BROADCAST_SSID = ''
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """
-        Hook into super class to take control files parameters.
-
-        @param commandline_args: dict of parsed parameters from the autotest.
-        @param additional_params: list of HostapConfig objects.
-
-        """
-        self._ap_configs = additional_params
-
-
-    def run_once(self):
-        """Test body."""
-        for ap_config in self._ap_configs:
-            # Start capture before starting anything else.
-            self.context.capture_host.start_capture(
-                    ap_config.frequency,
-                    width_type=ap_config.packet_capture_mode,
-                    snaplen=packet_capturer.SNAPLEN_WIFI_PROBE_REQUEST)
-
-            # We're looking for the MAC address, so disable randomization.
-            with self.context.client.mac_address_randomization(False):
-                # Set up the router and associate the client with it.
-                self.context.configure(ap_config)
-                assoc_params = xmlrpc_datatypes.AssociationParameters(
-                        ssid=self.context.router.get_ssid())
-
-                self.context.assert_connect_wifi(assoc_params)
-                results = self.context.capture_host.stop_capture()
-
-            if len(results) != 1:
-                raise error.TestError('Expected to generate one packet '
-                                      'capture but got %d instead.' %
-                                      len(results))
-            probe_ssids = tcpdump_analyzer.get_probe_ssids(
-                    results[0].local_pcap_path,
-                    probe_sender=self.context.client.wifi_mac)
-            # We expect a broadcast probe, but it's not guaranteed.
-            expected_ssids = frozenset([self.BROADCAST_SSID])
-            permitted_ssids = (expected_ssids |
-                    frozenset([self.context.router.get_ssid()]))
-            # Verify probe result does not contain any unpermitted ssids
-            if probe_ssids - permitted_ssids:
-                raise error.TestError('Permitted SSIDs %s, but got %s' %
-                                      (permitted_ssids, probe_ssids))
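The core assertion of the removed network_WiFi_VisibleScan test is a set comparison over the SSIDs seen in probe-request frames. A standalone sketch of just that check, with a hypothetical helper name:

# Sketch: the probe-request SSID check. The DUT may probe for the broadcast
# SSID ('') and for the AP's own SSID; any other probed SSID indicates a
# leaked (e.g. previously remembered) network.
BROADCAST_SSID = ''

def unexpected_probe_ssids(probe_ssids, ap_ssid):
    """Return the set of probed SSIDs that should not have been probed."""
    permitted = frozenset([BROADCAST_SSID, ap_ssid])
    return frozenset(probe_ssids) - permitted

# Example:
# unexpected_probe_ssids({'', 'my_test_ap', 'HomeWiFi'}, 'my_test_ap')
# -> frozenset({'HomeWiFi'})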
diff --git a/server/site_tests/network_WiFi_WMM/control.wifi_wmm b/server/site_tests/network_WiFi_WMM/control.wifi_wmm
deleted file mode 100644
index 28d242b..0000000
--- a/server/site_tests/network_WiFi_WMM/control.wifi_wmm
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'wiley, pstew, quiche'
-NAME = 'network_WiFi_WMM.wifi_wmm'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = "suite:wifi_matfunc"
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test verifies that DUT can process datagrams transmitted at
-different Quality of Service (QoS) levels.
-"""
-
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('network_WiFi_WMM',
-                 host=host,
-                 raw_cmdline_args=args)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_WMM/network_WiFi_WMM.py b/server/site_tests/network_WiFi_WMM/network_WiFi_WMM.py
deleted file mode 100644
index f462b7e..0000000
--- a/server/site_tests/network_WiFi_WMM/network_WiFi_WMM.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros.network import ping_runner
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class network_WiFi_WMM(wifi_cell_test_base.WiFiCellTestBase):
-    """Test that we can handle different QoS levels."""
-    version = 1
-
-
-    def run_once(self):
-        """Body of the test."""
-        configuration = hostap_config.HostapConfig(
-                frequency=2437,
-                mode=hostap_config.HostapConfig.MODE_11G,
-                force_wmm=True)
-        self.context.configure(configuration)
-        assoc_params = xmlrpc_datatypes.AssociationParameters()
-        assoc_params.ssid = self.context.router.get_ssid()
-        self.context.assert_connect_wifi(assoc_params)
-        for qos in ('BE', 'BK', 'VI', 'VO'):
-            client_ping_config = ping_runner.PingConfig(
-                    self.context.get_wifi_addr(), qos=qos)
-            server_ping_config = ping_runner.PingConfig(
-                    self.context.client.wifi_ip, qos=qos)
-            self.context.assert_ping_from_dut(ping_config=client_ping_config)
-            self.context.assert_ping_from_server(ping_config=server_ping_config)
-        self.context.client.shill.disconnect(assoc_params.ssid)
-        self.context.router.deconfig()
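The removed WMM test pinged in each WMM access category (BE, BK, VI, VO). On Linux, iputils ping can mark packets via -Q <tos>; the sketch below shows how such commands could be built. The ToS values here are illustrative class-selector assumptions, not necessarily the mapping used by autotest's ping_runner.

# Sketch: build QoS-marked ping commands per WMM access category.
QOS_TOS = {
    'BK': 0x20,  # background  (CS1)
    'BE': 0x00,  # best effort (CS0)
    'VI': 0xA0,  # video       (CS5)
    'VO': 0xE0,  # voice       (CS7)
}

def ping_command(target_ip, qos, count=10):
    """Return an iputils ping command marking packets for the given category."""
    return ['ping', '-c', str(count), '-Q', '0x%02x' % QOS_TOS[qos], target_ip]

# Example: ping_command('192.168.0.1', 'VI')
# -> ['ping', '-c', '10', '-Q', '0xa0', '192.168.0.1']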
diff --git a/server/site_tests/network_WiFi_WakeOnDisconnect/control b/server/site_tests/network_WiFi_WakeOnDisconnect/control
deleted file mode 100644
index b94d3ea..0000000
--- a/server/site_tests/network_WiFi_WakeOnDisconnect/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'ejcaruso, samueltan, snanda'
-NAME = 'network_WiFi_WakeOnDisconnect'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell, servo_state:WORKING, lucidsleep'
-ATTRIBUTES = 'suite:wifi_lucidsleep'
-
-DOC = """
-This test verifies that the NIC wakes the DUT up in dark resume upon getting
-disconnected from an AP that the DUT was connected to when it last suspended.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_WakeOnDisconnect',
-                 host=host,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_WakeOnDisconnect/network_WiFi_WakeOnDisconnect.py b/server/site_tests/network_WiFi_WakeOnDisconnect/network_WiFi_WakeOnDisconnect.py
deleted file mode 100644
index 791384e..0000000
--- a/server/site_tests/network_WiFi_WakeOnDisconnect/network_WiFi_WakeOnDisconnect.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import lucid_sleep_test_base
-from autotest_lib.server.cros.network import wifi_client
-
-class network_WiFi_WakeOnDisconnect(lucid_sleep_test_base.LucidSleepTestBase):
-    """Test that WiFi disconnect wakes up the system."""
-
-    version = 1
-
-    def run_once(self):
-        """Body of the test."""
-        self.configure_and_connect_to_ap(hostap_config.HostapConfig(channel=1))
-        client = self.context.client
-        router = self.context.router
-
-        # Enable the dark connect feature in shill.
-        with client.wake_on_wifi_features(wifi_client.WAKE_ON_WIFI_DARKCONNECT):
-            logging.info('Set up WoWLAN')
-
-            with self.dr_utils.suspend():
-                time.sleep(wifi_client.SUSPEND_WAIT_TIME_SECONDS)
-
-                # Kick over the router to trigger wake on disconnect.
-                router.deconfig_aps(silent=True)
-
-                # Wait for the DUT to wake up in dark resume and suspend again.
-                time.sleep(wifi_client.DARK_RESUME_WAIT_TIME_SECONDS)
-
-                # Ensure that wake on packet did not trigger a full wake.
-                if client.host.wait_up(
-                        timeout=wifi_client.WAIT_UP_TIMEOUT_SECONDS):
-                    raise error.TestFail('Client woke up fully.')
-
-                if self.dr_utils.count_dark_resumes() < 1:
-                    raise error.TestFail('Client failed to wake up.')
-
-                logging.info('Client woke up successfully.')
diff --git a/server/site_tests/network_WiFi_WakeOnSSID/control b/server/site_tests/network_WiFi_WakeOnSSID/control
deleted file mode 100644
index 6a2fffc..0000000
--- a/server/site_tests/network_WiFi_WakeOnSSID/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'ejcaruso, samueltan, snanda'
-NAME = 'network_WiFi_WakeOnSSID'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell, servo_state:WORKING, lucidsleep'
-ATTRIBUTES = 'suite:wifi_lucidsleep'
-
-DOC = """
-This test verifies that the NIC wakes the DUT up in dark resume on the
-appearance of a whitelisted SSID if it previously suspended disconnected.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_WakeOnSSID',
-                 host=host,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_WakeOnSSID/network_WiFi_WakeOnSSID.py b/server/site_tests/network_WiFi_WakeOnSSID/network_WiFi_WakeOnSSID.py
deleted file mode 100644
index 9998023..0000000
--- a/server/site_tests/network_WiFi_WakeOnSSID/network_WiFi_WakeOnSSID.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import contextlib
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import lucid_sleep_test_base
-from autotest_lib.server.cros.network import wifi_client
-
-class network_WiFi_WakeOnSSID(lucid_sleep_test_base.LucidSleepTestBase):
-    """Test that known WiFi access points wake up the system."""
-
-    version = 1
-
-    def run_once(self):
-        """Body of the test."""
-        ap_ssid = self.configure_and_connect_to_ap(
-                hostap_config.HostapConfig(channel=1))
-        client = self.context.client
-        router = self.context.router
-
-        # Enable the dark connect feature in shill, and set the scan period.
-        with contextlib.nested(
-                client.wake_on_wifi_features(
-                        wifi_client.WAKE_ON_WIFI_DARKCONNECT),
-                client.net_detect_scan_period_seconds(
-                        wifi_client.NET_DETECT_SCAN_WAIT_TIME_SECONDS)):
-            logging.info('Set up WoWLAN')
-
-            # Bring the AP down so the DUT suspends disconnected.
-            router.deconfig_aps()
-
-            with self.dr_utils.suspend():
-                # Wait for suspend actions and first scan to finish.
-                time.sleep(wifi_client.SUSPEND_WAIT_TIME_SECONDS +
-                           wifi_client.NET_DETECT_SCAN_WAIT_TIME_SECONDS)
-
-                # Bring the AP back up to wake up the DUT.
-                logging.info('Bringing AP back online.')
-                self.context.configure(hostap_config.HostapConfig(
-                        ssid=ap_ssid, channel=1))
-
-                # Wait long enough for the NIC on the DUT to perform a net
-                # detect scan, discover the AP with the allowlisted SSID, wake
-                # up in dark resume, then suspend again.
-                time.sleep(wifi_client.NET_DETECT_SCAN_WAIT_TIME_SECONDS +
-                           wifi_client.DARK_RESUME_WAIT_TIME_SECONDS)
-
-                # Ensure that net detect did not trigger a full wake.
-                if client.host.wait_up(
-                        timeout=wifi_client.WAIT_UP_TIMEOUT_SECONDS):
-                    raise error.TestFail('Client woke up fully.')
-
-                if self.dr_utils.count_dark_resumes() < 1:
-                    raise error.TestFail('Client failed to wake up.')
-
-                logging.info('Client woke up successfully.')
diff --git a/server/site_tests/network_WiFi_WakeOnWiFiThrottling/control b/server/site_tests/network_WiFi_WakeOnWiFiThrottling/control
deleted file mode 100644
index 6cc26b7..0000000
--- a/server/site_tests/network_WiFi_WakeOnWiFiThrottling/control
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'samueltan, ejcaruso'
-NAME = 'network_WiFi_WakeOnWiFiThrottling'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'servo_state:WORKING, wificell, lucidsleep'
-ATTRIBUTES = ('suite:wifi_lucidsleep')
-
-DOC = """
-This test verifies that shill disables wake on WiFi when the DUT wakes up in
-dark resume too frequently. Test both the short and long thresholds, which are 3
-dark resumes per 1 minute and 10 dark resumes per 10 minutes respectively.
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_datatypes
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server import utils
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_WakeOnWiFiThrottling',
-                 host=host,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_WakeOnWiFiThrottling/network_WiFi_WakeOnWiFiThrottling.py b/server/site_tests/network_WiFi_WakeOnWiFiThrottling/network_WiFi_WakeOnWiFiThrottling.py
deleted file mode 100644
index 3c1b191..0000000
--- a/server/site_tests/network_WiFi_WakeOnWiFiThrottling/network_WiFi_WakeOnWiFiThrottling.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import lucid_sleep_test_base
-from autotest_lib.server.cros.network import wifi_client
-
-_SHORT_DARK_RESUME_THRESHOLD = 3
-_LONG_DARK_RESUME_THRESHOLD = 10
-_SHORT_RECONNECT_WAIT_TIME_SECONDS = 5
-_LONG_RECONNECT_WAIT_TIME_SECONDS = 35
-
-class network_WiFi_WakeOnWiFiThrottling(
-        lucid_sleep_test_base.LucidSleepTestBase):
-    """
-    Test that the wake on WiFi throttling mechanism is triggered when the DUT
-    wakes in dark resume too frequently.
-    """
-
-    version = 1
-
-    def run_once(self):
-        """Body of the test"""
-        self.configure_and_connect_to_ap(hostap_config.HostapConfig(channel=1))
-        client = self.context.client
-        client_mac = client.wifi_mac
-        router = self.context.router
-
-        # Enable the dark connect feature in shill, and set the scan period.
-        with client.wake_on_wifi_features(wifi_client.WAKE_ON_WIFI_DARKCONNECT):
-            logging.info('Set up WoWLAN')
-
-            prev_num_dark_resumes = self.dr_utils.count_dark_resumes()
-
-            logging.info('Testing short dark resume threshold')
-            with self.dr_utils.suspend():
-                # Wait for suspend actions to finish.
-                time.sleep(wifi_client.SUSPEND_WAIT_TIME_SECONDS)
-
-                for iter_num in xrange(1, _SHORT_DARK_RESUME_THRESHOLD+1):
-                    logging.info('Sending deauthentication message %d of %d',
-                                 iter_num, _SHORT_DARK_RESUME_THRESHOLD)
-                    router.deauth_client(client_mac)
-
-                    # Wait for the DUT to receive the disconnect, wake in
-                    # dark resume, reconnect, then suspend again.
-                    time.sleep(wifi_client.DISCONNECT_WAIT_TIME_SECONDS +
-                               _SHORT_RECONNECT_WAIT_TIME_SECONDS)
-
-            client.check_wake_on_wifi_throttled()
-
-            num_dark_resumes = (self.dr_utils.count_dark_resumes() -
-                                prev_num_dark_resumes)
-            if num_dark_resumes != _SHORT_DARK_RESUME_THRESHOLD:
-                raise error.TestFail('Client did not enter the expected number '
-                                     'of dark resumes (actual: %d, expected: %d'
-                                     ')' % (num_dark_resumes,
-                                            _SHORT_DARK_RESUME_THRESHOLD))
-
-            prev_num_dark_resumes = self.dr_utils.count_dark_resumes()
-
-            # Since we wake from suspend and suspend again, the throttling
-            # mechanism should be reset.
-            logging.info('Testing long dark resume threshold')
-            with self.dr_utils.suspend():
-                # Wait for suspend actions to finish.
-                time.sleep(wifi_client.SUSPEND_WAIT_TIME_SECONDS)
-
-                for iter_num in xrange(1, _LONG_DARK_RESUME_THRESHOLD+1):
-                    logging.info('Sending deauthentication message %d of %d',
-                                 iter_num, _LONG_DARK_RESUME_THRESHOLD)
-                    router.deauth_client(client_mac)
-                    # Wait for the DUT to receive the disconnect, wake in
-                    # dark resume, reconnect, then suspend again. Wait longer
-                    # than in the short threshold test above to avoid hitting
-                    # the short dark resume threshold (i.e. 3 dark resumes in 1
-                    # minute).
-                    time.sleep(wifi_client.DISCONNECT_WAIT_TIME_SECONDS +
-                               _LONG_RECONNECT_WAIT_TIME_SECONDS)
-
-            client.check_wake_on_wifi_throttled()
-
-            new_num_dark_resumes = (self.dr_utils.count_dark_resumes() -
-                                    prev_num_dark_resumes)
-            if new_num_dark_resumes != _LONG_DARK_RESUME_THRESHOLD:
-                raise error.TestFail('Client did not enter the expected number '
-                                     'of dark resumes (actual: %d, expected: %d'
-                                     ')' % (new_num_dark_resumes,
-                                            _LONG_DARK_RESUME_THRESHOLD))
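The throttling checks in the removed test reduce to comparing dark-resume counter deltas against fixed thresholds. A minimal sketch, assuming only the before/after counter values:

# Sketch: dark-resume threshold check from the removed throttling test.
# The short threshold is 3 dark resumes in 1 minute; the long one is 10
# dark resumes in 10 minutes.
_SHORT_DARK_RESUME_THRESHOLD = 3
_LONG_DARK_RESUME_THRESHOLD = 10

def check_dark_resume_count(before, after, threshold):
    """Raise if the number of new dark resumes differs from the threshold."""
    observed = after - before
    if observed != threshold:
        raise AssertionError(
            'expected %d dark resumes, observed %d' % (threshold, observed))

# Example: check_dark_resume_count(before=7, after=10,
#                                  threshold=_SHORT_DARK_RESUME_THRESHOLD)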
diff --git a/server/site_tests/network_WiFi_WoWLAN/control b/server/site_tests/network_WiFi_WoWLAN/control
deleted file mode 100644
index 9d0b253..0000000
--- a/server/site_tests/network_WiFi_WoWLAN/control
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'ejcaruso, samueltan, snanda'
-NAME = 'network_WiFi_WoWLAN'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell, servo_state:WORKING, lucidsleep'
-ATTRIBUTES = 'suite:wifi_lucidsleep'
-
-DOC = """
-This test verifies that the NIC wakes the DUT up in dark resume upon receiving
-a packet which matches a pattern registered with shill before the last suspend.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('network_WiFi_WoWLAN',
-                 host=host,
-                 raw_cmdline_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_WoWLAN/network_WiFi_WoWLAN.py b/server/site_tests/network_WiFi_WoWLAN/network_WiFi_WoWLAN.py
deleted file mode 100644
index 65a1e55..0000000
--- a/server/site_tests/network_WiFi_WoWLAN/network_WiFi_WoWLAN.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import lucid_sleep_test_base
-from autotest_lib.server.cros.network import wifi_client
-
-class network_WiFi_WoWLAN(lucid_sleep_test_base.LucidSleepTestBase):
-    """Test that WiFi packets can wake up the system."""
-
-    version = 1
-
-    def run_once(self):
-        """Body of the test."""
-        self.configure_and_connect_to_ap(hostap_config.HostapConfig(channel=1))
-        self.context.assert_ping_from_dut()
-
-        client = self.context.client
-        router = self.context.router
-        dut_mac = client.wifi_mac
-        dut_ip = client.wifi_ip
-
-        logging.info('DUT WiFi MAC = %s, IPv4 = %s', dut_mac, dut_ip)
-        logging.info('Router WiFi IPv4 = %s', router.wifi_ip)
-
-        # Set up WoWLAN to wake on packets and register ip, then sleep
-        with client.wake_on_wifi_features(wifi_client.WAKE_ON_WIFI_PACKET):
-            logging.info('Set up WoWLAN')
-            client.add_wake_packet_source(router.wifi_ip)
-
-            with self.dr_utils.suspend():
-                time.sleep(wifi_client.SUSPEND_WAIT_TIME_SECONDS)
-
-                router.send_magic_packet(dut_ip, dut_mac)
-
-                # Wait for the DUT to wake up in dark resume and suspend again.
-                time.sleep(wifi_client.RECEIVE_PACKET_WAIT_TIME_SECONDS +
-                           wifi_client.DARK_RESUME_WAIT_TIME_SECONDS)
-
-                # Ensure that wake on packet did not trigger a full wake.
-                if client.host.wait_up(
-                        timeout=wifi_client.WAIT_UP_TIMEOUT_SECONDS):
-                    raise error.TestFail('Client woke up fully.')
-
-                if self.dr_utils.count_dark_resumes() < 1:
-                    raise error.TestFail('Client failed to wake up.')
-
-                logging.info('Client woke up successfully.')
-
-
-    def cleanup(self):
-        self.context.client.remove_all_wake_packet_sources()
-        super(network_WiFi_WoWLAN, self).cleanup()
diff --git a/server/site_tests/p2p_EndToEndTest/control b/server/site_tests/p2p_EndToEndTest/control
index 4ddf811..a8aa098 100644
--- a/server/site_tests/p2p_EndToEndTest/control
+++ b/server/site_tests/p2p_EndToEndTest/control
@@ -7,7 +7,6 @@
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.server import hosts
-from autotest_lib.server.cros import queue_barrier
 
 AUTHOR = "chromeos-installer@google.com"
 NAME = "p2p_EndToEndTest"
@@ -15,44 +14,32 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
-BUG_TEMPLATE = {
-    'cc': ['chromeos-installer-alerts@google.com'],
-    'components': ['Internals>Installer'],
-}
+ATTRIBUTES = "suite:au-p2p"
+JOB_RETRIES = 2
+PY_VERSION = 3
 
 DOC = """
 End-to-end test of the peer-to-peer (p2p) file sharing system.
 
-The test runs over a set of N machines generating a random file in one of
-them (called the "main") and sharing it with the rest of the machines. The
-success condition of this test occurs when all the N machines have the same
-generated file before a certain timeout.
+The test runs with a main DUT and a companion DUT. The main DUT generates a
+random file and shares it via p2p. The companion DUT searches for and
+downloads the file.
 
-To simulate a progressive download of the shared file in the main, the
+To simulate a progressive download of the shared file, the
 file becomes available in two parts. The first part of the file is available
 at the beginning of the test, while the second part appears later.
 """
 
 def run(machine):
-    dut = hosts.create_host(machine)
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+
+    # The file ID shared among all test machines.
+    file_id = "%s-%s" % (time.strftime("%Y%m%d-%H%M"), uuid.uuid4())
 
     job.run_test('p2p_EndToEndTest',
-                 dut=dut,
+                 dut=host,
                  file_id=file_id,
-                 is_main=(machine == main),
-                 peers=machines,
-                 barrier=barrier)
-
-if len(machines) < 2:
-    raise error.TestError('At least two machines are needed for this test')
-
-# The file ID shared among all test machines.
-file_id = "%s-%s" % (time.strftime("%Y%m%d-%H%M"), uuid.uuid4())
-
-# Create the shared QueueBarrier to synchronize the processes.
-barrier = queue_barrier.QueueBarrier(len(machines)-1)
-
-# Pick any DUT as the main.
-main = machines[0]
+                 companions=companions)
 
 job.parallel_simple(run, machines)
diff --git a/server/site_tests/p2p_EndToEndTest/p2p_EndToEndTest.py b/server/site_tests/p2p_EndToEndTest/p2p_EndToEndTest.py
index a5913a5..5f18407 100644
--- a/server/site_tests/p2p_EndToEndTest/p2p_EndToEndTest.py
+++ b/server/site_tests/p2p_EndToEndTest/p2p_EndToEndTest.py
@@ -1,16 +1,16 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
 import os
-import random
-import time
 
 from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import utils
 from autotest_lib.server import test
-from autotest_lib.server.cros import queue_barrier
 
+DEFAULT_AVAHI_SIZE_UPDATE_DELAY = 10
 
 # P2P_PATH is the path where the p2p server expects the sharing files.
 P2P_PATH = '/var/cache/p2p'
@@ -18,127 +18,120 @@
 # Prefix all the test files with P2P_TEST_PREFIX.
 P2P_TEST_PREFIX = 'p2p-test'
 
-# File size of the shared file in KB.
-P2P_FILE_SIZE_KB = 80 * 1000
+# Kilobyte.
+KB = 1024
+
+# File size of the shared file in bytes (4 MiB).
+P2P_FILE_SIZE_MB = 4 * KB * KB
+P2P_FILE_SPLIT_SIZE_KB = P2P_FILE_SIZE_MB // (2 * KB)
 
 # After a peer finishes the download we need it to keep serving the file for
 # other peers. This peer will then wait up to P2P_SERVING_TIMEOUT_SECS seconds
 # for the test to conclude.
-P2P_SERVING_TIMEOUT_SECS = 600
+P2P_SERVING_TIMEOUT_SECS = 300
 
-# The file is initialy shared by the main in two parts. The first part is
-# available at the beginning of the test, while the second part of the file
-# becomes ready in the main after P2P_SHARING_GAP_SECS seconds.
-P2P_SHARING_GAP_SECS = 90
-
-# The main and clients have to initialize the p2p service and, in the case
-# of the main, generate the first part of the file on disk.
-P2P_INITIALIZE_TIMEOUT_SECS = 90
 
 class p2p_EndToEndTest(test.test):
     """Test to check that p2p works."""
     version = 1
 
 
-    def run_once(self, dut, file_id, is_main, peers, barrier):
+    def run_once(self, dut, file_id, companions):
         self._dut = dut
+        self._companion = companions[0]
 
         file_id = '%s-%s' % (P2P_TEST_PREFIX, file_id)
         file_temp_name = os.path.join(P2P_PATH, file_id + '.tmp')
         file_shared_name = os.path.join(P2P_PATH, file_id + '.p2p')
 
-        # Ensure that p2p is running.
-        dut.run('start p2p || true')
-        dut.run('status p2p | grep running')
+        logging.info('File ID: %s', file_id)
 
-        # Prepare the file - this includes specifying its final size.
+        # Set up the DUT and the companion.
+        for host in [self._dut, self._companion]:
+            # Ensure that p2p is running.
+            host.run('start p2p || true')
+            host.run('status p2p | grep running')
+
+        # Prepare an empty file to share and specify its final size via the
+        # "user.cros-p2p-filesize" attribute.
+        logging.info('All devices set up. Generating a file on the main DUT')
         dut.run('touch %s' % file_temp_name)
-        dut.run('setfattr -n user.cros-p2p-filesize -v %d %s'
-                % (P2P_FILE_SIZE_KB * 1000, file_temp_name))
+        dut.run('setfattr -n user.cros-p2p-filesize -v %d %s' %
+                (P2P_FILE_SIZE_MB, file_temp_name))
         dut.run('mv %s %s' % (file_temp_name, file_shared_name))
 
-        if is_main:
-            # The main generates a file and shares a part of it but announces
-            # the total size via the "user.cros-p2p-filesize" attribute.
-            # To ensure that the clients are retrieving this first shared part
-            # and hopefully blocking until the rest of the file is available,
-            # a sleep is included in the main side.
+        # Generate part of the file's total size.
+        dut.run('dd if=/dev/zero of=%s bs=%d count=%d' %
+                (file_shared_name, KB, P2P_FILE_SPLIT_SIZE_KB))
 
-            logging.info('Main process running.')
+        def _wait_until_avahi_size_update():
+            ret = ''
+            try:
+                ret = self._companion.run(
+                        'p2p-client --get-url=%s --minimum-size=%d' %
+                        (file_id, P2P_FILE_SPLIT_SIZE_KB * KB))
+                ret = ret.stdout.strip()
+            except:
+                return False
+            return ret != ''
 
-            first_part_size_kb = P2P_FILE_SIZE_KB / 3
-            dut.run('dd if=/dev/urandom of=%s bs=1000 count=%d'
-                    % (file_shared_name, first_part_size_kb))
+        err = 'Shared file size did not update in time.'
+        # The actual delay is 10 seconds, so triple that to account for flakes.
+        utils.poll_for_condition(condition=_wait_until_avahi_size_update,
+                                 timeout=DEFAULT_AVAHI_SIZE_UPDATE_DELAY * 3,
+                                 exception=error.TestFail(err))
 
-            # This small sleep is to ensure that the new file size is updated
-            # by avahi daemon.
-            time.sleep(5)
+        # Now the companion can attempt a p2p file download.
+        logging.info('Listing all p2p peers for the companion: ')
+        logging.info(self._companion.run('p2p-client --list-all').stdout)
+        ret = self._companion.run('p2p-client --get-url=%s' % file_id,
+                                  ignore_status=True)
+        url = ret.stdout.strip()
 
-            # At this point, the main is sharing a non-empty file, signal all
-            # the clients that they can start the test. The clients should not
-            # take more and a few seconds to launch.
-            barrier.main_barrier(timeout=P2P_INITIALIZE_TIMEOUT_SECS)
-
-            # Wait some time to allow clients download a partial file.
-            time.sleep(P2P_SHARING_GAP_SECS)
-            dut.run('dd if=/dev/urandom of=%s bs=1000 count=%d'
-                    ' conv=notrunc oflag=append'
-                    % (file_shared_name, P2P_FILE_SIZE_KB - first_part_size_kb))
+        if not url:
+            raise error.TestFail(
+                    'p2p-client on companion returned an empty URL.')
         else:
-            # On the client side, first wait until the main is sharing
-            # a non-empty file, otherwise p2p-client will ignore the file.
-            # The main should not take more than a few seconds to generate
-            # the file.
-            barrier.node_barrier(timeout=P2P_INITIALIZE_TIMEOUT_SECS)
+            logging.info('Companion using URL %s.', url)
+            logging.info(
+                    'Companion downloading the file from main DUT via p2p in the background.'
+            )
+            self._companion.run_background('curl %s -o %s' %
+                                           (url, file_shared_name),
+                                           verbose=True)
 
-            # Wait a random time in order to not launch all the downloads
-            # at the same time, otherwise all devices would be seeing
-            # num-connections < $THRESHOLD .
-            r = random.Random()
-            secs_to_sleep = r.randint(1, 10)
-            logging.debug('Sleeping %d seconds', secs_to_sleep)
-            time.sleep(secs_to_sleep)
-
-            # Attempt the file download and start sharing it while
-            # downloading it.
-            ret = dut.run('p2p-client --get-url=%s' % file_id)
-            url = ret.stdout.strip()
-
-            if not url:
-                raise error.TestFail('p2p-client returned an empty URL.')
-            else:
-                logging.info('Using URL %s', url)
-                dut.run('curl %s -o %s' % (url, file_shared_name))
+        logging.info(
+                'While the companion is downloading the file, we will expand it to its full size.'
+        )
+        dut.run('dd if=/dev/zero of=%s bs=%d count=%d'
+                ' conv=notrunc oflag=append' %
+                (file_shared_name, KB, P2P_FILE_SPLIT_SIZE_KB))
 
         # Calculate the SHA1 (160 bits -> 40 characters when
-        # hexencoded) of the file and report this back so the
-        # server-side test can check they're all the same.
+        # hexencoded) of the generated file.
         ret = dut.run('sha1sum %s' % file_shared_name)
-        sha1 = ret.stdout.strip()[0:40]
-        logging.info('SHA1 is %s', sha1)
+        sha1_main = ret.stdout.strip()[0:40]
+        logging.info('SHA1 of main is %s', sha1_main)
+        sha1_companion = ''
+        logging.info(
+                'Waiting for companion to finish downloading file so we can compare SHA1 values'
+        )
 
-        # Wait for all the clients to finish and check the received SHA1.
-        if is_main:
-            try:
-                client_sha1s = barrier.main_barrier(
-                        timeout=P2P_SERVING_TIMEOUT_SECS)
-            except queue_barrier.QueueBarrierTimeout:
-                raise error.TestFail("Test failed to complete in %d seconds."
-                                     % P2P_SERVING_TIMEOUT_SECS)
+        def _shas_match():
+            """Returns true when the SHA1 of the file matches on DUT and companion."""
+            ret = self._companion.run('sha1sum %s' % file_shared_name)
+            sha1_companion = ret.stdout.strip()[0:40]
+            logging.debug(sha1_companion)
+            return sha1_main == sha1_companion
 
-            for client_sha1 in client_sha1s:
-                if client_sha1 != sha1:
-                    # Wrong SHA1 received.
-                    raise error.TestFail("Received SHA1 (%s) doesn't match "
-                            "main's SHA1 (%s)." % (client_sha1, sha1))
-        else:
-            try:
-                barrier.node_barrier(sha1, timeout=P2P_SERVING_TIMEOUT_SECS)
-            except queue_barrier.QueueBarrierTimeout:
-                raise error.TestFail("Test failed to complete in %d seconds."
-                                     % P2P_SERVING_TIMEOUT_SECS)
-
+        err = "Main DUT's SHA1 (%s) doesn't match companions's SHA1 (%s)." % (
+                sha1_main, sha1_companion)
+        utils.poll_for_condition(condition=_shas_match,
+                                 timeout=P2P_SERVING_TIMEOUT_SECS,
+                                 exception=error.TestFail(err))
 
     def cleanup(self):
         # Clean the test environment and stop sharing this file.
-        self._dut.run('rm -f %s/%s-*.p2p' % (P2P_PATH, P2P_TEST_PREFIX))
+        for host in [self._dut, self._companion]:
+            host.run('rm -f %s/%s-*.p2p' % (P2P_PATH, P2P_TEST_PREFIX))
+            host.run('stop p2p')
diff --git a/server/site_tests/platform_ActivateDate/control b/server/site_tests/platform_ActivateDate/control
deleted file mode 100644
index ef114e3..0000000
--- a/server/site_tests/platform_ActivateDate/control
+++ /dev/null
@@ -1,20 +0,0 @@
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "shapiroc@chromium.org"
-NAME = "platform_ActivateDate"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:regression"
-
-DOC = """
-This test verifies that activate_date, which is only run once ever on the
-platform, executes correctly and sets the activation date correctly.
-"""
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_ActivateDate", host=host)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/platform_ActivateDate/platform_ActivateDate.py b/server/site_tests/platform_ActivateDate/platform_ActivateDate.py
deleted file mode 100755
index 3998c61..0000000
--- a/server/site_tests/platform_ActivateDate/platform_ActivateDate.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-
-class platform_ActivateDate(test.test):
-    """
-    Tests that activate date is run correctly from a clean system state.
-    For context, activate date is an upstart script located in:
-        chromeos-base/chromeos-activate-date
-    It attempts to set the initial date when a new chromebook is first used
-    by the customer, which drives things like battery warranty issues.
-    """
-    version = 1
-
-    def run_once(self, host):
-        host.run('truncate -s 0 /var/log/messages')
-        host.run('activate_date --clean')
-
-        # Since the activate_date value was cleaned, this reboot will cause it
-        # to execute the activation sequence.
-        host.reboot()
-
-        # The activation sequence polls with a 200 second sleep, which seems
-        # pretty high, but we'll wait for at least one of those sleep cycles.
-        poll_interval = 10
-        max_num_attempts = 21
-
-        current_attempt = 1
-        success = False
-        while current_attempt <= max_num_attempts and not success:
-          # Autotest logs the grep command to the system log, so we're just
-          # going to search for it locally.
-          get_log_cmd = 'cat /var/log/messages'
-          success = 'Activation date set' in host.run(get_log_cmd).stdout
-          if not success:
-            current_attempt = current_attempt + 1
-            time.sleep(poll_interval)
-
-        if not success:
-          raise error.TestFail('Failed to set activation date')
-
-        # After this reboot, vpd_get_value ActivateDate should be set already
-        # because this is what prevents the init script from executing again.
-        host.reboot()
-
-        if not host.run('vpd_get_value ActivateDate').stdout:
-          raise error.TestFail(
-              'ActivateDate should be set correctly after reboot')
diff --git a/server/site_tests/platform_BootDevice/control b/server/site_tests/platform_BootDevice/control
index 7270094..64fcc31 100644
--- a/server/site_tests/platform_BootDevice/control
+++ b/server/site_tests/platform_BootDevice/control
@@ -2,13 +2,14 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-NAME = "BootDevice"
-AUTHOR = "Chrome OS Team"
+NAME = "platform_BootDevice.BootDevice"
+AUTHOR = "ChromeOS Team"
 ATTRIBUTES = "suite:kernel_daily_regression"
 TIME = "LONG"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test reboots the device continuously for a specified number of iterations.
diff --git a/server/site_tests/platform_BootDevice/control.1 b/server/site_tests/platform_BootDevice/control.1
index c50c59c..081092f 100644
--- a/server/site_tests/platform_BootDevice/control.1
+++ b/server/site_tests/platform_BootDevice/control.1
@@ -2,13 +2,14 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-NAME = "BootDevice1"
+NAME = "platform_BootDevice.BootDevice1"
 AUTHOR = "Brillo Team"
 ATTRIBUTES = "suite:brillo-presubmit"
 TIME = "SHORT"
 TEST_CATEGORY = "functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test verifies that the device reboots successfully.
diff --git a/server/site_tests/platform_BootDevice/control.100 b/server/site_tests/platform_BootDevice/control.100
index 5c8b85c..501d597 100644
--- a/server/site_tests/platform_BootDevice/control.100
+++ b/server/site_tests/platform_BootDevice/control.100
@@ -2,13 +2,14 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-NAME = "BootDevice100"
-AUTHOR = "Chrome OS Team"
+NAME = "platform_BootDevice.BootDevice100"
+AUTHOR = "ChromeOS Team"
 ATTRIBUTES = "suite:stress"
 TIME = "LONG"
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test reboots the device continuously for a specified number of iterations.
diff --git a/server/site_tests/platform_BootDevice/platform_BootDevice.py b/server/site_tests/platform_BootDevice/platform_BootDevice.py
index dab4557..19b6b9d 100644
--- a/server/site_tests/platform_BootDevice/platform_BootDevice.py
+++ b/server/site_tests/platform_BootDevice/platform_BootDevice.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -9,7 +10,7 @@
     version = 1
 
     def run_once(self, reboot_iterations=1, host=None):
-        for i in xrange(reboot_iterations):
+        for i in range(reboot_iterations):
             logging.info('======== Running BOOTDEVICE REBOOT ITERATION %d/%d '
                          '========', i+1, reboot_iterations)
             # Reboot the client
diff --git a/server/site_tests/platform_BootPerfServer/control.bootperf b/server/site_tests/platform_BootPerfServer/control.bootperf
deleted file mode 100644
index 721965f..0000000
--- a/server/site_tests/platform_BootPerfServer/control.bootperf
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "platform_BootPerfServer.bootperf"
-AUTHOR = "Chrome OS Team"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This control file is meant for use by the "bootperf" script, in
-order to measure Chrome OS boot performance from a developer's
-desktop.
-"""
-
-# By design, the "site_utils/bootperf-bin/bootperf" script depends
-# on some key features of this control file:
-#   NAME must be "platform_BootPerfServer.bootperf".
-#   args[0] is expected to be an integer representing the number of
-#     iterations to perform.
-
-def run_bootperf(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_BootPerfServer", host=host,
-                 iterations=int(args[0]),
-                 # Pop args[0] and add 'skip_rootfs_check=True' to args to make
-                 # it bypass the rootfs verification check.
-                 cmdline_args=args[1:] + ['skip_rootfs_check=True'])
-
-parallel_simple(run_bootperf, machines)
diff --git a/server/site_tests/platform_BootPerfServer/control.bvt b/server/site_tests/platform_BootPerfServer/control.bvt
deleted file mode 100644
index b1eb735..0000000
--- a/server/site_tests/platform_BootPerfServer/control.bvt
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Perf/jrbarnette"
-NAME = "BootPerfBVT"
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test gathers performance metrics about a system by rebooting it and
-collecting boot times.
-"""
-
-def run_bootperf(machine):
-    host = hosts.create_host(machine)
-    job.run_test('platform_BootPerfServer', host=host, cmdline_args=args)
-
-parallel_simple(run_bootperf, machines)
diff --git a/server/site_tests/platform_BootPerfServer/control.crosperf b/server/site_tests/platform_BootPerfServer/control.crosperf
deleted file mode 100644
index 74c2099..0000000
--- a/server/site_tests/platform_BootPerfServer/control.crosperf
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "BootPerfServerCrosPerf"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:crosbolt_perf_perbuild"
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test reboots the client and uses the client-side platform_BootPerf test
-to collect boot performance metrics.
-"""
-
-def run_bootperf(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_BootPerfServer", host=host,
-                 iterations=10, upload_perf=True, cmdline_args=args)
-
-parallel_simple(run_bootperf, machines)
diff --git a/server/site_tests/platform_BootPerfServer/control.perfalerts b/server/site_tests/platform_BootPerfServer/control.perfalerts
deleted file mode 100644
index ef5e311..0000000
--- a/server/site_tests/platform_BootPerfServer/control.perfalerts
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Perf/jrbarnette"
-NAME = "BootPerf"
-ATTRIBUTES = "suite:link_perf, suite:perfalerts"
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test suite runs automated tests that record measurements for various
-system metrics.  In many cases a single test is repeated for a reasonable
-sample such as completing 50 reboots in one test.  The values recorded
-will be used to identify performance/resource regressions and to alert
-contributing (and previously unaware) developers of the impact.
-Ideally these are run for each build in order to make blame easier.
-
-To invoke this from the command line use syntax from the following examples:
-  --To modify the iteration count:
-    test_that -b ${BOARD} --iterations=10 'f:.*control.perfalerts'
-"""
-
-from autotest_lib.client.common_lib import utils
-
-dict_args = utils.args_to_dict(args)
-iterations = int(dict_args.get('iterations', 100))
-
-def run_bootperf(machine):
-    host = hosts.create_host(machine)
-    job.run_test('platform_BootPerfServer', host=host,
-                 iterations=iterations, upload_perf=True, cmdline_args=args)
-
-parallel_simple(run_bootperf, machines)
diff --git a/server/site_tests/platform_BootPerfServer/control.shard b/server/site_tests/platform_BootPerfServer/control.shard
deleted file mode 100644
index aa82fe3..0000000
--- a/server/site_tests/platform_BootPerfServer/control.shard
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "platform_BootPerfServer.shard"
-AUTHOR = "Chrome OS Team"
-TIME = "SHORT"
-TEST_CATEGORY = "infra"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """This test reboots the client on a shard. When a shard is deleted,
-the client that is assigned to it will be rebooted, which guarantees that
-the client has a healthy testing history, and is ready for continuous jobs.
-"""
-
-def run_bootperf(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_BootPerfServer", host=host, cmdline_args=args)
-
-parallel_simple(run_bootperf, machines)
diff --git a/server/site_tests/platform_BootPerfServer/platform_BootPerfServer.py b/server/site_tests/platform_BootPerfServer/platform_BootPerfServer.py
deleted file mode 100644
index 00ba257..0000000
--- a/server/site_tests/platform_BootPerfServer/platform_BootPerfServer.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import re
-import shutil
-import traceback
-import uuid
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.bin import utils
-from autotest_lib.server import test, autotest
-
-
-LOWER_IS_BETTER_METRICS = set(['rdbytes', 'seconds'])
-
-
-class platform_BootPerfServer(test.test):
-    """A test that reboots the client and collect boot perf data."""
-    version = 1
-
-    def _is_rootfs_verification_enabled(self, host):
-        """Helper function to check whether rootfs verification is enabled"""
-        kernel_cmdline = host.run_output('cat /proc/cmdline')
-        return 'dm_verity.dev_wait=1' in kernel_cmdline
-
-    def _get_root_partition(self, host):
-        """Helper function for getting the partition index from the system. """
-        # Determine root partition
-        rootdev = host.run_output('rootdev -s')
-        # Sample value of rootdev: "/dev/mmcblk0p3" or "/dev/nvme0n1p3." For
-        # "/dev/mmcblk0p3", the target is partition 2. Extract the last digit
-        # to get the partition index.
-        logging.info('rootdev: %s', rootdev)
-        match = re.match(r'^/dev/.*\dp(\d+)$', rootdev)
-        if match:
-            return int(match.group(1)) - 1
-
-        return None
-
-    def _edit_kernel_args(self, host, gen_sed_command):
-        """Helper function for editing kernel args."""
-        partition = self._get_root_partition(host)
-        if partition is None:
-            logging.warn('Unable to get root partition index')
-            return
-
-        tmp_name = str(uuid.uuid4())
-
-        # Save the current boot config.
-        host.run(('/usr/share/vboot/bin/make_dev_ssd.sh --save_config /tmp/%s '
-                  '--partitions %d') % (tmp_name, partition))
-        # Add "cros_bootchart" to the boot config and then make it effective.
-        tmp_file = '/tmp/%s.%d' % (tmp_name, partition)
-        host.run(gen_sed_command(tmp_file))
-        host.run(('/usr/share/vboot/bin/make_dev_ssd.sh --set_config /tmp/%s '
-                '--partitions %d') % (tmp_name, partition))
-
-    def initialize(self, host, cmdline_args):
-        """Initialization steps before running the test"""
-        # Some tests might disable rootfs verification and mount rootfs as rw.
-        # If we run after those tests, re-enable rootfs verification to get
-        # consistent boot perf metrics.
-
-        args_dict = utils.args_to_dict(cmdline_args)
-        skip_rootfs_check = (args_dict.get('skip_rootfs_check', '').lower() ==
-                             'true')
-
-        if not (skip_rootfs_check or
-                self._is_rootfs_verification_enabled(host)):
-            logging.info('Reimage to enable rootfs verification.')
-            version = host.get_release_builder_path()
-            # Force reimage to the current version to enable rootfs
-            # verification.
-            self.job.run_test('provision_QuickProvision',
-                              host=host,
-                              value=version,
-                              force_update_engine=True)
-
-        # Bootchart is shipped but disabled by default in the image. Before
-        # the test, enable by adding 'cros_bootchart' to the kernel arg list.
-        kernel_cmdline = host.run_output('cat /proc/cmdline')
-        if 'cros_bootchart' in kernel_cmdline:
-            logging.warn('cros_bootchart is enabled before the test.')
-            return
-
-        logging.info('Enable bootchart.')
-        self._edit_kernel_args(
-            host,
-            lambda tmp_file: 'sed -i "s/$/ cros_bootchart/g" %s' % tmp_file)
-
-        # Run a login test to complete the OOBE flow, if we haven't already.
-        # This is so that we measure boot times for the stable state.
-        client_at = autotest.Autotest(host)
-        client_at.run_test('login_LoginSuccess', disable_sysinfo=True,
-                check_client_result=True)
-
-    def cleanup(self, host):
-        """After running the test, disable cros_bootchart by removing
-        "cros_bootchart" from the kernel arg list.
-        """
-        kernel_cmdline = host.run_output('cat /proc/cmdline')
-        if 'cros_bootchart' not in kernel_cmdline:
-            logging.warn('Bootchart not enabled in the test.')
-            return
-
-        logging.info('Disable cros_bootchart and reboot.')
-        self._edit_kernel_args(
-            host,
-            lambda tmp_file: 'sed -i "s/ cros_bootchart//g" %s' % tmp_file)
-        host.reboot()
-
-    def upload_perf_keyvals(self, keyvals):
-        """Upload perf keyvals in dictionary |keyvals| to Chrome perf dashboard.
-
-        This method assumes that the key of a perf keyval is in the format
-        of "units_description". The text before the first underscore represents
-        the units and the rest of the text represents
-        a description of the measured perf value. For instance,
-        'seconds_kernel_to_login', 'rdbytes_kernel_to_startup'.
-
-        @param keyvals: A dictionary that maps a perf metric to its value.
-
-        """
-        for key, val in keyvals.items():
-            match = re.match(r'^(.+?)_.+$', key)
-            if match:
-                units = match.group(1)
-                higher_is_better = units not in LOWER_IS_BETTER_METRICS
-                self.output_perf_value(
-                        description=key, value=val,
-                        units=units, higher_is_better=higher_is_better)
-
-
-    def run_once(self, host=None, upload_perf=False):
-        """Runs the test once: reboot and collect boot metrics from DUT."""
-        self.client = host
-        self.client_test = 'platform_BootPerf'
-
-        # Reboot the client
-        logging.info('BootPerfServer: reboot %s', self.client.hostname)
-        try:
-            self.client.reboot(reboot_timeout=90)
-        except error.AutoservRebootError as e:
-            raise error.TestFail('%s.\nTest failed with error %s' % (
-                    traceback.format_exc(), str(e)))
-
-        # Collect the performance metrics by running a client side test
-        logging.info('BootPerfServer: start client test')
-        client_at = autotest.Autotest(self.client)
-        client_at.run_test(
-            self.client_test, last_boot_was_reboot=True, disable_sysinfo=True)
-
-        # In the client results directory are a 'keyval' file, and
-        # various raw bootstat data files.  First promote the client
-        # test 'keyval' as our own.
-        logging.info('BootPerfServer: gather client results')
-        client_results_dir = os.path.join(
-            self.outputdir, self.client_test, "results")
-        src = os.path.join(client_results_dir, "keyval")
-        dst = os.path.join(self.resultsdir, "keyval")
-        if os.path.exists(src):
-            client_results = open(src, "r")
-            server_results = open(dst, "a")
-            shutil.copyfileobj(client_results, server_results)
-            server_results.close()
-            client_results.close()
-        else:
-            logging.warning('Unable to locate %s', src)
-
-        # Upload perf keyvals in the client keyval file to perf dashboard.
-        if upload_perf:
-            logging.info('Output perf data for iteration %03d', self.iteration)
-            perf_keyvals = utils.read_keyval(src, type_tag='perf')
-            self.upload_perf_keyvals(perf_keyvals)
-
-        # Everything that isn't the client 'keyval' file is raw data
-        # from the client test:  move it to a per-iteration
-        # subdirectory.  We move instead of copying so we can be sure
-        # we don't have any stale results in the next iteration
-        if self.iteration is not None:
-            rawdata_dir = "rawdata.%03d" % self.iteration
-        else:
-            rawdata_dir = "rawdata"
-        rawdata_dir = os.path.join(self.resultsdir, rawdata_dir)
-        shutil.move(client_results_dir, rawdata_dir)
-        try:
-            os.remove(os.path.join(rawdata_dir, "keyval"))
-        except Exception:
-            pass
diff --git a/server/site_tests/platform_CloseOpenLid/control b/server/site_tests/platform_CloseOpenLid/control
deleted file mode 100644
index dc778ca..0000000
--- a/server/site_tests/platform_CloseOpenLid/control
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_CloseOpenLid"
-PURPOSE = "Test suspend/resume in response to lid events."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test uses servo to simulate lid close and open events. Device state is
-verified by pinging."""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("platform_CloseOpenLid", host=host, disable_sysinfo=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_CloseOpenLid/platform_CloseOpenLid.py b/server/site_tests/platform_CloseOpenLid/platform_CloseOpenLid.py
deleted file mode 100755
index ea4c440..0000000
--- a/server/site_tests/platform_CloseOpenLid/platform_CloseOpenLid.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-class platform_CloseOpenLid(test.test):
-    """Uses servo to send the host to sleep and wake back up.
-
-    Uses pwr_button and lid_open gpios in various combinations.
-    """
-    version = 1
-
-
-    def run_once(self, host):
-        # lid only
-        boot_id = host.get_boot_id()
-        host.servo.lid_close()
-        host.test_wait_for_shutdown()
-
-        host.servo.lid_open()
-        host.servo.pass_devmode()
-        host.test_wait_for_boot(boot_id)
-
-        # pwr_button and open lid
-        boot_id = host.get_boot_id()
-        host.servo.power_long_press()
-        if host.is_up():
-            raise error.TestError('DUT still up after long press power')
-
-        host.servo.lid_close()
-        host.servo.lid_open()
-        host.servo.pass_devmode()
-        host.test_wait_for_boot(boot_id)
diff --git a/server/site_tests/platform_CompromisedStatefulPartition/control b/server/site_tests/platform_CompromisedStatefulPartition/control
index e2684aa..89d32e7 100644
--- a/server/site_tests/platform_CompromisedStatefulPartition/control
+++ b/server/site_tests/platform_CompromisedStatefulPartition/control
@@ -12,6 +12,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 ATTRIBUTES = "suite:bvt-perbuild"
 
 DOC = """
diff --git a/server/site_tests/platform_CompromisedStatefulPartition/platform_CompromisedStatefulPartition.py b/server/site_tests/platform_CompromisedStatefulPartition/platform_CompromisedStatefulPartition.py
index 399e71e..1c5457d 100644
--- a/server/site_tests/platform_CompromisedStatefulPartition/platform_CompromisedStatefulPartition.py
+++ b/server/site_tests/platform_CompromisedStatefulPartition/platform_CompromisedStatefulPartition.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/platform_CorruptRootfs/control b/server/site_tests/platform_CorruptRootfs/control
deleted file mode 100644
index 79ace10..0000000
--- a/server/site_tests/platform_CorruptRootfs/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "platform_CorruptRootfs"
-AUTHOR = "chromeos-kernel,taysom"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test copies the kernel and root partitions from A to B.
-Corrupts the root partition on A (including bootcache area).
-Reboots the machine.
-Checks that the machine comes back up on B rootfs.
-"""
-
-def run_corrupt_rootfs(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_CorruptRootfs", host=host)
-
-parallel_simple(run_corrupt_rootfs, machines)
diff --git a/server/site_tests/platform_CorruptRootfs/control.regression b/server/site_tests/platform_CorruptRootfs/control.regression
deleted file mode 100644
index 3a0d273..0000000
--- a/server/site_tests/platform_CorruptRootfs/control.regression
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "platform_CorruptRootfs"
-AUTHOR = "chromeos-kernel,taysom"
-ATTRIBUTES = "suite:kernel_daily_regression"
-DEPENDENCIES = "servo_state:WORKING"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test copies the kernel and root partitions from A to B.
-Corrupts the root partition on A (including bootcache area).
-Reboots the machine.
-Checks that the machine comes back up on B rootfs.
-"""
-
-def run_corrupt_rootfs(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_CorruptRootfs", host=host)
-
-parallel_simple(run_corrupt_rootfs, machines)
diff --git a/server/site_tests/platform_CorruptRootfs/platform_CorruptRootfs.py b/server/site_tests/platform_CorruptRootfs/platform_CorruptRootfs.py
deleted file mode 100644
index ecfe1f2..0000000
--- a/server/site_tests/platform_CorruptRootfs/platform_CorruptRootfs.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import re
-import traceback
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-from autotest_lib.client.bin import utils
-
-class platform_CorruptRootfs(test.test):
-    """Tests how the system recovers when the root file system is corrupted
-
-    1. Copies kernel A and rootfs A to kernel B and rootfs B.
-    2. Sets the modes on the partitions.
-    3. Corrupts rootfs A by writing some random data to a the first few sectors.
-    4. If enabled, corrupts the bootcache area.
-    5. Reboots the system.
-    6. Runs the test again in the reverse direction, leaving kernel A and
-       rootfs A in the same state.
-    """
-
-    version = 1
-
-
-    def _get_bootcache_offset(self):
-        """Gets the offset of the bootcache from the command line.
-
-        @return
-          if bootcache is found, returns offset as a string
-          otherwise returns '0'
-        """
-
-        # Get the linux cmd line
-        result = self.client.run('cat /proc/cmdline')
-
-        m = re.search('dm="(.*)"', result.stdout)
-        dm = m.group(1)
-        i = dm.find('bootcache')
-        if i > 0:
-            s = dm[i:].split()
-            return s[2]   # 2nd field after bootcache has sector offset
-        return '0'
-
-
-    def _get_partition_layout(self):
-        """Get the partition layout
-
-        @return
-          dev - name of the device hosting the partition
-          kernelA - partition used to boot kernel
-          rootfsA - partition of current root file system
-          kernelB - backup copy of kernel
-          rootfsB - backup copy of root file system
-        """
-
-        # What is our root partition?
-        # TODO(crbug.com/226082)
-        result = self.client.run('rootdev -s')
-        logging.info('Root partition %s', result.stdout)
-        rootdev = result.stdout.strip()
-        if os.path.basename(rootdev).startswith('mmc'):
-            dev = rootdev[:-2]
-        else:
-            dev = rootdev[:-1]
-        kernelA = utils.get_kernel_partition(rootdev)
-        rootfsA = rootdev
-        kernelB = utils.get_free_kernel_partition(rootdev)
-        rootfsB = utils.get_free_root_partition(rootdev)
-        return dev, kernelA, rootfsA, kernelB, rootfsB
-
-
-    def _corrupt_rootfs(self, host):
-        """Corrupt the root file system
-        """
-
-        self.client = host
-        self.client_test = 'platform_CorruptRootfs'
-
-        dev, kernelA, rootfsA, kernelB, rootfsB = self._get_partition_layout()
-        bootcache_offset = self._get_bootcache_offset()
-
-        # Copy kernel and rootfs paritions from A to B
-        logging.info('CorruptRootfs: copy partitions A to B')
-        self.client.run('dd if=%s of=%s bs=64K' % (kernelA, kernelB))
-        self.client.run('dd if=%s of=%s bs=64K' % (rootfsA, rootfsB))
-
-        # Set attribrutes on kernel A and B
-        logging.info('CorruptRootfs: set attributes on kernal A and B')
-        self.client.run('cgpt add -i 2 -T 5 -P 9 -S 1 %s' % dev)
-        self.client.run('cgpt add -i 4 -T 5 -P 9 -S 1 %s' % dev)
-
-        # Corrupt rootfs A and bootcache
-        logging.info('CorruptRootfs: corrupt rootfs A ' + rootfsA)
-        self.client.run('dd if=/dev/urandom of=%s count=16' % rootfsA)
-        if bootcache_offset != '0':
-            self.client.run('dd if=/dev/zero of=%s seek=%s count=4096' %
-                (rootfsA, bootcache_offset))
-
-        logging.info('CorruptRootfs: reboot ' + self.client.hostname)
-        try:
-            self.client.reboot()
-        except error.AutoservRebootError as e:
-            raise error.TestFail('%s.\nTest failed with error %s' % (
-                    traceback.format_exc(), str(e)))
-
-        # Find what partition we are now running on
-        result = self.client.run('rootdev -s')
-        logging.info('Root partition %s', result.stdout)
-
-
-    def run_once(self, host=None):
-        """
-        run_once actually runs the test twice. The second run "undoes"
-        what was done in the first run.
-
-        @param host - the host machine running the test
-        """
-
-        self._corrupt_rootfs(host)
-        self._corrupt_rootfs(host)
diff --git a/server/site_tests/platform_CrashStateful/control b/server/site_tests/platform_CrashStateful/control
deleted file mode 100644
index 719f7af..0000000
--- a/server/site_tests/platform_CrashStateful/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "platform_CrashStateful"
-AUTHOR = "chromeos-kernel,taysom"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-Tests crash recovery of stateful partition
-"""
-
-def run_crash_stateful(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_CrashStateful", host=host)
-
-parallel_simple(run_crash_stateful, machines)
diff --git a/server/site_tests/platform_CrashStateful/control.regression b/server/site_tests/platform_CrashStateful/control.regression
deleted file mode 100644
index 7e74bb7..0000000
--- a/server/site_tests/platform_CrashStateful/control.regression
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-NAME = "platform_CrashStateful"
-AUTHOR = "chromeos-kernel,taysom"
-ATTRIBUTES = "suite:kernel_daily_regression"
-DEPENDENCIES = "servo_state:WORKING"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-Tests crash recovery of stateful partition.
-"""
-
-def run_crash_stateful(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_CrashStateful", host=host)
-
-parallel_simple(run_crash_stateful, machines)
diff --git a/server/site_tests/platform_CrashStateful/platform_CrashStateful.py b/server/site_tests/platform_CrashStateful/platform_CrashStateful.py
deleted file mode 100644
index 5a2b69c..0000000
--- a/server/site_tests/platform_CrashStateful/platform_CrashStateful.py
+++ /dev/null
@@ -1,157 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import traceback
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-class platform_CrashStateful(test.test):
-    """Tests the crash recovery of the stateful file system
-
-    1. Create a specific file 'charlie'
-    2. Sync
-    3. Crash system
-    4. Wait for reboot
-    5. Check if 'charlie' is there and complete
-    6. Clean up
-
-    Do the samething with an ecryptfs volume.
-    """
-    version = 1
-    _STATEFUL_DIR = '/usr/local/CrashDir'
-    _ECRYPT_DIR = '/usr/local/ecryptfs_tst'
-    _ECRYPT_MOUNT_POINT = '/usr/local/ecryptfs_mnt'
-    _ECRYPT_TEST_DIR = '%s/CrashDir' % _ECRYPT_MOUNT_POINT
-
-
-    def _run(self, cmd):
-        """Run the give command and log results
-
-        @param cmd: command to be run
-        """
-        result = self.client.run(cmd)
-        if result.exit_status != 0:
-            logging.error('%s: %s', cmd, result.stdout)
-
-
-    def _ecrypt_mount(self, edir, mnt):
-        """Mount the eCrypt File System
-
-        @param ddir: directory where encrypted file system is stored
-        @param mnt: mount point for encrypted file system
-        """
-        options = ('-o'
-                   ' key=passphrase:passphrase_passwd=secret'
-                   ',ecryptfs_cipher=aes'
-                   ',ecryptfs_key_bytes=32'
-                   ',no_sig_cache'
-                   ',ecryptfs_passthrough=no'
-                   ',ecryptfs_enable_filename_crypto=no')
-        self._run('mkdir -p %s %s' % (edir, mnt))
-        self._run('mount -t ecryptfs %s %s %s' %
-                           (options, edir, mnt))
-
-
-    def _ecrypt_unmount(self, edir, mnt):
-        """Unmount the eCrypt File System and remove it and its mount point
-
-        @param dir: directory where encrypted file system is stored
-        @param mnt: mount point for encrypted file system
-        """
-        self._run('umount %s' % mnt)
-        self._run('rm -R %s' % edir)
-        self._run('rm -R %s' % mnt)
-
-
-    def _crash(self):
-        """crash the client without giving anything a chance to clean up
-
-        We use the kernel crash testing interface to immediately reboot the
-        system. No chance for any flushing of I/O or cleaning up.
-        """
-        logging.info('CrashStateful: force panic %s', self.client.hostname)
-        interface = "/sys/kernel/debug/provoke-crash/DIRECT"
-        cmd = 'echo PANIC > %s' % interface
-        if not self.client.run('ls %s' % interface,
-                               ignore_status=True).exit_status == 0:
-            interface = "/proc/breakme"
-            cmd = 'echo panic > %s' % interface
-        try:
-            """The following is necessary to avoid command execution errors
-            1) If ssh on the DUT doesn't terminate cleanly, it will exit with
-               status 255 causing an exception
-            2) ssh won't terminate if a background process holds open stdin,
-               stdout, or stderr
-            3) without a sleep delay, the reboot may close the connection with
-               an error
-            """
-            wrapped_cmd = 'sleep 1; %s'
-            self.client.reboot(reboot_cmd=wrapped_cmd % cmd)
-        except error.AutoservRebootError as e:
-            raise error.TestFail('%s.\nTest failed with error %s' % (
-                    traceback.format_exc(), str(e)))
-
-
-    def _create_file_and_crash(self, dir):
-        """Sets up first part of test, then crash
-
-        @param dir - directory where test files are created
-        """
-        self._run('mkdir -p %s' % dir)
-        self._run('echo charlie smith >%s/charlie' % dir)
-        self._run('sync')
-        self._crash()
-
-
-    def _verify_and_cleanup(self, dir):
-        """Verify results and clean up
-
-        @param dir - directory where test files were created
-        """
-        result = self.client.run('cat %s/charlie' % dir)
-        hi = result.stdout.strip()
-        if hi != 'charlie smith':
-            raise error.TestFail('Test failed, Sync mechanism failed')
-        self._run('rm -fr %s' % dir)
-
-
-    def _crash_stateful(self, dir):
-        """Crash the stateful file system while changing it
-
-        @param dir - directory where test files are created
-        """
-        self._create_file_and_crash(dir)
-        self._verify_and_cleanup(dir)
-
-
-    def _crash_ecrptfs(self, edir, mnt, dir):
-        """Crash the stateful file system while changing it
-
-        @param edir - directory used for the encrypted file system
-        @param mnt - mount point for the encrypted file system
-        @param dir - directory where test files are created
-        """
-        self._ecrypt_mount(edir, mnt)
-        self._create_file_and_crash(dir)
-        self._ecrypt_mount(edir, mnt)
-        self._verify_and_cleanup(dir)
-        self._ecrypt_unmount(edir, mnt)
-
-
-    def run_once(self, host=None):
-        """run_once runs the test.
-
-        1. Runs a crash test on stateful partition
-        2. Create an ecryptfs volume and run the same
-           crash test
-
-        @param host - the host machine running the test
-        """
-        self.client = host
-
-        self._crash_stateful(self._STATEFUL_DIR)
-
-        self._crash_ecrptfs(self._ECRYPT_DIR, self._ECRYPT_MOUNT_POINT,
-            self._ECRYPT_TEST_DIR)
diff --git a/server/site_tests/platform_CryptohomeLECredentialManagerServer/control b/server/site_tests/platform_CryptohomeLECredentialManagerServer/control
deleted file mode 100644
index b685ba5..0000000
--- a/server/site_tests/platform_CryptohomeLECredentialManagerServer/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "allenwebb"
-NAME = "platform_CryptohomeLECredentialManagerServer"
-PURPOSE = "Validate PinWeaver functionality on Cr50"
-ATTRIBUTES = "suite:bvt-perbuild"
-TIME = "SHORT"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-Test the functionality of le_credential_manager. This feature handles
-low entropy credentials such as PINs and uses Cr50 to exchange the low
-entropy credential for a high entropy credential while enforcing limits
-on how often attempts can be made. The high entropy credential is needed
-to mount the user's home directory.
-"""
-
-job.run_test('platform_CryptohomeLECredentialManagerServer',
-             host=hosts.create_host(machines[0]))
diff --git a/server/site_tests/platform_CryptohomeLECredentialManagerServer/platform_CryptohomeLECredentialManagerServer.py b/server/site_tests/platform_CryptohomeLECredentialManagerServer/platform_CryptohomeLECredentialManagerServer.py
deleted file mode 100644
index fb761a1..0000000
--- a/server/site_tests/platform_CryptohomeLECredentialManagerServer/platform_CryptohomeLECredentialManagerServer.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import pinweaver_client
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-
-class platform_CryptohomeLECredentialManagerServer(test.test):
-    """Tests the le_credential_manager functionality of cryptohome.
-    """
-
-    version = 1
-
-    def run_once(self, host):
-        """Runs the platform_CryptohomeLECredentialManager test across a reboot.
-        """
-        try:
-            pinweaver_client.GetLog(host)
-        except pinweaver_client.PinWeaverNotAvailableError:
-            logging.info('PinWeaver not supported!')
-            raise error.TestNAError('PinWeaver is not available')
-
-        autotest.Autotest(host).run_test(
-            'platform_CryptohomeLECredentialManager', pre_reboot=True,
-            check_client_result=True)
-
-        host.reboot()
-
-        autotest.Autotest(host).run_test(
-            'platform_CryptohomeLECredentialManager', pre_reboot=False,
-            check_client_result=True)
-
-        logging.info('Tests passed!')
diff --git a/server/site_tests/platform_CryptohomeTPMReOwnServer/control b/server/site_tests/platform_CryptohomeTPMReOwnServer/control
deleted file mode 100644
index 5a59553..0000000
--- a/server/site_tests/platform_CryptohomeTPMReOwnServer/control
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_CryptohomeTPMReOwnServer"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:bvt-perbuild"
-
-DOC = """
-This test verifies that cryptohome re-creates a user's cryptohome directory
-when the TPM is cleared and re-owned.
-"""
-
-job.run_test("platform_CryptohomeTPMReOwnServer",
-             host=hosts.create_host(machines[0]))
diff --git a/server/site_tests/platform_CryptohomeTPMReOwnServer/platform_CryptohomeTPMReOwnServer.py b/server/site_tests/platform_CryptohomeTPMReOwnServer/platform_CryptohomeTPMReOwnServer.py
deleted file mode 100644
index 4f4e122..0000000
--- a/server/site_tests/platform_CryptohomeTPMReOwnServer/platform_CryptohomeTPMReOwnServer.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import test, autotest
-
-class platform_CryptohomeTPMReOwnServer(test.test):
-    """
-    The server-side controller for verifying that cryptohome can re-create a
-    user's vault if the TPM is cleared and re-owned.
-    """
-    version = 1
-    n_client_reboots = 0
-    client_at = None
-
-    # Run the client subtest named [subtest].
-    def tpm_run(self, subtest, ignore_status=False):
-        self.client_at.run_test(self.client_test,
-                                subtest=subtest,
-                                check_client_result=(not ignore_status))
-
-
-    def reboot_client(self):
-        # Reboot the client
-        logging.info('CryptohomeTPMReOwnServer: rebooting %s number %d',
-                     self.client.hostname, self.n_client_reboots)
-        self.client.reboot()
-        self.n_client_reboots += 1
-
-
-    def run_once(self, host=None):
-        self.client = host
-        self.client_at = autotest.Autotest(self.client)
-        self.client_test = 'platform_CryptohomeTPMReOwn'
-
-        # Set up the client in the unowned state and init the TPM again.
-        tpm_utils.ClearTPMOwnerRequest(self.client)
-        self.tpm_run("take_tpm_ownership", ignore_status=True)
-
-        self.tpm_run("mount_cryptohome")
-
-        self.reboot_client()
-        self.tpm_run("mount_cryptohome_after_reboot")
-
-        # Clear and re-own the TPM on the next boot.
-        tpm_utils.ClearTPMOwnerRequest(self.client)
-        self.tpm_run("take_tpm_ownership", ignore_status=True)
-
-        self.tpm_run("mount_cryptohome_check_recreate")
diff --git a/server/site_tests/platform_CryptohomeTpmLiveTestServer/OWNERS b/server/site_tests/platform_CryptohomeTpmLiveTestServer/OWNERS
deleted file mode 100644
index b01b374..0000000
--- a/server/site_tests/platform_CryptohomeTpmLiveTestServer/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-emaxx@chromium.org
-apronin@chromium.org
diff --git a/server/site_tests/platform_CryptohomeTpmLiveTestServer/control b/server/site_tests/platform_CryptohomeTpmLiveTestServer/control
deleted file mode 100644
index f3edc01..0000000
--- a/server/site_tests/platform_CryptohomeTpmLiveTestServer/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "emaxx, apronin"
-NAME = "platform_CryptohomeTpmLiveTestServer"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:bvt-perbuild"
-JOB_RETRIES = 1
-
-DOC = """
-This test clears the TPM if necessary, kicks off a client side test that runs
-cryptohome's TPM live tests and clears the TPM again. Every time the TPM is
-cleared, the system is rebooted.
-"""
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test('platform_CryptohomeTpmLiveTestServer', host=host)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/platform_CryptohomeTpmLiveTestServer/platform_CryptohomeTpmLiveTestServer.py b/server/site_tests/platform_CryptohomeTpmLiveTestServer/platform_CryptohomeTpmLiveTestServer.py
deleted file mode 100644
index 7048710..0000000
--- a/server/site_tests/platform_CryptohomeTpmLiveTestServer/platform_CryptohomeTpmLiveTestServer.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-
-class platform_CryptohomeTpmLiveTestServer(test.test):
-    """A test that runs platform_CryptohomeTpmLiveTest and clears the TPM as
-    necessary."""
-    version = 1
-
-    def run_once(self, host=None):
-        """Runs a single iteration of the test."""
-        self.client = host
-
-        # Skip the test if the TPM is unavailable.
-        tpm_status = tpm_utils.TPMStatus(self.client)
-        if 'is_enabled' not in tpm_status:
-            raise error.TestError('Error obtaining TPM enabled state. Status '
-                                  'returned by cryptohome: ' + str(tpm_status))
-        if not tpm_status['is_enabled']:
-            return
-
-        # Clear the TPM, so that the client test is able to obtain the TPM owner
-        # password.
-        tpm_utils.ClearTPMOwnerRequest(self.client, wait_for_ready=True)
-
-        # Run the client test which executes the cryptohome's TPM live test.
-        autotest.Autotest(self.client).run_test(
-                'platform_CryptohomeTpmLiveTest', check_client_result=True)
-
-        # Clean the TPM up, so that the TPM state clobbered by the TPM live
-        # tests doesn't affect subsequent tests.
-        tpm_utils.ClearTPMOwnerRequest(self.client)
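
One design note on the deleted wrapper above: if run_test raises when the client test fails (as check_client_result=True suggests), the trailing ClearTPMOwnerRequest is skipped. A hedged sketch of the same clear/run/clear flow with the cleanup moved into a finally block (not the original implementation):

# Sketch only: same flow as above, with the final clear in a finally block so
# TPM state is restored even when the client-side live test fails.
from autotest_lib.client.common_lib.cros import tpm_utils
from autotest_lib.server import autotest

def run_tpm_live_test(host):
    """host is the server-side Host object for the DUT."""
    tpm_utils.ClearTPMOwnerRequest(host, wait_for_ready=True)
    try:
        autotest.Autotest(host).run_test('platform_CryptohomeTpmLiveTest',
                                         check_client_result=True)
    finally:
        # Clear again so state clobbered by the live tests does not leak
        # into subsequent tests.
        tpm_utils.ClearTPMOwnerRequest(host)
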
diff --git a/server/site_tests/platform_ExternalUSBBootStress/control b/server/site_tests/platform_ExternalUSBBootStress/control
deleted file mode 100644
index 9af34e4..0000000
--- a/server/site_tests/platform_ExternalUSBBootStress/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_ExternalUSBBootStress"
-PURPOSE = "Servo based USB boot stress test"
-CRITERIA = "This test will fail if the device fails to boot."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test uses servo to simulate USB connect/removal events during boot.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    reboots = int(args_dict.get('reboots', 1000))
-
-    job.run_test("platform_ExternalUSBBootStress", host=host,
-                 disable_sysinfo=True, reboots=reboots)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_ExternalUSBBootStress/control.50 b/server/site_tests/platform_ExternalUSBBootStress/control.50
deleted file mode 100644
index 540fd22..0000000
--- a/server/site_tests/platform_ExternalUSBBootStress/control.50
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_ExternalUSBBootStress.50"
-PURPOSE = "Servo based USB boot stress test"
-CRITERIA = "This test will fail if the device fails to boot."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-# Stop running the test due to crbug.com/654478
-# ATTRIBUTES = "suite:usb_detect_stress"
-# DEPENDENCIES = "servo_state:WORKING"
-
-
-DOC = """
-This test uses servo to simulate USB connect/removal events during boot.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    reboots = int(args_dict.get('reboots', 50))
-
-    job.run_test("platform_ExternalUSBBootStress", host=host,
-                 disable_sysinfo=True, reboots=reboots)
-
-parallel_simple(run, machines)
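
Both control files above pull their knobs out of the --args string via utils.args_to_dict(args). Purely as an illustration of the shape of that data (this stand-in parser is hypothetical; the real helper lives in autotest_lib.server.utils):

def illustrate_args_to_dict(args):
    """Hypothetical stand-in: turns ['reboots=10', ...] into a dict."""
    parsed = {}
    for arg in args:
        for token in arg.split():
            key, sep, value = token.partition('=')
            if sep:
                parsed[key] = value
    return parsed

args_dict = illustrate_args_to_dict(['reboots=10'])
reboots = int(args_dict.get('reboots', 50))   # 50 is the control.50 default
assert reboots == 10
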
diff --git a/server/site_tests/platform_ExternalUSBBootStress/platform_ExternalUSBBootStress.py b/server/site_tests/platform_ExternalUSBBootStress/platform_ExternalUSBBootStress.py
deleted file mode 100644
index 420a5e8..0000000
--- a/server/site_tests/platform_ExternalUSBBootStress/platform_ExternalUSBBootStress.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, re, time
-
-from autotest_lib.server import test
-from autotest_lib.server.cros import stress
-from autotest_lib.server.cros.servo import servo
-from autotest_lib.client.common_lib import error
-
-_WAIT_DELAY = 5
-
-class platform_ExternalUSBBootStress(test.test):
-    """Uses servo to repeatedly connect/remove USB devices during boot."""
-    version = 1
-
-    def run_once(self, host, reboots):
-        reboots = int(reboots)
-        self.client = host
-        # The servo hubs come up as diffs in connected components.  These
-        # should be ignored for this test.  It is a list so when servo next
-        # is available it may have a different hub which can be appended.
-        servo_hardware_list = ['Standard Microsystems Corp.']
-
-
-        def strip_lsusb_output(lsusb_output):
-            items = lsusb_output.split('\n')
-            named_list = []
-            unnamed_device_count = 0
-            for item in items:
-                columns = item.split(' ')
-                if len(columns) == 6 or len(' '.join(columns[6:]).strip()) == 0:
-                    logging.info('Unnamed device located, adding generic name.')
-                    name = 'Unnamed device %d' % unnamed_device_count
-                    unnamed_device_count += 1
-                else:
-                    name = ' '.join(columns[6:]).strip()
-                if name not in servo_hardware_list:
-                    named_list.append(name)
-            return named_list
-
-
-        def set_hub_power(on=True, check_host_detection=False):
-            reset = 'off'
-            if not on:
-                reset = 'on'
-            host.servo.set('dut_hub1_rst1', reset)
-            if check_host_detection:
-                time.sleep(_WAIT_DELAY)
-                return strip_lsusb_output(host.run('lsusb').stdout.strip())
-
-
-        def stress_hotplug():
-            # Devices need some time to come up and to be recognized.  However
-            # this is a stress test so we want to move reasonably fast.
-            time.sleep(2)
-            removed = set_hub_power(False)
-            time.sleep(1)
-            connected = set_hub_power()
-
-
-        host.servo.switch_usbkey('dut')
-        host.servo.set('usb_mux_sel3', 'dut_sees_usbkey')
-
-        # There are some mice that need the data and power connection to both
-        # be removed, otherwise they won't come back up.  This means that the
-        # external devices should only use the usb connections labeled:
-        # USB_KEY and DUT_HUB1_USB.
-        connected = set_hub_power(check_host_detection=True)
-        off_list = set_hub_power(on=False, check_host_detection=True)
-        diff_list = set(connected).difference(set(off_list))
-        if len(diff_list) == 0:
-            raise error.TestError('No connected devices were detected.  Make '
-                                  'sure the devices are connected to USB_KEY '
-                                  'and DUT_HUB1_USB on the servo board.')
-        logging.info('Connected devices list: %s' % diff_list)
-        set_hub_power(True)
-
-        lsb_release = host.run('cat /etc/lsb-release').stdout.split('\n')
-        unsupported_gbb_boards = ['x86-mario', 'x86-alex', 'x86-zgb']
-        skip_gbb = False
-        for line in lsb_release:
-            m = re.match(r'^CHROMEOS_RELEASE_BOARD=(.+)$', line)
-            if m and m.group(1) in unsupported_gbb_boards:
-                skip_gbb = True
-                break
-
-        logging.info('Rebooting the device %d time(s)' % reboots)
-        for i in xrange(reboots):
-            # We want fast boot past the dev screen
-            if not skip_gbb:
-                host.run('/usr/share/vboot/bin/set_gbb_flags.sh 0x01')
-            stressor = stress.ControlledStressor(stress_hotplug)
-            logging.info('Reboot iteration %d of %d' % (i + 1, reboots))
-            if skip_gbb:
-                # For devices that do not support gbb we have servo
-                # accelerate booting through dev mode.
-                host.reset_via_servo()
-                host.servo.power_short_press()
-                time.sleep(servo.Servo.BOOT_DELAY)
-                host.servo.ctrl_d()
-                stressor.start()
-                host.wait_up(timeout=120)
-            else:
-                stressor.start()
-                self.client.reboot()
-            logging.info('Reboot complete, shutting down stressor.')
-            stressor.stop()
-            connected_now = set_hub_power(check_host_detection=True)
-            diff_now = set(connected_now).difference(set(off_list))
-            if diff_list != diff_now:
-                raise error.TestFail('The list of connected items does not '
-                                      'match the master list.\nMaster: %s\n'
-                                      'Current: %s' %
-                                      (diff_list, diff_now))
-            logging.info('Connected devices for iteration %d: %s' %
-                         (i, diff_now))
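
The strip_lsusb_output helper deleted above is self-contained and can be exercised without a DUT. A standalone copy with illustrative sample lines (the devices below are made up, not captured output):

def strip_lsusb_output(lsusb_output,
                       ignored_names=('Standard Microsystems Corp.',)):
    """Mirrors the helper above: returns named devices, skipping servo hubs."""
    named_list = []
    unnamed_device_count = 0
    for item in lsusb_output.split('\n'):
        columns = item.split(' ')
        if len(columns) == 6 or len(' '.join(columns[6:]).strip()) == 0:
            # No vendor/product name after the ID column.
            name = 'Unnamed device %d' % unnamed_device_count
            unnamed_device_count += 1
        else:
            name = ' '.join(columns[6:]).strip()
        if name not in ignored_names:
            named_list.append(name)
    return named_list

sample = ('Bus 001 Device 002: ID 0424:2514 Standard Microsystems Corp.\n'
          'Bus 001 Device 003: ID 046d:c52b Logitech, Inc. Unifying Receiver\n'
          'Bus 001 Device 004: ID dead:beef')
print(strip_lsusb_output(sample))
# ['Logitech, Inc. Unifying Receiver', 'Unnamed device 0']
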
diff --git a/server/site_tests/platform_ExternalUSBStress/control b/server/site_tests/platform_ExternalUSBStress/control
deleted file mode 100644
index 9ef72f1..0000000
--- a/server/site_tests/platform_ExternalUSBStress/control
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_ExternalUSBStress"
-PURPOSE = "Servo based USB stress test"
-CRITERIA = "This test will fail if the device fails to detect USB changes."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-This test uses servo to simulate USB connect/removal,
-and lid close/open events.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    repeat = int(args_dict.get('repeat', 10))
-    network_debug = bool(args_dict.get('network_debug', True))
-
-    job.run_test("platform_ExternalUSBStress", host=host, disable_sysinfo=True,
-                 client_autotest='desktopui_SimpleLogin', repeat=repeat,
-                 network_debug=network_debug)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_ExternalUSBStress/platform_ExternalUSBStress.py b/server/site_tests/platform_ExternalUSBStress/platform_ExternalUSBStress.py
deleted file mode 100644
index 2fa4336..0000000
--- a/server/site_tests/platform_ExternalUSBStress/platform_ExternalUSBStress.py
+++ /dev/null
@@ -1,194 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, threading, time
-
-from autotest_lib.server import autotest, test
-from autotest_lib.client.common_lib import error
-
-_WAIT_DELAY = 10
-_LONG_TIMEOUT = 60
-_WAKE_PRESS_IN_SEC=0.2
-
-class platform_ExternalUSBStress(test.test):
-    """Uses servo to repeatedly connect/remove USB devices."""
-    version = 1
-
-    def run_once(self, host, client_autotest, repeat, network_debug):
-        self.has_lid = True
-
-        # Check if DUT has lid.
-        if host.servo.get('lid_open') == 'not_applicable':
-            self.has_lid = False
-        else:
-            # Check if the servo lid_open control works.
-            host.servo.lid_open()
-            if host.servo.get('lid_open') != 'yes':
-                raise error.TestError('SERVO has a bad lid_open control')
-
-        autotest_client = autotest.Autotest(host)
-        diff_list = []
-        off_list = []
-        # The servo hubs come up as diffs in connected components. These
-        # should be ignored for this test.
-        servo_hardware_prefix = 'Standard Microsystems Corp.'
-        self.is_suspended = False
-
-        def strip_lsusb_output(lsusb_output):
-            """Finds the external USB devices plugged
-
-            @param lsusb_output: lsusb command output to parse
-
-            @returns plugged_list: List of plugged usb devices names
-
-            """
-            items = lsusb_output.split('\n')
-            named_list = []
-            unnamed_device_count = 0
-            for item in items:
-                columns = item.split(' ')
-                if len(columns) == 6 or len(' '.join(columns[6:]).strip()) == 0:
-                    logging.debug('Unnamed device located, adding generic name.')
-                    name = 'Unnamed device %d' % unnamed_device_count
-                    unnamed_device_count += 1
-                else:
-                    name = ' '.join(columns[6:]).strip()
-                if not name.startswith(servo_hardware_prefix):
-                    named_list.append(name)
-            return named_list
-
-
-        def set_hub_power(on=True):
-            """Turns on or off the USB hub (dut_hub1_rst1).
-
-            @param on: To power on the servo-usb hub or not
-
-            @returns usb devices list if not suspended, None if suspended
-            """
-            reset = 'off'
-            if not on:
-                reset = 'on'
-            host.servo.set('dut_hub1_rst1', reset)
-            time.sleep(_WAIT_DELAY)
-
-
-        def wait_to_detect(timeout=_LONG_TIMEOUT):
-            """Waits till timeout for set of peripherals in lsusb output.
-
-            @param timeout: timeout in seconds
-
-            @raise error.TestFail: if timeout is reached
-
-            """
-            start_time = int(time.time())
-            while True:
-                connected = strip_lsusb_output(host.run('lsusb').stdout.strip())
-                if diff_list.issubset(connected):
-                    break
-                elif int(time.time()) - start_time > timeout:
-                    raise error.TestFail('USB peripherals not detected: %s' %
-                                          str(diff_list.difference(connected)))
-                time.sleep(1)
-
-
-        def test_suspend(plugged_before_suspended=False,
-                         plugged_before_resume=False):
-            """Close and open lid while different USB plug status.
-
-            @param plugged_before_suspended: USB plugged before suspended
-            @param plugged_before_resume: USB plugged after suspended
-
-
-            @raise error.TestFail: if USB peripherals do not match expectations.
-
-            """
-            set_hub_power(plugged_before_suspended)
-
-            # Suspend
-            boot_id = host.get_boot_id()
-            if self.has_lid:
-                host.servo.lid_close()
-            else:
-                thread = threading.Thread(target = host.suspend)
-                thread.start()
-            host.test_wait_for_sleep(_LONG_TIMEOUT)
-            logging.debug(' --DUT suspended')
-            self.is_suspended = True
-
-            if plugged_before_resume is not plugged_before_suspended:
-                set_hub_power(plugged_before_resume)
-
-            # Resume
-            if self.has_lid:
-                host.servo.lid_open()
-            else:
-                host.servo.power_key(_WAKE_PRESS_IN_SEC)
-            host.test_wait_for_resume(boot_id, _LONG_TIMEOUT)
-            logging.debug(' --DUT resumed')
-            self.is_suspended = False
-
-            if not plugged_before_resume:
-                time.sleep(_WAIT_DELAY)
-                connected = strip_lsusb_output(host.run('lsusb').stdout.strip())
-                if connected != off_list:
-                    raise error.TestFail('Devices were not removed on wake.')
-            else:
-                wait_to_detect(_LONG_TIMEOUT)
-
-
-        def test_hotplug():
-            """Testing unplug-plug and check for expected peripherals.
-
-             @raise error.TestFail: if USB peripherals do not match expectations.
-
-            """
-            set_hub_power(False)
-            set_hub_power(True)
-            wait_to_detect(_LONG_TIMEOUT)
-
-
-        def stress_external_usb():
-            """Test procedures in one iteration."""
-
-            # Unplug/plug
-            test_hotplug()
-
-            # Suspend/resume as unplugged
-            test_suspend()
-
-            # Plug/close_lid/unplug/open_lid
-            test_suspend(plugged_before_suspended=True)
-
-            # Unplug/close_lid/plug/open_lid
-            test_suspend(plugged_before_resume=True)
-
-            # Suspend/resume as plugged
-            test_suspend(plugged_before_suspended=True,
-                         plugged_before_resume=True)
-
-
-        host.servo.switch_usbkey('dut')
-
-        # There are some mice that need the data and power connection to both
-        # be removed, otherwise they won't come back up.  This means that the
-        # external devices should only use the usb connections labeled:
-        # USB_KEY and DUT_HUB1_USB.
-        set_hub_power(False)
-        time.sleep(_WAIT_DELAY)
-        off_list = strip_lsusb_output(host.run('lsusb').stdout.strip())
-        set_hub_power(True)
-        time.sleep(_WAIT_DELAY * 2)
-        connected = strip_lsusb_output(host.run('lsusb').stdout.strip())
-        diff_list = set(connected).difference(set(off_list))
-        if len(diff_list) == 0:
-            raise error.TestError('No connected devices were detected.  Make '
-                                  'sure the devices are connected to USB_KEY '
-                                  'and DUT_HUB1_USB on the servo board.')
-        logging.debug('Connected devices list: %s', diff_list)
-
-        autotest_client.run_test(client_autotest,
-                                 exit_without_logout=True)
-        for iteration in xrange(1, repeat + 1):
-            logging.debug('---Iteration %d/%d' % (iteration, repeat))
-            stress_external_usb()
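
The stress_external_usb() procedure deleted above runs one hotplug cycle and then every combination of "hub powered before suspend" and "hub powered before resume". The same coverage can be written with itertools.product (the order of the middle two cases differs slightly); test_hotplug and test_suspend are the closures defined in the deleted test:

from itertools import product

def stress_external_usb(test_hotplug, test_suspend):
    """One stress iteration: hotplug, then all four suspend/plug combinations."""
    test_hotplug()
    for before_suspend, before_resume in product((False, True), repeat=2):
        test_suspend(plugged_before_suspended=before_suspend,
                     plugged_before_resume=before_resume)
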
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control b/server/site_tests/platform_ExternalUsbPeripherals/control
index 9c79052..819f9e7 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_ExternalUsbPeripherals"
 PURPOSE = "Servo based USB boot stress test"
 CRITERIA = "This test will fail if any of the actions or checks fail."
@@ -13,6 +13,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test uses servo to connect/disconnect servo USB hub before and
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.crashes b/server/site_tests/platform_ExternalUsbPeripherals/control.crashes
deleted file mode 100644
index 1ed230c..0000000
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.crashes
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_ExternalUsbPeripherals.crashes"
-PURPOSE = "Servo based USB boot stress test"
-CRITERIA = "This test will fail if crash files are present after each step."
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-#ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-This test uses servo to connect/disconnect servo USB hub before and
-after events like reboot, login, suspend, resume etc.
-
-The test fails if
-- crash files are generated
-- device is pingable when suspended
-- wrong action passed through action_sequence flag
-- USB detected peripherals are different than expected
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    repeat = int(args_dict.get("repeat", 1))
-    default_actions = str("reboot,unplug,plug,login,unplug,plug,"
-                          "suspend,resume,unplug,suspend,plug,resume,"
-                          "suspend,unplug,resume,plug")
-
-    action_sequence = str(args_dict.get("action_sequence", default_actions))
-
-    job.run_test("platform_ExternalUsbPeripherals", host=host,
-                 disable_sysinfo=True, client_autotest="desktopui_SimpleLogin",
-                 action_sequence=action_sequence, repeat=repeat,
-                 crash_check=True, tag="crashes")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect b/server/site_tests/platform_ExternalUsbPeripherals/control.detect
deleted file mode 100644
index 04b4c95..0000000
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect
+++ /dev/null
@@ -1,81 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_ExternalUsbPeripherals.detect"
-PURPOSE = "Servo based USB boot stress test"
-CRITERIA = "This test will fail if any of the actions or checks fail."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:usb_detect_stress"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-This test uses servo to connect/disconnect servo USB hub before and
-after events like reboot, login, suspend, resume etc.
-
-The test fails if
-- device is pingable when suspended
-- wrong action passed through action_sequence flag
-- USB detected peripherals are different than expected
-- there is no servo board attached
-- USB peripherals checks(usb_checks below) on command line fail
-Other detection checks can be added for each peripheral
-
-Set of USB peripherals plugged:
-- USB headset
-- USB HD Webcam - should be Logitech HD Pro Webcam C920
-- USB stick with four partitions named ExFAT  Ext4  FAT  NTFS
-- USB mouse
-- USB 3G modem
-- USB SD card reader(one microSD card plugged)
-- USB connected phone(optional)
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    repeat = int(args_dict.get("repeat", 2))
-
-    default_actions = str("reboot,unplug,plug,login,unplug,plug,"
-                          "suspend,resume,unplug,suspend,plug,resume,"
-                          "suspend,unplug,resume,plug")
-
-    action_sequence = str(args_dict.get("action_sequence", default_actions))
-
-    usb_list = ["\"Kingston Technology Company Inc.\"",
-                "\"Alcor Micro Corp.|GEMBIRD\"",
-                "\"USB PnP Sound Device: USB Audio|C-Media Electronics, Inc.* Audio\"",
-               ]
-    usb_checks = {
-        # USB Audio Output devices
-        str("cras_test_client --dump_server_info | "
-            "awk \"/Output Devices:/,/Output Nodes:/\" | grep -E ") :
-            ["\"USB PnP Sound Device: USB Audio|C-Media USB Headphone Set: USB Audio\"" ],
-        # USB Audio Input devices
-        str("loggedin:cras_test_client --dump_server_info | "
-            "awk \"/Input Devices:/,/Input Nodes:/\" | grep -iE ") :
-            ["\"TeckNet: USB Audio|USB ?2.0 PC Camera\"",
-             "\"USB PnP Sound Device: USB Audio|C-Media USB Headphone Set: USB Audio\""],
-        # USB stick four partitions volumes
-        "loggedin:ls -l /media/removable/ | grep -i " :
-            ["ExFAT", "Ext4", "FAT", "NTFS"],
-        # USB Web camera
-        "cat /sys/class/video4linux/video*/name | grep -iE " :
-            ["\"TeckNet|USB ?2.0 PC Camera\""],
-        }
-
-    job.run_test("platform_ExternalUsbPeripherals", host=host,
-                 disable_sysinfo=True, client_autotest="desktopui_SimpleLogin",
-                 action_sequence=action_sequence, repeat=repeat,
-                 usb_list=usb_list, usb_checks=usb_checks, tag="detect")
-
-parallel_simple(run, machines)
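
The usb_checks table used by these control files maps a command prefix to the grep patterns appended to it, and a 'loggedin:' prefix marks checks that only make sense after login. A minimal sketch of how such a table could be walked (illustrative only; the real evaluation lives in platform_ExternalUsbPeripherals.py):

def run_usb_checks(host, usb_checks, logged_in):
    """Returns True when every (command prefix + pattern) check produces output.

    host is a server-side Host object (host.run as used elsewhere in this
    change); usb_checks has the same shape as the dict in the control file.
    """
    all_ok = True
    for cmd_prefix, patterns in usb_checks.items():
        if cmd_prefix.startswith('loggedin:'):
            if not logged_in:
                continue
            cmd_prefix = cmd_prefix[len('loggedin:'):]
        for pattern in patterns:
            result = host.run(cmd_prefix + pattern, ignore_status=True)
            if not result.stdout.strip():
                all_ok = False
    return all_ok
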
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.crash_check_short b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.crash_check_short
index 9ceff50..8c8686c 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.crash_check_short
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.crash_check_short
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_ExternalUsbPeripherals.detect.crash_check_short"
 PURPOSE = "Servo based USB boot stress test"
 CRITERIA = "This test will fail if any of the actions or checks fail."
@@ -14,6 +14,7 @@
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:usb_detect"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test uses servo to connect/disconnect servo USB hub before and
@@ -47,7 +48,7 @@
 
     action_sequence = str(args_dict.get("action_sequence", default_actions))
 
-    usb_list = ["\"Kingston Technology Company Inc.\"",
+    usb_list = ["\"Netac Technology Co., Ltd OnlyDisk\"",
                 "\"Alcor Micro Corp.|GEMBIRD\"",
                 "\"USB PnP Sound Device: USB Audio|C-Media Electronics, Inc.* Audio\"",
                ]
@@ -66,7 +67,7 @@
             ["ExFAT", "Ext4", "FAT", "NTFS"],
         # USB Web camera
         "cat /sys/class/video4linux/video*/name | grep -iE " :
-            ["\"TeckNet|USB ?2.0 PC Camera\""],
+            ["\"TeckNet|USB ?2.0 PC Camera|Video Capture|Video Output|Metadata\""],
         }
 
     job.run_test("platform_ExternalUsbPeripherals", host=host,
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.lid_close_open_short b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.lid_close_open_short
index fc49b0e..9a277d8 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.lid_close_open_short
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.lid_close_open_short
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_ExternalUsbPeripherals.detect.lid_close_open"
 PURPOSE = "Servo based USB boot stress test"
 CRITERIA = "This test will fail if any of the actions or checks fail."
@@ -15,6 +15,8 @@
 ATTRIBUTES = "suite:usb_detect"
 DEPENDENCIES = "servo_state:WORKING"
 JOB_RETRIES = 1
+PY_VERSION = 3
+
 DOC = """
 This test uses servo to connect/disconnect servo USB hub before and
 after events like reboot, login, closelid, openlid etc.
@@ -48,7 +50,7 @@
 
     action_sequence = str(args_dict.get("action_sequence", default_actions))
 
-    usb_list = ["\"Kingston Technology Company Inc.\"",
+    usb_list = ["\"Netac Technology Co., Ltd OnlyDisk\"",
                 "\"Alcor Micro Corp.|GEMBIRD\"",
                 "\"USB PnP Sound Device: USB Audio|C-Media Electronics, Inc.* Audio\"",
                ]
@@ -67,7 +69,7 @@
             ["ExFAT", "Ext4", "FAT", "NTFS"],
         # USB Web camera
         "cat /sys/class/video4linux/video*/name | grep -iE " :
-            ["\"TeckNet|USB ?2.0 PC Camera\""],
+            ["\"TeckNet|USB ?2.0 PC Camera|Video Capture|Video Output|Metadata\""],
         }
 
     job.run_test("platform_ExternalUsbPeripherals", host=host,
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.login_unplug_closelid_openlid_plug b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.login_unplug_closelid_openlid_plug
deleted file mode 100644
index b1fc0ef..0000000
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.login_unplug_closelid_openlid_plug
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_ExternalUsbPeripherals.detect.login_unplug_closelid_openlid_plug"
-PURPOSE = "Servo based USB boot stress test"
-CRITERIA = "This test will fail if any of the actions or checks fail."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-#ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo_state:WORKING"
-JOB_RETRIES = 2
-
-DOC = """
-This test uses servo to connect/disconnect servo USB hub before and
-after events like reboot, login, closelid, openlid etc.
-
-The test fails if
-- device is pingable when suspended
-- wrong action passed through action_sequence flag
-- USB detected peripherals are different than expected
-- there is no servo board attached
-- USB peripherals checks(usb_checks below) on command line fail
-Other detection checks can be added for each peripheral
-
-Set of USB peripherals plugged:
-- USB headset
-- USB HD Webcam - should be Logitech HD Pro Webcam C920
-- USB stick with four partitions named ExFAT  Ext4  FAT  NTFS
-- USB mouse
-- USB 3G modem
-- USB SD card reader(one microSD card plugged)
-- USB connected phone(optional)
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    repeat = int(args_dict.get("repeat", 1))
-
-    default_actions = str("login,unplug,closelid,openlid,plug")
-
-    action_sequence = str(args_dict.get("action_sequence", default_actions))
-
-    usb_list = ["\"Kingston Technology Company Inc.\"",
-                "\"Alcor Micro Corp.|GEMBIRD\"",
-                "\"USB PnP Sound Device: USB Audio|C-Media Electronics, Inc.* Audio\"",
-               ]
-    usb_checks = {
-        # USB Audio Output devices
-        str("cras_test_client --dump_server_info | "
-            "awk \"/Output Devices:/,/Output Nodes:/\" | grep -E ") :
-            ["\"USB PnP Sound Device: USB Audio|C-Media USB Headphone Set: USB Audio\"" ],
-        # USB Audio Input devices
-        str("loggedin:cras_test_client --dump_server_info | "
-            "awk \"/Input Devices:/,/Input Nodes:/\" | grep -iE ") :
-            ["\"TeckNet: USB Audio|USB ?2.0 PC Camera\"",
-             "\"USB PnP Sound Device: USB Audio|C-Media USB Headphone Set: USB Audio\""],
-        # USB stick four partitions volumes
-        "loggedin:ls -l /media/removable/ | grep -i " :
-            ["ExFAT", "Ext4", "FAT", "NTFS"],
-        # USB Web camera
-        "cat /sys/class/video4linux/video*/name | grep -iE " :
-            ["\"TeckNet|USB ?2.0 PC Camera\""],
-        }
-
-    job.run_test("platform_ExternalUsbPeripherals", host=host,
-                 disable_sysinfo=True, client_autotest="desktopui_SimpleLogin",
-                 action_sequence=action_sequence, repeat=repeat,
-                 usb_list=usb_list, usb_checks=usb_checks, tag="login_unplug_closelid_openlid_plug")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.long_cycle b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.long_cycle
index db1e6d3..45c68b7 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.long_cycle
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.long_cycle
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_ExternalUsbPeripherals.detect.long_cycle"
 PURPOSE = "Servo based USB peripheral detection test"
 CRITERIA = "This test will fail if any of the actions or checks fail."
@@ -14,6 +14,7 @@
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:usb_detect"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test uses servo to connect/disconnect servo USB hub before and
@@ -52,7 +53,7 @@
 
     action_sequence = str(args_dict.get("action_sequence", default_actions))
 
-    usb_list = ["\"Kingston Technology Company Inc.\"",
+    usb_list = ["\"Netac Technology Co., Ltd OnlyDisk\"",
                 "\"Alcor Micro Corp.|GEMBIRD\"",
                 "\"USB PnP Sound Device: USB Audio|C-Media Electronics, Inc.* Audio\"",
                ]
@@ -71,7 +72,7 @@
             ["ExFAT", "Ext4", "FAT", "NTFS"],
         # USB Web camera
         "cat /sys/class/video4linux/video*/name | grep -iE " :
-            ["\"TeckNet|USB ?2.0 PC Camera\""],
+            ["\"TeckNet|USB ?2.0 PC Camera|Video Capture|Video Output|Metadata\""],
         }
 
     job.run_test("platform_ExternalUsbPeripherals", host=host,
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.unplug_login_plug b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.unplug_login_plug
deleted file mode 100644
index 04a7478..0000000
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.unplug_login_plug
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_ExternalUsbPeripherals.detect.unplug_login_plug"
-PURPOSE = "Servo based USB peripherals detection test"
-CRITERIA = "This test will fail if any of the actions or checks fail."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-#ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-This test uses servo to connect/disconnect servo USB hub before and
-after events like reboot, login, suspend, resume etc.
-
-The test fails if
-- device is pingable when suspended
-- wrong action passed through action_sequence flag
-- USB detected peripherals are different than expected
-- there is no servo board attached
-- USB peripherals checks(usb_checks below) on command line fail
-Other detection checks can be added for each peripheral
-
-Set of USB peripherals plugged:
-- USB headset
-- USB HD Webcam - should be Logitech HD Pro Webcam C920
-- USB stick with four partitions named ExFAT  Ext4  FAT  NTFS
-- USB mouse
-- USB 3G modem
-- USB SD card reader(one microSD card plugged)
-- USB connected phone(optional)
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    repeat = int(args_dict.get("repeat", 1))
-
-    default_actions = str("unplug,login,plug")
-
-    action_sequence = str(args_dict.get("action_sequence", default_actions))
-
-    usb_list = ["\"Kingston Technology Company Inc.\"",
-                "\"Alcor Micro Corp.|GEMBIRD\"",
-                "\"USB PnP Sound Device: USB Audio|C-Media Electronics, Inc.* Audio\"",
-               ]
-    usb_checks = {
-        # USB Audio Output devices
-        str("cras_test_client --dump_server_info | "
-            "awk \"/Output Devices:/,/Output Nodes:/\" | grep -E ") :
-            ["\"USB PnP Sound Device: USB Audio|C-Media USB Headphone Set: USB Audio\"" ],
-        # USB Audio Input devices
-        str("loggedin:cras_test_client --dump_server_info | "
-            "awk \"/Input Devices:/,/Input Nodes:/\" | grep -iE ") :
-            ["\"TeckNet: USB Audio|USB ?2.0 PC Camera\"",
-             "\"USB PnP Sound Device: USB Audio|C-Media USB Headphone Set: USB Audio\""],
-        # USB stick four partitions volumes
-        "loggedin:ls -l /media/removable/ | grep -i " :
-            ["ExFAT", "Ext4", "FAT", "NTFS"],
-        # USB Web camera
-        "cat /sys/class/video4linux/video*/name | grep -iE " :
-            ["\"TeckNet|USB ?2.0 PC Camera\""],
-        }
-
-    job.run_test("platform_ExternalUsbPeripherals", host=host,
-                 disable_sysinfo=True, client_autotest="desktopui_SimpleLogin",
-                 action_sequence=action_sequence, repeat=repeat,
-                 usb_list=usb_list, usb_checks=usb_checks, tag="detect.unplug_login_plug")
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.stress b/server/site_tests/platform_ExternalUsbPeripherals/control.stress
deleted file mode 100644
index 0cbf1c3..0000000
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.stress
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_ExternalUsbPeripherals"
-PURPOSE = "Servo based USB boot stress test"
-CRITERIA = "This test will fail if any of the actions or checks fail."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-This test uses servo to connect/disconnect servo USB hub before and
-after events like reboot, login, suspend, resume etc.
-
-The test fails if
-- crash files are generated
-- device is pingable when suspended
-- wrong action passed through action_sequence flag
-- USB detected peripherals are different than expected
-- there is no servo board attached
-
-Repeats the sequence of actions for few times only.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-
-    repeat = int(args_dict.get("repeat", 2))
-
-    default_actions = str("reboot,unplug,plug,login,unplug,plug,"
-                          "suspend,resume,unplug,suspend,plug,resume,"
-                          "suspend,unplug,resume,plug")
-
-    action_sequence = str(args_dict.get("action_sequence", default_actions))
-
-    job.run_test("platform_ExternalUsbPeripherals", host=host,
-                 disable_sysinfo=True, client_autotest="desktopui_SimpleLogin",
-                 action_sequence=action_sequence, repeat=repeat,
-                 stress_rack=True, tag="stress")
-
-parallel_simple(run, machines)
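
The action_sequence flag shared by all of these control files is a comma-separated list such as 'reboot,unplug,plug,login,...'; the surviving platform_ExternalUsbPeripherals.py dispatches on upper-cased action names. A condensed sketch of that parsing with stub handlers (the dispatcher below is illustrative, not the test's code):

def parse_actions(action_sequence):
    """Splits 'reboot,unplug,plug,...' into normalized action names."""
    return [a.strip().upper() for a in action_sequence.split(',') if a.strip()]

def run_actions(action_sequence, handlers):
    """handlers maps an action name to a callable; unknown names are errors."""
    for step, action in enumerate(parse_actions(action_sequence), start=1):
        handler = handlers.get(action)
        if handler is None:
            raise ValueError('Wrong action %r at step %d' % (action, step))
        handler()

# Stub usage:
run_actions('unplug,login,plug',
            {'UNPLUG': lambda: None, 'LOGIN': lambda: None,
             'PLUG': lambda: None})
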
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/platform_ExternalUsbPeripherals.py b/server/site_tests/platform_ExternalUsbPeripherals/platform_ExternalUsbPeripherals.py
index 1264f3f..6c5f238 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/platform_ExternalUsbPeripherals.py
+++ b/server/site_tests/platform_ExternalUsbPeripherals/platform_ExternalUsbPeripherals.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -14,6 +15,12 @@
 _SUSPEND_TIME = 30
 _UPPER_USB_PORT = 'usb_mux_sel1'
 _WAIT_DELAY = 15
+_WAIT_OPENLID_DELAY = 30
+_WAIT_LONG_DELAY = 60
+
+# servo v4.1 controls
+_AUX_USB_PORT = 'aux_usbkey_mux'
+_IMAGE_USB_PORT = 'image_usbkey_mux'
 
 
 class platform_ExternalUsbPeripherals(test.test):
@@ -189,7 +196,7 @@
             # Check for mandatory USB devices passed by the usb_list flag
             for usb_name in self.usb_list:
                 found = self.wait_for_cmd_output(
-                    'lsusb | grep -E ', usb_name, _WAIT_DELAY * 4,
+                    'lsusb | grep -E ', usb_name, _WAIT_LONG_DELAY,
                     'Not detecting %s' % usb_name)
                 result = result and found
         time.sleep(_WAIT_DELAY)
@@ -224,12 +231,8 @@
             board = self.host.get_board().split(':')[1].lower()
             # Run the usb check command
             for out_match in out_match_list:
-                # Skip running media_v4l2_test on hana boards
-                # crbug.com/820500
-                if 'media_v4l2_test' in cmd and board in ["hana"]:
-                    continue
                 match_result = self.wait_for_cmd_output(
-                    cmd, out_match, _WAIT_DELAY * 4,
+                    cmd, out_match, _WAIT_LONG_DELAY,
                     'USB CHECKS DETAILS failed at %s %s:' % (cmd, out_match))
                 usb_check_result = usb_check_result and match_result
         return usb_check_result
@@ -280,7 +283,7 @@
 
         # Collect USB peripherals when plugged
         self.plug_peripherals(True)
-        time.sleep(_WAIT_DELAY * 2)
+        time.sleep(_WAIT_LONG_DELAY)
         on_list = self.getPluggedUsbDevices()
 
         self.diff_list = set(on_list).difference(set(off_list))
@@ -292,22 +295,18 @@
         logging.debug('Connected devices list: %s', self.diff_list)
 
 
-    def prep_servo_for_test(self, stress_rack):
+    def prep_servo_for_test(self):
         """Connects servo to DUT  and sets servo ports
 
-        @param stress_rack: whether to prep servo for stress tests, where
-        the usb_mux_1 port should be on. For usb peripherals on usb_mux_3,
-        the port is on, and the oe2,oe4 ports are off.
-
         @returns port as string to plug/unplug the specific port
         """
-        port = _LOWER_USB_PORT
-        self.host.servo.switch_usbkey('dut')
-        self.host.servo.set('dut_hub1_rst1','off')
-        if stress_rack:
-            port = _UPPER_USB_PORT
-            self.host.servo.set(port, 'dut_sees_usbkey')
+        if 'servo_v4p1' in self.servo_type:
+            port = _AUX_USB_PORT
+            self.host.servo.set(_IMAGE_USB_PORT, 'servo_sees_usbkey')
         else:
+            port = _LOWER_USB_PORT
+            self.host.servo.switch_usbkey('dut')
+            self.host.servo.set('dut_hub1_rst1','off')
             self.host.servo.set(_UPPER_USB_PORT, 'servo_sees_usbkey')
             self.host.servo.set('usb_mux_oe2', 'off')
             self.host.servo.set('usb_mux_oe4', 'off')
@@ -319,14 +318,15 @@
         """Disconnect servo hub"""
         self.plug_peripherals(False)
         self.action_logout()
-        self.host.servo.set('dut_hub1_rst1','on')
+        if 'servo_v4p1' not in self.servo_type:
+            self.host.servo.set('dut_hub1_rst1','on')
         self.host.run('reboot now', ignore_status=True)
         self.host.test_wait_for_boot()
 
 
     def run_once(self, host, client_autotest, action_sequence, repeat,
                  usb_list=None, usb_checks=None,
-                 crash_check=False, stress_rack=False):
+                 crash_check=False):
         self.client_autotest = client_autotest
         self.host = host
         self.autotest_client = autotest.Autotest(self.host)
@@ -339,7 +339,9 @@
         self.fail_reasons = list()
         self.action_step = None
 
-        self.plug_port = self.prep_servo_for_test(stress_rack)
+        self.servo_type = self.host.servo.get_servo_type()
+
+        self.plug_port = self.prep_servo_for_test()
 
         # Unplug, plug, compare usb peripherals, and leave plugged.
         self.check_connected_peripherals()
@@ -354,11 +356,7 @@
         self.detect_crash = crash_detector.CrashDetector(self.host)
         self.detect_crash.remove_crash_files()
 
-        # Run camera client test to gather media_V4L2_test binary.
-        if 'media_v4l2_test' in str(self.usb_checks):
-            self.autotest_client.run_test("camera_V4L2")
-
-        for iteration in xrange(1, repeat + 1):
+        for iteration in range(1, repeat + 1):
             step = 0
             for action in actions:
                 step += 1
@@ -371,7 +369,7 @@
                     time.sleep(_WAIT_DELAY)
                 elif action == 'OPENLID':
                     self.open_lid(boot_id)
-                    time.sleep(_WAIT_DELAY)
+                    time.sleep(_WAIT_OPENLID_DELAY)
                 elif action == 'UNPLUG':
                     self.plug_peripherals(False)
                 elif action == 'PLUG':
@@ -392,7 +390,7 @@
                             logging.debug('Skipping logout. Not logged in.')
                     elif action == 'REBOOT':
                         self.host.reboot()
-                        time.sleep(_WAIT_DELAY * 3)
+                        time.sleep(_WAIT_LONG_DELAY)
                         self.login_status = False
                     elif action == 'SUSPEND':
                         boot_id = self.action_suspend()
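
The reworked prep_servo_for_test() above keys its mux setup off the servo type string instead of a stress_rack flag. A condensed sketch of that branch using the controls named in the hunk; the 'usb_mux_sel3' return value is an assumption about the module's lower-port constant, which is not shown in this hunk:

def pick_plug_port(servo):
    """Mirrors the servo_v4p1 branch added above; servo is host.servo."""
    if 'servo_v4p1' in servo.get_servo_type():
        # servo v4.1: route the image USB key to servo, plug/unplug via aux mux.
        servo.set('image_usbkey_mux', 'servo_sees_usbkey')
        return 'aux_usbkey_mux'
    # Older servos: keep the legacy hub and mux setup.
    servo.switch_usbkey('dut')
    servo.set('dut_hub1_rst1', 'off')
    servo.set('usb_mux_sel1', 'servo_sees_usbkey')   # _UPPER_USB_PORT
    servo.set('usb_mux_oe2', 'off')
    servo.set('usb_mux_oe4', 'off')
    return 'usb_mux_sel3'   # assumed value of _LOWER_USB_PORT
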
diff --git a/server/site_tests/platform_FetchCloudConfig/control b/server/site_tests/platform_FetchCloudConfig/control
new file mode 100644
index 0000000..86d923a
--- /dev/null
+++ b/server/site_tests/platform_FetchCloudConfig/control
@@ -0,0 +1,24 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "abergman, chromeos-engprod-platform-syd"
+NAME = "platform_FetchCloudConfig"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+
+DOC = """
+This test doesn't run any actual tests, but rather loads a fresh copy of the
+performance CUJ config from the cloud.
+
+This test is supposed to run at the beginning of the performance CUJ test
+suite, to ensure the cached configuration is always replaced with a fresh one.
+"""
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('platform_FetchCloudConfig', host=host, disable_sysinfo=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_FetchCloudConfig/platform_FetchCloudConfig.py b/server/site_tests/platform_FetchCloudConfig/platform_FetchCloudConfig.py
new file mode 100644
index 0000000..c4ecfb2
--- /dev/null
+++ b/server/site_tests/platform_FetchCloudConfig/platform_FetchCloudConfig.py
@@ -0,0 +1,47 @@
+# Lint as: python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+import logging
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.utils import labellib
+from autotest_lib.server import test
+
+
+class platform_FetchCloudConfig(test.test):
+    """Reload fresh performance CUJ cloud configuration from cloud."""
+    version = 1
+
+    def run_once(self, host):
+        devservers = dev_server.ImageServer.get_available_devservers()
+        devserver_url = devservers[0][0]
+        if devserver_url:
+            logging.info('Using devserver: %s', devserver_url)
+            labels = host.host_info_store.get().labels
+            build = labellib.LabelsMapping(labels).get(
+                    labellib.Key.CROS_VERSION)
+            if not build:
+                # Unable to detect the build; probably not running on Moblab.
+                raise error.TestFail('Unable to stage config on devserver %s, '
+                                     'probably not running in Moblab.' %
+                                     devserver_url)
+            ds = dev_server.ImageServer(devserver_url)
+            gs_bucket = dev_server._get_image_storage_server()
+            if gs_bucket:
+                config_path = 'config/perf_cuj/'
+                config_file = 'perf_cuj.config'
+                archive_url = gs_bucket + config_path
+                logging.info('Staging configuration from %s.', gs_bucket)
+                kwargs = {'clean': True}
+                ds.stage_artifacts(build,
+                                   archive_url=archive_url,
+                                   files=[config_file],
+                                   **kwargs)
+            else:
+                raise error.TestFail(
+                        'Invalid GS bucket %s for devserver %s.' %
+                        (gs_bucket, devserver_url))
diff --git a/server/site_tests/platform_FlashErasers/control b/server/site_tests/platform_FlashErasers/control
index 5f15980..ab67893 100644
--- a/server/site_tests/platform_FlashErasers/control
+++ b/server/site_tests/platform_FlashErasers/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_FlashErasers"
 PURPOSE = "Test flashrom erasing blocks of various sizes."
 ATTRIBUTES = "suite:faft_flashrom"
@@ -13,6 +13,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 DEPENDENCIES = "flashrom, servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test should be executed on every flash chip and SPI controller
diff --git a/server/site_tests/platform_FlashErasers/platform_FlashErasers.py b/server/site_tests/platform_FlashErasers/platform_FlashErasers.py
index 51fb47c..80b9906 100644
--- a/server/site_tests/platform_FlashErasers/platform_FlashErasers.py
+++ b/server/site_tests/platform_FlashErasers/platform_FlashErasers.py
@@ -130,8 +130,9 @@
 
             # Now program the corrupted image, this would involve erasing the
             # section of test_size bytes.
-            self.run_cmd('flashrom -w %s --diff %s --fast-verify' %
+            self.run_cmd('flashrom -w %s --flash-contents %s --noverify-all' %
                          (junk_image, bios_image))
 
             # Now restore the image.
-            self.run_cmd('flashrom -w %s --diff %s' % (bios_image, junk_image))
+            self.run_cmd('flashrom -w %s --flash-contents %s' %
+                         (bios_image, junk_image))
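
For context on the flag change above: --flash-contents <file> tells flashrom what the chip is assumed to currently contain, so only the differing regions get erased and rewritten, and --noverify-all limits verification to the regions that were written; the --diff/--fast-verify spellings being replaced come from the older ChromiumOS flashrom. A sketch of the two write commands with the image roles spelled out (the paths are placeholders; the test runs these via its own run_cmd helper):

# Placeholder paths standing in for the images the test reads and corrupts.
bios_image = '/tmp/bios_image.bin'   # pristine firmware read back earlier
junk_image = '/tmp/junk_image.bin'   # copy with the section under test corrupted

# Write the corrupted image.  --flash-contents says the chip currently holds
# bios_image, so only the differing (corrupted) section is erased/rewritten;
# --noverify-all skips verification of the untouched regions.
write_junk = ('flashrom -w %s --flash-contents %s --noverify-all'
              % (junk_image, bios_image))

# Restore.  Now the chip is assumed to hold junk_image, so again only the
# modified section is erased and rewritten.
restore = 'flashrom -w %s --flash-contents %s' % (bios_image, junk_image)
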
diff --git a/server/site_tests/platform_Flashrom/control b/server/site_tests/platform_Flashrom/control
index 95ca66a..ba99afa 100644
--- a/server/site_tests/platform_Flashrom/control
+++ b/server/site_tests/platform_Flashrom/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_Flashrom"
 PURPOSE = "Test flashrom in chromeos-firmwareupdate."
 ATTRIBUTES = "suite:faft_flashrom"
@@ -13,6 +13,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 DEPENDENCIES = "flashrom, servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test should be executed on one board per type and on every FSI image to
diff --git a/server/site_tests/platform_HWwatchdog/control b/server/site_tests/platform_HWwatchdog/control
index 001a75c..55e3b14 100644
--- a/server/site_tests/platform_HWwatchdog/control
+++ b/server/site_tests/platform_HWwatchdog/control
@@ -3,12 +3,13 @@
 # found in the LICENSE file.
 
 NAME = "platform_HWwatchdog"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_regression"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test uses the hardware watchdog interface to make sure a machine
diff --git a/server/site_tests/platform_HWwatchdog/platform_HWwatchdog.py b/server/site_tests/platform_HWwatchdog/platform_HWwatchdog.py
index 23f4b5d..46a370e 100644
--- a/server/site_tests/platform_HWwatchdog/platform_HWwatchdog.py
+++ b/server/site_tests/platform_HWwatchdog/platform_HWwatchdog.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,8 +9,6 @@
 
 from __future__ import print_function
 
-import logging
-
 # http://docs.python.org/2/library/errno.html
 import errno
 
diff --git a/server/site_tests/platform_ImageLoaderServer/control b/server/site_tests/platform_ImageLoaderServer/control
index ff4655c..46b344e 100644
--- a/server/site_tests/platform_ImageLoaderServer/control
+++ b/server/site_tests/platform_ImageLoaderServer/control
@@ -9,6 +9,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:bvt-perbuild"
+PY_VERSION = 3
 
 DOC = """
 This test runs ImageLoader through its various dbus and command line
diff --git a/server/site_tests/platform_ImageLoaderServer/platform_ImageLoaderServer.py b/server/site_tests/platform_ImageLoaderServer/platform_ImageLoaderServer.py
index be35cea..a65f7dd 100644
--- a/server/site_tests/platform_ImageLoaderServer/platform_ImageLoaderServer.py
+++ b/server/site_tests/platform_ImageLoaderServer/platform_ImageLoaderServer.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/platform_InitLoginPerfServer/control b/server/site_tests/platform_InitLoginPerfServer/control
index 5b88b6b..7aa228b 100644
--- a/server/site_tests/platform_InitLoginPerfServer/control
+++ b/server/site_tests/platform_InitLoginPerfServer/control
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:crosbolt_perf_weekly"
+PY_VERSION = 3
 
 DOC = """
 This test clears the tpm owner, then goes through OOBE and verifies that the
diff --git a/server/site_tests/platform_InitLoginPerfServer/platform_InitLoginPerfServer.py b/server/site_tests/platform_InitLoginPerfServer/platform_InitLoginPerfServer.py
index b7855b3..353c876 100644
--- a/server/site_tests/platform_InitLoginPerfServer/platform_InitLoginPerfServer.py
+++ b/server/site_tests/platform_InitLoginPerfServer/platform_InitLoginPerfServer.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -75,10 +76,10 @@
 
     def save_perf_data(self):
         """Extract perf data from client-side test results."""
-        for bmname, bm in BENCHMARKS.iteritems():
+        for bmname, bm in BENCHMARKS.items():
             try:
                 self.perf_results[bmname].append(
-                        self.client_results[bm['stage']][bm['name']])
+                        float(self.client_results[bm['stage']][bm['name']]))
             except:
                 logging.warning('Failed to extract %s from client results',
                                 bmname)
@@ -105,7 +106,7 @@
     def display_perf_headers(self):
         """Add headers for the results table to the info log."""
         hdr = "# "
-        for bm in BENCHMARKS.itervalues():
+        for bm in BENCHMARKS.values():
             hdr += bm['display'] + ' '
         logging.info('# Results for delay = %.2f sec', self.pre_init_delay)
         logging.info(hdr)
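
The hunk above replaces the Python-2-only iteritems()/itervalues() with items()/values(), which work on both Python 2 and 3, and coerces the client-reported values to float before recording them. A standalone sketch of the same pattern, with made-up benchmark and result values:

    # Hypothetical data; only the iteration/coercion pattern mirrors the test.
    BENCHMARKS = {
            'login': {'stage': 'login', 'name': 'seconds_to_login',
                      'display': 'login'},
    }
    client_results = {'login': {'seconds_to_login': '8.42'}}
    perf_results = {name: [] for name in BENCHMARKS}

    for bmname, bm in BENCHMARKS.items():
        try:
            # Client results may arrive as strings, hence the explicit float().
            perf_results[bmname].append(
                    float(client_results[bm['stage']][bm['name']]))
        except (KeyError, ValueError):
            pass  # The test logs a warning here instead.
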
diff --git a/server/site_tests/platform_InstallFW/control b/server/site_tests/platform_InstallFW/control
deleted file mode 100644
index 613cc1d..0000000
--- a/server/site_tests/platform_InstallFW/control
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_InstallFW"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Install"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test install the BIOS/EC on a specified device.
-This is useful for testing firmware.
-Here is the command to install a given firmware, which is either a raw binary
-or a shellball:
-  test_that --args='fw_path=<PATH_TO_FW_FILE>
-                    fw_name=<NAME_OF_THE_FILE>'
-                    fw_type=<TYPE_OF_FW>
-            --board=<board name>
-            <IP addres>
-            platform_InstallFW
-or install the local shellball (/usr/sbin/chromeos-firmwareupdate) in the
-current client image:
-  test_that --args='fw_path=local
-                    fw_type=<bios/ec>'
-            --board=<board name>
-            <IP addres>
-            platform_InstallFW
-"""
-
-import os
-from autotest_lib.client.common_lib import error
-
-# Convert autoserv args to something usable.
-opts = dict([[k, v] for (k, _, v) in [x.partition('=') for x in args]])
-
-def run_installfw(machine):
-    # Verify FW type in arg.
-    if 'fw_type' not in opts:
-        raise error.TestFail('No --fw_type specified')
-    # Verify fw_type value either 'bios' or 'ec'.
-    if not (opts['fw_type'] == 'bios' or opts['fw_type'] == 'ec'):
-        raise error.TestFail('Wrong --fw_type specified.  '
-                             'Correct FW Options are bios or ec.')
-    # Verify FW path arg.
-    if 'fw_path' not in opts:
-        raise error.TestFail('No --fw_path specified')
-    # Verify fw_name.
-    if 'fw_name' not in opts:
-        if opts['fw_path'] == 'local':
-            opts['fw_name'] = None
-        elif os.path.isfile(opts['fw_path']):
-            opts['fw_name'] = os.path.basename(opts['fw_path'])
-            opts['fw_path'] = os.path.dirname(opts['fw_path'])
-        else:
-            raise error.TestFail('No --fw_name specified')
-    # Setup the client machine.
-    host = hosts.create_host(machine)
-    job.run_test("platform_InstallFW", host=host, fw_path=opts['fw_path'],
-                 fw_type=opts['fw_type'], fw_name=opts['fw_name'],
-                 disable_sysinfo=True)
-
-parallel_simple(run_installfw, machines)
diff --git a/server/site_tests/platform_InstallFW/platform_InstallFW.py b/server/site_tests/platform_InstallFW/platform_InstallFW.py
deleted file mode 100644
index 2d59ee0..0000000
--- a/server/site_tests/platform_InstallFW/platform_InstallFW.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-from autotest_lib.server import test, utils
-
-class platform_InstallFW(test.test):
-    """Test to install FW on DUT"""
-    version = 1
-
-    def run_once(self, host=None, fw_type=None, fw_path=None, fw_name=None):
-        """Run test to install firmware.
-
-        @param host: host to run on
-        @param fw_type: must be either "bios" or "ec"
-        @param fw_path: path to fw binary or set to "local"
-        @param fw_name: (optional) name of binary file
-        """
-
-        if fw_path == "local":
-            fw_dst = "/usr/sbin/chromeos-firmwareupdate"
-            is_shellball = True
-        else:
-            fw_src = "%s/%s" % (fw_path, fw_name)
-            # Determine the firmware file is a shellball or a raw binary.
-            is_shellball = (utils.system_output("file %s" % fw_src).find(
-                    "shell script") != -1)
-            fw_dst = "/tmp/%s" % fw_name
-            # Copy binary from server to client.
-            host.send_file(fw_src, fw_dst)
-
-        # Install bios/ec on a client.
-        if fw_type == "bios":
-            if is_shellball:
-                host.run("sudo /bin/sh %s --mode recovery --update_main "
-                         "--noupdate_ec" % fw_dst)
-            else:
-                host.run("sudo /usr/sbin/flashrom -p host -w %s"
-                         % fw_dst)
-        if fw_type == "ec":
-            if is_shellball:
-                host.run("sudo /bin/sh %s --mode recovery --update_ec "
-                         "--noupdate_main" % fw_dst)
-            else:
-                host.run("sudo /usr/sbin/flashrom -p ec -w %s"
-                         % fw_dst)
-        # Reboot client after installing the binary.
-        host.reboot()
-        # Get the versions of BIOS and EC binaries.
-        bios_info = host.run("crossystem fwid")
-        logging.info("BIOS version info:\n %s", bios_info)
-        ec_info = host.run("sudo mosys -k ec info")
-        logging.info("EC version info:\n %s", ec_info)
diff --git a/server/site_tests/platform_InstallRecoveryImage/control b/server/site_tests/platform_InstallRecoveryImage/control
deleted file mode 100644
index f095b33..0000000
--- a/server/site_tests/platform_InstallRecoveryImage/control
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_InstallRecoveryImage"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Install"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test installs a specified recovery image onto a servo-connected DUT.
-This is useful for testing the recovery process and imaging servo-connected
-Devices under Test (DUTs).
-Here is the command to install a recovery image;
-test_that --args='image=<recovery_image_path_on_host>'
-          --board=<board name>
-          <IP addres>
-          platform_InstallRecoveryImage
-"""
-
-args_dict = utils.args_to_dict(args)
-image = args_dict.get('image', None)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    # Setup the client machine.
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.fast = True
-    job.run_test("platform_InstallRecoveryImage", host=host,
-                 disable_sysinfo=True, image=image, local=False)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_InstallRecoveryImage/control.local b/server/site_tests/platform_InstallRecoveryImage/control.local
deleted file mode 100644
index b93e36c..0000000
--- a/server/site_tests/platform_InstallRecoveryImage/control.local
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_InstallRecoveryImage.local"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Install"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test installs a recovery image that's already on the usb stick attached to
-a servo onto a servo-connected DUT.
-This is useful for testing the recovery process and imaging servo-connected
-Devices under Test (DUTs).
-Here is the command to install a recovery image;
-test_that --board=<board name> <IP addres> platform_InstallRecoveryImage.local
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    # Setup the client machine.
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.fast = True
-    job.run_test("platform_InstallRecoveryImage", host=host,
-                 disable_sysinfo=True, image=None, local=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_InstallRecoveryImage/platform_InstallRecoveryImage.py b/server/site_tests/platform_InstallRecoveryImage/platform_InstallRecoveryImage.py
deleted file mode 100755
index e7e688b..0000000
--- a/server/site_tests/platform_InstallRecoveryImage/platform_InstallRecoveryImage.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.server import test
-from autotest_lib.client.common_lib import error
-
-class platform_InstallRecoveryImage(test.test):
-    """Installs a specified recovery image onto a servo-connected DUT."""
-    version = 1
-
-    _RECOVERY_INSTALL_DELAY = 540
-
-    def initialize(self, host, image=None, local=False):
-        """Setup image path.
-
-        @param host: Host object representing DUT to be re-imaged.
-        @param image_url: URL of a test image to be installed.
-        @param local: bool indicating it's a local run with an image already
-                      on the usb stick of the servo
-        """
-        self.local = local
-        if not self.local:
-            self.image = image
-            if not self.image:
-                raise error.TestFail('No image path provided, and not run '
-                                     'with .local flag to indicate image '
-                                     'already on the stick.')
-
-    def run_once(self, host):
-        """Install recovery image on |host|.
-
-        @param host: Host object representing DUT to be re-imaged.
-        """
-        if self.local:
-            # This indicates the image is already on the stick before
-            # the test starts, so the only thing required is to boot
-            # in recovery.
-            host.servo.boot_in_recovery_mode()
-        else:
-            # In this phase, the image is a provided path.
-            host.servo.install_recovery_image(self.image,
-                                              make_image_noninteractive=True)
-        logging.info('Running the recovery process on the DUT. '
-                     'Will wait up to %d seconds for recovery to '
-                     'complete.', self._RECOVERY_INSTALL_DELAY)
-        start_time = time.time()
-        # Wait for the host to come up.
-        if host.ping_wait_up(timeout=self._RECOVERY_INSTALL_DELAY):
-            logging.info('Recovery process completed successfully in '
-                         '%d seconds.', time.time() - start_time)
-        else:
-            raise error.TestFail('Host failed to come back up after '
-                                 '%d seconds.' % self._RECOVERY_INSTALL_DELAY)
-        logging.info('Removing the usb key from the DUT.')
-        host.servo.switch_usbkey('host')
diff --git a/server/site_tests/platform_InstallTestImage/control b/server/site_tests/platform_InstallTestImage/control
deleted file mode 100644
index a61b2d8..0000000
--- a/server/site_tests/platform_InstallTestImage/control
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.cros import dev_server
-from autotest_lib.server import host_attributes
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_InstallTestImage"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Install"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test installs a specified test image onto a servo-connected DUT.
-The principle purpose is to allow installing a known-good image onto
-a wedged unit that would otherwise have to be re-imaged manually.
-
-Here is the command to install a recovery image with a locally attached
-servo:
-    test_that -b ${BOARD} ${IP_ADDRESS} \
-        --args="image=$IMAGE_PATH" platform_InstallTestImage
-
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-image_url = args_dict.get("image", None)
-
-def run(machine):
-    # Setup the client machine.
-    host = hosts.create_host(machine, servo_args=servo_args)
-    # If we're invoked from test_that, the user can pass an
-    # optional "image" argument.  If it's omitted, we want to pass
-    # `None` to the test.
-    job.fast = True
-    job.run_test("platform_InstallTestImage", host=host, local=False,
-                 disable_sysinfo=True, image_url=image_url)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_InstallTestImage/control.local b/server/site_tests/platform_InstallTestImage/control.local
deleted file mode 100644
index eb7f702..0000000
--- a/server/site_tests/platform_InstallTestImage/control.local
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.cros import dev_server
-from autotest_lib.server import host_attributes
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_InstallTestImage.local"
-ATTRIBUTES = "suite:servo_lab"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Install"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test installs an image that's already on the servo usb stick onto a
-servo-connected DUT.
-The principle purpose is to allow installing a known-good image onto
-a wedged unit that would otherwise have to be re-imaged manually.
-
-Here is the command to install a recovery image with a locally attached
-servo:
-    test_that -b ${BOARD} ${IP_ADDRESS} platform_InstallTestImage.local
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-image_url = args_dict.get("image", None)
-
-def run(machine):
-    # Setup the client machine.
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.fast = True
-    job.run_test("platform_InstallTestImage", host=host, local=True,
-                 disable_sysinfo=True, image_url=image_url)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_InstallTestImage/platform_InstallTestImage.py b/server/site_tests/platform_InstallTestImage/platform_InstallTestImage.py
deleted file mode 100755
index de50bbd..0000000
--- a/server/site_tests/platform_InstallTestImage/platform_InstallTestImage.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.server import test
-
-class platform_InstallTestImage(test.test):
-    """Installs a specified test image onto a servo-connected DUT."""
-    version = 1
-
-    def initialize(self, host, image_url=None, local=False):
-        """ Setup image url to install the image..
-
-        @param host: Host object representing DUT to be re-imaged.
-        @param image_url: URL of a test image to be installed.
-        @param local: bool indicating it's a local run with an image already
-                      on the usb stick of the servo
-        """
-        self.local = local
-        # A 'None' value here indicates to servo below to use the image
-        # on the stick.
-        self.image_url = None
-        if not self.local:
-            self.image_url = image_url
-            if self.image_url is None:
-                image_name = host.get_cros_repair_image_name()
-                # Succeeded, so stage the build and get its URL.
-                # N.B. Failures from staging the build at this point
-                # are fatal by design.
-                _, self.image_url = host.stage_image_for_servo(image_name)
-                logging.info("Using staged image:  %s", image_url)
-
-
-    def run_once(self, host):
-        """Install image from URL |self.image_url| on |host|.
-
-        @param host Host object representing DUT to be re-imaged.
-        """
-        host.servo_install(image_url=self.image_url)
diff --git a/server/site_tests/platform_InternalDisplay/control b/server/site_tests/platform_InternalDisplay/control
deleted file mode 100644
index 31b7402..0000000
--- a/server/site_tests/platform_InternalDisplay/control
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved 2014.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_InternalDisplay"
-PURPOSE = "Check if chromebook is initialized with internal dispalyt"
-CRITERIA = "This test will fail if internal display is not set"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:bvt-perbuild"
-REQUIRE_SSP = False
-
-
-DOC = """
-This test catches internal display not initialized for devices with internal display - crbug/508569.
-It's checked after 1) reboot and 2) suspend/resume.
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_InternalDisplay", host=host,
-                 disable_sysinfo=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_InternalDisplay/platform_InternalDisplay.py b/server/site_tests/platform_InternalDisplay/platform_InternalDisplay.py
deleted file mode 100644
index e38ed42..0000000
--- a/server/site_tests/platform_InternalDisplay/platform_InternalDisplay.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import print_function
-
-import logging, threading
-import time
-
-from autotest_lib.server import test
-from autotest_lib.client.common_lib import error
-
-_CHROME_PATH = '/opt/google/chrome/chrome'
-_LONG_TIMEOUT = 120
-_DO_NOT_RUN_ON_TYPE = ['CHROMEBOX', 'CHROMEBIT', 'OTHER']
-_DO_NOT_RUN_ON_BOARD = ['monroe']
-_SLEEP_BEFORE_SUSPEND_SEC = 5
-
-class platform_InternalDisplay(test.test):
-    version = 1
-
-    def run_suspend(self):
-        """Suspend i.e. powerd_dbus_suspend and wait
-
-        @returns boot_id for the following resume
-
-        """
-        boot_id = self.host.get_boot_id()
-        thread = threading.Thread(target = self.host.suspend)
-        thread.start()
-        self.host.test_wait_for_sleep(_LONG_TIMEOUT)
-        return boot_id
-
-    def run_once(self,host):
-
-        self.host = host
-
-        board_type = self.host.get_board_type()
-        if board_type in _DO_NOT_RUN_ON_TYPE:
-            raise error.TestNAError('DUT is %s type. Test Skipped' %board_type)
-
-        board = self.host.get_board().split(':')[-1]
-        logging.info(board)
-        if board in _DO_NOT_RUN_ON_BOARD:
-            raise error.TestNAError(
-                'Monroe does not have internal display. Test Skipped')
-
-        self.host.reboot()
-        if self.host.has_internal_display() != 'internal_display':
-            raise error.TestFail('Internal display is missing after reboot.')
-
-        time.sleep(_SLEEP_BEFORE_SUSPEND_SEC)
-        boot_id = self.run_suspend()
-        logging.info('DUT suspended')
-        self.host.test_wait_for_resume(boot_id, _LONG_TIMEOUT)
-        logging.info('DUT resumed')
-        if self.host.has_internal_display() != 'internal_display':
-            raise error.TestFail(
-                'Internal display is missing after suspend & resume.')
-
diff --git a/server/site_tests/platform_KernelErrorPaths/control.ALL b/server/site_tests/platform_KernelErrorPaths/control.ALL
index 15277ac..0f6ef1c 100644
--- a/server/site_tests/platform_KernelErrorPaths/control.ALL
+++ b/server/site_tests/platform_KernelErrorPaths/control.ALL
@@ -3,17 +3,18 @@
 # found in the LICENSE file.
 
 NAME = "platform_KernelErrorPaths.ALL"
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
-This test uses /proc/breakme to cause the machine to have various
-serious failures (kernel panic, kernel bug, deadlock, etc.), and then
-check to make sure that these failures were properly logged and had
-appropritate consequences (reboot, logging, etc.).
+This test uses lkdtm to cause the machine to have various serious failures
+(kernel panic, kernel bug, deadlock, etc.), and then check to make sure that
+these failures were properly logged and had appropriate consequences (reboot,
+logging, etc.).
 
 Pass the kernel crashes as a comma-delimited string of crash types.
 """
diff --git a/server/site_tests/platform_KernelErrorPaths/control.BUG b/server/site_tests/platform_KernelErrorPaths/control.BUG
index 8ab8b8a..d9a8623 100644
--- a/server/site_tests/platform_KernelErrorPaths/control.BUG
+++ b/server/site_tests/platform_KernelErrorPaths/control.BUG
@@ -3,18 +3,19 @@
 # found in the LICENSE file.
 
 NAME = "platform_KernelErrorPaths.BUG"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_regression"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
-This test uses /proc/breakme to cause the machine to have various
-serious failures (kernel panic, kernel bug, deadlock, etc.), and then
-check to make sure that these failures were properly logged and had
-appropritate consequences (reboot, logging, etc.).
+This test uses lkdtm to cause the machine to have various serious failures
+(kernel panic, kernel bug, deadlock, etc.), and then check to make sure that
+these failures were properly logged and had appropriate consequences (reboot,
+logging, etc.).
 
 Pass the kernel crashes as a comma-delimited string of crash types.
 """
diff --git a/server/site_tests/platform_KernelErrorPaths/control.CORRUPT_STACK b/server/site_tests/platform_KernelErrorPaths/control.CORRUPT_STACK
index ef796b9..f2ed949 100644
--- a/server/site_tests/platform_KernelErrorPaths/control.CORRUPT_STACK
+++ b/server/site_tests/platform_KernelErrorPaths/control.CORRUPT_STACK
@@ -3,18 +3,19 @@
 # found in the LICENSE file.
 
 NAME = "platform_KernelErrorPaths.CORRUPT_STACK"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_regression, suite:partners"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel, suite:partners"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
-This test uses /proc/breakme to cause the machine to have various
-serious failures (kernel panic, kernel bug, deadlock, etc.), and then
-check to make sure that these failures were properly logged and had
-appropritate consequences (reboot, logging, etc.).
+This test uses lkdtm to cause the machine to have various serious failures
+(kernel panic, kernel bug, deadlock, etc.), and then check to make sure that
+these failures were properly logged and had appropriate consequences (reboot,
+logging, etc.).
 
 Pass the kernel crashes as a comma-delimited string of crash types.
 """
diff --git a/server/site_tests/platform_KernelErrorPaths/control.EXCEPTION b/server/site_tests/platform_KernelErrorPaths/control.EXCEPTION
index 1a20391..41fd54b 100644
--- a/server/site_tests/platform_KernelErrorPaths/control.EXCEPTION
+++ b/server/site_tests/platform_KernelErrorPaths/control.EXCEPTION
@@ -3,18 +3,19 @@
 # found in the LICENSE file.
 
 NAME = "platform_KernelErrorPaths.EXCEPTION"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_regression, suite:partners"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel, suite:partners"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
-This test uses /proc/breakme to cause the machine to have various
-serious failures (kernel panic, kernel bug, deadlock, etc.), and then
-check to make sure that these failures were properly logged and had
-appropritate consequences (reboot, logging, etc.).
+This test uses lkdtm to cause the machine to have various serious failures
+(kernel panic, kernel bug, deadlock, etc.), and then check to make sure that
+these failures were properly logged and had appropriate consequences (reboot,
+logging, etc.).
 
 Pass the kernel crashes as a comma-delimited string of crash types.
 """
diff --git a/server/site_tests/platform_KernelErrorPaths/control.HARDLOCKUP b/server/site_tests/platform_KernelErrorPaths/control.HARDLOCKUP
index 7369fca..c7eaea7 100644
--- a/server/site_tests/platform_KernelErrorPaths/control.HARDLOCKUP
+++ b/server/site_tests/platform_KernelErrorPaths/control.HARDLOCKUP
@@ -3,18 +3,19 @@
 # found in the LICENSE file.
 
 NAME = "platform_KernelErrorPaths.HARDLOCKUP"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_regression, suite:partners"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel, suite:partners"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
-This test uses /proc/breakme to cause the machine to have various
-serious failures (kernel panic, kernel bug, deadlock, etc.), and then
-check to make sure that these failures were properly logged and had
-appropritate consequences (reboot, logging, etc.).
+This test uses lkdtm to cause the machine to have various serious failures
+(kernel panic, kernel bug, deadlock, etc.), and then check to make sure that
+these failures were properly logged and had appropriate consequences (reboot,
+logging, etc.).
 
 Pass the kernel crashes as a comma-delimited string of crash types.
 """
diff --git a/server/site_tests/platform_KernelErrorPaths/control.HUNG_TASK b/server/site_tests/platform_KernelErrorPaths/control.HUNG_TASK
index 62f4d69..8e019c1 100644
--- a/server/site_tests/platform_KernelErrorPaths/control.HUNG_TASK
+++ b/server/site_tests/platform_KernelErrorPaths/control.HUNG_TASK
@@ -3,18 +3,19 @@
 # found in the LICENSE file.
 
 NAME = "platform_KernelErrorPaths.HUNG_TASK"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_regression, suite:partners"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel, suite:partners"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
-This test uses /proc/breakme to cause the machine to have various
-serious failures (kernel panic, kernel bug, deadlock, etc.), and then
-check to make sure that these failures were properly logged and had
-appropritate consequences (reboot, logging, etc.).
+This test uses lkdtm to cause the machine to have various serious failures
+(kernel panic, kernel bug, deadlock, etc.), and then check to make sure that
+these failures were properly logged and had appropriate consequences (reboot,
+logging, etc.).
 
 Pass the kernel crashes as a comma-delimited string of crash types.
 """
diff --git a/server/site_tests/platform_KernelErrorPaths/control.PANIC b/server/site_tests/platform_KernelErrorPaths/control.PANIC
index 9d56af6..a97ab39 100644
--- a/server/site_tests/platform_KernelErrorPaths/control.PANIC
+++ b/server/site_tests/platform_KernelErrorPaths/control.PANIC
@@ -3,18 +3,19 @@
 # found in the LICENSE file.
 
 NAME = "platform_KernelErrorPaths.PANIC"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_regression, suite:partners"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel, suite:partners"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
-This test uses /proc/breakme to cause the machine to have various
-serious failures (kernel panic, kernel bug, deadlock, etc.), and then
-check to make sure that these failures were properly logged and had
-appropritate consequences (reboot, logging, etc.).
+This test uses lkdtm to cause the machine to have various serious failures
+(kernel panic, kernel bug, deadlock, etc.), and then check to make sure that
+these failures were properly logged and had appropriate consequences (reboot,
+logging, etc.).
 
 Pass the kernel crashes as a comma-delimited string of crash types.
 """
diff --git a/server/site_tests/platform_KernelErrorPaths/control.SOFTLOCKUP b/server/site_tests/platform_KernelErrorPaths/control.SOFTLOCKUP
index e1b9289..ddc52fe 100644
--- a/server/site_tests/platform_KernelErrorPaths/control.SOFTLOCKUP
+++ b/server/site_tests/platform_KernelErrorPaths/control.SOFTLOCKUP
@@ -3,18 +3,19 @@
 # found in the LICENSE file.
 
 NAME = "platform_KernelErrorPaths.SOFTLOCKUP"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_regression, suite:partners"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel, suite:partners"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
-This test uses /proc/breakme to cause the machine to have various
-serious failures (kernel panic, kernel bug, deadlock, etc.), and then
-check to make sure that these failures were properly logged and had
-appropritate consequences (reboot, logging, etc.).
+This test uses lkdtm to cause the machine to have various serious failures
+(kernel panic, kernel bug, deadlock, etc.), and then check to make sure that
+these failures were properly logged and had appropriate consequences (reboot,
+logging, etc.).
 
 Pass the kernel crashes as a comma-delimited string of crash types.
 """
diff --git a/server/site_tests/platform_KernelErrorPaths/control.SPINLOCKUP b/server/site_tests/platform_KernelErrorPaths/control.SPINLOCKUP
index 8ea9638..1269e2a 100644
--- a/server/site_tests/platform_KernelErrorPaths/control.SPINLOCKUP
+++ b/server/site_tests/platform_KernelErrorPaths/control.SPINLOCKUP
@@ -3,18 +3,19 @@
 # found in the LICENSE file.
 
 NAME = "platform_KernelErrorPaths.SPINLOCKUP"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_regression, suite:partners"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel, suite:partners"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
-This test uses /proc/breakme to cause the machine to have various
-serious failures (kernel panic, kernel bug, deadlock, etc.), and then
-check to make sure that these failures were properly logged and had
-appropritate consequences (reboot, logging, etc.).
+This test uses lkdtm to cause the machine to have various serious failures
+(kernel panic, kernel bug, deadlock, etc.), and then check to make sure that
+these failures were properly logged and had appropriate consequences (reboot,
+logging, etc.).
 
 Pass the kernel crashes as a comma-delimited string of crash types.
 """
diff --git a/server/site_tests/platform_KernelErrorPaths/control.SYSRQ_X b/server/site_tests/platform_KernelErrorPaths/control.SYSRQ_X
index c948dd6..60bbf5f 100644
--- a/server/site_tests/platform_KernelErrorPaths/control.SYSRQ_X
+++ b/server/site_tests/platform_KernelErrorPaths/control.SYSRQ_X
@@ -3,18 +3,19 @@
 # found in the LICENSE file.
 
 NAME = "platform_KernelErrorPaths.SYSRQ_X"
-AUTHOR = "Chrome OS Team"
-ATTRIBUTES = "suite:kernel_per-build_regression, suite:partners"
+AUTHOR = "ChromeOS Team"
+ATTRIBUTES = "suite:kernel_per-build_regression, suite:pvs-kernel, suite:partners"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
-This test uses /proc/breakme to cause the machine to have various
-serious failures (kernel panic, kernel bug, deadlock, etc.), and then
-check to make sure that these failures were properly logged and had
-appropritate consequences (reboot, logging, etc.).
+This test uses lkdtm to cause the machine to have various serious failures
+(kernel panic, kernel bug, deadlock, etc.), and then check to make sure that
+these failures were properly logged and had appropriate consequences (reboot,
+logging, etc.).
 
 Pass the kernel crashes as a comma-delimited string of crash types.
 """
diff --git a/server/site_tests/platform_KernelErrorPaths/platform_KernelErrorPaths.py b/server/site_tests/platform_KernelErrorPaths/platform_KernelErrorPaths.py
index 1bb38a1..794c5e6 100644
--- a/server/site_tests/platform_KernelErrorPaths/platform_KernelErrorPaths.py
+++ b/server/site_tests/platform_KernelErrorPaths/platform_KernelErrorPaths.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -22,7 +23,7 @@
     def _run_client_command(self, command):
         try:
             # Simply sending the trigger into lkdtm resets the target
-            # immediately, leaving files unsaved to disk and the master ssh
+            # immediately, leaving files unsaved to disk and the ssh
             # connection wedged for a long time.
             self.client.run(
                 'sh -c "sync; sleep 1; %s" >/dev/null 2>&1 &' % command)
@@ -223,20 +224,18 @@
         Test the kernel panic paths.
         """
 
-        # Figure out which kernel crash interface is available.
         interface = "/sys/kernel/debug/provoke-crash/DIRECT"
         trigger = lkdtm
-        breakme, timeout, all_cpu, text = kcrash_tuple
+        timeout, all_cpu, text = kcrash_tuple
         if not self._exists_on_client(interface):
-            interface = "/proc/breakme"
-            trigger = breakme
-            logging.info("Falling back to %s", interface)
+            raise error.TestFail('Missing interface "%s"' % interface)
 
         # Find out how many cpus we have
-        client_cpus = map(lambda x: int(x),
-            self.client.run(
-                'cat /proc/cpuinfo | grep processor | cut -f 2 -d :')
-                .stdout.split())
+        client_cpus = [
+                int(x) for x in self.client.run(
+                        'cat /proc/cpuinfo | grep processor | cut -f 2 -d :').
+                stdout.split()
+        ]
 
         # Skip any triggers that are undefined for the given interface.
         if trigger == None:
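
With the /proc/breakme fallback removed, the test now requires the lkdtm DIRECT interface and fails early if it is absent. A rough sketch of how a crash is provoked through that interface (the run() call mirrors the style of the test's client object shown earlier; the test's exact command construction differs):

    # Illustrative only. Writing a crash type name (e.g. PANIC, BUG) to the
    # lkdtm DIRECT node typically resets the DUT immediately, so the command
    # is detached and preceded by a sync, as in the test's helper above.
    LKDTM_IFACE = '/sys/kernel/debug/provoke-crash/DIRECT'

    def provoke_crash(client, trigger):
        client.run('sh -c "sync; sleep 1; echo %s > %s" >/dev/null 2>&1 &'
                   % (trigger, LKDTM_IFACE))
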
@@ -283,31 +282,33 @@
 
         # kcrash data is given by a dictionary with key lkdtm string to write
         # to /sys/kernel/debug/provoke-crash/DIRECT on the target. The dict
-        # value is a tuple containing 1) the string to write to /proc/breakme.
-        # if lkdtm is not available, 2) the timeout, and 3)whether we run
-        # the tests on all CPUs or not. Some tests take less to run than other
+        # value is a tuple containing 1) the timeout and 2) whether we run the
+        # tests on all CPUs or not. Some tests take less to run than other
         # (null pointer and panic) so it would be best if we would run them on
-        # all the CPUS as it wouldn't add that much time to the total.
-        # The final component is the crash report string to look for in the
-        # crash dump after target restarts.
+        # all the CPUs as it wouldn't add that much time to the total. The
+        # final component is the crash report string to look for in the crash
+        # dump after target restarts.
         kcrash_types = {
-            'BUG' : ('bug', 10, False, ('kernel BUG at', 'BUG: failure at')),
-            'HUNG_TASK' : ('hungtask', 300, False, 'hung_task: blocked tasks'),
-            'SOFTLOCKUP' : (None, 25, False, 'BUG: soft lockup'),
-            'HARDLOCKUP' : ('nmiwatchdog', 50, False,
-                            'Watchdog detected hard LOCKUP'),
-            'SPINLOCKUP' : (None, 25, False, ('softlockup: hung tasks',
-                                             'BUG: scheduling while atomic',
-                                             'BUG: sleeping function called')),
-            'EXCEPTION' : ('nullptr',     10, True,
-             # Logs differ slightly between different kernels and archs (v5.4,
-             # x86, ARM), but all contain 'kernel NULL pointer dereference'.
-                           'kernel NULL pointer dereference'),
-            'PANIC' : ('panic', 10, True, 'Kernel panic - not syncing:'),
-            'CORRUPT_STACK' : (None, 10, True,
-                               'stack-protector: Kernel stack is '
-                               'corrupted in:')
-            }
+                'BUG': (10, False, ('kernel BUG at', 'BUG: failure at')),
+                'HUNG_TASK': (300, False, 'hung_task: blocked tasks'),
+                'SOFTLOCKUP': (25, False, 'BUG: soft lockup'),
+                'HARDLOCKUP': (50, False, 'Watchdog detected hard LOCKUP'),
+                'SPINLOCKUP': (25, False, ('softlockup: hung tasks',
+                                           'BUG: scheduling while atomic',
+                                           'BUG: sleeping function called')),
+                'EXCEPTION': (
+                        10,
+                        True,
+                        # Logs differ slightly between different kernels and archs. Many
+                        # contain 'kernel NULL pointer dereference', but some arm64 think
+                        # a NULL pointer may be a user-space address.
+                        ('kernel NULL pointer dereference',
+                         'Unable to handle kernel access to user memory '
+                         'outside uaccess routines')),
+                'PANIC': (10, True, 'Kernel panic - not syncing:'),
+                'CORRUPT_STACK':
+                (10, True, 'stack-protector: Kernel stack is corrupted in:')
+        }
 
         bad_kcrashes = []
 
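
After this change each kcrash_types entry is a (timeout, run_on_all_cpus, expected_text) tuple with no /proc/breakme string, matching the three-way unpack in the earlier hunk. A minimal illustration of how one entry is consumed:

    # Illustrative subset of the table and of the unpacking shown above.
    kcrash_types = {'PANIC': (10, True, 'Kernel panic - not syncing:')}

    timeout, all_cpu, text = kcrash_types['PANIC']
    # timeout: seconds to wait for the crash/reboot; all_cpu: whether to
    # trigger the crash on every CPU; text: string(s) expected in the crash
    # dump after the target restarts.
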
diff --git a/server/site_tests/platform_LongPressPower/control b/server/site_tests/platform_LongPressPower/control
deleted file mode 100644
index 3420c56..0000000
--- a/server/site_tests/platform_LongPressPower/control
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# Test expects to be run on a jailbroken device in developer mode.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_LongPressPower"
-PURPOSE = "Servo based ChromeOS functional tests."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test uses servo to press and hold the device power button and uses ping
-to validate behavior.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("platform_LongPressPower", host=host, disable_sysinfo=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_LongPressPower/platform_LongPressPower.py b/server/site_tests/platform_LongPressPower/platform_LongPressPower.py
deleted file mode 100755
index be782d8..0000000
--- a/server/site_tests/platform_LongPressPower/platform_LongPressPower.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-class platform_LongPressPower(test.test):
-    """Uses servo pwr_button gpio to power the host off and back on.
-    """
-    version = 1
-
-    def run_once(self, host):
-        boot_id = host.get_boot_id()
-
-        # turn off device
-        host.servo.power_long_press()
-
-        # ensure host is now off
-        if host.is_up():
-            raise error.TestError('DUT still up after long press power')
-
-        # ensure host boots
-        host.servo.boot_devmode()
-        host.test_wait_for_boot(boot_id)
diff --git a/server/site_tests/platform_MTBF/config_schema.yaml b/server/site_tests/platform_MTBF/config_schema.yaml
new file mode 100644
index 0000000..414910c
--- /dev/null
+++ b/server/site_tests/platform_MTBF/config_schema.yaml
@@ -0,0 +1,82 @@
+"$schema": http://json-schema.org/draft-07/schema#
+type: object
+properties:
+  version:
+    type: integer
+    minimum: 1
+  const:
+    type: array
+    items:
+      type: object
+      properties:
+        name:
+          type: string
+        value:
+          type: string
+      required:
+      - name
+      - value
+  tests:
+    type: array
+    items:
+      type: object
+      properties:
+        name:
+          type: string
+        tool:
+          type: string
+          pattern: "^(tast|tauto)$"
+        test_expr:
+          type: string
+        args:
+          type: array
+          items:
+            type: string
+        attributes:
+          type: array
+          items:
+            type: string
+        deps:
+          type: array
+          items:
+            type: string
+      required:
+      - name
+      - tool
+      - test_expr
+    minItems: 1
+    uniqueItems: true
+  suites:
+    type: array
+    items:
+      type: object
+      properties:
+        name:
+          type: string
+        args_file:
+          type: string
+        args:
+          type: array
+          items:
+            type: string
+        tests:
+          type: array
+          items:
+            type: object
+            properties:
+              test:
+                type: string
+              repeats:
+                type: integer
+            required:
+            - test
+            - repeats
+      required:
+      - name
+      - tests
+    minItems: 1
+    uniqueItems: true
+required:
+- version
+- tests
+- suites
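
The schema above can be exercised with the standard jsonschema and PyYAML packages; the following helper and sample config are hypothetical sketches, not part of the test.

    import jsonschema
    import yaml

    def validate_config(config_path, schema_path='config_schema.yaml'):
        """Raise jsonschema.ValidationError if the config violates the schema."""
        with open(schema_path) as f:
            schema = yaml.safe_load(f)
        with open(config_path) as f:
            config = yaml.safe_load(f)
        jsonschema.validate(instance=config, schema=schema)

    # A minimal document satisfying the schema's required fields
    # (test and suite names here are invented for illustration):
    sample = {
        'version': 1,
        'tests': [{'name': 'quick_cuj', 'tool': 'tast',
                   'test_expr': 'ui.SomeCUJ'}],
        'suites': [{'name': 'performance_cuj_quick',
                    'tests': [{'test': 'quick_cuj', 'repeats': 1}]}],
    }
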
diff --git a/server/site_tests/platform_MTBF/control.fetch_cloud_config b/server/site_tests/platform_MTBF/control.fetch_cloud_config
new file mode 100644
index 0000000..e177f96
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.fetch_cloud_config
@@ -0,0 +1,31 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to: go/tauto-py3-migration
+# To re-enable migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = "abergman, chromeos-engprod-platform-syd"
+NAME = "platform_PerfCujFetchCloudConfig"
+ATTRIBUTES = "suite:performance_cuj, suite:performance_cuj_quick, suite:performance_cuj_experimental"
+TIME = "SHORT"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+PRIORITY = 6000
+
+DOC = """
+This test file doesn't run any actual stress, but rather loads a fresh copy of
+the performance CUJ config from the cloud.
+
+This test has a higher priority than any other test in the suite, to ensure it
+runs first and always overrides the cached configuration with the cloud copy.
+"""
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('platform_FetchCloudConfig', host=host, disable_sysinfo=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_CRXPRT2_1 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_CRXPRT2_1
new file mode 100644
index 0000000..07126df
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_CRXPRT2_1
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to: go/tauto-py3-migration
+# To re-enable migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'benchmark_CRXPRT2_1'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4996
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 2
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.utils import labellib
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test_version'] = 1
+  labels['test'] = 'benchmark.CRXPRT2'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+
+def run(machine):
+  host=hosts.create_host(machine)
+  report_host_info(host)
+  job.run_test('tast',
+              host=host,
+              test_exprs=['benchmark.CRXPRT2'],
+              ignore_test_failures=False,
+              max_run_sec=3600,
+              command_args=args,
+              vars_gs_path='config/perf_cuj/perf_cuj.config')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_CRXPRT2_2 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_CRXPRT2_2
new file mode 100644
index 0000000..003749b
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_CRXPRT2_2
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to: go/tauto-py3-migration
+# To re-enable migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'benchmark_CRXPRT2_2'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4995
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 2
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.utils import labellib
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test_version'] = 1
+  labels['test'] = 'benchmark.CRXPRT2'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+
+def run(machine):
+  host=hosts.create_host(machine)
+  report_host_info(host)
+  job.run_test('tast',
+              host=host,
+              test_exprs=['benchmark.CRXPRT2'],
+              ignore_test_failures=False,
+              max_run_sec=3600,
+              command_args=args,
+              vars_gs_path='config/perf_cuj/perf_cuj.config')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GFXBenchPublicAndroidApp_1 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GFXBenchPublicAndroidApp_1
new file mode 100644
index 0000000..5f8f40e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GFXBenchPublicAndroidApp_1
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to: go/tauto-py3-migration
+# To re-enable migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'benchmark_GFXBenchPublicAndroidApp_1'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4995
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 2
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.utils import labellib
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test_version'] = 1
+  labels['test'] = 'benchmark.GFXBenchPublicAndroidApp'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+
+def run(machine):
+  host=hosts.create_host(machine)
+  report_host_info(host)
+  job.run_test('tast',
+              host=host,
+              test_exprs=['benchmark.GFXBenchPublicAndroidApp'],
+              ignore_test_failures=False,
+              max_run_sec=3600,
+              command_args=args,
+              vars_gs_path='config/perf_cuj/perf_cuj.config')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GFXBenchPublicAndroidApp_2 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GFXBenchPublicAndroidApp_2
new file mode 100644
index 0000000..cff2ccf
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GFXBenchPublicAndroidApp_2
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to: go/tauto-py3-migration
+# To re-enable migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'benchmark_GFXBenchPublicAndroidApp_2'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4994
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 2
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.utils import labellib
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test_version'] = 1
+  labels['test'] = 'benchmark.GFXBenchPublicAndroidApp'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+
+def run(machine):
+  host=hosts.create_host(machine)
+  report_host_info(host)
+  job.run_test('tast',
+              host=host,
+              test_exprs=['benchmark.GFXBenchPublicAndroidApp'],
+              ignore_test_failures=False,
+              max_run_sec=3600,
+              command_args=args,
+              vars_gs_path='config/perf_cuj/perf_cuj.config')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GeekbenchPublicAndroidApp_1 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GeekbenchPublicAndroidApp_1
new file mode 100644
index 0000000..e64d50c
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GeekbenchPublicAndroidApp_1
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For instructions on how to: go/tauto-py3-migration
+# To re-enable migrate to Python 3.
+# If the test is not migrated by 1/14/22 it will be deleted.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'benchmark_GeekbenchPublicAndroidApp_1'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 5000
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 2
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.utils import labellib
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test_version'] = 1
+  labels['test'] = 'benchmark.GeekbenchPublicAndroidApp'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+
+def run(machine):
+  host=hosts.create_host(machine)
+  report_host_info(host)
+  job.run_test('tast',
+              host=host,
+              test_exprs=['benchmark.GeekbenchPublicAndroidApp'],
+              ignore_test_failures=False,
+              max_run_sec=3600,
+              command_args=args,
+              vars_gs_path='config/perf_cuj/perf_cuj.config')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GeekbenchPublicAndroidApp_2 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GeekbenchPublicAndroidApp_2
new file mode 100644
index 0000000..ddc22d8
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_GeekbenchPublicAndroidApp_2
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22, it will be deleted.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'benchmark_GeekbenchPublicAndroidApp_2'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4999
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 2
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.utils import labellib
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test_version'] = 1
+  labels['test'] = 'benchmark.GeekbenchPublicAndroidApp'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+
+def run(machine):
+  host=hosts.create_host(machine)
+  report_host_info(host)
+  job.run_test('tast',
+              host=host,
+              test_exprs=['benchmark.GeekbenchPublicAndroidApp'],
+              ignore_test_failures=False,
+              max_run_sec=3600,
+              command_args=args,
+              vars_gs_path='config/perf_cuj/perf_cuj.config')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_LMbench_1 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_LMbench_1
new file mode 100644
index 0000000..c5f6786
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_LMbench_1
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22, it will be deleted.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'benchmark_LMbench_1'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4998
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 2
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.utils import labellib
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test_version'] = 1
+  labels['test'] = 'benchmark.LMbench'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+
+def run(machine):
+  host=hosts.create_host(machine)
+  report_host_info(host)
+  job.run_test('tast',
+              host=host,
+              test_exprs=['benchmark.LMbench'],
+              ignore_test_failures=False,
+              max_run_sec=3600,
+              command_args=args,
+              vars_gs_path='config/perf_cuj/perf_cuj.config')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_LMbench_2 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_LMbench_2
new file mode 100644
index 0000000..7e8e57d
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_LMbench_2
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22, it will be deleted.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'benchmark_LMbench_2'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4997
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 2
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.utils import labellib
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test_version'] = 1
+  labels['test'] = 'benchmark.LMbench'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+
+def run(machine):
+  host=hosts.create_host(machine)
+  report_host_info(host)
+  job.run_test('tast',
+              host=host,
+              test_exprs=['benchmark.LMbench'],
+              ignore_test_failures=False,
+              max_run_sec=3600,
+              command_args=args,
+              vars_gs_path='config/perf_cuj/perf_cuj.config')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_PCMarkWorkAndroidApp_1 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_PCMarkWorkAndroidApp_1
new file mode 100644
index 0000000..441dcc5
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_PCMarkWorkAndroidApp_1
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22, it will be deleted.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'benchmark_PCMarkWorkAndroidApp_1'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4993
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 2
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.utils import labellib
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test_version'] = 1
+  labels['test'] = 'benchmark.PCMarkWorkAndroidApp'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+
+def run(machine):
+  host=hosts.create_host(machine)
+  report_host_info(host)
+  job.run_test('tast',
+              host=host,
+              test_exprs=['benchmark.PCMarkWorkAndroidApp'],
+              ignore_test_failures=False,
+              max_run_sec=3600,
+              command_args=args,
+              vars_gs_path='config/perf_cuj/perf_cuj.config')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_PCMarkWorkAndroidApp_2 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_PCMarkWorkAndroidApp_2
new file mode 100644
index 0000000..b7b110d
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_benchmark_PCMarkWorkAndroidApp_2
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TEST IS DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22, it will be deleted.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'benchmark_PCMarkWorkAndroidApp_2'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4992
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 2
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.utils import labellib
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test_version'] = 1
+  labels['test'] = 'benchmark.PCMarkWorkAndroidApp'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+
+def run(machine):
+  host=hosts.create_host(machine)
+  report_host_info(host)
+  job.run_test('tast',
+              host=host,
+              test_exprs=['benchmark.PCMarkWorkAndroidApp'],
+              ignore_test_failures=False,
+              max_run_sec=3600,
+              command_args=args,
+              vars_gs_path='config/perf_cuj/perf_cuj.config')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_CRXPRT2_1 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_CRXPRT2_1
new file mode 100644
index 0000000..c0e32d1
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_CRXPRT2_1
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_CRXPRT2_1'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4996
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.CRXPRT2'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.CRXPRT2'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
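The ui_* controls above merge remotely configured test variables (a JSON document fetched from the devserver) into test_args and serialize the result to a temporary YAML file that is then passed to the tast wrapper through varsfiles. A stand-alone approximation of that flow, with the network fetch replaced by an inline JSON string and hypothetical variable names:

import json
import tempfile

import yaml

remote_config = '{"ui.cuj_username": "user@example.com", "ui.cuj_mode": "basic"}'

test_args = {'test_version': 1}
test_args.update(json.loads(remote_config))      # same merge as parse_config()

with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8',
                                 suffix='.yaml', delete=False) as temp_file:
    # Same serialization the control files use before calling job.run_test().
    yaml.safe_dump(test_args, stream=temp_file,
                   default_flow_style=False, allow_unicode=True)
    varsfile_path = temp_file.name               # would go into varsfiles=[...]

print(varsfile_path)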
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_CRXPRT2_2 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_CRXPRT2_2
new file mode 100644
index 0000000..2043638
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_CRXPRT2_2
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_CRXPRT2_2'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4995
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.CRXPRT2'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.CRXPRT2'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_GeekbenchPublicAndroidApp_1 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_GeekbenchPublicAndroidApp_1
new file mode 100644
index 0000000..b4415b4
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_GeekbenchPublicAndroidApp_1
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GeekbenchPublicAndroidApp_1'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 5000
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GeekbenchPublicAndroidApp'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GeekbenchPublicAndroidApp'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_GeekbenchPublicAndroidApp_2 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_GeekbenchPublicAndroidApp_2
new file mode 100644
index 0000000..9af9772
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_GeekbenchPublicAndroidApp_2
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GeekbenchPublicAndroidApp_2'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4999
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GeekbenchPublicAndroidApp'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GeekbenchPublicAndroidApp'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_LMbench_1 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_LMbench_1
new file mode 100644
index 0000000..bbef35e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_LMbench_1
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_LMbench_1'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4998
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.LMbench'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.LMbench'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
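stage_config() in the controls above asks a devserver to stage perf_cuj.config out of the image storage bucket, then fetches the staged copy through the devserver's /static/ endpoint. The URL composition it relies on is just string concatenation; a small sketch with hypothetical bucket and devserver values:

gs_bucket = 'gs://chromeos-image-archive/'        # hypothetical example bucket
devserver_url = 'http://devserver.example:8082'   # hypothetical example devserver
config_path = 'config/perf_cuj/'
config_file = 'perf_cuj.config'

archive_url = gs_bucket + config_path             # what stage_artifacts() is asked to stage
static_url = devserver_url + '/static/' + config_path + config_file

print(archive_url)  # gs://chromeos-image-archive/config/perf_cuj/
print(static_url)   # http://devserver.example:8082/static/config/perf_cuj/perf_cuj.config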
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_LMbench_2 b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_LMbench_2
new file mode 100644
index 0000000..2bae57a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_benchmarks_ui_LMbench_2
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_LMbench_2'
+ATTRIBUTES = 'suite:performance_cuj_benchmarks'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4997
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.LMbench'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.LMbench'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_EverydayMultiTaskingCUJ_basic_ytmusic b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_EverydayMultiTaskingCUJ_basic_ytmusic
new file mode 100644
index 0000000..e613661
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_EverydayMultiTaskingCUJ_basic_ytmusic
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4984
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_EverydayMultiTaskingCUJ_plus_ytmusic b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_EverydayMultiTaskingCUJ_plus_ytmusic
new file mode 100644
index 0000000..7b8ef13
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_EverydayMultiTaskingCUJ_plus_ytmusic
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4983
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ExtendedDisplayCUJ_plus_video_youtube_web b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ExtendedDisplayCUJ_plus_video_youtube_web
new file mode 100644
index 0000000..efd8deb
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ExtendedDisplayCUJ_plus_video_youtube_web
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_plus_video_youtube_web'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4981
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.plus_video_youtube_web'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.plus_video_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ExtendedDisplayCUJ_premium_meet_large b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ExtendedDisplayCUJ_premium_meet_large
new file mode 100644
index 0000000..4a5ac79
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ExtendedDisplayCUJ_premium_meet_large
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4982
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleDocsWebCUJ_basic b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleDocsWebCUJ_basic
new file mode 100644
index 0000000..69cf486
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleDocsWebCUJ_basic
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleDocsWebCUJ_basic'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4973
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleDocsWebCUJ.basic'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.GoogleDocsWebCUJ.basic'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleDocsWebCUJ_premium b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleDocsWebCUJ_premium
new file mode 100644
index 0000000..a5aae90
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleDocsWebCUJ_premium
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleDocsWebCUJ_premium'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4972
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleDocsWebCUJ.premium'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
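+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.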
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.GoogleDocsWebCUJ.premium'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_class b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_class
new file mode 100644
index 0000000..311ac6b
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_class
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4997
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
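+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.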
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_large b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_large
new file mode 100644
index 0000000..dd4c4fc
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_large
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4998
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
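+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.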
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_small b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_small
new file mode 100644
index 0000000..0bfbc6e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_small
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4999
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
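+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.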
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_two b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_two
new file mode 100644
index 0000000..cd8cfbb
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_basic_two
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 5000
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
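+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.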
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_plus_class b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_plus_class
new file mode 100644
index 0000000..77cdc6f
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_plus_class
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4995
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
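+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.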
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_plus_large b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_plus_large
new file mode 100644
index 0000000..1b52d2b
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_plus_large
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4996
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
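+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.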
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_premium_large b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_premium_large
new file mode 100644
index 0000000..283da8a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_GoogleMeetCUJ_premium_large
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4994
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
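+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.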
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_MicrosoftOfficeWebCUJ_plus b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_MicrosoftOfficeWebCUJ_plus
new file mode 100644
index 0000000..dc1db63
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_MicrosoftOfficeWebCUJ_plus
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_MicrosoftOfficeWebCUJ_plus'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4971
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.MicrosoftOfficeWebCUJ.plus'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
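+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.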
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.MicrosoftOfficeWebCUJ.plus'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_MicrosoftOfficeWebCUJ_premium b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_MicrosoftOfficeWebCUJ_premium
new file mode 100644
index 0000000..5f1f6ea
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_MicrosoftOfficeWebCUJ_premium
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_MicrosoftOfficeWebCUJ_premium'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4970
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.MicrosoftOfficeWebCUJ.premium'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
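+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.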
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.MicrosoftOfficeWebCUJ.premium'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_QuickCheckCUJ2_basic_unlock b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_QuickCheckCUJ2_basic_unlock
new file mode 100644
index 0000000..2d62e66
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_QuickCheckCUJ2_basic_unlock
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4990
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
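+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.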
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_QuickCheckCUJ2_basic_wakeup b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_QuickCheckCUJ2_basic_wakeup
new file mode 100644
index 0000000..f088db3
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_QuickCheckCUJ2_basic_wakeup
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4989
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
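+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.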
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML varsfile for the Tast run.
+      yaml.safe_dump(test_args,
+                     stream=temp_file,
+                     default_flow_style=False,
+                     allow_unicode=True)
+      job.run_test('tast',
+                   host=host,
+                   test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                   clear_tpm=False,
+                   ignore_test_failures=False,
+                   max_run_sec=3600,
+                   command_args=args,
+                   varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_TabSwitchCUJ2_basic_noproxy b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_TabSwitchCUJ2_basic_noproxy
new file mode 100644
index 0000000..089dae7
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_TabSwitchCUJ2_basic_noproxy
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4993
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
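+  # Stage the shared perf_cuj.config from the image storage bucket to the
+  # devserver and merge its values into test_args. Staging only happens when a
+  # cros-version label is present, i.e. when the test runs on Moblab.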
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_TabSwitchCUJ2_plus_noproxy b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_TabSwitchCUJ2_plus_noproxy
new file mode 100644
index 0000000..e3755e4
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_TabSwitchCUJ2_plus_noproxy
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4992
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_TabSwitchCUJ2_premium_noproxy b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_TabSwitchCUJ2_premium_noproxy
new file mode 100644
index 0000000..79787dc
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_TabSwitchCUJ2_premium_noproxy
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4991
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_basic_youtube_app b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_basic_youtube_app
new file mode 100644
index 0000000..f48c398
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_basic_youtube_app
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4986
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_basic_youtube_web b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_basic_youtube_web
new file mode 100644
index 0000000..4c56d19
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_basic_youtube_web
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4988
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_premium_youtube_app b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_premium_youtube_app
new file mode 100644
index 0000000..d0dec34
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_premium_youtube_app
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4985
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_premium_youtube_web b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_premium_youtube_web
new file mode 100644
index 0000000..e58ee23
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_VideoCUJ2_premium_youtube_web
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4987
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_class b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_class
new file mode 100644
index 0000000..40dcae2
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_class
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ZoomConfCUJ_basic_class'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4977
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ZoomConfCUJ.basic_class'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ZoomConfCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_large b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_large
new file mode 100644
index 0000000..bc39710
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_large
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ZoomConfCUJ_basic_large'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4978
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ZoomConfCUJ.basic_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ZoomConfCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_small b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_small
new file mode 100644
index 0000000..587f18f
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_small
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ZoomConfCUJ_basic_small'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4979
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ZoomConfCUJ.basic_small'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ZoomConfCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_two b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_two
new file mode 100644
index 0000000..ef63f3d
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_basic_two
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ZoomConfCUJ_basic_two'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4980
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ZoomConfCUJ.basic_two'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ZoomConfCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_plus_class b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_plus_class
new file mode 100644
index 0000000..194fe88
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_plus_class
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ZoomConfCUJ_plus_class'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4975
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ZoomConfCUJ.plus_class'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ZoomConfCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_plus_large b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_plus_large
new file mode 100644
index 0000000..b83dbc6
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_plus_large
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ZoomConfCUJ_plus_large'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4976
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ZoomConfCUJ.plus_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ZoomConfCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_premium_large b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_premium_large
new file mode 100644
index 0000000..2a666c2
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_experimental_ui_ZoomConfCUJ_premium_large
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ZoomConfCUJ_premium_large'
+ATTRIBUTES = 'suite:performance_cuj_experimental'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4974
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ZoomConfCUJ.premium_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ZoomConfCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_EverydayMultiTaskingCUJ_basic_ytmusic b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_EverydayMultiTaskingCUJ_basic_ytmusic
new file mode 100644
index 0000000..ed61a3e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_EverydayMultiTaskingCUJ_basic_ytmusic
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic'
+ATTRIBUTES = 'suite:performance_cuj_quick'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4993
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_ExtendedDisplayCUJ_premium_meet_large b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_ExtendedDisplayCUJ_premium_meet_large
new file mode 100644
index 0000000..7cbf968
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_ExtendedDisplayCUJ_premium_meet_large
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large'
+ATTRIBUTES = 'suite:performance_cuj_quick'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4992
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_GoogleMeetCUJ_basic_two b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_GoogleMeetCUJ_basic_two
new file mode 100644
index 0000000..6e80a67
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_GoogleMeetCUJ_basic_two
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two'
+ATTRIBUTES = 'suite:performance_cuj_quick'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 5000
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_GoogleMeetCUJ_plus_large b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_GoogleMeetCUJ_plus_large
new file mode 100644
index 0000000..4a885e0
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_GoogleMeetCUJ_plus_large
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large'
+ATTRIBUTES = 'suite:performance_cuj_quick'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4999
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_GoogleMeetCUJ_premium_large b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_GoogleMeetCUJ_premium_large
new file mode 100644
index 0000000..bf0599d
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_GoogleMeetCUJ_premium_large
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large'
+ATTRIBUTES = 'suite:performance_cuj_quick'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4998
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_QuickCheckCUJ2_basic_unlock b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_QuickCheckCUJ2_basic_unlock
new file mode 100644
index 0000000..d160ee8
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_QuickCheckCUJ2_basic_unlock
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock'
+ATTRIBUTES = 'suite:performance_cuj_quick'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4996
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_QuickCheckCUJ2_basic_wakeup b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_QuickCheckCUJ2_basic_wakeup
new file mode 100644
index 0000000..cc187c3
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_QuickCheckCUJ2_basic_wakeup
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup'
+ATTRIBUTES = 'suite:performance_cuj_quick'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4995
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_TabSwitchCUJ2_basic_noproxy b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_TabSwitchCUJ2_basic_noproxy
new file mode 100644
index 0000000..47186b1
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_TabSwitchCUJ2_basic_noproxy
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy'
+ATTRIBUTES = 'suite:performance_cuj_quick'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4997
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_VideoCUJ2_basic_youtube_web b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_VideoCUJ2_basic_youtube_web
new file mode 100644
index 0000000..552bb65
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_quick_ui_VideoCUJ2_basic_youtube_web
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web'
+ATTRIBUTES = 'suite:performance_cuj_quick'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4994
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_01
new file mode 100644
index 0000000..726fe84
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4840
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_02
new file mode 100644
index 0000000..4c1aa06
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4839
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_03
new file mode 100644
index 0000000..23da8f3
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4838
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_04
new file mode 100644
index 0000000..4a778f0
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4837
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
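
These near-identical control files all follow the same pattern: test arguments are collected in `test_args`, dumped to a temporary YAML varsfile, and handed to the Tast runner through `varsfiles`. A minimal, self-contained sketch of just that serialization step (the extra argument value is illustrative, not a production default):

```python
# Sketch of the varsfile hand-off used by these control files: dump the test
# arguments to a temporary YAML file and pass its path to the runner.
# The 'record_screen' entry is a made-up example value.
import tempfile

import yaml

test_args = {'test_version': 1, 'record_screen': True}

with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8',
                                 suffix='.yaml') as temp_file:
    yaml.safe_dump(test_args,
                   stream=temp_file,
                   default_flow_style=False,
                   allow_unicode=True)
    temp_file.flush()  # make sure the dump is on disk before anything reads it
    # In the control files above, temp_file.name is what ends up in
    # varsfiles=[temp_file.name] for job.run_test('tast', ...).
    with open(temp_file.name, encoding='utf-8') as f:
        print(f.read())
```

On the Linux lab hosts this path can be re-opened by name while the temporary file is still held open, which is what `varsfiles=[temp_file.name]` relies on.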
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_05
new file mode 100644
index 0000000..1e1143d
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4836
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_06
new file mode 100644
index 0000000..4ba278f
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4835
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_07
new file mode 100644
index 0000000..1cdb541
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4834
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_08
new file mode 100644
index 0000000..d980afd
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4833
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_09
new file mode 100644
index 0000000..2ce66ab
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4832
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_10
new file mode 100644
index 0000000..12754c9
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_basic_ytmusic_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_basic_ytmusic_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4831
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.basic_ytmusic'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.basic_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
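
Each control file's `parse_config()` pulls a flat JSON document from the devserver and folds its keys into `test_args`, so values staged in `perf_cuj.config` override the inline defaults. An equivalent, slightly more compact sketch (the URL in the usage comment is hypothetical, and the `vars` name is swapped out to avoid shadowing the builtin):

```python
# Equivalent of parse_config() above: fetch a flat JSON mapping and merge it
# into the module-level test_args dict, letting remote values override defaults.
import json
import logging

from six.moves import urllib

test_args = {'test_version': 1}

def parse_config(config_url):
    """Fetch a flat JSON config and fold it into test_args."""
    response = urllib.request.urlopen(config_url)
    remote_vars = json.loads(response.read())
    test_args.update(remote_vars)
    logging.info('Read %d values from remote configuration.', len(remote_vars))

# Hypothetical usage; a real run reads the staged perf_cuj.config:
# parse_config('http://<devserver>:8082/static/config/perf_cuj/perf_cuj.config')
```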
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_01
new file mode 100644
index 0000000..007d472
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4830
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_02
new file mode 100644
index 0000000..0430a25
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4829
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_03
new file mode 100644
index 0000000..30be68b
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4828
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
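
`report_host_info()` records the host's labels, the Tast test name, and the iteration number as result keyvals so individual MTBF iterations can be told apart later. A rough standalone approximation of that bookkeeping, without the autotest `labellib`/`utils` helpers (the keyval file name and the 'name:value' label format are assumptions for illustration):

```python
# Approximation of report_host_info(): turn 'name:value' host labels into a
# dict, tag on the test name and iteration, and append them to a keyval file.
# The file name ('keyval') and the label format are assumptions.
import os

def write_host_keyvals(resultdir, labels, test, iteration):
    keyvals = dict(label.split(':', 1) for label in labels if ':' in label)
    keyvals['test'] = test
    keyvals['test_iteration'] = str(iteration)
    with open(os.path.join(resultdir, 'keyval'), 'a', encoding='utf-8') as f:
        for key, value in sorted(keyvals.items()):
            f.write('%s=%s\n' % (key, value))

# Hypothetical usage with made-up labels:
# write_host_keyvals('/tmp/results',
#                    ['board:eve', 'cros-version:R96-14268.0.0'],
#                    'ui.EverydayMultiTaskingCUJ.plus_ytmusic', 3)
```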
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_04
new file mode 100644
index 0000000..2d40dbf
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4827
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_05
new file mode 100644
index 0000000..eed073a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4826
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
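
`stage_config()` builds two URLs from the same path pieces: a Google Storage location that the devserver is asked to stage from, and the devserver's `/static/` path that `parse_config()` then reads back. Spelled out with placeholder endpoints (both the bucket and the devserver address are hypothetical):

```python
# The URL composition inside stage_config(), with placeholder endpoints.
gs_bucket = 'gs://some-image-archive/'            # hypothetical storage bucket
devserver_url = 'http://devserver.example:8082'   # hypothetical devserver

config_path = 'config/perf_cuj/'
config_file = 'perf_cuj.config'

# Where the devserver stages the artifact from:
archive_url = gs_bucket + config_path
# Where the staged file is fetched back over HTTP:
static_url = devserver_url + '/static/' + config_path + config_file

print(archive_url)  # gs://some-image-archive/config/perf_cuj/
print(static_url)   # http://devserver.example:8082/static/config/perf_cuj/perf_cuj.config
```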
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_06
new file mode 100644
index 0000000..fc0d498
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4825
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_07
new file mode 100644
index 0000000..8595cb4
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4824
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_08
new file mode 100644
index 0000000..3b82d38
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4823
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_09
new file mode 100644
index 0000000..230374e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4822
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_10
new file mode 100644
index 0000000..d29d413
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_EverydayMultiTaskingCUJ_plus_ytmusic_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_EverydayMultiTaskingCUJ_plus_ytmusic_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4821
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.EverydayMultiTaskingCUJ.plus_ytmusic'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.EverydayMultiTaskingCUJ.plus_ytmusic'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_01
new file mode 100644
index 0000000..a5b2fdb
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4820
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_02
new file mode 100644
index 0000000..f2675f6
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4819
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_03
new file mode 100644
index 0000000..1e8ef48
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4818
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_04
new file mode 100644
index 0000000..04c2371
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4817
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_05
new file mode 100644
index 0000000..6801229
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4816
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_06
new file mode 100644
index 0000000..7554424
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4815
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_07
new file mode 100644
index 0000000..9e7a275
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4814
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_08
new file mode 100644
index 0000000..1682bb7
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4813
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_09
new file mode 100644
index 0000000..1f5f0cb
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4812
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url=archive_url,
+                          files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_10
new file mode 100644
index 0000000..4bcb9c1
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_ExtendedDisplayCUJ_premium_meet_large_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_ExtendedDisplayCUJ_premium_meet_large_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4811
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'external_display, premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.ExtendedDisplayCUJ.premium_meet_large'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.ExtendedDisplayCUJ.premium_meet_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
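
The optional overrides come from a perf_cuj.config file staged through the devserver.
Its schema is not defined by these control files; as an illustration only, with
hypothetical keys and placeholder hosts, parse_config() merges a flat JSON object into
test_args and stage_config() builds its URLs by plain string concatenation:

    import json

    # Hypothetical contents of <image bucket>/config/perf_cuj/perf_cuj.config.
    example_config = '{"meet_bond_url": "https://example.com/bond", "retries": "2"}'

    test_args = {'test_version': 1}
    for key, value in json.loads(example_config).items():
        test_args[key] = value  # same merge that parse_config() performs

    # URL shapes used by stage_config(), with placeholder values.
    devserver_url = 'http://devserver.example:8082'
    config_path = 'config/perf_cuj/'
    config_file = 'perf_cuj.config'
    archive_url = 'gs://example-image-bucket/' + config_path
    config_url = devserver_url + '/static/' + config_path + config_file
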
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_01
new file mode 100644
index 0000000..14aadee
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4970
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
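
One detail worth noting in the run() helpers: yaml.safe_dump() writes into a buffered
NamedTemporaryFile, and the file name is handed to job.run_test() without an explicit
flush, so whether the consumer sees the contents on disk depends on I/O buffering. A
small sketch of the more defensive pattern, shown only as optional hardening under the
assumption that the varsfile is read back from disk:

    import tempfile
    import yaml

    test_args = {'test_version': 1}
    with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8',
                                     suffix='.yaml') as temp_file:
        yaml.safe_dump(test_args, stream=temp_file,
                       default_flow_style=False, allow_unicode=True)
        temp_file.flush()  # ensure the YAML is on disk before the path is read
        # ...pass temp_file.name on, e.g. as varsfiles=[temp_file.name]
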
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_02
new file mode 100644
index 0000000..c84165b
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4969
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
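
report_host_info() publishes the test name, the iteration number and the DUT's labels
as Autotest keyvals in the job result directory, which are typically stored as plain
key=value lines. A toy stand-in, shown only to illustrate that format (it is a
hypothetical helper, not the utils.write_keyval() API the control files actually call):

    # Toy sketch of the key=value keyval format; not the autotest implementation.
    def write_keyval_sketch(resultdir, mapping):
        with open(resultdir + '/keyval', 'a', encoding='utf-8') as keyval_file:
            for key, value in sorted(mapping.items()):
                keyval_file.write('%s=%s\n' % (key, value))

    write_keyval_sketch('/tmp', {'test': 'ui.GoogleMeetCUJ.basic_class',
                                 'test_iteration': '2'})
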
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_03
new file mode 100644
index 0000000..625e49e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4968
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_04
new file mode 100644
index 0000000..cdb126d
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4967
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_05
new file mode 100644
index 0000000..6d97901
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4966
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_06
new file mode 100644
index 0000000..e2da8b3
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4965
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_07
new file mode 100644
index 0000000..4572fa5
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4964
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_08
new file mode 100644
index 0000000..0f63f85
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4963
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_09
new file mode 100644
index 0000000..65412ff
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4962
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_10
new file mode 100644
index 0000000..973cf57
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_class_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_class_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4961
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_class'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_01
new file mode 100644
index 0000000..8e9a8e1
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4980
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_02
new file mode 100644
index 0000000..54a2d0b
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4979
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_03
new file mode 100644
index 0000000..f8c01da
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4978
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
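
run() serializes the collected test_args into a temporary YAML file and hands the file name to the tast wrapper through varsfiles, which the wrapper is expected to forward to the Tast runner as runtime variables. The snippet below shows what yaml.safe_dump() with these options actually writes; the second key is a made-up placeholder, not a variable defined by this change.

    # Sketch of the vars file produced in run(); only test_version is real,
    # the other key is hypothetical.
    import tempfile
    import yaml

    test_args = {'test_version': 1, 'ui.cuj_username': 'user@example.com'}

    with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8',
                                     suffix='.yaml') as temp_file:
        yaml.safe_dump(test_args, stream=temp_file, default_flow_style=False,
                       allow_unicode=True)
        temp_file.seek(0)
        print(temp_file.read())
        # test_version: 1
        # ui.cuj_username: user@example.com

Because the file is created inside a with block, it is deleted as soon as job.run_test() returns.
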
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_04
new file mode 100644
index 0000000..e8493c2
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4977
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
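
Both report_host_info() and stage_config() rely on labellib.LabelsMapping to turn the host's "key:value" labels into a dictionary; stage_config() then looks up labellib.Key.CROS_VERSION (the cros-version label) to decide whether a build is known. A simplified stand-in, with hypothetical label values, looks like this:

    # Simplified stand-in for labellib.LabelsMapping; the real class handles
    # value-less labels and other edge cases. Label values are hypothetical.
    def labels_to_mapping(labels):
        mapping = {}
        for label in labels:
            key, _, value = label.partition(':')
            mapping[key] = value
        return mapping

    labels = ['board:hatch', 'cros-version:hatch-release/R99-14469.8.0']
    build = labels_to_mapping(labels).get('cros-version')
    print(build)  # hatch-release/R99-14469.8.0

If no cros-version label is present, build is None and stage_config() returns early without staging anything.
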
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_05
new file mode 100644
index 0000000..b3ef93e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4976
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
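
report_host_info() records the host labels, the test name, and the iteration number as job keyvals; in Autotest, utils.write_keyval() writes them as key=value lines into the keyval file of the result directory. A rough, self-contained equivalent, with made-up values, is sketched below.

    # Rough stand-in for utils.write_keyval(); the real helper also validates
    # key names. Values below are hypothetical.
    import os
    import tempfile

    def write_keyval_sketch(resultdir, mapping):
        with open(os.path.join(resultdir, 'keyval'), 'a') as f:
            for key in sorted(mapping):
                f.write('%s=%s\n' % (key, mapping[key]))

    keyvals = {'test': 'ui.GoogleMeetCUJ.basic_large', 'test_iteration': '5'}
    resultdir = tempfile.mkdtemp()
    write_keyval_sketch(resultdir, keyvals)
    with open(os.path.join(resultdir, 'keyval')) as f:
        print(f.read())
    # test=ui.GoogleMeetCUJ.basic_large
    # test_iteration=5

These keyvals are presumably what lets the results pipeline tell the otherwise identical iterations of the same CUJ apart.
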
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_06
new file mode 100644
index 0000000..4e5f353
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4975
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
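
stage_config() stages the configuration file from the image storage bucket onto a devserver and then reads it back over the devserver's /static endpoint; both URLs are built by plain string concatenation. The sketch below spells out that arithmetic with placeholder bucket and devserver addresses, neither of which is defined by this change.

    # URL composition used by stage_config(); bucket and devserver values are
    # hypothetical placeholders.
    gs_bucket = 'gs://chromeos-image-archive/'       # from _get_image_storage_server()
    devserver_url = 'http://100.115.245.199:8082'    # first available devserver

    config_path = 'config/perf_cuj/'
    config_file = 'perf_cuj.config'

    archive_url = gs_bucket + config_path
    static_url = devserver_url + '/static/' + config_path + config_file

    print(archive_url)  # gs://chromeos-image-archive/config/perf_cuj/
    print(static_url)   # http://100.115.245.199:8082/static/config/perf_cuj/perf_cuj.config

The code assumes the bucket string ends with a trailing slash, since config_path does not start with one.
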
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_07
new file mode 100644
index 0000000..0211100
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4974
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_08
new file mode 100644
index 0000000..1b9943d
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4973
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_09
new file mode 100644
index 0000000..1444883
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4972
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_10
new file mode 100644
index 0000000..ddef8d1
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_large_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_large_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4971
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_large'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
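
Everything from here on repeats the same control file for ui.GoogleMeetCUJ.basic_small. Across the whole change, the files differ only in NAME, PRIORITY, the Tast test expression, and the test_iteration label, with PRIORITY decreasing by one per iteration. The hypothetical helper below documents that pattern; nothing in this change indicates the files were actually generated by such a script.

    # Hypothetical illustration of the naming/priority pattern used by these
    # control files (e.g. basic_small_01 -> PRIORITY 4990, _02 -> 4989, ...).
    def control_fields(test_expr, iterations, top_priority):
        """Yield (NAME, PRIORITY, test_iteration) tuples for one CUJ test."""
        base = test_expr.replace('.', '_')
        for i in range(1, iterations + 1):
            yield '%s_%02d' % (base, i), top_priority - (i - 1), i

    for name, priority, iteration in control_fields(
            'ui.GoogleMeetCUJ.basic_small', 3, 4990):
        print(name, priority, iteration)
    # ui_GoogleMeetCUJ_basic_small_01 4990 1
    # ui_GoogleMeetCUJ_basic_small_02 4989 2
    # ui_GoogleMeetCUJ_basic_small_03 4988 3
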
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_01
new file mode 100644
index 0000000..46900f6
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4990
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_02
new file mode 100644
index 0000000..60a0223
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4989
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_03
new file mode 100644
index 0000000..254e278
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4988
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_04
new file mode 100644
index 0000000..d634369
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4987
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_05
new file mode 100644
index 0000000..572d447
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4986
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_06
new file mode 100644
index 0000000..357b54a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4985
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means the test is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
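The parse_config() helper above treats the staged perf_cuj.config as a JSON object whose keys override or extend test_args before they are handed to the Tast wrapper. The exact contents of perf_cuj.config are not shown in this change, so the standalone sketch below uses hypothetical keys ('participants', 'meet_duration_sec') purely to illustrate the merge behavior.

import json

# Hypothetical payload standing in for the real perf_cuj.config contents.
test_args = {'test_version': 1}
payload = json.loads('{"participants": "4", "meet_duration_sec": "120"}')
for key in payload:
    test_args[key] = payload[key]
print(test_args)
# -> {'test_version': 1, 'participants': '4', 'meet_duration_sec': '120'}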
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_07
new file mode 100644
index 0000000..5251e97e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4984
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
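run() serializes the accumulated test_args into a temporary YAML file and passes its path to the tast wrapper through varsfiles. Assuming only the built-in test_version key plus one hypothetical override, the varsfile written by yaml.safe_dump would look roughly like this:

import yaml

# 'participants' is a hypothetical override; test_version comes from the
# control file itself.
test_args = {'test_version': 1, 'participants': '4'}
print(yaml.safe_dump(test_args, default_flow_style=False, allow_unicode=True))
# Block-style output, keys sorted alphabetically:
#   participants: '4'
#   test_version: 1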
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_08
new file mode 100644
index 0000000..c0baeb4
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4983
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_09
new file mode 100644
index 0000000..977da7e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4982
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_10
new file mode 100644
index 0000000..d56ffe4
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_small_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_small_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4981
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_small'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_small'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_01
new file mode 100644
index 0000000..518d044
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 5000
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
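stage_config() above first asks a devserver to stage perf_cuj.config out of the image storage bucket for the DUT's build, then reads the staged file back over the devserver's static endpoint. The sketch below traces the two URLs it builds; the bucket and devserver addresses are placeholders, not values taken from this change (at run time they come from dev_server._get_image_storage_server() and dev_server.ImageServer.get_available_devservers()).

# Placeholder inputs, for illustration only.
gs_bucket = 'gs://chromeos-image-archive/'
devserver_url = 'http://100.115.219.132:8082'
config_path = 'config/perf_cuj/'
config_file = 'perf_cuj.config'

archive_url = gs_bucket + config_path  # handed to stage_artifacts()
fetch_url = devserver_url + '/static/' + config_path + config_file
print(archive_url)  # gs://chromeos-image-archive/config/perf_cuj/
print(fetch_url)    # http://100.115.219.132:8082/static/config/perf_cuj/perf_cuj.config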
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_02
new file mode 100644
index 0000000..7c68a89
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4999
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
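report_host_info() above records the Tast test name, the iteration number, the DUT's host-info labels and, when the HWID/serial lookup succeeds, the host attributes as job keyvals in the result directory. The snippet below is only a rough illustration of that flat key=value record; the 'board' entry is hypothetical and the real set of keys depends on the DUT's host info store.

# Approximates the keyval lines this control file would add; not captured
# from a real run.
example_keyvals = {
    'test': 'ui.GoogleMeetCUJ.basic_two',
    'test_iteration': '2',
    'board': 'hypothetical-board',
}
with open('keyval', 'a', encoding='utf-8') as keyval_file:
    for key, value in example_keyvals.items():
        keyval_file.write('%s=%s\n' % (key, value))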
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_03
new file mode 100644
index 0000000..2977923
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4998
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_04
new file mode 100644
index 0000000..5794d99
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4997
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_05
new file mode 100644
index 0000000..659088c
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4996
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
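The control files in this change differ only in NAME, PRIORITY, the Tast test expression, and the test_iteration keyval. A templating loop along the following lines could stamp them out; this is purely an illustrative sketch, not a generator that ships with this change.

# Illustrative only: emits the header fields that vary between the
# per-iteration control files above.
TEMPLATE_HEADER = """AUTHOR = 'abergman, chromeos-engprod-platform-syd'
NAME = '%(name)s_%(iteration)02d'
ATTRIBUTES = 'suite:performance_cuj'
PRIORITY = %(priority)d
"""

def emit_controls(test_expr, base_priority, iterations=10):
    name = test_expr.replace('.', '_')
    for i in range(1, iterations + 1):
        filename = 'control.performance_cuj_%s_%02d' % (name, i)
        print('# ---- %s ----' % filename)
        print(TEMPLATE_HEADER % {
            'name': name,
            'iteration': i,
            'priority': base_priority - (i - 1),
        })

emit_controls('ui.GoogleMeetCUJ.basic_two', base_priority=5000)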
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_06
new file mode 100644
index 0000000..933df64
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4995
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_07
new file mode 100644
index 0000000..ea84e82
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4994
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_08
new file mode 100644
index 0000000..e4f0eda
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4993
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_09
new file mode 100644
index 0000000..588e782
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4992
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means this is not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_10
new file mode 100644
index 0000000..79fa79f
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_basic_two_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_basic_two_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4991
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.basic_two'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.basic_two'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_01
new file mode 100644
index 0000000..82a5776
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4950
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_02
new file mode 100644
index 0000000..f5c202e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4949
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_03
new file mode 100644
index 0000000..fdaf9c8
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4948
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_04
new file mode 100644
index 0000000..81f9c41
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4947
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_05
new file mode 100644
index 0000000..18c7013
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4946
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_06
new file mode 100644
index 0000000..bb07cb1
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4945
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_07
new file mode 100644
index 0000000..83009b9
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4944
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_08
new file mode 100644
index 0000000..8dd66bd
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4943
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_09
new file mode 100644
index 0000000..5274679
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4942
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_10
new file mode 100644
index 0000000..15cdeb6
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_class_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_class_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4941
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_class'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_class'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_01
new file mode 100644
index 0000000..fa23ceb
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4960
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_02
new file mode 100644
index 0000000..36ddceb
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4959
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_03
new file mode 100644
index 0000000..4c0464d
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4958
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_04
new file mode 100644
index 0000000..7740ab1
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4957
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_05
new file mode 100644
index 0000000..0d237ba
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4956
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_06
new file mode 100644
index 0000000..15f4c67
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4955
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_07
new file mode 100644
index 0000000..9622489
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4954
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_08
new file mode 100644
index 0000000..fd019bc
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4953
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_09
new file mode 100644
index 0000000..976a2ea
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4952
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_10
new file mode 100644
index 0000000..d9566ee
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_plus_large_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_plus_large_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4951
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.plus_large'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.plus_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_01
new file mode 100644
index 0000000..18abf05
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4940
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_02
new file mode 100644
index 0000000..453949f
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4939
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_03
new file mode 100644
index 0000000..5b2f86f
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4938
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_04
new file mode 100644
index 0000000..cc93963
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4937
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_05
new file mode 100644
index 0000000..bffe5c9
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4936
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_06
new file mode 100644
index 0000000..98b2a36
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4935
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_07
new file mode 100644
index 0000000..5262126
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4934
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_08
new file mode 100644
index 0000000..43293cd
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4933
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_09
new file mode 100644
index 0000000..a474349
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4932
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_10
new file mode 100644
index 0000000..a57b581
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_GoogleMeetCUJ_premium_large_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_GoogleMeetCUJ_premium_large_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4931
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.GoogleMeetCUJ.premium_large'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.GoogleMeetCUJ.premium_large'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_01
new file mode 100644
index 0000000..805f748
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4900
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_02
new file mode 100644
index 0000000..efd77f4
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4899
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_03
new file mode 100644
index 0000000..65716df
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4898
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_04
new file mode 100644
index 0000000..467153f
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4897
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_05
new file mode 100644
index 0000000..af36c94
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4896
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_06
new file mode 100644
index 0000000..a499b40
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4895
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_07
new file mode 100644
index 0000000..bc9c776
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4894
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_08
new file mode 100644
index 0000000..49e03eb
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4893
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_09
new file mode 100644
index 0000000..4d8a12c
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4892
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_10
new file mode 100644
index 0000000..22bf820
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_unlock_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_unlock_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4891
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_unlock'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_unlock'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
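
Note: each control file hands the collected test_args to the tast wrapper through a temporary YAML vars file (job.run_test('tast', ..., varsfiles=[temp_file.name])). The sketch below shows only the dump-and-read half of that handoff, with a made-up key, so the YAML that would be handed over is visible; the tast invocation itself is omitted.

import tempfile
import yaml

test_args = {'test_version': 1, 'ui.cuj_mode': 'basic'}  # 'ui.cuj_mode' is a placeholder key.

with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
    # Same dump call as in the control files above.
    yaml.safe_dump(test_args,
                   stream=temp_file,
                   default_flow_style=False,
                   allow_unicode=True)
    temp_file.flush()
    temp_file.seek(0)
    print(temp_file.read())  # "test_version: 1\nui.cuj_mode: basic\n" (keys come out sorted)
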
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_01
new file mode 100644
index 0000000..c696a79
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4890
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_02
new file mode 100644
index 0000000..da71566
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4889
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_03
new file mode 100644
index 0000000..cac4d01
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4888
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_04
new file mode 100644
index 0000000..9f5e6b9
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4887
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_05
new file mode 100644
index 0000000..c991ba3
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4886
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_06
new file mode 100644
index 0000000..c73f80a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4885
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_07
new file mode 100644
index 0000000..669a186
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4884
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_08
new file mode 100644
index 0000000..ef3b4b2
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4883
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_09
new file mode 100644
index 0000000..7520f51
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4882
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_10
new file mode 100644
index 0000000..ef1e053
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_QuickCheckCUJ2_basic_wakeup_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_QuickCheckCUJ2_basic_wakeup_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4881
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.QuickCheckCUJ2.basic_wakeup'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.QuickCheckCUJ2.basic_wakeup'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
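
Note: the ten basic_wakeup controls above (like the basic_unlock ones before them) differ only in NAME, PRIORITY and the test_iteration label, with PRIORITY decreasing by one per iteration. The change itself contains no generator script; the sketch below is purely illustrative of that pattern, and its template is abridged to the varying fields.

# Hypothetical generator, shown only to make the naming/priority pattern explicit.
TEMPLATE = """\
NAME = '{name}_{iteration:02d}'
PRIORITY = {priority}
# ... rest of the control file body, with test_iteration = '{iteration}' ...
"""

def emit_controls(tast_test, base_priority, iterations=10):
    """Yield (filename, contents) pairs mirroring the scheme used above."""
    name = tast_test.replace('.', '_')  # ui.QuickCheckCUJ2.basic_wakeup -> ui_QuickCheckCUJ2_basic_wakeup
    for i in range(1, iterations + 1):
        filename = 'control.performance_cuj_%s_%02d' % (name, i)
        contents = TEMPLATE.format(name=name, iteration=i,
                                   priority=base_priority - (i - 1))
        yield filename, contents

for fname, _ in emit_controls('ui.QuickCheckCUJ2.basic_wakeup', 4890, iterations=3):
    print(fname)
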
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_01
new file mode 100644
index 0000000..2838283
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4930
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file passed to the tast wrapper via varsfiles.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_02
new file mode 100644
index 0000000..3d7a6c2
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4929
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; this means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
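Every run() in these control files serializes test_args into a temporary YAML varsfile and hands its path to the tast wrapper through varsfiles. A minimal sketch of just that step, assuming PyYAML is available (it is already imported by the control files); the extra key is hypothetical.

import tempfile
import yaml

test_args = {'test_version': 1, 'example_override': 'value'}

with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8',
                                 suffix='.yaml') as temp_file:
    # Same dump settings as the control files above.
    yaml.safe_dump(test_args, stream=temp_file,
                   default_flow_style=False, allow_unicode=True)
    temp_file.seek(0)
    print(temp_file.read())  # e.g. "example_override: value\ntest_version: 1\n"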
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_03
new file mode 100644
index 0000000..9efa908
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4928
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
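report_host_info() records the test name, the iteration number and the DUT attributes as Autotest keyvals via utils.write_keyval. The snippet below is a simplified stand-in, only to illustrate the flat key=value lines that end up in the job's keyval file; the directory and values are illustrative.

import os

def write_keyval_sketch(resultdir, mapping):
    # Append flat key=value pairs to a "keyval" file in the result directory.
    with open(os.path.join(resultdir, 'keyval'), 'a') as keyval_file:
        for key, value in sorted(mapping.items()):
            keyval_file.write('%s=%s\n' % (key, value))

write_keyval_sketch('/tmp', {'test': 'ui.TabSwitchCUJ2.basic_noproxy',
                             'test_iteration': '3'})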
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_04
new file mode 100644
index 0000000..3eeed6f
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4927
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
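stage_config() looks up the DUT's cros-version label through labellib.LabelsMapping to decide which build to stage artifacts against. A simplified illustration of that keyed-label lookup, assuming labels of the form "name:value"; the example labels are hypothetical and labellib itself handles more cases.

def labels_to_mapping(labels):
    # Keyed labels look like "name:value"; plain labels are ignored here.
    mapping = {}
    for label in labels:
        if ':' in label:
            name, value = label.split(':', 1)
            mapping[name] = value
    return mapping

labels = ['board:eve', 'cros-version:eve-release/R96-14268.0.0']
print(labels_to_mapping(labels).get('cros-version'))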
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_05
new file mode 100644
index 0000000..ed2c1dc
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4926
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_06
new file mode 100644
index 0000000..fe3eb16
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4925
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_07
new file mode 100644
index 0000000..7facc3a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4924
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_08
new file mode 100644
index 0000000..a06da73
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4923
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_09
new file mode 100644
index 0000000..43ce7ff
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4922
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_10
new file mode 100644
index 0000000..e09c2e4
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_basic_noproxy_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_basic_noproxy_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4921
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.basic_noproxy'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.basic_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
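The basic_noproxy control files above and the plus_noproxy files that follow are identical except for NAME, PRIORITY, DEPENDENCIES, the Tast test expression and the iteration label. A purely illustrative sketch of stamping such per-iteration headers out of a template; whatever generator tooling is actually used for these files may differ.

from string import Template

CONTROL_SNIPPET = Template("NAME = '${test}_${iteration}'\n"
                           "PRIORITY = ${priority}\n"
                           "DEPENDENCIES = '${deps}'\n")

def render(test, deps, base_priority, iterations):
    # Priorities count down by one per iteration, as in the files here.
    for i in range(1, iterations + 1):
        yield CONTROL_SNIPPET.substitute(test=test,
                                         iteration='%02d' % i,
                                         priority=base_priority - i + 1,
                                         deps=deps)

for snippet in render('ui_TabSwitchCUJ2_plus_noproxy', 'plus', 4920, 2):
    print(snippet)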
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_01
new file mode 100644
index 0000000..aa90fd7
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4920
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_02
new file mode 100644
index 0000000..fdb5203
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4919
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_03
new file mode 100644
index 0000000..93794aa
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4918
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_04
new file mode 100644
index 0000000..090bc04
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4917
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_05
new file mode 100644
index 0000000..c5ce898
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4916
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                          archive_url = archive_url,
+                          files = [config_file])
+      except Exception as e:
+          logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host=hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_06
new file mode 100644
index 0000000..a1b7988
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4915
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_07
new file mode 100644
index 0000000..53287bf
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4914
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_08
new file mode 100644
index 0000000..befa559
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4913
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_09
new file mode 100644
index 0000000..60cbb42
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4912
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_10
new file mode 100644
index 0000000..28bfc6a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_plus_noproxy_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_plus_noproxy_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4911
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'plus'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.plus_noproxy'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.plus_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_01
new file mode 100644
index 0000000..9d04004
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4910
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_02
new file mode 100644
index 0000000..9403b0b
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4909
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_03
new file mode 100644
index 0000000..aef2d9a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4908
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_04
new file mode 100644
index 0000000..7ee0e29
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4907
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_05
new file mode 100644
index 0000000..8326f0e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4906
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_06
new file mode 100644
index 0000000..8bd0fd8
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4905
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_07
new file mode 100644
index 0000000..5fc046f
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4904
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_08
new file mode 100644
index 0000000..8bccbdf
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4903
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Writing all test arguments to yaml file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_09
new file mode 100644
index 0000000..b3c5c4c
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4902
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
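+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).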
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_10
new file mode 100644
index 0000000..8a66045
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_TabSwitchCUJ2_premium_noproxy_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_TabSwitchCUJ2_premium_noproxy_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4901
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.TabSwitchCUJ2.premium_noproxy'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
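+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).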
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.TabSwitchCUJ2.premium_noproxy'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_01
new file mode 100644
index 0000000..124c018
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4860
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
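+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).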
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_02
new file mode 100644
index 0000000..d90f169
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4859
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
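+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).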
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_03
new file mode 100644
index 0000000..32577ac
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4858
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
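+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).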
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_04
new file mode 100644
index 0000000..c2e0169
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4857
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
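+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).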
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_05
new file mode 100644
index 0000000..6ad161a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4856
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
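+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).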
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_06
new file mode 100644
index 0000000..307f87e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4855
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
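+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).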
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_07
new file mode 100644
index 0000000..8e12b66
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4854
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
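+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).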
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_08
new file mode 100644
index 0000000..f5e152b
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4853
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
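+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).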
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_09
new file mode 100644
index 0000000..a43cebc
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4852
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
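+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).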
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_10
new file mode 100644
index 0000000..b3ccf98
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_app_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_app_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4851
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_app'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
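+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).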
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_01
new file mode 100644
index 0000000..08c0b2d
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4880
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
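+# Stage the perf_cuj.config file from the image storage bucket onto the
+# devserver and merge its values into test_args. Skipped when the host has
+# no cros-version label (i.e. when not running on Moblab).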
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build; not running on Moblab, so skip config staging.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML vars file for the Tast run.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_02
new file mode 100644
index 0000000..73280a1
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4879
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_03
new file mode 100644
index 0000000..02db663
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4878
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_04
new file mode 100644
index 0000000..dfde373
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4877
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_05
new file mode 100644
index 0000000..07efc76
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4876
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_06
new file mode 100644
index 0000000..30fdda5
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4875
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_07
new file mode 100644
index 0000000..e71d7ff
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4874
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_08
new file mode 100644
index 0000000..0dcbdae
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4873
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_09
new file mode 100644
index 0000000..f1d6706
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4872
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_10
new file mode 100644
index 0000000..3d3d20c
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_basic_youtube_web_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_basic_youtube_web_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4871
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = ''
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.basic_youtube_web'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.basic_youtube_web'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_01
new file mode 100644
index 0000000..b8c8d34
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4850
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_02
new file mode 100644
index 0000000..e3055d2
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4849
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_03
new file mode 100644
index 0000000..d495b90
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4848
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_04
new file mode 100644
index 0000000..96987e6
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4847
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Not able to detect build, means not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+    host = hosts.create_host(machine)
+    report_host_info(host)
+    stage_config(host)
+
+    # Write all test arguments to a YAML file.
+    yaml.safe_dump(test_args,
+                   stream=temp_file,
+                   default_flow_style=False,
+                   allow_unicode=True)
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                 clear_tpm=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 command_args=args,
+                 varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_05
new file mode 100644
index 0000000..987807a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4846
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_06
new file mode 100644
index 0000000..504172d
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4845
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_07
new file mode 100644
index 0000000..6a8898e
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4844
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_08
new file mode 100644
index 0000000..f3c0b63
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4843
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_09
new file mode 100644
index 0000000..7abb9c1
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4842
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_10
new file mode 100644
index 0000000..8296bb4
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_app_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_app_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4841
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_app'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_app'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_01 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_01
new file mode 100644
index 0000000..fc5f474
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_01
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web_01'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4870
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '1'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_02 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_02
new file mode 100644
index 0000000..a291982
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_02
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web_02'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4869
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '2'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_03 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_03
new file mode 100644
index 0000000..448c70a
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_03
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web_03'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4868
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '3'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_04 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_04
new file mode 100644
index 0000000..1c28f80
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_04
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web_04'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4867
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '4'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_05 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_05
new file mode 100644
index 0000000..f237425
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_05
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web_05'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4866
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '5'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_06 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_06
new file mode 100644
index 0000000..c364bec
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_06
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web_06'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4865
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '6'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_07 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_07
new file mode 100644
index 0000000..46c6a2f
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_07
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web_07'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4864
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '7'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  config_vars = json.loads(response.read())
+  for key in config_vars:
+    test_args[key] = config_vars[key]
+  logging.info('Read %d values from remote configuration.', len(config_vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a temporary YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_08 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_08
new file mode 100644
index 0000000..a697eb0
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_08
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web_08'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4863
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '8'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_09 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_09
new file mode 100644
index 0000000..aedd188
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_09
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web_09'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4862
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '9'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_10 b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_10
new file mode 100644
index 0000000..c5aada6
--- /dev/null
+++ b/server/site_tests/platform_MTBF/control.performance_cuj_ui_VideoCUJ2_premium_youtube_web_10
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = 'ui_VideoCUJ2_premium_youtube_web_10'
+ATTRIBUTES = 'suite:performance_cuj'
+TIME = 'long'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = 4861
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = 'premium'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = 1
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = 'ui.VideoCUJ2.premium_youtube_web'
+  labels['test_iteration'] = '10'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['ui.VideoCUJ2.premium_youtube_web'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec=3600,
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_MTBF/generate_performance_cuj.py b/server/site_tests/platform_MTBF/generate_performance_cuj.py
new file mode 100644
index 0000000..82994d2
--- /dev/null
+++ b/server/site_tests/platform_MTBF/generate_performance_cuj.py
@@ -0,0 +1,111 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+USAGE: python generate_performance_cuj.py
+
+Generates all the control files required to run the Tast-based performance
+critical user journey (CUJ) test cases.
+
+Tests are executed in a predefined order. Each test can be repeated a number of
+times according to its 'repeats' value in the TESTS list below.
+"""
+import os
+
+VERSION = 1
+
+HOUR_IN_SECS = 60 * 60
+DEFAULT_TEST_DURATION = 1 * HOUR_IN_SECS
+
+SUITE = 'performance_cuj'
+TEMPLATE_FILE = 'template.control.performance_cuj'
+TEST_PREFIX = 'ui.'
+
+# The following tests will be included in the generated test suite.
+TESTS = [{
+        'test': 'Tab Switch Basic Noproxy',
+        'tast_name': 'TabSwitchCUJ2.basic_noproxy',
+        'repeats': 3
+}, {
+        'test': 'Tab Switch Plus Noproxy',
+        'tast_name': 'TabSwitchCUJ2.plus_noproxy',
+        'repeats': 3
+}, {
+        'test': 'Tab Switch Premium Noproxy',
+        'tast_name': 'TabSwitchCUJ2.premium_noproxy',
+        'repeats': 3
+}, {
+        'test': 'Google Meet Basic 2',
+        'tast_name': 'GoogleMeetCUJ.basic_two',
+        'repeats': 3
+}, {
+        'test': 'Google Meet Basic Small',
+        'tast_name': 'GoogleMeetCUJ.basic_small',
+        'repeats': 3
+}, {
+        'test': 'Google Meet Basic Large',
+        'tast_name': 'GoogleMeetCUJ.basic_large',
+        'repeats': 3
+}, {
+        'test': 'Google Meet Basic Class',
+        'tast_name': 'GoogleMeetCUJ.basic_class',
+        'repeats': 3
+}, {
+        'test': 'Google Meet Plus Large',
+        'tast_name': 'GoogleMeetCUJ.plus_large',
+        'repeats': 3
+}, {
+        'test': 'Google Meet Plus Class',
+        'tast_name': 'GoogleMeetCUJ.plus_class',
+        'repeats': 3
+}, {
+        'test': 'Google Meet Premium Large',
+        'tast_name': 'GoogleMeetCUJ.premium_large',
+        'repeats': 3
+}, {
+        'test': 'Quick Check Basic Unlock',
+        'tast_name': 'QuickCheckCUJ2.basic_unlock',
+        'repeats': 3
+}, {
+        'test': 'Quick Check Basic Wakeup',
+        'tast_name': 'QuickCheckCUJ2.basic_wakeup',
+        'repeats': 3
+}]
+
+
+def _write_control_file(name, contents):
+    with open(name, 'w') as f:
+        f.write(contents)
+
+
+def _read_template_file(filename):
+    with open(filename) as f:
+        return f.read()
+
+
+template = _read_template_file(
+        os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                     TEMPLATE_FILE))
+
+# Starting priority; decremented by 1 for each generated control file.
+priority = 500
+
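+# For example, the first repeat of 'TabSwitchCUJ2.basic_noproxy' yields a file
+# named control.performance_cuj_TabSwitchCUJ2.basic_noproxy_01 (illustrative;
+# the actual names follow from the loop below).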
+for test in TESTS:
+    for i in range(int(test['repeats'])):
+        test_name = (test['tast_name'] + '_{index:02n}').format(index=i + 1)
+        control_file = template.format(
+                name=test_name,
+                priority=priority,
+                duration=DEFAULT_TEST_DURATION,
+                test_exprs=TEST_PREFIX + test['tast_name'],
+                length='long',
+                version=VERSION,
+                attributes='suite:' + SUITE,
+        )
+        control_file_name = 'control.' + '_'.join([SUITE, test_name])
+        _write_control_file(control_file_name, control_file)
+        priority = priority - 1
diff --git a/server/site_tests/platform_MTBF/generate_tests.py b/server/site_tests/platform_MTBF/generate_tests.py
new file mode 100644
index 0000000..330f10b
--- /dev/null
+++ b/server/site_tests/platform_MTBF/generate_tests.py
@@ -0,0 +1,208 @@
+# Lint as: python2, python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Script to generate Tauto test wrappers based on JSON configuration.
+
+USAGE: python generate_tests.py <config_file.json> [suite]
+
+This script generates control files that wrap every Tast test listed in the
+configuration JSON file in a Tauto test case. No Tauto suite files are
+generated; these are assumed to be added manually.
+
+The configuration file may contain multiple suites, in which case tests for all
+suites are generated, unless the [suite] argument is present, in which case
+only that suite is regenerated.
+
+The configuration file is validated against the schema in config_schema.yaml.
+The schema file must be located in the same folder as this script.
+"""
+
+import copy
+import json
+import os
+import sys
+from jsonschema import validate
+import yaml
+
+SCHEMA_FILE = 'config_schema.yaml'
+TEST_TEMPLATE_FILE = 'template.control.performance_cuj'
+
+# The priority of the first test. Decremented by 1 for each subsequent test.
+INITIAL_PRIORITY = 5000
+
+# Max duration of a single test.
+HOUR_IN_SECS = 60 * 60
+DEFAULT_TEST_DURATION = 1 * HOUR_IN_SECS
+
+
+def _get_absolute_path(local_file):
+    return os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                        local_file)
+
+
+def _load_json_config(config_path):
+    with open(_get_absolute_path(config_path), 'r') as config_file:
+        return json.load(config_file)
+
+
+def _validate_config_schema(json_config):
+    # Load the schema file from the directory of this script.
+    with open(_get_absolute_path(SCHEMA_FILE), 'r') as schema_file:
+        schema = yaml.safe_load(schema_file)
+
+    validate(json_config, schema)
+
+
+def _parse_constants(json_config):
+    consts = dict()
+    if 'const' in json_config:
+        for c in json_config['const']:
+            consts[c['name']] = c['value']
+    return consts
+
+
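+# Constants are referenced as $NAME$ inside test fields. For example, with a
+# hypothetical constant {'board': 'eve'}, the value 'ui.TabSwitchCUJ2.$board$'
+# would be substituted to 'ui.TabSwitchCUJ2.eve'.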
+def _substitute_constants(val, constants):
+    for const in constants:
+        val = val.replace('$' + const + '$', constants[const])
+    return val
+
+
+def _parse_tests(json_config, constants):
+    tests = []
+    for test in json_config['tests']:
+        new_test = copy.deepcopy(test)
+        # Substitute constants in all fields of the test.
+        new_test['name'] = _substitute_constants(new_test['name'], constants)
+        new_test['test_expr'] = _substitute_constants(new_test['test_expr'],
+                                                      constants)
+        if 'args' in new_test:
+            new_args = []
+            for arg in new_test['args']:
+                new_args.append(_substitute_constants(arg, constants))
+            new_test['args'] = new_args
+        if 'attributes' in new_test:
+            new_attrs = []
+            for attr in new_test['attributes']:
+                new_attrs.append(_substitute_constants(attr, constants))
+            new_test['attributes'] = new_attrs
+        if 'deps' in new_test:
+            new_deps = []
+            for dep in new_test['deps']:
+                new_deps.append(_substitute_constants(dep, constants))
+            new_test['deps'] = new_deps
+        tests.append(new_test)
+    return tests
+
+
+def _find_test(test_name, tests):
+    for test in tests:
+        if test['name'] == test_name:
+            return test
+    return None
+
+
+def _parse_suites(json_config, tests, constants):
+    suites = []
+    for suite in json_config['suites']:
+        new_suite = copy.deepcopy(suite)
+        new_suite['name'] = _substitute_constants(new_suite['name'], constants)
+        if 'args_file' in new_suite:
+            new_suite['args_file'] = _substitute_constants(
+                    new_suite['args_file'], constants)
+        if 'args' in new_suite:
+            new_args = []
+            for arg in new_suite['args']:
+                new_args.append(_substitute_constants(arg, constants))
+            new_suite['args'] = new_args
+        for test in new_suite['tests']:
+            if not _find_test(test['test'], tests):
+                raise Exception(
+                        'Test %s (requested by suite %s) is not defined.' %
+                        (test['test'], new_suite['name']))
+            test['test'] = _substitute_constants(test['test'], constants)
+        suites.append(new_suite)
+    return suites
+
+
+def _read_file(filename):
+    with open(filename, 'r') as content_file:
+        return content_file.read()
+
+
+def _write_file(filename, data):
+    with open(filename, 'w') as out_file:
+        out_file.write(data)
+
+
+def _normalize_test_name(test_name):
+    return test_name.replace('.', '_').replace('*', '_')
+
+
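+# For example, _calculate_suffix(3, 10) returns '_03' (two digits because the
+# test repeats 10 times), while _calculate_suffix(1, 1) returns ''.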
+def _calculate_suffix(current_index, repeats):
+    # No suffix for single tests.
+    if repeats == 1:
+        return ''
+    # Number of suffix digits depends on the total repeat count.
+    digits = len(str(repeats))
+    format_string = ('_{{index:0{digits}n}}').format(digits=digits)
+    return format_string.format(index=current_index)
+
+
+def _generate_test_files(version, suites, tests, suite_name=None):
+    template = _read_file(_get_absolute_path(TEST_TEMPLATE_FILE))
+    for suite in suites:
+        priority = INITIAL_PRIORITY
+        if suite_name and suite['name'] != suite_name:
+            continue
+        for test in suite['tests']:
+            test_data = _find_test(test['test'], tests)
+            repeats = test['repeats']
+            deps = []
+            if 'deps' in test_data:
+                deps = test_data['deps']
+            for i in range(repeats):
+                test_name = _normalize_test_name(
+                        test_data['test_expr'] +
+                        _calculate_suffix(i + 1, repeats))
+                control_file = template.format(
+                        name=test_name,
+                        priority=priority,
+                        duration=DEFAULT_TEST_DURATION,
+                        test_exprs=test_data['test_expr'],
+                        length='long',
+                        version=version,
+                        attributes='suite:' + suite['name'],
+                        dependencies=', '.join(deps),
+                        iteration=i + 1,
+                )
+                control_file_name = 'control.' + '_'.join(
+                        [suite['name'], test_name])
+                _write_file(control_file_name, control_file)
+                priority = priority - 1
+
+
+def main(argv):
+    """Main program that parses JSON configuration and generates test wrappers."""
+    if not argv or len(argv) < 2 or len(argv) > 3:
+        raise Exception(
+                'Invalid command-line arguments. Usage: python generate_tests.py <config_file.json> [suite]'
+        )
+
+    suite_name = None
+    if len(argv) == 3:
+        suite_name = argv[2]
+
+    # Load and validate the config JSON file.
+    json_config = _load_json_config(argv[1])
+    _validate_config_schema(json_config)
+
+    version = json_config['version']
+    constants = _parse_constants(json_config)
+    tests = _parse_tests(json_config, constants)
+    suites = _parse_suites(json_config, tests, constants)
+    _generate_test_files(version, suites, tests, suite_name)
+
+
+if __name__ == '__main__':
+    sys.exit(main(sys.argv))
diff --git a/server/site_tests/platform_MTBF/spera_config.json b/server/site_tests/platform_MTBF/spera_config.json
new file mode 100644
index 0000000..9cb184b
--- /dev/null
+++ b/server/site_tests/platform_MTBF/spera_config.json
@@ -0,0 +1,310 @@
+{
+  "version": 1,
+  "tests": [
+    {
+      "name": "meet-basic-2",
+      "tool": "tast",
+      "test_expr": "ui.GoogleMeetCUJ.basic_two",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "meet-basic-small",
+      "tool": "tast",
+      "test_expr": "ui.GoogleMeetCUJ.basic_small",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "meet-basic-large",
+      "tool": "tast",
+      "test_expr": "ui.GoogleMeetCUJ.basic_large",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "meet-basic-class",
+      "tool": "tast",
+      "test_expr": "ui.GoogleMeetCUJ.basic_class",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "meet-plus-large",
+      "tool": "tast",
+      "test_expr": "ui.GoogleMeetCUJ.plus_large",
+      "attributes": ["plus"],
+      "deps": ["plus"]
+    },
+    {
+      "name": "meet-plus-class",
+      "tool": "tast",
+      "test_expr": "ui.GoogleMeetCUJ.plus_class",
+      "attributes": ["plus"],
+      "deps": ["plus"]
+    },
+    {
+      "name": "meet-premium-large",
+      "tool": "tast",
+      "test_expr": "ui.GoogleMeetCUJ.premium_large",
+      "attributes": ["premium"],
+      "deps": ["premium"]
+    },
+    {
+      "name": "tabswitch-basic-noproxy",
+      "tool": "tast",
+      "test_expr": "ui.TabSwitchCUJ2.basic_noproxy",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "tabswitch-plus-noproxy",
+      "tool": "tast",
+      "test_expr": "ui.TabSwitchCUJ2.plus_noproxy",
+      "attributes": ["plus"],
+      "deps": ["plus"]
+    },
+    {
+      "name": "tabswitch-premium-noproxy",
+      "tool": "tast",
+      "test_expr": "ui.TabSwitchCUJ2.premium_noproxy",
+      "attributes": ["premium"],
+      "deps": ["premium"]
+    },
+    {
+      "name": "quick-check-unlock",
+      "tool": "tast",
+      "test_expr": "ui.QuickCheckCUJ2.basic_unlock",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "quick-check-wakeup",
+      "tool": "tast",
+      "test_expr": "ui.QuickCheckCUJ2.basic_wakeup",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "video-basic-youtube-web",
+      "tool": "tast",
+      "test_expr": "ui.VideoCUJ2.basic_youtube_web",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "video-premium-youtube-web",
+      "tool": "tast",
+      "test_expr": "ui.VideoCUJ2.premium_youtube_web",
+      "attributes": ["premium"],
+      "deps": ["premium"]
+    },
+    {
+      "name": "video-basic-youtube-app",
+      "tool": "tast",
+      "test_expr": "ui.VideoCUJ2.basic_youtube_app",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "video-premium-youtube-app",
+      "tool": "tast",
+      "test_expr": "ui.VideoCUJ2.premium_youtube_app",
+      "attributes": ["premium"],
+      "deps": ["premium"]
+    },
+    {
+      "name": "everyday-basic-ytmusic",
+      "tool": "tast",
+      "test_expr": "ui.EverydayMultiTaskingCUJ.basic_ytmusic",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "everyday-plus-ytmusic",
+      "tool": "tast",
+      "test_expr": "ui.EverydayMultiTaskingCUJ.plus_ytmusic",
+      "attributes": ["plus"],
+      "deps": ["plus"]
+    },
+    {
+      "name": "extended-display-meet",
+      "tool": "tast",
+      "test_expr": "ui.ExtendedDisplayCUJ.premium_meet_large",
+      "attributes": ["premium"],
+      "deps": ["external_display", "premium"]
+    },
+    {
+      "name": "extended-display-video",
+      "tool": "tast",
+      "test_expr": "ui.ExtendedDisplayCUJ.plus_video_youtube_web",
+      "attributes": ["plus"],
+      "deps": ["external_display", "plus"]
+    },
+    {
+      "name": "zoom-basic-two",
+      "tool": "tast",
+      "test_expr": "ui.ZoomConfCUJ.basic_two",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "zoom-basic-small",
+      "tool": "tast",
+      "test_expr": "ui.ZoomConfCUJ.basic_small",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "zoom-basic-large",
+      "tool": "tast",
+      "test_expr": "ui.ZoomConfCUJ.basic_large",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "zoom-basic-class",
+      "tool": "tast",
+      "test_expr": "ui.ZoomConfCUJ.basic_class",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "zoom-plus-large",
+      "tool": "tast",
+      "test_expr": "ui.ZoomConfCUJ.plus_large",
+      "attributes": ["plus"],
+      "deps": ["plus"]
+    },
+    {
+      "name": "zoom-plus-class",
+      "tool": "tast",
+      "test_expr": "ui.ZoomConfCUJ.plus_class",
+      "attributes": ["plus"],
+      "deps": ["plus"]
+    },
+    {
+      "name": "zoom-premium-large",
+      "tool": "tast",
+      "test_expr": "ui.ZoomConfCUJ.premium_large",
+      "attributes": ["premium"],
+      "deps": ["premium"]
+    },
+    {
+      "name": "gdocs-basic",
+      "tool": "tast",
+      "test_expr": "ui.GoogleDocsWebCUJ.basic",
+      "attributes": ["basic"]
+    },
+    {
+      "name": "gdocs-premium",
+      "tool": "tast",
+      "test_expr": "ui.GoogleDocsWebCUJ.premium",
+      "attributes": ["premium"]
+    },
+    {
+      "name": "msoffice-plus",
+      "tool": "tast",
+      "test_expr": "ui.MicrosoftOfficeWebCUJ.plus",
+      "attributes": ["plus"]
+    },
+    {
+      "name": "msoffice-premium",
+      "tool": "tast",
+      "test_expr": "ui.MicrosoftOfficeWebCUJ.premium",
+      "attributes": ["premium"]
+    },
+    {
+      "name": "geekbench-public-android",
+      "tool": "tast",
+      "test_expr": "ui.GeekbenchPublicAndroidApp",
+      "attributes": ["benchmark", "basic"]
+    },
+    {
+      "name": "lmbench",
+      "tool": "tast",
+      "test_expr": "ui.LMbench",
+      "attributes": ["benchmark", "basic"]
+    },
+    {
+      "name": "crxprt2",
+      "tool": "tast",
+      "test_expr": "ui.CRXPRT2",
+      "attributes": ["benchmark", "basic"]
+    }
+  ],
+  "suites": [
+    {
+      "name": "performance_cuj_experimental",
+      "args_file": "perf_cuj.config",
+      "tests": [
+        {"test": "meet-basic-2", "repeats": 1},
+        {"test": "meet-basic-small", "repeats": 1},
+        {"test": "meet-basic-large", "repeats": 1},
+        {"test": "meet-basic-class", "repeats": 1},
+        {"test": "meet-plus-large", "repeats": 1},
+        {"test": "meet-plus-class", "repeats": 1},
+        {"test": "meet-premium-large", "repeats": 1},
+        {"test": "tabswitch-basic-noproxy", "repeats": 1},
+        {"test": "tabswitch-plus-noproxy", "repeats": 1},
+        {"test": "tabswitch-premium-noproxy", "repeats": 1},
+        {"test": "quick-check-unlock", "repeats": 1},
+        {"test": "quick-check-wakeup", "repeats": 1},
+        {"test": "video-basic-youtube-web", "repeats": 1},
+        {"test": "video-premium-youtube-web", "repeats": 1},
+        {"test": "video-basic-youtube-app", "repeats": 1},
+        {"test": "video-premium-youtube-app", "repeats": 1},
+        {"test": "everyday-basic-ytmusic", "repeats": 1},
+        {"test": "everyday-plus-ytmusic", "repeats": 1},
+        {"test": "extended-display-meet", "repeats": 1},
+        {"test": "extended-display-video", "repeats": 1},
+        {"test": "zoom-basic-two", "repeats": 1},
+        {"test": "zoom-basic-small", "repeats": 1},
+        {"test": "zoom-basic-large", "repeats": 1},
+        {"test": "zoom-basic-class", "repeats": 1},
+        {"test": "zoom-plus-large", "repeats": 1},
+        {"test": "zoom-plus-class", "repeats": 1},
+        {"test": "zoom-premium-large", "repeats": 1},
+        {"test": "gdocs-basic", "repeats": 1},
+        {"test": "gdocs-premium", "repeats": 1},
+        {"test": "msoffice-plus", "repeats": 1},
+        {"test": "msoffice-premium", "repeats": 1}
+      ]
+    },
+    {
+      "name": "performance_cuj_quick",
+      "args_file": "perf_cuj.config",
+      "tests": [
+        {"test": "meet-basic-2", "repeats": 1},
+        {"test": "meet-plus-large", "repeats": 1},
+        {"test": "meet-premium-large", "repeats": 1},
+        {"test": "tabswitch-basic-noproxy", "repeats": 1},
+        {"test": "quick-check-unlock", "repeats": 1},
+        {"test": "quick-check-wakeup", "repeats": 1},
+        {"test": "video-basic-youtube-web", "repeats": 1},
+        {"test": "everyday-basic-ytmusic", "repeats": 1},
+        {"test": "extended-display-meet", "repeats": 1}
+      ]
+    },
+    {
+      "name": "performance_cuj",
+      "args_file": "perf_cuj.config",
+      "tests": [
+        {"test": "meet-basic-2", "repeats": 10},
+        {"test": "meet-basic-small", "repeats": 10},
+        {"test": "meet-basic-large", "repeats": 10},
+        {"test": "meet-basic-class", "repeats": 10},
+        {"test": "meet-plus-large", "repeats": 10},
+        {"test": "meet-plus-class", "repeats": 10},
+        {"test": "meet-premium-large", "repeats": 10},
+        {"test": "tabswitch-basic-noproxy", "repeats": 10},
+        {"test": "tabswitch-plus-noproxy", "repeats": 10},
+        {"test": "tabswitch-premium-noproxy", "repeats": 10},
+        {"test": "quick-check-unlock", "repeats": 10},
+        {"test": "quick-check-wakeup", "repeats": 10},
+        {"test": "video-basic-youtube-web", "repeats": 10},
+        {"test": "video-premium-youtube-web", "repeats": 10},
+        {"test": "video-basic-youtube-app", "repeats": 10},
+        {"test": "video-premium-youtube-app", "repeats": 10},
+        {"test": "everyday-basic-ytmusic", "repeats": 10},
+        {"test": "everyday-plus-ytmusic", "repeats": 10},
+        {"test": "extended-display-meet", "repeats": 10}
+      ]
+    },
+    {
+      "name": "performance_cuj_benchmarks",
+      "args_file": "perf_cuj.config",
+      "tests": [
+        {"test": "geekbench-public-android", "repeats": 2},
+        {"test": "lmbench", "repeats": 2},
+        {"test": "crxprt2", "repeats": 2}
+      ]
+    }
+  ]
+}
diff --git a/server/site_tests/platform_MTBF/template.control.performance_cuj b/server/site_tests/platform_MTBF/template.control.performance_cuj
new file mode 100644
index 0000000..85f6575
--- /dev/null
+++ b/server/site_tests/platform_MTBF/template.control.performance_cuj
@@ -0,0 +1,111 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
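+# Note: this file is a str.format template; the placeholders (name, attributes,
+# length, priority, dependencies, version, test_exprs, iteration, duration) are
+# filled in by generate_tests.py in this directory.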
+AUTHOR = 'abergman, chromeos-engprod-platform-syd'
+NAME = '{name}'
+ATTRIBUTES = '{attributes}'
+TIME = '{length}'
+TEST_CATEGORY = 'Stress'
+TEST_CLASS = 'Hardware'
+TEST_TYPE = 'Server'
+PRIORITY = {priority}
+MAX_RESULT_SIZE_KB = 1024 * 1024
+JOB_RETRIES = 5
+REQUIRE_SSP = True
+DEPENDENCIES = '{dependencies}'
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast-based MTBF performance CUJ test.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+import common
+import json
+import logging
+import tempfile
+from six.moves import urllib
+import yaml
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.cros import dev_server
+from autotest_lib.site_utils.deployment.prepare import dut
+from autotest_lib.utils import labellib
+
+test_args = dict()
+test_args['test_version'] = {version}
+
+def report_host_info(host):
+  labels = labellib.LabelsMapping(host.host_info_store.get().labels)
+  labels['test'] = '{test_exprs}'
+  labels['test_iteration'] = '{iteration}'
+  utils.write_keyval(job.resultdir, labels)
+  # Try to retrieve and report DUT HWID and serial number.
+  try:
+    dut.setup_hwid_and_serialnumber(host)
+    logging.info("Host info store: %s", host.host_info_store.get())
+    utils.write_keyval(job.resultdir, host.host_info_store.get().attributes)
+  except Exception as e:
+    logging.warning("Failed retrieving DUT host info: %s", e)
+
+def parse_config(config_url):
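+  # The remote configuration is assumed to be a flat JSON object of key/value
+  # pairs; its entries are merged into test_args and passed to the Tast test
+  # through the generated vars file.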
+  response = urllib.request.urlopen(config_url)
+  vars = json.loads(response.read())
+  for key in vars:
+    test_args[key] = vars[key]
+  logging.info('Read %d values from remote configuration.', len(vars))
+
+def stage_config(host):
+  devservers = dev_server.ImageServer.get_available_devservers()
+  devserver_url = devservers[0][0]
+  if devserver_url:
+    logging.info('Using devserver: %s', devserver_url)
+    labels = host.host_info_store.get().labels
+    build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+    if not build:
+      # Unable to detect the build, which means we are not running on Moblab.
+      return
+    ds = dev_server.ImageServer(devserver_url)
+    gs_bucket = dev_server._get_image_storage_server()
+    if gs_bucket:
+      config_path = 'config/perf_cuj/'
+      config_file = 'perf_cuj.config'
+      archive_url = gs_bucket + config_path
+      logging.info('Staging configuration from %s.', gs_bucket)
+      try:
+        ds.stage_artifacts(build,
+                           archive_url=archive_url,
+                           files=[config_file])
+      except Exception as e:
+        logging.error('Staging artifacts failed: %s', str(e))
+      else:
+        logging.info('Parsing configuration from %s.', archive_url)
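+        # The staged file is served at <devserver>/static/config/perf_cuj/perf_cuj.config.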
+        parse_config(devserver_url + '/static/' + config_path + config_file)
+
+def run(machine):
+  with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.yaml') as temp_file:
+      host = hosts.create_host(machine)
+      report_host_info(host)
+      stage_config(host)
+
+      # Write all test arguments to a YAML file.
+      yaml.safe_dump(test_args,
+                    stream=temp_file,
+                    default_flow_style=False,
+                    allow_unicode=True)
+      job.run_test('tast',
+                  host=host,
+                  test_exprs=['{test_exprs}'],
+                  clear_tpm=False,
+                  ignore_test_failures=False,
+                  max_run_sec={duration},
+                  command_args=args,
+                  varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_Powerwash/control b/server/site_tests/platform_Powerwash/control
deleted file mode 100644
index af5c894..0000000
--- a/server/site_tests/platform_Powerwash/control
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "deymo, chromeos-installer@google.com"
-NAME = "platform_Powerwash"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:bvt-installer"
-
-DOC = """
-Tests that a device runs powerwash on the first reboot after the powerwash was
-signaled. The test verifies that a given file on the stateful partition is
-deleted during powerwash and that the powerwash counter is increased.
-
-We supply a job_repo_url to the test when running locally. In the lab this will
-be passed directly. The job_repo_url is a link to the autotest packages on a
-devserver. The test uses it to find and stage the stateful payload it needs to
-restore after performing the powerwash.
-
-To get a list of available devservers to use execute this command:
-atest server list | grep devserver
-
-Example usage:
-test_that platform_Powerwash <eureka/cros/beaglobonedevice ip> --board=<board> --args="job_repo_url=http://<devserver IP>:8082/static/<board>-release/RXX-XXXXX.X.X/autotest/packages"
-"""
-
-from autotest_lib.client.common_lib import utils
-args_dict = utils.args_to_dict(args)
-job_repo_url = args_dict.get('job_repo_url')
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_Powerwash", host=host, job_repo_url=job_repo_url,
-                 disable_sysinfo=True)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/platform_Powerwash/platform_Powerwash.py b/server/site_tests/platform_Powerwash/platform_Powerwash.py
deleted file mode 100644
index 4af64f2..0000000
--- a/server/site_tests/platform_Powerwash/platform_Powerwash.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.update_engine import update_engine_test
-
-POWERWASH_COUNT = '/mnt/stateful_partition/unencrypted/preserve/powerwash_count'
-POWERWASH_MARKER_FILE = '/mnt/stateful_partition/factory_install_reset'
-POWERWASH_COMMAND = 'safe fast keepimg'
-
-STATEFUL_MARKER_FILE = '/mnt/stateful_partition/platform_Powerwash_flag'
-
-# Log files to help debugging what happened during last clobbering (powerwash).
-CLOBBER_STATE_LOG_FILE = '/mnt/stateful_partition/unencrypted/clobber-state.log'
-CLOBBER_LOG_FILE = '/mnt/stateful_partition/unencrypted/clobber.log'
-
-
-class platform_Powerwash(update_engine_test.UpdateEngineTest):
-    """Powerwash a device."""
-    version = 1
-
-    def cleanup(self):
-        """Restore stateful so the DUT is usable by other tests."""
-        self._restore_stateful()
-
-    def run_once(self, job_repo_url=None):
-        """
-        Main function for the test.
-
-        @param job_repo_url: String used to allow the test to figure out a
-                             devserver and stateful payload to use.
-
-        """
-        # Setup the job_repo_url for this test run.
-        self._job_repo_url = self._get_job_repo_url(job_repo_url)
-
-        count_before = self._powerwash_count()
-
-        # We create a file on the stateful partition to test if it is deleted
-        # during the powerwash.
-        self._host.run('echo car > %s' % STATEFUL_MARKER_FILE)
-
-        logging.debug('Signaling powerwash on the device.')
-        self._mark_powerwash()
-        self._host.reboot()
-
-        # Check if the marker file still exists on the stateful partition.
-        # The powerwash cycle should remove it.
-        marker = self._host.run('[ -e %s ]' % STATEFUL_MARKER_FILE,
-                                ignore_status=True, ignore_timeout=True)
-
-        # If "[ -e file ]" finishes with status 0, the file is present.
-        if marker is None or marker.exit_status == 0:
-            raise error.TestFail("Powerwash cycle didn't remove the marker "
-                                 "file on the stateful partition.")
-
-        # Capture powerwash logs.
-        logging.debug('Powerwash logs: %r', self._host.run(
-                'cat %s %s 2>/dev/null' % (CLOBBER_LOG_FILE,
-                                           CLOBBER_STATE_LOG_FILE),
-                ignore_status=True).stdout.strip())
-
-        # Check the powerwash counter before and after the powerwash to verify
-        # it was incremented. This file should be preserved by the powerwash.
-        count_after = self._powerwash_count()
-        if count_after != count_before + 1:
-            raise error.TestFail("Powerwash count didn't increase after "
-                                 "powerwash cycle.")
-
-
-    def _mark_powerwash(self, command=None):
-        """Creates the Powerwash marker file on the host with the given command.
-
-        @param command: The text to include on the marker file, *not* including
-                        the '\n' at the end.
-        """
-        if command is None:
-            command = POWERWASH_COMMAND
-        self._host.run("echo '%s' > %s" % (command, POWERWASH_MARKER_FILE))
-
-
-    def _powerwash_count(self):
-        """Return the powerwash count from the DUT."""
-        count = self._host.run('test -e %s && cat %s || true' %
-                               (POWERWASH_COUNT,
-                                POWERWASH_COUNT)).stdout.strip()
-        logging.debug('Powerwash count is: %r', count)
-        if count:
-            return int(count)
-        return 0
diff --git a/server/site_tests/platform_RotationFps/control b/server/site_tests/platform_RotationFps/control
deleted file mode 100644
index 8a29b3d..0000000
--- a/server/site_tests/platform_RotationFps/control
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "chromeos-chameleon"
-NAME = "platform_RotationFps"
-PURPOSE = "Remotely controlled screen rotation test."
-CRITERIA = "This test will fail if slow fps while changing screen rotation."
-ATTRIBUTES = ""
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-DEPENDENCIES = 'chameleon, servo_state:WORKING'
-JOB_RETRIES = 2
-
-DOC = """
-This test remotely tests screen rotation function and benchmarks the fps.
-"""
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args,
-                             servo_args=servo_args)
-    job.run_test("platform_RotationFps", host=host)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_RotationFps/platform_RotationFps.py b/server/site_tests/platform_RotationFps/platform_RotationFps.py
deleted file mode 100644
index bf54ac5..0000000
--- a/server/site_tests/platform_RotationFps/platform_RotationFps.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This is a server side screen rotation test using the Chameleon board."""
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-from autotest_lib.server.cros.chameleon import chameleon_measurer
-
-
-class platform_RotationFps(test.test):
-    """Server side screen rotation test.
-
-    This test talks to a Chameleon board and a DUT to set up dock mode,
-    change rotation, and measure the fps.
-    """
-    version = 1
-
-    DELAY_BEFORE_ROTATION = 5
-    DELAY_AFTER_ROTATION = 5
-
-    def run_once(self, host):
-        # Check the servo object
-        if host.servo is None:
-            raise error.TestError('Invalid servo object found on the host.')
-        if host.get_board_type() != 'CHROMEBOOK':
-            raise error.TestNAError('DUT is not Chromebook. Test Skipped')
-
-        measurer = chameleon_measurer.RemoteChameleonMeasurer(
-                host, self.outputdir)
-        display_facade = measurer.display_facade
-
-        chameleon_board = host.chameleon
-        chameleon_board.setup_and_reset(self.outputdir)
-
-        with measurer.start_dock_mode_measurement() as chameleon_port:
-            chameleon_port_name = chameleon_port.get_connector_type()
-            logging.info('Detected %s chameleon port.', chameleon_port_name)
-            display_id = display_facade.get_first_external_display_id()
-
-            # Ask Chameleon to capture the video during rotation
-            chameleon_port.start_capturing_video()
-            # Rotate the screen to 90 degree.
-            # Adding delays before and after rotation such that we can easily
-            # know the rotation fps, not other animation like opening a tab.
-            display_facade.set_display_rotation(
-                    display_id, 90, self.DELAY_BEFORE_ROTATION,
-                    self.DELAY_AFTER_ROTATION)
-            chameleon_port.stop_capturing_video()
-            # Restore back to 0 degree.
-            display_facade.set_display_rotation(display_id, 0)
-
-            # Retrieve the FPS info
-            fps_list = chameleon_port.get_captured_fps_list()
-            # Cut the fps numbers before and after rotation
-            fps_list = fps_list[-(self.DELAY_AFTER_ROTATION + 1):
-                                -(self.DELAY_AFTER_ROTATION - 1)]
-            # The fps during rotation may cross the second-boundary. Sum them.
-            fps = sum(fps_list)
-            logging.info('***RESULT*** Rotation FPS is %d (max 15)', fps)
-
-            # Output the perf value
-            self.output_perf_value(description='Rotation FPS',
-                                   value=fps,
-                                   higher_is_better=True,
-                                   units='fps')
diff --git a/server/site_tests/platform_S0ixCycle/control b/server/site_tests/platform_S0ixCycle/control
deleted file mode 100644
index 3ca3456..0000000
--- a/server/site_tests/platform_S0ixCycle/control
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Intel"
-NAME = "platform_S0ixCycle"
-PURPOSE = "Servo based S0ix state transition test."
-DEPENDENCIES = "servo_state:WORKING"
-TIME = "LONG"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-JOB_RETRIES = 2
-
-DOC = """
-This test check S0ix state transitions and its wake sources.
-The iteration should be specified by the parameter -a "faft_iterations=10"
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_s0ixcycle(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("platform_S0ixCycle", host=host, cmdline_args=args,
-                 disable_sysinfo=True)
-
-parallel_simple(run_s0ixcycle, machines)
diff --git a/server/site_tests/platform_S0ixCycle/control.1000 b/server/site_tests/platform_S0ixCycle/control.1000
deleted file mode 100644
index 7588013..0000000
--- a/server/site_tests/platform_S0ixCycle/control.1000
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Intel"
-NAME = "platform_S0ixCycle.1000"
-PURPOSE = "Servo based S0ix state transition test."
-ATTRIBUTES = "suite:kernel_daily_regression"
-DEPENDENCIES = "servo_state:WORKING"
-TIME = "LONG"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-JOB_RETRIES = 2
-
-DOC = """
-This test check S0ix state transitions and its wake sources.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-args.append('faft_iterations=1000')
-
-def run_s0ixcycle(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("platform_S0ixCycle", host=host, cmdline_args=args,
-                 disable_sysinfo=True)
-
-parallel_simple(run_s0ixcycle, machines)
diff --git a/server/site_tests/platform_S0ixCycle/platform_S0ixCycle.py b/server/site_tests/platform_S0ixCycle/platform_S0ixCycle.py
deleted file mode 100644
index 470b445..0000000
--- a/server/site_tests/platform_S0ixCycle/platform_S0ixCycle.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
-
-# platform_S0ixCycle test timing constants
-BEFORE_SUSPEND_WAIT_TIME_SECONDS = 10
-BEFORE_RESUME_WAIT_TIME_SECONDS = 2
-SUSPEND_WAIT_TIME_SECONDS = 5
-LIDOPEN_WAIT_TIME_SECONDS = 2
-POWER_STATE_RETRY_COUNT = 10
-
-
-class platform_S0ixCycle(FirmwareTest):
-    '''
-    Servo based S0ix cycle test and wake source.
-    '''
-    version = 1
-
-    def initialize(self, host, cmdline_args):
-        dict_args = utils.args_to_dict(cmdline_args)
-        self.faft_iterations = int(dict_args.get('faft_iterations', 1))
-        super(platform_S0ixCycle, self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('normal')
-
-    def perform_s0ix_cycle(self):
-        """
-        Perform S0ix suspend/resume cycle and check state transition.
-        """
-        resume_sources = ['powerbtn', 'lid', 'kbpress']
-        for resume_source in resume_sources:
-            time.sleep(BEFORE_SUSPEND_WAIT_TIME_SECONDS)
-            self.perform_suspend()
-            self.perform_resume(resume_source)
-
-    def perform_suspend(self):
-        """
-        Perform suspend to idle and check state transition.
-        """
-        logging.info('== S0ix suspend and check the state transition ==')
-        # check S0ix state transition
-        if not self.wait_power_state('S0', POWER_STATE_RETRY_COUNT):
-            raise error.TestFail('Platform failed to reach S0 state.')
-        cmd = 'echo freeze > /sys/power/state'
-        block = False
-        self.faft_client.system.run_shell_command(cmd, block)
-        time.sleep(SUSPEND_WAIT_TIME_SECONDS)
-        # check S0ix state transition
-        if not self.wait_power_state('S0ix', POWER_STATE_RETRY_COUNT):
-            raise error.TestFail('Platform failed to reach S0ix state.')
-
-    def perform_resume(self, resume_source):
-        """
-        Perform resume with selected resume source and check state transition.
-        @param resume_source(string):resume source option.
-        """
-        logging.info('== S0ix resume and check the state transition ==')
-        time.sleep(BEFORE_RESUME_WAIT_TIME_SECONDS)
-        if resume_source == 'powerbtn':
-            self.ec.send_command('powerbtn')
-        elif resume_source == 'lid':
-            self.ec.send_command('lidclose')
-            time.sleep(LIDOPEN_WAIT_TIME_SECONDS)
-            self.ec.send_command('lidopen')
-        elif resume_source == 'kbpress':
-            self.ec.key_press('<enter>')
-        else:
-            raise error.TestFail('Invalid resume source.')
-        # check S0 state transition
-        if not self.wait_power_state('S0', POWER_STATE_RETRY_COUNT):
-            raise error.TestFail('Platform failed to reach S0 state.')
-
-    def is_skl_board(self):
-        """
-        Check this device is a SKL based ChromeBook.
-        """
-        skl_boards = ('kunimitsu', 'lars', 'glados', 'chell', 'sentry')
-        output = self.faft_client.system.get_platform_name()
-        return output.lower() in skl_boards
-
-    def is_s0ix_supported(self):
-        """
-        Check this device supports suspend to idle.
-        """
-        cmd = 'cat /var/lib/power_manager/suspend_to_idle'
-        output = self.faft_client.system.run_shell_command_get_output(cmd)
-        if not output:
-            return False
-        else:
-            return int(output[0]) == 1
-
-    def run_once(self):
-        """
-        Main test logic
-        """
-        if not self.faft_config.chrome_ec or not self.check_ec_capability():
-            raise error.TestNAError(
-                    'Chrome EC is not supported on this device.')
-
-        if not (self.is_skl_board() and self.is_s0ix_supported()):
-            raise error.TestNAError(
-                    'Suspend to idle is not supported on this device.')
-
-        for i in xrange(self.faft_iterations):
-            logging.info('== Running FAFT ITERATION %d/%s ==', i + 1,
-                         self.faft_iterations)
-            logging.info(
-                    'S0ix suspend/resume back and check state transition.')
-            # wake the display by key press.
-            self.ec.key_press('<enter>')
-            self.switcher.mode_aware_reboot('custom', self.perform_s0ix_cycle)
-
-    def cleanup(self):
-        """
-        Cleanup after test completes
-        """
-        self.ec.set_uart_regexp('None')
-        # Test may failed before resume, wake the system.
-        self.ec.send_command('powerbtn')
-        # Perform a warm reboot as part of the cleanup.
-        self._client.reboot()
-        super(platform_S0ixCycle, self).cleanup()
diff --git a/server/site_tests/platform_S3Cycle/control b/server/site_tests/platform_S3Cycle/control
deleted file mode 100644
index e80a087..0000000
--- a/server/site_tests/platform_S3Cycle/control
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Intel"
-NAME = "platform_S3Cycle"
-PURPOSE = "Servo based S3 state transition test."
-DEPENDENCIES = "servo_state:WORKING"
-TIME = "LONG"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-JOB_RETRIES = 2
-
-DOC = """
-This test check S3 state transitions.
-The iteration should be specified by the parameter -a "faft_iterations=10"
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_s3cycle(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("platform_S3Cycle", host=host, cmdline_args=args,
-                 disable_sysinfo=True)
-
-parallel_simple(run_s3cycle, machines)
diff --git a/server/site_tests/platform_S3Cycle/control.1000 b/server/site_tests/platform_S3Cycle/control.1000
deleted file mode 100644
index fedf154..0000000
--- a/server/site_tests/platform_S3Cycle/control.1000
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Intel"
-NAME = "platform_S3Cycle.1000"
-PURPOSE = "Servo based S3 state transition test."
-ATTRIBUTES = "suite:kernel_daily_regression"
-DEPENDENCIES = "servo_state:WORKING"
-TIME = "LONG"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-JOB_RETRIES = 2
-
-DOC = """
-This test checks S3 state transitions.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-args.append('faft_iterations=1000')
-
-def run_s3cycle(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("platform_S3Cycle", host=host, cmdline_args=args,
-                 disable_sysinfo=True)
-
-parallel_simple(run_s3cycle, machines)
diff --git a/server/site_tests/platform_S3Cycle/platform_S3Cycle.py b/server/site_tests/platform_S3Cycle/platform_S3Cycle.py
deleted file mode 100644
index 272f431..0000000
--- a/server/site_tests/platform_S3Cycle/platform_S3Cycle.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
-
-# platform_S3Cycle test timing constants
-BEFORE_SUSPEND_WAIT_TIME_SECONDS = 10
-BEFORE_RESUME_WAIT_TIME_SECONDS = 2
-SUSPEND_WAIT_TIME_SECONDS = 5
-LIDOPEN_WAIT_TIME_SECONDS = 2
-POWER_STATE_RETRY_COUNT = 10
-
-class platform_S3Cycle(FirmwareTest):
-    '''
-    Servo based S3 cycle test.
-    '''
-    version = 1
-
-    def initialize(self, host, cmdline_args):
-        dict_args = utils.args_to_dict(cmdline_args)
-        self.faft_iterations = int(dict_args.get('faft_iterations', 1))
-        super(platform_S3Cycle, self).initialize(host, cmdline_args)
-        self.switcher.setup_mode('normal')
-
-    def perform_s3_cycle(self):
-        """
-        Perform S3 suspend/resume cycle and check state transition.
-        """
-        resume_sources = ['powerbtn', 'lid', 'kbpress']
-        for resume_source in resume_sources:
-            time.sleep(BEFORE_SUSPEND_WAIT_TIME_SECONDS);
-            self.perform_suspend()
-            self.perform_resume(resume_source)
-
-    def perform_suspend(self):
-        """
-        Perform suspend to mem and check state transition.
-        """
-        logging.info('== S3 suspend and check the state transition ==')
-        # check S0 state transition
-        if not self.wait_power_state('S0', POWER_STATE_RETRY_COUNT):
-            raise error.TestFail('Platform failed to reach S0 state.')
-        cmd = 'echo mem > /sys/power/state'
-        block = False
-        self.faft_client.system.run_shell_command(cmd, block)
-        time.sleep(SUSPEND_WAIT_TIME_SECONDS);
-        # check S3 state transition
-        if not self.wait_power_state('S3', POWER_STATE_RETRY_COUNT):
-            raise error.TestFail('Platform failed to reach S3 state.')
-
-    def perform_resume(self, resume_source):
-        """
-        Perform resume with selected resume source and check state transition.
-        @param resume_source (string): resume source option.
-        """
-        logging.info('== S3 resume and check the state transition ==')
-        time.sleep(BEFORE_RESUME_WAIT_TIME_SECONDS);
-        if resume_source == 'powerbtn':
-            self.ec.send_command('powerbtn')
-        elif resume_source == 'lid':
-            self.ec.send_command('lidclose')
-            time.sleep(LIDOPEN_WAIT_TIME_SECONDS);
-            self.ec.send_command('lidopen')
-        elif resume_source == 'kbpress':
-            self.ec.key_press('<enter>')
-        else:
-            raise error.TestFail('Invalid resume source.')
-        # check S0 state transition
-        if not self.wait_power_state('S0', POWER_STATE_RETRY_COUNT):
-            raise error.TestFail('Platform failed to reach S0 state.')
-
-    def run_once(self):
-        """Main test logic"""
-        if not self.faft_config.chrome_ec or not self.check_ec_capability():
-            raise error.TestNAError('Chrome EC is not supported on this device.')
-
-        for i in xrange(self.faft_iterations):
-            logging.info('== Running FAFT ITERATION %d/%s ==',i+1, self.faft_iterations)
-            logging.info('S3 suspend/resume back and check state transition.')
-            self.switcher.mode_aware_reboot('custom', self.perform_s3_cycle)
-
-    def cleanup(self):
-        self.ec.set_uart_regexp('None')
-        # The test may have failed before resume; wake the system.
-        self.ec.send_command('powerbtn')
-        # Perform a warm reboot as part of the cleanup.
-        self._client.reboot()
-        super(platform_S3Cycle, self).cleanup()
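
The wait_power_state() calls above come from the FAFT base class and are not shown in this diff. The following is a generic retry-loop sketch of that polling pattern; every name and timing here is illustrative, and get_state stands in for any callable that returns the current EC power state (for example one read over a servo console).

import time

def wait_for_power_state(get_state, expected, retries=10, delay_seconds=2):
    # Poll get_state() until it matches the expected state or the retries
    # are exhausted; return whether the expected state was observed.
    for _ in range(retries):
        if get_state() == expected:
            return True
        time.sleep(delay_seconds)
    return False

# Example with a stubbed reader:
# wait_for_power_state(lambda: 'S3', 'S3', retries=1)  # -> True
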
diff --git a/server/site_tests/platform_ServoPowerStateController/control.nousb b/server/site_tests/platform_ServoPowerStateController/control.nousb
index b6ad36d..08f75fb 100644
--- a/server/site_tests/platform_ServoPowerStateController/control.nousb
+++ b/server/site_tests/platform_ServoPowerStateController/control.nousb
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_ServoPowerStateController.USBUnplugged"
 PURPOSE = "Verify servo PowerStateController functions."
 CRITERIA = "This test will fail if servo does not work as expected."
@@ -14,6 +14,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 The test exercises these transitions:
diff --git a/server/site_tests/platform_ServoPowerStateController/control.usb b/server/site_tests/platform_ServoPowerStateController/control.usb
index 96eba16..675f2c4 100644
--- a/server/site_tests/platform_ServoPowerStateController/control.usb
+++ b/server/site_tests/platform_ServoPowerStateController/control.usb
@@ -4,17 +4,18 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_ServoPowerStateController.USBPluggedin"
 PURPOSE = "Verify servo PowerStateController functions."
 CRITERIA = "This test will fail if servo does not work as expected."
-ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_normal, suite:faft_bios_ec3po, suite:faft_bios_tot, suite:bvt-perbuild,suite:servo_lab, suite:labqual"
+ATTRIBUTES = "suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_lv2, suite:faft_normal, suite:faft_bios_tot, suite:bvt-perbuild,suite:servo_lab, suite:labqual, suite:distributed_lab_qual_faft"
 DEPENDENCIES = "servo_state:WORKING"
 TIME = "LONG"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 The test specifically exercises all the state transitions from control.nousb,
diff --git a/server/site_tests/platform_ServoPowerStateController/platform_ServoPowerStateController.py b/server/site_tests/platform_ServoPowerStateController/platform_ServoPowerStateController.py
index 4ae07a8..980907d 100644
--- a/server/site_tests/platform_ServoPowerStateController/platform_ServoPowerStateController.py
+++ b/server/site_tests/platform_ServoPowerStateController/platform_ServoPowerStateController.py
@@ -106,6 +106,8 @@
     """Test servo can power on and off DUT in recovery and non-recovery mode."""
     version = 1
 
+    # Acceptable power states when device is turned off
+    POWER_OFF_STATES = ['S5','G3']
 
     def initialize(self, host):
         """Initialize DUT for testing."""
@@ -154,7 +156,7 @@
                                  ('rec' if rec_on else 'on', boot_source))
 
 
-    def assert_dut_off(self, error_message):
+    def assert_dut_off(self, error_message, check_power_state_off=True):
         """Confirm DUT is off and does not turn back on after 30 seconds.
 
         @param error_message: Error message to raise if DUT stays on.
@@ -163,6 +165,13 @@
         if not self.host.ping_wait_down(timeout=10):
             raise error.TestFail(error_message)
 
+        # Check that power state is actually off (in S5 or G3)
+        if check_power_state_off:
+            ap_power_state = self.host.servo.get('ec_system_powerstate')
+            logging.info('Read power state: %s' % ap_power_state)
+            if ap_power_state not in self.POWER_OFF_STATES:
+                raise error.TestFail('%s. %s' % (error_message, 'DUT not in S5 or G3 state.'))
+
         if self.host.ping_wait_up(timeout=30):
             raise error.TestFail('%s. %s' % (error_message, 'DUT turns back on'
                                              ' after it is turned off.'))
@@ -190,7 +199,7 @@
         logging.info('Power DUT on in recovery mode, DUT shall boot from USB.')
         self.host.servo.switch_usbkey('off')
         self.host.power_on_via_servo(self.controller.REC_ON)
-        self.assert_dut_off('power_state:rec didn\'t stay at recovery screen.')
+        self.assert_dut_off('power_state:rec didn\'t stay at recovery screen.', False)
 
         self.host.servo.switch_usbkey('dut')
         time.sleep(30)
@@ -277,5 +286,5 @@
         self.controller = host.servo.get_power_state_controller()
 
         self.test_with_usb_unplugged()
-        if usb_available:
+        if usb_available and host.is_servo_usb_usable():
             self.test_with_usb_plugged_in()
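
A minimal sketch of the off-state verification idea exercised here, assuming only a callable that returns the AP power state as reported over servo (for example the 'ec_system_powerstate' control seen above); the function name is illustrative and not part of autotest.

POWER_OFF_STATES = ('S5', 'G3')

def dut_is_really_off(read_ec_power_state):
    # Treat the DUT as off only if the servo-reported AP power state is one
    # of the accepted off states (S5 or G3), not merely unreachable over
    # the network.
    return read_ec_power_state() in POWER_OFF_STATES

# Example with a stubbed reader:
# assert dut_is_really_off(lambda: 'G3')
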
diff --git a/server/site_tests/platform_StageAndRecover/control b/server/site_tests/platform_StageAndRecover/control
index 2ed492c..eba45d8 100644
--- a/server/site_tests/platform_StageAndRecover/control
+++ b/server/site_tests/platform_StageAndRecover/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_StageAndRecover"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Install"
@@ -12,6 +12,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
 ATTRIBUTES = "suite:platform_test"
+PY_VERSION = 3
 
 DOC = """
 This test stages and installs the same build recovery image onto a
diff --git a/server/site_tests/platform_SuspendResumeTiming/control b/server/site_tests/platform_SuspendResumeTiming/control
deleted file mode 100644
index 29e20fe..0000000
--- a/server/site_tests/platform_SuspendResumeTiming/control
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "kalin"
-NAME = "platform_SuspendResumeTiming"
-PURPOSE = "Servo based suspend-resume timing check test"
-CRITERIA = "This test will fail if time to suspend or resume is too long."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-This test measures the time to suspend and resume
-for the case where the USB hub with peripherals
-is disconnected by servo.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("platform_SuspendResumeTiming", host=host,
-                 plug_usb=False, disable_sysinfo=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_SuspendResumeTiming/control.usb_plugged b/server/site_tests/platform_SuspendResumeTiming/control.usb_plugged
deleted file mode 100644
index 8d27e287b..0000000
--- a/server/site_tests/platform_SuspendResumeTiming/control.usb_plugged
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "kalin"
-NAME = "platform_SuspendResumeTiming.USB_PLUGGED"
-PURPOSE = "Servo based suspend-resume timing check test"
-CRITERIA = "This test will fail if time to suspend or resume is too long."
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-This test measures the time to suspend and resume
-for the case where the USB hub with peripherals
-is connected by servo.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("platform_SuspendResumeTiming", host=host,
-                 plug_usb=True, disable_sysinfo=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_SuspendResumeTiming/platform_SuspendResumeTiming.py b/server/site_tests/platform_SuspendResumeTiming/platform_SuspendResumeTiming.py
deleted file mode 100644
index fd8f5dc..0000000
--- a/server/site_tests/platform_SuspendResumeTiming/platform_SuspendResumeTiming.py
+++ /dev/null
@@ -1,156 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import datetime
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-_POWERD_LOG_PATH = '/var/log/power_manager/powerd.LATEST'
-_RESUME_END_LOG = '\"daemon.* Chrome is using normal display mode$\"'
-_RESUME_START_LOG = '\"suspender.*Finishing request [0-9]+ successfully after [0-9]+s\"'
-_SERVO_USB_NUM = 2
-_SHORT_WAIT_ = 5
-_SUSPEND_END_LOG = '\"suspender.* Starting suspend$\"'
-_SUSPEND_START_LOG = '\"suspender.* Starting request [0-9]+$\"'
-_SUSPEND_TIME = 15
-_TIME_TO_RESUME_BAR = 3
-_TIME_TO_SUSPEND_BAR = 3
-_SLEEP_AFTER_RESUME = 60
-_SLEEP_AFTER_REBOOT = 30
-
-class platform_SuspendResumeTiming(test.test):
-    """Checks suspend and resume happen in reasonable timelines."""
-    version = 1
-
-
-    def cleanup(self):
-        """ Disconnect servo hub."""
-        self.host.servo.set('dut_hub1_rst1', 'on')
-        self.host.servo.set('usb_mux_sel3', 'servo_sees_usbkey')
-
-
-    def get_suspender_log_stamp(self, pwrd_log):
-        """ Reads powerd log and takes suspend and resume logs timestamps.
-
-        @param pwrd_log: log string to search for.
-
-        @raises TestError: if the suspender log line is found more than once.
-
-        @returns log timestamp as datetime.
-        """
-        out_log = self.host.run('tac %s | grep -E %s'
-                % (_POWERD_LOG_PATH, pwrd_log),
-                ignore_status=True).stdout.strip()
-        log_count = len(out_log.split('\n'))
-        if log_count != 1:
-            raise error.TestError('Log \"%s\" is found %d times!'
-                                  % (pwrd_log, log_count))
-        return datetime.datetime.strptime(out_log[1:12], "%m%d/%H%M%S")
-
-
-    def get_display_mode_timestamp(self):
-        """ Takes the first _RESUME_END_LOG line after _RESUME_START_LOG line
-        and returns its timestamp.
-
-        @returns log timestamp as datetime.
-        """
-
-        cmd = ('sed -nr \'/%s/,$p\' %s | grep -E %s | head -n 1'
-            % (_RESUME_START_LOG.replace("\"",""),
-               _POWERD_LOG_PATH, _RESUME_END_LOG))
-        out_log = self.host.run(cmd, ignore_status=True).stdout.strip()
-        return datetime.datetime.strptime(out_log[1:12], "%m%d/%H%M%S")
-
-
-    def get_suspend_resume_time(self):
-        """ Reads powerd log and takes suspend and resume timestamps.
-
-        @returns the times taken to suspend and resume, as a tuple.
-
-        @raises error.TestError: if timestamps are not sequential.
-        """
-
-        suspend_start = self.get_suspender_log_stamp(_SUSPEND_START_LOG)
-        suspend_end = self.get_suspender_log_stamp(_SUSPEND_END_LOG)
-        resume_start = self.get_suspender_log_stamp(_RESUME_START_LOG)
-        resume_end = self.get_display_mode_timestamp()
-
-
-        logging.info([suspend_start, suspend_end,
-                      resume_start, resume_end])
-
-        if not all([resume_end >= resume_start,
-                    resume_start > suspend_end,
-                    suspend_end >= suspend_start]):
-            raise error.TestError('Log timestamps are not sequential!')
-
-        time_to_susp = (suspend_end - suspend_start).total_seconds()
-        time_to_res = (resume_end - resume_start).total_seconds()
-
-        return (time_to_susp, time_to_res)
-
-
-    def get_lsusb_lines(self):
-        """ Executes lsusb and returns list of the output lines."""
-        output = self.host.run('lsusb', ignore_status=True).stdout
-        return output.strip().split('\n')
-
-
-    def run_once(self, host, plug_usb=False):
-        """ Running the suspend-resume timing test.
-
-        @param host: device under test host.
-        @param plug_usb: whether to plug external USB through servo.
-
-        @raises TestFail: if the time to suspend or resume exceeds the bar,
-        or if no peripherals are connected to servo.
-        """
-        self.host = host
-        self.host.servo.set('dut_hub1_rst1', 'on')
-
-        # Reboot to create new powerd.Latest log file.
-        self.host.reboot()
-        time.sleep(_SLEEP_AFTER_REBOOT)
-
-        # Test user login.
-        autotest_client = autotest.Autotest(self.host)
-        autotest_client.run_test("desktopui_SimpleLogin",
-                                 exit_without_logout=True)
-
-        # Plug USB hub with peripherals.
-        if plug_usb:
-            lsusb_unplugged_len = len(self.get_lsusb_lines())
-            self.host.servo.switch_usbkey('dut')
-            self.host.servo.set('usb_mux_sel3', 'dut_sees_usbkey')
-            self.host.servo.set('dut_hub1_rst1', 'off')
-            time.sleep(_SHORT_WAIT_)
-            lsusb_plugged_len = len(self.get_lsusb_lines())
-            if lsusb_plugged_len - lsusb_unplugged_len <  _SERVO_USB_NUM + 1:
-                raise error.TestFail('No peripherals are connected to servo!')
-
-        try:
-            self.host.suspend(suspend_time=_SUSPEND_TIME)
-        except error.AutoservSuspendError:
-            pass
-        time.sleep(_SLEEP_AFTER_RESUME)
-        self.host.run('sync')
-
-        # powerd log output for debug log
-        self.host.run('cat %s' % _POWERD_LOG_PATH,
-                                   ignore_status=True).stdout.strip()
-
-        errors = []
-        time_to_suspend, time_to_resume = self.get_suspend_resume_time()
-        if time_to_suspend > _TIME_TO_SUSPEND_BAR:
-            errors.append('Suspend time is too long: %d' % time_to_suspend)
-        if time_to_resume > _TIME_TO_RESUME_BAR:
-            errors.append('Resume time is too long: %d' % time_to_resume)
-        if errors:
-            raise error.TestFail('; '.join(set(errors)))
-
-
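
The timestamp handling above slices the 11 characters after the leading log-level letter of a powerd log line and parses them with '%m%d/%H%M%S'. A standalone sketch of that parsing and of the duration computation follows; the sample log lines are invented for illustration only.

import datetime

def powerd_timestamp(log_line):
    # powerd log lines begin with a token such as "I0321/120301.000000";
    # characters 1..11 hold month, day, hour, minute and second.
    return datetime.datetime.strptime(log_line[1:12], '%m%d/%H%M%S')

suspend_start = powerd_timestamp('I0321/120301.000000 Starting request 1')
suspend_end = powerd_timestamp('I0321/120303.000000 Starting suspend')
print((suspend_end - suspend_start).total_seconds())  # -> 2.0
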
diff --git a/server/site_tests/platform_TrackpadStressServer/control b/server/site_tests/platform_TrackpadStressServer/control
deleted file mode 100644
index 245756e..0000000
--- a/server/site_tests/platform_TrackpadStressServer/control
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_TrackpadStressServer"
-TIME = "SHORT"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This script triggers a kernel panic while running the syndetect command on the
-CMT trackpad driver. It is designed to test whether we can disable the trackpad.
-"""
-
-def run_trackpad_stress(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_TrackpadStressServer", host=host, iterations=1,
-                 disable_sysinfo=True)
-
-parallel_simple(run_trackpad_stress, machines)
diff --git a/server/site_tests/platform_TrackpadStressServer/platform_TrackpadStressServer.py b/server/site_tests/platform_TrackpadStressServer/platform_TrackpadStressServer.py
deleted file mode 100644
index abed694..0000000
--- a/server/site_tests/platform_TrackpadStressServer/platform_TrackpadStressServer.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import print_function
-
-import logging
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test, autotest
-
-
-class platform_TrackpadStressServer(test.test):
-    """
-    Make sure the trackpad continues to operate after a kernel panic.
-    """
-    version = 1
-
-    def _run_client_test(self, client_at, verify_only=False):
-        self.job.set_state('client_passed', None)
-        client_at.run_test(self.client_test)
-        return self.job.get_state('client_passed')
-
-    def run_once(self, host=None):
-        self.client = host
-        self.client_test = 'platform_TrackpadStress'
-
-        logging.info('TrackpadStressServer: start client test')
-        client_at = autotest.Autotest(self.client)
-        if not self._run_client_test(client_at):
-            raise error.TestFail('Client test failed precheck state')
-
-        # Configure the client to reboot on a kernel panic
-        self.client.run('sysctl kernel.panic|grep "kernel.panic = -1"')
-        self.client.run('sysctl kernel.panic_on_oops|'
-                        'grep "kernel.panic_on_oops = 1"')
-
-        boot_id = self.client.get_boot_id()
-
-        # Make it rain
-        command  = 'echo BUG > /sys/kernel/debug/provoke-crash/DIRECT'
-        command += '|| echo bug > /proc/breakme'
-        logging.info('TrackpadStressServer: executing "%s" on %s',
-                     command, self.client.hostname)
-        try:
-            # Simply writing to the crash interface resets the target
-            # immediately, leaving files unsaved to disk and the master ssh
-            # connection wedged for a long time.
-            self.client.run(
-                'sh -c "sync; sleep 1; %s" >/dev/null 2>&1 &' % command)
-        except error.AutoservRunError as e:
-            # It is expected that this will cause a non-zero exit status.
-            pass
-
-        self.client.wait_for_restart(
-                    down_timeout=60,
-                    down_warning=60,
-                    old_boot_id=boot_id,
-                    # Extend the default reboot timeout as some targets take
-                    # longer than normal before ssh is available again.
-                    timeout=self.client.DEFAULT_REBOOT_TIMEOUT * 4)
-
-        # Check that the trackpad is running
-        # TODO: need an additional client test.
-        if not self._run_client_test(client_at, verify_only=True):
-            raise error.TestFail('Client test failed final state verification.'
-                                 'The trackpad is not running after kernel '
-                                 'panic.')
-
-
diff --git a/server/site_tests/platform_UReadAheadServer/control b/server/site_tests/platform_UReadAheadServer/control
index 1f7a787..04868e9 100644
--- a/server/site_tests/platform_UReadAheadServer/control
+++ b/server/site_tests/platform_UReadAheadServer/control
@@ -3,10 +3,11 @@
 # found in the LICENSE file.
 
 NAME = "platform_UReadAheadServer"
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 TIME = "SHORT"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 JOB_RETRIES = 2
 ATTRIBUTES = "suite:bvt-perbuild"
 BUG_TEMPLATE = {
diff --git a/server/site_tests/platform_UReadAheadServer/platform_UReadAheadServer.py b/server/site_tests/platform_UReadAheadServer/platform_UReadAheadServer.py
index 93db8d4..d1e9872 100644
--- a/server/site_tests/platform_UReadAheadServer/platform_UReadAheadServer.py
+++ b/server/site_tests/platform_UReadAheadServer/platform_UReadAheadServer.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/platform_USBHIDWake/control b/server/site_tests/platform_USBHIDWake/control
deleted file mode 100644
index 40662d4..0000000
--- a/server/site_tests/platform_USBHIDWake/control
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "platform_USBHIDWake"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = '''This test checks to see whether a USB HID Device is capable of waking
-a DUT in the S3 state. It requires user interaction (to press a button
-on an attached HID device), and also that a HID device is plugged into
-the DUT. The openvt binary (emerge sys-apps/kbd) should be available
-so that the user prompting code can work.'''
-
-
-def run(machine):
-    job.run_test('platform_USBHIDWake', client_ip=machine)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/platform_USBHIDWake/platform_USBHIDWake.py b/server/site_tests/platform_USBHIDWake/platform_USBHIDWake.py
deleted file mode 100644
index 40e066d..0000000
--- a/server/site_tests/platform_USBHIDWake/platform_USBHIDWake.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-
-from autotest_lib.client.common_lib import autotemp
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils as client_utils
-from autotest_lib.server import autotest
-from autotest_lib.server import hosts
-from autotest_lib.server import test
-from autotest_lib.server import utils
-
-bored_now = """
-[  _____             ____  _               _  ]
-[ |_   _|__   ___   / ___|| | _____      _| | ]
-[  | |/ _ \ / _ \  \___ \| |/ _ \ \ /\ / /| | ]
-[  | | (_) | (_) |  ___) | | (_) \ V  V / |_| ]
-[  |_|\___/ \___/  |____/|_|\___/ \_/\_/  (_) ]
-[                                             ]
-[ The device didn't wake up - either the HID device isn't working or  ]
-[ the chromebook just didn't wake up: Either way, wake the chromebook ]
-[ so we can finish the test, or we'll be sitting here for a while...  ]
-"""
-
-press_button_banner = """
-[     _   _   _             _   _              ]
-[    / \ | |_| |_ ___ _ __ | |_(_) ___  _ __   ]
-[   / _ \| __| __/ _ \ '_ \| __| |/ _ \| '_ \  ]
-[  / ___ \ |_| ||  __/ | | | |_| | (_) | | | | ]
-[ /_/   \_\__|\__\___|_| |_|\__|_|\___/|_| |_| ]
-[                                              ]
-[ Press the power, sleep or other suitable button on your USB HID Device ]
-[ NOTE: NOT on the Chromebook itself - on the USB Keyboard/Remote/etc    ]
-[ Then press Return or Enter here so we can proceed with the test        ]
-"""
-
-
-class platform_USBHIDWake(test.test):
-    version = 1
-
-    def suspend(self):
-        self._client.run("(echo mem > /sys/power/state &)")
-
-
-    def check_dependencies(self):
-        if not utils.system('which openvt', ignore_status=True) == 0:
-            raise error.TestError('openvt missing (see control file)')
-        if not utils.system('sudo true', ignore_status=True) == 0:
-            raise error.TestError('Insufficient privileges: cannot sudo')
-
-
-    def prompt(self, banner=">>>>>>>>>>> Achtung! <<<<<<<<<<<"):
-        """prompt the user with the supplied banner,
-        then wait for them to press enter
-
-        @param banner: A [possibly multi-line] banner prompt to display
-        """
-        temp = autotemp.tempfile(unique_id='vtprompt', text=True)
-        os.write(temp.fd, banner)
-        pcmd = ("sudo openvt -s -w -- " +
-                "sh -c 'clear && cat %s && read -p \"READY> \" REPLY &&" +
-                " echo $REPLY'") % temp.name
-        utils.system(pcmd)
-        temp.clean()
-
-
-    def wait_for_host(self, host=None, timeout=30):
-        '''Wait for the DUT to come back up, with a timeout
-
-        @param host: ip address or hostname of DUT
-        @param timeout: maximum time in seconds to wait
-
-        Returns True if the host comes up in time, False otherwise'''
-        return client_utils.ping(host, deadline=timeout) == 0
-
-
-    def have_hid_device(self):
-        """Return True is a USB HID device is present, False otherwise"""
-        cmd = 'grep "^03$" /sys/bus/usb/devices/[0-9]*/[0-9]*/bInterfaceClass'
-        rval = self._client.run(cmd, ignore_status=True)
-        return rval.exit_status == 0
-
-
-    def run_once(self, client_ip):
-        """Check to see if a DUT at the given address wakes from suspend
-        on USB HID events
-
-        @param client_ip: ip address (string) at which the DUT may be found"""
-        self.check_dependencies()
-        if not client_ip:
-            raise error.TestError('Must have test client IP address')
-        self._client = hosts.create_host(client_ip)
-        if not self.have_hid_device():
-            raise error.TestError('No HID devices found, please attach one')
-        self.suspend()
-        self.prompt(banner=press_button_banner)
-        if not self.wait_for_host(host=client_ip, timeout=10):
-            self.prompt(banner=bored_now)
-            raise error.TestFail('DUT did not wake up on HID event')
diff --git a/server/site_tests/platform_Vpd/control b/server/site_tests/platform_Vpd/control
deleted file mode 100644
index ea7c323..0000000
--- a/server/site_tests/platform_Vpd/control
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = "bsimonnet@chromium.org"
-NAME = "platform_Vpd"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:regression"
-
-DOC = """
-This test checks that vpd runs correctly, regenerates the cache on
-reboot, and that the generated files have the right permissions.
-"""
-
-def run_test(machine):
-    host = hosts.create_host(machine)
-    job.run_test("platform_Vpd", host=host)
-
-parallel_simple(run_test, machines)
diff --git a/server/site_tests/platform_Vpd/platform_Vpd.py b/server/site_tests/platform_Vpd/platform_Vpd.py
deleted file mode 100755
index ab004de..0000000
--- a/server/site_tests/platform_Vpd/platform_Vpd.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-
-class platform_Vpd(test.test):
-    """Test that vpd's cache gets generated during the boot if missing.
-
-    Clean the vpd cache file and check that the files are correctly generated
-    at the next reboot.
-    """
-    version = 1
-
-    _VPD_FILES = [
-        ('/mnt/stateful_partition/unencrypted/cache/vpd/filtered.txt',
-            'root', 'root', '644'),
-        ('/mnt/stateful_partition/unencrypted/cache/vpd/echo/vpd_echo.txt',
-            'root', 'chronos', '640'),
-        ('/mnt/stateful_partition/unencrypted/cache/vpd/full-v2.txt',
-            'root', 'root', '600')
-    ]
-
-    _VPD_LINKS = [
-        '/var/log/vpd_2.0.txt',
-        '/var/cache/vpd/full-v2.txt',
-        '/var/cache/echo/vpd_echo.txt'
-    ]
-
-    # Files used by older versions of vpd that should no longer be present.
-    _VPD_OLD_FILES = [
-        '/var/cache/vpd/full.cache',
-        '/var/cache/offers/vpd_echo.txt',
-        '/var/cache/vpd/full-v2.cache'
-    ]
-
-    def get_stat(self, host, path):
-        """Return user, group and permissions of file on host.
-
-        @param host: the host machine to test
-        @param path: path to the file that we are testing
-
-        @return None if the file does not exist
-                (user, group, permissions) if it does.
-        """
-        if not self.file_exists(host, path):
-            return None
-
-        user = host.run('stat -c %U ' + path).stdout.strip()
-        group = host.run('stat -c %G ' + path).stdout.strip()
-        mode = host.run('stat -c %a ' + path).stdout.strip()
-
-        return (user, group, mode)
-
-    def file_exists(self, host, path):
-        """Check if the path exists.
-
-        @param host: the host machine
-        @param path: path of the file to check
-
-        @return True if the file exists
-        """
-        return host.run('[ -f %s ]' % path,
-                        ignore_status=True).exit_status == 0
-
-    def is_symlink(self, host, path):
-        """Check if a file is a symlink.
-
-        @param host: the host machine
-        @param path: path to the file
-
-        @return True if the file is a symlink
-        """
-        return host.run('[ -h %s ]' % path,
-                        ignore_status=True).exit_status == 0
-
-    def run_once(self, host):
-        host.run('dump_vpd_log --clean')
-
-        removed_files = [item[0] for item in self._VPD_FILES]
-        removed_files += self._VPD_LINKS
-        removed_files += self._VPD_OLD_FILES
-
-        for vpdfile in removed_files:
-            if self.file_exists(host, vpdfile):
-                raise error.TestFail('Vpd file %s was not removed by '
-                    'dump_vpd_log --clean' % vpdfile)
-
-        host.reboot()
-
-        # check that the files exist and have the right permissions
-        for (path, user, group, perm) in self._VPD_FILES:
-            if self.is_symlink(host, path):
-                raise error.TestFail('File %s should not be a symlink' % path)
-
-            stats = self.get_stat(host, path)
-            if stats is None:
-                raise error.TestFail('File %s should be present' % path)
-
-            if user != stats[0]:
-                raise error.TestFail('Wrong user (%s instead of %s) for %s' %
-                      (stats[0], user, path))
-
-            if group != stats[1]:
-                raise error.TestFail('Wrong group (%s instead of %s) for %s' %
-                      (stats[1], group, path))
-
-            if perm != stats[2]:
-                raise error.TestFail('Wrong permissions (%s instead of %s)'
-                    ' for %s' % (stats[2], perm, path))
-
-        # for symlinks, check that they exist and are symlinks
-        for path in self._VPD_LINKS:
-            if not self.is_symlink(host, path):
-                raise error.TestFail('%s should be a symlink' % path)
-
-            if not self.file_exists(host, path):
-                raise error.TestFail('Symlink %s does not exist' % path)
-
-        for path in self._VPD_OLD_FILES:
-            if self.file_exists(host, path):
-                raise error.TestFail('Old vpd file %s installed' % path)
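
The ownership and mode checks above shell out to stat three times per file. A small local sketch of that pattern follows; host_run stands in for the host.run(...).stdout call, and both the helper names and the example path are illustrative assumptions.

import subprocess

def file_stat(host_run, path):
    # Ask stat for the owner, group and octal mode of a file, mirroring the
    # three stat -c invocations used in the removed test.
    user = host_run('stat -c %U ' + path).strip()
    group = host_run('stat -c %G ' + path).strip()
    mode = host_run('stat -c %a ' + path).strip()
    return user, group, mode

# Local usage example:
# run = lambda cmd: subprocess.check_output(cmd, shell=True).decode()
# print(file_stat(run, '/etc/hostname'))  # e.g. ('root', 'root', '644')
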
diff --git a/server/site_tests/policy_AUServer/control.AUDisabled.false b/server/site_tests/policy_AUServer/control.AUDisabled.false
deleted file mode 100644
index a0af84b..0000000
--- a/server/site_tests/policy_AUServer/control.AUDisabled.false
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'kathrelkeld'
-NAME = 'policy_AUServer.AUDisabled.false'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the DeviceAutoUpdateDisabled
-policy.
-
-"""
-args_dict = utils.args_to_dict(args)
-client_test = 'policy_DeviceAutoUpdateDisabled'
-case = False
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_AUServer', host=host, client_test=client_test,
-                 case=case, **args_dict)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_AUServer/control.AUDisabled.notset b/server/site_tests/policy_AUServer/control.AUDisabled.notset
deleted file mode 100644
index 020468c..0000000
--- a/server/site_tests/policy_AUServer/control.AUDisabled.notset
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'kathrelkeld'
-NAME = 'policy_AUServer.AUDisabled.notset'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the DeviceAutoUpdateDisabled
-policy.
-
-"""
-args_dict = utils.args_to_dict(args)
-client_test = 'policy_DeviceAutoUpdateDisabled'
-case = None
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_AUServer', host=host, client_test=client_test,
-                 case=case, **args_dict)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_AUServer/control.AUDisabled.true b/server/site_tests/policy_AUServer/control.AUDisabled.true
deleted file mode 100644
index 72178f4..0000000
--- a/server/site_tests/policy_AUServer/control.AUDisabled.true
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'kathrelkeld'
-NAME = 'policy_AUServer.AUDisabled.true'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the DeviceAutoUpdateDisabled
-policy.
-
-"""
-args_dict = utils.args_to_dict(args)
-client_test = 'policy_DeviceAutoUpdateDisabled'
-case = True
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_AUServer', host=host, client_test=client_test,
-                 case=case, **args_dict)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_AUServer/policy_AUServer.py b/server/site_tests/policy_AUServer/policy_AUServer.py
deleted file mode 100644
index 7533e82..0000000
--- a/server/site_tests/policy_AUServer/policy_AUServer.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server.cros.update_engine import update_engine_test
-
-
-class policy_AUServer(update_engine_test.UpdateEngineTest):
-    """
-    This server test is used just to get the URL of the payload to use. It
-    will then call into a client side test to test different things in
-    the Omaha response.
-    """
-    version = 1
-
-
-    def cleanup(self):
-        """Cleanup for this test."""
-        super(policy_AUServer, self).cleanup()
-        tpm_utils.ClearTPMIfOwned(self._host)
-        self._host.reboot()
-
-
-    def run_once(self, client_test, case, full_payload=True,
-                 job_repo_url=None, running_at_desk=False):
-        """
-        Starting point of this test.
-
-        Note: base class sets host as self._host.
-
-        @param client_test: the name of the Client test to run.
-        @param case: the case to run for the given Client test.
-        @param full_payload: whether the update should be full or incremental.
-        @param job_repo_url: url provided at runtime (or passed in locally
-                             when running at workstation).
-        @param running_at_desk: indicates test is run from a workstation.
-
-        """
-        self._job_repo_url = job_repo_url
-
-        # Clear TPM to ensure that client test can enroll device.
-        tpm_utils.ClearTPMIfOwned(self._host)
-
-        # Figure out the payload to use for the current build.
-        payload = self._get_payload_url(full_payload=full_payload)
-        image_url, _ = self._stage_payload_by_uri(payload)
-
-        if running_at_desk:
-            image_url = self._copy_payload_to_public_bucket(payload)
-            logging.info('We are running from a workstation. Putting URL on a '
-                         'public location: %s', image_url)
-
-        logging.info('url: %s', image_url)
-
-        self._run_client_test_and_check_result(client_test,
-                                               case=case,
-                                               image_url=image_url)
diff --git a/server/site_tests/policy_DeviceBootOnAcEnabled/control.DeviceBootOnAcEnabled.false b/server/site_tests/policy_DeviceBootOnAcEnabled/control.DeviceBootOnAcEnabled.false
deleted file mode 100644
index 001f620..0000000
--- a/server/site_tests/policy_DeviceBootOnAcEnabled/control.DeviceBootOnAcEnabled.false
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "rzakarian"
-NAME = "policy_DeviceBootOnAcEnabled.false"
-CRITERIA = "This test will fail if servo does not work as expected."
-TIME = "LONG"
-TEST_CATEGORY = "General"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-Test that verifies DeviceBootOnAcEnabled policy.
-If this policy is set to true then boot on AC will always be enabled.
-If this policy is set to false, boot on AC will always be disabled.
-If this policy is left unset, boot on AC is disabled.
-
-"""
-
-case = False
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('policy_DeviceBootOnAcEnabled', host=host, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceBootOnAcEnabled/control.DeviceBootOnAcEnabled.not_set b/server/site_tests/policy_DeviceBootOnAcEnabled/control.DeviceBootOnAcEnabled.not_set
deleted file mode 100644
index f6d2e1d..0000000
--- a/server/site_tests/policy_DeviceBootOnAcEnabled/control.DeviceBootOnAcEnabled.not_set
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "rzakarian"
-NAME = "policy_DeviceBootOnAcEnabled.not_set"
-CRITERIA = "This test will fail if servo does not work as expected."
-TIME = "LONG"
-TEST_CATEGORY = "General"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-Test that verifies DeviceBootOnAcEnabled policy.
-If this policy is set to true then boot on AC will always be enabled.
-If this policy is set to false, boot on AC will always be disabled.
-If this policy is left unset, boot on AC is disabled.
-
-"""
-
-case = None
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('policy_DeviceBootOnAcEnabled', host=host, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceBootOnAcEnabled/control.DeviceBootOnAcEnabled.true b/server/site_tests/policy_DeviceBootOnAcEnabled/control.DeviceBootOnAcEnabled.true
deleted file mode 100644
index 71e06e2..0000000
--- a/server/site_tests/policy_DeviceBootOnAcEnabled/control.DeviceBootOnAcEnabled.true
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "rzakarian"
-NAME = "policy_DeviceBootOnAcEnabled.true"
-CRITERIA = "This test will fail if servo does not work as expected."
-TIME = "LONG"
-TEST_CATEGORY = "General"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-Test that verifies DeviceBootOnAcEnabled policy.
-If this policy is set to true then boot on AC will always be enabled.
-If this policy is set to false, boot on AC will always be disabled.
-If this policy is left unset, boot on AC is disabled.
-
-"""
-
-case = True
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('policy_DeviceBootOnAcEnabled', host=host, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceBootOnAcEnabled/policy_DeviceBootOnAcEnabled.py b/server/site_tests/policy_DeviceBootOnAcEnabled/policy_DeviceBootOnAcEnabled.py
deleted file mode 100644
index 7a9243e..0000000
--- a/server/site_tests/policy_DeviceBootOnAcEnabled/policy_DeviceBootOnAcEnabled.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-
-class policy_DeviceBootOnAcEnabled(test.test):
-    """Test that verifies DeviceBootOnAcEnabled policy.
-
-    If this policy is set to true then boot on AC will always be enabled.
-
-    If this policy is set to false, boot on AC will always be disabled.
-
-    If this policy is left unset, boot on AC is disabled.
-
-    This test has to run on a Wilco device with a servo board.
-    """
-    version = 1
-
-
-    def cleanup(self):
-        """Clean up DUT.
-
-        Make sure device is on.
-        Make sure servo board is supplying device with power.
-        Clear TPM.
-        """
-        self._if_device_off_turn_back_on()
-        self.host.servo.set_servo_v4_role('src')
-        tpm_utils.ClearTPMIfOwned(self.host)
-
-
-    def _check_power_discharging(self):
-        power_status = self.host.run('cat /sys/class/power_supply/BAT0/status')
-        power_status = power_status.stdout.lower().rstrip()
-        return power_status == 'discharging'
-
-
-    def _turn_device_on(self):
-        """Turns device back on."""
-        self.host.servo.pwr_button()
-        time.sleep(1)
-        self.host.servo.pwr_button('release')
-
-
-    def _confirm_dut_off(self):
-        """Confirms the DUT is off.
-
-        Note: tried using wait_down instead but the test would just hang.
-        """
-        if self.host.wait_up(timeout=10):
-            raise error.TestError('DUT is on, expected off.')
-
-
-    def _confirm_dut_on(self):
-        """Confirms the DUT is on."""
-        if not self.host.wait_up(timeout=20):
-            raise error.TestError('DUT is off, expected on.')
-
-
-    def _if_device_off_turn_back_on(self):
-        """Verify device is on, if not turn it on."""
-        if not self.host.wait_up(timeout=20):
-            self._turn_device_on()
-
-
-    def run_once(self, host, case):
-        """Run the test.
-
-        @param case: the case to run for the given Client test.
-        """
-        self.host = host
-        tpm_utils.ClearTPMIfOwned(self.host)
-
-        self.autotest_client = autotest.Autotest(self.host)
-        self.autotest_client.run_test(
-            'policy_DeviceBootOnAcEnabled',
-            case=case)
-
-        # Stop supplying power from servo, emulating unplugging power.
-        self.host.servo.set_servo_v4_role('snk')
-        # Verify the dut is running on battery.
-        utils.poll_for_condition(
-            lambda: self._check_power_discharging(),
-            exception=error.TestFail(
-                'Device should be running on battery but it is not.'),
-            timeout=5,
-            sleep_interval=1,
-            desc='Polling for power status change.')
-
-        # Turns off the device.
-        self.host.servo.power_key('long_press')
-
-        self._confirm_dut_off()
-
-        # Begin supplying power from servo, emulating plugging in power.
-        self.host.servo.set_servo_v4_role('src')
-
-        if case is True:
-            self._confirm_dut_on()
-        else:
-            self._confirm_dut_off()
-            # Bring device back up.
-            self._turn_device_on()
-            self._confirm_dut_on()
diff --git a/server/site_tests/policy_DeviceChargingServer/control.AdvancedBatteryChargeMode b/server/site_tests/policy_DeviceChargingServer/control.AdvancedBatteryChargeMode
deleted file mode 100644
index 8b1710d..0000000
--- a/server/site_tests/policy_DeviceChargingServer/control.AdvancedBatteryChargeMode
+++ /dev/null
@@ -1,106 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-from autotest_lib.server.hosts import cros_host
-
-AUTHOR = 'ncrews'
-DEPENDENCIES = "servo_state:WORKING"
-NAME = 'policy_DeviceChargingServer.AdvancedBatteryChargeMode'
-TIME = 'LONG'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-ATTRIBUTES = "suite:wilco_bve"
-
-DOC = """
-Ensures the DUT's battery level is in a testable range, clears the TPM if
-needed, and then runs the specified client test to verify charging behavior
-is consistent with policies.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = cros_host.CrosHost.get_servo_arguments(args_dict)
-
-client_test = 'policy_DeviceScheduledCharging'
-
-# When AdvancedBatteryChargeMode is enabled, and the time is outside of the work
-# period, the battery should not charge above 90%. Therefore we need to be above
-# 90% so we can test this.
-MIN_BATTERY_LEVEL = 91
-
-# Various interesting day_config values of the policy, assuming the time
-# is noon on a Monday.
-BEFORE_START_DAY_CONFIG = {
-    'entries': [{'day': 'MONDAY',
-                 'charge_start_time':{'hour':22, 'minute':0},
-                 'charge_end_time':{'hour':23, 'minute':0},
-                }]
-}
-START_THRU_END_DAY_CONFIG = {
-    'entries': [{'day': 'MONDAY',
-                 'charge_start_time':{'hour':1, 'minute':0},
-                 'charge_end_time':{'hour':23, 'minute':0},
-                }]
-}
-AFTER_END_DAY_CONFIG = {
-    'entries': [{'day': 'MONDAY',
-                 'charge_start_time':{'hour':1, 'minute':0},
-                 'charge_end_time':{'hour':2, 'minute':0},
-                }]
-}
-
-# A test case consists of the policies, plus the expected power behavior.
-TEST_CASES = [
-    ({'DeviceAdvancedBatteryChargeModeEnabled': False,
-      'DeviceAdvancedBatteryChargeModeDayConfig': BEFORE_START_DAY_CONFIG},
-     'ON_AC_AND_CHARGING'),
-    ({'DeviceAdvancedBatteryChargeModeEnabled': False,
-      'DeviceAdvancedBatteryChargeModeDayConfig': START_THRU_END_DAY_CONFIG},
-     'ON_AC_AND_CHARGING'),
-    ({'DeviceAdvancedBatteryChargeModeEnabled': False,
-      'DeviceAdvancedBatteryChargeModeDayConfig': AFTER_END_DAY_CONFIG},
-     'ON_AC_AND_CHARGING'),
-
-    ({'DeviceAdvancedBatteryChargeModeEnabled': True,
-      'DeviceAdvancedBatteryChargeModeDayConfig': BEFORE_START_DAY_CONFIG},
-     'ON_AC_AND_NOT_CHARGING'),
-    ({'DeviceAdvancedBatteryChargeModeEnabled': True,
-      'DeviceAdvancedBatteryChargeModeDayConfig': START_THRU_END_DAY_CONFIG},
-     'ON_AC_AND_CHARGING'),
-    ({'DeviceAdvancedBatteryChargeModeEnabled': True,
-      'DeviceAdvancedBatteryChargeModeDayConfig': AFTER_END_DAY_CONFIG},
-     'ON_AC_AND_NOT_CHARGING'),
-]
-
-# These are used to cleanup the DUT and to prep the DUT before each test case.
-# See the test for more info.
-ON_AC_AND_CHARGING_POLICIES = {
-    'DeviceAdvancedBatteryChargeModeEnabled': False,
-    'DeviceAdvancedBatteryChargeModeDayConfig': BEFORE_START_DAY_CONFIG
-}
-ON_AC_AND_NOT_CHARGING_POLICIES = {
-    'DeviceAdvancedBatteryChargeModeEnabled': True,
-    'DeviceAdvancedBatteryChargeModeDayConfig': BEFORE_START_DAY_CONFIG
-}
-PREP_POLICIES = {
-    'ON_AC_AND_CHARGING'         : (ON_AC_AND_NOT_CHARGING_POLICIES,
-                                    'ON_AC_AND_NOT_CHARGING'),
-    'ON_AC_AND_NOT_CHARGING'     : (ON_AC_AND_CHARGING_POLICIES,
-                                    'ON_AC_AND_CHARGING'),
-    'NOT_ON_AC_AND_NOT_CHARGING' : (ON_AC_AND_CHARGING_POLICIES,
-                                    'ON_AC_AND_CHARGING'),
-}
-
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('policy_DeviceChargingServer',
-                 host=host,
-                 client_test=client_test,
-                 test_cases=TEST_CASES,
-                 min_battery_level=MIN_BATTERY_LEVEL,
-                 prep_policies=PREP_POLICIES)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceChargingServer/control.BatteryChargeMode b/server/site_tests/policy_DeviceChargingServer/control.BatteryChargeMode
deleted file mode 100644
index 5edc56a..0000000
--- a/server/site_tests/policy_DeviceChargingServer/control.BatteryChargeMode
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-from autotest_lib.server.hosts import cros_host
-
-AUTHOR = 'ncrews'
-DEPENDENCIES = "servo_state:WORKING"
-NAME = 'policy_DeviceChargingServer.BatteryChargeMode'
-TIME = 'LONG'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-ATTRIBUTES = "suite:wilco_bve"
-
-DOC = """
-Ensures the DUT's battery level is in a testable range, clears the TPM if
-needed, and then runs the specified client test to verify charging behavior
-is consistent with policies.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = cros_host.CrosHost.get_servo_arguments(args_dict)
-
-client_test = 'policy_DeviceCharging'
-
-# When DeviceBatteryChargeMode is set to BATTERY_CHARGE_PRIMARILY_AC_USE, then
-# the DUT will not charge when above 86%. In order to test this, we need to be
-# above this threshold.
-MIN_BATTERY_LEVEL = 87
-
-# A test case consists of the policies, plus the expected power behavior.
-TEST_CASES = [
-    ({'DeviceBatteryChargeMode': 1}, # BATTERY_CHARGE_STANDARD
-     'ON_AC_AND_CHARGING'),
-    ({'DeviceBatteryChargeMode': 2}, # BATTERY_CHARGE_EXPRESS_CHARGE
-     'ON_AC_AND_CHARGING'),
-    ({'DeviceBatteryChargeMode': 3}, # BATTERY_CHARGE_PRIMARILY_AC_USE
-     'ON_AC_AND_CHARGING'),
-    ({'DeviceBatteryChargeMode': 4}, # BATTERY_CHARGE_ADAPTIVE
-     'ON_AC_AND_CHARGING'),
-    ({'DeviceBatteryChargeMode': 5, # BATTERY_CHARGE_CUSTOM
-      'DeviceBatteryChargeCustomStartCharging': 50,
-      'DeviceBatteryChargeCustomStopCharging': 60},
-     'ON_AC_AND_NOT_CHARGING'),
-    ({'DeviceBatteryChargeMode': 5, # BATTERY_CHARGE_CUSTOM
-      'DeviceBatteryChargeCustomStartCharging': 50,
-      'DeviceBatteryChargeCustomStopCharging': 100},
-     'ON_AC_AND_CHARGING'),
-]
-
-# These are used to cleanup the DUT and to prep the DUT before each test case.
-# See the test for more info.
-ON_AC_AND_CHARGING_POLICIES = {
-    'DeviceBatteryChargeMode': 1, # BATTERY_CHARGE_STANDARD
-}
-ON_AC_AND_NOT_CHARGING_POLICIES = {
-    'DeviceBatteryChargeMode': 5, # BATTERY_CHARGE_CUSTOM
-    'DeviceBatteryChargeCustomStartCharging': 50,
-    'DeviceBatteryChargeCustomStopCharging': 60,
-}
-PREP_POLICIES = {
-    'ON_AC_AND_CHARGING'         : (ON_AC_AND_NOT_CHARGING_POLICIES,
-                                    'ON_AC_AND_NOT_CHARGING'),
-    'ON_AC_AND_NOT_CHARGING'     : (ON_AC_AND_CHARGING_POLICIES,
-                                    'ON_AC_AND_CHARGING'),
-}
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('policy_DeviceChargingServer',
-                 host=host,
-                 client_test=client_test,
-                 test_cases=TEST_CASES,
-                 min_battery_level=MIN_BATTERY_LEVEL,
-                 prep_policies=PREP_POLICIES)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceChargingServer/control.PowerPeakShift b/server/site_tests/policy_DeviceChargingServer/control.PowerPeakShift
deleted file mode 100644
index be6afd4..0000000
--- a/server/site_tests/policy_DeviceChargingServer/control.PowerPeakShift
+++ /dev/null
@@ -1,134 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-from autotest_lib.server.hosts import cros_host
-
-AUTHOR = 'ncrews'
-DEPENDENCIES = "servo_state:WORKING"
-NAME = 'policy_DeviceChargingServer.PowerPeakShift'
-TIME = 'LONG'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-ATTRIBUTES = "suite:wilco_bve"
-
-DOC = """
-Ensures the DUT's battery level is in a testable range, clears the TPM if
-needed, and then runs the specified client test to verify charging behavior
-is consistent with policies.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = cros_host.CrosHost.get_servo_arguments(args_dict)
-
-client_test = 'policy_DeviceScheduledCharging'
-
-# The lowest |DevicePowerPeakShift.battery_threshold| we can set is 15%. We
-# need to be able to set it below the current battery level.
-MIN_BATTERY_LEVEL = 16
-
-# Various interesting day_config values of the policy, assuming the time
-# is noon on a Monday.
-BEFORE_START_DAY_CONFIG = {
-    'entries': [{'day': 'MONDAY',
-                 'start_time':{'hour':21, 'minute':0},
-                 'end_time':{'hour':22, 'minute':0},
-                 'charge_start_time':{'hour':23, 'minute':0},
-                }]
-}
-START_THRU_END_DAY_CONFIG = {
-    'entries': [{'day': 'MONDAY',
-                 'start_time':{'hour':1, 'minute':0},
-                 'end_time':{'hour':22, 'minute':0},
-                 'charge_start_time':{'hour':23, 'minute':0},
-                }]
-}
-END_THRU_CHARGE_START_DAY_CONFIG = {
-    'entries': [{'day': 'MONDAY',
-                 'start_time':{'hour':1, 'minute':0},
-                 'end_time':{'hour':2, 'minute':0},
-                 'charge_start_time':{'hour':23, 'minute':0},
-                }]
-}
-AFTER_CHARGE_START_DAY_CONFIG = {
-    'entries': [{'day': 'MONDAY',
-                 'start_time':{'hour':1, 'minute':0},
-                 'end_time':{'hour':2, 'minute':0},
-                 'charge_start_time':{'hour':3, 'minute':0},
-                }]
-}
-
-# A test case consists of the policies, plus the expected power behavior.
-TEST_CASES = [
-    ({'DevicePowerPeakShiftEnabled': False,
-      'DevicePowerPeakShiftBatteryThreshold': 0,
-      'DevicePowerPeakShiftDayConfig':{}},
-     'ON_AC_AND_CHARGING'),
-
-    ({'DevicePowerPeakShiftEnabled': True,
-      'DevicePowerPeakShiftBatteryThreshold': 15,
-      'DevicePowerPeakShiftDayConfig': BEFORE_START_DAY_CONFIG},
-     'ON_AC_AND_CHARGING'),
-    ({'DevicePowerPeakShiftEnabled': True,
-      'DevicePowerPeakShiftBatteryThreshold': 15,
-      'DevicePowerPeakShiftDayConfig': START_THRU_END_DAY_CONFIG},
-     'NOT_ON_AC_AND_NOT_CHARGING'),
-    ({'DevicePowerPeakShiftEnabled': True,
-      'DevicePowerPeakShiftBatteryThreshold': 15,
-      'DevicePowerPeakShiftDayConfig': END_THRU_CHARGE_START_DAY_CONFIG},
-     'ON_AC_AND_NOT_CHARGING'),
-    ({'DevicePowerPeakShiftEnabled': True,
-      'DevicePowerPeakShiftBatteryThreshold': 15,
-      'DevicePowerPeakShiftDayConfig': AFTER_CHARGE_START_DAY_CONFIG},
-     'ON_AC_AND_CHARGING'),
-
-    ({'DevicePowerPeakShiftEnabled': True,
-      'DevicePowerPeakShiftBatteryThreshold': 100,
-      'DevicePowerPeakShiftDayConfig': BEFORE_START_DAY_CONFIG},
-     'ON_AC_AND_CHARGING'),
-    ({'DevicePowerPeakShiftEnabled': True,
-      'DevicePowerPeakShiftBatteryThreshold': 100,
-      'DevicePowerPeakShiftDayConfig': START_THRU_END_DAY_CONFIG},
-     'ON_AC_AND_NOT_CHARGING'),
-    ({'DevicePowerPeakShiftEnabled': True,
-      'DevicePowerPeakShiftBatteryThreshold': 100,
-      'DevicePowerPeakShiftDayConfig': END_THRU_CHARGE_START_DAY_CONFIG},
-     'ON_AC_AND_NOT_CHARGING'),
-    ({'DevicePowerPeakShiftEnabled': True,
-      'DevicePowerPeakShiftBatteryThreshold': 100,
-      'DevicePowerPeakShiftDayConfig': AFTER_CHARGE_START_DAY_CONFIG},
-     'ON_AC_AND_CHARGING'),
-]
-
-# These are used to clean up the DUT and to prep the DUT before each test case.
-# See the test for more info.
-ON_AC_AND_NOT_CHARGING_POLICIES = {
-    'DevicePowerPeakShiftEnabled': True,
-    'DevicePowerPeakShiftBatteryThreshold': 15,
-    'DevicePowerPeakShiftDayConfig': END_THRU_CHARGE_START_DAY_CONFIG
-}
-ON_AC_AND_CHARGING_POLICIES = {
-    'DevicePowerPeakShiftEnabled': False,
-    'DevicePowerPeakShiftBatteryThreshold': 0,
-    'DevicePowerPeakShiftDayConfig': {}
-}
-PREP_POLICIES = {
-    'ON_AC_AND_CHARGING'         : (ON_AC_AND_NOT_CHARGING_POLICIES,
-                                    'ON_AC_AND_NOT_CHARGING'),
-    'ON_AC_AND_NOT_CHARGING'     : (ON_AC_AND_CHARGING_POLICIES,
-                                    'ON_AC_AND_CHARGING'),
-    'NOT_ON_AC_AND_NOT_CHARGING' : (ON_AC_AND_CHARGING_POLICIES,
-                                    'ON_AC_AND_CHARGING'),
-}
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('policy_DeviceChargingServer',
-                 host=host,
-                 client_test=client_test,
-                 test_cases=TEST_CASES,
-                 min_battery_level=MIN_BATTERY_LEVEL,
-                 prep_policies=PREP_POLICIES)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceChargingServer/policy_DeviceChargingServer.py b/server/site_tests/policy_DeviceChargingServer/policy_DeviceChargingServer.py
deleted file mode 100644
index a9666db..0000000
--- a/server/site_tests/policy_DeviceChargingServer/policy_DeviceChargingServer.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import autotest
-from autotest_lib.server.cros.enterprise import device_policy_test
-from autotest_lib.server.cros.power import servo_charger, utils
-
-
-class policy_DeviceChargingServer(device_policy_test.DevicePolicyServerTest):
-    """
-    A variant of DevicePolicyServerTest that verifies charging policy behavior.
-    As of this writing, these features are only present on the Wilco platform.
-
-    It requires a Servo v4 USB-C and Servo Micro attached to the DUT.
-    """
-    version = 1
-
-    # To be in a testable state, the DUT has to have low enough battery that
-    # it can charge. Let's give ourselves a buffer for when the battery
-    # inevitably charges a bit in the middle of the test.
-    MAX_BATTERY_LEVEL = 95
-
-    # Allow 15 minutes for battery to charge or drain to the needed range.
-    BATTERY_CHANGE_TIMEOUT = 15 * 60
-
-    def run_once(self, host, client_test, test_cases, min_battery_level,
-                 prep_policies):
-        """
-        Ensures the DUT's battery level is low enough to charge and above the
-        specified level, and then runs the specified client test. Assumes any
-        TPM handling is done in the parent class.
-        """
-        utils.put_host_battery_in_range(host, min_battery_level,
-                                        self.MAX_BATTERY_LEVEL,
-                                        self.BATTERY_CHANGE_TIMEOUT)
-        charger = servo_charger.ServoV4ChargeManager(host, host.servo)
-        charger.start_charging()
-
-        autotest_client = autotest.Autotest(host)
-        autotest_client.run_test(
-                client_test,
-                check_client_result=True,
-                test_cases=test_cases,
-                min_battery_level=min_battery_level,
-                prep_policies=prep_policies)
diff --git a/server/site_tests/policy_DeviceServer/control.AllowBluetooth_false b/server/site_tests/policy_DeviceServer/control.AllowBluetooth_false
deleted file mode 100644
index 2f393ee..0000000
--- a/server/site_tests/policy_DeviceServer/control.AllowBluetooth_false
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.AllowBluetooth_false'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the DeviceAllowBluetooth
-policy. If the policy is set to False, Bluetooth should not be available.
-
-"""
-
-client_test = 'policy_DeviceAllowBluetooth'
-case = False
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.AllowBluetooth_notset b/server/site_tests/policy_DeviceServer/control.AllowBluetooth_notset
deleted file mode 100644
index ea7d55d..0000000
--- a/server/site_tests/policy_DeviceServer/control.AllowBluetooth_notset
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.AllowBluetooth_notset'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the DeviceAllowBluetooth
-policy. If the policy is not set, Bluetooth should be available.
-
-"""
-
-client_test = 'policy_DeviceAllowBluetooth'
-case = None
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.AllowBluetooth_true b/server/site_tests/policy_DeviceServer/control.AllowBluetooth_true
deleted file mode 100644
index 809dfef..0000000
--- a/server/site_tests/policy_DeviceServer/control.AllowBluetooth_true
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.AllowBluetooth_true'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the DeviceAllowBluetooth
-policy. If the policy is set to True, Bluetooth should be available.
-
-"""
-
-client_test = 'policy_DeviceAllowBluetooth'
-case = True
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.DeviceWilcoDtcAllowed_false b/server/site_tests/policy_DeviceServer/control.DeviceWilcoDtcAllowed_false
deleted file mode 100644
index 5a1535c..0000000
--- a/server/site_tests/policy_DeviceServer/control.DeviceWilcoDtcAllowed_false
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.DeviceWilcoDtcAllowed_false'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = "suite:wilco_bve"
-
-DOC = """
-Sets up and runs the client test for the DeviceWilcoDtcAllowed
-policy. If the policy is set to False, Wilco daemons should not start running
-on the device.
-
-"""
-
-client_test = 'policy_DeviceWilcoDtcAllowed'
-case = False
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.DeviceWilcoDtcAllowed_notset b/server/site_tests/policy_DeviceServer/control.DeviceWilcoDtcAllowed_notset
deleted file mode 100644
index 4356293..0000000
--- a/server/site_tests/policy_DeviceServer/control.DeviceWilcoDtcAllowed_notset
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.DeviceWilcoDtcAllowed_notset'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = "suite:wilco_bve"
-
-DOC = """
-Sets up and runs the client test for the DeviceWilcoDtcAllowed
-policy. If the policy is not set, Wilco daemons should not start running
-on the device.
-
-"""
-
-client_test = 'policy_DeviceWilcoDtcAllowed'
-case = None
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.DeviceWilcoDtcAllowed_true b/server/site_tests/policy_DeviceServer/control.DeviceWilcoDtcAllowed_true
deleted file mode 100644
index ea5a0b6..0000000
--- a/server/site_tests/policy_DeviceServer/control.DeviceWilcoDtcAllowed_true
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.DeviceWilcoDtcAllowed_true'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = "suite:wilco_bve"
-
-DOC = """
-Sets up and runs the client test for the DeviceWilcoDtcAllowed
-policy. If the policy is set to True, Wilco daemons should start running
-on the device.
-
-"""
-
-client_test = 'policy_DeviceWilcoDtcAllowed'
-case = True
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.EnterprisePrinters_allall b/server/site_tests/policy_DeviceServer/control.EnterprisePrinters_allall
deleted file mode 100644
index 629dcd9..0000000
--- a/server/site_tests/policy_DeviceServer/control.EnterprisePrinters_allall
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pawliczek'
-NAME = 'policy_DeviceServer.EnterprisePrinters_allall'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for NativePrintersBulk* and
-DeviceNativePrinters* policies.
-
-"""
-
-client_test = 'policy_NativePrintersBulkAccessMode'
-case = ('allowall','allowall')
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.EnterprisePrinters_blwl b/server/site_tests/policy_DeviceServer/control.EnterprisePrinters_blwl
deleted file mode 100644
index 090aa10..0000000
--- a/server/site_tests/policy_DeviceServer/control.EnterprisePrinters_blwl
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pawliczek'
-NAME = 'policy_DeviceServer.EnterprisePrinters_blwl'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for NativePrintersBulk* and
-DeviceNativePrinters* policies.
-
-"""
-
-client_test = 'policy_NativePrintersBulkAccessMode'
-case = ('blacklist','whitelist')
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.EnterprisePrinters_wlnone b/server/site_tests/policy_DeviceServer/control.EnterprisePrinters_wlnone
deleted file mode 100644
index 5501a91..0000000
--- a/server/site_tests/policy_DeviceServer/control.EnterprisePrinters_wlnone
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'pawliczek'
-NAME = 'policy_DeviceServer.EnterprisePrinters_wlnone'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for NativePrintersBulk* and
-DeviceNativePrinters* policies.
-
-"""
-
-client_test = 'policy_NativePrintersBulkAccessMode'
-case = ('whitelist',None)
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.EphemeralUser_false b/server/site_tests/policy_DeviceServer/control.EphemeralUser_false
deleted file mode 100644
index d7bd263..0000000
--- a/server/site_tests/policy_DeviceServer/control.EphemeralUser_false
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.EphemeralUser_false'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the DeviceEphemeralUsersEnabled
-policy.
-
-"""
-
-client_test = 'policy_DeviceEphemeralUsersEnabled'
-case = False
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.EphemeralUser_notset b/server/site_tests/policy_DeviceServer/control.EphemeralUser_notset
deleted file mode 100644
index fcc7c1a..0000000
--- a/server/site_tests/policy_DeviceServer/control.EphemeralUser_notset
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.EphemeralUser_notset'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the DeviceEphemeralUsersEnabled
-policy.
-
-"""
-
-client_test = 'policy_DeviceEphemeralUsersEnabled'
-case = None
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.EphemeralUser_true b/server/site_tests/policy_DeviceServer/control.EphemeralUser_true
deleted file mode 100644
index be607f9..0000000
--- a/server/site_tests/policy_DeviceServer/control.EphemeralUser_true
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.EphemeralUser_true'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the DeviceEphemeralUsersEnabled
-policy.
-
-"""
-
-client_test = 'policy_DeviceEphemeralUsersEnabled'
-case = True
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.KioskMode_enabled b/server/site_tests/policy_DeviceServer/control.KioskMode_enabled
deleted file mode 100644
index 89e2159..0000000
--- a/server/site_tests/policy_DeviceServer/control.KioskMode_enabled
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.KioskMode_enabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the KioskModeEnabled
-policy.
-
-"""
-
-client_test = 'policy_KioskModeEnabled'
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.ReportUploadFrequency_60s b/server/site_tests/policy_DeviceServer/control.ReportUploadFrequency_60s
deleted file mode 100644
index 8225cbd..0000000
--- a/server/site_tests/policy_DeviceServer/control.ReportUploadFrequency_60s
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'carvalheira'
-NAME = 'policy_DeviceServer.ReportUploadFrequency_60s'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the ReportUploadFrequency
-policy.
-
-"""
-
-client_test = 'policy_ReportUploadFrequency'
-case = '60s'
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.SystemTimezone b/server/site_tests/policy_DeviceServer/control.SystemTimezone
deleted file mode 100644
index dfe5ad5..0000000
--- a/server/site_tests/policy_DeviceServer/control.SystemTimezone
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DeviceServer.SystemTimezone'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the SystemTimezone
-policy.
-
-"""
-
-client_test = 'policy_SystemTimezone'
-case = True
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.SystemTimezone_notset b/server/site_tests/policy_DeviceServer/control.SystemTimezone_notset
deleted file mode 100644
index 91fdd86..0000000
--- a/server/site_tests/policy_DeviceServer/control.SystemTimezone_notset
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_DeviceServer.SystemTimezone_notset'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the SystemTimezone
-policy.
-
-"""
-
-client_test = 'policy_SystemTimezone'
-case = None
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.VirtualMachinesAllowed_false b/server/site_tests/policy_DeviceServer/control.VirtualMachinesAllowed_false
deleted file mode 100644
index 2fdcadf..0000000
--- a/server/site_tests/policy_DeviceServer/control.VirtualMachinesAllowed_false
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.VirtualMachinesAllowed_false'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the VirtualMachinesAllowed
-policy. If the policy is set to False, virtual machines should not be allowed.
-
-"""
-
-client_test = 'policy_VirtualMachinesAllowed'
-case = False
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.VirtualMachinesAllowed_notset b/server/site_tests/policy_DeviceServer/control.VirtualMachinesAllowed_notset
deleted file mode 100644
index 61171c6..0000000
--- a/server/site_tests/policy_DeviceServer/control.VirtualMachinesAllowed_notset
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.VirtualMachinesAllowed_notset'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the VirtualMachinesAllowed
-policy. If the policy is not set, virtual machines should not be allowed on an
-enrolled device.
-
-"""
-
-client_test = 'policy_VirtualMachinesAllowed'
-case = None
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/control.VirtualMachinesAllowed_true b/server/site_tests/policy_DeviceServer/control.VirtualMachinesAllowed_true
deleted file mode 100644
index 52014b6..0000000
--- a/server/site_tests/policy_DeviceServer/control.VirtualMachinesAllowed_true
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_DeviceServer.VirtualMachinesAllowed_true'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable until test can be fixed: http://b/160218741
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the client test for the VirtualMachinesAllowed
-policy. If the policy is set to True, virtual machines should be allowed.
-
-"""
-
-client_test = 'policy_VirtualMachinesAllowed'
-case = True
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_DeviceServer', host=host,
-                  client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_DeviceServer/policy_DeviceServer.py b/server/site_tests/policy_DeviceServer/policy_DeviceServer.py
deleted file mode 100644
index 2827dc4..0000000
--- a/server/site_tests/policy_DeviceServer/policy_DeviceServer.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-
-class policy_DeviceServer(test.test):
-    """
-    policy_DeviceServer is used to kick off an arbitrary client test.
-
-    """
-    version = 1
-
-
-    def cleanup(self):
-        """Cleanup for this test."""
-        tpm_utils.ClearTPMIfOwned(self.host)
-        self.host.reboot()
-
-
-    def run_once(self, client_test, host, case=None):
-        """
-        Starting point of this test.
-
-        Note: base class sets host as self._host.
-
-        @param client_test: the name of the Client test to run.
-        @param case: the case to run for the given Client test.
-
-        """
-
-        # Clear TPM to ensure that client test can enroll device.
-        self.host = host
-        tpm_utils.ClearTPMIfOwned(self.host)
-
-        self.autotest_client = autotest.Autotest(self.host)
-        self.autotest_client.run_test(
-            client_test, case=case, check_client_result=True)
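
All of the deleted policy_DeviceServer control files above share the same shape: name a client test, pick a case value, and hand both to policy_DeviceServer, which clears the TPM and runs the client test. A minimal sketch of that recurring pattern follows; 'policy_SomeDevicePolicy' and the case value are placeholders, and hosts, job, machines, and parallel_simple are globals provided by the autotest control-file environment.

    # Placeholder control-file pattern; 'policy_SomeDevicePolicy' is not a
    # real test, and the surrounding globals come from the control runner.
    client_test = 'policy_SomeDevicePolicy'
    case = True

    def run(machine):
        host = hosts.create_host(machine)
        job.run_test('policy_DeviceServer', host=host,
                     client_test=client_test, case=case)

    parallel_simple(run, machines)
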
diff --git a/server/site_tests/policy_ExternalStorageServer/control.Arc b/server/site_tests/policy_ExternalStorageServer/control.Arc
deleted file mode 100644
index b8d97eb..0000000
--- a/server/site_tests/policy_ExternalStorageServer/control.Arc
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'dbeckett'
-NAME = 'policy_ArcExternalStorageDisabled.Arc'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-DEPENDENCIES = 'servo_state:WORKING'
-
-DOC = """
-This test connects the servo repair USB stick to the DUT, then runs the
-client-side policy_ArcExternalStorageDisabled test. At the end of the
-test it disconnects the USB stick.
-
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('policy_ExternalStorageServer',
-                 host=host,
-                 client_test='policy_ArcExternalStorageDisabled')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_ExternalStorageServer/control.Disabled b/server/site_tests/policy_ExternalStorageServer/control.Disabled
deleted file mode 100644
index 8836ee5..0000000
--- a/server/site_tests/policy_ExternalStorageServer/control.Disabled
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'timkovich'
-NAME = 'policy_ExternalStorageServer.Disabled'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-DEPENDENCIES = 'servo_state:WORKING'
-
-DOC = """
-This test connects the servo repair USB stick to the DUT, then runs the
-client-side tests for the ExternalStorageDisabled policy. At the end of the
-test it disconnects the USB stick.
-
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('policy_ExternalStorageServer',
-                 host=host,
-                 client_test='policy_ExternalStorageDisabled')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_ExternalStorageServer/control.ReadOnly b/server/site_tests/policy_ExternalStorageServer/control.ReadOnly
deleted file mode 100644
index c563028..0000000
--- a/server/site_tests/policy_ExternalStorageServer/control.ReadOnly
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'timkovich'
-NAME = 'policy_ExternalStorageServer.ReadOnly'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-DEPENDENCIES = 'servo_state:WORKING'
-
-DOC = """
-This test connects the servo repair USB stick to the DUT, then runs the
-client-side tests for the ExternalStorageReadOnly policy. At the end of the
-test it disconnects the USB stick.
-
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('policy_ExternalStorageServer',
-                 host=host,
-                 client_test='policy_ExternalStorageReadOnly')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_ExternalStorageServer/policy_ExternalStorageServer.py b/server/site_tests/policy_ExternalStorageServer/policy_ExternalStorageServer.py
deleted file mode 100644
index 90b2bbf..0000000
--- a/server/site_tests/policy_ExternalStorageServer/policy_ExternalStorageServer.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-
-class policy_ExternalStorageServer(test.test):
-    """
-    This test connects the servo repair USB stick, then runs a client-side
-    test that relies on a USB stick being connected.
-
-    """
-    version = 1
-
-
-    def _run_client_test(self, name, case):
-        """
-        Run client test.
-
-        @raises error.TestFail: In the instance were the client test fails.
-
-        """
-        logging.info('Performing %s' % case)
-        self.autotest_client.run_test(name,
-                                      case=case,
-                                      check_client_result=True)
-
-
-    def cleanup(self):
-        """Disconnect USB stick."""
-        self.host.servo.switch_usbkey('host')
-
-
-    def run_once(self, host, client_test=''):
-        """
-        @param host: A host object representing the DUT.
-        @param client_test: Name of client test to run.
-
-        """
-        self.host = host
-        self.autotest_client = autotest.Autotest(self.host)
-
-        # Connect servo repair USB stick
-        self.host.servo.switch_usbkey('dut')
-
-        policy_values = ['True_Block', 'NotSet_Allow', 'False_Allow']
-
-        for case in policy_values:
-            self._run_client_test(client_test, case)
diff --git a/server/site_tests/policy_GlobalNetworkSettingsServer/control.AllowOnlyPolicyNetworksToAutoconnect b/server/site_tests/policy_GlobalNetworkSettingsServer/control.AllowOnlyPolicyNetworksToAutoconnect
deleted file mode 100644
index dcae986..0000000
--- a/server/site_tests/policy_GlobalNetworkSettingsServer/control.AllowOnlyPolicyNetworksToAutoconnect
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_GlobalNetworkSettingsServer.AllowOnlyPolicyNetworksToAutoconnect'
-TEST_TYPE = 'Server'
-# Disable this test until it can be fixed: http://b/170942348
-# ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test sets up a policy-defined and a user-defined network. The
-GlobalNetworkConfiguration policy 'AllowOnlyPolicyNetworksToAutoconnect' is
-set to True. The DUT is disconnected from the network. Only the policy-defined
-network should automatically reconnect.
-
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_GlobalNetworkSettingsServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 gnc_settings={'AllowOnlyPolicyNetworksToAutoconnect': True})
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_GlobalNetworkSettingsServer/control.AllowOnlyPolicyNetworksToConnect b/server/site_tests/policy_GlobalNetworkSettingsServer/control.AllowOnlyPolicyNetworksToConnect
deleted file mode 100644
index 7fab3b7..0000000
--- a/server/site_tests/policy_GlobalNetworkSettingsServer/control.AllowOnlyPolicyNetworksToConnect
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_GlobalNetworkSettingsServer.AllowOnlyPolicyNetworksToConnect'
-TEST_TYPE = 'Server'
-# Disable this test until it can be fixed: http://b/170942348
-# ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test sets up a policy-defined and a user-defined network. The
-GlobalNetworkConfiguration policy 'AllowOnlyPolicyNetworksToConnect' is
-set to True. Only the policy-defined network should be allowed to connect.
-
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_GlobalNetworkSettingsServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 gnc_settings={'AllowOnlyPolicyNetworksToConnect': True})
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_GlobalNetworkSettingsServer/control.AllowOnlyPolicyNetworksToConnectIfAvailable b/server/site_tests/policy_GlobalNetworkSettingsServer/control.AllowOnlyPolicyNetworksToConnectIfAvailable
deleted file mode 100644
index 05f459c..0000000
--- a/server/site_tests/policy_GlobalNetworkSettingsServer/control.AllowOnlyPolicyNetworksToConnectIfAvailable
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_GlobalNetworkSettingsServer.AllowOnlyPolicyNetworksToConnectIfAvailable'
-TEST_TYPE = 'Server'
-# Disable this test until it can be fixed: http://b/170942348
-# ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test sets up a policy-defined and a user-defined network. The
-GlobalNetworkConfiguration policy 'AllowOnlyPolicyNetworksToConnectIfAvailable'
-is set to True. Only the policy-defined network should be allowed to connect.
-The test then takes down the policy network, after which the user network
-should be allowed to connect.
-
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_GlobalNetworkSettingsServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 gnc_settings={
-                    'AllowOnlyPolicyNetworksToConnectIfAvailable': True
-                 })
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_GlobalNetworkSettingsServer/control.DisableNetworkTypes_WiFi b/server/site_tests/policy_GlobalNetworkSettingsServer/control.DisableNetworkTypes_WiFi
deleted file mode 100644
index 9ecfdc3..0000000
--- a/server/site_tests/policy_GlobalNetworkSettingsServer/control.DisableNetworkTypes_WiFi
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_GlobalNetworkSettingsServer.DisableNetworkTypes_WiFi'
-TEST_TYPE = 'Server'
-# Disable this test until it can be fixed: http://b/170942348
-# ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-This test sets the 'DisableNetworkTypes' policy for 'WiFi'. The DUT then
-attempts to connect to a WiFi network but should be blocked.
-
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_GlobalNetworkSettingsServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 gnc_settings={'DisableNetworkTypes': ['WiFi']})
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_GlobalNetworkSettingsServer/policy_GlobalNetworkSettingsServer.py b/server/site_tests/policy_GlobalNetworkSettingsServer/policy_GlobalNetworkSettingsServer.py
deleted file mode 100644
index 2356941..0000000
--- a/server/site_tests/policy_GlobalNetworkSettingsServer/policy_GlobalNetworkSettingsServer.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import pickle
-import socket
-
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server import autotest
-from autotest_lib.server import site_linux_system
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class policy_GlobalNetworkSettingsServer(wifi_cell_test_base.WiFiCellTestBase):
-    version = 1
-
-
-    def cleanup(self):
-        """Clear TPM and catch errant socket exceptions."""
-        try:
-            super(policy_GlobalNetworkSettingsServer, self).cleanup()
-        except socket.error as e:
-            # Some of the WiFi components are closed when the DUT reboots,
-            # and a socket error is raised when cleanup tries to close them
-            # again.
-            logging.info(e)
-
-        tpm_utils.ClearTPMIfOwned(self.host)
-        self.host.reboot()
-
-
-    def run_client_test(self, gnc_settings, policy_network_config,
-                        user_network_config):
-        """
-        Run the client side test.
-
-        Pickle the NetworkConfig objects so they can be passed to the client
-        test.
-
-        @param gnc_settings: GlobalNetworkConfiguration policies to enable.
-        @param policy_network_config: NetworkConfig of the policy-defined
-            network.
-        @param user_network_config: NetworkConfig of the user-defined
-            network.
-
-        """
-        client_at = autotest.Autotest(self.host)
-        client_at.run_test(
-            'policy_GlobalNetworkSettings',
-            gnc_settings=gnc_settings,
-            policy_network_pickle=pickle.dumps(policy_network_config),
-            user_network_pickle=pickle.dumps(user_network_config),
-            check_client_result=True)
-
-
-    def run_once(self, host=None, gnc_settings=None):
-        """
-        Set up an AP for a WiFi authentication type then run the client test.
-
-        @param host: A host object representing the DUT.
-        @param gnc_settings: GlobalNetworkConfiguration policies to enable.
-
-        """
-        self.host = host
-
-        # Clear TPM to ensure that client test can enroll device.
-        tpm_utils.ClearTPMIfOwned(self.host)
-
-        self.context.router.require_capabilities(
-                [site_linux_system.LinuxSystem.CAPABILITY_MULTI_AP])
-        self.context.router.deconfig()
-
-        # Configure 2 open APs.
-        for ssid, channel in [('Policy_Network', 5), ('User_Network', 149)]:
-            n_mode = hostap_config.HostapConfig.MODE_11N_MIXED
-            ap_config = hostap_config.HostapConfig(channel=channel,
-                                                   mode=n_mode,
-                                                   ssid=ssid)
-            self.context.configure(ap_config, multi_interface=True)
-
-        policy_network_config = NetworkConfig(
-                                    self.context.router.get_ssid(instance=0))
-        user_network_config = NetworkConfig(
-                                  self.context.router.get_ssid(instance=1))
-
-        if gnc_settings.get('AllowOnlyPolicyNetworksToAutoconnect'):
-            policy_network_config.autoconnect = True
-
-        # Run the client test with both the policy and user networks available.
-        self.run_client_test(gnc_settings, policy_network_config,
-                             user_network_config)
-
-        # The AllowOnlyPolicyNetworksToConnectIfAvailable policy behaves
-        # differently depending on what networks are available. For this test,
-        # take down the policy network and run the client test again.
-        if gnc_settings.get('AllowOnlyPolicyNetworksToConnectIfAvailable'):
-            self.context.router.deconfig_aps(instance=0)
-
-            self.run_client_test(gnc_settings, policy_network_config,
-                                 user_network_config)
-
-        self.context.router.deconfig()
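
The run_client_test() method above serializes the NetworkConfig objects with pickle.dumps() before handing them to the client test. The client-side half of that hand-off is not in this diff; a minimal sketch of what it might look like, assuming the client receives the same keyword arguments, is:

    import pickle

    def restore_network_configs(policy_network_pickle, user_network_pickle):
        # pickle.loads() reverses the pickle.dumps() calls made on the server,
        # returning the original NetworkConfig objects to the client test.
        policy_network = pickle.loads(policy_network_pickle)
        user_network = pickle.loads(user_network_pickle)
        return policy_network, user_network
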
diff --git a/server/site_tests/policy_WiFiAutoconnectServer/control.false b/server/site_tests/policy_WiFiAutoconnectServer/control.false
deleted file mode 100644
index 205a197..0000000
--- a/server/site_tests/policy_WiFiAutoconnectServer/control.false
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiAutoconnectServer.false'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-The 'policy_WiFiAutoconnectServer' test configures an AP and runs the
-client-side 'policy_WiFiAutoconnect' test, which uses the AP to test network
-autoconnect policies with autoconnect set to False.
-
-"""
-
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    channel = 6
-    n_mode = hostap_config.HostapConfig.MODE_11N_MIXED
-    ap_config = hostap_config.HostapConfig(channel=channel, mode=n_mode)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiAutoconnectServer',
-                 raw_cmdline_args=args,
-                 autoconnect=False,
-                 additional_params=ap_config,
-                 host=host)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiAutoconnectServer/control.none b/server/site_tests/policy_WiFiAutoconnectServer/control.none
deleted file mode 100644
index 8faed4a..0000000
--- a/server/site_tests/policy_WiFiAutoconnectServer/control.none
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiAutoconnectServer.none'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-The 'policy_WiFiAutoconnectServer' test configures an AP and runs the
-client-side 'policy_WiFiAutoconnect' test, which uses the AP to test network
-autoconnect policies with autoconnect unset.
-
-"""
-
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    channel = 6
-    n_mode = hostap_config.HostapConfig.MODE_11N_MIXED
-    ap_config = hostap_config.HostapConfig(channel=channel, mode=n_mode)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiAutoconnectServer',
-                 raw_cmdline_args=args,
-                 autoconnect=None,
-                 additional_params=ap_config,
-                 host=host)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiAutoconnectServer/control.true b/server/site_tests/policy_WiFiAutoconnectServer/control.true
deleted file mode 100644
index 69e54677..0000000
--- a/server/site_tests/policy_WiFiAutoconnectServer/control.true
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiAutoconnectServer.true'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-The 'policy_WiFiAutoconnectServer' test configures an AP and runs the
-client-side 'policy_WiFiAutoconnect' test, which uses the AP to test network
-autoconnect policies with autoconnect set to True.
-
-"""
-
-
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    channel = 6
-    n_mode = hostap_config.HostapConfig.MODE_11N_MIXED
-    ap_config = hostap_config.HostapConfig(channel=channel, mode=n_mode)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiAutoconnectServer',
-                 raw_cmdline_args=args,
-                 autoconnect=True,
-                 additional_params=ap_config,
-                 host=host)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiAutoconnectServer/policy_WiFiAutoconnectServer.py b/server/site_tests/policy_WiFiAutoconnectServer/policy_WiFiAutoconnectServer.py
deleted file mode 100644
index 8ac1bf4..0000000
--- a/server/site_tests/policy_WiFiAutoconnectServer/policy_WiFiAutoconnectServer.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import autotest
-from autotest_lib.server import site_linux_system
-from autotest_lib.server.cros.network import hostap_config
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class policy_WiFiAutoconnectServer(wifi_cell_test_base.WiFiCellTestBase):
-    version = 1
-
-
-    def parse_additional_arguments(self, commandline_args, additional_params):
-        """
-        Hook into the super class to take control file parameters.
-
-        @param commandline_args: dict of parsed parameters from the autotest.
-        @param additional_params: HostapConfig object.
-
-        """
-        self._ap_config = additional_params
-
-
-    def run_once(self, host, autoconnect=None):
-        """
-        Set up AP then run the client side autoconnect tests.
-
-        @param host: A host object representing the DUT.
-        @param autoconnect: Autoconnect setting for network policy.
-
-        """
-        self.context.router.require_capabilities(
-                [site_linux_system.LinuxSystem.CAPABILITY_MULTI_AP])
-        self.context.router.deconfig()
-        self.context.configure(self._ap_config, multi_interface=True)
-
-        client_at = autotest.Autotest(host)
-
-        client_at.run_test('policy_WiFiAutoconnect',
-                           ssid=self.context.router.get_ssid(instance=0),
-                           autoconnect=autoconnect,
-                           check_client_result=True)
-
-        self.context.router.deconfig()
diff --git a/server/site_tests/policy_WiFiPrecedenceServer/control.autoconnect_vs_no_autoconnect b/server/site_tests/policy_WiFiPrecedenceServer/control.autoconnect_vs_no_autoconnect
deleted file mode 100644
index 180a642..0000000
--- a/server/site_tests/policy_WiFiPrecedenceServer/control.autoconnect_vs_no_autoconnect
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiPrecedenceServer.autoconnect_vs_no_autoconnect'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiPrecedenceServer.autoconnect_vs_no_autoconnect' sets up an AP.
-The client test is given two network policies for the same network, one with
-AutoConnect=True and one with AutoConnect=False.
-
-The DUT should autoconnect to the network.
-
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    ssid = 'User Network'
-    n_mode = hostap_config.HostapConfig.MODE_11N_MIXED
-
-    ap_configs = []
-    ap_configs.append(hostap_config.HostapConfig(
-            channel=5,
-            mode=n_mode,
-            ssid=ssid))
-
-    ap_configs.append(hostap_config.HostapConfig(
-            channel=149,
-            mode=n_mode,
-            ssid=ssid))
-
-    # Client network configurations.
-    network1_config = NetworkConfig(ssid, autoconnect=False)
-    network2_config = NetworkConfig(ssid, autoconnect=True)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiPrecedenceServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_configs=ap_configs,
-                 network1_config=network1_config,
-                 network2_config=network2_config)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiPrecedenceServer/control.connectable_vs_not_connectable b/server/site_tests/policy_WiFiPrecedenceServer/control.connectable_vs_not_connectable
deleted file mode 100644
index b9906d7..0000000
--- a/server/site_tests/policy_WiFiPrecedenceServer/control.connectable_vs_not_connectable
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiPrecedenceServer.connectable_vs_not_connectable'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiPrecedenceServer.connectable_vs_not_connectable' sets up both
-a WPA-PSK network and an open network. The client test is given a network
-policy for both networks with AutoConnect=True, but the PSK network is given
-the wrong password.
-
-The open network should connect, as the PSK network is inaccessible.
-
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    password = 'chromeos'
-    incorrect_password = 'android'
-    ssid1 = 'Network1'
-    ssid2 = 'Network2'
-    n_mode = hostap_config.HostapConfig.MODE_11N_MIXED
-
-    ap_configs = []
-    ap_configs.append(hostap_config.HostapConfig(
-            channel=5,
-            mode=n_mode,
-            ssid=ssid1))
-
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk=password,
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    ap_configs.append(hostap_config.HostapConfig(
-            channel=149,
-            mode=n_mode,
-            ssid=ssid2,
-            security_config=wpa_config))
-
-    # Client network configurations.
-    network1_config = NetworkConfig(ssid1, autoconnect=True)
-    network2_config = NetworkConfig(ssid2,
-                                    security='WPA-PSK',
-                                    password=incorrect_password,
-                                    autoconnect=True)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiPrecedenceServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_configs=ap_configs,
-                 network1_config=network1_config,
-                 network2_config=network2_config,
-                 precedence=1)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiPrecedenceServer/control.device_vs_user b/server/site_tests/policy_WiFiPrecedenceServer/control.device_vs_user
deleted file mode 100644
index 9c425a7..0000000
--- a/server/site_tests/policy_WiFiPrecedenceServer/control.device_vs_user
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiPrecedenceServer.device_vs_user'
-TEST_TYPE = 'Server'
-# Disable this test until it can be fixed: http://b/171583865
-# ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiPrecedenceServer.device_vs_user' sets up both an open user
-network and an open device network. The client test is given a network
-policy for both networks with AutoConnect=True.
-
-The user network should connect because, all other things being equal, the
-user policy takes precedence.
-
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    user_ssid = 'User Network'
-    device_ssid = 'Device Network'
-    n_mode = hostap_config.HostapConfig.MODE_11N_MIXED
-
-    ap_configs = []
-    ap_configs.append(hostap_config.HostapConfig(
-            channel=5,
-            mode=n_mode,
-            ssid=user_ssid))
-
-    ap_configs.append(hostap_config.HostapConfig(
-            channel=149,
-            mode=n_mode,
-            ssid=device_ssid))
-
-    # Client network configurations.
-    network1_config = NetworkConfig(user_ssid, autoconnect=True)
-    network2_config = NetworkConfig(device_ssid, autoconnect=True)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiPrecedenceServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_configs=ap_configs,
-                 network1_config=network1_config,
-                 network2_config=network2_config,
-                 precedence=1,
-                 test='device_vs_user')
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiPrecedenceServer/control.managed_vs_unmanaged b/server/site_tests/policy_WiFiPrecedenceServer/control.managed_vs_unmanaged
deleted file mode 100644
index 41754bc..0000000
--- a/server/site_tests/policy_WiFiPrecedenceServer/control.managed_vs_unmanaged
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiPrecedenceServer.managed_vs_unmanaged'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiPrecedenceServer.managed_vs_unmanaged' sets up two APs.
-The client test is given a network policy for one of the networks.
-The DUT then connects to the unmanaged network so that the DUT "remembers"
-that network.
-
-The DUT should connect to the managed network specified in the user policy.
-
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    ssid1 = 'Network1'
-    ssid2 = 'Network2'
-    n_mode = hostap_config.HostapConfig.MODE_11N_MIXED
-
-    ap_configs = []
-    ap_configs.append(hostap_config.HostapConfig(
-            channel=5,
-            mode=n_mode,
-            ssid=ssid1))
-
-    ap_configs.append(hostap_config.HostapConfig(
-            channel=149,
-            mode=n_mode,
-            ssid=ssid2))
-
-    # Client network configurations.
-    network1_config = NetworkConfig(ssid1, autoconnect=True)
-    network2_config = NetworkConfig(ssid2)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiPrecedenceServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_configs=ap_configs,
-                 network1_config=network1_config,
-                 network2_config=network2_config,
-                 precedence=1,
-                 test='managed_vs_unmanaged')
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiPrecedenceServer/control.open_vs_psk b/server/site_tests/policy_WiFiPrecedenceServer/control.open_vs_psk
deleted file mode 100644
index 8051191..0000000
--- a/server/site_tests/policy_WiFiPrecedenceServer/control.open_vs_psk
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiPrecedenceServer.open_vs_psk'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiPrecedenceServer.open_vs_psk' sets up a WPA-PSK and an open
-network. The client test is given a network policy for both networks with
-AutoConnect=True.
-
-The DUT should connect to the PSK network, as it is more secure.
-
-"""
-
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-
-def run(machine):
-    password = 'chromeos'
-    ssid1 = 'Network1'
-    ssid2 = 'Network2'
-    n_mode = hostap_config.HostapConfig.MODE_11N_MIXED
-
-    ap_configs = []
-    ap_configs.append(hostap_config.HostapConfig(
-            channel=5,
-            mode=n_mode,
-            ssid=ssid1))
-
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk=password,
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    ap_configs.append(hostap_config.HostapConfig(
-            channel=149,
-            mode=n_mode,
-            ssid=ssid2,
-            security_config=wpa_config))
-
-    # Client network configurations.
-    network1_config = NetworkConfig(ssid1, autoconnect=True)
-    network2_config = NetworkConfig(ssid2,
-                                    security='WPA-PSK',
-                                    password=password,
-                                    autoconnect=True)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiPrecedenceServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_configs=ap_configs,
-                 network1_config=network1_config,
-                 network2_config=network2_config,
-                 precedence=2)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiPrecedenceServer/policy_WiFiPrecedenceServer.py b/server/site_tests/policy_WiFiPrecedenceServer/policy_WiFiPrecedenceServer.py
deleted file mode 100644
index a5a4a36..0000000
--- a/server/site_tests/policy_WiFiPrecedenceServer/policy_WiFiPrecedenceServer.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import pickle
-import socket
-
-from autotest_lib.server import autotest
-from autotest_lib.server import site_linux_system
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class policy_WiFiPrecedenceServer(wifi_cell_test_base.WiFiCellTestBase):
-    version = 1
-
-
-    def cleanup(self):
-        """Cleanup for this test."""
-        try:
-            super(policy_WiFiPrecedenceServer, self).cleanup()
-        except socket.error as e:
-            # Some of the WiFi components are closed when the DUT reboots,
-            # and a socket error is raised when cleanup tries to close them
-            # again.
-            logging.info(e)
-
-        if self.test == 'device_vs_user':
-            tpm_utils.ClearTPMIfOwned(self.host)
-            self.host.reboot()
-
-
-    def run_once(self, host=None, ap_configs=None, network1_config=None,
-                 network2_config=None, precedence=None, test=None):
-        """
-        Set up the APs, then run the client-side test.
-
-        Clears the TPM because the client test needs to enroll.
-
-        @param host: A host object representing the DUT.
-        @param ap_configs: List containing HostapConfig objects to setup APs.
-        @param network1_config: NetworkConfig object for the client-side
-            configuration of network1.
-        @param network2_config: NetworkConfig object for the client-side
-            configuration of network2.
-        @param precedence: One of 1 or 2: which of the APs the
-            DUT should connect to.
-
-        """
-        self.context.router.require_capabilities(
-                [site_linux_system.LinuxSystem.CAPABILITY_MULTI_AP])
-        self.context.router.deconfig()
-        for ap_config in ap_configs:
-            self.context.configure(ap_config, multi_interface=True)
-
-        self.host = host
-        self.test = test
-
-        # Clear TPM to ensure that client test can enroll device.
-        if self.test == 'device_vs_user':
-            tpm_utils.ClearTPMIfOwned(self.host)
-
-        client_at = autotest.Autotest(self.host)
-
-        client_at.run_test(
-                'policy_WiFiPrecedence',
-                # The config objects must be pickled before they can be
-                # passed to the client test.
-                network1_pickle=pickle.dumps(network1_config),
-                network2_pickle=pickle.dumps(network2_config),
-                precedence=precedence,
-                test=self.test,
-                check_client_result=True)
-
-        self.context.router.deconfig()
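
The server test above hands the NetworkConfig objects to the client test as
pickled strings, so the client side has to restore them with pickle.loads().
A minimal sketch of that client-side counterpart, assuming the client test
receives the same keyword names used in the run_test() call; the helper name
is illustrative only, not part of the removed test:

import pickle

def unpack_network_configs(network1_pickle, network2_pickle):
    """Restore the NetworkConfig objects serialized by the server test."""
    # Inverse of the pickle.dumps() calls in policy_WiFiPrecedenceServer.
    network1_config = pickle.loads(network1_pickle)
    network2_config = pickle.loads(network2_pickle)
    return network1_config, network2_config
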
diff --git a/server/site_tests/policy_WiFiTypesServer/control b/server/site_tests/policy_WiFiTypesServer/control
deleted file mode 100644
index 16f009b..0000000
--- a/server/site_tests/policy_WiFiTypesServer/control
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiTypesServer'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiTypesServer' configures multiple APs with various authentication
-types, runs the client-side 'policy_WiFiTypes' test against each of them, and
-checks that the DUT is able to connect in every case.
-
-"""
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiTypesServer',
-                 raw_cmdline_args=args,
-                 host=host)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiTypesServer/control.open b/server/site_tests/policy_WiFiTypesServer/control.open
deleted file mode 100644
index ac6f50a..0000000
--- a/server/site_tests/policy_WiFiTypesServer/control.open
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiTypesServer.open'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiTypesServer.open' test configures an open network and runs the
-client-side 'policy_WiFiTypes' test that sets the user network policy and
-attempts to connect.
-
-"""
-
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    n_mode = hostap_config.HostapConfig.MODE_11N_MIXED
-    ap_config = hostap_config.HostapConfig(channel=6, mode=n_mode)
-    network = NetworkConfig()
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiTypesServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_config=ap_config,
-                 network=network)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiTypesServer/control.wpa_eap_tls b/server/site_tests/policy_WiFiTypesServer/control.wpa_eap_tls
deleted file mode 100644
index 9f922d1..0000000
--- a/server/site_tests/policy_WiFiTypesServer/control.wpa_eap_tls
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiTypesServer.wpa_eap_tls'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiTypesServer.wpa_eap_tls' test configures an EAP-TLS network and
-runs the client-side 'policy_WiFiTypes' test that configures the user network
-policy and attempts to connect to the AP.
-
-"""
-
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    eap_config = xmlrpc_security_types.WPAEAPConfig(
-            server_ca_cert=site_eap_certs.ca_cert_1,
-            server_cert=site_eap_certs.server_cert_1,
-            server_key=site_eap_certs.server_private_key_1,
-            client_ca_cert=site_eap_certs.ca_cert_1,
-            client_cert=site_eap_certs.client_cert_1,
-            client_key=site_eap_certs.client_private_key_1)
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=eap_config)
-
-    network = NetworkConfig(security='WPA-EAP',
-                            eap='EAP-TLS',
-                            identity='chromeos',
-                            ca_cert=site_eap_certs.ca_cert_1,
-                            client_cert=site_eap_certs.client_cert_1_pkcs12)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiTypesServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_config=ap_config,
-                 network=network)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiTypesServer/control.wpa_eap_ttls b/server/site_tests/policy_WiFiTypesServer/control.wpa_eap_ttls
deleted file mode 100644
index 2f65fa0..0000000
--- a/server/site_tests/policy_WiFiTypesServer/control.wpa_eap_ttls
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiTypesServer.wpa_eap_ttls'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiTypesServer.wpa_eap_ttls' test configures an EAP-TTLS network and
-runs the client-side 'policy_WiFiTypes' test that configures the user network
-policy and attempts to connect to the AP.
-
-"""
-
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    TTLS = xmlrpc_security_types.Tunneled1xConfig.LAYER1_TYPE_TTLS
-    MSCHAPV2 = xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_MSCHAPV2
-    identity = 'chromeos'
-    password = 'chromeos'
-
-    eap_config = xmlrpc_security_types.Tunneled1xConfig(
-            site_eap_certs.ca_cert_1,
-            site_eap_certs.server_cert_1,
-            site_eap_certs.server_private_key_1,
-            site_eap_certs.ca_cert_1,
-            identity,
-            password,
-            inner_protocol=MSCHAPV2,
-            outer_protocol=TTLS)
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=eap_config)
-
-    network = NetworkConfig(security='WPA-EAP',
-                            eap='EAP-TTLS',
-                            identity=identity,
-                            password=password,
-                            ca_cert=site_eap_certs.ca_cert_1)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiTypesServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_config=ap_config,
-                 network=network)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiTypesServer/control.wpa_peap b/server/site_tests/policy_WiFiTypesServer/control.wpa_peap
deleted file mode 100644
index 5aeb4d1..0000000
--- a/server/site_tests/policy_WiFiTypesServer/control.wpa_peap
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiTypesServer.wpa_peap'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiTypesServer.wpa_peap' test configures a PEAP network and runs the
-client-side 'policy_WiFiTypes' test that configures the user network policy and
-attempts to connect to the AP.
-
-"""
-
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    PEAP = xmlrpc_security_types.Tunneled1xConfig.LAYER1_TYPE_PEAP
-    MSCHAPV2 = xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_MSCHAPV2
-    identity = 'chromeos'
-    password = 'chromeos'
-
-    eap_config = xmlrpc_security_types.Tunneled1xConfig(
-            site_eap_certs.ca_cert_1,
-            site_eap_certs.server_cert_1,
-            site_eap_certs.server_private_key_1,
-            site_eap_certs.ca_cert_1,
-            identity,
-            password,
-            inner_protocol=MSCHAPV2,
-            outer_protocol=PEAP)
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=eap_config)
-
-    network = NetworkConfig(security='WPA-EAP',
-                            eap='PEAP',
-                            identity=identity,
-                            password=password,
-                            ca_cert=site_eap_certs.ca_cert_1)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiTypesServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_config=ap_config,
-                 network=network)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiTypesServer/control.wpa_peap_gtc b/server/site_tests/policy_WiFiTypesServer/control.wpa_peap_gtc
deleted file mode 100644
index 5cc1b11..0000000
--- a/server/site_tests/policy_WiFiTypesServer/control.wpa_peap_gtc
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiTypesServer.wpa_peap_gtc'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiTypesServer.wpa_peap_gtc' test configures a PEAP/GTC network and
-runs the client-side 'policy_WiFiTypes' test that configures the user network
-policy and attempts to connect to the AP.
-
-"""
-
-from autotest_lib.client.common_lib.cros import site_eap_certs
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    PEAP = xmlrpc_security_types.Tunneled1xConfig.LAYER1_TYPE_PEAP
-    GTC = xmlrpc_security_types.Tunneled1xConfig.LAYER2_TYPE_GTC
-    identity = 'chromeos'
-    password = 'chromeos'
-
-    eap_config = xmlrpc_security_types.Tunneled1xConfig(
-            site_eap_certs.ca_cert_1,
-            site_eap_certs.server_cert_1,
-            site_eap_certs.server_private_key_1,
-            site_eap_certs.ca_cert_1,
-            identity,
-            password,
-            inner_protocol=GTC,
-            outer_protocol=PEAP)
-    ap_config = hostap_config.HostapConfig(
-            frequency=2412,
-            mode=hostap_config.HostapConfig.MODE_11G,
-            security_config=eap_config)
-
-    network = NetworkConfig(security='WPA-EAP',
-                            eap='PEAP',
-                            identity=identity,
-                            password=password,
-                            ca_cert=site_eap_certs.ca_cert_1)
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiTypesServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_config=ap_config,
-                 network=network)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiTypesServer/control.wpa_psk b/server/site_tests/policy_WiFiTypesServer/control.wpa_psk
deleted file mode 100644
index 34c94ff..0000000
--- a/server/site_tests/policy_WiFiTypesServer/control.wpa_psk
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'timkovich'
-TIME = 'SHORT'
-NAME = 'policy_WiFiTypesServer.wpa_psk'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:ent-wificell'
-DEPENDENCIES = 'wificell'
-
-DOC = """
-'policy_WiFiTypesServer.wpa_psk' test configures a WPA-PSK network and runs
-the client-side 'policy_WiFiTypes' test that sets the user network policy and
-attempts to connect.
-
-"""
-
-from autotest_lib.client.common_lib.cros.network import xmlrpc_security_types
-from autotest_lib.client.cros.enterprise.network_config import NetworkConfig
-from autotest_lib.server.cros.network import hostap_config
-
-def run(machine):
-    password = 'chromeos'
-    wpa_config = xmlrpc_security_types.WPAConfig(
-            psk=password,
-            wpa_mode=xmlrpc_security_types.WPAConfig.MODE_PURE_WPA2,
-            wpa2_ciphers=[xmlrpc_security_types.WPAConfig.CIPHER_CCMP])
-    ap_config = hostap_config.HostapConfig(
-            channel=6,
-            mode=hostap_config.HostapConfig.MODE_11N_MIXED,
-            security_config=wpa_config)
-
-    network = NetworkConfig(password=password, security='WPA-PSK')
-
-    host = hosts.create_host(machine)
-    job.run_test('policy_WiFiTypesServer',
-                 raw_cmdline_args=args,
-                 host=host,
-                 ap_config=ap_config,
-                 network=network)
-
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WiFiTypesServer/policy_WiFiTypesServer.py b/server/site_tests/policy_WiFiTypesServer/policy_WiFiTypesServer.py
deleted file mode 100644
index ec41278..0000000
--- a/server/site_tests/policy_WiFiTypesServer/policy_WiFiTypesServer.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import pickle
-
-from autotest_lib.server import autotest
-from autotest_lib.server import site_linux_system
-from autotest_lib.server.cros.network import wifi_cell_test_base
-
-
-class policy_WiFiTypesServer(wifi_cell_test_base.WiFiCellTestBase):
-    version = 1
-
-
-    def run_once(self, host, ap_config, network):
-        """
-        Set up an AP for a WiFi authentication type then run the client test.
-
-        @param host: A host object representing the DUT.
-        @param ap_config: HostapConfig object representing how to configure
-            the router.
-        @param network: NetworkConfig object of how to configure the client.
-
-        """
-        self.context.router.require_capabilities(
-                [site_linux_system.LinuxSystem.CAPABILITY_MULTI_AP])
-        self.context.router.deconfig()
-
-        # Configure the AP
-        self.context.configure(ap_config)
-        network.ssid = self.context.router.get_ssid()
-
-        client_at = autotest.Autotest(host)
-        client_at.run_test('policy_WiFiTypes',
-                           network_pickle=pickle.dumps(network),
-                           check_client_result=True)
-
-        self.context.router.deconfig()
diff --git a/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/control.DeviceDockMacAddressSource.designated_mac b/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/control.DeviceDockMacAddressSource.designated_mac
deleted file mode 100644
index d1ba03a..0000000
--- a/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/control.DeviceDockMacAddressSource.designated_mac
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "rzakarian"
-NAME = "policy_WilcoServerDeviceDockMacAddressSource.designated_mac"
-TIME = "LONG"
-TEST_CATEGORY = "General"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:wilco_bve_dock"
-
-DOC = """
-This test verifies the MAC address of the dock with the
-DeviceDockMacAddressSource policy set. If the policy is set to 1, the dock
-should use the designated MAC address of the DUT.
-
-"""
-
-client_test = 'policy_DeviceDockMacAddressSource'
-case = 'designated_mac'
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_WilcoServerDeviceDockMacAddressSource', host=host,
-                 client_test=client_test, case=case)
-
-parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/control.DeviceDockMacAddressSource.device_mac b/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/control.DeviceDockMacAddressSource.device_mac
deleted file mode 100644
index 1ee7668..0000000
--- a/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/control.DeviceDockMacAddressSource.device_mac
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "rzakarian"
-NAME = "policy_WilcoServerDeviceDockMacAddressSource.device_mac"
-TIME = "LONG"
-TEST_CATEGORY = "General"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:wilco_bve_dock"
-
-DOC = """
-This test verifies the MAC address of the dock with the
-DeviceDockMacAddressSource policy set. If the policy is set to 2, the dock
-should have the same MAC address as the DUT.
-
-"""
-
-client_test = 'policy_DeviceDockMacAddressSource'
-case = 'device_mac'
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_WilcoServerDeviceDockMacAddressSource', host=host,
-                 client_test=client_test, case=case)
-
-parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/control.DeviceDockMacAddressSource.dock_mac b/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/control.DeviceDockMacAddressSource.dock_mac
deleted file mode 100644
index 7f88792..0000000
--- a/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/control.DeviceDockMacAddressSource.dock_mac
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "rzakarian"
-NAME = "policy_WilcoServerDeviceDockMacAddressSource.dock_mac"
-TIME = "LONG"
-TEST_CATEGORY = "General"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:wilco_bve_dock"
-
-DOC = """
-This test verifies the MAC address of the dock with the
-DeviceDockMacAddressSource policy set. If the policy is set to 3, the dock
-should have its own MAC address.
-
-"""
-
-client_test = 'policy_DeviceDockMacAddressSource'
-case = 'dock_mac'
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_WilcoServerDeviceDockMacAddressSource', host=host,
-                 client_test=client_test, case=case)
-
-parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/policy_WilcoServerDeviceDockMacAddressSource.py b/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/policy_WilcoServerDeviceDockMacAddressSource.py
deleted file mode 100644
index 62227ee..0000000
--- a/server/site_tests/policy_WilcoServerDeviceDockMacAddressSource/policy_WilcoServerDeviceDockMacAddressSource.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-
-class policy_WilcoServerDeviceDockMacAddressSource(test.test):
-    """Test that verifies DeviceDockMacAddressSource policy.
-
-    If the policy is set to 1, the dock will grab the designated MAC address
-    from the device.
-    If the policy is set to 2, the dock's MAC address will match the device MAC.
-    If the policy is set to 3, the dock will use its own MAC address.
-
-    This test has to run on a Wilco device.
-
-    The way the test is currently set up: the Ethernet cable is plugged into
-    the device and the dock is not plugged into the internet directly. This
-    might change later on.
-    """
-    version = 1
-
-
-    def cleanup(self):
-        """Clean up DUT."""
-        tpm_utils.ClearTPMIfOwned(self.host)
-
-
-    def run_once(self, client_test, host, case):
-        """Run the test.
-
-        @param client_test: the name of the Client test to run.
-        @param case: the case to run for the given Client test.
-        """
-        self.host = host
-        tpm_utils.ClearTPMIfOwned(self.host)
-
-        self.autotest_client = autotest.Autotest(self.host)
-        self.autotest_client.run_test(client_test, case=case)
-
-        self.host.reboot()
-
-        self.autotest_client.run_test(
-            client_test, case=case, enroll=False, check_mac=True)
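
The three control files above each hard-code one case string, and the class
docstring spells out what each DeviceDockMacAddressSource value means. A small
sketch consolidating that mapping; the dictionary name is illustrative and not
part of the test:

# Policy value -> control-file case, behaviour per the class docstring.
DOCK_MAC_ADDRESS_SOURCE_CASES = {
        1: 'designated_mac',  # dock uses the DUT's designated MAC address
        2: 'device_mac',      # dock's MAC address matches the device MAC
        3: 'dock_mac',        # dock keeps its own MAC address
}
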
diff --git a/server/site_tests/policy_WilcoServerOnNonWilcoDevice/control.DeviceWilcoDtcAllowed_NonWilcoDevice b/server/site_tests/policy_WilcoServerOnNonWilcoDevice/control.DeviceWilcoDtcAllowed_NonWilcoDevice
deleted file mode 100644
index 91d5404..0000000
--- a/server/site_tests/policy_WilcoServerOnNonWilcoDevice/control.DeviceWilcoDtcAllowed_NonWilcoDevice
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'policy_WilcoServerOnNonWilcoDevice.wilco_policies'
-TIME = 'SHORT'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-# Disable this test until it can be fixed: http://b/171595642
-# ATTRIBUTES = 'suite:ent-nightly, suite:policy'
-
-DOC = """
-Sets up and runs the WilcoOnNonWilcoDevice client test. Sets Wilco policies on
-non-Wilco devices and makes sure they don't crash.
-
-"""
-
-client_test = 'policy_WilcoOnNonWilcoDevice'
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('policy_WilcoServerOnNonWilcoDevice', host=host,
-                  client_test=client_test, case=None)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WilcoServerOnNonWilcoDevice/policy_WilcoServerOnNonWilcoDevice.py b/server/site_tests/policy_WilcoServerOnNonWilcoDevice/policy_WilcoServerOnNonWilcoDevice.py
deleted file mode 100644
index 9c54121..0000000
--- a/server/site_tests/policy_WilcoServerOnNonWilcoDevice/policy_WilcoServerOnNonWilcoDevice.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-
-class policy_WilcoServerOnNonWilcoDevice(test.test):
-    """
-    policy_WilcoServerOnNonWilcoDevice test used to kick off a client test.
-
-    """
-    version = 1
-
-
-    def cleanup(self):
-        """Cleanup for this test."""
-        tpm_utils.ClearTPMIfOwned(self.host)
-        self.host.reboot()
-
-
-    def run_once(self, client_test, host, case=None):
-        """
-        Starting point of this test.
-
-        Note: base class sets host as self._host.
-
-        @param client_test: the name of the Client test to run.
-        @param case: the case to run for the given Client test.
-
-        """
-        # Policies to loop through in the client test.
-        tests = [{'Policy_Name': 'DeviceBootOnAcEnabled',
-                    'Policy_Value': True},
-                 {'Policy_Name': 'DevicePowerPeakShiftEnabled',
-                    'Policy_Value': True},
-                 {'Policy_Name': 'DeviceDockMacAddressSource',
-                     'Policy_Value': 1},
-                 {'Policy_Name': 'DeviceUsbPowerShareEnabled',
-                     'Policy_Value': True},
-                 {'Policy_Name': 'DeviceAdvancedBatteryChargeModeEnabled',
-                     'Policy_Value': True},
-                 {'Policy_Name': 'DeviceBatteryChargeMode',
-                     'Policy_Value': 1},
-                ]
-
-        self.host = host
-        # Clear TPM to ensure that client test can enroll device.
-        tpm_utils.ClearTPMIfOwned(self.host)
-
-        self.autotest_client = autotest.Autotest(self.host)
-        self.autotest_client.run_test(
-            client_test, tests=tests, check_client_result=True)
diff --git a/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.disabled b/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.disabled
deleted file mode 100644
index c79434a..0000000
--- a/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.disabled
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "rzakarian"
-NAME = "policy_WilcoServerUSBPowershare.disabled"
-CRITERIA = "This test will fail if servo does not work as expected."
-TIME = "LONG"
-TEST_CATEGORY = "General"
-TEST_CLASS = "enterprise"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
-# Disable this test until it can be fixed: http://b/171595666
-# ATTRIBUTES = "suite:wilco_bve"
-
-DOC = """
-This test verifies the USB power output when the device is off with the
-DeviceUsbPowerShareEnabled policy set. If the policy is set to True or
-not set, power should be provided. If the policy is set to False power
-should not be provided.
-
-"""
-
-client_test = 'policy_WilcoUSBPowershare'
-case = False
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('policy_WilcoServerUSBPowershare', host=host,
-                 client_test=client_test, case=case)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.enabled b/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.enabled
index eebf46c..a1114f0 100644
--- a/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.enabled
+++ b/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.enabled
@@ -13,6 +13,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
 ATTRIBUTES = "suite:wilco_bve"
+PY_VERSION = 3
 
 DOC = """
 This test verifies the USB power output when the device is off with the
diff --git a/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.not_set b/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.not_set
index ab18538..f58f374 100644
--- a/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.not_set
+++ b/server/site_tests/policy_WilcoServerUSBPowershare/control.WilcoServerUSBPowershare.not_set
@@ -13,6 +13,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
 ATTRIBUTES = "suite:wilco_bve"
+PY_VERSION = 3
 
 DOC = """
 This test verifies the USB power output when the device is off with the
diff --git a/server/site_tests/power_BatteryChargeControl/control.args b/server/site_tests/power_BatteryChargeControl/control.args
index c9f3c8c..2a12226 100644
--- a/server/site_tests/power_BatteryChargeControl/control.args
+++ b/server/site_tests/power_BatteryChargeControl/control.args
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_BatteryChargeControl.args"
 PURPOSE = "Use Servo v4 to charge the DUT"
 CRITERIA = ""
@@ -13,6 +13,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 DUT is connected to Servo v4 via USB type-C, and Servo v4 is connected to AC
diff --git a/server/site_tests/power_BatteryChargeControl/control.charge70 b/server/site_tests/power_BatteryChargeControl/control.charge70
index 06b5e68..74f7116 100644
--- a/server/site_tests/power_BatteryChargeControl/control.charge70
+++ b/server/site_tests/power_BatteryChargeControl/control.charge70
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_BatteryChargeControl.charge70"
 PURPOSE = "Use Servo v4 to charge the DUT"
 CRITERIA = ""
@@ -13,6 +13,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Use Servo v4 to charge the DUT to 70% battery capacity.
diff --git a/server/site_tests/power_BatteryChargeControl/control.charge95 b/server/site_tests/power_BatteryChargeControl/control.charge95
index 9920d0b..4293a19 100644
--- a/server/site_tests/power_BatteryChargeControl/control.charge95
+++ b/server/site_tests/power_BatteryChargeControl/control.charge95
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_BatteryChargeControl.charge95"
 PURPOSE = "Use Servo v4 to charge the DUT"
 CRITERIA = ""
@@ -13,6 +13,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Use Servo v4 to charge the DUT to 95% battery capacity.
diff --git a/server/site_tests/power_BatteryChargeControl/power_BatteryChargeControl.py b/server/site_tests/power_BatteryChargeControl/power_BatteryChargeControl.py
index 6795724..9b71f51 100644
--- a/server/site_tests/power_BatteryChargeControl/power_BatteryChargeControl.py
+++ b/server/site_tests/power_BatteryChargeControl/power_BatteryChargeControl.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/power_BootUpTime/control.lid_close_open b/server/site_tests/power_BootUpTime/control.lid_close_open
deleted file mode 100644
index 79f343a..0000000
--- a/server/site_tests/power_BootUpTime/control.lid_close_open
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Intel"
-NAME = "power_BootUpTime.lid_close_open"
-PURPOSE = "To check the boot up time using lid close/open"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:bootup_time"
-DEPENDENCIES = "servo_state:WORKING"
-JOB_RETRIES = 3
-
-DOC = """
-Close and open the lid, then determine the boot up time.
-Boot up time should be <= 8.0 seconds.
-"""
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("power_BootUpTime", host=host, cmdline_args=args,
-                 boot_type='lid_close_open', disable_sysinfo=False,
-                 tag='lid_close_open')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_BootUpTime/control.reboot b/server/site_tests/power_BootUpTime/control.reboot
deleted file mode 100644
index 4052a5d..0000000
--- a/server/site_tests/power_BootUpTime/control.reboot
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "Intel"
-NAME = "power_BootUpTime.reboot"
-PURPOSE = "To check the boot up time using reboot command"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:bootup_time"
-DEPENDENCIES = "servo_state:WORKING"
-JOB_RETRIES = 3
-
-DOC = """
-Reboot the DUT and determine the boot up time.
-Boot up time should be <= 8.0 seconds.
-"""
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("power_BootUpTime", host=host, cmdline_args=args,
-                 boot_type='reboot', disable_sysinfo=False, tag='reboot')
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_BootUpTime/power_BootUpTime.py b/server/site_tests/power_BootUpTime/power_BootUpTime.py
deleted file mode 100644
index cfbea4f..0000000
--- a/server/site_tests/power_BootUpTime/power_BootUpTime.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
-
-
-class power_BootUpTime(FirmwareTest):
-    """Checks the device boot up time"""
-    version = 1
-    _WAIT_TIME_OPEN_LID = _WAIT_TIME_CLOSE_LID = 10
-
-    def initialize(self, host, cmdline_args, ec_wp=None):
-        """Autotest initialize method"""
-        self.host = host
-        super(power_BootUpTime, self).initialize(self.host, cmdline_args,
-                                                 ec_wp=ec_wp)
-        self.switcher.setup_mode('normal')
-
-    def run_once(self, bootup_time=8, boot_type='reboot'):
-        """Checks the boot up time
-
-        @param bootup_time: Expected boot up time
-        @param boot_type: Boot type, e.g. 'reboot', 'lid_close_open', etc.
-        """
-        autotest_client = autotest.Autotest(self.host)
-
-        # Assume the test case is run immediately after flashing and that
-        # the DUT is kept at the logout screen.
-        autotest_client.run_test('login_LoginSuccess', disable_sysinfo=True)
-        if boot_type == 'reboot':
-            self.host.reboot()
-        elif boot_type == 'lid_close_open':
-            logging.info("Closing lid")
-            self.host.servo.lid_close()
-            self.switcher.wait_for_client_offline(
-                    timeout=self._WAIT_TIME_CLOSE_LID)
-            logging.info('Opening lid')
-            self.host.servo.lid_open()
-            if not self.faft_config.lid_wake_from_power_off:
-                logging.info('Pressing power button')
-                self.host.servo.power_normal_press()
-            self.switcher.wait_for_client(timeout=self._WAIT_TIME_OPEN_LID)
-        else:
-            raise error.TestError("Invalid boot_type, check the boot_type"
-                                  " in control file")
-
-        autotest_client.run_test('platform_BootPerf', constraints=[
-                'seconds_power_on_to_login <= %d' % bootup_time])
diff --git a/server/site_tests/power_BrightnessResetAfterReboot/control b/server/site_tests/power_BrightnessResetAfterReboot/control
index fdf2222..441018c 100644
--- a/server/site_tests/power_BrightnessResetAfterReboot/control
+++ b/server/site_tests/power_BrightnessResetAfterReboot/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_BrightnessResetAfterReboot"
 PURPOSE = "default brightness test."
 CRITERIA = "This test will fail if unable to set the default brightness after reboot."
@@ -14,6 +14,7 @@
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:bvt-perbuild, suite:partners"
 REQUIRE_SSP = False
+PY_VERSION = 3
 
 DOC = """
 This test:
diff --git a/server/site_tests/power_BrightnessResetAfterReboot/power_BrightnessResetAfterReboot.py b/server/site_tests/power_BrightnessResetAfterReboot/power_BrightnessResetAfterReboot.py
index 83cdf3a..6d72251 100644
--- a/server/site_tests/power_BrightnessResetAfterReboot/power_BrightnessResetAfterReboot.py
+++ b/server/site_tests/power_BrightnessResetAfterReboot/power_BrightnessResetAfterReboot.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/power_ChargeControlWrapper/control b/server/site_tests/power_ChargeControlWrapper/control
deleted file mode 100644
index 5d09403..0000000
--- a/server/site_tests/power_ChargeControlWrapper/control
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mqg"
-NAME = "power_ChargeControlWrapper"
-PURPOSE = "Use Servo v4 to control charging / discharging the DUT"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-DUT is connected to Servo v4 via USB type-C, and Servo v4 is connected to AC
-power.
-
-Sample usage:
-test_that <ip address of DUT> power_ChargeControlWrapper \
---args 'servo_host=localhost servo_port=9999 test=power_Dummy'
-
-What are the parameters:
-test: the client test to run in wrapper test; required.
-servo_host: host of servod instance; required.
-servo_port: port that the servod instance is on; required.
-"""
-
-# Workaround to make it compatible with moblab autotest UI.
-global args_dict
-try:
-    args_dict
-except NameError:
-    args_dict = utils.args_to_dict(args)
-
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("power_ChargeControlWrapper", host=host, config=args_dict)
-
-parallel_simple(run, machines)
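
The control file above forwards the parsed --args dictionary into the wrapper
test as its config. A minimal sketch of that dictionary for the sample
test_that invocation in the DOC string; per that DOC string, 'test',
'servo_host' and 'servo_port' are the required keys:

# Sketch of the dict produced by utils.args_to_dict() for the sample
# invocation above; 'test' names the client test run inside the wrapper.
config = {
        'servo_host': 'localhost',
        'servo_port': '9999',
        'test': 'power_Dummy',
}
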
diff --git a/server/site_tests/power_ChargeControlWrapper/power_ChargeControlWrapper.py b/server/site_tests/power_ChargeControlWrapper/power_ChargeControlWrapper.py
deleted file mode 100644
index 211a743..0000000
--- a/server/site_tests/power_ChargeControlWrapper/power_ChargeControlWrapper.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Wrapper test that controls charging / discharging DUT with Servo v4."""
-
-from autotest_lib.server import test
-from autotest_lib.server.cros.power import servo_charger
-from autotest_lib.server.cros.power import wrapper_test_runner
-
-
-class power_ChargeControlWrapper(test.test):
-    """Base class for a wrapper test around a client test.
-
-    This wrapper test runs one client test given by the user, and controls
-    charging / discharging the DUT with Servo v4.
-    """
-    version = 1
-
-    def run_once(self, host, config):
-        """Measure power while running the client side test.
-
-        @param host: CrosHost object representing the DUT.
-        @param config: the args argument from test_that in a dict.
-                       required data: {'test': 'test_TestName.tag'}
-        """
-        test_runner = wrapper_test_runner.WrapperTestRunner(
-                config, self.autodir)
-        test_runner.run_test(host)
-
-    def warmup(self, host):
-        """Disconnect DUT from AC power.
-
-        Many power autotests require that the DUT is on battery, so disconnect
-        the DUT from AC power as preparation.
-        """
-        super(power_ChargeControlWrapper, self).warmup(host)
-        self._charge_manager = servo_charger.ServoV4ChargeManager(host,
-                                                                  host.servo)
-        self._charge_manager.stop_charging()
-
-    def cleanup(self):
-        """Connect DUT to AC power.
-
-        This allows DUT to charge between tests, and complies with moblab
-        requirement.
-        """
-        self._charge_manager.start_charging()
-        super(power_ChargeControlWrapper, self).cleanup()
diff --git a/server/site_tests/power_DeferForFlashrom/control b/server/site_tests/power_DeferForFlashrom/control
deleted file mode 100644
index c027e65..0000000
--- a/server/site_tests/power_DeferForFlashrom/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "derat@chromium.org, chromeos-power"
-NAME = "power_DeferForFlashrom"
-TIME = "SHORT"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:power_build"
-
-DOC = """
-This test verifies that flashrom creates a lockfile while performing
-potentially-destructive write operations and that powerd defers suspend or
-reboot requests while the lockfile exists.
-
-It fails if:
-1. powerd suspends or reboots the system while flashrom is running instead of
-   waiting until it's exited.
-2. After flashrom exits, powerd fails to honor pending suspend or reboot
-   requests.
-3. The system fails to resume or come back up after rebooting.
-"""
-
-def run(machine):
-    job.run_test("power_DeferForFlashrom", host=hosts.create_host(machine))
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_DeferForFlashrom/power_DeferForFlashrom.py b/server/site_tests/power_DeferForFlashrom/power_DeferForFlashrom.py
deleted file mode 100644
index 0040b5c..0000000
--- a/server/site_tests/power_DeferForFlashrom/power_DeferForFlashrom.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test
-
-
-# Timeout for commands run on the host.
-_COMMAND_TIMEOUT = 60
-
-# Lock file created by flashrom to tell powerd not to suspend or shut down the
-# system.
-_LOCK_FILE = '/run/lock/power_override/flashrom.lock'
-
-# Time in seconds to perform a flashrom write and to wait for the system to
-# suspend and resume.
-_SUSPEND_FLASHROM_SEC = 15
-_SUSPEND_DOWN_SEC = 60
-_SUSPEND_UP_SEC = 60
-
-# Time in seconds to perform a flashrom write and to wait for the system to
-# power down and reboot.
-_REBOOT_FLASHROM_SEC = 15
-_REBOOT_DOWN_SEC = 60
-_REBOOT_UP_SEC = 60
-
-
-class power_DeferForFlashrom(test.test):
-    """Test that powerd defers suspend and shutdown for flashrom."""
-    version = 1
-
-    def initialize(self, host):
-        """
-        Initial settings before running test.
-
-        @param host: Host/DUT object to use for test.
-        """
-        self.host = host
-
-
-    def create_temp_file(self, base_name, source_path, size):
-        """
-        Create a temporary file on the host and return its path.
-
-        @param base_name: String containing the base name for the temp file.
-        @param source_path: String containing the path to the device from
-                which file contents should be read.
-        @param size: Number of bytes to write to the file.
-        """
-        logging.info('Creating %d-byte temp file from %s', size, source_path)
-        temp_file = self.host.run(
-            'mktemp --tmpdir %s.XXXXXXXXXX' % base_name,
-            timeout=_COMMAND_TIMEOUT).stdout.strip()
-        self.host.run('dd if=%s of=%s bs=%d count=1 2>&1' %
-            (source_path, temp_file, size))
-        logging.info('Created %s', temp_file)
-        return temp_file
-
-
-    def run_in_background(self, cmd):
-        """
-        Asynchronously run a command on the host.
-
-        @param cmd: Command to run (as a string).
-        """
-        bg_cmd = '(%s) </dev/null >/dev/null 2>&1 &' % (cmd)
-        logging.info("Running %s", bg_cmd)
-        self.host.run(bg_cmd, timeout=_COMMAND_TIMEOUT)
-
-
-    def start_fake_flashrom_write(self, duration_sec):
-        """
-        Start a fake flashrom write.
-
-        @param duration_sec: Duration for the write in seconds.
-        """
-        # flashrom simulates a 4096-byte block size, so the file size needs to
-        # be a multiple of that.
-        BLOCK_SIZE = 4096
-
-        # flashrom will write one bit per cycle. Convert the block size to bits
-        # (yielding the frequency for a one-second write) and then scale it as
-        # needed.
-        frequency_hz = int(BLOCK_SIZE * 8 / float(duration_sec))
-
-        # To avoid flashrom needing to read (slowly) from the dummy device, pass
-        # a custom diff file filled with zeroes.
-        zero_file = self.create_temp_file(
-            'power_DeferForFlashrom.zero', '/dev/zero', BLOCK_SIZE)
-        rand_file = self.create_temp_file(
-            'power_DeferForFlashrom.rand', '/dev/urandom', BLOCK_SIZE)
-
-        # Start flashrom in the background and wait for it to create its lock
-        # file.
-        self.run_in_background(
-            ('flashrom -w %s --diff %s --noverify '
-             '-p dummy:freq=%d,emulate=VARIABLE_SIZE,size=%d,'
-             'erase_to_zero=yes') %
-            (rand_file, zero_file, frequency_hz, BLOCK_SIZE))
-
-        logging.info("Waiting for flashrom to create %s...", _LOCK_FILE)
-        self.host.run(
-            'while [ ! -e %s ]; do sleep 0.1; done' % (_LOCK_FILE),
-            timeout=_COMMAND_TIMEOUT)
-
-
-    def send_suspend_request(self, wake_sec):
-        """
-        Asynchronously ask powerd to suspend the system immediately.
-
-        @param wake_sec: Integer delay in seconds to use for setting a wake
-                alarm. Note that the alarm starts when the request is sent to
-                powerd, not when the system actually suspends.
-        """
-        self.run_in_background(
-            'powerd_dbus_suspend --delay=0 --wakeup_timeout=%d' % (wake_sec))
-
-
-    def send_reboot_request(self):
-        """Ask powerd to reboot the system immediately."""
-        logging.info('Calling powerd\'s RequestRestart method')
-        self.host.run(
-            ('dbus-send --type=method_call --system '
-             '--dest=org.chromium.PowerManager /org/chromium/PowerManager '
-             'org.chromium.PowerManager.RequestRestart'),
-            timeout=_COMMAND_TIMEOUT)
-
-
-    def wait_for_system_to_cycle(self, down_sec, up_sec):
-        """
-        Wait for the system to stop and then start responding to pings.
-
-        @param down_sec: Maximum delay for the system to go down.
-        @param up_sec: Maximum delay for the system to come back up.
-
-        @return: Floating-point time when system went down.
-        """
-        logging.info("Waiting for host to go down...")
-        if not self.host.ping_wait_down(timeout=down_sec):
-            raise error.TestError(
-                'System hasn\'t gone down after %d seconds' % (down_sec))
-        down_timestamp = time.time()
-        logging.info("System went down at %.2f", down_timestamp)
-
-        logging.info("Waiting for host to come back up...")
-        if not self.host.ping_wait_up(timeout=up_sec) or \
-            not self.host.wait_up(timeout=up_sec):
-            raise error.TestError('System didn\'t come back up')
-
-        return down_timestamp
-
-
-    def run_once(self):
-        # Start flashrom and then request that the system be suspended. The
-        # suspend should be deferred until flashrom finishes writing but should
-        # happen eventually.
-        flashrom_time = time.time()
-        self.start_fake_flashrom_write(_SUSPEND_FLASHROM_SEC)
-        self.send_suspend_request(_SUSPEND_DOWN_SEC)
-        delay_sec = self.wait_for_system_to_cycle(
-            _SUSPEND_DOWN_SEC, _SUSPEND_UP_SEC) - flashrom_time
-
-        # Check that powerd waited for flashrom to finish.
-        if delay_sec < _SUSPEND_FLASHROM_SEC:
-            raise error.TestError(
-                ('Suspend was blocked for %.2f sec; expected it to be blocked '
-                 'for at least %d sec') % (delay_sec, _SUSPEND_FLASHROM_SEC))
-
-        # Now do the same thing, but with a reboot request.
-        flashrom_time = time.time()
-        self.start_fake_flashrom_write(_REBOOT_FLASHROM_SEC)
-        self.send_reboot_request()
-        delay_sec = self.wait_for_system_to_cycle(
-            _REBOOT_DOWN_SEC, _REBOOT_UP_SEC) - flashrom_time
-        if delay_sec < _REBOOT_FLASHROM_SEC:
-            raise error.TestError(
-                ('Reboot was blocked for %.2f sec; expected it to be blocked '
-                 'for at least %d sec') % (delay_sec, _REBOOT_FLASHROM_SEC))
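The deleted power_DeferForFlashrom test sized its fake flashrom write with one piece of arithmetic: the dummy programmer writes one bit per clock cycle, so the clock is the block size in bits divided by the target duration. A minimal sketch of that calculation, using the constants from the deleted file:

    # Frequency passed as "dummy:freq=..." for a 15-second fake write.
    BLOCK_SIZE = 4096             # bytes; flashrom's simulated block size
    SUSPEND_FLASHROM_SEC = 15     # desired write duration in seconds

    frequency_hz = int(BLOCK_SIZE * 8 / float(SUSPEND_FLASHROM_SEC))
    print(frequency_hz)           # 2184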
diff --git a/server/site_tests/power_IdleServer/control b/server/site_tests/power_IdleServer/control
deleted file mode 100644
index c1547a5..0000000
--- a/server/site_tests/power_IdleServer/control
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "power_IdleServer"
-TIME = "SHORT"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-DEPENDENCIES = "rpm"
-
-DOC = """
-This test first cuts off power to a machine and then measures the battery power
-draw at idle.
-"""
-
-from autotest_lib.server import utils
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run_system_power_idle(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    if host.has_power():
-        host.power_off()
-
-    host_at = autotest.Autotest(host)
-    host_test = 'power_Idle'
-
-    host_at.run_test(host_test)
-
-    if host.has_power():
-        host.power_on()
-
-
-(tuple, failures) = utils.form_ntuples_from_machines(machines, 1)
-
-job.parallel_simple(run_system_power_idle, tuple[0], log=False)
diff --git a/server/site_tests/power_LW/control.power_Display b/server/site_tests/power_LW/control.power_Display
index d11982b..f6ccffc 100644
--- a/server/site_tests/power_LW/control.power_Display
+++ b/server/site_tests/power_LW/control.power_Display
@@ -9,6 +9,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:power_dashboard"
+PY_VERSION = 3
 
 DOC = """
 Control file for running power_Display in power lab.
diff --git a/server/site_tests/power_LW/control.power_Idle b/server/site_tests/power_LW/control.power_Idle
index 9cc1be0..0cd4f7e 100644
--- a/server/site_tests/power_LW/control.power_Idle
+++ b/server/site_tests/power_LW/control.power_Idle
@@ -9,6 +9,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:power_dashboard"
+PY_VERSION = 3
 
 DOC = """
 Control file for running power_Idle in power lab.
diff --git a/server/site_tests/power_LW/control.power_LoadTest_1hour b/server/site_tests/power_LW/control.power_LoadTest_1hour
index 9e87164..391251b 100644
--- a/server/site_tests/power_LW/control.power_LoadTest_1hour
+++ b/server/site_tests/power_LW/control.power_LoadTest_1hour
@@ -9,6 +9,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:power_dashboard"
+PY_VERSION = 3
 
 DOC = """
 Control file for running power_LoadTest.1hour in power lab.
diff --git a/server/site_tests/power_LW/control.power_Standby_fast b/server/site_tests/power_LW/control.power_Standby_fast
deleted file mode 100644
index 503c7ec..0000000
--- a/server/site_tests/power_LW/control.power_Standby_fast
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "puthik"
-NAME = "power_LW.power_Standby_fast"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-ATTRIBUTES = "suite:power_dashboard"
-
-DOC = """
-Control file for running power_Standby.fast in power lab.
-"""
-
-from autotest_lib.client.common_lib import utils
-
-args_dict = utils.args_to_dict(args)
-test = 'power_Standby'
-args = {
-    'pdash_note': args_dict.get('pdash_note', ''),
-    'tag' : 'fast_PLW',
-    'sample_hours' : 0.334,
-    'test_hours' : 0.334,
-}
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test("power_LW", host=host, test=test, args=args,
-                 machine=machine, tag=NAME.split('.')[1])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_LW/control.power_VideoEncode b/server/site_tests/power_LW/control.power_VideoEncode
index d3859f0..d400098 100644
--- a/server/site_tests/power_LW/control.power_VideoEncode
+++ b/server/site_tests/power_LW/control.power_VideoEncode
@@ -9,6 +9,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:power_dashboard"
+PY_VERSION = 3
 
 DOC = """
 Control file for running power_VideoEncode in power lab.
diff --git a/server/site_tests/power_LW/control.power_VideoPlayback b/server/site_tests/power_LW/control.power_VideoPlayback
index 22515e0..cb7f60a 100644
--- a/server/site_tests/power_LW/control.power_VideoPlayback
+++ b/server/site_tests/power_LW/control.power_VideoPlayback
@@ -9,6 +9,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:power_dashboard"
+PY_VERSION = 3
 
 DOC = """
 Control file for running power_VideoPlayback in power lab.
diff --git a/server/site_tests/power_LW/control.power_VideoPlayback_sw_decoder b/server/site_tests/power_LW/control.power_VideoPlayback_sw_decoder
index ae1626d..daa0fd3 100644
--- a/server/site_tests/power_LW/control.power_VideoPlayback_sw_decoder
+++ b/server/site_tests/power_LW/control.power_VideoPlayback_sw_decoder
@@ -9,6 +9,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:power_dashboard"
+PY_VERSION = 3
 
 DOC = """
 Control file for running power_VideoPlayback.sw_decoder in power lab.
diff --git a/server/site_tests/power_LW/control.power_WebGL b/server/site_tests/power_LW/control.power_WebGL
index 0471df9..59279f4 100644
--- a/server/site_tests/power_LW/control.power_WebGL
+++ b/server/site_tests/power_LW/control.power_WebGL
@@ -9,6 +9,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:power_dashboard"
+PY_VERSION = 3
 
 DOC = """
 Control file for running power_WebGL in power lab.
diff --git a/server/site_tests/power_LW/power_LW.py b/server/site_tests/power_LW/power_LW.py
index 05348de..62efb20 100644
--- a/server/site_tests/power_LW/power_LW.py
+++ b/server/site_tests/power_LW/power_LW.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -18,22 +19,25 @@
     SERVO_V4_ETH_VENDOR = '0bda'
     SERVO_V4_ETH_PRODUCT = '8153'
     WIFI_SSID = 'powertest_ap'
+    WIFI_PASSWORD = 'chromeos'
 
     def _get_wlan_ip(self, host):
         """Connect to wifi and return wlan ip address."""
         wlan_ip = host.get_wlan_ip()
+        logging.info('wlan_ip=%s', wlan_ip)
         if wlan_ip:
             return wlan_ip
 
-        if not host.connect_to_wifi(self.WIFI_SSID):
+        if not host.connect_to_wifi(self.WIFI_SSID, self.WIFI_PASSWORD):
             logging.info('Script to connect to wifi is probably missing.'
-                         'Run dummy_Pass as a workaround to install it.')
+                         'Run stub_Pass as a workaround to install it.')
             autotest_client = autotest.Autotest(host)
-            autotest_client.run_test('dummy_Pass')
-            if not host.connect_to_wifi(self.WIFI_SSID):
+            autotest_client.run_test('stub_Pass')
+            if not host.connect_to_wifi(self.WIFI_SSID, self.WIFI_PASSWORD):
                 raise error.TestError('Can not connect to wifi.')
 
         wlan_ip = host.get_wlan_ip()
+        logging.info('After connecting to wifi, wlan_ip=%s', wlan_ip)

         if not wlan_ip:
             raise error.TestError('Can not find wlan ip.')
         return wlan_ip
@@ -54,7 +58,11 @@
     def _stop_ethernet(self, host):
         """Find and unbind servo v4 usb ethernet."""
         # Stop check_ethernet.hook to reconnect the usb device
-        host.run('stop recover_duts')
+        try:
+            host.run('stop recover_duts')
+        except Exception:
+            logging.warning('Failed to stop recover_duts; continuing.')
+
         eth_usb = host.find_usb_devices(
             self.SERVO_V4_ETH_VENDOR, self.SERVO_V4_ETH_PRODUCT)
         if len(eth_usb) == 1 and eth_usb[0] and host.get_wlan_ip():
diff --git a/server/site_tests/power_MeetCall/control.16bot b/server/site_tests/power_MeetCall/control.16bot
deleted file mode 100644
index d514819..0000000
--- a/server/site_tests/power_MeetCall/control.16bot
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "puthik"
-NAME = "power_MeetCall.16bot"
-PURPOSE = "Use bond api to create meet bot and test hangout meet."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-
-DOC = """
-This test uses bond api to create meet bot and test hangout meet.
-
-To run the test manually:
-- Install the cred from https://crbug.com/874835#c3 to
-  /creds/service_accounts/bond_service_account.json in chroot
-- Ran the following command in chroot to install rsa to python2
-  sudo cp -r /usr/lib64/python{3.6,2.7}/site-packages/rsa
-"""
-
-from autotest_lib.client.common_lib import utils
-
-args_dict = utils.args_to_dict(args)
-args = {
-    'pdash_note': args_dict.get('pdash_note', ''),
-    'tag' : NAME.split('.')[1],
-    'num_bots' : 16,
-}
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test("power_MeetCall", host=host, args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_MeetCall/control.4bot b/server/site_tests/power_MeetCall/control.4bot
deleted file mode 100644
index 7cc6819..0000000
--- a/server/site_tests/power_MeetCall/control.4bot
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "puthik"
-NAME = "power_MeetCall.4bot"
-PURPOSE = "Use bond api to create meet bot and test hangout meet."
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-
-DOC = """
-This test uses bond api to create meet bot and test hangout meet.
-
-To run the test manually:
-- Install the cred from https://crbug.com/874835#c3 to
-  /creds/service_accounts/bond_service_account.json in chroot
-- Ran the following command in chroot to install rsa to python2
-  sudo cp -r /usr/lib64/python{3.6,2.7}/site-packages/rsa
-
-"""
-
-from autotest_lib.client.common_lib import utils
-
-args_dict = utils.args_to_dict(args)
-args = {
-    'pdash_note': args_dict.get('pdash_note', ''),
-    'tag' : NAME.split('.')[1],
-    'num_bots' : 4,
-    'duration' : 180,
-}
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test("power_MeetCall", host=host, args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_MeetCall/power_MeetCall.py b/server/site_tests/power_MeetCall/power_MeetCall.py
deleted file mode 100644
index 998562f..0000000
--- a/server/site_tests/power_MeetCall/power_MeetCall.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import logging
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-from autotest_lib.server.cros.cfm.utils import bond_http_api
-
-
-class power_MeetCall(test.test):
-    """Wrapper test to create meet bot and call power_MeetClient."""
-    version = 1
-
-    # 5 minutes for client test autotests overhead.
-    AUTOTESTS_OVERHEAD = 300
-
-    def run_once(self, host, args):
-        """Create meetbot and call client test."""
-        bond_api = bond_http_api.BondHttpApi()
-        meet_code = bond_api.CreateConference()
-        logging.info('meet_code: %s', meet_code)
-
-        num_bots = args.get('num_bots', 4)
-        duration = args.get('duration', 180) + self.AUTOTESTS_OVERHEAD
-
-        bots = bond_api.AddBotsRequest(meet_code, num_bots, duration)
-        if len(bots) < num_bots:
-            bond_api.ExecuteScript('@all leave', meet_code)
-            raise error.TestNAError('Can not add meet bots.')
-
-        args['meet_code'] = meet_code
-        if not args['pdash_note']:
-            args['pdash_note'] = meet_code
-        autotest_client = autotest.Autotest(host)
-        autotest_client.run_test('power_MeetClient', **args)
diff --git a/server/site_tests/power_Monitoring/control b/server/site_tests/power_Monitoring/control
deleted file mode 100644
index 597de3e..0000000
--- a/server/site_tests/power_Monitoring/control
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "coconutruben"
-NAME = "power_Monitoring"
-PURPOSE = "Continuously measure power with servod while running client tests."
-CRITERIA = "This test is a wrapper for ServodWrapper wrapped test(s)."
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-This wrapper test runs the tests specified under a given suite in a continuous
-loop for a given amount of time. If the DUT runs out of power, the test charges
-it before continuing the test suite.
-
-So this test is designed to collect all relevant power tests for many loops.
-This has three main advantages and use cases:
-- There is a wider spread of battery charge percentages that tests run under
-  as tests don't recharge at the end individually.
-- This runs as a one stop test and requires less configuration and scheduling.
-- As this runs as one test this allows to collect multiple samples of the same
-  client test under the same OS image in environments where a new test triggers
-  an image update.
-
-Based on the above this test might be a good fit for your use case or not.
-
-This test makes the following assumptions:
-1. The DUT is attached to a servo device that supports charging and
-discharging (currently only v4).
-
-2. Servod is already running, and its host and port are provided to this
-autotest.
-
-3. The workstation (or where the autotest is kicked off from) should be in same
-timezone with the DUT.
-
-Sample usage:
-test_that <ip address of DUT> power_Monitoring --args \
-'suite=power_monitoring runtime_hr=20 servo_host=localhost servo_port=9999'
-
-What are the parameters (all optional with defaults):
-suite: the test suite to run.
-runtime_hr: desired runtime in hours.
-start_charging_level: battery charge percent when charging triggers.
-stop_charging_level: battery charge percent when charging stops.
-servo_host: host of servod instance.
-servo_port: port that the servod instance is on.
-pdash_note: User supplied note to tag the specific test; optional.
-"""
-# Workaround to make it compatible with moblab autotest UI.
-global args_dict
-try:
-    args_dict
-except NameError:
-    args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('power_Monitoring', host=host, config=args_dict,
-                 disable_sysinfo=True)
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_Monitoring/power_Monitoring.py b/server/site_tests/power_Monitoring/power_Monitoring.py
deleted file mode 100644
index a9f2a45..0000000
--- a/server/site_tests/power_Monitoring/power_Monitoring.py
+++ /dev/null
@@ -1,266 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Wrap test suite with power_ServodWrapper and run in a time-limited loop."""
-
-import logging
-import random
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import retry
-from autotest_lib.server import test
-from autotest_lib.server.cros.dynamic_suite import suite
-from autotest_lib.server.cros.power import servo_charger
-
-# Timeout in seconds to attempt to ping the DUT.
-DUT_PINGABLE_TIMEOUT_S = 30.0
-# Delay in seconds before reattempting to ping the DUT.
-VERIFY_DUT_PINGABLE_DELAY_S = 10.0
-# Number of times to attempt to ping (and reset on failure) the DUT.
-VERIFY_DUT_PINGABLE_TRIES = 3
-# Timeout in minutes to supply to retry decorator for dut pingable verifiction.
-VERIFY_DUT_PINGABLE_TIMEOUT_MIN = ((DUT_PINGABLE_TIMEOUT_S +
-                                    VERIFY_DUT_PINGABLE_DELAY_S) / 60.0 *
-                                   (VERIFY_DUT_PINGABLE_TRIES + 1))
-# Delay in seconds before reattempting to query DUT EC for battery charge after
-# reset.
-GET_CHARGE_WITH_RESET_DELAY_S = 3.0
-# Number of times to attempt to get the battery charge (and reset on failure)
-# the DUT.
-GET_CHARGE_WITH_RESET_TRIES = 3
-# Timeout in minutes to supply to retry decorator to get charge with hard reset.
-GET_CHARGE_WITH_RESET_TIMEOUT_MIN = (GET_CHARGE_WITH_RESET_DELAY_S / 60.0 *
-                                     (GET_CHARGE_WITH_RESET_TRIES + 1))
-# Delay in seconds before reattempting to query DUT EC for battery charge after
-# timeout/error from EC console
-GET_CHARGE_DELAY_S = 0.1
-# Number of times to attempt to get the battery charge from the DUT.
-GET_CHARGE_TRIES = 3
-# Timeout in minutes to supply to retry decorator to get charge.
-GET_CHARGE_TIMEOUT_MIN = (GET_CHARGE_DELAY_S / 60.0 *
-                          (GET_CHARGE_TRIES + 1))
-# Delay in seconds before attempting to query EC again after reset.
-EC_RESET_DELAY_S = 3.0
-# Delay in seconds to allow the system to detect AP shutdown.
-AP_SHUTDOWN_DELAY_S = 10.0
-# Polling rate in seconds to poll battery charge to determine charging complete.
-CHARGE_STATE_POLLING_RATE_S = 60.0
-
-class power_Monitoring(test.test):
-    """Time limited loop for running power suites with power_ServodWrapper.
-
-    This test runs a given suite of client tests with power_ServodWrapper
-    for a given amount of time, ensuring that if the battery level drops
-    below a configurable percentage it will be charged back up to a configurable
-    percentage.
-    """
-    version = 1
-
-    # The 'real' server side test to use to wrap power tests.
-    WRAPPER_TEST = 'power_ServodWrapper'
-    # The default suite of tests to run with monitoring.
-    DEFAULT_SUITE = 'power_monitoring'
-    # The default runtime in hours for the suite.
-    DEFAULT_RUNTIME_HR = 12.0
-    # The maximum runtime for monitoring. 6 weeks.
-    # Longer command line configurations will be ignored.
-    MAX_RUNTIME_HR = 6 * 7 * 24.0
-    # The minimum runtime for monitoring.
-    # Lower command line configurations will be ignored.
-    MIN_RUNTIME_HR = 0.1
-    # The default battery charge percent to stop charging the DUT.
-    DEFAULT_STOP_CHARGING_LEVEL = 75.0
-    # The default battery charge percent to start charging the DUT.
-    DEFAULT_START_CHARGING_LEVEL = 25.0
-    # The maximum battery charge percent to stop charging the DUT.
-    # Higher command line configurations will be ignored.
-    MAX_STOP_CHARGING_LEVEL = 95.0
-    # The minimum battery charge percent to start charging the DUT.
-    # Lower command line configurations will be ignored.
-    MIN_START_CHARGING_LEVEL = 4.0
-    # Maximum number of consecutive failures allowed before the monitoring
-    # is aborted.
-    MAX_CONSECUTIVE_FAILURES = 5
-    # Max time to charge from 0 to 100%. Plenty of room for slow charging.
-    MAX_CHARGING_TIME_HR = 5
-
-    def initialize(self, host, config={}):
-        """Setup power monitoring.
-
-        @param host: CrosHost object representing the DUT.
-        @param config: the args argument from test_that in a dict.
-            -start_charging_level: float, battery level when charging triggers
-            -stop_charging_level: float, battery level when charging stops
-            -suite: suite to loop over
-            -runtime_hr: runtime in hours the monitoring should run
-            -pdash_note: note to add to power dashboard upload
-        """
-        # power_Monitoring is a wrapper around a wrapper - it does not need
-        # to collect all the sysinfo information and potentially be stuck
-        # there waiting for a dead device to resurface.
-        self.job.fast = True
-        start = float(config.get('start_charging_level',
-                                 self.DEFAULT_START_CHARGING_LEVEL))
-        stop = float(config.get('stop_charging_level',
-                                self.DEFAULT_STOP_CHARGING_LEVEL))
-        self.stop_charging_level = min(stop, self.MAX_STOP_CHARGING_LEVEL)
-        self.start_charging_level = max(start, self.MIN_START_CHARGING_LEVEL)
-        self._host = host
-        servo = host.servo
-        self._charger = servo_charger.ServoV4ChargeManager(host, servo)
-        self._charger.stop_charging(verify=True)
-
-        # If no suite is defined, run with power_monitoring suite.
-        test_suite = config.get('suite', self.DEFAULT_SUITE)
-        runtime_hr = float(config.get('runtime_hr', self.DEFAULT_RUNTIME_HR))
-        self._runtime_hr = min(max(runtime_hr, self.MIN_RUNTIME_HR),
-                               self.MAX_RUNTIME_HR)
-
-        fs_getter = suite.create_fs_getter(self.autodir)
-        # Find the wrapper test.
-        w_predicate = suite.test_name_equals_predicate(self.WRAPPER_TEST)
-        self._wrapper_test = suite.find_and_parse_tests(fs_getter, w_predicate)
-        if not self._wrapper_test:
-            raise error.TestFail('%r wrapper test not found.' %
-                                 self.WRAPPER_TEST)
-        # Find the test suite in autotest file system.
-        predicate = suite.name_in_tag_predicate(test_suite)
-        self._tests = suite.find_and_parse_tests(fs_getter, predicate)
-        if not self._tests:
-            raise error.TestNAError('%r suite has no tests under it.' %
-                                    test_suite)
-        # Sort the tests by their name.
-        self._tests.sort(key=lambda t: t.name)
-        self._pdash_note = config.get('pdash_note', '')
-
-    def run_once(self, host):
-        """Measure power while running the client side tests in a loop.
-
-        @param host: CrosHost object representing the DUT.
-        """
-        # Figure out end timestamp.
-        end = time.time() + self._runtime_hr * 60 * 60
-        logging.info('Will be looping over the tests: %s. Order will be '
-                     'randomized.', ', '.join(t.name for t in self._tests))
-        random.shuffle(self._tests)
-        wrapper_name = self._wrapper_test[0].name
-        logging.debug('Going to run the tests on wrapper %s.', wrapper_name)
-        # Keep track of consecutive failures to bail out if it seems that
-        # tests are not passing.
-        consecutive_failures = test_run = 0
-        while time.time() < end:
-            # First verify and charge DUT to configured percentage.
-            self._verify_and_charge_dut()
-            # Use tests as a queue where the first test is always run
-            # and inserted in the back again at the end.
-            current_test = self._tests.pop(0)
-            logging.info('About to run monitoring on %s.', current_test.name)
-            wrapper_config = {'test': current_test.name}
-            subdir_tag = '%05d' % test_run
-            if self._pdash_note:
-                wrapper_config['pdash_note'] = self._pdash_note
-            try:
-                self.job.run_test(wrapper_name, host=host,
-                                  config=wrapper_config, subdir_tag=subdir_tag)
-                consecutive_failures = 0
-            except Exception as e:
-                # Broad except as we don't really care about the exception
-                # but rather want to make sure that we know how many failures
-                # have happened in a row.
-                logging.warn('Issue running %s: %s', current_test.name, str(e))
-                consecutive_failures += 1
-            if consecutive_failures >= self.MAX_CONSECUTIVE_FAILURES:
-                raise error.TestFail('%d consecutive failures. Stopping.' %
-                                     consecutive_failures)
-            test_run += 1
-            # Add the test back to the pipe.
-            self._tests.append(current_test)
-
-    def cleanup(self):
-        """Turn on charging at the end again."""
-        self._charger.start_charging(verify=False)
-
-    @retry.retry(error.TestFail, timeout_min=GET_CHARGE_TIMEOUT_MIN,
-                 delay_sec=GET_CHARGE_DELAY_S)
-    def _get_charge_percent(self):
-        """Retrieve battery_charge_percent from the DUT in a retry loop."""
-        return float(self._host.servo.get('battery_charge_percent'))
-
-    @retry.retry(error.TestFail, timeout_min=GET_CHARGE_WITH_RESET_TIMEOUT_MIN,
-                 delay_sec=GET_CHARGE_WITH_RESET_DELAY_S)
-    def _force_get_charge_percent(self):
-        """Attempt to get the charge percent through cold reset if necessary."""
-        try:
-            return self._get_charge_percent()
-        except error.TestFail as e:
-            logging.warn('Failed to get battery charge levels even ',
-                         'after turning on charging. Cold resetting.'
-                         'before re-attempting.')
-            self._host.servo._power_state.reset()
-            # Allow DUT time after cold reset to come back.
-            time.sleep(EC_RESET_DELAY_S)
-            raise error.TestFail('Failed to get battery charge percent. %s',
-                                 str(e))
-
-    def _charge_dut(self):
-        """Charge the DUT up."""
-        self._charger.start_charging()
-        time.sleep(EC_RESET_DELAY_S)
-        current_charge = start_charge = self._force_get_charge_percent()
-        logging.debug('Charge level of %d%%. Going to charge up.',
-                      current_charge)
-        charge_time_mltp = max(self.stop_charging_level -
-                               current_charge, 0.0)/100
-        # Calculate how many seconds to allow at most for charging.
-        charge_time = charge_time_mltp * self.MAX_CHARGING_TIME_HR * 60 * 60
-        logging.debug('Going to charge for at most %d minutes.',
-                      charge_time/60)
-        start = time.time()
-        end = start + charge_time
-        # Shut down the AP to increase charging speed.
-        self._host.servo.set_nocheck('ec_uart_cmd', 'apshutdown')
-        # Give the EC time to shutdown properly.
-        time.sleep(AP_SHUTDOWN_DELAY_S)
-        while time.time() < end and (current_charge <
-                                     self.stop_charging_level):
-            logging.debug('Charge level at %d%%. Continuing to charge '
-                          'until %d%%.', current_charge,
-                          self.stop_charging_level)
-            # Poll once a minute.
-            time.sleep(CHARGE_STATE_POLLING_RATE_S)
-            current_charge = self._get_charge_percent()
-        if current_charge < self.stop_charging_level:
-            # Restart the AP again before indicating failure.
-            self._host.servo.set_nocheck('ec_uart_cmd', 'powerbtn')
-            raise error.TestFail('Dut only charged from %d%% to %d%% in %d '
-                                 'minutes.' % (start_charge, current_charge,
-                                               int((time.time()-start)/60)))
-        else:
-            logging.debug('Charging finished. Charge at %d%% after %d '
-                          'minutes.', current_charge, (time.time() - start))
-        self._charger.stop_charging(verify=True)
-        # AP was shutdown. Restart it again.
-        self._host.servo.set_nocheck('ec_uart_cmd', 'powerbtn')
-
-    @retry.retry(error.TestFail, timeout_min=VERIFY_DUT_PINGABLE_TIMEOUT_MIN,
-                 delay_sec=VERIFY_DUT_PINGABLE_DELAY_S)
-    def _verify_dut(self):
-      """Verify DUT can be pinged. Reset if DUT not responsive."""
-      if not self._host.ping_wait_up(timeout=DUT_PINGABLE_TIMEOUT_S):
-          self._host.servo._power_state.reset()
-          raise error.TestFail('Dut did not reboot or respond to ping after '
-                               'charging.')
-
-    def _verify_and_charge_dut(self):
-        """Charge DUT up to |stop_charging| level if below |start_charging|."""
-        try:
-            if self._get_charge_percent() < self.start_charging_level:
-                # Only go through the charging sequence if necessary.
-                self._charge_dut()
-        except error.TestFail:
-            # Failure to initially get the charge might be due to the EC being
-            # off. Try charging to see whether that recovers the device.
-            self._charge_dut()
-        self._verify_dut()
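For orientation, the retry budgets in the deleted power_Monitoring.py are derived from the ping constants rather than hard-coded; the verification timeout handed to the @retry decorator works out as follows (same formula as the deleted module):

    # Reproduces VERIFY_DUT_PINGABLE_TIMEOUT_MIN from the deleted module.
    DUT_PINGABLE_TIMEOUT_S = 30.0       # max time to wait for one ping attempt
    VERIFY_DUT_PINGABLE_DELAY_S = 10.0  # delay before retrying
    VERIFY_DUT_PINGABLE_TRIES = 3       # retries on top of the initial attempt

    timeout_min = ((DUT_PINGABLE_TIMEOUT_S + VERIFY_DUT_PINGABLE_DELAY_S) / 60.0
                   * (VERIFY_DUT_PINGABLE_TRIES + 1))
    print(timeout_min)                  # 2.666... minutes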
diff --git a/server/site_tests/power_PacWrapper/control b/server/site_tests/power_PacWrapper/control
new file mode 100644
index 0000000..dffc4de
--- /dev/null
+++ b/server/site_tests/power_PacWrapper/control
@@ -0,0 +1,61 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+
+AUTHOR = "klug"
+NAME = "power_PacWrapper"
+PURPOSE = "Measure power with a pacman debugger while running a client test."
+CRITERIA = "This test is a wrapper for a client test."
+TIME = "LONG"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "server"
+DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
+
+DOC = """
+This wrapper test automates the process of power measurement with a pacman
+debugger while running a client test. Please check the client test's control
+file for any hardware requirement, e.g. no AC power, no Ethernet.
+
+This test makes the following assumptions:
+1. Pacman Debugger is connected to the DUT and can be run manually.
+
+2. The workstation (or wherever the autotest is kicked off from) should be in
+the same timezone as the DUT.
+
+3. pacman.py is in the PATH and discoverable via shutil.which('pacman.py').
+
+Installation:
+wget https://bootstrap.pypa.io/pip/3.6/get-pip.py
+sudo python ./get-pip.py
+pip3 install pandas plotly pyftdi
+
+Sample usage:
+test_that <ip address of DUT> power_PacWrapper --args \
+'test=power_LoadTest.fast config=./boards/skyrim/skyrim_r1_pacs.py \
+ mapping=./boards/skyrim/skyrim_r1_railmap.csv \
+ gpio=./boards/skyrim/skyrim_r0_gpio.csv '
+
+What are the parameters:
+test: the client test to run in wrapper test; DUT power is measured during this
+      client test; required.
+note: User supplied note to tag the specific test; optional.
+"""
+
+# Workaround to make it compatible with moblab autotest UI.
+global args_dict
+try:
+    args_dict
+except NameError:
+    args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    host = hosts.create_host(machine, servo_args=servo_args)
+    job.run_test("power_PacWrapper", host=host, config=args_dict)
+
+parallel_simple(run, machines)
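The sample usage in this new control file passes all pacman settings through a single --args string, which the control file turns into the config dict handed to power_PacWrapper. A rough stand-in for that parsing, for illustration only (the real conversion is done by autotest's utils.args_to_dict; the paths are the ones from the sample usage above):

    # Illustration only: space-separated key=value pairs become the config dict.
    def parse_args(arg_str):
        return dict(kv.split('=', 1) for kv in arg_str.split())

    config = parse_args('test=power_LoadTest.fast '
                        'config=./boards/skyrim/skyrim_r1_pacs.py '
                        'mapping=./boards/skyrim/skyrim_r1_railmap.csv '
                        'gpio=./boards/skyrim/skyrim_r0_gpio.csv')
    assert config['test'] == 'power_LoadTest.fast'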
diff --git a/server/site_tests/power_PacWrapper/power_PacWrapper.py b/server/site_tests/power_PacWrapper/power_PacWrapper.py
new file mode 100644
index 0000000..dbaadff
--- /dev/null
+++ b/server/site_tests/power_PacWrapper/power_PacWrapper.py
@@ -0,0 +1,39 @@
+# Lint as: python3
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wrapper test measures DUT power via servod with Servo devices."""
+
+from autotest_lib.server.cros.power import power_base_wrapper
+from autotest_lib.server.cros.power import power_telemetry_logger
+
+
+class power_PacWrapper(power_base_wrapper.PowerBaseWrapper):
+    """Wrapper test around a client test.
+
+    This wrapper test runs one client test given by the user, and measures DUT
+    power with a pacman debugger.
+    """
+    version = 1
+
+    def _get_power_telemetry_logger(self, host, config, resultsdir):
+        """Return powerlog telemetry logger.
+
+        @param host: CrosHost object representing the DUT.
+        @param config: the args argument from test_that in a dict. Settings for
+                       power telemetry devices.
+                       required data:
+                       {'test': 'test_TestName.tag',
+                        'servo_host': host of servod instance,
+                        'servo_port': port that the servod instance is on}
+        @param resultsdir: path to directory where current autotest results are
+                           stored, e.g. /tmp/test_that_results/
+                           results-1-test_TestName.tag/test_TestName.tag/
+                           results/
+        """
+        self._pacman_telemetry_logger = power_telemetry_logger.PacTelemetryLogger(
+                config, resultsdir, host)
+        return self._pacman_telemetry_logger
+
+    def postprocess(self):
+        self._pacman_telemetry_logger.output_pacman_aggregates(self)
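Per the docstring of _get_power_telemetry_logger above, the config dict must at least name the wrapped test and the servod instance. A hedged example of the minimum shape (the keys follow the docstring; the values are placeholders, not requirements of PacTelemetryLogger beyond what the docstring states):

    # Example config; keys from the docstring above, values illustrative.
    config = {
        'test': 'power_LoadTest.fast',   # client test to wrap
        'servo_host': 'localhost',       # host of the servod instance
        'servo_port': '9999',            # port the servod instance is on
    }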
diff --git a/server/site_tests/power_PowerlogWrapper/control b/server/site_tests/power_PowerlogWrapper/control
deleted file mode 100644
index 90bfbf8..0000000
--- a/server/site_tests/power_PowerlogWrapper/control
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mqg"
-NAME = "power_PowerlogWrapper"
-PURPOSE = "Measure power with powerlog tool while running a client test."
-CRITERIA = "This test is a wrapper for a client test."
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-
-DOC = """
-This wrapper test automates the process of power measurement with powerlog tool
-while running a client test. Please check the client test's control file for any
-hardware requirement, e.g. no AC power, no Ethernet.
-
-This test makes the following assumptions:
-1. Sweetberry config files are in directory
-/usr/lib64/python2.7/site-packages/servo/data/
-For example,
-/usr/lib64/python2.7/site-packages/servo/data/eve_rev7.board
-/usr/lib64/python2.7/site-packages/servo/data/eve_rev7.scenario
-
-2. The workstation (or where the autotest is kicked off from) should be in same
-timezone with the DUT.
-
-Sample usage:
-test_that <ip address of DUT> power_PowerlogWrapper --args \
-'test=power_LoadTest.fast sweetberry_interval=1 sweetberry_config=eve_rev7'
-
-What are the parameters:
-test: the client test to run in wrapper test; DUT power is measured during this
-      client test; required.
-sweetberry_interval: number of seconds between each Sweetberry measurement;
-                     optional.
-sweetberry_config: use [].board and [].scenario as configuration file for
-                   Sweetberry measurement; optional.
-sweetberry_serial: serial number of sweetberry to use; If not supplied use
-                   the first sweetberry found; optional.
-pdash_note: User supplied note to tag the specific test; optional.
-"""
-
-# Workaround to make it compatible with moblab autotest UI.
-global args_dict
-try:
-    args_dict
-except NameError:
-    args_dict = utils.args_to_dict(args)
-
-def run(machine):
-    job.run_test('power_PowerlogWrapper', host=hosts.create_host(machine),
-                 config=args_dict)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_PowerlogWrapper/control.charge_control b/server/site_tests/power_PowerlogWrapper/control.charge_control
deleted file mode 100644
index 419bcd0..0000000
--- a/server/site_tests/power_PowerlogWrapper/control.charge_control
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mqg"
-NAME = "power_PowerlogWrapper.charge_control"
-PURPOSE = "Measure power with powerlog tool while running a client test."
-CRITERIA = "This test is a wrapper for a client test."
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
-
-DOC = """
-This wrapper test automates the process of power measurement with powerlog tool
-while running a client test. It also uses Servo v4 to control charging and
-discharging the DUT. Please check the client test's control file for any
-hardware requirement, e.g. no AC power, no Ethernet.
-
-This test makes the following assumptions:
-1. Sweetberry config files are in directory
-/usr/lib64/python2.7/site-packages/servo/data/
-For example,
-/usr/lib64/python2.7/site-packages/servo/data/eve_rev7.board
-/usr/lib64/python2.7/site-packages/servo/data/eve_rev7.scenario
-
-2. The workstation (or where the autotest is kicked off from) should be in same
-timezone with the DUT.
-
-3. Servod is started for the Servo v4 connected to the DUT. AC should be plugged
-into the Servo v4.
-
-Sample usage:
-test_that <ip address of DUT> power_PowerlogWrapper --args \
-'test=power_LoadTest.fast servo_host=localhost servo_port=9999 sweetberry_interval=1 sweetberry_config=eve_rev7'
-
-What are the parameters:
-test: the client test to run in wrapper test; DUT power is measured during this
-      client test; required.
-servo_host: host of servod instance; required.
-servo_port: port that the servod instance is on; required.
-sweetberry_interval: number of seconds between each Sweetberry measurement;
-                     optional.
-sweetberry_config: use [].board and [].scenario as configuration file for
-                   Sweetberry measurement; optional.
-sweetberry_serial: serial number of sweetberry to use; If not supplied use
-                   the first sweetberry found; optional.
-pdash_note: User supplied note to tag the specific test; optional.
-"""
-
-# Workaround to make it compatible with moblab autotest UI.
-global args_dict
-try:
-    args_dict
-except NameError:
-    args_dict = utils.args_to_dict(args)
-
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('power_PowerlogWrapper', host=host, tag=NAME.split('.')[1],
-                 config=args_dict, charge_control=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_PowerlogWrapper/control.moblab b/server/site_tests/power_PowerlogWrapper/control.moblab
deleted file mode 100644
index 67dc213..0000000
--- a/server/site_tests/power_PowerlogWrapper/control.moblab
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "mqg"
-NAME = "power_PowerlogWrapper.moblab"
-PURPOSE = "Measure power with powerlog tool while running a client test."
-CRITERIA = "This test is a wrapper for a client test."
-ATTRIBUTES = "suite:power_measurement_wrapper"
-TIME = "LONG"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-REQUIRE_SSP = False
-
-DOC = """
-This wrapper test automates the process of power measurement with powerlog tool
-while running a client test. Please check the client test's control file for any
-hardware requirement, e.g. no AC power, no Ethernet.
-
-This test makes the following assumptions:
-1. Sweetberry config files are in directory
-/usr/lib64/python2.7/site-packages/servo/data/
-For example,
-/usr/lib64/python2.7/site-packages/servo/data/eve_rev7.board
-/usr/lib64/python2.7/site-packages/servo/data/eve_rev7.scenario
-
-2. The workstation (or where the autotest is kicked off from) should be in same
-timezone with the DUT.
-
-Sample usage:
-test_that <ip address of DUT> power_PowerlogWrapper --args \
-'test=power_LoadTest.fast sweetberry_interval=1 sweetberry_config=eve_rev7'
-
-What are the parameters:
-test: the client test to run in wrapper test; DUT power is measured during this
-      client test; required.
-sweetberry_interval: number of seconds between each Sweetberry measurement;
-                     optional.
-sweetberry_config: use [].board and [].scenario as configuration file for
-                   Sweetberry measurement; optional.
-sweetberry_serial: serial number of sweetberry to use; If not supplied use
-                   the first sweetberry found; optional.
-pdash_note: User supplied note to tag the specific test; optional.
-"""
-
-# Intended for use with Moblab UI.
-args_dict = utils.args_to_dict(args)
-
-def run(machine):
-    job.run_test('power_PowerlogWrapper', host=hosts.create_host(machine),
-                 config=args_dict)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_PowerlogWrapper/power_PowerlogWrapper.py b/server/site_tests/power_PowerlogWrapper/power_PowerlogWrapper.py
deleted file mode 100644
index d8ba603..0000000
--- a/server/site_tests/power_PowerlogWrapper/power_PowerlogWrapper.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Wrapper test measures DUT power with Sweetberry via powerlog tool."""
-
-from autotest_lib.server.cros.power import power_base_wrapper
-from autotest_lib.server.cros.power import power_telemetry_logger
-from autotest_lib.server.cros.power import servo_charger
-
-
-class power_PowerlogWrapper(power_base_wrapper.PowerBaseWrapper):
-    """Wrapper test around a client test.
-
-    This wrapper test runs 1 client test given by user, and measures DUT power
-    with Sweetberry via powerlog tool.
-    """
-    version = 1
-
-    def warmup(self, host, charge_control=False):
-        """Disconnect DUT from AC power.
-
-        Many power autotests require that DUT is on battery, thus disconnect DUT
-        from AC power as preparation.
-        """
-        super(power_PowerlogWrapper, self).warmup(host)
-        if charge_control:
-            self._charge_manager = servo_charger.ServoV4ChargeManager(
-                    host, host.servo)
-            self._charge_manager.stop_charging()
-
-    def cleanup(self, charge_control=False):
-        """Connect DUT to AC power.
-
-        This allows DUT to charge between tests, and complies with moblab
-        requirement.
-        """
-        if charge_control:
-            self._charge_manager.start_charging()
-        super(power_PowerlogWrapper, self).cleanup()
-
-    def _get_power_telemetry_logger(self, host, config, resultsdir):
-        """Return powerlog telemetry logger.
-
-        @param host: CrosHost object representing the DUT.
-        @param config: the args argument from test_that in a dict. Settings for
-                       power telemetry devices.
-                       required data: {'test': 'test_TestName.tag'}
-        @param resultsdir: path to directory where current autotest results are
-                           stored, e.g. /tmp/test_that_results/
-                           results-1-test_TestName.tag/test_TestName.tag/
-                           results/
-        """
-        return power_telemetry_logger.PowerlogTelemetryLogger(config,
-                                                              resultsdir,
-                                                              host)
diff --git a/server/site_tests/power_QualTestSimple/power_QualTestSimple.py b/server/site_tests/power_QualTestSimple/power_QualTestSimple.py
new file mode 100644
index 0000000..8c195c5
--- /dev/null
+++ b/server/site_tests/power_QualTestSimple/power_QualTestSimple.py
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server.cros.pvs import test_with_pass_criteria
+
+HOURS = 60 * 60
+
+
+class power_QualTestSimple(test_with_pass_criteria.test_with_pass_criteria):
+    """
+    power_QualTestSimple extends test_with_pass_criteria for the purpose of
+    power qualification testing. Unlike power_QualTestWrapper, it wraps the
+    test directly and does not add any prefix tests.
+    """
+
+    version = 1
+
+    def initialize(self, **args_dict):
+        """
+        initialize implements the initialize call in test.test and is called
+        before execution of the test. This wrapper passes the wrapped test
+        straight through without adding any prefix tests.
+        """
+        super(power_QualTestSimple,
+              self).initialize(test_to_wrap=args_dict['test_to_wrap'])
diff --git a/server/site_tests/power_QualTestWrapper/control.FastLoadTest b/server/site_tests/power_QualTestWrapper/control.FastLoadTest
new file mode 100644
index 0000000..dcea28c
--- /dev/null
+++ b/server/site_tests/power_QualTestWrapper/control.FastLoadTest
@@ -0,0 +1,52 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS PVS Team"
+NAME = "power_FastLoadTest"
+PURPOSE = "power_Dummy orchestrated with wrappers for qualification"
+CRITERIA = "This test is a benchmark."
+TIME = "SHORT"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "power"
+TEST_TYPE = "server"
+ATTRIBUTES = ""
+PY_VERSION = 3
+DOC = """
+This is a power_QualTestWrapper example, which uses the QualTestWrapper
+implementation to call a power test, after first calling the standard
+prefix tests for the power_qual sequence. QualTestWrapper inherits from
+pvs.test_with_pass_criteria, which uses either pass criteria passed
+on the command line or the default pass criteria below to pass/fail tests
+based on their key value results written to the results directory.
+
+To create new wrapped Qual Tests, please copy this example file, and
+fill out the TEST_NAME you intend to call, as well as the TEST_ARGS
+dict to be passed to that test.
+"""
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.pvs import wrapper_job_with_name
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+TEST_NAME = "power_LoadTest"
+TEST_ARGS = {"loop_time":120,"loop_count":1,"check_network":False,"ac_ok":True,
+            "gaia_login":False}
+
+DEFAULT_PASS_CRITERIA = {"wh_energy_used":(0,None)}
+
+def run(machine):
+    args_dict["percent_target_charge"] = 65
+
+    # Setup the client machine.
+    host = hosts.create_host(machine, servo_args=servo_args)
+    wrapper_job = wrapper_job_with_name.wrapper_job_with_name(job=job,
+                                    job_name=NAME,
+                                    wrapper_url="power_QualTestWrapper",
+                                    args_dict=args_dict,
+                                    default_pass_criteria=DEFAULT_PASS_CRITERIA)
+    wrapper_job.run(host=host, test_to_wrap=TEST_NAME, wrap_args=TEST_ARGS)
+
+parallel_simple(run, machines)
\ No newline at end of file
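DEFAULT_PASS_CRITERIA above maps a key-value result name to a pair of bounds; assuming the pair is (lower, upper) with None meaning the bound is not enforced, a check over the results-directory keyvals could look like the sketch below. This is an illustrative reading, not the pvs.test_with_pass_criteria implementation:

    # Sketch only: pass if every keyval falls inside its (lower, upper) bounds.
    def meets_criteria(keyvals, criteria):
        for key, (lower, upper) in criteria.items():
            value = keyvals[key]
            if lower is not None and value < lower:
                return False
            if upper is not None and value > upper:
                return False
        return True

    # With {"wh_energy_used": (0, None)}, any non-negative reading passes.
    print(meets_criteria({'wh_energy_used': 1.2}, {'wh_energy_used': (0, None)}))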
diff --git a/server/site_tests/power_QualTestWrapper/power_QualTestWrapper.py b/server/site_tests/power_QualTestWrapper/power_QualTestWrapper.py
new file mode 100644
index 0000000..e57e031
--- /dev/null
+++ b/server/site_tests/power_QualTestWrapper/power_QualTestWrapper.py
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server.cros.pvs import test_with_pass_criteria
+
+HOURS = 60 * 60
+
+
+class power_QualTestWrapper(test_with_pass_criteria.test_with_pass_criteria):
+    """
+    power_QualTestWrapper extends test_with_pass_criteria for the purpose of
+    power qualification testing. We use the add_prefix_test method to add the
+    two tests which must run before each power qualification test
+    """
+
+    version = 1
+
+    def initialize(self, **args_dict):
+        """
+        initialize implements the initialize call in test.test and is called
+        before execution of the test. In this wrapper, initialize also adds the
+        test prefixes necessary for the power_Qual tests.
+        """
+        super(power_QualTestWrapper,
+              self).initialize(test_to_wrap=args_dict['test_to_wrap'])
+        self.add_prefix_test(
+                'power_BatteryCharge', {
+                        'percent_target_charge':
+                        args_dict['percent_target_charge'],
+                        'max_run_time': 5 * HOURS,
+                        'timeout': 6 * HOURS
+                })
+        self.add_prefix_test('power_WaitForCoolDown', {'target_temp': 48})
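Summarizing the wrapper above: initialize() queues two prefix tests ahead of the wrapped power test, with time budgets expressed via HOURS = 3600. The sketch below only restates the order and arguments visible in this file and in control.FastLoadTest; the actual sequencing lives in pvs.test_with_pass_criteria:

    # Orientation only; values taken from power_QualTestWrapper.py and
    # control.FastLoadTest above.
    HOURS = 60 * 60
    prefix_tests = [
        ('power_BatteryCharge', {'percent_target_charge': 65,   # from control
                                 'max_run_time': 5 * HOURS,     # 18000 s
                                 'timeout': 6 * HOURS}),        # 21600 s
        ('power_WaitForCoolDown', {'target_temp': 48}),
    ]
    wrapped_test = 'power_LoadTest'  # TEST_NAME in control.FastLoadTest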
diff --git a/server/site_tests/power_RPMTest/control b/server/site_tests/power_RPMTest/control
deleted file mode 100644
index 2a8d006..0000000
--- a/server/site_tests/power_RPMTest/control
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-from autotest_lib.server import utils
-
-AUTHOR = "dbasehore, snanda"
-NAME = "power_RPMTest"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-
-DOC = """
-This tests that the power supply for a device can successfully change state.
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test('power_RPMTest', verify=True, host=host,
-                 power_sequence=[False, True])
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/power_RPMTest/power_RPMTest.py b/server/site_tests/power_RPMTest/power_RPMTest.py
deleted file mode 100755
index 344bbf4..0000000
--- a/server/site_tests/power_RPMTest/power_RPMTest.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import autotest, test
-
-
-class power_RPMTest(test.test):
-    """Test RPM functionality."""
-    version = 1
-
-
-    def initialize(self, host, verify=True):
-        """
-        @param host: The host to run the test on
-        @param verify: True to test both on and off for the AC power and to
-            check with the host whether it sees the same state
-        """
-        self._host = host
-        self._host_at = autotest.Autotest(host)
-        self._verify = verify
-
-
-    def _set_power(self, power_on):
-        if power_on:
-            self._host.power_on()
-        else:
-            self._host.power_off()
-
-        if self._verify:
-            self._host_at.run_test('power_CheckAC', check_client_result=True,
-                                   power_on=power_on)
-
-
-    def run_once(self, power_sequence=[True]):
-        """Run the test.i
-
-        @param power_sequence: Sequence of values to set the power state to in
-            order
-        """
-
-        for val in power_sequence:
-            self._set_power(val)
diff --git a/server/site_tests/power_ServoChargeStress/control.100loops b/server/site_tests/power_ServoChargeStress/control.100loops
index 1e06b64..9e741e4 100644
--- a/server/site_tests/power_ServoChargeStress/control.100loops
+++ b/server/site_tests/power_ServoChargeStress/control.100loops
@@ -12,6 +12,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Stress test Servo changing PD role.
diff --git a/server/site_tests/power_ServoChargeStress/control.3loops b/server/site_tests/power_ServoChargeStress/control.3loops
index 440269f..21a32de 100644
--- a/server/site_tests/power_ServoChargeStress/control.3loops
+++ b/server/site_tests/power_ServoChargeStress/control.3loops
@@ -12,6 +12,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 Verify that Servo can change PD role.
diff --git a/server/site_tests/power_ServoChargeStress/power_ServoChargeStress.py b/server/site_tests/power_ServoChargeStress/power_ServoChargeStress.py
index 2488e7d..4d4c76e 100644
--- a/server/site_tests/power_ServoChargeStress/power_ServoChargeStress.py
+++ b/server/site_tests/power_ServoChargeStress/power_ServoChargeStress.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/power_ServodWrapper/control b/server/site_tests/power_ServodWrapper/control
index c39e541..6d75fac 100644
--- a/server/site_tests/power_ServodWrapper/control
+++ b/server/site_tests/power_ServodWrapper/control
@@ -13,6 +13,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This wrapper test automates the process of power measurement with servod while
diff --git a/server/site_tests/power_ServodWrapper/power_ServodWrapper.py b/server/site_tests/power_ServodWrapper/power_ServodWrapper.py
index e27bdc9..6f432f5 100644
--- a/server/site_tests/power_ServodWrapper/power_ServodWrapper.py
+++ b/server/site_tests/power_ServodWrapper/power_ServodWrapper.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/power_USBHotplugInSuspend/control b/server/site_tests/power_USBHotplugInSuspend/control
index a3eaf9e..7670468 100644
--- a/server/site_tests/power_USBHotplugInSuspend/control
+++ b/server/site_tests/power_USBHotplugInSuspend/control
@@ -17,6 +17,7 @@
 TEST_CLASS = "power"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This tests both hotplug insertion and removal of a USB device while the DUT
diff --git a/server/site_tests/power_USBHotplugInSuspend/power_USBHotplugInSuspend.py b/server/site_tests/power_USBHotplugInSuspend/power_USBHotplugInSuspend.py
index b8af1e7..33c0f3b 100644
--- a/server/site_tests/power_USBHotplugInSuspend/power_USBHotplugInSuspend.py
+++ b/server/site_tests/power_USBHotplugInSuspend/power_USBHotplugInSuspend.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -53,10 +54,7 @@
         Start the client test power_KernelSuspend to suspend the client and
         do not wait for it to finish.
         """
-        client_at = autotest.Autotest(self._host)
-        # TODO(scottz): Add server side support to sys_power: crosbug.com/38115
-        client_at.run_test('power_KernelSuspend', background=True,
-                           seconds=_SUSPEND_TIME)
+        self._host.suspend_bg(suspend_time=_SUSPEND_TIME)
 
     def _suspend_and_hotplug(self, insert):
         """
diff --git a/server/site_tests/power_WakeSources/README.md b/server/site_tests/power_WakeSources/README.md
index 1422d13..2998486 100644
--- a/server/site_tests/power_WakeSources/README.md
+++ b/server/site_tests/power_WakeSources/README.md
@@ -64,8 +64,8 @@
     :INFO:powerd_dbus_suspend.cc(57)] Wakeup type : other`.
 
 
-[Dark Resume]: https://chromium.googlesource.com/chromiumos/platform2/+/master/power_manager/docs/dark_resume.md
-[dark resume is disabled]: https://chromium.googlesource.com/chromiumos/platform2/+/master/power_manager/docs/dark_resume.md#Disabling-Dark-Resume
-[keyboard.hex]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/servo/firmware/usbkm/KeyboardSerial/Keyboard.hex
-[firmware_FlashServoKeyboardMap]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/master/server/site_tests/firmware_FlashServoKeyboardMap/
-[chromeos-platform-power]: chromeos-platform-power@google.com
\ No newline at end of file
+\[Dark Resume\]: https://chromium.googlesource.com/chromiumos/platform2/+/main/power_manager/docs/dark_resume.md <br>
+\[dark resume is disabled\]: https://chromium.googlesource.com/chromiumos/platform2/+/main/power_manager/docs/dark_resume.md#Disabling-Dark-Resume <br>
+\[keyboard.hex\]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/main/servo/firmware/usbkm/KeyboardSerial/Keyboard.hex <br>
+\[firmware_FlashServoKeyboardMap\]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/refs/heads/main/server/site_tests/firmware_FlashServoKeyboardMap/ <br>
+\[chromeos-platform-power\]: chromeos-platform-power@google.com <br>
\ No newline at end of file
diff --git a/server/site_tests/power_WakeSources/control b/server/site_tests/power_WakeSources/control
index 850d96b..a939e20 100644
--- a/server/site_tests/power_WakeSources/control
+++ b/server/site_tests/power_WakeSources/control
@@ -14,6 +14,7 @@
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
 ATTRIBUTES = "suite:power_build"
+PY_VERSION = 3
 
 DOC = """
 Tests the following
diff --git a/server/site_tests/power_WakeSources/power_WakeSources.py b/server/site_tests/power_WakeSources/power_WakeSources.py
index 1be2984..ed4e066 100644
--- a/server/site_tests/power_WakeSources/power_WakeSources.py
+++ b/server/site_tests/power_WakeSources/power_WakeSources.py
@@ -1,8 +1,10 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
+import re
 import time
 
 from autotest_lib.client.common_lib import autotest_enum, error
@@ -30,10 +32,6 @@
 # List of wake sources expected to cause a dark resume.
 DARK_RESUME_SOURCES = ['RTC', 'AC_CONNECTED', 'AC_DISCONNECTED']
 
-# Max time taken by the device to resume. This gives enough time for the device
-# to establish network connection with the autotest server
-SECS_FOR_RESUMING = 15
-
 # Time in future after which RTC goes off when testing wake due to RTC alarm.
 RTC_WAKE_SECS = 20
 
@@ -139,6 +137,51 @@
         ec_cmd += ec_arg[base_state]
         self._ec.send_command(ec_cmd)
 
+    def _x86_get_ec_wake_mask(self):
+        # Check both the S0ix and S3 wake masks.
+        try:
+            s0ix_wake_mask = int(self._host.run(
+                    'ectool hostevent get %d' %
+                    chrome_ec.EC_HOST_EVENT_LAZY_WAKE_MASK_S0IX).stdout,
+                                 base=16)
+        except error.AutoservRunError as e:
+            s0ix_wake_mask = 0
+            logging.info(
+                    '"ectool hostevent get" failed for s0ix wake mask with'
+                    ' exception: %s', str(e))
+
+        try:
+            s3_wake_mask = int(self._host.run(
+                    'ectool hostevent get %d' %
+                    chrome_ec.EC_HOST_EVENT_LAZY_WAKE_MASK_S3).stdout,
+                               base=16)
+        except error.AutoservRunError as e:
+            s3_wake_mask = 0
+            logging.info(
+                    '"ectool hostevent get" failed for s3 wake mask with'
+                    ' exception: %s', str(e))
+
+        return s0ix_wake_mask | s3_wake_mask
+
+    def _arm_get_ec_wake_mask(self):
+        try:
+            s3_mkbpwakemask_out = self._host.run(
+                    'ectool mkbpwakemask get hostevent').stdout
+            match = re.match(r'MBKP hostevent wake mask: (0x[0-9A-Fa-f]+)',
+                             s3_mkbpwakemask_out)
+            if match:
+                return int(match.group(1), base=16)
+            else:
+                logging.info(
+                        '"ectool mkbpwakemask get hostevent" returned: %s',
+                        s3_mkbpwakemask_out)
+        except error.AutoservRunError as e:
+            logging.info(
+                    '"ectool mkbpwakemask get hostevent" failed with'
+                    ' exception: %s', str(e))
+
+        return 0
+
     def _is_valid_wake_source(self, wake_source):
         """Check if |wake_source| is valid for DUT.
 
@@ -176,35 +219,18 @@
 
                 return False
         if wake_source in ['AC_CONNECTED', 'AC_DISCONNECTED']:
+            arch = self._host.get_architecture()
+            wake_mask = 0
             if not self._chg_manager:
                 logging.warning(
                     'Unable to test AC connect/disconnect with this '
                     'servo setup')
                 return False
-            # Check both the S0ix and S3 wake masks.
-            try:
-                s0ix_wake_mask = int(self._host.run(
-                        'ectool hostevent get %d' %
-                        chrome_ec.EC_HOST_EVENT_LAZY_WAKE_MASK_S0IX).stdout,
-                                     base=16)
-            except error.AutoservRunError as e:
-                s0ix_wake_mask = 0
-                logging.info(
-                        '"ectool hostevent get" failed for s0ix wake mask with'
-                        ' exception: %s', str(e))
+            elif arch.startswith('x86'):
+                wake_mask = self._x86_get_ec_wake_mask()
+            elif arch.startswith('arm'):
+                wake_mask = self._arm_get_ec_wake_mask()
 
-            try:
-                s3_wake_mask = int(self._host.run(
-                        'ectool hostevent get %d' %
-                        chrome_ec.EC_HOST_EVENT_LAZY_WAKE_MASK_S3).stdout,
-                                   base=16)
-            except error.AutoservRunError as e:
-                s3_wake_mask = 0
-                logging.info(
-                        '"ectool hostevent get" failed for s3 wake mask with'
-                        ' exception: %s', str(e))
-
-            wake_mask = s0ix_wake_mask | s3_wake_mask
             supported = False
             if wake_source == 'AC_CONNECTED':
                 supported = wake_mask & chrome_ec.HOSTEVENT_AC_CONNECTED
@@ -214,9 +240,8 @@
             if not supported:
                 logging.info(
                         '%s not supported. Platforms launched in 2020 or before'
-                        ' may not require it. S0ix wake mask: 0x%x S3 wake'
-                        ' mask: 0x%x', wake_source, s0ix_wake_mask,
-                        s3_wake_mask)
+                        ' may not require it. Wake mask: 0x%x', wake_source,
+                        wake_mask)
                 return False
 
         return True
@@ -230,8 +255,8 @@
         """
         is_success = True
         logging.info(
-            'Testing wake by %s triggers a '
-            'full wake when dark resume is enabled.', wake_source)
+                'Testing wake by %s triggers a %s wake when dark resume is '
+                'enabled.', wake_source, 'full' if full_wake else 'dark')
         if not self._before_suspend(wake_source):
             logging.error('Before suspend action failed for %s', wake_source)
             # Still run the _after_resume callback since we can do things like
@@ -246,11 +271,10 @@
         # fully suspend.
         time.sleep(SECS_FOR_SUSPENDING)
         self._trigger_wake(wake_source)
-        # Wait at least |SECS_FOR_RESUMING| secs for the device to
-        # resume.
-        time.sleep(SECS_FOR_RESUMING)
 
-        if not self._host.is_up_fast():
+        # Wait no longer than just under |RTC_WAKE_SECS|, so that a resume can
+        # still be attributed to |wake_source| rather than the RTC.
+        if not self._host.wait_up(timeout=RTC_WAKE_SECS - 1):
             logging.error(
                     'Device did not resume from suspend for %s.'
                     ' Waking system with power button then RTC.', wake_source)
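
The power_WakeSources.py hunks above split the EC wake-mask query into _x86_get_ec_wake_mask and _arm_get_ec_wake_mask and then gate the AC connect/disconnect wake sources on a single combined mask. A minimal sketch of that gating logic follows; the chrome_ec import path and the HOSTEVENT_AC_DISCONNECTED constant name are assumptions by analogy with the HOSTEVENT_AC_CONNECTED check shown in the diff, not code from this patch.

```python
# Sketch only, not part of this patch.
from autotest_lib.server.cros.servo import chrome_ec  # import path assumed


def ac_wake_supported(host, wake_source, x86_mask_fn, arm_mask_fn):
    """Return True if the EC wake mask advertises the requested AC event."""
    arch = host.get_architecture()
    if arch.startswith('x86'):
        wake_mask = x86_mask_fn()   # S0ix mask OR'd with the S3 mask
    elif arch.startswith('arm'):
        wake_mask = arm_mask_fn()   # parsed from 'ectool mkbpwakemask get hostevent'
    else:
        wake_mask = 0
    if wake_source == 'AC_CONNECTED':
        event_bit = chrome_ec.HOSTEVENT_AC_CONNECTED
    else:
        event_bit = chrome_ec.HOSTEVENT_AC_DISCONNECTED  # name assumed
    return bool(wake_mask & event_bit)
```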
diff --git a/server/site_tests/provision_CheetsUpdate/README.md b/server/site_tests/provision_CheetsUpdate/README.md
index 8bc41ab..17a301d 100644
--- a/server/site_tests/provision_CheetsUpdate/README.md
+++ b/server/site_tests/provision_CheetsUpdate/README.md
@@ -24,13 +24,13 @@
 ```
 
 Warning: If the command fails, check run\_push\_to\_device method in
-[provision_CheetsUpdate.py](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/master/server/site_tests/provision_CheetsUpdate/provision_CheetsUpdate.py) for required params.
+[provision_CheetsUpdate.py](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/main/server/site_tests/provision_CheetsUpdate/provision_CheetsUpdate.py) for required params.
 
 ## Updating the autotest
 
-After you submit your changes, [Chrome OS lab deputy will push it to
+After you submit your changes, [ChromeOS lab deputy will push it to
 prod](https://sites.google.com/a/google.com/chromeos/for-team-members/infrastructure/chromeos-admin/push-to-prod).
-Send a heads up to [Chrome OS lab
+Send a heads up to [ChromeOS lab
 deputy](https://sites.google.com/a/google.com/chromeos/for-team-members/infrastructure/chrome-os-infrastructure-deputy)
 that this change is there.
 
diff --git a/server/site_tests/provision_CheetsUpdate/control b/server/site_tests/provision_CheetsUpdate/control
index f3c6570..0b4dc79 100644
--- a/server/site_tests/provision_CheetsUpdate/control
+++ b/server/site_tests/provision_CheetsUpdate/control
@@ -8,6 +8,7 @@
 DEPENDENCIES = 'arc'
 TEST_TYPE = 'server'
 TIME = 'LENGTHY'
+PY_VERSION = 3
 
 DOC = """This test downloads and installs an Android test image on the DUT to
 prepare the DUT for Android PFQ tests.
diff --git a/server/site_tests/provision_Cr50TOT/control b/server/site_tests/provision_Cr50TOT/control
index 8c5884c..d4d6cab 100644
--- a/server/site_tests/provision_Cr50TOT/control
+++ b/server/site_tests/provision_Cr50TOT/control
@@ -14,6 +14,7 @@
 TEST_CLASS = "provision"
 TEST_TYPE = "Server"
 DEPENDENCIES = "servo_state:WORKING,cr50-ro-keyid:dev"
+PY_VERSION = 3
 
 DOC = """
 Update the Cr50 to the TOT image from the latest reef build. This requires that
diff --git a/server/site_tests/provision_Cr50TOT/provision_Cr50TOT.py b/server/site_tests/provision_Cr50TOT/provision_Cr50TOT.py
index 329424a..761d1cc 100644
--- a/server/site_tests/provision_Cr50TOT/provision_Cr50TOT.py
+++ b/server/site_tests/provision_Cr50TOT/provision_Cr50TOT.py
@@ -8,12 +8,11 @@
 
 import logging
 import os
-import re
 
 from autotest_lib.client.common_lib.cros import cr50_utils
 from autotest_lib.client.common_lib import error
 from autotest_lib.server import utils
-from autotest_lib.server.cros import filesystem_util, gsutil_wrapper
+from autotest_lib.server.cros import gsutil_wrapper
 from autotest_lib.server.cros.faft.firmware_test import FirmwareTest
 
 
@@ -89,18 +88,12 @@
             try:
                 return self.get_cr50_build(latest_build, REMOTE_TMPDIR)
             except Exception as e:
-                logging.warn('Unable to find %s cr50 image %s', latest_build, e)
+                logging.warning('Unable to find %s cr50 image %s',
+                                latest_build, e)
         raise error.TestFail('Unable to find latest cr50 image in %s' %
                              latest_builds)
 
 
-    def get_bin_version(self, dut_path):
-        """Get the cr50 version from the image."""
-        find_ver_cmd = 'grep -a cr50_v.*tpm2 %s' % dut_path
-        version_output = self.host.run(find_ver_cmd).stdout.strip()
-        return re.findall('cr50_v\S+\s', version_output)[0].strip()
-
-
     def run_once(self, host, force=False):
         """Update cr50 to the TOT image from the reef builder."""
         # TODO(mruthven): remove once the test is successfully scheduled.
@@ -114,19 +107,12 @@
         logging.info('cr50 image is at %s', cr50_path)
         local_path = os.path.join(self.resultsdir, 'cr50.bin.tot')
         self.host.get_file(cr50_path, local_path)
-        expected_version = self.get_bin_version(cr50_path)
 
         cr50_utils.GSCTool(self.host, ['-a', cr50_path])
 
         self.cr50.wait_for_reboot(
                 timeout=self.faft_config.gsc_update_wait_for_reboot)
         cr50_version = self.cr50.get_active_version_info()[3].split('/')[-1]
-        logging.info('Cr50 running %s. Expected %s', cr50_version,
-                     expected_version)
-        # TODO(mruthven): Decide if failing to update should be a provisioning
-        # failure. Raising a failure will prevent the suite from running. See
-        # how often it fails and why.
-        if cr50_version.split('/')[-1] != expected_version:
-            logging.info('Unable to udpate Cr50.')
-        filesystem_util.make_rootfs_writable(self.host)
+        logging.info('Cr50 running %s after update', cr50_version)
+        self.make_rootfs_writable()
         cr50_utils.InstallImage(self.host, local_path, cr50_utils.CR50_PREPVT)
diff --git a/server/site_tests/provision_Cr50Update/control b/server/site_tests/provision_Cr50Update/control
index bc504b6..3a847e5 100644
--- a/server/site_tests/provision_Cr50Update/control
+++ b/server/site_tests/provision_Cr50Update/control
@@ -2,7 +2,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-
 AUTHOR = "chromeos-lab-infrastructure"
 NAME = "provision_Cr50Update"
 PURPOSE = "Provision a system to the correct firmware version."
@@ -11,6 +10,7 @@
 TEST_CLASS = "provision"
 TEST_TYPE = "Server"
 DEPENDENCIES = "servo_state:WORKING,cr50-ro-keyid:prod"
+PY_VERSION = 3
 
 DOC = """
 This is a test used by the provision control segment in autoserv to set the
diff --git a/server/site_tests/provision_FactoryImage/control b/server/site_tests/provision_FactoryImage/control
index 1cf5276..8c68716 100644
--- a/server/site_tests/provision_FactoryImage/control
+++ b/server/site_tests/provision_FactoryImage/control
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Install"
 TEST_CLASS = "provision"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test installs a specified factory image onto a servo-connected DUT.
diff --git a/server/site_tests/provision_FirmwareUpdate/control b/server/site_tests/provision_FirmwareUpdate/control
index 1080ba1..545c745 100644
--- a/server/site_tests/provision_FirmwareUpdate/control
+++ b/server/site_tests/provision_FirmwareUpdate/control
@@ -2,7 +2,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-
 AUTHOR = "chromeos-lab-infrastructure"
 NAME = "provision_FirmwareUpdate"
 PURPOSE = "Provision a system to the correct firmware version."
@@ -10,6 +9,7 @@
 TEST_CATEGORY = "System"
 TEST_CLASS = "provision"
 TEST_TYPE = "Server"
+PY_VERSION = 3
 
 DOC = """
 This is a test used by the provision control segment in autoserv to set the
diff --git a/server/site_tests/provision_FirmwareUpdate/control.rw_only b/server/site_tests/provision_FirmwareUpdate/control.rw_only
index ccff1a6..c36bcf9 100644
--- a/server/site_tests/provision_FirmwareUpdate/control.rw_only
+++ b/server/site_tests/provision_FirmwareUpdate/control.rw_only
@@ -2,7 +2,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-
 AUTHOR = "chromeos-lab-infrastructure"
 NAME = "provision_FirmwareUpdate.rw_only"
 PURPOSE = "Provision a system to the correct firmware version, update RW only."
@@ -10,6 +9,7 @@
 TEST_CATEGORY = "System"
 TEST_CLASS = "provision"
 TEST_TYPE = "Server"
+PY_VERSION = 3
 
 DOC = """
 This is a test used by the provision control segment in autoserv to set the
diff --git a/server/site_tests/provision_FirmwareUpdate/provision_FirmwareUpdate.py b/server/site_tests/provision_FirmwareUpdate/provision_FirmwareUpdate.py
index 96edef3..6ff8521 100644
--- a/server/site_tests/provision_FirmwareUpdate/provision_FirmwareUpdate.py
+++ b/server/site_tests/provision_FirmwareUpdate/provision_FirmwareUpdate.py
@@ -8,11 +8,7 @@
 from __future__ import print_function
 
 import logging
-import sys
 
-import six
-
-from autotest_lib.client.common_lib import error
 from autotest_lib.server import test
 
 
@@ -69,4 +65,4 @@
                                   try_scp=True)
         except Exception as e:
             logging.error(e)
-            six.reraise(error.TestFail, str(e), sys.exc_info()[2])
+            raise
\ No newline at end of file
diff --git a/server/site_tests/provision_QuickProvision/control b/server/site_tests/provision_QuickProvision/control
index 5c14bd7..a9cdc77 100644
--- a/server/site_tests/provision_QuickProvision/control
+++ b/server/site_tests/provision_QuickProvision/control
@@ -2,7 +2,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-
 AUTHOR = "milleral, chromeos-lab-infrastructure"
 NAME = "provision_QuickProvision"
 PURPOSE = "Provision a system to the correct OS version."
@@ -10,6 +9,7 @@
 TEST_CATEGORY = "System"
 TEST_CLASS = "provision"
 TEST_TYPE = "Server"
+PY_VERSION = 3
 
 DOC = """
 This is a test used by the provision control segment in autoserv to set the
diff --git a/server/site_tests/provision_QuickProvision/provision_QuickProvision.py b/server/site_tests/provision_QuickProvision/provision_QuickProvision.py
index 2a11ac5..2c90aee 100644
--- a/server/site_tests/provision_QuickProvision/provision_QuickProvision.py
+++ b/server/site_tests/provision_QuickProvision/provision_QuickProvision.py
@@ -4,6 +4,7 @@
 
 import logging
 import re
+import six
 import sys
 import time
 
@@ -17,7 +18,7 @@
 from autotest_lib.server.cros import provisioner
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -140,7 +141,7 @@
         try:
             ds = dev_server.ImageServer.resolve(image, host.hostname)
         except dev_server.DevServerException as e:
-            raise error.TestFail, str(e), sys.exc_info()[2]
+            six.reraise(error.TestFail, str(e), sys.exc_info()[2])
 
         url = _IMAGE_URL_PATTERN % (ds.url(), image)
 
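
The provision_QuickProvision.py hunk above replaces the Python 2-only `raise ExcClass, msg, traceback` statement with six.reraise, while the provision_FirmwareUpdate.py change earlier drops its six.reraise call in favor of a bare `raise`. A hedged sketch of the re-raise-with-traceback pattern follows; TestFail here is a stand-in class, not autotest's error.TestFail, and the sketch passes an exception instance as the value.

```python
# Sketch only: re-raise a caught exception as a different type while keeping
# the original traceback, in a way that works on both Python 2 and Python 3.
import sys

import six


class TestFail(Exception):
    """Stand-in failure type for illustration."""


def resolve_or_fail():
    try:
        raise ValueError('could not resolve devserver')  # simulated failure
    except ValueError as e:
        # Unlike `raise TestFail, str(e), tb` (Python 2 syntax only), this
        # keeps the original traceback under both interpreters.
        six.reraise(TestFail, TestFail(str(e)), sys.exc_info()[2])
```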
diff --git a/server/site_tests/pvs_Sequence/control.ExampleSequence b/server/site_tests/pvs_Sequence/control.ExampleSequence
new file mode 100644
index 0000000..b035d33
--- /dev/null
+++ b/server/site_tests/pvs_Sequence/control.ExampleSequence
@@ -0,0 +1,46 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS PVS Team"
+NAME = "pvs_Sequence.ExampleSequence"
+PURPOSE = "stub_Pass and PassServer, instrumented as a basic sequence example"
+CRITERIA = "This test is a benchmark."
+TIME = "SHORT"
+TEST_CATEGORY = "Benchmark"
+TEST_CLASS = "pvs"
+TEST_TYPE = "server"
+ATTRIBUTES = ""
+PY_VERSION = 3
+DOC = """
+This is an example sequence, showing how to use the pvs_Sequence test class to
+standardize sequence implementation in control files. When creating a new
+sequence, start from this example implementation and:
+
+(1) Make sure your NAME is in the form 'pvs_Sequence.<sequence-name>'
+(2) Fill the SEQUENCE data structure below as the fields are indicated
+"""
+
+from autotest_lib.server import utils
+
+args_dict = utils.args_to_dict(args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+# Fill this list with tuples of (<test-name>, <args_dict>, <is_server_test>)
+SEQUENCE = [
+    ("stub_PassServer", {}, True),
+    ("stub_Pass",       {}, False)
+]
+
+def run(machine):
+    # Setup the client machine.
+    host = hosts.create_host(machine, servo_args=servo_args)
+
+    # Run the pvs_Sequence wrapper, passing it the SEQUENCE
+    # defined above.
+    job.run_test("pvs_Sequence",
+                           tag=NAME.split('.')[1],
+                           host=host,
+                           sequence=SEQUENCE)
+
+parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/pvs_Sequence/pvs_Sequence.py b/server/site_tests/pvs_Sequence/pvs_Sequence.py
new file mode 100644
index 0000000..f426b34
--- /dev/null
+++ b/server/site_tests/pvs_Sequence/pvs_Sequence.py
@@ -0,0 +1,21 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server.cros.pvs import sequence
+
+
+class pvs_Sequence(sequence.test_sequence):
+    """
+    pvs_Sequence implements a test_sequence (a wrapper around test.test) that
+    instruments a series of tests and surfaces their results independently.
+    """
+
+    version = 1
+
+    def initialize(self, **args_dict):
+        """
+        initialize implements the initialize call in test.test, and is called before
+        execution of the test.
+        """
+        super(pvs_Sequence, self).initialize(sequence=args_dict['sequence'])
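
The test_sequence base class referenced here (autotest_lib.server.cros.pvs.sequence) is not part of this diff, so the following is only a hypothetical sketch of what such a wrapper conceptually does with the (test_name, args_dict, is_server_test) tuples supplied by control.ExampleSequence; none of the names below should be read as the real API.

```python
# Hypothetical sketch (not the real pvs sequence module): dispatch each
# (test_name, args_dict, is_server_test) tuple either to the server-side job
# or to a client-side Autotest instance.
class SequenceSketch(object):
    def __init__(self, sequence):
        self._sequence = sequence

    def run(self, job, host, client_at):
        for name, args, is_server in self._sequence:
            if is_server:
                job.run_test(name, host=host, **args)
            else:
                client_at.run_test(name, **args)
```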
diff --git a/server/site_tests/rlz_CheckPing/control b/server/site_tests/rlz_CheckPing/control
index 2401ccf..fedf0a2 100644
--- a/server/site_tests/rlz_CheckPing/control
+++ b/server/site_tests/rlz_CheckPing/control
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:rlz"
+PY_VERSION = 3
 DOC = """
 This tests the RLZ client install (CAI) and first-use event (CAF) pings are
 sent on first use of the device. Also checks that the CAF ping is not resent
diff --git a/server/site_tests/rlz_CheckPing/control.future_embargo_date b/server/site_tests/rlz_CheckPing/control.future_embargo_date
index 18e5706..95802f0 100644
--- a/server/site_tests/rlz_CheckPing/control.future_embargo_date
+++ b/server/site_tests/rlz_CheckPing/control.future_embargo_date
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:rlz"
+PY_VERSION = 3
 DOC = """
 This tests that no RLZ first-use event (CAF) ping is sent if today's date is
 earlier than the rlz_embargo_end_date.
diff --git a/server/site_tests/rlz_CheckPing/control.guest b/server/site_tests/rlz_CheckPing/control.guest
index e4f6acc..2ece64c 100644
--- a/server/site_tests/rlz_CheckPing/control.guest
+++ b/server/site_tests/rlz_CheckPing/control.guest
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:rlz"
+PY_VERSION = 3
 DOC = """
 This tests the RLZ client install (CAI) and first-use event (CAF) pings are
 sent on first use of the device in guest mode, as long as the device has been
diff --git a/server/site_tests/rlz_CheckPing/control.no_embargo_date b/server/site_tests/rlz_CheckPing/control.no_embargo_date
index 458ae8a..c566aa2 100644
--- a/server/site_tests/rlz_CheckPing/control.no_embargo_date
+++ b/server/site_tests/rlz_CheckPing/control.no_embargo_date
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:rlz"
+PY_VERSION = 3
 DOC = """
 This tests the RLZ install event (CAI) and first-use event (CAF) pings are
 sent on first use, even if the rlz_embargo_end_date has not been set in the
diff --git a/server/site_tests/rlz_CheckPing/control.no_ping_after_reboot b/server/site_tests/rlz_CheckPing/control.no_ping_after_reboot
index c9f3f82..000890b 100644
--- a/server/site_tests/rlz_CheckPing/control.no_ping_after_reboot
+++ b/server/site_tests/rlz_CheckPing/control.no_ping_after_reboot
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:rlz"
+PY_VERSION = 3
 DOC = """
 This tests the RLZ client install (CAI) and first-use event (CAF) pings are
 sent on first use of the device. It then checks that the CAF ping is not
diff --git a/server/site_tests/rlz_CheckPing/control.second_user_sends_ping b/server/site_tests/rlz_CheckPing/control.second_user_sends_ping
index a6f56d4..cce9c47 100644
--- a/server/site_tests/rlz_CheckPing/control.second_user_sends_ping
+++ b/server/site_tests/rlz_CheckPing/control.second_user_sends_ping
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:rlz"
+PY_VERSION = 3
 DOC = """
 This tests that the RLZ first-use event (CAF) ping will be sent by the second
 user, if it was not sent by the first user to log in. It also verifies that
diff --git a/server/site_tests/rlz_CheckPing/control.should_not_send_ping b/server/site_tests/rlz_CheckPing/control.should_not_send_ping
index 7e14e69..acd0114 100644
--- a/server/site_tests/rlz_CheckPing/control.should_not_send_ping
+++ b/server/site_tests/rlz_CheckPing/control.should_not_send_ping
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:rlz"
+PY_VERSION = 3
 DOC = """
 This tests that no first-use event (CAF) ping is sent if the
 should_send_rlz_ping vpd setting is 0. The install event (CAI) ping will still
diff --git a/server/site_tests/rlz_CheckPing/control.should_send_rlz_ping_missing b/server/site_tests/rlz_CheckPing/control.should_send_rlz_ping_missing
index 031fe2b..8927592 100644
--- a/server/site_tests/rlz_CheckPing/control.should_send_rlz_ping_missing
+++ b/server/site_tests/rlz_CheckPing/control.should_send_rlz_ping_missing
@@ -10,6 +10,7 @@
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
 ATTRIBUTES = "suite:rlz"
+PY_VERSION = 3
 DOC = """
 This tests that no first-use event (CAF) ping is sent if the
 should_send_rlz_ping vpd setting is missing. The install event
diff --git a/server/site_tests/rlz_CheckPing/rlz_CheckPing.py b/server/site_tests/rlz_CheckPing/rlz_CheckPing.py
index 02c2b75..a59334c 100644
--- a/server/site_tests/rlz_CheckPing/rlz_CheckPing.py
+++ b/server/site_tests/rlz_CheckPing/rlz_CheckPing.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -201,5 +202,5 @@
                                tag="check_ping_not_resent")
 
             # Confirm VPD settings are also unchanged
-            self._check_rlz_vpd_settings_post_ping(
-                should_send_rlz_ping=0, rlz_embargo_end_date=None)
\ No newline at end of file
+            self._check_rlz_vpd_settings_post_ping(should_send_rlz_ping=0,
+                                                   rlz_embargo_end_date=None)
diff --git a/server/site_tests/sequences/control.memory_qual b/server/site_tests/sequences/control.memory_qual
index 99fb48e..ebb530e 100644
--- a/server/site_tests/sequences/control.memory_qual
+++ b/server/site_tests/sequences/control.memory_qual
@@ -8,6 +8,8 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+EXTENDED_TIMEOUT = 1209600  # 2 weeks
+PY_VERSION = 3
 
 DOC = """
 Sequence for qualification new memory part to AVL.
@@ -62,13 +64,13 @@
         ('i386', 'i686', 'x86_64') else CLIENT_TESTS)
     for test, argv in client_tests:
         client.reboot()
-        client_at.run_test(test, **argv)
+        client_at.run_test(test, timeout=EXTENDED_TIMEOUT, **argv)
 
 def run_server_test(machine):
     client = hosts.create_host(machine)
     for test, argv in SERVER_TESTS:
         client.reboot()
-        job.run_test(test, client_ip=machine, **argv)
+        job.run_test(test, client_ip=machine, timeout=EXTENDED_TIMEOUT, **argv)
 
 
 job.parallel_on_machines(run_client_test, machines)
diff --git a/server/site_tests/sequences/control.memory_qual_moblab b/server/site_tests/sequences/control.memory_qual_moblab
index 1c111a3..5eb8c03 100644
--- a/server/site_tests/sequences/control.memory_qual_moblab
+++ b/server/site_tests/sequences/control.memory_qual_moblab
@@ -11,6 +11,8 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+EXTENDED_TIMEOUT = 1209600  # 2 weeks
+PY_VERSION = 3
 
 DOC = """
 Sequence for qualification new memory part to AVL.
@@ -65,13 +67,13 @@
         ('i386', 'i686', 'x86_64') else CLIENT_TESTS)
     for test, argv in client_tests:
         client.reboot()
-        client_at.run_test(test, **argv)
+        client_at.run_test(test, timeout=EXTENDED_TIMEOUT, **argv)
 
 def run_server_test(machine):
     client = hosts.create_host(machine)
     for test, argv in SERVER_TESTS:
         client.reboot()
-        job.run_test(test, client_ip=machine, **argv)
+        job.run_test(test, client_ip=machine, timeout=EXTENDED_TIMEOUT, **argv)
 
 keyval = dict()
 try:
diff --git a/server/site_tests/sequences/control.memory_qual_quick b/server/site_tests/sequences/control.memory_qual_quick
index 60e998d..f56a2f3 100644
--- a/server/site_tests/sequences/control.memory_qual_quick
+++ b/server/site_tests/sequences/control.memory_qual_quick
@@ -11,6 +11,7 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Sequence for testing the Memory Qual running from the fixed devices.
diff --git a/server/site_tests/sequences/control.power b/server/site_tests/sequences/control.power
index eaea003..43ca49a 100644
--- a/server/site_tests/sequences/control.power
+++ b/server/site_tests/sequences/control.power
@@ -2,13 +2,14 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "Power"
 TIME = "LONG"
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
 DEPENDENCIES = "rpm"
+PY_VERSION = 3
 
 DOC = """
 This test suite runs automated power tests that should all pass and that
diff --git a/server/site_tests/sequences/control.power_build b/server/site_tests/sequences/control.power_build
index d8bc47a..a64802c 100644
--- a/server/site_tests/sequences/control.power_build
+++ b/server/site_tests/sequences/control.power_build
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "Power per-build tests"
 TIME = "MEDIUM"
 TEST_CATEGORY = "Functional"
@@ -10,6 +10,7 @@
 ATTRIBUTES = "suite:power_build"
 TEST_TYPE = "server"
 DEPENDENCIES = "rpm"
+PY_VERSION = 3
 
 DOC = """
 This test suite runs automated power tests that should all pass. These
diff --git a/server/site_tests/sequences/control.power_daily b/server/site_tests/sequences/control.power_daily
index 353383d..6793402 100644
--- a/server/site_tests/sequences/control.power_daily
+++ b/server/site_tests/sequences/control.power_daily
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "Power daily tests"
 ATTRIBUTES = "suite:power_daily"
 TIME = "LONG"
@@ -10,6 +10,7 @@
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
 DEPENDENCIES = "power:battery, rpm"
+PY_VERSION = 3
 
 DOC = """
 This test suite runs automated power tests that should all pass. These tests
diff --git a/server/site_tests/sequences/control.power_dashboard_fast b/server/site_tests/sequences/control.power_dashboard_fast
index 16349e84..8993c1a 100644
--- a/server/site_tests/sequences/control.power_dashboard_fast
+++ b/server/site_tests/sequences/control.power_dashboard_fast
@@ -8,6 +8,7 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Sequence to make sure that power_Dashboard tests work fine.
diff --git a/server/site_tests/sequences/control.power_dashboard_fast_lab b/server/site_tests/sequences/control.power_dashboard_fast_lab
index 2c8ab58..e6cf843 100644
--- a/server/site_tests/sequences/control.power_dashboard_fast_lab
+++ b/server/site_tests/sequences/control.power_dashboard_fast_lab
@@ -8,6 +8,7 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Sequence to make sure that power_Dashboard.full_lab tests work fine.
@@ -58,7 +59,7 @@
 
     wlan_ip = client.get_wlan_ip()
     if not wlan_ip:
-        autotest.Autotest(client).run_test('dummy_Pass')
+        autotest.Autotest(client).run_test('stub_Pass')
         if not client.connect_to_wifi(WIFI_SSID):
             raise error.TestFail('Can not connect to wifi.')
         wlan_ip = client.get_wlan_ip()
diff --git a/server/site_tests/sequences/control.power_dashboard_full b/server/site_tests/sequences/control.power_dashboard_full
index 910a5fb..6020d2c 100644
--- a/server/site_tests/sequences/control.power_dashboard_full
+++ b/server/site_tests/sequences/control.power_dashboard_full
@@ -8,6 +8,7 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Sequence for upload common power test data to dashboard.
diff --git a/server/site_tests/sequences/control.power_dashboard_full_lab b/server/site_tests/sequences/control.power_dashboard_full_lab
index 6c38592..1ca7fa3 100644
--- a/server/site_tests/sequences/control.power_dashboard_full_lab
+++ b/server/site_tests/sequences/control.power_dashboard_full_lab
@@ -8,6 +8,7 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Sequence for upload common power test data to dashboard.
@@ -51,7 +52,7 @@
 
     wlan_ip = client.get_wlan_ip()
     if not wlan_ip:
-        autotest.Autotest(client).run_test('dummy_Pass')
+        autotest.Autotest(client).run_test('stub_Pass')
         if not client.connect_to_wifi(WIFI_SSID):
             raise error.TestFail('Can not connect to wifi.')
         wlan_ip = client.get_wlan_ip()
diff --git a/server/site_tests/sequences/control.power_qual_fast b/server/site_tests/sequences/control.power_qual_fast
new file mode 100644
index 0000000..bf3b5c3
--- /dev/null
+++ b/server/site_tests/sequences/control.power_qual_fast
@@ -0,0 +1,96 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "puthik"
+NAME = "PowerQual.fast"
+ATTRIBUTES = "suite:power_qual_fast"
+DEPENDENCIES = 'wifi_connected_dut'
+TIME = "LENGTHY"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "suite"
+TEST_TYPE = "server"
+EXTENDED_TIMEOUT = 18900  # 5 Hours + 900 second guard.
+PY_VERSION = 3
+
+DOC = """
+Make sure that PowerQual.full works.
+"""
+
+import datetime
+from autotest_lib.client.common_lib import utils
+
+HOURS=60*60
+
+CLIENT_TESTS = [
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 50, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_LoadTest', {
+        'tag' : 'powerqual_fast', 'force_discharge' : True, 'loop_time' : 180,
+        'loop_count' : 1, 'test_low_batt_p' : 5}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 50, 'max_run_time': 5 * HOURS,
+        'tag' : 'powerqual_fast'}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoPlayback', {
+        'tag' : 'powerqual_fast', 'force_discharge' : True,
+        'secs_per_video' : 60,
+        'videos' : [('h264_1080_30fps', ''), ('vp9_1080_30fps', '')]}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 50, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoCall', {
+        'tag' : 'powerqual_fast', 'force_discharge' : True, 'duration': 180}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 50, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_Idle', {
+        'tag' : 'powerqual_fast', 'force_discharge' : True, 'idle_secs': 10}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 50, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoEncode', {
+        'tag' : 'powerqual_fast', 'force_discharge' : True,
+        'seconds_per_test' : 30, 'format' : [
+            ('h264', 'hd', 24),
+            ('vp9', 'hvga', 24),
+            ('vp8', 'qhvga', 15)]}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 50, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_Display', {
+        'tag' : 'powerqual_fast', 'force_discharge' : True,
+        'brightness' : 'max', 'secs_per_page' : 20, 'seconds_period' : 1,
+        'pages' : ['white', 'black', 'white', 'checker1', 'white']}),
+
+    ('power_WaitForCoolDown', {}),
+    ('power_UiResume', {
+        'tag' : 'powerqual_fast'}),
+]
+
+# Workaround to make it compatible with moblab autotest UI.
+global args_dict
+try:
+    args_dict
+except NameError:
+    args_dict = utils.args_to_dict(args)
+
+# Use time as pdash_note if not supplied to track all tests in same suite.
+pdash_note = args_dict.get('pdash_note',
+                           NAME + '_' + datetime.datetime.utcnow().isoformat())
+
+def run_client_test(machine):
+    client = hosts.create_host(machine)
+    client_at = autotest.Autotest(client)
+
+    for test, argv in CLIENT_TESTS:
+        argv['pdash_note'] = pdash_note
+        client_at.run_test(test, timeout=EXTENDED_TIMEOUT, **argv)
+
+job.parallel_on_machines(run_client_test, machines)
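
The new *_qual control files all carry the same moblab workaround and pdash_note default shown above. A small standalone sketch of that idiom follows; the literal args list is hypothetical, and the assumption that utils.args_to_dict parses 'key=value' strings into a dict is illustrative rather than taken from this diff.

```python
# Sketch of the args_dict/pdash_note idiom used by the *_qual control files.
# In a real control file `args` is injected by the framework and moblab may
# pre-define args_dict; here both are stand-ins for illustration.
import datetime

from autotest_lib.client.common_lib import utils

args = ['pdash_note=example_lab_run']   # hypothetical control-file args
NAME = 'PowerQual.fast'

try:
    args_dict                           # already defined (e.g. by moblab UI)?
except NameError:
    args_dict = utils.args_to_dict(args)

# Fall back to a timestamped note so every run in the suite stays groupable.
pdash_note = args_dict.get('pdash_note',
                           NAME + '_' + datetime.datetime.utcnow().isoformat())
```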
diff --git a/server/site_tests/sequences/control.power_qual_full b/server/site_tests/sequences/control.power_qual_full
new file mode 100644
index 0000000..f203167
--- /dev/null
+++ b/server/site_tests/sequences/control.power_qual_full
@@ -0,0 +1,130 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "puthik"
+NAME = "PowerQual.full"
+ATTRIBUTES = "suite:power_qual_full"
+DEPENDENCIES = 'wifi_connected_dut'
+TIME = "LENGTHY"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "suite"
+TEST_TYPE = "server"
+EXTENDED_TIMEOUT = 58500  # 16 Hours + 900 second guard.
+PY_VERSION = 3
+
+DOC = """
+Sequence for power qualification.
+
+Mandatory tests
+- power_LoadTest (from full battery to 5%)
+- power_BatteryCharge (from 5% to full battery)
+- power_VideoPlayback (h264 1080p & vp9 1080p 1 hour each)
+- power_VideoCall (2 hours)
+
+Informational tests
+- power_Idle
+- power_VideoPlayback
+- power_VideoEncode
+- power_Display.brightness
+- power_Display.cabc
+"""
+
+import datetime
+from autotest_lib.client.common_lib import utils
+
+HOURS=60*60
+
+CLIENT_TESTS = [
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 100, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_LoadTest', {
+        'tag' : 'powerqual', 'force_discharge' : True, 'loop_time' : HOURS,
+        'loop_count' : 24, 'test_low_batt_p' : 5}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 100, 'max_run_time': 5 * HOURS,
+        'tag' : 'powerqual'}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoPlayback', {
+        'tag' : 'powerqual', 'force_discharge' : True, 'secs_per_video' : 3600,
+        'videos' : [('h264_1080_30fps', ''), ('vp9_1080_30fps', '')]}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 80, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoCall', {
+        'tag' : 'powerqual', 'force_discharge' : True,
+        'min_run_time_percent': 75}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 40, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_Idle', {
+        'tag' : 'powerqual', 'force_discharge' : True}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 35, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoPlayback', {
+        'tag' : 'powerqual_info', 'force_discharge' : True}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 30, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoEncode', {
+        'tag' : 'powerqual', 'force_discharge' : True}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 25, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_Display', {
+        'tag' : 'powerqual_brightness', 'force_discharge' : True,
+        'brightness' : 'all', 'pages' : ['white', 'black', 'checker1'],
+        'secs_per_page' : 20, 'seconds_period' : 1}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 20, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_Display', {
+        'tag' : 'powerqual_cabc', 'force_discharge' : True,
+        'brightness' : 'max', 'secs_per_page' : 20, 'seconds_period' : 1,
+        'pages' : ['white', 'black',
+                   'white', 'grey10',
+                   'white', 'grey20',
+                   'white', 'grey30',
+                   'white', 'grey40',
+                   'white', 'grey50',
+                   'white', 'checker1',
+                   'white', 'checker90',
+                   'white', 'checker80',
+                   'white', 'checker70',
+                   'white', 'checker60',
+                   'white', 'checker50']}),
+
+    ('power_WaitForCoolDown', {}),
+    ('power_UiResume', {
+        'tag' : 'powerqual'}),
+]
+
+# Workaround to make it compatible with moblab autotest UI.
+global args_dict
+try:
+    args_dict
+except NameError:
+    args_dict = utils.args_to_dict(args)
+
+# Use time as pdash_note if not supplied to track all tests in same suite.
+pdash_note = args_dict.get('pdash_note',
+                           NAME + '_' + datetime.datetime.utcnow().isoformat())
+
+def run_client_test(machine):
+    client = hosts.create_host(machine)
+    client_at = autotest.Autotest(client)
+
+    for test, argv in CLIENT_TESTS:
+        argv['pdash_note'] = pdash_note
+        client_at.run_test(test, timeout=EXTENDED_TIMEOUT, **argv)
+
+job.parallel_on_machines(run_client_test, machines)
diff --git a/server/site_tests/sequences/control.power_qual_lab b/server/site_tests/sequences/control.power_qual_lab
new file mode 100644
index 0000000..1739baf
--- /dev/null
+++ b/server/site_tests/sequences/control.power_qual_lab
@@ -0,0 +1,142 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "puthik"
+NAME = "PowerQual.lab"
+TIME = "LENGTHY"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "suite"
+TEST_TYPE = "server"
+EXTENDED_TIMEOUT = 58500  # 16 Hours + 900 second guard.
+PY_VERSION = 3
+
+DOC = """
+Lab version of PowerQual.full
+
+Differences from PowerQual.full
+- Allow ethernet for power_LoadTest
+- Do not use design capacity when determining target battery charge
+
+Mandatory tests
+- power_LoadTest (from full battery to 5%)
+- power_BatteryCharge (from 5% to full battery)
+- power_VideoPlayback (h264 1080p & vp9 1080p 1 hour each)
+- power_VideoCall (2 hours)
+
+Informational tests
+- power_Idle
+- power_VideoPlayback
+- power_VideoEncode
+- power_Display.brightness
+- power_Display.cabc
+"""
+
+import datetime
+from autotest_lib.client.common_lib import utils
+
+HOURS=60*60
+
+CLIENT_TESTS = [
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 20, 'max_run_time': 5 * HOURS,
+        'use_design_charge_capacity' : False}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoCall', {
+        'tag' : 'powerqual_lab', 'force_discharge' : True,
+        'min_run_time_percent': 75}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 20, 'max_run_time': 5 * HOURS,
+        'use_design_charge_capacity' : False}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoPlayback', {
+        'tag' : 'powerqual_lab', 'force_discharge' : True, 'secs_per_video' : 3600,
+        'videos' : [('h264_1080_30fps', ''), ('vp9_1080_30fps', '')]}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 20, 'max_run_time': 5 * HOURS,
+        'use_design_charge_capacity' : False}),
+    ('power_WaitForCoolDown', {}),
+    ('power_Idle', {
+        'tag' : 'powerqual_lab', 'force_discharge' : True}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 20, 'max_run_time': 5 * HOURS,
+        'use_design_charge_capacity' : False}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoPlayback', {
+        'tag' : 'powerqual_lab_info', 'force_discharge' : True}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 20, 'max_run_time': 5 * HOURS,
+        'use_design_charge_capacity' : False}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoEncode', {
+        'tag' : 'powerqual_lab', 'force_discharge' : True}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 20, 'max_run_time': 5 * HOURS,
+        'use_design_charge_capacity' : False}),
+    ('power_WaitForCoolDown', {}),
+    ('power_Display', {
+        'tag' : 'powerqual_lab_brightness', 'force_discharge' : True,
+        'brightness' : 'all', 'pages' : ['white', 'black', 'checker1'],
+        'secs_per_page' : 20, 'seconds_period' : 1}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 20, 'max_run_time': 5 * HOURS,
+        'use_design_charge_capacity' : False}),
+    ('power_WaitForCoolDown', {}),
+    ('power_Display', {
+        'tag' : 'powerqual_lab_cabc', 'force_discharge' : True,
+        'brightness' : 'max', 'secs_per_page' : 20, 'seconds_period' : 1,
+        'pages' : ['white', 'black',
+                   'white', 'grey10',
+                   'white', 'grey20',
+                   'white', 'grey30',
+                   'white', 'grey40',
+                   'white', 'grey50',
+                   'white', 'checker1',
+                   'white', 'checker90',
+                   'white', 'checker80',
+                   'white', 'checker70',
+                   'white', 'checker60',
+                   'white', 'checker50']}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 20, 'max_run_time': 5 * HOURS,
+        'use_design_charge_capacity' : False}),
+    ('power_WaitForCoolDown', {}),
+    ('power_LoadTest', {
+        'tag' : 'powerqual_lab', 'force_discharge' : True, 'loop_time' : HOURS,
+        'loop_count' : 24, 'test_low_batt_p' : 5, 'check_network' : False}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 100, 'max_run_time': 5 * HOURS,
+        'use_design_charge_capacity' : False, 'tag' : 'powerqual'}),
+    ('power_WaitForCoolDown', {}),
+    ('power_UiResume', {
+        'tag' : 'powerqual_lab'}),
+]
+
+# Workaround to make it compatible with moblab autotest UI.
+global args_dict
+try:
+    args_dict
+except NameError:
+    args_dict = utils.args_to_dict(args)
+
+# Use time as pdash_note if not supplied to track all tests in same suite.
+pdash_note = args_dict.get('pdash_note',
+                           NAME + '_' + datetime.datetime.utcnow().isoformat())
+
+def run_client_test(machine):
+    client = hosts.create_host(machine)
+    client_at = autotest.Autotest(client)
+
+    for test, argv in CLIENT_TESTS:
+        argv['pdash_note'] = pdash_note
+        client_at.run_test(test, timeout=EXTENDED_TIMEOUT, **argv)
+
+job.parallel_on_machines(run_client_test, machines)
diff --git a/server/site_tests/sequences/control.power_qual_mandatory b/server/site_tests/sequences/control.power_qual_mandatory
new file mode 100644
index 0000000..7ef9577
--- /dev/null
+++ b/server/site_tests/sequences/control.power_qual_mandatory
@@ -0,0 +1,76 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "puthik"
+NAME = "PowerQual.mandatory"
+TIME = "LENGTHY"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "suite"
+TEST_TYPE = "server"
+EXTENDED_TIMEOUT = 58500  # 16 Hours + 900 second guard.
+PY_VERSION = 3
+
+DOC = """
+Sequence for power qualification, mandatory tests only.
+
+- power_LoadTest (from full battery to 5%)
+- power_BatteryCharge (from 5% to full battery)
+- power_VideoPlayback (h264 1080p & vp9 1080p 1 hour each)
+- power_VideoCall (2 hours)
+"""
+
+import datetime
+from autotest_lib.client.common_lib import utils
+
+HOURS=60*60
+
+CLIENT_TESTS = [
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 100, 'max_run_time': 5 * HOURS}),
+    ('power_WaitForCoolDown', {}),
+    ('power_LoadTest', {
+        'tag' : 'powerqual', 'force_discharge' : True, 'loop_time' : HOURS,
+        'loop_count' : 24, 'test_low_batt_p' : 5}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 100, 'max_run_time': 5 * HOURS,
+        'tag' : 'powerqual'}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoPlayback', {
+        'tag' : 'powerqual', 'force_discharge' : True, 'secs_per_video' : 3600,
+        'videos' : [('h264_1080_30fps', ''), ('vp9_1080_30fps', '')]}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 100, 'max_run_time': 5 * HOURS,
+        'tag' : 'powerqual'}),
+    ('power_WaitForCoolDown', {}),
+    ('power_VideoCall', {
+        'tag' : 'powerqual', 'force_discharge' : True,
+        'min_run_time_percent': 75}),
+
+    ('power_WaitForCoolDown', {}),
+    ('power_UiResume', {
+        'tag' : 'powerqual'}),
+]
+
+# Workaround to make it compatible with moblab autotest UI.
+global args_dict
+try:
+    args_dict
+except NameError:
+    args_dict = utils.args_to_dict(args)
+
+# Use time as pdash_note if not supplied to track all tests in same suite.
+pdash_note = args_dict.get('pdash_note',
+                           NAME + '_' + datetime.datetime.utcnow().isoformat())
+
+def run_client_test(machine):
+    client = hosts.create_host(machine)
+    client_at = autotest.Autotest(client)
+
+    for test, argv in CLIENT_TESTS:
+        argv['pdash_note'] = pdash_note
+        client_at.run_test(test, timeout=EXTENDED_TIMEOUT, **argv)
+
+job.parallel_on_machines(run_client_test, machines)
diff --git a/server/site_tests/sequences/control.power_weekly b/server/site_tests/sequences/control.power_weekly
index 7bb817b..2d7c905 100644
--- a/server/site_tests/sequences/control.power_weekly
+++ b/server/site_tests/sequences/control.power_weekly
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "Power Requirements tests"
 ATTRIBUTES = "suite:power_requirements"
 TIME = "LONG"
@@ -10,6 +10,7 @@
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
 DEPENDENCIES = "rpm"
+PY_VERSION = 3
 
 DOC = """
 This test suite runs automated power tests that should all pass. These tests
diff --git a/server/site_tests/sequences/control.thermal_qual_fast b/server/site_tests/sequences/control.thermal_qual_fast
index bd3f635..22b0b8d 100644
--- a/server/site_tests/sequences/control.thermal_qual_fast
+++ b/server/site_tests/sequences/control.thermal_qual_fast
@@ -9,6 +9,8 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+PY_VERSION = 3
+
 DOC = """
 Make sure that ThermalQual.full works.
 """
diff --git a/server/site_tests/sequences/control.thermal_qual_full b/server/site_tests/sequences/control.thermal_qual_full
index 1d1510e..5c64dd9 100644
--- a/server/site_tests/sequences/control.thermal_qual_full
+++ b/server/site_tests/sequences/control.thermal_qual_full
@@ -9,6 +9,9 @@
 TEST_CATEGORY = "Stress"
 TEST_CLASS = "suite"
 TEST_TYPE = "server"
+EXTENDED_TIMEOUT = 18900  # 5 Hours + 900 second guard.
+PY_VERSION = 3
+
 DOC = """
 Sequence for qualification for thermal.
 """
@@ -69,6 +72,6 @@
 
     for test, argv in CLIENT_TESTS:
         argv['pdash_note'] = pdash_note
-        client_at.run_test(test, **argv)
+        client_at.run_test(test, timeout=EXTENDED_TIMEOUT, **argv)
 
 job.parallel_on_machines(run_client_test, machines)
diff --git a/server/site_tests/sequences/control.thermal_qual_lab b/server/site_tests/sequences/control.thermal_qual_lab
new file mode 100644
index 0000000..b6d915b
--- /dev/null
+++ b/server/site_tests/sequences/control.thermal_qual_lab
@@ -0,0 +1,75 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "puthik"
+NAME = "ThermalQual.lab"
+ATTRIBUTES = "suite:crosbolt_perf_perbuild"
+TIME = "LENGTHY"
+TEST_CATEGORY = "Stress"
+TEST_CLASS = "suite"
+TEST_TYPE = "server"
+EXTENDED_TIMEOUT = 3600 # 1 hour
+PY_VERSION = 3
+
+DOC = """
+Shorter thermal qual sequence for lab regression testing. This should detect
+most thermal issues; the full thermal qual can then be used to pinpoint which
+workload caused the issue.
+
+* 30-minute subtests instead of 2 to 2.5 hours.
+* Don't wait for cooldown between subtests, so the total heavy-load time is
+  about 2 hours.
+* power_BatteryCharge is not expected to run in a normal lab environment;
+  lab DUTs should have a near-full battery charge.
+
+"""
+
+import datetime
+from autotest_lib.client.common_lib import utils
+
+MINUTES=60
+
+# Need separate list for client and server test due to how these test work.
+CLIENT_TESTS = [
+    ('power_WaitForCoolDown', {}),
+    ('power_Speedometer2', {'tag' : 'thermal_qual_lab_before'}),
+
+    ('power_BatteryCharge', {
+        'percent_target_charge' : 30, 'max_run_time': 30 * MINUTES}),
+    ('power_WaitForCoolDown', {}),
+
+    ('power_ThermalLoad', {
+        'tag' : 'thermal_qual_lab_discharge', 'force_discharge' : True,
+        'duration': 30 * MINUTES}),
+    ('power_VideoCall', {
+        'tag' : 'thermal_qual_lab_discharge', 'force_discharge' : True,
+        'duration': 30 * MINUTES}),
+    ('power_ThermalLoad', {
+        'tag' : 'thermal_qual_lab_charge', 'duration': 30 * MINUTES}),
+    ('power_VideoCall', {
+        'tag' : 'thermal_qual_lab_charge', 'duration': 30 * MINUTES}),
+
+    ('power_Speedometer2', {'tag' : 'thermal_qual_lab_after'})
+]
+
+# Workaround to make it compatible with moblab autotest UI.
+global args_dict
+try:
+    args_dict
+except NameError:
+    args_dict = utils.args_to_dict(args)
+
+# Use time as pdash_note if not supplied to track all tests in the same suite.
+pdash_note = args_dict.get('pdash_note',
+                           NAME + '_' + datetime.datetime.utcnow().isoformat())
+
+def run_client_test(machine):
+    client = hosts.create_host(machine)
+    client_at = autotest.Autotest(client)
+
+    for test, argv in CLIENT_TESTS:
+        argv['pdash_note'] = pdash_note
+        client_at.run_test(test, timeout=EXTENDED_TIMEOUT, **argv)
+
+job.parallel_on_machines(run_client_test, machines)
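
The control file above leans on utils.args_to_dict() to turn the space-separated key=value pairs passed via `test_that ... --args` into a dictionary, and falls back to a timestamped pdash_note when none is supplied. A minimal, self-contained sketch of that pattern (the args list and the simplified parser below are illustrative stand-ins, not the real autotest helper):

    import datetime

    # In a control file, `args` is injected by the framework; fake it here.
    args = ['pdash_note=my_lab_run', 'iterations=2']

    def args_to_dict(arg_list):
        # Simplified stand-in for common_lib.utils.args_to_dict.
        return dict(item.split('=', 1) for item in arg_list if '=' in item)

    args_dict = args_to_dict(args)
    pdash_note = args_dict.get(
            'pdash_note',
            'ThermalQual.lab_' + datetime.datetime.utcnow().isoformat())
    print(pdash_note)  # 'my_lab_run' here; a timestamped default otherwise.
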
diff --git a/server/site_tests/servo_ConsoleStress/control b/server/site_tests/servo_ConsoleStress/control
index 51ddff4..af8f1ee 100644
--- a/server/site_tests/servo_ConsoleStress/control
+++ b/server/site_tests/servo_ConsoleStress/control
@@ -10,6 +10,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Run the control a bunch. Make sure the output doesn't change
 
diff --git a/server/site_tests/servo_ConsoleStress/control.cr50 b/server/site_tests/servo_ConsoleStress/control.cr50
index 0331645..0a2841e 100644
--- a/server/site_tests/servo_ConsoleStress/control.cr50
+++ b/server/site_tests/servo_ConsoleStress/control.cr50
@@ -10,6 +10,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Run the control a bunch. Make sure the output doesn't change
 
diff --git a/server/site_tests/servo_ConsoleStress/control.ec b/server/site_tests/servo_ConsoleStress/control.ec
index c1db60b..589482b 100644
--- a/server/site_tests/servo_ConsoleStress/control.ec
+++ b/server/site_tests/servo_ConsoleStress/control.ec
@@ -10,6 +10,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Run the control a bunch. Make sure the output doesn't change
 
diff --git a/server/site_tests/servo_ConsoleStress/control.servo_micro b/server/site_tests/servo_ConsoleStress/control.servo_micro
index 0847efb..d60e74a 100644
--- a/server/site_tests/servo_ConsoleStress/control.servo_micro
+++ b/server/site_tests/servo_ConsoleStress/control.servo_micro
@@ -10,6 +10,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Run the control a bunch. Make sure the output doesn't change
 
diff --git a/server/site_tests/servo_ConsoleStress/control.servo_v4 b/server/site_tests/servo_ConsoleStress/control.servo_v4
index cb6e5ff..0cdee63 100644
--- a/server/site_tests/servo_ConsoleStress/control.servo_v4
+++ b/server/site_tests/servo_ConsoleStress/control.servo_v4
@@ -10,6 +10,7 @@
 TIME = "SHORT"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """Run the control a bunch. Make sure the output doesn't change
 
@@ -28,7 +29,7 @@
     iterations = int(args_dict.get("iterations", 1))
     attempts = int(args_dict.get("attempts", 5000))
     cmd_type = args_dict.get("cmd_type", "servo")
-    cmd = args_dict.get("cmd", "servo_v4_version")
+    cmd = args_dict.get("cmd", "root.servo_fw_version")
 
     job.run_test("servo_ConsoleStress", host=host, cmdline_args=args,
                  full_args=args_dict, iterations=iterations,
diff --git a/server/site_tests/servo_ConsoleStress/servo_ConsoleStress.py b/server/site_tests/servo_ConsoleStress/servo_ConsoleStress.py
index 732d5bc..6895d71 100644
--- a/server/site_tests/servo_ConsoleStress/servo_ConsoleStress.py
+++ b/server/site_tests/servo_ConsoleStress/servo_ConsoleStress.py
@@ -1,8 +1,9 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from __future__ import print_function
+
 
 import difflib
 import logging
diff --git a/server/site_tests/servo_LabControlVerification/control b/server/site_tests/servo_LabControlVerification/control
index 6c903e4..e097049 100644
--- a/server/site_tests/servo_LabControlVerification/control
+++ b/server/site_tests/servo_LabControlVerification/control
@@ -6,12 +6,13 @@
 
 AUTHOR = "coconutruben"
 NAME = "servo_LabControlVerification"
-PURPOSE = "Validate basic, critial servo controls for the lab work."
+PURPOSE = "Validate basic, critical servo controls for the lab work."
 TIME = "LONG"
 CRITERIA="Run on DUT with a servo attached and a functional DUT on the servo."
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "servo"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 
diff --git a/server/site_tests/servo_LabControlVerification/servo_LabControlVerification.py b/server/site_tests/servo_LabControlVerification/servo_LabControlVerification.py
index a14a303..22b62fc 100644
--- a/server/site_tests/servo_LabControlVerification/servo_LabControlVerification.py
+++ b/server/site_tests/servo_LabControlVerification/servo_LabControlVerification.py
@@ -1,8 +1,9 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-"""Test to validate basic, critial servo controls for the lab work."""
+"""Test to validate basic, critical servo controls for the lab work."""
 
 import logging
 import os
@@ -55,7 +56,7 @@
                 # This a set servod control.
                 ctrl['ctrl_value'] = ctrl_elems[1]
             elif len(ctrl_elems) > 2:
-                logging.warn('The line containing %r in the control sequence '
+                logging.warning('The line containing %r in the control sequence '
                              'file has an unknown format. Ignoring for now.',
                              ctrl)
             ctrls.append(ctrl)
@@ -84,7 +85,7 @@
           error.AutoservRunError if trying to start servod and it fails
         """
         # TODO(coconutruben): board is set to nami for now as that will allow
-        # servod to come up and the nami overlay does not have any crazy changes
+        # servod to come up and the nami overlay does not have any complex changes
         # from normal boards. When the new servod is rolled out and it can infer
         # board names itself, remove the board attribute here.
         self.servo_port = port
diff --git a/server/site_tests/servo_LabstationVerification/control b/server/site_tests/servo_LabstationVerification/control
index dc2f6be..3659c0a 100644
--- a/server/site_tests/servo_LabstationVerification/control
+++ b/server/site_tests/servo_LabstationVerification/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "servo_LabstationVerification"
 PURPOSE = "Ensure labstation images can provide basic lab functionality."
 ATTRIBUTES = "suite:labstation_verification"
@@ -12,6 +12,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is a wrapper around servo_Verification
diff --git a/server/site_tests/servo_LabstationVerification/control.local b/server/site_tests/servo_LabstationVerification/control.local
index 05036ce..5969df8 100644
--- a/server/site_tests/servo_LabstationVerification/control.local
+++ b/server/site_tests/servo_LabstationVerification/control.local
@@ -4,13 +4,14 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "servo_LabstationVerification.local"
 PURPOSE = "Ensure labstation images can provide basic lab functionality."
 TIME = "MEDIUM"
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 The local version of servo_LabstationVerification. Local in this context means
diff --git a/server/site_tests/servo_LabstationVerification/servo_LabstationVerification.py b/server/site_tests/servo_LabstationVerification/servo_LabstationVerification.py
index 691308e..9d13b50 100644
--- a/server/site_tests/servo_LabstationVerification/servo_LabstationVerification.py
+++ b/server/site_tests/servo_LabstationVerification/servo_LabstationVerification.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -259,7 +260,7 @@
             # ping again.
             logging.info('Failed to ping DUT on ipv6: %s. Cold resetting',
                          dut_ipv6)
-            servo_proxy._power_state.reset()
+            servo_proxy.get_power_state_controller().reset()
             time.sleep(self.RESET_TIMEOUT_S)
         dut_ipv4 = None
         try:
@@ -312,7 +313,14 @@
     def _byteify(self, data, ignore_dicts=False):
         """Helper method to convert unicode to string.
         """
-        if isinstance(data, unicode):
+        # In Python 2 we need to convert unicode into str. In Python 3,
+        # unicode was renamed to str, so json.load already returns str, and
+        # calling encode() on a str in Python 3 would convert the data into
+        # bytes, which we don't want. To keep the test compatible with both
+        # Python 2 and Python 3, we check that the data type is not dict,
+        # list, or str for now.
+        # TODO(xianuowang@): remove this method once we have fully migrated
+        # to Python 3.
+        if type(data) not in {dict, list, str}:
             return data.encode('utf-8')
         if isinstance(data, list):
             return [self._byteify(item, ignore_dicts=True) for item in data]
@@ -320,7 +328,7 @@
             return {
                 self._byteify(key, ignore_dicts=True):
                     self._byteify(value, ignore_dicts=True)
-                for key, value in data.iteritems()
+                for key, value in list(data.items())
             }
         return data
 
@@ -462,7 +470,7 @@
         if dut_host is None:
             raise error.TestFail('dut machine %r not known to suite. Known '
                                  'machines: %r', machine,
-                                 ', '.join(self.machine_dict.keys()))
+                                 ', '.join(list(self.machine_dict.keys())))
         logging.info('About to run on machine %s', machine)
         if not self.job.run_test('servo_Verification', host=dut_host,
                                  local=self.local):
@@ -480,5 +488,5 @@
         """Clean up by calling close for dut host, which will also take care
         of servo cleanup.
         """
-        for _, dut in self.machine_dict.items():
+        for _, dut in list(self.machine_dict.items()):
             dut.close()
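
The comment added to _byteify above is the heart of that hunk: under Python 3, json.load already yields str, and calling encode() on it would produce bytes. A standalone sketch of the same type-set guard (illustrative only, assuming a string-only JSON payload as in this test):

    import json

    def byteify(data):
        # Same guard as above: containers and (py3) str fall through, so
        # nothing JSON-decoded under Python 3 is ever turned into bytes.
        if type(data) not in (dict, list, str):
            return data.encode('utf-8')
        if isinstance(data, list):
            return [byteify(item) for item in data]
        if isinstance(data, dict):
            return {byteify(k): byteify(v) for k, v in data.items()}
        return data

    print(byteify(json.loads('{"dut": "127.0.0.1"}')))  # {'dut': '127.0.0.1'}
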
diff --git a/server/site_tests/servo_LogGrab/control b/server/site_tests/servo_LogGrab/control
index a7246bb..d7bb980 100644
--- a/server/site_tests/servo_LogGrab/control
+++ b/server/site_tests/servo_LogGrab/control
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Functional"
 TEST_CLASS = "servo"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 
diff --git a/server/site_tests/servo_LogGrab/servo_LogGrab.py b/server/site_tests/servo_LogGrab/servo_LogGrab.py
index d169798..397424d 100644
--- a/server/site_tests/servo_LogGrab/servo_LogGrab.py
+++ b/server/site_tests/servo_LogGrab/servo_LogGrab.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -102,8 +103,8 @@
         """
         artifacts = [re.compile(r'- \(get\) %s' % ctrl) for ctrl in ctrls]
         # We also need to ensure that the initial logs don't get lost.
-        init_artifact = re.compile('Listening on (\d{1,3}\.){3}\d{1,3} '
-                                   'port \d{1,5}')
+        init_artifact = re.compile(r'Listening on (\d{1,3}\.){3}\d{1,3} '
+                                   r'port \d{1,5}')
         artifacts.append(init_artifact)
         fname = os.path.join(outdir, 'log.DEBUG.txt')
         with open(fname, 'r') as f:
diff --git a/server/site_tests/servo_USBInstall/control b/server/site_tests/servo_USBInstall/control
index 0c818ed..695ffa6 100644
--- a/server/site_tests/servo_USBInstall/control
+++ b/server/site_tests/servo_USBInstall/control
@@ -4,13 +4,14 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "servo_USBInstall"
 PURPOSE = "Force reinstall cros to a dut from the servo."
 TIME = "MEDIUM"
 TEST_CATEGORY = "Install"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 This test will do the same thing as ServoInstallRepair does, but without
diff --git a/server/site_tests/servo_USBInstall/servo_USBInstall.py b/server/site_tests/servo_USBInstall/servo_USBInstall.py
index ebd1ac1..9a60c79 100644
--- a/server/site_tests/servo_USBInstall/servo_USBInstall.py
+++ b/server/site_tests/servo_USBInstall/servo_USBInstall.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/servo_USBMuxVerification/control b/server/site_tests/servo_USBMuxVerification/control
index 8f3d77d..286d473 100644
--- a/server/site_tests/servo_USBMuxVerification/control
+++ b/server/site_tests/servo_USBMuxVerification/control
@@ -2,6 +2,11 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+# TEST SOON TO BE DISABLED UNTIL MIGRATED TO PYTHON 3.
+# For migration instructions, see go/tauto-py3-migration.
+# To re-enable, migrate to Python 3.
+# If the test is not migrated by 1/14/22, it will be deleted.
+
 from autotest_lib.server import utils
 
 AUTHOR = "coconutruben"
@@ -14,6 +19,7 @@
 TEST_CLASS = "servo"
 TEST_TYPE = "server"
 DEPENDENCIES = "servo_state:WORKING"
+PY_VERSION = 3
 
 DOC = """
 
diff --git a/server/site_tests/servo_USBMuxVerification/servo_USBMuxVerification.py b/server/site_tests/servo_USBMuxVerification/servo_USBMuxVerification.py
index 19a681c..4dceba9 100644
--- a/server/site_tests/servo_USBMuxVerification/servo_USBMuxVerification.py
+++ b/server/site_tests/servo_USBMuxVerification/servo_USBMuxVerification.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/server/site_tests/servo_Verification/control b/server/site_tests/servo_Verification/control
index 4ef1fa8..3c85a2c 100644
--- a/server/site_tests/servo_Verification/control
+++ b/server/site_tests/servo_Verification/control
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "servo_Verification"
 PURPOSE = "Ensure a dut/servo pair provide required lab functionality."
 ATTRIBUTES = "suite:servo_verification"
@@ -12,6 +12,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This test is a wrapper around a few functionality tests surrounding servo,
diff --git a/server/site_tests/servo_Verification/control.local b/server/site_tests/servo_Verification/control.local
index 213b81b..efde06d 100644
--- a/server/site_tests/servo_Verification/control.local
+++ b/server/site_tests/servo_Verification/control.local
@@ -4,7 +4,7 @@
 
 from autotest_lib.server import utils
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "servo_Verification.local"
 PURPOSE = "Ensure a dut/servo pair provide required lab functionality."
 ATTRIBUTES = "suite:servo_verification"
@@ -12,6 +12,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "platform"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 The local version of servo_Verification. Local in this context means that
diff --git a/server/site_tests/servo_Verification/servo_Verification.py b/server/site_tests/servo_Verification/servo_Verification.py
index 98c31c3..dd91107 100644
--- a/server/site_tests/servo_Verification/servo_Verification.py
+++ b/server/site_tests/servo_Verification/servo_Verification.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -62,13 +63,13 @@
         # This assumes that the string is job.run_test(...) so the first ( is
         # at index 0.
         for index in range(1, len(run_test_str)):
-          if run_test_str[index] == '(': paran += 1
-          if run_test_str[index] == ')': paran -= 1
-          if paran == 0: break
+            if run_test_str[index] == '(': paran += 1
+            if run_test_str[index] == ')': paran -= 1
+            if paran == 0: break
         else:
-          # Failed to find balanced parentheses.
-          raise error.TestNAError('Unable to parse %s for %s.' % (anchor,
-                                                                  cname))
+            # Failed to find balanced parentheses.
+            raise error.TestNAError('Unable to parse %s for %s.' %
+                                    (anchor, cname))
         # Extract only the args
         run_test_str = run_test_str[1:index]
         raw_args = run_test_str.split(',')
@@ -129,7 +130,15 @@
             # need it can use it.
             _, image_url = host.stage_image_for_servo()
             host.servo.image_to_servo_usb(image_url)
-
+            # `image_to_servo_usb` turns the DUT off while downloading the
+            # image to the USB drive, so we need to turn the DUT back on, as
+            # some tests assume the DUT is sshable at the beginning.
+            host.servo.get_power_state_controller().power_on()
+            if not host.wait_up(timeout=host.BOOT_TIMEOUT):
+                logging.warning(
+                        '%s failed to boot in %s seconds; some tests'
+                        ' may fail because they cannot ssh to the DUT.',
+                        host.hostname, host.BOOT_TIMEOUT)
 
     def run_once(self, host):
         """Run through the test sequence.
diff --git a/server/site_tests/stress_ClientTestReboot/control b/server/site_tests/stress_ClientTestReboot/control
deleted file mode 100644
index 255866a..0000000
--- a/server/site_tests/stress_ClientTestReboot/control
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.client.common_lib import utils
-
-
-AUTHOR = "kathrelkeld"
-NAME = "stress_ClientTestReboot"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Stress"
-TEST_TYPE = "server"
-
-DOC = """
-This test takes the name of a client side test.  This test is run many times
-over, rebooting the DUT in between each time.
-
-Example usage:
-test_that stress_ClientTestReboot <ip> --board=<board>
-                                       --args="testname=NAME loops=N"
-"""
-
-args_dict = utils.args_to_dict(args)
-
-def run_reboot_stress(machine):
-    if 'testname' in args_dict and 'loops' in args_dict:
-        testname = args_dict.get('testname')
-        loops = int(args_dict.get('loops'))
-        job.run_test('stress_ClientTestReboot', client_ip=machine,
-                     testname=testname, loops=loops)
-    else:
-        logging.info('Please provide the name of a client autotest and the '
-                     'number of times the test should be run. '
-                     '"testname=<test> loops=<number>"')
-
-parallel_simple(run_reboot_stress, machines)
diff --git a/server/site_tests/stress_ClientTestReboot/control.audioCrasSanity b/server/site_tests/stress_ClientTestReboot/control.audioCrasSanity
deleted file mode 100644
index 289412c..0000000
--- a/server/site_tests/stress_ClientTestReboot/control.audioCrasSanity
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-AUTHOR = "kirtika"
-NAME = "stress_ClientTestReboot.audioCrasSanity"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Stress"
-TEST_TYPE = "server"
-ATTRIBUTES = 'suite:reboot_stress'
-
-DOC = """
-Checks if audio subsystem passes sanity test across reboots.
-"""
-
-def run_reboot_stress(machine):
-    job.run_test('stress_ClientTestReboot', client_ip=machine,
-                 testname='audio_CrasSanity', loops=100)
-
-parallel_simple(run_reboot_stress, machines)
diff --git a/server/site_tests/stress_ClientTestReboot/control.bluetooth b/server/site_tests/stress_ClientTestReboot/control.bluetooth
deleted file mode 100644
index 49b1abc..0000000
--- a/server/site_tests/stress_ClientTestReboot/control.bluetooth
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-AUTHOR = "kathrelkeld"
-NAME = "stress_ClientTestReboot.bluetooth"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Stress"
-TEST_TYPE = "server"
-ATTRIBUTES = 'suite:bluetooth_stress, suite:reboot_stress'
-
-DOC = """
-This test reboots the machine and checks whether the bluetooth adapter comes up.
-"""
-
-def run_reboot_stress(machine):
-    job.run_test('stress_ClientTestReboot', client_ip=machine,
-                 testname='bluetooth_AdapterHealth', loops=100)
-
-parallel_simple(run_reboot_stress, machines)
diff --git a/server/site_tests/stress_ClientTestReboot/control.graphics b/server/site_tests/stress_ClientTestReboot/control.graphics
deleted file mode 100644
index 3018d43..0000000
--- a/server/site_tests/stress_ClientTestReboot/control.graphics
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-AUTHOR = "kirtika"
-NAME = "stress_ClientTestReboot.graphics"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Stress"
-TEST_TYPE = "server"
-
-DOC = """
-This test reboots the machine and checks whether GL/GLES works on each boot.
-"""
-
-def run_reboot_stress(machine):
-    job.run_test('stress_ClientTestReboot', client_ip=machine,
-                 testname='graphics_Sanity', loops=100)
-
-parallel_simple(run_reboot_stress, machines)
diff --git a/server/site_tests/stress_ClientTestReboot/control.touchUpdate b/server/site_tests/stress_ClientTestReboot/control.touchUpdate
deleted file mode 100644
index 0885c74..0000000
--- a/server/site_tests/stress_ClientTestReboot/control.touchUpdate
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-AUTHOR = "kathrelkeld"
-NAME = "stress_ClientTestReboot.touchUpdate"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Stress"
-TEST_TYPE = "server"
-
-DOC = """
-This test reboots the machine and checks whether any touch update errors occur.
-"""
-
-def run_reboot_stress(machine):
-    job.run_test('stress_ClientTestReboot', client_ip=machine,
-                 testname='touch_UpdateErrors', loops=100)
-
-parallel_simple(run_reboot_stress, machines)
diff --git a/server/site_tests/stress_ClientTestReboot/control.wifi b/server/site_tests/stress_ClientTestReboot/control.wifi
deleted file mode 100644
index 1b2c774..0000000
--- a/server/site_tests/stress_ClientTestReboot/control.wifi
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-AUTHOR = "kirtika"
-NAME = "stress_ClientTestReboot.wifi"
-TIME = "MEDIUM"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "Stress"
-TEST_TYPE = "server"
-ATTRIBUTES = 'suite:network_nightly'
-
-DOC = """
-This test reboots the machine and checks whether the wireless interface comes up.
-"""
-
-def run_reboot_stress(machine):
-    job.run_test('stress_ClientTestReboot', client_ip=machine,
-                 testname='network_WlanPresent', loops=20)
-
-parallel_simple(run_reboot_stress, machines)
diff --git a/server/site_tests/stress_ClientTestReboot/stress_ClientTestReboot.py b/server/site_tests/stress_ClientTestReboot/stress_ClientTestReboot.py
deleted file mode 100644
index d2d580f..0000000
--- a/server/site_tests/stress_ClientTestReboot/stress_ClientTestReboot.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.server import autotest
-from autotest_lib.server import hosts
-from autotest_lib.server import test
-
-
-class stress_ClientTestReboot(test.test):
-    """Reboot a device."""
-    version = 1
-
-    def run_once(self, client_ip, testname, loops):
-        """Test body."""
-
-        host = hosts.create_host(client_ip)
-        autotest_client = autotest.Autotest(host)
-        for i in xrange(loops):
-            logging.debug('Starting loop #%d', i)
-            autotest_client.run_test(testname, check_client_result=True)
-            host.reboot()
diff --git a/server/site_tests/stress_EnrollmentRetainment/control.EnrollmentRetainment b/server/site_tests/stress_EnrollmentRetainment/control.EnrollmentRetainment
deleted file mode 100644
index bc360af..0000000
--- a/server/site_tests/stress_EnrollmentRetainment/control.EnrollmentRetainment
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'rzakarian'
-NAME = 'stress_EnrollmentRetainment.EnrollmentRetainment'
-TIME = 'LONG'
-TEST_CATEGORY = 'General'
-TEST_CLASS = 'enterprise'
-TEST_TYPE = 'server'
-ATTRIBUTES = 'suite:enroll_retainment'
-
-DOC = """
-Sets up and runs the client test for the policy_EnrollmentRetainment.
-
-"""
-
-client_test = 'policy_EnrollmentRetainment'
-
-def run(machine):
-    host = hosts.create_host(machine)
-    job.run_test('stress_EnrollmentRetainment', host=host,
-                  client_test=client_test, loops=100)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/stress_EnrollmentRetainment/stress_EnrollmentRetainment.py b/server/site_tests/stress_EnrollmentRetainment/stress_EnrollmentRetainment.py
deleted file mode 100644
index 43a127c..0000000
--- a/server/site_tests/stress_EnrollmentRetainment/stress_EnrollmentRetainment.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-
-class stress_EnrollmentRetainment(test.test):
-    """Used to kick off policy_EnrollmentRetainment test."""
-    version = 1
-
-
-    def run_once(self, client_test, host, loops):
-        """
-        Starting point of this test.
-
-        @param client_test: the name of the Client test to run.
-        @param host: the host machine running the test.
-        @param loops: how many times to loop the test.
-
-        """
-        self.autotest_client = autotest.Autotest(host)
-
-        for i in xrange(loops):
-            logging.info('Starting loop #%d', i)
-            self.autotest_client.run_test(
-                client_test, check_client_result=True)
-            host.reboot()
diff --git a/server/site_tests/stub_FailServer/control b/server/site_tests/stub_FailServer/control
new file mode 100644
index 0000000..8ea8335
--- /dev/null
+++ b/server/site_tests/stub_FailServer/control
@@ -0,0 +1,17 @@
+NAME = 'stub_FailServer'
+AUTHOR = 'scottz'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'General'
+TEST_TYPE = 'server'
+PY_VERSION = 3
+
+DOC = """
+Test that always fails.
+"""
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('stub_FailServer', disable_sysinfo=False)
+
+job.parallel_simple(run, machines)
diff --git a/server/site_tests/stub_FailServer/stub_FailServer.py b/server/site_tests/stub_FailServer/stub_FailServer.py
new file mode 100644
index 0000000..c4f4565
--- /dev/null
+++ b/server/site_tests/stub_FailServer/stub_FailServer.py
@@ -0,0 +1,15 @@
+# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import test
+from autotest_lib.client.common_lib import error
+
+
+class stub_FailServer(test.test):
+    """A test that always fails."""
+    version = 1
+
+    def run_once(self):
+        """Run the test that always fails, once"""
+        raise error.TestFail('Test always fails intentionally.')
diff --git a/server/site_tests/stub_PassServer/control b/server/site_tests/stub_PassServer/control
new file mode 100644
index 0000000..255f480
--- /dev/null
+++ b/server/site_tests/stub_PassServer/control
@@ -0,0 +1,25 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "stub_PassServer"
+PURPOSE = "Demonstrate success methods of autotests."
+CRITERIA = "This test will always succeed."
+ATTRIBUTES = "suite:dummy_server, suite:pvs-quick-check"
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "stub"
+TEST_TYPE = "server"
+MAX_RESULT_SIZE_KB = 6000
+PY_VERSION = 3
+
+DOC = """
+This is a helper test that will succeed.
+"""
+
+def run(machine):
+    job.run_test('stub_PassServer',
+                 host=hosts.create_host(machine))
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/stub_PassServer/control.calibration b/server/site_tests/stub_PassServer/control.calibration
new file mode 100644
index 0000000..fc6f9ec
--- /dev/null
+++ b/server/site_tests/stub_PassServer/control.calibration
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "fdeng, chromeos-lab-infrastructure"
+NAME = "stub_PassServer.calibration"
+PURPOSE = "An empty control file that does nothing."
+CRITERIA = "This test will always succeed."
+ATTRIBUTES = "suite:calibration, suite:sanity"
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "stub"
+TEST_TYPE = "server"
+PY_VERSION = 3
+
+DOC = """
+It belongs to the calibration suite, which verifies provisioning.
+The only purpose of this test is to trigger a provision task.
+We explicitly do not call job.runtest, to avoid the overhead of
+installing/uninstalling autotest packages.
+"""
+
+# We need to record the state, so that the tko parser can still parse it
+# as a test.
+job.record('START', None, NAME)
+job.record('GOOD', None, None, 'completed successfully')
+job.record('END GOOD', None, NAME)
diff --git a/server/site_tests/stub_PassServer/control.nossp b/server/site_tests/stub_PassServer/control.nossp
new file mode 100644
index 0000000..65ac682
--- /dev/null
+++ b/server/site_tests/stub_PassServer/control.nossp
@@ -0,0 +1,32 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "stub_PassServer_nossp"
+PURPOSE = "Demonstrate success methods of autotests."
+CRITERIA = "This test will always succeed."
+ATTRIBUTES = (
+        "suite:dummy_server,"
+        " suite:dummy_server_nossp,"
+        " suite:skylab_staging_test,"
+        " suite:dev_drone_image_test"
+)
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "stub"
+TEST_TYPE = "server"
+# Force this test not to use the server-side package.
+REQUIRE_SSP = False
+PY_VERSION = 3
+
+DOC = """
+This is a helper test that will succeed and forces server-side packaging not
+to be used.
+"""
+
+def run(machine):
+    job.run_test('stub_PassServer', host=hosts.create_host(machine),
+                 expect_ssp=False)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/stub_PassServer/control.ssp b/server/site_tests/stub_PassServer/control.ssp
new file mode 100644
index 0000000..2a8c2c3
--- /dev/null
+++ b/server/site_tests/stub_PassServer/control.ssp
@@ -0,0 +1,24 @@
+# Copyright 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "stub_PassServer.ssp"
+PURPOSE = "Demonstrate success methods of autotests."
+CRITERIA = "This test will always succeed when running in a container."
+ATTRIBUTES = "suite:dummy_server, suite:push_to_prod, suite:skylab_staging_test, suite:dev_drone_image_test, suite:infra_qual"
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "stub"
+TEST_TYPE = "server"
+PY_VERSION = 3
+
+DOC = """
+This is a helper test that will succeed and forces server-side packaging to be used.
+"""
+
+def run(machine):
+    job.run_test('stub_PassServer', host=hosts.create_host(machine),
+                 expect_ssp=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/stub_PassServer/stub_PassServer.py b/server/site_tests/stub_PassServer/stub_PassServer.py
new file mode 100644
index 0000000..04cd5e8
--- /dev/null
+++ b/server/site_tests/stub_PassServer/stub_PassServer.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server import test
+
+
+class stub_PassServer(test.test):
+    """Tests that server tests can pass."""
+    version = 1
+
+    def run_once(self, expect_ssp=None):
+        """There is no body for this test.
+
+        @param expect_ssp: If True, ensure test is running inside a container.
+                If False, ensure test is not running inside a container.
+                If None (default), do nothing.
+        """
+        if expect_ssp is not None:
+            if expect_ssp and not utils.is_in_container():
+                raise error.TestFail(
+                        'The test is not running inside container')
+            if not expect_ssp and utils.is_in_container():
+                raise error.TestFail('The test is running inside a container')
diff --git a/server/site_tests/stub_ServerToClientPass/control b/server/site_tests/stub_ServerToClientPass/control
new file mode 100644
index 0000000..ef339e7
--- /dev/null
+++ b/server/site_tests/stub_ServerToClientPass/control
@@ -0,0 +1,23 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'dbeckett'
+NAME = 'stub_ServerToClientPass'
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'stub'
+TEST_TYPE = 'server'
+ATTRIBUTES = 'suite:infra_qual, suite:py3-beta, suite:cft-beta'
+PY_VERSION = 3
+
+DOC = """
+Stub to verify that a client test can be kicked off from a server test and passes.
+"""
+
+
+def run(machine):
+    host = hosts.create_host(machine)
+    job.run_test('stub_ServerToClientPass', host=host)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/stub_ServerToClientPass/stub_ServerToClientPass.py b/server/site_tests/stub_ServerToClientPass/stub_ServerToClientPass.py
new file mode 100644
index 0000000..1f06073
--- /dev/null
+++ b/server/site_tests/stub_ServerToClientPass/stub_ServerToClientPass.py
@@ -0,0 +1,26 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import autotest
+from autotest_lib.server import test
+
+
+class stub_ServerToClientPass(test.test):
+    """
+    stub_ServerToClientPass test used to kick off stub_Pass client test.
+
+    """
+    version = 1
+
+    def run_once(self, host):
+        """
+        Starting point of this test.
+
+        Note: base class sets host as self._host.
+
+        """
+        self.host = host
+
+        self.autotest_client = autotest.Autotest(self.host)
+        self.autotest_client.run_test('stub_Pass', check_client_result=True)
diff --git a/server/site_tests/system_ColdBoot/control b/server/site_tests/system_ColdBoot/control
deleted file mode 100644
index 7cd2a74..0000000
--- a/server/site_tests/system_ColdBoot/control
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = "cernekee"
-NAME = "system_ColdBoot"
-PURPOSE = "Shut down DUT, then initiate a cold boot."
-ATTRIBUTES = "suite:experimental"
-TIME = "SHORT"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "power"
-TEST_TYPE = "server"
-DEPENDENCIES = "servo_state:WORKING"
-DOC = """
-Verifies that cold boots work correctly.
-
-This executes the following sequence:
-  - remote shutdown of DUT via Linux commands (ssh)
-  - wait for DUT to stop responding to pings
-  - simulate a power button press via servo
-  - wait for DUT to start responding to pings
-  - attempt to log into DUT
-"""
-
-args_dict = utils.args_to_dict(args)
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, servo_args=servo_args)
-    job.run_test("system_ColdBoot", host=host, disable_sysinfo=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/system_ColdBoot/system_ColdBoot.py b/server/site_tests/system_ColdBoot/system_ColdBoot.py
deleted file mode 100755
index f542ab1..0000000
--- a/server/site_tests/system_ColdBoot/system_ColdBoot.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import time
-
-from autotest_lib.server import autotest, test
-
-class system_ColdBoot(test.test):
-    """
-    Shut down the device gracefully via Linux shell commands, then simulate
-    a power button press and verify that it comes back up correctly.
-    """
-    version = 1
-
-    # Allowed timeout for graceful shutdown.
-    TIMEOUT_POWEROFF_TRANSITION = 15
-    # Time to sleep to ensure full power off, after OS quits replying to pings.
-    WAIT_TIME_FULL_POWEROFF = 5
-
-    def run_once(self, host):
-        boot_id = host.get_boot_id()
-
-        host.run("poweroff")
-        host.test_wait_for_shutdown(self.TIMEOUT_POWEROFF_TRANSITION)
-        time.sleep(self.WAIT_TIME_FULL_POWEROFF)
-
-        host.servo.power_normal_press()
-        host.test_wait_for_boot(boot_id)
-
-        autotest.Autotest(host).run_test("desktopui_SimpleLogin",
-                                         exit_without_logout=True)
diff --git a/server/site_tests/tast/control.appcompat b/server/site_tests/tast/control.appcompat
deleted file mode 100644
index cab0167..0000000
--- a/server/site_tests/tast/control.appcompat
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.appcompat'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:appcompat'
-MAX_RESULT_SIZE_KB = 1024 * 1024
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast appcompat test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Tast-based app compatibility tests against a remote DUT.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['("group:appcompat")'],
-                 ignore_test_failures=True, max_run_sec=10800,
-                 command_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.appcompat-shard-0 b/server/site_tests/tast/control.appcompat-shard-0
new file mode 100644
index 0000000..d90bce6
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat-shard-0
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat")'],
+                 ignore_test_failures=True, max_run_sec=16200,
+                 totalshards=6,
+                 shardindex=0,
+                 command_args=args)
+
+parallel_simple(run, machines)
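
The shard control files above split one appcompat run across six drones by passing totalshards and shardindex through to the Tast runner. As a rough mental model only (a hypothetical round-robin partition, not necessarily how Tast actually assigns tests), each shard gets a disjoint, roughly equal slice:

    def shard(tests, totalshards, shardindex):
        # Hypothetical round-robin partition: shard i takes every Nth test.
        return tests[shardindex::totalshards]

    tests = ['appcompat.Test%02d' % i for i in range(20)]  # made-up test names
    # Every test lands in exactly one of the six shards.
    assert sorted(sum((shard(tests, 6, i) for i in range(6)), [])) == sorted(tests)
    print(shard(tests, 6, 0))  # ['appcompat.Test00', 'appcompat.Test06', ...]
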
diff --git a/server/site_tests/tast/control.appcompat-shard-1 b/server/site_tests/tast/control.appcompat-shard-1
new file mode 100644
index 0000000..f31ceca
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat-shard-1
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat")'],
+                 ignore_test_failures=True, max_run_sec=16200,
+                 totalshards=6,
+                 shardindex=1,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.appcompat-shard-2 b/server/site_tests/tast/control.appcompat-shard-2
new file mode 100644
index 0000000..672c523
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat-shard-2
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat")'],
+                 ignore_test_failures=True, max_run_sec=16200,
+                 totalshards=6,
+                 shardindex=2,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.appcompat-shard-3 b/server/site_tests/tast/control.appcompat-shard-3
new file mode 100644
index 0000000..08a1aa0
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat-shard-3
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat-shard-3'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat")'],
+                 ignore_test_failures=True, max_run_sec=16200,
+                 totalshards=6,
+                 shardindex=3,
+                 command_args=args)
+
+parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/tast/control.appcompat-shard-4 b/server/site_tests/tast/control.appcompat-shard-4
new file mode 100644
index 0000000..d2e4217
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat-shard-4
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat-shard-4'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat")'],
+                 ignore_test_failures=True, max_run_sec=16200,
+                 totalshards=6,
+                 shardindex=4,
+                 command_args=args)
+
+parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/tast/control.appcompat-shard-5 b/server/site_tests/tast/control.appcompat-shard-5
new file mode 100644
index 0000000..d9a083a
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat-shard-5
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat-shard-5'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat")'],
+                 ignore_test_failures=True, max_run_sec=16200,
+                 totalshards=6,
+                 shardindex=5,
+                 command_args=args)
+
+parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/tast/control.appcompat_release-shard-0 b/server/site_tests/tast/control.appcompat_release-shard-0
new file mode 100644
index 0000000..c1f1718
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat_release-shard-0
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat_release-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat_release'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat release test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat" && appcompat_release)'],
+                 ignore_test_failures=True, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.appcompat_release-shard-1 b/server/site_tests/tast/control.appcompat_release-shard-1
new file mode 100644
index 0000000..63f27e6
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat_release-shard-1
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat_release-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat_release'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat release test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat" && appcompat_release)'],
+                 ignore_test_failures=True, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.appcompat_release-shard-2 b/server/site_tests/tast/control.appcompat_release-shard-2
new file mode 100644
index 0000000..a2043a8
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat_release-shard-2
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat_release-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat_release'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat release test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat" && appcompat_release)'],
+                 ignore_test_failures=True, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.appcompat_smoke-shard-0 b/server/site_tests/tast/control.appcompat_smoke-shard-0
new file mode 100644
index 0000000..d84ec29
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat_smoke-shard-0
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat_smoke-shard-0'
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat_smoke'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat smoke test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat" && appcompat_smoke)'],
+                 ignore_test_failures=True, max_run_sec=3600,
+                 totalshards=2,
+                 shardindex=0,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.appcompat_smoke-shard-1 b/server/site_tests/tast/control.appcompat_smoke-shard-1
new file mode 100644
index 0000000..bcd91d4
--- /dev/null
+++ b/server/site_tests/tast/control.appcompat_smoke-shard-1
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.appcompat_smoke-shard-1'
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:appcompat_smoke'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast appcompat smoke test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based app compatibility tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:appcompat" && appcompat_smoke)'],
+                 ignore_test_failures=True, max_run_sec=3600,
+                 totalshards=2,
+                 shardindex=1,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.arc-data-collector b/server/site_tests/tast/control.arc-data-collector
index a3cd83b..eb17068 100644
--- a/server/site_tests/tast/control.arc-data-collector
+++ b/server/site_tests/tast/control.arc-data-collector
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:bvt-tast-android-pfq'
 MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -29,11 +30,11 @@
 '''
 
 def run(machine):
-    # TODO(crbug.com/992303): Fix android condition.
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['("group:arc-data-collector")'],
                  ignore_test_failures=False, max_run_sec=1200,
-                 command_args=args)
+                 command_args=args,
+                 retries=2)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.arc-data-snapshot b/server/site_tests/tast/control.arc-data-snapshot
new file mode 100644
index 0000000..12dde68
--- /dev/null
+++ b/server/site_tests/tast/control.arc-data-snapshot
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.arc-data-snapshot'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:arc-data-snapshot_per-build'
+MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run arc-data-snapshot Tast tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based ARC data/ snapshot tests against a remote DUT.
+This is a smoke test that verifies the whole ARC data/ snapshot flow, which
+requires several reboots and app downloads and therefore takes too long to be
+added to the existing test suites.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine), max_run_sec=10800,
+                 test_exprs=['("group:arc-data-snapshot")'], command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.autoupdate b/server/site_tests/tast/control.autoupdate
new file mode 100644
index 0000000..bcd952d
--- /dev/null
+++ b/server/site_tests/tast/control.autoupdate
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'cros-engprod-muc'
+NAME = 'tast.autoupdate'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:au-perbuild-tast'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+DOC = '''
+This suite runs the autoupdate tests in Tast.
+
+Servo is needed for device restoration when all other image restoration
+attempts have failed.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:autoupdate")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args,
+                 retries=2)
+
+parallel_simple(run, machines)
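
The control file above turns the free-form args list into a dictionary before extracting servo parameters. A minimal sketch of that key=value parsing, assuming args is a list of 'key=value' strings (the real utils.args_to_dict and hosts.CrosHost.get_servo_arguments are not reproduced here, and the servo host name is made up):

# Rough approximation only; the real autotest_lib.client.common_lib.utils
# implementation is not reproduced here, and the servo host name is made up.
def args_to_dict(args):
    """Parse 'key=value' strings supplied on the command line into a dict."""
    parsed = {}
    for arg in args:
        if '=' in arg:
            key, value = arg.split('=', 1)
            parsed[key.strip().lower()] = value.strip()
    return parsed

args = ['servo_host=labstation-example', 'servo_port=9995']
args_dict = args_to_dict(args)
servo_args = {k: v for k, v in args_dict.items() if k.startswith('servo_')}
print(servo_args)  # {'servo_host': 'labstation-example', 'servo_port': '9995'}
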
diff --git a/server/site_tests/tast/control.av-trace-nightly b/server/site_tests/tast/control.av-trace-nightly
deleted file mode 100644
index be6fef9..0000000
--- a/server/site_tests/tast/control.av-trace-nightly
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.av-trace-nightly'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:av-analysis_trace_per-day'
-MAX_RESULT_SIZE_KB = 1024 * 1024
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast graphics-trace test suite for screen capture analysis.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Tast-based graphics-trace tests against a remote DUT.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['("group:graphics" && graphics_nightly && graphics_trace && graphics_av_analysis)'],
-                 ignore_test_failures=True, max_run_sec=21600,
-                 command_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.av-trace-nightly-shard-0 b/server/site_tests/tast/control.av-trace-nightly-shard-0
new file mode 100644
index 0000000..3edb701
--- /dev/null
+++ b/server/site_tests/tast/control.av-trace-nightly-shard-0
@@ -0,0 +1,39 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.av-trace-nightly-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:av-analysis_trace_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-trace test suite for screen capture analysis.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-trace tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_trace && graphics_av_analysis)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=4,
+                 shardindex=0,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.av-trace-nightly-shard-1 b/server/site_tests/tast/control.av-trace-nightly-shard-1
new file mode 100644
index 0000000..740771c
--- /dev/null
+++ b/server/site_tests/tast/control.av-trace-nightly-shard-1
@@ -0,0 +1,39 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.av-trace-nightly-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:av-analysis_trace_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-trace test suite for screen capture analysis.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-trace tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_trace && graphics_av_analysis)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=4,
+                 shardindex=1,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.av-trace-nightly-shard-2 b/server/site_tests/tast/control.av-trace-nightly-shard-2
new file mode 100644
index 0000000..885445e
--- /dev/null
+++ b/server/site_tests/tast/control.av-trace-nightly-shard-2
@@ -0,0 +1,39 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.av-trace-nightly-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:av-analysis_trace_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-trace test suite for screen capture analysis.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-trace tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_trace && graphics_av_analysis)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=4,
+                 shardindex=2,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.av-trace-nightly-shard-3 b/server/site_tests/tast/control.av-trace-nightly-shard-3
new file mode 100644
index 0000000..c4df77c
--- /dev/null
+++ b/server/site_tests/tast/control.av-trace-nightly-shard-3
@@ -0,0 +1,39 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.av-trace-nightly-shard-3'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:av-analysis_trace_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-trace test suite for screen capture analysis.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-trace tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_trace && graphics_av_analysis)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=4,
+                 shardindex=3,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.av-trace-perbuild b/server/site_tests/tast/control.av-trace-perbuild
index 7010f7e..d44b9cc 100644
--- a/server/site_tests/tast/control.av-trace-perbuild
+++ b/server/site_tests/tast/control.av-trace-perbuild
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:av-analysis_trace_per-build'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -25,6 +26,8 @@
 '''
 
 def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['("group:graphics" && graphics_perbuild && graphics_trace && graphics_av_analysis)'],
diff --git a/server/site_tests/tast/control.av-trace-weekly b/server/site_tests/tast/control.av-trace-weekly
index 0b33e1e..43a2996 100644
--- a/server/site_tests/tast/control.av-trace-weekly
+++ b/server/site_tests/tast/control.av-trace-weekly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:av-analysis_trace_per-week'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -25,6 +26,8 @@
 '''
 
 def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['("group:graphics" && graphics_weekly && graphics_trace && graphics_av_analysis)'],
diff --git a/server/site_tests/tast/control.av-video-nightly b/server/site_tests/tast/control.av-video-nightly
index efa4ff5..3d6563d 100644
--- a/server/site_tests/tast/control.av-video-nightly
+++ b/server/site_tests/tast/control.av-video-nightly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:av-analysis_per-day'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -25,6 +26,8 @@
 '''
 
 def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['("group:graphics" && graphics_nightly && graphics_video && graphics_av_analysis)'],
diff --git a/server/site_tests/tast/control.av-video-perbuild b/server/site_tests/tast/control.av-video-perbuild
index 47f500d..35ac2d3 100644
--- a/server/site_tests/tast/control.av-video-perbuild
+++ b/server/site_tests/tast/control.av-video-perbuild
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:av-analysis_per-build'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -25,6 +26,8 @@
 '''
 
 def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['("group:graphics" && graphics_perbuild && graphics_video && graphics_av_analysis)'],
diff --git a/server/site_tests/tast/control.av-video-weekly b/server/site_tests/tast/control.av-video-weekly
index 5fe014e..70ab0e2 100644
--- a/server/site_tests/tast/control.av-video-weekly
+++ b/server/site_tests/tast/control.av-video-weekly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:av-analysis_per-week'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -25,6 +26,8 @@
 '''
 
 def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['("group:graphics" && graphics_weekly && graphics_video && graphics_av_analysis)'],
diff --git a/server/site_tests/tast/control.borealis-cq b/server/site_tests/tast/control.borealis-cq
new file mode 100644
index 0000000..ca6a666
--- /dev/null
+++ b/server/site_tests/tast/control.borealis-cq
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.borealis-cq'
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:borealis-cq'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast tests which run in suite:borealis-cq.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:borealis" && !informational && borealis_cq)'],
+                 ignore_test_failures=True, max_run_sec=3600,
+                 command_args=args,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.borealis-nightly b/server/site_tests/tast/control.borealis-nightly
new file mode 100644
index 0000000..1b1369c
--- /dev/null
+++ b/server/site_tests/tast/control.borealis-nightly
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.borealis-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:borealis_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast borealis test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based borealis tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:borealis" && borealis_nightly)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.borealis-perbuild b/server/site_tests/tast/control.borealis-perbuild
new file mode 100644
index 0000000..2f2c25b
--- /dev/null
+++ b/server/site_tests/tast/control.borealis-perbuild
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.borealis-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:borealis_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast borealis test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based borealis tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:borealis" && borealis_perbuild || "group:borealis" && informational && borealis_cq)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.borealis-weekly-shard-0 b/server/site_tests/tast/control.borealis-weekly-shard-0
new file mode 100644
index 0000000..8b23279
--- /dev/null
+++ b/server/site_tests/tast/control.borealis-weekly-shard-0
@@ -0,0 +1,39 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.borealis-weekly-shard-0'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:borealis_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast borealis test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based borealis tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:borealis" && borealis_weekly)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=4,
+                 shardindex=0,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.borealis-weekly-shard-1 b/server/site_tests/tast/control.borealis-weekly-shard-1
new file mode 100644
index 0000000..c0ab0f2
--- /dev/null
+++ b/server/site_tests/tast/control.borealis-weekly-shard-1
@@ -0,0 +1,39 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.borealis-weekly-shard-1'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:borealis_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast borealis test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based borealis tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:borealis" && borealis_weekly)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=4,
+                 shardindex=1,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.borealis-weekly-shard-2 b/server/site_tests/tast/control.borealis-weekly-shard-2
new file mode 100644
index 0000000..6a7a721
--- /dev/null
+++ b/server/site_tests/tast/control.borealis-weekly-shard-2
@@ -0,0 +1,39 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.borealis-weekly-shard-2'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:borealis_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast borealis test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based borealis tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:borealis" && borealis_weekly)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=4,
+                 shardindex=2,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.borealis-weekly-shard-3 b/server/site_tests/tast/control.borealis-weekly-shard-3
new file mode 100644
index 0000000..3dc66e5
--- /dev/null
+++ b/server/site_tests/tast/control.borealis-weekly-shard-3
@@ -0,0 +1,39 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.borealis-weekly-shard-3'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:borealis_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast borealis test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based borealis tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:borealis" && borealis_weekly)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=4,
+                 shardindex=3,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.camera-libcamera b/server/site_tests/tast/control.camera-libcamera
index 43c4939..16b1158 100644
--- a/server/site_tests/tast/control.camera-libcamera
+++ b/server/site_tests/tast/control.camera-libcamera
@@ -8,12 +8,13 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:camera-libcamera'
 MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
 
 DOC = '''
-Run the tast camera-postsubmit test suite.
+Run the tast camera-libcamera test suite.
 
 Tast is an integration-testing framework analagous to the test-running portion
 of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
diff --git a/server/site_tests/tast/control.camera-postsubmit b/server/site_tests/tast/control.camera-postsubmit
index e764f49..61b3302 100644
--- a/server/site_tests/tast/control.camera-postsubmit
+++ b/server/site_tests/tast/control.camera-postsubmit
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:camera-postsubmit'
 MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -24,7 +25,12 @@
 
 def run(machine):
     job.run_test('tast',
-                 host=hosts.create_host(machine), max_run_sec=10800,
-                 test_exprs=['("group:camera-postsubmit")'], command_args=args)
+                 host=hosts.create_host(machine),
+                 max_run_sec=10800,
+                 test_exprs=['('
+                             '("group:camera-postsubmit") || '
+                             '("group:mainline" && !informational && "name:camera.*")'
+                             ')'],
+                 command_args=args)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.camera-usb-qual b/server/site_tests/tast/control.camera-usb-qual
new file mode 100644
index 0000000..347ced3
--- /dev/null
+++ b/server/site_tests/tast/control.camera-usb-qual
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.camera-usb-qual'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:camera-usb-qual'
+MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the tast camera-usb-qual test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs a group of camera tests for USB camera qualification.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine), max_run_sec=10800,
+                 test_exprs=['("group:camera-usb-qual")'], command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.camerabox-facing-back b/server/site_tests/tast/control.camerabox-facing-back
index 518cc9c..6e3f559 100644
--- a/server/site_tests/tast/control.camerabox-facing-back
+++ b/server/site_tests/tast/control.camerabox-facing-back
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:camerabox_facing-back'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -26,10 +27,13 @@
 '''
 
 def run(machine):
+    # Setting "use_camera_box" to True passes the IP address of the chart
+    # device in the CameraBox to the tests so that local tests can connect
+    # to the chart directly.
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['("group:camerabox" && camerabox_facing_back)'],
                  ignore_test_failures=True, max_run_sec=60*60*3,
-                 command_args=args)
+                 use_camera_box=True, command_args=args)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.camerabox-facing-front b/server/site_tests/tast/control.camerabox-facing-front
index cc22c5a..fea849f 100644
--- a/server/site_tests/tast/control.camerabox-facing-front
+++ b/server/site_tests/tast/control.camerabox-facing-front
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:camerabox_facing-front'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -26,10 +27,13 @@
 '''
 
 def run(machine):
+    # Setting "use_camera_box" to True passes the IP address of the chart
+    # device in the CameraBox to the tests so that local tests can connect
+    # to the chart directly.
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['("group:camerabox" && camerabox_facing_front)'],
                  ignore_test_failures=True, max_run_sec=60*60*3,
-                 command_args=args)
+                 use_camera_box=True, command_args=args)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-a11y b/server/site_tests/tast/control.category-a11y
new file mode 100644
index 0000000..a370df5
--- /dev/null
+++ b/server/site_tests/tast/control.category-a11y
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-a11y'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-a11y'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "a11y" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "a11y" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:a11y.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
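
The per-category bvt-tast-cq control files in this change differ only in the category name embedded in the Tast test expression. A hypothetical helper (not part of these control files) that builds the same expression string:

# Hypothetical helper, not part of these control files: builds the Tast test
# expression that the per-category bvt-tast-cq control files pass to run_test.
def mainline_category_expr(category):
    """Select critical (non-informational) mainline tests in one category."""
    return ('('
            '"group:mainline" && '
            '!informational && '
            '"name:%s.*"'
            ')' % category)

assert mainline_category_expr('a11y') == (
        '("group:mainline" && !informational && "name:a11y.*")')
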
diff --git a/server/site_tests/tast/control.category-ad b/server/site_tests/tast/control.category-ad
new file mode 100644
index 0000000..43d81f8
--- /dev/null
+++ b/server/site_tests/tast/control.category-ad
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-ad'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-ad'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "ad" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "ad" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:ad.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-ambient b/server/site_tests/tast/control.category-ambient
new file mode 100644
index 0000000..c829197
--- /dev/null
+++ b/server/site_tests/tast/control.category-ambient
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-ambient'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-ambient'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "ambient" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "ambient" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:ambient.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-apps b/server/site_tests/tast/control.category-apps
new file mode 100644
index 0000000..0ec3b98
--- /dev/null
+++ b/server/site_tests/tast/control.category-apps
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-apps'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-apps'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "apps" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "apps" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:apps.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-arc b/server/site_tests/tast/control.category-arc
new file mode 100644
index 0000000..86f851f
--- /dev/null
+++ b/server/site_tests/tast/control.category-arc
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-arc'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-arc'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "arc" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "arc" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:arc.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-arcappcompat b/server/site_tests/tast/control.category-arcappcompat
new file mode 100644
index 0000000..423f858
--- /dev/null
+++ b/server/site_tests/tast/control.category-arcappcompat
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-arcappcompat'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-arcappcompat'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "arcappcompat" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "arcappcompat" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:arcappcompat.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-assistant b/server/site_tests/tast/control.category-assistant
new file mode 100644
index 0000000..2c59b91
--- /dev/null
+++ b/server/site_tests/tast/control.category-assistant
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-assistant'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-assistant'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "assistant" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "assistant" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:assistant.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-audio b/server/site_tests/tast/control.category-audio
new file mode 100644
index 0000000..5269474
--- /dev/null
+++ b/server/site_tests/tast/control.category-audio
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-audio'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-audio'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "audio" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "audio" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:audio.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-baserpc b/server/site_tests/tast/control.category-baserpc
new file mode 100644
index 0000000..e69a0f6
--- /dev/null
+++ b/server/site_tests/tast/control.category-baserpc
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-baserpc'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-baserpc'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "baserpc" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "baserpc" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:baserpc.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-biod b/server/site_tests/tast/control.category-biod
new file mode 100644
index 0000000..b62c646
--- /dev/null
+++ b/server/site_tests/tast/control.category-biod
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-biod'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-biod'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "biod" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "biod" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:biod.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-camera b/server/site_tests/tast/control.category-camera
new file mode 100644
index 0000000..efd41ba
--- /dev/null
+++ b/server/site_tests/tast/control.category-camera
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-camera'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-camera'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "camera" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "camera" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:camera.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-crash b/server/site_tests/tast/control.category-crash
new file mode 100644
index 0000000..78c97d3
--- /dev/null
+++ b/server/site_tests/tast/control.category-crash
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-crash'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-crash'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "crash" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "crash" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:crash.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-crostini b/server/site_tests/tast/control.category-crostini
new file mode 100644
index 0000000..1791733
--- /dev/null
+++ b/server/site_tests/tast/control.category-crostini
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-crostini'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-crostini'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "crostini" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "crostini" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:crostini.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=2700,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-cryptohome b/server/site_tests/tast/control.category-cryptohome
new file mode 100644
index 0000000..eb78cc6
--- /dev/null
+++ b/server/site_tests/tast/control.category-cryptohome
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-cryptohome'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-cryptohome'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "cryptohome" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "cryptohome" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:cryptohome.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-dbus b/server/site_tests/tast/control.category-dbus
new file mode 100644
index 0000000..f636653
--- /dev/null
+++ b/server/site_tests/tast/control.category-dbus
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-dbus'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-dbus'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "dbus" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "dbus" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:dbus.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-debugd b/server/site_tests/tast/control.category-debugd
new file mode 100644
index 0000000..646944f
--- /dev/null
+++ b/server/site_tests/tast/control.category-debugd
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-debugd'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-debugd'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "debugd" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "debugd" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:debugd.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-dev b/server/site_tests/tast/control.category-dev
new file mode 100644
index 0000000..3ba5a7d
--- /dev/null
+++ b/server/site_tests/tast/control.category-dev
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-dev'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-dev'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "dev" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "dev" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:dev.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-diagnostics b/server/site_tests/tast/control.category-diagnostics
new file mode 100644
index 0000000..ce8b2bf
--- /dev/null
+++ b/server/site_tests/tast/control.category-diagnostics
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-diagnostics'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-diagnostics'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "diagnostics" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "diagnostics" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:diagnostics.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-documentscanapi b/server/site_tests/tast/control.category-documentscanapi
new file mode 100644
index 0000000..2f93a8c
--- /dev/null
+++ b/server/site_tests/tast/control.category-documentscanapi
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-documentscanapi'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-documentscanapi'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "documentscanapi" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "documentscanapi" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:documentscanapi.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-enterprise b/server/site_tests/tast/control.category-enterprise
new file mode 100644
index 0000000..d96c0c6
--- /dev/null
+++ b/server/site_tests/tast/control.category-enterprise
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-enterprise'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-enterprise'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "enterprise" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "enterprise" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:enterprise.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-example b/server/site_tests/tast/control.category-example
new file mode 100644
index 0000000..f7096a2
--- /dev/null
+++ b/server/site_tests/tast/control.category-example
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-example'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-example'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "example" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "example" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:example.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-factory b/server/site_tests/tast/control.category-factory
new file mode 100644
index 0000000..42b391e
--- /dev/null
+++ b/server/site_tests/tast/control.category-factory
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-factory'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-factory'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "factory" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "factory" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:factory.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-familylink b/server/site_tests/tast/control.category-familylink
new file mode 100644
index 0000000..85e9784
--- /dev/null
+++ b/server/site_tests/tast/control.category-familylink
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-familylink'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-familylink'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "familylink" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "familylink" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:familylink.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-feedback b/server/site_tests/tast/control.category-feedback
new file mode 100644
index 0000000..ea04713
--- /dev/null
+++ b/server/site_tests/tast/control.category-feedback
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-feedback'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-feedback'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "feedback" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "feedback" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:feedback.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-filemanager b/server/site_tests/tast/control.category-filemanager
new file mode 100644
index 0000000..f742ca4
--- /dev/null
+++ b/server/site_tests/tast/control.category-filemanager
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-filemanager'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-filemanager'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "filemanager" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "filemanager" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:filemanager.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-firmware b/server/site_tests/tast/control.category-firmware
new file mode 100644
index 0000000..95dc24e
--- /dev/null
+++ b/server/site_tests/tast/control.category-firmware
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-firmware'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-firmware'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "firmware" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "firmware" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:firmware.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-gamepad b/server/site_tests/tast/control.category-gamepad
new file mode 100644
index 0000000..e9a15f4
--- /dev/null
+++ b/server/site_tests/tast/control.category-gamepad
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-gamepad'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-gamepad'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "gamepad" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "gamepad" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:gamepad.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-graphics b/server/site_tests/tast/control.category-graphics
new file mode 100644
index 0000000..d85bc20
--- /dev/null
+++ b/server/site_tests/tast/control.category-graphics
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-graphics'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-graphics'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "graphics" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "graphics" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:graphics.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-hardware b/server/site_tests/tast/control.category-hardware
new file mode 100644
index 0000000..e365521
--- /dev/null
+++ b/server/site_tests/tast/control.category-hardware
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-hardware'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-hardware'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "hardware" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "hardware" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:hardware.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-health b/server/site_tests/tast/control.category-health
new file mode 100644
index 0000000..82746d9
--- /dev/null
+++ b/server/site_tests/tast/control.category-health
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-health'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-health'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "health" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "health" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:health.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-hwsec b/server/site_tests/tast/control.category-hwsec
new file mode 100644
index 0000000..c95ed5f
--- /dev/null
+++ b/server/site_tests/tast/control.category-hwsec
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-hwsec'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-hwsec'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "hwsec" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "hwsec" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:hwsec.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-inputs b/server/site_tests/tast/control.category-inputs
new file mode 100644
index 0000000..3fc0d0b
--- /dev/null
+++ b/server/site_tests/tast/control.category-inputs
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-inputs'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-inputs'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "inputs" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "inputs" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:inputs.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-kernel b/server/site_tests/tast/control.category-kernel
new file mode 100644
index 0000000..f14f432
--- /dev/null
+++ b/server/site_tests/tast/control.category-kernel
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-kernel'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-kernel'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "kernel" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "kernel" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:kernel.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-lacros b/server/site_tests/tast/control.category-lacros
new file mode 100644
index 0000000..2a2dec4
--- /dev/null
+++ b/server/site_tests/tast/control.category-lacros
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-lacros'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-lacros'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "lacros" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "lacros" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:lacros.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-launcher b/server/site_tests/tast/control.category-launcher
new file mode 100644
index 0000000..e6d9211
--- /dev/null
+++ b/server/site_tests/tast/control.category-launcher
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-launcher'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-launcher'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "launcher" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "launcher" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:launcher.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-logs b/server/site_tests/tast/control.category-logs
new file mode 100644
index 0000000..e78502f
--- /dev/null
+++ b/server/site_tests/tast/control.category-logs
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-logs'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-logs'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "logs" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "logs" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:logs.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-meta b/server/site_tests/tast/control.category-meta
new file mode 100644
index 0000000..a2778f3
--- /dev/null
+++ b/server/site_tests/tast/control.category-meta
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-meta'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-meta, suite:py3-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "meta" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "meta" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:meta.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-mlservice b/server/site_tests/tast/control.category-mlservice
new file mode 100644
index 0000000..9cb326a
--- /dev/null
+++ b/server/site_tests/tast/control.category-mlservice
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-mlservice'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-mlservice'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "mlservice" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "mlservice" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:mlservice.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-multivm b/server/site_tests/tast/control.category-multivm
new file mode 100644
index 0000000..3bc596b
--- /dev/null
+++ b/server/site_tests/tast/control.category-multivm
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-multivm'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-multivm'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "multivm" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "multivm" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:multivm.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-nacl b/server/site_tests/tast/control.category-nacl
new file mode 100644
index 0000000..288591b
--- /dev/null
+++ b/server/site_tests/tast/control.category-nacl
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-nacl'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-nacl'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "nacl" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "nacl" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:nacl.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-nearbyshare b/server/site_tests/tast/control.category-nearbyshare
new file mode 100644
index 0000000..80cf355
--- /dev/null
+++ b/server/site_tests/tast/control.category-nearbyshare
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-nearbyshare'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-nearbyshare'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "nearbyshare" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "nearbyshare" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:nearbyshare.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-network b/server/site_tests/tast/control.category-network
new file mode 100644
index 0000000..a15f420
--- /dev/null
+++ b/server/site_tests/tast/control.category-network
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-network'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-network'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "network" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "network" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:network.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-notifications b/server/site_tests/tast/control.category-notifications
new file mode 100644
index 0000000..79fe008
--- /dev/null
+++ b/server/site_tests/tast/control.category-notifications
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-notifications'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-notifications'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "notifications" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "notifications" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:notifications.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-ocr b/server/site_tests/tast/control.category-ocr
new file mode 100644
index 0000000..ac0b6b3
--- /dev/null
+++ b/server/site_tests/tast/control.category-ocr
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-ocr'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-ocr'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "ocr" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "ocr" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:ocr.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-peripherals b/server/site_tests/tast/control.category-peripherals
new file mode 100644
index 0000000..53afde5
--- /dev/null
+++ b/server/site_tests/tast/control.category-peripherals
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-peripherals'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-peripherals'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "peripherals" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "peripherals" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:peripherals.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-platform b/server/site_tests/tast/control.category-platform
new file mode 100644
index 0000000..69487cf
--- /dev/null
+++ b/server/site_tests/tast/control.category-platform
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-platform'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-platform'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "platform" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "platform" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:platform.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-policy b/server/site_tests/tast/control.category-policy
new file mode 100644
index 0000000..ede2103
--- /dev/null
+++ b/server/site_tests/tast/control.category-policy
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-policy'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-policy'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "policy" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "policy" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:policy.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-power b/server/site_tests/tast/control.category-power
new file mode 100644
index 0000000..d9952bc
--- /dev/null
+++ b/server/site_tests/tast/control.category-power
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-power'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-power'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "power" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "power" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:power.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-printer b/server/site_tests/tast/control.category-printer
new file mode 100644
index 0000000..4b4f0ee
--- /dev/null
+++ b/server/site_tests/tast/control.category-printer
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-printer'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-printer'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "printer" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "printer" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:printer.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-qemu b/server/site_tests/tast/control.category-qemu
new file mode 100644
index 0000000..ea59ed4
--- /dev/null
+++ b/server/site_tests/tast/control.category-qemu
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-qemu'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-qemu'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "qemu" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "qemu" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:qemu.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-scanapp b/server/site_tests/tast/control.category-scanapp
new file mode 100644
index 0000000..dc6c895
--- /dev/null
+++ b/server/site_tests/tast/control.category-scanapp
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-scanapp'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-scanapp'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "scanapp" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "scanapp" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:scanapp.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-scanner b/server/site_tests/tast/control.category-scanner
new file mode 100644
index 0000000..597e3d7
--- /dev/null
+++ b/server/site_tests/tast/control.category-scanner
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-scanner'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-scanner'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "scanner" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "scanner" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:scanner.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-sched b/server/site_tests/tast/control.category-sched
new file mode 100644
index 0000000..57b9f27
--- /dev/null
+++ b/server/site_tests/tast/control.category-sched
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-sched'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-sched'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "sched" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "sched" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:sched.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-screenshot b/server/site_tests/tast/control.category-screenshot
new file mode 100644
index 0000000..3532cb8
--- /dev/null
+++ b/server/site_tests/tast/control.category-screenshot
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-screenshot'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-screenshot'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "screenshot" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "screenshot" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:screenshot.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-security b/server/site_tests/tast/control.category-security
new file mode 100644
index 0000000..2d16ebd
--- /dev/null
+++ b/server/site_tests/tast/control.category-security
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-security'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-security'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "security" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "security" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:security.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-session b/server/site_tests/tast/control.category-session
new file mode 100644
index 0000000..73da035
--- /dev/null
+++ b/server/site_tests/tast/control.category-session
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-session'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-session'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "session" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "session" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:session.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-shelf b/server/site_tests/tast/control.category-shelf
new file mode 100644
index 0000000..c20cddb
--- /dev/null
+++ b/server/site_tests/tast/control.category-shelf
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-shelf'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-shelf'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "shelf" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "shelf" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:shelf.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-storage b/server/site_tests/tast/control.category-storage
new file mode 100644
index 0000000..dff82ec
--- /dev/null
+++ b/server/site_tests/tast/control.category-storage
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-storage'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-storage'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "storage" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "storage" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:storage.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-typec b/server/site_tests/tast/control.category-typec
new file mode 100644
index 0000000..fa431d9
--- /dev/null
+++ b/server/site_tests/tast/control.category-typec
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-typec'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-typec'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "typec" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "typec" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:typec.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-u2fd b/server/site_tests/tast/control.category-u2fd
new file mode 100644
index 0000000..27deeab
--- /dev/null
+++ b/server/site_tests/tast/control.category-u2fd
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-u2fd'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-u2fd'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "u2fd" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "u2fd" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:u2fd.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-ui b/server/site_tests/tast/control.category-ui
new file mode 100644
index 0000000..7239a97
--- /dev/null
+++ b/server/site_tests/tast/control.category-ui
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-ui'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-ui'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "ui" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "ui" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:ui.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-video b/server/site_tests/tast/control.category-video
new file mode 100644
index 0000000..771ed76
--- /dev/null
+++ b/server/site_tests/tast/control.category-video
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-video'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-video'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "video" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "video" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:video.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-vm b/server/site_tests/tast/control.category-vm
new file mode 100644
index 0000000..97a0bcb
--- /dev/null
+++ b/server/site_tests/tast/control.category-vm
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-vm'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-vm'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "vm" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "vm" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:vm.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-webrtc b/server/site_tests/tast/control.category-webrtc
new file mode 100644
index 0000000..59b3002
--- /dev/null
+++ b/server/site_tests/tast/control.category-webrtc
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-webrtc'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-webrtc'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "webrtc" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "webrtc" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:webrtc.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-wifi b/server/site_tests/tast/control.category-wifi
new file mode 100644
index 0000000..abbd397
--- /dev/null
+++ b/server/site_tests/tast/control.category-wifi
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-wifi'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-wifi'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "wifi" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "wifi" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:wifi.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.category-wilco b/server/site_tests/tast/control.category-wilco
new file mode 100644
index 0000000..75a333b
--- /dev/null
+++ b/server/site_tests/tast/control.category-wilco
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.category-wilco'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-cq-wilco'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the critical Tast tests in the "wilco" category.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast tests in the "wilco" category that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:wilco.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-cq b/server/site_tests/tast/control.cellular-cq
new file mode 100644
index 0000000..a7d1db5
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-cq
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-cq'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular-cq'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast tests which run in suite:cellular-cq.
+
+"group:cellular" indicates tests that runs on devices with live cellular
+connection.  And its sub-attribute "cellular_cq" indicates tests run in
+suite:cellular-cq.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && cellular_cq)'],
+                 ignore_test_failures=False, max_run_sec=10800,
+                 command_args=args,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-modem-fw b/server/site_tests/tast/control.cellular-modem-fw
new file mode 100644
index 0000000..67b8d75
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-modem-fw
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-modem-fw'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_modem_fw'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_modem_fw": Cellular modem firmware tast tests to check firmware updates.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_modem_fw")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/tast/control.cellular-ota-amari-stable b/server/site_tests/tast/control.cellular-ota-amari-stable
new file mode 100644
index 0000000..54f15d5
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-amari-stable
@@ -0,0 +1,36 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-amari-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:amarisoft"
+
+DOC = '''
+Run the stable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be excluded from this suite.
+"cellular_amari_callbox": Attribute used to select tests that specifically run on DUTs with amari_callbox.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active"  || "cellular_amari_callbox" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-amari-unstable b/server/site_tests/tast/control.cellular-ota-amari-unstable
new file mode 100644
index 0000000..1fde434
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-amari-unstable
@@ -0,0 +1,36 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-amari-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:amarisoft"
+
+DOC = '''
+Run the unstable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be excluded from this suite.
+"cellular_amari_callbox": Attribute used to select tests that specifically run on DUTs with amari_callbox.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active"  || "cellular_amari_callbox" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-att-stable b/server/site_tests/tast/control.cellular-ota-att-stable
new file mode 100644
index 0000000..b8d516b
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-att-stable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-att-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:att"
+
+DOC = '''
+Run the stable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be excluded from this suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-att-unstable b/server/site_tests/tast/control.cellular-ota-att-unstable
new file mode 100644
index 0000000..514ab03
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-att-unstable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-att-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:att"
+
+DOC = '''
+Run the unstable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be included in this suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-avl b/server/site_tests/tast/control.cellular-ota-avl
new file mode 100644
index 0000000..a7c3bfe
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-avl
@@ -0,0 +1,33 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-avl'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_avl'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the AVL Tast tests which are required for AVL Qualification.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_ota_avl": Tests that are tagged with this attribute are included in this suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_ota_avl")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-docomo-stable b/server/site_tests/tast/control.cellular-ota-docomo-stable
new file mode 100644
index 0000000..99f07b1
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-docomo-stable
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-docomo-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:docomo"
+
+DOC = '''
+Run the stable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a SIM card.
+"cellular_unstable": Unstable tests that will be excluded from this suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-docomo-unstable b/server/site_tests/tast/control.cellular-ota-docomo-unstable
new file mode 100644
index 0000000..a4427bf
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-docomo-unstable
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-docomo-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:docomo"
+
+DOC = '''
+Run the unstable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be included in this suite.
+
+Tast is an integration-testing framework analagous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-ee-stable b/server/site_tests/tast/control.cellular-ota-ee-stable
new file mode 100644
index 0000000..710ba64
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-ee-stable
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-ee-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:ee"
+
+DOC = '''
+Run the stable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a SIM card.
+"cellular_unstable": Unstable tests that will be excluded from this suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-ee-unstable b/server/site_tests/tast/control.cellular-ota-ee-unstable
new file mode 100644
index 0000000..509c783
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-ee-unstable
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-ee-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:ee"
+
+DOC = '''
+Run the unstable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be included in this suite.
+
+Tast is an integration-testing framework analagous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-kddi-stable b/server/site_tests/tast/control.cellular-ota-kddi-stable
new file mode 100644
index 0000000..bd45cd2
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-kddi-stable
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-kddi-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:kddi"
+
+DOC = '''
+Run the stable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a SIM card.
+"cellular_unstable": Unstable tests that will be excluded from this suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-kddi-unstable b/server/site_tests/tast/control.cellular-ota-kddi-unstable
new file mode 100644
index 0000000..8e3b025
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-kddi-unstable
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-kddi-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:kddi"
+
+DOC = '''
+Run the unstable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be included in this suite.
+
+Tast is an integration-testing framework analagous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-multisim-stable b/server/site_tests/tast/control.cellular-ota-multisim-stable
new file mode 100644
index 0000000..1d67969
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-multisim-stable
@@ -0,0 +1,36 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-multisim-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:multisim"
+
+DOC = '''
+Run the stable Tast tests which verify Cellular multi-sim functionality.
+
+"group:cellular": indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_dual_active": indicates that the test runs on DUTs with a Cellular
+modem and multiple provisioned SIM slots.
+"cellular_unstable": Unstable tests that will be excluded from the stable suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_dual_active" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-multisim-unstable b/server/site_tests/tast/control.cellular-ota-multisim-unstable
new file mode 100644
index 0000000..90501e7
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-multisim-unstable
@@ -0,0 +1,36 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-multisim-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:multisim"
+
+DOC = '''
+Run the unstable Tast tests which verify Cellular multi-sim functionality.
+
+"group:cellular": indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_dual_active": indicates that the test runs on DUTs with a Cellular
+modem and multiple provisioned SIM slots.
+"cellular_unstable": Unstable tests that will be run as part of the flaky suite
+
+Tast is an integration-testing framework analagous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_dual_active" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-pinlock-stable b/server/site_tests/tast/control.cellular-ota-pinlock-stable
new file mode 100644
index 0000000..c563517
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-pinlock-stable
@@ -0,0 +1,49 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-pinlock-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:multisim"
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the stable Tast tests which verify Cellular SIM PIN and PUK lock functionality.
+
+"group:cellular": indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_pinlock": indicates that the test runs on DUTs with a Cellular
+modem and a SIM with PIN and PUK codes.
+"cellular_unstable": Unstable tests that will be excluded from the stable suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import json
+import tempfile
+import yaml
+
+def run(machine):
+    host = hosts.create_host(machine)
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as temp_file:
+        host_info = host.host_info_store.get()
+        yaml.safe_dump({'autotest_host_info_labels':
+                        json.dumps(host_info.labels)}, stream=temp_file)
+        temp_file.flush()
+        job.run_test('tast',
+                     host=host,
+                     test_exprs=['("group:cellular" && "cellular_sim_pinlock" && !"cellular_unstable")'],
+                     ignore_test_failures=True, max_run_sec=10800,
+                     command_args=args,
+                     varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
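Unlike the other cellular control files, the two pinlock variants pass a varsfile: the DUT's host-info labels are JSON-encoded under a single YAML key so the Tast tests can look up SIM details such as PIN and PUK codes at run time, and job.run_test stays inside the with block so the temporary file still exists when Tast reads it. A standalone sketch of what that file holds, using hypothetical label values:

    # Standalone sketch of the varsfile format produced above; the label values
    # are hypothetical.
    import json
    import tempfile
    import yaml

    labels = ['carrier:multisim', 'sim_slot_id:1']  # hypothetical host-info labels
    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w', delete=False) as f:
        yaml.safe_dump({'autotest_host_info_labels': json.dumps(labels)}, stream=f)

    with open(f.name) as varsfile:
        data = yaml.safe_load(varsfile)
    print(json.loads(data['autotest_host_info_labels']))
    # ['carrier:multisim', 'sim_slot_id:1']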
diff --git a/server/site_tests/tast/control.cellular-ota-pinlock-unstable b/server/site_tests/tast/control.cellular-ota-pinlock-unstable
new file mode 100644
index 0000000..8b38fee
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-pinlock-unstable
@@ -0,0 +1,49 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-pinlock-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:multisim"
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the unstable Tast tests which verify Cellular SIM PIN and PUK lock functionality.
+
+"group:cellular": indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_pinlock": indicates that the test runs on DUTs with a Cellular
+modem and a SIM with PIN and PUK codes.
+"cellular_unstable": Unstable tests that will be run as part of the flaky suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+import json
+import tempfile
+import yaml
+
+def run(machine):
+    host = hosts.create_host(machine)
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as temp_file:
+        host_info = host.host_info_store.get()
+        yaml.safe_dump({'autotest_host_info_labels':
+                        json.dumps(host_info.labels)}, stream=temp_file)
+        temp_file.flush()
+        job.run_test('tast',
+                     host=host,
+                     test_exprs=['("group:cellular" && "cellular_sim_pinlock" && "cellular_unstable")'],
+                     ignore_test_failures=True, max_run_sec=10800,
+                     command_args=args,
+                     varsfiles=[temp_file.name])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-prod-esim-stable b/server/site_tests/tast/control.cellular-ota-prod-esim-stable
new file mode 100644
index 0000000..1ebba4b
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-prod-esim-stable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-prod-esim-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:esim"
+
+DOC = '''
+Run the stable Tast tests which verify cellular prod esim functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_prod_esim": Used to select tests that require a prod esim card.
+!"cellular_unstable": Select only stable tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_prod_esim" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-prod-esim-unstable b/server/site_tests/tast/control.cellular-ota-prod-esim-unstable
new file mode 100644
index 0000000..7872392
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-prod-esim-unstable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-prod-esim-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:esim"
+
+DOC = '''
+Run the unstable Tast tests which verify cellular prod esim functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_prod_esim": Used to select tests that require a prod esim card.
+"cellular_unstable": Select only unstable tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_prod_esim" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-roamsim-stable b/server/site_tests/tast/control.cellular-ota-roamsim-stable
new file mode 100644
index 0000000..716244d
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-roamsim-stable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-roamsim-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:roamsim"
+
+DOC = '''
+Run stable Tast tests which verify cellular roaming functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_roaming": Used to select tests that require a roaming sim card.
+!"cellular_unstable": Select only stable tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_roaming" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-roamsim-unstable b/server/site_tests/tast/control.cellular-ota-roamsim-unstable
new file mode 100644
index 0000000..0b08357
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-roamsim-unstable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-roamsim-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:roamsim"
+
+DOC = '''
+Run the unstable Tast tests which verify cellular roaming functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_roaming": Used to select tests that require a roaming sim card.
+"cellular_unstable": Select only unstable tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_roaming" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-softbank-stable b/server/site_tests/tast/control.cellular-ota-softbank-stable
new file mode 100644
index 0000000..32ebdc3
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-softbank-stable
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-softbank-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:softbank"
+
+DOC = '''
+Run the stable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a SIM card.
+"cellular_unstable": Unstable tests that will be excluded from this suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-softbank-unstable b/server/site_tests/tast/control.cellular-ota-softbank-unstable
new file mode 100644
index 0000000..190a1c6
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-softbank-unstable
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-softbank-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:softbank"
+
+DOC = '''
+Run the unstable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be included in this suite.
+
+Tast is an integration-testing framework analagous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-test-esim-stable b/server/site_tests/tast/control.cellular-ota-test-esim-stable
new file mode 100644
index 0000000..c9ecb6f
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-test-esim-stable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-test-esim-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:testesim"
+
+DOC = '''
+Run the stable Tast tests which verify cellular test esim functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_test_esim": Used to select tests that require a test esim card.
+!"cellular_unstable": Select only stable tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_test_esim" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-test-esim-unstable b/server/site_tests/tast/control.cellular-ota-test-esim-unstable
new file mode 100644
index 0000000..1efbe76
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-test-esim-unstable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-test-esim-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:testesim"
+
+DOC = '''
+Run the unstable Tast tests which verify cellular test esim functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_test_esim": Used to select tests that require a test esim card.
+"cellular_unstable": Select only unstable tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_test_esim" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-tmobile-stable b/server/site_tests/tast/control.cellular-ota-tmobile-stable
new file mode 100644
index 0000000..54290fb
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-tmobile-stable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-tmobile-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:tmobile"
+
+DOC = '''
+Run the stable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a SIM card.
+"cellular_unstable": Unstable tests that will be excluded from this suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-tmobile-unstable b/server/site_tests/tast/control.cellular-ota-tmobile-unstable
new file mode 100644
index 0000000..e2a39db
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-tmobile-unstable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-tmobile-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:tmobile"
+
+DOC = '''
+Run the unstable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be included in this suite.
+
+Tast is an integration-testing framework analagous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-verizon-stable b/server/site_tests/tast/control.cellular-ota-verizon-stable
new file mode 100644
index 0000000..4a7ee9a
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-verizon-stable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-verizon-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:verizon"
+
+DOC = '''
+Run the stable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a SIM card.
+"cellular_unstable": Unstable tests that will be excluded from this suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-verizon-unstable b/server/site_tests/tast/control.cellular-ota-verizon-unstable
new file mode 100644
index 0000000..870a20d
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-verizon-unstable
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-verizon-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:verizon"
+
+DOC = '''
+Run the unstable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be included in this suite.
+
+Tast is an integration-testing framework analagous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-vodafone-stable b/server/site_tests/tast/control.cellular-ota-vodafone-stable
new file mode 100644
index 0000000..fe271eb
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-vodafone-stable
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-vodafone-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:vodafone"
+
+DOC = '''
+Run the stable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a SIM card.
+"cellular_unstable": Unstable tests that will be excluded from this suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && !"cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cellular-ota-vodafone-unstable b/server/site_tests/tast/control.cellular-ota-vodafone-unstable
new file mode 100644
index 0000000..7cb9ad3
--- /dev/null
+++ b/server/site_tests/tast/control.cellular-ota-vodafone-unstable
@@ -0,0 +1,35 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.cellular-ota-vodafone-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cellular_ota_flaky'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+DEPENDENCIES = "carrier:vodafone"
+
+DOC = '''
+Run the unstable Tast tests which verify basic Cellular functionality.
+
+"group:cellular" indicates that the test runs on DUTs with a Cellular modem.
+"cellular_sim_active": Used to select tests that require a sim card .
+"cellular_unstable": Unstable tests that will be included in this suite.
+
+Tast is an integration-testing framework analagous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cellular" && "cellular_sim_active" && "cellular_unstable")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.companion-dut-manual b/server/site_tests/tast/control.companion-dut-manual
new file mode 100644
index 0000000..1b8373a
--- /dev/null
+++ b/server/site_tests/tast/control.companion-dut-manual
@@ -0,0 +1,52 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The following library is not needed because this test
+# is intended to run in the developer environment only.
+# from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'seewaifu'
+NAME = 'tast.companion_dut_manual'
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+# The following DEPENDENCIES variable is not applicable to this test
+# because this test is intended to run in the developer environment only.
+# DEPENDENCIES = 'servo_state:WORKING'
+#
+# The following ATTRIBUTES variable is not applicable to this test
+# because this test is intended to run in the developer environment only.
+# This test should not be run in any CQ.
+# ATTRIBUTES = 'suite:bvt-tast-cq'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Verify that the companion DUT flag reaches Tast.
+'''
+
+# Since we are not using servo for this test, we do not need the following
+# three lines.
+# args_dict = utils.args_to_dict(args)
+# assert 'servo_state:WORKING' in DEPENDENCIES
+# servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    companions = hosts.create_companion_hosts(companion_hosts)
+    job.run_test('tast',
+                 # We do not need to specify servo_args because we only
+                 # run this test in the developer environment.
+                 # host=hosts.create_host(machine, servo_args=servo_args),
+                 host=hosts.create_host(machine),
+                 test_exprs=['meta.CompanionDUTs'],
+                 ignore_test_failures=False, max_run_sec=1800,
+                 companion_duts={'cd1':companions[0], 'cd2':companions[1]},
+                 command_args=args,
+                 # Setting clear_tpm to False because we only run this test
+                 # in the developer environment.
+                 clear_tpm=False)
+
+parallel_simple(run, machines)
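The companion_duts argument maps a short role name to a secondary DUT so meta.CompanionDUTs can confirm both roles reach Tast. A rough sketch of the role-to-host pairing, with plain hostnames standing in for the host objects returned by hosts.create_companion_hosts (the hostnames are hypothetical, and how the tast CLI consumes the mapping is not shown here):

    # Hypothetical illustration of the 'cd1'/'cd2' role mapping built above.
    companion_hostnames = ['chromeos-dut-2.local', 'chromeos-dut-3.local']
    companion_duts = {'cd%d' % (i + 1): name
                      for i, name in enumerate(companion_hostnames)}
    print(companion_duts)
    # {'cd1': 'chromeos-dut-2.local', 'cd2': 'chromeos-dut-3.local'}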
diff --git a/server/site_tests/tast/control.critical-android b/server/site_tests/tast/control.critical-android
deleted file mode 100644
index e4364f4..0000000
--- a/server/site_tests/tast/control.critical-android
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.critical-android'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-# Android tests can also be broken by Chrome and OS changes, so these tests need
-# to run in both PFQs and the CQ.
-ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-android-pfq, suite:bvt-tast-chrome-pfq'
-MAX_RESULT_SIZE_KB = 256 * 1024
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast must-pass ARC test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Android-dependent Tast-based tests that are required to pass
-against a remote DUT. It fails if any individual Tast tests fail.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    # TODO(crbug.com/992303): Fix android condition.
-    # Make sure if you modify this condition to update ATP side config
-    # in http://cs/file:tast_critical.gcl
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['('
-                             '"group:mainline" && '
-                             '!informational && '
-                             '!"dep:plugin_vm" && '
-                             '"dep:android*"'
-                             ')'],
-                 download_data_lazily=False,
-                 ignore_test_failures=False, max_run_sec=3600,
-                 command_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-android-shard-0 b/server/site_tests/tast/control.critical-android-shard-0
new file mode 100644
index 0000000..9e20ae3
--- /dev/null
+++ b/server/site_tests/tast/control.critical-android-shard-0
@@ -0,0 +1,57 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-android-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# Android tests can also be broken by Chrome and OS changes, so these tests need
+# to run in both PFQs and the CQ.
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-android-pfq, suite:bvt-tast-arc, suite:bvt-tast-chrome-pfq, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass ARC test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Android-dependent Tast-based tests that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    # If you modify this condition, make sure to update the ATP-side config
+    # in http://cs/file:tast_critical.gcl
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '!"name:crostini.*" && '
+                             '"dep:android*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
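These sharded control files also pull servo connection details out of the extra args handed to the job. A rough stand-in for what utils.args_to_dict does with those 'key=value' strings (the real helper does more normalization and validation; the values below are hypothetical):

    # Rough stand-in for turning 'key=value' job args into a dict, as the
    # args_dict/servo_args lines above depend on. Values are hypothetical.
    def args_to_dict(arg_list):
        parsed = {}
        for arg in arg_list:
            if '=' in arg:
                key, value = arg.split('=', 1)
                parsed[key.strip()] = value.strip()
        return parsed

    job_args = ['servo_host=chromeos1-row1-rack1-labstation', 'servo_port=9999']
    print(args_to_dict(job_args))
    # {'servo_host': 'chromeos1-row1-rack1-labstation', 'servo_port': '9999'}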
diff --git a/server/site_tests/tast/control.critical-android-shard-1 b/server/site_tests/tast/control.critical-android-shard-1
new file mode 100644
index 0000000..e8220ce
--- /dev/null
+++ b/server/site_tests/tast/control.critical-android-shard-1
@@ -0,0 +1,57 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-android-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# Android tests can also be broken by Chrome and OS changes, so these tests need
+# to run in both PFQs and the CQ.
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-android-pfq, suite:bvt-tast-arc, suite:bvt-tast-chrome-pfq, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass ARC test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Android-dependent Tast-based tests that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    # If you modify this condition, make sure to update the ATP-side config
+    # in http://cs/file:tast_critical.gcl
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '!"name:crostini.*" && '
+                             '"dep:android*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-android-shard-2 b/server/site_tests/tast/control.critical-android-shard-2
new file mode 100644
index 0000000..12a9c90
--- /dev/null
+++ b/server/site_tests/tast/control.critical-android-shard-2
@@ -0,0 +1,57 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-android-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# Android tests can also be broken by Chrome and OS changes, so these tests need
+# to run in both PFQs and the CQ.
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-android-pfq, suite:bvt-tast-arc, suite:bvt-tast-chrome-pfq, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass ARC test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Android-dependent Tast-based tests that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    # If you modify this condition, make sure to update the ATP-side config
+    # in http://cs/file:tast_critical.gcl
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '!"name:crostini.*" && '
+                             '"dep:android*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
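The three critical-android shards run the same test expression with totalshards=3 and distinct shardindex values, so the matched tests are partitioned across jobs rather than repeated. A simple sketch of one deterministic way such a split could work (Tast's actual sharding scheme may differ; the test names below are hypothetical):

    # Illustration only: a round-robin split of a sorted test list into three
    # shards, mirroring totalshards=3 / shardindex=N above.
    def shard(tests, totalshards, shardindex):
        ordered = sorted(tests)
        return [t for i, t in enumerate(ordered) if i % totalshards == shardindex]

    tests = ['arc.Boot', 'arc.PlayStore', 'arc.Standalone', 'arc.WindowManager']
    for idx in range(3):
        print(idx, shard(tests, 3, idx))
    # 0 ['arc.Boot', 'arc.WindowManager']
    # 1 ['arc.PlayStore']
    # 2 ['arc.Standalone']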
diff --git a/server/site_tests/tast/control.critical-chrome b/server/site_tests/tast/control.critical-chrome
deleted file mode 100644
index b74f07d..0000000
--- a/server/site_tests/tast/control.critical-chrome
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.critical-chrome'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-# Android changes can't break Chrome tests (so we don't need to run them in the
-# Android PFQ), but OS changes can (so we need to run them in the CQ).
-ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-chrome-pfq'
-MAX_RESULT_SIZE_KB = 256 * 1024
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast must-pass Chrome test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Chrome-dependent Tast-based tests that are required to pass
-against a remote DUT. It fails if any individual Tast tests fail.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    # TODO(crbug.com/992303): Fix android condition.
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['('
-                             '"group:mainline" && '
-                             '!informational && '
-                             '"dep:chrome" && '
-                             '!"dep:plugin_vm" && '
-                             '!"dep:android*"'
-                             ')'],
-                 download_data_lazily=False,
-                 ignore_test_failures=False, max_run_sec=3600,
-                 command_args=args,
-                 clear_tpm=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-chrome-shard-0 b/server/site_tests/tast/control.critical-chrome-shard-0
new file mode 100644
index 0000000..49ed14c
--- /dev/null
+++ b/server/site_tests/tast/control.critical-chrome-shard-0
@@ -0,0 +1,56 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-chrome-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# Android changes can't break Chrome tests (so we don't need to run them in the
+# Android PFQ), but OS changes can (so we need to run them in the CQ).
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-chrome-pfq, suite:py3-beta, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass Chrome test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Chrome-dependent Tast-based tests that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"dep:chrome" && '
+                             '!"name:crostini.*" && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=7200,
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-chrome-shard-1 b/server/site_tests/tast/control.critical-chrome-shard-1
new file mode 100644
index 0000000..6557094
--- /dev/null
+++ b/server/site_tests/tast/control.critical-chrome-shard-1
@@ -0,0 +1,56 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-chrome-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# Android changes can't break Chrome tests (so we don't need to run them in the
+# Android PFQ), but OS changes can (so we need to run them in the CQ).
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-chrome-pfq, suite:py3-beta, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass Chrome test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Chrome-dependent Tast-based tests that are required to pass
+against a remote DUT. It fails if any individual Tast tests fail.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"dep:chrome" && '
+                             '!"name:crostini.*" && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=7200,
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-chrome-shard-2 b/server/site_tests/tast/control.critical-chrome-shard-2
new file mode 100644
index 0000000..1cd0a6a
--- /dev/null
+++ b/server/site_tests/tast/control.critical-chrome-shard-2
@@ -0,0 +1,56 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-chrome-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# Android changes can't break Chrome tests (so we don't need to run them in the
+# Android PFQ), but OS changes can (so we need to run them in the CQ).
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-chrome-pfq, suite:py3-beta, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass Chrome test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Chrome-dependent Tast-based tests that are required to pass
+against a remote DUT. It fails if any individual Tast test fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"dep:chrome" && '
+                             '!"name:crostini.*" && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=7200,
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-crostini-shard-0 b/server/site_tests/tast/control.critical-crostini-shard-0
new file mode 100644
index 0000000..bf70e06
--- /dev/null
+++ b/server/site_tests/tast/control.critical-crostini-shard-0
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-crostini-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# Android changes can't break Chrome tests (so we don't need to run them in the
+# Android PFQ), but OS changes can (so we need to run them in the CQ).
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-chrome-pfq, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass Crostini test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Crostini-dependent Tast-based tests that are required to pass
+against a remote DUT. It fails if any individual Tast test fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:crostini.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
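
The test_exprs string is a Tast attribute expression: for the crostini shards it keeps only critical mainline tests whose names match crostini.*. A toy evaluation of that expression (not Tast's real parser; the test names and attribute sets below are made up for illustration):

    import fnmatch

    def selected(test_name, attrs):
        """Toy check for '"group:mainline" && !informational && "name:crostini.*"'."""
        in_mainline = 'group:mainline' in attrs                 # "group:mainline"
        critical = 'informational' not in attrs                 # !informational
        name_match = fnmatch.fnmatch(test_name, 'crostini.*')   # "name:crostini.*"
        return in_mainline and critical and name_match

    print(selected('crostini.Basic', {'group:mainline'}))                        # True
    print(selected('crostini.AudioBasic', {'group:mainline', 'informational'}))  # False
    print(selected('ui.TabletMode', {'group:mainline'}))                         # False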
diff --git a/server/site_tests/tast/control.critical-crostini-shard-1 b/server/site_tests/tast/control.critical-crostini-shard-1
new file mode 100644
index 0000000..7d21b21
--- /dev/null
+++ b/server/site_tests/tast/control.critical-crostini-shard-1
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-crostini-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# Android changes can't break Chrome tests (so we don't need to run them in the
+# Android PFQ), but OS changes can (so we need to run them in the CQ).
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-chrome-pfq, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass Crostini test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Crostini-dependent Tast-based tests that are required to pass
+against a remote DUT. It fails if any individual Tast test fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:crostini.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-crostini-shard-2 b/server/site_tests/tast/control.critical-crostini-shard-2
new file mode 100644
index 0000000..5cf335d
--- /dev/null
+++ b/server/site_tests/tast/control.critical-crostini-shard-2
@@ -0,0 +1,54 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-crostini-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# Android changes can't break Chrome tests (so we don't need to run them in the
+# Android PFQ), but OS changes can (so we need to run them in the CQ).
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-chrome-pfq, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass Crostini test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Crostini-dependent Tast-based tests that are required to pass
+against a remote DUT. It fails if any individual Tast test fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '"name:crostini.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-parallels b/server/site_tests/tast/control.critical-parallels
deleted file mode 100644
index 2b5e1aa..0000000
--- a/server/site_tests/tast/control.critical-parallels
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.critical-parallels'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-# TODO(crbug.com/1154072): Remove from 'suite:bvt-tast-cq' once the
-# parallels suite is scheduled.
-ATTRIBUTES = 'suite:bvt-tast-cq, suite:bvt-tast-parallels-cq'
-MAX_RESULT_SIZE_KB = 256 * 1024
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast must-pass Parallels test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Parallels-dependent Tast-based tests that are required to pass
-against a remote DUT. It fails if any individual Tast tests fail.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['('
-                             '"group:mainline" && '
-                             '!informational && '
-                             '"dep:plugin_vm"'
-                             ')'],
-                 download_data_lazily=False,
-                 ignore_test_failures=False, max_run_sec=3600,
-                 command_args=args,
-                 clear_tpm=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-parallels-shard-0 b/server/site_tests/tast/control.critical-parallels-shard-0
new file mode 100644
index 0000000..9745bf2
--- /dev/null
+++ b/server/site_tests/tast/control.critical-parallels-shard-0
@@ -0,0 +1,54 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-parallels-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-parallels-cq'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass test suite for booted Parallels.
+
+This test may only be run on hardware licensed for specific versions of
+Windows and Office. Contact parallels-cros@google.com for details.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based tests using a booted Parallels VM that are required
+to pass against a remote DUT. It fails if any individual Tast test fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:parallels_mainline" && '
+                             '!informational'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args,
+                 clear_tpm=True,
+                 varslist=['pita.windowsLicensed=true'],
+                 retries=2)
+
+parallel_simple(run, machines)
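
The Parallels shards differ from the Chrome and Crostini ones mainly in the varslist argument, which hands runtime variables to Tast; pita.windowsLicensed=true asserts that the DUT carries the required Windows/Office licensing. A sketch of how such entries are commonly turned into Tast -var flags, offered only as an assumption about the general shape (the real tast.py wrapper builds its own command line):

    def vars_to_flags(varslist):
        """Convert 'name=value' runtime variables into tast-style -var flags."""
        flags = []
        for entry in varslist:
            name, sep, value = entry.partition('=')
            if not sep or not name:
                raise ValueError('malformed runtime variable: %r' % entry)
            flags.append('-var=%s=%s' % (name, value))
        return flags

    print(vars_to_flags(['pita.windowsLicensed=true']))
    # ['-var=pita.windowsLicensed=true']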
diff --git a/server/site_tests/tast/control.critical-parallels-shard-1 b/server/site_tests/tast/control.critical-parallels-shard-1
new file mode 100644
index 0000000..d1c1190
--- /dev/null
+++ b/server/site_tests/tast/control.critical-parallels-shard-1
@@ -0,0 +1,54 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-parallels-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-parallels-cq'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass test suite for booted Parallels.
+
+This test may only be run on hardware licensed for specific versions of
+Windows and Office. Contact parallels-cros@google.com for details.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based tests using a booted Parallels VM that are required
+to pass against a remote DUT. It fails if any individual Tast test fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:parallels_mainline" && '
+                             '!informational'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args,
+                 clear_tpm=True,
+                 varslist=['pita.windowsLicensed=true'],
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-parallels-shard-2 b/server/site_tests/tast/control.critical-parallels-shard-2
new file mode 100644
index 0000000..d2b7455
--- /dev/null
+++ b/server/site_tests/tast/control.critical-parallels-shard-2
@@ -0,0 +1,54 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-parallels-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-parallels-cq'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass test suite for booted Parallels.
+
+This test may only be run on hardware licensed for specific versions of
+Windows and Office. Contact parallels-cros@google.com for details.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based tests using a booted Parallels VM that are required
+to pass against a remote DUT. It fails if any individual Tast test fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:parallels_mainline" && '
+                             '!informational'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args,
+                 clear_tpm=True,
+                 varslist=['pita.windowsLicensed=true'],
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-system b/server/site_tests/tast/control.critical-system
deleted file mode 100644
index eda5205..0000000
--- a/server/site_tests/tast/control.critical-system
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.client.common_lib import utils
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.critical-system'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-DEPENDENCIES = 'servo_state:WORKING'
-# System tests can't be broken by Android or Chrome changes, so these tests
-# don't run in the PFQs.
-ATTRIBUTES = 'suite:bvt-tast-cq'
-MAX_RESULT_SIZE_KB = 256 * 1024
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast must-pass system test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs non-Chrome-dependent and non-Android-dependent Tast-based tests
-that are required to pass against a remote DUT. It fails if any individual Tast
-tests fail.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-args_dict = utils.args_to_dict(args)
-assert 'servo_state:WORKING' in DEPENDENCIES
-servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
-
-def run(machine):
-    job.run_test('tast',
-                 host=hosts.create_host(machine, servo_args=servo_args),
-                 test_exprs=['('
-                             '"group:mainline" && '
-                             '!informational && '
-                             '!"dep:chrome" && '
-                             '!"dep:plugin_vm" && '
-                             '!"dep:android*"'
-                             ')'],
-                 download_data_lazily=False,
-                 ignore_test_failures=False, max_run_sec=3600,
-                 command_args=args,
-                 clear_tpm=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-system-shard-0 b/server/site_tests/tast/control.critical-system-shard-0
new file mode 100644
index 0000000..2d129fd
--- /dev/null
+++ b/server/site_tests/tast/control.critical-system-shard-0
@@ -0,0 +1,56 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-system-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# System tests can't be broken by Android or Chrome changes, so these tests
+# don't run in the PFQs.
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass system test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs non-Chrome-dependent and non-Android-dependent Tast-based tests
+that are required to pass against a remote DUT. It fails if any individual Tast
+test fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '!"dep:chrome" && '
+                             '!"name:crostini.*" && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
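
Every sharded control file parses the suite-level args into a dictionary and pulls the servo connection parameters out of it before creating the host. A simplified stand-in for that flow, assuming whitespace- or comma-separated key=value tokens and using servo_host/servo_port purely as example keys (the real utils.args_to_dict and CrosHost.get_servo_arguments live in autotest and may differ in detail):

    import re

    def args_to_dict_sketch(args):
        """Roughly what utils.args_to_dict does: flatten key=value tokens."""
        result = {}
        for arg in args:
            for token in re.split(r'[,\s]+', arg.strip()):
                if '=' in token:
                    key, value = token.split('=', 1)
                    result[key.lower()] = value
        return result

    args_dict = args_to_dict_sketch(['servo_host=labstation1 servo_port=9999'])
    # get_servo_arguments() then extracts only the servo-related keys.
    servo_args = {k: v for k, v in args_dict.items() if k.startswith('servo_')}
    print(servo_args)  # {'servo_host': 'labstation1', 'servo_port': '9999'}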
diff --git a/server/site_tests/tast/control.critical-system-shard-1 b/server/site_tests/tast/control.critical-system-shard-1
new file mode 100644
index 0000000..a7cd675
--- /dev/null
+++ b/server/site_tests/tast/control.critical-system-shard-1
@@ -0,0 +1,56 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-system-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# System tests can't be broken by Android or Chrome changes, so these tests
+# don't run in the PFQs.
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass system test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs non-Chrome-dependent and non-Android-dependent Tast-based tests
+that are required to pass against a remote DUT. It fails if any individual Tast
+test fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '!"dep:chrome" && '
+                             '!"name:crostini.*" && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.critical-system-shard-2 b/server/site_tests/tast/control.critical-system-shard-2
new file mode 100644
index 0000000..f260d00
--- /dev/null
+++ b/server/site_tests/tast/control.critical-system-shard-2
@@ -0,0 +1,56 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.critical-system-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+# System tests can't be broken by Android or Chrome changes, so these tests
+# don't run in the PFQs.
+ATTRIBUTES = 'suite:bvt-tast-cq, suite:cft-beta'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast must-pass system test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs non-Chrome-dependent and non-Android-dependent Tast-based tests
+that are required to pass against a remote DUT. It fails if any individual Tast
+test fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '!"dep:chrome" && '
+                             '!"name:crostini.*" && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-memory-nightly b/server/site_tests/tast/control.crosbolt-memory-nightly
index 2d168c9..f9a2eca 100644
--- a/server/site_tests/tast/control.crosbolt-memory-nightly
+++ b/server/site_tests/tast/control.crosbolt-memory-nightly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:crosbolt_perf_nightly'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
diff --git a/server/site_tests/tast/control.crosbolt-nightly b/server/site_tests/tast/control.crosbolt-nightly
deleted file mode 100644
index 4d6df66..0000000
--- a/server/site_tests/tast/control.crosbolt-nightly
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.crosbolt-nightly'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:crosbolt_perf_nightly'
-MAX_RESULT_SIZE_KB = 1024 * 1024
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast crosbolt performance test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Tast-based crosbolt performance tests against a remote DUT.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['('
-                             '"group:crosbolt" && '
-                             'crosbolt_nightly && '
-                             '!"dep:plugin_vm"'
-                             ')'],
-                 ignore_test_failures=True,
-                 max_run_sec=14400,  # 4 hours.
-                 command_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-nightly-shard-0 b/server/site_tests/tast/control.crosbolt-nightly-shard-0
new file mode 100644
index 0000000..6ad1c42
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-nightly-shard-0
@@ -0,0 +1,41 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-nightly-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_nightly'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_nightly'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=14400,  # 4 hours.
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-nightly-shard-1 b/server/site_tests/tast/control.crosbolt-nightly-shard-1
new file mode 100644
index 0000000..a98c5d7
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-nightly-shard-1
@@ -0,0 +1,41 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-nightly-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_nightly'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_nightly'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=14400,  # 4 hours.
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-nightly-shard-2 b/server/site_tests/tast/control.crosbolt-nightly-shard-2
new file mode 100644
index 0000000..c5a22ce
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-nightly-shard-2
@@ -0,0 +1,41 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-nightly-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_nightly'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_nightly'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=14400,  # 4 hours.
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-parallels-nightly b/server/site_tests/tast/control.crosbolt-parallels-nightly
index 1ea5880..b86e379 100644
--- a/server/site_tests/tast/control.crosbolt-parallels-nightly
+++ b/server/site_tests/tast/control.crosbolt-parallels-nightly
@@ -6,16 +6,18 @@
 NAME = 'tast.crosbolt-parallels-nightly'
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
-# TODO(crbug.com/1154072): Remove from 'suite:crosbolt_perf_nightly' once the
-# parallels suite is scheduled.
-ATTRIBUTES = 'suite:crosbolt_perf_nightly, suite:crosbolt_perf_parallels_nightly'
+ATTRIBUTES = 'suite:crosbolt_perf_parallels_nightly'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
 
 DOC = '''
-Run the Tast crosbolt performance test suite for Parallels.
+Run the Tast crosbolt performance test suite for booted Parallels.
+
+This test may only be run on hardware licensed for specific versions of
+Windows and Office. Contact parallels-cros@google.com for details.
 
 Tast is an integration-testing framework analogous to the test-running portion
 of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
@@ -30,12 +32,12 @@
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['('
-                             '"group:crosbolt" && '
-                             'crosbolt_nightly && '
-                             '"dep:plugin_vm"'
+                             '"group:parallels_crosbolt" && '
+                             'parallels_crosbolt_nightly'
                              ')'],
                  ignore_test_failures=True,
                  max_run_sec=7200,  # 2 hours.
-                 command_args=args)
+                 command_args=args,
+                 varslist=['pita.windowsLicensed=true'])
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-parallels-perbuild b/server/site_tests/tast/control.crosbolt-parallels-perbuild
index f5c9431..94504fd 100644
--- a/server/site_tests/tast/control.crosbolt-parallels-perbuild
+++ b/server/site_tests/tast/control.crosbolt-parallels-perbuild
@@ -6,16 +6,18 @@
 NAME = 'tast.crosbolt-parallels-perbuild'
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
-# TODO(crbug.com/1154072): Remove from 'suite:crosbolt_perf_perbuild' once the
-# parallels suite is scheduled.
-ATTRIBUTES = 'suite:crosbolt_perf_perbuild, suite:crosbolt_perf_parallels_perbuild'
+ATTRIBUTES = 'suite:crosbolt_perf_parallels_perbuild'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
 
 DOC = '''
-Run the Tast crosbolt performance test suite for Parallels.
+Run the Tast crosbolt performance test suite for booted Parallels.
+
+This test may only be run on hardware licensed for specific versions of
+Windows and Office. Contact parallels-cros@google.com for details.
 
 Tast is an integration-testing framework analogous to the test-running portion
 of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
@@ -30,12 +32,12 @@
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['('
-                             '"group:crosbolt" && '
-                             'crosbolt_perbuild && '
-                             '"dep:plugin_vm"'
+                             '"group:parallels_crosbolt" && '
+                             'parallels_crosbolt_perbuild'
                              ')'],
                  ignore_test_failures=True,
                  max_run_sec=7200,  # 2 hours.
-                 command_args=args)
+                 command_args=args,
+                 varslist=['pita.windowsLicensed=true'])
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-parallels-weekly b/server/site_tests/tast/control.crosbolt-parallels-weekly
index a67d24e..8cf0768 100644
--- a/server/site_tests/tast/control.crosbolt-parallels-weekly
+++ b/server/site_tests/tast/control.crosbolt-parallels-weekly
@@ -6,16 +6,18 @@
 NAME = 'tast.crosbolt-parallels-weekly'
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
-# TODO(crbug.com/1154072): Remove from 'suite:crosbolt_perf_weekly' once the
-# parallels suite is scheduled.
-ATTRIBUTES = 'suite:crosbolt_perf_weekly, suite:crosbolt_perf_parallels_weekly'
+ATTRIBUTES = 'suite:crosbolt_perf_parallels_weekly'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
 
 DOC = '''
-Run the Tast crosbolt performance test suite for Parallels.
+Run the Tast crosbolt performance test suite for booted Parallels.
+
+This test may only be run on hardware licensed for specific versions of
+Windows and Office. Contact parallels-cros@google.com for details.
 
 Tast is an integration-testing framework analogous to the test-running portion
 of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
@@ -30,12 +32,12 @@
     job.run_test('tast',
                  host=hosts.create_host(machine),
                  test_exprs=['('
-                             '"group:crosbolt" && '
-                             'crosbolt_weekly && '
-                             '"dep:plugin_vm"'
+                             '"group:parallels_crosbolt" && '
+                             'parallels_crosbolt_weekly'
                              ')'],
                  ignore_test_failures=True,
                  max_run_sec=7200,  # 2 hours.
-                 command_args=args)
+                 command_args=args,
+                 varslist=['pita.windowsLicensed=true'])
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-perbuild b/server/site_tests/tast/control.crosbolt-perbuild
deleted file mode 100644
index 1243b41..0000000
--- a/server/site_tests/tast/control.crosbolt-perbuild
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.crosbolt-perbuild'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
-MAX_RESULT_SIZE_KB = 1024 * 1024
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast crosbolt performance test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Tast-based crosbolt performance tests against a remote DUT.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['('
-                             '"group:crosbolt" && '
-                             'crosbolt_perbuild && '
-                             '!"dep:android*" && '
-                             '!"dep:plugin_vm"'
-                             ')'],
-                 ignore_test_failures=True,
-                 max_run_sec=21600,  # 6 hours.
-                 command_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-perbuild-android b/server/site_tests/tast/control.crosbolt-perbuild-android
deleted file mode 100644
index e8a5ead..0000000
--- a/server/site_tests/tast/control.crosbolt-perbuild-android
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.crosbolt-perbuild-android'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
-MAX_RESULT_SIZE_KB = 1024 * 1024
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast crosbolt performance test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Tast-based crosbolt performance tests against a remote DUT.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['('
-                             '"group:crosbolt" && '
-                             'crosbolt_perbuild && '
-                             '"dep:android*" && '
-                             '!"dep:plugin_vm"'
-                             ')'],
-                 ignore_test_failures=True,
-                 max_run_sec=21600,  # 6 hours.
-                 command_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-perbuild-android-shard-0 b/server/site_tests/tast/control.crosbolt-perbuild-android-shard-0
new file mode 100644
index 0000000..3f9ed05
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-perbuild-android-shard-0
@@ -0,0 +1,42 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-perbuild-android-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_perbuild && '
+                             '"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=21600,  # 6 hours.
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-perbuild-android-shard-1 b/server/site_tests/tast/control.crosbolt-perbuild-android-shard-1
new file mode 100644
index 0000000..aa23ace
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-perbuild-android-shard-1
@@ -0,0 +1,42 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-perbuild-android-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_perbuild && '
+                             '"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=21600,  # 6 hours.
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-perbuild-android-shard-2 b/server/site_tests/tast/control.crosbolt-perbuild-android-shard-2
new file mode 100644
index 0000000..1b777ca
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-perbuild-android-shard-2
@@ -0,0 +1,42 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-perbuild-android-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_perbuild && '
+                             '"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=21600,  # 6 hours.
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-perbuild-shard-0 b/server/site_tests/tast/control.crosbolt-perbuild-shard-0
new file mode 100644
index 0000000..9a1b6e0
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-perbuild-shard-0
@@ -0,0 +1,42 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-perbuild-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_perbuild && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=21600,  # 6 hours.
+                 totalshards=5,
+                 shardindex=0,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-perbuild-shard-1 b/server/site_tests/tast/control.crosbolt-perbuild-shard-1
new file mode 100644
index 0000000..7e9701d
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-perbuild-shard-1
@@ -0,0 +1,42 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-perbuild-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_perbuild && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=21600,  # 6 hours.
+                 totalshards=5,
+                 shardindex=1,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-perbuild-shard-2 b/server/site_tests/tast/control.crosbolt-perbuild-shard-2
new file mode 100644
index 0000000..b18fcd6
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-perbuild-shard-2
@@ -0,0 +1,42 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-perbuild-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_perbuild && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=21600,  # 6 hours.
+                 totalshards=5,
+                 shardindex=2,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-perbuild-shard-3 b/server/site_tests/tast/control.crosbolt-perbuild-shard-3
new file mode 100644
index 0000000..eb055e5
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-perbuild-shard-3
@@ -0,0 +1,42 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-perbuild-shard-3'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_perbuild && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=21600,  # 6 hours.
+                 totalshards=5,
+                 shardindex=3,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.crosbolt-perbuild-shard-4 b/server/site_tests/tast/control.crosbolt-perbuild-shard-4
new file mode 100644
index 0000000..cbc8b88
--- /dev/null
+++ b/server/site_tests/tast/control.crosbolt-perbuild-shard-4
@@ -0,0 +1,42 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.crosbolt-perbuild-shard-4'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast crosbolt performance test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based crosbolt performance tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:crosbolt" && '
+                             'crosbolt_perbuild && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=21600,  # 6 hours.
+                 totalshards=5,
+                 shardindex=4,
+                 command_args=args)
+
+parallel_simple(run, machines)
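The crosbolt-perbuild shard control files (totalshards=5) differ only in their shardindex; the Tast expression, timeout, and shard count are shared. A condensed, purely illustrative sketch of that shared parameterization (not tooling that exists in this change; `job`, `hosts`, and `args` are the usual control-file globals):

    # Hypothetical helper: builds the run() used by each crosbolt-perbuild shard.
    CROSBOLT_EXPR = ('('
                     '"group:crosbolt" && '
                     'crosbolt_perbuild && '
                     '!"dep:android*"'
                     ')')

    def make_run(shardindex, totalshards=5):
        def run(machine):
            job.run_test('tast',
                         host=hosts.create_host(machine),
                         test_exprs=[CROSBOLT_EXPR],
                         ignore_test_failures=True,
                         max_run_sec=21600,  # 6 hours.
                         totalshards=totalshards,
                         shardindex=shardindex,
                         command_args=args)
        return run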
diff --git a/server/site_tests/tast/control.crosbolt-weekly b/server/site_tests/tast/control.crosbolt-weekly
index 0e8ad70..3a31bf8 100644
--- a/server/site_tests/tast/control.crosbolt-weekly
+++ b/server/site_tests/tast/control.crosbolt-weekly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:crosbolt_perf_weekly'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -29,8 +30,7 @@
                  host=hosts.create_host(machine),
                  test_exprs=['('
                              '"group:crosbolt" && '
-                             'crosbolt_weekly && '
-                             '!"dep:plugin_vm"'
+                             'crosbolt_weekly'
                              ')'],
                  ignore_test_failures=True, max_run_sec=10800,
                  command_args=args)
diff --git a/server/site_tests/tast/control.cross-device b/server/site_tests/tast/control.cross-device
new file mode 100644
index 0000000..fec18c1
--- /dev/null
+++ b/server/site_tests/tast/control.cross-device
@@ -0,0 +1,43 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)'
+NAME = 'tast.cross-device'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cross-device'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''Run the Tast Cross Device test suite.'''
+
+from autotest_lib.server.cros.crossdevice import cross_device_util
+
+def run(machine):
+    # Wi-Fi details that the chromebook will connect to.
+    ssid = 'nearbysharing_1'
+    password = 'password'
+
+    # Get host objects for each device.
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    phone = companions[0]
+
+    # Configure devices for crossdevice tests.
+    cross_device_util.connect_to_wifi(host, ssid, password)
+    ip_address = phone.setup_for_cross_device_tests()
+
+    # Pass the phone's adb-over-tcp "serial" (e.g. 192.168.0.30:5555) to Tast as a global var.
+    ip_address_arg = 'crossdevice.PhoneIP=%s:5555' % ip_address
+
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:cross-device")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args,
+                 varslist=[ip_address_arg])
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cross-device-chrome-from-tls b/server/site_tests/tast/control.cross-device-chrome-from-tls
new file mode 100644
index 0000000..909646e
--- /dev/null
+++ b/server/site_tests/tast/control.cross-device-chrome-from-tls
@@ -0,0 +1,90 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib.error import TestFail
+from autotest_lib.server.cros import chrome_sideloader
+from autotest_lib.server import utils
+
+AUTHOR = 'ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)'
+NAME = 'tast.cross-device-chrome-from-tls'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+# This mount point controls the version of chrome to use
+CHROME_MOUNT_POINT = '/opt/google/chrome'
+# Location to put the sideloaded chrome artifacts
+CHROME_DIR = '/usr/local/chrome'
+
+DOC = '''
+Runs Cross Device scenarios with a custom chrome binary.
+
+Cross Device scenarios require two chromebooks.
+This control file is a generic wrapper for running cross-device tests
+from Chromium builders and the Chromium Skylab recipe.
+Chromium builders create chrome binaries and artifacts and upload them to GCS.
+The chrome binaries are in turn provisioned to both DUTs through TLS and
+are used in tests.
+
+This control file expects a tast_expr or tast_expr_b64 argument to determine
+the set of Tast tests to be executed.
+
+Example for tast_expr: test_that --args="tast_expr=nearbyshare.SmokeMultiDUTUI"
+
+Example for tast_expr_b64:
+  In Python:
+    tast_expr = '("group:nearby-share-cq")'
+    # Yields 'KCJncm91cDpuZWFyYnktc2hhcmUtY3EiKQ=='
+    tast_expr_b64 = base64.b64encode(tast_expr.encode('utf-8')).decode('ascii')
+  Then in Autotest CLI:
+    test_that --args="tast_expr_b64=KCJncm91cDpuZWFyYnktc2hhcmUtY3EiKQ=="
+
+More details at go/lacros-on-skylab.
+
+'''
+
+
+def run(machine):
+    tast_expr = chrome_sideloader.get_tast_expr(utils.args_to_dict(args))
+
+    primary_dut = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+
+    if not companions:
+        raise TestFail('Missing companion hosts')
+
+    secondary_dut = companions[0]
+
+    logging.info("Running %s on primary_dut: %s and companion_host:%s with Tast expression:%s",
+                 NAME, primary_dut, secondary_dut, tast_expr)
+
+    # Set up both DUTs to use the chrome binary from TLS
+    for host in [primary_dut, secondary_dut]:
+        chrome_sideloader.setup_host(
+            host, CHROME_DIR, CHROME_MOUNT_POINT)
+
+    # Register a clean up callback to reset the chrome mount.
+    def cleanup():
+        for host in [primary_dut, secondary_dut]:
+            chrome_sideloader.cleanup_host(
+                host, CHROME_DIR, CHROME_MOUNT_POINT)
+    job.add_post_run_hook(cleanup)
+
+    job.run_test('tast',
+                 host=primary_dut,
+                 test_exprs=[tast_expr],
+                 download_data_lazily=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 companion_duts={'cd1': secondary_dut},
+                 command_args=args
+                 )
+
+parallel_simple(run, machines)
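For reference, the base64 round trip described in the DOC above can be reproduced with a small standalone snippet (the expression string is only an example):

    import base64

    tast_expr = '("group:nearby-share-cq")'  # example Tast expression
    tast_expr_b64 = base64.b64encode(tast_expr.encode('utf-8')).decode('ascii')
    # Pass the result to test_that as --args="tast_expr_b64=<value>".
    assert base64.b64decode(tast_expr_b64).decode('utf-8') == tast_expr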
diff --git a/server/site_tests/tast/control.cross-device.local b/server/site_tests/tast/control.cross-device.local
new file mode 100644
index 0000000..73b8ff7
--- /dev/null
+++ b/server/site_tests/tast/control.cross-device.local
@@ -0,0 +1,61 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)'
+NAME = 'tast.cross-device.local'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''Run the Tast Cross Device test suite with Skylab's Android support locally.
+
+This control file will set up the chromebook and Android phone for you if you want to run locally.
+Locally means kicking off via test_that to run against either:
+1. One of the scheduling units / RF boxes in the lab.
+2. At your desk when using the chromebook as a fake labstation.
+You can use #2 to test the e2e adb-over-wifi flow without having a labstation at home.
+NOTE: Labstations store their adb keys at /var/lib/android_keys (and this gets wiped during login), so you need to manually accept the setup's adb connection.
+
+You need to specify below the Wi-Fi network details for the network that the chromebook and phone should share.
+
+These args are expected to be passed to test_that:
+--args="phone_station=$PHONE_HOST android_serial=$ANDROID_SERIAL_NUMBER"
+
+When using port forwarding to localhost, the expected args are:
+--args="phone_station=localhost android_station_ssh_port=$FORWARDED_PORT android_serial=$ANDROID_SERIAL_NUMBER"
+
+'''
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.crossdevice import cross_device_util
+
+def run(machine):
+    # Wi-Fi details that the chromebook will connect to.
+    ssid = '<SET NETWORK NAME>'
+    password = '<SET PASSWORD>'
+
+    # Get host objects for each device.
+    host = hosts.create_host(machine)
+    args_dict = utils.args_to_dict(args)
+    android_args = hosts.AndroidHost.get_android_arguments(args_dict)
+    phone = hosts.AndroidHost('local_phone', android_args=android_args)
+
+    # Configure devices for crossdevice tests.
+    cross_device_util.connect_to_wifi(host, ssid, password)
+    ip_address = phone.setup_for_cross_device_tests()
+
+    # Pass the phone's adb-over-tcp "serial" (e.g. 192.168.0.30:5555) to Tast as a global var.
+    ip_address_arg = 'crossdevice.PhoneIP=%s:5555' % ip_address
+
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:cross-device")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args,
+                 varslist=[ip_address_arg])
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.cuj b/server/site_tests/tast/control.cuj
new file mode 100644
index 0000000..779a61b
--- /dev/null
+++ b/server/site_tests/tast/control.cuj
@@ -0,0 +1,24 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+AUTHOR = 'ChromeOS Performance Metrics Team (chromeos-perfmetrics-eng@google.com)'
+NAME = 'tast.cuj'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:cuj'
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+This suite is used to run CUJ Tast tests against a remote DUT.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:cuj")'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/tast/control.dpanel-end2end b/server/site_tests/tast/control.dpanel-end2end
new file mode 100644
index 0000000..816fd6b
--- /dev/null
+++ b/server/site_tests/tast/control.dpanel-end2end
@@ -0,0 +1,31 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.dpanel-end2end'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:dpanel-end2end_per-build'
+MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run DPanel end2end Tast tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This runs the group of DPanel end2end tests.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine), max_run_sec=10800,
+                 test_exprs=['("group:dpanel-end2end")'], command_args=args)
+
+parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/tast/control.drivefs-cq b/server/site_tests/tast/control.drivefs-cq
new file mode 100644
index 0000000..bac2621
--- /dev/null
+++ b/server/site_tests/tast/control.drivefs-cq
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'chromeos-files-syd@google.com'
+NAME = 'tast.drivefs-cq'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:drivefs-cq'
+MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Runs all the tests that have the Tast drivefs-cq attribute. These tests are run
+when a new DriveFS uprev CL is submitted.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:drivefs-cq")'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 command_args=args,
+                 clear_tpm=True,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.dutserver b/server/site_tests/tast/control.dutserver
new file mode 100644
index 0000000..a25ec09
--- /dev/null
+++ b/server/site_tests/tast/control.dutserver
@@ -0,0 +1,49 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This test will only run in the CFT/F20 environment.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.dutserver'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+# This test belongs to no suite; it is intended mainly for manual invocation
+# via test_that.
+ATTRIBUTES = ''
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run arbitrary Tast tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs arbitrary Tast-based tests specified by args given to test_that.
+This test might be useful for debugging, to simulate Tast test runs invoked via
+Autotest.
+
+Examples:
+    test_that --dut_servers=${DUTSERVER} ${DUT} tast.dutserver
+'''
+
+command_args, varslist = tast.split_arguments(args)
+
+def run(machine):
+    args_dict = utils.args_to_dict(command_args)
+    varslist.append('servers.dut=:%s' % dut_servers[0])
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 ignore_test_failures=False, max_run_sec=3600,
+                 test_exprs=['meta.LocalPass'],
+                 command_args=command_args,
+                 varslist=varslist,
+                 is_cft=is_cft)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.enrollment b/server/site_tests/tast/control.enrollment
index 095a5e8..9d22fd4 100644
--- a/server/site_tests/tast/control.enrollment
+++ b/server/site_tests/tast/control.enrollment
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:enrollment_per-build'
 MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
diff --git a/server/site_tests/tast/control.filterfile b/server/site_tests/tast/control.filterfile
new file mode 100644
index 0000000..ee8f43f
--- /dev/null
+++ b/server/site_tests/tast/control.filterfile
@@ -0,0 +1,39 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.testfilterfile'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Serves as an example of how to use the testfilterfile flag for Tast.
+
+Examples:
+    test_that --args=test_filter_files=filter1.txt,filter2.txt ${DUT} tast.testfilterfile
+'''
+
+command_args, varslist = tast.split_arguments(args)
+
+def run(machine):
+    args_dict = utils.args_to_dict(command_args)
+    test_filter_files = []
+    if 'test_filter_files' in args_dict:
+        test_filter_files = args_dict['test_filter_files'].split(',')
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:meta")'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 test_filter_files=test_filter_files,
+                 command_args=args)
+
+parallel_simple(run, machines)
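As a rough illustration of how the test_filter_files argument flows into the list handed to the tast wrapper above (this assumes utils.args_to_dict parses space-separated key=value tokens, which is how the other control files in this change use it):

    # Hypothetical standalone equivalent of the parsing done in run() above.
    args_str = 'test_filter_files=filter1.txt,filter2.txt'
    args_dict = dict(kv.split('=', 1) for kv in args_str.split())
    test_filter_files = args_dict.get('test_filter_files', '').split(',')
    # -> ['filter1.txt', 'filter2.txt']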
diff --git a/server/site_tests/tast/control.fingerprint-cq b/server/site_tests/tast/control.fingerprint-cq
new file mode 100644
index 0000000..5587399
--- /dev/null
+++ b/server/site_tests/tast/control.fingerprint-cq
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.fingerprint-cq'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:fingerprint-cq'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast tests which run in suite:fingerprint-cq.
+"group:fingerprint-cq" indicates fingerprint tests that run in CQ (critical).
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:fingerprint-cq")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args,
+                 retries=2)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.fingerprint-mcu b/server/site_tests/tast/control.fingerprint-mcu
deleted file mode 100644
index d181965..0000000
--- a/server/site_tests/tast/control.fingerprint-mcu
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.fingerprint-mcu'
-TIME = 'SHORT'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:fingerprint-mcu'
-MAX_RESULT_SIZE_KB = 1024 * 1024
-
-DOC = '''
-Run the Tast tests which run in suite:fingerprint-mcu.
-
-"group:fingerprint-mcu" indicates tests that run on standalone fingerprint MCU
-boards.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['("group:fingerprint-mcu")'],
-                 ignore_test_failures=True, max_run_sec=10800,
-                 command_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.fingerprint-mcu-dragonclaw b/server/site_tests/tast/control.fingerprint-mcu-dragonclaw
new file mode 100644
index 0000000..f5b4796
--- /dev/null
+++ b/server/site_tests/tast/control.fingerprint-mcu-dragonclaw
@@ -0,0 +1,36 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.fingerprint-mcu-dragonclaw'
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:fingerprint-mcu-dragonclaw'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast tests which run in suite:fingerprint-mcu-dragonclaw.
+
+"group:fingerprint-mcu-dragonclaw" indicates tests that run on standalone Dragonclaw
+boards.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:fingerprint-mcu" && '
+                             '"fingerprint-mcu_dragonclaw"'
+                             ')'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-Ti50Demo b/server/site_tests/tast/control.firmware-Ti50Demo
new file mode 100644
index 0000000..99c6d5e
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-Ti50Demo
@@ -0,0 +1,63 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.hosts import gsc_devboard_host
+
+AUTHOR = 'Chromium OS BaseOS Engprod'
+NAME = 'tast.firmware-Ti50Demo'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run firmware.Ti50Demo test.
+
+Args:
+    ti50_build: ti50 build location -> gs://chromeos-releases/firmware-ti50-postsubmit/<build>
+    service_debugger_serial: serial of debugger connected to devboard, defaults to the first one found on the container
+    service_ip: devboard service ip, default is to start a new container
+    service_port: devboard service port, defaults to 39999
+
+Examples:
+
+    # To run test on a devboard connected to workstation:
+    #   Set up SatLab gcr access according to official instructions.
+    #   docker installed outside of chroot
+    #   inside chroot, run src/third_party/autotest/files/utils/install_docker_chroot.sh
+    #   devboard connected to workstation, fake dut ssh forwarded on localhost:2222
+    #   outside chroot, start dockerd on local tcp port: dockerd -H tcp://127.0.0.1:2375
+    test_that --autotest_dir <path/to/autotest/files> --fast --args "ti50_build=gs://chromeos-releases/firmware-ti50-postsubmit/<build> service_debugger_serial=<serial>"  localhost:2222 tast.firmware-Ti50Demo
+
+    # To run test on a devboard connected to SatLab:
+    #   Have a complete SatLab set up according to official instructions.
+    #   inside chroot, run src/third_party/autotest/files/utils/install_docker_chroot.sh
+    #   devboard connected to SatLab, fake dut ssh forwarded on localhost:2222
+    #   outside chroot, forward SatLab docker host: ssh -L 2377:192.168.231.1:2375 -N <mysatlab>
+    #   start service manually on SatLab, note the printed service ip and port: DOCKER_HOST=127.0.0.1:2377 python -m utils.start_gsc_devboard_host
+    #   outside chroot, forward service port: ssh -L 39999:<service ip>:<service port> -N <mysatlab>
+    test_that --autotest_dir <path/to/autotest/files> --fast --args "ti50_build=gs://chromeos-releases/firmware-ti50-postsubmit/<build> service_ip=<service ip> service_port=<service port>" localhost:2222 tast.firmware-Ti50Demo
+'''
+
+def run(machine):
+    args_dict = utils.args_to_dict(args)
+    devboard = hosts.create_host(machine, host_class=gsc_devboard_host.GSCDevboardHost, **args_dict)
+
+    with devboard.service_context() as service_ep:
+        varslist = ['devboardsvc=' + service_ep]
+        buildurl = args_dict.get('ti50_build')
+        if buildurl is not None:
+            varslist += ['buildurl=' + buildurl]
+        job.run_test('tast',
+                     host=hosts.create_host(machine),
+                     max_run_sec=10800,
+                     test_exprs=['firmware.Ti50Demo'],
+                     varslist=varslist,
+                     command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-Ti50SystemTestImage b/server/site_tests/tast/control.firmware-Ti50SystemTestImage
new file mode 100644
index 0000000..e7bc08a
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-Ti50SystemTestImage
@@ -0,0 +1,63 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.hosts import gsc_devboard_host
+
+AUTHOR = 'Chromium OS BaseOS Engprod'
+NAME = 'tast.firmware-Ti50SystemTestImage'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run firmware.Ti50SystemTestImage test.
+
+Args:
+    ti50_build: ti50 build location -> gs://chromeos-releases/firmware-ti50-postsubmit/<build>
+    service_debugger_serial: serial of debugger connected to devboard, defaults to the first one found on the container
+    service_ip: devboard service ip, default is to start a new container
+    service_port: devboard service port, defaults to 39999
+
+Examples:
+
+    # To run test on a devboard connected to workstation:
+    #   Set up SatLab gcr access according to official instructions.
+    #   docker installed outside of chroot
+    #   inside chroot, run src/third_party/autotest/files/utils/install_docker_chroot.sh
+    #   devboard connected to workstation, fake dut ssh forwarded on localhost:2222
+    #   outside chroot, start dockerd on local tcp port: dockerd -H tcp://127.0.0.1:2375
+    test_that --autotest_dir <path/to/autotest/files> --fast --args "ti50_build=gs://chromeos-releases/firmware-ti50-postsubmit/<build> service_debugger_serial=<serial>"  localhost:2222 tast.firmware-Ti50SystemTestImage
+
+    # To run test on a devboard connected to SatLab:
+    #   Have a complete SatLab set up according to official instructions.
+    #   inside chroot, run src/third_party/autotest/files/utils/install_docker_chroot.sh
+    #   devboard connected to SatLab, fake dut ssh forwarded on localhost:2222
+    #   outside chroot, forward SatLab docker host: ssh -L 2377:192.168.231.1:2375 -N <mysatlab>
+    #   start service manually on SatLab, note the printed service ip and port: DOCKER_HOST=127.0.0.1:2377 python -m utils.start_gsc_devboard_host
+    #   outside chroot, forward service port: ssh -L 39999:<service ip>:<service port> -N <mysatlab>
+    test_that --autotest_dir <path/to/autotest/files> --fast --args "ti50_build=gs://chromeos-releases/firmware-ti50-postsubmit/<build> service_ip=<service ip> service_port=<service port>" localhost:2222 tast.firmware-Ti50SystemTestImage
+'''
+
+def run(machine):
+    args_dict = utils.args_to_dict(args)
+    devboard = hosts.create_host(machine, host_class=gsc_devboard_host.GSCDevboardHost, **args_dict)
+
+    with devboard.service_context() as service_ep:
+        varslist = ['devboardsvc=' + service_ep]
+        buildurl = args_dict.get('ti50_build')
+        if buildurl is not None:
+            varslist += ['buildurl=' + buildurl]
+        job.run_test('tast',
+                     host=hosts.create_host(machine),
+                     max_run_sec=10800,
+                     test_exprs=['firmware.Ti50SystemTestImage'],
+                     varslist=varslist,
+                     command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-bios b/server/site_tests/tast/control.firmware-bios
new file mode 100644
index 0000000..de29e7a
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-bios
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-bios'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:faft_bios, suite:faft_bios_ro_qual, suite:faft_bios_rw_qual, suite:faft_bios_tot'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for AP (BIOS) firmware.
+
+The tests are part of 'group:firmware'. The 'firmware_bios' sub-attribute
+limits it to AP bios tests.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_bios)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=command_args,
+                 varslist=varslist)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-bios-lvl1 b/server/site_tests/tast/control.firmware-bios-lvl1
new file mode 100644
index 0000000..b579da4
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-bios-lvl1
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-bios-lvl1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:faft_lv1'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for AP (BIOS) firmware.
+
+The tests are part of 'group:firmware'. The 'firmware_level1' sub-attribute
+limits it to level 1 AP bios tests.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_level1)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=command_args,
+                 varslist=varslist)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-bios-lvl2 b/server/site_tests/tast/control.firmware-bios-lvl2
new file mode 100644
index 0000000..942ab1a
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-bios-lvl2
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-bios-lvl2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:faft_lv2'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for AP (BIOS) firmware.
+
+The tests are part of 'group:firmware'. The 'firmware_level2' sub-attribute
+limits it to level 2 AP bios tests.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_level2)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=command_args,
+                 varslist=varslist)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-ccd b/server/site_tests/tast/control.firmware-ccd
new file mode 100644
index 0000000..453d6be
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-ccd
@@ -0,0 +1,42 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-ccd'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING, servo_component:ccd_cr50'
+ATTRIBUTES = 'suite:faft_ec, suite:faft_ec_fw_qual'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for EC firmware that require a servo with CCD.
+
+The tests are part of 'group:firmware'. The 'firmware_ec' and
+'firmware_ccd' sub-attributes limit it to tests which require CCD.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+assert 'servo_component:ccd_cr50' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_ec && firmware_ccd)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=command_args,
+                 varslist=varslist)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-cr50 b/server/site_tests/tast/control.firmware-cr50
index d188579..95ca60d 100644
--- a/server/site_tests/tast/control.firmware-cr50
+++ b/server/site_tests/tast/control.firmware-cr50
@@ -3,14 +3,17 @@
 # found in the LICENSE file.
 
 from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
 
 AUTHOR = 'Chromium OS team'
 NAME = 'tast.firmware-cr50'
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
 DEPENDENCIES = 'servo_state:WORKING'
-ATTRIBUTES = 'suite:faft_cr50_prepvt_tast, suite:faft_cr50_pvt_tast'
+ATTRIBUTES = 'suite:faft_cr50_prepvt, suite:faft_cr50_pvt'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -21,22 +24,24 @@
 The tests are part of 'group:firmware', and the 'firmware_cr50' sub-attribute
 limits it to those that cover the Google Security Chip.
 
-Tast is an integration-testing framework analagous to the test-running portion
+Tast is an integration-testing framework analogous to the test-running portion
 of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/
 for more information.
 
 See http://go/tast-failures for information about investigating failures.
 '''
 
-args_dict = utils.args_to_dict(args)
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
 assert 'servo_state:WORKING' in DEPENDENCIES
 servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
 
 def run(machine):
     job.run_test('tast',
                  host=hosts.create_host(machine, servo_args=servo_args),
-                 test_exprs=['("group:firmware" && firmware_cr50)'],
+                 test_exprs=['("group:firmware" && firmware_cr50 && !firmware_ccd)'],
                  ignore_test_failures=True, max_run_sec=10800,
-                 command_args=args)
+                 command_args=command_args,
+                 varslist=varslist)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-cr50_ccd b/server/site_tests/tast/control.firmware-cr50_ccd
new file mode 100644
index 0000000..89c0299
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-cr50_ccd
@@ -0,0 +1,48 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.firmware-cr50_ccd'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING, servo_component:ccd_cr50'
+ATTRIBUTES = 'suite:faft_cr50_prepvt, suite:faft_cr50_pvt'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast tests for Google Security Chip firmware (Cr50).
+
+The tests are part of 'group:firmware', and the 'firmware_cr50' sub-attribute
+limits it to those that cover the Google Security Chip. Additionally the
+'firmware_ccd' attribute limits this to tests that require CCD.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/
+for more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_cr50 && firmware_ccd)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=command_args,
+                 varslist=varslist)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-detachable b/server/site_tests/tast/control.firmware-detachable
new file mode 100644
index 0000000..b831e73
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-detachable
@@ -0,0 +1,45 @@
+# Copyright 2022 The ChromiumOS Authors.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-detachables'
+TIME = 'LENGTHY'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:faft_detachable'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for EC firmware.
+
+The tests are part of 'group:firmware'. The 'firmware_detachable'
+sub-attribute limits it to detachable tests. Once the test is stable change
+the attribute to one of the others.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_detachable)'],
+                 ignore_test_failures=True,
+                 max_run_sec=16200, # 4h30m
+                 command_args=command_args,
+                 varslist=varslist,
+                 ephemeraldevserver='false',
+                 )
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-ec b/server/site_tests/tast/control.firmware-ec
new file mode 100644
index 0000000..f57cf8b
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-ec
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-ec'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:faft_ec, suite:faft_ec_fw_qual'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for EC firmware.
+
+The tests are part of 'group:firmware'. The 'firmware_ec' sub-attribute
+limits it to EC tests.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_ec && !firmware_ccd && !firmware_usb)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=command_args,
+                 varslist=varslist)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-experimental b/server/site_tests/tast/control.firmware-experimental
index 1c957c6..d49f537 100644
--- a/server/site_tests/tast/control.firmware-experimental
+++ b/server/site_tests/tast/control.firmware-experimental
@@ -3,6 +3,7 @@
 # found in the LICENSE file.
 
 from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
 
 AUTHOR = 'Chromium OS Firmware EngProd team'
 NAME = 'tast.firmware-experimental'
@@ -11,6 +12,8 @@
 DEPENDENCIES = 'servo_state:WORKING'
 ATTRIBUTES = 'suite:faft_experimental'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -28,7 +31,8 @@
 See http://go/tast-failures for information about investigating failures.
 '''
 
-args_dict = utils.args_to_dict(args)
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
 assert 'servo_state:WORKING' in DEPENDENCIES
 servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
 
@@ -37,6 +41,7 @@
                  host=hosts.create_host(machine, servo_args=servo_args),
                  test_exprs=['("group:firmware" && firmware_experimental)'],
                  ignore_test_failures=True, max_run_sec=10800,
-                 command_args=args)
+                 command_args=command_args,
+                 varslist=varslist)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-slow b/server/site_tests/tast/control.firmware-slow
new file mode 100644
index 0000000..5a704e7
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-slow
@@ -0,0 +1,42 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-slow'
+TIME = 'LENGTHY'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:faft_ec, suite:faft_ec_fw_qual'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests that are very slow.
+
+The tests are part of 'group:firmware'. The 'firmware_slow' sub-attribute
+limits it to slow tests.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_slow && !firmware_ccd)'],
+                 ignore_test_failures=True,
+                 max_run_sec=16200, # 4h30m
+                 command_args=command_args,
+                 varslist=varslist)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-slow_ccd b/server/site_tests/tast/control.firmware-slow_ccd
new file mode 100644
index 0000000..14d2001
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-slow_ccd
@@ -0,0 +1,43 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-slow_ccd'
+TIME = 'LENGTHY'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING, servo_component:ccd_cr50'
+ATTRIBUTES = 'suite:faft_ec, suite:faft_ec_fw_qual'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests that are very slow.
+
+The tests are part of 'group:firmware'. The 'firmware_slow' sub-attribute
+limits it to slow tests. The 'firmware_ccd' further limits this
+to machines attached to a servo with CCD.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_slow && firmware_ccd)'],
+                 ignore_test_failures=True,
+                 max_run_sec=16200, # 4h30m
+                 command_args=command_args,
+                 varslist=varslist)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-smoke b/server/site_tests/tast/control.firmware-smoke
index 7ca8a0c..ebfcc7b 100644
--- a/server/site_tests/tast/control.firmware-smoke
+++ b/server/site_tests/tast/control.firmware-smoke
@@ -3,6 +3,7 @@
 # found in the LICENSE file.
 
 from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
 
 AUTHOR = 'Chromium OS Firmware EngProd team'
 NAME = 'tast.firmware-smoke'
@@ -11,6 +12,8 @@
 DEPENDENCIES = 'servo_state:WORKING'
 ATTRIBUTES = 'suite:faft_smoke'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -21,6 +24,9 @@
 The tests are part of 'group:firmware'. The 'firmware_smoke' sub-attribute
 limits it to basic smoke-tests.
 
+Excludes tests with the 'firmware_usb' sub-attribute; the firmware-smoke_usb
+control file will run those.
+
 Tast is an integration-testing framework analagous to the test-running portion
 of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/
 for more information.
@@ -28,15 +34,19 @@
 See http://go/tast-failures for information about investigating failures.
 '''
 
-args_dict = utils.args_to_dict(args)
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
 assert 'servo_state:WORKING' in DEPENDENCIES
 servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
 
 def run(machine):
     job.run_test('tast',
                  host=hosts.create_host(machine, servo_args=servo_args),
-                 test_exprs=['("group:firmware" && firmware_smoke)'],
+                 # TODO(b/188712428): Clean this up when TAST is fully
+                 # supported by test-exec-service
+                 test_exprs=['("group:firmware" && firmware_smoke && !firmware_usb)'],
                  ignore_test_failures=True, max_run_sec=10800,
-                 command_args=args)
+                 command_args=command_args,
+                 varslist=varslist)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-smoke_usb b/server/site_tests/tast/control.firmware-smoke_usb
new file mode 100644
index 0000000..84453ea
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-smoke_usb
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-smoke_usb'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING, servo_usb_state:NORMAL'
+ATTRIBUTES = 'suite:faft_smoke'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for basic firmware functionality.
+
+The tests are part of 'group:firmware'. The 'firmware_smoke' sub-attribute
+limits it to basic smoke-tests. The 'firmware_usb' sub-attribute further
+limits it to tests which require a working USB stick.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/
+for more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_smoke && firmware_usb)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=command_args,
+                 varslist=varslist,
+                 ephemeraldevserver='false')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-unstable-shard-0 b/server/site_tests/tast/control.firmware-unstable-shard-0
new file mode 100644
index 0000000..f82b8d0
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-unstable-shard-0
@@ -0,0 +1,47 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-unstable-shard-0'
+TIME = 'LENGTHY'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:faft_unstable'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for EC firmware.
+
+The tests are part of 'group:firmware'. The 'firmware_unstable'
+sub-attribute limits it to unstable tests. Once the test is stable change
+the attribute to one of the others.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_unstable && !firmware_ccd && !firmware_usb)'],
+                 ignore_test_failures=True,
+                 max_run_sec=16200, # 4h30m
+                 command_args=command_args,
+                 varslist=varslist,
+                 ephemeraldevserver='false',
+                 totalshards=3,
+                 shardindex=0,
+                 )
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-unstable-shard-1 b/server/site_tests/tast/control.firmware-unstable-shard-1
new file mode 100644
index 0000000..4161833
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-unstable-shard-1
@@ -0,0 +1,47 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-unstable-shard-1'
+TIME = 'LENGTHY'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:faft_unstable'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for EC firmware.
+
+The tests are part of 'group:firmware'. The 'firmware_unstable'
+sub-attribute limits it to unstable tests. Once the test is stable change
+the attribute to one of the others.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_unstable && !firmware_ccd && !firmware_usb)'],
+                 ignore_test_failures=True,
+                 max_run_sec=16200, # 4h30m
+                 command_args=command_args,
+                 varslist=varslist,
+                 ephemeraldevserver='false',
+                 totalshards=3,
+                 shardindex=1,
+                 )
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-unstable-shard-2 b/server/site_tests/tast/control.firmware-unstable-shard-2
new file mode 100644
index 0000000..8ea2bbd
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-unstable-shard-2
@@ -0,0 +1,47 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-unstable-shard-2'
+TIME = 'LENGTHY'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:faft_unstable'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for EC firmware.
+
+The tests are part of 'group:firmware'. The 'firmware_unstable'
+sub-attribute limits it to unstable tests. Once the test is stable change
+the attribute to one of the others.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_unstable && !firmware_ccd && !firmware_usb)'],
+                 ignore_test_failures=True,
+                 max_run_sec=16200, # 4h30m
+                 command_args=command_args,
+                 varslist=varslist,
+                 ephemeraldevserver='false',
+                 totalshards=3,
+                 shardindex=2,
+                 )
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-unstable_ccd b/server/site_tests/tast/control.firmware-unstable_ccd
new file mode 100644
index 0000000..c861d03
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-unstable_ccd
@@ -0,0 +1,45 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-unstable_ccd'
+TIME = 'LENGTHY'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING, servo_component:ccd_cr50'
+ATTRIBUTES = 'suite:faft_unstable'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for EC firmware.
+
+The tests are part of 'group:firmware'. The 'firmware_unstable'
+sub-attribute limits the run to unstable tests. Once a test is stable, change
+its attribute to one of the others. The 'firmware_ccd' attribute further
+limits the run to machines attached to a servo with CCD.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_unstable && firmware_ccd)'],
+                 ignore_test_failures=True,
+                 max_run_sec=16200, # 4h30m
+                 command_args=command_args,
+                 varslist=varslist,
+                 ephemeraldevserver='false')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-unstable_usb b/server/site_tests/tast/control.firmware-unstable_usb
new file mode 100644
index 0000000..777479a
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-unstable_usb
@@ -0,0 +1,45 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-unstable_usb'
+TIME = 'LENGTHY'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING, servo_usb_state:NORMAL'
+ATTRIBUTES = 'suite:faft_unstable'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for EC firmware.
+
+The tests are part of 'group:firmware'. The 'firmware_unstable'
+sub-attribute limits the run to unstable tests. Once a test is stable, change
+its attribute to one of the others. The 'firmware_usb' attribute further
+limits the run to machines with working USB sticks.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_unstable && firmware_usb)'],
+                 ignore_test_failures=True,
+                 max_run_sec=16200, # 4h30m
+                 command_args=command_args,
+                 varslist=varslist,
+                 ephemeraldevserver='false')
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.firmware-usb b/server/site_tests/tast/control.firmware-usb
new file mode 100644
index 0000000..9189d77
--- /dev/null
+++ b/server/site_tests/tast/control.firmware-usb
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS Firmware EngProd team'
+NAME = 'tast.firmware-usb'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING, servo_usb_state:NORMAL'
+ATTRIBUTES = 'suite:faft_ec, suite:faft_ec_fw_qual'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for EC firmware that require a working USB stick.
+
+The tests are part of 'group:firmware'. The 'firmware_ec' and
+'firmware_usb' sub-attributes limit the run to tests that require USB.
+'''
+
+command_args, varslist = tast.split_arguments(args)
+args_dict = utils.args_to_dict(command_args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:firmware" && firmware_ec && firmware_usb)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=command_args,
+                 varslist=varslist)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.generic b/server/site_tests/tast/control.generic
index 935f962..8e0b9e1 100644
--- a/server/site_tests/tast/control.generic
+++ b/server/site_tests/tast/control.generic
@@ -4,6 +4,7 @@
 
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
 
 AUTHOR = 'Chromium OS team'
 NAME = 'tast.generic'
@@ -13,6 +14,7 @@
 # via test_that.
 ATTRIBUTES = ''
 MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -31,10 +33,13 @@
 Examples:
     test_that --args=tast_expr=example.Pass ${DUT} tast.generic
     test_that --args=tast_expr='("group:mainline")' ${DUT} tast.generic
+    test_that --args='tast_expr=example.RuntimeVars tast.example.strvar="Hello_World"' ${DUT} tast.generic
 '''
 
+command_args, varslist = tast.split_arguments(args)
+
 def run(machine):
-    args_dict = utils.args_to_dict(args)
+    args_dict = utils.args_to_dict(command_args)
     try:
         expr = args_dict['tast_expr']
     except KeyError:
@@ -44,6 +49,8 @@
                  host=hosts.create_host(machine),
                  test_exprs=[expr],
                  ignore_test_failures=False, max_run_sec=3600,
-                 command_args=args)
+                 command_args=command_args,
+                 varslist=varslist,
+                 is_cft=is_cft)
 
 parallel_simple(run, machines)
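
The runtime-variable example added to the DOC above relies on tast.split_arguments() separating ordinary control-file arguments (such as tast_expr=...) from name=value pairs that are forwarded to the tests themselves. A rough sketch of that split, under the assumption that dotted variable names such as tast.example.strvar mark runtime variables (the real rule in tast.py may differ):

def split_arguments_sketch(args):
    """Hypothetical split of --args entries; see tast.split_arguments for the real logic."""
    command_args, varslist = [], []
    for arg in args:
        name = arg.split('=', 1)[0]
        # Assumed rule: dotted name=value pairs become runtime variables for the
        # Tast tests; everything else stays a normal command argument.
        if '=' in arg and '.' in name:
            varslist.append(arg)
        else:
            command_args.append(arg)
    return command_args, varslist

print(split_arguments_sketch(
        ['tast_expr=example.RuntimeVars', 'tast.example.strvar=Hello_World']))
# -> (['tast_expr=example.RuntimeVars'], ['tast.example.strvar=Hello_World'])
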
diff --git a/server/site_tests/tast/control.generic-list b/server/site_tests/tast/control.generic-list
new file mode 100644
index 0000000..3ea0a2e
--- /dev/null
+++ b/server/site_tests/tast/control.generic-list
@@ -0,0 +1,57 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import error
+from autotest_lib.client.common_lib import utils
+from autotest_lib.server.site_tests.tast import tast
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.generic-list'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+# This test belongs to no suite; it is intended mainly for manual invocation
+# via test_that.
+ATTRIBUTES = ''
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run arbitrary Tast tests given as a comma-separated list.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs arbitrary Tast-based tests specified by args given to test_that.
+This test might be useful when debugging to simulate Tast test runs invoked via
+Autotest. Tests run with this wrapper will report skipped tests using the
+Autotest TEST_NA status.
+
+Examples:
+    test_that --args=tast_list=tast.test_1,tast.test_2 ${DUT} tast.generic-list
+'''
+
+command_args, varslist = tast.split_arguments(args)
+
+def run(machine):
+    args_dict = utils.args_to_dict(command_args)
+    try:
+        tast_list = args_dict['tast_list']
+    except KeyError:
+        raise error.TestFail(
+            'Attribute expression is unspecified; set --args=tast_list=...')
+    expr = tast_list.split(',')
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=expr,
+                 ignore_test_failures=True, max_run_sec=3600,
+                 command_args=command_args,
+                 varslist=varslist,
+                 exclude_missing=True,
+                 report_skipped=True)
+
+parallel_simple(run, machines)
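
Unlike tast.generic, which takes a single boolean attribute expression, this wrapper turns the comma-separated tast_list value into one exact test name per entry, so every listed test is requested explicitly. A tiny illustration of that split, with made-up test names:

# Hypothetical tast_list value; a real invocation would name actual Tast tests.
args_dict = {'tast_list': 'example.Pass,example.Fail,meta.LocalPass'}
test_exprs = args_dict['tast_list'].split(',')
print(test_exprs)  # ['example.Pass', 'example.Fail', 'meta.LocalPass']
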
diff --git a/server/site_tests/tast/control.generic-servo b/server/site_tests/tast/control.generic-servo
index 25a0df5..a33e59c 100644
--- a/server/site_tests/tast/control.generic-servo
+++ b/server/site_tests/tast/control.generic-servo
@@ -14,6 +14,7 @@
 # via test_that.
 ATTRIBUTES = ''
 MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
diff --git a/server/site_tests/tast/control.graphics-drm-nightly b/server/site_tests/tast/control.graphics-drm-nightly
new file mode 100644
index 0000000..14a79fc
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-drm-nightly
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-drm-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-drm test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs drm tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_drm)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
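
The test_exprs strings in these control files are Tast attribute expressions: a test is selected when it carries every attribute the expression requires and none that it negates, which is how the per-suite files here (together with the !graphics_* exclusions added to the umbrella graphics control files later in this change) partition 'group:graphics' without overlap. A toy sketch of that selection rule as plain set membership, with made-up attribute sets (Tast's real expression language is richer than this):

def matches(test_attrs, required, excluded=()):
    """Hypothetical selection check; Tast's expression parser is more general."""
    attrs = set(test_attrs)
    return attrs.issuperset(required) and attrs.isdisjoint(excluded)

drm_test = ['group:graphics', 'graphics_nightly', 'graphics_drm']
# Selected by this control file's expression:
print(matches(drm_test, {'group:graphics', 'graphics_nightly', 'graphics_drm'}))
# Not selected by the umbrella graphics-nightly run, which negates graphics_drm:
print(matches(drm_test, {'group:graphics', 'graphics_nightly'},
              excluded={'graphics_igt', 'graphics_opencl', 'graphics_drm',
                        'graphics_trace', 'graphics_video'}))
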
diff --git a/server/site_tests/tast/control.graphics-drm-perbuild b/server/site_tests/tast/control.graphics-drm-perbuild
new file mode 100644
index 0000000..19a17ec
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-drm-perbuild
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-drm-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-drm test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs drm tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_drm)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-drm-weekly b/server/site_tests/tast/control.graphics-drm-weekly
new file mode 100644
index 0000000..3dd1125
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-drm-weekly
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-drm-weekly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-drm test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs drm tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_drm)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-igt-nightly b/server/site_tests/tast/control.graphics-igt-nightly
new file mode 100644
index 0000000..d72f97a
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-igt-nightly
@@ -0,0 +1,40 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-igt-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-igt test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs igt-gpu-tools against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_igt)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-igt-perbuild b/server/site_tests/tast/control.graphics-igt-perbuild
new file mode 100644
index 0000000..4b9f795
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-igt-perbuild
@@ -0,0 +1,40 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-igt-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-igt test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs igt-gpu-tools against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_igt)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-igt-weekly b/server/site_tests/tast/control.graphics-igt-weekly
new file mode 100644
index 0000000..f2073dc
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-igt-weekly
@@ -0,0 +1,40 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-igt-weekly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-igt test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs igt-gpu-tools against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_igt)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-nightly b/server/site_tests/tast/control.graphics-nightly
index 6a76ab8..f62dc0b 100644
--- a/server/site_tests/tast/control.graphics-nightly
+++ b/server/site_tests/tast/control.graphics-nightly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:graphics_per-day'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -29,7 +30,7 @@
     host.reboot()
     job.run_test('tast',
                  host=host,
-                 test_exprs=['("group:graphics" && graphics_nightly && !graphics_trace && !graphics_video)'],
+                 test_exprs=['("group:graphics" && graphics_nightly && !graphics_igt && !graphics_opencl && !graphics_drm && !graphics_trace && !graphics_video)'],
                  ignore_test_failures=True, max_run_sec=10800,
                  command_args=args)
 
diff --git a/server/site_tests/tast/control.graphics-opencl-nightly b/server/site_tests/tast/control.graphics-opencl-nightly
new file mode 100644
index 0000000..4cc2acc
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-nightly
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-perbuild b/server/site_tests/tast/control.graphics-opencl-perbuild
new file mode 100644
index 0000000..f439980
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-perbuild
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-weekly-shard-0 b/server/site_tests/tast/control.graphics-opencl-weekly-shard-0
new file mode 100644
index 0000000..65a0240
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-weekly-shard-0
@@ -0,0 +1,42 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-weekly-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=10,
+                 shardindex=0,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-weekly-shard-1 b/server/site_tests/tast/control.graphics-opencl-weekly-shard-1
new file mode 100644
index 0000000..ac9e5c2
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-weekly-shard-1
@@ -0,0 +1,42 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-weekly-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=10,
+                 shardindex=1,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-weekly-shard-2 b/server/site_tests/tast/control.graphics-opencl-weekly-shard-2
new file mode 100644
index 0000000..cb95607
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-weekly-shard-2
@@ -0,0 +1,42 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-weekly-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=10,
+                 shardindex=2,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-weekly-shard-3 b/server/site_tests/tast/control.graphics-opencl-weekly-shard-3
new file mode 100644
index 0000000..71dafb5
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-weekly-shard-3
@@ -0,0 +1,42 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-weekly-shard-3'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=10,
+                 shardindex=3,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-weekly-shard-4 b/server/site_tests/tast/control.graphics-opencl-weekly-shard-4
new file mode 100644
index 0000000..0f375d2
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-weekly-shard-4
@@ -0,0 +1,42 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-weekly-shard-4'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=10,
+                 shardindex=4,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-weekly-shard-5 b/server/site_tests/tast/control.graphics-opencl-weekly-shard-5
new file mode 100644
index 0000000..8c2e341
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-weekly-shard-5
@@ -0,0 +1,42 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-weekly-shard-5'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=10,
+                 shardindex=5,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-weekly-shard-6 b/server/site_tests/tast/control.graphics-opencl-weekly-shard-6
new file mode 100644
index 0000000..930f8fc
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-weekly-shard-6
@@ -0,0 +1,42 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-weekly-shard-6'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=10,
+                 shardindex=6,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-weekly-shard-7 b/server/site_tests/tast/control.graphics-opencl-weekly-shard-7
new file mode 100644
index 0000000..dc703f9
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-weekly-shard-7
@@ -0,0 +1,42 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-weekly-shard-7'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=10,
+                 shardindex=7,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-weekly-shard-8 b/server/site_tests/tast/control.graphics-opencl-weekly-shard-8
new file mode 100644
index 0000000..3f9acbe
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-weekly-shard-8
@@ -0,0 +1,42 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-weekly-shard-8'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=10,
+                 shardindex=8,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-opencl-weekly-shard-9 b/server/site_tests/tast/control.graphics-opencl-weekly-shard-9
new file mode 100644
index 0000000..6a2335c
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-opencl-weekly-shard-9
@@ -0,0 +1,42 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-opencl-weekly-shard-9'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-opencl test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs opencl tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_opencl)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 totalshards=10,
+                 shardindex=9,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-perbuild b/server/site_tests/tast/control.graphics-perbuild
index 7a03e54..7f410e3 100644
--- a/server/site_tests/tast/control.graphics-perbuild
+++ b/server/site_tests/tast/control.graphics-perbuild
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:graphics_per-build'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -29,7 +30,7 @@
     host.reboot()
     job.run_test('tast',
                  host=host,
-                 test_exprs=['("group:graphics" && graphics_perbuild && !graphics_trace && !graphics_video)'],
+                 test_exprs=['("group:graphics" && graphics_perbuild && !graphics_igt && !graphics_opencl && !graphics_drm && !graphics_trace && !graphics_video)'],
                  ignore_test_failures=True, max_run_sec=10800,
                  command_args=args)
 
diff --git a/server/site_tests/tast/control.graphics-trace-nightly b/server/site_tests/tast/control.graphics-trace-nightly
index f7bfd4b..1745710 100644
--- a/server/site_tests/tast/control.graphics-trace-nightly
+++ b/server/site_tests/tast/control.graphics-trace-nightly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:graphics_per-day'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
diff --git a/server/site_tests/tast/control.graphics-trace-perbuild b/server/site_tests/tast/control.graphics-trace-perbuild
index e538960..863be82 100644
--- a/server/site_tests/tast/control.graphics-trace-perbuild
+++ b/server/site_tests/tast/control.graphics-trace-perbuild
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:graphics_per-build'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
diff --git a/server/site_tests/tast/control.graphics-trace-weekly b/server/site_tests/tast/control.graphics-trace-weekly
index 26d63e8..0422318 100644
--- a/server/site_tests/tast/control.graphics-trace-weekly
+++ b/server/site_tests/tast/control.graphics-trace-weekly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:graphics_per-week'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
diff --git a/server/site_tests/tast/control.graphics-video-chromestackdecoding-nightly b/server/site_tests/tast/control.graphics-video-chromestackdecoding-nightly
new file mode 100644
index 0000000..80b3c7b
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-chromestackdecoding-nightly
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-video-chromestackdecoding-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video-chromestackdecoding test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs chromestackdecoding tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_video && graphics_video_chromestackdecoding)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-chromestackdecoding-perbuild b/server/site_tests/tast/control.graphics-video-chromestackdecoding-perbuild
new file mode 100644
index 0000000..c3921a2
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-chromestackdecoding-perbuild
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-video-chromestackdecoding-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video-chromestackdecoding test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs chromestackdecoding tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_video && graphics_video_chromestackdecoding)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-chromestackdecoding-weekly b/server/site_tests/tast/control.graphics-video-chromestackdecoding-weekly
new file mode 100644
index 0000000..6db2f9d
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-chromestackdecoding-weekly
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-video-chromestackdecoding-weekly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video-chromestackdecoding test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs chromestackdecoding tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_video && graphics_video_chromestackdecoding)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-decodeaccel.nightly b/server/site_tests/tast/control.graphics-video-decodeaccel.nightly
new file mode 100644
index 0000000..83e88b3
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-decodeaccel.nightly
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-video-decodeaccel-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video-decodeaccel test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs decodeaccel tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_video && graphics_video_decodeaccel)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-decodeaccel.perbuild b/server/site_tests/tast/control.graphics-video-decodeaccel.perbuild
new file mode 100644
index 0000000..84069e2
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-decodeaccel.perbuild
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-video-decodeaccel-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video-decodeaccel test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs decodeaccel tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_video && graphics_video_decodeaccel)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-decodeaccel.weekly b/server/site_tests/tast/control.graphics-video-decodeaccel.weekly
new file mode 100644
index 0000000..6cea46d
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-decodeaccel.weekly
@@ -0,0 +1,40 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS Display team'
+NAME = 'tast.graphics-video-decodeaccel-weekly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video-decodeaccel test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs decodeaccel tests against a remote DUT.
+Separate control files are needed because Tast doesn't isolate tests from each
+other, and a clean device state is obtained by rebooting before running a
+different graphics suite.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_video && graphics_video_decodeaccel)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-nightly b/server/site_tests/tast/control.graphics-video-nightly
index 7590d00..4339b84 100644
--- a/server/site_tests/tast/control.graphics-video-nightly
+++ b/server/site_tests/tast/control.graphics-video-nightly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:graphics_per-day'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -29,7 +30,7 @@
     host.reboot()
     job.run_test('tast',
                  host=host,
-                 test_exprs=['("group:graphics" && graphics_nightly && graphics_video && !graphics_av_analysis)'],
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_video && !graphics_video_chromestackdecoding && !graphics_video_decodeaccel && !graphics_av_analysis && !graphics_video_platformdecoding)'],
                  ignore_test_failures=True, max_run_sec=10800,
                  command_args=args)
 
diff --git a/server/site_tests/tast/control.graphics-video-perbuild b/server/site_tests/tast/control.graphics-video-perbuild
index 995cf62..dbe6e74 100644
--- a/server/site_tests/tast/control.graphics-video-perbuild
+++ b/server/site_tests/tast/control.graphics-video-perbuild
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:graphics_per-build'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -29,8 +30,8 @@
     host.reboot()
     job.run_test('tast',
                  host=host,
-                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_video && !graphics_av_analysis)'],
-                 ignore_test_failures=True, max_run_sec=10800,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_video && !graphics_video_chromestackdecoding && !graphics_video_decodeaccel && !graphics_av_analysis && !graphics_video_platformdecoding)'],
+                 ignore_test_failures=True, max_run_sec=21600,
                  command_args=args)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-av1-nightly b/server/site_tests/tast/control.graphics-video-platformdecoding-av1-nightly
new file mode 100644
index 0000000..a5f75a0
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-av1-nightly
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-av1-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding av1 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_av1)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-av1-perbuild b/server/site_tests/tast/control.graphics-video-platformdecoding-av1-perbuild
new file mode 100644
index 0000000..b52f770
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-av1-perbuild
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-av1-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding av1 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_av1)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-av1-weekly b/server/site_tests/tast/control.graphics-video-platformdecoding-av1-weekly
new file mode 100644
index 0000000..41b0162
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-av1-weekly
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-av1-weekly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding av1 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_av1)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-h264-nightly b/server/site_tests/tast/control.graphics-video-platformdecoding-h264-nightly
new file mode 100644
index 0000000..cfe42c9
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-h264-nightly
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-h264-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding h264 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_h264)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-h264-perbuild b/server/site_tests/tast/control.graphics-video-platformdecoding-h264-perbuild
new file mode 100644
index 0000000..5860ea9
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-h264-perbuild
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-h264-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding h264 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_h264)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-h264-weekly b/server/site_tests/tast/control.graphics-video-platformdecoding-h264-weekly
new file mode 100644
index 0000000..a90d348
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-h264-weekly
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-h264-weekly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding h264 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_h264)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-hevc-nightly b/server/site_tests/tast/control.graphics-video-platformdecoding-hevc-nightly
new file mode 100644
index 0000000..4f0ddca
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-hevc-nightly
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-hevc-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding hevc test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_hevc)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-hevc-perbuild b/server/site_tests/tast/control.graphics-video-platformdecoding-hevc-perbuild
new file mode 100644
index 0000000..cdc2d27
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-hevc-perbuild
@@ -0,0 +1,37 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-hevc-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding-hevc test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_hevc)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-hevc-weekly b/server/site_tests/tast/control.graphics-video-platformdecoding-hevc-weekly
new file mode 100644
index 0000000..ed36b88
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-hevc-weekly
@@ -0,0 +1,37 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-hevc-weekly'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding-hevc test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_hevc)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-vp8-nightly b/server/site_tests/tast/control.graphics-video-platformdecoding-vp8-nightly
new file mode 100644
index 0000000..99503b4
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-vp8-nightly
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-vp8-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding vp8 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_vp8)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-vp8-perbuild b/server/site_tests/tast/control.graphics-video-platformdecoding-vp8-perbuild
new file mode 100644
index 0000000..788a93f
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-vp8-perbuild
@@ -0,0 +1,37 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-vp8-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding-vp8 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_vp8)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-vp8-weekly b/server/site_tests/tast/control.graphics-video-platformdecoding-vp8-weekly
new file mode 100644
index 0000000..187a19f
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-vp8-weekly
@@ -0,0 +1,37 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-vp8-weekly'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding-vp8 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_vp8)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-vp9-nightly b/server/site_tests/tast/control.graphics-video-platformdecoding-vp9-nightly
new file mode 100644
index 0000000..5d50262
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-vp9-nightly
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-vp9-nightly'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-day'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding vp9 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_nightly && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_vp9)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-vp9-perbuild b/server/site_tests/tast/control.graphics-video-platformdecoding-vp9-perbuild
new file mode 100644
index 0000000..4236066
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-vp9-perbuild
@@ -0,0 +1,37 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-vp9-perbuild'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-build'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding-vp9 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_perbuild && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_vp9)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-platformdecoding-vp9-weekly b/server/site_tests/tast/control.graphics-video-platformdecoding-vp9-weekly
new file mode 100644
index 0000000..c7ca327
--- /dev/null
+++ b/server/site_tests/tast/control.graphics-video-platformdecoding-vp9-weekly
@@ -0,0 +1,37 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.graphics-video-platformdecoding-vp9-weekly'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:graphics_per-week'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast graphics-video platformdecoding-vp9 test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Tast-based graphics-video tests against a remote DUT.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    host.reboot()
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_video && !graphics_av_analysis && graphics_video_platformdecoding && graphics_video_vp9)'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.graphics-video-weekly b/server/site_tests/tast/control.graphics-video-weekly
index 14ea75a..3c53031 100644
--- a/server/site_tests/tast/control.graphics-video-weekly
+++ b/server/site_tests/tast/control.graphics-video-weekly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:graphics_per-week'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -29,7 +30,7 @@
     host.reboot()
     job.run_test('tast',
                  host=host,
-                 test_exprs=['("group:graphics" && graphics_weekly && graphics_video && !graphics_av_analysis)'],
+                 test_exprs=['("group:graphics" && graphics_weekly && graphics_video && !graphics_video_chromestackdecoding && !graphics_video_decodeaccel && !graphics_av_analysis && !graphics_video_platformdecoding)'],
                  ignore_test_failures=True, max_run_sec=10800,
                  command_args=args)
 
diff --git a/server/site_tests/tast/control.graphics-weekly b/server/site_tests/tast/control.graphics-weekly
index 1d74431..f3b20d4 100644
--- a/server/site_tests/tast/control.graphics-weekly
+++ b/server/site_tests/tast/control.graphics-weekly
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:graphics_per-week'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -29,7 +30,7 @@
     host.reboot()
     job.run_test('tast',
                  host=host,
-                 test_exprs=['("group:graphics" && graphics_weekly && !graphics_trace && !graphics_video)'],
+                 test_exprs=['("group:graphics" && graphics_weekly && !graphics_igt && !graphics_opencl && !graphics_drm && !graphics_trace && !graphics_video)'],
                  ignore_test_failures=True, max_run_sec=10800,
                  command_args=args)
 
diff --git a/server/site_tests/tast/control.hps-perbuild b/server/site_tests/tast/control.hps-perbuild
new file mode 100644
index 0000000..7631a8e
--- /dev/null
+++ b/server/site_tests/tast/control.hps-perbuild
@@ -0,0 +1,31 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.hps-perbuild'
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:hps_perf_perbuild'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast HPS test suite on HPS dev boards.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
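+                 # The Sweetberry dev-board setup is excluded here; those tests run from tast.hps-sweetberry-perbuild.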
+                 test_exprs=['("group:hps" && hps_perbuild && !hps_devboard_p2_sweetberry)'],
+                 ignore_test_failures=True,
+                 max_run_sec=7200,  # 2 hours
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.hps_sweetberry_perbuild b/server/site_tests/tast/control.hps_sweetberry_perbuild
new file mode 100644
index 0000000..15b04e3
--- /dev/null
+++ b/server/site_tests/tast/control.hps_sweetberry_perbuild
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.hps-sweetberry-perbuild'
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:hps_sweetberry_perf_perbuild'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast HPS Sweetberry test suite with a custom HPS dev board setup.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
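+                 # Complement of tast.hps-perbuild: only the tests that need the hps_devboard_p2_sweetberry setup.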
+                 test_exprs=['("group:hps" && hps_perbuild && hps_devboard_p2_sweetberry)'],
+                 ignore_test_failures=True,
+                 max_run_sec=7200,  # 2 hours
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.hwsec-destructive-crosbolt-perbuild b/server/site_tests/tast/control.hwsec-destructive-crosbolt-perbuild
index 23aa524..a77827d 100644
--- a/server/site_tests/tast/control.hwsec-destructive-crosbolt-perbuild
+++ b/server/site_tests/tast/control.hwsec-destructive-crosbolt-perbuild
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
diff --git a/server/site_tests/tast/control.hwsec-destructive-func b/server/site_tests/tast/control.hwsec-destructive-func
index 48d4b49..8d75743 100644
--- a/server/site_tests/tast/control.hwsec-destructive-func
+++ b/server/site_tests/tast/control.hwsec-destructive-func
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:bvt-tast-informational'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 JOB_RETRIES = 0  # Do not retry informational tests.
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
diff --git a/server/site_tests/tast/control.informational-android b/server/site_tests/tast/control.informational-android
deleted file mode 100644
index f440fc2..0000000
--- a/server/site_tests/tast/control.informational-android
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.informational-android'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0  # Do not retry informational tests.
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast informational ARC test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Android-dependent Tast-based informational tests against a remote
-DUT. It only reports failure if the tast process itself (rather than individual
-Tast tests) fails.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    # TODO(crbug.com/992303): Fix android condition.
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['('
-                             '"group:mainline" && '
-                             'informational && '
-                             '!"name:crostini.*" && '
-                             '!"dep:plugin_vm" && '
-                             '"dep:android*"'
-                             ')'],
-                 ignore_test_failures=True,
-                 max_run_sec=3*60*60,
-                 command_args=args)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-android-shard-0 b/server/site_tests/tast/control.informational-android-shard-0
new file mode 100644
index 0000000..330b221
--- /dev/null
+++ b/server/site_tests/tast/control.informational-android-shard-0
@@ -0,0 +1,55 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.informational-android-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0  # Do not retry informational tests.
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast informational ARC test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Android-dependent Tast-based informational tests against a remote
+DUT. It only reports failure if the tast process itself (rather than individual
+Tast tests) fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
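+# Servo is required (see DEPENDENCIES); extract its connection parameters from the suite args.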
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             'informational && '
+                             '!"name:crostini.*" && '
+                             '"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=3*60*60,
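+                 # Shard 0 of 3: the ARC informational tests are split across three identical controls.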
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args,
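+                 # Clear TPM ownership so the run starts from a clean device state.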
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-android-shard-1 b/server/site_tests/tast/control.informational-android-shard-1
new file mode 100644
index 0000000..d4735e0
--- /dev/null
+++ b/server/site_tests/tast/control.informational-android-shard-1
@@ -0,0 +1,55 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.informational-android-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0  # Do not retry informational tests.
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast informational ARC test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Android-dependent Tast-based informational tests against a remote
+DUT. It only reports failure if the tast process itself (rather than individual
+Tast tests) fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             'informational && '
+                             '!"name:crostini.*" && '
+                             '"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=3*60*60,
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args,
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-android-shard-2 b/server/site_tests/tast/control.informational-android-shard-2
new file mode 100644
index 0000000..2976273
--- /dev/null
+++ b/server/site_tests/tast/control.informational-android-shard-2
@@ -0,0 +1,55 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.informational-android-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0  # Do not retry informational tests.
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast informational ARC test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Android-dependent Tast-based informational tests against a remote
+DUT. It only reports failure if the tast process itself (rather than individual
+Tast tests) fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             'informational && '
+                             '!"name:crostini.*" && '
+                             '"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=3*60*60,
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args,
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-chrome b/server/site_tests/tast/control.informational-chrome
deleted file mode 100644
index d4f6baa..0000000
--- a/server/site_tests/tast/control.informational-chrome
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.informational-chrome'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
-MAX_RESULT_SIZE_KB = 1024 * 1024
-JOB_RETRIES = 0  # Do not retry informational tests.
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast informational Chrome test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Chrome-dependent Tast-based informational tests against a remote
-DUT. It only reports failure if the tast process itself (rather than individual
-Tast tests) fails.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    # TODO(crbug.com/992303): Fix android condition.
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['('
-                             '"group:mainline" && '
-                             'informational && '
-                             '!"name:crostini.*" && '
-                             '!"dep:plugin_vm" && '
-                             '"dep:chrome" && '
-                             '!"dep:android*"'
-                             ')'],
-                 ignore_test_failures=True,
-                 max_run_sec=3*60*60,
-                 command_args=args,
-                 clear_tpm=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-chrome-shard-0 b/server/site_tests/tast/control.informational-chrome-shard-0
new file mode 100644
index 0000000..f7d81c1
--- /dev/null
+++ b/server/site_tests/tast/control.informational-chrome-shard-0
@@ -0,0 +1,56 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.informational-chrome-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0  # Do not retry informational tests.
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast informational Chrome test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Chrome-dependent Tast-based informational tests against a remote
+DUT. It only reports failure if the tast process itself (rather than individual
+Tast tests) fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             'informational && '
+                             '!"name:crostini.*" && '
+                             '"dep:chrome" && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=4*60*60,
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args,
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-chrome-shard-1 b/server/site_tests/tast/control.informational-chrome-shard-1
new file mode 100644
index 0000000..51503c5
--- /dev/null
+++ b/server/site_tests/tast/control.informational-chrome-shard-1
@@ -0,0 +1,56 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.informational-chrome-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0  # Do not retry informational tests.
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast informational Chrome test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Chrome-dependent Tast-based informational tests against a remote
+DUT. It only reports failure if the tast process itself (rather than individual
+Tast tests) fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             'informational && '
+                             '!"name:crostini.*" && '
+                             '"dep:chrome" && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=4*60*60,
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args,
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-chrome-shard-2 b/server/site_tests/tast/control.informational-chrome-shard-2
new file mode 100644
index 0000000..78a25cf
--- /dev/null
+++ b/server/site_tests/tast/control.informational-chrome-shard-2
@@ -0,0 +1,56 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.informational-chrome-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+JOB_RETRIES = 0  # Do not retry informational tests.
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast informational Chrome test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Chrome-dependent Tast-based informational tests against a remote
+DUT. It only reports failure if the tast process itself (rather than individual
+Tast tests) fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             'informational && '
+                             '!"name:crostini.*" && '
+                             '"dep:chrome" && '
+                             '!"dep:android*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=4*60*60,
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args,
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-crostini b/server/site_tests/tast/control.informational-crostini
deleted file mode 100644
index 242d8ff..0000000
--- a/server/site_tests/tast/control.informational-crostini
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = 'Chromium OS team'
-NAME = 'tast.informational-crostini'
-TIME = 'MEDIUM'
-TEST_TYPE = 'Server'
-ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
-MAX_RESULT_SIZE_KB = 1024 * 1024
-
-# tast.py uses binaries installed from autotest_server_package.tar.bz2.
-REQUIRE_SSP = True
-
-DOC = '''
-Run the Tast informational Crostini test suite.
-
-Tast is an integration-testing framework analagous to the test-running portion
-of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
-more information.
-
-This test runs Crostini-dependent Tast-based informational tests against a
-remote DUT. It only reports failure if the tast process itself (rather than
-individual Tast tests) fails.
-
-See http://go/tast-failures for information about investigating failures.
-'''
-
-def run(machine):
-    # TODO(crbug.com/992303): Fix android condition.
-    job.run_test('tast',
-                 host=hosts.create_host(machine),
-                 test_exprs=['('
-                             '"group:mainline" && '
-                             'informational && '
-                             '"name:crostini.*" && '
-                             '!"dep:plugin_vm"'
-                             ')'],
-                 ignore_test_failures=True,
-                 max_run_sec=3*60*60,
-                 command_args=args,
-                 clear_tpm=True)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-crostini-shard-0 b/server/site_tests/tast/control.informational-crostini-shard-0
new file mode 100644
index 0000000..79b7f39
--- /dev/null
+++ b/server/site_tests/tast/control.informational-crostini-shard-0
@@ -0,0 +1,53 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.informational-crostini-shard-0'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast informational Crostini test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Crostini-dependent Tast-based informational tests against a
+remote DUT. It only reports failure if the tast process itself (rather than
+individual Tast tests) fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             'informational && '
+                             '"name:crostini.*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=3*60*60,
+                 totalshards=3,
+                 shardindex=0,
+                 command_args=args,
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-crostini-shard-1 b/server/site_tests/tast/control.informational-crostini-shard-1
new file mode 100644
index 0000000..829e12a
--- /dev/null
+++ b/server/site_tests/tast/control.informational-crostini-shard-1
@@ -0,0 +1,53 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.informational-crostini-shard-1'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast informational Crostini test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Crostini-dependent Tast-based informational tests against a
+remote DUT. It only reports failure if the tast process itself (rather than
+individual Tast tests) fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             'informational && '
+                             '"name:crostini.*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=3*60*60,
+                 totalshards=3,
+                 shardindex=1,
+                 command_args=args,
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-crostini-shard-2 b/server/site_tests/tast/control.informational-crostini-shard-2
new file mode 100644
index 0000000..89ace6e
--- /dev/null
+++ b/server/site_tests/tast/control.informational-crostini-shard-2
@@ -0,0 +1,53 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.informational-crostini-shard-2'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-informational, suite:chrome-informational'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast informational Crostini test suite.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs Crostini-dependent Tast-based informational tests against a
+remote DUT. It only reports failure if the tast process itself (rather than
+individual Tast tests) fails.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    # TODO(crbug.com/992303): Fix android condition.
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             'informational && '
+                             '"name:crostini.*"'
+                             ')'],
+                 ignore_test_failures=True,
+                 max_run_sec=3*60*60,
+                 totalshards=3,
+                 shardindex=2,
+                 command_args=args,
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.informational-parallels b/server/site_tests/tast/control.informational-parallels
index 4d80635..d1c270f 100644
--- a/server/site_tests/tast/control.informational-parallels
+++ b/server/site_tests/tast/control.informational-parallels
@@ -2,45 +2,54 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from autotest_lib.client.common_lib import utils
+
 AUTHOR = 'Chromium OS team'
 NAME = 'tast.informational-parallels'
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
-# TODO(crbug.com/1154072): Remove from 'suite:bvt-tast-informational' once the
-# parallels suite is scheduled.
-ATTRIBUTES = 'suite:bvt-tast-informational, suite:bvt-tast-parallels-informational'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:bvt-tast-parallels-informational'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 JOB_RETRIES = 0  # Do not retry informational tests.
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
 
 DOC = '''
-Run the Tast informational Parallels test suite.
+Run the Tast informational test suite for a booted Parallels VM.
+
+This test may only be run on hardware licensed for specific versions of
+Windows and Office. Contact parallels-cros@google.com for details.
 
 Tast is an integration-testing framework analagous to the test-running portion
 of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
 more information.
 
-This test runs Parallels-dependent Tast-based informational tests against a
-remote DUT. It only reports failure if the tast process itself (rather than
+This test runs informational Tast tests which use a booted Parallels VM against
+a remote DUT. It only reports failure if the tast process itself (rather than
 individual Tast tests) fails.
 
 See http://go/tast-failures for information about investigating failures.
 '''
 
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
 def run(machine):
     # TODO(crbug.com/992303): Fix android condition.
     job.run_test('tast',
-                 host=hosts.create_host(machine),
+                 host=hosts.create_host(machine, servo_args=servo_args),
                  test_exprs=['('
-                             '"group:mainline" && '
-                             'informational && '
-                             '"dep:plugin_vm"'
+                             '"group:parallels_mainline" && '
+                             'informational'
                              ')'],
                  ignore_test_failures=True,
                  max_run_sec=3*60*60,
                  command_args=args,
-                 clear_tpm=True)
+                 clear_tpm=True,
+                 varslist=['pita.windowsLicensed=true'])
 
 parallel_simple(run, machines)
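
Several of the control files in this change parse the free-form args list with utils.args_to_dict before pulling out servo settings or runtime variables. A minimal approximation of that 'key=value' parsing, for orientation only (the real helper in autotest_lib does more validation):

    def args_to_dict_sketch(args):
        """Turn a list of 'key=value' strings into a dict (illustrative only)."""
        parsed = {}
        for arg in args:
            key, sep, value = arg.partition('=')
            if sep:
                parsed[key.strip()] = value.strip()
        return parsed

    # e.g. test_that --args="servo_host=labstation1 servo_port=9999" arrives
    # here as a list of strings and becomes a plain dict:
    print(args_to_dict_sketch(['servo_host=labstation1', 'servo_port=9999']))
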
diff --git a/server/site_tests/tast/control.informational-system b/server/site_tests/tast/control.informational-system
index f4c2b68..dbe7fe1 100644
--- a/server/site_tests/tast/control.informational-system
+++ b/server/site_tests/tast/control.informational-system
@@ -11,6 +11,7 @@
 DEPENDENCIES = 'servo_state:WORKING'
 ATTRIBUTES = 'suite:bvt-tast-informational'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 JOB_RETRIES = 0  # Do not retry informational tests.
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
diff --git a/server/site_tests/tast/control.inputs-upstream b/server/site_tests/tast/control.inputs-upstream
new file mode 100644
index 0000000..f676e48
--- /dev/null
+++ b/server/site_tests/tast/control.inputs-upstream
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.inputs-upstream'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:input-tools-upstream'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast tests which run in suite:input-tools-upstream.
+
+"group:input-tools-upstream" indicates tests that run on CrOS Essential Inputs.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:input-tools-upstream")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.lacros b/server/site_tests/tast/control.lacros
new file mode 100644
index 0000000..bc2808a
--- /dev/null
+++ b/server/site_tests/tast/control.lacros
@@ -0,0 +1,82 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros import chrome_sideloader
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.lacros'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+# Location to put the sideloaded chrome artifacts
+CHROME_DIR = '/usr/local/lacros'
+
+DOC = '''
+Run the lacros test.
+
+This is a wrapper for lacros Tast tests built for Chromium builders. We
+mount a lacros artifact provisioned by TLS and configure Tast to execute
+lacros tests against it.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/
+for more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    host=hosts.create_host(machine)
+    varslist = []
+    args_dict = utils.args_to_dict(args)
+
+    assert host.path_exists('/var/lib/imageloader/lacros'), ('lacros artifact '
+            'is not provisioned by CTP. Please check the CTP request.')
+
+    # Because this wrapper serves Chromium/Chrome CI builders, the Chrome version
+    # should always be fresher than the rootfs Chrome bundled in the OS.
+    chrome_sideloader.setup_host(host, CHROME_DIR, None)
+
+    # lacros.DeployedBinary must be set to the directory containing the chrome executable.
+    # If chrome is not at the squashfs root, specify its relative path via exe_rel_path.
+    path_to_chrome = os.path.join(CHROME_DIR,
+                                  args_dict.get('exe_rel_path', 'chrome'))
+    assert host.path_exists(path_to_chrome), (
+            'lacros artifact is not provisioned at expected path, %s'
+            % path_to_chrome)
+
+    expr = chrome_sideloader.get_tast_expr_from_file(host, args_dict, '%s/' % job.resultdir, CHROME_DIR)
+    if not expr:
+        expr = chrome_sideloader.get_tast_expr(args_dict)
+    logging.info('Tast expr: %s', expr)
+
+    varslist.append('lacros.DeployedBinary=%s' %
+                    os.path.dirname(path_to_chrome)
+    )
+
+    def cleanup():
+        chrome_sideloader.cleanup_host(host, CHROME_DIR, None)
+
+    job.add_post_run_hook(cleanup)
+
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=[expr],
+                 download_data_lazily=False,
+                 ignore_test_failures=False, max_run_sec=3600,
+                 command_args=args,
+                 varslist=varslist,
+                 clear_tpm=True)
+
+parallel_simple(run, machines)
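
The only non-boilerplate logic in control.lacros above is deriving the lacros.DeployedBinary runtime variable from the optional exe_rel_path arg. Pulled out on its own (CHROME_DIR and the 'chrome' default come from the control file; the helper name is invented for illustration):

    import os

    CHROME_DIR = '/usr/local/lacros'

    def deployed_binary_var(args_dict):
        """Build the lacros.DeployedBinary var handed to Tast via varslist."""
        # exe_rel_path points at chrome when it is not at the squashfs root.
        path_to_chrome = os.path.join(CHROME_DIR,
                                      args_dict.get('exe_rel_path', 'chrome'))
        return 'lacros.DeployedBinary=%s' % os.path.dirname(path_to_chrome)

    # Without exe_rel_path the variable points at /usr/local/lacros itself.
    assert deployed_binary_var({}) == 'lacros.DeployedBinary=/usr/local/lacros'
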
diff --git a/server/site_tests/tast/control.meta b/server/site_tests/tast/control.meta
index b6895d1..f5a1918 100644
--- a/server/site_tests/tast/control.meta
+++ b/server/site_tests/tast/control.meta
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:dev_drone_image_test'
 MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
diff --git a/server/site_tests/tast/control.mfp_printscan b/server/site_tests/tast/control.mfp_printscan
new file mode 100644
index 0000000..198e85d
--- /dev/null
+++ b/server/site_tests/tast/control.mfp_printscan
@@ -0,0 +1,31 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.mfp_printscan'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:printscan'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Runs the scan tests that do not have the tast manual attribute.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:paper-io" && "paper-io_mfp_printscan")'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.mtp b/server/site_tests/tast/control.mtp
new file mode 100644
index 0000000..948220a
--- /dev/null
+++ b/server/site_tests/tast/control.mtp
@@ -0,0 +1,26 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+AUTHOR = 'ARC Engprod Team'
+NAME = 'tast.mtp'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:mtp'
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = """
+This suite runs tast tests in the 'mtp' group, which verify that an Android app can
+read files on an external Android device via MTP (Media Transfer Protocol).
+"""
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:mtp")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.nearby-share b/server/site_tests/tast/control.nearby-share
index 3fa7fbd..c1d0329 100644
--- a/server/site_tests/tast/control.nearby-share
+++ b/server/site_tests/tast/control.nearby-share
@@ -8,16 +8,36 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:nearby-share'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
 
 DOC = '''Run the Tast Nearby Share test suite.'''
 
+from autotest_lib.server.cros.crossdevice import cross_device_util
+
 def run(machine):
+    # WiFi network details that the Chromebook will connect to.
+    ssid = 'nearbysharing_1'
+    password = 'password'
+
+    # Get host objects for each device.
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    phone = companions[0]
+
+    # Configure devices for crossdevice tests.
+    cross_device_util.connect_to_wifi(host, ssid, password)
+    ip_address = phone.setup_for_cross_device_tests(adb_persist_reboot=True)
+
+    # Pass the phone's adb-over-tcp "serial" (e.g. 192.168.0.30:5555) to Tast as a global var.
+    ip_address_arg = 'crossdevice.PhoneIP=%s:5555' % ip_address
+
     job.run_test('tast',
-                 host=hosts.create_host(machine),
+                 host=host,
                  test_exprs=['("group:nearby-share")'],
                  ignore_test_failures=True, max_run_sec=10800,
-                 command_args=args)
+                 command_args=args,
+                 varslist=[ip_address_arg])
 parallel_simple(run, machines)
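
The nearby-share family of control files repeats one small convention: the phone's adb-over-TCP endpoint is passed to Tast as the crossdevice.PhoneIP runtime variable, using the conventional adb TCP port 5555. A tiny sketch of that formatting (the helper itself is hypothetical):

    def phone_ip_var(ip_address, port=5555):
        """Format the phone's adb-over-TCP endpoint as a Tast runtime var."""
        return 'crossdevice.PhoneIP=%s:%d' % (ip_address, port)

    # e.g. phone_ip_var('192.168.0.30') == 'crossdevice.PhoneIP=192.168.0.30:5555'
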
diff --git a/server/site_tests/tast/control.nearby-share-arc b/server/site_tests/tast/control.nearby-share-arc
new file mode 100644
index 0000000..194c066
--- /dev/null
+++ b/server/site_tests/tast/control.nearby-share-arc
@@ -0,0 +1,43 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+AUTHOR = 'ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)'
+NAME = 'tast.nearby-share-arc'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:nearby-share-arc'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''Run the ARC++ Nearby Share test suite.'''
+
+from autotest_lib.server.cros.crossdevice import cross_device_util
+
+def run(machine):
+    # WiFi network details that the Chromebook will connect to.
+    ssid = 'nearbysharing_1'
+    password = 'password'
+
+    # Get host objects for each device.
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    phone = companions[0]
+
+    # Configure devices for crossdevice tests.
+    cross_device_util.connect_to_wifi(host, ssid, password)
+    ip_address = phone.setup_for_cross_device_tests(adb_persist_reboot=True)
+
+    # Pass the phone's adb-over-tcp "serial" (e.g. 192.168.0.30:5555) to Tast as a global var.
+    ip_address_arg = 'crossdevice.PhoneIP=%s:5555' % ip_address
+
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:nearby-share-arc")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args,
+                 varslist=[ip_address_arg])
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.nearby-share-cb2cb b/server/site_tests/tast/control.nearby-share-cb2cb
new file mode 100644
index 0000000..f6cb659
--- /dev/null
+++ b/server/site_tests/tast/control.nearby-share-cb2cb
@@ -0,0 +1,27 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+AUTHOR = 'ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)'
+NAME = 'tast.nearby-share-cb2cb'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:nearby-share-remote'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''Run the Tast Nearby Share remote test suite.'''
+
+def run(machine):
+    companions = hosts.create_companion_hosts(companion_hosts)
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:nearby-share-remote")'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 companion_duts={'cd1':companions[0]},
+                 command_args=args)
+parallel_simple(run, machines)
+
diff --git a/server/site_tests/tast/control.nearby-share-cb2cb-chrome-from-tls b/server/site_tests/tast/control.nearby-share-cb2cb-chrome-from-tls
new file mode 100644
index 0000000..29df3eb
--- /dev/null
+++ b/server/site_tests/tast/control.nearby-share-cb2cb-chrome-from-tls
@@ -0,0 +1,75 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from autotest_lib.client.common_lib.error import TestFail
+from autotest_lib.server.cros import chrome_sideloader
+
+AUTHOR = 'ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)'
+NAME = 'tast.nearby-share-cb2cb-chrome-from-tls'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+# This mount point controls the version of chrome to use
+CHROME_MOUNT_POINT = '/opt/google/chrome'
+# Location to put the sideloaded chrome artifacts
+CHROME_DIR = '/usr/local/chrome'
+
+DOC = '''
+Runs the Tast Nearby Share remote tests with a custom chrome binary.
+
+These Nearby Share tests require two chromebooks, so we must provision
+both DUTs with the custom chrome binary before the tests are kicked off.
+
+This test is used by Chrome builder for Nearby Share CI/CQ purposes.
+A chrome builder creates a chrome binary and artifacts,
+which are in turn provisioned to both DUTs through TLS.
+
+Tast tests use the sideloaded version of chrome for testing.
+
+'''
+
+
+def run(machine):
+    primary_dut = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+
+    if not companions:
+        raise TestFail('Missing companion hosts')
+
+    secondary_dut = companions[0]
+
+    logging.info("Running %s on primary_dut: %s and companion_host:%s", NAME,
+                 primary_dut, secondary_dut)
+
+    # Setup both DUTs to use the chrome binary from TLS
+    for host in [primary_dut, secondary_dut]:
+        chrome_sideloader.setup_host(
+            host, CHROME_DIR, CHROME_MOUNT_POINT)
+
+    # Register a clean up callback to reset the chrome mount.
+    def cleanup():
+        for host in [primary_dut, secondary_dut]:
+            chrome_sideloader.cleanup_host(
+                host, CHROME_DIR, CHROME_MOUNT_POINT)
+    job.add_post_run_hook(cleanup)
+
+    job.run_test('tast',
+                 host=primary_dut,
+                 test_exprs=['("group:nearby-share-cq")'],
+                 download_data_lazily=False,
+                 ignore_test_failures=False,
+                 max_run_sec=3600,
+                 companion_duts={'cd1': secondary_dut},
+                 command_args=args
+                 )
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.nearby-share-dev b/server/site_tests/tast/control.nearby-share-dev
new file mode 100644
index 0000000..2f48e5b
--- /dev/null
+++ b/server/site_tests/tast/control.nearby-share-dev
@@ -0,0 +1,43 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+AUTHOR = 'ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)'
+NAME = 'tast.nearby-share-dev'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:nearby-share-dev'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''Run the Tast Nearby Share test suite with dev Android NS.'''
+
+from autotest_lib.server.cros.crossdevice import cross_device_util
+
+def run(machine):
+    # WiFi network details that the Chromebook will connect to.
+    ssid = 'nearbysharing_1'
+    password = 'password'
+
+    # Get host objects for each device.
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    phone = companions[0]
+
+    # Configure devices for crossdevice tests.
+    cross_device_util.connect_to_wifi(host, ssid, password)
+    ip_address = phone.setup_for_cross_device_tests(adb_persist_reboot=True)
+
+    # Pass the phone's adb-over-tcp "serial" (e.g. 192.168.0.30:5555) to Tast as a global var.
+    ip_address_arg = 'crossdevice.PhoneIP=%s:5555' % ip_address
+
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:nearby-share-dev")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args,
+                 varslist=[ip_address_arg])
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.nearby-share-prod b/server/site_tests/tast/control.nearby-share-prod
new file mode 100644
index 0000000..fa5889b
--- /dev/null
+++ b/server/site_tests/tast/control.nearby-share-prod
@@ -0,0 +1,43 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+AUTHOR = 'ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)'
+NAME = 'tast.nearby-share-prod'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:nearby-share-prod'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''Run the Tast Nearby Share test suite with prod Android NS.'''
+
+from autotest_lib.server.cros.crossdevice import cross_device_util
+
+def run(machine):
+    # WiFi network details that the Chromebook will connect to.
+    ssid = 'nearbysharing_1'
+    password = 'password'
+
+    # Get host objects for each device.
+    host = hosts.create_host(machine)
+    companions = hosts.create_companion_hosts(companion_hosts)
+    phone = companions[0]
+
+    # Configure devices for crossdevice tests.
+    cross_device_util.connect_to_wifi(host, ssid, password)
+    ip_address = phone.setup_for_cross_device_tests(adb_persist_reboot=True)
+
+    # Pass the phone's adb-over-tcp "serial" (e.g. 192.168.0.30:5555) to Tast as a global var.
+    ip_address_arg = 'crossdevice.PhoneIP=%s:5555' % ip_address
+
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:nearby-share-prod")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args,
+                 varslist=[ip_address_arg])
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.nearby-share.local b/server/site_tests/tast/control.nearby-share.local
new file mode 100644
index 0000000..f5fccc1
--- /dev/null
+++ b/server/site_tests/tast/control.nearby-share.local
@@ -0,0 +1,65 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)'
+NAME = 'tast.nearby-share.local'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''Run the Tast Nearby Share test suites locally.
+
+This control file will set up the Chromebook and Android phone for you if you want to run locally.
+Running locally means kicking off via test_that against either:
+1. One of the scheduling units / RF boxes in the lab.
+2. At your desk when using the chromebook as a fake labstation.
+You can use #2 to test the e2e adb-over-wifi flow without having a labstation at home.
+NOTE: Labstations store their adb keys at /var/lib/android_keys (and this gets wiped during login), so you need to click accept manually for the setup's adb connection.
+
+You need to specify the WiFi network details below that the Chromebook and phone should be on together.
+Set <SET NETWORK NAME> to your SSID and <SET PASSWORD> to your network password.
+You also need to set the Nearby Share suite NAME.
+Set <NEARBY SUITE NAME> to the suite you want to run, e.g. nearby-share, nearby-share-arc, nearby-share-dev, nearby-share-prod.
+
+These args are expected to be passed to test_that:
+--args="phone_station=$PHONE_HOST android_serial=$ANDROID_SERIAL_NUMBER"
+
+When using port forwarding to localhost, the expected args are:
+--args="phone_station=localhost android_station_ssh_port=$FORWARDED_PORT android_serial=$ANDROID_SERIAL_NUMBER"
+$FORWARDED_PORT is the port of the labstation that you port forwarded to localhost
+
+'''
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.crossdevice import cross_device_util
+
+def run(machine):
+    # WiFi network details that the Chromebook will connect to.
+    ssid = '<SET NETWORK NAME>'
+    password = '<SET PASSWORD>'
+
+    # Get host objects for each device.
+    host = hosts.create_host(machine)
+    args_dict = utils.args_to_dict(args)
+    android_args = hosts.AndroidHost.get_android_arguments(args_dict)
+    phone = hosts.AndroidHost('local_phone', android_args=android_args)
+
+    # Configure devices for crossdevice tests.
+    cross_device_util.connect_to_wifi(host, ssid, password)
+    ip_address = phone.setup_for_cross_device_tests(adb_persist_reboot=True)
+
+    # Pass the phone's adb-over-tcp "serial" (e.g. 192.168.0.30:5555) to Tast as a global var.
+    ip_address_arg = 'crossdevice.PhoneIP=%s:5555' % ip_address
+
+    job.run_test('tast',
+                 host=host,
+                 test_exprs=['("group:<NEARBY SUITE NAME>")'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args,
+                 varslist=[ip_address_arg])
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.omaha b/server/site_tests/tast/control.omaha
new file mode 100644
index 0000000..0b48e76
--- /dev/null
+++ b/server/site_tests/tast/control.omaha
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.omaha'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:omaha_per-week'
+MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run omaha Tast tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+This test runs a group of tests that check the state of prod Omaha; since they
+are not dependent on the current build, they can run periodically.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine), max_run_sec=10800,
+                 test_exprs=['("group:omaha")'], command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.pvs-audio b/server/site_tests/tast/control.pvs-audio
new file mode 100644
index 0000000..a49f62f
--- /dev/null
+++ b/server/site_tests/tast/control.pvs-audio
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'ChromeOS Team'
+NAME = 'tast.pvs-audio'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:pvs-audio'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+This test suite is part of the Platform Validation Suite (go/cros-pvs)
+and includes a collection of tast.audio tests that require no
+special hardware to run.
+'''
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['audio.ALSAConformance',
+                             'audio.CrasPlay',
+                             'audio.CrasRecord',
+                             'audio.CrasRecordQuality',
+                             'audio.DevicePlay',
+                             'audio.DeviceRecord',
+                             'audio.Microphone'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 command_args=args,
+                 clear_tmp=True)
+parallel_simple(run, machines)
\ No newline at end of file
diff --git a/server/site_tests/tast/control.pvs-display b/server/site_tests/tast/control.pvs-display
new file mode 100644
index 0000000..289c14e
--- /dev/null
+++ b/server/site_tests/tast/control.pvs-display
@@ -0,0 +1,48 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'ChromeOS Team'
+NAME = 'tast.pvs-display'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:pvs-display'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+This test suite is part of the Platform Validation Suite (go/cros-pvs)
+and includes a collection of tast.graphics tests that require no
+special hardware to run.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                   ' "name:graphics.IGT.kms_*" && '
+                   ' !"name:graphics.IGT.kms_addfb_basic" && '
+                   ' !"name:graphics.IGT.kms_atomic" && '
+                   ' !"name:graphics.IGT.kms_atomic_transition" && '
+                   ' !"name:graphics.IGT.kms_concurrent" && '
+                   ' !"name:graphics.IGT.kms_content_protection" && '
+                   ' !"name:graphics.IGT.kms_cursor_legacy" && '
+                   ' !"name:graphics.IGT.kms_dp_dsc" && '
+                   ' !"name:graphics.IGT.kms_flip" && '
+                   ' !"name:graphics.IGT.kms_panel_fitting" && '
+                   ' !"name:graphics.IGT.kms_plane" && '
+                   ' !"name:graphics.IGT.kms_plane_alpha_blend" && '
+                   ' !"name:graphics.IGT.kms_plane_cursor" && '
+                   ' !"name:graphics.IGT.kms_plane_multiple" && '
+                   ' !"name:graphics.IGT.kms_plane_scaling" && '
+                   ' !"name:graphics.IGT.kms_prime" && '
+                   ' !"name:graphics.IGT.kms_setmode" && '
+                   ' !"name:graphics.IGT.kms_vblank" '
+                 ')'],
+                 ignore_test_failures=True, max_run_sec=21600,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.pvs-graphics b/server/site_tests/tast/control.pvs-graphics
new file mode 100644
index 0000000..df5ee95
--- /dev/null
+++ b/server/site_tests/tast/control.pvs-graphics
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'ChromeOS Team'
+NAME = 'tast.pvs-graphics'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:pvs-graphics'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+This test suite is part of the Platform Validation Suite (go/cros-pvs)
+and includes a collection of tast.graphics tests that require no
+special hardware to run.
+'''
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['graphics.Connector',
+                            'graphics.DEQP',
+                            'graphics.FPS',
+                            'graphics.GLBench.hasty',
+                            'graphics.ScreenshotCLI',
+                            'graphics.ScreenshotChrome',
+                            'graphics.Smoke.chrome',
+                            'graphics.Smoke.platform'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 command_args=args,
+                 clear_tmp=True)
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.pvs-staging b/server/site_tests/tast/control.pvs-staging
new file mode 100644
index 0000000..8e12323
--- /dev/null
+++ b/server/site_tests/tast/control.pvs-staging
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'mwiitala, ChromeOS Kernel Team'
+NAME = 'tast.pvs-staging'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:pvs-staging'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast test(s) which will be exported to partners for PVS testing.
+For more info on PVS, please check out go/cros-pvs-prd
+'''
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '!"name:printer.AddBrotherPrinter" && '     # b/192458034
+                             '!"name:printer.ResolutionBrother.*" && '   # b/192458034
+                             '!"name:typec.Basic"' # typec.Basic requires a servo, which may not be available for PVS testing.
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 command_args=args,
+                 clear_tmp=True)
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.pvs-tast-cq b/server/site_tests/tast/control.pvs-tast-cq
new file mode 100644
index 0000000..55a3094
--- /dev/null
+++ b/server/site_tests/tast/control.pvs-tast-cq
@@ -0,0 +1,61 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'ChromeOS Team'
+NAME = 'tast.pvs-tast-cq'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:pvs-tast-cq'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+This test suite is part of the Platform Validation Suite and contains all
+critical (CQ-blocking) tast tests that can run on Moblab with no special
+test bed requirements. It is intended to mirror the bvt-tast-cq test suite as
+closely as possible.
+For more info on PVS, please check out go/cros-pvs
+For partner-facing docs, see https://chromeos.google.com/partner/dlm/docs/PVS/pvs_partner_documentation.html
+'''
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '!informational && '
+                             '!"name:printer.AddBrotherPrinter" && '     # b/192458034
+                             '!"name:printer.ResolutionBrother.*" && '   # b/192458034
+                             '!"name:typec.Basic" && ' # typec.Basic requires a servo, which may not be available for PVS testing.
+                             '!"name:hwsec.AttestationNoExternalServer" && ' # b/217752622
+                             '!"name:apps.LaunchHelpApp.clamshell_logged_in_stable" && ' #b/218705871
+                             '!"name:apps.LaunchHelpApp.clamshell_oobe_stable" && ' #b/218705871
+                             '!"name:apps.LaunchHelpApp.tablet_logged_in_stable" && ' #b/218705871
+                             '!"name:apps.LaunchHelpApp.tablet_oobe_stable" && ' #b/218705871
+                             '!"name:arc.Drivefs" && ' #b/218705871
+                             '!"name:arc.Drivefs.vm" && ' #b/218705871
+                             '!"name:arc.Optin" && ' #b/218705871
+                             '!"name:arc.Optin.vm" && ' #b/218705871
+                             '!"name:arc.OptinNetworkError" && ' #b/218705871
+                             '!"name:example.SecretVars" && ' #b/218705871
+                             '!"name:filemanager.DrivefsUI" && ' #b/218705871
+                             '!"name:inputs.VirtualKeyboardOOBE" && ' #b/218705871
+                             '!"name:login.AuthError" && ' #b/218705871
+                             '!"name:login.ChangePassword" && ' #b/218705871
+                             '!"name:login.ChromeGAIA" && ' #b/218705871
+                             '!"name:login.ExistingUser" && ' #b/218705871
+                             '!"name:login.Offline" && ' #b/218705871
+                             '!"name:login.ProfileExtension" && ' #b/218705871
+                             '!"name:platform.Drivefs" && ' #b/218705871
+                             '!"name:quicksettings.SignInScreen.audio" && ' #b/218705871
+                             '!"name:quicksettings.SignInScreen.battery" && ' #b/218705871
+                             '!"name:quicksettings.SignInScreen.noaudio_nobattery"' #b/218705871
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=10800,
+                 command_args=args,
+                 clear_tmp=True,
+                 retries=2)
+parallel_simple(run, machines)
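
The pvs-tast-cq expression above is one long conjunction: a base attribute query plus a '!"name:..."' term per excluded test, each annotated with a bug. When the exclusion list grows this long it can be generated from a plain list instead of maintained by hand; a hedged sketch (the helper is not part of autotest, and only typec.Basic from the list above is shown):

    def build_expr(base_terms, excluded_names):
        """Join attribute terms and per-test exclusions into one Tast expr."""
        terms = list(base_terms)
        terms += ['!"name:%s"' % name for name in excluded_names]
        return '(' + ' && '.join(terms) + ')'

    expr = build_expr(['"group:mainline"', '!informational'],
                      ['typec.Basic'])  # excluded above because it needs a servo
    # expr == '("group:mainline" && !informational && !"name:typec.Basic")'
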
diff --git a/server/site_tests/tast/control.pvs-video b/server/site_tests/tast/control.pvs-video
new file mode 100644
index 0000000..69add89
--- /dev/null
+++ b/server/site_tests/tast/control.pvs-video
@@ -0,0 +1,79 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'ChromeOS Team'
+NAME = 'tast.pvs-video'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:pvs-video'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+This test suite is part of the Platform Validation Suite (go/cros-pvs)
+and includes a collection of Video tests that run without additional hardware.
+'''
+def run(machine):
+  job.run_test('tast',
+              host=hosts.create_host(machine),
+              test_exprs=['('
+                '!"name:*_lacros*" && ' # b/198181442
+                '!"name:video.EncodeAccel.h264_2160p" && ' # b/198181442
+                '!"name:video.DecodeAccel.vp8_odd_dimensions" && ' # http://b/202188074
+                '!"name:video.DecodeAccel.vp9_odd_dimensions" && ' # http://b/202188074
+                '!"name:video.DecodeAccelVD.vp8_odd_dimension" && ' # http://b/202188074
+                '!"name:video.DecodeAccelVD.vp9_odd_dimension" && ' # http://b/202188074
+                '!"name:video.PlatformDecoding.vaapi_vp9_0_svc" && ' # http://b/202188074
+                '!"name:video.EncodeAccel.h264_180p" && ' # http://b/215462119
+                '!"name:video.MemCheck.h264_hw_switch" && ' # http://b/215462119
+                '!"name:video.PlatformDecoding.vaapi_vp9_0_level5_0_buf" && ' # http://b/215462119
+                '!"name:video.PlatformDecoding.vaapi_vp9_0_level5_0_gf_dist" && ' # http://b/215462119
+                '!"name:video.DecodeAccelPerf.h264_2160p_60fps" && ' # http://b/215462119
+                '!"name:video.DecodeAccelPerf.vp8_2160p_60fps" && ' # http://b/215462119
+                '!"name:video.DecodeAccelPerf.vp9_2160p_30fps" && ' # http://b/215462119
+                '!"name:video.DecodeAccelPerf.vp9_2160p_60fps" && ' # http://b/215462119
+                '!"name:video.DecodeAccelVDPerf.h264_2160p_60fps" && ' # http://b/215462119
+                '!"name:video.DecodeAccelVDPerf.vp8_2160p_60fps" && ' # http://b/215462119
+                '!"name:video.PlatformDecoding.vaapi_vp9_0_level5_0_odd_size" && ' # http://b/215462119
+                '!"name:video.PlatformDecoding.vaapi_vp9_0_level5_0_sub8x8" && ' # http://b/215462119
+                '('
+                  ' "name:video.Capability" || '
+                  ' "name:video.CDMOEMCrypto.*" || '
+                  ' "name:video.Contents.*" || '
+                  ' "name:video.DecodeAccel.*" || '
+                  ' "name:video.DecodeAccelPerf.*" || '
+                  ' "name:video.DecodeAccelSmoke" || '
+                  ' "name:video.DecodeAccelVD.*" || '
+                  ' "name:video.DecodeAccelVDPerf.*" || '
+                  ' "name:video.DecodeCompliance.*" || '
+                  ' "name:video.DecodeEncodeAccelPerf" || '
+                  ' "name:video.EncodeAccel.*" || '
+                  ' "name:video.EncodeAccelPerf.*" || '
+                  ' "name:video.MemCheck.*" || '
+                  ' "name:video.PlatformDecoding.*" || '
+                  ' "name:video.PlatformLibYUVPerftest.*" || '
+                  ' "name:video.PlatformV4L2.decoder" || '
+                  ' "name:video.PlatformVAAPIUnittest" || '
+                  ' "name:video.Play.*" || '
+                  ' "name:video.PlayDRM.*" || '
+                  ' "name:video.PlaybackPerf.*" || '
+                  ' "name:video.Seek.*" || '
+                  ' "name:webrtc.GetDisplayMedia.*" || '
+                  ' "name:webrtc.MediaRecorder.*" || '
+                  ' "name:webrtc.MediaRecorderAPI" || '
+                  ' "name:webrtc.MediaRecorderMulti.*" || '
+                  ' "name:webrtc.MediaRecorderPerf.*" || '
+                  ' "name:webrtc.RTCPeerConnection.*" || '
+                  ' "name:webrtc.RTCPeerConnection.*svc*" || '
+                  ' "name:webrtc.RTCPeerConnectionPerf.*" '
+                ')'
+              ')'],
+              ignore_test_failures=False,
+              max_run_sec=14200,
+              command_args=args,
+              clear_tmp=True)
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.runtime-probe b/server/site_tests/tast/control.runtime-probe
index 937fd28..c87c0a7 100644
--- a/server/site_tests/tast/control.runtime-probe
+++ b/server/site_tests/tast/control.runtime-probe
@@ -7,6 +7,7 @@
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:runtime_probe_perbuild'
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -26,10 +27,11 @@
 
 def run(machine):
     host = hosts.create_host(machine)
-    with tempfile.NamedTemporaryFile(suffix='.yaml') as temp_file:
+    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as temp_file:
         host_info = host.host_info_store.get()
-        yaml.dump({"autotest_host_info_labels": json.dumps(host_info.labels)},
-                  stream=temp_file)
+        yaml.safe_dump({'autotest_host_info_labels':
+                        json.dumps(host_info.labels)}, stream=temp_file)
+        temp_file.flush()
         job.run_test('tast',
                      host=host,
                      test_exprs=['("group:runtime_probe")'],
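
The runtime-probe hunk above switches the temp file to text mode, moves to yaml.safe_dump, and adds temp_file.flush(). The flush is the important part: NamedTemporaryFile buffers writes, so without it anything that opens the file by name before it is closed can see an empty or truncated YAML document. A self-contained sketch of the same pattern (the label list is a stand-in for host_info.labels):

    import json
    import tempfile

    import yaml

    labels = ['board:fake-board', 'model:fake-model']  # stand-in data

    with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w') as temp_file:
        # safe_dump refuses to emit arbitrary Python object tags.
        yaml.safe_dump({'autotest_host_info_labels': json.dumps(labels)},
                       stream=temp_file)
        temp_file.flush()  # make the data visible to readers of temp_file.name
        with open(temp_file.name) as reader:
            print(reader.read())
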
diff --git a/server/site_tests/tast/control.skylab-staging b/server/site_tests/tast/control.skylab-staging
index 388a3f7..67f0183 100644
--- a/server/site_tests/tast/control.skylab-staging
+++ b/server/site_tests/tast/control.skylab-staging
@@ -7,6 +7,7 @@
 ATTRIBUTES = "suite:skylab_staging_test"
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
diff --git a/server/site_tests/tast/control.storage-qual-quick b/server/site_tests/tast/control.storage-qual-quick
index d259078..aa181f8 100644
--- a/server/site_tests/tast/control.storage-qual-quick
+++ b/server/site_tests/tast/control.storage-qual-quick
@@ -2,12 +2,15 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from autotest_lib.client.common_lib import utils
+
 AUTHOR = 'abergman, chromeos-engprod-platform-syd'
 NAME = 'tast.storage-qual-quick'
 TIME = 'MEDIUM'
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:storage_qual_v2_quick'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
@@ -25,6 +28,12 @@
 import tempfile
 import yaml
 
+utils.write_keyval(job.resultdir, {
+  'storage_qual_version': 2,
+  'bug_id': bug_id,
+  'part_id': part_id
+})
+
 def run(machine):
     args_dict = globals().get('args_dict', {})
     test_exprs = args_dict.get('test_exprs', 'storage.QuickStress.*').split(',')
@@ -35,8 +44,9 @@
         test_args = dict()
         tast_prefix = 'tast_'
         for key, value in args_dict.items():
+            # TODO(b/185932989): get rid of 'tast_' prefix for var names.
             if key.startswith(tast_prefix):
-                test_args[key] = value.lstrip(tast_prefix)
+                test_args[key] = value
         yaml.dump(test_args, stream=temp_file, default_flow_style=False)
 
         job.run_test('tast',
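
The storage-qual-quick hunk above drops value.lstrip(tast_prefix) in favor of the untouched value. The old call was a classic pitfall: str.lstrip takes a set of characters rather than a prefix, so 'tast_' would also strip leading 't', 'a', 's' and '_' characters from values that never carried the prefix. A short demonstration plus a prefix-safe alternative (purely illustrative; the control file now simply keeps the value as-is):

    tast_prefix = 'tast_'

    # lstrip removes any of the characters {'t', 'a', 's', '_'} from the left,
    # so it can mangle an ordinary value:
    print('standard'.lstrip(tast_prefix))  # -> 'ndard'

    def strip_prefix(text, prefix):
        """Remove prefix only when text actually starts with it."""
        return text[len(prefix):] if text.startswith(prefix) else text

    print(strip_prefix('tast_example_var', tast_prefix))  # -> 'example_var'
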
diff --git a/server/site_tests/tast/control.stress b/server/site_tests/tast/control.stress
index 7e7a2e5..338952c 100644
--- a/server/site_tests/tast/control.stress
+++ b/server/site_tests/tast/control.stress
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:stress'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
diff --git a/server/site_tests/tast/control.syzcorpus b/server/site_tests/tast/control.syzcorpus
new file mode 100644
index 0000000..07c39f7
--- /dev/null
+++ b/server/site_tests/tast/control.syzcorpus
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'zsm, ChromeOS Kernel Team'
+NAME = 'tast.syzcorpus'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:syzcorpus'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run the Tast test(s) that run Syzkaller reproducers to test ChromeOS kernels.
+This is a regression test suite for the kernel (both core and drivers).
+See https://github.com/dvyukov/syzkaller-repros for more information.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/
+for more information.
+See http://go/tast-failures for information about investigating failures.
+'''
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:syzcorpus")'],
+                 ignore_test_failures=False, max_run_sec=3600,
+                 command_args=args)
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.syzkaller b/server/site_tests/tast/control.syzkaller
index a41be0d..d2c015a 100644
--- a/server/site_tests/tast/control.syzkaller
+++ b/server/site_tests/tast/control.syzkaller
@@ -2,18 +2,19 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = 'mwiitala, Chrome OS Kernel Team'
+AUTHOR = 'mwiitala, ChromeOS Kernel Team'
 NAME = 'tast.syzkaller'
 TIME = 'SHORT'
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:syzkaller'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 # tast.py uses binaries installed from autotest_server_package.tar.bz2.
 REQUIRE_SSP = True
 
 DOC = '''
-Run the Tast test(s) to fuzz the Chrome OS kernel using Syzkaller.
+Run the Tast test(s) to fuzz the ChromeOS Kernel using Syzkaller.
 See go/ctp-syzkaller for more details.
 
 Tast is an integration-testing framework analogous to the test-running portion
diff --git a/server/site_tests/tast/control.tast-timeout b/server/site_tests/tast/control.tast-timeout
new file mode 100644
index 0000000..c30b1f2
--- /dev/null
+++ b/server/site_tests/tast/control.tast-timeout
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.tast-timeout'
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+# Uncomment the following line for CQ testing
+#ATTRIBUTES = 'suite:bvt-tast-cq'
+MAX_RESULT_SIZE_KB = 256 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Simulate a tast timeout in the middle of running the tast tests.
+
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['('
+                             '"group:mainline" && '
+                             '"name:example.*"'
+                             ')'],
+                 ignore_test_failures=False, max_run_sec=120,
+                 command_args=args,
+                 clear_tpm=False)
+
+parallel_simple(run, machines)
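
control.tast-timeout above deliberately sets max_run_sec=120 so that a normal run of the example tests overruns the budget and exercises the wrapper's timeout handling. The general mechanism being exercised, bounding a child process's runtime, looks roughly like this in plain Python (an illustration of the concept only, not how tast.py implements it):

    import subprocess

    try:
        # Give the child at most 2 seconds of wall-clock time.
        subprocess.run(['sleep', '5'], timeout=2, check=True)
    except subprocess.TimeoutExpired:
        print('child exceeded its time budget; treat the run as failed')
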
diff --git a/server/site_tests/tast/control.typec-lab b/server/site_tests/tast/control.typec-lab
new file mode 100644
index 0000000..4afdc02
--- /dev/null
+++ b/server/site_tests/tast/control.typec-lab
@@ -0,0 +1,43 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.typec-lab'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:typec_lab'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Tast tests for basic typec/TBT functionality.
+
+The tests are part of 'group:typec'. The 'typec_lab' sub-attribute
+limits the selection to the typec lab tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/
+for more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:typec" && typec_lab)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.wificell-atten-roam-perf b/server/site_tests/tast/control.wificell-atten-roam-perf
index 1574b48..d2cc048 100644
--- a/server/site_tests/tast/control.wificell-atten-roam-perf
+++ b/server/site_tests/tast/control.wificell-atten-roam-perf
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:wifi_atten_roam_perf'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 DOC = '''
 Run Tast test which check WiFi roaming performance/correctness.
diff --git a/server/site_tests/tast/control.wificell-cq b/server/site_tests/tast/control.wificell-cq
index 670dc22..dddf4ae 100644
--- a/server/site_tests/tast/control.wificell-cq
+++ b/server/site_tests/tast/control.wificell-cq
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:wificell-cq'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 DOC = '''
 Run the Tast tests which run in suite:wificell-cq.
@@ -28,6 +29,7 @@
                  host=hosts.create_host(machine),
                  test_exprs=['("group:wificell" && wificell_cq && !wificell_unstable)'],
                  ignore_test_failures=True, max_run_sec=10800,
-                 command_args=args)
+                 command_args=args,
+                 retries=2)
 
 parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.wificell-dut-validation b/server/site_tests/tast/control.wificell-dut-validation
new file mode 100644
index 0000000..272be81
--- /dev/null
+++ b/server/site_tests/tast/control.wificell-dut-validation
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.wificell-dut-validation'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:wificell_dut_validation'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the Tast tests which validate the DUT and its peripherals on suite:wificell_dut_validation.
+
+"group:wificell" indicates that the test runs on a wificell fixture. Its
+sub-attributes:
+  * "wificell_dut_validation": the test is used to verify the DUT and its peripherals.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:wificell" && wificell_dut_validation)'],
+                 ignore_test_failures=False, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.wificell-func-ax b/server/site_tests/tast/control.wificell-func-ax
new file mode 100644
index 0000000..ff03443
--- /dev/null
+++ b/server/site_tests/tast/control.wificell-func-ax
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'chromeos-kernel-wifi@google.com'
+NAME = 'tast.wificell-func-ax'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:wifi_matfunc_ax'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the stable Tast tests for 802.11ax WiFi functions on suite:wifi_matfunc_ax.
+
+"group:wificell" indicates that the test runs on a wificell fixture. Its
+sub-attributes:
+  * "wificell_func_ax": verify 802.11ax functions using APs that support WiFi 6 and WiFi 6E.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:wificell" && wificell_func_ax)'],
+                 ignore_test_failures=False, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.wificell-func-stable b/server/site_tests/tast/control.wificell-func-stable
index 9d8041f..d2957ab 100644
--- a/server/site_tests/tast/control.wificell-func-stable
+++ b/server/site_tests/tast/control.wificell-func-stable
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:wifi_matfunc'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 DOC = '''
 Run the stable Tast tests for basic WiFi functions on suite:wifi_matfunc.
@@ -29,7 +30,7 @@
                  host=hosts.create_host(machine),
                  test_exprs=['("group:wificell" && wificell_func && '
                              '!wificell_unstable)'],
-                 ignore_test_failures=True, max_run_sec=10800,
+                 ignore_test_failures=False, max_run_sec=10800,
                  command_args=args)
 
 parallel_simple(run, machines)
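
Expressions like ("group:wificell" && wificell_func && !wificell_unstable) select tests by their attribute sets: quoted terms and bare identifiers must be present, negated terms must be absent. As a mental model only (Tast has its own expression parser; this sketch is not it), the selection reduces to set membership:

    def selected(attrs, required, forbidden):
        """True when a test has every required attr and no forbidden attr."""
        return required <= attrs and not (forbidden & attrs)

    test_attrs = {'group:wificell', 'wificell_func'}  # hypothetical test
    print(selected(test_attrs,
                   required={'group:wificell', 'wificell_func'},
                   forbidden={'wificell_unstable'}))   # True
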
diff --git a/server/site_tests/tast/control.wificell-func-unstable b/server/site_tests/tast/control.wificell-func-unstable
index d0ec5be..6f70c29 100644
--- a/server/site_tests/tast/control.wificell-func-unstable
+++ b/server/site_tests/tast/control.wificell-func-unstable
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:wifi_func_tast'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 DOC = '''
 Run the unstable Tast tests which verify basic WiFi functions using wificell fixture.
diff --git a/server/site_tests/tast/control.wificell-perf-stable b/server/site_tests/tast/control.wificell-perf-stable
index 49eb521..c4565c4 100644
--- a/server/site_tests/tast/control.wificell-perf-stable
+++ b/server/site_tests/tast/control.wificell-perf-stable
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:wifi_perf'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 DOC = '''
 Run the stable Tast tests which measure WiFi performance.
diff --git a/server/site_tests/tast/control.wificell-perf-unstable b/server/site_tests/tast/control.wificell-perf-unstable
index d082822..3b17923 100644
--- a/server/site_tests/tast/control.wificell-perf-unstable
+++ b/server/site_tests/tast/control.wificell-perf-unstable
@@ -8,6 +8,7 @@
 TEST_TYPE = 'Server'
 ATTRIBUTES = 'suite:wifi_func_tast'
 MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
 
 DOC = '''
 Run the unstable Tast tests which measure WiFi performance.
diff --git a/server/site_tests/tast/control.wificell-suspend-stable b/server/site_tests/tast/control.wificell-suspend-stable
new file mode 100644
index 0000000..53c99b7
--- /dev/null
+++ b/server/site_tests/tast/control.wificell-suspend-stable
@@ -0,0 +1,32 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.wificell-suspend-stable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:wifi_matfunc'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the stable Tast tests for basic WiFi behavior related to suspend/resume
+on suite:wifi_matfunc.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine),
+                 test_exprs=['("group:wificell" && wificell_suspend && '
+                             '!wificell_unstable)'],
+                 ignore_test_failures=False, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.wificell-suspend-unstable b/server/site_tests/tast/control.wificell-suspend-unstable
new file mode 100644
index 0000000..089affe
--- /dev/null
+++ b/server/site_tests/tast/control.wificell-suspend-unstable
@@ -0,0 +1,46 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.wificell-suspend-unstable'
+TIME = 'MEDIUM'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:wifi_func_tast'
+MAX_RESULT_SIZE_KB = 1024 * 1024
+PY_VERSION = 3
+
+DOC = '''
+Run the unstable Tast tests which verify basic WiFi behavior related to
+suspend/resume using wificell fixture.
+
+Note that wificell-suspend-stable does not have a servo dependency because it
+belongs to suite:wifi_matfunc, and that suite does not currently configure
+servo.
+
+The suspend tests that need servo shall stay in this unstable group until
+suite:wifi_matfunc supports servo.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+
+See http://go/tast-failures for information about investigating failures.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 test_exprs=['("group:wificell" && wificell_suspend && '
+                             'wificell_unstable)'],
+                 ignore_test_failures=True, max_run_sec=10800,
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.wilco_bve b/server/site_tests/tast/control.wilco_bve
new file mode 100644
index 0000000..9c5f878
--- /dev/null
+++ b/server/site_tests/tast/control.wilco_bve
@@ -0,0 +1,39 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import utils
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.wilco_bve'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'servo_state:WORKING'
+ATTRIBUTES = 'suite:wilco_bve'
+MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Wilco tests that require a servo type-A connection to a USB-A port that
+has a lightning bolt or a battery icon engraved on it.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+'''
+
+args_dict = utils.args_to_dict(args)
+assert 'servo_state:WORKING' in DEPENDENCIES
+servo_args = hosts.CrosHost.get_servo_arguments(args_dict)
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine, servo_args=servo_args),
+                 max_run_sec=10800,
+                 test_exprs=['("group:wilco_bve")'],
+                 command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/control.wilco_bve_dock b/server/site_tests/tast/control.wilco_bve_dock
new file mode 100644
index 0000000..2967363
--- /dev/null
+++ b/server/site_tests/tast/control.wilco_bve_dock
@@ -0,0 +1,29 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'Chromium OS team'
+NAME = 'tast.wilco_bve_dock'
+TIME = 'LONG'
+TEST_TYPE = 'Server'
+ATTRIBUTES = 'suite:wilco_bve_dock'
+MAX_RESULT_SIZE_KB = 50 * 1024
+PY_VERSION = 3
+
+# tast.py uses binaries installed from autotest_server_package.tar.bz2.
+REQUIRE_SSP = True
+
+DOC = '''
+Run Wilco tests that require a Solomon dock connected to the DUT.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/ for
+more information.
+'''
+
+def run(machine):
+    job.run_test('tast',
+                 host=hosts.create_host(machine), max_run_sec=10800,
+                 test_exprs=['("group:wilco_bve_dock")'], command_args=args)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/tast/tast.py b/server/site_tests/tast/tast.py
index 4f168e1..36c8fec 100644
--- a/server/site_tests/tast/tast.py
+++ b/server/site_tests/tast/tast.py
@@ -2,43 +2,110 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import, division, print_function
+
+import datetime
 import json
 import logging
 import os
+import shutil
+import socket
+import tempfile
+from collections import OrderedDict
 
 import dateutil.parser
-
-from autotest_lib.client.common_lib import base_job
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import dev_server
-from autotest_lib.client.common_lib.cros import tpm_utils
-from autotest_lib.server import test
-from autotest_lib.server import utils
+import six
+import yaml
+from autotest_lib.client.common_lib import (base_job, config_vars, error)
+from autotest_lib.client.common_lib.cros import dev_server, tpm_utils
+from autotest_lib.server import test, utils
 from autotest_lib.server.cros.network import wifi_test_context_manager
-from autotest_lib.server.hosts import cros_host
-from autotest_lib.server.hosts import servo_host
-from autotest_lib.server.hosts import servo_constants
+from autotest_lib.server.hosts import cros_host, servo_constants, servo_host
+from autotest_lib.site_utils.rpm_control_system import rpm_constants
 from autotest_lib.utils import labellib
-
+from six.moves import urllib
 
 # A datetime.DateTime representing the Unix epoch in UTC.
 _UNIX_EPOCH = dateutil.parser.parse('1970-01-01T00:00:00Z')
 
+# Keywords that are used in result json file.
+_KEY_NAME = 'name'
+_KEY_START = 'start'
+_KEY_END = 'end'
+_KEY_ERRORS = 'errors'
+_KEY_SKIP_REASON = 'skipReason'
+_KEY_REASON = 'reason'
+_KEY_TIME = 'time'
+_KEY_MISSING_REASON = 'missingReason'
 
-def _encode_utf8_json(j):
-    """Takes JSON object parsed by json.load() family, and encode each unicode
-    strings into utf-8.
+
+def split_arguments(args):
+    """Splits arguments into the autotest and tast variable assignments.
+    Use the results as command_args and varslist respectively.
+
+    @param args: List of strings passed to test_that --args
+
+    @returns Array of Tauto args, Array of TAST variable assignments.
     """
-    if isinstance(j, unicode):
-        return j.encode('utf-8')
+
+    auto_args = []
+    tast_vars = []
+    for a in args:
+        if a.startswith("tast."):
+            tast_vars.append(a[5:])
+        else:
+            auto_args.append(a)
+    return auto_args, tast_vars
+
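+# A minimal usage sketch (hypothetical argument values) for split_arguments():
+#
+#   auto_args, tast_vars = split_arguments(
+#           ['servo_host=labstation1', 'tast.example.StrVar=hello'])
+#   assert auto_args == ['servo_host=labstation1']
+#   assert tast_vars == ['example.StrVar=hello']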
+
+def _encode_text(text):
+    """Takes an unicode string into utf-8 string
+    (bytes for python 2 and text for python 3).
+    """
+    if six.PY2:
+        return text.encode('utf-8')
+    return text
+
+
+def _encode_json(j):
+    """Takes JSON object parsed by json.load() family, and encode each unicode
+    strings into str.
+    """
+    if isinstance(j, six.text_type):
+        return _encode_text(j)
     if isinstance(j, list):
-        return [_encode_utf8_json(x) for x in j]
+        return [_encode_json(x) for x in j]
     if isinstance(j, dict):
-        return dict((_encode_utf8_json(k), _encode_utf8_json(v))
-                    for k, v in j.iteritems())
+        return dict((_encode_json(k), _encode_json(v))
+                    for k, v in six.iteritems(j))
     return j
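+# A minimal sketch (assumed inputs) of _encode_json(): nested unicode strings
+# become utf-8 bytes on Python 2 and remain native str on Python 3, while
+# other value types pass through unchanged.
+#
+#   encoded = _encode_json({u'name': u'pkg.Test1', u'errors': []})
+#   assert encoded == {'name': 'pkg.Test1', 'errors': []}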
 
 
+def _dut_server_arg(command_args):
+    """Return dut_server arg out of command_args if found."""
+    dut_server_arg = None
+    dut_server = None
+    for arg in command_args:
+        if 'dut_servers=' in arg:
+            dut_server_arg = arg
+            break
+    if not dut_server_arg:
+        return
+    dut_server = dut_server_arg.split('=')[1]
+
+    # In case multiple dutservers are passed...
+    # e.g.: "dut_servers=localhost:1343,localhost:5678"
+    if ',' in dut_server:
+        # Currently only support the first, until we map dut:dut_server
+        dut_server = dut_server.split(',')[0]
+
+    return dut_server
+
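+# A minimal usage sketch (hypothetical addresses) for _dut_server_arg(): only
+# the first server is used when several are passed.
+#
+#   assert _dut_server_arg(['dut_servers=localhost:1343,localhost:5678']) == \
+#           'localhost:1343'
+#   assert _dut_server_arg(['build=eve-release/R100-14500.0.0']) is None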
+
+class TastConfigError(error.AutotestError):
+    """Indicates a problem with configuration files."""
+
+
 class tast(test.test):
     """Autotest server test that runs a Tast test suite.
 
@@ -53,10 +120,10 @@
     version = 1
 
     # Maximum time to wait for various tast commands to complete, in seconds.
-    # Note that _LIST_TIMEOUT_SEC includes time to download private test bundles
-    # if run_private_tests=True.
     _VERSION_TIMEOUT_SEC = 10
-    _LIST_TIMEOUT_SEC = 60
+    _DOWNLOAD_TIMEOUT_SEC = 120
+    _LIST_TIMEOUT_SEC = 30
+    _LIST_TRIES = 2
 
     # Additional time to add to the run timeout (e.g. for collecting crashes and
     # logs).
@@ -90,6 +157,7 @@
     _SSP_REMOTE_TEST_RUNNER_PATH = os.path.join(_SSP_ROOT, 'remote_test_runner')
     _SSP_DEFAULT_VARS_DIR_PATH = os.path.join(_SSP_ROOT, 'vars')
 
+    _F20_CONTAINER_BREADCRUMB = '/usr/local/f20container'
     # Prefix added to Tast test names when writing their results to TKO
     # status.log files.
     _TEST_NAME_PREFIX = 'tast.'
@@ -102,15 +170,20 @@
     _JOB_STATUS_START = 'START'
     _JOB_STATUS_END_GOOD = 'END GOOD'
     _JOB_STATUS_END_FAIL = 'END FAIL'
-    _JOB_STATUS_END_ABORT = 'END ABORT'
+    _JOB_STATUS_END_NOSTATUS = 'END NOSTATUS'
+    _JOB_STATUS_END_SKIP = 'END TEST_NA'
 
     # In-job TKO event status codes from base_client_job._run_test_base in
     # client/bin/job.py and client/common_lib/error.py.
     _JOB_STATUS_GOOD = 'GOOD'
     _JOB_STATUS_FAIL = 'FAIL'
+    _JOB_STATUS_NOSTATUS = 'NOSTATUS'
+    _JOB_STATUS_SKIP = 'TEST_NA'
 
     # Status reason used when an individual Tast test doesn't finish running.
     _TEST_DID_NOT_FINISH_MSG = 'Test did not finish'
+    # Status reason used when an individual Tast test doesn't start running.
+    _TEST_DID_NOT_RUN_MSG = 'Test did not run'
 
     def initialize(self,
                    host,
@@ -125,8 +198,20 @@
                    run_private_tests=True,
                    varsfiles=[],
                    download_data_lazily=True,
-                   clear_tpm=False,
-                   varslist=[]):
+                   clear_tpm=True,
+                   totalshards=1,
+                   shardindex=0,
+                   companion_duts={},
+                   varslist=[],
+                   maybemissingvars='',
+                   use_camera_box=False,
+                   vars_gs_path='',
+                   retries=0,
+                   ephemeraldevserver=None,
+                   is_cft=False,
+                   exclude_missing=False,
+                   test_filter_files=[],
+                   report_skipped=False):
         """
         @param host: remote.RemoteHost instance representing DUT.
         @param test_exprs: Array of strings describing tests to run.
@@ -156,11 +241,40 @@
             lazily between tests. If false, external data files are downloaded
             in a batch before running tests.
         @param clear_tpm: clear the TPM first before running the tast tests.
+        @param totalshards: Total number of shards.
+        @param shardindex: The shard index to be run.
+        @param companion_duts: A map of role to DUT name to tast run command as
+            |-companiondut| arguments. Each entry in the map will be formatted
+            as "role:dut" for each -companiondut argument.
         @param varslist: list of strings to pass to tast run command as |-vars|
             arguments. Each string should be formatted as "name=value".
+        @param maybemissingvars: a regex to pass to the tast run command as the
+            |-maybemissingvars| argument.
+        @param vars_gs_path: gs path to load vars from. The vars are loaded
+            from gs in json format (key = value), then stored in a local
+            yaml file. The local file name is then appended to |-varsfiles|.
+        @param use_camera_box: Pass the IP address of the chart device in the
+            CameraBox to the tast tests.
+        @param ephemeraldevserver: A value to pass to -ephemeraldevserver.
+        @param exclude_missing: Exclude tests that are requested but not found
+            by the `tast list` command.
+        @param test_filter_files: A list of files containing names of tests to
+            be disabled.
+        @param report_skipped: If true, skipped tests will be reported in
+            the status.log.
+
+        When the F20 breadcrumb is detected, it is assumed we are running in
+            the F20 container, so SSP is force-disabled (though the SSP flag
+            should already be false in this case). The F20 container is fully
+            build-versioned and matches the chroot paths, so the SSP logic
+            should not be used.
 
         @raises error.TestFail if the Tast installation couldn't be found.
         """
+        f20_container = False
+        if os.path.exists(self._F20_CONTAINER_BREADCRUMB):
+            ssp = False
+            f20_container = True
         if ssp is None:
             ssp = os.path.exists(self._SSP_TAST_PATH)
         if build is None:
@@ -181,6 +295,26 @@
         self._varslist = varslist
         self._download_data_lazily = download_data_lazily
         self._clear_tpm = clear_tpm
+        self._totalshards = totalshards
+        self._shardindex = shardindex
+        self._companion_duts = companion_duts
+        self._maybemissingvars = maybemissingvars
+        self._vars_gs_path = vars_gs_path
+        self._use_camera_box = use_camera_box
+        self._retries = retries
+        self._f20_container = f20_container or is_cft
+        self._ephemeraldevserver = ephemeraldevserver
+        self._exclude_missing = exclude_missing
+        self._test_filter_files = test_filter_files
+        self._report_skipped = report_skipped
+
+        # Need to pass in dut_servers for every test in CFT.
+        # But only add it if not already in varslist.
+        if not any('dut.servers' in arg for arg in varslist):
+            dut_server = _dut_server_arg(command_args)
+            if dut_server:
+                self._varslist.append('servers.dut=:%s' % dut_server)
 
         # List of JSON objects describing tests that will be run. See Test in
         # src/platform/tast/src/chromiumos/tast/testing/test.go for details.
@@ -199,7 +333,7 @@
 
         # Register a hook to write the results of individual Tast tests as
         # top-level entries in the TKO status.log file.
-        self.job.add_post_run_hook(self._log_all_tests)
+        self.job.add_post_run_hook(self._log_all_unique_tests)
 
     def run_once(self):
         """Runs a single iteration of the test."""
@@ -208,22 +342,47 @@
             tpm_utils.ClearTPMOwnerRequest(self._host, wait_for_ready=True)
 
         self._log_version()
-        self._find_devservers()
+        if not self._f20_container:
+            self._find_devservers()
+
+        self._ensure_bundles()
 
         # Shortcut if no test belongs to the specified test_exprs.
-        if not self._get_tests_to_run():
+        list_test_exception = None
+        has_tests_to_run = False
+        for i in range(self._LIST_TRIES):
+            try:
+                if i > 0:
+                    logging.info('Retrying to get which tests to run')
+                has_tests_to_run = bool(self._get_tests_to_run())
+                list_test_exception = None
+                break
+            except Exception as e:
+                list_test_exception = e
+        if list_test_exception:
+            raise error.TestFail('Failed to get list of tests to run: %s' %
+                                 str(list_test_exception))
+        if not has_tests_to_run:
             return
 
+        # TODO(b/221333999): There are no devservers in CFT (F20), so this
+        # would likely error. Once full CFT is done tast.py will be deprecated
+        # and this won't be needed.
+        if not self._f20_container:
+            self._pull_varsfile_from_gs()
+
         run_failed = False
+        run_failed_msg = None
         try:
             self._run_tests()
-        except:
+        except Exception as e:
             run_failed = True
+            run_failed_msg = str(e).split('\n', 1)[0]
             raise
         finally:
             self._read_run_error()
             # Parse partial results even if the tast command didn't finish.
-            self._parse_results(run_failed)
+            self._parse_results(run_failed, run_failed_msg)
 
     def set_fake_now_for_testing(self, now):
         """Sets a fake timestamp to use in place of time.time() for unit tests.
@@ -232,6 +391,118 @@
         """
         self._fake_now = now
 
+    def _pull_varsfile_from_gs(self):
+        """Pulls varsfiles from GS, does dynamic values transformation, stores
+        it as a local file and appends the file name to varsfiles.
+
+        Has to be called after _get_tests_to_run since it's using _tests_to_run.
+
+        @param varsgspath Path to varsfiles in GS e.g.
+            'config/perf_cuj/perf_cuj.config'.
+
+        @raises TastConfigError for config errors.
+        """
+        if not self._vars_gs_path:
+            return
+
+        devservers = dev_server.ImageServer.get_available_devservers()
+        devserver_url = devservers[0][0]
+        if not devserver_url:
+            raise TastConfigError('No devserver_url')
+
+        logging.info('Using devserver: %s', devserver_url)
+        labels = self._host.host_info_store.get().labels
+        build = labellib.LabelsMapping(labels).get(labellib.Key.CROS_VERSION)
+        if not build:
+            raise TastConfigError(
+                    'Not able to detect build, means not running on Moblab.')
+
+        ds = dev_server.ImageServer(devserver_url)
+        gs_bucket = dev_server._get_image_storage_server()
+        if not gs_bucket:
+            raise TastConfigError('No image storage server gs bucket')
+
+        config_path, config_file = os.path.split(self._vars_gs_path)
+        archive_url = os.path.join(gs_bucket, config_path.strip('/'))
+        logging.info('Staging configuration from %s.', gs_bucket)
+        try:
+            ds.stage_artifacts(build,
+                               archive_url=archive_url,
+                               files=[config_file])
+        except Exception as e:
+            raise TastConfigError('Staging artifacts failed: %s' % str(e))
+
+        logging.info('Parsing configuration from %s.', archive_url)
+        config_url = os.path.join(devserver_url, 'static',
+                                  self._vars_gs_path.strip('/'))
+        response = urllib.request.urlopen(config_url)
+        vars = json.loads(response.read())
+        test_args = dict()
+        for key in vars:
+            test_args[key] = vars[key]
+        logging.info('Read %d values from remote configuration.', len(vars))
+
+        extvars = self._fill_config_extvars()
+        test_args = config_vars.TransformConfig(test_args, extvars)
+
+        with tempfile.NamedTemporaryFile(suffix='.yaml',
+                                         delete=False) as temp_file:
+            yaml.dump(test_args, stream=temp_file, default_flow_style=False)
+            self._varsfiles.append(temp_file.name)
+
+    def _fill_config_extvars(self):
+        """Fill in external variables map for conditional config processing.
+
+        The sources used (in order of precedence low to high):
+          * --varsfiles.
+          * --varslist.
+          * list of tests to run.
+          * command_args: List of arguments passed on the command line via
+            test_that's --args flag, i.e. |args| in control file.
+          * DUT labels (with and without a value).
+
+        @returns external variables map.
+        """
+        # The latter overwrites the former.
+        extvars = {}
+
+        # Load varsfiles
+        for varsfile in self._varsfiles:
+            with open(varsfile, 'r') as f:
+                for key, val in yaml.safe_load(f).items():
+                    if 'var:' + key in extvars:
+                        logging.info('var:%s overwritten', key)
+                    extvars['var:' + key] = val
+
+        # Load vars
+        for var in self._varslist:
+            key, val = var.split('=', 1)
+            if 'var:' + key in extvars:
+                logging.info('var:%s overwritten', key)
+            extvars['var:' + key] = val
+
+        # Load tests_to_run (self._tests_to_run holds JSON test objects, so
+        # use their names).
+        test_names = [t[_KEY_NAME] for t in self._tests_to_run]
+        extvars['tests:'] = '\n'.join(test_names)
+        for test_name in test_names:
+            extvars['test:' + test_name] = ''
+
+        # Load command_args
+        extvars['args:'] = '\n'.join(self._command_args)
+        for key, val in utils.args_to_dict(self._command_args).items():
+            extvars['arg:' + key] = val
+        for command_arg in self._command_args:
+            if '=' not in command_arg and ':' not in command_arg:
+                extvars['arg:' + command_arg] = ''
+
+        # Load labels
+        labels = self._host.host_info_store.get().labels
+        extvars['labels:'] = '\n'.join(labels)
+        for label in labels:
+            key, val = (label.split(':', 1) + [''])[0:2]
+            extvars['label:' + key] = val
+
+        return extvars
+
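+    # A minimal sketch (hypothetical inputs) of the map built above, assuming
+    # varslist=['perf_cuj.account=user@example.com'],
+    # command_args=['servo_host=labstation1'], a single test 'ui.TabSwitchCUJ'
+    # to run, and DUT labels ['board:eve', 'servo']:
+    #
+    #   {
+    #       'var:perf_cuj.account': 'user@example.com',
+    #       'tests:': 'ui.TabSwitchCUJ',
+    #       'test:ui.TabSwitchCUJ': '',
+    #       'args:': 'servo_host=labstation1',
+    #       'arg:servo_host': 'labstation1',
+    #       'labels:': 'board:eve\nservo',
+    #       'label:board': 'eve',
+    #       'label:servo': '',
+    #   }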
     def _get_path(self, path):
         """Returns the path to an installed Tast-related file or directory.
 
@@ -242,6 +513,34 @@
         """
         return os.path.join(self._install_root, os.path.relpath(path, '/'))
 
+    def _get_camerabox_args(self):
+        """Gets camerabox-related arguments to pass to "tast run".
+
+        @returns List of command-line flag strings that should be inserted in
+            the command line after "tast run".
+        """
+        args = []
+        if self._use_camera_box:
+            host_name = self._host.hostname
+
+            # If host name is "FOO.BAR.BAR2", the chart host name should be
+            # "FOO-tablet.BAR.BAR2"
+            domains = host_name.split('.', 1)
+            domains[0] += '-tablet'
+            chart_host_name = '.'.join(domains)
+            try:
+                chart_ip = socket.gethostbyname(chart_host_name)
+
+                # Check if the IP is pingable.
+                if os.system("ping -c 1 " + chart_ip) != 0:
+                    logging.error('Failed to ping IP: %s.', chart_ip)
+
+                args += ['-var=chart=' + chart_ip]
+            except socket.gaierror:
+                logging.exception('Failed to get IP: %s.', chart_host_name)
+        logging.info('Camerabox args: %s', args)
+        return args
+
     def _get_servo_args(self):
         """Gets servo-related arguments to pass to "tast run".
 
@@ -265,6 +564,41 @@
             return []
         return ['-var=servo=%s:%s' % (host_arg, port_arg)]
 
+    def _get_firmware_args(self):
+        """Gets firmware-related arguments to pass to "tast run".
+
+        @returns List of command-line flag strings that should be inserted in
+            the command line after "tast run".
+        """
+        # Incorporate information that was passed manually.
+        args_dict = utils.args_to_dict(self._command_args)
+
+        args = []
+        no_ec_sync = args_dict.get("no_ec_sync")
+        if no_ec_sync:
+            args += ['-var=firmware.no_ec_sync=' + no_ec_sync]
+        logging.info('Firmware args: %s', args)
+        return args
+
+    def _get_rpm_args(self):
+        """Gets rpm-related arguments to pass to "tast run".
+
+        @returns List of command-line flag strings that should be inserted in
+            the command line after "tast run".
+        """
+        info = self._host.host_info_store.get()
+        args = []
+        forward_args = [
+                (rpm_constants.POWERUNIT_HOSTNAME_KEY, 'powerunitHostname=%s'),
+                (rpm_constants.POWERUNIT_OUTLET_KEY, 'powerunitOutlet=%s'),
+                (rpm_constants.HYDRA_HOSTNAME_KEY, 'hydraHostname=%s'),
+        ]
+        for key, var_arg in forward_args:
+            if key in info.attributes:
+                args += ['-var=' + var_arg % info.attributes[key]]
+        logging.info('RPM args: %s', args)
+        return args
+
     def _get_wificell_args(self):
         """Gets wificell-related (router, pcap) arguments to pass to "tast run".
 
@@ -285,6 +619,17 @@
         for key, var_arg in forward_args:
             if key in args_dict:
                 args += ['-var=' + var_arg % args_dict[key]]
+        # Append "routers" var for supporting multi-router tests with current
+        # two-AP fixture setup (with specified router_addr and pcap_addr args).
+        # TODO(b/171949862): remove this when a new multi-router fixture is
+        # defined and rolled out to the lab.
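+        # For example (hypothetical addresses), router_addr=10.0.0.2 and
+        # pcap_addr=10.0.0.3 yield '-var=routers=10.0.0.2,10.0.0.3'.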
+        if (WiFiManager.CMDLINE_ROUTER_ADDR in args_dict
+                    and WiFiManager.CMDLINE_PCAP_ADDR in args_dict):
+            args += [
+                    '-var=routers=%s,%s' %
+                    (args_dict[WiFiManager.CMDLINE_ROUTER_ADDR],
+                     args_dict[WiFiManager.CMDLINE_PCAP_ADDR])
+            ]
         logging.info('Autotest wificell-related args: %s', args)
         return args
 
@@ -304,7 +649,7 @@
 
         if not gs_bucket or not build:
             return []
-        gs_path = gs_bucket + build
+        gs_path = os.path.join(gs_bucket, build)
         if not gs_path.endswith('/'):
             gs_path += '/'
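+        # For example (hypothetical values), a gs_bucket of
+        # 'gs://chromeos-image-archive/' and a build of
+        # 'eve-release/R100-14500.0.0' yield
+        # 'gs://chromeos-image-archive/eve-release/R100-14500.0.0/'.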
         logging.info('Cloud storage bucket: %s', gs_path)
@@ -315,10 +660,19 @@
 
         The result is saved as self._devserver_args.
         """
-        devservers, _ = dev_server.ImageServer.get_available_devservers(
-            self._host.hostname, prefer_local_devserver=True)
+        logging.info('All devservers: %s',
+                     ', '.join(dev_server.ImageServer.servers()))
+        devservers, can_retry = dev_server.ImageServer.get_available_devservers(
+                self._host.hostname, prefer_local_devserver=True)
+        if not devservers and can_retry and (self._host.is_satlab()
+                                             or 'MOBLAB' in os.environ):
+            devservers, can_retry = dev_server.ImageServer.get_available_devservers(
+                    self._host.hostname, prefer_local_devserver=False)
         logging.info('Using devservers: %s', ', '.join(devservers))
         self._devserver_args = ['-devservers=%s' % ','.join(devservers)]
+        if self._ephemeraldevserver is not None:
+            self._devserver_args.append('-ephemeraldevserver=%s' %
+                                        self._ephemeraldevserver)
 
     def _log_version(self):
         """Runs the tast command locally to log its version."""
@@ -333,16 +687,23 @@
         except error.CmdError as e:
             logging.error('Failed to log tast version: %s', str(e))
 
-    def _run_tast(self, subcommand, extra_subcommand_args, timeout_sec,
-                  log_stdout=False):
+    def _run_tast(self,
+                  subcommand,
+                  extra_subcommand_args,
+                  test_exprs,
+                  timeout_sec,
+                  log_stdout=False,
+                  ignore_status=False):
         """Runs the tast command locally to e.g. list available tests or perform
         testing against the DUT.
 
         @param subcommand: Subcommand to pass to the tast executable, e.g. 'run'
             or 'list'.
         @param extra_subcommand_args: List of additional subcommand arguments.
+        @param test_exprs: Array of strings describing tests to run.
         @param timeout_sec: Integer timeout for the command in seconds.
         @param log_stdout: If true, write stdout to log.
+        @param ignore_status: If true, command execution errors are ignored.
 
         @returns client.common_lib.utils.CmdResult object describing the result.
 
@@ -356,8 +717,14 @@
             '-sshretries=%d' % self._SSH_CONNECT_RETRIES,
             '-downloaddata=%s' % (
                 'lazy' if self._download_data_lazily else 'batch'),
+            '-totalshards=%s' % self._totalshards,
+            '-shardindex=%s' % self._shardindex,
         ]
-        if self._build:
+        if self._f20_container:
+            cmd.extend(['-build=false'])
+            if self._run_private_tests:
+                cmd.append('-downloadprivatebundles=true')
+        elif self._build:
             cmd.extend([
                 '-build=true',
                 '-buildbundle=%s' % self._build_bundle,
@@ -384,24 +751,61 @@
                                self._get_path(self._SSP_DEFAULT_VARS_DIR_PATH))
             if self._run_private_tests:
                 cmd.append('-downloadprivatebundles=true')
-        cmd.extend(self._devserver_args)
+        if not self._f20_container:
+            cmd.extend(self._devserver_args)
         cmd.extend(extra_subcommand_args)
-        cmd.append('%s:%d' % (self._host.hostname, self._host.port))
-        cmd.extend(self._test_exprs)
+        cmd.append('%s%s' % (self._host.hostname, ':%d' %
+                             self._host.port if self._host.port else ''))
+        cmd.extend(test_exprs)
 
         logging.info('Running %s',
                      ' '.join([utils.sh_quote_word(a) for a in cmd]))
         try:
-            return utils.run(cmd,
-                             ignore_status=False,
-                             timeout=timeout_sec,
-                             stdout_tee=(utils.TEE_TO_LOGS if log_stdout
-                                         else None),
-                             stderr_tee=utils.TEE_TO_LOGS,
-                             stderr_is_expected=True,
-                             stdout_level=logging.INFO,
-                             stderr_level=logging.ERROR)
+            return utils.run(
+                    cmd,
+                    ignore_status=ignore_status,
+                    timeout=timeout_sec,
+                    stdout_tee=(utils.TEE_TO_LOGS if log_stdout else None),
+                    stderr_tee=utils.TEE_TO_LOGS,
+                    stderr_is_expected=True,
+                    stdout_level=logging.INFO,
+                    stderr_level=logging.ERROR)
         except error.CmdError as e:
+            # Run several commands to debug possible network issues.
+            # TODO(b/189332919): Remove this logic once we finish debugging.
+            logging.info('Tast exited abnormally. Running several commands to '
+                         'diagnose possible network issues...')
+            utils.run('time getent ahosts %s' % self._host.hostname,
+                      timeout=60,
+                      ignore_status=True,
+                      stdout_tee=utils.TEE_TO_LOGS,
+                      stderr_tee=utils.TEE_TO_LOGS,
+                      stderr_is_expected=True,
+                      stdout_level=logging.INFO,
+                      stderr_level=logging.ERROR)
+            utils.run(
+                    'ssh '
+                    # Enable maximum debug logging.
+                    '-vvv '
+                    # Disable connection sharing to debug connection issues.
+                    '-o ControlPath=none '
+                    # Following arguments were copied from Autotest logs.
+                    '-a -x '
+                    '-o StrictHostKeyChecking=no '
+                    '-o UserKnownHostsFile=/dev/null '
+                    '-o BatchMode=yes '
+                    '-o ConnectTimeout=10 '
+                    '-o ConnectionAttempts=3 '
+                    '-l root %s%s true' %
+                    ('-p %d ' % self._host.port if self._host.port else '',
+                     self._host.hostname),
+                    timeout=60,
+                    ignore_status=True,
+                    stdout_tee=utils.TEE_TO_LOGS,
+                    stderr_tee=utils.TEE_TO_LOGS,
+                    stderr_is_expected=True,
+                    stdout_level=logging.INFO,
+                    stderr_level=logging.ERROR)
             # The tast command's output generally ends with a line describing
             # the error that was encountered; include it in the first line of
             # the TestFail exception. Fall back to stderr if stdout is empty (as
@@ -415,6 +819,43 @@
         except error.CmdTimeoutError as e:
             raise error.TestFail('Got timeout while running tast: %s' % str(e))
 
+    def _ensure_bundles(self):
+        """Runs the tast command to ensure all test bundles are available.
+
+        If private test bundles are available, they are downloaded from cloud
+        storage and installed to the DUT. Otherwise this is a no-op.
+
+        Note that "tast list" also downloads private test bundles if they are
+        missing. Nevertheless we attempt to download them in advance because
+        "tast list" cannot emit detailed logs due to technical limitations,
+        which often makes it difficult to debug issues related to private test
+        bundle installation.
+        """
+        logging.info('Downloading private test bundles (if any)')
+        temp_dir = tempfile.mkdtemp()
+        try:
+            args = ['-resultsdir=' + temp_dir] + self._get_cloud_storage_info()
+            for role, dut in sorted(self._companion_duts.items()):
+                args.append('-companiondut=%s:%s' % (role, dut))
+
+            for var in self._varslist:
+                args.append('-var=%s' % var)
+
+            for file_name in self._test_filter_files:
+                args.append('-testfilterfile=%s' % file_name)
+
+            # Start "tast run" with an attribute expression matching no test
+            # to trigger a private test bundle download.
+            # Note that Tast CLI will exit abnormally when no test matches,
+            # so we set ignore_status=True to avoid throwing TestFail.
+            self._run_tast('run',
+                           args, ['("group:none")'],
+                           tast._DOWNLOAD_TIMEOUT_SEC,
+                           log_stdout=True,
+                           ignore_status=True)
+        finally:
+            shutil.rmtree(temp_dir, ignore_errors=True)
+
     def _get_tests_to_run(self):
         """Runs the tast command to update the list of tests that will be run.
 
@@ -424,10 +865,11 @@
         """
         logging.info('Getting list of tests that will be run')
         args = ['-json=true'] + self._get_cloud_storage_info()
-        result = self._run_tast('list', args, self._LIST_TIMEOUT_SEC)
+        result = self._run_tast('list', args, self._test_exprs,
+                                self._LIST_TIMEOUT_SEC)
         try:
-            self._tests_to_run = _encode_utf8_json(
-                json.loads(result.stdout.strip()))
+            self._tests_to_run = _encode_json(json.loads(
+                    result.stdout.strip()))
         except ValueError as e:
             raise error.TestFail('Failed to parse tests: %s' % str(e))
         if len(self._tests_to_run) == 0:
@@ -436,6 +878,11 @@
             return False
 
         logging.info('Expect to run %d test(s)', len(self._tests_to_run))
+
+        logging.info('Tests in scope:')
+        for test in self._tests_to_run:
+            logging.info('Test: %s', test['name'])
+
         return True
 
     def _run_tests(self):
@@ -445,11 +892,19 @@
             if individual tests fail).
         """
         args = [
-            '-resultsdir=' + self.resultsdir,
-            '-waituntilready=true',
-            '-timeout=' + str(self._max_run_sec),
-            '-continueafterfailure=true',
-        ] + self._get_servo_args() + self._get_wificell_args() + self._get_cloud_storage_info()
+                '-resultsdir=' + self.resultsdir,
+                '-waituntilready=true',
+                '-timeout=' + str(self._max_run_sec),
+                '-continueafterfailure=true',
+        ]
+        args.extend(self._get_servo_args())
+        args.extend(self._get_rpm_args())
+        args.extend(self._get_wificell_args())
+        args.extend(self._get_cloud_storage_info())
+        args.extend(self._get_firmware_args())
+        args.extend(self._get_camerabox_args())
+        if self._retries:
+            args.append('-retries=%d' % self._retries)
 
         for varsfile in self._varsfiles:
             args.append('-varsfile=%s' % varsfile)
@@ -457,9 +912,33 @@
         for var in self._varslist:
             args.append('-var=%s' % var)
 
+        if self._maybemissingvars:
+            args.append('-maybemissingvars=%s' % self._maybemissingvars)
+
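+        # Each companion DUT is forwarded as -companiondut=<role>:<host>[:<port>],
+        # e.g. (hypothetical) '-companiondut=cd1:chromeos1-row1-host2:22'.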
+        for role, dut in sorted(self._companion_duts.items()):
+            args.append(
+                    '-companiondut=%s:%s%s' %
+                    (role, dut.hostname, ':%d' % dut.port if dut.port else ''))
+
+        for file in self._test_filter_files:
+            args.append('-testfilterfile=%s' % file)
+
         logging.info('Running tests with timeout of %d sec', self._max_run_sec)
-        self._run_tast('run', args, self._max_run_sec + tast._RUN_EXIT_SEC,
-                       log_stdout=True)
+        # This option excludes tests that are requested but not found by the
+        # `tast list` command.
+        if self._exclude_missing:
+            tests_to_run_list = [test["name"] for test in self._tests_to_run]
+            self._run_tast('run',
+                           args,
+                           tests_to_run_list,
+                           self._max_run_sec + tast._RUN_EXIT_SEC,
+                           log_stdout=True)
+        else:
+            self._run_tast('run',
+                           args,
+                           self._test_exprs,
+                           self._max_run_sec + tast._RUN_EXIT_SEC,
+                           log_stdout=True)
 
     def _read_run_error(self):
         """Reads a global run error message written by the tast command."""
@@ -469,13 +948,25 @@
             with open(path, 'r') as f:
                 self._run_error = f.read().strip()
 
-    def _parse_results(self, ignore_missing_file):
+    def maybe_replace(self, test, failed):
+        """ Removes a test from the list of failed results
+
+        @param test: Name of test to remove from failed list
+        @param failed: List of failed tests
+        """
+        # Remove the result, will take & only count the second result.
+        if test[_KEY_NAME] in failed:
+            failed.remove(test[_KEY_NAME])
+
+    def _parse_results(self, ignore_missing_file, run_error_msg):
         """Parses results written by the tast command.
 
         @param ignore_missing_file: If True, return without raising an exception
             if the Tast results file is missing. This is used to avoid raising a
             new error if there was already an earlier error while running the
             tast process.
+        @param run_error_msg: The error message from Tast when there is an
+            error. It will be None if Tast encounters no errors.
 
         @raises error.TestFail if results file is missing and
             ignore_missing_file is False, or one or more tests failed and
@@ -490,7 +981,7 @@
                 return
             raise error.TestFail('Results file %s not found' % path)
 
-        failed = []
+        failed = set()
         seen_test_names = set()
         with open(path, 'r') as f:
             for line in f:
@@ -498,30 +989,48 @@
                 if not line:
                     continue
                 try:
-                    test = _encode_utf8_json(json.loads(line))
+                    test = _encode_json(json.loads(line))
                 except ValueError as e:
                     raise error.TestFail('Failed to parse %s: %s' % (path, e))
                 self._test_results.append(test)
+                if test[_KEY_NAME] in seen_test_names:
+                    self.maybe_replace(test, failed)
 
-                name = test['name']
+                name = test[_KEY_NAME]
                 seen_test_names.add(name)
 
-                if test.get('errors'):
-                    for err in test['errors']:
-                        logging.warning('%s: %s', name, err['reason'])
-                    failed.append(name)
+                if test.get(_KEY_ERRORS):
+                    for err in test[_KEY_ERRORS]:
+                        logging.warning('%s: %s', name, err[_KEY_REASON])
+                    failed.add(name)
                 else:
                     # The test will have a zero (i.e. 0001-01-01 00:00:00 UTC)
                     # end time (preceding the Unix epoch) if it didn't report
                     # completion.
-                    if _rfc3339_time_to_timestamp(test['end']) <= 0:
-                        failed.append(name)
+                    if _rfc3339_time_to_timestamp(test[_KEY_END]) <= 0:
+                        failed.add(name)
 
-        missing = [t['name'] for t in self._tests_to_run
-                   if t['name'] not in seen_test_names]
+        missing = [
+                t[_KEY_NAME] for t in self._tests_to_run
+                if t[_KEY_NAME] not in seen_test_names
+        ]
 
         if missing:
             self._record_missing_tests(missing)
+            time_str = '%sZ' % datetime.datetime.utcnow().isoformat()
+            for name in missing:
+                t = {}
+                t[_KEY_NAME] = name
+                t[_KEY_START] = time_str
+                t[_KEY_END] = time_str
+                if self._run_error:
+                    t[_KEY_MISSING_REASON] = '%s due to global error: %s' % (
+                            self._TEST_DID_NOT_RUN_MSG, self._run_error)
+                elif run_error_msg:
+                    t[_KEY_MISSING_REASON] = run_error_msg
+                else:
+                    t[_KEY_MISSING_REASON] = self._TEST_DID_NOT_RUN_MSG
+                self._test_results.append(t)
 
         failure_msg = self._get_failure_message(failed, missing)
         if failure_msg:
@@ -556,14 +1065,34 @@
             msg += '%d missing: %s' % (len(missing), list_tests(missing))
         return msg
 
-    def _log_all_tests(self):
+    def _log_all_unique_tests(self):
         """Writes entries to the TKO status.log file describing the results of
         all tests.
+
+        If there are two results with the same name and the already-stored one
+            has an error (failure), replace it.
+        Reasoning: if the stored result failed and a second result exists,
+            either:
+            The first attempt was stored and failed, and we want to use the
+                retry result instead;
+            Or the attempts are out of order and the stored attempt is the
+                second one, which failed, meaning the first also failed. It is
+                then still safe to override, since we only need to mark the
+                failure.
+        The benefit is that if the first stored result failed, the retry might
+            have passed, and we want to log that.
+
         """
         seen_test_names = set()
-        for test in self._test_results:
+        tests_to_log = OrderedDict()
+        for test_res in self._test_results:
+            test_name = test_res[_KEY_NAME]
+
+            dup_res = tests_to_log.get(test_name)
+            if not dup_res or dup_res.get(_KEY_ERRORS):
+                tests_to_log[test_name] = test_res
+        for test in tests_to_log.values():
             self._log_test(test)
-            seen_test_names.add(test['name'])
+            seen_test_names.add(test[_KEY_NAME])
 
     def _log_test(self, test):
         """Writes events to the TKO status.log file describing the results from
@@ -574,33 +1103,48 @@
             src/platform/tast/src/chromiumos/cmd/tast/run/results.go for
             details.
         """
-        name = test['name']
-        start_time = _rfc3339_time_to_timestamp(test['start'])
-        end_time = _rfc3339_time_to_timestamp(test['end'])
+        name = test[_KEY_NAME]
+        start_time = _rfc3339_time_to_timestamp(test[_KEY_START])
+        end_time = _rfc3339_time_to_timestamp(test[_KEY_END])
 
-        test_reported_errors = bool(test.get('errors'))
-        test_skipped = bool(test.get('skipReason'))
+        test_reported_errors = bool(test.get(_KEY_ERRORS))
+        test_skipped = bool(test.get(_KEY_SKIP_REASON))
+        test_not_run = bool(test.get(_KEY_MISSING_REASON))
         # The test will have a zero (i.e. 0001-01-01 00:00:00 UTC) end time
         # (preceding the Unix epoch) if it didn't report completion.
         test_finished = end_time > 0
 
         # Avoid reporting tests that were skipped.
-        if test_skipped and not test_reported_errors:
+        if test_skipped and not test_reported_errors and not self._report_skipped:
             return
 
+        # Look for magic error _TEST_DID_NOT_RUN_MSG and mark test as not run.
+        for err in test.get(_KEY_ERRORS) or []:
+            if err[_KEY_REASON] == self._TEST_DID_NOT_RUN_MSG:
+                test_not_run = True
+                test[_KEY_MISSING_REASON] = self._TEST_DID_NOT_RUN_MSG
+
         self._log_test_event(self._JOB_STATUS_START, name, start_time)
 
-        if test_finished and not test_reported_errors:
+        if test_not_run:
+            self._log_test_event(self._JOB_STATUS_NOSTATUS, name, end_time,
+                                 test[_KEY_MISSING_REASON])
+            end_status = self._JOB_STATUS_END_NOSTATUS
+        elif test_skipped and not test_reported_errors and self._report_skipped:
+            self._log_test_event(self._JOB_STATUS_SKIP, name, end_time,
+                                 test.get(_KEY_SKIP_REASON))
+            end_status = self._JOB_STATUS_END_SKIP
+        elif test_finished and not test_reported_errors:
             self._log_test_event(self._JOB_STATUS_GOOD, name, end_time)
             end_status = self._JOB_STATUS_END_GOOD
         else:
             # The previous START event automatically increases the log
             # indentation level until the following END event.
             if test_reported_errors:
-                for err in test['errors']:
-                    error_time = _rfc3339_time_to_timestamp(err['time'])
+                for err in test[_KEY_ERRORS]:
+                    error_time = _rfc3339_time_to_timestamp(err[_KEY_TIME])
                     self._log_test_event(self._JOB_STATUS_FAIL, name,
-                                         error_time, err['reason'])
+                                         error_time, err[_KEY_REASON])
             if not test_finished:
                 # If a run-level error was encountered (e.g. the SSH connection
                 # to the DUT was lost), report it here to make it easier to see
@@ -627,8 +1171,12 @@
         """
         full_name = self._TEST_NAME_PREFIX + test_name
         # The TKO parser code chokes on floating-point timestamps.
-        entry = base_job.status_log_entry(status_code, None, full_name, message,
-                                          None, timestamp=int(timestamp))
+        entry = base_job.status_log_entry(status_code,
+                                          None,
+                                          full_name,
+                                          message,
+                                          None,
+                                          timestamp=int(timestamp))
         self.job.record_entry(entry, False)
 
     def _record_missing_tests(self, missing):
diff --git a/server/site_tests/tast/tast_unittest.py b/server/site_tests/tast/tast_unittest.py
index aa14932..b13afe0 100755
--- a/server/site_tests/tast/tast_unittest.py
+++ b/server/site_tests/tast/tast_unittest.py
@@ -1,22 +1,26 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # -*- coding: utf-8 -*-
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import datetime
 import json
 import os
 import shutil
 import tempfile
 import unittest
+import six
 import yaml
 
 import dateutil.parser
 
 import common
-import tast
-
+from autotest_lib.server.site_tests.tast import tast
 from autotest_lib.client.common_lib import base_job
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils
@@ -129,10 +133,21 @@
                 tast.tast._SSP_REMOTE_TEST_RUNNER_PATH if ssp
                 else self._PORTAGE_REMOTE_TEST_RUNNER_PATH)
 
-    def _init_tast_commands(self, tests, ssp=False, build=False,
-                            build_bundle='fakebundle', run_private_tests=False,
-                            run_vars=[], run_varsfiles=[],
-                            download_data_lazily=False):
+    def _init_tast_commands(self,
+                            tests,
+                            ssp=False,
+                            build=False,
+                            build_bundle='fakebundle',
+                            run_private_tests=False,
+                            run_vars=[],
+                            run_varsfiles=[],
+                            download_data_lazily=False,
+                            totalshards=1,
+                            shardindex=0,
+                            companion_duts={},
+                            maybemissingvars='',
+                            port=True,
+                            test_filter_files=[]):
         """Sets fake_tast.py's behavior for 'list' and 'run' commands.
 
         @param tests: List of TestInfo objects.
@@ -142,14 +157,21 @@
         @param run_varsfiles: filenames should be passed to 'run' via -varsfile.
         @param download_data_lazily: Whether to download external data files
             lazily.
+        @param totalshards: total number of shards.
+        @param shardindex: shard index to be run.
+        @param companion_duts: mapping between roles and DUTs.
+        @param test_filter_files: a list of files specifying which tests to disable.
         """
         list_args = [
-            'build=%s' % build,
-            'patterns=%s' % self.TEST_PATTERNS,
-            'sshretries=%d' % tast.tast._SSH_CONNECT_RETRIES,
-            'downloaddata=%s' % ('lazy' if download_data_lazily else 'batch'),
-            'target=%s:%d' % (self.HOST, self.PORT),
-            'verbose=True',
+                'build=%s' % build,
+                'patterns=%s' % self.TEST_PATTERNS,
+                'sshretries=%d' % tast.tast._SSH_CONNECT_RETRIES,
+                'downloaddata=%s' %
+                ('lazy' if download_data_lazily else 'batch'),
+                'totalshards=%d' % totalshards,
+                'shardindex=%d' % shardindex,
+                'target=%s%s' % (self.HOST, ':%d' % self.PORT if port else ''),
+                'verbose=True',
         ]
         if build:
             list_args.extend([
@@ -177,7 +199,15 @@
         ]
         if run_varsfiles:
             run_args.append('varsfile=%s' % run_varsfiles)
-
+        if companion_duts:
+            role_dut_pairs = []
+            for role, dut in sorted(companion_duts.items()):
+                role_dut_pairs.append('%s:%s%s' %
+                                      (role, dut.hostname,
+                                       ':%d' % dut.port if dut.port else ''))
+            run_args.append('companiondut=%s' % role_dut_pairs)
+        if test_filter_files:
+            run_args.append('testfilterfile=%s' % test_filter_files)
         test_list = json.dumps([t.test() for t in tests])
         run_files = {
             self._results_path(): ''.join(
@@ -206,7 +236,16 @@
                   run_private_tests=False,
                   varsfiles=[],
                   download_data_lazily=False,
-                  varslist=[]):
+                  clear_tpm=False,
+                  totalshards=1,
+                  shardindex=0,
+                  companion_duts={},
+                  varslist=[],
+                  maybemissingvars='',
+                  use_camera_box=False,
+                  vars_gs_path='',
+                  test_filter_files=[],
+                  report_skipped=False):
         """Writes fake_tast.py's configuration and runs the test.
 
         @param ignore_test_failures: Passed as the identically-named arg to
@@ -223,8 +262,13 @@
              in |-varsfile| arguments.
         @param download_data_lazily: Whether to download external data files
             lazily.
+        @param clear_tpm: clear the TPM first before running the tast tests.
         @param varslist: list of strings to pass to tast run command as |-vars|
             arguments. Each string should be formatted as "name=value".
+        @param maybemissingvars: a regex to pass to the tast run command as the
+            |-maybemissingvars| argument.
+        @param use_camera_box: Whether the test runs in a CameraBox.
+        @param report_skipped: Whether or not skipped tests should be reported.
         """
         self._test.initialize(self._host,
                               self.TEST_PATTERNS,
@@ -238,12 +282,21 @@
                               run_private_tests=run_private_tests,
                               varsfiles=varsfiles,
                               download_data_lazily=download_data_lazily,
-                              varslist=varslist)
+                              clear_tpm=clear_tpm,
+                              totalshards=totalshards,
+                              shardindex=shardindex,
+                              companion_duts=companion_duts,
+                              varslist=varslist,
+                              maybemissingvars=maybemissingvars,
+                              use_camera_box=use_camera_box,
+                              vars_gs_path=vars_gs_path,
+                              test_filter_files=test_filter_files,
+                              report_skipped=report_skipped)
         self._test.set_fake_now_for_testing(
                 (NOW - tast._UNIX_EPOCH).total_seconds())
 
         cfg = {}
-        for name, cmd in self._tast_commands.iteritems():
+        for name, cmd in six.iteritems(self._tast_commands):
             cfg[name] = vars(cmd)
         path = os.path.join(os.path.dirname(self._tast_path), 'config.json')
         with open(path, 'a') as f:
@@ -289,11 +342,25 @@
                          status_string(self._job.status_entries))
         self.assertIs(self._load_job_keyvals(), None)
 
+    def testPassingTestsNoPort(self):
+        """Tests that passing tests are reported correctly."""
+        self._host = FakeHost(self.HOST, None)
+        tests = [
+                TestInfo('pkg.Test1', 0, 2),
+                TestInfo('pkg.Test2', 3, 5),
+                TestInfo('pkg.Test3', 6, 8)
+        ]
+        self._init_tast_commands(tests, port=None)
+        self._run_test()
+        self.assertEqual(status_string(get_status_entries_from_tests(tests)),
+                         status_string(self._job.status_entries))
+        self.assertIs(self._load_job_keyvals(), None)
+
     def testFailingTests(self):
         """Tests that failing tests are reported correctly."""
         tests = [TestInfo('pkg.Test1', 0, 2, errors=[('failed', 1)]),
                  TestInfo('pkg.Test2', 3, 6),
-                 TestInfo('pkg.Test2', 7, 8, errors=[('another', 7)])]
+                 TestInfo('pkg.Test3', 7, 8, errors=[('another', 7)])]
         self._init_tast_commands(tests)
         self._run_test_for_failure([tests[0], tests[2]], [])
         self.assertEqual(status_string(get_status_entries_from_tests(tests)),
@@ -310,14 +377,32 @@
 
     def testSkippedTest(self):
         """Tests that skipped tests aren't reported."""
-        tests = [TestInfo('pkg.Normal', 0, 1),
-                 TestInfo('pkg.Skipped', 2, 2, skip_reason='missing deps')]
+        tests = [
+                TestInfo('pkg.Normal', 0, 1),
+                TestInfo('pkg.Skipped', 2, 2, skip_reason='missing deps')
+        ]
         self._init_tast_commands(tests)
         self._run_test()
         self.assertEqual(status_string(get_status_entries_from_tests(tests)),
                          status_string(self._job.status_entries))
         self.assertIs(self._load_job_keyvals(), None)
 
+    def testSkippedTestWithReportSkipped(self):
+        """Tests that skipped tests are reported correctly when report_skipped=True."""
+        tests = [
+                TestInfo('pkg.Normal', 0, 1),
+                TestInfo('pkg.Skipped',
+                         2,
+                         3,
+                         skip_reason='missing deps',
+                         report_skipped=True)
+        ]
+        self._init_tast_commands(tests)
+        self._run_test(report_skipped=True)
+        self.assertEqual(status_string(get_status_entries_from_tests(tests)),
+                         status_string(self._job.status_entries))
+        self.assertIs(self._load_job_keyvals(), None)
+
     def testSkippedTestWithErrors(self):
         """Tests that skipped tests are reported if they also report errors."""
         tests = [TestInfo('pkg.Normal', 0, 1),
@@ -359,9 +444,8 @@
                  TestInfo('pkg.Test3', None, None)]
         self._init_tast_commands(tests)
         self._run_test_for_failure([tests[1]], [tests[2]])
-        self.assertEqual(
-                status_string(get_status_entries_from_tests(tests[:2])),
-                status_string(self._job.status_entries))
+        self.assertEqual(status_string(get_status_entries_from_tests(tests)),
+                         status_string(self._job.status_entries))
         self.assertEqual(self._load_job_keyvals(),
                          {'tast_missing_test.0': 'pkg.Test3'})
 
@@ -381,7 +465,7 @@
 
         self._run_test_for_failure([tests[1]], [tests[2]])
         self.assertEqual(
-                status_string(get_status_entries_from_tests(tests[:2], msg)),
+                status_string(get_status_entries_from_tests(tests, msg)),
                 status_string(self._job.status_entries))
         self.assertEqual(self._load_job_keyvals(),
                          {'tast_missing_test.0': 'pkg.Test3'})
@@ -439,8 +523,43 @@
                 'not valid JSON data'
         with self.assertRaises(error.TestFail) as _:
             self._run_test()
-        self.assertEqual(status_string(get_status_entries_from_tests(tests)),
-                         status_string(self._job.status_entries))
+        # Missing tests are reported in _parse_results, which is called after
+        # _run_tests, so missing tests are not yet available and the
+        # status_entries should include only the first test case.
+        self.assertEqual(
+                status_string(get_status_entries_from_tests(tests[:1])),
+                status_string(self._job.status_entries))
+
+    def testRunCommandWithSharding(self):
+        """Tests that sharding parameter is passing thru without issues."""
+        tests = [TestInfo('pkg.Test1', 0, 2), TestInfo('pkg.Test2', 3, 5)]
+        self._init_tast_commands(tests=tests, totalshards=2, shardindex=1)
+        self._run_test(totalshards=2, shardindex=1)
+
+    def testRunCommandWithCompanionDUTs(self):
+        """Tests that companion dut parameter is passing thru without issues."""
+        tests = [TestInfo('pkg.Test1', 0, 2), TestInfo('pkg.Test2', 3, 5)]
+        companion_duts = {'role1': FakeHost('dut1', 22), 'role2':FakeHost('dut2', 22)}
+        self._init_tast_commands(tests=tests, companion_duts=companion_duts)
+        self._run_test(companion_duts=companion_duts)
+
+    def testRunCommandWithCompanionDUTsNoPort(self):
+        """Tests that companion dut parameter is passing thru without issues."""
+        tests = [TestInfo('pkg.Test1', 0, 2), TestInfo('pkg.Test2', 3, 5)]
+        companion_duts = {
+                'role1': FakeHost('dut1', 22),
+                'role2': FakeHost('dut2', None)
+        }
+        self._init_tast_commands(tests=tests, companion_duts=companion_duts)
+        self._run_test(companion_duts=companion_duts)
+
+    def testRunCommandWithTestFilterFiles(self):
+        """Tests that companion dut parameter is passing thru without issues."""
+        tests = [TestInfo('pkg.Test1', 0, 2), TestInfo('pkg.Test2', 3, 5)]
+        test_filter_files = ['filter_1.txt', 'filter_2.txt']
+        self._init_tast_commands(tests=tests,
+                                 test_filter_files=test_filter_files)
+        self._run_test(test_filter_files=test_filter_files)
 
     def testNoResultsFile(self):
         """Tests that an error is raised if no results file is written."""
@@ -449,7 +568,10 @@
         self._tast_commands['run'].files_to_write = {}
         with self.assertRaises(error.TestFail) as _:
             self._run_test()
-        self.assertEqual(status_string(get_status_entries_from_tests(tests)),
+        # Missing tests are reported in _parse_results, which is called after
+        # _run_tests, so missing tests are not yet available and the
+        # status_entries should be empty.
+        self.assertEqual(status_string(get_status_entries_from_tests([])),
                          status_string(self._job.status_entries))
 
     def testNoResultsFileAfterRunCommandFails(self):
@@ -468,7 +590,10 @@
         first_line = str(cm.exception).split('\n')[0]
         self.assertTrue(FAILURE_MSG in first_line,
                         '"%s" not in "%s"' % (FAILURE_MSG, first_line))
-        self.assertEqual(status_string(get_status_entries_from_tests(tests)),
+        # Missing tests are reported in _parse_results, which is called after
+        # _run_tests, so missing tests are not yet available and the
+        # status_entries should be empty.
+        self.assertEqual(status_string(get_status_entries_from_tests([])),
                          status_string(self._job.status_entries))
 
     def testMissingTastExecutable(self):
@@ -592,8 +717,9 @@
         ROUTER_IP = '192.168.1.2:1234'
         PCAP_IP = '192.168.1.3:2345'
         wificell_var = [
-            'router=%s' % ROUTER_IP,
-            'pcap=%s' % PCAP_IP,
+                'router=%s' % ROUTER_IP,
+                'pcap=%s' % PCAP_IP,
+                'routers=%s,%s' % (ROUTER_IP, PCAP_IP),
         ]
         self._init_tast_commands([TestInfo('pkg.Test', 0, 0)],
                                  run_vars=wificell_var)
@@ -603,13 +729,34 @@
             (WiFiManager.CMDLINE_ROUTER_ADDR, ROUTER_IP),
             (WiFiManager.CMDLINE_PCAP_ADDR, PCAP_IP),
         ]
-        args = map(lambda x: ("%s=%s" % x), arg_list)
+        args = [("%s=%s" % x) for x in arg_list]
         self._run_test(command_args=args)
 
+    def testFirmwareArgs(self):
+        """Tests passing firmware specific args into Tast runner."""
+        vars = ['firmware.no_ec_sync=true']
+        self._init_tast_commands([TestInfo('pkg.Test', 0, 0)], run_vars=vars)
+
+        args = ['no_ec_sync=true']
+        self._run_test(command_args=args)
+
+    def testCameraboxArgs(self):
+        """Tests passing camerabox specific args into Tast runner."""
+        # No chart IP address is specified when a valid one cannot be found.
+        vars = []
+        self._init_tast_commands([TestInfo('pkg.Test', 0, 0)], run_vars=vars)
+        self._run_test(use_camera_box=True)
+
     def testVarsfileOption(self):
         with tempfile.NamedTemporaryFile(
                 suffix='.yaml', dir=self._temp_dir) as temp_file:
-            yaml.dump({"var1": "val1", "var2": "val2"}, stream=temp_file)
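+            # The temp file is opened in binary mode, so have yaml emit bytes.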
+            yaml.dump({
+                    "var1": "val1",
+                    "var2": "val2"
+            },
+                      stream=temp_file,
+                      encoding='utf-8')
             varsfiles = [temp_file.name]
             self._init_tast_commands([TestInfo('pkg.Test', 0, 0)],
                                      run_varsfiles=varsfiles)
@@ -621,6 +768,79 @@
                                  run_vars=varslist)
         self._run_test(varslist=varslist)
 
+    def testMaybeMissingVarsOption(self):
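+        """Tests that the maybemissingvars regex is passed to the tast command."""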
+        arg = r'.*\.Test'
+        self._init_tast_commands([TestInfo('pkg.Test', 0, 0)],
+                                 maybemissingvars=arg)
+        self._run_test(maybemissingvars=arg)
+
+    def testFillExtvars(self):
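+        """Tests that _fill_config_extvars merges vars, args, tests and labels."""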
+        with tempfile.NamedTemporaryFile(suffix='.yaml',
+                                         dir=self._temp_dir) as temp_file:
+            yaml.dump({
+                    'var1': 'val1',
+                    'var2': 'val2'
+            },
+                      stream=temp_file,
+                      encoding='utf-8')
+
+            host = FakeHost(self.HOST, self.PORT)
+            host.host_info_store = host_info.InMemoryHostInfoStore(
+                    host_info.HostInfo(labels=[
+                            'plus', 'board:octopus', 'fleex', 'os:cros'
+                    ]))
+            self._test.initialize(
+                    host=host,
+                    test_exprs=self.TEST_PATTERNS,
+                    varsfiles=[temp_file.name],
+                    varslist=['var3=val3', 'var4=val4'],
+                    command_args=['arg1', 'arg2=arg2val', 'arg3:arg3val'])
+            self._test._tests_to_run = ['test1', 'test2']
+
+            self.maxDiff = None
+            self.assertDictEqual(
+                    {
+                            'var:var1': 'val1',
+                            'var:var2': 'val2',
+                            'var:var3': 'val3',
+                            'var:var4': 'val4',
+                            'tests:': 'test1\ntest2',
+                            'test:test1': '',
+                            'test:test2': '',
+                            'args:': 'arg1\narg2=arg2val\narg3:arg3val',
+                            'arg:arg1': '',
+                            'arg:arg2': 'arg2val',
+                            'arg:arg3': 'arg3val',
+                            'labels:': 'plus\nboard:octopus\nfleex\nos:cros',
+                            'label:plus': '',
+                            'label:board': 'octopus',
+                            'label:fleex': '',
+                            'label:os': 'cros',
+                    }, self._test._fill_config_extvars())
+
+    def test_dut_server_arg(self):
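+        """Tests that _dut_server_arg returns the first dut_servers entry, if any."""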
+        # Test positive single case
+        cargs = [
+                'dut_servers=100.101.102.103:1111,', 'servo_host=localhost',
+                'servo_port=9999'
+        ]
+        dut_server = tast._dut_server_arg(cargs)
+        self.assertEqual(dut_server, '100.101.102.103:1111')
+
+        # Test no dut_servers provided case:
+        cargs = ['servo_host=localhost', 'servo_port=9999']
+        dut_server = tast._dut_server_arg(cargs)
+        self.assertEqual(dut_server, None)
+
+        # Test multiple dut_servers provided case:
+        cargs = [
+                'dut_servers=100.101.102.103:1111,localhost:1234',
+                'servo_host=localhost', 'servo_port=9999'
+        ]
+        dut_server = tast._dut_server_arg(cargs)
+        self.assertEqual(dut_server, '100.101.102.103:1111')
+
 
 class TestInfo:
     """Wraps information about a Tast test.
@@ -631,8 +851,17 @@
     - get expected base_job.status_log_entry objects that unit tests compare
       against what tast.Tast actually recorded
     """
-    def __init__(self, name, start_offset, end_offset, errors=None,
-                 skip_reason=None, attr=None, timeout_ns=0):
+
+    def __init__(self,
+                 name,
+                 start_offset,
+                 end_offset,
+                 errors=None,
+                 skip_reason=None,
+                 attr=None,
+                 timeout_ns=0,
+                 missing_reason=None,
+                 report_skipped=False):
         """
         @param name: Name of the test, e.g. 'ui.ChromeLogin'.
         @param start_offset: Start time as int seconds offset from BASE_TIME,
@@ -648,6 +877,7 @@
         @param attr: List of string test attributes assigned to the test, or
             None if no attributes are assigned.
         @param timeout_ns: Test timeout in nanoseconds.
+        @param report_skipped: Whether skipped tests should be reported.
         """
         def from_offset(offset):
             """Returns an offset from BASE_TIME.
@@ -667,6 +897,7 @@
         self._skip_reason = skip_reason
         self._attr = list(attr) if attr else []
         self._timeout_ns = timeout_ns
+        self._report_skipped = report_skipped
 
     def name(self):
         # pylint: disable=missing-docstring
@@ -712,23 +943,20 @@
         """
         # Deliberately-skipped tests shouldn't have status entries unless errors
         # were also reported.
-        if self._skip_reason and not self._errors:
-            return []
-
-        # Tests that weren't even started (e.g. because of an earlier issue)
-        # shouldn't have status entries.
-        if not self._start_time:
+        if not self._report_skipped and self._skip_reason and not self._errors:
             return []
 
         def make(status_code, dt, msg=''):
             """Makes a base_job.status_log_entry.
 
             @param status_code: String status code.
-            @param dt: datetime.datetime object containing entry time.
+            @param dt: datetime.datetime object containing the entry time, or
+                None if the test was never started.
             @param msg: String message (typically only set for errors).
             @return: base_job.status_log_entry object.
             """
-            timestamp = int((dt - tast._UNIX_EPOCH).total_seconds())
+            timestamp = int(
+                    (dt - tast._UNIX_EPOCH).total_seconds()) if dt else None
             return base_job.status_log_entry(
                     status_code, None,
                     tast.tast._TEST_NAME_PREFIX + self._name, msg, None,
@@ -736,7 +964,22 @@
 
         entries = [make(tast.tast._JOB_STATUS_START, self._start_time)]
 
-        if self._end_time and not self._errors:
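+        # Tests that never started are reported with NOSTATUS / END NOSTATUS.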
+        if not self._start_time:
+            if run_error_msg:
+                reason = '%s due to global error: %s' % (
+                        tast.tast._TEST_DID_NOT_RUN_MSG, run_error_msg)
+            else:
+                reason = tast.tast._TEST_DID_NOT_RUN_MSG
+
+            entries.append(make(tast.tast._JOB_STATUS_NOSTATUS, None, reason))
+            entries.append(make(tast.tast._JOB_STATUS_END_NOSTATUS, None))
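+        # Skipped tests without errors only reach this branch when
+        # report_skipped is set; otherwise they were filtered out above.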
+        elif self._end_time and self._skip_reason and not self._errors:
+            entries.append(
+                    make(tast.tast._JOB_STATUS_SKIP, self._end_time,
+                         self._skip_reason))
+            entries.append(make(tast.tast._JOB_STATUS_END_SKIP,
+                                self._end_time))
+        elif self._end_time and not self._errors:
             entries.append(make(tast.tast._JOB_STATUS_GOOD, self._end_time))
             entries.append(make(tast.tast._JOB_STATUS_END_GOOD, self._end_time))
         else:
@@ -840,14 +1083,31 @@
     @param entries: List of base_job.status_log_entry objects.
     @return: String containing space-separated representations of entries.
     """
-    strings = []
-    for entry in entries:
-        timestamp = entry.fields[base_job.status_log_entry.TIMESTAMP_FIELD]
-        s = '[%s %s %s %s]' % (timestamp, entry.operation, entry.status_code,
-                               repr(str(entry.message)))
-        strings.append(s)
+    found_test_strings = []
+    missing_test_strings = []
 
-    return ' '.join(strings)
+    # For each missing test, there are three corresponding entries that we want
+    # to put in missing_test_strings: "START", "NOSTATUS" and "END NOSTATUS".
+    # We cannot tell from the "START" entry alone whether a test is missing, so
+    # we use missing_tests to keep track of all the missing tests.
+    missing_tests = set(entry.operation for entry in entries
+                        if entry.status_code == tast.tast._JOB_STATUS_NOSTATUS)
+
+    for entry in entries:
+        message = entry.message
+        if isinstance(message, six.binary_type):
+            message = message.decode('utf-8')
+        # The timestamp is omitted for missing-test entries.
+        timestamp = entry.fields[base_job.status_log_entry.TIMESTAMP_FIELD]
+        if entry.operation not in missing_tests:
+            s = '[%s %s %s %s]' % (timestamp, entry.operation,
+                                   entry.status_code, repr(message))
+            found_test_strings.append(s)
+        else:
+            s = '[%s %s %s]' % (entry.operation, entry.status_code,
+                                repr(message))
+            missing_test_strings.append(s)
+    return ' '.join(found_test_strings + missing_test_strings)
 
 
 if __name__ == '__main__':
diff --git a/server/site_tests/tast/testdata/fake_tast.py b/server/site_tests/tast/testdata/fake_tast.py
index 6d122b9..8593635 100755
--- a/server/site_tests/tast/testdata/fake_tast.py
+++ b/server/site_tests/tast/testdata/fake_tast.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -20,6 +20,7 @@
 import argparse
 import json
 import os
+import six
 import sys
 
 
@@ -39,17 +40,21 @@
     if not cmd:
         raise RuntimeError('Unexpected command "%s"' % args.command)
 
-    for arg in cmd.get('required_args', []):
-        name, expected_value = arg.split('=', 1)
-        # argparse puts the repeated "pattern" args into a list of lists
-        # instead of a single list. Pull the args back out in this case.
-        val = getattr(args, name)
-        if isinstance(val, list) and len(val) == 1 and isinstance(val[0], list):
-            val = val[0]
-        actual_value = str(val)
-        if actual_value != expected_value:
-            raise RuntimeError('Got arg %s with value "%s"; want "%s"' %
-                               (name, actual_value, expected_value))
+    # If patterns is ("group:none"), this is a warm-up run, so skip checking
+    # arguments.
+    if args.patterns != [['("group:none")']]:
+        for arg in cmd.get('required_args', []):
+            name, expected_value = arg.split('=', 1)
+            # argparse puts the repeated "pattern" args into a list of lists
+            # instead of a single list. Pull the args back out in this case.
+            val = getattr(args, name)
+            if isinstance(val, list) and len(val) == 1 and isinstance(
+                    val[0], list):
+                val = val[0]
+            actual_value = str(val)
+            if actual_value != expected_value:
+                raise RuntimeError('Got arg %s with value "%s"; want "%s"' %
+                                   (name, actual_value, expected_value))
 
     if cmd.get('stdout'):
         sys.stdout.write(cmd['stdout'])
@@ -57,10 +62,10 @@
         sys.stderr.write(cmd['stderr'])
 
     if cmd.get('files_to_write'):
-        for path, data in cmd['files_to_write'].iteritems():
+        for path, data in six.iteritems(cmd['files_to_write']):
             dirname = os.path.dirname(path)
             if not os.path.exists(dirname):
-                os.makedirs(dirname, 0o0755)
+                os.makedirs(dirname, 0o755)
             with open(path, 'w') as f:
                 f.write(data)
 
@@ -101,6 +106,8 @@
         subparser.add_argument('-remoterunner')
         subparser.add_argument('-sshretries')
         subparser.add_argument('-downloaddata')
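+        # Sharding options that the tast wrapper may pass.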
+        subparser.add_argument('-totalshards')
+        subparser.add_argument('-shardindex')
         subparser.add_argument('target')
         subparser.add_argument('patterns', action='append', nargs='*')
 
@@ -118,7 +125,10 @@
     run_parser.add_argument('-var', action='append', default=[])
     run_parser.add_argument('-defaultvarsdir')
     run_parser.add_argument('-varsfile', action='append', default=[])
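+    # Additional run flags exercised by the wrapper unit tests.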
+    run_parser.add_argument('-companiondut', action='append', default=[])
     run_parser.add_argument('-buildartifactsurl')
+    run_parser.add_argument('-maybemissingvars')
+    run_parser.add_argument('-testfilterfile', action='append', default=[])
 
     return parser.parse_args()
 
diff --git a/server/site_tests/telemetry_AFDOGenerate/control b/server/site_tests/telemetry_AFDOGenerate/control
index cf0a08d..08f1bf3 100644
--- a/server/site_tests/telemetry_AFDOGenerate/control
+++ b/server/site_tests/telemetry_AFDOGenerate/control
@@ -13,6 +13,7 @@
 TEST_CATEGORY = "Benchmark"
 TEST_CLASS = "telemetry"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 Run a pre-defined set of benchmarks on the DUT and create a sampled profile
diff --git a/server/site_tests/telemetry_AFDOGenerate/telemetry_AFDOGenerate.py b/server/site_tests/telemetry_AFDOGenerate/telemetry_AFDOGenerate.py
index d1511e1..b319151 100644
--- a/server/site_tests/telemetry_AFDOGenerate/telemetry_AFDOGenerate.py
+++ b/server/site_tests/telemetry_AFDOGenerate/telemetry_AFDOGenerate.py
@@ -22,12 +22,30 @@
   telemetry_AFDOGenerate
 """
 
-from __future__ import print_function
 
 import bz2
 import logging
 import os
 import time
+import sys
+
+# TODO (b/206008069), remove this when migrated to new env
+sys.path.insert(0,
+                '/usr/local/lib/python2.7/dist-packages/six-1.16.0-py2.7.egg')
+try:
+    # This is weird. But it seems something is bringing in six earlier
+    # Going to force a reload after the egg is inserted.
+    import six
+    if six.PY2:
+        reload(six)
+    else:
+        import importlib
+        importlib.reload(six)
+    logging.debug("six version is {}".format(six.__version__))
+    if six.__version__ != '1.16.0':
+        logging.debug(sys.path)
+except ImportError as e:
+    logging.warning("Could not import six due to %s", e)
 
 from contextlib import contextmanager
 
@@ -41,7 +59,8 @@
 
 # These are arguments to the linux "perf" tool.
 # The -e value is processor specific and comes from the Intel SDM vol 3b
-PROFILER_ARGS = 'record -a -e r20c4 -c 50000 -b'
+# TODO(b:229298221): Revert to -c 50000 when fixed.
+PROFILER_ARGS = 'record -a -e r20c4 -c 200003 -b'
 
 # In practice, it takes >2min to copy the perf.data back from the DUT, set
 # this timeout to 600 secs to be safe.
@@ -85,11 +104,11 @@
         # page_cycler tests are deprecated. Replace them with loading.desktop.
         ('loading.desktop', ('--pageset-repeat=1',
                              '--story-tag-filter=typical')),
-        ('loading.desktop', ('--pageset-repeat=1',
-                             '--story-tag-filter=intl_ja_zh')),
-        ('rendering.desktop',
-         ('--story-tag-filter=tough_canvas',
-          '--story-filter="bouncing\\*\\|canvas\\*\\|microsoft\\*"')),
+        # TODO(b:229298221): Re-enable when fixed.
+        # ('loading.desktop', ('--pageset-repeat=1',
+        #                      '--story-tag-filter=intl_ja_zh')),
+        ('rendering.desktop', ('--pageset-repeat=1',
+                               '--story-tag-filter=tough_canvas')),
         ('octane', ),
         ('kraken', ),
         ('speedometer2', ),
@@ -116,7 +135,7 @@
 # FIXME(tcwang): only used for testing Async AFDO generation builders.
 # Remove this after testing is done.
 # Due to crbug.com/991299 and crbug.com/992539, AFDO profiles generated
-# by samus is not suitable for production in both master and branch.
+# by samus are not suitable for production in both main and branch.
 # So it's suitable to test generation profiles but not actually use it.
 LLVM_BOARDS_ASYNC = ['samus']
 
@@ -140,8 +159,11 @@
         cmd = []
         src = ('root@%s:%s/%s' % (dut.hostname, DUT_CHROME_RESULTS_DIR,
                                   'perf.data'))
-        cmd.extend(['scp', DUT_SCP_OPTIONS, RSA_KEY, '-P', str(dut.port), '-v',
-                    src, host_dir])
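+        # Omit the -P option when the DUT does not expose an explicit SSH port.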
+        cmd.extend([
+                'scp', DUT_SCP_OPTIONS, RSA_KEY,
+                '-P %s' % str(dut.port) if dut.port else '', '-v', src,
+                host_dir
+        ])
         command = ' '.join(cmd)
 
         logging.debug('Retrieving Perf Data: %s', command)
@@ -220,20 +242,19 @@
             if self._minimal_telemetry:
                 self._run_tests_minimal_telemetry()
             else:
-                self._telemetry_runner = telemetry_runner.TelemetryRunner(
-                        self._host, self._local, telemetry_on_dut=False)
-
-                for benchmark_info in TELEMETRY_AFDO_BENCHMARKS:
-                    benchmark = benchmark_info[0]
-                    args = (
-                    ) if len(benchmark_info) == 1 else benchmark_info[1]
-                    try:
-                        self._run_test_with_retry(benchmark, *args)
-                    except error.TestBaseException:
-                        if not self._ignore_failures:
-                            raise
-                        logging.info('Ignoring failure from benchmark %s.',
-                                     benchmark)
+                with telemetry_runner.TelemetryRunnerFactory().get_runner(
+                        self._host, self._local, telemetry_on_dut=False) as tr:
+                    for benchmark_info in TELEMETRY_AFDO_BENCHMARKS:
+                        benchmark = benchmark_info[0]
+                        args = (
+                        ) if len(benchmark_info) == 1 else benchmark_info[1]
+                        try:
+                            self._run_test_with_retry(tr, benchmark, *args)
+                        except error.TestBaseException:
+                            if not self._ignore_failures:
+                                raise
+                            logging.info('Ignoring failure from benchmark %s.',
+                                         benchmark)
 
     def after_run_once(self):
         """After the profile information has been collected, compress it
@@ -285,7 +306,10 @@
         # this will be set to True by default.
         self._minimal_telemetry = False
 
-        for option_name, value in args.iteritems():
+        # Ignored servo arguments.
+        ignored_options = ('servo_host', 'servo_port')
+
+        for option_name, value in args.items():
             if option_name == 'arch':
                 self._arch = value
             elif option_name == 'gs_test_location':
@@ -298,12 +322,15 @@
                 self._minimal_telemetry = (value == 'True')
             elif option_name == 'version':
                 self._version = value
+            elif option_name in ignored_options:
+                continue
             else:
                 raise error.TestFail('Unknown option passed: %s' % option_name)
 
-    def _run_test(self, benchmark, *args):
+    def _run_test(self, tr, benchmark, *args):
         """Run the benchmark using Telemetry.
 
+        @param tr: Instance of the TelemetryRunner subclass.
         @param benchmark: Name of the benchmark to run.
         @param args: Additional arguments to pass to the telemetry execution
                      script.
@@ -313,8 +340,7 @@
         try:
             logging.info('Starting run for Telemetry benchmark %s', benchmark)
             start_time = time.time()
-            result = self._telemetry_runner.run_telemetry_benchmark(
-                    benchmark, None, *args)
+            result = tr.run_telemetry_benchmark(benchmark, None, *args)
             end_time = time.time()
             logging.info('Completed Telemetry benchmark %s in %f seconds',
                          benchmark, end_time - start_time)
@@ -336,9 +362,10 @@
             raise error.TestFail('An error occurred while executing'
                                  ' benchmark: %s' % benchmark)
 
-    def _run_test_with_retry(self, benchmark, *args):
+    def _run_test_with_retry(self, tr, benchmark, *args):
         """Run the benchmark using Telemetry. Retry in case of failure.
 
+        @param tr: An instance of the TelemetryRunner subclass.
         @param benchmark: Name of the benchmark to run.
         @param args: Additional arguments to pass to the telemetry execution
                      script.
@@ -348,7 +375,7 @@
         tried = False
         while True:
             try:
-                self._run_test(benchmark, *args)
+                self._run_test(tr, benchmark, *args)
                 logging.info('Benchmark %s succeeded on %s try', benchmark,
                              'first' if not tried else 'second')
                 break
@@ -396,9 +423,9 @@
         @returns Name of compressed file.
         """
         dest = ''
-        with open(unc_file, 'r') as inp:
+        with open(unc_file, 'rb') as inp:
             dest = telemetry_AFDOGenerate._get_compressed_name(com_file)
-            with bz2.BZ2File(dest, 'w') as out:
+            with bz2.BZ2File(dest, 'wb') as out:
                 for data in inp:
                     out.write(data)
         if not dest or not os.path.isfile(dest):
diff --git a/server/site_tests/telemetry_Benchmarks/control.backdrop_filter.rendering.desktop b/server/site_tests/telemetry_Benchmarks/control.backdrop_filter.rendering.desktop
index 89133f6..b4807ee 100644
--- a/server/site_tests/telemetry_Benchmarks/control.backdrop_filter.rendering.desktop
+++ b/server/site_tests/telemetry_Benchmarks/control.backdrop_filter.rendering.desktop
@@ -9,18 +9,19 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = 'Chrome OS Team'
+AUTHOR = 'ChromeOS Team'
 NAME = 'telemetry_Benchmarks.backdrop_filter.rendering.desktop'
 ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
 TIME = 'LONG'
 TEST_CATEGORY = 'Benchmark'
 TEST_CLASS = 'performance'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = '''
 This server side test suite executes the Telemetry Benchmark:
 rendering.desktop (but only stories with tag backdrop_filter)
-This is part of Chrome for Chrome OS performance testing.
+This is part of Chrome for ChromeOS performance testing.
 
 Pass local=True to run with local telemetry and no AFE server.
 '''
diff --git a/server/site_tests/telemetry_Benchmarks/control.blink_perf.image_decoder b/server/site_tests/telemetry_Benchmarks/control.blink_perf.image_decoder
index bb4ef59..2c49c0d 100644
--- a/server/site_tests/telemetry_Benchmarks/control.blink_perf.image_decoder
+++ b/server/site_tests/telemetry_Benchmarks/control.blink_perf.image_decoder
@@ -6,7 +6,7 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = 'Chrome OS Team'
+AUTHOR = 'ChromeOS Team'
 NAME = 'telemetry_Benchmarks.blink_perf.image_decoder'
 # Disable this test until it can be fixed: http://b/170998795
 # ATTRIBUTES = 'suite:crosbolt_perf_nightly'
@@ -14,11 +14,12 @@
 TEST_CATEGORY = 'Benchmark'
 TEST_CLASS = 'performance'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = '''
 This server side test suite executes the Telemetry Benchmark:
 blink_perf.image_decoder
-This is part of Chrome for Chrome OS performance testing.
+This is part of Chrome for ChromeOS performance testing.
 
 Pass local=True to run with local telemetry and no AFE server.
 '''
diff --git a/server/site_tests/telemetry_Benchmarks/control.loading.desktop b/server/site_tests/telemetry_Benchmarks/control.loading.desktop
index 4c4170f..a1330ae 100644
--- a/server/site_tests/telemetry_Benchmarks/control.loading.desktop
+++ b/server/site_tests/telemetry_Benchmarks/control.loading.desktop
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -6,18 +6,19 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = 'Chrome OS Team'
+AUTHOR = 'ChromeOS Team'
 NAME = 'telemetry_Benchmarks.loading.desktop'
 ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
 TIME = 'LONG'
 TEST_CATEGORY = 'Benchmark'
 TEST_CLASS = 'performance'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = '''
 This server side test suite executes the Telemetry Benchmark:
 loading.desktop
-This is part of Chrome for Chrome OS performance testing.
+This is part of Chrome for ChromeOS performance testing.
 
 Pass local=True to run with local telemetry and no AFE server.
 '''
diff --git a/server/site_tests/telemetry_Benchmarks/control.rendering.desktop b/server/site_tests/telemetry_Benchmarks/control.rendering.desktop
index d926841..4863abe 100644
--- a/server/site_tests/telemetry_Benchmarks/control.rendering.desktop
+++ b/server/site_tests/telemetry_Benchmarks/control.rendering.desktop
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -6,18 +6,19 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = 'Chrome OS Team'
+AUTHOR = 'ChromeOS Team'
 NAME = 'telemetry_Benchmarks.rendering.desktop'
 ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
 TIME = 'LONG'
 TEST_CATEGORY = 'Benchmark'
 TEST_CLASS = 'performance'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = '''
 This server side test suite executes the Telemetry Benchmark:
 rendering.desktop
-This is part of Chrome for Chrome OS performance testing.
+This is part of Chrome for ChromeOS performance testing.
 
 Pass local=True to run with local telemetry and no AFE server.
 '''
diff --git a/server/site_tests/telemetry_Benchmarks/control.speedometer2 b/server/site_tests/telemetry_Benchmarks/control.speedometer2
index 1fa69a2..8700490 100644
--- a/server/site_tests/telemetry_Benchmarks/control.speedometer2
+++ b/server/site_tests/telemetry_Benchmarks/control.speedometer2
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -6,18 +6,19 @@
 
 from autotest_lib.client.common_lib import utils
 
-AUTHOR = 'hctsai'
+AUTHOR = 'ChromeOS Team'
 NAME = 'telemetry_Benchmarks.speedometer2'
 ATTRIBUTES = 'suite:crosbolt_perf_perbuild'
 TIME = 'LONG'
 TEST_CATEGORY = 'Benchmark'
 TEST_CLASS = 'performance'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = '''
 This server side test suite executes the Telemetry Benchmark:
 speedometer2
-This is part of Chrome for Chrome OS performance testing.
+This is part of Chrome for ChromeOS performance testing.
 
 Pass local=True to run with local telemetry and no AFE server.
 '''
diff --git a/server/site_tests/telemetry_Benchmarks/generate_controlfiles.py b/server/site_tests/telemetry_Benchmarks/generate_controlfiles.py
index 0024490..9e72c4a 100644
--- a/server/site_tests/telemetry_Benchmarks/generate_controlfiles.py
+++ b/server/site_tests/telemetry_Benchmarks/generate_controlfiles.py
@@ -1,10 +1,11 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
+# Lint as: python2, python3
 
 """
-This file generates all telemetry_Benchmarks control files from a master list.
+This file generates all telemetry_Benchmarks control files from a main list.
 """
 
-from __future__ import print_function
+
 
 from datetime import datetime
 import os
@@ -45,10 +46,10 @@
 
 DEFAULT_YEAR = str(datetime.now().year)
 
-DEFAULT_AUTHOR = 'Chrome OS Team'
+DEFAULT_AUTHOR = 'ChromeOS Team'
 
 CONTROLFILE_TEMPLATE = (
-"""# Copyright {year} The Chromium OS Authors. All rights reserved.
+        """# Copyright {year} The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -63,11 +64,12 @@
 TEST_CATEGORY = 'Benchmark'
 TEST_CLASS = 'performance'
 TEST_TYPE = 'server'
+PY_VERSION = 3
 
 DOC = '''
 This server side test suite executes the Telemetry Benchmark:
 {test}
-This is part of Chrome for Chrome OS performance testing.
+This is part of Chrome for ChromeOS performance testing.
 
 Pass local=True to run with local telemetry and no AFE server.
 '''
diff --git a/server/site_tests/telemetry_Benchmarks/telemetry_Benchmarks.py b/server/site_tests/telemetry_Benchmarks/telemetry_Benchmarks.py
index 25511e5..415d678 100644
--- a/server/site_tests/telemetry_Benchmarks/telemetry_Benchmarks.py
+++ b/server/site_tests/telemetry_Benchmarks/telemetry_Benchmarks.py
@@ -1,8 +1,29 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import json
+import logging
+import sys
+
+# TODO (b/206008069), remove this when migrated to new env
+sys.path.insert(0,
+                '/usr/local/lib/python2.7/dist-packages/six-1.16.0-py2.7.egg')
+try:
+    # This is weird. But it seems something is bringing in six earlier
+    # Going to force a reload after the egg is inserted.
+    import six
+    if six.PY2:
+        reload(six)
+    else:
+        import importlib
+        importlib.reload(six)
+    logging.debug("six version is {}".format(six.__version__))
+    if six.__version__ != '1.16.0':
+        logging.debug(sys.path)
+except ImportError as e:
+    logging.warning("Could not import six due to %s", e)
 
 from autotest_lib.server import test
 from autotest_lib.server.cros import telemetry_runner
@@ -31,12 +52,13 @@
         if dut_config:
             device_setup_utils.setup_device(host, dut_config)
 
-        telemetry = telemetry_runner.TelemetryRunner(host, local, **optional)
-        perf_value_writer = self
-        extra_args = args.get("extra_args", [])
-        repeat = args.get("pageset_repeat")
-        if repeat is not None:
-            extra_args.append('--pageset-repeat=%s' % repeat)
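+        # Run Telemetry via a context-managed runner so it is cleaned up afterwards.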
+        with telemetry_runner.TelemetryRunnerFactory().get_runner(
+                host, local, **optional) as telemetry:
+            perf_value_writer = self
+            extra_args = args.get("extra_args", [])
+            repeat = args.get("pageset_repeat")
+            if repeat is not None:
+                extra_args.append('--pageset-repeat=%s' % repeat)
 
-        telemetry.run_telemetry_benchmark(benchmark, perf_value_writer,
-                                          *extra_args)
+            telemetry.run_telemetry_benchmark(benchmark, perf_value_writer,
+                                              *extra_args)
diff --git a/server/site_tests/telemetry_Crosperf/control b/server/site_tests/telemetry_Crosperf/control
index 05bf70e..0a86dad 100644
--- a/server/site_tests/telemetry_Crosperf/control
+++ b/server/site_tests/telemetry_Crosperf/control
@@ -12,10 +12,11 @@
 TEST_CLASS = "performance"
 TEST_TYPE = "server"
 JOB_RETRIES = 0
+PY_VERSION = 3
 
 DOC = """
 This runs various Telemetry performance tests under the crosperf script.
-This is part of Chrome OS Toolchain testing platform.
+This is part of the ChromeOS Toolchain testing platform.
 """
 
 # Put the args into the args_dict.
@@ -30,4 +31,3 @@
 # run the test in multiple machines
 
 job.parallel_simple(run_telemetry_Crosperf, machines)
-
diff --git a/server/site_tests/telemetry_Crosperf/telemetry_Crosperf.py b/server/site_tests/telemetry_Crosperf/telemetry_Crosperf.py
index 43f6838..128dcb0 100644
--- a/server/site_tests/telemetry_Crosperf/telemetry_Crosperf.py
+++ b/server/site_tests/telemetry_Crosperf/telemetry_Crosperf.py
@@ -2,7 +2,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from __future__ import print_function
 
 import json
 import logging
@@ -10,6 +9,27 @@
 import re
 import shlex
 import shutil
+import sys
+import tempfile
+
+
+# TODO (b/206008069), remove this when migrated to new env
+sys.path.insert(0,
+                '/usr/local/lib/python2.7/dist-packages/six-1.16.0-py2.7.egg')
+try:
+    # This is weird. But it seems something is bringing in six earlier
+    # Going to force a reload after the egg is inserted.
+    import six
+    if six.PY2:
+        reload(six)
+    else:
+        import importlib
+        importlib.reload(six)
+    logging.debug("six version is {}".format(six.__version__))
+    if six.__version__ != '1.16.0':
+        logging.debug(sys.path)
+except ImportError as e:
+    logging.warning("Could not import six due to %s", e)
 
 from contextlib import contextmanager
 
@@ -44,8 +64,10 @@
 FAILED_STATUS = 'FAILED'
 
 # Regex for the RESULT output lines understood by chrome buildbot.
+# TODO b:169251326 terms below are set outside of this codebase and
+# should be updated when possible ("slave"). # nocheck
 # Keep in sync with
-# chromium/tools/build/scripts/slave/performance_log_processor.py.
+# chromium/tools/build/scripts/slave/performance_log_processor.py. # nocheck
 RESULTS_REGEX = re.compile(r'(?P<IMPORTANT>\*)?RESULT '
                            r'(?P<GRAPH>[^:]*): (?P<TRACE>[^=]*)= '
                            r'(?P<VALUE>[\{\[]?[-\d\., ]+[\}\]]?)('
@@ -79,16 +101,16 @@
         port = ''
 
         if dut:
-          port = dut.port
-          ip = dut.hostname
+            port = dut.port
+            ip = dut.hostname
         else:
-          ip_and_port = client_ip.split(':')
-          ip = ip_and_port[0]
-          if len(ip_and_port) > 1:
-            port = ip_and_port[1]
+            ip_and_port = client_ip.split(':')
+            ip = ip_and_port[0]
+            if len(ip_and_port) > 1:
+                port = ip_and_port[1]
 
         if port:
-          cmd.extend(['-P', str(port)])
+            cmd.extend(['-P', str(port)])
 
         src = 'root@%s:%s' % (ip, file_path)
         cmd.extend([src, host_dir])
@@ -106,8 +128,8 @@
             raise
 
         if exit_code:
-          logging.error('Command "%s" returned non-zero status: %d',
-                           command, exit_code)
+            logging.error('Command "%s" returned non-zero status: %d', command,
+                          exit_code)
         return exit_code
 
     @contextmanager
@@ -153,7 +175,7 @@
                 logging.info('Killing background process, pid %s', pid)
                 # Kill the process blindly. OK if it's already gone.
                 # There is an issue when underlying child processes stay alive
-                # while the parent master process is killed.
+                # while the parent main process is killed.
                 # The solution is to kill the chain of processes via process
                 # group id.
                 dut.run('pgid=$(cat /proc/%s/stat | cut -d")" -f2 | '
@@ -288,7 +310,7 @@
         output_format = 'histograms'
 
         # For local runs, we set local=True and use local chrome source to run
-        # tests; for lab runs, we use devserver instead.
+        # tests; for lab runs, we use the drone instead.
         # By default to be True.
         local = args.get('local', 'true').lower() == 'true'
 
@@ -299,10 +321,6 @@
         # to "telemetry_on_dut" in crosperf experiment files for consistency.
         telemetry_on_dut = args.get('run_local', '').lower() == 'true'
 
-        # Init TelemetryRunner.
-        tr = telemetry_runner.TelemetryRunner(
-                dut, local=local, telemetry_on_dut=telemetry_on_dut)
-
         # Run the test. And collect profile if needed.
         try:
             # If profiler_args specified, we want to add several more options
@@ -338,17 +356,20 @@
                 logging.debug('Telemetry Arguments: %s', arguments)
                 perf_value_writer = self
                 artifacts = True if profiler_args else False
-                result = tr.run_telemetry_benchmark(
-                        test_name,
-                        perf_value_writer,
-                        *arguments,
-                        ex_output_format=output_format,
-                        results_dir=self.resultsdir,
-                        no_verbose=True,
-                        artifacts=artifacts)
-                logging.info('Telemetry completed with exit status: %s.',
-                             result.status)
-                logging.info('output: %s\n', result.output)
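+                # Create the runner via the factory and use it as a context manager.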
+                with telemetry_runner.TelemetryRunnerFactory().get_runner(
+                        dut, local=local,
+                        telemetry_on_dut=telemetry_on_dut) as tr:
+                    result = tr.run_telemetry_benchmark(
+                            test_name,
+                            perf_value_writer,
+                            *arguments,
+                            ex_output_format=output_format,
+                            results_dir=self.resultsdir,
+                            no_verbose=True,
+                            artifacts=artifacts)
+                    logging.info('Telemetry completed with exit status: %s.',
+                                 result.status)
+                    logging.info('output: %s\n', result.output)
 
         except (error.TestFail, error.TestWarn):
             logging.debug(
@@ -383,9 +404,10 @@
                         # perf.data files, but only if they are named exactly
                         # so. Therefore, create a subdir for each perf.data
                         # file.
-                        dst_dir = os.path.join(self.profdir, ''.join(
-                                f.split('.')[:-2]))
-                        os.makedirs(dst_dir)
+                        # Use mkdtemp to make sure a directory name is unique.
+                        dst_dir = tempfile.mkdtemp(dir=self.profdir,
+                                                   prefix=''.join(
+                                                           f.split('.')[:-2]))
                         dst_file = os.path.join(dst_dir, 'perf.data')
                         shutil.copyfile(src_file, dst_file)
             if not perf_exist:
diff --git a/server/site_tests/telemetry_ScrollingActionTests/control b/server/site_tests/telemetry_ScrollingActionTests/control
index 85a3b09..e666217 100644
--- a/server/site_tests/telemetry_ScrollingActionTests/control
+++ b/server/site_tests/telemetry_ScrollingActionTests/control
@@ -7,10 +7,11 @@
 TIME = "LONG"
 TEST_CATEGORY = "Functional"
 TEST_TYPE = "server"
+PY_VERSION = 3
 
 DOC = """
 This server side test suite executes the Telemetry Scrolling Benchmark.
-This is part of our effort to support Chrome for Chrome OS performance testing.
+This is part of our effort to support Chrome for ChromeOS performance testing.
 """
 
 def run_test(machine):
diff --git a/server/site_tests/telemetry_ScrollingActionTests/telemetry_ScrollingActionTests.py b/server/site_tests/telemetry_ScrollingActionTests/telemetry_ScrollingActionTests.py
index 3756dee..ce8b54c 100644
--- a/server/site_tests/telemetry_ScrollingActionTests/telemetry_ScrollingActionTests.py
+++ b/server/site_tests/telemetry_ScrollingActionTests/telemetry_ScrollingActionTests.py
@@ -1,8 +1,28 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
+import sys
+
+# TODO (b/206008069), remove this when migrated to new env
+sys.path.insert(0,
+                '/usr/local/lib/python2.7/dist-packages/six-1.16.0-py2.7.egg')
+try:
+    # This is weird. But it seems something is bringing in six earlier
+    # Going to force a reload after the egg is inserted.
+    import six
+    if six.PY2:
+        reload(six)
+    else:
+        import importlib
+        importlib.reload(six)
+    logging.debug("six version is {}".format(six.__version__))
+    if six.__version__ != '1.16.0':
+        logging.debug(sys.path)
+except ImportError as e:
+    logging.warning("Could not import six due to %s", e)
 
 from autotest_lib.server import test
 from autotest_lib.server.cros import telemetry_runner
@@ -18,7 +38,9 @@
 
         @param host: host we are running telemetry on.
         """
-        telemetry = telemetry_runner.TelemetryRunner(host)
-        result = telemetry.run_telemetry_test('ScrollingActionTest')
-        logging.debug('Telemetry completed with a status of: %s with output:'
-                      ' %s', result.status, result.output)
\ No newline at end of file
+        with telemetry_runner.TelemetryRunnerFactory().get_runner(
+                host) as telemetry:
+            result = telemetry.run_telemetry_test('ScrollingActionTest')
+            logging.debug(
+                    'Telemetry completed with a status of: %s with '
+                    'output: %s', result.status, result.output)
diff --git a/server/site_tests/telemetry_UnitTestsServer/control.guest b/server/site_tests/telemetry_UnitTestsServer/control.guest
deleted file mode 100644
index 906f9a1..0000000
--- a/server/site_tests/telemetry_UnitTestsServer/control.guest
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith@chromium.org"
-NAME = "telemetry_UnitTestsServer_guest"
-PURPOSE = "Run the Telemetry unit tests as guest (incognito)."
-ATTRIBUTES = "suite:telemetry_unit_server"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "desktopui"
-TEST_TYPE = "server"
-
-DOC = """
-Verifies that all Telemetry unit tests pass when run as guest (incognito).
-"""
-
-def run_client(machine):
-    host = hosts.create_host(machine)
-    host.log_kernel()
-    job.run_test('telemetry_UnitTestsServer',  host=host,
-                 use_packaging=use_packaging,
-                 browser_type='system-guest', tag='guest',
-                 unit_tests=['BrowserTest'], perf_tests=[])
-
-
-job.parallel_simple(run_client, machines)
diff --git a/server/site_tests/telemetry_UnitTestsServer/control.perf b/server/site_tests/telemetry_UnitTestsServer/control.perf
deleted file mode 100644
index df0c4b4..0000000
--- a/server/site_tests/telemetry_UnitTestsServer/control.perf
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith@chromium.org"
-NAME = "telemetry_UnitTestsServer_perf"
-PURPOSE = "Run the Telemetry perf unit tests as a logged-in user."
-ATTRIBUTES = "suite:telemetry_unit_server"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "desktopui"
-TEST_TYPE = "server"
-
-DOC = """
-Verifies that all Telemetry perf unit tests pass when run as a logged-in user.
-"""
-
-def run_client(machine):
-    host = hosts.create_host(machine)
-    host.log_kernel()
-    job.run_test('telemetry_UnitTestsServer', host=host,
-                 use_packaging=use_packaging,
-                 copy_boto_file=True,
-                 browser_type='system', tag='perf', unit_tests=[],
-                 perf_tests=[''])
-
-
-job.parallel_simple(run_client, machines)
diff --git a/server/site_tests/telemetry_UnitTestsServer/control.user b/server/site_tests/telemetry_UnitTestsServer/control.user
deleted file mode 100644
index d3b0ebc..0000000
--- a/server/site_tests/telemetry_UnitTestsServer/control.user
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "achuith@chromium.org"
-NAME = "telemetry_UnitTestsServer"
-PURPOSE = "Run the Telemetry unit tests as a logged-in user."
-ATTRIBUTES = "suite:telemetry_unit_server"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "desktopui"
-TEST_TYPE = "server"
-
-DOC = """
-Verifies that all Telemetry unit tests pass when run as a logged-in user.
-"""
-
-def run_client(machine):
-    host = hosts.create_host(machine)
-    host.log_kernel()
-    job.run_test('telemetry_UnitTestsServer',  host=host,
-                 use_packaging=use_packaging,
-                 copy_boto_file=True,
-                 browser_type='system', tag='user', unit_tests=[''],
-                 perf_tests=[])
-
-
-job.parallel_simple(run_client, machines)
diff --git a/server/site_tests/telemetry_UnitTestsServer/telemetry_UnitTestsServer.py b/server/site_tests/telemetry_UnitTestsServer/telemetry_UnitTestsServer.py
deleted file mode 100644
index b75edbf..0000000
--- a/server/site_tests/telemetry_UnitTestsServer/telemetry_UnitTestsServer.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright (c) 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import os.path
-
-from autotest_lib.server import autotest
-from autotest_lib.server import test
-
-
-class telemetry_UnitTestsServer(test.test):
-    """Runs the telemetry_UnitTests client tests with some extra setup."""
-    version = 1
-
-    # The .boto file needs to be copied into the client (VM) so that the tests
-    # can run. The host boto file
-    LOCAL_BOTO_FILE = os.path.join(os.getenv('HOME'), '.boto')
-    CLIENT_BOTO_FILE = '/home/chromeos-test/.boto'
-
-    def initialize(self, host, copy_boto_file=False):
-        if copy_boto_file:
-            # Copy ~/.boto from the local file system to the client. This is
-            # needed for the telemetry tests to run there.
-            logging.info('Creating client directory %s',
-                         os.path.dirname(self.CLIENT_BOTO_FILE))
-            host.run('mkdir -p %s' % os.path.dirname(self.CLIENT_BOTO_FILE))
-
-            logging.info('Copying local %s to client %s', self.LOCAL_BOTO_FILE,
-                         self.CLIENT_BOTO_FILE)
-            assert(os.path.exists(self.LOCAL_BOTO_FILE))
-            host.send_file(self.LOCAL_BOTO_FILE, self.CLIENT_BOTO_FILE)
-
-    def cleanup(self, host, copy_boto_file=False):
-        if copy_boto_file:
-            # Clear the copied .boto file from the client, since it should no
-            # longer be useful.
-            logging.info('Clearing client %s', self.CLIENT_BOTO_FILE)
-            host.run('rm %s' % self.CLIENT_BOTO_FILE)
-
-    def run_once(self, host, use_packaging, browser_type, unit_tests,
-                 perf_tests):
-        # Otherwise we do nothing but run the client side tests.
-        client_at = autotest.Autotest(host)
-        client_at.run_test(
-            'telemetry_UnitTests', host=host, use_packaging=use_packaging,
-            browser_type=browser_type, unit_tests=unit_tests,
-            perf_tests=perf_tests)
diff --git a/server/site_tests/video_PlaybackQuality/control.mp4 b/server/site_tests/video_PlaybackQuality/control.mp4
deleted file mode 100644
index aea06a6..0000000
--- a/server/site_tests/video_PlaybackQuality/control.mp4
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from autotest_lib.server import utils
-
-AUTHOR = 'chromeos-chameleon'
-NAME = 'video_PlaybackQuality.mp4'
-PURPOSE = 'Measure video playback quality and dropped frame count.'
-ATTRIBUTES = "suite:chameleon_dp, suite:chameleon_dp_hdmi, suite:chameleon_hdmi_perbuild, suite:chameleon_hdmi"
-TIME = 'MEDIUM'
-TEST_CATEGORY = 'Performance'
-TEST_CLASS = 'video'
-TEST_TYPE = 'server'
-DEPENDENCIES = 'chameleon'
-JOB_RETRIES = 2
-
-DOC = """
-This test measure video playback quality by playback mp4 video format file.
-It will measure the top/bottom/left/right color bars correction during the
-playback.
-"""
-
-VIDEO_URL = ('http://commondatastorage.googleapis.com/'
-        'chromiumos-test-assets-public/chameleon/'
-        'video_PlaybackQuality/'
-        'video_quality.mp4')
-VIDEO_DESCRIPTION = '1920_1080_60fps_mp4'
-
-TEST_REGIONS= [('Top', (0, 96, 1920, 32)),
-               ('Left', (96, 0, 32, 1080)),
-               ('Bottom', (0, 1080 - 96 - 32, 1920, 32)),
-               ('Right', (1920 - 96 - 32, 0, 32, 1080))]
-
-args_dict = utils.args_to_dict(args)
-chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
-
-def run(machine):
-    host = hosts.create_host(machine, chameleon_args=chameleon_args)
-    job.run_test('video_PlaybackQuality', host=host, video_url=VIDEO_URL,
-                 test_regions=TEST_REGIONS, video_description=VIDEO_DESCRIPTION)
-
-parallel_simple(run, machines)
diff --git a/server/site_tests/video_PlaybackQuality/test_data/edids/EDIDv2_1920x1080 b/server/site_tests/video_PlaybackQuality/test_data/edids/EDIDv2_1920x1080
deleted file mode 100644
index 64036f0..0000000
--- a/server/site_tests/video_PlaybackQuality/test_data/edids/EDIDv2_1920x1080
+++ /dev/null
Binary files differ
diff --git a/server/site_tests/video_PlaybackQuality/video_PlaybackQuality.py b/server/site_tests/video_PlaybackQuality/video_PlaybackQuality.py
deleted file mode 100644
index bff7665..0000000
--- a/server/site_tests/video_PlaybackQuality/video_PlaybackQuality.py
+++ /dev/null
@@ -1,369 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import tempfile
-from PIL import Image
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import file_utils
-from autotest_lib.client.cros.chameleon import chameleon_port_finder
-from autotest_lib.client.cros.chameleon import chameleon_stream_server
-from autotest_lib.client.cros.chameleon import edid
-from autotest_lib.server import test
-from autotest_lib.server.cros.multimedia import remote_facade_factory
-
-
-class video_PlaybackQuality(test.test):
-    """Server side video playback quality measurement.
-
-    This test measures the video playback quality via Chameleon.
-    It outputs two performance values: Number of Corrupted Frames and Number of
-    Dropped Frames.
-
-    """
-    version = 1
-
-    # treat 0~0x30 as 0
-    COLOR_MARGIN_0 = 0x30
-    # treat (0xFF-0x60)~0xFF as 0xFF.
-    COLOR_MARGIN_255 = 0xFF - 0x60
-
-    # If we can't find the expected frame after TIMEOUT_FRAMES, raise exception.
-    TIMEOUT_FRAMES = 120
-
-    # RGB for black. Used for preamble and postamble.
-    RGB_BLACK = [0, 0, 0]
-
-    # Expected color bar rgb. The color order in the array is the same order in
-    # the video frames.
-    EXPECTED_RGB = [('Blue', [0, 0, 255]), ('Green', [0, 255, 0]),
-                    ('Cyan', [0, 255, 255]), ('Red', [255, 0, 0]),
-                    ('Magenta', [255, 0, 255]), ('Yellow', [255, 255, 0]),
-                    ('White', [255, 255, 255])]
-
-    def _save_frame_to_file(self, resolution, frame, filename):
-        """Save video frame to file under results directory.
-
-        This function will append .png filename extension.
-
-        @param resolution: A tuple (width, height) of resolution.
-        @param frame: The video frame data.
-        @param filename: File name.
-
-        """
-        image = Image.fromstring('RGB', resolution, frame)
-        image.save('%s/%s.png' % (self.resultsdir, filename))
-
-    def _check_rgb_value(self, value, expected_value):
-        """Check value of the RGB.
-
-        This function will check if the value is in the range of expected value
-        and its margin.
-
-        @param value: The value for checking.
-        @param expected_value: Expected value. It's either 0 or 0xFF.
-        @returns: True if the value is in range. False otherwise.
-
-        """
-        if expected_value <= value <= self.COLOR_MARGIN_0:
-            return True
-
-        if expected_value >= value >= self.COLOR_MARGIN_255:
-            return True
-
-        return False
-
-    def _check_rgb(self, frame, expected_rgb):
-        """Check the RGB raw data of all pixels in a video frame.
-
-        Checking all pixels may take more than one video frame time, so to
-        analyze video frames on the fly we skip pixels to save checking time.
-        The number of pixels to skip is given by self._skip_check_pixels.
-
-        @param frame: Array of all pixels of video frame.
-        @param expected_rgb: Expected values for RGB.
-        @returns: number of error pixels.
-
-        """
-        error_number = 0
-
-        for i in xrange(0, len(frame), 3 * (self._skip_check_pixels + 1)):
-            if not self._check_rgb_value(ord(frame[i]), expected_rgb[0]):
-                error_number += 1
-                continue
-
-            if not self._check_rgb_value(ord(frame[i + 1]), expected_rgb[1]):
-                error_number += 1
-                continue
-
-            if not self._check_rgb_value(ord(frame[i + 2]), expected_rgb[2]):
-                error_number += 1
-
-        return error_number
-
-    def _find_and_skip_preamble(self, description):
-        """Find and skip the preamble video frames.
-
-        @param description: Description of the log and file name.
-
-        """
-        # find preamble which is the first black frame.
-        number_of_frames = 0
-        while True:
-            video_frame = self._stream_server.receive_realtime_video_frame()
-            (frame_number, width, height, _, frame) = video_frame
-            if self._check_rgb(frame, self.RGB_BLACK) == 0:
-                logging.info('Find preamble at frame %d', frame_number)
-                break
-            if number_of_frames > self.TIMEOUT_FRAMES:
-                raise error.TestFail('%s found no preamble' % description)
-            number_of_frames += 1
-            self._save_frame_to_file((width, height), frame,
-                                     '%s_pre_%d' % (description, frame_number))
-        # skip preamble.
-        # After finding preamble, find the first frame that is not black.
-        number_of_frames = 0
-        while True:
-            video_frame = self._stream_server.receive_realtime_video_frame()
-            (frame_number, _, _, _, frame) = video_frame
-            if self._check_rgb(frame, self.RGB_BLACK) != 0:
-                logging.info('End preamble at frame %d', frame_number)
-                self._save_frame_to_file((width, height), frame,
-                                         '%s_end_preamble' % description)
-                break
-            if number_of_frames > self.TIMEOUT_FRAMES:
-                raise error.TestFail('%s found no color bar' % description)
-            number_of_frames += 1
-
-    def _store_wrong_frames(self, frame_number, resolution, frames):
-        """Store wrong frames for debugging.
-
-        @param frame_number: latest frame number.
-        @param resolution: A tuple (width, height) of resolution.
-        @param frames: Array of video frames. The latest video frame is in the
-                front.
-
-        """
-        for index, frame in enumerate(frames):
-            if not frame:
-                continue
-            element = ((frame_number - index), resolution, frame)
-            self._wrong_frames.append(element)
-
-    def _check_color_bars(self, description):
-        """Check color bars for video playback quality.
-
-        This function reads video frames from the stream server and checks the
-        colors via self._check_rgb until the postamble is read.
-        If only some pixels are wrong, the frame is counted as a corrupted
-        frame. If all pixels are wrong, the frame is counted as a wrong frame.
-
-        @param description: Description of log and file name.
-        @return A tuple (corrupted_frame_count, wrong_frame_count) for quality
-                data.
-
-        """
-        # store the recent 2 video frames for debugging.
-        # Put the latest frame in the front.
-        frame_history = [None, None]
-        # Check index for color bars.
-        check_index = 0
-        corrupted_frame_count = 0
-        wrong_frame_count = 0
-        while True:
-            # Because the first color bar is skipped in _find_and_skip_preamble,
-            # we start from the 2nd color.
-            check_index = (check_index + 1) % len(self.EXPECTED_RGB)
-            video_frame = self._stream_server.receive_realtime_video_frame()
-            (frame_number, width, height, _, frame) = video_frame
-            # drop old video frame and store new one
-            frame_history.pop(-1)
-            frame_history.insert(0, frame)
-            color_name = self.EXPECTED_RGB[check_index][0]
-            expected_rgb = self.EXPECTED_RGB[check_index][1]
-            error_number = self._check_rgb(frame, expected_rgb)
-
-            # The video frame is correct, go to next video frame.
-            if not error_number:
-                continue
-
-            # Total pixels need to be adjusted by the _skip_check_pixels.
-            total_pixels = width * height / (self._skip_check_pixels + 1)
-            log_string = ('[%s] Number of error pixels %d on frame %d, '
-                          'expected color %s, RGB %r' %
-                          (description, error_number, frame_number, color_name,
-                           expected_rgb))
-
-            self._store_wrong_frames(frame_number, (width, height),
-                                     frame_history)
-            # clean history after they are stored.
-            frame_history = [None, None]
-
-            # Some pixels are wrong.
-            if error_number != total_pixels:
-                corrupted_frame_count += 1
-                logging.warn('[Corrupted]%s', log_string)
-                continue
-
-            # All pixels are wrong.
-            # Check if we get postamble where all pixels are black.
-            if self._check_rgb(frame, self.RGB_BLACK) == 0:
-                logging.info('Find postamble at frame %d', frame_number)
-                break
-
-            wrong_frame_count += 1
-            logging.info('[Wrong]%s', log_string)
-            # Adjust the check index due to frame drop.
-            # The screen should keep the old frame or go to next video frame
-            # due to frame drop.
-            # Check if color is the same as the previous frame.
-            # If it is not the same as previous frame, we assign the color of
-            # next frame without checking.
-            previous_index = ((check_index + len(self.EXPECTED_RGB) - 1)
-                              % len(self.EXPECTED_RGB))
-            if not self._check_rgb(frame, self.EXPECTED_RGB[previous_index][1]):
-                check_index = previous_index
-            else:
-                check_index = (check_index + 1) % len(self.EXPECTED_RGB)
-
-        return (corrupted_frame_count, wrong_frame_count)
-
-    def _dump_wrong_frames(self, description):
-        """Dump wrong frames to files.
-
-        @param description: Description of the file name.
-
-        """
-        for frame_number, resolution, frame in self._wrong_frames:
-            self._save_frame_to_file(resolution, frame,
-                                     '%s_%d' % (description, frame_number))
-        self._wrong_frames = []
-
-    def _prepare_playback(self):
-        """Prepare playback video."""
-        # Workaround for white bar on rightmost and bottommost on samus when we
-        # set fullscreen from fullscreen.
-        self._display_facade.set_fullscreen(False)
-        self._video_facade.prepare_playback(self._video_tempfile.name)
-
-    def _get_playback_quality(self, description, capture_dimension):
-        """Get the playback quality.
-
-        This function will play back the video and analyze each video frame.
-        It will also output performance data.
-
-        @param description: Description of the log, file name and performance
-                data.
-        @param capture_dimension: A tuple (width, height) of the captured video
-                frame.
-        """
-        logging.info('Start to get %s playback quality', description)
-        self._prepare_playback()
-        self._chameleon_port.start_capturing_video(capture_dimension)
-        self._stream_server.reset_video_session()
-        self._stream_server.dump_realtime_video_frame(
-            False, chameleon_stream_server.RealtimeMode.BestEffort)
-
-        self._video_facade.start_playback()
-        self._find_and_skip_preamble(description)
-
-        (corrupted_frame_count, wrong_frame_count) = (
-            self._check_color_bars(description))
-
-        self._stream_server.stop_dump_realtime_video_frame()
-        self._chameleon_port.stop_capturing_video()
-        self._video_facade.pause_playback()
-        self._dump_wrong_frames(description)
-
-        dropped_frame_count = self._video_facade.dropped_frame_count()
-
-        graph_name = '%s_%s' % (self._video_description, description)
-        self.output_perf_value(description='Corrupted frames',
-                               value=corrupted_frame_count, units='frame',
-                               higher_is_better=False, graph=graph_name)
-        self.output_perf_value(description='Wrong frames',
-                               value=wrong_frame_count, units='frame',
-                               higher_is_better=False, graph=graph_name)
-        self.output_perf_value(description='Dropped frames',
-                               value=dropped_frame_count, units='frame',
-                               higher_is_better=False, graph=graph_name)
-
-    def run_once(self, host, video_url, video_description, test_regions,
-                 skip_check_pixels=5):
-        """Runs video playback quality measurement.
-
-        @param host: A host object representing the DUT.
-        @param video_url: The URL of the test video.
-        @param video_description: a string describes the video to play which
-                will be part of entry name in dashboard.
-        @param test_regions: An array of tuples (description, capture_dimension)
-                for the testing region of video. capture_dimension is a tuple
-                (width, height).
-        @param skip_check_pixels: Number of pixels to skip after each checked
-                pixel. 0 means no skip; 1 means check 1 pixel and skip 1 pixel.
-                Checking all pixels may take more than 1 video frame time, so
-                skipping some pixels saves checking time.
-
-        """
-        # Store wrong video frames for dumping and debugging.
-        self._video_url = video_url
-        self._video_description = video_description
-        self._wrong_frames = []
-        self._skip_check_pixels = skip_check_pixels
-
-        factory = remote_facade_factory.RemoteFacadeFactory(
-                host, results_dir=self.resultsdir, no_chrome=True)
-        chameleon_board = host.chameleon
-        browser_facade = factory.create_browser_facade()
-        display_facade = factory.create_display_facade()
-        self._display_facade = display_facade
-        self._video_facade = factory.create_video_facade()
-        self._stream_server = chameleon_stream_server.ChameleonStreamServer(
-            chameleon_board.host.hostname)
-
-        chameleon_board.setup_and_reset(self.outputdir)
-        self._stream_server.connect()
-
-        # Download the video to self._video_tempfile.name
-        _, ext = os.path.splitext(video_url)
-        self._video_tempfile = tempfile.NamedTemporaryFile(suffix=ext)
-        # The default permission is 0o600.
-        os.chmod(self._video_tempfile.name, 0o644)
-        file_utils.download_file(video_url, self._video_tempfile.name)
-
-        browser_facade.start_default_chrome()
-        display_facade.set_mirrored(False)
-
-        edid_path = os.path.join(self.bindir, 'test_data', 'edids',
-                                 'EDIDv2_1920x1080')
-        finder = chameleon_port_finder.ChameleonVideoInputFinder(
-                chameleon_board, display_facade)
-        for chameleon_port in finder.iterate_all_ports():
-            self._chameleon_port = chameleon_port
-
-            connector_type = chameleon_port.get_connector_type()
-            logging.info('See the display on Chameleon: port %d (%s)',
-                         chameleon_port.get_connector_id(),
-                         connector_type)
-
-            with chameleon_port.use_edid(
-                    edid.Edid.from_file(edid_path, skip_verify=True)):
-                resolution = utils.wait_for_value_changed(
-                    display_facade.get_external_resolution,
-                    old_value=None)
-                if resolution is None:
-                    raise error.TestFail('No external display detected on DUT')
-
-            display_facade.move_to_display(
-                display_facade.get_first_external_display_id())
-
-            for description, capture_dimension in test_regions:
-                self._get_playback_quality('%s_%s' % (connector_type,
-                                                      description),
-                                           capture_dimension)
diff --git a/server/site_utils.py b/server/site_utils.py
index 1ea764c..9794d73 100644
--- a/server/site_utils.py
+++ b/server/site_utils.py
@@ -10,8 +10,6 @@
 
 import collections
 import contextlib
-import grp
-import six.moves.http_client
 import json
 import logging
 import os
@@ -39,19 +37,13 @@
 from autotest_lib.server.cros.dynamic_suite import job_status
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
 
 CONFIG = global_config.global_config
 
-_SHERIFF_JS = CONFIG.get_config_value('NOTIFICATIONS', 'sheriffs', default='')
-_LAB_SHERIFF_JS = CONFIG.get_config_value(
-        'NOTIFICATIONS', 'lab_sheriffs', default='')
-_CHROMIUM_BUILD_URL = CONFIG.get_config_value(
-        'NOTIFICATIONS', 'chromium_build_url', default='')
-
 LAB_GOOD_STATES = ('open', 'throttled')
 
 ENABLE_DRONE_IN_RESTRICTED_SUBNET = CONFIG.get_config_value(
@@ -207,7 +199,7 @@
     return None
 
 
-# TODO(fdeng): fix get_sheriffs crbug.com/483254
+# TODO(ayatane): Can be deleted
 def get_sheriffs(lab_only=False):
     """
     Polls the javascript file that holds the identity of the sheriff and
@@ -219,30 +211,7 @@
     @return: A list of chromium.org sheriff email addresses to cc on the bug.
              An empty list if the javascript could not be parsed.
     """
-    sheriff_ids = []
-    sheriff_js_list = _LAB_SHERIFF_JS.split(',')
-    if not lab_only:
-        sheriff_js_list.extend(_SHERIFF_JS.split(','))
-
-    for sheriff_js in sheriff_js_list:
-        try:
-            url_content = utils.urlopen('%s%s'% (
-                _CHROMIUM_BUILD_URL, sheriff_js)).read()
-        except (ValueError, IOError) as e:
-            logging.warning('could not parse sheriff from url %s%s: %s',
-                             _CHROMIUM_BUILD_URL, sheriff_js, str(e))
-        except (urllib.error.URLError, six.moves.http_client.HTTPException) as e:
-            logging.warning('unexpected error reading from url "%s%s": %s',
-                             _CHROMIUM_BUILD_URL, sheriff_js, str(e))
-        else:
-            ldaps = re.search(r"document.write\('(.*)'\)", url_content)
-            if not ldaps:
-                logging.warning('Could not retrieve sheriff ldaps for: %s',
-                                 url_content)
-                continue
-            sheriff_ids += ['%s@chromium.org' % alias.replace(' ', '')
-                            for alias in ldaps.group(1).split(',')]
-    return sheriff_ids
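+    # The sheriff rotation is no longer polled; callers always receive an
+    # empty list.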
+    return []
 
 
 def remote_wget(source_url, dest_path, ssh_cmd):
@@ -399,9 +368,9 @@
     @param tko: an instance of TKO as defined in server/frontend.py.
     @return: A defaultdict where keys are test names and values are
              lists of test statuses, e.g.,
-             {'dummy_Fail.Error': ['ERROR'. 'ERROR'],
-              'dummy_Fail.NAError': ['TEST_NA'],
-              'dummy_Fail.RetrySuccess': ['ERROR', 'GOOD'],
+             {'stub_Fail.Error': ['ERROR', 'ERROR'],
+              'stub_Fail.NAError': ['TEST_NA'],
+              'stub_Fail.RetrySuccess': ['ERROR', 'GOOD'],
               }
     @raise: Exception when there is no test view found.
 
@@ -422,7 +391,7 @@
     Constructs a key string from parameters.
 
     @param prefix: Prefix for the generating key.
-    @param suite: a suite name. e.g., bvt-cq, bvt-inline, dummy
+    @param suite: a suite name. e.g., bvt-cq, bvt-inline, infra_qual
     @param build: The build string. This string should have a consistent
         format eg: x86-mario-release/R26-3570.0.0. If the format of this
         string changes such that we can't determine build_type or branch
@@ -475,29 +444,6 @@
     return CONFIG.get_config_value('SERVER', 'global_afe_hostname')
 
 
-def is_restricted_user(username):
-    """Determines if a user is in a restricted group.
-
-    User in restricted group only have access to main.
-
-    @param username: A string, representing a username.
-
-    @returns: True if the user is in a restricted group.
-    """
-    if not username:
-        return False
-
-    restricted_groups = CONFIG.get_config_value(
-            'AUTOTEST_WEB', 'restricted_groups', default='').split(',')
-    for group in restricted_groups:
-        try:
-            if group and username in grp.getgrnam(group).gr_mem:
-                return True
-        except KeyError as e:
-            logging.debug("%s is not a valid group.", group)
-    return False
-
-
 def get_special_task_status(is_complete, success, is_active):
     """Get the status of a special task.
 
@@ -692,26 +638,6 @@
     return machine.get('connection_pool')
 
 
-def get_creds_abspath(creds_file):
-    """Returns the abspath of the credentials file.
-
-    If creds_file is already an absolute path, just return it.
-    Otherwise, assume it is located in the creds directory
-    specified in global_config and return the absolute path.
-
-    @param: creds_path, a path to the credentials.
-    @return: An absolute path to the credentials file.
-    """
-    if not creds_file:
-        return None
-    if os.path.isabs(creds_file):
-        return creds_file
-    creds_dir = CONFIG.get_config_value('SERVER', 'creds_dir', default='')
-    if not creds_dir or not os.path.exists(creds_dir):
-        creds_dir = common.autotest_dir
-    return os.path.join(creds_dir, creds_file)
-
-
 def SetupTsMonGlobalState(*args, **kwargs):
     """Import-safe wrap around chromite.lib.ts_mon_config's setup function.
 
@@ -723,9 +649,9 @@
         # 1-2 seconds to the module import time and most users of site_utils
         # don't need it. The correct fix is to break apart site_utils into more
         # meaningful chunks.
-        from chromite.lib import ts_mon_config
-    except ImportError:
-        logging.warn('Unable to import chromite. Monarch is disabled.')
+        from autotest_lib.utils.frozen_chromite.lib import ts_mon_config
+    except ImportError as e:
+        logging.warning('Unable to import chromite. Monarch is disabled: %s', e)
         return TrivialContextManager()
 
     try:
@@ -847,19 +773,19 @@
     metrics.Counter(RESULT_METRICS_PREFIX + 'client_result_collected_KB',
                     description='The total size (in KB) of test results '
                     'collected from test device. Set to be the total size of '
-                    'the given path.'
-                    ).increment_by(result_size_info.client_result_collected_KB,
-                                   fields=fields)
+                    'the given path.').increment_by(int(
+                            result_size_info.client_result_collected_KB),
+                                                    fields=fields)
     metrics.Counter(RESULT_METRICS_PREFIX + 'original_result_total_KB',
                     description='The original size (in KB) of test results '
-                    'before being trimmed.'
-                    ).increment_by(result_size_info.original_result_total_KB,
-                                   fields=fields)
+                    'before being trimmed.').increment_by(int(
+                            result_size_info.original_result_total_KB),
+                                                          fields=fields)
     metrics.Counter(RESULT_METRICS_PREFIX + 'result_uploaded_KB',
                     description='The total size (in KB) of test results to be '
-                    'uploaded.'
-                    ).increment_by(result_size_info.result_uploaded_KB,
-                                   fields=fields)
+                    'uploaded.').increment_by(int(
+                            result_size_info.result_uploaded_KB),
+                                              fields=fields)
 
 
 @metrics.SecondsTimerDecorator(
diff --git a/server/site_utils_unittest.py b/server/site_utils_unittest.py
index 223bf13..7158be7 100644
--- a/server/site_utils_unittest.py
+++ b/server/site_utils_unittest.py
@@ -7,8 +7,8 @@
 from __future__ import division
 from __future__ import print_function
 
-import mox
 import unittest
+from unittest.mock import patch
 
 import common
 from autotest_lib.frontend import setup_django_lite_environment
@@ -19,7 +19,7 @@
 import six
 
 
-class SiteUtilsUnittests(mox.MoxTestBase):
+class SiteUtilsUnittests(unittest.TestCase):
     """Test functions in site_utils.py"""
 
     def testParseJobName(self):
@@ -58,32 +58,33 @@
             self.assertEqual(info, expected_info, '%s failed to be parsed to '
                              '%s' % (test_job_name, expected_info))
 
-
     def testGetViewsFromTko(self):
         """Test method get_test_views_from_tko
         """
         test_results = [
-            ('dummy_Pass', 'GOOD'),
-            ('dummy_Fail.RetrySuccess', 'GOOD'),
-            ('dummy_Fail.RetrySuccess', 'FAIL'),
-            ('dummy_Fail.Fail', 'FAIL'),
-            ('dummy_Fail.Fail', 'FAIL'),
+                ('stub_Pass', 'GOOD'),
+                ('stub_Fail.RetrySuccess', 'GOOD'),
+                ('stub_Fail.RetrySuccess', 'FAIL'),
+                ('stub_Fail.Fail', 'FAIL'),
+                ('stub_Fail.Fail', 'FAIL'),
         ]
 
         expected_test_views = {
-            'dummy_Pass': ['GOOD'],
-            'dummy_Fail.RetrySuccess': ['FAIL', 'GOOD'],
-            'dummy_Fail.Fail': ['FAIL', 'FAIL'],
+                'stub_Pass': ['GOOD'],
+                'stub_Fail.RetrySuccess': ['FAIL', 'GOOD'],
+                'stub_Fail.Fail': ['FAIL', 'FAIL'],
         }
 
-        self.mox.UnsetStubs()
-        tko = self.mox.CreateMock(frontend.TKO)
-        tko.run('get_detailed_test_views', afe_job_id=0).AndReturn(
-            [{'test_name':r[0], 'status':r[1]} for r in test_results])
+        patcher = patch.object(frontend, 'TKO')
+        tko = patcher.start()
+        self.addCleanup(patcher.stop)
 
-        self.mox.ReplayAll()
+        tko.run.return_value = ([{
+                'test_name': r[0],
+                'status': r[1]
+        } for r in test_results])
         test_views = site_utils.get_test_views_from_tko(0, tko)
-        self.mox.VerifyAll()
+        tko.run.assert_called_with('get_detailed_test_views', afe_job_id=0)
 
         self.assertEqual(sorted(test_views.keys()),
                          sorted(expected_test_views.keys()),
diff --git a/server/standalone_profiler.py b/server/standalone_profiler.py
index 4674d59..ca9dc6f 100644
--- a/server/standalone_profiler.py
+++ b/server/standalone_profiler.py
@@ -22,8 +22,8 @@
 # Client control file snippet used to synchronize profiler start & stop.
 _RUNTEST_PATTERN = ("job.run_test('profiler_sync', timeout_sync=%r,\n"
                     "             timeout_start=%r, timeout_stop=%r,\n"
-                    "             hostid='%s', masterid='%s', all_ids=%r)")
-_PROF_MASTER = platform.node()
+                    "             hostid='%s', mainid='%s', all_ids=%r)")
+_PROF_MAIN = platform.node()
 _PORT = 11920
 
 
@@ -62,7 +62,7 @@
 
     profiler_sync_call = (_RUNTEST_PATTERN %
                           (timeout_sync, timeout_start, timeout_stop,
-                           hostname, _PROF_MASTER, machines))
+                           hostname, _PROF_MAIN, machines))
     control_file.append(profiler_sync_call)
 
     for profiler in reversed(profilers):
@@ -72,24 +72,24 @@
 
 
 def wait_for_profilers(machines, timeout=300):
-    sb = barrier.barrier(_PROF_MASTER, "sync_profilers",
+    sb = barrier.barrier(_PROF_MAIN, "sync_profilers",
             timeout, port=_PORT)
-    sb.rendezvous_servers(_PROF_MASTER, *machines)
+    sb.rendezvous_servers(_PROF_MAIN, *machines)
 
 
 def start_profilers(machines, timeout=120):
-    sb = barrier.barrier(_PROF_MASTER, "start_profilers",
+    sb = barrier.barrier(_PROF_MAIN, "start_profilers",
             timeout, port=_PORT)
-    sb.rendezvous_servers(_PROF_MASTER, *machines)
+    sb.rendezvous_servers(_PROF_MAIN, *machines)
 
 
 def stop_profilers(machines, timeout=120):
-    sb = barrier.barrier(_PROF_MASTER, "stop_profilers",
+    sb = barrier.barrier(_PROF_MAIN, "stop_profilers",
             timeout, port=_PORT)
-    sb.rendezvous_servers(_PROF_MASTER, *machines)
+    sb.rendezvous_servers(_PROF_MAIN, *machines)
 
 
 def finish_profilers(machines, timeout=120):
-    sb = barrier.barrier(_PROF_MASTER, "finish_profilers",
+    sb = barrier.barrier(_PROF_MAIN, "finish_profilers",
             timeout, port=_PORT)
-    sb.rendezvous_servers(_PROF_MASTER, *machines)
+    sb.rendezvous_servers(_PROF_MAIN, *machines)
diff --git a/server/subcommand.py b/server/subcommand.py
index 234c81a..9b6dbbe 100644
--- a/server/subcommand.py
+++ b/server/subcommand.py
@@ -47,8 +47,10 @@
             if status != 0:
                 run_error = True
 
-        results.append(six.moves.cPickle.load(task.result_pickle))
-        task.result_pickle.close()
+        results.append(
+                six.moves.cPickle.load(task.result_pickle, encoding='utf-8'))
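+        # Only close when the pickle object actually provides close();
+        # test doubles used in the unit tests may not.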
+        if hasattr(task.result_pickle, 'close'):
+            task.result_pickle.close()
 
     if return_results:
         return results
@@ -169,7 +171,7 @@
 
         if self.pid:                            # I am the parent
             os.close(w)
-            self.result_pickle = os.fdopen(r, 'r')
+            self.result_pickle = os.fdopen(r, 'rb')
             return
         else:
             os.close(r)
diff --git a/server/subcommand_unittest.py b/server/subcommand_unittest.py
index c775f9f..057dac9 100755
--- a/server/subcommand_unittest.py
+++ b/server/subcommand_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2009 Google Inc. Released under the GPL v2
 
 from __future__ import absolute_import
@@ -241,14 +241,14 @@
         """Test fork_waitfor failure with an exception."""
         cmd = self._setup_subcommand(lambda: None, 'foo')
         with self.assertRaises(error.AutoservSubcommandError):
-          cmd.fork_waitfor(timeout=60)
+            cmd.fork_waitfor(timeout=60)
 
 
     def test_fork_waitfor_timeout_fail(self):
         """Test fork_waitfor timing out."""
         cmd = self._setup_subcommand(lambda: time.sleep(60))
         with self.assertRaises(error.AutoservSubcommandError):
-          cmd.fork_waitfor(timeout=1)
+            cmd.fork_waitfor(timeout=1)
 
 
 class parallel_test(unittest.TestCase):
@@ -263,7 +263,7 @@
 
     def _get_cmd(self, func, args):
         cmd = _create_subcommand(func, args)
-        cmd.result_pickle = self.god.create_mock_class(file, 'file')
+        cmd.result_pickle = self.god.create_mock_class(open, 'open')
         return self.god.create_mock_class(cmd, 'subcommand')
 
 
@@ -286,9 +286,9 @@
 
         for task in tasklist:
             task.fork_waitfor.expect_call(timeout=None).and_return(0)
-            (six.moves.cPickle.load.expect_call(task.result_pickle)
-                    .and_return(6))
-            task.result_pickle.close.expect_call()
+            (six.moves.cPickle.load.expect_call(
+                    task.result_pickle, encoding='utf-8').and_return(6))
+
 
         subcommand.parallel(tasklist)
         self.god.check_playback()
@@ -299,9 +299,9 @@
 
         for task in tasklist:
             task.fork_waitfor.expect_call(timeout=None).and_return(1)
-            (six.moves.cPickle.load.expect_call(task.result_pickle)
-                    .and_return(6))
-            task.result_pickle.close.expect_call()
+            (six.moves.cPickle.load.expect_call(
+                    task.result_pickle, encoding='utf-8').and_return(6))
+
 
         self.assertRaises(subcommand.error.AutoservError, subcommand.parallel,
                           tasklist)
@@ -319,9 +319,10 @@
         for task in tasklist:
             subcommand.time.time.expect_call().and_return(1)
             task.fork_waitfor.expect_call(timeout=timeout).and_return(None)
-            (six.moves.cPickle.load.expect_call(task.result_pickle)
-                    .and_return(6))
-            task.result_pickle.close.expect_call()
+            (six.moves.cPickle.load.expect_call(
+                    task.result_pickle, encoding='utf-8').and_return(6))
+
+
 
         self.assertRaises(subcommand.error.AutoservError, subcommand.parallel,
                           tasklist, timeout=timeout)
@@ -332,15 +333,15 @@
         tasklist = self._setup_common()
 
         tasklist[0].fork_waitfor.expect_call(timeout=None).and_return(0)
-        (six.moves.cPickle.load.expect_call(tasklist[0].result_pickle)
-                .and_return(6))
-        tasklist[0].result_pickle.close.expect_call()
+        (six.moves.cPickle.load.expect_call(tasklist[0].result_pickle,
+                                            encoding='utf-8').and_return(6))
+        # tasklist[0].result_pickle.close.expect_call()
 
         error = Exception('fail')
         tasklist[1].fork_waitfor.expect_call(timeout=None).and_return(1)
-        (six.moves.cPickle.load.expect_call(tasklist[1].result_pickle)
-                .and_return(error))
-        tasklist[1].result_pickle.close.expect_call()
+        (six.moves.cPickle.load.expect_call(
+                tasklist[1].result_pickle, encoding='utf-8').and_return(error))
+        # tasklist[1].result_pickle.close.expect_call()
 
         self.assertEquals(subcommand.parallel(tasklist, return_results=True),
                           [6, error])
diff --git a/server/system_utils.py b/server/system_utils.py
index 951cf06..26af73a 100644
--- a/server/system_utils.py
+++ b/server/system_utils.py
@@ -13,28 +13,20 @@
 import common
 from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.common_lib import utils
-from autotest_lib.site_utils import server_manager_utils
 
 
 def get_drones():
     """Get a list of drones from server database or global config.
     """
-    if server_manager_utils.use_server_db():
-        return server_manager_utils.get_drones()
-    else:
-        return []
+    return []
 
 
 def get_shards():
     """Get a list of shards from server database or global config.
     """
-    if server_manager_utils.use_server_db():
-        return server_manager_utils.get_shards()
-    else:
-        config = global_config.global_config
-        shards = config.get_config_value(
-                'SERVER', 'shards', default='')
-        return [hostname.strip() for hostname in shards.split(',')]
+    config = global_config.global_config
+    shards = config.get_config_value('SERVER', 'shards', default='')
+    return [hostname.strip() for hostname in shards.split(',')]
 
 
 class DroneCache(object):
diff --git a/server/test.py b/server/test.py
index 6f40475..ab0c806 100644
--- a/server/test.py
+++ b/server/test.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2007 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,6 +16,7 @@
 from autotest_lib.client.common_lib import log
 from autotest_lib.client.common_lib import test as common_test
 from autotest_lib.client.common_lib import utils
+from autotest_lib.server import hosts, autotest
 
 
 class test(common_test.base_test):
@@ -28,7 +30,7 @@
 mytest = test.test(job, '', %r)
 job.sysinfo.log_before_each_test(mytest)
 sysinfo_pickle = os.path.join(mytest.outputdir, 'sysinfo.pickle')
-pickle.dump(job.sysinfo, open(sysinfo_pickle, 'w'))
+pickle.dump(job.sysinfo, open(sysinfo_pickle, 'wb'))
 job.record('GOOD', '', 'sysinfo.before')
 """
 
@@ -39,9 +41,15 @@
 # success is passed in so diffable_logdir can decide if or not to collect
 # full log content.
 mytest.success = %s
+mytest.collect_full_logs = %s
 sysinfo_pickle = os.path.join(mytest.outputdir, 'sysinfo.pickle')
 if os.path.exists(sysinfo_pickle):
-    job.sysinfo = pickle.load(open(sysinfo_pickle))
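+    # The pickle may be binary; retry in binary mode if text decoding fails.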
+    try:
+        with open(sysinfo_pickle, 'r') as rf:
+            job.sysinfo = pickle.load(rf)
+    except UnicodeDecodeError:
+        with open(sysinfo_pickle, 'rb') as rf:
+            job.sysinfo = pickle.load(rf)
     job.sysinfo.__init__(job.resultdir)
 job.sysinfo.log_after_each_test(mytest)
 job.record('GOOD', '', 'sysinfo.after')
@@ -57,10 +65,15 @@
 mytest = test.test(job, '', %r)
 sysinfo_pickle = os.path.join(mytest.outputdir, 'sysinfo.pickle')
 if os.path.exists(sysinfo_pickle):
-    job.sysinfo = pickle.load(open(sysinfo_pickle))
+    try:
+        with open(sysinfo_pickle, 'r') as rf:
+            job.sysinfo = pickle.load(rf)
+    except UnicodeDecodeError:
+        with open(sysinfo_pickle, 'rb') as rf:
+            job.sysinfo = pickle.load(rf)
     job.sysinfo.__init__(job.resultdir)
 job.sysinfo.%s(mytest, iteration=%d)
-pickle.dump(job.sysinfo, open(sysinfo_pickle, 'w'))
+pickle.dump(job.sysinfo, open(sysinfo_pickle, 'wb'))
 job.record('GOOD', '', 'sysinfo.iteration.%s')
 """
 
@@ -101,7 +114,6 @@
 
     def _install(self):
         if not self.host:
-            from autotest_lib.server import hosts, autotest
             self.host = hosts.create_target_machine(
                     self.job.machine_dict_list[0])
             try:
@@ -217,12 +229,29 @@
     def after_hook(self, mytest, host, at, outputdir):
         self._push_pickle(host, outputdir);
         # run the post-test sysinfo script
-        at.run(_sysinfo_after_test_script % (outputdir, mytest.success),
+        at.run(_sysinfo_after_test_script %
+               (outputdir, mytest.success, mytest.force_full_log_collection),
                results_dir=self.job.resultdir)
 
         self._pull_sysinfo_keyval(host, outputdir, mytest)
 
 
+    @log.log_and_ignore_errors("post-test server crossystem error:")
+    def after_hook_crossystem_fast(self, mytest):
+        """Collects crossystem log file in fast mode
+
+        This is used in place of after_hook in fast mode. This function will
+        grab output of crossystem but not process other sysinfo logs.
+        """
+        if not self.host:
+            self.host = hosts.create_target_machine(
+                    self.job.machine_dict_list[0])
+        output_path = '%s/sysinfo' % mytest.outputdir
+        utils.run('mkdir -p %s' % output_path)
+        crossystem_output = self.host.run_output('crossystem')
+        with open('%s/crossystem' % output_path, 'w') as f:
+            f.write(crossystem_output)
+
     def cleanup(self, host_close=True):
         if self.host and self.autotest:
             try:
@@ -260,15 +289,14 @@
             'disable_after_iteration_sysinfo', False)
 
     disable_sysinfo = dargs.pop('disable_sysinfo', False)
+    logger = _sysinfo_logger(job)
     if job.fast and not disable_sysinfo:
         # Server job will be executed in fast mode, which means
         # 1) if job succeeds, no hook will be executed.
         # 2) if job failed, after_hook will be executed.
-        logger = _sysinfo_logger(job)
         logging_args = [None, logger.after_hook, None,
                         logger.after_iteration_hook]
     elif not disable_sysinfo:
-        logger = _sysinfo_logger(job)
         logging_args = [
             logger.before_hook if not disable_before_test_hook else None,
             logger.after_hook if not disable_after_test_hook else None,
@@ -278,8 +306,7 @@
                  if not disable_after_iteration_hook else None),
         ]
     else:
-        logger = None
-        logging_args = [None, None, None, None]
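+        # Even with sysinfo disabled, still collect crossystem output after
+        # each test via the fast hook.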
+        logging_args = [None, logger.after_hook_crossystem_fast, None, None]
 
     # add in a hook that calls host.log_kernel if we can
     def log_kernel_hook(mytest, existing_hook=logging_args[0]):
diff --git a/server/tests/OWNERS b/server/tests/OWNERS
new file mode 100644
index 0000000..0d959c8
--- /dev/null
+++ b/server/tests/OWNERS
@@ -0,0 +1,3 @@
+include /FIRMWARE_OWNERS
+include /ENGPROD_OWNERS
+include /INFRA_OWNERS
diff --git a/server/tests/barriertest_2client/control.srv b/server/tests/barriertest_2client/control.srv
deleted file mode 100644
index f810075..0000000
--- a/server/tests/barriertest_2client/control.srv
+++ /dev/null
@@ -1,78 +0,0 @@
-AUTHOR = "gps@google.com (Gregory P. Smith)"
-TIME = "SHORT"
-NAME = "barrier_2client"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = 'Network'
-TEST_TYPE = "Server"
-SYNC_COUNT = 2
-DOC = """
-A functional test of autotest's Barrier mechanisms for synchronizing
-events between two clients without the help of the server.
-"""
-
-from autotest_lib.server import utils
-from six.moves import zip
-
-def run(pair):
-    logging.info('Running on %s and %s', pair[0], pair[1])
-    host_objs = [hosts.create_host(machine) for machine in pair]
-    host_at_objs = [autotest.Autotest(host) for host in host_objs]
-
-    client_control_template = """
-import logging, platform, socket, traceback
-try:
-    client_hostnames = %r
-    master_hostname = client_hostnames[0]
-    client_hostname = client_hostnames[1]
-
-    logging.info('Testing hostname only barrier')
-    barrier = job.barrier(platform.node(), 'barriertest_2client', 120)
-    logging.info('rendezvous-ing')
-    barrier.rendezvous(master_hostname, client_hostname)
-    logging.info('done.')
-
-    logging.info('Testing local identifier barrier')
-    barrier = job.barrier(platform.node() + '#id0', 'barriertest_2client', 120)
-    logging.info('rendezvous-ing')
-    barrier.rendezvous(master_hostname + '#id0',
-                       client_hostname + '#id0')
-    logging.info('done.')
-
-    logging.info('Testing IP@ barrier')
-    barrier = job.barrier(socket.gethostbyname(platform.node()),
-                          'barriertest_2client', 120)
-    logging.info('rendezvous-ing')
-    barrier.rendezvous(socket.gethostbyname(master_hostname),
-                       socket.gethostbyname(client_hostname))
-    logging.info('done.')
-
-    logging.info('Testing IP@ barrier with ids')
-    barrier = job.barrier(socket.gethostbyname(platform.node()) + '#42',
-                          'barriertest_2client', 120)
-    logging.info('rendezvous-ing')
-    barrier.rendezvous(socket.gethostbyname(master_hostname) + '#42',
-                       socket.gethostbyname(client_hostname) + '#42')
-    logging.info('done.')
-except:
-    traceback.print_exc()
-    raise
-"""
-    client_controls = [client_control_template % (pair,) for host in host_objs]
-
-    subcommand_list = []
-    for host, host_at, control in zip(host_objs, host_at_objs, client_controls):
-        subcommand_list.append(subcommand(host_at.run,
-                                          (control, host.hostname)))
-
-    parallel(subcommand_list)
-
-
-# grab the pairs (and failures)
-(pairs, failures) = utils.form_ntuples_from_machines(machines, 2)
-
-# log the failures
-for failure in failures:
-    job.record("FAIL", failure[0], "barrier_2client", failure[1])
-
-# now run through each pair and run
-job.parallel_simple(run, pairs, log=False)
diff --git a/server/tests/netperf2/control.srv b/server/tests/netperf2/control.srv
deleted file mode 100644
index 9969679..0000000
--- a/server/tests/netperf2/control.srv
+++ /dev/null
@@ -1,40 +0,0 @@
-AUTHOR = "kdlucas@google.com (K.D. Lucas)"
-TIME = "SHORT"
-NAME = "Netperf Basic"
-TEST_CATEGORY = "Benchmark"
-TEST_CLASS = 'Network'
-TEST_TYPE = "Server"
-SYNC_COUNT = 2
-DOC = """
-netperf2 is a 2 machine test (server/client) that measures the performance
-of various network attributes.
-
-Arguments to run_test:
-
-test          - the list of valid netperf tests that can be run
-                This currently is:
-                  TCP_STREAM, TCP_SENDFILE, TCP_RR, TCP_CRR, UDP_STREAM, UDP_RR
-test_time     - Specifies how long each iteration of the test should run for.
-stream_list   - A list containing the number of streams to run the test for. If
-                the list is [1,10,100] then the test will run 3 times. If
-                bidirectional is set then there will be the specified number of
-                bidirectional streams.
-cycles        - The number of times to run each test.
-"""
-
-from autotest_lib.server import utils
-
-
-def run(pair):
-    job.run_test('netperf2', pair=pair, test='TCP_STREAM', time=10,
-                 stream_list=[1], cycles=1)
-
-# grab the pairs (and failures)
-(pairs, failures) = utils.form_ntuples_from_machines(machines, 2)
-
-# log the failures
-for failure in failures:
-    job.record("FAIL", failure[0], "netperf2", failure[1])
-
-# now run through each pair and run
-job.parallel_simple(run, pairs, log=False)
diff --git a/server/tests/netperf2/control.stress.srv b/server/tests/netperf2/control.stress.srv
deleted file mode 100644
index 33961cf..0000000
--- a/server/tests/netperf2/control.stress.srv
+++ /dev/null
@@ -1,49 +0,0 @@
-AUTHOR = "kdlucas@google.com (Kelly Lucas)"
-TIME = "MEDIUM"
-NAME = "Netperf Stress"
-TEST_CATEGORY = "Stress"
-TEST_CLASS = 'Network'
-TEST_TYPE = "Server"
-SYNC_COUNT = 2
-DOC = """
-netperf_stress is a 2 machine test (server/client) that measures the performance
-of various network attributes. This test will cycle through the various types
-of supported tests and streams, and will take about 1 hour to run. 
-You can adjust the streams by changing the values of the streams list in the
-run function.
-
-Arguments to run_test:
-
-test          - the list of valid netperf tests that can be run
-                This currently is:
-                  TCP_STREAM, TCP_SENDFILE, TCP_RR, TCP_CRR, UDP_STREAM, UDP_RR
-test_time     - Specifies how long each iteration of the test should run for.
-stream_list   - A list containing the number of streams to run the test for. If
-                the list is [1,10,100] then the test will run 3 times. If
-                bidirectional is set then there will be the specified number of
-                bidirectional streams.
-"""
-
-from autotest_lib.server import utils
-
-
-def run(pair):
-    """
-    Run netperf with various parameter settings.
-    """
-    streams = [1, 10, 20, 40, 60, 80, 100, 200]
-    for t in ['TCP_STREAM', 'TCP_MAERTS', 'TCP_RR', 'TCP_CRR', 'UDP_RR']:
-        tag = 'netprerf2' + t
-        job.run_test('netperf2', tag=tag, pair=pair, test=t, time=60,
-                     stream_list=streams, cycles=1)
-
-
-# grab the pairs (and failures)
-(pairs, failures) = utils.form_ntuples_from_machines(machines, 2)
-
-# log the failures
-for failure in failures:
-    job.record("FAIL", failure[0], "netperf2", failure[1])
-
-# now run through each pair and run
-job.parallel_simple(run, pairs, log=False)
diff --git a/server/tests/netperf2/netperf2.py b/server/tests/netperf2/netperf2.py
deleted file mode 100644
index cce439b..0000000
--- a/server/tests/netperf2/netperf2.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Lint as: python2, python3
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from autotest_lib.server import autotest, hosts, subcommand, test
-from autotest_lib.server import utils
-
-class netperf2(test.test):
-    version = 2
-
-    def run_once(self, pair, test, time, stream_list, cycles):
-        print("running on %s and %s\n" % (pair[0], pair[1]))
-
-        # Designate a label for the server side tests.
-        server_label = 'net_server'
-
-        server = hosts.create_host(pair[0])
-        client = hosts.create_host(pair[1])
-
-        # If client has the server_label, then swap server and client.
-        platform_label = client.get_platform_label()
-        if platform_label == server_label:
-            (server, client) = (client, server)
-
-
-        # Disable IPFilters if they are enabled.
-        for m in [client, server]:
-            status = m.run('/sbin/iptables -L')
-            if not status.exit_status:
-                m.disable_ipfilters()
-
-        # Starting a test indents the status.log entries. This test starts 2
-        # additional tests causing their log entries to be indented twice. This
-        # double indent confuses the parser, so reset the indent level on the
-        # job, let the forked tests record their entries, then restore the
-        # previous indent level.
-        self.job._indenter.decrement()
-
-        server_at = autotest.Autotest(server)
-        client_at = autotest.Autotest(client)
-
-        template = ''.join(["job.run_test('netperf2', server_ip='%s', ",
-                            "client_ip='%s', role='%s', test='%s', ",
-                            "test_time=%d, stream_list=%s, tag='%s', ",
-                            "iterations=%d)"])
-
-        server_control_file = template % (server.ip, client.ip, 'server', test,
-                                          time, stream_list, 'server', cycles)
-        client_control_file = template % (server.ip, client.ip, 'client', test,
-                                          time, stream_list, 'client', cycles)
-
-        server_command = subcommand.subcommand(server_at.run,
-                                    [server_control_file, server.hostname],
-                                    subdir='../')
-        client_command = subcommand.subcommand(client_at.run,
-                                    [client_control_file, client.hostname],
-                                    subdir='../')
-
-        subcommand.parallel([server_command, client_command])
-
-        # The parser needs a keyval file to know what host ran the test.
-        utils.write_keyval('../' + server.hostname,
-                           {"hostname": server.hostname})
-        utils.write_keyval('../' + client.hostname,
-                           {"hostname": client.hostname})
-
-        # Restore indent level of main job.
-        self.job._indenter.increment()
-
-        for m in [client, server]:
-            status = m.run('/sbin/iptables -L')
-            if not status.exit_status:
-                m.enable_ipfilters()
diff --git a/server/tests/netpipe/control.srv b/server/tests/netpipe/_control.srv
similarity index 100%
rename from server/tests/netpipe/control.srv
rename to server/tests/netpipe/_control.srv
diff --git a/server/tests/netpipe/control.stress.srv b/server/tests/netpipe/_control.stress.srv
similarity index 100%
rename from server/tests/netpipe/control.stress.srv
rename to server/tests/netpipe/_control.stress.srv
diff --git a/server/tests/sleeptest/control b/server/tests/sleeptest/control
index fc7f8a2..9b281f6 100644
--- a/server/tests/sleeptest/control
+++ b/server/tests/sleeptest/control
@@ -4,7 +4,7 @@
 TEST_CLASS = 'Software'
 TEST_CATEGORY = 'Functional'
 TEST_TYPE = 'server'
-
+PY_VERSION = 3
 DOC = """
 runs sleep for one second on the list of machines.
 """
diff --git a/site_utils/add_detected_host_labels.py b/site_utils/add_detected_host_labels.py
index beb4a4c..2b46889 100755
--- a/site_utils/add_detected_host_labels.py
+++ b/site_utils/add_detected_host_labels.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/site_utils/admin/clean_staged_images.py b/site_utils/admin/clean_staged_images.py
index 3aeca1b..47aac77 100755
--- a/site_utils/admin/clean_staged_images.py
+++ b/site_utils/admin/clean_staged_images.py
@@ -1,11 +1,11 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Clean Staged Images.
 
-This script is responsible for removing older builds from the Chrome OS
+This script is responsible for removing older builds from the ChromeOS
 devserver. It walks through the files in the images folder, checks each
 staged.timestamp found, and does the following:
 1. Check if the build target is in the list of targets that need to keep the
diff --git a/site_utils/admin/prod_manifest.xml b/site_utils/admin/prod_manifest.xml
index 2a7697e..e6c5533 100644
--- a/site_utils/admin/prod_manifest.xml
+++ b/site_utils/admin/prod_manifest.xml
@@ -6,8 +6,8 @@
   <remote  name="cros-internal"
            fetch="https://chrome-internal-review.googlesource.com"
            review="chrome-internal-review.googlesource.com" />
-<!--  We track master branch now because we build Docker images and use those to track testing and release rather than the prod ref.-->
-  <default revision="refs/heads/master"
+<!--  We track 'main' branch now because we build Docker images and use those to track testing and release rather than the prod ref.-->
+  <default revision="refs/heads/main"
            remote="cros" />
 
   <project path="autotest"
diff --git a/site_utils/admin_audit/base.py b/site_utils/admin_audit/base.py
index b9db61f..450697d 100644
--- a/site_utils/admin_audit/base.py
+++ b/site_utils/admin_audit/base.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/site_utils/admin_audit/battery_validator.py b/site_utils/admin_audit/battery_validator.py
new file mode 100644
index 0000000..3989620
--- /dev/null
+++ b/site_utils/admin_audit/battery_validator.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Functional to validate RPM configs in the lab."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+import logging
+
+import common
+from autotest_lib.site_utils.admin_audit import constants
+
+
+class BatteryValidator(object):
+    """Battery validator provides capacity verification of battery on the host.
+
+    The state detection and set state as:
+    - NORMAL - battery capacity >= 70%
+    - ACCEPTABLE - battery capacity >= 40%
+    - NEED_REPLACEMENT - battery capacity < 40%
+    - UNKNOWN - logic cannot read data to specify the state
+    - NOT_DETECTED - battery is not present on the host
+    """
+    # Battery capacity levels
+    BATTER_NORMAL_LEVEL = 70
+    BATTER_ACCEPTABLE_LEVEL = 40
+
+    # Attempts to try read battery data
+    READ_DATA_RETRY_COUNT = 3
+
+    def __init__(self, host):
+        """Initialize the battery validator.
+
+        @param host: CrosHost instance.
+        """
+        self._host = host
+        self._battery_path = None
+        self.charge_full = 0
+        self.charge_full_design = 0
+
+    def _read_battery_path(self):
+        """Detect path to battery properties on the host."""
+        self._battery_path = None
+        info = self._host.get_power_supply_info()
+        if 'Battery' not in info:
+            logging.debug('Battery is not present but was expected!'
+                          ' In some cases this is possible.')
+            return None
+        self._battery_path = info['Battery']['path']
+        logging.info('Battery path: %s', self._battery_path)
+        return self._battery_path
+
+    def is_battery_expected(self):
+        """Verify if battery expected on the host based on host info."""
+        host_info = self._host.host_info_store.get()
+        return host_info.get_label_value('power') == 'battery'
+
+    def _read_data_from_host(self):
+        """Read data from the host."""
+
+        def read_val(file_name, field_type):
+            """Read a value from file."""
+            try:
+                path = os.path.join(self._battery_path, file_name)
+                out = self._host.run('cat %s' % path,
+                                     ignore_status=True).stdout.strip()
+                return field_type(out)
+            except:
+                return field_type(0)
+
+        self.charge_full = read_val('charge_full', float)
+        self.charge_full_design = read_val('charge_full_design', float)
+        cycle_count = read_val('cycle_count', int)
+        logging.debug('Battery cycle_count: %d', cycle_count)
+
+    def _validate_by_host(self):
+        """Validate battery by reading data from the host."""
+        logging.debug('Try to validate from host side.')
+        if self._host.is_up():
+            for _ in range(self.READ_DATA_RETRY_COUNT):
+                try:
+                    self._read_battery_path()
+                    if not self._battery_path:
+                        logging.info('Battery is not present/found on host')
+                        return self._update_host_info(
+                                constants.HW_STATE_NOT_DETECTED)
+                    self._read_data_from_host()
+                    return self._update_battery_state()
+                except Exception as e:
+                    logging.debug('(Not critical) %s', e)
+        return None
+
+    def _validate_by_servo(self):
+        """Validate battery by servo access."""
+        servo = self._host.servo
+        logging.debug('Try to validate from servo side.')
+        if servo:
+            for _ in range(self.READ_DATA_RETRY_COUNT):
+                try:
+                    if not servo.has_control('battery_full_charge_mah'):
+                        break
+                    self.charge_full = servo.get('battery_full_charge_mah')
+                    self.charge_full_design = servo.get(
+                            'battery_full_design_mah')
+                    return self._update_battery_state()
+                except Exception as e:
+                    logging.debug('(Not critical) %s', e)
+        return None
+
+    def validate(self):
+        """Validate battery and update state.
+
+        Try to validate from the host if the device is sshable; if not,
+        then try to read the battery info via servo.
+        """
+        logging.info('Starting battery validation.')
+        state = None
+        if not self.is_battery_expected():
+            state = self._update_host_info(constants.HW_STATE_NOT_DETECTED)
+        if not state:
+            state = self._validate_by_host()
+        if not state:
+            state = self._validate_by_servo()
+        if not state:
+            state = self._update_host_info(constants.HW_STATE_UNKNOWN)
+        return state
+
+    def _update_battery_state(self):
+        """Update battery state based on batter charging capacity
+
+        The logic will update state based on:
+            if capacity >= 70% then NORMAL
+            if capacity >= 40% then ACCEPTABLE
+            if capacity  < 40% then NEED_REPLACEMENT
+        """
+        if self.charge_full == 0:
+            logging.debug('charge_full is 0. Skip update battery_state!')
+            return
+        if self.charge_full_design == 0:
+            logging.debug('charge_full_design is 0.'
+                          ' Skip update battery_state!')
+            return
+        capacity = (100.0 * self.charge_full / self.charge_full_design)
+        logging.debug('Battery capacity: %d', capacity)
+
+        if capacity >= self.BATTER_NORMAL_LEVEL:
+            return self._update_host_info(constants.HW_STATE_NORMAL)
+        if capacity >= self.BATTER_ACCEPTABLE_LEVEL:
+            return self._update_host_info(constants.HW_STATE_ACCEPTABLE)
+        return self._update_host_info(constants.HW_STATE_NEED_REPLACEMENT)
+
+    def _update_host_info(self, state):
+        """Update state value to the battery_state in the host_info
+
+        @param state: new state value for the label
+        """
+        if self._host:
+            state_prefix = constants.BATTERY_STATE_PREFIX
+            host_info = self._host.host_info_store.get()
+            old_state = host_info.get_label_value(state_prefix)
+            host_info.set_version_label(state_prefix, state)
+            logging.info('Set %s as `%s` (previous: `%s`)', state_prefix,
+                         state, old_state)
+            self._host.host_info_store.commit(host_info)
+        return state
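For illustration, a minimal sketch of the capacity-to-state mapping that BatteryValidator applies above; the thresholds mirror BATTER_NORMAL_LEVEL and BATTER_ACCEPTABLE_LEVEL, and battery_state_from_charge is a hypothetical helper, not part of autotest.

def battery_state_from_charge(charge_full, charge_full_design,
                              normal_level=70, acceptable_level=40):
    """Sketch: map charge readings (mAh) to a state string."""
    if not charge_full or not charge_full_design:
        # Mirrors the validator: without valid readings no state is chosen.
        return None
    capacity = 100.0 * charge_full / charge_full_design
    if capacity >= normal_level:
        return 'NORMAL'
    if capacity >= acceptable_level:
        return 'ACCEPTABLE'
    return 'NEED_REPLACEMENT'

# Example: 3200 mAh full charge against a 4000 mAh design is 80% -> NORMAL.
assert battery_state_from_charge(3200.0, 4000.0) == 'NORMAL'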
diff --git a/site_utils/admin_audit/constants.py b/site_utils/admin_audit/constants.py
index 0bb746d..5980145 100644
--- a/site_utils/admin_audit/constants.py
+++ b/site_utils/admin_audit/constants.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -13,6 +13,7 @@
 # Labels for hardware parts
 DUT_STORAGE_STATE_PREFIX = 'storage_state'
 SERVO_USB_STATE_PREFIX = 'servo_usb_state'
+BATTERY_STATE_PREFIX = 'battery_state'
 
 # RPM states
 RPM_STATE_LABEL_PREFIX = 'rpm_state'
diff --git a/site_utils/admin_audit/main.py b/site_utils/admin_audit/main.py
index fcb7cac..05e6eb1 100755
--- a/site_utils/admin_audit/main.py
+++ b/site_utils/admin_audit/main.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -100,6 +100,7 @@
                 host_info_path=opts.host_info_file,
                 try_lab_servo=need_servod,
                 try_servo_repair=need_servod,
+                try_servo_recovery=need_servod,
                 servo_uart_logs_dir=servo_uart_logs_dir)
     except Exception as err:
         logging.error("fail to create host: %s", err)
diff --git a/site_utils/admin_audit/rpm_validator.py b/site_utils/admin_audit/rpm_validator.py
index 5453378..89af651 100644
--- a/site_utils/admin_audit/rpm_validator.py
+++ b/site_utils/admin_audit/rpm_validator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/site_utils/admin_audit/servo_updater.py b/site_utils/admin_audit/servo_updater.py
index f396557..27758f1 100644
--- a/site_utils/admin_audit/servo_updater.py
+++ b/site_utils/admin_audit/servo_updater.py
@@ -1,72 +1,85 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import os
 import logging
 
 import common
 from autotest_lib.client.common_lib import utils as client_utils
+from autotest_lib.server.cros.servo.topology import topology_constants
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = client_utils.metrics_mock
 
 
+class ServoFwVersionMissedError(Exception):
+    """Raised when Available version is not detected."""
+
+
+class ServoUpdaterError(Exception):
+    """Raised when detected issue with servo_updater."""
+
+
 class _BaseUpdateServoFw(object):
     """Base class to update firmware on servo"""
 
-    # Command to update servo device.
-    # param 1: servo board (servo_v4|servo_micro)
-    # param 2: serial number of main device on the board
-    UPDATER = 'servo_updater -b %s -s %s --reboot'
-    UPDATER_FORCE = UPDATER + ' --force'
+    # Commands to kill an active servo_updater that failed with a timeout.
+    ACTIVE_UPDATER_CORE = 'ps aux | grep -ie [s]ervo_updater |grep "%s" '
+    ACTIVE_UPDATER_PRINT = ACTIVE_UPDATER_CORE + "| awk '{print $2}' "
+    ACTIVE_UPDATER_KILL = ACTIVE_UPDATER_PRINT + "| xargs kill -9 "
 
-    # Command to read current version on the servo
-    # param 1: serial number of main device on the board
-    SERVO_VERSION = 'cat $(servodtool device -s %s usb-path)/configuration'
+    # Command to update FW for servo. Always reboot servo after update.
+    UPDATER_TAIL = '-b %s -s "%s" -c %s --reboot'
+    UPDATER_CMD = 'servo_updater ' + UPDATER_TAIL
+    UPDATER_CONTAINER_CMD = 'python /update_servo_firmware.py ' + UPDATER_TAIL
 
-    # Command to read servod config file with extracting value by key
-    # param 1: servo port, provided by servo config
-    # param 2: required parammeter (key) from config file
-    SERVOD_CONFIG = 'cat /var/lib/servod/config_%s | grep %s'
+    # Command to get servo firmware version for requested board and channel.
+    LATEST_VERSION_CMD = 'servo_updater -p -b "%s" -c %s | grep firmware'
 
-    # Command to get PATH to the latest available firmware on the host
-    # param 1: servo board (servo_v4|servo_micro)
-    LATEST_VERSION_FW = 'realpath /usr/share/servo_updater/firmware/%s.bin'
+    # Default firmware channel.
+    DEFAULT_FW_CHANNEL = 'stable'
 
-    # Command to get servo product supported by device
-    # param 1: serial number of main device on the board
-    SERVO_PRODUCT = 'cat $(servodtool device -s %s usb-path)/product'
+    def __init__(self, servo_host, device):
+        """Init servo-updater instance.
 
-    def __init__(self, servo_host):
-        self._host = servo_host
-        # keep flag that class support and can run updater
-        self._supported = None
-
-    def check_needs(self, ignore_version=False):
-        """Check if class supports update for particular servo type.
-
-        @params ignore_version: do not check the version on the device.
+        @params servo_host: ServoHost instance to run terminal commands
+        @params device:     ConnectedServo instance provided servo info
         """
-        if self._supported is None:
-            if not self._host:
-                self._supported = False
-            elif not self._host.is_labstation():
-                self._supported = False
-            elif not self._host.servo_serial:
-                self._supported = False
-            elif not self._check_needs():
-                self._supported = False
-            elif not ignore_version:
-                self._supported = self._is_outdated_version()
-            else:
-                self._supported = True
-        return self._supported
+        self._host = servo_host
+        self._device = device
 
-    def update(self, force_update=False, ignore_version=False):
+    def need_update(self, ignore_version=False, channel=None):
+        """Verify that servo_update is required.
+
+        @params ignore_version: Do not check the version on the device.
+        @params channel:        Channel for servo firmware. Supported from
+                                version R90. Possible values: stable, prev,
+                                dev, alpha.
+
+        @returns: True if update required, False if not
+        """
+        if not channel:
+            channel = self.DEFAULT_FW_CHANNEL
+        if not self._host:
+            logging.debug('Skip update as host is not provided.')
+            return False
+        elif not self.get_serial_number():
+            logging.debug('Skip update as servo serial is empty.')
+            return False
+        elif not (self._host.is_labstation()
+                  or self._host.is_containerized_servod()):
+            logging.debug('Skip as we only run from labstation and container.')
+            return False
+        elif not ignore_version:
+            if not self._is_outdated_version(channel=channel):
+                logging.debug('Skip as version is up to date.')
+                return False
+        return True
+
+    def update(self, force_update=False, ignore_version=False, channel=None):
         """Update firmware on the servo.
 
         Steps:
@@ -74,219 +87,333 @@
         2) Try to get serial number for the servo.
         3) Updating firmware.
 
-        @params force_update: run updater with force option.
-        @params ignore_version: do not check the version on the device.
+        @params force_update:   Run updater with force option.
+        @params ignore_version: Do not check the version on the device.
+        @params channel:        Channel for servo firmware. Supported from
+                                version R90. Possible values: stable, prev,
+                                dev, alpha.
         """
-        if not self.check_needs(ignore_version):
-            logging.info('The board %s does not need update or '
-                         'not present in the setup.', self.get_board())
+        if not channel:
+            channel = self.DEFAULT_FW_CHANNEL
+        if not self.need_update(ignore_version, channel=channel):
+            logging.info("The board %s doesn't need update.", self.get_board())
             return
         if not self.get_serial_number():
             logging.info('Serial number is not detected. It means no update'
                          ' will be performed on servo.')
             return
-        self._update_firmware(force_update)
-
-    def _check_needs(self):
-        """Check is servo type supported"""
-        raise NotImplementedError('Please implement method to perform'
-                                  ' check of supporting the servo type')
+        if self._device.get_type() != self.get_board():
+            logging.info('Attempt to use incorrect updater for %s. Expected: %s.',
+                         self._device.get_type(), self.get_board())
+            return
+        self._update_firmware(force_update, channel)
 
     def get_board(self):
-        """Return servo type supported by updater"""
+        """Return servo type supported by updater."""
         raise NotImplementedError('Please implement method to return'
                                   ' servo type')
 
-    def get_serial_number(self):
-        """Return serial number for main servo device on servo"""
-        raise NotImplementedError('Please implement method to return'
-                                  ' serial number')
+    def get_device(self):
+        """Return ConnectedServo instance"""
+        return self._device
 
-    def _get_updater_cmd(self, force_update):
+    def get_serial_number(self):
+        """Return serial number for servo device"""
+        return self._device.get_serial_number()
+
+    def _get_updater_cmd(self, force_update, channel):
         """Return command to run firmware updater for the servo device.
 
-        @params force_update: run updater with force option.
+        @params force_update:   Run updater with force option.
+        @params channel:        Channel for servo firmware.
         """
+        if self._host.is_containerized_servod():
+            cmd = self.UPDATER_CONTAINER_CMD
+        else:
+            cmd = self.UPDATER_CMD
         board = self.get_board()
         serial_number = self.get_serial_number()
+        cmd = cmd % (board, serial_number, channel.lower())
         if force_update:
-            cmd = self.UPDATER_FORCE
-        else:
-            cmd = self.UPDATER
-        return cmd % (board, serial_number)
+            cmd += ' --force '
+        return cmd
 
-    def _update_firmware(self, force_update):
+    def _update_firmware(self, force_update, channel):
         """Execute firmware updater command.
 
-        Method generate a metric to collect statistics of update.
-        @params force_update: run updater with force option.
+        @params force_update:   Run updater with force option.
+        @params channel:        Update firmware from the specified channel.
         """
-        cmd = self._get_updater_cmd(force_update)
-        logging.info('Servo fw update: %s', cmd)
-        result = self._host.run(cmd, ignore_status=True).stdout.strip()
-        logging.debug('Servo fw update finished; %s', result)
-        logging.info('Servo fw update finished')
-        metrics.Counter(
-            'chromeos/autotest/audit/servo/fw_update'
-            ).increment(fields={'status': 'success'})
+        cmd = self._get_updater_cmd(force_update, channel)
+        logging.info('Try to update servo fw by running: %s', cmd)
+        try:
+            res = self._host.run(cmd, timeout=120)
+            logging.debug('Servo fw update finished; %s', res.stdout.strip())
+            logging.info('Servo fw update finished')
+        finally:
+            self._kill_active_update_process()
 
-    def _get_config_value(self, key):
-        """Read configuration value by provided key.
-
-        @param key: key from key=value pair in config file.
-                    eg: 'HUB' or 'SERVO_MICRO_SERIAL'
-        """
-        """Read value from servod config file"""
-        cmd = self.SERVOD_CONFIG % (self._host.servo_port, key)
-        result = self._host.run(cmd, ignore_status=True).stdout.strip()
-        if result:
-            return result[len(key)+1:]
-        return None
+    def _kill_active_update_process(self):
+        """Kill active servo_update processes when stuck after attempt."""
+        try:
+            cmd = self.ACTIVE_UPDATER_KILL % self.get_serial_number()
+            self._host.run(cmd, timeout=30, ignore_status=True)
+        except Exception as e:
+            logging.debug('Fail to kill active processes; %s', e)
 
     def _current_version(self):
         """Get current version on servo device"""
-        cmd = self.SERVO_VERSION % self.get_serial_number()
-        version = self._host.run(cmd, ignore_status=True).stdout.strip()
-        logging.debug('Current version: %s', version)
-        return version
+        return self._device.get_version()
 
-    def _latest_version(self):
-        """Get latest version available on servo-host"""
-        cmd = self.LATEST_VERSION_FW % self.get_board()
-        filepath = self._host.run(cmd, ignore_status=True).stdout.strip()
-        if not filepath:
-            return None
-        version = os.path.basename(os.path.splitext(filepath)[0]).strip()
-        logging.debug('Latest version: %s', version)
-        return version
+    def _latest_version(self, channel):
+        """Get latest available version from servo_updater.
 
-    def _is_outdated_version(self):
+        @params channel: Firmware channel to read the latest version from.
+        """
+        cmd = self.LATEST_VERSION_CMD % (self.get_board(), channel.lower())
+        re = self._host.run(cmd, ignore_status=True)
+        if re.exit_status == 0:
+            result = re.stdout.strip().split(':')
+            if len(result) == 2:
+                return result[-1].strip()
+        return None
+
+    def _is_outdated_version(self, channel):
         """Compare version to determine request to update the Servo or not.
 
-        Method generate metrics to collect statistics with version.
+        @params channel: Firmware channel to compare the current version against.
         """
         current_version = self._current_version()
-        latest_version = self._latest_version()
-        if not current_version or not latest_version:
+        logging.debug('Servo fw on the device: "%s"', current_version)
+        latest_version = self._latest_version(channel)
+        logging.debug('Latest servo fw: "%s"', latest_version)
+        if not current_version:
             return True
+        if not latest_version:
+            raise ServoFwVersionMissedError()
         if current_version == latest_version:
             return False
-        metrics.Counter(
-            'chromeos/autotest/audit/servo/fw_need_update'
-            ).increment(fields={'version': current_version})
         return True
 
-    def _get_product(self):
-        """Get servo product from servo device"""
-        cmd = self.SERVO_PRODUCT % self.get_serial_number()
-        return self._host.run(cmd, ignore_status=True).stdout.strip()
-
 
 class UpdateServoV4Fw(_BaseUpdateServoFw):
-    """Servo firmware updater for servo_v4 version.
+    """Servo firmware updater for servo_v4."""
 
-    Update firmware will be only if new version present and servo
-    was not updated.
-    """
     def get_board(self):
         """Return servo type supported by updater"""
-        return 'servo_v4'
+        return topology_constants.ST_V4_TYPE
 
-    def get_serial_number(self):
-        # serial number of servo_v4 match with device number
-        return self._host.servo_serial
 
-    def _check_needs(self):
-        """Check if servo is servo_v4.
+class UpdateServoV4p1Fw(_BaseUpdateServoFw):
+    """Servo firmware updater for servo_v4p1."""
 
-        Check servo type.
-        Check access to the serial number.
-        """
-        if self._get_product() != 'Servo V4':
-            return False
-        if not self.get_serial_number():
-            return False
-        return True
+    def get_board(self):
+        """Return servo type supported by updater"""
+        return topology_constants.ST_V4P1_TYPE
 
 
 class UpdateServoMicroFw(_BaseUpdateServoFw):
-    """Servo firmware updater for servo_micro version.
-
-    Update firmware will be only if new version present and servo
-    was not updated.
-    """
-    def __init__(self, servo_host):
-        super(UpdateServoMicroFw, self).__init__(servo_host)
-        self._serial_number = None
+    """Servo firmware updater for servo_micro."""
 
     def get_board(self):
         """Return servo type supported by updater"""
-        return 'servo_micro'
-
-    def get_serial_number(self):
-        # serial number of servo_v4 match with device number
-        if self._serial_number is None:
-            # servo_micro serial number is not match to serial on
-            # the servo device servod is keeping it in config file
-            serial = self._get_config_value('SERVO_MICRO_SERIAL')
-            self._serial_number = serial if serial is not None else ''
-        return self._serial_number
-
-    def _check_needs(self):
-        """Check if servo is servo_micro.
-
-        Check servo type.
-        Check access to the serial number.
-        """
-        if not self.get_serial_number():
-            # set does not include servo_micro
-            return False
-        if self._get_product() != 'Servo Micro':
-            return False
-        return True
+        return topology_constants.ST_SERVO_MICRO_TYPE
 
 
-# List servo firmware updaters
-SERVO_UPDATERS = (
-    UpdateServoV4Fw,
-    UpdateServoMicroFw,
-)
+class UpdateC2D2Fw(_BaseUpdateServoFw):
+    """Servo firmware updater for c2d2."""
+
+    def get_board(self):
+        """Return servo type supported by updater"""
+        return topology_constants.ST_C2D2_TYPE
+
+
+class UpdateSweetberryFw(_BaseUpdateServoFw):
+    """Servo firmware updater for sweetberry."""
+
+    def get_board(self):
+        """Return servo type supported by updater"""
+        return topology_constants.ST_SWEETBERRY_TYPE
+
+
+# List servo firmware updaters mapped to the type
+SERVO_UPDATERS = {
+        topology_constants.ST_V4_TYPE: UpdateServoV4Fw,
+        topology_constants.ST_V4P1_TYPE: UpdateServoV4p1Fw,
+        topology_constants.ST_SERVO_MICRO_TYPE: UpdateServoMicroFw,
+        topology_constants.ST_C2D2_TYPE: UpdateC2D2Fw,
+        topology_constants.ST_SWEETBERRY_TYPE: UpdateSweetberryFw,
+}
+
+# Known error messages that indicate an issue with servo_updater.
+SERVO_UPDATER_ISSUE_MSGS = ('Configuration not set', )
+
+
+def _run_update_attempt(updater, topology, try_count, force_update,
+                        ignore_version, channel):
+    """Run servo update attempt.
+
+    @params updater:        Servo updater instance.
+    @params topology:       ServoTopology instance to update version.
+    @params try_count:      Count of attempt to run update.
+    @params force_update:   Run updater with force option.
+    @params ignore_version: Do not check the version on the device.
+    @params channel:        Request servo firmware from special channel
+
+    @returns:   True if finished without any error, False otherwise.
+    """
+    board = updater.get_board()
+    success = False
+    for a in range(try_count):
+        msg = 'Starting attempt: %d (of %d) to update "%s".'
+        if force_update:
+            msg += ' with force'
+        logging.info(msg, a + 1, try_count, board)
+        try:
+            updater.update(force_update=force_update,
+                           ignore_version=ignore_version,
+                           channel=channel)
+            topology.update_servo_version(updater.get_device())
+            if not updater.need_update(ignore_version=ignore_version,
+                                       channel=channel):
+                success = True
+        except Exception as er:
+            error_message = str(er)
+            logging.debug('(Not critical) fail to update %s; %s', board,
+                          error_message)
+            for message in SERVO_UPDATER_ISSUE_MSGS:
+                if message in error_message:
+                    raise ServoUpdaterError()
+        if success:
+            break
+    return success
+
+
+def any_servo_needs_firmware_update(host):
+    """Verify if any servo requires firmware update.
+
+    @params host:   ServoHost instance to run required commands
+                    and access to topology.
+    @returns:       True if any servo requires an update.
+    """
+    if not host:
+        raise ValueError('ServoHost is not provided.')
+
+    has_servo_requires_update = False
+    for device in host.get_topology().get_list_of_devices():
+        # Verify that device can provide serial and servo_type.
+        if not device.is_good():
+            continue
+        board = device.get_type()
+        updater_type = SERVO_UPDATERS.get(board, None)
+        if not updater_type:
+            logging.debug('No specified updater for %s', board)
+            continue
+        logging.debug('Specified updater found for %s', board)
+        # Create the updater instance.
+        updater = updater_type(host, device)
+        if updater.need_update(ignore_version=False,
+                               channel=host.servo_fw_channel):
+            logging.info('The servo: %s requires firmware update!', board)
+            has_servo_requires_update = True
+        else:
+            logging.info('The servo: %s does not require firmware update!',
+                         board)
+    return has_servo_requires_update
 
 
 def update_servo_firmware(host,
                           boards=None,
+                          try_attempt_count=1,
                           force_update=False,
+                          try_force_update=False,
                           ignore_version=False):
     """Update firmware on servo devices.
 
-    @params host: ServoHost instance to run all required commands.
-    @params force_update: run updater with force option.
-    @params ignore_version: do not check the version on the device.
+    @params host:               ServoHost instance to run required commands
+                                and access to topology.
+    @params try_attempt_count:  Count of attempts to update servo. For the
+                                force option the attempt count is always 1.
+    @params try_force_update:   Try the force option if the update fails in
+                                normal mode.
+    @params force_update:       Run updater with force option. Override
+                                try_force_update option.
+    @params ignore_version:     Do not check the version on the device.
+
+    @returns:                   True if all servos are updated or do not need
+                                an update, False if any device could not be
+                                updated.
     """
     if boards is None:
         boards = []
     if ignore_version:
-        logging.debug('Running servo_updater with ignore_version=True')
+        logging.info('Running servo_updater with ignore_version=True')
 
+    if not host:
+        raise ValueError('ServoHost is not provided.')
+
+    # Use force option as first attempt
+    use_force_option_as_first_attempt = False
+    # If requested to update with force then the first attempt will be with
+    # force and there is no second attempt.
+    if force_update:
+        try_attempt_count = 1
+        try_force_update = False
+        use_force_option_as_first_attempt = True
     # to run updater we need make sure the servod is not running
     host.stop_servod()
-    # initialize all updaters
-    updaters = [updater(host) for updater in SERVO_UPDATERS]
+    if host.is_containerized_servod():
+        # Start the container, as servo_updater is located in it.
+        # Start without servod, as it can block access to the servos.
+        host.start_containerized_servod(with_servod=False)
 
-    for updater in updaters:
-        board = updater.get_board()
+    # Collection to track which boards failed to update.
+    fail_boards = []
+
+    servo_topology = host.get_topology()
+    # Get the list of connected servos.
+    for device in servo_topology.get_list_of_devices():
+        # Verify that device can provide serial and servo_type.
+        if not device.is_good():
+            continue
+        board = device.get_type()
         if len(boards) > 0 and board not in boards:
             logging.info('The %s is not requested for update', board)
             continue
-        logging.info('Try to update board: %s', board)
-        try:
-            updater.update(force_update=force_update,
-                           ignore_version=ignore_version)
-        except Exception as e:
-            data = {'host': host.get_dut_hostname() or '',
-                    'board': board}
-            metrics.Counter(
-                'chromeos/autotest/audit/servo/fw/update/error'
-                ).increment(fields=data)
+        updater_type = SERVO_UPDATERS.get(board, None)
+        if not updater_type:
+            logging.info('No specified updater for %s', board)
+            continue
+        # Create the updater instance.
+        updater = updater_type(host, device)
+        is_success_update = _run_update_attempt(
+                updater=updater,
+                topology=servo_topology,
+                try_count=try_attempt_count,
+                force_update=use_force_option_as_first_attempt,
+                ignore_version=ignore_version,
+                channel=host.servo_fw_channel)
+        # If the update fails and we were requested to try the force option,
+        # then run a second time with force.
+        if not is_success_update and try_force_update:
+            is_success_update = _run_update_attempt(
+                    updater=updater,
+                    topology=servo_topology,
+                    try_count=1,
+                    force_update=True,
+                    ignore_version=ignore_version,
+                    channel=host.servo_fw_channel)
+        if not is_success_update:
             logging.info('Fail update firmware for %s', board)
-            logging.debug('Fail update firmware for %s: %s', board, str(e))
+            hostname = host.get_dut_hostname() or host.hostname
+            metrics.Counter('chromeos/autotest/servo/fw_update_fail'
+                            ).increment(fields={'host': hostname})
+            fail_boards.append(board)
+
+    # Need to stop the container (started above without servod).
+    if host.is_containerized_servod():
+        host.stop_servod()
+
+    if len(fail_boards) == 0:
+        logging.info('Successfully updated all requested servos.')
+        return True
+    return False
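As a rough illustration of the retry-then-force flow in update_servo_firmware above: normal attempts run up to try_attempt_count times, a force request collapses everything to a single forced attempt, and try_force_update adds one forced fallback. run_attempt below is a hypothetical stand-in for _run_update_attempt, not an autotest API.

def update_with_retries(run_attempt, try_attempt_count=1,
                        force_update=False, try_force_update=False):
    """Return True if any attempt succeeded (sketch only)."""
    if force_update:
        # A forced update is a single attempt and overrides try_force_update.
        try_attempt_count, try_force_update = 1, False
    success = any(run_attempt(force=force_update)
                  for _ in range(try_attempt_count))
    if not success and try_force_update:
        # One extra fallback attempt with the force option.
        success = run_attempt(force=True)
    return success

# Example: the first normal attempt fails, the forced fallback succeeds.
attempts = iter([False, True])
print(update_with_retries(lambda force: next(attempts),
                          try_attempt_count=1, try_force_update=True))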
diff --git a/site_utils/admin_audit/verifiers.py b/site_utils/admin_audit/verifiers.py
index b055ff2..abc91d3 100644
--- a/site_utils/admin_audit/verifiers.py
+++ b/site_utils/admin_audit/verifiers.py
@@ -1,26 +1,24 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import logging
 
-import common
-import base
-import constants
-import servo_updater
-import time
-import os
-import re
 
+import common
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import utils as client_utils
 from autotest_lib.server.cros.storage import storage_validate as storage
-from autotest_lib.server.cros import servo_keyboard_utils
+from autotest_lib.server.cros.servo.keyboard import servo_keyboard_flasher
+from autotest_lib.server.cros.repair import mac_address_helper
+from autotest_lib.site_utils.admin_audit import base
+from autotest_lib.site_utils.admin_audit import constants
 from autotest_lib.site_utils.admin_audit import rpm_validator
+from autotest_lib.site_utils.admin_audit import servo_updater
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = client_utils.metrics_mock
 
@@ -256,99 +254,16 @@
 class FlashServoKeyboardMapVerifier(base._BaseDUTVerifier):
     """Flash the keyboard map on servo."""
 
-    _ATMEGA_RESET_DELAY = 0.2
-    _USB_PRESENT_DELAY = 1
-
-    # Command to detect LUFA Keyboard Demo by VID.
-    LSUSB_CMD = 'lsusb -d %s:' % servo_keyboard_utils.ATMEL_USB_VENDOR_ID
-
     def _verify(self):
         if not self.host_is_up():
-            logging.info('Host is down; Skipping the action')
-            return
+            raise base.AuditError('Host is down')
         if not self.servo_is_up():
-            logging.info('Servo not initialized; Skipping the action')
-            return
+            raise base.AuditError('Servo not initialized')
 
         host = self.get_host()
-        servo = host.servo
-        try:
-            logging.info('Starting flashing the keyboard map.')
-            status = self._flash_keyboard_map(host, servo)
-            logging.info('Set status: %s', status)
-            if status == STATUS_FAIL:
-                self._send_metrics()
-        except Exception as e:
-            # The possible errors is timeout of commands.
-            logging.debug('Failed to flash servo keyboard map; %s', e)
-            self._send_metrics()
-        finally:
-            # Restore the default settings.
-            # Select the chip on the USB mux unless using Servo V4
-            if 'servo_v4' not in servo.get_servo_version():
-                servo.set('usb_mux_sel4', 'on')
-
-    def _flash_keyboard_map(self, host, servo):
-        if host.run('hash dfu-programmer', ignore_status=True).exit_status:
-            logging.info(
-                'The image is too old that does not have dfu-programmer.')
-            return STATUS_SKIPPED
-
-        servo.set_nocheck('init_usb_keyboard', 'on')
-
-        if self._is_keyboard_present(host):
-            logging.info('Already using the new keyboard map.')
-            return STATUS_SUCCESS
-
-        # Boot AVR into DFU mode by enabling the HardWareBoot mode
-        # strapping and reset.
-        servo.set_get_all(['at_hwb:on',
-                            'atmega_rst:on',
-                            'sleep:%f' % self._ATMEGA_RESET_DELAY,
-                            'atmega_rst:off',
-                            'sleep:%f' % self._ATMEGA_RESET_DELAY,
-                            'at_hwb:off'])
-
-        result = host.run(self.LSUSB_CMD, timeout=30).stdout.strip()
-        if not 'Atmel Corp. atmega32u4 DFU bootloader' in result:
-            logging.info('Not an expected chip: %s', result)
-            return STATUS_FAIL
-
-        # Update the keyboard map.
-        bindir = os.path.dirname(os.path.realpath(__file__))
-        local_path = os.path.join(bindir, 'data', 'keyboard.hex')
-        host.send_file(local_path, '/tmp')
-        logging.info('Updating the keyboard map...')
-        host.run('dfu-programmer atmega32u4 erase --force', timeout=120)
-        host.run('dfu-programmer atmega32u4 flash /tmp/keyboard.hex',
-                 timeout=120)
-
-        # Reset the chip.
-        servo.set_get_all(['atmega_rst:on',
-                            'sleep:%f' % self._ATMEGA_RESET_DELAY,
-                            'atmega_rst:off'])
-        if self._is_keyboard_present(host):
-            logging.info('Update successfully!')
-            return STATUS_SUCCESS
-
-        logging.info('Update failed!')
-        return STATUS_FAIL
-
-    def _is_keyboard_present(self, host):
-        # Check the result of lsusb.
-        time.sleep(self._USB_PRESENT_DELAY)
-        result = host.run(self.LSUSB_CMD, timeout=30).stdout.strip()
-        logging.info('got the result: %s', result)
-        if ('LUFA Keyboard Demo' in result and
-            servo_keyboard_utils.is_servo_usb_wake_capable(host)):
-            return True
-        return False
-
-    def _send_metrics(self):
-        host = self.get_host()
-        data = {'host': host.hostname, 'status': STATUS_FAIL}
-        metrics.Counter(
-            'chromeos/autotest/audit/servo_keyboard').increment(fields=data)
+        flasher = servo_keyboard_flasher.ServoKeyboardMapFlasher()
+        if flasher.is_image_supported(host):
+            flasher.update(host)
 
 
 class VerifyDUTMacAddress(base._BaseDUTVerifier):
@@ -358,157 +273,11 @@
     address on servod side to better debugging.
     """
 
-    # HUB and NIC VID/PID.
-    # Values presented as the string of the hex without 0x to match
-    # representation in sysfs (idVendor/idProduct).
-    HUB_VID = '04b4'
-    HUB_PID = '6502'
-    NIC_VID = '0bda'
-    NIC_PID = '8153'
-
-    # Regex to check mac address format.
-    # eg: f4:f5:e8:50:e9:45
-    RE_MACADDR = re.compile('^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$')
-
     def _verify(self):
         if not self.host_is_up():
-            logging.info('Host is down; Skipping the action')
-            return
+            raise base.AuditError('Host is down.')
         if not self.servo_is_up():
-            logging.info('Servo host is down; Skipping the action')
-            return
-        host = self.get_host()
-        servo = host.servo
-        if not host._servo_host.is_labstation():
-            logging.info('Only servo_v4 has NIC; '
-                         'Skipping the action')
-            return
-        if not servo.has_control('macaddr'):
-            logging.info('"macaddr" control not supported;'
-                         'Skipping the action')
-            return
+            raise base.AuditError('Servo host is down.')
 
-        # Path to the NIC has to be located in the HUB.
-        # eg.
-        # HUB: /sys/bus/usb/devices/1-1
-        # NIC: /sys/bus/usb/devices/1-1.1
-        hub_path = self._get_device_path(None, self.HUB_VID, self.HUB_PID)
-        if not hub_path or hub_path == '.':
-            logging.info('The servo_v4 HUB not detected from DUT')
-            self._send_metrics()
-            return
-        logging.info('Path to the servo_v4 HUB device: %s', hub_path)
-        nic_path = self._get_device_path(hub_path, self.NIC_VID, self.NIC_PID)
-        if not nic_path or nic_path == '.':
-            logging.info('The servo_v4 NIC not detected in HUB folder')
-            self._send_metrics()
-            return
-        logging.info('Path to the servo_v4 NIC device: %s', nic_path)
-        if hub_path == nic_path or not nic_path.startswith(hub_path):
-            logging.info('The servo_v4 NIC was detect out of servo_v4 HUB;'
-                         ' Skipping the action.')
-            self._send_metrics()
-            return
-
-        macaddr = self._get_mac_address(host, nic_path)
-        if not macaddr:
-            self._send_metrics()
-            return
-
-        cached_mac = self._get_cached_mac_address()
-        if not cached_mac or macaddr != cached_mac:
-            try:
-                servo.set('macaddr', macaddr)
-                logging.info('Successfully updated the servo "macaddr"!')
-            except error.TestFail as e:
-                logging.debug('Fail to update macaddr value; %s', e)
-                logging.info('Fail to update the "macaddr" value!')
-                self._send_metrics()
-        else:
-            logging.info('The servo "macaddr" doe not need update.')
-
-    def _get_cached_mac_address(self):
-        try:
-            return self.get_host().servo.get('macaddr')
-        except error.TestFail as e:
-            logging.error('(Non-critical) Fail to get macaddr: %s', e)
-            return None
-
-    def _get_mac_address(self, host, nic_path):
-        cmd = r'find %s/ | grep /net/ | grep /address' % nic_path
-        res = host.run(cmd,
-                       timeout=30,
-                       ignore_status=True,
-                       ignore_timeout=True)
-        if not res:
-            logging.info('Timeout during retriving NIC address files.')
-            return None
-        addrs = res.stdout.splitlines()
-        if not addrs or len(addrs) == 0:
-            logging.info('No NIC address file found.')
-            return None
-        if len(addrs) > 1:
-            logging.info('More than one NIC address file found.')
-            return None
-        logging.info('Found NIC address file: %s', addrs[0])
-        cmd = r'cat %s' % addrs[0]
-        res = host.run(cmd,
-                       timeout=30,
-                       ignore_status=True,
-                       ignore_timeout=True)
-        if not res:
-            logging.info('Timeout during attemp read NIC address file: %s',
-                         addrs[0])
-            return None
-        mac_addr = res.stdout.strip()
-        if not self.RE_MACADDR.match(mac_addr):
-            logging.info('incorrect format of the mac address: %s', mac_addr)
-            return None
-        logging.info('Servo_v4 NIC mac address from DUT side: %s', mac_addr)
-        return mac_addr
-
-    def _get_device_path(self, base_path, vid, pid):
-        """Find a device by VID/PID under particular path.
-
-        1) Get path to the unique idVendor file with VID
-        2) Get path to the unique idProduct file with PID
-        3) Get directions of both file and compare them
-
-        @param base_path:   Path to the directory where to look for the device.
-        @param vid:         Vendor ID of the looking device.
-        @param pid:         Product ID of the looking device.
-
-        @returns: path to the folder of the device
-        """
-        host = self.get_host()
-        def _run(cmd):
-            res = host.run(cmd, timeout=30,
-                           ignore_status=True,
-                           ignore_timeout=True)
-            l = res.stdout.splitlines()
-            if not l or len(l) != 1:
-                return None
-            return l[0]
-
-        if not base_path:
-            base_path = '/sys/bus/usb/devices/*/'
-        else:
-            base_path += '*/'
-        cmd_template = 'grep -l %s $(find %s -maxdepth 1 -name %s)'
-        vid_path = _run(cmd_template % (vid, base_path, 'idVendor'))
-        if not vid_path:
-            return None
-
-        pid_path = _run(cmd_template % (pid, base_path, 'idProduct'))
-        if not pid_path:
-            return None
-
-        # check if both files locates in the same folder
-        return _run('LC_ALL=C comm -12 <(dirname %s) <(dirname %s)' %
-                    (vid_path, pid_path))
-
-    def _send_metrics(self):
-        host = self.get_host()
-        data = {'host': host.hostname, 'status': STATUS_FAIL}
-        metrics.Counter(
-            'chromeos/autotest/audit/servo_macaddr').increment(fields=data)
+        helper = mac_address_helper.MacAddressHelper()
+        helper.update_if_needed(self.get_host())
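Both verifiers above now reduce to precondition checks followed by delegation to a helper class; the sketch below shows that shape with simplified stand-ins (AuditError, the fake host, and the fake helper are illustrative, not the autotest classes referenced in the diff).

class AuditError(Exception):
    """Stand-in for base.AuditError: raised when a verifier cannot run."""


class DelegatingVerifier(object):
    """Sketch of the check-preconditions-then-delegate pattern."""

    def __init__(self, host, helper):
        self._host = host
        self._helper = helper

    def verify(self):
        if not self._host.is_up():
            raise AuditError('Host is down.')
        # Device-specific work lives in the helper; the verifier keeps only
        # the precondition checks.
        self._helper.update_if_needed(self._host)


class _FakeHost(object):
    def is_up(self):
        return True


class _FakeHelper(object):
    def update_if_needed(self, host):
        print('helper ran')


DelegatingVerifier(_FakeHost(), _FakeHelper()).verify()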
diff --git a/site_utils/admin_audit/verifiers_unittest.py b/site_utils/admin_audit/verifiers_unittest.py
index 3d36b61..0d82f90 100644
--- a/site_utils/admin_audit/verifiers_unittest.py
+++ b/site_utils/admin_audit/verifiers_unittest.py
@@ -1,13 +1,13 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import mock
 import unittest
+from unittest import mock
 
 import common
-import constants
+from autotest_lib.site_utils.admin_audit import constants
 from autotest_lib.site_utils.admin_audit import verifiers
 from autotest_lib.server.cros.storage import storage_validate as storage
 
@@ -49,4 +49,4 @@
 
 
 if __name__ == '__main__':
-    unittest.main()
\ No newline at end of file
+    unittest.main()
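The unit-test hunk above moves from the standalone Python 2 mock package to the stdlib unittest.mock that ships with Python 3; a minimal, self-contained example of the same import pattern (the test itself is illustrative, not taken from autotest).

import unittest
from unittest import mock  # stdlib replacement for the separate py2 'mock' package


class ExampleTest(unittest.TestCase):
    def test_patched_call(self):
        fake = mock.Mock(return_value=42)
        self.assertEqual(fake(), 42)
        fake.assert_called_once_with()


if __name__ == '__main__':
    unittest.main()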
diff --git a/site_utils/attribute_allowlist.txt b/site_utils/attribute_allowlist.txt
index 5962a11..0fee9c5 100644
--- a/site_utils/attribute_allowlist.txt
+++ b/site_utils/attribute_allowlist.txt
@@ -4,17 +4,25 @@
 suite:android_wifi_interop
 suite:another_suite
 suite:appcompat
+suite:appcompat_release
+suite:appcompat_smoke
 suite:arc-cts
 suite:arc-cts-camera
 suite:arc-cts-deqp
+suite:arc-cts-hardware
 suite:arc-cts-qual
 suite:arc-cts-r
+suite:arc-cts-t
 suite:arc-cts-unibuild
 suite:arc-cts-unibuild-hw
+suite:arc-data-snapshot_per-build
 suite:arc-gts
 suite:arc-gts-qual
+suite:au-m2n
 suite:au-oobe
+suite:au-p2p
 suite:au-perbuild
+suite:au-perbuild-tast
 suite:au_fsi
 suite:audio
 suite:audio_advanced
@@ -30,7 +38,10 @@
 suite:bluestreak-pre-cq
 suite:bluetooth
 suite:bluetooth_e2e
+suite:bluetooth_e2e_cq
 suite:bluetooth_flaky
+suite:bluetooth_floss
+suite:bluetooth_floss_cq
 suite:bluetooth_health
 suite:bluetooth_mtbf
 suite:bluetooth_qualification
@@ -38,38 +49,112 @@
 suite:bluetooth_standalone_cq
 suite:bluetooth_stress
 suite:bluetooth_wifi_coex
+suite:bluetooth_wifi_testbed_update
 suite:bootup_time
+suite:borealis-cq
+suite:borealis_per-build
+suite:borealis_per-day
+suite:borealis_per-week
 suite:brillo-presubmit
 suite:bvt-arc
 suite:bvt-cq
-suite:bvt-faft
 suite:bvt-inline
 suite:bvt-installer
 suite:bvt-perbuild
 suite:bvt-tast-android-pfq
+suite:bvt-tast-arc
 suite:bvt-tast-chrome-pfq
 suite:bvt-tast-cq
+suite:bvt-tast-cq-a11y
+suite:bvt-tast-cq-ad
+suite:bvt-tast-cq-ambient
+suite:bvt-tast-cq-apps
+suite:bvt-tast-cq-arc
+suite:bvt-tast-cq-arcappcompat
+suite:bvt-tast-cq-assistant
+suite:bvt-tast-cq-audio
+suite:bvt-tast-cq-baserpc
+suite:bvt-tast-cq-biod
+suite:bvt-tast-cq-camera
+suite:bvt-tast-cq-crash
+suite:bvt-tast-cq-crostini
+suite:bvt-tast-cq-cryptohome
+suite:bvt-tast-cq-dbus
+suite:bvt-tast-cq-debugd
+suite:bvt-tast-cq-dev
+suite:bvt-tast-cq-diagnostics
+suite:bvt-tast-cq-documentscanapi
+suite:bvt-tast-cq-enterprise
+suite:bvt-tast-cq-example
+suite:bvt-tast-cq-factory
+suite:bvt-tast-cq-familylink
+suite:bvt-tast-cq-feedback
+suite:bvt-tast-cq-filemanager
+suite:bvt-tast-cq-firmware
+suite:bvt-tast-cq-gamepad
+suite:bvt-tast-cq-graphics
+suite:bvt-tast-cq-hardware
+suite:bvt-tast-cq-health
+suite:bvt-tast-cq-hwsec
+suite:bvt-tast-cq-inputs
+suite:bvt-tast-cq-kernel
+suite:bvt-tast-cq-lacros
+suite:bvt-tast-cq-launcher
+suite:bvt-tast-cq-logs
+suite:bvt-tast-cq-meta
+suite:bvt-tast-cq-mlservice
+suite:bvt-tast-cq-multivm
+suite:bvt-tast-cq-nacl
+suite:bvt-tast-cq-nearbyshare
+suite:bvt-tast-cq-network
+suite:bvt-tast-cq-notifications
+suite:bvt-tast-cq-ocr
+suite:bvt-tast-cq-peripherals
+suite:bvt-tast-cq-platform
+suite:bvt-tast-cq-policy
+suite:bvt-tast-cq-power
+suite:bvt-tast-cq-printer
+suite:bvt-tast-cq-qemu
+suite:bvt-tast-cq-scanapp
+suite:bvt-tast-cq-scanner
+suite:bvt-tast-cq-sched
+suite:bvt-tast-cq-screenshot
+suite:bvt-tast-cq-security
+suite:bvt-tast-cq-session
+suite:bvt-tast-cq-shelf
+suite:bvt-tast-cq-storage
+suite:bvt-tast-cq-typec
+suite:bvt-tast-cq-u2fd
+suite:bvt-tast-cq-ui
+suite:bvt-tast-cq-video
+suite:bvt-tast-cq-vm
+suite:bvt-tast-cq-webrtc
+suite:bvt-tast-cq-wifi
+suite:bvt-tast-cq-wilco
 suite:bvt-tast-informational
 suite:bvt-tast-parallels-cq
 suite:bvt-tast-parallels-informational
+suite:calibration
 suite:camera-libcamera
 suite:camera-postsubmit
+suite:camera-usb-qual
 suite:camerabox_facing-back
 suite:camerabox_facing-front
+suite:cellular-cq
+suite:cellular_au
+suite:cellular_au_nightly
+suite:cellular_callbox
 suite:cellular_endtoend
 suite:cellular_mbim_compliance
+suite:cellular_modem_fw
 suite:cellular_modem_repair
 suite:cellular_ota
+suite:cellular_ota_avl
 suite:cellular_ota_flaky
 suite:cellular_pseudomodem
 suite:cellular_qual
-suite:chameleon_audiovideo
-suite:chameleon_dp
-suite:chameleon_dp_hdmi
-suite:chameleon_hdmi
+suite:cft-beta
 suite:chameleon_hdmi_perbuild
-suite:chameleon_hdmi_unstable
-suite:chameleon_vga
 suite:check_setup_cts_N
 suite:check_setup_storage_qual
 suite:chrome-informational
@@ -80,17 +165,21 @@
 suite:crosbolt_perf_parallels_weekly
 suite:crosbolt_perf_perbuild
 suite:crosbolt_perf_weekly
-suite:cts_P
+suite:cross-device
 suite:cts
+suite:cts-hardware
+suite:cts_P
+suite:cuj
 suite:cups_weekly
 suite:debug_kernel_testing
 suite:deqp
 suite:dev_drone_image_test
 suite:display
-suite:dummy
+suite:distributed_lab_qual_faft
+suite:dpanel-end2end_per-build
+suite:drivefs-cq
 suite:dummy_server
 suite:dummy_server_nossp
-suite:dummyclientretries
 suite:enroll_retainment
 suite:enrollment_per-build
 suite:ent-nightly
@@ -99,19 +188,17 @@
 suite:experimental
 suite:factory
 suite:faft_bios
-suite:faft_bios_ec3po
 suite:faft_bios_ro_qual
 suite:faft_bios_rw_qual
 suite:faft_bios_tot
+suite:faft_ccd
 suite:faft_cr50_experimental
 suite:faft_cr50_prepvt
-suite:faft_cr50_prepvt_tast
 suite:faft_cr50_pvt
-suite:faft_cr50_pvt_tast
 suite:faft_cr50_tot
+suite:faft_detachable
 suite:faft_dev
 suite:faft_ec
-suite:faft_ec3po
 suite:faft_ec_fw_qual
 suite:faft_ec_tot
 suite:faft_ec_wp
@@ -128,10 +215,13 @@
 suite:faft_setup
 suite:faft_smoke
 suite:faft_stress
+suite:faft_unstable
 suite:faft_wilco
 suite:files
 suite:fingerprint
-suite:fingerprint-mcu
+suite:fingerprint-cq
+suite:fingerprint-mcu-dragonclaw
+suite:fleet_firmware_update
 suite:graphics
 suite:graphics_browser
 suite:graphics_per-build
@@ -141,8 +231,14 @@
 suite:gts
 suite:hotrod
 suite:hotrod-remora
+suite:hps_perf_perbuild
+suite:hps_sweetberry_perf_perbuild
 suite:hwqual
+suite:infra_multi_duts_with_android
 suite:infra_qual
+suite:infra_qual_cellular
+suite:infra_qual_wifi
+suite:input-tools-upstream
 suite:jailed_build
 suite:kernel_daily_benchmarks
 suite:kernel_daily_regression
@@ -155,21 +251,35 @@
 suite:longevity
 suite:manual_platform_suite
 suite:memory_qual
+suite:memory_qual2
 suite:memory_qual_quick
 suite:mmc_qual
 suite:moblab
 suite:moblab_quick
 suite:moblab_storage_qual
+suite:mtp
+suite:nbr
 suite:nearby-share
+suite:nearby-share-android
+suite:nearby-share-arc
+suite:nearby-share-dev
+suite:nearby-share-prod
+suite:nearby-share-remote
 suite:network_nightly
 suite:network_ui
 suite:offloads
+suite:omaha_per-week
 suite:partners
 suite:perfalerts
+suite:performance_cuj
+suite:performance_cuj_benchmarks
+suite:performance_cuj_experimental
+suite:performance_cuj_quick
 suite:platform_internal_display
 suite:platform_test_nightly
 suite:policy
 suite:power_build
+suite:power_check
 suite:power_daily
 suite:power_dashboard
 suite:power_idle
@@ -178,15 +288,38 @@
 suite:power_loadtest_fast
 suite:power_measurement_wrapper
 suite:power_monitoring
+suite:power_qual_fast
+suite:power_qual_full
 suite:power_requirements
-suite:power_sanity
 suite:powerplay
+suite:printscan
 suite:push_to_prod
+suite:pvs-audio
+suite:pvs-bvt-inline
+suite:pvs-display
+suite:pvs-graphics
+suite:pvs-kernel
+suite:pvs-quick-check
+suite:pvs-staging
+suite:pvs-tast-cq
+suite:pvs-video
+suite:py3-beta
 suite:reboot_stress
 suite:regression
 suite:rlz
 suite:runtime_probe_perbuild
 suite:sanity
+suite:satlab-qual-arc-cts
+suite:satlab-qual-arc-cts-qual
+suite:satlab-qual-arc-gts-qual
+suite:satlab-qual-bvt-cq
+suite:satlab-qual-bvt-inline
+suite:satlab-qual-bvt-installer
+suite:satlab-qual-bvt-perbuild
+suite:satlab-qual-bvt-tast-cq
+suite:satlab-qual-bvt-tast-informational
+suite:satlab-qual-faft_bios
+suite:satlab-qual-fat_smoke
 suite:sb65-presubmit
 suite:security
 suite:servo_lab
@@ -196,8 +329,10 @@
 suite:something_else
 suite:ssdqual
 suite:storage_qual
+suite:storage_qual_bringup
 suite:storage_qual_cq
 suite:storage_qual_external
+suite:storage_qual_mini_soak
 suite:storage_qual_quick
 suite:storage_qual_v2_l
 suite:storage_qual_v2_m
@@ -207,7 +342,10 @@
 suite:storage_qual_v2_xs
 suite:storagequal
 suite:stress
+suite:stub
+suite:stubclientretries
 suite:suite:cellular_ota_sprint
+suite:syzcorpus
 suite:syzkaller
 suite:tablet_mode
 suite:telemetry_unit
@@ -216,7 +354,8 @@
 suite:thermal_qual_full
 suite:toolchain-tests
 suite:touch
-suite:usb-camera
+suite:touch_replay
+suite:typec_lab
 suite:usb_detect
 suite:usb_detect_stress
 suite:video
@@ -233,12 +372,13 @@
 suite:wifi_interop_wpa2
 suite:wifi_lucidsleep
 suite:wifi_matfunc
+suite:wifi_matfunc_ax
 suite:wifi_perf
 suite:wifi_release
 suite:wifi_stress
 suite:wifi_tdls_cast
-suite:wifi_update_router
 suite:wificell-cq
+suite:wificell_dut_validation
 suite:wifichaos
 suite:wilco_bve
 suite:wilco_bve_dock
diff --git a/site_utils/backup_mysql_db.py b/site_utils/backup_mysql_db.py
deleted file mode 100755
index a90c219..0000000
--- a/site_utils/backup_mysql_db.py
+++ /dev/null
@@ -1,355 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module used to back up the mysql db and upload to Google Storage.
-
-Usage:
-  backup_mysql_db.py --type=weekly --gs_bucket=gs://my_bucket --keep 10
-
-  gs_bucket may refer to a local location by omitting gs:// and giving a local
-  path if desired for testing. The example usage above creates a dump
-  of the autotest db, uploads it to gs://my_bucket/weekly/dump_file.date and
-  cleans up older dumps if there are more than 10 in that directory.
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import datetime
-from distutils import version
-import logging
-import optparse
-import os
-import tempfile
-
-import common
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib import utils
-
-from chromite.lib import metrics
-from chromite.lib import ts_mon_config
-from six.moves import range
-
-_ATTEMPTS = 3
-_GSUTIL_BIN = 'gsutil'
-_GS_BUCKET = 'gs://chromeos-lab/backup/database'
-# TODO(scottz): Should we need to ignore more than one database a general
-# function should be designed that lists tables in the database and properly
-# creates the --ignore-table= args to be passed to mysqldump.
-# Tables to ignore when dumping all databases.
-# performance_schema is an internal database that cannot be dumped
-IGNORE_TABLES = ['performance_schema.cond_instances',
-                 'performance_schema.events_waits_current',
-                 'performance_schema.cond_instances',
-                 'performance_schema.events_waits_history',
-                 'performance_schema.events_waits_history_long',
-                 'performance_schema.events_waits_summary_by_instance',
-                 ('performance_schema.'
-                  'events_waits_summary_by_thread_by_event_name'),
-                 'performance_schema.events_waits_summary_global_by_event_name',
-                 'performance_schema.file_instances',
-                 'performance_schema.file_summary_by_event_name',
-                 'performance_schema.file_summary_by_instance',
-                 'performance_schema.mutex_instances',
-                 'performance_schema.performance_timers',
-                 'performance_schema.rwlock_instances',
-                 'performance_schema.setup_consumers',
-                 'performance_schema.setup_instruments',
-                 'performance_schema.setup_timers',
-                 'performance_schema.threads']
-
-# Conventional mysqldump schedules.
-_DAILY = 'daily'
-_WEEKLY = 'weekly'
-_MONTHLY = 'monthly'
-
-# Dump of server db only
-_SERVER_DB = 'server_db'
-
-# Contrary to a conventional mysql dump which takes O(hours) on large databases,
-# a host dump is the cheapest form of backup possible. We dump the output of a
-# of a mysql command showing all hosts and their pool labels to a text file that
-# is backed up to google storage.
-_ONLY_HOSTS = 'only_hosts'
-_ONLY_SHARDS = 'only_shards'
-_SCHEDULER_TYPES = [_SERVER_DB, _ONLY_HOSTS, _ONLY_SHARDS,
-                    _DAILY, _WEEKLY, _MONTHLY]
-
-class BackupError(Exception):
-  """Raised for error occurred during backup."""
-
-
-class MySqlArchiver(object):
-    """Class that archives the Autotest MySQL DB to Google Storage.
-
-    Vars:
-      gs_dir:  The path to the directory in Google Storage that this dump file
-               will be uploaded to.
-      number_to_keep:  The number of dumps we should store.
-    """
-    _AUTOTEST_DB = "chromeos_autotest_db"
-    _SERVER_DB = "chromeos_lab_servers"
-
-
-    def __init__(self, scheduled_type, number_to_keep, gs_bucket):
-        # For conventional scheduled type, we back up all databases.
-        # self._db is only used when scheduled_type is not
-        # conventional scheduled type.
-        self._db = self._get_db_name(scheduled_type)
-        self._gs_dir = '/'.join([gs_bucket, scheduled_type])
-        self._number_to_keep = number_to_keep
-        self._type = scheduled_type
-
-
-    @classmethod
-    def _get_db_name(cls, scheduled_type):
-        """Get the db name to backup.
-
-        @param scheduled_type: one of _SCHEDULER_TYPES.
-
-        @returns: The name of the db to backup.
-                  Or None for backup all dbs.
-        """
-        if scheduled_type == _SERVER_DB:
-            return cls._SERVER_DB
-        elif scheduled_type in [_ONLY_HOSTS, _ONLY_SHARDS]:
-            return cls._AUTOTEST_DB
-        else:
-            return None
-
-    @staticmethod
-    def _get_user_pass():
-        """Returns a tuple containing the user/pass to use to access the DB."""
-        user = global_config.global_config.get_config_value(
-                'CROS', 'db_backup_user')
-        password = global_config.global_config.get_config_value(
-                'CROS', 'db_backup_password')
-        return user, password
-
-
-    def create_mysql_dump(self):
-        """Returns the path to a mysql dump of the current autotest DB."""
-        user, password = self._get_user_pass()
-        _, filename = tempfile.mkstemp('autotest_db_dump')
-        logging.debug('Dumping mysql database to file %s', filename)
-        extra_dump_args = ''
-        for entry in IGNORE_TABLES:
-            extra_dump_args += '--ignore-table=%s ' % entry
-        if self._type in [_WEEKLY, _MONTHLY]:
-            extra_dump_args += '--dump-slave '
-
-        if not self._db:
-            extra_dump_args += "--all-databases"
-        db_name = self._db or ''
-        utils.system('set -o pipefail; mysqldump --user=%s '
-                     '--password=%s %s %s| gzip - > %s' % (
-                     user, password, extra_dump_args, db_name, filename))
-        return filename
-
-
-    def _create_dump_from_query(self, query):
-        """Dumps result of a query into a text file.
-
-        @param query: Query to execute.
-
-        @return: The path to a tempfile containing the response of the query.
-        """
-        if not self._db:
-            raise BackupError("_create_dump_from_query requires a specific db.")
-        parameters = {'db': self._db, 'query': query}
-        parameters['user'], parameters['password'] = self._get_user_pass()
-        _, parameters['filename'] = tempfile.mkstemp('autotest_db_dump')
-        utils.system(
-                'set -o pipefail; mysql -u %(user)s -p%(password)s '
-                '%(db)s -e "%(query)s" > %(filename)s' %
-                parameters)
-        return parameters['filename']
-
-
-    def create_host_dump(self):
-        """Dumps hosts and their labels into a text file.
-
-        @return: The path to a tempfile containing a dump of
-                 hosts and their pool labels.
-        """
-        respect_static_labels = global_config.global_config.get_config_value(
-                'SKYLAB', 'respect_static_labels', type=bool, default=False)
-        template = ('SELECT hosts.hostname, labels.name FROM afe_hosts AS '
-                    'hosts JOIN %(hosts_labels_table)s AS hlt ON '
-                    'hosts.id = hlt.host_id '
-                    'JOIN %(labels_table)s AS labels '
-                    'ON labels.id = hlt.%(column)s '
-                    'WHERE labels.name LIKE \'%%pool%%\';')
-        if respect_static_labels:
-            # HACK: We're not checking the replaced_by_static_label on the
-            # pool label and just hard coding the fact that pool labels are
-            # indeed static labels. Expedience.
-            query = template % {
-                    'hosts_labels_table': 'afe_static_hosts_labels',
-                    'labels_table': 'afe_static_labels',
-                    'column': 'staticlabel_id',
-            }
-        else:
-            query = template % {
-                    'hosts_labels_table': 'afe_hosts_labels',
-                    'labels_table': 'afe_labels',
-                    'column': 'label_id',
-            }
-        return self._create_dump_from_query(query)
-
-
-    def create_shards_dump(self):
-        """Dumps shards and their labels into a text file.
-
-        @return: The path to a tempfile containing a dump of
-                 shards and their labels.
-        """
-        query = ('SELECT hostname, labels.name FROM afe_shards AS shards '
-                 'JOIN afe_shards_labels '
-                 'ON shards.id = afe_shards_labels.shard_id '
-                 'JOIN afe_labels AS labels '
-                 'ON labels.id = afe_shards_labels.label_id;')
-        return self._create_dump_from_query(query)
-
-
-    def dump(self):
-        """Creates a data dump based on the type of schedule.
-
-        @return: The path to a file containing the dump.
-        """
-        if self._type == _ONLY_HOSTS:
-            return self.create_host_dump()
-        if self._type == _ONLY_SHARDS:
-            return self.create_shards_dump()
-        return self.create_mysql_dump()
-
-
-    def _get_name(self):
-        """Returns the name of the dump as presented to google storage."""
-        if self._type in [_ONLY_HOSTS, _ONLY_SHARDS]:
-            file_type = 'txt'
-        else:
-            file_type = 'gz'
-        return 'autotest-dump.%s.%s' % (
-                datetime.datetime.now().strftime('%y.%m.%d'), file_type)
-
-
-    @staticmethod
-    def _retry_run(cmd):
-        """Run the specified |cmd| string, retrying if necessary.
-
-        Args:
-          cmd: The command to run.
-        """
-        for attempt in range(_ATTEMPTS):
-            try:
-                return utils.system_output(cmd)
-            except error.CmdError:
-                if attempt == _ATTEMPTS - 1:
-                    raise
-                else:
-                    logging.error('Failed to run %r', cmd)
-
-
-    def upload_to_google_storage(self, dump_file):
-        """Uploads the given |dump_file| to Google Storage.
-
-        @param dump_file: The path to the file containing the dump.
-        """
-        cmd = '%(gs_util)s cp %(dump_file)s %(gs_dir)s/%(name)s'
-        input_dict = dict(gs_util=_GSUTIL_BIN, dump_file=dump_file,
-                          name=self._get_name(), gs_dir=self._gs_dir)
-        cmd = cmd % input_dict
-        logging.debug('Uploading mysql dump to google storage')
-        self._retry_run(cmd)
-        os.remove(dump_file)
-
-
-    def _get_gs_command(self, cmd):
-        """Returns an array representing the command for rm or ls."""
-        # Helpful code to allow us to test without gs.
-        assert cmd in ['rm', 'ls']
-        gs_bin = _GSUTIL_BIN
-        if self._gs_dir.startswith('gs://'):
-            cmd_array = [gs_bin, cmd]
-        else:
-            cmd_array = [cmd]
-
-        return cmd_array
-
-
-    def _do_ls(self):
-        """Returns the output of running ls on the gs bucket."""
-        cmd = self._get_gs_command('ls') + [self._gs_dir]
-        return self._retry_run(' '.join(cmd))
-
-
-    def cleanup(self):
-        """Cleans up the gs bucket to ensure we don't over archive."""
-        logging.debug('Cleaning up previously archived dump files.')
-        listing = self._do_ls()
-        ordered_listing = sorted(listing.splitlines(), key=version.LooseVersion)
-        if len(ordered_listing) < self._number_to_keep:
-            logging.debug('Cleanup found nothing to do.')
-            return
-
-        to_remove = ordered_listing[:-self._number_to_keep]
-        rm_cmd = self._get_gs_command('rm')
-        for artifact in to_remove:
-            cmd = ' '.join(rm_cmd + [artifact])
-            self._retry_run(cmd)
-
-
-def parse_options():
-    """Parses given options."""
-    parser = optparse.OptionParser()
-    parser.add_option('--gs_bucket', default=_GS_BUCKET,
-                      help='Google storage bucket to store mysql db dumps.')
-    parser.add_option('--keep', default=10, type=int,
-                      help='Number of dumps to keep of specified type.')
-    parser.add_option('--type', default=_DAILY,
-                      help='The type of mysql dump to store.')
-    parser.add_option('--verbose', default=False, action='store_true',
-                      help='Google storage bucket to store mysql db dumps.')
-    options = parser.parse_args()[0]
-    if options.type not in _SCHEDULER_TYPES:
-        parser.error('Type must be either: %s.' % ', '.join(_SCHEDULER_TYPES))
-
-    return options
-
-
-def main():
-    """Runs the program."""
-    options = parse_options()
-    backup_succeeded = False
-
-    with ts_mon_config.SetupTsMonGlobalState(service_name='mysql_db_backup',
-                                             indirect=True):
-        with metrics.SecondsTimer(
-                'chromeos/autotest/afe_db/backup/durations',
-                fields={'type': options.type}):
-             try:
-                 logging.debug('Start db backup: %s', options.type)
-                 archiver = MySqlArchiver(
-                         options.type, options.keep, options.gs_bucket)
-                 dump_file = archiver.dump()
-                 logging.debug('Uploading backup: %s', options.type)
-                 archiver.upload_to_google_storage(dump_file)
-                 archiver.cleanup()
-                 logging.debug('Db backup completed: %s', options.type)
-                 backup_succeeded = True
-             finally:
-                 metrics.Counter(
-                     'chromeos/autotest/db/db_backup/completed').increment(
-                         fields={'success': backup_succeeded,
-                                 'type': options.type})
-
-
-if __name__ == '__main__':
-    main()
diff --git a/site_utils/balance_pools.py b/site_utils/balance_pools.py
index a5db6f9..f3ce951 100755
--- a/site_utils/balance_pools.py
+++ b/site_utils/balance_pools.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -68,8 +68,8 @@
 from autotest_lib.server.lib import status_history
 from autotest_lib.site_utils import lab_inventory
 from autotest_lib.utils import labellib
-from chromite.lib import metrics
-from chromite.lib import parallel
+from autotest_lib.utils.frozen_chromite.lib import metrics
+from autotest_lib.utils.frozen_chromite.lib import parallel
 
 #This must be imported after chromite.lib.metrics
 from infra_libs import ts_mon
diff --git a/site_utils/bootperf-bin/bootperf b/site_utils/bootperf-bin/bootperf
deleted file mode 100755
index bd20e12..0000000
--- a/site_utils/bootperf-bin/bootperf
+++ /dev/null
@@ -1,226 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Wrapper to run the platform_BootPerfServer autotest, and store the
-# results for later analysis by the 'showbootdata' script.
-#
-# NOTE: This script must be run from inside the chromeos build
-# chroot environment.
-#
-COMMONSH="/mnt/host/source/src/scripts/common.sh"
-. ${COMMONSH} || (echo "Unable to load ${COMMONSH}" && exit 1)
-
-# TODO(jrbarnette) The log files produced in this script will be
-# stored inside the chroot.  So, from outside the chroot, this
-# script doesn't work.  I don't know if this is easy to fix, but
-# you're welcome to try.  Let me know how it goes.  :-)
-assert_inside_chroot
-
-DEFINE_string output_dir "" "output directory for results" o
-DEFINE_string board "" "name of board we are testing"
-DEFINE_boolean keep_logs "$FLAGS_FALSE" "keep autotest results" k
-
-RUN_TEST="test_that"
-TEST='platform_BootPerfServer.bootperf'
-TMP_RESULTS="$(mktemp -d /tmp/bootperf.XXXXXX)"
-RESULTS_ROOT="results-1-$TEST"
-RESULTS_DIR=platform_BootPerfServer/results
-RESULTS_KEYVAL=$RESULTS_DIR/keyval
-RESULTS_SUMMARY_FILES=(
-  $RESULTS_DIR
-  keyval
-  platform_BootPerfServer/keyval
-  platform_BootPerfServer/platform_BootPerf/keyval
-  platform_BootPerfServer/platform_BootPerf/status
-  platform_BootPerfServer/status
-  platform_BootPerfServer/status.log
-  platform_BootPerfServer/sysinfo/cmdline
-  platform_BootPerfServer/sysinfo/cpuinfo
-  platform_BootPerfServer/sysinfo/modules
-  platform_BootPerfServer/sysinfo/uname
-  platform_BootPerfServer/sysinfo/version
-  status.log
-)
-
-# Structure of a results directory:
-#   $RUNDIR.$ITER/          - directory
-#       $RUNDIR_LOG             - file
-#       $RUNDIR_SUMMARY/        - directory
-#       $RUNDIR_ALL_RESULTS/    - optional directory
-#   $KEYVAL_SUMMARY/        - file
-# If you add any other content under the results directory, you'll
-# probably need to change extra_files(), below.
-RUNDIR=run
-RUNDIR_LOG=log.txt
-RUNDIR_SUMMARY=summary
-RUNDIR_ALL_RESULTS=logs
-KEYVAL_SUMMARY=results_keyval
-
-
-# Usage/help function.  This function is known to the shflags library,
-# and mustn't be renamed.
-flags_help() {
-  cat <<END_USAGE >&2
-usage: $(basename $0) [ <options> ] <ip-address> [ <count> ]
-Options:
-  --output_dir <directory>
-  --o <directory>       Specify output directory for results
-
-  --board <BOARDNAME>   name of board we are testing (e.g. daisy)
-
-  --[no]keep_logs
-  -k                    Keep [don't keep] autotest log files
-Summary:
-  Run the platform_BootPerfServer autotest, and store results in the
-  given destination directory.  The test target is specified by
-  <ip-address>.
-
-  By default, the test is run once; if <count> is given, the test is
-  run that many times.  Note that the platform_BootPerfServer test
-  reboots the target 10 times, so the total number of reboots will
-  be 10*<count>.
-
-  If the destination directory doesn't exist, it is created.  If the
-  destination directory already holds test results, additional
-  results are added in without overwriting earlier results.
-
-  If no destination is specified, the current directory is used,
-  provided that the directory is empty, or has been previously used
-  as a destination directory for this command.
-
-  By default, only a summary subset of the log files created by
-  autotest is preserved; with --keep_logs the (potentially large)
-  autotest logs are preserved with the test results.
-END_USAGE
-  return $FLAGS_TRUE
-}
-
-usage() {
-  if [ $# -gt 0 ]; then
-    error "$(basename $0): $*"
-    echo >&2
-  fi
-  flags_help
-  exit 1
-}
-
-# List any files in the current directory not created as output
-# from running this script.
-extra_files() {
-  ls | grep -v "^$RUNDIR[.]...\$" |
-       grep -v $KEYVAL_SUMMARY
-}
-
-# Main function to run the boot performance test.  Run the boot
-# performance test for the given count, putting output into the
-# current directory.
-#
-# Arguments are <ip-address> and <count> arguments, as for the main
-# command.
-#
-# We terminate test runs if "test_that" ever fails to produce the
-# results keyval file; generally this is the result of a serious
-# error (e.g. disk full) that won't go away if we just plow on.
-run_boot_test() {
-  local remote="$1"
-  local count="${2:-1}"
-
-  local iter=$(expr "$(echo $RUNDIR.???)" : '.*\(...\)')
-  if [ "$iter" != "???" ]; then
-      iter=$(echo $iter | awk '{printf "%03d\n", $1 + 1}')
-  else
-      iter=000
-  fi
-
-  i=0
-  while [ $i -lt $count ]; do
-    local iter_rundir=$RUNDIR.$iter
-    local logfile=$(pwd)/$iter_rundir/$RUNDIR_LOG
-    local summary_dir=$iter_rundir/$RUNDIR_SUMMARY
-    local all_results_dir=$iter_rundir/$RUNDIR_ALL_RESULTS
-
-    mkdir $iter_rundir
-    echo "$(date '+%T') - $logfile"
-
-    $RUN_TEST --results_dir="$TMP_RESULTS" --args "10" $BOARD \
-              "$remote" $TEST >$logfile 2>&1
-    if [ ! -e "$TMP_RESULTS/$RESULTS_ROOT/$RESULTS_KEYVAL" ]; then
-      error "No results file; terminating test runs."
-      error "Check $logfile for output from the test run,"
-      error "and see $TMP_RESULTS for full test logs and output."
-      return
-    fi
-    mkdir $summary_dir
-    tar cf - -C $TMP_RESULTS/$RESULTS_ROOT "${RESULTS_SUMMARY_FILES[@]}" |
-      tar xf - -C $summary_dir
-    if [ $FLAGS_keep_logs -eq $FLAGS_TRUE ]; then
-      mv $TMP_RESULTS $all_results_dir
-      chmod 755 $all_results_dir
-    else
-      rm -rf $TMP_RESULTS
-    fi
-    i=$(expr $i + 1)
-    iter=$(echo $iter | awk '{printf "%03d\n", $1 + 1}')
-  done
-  date '+%T'
-  cat $RUNDIR.???/$RUNDIR_SUMMARY/$RESULTS_KEYVAL >$KEYVAL_SUMMARY
-}
-
-# Main routine - check validity of the (already parsed) command line
-# options.  'cd' to the results directory, if it was specified.  If
-# all the arguments checks pass, hand control to run_boot_test
-main() {
-  if [ $# -lt 1 ]; then
-      usage "Missing target host address"
-  elif [ $# -gt 2 ]; then
-      usage "Too many arguments"
-  fi
-
-  if [ -n "${FLAGS_board}" ]; then
-    BOARD="--board=${FLAGS_board}"
-  fi
-
-  if [ -n "${FLAGS_output_dir}" ]; then
-    if [ ! -d "${FLAGS_output_dir}" ]; then
-      if ! mkdir "${FLAGS_output_dir}"; then
-        usage "Unable to create ${FLAGS_output_dir}"
-      fi
-    fi
-    cd "${FLAGS_output_dir}" ||
-      usage "No permissions to chdir to ${FLAGS_output_dir}"
-  elif [ -n "$(extra_files)" ]; then
-    error "No results directory specified, and current directory"
-    error "contains contents other than run results."
-    error "You can override this error by using the --output_dir option"
-    usage
-  fi
-
-  # Check the count argument.
-  # N.B. the test [ "$2" -eq "$2" ] tests whether "$2" is valid as a
-  # number; when it fails it will also report a syntax error (which
-  # we suppress).
-  if [ -n "$2" ]; then
-    if ! [ "$2" -eq "$2" ] 2>/dev/null || [ "$2" -le 0 ]; then
-      usage "<count> argument must be a positive number"
-    fi
-  fi
-
-  run_boot_test "$@"
-}
-
-# shflags defines --help implicitly; if it's used on the command
-# line FLAGS will invoke flags_help, set FLAGS_help to TRUE, and
-# then return false.  To avoid printing help twice, we have to check
-# for that case here.
-if ! FLAGS "$@"; then
-  if [ ${FLAGS_help} -eq ${FLAGS_TRUE} ]; then
-    exit 0
-  else
-    usage
-  fi
-fi
-
-eval main "${FLAGS_ARGV}"
diff --git a/site_utils/bootperf-bin/perfprinter.py b/site_utils/bootperf-bin/perfprinter.py
deleted file mode 100644
index 9b4bfbb..0000000
--- a/site_utils/bootperf-bin/perfprinter.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Routines for printing boot time performance test results."""
-
-import resultset
-
-
-def PrintRawData(reader, dirlist, keytype, keylist):
-  """Print 'bootperf' results in "raw data" format.
-
-  @param reader Function for reading results from results
-                directories.
-  @param dirlist List of directories to read results from.
-  @param keytype Selector specifying the desired key set (e.g.
-                 the boot time keyset, the disk stats keyset, etc.)
-  @param keylist List of event keys to be printed in the report.
-
-  """
-  for dir_ in dirlist:
-    results = reader(dir_)
-    keyset = results.KeySet(keytype)
-    for i in range(0, keyset.num_iterations):
-      if len(dirlist) > 1:
-        line = "%s %3d" % (results.name, i)
-      else:
-        line = "%3d" % i
-      if keylist is not None:
-        markers = keylist
-      else:
-        markers = keyset.markers
-      for stat in markers:
-        (_, v) = keyset.PrintableStatistic(keyset.RawData(stat)[i])
-        line += " %5s" % str(v)
-      print line
-
-
-def PrintStatisticsSummary(reader, dirlist, keytype, keylist):
-  """Print 'bootperf' results in "summary of averages" format.
-
-  @param reader Function for reading results from results
-                directories.
-  @param dirlist List of directories to read results from.
-  @param keytype Selector specifying the desired key set (e.g.
-                 the boot time keyset, the disk stats keyset, etc.)
-  @param keylist List of event keys to be printed in the report.
-
-  """
-  if (keytype == resultset.TestResultSet.BOOTTIME_KEYSET or
-      keytype == resultset.TestResultSet.FIRMWARE_KEYSET):
-    header = "%5s %3s  %5s %3s  %s" % (
-        "time", "s%", "dt", "s%", "event")
-    tformat = "%5s %2d%%  %5s %2d%%  %s"
-  else:
-    header = "%7s %3s  %7s %3s  %s" % (
-        "diskrd", "s%", "delta", "s%", "event")
-    tformat = "%7s %2d%%  %7s %2d%%  %s"
-  havedata = False
-  for dir_ in dirlist:
-    results = reader(dir_)
-    keyset = results.KeySet(keytype)
-    if keylist is not None:
-      markers = keylist
-    else:
-      markers = keyset.markers
-    if havedata:
-      print
-    if len(dirlist) > 1:
-      print "%s" % results.name,
-    print "(on %d cycles):" % keyset.num_iterations
-    print header
-    prevvalue = 0
-    prevstat = None
-    for stat in markers:
-      (valueavg, valuedev) = keyset.Statistics(stat)
-      valuepct = int(100 * valuedev / valueavg + 0.5)
-      if prevstat:
-        (deltaavg, deltadev) = keyset.DeltaStatistics(prevstat, stat)
-        if deltaavg == 0:
-          deltaavg=1
-          print "deltaavg is zero! (delta is %s to %s)" % (prevstat, stat)
-
-        deltapct = int(100 * deltadev / deltaavg + 0.5)
-      else:
-        deltapct = valuepct
-      (valstring, val_printed) = keyset.PrintableStatistic(valueavg)
-      delta = val_printed - prevvalue
-      (deltastring, _) = keyset.PrintableStatistic(delta)
-      print tformat % (valstring, valuepct, "+" + deltastring, deltapct, stat)
-      prevvalue = val_printed
-      prevstat = stat
-    havedata = True
diff --git a/site_utils/bootperf-bin/resultsdir.py b/site_utils/bootperf-bin/resultsdir.py
deleted file mode 100644
index b40d3c7..0000000
--- a/site_utils/bootperf-bin/resultsdir.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Routines for reading performance results from a directory.
-
-The directory should match the format created by the 'bootperf'
-script; see comments in that script for a summary of the layout.
-
-"""
-
-import fnmatch
-import os
-import re
-
-import resultset
-
-
-_PERF_KEYVAL_PATTERN = re.compile("(.*){perf}=(.*)\n")
-
-
-def _ReadKeyvalFile(results, file_):
-  """Read an autotest keyval file, and process the results.
-
-  The `file_` parameter is a file object with contents in autotest
-  perf keyval format:
-      <keyname>{perf}=<value>
-
-  Each iteration of the test is terminated with a single blank line,
-  including the last iteration.  Each iteration's results are added
-  to the `results` parameter, which should be an instance of
-  TestResultSet.
-
-  @param results A TestResultSet where the result data will be
-                 collected.
-  @param file_ File object for the results file to be read.
-
-  """
-  kvd = {}
-  for line in iter(file_):
-    if line == "\n":
-      results.AddIterationResults(kvd)
-      kvd = {}
-      continue
-    m = _PERF_KEYVAL_PATTERN.match(line)
-    if m is None:
-      continue
-    kvd[m.group(1)] = m.group(2)
-
-
-_RESULTS_PATH = "summary/platform_BootPerfServer/results/keyval"
-
-
-def ReadResultsDirectory(dir_):
-  """Process results from a 'bootperf' output directory.
-
-  The accumulated results are returned in a newly created
-  TestResultSet object.
-
-  @param dir_ The directory containing the test results keyval file.
-
-  """
-  res_set = resultset.TestResultSet(dir_)
-  dirlist = fnmatch.filter(os.listdir(dir_), "run.???")
-  dirlist.sort()
-  for rundir in dirlist:
-    keyval_path = os.path.join(dir_, rundir, _RESULTS_PATH)
-    try:
-      kvf = open(keyval_path)
-    except IOError:
-      continue
-    _ReadKeyvalFile(res_set, kvf)
-  res_set.FinalizeResults()
-  return res_set
diff --git a/site_utils/bootperf-bin/resultset.py b/site_utils/bootperf-bin/resultset.py
deleted file mode 100644
index 09e1aa5..0000000
--- a/site_utils/bootperf-bin/resultset.py
+++ /dev/null
@@ -1,387 +0,0 @@
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Classes and functions for managing platform_BootPerf results.
-
-Results from the platform_BootPerf test in the ChromiumOS autotest
-package are stored as performance 'keyvals', that is, a mapping
-of names to numeric values.  For each iteration of the test, one
-set of keyvals is recorded.
-
-This module currently tracks four kinds of keyval results: the boot
-time results, the disk read results, the firmware time results, and
-reboot time results.  These results are stored with keyval names
-such as 'seconds_kernel_to_login', 'rdbytes_kernel_to_login', and
-'seconds_power_on_to_kernel'.  These keyvals record an accumulated
-total measured from a fixed time in the past, e.g.
-'seconds_kernel_to_login' records the total seconds from kernel
-startup to login screen ready.
-
-The boot time keyval names all start with the prefix
-'seconds_kernel_to_', and record time in seconds since kernel
-startup.
-
-The disk read keyval names all start with the prefix
-'rdbytes_kernel_to_', and record bytes read from the boot device
-since kernel startup.
-
-The firmware keyval names all start with the prefix
-'seconds_power_on_to_', and record time in seconds since CPU
-power on.
-
-The reboot keyval names are selected from a hard-coded list of
-keyvals that include both some boot time and some firmware time
-keyvals, plus specific keyvals keyed to record shutdown and reboot
-time.
-
-"""
-
-import math
-
-
-def _ListStats(list_):
-  """Return the mean and sample standard deviation of a list.
-
-  The returned result is float, even if the input list is full of
-  integers.
-
-  @param list_ The list over which to calculate.
-
-  """
-  sum_ = 0.0
-  sumsq = 0.0
-  for v in list_:
-    sum_ += v
-    sumsq += v * v
-  n = len(list_)
-  avg = sum_ / n
-  var = (sumsq - sum_ * avg) / (n - 1)
-  if var < 0.0:
-    var = 0.0
-  dev = math.sqrt(var)
-  return (avg, dev)
-
-
-class TestResultSet(object):
-  """A set of boot time and disk usage result statistics.
-
-  Objects of this class consist of three sets of result statistics:
-  the boot time statistics, the disk statistics, and the firmware
-  time statistics.
-
-  Class TestResultsSet does not interpret or store keyval mappings
-  directly; iteration results are processed by attached _KeySet
-  objects, one for each of the three types of result keyval. The
-  _KeySet objects are kept in a dictionary; they can be obtained
-  by calling the KeySet with the name of the keyset desired.
-  Various methods on the KeySet objects will calculate statistics on
-  the results, and provide the raw data.
-
-  """
-
-  # The names of the available KeySets, to be used as arguments to
-  # KeySet().
-  BOOTTIME_KEYSET = "boot"
-  DISK_KEYSET = "disk"
-  FIRMWARE_KEYSET = "firmware"
-  REBOOT_KEYSET = "reboot"
-  AVAILABLE_KEYSETS = [
-    BOOTTIME_KEYSET, DISK_KEYSET, FIRMWARE_KEYSET, REBOOT_KEYSET
-  ]
-
-  def __init__(self, name):
-    self.name = name
-    self._keysets = {
-      self.BOOTTIME_KEYSET : _TimeKeySet(),
-      self.DISK_KEYSET : _DiskKeySet(),
-      self.FIRMWARE_KEYSET : _FirmwareKeySet(),
-      self.REBOOT_KEYSET : _RebootKeySet(),
-    }
-
-  def AddIterationResults(self, runkeys):
-    """Add keyval results from a single iteration.
-
-    A TestResultSet is constructed by repeatedly calling
-    AddIterationResults(), iteration by iteration.  Iteration
-    results are passed in as a dictionary mapping keyval attributes
-    to values.  When all iteration results have been added,
-    FinalizeResults() makes the results available for analysis.
-
-    @param runkeys The dictionary of keyvals for the iteration.
-
-    """
-
-    for keyset in self._keysets.itervalues():
-      keyset.AddIterationResults(runkeys)
-
-  def FinalizeResults(self):
-    """Make results available for analysis.
-
-    A TestResultSet is constructed by repeatedly feeding it results,
-    iteration by iteration.  Iteration results are passed in as a
-    dictionary mapping keyval attributes to values.  When all iteration
-    results have been added, FinalizeResults() makes the results
-    available for analysis.
-
-    """
-
-    for keyset in self._keysets.itervalues():
-      keyset.FinalizeResults()
-
-  def KeySet(self, keytype):
-    """Return a selected keyset from the test results.
-
-    @param keytype Selector for the desired keyset.
-
-    """
-    return self._keysets[keytype]
-
-
-class _KeySet(object):
-  """Container for a set of related statistics.
-
-  _KeySet is an abstract superclass for containing collections of
-  a related set of performance statistics.  Statistics are stored
-  as a dictionary (`_keyvals`) mapping keyval names to lists of
-  values.  The lists are indexed by the iteration number.
-
-  The mapped keyval names are shortened by stripping the prefix
-  that identifies the type of keyval (keyvals that don't start with
-  the proper prefix are ignored).  So, for example, with boot time
-  keyvals, 'seconds_kernel_to_login' becomes 'login' (and
-  'rdbytes_kernel_to_login' is ignored).
-
-  A list of all valid keyval names is stored in the `markers`
-  instance variable.  The list is sorted by the ordering of the
-  average of the corresponding values.  Each iteration is required
-  to contain the same set of keyvals.  This is enforced in
-  FinalizeResults() (see below).
-
-  """
-
-  def __init__(self):
-    self._keyvals = {}
-
-  def _CheckCounts(self):
-    """Check the validity of the keyvals results dictionary.
-
-    Each keyval must have occurred the same number of times.  When
-    this check succeeds, it returns the total number of occurrences;
-    on failure return `None`.
-
-    """
-    check = map(len, self._keyvals.values())
-    if not check:
-      return None
-    for i in range(1, len(check)):
-      if check[i] != check[i-1]:
-        return None
-    return check[0]
-
-  def AddIterationResults(self, runkeys):
-    """Add results for one iteration.
-
-    @param runkeys The dictionary of keyvals for the iteration.
-    """
-    for key, value in runkeys.iteritems():
-      if not key.startswith(self.PREFIX):
-        continue
-      shortkey = key[len(self.PREFIX):]
-      keylist = self._keyvals.setdefault(shortkey, [])
-      keylist.append(self._ConvertVal(value))
-
-  def FinalizeResults(self):
-    """Finalize this object's results.
-
-    This method makes available the `markers` and `num_iterations`
-    instance variables.  It also ensures that every keyval occurred
-    in every iteration by requiring that all keyvals have the same
-    number of data points.
-
-    """
-    count = self._CheckCounts()
-    if count is None:
-      self.num_iterations = 0
-      self.markers = []
-      return False
-    self.num_iterations = count
-    keylist = map(lambda k: (sum(self._keyvals[k]), k),
-                  self._keyvals.keys())
-    keylist.sort(key=lambda tp: tp[0])
-    self.markers = map(lambda tp: tp[1], keylist)
-    return True
-
-  def RawData(self, key):
-    """Return the list of values for the given key.
-
-    @param key Key of the list of values to return.
-
-    """
-    return self._keyvals[key]
-
-  def DeltaData(self, key0, key1):
-    """Return the vector difference between two keyvals lists.
-
-    @param key0 Key of the subtrahend vector.
-    @param key1 Key of the subtractor vector.
-
-    """
-    return map(lambda a, b: b - a,
-               self._keyvals[key0],
-               self._keyvals[key1])
-
-  def Statistics(self, key):
-    """Return the average and standard deviation for a key.
-
-    @param key
-    """
-    return _ListStats(self._keyvals[key])
-
-  def DeltaStatistics(self, key0, key1):
-    """Return the average and standard deviation between two keys.
-
-    Calculates the difference between each matching element in the
-    two key's lists, and returns the average and sample standard
-    deviation of the differences.
-
-    @param key0 Key of the subtrahend.
-    @param key1 Key of the subtractor.
-
-    """
-    return _ListStats(self.DeltaData(key0, key1))
-
-
-class _TimeKeySet(_KeySet):
-  """Concrete subclass of _KeySet for boot time statistics."""
-
-  PREFIX = 'seconds_kernel_to_'
-
-  # Time-based keyvals are reported in seconds and get converted to
-  # milliseconds
-  TIME_SCALE = 1000
-
-  def _ConvertVal(self, value):
-    """Return a keyval value in its 'canonical' form.
-
-    For boot time values, the input is seconds as a float; the
-    canonical form is milliseconds as an integer.
-
-    @param value A time statistic in seconds.
-
-    """
-    # We want to return the nearest exact integer here.  round()
-    # returns a float, and int() truncates its results, so we have
-    # to combine them.
-    return int(round(self.TIME_SCALE * float(value)))
-
-  def PrintableStatistic(self, value):
-    """Return a keyval in its preferred form for printing.
-
-    The return value is a tuple of a string to be printed, and
-    value rounded to the precision to which it was printed.
-
-    Rationale: Some callers of this function total up intermediate
-    results.  Returning the rounded value makes those totals more
-    robust against visible rounding anomalies.
-
-    @param value The value to be printed.
-
-    """
-    v = int(round(value))
-    return ("%d" % v, v)
-
-
-class _FirmwareKeySet(_TimeKeySet):
-  """Concrete subclass of _KeySet for firmware time statistics."""
-
-  PREFIX = 'seconds_power_on_to_'
-
-  # Time-based keyvals are reported in seconds and get converted to
-  # milliseconds
-  TIME_SCALE = 1000
-
-
-class _RebootKeySet(_TimeKeySet):
-  """Concrete subclass of _KeySet for reboot time statistics."""
-
-  PREFIX = ''
-
-  # Time-based keyvals are reported in seconds and get converted to
-  # milliseconds
-  TIME_SCALE = 1000
-
-  def AddIterationResults(self, runkeys):
-    """Add results for one iteration.
-
-    For _RebootKeySet, we cherry-pick and normalize a hard-coded
-    list of keyvals.
-
-    @param runkeys The dictionary of keyvals for the iteration.
-    """
-    # The time values we report are calculated as the time from when
-    # shutdown was requested.  However, the actual keyvals from the
-    # test are reported, variously, as "time from shutdown request",
-    # "time from power-on", and "time from kernel start".  So,
-    # the values have to be normalized to a common time line.
-    #
-    # The keyvals below capture the time from shutdown request of
-    # the _end_ of a designated phase of reboot, as follows:
-    #   shutdown - end of shutdown, start of firmware power-on
-    #       sequence.
-    #   firmware - end of firmware, transfer to kernel.
-    #   startup - end of kernel initialization, Upstart's "startup"
-    #       event.
-    #   chrome_exec - session_manager initialization complete,
-    #       Chrome starts running.
-    #   login - Chrome completes initialization of the login screen.
-    #
-    shutdown = float(runkeys["seconds_shutdown_time"])
-    firmware_time = float(runkeys["seconds_power_on_to_kernel"])
-    startup = float(runkeys["seconds_kernel_to_startup"])
-    chrome_exec = float(runkeys["seconds_kernel_to_chrome_exec"])
-    reboot = float(runkeys["seconds_reboot_time"])
-    newkeys = {}
-    newkeys["shutdown"] = shutdown
-    newkeys["firmware"] = shutdown + firmware_time
-    newkeys["startup"] = newkeys["firmware"] + startup
-    newkeys["chrome_exec"] = newkeys["firmware"] + chrome_exec
-    newkeys["login"] = reboot
-    super(_RebootKeySet, self).AddIterationResults(newkeys)
-
-
-class _DiskKeySet(_KeySet):
-  """Concrete subclass of _KeySet for disk read statistics."""
-
-  PREFIX = 'rdbytes_kernel_to_'
-
-  # Disk read keyvals are reported in bytes and get converted to
-  # MBytes (1 MByte = 1 million bytes, not 2**20)
-  DISK_SCALE = 1.0e-6
-
-  def _ConvertVal(self, value):
-    """Return a keyval value in its 'canonical' form.
-
-    For disk statistics, the input is bytes as a float; the
-    canonical form is megabytes as a float.
-
-    @param value A disk data statistic in megabytes.
-
-    """
-    return self.DISK_SCALE * float(value)
-
-  def PrintableStatistic(self, value):
-    """Return a keyval in its preferred form for printing.
-
-    The return value is a tuple of a string to be printed, and
-    value rounded to the precision to which it was printed.
-
-    Rationale: Some callers of this function total up intermediate
-    results.  Returning the rounded value makes those totals more
-    robust against visible rounding anomalies.
-
-    @param value The value to be printed.
-
-    """
-    v = round(value, 1)
-    return ("%.1fM" % v, v)
diff --git a/site_utils/bootperf-bin/showbootdata b/site_utils/bootperf-bin/showbootdata
deleted file mode 100755
index 23d19ad..0000000
--- a/site_utils/bootperf-bin/showbootdata
+++ /dev/null
@@ -1,165 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A command to display summary statistics from runs of 'bootperf'.
-
-Command line options allow selecting from one of four sets of
-performance statistics:
- 1. boot time statistics (selected by --keyset boot) measure time
-    spent since kernel startup;
- 2. disk statistics (selected by --keyset disk) measure total bytes
-    read from the boot device since kernel startup;
- 3. firmware time statistics (selected by --keyset firmware) measure
-    time spent since CPU power on.
- 4. reboot time statistics (selected by --keyset reboot) measure
-    time spent since the shutdown request immediately preceding
-    the request.
-
-The various statistics are recorded as cumulative time (or disk read)
-since kernel startup (or CPU power on), sampled when specific events
-occur during boot.  Events include such things as 'startup', (the
-moment when the upstart 'startup' job begins running), and 'login',
-(when the Chrome OS login screen is displayed).  By default, all
-recorded events are included in the output; command line options
-allow restricting the view to a selected subset of events.
-
-Separate command line options allow selecting from one of two
-different display modes.  When --averages is selected, the display
-shows the average value and sample standard deviation (as a percent
-of the average) for all selected events.  The --averages display
-also calculates the difference (in time or bytes) between adjacent
-events, and shows the average and sample standard deviation of the
-differences.
-
-The --rawdata display shows the raw data value associated with each
-event for each boot:  Each line of output represents the event values
-for one boot iteration.
-
-"""
-
-import sys
-import optparse
-
-import perfprinter
-import resultsdir
-import resultset
-
-
-_USAGE = "%prog [options] [results-directory ...]"
-_DESCRIPTION = """\
-Summarize boot time performance results.  The result directory
-arguments are directories previously specified as output for the
-'bootperf' script.
-"""
-
-
-def _SetupOptions():
-  """Create an OptionParser for the command line."""
-  optparser = optparse.OptionParser(usage=_USAGE, description=_DESCRIPTION)
-
-  optgroup = optparse.OptionGroup(
-      optparser, "Statistics selection")
-
-  keyset_help = ("Selects the set of statistics to display; "
-                    "choose one of ")
-  keyset_help += "'" + resultset.TestResultSet.AVAILABLE_KEYSETS[0] + "'"
-  for keyset in resultset.TestResultSet.AVAILABLE_KEYSETS[1:-1]:
-    keyset_help += ", '" + keyset + "'"
-  keyset_help += (", or '" +
-                  resultset.TestResultSet.AVAILABLE_KEYSETS[-1] + "'.")
-  keyset_default = resultset.TestResultSet.BOOTTIME_KEYSET
-  keyset_help += "  (Default is '" + keyset_default + "'.)"
-  optgroup.add_option(
-      "-k", "--keyset", action="store", dest="keyset", type="choice",
-      choices=resultset.TestResultSet.AVAILABLE_KEYSETS,
-      help=keyset_help)
-  optparser.add_option_group(optgroup)
-  optparser.set_defaults(keyset=keyset_default)
-
-  optgroup = optparse.OptionGroup(optparser, "Event selection")
-  optgroup.add_option(
-      "-e", "--event", action="append",
-      dest="eventnames",
-      help="Restrict statistics to the comma-separated list of events.")
-  optparser.add_option_group(optgroup)
-
-  optgroup = optparse.OptionGroup(
-      optparser, "Display mode selection (choose one)")
-  optgroup.add_option(
-      "-a", "--averages", action="store_true",
-      dest="print_averages",
-      help="Display a summary of the averages of chosen statistics (default).")
-  optgroup.add_option(
-      "-r", "--rawdata", action="store_true",
-      dest="print_raw",
-      help="Display raw data from all boot iterations.")
-  optparser.add_option_group(optgroup)
-  optparser.set_defaults(print_averages=False)
-  optparser.set_defaults(print_raw=False)
-  return optparser
-
-
-def _ProcessDisplayOptions(options):
-  """Determine options controlling the display format.
-
-  Command options allow choosing either raw data format, or summary
-  statistics format.  The default option is the summary format.
-  It's not allowed to select both formats.
-
-  @param options Parsed command line options data.
-
-  """
-  display_count = 0
-  if options.print_averages:
-    display_count += 1
-    printfunc = perfprinter.PrintStatisticsSummary
-  if options.print_raw:
-    display_count += 1
-    printfunc = perfprinter.PrintRawData
-  if display_count == 0:
-    printfunc = perfprinter.PrintStatisticsSummary
-  elif display_count > 1:
-    print >>sys.stderr, "Can't use -a and -r together.\n"
-    return None
-  return printfunc
-
-
-def _ProcessEventlistOptions(options):
-  """Determine whether we'll display all events, or a subset.
-
-  Command options allow restricting a chosen key set to a
-  list of specific events.  If the option is present, return
-  the list of events.  Otherwise, return `None`.
-
-  @param options Parsed command line options data.
-
-  """
-  if not options.eventnames:
-    return None
-  eventlist = []
-  for kl in options.eventnames:
-    eventlist.extend(kl.split(','))
-  return eventlist
-
-
-def main(argv):
-  """Canonical main routine."""
-  optparser = _SetupOptions()
-  (options, args) = optparser.parse_args(argv)
-  printfunc = _ProcessDisplayOptions(options)
-  keyset_type = options.keyset
-  eventlist = _ProcessEventlistOptions(options)
-  if printfunc is None or keyset_type is None:
-    optparser.print_help()
-    sys.exit(1)
-  if not args:
-    args = ["."]
-  printfunc(resultsdir.ReadResultsDirectory,
-            args, keyset_type, eventlist)
-
-
-if __name__ == "__main__":
-  main(sys.argv[1:])
diff --git a/site_utils/bvt_allowlist.txt b/site_utils/bvt_allowlist.txt
index e12f803..1b08ae1 100644
--- a/site_utils/bvt_allowlist.txt
+++ b/site_utils/bvt_allowlist.txt
@@ -9,11 +9,11 @@
 graphics_GLBench
 graphics_GLMark2
 graphics_Idle
-graphics_Sanity
-logging_CrashSender
+graphics_Check
 logging_UdevCrash
 logging_UserCrash
 login_LoginSuccess
+login_UserPolicyKeys
 platform_DMVerityBitCorruption
 platform_DMVerityCorruption
 platform_MemCheck
diff --git a/site_utils/check_hung_proc.py b/site_utils/check_hung_proc.py
index 268dcba..1bdf071 100755
--- a/site_utils/check_hung_proc.py
+++ b/site_utils/check_hung_proc.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -22,7 +22,7 @@
 from autotest_lib.server import site_utils
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = site_utils.metrics_mock
 
diff --git a/site_utils/cleanup_tko_db.py b/site_utils/cleanup_tko_db.py
index e8698de..a85f49c 100755
--- a/site_utils/cleanup_tko_db.py
+++ b/site_utils/cleanup_tko_db.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -16,8 +16,8 @@
 from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.common_lib import logging_config
 
-from chromite.lib import metrics
-from chromite.lib import ts_mon_config
+from autotest_lib.utils.frozen_chromite.lib import metrics
+from autotest_lib.utils.frozen_chromite.lib import ts_mon_config
 
 
 CONFIG = global_config.global_config
diff --git a/site_utils/cloud_console.proto b/site_utils/cloud_console.proto
index 25c2fa9..618e844 100644
--- a/site_utils/cloud_console.proto
+++ b/site_utils/cloud_console.proto
@@ -21,7 +21,7 @@
 // It represents a point in time independent of any time zone
 // or calendar, represented as seconds and fractions of seconds at
 // nanosecond resolution.
-// Clone from https://github.com/google/protobuf/blob/master/src/google/protobuf/timestamp.proto.
+// Clone from https://github.com/google/protobuf/blob/master/src/google/protobuf/timestamp.proto. //nocheck
 message Timestamp {
   optional int64 seconds = 1;
   // Non-negative fractions of a second at nanosecond resolution.
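The Timestamp message above represents a point in time as a whole-second count plus a non-negative nanosecond remainder. A minimal illustrative Python sketch (an assumption for illustration, not part of this change) of how a POSIX float timestamp could be split into those two fields:

    import time

    def to_timestamp_fields(t=None):
        # Split a POSIX timestamp into the (seconds, nanos) pair used by the
        # Timestamp message; truncation keeps nanos in [0, 1e9) for positive t.
        t = time.time() if t is None else t
        seconds = int(t)
        nanos = int((t - seconds) * 1e9)
        return seconds, nanos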
diff --git a/site_utils/cloud_console_client.py b/site_utils/cloud_console_client.py
index f3cd9ce..51acde5 100644
--- a/site_utils/cloud_console_client.py
+++ b/site_utils/cloud_console_client.py
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-"""Chrome OS Parnter Concole remote actions."""
+"""ChromeOS Parnter Concole remote actions."""
 
 from __future__ import print_function
 
diff --git a/site_utils/cloud_console_client_unittest.py b/site_utils/cloud_console_client_unittest.py
index c669619..64e9ee8 100644
--- a/site_utils/cloud_console_client_unittest.py
+++ b/site_utils/cloud_console_client_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/site_utils/control_file_preprocessor.py b/site_utils/control_file_preprocessor.py
index ed96d35..171afaa 100755
--- a/site_utils/control_file_preprocessor.py
+++ b/site_utils/control_file_preprocessor.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 20123 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -12,7 +12,7 @@
 {'suite1': ['path/to/test1/control', 'path/to/test2/control'],
  'suite2': ['path/to/test4/control', 'path/to/test5/control']}
 
-This is intended for use only with Chrome OS test suites that leverage the
+This is intended for use only with ChromeOS test suites that leverage the
 dynamic suite infrastructure in server/cros/dynamic_suite.py. It is invoked
 at build time to generate said suite to control files map, which dynamic_suite
 consults at run time to determine which tests belong to a suite.
diff --git a/site_utils/count_jobs.py b/site_utils/count_jobs.py
index 798f431..0bafcc3 100755
--- a/site_utils/count_jobs.py
+++ b/site_utils/count_jobs.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -16,7 +16,7 @@
 from autotest_lib.server import site_utils
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = site_utils.metrics_mock
 
diff --git a/site_utils/count_jobs_unittest.py b/site_utils/count_jobs_unittest.py
index c7a9d24..fa98fff 100755
--- a/site_utils/count_jobs_unittest.py
+++ b/site_utils/count_jobs_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -9,19 +9,18 @@
 from __future__ import print_function
 
 from datetime import timedelta, datetime
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.frontend import setup_django_readonly_environment
 from autotest_lib.frontend import setup_test_environment
 from autotest_lib.frontend.afe import models
 from autotest_lib.site_utils import count_jobs
-from django import test
 from six.moves import range
 
 
-class TestCountJobs(test.TestCase):
+class TestCountJobs(unittest.TestCase):
     """Tests the count_jobs script's functionality.
     """
 
diff --git a/site_utils/create_attr_allowlist.py b/site_utils/create_attr_allowlist.py
index 8c2e04f..77343e9 100755
--- a/site_utils/create_attr_allowlist.py
+++ b/site_utils/create_attr_allowlist.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/site_utils/deployment/cmdparse_unittest.py b/site_utils/deployment/cmdparse_unittest.py
index 458c972..8820880 100755
--- a/site_utils/deployment/cmdparse_unittest.py
+++ b/site_utils/deployment/cmdparse_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import contextlib
 import sys
diff --git a/site_utils/deployment/cmdvalidate.py b/site_utils/deployment/cmdvalidate.py
index cbd228e..be35c86 100644
--- a/site_utils/deployment/cmdvalidate.py
+++ b/site_utils/deployment/cmdvalidate.py
@@ -163,7 +163,7 @@
     """Return whether a given board exists in Google storage.
 
     For purposes of this function, a board exists if it has a
-    "LATEST-master" file in its release builder's directory.
+    "LATEST-main" file in its release builder's directory.
 
     N.B. For convenience, this function prints an error message
     on stderr in certain failure cases.  This is currently useful
@@ -178,12 +178,8 @@
     if board is None:
         return False
 
-    # TODO b:169251326 terms below (and in the comment above) are set outside
-    # of this codebase and should be updated when possible.
-    # ("master" -> "main")
-
     # Check Google storage; report failures on stderr.
-    if _build_path_exists(board, 'LATEST-master'):
+    if _build_path_exists(board, 'LATEST-main'):
         return True
     else:
         sys.stderr.write('Board %s doesn\'t exist.\n' % board)
@@ -217,7 +213,7 @@
 def _validate_hostname(hostname):
     """Return whether a given hostname is valid for the test lab.
 
-    This is a sanity check meant to guarantee that host names follow
+    This is a validity check meant to guarantee that host names follow
     naming requirements for the test lab.
 
     N.B. For convenience, this function prints an error message
diff --git a/site_utils/deployment/deploy.py b/site_utils/deployment/deploy.py
index fda427a..1b730a5 100755
--- a/site_utils/deployment/deploy.py
+++ b/site_utils/deployment/deploy.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/site_utils/deployment/install.py b/site_utils/deployment/install.py
index c837294..c9fd1b2 100644
--- a/site_utils/deployment/install.py
+++ b/site_utils/deployment/install.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -159,7 +159,7 @@
 
     @param board  The board to look up from GoldenEye.
 
-    @return Returns a Chrome OS version string in standard form
+    @return Returns a ChromeOS version string in standard form
             R##-####.#.#.  Will return `None` if no Beta channel
             entry is found.
     """
@@ -740,8 +740,8 @@
         # We'll choose first port available in descending order.
         for port in xrange(start_port, end_port - 1, -1):
             if port not in used_ports:
-              servo_port = port
-              break
+                servo_port = port
+                break
     used_ports.append(servo_port)
     used_servo_ports[servo_hostname] = used_ports
     return servo_port
diff --git a/site_utils/deployment/install_unittest.py b/site_utils/deployment/install_unittest.py
index c907fbb..db294b3 100755
--- a/site_utils/deployment/install_unittest.py
+++ b/site_utils/deployment/install_unittest.py
@@ -4,8 +4,8 @@
 
 """Test the install module."""
 
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.site_utils.deployment import install
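This hunk, like several other unit-test hunks in this change, swaps the external mock package for unittest.mock, which ships with Python 3. A small compatibility sketch (an illustration, not code from this change) for a module that still has to import cleanly under both interpreters:

    try:
        from unittest import mock  # Python 3: mock is in the standard library
    except ImportError:
        import mock                # Python 2: fall back to the external package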
diff --git a/site_utils/deployment/prepare/dut.py b/site_utils/deployment/prepare/dut.py
index 6b80285..3c31b46 100644
--- a/site_utils/deployment/prepare/dut.py
+++ b/site_utils/deployment/prepare/dut.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -103,7 +103,7 @@
     """Download the given image to the USB attached to host's servo.
 
     @param host   A server.hosts.Host object.
-    @param build  A Chrome OS version string for the build to download.
+    @param build  A ChromeOS version string for the build to download.
     """
     _, update_url = host.stage_image_for_servo(build)
     host.servo.image_to_servo_usb(update_url)
@@ -177,6 +177,12 @@
         logging.info("Skepping due DUT does not have the battery")
         return
     power_info = host.get_power_supply_info()
+    # Due to battery overheating in the audio-boxes, the device can be
+    # deployed without a battery.
+    if 'Battery' not in power_info and host_info.has_label('audio_box'):
+        logging.info('Device does not have a battery.'
+                     ' Skipping battery verification for the audio_box setup.')
+        return
     battery_path = power_info['Battery']['path']
     cmd = 'cat %s/status' % battery_path
     status = host.run(cmd, timeout=30, ignore_status=True).stdout.strip()
@@ -363,7 +369,7 @@
         try:
             host.run('chromeos-tpm-recovery')
         except error.AutoservRunError:
-            logging.warn('chromeos-tpm-recovery is too old.')
+            logging.warning('chromeos-tpm-recovery is too old.')
     except Exception:
         # Restore the servo_v4 role to src if we called boot_in_recovery_mode
         # method with snk_mode=True earlier. If no exception raise, recover
@@ -405,7 +411,7 @@
     else:
         raise Exception('DUT failed to boot from USB for install test image.')
 
-    host.run('chromeos-install --yes', timeout=host.INSTALL_TIMEOUT)
+    host.run('chromeos-install --yes', timeout=host.ADMIN_INSTALL_TIMEOUT)
 
     logging.info("Rebooting DUT to boot from hard drive.")
     try:
diff --git a/site_utils/deployment/prepare/main.py b/site_utils/deployment/prepare/main.py
index 8f2256f..5774305 100755
--- a/site_utils/deployment/prepare/main.py
+++ b/site_utils/deployment/prepare/main.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 # Copyright 2019 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -29,6 +29,7 @@
 
 RETURN_CODES = autotest_enum.AutotestEnum(
         'OK',
+        'SERVO_VERIFICATION_FAILURE',
         'STAGE_USB_FAILURE',
         'INSTALL_FIRMWARE_FAILURE',
         'INSTALL_TEST_IMAGE_FAILURE',
@@ -68,6 +69,14 @@
 
         is_labstation = (host_info.get().os == "labstation")
 
+        if 'servo-verification' in opts.actions:
+            try:
+                if not is_labstation:
+                    preparedut.verify_servo(host)
+            except Exception as err:
+                logging.error("fail to check servo: %s", err)
+                return RETURN_CODES.SERVO_VERIFICATION_FAILURE
+
         if 'stage-usb' in opts.actions:
             try:
                 repair_image = afe_utils.get_stable_cros_image_name_v2(
@@ -142,9 +151,10 @@
             'actions',
             nargs='+',
             choices=[
-                    'stage-usb', 'install-test-image', 'install-firmware',
-                    'verify-recovery-mode', 'run-pre-deploy-verification',
-                    'update-label', 'setup-labstation'
+                    'servo-verification', 'stage-usb', 'install-test-image',
+                    'install-firmware', 'verify-recovery-mode',
+                    'run-pre-deploy-verification', 'update-label',
+                    'setup-labstation'
             ],
             help='DUT preparation actions to execute.',
     )
@@ -218,6 +228,7 @@
                                       host_info_store=host_info,
                                       try_lab_servo=need_servo,
                                       try_servo_repair=need_servo,
+                                      try_servo_recovery=need_servo,
                                       servo_uart_logs_dir=dut_logs_dir)
 
 
diff --git a/site_utils/diagnosis_utils.py b/site_utils/diagnosis_utils.py
index f9f221c..976245b 100755
--- a/site_utils/diagnosis_utils.py
+++ b/site_utils/diagnosis_utils.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/site_utils/diagnosis_utils_unittest.py b/site_utils/diagnosis_utils_unittest.py
index 6c3cc08..a4cc075 100755
--- a/site_utils/diagnosis_utils_unittest.py
+++ b/site_utils/diagnosis_utils_unittest.py
@@ -1,10 +1,11 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2016 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import unittest
-import mock
+from unittest import mock
+import six
 
 import common
 from autotest_lib.site_utils import diagnosis_utils
@@ -104,7 +105,10 @@
         self.status = status
         self.locked = locked
 
-    is_available = frontend.Host.is_available.__func__
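+    # In Python 2 an unbound method has to be unwrapped via __func__ to be
+    # reused as a plain function; in Python 3 functions defined on a class
+    # are already plain functions, so no unwrapping is needed.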
+    if six.PY2:
+        is_available = frontend.Host.is_available.__func__
+    elif six.PY3:
+        is_available = frontend.Host.is_available
 
 
 if __name__ == '__main__':
diff --git a/site_utils/docker/__init__.py b/site_utils/docker/__init__.py
new file mode 100644
index 0000000..80f1d03
--- /dev/null
+++ b/site_utils/docker/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/site_utils/docker/utils.py b/site_utils/docker/utils.py
new file mode 100644
index 0000000..5552483
--- /dev/null
+++ b/site_utils/docker/utils.py
@@ -0,0 +1,88 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import logging
+try:
+    import docker
+except ImportError:
+    logging.info("Docker API is not installed in this environment")
+
+env_vars = os.environ
+
+# Default docker socket.
+DOCKER_SOCKET = env_vars.get('DOCKER_SOCKET', '/var/run/docker.sock')
+
+# This is the default IP where the docker daemon is running on the Satlab.
+DEFAULT_DOCKER_SERVER_IP = '192.168.231.1'
+# This is the default port on which the docker daemon is listening on the Satlab.
+DEFAULT_DOCKER_TCP_SERVER_PORT = '2375'
+# Optional docker tcp ip address/port dockerd listens to.
+DOCKER_TCP_SERVER_IP = env_vars.get('DOCKER_TCP_SERVER_IP',
+                                    DEFAULT_DOCKER_SERVER_IP)
+DOCKER_TCP_SERVER_PORT = env_vars.get('DOCKER_TCP_SERVER_PORT',
+                                      DEFAULT_DOCKER_TCP_SERVER_PORT)
+
+
+def get_docker_client(timeout=300):
+    """
+    Get a client for the host Docker server, either via the default Docker
+    socket or a TCP connection.
+    """
+    # Create the docker client over TCP if the docker socket
+    # (/var/run/docker.sock) doesn't exist on the machine or when the TCP
+    # server IP is not the default; otherwise create it from the docker
+    # socket file.
+    if os.path.exists(DOCKER_SOCKET
+                      ) and DEFAULT_DOCKER_SERVER_IP == DOCKER_TCP_SERVER_IP:
+        client = docker.from_env(timeout=timeout)
+    else:
+        tcp_connection = "tcp://{}:{}".format(DOCKER_TCP_SERVER_IP,
+                                              DOCKER_TCP_SERVER_PORT)
+        client = docker.DockerClient(base_url=tcp_connection, timeout=timeout)
+    return client
+
+
+def get_running_containers(client=None):
+    """
+    Return the names of running containers
+    """
+    if client is None:
+        client = get_docker_client()
+    containers = client.containers.list()
+    return [c.name for c in containers]
+
+
+def get_container_networks(container_name, client=None):
+    """
+    Return the list of networks of the container, or [] if the container is
+    not found.
+    """
+    if client is None:
+        client = get_docker_client()
+    containers = get_running_containers(client)
+    if container_name not in containers:
+        return []
+    else:
+        container = client.containers.get(container_name)
+        return container.attrs['NetworkSettings']['Networks'].keys()
+
+
+def get_container_ip(container_name, client=None):
+    """
+    Return the IP address of the container on its network. Return None if the
+    container is not found or not running.
+    """
+    if client is None:
+        client = get_docker_client()
+    try:
+        container = client.containers.get(container_name)
+        if container and container.status == 'running':
+            container_network = os.environ.get("DOCKER_DEFAULT_NETWORK",
+                                               "default_satlab")
+            return container.attrs['NetworkSettings']['Networks'][
+                    container_network]['IPAddress']
+        logging.exception("Servod container %s found but not running",
+                          container_name)
+    except docker.errors.APIError:
+        logging.exception("Failed to access servod container.")
+    except docker.errors.NotFound:
+        logging.exception("Servod container %s Not Found", container_name)
+    return None
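+
+
+# Usage sketch (illustrative only; the container name 'servod_docker' is an
+# assumption and varies per deployment):
+#
+#   from autotest_lib.site_utils.docker import utils as docker_utils
+#
+#   client = docker_utils.get_docker_client(timeout=60)
+#   if 'servod_docker' in docker_utils.get_running_containers(client):
+#       ip = docker_utils.get_container_ip('servod_docker', client)
+#       logging.info('servod container reachable at %s', ip)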
diff --git a/site_utils/dump_suite_report.py b/site_utils/dump_suite_report.py
deleted file mode 100755
index 1e9b30f..0000000
--- a/site_utils/dump_suite_report.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Generate a report on a given suite run."""
-
-from __future__ import print_function
-
-import common
-import argparse
-import logging
-import sys
-
-from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
-from autotest_lib.server.lib import suite_report
-from chromite.lib import ts_mon_config
-
-def GetParser():
-    """Creates the argparse parser."""
-    parser = argparse.ArgumentParser(description=__doc__)
-    parser.add_argument('job_ids', type=int, nargs='+',
-                        help='Suite job ids to dump')
-    # As a provision suite may exit before its all provision jobs finish, the
-    # provision suite report may not contain all the provision jobs. This hack
-    # helps to dump report for all provision jobs under the given provision
-    # suite (provision-job-id). See more details in crbug.com/794346
-    parser.add_argument('--provision-job-id', type=int,
-                        help='Provision suite job id to dump report.')
-    parser.add_argument('--output', '-o', type=str, action='store',
-                        help='Path to write JSON file to')
-    parser.add_argument('--afe', type=str, action='store',
-                        help='AFE server to connect to')
-    return parser
-
-
-def main(argv):
-    """Standard main() for command line processing.
-
-    @param argv Command line arguments (normally sys.argv).
-    """
-
-    parser = GetParser()
-    options = parser.parse_args(argv[1:])
-
-    with ts_mon_config.SetupTsMonGlobalState('dump_suite_report'):
-
-        afe = frontend_wrappers.RetryingAFE(timeout_min=5, delay_sec=10,
-                                            server=options.afe)
-        tko = frontend_wrappers.RetryingTKO(timeout_min=5, delay_sec=10)
-
-        job_ids = set(options.job_ids)
-        if options.provision_job_id:
-            job_ids.add(options.provision_job_id)
-
-        # Look up and generate entries for all jobs.
-        entries = []
-        for suite_job_id in job_ids:
-            reset_finish_time = (suite_job_id == options.provision_job_id)
-
-            logging.debug('Suite job %s:' % suite_job_id)
-            suite_entries = suite_report.generate_suite_report(
-                suite_job_id, afe=afe, tko=tko,
-                reset_finish_time=reset_finish_time)
-            logging.debug('... generated %d entries' % len(suite_entries))
-            entries.extend(suite_entries)
-
-        # Write all entries as JSON.
-        if options.output:
-            with open(options.output, 'w') as f:
-                suite_report.dump_entries_as_json(entries, f)
-        else:
-            suite_report.dump_entries_as_json(entries, sys.stdout)
-
-
-if __name__ == '__main__':
-    main(sys.argv)
diff --git a/site_utils/dump_to_cloudsql.py b/site_utils/dump_to_cloudsql.py
deleted file mode 100755
index 2639a9b..0000000
--- a/site_utils/dump_to_cloudsql.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Module to upload a MySQL dump file to Cloud SQL.
-
-Usage:
-  dump_to_cloudsql.py [-h] [--resume NUM] [--user USER] [--passwd PASSWD] FILE
-                      [REMOTE]
-
-  Uploads MySQL dump file to a MySQL database or Cloud SQL. With no optional
-  arguments will connect to localhost as root with an empty password.
-
-  positional arguments:
-    FILE             text dump file containing MySQL commands
-    REMOTE           Cloud SQL instance name or MySQL hostname
-
-  optional arguments:
-    -h, --help       show this help message and exit
-    --resume NUM     resume dump at command NUM
-    --user USER      user (ignored for CloudSQL)
-    --passwd PASSWD  passwd (ignored for CloudSQL)
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-import argparse
-import collections
-import datetime
-import os
-import re
-import sys
-import time
-import six
-
-
-BYTES_PER_GB = 2**30
-
-
-class MySQLConnectionManager(object):
-    """Manages connections to a MySQL database.
-
-    Vars:
-      factory: A *ConnectionFactory.
-      connected: Whether we currently hold a live DB connection.
-      cmd_num: The number of commands executed.
-    """
-    def __init__(self, connection_factory):
-        self.factory = connection_factory
-        self.connected = False
-        self.cmd_num = 0
-
-    def write(self, data, execute_cmd=True, increment_cmd=False):
-        """Buffers writes to command boundaries.
-
-        Args:
-          data: A line of data from the MySQL dump.
-          execute_cmd: Whether to execute the command, defaults to True.
-          increment_cmd: Whether to increment cmd_num, defaults to False.
-          """
-        if not data or not data.strip() or data == '\n' or data[:2] == '--':
-            return
-        self._cmd += data[:-1] if data[-1] == '\n' else data
-        if self._cmd[-1] != ';':
-            return
-        # Execute command.
-        if execute_cmd:
-            self._cursor.execute(six.ensure_text(self._cmd, 'utf-8'))
-        self._cmd = ''
-        if increment_cmd:
-            self.cmd_num += 1
-
-    def disconnect(self):
-      """Closes the current database connection."""
-      if self.connected:
-          self.connected = False
-          self._cursor.close()
-          self._db.close()
-
-    def connect(self):
-      """Creates a new database connection."""
-      self.disconnect()
-      self._db = self.factory.connect()
-      self.connected = True
-      self._cursor = self._db.cursor()
-      self._cmd = ''
-
-
-class CloudSQLConnectionFactory(object):
-    """Creates Cloud SQL database connections."""
-    def __init__(self, cloudsql_instance):
-        self._instance = cloudsql_instance
-
-    def connect(self):
-        """Connects to the Cloud SQL database and returns the connection.
-
-        Returns:
-          A MySQLdb compatible database connection to the Cloud SQL instance.
-        """
-        print('Connecting to Cloud SQL instance %s.' % self._instance)
-        try:
-            from google.storage.speckle.python.api import rdbms_googleapi
-        except ImportError:
-            sys.exit('Unable to import rdbms_googleapi. Add the AppEngine SDK '
-                     'directory to your PYTHONPATH. Download the SDK from: '
-                     'https://developers.google.com/appengine/downloads')
-        return rdbms_googleapi.connect(None, instance=self._instance)
-
-
-class LocalSQLConnectionFactory(object):
-    """Creates local MySQL database connections."""
-    def __init__(self, host=None, user='root', passwd=''):
-        if not host:
-          host = 'localhost'
-        self._host = host
-        self._user = user
-        self._passwd = passwd
-
-    def connect(self):
-        """Connects to the local MySQL database and returns the connection.
-
-        Returns:
-          A MySQLdb database connection to the local MySQL database.
-        """
-        print('Connecting to mysql at localhost as %s.' % self._user)
-        try:
-            import MySQLdb
-        except ImportError:
-            sys.exit('Unable to import MySQLdb. To install on Ubuntu: '
-                     'apt-get install python-mysqldb')
-        return MySQLdb.connect(host=self._host, user=self._user,
-                               passwd=self._passwd)
-
-
-class MySQLState(object):
-    """Maintains the MySQL global state.
-
-    This is a hack that keeps record of all MySQL lines that set global state.
-    These are needed to reconstruct the MySQL state on resume.
-    """
-    _set_regex = re.compile('\S*\s*SET(.*)[\s=]')
-
-    def __init__(self):
-        self._db_line = ''
-        self._table_lock = []
-        self._sets = collections.OrderedDict()
-
-    def process(self, line):
-        """Check and save lines that affect the global state.
-
-        Args:
-          line: A line from the MySQL dump file.
-        """
-        # Most recent USE line.
-        if line[:3] == 'USE':
-            self._db_line = line
-        # SET variables.
-        m = self._set_regex.match(line)
-        if m:
-            self._sets[m.group(1).strip()] = line
-        # Maintain LOCK TABLES
-        if (line[:11] == 'LOCK TABLES' or
-            ('ALTER TABLE' in line and 'DISABLE KEYS' in line)):
-            self._table_lock.append(line)
-        if (line[:14] == 'UNLOCK TABLES;'):
-            self._table_lock = []
-
-    def write(self, out):
-        """Print lines to recreate the saved state.
-
-        Args:
-          out: A File-like object to write out saved state.
-        """
-        out.write(self._db_line)
-        for v in six.itervalues(self._sets):
-            out.write(v)
-        for l in self._table_lock:
-            out.write(l)
-
-    def breakpoint(self, line):
-      """Returns true if we can handle breaking after this line.
-
-      Args:
-        line: A line from the MySQL dump file.
-
-      Returns:
-        Boolean indicating whether we can break after |line|.
-      """
-      return (line[:28] == '-- Table structure for table' or
-              line[:11] == 'INSERT INTO')
-
-
-def dump_to_cloudsql(dumpfile, manager, cmd_offset=0):
-    """Dumps a MySQL dump file to a database through a MySQLConnectionManager.
-
-    Args:
-      dumpfile: Path to a file from which to read the MySQL dump.
-      manager: An instance of MySQLConnectionManager.
-      cmd_offset: No commands will be executed on the database before this count
-        is reached. Used to continue an uncompleted dump. Defaults to 0.
-    """
-    state = MySQLState()
-    total = os.path.getsize(dumpfile)
-    start_time = time.time()
-    line_num = 0
-    with open(dumpfile, 'r') as dump:
-        for line in dump:
-            line_num += 1
-            if not manager.connected:
-                manager.connect()
-            try:
-                # Construct commands from lines and execute them.
-                state.process(line)
-                if manager.cmd_num == cmd_offset and cmd_offset != 0:
-                    print('\nRecreating state at line: %d' % line_num)
-                    state.write(manager)
-                manager.write(line, manager.cmd_num >= cmd_offset, True)
-                # Print status.
-                sys.stdout.write(
-                    '\rstatus:  %.3f%%     %0.2f GB     %d commands ' %
-                    (100 * dump.tell() / total, dump.tell() / BYTES_PER_GB,
-                     manager.cmd_num))
-                sys.stdout.flush()
-            # Handle interrupts and connection failures.
-            except KeyboardInterrupt:
-                print('\nInterrupted while executing command: %d' %
-                      manager.cmd_num)
-                raise
-            except:
-                print('\nFailed while executing command: %d' % manager.cmd_num)
-                delta = int(time.time() - start_time)
-                print('Total time: %s' % str(datetime.timedelta(seconds=delta)))
-                if state.breakpoint(line):
-                    # Attempt to resume.
-                    print('Execution can resume from here (line = %d)' %
-                          line_num)
-                    manager.cmd_num += 1
-                    cmd_offset = manager.cmd_num
-                    print('Will now attempt to auto-resume at command: %d' %
-                          cmd_offset)
-                    manager.disconnect()
-                else:
-                    print('Execution may fail to resume correctly from here.')
-                    print('Use --resume=%d to attempt to resume the dump.' %
-                          manager.cmd_num)
-                    raise
-    print('\nDone.')
-
-
-if __name__ == '__main__':
-    """Imports a MySQL database from a dump file.
-
-    Interprets command line arguments and calls dump_to_cloudsql appropriately.
-    """
-    description = """Uploads MySQL dump file to a MySQL database or Cloud SQL.
-                  With no optional arguments will connect to localhost as root
-                  with an empty password."""
-    parser = argparse.ArgumentParser(description=description)
-    parser.add_argument('mysqldump', metavar='FILE',
-                        help='text dump file containing MySQL commands')
-    parser.add_argument('remote', default=None, nargs='?', metavar='REMOTE',
-        help='either a Cloud SQL account:instance or a hostname')
-    parser.add_argument('--resume', default=0, type=int, metavar='NUM',
-                        help='resume dump at command NUM')
-    parser.add_argument('--user', default='root', metavar='USER',
-                        help='user (ignored for Cloud SQL)')
-    parser.add_argument('--passwd', default='', metavar='PASSWD',
-                        help='passwd (ignored for Cloud SQL)')
-    args = parser.parse_args()
-    if args.remote and ':' in args.remote:
-        connection = CloudSQLConnectionFactory(args.remote)
-    else:
-        connection = LocalSQLConnectionFactory(args.remote, args.user,
-                                               args.passwd)
-    if args.resume:
-        print('Resuming execution at command: %d' % options.resume)
-    dump_to_cloudsql(args.mysqldump, MySQLConnectionManager(connection),
-                     args.resume)
diff --git a/site_utils/dut_status.py b/site_utils/dut_status.py
index 5f07c03..bb4dba9 100755
--- a/site_utils/dut_status.py
+++ b/site_utils/dut_status.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -445,7 +445,7 @@
                         nargs='*',
                         help='Host names of DUTs to report on')
     parser.add_argument('--web',
-                        help='Master autotest frontend hostname. If no value '
+                        help='Autotest frontend hostname. If no value '
                              'is given, the one in global config will be used.',
                         default=None)
     arguments = parser.parse_args(argv[1:])
diff --git a/site_utils/dut_status_unittest.py b/site_utils/dut_status_unittest.py
index 2c59a5a..7ba611a 100644
--- a/site_utils/dut_status_unittest.py
+++ b/site_utils/dut_status_unittest.py
@@ -1,11 +1,11 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import mock
 import time
 import unittest
+from unittest import mock
 
 import common
 
diff --git a/site_utils/generate_test_report b/site_utils/generate_test_report
index 3944f36..6dc0bf0 100755
--- a/site_utils/generate_test_report
+++ b/site_utils/generate_test_report
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -10,13 +10,20 @@
 generates test reports.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import datetime
 import glob
+import json
 import logging
 import operator
 import optparse
 import os
 import re
+import six
+from six.moves import range
 import sys
 
 import common
@@ -159,12 +166,12 @@
             w = _CRASH_ALLOWLIST.get(match.group(1))
             if (self._allow_chrome_crashes and
                     chrome_regex.match(match.group(1))):
-                print '@@@STEP_WARNINGS@@@'
-                print '%s crashed with %s' % (match.group(1), match.group(2))
+                print('@@@STEP_WARNINGS@@@')
+                print('%s crashed with %s' % (match.group(1), match.group(2)))
             elif (w is not None and match.group(2) in w.signals and
                         w.deadline > datetime.datetime.now()):
-                print 'Ignoring crash in %s for waiver that expires %s' % (
-                        match.group(1), w.deadline.strftime('%Y-%b-%d'))
+                print('Ignoring crash in %s for waiver that expires %s' % (
+                        match.group(1), w.deadline.strftime('%Y-%b-%d')))
             else:
                 crashes.append('%s %s' % match.groups())
         return crashes
@@ -190,9 +197,9 @@
         info = custom_info
 
         sysinfo_dir = os.path.join(testdir, 'sysinfo', 'var', 'log')
-        for info_file, info_keys in {'ec_info.txt': ['fw_version'],
-                                     'bios_info.txt': ['fwid',
-                                                       'hwid']}.iteritems():
+        for info_file, info_keys in six.iteritems(
+            {'ec_info.txt': ['fw_version'],
+             'bios_info.txt': ['fwid', 'hwid']}):
             info_file_path = os.path.join(sysinfo_dir, info_file)
             if not os.path.isfile(info_file_path):
                 continue
@@ -240,7 +247,7 @@
             # There may be multiple lines with timestamp/localtime info.
             # The last one found is selected because it will reflect the end
             # time.
-            for i in xrange(len(matches)):
+            for i in range(len(matches)):
                 timestamp_, localtime_ = matches[-(i+1)]
                 if not timestamp or timestamp_ > timestamp:
                     timestamp = timestamp_
@@ -268,6 +275,48 @@
             else:
                 return False
 
+    def _get_failure_msg_from_status(self, status_raw):
+        reason_tags = 'ABORT|ERROR|FAIL|WARN|TEST_NA'
+        match = re.search(r'^\t+(%s)\t(.+)' % (reason_tags),
+                          status_raw, re.MULTILINE)
+
+        error_msg = 'Reason Unknown'
+        if match:
+            failure_type = match.group(1)
+            reason = match.group(2).split('\t')[4]
+            if self._escape_error:
+                reason = re.escape(reason)
+            error_msg = ': '.join([failure_type, reason])
+
+        return error_msg
+
+    def _get_full_status(self, status_raw):
+        """Collect the full status of a test, and err msg if any.
+
+        This will grab the full status, rather than just pass/fail.
+        Additionally, if there is an err msg, it will be scraped as well.
+
+        @param status_raw: the status log, as a string.
+
+        @return The full status, and the err msg, if any.
+
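+        Illustrative examples (log excerpts abbreviated): a status log that
+        contains an 'END FAIL' line yields ('Fail', 'FAIL: <reason>'); a log
+        whose END line is 'GOOD ... completed successfully' yields
+        ('Pass', None).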
+        """
+        status = 'Error'
+        if re.search('FAIL', status_raw):
+            status = 'Fail'
+        elif re.search('ERROR', status_raw):
+            status = 'Error'
+        elif re.search('ABORT', status_raw):
+            status = 'Abort'
+        elif re.search('WARN', status_raw):
+            status = 'Warn'
+        elif re.search('TEST_NA', status_raw):
+            status = 'Not Run'
+        elif re.search(r'GOOD.+completed successfully', status_raw):
+            status = 'Pass'
+            return status, None
+
+        return status, self._get_failure_msg_from_status(status_raw)
 
     def _CollectResult(self, testdir, results, is_experimental=False):
         """Collects results stored under testdir into a dictionary.
@@ -307,8 +356,11 @@
 
         """
         status_file = os.path.join(testdir, 'status.log')
+        top_level = True
+
         if not os.path.isfile(status_file):
             status_file = os.path.join(testdir, 'status')
+            top_level = False
             if not os.path.isfile(status_file):
                 return
 
@@ -328,42 +380,54 @@
             status = True
 
         if not good:
-            match = re.search(r'^\t+(%s|%s)\t(.+)' % (failure_tags,
-                                                      warning_tag),
-                              status_raw, re.MULTILINE)
-            if match:
-                failure_type = match.group(1)
-                reason = match.group(2).split('\t')[4]
-                if self._escape_error:
-                    reason = re.escape(reason)
-                error_msg = ': '.join([failure_type, reason])
+            error_msg = self._get_failure_msg_from_status(status_raw)
 
         # Grab the timestamp - can be used for sorting the test runs.
         # Grab the localtime - may be printed to enable line filtering by date.
         # Designed to match a line like this:
         #   END GOOD testname ... timestamp=1347324321 localtime=Sep 10 17:45:21
         status_re = r'GOOD|%s|%s' % (failure_tags, warning_tag)
-        timestamp, localtime = self._CollectEndTimes(status_raw, status_re)
+        endtimestamp, endlocaltime = self._CollectEndTimes(status_raw,
+                                                           status_re)
+        starttimestamp, startlocaltime = self._CollectEndTimes(status_raw,
+                                                               is_end=False)
         # Hung tests will occasionally skip printing the END line so grab
         # a default timestamp from the START line in those cases.
-        if not timestamp:
-            timestamp, localtime = self._CollectEndTimes(status_raw,
-                                                         is_end=False)
+        if not endtimestamp:
+            endtimestamp, endlocaltime = starttimestamp, startlocaltime
+
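+        # full_status acts as a tri-state flag: False means "not decided
+        # yet", None means results were already recorded for an enclosing
+        # test directory (so the full status is skipped), and otherwise it
+        # holds the computed verdict.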
+        full_status = False
+        for r in results:
+            # Already logged results for this test.
+            if r['testdir'] in testdir:
+                full_status, err = None, None
+                break
+
+        if full_status is not None:
+            full_status, err = self._get_full_status(status_raw)
 
         results.append({
                 'testdir': testdir,
                 'crashes': self._CollectCrashes(status_raw),
                 'status': status,
                 'error_msg': error_msg,
-                'localtime': localtime,
-                'timestamp': timestamp,
+                'localtime': endlocaltime,
+                'timestamp': endtimestamp,
                 'perf': self._CollectPerf(testdir),
                 'attr': self._CollectAttr(testdir),
-                'info': self._CollectInfo(testdir, {'localtime': localtime,
-                                                    'timestamp': timestamp}),
-                'experimental': is_experimental})
+                'info': self._CollectInfo(testdir, {'localtime': endlocaltime,
+                                                    'timestamp': endtimestamp}),
+                'experimental': is_experimental,
+                'full_status': full_status,
+                'full_err': err,
+                'startlocaltime': startlocaltime,
+                'starttimestamp': starttimestamp
+                })
 
-    def RecursivelyCollectResults(self, resdir, parent_experimental_tag=False):
+    def RecursivelyCollectResults(self,
+                                  resdir,
+                                  parent_experimental_tag=False,
+                                  results=[]):
         """Recursively collect results into a list of dictionaries.
 
         Only recurses into directories that possess a 'debug' subdirectory
@@ -381,7 +445,6 @@
         @return List of dictionaries of results.
 
         """
-        results = []
         is_experimental = (parent_experimental_tag or
                            self._CheckExperimental(resdir))
         self._CollectResult(resdir, results, is_experimental)
@@ -390,8 +453,7 @@
             if not os.path.exists(os.path.join(testdir, 'debug')):
                 continue
 
-            results.extend(self.RecursivelyCollectResults(
-                    testdir, is_experimental))
+            self.RecursivelyCollectResults(testdir, is_experimental, results)
         return results
 
 
@@ -484,7 +546,8 @@
         @param width: an integer.
         """
         if not self._options.csv:
-            print ''.ljust(width + len(self._STATUS_STRINGS['hr']['pass']), '-')
+            print(''.ljust(width +
+                  len(self._STATUS_STRINGS['hr']['pass']), '-'))
 
     def _PrintEntries(self, entries):
         """Prints a list of strings, delimited based on --csv flag.
@@ -493,7 +556,7 @@
 
         """
         delimiter = ',' if self._options.csv else ' '
-        print delimiter.join(entries)
+        print(delimiter.join(entries))
 
     def _PrintErrors(self, test, error_msg):
         """Prints an indented error message, unless the --csv flag is set.
@@ -526,7 +589,7 @@
                             self._PrintEntries(
                                     [test_string, self._Indent(line.rstrip())])
                 except IOError:
-                    print 'Could not open %s' % path
+                    print('Could not open %s' % path)
 
     def _PrintResultDictKeyVals(self, test_entry, result_dict):
         """Formatted print a dict of keyvals like 'perf' or 'info'.
@@ -542,7 +605,7 @@
         """
         if not result_dict:
             return
-        dict_keys = result_dict.keys()
+        dict_keys = list(result_dict.keys())
         dict_keys.sort()
         width = self._GetTestColumnWidth()
         for dict_key in dict_keys:
@@ -596,7 +659,7 @@
                 continue
 
             deduped_results[test_name] = test
-        return deduped_results.values()
+        return list(deduped_results.values())
 
     def _GetResultsForHTMLReport(self):
         """Return cleaned results for HTML report.!"""
@@ -620,7 +683,7 @@
                 test_name = test_name.split('/')[0]
             if test_status['error_msg'] is None:
                 test_status['error_msg'] = ''
-            if not html_results.has_key(test_name):
+            if test_name not in html_results:
                 count = count + 1
                 # Arranging the results in an order
                 individual_tc_results['status'] = test_status['status']
@@ -830,45 +893,88 @@
             total_tests = len(tests)
             percent_pass = 100 * tests_pass / total_tests
             pass_str = '%d/%d (%d%%)' % (tests_pass, total_tests, percent_pass)
-            print 'Total PASS: ' + self._color.Color(self._color.BOLD, pass_str)
+            print('Total PASS: ' +
+                  self._color.Color(self._color.BOLD, pass_str))
 
         if self._options.crash_detection:
-            print ''
+            print('')
             if crashes:
-                print self._color.Color(self._color.RED,
-                                        'Crashes detected during testing:')
+                print(self._color.Color(self._color.RED,
+                                        'Crashes detected during testing:'))
                 self._PrintDashLine(width)
 
-                for crash_name, crashed_tests in sorted(crashes.iteritems()):
-                    print self._color.Color(self._color.RED, crash_name)
+                for crash_name, crashed_tests in sorted(six.iteritems(crashes)):
+                    print(self._color.Color(self._color.RED, crash_name))
                     for crashed_test in crashed_tests:
-                        print self._Indent(crashed_test)
+                        print(self._Indent(crashed_test))
 
                 self._PrintDashLine(width)
-                print ('Total unique crashes: ' +
-                       self._color.Color(self._color.BOLD, str(len(crashes))))
+                print(('Total unique crashes: ' +
+                       self._color.Color(self._color.BOLD, str(len(crashes)))))
 
             # Sometimes the builders exit before these buffers are flushed.
             sys.stderr.flush()
             sys.stdout.flush()
 
+    def _test_name_from_dir(self, test_dir):
+        """Return the name from the test_dir.
+
+        Examples:
+        /tmp/test_that_latest/something/else/results-n-testname
+            returns `testname`
+
+        /tmp/TTL/something/results-n-test-name-here
+            returns `test-name-here`
+
+        """
+        test_name = test_dir.split('/')[-1]
+        return '-'.join(test_name.split('-')[2:])
+
+    def _translate_to_dict(self):
+        """Return the full_status, testname, and err to a json dict."""
+        res = {'tests': []}
+        for test_info in self._results:
+            if test_info['full_status'] is None:
+                continue
+            res['tests'].append(
+                {'verdict': test_info['full_status'],
+                 'testname': self._test_name_from_dir(test_info['testdir']),
+                 'errmsg': test_info['full_err'],
+                 'resultspath': test_info['testdir'],
+                 'starttime': test_info['starttimestamp'],
+                 'endtime': test_info['timestamp']
+                 })
+        return res
+
+    def _write_simple_json(self):
+        """Write the translated json results to results.json."""
+        if not self._options.html_report_dir:
+            return
+        json_results = self._translate_to_dict()
+        with open(os.path.join(self._options.html_report_dir,
+                               "results.json"), 'w') as wf:
+            json.dump(json_results, wf)
+
     def Run(self):
         """Runs report generation."""
         self._CollectAllResults()
+        self._write_simple_json()
         if not self._options.just_status_code:
             self._GenerateReportText()
             if self._options.html:
-                print "\nLogging the data into test_report.html file."
+                print("\nLogging the data into test_report.html file.")
                 try:
                     self.GenerateReportHTML()
                 except Exception as e:
-                    print "Failed to generate HTML report %s" % str(e)
+                    print("Failed to generate HTML report %s" % str(e))
         for d in self._GetDedupedResults():
             if d['experimental'] and self._options.ignore_experimental_tests:
                 continue
             if not d['status'] or (
                     self._options.crash_detection and d['crashes']):
-                sys.exit(1)
+                # In CFT mode, a failing test should not make the report
+                # exit with a non-zero status.
+                if not self._options.is_cft:
+                    sys.exit(1)
 
 
 def main():
@@ -876,7 +982,8 @@
     parser = optparse.OptionParser(usage=usage)
     parser.add_option('--color', dest='color', action='store_true',
                       default=_STDOUT_IS_TTY,
-                      help='Use color for text reports [default if TTY stdout]')
+                      help='Use color for text reports [default if TTY stdout]'
+                      )
     parser.add_option('--no-color', dest='color', action='store_false',
                       help='Don\'t use color for text reports')
     parser.add_option('--no-crash-detection', dest='crash_detection',
@@ -924,6 +1031,11 @@
                       dest='just_status_code',
                       action='store_true', default=False,
                       help='Skip generating a report, just return status code.')
+    parser.add_option('--cft',
+                      dest='is_cft',
+                      action='store_true', default=False,
+                      help='If set, do not return 1 on test failure.')
+
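+    # Illustrative invocation: report on a results directory without failing
+    # the caller when individual tests fail:
+    #   generate_test_report --cft /tmp/test_that_latest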
 
     (options, args) = parser.parse_args()
 
diff --git a/site_utils/gmail_lib.py b/site_utils/gmail_lib.py
index f5df276..7daf01e 100755
--- a/site_utils/gmail_lib.py
+++ b/site_utils/gmail_lib.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -25,36 +25,21 @@
 import httplib2
 import logging
 import sys
-import os
 import random
 from email.mime.text import MIMEText
 
 import common
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import global_config
 from autotest_lib.server import site_utils
 
 try:
-  from apiclient.discovery import build as apiclient_build
-  from apiclient import errors as apiclient_errors
-  from oauth2client import file as oauth_client_fileio
+    from apiclient.discovery import build as apiclient_build
+    from apiclient import errors as apiclient_errors
+    from oauth2client import file as oauth_client_fileio
 except ImportError as e:
-  apiclient_build = None
-  logging.debug("API client for gmail disabled. %s", e)
-
-# Note: These imports needs to come after the apiclient imports, because
-# of a sys.path war between chromite and autotest crbug.com/622988
-from autotest_lib.server import utils as server_utils
-from chromite.lib import retry_util
-
-try:
-    from chromite.lib import metrics
-except ImportError:
-    metrics = utils.metrics_mock
+    apiclient_build = None
+    logging.debug("API client for gmail disabled. %s", e)
 
 
-DEFAULT_CREDS_FILE = global_config.global_config.get_config_value(
-        'NOTIFICATIONS', 'gmail_api_credentials', default=None)
 RETRY_DELAY = 5
 RETRY_BACKOFF_FACTOR = 1.5
 MAX_RETRY = 10
@@ -141,48 +126,8 @@
     @param creds_path: The credential path for gmail account, if None,
                        will use DEFAULT_CREDS_FILE.
     """
-    auth_creds = server_utils.get_creds_abspath(
-        creds_path or DEFAULT_CREDS_FILE)
-    if not auth_creds or not os.path.isfile(auth_creds):
-        logging.error('Failed to send email to %s: Credential file does not '
-                      'exist: %s. If this is a prod server, puppet should '
-                      'install it. If you need to be able to send email, '
-                      'find the credential file from chromeos-admin repo and '
-                      'copy it to %s', to, auth_creds, auth_creds)
-        return
-    client = GmailApiClient(oauth_credentials=auth_creds)
-    m = Message(to, subject, message_text)
-    retry_count = MAX_RETRY if retry else 0
-
-    def _run():
-        """Send the message."""
-        client.send_message(m, ignore_error=False)
-
-    def handler(exc):
-        """Check if exc is an HttpError and is retriable.
-
-        @param exc: An exception.
-
-        @return: True if is an retriable HttpError.
-        """
-        if not isinstance(exc, apiclient_errors.HttpError):
-            return False
-
-        error_msg = str(exc)
-        should_retry = any([msg in error_msg for msg in RETRIABLE_MSGS])
-        if should_retry:
-            logging.warning('Will retry error %s', exc)
-        return should_retry
-
-    success = False
-    try:
-        retry_util.GenericRetry(
-                handler, retry_count, _run, sleep=RETRY_DELAY,
-                backoff_factor=RETRY_BACKOFF_FACTOR)
-        success = True
-    finally:
-        metrics.Counter('chromeos/autotest/send_email/count').increment(
-                fields={'success': success})
+    # TODO(ayatane): Deprecated, not untangling imports now
+    pass
 
 
 if __name__ == '__main__':
diff --git a/site_utils/gs_offloader.py b/site_utils/gs_offloader.py
index 0b6569c..1d61c91 100755
--- a/site_utils/gs_offloader.py
+++ b/site_utils/gs_offloader.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -10,15 +10,18 @@
 Upon successful copy, the local results directory is deleted.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import abc
 try:
-  import cachetools
+    import cachetools
 except ImportError:
-  cachetools = None
+    cachetools = None
 import datetime
 import errno
 import glob
-import gzip
 import logging
 import logging.handlers
 import os
@@ -30,7 +33,6 @@
 import tarfile
 import tempfile
 import time
-import urllib
 
 from optparse import OptionParser
 
@@ -48,7 +50,7 @@
 from autotest_lib.utils import labellib
 from autotest_lib.utils import gslib
 from autotest_lib.utils.side_effects import config_loader
-from chromite.lib import timeout_util
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
 
 # Autotest requires the psutil module from site-packages, so it must be imported
 # after "import common".
@@ -58,10 +60,11 @@
 except ImportError:
     psutil = None
 
-from chromite.lib import parallel
+from autotest_lib.utils.frozen_chromite.lib import parallel
+import six
 try:
-    from chromite.lib import metrics
-    from chromite.lib import ts_mon_config
+    from autotest_lib.utils.frozen_chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import ts_mon_config
 except ImportError:
     metrics = utils.metrics_mock
     ts_mon_config = utils.metrics_mock
@@ -104,31 +107,12 @@
 USE_RSYNC_ENABLED = global_config.global_config.get_config_value(
         'CROS', 'gs_offloader_use_rsync', type=bool, default=False)
 
-LIMIT_FILE_COUNT = global_config.global_config.get_config_value(
-        'CROS', 'gs_offloader_limit_file_count', type=bool, default=False)
+LIMIT_FILE_COUNT = True
 
 # Use multiprocessing for gsutil uploading.
 GS_OFFLOADER_MULTIPROCESSING = global_config.global_config.get_config_value(
         'CROS', 'gs_offloader_multiprocessing', type=bool, default=False)
 
-D = '[0-9][0-9]'
-TIMESTAMP_PATTERN = '%s%s.%s.%s_%s.%s.%s' % (D, D, D, D, D, D, D)
-CTS_RESULT_PATTERN = 'testResult.xml'
-CTS_COMPRESSED_RESULT_PATTERN = 'testResult.xml.tgz'
-CTS_V2_RESULT_PATTERN = 'test_result.xml'
-CTS_V2_COMPRESSED_RESULT_PATTERN = 'test_result.xml.tgz'
-
-CTS_COMPRESSED_RESULT_TYPES = {
-        CTS_COMPRESSED_RESULT_PATTERN: CTS_RESULT_PATTERN,
-        CTS_V2_COMPRESSED_RESULT_PATTERN: CTS_V2_RESULT_PATTERN}
-
-# Google Storage bucket URI to store results in.
-DEFAULT_CTS_RESULTS_GSURI = global_config.global_config.get_config_value(
-        'CROS', 'cts_results_server', default='')
-DEFAULT_CTS_APFE_GSURI = global_config.global_config.get_config_value(
-        'CROS', 'cts_apfe_server', default='')
-DEFAULT_CTS_BVT_APFE_GSURI = global_config.global_config.get_config_value(
-        'CROS', 'ctsbvt_apfe_server', default='')
 
 # metadata type
 GS_OFFLOADER_SUCCESS_TYPE = 'gs_offloader_success'
@@ -396,83 +380,6 @@
                       dir_entry, e)
 
 
-def _upload_cts_testresult(dir_entry, multiprocessing):
-    """Upload test results to separate gs buckets.
-
-    Upload testResult.xml.gz/test_result.xml.gz file to cts_results_bucket.
-    Upload timestamp.zip to cts_apfe_bucket.
-
-    @param dir_entry: Path to the results folder.
-    @param multiprocessing: True to turn on -m option for gsutil.
-    """
-    for host in glob.glob(os.path.join(dir_entry, '*')):
-        cts_path = os.path.join(host, 'cheets_CTS.*', 'results', '*',
-                                TIMESTAMP_PATTERN)
-        cts_v2_path = os.path.join(host, 'cheets_CTS_*', 'results', '*',
-                                   TIMESTAMP_PATTERN)
-        gts_v2_path = os.path.join(host, 'cheets_GTS*', 'results', '*',
-                                   TIMESTAMP_PATTERN)
-        for result_path, result_pattern in [(cts_path, CTS_RESULT_PATTERN),
-                            (cts_path, CTS_COMPRESSED_RESULT_PATTERN),
-                            (cts_v2_path, CTS_V2_RESULT_PATTERN),
-                            (cts_v2_path, CTS_V2_COMPRESSED_RESULT_PATTERN),
-                            (gts_v2_path, CTS_V2_RESULT_PATTERN)]:
-            for path in glob.glob(result_path):
-                try:
-                    # Treat BVT and non-BVT CTS test results same, offload them
-                    # to APFE and result buckets. More details in b/172869794.
-                    # We will make this more structured when moving to
-                    # synchronous offloading.
-                    _upload_files(host, path, result_pattern,
-                                  multiprocessing,
-                                  DEFAULT_CTS_RESULTS_GSURI,
-                                  DEFAULT_CTS_APFE_GSURI)
-                except Exception as e:
-                    logging.error('ERROR uploading test results %s to GS: %s',
-                                  path, e)
-
-
-def _is_valid_result(build, result_pattern, suite):
-    """Check if the result should be uploaded to CTS/GTS buckets.
-
-    @param build: Builder name.
-    @param result_pattern: XML result file pattern.
-    @param suite: Test suite name.
-
-    @returns: Bool flag indicating whether a valid result.
-    """
-    if build is None or suite is None:
-        return False
-
-    # Not valid if it's not a release build.
-    if not re.match(r'(?!trybot-).*-release/.*', build):
-        return False
-
-    # Not valid if it's cts result but not 'arc-cts*' or 'test_that_wrapper'
-    # suite.
-    result_patterns = [CTS_RESULT_PATTERN, CTS_V2_RESULT_PATTERN]
-    if result_pattern in result_patterns and not (
-            suite.startswith('arc-cts') or
-            suite.startswith('arc-gts') or
-            suite.startswith('bvt-arc') or
-            suite.startswith('bvt-perbuild') or
-            suite.startswith('cros_test_platform') or
-            suite.startswith('test_that_wrapper')):
-        return False
-
-    return True
-
-
-def _is_test_collector(package):
-    """Returns true if the test run is just to collect list of CTS tests.
-
-    @param package: Autotest package name. e.g. cheets_CTS_N.CtsGraphicsTestCase
-
-    @return Bool flag indicating a test package is CTS list generator or not.
-    """
-    return TEST_LIST_COLLECTOR in package
-
-
 def _get_swarming_req_dir(path):
     """
     Returns the parent directory of |path|, if |path| is a swarming task result.
@@ -513,101 +420,6 @@
     return job_id, cts_package, timestamp
 
 
-def _upload_files(host, path, result_pattern, multiprocessing,
-                  result_gs_bucket, apfe_gs_bucket):
-    keyval = models.test.parse_job_keyval(host)
-    build = keyval.get('build')
-    suite = keyval.get('suite')
-
-    host_keyval = models.test.parse_host_keyval(host, keyval.get('hostname'))
-    labels =  urllib.unquote(host_keyval.get('labels'))
-    try:
-        host_model_name = re.search(r'model:(\w+)', labels).group(1)
-    except AttributeError:
-        logging.error('Model: name attribute is missing in %s/host_keyval/%s.',
-                      host, keyval.get('hostname'))
-        return
-
-    if not _is_valid_result(build, result_pattern, suite):
-        # No need to upload current folder, return.
-        return
-
-    parent_job_id = str(keyval['parent_job_id'])
-
-    job_id, package, timestamp = _parse_cts_job_results_file_path(path)
-
-    # Results produced by CTS test list collector are dummy results.
-    # They don't need to be copied to APFE bucket which is mainly being used for
-    # CTS APFE submission.
-    if not _is_test_collector(package):
-        # Path: bucket/build/parent_job_id/cheets_CTS.*/job_id_timestamp/
-        # or bucket/build/parent_job_id/cheets_GTS.*/job_id_timestamp/
-        index = build.find('-release')
-        build_with_model_name = ''
-        if index == -1:
-            logging.info('Not a release build.'
-                         'Non release build results can be skipped from offloading')
-            return
-
-        # CTS v2 pipeline requires device info in 'board.model' format.
-        # e.g. coral.robo360-release, eve.eve-release
-        build_with_model_name = (build[:index] + '.' + host_model_name +
-                                     build[index:])
-
-        cts_apfe_gs_path = os.path.join(
-                apfe_gs_bucket, build_with_model_name, parent_job_id,
-                package, job_id + '_' + timestamp) + '/'
-
-        for zip_file in glob.glob(os.path.join('%s.zip' % path)):
-            utils.run(' '.join(_get_cmd_list(
-                    multiprocessing, zip_file, cts_apfe_gs_path)))
-            logging.debug('Upload %s to %s ', zip_file, cts_apfe_gs_path)
-    else:
-        logging.debug('%s is a CTS Test collector Autotest test run.', package)
-        logging.debug('Skipping CTS results upload to APFE gs:// bucket.')
-
-    if result_gs_bucket:
-        # Path: bucket/cheets_CTS.*/job_id_timestamp/
-        # or bucket/cheets_GTS.*/job_id_timestamp/
-        test_result_gs_path = os.path.join(
-                result_gs_bucket, package, job_id + '_' + timestamp) + '/'
-
-        for test_result_file in glob.glob(os.path.join(path, result_pattern)):
-            # gzip test_result_file(testResult.xml/test_result.xml)
-
-            test_result_tgz_file = ''
-            if test_result_file.endswith('tgz'):
-                # Extract .xml file from tgz file for better handling in the
-                # CTS dashboard pipeline.
-                # TODO(rohitbm): work with infra team to produce .gz file so
-                # tgz to gz middle conversion is not needed.
-                try:
-                    with tarfile.open(test_result_file, 'r:gz') as tar_file:
-                        tar_file.extract(
-                                CTS_COMPRESSED_RESULT_TYPES[result_pattern])
-                        test_result_tgz_file = test_result_file
-                        test_result_file = os.path.join(path,
-                                CTS_COMPRESSED_RESULT_TYPES[result_pattern])
-                except tarfile.ReadError as error:
-                    logging.debug(error)
-                except KeyError as error:
-                    logging.debug(error)
-
-            test_result_file_gz =  '%s.gz' % test_result_file
-            with open(test_result_file, 'r') as f_in, (
-                    gzip.open(test_result_file_gz, 'w')) as f_out:
-                shutil.copyfileobj(f_in, f_out)
-            utils.run(' '.join(_get_cmd_list(
-                    multiprocessing, test_result_file_gz, test_result_gs_path)))
-            logging.debug('Zip and upload %s to %s',
-                          test_result_file_gz, test_result_gs_path)
-            # Remove test_result_file_gz(testResult.xml.gz/test_result.xml.gz)
-            os.remove(test_result_file_gz)
-            # Remove extracted test_result.xml file.
-            if test_result_tgz_file:
-               os.remove(test_result_file)
-
-
 def _emit_gs_returncode_metric(returncode):
     """Increment the gs_returncode counter based on |returncode|."""
     m_gs_returncode = 'chromeos/autotest/gs_offloader/gs_returncode'
@@ -632,12 +444,10 @@
     metrics.Counter(m_permission_error).increment(fields=metrics_fields)
 
 
-class BaseGSOffloader(object):
+class BaseGSOffloader(six.with_metaclass(abc.ABCMeta, object)):
 
     """Google Storage offloader interface."""
 
-    __metaclass__ = abc.ABCMeta
-
     def offload(self, dir_entry, dest_path, job_complete_time):
         """Safely offload a directory entry to Google Storage.
 
@@ -774,26 +584,21 @@
         metrics_fields = _get_metrics_fields(dir_entry)
         error_obj = _OffloadError(start_time)
         config = config_loader.load(dir_entry)
-        cts_enabled = True
         if config:
-          # TODO(linxinan): use credential file assigned by the side_effect
-          # config.
-          if not config.cts.enabled:
-            cts_enabled = config.cts.enabled
-          if config.google_storage.bucket:
-            gs_prefix = ('' if config.google_storage.bucket.startswith('gs://')
-                         else 'gs://')
-            self._gs_uri = gs_prefix + config.google_storage.bucket
+            # TODO(linxinan): use credential file assigned by the side_effect
+            # config.
+            if config.google_storage.bucket:
+                gs_prefix = ('' if
+                             config.google_storage.bucket.startswith('gs://')
+                             else 'gs://')
+                self._gs_uri = gs_prefix + config.google_storage.bucket
         else:
-          # For now, the absence of config does not block gs_offloader
-          # from uploading files via default credential.
-          logging.debug('Failed to load the side effects config in %s.',
-                        dir_entry)
+            # For now, the absence of config does not block gs_offloader
+            # from uploading files via default credential.
+            logging.debug('Failed to load the side effects config in %s.',
+                          dir_entry)
         try:
             sanitize_dir(dir_entry)
-            if DEFAULT_CTS_RESULTS_GSURI and cts_enabled:
-                _upload_cts_testresult(dir_entry, self._multiprocessing)
-
             if LIMIT_FILE_COUNT:
                 limit_file_count(dir_entry)
 
@@ -904,16 +709,16 @@
 
 
 class OptionalMemoryCache(object):
-   """Implements memory cache if cachetools module can be loaded.
+    """Implements memory cache if cachetools module can be loaded.
 
    If the platform has cachetools available then the cache will
    be created, otherwise the get calls will always act as if there
    was a cache miss and the set/delete will be no-ops.
    """
-   cache = None
+    cache = None
 
-   def setup(self, age_to_delete):
-       """Set up a TTL cache size based on how long the job will be handled.
+    def setup(self, age_to_delete):
+        """Set up a TTL cache size based on how long the job will be handled.
 
        Autotest jobs are handled by gs_offloader until they are deleted from
        local storage, base the cache size on how long that is.
@@ -921,34 +726,34 @@
        @param age_to_delete: Number of days after which items in the cache
                              should expire.
        """
-       if cachetools:
-           # Min cache is 1000 items for 10 mins. If the age to delete is 0
-           # days you still want a short / small cache.
-           # 2000 items is a good approximation for the max number of jobs a
-           # moblab # can produce in a day, lab offloads immediatly so
-           # the number of carried jobs should be very small in the normal
-           # case.
-           ttl = max(age_to_delete * 24 * 60 * 60, 600)
-           maxsize = max(age_to_delete * 2000, 1000)
-           job_timestamp_cache.cache = cachetools.TTLCache(maxsize=maxsize,
-                                                           ttl=ttl)
+        if cachetools:
+            # Min cache is 1000 items for 10 mins. If the age to delete is 0
+            # days you still want a short / small cache.
+            # 2000 items is a good approximation for the max number of jobs a
+            # moblab can produce in a day; the lab offloads immediately, so
+            # the number of carried jobs should be very small in the normal
+            # case.
+            ttl = max(age_to_delete * 24 * 60 * 60, 600)
+            maxsize = max(age_to_delete * 2000, 1000)
+            job_timestamp_cache.cache = cachetools.TTLCache(maxsize=maxsize,
+                                                            ttl=ttl)
 
-   def get(self, key):
-       """If we have a cache try to retrieve from it."""
-       if self.cache is not None:
-           result = self.cache.get(key)
-           return result
-       return None
+    def get(self, key):
+        """If we have a cache try to retrieve from it."""
+        if self.cache is not None:
+            result = self.cache.get(key)
+            return result
+        return None
 
-   def add(self, key, value):
-       """If we have a cache try to store key/value."""
-       if self.cache is not None:
-           self.cache[key] = value
+    def add(self, key, value):
+        """If we have a cache try to store key/value."""
+        if self.cache is not None:
+            self.cache[key] = value
 
-   def delete(self, key):
-       """If we have a cache try to remove a key."""
-       if self.cache is not None:
-           return self.cache.delete(key)
+    def delete(self, key):
+        """If we have a cache try to remove a key."""
+        if self.cache is not None:
+            return self.cache.delete(key)
 
 
 job_timestamp_cache = OptionalMemoryCache()
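A minimal usage sketch for the optional TTL cache above; the job key and timestamp are hypothetical, and when cachetools is not installed the cache degrades to a permanent miss:

import common  # sets up the autotest_lib import path, as elsewhere in site_utils
from autotest_lib.site_utils import gs_offloader

cache = gs_offloader.job_timestamp_cache
cache.setup(age_to_delete=14)   # longer retention -> larger, longer-lived TTL cache
cache.add('118-debug_user/host1', '2016-04-28 01:41:44')   # hypothetical entry
if cache.get('118-debug_user/host1') is None:
    # Either cachetools is unavailable (no-op cache) or the entry expired.
    pass
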
@@ -1150,7 +955,9 @@
     def _remove_offloaded_jobs(self):
         """Removed offloaded jobs from `self._open_jobs`."""
         removed_job_count = 0
-        for jobkey, job in self._open_jobs.items():
+        # Wrap in list() to copy the items so that entries can be deleted
+        # from the dictionary while iterating.
+        for jobkey, job in list(six.iteritems(self._open_jobs)):
             if (
                     not os.path.exists(job.dirname)
                     or _is_uploaded(job.dirname)):
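A minimal sketch of the copy-before-delete pattern used in `_remove_offloaded_jobs` above; the job keys and values here are hypothetical:

import six

open_jobs = {'118-user/host1': 'offloaded', '119-user/host2': 'pending'}

# Materialise the items first: mutating a dict while iterating over its
# live items() view raises RuntimeError on Python 3.
for jobkey, job in list(six.iteritems(open_jobs)):
    if job == 'offloaded':      # stand-in for the real "already offloaded" check
        del open_jobs[jobkey]

assert list(open_jobs) == ['119-user/host2']
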
diff --git a/site_utils/gs_offloader_unittest.py b/site_utils/gs_offloader_unittest.py
index 355174f..e82da41 100755
--- a/site_utils/gs_offloader_unittest.py
+++ b/site_utils/gs_offloader_unittest.py
@@ -1,10 +1,14 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import __builtin__
-import Queue
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import six.moves.builtins
+import six.moves.queue
 import json
 import logging
 import os
@@ -17,13 +21,14 @@
 import tempfile
 import time
 import unittest
-
-import mock
-import mox
+from unittest import mock
 
 import common
+
 from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.common_lib import utils
+from autotest_lib.client.common_lib.test_utils.comparators import IsA
+
+# For unit tests run without cloud_client.proto compiled.
 try:
     from autotest_lib.site_utils import cloud_console_client
@@ -35,7 +40,8 @@
 from autotest_lib.tko import models
 from autotest_lib.utils import gslib
 from autotest_lib.site_utils import pubsub_utils
-from chromite.lib import timeout_util
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
+from six.moves import range
 
 # Test value to use for `days_old`, if nothing else is required.
 _TEST_EXPIRATION_AGE = 7
@@ -52,15 +58,15 @@
 
 
 def is_fifo(path):
-  """Determines whether a path is a fifo.
+    """Determines whether a path is a fifo.
 
   @param path: fifo path string.
   """
-  return stat.S_ISFIFO(os.lstat(path).st_mode)
+    return stat.S_ISFIFO(os.lstat(path).st_mode)
 
 
 def _get_fake_process():
-  return FakeProcess()
+    return FakeProcess()
 
 
 class FakeProcess(object):
@@ -74,7 +80,7 @@
         return True
 
 
-class OffloaderOptionsTests(mox.MoxTestBase):
+class OffloaderOptionsTests(unittest.TestCase):
     """Tests for the `Offloader` constructor.
 
     Tests that offloader instance fields are set as expected
@@ -91,7 +97,10 @@
 
     def setUp(self):
         super(OffloaderOptionsTests, self).setUp()
-        self.mox.StubOutWithMock(utils, 'get_offload_gsuri')
+        patcher = mock.patch.object(utils, 'get_offload_gsuri')
+        self.gsuri_patch = patcher.start()
+        self.addCleanup(patcher.stop)
+
         gs_offloader.GS_OFFLOADING_ENABLED = True
         gs_offloader.GS_OFFLOADER_MULTIPROCESSING = False
 
@@ -100,35 +109,51 @@
                                console_client=None, delete_age=0):
         """Mock the process of getting the offload_dir function."""
         if is_moblab:
-            expected_gsuri = '%sresults/%s/%s/' % (
+            self.expected_gsuri = '%sresults/%s/%s/' % (
                     global_config.global_config.get_config_value(
                             'CROS', 'image_storage_server'),
                     'Fa:ke:ma:c0:12:34', 'rand0m-uu1d')
         else:
-            expected_gsuri = utils.DEFAULT_OFFLOAD_GSURI
-        utils.get_offload_gsuri().AndReturn(expected_gsuri)
-        sub_offloader = gs_offloader.GSOffloader(expected_gsuri,
-            multiprocessing, delete_age, console_client)
-        self.mox.StubOutWithMock(gs_offloader, 'GSOffloader')
+            self.expected_gsuri = utils.DEFAULT_OFFLOAD_GSURI
+        utils.get_offload_gsuri.return_value = self.expected_gsuri
+        sub_offloader = gs_offloader.GSOffloader(self.expected_gsuri,
+                                                 multiprocessing, delete_age,
+                                                 console_client)
+
+        GsOffloader_patcher = mock.patch.object(gs_offloader, 'GSOffloader')
+        self.GsOffloader_patch = GsOffloader_patcher.start()
+        self.addCleanup(GsOffloader_patcher.stop)
+
         if cloud_console_client:
-            self.mox.StubOutWithMock(cloud_console_client,
-                    'is_cloud_notification_enabled')
+            console_patcher = mock.patch.object(
+                    cloud_console_client, 'is_cloud_notification_enabled')
+            self.ccc_notification_patch = console_patcher.start()
+            self.addCleanup(console_patcher.stop)
+
         if console_client:
-            cloud_console_client.is_cloud_notification_enabled().AndReturn(True)
-            gs_offloader.GSOffloader(
-                    expected_gsuri, multiprocessing, delete_age,
-                    mox.IsA(cloud_console_client.PubSubBasedClient)).AndReturn(
-                        sub_offloader)
+            cloud_console_client.is_cloud_notification_enabled.return_value = True
+            gs_offloader.GSOffloader.return_value = sub_offloader
         else:
             if cloud_console_client:
-                cloud_console_client.is_cloud_notification_enabled().AndReturn(
-                        False)
-            gs_offloader.GSOffloader(
-                expected_gsuri, multiprocessing, delete_age, None).AndReturn(
-                    sub_offloader)
-        self.mox.ReplayAll()
+                cloud_console_client.is_cloud_notification_enabled.return_value = False
+            gs_offloader.GSOffloader.return_value = sub_offloader
+
         return sub_offloader
 
+    def _verify_sub_offloader(self,
+                              is_moblab,
+                              multiprocessing=False,
+                              console_client=None,
+                              delete_age=0):
+        if console_client:
+            self.GsOffloader_patch.assert_called_with(
+                    self.expected_gsuri, multiprocessing, delete_age,
+                    IsA(cloud_console_client.PubSubBasedClient))
+
+        else:
+            self.GsOffloader_patch.assert_called_with(self.expected_gsuri,
+                                                      multiprocessing,
+                                                      delete_age, None)
 
     def test_process_no_options(self):
         """Test default offloader options."""
@@ -141,7 +166,7 @@
                          sub_offloader)
         self.assertEqual(offloader._upload_age_limit, 0)
         self.assertEqual(offloader._delete_age_limit, 0)
-
+        self._verify_sub_offloader(False)
 
     def test_process_all_option(self):
         """Test offloader handling for the --all option."""
@@ -153,6 +178,7 @@
                          sub_offloader)
         self.assertEqual(offloader._upload_age_limit, 0)
         self.assertEqual(offloader._delete_age_limit, 0)
+        self._verify_sub_offloader(False)
 
 
     def test_process_hosts_option(self):
@@ -167,6 +193,7 @@
                          sub_offloader)
         self.assertEqual(offloader._upload_age_limit, 0)
         self.assertEqual(offloader._delete_age_limit, 0)
+        self._verify_sub_offloader(False)
 
 
     def test_parallelism_option(self):
@@ -181,6 +208,7 @@
                          sub_offloader)
         self.assertEqual(offloader._upload_age_limit, 0)
         self.assertEqual(offloader._delete_age_limit, 0)
+        self._verify_sub_offloader(False)
 
 
     def test_delete_only_option(self):
@@ -208,6 +236,7 @@
                          sub_offloader)
         self.assertEqual(offloader._upload_age_limit, 7)
         self.assertEqual(offloader._delete_age_limit, 7)
+        self._verify_sub_offloader(False, delete_age=7)
 
 
     def test_moblab_gsuri_generation(self):
@@ -221,6 +250,7 @@
                          sub_offloader)
         self.assertEqual(offloader._upload_age_limit, 0)
         self.assertEqual(offloader._delete_age_limit, 0)
+        self._verify_sub_offloader(True)
 
 
     def test_globalconfig_offloading_flag(self):
@@ -237,7 +267,7 @@
         offloader = gs_offloader.Offloader(_get_options(['-m']))
         self.assertEqual(offloader._gs_offloader,
                          sub_offloader)
-        self.mox.VerifyAll()
+        self._verify_sub_offloader(True, True)
 
     def test_offloader_multiprocessing_flag_not_set_default_false(self):
         """Test multiprocessing is set."""
@@ -246,7 +276,7 @@
         offloader = gs_offloader.Offloader(_get_options([]))
         self.assertEqual(offloader._gs_offloader,
                          sub_offloader)
-        self.mox.VerifyAll()
+        self._verify_sub_offloader(True, False)
 
     def test_offloader_multiprocessing_flag_not_set_default_true(self):
         """Test multiprocessing is set."""
@@ -255,20 +285,20 @@
         offloader = gs_offloader.Offloader(_get_options([]))
         self.assertEqual(offloader._gs_offloader,
                          sub_offloader)
-        self.mox.VerifyAll()
+        self._verify_sub_offloader(True, True)
 
 
     def test_offloader_pubsub_enabled(self):
         """Test multiprocessing is set."""
         if not cloud_console_client:
             return
-        self.mox.StubOutWithMock(pubsub_utils, "PubSubClient")
-        sub_offloader = self._mock_get_sub_offloader(True, False,
-                cloud_console_client.PubSubBasedClient())
-        offloader = gs_offloader.Offloader(_get_options([]))
-        self.assertEqual(offloader._gs_offloader,
-                         sub_offloader)
-        self.mox.VerifyAll()
+        with mock.patch.object(pubsub_utils, "PubSubClient"):
+            sub_offloader = self._mock_get_sub_offloader(
+                    True, False, cloud_console_client.PubSubBasedClient())
+            offloader = gs_offloader.Offloader(_get_options([]))
+            self.assertEqual(offloader._gs_offloader, sub_offloader)
+            self._verify_sub_offloader(
+                    True, False, cloud_console_client.PubSubBasedClient())
 
 
 class _MockJobDirectory(job_directories._JobDirectory):
@@ -488,8 +518,8 @@
             os.mkdir(d)
 
 
-class _TempResultsDirTestBase(_TempResultsDirTestCase, mox.MoxTestBase):
-    """Base Mox test class for tests using a temporary results directory."""
+class _TempResultsDirTestBase(_TempResultsDirTestCase, unittest.TestCase):
+    """Base test class for tests using a temporary results directory."""
 
 
 class FailedOffloadsLogTest(_TempResultsDirTestBase):
@@ -574,11 +604,19 @@
         self._saved_loglevel = logging.getLogger().getEffectiveLevel()
         logging.getLogger().setLevel(logging.CRITICAL+1)
         self._job = self.make_job(self.REGULAR_JOBLIST[0])
-        self.mox.StubOutWithMock(gs_offloader, '_get_cmd_list')
+
+        cmd_list_patcher = mock.patch.object(gs_offloader, '_get_cmd_list')
+        cmd_list_patch = cmd_list_patcher.start()
+        self.addCleanup(cmd_list_patcher.stop)
+
         alarm = mock.patch('signal.alarm', return_value=0)
         alarm.start()
         self.addCleanup(alarm.stop)
-        self.mox.StubOutWithMock(models.test, 'parse_job_keyval')
+
+        parse_keyval_patcher = mock.patch.object(models.test,
+                                                 'parse_job_keyval')
+        parse_keyval_patcher.start()
+        self.addCleanup(parse_keyval_patcher.stop)
+
         self.should_remove_sarming_req_dir = False
 
 
@@ -586,14 +624,12 @@
         logging.getLogger().setLevel(self._saved_loglevel)
         super(OffloadDirectoryTests, self).tearDown()
 
-    def _mock__upload_cts_testresult(self):
-        self.mox.StubOutWithMock(gs_offloader, '_upload_cts_testresult')
-        gs_offloader._upload_cts_testresult(
-                mox.IgnoreArg(),mox.IgnoreArg()).AndReturn(None)
-
     def _mock_create_marker_file(self):
-        self.mox.StubOutWithMock(__builtin__, 'open')
-        open(mox.IgnoreArg(), 'a').AndReturn(mock.MagicMock())
+        open_patcher = mock.patch.object(six.moves.builtins, 'open')
+        open_patch = open_patcher.start()
+        self.addCleanup(open_patcher.stop)
+
+        open_patch.return_value = mock.MagicMock()
 
 
     def _mock_offload_dir_calls(self, command, queue_args,
@@ -606,14 +642,16 @@
                        call to `_get_cmd_list()`.
 
         """
-        self.mox.StubOutWithMock(os.path, 'isfile')
-        os.path.isfile(mox.IgnoreArg()).AndReturn(marker_initially_exists)
+        isfile_patcher = mock.patch.object(os.path, 'isfile')
+        isfile_patcher.start()
+        self.addCleanup(isfile_patcher.stop)
+
+        os.path.isfile.return_value = marker_initially_exists
         command.append(queue_args[0])
         gs_offloader._get_cmd_list(
                 False, queue_args[0],
                 '%s%s' % (utils.DEFAULT_OFFLOAD_GSURI,
                           queue_args[1])).AndReturn(command)
-        self._mock__upload_cts_testresult()
 
 
     def _run_offload_dir(self, should_succeed, delete_age):
@@ -626,13 +664,11 @@
                               offloaded job directory.
 
         """
-        self.mox.ReplayAll()
         gs_offloader.GSOffloader(
                 utils.DEFAULT_OFFLOAD_GSURI, False, delete_age).offload(
                         self._job.queue_args[0],
                         self._job.queue_args[1],
                         self._job.queue_args[2])
-        self.mox.VerifyAll()
         self.assertEqual(not should_succeed,
                          os.path.isdir(self._job.queue_args[0]))
         swarming_req_dir = gs_offloader._get_swarming_req_dir(
@@ -646,7 +682,7 @@
         """Test that `offload_dir()` can succeed correctly."""
         self._mock_offload_dir_calls(['test', '-d'],
                                      self._job.queue_args)
-        os.path.isfile(mox.IgnoreArg()).AndReturn(True)
+        os.path.isfile.return_value = True
         self._mock_create_marker_file()
         self._run_offload_dir(True, 0)
 
@@ -665,7 +701,7 @@
         self._mock_offload_dir_calls(['test', '-d'],
                                      self._job.queue_args)
 
-        os.path.isfile(mox.IgnoreArg()).AndReturn(True)
+        os.path.isfile.return_value = True
         self.should_remove_sarming_req_dir = True
         self._mock_create_marker_file()
         self._run_offload_dir(True, 0)
@@ -679,7 +715,7 @@
         self._mock_offload_dir_calls(['test', '-d'],
                                      self._job.queue_args)
 
-        os.path.isfile(mox.IgnoreArg()).AndReturn(True)
+        os.path.isfile.return_value = True
         self.should_remove_sarming_req_dir = False
         self._mock_create_marker_file()
         self._run_offload_dir(True, 0)
@@ -698,7 +734,7 @@
         invalid_files.append(os.path.join(
                 invalid_folder,
                 'invalid_name_file_%s' % invalid_chars))
-        good_folder =  os.path.join(results_folder, 'valid_name_folder')
+        good_folder = os.path.join(results_folder, 'valid_name_folder')
         good_file = os.path.join(good_folder, 'valid_name_file')
         for folder in [invalid_folder, good_folder]:
             os.makedirs(folder)
@@ -793,57 +829,25 @@
         self.check_limit_file_count(is_test_job=False)
 
 
-    def test_is_valid_result(self):
-        """Test _is_valid_result."""
-        release_build = 'veyron_minnie-cheets-release/R52-8248.0.0'
-        pfq_build = 'cyan-cheets-android-pfq/R54-8623.0.0-rc1'
-        trybot_build = 'trybot-samus-release/R54-8640.0.0-b5092'
-        trybot_2_build = 'trybot-samus-pfq/R54-8640.0.0-b5092'
-        release_2_build = 'test-trybot-release/R54-8640.0.0-b5092'
-        self.assertTrue(gs_offloader._is_valid_result(
-            release_build, gs_offloader.CTS_RESULT_PATTERN, 'arc-cts'))
-        self.assertTrue(gs_offloader._is_valid_result(
-            release_build, gs_offloader.CTS_RESULT_PATTERN, 'test_that_wrapper'))
-        self.assertTrue(gs_offloader._is_valid_result(
-            release_build, gs_offloader.CTS_RESULT_PATTERN, 'cros_test_platform'))
-        self.assertTrue(gs_offloader._is_valid_result(
-            release_build, gs_offloader.CTS_RESULT_PATTERN, 'bvt-arc'))
-        self.assertTrue(gs_offloader._is_valid_result(
-            release_build, gs_offloader.CTS_RESULT_PATTERN, 'bvt-perbuild'))
-        self.assertFalse(gs_offloader._is_valid_result(
-            release_build, gs_offloader.CTS_RESULT_PATTERN, 'bvt-cq'))
-        self.assertTrue(gs_offloader._is_valid_result(
-            release_build, gs_offloader.CTS_V2_RESULT_PATTERN, 'arc-gts'))
-        self.assertFalse(gs_offloader._is_valid_result(
-            None, gs_offloader.CTS_RESULT_PATTERN, 'arc-cts'))
-        self.assertFalse(gs_offloader._is_valid_result(
-            release_build, gs_offloader.CTS_RESULT_PATTERN, None))
-        self.assertFalse(gs_offloader._is_valid_result(
-            pfq_build, gs_offloader.CTS_RESULT_PATTERN, 'arc-cts'))
-        self.assertFalse(gs_offloader._is_valid_result(
-            trybot_build, gs_offloader.CTS_RESULT_PATTERN, 'arc-cts'))
-        self.assertFalse(gs_offloader._is_valid_result(
-            trybot_2_build, gs_offloader.CTS_RESULT_PATTERN, 'arc-cts'))
-        self.assertTrue(gs_offloader._is_valid_result(
-            release_2_build, gs_offloader.CTS_RESULT_PATTERN, 'arc-cts'))
+    def test_get_metrics_fields(self):
+        """Test method _get_metrics_fields."""
+        results_folder, host_folder = self._create_results_folder()
+        models.test.parse_job_keyval.return_value = ({
+                'build': 'veyron_minnie-cheets-release/R52-8248.0.0',
+                'parent_job_id': 'p_id',
+                'suite': 'arc-cts'
+        })
+        try:
+            self.assertEqual({'board': 'veyron_minnie-cheets',
+                              'milestone': 'R52'},
+                             gs_offloader._get_metrics_fields(host_folder))
+        finally:
+            shutil.rmtree(results_folder)
 
 
-    def create_results_folder(self):
-        """Create CTS/GTS results folders."""
+    def _create_results_folder(self):
         results_folder = tempfile.mkdtemp()
         host_folder = os.path.join(results_folder, 'chromeos4-row9-rack11-host22')
-        debug_folder = os.path.join(host_folder, 'debug')
-        sysinfo_folder = os.path.join(host_folder, 'sysinfo')
-        cts_result_folder = os.path.join(
-                host_folder, 'cheets_CTS.android.dpi', 'results', 'cts-results')
-        cts_v2_result_folder = os.path.join(host_folder,
-                'cheets_CTS_N.CtsGraphicsTestCases', 'results', 'android-cts')
-        gts_result_folder = os.path.join(
-                host_folder, 'cheets_GTS.google.admin', 'results', 'android-gts')
-        timestamp_str = '2016.04.28_01.41.44'
-        timestamp_cts_folder = os.path.join(cts_result_folder, timestamp_str)
-        timestamp_cts_v2_folder = os.path.join(cts_v2_result_folder, timestamp_str)
-        timestamp_gts_folder = os.path.join(gts_result_folder, timestamp_str)
 
         # Build host keyvals set to parse model info.
         host_info_path = os.path.join(host_folder, 'host_keyvals')
@@ -864,151 +868,7 @@
         with open(autoserve_path, 'w') as temp_file:
             temp_file.write(' ')
 
-        # Test results in cts_result_folder with a different time-stamp.
-        timestamp_str_2 = '2016.04.28_10.41.44'
-        timestamp_cts_folder_2 = os.path.join(cts_result_folder, timestamp_str_2)
-
-        for folder in [debug_folder, sysinfo_folder, cts_result_folder,
-                       timestamp_cts_folder, timestamp_cts_folder_2,
-                       timestamp_cts_v2_folder, timestamp_gts_folder]:
-            os.makedirs(folder)
-
-        path_pattern_pair = [(timestamp_cts_folder, gs_offloader.CTS_RESULT_PATTERN),
-                             (timestamp_cts_folder_2, gs_offloader.CTS_RESULT_PATTERN),
-                             (timestamp_cts_folder_2, gs_offloader.CTS_COMPRESSED_RESULT_PATTERN),
-                             (timestamp_cts_v2_folder, gs_offloader.CTS_V2_RESULT_PATTERN),
-                             (timestamp_cts_v2_folder, gs_offloader.CTS_V2_COMPRESSED_RESULT_PATTERN),
-                             (timestamp_gts_folder, gs_offloader.CTS_V2_RESULT_PATTERN)]
-
-        # Create timestamp.zip file_path.
-        cts_zip_file = os.path.join(cts_result_folder, timestamp_str + '.zip')
-        cts_zip_file_2 = os.path.join(cts_result_folder, timestamp_str_2 + '.zip')
-        cts_v2_zip_file = os.path.join(cts_v2_result_folder, timestamp_str + '.zip')
-        gts_zip_file = os.path.join(gts_result_folder, timestamp_str + '.zip')
-
-        # Create xml file_path.
-        cts_result_file = os.path.join(timestamp_cts_folder, 'testResult.xml')
-        cts_result_file_2 = os.path.join(timestamp_cts_folder_2,
-                                         'testResult.xml')
-        cts_result_compressed_file_2 = os.path.join(timestamp_cts_folder_2,
-                                                     'testResult.xml.tgz')
-        gts_result_file = os.path.join(timestamp_gts_folder, 'test_result.xml')
-        cts_v2_result_file = os.path.join(timestamp_cts_v2_folder,
-                                         'test_result.xml')
-        cts_v2_result_compressed_file = os.path.join(timestamp_cts_v2_folder,
-                                         'test_result.xml.tgz')
-
-        for file_path in [cts_zip_file, cts_zip_file_2, cts_v2_zip_file,
-                          gts_zip_file, cts_result_file, cts_result_file_2,
-                          cts_result_compressed_file_2, gts_result_file,
-                          cts_v2_result_file, cts_v2_result_compressed_file]:
-          if file_path.endswith('tgz'):
-              test_result_file = gs_offloader.CTS_COMPRESSED_RESULT_TYPES[
-                      os.path.basename(file_path)]
-              with open(test_result_file, 'w') as f:
-                  f.write('test')
-              with tarfile.open(file_path, 'w:gz') as tar_file:
-                  tar_file.add(test_result_file)
-              os.remove(test_result_file)
-          else:
-              with open(file_path, 'w') as f:
-                  f.write('test')
-
-        return (results_folder, host_folder, path_pattern_pair)
-
-
-    def test__upload_cts_testresult(self):
-        """Test _upload_cts_testresult."""
-        results_folder, host_folder, path_pattern_pair = self.create_results_folder()
-
-        self.mox.StubOutWithMock(gs_offloader, '_upload_files')
-        gs_offloader._upload_files(
-            mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(), False,
-                mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(
-                ['test', '-d', host_folder])
-        gs_offloader._upload_files(
-            mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(), False,
-                mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(
-                ['test', '-d', host_folder])
-        gs_offloader._upload_files(
-            mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(), False,
-                mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(
-                ['test', '-d', host_folder])
-
-        self.mox.ReplayAll()
-        gs_offloader._upload_cts_testresult(results_folder, False)
-        self.mox.VerifyAll()
-        shutil.rmtree(results_folder)
-
-
-    def test_parse_cts_job_results_file_path(self):
-        # A autotest path
-        path = ('/317739475-chromeos-test/chromeos4-row9-rack11-host22/'
-                'cheets_CTS.android.dpi/results/cts-results/'
-                '2016.04.28_01.41.44')
-        job_id, package, timestamp = \
-            gs_offloader._parse_cts_job_results_file_path(path)
-        self.assertEqual('317739475-chromeos-test', job_id)
-        self.assertEqual('cheets_CTS.android.dpi', package)
-        self.assertEqual('2016.04.28_01.41.44', timestamp)
-
-
-        # A skylab path
-        path = ('/swarming-458e3a3a7fc6f210/1/autoserv_test/'
-                'cheets_CTS.android.dpi/results/cts-results/'
-                '2016.04.28_01.41.44')
-        job_id, package, timestamp = \
-            gs_offloader._parse_cts_job_results_file_path(path)
-        self.assertEqual('swarming-458e3a3a7fc6f210-1', job_id)
-        self.assertEqual('cheets_CTS.android.dpi', package)
-        self.assertEqual('2016.04.28_01.41.44', timestamp)
-
-
-    def test_upload_files(self):
-        """Test upload_files"""
-        results_folder, host_folder, path_pattern_pair = self.create_results_folder()
-
-        for path, pattern in path_pattern_pair:
-            models.test.parse_job_keyval(mox.IgnoreArg()).AndReturn({
-                'build': 'veyron_minnie-cheets-release/R52-8248.0.0',
-                'hostname': 'chromeos4-row9-rack11-host22',
-                'parent_job_id': 'p_id',
-                'suite': 'arc-cts'
-            })
-
-            gs_offloader._get_cmd_list(
-                False, mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(
-                    ['test', '-d', path])
-            gs_offloader._get_cmd_list(
-                False, mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(
-                    ['test', '-d', path])
-
-            self.mox.ReplayAll()
-            gs_offloader._upload_files(host_folder, path, pattern, False,
-                                       'gs://a-test-bucket/',
-                                       'gs://a-test-apfe-bucket/')
-            self.mox.VerifyAll()
-            self.mox.ResetAll()
-
-        shutil.rmtree(results_folder)
-
-
-    def test_get_metrics_fields(self):
-        """Test method _get_metrics_fields."""
-        results_folder, host_folder, _ = self.create_results_folder()
-        models.test.parse_job_keyval(mox.IgnoreArg()).AndReturn({
-                'build': 'veyron_minnie-cheets-release/R52-8248.0.0',
-                'parent_job_id': 'p_id',
-                'suite': 'arc-cts'
-            })
-        try:
-            self.mox.ReplayAll()
-            self.assertEqual({'board': 'veyron_minnie-cheets',
-                              'milestone': 'R52'},
-                             gs_offloader._get_metrics_fields(host_folder))
-            self.mox.VerifyAll()
-        finally:
-            shutil.rmtree(results_folder)
+        return (results_folder, host_folder)
 
 
 class OffladerConfigTests(_TempResultsDirTestBase):
@@ -1019,16 +879,35 @@
         gs_offloader.GS_OFFLOADING_ENABLED = True
         gs_offloader.GS_OFFLOADER_MULTIPROCESSING = True
         self.dest_path = '/results'
-        self.mox.StubOutWithMock(gs_offloader, '_get_metrics_fields')
-        self.mox.StubOutWithMock(gs_offloader, '_OffloadError')
-        self.mox.StubOutWithMock(gs_offloader, '_upload_cts_testresult')
-        self.mox.StubOutWithMock(gs_offloader, '_emit_offload_metrics')
-        self.mox.StubOutWithMock(gs_offloader, '_get_cmd_list')
-        self.mox.StubOutWithMock(subprocess, 'Popen')
-        self.mox.StubOutWithMock(gs_offloader, '_emit_gs_returncode_metric')
 
+        metrics_fields_patcher = mock.patch.object(gs_offloader,
+                                                   '_get_metrics_fields')
+        metrics_fields_patcher.start()
+        self.addCleanup(metrics_fields_patcher.stop)
 
-    def _run(self, results_dir, gs_bucket, expect_dest, cts_enabled):
+        offloadError_patcher = mock.patch.object(gs_offloader, '_OffloadError')
+        offloadError_patcher.start()
+        self.addCleanup(offloadError_patcher.stop)
+
+        offload_metrics_patcher = mock.patch.object(gs_offloader,
+                                                    '_emit_offload_metrics')
+        offload_metrics_patcher.start()
+        self.addCleanup(offload_metrics_patcher.stop)
+
+        cmd_list_patcher = mock.patch.object(gs_offloader, '_get_cmd_list')
+        cmd_list_patcher.start()
+        self.addCleanup(cmd_list_patcher.stop)
+
+        Popen_patcher = mock.patch.object(subprocess, 'Popen')
+        Popen_patcher.start()
+        self.addCleanup(Popen_patcher.stop)
+
+        returncode_metric_patcher = mock.patch.object(
+                gs_offloader, '_emit_gs_returncode_metric')
+        returncode_metric_patcher.start()
+        self.addCleanup(returncode_metric_patcher.stop)
+
+    def _run(self, results_dir, gs_bucket, expect_dest):
         stdout = os.path.join(results_dir, 'std.log')
         stderr = os.path.join(results_dir, 'std.err')
         config = {
@@ -1041,60 +920,41 @@
                 'bucket': gs_bucket,
                 'credentials_file': '/foo-creds'
             },
-            'cts': {
-                'enabled': cts_enabled,
-            },
             'this_field_is_ignored': True
         }
         path = os.path.join(results_dir, 'side_effects_config.json')
         with open(path, 'w') as f:
             f.write(json.dumps(config))
         gs_offloader._get_metrics_fields(results_dir)
-        if cts_enabled:
-            gs_offloader._upload_cts_testresult(results_dir, True)
-        gs_offloader._get_cmd_list(
-            True,
-            mox.IgnoreArg(),
-            expect_dest).AndReturn(['test', '-d', expect_dest])
-        subprocess.Popen(mox.IgnoreArg(),
-                         stdout=stdout,
-                         stderr=stderr).AndReturn(_get_fake_process())
-        gs_offloader._OffloadError(mox.IgnoreArg())
-        gs_offloader._emit_gs_returncode_metric(mox.IgnoreArg()).AndReturn(True)
-        gs_offloader._emit_offload_metrics(mox.IgnoreArg()).AndReturn(True)
+        gs_offloader._get_cmd_list.return_value = ['test', '-d', expect_dest]
+        subprocess.Popen.side_effect = [
+                _get_fake_process(), _get_fake_process()
+        ]
+        gs_offloader._OffloadError(mock.ANY)
+        gs_offloader._emit_gs_returncode_metric.return_value = True
+        gs_offloader._emit_offload_metrics.return_value = True
         sub_offloader = gs_offloader.GSOffloader(results_dir, True, 0, None)
-        subprocess.Popen(mox.IgnoreArg(),
-                         stdout=stdout,
-                         stderr=stderr).AndReturn(_get_fake_process())
-        self.mox.ReplayAll()
         sub_offloader._try_offload(results_dir, self.dest_path, stdout, stderr)
-        self.mox.VerifyAll()
-        self.mox.ResetAll()
         shutil.rmtree(results_dir)
 
+    def _verify(self, results_dir, gs_bucket, expect_dest):
+        gs_offloader._get_cmd_list.assert_called_with(True, mock.ANY,
+                                                      expect_dest)
 
-    def test_upload_files_to_dev(self):
-        """Test upload results to dev gs bucket and skip cts uploading."""
-        res = tempfile.mkdtemp()
-        gs_bucket = 'dev-bucket'
-        expect_dest = 'gs://' + gs_bucket + self.dest_path
-        self._run(res, gs_bucket, expect_dest, False)
+        stdout = os.path.join(results_dir, 'std.log')
+        stderr = os.path.join(results_dir, 'std.err')
 
-
-    def test_upload_files_prod(self):
-        """Test upload results to the prod gs bucket and also upload to cts."""
-        res = tempfile.mkdtemp()
-        gs_bucket = 'prod-bucket'
-        expect_dest = 'gs://' + gs_bucket + self.dest_path
-        self._run(res, gs_bucket, expect_dest, True)
-
+        # Popen is expected to be invoked twice with identical arguments.
+        subprocess_call = mock.call(mock.ANY, stdout=stdout, stderr=stderr)
+        subprocess.Popen.assert_has_calls([subprocess_call, subprocess_call])
 
     def test_skip_gs_prefix(self):
         """Test skip the 'gs://' prefix if already presented."""
         res = tempfile.mkdtemp()
         gs_bucket = 'gs://prod-bucket'
         expect_dest = gs_bucket + self.dest_path
-        self._run(res, gs_bucket, expect_dest, True)
+        self._run(res, gs_bucket, expect_dest)
+        self._verify(res, gs_bucket, expect_dest)
 
 
 class JobDirectoryOffloadTests(_TempResultsDirTestBase):
@@ -1132,7 +992,7 @@
     def setUp(self):
         super(JobDirectoryOffloadTests, self).setUp()
         self._job = self.make_job(self.REGULAR_JOBLIST[0])
-        self._queue = Queue.Queue()
+        self._queue = six.moves.queue.Queue()
 
 
     def _offload_unexpired_job(self, days_old):
@@ -1303,8 +1163,10 @@
             set(self.REGULAR_JOBLIST) | set(self.SPECIAL_JOBLIST))
         self.make_job_hierarchy()
         self._offloader = gs_offloader.Offloader(_get_options(['-a']))
-        self.mox.StubOutWithMock(logging, 'debug')
 
+        logging_patcher = mock.patch.object(logging, 'debug')
+        self.logging_patch = logging_patcher.start()
+        self.addCleanup(logging_patcher.stop)
 
     def _run_add_new_jobs(self, expected_key_set):
         """Basic test assertions for `_add_new_jobs()`.
@@ -1317,16 +1179,13 @@
 
         """
         count = len(expected_key_set) - len(self._offloader._open_jobs)
-        logging.debug(mox.IgnoreArg(), count)
-        self.mox.ReplayAll()
         self._offloader._add_new_jobs()
         self.assertEqual(expected_key_set,
                          set(self._offloader._open_jobs.keys()))
         for jobkey, job in self._offloader._open_jobs.items():
             self.assertEqual(jobkey, job.dirname)
-        self.mox.VerifyAll()
-        self.mox.ResetAll()
 
+        self.logging_patch.assert_called_with(mock.ANY, count)
 
     def test_add_jobs_empty(self):
         """Test adding jobs to an empty dictionary.
@@ -1366,9 +1225,15 @@
     def setUp(self):
         super(ReportingTests, self).setUp()
         self._offloader = gs_offloader.Offloader(_get_options([]))
-        self.mox.StubOutWithMock(self._offloader, '_log_failed_jobs_locally')
-        self.mox.StubOutWithMock(logging, 'debug')
 
+        failed_jobs_patcher = mock.patch.object(self._offloader,
+                                                '_log_failed_jobs_locally')
+        self.failed_jobs_patch = failed_jobs_patcher.start()
+        self.addCleanup(failed_jobs_patcher.stop)
+
+        logging_patcher = mock.patch.object(logging, 'debug')
+        self.logging_patch = logging_patcher.start()
+        self.addCleanup(logging_patcher.stop)
 
     def _add_job(self, jobdir):
         """Add a job to the dictionary of unfinished jobs."""
@@ -1377,7 +1242,7 @@
         return j
 
 
-    def _expect_log_message(self, new_open_jobs, with_failures):
+    def _expect_log_message(self, new_open_jobs, with_failures, count=None):
         """Mock expected logging calls.
 
         `_report_failed_jobs()` logs one message with the number
@@ -1397,11 +1262,12 @@
                              failure count is expected.
 
         """
-        count = len(self._offloader._open_jobs) - len(new_open_jobs)
-        logging.debug(mox.IgnoreArg(), count, len(new_open_jobs))
+        if count is None:
+            count = len(self._offloader._open_jobs) - len(new_open_jobs)
+        self.logging_patch.assert_called_with(mock.ANY, count,
+                                              len(new_open_jobs))
         if with_failures:
-            logging.debug(mox.IgnoreArg(), len(new_open_jobs))
-
+            self.logging_patch.assert_called_with(mock.ANY, len(new_open_jobs))
 
     def _run_update(self, new_open_jobs):
         """Call `_report_failed_jobs()`.
@@ -1416,12 +1282,9 @@
                              new value of the offloader's
                              `_open_jobs` field.
         """
-        self.mox.ReplayAll()
         self._offloader._report_failed_jobs()
         self._offloader._remove_offloaded_jobs()
         self.assertEqual(self._offloader._open_jobs, new_open_jobs)
-        self.mox.VerifyAll()
-        self.mox.ResetAll()
 
 
     def _expect_failed_jobs(self, failed_jobs):
@@ -1443,9 +1306,9 @@
         Expected result is an empty `_open_jobs` list.
 
         """
-        self._expect_log_message({}, False)
         self._expect_failed_jobs([])
         self._run_update({})
+        self._expect_log_message({}, False)
 
 
     def test_all_completed(self):
@@ -1456,11 +1319,13 @@
         Expected result is an empty `_open_jobs` list.
 
         """
+
         for d in self.REGULAR_JOBLIST:
             self._add_job(d).set_complete()
-        self._expect_log_message({}, False)
+        count = len(self._offloader._open_jobs)
         self._expect_failed_jobs([])
         self._run_update({})
+        self._expect_log_message({}, False, count)
 
 
     def test_none_finished(self):
@@ -1474,9 +1339,9 @@
         for d in self.REGULAR_JOBLIST:
             self._add_job(d)
         new_jobs = self._offloader._open_jobs.copy()
-        self._expect_log_message(new_jobs, False)
         self._expect_failed_jobs([])
         self._run_update(new_jobs)
+        self._expect_log_message(new_jobs, False)
 
 
 class GsOffloaderMockTests(_TempResultsDirTestCase):
@@ -1502,15 +1367,12 @@
 
         """
         signal.alarm.side_effect = [0, timeout_util.TimeoutError('fubar')]
-        with mock.patch.object(gs_offloader, '_upload_cts_testresult',
-                               autospec=True) as upload:
-            upload.return_value = None
-            gs_offloader.GSOffloader(
-                    utils.DEFAULT_OFFLOAD_GSURI, False, 0).offload(
-                            self._job.queue_args[0],
-                            self._job.queue_args[1],
-                            self._job.queue_args[2])
-            self.assertTrue(os.path.isdir(self._job.queue_args[0]))
+        gs_offloader.GSOffloader(
+                utils.DEFAULT_OFFLOAD_GSURI, False, 0).offload(
+                        self._job.queue_args[0],
+                        self._job.queue_args[1],
+                        self._job.queue_args[2])
+        self.assertTrue(os.path.isdir(self._job.queue_args[0]))
 
 
     # TODO(ayatane): This tests passes when run locally, but it fails
@@ -1528,11 +1390,8 @@
 
         """
         signal.alarm.side_effect = [0, 0, timeout_util.TimeoutError('fubar')]
-        with mock.patch.object(gs_offloader, '_upload_cts_testresult',
-                               autospec=True) as upload, \
-             mock.patch.object(gs_offloader, '_get_cmd_list',
+        with mock.patch.object(gs_offloader, '_get_cmd_list',
                                autospec=True) as get_cmd_list:
-            upload.return_value = None
             get_cmd_list.return_value = ['test', '-d', self._job.queue_args[0]]
             gs_offloader.GSOffloader(
                     utils.DEFAULT_OFFLOAD_GSURI, False, 0).offload(
diff --git a/site_utils/host_info_store_testfile b/site_utils/host_info_store_testfile
new file mode 100644
index 0000000..b56846c
--- /dev/null
+++ b/site_utils/host_info_store_testfile
@@ -0,0 +1,11 @@
+{
+    "attributes": {
+        "attrib1": "1"
+    },
+    "labels": [
+        "some",
+        "labels"
+    ],
+    "serializer_version": 1,
+    "stable_versions": {}
+}
\ No newline at end of file
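A small sketch of how a test might consume the fixture above; plain json is used here rather than the project's host_info serializer:

import json

with open('site_utils/host_info_store_testfile') as f:
    info = json.load(f)

assert info['serializer_version'] == 1
assert info['attributes']['attrib1'] == '1'
assert info['labels'] == ['some', 'labels']
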
diff --git a/site_utils/hwid_lib.py b/site_utils/hwid_lib.py
deleted file mode 100644
index 2253335..0000000
--- a/site_utils/hwid_lib.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import urllib2
-
-from autotest_lib.client.common_lib import global_config
-
-# HWID info types to request.
-HWID_INFO_LABEL = 'dutlabel'
-HWID_INFO_BOM = 'bom'
-HWID_INFO_SKU = 'sku'
-HWID_INFO_TYPES = [HWID_INFO_BOM, HWID_INFO_SKU, HWID_INFO_LABEL]
-
-# HWID url vars.
-HWID_VERSION = 'v1'
-HWID_BASE_URL = 'https://www.googleapis.com/chromeoshwid'
-CHROMEOS_HWID_SERVER_URL = "https://chromeos-hwid.appspot.com/api/chromeoshwid"
-URL_FORMAT_STRING='%(base_url)s/%(version)s/%(info_type)s/%(hwid)s/?key=%(key)s'
-
-# Key file name to use when we don't want hwid labels.
-KEY_FILENAME_NO_HWID = 'no_hwid_labels'
-
-class HwIdException(Exception):
-    """Raised whenever anything fails in the hwid info request."""
-
-
-def get_hwid_info(hwid, info_type, key_file):
-    """Given a hwid and info type, return a dict of the requested info.
-
-    @param hwid: hwid to use for the query.
-    @param info_type: String of info type requested.
-    @param key_file: Filename that holds the key for authentication.
-
-    @return: A dict of the info.
-
-    @raises HwIdException: If hwid/info_type/key_file is invalid or there's an
-                           error anywhere related to getting the raw hwid info
-                           or decoding it.
-    """
-    # There are situations we don't want to call out to the hwid service, we use
-    # the key_file name as the indicator for that.
-    if key_file == KEY_FILENAME_NO_HWID:
-        return {}
-
-    if not isinstance(hwid, str):
-        raise ValueError('hwid is not a string.')
-
-    if info_type not in HWID_INFO_TYPES:
-        raise ValueError('invalid info type: "%s".' % info_type)
-
-    hwid_info_dict = _try_hwid_v1(hwid, info_type)
-    if hwid_info_dict is not None:
-        return hwid_info_dict
-    else:
-        logging.debug("Switch back to use old endpoint")
-
-    key = None
-    with open(key_file) as f:
-        key = f.read().strip()
-
-    url_format_dict = {'base_url': HWID_BASE_URL,
-                       'version': HWID_VERSION,
-                       'info_type': info_type,
-                       'hwid': urllib2.quote(hwid),
-                       'key': key}
-    return _fetch_hwid_response(url_format_dict)
-
-
-def get_all_possible_dut_labels(key_file):
-    """Return all possible labels that can be supplied by dutlabels.
-
-    We can send a dummy key to the service to retrieve all the possible
-    labels the service will provide under the dutlabel api call.  We need
-    this in order to track which labels we can remove if they're not detected
-    on a dut anymore.
-
-    @param key_file: Filename that holds the key for authentication.
-
-    @return: A list of all possible labels.
-    """
-    return get_hwid_info('dummy_hwid', HWID_INFO_LABEL, key_file).get(
-            'possible_labels', [])
-
-
-def _try_hwid_v1(hwid, info_type):
-    """Try chromeos-hwid endpoints for fetching hwid info.
-
-    @param hwid: a string hardware ID.
-    @param info_type: String of info type requested.
-
-    @return a dict of hwid info.
-    """
-    key_file_path = global_config.global_config.get_config_value(
-            'CROS', 'NEW_HWID_KEY', type=str, default="")
-    if key_file_path == "":
-        return None
-
-    key = None
-    with open(key_file_path) as f:
-        key = f.read().strip()
-
-    if key is None:
-        return None
-
-    url_format_dict = {'base_url': CHROMEOS_HWID_SERVER_URL,
-                       'version': HWID_VERSION,
-                       'info_type': info_type,
-                       'hwid': urllib2.quote(hwid),
-                       'key': key}
-
-    try:
-        return _fetch_hwid_response(url_format_dict)
-    except Exception as e:
-        logging.debug("fail to call new HWID endpoint: %s", str(e))
-        return None
-
-
-def _fetch_hwid_response(req_parameter_dict):
-    """Fetch and parse hwid response.
-
-    @param req_parameter_dict: A dict of url parameters.
-
-    @return a dict of hwid info.
-    """
-    url_request = URL_FORMAT_STRING % req_parameter_dict
-    try:
-        page_contents = urllib2.urlopen(url_request)
-    except (urllib2.URLError, urllib2.HTTPError) as e:
-        # TODO(kevcheng): Might need to scrub out key from exception message.
-        raise HwIdException('error retrieving raw hwid info: %s' % e)
-
-    try:
-        hwid_info_dict = json.load(page_contents)
-    except ValueError as e:
-        raise HwIdException('error decoding hwid info: %s - "%s"' %
-                            (e, page_contents.getvalue()))
-    finally:
-        page_contents.close()
-
-    return hwid_info_dict
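A hedged Python 3 sketch of the fetch-and-decode flow that the deleted `_fetch_hwid_response` implemented with urllib2; the function name and error type below are illustrative, not something the patch adds:

import json
from urllib.error import URLError
from urllib.request import urlopen


def fetch_json(url):
    """Fetch a URL and decode its JSON body (illustrative only)."""
    try:
        page = urlopen(url)
    except URLError as e:  # HTTPError is a subclass of URLError
        raise RuntimeError('error retrieving raw hwid info: %s' % e)
    try:
        return json.load(page)
    except ValueError as e:
        raise RuntimeError('error decoding hwid info: %s' % e)
    finally:
        page.close()
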
diff --git a/site_utils/hwid_lib_unittest.py b/site_utils/hwid_lib_unittest.py
deleted file mode 100644
index 0aaeaa8..0000000
--- a/site_utils/hwid_lib_unittest.py
+++ /dev/null
@@ -1,164 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import cStringIO
-import json
-import mock
-import os
-import shutil
-import tempfile
-import unittest
-import urllib2
-
-import common
-
-from autotest_lib.site_utils import hwid_lib
-
-
-class HwIdUnittests(unittest.TestCase):
-    """Unittest for testing get_hwid_info."""
-
-    def setUp(self):
-        # Create tmp dir and dummy key files.
-        self.tmp_dir = tempfile.mkdtemp(prefix='hwid_test')
-        self.dummy_key = 'dummy_key'
-        self.dummy_key_file = os.path.join(self.tmp_dir, 'dummy_key')
-        with open(self.dummy_key_file, 'w') as f:
-            f.write(self.dummy_key)
-        self.dummy_key_file_spaces = os.path.join(self.tmp_dir,
-                                                  'dummy_key_spaces')
-        with open(self.dummy_key_file_spaces, 'w') as f:
-            f.write('  %s   ' % self.dummy_key)
-        self.dummy_key_file_newline = os.path.join(self.tmp_dir,
-                                                  'dummy_key_newline')
-        with open(self.dummy_key_file_newline, 'w') as f:
-            f.write('%s\n' % self.dummy_key)
-        self.invalid_dummy_key_file = os.path.join(self.tmp_dir,
-                                                   'invalid_dummy_key_file')
-
-
-    def tearDown(self):
-        mock.patch.stopall()
-        if os.path.exists(self.tmp_dir):
-            shutil.rmtree(self.tmp_dir)
-
-
-    def validate_exception(self, exception, *args):
-        """Helper method to validate proper exception is raised.
-
-        @param exception: The exception class to check against.
-        @param args: The unamed args to pass to func.
-        """
-        with self.assertRaises(exception):
-            hwid_lib.get_hwid_info(*args)
-
-
-    def test_none_hwid(self):
-        """Test that an empty hwid raises a ValueError."""
-        self.validate_exception(ValueError, None, None, None)
-
-
-    def test_invalid_info_type(self):
-        """Test that an invalid info type raises a ValueError."""
-        self.validate_exception(ValueError, 'hwid', 'invalid_info_type', None)
-
-
-    def test_fail_open_with_nonexistent_file(self):
-        """Test that trying to open non-existent file will raise an IOError."""
-        self.validate_exception(IOError, 'hwid', hwid_lib.HWID_INFO_BOM,
-                                self.invalid_dummy_key_file)
-
-
-    @mock.patch('urllib2.urlopen', side_effect=urllib2.URLError('url error'))
-    def test_fail_to_open_url_urlerror(self, *args, **dargs):
-        """Test that failing to open a url will raise a HwIdException."""
-        self.validate_exception(hwid_lib.HwIdException, 'hwid',
-                                hwid_lib.HWID_INFO_BOM, self.dummy_key_file)
-
-
-    # pylint: disable=missing-docstring
-    @mock.patch('urllib2.urlopen')
-    def test_fail_decode_hwid(self, mock_urlopen, *args, **dargs):
-        """Test that supplying bad json raises a HwIdException."""
-        mock_page_contents = mock.Mock(wraps=cStringIO.StringIO('bad json'))
-        mock_urlopen.return_value = mock_page_contents
-        self.validate_exception(hwid_lib.HwIdException, 'hwid',
-                                hwid_lib.HWID_INFO_BOM, self.dummy_key_file)
-        mock_page_contents.close.assert_called_once_with()
-
-
-    # pylint: disable=missing-docstring
-    @mock.patch('urllib2.urlopen')
-    def test_success(self, mock_urlopen, *args, **dargs):
-        """Test that get_hwid_info successfully returns a hwid dict.
-
-        We want to check that it works on all valid info types.
-        """
-        returned_json = '{"key1": "data1"}'
-        expected_dict = json.loads(returned_json)
-        for valid_info_type in hwid_lib.HWID_INFO_TYPES:
-            mock_page_contents = mock.Mock(
-                    wraps=cStringIO.StringIO(returned_json))
-            mock_urlopen.return_value = mock_page_contents
-            self.assertEqual(hwid_lib.get_hwid_info('hwid', valid_info_type,
-                                                    self.dummy_key_file),
-                             expected_dict)
-            mock_page_contents.close.assert_called_once_with()
-
-
-    # pylint: disable=missing-docstring
-    @mock.patch('urllib2.urlopen')
-    def test_url_properly_constructed(self, mock_urlopen, *args, **dargs):
-        """Test that the url is properly constructed.
-
-        Let's make sure that the key is properly cleaned before getting
-        inserted into the url by trying all the different dummy_key_files.
-        """
-        info_type = hwid_lib.HWID_INFO_BOM
-        hwid = 'mock_hwid'
-        expected_url = ('%s/%s/%s/%s/?key=%s' % (hwid_lib.HWID_BASE_URL,
-                                                 hwid_lib.HWID_VERSION,
-                                                 info_type, hwid,
-                                                 self.dummy_key))
-
-        for dummy_key_file in [self.dummy_key_file,
-                               self.dummy_key_file_spaces,
-                               self.dummy_key_file_newline]:
-            mock_page_contents = mock.Mock(wraps=cStringIO.StringIO('{}'))
-            mock_urlopen.return_value = mock_page_contents
-            hwid_lib.get_hwid_info(hwid, info_type, dummy_key_file)
-            mock_urlopen.assert_called_with(expected_url)
-
-
-    # pylint: disable=missing-docstring
-    @mock.patch('urllib2.urlopen')
-    def test_url_properly_constructed_again(self, mock_urlopen, *args, **dargs):
-        """Test that the url is properly constructed with special hwid.
-
-        Let's make sure that a hwid with a space is properly transformed.
-        """
-        info_type = hwid_lib.HWID_INFO_BOM
-        hwid = 'mock hwid with space'
-        hwid_quoted = 'mock%20hwid%20with%20space'
-        expected_url = ('%s/%s/%s/%s/?key=%s' % (hwid_lib.HWID_BASE_URL,
-                                                 hwid_lib.HWID_VERSION,
-                                                 info_type, hwid_quoted,
-                                                 self.dummy_key))
-
-        mock_page_contents = mock.Mock(wraps=cStringIO.StringIO('{}'))
-        mock_urlopen.return_value = mock_page_contents
-        hwid_lib.get_hwid_info(hwid, info_type, self.dummy_key_file)
-        mock_urlopen.assert_called_with(expected_url)
-
-
-    def test_dummy_key_file(self):
-        """Test that we get an empty dict with a dummy key file."""
-        info_type = hwid_lib.HWID_INFO_BOM
-        hwid = 'mock hwid with space'
-        key_file = hwid_lib.KEY_FILENAME_NO_HWID
-        self.assertEqual(hwid_lib.get_hwid_info(hwid, info_type, key_file), {})
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/site_utils/job_directories.py b/site_utils/job_directories.py
index b8b6cce..e66df66 100755
--- a/site_utils/job_directories.py
+++ b/site_utils/job_directories.py
@@ -6,15 +6,17 @@
 import os
 import re
 import shutil
+import six
 
 import common
+
 from autotest_lib.client.common_lib import time_utils
 from autotest_lib.client.common_lib import utils
 from autotest_lib.server.cros.dynamic_suite import constants
 from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -24,13 +26,13 @@
 def is_job_expired(age_limit, timestamp):
     """Check whether a job timestamp is older than an age limit.
 
-  @param age_limit: Minimum age, measured in days.  If the value is
-                    not positive, the job is always expired.
-  @param timestamp: Timestamp of the job whose age we are checking.
-                    The format must match time_utils.TIME_FMT.
+    @param age_limit: Minimum age, measured in days.  If the value is
+                      not positive, the job is always expired.
+    @param timestamp: Timestamp of the job whose age we are checking.
+                      The format must match time_utils.TIME_FMT.
 
-  @returns True iff the job is old enough to be expired.
-  """
+    @returns True if the job is old enough to be expired.
+    """
     if age_limit <= 0:
         return True
     job_time = time_utils.time_string_to_datetime(timestamp)
@@ -89,33 +91,30 @@
     return None
 
 
-class _JobDirectory(object):
+class _JobDirectory(six.with_metaclass(abc.ABCMeta, object)):
     """State associated with a job to be offloaded.
 
-  The full life-cycle of a job (including failure events that
-  normally don't occur) looks like this:
-   1. The job's results directory is discovered by
-      `get_job_directories()`, and a job instance is created for it.
-   2. Calls to `offload()` have no effect so long as the job
-      isn't complete in the database and the job isn't expired
-      according to the `age_limit` parameter.
-   3. Eventually, the job is both finished and expired.  The next
-      call to `offload()` makes the first attempt to offload the
-      directory to GS.  Offload is attempted, but fails to complete
-      (e.g. because of a GS problem).
-   4. Finally, a call to `offload()` succeeds, and the directory no
-      longer exists.  Now `is_offloaded()` is true, so the job
-      instance is deleted, and future failures will not mention this
-      directory any more.
+    The full life-cycle of a job (including failure events that
+    normally don't occur) looks like this:
+      1. The job's results directory is discovered by
+         `get_job_directories()`, and a job instance is created for it.
+      2. Calls to `offload()` have no effect so long as the job
+         isn't complete in the database and the job isn't expired
+         according to the `age_limit` parameter.
+      3. Eventually, the job is both finished and expired.  The next
+         call to `offload()` makes the first attempt to offload the
+         directory to GS.  Offload is attempted, but fails to complete
+         (e.g. because of a GS problem).
+      4. Finally, a call to `offload()` succeeds, and the directory no
+         longer exists.  Now `is_offloaded()` is true, so the job
+         instance is deleted, and future failures will not mention this
+         directory any more.
 
-  Only steps 1. and 4. are guaranteed to occur.  The others depend
-  on the timing of calls to `offload()`, and on the reliability of
-  the actual offload process.
+    Only steps 1. and 4. are guaranteed to occur.  The others depend
+    on the timing of calls to `offload()`, and on the reliability of
+    the actual offload process.
 
-  """
-
-    __metaclass__ = abc.ABCMeta
-
+    """
     GLOB_PATTERN = None  # must be redefined in subclass
 
     def __init__(self, resultsdir):
@@ -133,22 +132,22 @@
     def get_timestamp_if_finished(self):
         """Return this job's timestamp from the database.
 
-    If the database has not marked the job as finished, return
-    `None`.  Otherwise, return a timestamp for the job.  The
-    timestamp is to be used to determine expiration in
-    `is_job_expired()`.
+        If the database has not marked the job as finished, return
+        `None`.  Otherwise, return a timestamp for the job.  The
+        timestamp is to be used to determine expiration in
+        `is_job_expired()`.
 
-    @return Return `None` if the job is still running; otherwise
-            return a string with a timestamp in the appropriate
-            format.
-    """
+        @return Return `None` if the job is still running; otherwise
+                return a string with a timestamp in the appropriate
+                format.
+        """
         raise NotImplementedError("_JobDirectory.get_timestamp_if_finished")
 
     def process_gs_instructions(self):
         """Process any gs_offloader instructions for this special task.
 
-    @returns True/False if there is anything left to offload.
-    """
+        @returns True/False if there is anything left to offload.
+        """
         # Default support is to still offload the directory.
         return True
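
A minimal sketch (not part of the patch) of how a caller might drive the life-cycle described in the `_JobDirectory` docstring above. `offload()` and `is_offloaded()` are assumed here to take no arguments, an assumption made only for brevity; `is_job_expired()` and `RegularJobDirectory` are the module's own names.

    # Sketch only: one pass of the offload life-cycle for a single job.
    from autotest_lib.site_utils.job_directories import (RegularJobDirectory,
                                                         is_job_expired)

    def try_offload(job_dir, age_limit):
        timestamp = job_dir.get_timestamp_if_finished()
        if timestamp is None:
            return False                # step 2: job not yet finished in the DB
        if not is_job_expired(age_limit, timestamp):
            return False                # step 2: finished but not yet expired
        job_dir.offload()               # step 3/4: attempt the offload to GS
        return job_dir.is_offloaded()   # True only once the directory is gone

    # e.g. try_offload(RegularJobDirectory('118-fubar'), age_limit=1)
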
 
@@ -165,8 +164,8 @@
     def process_gs_instructions(self):
         """Process any gs_offloader instructions for this job.
 
-    @returns True/False if there is anything left to offload.
-    """
+        @returns True/False if there is anything left to offload.
+        """
         # Go through the gs_offloader instructions file for each test in this job.
         for path in glob.glob(
                 os.path.join(self.dirname, '*',
@@ -186,9 +185,9 @@
     def get_timestamp_if_finished(self):
         """Get the timestamp to use for finished jobs.
 
-    @returns the latest hqe finished_on time. If the finished_on times are null
-             returns the job's created_on time.
-    """
+        @returns the latest hqe finished_on time. If the finished_on times are
+                 null, returns the job's created_on time.
+        """
         entry = _cached_afe().get_jobs(id=self._id, finished=True)
         if not entry:
             return None
@@ -276,9 +275,9 @@
     def get_timestamp_if_finished(self):
         """Get the timestamp to use for finished jobs.
 
-    @returns the latest hqe finished_on time. If the finished_on times are null
-             returns the job's created_on time.
-    """
+        @returns the latest hqe finished_on time. If the finished_on times are
+                 null, returns the job's created_on time.
+        """
         marker_path = os.path.join(self.dirname, _OFFLOAD_MARKER)
         try:
             with open(marker_path) as f:
@@ -300,4 +299,4 @@
     global _AFE
     if _AFE is None:
         _AFE = frontend_wrappers.RetryingAFE()
-    return _AFE
+    return _AFE
\ No newline at end of file
diff --git a/site_utils/job_directories_unittest.py b/site_utils/job_directories_unittest.py
index fc10b06..db20534 100644
--- a/site_utils/job_directories_unittest.py
+++ b/site_utils/job_directories_unittest.py
@@ -6,13 +6,14 @@
 
 import contextlib
 import datetime
-import mox
 import os
 import shutil
 import tempfile
 import unittest
+from unittest.mock import patch
 
 import common
+
 from autotest_lib.site_utils import job_directories
 from autotest_lib.client.common_lib import time_utils
 
@@ -81,8 +82,7 @@
         )
 
 
-
-class JobDirectorySubclassTests(mox.MoxTestBase):
+class JobDirectorySubclassTests(unittest.TestCase):
     """Test specific to RegularJobDirectory and SpecialJobDirectory.
 
     This provides coverage for the implementation in both
@@ -92,8 +92,9 @@
 
     def setUp(self):
         super(JobDirectorySubclassTests, self).setUp()
-        self.mox.StubOutWithMock(job_directories, '_AFE')
-
+        patcher = patch.object(job_directories, '_AFE')
+        self._mock = patcher.start()
+        self.addCleanup(patcher.stop)
 
     def test_regular_job_fields(self):
         """Test the constructor for `RegularJobDirectory`.
@@ -107,7 +108,6 @@
         self.assertEqual(job.dirname, resultsdir)
         self.assertEqual(job._id, '118')
 
-
     def test_special_job_fields(self):
         """Test the constructor for `SpecialJobDirectory`.
 
@@ -121,7 +121,6 @@
         self.assertEqual(job.dirname, resultsdir)
         self.assertEqual(job._id, '118')
 
-
     def _check_finished_job(self, jobtime, hqetimes, expected):
         """Mock and test behavior of a finished job.
 
@@ -138,17 +137,16 @@
 
         """
         job = job_directories.RegularJobDirectory('118-fubar')
-        job_directories._AFE.get_jobs(
-                id=job._id, finished=True).AndReturn(
-                        [_MockJob(jobtime)])
-        job_directories._AFE.get_host_queue_entries(
-                finished_on__isnull=False,
-                job_id=job._id).AndReturn(
-                        [_MockHostQueueEntry(t) for t in hqetimes])
-        self.mox.ReplayAll()
-        self.assertEqual(expected, job.get_timestamp_if_finished())
-        self.mox.VerifyAll()
+        self._mock.get_jobs.return_value = [_MockJob(jobtime)]
 
+        self._mock.get_host_queue_entries.return_value = ([
+                _MockHostQueueEntry(t) for t in hqetimes
+        ])
+
+        self.assertEqual(expected, job.get_timestamp_if_finished())
+        self._mock.get_jobs.assert_called_with(id=job._id, finished=True)
+        self._mock.get_host_queue_entries.assert_called_with(
+                finished_on__isnull=False, job_id=job._id)
 
     def test_finished_regular_job(self):
         """Test getting the timestamp for a finished regular job.
@@ -164,7 +162,6 @@
                                  [hqe_timestamp],
                                  hqe_timestamp)
 
-
     def test_finished_regular_job_multiple_hqes(self):
         """Test getting the timestamp for a regular job with multiple hqes.
 
@@ -185,13 +182,11 @@
         self._check_finished_job(created_timestamp,
                                  hqe_list,
                                  newer_hqe_timestamp)
-        self.mox.ResetAll()
         hqe_list.reverse()
         self._check_finished_job(created_timestamp,
                                  hqe_list,
                                  newer_hqe_timestamp)
 
-
     def test_finished_regular_job_null_finished_times(self):
         """Test getting the timestamp for an aborted regular job.
 
@@ -204,7 +199,6 @@
         timestamp = make_timestamp(0, True)
         self._check_finished_job(timestamp, [], timestamp)
 
-
     def test_unfinished_regular_job(self):
         """Test getting the timestamp for an unfinished regular job.
 
@@ -214,12 +208,9 @@
 
         """
         job = job_directories.RegularJobDirectory('118-fubar')
-        job_directories._AFE.get_jobs(
-                id=job._id, finished=True).AndReturn([])
-        self.mox.ReplayAll()
+        self._mock.get_jobs.return_value = []
         self.assertIsNone(job.get_timestamp_if_finished())
-        self.mox.VerifyAll()
-
+        self._mock.get_jobs.assert_called_with(id=job._id, finished=True)
 
     def test_finished_special_job(self):
         """Test getting the timestamp for a finished special job.
@@ -232,14 +223,13 @@
         job = job_directories.SpecialJobDirectory(
                 'hosts/host1/118-reset')
         timestamp = make_timestamp(0, True)
-        job_directories._AFE.get_special_tasks(
-                id=job._id, is_complete=True).AndReturn(
-                    [_MockSpecialTask(timestamp)])
-        self.mox.ReplayAll()
+        self._mock.get_special_tasks.return_value = ([
+                _MockSpecialTask(timestamp)
+        ])
         self.assertEqual(timestamp,
                          job.get_timestamp_if_finished())
-        self.mox.VerifyAll()
-
+        self._mock.get_special_tasks.assert_called_with(id=job._id,
+                                                        is_complete=True)
 
     def test_unfinished_special_job(self):
         """Test getting the timestamp for an unfinished special job.
@@ -251,11 +241,10 @@
         """
         job = job_directories.SpecialJobDirectory(
                 'hosts/host1/118-reset')
-        job_directories._AFE.get_special_tasks(
-                id=job._id, is_complete=True).AndReturn([])
-        self.mox.ReplayAll()
+        self._mock.get_special_tasks.return_value = []
         self.assertIsNone(job.get_timestamp_if_finished())
-        self.mox.VerifyAll()
+        self._mock.get_special_tasks.assert_called_with(id=job._id,
+                                                        is_complete=True)
 
 
 class JobExpirationTests(unittest.TestCase):
@@ -268,7 +257,6 @@
             job_directories.is_job_expired(
                 _TEST_EXPIRATION_AGE, timestamp))
 
-
     def test_alive(self):
         """Test detection of a job that's not expired."""
         # N.B.  This test may fail if its run time exceeds more than
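
The test conversions above all follow the same mox-to-unittest.mock pattern: `StubOutWithMock()` becomes `patch.object()` started in `setUp()`, expectations recorded before `ReplayAll()` become `return_value` assignments made before the call under test, and `VerifyAll()` becomes explicit `assert_called_with()` checks afterwards. A generic, self-contained sketch of that pattern (the `_Db` and `lookup` names are made up for illustration):

    import unittest
    from unittest.mock import patch

    class _Db(object):                      # stand-in collaborator (made up)
        def query(self, key):
            raise NotImplementedError

    def lookup(db, key):                    # code under test (made up)
        return db.query(key=key)

    class ConversionExample(unittest.TestCase):
        def test_lookup(self):
            patcher = patch.object(_Db, 'query')
            mock_query = patcher.start()
            self.addCleanup(patcher.stop)
            mock_query.return_value = 'value'       # was: record + ReplayAll()
            self.assertEqual('value', lookup(_Db(), 'k'))
            mock_query.assert_called_with(key='k')  # was: VerifyAll()
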
diff --git a/site_utils/job_history.py b/site_utils/job_history.py
index 69db85e..fad7764 100755
--- a/site_utils/job_history.py
+++ b/site_utils/job_history.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,6 +7,10 @@
 # including special tasks executed before and after the job, and each steps
 # start/end time.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import argparse
 import datetime as datetime_base
 
@@ -411,7 +415,7 @@
 
     job_info = get_job_info(options.job_id)
 
-    print job_info
+    print(job_info)
 
 
 if __name__ == '__main__':
diff --git a/site_utils/kill_slow_queries.py b/site_utils/kill_slow_queries.py
deleted file mode 100755
index 29872e9..0000000
--- a/site_utils/kill_slow_queries.py
+++ /dev/null
@@ -1,164 +0,0 @@
-#!/usr/bin/python2
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Kill slow queries in local autotest database."""
-
-import logging
-import optparse
-import sys
-import time
-
-import common
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.site_utils import gmail_lib
-from autotest_lib.client.common_lib import utils
-from autotest_lib.site_utils.stats import mysql_stats
-
-try:
-    from chromite.lib import metrics
-    from chromite.lib import ts_mon_config
-except ImportError:
-    metrics = utils.metrics_mock
-    ts_mon_config = utils.metrics_mock
-
-AT_DIR='/usr/local/autotest'
-DEFAULT_USER = global_config.global_config.get_config_value(
-        'CROS', 'db_backup_user', type=str, default='')
-DEFAULT_PASSWD = global_config.global_config.get_config_value(
-        'CROS', 'db_backup_password', type=str, default='')
-DEFAULT_MAIL = global_config.global_config.get_config_value(
-        'SCHEDULER', 'notify_email', type=str, default='')
-
-
-def parse_options():
-    """Parse the command line arguments."""
-    usage = 'usage: %prog [options]'
-    parser = optparse.OptionParser(usage=usage)
-    parser.add_option('-u', '--user', default=DEFAULT_USER,
-                      help='User to login to the Autotest DB. Default is the '
-                           'one defined in config file.')
-    parser.add_option('-p', '--password', default=DEFAULT_PASSWD,
-                      help='Password to login to the Autotest DB. Default is '
-                           'the one defined in config file.')
-    parser.add_option('-t', '--timeout', type=int, default=300,
-                      help='Timeout boundary of the slow database query. '
-                           'Default is 300s')
-    parser.add_option('-m', '--mail', default=DEFAULT_MAIL,
-                      help='Mail address to send the summary to. Default is '
-                           'ChromeOS infra Deputy')
-    options, args = parser.parse_args()
-    return parser, options, args
-
-
-def verify_options_and_args(options, args):
-    """Verify the validity of options and args.
-
-    @param options: The parsed options to verify.
-    @param args: The parsed args to verify.
-
-    @returns: True if verification passes, False otherwise.
-    """
-    if args:
-        logging.error('Unknown arguments: ' + str(args))
-        return False
-
-    if not (options.user and options.password):
-        logging.error('Failed to get the default user or password for Autotest'
-                      ' DB. Please specify them through the command line.')
-        return False
-    return True
-
-
-def format_the_output(slow_queries):
-    """Convert a list of slow queries into a readable string format.
-
-    e.g. [(a, b, c...)]  -->
-         "Id: a
-          User: b
-          Host: c
-          ...
-         "
-    @param slow_queries: A list of tuples, one tuple contains all the info about
-                         one single slow query.
-
-    @returns: one clean string representation of all the slow queries.
-    """
-    query_str_list = [('Id: %s\nUser: %s\nHost: %s\ndb: %s\nCommand: %s\n'
-                       'Time: %s\nState: %s\nInfo: %s\n') %
-                      q for q in slow_queries]
-    return '\n'.join(query_str_list)
-
-
-def kill_slow_queries(user, password, timeout):
-    """Kill the slow database queries running beyond the timeout limit.
-
-    @param user: User to login to the Autotest DB.
-    @param password: Password to login to the Autotest DB.
-    @param timeout: Timeout limit to kill the slow queries.
-
-    @returns: a tuple, first element is the string representation of all the
-              killed slow queries, second element is the total number of them.
-    """
-    cursor = mysql_stats.RetryingConnection('localhost', user, password)
-    cursor.Connect()
-
-    # Get the processlist.
-    cursor.Execute('SHOW FULL PROCESSLIST')
-    processlist = cursor.Fetchall()
-    # Filter out the slow queries and kill them.
-    slow_queries = [p for p in processlist if p[4]=='Query' and p[5]>=timeout]
-    queries_str = ''
-    num_killed_queries = 0
-    if slow_queries:
-        queries_str = format_the_output(slow_queries)
-        queries_ids = [q[0] for q in slow_queries]
-        logging.info('Start killing following slow queries\n%s', queries_str)
-        for query_id in queries_ids:
-            logging.info('Killing %s...', query_id)
-            cursor.Execute('KILL %d' % query_id)
-            logging.info('Done!')
-            num_killed_queries += 1
-    else:
-        logging.info('No slow queries over %ds!', timeout)
-    return (queries_str, num_killed_queries)
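
The indices used in the filter follow MySQL's standard SHOW FULL PROCESSLIST column order (Id, User, Host, db, Command, Time, State, Info): index 4 is the command type and index 5 the elapsed time in seconds. A minimal illustration with a fabricated row:

    # Fabricated PROCESSLIST row, standard column order as noted above.
    row = (42, 'autotest', 'localhost', 'chromeos_autotest_db',
           'Query', 950, 'Sending data', 'SELECT ...')
    timeout = 300
    is_slow = row[4] == 'Query' and row[5] >= timeout   # True -> KILL 42
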
-
-
-def main():
-    """Main entry."""
-    # Clear all loggers to make sure the following basicConfig take effect.
-    logging.shutdown()
-    reload(logging)
-    logging.basicConfig(format='%(asctime)s %(message)s',
-                        datefmt='%m/%d/%Y %H:%M:%S', level=logging.DEBUG)
-
-    with ts_mon_config.SetupTsMonGlobalState(service_name='kill_slow_queries',
-                                             indirect=True):
-        count = 0
-        parser, options, args = parse_options()
-        if not verify_options_and_args(options, args):
-            parser.print_help()
-            return 1
-        try:
-            while True:
-                result_log_strs, count = kill_slow_queries(
-                    options.user, options.password, options.timeout)
-                if result_log_strs:
-                    gmail_lib.send_email(
-                        options.mail,
-                        'Successfully killed slow autotest db queries',
-                        'Below are killed queries:\n%s' % result_log_strs)
-                    m = 'chromeos/autotest/afe_db/killed_slow_queries'
-                    metrics.Counter(m).increment_by(count)
-                time.sleep(options.timeout)
-        except Exception as e:
-            m = 'chromeos/autotest/afe_db/failed_to_kill_query'
-            metrics.Counter(m).increment()
-            logging.error('Failed to kill slow db queries.\n%s', e)
-            raise
-
-
-if __name__ == '__main__':
-    sys.exit(main())
-
diff --git a/site_utils/lab_inventory.py b/site_utils/lab_inventory.py
deleted file mode 100755
index 31a29a5..0000000
--- a/site_utils/lab_inventory.py
+++ /dev/null
@@ -1,1393 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Create e-mail reports of the Lab's DUT inventory.
-
-Gathers a list of all DUTs of interest in the Lab, segregated by
-model and pool, and determines whether each DUT is working or
-broken.  Then, sends one or more e-mail reports summarizing the
-status to e-mail addresses provided on the command line.
-
-usage:  lab_inventory.py [ options ] [ model ... ]
-
-Options:
---duration / -d <hours>
-    How far back in time to search job history to determine DUT
-    status.
-
---model-notify <address>[,<address>]
-    Send the "model status" e-mail to all the specified e-mail
-    addresses.
-
---pool-notify <address>[,<address>]
-    Send the "pool status" e-mail to all the specified e-mail
-    addresses.
-
---recommend <number>
-    When generating the "model status" e-mail, include a list of
-    <number> specific DUTs to be recommended for repair.
-
---report-untestable
-    Scan the inventory for DUTs that can't test because they're stuck in
-    repair loops, or because the scheduler can't give them work.
-
---logdir <directory>
-    Log progress and actions in a file under this directory.  Text
-    of any e-mail sent will also be logged in a timestamped file in
-    this directory.
-
---debug
-    Suppress all logging, metrics reporting, and sending e-mail.
-    Instead, write the output that would be generated onto stdout.
-
-<model> arguments:
-    With no arguments, gathers the status for all models in the lab.
-    With one or more named models on the command line, restricts
-    reporting to just those models.
-"""
-
-
-import argparse
-import collections
-import logging
-import logging.handlers
-import os
-import re
-import sys
-import time
-
-import common
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import time_utils
-from autotest_lib.frontend.afe.json_rpc import proxy
-from autotest_lib.server import constants
-from autotest_lib.server import site_utils
-from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
-from autotest_lib.server.hosts import servo_host
-from autotest_lib.server.lib import status_history
-from autotest_lib.site_utils import gmail_lib
-from chromite.lib import metrics
-
-
-CRITICAL_POOLS = constants.Pools.CRITICAL_POOLS
-SPARE_POOL = constants.Pools.SPARE_POOL
-MANAGED_POOLS = constants.Pools.MANAGED_POOLS
-
-# _EXCLUDED_LABELS - A set of labels that disqualify a DUT from
-#     monitoring by this script.  Currently, we're excluding these:
-#   + 'adb' - We're not ready to monitor Android or Brillo hosts.
-#   + 'board:guado_moblab' - These are maintained by a separate
-#     process that doesn't use this script.
-#   + 'board:veyron_rialto' due to crbug.com/854404
-
-_EXCLUDED_LABELS = {'adb', 'board:guado_moblab',
-                    'board:veyron_rialto'}
-
-# _DEFAULT_DURATION:
-#     Default value used for the --duration command line option.
-#     Specifies how far back in time to search in order to determine
-#     DUT status.
-
-_DEFAULT_DURATION = 24
-
-# _LOGDIR:
-#     Relative path used in the calculation of the default setting for
-#     the --logdir option.  The full path is relative to the root of the
-#     autotest directory, as determined from sys.argv[0].
-# _LOGFILE:
-#     Basename of a file to which general log information will be
-#     written.
-# _LOG_FORMAT:
-#     Format string for log messages.
-
-_LOGDIR = os.path.join('logs', 'dut-data')
-_LOGFILE = 'lab-inventory.log'
-_LOG_FORMAT = '%(asctime)s | %(levelname)-10s | %(message)s'
-
-# Pattern describing location-based host names in the Chrome OS test
-# labs.  Each DUT hostname designates the DUT's location:
-#   * A lab (room) that's physically separated from other labs
-#     (i.e. there's a door).
-#   * A row (or aisle) of DUTs within the lab.
-#   * A vertical rack of shelves on the row.
-#   * A specific host on one shelf of the rack.
-
-_HOSTNAME_PATTERN = re.compile(
-        r'(chromeos\d+)-row(\d+)-rack(\d+)-host(\d+)')
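
For example (hostname made up for illustration), the four location fields the pattern captures:

    import re
    # Same expression as _HOSTNAME_PATTERN above, shown with a sample match.
    pattern = re.compile(r'(chromeos\d+)-row(\d+)-rack(\d+)-host(\d+)')
    lab, row, rack, host = pattern.match('chromeos2-row3-rack4-host5').groups()
    # lab == 'chromeos2', row == '3', rack == '4', host == '5'
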
-
-# _REPAIR_LOOP_THRESHOLD:
-#    The number of repeated Repair tasks that must be seen to declare
-#    that a DUT is stuck in a repair loop.
-
-_REPAIR_LOOP_THRESHOLD = 4
-
-
-_METRICS_PREFIX = 'chromeos/autotest/inventory'
-_UNTESTABLE_PRESENCE_METRIC = metrics.BooleanMetric(
-    _METRICS_PREFIX + '/untestable',
-    'DUTs that cannot be scheduled for testing')
-
-_MISSING_DUT_METRIC = metrics.Counter(
-    _METRICS_PREFIX + '/missing', 'DUTs which cannot be found by lookup queries'
-    ' because they are invalid or deleted')
-
-# _Diagnosis - namedtuple corresponding to the return value from
-# `HostHistory.last_diagnosis()`
-_Diagnosis = collections.namedtuple('_Diagnosis', ['status', 'task'])
-
-def _get_diagnosis(history):
-    dut_present = True
-    try:
-        diagnosis = _Diagnosis(*history.last_diagnosis())
-        if (diagnosis.status == status_history.BROKEN
-                and diagnosis.task.end_time < history.start_time):
-            return _Diagnosis(status_history.UNUSED, diagnosis.task)
-        else:
-            return diagnosis
-    except proxy.JSONRPCException as e:
-        logging.warn(e)
-        dut_present = False
-    finally:
-        _MISSING_DUT_METRIC.increment(
-            fields={'host': history.hostname, 'presence': dut_present})
-    return _Diagnosis(None, None)
-
-
-def _host_is_working(history):
-    return _get_diagnosis(history).status == status_history.WORKING
-
-
-def _host_is_broken(history):
-    return _get_diagnosis(history).status == status_history.BROKEN
-
-
-def _host_is_idle(history):
-    idle_statuses = {status_history.UNUSED, status_history.UNKNOWN}
-    return _get_diagnosis(history).status in idle_statuses
-
-
-class _HostSetInventory(object):
-    """Maintains a set of related `HostJobHistory` objects.
-
-    Current usage of this class is that all DUTs are part of a single
-    scheduling pool of DUTs for a single model; however, this class makes
-    no assumptions about the actual relationship among the DUTs.
-
-    The collection is segregated into disjoint categories of "working",
-    "broken", and "idle" DUTs.  Accessor methods allow finding both the
-    list of DUTs in each category, as well as counts of each category.
-
-    Performance note:  Certain methods in this class are potentially
-    expensive:
-      * `get_working()`
-      * `get_working_list()`
-      * `get_broken()`
-      * `get_broken_list()`
-      * `get_idle()`
-      * `get_idle_list()`
-    The first time any one of these methods is called, it causes
-    multiple RPC calls with a relatively expensive set of database
-    queries.  However, the results of the queries are cached in the
-    individual `HostJobHistory` objects, so only the first call
-    actually pays the full cost.
-
-    Additionally, `get_working_list()`, `get_broken_list()` and
-    `get_idle_list()` cache their return values to avoid recalculating
-    lists at every call; this caching is separate from the caching of
-    RPC results described above.
-
-    This class is deliberately constructed to delay the RPC cost until
-    the accessor methods are called (rather than to query in
-    `record_host()`) so that it's possible to construct a complete
-    `_LabInventory` without making the expensive queries at creation
-    time.  `_populate_model_counts()`, below, assumes this behavior.
-    """
-
-    def __init__(self):
-        self._histories = []
-        self._working_list = None
-        self._broken_list = None
-        self._idle_list = None
-
-    def record_host(self, host_history):
-        """Add one `HostJobHistory` object to the collection.
-
-        @param host_history The `HostJobHistory` object to be
-                            remembered.
-        """
-        self._working_list = None
-        self._broken_list = None
-        self._idle_list = None
-        self._histories.append(host_history)
-
-    def get_working_list(self):
-        """Return a list of all working DUTs in the pool.
-
-        Filter `self._histories` for histories where the DUT is
-        diagnosed as working.
-
-        Cache the result so that we only calculate it once.
-
-        @return A list of HostJobHistory objects.
-        """
-        if self._working_list is None:
-            self._working_list = [h for h in self._histories
-                                  if _host_is_working(h)]
-        return self._working_list
-
-    def get_working(self):
-        """Return the number of working DUTs in the pool."""
-        return len(self.get_working_list())
-
-    def get_broken_list(self):
-        """Return a list of all broken DUTs in the pool.
-
-        Filter `self._histories` for histories where the DUT is
-        diagnosed as broken.
-
-        Cache the result so that we only calculate it once.
-
-        @return A list of HostJobHistory objects.
-        """
-        if self._broken_list is None:
-            self._broken_list = [h for h in self._histories
-                                 if _host_is_broken(h)]
-        return self._broken_list
-
-    def get_broken(self):
-        """Return the number of broken DUTs in the pool."""
-        return len(self.get_broken_list())
-
-    def get_idle_list(self):
-        """Return a list of all idle DUTs in the pool.
-
-        Filter `self._histories` for histories where the DUT is
-        diagnosed as idle.
-
-        Cache the result so that we only calculate it once.
-
-        @return A list of HostJobHistory objects.
-        """
-        if self._idle_list is None:
-            self._idle_list = [h for h in self._histories
-                               if _host_is_idle(h)]
-        return self._idle_list
-
-    def get_idle(self):
-        """Return the number of idle DUTs in the pool."""
-        return len(self.get_idle_list())
-
-    def get_total(self):
-        """Return the total number of DUTs in the pool."""
-        return len(self._histories)
-
-    def get_all_histories(self):
-        return self._histories
-
-
-class _PoolSetInventory(object):
-    """Maintains a set of `HostJobHistory`s for a set of pools.
-
-    The collection is segregated into disjoint categories of "working",
-    "broken", and "idle" DUTs.  Accessor methods allow finding both the
-    list of DUTs in each category, as well as counts of each category.
-    Accessor queries can be for an individual pool, or against all
-    pools.
-
-    Performance note:  This class relies on `_HostSetInventory`.  Public
-    methods in this class generally rely on methods of the same name in
-    the underlying class, and so will have the same underlying
-    performance characteristics.
-    """
-
-    def __init__(self, pools):
-        self._histories_by_pool = {
-            pool: _HostSetInventory() for pool in pools
-        }
-
-    def record_host(self, host_history):
-        """Add one `HostJobHistory` object to the collection.
-
-        @param host_history The `HostJobHistory` object to be
-                            remembered.
-        """
-        pool = host_history.host_pool
-        self._histories_by_pool[pool].record_host(host_history)
-
-    def _count_pool(self, get_pool_count, pool=None):
-        """Internal helper to count hosts in a given pool.
-
-        The `get_pool_count` parameter is a function to calculate
-        the exact count of interest for the pool.
-
-        @param get_pool_count  Function to return a count from a
-                               _PoolCount object.
-        @param pool            The pool to be counted.  If `None`,
-                               return the total across all pools.
-        """
-        if pool is None:
-            return sum([get_pool_count(cached_history) for cached_history in
-                        self._histories_by_pool.values()])
-        else:
-            return get_pool_count(self._histories_by_pool[pool])
-
-    def get_working_list(self):
-        """Return a list of all working DUTs (across all pools).
-
-        Go through all HostJobHistory objects across all pools,
-        selecting all DUTs identified as working.
-
-        @return A list of HostJobHistory objects.
-        """
-        l = []
-        for p in self._histories_by_pool.values():
-            l.extend(p.get_working_list())
-        return l
-
-    def get_working(self, pool=None):
-        """Return the number of working DUTs in a pool.
-
-        @param pool  The pool to be counted.  If `None`, return the
-                     total across all pools.
-
-        @return The total number of working DUTs in the selected
-                pool(s).
-        """
-        return self._count_pool(_HostSetInventory.get_working, pool)
-
-    def get_broken_list(self):
-        """Return a list of all broken DUTs (across all pools).
-
-        Go through all HostJobHistory objects across all pools,
-        selecting all DUTs identified as broken.
-
-        @return A list of HostJobHistory objects.
-        """
-        l = []
-        for p in self._histories_by_pool.values():
-            l.extend(p.get_broken_list())
-        return l
-
-    def get_broken(self, pool=None):
-        """Return the number of broken DUTs in a pool.
-
-        @param pool  The pool to be counted.  If `None`, return the
-                     total across all pools.
-
-        @return The total number of broken DUTs in the selected pool(s).
-        """
-        return self._count_pool(_HostSetInventory.get_broken, pool)
-
-    def get_idle_list(self, pool=None):
-        """Return a list of all idle DUTs in the given pool.
-
-        Go through all HostJobHistory objects across all pools,
-        selecting all DUTs identified as idle.
-
-        @param pool: The pool to be counted. If `None`, return the total list
-                     across all pools.
-
-        @return A list of HostJobHistory objects.
-        """
-        if pool is None:
-            l = []
-            for p in self._histories_by_pool.itervalues():
-                l.extend(p.get_idle_list())
-            return l
-        else:
-            return self._histories_by_pool[pool].get_idle_list()
-
-    def get_idle(self, pool=None):
-        """Return the number of idle DUTs in a pool.
-
-        @param pool: The pool to be counted. If `None`, return the total
-                     across all pools.
-
-        @return The total number of idle DUTs in the selected pool(s).
-        """
-        return self._count_pool(_HostSetInventory.get_idle, pool)
-
-    def get_spares_buffer(self, spare_pool=SPARE_POOL):
-        """Return the the nominal number of working spares.
-
-        Calculates and returns how many working spares there would
-        be in the spares pool if all broken DUTs were in the spares
-        pool.  This number may be negative, indicating a shortfall
-        in the critical pools.
-
-        @return The total number of DUTs in the spares pool, less the total
-                number of broken DUTs in all pools.
-        """
-        return self.get_total(spare_pool) - self.get_broken()
-
-    def get_total(self, pool=None):
-        """Return the total number of DUTs in a pool.
-
-        @param pool  The pool to be counted.  If `None`, return the
-                     total across all pools.
-
-        @return The total number of DUTs in the selected pool(s).
-        """
-        return self._count_pool(_HostSetInventory.get_total, pool)
-
-    def get_all_histories(self, pool=None):
-        if pool is None:
-            for p in self._histories_by_pool.itervalues():
-                for h in p.get_all_histories():
-                    yield h
-        else:
-            for h in self._histories_by_pool[pool].get_all_histories():
-                yield h
-
-
-def _is_migrated_to_skylab(afehost):
-    """Return True if the provided frontend.Host has been migrated to skylab."""
-    return afehost.hostname.endswith('-migrated-do-not-use')
-
-
-def _eligible_host(afehost):
-    """Return whether this host is eligible for monitoring.
-
-    @param afehost  The host to be tested for eligibility.
-    """
-    if _is_migrated_to_skylab(afehost):
-        return False
-
-    # DUTs without an existing, unique 'model' or 'pool' label aren't meant to
-    # exist in the managed inventory; their presence generally indicates an
-    # error in the database. The _LabInventory constructor requires hosts to
-    # conform to the label restrictions. Failing an inventory run for a single
-    # bad entry is wrong, so we ignore these hosts.
-    models = [l for l in afehost.labels
-                 if l.startswith(constants.Labels.MODEL_PREFIX)]
-    pools = [l for l in afehost.labels
-                 if l.startswith(constants.Labels.POOL_PREFIX)]
-    excluded = _EXCLUDED_LABELS.intersection(afehost.labels)
-    return len(models) == 1 and len(pools) == 1 and not excluded
-
-
-class _LabInventory(collections.Mapping):
-    """Collection of `HostJobHistory` objects for the Lab's inventory.
-
-    This is a dict-like collection indexed by model.  Indexing returns
-    the _PoolSetInventory object associated with the model.
-    """
-
-    @classmethod
-    def create_inventory(cls, afe, start_time, end_time, modellist=[]):
-        """Return a Lab inventory with specified parameters.
-
-        By default, gathers inventory from `HostJobHistory` objects for
-        all DUTs in the `MANAGED_POOLS` list.  If `modellist` is
-        supplied, the inventory will be restricted to only the given
-        models.
-
-        @param afe          AFE object for constructing the
-                            `HostJobHistory` objects.
-        @param start_time   Start time for the `HostJobHistory` objects.
-        @param end_time     End time for the `HostJobHistory` objects.
-        @param modellist    List of models to include.  If empty,
-                            include all available models.
-        @return A `_LabInventory` object for the specified models.
-        """
-        target_pools = MANAGED_POOLS
-        label_list = [constants.Labels.POOL_PREFIX + l for l in target_pools]
-        afehosts = afe.get_hosts(labels__name__in=label_list)
-        if modellist:
-            # We're deliberately not checking host eligibility in this
-            # code path.  This is a debug path, not used in production;
-            # it may be useful to include ineligible hosts here.
-            modelhosts = []
-            for model in modellist:
-                model_label = constants.Labels.MODEL_PREFIX + model
-                host_list = [h for h in afehosts
-                                  if model_label in h.labels]
-                modelhosts.extend(host_list)
-            afehosts = modelhosts
-        else:
-            afehosts = [h for h in afehosts if _eligible_host(h)]
-        create = lambda host: (
-                status_history.HostJobHistory(afe, host,
-                                              start_time, end_time))
-        return cls([create(host) for host in afehosts], target_pools)
-
-    def __init__(self, histories, pools):
-        models = {h.host_model for h in histories}
-        self._modeldata = {model: _PoolSetInventory(pools) for model in models}
-        self._dut_count = len(histories)
-        for h in histories:
-            self[h.host_model].record_host(h)
-        self._boards = {h.host_board for h in histories}
-
-    def __getitem__(self, key):
-        return self._modeldata.__getitem__(key)
-
-    def __len__(self):
-        return self._modeldata.__len__()
-
-    def __iter__(self):
-        return self._modeldata.__iter__()
-
-    def get_num_duts(self):
-        """Return the total number of DUTs in the inventory."""
-        return self._dut_count
-
-    def get_num_models(self):
-        """Return the total number of models in the inventory."""
-        return len(self)
-
-    def get_pool_models(self, pool):
-        """Return all models in `pool`.
-
-        @param pool The pool to be inventoried for models.
-        """
-        return {m for m, h in self.iteritems() if h.get_total(pool)}
-
-    def get_boards(self):
-        return self._boards
-
-
-def _reportable_models(inventory, spare_pool=SPARE_POOL):
-    """Iterate over all models subject to reporting.
-
-    Yields the contents of `inventory.iteritems()` filtered to include
-    only reportable models.  A model is reportable if it has DUTs in
-    both `spare_pool` and at least one other pool.
-
-    @param spare_pool  The spare pool to be tested for reporting.
-    """
-    for model, poolset in inventory.iteritems():
-        spares = poolset.get_total(spare_pool)
-        total = poolset.get_total()
-        if spares != 0 and spares != total:
-            yield model, poolset
-
-
-def _all_dut_histories(inventory):
-    for poolset in inventory.itervalues():
-        for h in poolset.get_all_histories():
-            yield h
-
-
-def _sort_by_location(inventory_list):
-    """Return a list of DUTs, organized by location.
-
-    Take the given list of `HostJobHistory` objects, separate it
-    into a list per lab, and sort each lab's list by location.  The
-    order of sorting within a lab is
-      * By row number within the lab,
-      * then by rack number within the row,
-      * then by host shelf number within the rack.
-
-    Return a list of the sorted lists.
-
-    Implementation note: host locations are sorted by converting
-    each location into a base 100 number.  If row, rack or
-    host numbers exceed the range [0..99], then sorting will
-    break down.
-
-    @return A list of sorted lists of DUTs.
-    """
-    BASE = 100
-    lab_lists = {}
-    for history in inventory_list:
-        location = _HOSTNAME_PATTERN.match(history.host.hostname)
-        if location:
-            lab = location.group(1)
-            key = 0
-            for idx in location.group(2, 3, 4):
-                key = BASE * key + int(idx)
-            lab_lists.setdefault(lab, []).append((key, history))
-    return_list = []
-    for dut_list in lab_lists.values():
-        dut_list.sort(key=lambda t: t[0])
-        return_list.append([t[1] for t in dut_list])
-    return return_list
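
A worked example of the base-100 location key (hostname made up): for `chromeos2-row3-rack7-host11` the loop computes (3 * 100 + 7) * 100 + 11 = 30711, so sorting by key orders DUTs by row, then rack, then host shelf.

    # Worked key computation for row 3, rack 7, host 11 (illustrative values).
    key = 0
    for idx in ('3', '7', '11'):
        key = 100 * key + int(idx)
    assert key == 30711
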
-
-
-def _score_repair_set(buffer_counts, repair_list):
-    """Return a numeric score rating a set of DUTs to be repaired.
-
-    `buffer_counts` is a dictionary mapping model names to the size of
-    the model's spares buffer.
-
-    `repair_list` is a list of `HostJobHistory` objects for the DUTs to
-    be repaired.
-
-    This function calculates the new set of buffer counts that would
-    result from the proposed repairs, and scores the new set using two
-    numbers:
-      * Worst case buffer count for any model (higher is better).  This
-        is the more significant number for comparison.
-      * Number of models at the worst case (lower is better).  This is
-        the less significant number.
-
-    Implementation note:  The score could fail to reflect the intended
-    criteria if there are more than 1000 models in the inventory.
-
-    @param buffer_counts  A dictionary mapping models to buffer counts.
-    @param repair_list    A list of `HostJobHistory` objects for the
-                          DUTs to be repaired.
-    @return A numeric score.
-    """
-    # Go through `buffer_counts`, and create a list of new counts
-    # that records the buffer count for each model after repair.
-    # The new list of counts discards the model names, as they don't
-    # contribute to the final score.
-    _NMODELS = 1000
-    pools = {h.host_pool for h in repair_list}
-    repair_inventory = _LabInventory(repair_list, pools)
-    new_counts = []
-    for m, c in buffer_counts.iteritems():
-        if m in repair_inventory:
-            newcount = repair_inventory[m].get_total()
-        else:
-            newcount = 0
-        new_counts.append(c + newcount)
-    # Go through the new list of counts.  Find the worst available
-    # spares count, and count how many times that worst case occurs.
-    worst_count = new_counts[0]
-    num_worst = 1
-    for c in new_counts[1:]:
-        if c == worst_count:
-            num_worst += 1
-        elif c < worst_count:
-            worst_count = c
-            num_worst = 1
-    # Return the calculated score
-    return _NMODELS * worst_count - num_worst
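
A worked example with made-up buffer counts, showing why repair sets covering the low-buffer model score higher:

    # buffer_counts = {'modelA': 2, 'modelB': 0}, _NMODELS = 1000
    # Repairing one modelB DUT: new counts [2, 1] -> worst 1, 1 model at worst
    #                           score = 1000 * 1 - 1 = 999
    # Repairing one modelA DUT: new counts [3, 0] -> worst 0, 1 model at worst
    #                           score = 1000 * 0 - 1 = -1
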
-
-
-def _generate_repair_recommendation(inventory, num_recommend):
-    """Return a summary of selected DUTs needing repair.
-
-    Returns a message recommending a list of broken DUTs to be repaired.
-    The list of DUTs is selected based on these criteria:
-      * No more than `num_recommend` DUTs will be listed.
-      * All DUTs must be in the same lab.
-      * DUTs should be selected for some degree of physical proximity.
-      * DUTs for models with a low spares buffer are more important than
-        DUTs with larger buffers.
-
-    The algorithm used will guarantee that at least one DUT from a model
-    with the lowest spares buffer will be recommended.  If the worst
-    spares buffer number is shared by more than one model, the algorithm
-    will tend to prefer repair sets that include more of those models
-    over sets that cover fewer models.
-
-    @param inventory      `_LabInventory` object from which to generate
-                          recommendations.
-    @param num_recommend  Number of DUTs to recommend for repair.
-    """
-    logging.debug('Creating DUT repair recommendations')
-    model_buffer_counts = {}
-    broken_list = []
-    for model, counts in _reportable_models(inventory):
-        logging.debug('Listing failed DUTs for %s', model)
-        if counts.get_broken() != 0:
-            model_buffer_counts[model] = counts.get_spares_buffer()
-            broken_list.extend(counts.get_broken_list())
-    # N.B. The logic inside this loop may seem complicated, but
-    # simplification is hard:
-    #   * Calculating an initial recommendation outside of
-    #     the loop likely would make things more complicated,
-    #     not less.
-    #   * It's necessary to calculate an initial lab slice once per
-    #     lab _before_ the while loop, in case the number of broken
-    #     DUTs in a lab is less than `num_recommend`.
-    recommendation = None
-    best_score = None
-    for lab_duts in _sort_by_location(broken_list):
-        start = 0
-        end = num_recommend
-        lab_slice = lab_duts[start : end]
-        lab_score = _score_repair_set(model_buffer_counts, lab_slice)
-        while end < len(lab_duts):
-            start += 1
-            end += 1
-            new_slice = lab_duts[start : end]
-            new_score = _score_repair_set(model_buffer_counts, new_slice)
-            if new_score > lab_score:
-                lab_slice = new_slice
-                lab_score = new_score
-        if recommendation is None or lab_score > best_score:
-            recommendation = lab_slice
-            best_score = lab_score
-    # N.B. The trailing space in `line_fmt` is mandatory:  Without it,
-    # Gmail will parse the URL wrong.  Don't ask.  If you simply _must_
-    # know more, go try it yourself...
-    line_fmt = '%-30s %-16s %-6s\n    %s '
-    message = ['Repair recommendations:\n',
-               line_fmt % ( 'Hostname', 'Model', 'Servo?', 'Logs URL')]
-    if recommendation:
-        for h in recommendation:
-            servo_name = servo_host.make_servo_hostname(h.host.hostname)
-            servo_present = utils.host_is_in_lab_zone(servo_name)
-            event = _get_diagnosis(h).task
-            line = line_fmt % (
-                    h.host.hostname, h.host_model,
-                    'Yes' if servo_present else 'No', event.job_url)
-            message.append(line)
-    else:
-        message.append('(No DUTs to repair)')
-    return '\n'.join(message)
-
-
-def _generate_model_inventory_message(inventory):
-    """Generate the "model inventory" e-mail message.
-
-    The model inventory is a list by model summarizing the number of
-    working, broken, and idle DUTs, and the total shortfall or surplus
-    of working devices relative to the minimum critical pool
-    requirement.
-
-    The report omits models with no DUTs in the spare pool or with no
-    DUTs in a critical pool.
-
-    N.B. For sample output text formatted as users can expect to
-    see it in e-mail and log files, refer to the unit tests.
-
-    @param inventory  `_LabInventory` object to be reported on.
-    @return String with the inventory message to be sent.
-    """
-    logging.debug('Creating model inventory')
-    nworking = 0
-    nbroken = 0
-    nidle = 0
-    nbroken_models = 0
-    ntotal_models = 0
-    summaries = []
-    column_names = (
-        'Model', 'Avail', 'Bad', 'Idle', 'Good', 'Spare', 'Total')
-    for model, counts in _reportable_models(inventory):
-        logging.debug('Counting %2d DUTs for model %s',
-                      counts.get_total(), model)
-        # Summary elements laid out in the same order as the column
-        # headers:
-        #     Model Avail   Bad  Idle  Good  Spare Total
-        #      e[0]  e[1]  e[2]  e[3]  e[4]  e[5]  e[6]
-        element = (model,
-                   counts.get_spares_buffer(),
-                   counts.get_broken(),
-                   counts.get_idle(),
-                   counts.get_working(),
-                   counts.get_total(SPARE_POOL),
-                   counts.get_total())
-        if element[2]:
-            summaries.append(element)
-            nbroken_models += 1
-        ntotal_models += 1
-        nbroken += element[2]
-        nidle += element[3]
-        nworking += element[4]
-    ntotal = nworking + nbroken + nidle
-    summaries = sorted(summaries, key=lambda e: (e[1], -e[2]))
-    broken_percent = int(round(100.0 * nbroken / ntotal))
-    idle_percent = int(round(100.0 * nidle / ntotal))
-    working_percent = 100 - broken_percent - idle_percent
-    message = ['Summary of DUTs in inventory:',
-               '%10s %10s %10s %6s' % ('Bad', 'Idle', 'Good', 'Total'),
-               '%5d %3d%% %5d %3d%% %5d %3d%% %6d' % (
-                   nbroken, broken_percent,
-                   nidle, idle_percent,
-                   nworking, working_percent,
-                   ntotal),
-               '',
-               'Models with failures: %d' % nbroken_models,
-               'Models in inventory:  %d' % ntotal_models,
-               '', '',
-               'Full model inventory:\n',
-               '%-22s %5s %5s %5s %5s %5s %5s' % column_names]
-    message.extend(
-            ['%-22s %5d %5d %5d %5d %5d %5d' % e for e in summaries])
-    return '\n'.join(message)
-
-
-_POOL_INVENTORY_HEADER = '''\
-Notice to Infrastructure deputies:  All models shown below are at
-less than full strength; please take action to resolve the issues.
-Once you're satisfied that failures won't recur, failed DUTs can
-be replaced with spares by running `balance_pool`.  Detailed
-instructions can be found here:
-    http://go/cros-manage-duts
-'''
-
-
-def _generate_pool_inventory_message(inventory):
-    """Generate the "pool inventory" e-mail message.
-
-    The pool inventory is a list by pool and model summarizing the
-    number of working and broken DUTs in the pool.  Only models with
-    at least one broken DUT are included in the list.
-
-    N.B. For sample output text formatted as users can expect to see it
-    in e-mail and log files, refer to the unit tests.
-
-    @param inventory  `_LabInventory` object to be reported on.
-    @return String with the inventory message to be sent.
-    """
-    logging.debug('Creating pool inventory')
-    message = [_POOL_INVENTORY_HEADER]
-    newline = ''
-    for pool in CRITICAL_POOLS:
-        message.append(
-            '%sStatus for pool:%s, by model:' % (newline, pool))
-        message.append(
-            '%-20s   %5s %5s %5s %5s' % (
-                'Model', 'Bad', 'Idle', 'Good', 'Total'))
-        data_list = []
-        for model, counts in inventory.iteritems():
-            logging.debug('Counting %2d DUTs for %s, %s',
-                          counts.get_total(pool), model, pool)
-            broken = counts.get_broken(pool)
-            idle = counts.get_idle(pool)
-            # models at full strength are not reported
-            if not broken and not idle:
-                continue
-            working = counts.get_working(pool)
-            total = counts.get_total(pool)
-            data_list.append((model, broken, idle, working, total))
-        if data_list:
-            data_list = sorted(data_list, key=lambda d: -d[1])
-            message.extend(
-                ['%-20s   %5d %5d %5d %5d' % t for t in data_list])
-        else:
-            message.append('(All models at full strength)')
-        newline = '\n'
-    return '\n'.join(message)
-
-
-_IDLE_INVENTORY_HEADER = '''\
-Notice to Infrastructure deputies:  The hosts shown below haven't
-run any jobs for at least 24 hours. Please check each host; locked
-hosts should normally be unlocked; stuck jobs should normally be
-aborted.
-'''
-
-
-def _generate_idle_inventory_message(inventory):
-    """Generate the "idle inventory" e-mail message.
-
-    The idle inventory is a host list with corresponding pool and model,
-    where the hosts are identified as idle.
-
-    N.B. For sample output text formatted as users can expect to
-    see it in e-mail and log files, refer to the unit tests.
-
-    @param inventory  `_LabInventory` object to be reported on.
-    @return String with the inventory message to be sent.
-    """
-    logging.debug('Creating idle inventory')
-    message = [_IDLE_INVENTORY_HEADER]
-    message.append('Idle Host List:')
-    message.append('%-30s %-20s %s' % ('Hostname', 'Model', 'Pool'))
-    data_list = []
-    for pool in MANAGED_POOLS:
-        for model, counts in inventory.iteritems():
-            logging.debug('Counting %2d DUTs for %s, %s',
-                          counts.get_total(pool), model, pool)
-            data_list.extend([(dut.host.hostname, model, pool)
-                                  for dut in counts.get_idle_list(pool)])
-    if data_list:
-        message.extend(['%-30s %-20s %s' % t for t in data_list])
-    else:
-        message.append('(No idle DUTs)')
-    return '\n'.join(message)
-
-
-def _send_email(arguments, tag, subject, recipients, body):
-    """Send an inventory e-mail message.
-
-    The message is logged in the selected log directory using `tag` for
-    the file name.
-
-    If the --debug option was requested, the message is neither logged
-    nor sent, but merely printed on stdout.
-
-    @param arguments   Parsed command-line options.
-    @param tag         Tag identifying the inventory for logging
-                       purposes.
-    @param subject     E-mail Subject: header line.
-    @param recipients  E-mail addresses for the To: header line.
-    @param body        E-mail message body.
-    """
-    logging.debug('Generating email: "%s"', subject)
-    all_recipients = ', '.join(recipients)
-    report_body = '\n'.join([
-            'To: %s' % all_recipients,
-            'Subject: %s' % subject,
-            '', body, ''])
-    if arguments.debug:
-        print report_body
-    else:
-        filename = os.path.join(arguments.logdir, tag)
-        try:
-            report_file = open(filename, 'w')
-            report_file.write(report_body)
-            report_file.close()
-        except EnvironmentError as e:
-            logging.error('Failed to write %s:  %s', filename, e)
-        try:
-            gmail_lib.send_email(all_recipients, subject, body)
-        except Exception as e:
-            logging.error('Failed to send e-mail to %s:  %s',
-                          all_recipients, e)
-
-
-def _populate_model_counts(inventory):
-    """Gather model counts while providing interactive feedback.
-
-    Gathering the status of all individual DUTs in the lab can take
-    considerable time (~30 minutes at the time of this writing).
-    Normally, we pay that cost by querying as we go.  However, with
-    the `--debug` option, we expect a human being to be watching the
-    progress in real time.  So, we force the first (expensive) queries
-    to happen up front, and provide simple ASCII output on sys.stdout
-    to show a progress bar and results.
-
-    @param inventory  `_LabInventory` object from which to gather
-                      counts.
-    """
-    n = 0
-    total_broken = 0
-    for counts in inventory.itervalues():
-        n += 1
-        if n % 10 == 5:
-            c = '+'
-        elif n % 10 == 0:
-            c = '%d' % ((n / 10) % 10)
-        else:
-            c = '.'
-        sys.stdout.write(c)
-        sys.stdout.flush()
-        # This next call is where all the time goes - it forces all of a
-        # model's `HostJobHistory` objects to query the database and
-        # cache their results.
-        total_broken += counts.get_broken()
-    sys.stdout.write('\n')
-    sys.stdout.write('Found %d broken DUTs\n' % total_broken)
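
For example, with 20 models in the inventory the progress line written to stdout would read:

    ....+....1....+....2
    Found N broken DUTs

where N is the total broken count accumulated above (values illustrative).
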
-
-
-def _perform_model_inventory(arguments, inventory, timestamp):
-    """Perform the model inventory report.
-
-    The model inventory report consists of the following:
-      * A list of DUTs that are recommended to be repaired.  This list
-        is optional, and only appears if the `--recommend` option is
-        present.
-      * A list of all models that have failed DUTs, with counts
-        of working, broken, and spare DUTs, among others.
-
-    @param arguments  Command-line arguments as returned by
-                      `ArgumentParser`
-    @param inventory  `_LabInventory` object to be reported on.
-    @param timestamp  A string used to identify this run's timestamp
-                      in logs and email output.
-    """
-    if arguments.recommend:
-        recommend_message = _generate_repair_recommendation(
-                inventory, arguments.recommend) + '\n\n\n'
-    else:
-        recommend_message = ''
-    model_message = _generate_model_inventory_message(inventory)
-    _send_email(arguments,
-                'models-%s.txt' % timestamp,
-                'DUT model inventory %s' % timestamp,
-                arguments.model_notify,
-                recommend_message + model_message)
-
-
-def _perform_pool_inventory(arguments, inventory, timestamp):
-    """Perform the pool inventory report.
-
-    The pool inventory report consists of the following:
-      * A list of all critical pools that have failed DUTs, with counts
-        of working, broken, and idle DUTs.
-      * A list of all idle DUTs by hostname including the model and
-        pool.
-
-    @param arguments  Command-line arguments as returned by
-                      `ArgumentParser`
-    @param inventory  `_LabInventory` object to be reported on.
-    @param timestamp  A string used to identify this run's timestamp in
-                      logs and email output.
-    """
-    pool_message = _generate_pool_inventory_message(inventory)
-    idle_message = _generate_idle_inventory_message(inventory)
-    _send_email(arguments,
-                'pools-%s.txt' % timestamp,
-                'DUT pool inventory %s' % timestamp,
-                arguments.pool_notify,
-                pool_message + '\n\n\n' + idle_message)
-
-
-def _dut_in_repair_loop(history):
-    """Return whether a DUT's history indicates a repair loop.
-
-    A DUT is considered looping if it runs no tests, and no tasks pass
-    other than repair tasks.
-
-    @param history  An instance of `status_history.HostJobHistory` to be
-                    scanned for a repair loop.  The caller guarantees
-                    that this history corresponds to a working DUT.
-    @returns  Return a true value if the DUT's most recent history
-              indicates a repair loop.
-    """
-    # Our caller passes only histories for working DUTs; that means
-    # we've already paid the cost of fetching the diagnosis task, and
-    # we know that the task was successful.  The diagnosis task will be
-    # one of the tasks we must scan to find a loop, so if the task isn't
-    # a repair task, then our history includes a successful non-repair
-    # task, and we're not looping.
-    #
-    # The for loop below is very expensive, because it must fetch the
-    # full history, regardless of how many tasks we examine.  At the
-    # time of this writing, this check against the diagnosis task
-    # reduces the cost of finding loops in the full inventory from hours
-    # to minutes.
-    if _get_diagnosis(history).task.name != 'Repair':
-        return False
-    repair_ok_count = 0
-    for task in history:
-        if not task.is_special:
-            # This is a test, so we're not looping.
-            return False
-        if task.diagnosis == status_history.BROKEN:
-            # Failed a repair, so we're not looping.
-            return False
-        if (task.diagnosis == status_history.WORKING
-                and task.name != 'Repair'):
-            # Non-repair task succeeded, so we're not looping.
-            return False
-        # At this point, we have either a failed non-repair task, or
-        # a successful repair.
-        if task.name == 'Repair':
-            repair_ok_count += 1
-            if repair_ok_count >= _REPAIR_LOOP_THRESHOLD:
-                return True
-
-
-def _report_untestable_dut(history, state):
-    """Log an untestable DUT and report it via the presence metric.
-
-    @param history  `HostJobHistory` of the DUT to be reported.
-    @param state    String identifying why the DUT is untestable.
-    """
-    fields = {
-        'dut_hostname': history.hostname,
-        'model': history.host_model,
-        'pool': history.host_pool,
-        'state': state,
-    }
-    logging.info('DUT in state %(state)s: %(dut_hostname)s, '
-                 'model: %(model)s, pool: %(pool)s', fields)
-    _UNTESTABLE_PRESENCE_METRIC.set(True, fields=fields)
-
-
-def _report_untestable_dut_metrics(inventory):
-    """Scan the inventory for DUTs unable to run tests.
-
-    DUTs in the inventory are judged "untestable" if they meet one of
-    two criteria:
-      * The DUT is stuck in a repair loop; that is, it regularly passes
-        repair, but never passes other operations.
-      * The DUT runs no tasks at all, but is not locked.
-
-    This routine walks through the given inventory looking for DUTs in
-    either of these states.  Results are reported via a Monarch presence
-    metric.
-
-    Note:  To make sure that DUTs aren't flagged as "idle" merely
-    because there's no work, a separate job runs prior to regular
-    inventory runs which schedules trivial work on any DUT that appears
-    idle.
-
-    @param inventory  `_LabInventory` object to be reported on.
-    """
-    logging.info('Scanning for untestable DUTs.')
-    for history in _all_dut_histories(inventory):
-        # Managed DUTs with names that don't match
-        # _HOSTNAME_PATTERN shouldn't be possible.  However, we
-        # don't want arbitrary strings being attached to the
-        # 'dut_hostname' field, so for safety, we exclude all
-        # anomalies.
-        if not _HOSTNAME_PATTERN.match(history.hostname):
-            continue
-        if _host_is_working(history):
-            if _dut_in_repair_loop(history):
-                _report_untestable_dut(history, 'repair_loop')
-        elif _host_is_idle(history):
-            if not history.host.locked:
-                _report_untestable_dut(history, 'idle_unlocked')
-
-
-def _log_startup(arguments, startup_time):
-    """Log the start of this inventory run.
-
-    Print various log messages indicating the start of the run.  Return
-    a string based on `startup_time` that will be used to identify this
-    run in log files and e-mail messages.
-
-    @param arguments      Command-line arguments as returned by
-                          `ArgumentParser`
-    @param startup_time   A UNIX timestamp marking the moment when
-                          this inventory run began.
-    @returns  A timestamp string that will be used to identify this run
-              in logs and email output.
-    """
-    timestamp = time.strftime('%Y-%m-%d.%H',
-                              time.localtime(startup_time))
-    logging.debug('Starting lab inventory for %s', timestamp)
-    if arguments.model_notify:
-        if arguments.recommend:
-            logging.debug('Will include repair recommendations')
-        logging.debug('Will include model inventory')
-    if arguments.pool_notify:
-        logging.debug('Will include pool inventory')
-    return timestamp
-
-
-def _create_inventory(arguments, end_time):
-    """Create the `_LabInventory` instance to use for reporting.
-
-    @param arguments  Command-line arguments as returned by
-                      `ArgumentParser`
-    @param end_time   A UNIX timestamp for the end of the time range
-                      to be searched in this inventory run.
-    @returns  A `_LabInventory` object covering the requested range.
-    """
-    start_time = end_time - arguments.duration * 60 * 60
-    afe = frontend_wrappers.RetryingAFE(server=None)
-    inventory = _LabInventory.create_inventory(
-            afe, start_time, end_time, arguments.modelnames)
-    logging.info('Found %d hosts across %d models',
-                     inventory.get_num_duts(),
-                     inventory.get_num_models())
-    return inventory
-
-
-def _perform_inventory_reports(arguments):
-    """Perform all inventory checks requested on the command line.
-
-    Create the initial inventory and run through the inventory reports
-    as called for by the parsed command-line arguments.
-
-    @param arguments  Command-line arguments as returned by
-                      `ArgumentParser`.
-    """
-    startup_time = time.time()
-    timestamp = _log_startup(arguments, startup_time)
-    inventory = _create_inventory(arguments, startup_time)
-    if arguments.debug:
-        _populate_model_counts(inventory)
-    if arguments.model_notify:
-        _perform_model_inventory(arguments, inventory, timestamp)
-    if arguments.pool_notify:
-        _perform_pool_inventory(arguments, inventory, timestamp)
-    if arguments.report_untestable:
-        _report_untestable_dut_metrics(inventory)
-
-
-def _separate_email_addresses(address_list):
-    """Parse a list of comma-separated lists of e-mail addresses.
-
-    @param address_list  A list of strings containing comma
-                         separate e-mail addresses.
-    @return A list of the individual e-mail addresses.
-    """
-    newlist = []
-    for arg in address_list:
-        newlist.extend([email.strip() for email in arg.split(',')])
-    return newlist
-
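-
-# --- Editor's note: illustrative sketch, not part of the original file. ---
-# Tiny example of the flattening performed by `_separate_email_addresses()`
-# when `--model-notify`/`--pool-notify` options are repeated.
-def _example_separate_addresses():
-    """Editor's sketch: flatten two option values into one address list."""
-    return _separate_email_addresses(['a@example.com, b@example.com',
-                                      'c@example.com'])
-    # -> ['a@example.com', 'b@example.com', 'c@example.com']
-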
-
-def _verify_arguments(arguments):
-    """Validate command-line arguments.
-
-    Join comma separated e-mail addresses for `--model-notify` and
-    `--pool-notify` in separate option arguments into a single list.
-
-    For non-debug uses, require that at least one inventory report be
-    requested.  For debug, if a report isn't specified, treat it as "run
-    all the reports."
-
-    The return value indicates success or failure; in the case of
-    failure, we also write an error message to stderr.
-
-    @param arguments  Command-line arguments as returned by
-                      `ArgumentParser`
-    @return True if the arguments are semantically good, or False
-            if the arguments don't meet requirements.
-    """
-    arguments.model_notify = _separate_email_addresses(
-            arguments.model_notify)
-    arguments.pool_notify = _separate_email_addresses(
-            arguments.pool_notify)
-    if not any([arguments.model_notify, arguments.pool_notify,
-                arguments.report_untestable]):
-        if not arguments.debug:
-            sys.stderr.write('Must request at least one report via '
-                             '--model-notify, --pool-notify, or '
-                             '--report-untestable\n')
-            return False
-        else:
-            # We want to run all the e-mail reports.  An empty notify
-            # list will cause a report to be skipped, so make sure the
-            # lists are non-empty.
-            arguments.model_notify = ['']
-            arguments.pool_notify = ['']
-    return True
-
-
-def _get_default_logdir(script):
-    """Get the default directory for the `--logdir` option.
-
-    The default log directory is based on the parent directory
-    containing this script.
-
-    @param script  Path to this script file.
-    @return A path to a directory.
-    """
-    basedir = os.path.dirname(os.path.abspath(script))
-    basedir = os.path.dirname(basedir)
-    return os.path.join(basedir, _LOGDIR)
-
-
-def _parse_command(argv):
-    """Parse the command line arguments.
-
-    Create an argument parser for this command's syntax, parse the
-    command line, and return the result of the ArgumentParser
-    parse_args() method.
-
-    @param argv Standard command line argument vector; argv[0] is
-                assumed to be the command name.
-    @return Result returned by ArgumentParser.parse_args().
-    """
-    parser = argparse.ArgumentParser(
-            prog=argv[0],
-            description='Gather and report lab inventory statistics')
-    parser.add_argument('-d', '--duration', type=int,
-                        default=_DEFAULT_DURATION, metavar='HOURS',
-                        help='number of hours back to search for status'
-                             ' (default: %d)' % _DEFAULT_DURATION)
-    parser.add_argument('--model-notify', action='append',
-                        default=[], metavar='ADDRESS',
-                        help='Generate model inventory message, '
-                        'and send it to the given e-mail address(es)')
-    parser.add_argument('--pool-notify', action='append',
-                        default=[], metavar='ADDRESS',
-                        help='Generate pool inventory message, '
-                             'and send it to the given address(es)')
-    parser.add_argument('-r', '--recommend', type=int, default=None,
-                        help=('Specify how many DUTs should be '
-                              'recommended for repair (default: no '
-                              'recommendation)'))
-    parser.add_argument('--report-untestable', action='store_true',
-                        help='Check for devices unable to run tests.')
-    parser.add_argument('--debug', action='store_true',
-                        help='Print e-mail, metrics messages on stdout '
-                             'without sending them.')
-    parser.add_argument('--no-metrics', action='store_false',
-                        dest='use_metrics',
-                        help='Suppress generation of Monarch metrics.')
-    parser.add_argument('--logdir', default=_get_default_logdir(argv[0]),
-                        help='Directory where logs will be written.')
-    parser.add_argument('modelnames', nargs='*',
-                        metavar='MODEL',
-                        help='names of models to report on '
-                             '(default: all models)')
-    arguments = parser.parse_args(argv[1:])
-    if not _verify_arguments(arguments):
-        return None
-    return arguments
-
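-
-# --- Editor's note: illustrative sketch, not part of the original file. ---
-# The helper below shows, with made-up option values and model names, how
-# `_parse_command()` above turns a command line into parsed arguments with
-# the comma-separated notify lists already split apart.
-def _example_parse_command():
-    """Editor's sketch: drive `_parse_command()` with made-up arguments."""
-    argv = ['lab_inventory.py',
-            '--recommend', '10',
-            '--model-notify', 'oncall@example.com,deputy@example.com',
-            'eve', 'coral']
-    arguments = _parse_command(argv)
-    # arguments.recommend == 10
-    # arguments.model_notify == ['oncall@example.com', 'deputy@example.com']
-    # arguments.modelnames == ['eve', 'coral']
-    return arguments
-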
-
-def _configure_logging(arguments):
-    """Configure the `logging` module for our needs.
-
-    How we log depends on whether the `--debug` option was provided on
-    the command line.
-      * Without the option, we configure the logging to capture all
-        potentially relevant events in a log file.  The log file is
-        configured to rotate once a week on Friday evening, preserving
-        ~3 months worth of history.
-      * With the option, we expect stdout to contain other
-        human-readable output (including the contents of the e-mail
-        messages), so we restrict the output to INFO level.
-
-    For convenience, when `--debug` is on, the logging format has
-    no adornments, so that a call like `logging.info(msg)` simply writes
-    `msg` to stdout, plus a trailing newline.
-
-    @param arguments  Command-line arguments as returned by
-                      `ArgumentParser`
-    """
-    root_logger = logging.getLogger()
-    if arguments.debug:
-        root_logger.setLevel(logging.INFO)
-        handler = logging.StreamHandler(sys.stdout)
-        handler.setFormatter(logging.Formatter())
-    else:
-        if not os.path.exists(arguments.logdir):
-            os.mkdir(arguments.logdir)
-        root_logger.setLevel(logging.DEBUG)
-        logfile = os.path.join(arguments.logdir, _LOGFILE)
-        handler = logging.handlers.TimedRotatingFileHandler(
-                logfile, when='W4', backupCount=13)
-        formatter = logging.Formatter(_LOG_FORMAT,
-                                      time_utils.TIME_FMT)
-        handler.setFormatter(formatter)
-    # TODO(jrbarnette) This is gross.  Importing client.bin.utils
-    # implicitly imported logging_config, which calls
-    # logging.basicConfig() *at module level*.  That gives us an
-    # extra logging handler that we don't want.  So, clear out all
-    # the handlers here.
-    # Iterate over a copy, since removeHandler() mutates the list.
-    for h in list(root_logger.handlers):
-        root_logger.removeHandler(h)
-    root_logger.addHandler(handler)
-
-
-def main(argv):
-    """Standard main routine.
-
-    @param argv  Command line arguments, including `sys.argv[0]`.
-    """
-    arguments = _parse_command(argv)
-    if not arguments:
-        sys.exit(1)
-    _configure_logging(arguments)
-
-    try:
-        if arguments.use_metrics:
-            if arguments.debug:
-                logging.info('Debug mode: Will not report metrics to monarch.')
-                metrics_file = '/dev/null'
-            else:
-                metrics_file = None
-            with site_utils.SetupTsMonGlobalState(
-                    'lab_inventory', debug_file=metrics_file,
-                    auto_flush=False):
-                success = False
-                try:
-                    with metrics.SecondsTimer('%s/duration' % _METRICS_PREFIX):
-                        _perform_inventory_reports(arguments)
-                    success = True
-                finally:
-                    metrics.Counter('%s/tick' % _METRICS_PREFIX).increment(
-                            fields={'success': success})
-                    metrics.Flush()
-        else:
-            _perform_inventory_reports(arguments)
-    except KeyboardInterrupt:
-        pass
-    except Exception:
-        # Our cron setup doesn't preserve stderr, so drop extra breadcrumbs.
-        logging.exception('Error escaped main')
-        raise
-
-
-def get_inventory(afe):
-    """Return a `_LabInventory` covering the most recent 24 hours.
-
-    @param afe  AFE instance passed to `_LabInventory.create_inventory()`.
-    """
-    end_time = int(time.time())
-    start_time = end_time - 24 * 60 * 60
-    return _LabInventory.create_inventory(afe, start_time, end_time)
-
-
-def get_managed_boards(afe):
-    """Return the boards in the inventory from `get_inventory(afe)`."""
-    return get_inventory(afe).get_boards()
-
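-
-# --- Editor's note: illustrative sketch, not part of the original file. ---
-# Example of how a caller might use the helpers above, mirroring the AFE
-# construction used in `_create_inventory()`.
-def _example_managed_boards():
-    """Editor's sketch: fetch the managed boards for the last 24 hours."""
-    afe = frontend_wrappers.RetryingAFE(server=None)
-    return get_managed_boards(afe)
-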
-
-if __name__ == '__main__':
-    main(sys.argv)
diff --git a/site_utils/lab_inventory_unittest.py b/site_utils/lab_inventory_unittest.py
deleted file mode 100755
index d497bf8..0000000
--- a/site_utils/lab_inventory_unittest.py
+++ /dev/null
@@ -1,1210 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import itertools
-import logging
-import os
-import unittest
-
-import common
-from autotest_lib.frontend.afe.json_rpc import proxy
-from autotest_lib.server.lib import status_history
-from autotest_lib.site_utils import lab_inventory
-
-
-# _FAKE_TIME - an arbitrary but plausible time_t value.
-# You can make your own with `date +%s`.
-
-_FAKE_TIME = 1537457599
-
-
-class _FakeHost(object):
-    """Class to mock `Host` in _FakeHostHistory for testing."""
-
-    def __init__(self, hostname):
-        self.hostname = hostname
-
-
-class _FakeHostEvent(object):
-    """Class to mock a task event with start and end times for testing."""
-
-    def __init__(self, time):
-        self.start_time = time
-        self.end_time = time + 1
-
-
-class _FakeHostHistory(object):
-    """Class to mock `HostJobHistory` for testing."""
-
-    def __init__(self, model, pool, status, hostname=''):
-        self.host_model = model
-        self.host_board = model + '_board'
-        self.host_pool = pool
-        self.status = status
-        self.host = _FakeHost(hostname)
-        self.hostname = hostname
-        self.start_time = _FAKE_TIME
-        self.end_time = _FAKE_TIME + 20
-        self.fake_task = _FakeHostEvent(_FAKE_TIME + 5)
-        self.exception = None
-
-    def last_diagnosis(self):
-        """Return the recorded diagnosis."""
-        if self.exception:
-            raise self.exception
-        else:
-            return self.status, self.fake_task
-
-
-class _FakeHostLocation(object):
-    """Class to mock `HostJobHistory` for location sorting."""
-
-    _HOSTNAME_FORMAT = 'chromeos%d-row%d-rack%d-host%d'
-
-    def __init__(self, location):
-        self.hostname = self._HOSTNAME_FORMAT % location
-
-    @property
-    def host(self):
-        """Return a fake host object with a hostname."""
-        return self
-
-
-# Status values that may be returned by `HostJobHistory`.
-#
-# These merely rename the corresponding values in `status_history`
-# for convenience.
-
-_WORKING = status_history.WORKING
-_UNUSED = status_history.UNUSED
-_BROKEN = status_history.BROKEN
-_UNKNOWN = status_history.UNKNOWN
-
-
-class GetStatusTestCase(unittest.TestCase):
-    """Tests for `_get_diagnosis()`."""
-
-    def _get_diagnosis_status(self, history):
-        return lab_inventory._get_diagnosis(history).status
-
-    def test_working_and_in_range(self):
-        """Test WORKING when task times are in the history range."""
-        history = _FakeHostHistory('', '', _WORKING)
-        history.fake_task = _FakeHostEvent(history.start_time + 1)
-        self.assertEqual(self._get_diagnosis_status(history), _WORKING)
-
-    def test_broken_and_in_range(self):
-        """Test BROKEN when task times are in the history range."""
-        history = _FakeHostHistory('', '', _BROKEN)
-        history.fake_task = _FakeHostEvent(history.start_time + 1)
-        self.assertEqual(self._get_diagnosis_status(history), _BROKEN)
-
-    def test_broken_and_straddles(self):
-        """Test BROKEN when task time straddles the history start point."""
-        history = _FakeHostHistory('', '', _BROKEN)
-        history.fake_task = _FakeHostEvent(history.start_time - 1)
-        self.assertEqual(self._get_diagnosis_status(history), _BROKEN)
-
-    def test_broken_and_out_of_range(self):
-        """Test BROKEN when task times are before the history range."""
-        history = _FakeHostHistory('', '', _BROKEN)
-        history.fake_task = _FakeHostEvent(history.start_time - 2)
-        self.assertEqual(self._get_diagnosis_status(history), _UNUSED)
-
-    def test_exception(self):
-        """Test exceptions raised by `last_diagnosis()`."""
-        history = _FakeHostHistory('', '', _BROKEN)
-        history.exception = proxy.JSONRPCException('exception for testing')
-        self.assertIsNone(self._get_diagnosis_status(history))
-
-
-class HostSetInventoryTestCase(unittest.TestCase):
-    """Unit tests for class `_HostSetInventory`.
-
-    Coverage is quite basic:  mostly just enough to make sure every
-    function gets called, and to make sure that the counting knows
-    the difference between 0 and 1.
-
-    The testing also ensures that all known status values that can be
-    returned by `HostJobHistory` are counted as expected.
-    """
-
-    def setUp(self):
-        super(HostSetInventoryTestCase, self).setUp()
-        self.histories = lab_inventory._HostSetInventory()
-
-    def _add_host(self, status):
-        fake = _FakeHostHistory('zebra', lab_inventory.SPARE_POOL, status)
-        self.histories.record_host(fake)
-
-    def _check_counts(self, working, broken, idle):
-        """Check that pool counts match expectations.
-
-        Asserts that `get_working()`, `get_broken()`, and `get_idle()`
-        return the given expected values.  Also assert that
-        `get_total()` is the sum of all counts.
-
-        @param working The expected total of working devices.
-        @param broken  The expected total of broken devices.
-        @param idle  The expected total of idle devices.
-        """
-        self.assertEqual(self.histories.get_working(), working)
-        self.assertEqual(self.histories.get_broken(), broken)
-        self.assertEqual(self.histories.get_idle(), idle)
-        self.assertEqual(self.histories.get_total(),
-                         working + broken + idle)
-
-    def test_empty(self):
-        """Test counts when there are no DUTs recorded."""
-        self._check_counts(0, 0, 0)
-
-    def test_broken(self):
-        """Test counting for broken DUTs."""
-        self._add_host(_BROKEN)
-        self._check_counts(0, 1, 0)
-
-    def test_working(self):
-        """Test counting for working DUTs."""
-        self._add_host(_WORKING)
-        self._check_counts(1, 0, 0)
-
-    def test_idle(self):
-        """Testing counting for idle status values."""
-        self._add_host(_UNUSED)
-        self._check_counts(0, 0, 1)
-        self._add_host(_UNKNOWN)
-        self._check_counts(0, 0, 2)
-
-    def test_working_then_broken(self):
-        """Test counts after adding a working and then a broken DUT."""
-        self._add_host(_WORKING)
-        self._add_host(_BROKEN)
-        self._check_counts(1, 1, 0)
-
-    def test_broken_then_working(self):
-        """Test counts after adding a broken and then a working DUT."""
-        self._add_host(_BROKEN)
-        self._add_host(_WORKING)
-        self._check_counts(1, 1, 0)
-
-
-class PoolSetInventoryTestCase(unittest.TestCase):
-    """Unit tests for class `_PoolSetInventory`.
-
-    Coverage is quite basic:  just enough to make sure every function
-    gets called, and to make sure that the counting knows the difference
-    between 0 and 1.
-
-    The tests make sure that both individual pool counts and totals are
-    counted correctly.
-    """
-
-    _POOL_SET = ['humpty', 'dumpty']
-
-    def setUp(self):
-        super(PoolSetInventoryTestCase, self).setUp()
-        self._pool_histories = lab_inventory._PoolSetInventory(self._POOL_SET)
-
-    def _add_host(self, pool, status):
-        fake = _FakeHostHistory('zebra', pool, status)
-        self._pool_histories.record_host(fake)
-
-    def _check_all_counts(self, working, broken):
-        """Check that total counts for all pools match expectations.
-
-        Checks that `get_working()` and `get_broken()` return the
-        given expected values when called without a pool specified.
-        Also check that `get_total()` is the sum of working and
-        broken devices.
-
-        Additionally, call the various functions for all the pools
-        individually, and confirm that the totals across pools match
-        the given expectations.
-
-        @param working The expected total of working devices.
-        @param broken  The expected total of broken devices.
-        """
-        self.assertEqual(self._pool_histories.get_working(), working)
-        self.assertEqual(self._pool_histories.get_broken(), broken)
-        self.assertEqual(self._pool_histories.get_total(),
-                         working + broken)
-        count_working = 0
-        count_broken = 0
-        count_total = 0
-        for pool in self._POOL_SET:
-            count_working += self._pool_histories.get_working(pool)
-            count_broken += self._pool_histories.get_broken(pool)
-            count_total += self._pool_histories.get_total(pool)
-        self.assertEqual(count_working, working)
-        self.assertEqual(count_broken, broken)
-        self.assertEqual(count_total, working + broken)
-
-    def _check_pool_counts(self, pool, working, broken):
-        """Check that counts for a given pool match expectations.
-
-        Checks that `get_working()` and `get_broken()` return the
-        given expected values for the given pool.  Also check that
-        `get_total()` is the sum of working and broken devices.
-
-        @param pool    The pool to be checked.
-        @param working The expected total of working devices.
-        @param broken  The expected total of broken devices.
-        """
-        self.assertEqual(self._pool_histories.get_working(pool),
-                         working)
-        self.assertEqual(self._pool_histories.get_broken(pool),
-                         broken)
-        self.assertEqual(self._pool_histories.get_total(pool),
-                         working + broken)
-
-    def test_empty(self):
-        """Test counts when there are no DUTs recorded."""
-        self._check_all_counts(0, 0)
-        for pool in self._POOL_SET:
-            self._check_pool_counts(pool, 0, 0)
-
-    def test_all_working_then_broken(self):
-        """Test counts after adding a working and then a broken DUT.
-
-        For each pool, add first a working, then a broken DUT.  After
-        each DUT is added, check counts to confirm the correct values.
-        """
-        working = 0
-        broken = 0
-        for pool in self._POOL_SET:
-            self._add_host(pool, _WORKING)
-            working += 1
-            self._check_pool_counts(pool, 1, 0)
-            self._check_all_counts(working, broken)
-            self._add_host(pool, _BROKEN)
-            broken += 1
-            self._check_pool_counts(pool, 1, 1)
-            self._check_all_counts(working, broken)
-
-    def test_all_broken_then_working(self):
-        """Test counts after adding a broken and then a working DUT.
-
-        For each pool, add first a broken, then a working DUT.  After
-        each DUT is added, check counts to confirm the correct values.
-        """
-        working = 0
-        broken = 0
-        for pool in self._POOL_SET:
-            self._add_host(pool, _BROKEN)
-            broken += 1
-            self._check_pool_counts(pool, 0, 1)
-            self._check_all_counts(working, broken)
-            self._add_host(pool, _WORKING)
-            working += 1
-            self._check_pool_counts(pool, 1, 1)
-            self._check_all_counts(working, broken)
-
-
-class LocationSortTests(unittest.TestCase):
-    """Unit tests for `_sort_by_location()`."""
-
-    def setUp(self):
-        super(LocationSortTests, self).setUp()
-
-    def _check_sorting(self, *locations):
-        """Test sorting a given list of locations.
-
-        The input is an already ordered list of lists of tuples with
-        row, rack, and host numbers.  The test converts the tuples
-        to hostnames, preserving the original ordering.  Then it
-        flattens and scrambles the input, runs it through
-        `_sort_by_location()`, and asserts that the result matches
-        the original.
-        """
-        lab = 0
-        expected = []
-        for tuples in locations:
-            lab += 1
-            expected.append(
-                    [_FakeHostLocation((lab,) + t) for t in tuples])
-        scrambled = [e for e in itertools.chain(*expected)]
-        scrambled = [e for e in reversed(scrambled)]
-        actual = lab_inventory._sort_by_location(scrambled)
-        # The ordering of the labs in the output isn't guaranteed,
-        # so we can't compare `expected` and `actual` directly.
-        # Instead, we create a dictionary keyed on the first host in
-        # each lab, and compare the dictionaries.
-        self.assertEqual({l[0]: l for l in expected},
-                         {l[0]: l for l in actual})
-
-    def test_separate_labs(self):
-        """Test that sorting distinguishes labs."""
-        self._check_sorting([(1, 1, 1)], [(1, 1, 1)], [(1, 1, 1)])
-
-    def test_separate_rows(self):
-        """Test for proper sorting when only rows are different."""
-        self._check_sorting([(1, 1, 1), (9, 1, 1), (10, 1, 1)])
-
-    def test_separate_racks(self):
-        """Test for proper sorting when only racks are different."""
-        self._check_sorting([(1, 1, 1), (1, 9, 1), (1, 10, 1)])
-
-    def test_separate_hosts(self):
-        """Test for proper sorting when only hosts are different."""
-        self._check_sorting([(1, 1, 1), (1, 1, 9), (1, 1, 10)])
-
-    def test_diagonal(self):
-        """Test for proper sorting when all parts are different."""
-        self._check_sorting([(1, 1, 2), (1, 2, 1), (2, 1, 1)])
-
-
-class InventoryScoringTests(unittest.TestCase):
-    """Unit tests for `_score_repair_set()`."""
-
-    def setUp(self):
-        super(InventoryScoringTests, self).setUp()
-
-    def _make_buffer_counts(self, *counts):
-        """Create a dictionary suitable as `buffer_counts`.
-
-        @param counts List of tuples with model count data.
-        """
-        self._buffer_counts = dict(counts)
-
-    def _make_history_list(self, repair_counts):
-        """Create a list suitable as `repair_list`.
-
-        @param repair_counts List of (model, count) tuples.
-        """
-        pool = lab_inventory.SPARE_POOL
-        histories = []
-        for model, count in repair_counts:
-            for i in range(0, count):
-                histories.append(
-                    _FakeHostHistory(model, pool, _BROKEN))
-        return histories
-
-    def _check_better(self, repair_a, repair_b):
-        """Test that repair set A scores better than B.
-
-        Construct repair sets from `repair_a` and `repair_b`,
-        and score both of them using the pre-existing
-        `self._buffer_counts`.  Assert that the score for A is
-        better than the score for B.
-
-        @param repair_a Input data for repair set A
-        @param repair_b Input data for repair set B
-        """
-        score_a = lab_inventory._score_repair_set(
-                self._buffer_counts,
-                self._make_history_list(repair_a))
-        score_b = lab_inventory._score_repair_set(
-                self._buffer_counts,
-                self._make_history_list(repair_b))
-        self.assertGreater(score_a, score_b)
-
-    def _check_equal(self, repair_a, repair_b):
-        """Test that repair set A scores the same as B.
-
-        Construct repair sets from `repair_a` and `repair_b`,
-        and score both of them using the pre-existing
-        `self._buffer_counts`.  Assert that the score for A is
-        equal to the score for B.
-
-        @param repair_a Input data for repair set A
-        @param repair_b Input data for repair set B
-        """
-        score_a = lab_inventory._score_repair_set(
-                self._buffer_counts,
-                self._make_history_list(repair_a))
-        score_b = lab_inventory._score_repair_set(
-                self._buffer_counts,
-                self._make_history_list(repair_b))
-        self.assertEqual(score_a, score_b)
-
-    def test_improve_worst_model(self):
-        """Test that improving the worst model improves scoring.
-
-        Construct a buffer counts dictionary with all models having
-        different counts.  Assert that it is both necessary and
-        sufficient to improve the count of the worst model in order
-        to improve the score.
-        """
-        self._make_buffer_counts(('lion', 0),
-                                 ('tiger', 1),
-                                 ('bear', 2))
-        self._check_better([('lion', 1)], [('tiger', 1)])
-        self._check_better([('lion', 1)], [('bear', 1)])
-        self._check_better([('lion', 1)], [('tiger', 2)])
-        self._check_better([('lion', 1)], [('bear', 2)])
-        self._check_equal([('tiger', 1)], [('bear', 1)])
-
-    def test_improve_worst_case_count(self):
-        """Test that improving the number of worst cases improves the score.
-
-        Construct a buffer counts dictionary with all models having
-        the same counts.  Assert that improving two models is better
-        than improving one.  Assert that improving any one model is
-        as good as any other.
-        """
-        self._make_buffer_counts(('lion', 0),
-                                 ('tiger', 0),
-                                 ('bear', 0))
-        self._check_better([('lion', 1), ('tiger', 1)], [('bear', 2)])
-        self._check_equal([('lion', 2)], [('tiger', 1)])
-        self._check_equal([('tiger', 1)], [('bear', 1)])
-
-
-# Each item is the number of DUTs in that status.
-STATUS_CHOICES = (_WORKING, _BROKEN, _UNUSED)
-StatusCounts = collections.namedtuple('StatusCounts', ['good', 'bad', 'idle'])
-# Each item is a StatusCounts tuple specifying the number of DUTs per status in
-# that pool.
-CRITICAL_POOL = lab_inventory.CRITICAL_POOLS[0]
-SPARE_POOL = lab_inventory.SPARE_POOL
-POOL_CHOICES = (CRITICAL_POOL, SPARE_POOL)
-PoolStatusCounts = collections.namedtuple('PoolStatusCounts',
-                                          ['critical', 'spare'])
-
-def create_inventory(data):
-    """Create a `_LabInventory` instance for testing.
-
-    This function allows the construction of a complete `_LabInventory`
-    object from a simplified input representation.
-
-    A single 'critical pool' is arbitrarily chosen for purposes of
-    testing; there's no coverage for testing arbitrary combinations
-    in more than one critical pool.
-
-    @param data: dict {key: PoolStatusCounts}.
-    @returns: lab_inventory._LabInventory object.
-    """
-    histories = []
-    for model, counts in data.iteritems():
-        for p, pool in enumerate(POOL_CHOICES):
-            for s, status in enumerate(STATUS_CHOICES):
-                fake_host = _FakeHostHistory(model, pool, status)
-                histories.extend([fake_host] * counts[p][s])
-    inventory = lab_inventory._LabInventory(
-            histories, lab_inventory.MANAGED_POOLS)
-    return inventory
-
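-
-# --- Editor's note: illustrative sketch, not part of the original file. ---
-# Example of the simplified input accepted by `create_inventory()` above:
-# one model with one working DUT in the critical pool and one broken DUT
-# in the spare pool.
-_EXAMPLE_INVENTORY_DATA = {
-        'lion': PoolStatusCounts(
-                critical=StatusCounts(good=1, bad=0, idle=0),
-                spare=StatusCounts(good=0, bad=1, idle=0),
-        ),
-}
-# create_inventory(_EXAMPLE_INVENTORY_DATA) yields an inventory holding
-# two 'lion' DUTs in total.
-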
-
-class LabInventoryTests(unittest.TestCase):
-    """Tests for the basic functions of `_LabInventory`.
-
-    Contains basic coverage to show that after an inventory is created
-    and DUTs with known status are added, the inventory counts match the
-    counts of the added DUTs.
-    """
-
-    _MODEL_LIST = ['lion', 'tiger', 'bear'] # Oh, my!
-
-    def _check_inventory_counts(self, inventory, data, msg=None):
-        """Check that all counts in the inventory match `data`.
-
-        This asserts that the actual counts returned by the various
-        accessor functions for `inventory` match the values expected for
-        the given `data` that created the inventory.
-
-        @param inventory: _LabInventory object to check.
-        @param data Inventory data to check against. Same form as the
-                `data` argument to `create_inventory()`.
-        """
-        self.assertEqual(set(inventory.keys()), set(data.keys()))
-        for model, histories in inventory.iteritems():
-            expected_counts = data[model]
-            actual_counts = PoolStatusCounts(
-                    StatusCounts(
-                            histories.get_working(CRITICAL_POOL),
-                            histories.get_broken(CRITICAL_POOL),
-                            histories.get_idle(CRITICAL_POOL),
-                    ),
-                    StatusCounts(
-                            histories.get_working(SPARE_POOL),
-                            histories.get_broken(SPARE_POOL),
-                            histories.get_idle(SPARE_POOL),
-                    ),
-            )
-            self.assertEqual(actual_counts, expected_counts, msg)
-
-            self.assertEqual(len(histories.get_working_list()),
-                             sum([p.good for p in expected_counts]),
-                             msg)
-            self.assertEqual(len(histories.get_broken_list()),
-                             sum([p.bad for p in expected_counts]),
-                             msg)
-            self.assertEqual(len(histories.get_idle_list()),
-                             sum([p.idle for p in expected_counts]),
-                             msg)
-
-    def test_empty(self):
-        """Test counts when there are no DUTs recorded."""
-        inventory = create_inventory({})
-        self.assertEqual(inventory.get_num_duts(), 0)
-        self.assertEqual(inventory.get_boards(), set())
-        self._check_inventory_counts(inventory, {})
-        self.assertEqual(inventory.get_num_models(), 0)
-
-    def _check_model_count(self, model_count):
-        """Parameterized test for testing a specific number of models."""
-        msg = '[model: %d]' % (model_count,)
-        models = self._MODEL_LIST[:model_count]
-        data = {
-                m: PoolStatusCounts(
-                        StatusCounts(1, 1, 1),
-                        StatusCounts(1, 1, 1),
-                )
-                for m in models
-        }
-        inventory = create_inventory(data)
-        self.assertEqual(inventory.get_num_duts(), 6 * model_count, msg)
-        self.assertEqual(inventory.get_num_models(), model_count, msg)
-        for pool in [CRITICAL_POOL, SPARE_POOL]:
-            self.assertEqual(set(inventory.get_pool_models(pool)),
-                             set(models))
-        self._check_inventory_counts(inventory, data, msg=msg)
-
-    def test_model_counts(self):
-        """Test counts for various numbers of models."""
-        self.longMessage = True
-        for model_count in range(0, len(self._MODEL_LIST)):
-            self._check_model_count(model_count)
-
-    def _check_single_dut_counts(self, critical, spare):
-        """Parmeterized test for single dut counts."""
-        self.longMessage = True
-        counts = PoolStatusCounts(critical, spare)
-        model = self._MODEL_LIST[0]
-        data = {model: counts}
-        msg = '[data: %s]' % (data,)
-        inventory = create_inventory(data)
-        self.assertEqual(inventory.get_num_duts(), 1, msg)
-        self.assertEqual(inventory.get_num_models(), 1, msg)
-        self._check_inventory_counts(inventory, data, msg=msg)
-
-    def test_single_dut_counts(self):
-        """Test counts when there is a single DUT per board, and it is good."""
-        status_100 = StatusCounts(1, 0, 0)
-        status_010 = StatusCounts(0, 1, 0)
-        status_001 = StatusCounts(0, 0, 1)
-        status_null = StatusCounts(0, 0, 0)
-        self._check_single_dut_counts(status_100, status_null)
-        self._check_single_dut_counts(status_010, status_null)
-        self._check_single_dut_counts(status_001, status_null)
-        self._check_single_dut_counts(status_null, status_100)
-        self._check_single_dut_counts(status_null, status_010)
-        self._check_single_dut_counts(status_null, status_001)
-
-
-# MODEL_MESSAGE_TEMPLATE -
-# This is a sample of the output text produced by
-# _generate_model_inventory_message().  This string is parsed by the
-# tests below to construct a sample inventory that should produce
-# the output, and then the output is generated and checked against
-# this original sample.
-#
-# Constructing inventories from parsed sample text serves two
-# related purposes:
-#   - It provides a way to see what the output should look like
-#     without having to run the script.
-#   - It helps make sure that a human being will actually look at
-#     the output to see that it's basically readable.
-# This should also help prevent test bugs caused by writing tests
-# that simply parrot the original output generation code.
-
-_MODEL_MESSAGE_TEMPLATE = '''
-Model                  Avail   Bad  Idle  Good Spare Total
-lion                      -1    13     2    11    12    26
-tiger                     -1     5     2     9     4    16
-bear                       0     5     2    10     5    17
-platypus                   4     2     2    20     6    24
-aardvark                   7     2     2     6     9    10
-'''
-
-
-class PoolSetInventoryTests(unittest.TestCase):
-    """Tests for `_generate_model_inventory_message()`.
-
-    The tests create various test inventories designed to match the
-    counts in `_MODEL_MESSAGE_TEMPLATE`, and asserts that the
-    generated message text matches the original message text.
-
-    Message text is represented as a list of strings, split on the
-    `'\n'` separator.
-    """
-
-    def setUp(self):
-        self.maxDiff = None
-        lines = [x.strip() for x in _MODEL_MESSAGE_TEMPLATE.split('\n') if
-                 x.strip()]
-        self._header, self._model_lines = lines[0], lines[1:]
-        self._model_data = []
-        for l in self._model_lines:
-            items = l.split()
-            model = items[0]
-            bad, idle, good, spare = [int(x) for x in items[2:-1]]
-            self._model_data.append((model, (good, bad, idle, spare)))
-
-    def _make_minimum_spares(self, counts):
-        """Create a counts tuple with as few spare DUTs as possible."""
-        good, bad, idle, spares = counts
-        if spares > bad + idle:
-            return PoolStatusCounts(
-                    StatusCounts(good + bad + idle - spares, 0, 0),
-                    StatusCounts(spares - bad - idle, bad, idle),
-            )
-        elif spares < bad:
-            return PoolStatusCounts(
-                    StatusCounts(good, bad - spares, idle),
-                    StatusCounts(0, spares, 0),
-            )
-        else:
-            return PoolStatusCounts(
-                    StatusCounts(good, 0, idle + bad - spares),
-                    StatusCounts(0, bad, spares - bad),
-            )
-
-    def _make_maximum_spares(self, counts):
-        """Create a counts tuple with as many spare DUTs as possible."""
-        good, bad, idle, spares = counts
-        if good > spares:
-            return PoolStatusCounts(
-                    StatusCounts(good - spares, bad, idle),
-                    StatusCounts(spares, 0, 0),
-            )
-        elif good + bad > spares:
-            return PoolStatusCounts(
-                    StatusCounts(0, good + bad - spares, idle),
-                    StatusCounts(good, spares - good, 0),
-            )
-        else:
-            return PoolStatusCounts(
-                    StatusCounts(0, 0, good + bad + idle - spares),
-                    StatusCounts(good, bad, spares - good - bad),
-            )
-
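-    # --- Editor's note: worked example, not part of the original file. ---
-    # For the 'bear' row of `_MODEL_MESSAGE_TEMPLATE`, the parsed counts
-    # are (good, bad, idle, spares) == (10, 5, 2, 5), and the two helpers
-    # above split them as:
-    #   _make_minimum_spares -> critical (10, 0, 2), spare (0, 5, 0)
-    #   _make_maximum_spares -> critical (5, 5, 2),  spare (5, 0, 0)
-    # Both splits preserve the per-model totals shown in the template.
-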
-    def _check_message(self, message):
-        """Checks that message approximately matches expected string."""
-        message = [x.strip() for x in message.split('\n') if x.strip()]
-        self.assertIn(self._header, message)
-        body = message[message.index(self._header) + 1:]
-        self.assertEqual(body, self._model_lines)
-
-    def test_minimum_spares(self):
-        """Test message generation when the spares pool is low."""
-        data = {
-            model: self._make_minimum_spares(counts)
-                for model, counts in self._model_data
-        }
-        inventory = create_inventory(data)
-        message = lab_inventory._generate_model_inventory_message(inventory)
-        self._check_message(message)
-
-    def test_maximum_spares(self):
-        """Test message generation when the critical pool is low."""
-        data = {
-            model: self._make_maximum_spares(counts)
-                for model, counts in self._model_data
-        }
-        inventory = create_inventory(data)
-        message = lab_inventory._generate_model_inventory_message(inventory)
-        self._check_message(message)
-
-    def test_ignore_no_spares(self):
-        """Test that messages ignore models with no spare pool."""
-        data = {
-            model: self._make_maximum_spares(counts)
-                for model, counts in self._model_data
-        }
-        data['elephant'] = ((5, 4, 0), (0, 0, 0))
-        inventory = create_inventory(data)
-        message = lab_inventory._generate_model_inventory_message(inventory)
-        self._check_message(message)
-
-    def test_ignore_no_critical(self):
-        """Test that messages ignore models with no critical pools."""
-        data = {
-            model: self._make_maximum_spares(counts)
-                for model, counts in self._model_data
-        }
-        data['elephant'] = ((0, 0, 0), (1, 5, 1))
-        inventory = create_inventory(data)
-        message = lab_inventory._generate_model_inventory_message(inventory)
-        self._check_message(message)
-
-    def test_ignore_no_bad(self):
-        """Test that messages ignore models with no bad DUTs."""
-        data = {
-            model: self._make_maximum_spares(counts)
-                for model, counts in self._model_data
-        }
-        data['elephant'] = ((5, 0, 1), (5, 0, 1))
-        inventory = create_inventory(data)
-        message = lab_inventory._generate_model_inventory_message(inventory)
-        self._check_message(message)
-
-
-class _PoolInventoryTestBase(unittest.TestCase):
-    """Parent class for tests relating to generating pool inventory messages.
-
-    The helper `_read_template()` parses a given |message_template| to
-    obtain its header and body for use by subclasses.
-    """
-
-    def _read_template(self, message_template):
-        """Read message template for PoolInventoryTest and IdleInventoryTest.
-
-        @param message_template: the input template to be parsed into a
-        header and content lines (`self._report_lines`).
-        """
-        message_lines = message_template.split('\n')
-        self._header = message_lines[1]
-        self._report_lines = message_lines[2:-1]
-
-    def _check_report_no_info(self, text):
-        """Test a message body containing no reported info.
-
-        The input `text` was created from a query to an inventory for
-        which no objects matched the query, producing an 'empty' report.
-        Assert that the text consists of a single line starting with '('
-        and ending with ')'.
-
-        @param text: Message body text to be tested.
-        """
-        self.assertTrue(len(text) == 1 and
-                            text[0][0] == '(' and
-                            text[0][-1] == ')')
-
-    def _check_report(self, text):
-        """Test a message against the passed |expected_content|.
-
-        @param text: Message body text to be tested.
-        @param expected_content: The ground-truth content to be compared with.
-        """
-        self.assertEqual(text, self._report_lines)
-
-
-# _POOL_MESSAGE_TEMPLATE -
-# This is a sample of the output text produced by
-# _generate_pool_inventory_message().  This string is parsed by the
-# tests below to construct a sample inventory that should produce
-# the output, and then the output is generated and checked against
-# this original sample.
-#
-# See the comments on _MODEL_MESSAGE_TEMPLATE above for the
-# rationale on using sample text in this way.
-
-_POOL_MESSAGE_TEMPLATE = '''
-Model                    Bad  Idle  Good Total
-lion                       5     2     6    13
-tiger                      4     1     5    10
-bear                       3     0     7    10
-aardvark                   2     0     0     2
-platypus                   1     1     1     3
-'''
-
-_POOL_ADMIN_URL = 'http://go/cros-manage-duts'
-
-
-class PoolInventoryTests(_PoolInventoryTestBase):
-    """Tests for `_generate_pool_inventory_message()`.
-
-    The tests create various test inventories designed to match the
-    counts in `_POOL_MESSAGE_TEMPLATE`, and assert that the
-    generated message text matches the format established in the
-    original message text.
-
-    The output message text is parsed against the following grammar:
-        <message> -> <intro> <pool> { "blank line" <pool> }
-        <intro> ->
-            Instructions to the deputy mentioning the admin page URL
-            A blank line
-        <pool> ->
-            <description>
-            <header line>
-            <message body>
-        <description> ->
-            Any number of lines describing one pool
-        <header line> ->
-            The header line from `_POOL_MESSAGE_TEMPLATE`
-        <message body> ->
-            Any number of non-blank lines
-
-    After parsing messages into the parts described above, various
-    assertions are tested against the parsed output, including
-    that the message body matches the body from
-    `_POOL_MESSAGE_TEMPLATE`.
-
-    Parsed message text is represented as a list of strings, split on
-    the `'\n'` separator.
-    """
-
-    def setUp(self):
-        super(PoolInventoryTests, self)._read_template(_POOL_MESSAGE_TEMPLATE)
-        self._model_data = []
-        for l in self._report_lines:
-            items = l.split()
-            model = items[0]
-            bad = int(items[1])
-            idle = int(items[2])
-            good = int(items[3])
-            self._model_data.append((model, (good, bad, idle)))
-
-    def _create_histories(self, pools, model_data):
-        """Return a list suitable to create a `_LabInventory` object.
-
-        Creates a list of `_FakeHostHistory` objects that can be
-        used to create a lab inventory.  `pools` is a list of strings
-        naming pools, and `model_data` is a list of tuples of the
-        form
-            `(model, (goodcount, badcount))`
-        where
-            `model` is a model name.
-            `goodcount` is the number of working DUTs in the pool.
-            `badcount` is the number of broken DUTs in the pool.
-
-        @param pools       List of pools for which to create
-                           histories.
-        @param model_data  List of tuples containing models and DUT
-                           counts.
-        @return A list of `_FakeHostHistory` objects that can be
-                used to create a `_LabInventory` object.
-        """
-        histories = []
-        status_choices = (_WORKING, _BROKEN, _UNUSED)
-        for pool in pools:
-            for model, counts in model_data:
-                for status, count in zip(status_choices, counts):
-                    for x in range(0, count):
-                        histories.append(
-                            _FakeHostHistory(model, pool, status))
-        return histories
-
-    def _parse_pool_summaries(self, histories):
-        """Parse message output according to the grammar above.
-
-        Create a lab inventory from the given `histories`, and
-        generate the pool inventory message.  Then parse the message
-        and return a dictionary mapping each pool to the message
-        body parsed after that pool.
-
-        Tests the following assertions:
-          * Each <description> contains a mention of exactly one
-            pool in the `CRITICAL_POOLS` list.
-          * Each pool is mentioned in exactly one <description>.
-        Note that the grammar requires the header to appear once
-        for each pool, so the parsing implicitly asserts that the
-        output contains the header.
-
-        @param histories  Input used to create the test
-                          `_LabInventory` object.
-        @return A dictionary mapping each pool name to the parsed
-                message body (a list of lines) for that pool.
-        """
-        inventory = lab_inventory._LabInventory(
-                histories, lab_inventory.MANAGED_POOLS)
-        message = lab_inventory._generate_pool_inventory_message(
-                inventory).split('\n')
-        poolset = set(lab_inventory.CRITICAL_POOLS)
-        seen_url = False
-        seen_intro = False
-        description = ''
-        model_text = {}
-        current_pool = None
-        for line in message:
-            if not seen_url:
-                if _POOL_ADMIN_URL in line:
-                    seen_url = True
-            elif not seen_intro:
-                if not line:
-                    seen_intro = True
-            elif current_pool is None:
-                if line == self._header:
-                    pools_mentioned = [p for p in poolset
-                                           if p in description]
-                    self.assertEqual(len(pools_mentioned), 1)
-                    current_pool = pools_mentioned[0]
-                    description = ''
-                    model_text[current_pool] = []
-                    poolset.remove(current_pool)
-                else:
-                    description += line
-            else:
-                if line:
-                    model_text[current_pool].append(line)
-                else:
-                    current_pool = None
-        self.assertEqual(len(poolset), 0)
-        return model_text
-
-    def test_no_shortages(self):
-        """Test correct output when no pools have shortages."""
-        model_text = self._parse_pool_summaries([])
-        for text in model_text.values():
-            self._check_report_no_info(text)
-
-    def test_one_pool_shortage(self):
-        """Test correct output when exactly one pool has a shortage."""
-        for pool in lab_inventory.CRITICAL_POOLS:
-            histories = self._create_histories((pool,),
-                                               self._model_data)
-            model_text = self._parse_pool_summaries(histories)
-            for checkpool in lab_inventory.CRITICAL_POOLS:
-                text = model_text[checkpool]
-                if checkpool == pool:
-                    self._check_report(text)
-                else:
-                    self._check_report_no_info(text)
-
-    def test_all_pool_shortages(self):
-        """Test correct output when all pools have a shortage."""
-        histories = []
-        for pool in lab_inventory.CRITICAL_POOLS:
-            histories.extend(
-                self._create_histories((pool,),
-                                       self._model_data))
-        model_text = self._parse_pool_summaries(histories)
-        for pool in lab_inventory.CRITICAL_POOLS:
-            self._check_report(model_text[pool])
-
-    def test_full_model_ignored(self):
-        """Test that models at full strength are not reported."""
-        pool = lab_inventory.CRITICAL_POOLS[0]
-        full_model = [('echidna', (5, 0, 0))]
-        histories = self._create_histories((pool,),
-                                           full_model)
-        text = self._parse_pool_summaries(histories)[pool]
-        self._check_report_no_info(text)
-        model_data = self._model_data + full_model
-        histories = self._create_histories((pool,), model_data)
-        text = self._parse_pool_summaries(histories)[pool]
-        self._check_report(text)
-
-    def test_spare_pool_ignored(self):
-        """Test that reporting ignores the spare pool inventory."""
-        spare_pool = lab_inventory.SPARE_POOL
-        spare_data = self._model_data + [('echidna', (0, 5, 0))]
-        histories = self._create_histories((spare_pool,),
-                                           spare_data)
-        model_text = self._parse_pool_summaries(histories)
-        for pool in lab_inventory.CRITICAL_POOLS:
-            self._check_report_no_info(model_text[pool])
-
-
-_IDLE_MESSAGE_TEMPLATE = '''
-Hostname                       Model                Pool
-chromeos4-row12-rack4-host7    tiger                bvt
-chromeos1-row3-rack1-host2     lion                 bvt
-chromeos3-row2-rack2-host5     lion                 cq
-chromeos2-row7-rack3-host11    platypus             suites
-'''
-
-
-class IdleInventoryTests(_PoolInventoryTestBase):
-    """Tests for `_generate_idle_inventory_message()`.
-
-    The tests create idle DUTs that match the counts and pools in
-    `_IDLE_MESSAGE_TEMPLATE`, and assert that the generated
-    idle message text matches the format established in
-    `_IDLE_MESSAGE_TEMPLATE`.
-
-    Parsed message text is represented as a list of strings, split on
-    the `'\n'` separator.
-    """
-
-    def setUp(self):
-        super(IdleInventoryTests, self)._read_template(_IDLE_MESSAGE_TEMPLATE)
-        self._host_data = []
-        for h in self._report_lines:
-            items = h.split()
-            hostname = items[0]
-            model = items[1]
-            pool = items[2]
-            self._host_data.append((hostname, model, pool))
-        self._histories = []
-        self._histories.append(_FakeHostHistory('echidna', 'bvt', _BROKEN))
-        self._histories.append(_FakeHostHistory('lion', 'bvt', _WORKING))
-
-    def _add_idles(self):
-        """Add idle duts from `_IDLE_MESSAGE_TEMPLATE`."""
-        idle_histories = [_FakeHostHistory(
-                model, pool, _UNUSED, hostname)
-                        for hostname, model, pool in self._host_data]
-        self._histories.extend(idle_histories)
-
-    def _check_header(self, text):
-        """Check whether header in the template `_IDLE_MESSAGE_TEMPLATE` is in
-        passed text."""
-        self.assertIn(self._header, text)
-
-    def _get_idle_message(self, histories):
-        """Generate idle inventory and obtain its message.
-
-        @param histories: Used to create lab inventory.
-
-        @return the generated idle message.
-        """
-        inventory = lab_inventory._LabInventory(
-                histories, lab_inventory.MANAGED_POOLS)
-        message = lab_inventory._generate_idle_inventory_message(
-                inventory).split('\n')
-        return message
-
-    def test_check_idle_inventory(self):
-        """Test that reporting all the idle DUTs for every pool, sorted by
-        lab_inventory.MANAGED_POOLS.
-        """
-        self._add_idles()
-
-        message = self._get_idle_message(self._histories)
-        self._check_header(message)
-        self._check_report(message[message.index(self._header) + 1 :])
-
-    def test_no_idle_inventory(self):
-        """Test that reporting no idle DUTs."""
-        message = self._get_idle_message(self._histories)
-        self._check_header(message)
-        self._check_report_no_info(
-                message[message.index(self._header) + 1 :])
-
-
-class CommandParsingTests(unittest.TestCase):
-    """Tests for command line argument parsing in `_parse_command()`."""
-
-    # At least one of these options must be specified on every command
-    # line; otherwise, the command line parsing will fail.
-    _REPORT_OPTIONS = [
-        '--model-notify=', '--pool-notify=', '--report-untestable'
-    ]
-
-    def setUp(self):
-        dirpath = '/usr/local/fubar'
-        self._command_path = os.path.join(dirpath,
-                                          'site_utils',
-                                          'arglebargle')
-        self._logdir = os.path.join(dirpath, lab_inventory._LOGDIR)
-
-    def _parse_arguments(self, argv):
-        """Test parsing with explictly passed report options."""
-        full_argv = [self._command_path] + argv
-        return lab_inventory._parse_command(full_argv)
-
-    def _parse_non_report_arguments(self, argv):
-        """Test parsing for non-report command-line options."""
-        return self._parse_arguments(argv + self._REPORT_OPTIONS)
-
-    def _check_non_report_defaults(self, report_option):
-        arguments = self._parse_arguments([report_option])
-        self.assertEqual(arguments.duration,
-                         lab_inventory._DEFAULT_DURATION)
-        self.assertIsNone(arguments.recommend)
-        self.assertFalse(arguments.debug)
-        self.assertEqual(arguments.logdir, self._logdir)
-        self.assertEqual(arguments.modelnames, [])
-        return arguments
-
-    def test_empty_arguments(self):
-        """Test that no reports requested is an error."""
-        arguments = self._parse_arguments([])
-        self.assertIsNone(arguments)
-
-    def test_argument_defaults(self):
-        """Test that option defaults match expectations."""
-        for report in self._REPORT_OPTIONS:
-            arguments = self._check_non_report_defaults(report)
-
-    def test_model_notify_defaults(self):
-        """Test defaults when `--model-notify` is specified alone."""
-        arguments = self._parse_arguments(['--model-notify='])
-        self.assertEqual(arguments.model_notify, [''])
-        self.assertEqual(arguments.pool_notify, [])
-        self.assertFalse(arguments.report_untestable)
-
-    def test_pool_notify_defaults(self):
-        """Test defaults when `--pool-notify` is specified alone."""
-        arguments = self._parse_arguments(['--pool-notify='])
-        self.assertEqual(arguments.model_notify, [])
-        self.assertEqual(arguments.pool_notify, [''])
-        self.assertFalse(arguments.report_untestable)
-
-    def test_report_untestable_defaults(self):
-        """Test defaults when `--report-untestable` is specified alone."""
-        arguments = self._parse_arguments(['--report-untestable'])
-        self.assertEqual(arguments.model_notify, [])
-        self.assertEqual(arguments.pool_notify, [])
-        self.assertTrue(arguments.report_untestable)
-
-    def test_model_arguments(self):
-        """Test that non-option arguments are returned in `modelnames`."""
-        modellist = ['aardvark', 'echidna']
-        arguments = self._parse_non_report_arguments(modellist)
-        self.assertEqual(arguments.modelnames, modellist)
-
-    def test_recommend_option(self):
-        """Test parsing of the `--recommend` option."""
-        for opt in ['-r', '--recommend']:
-            for recommend in ['5', '55']:
-                arguments = self._parse_non_report_arguments([opt, recommend])
-                self.assertEqual(arguments.recommend, int(recommend))
-
-    def test_debug_option(self):
-        """Test parsing of the `--debug` option."""
-        arguments = self._parse_non_report_arguments(['--debug'])
-        self.assertTrue(arguments.debug)
-
-    def test_duration(self):
-        """Test parsing of the `--duration` option."""
-        for opt in ['-d', '--duration']:
-            for duration in ['1', '11']:
-                arguments = self._parse_non_report_arguments([opt, duration])
-                self.assertEqual(arguments.duration, int(duration))
-
-    def _check_email_option(self, option, getlist):
-        """Test parsing of e-mail address options.
-
-        This is a helper function to test the `--model-notify` and
-        `--pool-notify` options.  It tests the following cases:
-          * `--option a1` gives the list [a1]
-          * `--option ' a1 '` gives the list [a1]
-          * `--option a1 --option a2` gives the list [a1, a2]
-          * `--option a1,a2` gives the list [a1, a2]
-          * `--option 'a1, a2'` gives the list [a1, a2]
-
-        @param option  The option to be tested.
-        @param getlist A function to return the option's value from
-                       parsed command line arguments.
-        """
-        a1 = 'mumble@mumbler.com'
-        a2 = 'bumble@bumbler.org'
-        arguments = self._parse_arguments([option, a1])
-        self.assertEqual(getlist(arguments), [a1])
-        arguments = self._parse_arguments([option, ' ' + a1 + ' '])
-        self.assertEqual(getlist(arguments), [a1])
-        arguments = self._parse_arguments([option, a1, option, a2])
-        self.assertEqual(getlist(arguments), [a1, a2])
-        arguments = self._parse_arguments(
-                [option, ','.join([a1, a2])])
-        self.assertEqual(getlist(arguments), [a1, a2])
-        arguments = self._parse_arguments(
-                [option, ', '.join([a1, a2])])
-        self.assertEqual(getlist(arguments), [a1, a2])
-
-    def test_model_notify(self):
-        """Test parsing of the `--model-notify` option."""
-        self._check_email_option('--model-notify',
-                                 lambda a: a.model_notify)
-
-    def test_pool_notify(self):
-        """Test parsing of the `--pool-notify` option."""
-        self._check_email_option('--pool-notify',
-                                 lambda a: a.pool_notify)
-
-    def test_logdir_option(self):
-        """Test parsing of the `--logdir` option."""
-        logdir = '/usr/local/whatsis/logs'
-        arguments = self._parse_non_report_arguments(['--logdir', logdir])
-        self.assertEqual(arguments.logdir, logdir)
-
-
-if __name__ == '__main__':
-    # Some of the functions we test log messages.  Prevent those
-    # messages from showing up in test output.
-    logging.getLogger().setLevel(logging.CRITICAL)
-    unittest.main()
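
The docstring of `_check_email_option` above lists the address-parsing cases the tests cover: repeated flags, comma-separated values, and stray whitespace. As a minimal, standalone sketch of that parsing pattern (not the deleted module's actual implementation; the name `_split_notify` and the use of argparse's `action='append'` are assumptions made for illustration):

import argparse

def _split_notify(raw_values):
    # Split each raw option value on commas and strip surrounding whitespace.
    addresses = []
    for value in raw_values:
        addresses.extend(part.strip() for part in value.split(','))
    return addresses

parser = argparse.ArgumentParser()
parser.add_argument('--pool-notify', action='append', default=[])
args = parser.parse_args(['--pool-notify', ' mumble@mumbler.com ',
                          '--pool-notify', 'mumble@mumbler.com, bumble@bumbler.org'])
print(_split_notify(args.pool_notify))
# -> ['mumble@mumbler.com', 'mumble@mumbler.com', 'bumble@bumbler.org']
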
diff --git a/site_utils/label_cleaner.py b/site_utils/label_cleaner.py
deleted file mode 100755
index 2b9e277..0000000
--- a/site_utils/label_cleaner.py
+++ /dev/null
@@ -1,311 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-"""Tool for cleaning up labels that are not in use.
-
-Delete the given labels from the database when they are not in use.
-Labels that match the query `SELECT_USED_LABELS_FORMAT` are considered in use.
-Given labels that are not among the used labels are deleted.
-
-For example, the following command deletes all labels whose names begin with
-'cros-version' and are not in use.
-
-./label_cleaner.py -p cros-version
-
-If the '-p' option is not given, we delete labels whose name is exactly
-'cros-version' and that are not in use.
-"""
-
-
-import argparse
-import logging
-import os
-import socket
-import sys
-import tempfile
-
-import common
-# Installed via build_externals, must be after import common.
-import MySQLdb
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib import logging_config
-from autotest_lib.server import frontend
-from chromite.lib import metrics
-from chromite.lib import ts_mon_config
-
-
-_METRICS_PREFIX = 'chromeos/autotest/afe_db/admin/label_cleaner'
-
-GLOBAL_AFE = global_config.global_config.get_config_value(
-        'SERVER', 'global_afe_hostname')
-DB_SERVER = global_config.global_config.get_config_value('AUTOTEST_WEB', 'host')
-USER = global_config.global_config.get_config_value('AUTOTEST_WEB', 'user')
-PASSWD = global_config.global_config.get_config_value(
-        'AUTOTEST_WEB', 'password')
-DATABASE = global_config.global_config.get_config_value(
-        'AUTOTEST_WEB', 'database')
-RESPECT_STATIC_LABELS = global_config.global_config.get_config_value(
-        'SKYLAB', 'respect_static_labels', type=bool, default=False)
-
-# Per-prefix metrics are generated only for the following prefixes. This
-# allowlist is a second-level defence against populating the 'label_prefix'
-# field with arbitrary values provided on the command line.
-_LABEL_PREFIX_METRICS_ALLOWLIST = (
-        'cros-version',
-        'fwro-version',
-        'fwrw-version',
-        'pool',
-)
-
-SELECT_USED_LABELS_FORMAT = """
-SELECT DISTINCT(label_id) FROM afe_autotests_dependency_labels UNION
-SELECT DISTINCT(label_id) FROM afe_hosts_labels UNION
-SELECT DISTINCT(label_id) FROM afe_jobs_dependency_labels UNION
-SELECT DISTINCT(label_id) FROM afe_shards_labels UNION
-SELECT DISTINCT(label_id) FROM afe_parameterized_jobs UNION
-SELECT DISTINCT(meta_host) FROM afe_host_queue_entries
-"""
-
-SELECT_REPLACED_LABELS = """
-SELECT label_id FROM afe_replaced_labels
-"""
-
-DELETE_LABELS_FORMAT = """
-DELETE FROM afe_labels WHERE id in (%s)
-"""
-
-
-def get_used_labels(conn):
-    """Get labels that are currently in use.
-
-    @param conn: MySQLdb Connection object.
-
-    @return: A list of label ids.
-    """
-    cursor = conn.cursor()
-    sql = SELECT_USED_LABELS_FORMAT
-    logging.debug('Running: %r', sql)
-    cursor.execute(sql)
-    rows = cursor.fetchall()
-    return set(r[0] for r in rows)
-
-
-def fetch_labels(conn, label=None, prefix=False):
-    """Fetch labels from database.
-
-    @param conn: MySQLdb Connection object.
-    @param label: (optional) Label name to fetch.
-    @param prefix: If True, use `label` as a prefix. Otherwise, fetch
-                   labels whose name is exactly the same as `label`.
-
-    @return: A list of label ids.
-    """
-    cursor = conn.cursor()
-    if label is not None:
-        if prefix:
-            sql = 'SELECT id FROM afe_labels WHERE name LIKE "%s%%"' % label
-        else:
-            sql = 'SELECT id FROM afe_labels WHERE name = "%s"' % label
-    else:
-        sql = 'SELECT id FROM afe_labels'
-    logging.debug('Running: %r', sql)
-    cursor.execute(sql)
-    rows = cursor.fetchall()
-    # Don't delete labels whose replaced_by_static_label=True, since they're
-    # actually maintained by afe_static_labels, not afe_labels.
-    if not RESPECT_STATIC_LABELS:
-        return set(r[0] for r in rows)
-    else:
-        cursor.execute(SELECT_REPLACED_LABELS)
-        replaced_labels = cursor.fetchall()
-        replaced_label_ids = set([r[0] for r in replaced_labels])
-        return set(r[0] for r in rows) - replaced_label_ids
-
-
-def _delete_labels(conn, labels, dry_run):
-    """Helper function of `delete_labels`."""
-    labels_str = ','.join([str(l) for l in labels])
-    sql = DELETE_LABELS_FORMAT % labels_str
-    if dry_run:
-        logging.info('[DRY RUN] Would have run: %r', sql)
-    else:
-        logging.debug('Running: %r', sql)
-        conn.cursor().execute(sql)
-        conn.commit()
-
-
-def delete_labels(conn, labels, max_delete, dry_run=False):
-    """Delete given labels from database.
-
-    @param conn: MySQLdb Connection object.
-    @param labels: iterable of labels to delete.
-    @param max_delete: Max number of records to delete in a query.
-    @param dry_run: (Boolean) Whether this is a dry run.
-    """
-    while labels:
-        chunk = labels[:max_delete]
-        labels = labels[max_delete:]
-        _delete_labels(conn, chunk, dry_run)
-
-
-def is_primary_server():
-    """Check if this server's status is primary
-
-    @return: True if primary, False otherwise.
-    """
-    server = frontend.AFE(server=GLOBAL_AFE).run(
-            'get_servers', hostname=socket.getfqdn())
-    if server and server[0]['status'] == 'primary':
-        return True
-    return False
-
-
-def clean_labels(options):
-    """Cleans unused labels from AFE database"""
-    msg = 'Label cleaner starts. Will delete '
-    if options.prefix:
-        msg += 'all labels whose prefix is "%s".'
-    else:
-        msg += 'a label "%s".'
-    logging.info(msg, options.label)
-    logging.info('Target database: %s.', options.db_server)
-    if options.check_status and not is_primary_server():
-        raise Exception('Cannot run in a non-primary server')
-
-    conn = MySQLdb.connect(
-            host=options.db_server,
-            user=options.db_user,
-            passwd=options.db_password,
-            db=DATABASE,
-    )
-
-    all_labels = fetch_labels(conn)
-    logging.info('Found total %d labels', len(all_labels))
-    metrics.Gauge(_METRICS_PREFIX + '/total_labels_count').set(
-            len(all_labels),
-            fields={
-                    'target_db': options.db_server,
-                    'label_prefix': '',
-            },
-    )
-
-    labels = fetch_labels(conn, options.label, options.prefix)
-    logging.info('Found total %d labels matching %s', len(labels),
-                 options.label)
-    if options.prefix and options.label in _LABEL_PREFIX_METRICS_ALLOWLIST:
-        metrics.Gauge(_METRICS_PREFIX + '/total_labels_count').set(
-                len(labels),
-                fields={
-                        'target_db': options.db_server,
-                        'label_prefix': options.label,
-                },
-        )
-
-    used_labels = get_used_labels(conn)
-    logging.info('Found %d labels in use', len(used_labels))
-    metrics.Gauge(_METRICS_PREFIX + '/used_labels_count').set(
-            len(used_labels), fields={'target_db': options.db_server})
-
-    to_delete = list(labels - used_labels)
-    logging.info('Deleting %d unused labels', len(to_delete))
-    delete_labels(conn, to_delete, options.max_delete, options.dry_run)
-    metrics.Counter(_METRICS_PREFIX + '/labels_deleted').increment_by(
-            len(to_delete), fields={'target_db': options.db_server})
-
-
-def main():
-    """Cleans unused labels from AFE database"""
-    parser = argparse.ArgumentParser(
-            formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-    parser.add_argument(
-            '--db',
-            dest='db_server',
-            help='Database server',
-            default=DB_SERVER,
-    )
-    parser.add_argument(
-            '--db-user',
-            dest='db_user',
-            help='Database user',
-            default=USER,
-    )
-    parser.add_argument(
-            '--db-password',
-            dest='db_password',
-            help='Database password',
-            default=PASSWD,
-    )
-    parser.add_argument(
-            '-p',
-            dest='prefix',
-            action='store_true',
-            help=('Use argument <label> as a prefix for matching. '
-                  'For example, when the argument <label> is "cros-version" '
-                  'and this option is enabled, labels whose names begin '
-                  'with "cros-version" are matched. When this option is '
-                  'disabled, we match labels whose name is exactly the '
-                  'same as the argument <label>.'),
-    )
-    parser.add_argument(
-            '-n',
-            dest='max_delete',
-            type=int,
-            help='Max number of records to delete in each query.',
-            default=100,
-    )
-    parser.add_argument(
-            '-s',
-            dest='check_status',
-            action='store_true',
-            help='Only allow running on a server that has primary status',
-    )
-    parser.add_argument(
-            '--dry-run',
-            dest='dry_run',
-            action='store_true',
-            help='Dry run mode. Do not actually delete any labels.',
-    )
-    parser.add_argument('label', help='Label name to delete')
-    options = parser.parse_args()
-
-    logging_config.LoggingConfig().configure_logging(
-            datefmt='%Y-%m-%d %H:%M:%S',
-            verbose=True)
-
-    if options.dry_run:
-        tfd, metrics_file = tempfile.mkstemp()
-        os.close(tfd)
-        ts_mon_context = ts_mon_config.SetupTsMonGlobalState(
-                'afe_label_cleaner',
-                auto_flush=False,
-                debug_file=metrics_file,
-        )
-    else:
-        ts_mon_context = ts_mon_config.SetupTsMonGlobalState(
-                'afe_label_cleaner',
-                auto_flush=False,
-        )
-    with ts_mon_context:
-        try:
-            clean_labels(options)
-        except:
-            metrics.Counter(_METRICS_PREFIX + '/tick').increment(
-                    fields={'target_db': options.db_server,
-                            'success': False})
-            raise
-        else:
-            metrics.Counter(_METRICS_PREFIX + '/tick').increment(
-                    fields={'target_db': options.db_server,
-                            'success': True})
-        finally:
-            metrics.Flush()
-            if options.dry_run:
-                logging.info('Dumped ts_mon metrics to %s', metrics_file)
-
-
-if __name__ == '__main__':
-    sys.exit(main())
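
For reference, the chunked delete that `delete_labels()` performed above can also be written with MySQLdb parameter placeholders rather than interpolating ids into the SQL string. A minimal sketch under the assumption that `conn` is an open MySQLdb connection and `label_ids` is a list of integer ids (the name `delete_in_chunks` is illustrative, not part of the removed script):

def delete_in_chunks(conn, label_ids, max_delete=100, dry_run=False):
    # Delete ids from afe_labels, at most max_delete rows per statement.
    cursor = conn.cursor()
    while label_ids:
        chunk, label_ids = label_ids[:max_delete], label_ids[max_delete:]
        placeholders = ','.join(['%s'] * len(chunk))
        sql = 'DELETE FROM afe_labels WHERE id IN (%s)' % placeholders
        if dry_run:
            print('[DRY RUN] Would have run: %r with %r' % (sql, chunk))
        else:
            cursor.execute(sql, chunk)
            conn.commit()
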
diff --git a/site_utils/lib/infra.py b/site_utils/lib/infra.py
index 64d9003..9df8d88 100644
--- a/site_utils/lib/infra.py
+++ b/site_utils/lib/infra.py
@@ -12,7 +12,6 @@
 from autotest_lib.client.common_lib import error
 from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.common_lib import utils
-from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
 
 
 @contextlib.contextmanager
@@ -43,20 +42,20 @@
     @returns: The output of cmd, will be stdout and stderr.
     @raises CalledProcessError: If there was a non-0 return code.
     """
-    print 'Running command: %s' % cmd
+    print('Running command: %s' % cmd)
     proc = subprocess.Popen(
         cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     if stream_output:
         output = ''
         for newline in iter(proc.stdout.readline, ''):
             output += newline
-            print newline.rstrip(os.linesep)
+            print(newline.rstrip(os.linesep))
     else:
         output = proc.communicate()[0]
 
     return_code = proc.wait()
     if return_code !=0:
-        print "ERROR: '%s' failed with error:\n%s" % (cmd, output)
+        print("ERROR: '%s' failed with error:\n%s" % (cmd, output))
         raise subprocess.CalledProcessError(return_code, cmd, output[:1024])
     return output
 
diff --git a/site_utils/log_socket_server_unittest.py b/site_utils/log_socket_server_unittest.py
index a289670..b725ac6 100644
--- a/site_utils/log_socket_server_unittest.py
+++ b/site_utils/log_socket_server_unittest.py
@@ -9,21 +9,18 @@
 
 import logging
 import logging.handlers
-import mox
 import multiprocessing
+import os
 import tempfile
 import time
-import os
 import unittest
 
 from autotest_lib.site_utils import log_socket_server
 from six.moves import range
 
 
-class TestLogSocketServer(mox.MoxTestBase):
-    """Test LogSocketServer can start and save logs to a local file.
-    """
-
+class TestLogSocketServer(unittest.TestCase):
+    """Test LogSocketServer can start and save logs to a local file."""
 
     def log_call(self, value, port):
         """Method to be called in a new process to log to a socket server.
@@ -37,7 +34,6 @@
         logging.getLogger().level = logging.INFO
         logging.info(value)
 
-
     def testMultiProcessLoggingSuccess(self):
         """Test log can be saved from multiple processes."""
         # Start log TCP server.
@@ -65,7 +61,7 @@
         # Read log to confirm all logs are written to file.
         num_lines = sum(1 for line in open(log_filename))
         if process_number != num_lines:
-            logging.warn('Not all log messages were written to file %s. '
+            logging.warning('Not all log messages were written to file %s. '
                          'Expected number of logs: %s, Logs found in file: %s',
                          log_filename, process_number, num_lines)
         self.assertNotEqual(0, num_lines, 'No log message was written to file '
diff --git a/site_utils/lxc.py b/site_utils/lxc.py
index 7062e2b..37bf48b 100755
--- a/site_utils/lxc.py
+++ b/site_utils/lxc.py
@@ -29,9 +29,11 @@
     parser.add_argument('-s', '--setup', action='store_true',
                         default=False,
                         help='Set up base container.')
-    parser.add_argument('-p', '--path', type=str,
+    parser.add_argument('-p',
+                        '--path',
+                        type=str,
                         help='Directory to store the container.',
-                        default=lxc.DEFAULT_CONTAINER_PATH)
+                        default=lxc.DEFAULT_BASE_CONTAINER_PATH)
     parser.add_argument('-f', '--force_delete', action='store_true',
                         default=False,
                         help=('Force to delete existing containers and rebuild '
@@ -53,7 +55,7 @@
     # TODO(dshi): crbug.com/459344 Set remove this enforcement when test
     # container can be unprivileged container.
     if utils.sudo_require_password():
-        logging.warn('SSP requires root privilege to run commands, please '
+        logging.warning('SSP requires root privilege to run commands, please '
                      'grant root access to this process.')
         utils.run('sudo true')
 
diff --git a/site_utils/lxc/__init__.py b/site_utils/lxc/__init__.py
index b307c5a..f128623 100644
--- a/site_utils/lxc/__init__.py
+++ b/site_utils/lxc/__init__.py
@@ -9,10 +9,17 @@
   4. Run a command in the container and return the output.
   5. Cleanup, e.g., destroy the container.
 """
-
-from constants import *
-from container import Container
-from container import ContainerId
-from container_bucket import ContainerBucket
-from container_factory import ContainerFactory
-from lxc import install_packages
+try:
+    from autotest_lib.site_utils.lxc.constants import *
+    from autotest_lib.site_utils.lxc.container import Container
+    from autotest_lib.site_utils.lxc.container import ContainerId
+    from autotest_lib.site_utils.lxc.container_bucket import ContainerBucket
+    from autotest_lib.site_utils.lxc.container_factory import ContainerFactory
+    from autotest_lib.site_utils.lxc.lxc import install_packages
+except ImportError:
+    from constants import *
+    from container import Container
+    from container import ContainerId
+    from container_bucket import ContainerBucket
+    from container_factory import ContainerFactory
+    from lxc import install_packages
diff --git a/site_utils/lxc/base_image.py b/site_utils/lxc/base_image.py
index c2435b3..c29819c 100644
--- a/site_utils/lxc/base_image.py
+++ b/site_utils/lxc/base_image.py
@@ -2,6 +2,10 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import os
 import sys
@@ -13,6 +17,8 @@
 from autotest_lib.site_utils.lxc import lxc
 from autotest_lib.site_utils.lxc import utils as lxc_utils
 from autotest_lib.site_utils.lxc.container import Container
+import six
+from six.moves import range
 
 
 class BaseImage(object):
@@ -102,7 +108,7 @@
                 except error.CmdError as e:
                     logging.error(e)
                 # Raise the cached exception with original backtrace.
-                raise exc_info[0], exc_info[1], exc_info[2]
+                six.reraise(exc_info[0], exc_info[1], exc_info[2])
             else:
                 raise
         else:
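
The `six.reraise()` call introduced above replaces the Python 2-only three-argument `raise` statement so that a cached exception keeps its original traceback after cleanup runs. A small standalone illustration of the pattern (the function name `run_then_cleanup` is hypothetical):

import sys

import six


def run_then_cleanup(action, cleanup):
    # Run `action`; if it fails, run `cleanup` and re-raise the original error.
    try:
        action()
    except Exception:
        exc_info = sys.exc_info()  # cache (type, value, traceback)
        cleanup()                  # best-effort cleanup, may log further errors
        # Re-raise with the original backtrace preserved.
        six.reraise(exc_info[0], exc_info[1], exc_info[2])
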
diff --git a/site_utils/lxc/base_image_unittest.py b/site_utils/lxc/base_image_unittest.py
index c9b8488..33e87f3 100755
--- a/site_utils/lxc/base_image_unittest.py
+++ b/site_utils/lxc/base_image_unittest.py
@@ -1,8 +1,12 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import os
 import shutil
 import tempfile
@@ -16,6 +20,7 @@
 from autotest_lib.site_utils.lxc import unittest_setup
 from autotest_lib.site_utils.lxc import utils as lxc_utils
 from autotest_lib.site_utils.lxc.base_image import BaseImage
+from six.moves import range
 
 
 test_dir = None
diff --git a/site_utils/lxc/cleanup_if_fail.py b/site_utils/lxc/cleanup_if_fail.py
index 254b3fb..a9a6b7a 100644
--- a/site_utils/lxc/cleanup_if_fail.py
+++ b/site_utils/lxc/cleanup_if_fail.py
@@ -2,12 +2,17 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import sys
 
 import common
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
+import six
 
 
 def cleanup_if_fail():
@@ -48,6 +53,7 @@
                     logging.error(e)
 
                 # Raise the cached exception with original backtrace.
-                raise exc_info[0], exc_info[1], exc_info[2]
+                six.reraise(exc_info[0], exc_info[1], exc_info[2])
+
         return func_cleanup_if_fail
     return deco_cleanup_if_fail
diff --git a/site_utils/lxc/config.py b/site_utils/lxc/config.py
index 01f6e33..fac75d7 100644
--- a/site_utils/lxc/config.py
+++ b/site_utils/lxc/config.py
@@ -211,7 +211,7 @@
         self.container = container
         # If shadow config is used, the deployment procedure will skip some
         # special handling of config file, e.g.,
-        # 1. Set enable_master_ssh to False in autotest shadow config.
+        # 1. Set enable_main_ssh to False in autotest shadow config.
         # 2. Set ssh loglevel to ERROR for all hosts.
         if config_file is None:
             self.is_shadow_config = os.path.exists(
@@ -289,7 +289,7 @@
         container. If one chooses to use a shadow SSP deploy config file, the
         autotest shadow_config.ini must be from a source with following
         modification:
-        1. Disable master ssh connection in shadow config, as it is not working
+        1. Disable main ssh connection in shadow config, as it is not working
            properly in container yet, and produces noise in the log.
         2. Update AUTOTEST_WEB/host and SERVER/hostname to be the IP of the host
            if any is set to localhost or 127.0.0.1. Otherwise, set it to be the
@@ -302,10 +302,10 @@
         shadow_config = os.path.join(constants.CONTAINER_AUTOTEST_DIR,
                                      'shadow_config.ini')
 
-        # Inject "AUTOSERV/enable_master_ssh: False" in shadow config as
-        # container does not support master ssh connection yet.
+        # Inject "AUTOSERV/enable_main_ssh: False" in shadow config as
+        # container does not support main ssh connection yet.
         self.container.attach_run(
-                'echo $\'\n[AUTOSERV]\nenable_master_ssh: False\n\' >> %s' %
+                'echo $\'\n[AUTOSERV]\nenable_main_ssh: False\n\' >> %s' %
                 shadow_config)
 
         host_ip = lxc_utils.get_host_ip()
@@ -353,7 +353,7 @@
         self.container.attach_run('sed -i \'s/UseProxyIf=false//g\' \'%s\'' %
                                   ssh_config)
         # TODO(dshi): crbug.com/451622 ssh connection loglevel is set to
-        # ERROR in container before master ssh connection works. This is
+        # ERROR in container before the ssh connection works. This is
         # to avoid logs being flooded with warning `Permanently added
         # '[hostname]' (RSA) to the list of known hosts.` (crbug.com/478364)
         # The sed command injects following at the beginning of .ssh/config
diff --git a/site_utils/lxc/constants.py b/site_utils/lxc/constants.py
index 20ff29d..de6d32c 100644
--- a/site_utils/lxc/constants.py
+++ b/site_utils/lxc/constants.py
@@ -29,6 +29,9 @@
 # Default directory used to store LXC containers.
 DEFAULT_CONTAINER_PATH = global_config.get_config_value('AUTOSERV',
                                                         'container_path')
+# Default directory used to store the base LXC container.
+DEFAULT_BASE_CONTAINER_PATH = global_config.get_config_value(
+        'AUTOSERV', 'base_container_path')
 # Default directory for host mounts
 DEFAULT_SHARED_HOST_PATH = global_config.get_config_value(
         'AUTOSERV',
@@ -81,9 +84,10 @@
 SUPPORT_SNAPSHOT_CLONE = True
 
 # Number of seconds to wait for network to be up in a container.
-NETWORK_INIT_TIMEOUT = 300
+NETWORK_INIT_TIMEOUT = 1200
 # Network bring up is slower in Moblab.
-NETWORK_INIT_CHECK_INTERVAL = 1 if IS_MOBLAB else 0.1
+# TODO(184304822) reset back to 0.1 for the main lab.
+NETWORK_INIT_CHECK_INTERVAL = 1 if IS_MOBLAB else 5
 
 # Number of seconds to download files from devserver. We chose a timeout that
 # is on the same order as the permitted CTS runtime for normal jobs (1h). In
diff --git a/site_utils/lxc/container.py b/site_utils/lxc/container.py
index dcdbd5a..6c5a2e9 100644
--- a/site_utils/lxc/container.py
+++ b/site_utils/lxc/container.py
@@ -2,11 +2,16 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import collections
 import json
 import logging
 import os
 import re
+import shutil
 import tempfile
 import time
 
@@ -16,14 +21,13 @@
 from autotest_lib.site_utils.lxc import constants
 from autotest_lib.site_utils.lxc import lxc
 from autotest_lib.site_utils.lxc import utils as lxc_utils
+import six
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
-ISOLATESERVER = 'https://isolateserver.appspot.com'
-
 # Naming convention of test container, e.g., test_300_1422862512_2424, where:
 # 300:        The test job ID.
 # 1422862512: The tick when container is created.
@@ -43,6 +47,9 @@
 
 
     def __str__(self):
+        # NOTE: The `creation_time` is a float, but we format it as an integer.
+        # Internally we still use the float value for comparisons, hashing,
+        # etc.
         return _TEST_CONTAINER_NAME_FMT % self
 
 
@@ -56,6 +63,10 @@
         with open(dst, 'w') as f:
             json.dump(self, f)
 
+        with open(dst) as f:
+            logging.debug('Container id saved to %s (content: %s)', dst,
+                          f.read())
+
     @classmethod
     def load(cls, path):
         """Reads the ID from the given path.
@@ -74,9 +85,10 @@
         try:
             with open(src, 'r') as f:
                 job_id, ctime, pid = json.load(f)
-        except IOError:
+        except IOError as err:
             # File not found, or couldn't be opened for some other reason.
             # Treat all these cases as no ID.
+            logging.warning('Failed to load container id file "%s": %s',
+                            src, err)
             return None
         # TODO(pprabhu, crbug.com/842343) Remove this once all persistent
         # container ids have migrated to str.
@@ -151,7 +163,7 @@
         # property rootfs is retrieved.
         self._rootfs = None
         self.name = name
-        for attribute, value in attribute_values.iteritems():
+        for attribute, value in six.iteritems(attribute_values):
             setattr(self, attribute, value)
 
         # Clone the container
@@ -166,6 +178,7 @@
             try:
                 self._id = ContainerId.load(
                         os.path.join(self.container_path, self.name))
+                logging.debug('Container %s has id: "%s"', self.name, self._id)
             except (ValueError, TypeError):
                 # Ignore load errors.  ContainerBucket currently queries every
                 # container quite frequently, and emitting exceptions here would
@@ -176,7 +189,7 @@
                 self._id = None
 
         if not Container._LXC_VERSION:
-          Container._LXC_VERSION = lxc_utils.get_lxc_version()
+            Container._LXC_VERSION = lxc_utils.get_lxc_version()
 
 
     @classmethod
@@ -239,7 +252,7 @@
                 except error.CmdError as e:
                     # The container could be created in an incomplete
                     # state. Delete the container folder instead.
-                    logging.warn('Failed to destroy container %s, error: %s',
+                    logging.warning('Failed to destroy container %s, error: %s',
                                  new_name, e)
                     utils.run('sudo rm -rf "%s"' % container_folder)
             # Create the directory prior to creating the new container.  This
@@ -263,7 +276,7 @@
                     'No container found in directory %s with name of %s.' %
                     (self.container_path, self.name))
         attribute_values = containers[0]
-        for attribute, value in attribute_values.iteritems():
+        for attribute, value in six.iteritems(attribute_values):
             setattr(self, attribute, value)
 
 
@@ -332,8 +345,18 @@
 
         @return: True if the network is up, otherwise False.
         """
+        # TODO(b/184304822) Remove the extra logging.
         try:
-            self.attach_run('curl --head %s' % constants.CONTAINER_BASE_URL)
+            with open('/proc/net/udp') as f:
+                logging.debug('Checking UDP on drone:\n %s', f.read())
+        except Exception as e:
+            logging.debug(e)
+
+        try:
+            self.attach_run('ifconfig eth0 ;'
+                            'ping -c 1 8.8.8.8 ;'
+                            'cat /proc/net/udp ;'
+                            'curl --head %s' % constants.CONTAINER_BASE_URL)
             return True
         except error.CmdError as e:
             logging.debug(e)
@@ -342,7 +365,7 @@
 
     @metrics.SecondsTimerDecorator(
         '%s/container_start_duration' % constants.STATS_KEY)
-    def start(self, wait_for_network=True):
+    def start(self, wait_for_network=True, log_dir=None):
         """Start the container.
 
         @param wait_for_network: True to wait for network to be up. Default is
@@ -350,7 +373,14 @@
 
         @raise ContainerError: If container does not exist, or fails to start.
         """
-        cmd = 'sudo lxc-start -P %s -n %s -d' % (self.container_path, self.name)
+        log_addendum = ""
+        if log_dir:
+            log_addendum = "--logpriority=DEBUG --logfile={} --console-log={}".format(
+                    os.path.join(log_dir, 'ssp_logs/debug/lxc-start.log'),
+                    os.path.join(log_dir, 'ssp_logs/debug/lxc-console.log'))
+
+        cmd = 'sudo lxc-start -P %s -n %s -d %s' % (self.container_path,
+                                                    self.name, log_addendum)
         output = utils.run(cmd).stdout
         if not self.is_running():
             raise error.ContainerError(
@@ -361,11 +391,17 @@
         if wait_for_network:
             logging.debug('Wait for network to be up.')
             start_time = time.time()
-            utils.poll_for_condition(
-                condition=self.is_network_up,
-                timeout=constants.NETWORK_INIT_TIMEOUT,
-                sleep_interval=constants.NETWORK_INIT_CHECK_INTERVAL,
-                desc='network is up')
+            try:
+                utils.poll_for_condition(
+                        condition=self.is_network_up,
+                        timeout=constants.NETWORK_INIT_TIMEOUT,
+                        sleep_interval=constants.NETWORK_INIT_CHECK_INTERVAL,
+                        desc='network is up')
+            except Exception:
+                # Save and upload syslog for network issues debugging.
+                shutil.copy('/var/log/syslog',
+                            os.path.join(log_dir, 'ssp_logs', 'debug'))
+                raise
             logging.debug('Network is up after %.2f seconds.',
                           time.time() - start_time)
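
The network wait above uses autotest's `utils.poll_for_condition(condition, timeout, sleep_interval, desc)`. A generic standalone sketch of that polling pattern, with an illustrative name (`_poll_until`) rather than the real helper:

import time


def _poll_until(condition, timeout, sleep_interval, desc):
    # Call `condition` until it returns truthy or `timeout` seconds elapse.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if condition():
            return
        time.sleep(sleep_interval)
    raise TimeoutError('Timed out waiting for: %s' % desc)

# e.g. _poll_until(container.is_network_up, 1200, 5, 'network is up'),
# where 1200 and 5 mirror NETWORK_INIT_TIMEOUT and the non-Moblab
# NETWORK_INIT_CHECK_INTERVAL from constants.py above.
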
 
@@ -518,35 +554,6 @@
 
         lxc.download_extract(ssp_url, autotest_pkg_path, usr_local_path)
 
-    def install_ssp_isolate(self, isolate_hash, dest_path=None):
-        """Downloads and install the contents of the given isolate.
-        This places the isolate contents under /usr/local or a provided path.
-        Most commonly this is a copy of a specific autotest version, in which
-        case:
-          /usr/local/autotest contains the autotest code
-          /usr/local/logs contains logs from the installation process.
-
-        @param isolate_hash: The hash string which serves as a key to retrieve
-                             the desired isolate
-        @param dest_path: Path to the directory to place the isolate in.
-                          Defaults to /usr/local/
-
-        @return: Exit status of the installation command.
-        """
-        dest_path = dest_path or os.path.join(self.rootfs, 'usr', 'local')
-        isolate_log_path = os.path.join(
-            self.rootfs, 'usr', 'local', 'logs', 'isolate')
-        log_file = os.path.join(isolate_log_path,
-            'contents.' + time.strftime('%Y-%m-%d-%H.%M.%S'))
-
-        utils.run('sudo mkdir -p %s' % isolate_log_path)
-        _command = ("sudo isolated download -isolated {sha} -I {server}"
-                    " -output-dir {dest_dir} -output-files {log_file}")
-
-        return utils.run(_command.format(
-            sha=isolate_hash, dest_dir=dest_path,
-            log_file=log_file, server=ISOLATESERVER))
-
 
     def install_control_file(self, control_file):
         """Installs the given control file.
diff --git a/site_utils/lxc/container_bucket.py b/site_utils/lxc/container_bucket.py
index 34109ec..3b7cace 100644
--- a/site_utils/lxc/container_bucket.py
+++ b/site_utils/lxc/container_bucket.py
@@ -22,7 +22,7 @@
 from autotest_lib.site_utils.lxc.container_factory import ContainerFactory
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
     from infra_libs import ts_mon
 except ImportError:
     import mock
@@ -34,8 +34,11 @@
     """A wrapper class to interact with containers in a specific container path.
     """
 
-    def __init__(self, container_path=constants.DEFAULT_CONTAINER_PATH,
-                 base_name=constants.BASE, container_factory=None):
+    def __init__(self,
+                 container_path=constants.DEFAULT_CONTAINER_PATH,
+                 base_name=constants.BASE,
+                 container_factory=None,
+                 base_container_path=constants.DEFAULT_BASE_CONTAINER_PATH):
         """Initialize a ContainerBucket.
 
         @param container_path: Path to the directory used to store containers.
@@ -46,6 +49,9 @@
                           arguments. Defaults to value set via
                           AUTOSERV/container_base_name in global config.
         @param container_factory: A factory for creating Containers.
+        @param base_container_path: Path to the directory used for the base container.
+                                    Default is AUTOSERV/base_container_path in
+                                    global config.
         """
         self.container_path = os.path.realpath(container_path)
         if container_factory is not None:
@@ -56,7 +62,7 @@
             # fall back to using the default container path).
             try:
                 base_image_ok = True
-                container = BaseImage(self.container_path, base_name).get()
+                container = BaseImage(base_container_path, base_name).get()
             except error.ContainerError:
                 base_image_ok = False
                 raise
@@ -86,16 +92,21 @@
         logging.debug("Fetching all extant LXC containers")
         info_collection = lxc.get_container_info(self.container_path)
         if force_update:
-          logging.debug("Clearing cached container info")
+            logging.debug("Clearing cached container info")
         containers = {} if force_update else self.container_cache
         for info in info_collection:
-            if info["name"] in containers:
-                continue
+            # The keys of `containers` are container.ContainerId objects, not
+            # strings, so compare string forms to decide whether this info is
+            # already cached.
+            if any(str(k) == info['name'] for k in containers):
+                continue
             container = Container.create_from_existing_dir(self.container_path,
                                                            **info)
             # Active containers have an ID.  Zygotes and base containers, don't.
             if container.id is not None:
                 containers[container.id] = container
+        logging.debug('All containers found: %s',
+                      [(repr(k), str(k)) for k in containers])
         self.container_cache = containers
         return containers
 
@@ -114,9 +125,24 @@
             return self.container_cache[container_id]
 
         container = self.get_all().get(container_id, None)
-        if None == container:
-          logging.debug("Could not find container %s", container_id)
-        return container
+        if container:
+            return container
+
+        logging.debug(
+                "Could not find container by container id object: %s (%s)",
+                container_id, repr(container_id))
+        # When loading container ids from disk, we cast job_id from NoneType
+        # to the string 'None' (crrev/c/1056366). This causes problems if the
+        # input id has not been cast the same way.
+        logging.debug('Trying to get container by the id string: %s',
+                      container_id)
+        for k, v in self.get_all().items():
+            if str(k) == str(container_id):
+                return v
+
+        logging.debug('Could not find container by id string: %s',
+                      container_id)
+        return None
 
 
     def exist(self, container_id):
@@ -160,12 +186,15 @@
             "Force-destroying container %s if it exists, with timeout %s sec",
             name, timeout)
         try:
-          result = lxc_utils.destroy(
-              self.container_path, name,
-              force=True, snapshots=True, ignore_status=True, timeout=timeout
-          )
+            result = lxc_utils.destroy(self.container_path,
+                                       name,
+                                       force=True,
+                                       snapshots=True,
+                                       ignore_status=True,
+                                       timeout=timeout)
         except error.CmdTimeoutError:
-          logging.warning("Force-destruction of container %s timed out.", name)
+            logging.warning("Force-destruction of container %s timed out.",
+                            name)
         logging.debug("Force-destruction exit code %s", result.exit_status)
         return result
 
@@ -174,9 +203,15 @@
     @metrics.SecondsTimerDecorator(
         '%s/setup_test_duration' % constants.STATS_KEY)
     @cleanup_if_fail()
-    def setup_test(self, container_id, job_id, server_package_url, result_path,
-                   control=None, skip_cleanup=False, job_folder=None,
-                   dut_name=None, isolate_hash=None):
+    def setup_test(self,
+                   container_id,
+                   job_id,
+                   server_package_url,
+                   result_path,
+                   control=None,
+                   skip_cleanup=False,
+                   job_folder=None,
+                   dut_name=None):
         """Setup test container for the test job to run.
 
         The setup includes:
@@ -200,9 +235,6 @@
         @param job_folder: Folder name of the job, e.g., 123-debug_user.
         @param dut_name: Name of the dut to run test, used as the hostname of
                          the container. Default is None.
-        @param isolate_hash: String key to look up the isolate package needed
-                             to run test. Default is None, supersedes
-                             server_package_url if present.
         @return: A Container object for the test container.
 
         @raise ContainerError: If container does not exist, or not running.
@@ -230,10 +262,7 @@
         container = self._factory.create_container(container_id)
 
         # Deploy server side package
-        if isolate_hash:
-          container.install_ssp_isolate(isolate_hash)
-        else:
-          container.install_ssp(server_package_url)
+        container.install_ssp(server_package_url)
 
         deploy_config_manager = lxc_config.DeployConfigManager(container)
         deploy_config_manager.deploy_pre_start()
@@ -271,7 +300,7 @@
         utils.run('sudo chown -R root "%s"' % autotest_path)
         utils.run('sudo chgrp -R root "%s"' % autotest_path)
 
-        container.start(wait_for_network=True)
+        container.start(wait_for_network=True, log_dir=result_path)
         deploy_config_manager.deploy_post_start()
 
         # Update the hostname of the test container to be `dut-name`.
diff --git a/site_utils/lxc/container_bucket_unittest.py b/site_utils/lxc/container_bucket_unittest.py
index cb7d7e4..6879a83 100755
--- a/site_utils/lxc/container_bucket_unittest.py
+++ b/site_utils/lxc/container_bucket_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/site_utils/lxc/container_factory.py b/site_utils/lxc/container_factory.py
index e235c60..f2575c8 100644
--- a/site_utils/lxc/container_factory.py
+++ b/site_utils/lxc/container_factory.py
@@ -11,7 +11,7 @@
 from autotest_lib.site_utils.lxc import container
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -102,15 +102,13 @@
                                                new_path=lxc_path,
                                                snapshot=use_snapshot,
                                                cleanup=self._force_cleanup)
-        except error.CmdError:
+        except error.CmdError as e:
             if not use_snapshot:
                 raise
             else:
-                logging.debug(
-                        'Creating snapshot clone failed.'
-                        ' Attempting without snapshot...'
-                        ' This forces cleanup of old cloned container.'
-                )
+                logging.debug('Creating snapshot clone failed: %s', e)
+                logging.debug('Attempting without snapshot...'
+                              ' This forces cleanup of old cloned container.')
                 return self._container_class.clone(src=self._base_container,
                                                    new_name=name,
                                                    new_path=lxc_path,
diff --git a/site_utils/lxc/container_factory_unittest.py b/site_utils/lxc/container_factory_unittest.py
index 5695ce4..173a235 100755
--- a/site_utils/lxc/container_factory_unittest.py
+++ b/site_utils/lxc/container_factory_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/site_utils/lxc/container_unittest.py b/site_utils/lxc/container_unittest.py
index 04b02a1..5c08e28 100755
--- a/site_utils/lxc/container_unittest.py
+++ b/site_utils/lxc/container_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -217,7 +217,7 @@
         """Verifies that files are correctly copied into the container."""
         control_string = 'amazingly few discotheques provide jukeboxes'
         with tempfile.NamedTemporaryFile() as tmpfile:
-            tmpfile.write(control_string)
+            tmpfile.write(control_string.encode('utf-8'))
             tmpfile.flush()
 
             with self.createContainer() as container:
diff --git a/site_utils/lxc/lxc.py b/site_utils/lxc/lxc.py
index b53aab7..a87c895 100644
--- a/site_utils/lxc/lxc.py
+++ b/site_utils/lxc/lxc.py
@@ -2,6 +2,10 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import logging
 import os
 import tempfile
@@ -13,9 +17,11 @@
 from autotest_lib.client.common_lib.cros import retry
 from autotest_lib.server import utils as server_utils
 from autotest_lib.site_utils.lxc import constants
+import six
+from six.moves import zip
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = common_utils.metrics_mock
 
@@ -47,10 +53,11 @@
         # Only LXC 1.x has the second line of '-' as a separator.
         if line.startswith('------'):
             continue
-        info_collection.append(dict(zip(constants.ATTRIBUTES, line.split())))
+        info_collection.append(
+                dict(list(zip(constants.ATTRIBUTES, line.split()))))
     if filters:
         filtered_collection = []
-        for key, value in filters.iteritems():
+        for key, value in six.iteritems(filters):
             for info in info_collection:
                 if key in info and info[key] == value:
                     filtered_collection.append(info)
diff --git a/site_utils/lxc/lxc_config_unittest.py b/site_utils/lxc/lxc_config_unittest.py
index 0da8f54..36ecf34 100755
--- a/site_utils/lxc/lxc_config_unittest.py
+++ b/site_utils/lxc/lxc_config_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -162,7 +162,7 @@
     @param config: A list of config objects.  Each config object is a dictionary
                    which conforms to the format described in config.py.
     """
-    with tempfile.NamedTemporaryFile() as tmp:
+    with tempfile.NamedTemporaryFile(mode='w') as tmp:
         json.dump(config, tmp)
         tmp.flush()
         yield tmp.name
diff --git a/site_utils/lxc/lxc_functional_test.py b/site_utils/lxc/lxc_functional_test.py
index 089200f..694ef0a 100755
--- a/site_utils/lxc/lxc_functional_test.py
+++ b/site_utils/lxc/lxc_functional_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -79,7 +79,7 @@
 
 TEST_SCRIPT_CONTENT_TS_MON = """
 # Test ts_mon metrics can be set up.
-from chromite.lib import ts_mon_config
+from autotest_lib.utils.frozen_chromite.lib import ts_mon_config
 ts_mon_config.SetupTsMonGlobalState('some_test', suppress_exception=False)
 """
 
@@ -95,7 +95,6 @@
     {
         "credentials":"/tmp/service_account_prodx_mon.json",
         "endpoint":"https://xxx.googleapis.com/v1:insert",
-        "use_new_proto": true
     }'''
 
 FAKE_SERVICE_ACCOUNT_CRED_JSON = '''
@@ -174,7 +173,7 @@
 # Content of the test control file.
 TEST_CONTROL_CONTENT = """
 def run(machine):
-    job.run_test('dummy_PassServer',
+    job.run_test('stub_PassServer',
                  host=hosts.create_host(machine))
 
 parallel_simple(run, machines)
diff --git a/site_utils/lxc/unittest_cleanup.py b/site_utils/lxc/unittest_cleanup.py
index 45ecd6e..112a27e 100644
--- a/site_utils/lxc/unittest_cleanup.py
+++ b/site_utils/lxc/unittest_cleanup.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/site_utils/lxc/unittest_http.py b/site_utils/lxc/unittest_http.py
index 22a970e..ddad11a 100644
--- a/site_utils/lxc/unittest_http.py
+++ b/site_utils/lxc/unittest_http.py
@@ -8,8 +8,8 @@
 import os
 import shutil
 import tempfile
-import SocketServer
-import SimpleHTTPServer
+import six.moves.socketserver
+import six.moves.SimpleHTTPServer
 
 import common
 from autotest_lib.client.bin import utils
@@ -35,8 +35,8 @@
         tmpdir = tempfile.mkdtemp()
         shutil.copy(file_path, tmpdir)
 
-        httpd = SocketServer.TCPServer(
-                ('', 0), SimpleHTTPServer.SimpleHTTPRequestHandler)
+        httpd = six.moves.socketserver.TCPServer(
+                ('', 0), six.moves.SimpleHTTPServer.SimpleHTTPRequestHandler)
         port = httpd.socket.getsockname()[1]
 
         # Start the http daemon in the tmpdir to serve the file.
diff --git a/site_utils/lxc/unittest_setup.py b/site_utils/lxc/unittest_setup.py
index 56b1311..b402a86 100644
--- a/site_utils/lxc/unittest_setup.py
+++ b/site_utils/lxc/unittest_setup.py
@@ -44,7 +44,7 @@
     # TODO(dshi): crbug.com/459344 Set remove this enforcement when test
     # container can be unprivileged container.
     if require_sudo and utils.sudo_require_password():
-        logging.warn('SSP requires root privilege to run commands, please '
+        logging.warning('SSP requires root privilege to run commands, please '
                      'grant root access to this process.')
         utils.run('sudo true')
 
diff --git a/site_utils/lxc/utils.py b/site_utils/lxc/utils.py
index 9fb1a59..d0cd3c5 100644
--- a/site_utils/lxc/utils.py
+++ b/site_utils/lxc/utils.py
@@ -76,14 +76,14 @@
         logging.debug('virt-what output: %s', virt)
         return bool(virt)
     except error.CmdError:
-        logging.warn('Package virt-what is not installed, default to assume '
+        logging.warning('Package virt-what is not installed, default to assume '
                      'it is not a virtual machine.')
         return False
 
 
 def destroy(path, name,
             force=True, snapshots=False, ignore_status=False, timeout=-1):
-  """
+    """
   Destroy an LXC container.
 
   @param force: Destroy even if running. Default true.
@@ -94,15 +94,16 @@
 
   @returns: CmdResult object from the shell command
   """
-  cmd = 'sudo lxc-destroy -P %s -n %s' % (path, name)
-  if force:
-    cmd += ' -f'
-  if snapshots:
-    cmd += ' -s'
-  if timeout >= 0:
-    return utils.run(cmd, ignore_status=ignore_status, timeout=timeout)
-  else:
-    return utils.run(cmd, ignore_status=ignore_status)
+    cmd = 'sudo lxc-destroy -P %s -n %s' % (path, name)
+    if force:
+        cmd += ' -f'
+    if snapshots:
+        cmd += ' -s'
+    if timeout >= 0:
+        return utils.run(cmd, ignore_status=ignore_status, timeout=timeout)
+    else:
+        return utils.run(cmd, ignore_status=ignore_status)
+
 
 def clone(lxc_path, src_name, new_path, dst_name, snapshot):
     """Clones a container.
@@ -254,7 +255,7 @@
 
     if combine:
         with tempfile.NamedTemporaryFile() as temp:
-            temp.write("set -e\n")
+            temp.write(b"set -e\n")
             temp.writelines([command+"\n" for command in commands])
             logging.info("Commands to run: %s", str(commands))
             return utils.run("sudo bash %s" % temp.name)
diff --git a/site_utils/lxc_cleanup.py b/site_utils/lxc_cleanup.py
index 954bc0a..be0599b 100755
--- a/site_utils/lxc_cleanup.py
+++ b/site_utils/lxc_cleanup.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/site_utils/mysql_bootstrap.py b/site_utils/mysql_bootstrap.py
index a5b3cad..0448a16 100755
--- a/site_utils/mysql_bootstrap.py
+++ b/site_utils/mysql_bootstrap.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,7 +7,7 @@
 
 The purpose of this module is to grant access to a new-user/host/password
 combination on a remote db server. For example, if we were bootstrapping
-a new autotest master A1 with a remote database server A2, the scheduler
+a new autotest main A1 with a remote database server A2, the scheduler
 running on A1 needs to access the database on A2 with the credentials
 specified in the shadow_config of A1 (A1_user, A1_pass). To achieve this
 we ssh into A2 and execute the grant privileges command for (A1_user,
diff --git a/site_utils/perf_compare.py b/site_utils/perf_compare.py
index 0318d25..109ddc62 100644
--- a/site_utils/perf_compare.py
+++ b/site_utils/perf_compare.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/site_utils/perf_csv_uploader.py b/site_utils/perf_csv_uploader.py
index 9857d3f..94276c7 100755
--- a/site_utils/perf_csv_uploader.py
+++ b/site_utils/perf_csv_uploader.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/site_utils/presubmit_hooks/check_control_files.py b/site_utils/presubmit_hooks/check_control_files.py
index b17fdf6..b29e132 100755
--- a/site_utils/presubmit_hooks/check_control_files.py
+++ b/site_utils/presubmit_hooks/check_control_files.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -108,7 +108,11 @@
                     GetAutotestTestPackages(overlay))
         child = subprocess.Popen(cmd_args, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
-        new_useflags = child.communicate()[0].splitlines()
+        # [bytes] ==> [str]
+        new_useflags = [
+                c.decode() if isinstance(c, bytes) else c
+                for c in child.communicate()[0].splitlines()
+        ]
         if child.returncode == 0:
             useflags = useflags.union(new_useflags)
     return useflags
@@ -288,6 +292,5 @@
             CheckRetry(ctrl_data, test_name)
             CheckDependencies(ctrl_data, test_name)
 
-
 if __name__ == '__main__':
     main()
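The decode added in the first hunk exists because Popen.communicate() returns bytes under Python 3 unless the process is opened in text mode. An equivalent sketch that lets subprocess do the decoding; the command itself is only illustrative:

import subprocess

child = subprocess.Popen(['equery', 'uses', 'some-package'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         universal_newlines=True)  # text mode: str, not bytes
out, _ = child.communicate()
if child.returncode == 0:
    new_useflags = out.splitlines()          # already str, no decode needed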
diff --git a/site_utils/presubmit_hooks/check_json_files.py b/site_utils/presubmit_hooks/check_json_files.py
index 3078ba1..16d9bf9 100755
--- a/site_utils/presubmit_hooks/check_json_files.py
+++ b/site_utils/presubmit_hooks/check_json_files.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -32,10 +32,10 @@
                     json.load(json_file)
             except ValueError:
                 # Re-raise the error to include the file path.
-                print ('Presubmit check `check_json_file` failed. If the file '
-                       'is meant to be malformated, please do not name it as a '
-                       'json file, or you will have to upload the CL using '
-                       '--no-verify')
+                print('Presubmit check `check_json_file` failed. If the file '
+                      'is meant to be malformed, please do not name it as a '
+                      'json file, or you will have to upload the CL using '
+                      '--no-verify')
                 raise InvalidJsonFile('Invalid json file: %s' % f)
 
 
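The reformatted block above implements the usual validate-by-parsing check: json.load() raises ValueError (json.JSONDecodeError is a subclass) on malformed input, and the hook re-raises with the offending path attached. A self-contained sketch of the same idea, with the path as a placeholder:

import json

def check_json_file(path):
    """Raise ValueError if `path` does not contain valid JSON."""
    with open(path) as json_file:
        try:
            json.load(json_file)
        except ValueError as e:
            raise ValueError('Invalid json file: %s (%s)' % (path, e))

check_json_file('some_config.json')          # placeholder path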
diff --git a/site_utils/pubsub_utils_unittest.py b/site_utils/pubsub_utils_unittest.py
index c052a9f..37599ac 100644
--- a/site_utils/pubsub_utils_unittest.py
+++ b/site_utils/pubsub_utils_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2016 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,18 +8,20 @@
 from __future__ import print_function
 import os
 import unittest
+from unittest.mock import patch
+from unittest.mock import MagicMock
 
-import mox
+import common
 
 # TODO(crbug.com/1050892): The unittests rely on apiclient in chromite.
-import chromite  # pylint: disable=unused-import
+import autotest_lib.utils.frozen_chromite  # pylint: disable=unused-import
 
 from apiclient import discovery
 from oauth2client.client import ApplicationDefaultCredentialsError
 from oauth2client.client import GoogleCredentials
 from googleapiclient.errors import UnknownApiNameOrVersion
 
-import pubsub_utils
+from autotest_lib.site_utils import pubsub_utils
 
 _TEST_CLOUD_SERVICE_ACCOUNT_FILE = '/tmp/test-credential'
 
@@ -74,95 +76,99 @@
     return msg_payload
 
 
-class PubSubTests(mox.MoxTestBase):
+class PubSubTests(unittest.TestCase):
     """Tests for pubsub related functios."""
 
+    def setUp(self):
+        patcher = patch.object(os.path, 'isfile')
+        self.isfile_mock = patcher.start()
+        self.addCleanup(patcher.stop)
+        creds_patcher = patch.object(GoogleCredentials, 'from_stream')
+        self.creds_mock = creds_patcher.start()
+        self.addCleanup(creds_patcher.stop)
+
     def test_pubsub_with_no_service_account(self):
         """Test getting the pubsub service"""
-        self.mox.StubOutWithMock(os.path, 'isfile')
-        self.mox.ReplayAll()
         with self.assertRaises(pubsub_utils.PubSubException):
             pubsub_utils.PubSubClient()
-        self.mox.VerifyAll()
 
     def test_pubsub_with_non_existing_service_account(self):
         """Test getting the pubsub service"""
-        self.mox.StubOutWithMock(os.path, 'isfile')
-        os.path.isfile(_TEST_CLOUD_SERVICE_ACCOUNT_FILE).AndReturn(False)
-        self.mox.ReplayAll()
+        self.isfile_mock.return_value = False
         with self.assertRaises(pubsub_utils.PubSubException):
             pubsub_utils.PubSubClient(_TEST_CLOUD_SERVICE_ACCOUNT_FILE)
-        self.mox.VerifyAll()
+        self.isfile_mock.assert_called_with(_TEST_CLOUD_SERVICE_ACCOUNT_FILE)
 
     def test_pubsub_with_corrupted_service_account(self):
         """Test pubsub with corrupted service account."""
-        self.mox.StubOutWithMock(os.path, 'isfile')
-        self.mox.StubOutWithMock(GoogleCredentials, 'from_stream')
-        os.path.isfile(_TEST_CLOUD_SERVICE_ACCOUNT_FILE).AndReturn(True)
-        GoogleCredentials.from_stream(
-            _TEST_CLOUD_SERVICE_ACCOUNT_FILE).AndRaise(
-                ApplicationDefaultCredentialsError())
-        self.mox.ReplayAll()
+
+        self.isfile_mock.return_value = True
+        self.creds_mock.side_effect = ApplicationDefaultCredentialsError
+
         with self.assertRaises(pubsub_utils.PubSubException):
             pubsub_utils.PubSubClient(_TEST_CLOUD_SERVICE_ACCOUNT_FILE)
-        self.mox.VerifyAll()
+
+        self.creds_mock.assert_called_with(_TEST_CLOUD_SERVICE_ACCOUNT_FILE)
+        self.isfile_mock.assert_called_with(_TEST_CLOUD_SERVICE_ACCOUNT_FILE)
 
     def test_pubsub_with_invalid_service_account(self):
         """Test pubsubwith invalid service account."""
-        self.mox.StubOutWithMock(os.path, 'isfile')
-        self.mox.StubOutWithMock(GoogleCredentials, 'from_stream')
-        os.path.isfile(_TEST_CLOUD_SERVICE_ACCOUNT_FILE).AndReturn(True)
-        credentials = self.mox.CreateMock(GoogleCredentials)
-        GoogleCredentials.from_stream(
-            _TEST_CLOUD_SERVICE_ACCOUNT_FILE).AndReturn(credentials)
-        credentials.create_scoped_required().AndReturn(True)
-        credentials.create_scoped(pubsub_utils.PUBSUB_SCOPES).AndReturn(
-            credentials)
-        self.mox.StubOutWithMock(discovery, 'build')
-        discovery.build(
-            pubsub_utils.PUBSUB_SERVICE_NAME,
-            pubsub_utils.PUBSUB_VERSION,
-            credentials=credentials).AndRaise(UnknownApiNameOrVersion())
-        self.mox.ReplayAll()
-        with self.assertRaises(pubsub_utils.PubSubException):
-            msg = _create_sample_message()
-            pubsub_client = pubsub_utils.PubSubClient(
-                _TEST_CLOUD_SERVICE_ACCOUNT_FILE)
-            pubsub_client.publish_notifications('test_topic', [msg])
-        self.mox.VerifyAll()
+        self.isfile_mock.return_value = True
+        credentials = MagicMock(GoogleCredentials)
+        self.creds_mock.return_value = credentials
+
+        credentials.create_scoped_required.return_value = True
+        credentials.create_scoped.return_value = credentials
+
+        with patch.object(discovery, 'build') as discovery_mock:
+            discovery_mock.side_effect = UnknownApiNameOrVersion
+
+            with self.assertRaises(pubsub_utils.PubSubException):
+                msg = _create_sample_message()
+                pubsub_client = pubsub_utils.PubSubClient(
+                        _TEST_CLOUD_SERVICE_ACCOUNT_FILE)
+                pubsub_client.publish_notifications('test_topic', [msg])
+
+            credentials.create_scoped.assert_called_with(
+                    pubsub_utils.PUBSUB_SCOPES)
+            discovery_mock.assert_called_with(pubsub_utils.PUBSUB_SERVICE_NAME,
+                                              pubsub_utils.PUBSUB_VERSION,
+                                              credentials=credentials)
+        self.creds_mock.assert_called_with(_TEST_CLOUD_SERVICE_ACCOUNT_FILE)
+        self.isfile_mock.assert_called_with(_TEST_CLOUD_SERVICE_ACCOUNT_FILE)
 
     def test_publish_notifications(self):
         """Test getting the pubsub service"""
-        self.mox.StubOutWithMock(os.path, 'isfile')
-        self.mox.StubOutWithMock(GoogleCredentials, 'from_stream')
-        os.path.isfile(_TEST_CLOUD_SERVICE_ACCOUNT_FILE).AndReturn(True)
-        credentials = self.mox.CreateMock(GoogleCredentials)
-        GoogleCredentials.from_stream(
-            _TEST_CLOUD_SERVICE_ACCOUNT_FILE).AndReturn(credentials)
-        credentials.create_scoped_required().AndReturn(True)
-        credentials.create_scoped(pubsub_utils.PUBSUB_SCOPES).AndReturn(
-            credentials)
-        self.mox.StubOutWithMock(discovery, 'build')
-        msg = _create_sample_message()
-        discovery.build(
-            pubsub_utils.PUBSUB_SERVICE_NAME,
-            pubsub_utils.PUBSUB_VERSION,
-            credentials=credentials).AndReturn(MockedPubSub(
-                self,
-                'test_topic',
-                msg,
-                pubsub_utils.DEFAULT_PUBSUB_NUM_RETRIES,
-                # use tuple ('123') instead of list just for easy to
-                # write the test.
-                ret_val={'messageIds': ('123')}))
+        self.isfile_mock.return_value = True
+        credentials = MagicMock(GoogleCredentials)
+        self.creds_mock.return_value = credentials
 
-        self.mox.ReplayAll()
-        pubsub_client = pubsub_utils.PubSubClient(
-                _TEST_CLOUD_SERVICE_ACCOUNT_FILE)
-        msg_ids = pubsub_client.publish_notifications('test_topic', [msg])
-        self.assertEquals(('123'), msg_ids)
+        credentials.create_scoped_required.return_value = True
+        credentials.create_scoped.return_value = credentials
 
-        self.mox.VerifyAll()
+        with patch.object(discovery, 'build') as discovery_mock:
+            msg = _create_sample_message()
+            discovery_mock.return_value = MockedPubSub(
+                    self,
+                    'test_topic',
+                    msg,
+                    pubsub_utils.DEFAULT_PUBSUB_NUM_RETRIES,
+                    # Use ('123'), which is just the string '123' rather than
+                    # a list of ids, to keep the test assertion simple.
+                    ret_val={'messageIds': ('123')})
+
+            pubsub_client = pubsub_utils.PubSubClient(
+                    _TEST_CLOUD_SERVICE_ACCOUNT_FILE)
+            msg_ids = pubsub_client.publish_notifications('test_topic', [msg])
+            self.assertEquals(('123'), msg_ids)
+
+            credentials.create_scoped.assert_called_with(
+                    pubsub_utils.PUBSUB_SCOPES)
+            discovery_mock.assert_called_with(pubsub_utils.PUBSUB_SERVICE_NAME,
+                                              pubsub_utils.PUBSUB_VERSION,
+                                              credentials=credentials)
+        self.creds_mock.assert_called_with(_TEST_CLOUD_SERVICE_ACCOUNT_FILE)
+        self.isfile_mock.assert_called_with(_TEST_CLOUD_SERVICE_ACCOUNT_FILE)
 
 
 if __name__ == '__main__':
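The test rewrite above is the standard mox-to-unittest.mock translation: StubOutWithMock plus record/ReplayAll/VerifyAll becomes patch.object() started in setUp() and undone via addCleanup(), stubbed returns and raises become return_value and side_effect, and verification becomes explicit assert_called_with() checks. A condensed sketch of the pattern with hypothetical names:

import os
import unittest
from unittest.mock import patch

class ExampleTest(unittest.TestCase):
    def setUp(self):
        # Replaces self.mox.StubOutWithMock(os.path, 'isfile').
        patcher = patch.object(os.path, 'isfile')
        self.isfile_mock = patcher.start()
        self.addCleanup(patcher.stop)        # no VerifyAll()/UnsetStubs() needed

    def test_missing_file(self):
        # Replaces the mox record/replay dance: set the canned return value...
        self.isfile_mock.return_value = False
        self.assertFalse(os.path.isfile('/no/such/credential'))
        # ...and assert on the call explicitly instead of VerifyAll().
        self.isfile_mock.assert_called_with('/no/such/credential')

if __name__ == '__main__':
    unittest.main()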
diff --git a/site_utils/rpc_flight_recorder.py b/site_utils/rpc_flight_recorder.py
index f86f980..e9a004a 100755
--- a/site_utils/rpc_flight_recorder.py
+++ b/site_utils/rpc_flight_recorder.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -19,9 +19,8 @@
 # import needed to setup host_attributes
 # pylint: disable=unused-import
 from autotest_lib.server import site_host_attributes
-from autotest_lib.site_utils import server_manager_utils
-from chromite.lib import metrics
-from chromite.lib import ts_mon_config
+from autotest_lib.utils.frozen_chromite.lib import metrics
+from autotest_lib.utils.frozen_chromite.lib import ts_mon_config
 
 METRIC_ROOT = 'chromeos/autotest/blackbox/afe_rpc'
 METRIC_RPC_CALL_DURATIONS = METRIC_ROOT + '/rpc_call_durations'
@@ -58,7 +57,9 @@
         start_time = time.time()
 
         logging.debug('Updating Shards')
-        new_shards = set(server_manager_utils.get_shards())
+
+        # server_manager_utils.get_shards() is deprecated.
+        new_shards = set()
 
         with shards_lock:
             current_shards = set(shards)
diff --git a/site_utils/rpc_logserver.py b/site_utils/rpc_logserver.py
index 4e225aa..9e0858f 100755
--- a/site_utils/rpc_logserver.py
+++ b/site_utils/rpc_logserver.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/site_utils/rpm_control_system/BeautifulSoup.py b/site_utils/rpm_control_system/BeautifulSoup.py
deleted file mode 100644
index 7278215..0000000
--- a/site_utils/rpm_control_system/BeautifulSoup.py
+++ /dev/null
@@ -1,2017 +0,0 @@
-"""Beautiful Soup
-Elixir and Tonic
-"The Screen-Scraper's Friend"
-http://www.crummy.com/software/BeautifulSoup/
-
-Beautiful Soup parses a (possibly invalid) XML or HTML document into a
-tree representation. It provides methods and Pythonic idioms that make
-it easy to navigate, search, and modify the tree.
-
-A well-formed XML/HTML document yields a well-formed data
-structure. An ill-formed XML/HTML document yields a correspondingly
-ill-formed data structure. If your document is only locally
-well-formed, you can use this library to find and process the
-well-formed part of it.
-
-Beautiful Soup works with Python 2.2 and up. It has no external
-dependencies, but you'll have more success at converting data to UTF-8
-if you also install these three packages:
-
-* chardet, for auto-detecting character encodings
-  http://chardet.feedparser.org/
-* cjkcodecs and iconv_codec, which add more encodings to the ones supported
-  by stock Python.
-  http://cjkpython.i18n.org/
-
-Beautiful Soup defines classes for two main parsing strategies:
-
- * BeautifulStoneSoup, for parsing XML, SGML, or your domain-specific
-   language that kind of looks like XML.
-
- * BeautifulSoup, for parsing run-of-the-mill HTML code, be it valid
-   or invalid. This class has web browser-like heuristics for
-   obtaining a sensible parse tree in the face of common HTML errors.
-
-Beautiful Soup also defines a class (UnicodeDammit) for autodetecting
-the encoding of an HTML or XML document, and converting it to
-Unicode. Much of this code is taken from Mark Pilgrim's Universal Feed Parser.
-
-For more than you ever wanted to know about Beautiful Soup, see the
-documentation:
-http://www.crummy.com/software/BeautifulSoup/documentation.html
-
-Here, have some legalese:
-
-Copyright (c) 2004-2010, Leonard Richardson
-
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-  * Redistributions of source code must retain the above copyright
-    notice, this list of conditions and the following disclaimer.
-
-  * Redistributions in binary form must reproduce the above
-    copyright notice, this list of conditions and the following
-    disclaimer in the documentation and/or other materials provided
-    with the distribution.
-
-  * Neither the name of the the Beautiful Soup Consortium and All
-    Night Kosher Bakery nor the names of its contributors may be
-    used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE, DAMMIT.
-
-"""
-from __future__ import generators
-
-__author__ = "Leonard Richardson (leonardr@segfault.org)"
-__version__ = "3.2.1"
-__copyright__ = "Copyright (c) 2004-2012 Leonard Richardson"
-__license__ = "New-style BSD"
-
-from sgmllib import SGMLParser, SGMLParseError
-import codecs
-import markupbase
-import types
-import re
-import sgmllib
-try:
-  from htmlentitydefs import name2codepoint
-except ImportError:
-  name2codepoint = {}
-try:
-    set
-except NameError:
-    from sets import Set as set
-
-#These hacks make Beautiful Soup able to parse XML with namespaces
-sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*')
-markupbase._declname_match = re.compile(r'[a-zA-Z][-_.:a-zA-Z0-9]*\s*').match
-
-DEFAULT_OUTPUT_ENCODING = "utf-8"
-
-def _match_css_class(str):
-    """Build a RE to match the given CSS class."""
-    return re.compile(r"(^|.*\s)%s($|\s)" % str)
-
-# First, the classes that represent markup elements.
-
-class PageElement(object):
-    """Contains the navigational information for some part of the page
-    (either a tag or a piece of text)"""
-
-    def _invert(h):
-        "Cheap function to invert a hash."
-        i = {}
-        for k,v in h.items():
-            i[v] = k
-        return i
-
-    XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'",
-                                      "quot" : '"',
-                                      "amp" : "&",
-                                      "lt" : "<",
-                                      "gt" : ">" }
-
-    XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS)
-
-    def setup(self, parent=None, previous=None):
-        """Sets up the initial relations between this element and
-        other elements."""
-        self.parent = parent
-        self.previous = previous
-        self.next = None
-        self.previousSibling = None
-        self.nextSibling = None
-        if self.parent and self.parent.contents:
-            self.previousSibling = self.parent.contents[-1]
-            self.previousSibling.nextSibling = self
-
-    def replaceWith(self, replaceWith):
-        oldParent = self.parent
-        myIndex = self.parent.index(self)
-        if hasattr(replaceWith, "parent")\
-                  and replaceWith.parent is self.parent:
-            # We're replacing this element with one of its siblings.
-            index = replaceWith.parent.index(replaceWith)
-            if index and index < myIndex:
-                # Furthermore, it comes before this element. That
-                # means that when we extract it, the index of this
-                # element will change.
-                myIndex = myIndex - 1
-        self.extract()
-        oldParent.insert(myIndex, replaceWith)
-
-    def replaceWithChildren(self):
-        myParent = self.parent
-        myIndex = self.parent.index(self)
-        self.extract()
-        reversedChildren = list(self.contents)
-        reversedChildren.reverse()
-        for child in reversedChildren:
-            myParent.insert(myIndex, child)
-
-    def extract(self):
-        """Destructively rips this element out of the tree."""
-        if self.parent:
-            try:
-                del self.parent.contents[self.parent.index(self)]
-            except ValueError:
-                pass
-
-        #Find the two elements that would be next to each other if
-        #this element (and any children) hadn't been parsed. Connect
-        #the two.
-        lastChild = self._lastRecursiveChild()
-        nextElement = lastChild.next
-
-        if self.previous:
-            self.previous.next = nextElement
-        if nextElement:
-            nextElement.previous = self.previous
-        self.previous = None
-        lastChild.next = None
-
-        self.parent = None
-        if self.previousSibling:
-            self.previousSibling.nextSibling = self.nextSibling
-        if self.nextSibling:
-            self.nextSibling.previousSibling = self.previousSibling
-        self.previousSibling = self.nextSibling = None
-        return self
-
-    def _lastRecursiveChild(self):
-        "Finds the last element beneath this object to be parsed."
-        lastChild = self
-        while hasattr(lastChild, 'contents') and lastChild.contents:
-            lastChild = lastChild.contents[-1]
-        return lastChild
-
-    def insert(self, position, newChild):
-        if isinstance(newChild, basestring) \
-            and not isinstance(newChild, NavigableString):
-            newChild = NavigableString(newChild)
-
-        position =  min(position, len(self.contents))
-        if hasattr(newChild, 'parent') and newChild.parent is not None:
-            # We're 'inserting' an element that's already one
-            # of this object's children.
-            if newChild.parent is self:
-                index = self.index(newChild)
-                if index > position:
-                    # Furthermore we're moving it further down the
-                    # list of this object's children. That means that
-                    # when we extract this element, our target index
-                    # will jump down one.
-                    position = position - 1
-            newChild.extract()
-
-        newChild.parent = self
-        previousChild = None
-        if position == 0:
-            newChild.previousSibling = None
-            newChild.previous = self
-        else:
-            previousChild = self.contents[position-1]
-            newChild.previousSibling = previousChild
-            newChild.previousSibling.nextSibling = newChild
-            newChild.previous = previousChild._lastRecursiveChild()
-        if newChild.previous:
-            newChild.previous.next = newChild
-
-        newChildsLastElement = newChild._lastRecursiveChild()
-
-        if position >= len(self.contents):
-            newChild.nextSibling = None
-
-            parent = self
-            parentsNextSibling = None
-            while not parentsNextSibling:
-                parentsNextSibling = parent.nextSibling
-                parent = parent.parent
-                if not parent: # This is the last element in the document.
-                    break
-            if parentsNextSibling:
-                newChildsLastElement.next = parentsNextSibling
-            else:
-                newChildsLastElement.next = None
-        else:
-            nextChild = self.contents[position]
-            newChild.nextSibling = nextChild
-            if newChild.nextSibling:
-                newChild.nextSibling.previousSibling = newChild
-            newChildsLastElement.next = nextChild
-
-        if newChildsLastElement.next:
-            newChildsLastElement.next.previous = newChildsLastElement
-        self.contents.insert(position, newChild)
-
-    def append(self, tag):
-        """Appends the given tag to the contents of this tag."""
-        self.insert(len(self.contents), tag)
-
-    def findNext(self, name=None, attrs={}, text=None, **kwargs):
-        """Returns the first item that matches the given criteria and
-        appears after this Tag in the document."""
-        return self._findOne(self.findAllNext, name, attrs, text, **kwargs)
-
-    def findAllNext(self, name=None, attrs={}, text=None, limit=None,
-                    **kwargs):
-        """Returns all items that match the given criteria and appear
-        after this Tag in the document."""
-        return self._findAll(name, attrs, text, limit, self.nextGenerator,
-                             **kwargs)
-
-    def findNextSibling(self, name=None, attrs={}, text=None, **kwargs):
-        """Returns the closest sibling to this Tag that matches the
-        given criteria and appears after this Tag in the document."""
-        return self._findOne(self.findNextSiblings, name, attrs, text,
-                             **kwargs)
-
-    def findNextSiblings(self, name=None, attrs={}, text=None, limit=None,
-                         **kwargs):
-        """Returns the siblings of this Tag that match the given
-        criteria and appear after this Tag in the document."""
-        return self._findAll(name, attrs, text, limit,
-                             self.nextSiblingGenerator, **kwargs)
-    fetchNextSiblings = findNextSiblings # Compatibility with pre-3.x
-
-    def findPrevious(self, name=None, attrs={}, text=None, **kwargs):
-        """Returns the first item that matches the given criteria and
-        appears before this Tag in the document."""
-        return self._findOne(self.findAllPrevious, name, attrs, text, **kwargs)
-
-    def findAllPrevious(self, name=None, attrs={}, text=None, limit=None,
-                        **kwargs):
-        """Returns all items that match the given criteria and appear
-        before this Tag in the document."""
-        return self._findAll(name, attrs, text, limit, self.previousGenerator,
-                           **kwargs)
-    fetchPrevious = findAllPrevious # Compatibility with pre-3.x
-
-    def findPreviousSibling(self, name=None, attrs={}, text=None, **kwargs):
-        """Returns the closest sibling to this Tag that matches the
-        given criteria and appears before this Tag in the document."""
-        return self._findOne(self.findPreviousSiblings, name, attrs, text,
-                             **kwargs)
-
-    def findPreviousSiblings(self, name=None, attrs={}, text=None,
-                             limit=None, **kwargs):
-        """Returns the siblings of this Tag that match the given
-        criteria and appear before this Tag in the document."""
-        return self._findAll(name, attrs, text, limit,
-                             self.previousSiblingGenerator, **kwargs)
-    fetchPreviousSiblings = findPreviousSiblings # Compatibility with pre-3.x
-
-    def findParent(self, name=None, attrs={}, **kwargs):
-        """Returns the closest parent of this Tag that matches the given
-        criteria."""
-        # NOTE: We can't use _findOne because findParents takes a different
-        # set of arguments.
-        r = None
-        l = self.findParents(name, attrs, 1)
-        if l:
-            r = l[0]
-        return r
-
-    def findParents(self, name=None, attrs={}, limit=None, **kwargs):
-        """Returns the parents of this Tag that match the given
-        criteria."""
-
-        return self._findAll(name, attrs, None, limit, self.parentGenerator,
-                             **kwargs)
-    fetchParents = findParents # Compatibility with pre-3.x
-
-    #These methods do the real heavy lifting.
-
-    def _findOne(self, method, name, attrs, text, **kwargs):
-        r = None
-        l = method(name, attrs, text, 1, **kwargs)
-        if l:
-            r = l[0]
-        return r
-
-    def _findAll(self, name, attrs, text, limit, generator, **kwargs):
-        "Iterates over a generator looking for things that match."
-
-        if isinstance(name, SoupStrainer):
-            strainer = name
-        # (Possibly) special case some findAll*(...) searches
-        elif text is None and not limit and not attrs and not kwargs:
-            # findAll*(True)
-            if name is True:
-                return [element for element in generator()
-                        if isinstance(element, Tag)]
-            # findAll*('tag-name')
-            elif isinstance(name, basestring):
-                return [element for element in generator()
-                        if isinstance(element, Tag) and
-                        element.name == name]
-            else:
-                strainer = SoupStrainer(name, attrs, text, **kwargs)
-        # Build a SoupStrainer
-        else:
-            strainer = SoupStrainer(name, attrs, text, **kwargs)
-        results = ResultSet(strainer)
-        g = generator()
-        while True:
-            try:
-                i = g.next()
-            except StopIteration:
-                break
-            if i:
-                found = strainer.search(i)
-                if found:
-                    results.append(found)
-                    if limit and len(results) >= limit:
-                        break
-        return results
-
-    #These Generators can be used to navigate starting from both
-    #NavigableStrings and Tags.
-    def nextGenerator(self):
-        i = self
-        while i is not None:
-            i = i.next
-            yield i
-
-    def nextSiblingGenerator(self):
-        i = self
-        while i is not None:
-            i = i.nextSibling
-            yield i
-
-    def previousGenerator(self):
-        i = self
-        while i is not None:
-            i = i.previous
-            yield i
-
-    def previousSiblingGenerator(self):
-        i = self
-        while i is not None:
-            i = i.previousSibling
-            yield i
-
-    def parentGenerator(self):
-        i = self
-        while i is not None:
-            i = i.parent
-            yield i
-
-    # Utility methods
-    def substituteEncoding(self, str, encoding=None):
-        encoding = encoding or "utf-8"
-        return str.replace("%SOUP-ENCODING%", encoding)
-
-    def toEncoding(self, s, encoding=None):
-        """Encodes an object to a string in some encoding, or to Unicode.
-        ."""
-        if isinstance(s, unicode):
-            if encoding:
-                s = s.encode(encoding)
-        elif isinstance(s, str):
-            if encoding:
-                s = s.encode(encoding)
-            else:
-                s = unicode(s)
-        else:
-            if encoding:
-                s  = self.toEncoding(str(s), encoding)
-            else:
-                s = unicode(s)
-        return s
-
-    BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
-                                           + "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
-                                           + ")")
-
-    def _sub_entity(self, x):
-        """Used with a regular expression to substitute the
-        appropriate XML entity for an XML special character."""
-        return "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";"
-
-
-class NavigableString(unicode, PageElement):
-
-    def __new__(cls, value):
-        """Create a new NavigableString.
-
-        When unpickling a NavigableString, this method is called with
-        the string in DEFAULT_OUTPUT_ENCODING. That encoding needs to be
-        passed in to the superclass's __new__ or the superclass won't know
-        how to handle non-ASCII characters.
-        """
-        if isinstance(value, unicode):
-            return unicode.__new__(cls, value)
-        return unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING)
-
-    def __getnewargs__(self):
-        return (NavigableString.__str__(self),)
-
-    def __getattr__(self, attr):
-        """text.string gives you text. This is for backwards
-        compatibility for Navigable*String, but for CData* it lets you
-        get the string without the CData wrapper."""
-        if attr == 'string':
-            return self
-        else:
-            raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__.__name__, attr)
-
-    def __unicode__(self):
-        return str(self).decode(DEFAULT_OUTPUT_ENCODING)
-
-    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
-        # Substitute outgoing XML entities.
-        data = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, self)
-        if encoding:
-            return data.encode(encoding)
-        else:
-            return data
-
-class CData(NavigableString):
-
-    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
-        return "<![CDATA[%s]]>" % NavigableString.__str__(self, encoding)
-
-class ProcessingInstruction(NavigableString):
-    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
-        output = self
-        if "%SOUP-ENCODING%" in output:
-            output = self.substituteEncoding(output, encoding)
-        return "<?%s?>" % self.toEncoding(output, encoding)
-
-class Comment(NavigableString):
-    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
-        return "<!--%s-->" % NavigableString.__str__(self, encoding)
-
-class Declaration(NavigableString):
-    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
-        return "<!%s>" % NavigableString.__str__(self, encoding)
-
-class Tag(PageElement):
-
-    """Represents a found HTML tag with its attributes and contents."""
-
-    def _convertEntities(self, match):
-        """Used in a call to re.sub to replace HTML, XML, and numeric
-        entities with the appropriate Unicode characters. If HTML
-        entities are being converted, any unrecognized entities are
-        escaped."""
-        x = match.group(1)
-        if self.convertHTMLEntities and x in name2codepoint:
-            return unichr(name2codepoint[x])
-        elif x in self.XML_ENTITIES_TO_SPECIAL_CHARS:
-            if self.convertXMLEntities:
-                return self.XML_ENTITIES_TO_SPECIAL_CHARS[x]
-            else:
-                return u'&%s;' % x
-        elif len(x) > 0 and x[0] == '#':
-            # Handle numeric entities
-            if len(x) > 1 and x[1] == 'x':
-                return unichr(int(x[2:], 16))
-            else:
-                return unichr(int(x[1:]))
-
-        elif self.escapeUnrecognizedEntities:
-            return u'&amp;%s;' % x
-        else:
-            return u'&%s;' % x
-
-    def __init__(self, parser, name, attrs=None, parent=None,
-                 previous=None):
-        "Basic constructor."
-
-        # We don't actually store the parser object: that lets extracted
-        # chunks be garbage-collected
-        self.parserClass = parser.__class__
-        self.isSelfClosing = parser.isSelfClosingTag(name)
-        self.name = name
-        if attrs is None:
-            attrs = []
-        elif isinstance(attrs, dict):
-            attrs = attrs.items()
-        self.attrs = attrs
-        self.contents = []
-        self.setup(parent, previous)
-        self.hidden = False
-        self.containsSubstitutions = False
-        self.convertHTMLEntities = parser.convertHTMLEntities
-        self.convertXMLEntities = parser.convertXMLEntities
-        self.escapeUnrecognizedEntities = parser.escapeUnrecognizedEntities
-
-        # Convert any HTML, XML, or numeric entities in the attribute values.
-        convert = lambda(k, val): (k,
-                                   re.sub("&(#\d+|#x[0-9a-fA-F]+|\w+);",
-                                          self._convertEntities,
-                                          val))
-        self.attrs = map(convert, self.attrs)
-
-    def getString(self):
-        if (len(self.contents) == 1
-            and isinstance(self.contents[0], NavigableString)):
-            return self.contents[0]
-
-    def setString(self, string):
-        """Replace the contents of the tag with a string"""
-        self.clear()
-        self.append(string)
-
-    string = property(getString, setString)
-
-    def getText(self, separator=u""):
-        if not len(self.contents):
-            return u""
-        stopNode = self._lastRecursiveChild().next
-        strings = []
-        current = self.contents[0]
-        while current is not stopNode:
-            if isinstance(current, NavigableString):
-                strings.append(current.strip())
-            current = current.next
-        return separator.join(strings)
-
-    text = property(getText)
-
-    def get(self, key, default=None):
-        """Returns the value of the 'key' attribute for the tag, or
-        the value given for 'default' if it doesn't have that
-        attribute."""
-        return self._getAttrMap().get(key, default)
-
-    def clear(self):
-        """Extract all children."""
-        for child in self.contents[:]:
-            child.extract()
-
-    def index(self, element):
-        for i, child in enumerate(self.contents):
-            if child is element:
-                return i
-        raise ValueError("Tag.index: element not in tag")
-
-    def has_key(self, key):
-        return self._getAttrMap().has_key(key)
-
-    def __getitem__(self, key):
-        """tag[key] returns the value of the 'key' attribute for the tag,
-        and throws an exception if it's not there."""
-        return self._getAttrMap()[key]
-
-    def __iter__(self):
-        "Iterating over a tag iterates over its contents."
-        return iter(self.contents)
-
-    def __len__(self):
-        "The length of a tag is the length of its list of contents."
-        return len(self.contents)
-
-    def __contains__(self, x):
-        return x in self.contents
-
-    def __nonzero__(self):
-        "A tag is non-None even if it has no contents."
-        return True
-
-    def __setitem__(self, key, value):
-        """Setting tag[key] sets the value of the 'key' attribute for the
-        tag."""
-        self._getAttrMap()
-        self.attrMap[key] = value
-        found = False
-        for i in range(0, len(self.attrs)):
-            if self.attrs[i][0] == key:
-                self.attrs[i] = (key, value)
-                found = True
-        if not found:
-            self.attrs.append((key, value))
-        self._getAttrMap()[key] = value
-
-    def __delitem__(self, key):
-        "Deleting tag[key] deletes all 'key' attributes for the tag."
-        for item in self.attrs:
-            if item[0] == key:
-                self.attrs.remove(item)
-                #We don't break because bad HTML can define the same
-                #attribute multiple times.
-            self._getAttrMap()
-            if self.attrMap.has_key(key):
-                del self.attrMap[key]
-
-    def __call__(self, *args, **kwargs):
-        """Calling a tag like a function is the same as calling its
-        findAll() method. Eg. tag('a') returns a list of all the A tags
-        found within this tag."""
-        return apply(self.findAll, args, kwargs)
-
-    def __getattr__(self, tag):
-        #print "Getattr %s.%s" % (self.__class__, tag)
-        if len(tag) > 3 and tag.rfind('Tag') == len(tag)-3:
-            return self.find(tag[:-3])
-        elif tag.find('__') != 0:
-            return self.find(tag)
-        raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__, tag)
-
-    def __eq__(self, other):
-        """Returns true iff this tag has the same name, the same attributes,
-        and the same contents (recursively) as the given tag.
-
-        NOTE: right now this will return false if two tags have the
-        same attributes in a different order. Should this be fixed?"""
-        if other is self:
-            return True
-        if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other):
-            return False
-        for i in range(0, len(self.contents)):
-            if self.contents[i] != other.contents[i]:
-                return False
-        return True
-
-    def __ne__(self, other):
-        """Returns true iff this tag is not identical to the other tag,
-        as defined in __eq__."""
-        return not self == other
-
-    def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING):
-        """Renders this tag as a string."""
-        return self.__str__(encoding)
-
-    def __unicode__(self):
-        return self.__str__(None)
-
-    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING,
-                prettyPrint=False, indentLevel=0):
-        """Returns a string or Unicode representation of this tag and
-        its contents. To get Unicode, pass None for encoding.
-
-        NOTE: since Python's HTML parser consumes whitespace, this
-        method is not certain to reproduce the whitespace present in
-        the original string."""
-
-        encodedName = self.toEncoding(self.name, encoding)
-
-        attrs = []
-        if self.attrs:
-            for key, val in self.attrs:
-                fmt = '%s="%s"'
-                if isinstance(val, basestring):
-                    if self.containsSubstitutions and '%SOUP-ENCODING%' in val:
-                        val = self.substituteEncoding(val, encoding)
-
-                    # The attribute value either:
-                    #
-                    # * Contains no embedded double quotes or single quotes.
-                    #   No problem: we enclose it in double quotes.
-                    # * Contains embedded single quotes. No problem:
-                    #   double quotes work here too.
-                    # * Contains embedded double quotes. No problem:
-                    #   we enclose it in single quotes.
-                    # * Embeds both single _and_ double quotes. This
-                    #   can't happen naturally, but it can happen if
-                    #   you modify an attribute value after parsing
-                    #   the document. Now we have a bit of a
-                    #   problem. We solve it by enclosing the
-                    #   attribute in single quotes, and escaping any
-                    #   embedded single quotes to XML entities.
-                    if '"' in val:
-                        fmt = "%s='%s'"
-                        if "'" in val:
-                            # TODO: replace with apos when
-                            # appropriate.
-                            val = val.replace("'", "&squot;")
-
-                    # Now we're okay w/r/t quotes. But the attribute
-                    # value might also contain angle brackets, or
-                    # ampersands that aren't part of entities. We need
-                    # to escape those to XML entities too.
-                    val = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, val)
-
-                attrs.append(fmt % (self.toEncoding(key, encoding),
-                                    self.toEncoding(val, encoding)))
-        close = ''
-        closeTag = ''
-        if self.isSelfClosing:
-            close = ' /'
-        else:
-            closeTag = '</%s>' % encodedName
-
-        indentTag, indentContents = 0, 0
-        if prettyPrint:
-            indentTag = indentLevel
-            space = (' ' * (indentTag-1))
-            indentContents = indentTag + 1
-        contents = self.renderContents(encoding, prettyPrint, indentContents)
-        if self.hidden:
-            s = contents
-        else:
-            s = []
-            attributeString = ''
-            if attrs:
-                attributeString = ' ' + ' '.join(attrs)
-            if prettyPrint:
-                s.append(space)
-            s.append('<%s%s%s>' % (encodedName, attributeString, close))
-            if prettyPrint:
-                s.append("\n")
-            s.append(contents)
-            if prettyPrint and contents and contents[-1] != "\n":
-                s.append("\n")
-            if prettyPrint and closeTag:
-                s.append(space)
-            s.append(closeTag)
-            if prettyPrint and closeTag and self.nextSibling:
-                s.append("\n")
-            s = ''.join(s)
-        return s
-
-    def decompose(self):
-        """Recursively destroys the contents of this tree."""
-        self.extract()
-        if len(self.contents) == 0:
-            return
-        current = self.contents[0]
-        while current is not None:
-            next = current.next
-            if isinstance(current, Tag):
-                del current.contents[:]
-            current.parent = None
-            current.previous = None
-            current.previousSibling = None
-            current.next = None
-            current.nextSibling = None
-            current = next
-
-    def prettify(self, encoding=DEFAULT_OUTPUT_ENCODING):
-        return self.__str__(encoding, True)
-
-    def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
-                       prettyPrint=False, indentLevel=0):
-        """Renders the contents of this tag as a string in the given
-        encoding. If encoding is None, returns a Unicode string.."""
-        s=[]
-        for c in self:
-            text = None
-            if isinstance(c, NavigableString):
-                text = c.__str__(encoding)
-            elif isinstance(c, Tag):
-                s.append(c.__str__(encoding, prettyPrint, indentLevel))
-            if text and prettyPrint:
-                text = text.strip()
-            if text:
-                if prettyPrint:
-                    s.append(" " * (indentLevel-1))
-                s.append(text)
-                if prettyPrint:
-                    s.append("\n")
-        return ''.join(s)
-
-    #Soup methods
-
-    def find(self, name=None, attrs={}, recursive=True, text=None,
-             **kwargs):
-        """Return only the first child of this Tag matching the given
-        criteria."""
-        r = None
-        l = self.findAll(name, attrs, recursive, text, 1, **kwargs)
-        if l:
-            r = l[0]
-        return r
-    findChild = find
-
-    def findAll(self, name=None, attrs={}, recursive=True, text=None,
-                limit=None, **kwargs):
-        """Extracts a list of Tag objects that match the given
-        criteria.  You can specify the name of the Tag and any
-        attributes you want the Tag to have.
-
-        The value of a key-value pair in the 'attrs' map can be a
-        string, a list of strings, a regular expression object, or a
-        callable that takes a string and returns whether or not the
-        string matches for some custom definition of 'matches'. The
-        same is true of the tag name."""
-        generator = self.recursiveChildGenerator
-        if not recursive:
-            generator = self.childGenerator
-        return self._findAll(name, attrs, text, limit, generator, **kwargs)
-    findChildren = findAll
-
-    # Pre-3.x compatibility methods
-    first = find
-    fetch = findAll
-
-    def fetchText(self, text=None, recursive=True, limit=None):
-        return self.findAll(text=text, recursive=recursive, limit=limit)
-
-    def firstText(self, text=None, recursive=True):
-        return self.find(text=text, recursive=recursive)
-
-    #Private methods
-
-    def _getAttrMap(self):
-        """Initializes a map representation of this tag's attributes,
-        if not already initialized."""
-        if not getattr(self, 'attrMap'):
-            self.attrMap = {}
-            for (key, value) in self.attrs:
-                self.attrMap[key] = value
-        return self.attrMap
-
-    #Generator methods
-    def childGenerator(self):
-        # Just use the iterator from the contents
-        return iter(self.contents)
-
-    def recursiveChildGenerator(self):
-        if not len(self.contents):
-            raise StopIteration
-        stopNode = self._lastRecursiveChild().next
-        current = self.contents[0]
-        while current is not stopNode:
-            yield current
-            current = current.next
-
-
-# Next, a couple classes to represent queries and their results.
-class SoupStrainer:
-    """Encapsulates a number of ways of matching a markup element (tag or
-    text)."""
-
-    def __init__(self, name=None, attrs={}, text=None, **kwargs):
-        self.name = name
-        if isinstance(attrs, basestring):
-            kwargs['class'] = _match_css_class(attrs)
-            attrs = None
-        if kwargs:
-            if attrs:
-                attrs = attrs.copy()
-                attrs.update(kwargs)
-            else:
-                attrs = kwargs
-        self.attrs = attrs
-        self.text = text
-
-    def __str__(self):
-        if self.text:
-            return self.text
-        else:
-            return "%s|%s" % (self.name, self.attrs)
-
-    def searchTag(self, markupName=None, markupAttrs={}):
-        found = None
-        markup = None
-        if isinstance(markupName, Tag):
-            markup = markupName
-            markupAttrs = markup
-        callFunctionWithTagData = callable(self.name) \
-                                and not isinstance(markupName, Tag)
-
-        if (not self.name) \
-               or callFunctionWithTagData \
-               or (markup and self._matches(markup, self.name)) \
-               or (not markup and self._matches(markupName, self.name)):
-            if callFunctionWithTagData:
-                match = self.name(markupName, markupAttrs)
-            else:
-                match = True
-                markupAttrMap = None
-                for attr, matchAgainst in self.attrs.items():
-                    if not markupAttrMap:
-                         if hasattr(markupAttrs, 'get'):
-                            markupAttrMap = markupAttrs
-                         else:
-                            markupAttrMap = {}
-                            for k,v in markupAttrs:
-                                markupAttrMap[k] = v
-                    attrValue = markupAttrMap.get(attr)
-                    if not self._matches(attrValue, matchAgainst):
-                        match = False
-                        break
-            if match:
-                if markup:
-                    found = markup
-                else:
-                    found = markupName
-        return found
-
-    def search(self, markup):
-        #print 'looking for %s in %s' % (self, markup)
-        found = None
-        # If given a list of items, scan it for a text element that
-        # matches.
-        if hasattr(markup, "__iter__") \
-                and not isinstance(markup, Tag):
-            for element in markup:
-                if isinstance(element, NavigableString) \
-                       and self.search(element):
-                    found = element
-                    break
-        # If it's a Tag, make sure its name or attributes match.
-        # Don't bother with Tags if we're searching for text.
-        elif isinstance(markup, Tag):
-            if not self.text:
-                found = self.searchTag(markup)
-        # If it's text, make sure the text matches.
-        elif isinstance(markup, NavigableString) or \
-                 isinstance(markup, basestring):
-            if self._matches(markup, self.text):
-                found = markup
-        else:
-            raise Exception, "I don't know how to match against a %s" \
-                  % markup.__class__
-        return found
-
-    def _matches(self, markup, matchAgainst):
-        #print "Matching %s against %s" % (markup, matchAgainst)
-        result = False
-        if matchAgainst is True:
-            result = markup is not None
-        elif callable(matchAgainst):
-            result = matchAgainst(markup)
-        else:
-            #Custom match methods take the tag as an argument, but all
-            #other ways of matching match the tag name as a string.
-            if isinstance(markup, Tag):
-                markup = markup.name
-            if markup and not isinstance(markup, basestring):
-                markup = unicode(markup)
-            #Now we know that chunk is either a string, or None.
-            if hasattr(matchAgainst, 'match'):
-                # It's a regexp object.
-                result = markup and matchAgainst.search(markup)
-            elif hasattr(matchAgainst, '__iter__'): # list-like
-                result = markup in matchAgainst
-            elif hasattr(matchAgainst, 'items'):
-                result = markup.has_key(matchAgainst)
-            elif matchAgainst and isinstance(markup, basestring):
-                if isinstance(markup, unicode):
-                    matchAgainst = unicode(matchAgainst)
-                else:
-                    matchAgainst = str(matchAgainst)
-
-            if not result:
-                result = matchAgainst == markup
-        return result
-
-class ResultSet(list):
-    """A ResultSet is just a list that keeps track of the SoupStrainer
-    that created it."""
-    def __init__(self, source):
-        list.__init__([])
-        self.source = source
-
-# Now, some helper functions.
-
-def buildTagMap(default, *args):
-    """Turns a list of maps, lists, or scalars into a single map.
-    Used to build the SELF_CLOSING_TAGS, NESTABLE_TAGS, and
-    NESTING_RESET_TAGS maps out of lists and partial maps."""
-    built = {}
-    for portion in args:
-        if hasattr(portion, 'items'):
-            #It's a map. Merge it.
-            for k,v in portion.items():
-                built[k] = v
-        elif hasattr(portion, '__iter__'): # is a list
-            #It's a list. Map each item to the default.
-            for k in portion:
-                built[k] = default
-        else:
-            #It's a scalar. Map it to the default.
-            built[portion] = default
-    return built
-
-# Now, the parser classes.
-
-class BeautifulStoneSoup(Tag, SGMLParser):
-
-    """This class contains the basic parser and search code. It defines
-    a parser that knows nothing about tag behavior except for the
-    following:
-
-      You can't close a tag without closing all the tags it encloses.
-      That is, "<foo><bar></foo>" actually means
-      "<foo><bar></bar></foo>".
-
-    [Another possible explanation is "<foo><bar /></foo>", but since
-    this class defines no SELF_CLOSING_TAGS, it will never use that
-    explanation.]
-
-    This class is useful for parsing XML or made-up markup languages,
-    or when BeautifulSoup makes an assumption counter to what you were
-    expecting."""
-
-    SELF_CLOSING_TAGS = {}
-    NESTABLE_TAGS = {}
-    RESET_NESTING_TAGS = {}
-    QUOTE_TAGS = {}
-    PRESERVE_WHITESPACE_TAGS = []
-
-    MARKUP_MASSAGE = [(re.compile('(<[^<>]*)/>'),
-                       lambda x: x.group(1) + ' />'),
-                      (re.compile('<!\s+([^<>]*)>'),
-                       lambda x: '<!' + x.group(1) + '>')
-                      ]
-
-    ROOT_TAG_NAME = u'[document]'
-
-    HTML_ENTITIES = "html"
-    XML_ENTITIES = "xml"
-    XHTML_ENTITIES = "xhtml"
-    # TODO: This only exists for backwards-compatibility
-    ALL_ENTITIES = XHTML_ENTITIES
-
-    # Used when determining whether a text node is all whitespace and
-    # can be replaced with a single space. A text node that contains
-    # fancy Unicode spaces (usually non-breaking) should be left
-    # alone.
-    STRIP_ASCII_SPACES = { 9: None, 10: None, 12: None, 13: None, 32: None, }
-
-    def __init__(self, markup="", parseOnlyThese=None, fromEncoding=None,
-                 markupMassage=True, smartQuotesTo=XML_ENTITIES,
-                 convertEntities=None, selfClosingTags=None, isHTML=False):
-        """The Soup object is initialized as the 'root tag', and the
-        provided markup (which can be a string or a file-like object)
-        is fed into the underlying parser.
-
-        sgmllib will process most bad HTML, and the BeautifulSoup
-        class has some tricks for dealing with some HTML that kills
-        sgmllib, but Beautiful Soup can nonetheless choke or lose data
-        if your data uses self-closing tags or declarations
-        incorrectly.
-
-        By default, Beautiful Soup uses regexes to sanitize input,
-        avoiding the vast majority of these problems. If the problems
-        don't apply to you, pass in False for markupMassage, and
-        you'll get better performance.
-
-        The default parser massage techniques fix the two most common
-        instances of invalid HTML that choke sgmllib:
-
-         <br/> (No space between name of closing tag and tag close)
-         <! --Comment--> (Extraneous whitespace in declaration)
-
-        You can pass in a custom list of (RE object, replace method)
-        tuples to get Beautiful Soup to scrub your input the way you
-        want."""
-
-        self.parseOnlyThese = parseOnlyThese
-        self.fromEncoding = fromEncoding
-        self.smartQuotesTo = smartQuotesTo
-        self.convertEntities = convertEntities
-        # Set the rules for how we'll deal with the entities we
-        # encounter
-        if self.convertEntities:
-            # It doesn't make sense to convert encoded characters to
-            # entities even while you're converting entities to Unicode.
-            # Just convert it all to Unicode.
-            self.smartQuotesTo = None
-            if convertEntities == self.HTML_ENTITIES:
-                self.convertXMLEntities = False
-                self.convertHTMLEntities = True
-                self.escapeUnrecognizedEntities = True
-            elif convertEntities == self.XHTML_ENTITIES:
-                self.convertXMLEntities = True
-                self.convertHTMLEntities = True
-                self.escapeUnrecognizedEntities = False
-            elif convertEntities == self.XML_ENTITIES:
-                self.convertXMLEntities = True
-                self.convertHTMLEntities = False
-                self.escapeUnrecognizedEntities = False
-        else:
-            self.convertXMLEntities = False
-            self.convertHTMLEntities = False
-            self.escapeUnrecognizedEntities = False
-
-        self.instanceSelfClosingTags = buildTagMap(None, selfClosingTags)
-        SGMLParser.__init__(self)
-
-        if hasattr(markup, 'read'):        # It's a file-type object.
-            markup = markup.read()
-        self.markup = markup
-        self.markupMassage = markupMassage
-        try:
-            self._feed(isHTML=isHTML)
-        except StopParsing:
-            pass
-        self.markup = None                 # The markup can now be GCed
-
-    def convert_charref(self, name):
-        """This method fixes a bug in Python's SGMLParser."""
-        try:
-            n = int(name)
-        except ValueError:
-            return
-        if not 0 <= n <= 127 : # ASCII ends at 127, not 255
-            return
-        return self.convert_codepoint(n)
-
-    def _feed(self, inDocumentEncoding=None, isHTML=False):
-        # Convert the document to Unicode.
-        markup = self.markup
-        if isinstance(markup, unicode):
-            if not hasattr(self, 'originalEncoding'):
-                self.originalEncoding = None
-        else:
-            dammit = UnicodeDammit\
-                     (markup, [self.fromEncoding, inDocumentEncoding],
-                      smartQuotesTo=self.smartQuotesTo, isHTML=isHTML)
-            markup = dammit.unicode
-            self.originalEncoding = dammit.originalEncoding
-            self.declaredHTMLEncoding = dammit.declaredHTMLEncoding
-        if markup:
-            if self.markupMassage:
-                if not hasattr(self.markupMassage, "__iter__"):
-                    self.markupMassage = self.MARKUP_MASSAGE
-                for fix, m in self.markupMassage:
-                    markup = fix.sub(m, markup)
-                # TODO: We get rid of markupMassage so that the
-                # soup object can be deepcopied later on. Some
-                # Python installations can't copy regexes. If anyone
-                # was relying on the existence of markupMassage, this
-                # might cause problems.
-                del(self.markupMassage)
-        self.reset()
-
-        SGMLParser.feed(self, markup)
-        # Close out any unfinished strings and close all the open tags.
-        self.endData()
-        while self.currentTag.name != self.ROOT_TAG_NAME:
-            self.popTag()
-
-    def __getattr__(self, methodName):
-        """This method routes method call requests to either the SGMLParser
-        superclass or the Tag superclass, depending on the method name."""
-        #print "__getattr__ called on %s.%s" % (self.__class__, methodName)
-
-        if methodName.startswith('start_') or methodName.startswith('end_') \
-               or methodName.startswith('do_'):
-            return SGMLParser.__getattr__(self, methodName)
-        elif not methodName.startswith('__'):
-            return Tag.__getattr__(self, methodName)
-        else:
-            raise AttributeError
-
-    def isSelfClosingTag(self, name):
-        """Returns true iff the given string is the name of a
-        self-closing tag according to this parser."""
-        return self.SELF_CLOSING_TAGS.has_key(name) \
-               or self.instanceSelfClosingTags.has_key(name)
-
-    def reset(self):
-        Tag.__init__(self, self, self.ROOT_TAG_NAME)
-        self.hidden = 1
-        SGMLParser.reset(self)
-        self.currentData = []
-        self.currentTag = None
-        self.tagStack = []
-        self.quoteStack = []
-        self.pushTag(self)
-
-    def popTag(self):
-        tag = self.tagStack.pop()
-
-        #print "Pop", tag.name
-        if self.tagStack:
-            self.currentTag = self.tagStack[-1]
-        return self.currentTag
-
-    def pushTag(self, tag):
-        #print "Push", tag.name
-        if self.currentTag:
-            self.currentTag.contents.append(tag)
-        self.tagStack.append(tag)
-        self.currentTag = self.tagStack[-1]
-
-    def endData(self, containerClass=NavigableString):
-        if self.currentData:
-            currentData = u''.join(self.currentData)
-            if (currentData.translate(self.STRIP_ASCII_SPACES) == '' and
-                not set([tag.name for tag in self.tagStack]).intersection(
-                    self.PRESERVE_WHITESPACE_TAGS)):
-                if '\n' in currentData:
-                    currentData = '\n'
-                else:
-                    currentData = ' '
-            self.currentData = []
-            if self.parseOnlyThese and len(self.tagStack) <= 1 and \
-                   (not self.parseOnlyThese.text or \
-                    not self.parseOnlyThese.search(currentData)):
-                return
-            o = containerClass(currentData)
-            o.setup(self.currentTag, self.previous)
-            if self.previous:
-                self.previous.next = o
-            self.previous = o
-            self.currentTag.contents.append(o)
-
-
-    def _popToTag(self, name, inclusivePop=True):
-        """Pops the tag stack up to and including the most recent
-        instance of the given tag. If inclusivePop is false, pops the tag
-        stack up to but *not* including the most recent instance of
-        the given tag."""
-        #print "Popping to %s" % name
-        if name == self.ROOT_TAG_NAME:
-            return
-
-        numPops = 0
-        mostRecentTag = None
-        for i in range(len(self.tagStack)-1, 0, -1):
-            if name == self.tagStack[i].name:
-                numPops = len(self.tagStack)-i
-                break
-        if not inclusivePop:
-            numPops = numPops - 1
-
-        for i in range(0, numPops):
-            mostRecentTag = self.popTag()
-        return mostRecentTag
-
-    def _smartPop(self, name):
-
-        """We need to pop up to the previous tag of this type, unless
-        one of this tag's nesting reset triggers comes between this
-        tag and the previous tag of this type, OR unless this tag is a
-        generic nesting trigger and another generic nesting trigger
-        comes between this tag and the previous tag of this type.
-
-        Examples:
-         <p>Foo<b>Bar *<p>* should pop to 'p', not 'b'.
-         <p>Foo<table>Bar *<p>* should pop to 'table', not 'p'.
-         <p>Foo<table><tr>Bar *<p>* should pop to 'tr', not 'p'.
-
-         <li><ul><li> *<li>* should pop to 'ul', not the first 'li'.
-         <tr><table><tr> *<tr>* should pop to 'table', not the first 'tr'
-         <td><tr><td> *<td>* should pop to 'tr', not the first 'td'
-        """
-
-        nestingResetTriggers = self.NESTABLE_TAGS.get(name)
-        isNestable = nestingResetTriggers != None
-        isResetNesting = self.RESET_NESTING_TAGS.has_key(name)
-        popTo = None
-        inclusive = True
-        for i in range(len(self.tagStack)-1, 0, -1):
-            p = self.tagStack[i]
-            if (not p or p.name == name) and not isNestable:
-                #Non-nestable tags get popped to the top or to their
-                #last occurrence.
-                popTo = name
-                break
-            if (nestingResetTriggers is not None
-                and p.name in nestingResetTriggers) \
-                or (nestingResetTriggers is None and isResetNesting
-                    and self.RESET_NESTING_TAGS.has_key(p.name)):
-
-                #If we encounter one of the nesting reset triggers
-                #peculiar to this tag, or we encounter another tag
-                #that causes nesting to reset, pop up to but not
-                #including that tag.
-                popTo = p.name
-                inclusive = False
-                break
-            p = p.parent
-        if popTo:
-            self._popToTag(popTo, inclusive)
-
-    def unknown_starttag(self, name, attrs, selfClosing=0):
-        #print "Start tag %s: %s" % (name, attrs)
-        if self.quoteStack:
-            #This is not a real tag.
-            #print "<%s> is not real!" % name
-            attrs = ''.join([' %s="%s"' % (x, y) for x, y in attrs])
-            self.handle_data('<%s%s>' % (name, attrs))
-            return
-        self.endData()
-
-        if not self.isSelfClosingTag(name) and not selfClosing:
-            self._smartPop(name)
-
-        if self.parseOnlyThese and len(self.tagStack) <= 1 \
-               and (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)):
-            return
-
-        tag = Tag(self, name, attrs, self.currentTag, self.previous)
-        if self.previous:
-            self.previous.next = tag
-        self.previous = tag
-        self.pushTag(tag)
-        if selfClosing or self.isSelfClosingTag(name):
-            self.popTag()
-        if name in self.QUOTE_TAGS:
-            #print "Beginning quote (%s)" % name
-            self.quoteStack.append(name)
-            self.literal = 1
-        return tag
-
-    def unknown_endtag(self, name):
-        #print "End tag %s" % name
-        if self.quoteStack and self.quoteStack[-1] != name:
-            #This is not a real end tag.
-            #print "</%s> is not real!" % name
-            self.handle_data('</%s>' % name)
-            return
-        self.endData()
-        self._popToTag(name)
-        if self.quoteStack and self.quoteStack[-1] == name:
-            self.quoteStack.pop()
-            self.literal = (len(self.quoteStack) > 0)
-
-    def handle_data(self, data):
-        self.currentData.append(data)
-
-    def _toStringSubclass(self, text, subclass):
-        """Adds a certain piece of text to the tree as a NavigableString
-        subclass."""
-        self.endData()
-        self.handle_data(text)
-        self.endData(subclass)
-
-    def handle_pi(self, text):
-        """Handle a processing instruction as a ProcessingInstruction
-        object, possibly one with a %SOUP-ENCODING% slot into which an
-        encoding will be plugged later."""
-        if text[:3] == "xml":
-            text = u"xml version='1.0' encoding='%SOUP-ENCODING%'"
-        self._toStringSubclass(text, ProcessingInstruction)
-
-    def handle_comment(self, text):
-        "Handle comments as Comment objects."
-        self._toStringSubclass(text, Comment)
-
-    def handle_charref(self, ref):
-        "Handle character references as data."
-        if self.convertEntities:
-            data = unichr(int(ref))
-        else:
-            data = '&#%s;' % ref
-        self.handle_data(data)
-
-    def handle_entityref(self, ref):
-        """Handle entity references as data, possibly converting known
-        HTML and/or XML entity references to the corresponding Unicode
-        characters."""
-        data = None
-        if self.convertHTMLEntities:
-            try:
-                data = unichr(name2codepoint[ref])
-            except KeyError:
-                pass
-
-        if not data and self.convertXMLEntities:
-                data = self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref)
-
-        if not data and self.convertHTMLEntities and \
-            not self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref):
-                # TODO: We've got a problem here. We're told this is
-                # an entity reference, but it's not an XML entity
-                # reference or an HTML entity reference. Nonetheless,
-                # the logical thing to do is to pass it through as an
-                # unrecognized entity reference.
-                #
-                # Except: when the input is "&carol;" this function
-                # will be called with input "carol". When the input is
-                # "AT&T", this function will be called with input
-                # "T". We have no way of knowing whether a semicolon
-                # was present originally, so we don't know whether
-                # this is an unknown entity or just a misplaced
-                # ampersand.
-                #
-                # The more common case is a misplaced ampersand, so I
-                # escape the ampersand and omit the trailing semicolon.
-                data = "&amp;%s" % ref
-        if not data:
-            # This case is different from the one above, because we
-            # haven't already gone through a supposedly comprehensive
-            # mapping of entities to Unicode characters. We might not
-            # have gone through any mapping at all. So the chances are
-            # very high that this is a real entity, and not a
-            # misplaced ampersand.
-            data = "&%s;" % ref
-        self.handle_data(data)
-
-    def handle_decl(self, data):
-        "Handle DOCTYPEs and the like as Declaration objects."
-        self._toStringSubclass(data, Declaration)
-
-    def parse_declaration(self, i):
-        """Treat a bogus SGML declaration as raw data. Treat a CDATA
-        declaration as a CData object."""
-        j = None
-        if self.rawdata[i:i+9] == '<![CDATA[':
-             k = self.rawdata.find(']]>', i)
-             if k == -1:
-                 k = len(self.rawdata)
-             data = self.rawdata[i+9:k]
-             j = k+3
-             self._toStringSubclass(data, CData)
-        else:
-            try:
-                j = SGMLParser.parse_declaration(self, i)
-            except SGMLParseError:
-                toHandle = self.rawdata[i:]
-                self.handle_data(toHandle)
-                j = i + len(toHandle)
-        return j
-
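The closing behaviour described in the BeautifulStoneSoup docstring can be seen with a tiny usage sketch (assuming this module is importable under a Python 2 interpreter; the prettify() output shown is approximate):

    soup = BeautifulStoneSoup('<foo><bar></foo>')
    print soup.prettify()
    # <foo>
    #  <bar>
    #  </bar>
    # </foo>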
-class BeautifulSoup(BeautifulStoneSoup):
-
-    """This parser knows the following facts about HTML:
-
-    * Some tags have no closing tag and should be interpreted as being
-      closed as soon as they are encountered.
-
-    * The text inside some tags (ie. 'script') may contain tags which
-      are not really part of the document and which should be parsed
-      as text, not tags. If you want to parse the text as tags, you can
-      always fetch it and parse it explicitly.
-
-    * Tag nesting rules:
-
-      Most tags can't be nested at all. For instance, the occurrence of
-      a <p> tag should implicitly close the previous <p> tag.
-
-       <p>Para1<p>Para2
-        should be transformed into:
-       <p>Para1</p><p>Para2
-
-      Some tags can be nested arbitrarily. For instance, the occurrence
-      of a <blockquote> tag should _not_ implicitly close the previous
-      <blockquote> tag.
-
-       Alice said: <blockquote>Bob said: <blockquote>Blah
-        should NOT be transformed into:
-       Alice said: <blockquote>Bob said: </blockquote><blockquote>Blah
-
-      Some tags can be nested, but the nesting is reset by the
-      interposition of other tags. For instance, a <tr> tag should
-      implicitly close the previous <tr> tag within the same <table>,
-      but not close a <tr> tag in another table.
-
-       <table><tr>Blah<tr>Blah
-        should be transformed into:
-       <table><tr>Blah</tr><tr>Blah
-        but,
-       <tr>Blah<table><tr>Blah
-        should NOT be transformed into
-       <tr>Blah<table></tr><tr>Blah
-
-    Differing assumptions about tag nesting rules are a major source
-    of problems with the BeautifulSoup class. If BeautifulSoup is not
-    treating as nestable a tag your page author treats as nestable,
-    try ICantBelieveItsBeautifulSoup, MinimalSoup, or
-    BeautifulStoneSoup before writing your own subclass."""
-
-    def __init__(self, *args, **kwargs):
-        if not kwargs.has_key('smartQuotesTo'):
-            kwargs['smartQuotesTo'] = self.HTML_ENTITIES
-        kwargs['isHTML'] = True
-        BeautifulStoneSoup.__init__(self, *args, **kwargs)
-
-    SELF_CLOSING_TAGS = buildTagMap(None,
-                                    ('br' , 'hr', 'input', 'img', 'meta',
-                                    'spacer', 'link', 'frame', 'base', 'col'))
-
-    PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea'])
-
-    QUOTE_TAGS = {'script' : None, 'textarea' : None}
-
-    #According to the HTML standard, each of these inline tags can
-    #contain another tag of the same type. Furthermore, it's common
-    #to actually use these tags this way.
-    NESTABLE_INLINE_TAGS = ('span', 'font', 'q', 'object', 'bdo', 'sub', 'sup',
-                            'center')
-
-    #According to the HTML standard, these block tags can contain
-    #another tag of the same type. Furthermore, it's common
-    #to actually use these tags this way.
-    NESTABLE_BLOCK_TAGS = ('blockquote', 'div', 'fieldset', 'ins', 'del')
-
-    #Lists can contain other lists, but there are restrictions.
-    NESTABLE_LIST_TAGS = { 'ol' : [],
-                           'ul' : [],
-                           'li' : ['ul', 'ol'],
-                           'dl' : [],
-                           'dd' : ['dl'],
-                           'dt' : ['dl'] }
-
-    #Tables can contain other tables, but there are restrictions.
-    NESTABLE_TABLE_TAGS = {'table' : [],
-                           'tr' : ['table', 'tbody', 'tfoot', 'thead'],
-                           'td' : ['tr'],
-                           'th' : ['tr'],
-                           'thead' : ['table'],
-                           'tbody' : ['table'],
-                           'tfoot' : ['table'],
-                           }
-
-    NON_NESTABLE_BLOCK_TAGS = ('address', 'form', 'p', 'pre')
-
-    #If one of these tags is encountered, all tags up to the next tag of
-    #this type are popped.
-    RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript',
-                                     NON_NESTABLE_BLOCK_TAGS,
-                                     NESTABLE_LIST_TAGS,
-                                     NESTABLE_TABLE_TAGS)
-
-    NESTABLE_TAGS = buildTagMap([], NESTABLE_INLINE_TAGS, NESTABLE_BLOCK_TAGS,
-                                NESTABLE_LIST_TAGS, NESTABLE_TABLE_TAGS)
-
-    # Used to detect the charset in a META tag; see start_meta
-    CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)", re.M)
-
-    def start_meta(self, attrs):
-        """Beautiful Soup can detect a charset included in a META tag,
-        try to convert the document to that charset, and re-parse the
-        document from the beginning."""
-        httpEquiv = None
-        contentType = None
-        contentTypeIndex = None
-        tagNeedsEncodingSubstitution = False
-
-        for i in range(0, len(attrs)):
-            key, value = attrs[i]
-            key = key.lower()
-            if key == 'http-equiv':
-                httpEquiv = value
-            elif key == 'content':
-                contentType = value
-                contentTypeIndex = i
-
-        if httpEquiv and contentType: # It's an interesting meta tag.
-            match = self.CHARSET_RE.search(contentType)
-            if match:
-                if (self.declaredHTMLEncoding is not None or
-                    self.originalEncoding == self.fromEncoding):
-                    # An HTML encoding was sniffed while converting
-                    # the document to Unicode, or an HTML encoding was
-                    # sniffed during a previous pass through the
-                    # document, or an encoding was specified
-                    # explicitly and it worked. Rewrite the meta tag.
-                    def rewrite(match):
-                        return match.group(1) + "%SOUP-ENCODING%"
-                    newAttr = self.CHARSET_RE.sub(rewrite, contentType)
-                    attrs[contentTypeIndex] = (attrs[contentTypeIndex][0],
-                                               newAttr)
-                    tagNeedsEncodingSubstitution = True
-                else:
-                    # This is our first pass through the document.
-                    # Go through it again with the encoding information.
-                    newCharset = match.group(3)
-                    if newCharset and newCharset != self.originalEncoding:
-                        self.declaredHTMLEncoding = newCharset
-                        self._feed(self.declaredHTMLEncoding)
-                        raise StopParsing
-                    pass
-        tag = self.unknown_starttag("meta", attrs)
-        if tag and tagNeedsEncodingSubstitution:
-            tag.containsSubstitutions = True
-
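A short usage sketch of the nesting rules described in the class docstring above (Python 2; the markup is made up and the prettify() output is approximate):

    soup = BeautifulSoup('<p>Para1<p>Para2')
    print soup.prettify()
    # <p>
    #  Para1
    # </p>
    # <p>
    #  Para2
    # </p>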
-class StopParsing(Exception):
-    pass
-
-class ICantBelieveItsBeautifulSoup(BeautifulSoup):
-
-    """The BeautifulSoup class is oriented towards skipping over
-    common HTML errors like unclosed tags. However, sometimes it makes
-    errors of its own. For instance, consider this fragment:
-
-     <b>Foo<b>Bar</b></b>
-
-    This is perfectly valid (if bizarre) HTML. However, the
-    BeautifulSoup class will implicitly close the first b tag when it
-    encounters the second 'b'. It will think the author wrote
-    "<b>Foo<b>Bar", and didn't close the first 'b' tag, because
-    there's no real-world reason to bold something that's already
-    bold. When it encounters '</b></b>' it will close two more 'b'
-    tags, for a grand total of three tags closed instead of two. This
-    can throw off the rest of your document structure. The same is
-    true of a number of other tags, listed below.
-
-    It's much more common for someone to forget to close a 'b' tag
-    than to actually use nested 'b' tags, and the BeautifulSoup class
-    handles the common case. This class handles the not-so-common
-    case: where you can't believe someone wrote what they did, but
-    it's valid HTML and BeautifulSoup screwed up by assuming it
-    wouldn't be."""
-
-    I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS = \
-     ('em', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'strong',
-      'cite', 'code', 'dfn', 'kbd', 'samp', 'strong', 'var', 'b',
-      'big')
-
-    I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS = ('noscript',)
-
-    NESTABLE_TAGS = buildTagMap([], BeautifulSoup.NESTABLE_TAGS,
-                                I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS,
-                                I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS)
-
-class MinimalSoup(BeautifulSoup):
-    """The MinimalSoup class is for parsing HTML that contains
-    pathologically bad markup. It makes no assumptions about tag
-    nesting, but it does know which tags are self-closing, that
-    <script> tags contain Javascript and should not be parsed, that
-    META tags may contain encoding information, and so on.
-
-    This also makes it better for subclassing than BeautifulStoneSoup
-    or BeautifulSoup."""
-
-    RESET_NESTING_TAGS = buildTagMap('noscript')
-    NESTABLE_TAGS = {}
-
-class BeautifulSOAP(BeautifulStoneSoup):
-    """This class will push a tag with only a single string child into
-    the tag's parent as an attribute. The attribute's name is the tag
-    name, and the value is the string child. An example should give
-    the flavor of the change:
-
-    <foo><bar>baz</bar></foo>
-     =>
-    <foo bar="baz"><bar>baz</bar></foo>
-
-    You can then access fooTag['bar'] instead of fooTag.barTag.string.
-
-    This is, of course, useful for scraping structures that tend to
-    use subelements instead of attributes, such as SOAP messages. Note
-    that it modifies its input, so don't print the modified version
-    out.
-
-    I'm not sure how many people really want to use this class; let me
-    know if you do. Mainly I like the name."""
-
-    def popTag(self):
-        if len(self.tagStack) > 1:
-            tag = self.tagStack[-1]
-            parent = self.tagStack[-2]
-            parent._getAttrMap()
-            if (isinstance(tag, Tag) and len(tag.contents) == 1 and
-                isinstance(tag.contents[0], NavigableString) and
-                not parent.attrMap.has_key(tag.name)):
-                parent[tag.name] = tag.contents[0]
-        BeautifulStoneSoup.popTag(self)
-
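For illustration, the attribute-pushing behaviour of BeautifulSOAP on the markup from its own docstring (a sketch, assuming Python 2 and this module in scope):

    soap = BeautifulSOAP('<foo><bar>baz</bar></foo>')
    print soap.fooTag['bar']           # baz
    print soap.fooTag.barTag.string    # the <bar> child is still present: baz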
-#Enterprise class names! It has come to our attention that some people
-#think the names of the Beautiful Soup parser classes are too silly
-#and "unprofessional" for use in enterprise screen-scraping. We feel
-#your pain! For such-minded folk, the Beautiful Soup Consortium And
-#All-Night Kosher Bakery recommends renaming this file to
-#"RobustParser.py" (or, in cases of extreme enterprisiness,
-#"RobustParserBeanInterface.class") and using the following
-#enterprise-friendly class aliases:
-class RobustXMLParser(BeautifulStoneSoup):
-    pass
-class RobustHTMLParser(BeautifulSoup):
-    pass
-class RobustWackAssHTMLParser(ICantBelieveItsBeautifulSoup):
-    pass
-class RobustInsanelyWackAssHTMLParser(MinimalSoup):
-    pass
-class SimplifyingSOAPParser(BeautifulSOAP):
-    pass
-
-######################################################
-#
-# Bonus library: Unicode, Dammit
-#
-# This class forces XML data into a standard format (usually to UTF-8
-# or Unicode).  It is heavily based on code from Mark Pilgrim's
-# Universal Feed Parser. It does not rewrite the XML or HTML to
-# reflect a new encoding: that happens in BeautifulStoneSoup.handle_pi
-# (XML) and BeautifulSoup.start_meta (HTML).
-
-# Autodetects character encodings.
-# Download from http://chardet.feedparser.org/
-try:
-    import chardet
-#    import chardet.constants
-#    chardet.constants._debug = 1
-except ImportError:
-    chardet = None
-
-# cjkcodecs and iconv_codec make Python know about more character encodings.
-# Both are available from http://cjkpython.i18n.org/
-# They're built in if you use Python 2.4.
-try:
-    import cjkcodecs.aliases
-except ImportError:
-    pass
-try:
-    import iconv_codec
-except ImportError:
-    pass
-
-class UnicodeDammit:
-    """A class for detecting the encoding of a *ML document and
-    converting it to a Unicode string. If the source encoding is
-    windows-1252, can replace MS smart quotes with their HTML or XML
-    equivalents."""
-
-    # This dictionary maps commonly seen values for "charset" in HTML
-    # meta tags to the corresponding Python codec names. It only covers
-    # values that aren't in Python's aliases and can't be determined
-    # by the heuristics in find_codec.
-    CHARSET_ALIASES = { "macintosh" : "mac-roman",
-                        "x-sjis" : "shift-jis" }
-
-    def __init__(self, markup, overrideEncodings=[],
-                 smartQuotesTo='xml', isHTML=False):
-        self.declaredHTMLEncoding = None
-        self.markup, documentEncoding, sniffedEncoding = \
-                     self._detectEncoding(markup, isHTML)
-        self.smartQuotesTo = smartQuotesTo
-        self.triedEncodings = []
-        if markup == '' or isinstance(markup, unicode):
-            self.originalEncoding = None
-            self.unicode = unicode(markup)
-            return
-
-        u = None
-        for proposedEncoding in overrideEncodings:
-            u = self._convertFrom(proposedEncoding)
-            if u: break
-        if not u:
-            for proposedEncoding in (documentEncoding, sniffedEncoding):
-                u = self._convertFrom(proposedEncoding)
-                if u: break
-
-        # If no luck and we have auto-detection library, try that:
-        if not u and chardet and not isinstance(self.markup, unicode):
-            u = self._convertFrom(chardet.detect(self.markup)['encoding'])
-
-        # As a last resort, try utf-8 and windows-1252:
-        if not u:
-            for proposed_encoding in ("utf-8", "windows-1252"):
-                u = self._convertFrom(proposed_encoding)
-                if u: break
-
-        self.unicode = u
-        if not u: self.originalEncoding = None
-
-    def _subMSChar(self, orig):
-        """Changes a MS smart quote character to an XML or HTML
-        entity."""
-        sub = self.MS_CHARS.get(orig)
-        if isinstance(sub, tuple):
-            if self.smartQuotesTo == 'xml':
-                sub = '&#x%s;' % sub[1]
-            else:
-                sub = '&%s;' % sub[0]
-        return sub
-
-    def _convertFrom(self, proposed):
-        proposed = self.find_codec(proposed)
-        if not proposed or proposed in self.triedEncodings:
-            return None
-        self.triedEncodings.append(proposed)
-        markup = self.markup
-
-        # Convert smart quotes to HTML if coming from an encoding
-        # that might have them.
-        if self.smartQuotesTo and proposed.lower() in("windows-1252",
-                                                      "iso-8859-1",
-                                                      "iso-8859-2"):
-            markup = re.compile("([\x80-\x9f])").sub \
-                     (lambda(x): self._subMSChar(x.group(1)),
-                      markup)
-
-        try:
-            # print "Trying to convert document to %s" % proposed
-            u = self._toUnicode(markup, proposed)
-            self.markup = u
-            self.originalEncoding = proposed
-        except Exception, e:
-            # print "That didn't work!"
-            # print e
-            return None
-        #print "Correct encoding: %s" % proposed
-        return self.markup
-
-    def _toUnicode(self, data, encoding):
-        '''Given a string and its encoding, decodes the string into Unicode.
-        %encoding is a string recognized by encodings.aliases'''
-
-        # strip Byte Order Mark (if present)
-        if (len(data) >= 4) and (data[:2] == '\xfe\xff') \
-               and (data[2:4] != '\x00\x00'):
-            encoding = 'utf-16be'
-            data = data[2:]
-        elif (len(data) >= 4) and (data[:2] == '\xff\xfe') \
-                 and (data[2:4] != '\x00\x00'):
-            encoding = 'utf-16le'
-            data = data[2:]
-        elif data[:3] == '\xef\xbb\xbf':
-            encoding = 'utf-8'
-            data = data[3:]
-        elif data[:4] == '\x00\x00\xfe\xff':
-            encoding = 'utf-32be'
-            data = data[4:]
-        elif data[:4] == '\xff\xfe\x00\x00':
-            encoding = 'utf-32le'
-            data = data[4:]
-        newdata = unicode(data, encoding)
-        return newdata
-
-    def _detectEncoding(self, xml_data, isHTML=False):
-        """Given a document, tries to detect its XML encoding."""
-        xml_encoding = sniffed_xml_encoding = None
-        try:
-            if xml_data[:4] == '\x4c\x6f\xa7\x94':
-                # EBCDIC
-                xml_data = self._ebcdic_to_ascii(xml_data)
-            elif xml_data[:4] == '\x00\x3c\x00\x3f':
-                # UTF-16BE
-                sniffed_xml_encoding = 'utf-16be'
-                xml_data = unicode(xml_data, 'utf-16be').encode('utf-8')
-            elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') \
-                     and (xml_data[2:4] != '\x00\x00'):
-                # UTF-16BE with BOM
-                sniffed_xml_encoding = 'utf-16be'
-                xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8')
-            elif xml_data[:4] == '\x3c\x00\x3f\x00':
-                # UTF-16LE
-                sniffed_xml_encoding = 'utf-16le'
-                xml_data = unicode(xml_data, 'utf-16le').encode('utf-8')
-            elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and \
-                     (xml_data[2:4] != '\x00\x00'):
-                # UTF-16LE with BOM
-                sniffed_xml_encoding = 'utf-16le'
-                xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8')
-            elif xml_data[:4] == '\x00\x00\x00\x3c':
-                # UTF-32BE
-                sniffed_xml_encoding = 'utf-32be'
-                xml_data = unicode(xml_data, 'utf-32be').encode('utf-8')
-            elif xml_data[:4] == '\x3c\x00\x00\x00':
-                # UTF-32LE
-                sniffed_xml_encoding = 'utf-32le'
-                xml_data = unicode(xml_data, 'utf-32le').encode('utf-8')
-            elif xml_data[:4] == '\x00\x00\xfe\xff':
-                # UTF-32BE with BOM
-                sniffed_xml_encoding = 'utf-32be'
-                xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8')
-            elif xml_data[:4] == '\xff\xfe\x00\x00':
-                # UTF-32LE with BOM
-                sniffed_xml_encoding = 'utf-32le'
-                xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8')
-            elif xml_data[:3] == '\xef\xbb\xbf':
-                # UTF-8 with BOM
-                sniffed_xml_encoding = 'utf-8'
-                xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8')
-            else:
-                sniffed_xml_encoding = 'ascii'
-                pass
-        except:
-            xml_encoding_match = None
-        xml_encoding_match = re.compile(
-            '^<\?.*encoding=[\'"](.*?)[\'"].*\?>').match(xml_data)
-        if not xml_encoding_match and isHTML:
-            regexp = re.compile('<\s*meta[^>]+charset=([^>]*?)[;\'">]', re.I)
-            xml_encoding_match = regexp.search(xml_data)
-        if xml_encoding_match is not None:
-            xml_encoding = xml_encoding_match.groups()[0].lower()
-            if isHTML:
-                self.declaredHTMLEncoding = xml_encoding
-            if sniffed_xml_encoding and \
-               (xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode',
-                                 'iso-10646-ucs-4', 'ucs-4', 'csucs4',
-                                 'utf-16', 'utf-32', 'utf_16', 'utf_32',
-                                 'utf16', 'u16')):
-                xml_encoding = sniffed_xml_encoding
-        return xml_data, xml_encoding, sniffed_xml_encoding
-
-
-    def find_codec(self, charset):
-        return self._codec(self.CHARSET_ALIASES.get(charset, charset)) \
-               or (charset and self._codec(charset.replace("-", ""))) \
-               or (charset and self._codec(charset.replace("-", "_"))) \
-               or charset
-
-    def _codec(self, charset):
-        if not charset: return charset
-        codec = None
-        try:
-            codecs.lookup(charset)
-            codec = charset
-        except (LookupError, ValueError):
-            pass
-        return codec
-
-    EBCDIC_TO_ASCII_MAP = None
-    def _ebcdic_to_ascii(self, s):
-        c = self.__class__
-        if not c.EBCDIC_TO_ASCII_MAP:
-            emap = (0,1,2,3,156,9,134,127,151,141,142,11,12,13,14,15,
-                    16,17,18,19,157,133,8,135,24,25,146,143,28,29,30,31,
-                    128,129,130,131,132,10,23,27,136,137,138,139,140,5,6,7,
-                    144,145,22,147,148,149,150,4,152,153,154,155,20,21,158,26,
-                    32,160,161,162,163,164,165,166,167,168,91,46,60,40,43,33,
-                    38,169,170,171,172,173,174,175,176,177,93,36,42,41,59,94,
-                    45,47,178,179,180,181,182,183,184,185,124,44,37,95,62,63,
-                    186,187,188,189,190,191,192,193,194,96,58,35,64,39,61,34,
-                    195,97,98,99,100,101,102,103,104,105,196,197,198,199,200,
-                    201,202,106,107,108,109,110,111,112,113,114,203,204,205,
-                    206,207,208,209,126,115,116,117,118,119,120,121,122,210,
-                    211,212,213,214,215,216,217,218,219,220,221,222,223,224,
-                    225,226,227,228,229,230,231,123,65,66,67,68,69,70,71,72,
-                    73,232,233,234,235,236,237,125,74,75,76,77,78,79,80,81,
-                    82,238,239,240,241,242,243,92,159,83,84,85,86,87,88,89,
-                    90,244,245,246,247,248,249,48,49,50,51,52,53,54,55,56,57,
-                    250,251,252,253,254,255)
-            import string
-            c.EBCDIC_TO_ASCII_MAP = string.maketrans( \
-            ''.join(map(chr, range(256))), ''.join(map(chr, emap)))
-        return s.translate(c.EBCDIC_TO_ASCII_MAP)
-
-    MS_CHARS = { '\x80' : ('euro', '20AC'),
-                 '\x81' : ' ',
-                 '\x82' : ('sbquo', '201A'),
-                 '\x83' : ('fnof', '192'),
-                 '\x84' : ('bdquo', '201E'),
-                 '\x85' : ('hellip', '2026'),
-                 '\x86' : ('dagger', '2020'),
-                 '\x87' : ('Dagger', '2021'),
-                 '\x88' : ('circ', '2C6'),
-                 '\x89' : ('permil', '2030'),
-                 '\x8A' : ('Scaron', '160'),
-                 '\x8B' : ('lsaquo', '2039'),
-                 '\x8C' : ('OElig', '152'),
-                 '\x8D' : '?',
-                 '\x8E' : ('#x17D', '17D'),
-                 '\x8F' : '?',
-                 '\x90' : '?',
-                 '\x91' : ('lsquo', '2018'),
-                 '\x92' : ('rsquo', '2019'),
-                 '\x93' : ('ldquo', '201C'),
-                 '\x94' : ('rdquo', '201D'),
-                 '\x95' : ('bull', '2022'),
-                 '\x96' : ('ndash', '2013'),
-                 '\x97' : ('mdash', '2014'),
-                 '\x98' : ('tilde', '2DC'),
-                 '\x99' : ('trade', '2122'),
-                 '\x9a' : ('scaron', '161'),
-                 '\x9b' : ('rsaquo', '203A'),
-                 '\x9c' : ('oelig', '153'),
-                 '\x9d' : '?',
-                 '\x9e' : ('#x17E', '17E'),
-                 '\x9f' : ('Yuml', ''),}
-
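A brief UnicodeDammit usage sketch (Python 2; the byte string and override encoding are illustrative only):

    converted = UnicodeDammit('Sacr\xe9 bleu!', ['latin-1'])
    print converted.unicode             # u'Sacr\xe9 bleu!'
    print converted.originalEncoding    # latin-1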
-#######################################################################
-
-
-#By default, act as an HTML pretty-printer.
-if __name__ == '__main__':
-    import sys
-    soup = BeautifulSoup(sys.stdin)
-    print soup.prettify()
diff --git a/site_utils/rpm_control_system/MultiThreadedXMLRPCServer.py b/site_utils/rpm_control_system/MultiThreadedXMLRPCServer.py
deleted file mode 100644
index dca12ee..0000000
--- a/site_utils/rpm_control_system/MultiThreadedXMLRPCServer.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import SimpleXMLRPCServer
-from SocketServer import ThreadingMixIn
-
-
-class MultiThreadedXMLRPCServer(ThreadingMixIn,
-                                SimpleXMLRPCServer.SimpleXMLRPCServer):
-    """
-    This class simply subclasses SimpleXMLRPCServer and ThreadingMixIn so that
-    our XML-RPC server will be multi-threaded and can handle multiple XML-RPC
-    requests in parallel.
-    """
-    pass
\ No newline at end of file
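For context, a hedged sketch of how a class like the one removed above would typically be used; the address, port and registered function are made up, and only standard-library calls are shown:

    server = MultiThreadedXMLRPCServer(('localhost', 9999), allow_none=True)
    server.register_function(lambda: 'pong', 'ping')   # trivial example method
    server.serve_forever()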
diff --git a/site_utils/rpm_control_system/config.py b/site_utils/rpm_control_system/config.py
deleted file mode 100644
index 74cf1ad..0000000
--- a/site_utils/rpm_control_system/config.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import os
-
-import ConfigParser
-
-CONFIG_FILE = os.path.join(os.path.dirname(__file__), 'rpm_config.ini')
-rpm_config = ConfigParser.SafeConfigParser()
-rpm_config.read(CONFIG_FILE)
\ No newline at end of file
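As a usage note, other files in this directory (including the frontend_server.py removed below) read settings through this shared parser; a minimal sketch:

    from config import rpm_config
    log_format = rpm_config.get('GENERAL', 'frontend_logname_format')
    lru_size = rpm_config.getint('RPM_INFRASTRUCTURE', 'lru_size')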
diff --git a/site_utils/rpm_control_system/dli.py b/site_utils/rpm_control_system/dli.py
deleted file mode 100644
index 76ce7c7..0000000
--- a/site_utils/rpm_control_system/dli.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/python2
-#pylint: disable-msg=C0111
-import BeautifulSoup,optparse
-
-try:
-    import pycurl
-except ImportError:
-    print "Failed to import pycurl. Ignoring."
-
-###############################################################
-# Digital Loggers Web Power Switch management
-###############################################################
-# Version: 0.01
-# Description: This is both a module and a script
-#              
-#              The module provides a python class named
-#              DLIPower that allows managing the web power
-#              switch from python programs.
-# 
-#              When run as a script this acts as a command
-#              line utility to manage the DLI Power switch.
-# Author: Dwight Hubbard d@dhub.me
-# Copyright: This module may be used for any use personal
-#            or commercial as long as the author and copyright
-#            notice are included in full.
-###############################################################
-
-# Global settings
-# Timeout in seconds
-TIMEOUT=5
-
-class powerswitch:
-    """ Manage the DLI Web power switch """
-    def __init__(self,userid='admin',password='4321',hostname='192.168.0.100'):
-        self.userid=userid
-        self.password=password
-        self.hostname=hostname
-        self.contents=''
-    def verify(self):
-        """ Verify we can reach the switch, returns true if ok """
-        return self.geturl()
-    def body_callback(self,buf):
-        self.contents=self.contents+buf
-    def geturl(self,url='index.htm') :
-        self.contents=''
-        curl = pycurl.Curl()
-        curl.setopt(curl.TIMEOUT,TIMEOUT)
-        curl.setopt(curl.URL, 'http://%s:%s@%s/%s' % (self.userid,self.password,self.hostname,url))
-        curl.setopt(curl.WRITEFUNCTION, self.body_callback)
-        try:
-            curl.perform()
-            curl.close()
-        except pycurl.error:
-            return None
-        return self.contents
-    def off(self,outlet=0):
-        """ Turn off a power to an outlet """
-        self.geturl(url= 'outlet?%d=OFF' % outlet)
-    def on(self,outlet=0):
-        """ Turn on power to an outlet """
-        self.geturl(url= 'outlet?%d=ON' % outlet)
-    def statuslist(self):
-        """ Return the status of all outlets in a list, 
-        each item will contain 3 itmes plugnumber, hostname and state  """
-        outlets=[]
-        url=self.geturl('index.htm')
-        if not url:
-            return None
-        soup=BeautifulSoup.BeautifulSoup(url)
-        try:
-            powertable=soup.findAll('table')[5]
-        except IndexError:
-            return None
-        for temp in powertable.findAll('tr')[2:]:
-            columns=temp.findAll('td')
-            plugnumber=columns[0].string
-            hostname=columns[1].string
-            state=columns[2].find('font').string
-            outlets.append([int(plugnumber),hostname,state])
-        return outlets
-    def printstatus(self):
-        """ Print the status off all the outlets as a table to stdout """
-        if not self.statuslist():
-            print "Unable to communicte to the Web power switch at %s" % self.hostname
-            return None
-        print 'Outlet\t%-15.15s\tState' % 'Hostname'
-        for item in self.statuslist():
-            print '%d\t%-15.15s\t%s' % (item[0],item[1],item[2])
-    def status(self,outlet=1):
-        """ Return the status of an outlet, returned value will be one of: On, Off, Unknown """
-        outlets=self.statuslist()
-        if outlet:
-            for plug in outlets:
-                if plug[0] == outlet:
-                    return plug[2]
-        return 'Unknown'
-
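A minimal interactive sketch of the class above (the credentials and address are the defaults baked into the constructor; the outlet number is illustrative):

    switch = powerswitch(userid='admin', password='4321',
                         hostname='192.168.0.100')
    switch.printstatus()      # prints an Outlet/Hostname/State table
    switch.on(1)              # turn outlet 1 on
    print switch.status(1)    # e.g. 'ON' or 'OFF', as shown on the web UI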
-if __name__ == "__main__":
-    parser = optparse.OptionParser()
-    parser.add_option('--hostname',dest='hostname',default="192.168.0.100")
-    parser.add_option('--user',    dest='user',    default="admin")
-    parser.add_option('--password',dest='password',default="4321")
-    (options, args) = parser.parse_args()
-
-    switch=powerswitch(userid=options.user,password=options.password,hostname=options.hostname)
-    if len(args):
-        if len(args) == 2:
-            if args[0].lower() in ['on','poweron']:
-                switch.on(int(args[1]))
-            if args[0].lower() in ['off','poweroff']:
-                switch.off(int(args[1]))
-            if args[0].lower() in ['status']:
-                print switch.status(int(args[1]))
-    else:
-        switch.printstatus()
diff --git a/site_utils/rpm_control_system/dli_urllib.py b/site_utils/rpm_control_system/dli_urllib.py
deleted file mode 100644
index f14aeaa..0000000
--- a/site_utils/rpm_control_system/dli_urllib.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import urllib
-
-import dli
-
-
-class Powerswitch(dli.powerswitch):
-    """
-    This class will utilize urllib instead of pycurl to get the web page info.
-    """
-
-
-    def geturl(self,url='index.htm') :
-        self.contents=''
-        path = 'http://%s:%s@%s:80/%s' % (self.userid,self.password,
-                                          self.hostname,url)
-        web_file = urllib.urlopen(path)
-        if web_file.getcode() != 200:
-            return None
-        self.contents = web_file.read()
-        return self.contents
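Usage mirrors the pycurl-based parent class; a short sketch (hostname and credentials are illustrative):

    import dli_urllib
    switch = dli_urllib.Powerswitch(userid='admin', password='4321',
                                    hostname='192.168.0.100')
    print switch.statuslist()   # [[1, 'host-a', 'ON'], ...] or None on error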
diff --git a/site_utils/rpm_control_system/frontend_server.py b/site_utils/rpm_control_system/frontend_server.py
deleted file mode 100755
index bb5d33e..0000000
--- a/site_utils/rpm_control_system/frontend_server.py
+++ /dev/null
@@ -1,402 +0,0 @@
-#!/usr/bin/python2
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import errno
-import heapq
-import logging
-import os
-import sys
-import socket
-import threading
-import xmlrpclib
-
-import rpm_logging_config
-from config import rpm_config
-from MultiThreadedXMLRPCServer import MultiThreadedXMLRPCServer
-from rpm_infrastructure_exception import RPMInfrastructureException
-
-import common
-from autotest_lib.site_utils.rpm_control_system import utils
-
-DEFAULT_RPM_COUNT = 0
-TERMINATED = -1
-
-# Indexes for accessing heap entries.
-RPM_COUNT = 0
-DISPATCHER_URI = 1
-
-LOG_FILENAME_FORMAT = rpm_config.get('GENERAL','frontend_logname_format')
-DEFAULT_RPM_ID = rpm_config.get('RPM_INFRASTRUCTURE', 'default_rpm_id')
-
-# Valid state values.
-VALID_STATE_VALUES = ['ON', 'OFF', 'CYCLE']
-
-# Servo-interface mapping file
-MAPPING_FILE = os.path.join(
-        os.path.dirname(__file__),
-        rpm_config.get('CiscoPOE', 'servo_interface_mapping_file'))
-
-# Size of the LRU that holds power management unit information related
-# to a device, e.g. rpm_hostname, outlet, hydra_hostname, etc.
-LRU_SIZE = rpm_config.getint('RPM_INFRASTRUCTURE', 'lru_size')
-
-
-class DispatcherDownException(Exception):
-    """Raised when a particular RPMDispatcher is down."""
-
-
-class RPMFrontendServer(object):
-    """
-    This class is the frontend server of the RPM Infrastructure. All clients
-    will send their power state requests to this central server who will
-    forward the requests to an available or already assigned RPM dispatcher
-    server.
-
-    Once the dispatcher processes the request it will return the result
-    to this frontend server who will send the result back to the client.
-
-    All calls to this server are blocking.
-
-    @var _dispatcher_minheap: Min heap that returns a list of format-
-                              [ num_rpm's, dispatcher_uri ]
-                              Used to choose the least loaded dispatcher.
-    @var _entry_dict: Maps dispatcher URI to an entry (list) inside the min
-                     heap. If a dispatcher server shuts down this allows us to
-                     invalidate the entry in the minheap.
-    @var _lock: Used to protect data from multiple running threads all
-                manipulating the same data.
-    @var _rpm_dict: Maps rpm hostname's to an already assigned dispatcher
-                    server.
-    @var _mapping_last_modified: Last-modified time of the servo-interface
-                                 mapping file.
-    @var _servo_interface: Maps servo hostname to (switch_hostname, interface).
-    @var _rpm_info: An LRU cache to hold recently visited rpm information
-                    so that we don't hit AFE too often. The elements in
-                    the cache are instances of PowerUnitInfo indexed by
-                    dut hostnames. POE info is not stored in the cache.
-    @var _afe: AFE instance to talk to autotest. Used to retrieve rpm hostname.
-    @var _email_handler: Email handler to use to control email notifications.
-    """
-
-
-    def __init__(self, email_handler=None):
-        """
-        RPMFrontendServer constructor.
-
-        Initializes instance variables.
-        """
-        self._dispatcher_minheap = []
-        self._entry_dict = {}
-        self._lock = threading.Lock()
-        self._mapping_last_modified = os.path.getmtime(MAPPING_FILE)
-        self._servo_interface = utils.load_servo_interface_mapping()
-        self._rpm_dict = {}
-        self._rpm_info = utils.LRUCache(size=LRU_SIZE)
-        self._email_handler = email_handler
-
-
-    def set_power_via_poe(self, device_hostname, new_state):
-        """Sets power state of the device to the requested state via POE.
-
-        @param device_hostname: Hostname of the servo to control.
-        @param new_state: [ON, OFF, CYCLE] State to which we want to set the
-                          device's outlet.
-
-        @return: True if the attempt to change power state was successful,
-                 False otherwise.
-
-        @raise RPMInfrastructureException: No dispatchers are available or can
-                                           be reached.
-        """
-        # Remove any DNS Zone information and simplify down to just the hostname.
-        device_hostname = device_hostname.split('.')[0]
-        new_state = new_state.upper()
-        if new_state not in VALID_STATE_VALUES:
-            logging.error('Received request to set servo %s to invalid '
-                          'state %s', device_hostname, new_state)
-            return False
-        logging.info('Received request to set servo: %s to state: %s',
-                     device_hostname, new_state)
-        powerunit_info = self._get_poe_powerunit_info(device_hostname)
-        try:
-            return self._queue_once(powerunit_info, new_state)
-        except DispatcherDownException:
-            # Retry forwarding the request.
-            return self.set_power_via_poe(device_hostname, new_state)
-
-
-    def set_power_via_rpm(self, device_hostname, rpm_hostname,
-                          rpm_outlet, hydra_hostname, new_state):
-        """Sets power state of a device to the requested state via RPM.
-
-        Unlike the special case of POE, powerunit information is not available
-        on the RPM server, so must be provided as arguments.
-
-        @param device_hostname: Hostname of the servo to control.
-        @param rpm_hostname: Hostname of the RPM to use.
-        @param rpm_outlet: The RPM outlet to control.
-        @param hydra_hostname: If required, the hydra device to SSH through to
-                               get to the RPM.
-        @param new_state: [ON, OFF, CYCLE] State to which we want to set the
-                          device's outlet.
-
-        @return: True if the attempt to change power state was successful,
-                 False otherwise.
-
-        @raise RPMInfrastructureException: No dispatchers are available or can
-                                           be reached.
-        """
-        new_state = new_state.upper()
-        if new_state not in VALID_STATE_VALUES:
-            logging.error('Received request to set device %s to invalid '
-                          'state %s', device_hostname, new_state)
-            return False
-        logging.info('Received request to set device: %s to state: %s',
-                     device_hostname, new_state)
-
-        powerunit_info = utils.PowerUnitInfo(
-                device_hostname=device_hostname,
-                powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                powerunit_hostname=rpm_hostname,
-                outlet=rpm_outlet,
-                hydra_hostname=hydra_hostname,
-        )
-        try:
-            return self._queue_once(powerunit_info, new_state)
-        except DispatcherDownException:
-            # Retry forwarding the request.
-            return self.set_power_via_rpm(device_hostname, rpm_hostname,
-                                          rpm_outlet, hydra_hostname, new_state)
-
-
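    # Editorial sketch (not part of the original file): callers were expected
    # to reach the two set_power_via_* methods above over XML-RPC, roughly:
    #
    #   import xmlrpclib
    #   frontend = xmlrpclib.ServerProxy('http://frontend-host:port',
    #                                    allow_none=True)
    #   frontend.set_power_via_rpm('some-dut', 'some-rpm', '.A1', None, 'CYCLE')
    #
    # The URI, hostnames and outlet above are made-up placeholders.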
-    def _queue_once(self, powerunit_info, new_state):
-        """Queue one request to the dispatcher."""
-        dispatcher_uri = self._get_dispatcher(powerunit_info)
-        if not dispatcher_uri:
-            # No dispatchers available.
-            raise RPMInfrastructureException('No dispatchers available.')
-        client = xmlrpclib.ServerProxy(dispatcher_uri, allow_none=True)
-        try:
-            # Block on the request and return the result once it arrives.
-            return client.queue_request(powerunit_info, new_state)
-        except socket.error as er:
-            # Dispatcher Server is not reachable. Unregister it and retry.
-            logging.error("Can't reach Dispatch Server: %s. Error: %s",
-                          dispatcher_uri, errno.errorcode[er.errno])
-            if self.is_network_infrastructure_down():
-                # No dispatchers can handle this request so raise an Exception
-                # to the caller.
-                raise RPMInfrastructureException('No dispatchers can be '
-                                                 'reached.')
-            logging.info('Will attempt forwarding request to other dispatch '
-                         'servers.')
-            logging.error('Unregistering %s due to error. Recommend resetting '
-                          'that dispatch server.', dispatcher_uri)
-            self.unregister_dispatcher(dispatcher_uri)
-            raise DispatcherDownException(dispatcher_uri)
-
-
-    def is_network_infrastructure_down(self):
-        """
-        Check to see if we can communicate with any dispatcher servers.
-
-        Only called in the situation that queuing a request to a dispatcher
-        server failed.
-
-        @return: False if any dispatcher server is up and the rpm infrastructure
-                 can still function. True otherwise.
-        """
-        for dispatcher_entry in self._dispatcher_minheap:
-            dispatcher = xmlrpclib.ServerProxy(
-                    dispatcher_entry[DISPATCHER_URI], allow_none=True)
-            try:
-                if dispatcher.is_up():
-                    # At least one dispatcher is alive, so our network is fine.
-                    return False
-            except socket.error:
-                # Can't talk to this dispatcher so keep looping.
-                pass
-        logging.error("Can't reach any dispatchers. Check frontend network "
-                      'status or all dispatchers are down.')
-        return True
-
-
-    def _get_poe_powerunit_info(self, device_hostname):
-        """Get the power management unit information for a POE controller.
-
-        Servo is managed by POE. The related information we need to know
-        include poe hostname, poe interface. Such information is
-        stored in a local file and read into memory.
-
-        @param device_hostname: A string representing the device's hostname.
-
-        @returns: A PowerUnitInfo object.
-        @raises RPMInfrastructureException if failed to get the power
-                unit info.
-
-        """
-        with self._lock:
-            reload_info = utils.reload_servo_interface_mapping_if_necessary(
-                    self._mapping_last_modified)
-            if reload_info:
-                self._mapping_last_modified, self._servo_interface = reload_info
-            switch_if_tuple = self._servo_interface.get(device_hostname)
-            if not switch_if_tuple:
-                raise RPMInfrastructureException(
-                        'Could not determine POE hostname for %s. '
-                        'Please check the servo-interface mapping file.' %
-                        device_hostname)
-            else:
-                return utils.PowerUnitInfo(
-                        device_hostname=device_hostname,
-                        powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.POE,
-                        powerunit_hostname=switch_if_tuple[0],
-                        outlet=switch_if_tuple[1],
-                        hydra_hostname=None)
-
-
-    def _get_dispatcher(self, powerunit_info):
-        """
-        Private method that looks up or assigns a dispatcher server
-        responsible for communicating with the given RPM/POE.
-
-        Will also call _check_dispatcher to make sure it is up before returning
-        it.
-
-        @param powerunit_info: A PowerUnitInfo instance.
-
-        @return: URI of dispatcher server responsible for the rpm/poe.
-                 None if no dispatcher servers are available.
-        """
-        powerunit_type = powerunit_info.powerunit_type
-        powerunit_hostname = powerunit_info.powerunit_hostname
-        with self._lock:
-            if self._rpm_dict.get(powerunit_hostname):
-                return self._rpm_dict[powerunit_hostname]
-            logging.info('No Dispatcher assigned for %s %s.',
-                         powerunit_type, powerunit_hostname)
-            # Choose the least loaded dispatcher to communicate with the RPM.
-            try:
-                heap_entry = heapq.heappop(self._dispatcher_minheap)
-            except IndexError:
-                logging.error('Infrastructure Error: Frontend has no '
-                              'registered dispatchers to field out this '
-                              'request!')
-                return None
-            dispatcher_uri = heap_entry[DISPATCHER_URI]
-            # Put this entry back in the heap with an RPM Count + 1.
-            heap_entry[RPM_COUNT] = heap_entry[RPM_COUNT] + 1
-            heapq.heappush(self._dispatcher_minheap, heap_entry)
-            logging.info('Assigning %s for %s %s', dispatcher_uri,
-                         powerunit_type, powerunit_hostname)
-            self._rpm_dict[powerunit_hostname] = dispatcher_uri
-            return dispatcher_uri
-
-
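
As a side note, the least-loaded assignment above keeps mutable [rpm_count, uri] entries in a heapq min-heap: pop the smallest entry, bump its count, and push it back. A minimal standalone sketch of that pattern (the URIs are illustrative, not part of the module):

import heapq

# Illustrative heap entries: [rpm_count, dispatcher_uri].
minheap = [[0, 'http://dispatcher-a:9998'], [2, 'http://dispatcher-b:9998']]
heapq.heapify(minheap)

def assign_least_loaded(heap):
    """Pop the least-loaded dispatcher, bump its count, and push it back."""
    entry = heapq.heappop(heap)      # Smallest rpm_count is always at index 0.
    entry[0] += 1                    # One more RPM now maps to this dispatcher.
    heapq.heappush(heap, entry)
    return entry[1]

print(assign_least_loaded(minheap))  # -> http://dispatcher-a:9998
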
-    def register_dispatcher(self, dispatcher_uri):
-        """
-        Called by a dispatcher server so that the frontend server knows it is
-        available to field out RPM requests.
-
-        Adds an entry to the min heap and entry map for this dispatcher.
-
-        @param dispatcher_uri: Address of dispatcher server we are registering.
-        """
-        logging.info('Registering uri: %s as a rpm dispatcher.', dispatcher_uri)
-        with self._lock:
-            heap_entry = [DEFAULT_RPM_COUNT, dispatcher_uri]
-            heapq.heappush(self._dispatcher_minheap, heap_entry)
-            self._entry_dict[dispatcher_uri] = heap_entry
-
-
-    def unregister_dispatcher(self, uri_to_unregister):
-        """
-        Called by a dispatcher server as it exits so that the frontend server
-        knows that it is no longer available to field out requests.
-
-        Assigns an rpm count of -1 to this dispatcher so that it will be pushed
-        out of the min heap.
-
-        Removes from _rpm_dict all entries with the value of this dispatcher so
-        that those RPM's can be reassigned to a new dispatcher.
-
-        @param uri_to_unregister: Address of dispatcher server we are
-                                  unregistering.
-        """
-        logging.info('Unregistering uri: %s as a rpm dispatcher.',
-                     uri_to_unregister)
-        with self._lock:
-            heap_entry = self._entry_dict.get(uri_to_unregister)
-            if not heap_entry:
-                logging.warning('%s was not registered.', uri_to_unregister)
-                return
-            # Set this entry's RPM_COUNT to TERMINATED (-1).
-            heap_entry[RPM_COUNT] = TERMINATED
-            # Remove all RPM mappings.
-            for rpm, dispatcher in self._rpm_dict.items():
-                if dispatcher == uri_to_unregister:
-                    self._rpm_dict[rpm] = None
-            self._entry_dict[uri_to_unregister] = None
-            # Re-sort the heap and remove any terminated dispatchers.
-            heapq.heapify(self._dispatcher_minheap)
-            self._remove_terminated_dispatchers()
-
-
-    def _remove_terminated_dispatchers(self):
-        """
-        Peek at the head of the heap and keep popping off values until there is
-        a non-terminated dispatcher at the top.
-        """
-        # Heapq guarantees the head of the heap is in the '0' index.
-        try:
-            # Peek at the next element in the heap.
-            top_of_heap = self._dispatcher_minheap[0]
-            while top_of_heap[RPM_COUNT] == TERMINATED:
-                # Pop off the top element.
-                heapq.heappop(self._dispatcher_minheap)
-                # Peek at the next element in the heap.
-                top_of_heap = self._dispatcher_minheap[0]
-        except IndexError:
-            # No more values in the heap. Can be thrown by both minheap[0]
-            # statements.
-            pass
-
-
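
The unregister path above uses lazy deletion: the entry is marked with a sentinel count, the heap is re-heapified, and terminated entries are only discarded once they surface at the top. A small self-contained sketch of that idea (the TERMINATED value of -1 matches the convention above; the URIs are illustrative):

import heapq

TERMINATED = -1
heap = [[1, 'http://dispatcher-a:9998'], [3, 'http://dispatcher-b:9998']]
heapq.heapify(heap)

def unregister(heap, uri):
    """Mark an entry as terminated and drop terminated entries at the top."""
    for entry in heap:
        if entry[1] == uri:
            entry[0] = TERMINATED
    heapq.heapify(heap)              # Terminated entries float to the top.
    while heap and heap[0][0] == TERMINATED:
        heapq.heappop(heap)

unregister(heap, 'http://dispatcher-a:9998')
print(heap)  # -> [[3, 'http://dispatcher-b:9998']]
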
-    def suspend_emails(self, hours):
-        """Suspend email notifications.
-
-        @param hours: How many hours to suspend email notifications.
-        """
-        if self._email_handler:
-            self._email_handler.suspend_emails(hours)
-
-
-    def resume_emails(self):
-        """Resume email notifications."""
-        if self._email_handler:
-            self._email_handler.resume_emails()
-
-
-if __name__ == '__main__':
-    """
-    Main function used to launch the frontend server. Creates an instance of
-    RPMFrontendServer and registers it to a MultiThreadedXMLRPCServer instance.
-    """
-    if len(sys.argv) != 2:
-        print('Usage: ./%s <log_file_dir>.' % sys.argv[0])
-        sys.exit(1)
-
-    email_handler = rpm_logging_config.set_up_logging_to_file(
-            sys.argv[1], LOG_FILENAME_FORMAT)
-    frontend_server = RPMFrontendServer(email_handler=email_handler)
-    # We assume that external clients will always connect to us via the
-    # hostname, so listening on the hostname ensures we pick the right network
-    # interface.
-    address = socket.gethostname()
-    port = rpm_config.getint('RPM_INFRASTRUCTURE', 'frontend_port')
-    server = MultiThreadedXMLRPCServer((address, port), allow_none=True)
-    server.register_instance(frontend_server)
-    logging.info('Listening on %s port %d', address, port)
-    server.serve_forever()
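
The __main__ block above registers the frontend instance with a multi-threaded XML-RPC server. A minimal sketch of the same serve/register pattern using only the Python 3 standard library (SimpleXMLRPCServer stands in for the custom MultiThreadedXMLRPCServer; the host, port, and service are illustrative):

from xmlrpc.server import SimpleXMLRPCServer
from xmlrpc.client import ServerProxy
import threading

class EchoService(object):
    def is_up(self):
        """Trivial health-check method, mirroring the dispatcher protocol."""
        return True

server = SimpleXMLRPCServer(('localhost', 9999), allow_none=True,
                            logRequests=False)
server.register_instance(EchoService())
threading.Thread(target=server.serve_forever, daemon=True).start()

client = ServerProxy('http://localhost:9999', allow_none=True)
print(client.is_up())  # -> True
server.shutdown()
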
diff --git a/site_utils/rpm_control_system/frontend_server_unittest.py b/site_utils/rpm_control_system/frontend_server_unittest.py
deleted file mode 100755
index 9c48b84..0000000
--- a/site_utils/rpm_control_system/frontend_server_unittest.py
+++ /dev/null
@@ -1,167 +0,0 @@
-#!/usr/bin/python2
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import mox
-import socket
-import unittest
-
-import frontend_server
-from rpm_infrastructure_exception import RPMInfrastructureException
-
-import common
-from autotest_lib.site_utils.rpm_control_system import utils
-
-
-FAKE_DISPATCHER_URI1 = 'http://fake_dispatcher:1'
-FAKE_DISPATCHER_URI2 = 'http://fake_dispatcher:2'
-UNREACHABLE_SERVER_MSG = 'Server Unreachable'
-DUT_HOSTNAME = 'chromeos-rack8e-hostbs1'
-POWERUNIT_HOSTNAME = 'chromeos-rack8e-rpm1'
-OUTLET = '.A100'
-NEW_STATE = 'ON'
-FAKE_ERRNO = 1
-
-
-class TestFrontendServer(mox.MoxTestBase):
-    """Test frontend_server."""
-
-
-    def setUp(self):
-        super(TestFrontendServer, self).setUp()
-        self.frontend = frontend_server.RPMFrontendServer()
-        self.frontend._rpm_info[DUT_HOSTNAME] = utils.PowerUnitInfo(
-                device_hostname=DUT_HOSTNAME,
-                powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                powerunit_hostname=POWERUNIT_HOSTNAME,
-                outlet=OUTLET, hydra_hostname=None)
-        self.xmlrpc_mock = self.mox.CreateMockAnything()
-        frontend_server.xmlrpclib.ServerProxy = self.mox.CreateMockAnything()
-        frontend_server.xmlrpclib.ServerProxy(FAKE_DISPATCHER_URI1,
-                allow_none=True).AndReturn(self.xmlrpc_mock)
-
-
-    def testNoRegisteredDispatchers(self):
-        """ Queue request with no dispatchers. Should fail. """
-        self.mox.ResetAll()
-        self.assertRaises(
-                RPMInfrastructureException,
-                self.frontend.set_power_via_rpm,
-                DUT_HOSTNAME, POWERUNIT_HOSTNAME, OUTLET, '', NEW_STATE,
-        )
-
-
-    def testSuccessfullyQueueRequest(self):
-        """
-        Queue request with at least one dispatcher.
-
-        Expects the request to succeed.
-        """
-        self.xmlrpc_mock.queue_request(
-                mox.IgnoreArg(), NEW_STATE).AndReturn(True)
-        self.mox.ReplayAll()
-        self.frontend.register_dispatcher(FAKE_DISPATCHER_URI1)
-        self.assertTrue(self.frontend.set_power_via_rpm(
-                DUT_HOSTNAME, POWERUNIT_HOSTNAME, OUTLET, '', NEW_STATE,
-        ))
-        self.mox.VerifyAll()
-
-
-    def testFailedQueueRequest(self):
-        """
-        Queue request with at least one dispatcher.
-
-        Expects the request to fail.
-        """
-        self.xmlrpc_mock.queue_request(
-                mox.IgnoreArg(), NEW_STATE).AndReturn(False)
-        self.mox.ReplayAll()
-        self.frontend.register_dispatcher(FAKE_DISPATCHER_URI1)
-        self.assertFalse(self.frontend.set_power_via_rpm(
-                DUT_HOSTNAME, POWERUNIT_HOSTNAME, OUTLET, '', NEW_STATE,
-        ))
-        self.mox.VerifyAll()
-
-
-    def testAllDispatchersUnregistered(self):
-        """
-        Queue request before and after a dispatcher unregisters.
-
-        queue_request should return True then False.
-        """
-        self.xmlrpc_mock.queue_request(
-                mox.IgnoreArg(), NEW_STATE).AndReturn(True)
-        self.mox.ReplayAll()
-        self.frontend.register_dispatcher(FAKE_DISPATCHER_URI1)
-        self.assertTrue(self.frontend.set_power_via_rpm(
-                DUT_HOSTNAME, POWERUNIT_HOSTNAME, OUTLET, '', NEW_STATE,
-        ))
-        self.frontend.unregister_dispatcher(FAKE_DISPATCHER_URI1)
-        self.assertRaises(
-                RPMInfrastructureException,
-                self.frontend.set_power_via_rpm,
-                DUT_HOSTNAME, POWERUNIT_HOSTNAME, OUTLET, '', NEW_STATE,
-        )
-        self.mox.VerifyAll()
-
-
-    def testUnreachableDispatcherServer(self):
-        """
-        Make sure that if the dispatch server is unreachable and raises an
-        error, we retry the call, which fails because no other servers are
-        available.
-
-        The call to queue_request raises a socket.error; the frontend then
-        raises an exception since no other dispatch servers are available.
-        """
-        self.xmlrpc_mock.queue_request(
-                mox.IgnoreArg(), NEW_STATE).AndRaise(
-                socket.error(FAKE_ERRNO, UNREACHABLE_SERVER_MSG))
-        frontend_server.xmlrpclib.ServerProxy(
-                FAKE_DISPATCHER_URI1,
-                allow_none=True).AndReturn(
-                self.xmlrpc_mock)
-        self.xmlrpc_mock.is_up().AndRaise(
-                socket.error(FAKE_ERRNO, UNREACHABLE_SERVER_MSG))
-        self.mox.ReplayAll()
-        self.frontend.register_dispatcher(FAKE_DISPATCHER_URI1)
-        self.assertRaises(
-                RPMInfrastructureException,
-                self.frontend.set_power_via_rpm,
-                DUT_HOSTNAME, POWERUNIT_HOSTNAME, OUTLET, '', NEW_STATE,
-        )
-        self.mox.VerifyAll()
-
-
-    def testUnreachableDispatcherServerWithBackup(self):
-        """
-        Make sure that if the dispatch server is unreachable and raises an
-        error, we retry the call with a different dispatch server (if one is
-        available).
-
-        The first call to queue_request raises a socket.error; the frontend
-        then makes a second attempt, which succeeds.
-        """
-        self.xmlrpc_mock.queue_request(
-                mox.IgnoreArg(), NEW_STATE).AndRaise(
-                socket.error(FAKE_ERRNO, UNREACHABLE_SERVER_MSG))
-        frontend_server.xmlrpclib.ServerProxy(
-                mox.IgnoreArg(), allow_none=True).MultipleTimes(3).AndReturn(
-                        self.xmlrpc_mock)
-        self.xmlrpc_mock.is_up().AndRaise(
-                socket.error(FAKE_ERRNO, UNREACHABLE_SERVER_MSG))
-        self.xmlrpc_mock.is_up().AndReturn(True)
-        self.xmlrpc_mock.queue_request(
-                mox.IgnoreArg(), NEW_STATE).AndReturn(True)
-        self.mox.ReplayAll()
-        self.frontend.register_dispatcher(FAKE_DISPATCHER_URI1)
-        self.frontend.register_dispatcher(FAKE_DISPATCHER_URI2)
-        self.assertTrue(self.frontend.set_power_via_rpm(
-                DUT_HOSTNAME, POWERUNIT_HOSTNAME, OUTLET, '', NEW_STATE,
-        ))
-        self.mox.VerifyAll()
-
-
-if __name__ == '__main__':
-    unittest.main()
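
For reference, the mox-based test above could be approximated with unittest.mock in Python 3. This is only a hedged sketch of the mocking pattern, not a drop-in replacement for the deleted suite (module paths and behavior are assumed from the code shown above):

import socket
import unittest
from unittest import mock

class FakeFrontendTest(unittest.TestCase):
    def test_queue_request_socket_error(self):
        # Stand-in dispatcher proxy whose queue_request raises socket.error.
        proxy = mock.MagicMock()
        proxy.queue_request.side_effect = socket.error(1, 'Server Unreachable')
        # Patching the proxy in frontend_server would look roughly like:
        #   with mock.patch('frontend_server.xmlrpclib.ServerProxy',
        #                   return_value=proxy):
        #       ...
        with self.assertRaises(socket.error):
            proxy.queue_request({}, 'ON')

if __name__ == '__main__':
    unittest.main()
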
diff --git a/site_utils/rpm_control_system/rpm_client.py b/site_utils/rpm_control_system/rpm_client.py
index 548360b..05bfcd2 100755
--- a/site_utils/rpm_control_system/rpm_client.py
+++ b/site_utils/rpm_control_system/rpm_client.py
@@ -1,40 +1,35 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import division
+from __future__ import print_function
+
 import argparse
 import logging
 import sys
-import xmlrpclib
+import six.moves.xmlrpc_client
 
 import common
 
-from config import rpm_config
-from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.common_lib.cros import retry
+from autotest_lib.site_utils.rpm_control_system import rpm_constants
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     from autotest_lib.client.bin.utils import metrics_mock as metrics
 
-RPM_FRONTEND_URI = global_config.global_config.get_config_value('CROS',
-        'rpm_frontend_uri', type=str, default='')
-RPM_CALL_TIMEOUT_MINS = rpm_config.getint('RPM_INFRASTRUCTURE',
-                                          'call_timeout_mins')
-
-POWERUNIT_HOSTNAME_KEY = 'powerunit_hostname'
-POWERUNIT_OUTLET_KEY = 'powerunit_outlet'
-HYDRA_HOSTNAME_KEY = 'hydra_hostname'
-
 
 class RemotePowerException(Exception):
     """This is raised when we fail to set the state of the device's outlet."""
     pass
 
 
-def set_power(host, new_state, timeout_mins=RPM_CALL_TIMEOUT_MINS):
+def set_power(host,
+              new_state,
+              timeout_mins=rpm_constants.RPM_CALL_TIMEOUT_MINS):
     """Sends the power state change request to the RPM Infrastructure.
 
     @param host: A CrosHost or ServoHost instance.
@@ -47,11 +42,12 @@
     else:
         info = host.host_info_store.get()
         try:
-            args_tuple = (host.hostname,
-                          info.attributes[POWERUNIT_HOSTNAME_KEY],
-                          info.attributes[POWERUNIT_OUTLET_KEY],
-                          info.attributes.get(HYDRA_HOSTNAME_KEY),
-                          new_state)
+            args_tuple = (
+                    host.hostname,
+                    info.attributes[rpm_constants.POWERUNIT_HOSTNAME_KEY],
+                    info.attributes[rpm_constants.POWERUNIT_OUTLET_KEY],
+                    info.attributes.get(rpm_constants.HYDRA_HOSTNAME_KEY),
+                    new_state)
         except KeyError as e:
             logging.warning('Powerunit information not found. Missing:'
                             ' %s in host_info_store.', e)
@@ -63,15 +59,14 @@
     _set_power(args_tuple, timeout_mins)
 
 
-def _set_power(args_tuple, timeout_mins=RPM_CALL_TIMEOUT_MINS):
+def _set_power(args_tuple, timeout_mins=rpm_constants.RPM_CALL_TIMEOUT_MINS):
     """Sends the power state change request to the RPM Infrastructure.
 
     @param args_tuple: An args tuple for the RPC call. See example below:
         (hostname, powerunit_hostname, outlet, hydra_hostname, new_state)
     """
-    client = xmlrpclib.ServerProxy(RPM_FRONTEND_URI,
-                                   verbose=False,
-                                   allow_none=True)
+    client = six.moves.xmlrpc_client.ServerProxy(
+            rpm_constants.RPM_FRONTEND_URI, verbose=False, allow_none=True)
     timeout = None
     result = None
     endpoint = (client.set_power_via_poe if len(args_tuple) == 2
@@ -83,11 +78,12 @@
                                         default_result=False)
     except Exception as e:
         logging.exception(e)
-        raise RemotePowerException(
-                'Client call exception (%s): %s' % (RPM_FRONTEND_URI, e))
+        raise RemotePowerException('Client call exception (%s): %s' %
+                                   (rpm_constants.RPM_FRONTEND_URI, e))
     if timeout:
         raise RemotePowerException(
-                'Call to RPM Infrastructure timed out (%s).' % RPM_FRONTEND_URI)
+                'Call to RPM Infrastructure timed out (%s).' %
+                rpm_constants.RPM_FRONTEND_URI)
     if not result:
         error_msg = ('Failed to change outlet status for host: %s to '
                      'state: %s.' % (args_tuple[0], args_tuple[-1]))
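
As the _set_power code above shows, the RPC endpoint is chosen by the shape of args_tuple: a two-element tuple goes to set_power_via_poe, otherwise to set_power_via_rpm. A small illustration of that dispatch rule (the tuples are made up; only the length rule is taken from the code above):

def pick_endpoint(args_tuple):
    """Mirror the length-based dispatch used by _set_power."""
    return 'set_power_via_poe' if len(args_tuple) == 2 else 'set_power_via_rpm'

# Servo-style request: (device_hostname, new_state).
print(pick_endpoint(('chromeos1-rack5-host12-servo', 'ON')))
# DUT-style request: (hostname, powerunit_hostname, outlet, hydra, new_state).
print(pick_endpoint(('host1', 'rpm1', '.A1', None, 'OFF')))
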
@@ -137,7 +133,7 @@
     if options.machine.endswith('servo'):
         args_tuple = (options.machine, options.state)
     elif not options.p_hostname or not options.outlet:
-        print "Powerunit hostname and outlet info are required for DUT."
+        print("Powerunit hostname and outlet info are required for DUT.")
         return
     else:
         args_tuple = (options.machine, options.p_hostname, options.outlet,
@@ -145,10 +141,12 @@
     _set_power(args_tuple)
 
     if options.disable_emails is not None:
-        client = xmlrpclib.ServerProxy(RPM_FRONTEND_URI, verbose=False)
+        client = six.moves.xmlrpc_client.ServerProxy(
+                rpm_constants.RPM_FRONTEND_URI, verbose=False)
         client.suspend_emails(options.disable_emails)
     if options.enable_emails:
-        client = xmlrpclib.ServerProxy(RPM_FRONTEND_URI, verbose=False)
+        client = six.moves.xmlrpc_client.ServerProxy(
+                rpm_constants.RPM_FRONTEND_URI, verbose=False)
         client.resume_emails()
 
 
diff --git a/site_utils/rpm_control_system/rpm_config.ini b/site_utils/rpm_control_system/rpm_config.ini
deleted file mode 100644
index 7a598b3..0000000
--- a/site_utils/rpm_control_system/rpm_config.ini
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-[GENERAL]
-debug: True
-logging_format: %%(asctime)s - %%(levelname)s - %%(message)s
-frontend_logname_format = frontend_server_log_%%Y%%m%%d_%%H%%M%%S.txt
-dispatcher_logname_format =  rpm_dispatcher_log_%%Y%%m%%d_%%H%%M%%S.txt
-email_subject_line_format: ** RPM INFRASTRUCTURE ISSUE on %%s **
-
-[CROS]
-# The zone that all Chrome OS devices are in if they are in a lab.
-dns_zone: cros.corp.google.com
-
-[RPM_INFRASTRUCTURE]
-# General RPM infrastructure information.
-frontend_port: 9999
-default_rpm_id = rpm1
-email_notification_recipients = chromeos-build-alerts+rpm-control-system@google.com
-call_timeout_mins: 15
-# Number of seconds for the call set_power_state to timeout. This is used
-# to guarantee that such call won't block the controller working thread.
-set_power_state_timeout_seconds = 120
-# Size of the LRU that holds power management unit information related
-# to a device, e.g. rpm_hostname, outlet, hydra_hostname, etc.
-lru_size = 1500
-
-[SENTRY]
-# Access information for Sentry Based Devices.
-username: admn
-password: admn
-testing_user: testing
-testing_password: g00gl3
-
-[WEBPOWERED]
-# Access information for Web Powered Devices.
-username: testing
-password: g00gl3
-
-[CiscoPOE]
-# Access information for POE Switches.
-username: root
-password: google
-servo_interface_mapping_file: servo_interface_mapping.csv
-
-[HYDRA]
-username: autotest
-password: s3rialsw
-admin_username: root
-admin_password: tslinux
diff --git a/site_utils/rpm_control_system/rpm_constants.py b/site_utils/rpm_control_system/rpm_constants.py
new file mode 100644
index 0000000..47a855d
--- /dev/null
+++ b/site_utils/rpm_control_system/rpm_constants.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.client.common_lib import global_config
+
+RPM_FRONTEND_URI = global_config.global_config.get_config_value(
+        'CROS', 'rpm_frontend_uri', type=str, default='')
+
+RPM_CALL_TIMEOUT_MINS = 15
+POWERUNIT_HOSTNAME_KEY = 'powerunit_hostname'
+POWERUNIT_OUTLET_KEY = 'powerunit_outlet'
+HYDRA_HOSTNAME_KEY = 'hydra_hostname'
diff --git a/site_utils/rpm_control_system/rpm_controller.py b/site_utils/rpm_control_system/rpm_controller.py
deleted file mode 100644
index 6edbeb9..0000000
--- a/site_utils/rpm_control_system/rpm_controller.py
+++ /dev/null
@@ -1,983 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import ctypes
-import datetime
-import logging
-import multiprocessing
-import os
-import pexpect
-import Queue
-import re
-import threading
-import time
-
-from config import rpm_config
-import dli_urllib
-import rpm_logging_config
-
-import common
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.common_lib.cros import retry
-
-RPM_CALL_TIMEOUT_MINS = rpm_config.getint('RPM_INFRASTRUCTURE',
-                                          'call_timeout_mins')
-SET_POWER_STATE_TIMEOUT_SECONDS = rpm_config.getint(
-        'RPM_INFRASTRUCTURE', 'set_power_state_timeout_seconds')
-PROCESS_TIMEOUT_BUFFER = 30
-
-
-class RPMController(object):
-    """
-    This abstract class implements RPM request queueing and
-    processes queued requests.
-
-    The actual interaction with the RPM device will be implemented
-    by the RPM specific subclasses.
-
-    It assumes that you know the RPM hostname and that the device is on
-    the specified RPM.
-
-    This class also allows support for RPM devices that can be accessed
-    directly or through a hydra serial concentrator device.
-
-    Implementation details:
-    This is an abstract class, subclasses must implement the methods
-    listed here. You must not instantiate this class but should
-    instantiate one of those leaf subclasses. Subclasses should
-    also set TYPE class attribute to indicate device type.
-
-    @var behind_hydra: boolean value to represent whether or not this RPM is
-                        behind a hydra device.
-    @var hostname: hostname for this rpm device.
-    @var is_running_lock: lock used to control access to _running.
-    @var request_queue: queue used to store requested outlet state changes.
-    @var queue_lock: lock used to control access to request_queue.
-    @var _running: boolean value to represent if this controller is currently
-                   looping over queued requests.
-    """
-
-
-    SSH_LOGIN_CMD = ('ssh -l %s -o StrictHostKeyChecking=no '
-                     '-o ConnectTimeout=90 -o UserKnownHostsFile=/dev/null %s')
-    USERNAME_PROMPT = 'Username:'
-    HYRDA_RETRY_SLEEP_SECS = 10
-    HYDRA_MAX_CONNECT_RETRIES = 3
-    LOGOUT_CMD = 'logout'
-    CLI_CMD = 'CLI'
-    CLI_HELD = r'The administrator \[root\] has an active .* session.'
-    CLI_KILL_PREVIOUS = 'cancel'
-    CLI_PROMPT = 'cli>'
-    HYDRA_PROMPT = '#'
-    PORT_STATUS_CMD = 'portStatus'
-    QUIT_CMD = 'quit'
-    SESSION_KILL_CMD_FORMAT = 'administration sessions kill %s'
-    HYDRA_CONN_HELD_MSG_FORMAT = 'is being used'
-    CYCLE_SLEEP_TIME = 5
-
-    # Global Variables that will likely be changed by subclasses.
-    DEVICE_PROMPT = '$'
-    PASSWORD_PROMPT = 'Password:'
-    # The state change command can be any string format but must accept 2 vars:
-    # state followed by device/Plug name.
-    SET_STATE_CMD = '%s %s'
-    SUCCESS_MSG = None # Some RPMs may not return a success message.
-
-    NEW_STATE_ON = 'ON'
-    NEW_STATE_OFF = 'OFF'
-    NEW_STATE_CYCLE = 'CYCLE'
-    TYPE = 'Should set TYPE in subclass.'
-
-
-    def __init__(self, rpm_hostname, hydra_hostname=None):
-        """
-        RPMController Constructor.
-        To be called by subclasses.
-
-        @param rpm_hostname: hostname of rpm device to be controlled.
-        """
-        self._dns_zone = rpm_config.get('CROS', 'dns_zone')
-        self.hostname = rpm_hostname
-        self.request_queue = Queue.Queue()
-        self._running = False
-        self.is_running_lock = threading.Lock()
-        # If a hydra name is provided by the subclass then we know we are
-        # talking to an rpm behind a hydra device.
-        self.hydra_hostname = hydra_hostname if hydra_hostname else None
-        self.behind_hydra = hydra_hostname is not None
-
-
-    def _start_processing_requests(self):
-        """
-        Check if there is a thread processing requests.
-        If not start one.
-        """
-        with self.is_running_lock:
-            if not self._running:
-                self._running = True
-                self._running_thread = threading.Thread(target=self._run)
-                self._running_thread.start()
-
-
-    def _stop_processing_requests(self):
-        """
-        Called if the request request_queue is empty.
-        Set running status to false.
-        Called when the request_queue is empty.
-        Sets the running status to False.
-            logging.debug('Request queue is empty. RPM Controller for %s'
-                          ' is terminating.', self.hostname)
-            self._running = False
-        if not self.request_queue.empty():
-            # This can occur if an item was pushed into the queue after we
-            # exited the while-check and before the _stop_processing_requests
-            # call was made. Therefore we need to start processing again.
-            self._start_processing_requests()
-
-
-    def _run(self):
-        """
-        Processes all queued up requests for this RPM Controller.
-        Callers should first queue up at least one request; if this
-        RPM Controller is not running, then call run.
-
-        The caller can either call run directly (and block until the queue
-        is drained) or start a new thread to process all queued requests.
-        For example:
-          threading.Thread(target=rpm_controller.run).start()
-
-        Requests are dictionaries of the form:
-          {powerunit_info, new_state, start_time, result_queue}
-        Run puts the result into each request's result_queue.
-        """
-        while not self.request_queue.empty():
-            try:
-                result = multiprocessing.Value(ctypes.c_bool, False)
-                request = self.request_queue.get()
-                device_hostname = request['powerunit_info'].device_hostname
-                if (datetime.datetime.utcnow() > (request['start_time'] +
-                        datetime.timedelta(minutes=RPM_CALL_TIMEOUT_MINS))):
-                    logging.error('The request waited too long to be '
-                                  "processed. It has timed out and won't be "
-                                  'processed.')
-                    request['result_queue'].put(False)
-                    continue
-
-                is_timeout = multiprocessing.Value(ctypes.c_bool, False)
-                process = multiprocessing.Process(target=self._process_request,
-                                                  args=(request, result,
-                                                        is_timeout))
-                process.start()
-                process.join(SET_POWER_STATE_TIMEOUT_SECONDS +
-                             PROCESS_TIMEOUT_BUFFER)
-                if process.is_alive():
-                    logging.debug('%s: process (%s) still running, will be '
-                                  'terminated!', device_hostname, process.pid)
-                    process.terminate()
-                    is_timeout.value = True
-
-                if is_timeout.value:
-                    raise error.TimeoutException(
-                            'Attempt to set power state timed out after %s '
-                            'seconds.' % SET_POWER_STATE_TIMEOUT_SECONDS)
-                if not result.value:
-                    logging.error('Request to change %s to state %s failed.',
-                                  device_hostname, request['new_state'])
-            except Exception as e:
-                logging.error('Request to change %s to state %s failed: '
-                              'Raised exception: %s', device_hostname,
-                              request['new_state'], e)
-                result.value = False
-
-            # Put result inside the result Queue to allow the caller to resume.
-            request['result_queue'].put(result.value)
-        self._stop_processing_requests()
-
-
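
The loop above isolates each set_power_state call in a multiprocessing.Process, joins it with a timeout, terminates it if it is still alive, and reads the outcome back through shared Values. A stripped-down sketch of that pattern (slow_task and the timeout are placeholders):

import ctypes
import multiprocessing
import time

def slow_task(result):
    """Placeholder for set_power_state; pretends to do slow work."""
    time.sleep(0.1)
    result.value = True

if __name__ == '__main__':
    result = multiprocessing.Value(ctypes.c_bool, False)
    proc = multiprocessing.Process(target=slow_task, args=(result,))
    proc.start()
    proc.join(timeout=2)            # Wait up to 2 seconds for completion.
    timed_out = proc.is_alive()
    if timed_out:
        proc.terminate()            # Kill the worker if it overran the budget.
    print('timed out:', timed_out, 'result:', result.value)
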
-    def _process_request(self, request, result, is_timeout):
-        """Process the request to change a device's outlet state.
-
-        The call to set_power_state is made in a new process. If it
-        takes longer than SET_POWER_STATE_TIMEOUT_SECONDS, the request
-        times out.
-
-        @param request: A request to change a device's outlet state.
-        @param result: A Value object passed to the new process for the caller
-                       thread to retrieve the result.
-        @param is_timeout: A Value object passed to the new process for the
-                           caller thread to retrieve the information about if
-                           the set_power_state call timed out.
-        """
-        try:
-            logging.getLogger().handlers = []
-            is_timeout_value, result_value = retry.timeout(
-                     rpm_logging_config.set_up_logging_to_server,
-                     timeout_sec=10)
-            if is_timeout_value:
-                raise Exception('Setup local log server handler timed out.')
-        except Exception as e:
-            # Fail over to log to a new file.
-            LOG_FILENAME_FORMAT = rpm_config.get('GENERAL',
-                                                 'dispatcher_logname_format')
-            log_filename_format = LOG_FILENAME_FORMAT.replace(
-                    'dispatcher', 'controller_%d' % os.getpid())
-            logging.getLogger().handlers = []
-            rpm_logging_config.set_up_logging_to_file(
-                    log_dir='./logs',
-                    log_filename_format=log_filename_format)
-            logging.info('Failed to set up logging through log server: %s', e)
-        kwargs = {'powerunit_info':request['powerunit_info'],
-                  'new_state':request['new_state']}
-        try:
-            is_timeout_value, result_value = retry.timeout(
-                    self.set_power_state,
-                    args=(),
-                    kwargs=kwargs,
-                    timeout_sec=SET_POWER_STATE_TIMEOUT_SECONDS)
-            result.value = result_value
-            is_timeout.value = is_timeout_value
-        except Exception as e:
-            # This method runs in a subprocess. Must log the exception,
-            # otherwise exceptions raised in set_power_state just get lost.
-            # Need to convert e to a str type, because our logging server
-            # code doesn't handle the conversion very well.
-            logging.error('Request to change %s to state %s failed: '
-                          'Raised exception: %s',
-                          request['powerunit_info'].device_hostname,
-                          request['new_state'], str(e))
-            raise e
-
-
-    def queue_request(self, powerunit_info, new_state):
-        """
-        Queues up a requested state change for a device's outlet.
-
-        Requests are dictionaries of the form:
-          {powerunit_info, new_state, start_time, result_queue}
-        The processing thread puts the result into result_queue.
-
-        @param powerunit_info: A PowerUnitInfo instance.
-        @param new_state: ON/OFF/CYCLE - state or action we want to perform on
-                          the outlet.
-        """
-        request = {}
-        request['powerunit_info'] = powerunit_info
-        request['new_state'] = new_state
-        request['start_time'] = datetime.datetime.utcnow()
-        # Reserve a spot for the result to be stored.
-        request['result_queue'] = Queue.Queue()
-        # Place in request_queue
-        self.request_queue.put(request)
-        self._start_processing_requests()
-        # Block until the request is processed.
-        result = request['result_queue'].get(block=True)
-        return result
-
-
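
queue_request above hands each request its own result queue so the caller can block until the worker thread publishes the outcome. A minimal sketch of that handoff using the Python 3 queue module (queue names and the worker body are illustrative):

import queue
import threading

request_queue = queue.Queue()

def worker():
    """Drain requests and publish a result on each request's own queue."""
    while True:
        request = request_queue.get()
        request['result_queue'].put(request['new_state'] == 'ON')

threading.Thread(target=worker, daemon=True).start()

request = {'new_state': 'ON', 'result_queue': queue.Queue()}
request_queue.put(request)
print(request['result_queue'].get(block=True))  # Blocks until worker replies.
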
-    def _kill_previous_connection(self):
-        """
-        In case the port to the RPM through the hydra serial concentrator is in
-        use, terminate the previous connection so we can log into the RPM.
-
-        It logs into the hydra serial concentrator over ssh, launches the CLI
-        command, gets the port number and then kills the current session.
-        """
-        ssh = self._authenticate_with_hydra(admin_override=True)
-        if not ssh:
-            return
-        ssh.expect(RPMController.PASSWORD_PROMPT, timeout=60)
-        ssh.sendline(rpm_config.get('HYDRA', 'admin_password'))
-        ssh.expect(RPMController.HYDRA_PROMPT)
-        ssh.sendline(RPMController.CLI_CMD)
-        cli_prompt_re = re.compile(RPMController.CLI_PROMPT)
-        cli_held_re = re.compile(RPMController.CLI_HELD)
-        response = ssh.expect_list([cli_prompt_re, cli_held_re], timeout=60)
-        if response == 1:
-            # Need to kill the previous administrator's session.
-            logging.error("Need to disconnect previous administrator's CLI "
-                          "session to release the connection to RPM device %s.",
-                          self.hostname)
-            ssh.sendline(RPMController.CLI_KILL_PREVIOUS)
-            ssh.expect(RPMController.CLI_PROMPT)
-        ssh.sendline(RPMController.PORT_STATUS_CMD)
-        ssh.expect(': %s' % self.hostname)
-        ports_status = ssh.before
-        port_number = ports_status.split(' ')[-1]
-        ssh.expect(RPMController.CLI_PROMPT)
-        ssh.sendline(RPMController.SESSION_KILL_CMD_FORMAT % port_number)
-        ssh.expect(RPMController.CLI_PROMPT)
-        self._logout(ssh, admin_logout=True)
-
-
-    def _hydra_login(self, ssh):
-        """
-        Perform the extra steps required to log into a hydra serial
-        concentrator.
-
-        @param ssh: pexpect.spawn object used to communicate with the hydra
-                    serial concentrator.
-
-        @return: True if the login procedure is successful. False if an error
-                 occurred. The most common case would be if another user is
-                 logged into the device.
-        """
-        try:
-            response = ssh.expect_list(
-                    [re.compile(RPMController.PASSWORD_PROMPT),
-                     re.compile(RPMController.HYDRA_CONN_HELD_MSG_FORMAT)],
-                    timeout=15)
-        except pexpect.TIMEOUT:
-            # If there was a timeout, this ssh tunnel could be set up to
-            # not require the hydra password.
-            ssh.sendline('')
-            try:
-                ssh.expect(re.compile(RPMController.USERNAME_PROMPT))
-                logging.debug('Connected to rpm through hydra. Logging in.')
-                return True
-            except pexpect.ExceptionPexpect:
-                return False
-        if response == 0:
-            try:
-                ssh.sendline(rpm_config.get('HYDRA','password'))
-                ssh.sendline('')
-                response = ssh.expect_list(
-                        [re.compile(RPMController.USERNAME_PROMPT),
-                         re.compile(RPMController.HYDRA_CONN_HELD_MSG_FORMAT)],
-                        timeout=60)
-            except pexpect.EOF:
-                # Did not receive any of the expect responses, retry.
-                return False
-            except pexpect.TIMEOUT:
-                logging.debug('Timeout occurred logging in to hydra.')
-                return False
-        # Send the username that the subclass will have set in its
-        # construction.
-        if response == 1:
-            logging.debug('SSH Terminal most likely serving another'
-                          ' connection, retrying.')
-            # Kill the connection for the next connection attempt.
-            try:
-                self._kill_previous_connection()
-            except pexpect.ExceptionPexpect:
-                logging.error('Failed to disconnect previous connection, '
-                              'retrying.')
-                raise
-            return False
-        logging.debug('Connected to rpm through hydra. Logging in.')
-        return True
-
-
-    def _authenticate_with_hydra(self, admin_override=False):
-        """
-        Some RPMs are behind a hydra serial concentrator and require their ssh
-        connection to be tunneled through this device. This can fail if another
-        user is logged in; therefore this will retry multiple times.
-
-        This function also allows us to authenticate directly to the
-        administrator interface of the hydra device.
-
-        @param admin_override: Set to True if we are trying to access the
-                               administrator interface rather than tunnel
-                               through to the RPM.
-
-        @return: The connected pexpect.spawn instance if the login procedure is
-                 successful. None if an error occurred. The most common case
-                 would be if another user is logged into the device.
-        """
-        if admin_override:
-            username = rpm_config.get('HYDRA', 'admin_username')
-        else:
-            username = '%s:%s' % (rpm_config.get('HYDRA','username'),
-                                  self.hostname)
-        cmd = RPMController.SSH_LOGIN_CMD % (username, self.hydra_hostname)
-        num_attempts = 0
-        while num_attempts < RPMController.HYDRA_MAX_CONNECT_RETRIES:
-            try:
-                ssh = pexpect.spawn(cmd)
-            except pexpect.ExceptionPexpect:
-                return None
-            if admin_override:
-                return ssh
-            if self._hydra_login(ssh):
-                return ssh
-            # Authenticating with hydra failed. Sleep then retry.
-            time.sleep(RPMController.HYRDA_RETRY_SLEEP_SECS)
-            num_attempts += 1
-        logging.error('Failed to connect to the hydra serial concentrator after'
-                      ' %d attempts.', RPMController.HYDRA_MAX_CONNECT_RETRIES)
-        return None
-
-
-    def _login(self):
-        """
-        Log in into the RPM Device.
-
-        The login process should be able to connect to the device whether or not
-        it is behind a hydra serial concentrator.
-
-        @return: ssh - a pexpect.spawn instance if the connection was successful
-                 or None if it was not.
-        """
-        if self.behind_hydra:
-            # Tunnel the connection through the hydra.
-            ssh = self._authenticate_with_hydra()
-            if not ssh:
-                return None
-            ssh.sendline(self._username)
-        else:
-            # Connect directly to the RPM over SSH.
-            hostname = '%s.%s' % (self.hostname, self._dns_zone)
-            cmd = RPMController.SSH_LOGIN_CMD % (self._username, hostname)
-            try:
-                ssh = pexpect.spawn(cmd)
-            except pexpect.ExceptionPexpect:
-                return None
-        # Wait for the password prompt
-        try:
-            ssh.expect(self.PASSWORD_PROMPT, timeout=60)
-            ssh.sendline(self._password)
-            ssh.expect(self.DEVICE_PROMPT, timeout=60)
-        except pexpect.ExceptionPexpect:
-            return None
-        return ssh
-
-
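
The login helpers above drive the devices through pexpect: spawn an ssh command, expect a prompt, send a line, repeat. A hedged sketch of that expect/sendline loop (host, prompts, and credentials are placeholders, and no retry logic is shown):

import pexpect

def ssh_login(hostname, username, password, prompt=r'\$'):
    """Spawn ssh and walk through the password prompt with expect/sendline."""
    ssh = pexpect.spawn('ssh -o StrictHostKeyChecking=no -l %s %s'
                        % (username, hostname))
    try:
        ssh.expect('[Pp]assword:', timeout=60)
        ssh.sendline(password)
        ssh.expect(prompt, timeout=60)      # Wait for the device shell prompt.
    except pexpect.ExceptionPexpect:
        ssh.close(force=True)
        return None
    return ssh
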
-    def _logout(self, ssh, admin_logout=False):
-        """
-        Log out of the RPM device.
-
-        Send the device-specific logout command and, if the connection is
-        through a hydra serial concentrator, kill the ssh connection.
-
-        @param admin_logout: Set to True if we are trying to logout of the
-                             administrator interface of a hydra serial
-                             concentrator, rather than an RPM.
-        @param ssh: pexpect.spawn instance to use to send the logout command.
-        """
-        if admin_logout:
-            ssh.sendline(RPMController.QUIT_CMD)
-            ssh.expect(RPMController.HYDRA_PROMPT)
-        ssh.sendline(self.LOGOUT_CMD)
-        if self.behind_hydra and not admin_logout:
-            # Terminate the hydra session.
-            ssh.sendline('~.')
-            # Wait a bit so hydra disconnects completely. Launching another
-            # request immediately can cause a timeout.
-            time.sleep(5)
-
-
-    def set_power_state(self, powerunit_info, new_state):
-        """
-        Set the state of the dut's outlet on this RPM.
-
-        For ssh based devices, this will create the connection either directly
-        or through a hydra tunnel and call the underlying _change_state function
-        to be implemented by the subclass device.
-
-        For non-ssh based devices, this method should be overloaded with the
-        proper connection and state change code. And the subclass will handle
-        accessing the RPM devices.
-
-        @param powerunit_info: An instance of PowerUnitInfo.
-        @param new_state: ON/OFF/CYCLE - state or action we want to perform on
-                          the outlet.
-
-        @return: True if the attempt to change power state was successful,
-                 False otherwise.
-        """
-        ssh = self._login()
-        if not ssh:
-            return False
-        if new_state == self.NEW_STATE_CYCLE:
-            logging.debug('Beginning Power Cycle for device: %s',
-                          powerunit_info.device_hostname)
-            result = self._change_state(powerunit_info, self.NEW_STATE_OFF, ssh)
-            if not result:
-                return result
-            time.sleep(RPMController.CYCLE_SLEEP_TIME)
-            result = self._change_state(powerunit_info, self.NEW_STATE_ON, ssh)
-        else:
-            # Try to change the state of the device's power outlet.
-            result = self._change_state(powerunit_info, new_state, ssh)
-
-        # Terminate hydra connection if necessary.
-        self._logout(ssh)
-        ssh.close(force=True)
-        return result
-
-
-    def _change_state(self, powerunit_info, new_state, ssh):
-        """
-        Perform the actual state change operation.
-
-        Once we have established communication with the RPM this method is
-        responsible for changing the state of the RPM outlet.
-
-        @param powerunit_info: An instance of PowerUnitInfo.
-        @param new_state: ON/OFF - state or action we want to perform on
-                          the outlet.
-        @param ssh: The ssh connection used to execute the state change commands
-                    on the RPM device.
-
-        @return: True if the attempt to change power state was successful,
-                 False otherwise.
-        """
-        outlet = powerunit_info.outlet
-        device_hostname = powerunit_info.device_hostname
-        if not outlet:
-            logging.error('Request to change outlet for device: %s to new '
-                          'state %s failed: outlet is unknown, please '
-                          'make sure POWERUNIT_OUTLET exists in the host\'s '
-                          'attributes in the AFE.', device_hostname, new_state)
-        ssh.sendline(self.SET_STATE_CMD % (new_state, outlet))
-        if self.SUCCESS_MSG:
-            # If this RPM device returns a success message check for it before
-            # continuing.
-            try:
-                ssh.expect(self.SUCCESS_MSG, timeout=60)
-            except pexpect.ExceptionPexpect:
-                logging.error('Request to change outlet for device: %s to new '
-                              'state %s failed.', device_hostname, new_state)
-                return False
-        logging.debug('Outlet for device: %s set to %s', device_hostname,
-                      new_state)
-        return True
-
-
-    def type(self):
-        """
-        Get the type of RPM device we are interacting with.
-        Class attribute TYPE should be set by the subclasses.
-
-        @return: string representation of RPM device type.
-        """
-        return self.TYPE
-
-
-class SentryRPMController(RPMController):
-    """
-    This class implements power control for Sentry Switched CDU
-    http://www.servertech.com/products/switched-pdus/
-
-    Example usage:
-      rpm = SentrySwitchedCDU('chromeos-rack1-rpm1')
-      rpm.queue_request('chromeos-rack1-host1', 'ON')
-
-    @var _username: username used to access device.
-    @var _password: password used to access device.
-    """
-
-    DEVICE_PROMPT = ['Switched CDU:', 'Switched PDU:']
-    SET_STATE_CMD = '%s %s'
-    SUCCESS_MSG = 'Command successful'
-    NUM_OF_OUTLETS = 17
-    TYPE = 'Sentry'
-
-
-    def __init__(self, hostname, hydra_hostname=None):
-        super(SentryRPMController, self).__init__(hostname, hydra_hostname)
-        self._username = rpm_config.get('SENTRY', 'username')
-        self._password = rpm_config.get('SENTRY', 'password')
-
-
-    def _setup_test_user(self, ssh):
-        """Configure the test user for the RPM
-
-        @param ssh: Pexpect object to use to configure the RPM.
-        """
-        # Create and configure the testing user profile.
-        testing_user = rpm_config.get('SENTRY','testing_user')
-        testing_password = rpm_config.get('SENTRY','testing_password')
-        ssh.sendline('create user %s' % testing_user)
-        response = ssh.expect_list([re.compile('not unique'),
-                                    re.compile(self.PASSWORD_PROMPT)])
-        if not response:
-            return
-        # Testing user is not set up yet.
-        ssh.sendline(testing_password)
-        ssh.expect('Verify Password:')
-        ssh.sendline(testing_password)
-        ssh.expect(self.SUCCESS_MSG)
-        ssh.expect(self.DEVICE_PROMPT)
-        ssh.sendline('add outlettouser all %s' % testing_user)
-        ssh.expect(self.SUCCESS_MSG)
-        ssh.expect(self.DEVICE_PROMPT)
-
-
-    def _clear_outlet_names(self, ssh):
-        """
-        Before setting the outlet names, we need to clear out all the old
-        names so there are no conflicts. For example trying to assign outlet
-        2 a name already assigned to outlet 9.
-        """
-        for outlet in range(1, self.NUM_OF_OUTLETS):
-            outlet_name = 'Outlet_%d' % outlet
-            ssh.sendline(self.SET_OUTLET_NAME_CMD % (outlet, outlet_name))
-            ssh.expect(self.SUCCESS_MSG)
-            ssh.expect(self.DEVICE_PROMPT)
-
-
-    def setup(self, outlet_naming_map):
-        """
-        Configure the RPM by adding the test user and setting up the outlet
-        names.
-
-        Note the rpm infrastructure no longer relies on the outlet name to
-        map a device to its outlet. We keep this method in case there is
-        a need to label outlets for other reasons, and may deprecate
-        this method once it is clear the outlet names are not used
-        in any scenario.
-
-        @param outlet_naming_map: Dictionary used to map the outlet numbers to
-                                  host names. Keys must be ints. And names are
-                                  in the format of 'hostX'.
-
-        @return: True if setup completed successfully, False otherwise.
-        """
-        ssh = self._login()
-        if not ssh:
-            logging.error('Could not connect to %s.', self.hostname)
-            return False
-        try:
-            self._setup_test_user(ssh)
-            # Set up the outlet names.
-            # Hosts have the same name format as the RPM hostname except they
-            # end in hostX instead of rpmX.
-            dut_name_format = re.sub('-rpm[0-9]*', '', self.hostname)
-            if self.behind_hydra:
-                # Remove "chromeosX" from DUTs behind the hydra due to a length
-                # constraint on the names we can store inside the RPM.
-                dut_name_format = re.sub('chromeos[0-9]*-', '', dut_name_format)
-            dut_name_format = dut_name_format + '-%s'
-            self._clear_outlet_names(ssh)
-            for outlet, name in outlet_naming_map.items():
-                dut_name = dut_name_format % name
-                ssh.sendline(self.SET_OUTLET_NAME_CMD % (outlet, dut_name))
-                ssh.expect(self.SUCCESS_MSG)
-                ssh.expect(self.DEVICE_PROMPT)
-        except pexpect.ExceptionPexpect as e:
-            logging.error('Setup failed. %s', e)
-            return False
-        finally:
-            self._logout(ssh)
-        return True
-
-
-class WebPoweredRPMController(RPMController):
-    """
-    This class implements RPMController for the Web Powered units
-    produced by Digital Loggers Inc.
-
-    @var _rpm: dli_urllib.Powerswitch instance used to interact with RPM.
-    """
-
-
-    TYPE = 'Webpowered'
-
-
-    def __init__(self, hostname, powerswitch=None):
-        username = rpm_config.get('WEBPOWERED', 'username')
-        password = rpm_config.get('WEBPOWERED', 'password')
-        # Call the constructor in RPMController. However, since this is a
-        # web-accessible device, there is no need to tunnel through a
-        # hydra serial concentrator.
-        super(WebPoweredRPMController, self).__init__(hostname)
-        self.hostname = '%s.%s' % (self.hostname, self._dns_zone)
-        if not powerswitch:
-            self._rpm = dli_urllib.Powerswitch(hostname=self.hostname,
-                                               userid=username,
-                                               password=password)
-        else:
-            # Should only be used in unit_testing
-            self._rpm = powerswitch
-
-
-    def _get_outlet_state(self, outlet):
-        """
-        Look up the state for a given outlet on the RPM.
-
-        @param outlet: the outlet to look up.
-
-        @return state: the outlet's current state.
-        """
-        status_list = self._rpm.statuslist()
-        for outlet_name, _, state in status_list:
-            if outlet_name == outlet:
-                return state
-        return None
-
-
-    def set_power_state(self, powerunit_info, new_state):
-        """
-        Since this does not use SSH in any manner, this overrides
-        set_power_state in RPMController and performs all steps of changing
-        the device's outlet state.
-        """
-        device_hostname = powerunit_info.device_hostname
-        outlet = powerunit_info.outlet
-        if not outlet:
-            logging.error('Request to change outlet for device %s to '
-                          'new state %s failed: outlet is unknown. Make sure '
-                          'POWERUNIT_OUTLET exists in the host\'s '
-                          'attributes in the AFE.', device_hostname, new_state)
-            return False
-        expected_state = new_state
-        if new_state == self.NEW_STATE_CYCLE:
-            logging.debug('Beginning Power Cycle for device: %s',
-                          device_hostname)
-            self._rpm.off(outlet)
-            logging.debug('Outlet for device: %s set to OFF', device_hostname)
-            # Pause for 5 seconds before restoring power.
-            time.sleep(RPMController.CYCLE_SLEEP_TIME)
-            self._rpm.on(outlet)
-            logging.debug('Outlet for device: %s set to ON', device_hostname)
-            expected_state = self.NEW_STATE_ON
-        if new_state == self.NEW_STATE_OFF:
-            self._rpm.off(outlet)
-            logging.debug('Outlet for device: %s set to OFF', device_hostname)
-        if new_state == self.NEW_STATE_ON:
-            self._rpm.on(outlet)
-            logging.debug('Outlet for device: %s set to ON', device_hostname)
-        # Lookup the final state of the outlet
-        return self._is_plug_state(powerunit_info, expected_state)
-
-
-    def _is_plug_state(self, powerunit_info, expected_state):
-        state = self._get_outlet_state(powerunit_info.outlet)
-        if expected_state not in state:
-            logging.error('Outlet for device: %s did not change to new state'
-                          ' %s', powerunit_info.device_hostname, expected_state)
-            return False
-        return True
-
-
-class CiscoPOEController(RPMController):
-    """
-    This class implements power control for Cisco POE switch.
-
-    Example usage:
-      poe = CiscoPOEController('chromeos1-poe-switch1')
-      poe.queue_request('chromeos1-rack5-host12-servo', 'ON')
-    """
-
-
-    SSH_LOGIN_CMD = ('ssh -o StrictHostKeyChecking=no '
-                     '-o UserKnownHostsFile=/dev/null %s')
-    POE_USERNAME_PROMPT = 'User Name:'
-    POE_PROMPT = '%s#'
-    EXIT_CMD = 'exit'
-    END_CMD = 'end'
-    CONFIG = 'configure terminal'
-    CONFIG_PROMPT = r'%s\(config\)#'
-    CONFIG_IF = 'interface %s'
-    CONFIG_IF_PROMPT = r'%s\(config-if\)#'
-    SET_STATE_ON = 'power inline auto'
-    SET_STATE_OFF = 'power inline never'
-    CHECK_INTERFACE_STATE = 'show interface status %s'
-    INTERFACE_STATE_MSG = r'Port\s+.*%s(\s+(\S+)){6,6}'
-    CHECK_STATE_TIMEOUT = 60
-    CMD_TIMEOUT = 30
-    LOGIN_TIMEOUT = 60
-    PORT_UP = 'Up'
-    PORT_DOWN = 'Down'
-    TYPE = 'CiscoPOE'
-
-
-    def __init__(self, hostname):
-        """
-        Initialize controller class for a Cisco POE switch.
-
-        @param hostname: the Cisco POE switch host name.
-        """
-        super(CiscoPOEController, self).__init__(hostname)
-        self._username = rpm_config.get('CiscoPOE', 'username')
-        self._password = rpm_config.get('CiscoPOE', 'password')
-        # For a switch, e.g. 'chromeos2-poe-switch8',
-        # the device prompt looks like 'chromeos2-poe-sw8#'.
-        short_hostname = self.hostname.replace('switch', 'sw')
-        self.poe_prompt = self.POE_PROMPT % short_hostname
-        self.config_prompt = self.CONFIG_PROMPT % short_hostname
-        self.config_if_prompt = self.CONFIG_IF_PROMPT % short_hostname
-
-
-    def _login(self):
-        """
-        Log in into the Cisco POE switch.
-
-        Overload _login in RPMController, as it always prompts for a user name.
-
-        @return: ssh - a pexpect.spawn instance if the connection was successful
-                 or None if it was not.
-        """
-        hostname = '%s.%s' % (self.hostname, self._dns_zone)
-        cmd = self.SSH_LOGIN_CMD % (hostname)
-        try:
-            ssh = pexpect.spawn(cmd)
-        except pexpect.ExceptionPexpect:
-            logging.error('Could not connect to switch %s', hostname)
-            return None
-        # Wait for the username and password prompt.
-        try:
-            ssh.expect(self.POE_USERNAME_PROMPT, timeout=self.LOGIN_TIMEOUT)
-            ssh.sendline(self._username)
-            ssh.expect(self.PASSWORD_PROMPT, timeout=self.LOGIN_TIMEOUT)
-            ssh.sendline(self._password)
-            ssh.expect(self.poe_prompt, timeout=self.LOGIN_TIMEOUT)
-        except pexpect.ExceptionPexpect:
-            logging.error('Could not log into switch %s', hostname)
-            return None
-        return ssh
-
-
-    def _enter_configuration_terminal(self, interface, ssh):
-        """
-        Enter configuration terminal of |interface|.
-
-        This function expects that we've already logged into the switch
-        This function expects that we've already logged into the switch
-        and the ssh session is showing the switch prompt. The workflow is
-            chromeos1-poe-sw1#configure terminal
-            chromeos1-poe-sw1(config)#interface fa36
-            chromeos1-poe-sw1(config-if)#
-        On success, the function exits with 'config-if' prompt.
-        On failure, the function exits with device prompt,
-        e.g. 'chromeos1-poe-sw1#' in the above case.
-
-        @param interface: the name of the interface.
-        @param ssh: pexpect.spawn instance to use.
-
-        @return: True on success otherwise False.
-        """
-        try:
-            # Enter configure terminal.
-            ssh.sendline(self.CONFIG)
-            ssh.expect(self.config_prompt, timeout=self.CMD_TIMEOUT)
-            # Enter configure terminal of the interface.
-            ssh.sendline(self.CONFIG_IF % interface)
-            ssh.expect(self.config_if_prompt, timeout=self.CMD_TIMEOUT)
-            return True
-        except pexpect.ExceptionPexpect, e:
-            ssh.sendline(self.END_CMD)
-            logging.exception(e)
-        return False
-
-
-    def _exit_configuration_terminal(self, ssh):
-        """
-        Exit interface configuration terminal.
-
-        On success, the function exits with the device prompt,
-        e.g. 'chromeos1-poe-sw1#' in the above case.
-        On failure, the function exits with the 'config-if' prompt.
-
-        @param ssh: pexpect.spawn instance to use.
-
-        @return: True on success otherwise False.
-        """
-        try:
-            ssh.sendline(self.END_CMD)
-            ssh.expect(self.poe_prompt, timeout=self.CMD_TIMEOUT)
-            return True
-        except pexpect.ExceptionPexpect, e:
-            logging.exception(e)
-        return False
-
-
-    def _verify_state(self, interface, expected_state, ssh):
-        """
-        Check whether the current state of |interface| matches expected state.
-
-        This function checks the state of |interface| multiple times until
-        its state matches the expected state or the timeout expires.
-
-        After the state-change command has been executed, the state of an
-        interface doesn't always change immediately to the expected state
-        but requires some time. As such, we need retry logic here.
-
-        @param interface: the name of the interface.
-        @param expected_state: the expected state, 'ON' or 'OFF'.
-        @param ssh: pexpect.spawn instance to use.
-
-        @return: True if the state of |interface| switches to |expected_state|,
-                 otherwise False.
-        """
-        expected_state = (self.PORT_UP if expected_state == self.NEW_STATE_ON
-                          else self.PORT_DOWN)
-        try:
-            start = time.time()
-            while((time.time() - start) < self.CHECK_STATE_TIMEOUT):
-                ssh.sendline(self.CHECK_INTERFACE_STATE % interface)
-                state_matcher = '.*'.join([self.INTERFACE_STATE_MSG % interface,
-                                           self.poe_prompt])
-                ssh.expect(state_matcher, timeout=self.CMD_TIMEOUT)
-                state = ssh.match.group(2)
-                if state == expected_state:
-                    return True
-        except pexpect.ExceptionPexpect, e:
-            logging.exception(e)
-        return False
-
-
-    def _logout(self, ssh, admin_logout=False):
-        """
-        Log out of the Cisco POE switch after changing state.
-
-        Overload _logout in RPMController.
-
-        @param ssh: pexpect.spawn instance to use to send the logout command.
-        @param admin_logout: ignored by this method.
-        """
-        ssh.sendline(self.EXIT_CMD)
-
-
-    def _change_state(self, powerunit_info, new_state, ssh):
-        """
-        Perform the actual state change operation.
-
-        Overload _change_state in RPMController.
-
-        @param powerunit_info: A PowerUnitInfo instance.
-        @param new_state: ON/OFF - state or action we want to perform on
-                          the outlet.
-        @param ssh: The ssh connection used to execute the state change commands
-                    on the POE switch.
-
-        @return: True if the attempt to change power state was successful,
-                 False otherwise.
-        """
-        interface = powerunit_info.outlet
-        device_hostname = powerunit_info.device_hostname
-        if not interface:
-            logging.error('Could not change state: the interface on %s for %s '
-                          'was not given.', self.hostname, device_hostname)
-            return False
-
-        # Enter configuration terminal.
-        if not self._enter_configuration_terminal(interface, ssh):
-            logging.error('Could not enter configuration terminal for %s',
-                          interface)
-            return False
-        # Change the state.
-        if new_state == self.NEW_STATE_ON:
-            ssh.sendline(self.SET_STATE_ON)
-        elif new_state == self.NEW_STATE_OFF:
-            ssh.sendline(self.SET_STATE_OFF)
-        else:
-            logging.error('Unknown state request: %s', new_state)
-            return False
-        # Exit configuration terminal.
-        if not self._exit_configuration_terminal(ssh):
-            logging.error('Skipping outlet state verification for device: %s '
-                          'because we could not exit the configuration '
-                          'terminal.', device_hostname)
-            return False
-        # Verify if the state has changed successfully.
-        if not self._verify_state(interface, new_state, ssh):
-            logging.error('Could not verify state on interface %s', interface)
-            return False
-
-        logging.debug('Outlet for device: %s set to %s',
-                      device_hostname, new_state)
-        return True
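-
-
-# A minimal illustrative sketch (not called by the controller above) of how
-# the INTERFACE_STATE_MSG pattern used in _verify_state() pulls the link
-# state out of 'show interface status' output: group(2) captures the sixth
-# whitespace-separated field after the interface name. The sample switch
-# output in the example below is an assumed format, shown only to make the
-# capture groups concrete.
-def _example_parse_port_state(output, interface):
-    """Return the state field captured for |interface|, or None."""
-    # Local import keeps this sketch self-contained.
-    import re
-    pattern = CiscoPOEController.INTERFACE_STATE_MSG % interface
-    # pexpect compiles its patterns with re.DOTALL, so mirror that here.
-    match = re.search(pattern, output, re.DOTALL)
-    return match.group(2) if match else None
-
-# Example (assumed switch output):
-#   _example_parse_port_state(
-#           'Port   Type         Duplex  Speed  Neg      Ctrl  State\n'
-#           'fa32   100M-Copper  Full    100    Enabled  Off   Up', 'fa32')
-#   returns 'Up'.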
diff --git a/site_utils/rpm_control_system/rpm_controller_unittest.py b/site_utils/rpm_control_system/rpm_controller_unittest.py
deleted file mode 100755
index 9f704be..0000000
--- a/site_utils/rpm_control_system/rpm_controller_unittest.py
+++ /dev/null
@@ -1,271 +0,0 @@
-#!/usr/bin/python2
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import mox
-import pexpect
-import unittest
-
-import dli
-
-import rpm_controller
-
-import common
-from autotest_lib.site_utils.rpm_control_system import utils
-
-
-class TestRPMControllerQueue(mox.MoxTestBase):
-    """Test request can be queued and processed in controller."""
-
-    def setUp(self):
-        super(TestRPMControllerQueue, self).setUp()
-        self.rpm = rpm_controller.SentryRPMController('chromeos-rack1-host8')
-        self.powerunit_info = utils.PowerUnitInfo(
-                device_hostname='chromeos-rack1-host8',
-                powerunit_hostname='chromeos-rack1-rpm1',
-                powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                outlet='.A100',
-                hydra_hostname=None)
-
-
-    def testQueueRequest(self):
-        """Should create a new process to handle request."""
-        new_state = 'ON'
-        process = self.mox.CreateMockAnything()
-        rpm_controller.multiprocessing.Process = self.mox.CreateMockAnything()
-        rpm_controller.multiprocessing.Process(target=mox.IgnoreArg(),
-                args=mox.IgnoreArg()).AndReturn(process)
-        process.start()
-        process.join()
-        self.mox.ReplayAll()
-        self.assertFalse(self.rpm.queue_request(self.powerunit_info, new_state))
-        self.mox.VerifyAll()
-
-
-class TestSentryRPMController(mox.MoxTestBase):
-    """Test SentryRPMController."""
-
-
-    def setUp(self):
-        super(TestSentryRPMController, self).setUp()
-        self.ssh = self.mox.CreateMockAnything()
-        rpm_controller.pexpect.spawn = self.mox.CreateMockAnything()
-        rpm_controller.pexpect.spawn(mox.IgnoreArg()).AndReturn(self.ssh)
-        self.rpm = rpm_controller.SentryRPMController('chromeos-rack1-host8')
-        self.powerunit_info = utils.PowerUnitInfo(
-                device_hostname='chromeos-rack1-host8',
-                powerunit_hostname='chromeos-rack1-rpm1',
-                powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                outlet='.A100',
-                hydra_hostname=None)
-
-
-    def testSuccessfullyChangeOutlet(self):
-        """Should return True if change was successful."""
-        prompt = ['Switched CDU:', 'Switched PDU:']
-        password = 'admn'
-        new_state = 'ON'
-        self.ssh.expect('Password:', timeout=60)
-        self.ssh.sendline(password)
-        self.ssh.expect(prompt, timeout=60)
-        self.ssh.sendline('%s %s' % (new_state, self.powerunit_info.outlet))
-        self.ssh.expect('Command successful', timeout=60)
-        self.ssh.sendline('logout')
-        self.ssh.close(force=True)
-        self.mox.ReplayAll()
-        self.assertTrue(self.rpm.set_power_state(
-                self.powerunit_info, new_state))
-        self.mox.VerifyAll()
-
-
-    def testUnsuccessfullyChangeOutlet(self):
-        """Should return False if change was unsuccessful."""
-        prompt = ['Switched CDU:', 'Switched PDU:']
-        password = 'admn'
-        new_state = 'ON'
-        self.ssh.expect('Password:', timeout=60)
-        self.ssh.sendline(password)
-        self.ssh.expect(prompt, timeout=60)
-        self.ssh.sendline('%s %s' % (new_state, self.powerunit_info.outlet))
-        self.ssh.expect('Command successful',
-                        timeout=60).AndRaise(pexpect.TIMEOUT('Timed Out'))
-        self.ssh.sendline('logout')
-        self.ssh.close(force=True)
-        self.mox.ReplayAll()
-        self.assertFalse(
-            self.rpm.set_power_state(self.powerunit_info, new_state))
-        self.mox.VerifyAll()
-
-
-class TestWebPoweredRPMController(mox.MoxTestBase):
-    """Test WebPoweredRPMController."""
-
-
-    def setUp(self):
-        super(TestWebPoweredRPMController, self).setUp()
-        self.dli_ps = self.mox.CreateMock(dli.powerswitch)
-        hostname = 'chromeos-rack8a-rpm1'
-        self.web_rpm = rpm_controller.WebPoweredRPMController(hostname,
-                                                              self.dli_ps)
-        outlet = 8
-        dut = 'chromeos-rack8a-host8'
-        # Outlet statuses are in the format "u'ON'"
-        initial_state = 'u\'ON\''
-        self.test_status_list_initial = [[outlet, dut, initial_state]]
-        self.powerunit_info = utils.PowerUnitInfo(
-                device_hostname=dut,
-                powerunit_hostname=hostname,
-                powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                outlet=outlet,
-                hydra_hostname=None)
-
-
-    def testSuccessfullyChangeOutlet(self):
-        """Should return True if change was successful."""
-        test_status_list_final = [[8, 'chromeos-rack8a-host8', 'u\'OFF\'']]
-        self.dli_ps.off(8)
-        self.dli_ps.statuslist().AndReturn(test_status_list_final)
-        self.mox.ReplayAll()
-        self.assertTrue(self.web_rpm.set_power_state(
-                self.powerunit_info, 'OFF'))
-        self.mox.VerifyAll()
-
-
-    def testUnsuccessfullyChangeOutlet(self):
-        """Should return False if Outlet State does not change."""
-        test_status_list_final = [[8, 'chromeos-rack8a-host8', 'u\'ON\'']]
-        self.dli_ps.off(8)
-        self.dli_ps.statuslist().AndReturn(test_status_list_final)
-        self.mox.ReplayAll()
-        self.assertFalse(self.web_rpm.set_power_state(
-                self.powerunit_info, 'OFF'))
-        self.mox.VerifyAll()
-
-
-    def testNoOutlet(self):
-        """Should return False if DUT hostname is not on the RPM device."""
-        self.powerunit_info.outlet=None
-        self.assertFalse(self.web_rpm.set_power_state(
-                self.powerunit_info, 'OFF'))
-
-
-class TestCiscoPOEController(mox.MoxTestBase):
-    """Test CiscoPOEController."""
-
-    DEVICE = 'chromeos2-poe-sw8#'
-    MATCHER = 'Port\\s+.*%s(\\s+(\\S+)){6,6}.*%s'
-    PORT = 'fa32'
-    PWD = 'Password:'
-    SERVO = 'chromeos1-rack3-host12-servo'
-    SWITCH = 'chromeos2-poe-switch8'
-    POWERUNIT_INFO = utils.PowerUnitInfo(
-            device_hostname=SERVO,
-            powerunit_hostname=SWITCH,
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.POE,
-            outlet=PORT,
-            hydra_hostname=None)
-
-    def testLogin(self):
-        """Test we can log into the switch."""
-        rpm_controller.pexpect.spawn = self.mox.CreateMockAnything()
-        mock_ssh = self.mox.CreateMockAnything()
-        rpm_controller.pexpect.spawn(mox.IgnoreArg()).AndReturn(mock_ssh)
-        sut = rpm_controller.CiscoPOEController(self.SWITCH)
-        mock_ssh.expect(sut.POE_USERNAME_PROMPT, timeout=sut.LOGIN_TIMEOUT)
-        mock_ssh.sendline(mox.IgnoreArg())
-        mock_ssh.expect(self.PWD, timeout=sut.LOGIN_TIMEOUT)
-        mock_ssh.sendline(mox.IgnoreArg())
-        mock_ssh.expect(self.DEVICE, timeout=sut.LOGIN_TIMEOUT)
-        self.mox.ReplayAll()
-        self.assertIsNotNone(sut._login())
-        self.mox.VerifyAll()
-
-    def testSuccessfullyChangePowerState(self):
-        """Should return True if change was successful."""
-        sut = rpm_controller.CiscoPOEController(self.SWITCH)
-        mock_ssh = self.mox.CreateMockAnything()
-        self.mox.StubOutWithMock(sut, '_login')
-        sut._login().AndReturn(mock_ssh)
-        self.mox.StubOutWithMock(sut, '_verify_state')
-        sut._verify_state(self.PORT, 'ON', mock_ssh).AndReturn(True)
-        # _enter_configuration_terminal
-        mock_ssh.sendline(sut.CONFIG)
-        mock_ssh.expect(sut.config_prompt, timeout=sut.CMD_TIMEOUT)
-        mock_ssh.sendline(sut.CONFIG_IF % self.PORT)
-        mock_ssh.expect(sut.config_if_prompt, timeout=sut.CMD_TIMEOUT)
-        # _change_state
-        mock_ssh.sendline(sut.SET_STATE_ON)
-        # _exit_configuration_terminal
-        mock_ssh.sendline(sut.END_CMD)
-        mock_ssh.expect(sut.poe_prompt, timeout=sut.CMD_TIMEOUT)
-        # exit
-        mock_ssh.sendline(sut.EXIT_CMD)
-        mock_ssh.close(force=True)
-        self.mox.ReplayAll()
-        self.assertTrue(sut.set_power_state(self.POWERUNIT_INFO, 'ON'))
-        self.mox.VerifyAll()
-
-    def testUnableToEnterConfigurationTerminal(self):
-        """Should return False if unable to enter configuration terminal."""
-        exception = pexpect.TIMEOUT('Could not enter configuration terminal.')
-        sut = rpm_controller.CiscoPOEController(self.SWITCH)
-        timeout = sut.CMD_TIMEOUT
-        mock_ssh = self.mox.CreateMockAnything()
-        self.mox.StubOutWithMock(sut, '_login')
-        sut._login().AndReturn(mock_ssh)
-        mock_ssh.sendline(sut.CONFIG)
-        mock_ssh.expect(sut.config_prompt, timeout=timeout)
-        mock_ssh.sendline(sut.CONFIG_IF % self.PORT)
-        config_if_prompt = sut.config_if_prompt
-        mock_ssh.expect(config_if_prompt, timeout=timeout).AndRaise(exception)
-        mock_ssh.sendline(sut.END_CMD)
-        mock_ssh.sendline(sut.EXIT_CMD)
-        mock_ssh.close(force=True)
-        self.mox.ReplayAll()
-        self.assertFalse(sut.set_power_state(self.POWERUNIT_INFO, 'ON'))
-        self.mox.VerifyAll()
-
-    def testUnableToExitConfigurationTerminal(self):
-        """Should return False if unable to exit configuration terminal."""
-        exception = pexpect.TIMEOUT('Could not exit configuration terminal.')
-        sut = rpm_controller.CiscoPOEController(self.SWITCH)
-        mock_ssh = self.mox.CreateMockAnything()
-        self.mox.StubOutWithMock(sut, '_login')
-        self.mox.StubOutWithMock(sut, '_enter_configuration_terminal')
-        sut._login().AndReturn(mock_ssh)
-        sut._enter_configuration_terminal(self.PORT, mock_ssh).AndReturn(True)
-        mock_ssh.sendline(sut.SET_STATE_ON)
-        mock_ssh.sendline(sut.END_CMD)
-        mock_ssh.expect(
-                self.DEVICE, timeout=sut.CMD_TIMEOUT).AndRaise(exception)
-        mock_ssh.sendline(sut.EXIT_CMD)
-        mock_ssh.close(force=True)
-        self.mox.ReplayAll()
-        self.assertFalse(sut.set_power_state(self.POWERUNIT_INFO, 'ON'))
-        self.mox.VerifyAll()
-
-    def testUnableToVerifyState(self):
-        """Should return False if unable to verify current state."""
-        sut = rpm_controller.CiscoPOEController(self.SWITCH)
-        mock_ssh = self.mox.CreateMockAnything()
-        self.mox.StubOutWithMock(sut, '_login')
-        self.mox.StubOutWithMock(sut, '_enter_configuration_terminal')
-        self.mox.StubOutWithMock(sut, '_exit_configuration_terminal')
-        sut._login().AndReturn(mock_ssh)
-        sut._enter_configuration_terminal(self.PORT, mock_ssh).AndReturn(True)
-        sut._exit_configuration_terminal(mock_ssh).AndReturn(True)
-        mock_ssh.sendline(sut.SET_STATE_ON)
-        mock_ssh.sendline(sut.CHECK_INTERFACE_STATE % self.PORT)
-        exception = pexpect.TIMEOUT('Could not verify state.')
-        matcher = self.MATCHER % (self.PORT, self.DEVICE)
-        mock_ssh.expect(matcher, timeout=sut.CMD_TIMEOUT).AndRaise(exception)
-        mock_ssh.sendline(sut.EXIT_CMD)
-        mock_ssh.close(force=True)
-        self.mox.ReplayAll()
-        self.assertFalse(sut.set_power_state(self.POWERUNIT_INFO, 'ON'))
-        self.mox.VerifyAll()
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/site_utils/rpm_control_system/rpm_dispatcher.py b/site_utils/rpm_control_system/rpm_dispatcher.py
deleted file mode 100755
index 44e6b62..0000000
--- a/site_utils/rpm_control_system/rpm_dispatcher.py
+++ /dev/null
@@ -1,259 +0,0 @@
-#!/usr/bin/python2
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import atexit
-import errno
-import logging
-import re
-import sys
-import socket
-import threading
-import xmlrpclib
-
-import rpm_controller
-import rpm_logging_config
-
-from config import rpm_config
-from MultiThreadedXMLRPCServer import MultiThreadedXMLRPCServer
-from rpm_infrastructure_exception import RPMInfrastructureException
-
-import common
-from autotest_lib.site_utils.rpm_control_system import utils
-
-LOG_FILENAME_FORMAT = rpm_config.get('GENERAL','dispatcher_logname_format')
-
-
-class RPMDispatcher(object):
-    """
-    This class is the RPM dispatcher server and it is responsible for
-    communicating directly to the RPM devices to change a DUT's outlet status.
-
-    When an RPMDispatcher is initialized it registers itself with the frontend
-    server, who will field out outlet requests to this dispatcher.
-
-    Once a request is received the dispatcher looks up the RPMController
-    instance for the given DUT and then queues up the request and blocks until
-    it is processed.
-
-    @var _address: IP address or Hostname of this dispatcher server.
-    @var _frontend_server: URI of the frontend server.
-    @var _lock: Lock used to synchronize access to _worker_dict.
-    @var _port: Port assigned to this server instance.
-    @var _worker_dict: Dictionary mapping RPM hostname's to RPMController
-                       instances.
-    """
-
-
-    def __init__(self, address, port):
-        """
-        RPMDispatcher constructor.
-
-        Initialized instance vars and registers this server with the frontend
-        server.
-
-        @param address: Address of this dispatcher server.
-        @param port: Port assigned to this dispatcher server.
-
-        @raise RPMInfrastructureException: Raised if the dispatch server is
-                                           unable to register with the frontend
-                                           server.
-        """
-        self._address = address
-        self._port = port
-        self._lock = threading.Lock()
-        self._worker_dict = {}
-        # We assume that the frontend server and dispatchers are running on the
-        # same host, and the frontend server is listening for connections from
-        # the external world.
-        frontend_server_port = rpm_config.getint('RPM_INFRASTRUCTURE',
-                                                 'frontend_port')
-        self._frontend_server = 'http://%s:%d' % (socket.gethostname(),
-                                                  frontend_server_port)
-        logging.info('Registering this rpm dispatcher with the frontend '
-                     'server at %s.', self._frontend_server)
-        client = xmlrpclib.ServerProxy(self._frontend_server)
-        # De-register with the frontend when the dispatcher exits.
-        atexit.register(self._unregister)
-        try:
-            client.register_dispatcher(self._get_serveruri())
-        except socket.error as er:
-            err_msg = ('Unable to register with frontend server. Error: %s.' %
-                       errno.errorcode[er.errno])
-            logging.error(err_msg)
-            raise RPMInfrastructureException(err_msg)
-
-
-    def _worker_dict_put(self, key, value):
-        """
-        Private method used to synchronize access to _worker_dict.
-
-        @param key: key value we are using to access _worker_dict.
-        @param value: value we are putting into _worker_dict.
-        """
-        with self._lock:
-            self._worker_dict[key] = value
-
-
-    def _worker_dict_get(self, key):
-        """
-        Private method used to synchronize access to _worker_dict.
-
-        @param key: key value we are using to access _worker_dict.
-        @return: value found when accessing _worker_dict
-        """
-        with self._lock:
-            return self._worker_dict.get(key)
-
-
-    def is_up(self):
-        """
-        Allows the frontend server to see if the dispatcher server is up before
-        attempting to queue requests.
-
-        @return: True. If connection fails, the client proxy will throw a socket
-                 error on the client side.
-        """
-        return True
-
-
-    def queue_request(self, powerunit_info_dict, new_state):
-        """
-        Looks up the appropriate RPMController instance for the device and queues
-        up the request.
-
-        @param powerunit_info_dict: A dictionary containing the attributes/values
-                                    of an unmarshalled PowerUnitInfo instance.
-        @param new_state: [ON, OFF, CYCLE] state we want to change the
-                          outlet to.
-        @return: True if the attempt to change power state was successful,
-                 False otherwise.
-        """
-        powerunit_info = utils.PowerUnitInfo(**powerunit_info_dict)
-        logging.info('Received request to set device: %s to state: %s',
-                     powerunit_info.device_hostname, new_state)
-        rpm_controller = self._get_rpm_controller(
-                powerunit_info.powerunit_hostname,
-                powerunit_info.hydra_hostname)
-        return rpm_controller.queue_request(powerunit_info, new_state)
-
-
-    def _get_rpm_controller(self, rpm_hostname, hydra_hostname=None):
-        """
-        Private method that retrieves the appropriate RPMController instance
-        for this RPM hostname, or calls _create_rpm_controller if it does not
-        already exist.
-
-        @param rpm_hostname: hostname of the RPM whose RPMController we want.
-        @param hydra_hostname: hostname of the associated hydra, if any.
-
-        @return: RPMController instance responsible for this RPM.
-        """
-        if not rpm_hostname:
-            return None
-        rpm_controller = self._worker_dict_get(rpm_hostname)
-        if not rpm_controller:
-            rpm_controller = self._create_rpm_controller(
-                    rpm_hostname, hydra_hostname)
-            self._worker_dict_put(rpm_hostname, rpm_controller)
-        return rpm_controller
-
-
-    def _create_rpm_controller(self, rpm_hostname, hydra_hostname):
-        """
-        Determines the type of RPMController required and initializes it.
-
-        @param rpm_hostname: Hostname of the RPM we need to communicate with.
-        @param hydra_hostname: Hostname of the associated hydra, if any.
-
-        @return: RPMController instance responsible for this RPM.
-        """
-        hostname_elements = rpm_hostname.split('-')
-        if hostname_elements[-2] == 'poe':
-            # POE switch hostname looks like 'chromeos2-poe-switch1'.
-            logging.info('The controller is a Cisco POE switch.')
-            return rpm_controller.CiscoPOEController(rpm_hostname)
-        else:
-            # The device is an RPM.
-            rack_id = hostname_elements[-2]
-            rpm_typechecker = re.compile('rack[0-9]+[a-z]+')
-            if rpm_typechecker.match(rack_id):
-                logging.info('RPM is a webpowered device.')
-                return rpm_controller.WebPoweredRPMController(rpm_hostname)
-            else:
-                logging.info('RPM is a Sentry CDU device.')
-                return rpm_controller.SentryRPMController(
-                        hostname=rpm_hostname,
-                        hydra_hostname=hydra_hostname)
-
-
-    def _get_serveruri(self):
-        """
-        Formats the _address and _port into a meaningful URI string.
-
-        @return: URI of this dispatch server.
-        """
-        return 'http://%s:%d' % (self._address, self._port)
-
-
-    def _unregister(self):
-        """
-        Tells the frontend server that this dispatch server is shutting down and
-        to unregister it.
-
-        Called by atexit.
-
-        @raise RPMInfrastructureException: Raised if the dispatch server is
-                                           unable to unregister with the
-                                           frontend server.
-        """
-        logging.info('Dispatch server shutting down. Unregistering with RPM '
-                     'frontend server.')
-        client = xmlrpclib.ServerProxy(self._frontend_server)
-        try:
-            client.unregister_dispatcher(self._get_serveruri())
-        except socket.error as er:
-            err_msg = ('Unable to unregister with frontend server. Error: %s.' %
-                       errno.errorcode[er.errno])
-            logging.error(err_msg)
-            raise RPMInfrastructureException(err_msg)
-
-
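-# Illustrative sketch (an assumed caller, not part of the production flow):
-# an XML-RPC client marshals the attributes of a utils.PowerUnitInfo into a
-# plain dict and passes it to queue_request() together with the new state.
-# The hostnames and outlet below are made-up examples; real requests arrive
-# via the frontend server / rpm_client rather than from this helper.
-def _example_dispatch_request(dispatcher_uri):
-    """Ask the dispatcher at |dispatcher_uri| to power a servo outlet ON."""
-    # allow_none mirrors the dispatch server, which is created with
-    # allow_none=True, so that hydra_hostname=None can be marshalled.
-    client = xmlrpclib.ServerProxy(dispatcher_uri, allow_none=True)
-    powerunit_info_dict = {
-            'device_hostname': 'chromeos1-rack1-host1-servo',
-            'powerunit_type': utils.PowerUnitInfo.POWERUNIT_TYPES.POE,
-            'powerunit_hostname': 'chromeos1-poe-switch1',
-            'outlet': 'fa25',
-            'hydra_hostname': None,
-    }
-    # queue_request() blocks until the request has been processed and
-    # returns True on success.
-    return client.queue_request(powerunit_info_dict, 'ON')
-
-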
-def launch_server_on_unused_port():
-    """
-    Looks up an unused port on this host and launches the xmlrpc server.
-
-    Useful for testing by running multiple dispatch servers on the same host.
-
-    @return: server, port - the server object and the port on which it is
-             listening.
-    """
-    address = socket.gethostbyname(socket.gethostname())
-    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    # Set this socket to allow reuse.
-    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-    sock.bind(('', 0))
-    port = sock.getsockname()[1]
-    server = MultiThreadedXMLRPCServer((address, port),
-                                       allow_none=True)
-    sock.close()
-    return server, port
-
-
-if __name__ == '__main__':
-    """
-    Main function used to launch the dispatch server. Creates an instance of
-    RPMDispatcher and registers it to a MultiThreadedXMLRPCServer instance.
-    """
-    if len(sys.argv) != 2:
-        print 'Usage: ./%s <log_file_name>' % sys.argv[0]
-        sys.exit(1)
-
-    rpm_logging_config.start_log_server(sys.argv[1], LOG_FILENAME_FORMAT)
-    rpm_logging_config.set_up_logging_to_server()
-
-    # Get the local ip _address and set the server to utilize it.
-    address = socket.gethostbyname(socket.gethostname())
-    server, port = launch_server_on_unused_port()
-    rpm_dispatcher = RPMDispatcher(address, port)
-    server.register_instance(rpm_dispatcher)
-    server.serve_forever()
diff --git a/site_utils/rpm_control_system/rpm_dispatcher_unittest.py b/site_utils/rpm_control_system/rpm_dispatcher_unittest.py
deleted file mode 100755
index a90e8a3..0000000
--- a/site_utils/rpm_control_system/rpm_dispatcher_unittest.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/python2
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import mox
-import socket
-import unittest
-
-from config import rpm_config
-import rpm_dispatcher
-
-DUT_SAME_RPM1 = 'chromeos-rack8e-hostbs1'
-DUT_SAME_RPM2 = 'chromeos-rack8e-hostbs2'
-RPM_HOSTNAME = 'chromeos-rack8e-rpm1'
-DUT_DIFFERENT_RPM = 'chromeos-rack1-hostbs1'
-FAKE_DISPATCHER_URI = 'fake-dispatcher'
-FAKE_DISPATCHER_PORT = 9999
-FRONT_END_PORT = rpm_config.getint('RPM_INFRASTRUCTURE', 'frontend_port')
-PROPER_URI_FORMAT = 'http://%s:%d'
-
-
-class TestRPMDispatcher(mox.MoxTestBase):
-    """
-    Simple unit tests to verify that the RPM Dispatcher properly registers with
-    the frontend server, and also initializes and reuses the same RPM Controller
-    for DUT requests on the same RPM.
-
-    queue_request is the only public method of RPMDispatcher; however, its
-    logic is simple and relies mostly on the private methods. Therefore,
-    these tests primarily exercise RPMDispatcher initialization and
-    _get_rpm_controller (which calls _create_rpm_controller) to verify the
-    implementation is correct.
-    """
-
-    def setUp(self):
-        super(TestRPMDispatcher, self).setUp()
-        self.frontend_mock = self.mox.CreateMockAnything()
-        expected_uri = PROPER_URI_FORMAT % (FAKE_DISPATCHER_URI,
-                                            FAKE_DISPATCHER_PORT)
-        self.frontend_mock.register_dispatcher(expected_uri)
-        rpm_dispatcher.xmlrpclib.ServerProxy = self.mox.CreateMockAnything()
-        frontend_uri = 'http://%s:%d' % (socket.gethostname(), FRONT_END_PORT)
-        rpm_dispatcher.xmlrpclib.ServerProxy(frontend_uri).AndReturn(
-                self.frontend_mock)
-        rpm_dispatcher.atexit = self.mox.CreateMockAnything()
-        rpm_dispatcher.atexit.register(mox.IgnoreArg())
-        self.mox.ReplayAll()
-        self.dispatcher = rpm_dispatcher.RPMDispatcher(FAKE_DISPATCHER_URI,
-                                                       FAKE_DISPATCHER_PORT)
-
-
-    def testRegistration(self):
-        """
-        Make sure that as a dispatcher is initialized it properly registered
-        with the frontend server.
-        """
-        self.mox.VerifyAll()
-
-
-    def testGetSameRPMController(self):
-        """
-        Make sure that calls to _get_rpm_controller with DUT hostnames that
-        belong to the same RPM device create and retrieve the same RPMController
-        instance.
-        """
-        controller1 = self.dispatcher._get_rpm_controller(RPM_HOSTNAME)
-        controller2 = self.dispatcher._get_rpm_controller(RPM_HOSTNAME)
-        self.assertEquals(controller1, controller2)
-
-
-    def testGetDifferentRPMController(self):
-        """
-        Make sure that calls to _get_rpm_controller with DUT hostnames that
-        belong to different RPM devices create and retrieve different
-        RPMController instances.
-        """
-        controller1 = self.dispatcher._get_rpm_controller(DUT_SAME_RPM1)
-        controller2 = self.dispatcher._get_rpm_controller(DUT_DIFFERENT_RPM)
-        self.assertNotEquals(controller1, controller2)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/site_utils/rpm_control_system/rpm_infrastructure_exception.py b/site_utils/rpm_control_system/rpm_infrastructure_exception.py
deleted file mode 100644
index afce659..0000000
--- a/site_utils/rpm_control_system/rpm_infrastructure_exception.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-class RPMInfrastructureException(Exception):
-    """
-    Exception used to indicate infrastructure failures in the RPM control
-    system.
-    """
-    pass
-
-
-class RPMLoggingSetupError(RPMInfrastructureException):
-    """Rasied when setup logging fails."""
-    pass
diff --git a/site_utils/rpm_control_system/rpm_integration_test.py b/site_utils/rpm_control_system/rpm_integration_test.py
deleted file mode 100644
index 4431e14..0000000
--- a/site_utils/rpm_control_system/rpm_integration_test.py
+++ /dev/null
@@ -1,146 +0,0 @@
-import logging
-import threading
-
-import common
-
-from autotest_lib.site_utils.rpm_control_system import rpm_controller
-from autotest_lib.site_utils.rpm_control_system import utils
-
-
-# Format Appears as: [Date] [Time] - [Msg Level] - [Message]
-LOGGING_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
-
-
-def test_in_order_requests():
-    """Simple integration testing."""
-    rpm = rpm_controller.WebPoweredRPMController(
-            'chromeos-rack8e-rpm1')
-    info_1 = utils.PowerUnitInfo(
-            device_hostname='chromeos1-rack8e-hostbs1',
-            powerunit_hostname='chromeos-rack8e-rpm1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-            hydra_hostname=None,
-            outlet='')
-    info_2 = utils.PowerUnitInfo(
-            device_hostname='chromeos1-rack8e-hostbs2',
-            powerunit_hostname='chromeos-rack8e-rpm1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-            hydra_hostname=None,
-            outlet='')
-    info_3 = utils.PowerUnitInfo(
-            device_hostname='chromeos1-rack8e-hostbs3',
-            powerunit_hostname='chromeos-rack8e-rpm1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-            hydra_hostname=None,
-            outlet='')
-    rpm.queue_request(info_1, 'OFF')
-    rpm.queue_request(info_2, 'OFF')
-    rpm.queue_request(info_3, 'CYCLE')
-
-
-def test_parrallel_webrequests():
-    """Simple integration testing."""
-    rpm = rpm_controller.WebPoweredRPMController(
-            'chromeos-rack8e-rpm1')
-    info_1 = utils.PowerUnitInfo(
-            device_hostname='chromeos1-rack8e-hostbs1',
-            powerunit_hostname='chromeos-rack8e-rpm1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-            hydra_hostname=None,
-            outlet='')
-    info_2 = utils.PowerUnitInfo(
-            device_hostname='chromeos1-rack8e-hostbs2',
-            powerunit_hostname='chromeos-rack8e-rpm1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-            hydra_hostname=None,
-            outlet='')
-    threading.Thread(target=rpm.queue_request,
-                     args=(info_1, 'OFF')).start()
-    threading.Thread(target=rpm.queue_request,
-                     args=(info_2, 'ON')).start()
-
-
-def test_parrallel_sshrequests():
-    """Simple integration testing."""
-    rpm = rpm_controller.SentryRPMController('chromeos-rack8-rpm1')
-    info_1 = utils.PowerUnitInfo(
-            device_hostname='chromeos-rack8-hostbs1',
-            powerunit_hostname='chromeos-rack8-rpm1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-            hydra_hostname=None,
-            outlet='.A14')
-    info_2 = utils.PowerUnitInfo(
-            device_hostname='chromeos-rack8-hostbs2',
-            powerunit_hostname='chromeos-rack8-rpm1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-            hydra_hostname=None,
-            outlet='.A11')
-    threading.Thread(target=rpm.queue_request,
-                     args=(info_1, 'CYCLE')).start()
-    threading.Thread(target=rpm.queue_request,
-                     args=(info_2, 'CYCLE')).start()
-
-    # The following tests are disabled as the
-    # outlets on the RPM are in actual use.
-    # rpm2 = SentryRPMController('chromeos2-row2-rack3-rpm1')
-    # threading.Thread(target=rpm2.queue_request,
-    #                  args=('chromeos2-row2-rack3-hostbs', 'ON')).start()
-    # threading.Thread(target=rpm2.queue_request,
-    #                  args=('chromeos2-row2-rack3-hostbs2', 'ON')).start()
-    # threading.Thread(target=rpm2.queue_request,
-    #                  args=('chromeos2-row1-rack7-hostbs1', 'ON')).start()
-
-
-def test_in_order_poerequests():
-    """Simple integration testing for poe controller."""
-    poe_controller = rpm_controller.CiscoPOEController(
-            'chromeos1-poe-switch1')
-    info_1 = utils.PowerUnitInfo(
-            device_hostname='chromeos1-rack4-host1bs-servo',
-            powerunit_hostname='chromeos1-poe-switch1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.POE,
-            hydra_hostname=None,
-            outlet='fa33')
-    info_2 = utils.PowerUnitInfo(
-            device_hostname='chromeos1-rack4-host2bs-servo',
-            powerunit_hostname='chromeos1-poe-switch1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.POE,
-            hydra_hostname=None,
-            outlet='fa34')
-    poe_controller.queue_request(info_1, 'OFF')
-    poe_controller.queue_request(info_1, 'ON')
-    poe_controller.queue_request(info_2, 'CYCLE')
-
-
-def test_parrallel_poerequests():
-    """Simple integration testing for poe controller."""
-    poe_controller = rpm_controller.CiscoPOEController(
-            'chromeos1-poe-switch1')
-    info_1 = utils.PowerUnitInfo(
-            device_hostname='chromeos1-rack4-host1bs-servo',
-            powerunit_hostname='chromeos1-poe-switch1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.POE,
-            hydra_hostname=None,
-            outlet='fa33')
-    info_2 = utils.PowerUnitInfo(
-            device_hostname='chromeos1-rack4-host2bs-servo',
-            powerunit_hostname='chromeos1-poe-switch1',
-            powerunit_type=utils.PowerUnitInfo.POWERUNIT_TYPES.POE,
-            hydra_hostname=None,
-            outlet='fa34')
-    threading.Thread(target=poe_controller.queue_request,
-                     args=(info_1, 'CYCLE')).start()
-    threading.Thread(target=poe_controller.queue_request,
-                     args=(info_2, 'CYCLE')).start()
-
-
-if __name__ == '__main__':
-    logging.basicConfig(level=logging.DEBUG, format=LOGGING_FORMAT)
-#    The tests in this file are disabled since most of the ports are
-#    in actual use now. If you are going to run them, make sure
-#    to choose unused hosts/ports.
-#    test_in_order_requests()
-#    test_parrallel_webrequests()
-#    test_parrallel_sshrequests()
-#    test_in_order_poerequests()
-#    test_parrallel_poerequests()
diff --git a/site_utils/rpm_control_system/rpm_logging_config.py b/site_utils/rpm_control_system/rpm_logging_config.py
deleted file mode 100644
index 3c92163..0000000
--- a/site_utils/rpm_control_system/rpm_logging_config.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import datetime
-import logging
-import logging.handlers
-import os
-import socket
-import time
-
-from config import rpm_config
-
-import common
-from autotest_lib.site_utils import log_socket_server
-from autotest_lib.site_utils.rpm_control_system import rpm_infrastructure_exception
-
-LOGGING_FORMAT = rpm_config.get('GENERAL', 'logging_format')
-RECEIVERS = rpm_config.get('RPM_INFRASTRUCTURE',
-                           'email_notification_recipients').split(',')
-SUBJECT_LINE = (rpm_config.get('GENERAL', 'email_subject_line_format') %
-                socket.gethostname())
-
-
-class SuspendableSMTPHandler(logging.handlers.SMTPHandler):
-    """SMTPHandler that can have it's emails suspended."""
-    _suspend_start_time = datetime.datetime.now()
-    _suspend_time_hrs = 0
-
-
-    def suspend_emails(self, hours):
-        """Suspend email notifications.
-
-        @param hours: How many hours to suspend email notifications.
-        """
-        self._suspend_start_time = datetime.datetime.now()
-        self._suspend_time_hrs = int(hours, 0)
-
-
-    def resume_emails(self):
-        """Resume email notifications."""
-        self._suspend_time_hrs = 0
-
-
-    def emit(self, record):
-        """Emit a log record.
-
-        This subclassed version only emits the log record if emails are not
-        suspended.
-
-        @param record: Log record object we want to emit/record.
-        """
-        if datetime.datetime.now() < (self._suspend_start_time +
-                datetime.timedelta(hours=self._suspend_time_hrs)):
-            return
-        record.msg += ('\n\nTo disable these emails use rpm_client from your '
-                       'local checkout. For a 12 hour suspension run: '
-                       'site_utils/rpm_control_system/rpm_client.py -d 12')
-        return super(SuspendableSMTPHandler, self).emit(record)
-
-
-def set_up_logging_to_file(log_dir, log_filename_format=None):
-    """
-    Set up logging with the correct format/level, log to a file, and send
-    out email notifications for error-level messages.
-
-    @param log_dir: The directory in which log files should be created.
-    @param log_filename_format: Format to use to create the log file.
-    """
-    logging.basicConfig(filename=_logfile_path(log_dir, log_filename_format),
-                        level=logging.INFO, format=LOGGING_FORMAT)
-    _set_common_logger_options()
-
-
-def start_log_server(log_dir, log_filename_format):
-    """Start log server to accept logging through a TCP server.
-
-    @param log_dir: The directory in which log files should be created.
-    @param log_filename_format: Format to use to create the log file.
-    """
-    log_filename = _logfile_path(log_dir, log_filename_format)
-    log_socket_server.LogSocketServer.start(filename=log_filename,
-                                            level=logging.INFO,
-                                            format=LOGGING_FORMAT)
-
-
-def set_up_logging_to_server():
-    """Sets up logging option when using a logserver."""
-    if log_socket_server.LogSocketServer.port is None:
-        raise rpm_infrastructure_exception.RPMLoggingSetupError(
-                'set_up_logging failed: Log server port is unknown.')
-    socketHandler = logging.handlers.SocketHandler(
-            'localhost', log_socket_server.LogSocketServer.port)
-    logging.getLogger().addHandler(socketHandler)
-    _set_common_logger_options()
-
-
-def _logfile_path(log_dir, log_filename_format):
-    """Get file name of log based on given log_filename_format.
-
-    @param log_filename_format: Format to use to create the log file.
-    """
-    log_filename = time.strftime(log_filename_format)
-    if not os.path.isdir(log_dir):
-        os.makedirs(log_dir)
-    return os.path.join(log_dir, log_filename)
-
-
-def _set_common_logger_options():
-    """Sets the options common to both file and server based logging."""
-    logger = logging.getLogger()
-    if rpm_config.getboolean('GENERAL', 'debug'):
-        logger.setLevel(logging.DEBUG)
-    email_handler = SuspendableSMTPHandler('localhost', 'rpm@google.com',
-                                           RECEIVERS, SUBJECT_LINE, None)
-    email_handler.setLevel(logging.ERROR)
-    email_handler.setFormatter(logging.Formatter(LOGGING_FORMAT))
-    logger.addHandler(email_handler)
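-
-
-# Illustrative sketch (an assumed caller): suspending the email alerts that
-# _set_common_logger_options() attaches to the root logger. Note that
-# SuspendableSMTPHandler.suspend_emails() parses its argument with
-# int(hours, 0), so the argument is expected to be a string such as '12'.
-def _example_suspend_email_alerts(hours='12'):
-    """Silence ERROR-level email alerts for |hours| hours."""
-    for handler in logging.getLogger().handlers:
-        if isinstance(handler, SuspendableSMTPHandler):
-            handler.suspend_emails(hours)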
diff --git a/site_utils/rpm_control_system/servo_interface_mapping.csv b/site_utils/rpm_control_system/servo_interface_mapping.csv
deleted file mode 100644
index fa796b9..0000000
--- a/site_utils/rpm_control_system/servo_interface_mapping.csv
+++ /dev/null
@@ -1,250 +0,0 @@
-chromeos1-row1-rack10-host1-servo,chromeos1-poe-switch5,fa1
-chromeos1-row1-rack10-host2-servo,chromeos1-poe-switch5,fa25
-chromeos1-row1-rack10-host3-servo,chromeos1-poe-switch5,fa26
-chromeos1-row1-rack10-host4-servo,chromeos1-poe-switch5,fa27
-chromeos1-row1-rack10-host5-servo,chromeos1-poe-switch5,fa29
-chromeos1-row1-rack10-host6-servo,chromeos1-poe-switch5,fa30
-chromeos1-row1-rack11-host1-servo,chromeos1-poe-switch5,fa7
-chromeos1-row1-rack11-host2-servo,chromeos1-poe-switch5,fa8
-chromeos1-row1-rack11-host3-servo,chromeos1-poe-switch5,fa33
-chromeos1-row1-rack11-host4-servo,chromeos1-poe-switch5,fa34
-chromeos1-row1-rack11-host5-servo,chromeos1-poe-switch5,fa35
-chromeos1-row1-rack11-host6-servo,chromeos1-poe-switch5,fa36
-chromeos1-row1-rack1-host1-servo,chromeos1-poe-switch1,fa25
-chromeos1-row1-rack1-host2-servo,chromeos1-poe-switch1,fa26
-chromeos1-row1-rack1-host3-servo,chromeos1-poe-switch1,fa27
-chromeos1-row1-rack1-host4-servo,chromeos1-poe-switch1,fa28
-chromeos1-row1-rack1-host5-servo,chromeos1-poe-switch1,fa29
-chromeos1-row1-rack1-host6-servo,chromeos1-poe-switch1,fa30
-chromeos1-row1-rack2-host1-servo,chromeos1-poe-switch1,fa31
-chromeos1-row1-rack2-host2-servo,chromeos1-poe-switch1,fa32
-chromeos1-row1-rack2-host3-servo,chromeos1-poe-switch1,fa33
-chromeos1-row1-rack2-host4-servo,chromeos1-poe-switch1,fa34
-chromeos1-row1-rack2-host5-servo,chromeos1-poe-switch1,fa37
-chromeos1-row1-rack2-host6-servo,chromeos1-poe-switch1,fa38
-chromeos1-row1-rack3-host1-servo,chromeos1-poe-switch2,fa25
-chromeos1-row1-rack3-host2-servo,chromeos1-poe-switch2,fa26
-chromeos1-row1-rack3-host3-servo,chromeos1-poe-switch2,fa27
-chromeos1-row1-rack3-host4-servo,chromeos1-poe-switch2,fa28
-chromeos1-row1-rack3-host5-servo,chromeos1-poe-switch2,fa29
-chromeos1-row1-rack3-host6-servo,chromeos1-poe-switch2,fa30
-chromeos1-row1-rack4-host1-servo,chromeos1-poe-switch2,fa31
-chromeos1-row1-rack4-host2-servo,chromeos1-poe-switch2,fa32
-chromeos1-row1-rack4-host3-servo,chromeos1-poe-switch2,fa33
-chromeos1-row1-rack4-host4-servo,chromeos1-poe-switch2,fa34
-chromeos1-row1-rack4-host5-servo,chromeos1-poe-switch2,fa35
-chromeos1-row1-rack4-host6-servo,chromeos1-poe-switch2,fa36
-chromeos1-row1-rack5-host1-servo,chromeos1-poe-switch3,
-chromeos1-row1-rack5-host2-servo,chromeos1-poe-switch3,
-chromeos1-row1-rack5-host3-servo,chromeos1-poe-switch3,
-chromeos1-row1-rack5-host4-servo,chromeos1-poe-switch3,
-chromeos1-row1-rack5-host5-servo,chromeos1-poe-switch3,
-chromeos1-row1-rack5-host6-servo,chromeos1-poe-switch3,
-chromeos1-row1-rack6-host1-servo,chromeos1-poe-switch3,
-chromeos1-row1-rack6-host2-servo,chromeos1-poe-switch3,
-chromeos1-row1-rack6-host3-servo,chromeos1-poe-switch3,
-chromeos1-row1-rack6-host4-servo,chromeos1-poe-switch3,
-chromeos1-row1-rack7-host1-servo,chromeos1-poe-switch3,fa31
-chromeos1-row1-rack7-host2-servo,chromeos1-poe-switch3,fa32
-chromeos1-row1-rack7-host3-servo,chromeos1-poe-switch3,fa33
-chromeos1-row1-rack7-host4-servo,chromeos1-poe-switch3,fa34
-chromeos1-row1-rack7-host5-servo,chromeos1-poe-switch3,fa35
-chromeos1-row1-rack7-host6-servo,chromeos1-poe-switch3,fa36
-chromeos1-row1-rack8-host1-servo,chromeos1-poe-switch4,fa25
-chromeos1-row1-rack8-host2-servo,chromeos1-poe-switch4,fa26
-chromeos1-row1-rack8-host3-servo,chromeos1-poe-switch4,fa27
-chromeos1-row1-rack8-host4-servo,chromeos1-poe-switch4,fa28
-chromeos1-row1-rack8-host5-servo,chromeos1-poe-switch4,fa29
-chromeos1-row1-rack8-host6-servo,chromeos1-poe-switch4,fa30
-chromeos1-row1-rack9-host1-servo,chromeos1-poe-switch4,fa31
-chromeos1-row1-rack9-host2-servo,chromeos1-poe-switch4,fa32
-chromeos1-row1-rack9-host3-servo,chromeos1-poe-switch4,fa33
-chromeos1-row1-rack9-host4-servo,chromeos1-poe-switch4,fa34
-chromeos1-row1-rack9-host5-servo,chromeos1-poe-switch4,fa35
-chromeos1-row1-rack9-host6-servo,chromeos1-poe-switch4,fa36
-chromeos1-row2-rack10-host1-servo,chromeos1-poe-switch5,fa39
-chromeos1-row2-rack10-host2-servo,chromeos1-poe-switch5,fa40
-chromeos1-row2-rack10-host3-servo,chromeos1-poe-switch5,fa41
-chromeos1-row2-rack10-host4-servo,chromeos1-poe-switch5,fa42
-chromeos1-row2-rack10-host5-servo,chromeos1-poe-switch5,fa43
-chromeos1-row2-rack10-host6-servo,chromeos1-poe-switch5,fa44
-chromeos1-row2-rack11-host1-servo,chromeos1-poe-switch6,fa13
-chromeos1-row2-rack11-host2-servo,chromeos1-poe-switch5,fa46
-chromeos1-row2-rack11-host3-servo,chromeos1-poe-switch5,fa47
-chromeos1-row2-rack11-host4-servo,chromeos1-poe-switch5,fa24
-chromeos1-row2-rack11-host5-servo,chromeos1-poe-switch4,fa19
-chromeos1-row2-rack11-host6-servo,chromeos1-poe-switch6,fa15
-chromeos1-row2-rack1-host4-servo,chromeos1-poe-switch1,fa40
-chromeos1-row2-rack1-host5-servo,chromeos1-poe-switch1,fa41
-chromeos1-row2-rack1-host6-servo,chromeos1-poe-switch1,fa42
-chromeos1-row2-rack2-host1-servo,chromeos1-poe-switch1,fa43
-chromeos1-row2-rack2-host2-servo,chromeos1-poe-switch1,fa44
-chromeos1-row2-rack2-host3-servo,chromeos1-poe-switch1,fa45
-chromeos1-row2-rack2-host4-servo,chromeos1-poe-switch1,fa22
-chromeos1-row2-rack2-host5-servo,chromeos1-poe-switch1,fa23
-chromeos1-row2-rack2-host6-servo,chromeos1-poe-switch2,fa13
-chromeos1-row2-rack3-host1-servo,,
-chromeos1-row2-rack3-host2-servo,,
-chromeos1-row2-rack3-host3-servo,,
-chromeos1-row2-rack3-host4-servo,,
-chromeos1-row2-rack3-host5-servo,chromeos1-poe-switch2,fa43
-chromeos1-row2-rack3-host6-servo,,
-chromeos1-row2-rack4-host1-servo,,
-chromeos1-row2-rack4-host2-servo,chromeos1-poe-switch2,fa21
-chromeos1-row2-rack4-host3-servo,chromeos1-poe-switch2,fa22
-chromeos1-row2-rack4-host4-servo,chromeos1-poe-switch2,fa23
-chromeos1-row2-rack4-host5-servo,chromeos1-poe-switch2,fa24
-chromeos1-row2-rack4-host6-servo,chromeos1-poe-switch3,fa37
-chromeos1-row2-rack5-host1-servo,,
-chromeos1-row2-rack5-host2-servo,,
-chromeos1-row2-rack5-host3-servo,,
-chromeos1-row2-rack5-host4-servo,,
-chromeos1-row2-rack5-host5-servo,,
-chromeos1-row2-rack5-host6-servo,,
-chromeos1-row2-rack6-host1-servo,,
-chromeos1-row2-rack6-host2-servo,,
-chromeos1-row2-rack6-host3-servo,,
-chromeos1-row2-rack6-host4-servo,,
-chromeos1-row2-rack7-host1-servo,chromeos1-poe-switch3,fa44
-chromeos1-row2-rack7-host2-servo,chromeos1-poe-switch3,fa45
-chromeos1-row2-rack7-host3-servo,chromeos1-poe-switch3,fa46
-chromeos1-row2-rack7-host4-servo,chromeos1-poe-switch3,fa47
-chromeos1-row2-rack7-host5-servo,chromeos1-poe-switch4,fa13
-chromeos1-row2-rack7-host6-servo,chromeos1-poe-switch4,fa14
-chromeos1-row2-rack8-host1-servo,chromeos1-poe-switch4,fa15
-chromeos1-row2-rack8-host2-servo,chromeos1-poe-switch4,fa16
-chromeos1-row2-rack8-host3-servo,chromeos1-poe-switch4,fa17
-chromeos1-row2-rack8-host4-servo,chromeos1-poe-switch4,fa18
-chromeos1-row2-rack8-host5-servo,chromeos1-poe-switch4,fa19
-chromeos1-row2-rack8-host6-servo,chromeos1-poe-switch4,fa20
-chromeos1-row2-rack9-host1-servo,chromeos1-poe-switch4,fa21
-chromeos1-row2-rack9-host2-servo,chromeos1-poe-switch4,fa22
-chromeos1-row2-rack9-host3-servo,chromeos1-poe-switch4,fa23
-chromeos1-row2-rack9-host4-servo,chromeos1-poe-switch4,fa24
-chromeos1-row2-rack9-host5-servo,chromeos1-poe-switch5,fa37
-chromeos1-row2-rack9-host6-servo,chromeos1-poe-switch5,fa38
-chromeos1-row3-rack10-host1-servo,,
-chromeos1-row3-rack10-host2-servo,,
-chromeos1-row3-rack10-host3-servo,,
-chromeos1-row3-rack10-host4-servo,,
-chromeos1-row3-rack10-host5-servo,,
-chromeos1-row3-rack10-host6-servo,,
-chromeos1-row3-rack11-host1-servo,,
-chromeos1-row3-rack11-host2-servo,,
-chromeos1-row3-rack11-host3-servo,,
-chromeos1-row3-rack11-host4-servo,,
-chromeos1-row3-rack11-host5-servo,,
-chromeos1-row3-rack11-host6-servo,,
-chromeos1-row3-rack1-host4-servo,,
-chromeos1-row3-rack1-host5-servo,,
-chromeos1-row3-rack1-host6-servo,,
-chromeos1-row3-rack2-host1-servo,,
-chromeos1-row3-rack2-host2-servo,,
-chromeos1-row3-rack2-host3-servo,,
-chromeos1-row3-rack2-host4-servo,,
-chromeos1-row3-rack2-host5-servo,,
-chromeos1-row3-rack2-host6-servo,,
-chromeos1-row3-rack3-host1-servo,,
-chromeos1-row3-rack3-host2-servo,,
-chromeos1-row3-rack3-host3-servo,,
-chromeos1-row3-rack3-host4-servo,,
-chromeos1-row3-rack3-host5-servo,,
-chromeos1-row3-rack3-host6-servo,,
-chromeos1-row3-rack4-host1-servo,,
-chromeos1-row3-rack4-host2-servo,,
-chromeos1-row3-rack4-host3-servo,,
-chromeos1-row3-rack4-host4-servo,,
-chromeos1-row3-rack4-host5-servo,,
-chromeos1-row3-rack4-host6-servo,,
-chromeos1-row3-rack5-host1-servo,chromeos1-poe-switch9,fa2
-chromeos1-row3-rack5-host2-servo,chromeos1-poe-switch9,fa4
-chromeos1-row3-rack5-host3-servo,chromeos1-poe-switch9,fa6
-chromeos1-row3-rack5-host4-servo,chromeos1-poe-switch9,fa8
-chromeos1-row3-rack5-host5-servo,chromeos1-poe-switch9,fa10
-chromeos1-row3-rack5-host6-servo,chromeos1-poe-switch9,fa12
-chromeos1-row3-rack6-host1-servo,,
-chromeos1-row3-rack6-host2-servo,,
-chromeos1-row3-rack6-host3-servo,,
-chromeos1-row3-rack6-host4-servo,,
-chromeos1-row3-rack7-host1-servo,,
-chromeos1-row3-rack7-host2-servo,,
-chromeos1-row3-rack7-host3-servo,,
-chromeos1-row3-rack7-host4-servo,,
-chromeos1-row3-rack7-host5-servo,,
-chromeos1-row3-rack7-host6-servo,,
-chromeos1-row3-rack8-host1-servo,,
-chromeos1-row3-rack8-host2-servo,,
-chromeos1-row3-rack8-host3-servo,,
-chromeos1-row3-rack8-host4-servo,,
-chromeos1-row3-rack8-host5-servo,,
-chromeos1-row3-rack8-host6-servo,,
-chromeos1-row3-rack9-host1-servo,,
-chromeos1-row3-rack9-host2-servo,,
-chromeos1-row3-rack9-host3-servo,,
-chromeos1-row3-rack9-host4-servo,,
-chromeos1-row3-rack9-host5-servo,,
-chromeos1-row3-rack9-host6-servo,,
-chromeos1-row4-rack10-host1-servo,,
-chromeos1-row4-rack10-host2-servo,,
-chromeos1-row4-rack10-host3-servo,,
-chromeos1-row4-rack10-host4-servo,,
-chromeos1-row4-rack10-host5-servo,,
-chromeos1-row4-rack10-host6-servo,,
-chromeos1-row4-rack11-host1-servo,,
-chromeos1-row4-rack11-host2-servo,,
-chromeos1-row4-rack11-host3-servo,,
-chromeos1-row4-rack11-host4-servo,,
-chromeos1-row4-rack11-host5-servo,,
-chromeos1-row4-rack11-host6-servo,,
-chromeos1-row4-rack1-host1-servo,,
-chromeos1-row4-rack1-host2-servo,,
-chromeos1-row4-rack1-host3-servo,,
-chromeos1-row4-rack1-host4-servo,,
-chromeos1-row4-rack1-host5-servo,,
-chromeos1-row4-rack1-host6-servo,,
-chromeos1-row4-rack2-host1-servo,,
-chromeos1-row4-rack2-host2-servo,,
-chromeos1-row4-rack2-host3-servo,,
-chromeos1-row4-rack2-host4-servo,,
-chromeos1-row4-rack2-host5-servo,,
-chromeos1-row4-rack2-host6-servo,,
-chromeos1-row4-rack3-host1-servo,,
-chromeos1-row4-rack3-host2-servo,,
-chromeos1-row4-rack3-host3-servo,,
-chromeos1-row4-rack3-host4-servo,,
-chromeos1-row4-rack3-host5-servo,,
-chromeos1-row4-rack3-host6-servo,,
-chromeos1-row4-rack4-host1-servo,,
-chromeos1-row4-rack4-host2-servo,,
-chromeos1-row4-rack4-host3-servo,,
-chromeos1-row4-rack4-host4-servo,,
-chromeos1-row4-rack4-host5-servo,,
-chromeos1-row4-rack4-host6-servo,,
-chromeos1-row4-rack5-host1-servo,chromeos1-poe-switch9,fa14
-chromeos1-row4-rack5-host2-servo,chromeos1-poe-switch9,fa16
-chromeos1-row4-rack5-host3-servo,chromeos1-poe-switch9,fa18
-chromeos1-row4-rack5-host4-servo,chromeos1-poe-switch9,fa20
-chromeos1-row4-rack5-host5-servo,chromeos1-poe-switch9,fa22
-chromeos1-row4-rack5-host6-servo,chromeos1-poe-switch9,fa24
-chromeos1-row4-rack6-host1-servo,,
-chromeos1-row4-rack6-host2-servo,,
-chromeos1-row4-rack6-host3-servo,,
-chromeos1-row4-rack6-host4-servo,,
-chromeos1-row4-rack7-host1-servo,,
-chromeos1-row4-rack7-host2-servo,,
-chromeos1-row4-rack7-host3-servo,,
-chromeos1-row4-rack7-host4-servo,,
-chromeos1-row4-rack7-host5-servo,,
-chromeos1-row4-rack7-host6-servo,,
-chromeos1-row4-rack8-host1-servo,,
-chromeos1-row4-rack8-host2-servo,,
-chromeos1-row4-rack8-host3-servo,,
-chromeos1-row4-rack8-host4-servo,,
-chromeos1-row4-rack8-host5-servo,,
-chromeos1-row4-rack8-host6-servo,,
-chromeos1-row4-rack9-host1-servo,,
-chromeos1-row4-rack9-host2-servo,,
-chromeos1-row4-rack9-host3-servo,,
-chromeos1-row4-rack9-host4-servo,,
-chromeos1-row4-rack9-host5-servo,,
-chromeos1-row4-rack9-host6-servo,,
diff --git a/site_utils/rpm_control_system/setup_rpms.py b/site_utils/rpm_control_system/setup_rpms.py
deleted file mode 100644
index 660e49e..0000000
--- a/site_utils/rpm_control_system/setup_rpms.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging, sys
-
-from config import rpm_config
-import rpm_controller
-
-LOGGING_FORMAT = rpm_config.get('GENERAL','logging_format')
-oyster_rpm_name_format = 'chromeos1-rack%d-rpm1'
-atlantis_rpm_name_format = 'chromeos2-row%d-rack%d-rpm1'
-DEFAULT_OYSTERBAY_OUTLET_MAP = {
-    1 : 'host1',
-    2 : 'host2',
-    4 : 'host3',
-    5 : 'host4',
-    7 : 'host5',
-    8 : 'host6',
-    9 : 'host7',
-    10 : 'host8',
-    12 : 'host9',
-    13 : 'host10',
-    15 : 'host11',
-    16 : 'host12'
-}
-DEFAULT_ATLANTIS_OUTLET_MAP = {
-    1 : 'host1',
-    2 : 'host7',
-    4 : 'host2',
-    5 : 'host8',
-    7 : 'host3',
-    8 : 'host9',
-    9 : 'host4',
-    10 : 'host10',
-    12 : 'host5',
-    13 : 'host11',
-    15 : 'host6',
-    16 : 'host12'
-}
-
-
-def setup_rpm(rpm_name):
-    """Set up the given RPM with the default outlet mapping for its lab.
-
-    @param rpm_name: hostname of the RPM to set up.
-    """
-    logging.debug('Setting up %s.', rpm_name)
-    rpm = rpm_controller.SentryRPMController(rpm_name)
-    if rpm_name.startswith('chromeos1'):
-        outlet_mapping = DEFAULT_OYSTERBAY_OUTLET_MAP
-    else:
-        outlet_mapping = DEFAULT_ATLANTIS_OUTLET_MAP
-    if not rpm.setup(outlet_mapping):
-        logging.error('Failed to set up %s.', rpm_name)
-
-
-def main():
-    if len(sys.argv) != 2:
-        print 'USAGE: python %s [rpm|atlantis|oyster]' % sys.argv[0]
-        print 'atlantis|oyster: implies all RPMs inside that lab.'
-        return
-    if sys.argv[1] != 'atlantis' and sys.argv[1] != 'oyster':
-        setup_rpm(sys.argv[1])
-        return
-    if sys.argv[1] == 'oyster':
-        logging.debug('Setting up All RPM devices in lab: Oyster Bay.')
-        for rack in range(3,8):
-            setup_rpm(oyster_rpm_name_format % rack)
-        return
-    logging.debug('Setting up All RPM devices in lab: Atlantis.')
-    for row in range(1,6):
-        for rack in range(1,8):
-            if ((row == 1 and rack == 1) or (row == 1 and rack == 2) or
-                (row == 5 and rack == 6) or (row == 5 and rack == 7)):
-                # These RPMs do not follow the normal layout.
-                continue
-            setup_rpm(atlantis_rpm_name_format % (row, rack))
-
-
-if __name__ == '__main__':
-    logging.basicConfig(level=logging.DEBUG, format=LOGGING_FORMAT)
-    main()
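The setup_rpms.py script deleted above was still Python 2 (bare print statements). Purely as an illustration, and not a replacement shipped with this change, the same command-line dispatch could be written for Python 3 roughly as follows (setup_rpm below is a stub; the removed script constructed rpm_controller.SentryRPMController(rpm_name) and applied the lab's outlet mapping at that point):

import logging
import sys

def setup_rpm(rpm_name):
    # Stub for illustration; the removed script created a SentryRPMController
    # here and called setup() with the lab's default outlet mapping.
    logging.debug('Setting up %s.', rpm_name)

def main(argv):
    # Same CLI contract as the removed script: one argument that is either an
    # RPM hostname or one of the lab keywords 'atlantis' / 'oyster'.
    if len(argv) != 2:
        print('USAGE: python3 %s [rpm|atlantis|oyster]' % argv[0])
        print('atlantis|oyster: implies all RPMs inside that lab.')
        return 1
    target = argv[1]
    if target == 'oyster':
        for rack in range(3, 8):
            setup_rpm('chromeos1-rack%d-rpm1' % rack)
    elif target == 'atlantis':
        for row in range(1, 6):
            for rack in range(1, 8):
                if (row, rack) in {(1, 1), (1, 2), (5, 6), (5, 7)}:
                    continue  # These RPMs do not follow the normal layout.
                setup_rpm('chromeos2-row%d-rack%d-rpm1' % (row, rack))
    else:
        setup_rpm(target)
    return 0

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    sys.exit(main(sys.argv))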
diff --git a/site_utils/rpm_control_system/utils.py b/site_utils/rpm_control_system/utils.py
deleted file mode 100644
index 1fb9d70..0000000
--- a/site_utils/rpm_control_system/utils.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-"""This file provides util functions used by RPM infrastructure."""
-
-
-import collections
-import csv
-import logging
-import os
-import time
-
-import common
-
-import rpm_infrastructure_exception
-from config import rpm_config
-from autotest_lib.client.common_lib import autotest_enum
-
-
-MAPPING_FILE = os.path.join(
-        os.path.dirname(__file__),
-        rpm_config.get('CiscoPOE', 'servo_interface_mapping_file'))
-
-
-POWERUNIT_HOSTNAME_KEY = 'powerunit_hostname'
-POWERUNIT_OUTLET_KEY = 'powerunit_outlet'
-HYDRA_HOSTNAME_KEY = 'hydra_hostname'
-DEFAULT_EXPIRATION_SECS = 60 * 30
-
-class PowerUnitInfo(object):
-    """A class that wraps rpm/poe information of a device."""
-
-    POWERUNIT_TYPES = autotest_enum.AutotestEnum('POE', 'RPM',
-                                                 string_value=True)
-
-    def __init__(self, device_hostname, powerunit_type,
-                 powerunit_hostname, outlet, hydra_hostname=None):
-        self.device_hostname = device_hostname
-        self.powerunit_type = powerunit_type
-        self.powerunit_hostname = powerunit_hostname
-        self.outlet = outlet
-        self.hydra_hostname = hydra_hostname
-
-
-class LRUCache(object):
-    """A simple implementation of LRU Cache."""
-
-
-    def __init__(self, size, expiration_secs=DEFAULT_EXPIRATION_SECS):
-        """Initialize.
-
-        @param size: Size of the cache.
-        @param expiration_secs: The items expire after |expiration_secs| seconds.
-                                Set to None so that items never expire.
-                                Defaults to DEFAULT_EXPIRATION_SECS.
-        """
-        self.size = size
-        self.cache = collections.OrderedDict()
-        self.timestamps = {}
-        self.expiration_secs = expiration_secs
-
-
-    def __getitem__(self, key):
-        """Get an item from the cache"""
-        # pop and insert the element again so that it
-        # is moved to the end.
-        value = self.cache.pop(key)
-        self.cache[key] = value
-        return value
-
-
-    def __setitem__(self, key, value):
-        """Insert an item into the cache."""
-        if key in self.cache:
-            self.cache.pop(key)
-        elif len(self.cache) == self.size:
-            removed_key, _ = self.cache.popitem(last=False)
-            self.timestamps.pop(removed_key)
-        self.cache[key] = value
-        self.timestamps[key] = time.time()
-
-
-    def __contains__(self, key):
-        """Check whether a key is in the cache."""
-        if (self.expiration_secs is not None and
-            key in self.timestamps and
-            time.time() - self.timestamps[key] > self.expiration_secs):
-            self.cache.pop(key)
-            self.timestamps.pop(key)
-        return key in self.cache
-
-
-def load_servo_interface_mapping(mapping_file=MAPPING_FILE):
-    """
-    Load servo-switch-interface mapping from a CSV file.
-
-    In the file, the first column represents servo hostnames,
-    the second column represents switch hostnames, and the third column
-    represents interface names. Columns are separated by commas.
-
-    chromeos1-rack3-host12-servo,chromeos1-poe-switch1,fa31
-    chromeos1-rack4-host2-servo,chromeos1-poe-switch1,fa32
-    ,chromeos1-poe-switch1,fa33
-    ...
-
-    A row without a servo hostname indicates that no servo
-    has been connected to the corresponding interface.
-    This method ignores such rows.
-
-    @param mapping_file: A csv file that stores the mapping.
-                         If None, the setting in rpm_config.ini will be used.
-
-    @return a dictionary that maps servo hostname to a
-              tuple of switch hostname and interface.
-              e.g. {
-              'chromeos1-rack3-host12-servo': ('chromeos1-poe-switch1', 'fa31')
-               ...}
-
-    @raises: rpm_infrastructure_exception.RPMInfrastructureException
-             when arg mapping_file is None.
-    """
-    if not mapping_file:
-        raise rpm_infrastructure_exception.RPMInfrastructureException(
-                'mapping_file is None.')
-    servo_interface = {}
-    with open(mapping_file) as csvfile:
-        reader = csv.reader(csvfile, delimiter=',')
-        for row in reader:
-            servo_hostname = row[0].strip()
-            switch_hostname = row[1].strip()
-            interface = row[2].strip()
-            if servo_hostname:
-                servo_interface[servo_hostname] = (switch_hostname, interface)
-    return servo_interface
-
-
-def reload_servo_interface_mapping_if_necessary(
-        check_point, mapping_file=MAPPING_FILE):
-    """Reload the servo-interface mapping file if it is modified.
-
-    This method checks if the last-modified time of |mapping_file| is
-    later than |check_point|; if so, it reloads the file.
-
-    @param check_point: A float number representing a time, used to determine
-                        whether we need to reload the mapping file.
-    @param mapping_file: A csv file that stores the mapping. If None,
-                         the setting in rpm_config.ini will be used.
-
-    @return: If the file is reloaded, returns a tuple
-             (last_modified_time, servo_interface) where
-             the first element is the last_modified_time of the
-             mapping file, the second element is a dictionary that
-             maps servo hostname to (switch hostname, interface).
-             If the file is not reloaded, return None.
-
-    @raises: rpm_infrastructure_exception.RPMInfrastructureException
-             when arg mapping_file is None.
-    """
-    if not mapping_file:
-        raise rpm_infrastructure_exception.RPMInfrastructureException(
-                'mapping_file is None.')
-    last_modified = os.path.getmtime(mapping_file)
-    if check_point < last_modified:
-        servo_interface = load_servo_interface_mapping(mapping_file)
-        logging.info('Servo-interface mapping file %s is reloaded.',
-                     mapping_file)
-        return (last_modified, servo_interface)
-    return None
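The LRUCache removed above combined an OrderedDict with a separate timestamp dict to get a size-bounded cache whose entries can expire. A minimal, self-contained sketch of the same idea (not a drop-in replacement for the deleted class) looks like this:

import collections
import time

class ExpiringLRUCache(object):
    """Size-bounded cache whose entries expire after a fixed number of seconds.

    Mirrors the behavior of the removed LRUCache: the least recently used
    entry is evicted when the cache is full, and membership checks drop
    entries older than expiration_secs (None disables expiry).
    """

    def __init__(self, size, expiration_secs=None):
        self.size = size
        self.expiration_secs = expiration_secs
        self._cache = collections.OrderedDict()   # key -> (value, timestamp)

    def __setitem__(self, key, value):
        if key in self._cache:
            del self._cache[key]
        elif len(self._cache) >= self.size:
            self._cache.popitem(last=False)        # evict least recently used
        self._cache[key] = (value, time.time())

    def __getitem__(self, key):
        value, stamp = self._cache.pop(key)        # KeyError if missing
        self._cache[key] = (value, stamp)          # move to most-recent position
        return value

    def __contains__(self, key):
        entry = self._cache.get(key)
        if entry is None:
            return False
        if (self.expiration_secs is not None and
                time.time() - entry[1] > self.expiration_secs):
            del self._cache[key]
            return False
        return True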
diff --git a/site_utils/rpm_control_system/utils_unittest.py b/site_utils/rpm_control_system/utils_unittest.py
deleted file mode 100755
index 5bc4d1d..0000000
--- a/site_utils/rpm_control_system/utils_unittest.py
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/python2
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import __builtin__
-import mox
-import os
-import unittest
-import time
-from StringIO import StringIO
-
-import utils
-
-
-class TestUtils(mox.MoxTestBase):
-    """Test utility functions."""
-
-
-    def test_load_servo_interface_mapping(self):
-        """Test servo-interface mapping file can be loaded."""
-        self.mox.StubOutWithMock(__builtin__, 'open')
-        fake_content = (
-                'chromeos1-rack5-host10-servo, chromeos1-poe-switch1, fa42\n'
-                'chromeos1-rack5-host11-servo, chromeos1-poe-switch1, fa43\n'
-                ', chromeos2-poe-switch8, fa43\n'
-                'chromeos2-rack5-host11-servo, chromeos2-poe-switch8, fa44\n')
-        fake_file = self.mox.CreateMockAnything()
-        fake_file.__enter__().AndReturn(StringIO(fake_content))
-        fake_file.__exit__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
-        open('fake_file.csv').AndReturn(fake_file)
-        expect = {'chromeos1-rack5-host10-servo':
-                          ('chromeos1-poe-switch1', 'fa42'),
-                  'chromeos1-rack5-host11-servo':
-                          ('chromeos1-poe-switch1', 'fa43'),
-                  'chromeos2-rack5-host11-servo':
-                          ('chromeos2-poe-switch8', 'fa44')}
-        self.mox.ReplayAll()
-        self.assertEqual(
-                utils.load_servo_interface_mapping('fake_file.csv'), expect)
-        self.mox.VerifyAll()
-
-
-    def _reload_helper(self, do_reload):
-        """Helper class for mapping file reloading tests."""
-        self.mox.StubOutWithMock(utils, 'load_servo_interface_mapping')
-        self.mox.StubOutWithMock(os.path, 'getmtime')
-        check_point = 1369783561.8525634
-        if do_reload:
-            last_modified = check_point + 10.0
-            servo_interface = {'fake_servo': ('fake_switch', 'fake_if')}
-            utils.load_servo_interface_mapping('fake_file').AndReturn(
-                    servo_interface)
-        else:
-            last_modified = check_point
-        os.path.getmtime(mox.IgnoreArg()).AndReturn(last_modified)
-        self.mox.ReplayAll()
-        result = utils.reload_servo_interface_mapping_if_necessary(
-                check_point, mapping_file='fake_file')
-        if do_reload:
-            self.assertEqual(result, (last_modified, servo_interface))
-        else:
-            self.assertIsNone(result)
-        self.mox.VerifyAll()
-
-
-    def test_reload_servo_interface_mapping_necessary(self):
-        """Test that mapping file is reloaded when it is modified."""
-        self._reload_helper(True)
-
-
-    def test_reload_servo_interface_mapping_not_necessary(self):
-        """Test that mapping file is not reloaded when it is not modified."""
-        self._reload_helper(False)
-
-
-    def test_LRU_cache(self):
-        """Test LRUCache."""
-        p1 = utils.PowerUnitInfo(
-                'host1', utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                'rpm1', 'hydra1')
-        p2 = utils.PowerUnitInfo(
-                'host2', utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                'rpm2', 'hydra2')
-        p3 = utils.PowerUnitInfo(
-                'host3', utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                'rpm3', 'hydra3')
-        # Initialize an LRU with size 2, items never expire.
-        cache = utils.LRUCache(2, expiration_secs=None)
-        # Add two items, LRU should be full now
-        cache['host1'] = p1
-        cache['host2'] = p2
-        self.assertEqual(len(cache.cache), 2)
-        # Visit host2 and add one more item
-        # host1 should be removed from cache
-        _ = cache['host2']
-        cache['host3'] = p3
-        self.assertEqual(len(cache.cache), 2)
-        self.assertTrue('host1' not in cache)
-        self.assertTrue('host2' in cache)
-        self.assertTrue('host3' in cache)
-
-
-    def test_LRU_cache_expires(self):
-        """Test LRUCache expires."""
-        self.mox.StubOutWithMock(time, 'time')
-        time.time().AndReturn(10)
-        time.time().AndReturn(25)
-        p1 = utils.PowerUnitInfo(
-                'host1', utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                'rpm1', 'hydra1')
-
-        self.mox.ReplayAll()
-        # Initialize an LRU with size 1, items expire after 10 secs.
-        cache = utils.LRUCache(1, expiration_secs=10)
-        # Add two items, LRU should be full now
-        cache['host1'] = p1
-        check_contains_1 = 'host1' in cache
-        check_contains_2 = 'host2' in cache
-        self.mox.VerifyAll()
-        self.assertFalse(check_contains_1)
-        self.assertFalse(check_contains_2)
-
-
-    def test_LRU_cache_full_with_expries(self):
-        """Test timestamp is removed properly when cache is full."""
-        self.mox.StubOutWithMock(time, 'time')
-        time.time().AndReturn(10)
-        time.time().AndReturn(25)
-        p1 = utils.PowerUnitInfo(
-                'host1', utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                'rpm1', 'hydra1')
-        p2 = utils.PowerUnitInfo(
-                'host2', utils.PowerUnitInfo.POWERUNIT_TYPES.RPM,
-                'rpm2', 'hydra2')
-        self.mox.ReplayAll()
-        # Initialize an LRU with size 1, items expire after 10 secs.
-        cache = utils.LRUCache(1, expiration_secs=10)
-        # Add two items, LRU should be full now
-        cache['host1'] = p1
-        cache['host2'] = p2
-        self.mox.VerifyAll()
-        self.assertEqual(len(cache.timestamps), 1)
-        self.assertEqual(len(cache.cache), 1)
-        self.assertTrue('host2' in cache.timestamps)
-        self.assertTrue('host2' in cache.cache)
-
-
-if __name__ == '__main__':
-    unittest.main()
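The unit tests removed above depend on mox and StringIO, which are Python 2 only. For reference only, the same stub-and-verify pattern can be expressed with unittest.mock; the sketch below tests a simplified, hypothetical stand-in helper (reload_if_newer is not the removed reload_servo_interface_mapping_if_necessary):

import os
import unittest
from unittest import mock

def reload_if_newer(check_point, path, loader):
    """Stand-in for the removed reload helper: call loader(path) when the file
    at 'path' is newer than check_point, otherwise return None."""
    last_modified = os.path.getmtime(path)
    if check_point < last_modified:
        return (last_modified, loader(path))
    return None

class ReloadIfNewerTest(unittest.TestCase):
    """Shows how the mox-style stubbing above maps onto unittest.mock."""

    def test_reloads_when_file_is_newer(self):
        loader = mock.Mock(return_value={'fake_servo': ('fake_switch', 'fa1')})
        with mock.patch.object(os.path, 'getmtime', return_value=110.0):
            result = reload_if_newer(100.0, 'fake_file', loader)
        self.assertEqual(result, (110.0, loader.return_value))
        loader.assert_called_once_with('fake_file')

    def test_skips_reload_when_file_is_unchanged(self):
        loader = mock.Mock()
        with mock.patch.object(os.path, 'getmtime', return_value=100.0):
            self.assertIsNone(reload_if_newer(100.0, 'fake_file', loader))
        loader.assert_not_called()

if __name__ == '__main__':
    unittest.main()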
diff --git a/site_utils/seed_test_attr.py b/site_utils/seed_test_attr.py
index feacbab..c736bb3 100644
--- a/site_utils/seed_test_attr.py
+++ b/site_utils/seed_test_attr.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/site_utils/server_manager.py b/site_utils/server_manager.py
deleted file mode 100644
index 90c8430..0000000
--- a/site_utils/server_manager.py
+++ /dev/null
@@ -1,271 +0,0 @@
-# Lint as: python2, python3
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This module provides functions to manage servers in server database
-(defined in global config section AUTOTEST_SERVER_DB).
-
-create(hostname, role=None, note=None)
-    Create a server with given role, with status primary.
-
-delete(hostname)
-    Delete a server from the database.
-
-modify(hostname, role=None, status=None, note=None, delete=False,
-       attribute=None, value=None)
-    Modify a server's role, status, note, or attribute:
-    1. Add role to a server. If the server is in primary status, proper actions
-       like service restart will be executed to enable the role.
-    2. Delete a role from a server. If the server is in primary status, proper
-       actions like service restart will be executed to disable the role.
-    3. Change status of a server. If the server is changed from or to primary
-       status, proper actions like service restart will be executed to enable
-       or disable each role of the server.
-    4. Change note of a server. Note is a field where you can add a description
-       of the server.
-    5. Change/delete attribute of a server. Attribute can be used to store
-       information about a server. For example, the max_processes count for a
-       drone.
-
-"""
-
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import datetime
-
-import common
-
-from autotest_lib.frontend.server import models as server_models
-from autotest_lib.site_utils import server_manager_actions
-from autotest_lib.site_utils import server_manager_utils
-
-
-def _add_role(server, role, action):
-    """Add a role to the server.
-
-    @param server: An object of server_models.Server.
-    @param role: Role to be added to the server.
-    @param action: Execute actions after role or status is changed. Default to
-                   False.
-
-    @raise ServerActionError: If the role fails to be added.
-    """
-    server_models.validate(role=role)
-    if role in server.get_role_names():
-        raise server_manager_utils.ServerActionError(
-                'Server %s already has role %s.' % (server.hostname, role))
-
-    # Verify server
-    if not server_manager_utils.check_server(server.hostname, role):
-        raise server_manager_utils.ServerActionError(
-                'Server %s is not ready for role %s.' % (server.hostname, role))
-
-    if (role in server_models.ServerRole.ROLES_REQUIRE_UNIQUE_INSTANCE and
-        server.status == server_models.Server.STATUS.PRIMARY):
-        servers = server_models.Server.objects.filter(
-                roles__role=role, status=server_models.Server.STATUS.PRIMARY)
-        if len(servers) >= 1:
-            raise server_manager_utils.ServerActionError(
-                'Role %s must be unique. Server %s already has role %s.' %
-                (role, servers[0].hostname, role))
-
-    server_models.ServerRole.objects.create(server=server, role=role)
-
-    # If needed, apply actions to enable the role for the server.
-    server_manager_actions.try_execute(server, [role], enable=True,
-                                       post_change=True, do_action=action)
-
-    print('Role %s is added to server %s.' % (role, server.hostname))
-
-
-def _delete_role(server, role, action=False):
-    """Delete a role from the server.
-
-    @param server: An object of server_models.Server.
-    @param role: Role to be deleted from the server.
-    @param action: Execute actions after role or status is changed. Default to
-                   False.
-
-    @raise ServerActionError: If the role fails to be deleted.
-    """
-    server_models.validate(role=role)
-    if role not in server.get_role_names():
-        raise server_manager_utils.ServerActionError(
-                'Server %s does not have role %s.' % (server.hostname, role))
-
-    if server.status == server_models.Server.STATUS.PRIMARY:
-        server_manager_utils.warn_missing_role(role, server)
-
-    # Apply actions to disable the role for the server before the role is
-    # removed from the server.
-    server_manager_actions.try_execute(server, [role], enable=False,
-                                       post_change=False, do_action=action)
-
-    print('Deleting role %s from server %s...' % (role, server.hostname))
-    server.roles.get(role=role).delete()
-
-    # Apply actions to disable the role for the server after the role is
-    # removed from the server.
-    server_manager_actions.try_execute(server, [role], enable=False,
-                                       post_change=True, do_action=action)
-
-    if (not server.get_role_names() and
-        server.status == server_models.Server.STATUS.PRIMARY):
-        print('Server %s has no role.' % server.hostname)
-
-    print('Role %s is deleted from server %s.' % (role, server.hostname))
-
-
-def _change_status(server, status, action):
-    """Change the status of the server.
-
-    @param server: An object of server_models.Server.
-    @param status: New status of the server.
-    @param action: Execute actions after role or status is changed. Default to
-                   False.
-
-    @raise ServerActionError: If the status fails to be changed.
-    """
-    server_models.validate(status=status)
-    if server.status == status:
-        raise server_manager_utils.ServerActionError(
-                'Server %s already has status of %s.' %
-                (server.hostname, status))
-    if (not server.roles.all() and
-        status == server_models.Server.STATUS.PRIMARY):
-        raise server_manager_utils.ServerActionError(
-                'Server %s has no role associated. Server must have a role to '
-                'be in status primary.' % server.hostname)
-
-    # Abort the action if the server's status will be changed to primary and
-    # the Autotest instance already has another server running a unique role.
-    # For example, a scheduler server is already running, and a repair_required
-    # server with role scheduler should not be changed to status primary.
-    unique_roles = server.roles.filter(
-            role__in=server_models.ServerRole.ROLES_REQUIRE_UNIQUE_INSTANCE)
-    if unique_roles and status == server_models.Server.STATUS.PRIMARY:
-        for role in unique_roles:
-            servers = server_models.Server.objects.filter(
-                    roles__role=role.role,
-                    status=server_models.Server.STATUS.PRIMARY)
-            if len(servers) == 1:
-                raise server_manager_utils.ServerActionError(
-                        'Role %s must be unique. Server %s already has the '
-                        'role.' % (role.role, servers[0].hostname))
-
-    # Post a warning if the server's status will be changed from primary to
-    # another value and the server is running a unique role across the database,
-    # e.g., scheduler.
-    if server.status == server_models.Server.STATUS.PRIMARY:
-        for role in server.get_role_names():
-            server_manager_utils.warn_missing_role(role, server)
-
-    enable = status == server_models.Server.STATUS.PRIMARY
-    server_manager_actions.try_execute(server, server.get_role_names(),
-                                       enable=enable, post_change=False,
-                                       do_action=action)
-
-    prev_status = server.status
-    server.status = status
-    server.save()
-
-    # Apply actions to enable/disable roles of the server after the status is
-    # changed.
-    server_manager_actions.try_execute(server, server.get_role_names(),
-                                       enable=enable, post_change=True,
-                                       prev_status=prev_status,
-                                       do_action=action)
-
-    print('Status of server %s is changed from %s to %s. Affected roles: %s' %
-          (server.hostname, prev_status, status,
-           ', '.join(server.get_role_names())))
-
-
-@server_manager_utils.verify_server(exist=False)
-def create(hostname, role=None, note=None):
-    """Create a new server.
-
-    The status of a new server will always be primary.
-
-    @param hostname: hostname of the server.
-    @param role: role of the new server, default to None.
-    @param note: notes about the server, default to None.
-
-    @return: A Server object that contains the server information.
-    """
-    server_models.validate(hostname=hostname, role=role)
-    server = server_models.Server.objects.create(
-            hostname=hostname, status=server_models.Server.STATUS.PRIMARY,
-            note=note, date_created=datetime.datetime.now())
-    server_models.ServerRole.objects.create(server=server, role=role)
-    return server
-
-
-@server_manager_utils.verify_server()
-def delete(hostname, server=None):
-    """Delete given server from server database.
-
-    @param hostname: hostname of the server to be deleted.
-    @param server: Server object from database query, this argument should be
-                   injected by the verify_server_exists decorator.
-
-    @raise ServerActionError: If the delete action failed, e.g., the server is
-            not found in the database.
-    """
-    print('Deleting server %s from server database.' % hostname)
-
-    if (server_manager_utils.use_server_db() and
-        server.status == server_models.Server.STATUS.PRIMARY):
-        print('Server %s is in status primary, need to disable its '
-              'current roles first.' % hostname)
-        for role in server.roles.all():
-            _delete_role(server, role.role)
-
-    server.delete()
-    print('Server %s is deleted from server database.' % hostname)
-
-
-@server_manager_utils.verify_server()
-def modify(hostname, role=None, status=None, delete=False, note=None,
-           attribute=None, value=None, action=False, server=None):
-    """Modify given server with specified actions.
-
-    @param hostname: hostname of the server to be modified.
-    @param role: Role to be added to the server.
-    @param status: Modify server status.
-    @param delete: True to delete given role from the server, default to False.
-    @param note: Note of the server.
-    @param attribute: Name of an attribute of the server.
-    @param value: Value of an attribute of the server.
-    @param action: Execute actions after role or status is changed. Default to
-                   False.
-    @param server: Server object from database query, this argument should be
-                   injected by the verify_server_exists decorator.
-
-    @raise InvalidDataError: If the operation failed due to an invalid value of
-                             the arguments.
-    @raise ServerActionError: If any operation failed.
-    """
-    if role:
-        if not delete:
-            _add_role(server, role, action)
-        else:
-            _delete_role(server, role, action)
-
-    if status:
-        _change_status(server, status, action)
-
-    if note is not None:
-        server.note = note
-        server.save()
-
-    if attribute and value:
-        server_manager_utils.change_attribute(server, attribute, value)
-    elif attribute and delete:
-        server_manager_utils.delete_attribute(server, attribute)
-
-    return server
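A core rule in the server_manager module removed above is that some roles may only run on one primary server at a time. The sketch below restates that rule without the Django models, purely for illustration; the role names, the can_enable_role helper, and the data shapes are assumptions, not code from this tree:

# Roles that may only be held by a single server in status primary
# (an assumption mirroring ROLES_REQUIRE_UNIQUE_INSTANCE in the removed code).
ROLES_REQUIRE_UNIQUE_INSTANCE = frozenset({'scheduler', 'host_scheduler', 'database'})

def can_enable_role(role, primary_servers):
    """Decide whether `role` can be enabled on another primary server.

    primary_servers maps hostname -> set of roles for servers currently in
    status primary. Returns (ok, reason).
    """
    if role not in ROLES_REQUIRE_UNIQUE_INSTANCE:
        return True, ''
    for hostname, roles in primary_servers.items():
        if role in roles:
            return False, ('Role %s must be unique. Server %s already has '
                           'role %s.' % (role, hostname, role))
    return True, ''

# Example:
#   can_enable_role('scheduler', {'srv1': {'scheduler'}, 'srv2': {'drone'}})
#   -> (False, 'Role scheduler must be unique. Server srv1 already has role scheduler.')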
diff --git a/site_utils/server_manager_actions.py b/site_utils/server_manager_actions.py
deleted file mode 100644
index f48f585..0000000
--- a/site_utils/server_manager_actions.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# Lint as: python2, python3
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This module provides utility functions to help managing servers in server
-database (defined in global config section AUTOTEST_SERVER_DB).
-
-After a role is added or removed from a server, certain services may need to
-be restarted. For example, scheduler needs to be restarted after a drone is
-added to a primary server. This module includes functions to check whether
-actions need to be executed and which actions to execute on which servers.
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import subprocess
-import sys
-
-import common
-
-from autotest_lib.frontend.server import models as server_models
-from autotest_lib.site_utils import server_manager_utils
-from autotest_lib.site_utils.lib import infra
-
-
-# Actions that must be executed for server management action to be effective.
-# Each action is a tuple:
-# (the role on which the command should be executed, the command)
-RESTART_SCHEDULER = (server_models.ServerRole.ROLE.SCHEDULER,
-                     'sudo service scheduler restart')
-RESTART_HOST_SCHEDULER = (server_models.ServerRole.ROLE.HOST_SCHEDULER,
-                          'sudo service host-scheduler restart')
-RELOAD_APACHE = (server_models.ServerRole.ROLE.SCHEDULER,
-                 'sudo service apache reload')
-
-STOP_SCHEDULER = (server_models.ServerRole.ROLE.SCHEDULER,
-                  'sudo service scheduler stop')
-STOP_HOST_SCHEDULER = (server_models.ServerRole.ROLE.HOST_SCHEDULER,
-                       'sudo service host-scheduler stop')
-
-# Dictionary of actions needed for a role to be enabled. Key is the role, and
-# value is a list of actions. All these actions should be applied after the role
-# is added to the server, or after the server's status is changed to primary.
-ACTIONS_AFTER_ROLE_APPLIED = {
-        server_models.ServerRole.ROLE.SCHEDULER: [RESTART_SCHEDULER],
-        server_models.ServerRole.ROLE.HOST_SCHEDULER: [RESTART_HOST_SCHEDULER],
-        server_models.ServerRole.ROLE.DRONE: [RESTART_SCHEDULER],
-        server_models.ServerRole.ROLE.DATABASE:
-                [RESTART_SCHEDULER, RESTART_HOST_SCHEDULER, RELOAD_APACHE],
-        server_models.ServerRole.ROLE.DEVSERVER: [RESTART_SCHEDULER],
-        }
-
-# Dictionary of actions needed for a role to be disabled. Key is the role, and
-# value is a list of actions.
-# These actions should be taken before the role is deleted from a server, or
-# before the server's status is changed away from primary.
-ACTIONS_BEFORE_ROLE_REMOVED = {
-        server_models.ServerRole.ROLE.SCHEDULER: [STOP_SCHEDULER],
-        server_models.ServerRole.ROLE.HOST_SCHEDULER: [STOP_HOST_SCHEDULER],
-        server_models.ServerRole.ROLE.DATABASE:
-                [STOP_SCHEDULER, STOP_HOST_SCHEDULER],
-        }
-# These actions should be taken after the role is deleted from a server, or
-# after the server's status is changed away from primary.
-ACTIONS_AFTER_ROLE_REMOVED = {
-        server_models.ServerRole.ROLE.DRONE: [RESTART_SCHEDULER],
-        server_models.ServerRole.ROLE.DEVSERVER: [RESTART_SCHEDULER],
-        }
-
-
-def apply(action):
-    """Apply an given action.
-
-    It usually involves ssh-ing to the server with the specific role and running
-    the command, e.g., ssh to the scheduler server and restart the scheduler.
-
-    @param action: A tuple of (the role on which the command should be executed,
-                   the command)
-    @raise ServerActionError: If the action can't be applied due to database
-                              issue.
-    @raise subprocess.CalledProcessError: If the command fails to be
-                                          executed.
-    """
-    role = action[0]
-    command = action[1]
-    # Find the servers with role
-    servers = server_manager_utils.get_servers(
-            role=role, status=server_models.Server.STATUS.PRIMARY)
-    if not servers:
-        print('WARNING! Action %s failed to be applied. No '
-              'server with given role %s was found.' % (action, role),
-              file=sys.stderr)
-        return
-
-    for server in servers:
-        print('Run command `%s` on server %s' % (command, server.hostname))
-        try:
-            infra.execute_command(server.hostname, command)
-        except subprocess.CalledProcessError as e:
-            print('Failed to check server %s, error: %s' %
-                  (server.hostname, e), file=sys.stderr)
-
-
-def try_execute(server, roles, enable, post_change,
-                prev_status=server_models.Server.STATUS.REPAIR_REQUIRED,
-                do_action=False):
-    """Try to execute actions for given role changes of the server.
-
-    @param server: Server that has the role changes.
-    @param roles: A list of roles changed.
-    @param enable: Set to True if the roles are enabled, i.e., added to server.
-                   If it's False, the roles are removed from the server.
-    @param post_change: Set to True if actions should be applied after the role
-                        changes, otherwise set to False.
-    @param prev_status: The server's status before the status change, if any.
-                        This helps decide whether actions should be executed,
-                        since actions should be applied if the server's status
-                        is changed from primary to another status. Defaults to
-                        repair_required.
-    @param do_action: Set to True to execute actions, otherwise, post a warning.
-    """
-    if not server_manager_utils.use_server_db():
-        return
-    # This check prevents actions from being applied to a server that is not in
-    # primary status or when the server database is not enabled. Note that no
-    # action is needed before a server is changed to primary status. If that
-    # assumption is no longer valid, this method needs to be updated accordingly.
-    if (server.status != server_models.Server.STATUS.PRIMARY and
-        prev_status != server_models.Server.STATUS.PRIMARY):
-        return
-
-    possible_actions = {}
-    if enable:
-        if post_change:
-            possible_actions = ACTIONS_AFTER_ROLE_APPLIED
-    else:
-        if post_change:
-            possible_actions = ACTIONS_AFTER_ROLE_REMOVED
-        else:
-            possible_actions = ACTIONS_BEFORE_ROLE_REMOVED
-
-    all_actions = []
-    for role in roles:
-        all_actions.extend(possible_actions.get(role, []))
-    for action in set(all_actions):
-        if do_action:
-            apply(action)
-        else:
-            message = ('WARNING! Action %s is skipped. Please manually '
-                       'execute the action to make your change effective.' %
-                       str(action))
-            print(message, file=sys.stderr)
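The try_execute() helper removed above selects an action table based on two flags: whether roles are being enabled, and whether the call happens before or after the change, then deduplicates the selected actions. A compact, self-contained restatement of that selection logic follows; the action tables here are simplified stand-ins for illustration only:

# Simplified stand-ins for the action tables in the removed module.
ACTIONS_AFTER_ROLE_APPLIED = {'drone': ['restart scheduler']}
ACTIONS_BEFORE_ROLE_REMOVED = {'scheduler': ['stop scheduler']}
ACTIONS_AFTER_ROLE_REMOVED = {'drone': ['restart scheduler']}

def select_actions(roles, enable, post_change):
    """Return the deduplicated actions for the given role changes."""
    if enable:
        # Nothing to do before a role is applied; actions run afterwards.
        table = ACTIONS_AFTER_ROLE_APPLIED if post_change else {}
    else:
        table = (ACTIONS_AFTER_ROLE_REMOVED if post_change
                 else ACTIONS_BEFORE_ROLE_REMOVED)
    actions = []
    for role in roles:
        actions.extend(table.get(role, []))
    return sorted(set(actions))  # the removed code also deduplicated with set()

# select_actions(['drone'], enable=True, post_change=True)
#   -> ['restart scheduler']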
diff --git a/site_utils/server_manager_unittest.py b/site_utils/server_manager_unittest.py
deleted file mode 100644
index a060ef2..0000000
--- a/site_utils/server_manager_unittest.py
+++ /dev/null
@@ -1,395 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import mox
-import unittest
-
-import common
-
-import django.core.exceptions
-from autotest_lib.client.common_lib.cros.network import ping_runner
-from autotest_lib.frontend import setup_django_environment
-from autotest_lib.frontend.server import models as server_models
-from autotest_lib.site_utils import server_manager
-from autotest_lib.site_utils import server_manager_utils
-from autotest_lib.site_utils.lib import infra
-
-
-class QueriableList(list):
-    """A mock list object supports queries including filter and all.
-    """
-
-    def filter(self, **kwargs):
-        """Mock the filter call in django model.
-        """
-        raise NotImplementedError()
-
-
-    def get(self, **kwargs):
-        """Mock the get call in django model.
-        """
-        raise NotImplementedError()
-
-
-    def all(self):
-        """Return all items in the list.
-
-        @return: All items in the list.
-        """
-        return [item for item in self]
-
-
-class ServerManagerUnittests(mox.MoxTestBase):
-    """Unittest for testing server_manager module.
-    """
-
-    def setUp(self):
-        """Initialize the unittest."""
-        super(ServerManagerUnittests, self).setUp()
-
-        # Initialize test objects.
-        self.DRONE_ROLE = mox.MockObject(
-                server_models.ServerRole,
-                attrs={'role': server_models.ServerRole.ROLE.DRONE})
-        self.SCHEDULER_ROLE = mox.MockObject(
-                server_models.ServerRole,
-                attrs={'role': server_models.ServerRole.ROLE.SCHEDULER})
-        self.DRONE_ATTRIBUTE = mox.MockObject(
-                server_models.ServerAttribute,
-                attrs={'attribute': 'max_processes', 'value':1})
-        self.PRIMARY_DRONE = mox.MockObject(
-                server_models.Server,
-                attrs={'hostname': 'primary_drone_hostname',
-                       'status': server_models.Server.STATUS.PRIMARY,
-                       'roles': QueriableList([self.DRONE_ROLE]),
-                       'attributes': QueriableList([self.DRONE_ATTRIBUTE])})
-        self.REPAIR_REQUIRED_DRONE = mox.MockObject(
-                server_models.Server,
-                attrs={'hostname': 'repair_required_drone_hostname',
-                       'status': server_models.Server.STATUS.REPAIR_REQUIRED,
-                       'roles': QueriableList([self.DRONE_ROLE]),
-                       'attributes': QueriableList([self.DRONE_ATTRIBUTE])})
-        self.PRIMARY_SCHEDULER = mox.MockObject(
-                server_models.Server,
-                attrs={'hostname': 'primary_scheduler_hostname',
-                       'status': server_models.Server.STATUS.PRIMARY,
-                       'roles': QueriableList([self.SCHEDULER_ROLE]),
-                       'attributes': QueriableList([])})
-        self.REPAIR_REQUIRED_SCHEDULER = mox.MockObject(
-                server_models.Server,
-                attrs={'hostname': 'repair_required_scheduler_hostname',
-                       'status': server_models.Server.STATUS.REPAIR_REQUIRED,
-                       'roles': QueriableList([self.SCHEDULER_ROLE]),
-                       'attributes': QueriableList([])})
-
-        self.mox.StubOutWithMock(server_manager_utils, 'check_server')
-        self.mox.StubOutWithMock(server_manager_utils, 'warn_missing_role')
-        self.mox.StubOutWithMock(server_manager_utils, 'use_server_db')
-        self.mox.StubOutWithMock(server_models.Server, 'get_role_names')
-        self.mox.StubOutWithMock(server_models.Server.objects, 'create')
-        self.mox.StubOutWithMock(server_models.Server.objects, 'filter')
-        self.mox.StubOutWithMock(server_models.Server.objects, 'get')
-        self.mox.StubOutWithMock(server_models.ServerRole, 'delete')
-        self.mox.StubOutWithMock(server_models.ServerRole.objects, 'create')
-        self.mox.StubOutWithMock(server_models.ServerRole.objects, 'filter')
-        self.mox.StubOutWithMock(server_models.ServerAttribute.objects,
-                                 'create')
-        self.mox.StubOutWithMock(server_models.ServerAttribute.objects,
-                                 'filter')
-        self.mox.StubOutWithMock(infra, 'execute_command')
-        self.mox.StubOutWithMock(ping_runner.PingRunner, 'simple_ping')
-
-
-    def testCreateServerSuccess(self):
-        """Test create method can create a server successfully.
-        """
-        ping_runner.PingRunner().simple_ping(self.PRIMARY_DRONE.hostname
-                                             ).AndReturn(True)
-        server_models.Server.objects.get(
-                hostname=self.PRIMARY_DRONE.hostname
-                ).AndRaise(django.core.exceptions.ObjectDoesNotExist)
-        server_models.Server.objects.create(
-                hostname=mox.IgnoreArg(), status=mox.IgnoreArg(),
-                date_created=mox.IgnoreArg(), note=mox.IgnoreArg()
-                ).AndReturn(self.PRIMARY_DRONE)
-        server_models.ServerRole.objects.create(
-                server=mox.IgnoreArg(), role=server_models.ServerRole.ROLE.DRONE
-                ).AndReturn(self.DRONE_ROLE)
-        self.mox.ReplayAll()
-        drone = server_manager.create(hostname=self.PRIMARY_DRONE.hostname,
-                                      role=server_models.ServerRole.ROLE.DRONE)
-
-
-    def testAddRoleToRepairRequiredSuccess(self):
-        """Test manager can add a role to a repair_failed server successfully.
-
-        Confirm that database call is made, and no action is taken, e.g.,
-        restart scheduler to activate a new devserver.
-        """
-        server_models.validate(role=server_models.ServerRole.ROLE.DEVSERVER)
-        server_manager_utils.check_server(mox.IgnoreArg(),
-                                          mox.IgnoreArg()).AndReturn(True)
-        server_manager_utils.use_server_db().MultipleTimes(
-                ).AndReturn(True)
-        self.mox.StubOutWithMock(self.REPAIR_REQUIRED_DRONE, 'get_role_names')
-        self.REPAIR_REQUIRED_DRONE.get_role_names().AndReturn(
-                [server_models.ServerRole.ROLE.DRONE])
-        server_models.ServerRole.objects.create(
-                server=mox.IgnoreArg(),
-                role=server_models.ServerRole.ROLE.DEVSERVER
-                ).AndReturn(self.DRONE_ROLE)
-        self.mox.ReplayAll()
-        server_manager._add_role(server=self.REPAIR_REQUIRED_DRONE,
-                                 role=server_models.ServerRole.ROLE.DEVSERVER,
-                                 action=True)
-
-
-    def testAddRoleToRepairRequiredFail_RoleAlreadyExists(self):
-        """Test manager fails to add a role to a repair_required server if
-        server already has the given role.
-        """
-        server_models.validate(role=server_models.ServerRole.ROLE.DRONE)
-        self.mox.StubOutWithMock(self.REPAIR_REQUIRED_DRONE, 'get_role_names')
-        self.REPAIR_REQUIRED_DRONE.get_role_names().AndReturn(
-                [server_models.ServerRole.ROLE.DRONE])
-        self.mox.ReplayAll()
-        self.assertRaises(server_manager_utils.ServerActionError,
-                          server_manager._add_role,
-                          server=self.REPAIR_REQUIRED_DRONE,
-                          role=server_models.ServerRole.ROLE.DRONE,
-                          action=True)
-
-
-    def testDeleteRoleFromRepairRequiredSuccess(self):
-        """Test manager can delete a role from a repair_required server
-        successfully.
-
-        Confirm that database call is made, and no action is taken, e.g.,
-        restart scheduler to delete an existing devserver.
-        """
-        server_models.validate(role=server_models.ServerRole.ROLE.DRONE)
-        server_manager_utils.use_server_db().MultipleTimes(
-                ).AndReturn(True)
-        self.mox.StubOutWithMock(self.REPAIR_REQUIRED_DRONE, 'get_role_names')
-        self.REPAIR_REQUIRED_DRONE.get_role_names().MultipleTimes().AndReturn(
-                [server_models.ServerRole.ROLE.DRONE])
-        self.mox.StubOutWithMock(self.REPAIR_REQUIRED_DRONE.roles, 'get')
-        self.REPAIR_REQUIRED_DRONE.roles.get(
-                role=server_models.ServerRole.ROLE.DRONE
-                ).AndReturn(self.DRONE_ROLE)
-        self.mox.ReplayAll()
-        server_manager._delete_role(server=self.REPAIR_REQUIRED_DRONE,
-                                    role=server_models.ServerRole.ROLE.DRONE,
-                                    action=True)
-
-
-    def testDeleteRoleFromRepairRequiredFail_RoleNotExist(self):
-        """Test manager fails to delete a role from a repair_required server if
-        the server does not have the given role.
-        """
-        server_models.validate(role=server_models.ServerRole.ROLE.DEVSERVER)
-        self.mox.StubOutWithMock(self.REPAIR_REQUIRED_DRONE, 'get_role_names')
-        self.REPAIR_REQUIRED_DRONE.get_role_names().AndReturn(
-                [server_models.ServerRole.ROLE.DRONE])
-        self.mox.ReplayAll()
-        self.assertRaises(server_manager_utils.ServerActionError,
-                          server_manager._delete_role,
-                          server=self.REPAIR_REQUIRED_DRONE,
-                          role=server_models.ServerRole.ROLE.DEVSERVER,
-                          action=True)
-
-
-    def testChangeStatusSuccess_RepairFailedToPrimary(self):
-        """Test manager can change the status of a repair_required server to
-        primary.
-        """
-        server_models.validate(status=server_models.Server.STATUS.PRIMARY)
-        server_manager_utils.use_server_db().MultipleTimes(
-                ).AndReturn(True)
-        self.mox.StubOutWithMock(self.REPAIR_REQUIRED_DRONE, 'get_role_names')
-        self.REPAIR_REQUIRED_DRONE.get_role_names().MultipleTimes().AndReturn(
-                [server_models.ServerRole.ROLE.DRONE])
-        self.mox.StubOutWithMock(self.REPAIR_REQUIRED_DRONE.roles, 'filter')
-        self.REPAIR_REQUIRED_DRONE.roles.filter(
-                role__in=server_models.ServerRole.ROLES_REQUIRE_UNIQUE_INSTANCE
-                ).AndReturn(None)
-        server_models.Server.objects.filter(
-                roles__role=server_models.ServerRole.ROLE.SCHEDULER,
-                status=server_models.Server.STATUS.PRIMARY
-                ).AndReturn([self.PRIMARY_SCHEDULER])
-        infra.execute_command(mox.IgnoreArg(), mox.IgnoreArg())
-        self.mox.ReplayAll()
-        server_manager._change_status(
-                server=self.REPAIR_REQUIRED_DRONE,
-                status=server_models.Server.STATUS.PRIMARY,
-                action=True)
-
-
-    def testChangeStatusSuccess_PrimaryToRepairFailed(self):
-        """Test manager can change the status of a primary server to
-        repair_required.
-        """
-        server_models.validate(
-                status=server_models.Server.STATUS.REPAIR_REQUIRED)
-        self.mox.StubOutWithMock(self.PRIMARY_DRONE.roles, 'filter')
-        self.mox.StubOutWithMock(self.PRIMARY_DRONE, 'get_role_names')
-        self.PRIMARY_DRONE.get_role_names().MultipleTimes().AndReturn(
-                [server_models.ServerRole.ROLE.DRONE])
-        self.PRIMARY_DRONE.roles.filter(
-                role__in=server_models.ServerRole.ROLES_REQUIRE_UNIQUE_INSTANCE
-                ).AndReturn(None)
-        server_manager_utils.use_server_db().MultipleTimes().AndReturn(True)
-        server_manager_utils.warn_missing_role(
-                server_models.ServerRole.ROLE.DRONE, self.PRIMARY_DRONE)
-        server_models.Server.objects.filter(
-                roles__role=server_models.ServerRole.ROLE.SCHEDULER,
-                status=server_models.Server.STATUS.PRIMARY
-                ).AndReturn([self.PRIMARY_SCHEDULER])
-        infra.execute_command(mox.IgnoreArg(), mox.IgnoreArg())
-        self.mox.ReplayAll()
-        server_manager._change_status(
-                server=self.PRIMARY_DRONE,
-                status=server_models.Server.STATUS.REPAIR_REQUIRED,
-                action=True)
-
-
-    def testChangeStatusFail_StatusNoChange(self):
-        """Test manager cannot change the status of a server with the same
-        status.
-        """
-        server_models.validate(
-                status=server_models.Server.STATUS.REPAIR_REQUIRED)
-        self.mox.ReplayAll()
-        self.assertRaises(server_manager_utils.ServerActionError,
-                          server_manager._change_status,
-                          server=self.REPAIR_REQUIRED_DRONE,
-                          status=server_models.Server.STATUS.REPAIR_REQUIRED,
-                          action=True)
-
-
-    def testChangeStatusFail_UniqueInstance(self):
-        """Test manager cannot change the status of a server from
-        repair_required to primary if a primary server already exists for a
-        role that doesn't allow multiple instances.
-        """
-        server_models.validate(status=server_models.Server.STATUS.PRIMARY)
-        self.mox.StubOutWithMock(self.REPAIR_REQUIRED_SCHEDULER.roles, 'filter')
-        self.REPAIR_REQUIRED_SCHEDULER.roles.filter(
-                role__in=server_models.ServerRole.ROLES_REQUIRE_UNIQUE_INSTANCE
-                ).AndReturn(QueriableList([self.SCHEDULER_ROLE]))
-        server_models.Server.objects.filter(
-                roles__role=self.SCHEDULER_ROLE.role,
-                status=server_models.Server.STATUS.PRIMARY
-                ).AndReturn(QueriableList([self.PRIMARY_SCHEDULER]))
-        self.mox.ReplayAll()
-        self.assertRaises(server_manager_utils.ServerActionError,
-                          server_manager._change_status,
-                          server=self.REPAIR_REQUIRED_SCHEDULER,
-                          status=server_models.Server.STATUS.PRIMARY,
-                          action=True)
-
-
-    def testAddRoleToRepairFailedFail_CheckServerFail(self):
-        """Test manager fails to add a role to a repair_required server if check
-        server is failed.
-        """
-        server_manager_utils.check_server(mox.IgnoreArg(),
-                                          mox.IgnoreArg()).AndReturn(False)
-        server_models.validate(role=server_models.ServerRole.ROLE.DRONE)
-        self.mox.StubOutWithMock(self.REPAIR_REQUIRED_DRONE, 'get_role_names')
-        self.REPAIR_REQUIRED_DRONE.get_role_names().MultipleTimes().AndReturn(
-                [server_models.ServerRole.ROLE.DRONE])
-        self.mox.ReplayAll()
-        self.assertRaises(server_manager_utils.ServerActionError,
-                          server_manager._add_role,
-                          server=self.REPAIR_REQUIRED_DRONE,
-                          role=server_models.ServerRole.ROLE.SCHEDULER,
-                          action=True)
-
-
-    def testAddRoleToPrimarySuccess(self):
-        """Test manager can add a role to a primary server successfully.
-
-        Confirm that actions need to be taken, e.g., restart the scheduler for
-        the new drone to be added.
-        """
-        server_models.validate(role=server_models.ServerRole.ROLE.DRONE)
-        server_manager_utils.check_server(mox.IgnoreArg(),
-                                          mox.IgnoreArg()).AndReturn(True)
-        server_manager_utils.use_server_db().MultipleTimes().AndReturn(True)
-        self.mox.StubOutWithMock(self.PRIMARY_SCHEDULER, 'get_role_names')
-        self.PRIMARY_SCHEDULER.get_role_names().AndReturn(
-                [server_models.ServerRole.ROLE.SCHEDULER])
-        server_models.ServerRole.objects.create(
-                server=self.PRIMARY_SCHEDULER,
-                role=server_models.ServerRole.ROLE.DRONE
-                ).AndReturn(self.DRONE_ROLE)
-        server_models.Server.objects.filter(
-                roles__role=server_models.ServerRole.ROLE.SCHEDULER,
-                status=server_models.Server.STATUS.PRIMARY
-                ).AndReturn([self.PRIMARY_SCHEDULER])
-        infra.execute_command(mox.IgnoreArg(), mox.IgnoreArg())
-        self.mox.ReplayAll()
-        server_manager._add_role(self.PRIMARY_SCHEDULER,
-                                 server_models.ServerRole.ROLE.DRONE,
-                                 action=True)
-
-
-    def testDeleteRoleFromPrimarySuccess(self):
-        """Test manager can delete a role from a primary server successfully.
-
-        Confirm that database call is made, and actions are taken, e.g.,
-        restart scheduler to delete an existing drone.
-        """
-        server_manager_utils.use_server_db().MultipleTimes().AndReturn(True)
-        server_models.validate(role=server_models.ServerRole.ROLE.DRONE)
-        self.mox.StubOutWithMock(self.PRIMARY_DRONE, 'get_role_names')
-        self.PRIMARY_DRONE.get_role_names().MultipleTimes().AndReturn(
-                [server_models.ServerRole.ROLE.DRONE])
-
-        self.mox.StubOutWithMock(self.PRIMARY_DRONE.roles, 'get')
-        self.PRIMARY_DRONE.roles.get(
-                role=server_models.ServerRole.ROLE.DRONE
-                ).AndReturn(self.DRONE_ROLE)
-
-        server_models.Server.objects.filter(
-                roles__role=server_models.ServerRole.ROLE.SCHEDULER,
-                status=server_models.Server.STATUS.PRIMARY
-                ).AndReturn([self.PRIMARY_SCHEDULER])
-        server_manager.server_manager_utils.warn_missing_role(
-                server_models.ServerRole.ROLE.DRONE, self.PRIMARY_DRONE)
-        infra.execute_command(mox.IgnoreArg(), mox.IgnoreArg())
-        self.mox.ReplayAll()
-        server_manager._delete_role(self.PRIMARY_DRONE,
-                                    server_models.ServerRole.ROLE.DRONE,
-                                    action=True)
-
-
-    def testDeleteRoleFromPrimarySuccess_NoAction(self):
-        """Test manager can delete a role from a primary server successfully.
-
-        Confirm that database call is made, and no action is taken as action
-        is set to False.
-        """
-        server_manager_utils.use_server_db().MultipleTimes().AndReturn(True)
-        server_models.validate(role=server_models.ServerRole.ROLE.DRONE)
-        self.mox.StubOutWithMock(self.PRIMARY_DRONE, 'get_role_names')
-        self.PRIMARY_DRONE.get_role_names().MultipleTimes().AndReturn(
-                [server_models.ServerRole.ROLE.DRONE])
-
-        self.mox.StubOutWithMock(self.PRIMARY_DRONE.roles, 'get')
-        self.PRIMARY_DRONE.roles.get(
-                role=server_models.ServerRole.ROLE.DRONE
-                ).AndReturn(self.DRONE_ROLE)
-
-        server_manager.server_manager_utils.warn_missing_role(
-                server_models.ServerRole.ROLE.DRONE, self.PRIMARY_DRONE)
-        self.mox.ReplayAll()
-        server_manager._delete_role(self.PRIMARY_DRONE,
-                                    server_models.ServerRole.ROLE.DRONE,
-                                    action=False)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/site_utils/server_manager_utils.py b/site_utils/server_manager_utils.py
deleted file mode 100644
index b164318..0000000
--- a/site_utils/server_manager_utils.py
+++ /dev/null
@@ -1,284 +0,0 @@
-# Lint as: python2, python3
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This module provides utility functions to help managing servers in server
-database (defined in global config section AUTOTEST_SERVER_DB).
-
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import json
-import socket
-import subprocess
-import sys
-
-import common
-
-import django.core.exceptions
-from autotest_lib.client.common_lib import utils
-from autotest_lib.client.common_lib.global_config import global_config
-from autotest_lib.frontend.server import models as server_models
-from autotest_lib.site_utils.lib import infra
-
-
-class ServerActionError(Exception):
-    """Exception raised when action on server failed.
-    """
-
-
-def use_server_db():
-    """Check if use_server_db is enabled in configuration.
-
-    @return: True if use_server_db is set to True in global config.
-    """
-    return global_config.get_config_value(
-            'SERVER', 'use_server_db', default=False, type=bool)
-
-
-def warn_missing_role(role, exclude_server):
-    """Post a warning if Autotest instance has no other primary server with
-    given role.
-
-    @param role: Name of the role.
-    @param exclude_server: Server to be excluded from search for role.
-    """
-    servers = server_models.Server.objects.filter(
-            roles__role=role,
-            status=server_models.Server.STATUS.PRIMARY).exclude(
-                    hostname=exclude_server.hostname)
-    if not servers:
-        message = ('WARNING! There will be no server with role %s after it\'s '
-                   'removed from server %s. Autotest will not function '
-                   'normally without any server in role %s.' %
-                   (role, exclude_server.hostname, role))
-        print(message, file=sys.stderr)
-
-
-def get_servers(hostname=None, role=None, status=None):
-    """Find servers with given role and status.
-
-    @param hostname: hostname of the server.
-    @param role: Role of server, default to None.
-    @param status: Status of server, default to None.
-
-    @return: A list of server objects with given role and status.
-    """
-    filters = {}
-    if hostname:
-        filters['hostname'] = hostname
-    if role:
-        filters['roles__role'] = role
-    if status:
-        filters['status'] = status
-    return list(server_models.Server.objects.filter(**filters))
-
-
-def format_servers(servers):
-    """Format servers for printing.
-
-    Example output:
-
-        Hostname     : server2
-        Status       : primary
-        Roles        : drone
-        Attributes   : {'max_processes':300}
-        Date Created : 2014-11-25 12:00:00
-        Date Modified: None
-        Note         : Drone in lab1
-
-    @param servers: Sequence of Server instances.
-    @returns: Formatted output as string.
-    """
-    return '\n'.join(str(server) for server in servers)
-
-
-def format_servers_json(servers):
-    """Format servers for printing as JSON.
-
-    @param servers: Sequence of Server instances.
-    @returns: String.
-    """
-    server_dicts = []
-    for server in servers:
-        if server.date_modified is None:
-            date_modified = None
-        else:
-            date_modified = str(server.date_modified)
-        attributes = {k: v for k, v in server.attributes.values_list(
-                'attribute', 'value')}
-        server_dicts.append({'hostname': server.hostname,
-                             'status': server.status,
-                             'roles': server.get_role_names(),
-                             'date_created': str(server.date_created),
-                             'date_modified': date_modified,
-                             'note': server.note,
-                             'attributes': attributes})
-    return json.dumps(server_dicts)
-
-
-def format_servers_nameonly(servers):
-    """format servers for printing names only
-
-    @param servers: Sequence of Server instances.
-    @returns: Formatted output as string.
-    """
-    return '\n'.join(s.hostname for s in servers)
-
-
-def check_server(hostname, role):
-    """Confirm server with given hostname is ready to be primary of given role.
-
-    If the server is a backup and failed to be verified for the role, remove
-    the role from its roles list. If it has no other role, set its status to
-    repair_required.
-
-    @param hostname: hostname of the server.
-    @param role: Role to be checked.
-    @return: True if server can be verified for the given role, otherwise
-             return False.
-    """
-    # TODO(dshi): Add more logic to confirm server is ready for the role.
-    # For now, the function just checks if server is ssh-able.
-    try:
-        infra.execute_command(hostname, 'true')
-        return True
-    except subprocess.CalledProcessError as e:
-        print('Failed to check server %s, error: %s' %
-              (hostname, e), file=sys.stderr)
-        return False
-
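As the TODO above notes, `check_server()` currently only verifies that the host is
reachable over ssh by running a trivial command. A minimal standalone sketch of that
reachability probe using the standard library directly (the helper name and ssh
options here are illustrative assumptions, not the `infra` API):

    import subprocess

    def is_ssh_reachable(hostname, timeout_secs=10):
        """Return True if `hostname` accepts an ssh connection and runs `true`."""
        command = ['ssh', '-o', 'BatchMode=yes',
                   '-o', 'ConnectTimeout=%d' % timeout_secs,
                   hostname, 'true']
        try:
            subprocess.check_call(command)
            return True
        except subprocess.CalledProcessError:
            return False
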
-
-def verify_server(exist=True):
-    """Decorator to check if server with given hostname exists in the database.
-
-    @param exist: Set to True to confirm server exists in the database, raise
-                  exception if not. If it's set to False, raise exception if
-                  server exists in database. Default is True.
-
-    @raise ServerActionError: If `exist` is True and server does not exist in
-                              the database, or `exist` is False and server exists
-                              in the database.
-    """
-    def deco_verify(func):
-        """Wrapper for the decorator.
-
-        @param func: Function to be called.
-        """
-        def func_verify(*args, **kwargs):
-            """Decorator to check if server exists.
-
-            If exist is set to True, raise ServerActionError is server with
-            given hostname is not found in server database.
-            If exist is set to False, raise ServerActionError is server with
-            given hostname is found in server database.
-
-            @param args: positional arguments for the wrapped function.
-            @param kwargs: keyword arguments for the wrapped function; must
-                           include `hostname`.
-            """
-            hostname = kwargs['hostname']
-            try:
-                server = server_models.Server.objects.get(hostname=hostname)
-            except django.core.exceptions.ObjectDoesNotExist:
-                server = None
-
-            if not exist and server:
-                raise ServerActionError('Server %s already exists.' %
-                                        hostname)
-            if exist and not server:
-                raise ServerActionError('Server %s does not exist in the '
-                                        'database.' % hostname)
-            if server:
-                kwargs['server'] = server
-            return func(*args, **kwargs)
-        return func_verify
-    return deco_verify
-
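For reference, a handler decorated with `verify_server` must be called with
`hostname` as a keyword argument; when the existence check passes, the matching
record is injected as the `server` keyword. A purely illustrative example (the
function and hostname below are made up):

    @verify_server(exist=True)
    def show_note(hostname, server=None):
        """Print the note of one server; `server` is injected by the decorator."""
        print('%s: %s' % (server.hostname, server.note))

    # Callers must pass the hostname by keyword, e.g.:
    #   show_note(hostname='drone1.example.com')
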
-
-def get_drones():
-    """Get a list of drones in status primary.
-
-    @return: A list of drones in status primary.
-    """
-    servers = get_servers(role=server_models.ServerRole.ROLE.DRONE,
-                          status=server_models.Server.STATUS.PRIMARY)
-    return [s.hostname for s in servers]
-
-
-def delete_attribute(server, attribute):
-    """Delete the attribute from the host.
-
-    @param server: An object of server_models.Server.
-    @param attribute: Name of an attribute of the server.
-    """
-    attributes = server.attributes.filter(attribute=attribute)
-    if not attributes:
-        raise ServerActionError('Server %s does not have attribute %s' %
-                                (server.hostname, attribute))
-    attributes[0].delete()
-    print('Attribute %s is deleted from server %s.' % (attribute,
-                                                       server.hostname))
-
-
-def change_attribute(server, attribute, value):
-    """Change the value of an attribute of the server.
-
-    @param server: An object of server_models.Server.
-    @param attribute: Name of an attribute of the server.
-    @param value: Value of the attribute of the server.
-
-    @raise ServerActionError: If the attribute already exists and has the
-                              given value.
-    """
-    attributes = server_models.ServerAttribute.objects.filter(
-            server=server, attribute=attribute)
-    if attributes and attributes[0].value == value:
-        raise ServerActionError('Attribute %s for Server %s already has '
-                                'value of %s.' %
-                                (attribute, server.hostname, value))
-    if attributes:
-        old_value = attributes[0].value
-        attributes[0].value = value
-        attributes[0].save()
-        print('Attribute `%s` of server %s is changed from %s to %s.' %
-              (attribute, server.hostname, old_value, value))
-    else:
-        server_models.ServerAttribute.objects.create(
-                server=server, attribute=attribute, value=value)
-        print('Attribute `%s` of server %s is set to %s.' %
-              (attribute, server.hostname, value))
-
-
-def get_shards():
-    """Get a list of shards in status primary.
-
-    @return: A list of shards in status primary.
-    """
-    servers = get_servers(role=server_models.ServerRole.ROLE.SHARD,
-                          status=server_models.Server.STATUS.PRIMARY)
-    return [s.hostname for s in servers]
-
-
-def confirm_server_has_role(hostname, role):
-    """Confirm a given server has the given role, and its status is primary.
-
-    @param hostname: hostname of the server.
-    @param role: Name of the role to be checked.
-    @raise ServerActionError: If the server does not have the given role or
-                              is not in primary status.
-    """
-    if hostname.lower() in ['localhost', '127.0.0.1']:
-        hostname = socket.gethostname()
-    hostname = utils.normalize_hostname(hostname)
-
-    servers = get_servers(role=role, status=server_models.Server.STATUS.PRIMARY)
-    for server in servers:
-        if hostname == utils.normalize_hostname(server.hostname):
-            return True
-    raise ServerActionError('Server %s does not have role of %s running in '
-                            'status primary.' % (hostname, role))
diff --git a/site_utils/set_tree_status.py b/site_utils/set_tree_status.py
index 4ddc813..6e21199 100755
--- a/site_utils/set_tree_status.py
+++ b/site_utils/set_tree_status.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/site_utils/sponge_lib/__init__.py b/site_utils/sponge_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/site_utils/sponge_lib/__init__.py
+++ /dev/null
diff --git a/site_utils/sponge_lib/acts_job_info.py b/site_utils/sponge_lib/acts_job_info.py
deleted file mode 100644
index c4b4c51..0000000
--- a/site_utils/sponge_lib/acts_job_info.py
+++ /dev/null
@@ -1,260 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob
-import json
-import os
-
-import logging
-
-from autotest_lib.site_utils.sponge_lib import autotest_job_info
-
-
-UNKNOWN_EFFORT_NAME = 'UNKNOWN_BUILD'
-UNKNOWN_ENV_NAME = 'UNKNOWN_BOARD'
-
-
-class ACTSSummaryEnums(object):
-    """A class contains the attribute names used in a ACTS summary."""
-
-    Requested = 'Requested'
-    Failed = 'Failed'
-    Unknown = 'Unknown'
-
-
-class ACTSRecordEnums(object):
-    """A class contains the attribute names used in an ACTS record."""
-
-    BeginTime = 'Begin Time'
-    Details = 'Details'
-    EndTime = 'End Time'
-    Extras = 'Extras'
-    ExtraErrors = 'Extra Errors'
-    Result = 'Result'
-    TestClass = 'Test Class'
-    TestName = 'Test Name'
-    UID = 'UID'
-
-
-class ACTSTaskInfo(autotest_job_info.AutotestTaskInfo):
-    """Task info for an ACTS test."""
-
-    tags = autotest_job_info.AutotestTaskInfo.tags + ['acts', 'testtracker']
-    logs = autotest_job_info.AutotestTaskInfo.logs + ['results']
-
-    def __init__(self, test, job):
-        """
-        @param test: The autotest test for this ACTS test.
-        @param job: The job info that is the parent of this task.
-        """
-        super(ACTSTaskInfo, self).__init__(test, job)
-
-        summary_location = os.path.join(
-                self.results_dir, 'results/latest/test_run_summary.json')
-
-        build_info_location = os.path.join(self.results_dir,
-                'results/BUILD_INFO-*')
-        build_info_files = glob.iglob(build_info_location)
-
-        try:
-            build_info_file = next(build_info_files)
-            logging.info('Using build info file: %s', build_info_file)
-            with open(build_info_file) as fd:
-                self.build_info = json.load(fd)
-        except Exception as e:
-            logging.exception(e)
-            logging.error('Bad build info file.')
-            self.build_info = {}
-
-        try:
-            build_prop_str = self.build_info['build_prop']
-            prop_dict = {}
-            self.build_info['build_prop'] = prop_dict
-            lines = build_prop_str.splitlines()
-            for line in lines:
-                parts = line.split('=')
-
-                if len(parts) != 2:
-                    continue
-
-                prop_dict[parts[0]] = parts[1]
-        except Exception as e:
-            logging.exception(e)
-            logging.error('Bad build prop data, using default empty dict')
-            self.build_info['build_prop'] = {}
-
-        try:
-            with open(summary_location) as fd:
-                self._acts_summary = json.load(fd)
-
-            self._summary_block = self._acts_summary['Summary']
-
-            record_block = self._acts_summary['Results']
-            self._records = list(ACTSRecord(record) for record in record_block)
-            self.is_valid = True
-        except Exception as e:
-            logging.exception(e)
-            logging.error('Bad acts data, reverting to autotest only.')
-            self.is_valid = False
-            self.tags = autotest_job_info.AutotestTaskInfo.tags
-
-    @property
-    def test_case_count(self):
-        """The number of test cases run."""
-        return self._summary_block[ACTSSummaryEnums.Requested]
-
-    @property
-    def failed_case_count(self):
-        """The number of failed test cases."""
-        return self._summary_block[ACTSSummaryEnums.Failed]
-
-    @property
-    def error_case_count(self):
-        """The number of errored test cases."""
-        return self._summary_block[ACTSSummaryEnums.Unknown]
-
-    @property
-    def records(self):
-        """All records of test cases in the ACTS tests."""
-        return self._records
-
-    @property
-    def owner(self):
-        """The owner of the task."""
-        if 'param-testtracker_owner' in self.keyvals:
-            return self.keyvals['param-testtracker_owner'].strip("'").strip('"')
-        elif 'param-test_tracker_owner' in self.keyvals:
-            return self.keyvals['param-test_tracker_owner'].strip("'").strip('"')
-        else:
-            return self._job.user.strip("'").strip('"')
-
-    @property
-    def effort_name(self):
-        """The test tracker effort name."""
-        build_id = self.build_info.get('build_prop', {}).get('ro.build.id')
-        if build_id and any(c.isdigit() for c in build_id):
-            return build_id
-        else:
-            build_version = self.build_info.get('build_prop', {}).get(
-                    'ro.build.version.incremental', UNKNOWN_EFFORT_NAME)
-            return build_version
-
-
-    @property
-    def project_id(self):
-        """The test tracker project id."""
-        if 'param-testtracker_project_id' in self.keyvals:
-            return self.keyvals.get('param-testtracker_project_id')
-        else:
-            return self.keyvals.get('param-test_tracker_project_id')
-
-    @property
-    def environment(self):
-        """The name of the enviroment for test tracker."""
-        build_props = self.build_info.get('build_prop', {})
-
-        if 'ro.product.board' in build_props:
-            board = build_props['ro.product.board']
-        elif 'ro.build.product' in build_props:
-            board = build_props['ro.build.product']
-        else:
-            board = UNKNOWN_ENV_NAME
-
-        return board
-
-    @property
-    def extra_environment(self):
-        """Extra environment info about the task."""
-        if 'param-testtracker_extra_env' in self.keyvals:
-            extra = self.keyvals.get('param-testtracker_extra_env', [])
-        else:
-            extra = self.keyvals.get('param-test_tracker_extra_env', [])
-
-        if not isinstance(extra, list):
-            extra = [extra]
-
-        return extra
-
-
-class ACTSRecord(object):
-    """A single record of a test case in an ACTS test."""
-
-    tags = ['acts', 'testtracker']
-
-    def __init__(self, json_record):
-        """
-        @param json_record: The json info for this record
-        """
-        self._json_record = json_record
-
-    @property
-    def test_class(self):
-        """The test class that was run."""
-        return self._json_record[ACTSRecordEnums.TestClass]
-
-    @property
-    def test_case(self):
-        """The test case that was run. None implies all in the class."""
-        return self._json_record.get(ACTSRecordEnums.TestName)
-
-    @property
-    def uid(self):
-        """The uid of the test case."""
-        return self._json_record.get(ACTSRecordEnums.UID)
-
-    @property
-    def status(self):
-        """The status of the test case."""
-        return self._json_record[ACTSRecordEnums.Result]
-
-    @property
-    def start_time(self):
-        """The start time of the test case."""
-        return self._json_record[ACTSRecordEnums.BeginTime] / 1000.0
-
-    @property
-    def end_time(self):
-        """The end time of the test case."""
-        return self._json_record[ACTSRecordEnums.EndTime] / 1000.0
-
-    @property
-    def details(self):
-        """Details about the test case."""
-        return self._json_record.get(ACTSRecordEnums.Details)
-
-    @property
-    def extras(self):
-        """Extra info about the test case."""
-        return self._json_record.get(ACTSRecordEnums.Extras)
-
-    @property
-    def extra_errors(self):
-        """Extra errors about the test case."""
-        return self._json_record.get(ACTSRecordEnums.ExtraErrors)
-
-    @property
-    def extra_environment(self):
-        """Extra details about the environment for this test."""
-        extras = self.extras
-        if not extras:
-            return None
-
-        test_tracker_info = self.extras.get('test_tracker_info')
-        if not test_tracker_info:
-            return self.extras.get('test_tracker_environment_info')
-
-        return test_tracker_info.get('extra_environment')
-
-    @property
-    def uuid(self):
-        """The test tracker uuid of the test case."""
-        extras = self.extras
-        if not extras:
-            return None
-
-        test_tracker_info = self.extras.get('test_tracker_info')
-        if not test_tracker_info:
-            return self.extras.get('test_tracker_uuid')
-
-        return test_tracker_info.get('test_tracker_uuid')
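The classes above consume an ACTS `test_run_summary.json` whose `Results` entries use
the key names listed in `ACTSRecordEnums` and whose timestamps are in milliseconds.
A minimal standalone sketch of that parsing (the helper name is an illustrative
assumption):

    import json

    def load_acts_records(summary_path):
        """Yield (test class, test name, result, duration in seconds) tuples."""
        with open(summary_path) as fd:
            summary = json.load(fd)
        for record in summary['Results']:
            duration = (record['End Time'] - record['Begin Time']) / 1000.0
            yield (record['Test Class'],
                   record.get('Test Name'),
                   record['Result'],
                   duration)

A more defensive variant of the `build_prop` parsing above would split each line with
`line.split('=', 1)` so property values that themselves contain '=' are preserved.
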
diff --git a/site_utils/sponge_lib/autotest_dynamic_job.py b/site_utils/sponge_lib/autotest_dynamic_job.py
deleted file mode 100644
index fc95941..0000000
--- a/site_utils/sponge_lib/autotest_dynamic_job.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from autotest_lib.site_utils.sponge_lib import acts_job_info
-from autotest_lib.site_utils.sponge_lib import autotest_job_info
-
-
-class DynamicJobInfo(autotest_job_info.AutotestJobInfo):
-    """A job that will create tasks based on the info they contain."""
-
-    def create_task_info(self, test):
-        """Dynamically creates tasks based on the type of test run."""
-        if test.subdir and 'android_ACTS' in test.subdir:
-            logging.info('Using ACTS task info for %s.', test.testname)
-            return acts_job_info.ACTSTaskInfo(test, self)
-
-        return super(DynamicJobInfo, self).create_task_info(test)
diff --git a/site_utils/sponge_lib/autotest_job_info.py b/site_utils/sponge_lib/autotest_job_info.py
deleted file mode 100644
index 54cb4e1..0000000
--- a/site_utils/sponge_lib/autotest_job_info.py
+++ /dev/null
@@ -1,183 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import socket
-import time
-
-from autotest_lib.client.common_lib import base_utils
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib import time_utils
-from autotest_lib.site_utils import job_directories
-
-CONFIG=global_config.global_config
-
-RETRIEVE_LOGS_CGI = CONFIG.get_config_value(
-        'BUG_REPORTING', 'retrieve_logs_cgi', default='')
-USE_PROD_SERVER = CONFIG.get_config_value(
-        'SERVER', 'use_prod_sponge_server', default=False, type=bool)
-
-
-class AutotestJobInfo(object):
-    """Autotest job info."""
-
-    # Tell the uploader what type of info this object holds.
-    tags=['autotest']
-
-    # Version of the data stored.
-    version = 2
-
-    def __init__(self, job):
-        self._job = job
-        self._tasks = list(
-                self.create_task_info(test) for test in self._job.tests)
-
-        self.build = job.build
-        self.build_version = job.build_version
-        self.board = job.board
-
-    @property
-    def id(self):
-        """The id of the autotest job."""
-        return job_directories.get_job_id_or_task_id(self._job.dir)
-
-    @property
-    def label(self):
-        """The label of the autotest job."""
-        return self._job.label
-
-    @property
-    def user(self):
-        """The user who launched the autotest job."""
-        return self._job.user
-
-    @property
-    def start_time(self):
-        """The utc start time of the autotest job."""
-        return self._job.keyval_dict.get('job_started', time.time())
-
-    @property
-    def end_time(self):
-        """The utc end time of the autotest job."""
-        return self._job.keyval_dict.get('job_finished', time.time())
-
-    @property
-    def dut(self):
-        """The dut for the job."""
-        return self._job.machine
-
-    @property
-    def drone(self):
-        """The drone used to run the job."""
-        return self._job.keyval_dict.get('drone', socket.gethostname())
-
-    @property
-    def keyvals(self):
-        """Keyval dict for this job."""
-        return self._job.keyval_dict
-
-    @property
-    def tasks(self):
-        """All tests that this job ran."""
-        return self._tasks
-
-    @property
-    def results_dir(self):
-        """The directory where job results are stored."""
-        return os.path.abspath(self._job.dir)
-
-    @property
-    def results_url(self):
-        """The url where results are stored."""
-        return '%sresults/%s-%s/%s' % (
-            RETRIEVE_LOGS_CGI, self.id, self.user, self.dut)
-
-    @property
-    def is_official(self):
-        """If this is a production result."""
-        return USE_PROD_SERVER
-
-    def create_task_info(self, test):
-        """Thunk for creating task info.
-
-        @param test: The autotest test.
-
-        @returns The task info.
-        """
-        logging.info('Using default autotest task info for %s.', test.testname)
-        return AutotestTaskInfo(test, self)
-
-
-class AutotestTaskInfo(object):
-    """Info about an autotest test."""
-
-    # Tell the uploader what type of info is kept in this task.
-    tags = ['autotest']
-
-    # A list of logs to upload for this task.
-    logs = ['debug', 'status.log', 'crash', 'keyval', 'control', 'control.srv',
-            'results/results-chart.json']
-
-    # Version of the data stored.
-    version = 2
-
-    def __init__(self, test, job):
-        """
-        @param test: The autotest test to create this task from.
-        @param job: The job info that owns this task.
-        """
-        self._test = test
-        self._job = job
-
-        keyvals_file = os.path.join(self.results_dir, 'keyval')
-        self.keyvals = base_utils.read_keyval(keyvals_file)
-
-    @property
-    def taskname(self):
-        """The name of the test."""
-        return self._test.testname
-
-    @property
-    def status(self):
-        """The autotest status of this test."""
-        return self._test.status
-
-    @property
-    def start_time(self):
-        """The utc recorded time of when this test started."""
-        return time_utils.to_utc_timestamp(self._test.started_time)
-
-    @property
-    def end_time(self):
-        """The utc recorded time of when this test ended."""
-        return time_utils.to_utc_timestamp(self._test.finished_time)
-
-    @property
-    def subdir(self):
-        """The sub directory used for this test."""
-        return self._test.subdir
-
-    @property
-    def attributes(self):
-        """Attributes of this task."""
-        return getattr(self._test, 'attributes', {})
-
-    @property
-    def reason(self):
-        """The reason for this tasks status."""
-        return getattr(self._test, 'reason', None)
-
-    @property
-    def results_dir(self):
-        """The full directory where results are stored for this test."""
-        if self.subdir == '----' or not self.subdir:
-            return self._job.results_dir
-        else:
-            return os.path.join(self._job.results_dir, self.subdir)
-
-    @property
-    def is_test(self):
-        """True if this task is an actual test that ran."""
-        return self.subdir != '----'
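`DynamicJobInfo.create_task_info()` and the base-class thunk above form a small
factory-method pattern: the job walks its tests once in `__init__` and lets a
subclass decide which wrapper represents each test. A stripped-down sketch of that
pattern (the class names and dict-based tests are illustrative stand-ins, not the
autotest API):

    class BaseJobInfo(object):
        """Walks tests once and delegates task construction to a hook."""

        def __init__(self, tests):
            self.tasks = [self.create_task_info(test) for test in tests]

        def create_task_info(self, test):
            """Default hook; subclasses override to pick a richer wrapper."""
            return {'kind': 'default', 'test': test}


    class DynamicJobInfoSketch(BaseJobInfo):
        """Chooses a task wrapper based on what kind of test was run."""

        def create_task_info(self, test):
            if 'android_ACTS' in (test.get('subdir') or ''):
                return {'kind': 'acts', 'test': test}
            return super(DynamicJobInfoSketch, self).create_task_info(test)


    job = DynamicJobInfoSketch([{'subdir': 'android_ACTS.Setup'}, {'subdir': None}])
    # job.tasks now holds one 'acts' task and one 'default' task.
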
diff --git a/site_utils/sponge_lib/sponge_utils.py b/site_utils/sponge_lib/sponge_utils.py
deleted file mode 100644
index 39268a4..0000000
--- a/site_utils/sponge_lib/sponge_utils.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This module contains utilities for test to report result to Sponge.
-"""
-
-import logging
-
-from autotest_lib.site_utils.sponge_lib import autotest_dynamic_job
-from autotest_lib.client.common_lib import decorators
-
-try:
-    from sponge import upload_utils
-except ImportError:
-    logging.debug('Module failed to be imported: sponge')
-    upload_utils = None
-
-
-
-class SpongeLogHandler(logging.Handler):
-    """Helper log handler for logging during sponge."""
-    def __init__(self, log_func):
-        super(SpongeLogHandler, self).__init__()
-        self.log_func = log_func
-
-    def emit(self, record):
-        log_entry = self.format(record)
-        self.log_func(log_entry)
-
-
-
-@decorators.test_module_available(upload_utils)
-def upload_results(job, log=logging.debug):
-    """Upload test results to Sponge with given job details.
-
-    @param job: A job object created by tko/parsers.
-    @param log: Logging method, default is logging.debug.
-
-    @return: A url to the Sponge invocation.
-    """
-    start_level = logging.getLogger().level
-
-    log_handler = SpongeLogHandler(log)
-    logging.getLogger().addHandler(log_handler)
-    logging.getLogger().setLevel(logging.DEBUG)
-
-    logging.info("added log handler")
-
-    try:
-        logging.info('Starting sponge upload.')
-        info = autotest_dynamic_job.DynamicJobInfo(job)
-        return upload_utils.UploadInfo(info)
-    except:
-        logging.exception('Failed to upload to sponge.')
-    finally:
-        logging.getLogger().removeHandler(log_handler)
-        logging.getLogger().setLevel(start_level)
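`upload_results()` above temporarily routes everything logged on the root logger
through the caller-supplied `log` function for the duration of the upload. The same
pattern, sketched independently of Sponge (the handler and function names are
illustrative):

    import logging

    class CallbackLogHandler(logging.Handler):
        """Forwards every formatted log record to a plain callable."""

        def __init__(self, log_func):
            super(CallbackLogHandler, self).__init__()
            self.log_func = log_func

        def emit(self, record):
            self.log_func(self.format(record))


    def run_with_forwarded_logging(action, log):
        """Run `action()` while mirroring root-logger output into `log`."""
        root = logging.getLogger()
        handler = CallbackLogHandler(log)
        old_level = root.level
        root.addHandler(handler)
        root.setLevel(logging.DEBUG)
        try:
            return action()
        finally:
            root.removeHandler(handler)
            root.setLevel(old_level)
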
diff --git a/site_utils/sponge_lib/sponge_utils_functional_test.py b/site_utils/sponge_lib/sponge_utils_functional_test.py
deleted file mode 100644
index 24213f9..0000000
--- a/site_utils/sponge_lib/sponge_utils_functional_test.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Test methods in module sponge_utils.
-
-This test has a dependency on the sponge module, therefore it is not a unit
-test. Users can run this module manually to verify that test results can be
-compiled into Sponge XML and uploaded to the test server correctly.
-"""
-
-import datetime
-import mox
-import os
-import shutil
-import tempfile
-import time
-import unittest
-
-import common
-
-from autotest_lib.site_utils.sponge_lib import sponge_utils
-from autotest_lib.tko import models
-
-ACTS_SUMMARY_JSON = """
-{
-    "Results": [
-        {
-            "Begin Time": 1464054883744,
-            "Details": "setup_class failed for FilteringTest.",
-            "End Time": 1464054883744,
-            "Extras": null,
-            "Result": "FAIL",
-            "Test Class": "FilteringTest",
-            "Test Name": "",
-            "UID": null,
-            "Extra Errors": {"on_fail": "I also failed for whatever reason"}
-        },
-        {
-            "Begin Time": 1464054888355,
-            "Details": null,
-            "End Time": 1464054888644,
-            "Extras": null,
-            "Result": "PASS",
-            "Test Class": "UniqueFilteringTest",
-            "Test Name": "test_scan_flush_pending_scan_results",
-            "UID": null,
-            "Extra Errors": {}
-        }
-    ],
-    "Summary": {
-        "Executed": 2,
-        "Failed": 1,
-        "Passed": 1,
-        "Requested": 10,
-        "Skipped": 0,
-        "Unknown": 8
-    }
-}
-"""
-
-class SpongeUtilsUnitTests(mox.MoxTestBase):
-    """Test functions in sponge_utils.
-    """
-
-    def setUp(self):
-        """Set up test."""
-        super(SpongeUtilsUnitTests, self).setUp()
-        self.acts_summary = tempfile.NamedTemporaryFile(delete=False)
-        self.acts_summary.write(ACTS_SUMMARY_JSON)
-        self.acts_summary.close()
-        self.tmp_dir = tempfile.mkdtemp()
-        self.resultsdir = os.path.join(self.tmp_dir,
-                                       '123-debug_user/host1/dummy_PassServer')
-        os.makedirs(self.resultsdir)
-        with open(os.path.join(self.tmp_dir, '123-debug_user/host1',
-                               '.autoserv_execute'), 'w') as f:
-            f.write('')
-
-
-    def tearDown(self):
-        """Delete temporary file.
-        """
-        super(SpongeUtilsUnitTests, self).tearDown()
-        os.unlink(self.acts_summary.name)
-        shutil.rmtree(self.tmp_dir)
-
-
-    def test_upload_results_in_test(self):
-        """Test function upload_results_in_test.
-        """
-        test = self.mox.CreateMockAnything()
-        test.resultsdir = os.path.join(self.tmp_dir,
-                                       '123-debug_user/host1/dummy_PassServer')
-        test.tagged_testname = 'dummy_PassServer'
-
-        test.job = self.mox.CreateMockAnything()
-        test.job.user = 'debug_user'
-        test.job.machines = ['host1']
-
-        job_keyvals = {'drone': 'localhost',
-                       'job_started': time.time()-1000}
-        self.mox.StubOutWithMock(models.test, 'parse_job_keyval')
-        models.test.parse_job_keyval(test.resultsdir).AndReturn(job_keyvals)
-
-        self.mox.ReplayAll()
-
-        invocation_url = sponge_utils.upload_results_in_test(
-                test, test_pass=True, acts_summary=self.acts_summary.name)
-        print 'Invocation URL: %s' % invocation_url
-        self.assertIsNotNone(invocation_url)
-
-
-    def test_upload_results_in_parsing(self):
-        """Test function upload_results.
-        """
-        job = self.mox.CreateMockAnything()
-        job.started_time = datetime.datetime(2016, 8, 15, 0, 0, 0)
-        job.finished_time = datetime.datetime(2016, 8, 15, 1, 0, 0)
-        job.keyval_dict = {'drone': 'server1',
-                           'hostname': 'host1',
-                           'job_finished': 1471284056,
-                           'job_queued': 1471283461,
-                           'job_started': 1471283480,
-                           'label': 'dummy',
-                           'status_version': 1,
-                           'suite': 'dummy',
-                           'parent_job_id': 100,
-                           'user': 'debug_user'}
-
-        job.dir = os.path.join(self.tmp_dir, '123-debug_user/host1')
-        job.label = 'dummy_PassServer'
-
-        job.tests = []
-        test = self.mox.CreateMockAnything()
-        test.attributes = {'host-labels': 'board%3Aveyron'}
-        test.status = 'GOOD'
-        test.started_time = datetime.datetime(2016, 8, 15, 0, 0, 0)
-        test.finished_time = datetime.datetime(2016, 8, 15, 1, 0, 0)
-        test.testname = 'dummy_PassServer'
-        job.tests.append(test)
-        job.user = 'debug_user'
-        job.machine = 'host1'
-
-        invocation_url = sponge_utils.upload_results(job)
-        print 'Invocation URL: %s' % invocation_url
-        self.assertIsNotNone(invocation_url)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/site_utils/stable_images/assign_stable_images.py b/site_utils/stable_images/assign_stable_images.py
deleted file mode 100755
index 3287404..0000000
--- a/site_utils/stable_images/assign_stable_images.py
+++ /dev/null
@@ -1,395 +0,0 @@
-#!/usr/bin/python2
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Automatically update the afe_stable_versions table.
-
-This command updates the stable repair version for selected boards
-in the lab.  For each board, if the version that Omaha is serving
-on the Beta channel for the board is more recent than the current
-stable version in the AFE database, then the AFE is updated to use
-the version on Omaha.
-
-The upgrade process is applied to every "managed board" in the test
-lab.  Generally, a managed board is a board with both spare and
-critical scheduling pools.
-
-See `autotest_lib.site_utils.lab_inventory` for the full definition
-of "managed board".
-
-The command supports a `--dry-run` option that reports changes that
-would be made, without making the actual RPC calls to change the
-database.
-
-"""
-
-import argparse
-import logging
-
-import common
-from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
-from autotest_lib.site_utils import lab_inventory
-from autotest_lib.site_utils import loglib
-from autotest_lib.site_utils.stable_images import build_data
-from chromite.lib import ts_mon_config
-from chromite.lib import metrics
-
-
-# _DEFAULT_BOARD - The distinguished board name used to identify a
-# stable version mapping that is used for any board without an explicit
-# mapping of its own.
-#
-# _DEFAULT_VERSION_TAG - A string used to signify that there is no
-# mapping for a board, in other words, the board is mapped to the
-# default version.
-#
-_DEFAULT_BOARD = 'DEFAULT'
-_DEFAULT_VERSION_TAG = '(default)'
-
-_METRICS_PREFIX = 'chromeos/autotest/assign_stable_images'
-
-
-class _VersionUpdater(object):
-    """
-    Class to report and apply version changes.
-
-    This class is responsible for the low-level logic of applying
-    version upgrades and reporting them as command output.
-
-    This class exists to solve two problems:
-     1. To distinguish "normal" vs. "dry-run" modes.  In dry-run mode,
-        the methods that would update the AFE only log the change they
-        would have made.
-     2. To provide hooks for unit tests.  The unit tests override both
-        the reporting and modification behaviors, in order to test the
-        higher level logic that decides what changes are needed.
-
-    Methods meant merely to report changes to command output have names
-    starting with "report" or "_report".  Methods that are meant to
-    change the AFE in normal mode have names starting with "_do"
-    """
-
-    def __init__(self, afe, dry_run):
-        """Initialize us.
-
-        @param afe:     A frontend.AFE object.
-        @param dry_run: A boolean indicating whether to execute in dry run mode.
-                        No updates are persisted to the afe in dry run.
-        """
-        self._dry_run = dry_run
-        image_types = [afe.CROS_IMAGE_TYPE, afe.FIRMWARE_IMAGE_TYPE]
-        self._version_maps = {
-            image_type: afe.get_stable_version_map(image_type)
-                for image_type in image_types
-        }
-        self._cros_map = self._version_maps[afe.CROS_IMAGE_TYPE]
-        self._selected_map = None
-
-    def select_version_map(self, image_type):
-        """
-        Select an AFE version map object based on `image_type`.
-
-        This creates and remembers an AFE version mapper object to be
-        used for making changes in normal mode.
-
-        @param image_type   Image type parameter for the version mapper
-                            object.
-        """
-        self._selected_map = self._version_maps[image_type]
-        return self._selected_map.get_all_versions()
-
-    def report_default_changed(self, old_default, new_default):
-        """
-        Report that the default version mapping is changing.
-
-        This merely reports a text description of the pending change
-        without executing it.
-
-        @param old_default  The original default version.
-        @param new_default  The new default version to be applied.
-        """
-        logging.debug('Default %s -> %s', old_default, new_default)
-
-    def _report_board_changed(self, board, old_version, new_version):
-        """
-        Report a change in one board's assigned version mapping.
-
-        This merely reports a text description of the pending change
-        without executing it.
-
-        @param board        The board with the changing version.
-        @param old_version  The original version mapped to the board.
-        @param new_version  The new version to be applied to the board.
-        """
-        logging.debug('    %-22s %s -> %s', board, old_version, new_version)
-
-    def report_board_unchanged(self, board, old_version):
-        """
-        Report that a board's version mapping is unchanged.
-
-        This reports that a board has a non-default mapping that will be
-        unchanged.
-
-        @param board        The board that is not changing.
-        @param old_version  The board's version mapping.
-        """
-        self._report_board_changed(board, '(no change)', old_version)
-
-    def _do_set_mapping(self, board, new_version):
-        """
-        Change one board's assigned version mapping.
-
-        @param board        The board with the changing version.
-        @param new_version  The new version to be applied to the board.
-        """
-        if self._dry_run:
-            logging.info('DRYRUN: Would have set %s version to %s',
-                         board, new_version)
-        else:
-            self._selected_map.set_version(board, new_version)
-
-    def _do_delete_mapping(self, board):
-        """
-        Delete one board's assigned version mapping.
-
-        @param board        The board with the version to be deleted.
-        """
-        if self._dry_run:
-            logging.info('DRYRUN: Would have deleted version for %s', board)
-        else:
-            self._selected_map.delete_version(board)
-
-    def set_mapping(self, board, old_version, new_version):
-        """
-        Change and report a board version mapping.
-
-        @param board        The board with the changing version.
-        @param old_version  The original version mapped to the board.
-        @param new_version  The new version to be applied to the board.
-        """
-        self._report_board_changed(board, old_version, new_version)
-        self._do_set_mapping(board, new_version)
-
-    def upgrade_default(self, new_default):
-        """
-        Apply a default version change.
-
-        @param new_default  The new default version to be applied.
-        """
-        self._do_set_mapping(_DEFAULT_BOARD, new_default)
-
-    def delete_mapping(self, board, old_version):
-        """
-        Delete a board version mapping, and report the change.
-
-        @param board        The board with the version to be deleted.
-        @param old_version  The board's version prior to deletion.
-        """
-        assert board != _DEFAULT_BOARD
-        self._report_board_changed(board,
-                                   old_version,
-                                   _DEFAULT_VERSION_TAG)
-        self._do_delete_mapping(board)
-
-
-def _get_upgrade_versions(cros_versions, omaha_versions, boards):
-    """
-    Get the new stable versions to which we should update.
-
-    The new versions are returned as a tuple of a dictionary mapping
-    board names to versions, plus a new default board setting.  The
-    new default is determined as the most commonly used version
-    across the given boards.
-
-    The new dictionary will have a mapping for every board in `boards`.
-    That mapping will be taken from `cros_versions`, unless the board has
-    a mapping in `omaha_versions` _and_ the omaha version is more recent
-    than the AFE version.
-
-    @param cros_versions    The current board->version mappings in the
-                            AFE.
-    @param omaha_versions   The current board->version mappings from
-                            Omaha for the Beta channel.
-    @param boards           Set of boards to be upgraded.
-    @return Tuple of (mapping, default) where mapping is a dictionary
-            mapping boards to versions, and default is a version string.
-    """
-    upgrade_versions = {}
-    version_counts = {}
-    afe_default = cros_versions[_DEFAULT_BOARD]
-    for board in boards:
-        version = build_data.get_omaha_upgrade(
-                omaha_versions, board,
-                cros_versions.get(board, afe_default))
-        upgrade_versions[board] = version
-        version_counts.setdefault(version, 0)
-        version_counts[version] += 1
-    return (upgrade_versions,
-            max(version_counts.items(), key=lambda x: x[1])[0])
-
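The second element of the tuple returned above is simply the upgrade version shared
by the largest number of boards, which becomes the new default. An equivalent way to
express that selection (for illustration only) is with `collections.Counter`:

    import collections

    def most_common_version(upgrade_versions):
        """Return the version used by the largest number of boards."""
        counts = collections.Counter(upgrade_versions.values())
        return counts.most_common(1)[0][0]
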
-
-def _get_firmware_upgrades(cros_versions):
-    """
-    Get the new firmware versions to which we should update.
-
-    @param cros_versions    Current board->cros version mappings in the
-                            AFE.
-    @return A dictionary mapping boards/models to firmware upgrade versions.
-            If the build is unibuild, the key is a model name; else, the key
-            is a board name.
-    """
-    firmware_upgrades = {}
-    for board, version in cros_versions.iteritems():
-        firmware_upgrades.update(
-            build_data.get_firmware_versions(board, version))
-    return firmware_upgrades
-
-
-def _apply_cros_upgrades(updater, old_versions, new_versions,
-                         new_default):
-    """
-    Change CrOS stable version mappings in the AFE.
-
-    The input `old_versions` dictionary represents the content of the
-    `afe_stable_versions` database table; it contains mappings for a
-    default version, plus exceptions for boards with non-default
-    mappings.
-
-    The `new_versions` dictionary contains a mapping for every board,
-    including boards that will be mapped to the new default version.
-
-    This function applies the AFE changes necessary to produce the new
-    AFE mappings indicated by `new_versions` and `new_default`.  The
-    changes are ordered so that at any moment, every board is mapped
-    either according to the old or the new mapping.
-
-    @param updater        Instance of _VersionUpdater responsible for
-                          making the actual database changes.
-    @param old_versions   The current board->version mappings in the
-                          AFE.
-    @param new_versions   New board->version mappings obtained by
-                          applying Beta channel upgrades from Omaha.
-    @param new_default    The new default build for the AFE.
-    """
-    old_default = old_versions[_DEFAULT_BOARD]
-    if old_default != new_default:
-        updater.report_default_changed(old_default, new_default)
-    logging.info('Applying stable version changes:')
-    default_count = 0
-    for board, new_build in new_versions.items():
-        if new_build == new_default:
-            default_count += 1
-        elif board in old_versions and new_build == old_versions[board]:
-            updater.report_board_unchanged(board, new_build)
-        else:
-            old_build = old_versions.get(board)
-            if old_build is None:
-                old_build = _DEFAULT_VERSION_TAG
-            updater.set_mapping(board, old_build, new_build)
-    if old_default != new_default:
-        updater.upgrade_default(new_default)
-    for board, new_build in new_versions.items():
-        if new_build == new_default and board in old_versions:
-            updater.delete_mapping(board, old_versions[board])
-    logging.info('%d boards now use the default mapping', default_count)
-
-
-def _apply_firmware_upgrades(updater, old_versions, new_versions):
-    """
-    Change firmware version mappings in the AFE.
-
-    The input `old_versions` dictionary represents the content of the
-    firmware mappings in the `afe_stable_versions` database table.
-    There is no default version; missing boards simply have no current
-    version.
-
-    This function applies the AFE changes necessary to produce the new
-    AFE mappings indicated by `new_versions`.
-
-    TODO(jrbarnette) This function ought to remove any mapping not found
-    in `new_versions`.  However, in theory, that's only needed to
-    account for boards that are removed from the lab, and that hasn't
-    happened yet.
-
-    @param updater        Instance of _VersionUpdater responsible for
-                          making the actual database changes.
-    @param old_versions   The current board->version mappings in the
-                          AFE.
-    @param new_versions   New board->version mappings obtained by
-                          applying Beta channel upgrades from Omaha.
-    """
-    unchanged = 0
-    no_version = 0
-    for board, new_firmware in new_versions.items():
-        if new_firmware is None:
-            no_version += 1
-        elif board not in old_versions:
-            updater.set_mapping(board, '(nothing)', new_firmware)
-        else:
-            old_firmware = old_versions[board]
-            if new_firmware != old_firmware:
-                updater.set_mapping(board, old_firmware, new_firmware)
-            else:
-                unchanged += 1
-    logging.info('%d boards have no firmware mapping', no_version)
-    logging.info('%d boards are unchanged', unchanged)
-
-
-def _assign_stable_images(arguments):
-    afe = frontend_wrappers.RetryingAFE(server=arguments.web)
-    updater = _VersionUpdater(afe, dry_run=arguments.dry_run)
-
-    cros_versions = updater.select_version_map(afe.CROS_IMAGE_TYPE)
-    omaha_versions = build_data.get_omaha_version_map()
-    upgrade_versions, new_default = (
-            _get_upgrade_versions(cros_versions, omaha_versions,
-                                  lab_inventory.get_managed_boards(afe)))
-    _apply_cros_upgrades(updater, cros_versions,
-                         upgrade_versions, new_default)
-
-    logging.info('Applying firmware updates.')
-    fw_versions = updater.select_version_map(afe.FIRMWARE_IMAGE_TYPE)
-    firmware_upgrades = _get_firmware_upgrades(upgrade_versions)
-    _apply_firmware_upgrades(updater, fw_versions, firmware_upgrades)
-
-
-def main():
-    """Standard main routine."""
-    parser = argparse.ArgumentParser(
-            description='Update the stable repair version for all '
-                        'boards')
-    parser.add_argument('-n', '--dry-run',
-                        action='store_true',
-                        help='print changes without executing them')
-    loglib.add_logging_options(parser)
-    # TODO(crbug/888046) Make these arguments required once puppet is updated to
-    # pass them in.
-    parser.add_argument('--web',
-                        default='cautotest',
-                        help='URL to the AFE to update.')
-
-    arguments = parser.parse_args()
-    loglib.configure_logging_with_args(parser, arguments)
-
-    tsmon_args = {
-            'service_name': parser.prog,
-            'indirect': False,
-            'auto_flush': False,
-    }
-    if arguments.dry_run:
-        logging.info('DRYRUN: No changes will be made.')
-        # metrics will be logged to logging stream anyway.
-        tsmon_args['debug_file'] = '/dev/null'
-
-    try:
-        with ts_mon_config.SetupTsMonGlobalState(**tsmon_args):
-            with metrics.SuccessCounter(_METRICS_PREFIX + '/tick',
-                                        fields={'afe': arguments.web}):
-                _assign_stable_images(arguments)
-    finally:
-        metrics.Flush()
-
-if __name__ == '__main__':
-    main()
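The ordering that `_apply_cros_upgrades()` promises, where every board is mapped to
either its old or its new version at every intermediate moment, comes down to three
phases: add explicit non-default mappings first, then move the default, then drop
mappings the new default now covers. A simplified standalone sketch of that ordering
over a plain dict (names here are illustrative, not the AFE API):

    def apply_upgrades(mappings, new_versions, new_default, default_key='DEFAULT'):
        """Apply `new_versions` so every intermediate state is a valid mapping."""
        old_default = mappings[default_key]
        # Phase 1: pin boards that need a non-default build.
        for board, build in new_versions.items():
            if build != new_default and mappings.get(board) != build:
                mappings[board] = build
        # Phase 2: move the default itself.
        if new_default != old_default:
            mappings[default_key] = new_default
        # Phase 3: drop explicit mappings now covered by the new default.
        for board, build in new_versions.items():
            if build == new_default and board in mappings:
                del mappings[board]
        return mappings
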
diff --git a/site_utils/stable_images/assign_stable_images_unittest.py b/site_utils/stable_images/assign_stable_images_unittest.py
deleted file mode 100644
index f3a073d..0000000
--- a/site_utils/stable_images/assign_stable_images_unittest.py
+++ /dev/null
@@ -1,678 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Unit tests for functions in `assign_stable_images`.
-"""
-
-
-import mock
-import unittest
-
-import common
-from autotest_lib.site_utils.stable_images import assign_stable_images
-from autotest_lib.site_utils.stable_images import build_data
-
-
-_DEFAULT_BOARD = assign_stable_images._DEFAULT_BOARD
-
-
-class GetFirmwareUpgradesTests(unittest.TestCase):
-    """Tests for _get_firmware_upgrades."""
-
-    @mock.patch.object(build_data, 'get_firmware_versions')
-    def test_get_firmware_upgrades(self, mock_get_firmware_versions):
-        """Test _get_firmware_upgrades."""
-        mock_get_firmware_versions.side_effect = [
-            {'auron_paine': 'fw_version'},
-            {'blue': 'fw_version',
-             'robo360': 'fw_version',
-             'porbeagle': 'fw_version'}
-        ]
-        cros_versions = {
-            'coral': 'R64-10176.65.0',
-            'auron_paine': 'R64-10176.65.0'
-        }
-        boards = ['auron_paine', 'coral']
-
-        firmware_upgrades = assign_stable_images._get_firmware_upgrades(
-            cros_versions)
-        expected_firmware_upgrades = {
-            'auron_paine': 'fw_version',
-            'blue': 'fw_version',
-            'robo360': 'fw_version',
-            'porbeagle': 'fw_version'
-        }
-        self.assertEqual(firmware_upgrades, expected_firmware_upgrades)
-
-
-class GetUpgradeTests(unittest.TestCase):
-    """Tests for the `_get_upgrade_versions()` function."""
-
-    # _VERSIONS - a list of sample version strings such as may be used
-    #   for Chrome OS, sorted from oldest to newest.  These are used to
-    #   construct test data in multiple test cases, below.
-    _VERSIONS = ['R1-1.0.0', 'R1-1.1.0', 'R2-4.0.0']
-
-    def test_board_conversions(self):
-        """
-        Test proper mapping of names from the AFE to Omaha.
-
-        Board names in Omaha don't have '_' characters; when an AFE
-        board contains '_' characters, they must be converted to '-'.
-
-        Assert that for various forms of name in the AFE mapping, the
-        converted name is the one looked up in the Omaha mapping.
-        """
-        board_equivalents = [
-            ('a-b', 'a-b'), ('c_d', 'c-d'),
-            ('e_f-g', 'e-f-g'), ('hi', 'hi')]
-        afe_versions = {
-            _DEFAULT_BOARD: self._VERSIONS[0]
-        }
-        omaha_versions = {}
-        expected = {}
-        boards = set()
-        for afe_board, omaha_board in board_equivalents:
-            boards.add(afe_board)
-            afe_versions[afe_board] = self._VERSIONS[1]
-            omaha_versions[omaha_board] = self._VERSIONS[2]
-            expected[afe_board] = self._VERSIONS[2]
-        upgrades, _ = assign_stable_images._get_upgrade_versions(
-                afe_versions, omaha_versions, boards)
-        self.assertEqual(upgrades, expected)
-
-    def test_afe_default(self):
-        """
-        Test that the AFE default board mapping is honored.
-
-        If a board isn't present in the AFE dictionary, the mapping
-        for `_DEFAULT_BOARD` should be used.
-
-        Primary assertions:
-          * When a board is present in the AFE mapping, its version
-            mapping is used.
-          * When a board is not present in the AFE mapping, the default
-            version mapping is used.
-
-        Secondarily, assert that when a mapping is absent from Omaha,
-        the AFE mapping is left unchanged.
-        """
-        afe_versions = {
-            _DEFAULT_BOARD: self._VERSIONS[0],
-            'a': self._VERSIONS[1]
-        }
-        boards = set(['a', 'b'])
-        expected = {
-            'a': self._VERSIONS[1],
-            'b': self._VERSIONS[0]
-        }
-        upgrades, _ = assign_stable_images._get_upgrade_versions(
-                afe_versions, {}, boards)
-        self.assertEqual(upgrades, expected)
-
-    def test_omaha_upgrade(self):
-        """
-        Test that upgrades from Omaha are detected.
-
-        Primary assertion:
-          * If a board is found in Omaha, and the version in Omaha is
-            newer than the AFE version, the Omaha version is the one
-            used.
-
-        Secondarily, asserts that version comparisons between various
-        specific version strings are all correct.
-        """
-        boards = set(['a'])
-        for i in range(0, len(self._VERSIONS) - 1):
-            afe_versions = {_DEFAULT_BOARD: self._VERSIONS[i]}
-            for j in range(i+1, len(self._VERSIONS)):
-                omaha_versions = {b: self._VERSIONS[j] for b in boards}
-                upgrades, _ = assign_stable_images._get_upgrade_versions(
-                        afe_versions, omaha_versions, boards)
-                self.assertEqual(upgrades, omaha_versions)
-
-    def test_no_upgrade(self):
-        """
-        Test that if Omaha is behind the AFE, it is ignored.
-
-        Primary assertion:
-          * If a board is found in Omaha, and the version in Omaha is
-            older than the AFE version, the AFE version is the one used.
-
-        Secondarily, asserts that version comparisons between various
-        specific version strings are all correct.
-        """
-        boards = set(['a'])
-        for i in range(1, len(self._VERSIONS)):
-            afe_versions = {_DEFAULT_BOARD: self._VERSIONS[i]}
-            expected = {b: self._VERSIONS[i] for b in boards}
-            for j in range(0, i):
-                omaha_versions = {b: self._VERSIONS[j] for b in boards}
-                upgrades, _ = assign_stable_images._get_upgrade_versions(
-                        afe_versions, omaha_versions, boards)
-                self.assertEqual(upgrades, expected)
-
-    def test_ignore_unused_boards(self):
-        """
-        Test that unlisted boards are ignored.
-
-        Assert that boards present in the AFE or Omaha mappings aren't
-        included in the return mappings when they aren't in the passed
-        in set of boards.
-        """
-        unused_boards = set(['a', 'b'])
-        used_boards = set(['c', 'd'])
-        afe_versions = {b: self._VERSIONS[0] for b in unused_boards}
-        afe_versions[_DEFAULT_BOARD] = self._VERSIONS[1]
-        expected = {b: self._VERSIONS[1] for b in used_boards}
-        omaha_versions = expected.copy()
-        omaha_versions.update(
-                {b: self._VERSIONS[0] for b in unused_boards})
-        upgrades, _ = assign_stable_images._get_upgrade_versions(
-                afe_versions, omaha_versions, used_boards)
-        self.assertEqual(upgrades, expected)
-
-    def test_default_unchanged(self):
-        """
-        Test correct handling when the default build is unchanged.
-
-        Assert that if, in Omaha, only one board in a set of three upgrades
-        from the AFE default, the returned default board mapping is the
-        original default in the AFE.
-        """
-        boards = set(['a', 'b', 'c'])
-        afe_versions = {_DEFAULT_BOARD: self._VERSIONS[0]}
-        omaha_versions = {b: self._VERSIONS[0] for b in boards}
-        omaha_versions['c'] = self._VERSIONS[1]
-        _, new_default = assign_stable_images._get_upgrade_versions(
-                afe_versions, omaha_versions, boards)
-        self.assertEqual(new_default, self._VERSIONS[0])
-
-    def test_default_upgrade(self):
-        """
-        Test correct handling when the default build must change.
-
-        Assert that if, in Omaha, two boards in a set of three upgrade
-        from the AFE default, the returned default board mapping is the
-        new build in Omaha.
-        """
-        boards = set(['a', 'b', 'c'])
-        afe_versions = {_DEFAULT_BOARD: self._VERSIONS[0]}
-        omaha_versions = {b: self._VERSIONS[1] for b in boards}
-        omaha_versions['c'] = self._VERSIONS[0]
-        _, new_default = assign_stable_images._get_upgrade_versions(
-                afe_versions, omaha_versions, boards)
-        self.assertEqual(new_default, self._VERSIONS[1])
-
-
-# Sample version string values to be used when testing
-# `_apply_upgrades()`.
-#
-# _OLD_DEFAULT - Test value representing the default version mapping
-#   in the `old_versions` dictionary in a call to `_apply_upgrades()`.
-# _NEW_DEFAULT - Test value representing the default version mapping
-#   in the `new_versions` dictionary when a version update is being
-#   tested.
-# _OLD_VERSION - Test value representing an arbitrary version for a
-#   board that is mapped in the `old_versions` dictionary in a call to
-#   `_apply_upgrades()`.
-# _NEW_VERSION - Test value representing an arbitrary version for a
-#   board that is mapped in the `new_versions` dictionary in a call to
-#   `_apply_upgrades()`.
-#
-_OLD_DEFAULT = 'old-default-version'
-_NEW_DEFAULT = 'new-default-version'
-_OLD_VERSION = 'old-board-version'
-_NEW_VERSION = 'new-board-version'
-
-
-class _StubAFE(object):
-    """Stubbed out version of `server.frontend.AFE`."""
-
-    CROS_IMAGE_TYPE = 'cros-image-type'
-    FIRMWARE_IMAGE_TYPE = 'firmware-image-type'
-
-    def get_stable_version_map(self, image_type):
-        return image_type
-
-
-class _TestUpdater(assign_stable_images._VersionUpdater):
-    """
-    Subclass of `_VersionUpdater` for testing.
-
-    This class extends `_VersionUpdater` to provide support for testing
-    various assertions about the behavior of the base class and its
-    interactions with `_apply_cros_upgrades()` and
-    `_apply_firmware_upgrades()`.
-
-    The class tests assertions along the following lines:
-      * When applied to the original mappings, the calls to
-        `_do_set_mapping()` and `_do_delete_mapping()` create the
-        expected final mapping state.
-      * Calls to report state changes are made with the expected
-        values.
-      * There's a one-to-one match between reported and actually
-        executed changes.
-
-    """
-
-    def __init__(self, testcase):
-        super(_TestUpdater, self).__init__(_StubAFE(), dry_run=True)
-        self._testcase = testcase
-        self._default_changed = None
-        self._reported_mappings = None
-        self._updated_mappings = None
-        self._reported_deletions = None
-        self._actual_deletions = None
-        self._original_mappings = None
-        self._mappings = None
-        self._expected_mappings = None
-        self._unchanged_boards = None
-
-    def pretest_init(self, initial_versions, expected_versions):
-        """
-        Initialize for testing.
-
-        @param initial_versions   Mappings to be used as the starting
-                                  point for testing.
-        @param expected_versions  The expected final value of the
-                                  mappings after the test.
-        """
-        self._default_changed = False
-        self._reported_mappings = {}
-        self._updated_mappings = {}
-        self._reported_deletions = set()
-        self._actual_deletions = set()
-        self._original_mappings = initial_versions.copy()
-        self._mappings = initial_versions.copy()
-        self._expected_mappings = expected_versions
-        self._unchanged_boards = set()
-
-    def check_results(self, change_default):
-        """
-        Assert that observed changes match expectations.
-
-        Asserts the following:
-          * The `report_default_changed()` method was called (or not)
-            based on whether `change_default` is true (or not).
-          * The changes reported via `_report_board_changed()` match
-            the changes actually applied.
-          * The final mappings after applying requested changes match
-            the actually expected mappings.
-
-        @param change_default   Whether the test should include a change
-                                to the default version mapping.
-        """
-        self._testcase.assertEqual(change_default,
-                                   self._default_changed)
-        self._testcase.assertEqual(self._reported_mappings,
-                                   self._updated_mappings)
-        self._testcase.assertEqual(self._reported_deletions,
-                                   self._actual_deletions)
-        self._testcase.assertEqual(self._mappings,
-                                   self._expected_mappings)
-
-    def report_default_changed(self, old_default, new_default):
-        """
-        Override of our parent class' method for test purposes.
-
-        Saves a record of the report for testing the final result in
-        `apply_upgrades()`, above.
-
-        Assert the following:
-          * The old and new default values match the values that
-            were passed in the original call's arguments.
-          * This function is not being called for a second time.
-
-        @param old_default  The original default version.
-        @param new_default  The new default version to be applied.
-        """
-        self._testcase.assertNotEqual(old_default, new_default)
-        self._testcase.assertEqual(old_default,
-                                   self._original_mappings[_DEFAULT_BOARD])
-        self._testcase.assertEqual(new_default,
-                                   self._expected_mappings[_DEFAULT_BOARD])
-        self._testcase.assertFalse(self._default_changed)
-        self._default_changed = True
-        self._reported_mappings[_DEFAULT_BOARD] = new_default
-
-    def _report_board_changed(self, board, old_version, new_version):
-        """
-        Override of our parent class' method for test purposes.
-
-        Saves a record of the report for testing the final result in
-        `apply_upgrades()`, above.
-
-        Assert the following:
-          * The change being reported actually reports two different
-            versions.
-          * If the board isn't mapped to the default version, then the
-            reported old version is the actually mapped old version.
-          * If the board isn't changing to the default version, then the
-            reported new version is the expected new version.
-          * This is not a second report for this board.
-
-        The implementation implicitly requires that the specified board
-        have a valid mapping.
-
-        @param board        The board with the changing version.
-        @param old_version  The original version mapped to the board.
-        @param new_version  The new version to be applied to the board.
-        """
-        self._testcase.assertNotEqual(old_version, new_version)
-        if board in self._original_mappings:
-            self._testcase.assertEqual(old_version,
-                                       self._original_mappings[board])
-        if board in self._expected_mappings:
-            self._testcase.assertEqual(new_version,
-                                       self._expected_mappings[board])
-            self._testcase.assertNotIn(board, self._reported_mappings)
-            self._reported_mappings[board] = new_version
-        else:
-            self._testcase.assertNotIn(board, self._reported_deletions)
-            self._reported_deletions.add(board)
-
-    def report_board_unchanged(self, board, old_version):
-        """
-        Override of our parent class' method for test purposes.
-
-        Assert the following:
-          * The version being reported as unchanged is actually mapped.
-          * The reported old version matches the expected value.
-          * This is not a second report for this board.
-
-        @param board        The board that is not changing.
-        @param old_version  The board's version mapping.
-        """
-        self._testcase.assertIn(board, self._original_mappings)
-        self._testcase.assertEqual(old_version,
-                                   self._original_mappings[board])
-        self._testcase.assertNotIn(board, self._unchanged_boards)
-        self._unchanged_boards.add(board)
-
-    def _do_set_mapping(self, board, new_version):
-        """
-        Override of our parent class' method for test purposes.
-
-        Saves a record of the change for testing the final result in
-        `apply_upgrades()`, above.
-
-        Assert the following:
-          * This is not a second change for this board.
-          * If we're changing the default mapping, then every board
-            that will be changing to a non-default mapping has been
-            updated.
-
-        @param board        The board with the changing version.
-        @param new_version  The new version to be applied to the board.
-        """
-        self._mappings[board] = new_version
-        self._testcase.assertNotIn(board, self._updated_mappings)
-        self._updated_mappings[board] = new_version
-        if board == _DEFAULT_BOARD:
-            for board in self._expected_mappings:
-                self._testcase.assertIn(board, self._mappings)
-
-    def _do_delete_mapping(self, board):
-        """
-        Override of our parent class' method for test purposes.
-
-        Saves a record of the change for testing the final result in
-        `apply_upgrades()`, above.
-
-        Assert that the board has a mapping prior to deletion.
-
-        @param board        The board with the version to be deleted.
-        """
-        self._testcase.assertNotEqual(board, _DEFAULT_BOARD)
-        self._testcase.assertIn(board, self._mappings)
-        del self._mappings[board]
-        self._actual_deletions.add(board)
-
-
-class ApplyCrOSUpgradesTests(unittest.TestCase):
-    """Tests for the `_apply_cros_upgrades()` function."""
-
-    def _apply_upgrades(self, old_versions, new_versions, change_default):
-        """
-        Test a single call to `_apply_cros_upgrades()`.
-
-        All assertions are handled by an instance of `_TestUpdater`.
-
-        @param old_versions   Parameter to be passed to
-                              `_apply_cros_upgrades()`.
-        @param new_versions   Parameter to be passed to
-                              `_apply_cros_upgrades()`.
-        @param change_default   Whether the test should include a change
-                                to the default version mapping.
-        """
-        old_versions[_DEFAULT_BOARD] = _OLD_DEFAULT
-        if change_default:
-            new_default = _NEW_DEFAULT
-        else:
-            new_default = _OLD_DEFAULT
-        expected_versions = {
-            b: v for b, v in new_versions.items() if v != new_default
-        }
-        expected_versions[_DEFAULT_BOARD] = new_default
-        updater = _TestUpdater(self)
-        updater.pretest_init(old_versions, expected_versions)
-        assign_stable_images._apply_cros_upgrades(
-            updater, old_versions, new_versions, new_default)
-        updater.check_results(change_default)
-
-    def test_no_changes(self):
-        """
-        Test an empty upgrade that does nothing.
-
-        Test the boundary case of an upgrade where there are no boards,
-        and the default does not change.
-        """
-        self._apply_upgrades({}, {}, False)
-
-    def test_change_default(self):
-        """
-        Test an empty upgrade that merely changes the default.
-
-        Test the boundary case of an upgrade where there are no boards,
-        but the default is upgraded.
-        """
-        self._apply_upgrades({}, {}, True)
-
-    def test_board_default_no_changes(self):
-        """
-        Test that a board at default stays with an unchanged default.
-
-        Test the case of a board that is mapped to the default, where
-        neither the board nor the default change.
-        """
-        self._apply_upgrades({}, {'board': _OLD_DEFAULT}, False)
-
-    def test_board_left_behind(self):
-        """
-        Test a board left at the old default after a default upgrade.
-
-        Test the case of a board that stays mapped to the old default as
-        the default board is upgraded.
-        """
-        self._apply_upgrades({}, {'board': _OLD_DEFAULT}, True)
-
-    def test_board_upgrade_from_default(self):
-        """
-        Test upgrading a board from a default that doesn't change.
-
-        Test the case of upgrading a board from default to non-default,
-        where the default doesn't change.
-        """
-        self._apply_upgrades({}, {'board': _NEW_VERSION}, False)
-
-    def test_board_and_default_diverge(self):
-        """
-        Test upgrading a board that diverges from the default.
-
-        Test the case of upgrading a board and default together from the
-        same to different versions.
-        """
-        self._apply_upgrades({}, {'board': _NEW_VERSION}, True)
-
-    def test_board_tracks_default(self):
-        """
-        Test upgrading a board to track a default upgrade.
-
-        Test the case of upgrading a board and the default together.
-        """
-        self._apply_upgrades({}, {'board': _NEW_DEFAULT}, True)
-
-    def test_board_non_default_no_changes(self):
-        """
-        Test an upgrade with no changes to a board or the default.
-
-        Test the case of an upgrade with a board in it, where neither
-        the board nor the default change.
-        """
-        self._apply_upgrades({'board': _NEW_VERSION},
-                             {'board': _NEW_VERSION},
-                             False)
-
-    def test_board_upgrade_and_keep_default(self):
-        """
-        Test a board upgrade with an unchanged default.
-
-        Test the case of upgrading a board while the default stays the
-        same.
-        """
-        self._apply_upgrades({'board': _OLD_VERSION},
-                             {'board': _NEW_VERSION},
-                             False)
-
-    def test_board_upgrade_and_change_default(self):
-        """
-        Test upgrading a board and the default separately.
-
-        Test the case of upgrading both a board and the default, each
-        from and to different versions.
-        """
-        self._apply_upgrades({'board': _OLD_VERSION},
-                             {'board': _NEW_VERSION},
-                             True)
-
-    def test_board_leads_default(self):
-        """
-        Test a board that upgrades ahead of the new default.
-
-        Test the case of upgrading both a board and the default, where
-        the board's old version is the new default version.
-        """
-        self._apply_upgrades({'board': _NEW_DEFAULT},
-                             {'board': _NEW_VERSION},
-                             True)
-
-    def test_board_lags_to_old_default(self):
-        """
-        Test a board that upgrades behind the old default.
-
-        Test the case of upgrading both a board and the default, where
-        the board's new version is the old default version.
-        """
-        self._apply_upgrades({'board': _OLD_VERSION},
-                             {'board': _OLD_DEFAULT},
-                             True)
-
-    def test_board_joins_old_default(self):
-        """
-        Test upgrading a board to a default that doesn't change.
-
-        Test the case of upgrading a board to the default, where the
-        default mapping stays unchanged.
-        """
-        self._apply_upgrades({'board': _OLD_VERSION},
-                             {'board': _OLD_DEFAULT},
-                             False)
-
-    def test_board_joins_new_default(self):
-        """
-        Test upgrading a board to match the new default.
-
-        Test the case of upgrading a board and the default to the same
-        version.
-        """
-        self._apply_upgrades({'board': _OLD_VERSION},
-                             {'board': _NEW_DEFAULT},
-                             True)
-
-    def test_board_becomes_default(self):
-        """
-        Test a board that becomes default after a default upgrade.
-
-        Test the case of upgrading the default to a version already
-        mapped for an existing board.
-        """
-        self._apply_upgrades({'board': _NEW_DEFAULT},
-                             {'board': _NEW_DEFAULT},
-                             True)
-
-
-class ApplyFirmwareUpgradesTests(unittest.TestCase):
-    """Tests for the `_apply_firmware_upgrades()` function."""
-
-    def _apply_upgrades(self, old_versions, new_versions):
-        """
-        Test a single call to `_apply_firmware_upgrades()`.
-
-        All assertions are handled by an instance of `_TestUpdater`.
-
-        @param old_versions   Parameter to be passed to
-                              `_apply_firmware_upgrades()`.
-        @param new_versions   Parameter to be passed to
-                              `_apply_firmware_upgrades()`.
-        """
-        updater = _TestUpdater(self)
-        updater.pretest_init(old_versions, new_versions)
-        assign_stable_images._apply_firmware_upgrades(
-            updater, old_versions, new_versions)
-        updater.check_results(False)
-
-    def test_no_changes(self):
-        """
-        Test an empty upgrade that does nothing.
-
-        Test the boundary case of an upgrade where there are no boards.
-        """
-        self._apply_upgrades({}, {})
-
-    def test_board_added(self):
-        """
-        Test an upgrade that adds a new board.
-
-        Test the case of an upgrade where a board that was previously
-        unmapped is added.
-        """
-        self._apply_upgrades({}, {'board': _NEW_VERSION})
-
-    def test_board_unchanged(self):
-        """
-        Test an upgrade with no changes to a board.
-
-        Test the case of an upgrade with a board that stays the same.
-        """
-        self._apply_upgrades({'board': _NEW_VERSION},
-                             {'board': _NEW_VERSION})
-
-    def test_board_upgrade_and_change_default(self):
-        """
-        Test upgrading a board.
-
-        Test the case of upgrading a board to a new version.
-        """
-        self._apply_upgrades({'board': _OLD_VERSION},
-                             {'board': _NEW_VERSION})
-
-
-if __name__ == '__main__':
-    unittest.main()
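
The deleted tests above pin down the contract of `_apply_cros_upgrades()` through the `expected_versions` dict built in `_apply_upgrades()`: a board whose new version equals the new default loses its explicit mapping, and the default board is remapped to the new default. A minimal illustrative sketch of that end state (the helper name and the assertion below are invented for illustration; this is not the real assign_stable_images code):

    def expected_final_mappings(new_versions, new_default,
                                default_board='DEFAULT'):
        """Mapping state the deleted tests assert after a CrOS upgrade."""
        # Boards that simply track the new default need no explicit entry.
        final = {b: v for b, v in new_versions.items() if v != new_default}
        final[default_board] = new_default
        return final

    # Mirrors test_board_tracks_default(): the board follows the default
    # upgrade, so only the default mapping remains explicit afterwards.
    assert expected_final_mappings(
            {'board': 'new-default-version'},
            'new-default-version') == {'DEFAULT': 'new-default-version'}
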
diff --git a/site_utils/stable_images/build_data.py b/site_utils/stable_images/build_data.py
index a93dc86..b4a74b2 100644
--- a/site_utils/stable_images/build_data.py
+++ b/site_utils/stable_images/build_data.py
@@ -5,17 +5,18 @@
 """Functions for reading build information from GoogleStorage.
 
 This module contains functions providing access to basic data about
-Chrome OS builds:
-  * Functions for finding information about the Chrome OS versions
+ChromeOS builds:
+  * Functions for finding information about the ChromeOS versions
     currently being served by Omaha for various boards/hardware models.
   * Functions for finding information about the firmware delivered by
-    any given build of Chrome OS.
+    any given build of ChromeOS.
 
 The necessary data is stored in JSON files in well-known locations in
 GoogleStorage.
 """
 
 import json
+import six
 import subprocess
 
 import common
@@ -31,7 +32,7 @@
 # served by Omaha, including the following information:
 #   * The board name of the product, as known to Omaha.
 #   * The channel associated with the image.
-#   * The Chrome and Chrome OS version strings for the image
+#   * The Chrome and ChromeOS version strings for the image
 #     being served.
 #
 _OMAHA_STATUS = 'gs://chromeos-build-release-console/omaha_status.json'
@@ -163,7 +164,7 @@
     model_versions = _get_by_key_path(metadata_json, key_path)
 
     if model_versions is not None:
-        for model, fw_versions in model_versions.iteritems():
+        for model, fw_versions in six.iteritems(model_versions):
             fw_version = (fw_versions.get('main-readwrite-firmware-version') or
                           fw_versions.get('main-readonly-firmware-version'))
             model_firmware_versions[model] = fw_version
@@ -217,7 +218,7 @@
     @param board      Name of the board to look up, as known to the AFE.
     @param version    Minimum version to be accepted.
 
-    @return Returns a Chrome OS version string in standard form
+    @return Returns a ChromeOS version string in standard form
             R##-####.#.#.  Will return `None` if `version` is `None` and
             no Omaha entry is found.
     """
diff --git a/site_utils/stable_images/build_data_unittest.py b/site_utils/stable_images/build_data_unittest.py
index 23e69fb..5de4b5e 100644
--- a/site_utils/stable_images/build_data_unittest.py
+++ b/site_utils/stable_images/build_data_unittest.py
@@ -8,10 +8,10 @@
 
 
 import json
-import mock
 import os
 import sys
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.site_utils.stable_images import build_data
@@ -49,7 +49,7 @@
     def test_make_omaha_versions(self, mock_read_gs):
         """Test `get_omaha_version_map()` against one simple input.
 
-        This is a trivial sanity test that asserts that a single
+        This is a trivial test that asserts that a single
         hard-coded input returns a correct hard-coded output.
 
         @param mock_read_gs  Mock created for `_read_gs_json_data()`.
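
The change above drops the third-party `mock` package in favour of the `unittest.mock` module that ships with Python 3; the patching API is the same. A minimal sketch of the decorator style used by these tests (the test class and patch target below are invented for illustration):

    import unittest
    from unittest import mock


    class ExampleTest(unittest.TestCase):
        # Patching with a decorator injects the mock as an extra argument,
        # the same way the updated tests receive `mock_read_gs`.
        @mock.patch('builtins.print')
        def test_patch_as_decorator(self, mock_print):
            print('hello')
            mock_print.assert_called_once_with('hello')


    if __name__ == '__main__':
        unittest.main()
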
diff --git a/site_utils/stable_images/stable_version.py b/site_utils/stable_images/stable_version.py
index eed4eb0..5bb517e 100755
--- a/site_utils/stable_images/stable_version.py
+++ b/site_utils/stable_images/stable_version.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2018 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -54,6 +54,8 @@
 this case.
 """
 
+from __future__ import print_function
+
 import argparse
 import os
 import sys
@@ -110,7 +112,7 @@
 
     def print_all_mappings(self):
         """Print all mappings in `self._version_map`"""
-        print '%s version mappings:' % self._description
+        print('%s version mappings:' % self._description)
         mappings = self._version_map.get_all_versions()
         if not mappings:
             return
@@ -118,7 +120,7 @@
         key_width = max(12, len(max(key_list, key=len)))
         format = '%%-%ds  %%s' % key_width
         for k in sorted(key_list):
-            print format % (k, mappings[k])
+            print(format % (k, mappings[k]))
 
     def print_mapping(self, key):
         """Print the mapping for `key`.
@@ -130,7 +132,7 @@
         """
         version = self.get_mapping(key)
         if version is not None:
-            print '%s  %s' % (self._format_key_data(key), version)
+            print('%s  %s' % (self._format_key_data(key), version))
 
     def set_mapping(self, key, new_version):
         """Change the mapping for `key`, and report the action.
@@ -147,15 +149,15 @@
         """
         old_version = self.get_mapping(key)
         if old_version is None:
-            print '%s -> %s' % (
-                self._format_operation('Adding', key), new_version)
+            print('%s -> %s' % (
+                self._format_operation('Adding', key), new_version))
         elif old_version != new_version:
-            print '%s -> %s to %s' % (
+            print('%s -> %s to %s' % (
                 self._format_operation('Updating', key),
-                old_version, new_version)
+                old_version, new_version))
         else:
-            print '%s -> %s' % (
-                self._format_operation('Unchanged', key), old_version)
+            print('%s -> %s' % (
+                self._format_operation('Unchanged', key), old_version))
         if not self._dry_run and old_version != new_version:
             self._version_map.set_version(key, new_version)
 
@@ -173,12 +175,12 @@
         """
         version = self.get_mapping(key)
         if version is not None:
-            print '%s -> %s' % (
-                self._format_operation('Delete', key), version)
+            print('%s -> %s' % (
+                self._format_operation('Delete', key), version))
             if not self._dry_run:
                 self._version_map.delete_version(key)
         else:
-            print self._format_operation('Unmapped', key)
+            print(self._format_operation('Unmapped', key))
 
 
 class _FirmwareVersionMapHandler(_VersionMapHandler):
@@ -188,25 +190,25 @@
 
 class _CrOSVersionMapHandler(_VersionMapHandler):
     TYPE = frontend.AFE.CROS_IMAGE_TYPE
-    DESCRIPTION = 'Chrome OS'
+    DESCRIPTION = 'ChromeOS'
 
     def set_mapping(self, board, version):
-        """Assign the Chrome OS mapping for the given board.
+        """Assign the ChromeOS mapping for the given board.
 
-        This function assigns the given Chrome OS version to the given
+        This function assigns the given ChromeOS version to the given
         board.  Additionally, for any model with firmware bundled in the
         assigned build, that model will be assigned the firmware version
         found for it in the build.
 
-        @param board    Chrome OS board to be assigned a new version.
-        @param version  New Chrome OS version to be assigned to the
+        @param board    ChromeOS board to be assigned a new version.
+        @param version  New ChromeOS version to be assigned to the
                         board.
         """
         new_version = build_data.get_omaha_upgrade(
             build_data.get_omaha_version_map(), board, version)
         if new_version != version:
-            print 'Force %s version from Omaha:  %-12s -> %s' % (
-                self._description, board, new_version)
+            print('Force %s version from Omaha:  %-12s -> %s' % (
+                self._description, board, new_version))
         super(_CrOSVersionMapHandler, self).set_mapping(board, new_version)
         fw_versions = build_data.get_firmware_versions(board, new_version)
         fw_handler = _FirmwareVersionMapHandler(self._afe, self._dry_run)
@@ -215,13 +217,13 @@
                 fw_handler.set_mapping(model, fw_version)
 
     def delete_mapping(self, board):
-        """Delete the Chrome OS mapping for the given board.
+        """Delete the ChromeOS mapping for the given board.
 
-        This function handles deletes the Chrome OS version mapping for the
+        This function deletes the ChromeOS version mapping for the
         given board.  Additionally, any R/W firmware mapping that existed
         because of the OS mapping will be deleted as well.
 
-        @param board    Chrome OS board to be deleted from the mapping.
+        @param board    ChromeOS board to be deleted from the mapping.
         """
         version = self.get_mapping(board)
         super(_CrOSVersionMapHandler, self).delete_mapping(board)
@@ -285,7 +287,7 @@
     need_newline = False
     for handler in _requested_mapping_handlers(afe, image_type):
         if need_newline:
-            print
+            print()
         handler.print_all_mappings()
         need_newline = True
 
@@ -340,7 +342,7 @@
     @param dry_run      Whether the `-n` option was supplied.
     """
     if dry_run:
-        print 'Dry run; no mappings will be changed.'
+        print('Dry run; no mappings will be changed.')
     handler = _create_version_map_handler(image_type, afe, dry_run)
     handler.set_mapping(key, version)
 
@@ -377,7 +379,7 @@
     @param dry_run      Whether the `-n` option was supplied.
     """
     if dry_run:
-        print 'Dry run; no mappings will be deleted.'
+        print('Dry run; no mappings will be deleted.')
     handler = _create_version_map_handler(image_type, afe, dry_run)
     handler.delete_mapping(key)
 
@@ -436,7 +438,7 @@
     try:
         _dispatch_command(afe, arguments)
     except _CommandError as exc:
-        print >>sys.stderr, 'Error: %s' % str(exc)
+        print('Error: %s' % str(exc), file=sys.stderr)
         sys.exit(1)
 
 
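
The conversion above is the standard Python 2-to-3 print migration: statement syntax becomes the `print()` function, and `print >>sys.stderr, ...` becomes the `file=` keyword argument. A compact before/after sketch (sample strings only):

    from __future__ import print_function  # no-op on Python 3

    import sys

    description = 'ChromeOS'
    # Python 2 statement:   print '%s version mappings:' % description
    print('%s version mappings:' % description)

    # Python 2 redirection: print >>sys.stderr, 'Error: %s' % 'boom'
    print('Error: %s' % 'boom', file=sys.stderr)
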
diff --git a/site_utils/stable_images/stable_version_unittest.py b/site_utils/stable_images/stable_version_unittest.py
index d5221fe..53c3cf0 100755
--- a/site_utils/stable_images/stable_version_unittest.py
+++ b/site_utils/stable_images/stable_version_unittest.py
@@ -4,8 +4,8 @@
 
 """Unit tests for the `stable_version` module and CLI."""
 
-import mock
 import unittest
+from unittest import mock
 
 import common
 from autotest_lib.server import frontend
diff --git a/site_utils/stats/apache_access_log_metrics.py b/site_utils/stats/apache_access_log_metrics.py
deleted file mode 100755
index fcf9c37..0000000
--- a/site_utils/stats/apache_access_log_metrics.py
+++ /dev/null
@@ -1,202 +0,0 @@
-#!/usr/bin/env python2
-
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Script to upload metrics from apache access logs to Monarch."""
-
-from __future__ import print_function
-
-import argparse
-import re
-import sys
-import urlparse
-
-import common
-
-from chromite.lib import ts_mon_config
-from chromite.lib import metrics
-
-from autotest_lib.site_utils.stats import log_daemon_common
-# Not used, but needed for importing rpc_interface.
-from autotest_lib.frontend import setup_django_environment
-from autotest_lib.frontend.afe import rpc_interface
-from autotest_lib.frontend.afe import moblab_rpc_interface
-
-
-"""
-The log format is set to:
-  %v:%p %h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\" %T
-
-These are documented as follows:
-  (from https://httpd.apache.org/docs/current/mod/mod_log_config.html)
-
-%h: Remote host
-%l: Remote logname (from identd, if supplied)
-%O: Bytes sent, including headers. May be zero in rare cases such as when a
-    request is aborted before a response is sent. You need to enable mod_logio
-    to use this.
-%p: The canonical Port of the server serving the request
-%r: First line of request
-%s: Status.  For requests that got internally redirected, this is
-    the status of the *original* request --- %...>s for the last.
-%t: Time, in common log format time format (standard english format)
-%T: The time taken to serve the request, in seconds.
-%u: Remote user (from auth; may be bogus if return status (%s) is 401)
-%v: The canonical ServerName of the server serving the request.
-"""
-
-# Lemma: a regex to match sections delimited by double-quotes ("), which
-# possibly contain escaped quotes (\").
-# This works by matching non-quotes or the string r'\"' repeatedly; then it ends
-# when finding a quote (") preceded by a character which is not a backslash.
-MATCH_UNTIL_QUOTE = r'([^"]|\\")*[^\\]'
-
-ACCESS_MATCHER = re.compile(
-    r'^'
-    r'\S+ \S+ \S+ \S+ '               # Ignore %v:%p %h %l %u
-    r'\[[^]]+\] '                     # Ignore %t
-    r'"('                             # Begin %r
-    r'(?P<request_method>\S+) '       # e.g. POST
-    r'(?P<endpoint>\S+)'              # e.g. /afe/server/noauth/rpc/
-    + MATCH_UNTIL_QUOTE +             # Ignore protocol (e.g. HTTP/1.1)
-    r'|-'                             # The request data might just be "-"
-    r')" '                            # End %r
-    r'(?P<response_code>\d+) '        # %>s (e.g. 200)
-    r'(?P<bytes_sent>\d+)'            # %O
-    r' "' + MATCH_UNTIL_QUOTE + '"'   # Ignore Referer
-    r' "' + MATCH_UNTIL_QUOTE + '"'   # Ignore User-Agent
-    r' ?(?P<response_seconds>\d+?)'   # The server time in seconds
-    r'.*'                             # Allow adding extra stuff afterward.
-)
-
-ACCESS_TIME_METRIC = '/chromeos/autotest/http/server/response_seconds'
-ACCESS_BYTES_METRIC = '/chromeos/autotest/http/server/response_bytes'
-
-RPC_ACCESS_TIME_METRIC = '/chromeos/autotest/http/server/rpc/response_seconds'
-RPC_ACCESS_BYTES_METRIC = '/chromeos/autotest/http/server/rpc/response_bytes'
-RPC_METHOD_ARGUMENT = 'method'
-
-
-# TODO(phobbs) use a memory-efficient structure to detect non-unique paths.
-# We can't just include the endpoint because it will cause a cardinality
-# explosion.
-ALLOWLISTED_ENDPOINTS = frozenset((
-        '/',
-        '/afe/',
-        '/new_tko/server/rpc/',
-        '/afe/server/rpc/',
-        '/___rPc_sWiTcH___',
-        '*',
-        '/afe/server/noauth/rpc/',
-))
-
-
-# A bad actor could DOS Monarch by requesting millions of different RPC methods,
-# each of which would create a different stream. Only allow an allowlist of
-# methods to be recorded in Monarch.
-ALLOWLISTED_METHODS = (frozenset(dir(rpc_interface))
-                       | frozenset(dir(moblab_rpc_interface)))
-
-
-def EmitRequestMetrics(m):
-    """Emits metrics for each line in the access log.
-
-    @param m: A regex match object
-    """
-    fields = {
-        'request_method': m.groupdict().get('request_method', ''),
-        'endpoint': SanitizeEndpoint(m.group('endpoint')),
-        'response_code': int(m.group('response_code')),
-    }
-
-    send_rpc_metrics = (
-        '?' in m.group('endpoint') and '/rpc' in m.group('endpoint'))
-    if send_rpc_metrics:
-        EmitRPCMetrics(m)
-
-    # Request seconds and bytes sent are both extremely high cardinality, so
-    # they must be the VAL of a metric, not a metric field.
-    if m.group('response_seconds'):
-        response_seconds = int(m.group('response_seconds'))
-        metrics.SecondsDistribution(ACCESS_TIME_METRIC).add(response_seconds,
-                                                            fields=fields)
-
-    bytes_sent = int(m.group('bytes_sent'))
-    metrics.CumulativeDistribution(ACCESS_BYTES_METRIC).add(
-        bytes_sent, fields=fields)
-
-
-def EmitRPCMetrics(m):
-    """Emit a special metric including the method when the request was an RPC."""
-    fields = {
-            'request_method': m.groupdict().get('request_method', ''),
-            'rpc_method': ParseRPCMethod(m.group('endpoint')),
-            'response_code': int(m.group('response_code')),
-    }
-
-    if m.group('response_seconds'):
-        response_seconds = int(m.group('response_seconds'))
-        metrics.SecondsDistribution(RPC_ACCESS_TIME_METRIC).add(
-                response_seconds, fields=fields)
-
-    bytes_sent = int(m.group('bytes_sent'))
-    metrics.CumulativeDistribution(RPC_ACCESS_BYTES_METRIC).add(bytes_sent,
-                                                                fields=fields)
-
-
-def ParseRPCMethod(url):
-    """Parses the RPC method from an RPC query string.
-
-  Args:
-    url: The URL requested.
-  """
-    query = urlparse.urlparse(url).query
-    return urlparse.parse_qs(query)[RPC_METHOD_ARGUMENT][-1]
-
-
-def SanitizeEndpoint(endpoint):
-    """Returns empty string if endpoint is not allowlisted.
-
-    @param endpoint: The endpoint to sanitize.
-    """
-    if endpoint in ALLOWLISTED_ENDPOINTS:
-        return endpoint
-    else:
-        return ''
-
-
-MATCHERS = [
-    (ACCESS_MATCHER, EmitRequestMetrics),
-]
-
-
-def ParseArgs():
-    """Parses the command line arguments."""
-    p = argparse.ArgumentParser(
-        description='Parses apache logs and emits metrics to Monarch')
-    p.add_argument('--output-logfile')
-    p.add_argument('--debug-metrics-file',
-                   help='Output metrics to the given file instead of sending '
-                   'them to production.')
-    return p.parse_args()
-
-
-def Main():
-    """Sets up logging and runs matchers against stdin."""
-    args = ParseArgs()
-    log_daemon_common.SetupLogging(args)
-
-    # Set up metrics sending and go.
-    ts_mon_args = {}
-    if args.debug_metrics_file:
-        ts_mon_args['debug_file'] = args.debug_metrics_file
-
-    with ts_mon_config.SetupTsMonGlobalState('apache_access_log_metrics',
-                                             **ts_mon_args):
-        log_daemon_common.RunMatchers(sys.stdin, MATCHERS)
-
-
-if __name__ == '__main__':
-    Main()
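
The deleted `ParseRPCMethod()` above depends on the Python 2-only `urlparse` module; under Python 3 the same functions live in `urllib.parse`. A hedged sketch of the equivalent extraction (the sample URL is made up, modelled on the unit test's EXAMPLE_REQUEST_LINE):

    from urllib.parse import parse_qs, urlparse

    RPC_METHOD_ARGUMENT = 'method'

    def parse_rpc_method(url):
        """Return the last `method=` value from an RPC request URL."""
        query = urlparse(url).query
        return parse_qs(query)[RPC_METHOD_ARGUMENT][-1]

    assert parse_rpc_method('/afe/server/noauth/rpc/?method=foo') == 'foo'
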
diff --git a/site_utils/stats/apache_access_log_metrics_unittest.py b/site_utils/stats/apache_access_log_metrics_unittest.py
deleted file mode 100755
index 61d8061..0000000
--- a/site_utils/stats/apache_access_log_metrics_unittest.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for apache_access_log_metrics.py"""
-
-from __future__ import print_function
-
-import os
-import mock
-import re
-import subprocess
-import tempfile
-import unittest
-
-import apache_access_log_metrics
-
-
-SCRIPT_PATH = os.path.abspath(
-    os.path.join(os.path.dirname(__file__),
-                 'apache_access_log_metrics.py'))
-
-
-EXAMPLE_REQUEST_LINE = (
-    r'chromeos-server2.mtv.corp.google.com:80 100.108.96.5 - - '
-    r'[19/May/2017:11:47:03 -0700] '
-    r'"POST /afe/server/noauth/rpc/?method=foo HTTP/1.1\"" '
-    r'200 354 "-" "Python-urllib/2.7" 5'
-)
-
-
-class TestParsers(unittest.TestCase):
-    """Tests the parsing functions in apache_access_log_metrics."""
-
-    def testParseResponse(self):
-        """Tests that the regex matches the example log line."""
-        match = apache_access_log_metrics.ACCESS_MATCHER.match(
-            EXAMPLE_REQUEST_LINE)
-        self.assertTrue(match)
-
-        self.assertEqual(match.group('bytes_sent'), '354')
-        self.assertEqual(match.group('response_seconds'), '5')
-
-
-class TestEmitters(unittest.TestCase):
-    """Tests the emitter functions in apache_access_log_metrics."""
-
-    def testEmitResponse(self):
-        """Tests that the matcher function doesn't throw an Exception."""
-        match = apache_access_log_metrics.ACCESS_MATCHER.match(
-            EXAMPLE_REQUEST_LINE)
-        # Calling the emitter should not raise any exceptions (for example, by
-        # referencing regex match groups that don't exist).
-        with mock.patch.object(apache_access_log_metrics, 'metrics'):
-            apache_access_log_metrics.EmitRequestMetrics(match)
-
-
-class TestScript(unittest.TestCase):
-    """Tests the script end-to-end."""
-    def testApachAccessLogScriptWithMatchingLine(self):
-        """Try shelling out the the script with --debug-file.
-
-        Sending it a line which matches the first-line regex should result in
-        output from ACCESS_TIME_METRIC.
-        """
-        with tempfile.NamedTemporaryFile() as temp_file:
-            p = subprocess.Popen([SCRIPT_PATH,
-                                  '--debug-metrics-file', temp_file.name],
-                                 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-            p.communicate(EXAMPLE_REQUEST_LINE)
-
-            with open(temp_file.name) as fh:
-                contents = fh.read()
-
-            self.assertTrue(re.search(
-                apache_access_log_metrics.ACCESS_TIME_METRIC[1:] + r'\b',
-                contents))
-            self.assertTrue(re.search(
-                apache_access_log_metrics.ACCESS_BYTES_METRIC[1:] + r'\b',
-                contents))
-
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/site_utils/stats/apache_error_log_example.txt b/site_utils/stats/apache_error_log_example.txt
deleted file mode 100644
index ff723c0..0000000
--- a/site_utils/stats/apache_error_log_example.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-[Mon Apr 17 16:20:27.694190 2017] [:error] [pid 21539] WARNING:root:/usr/local/autotest/site-packages/django/conf/urls/defaults.py:3: DeprecationWarning: django.conf.urls.defaults   is deprecated; use django.conf.urls instead
-[Mon Apr 17 16:20:27.694306 2017] [:error] [pid 21539]   DeprecationWarning)
-[Mon Apr 17 16:20:27.694320 2017] [:error] [pid 21539]
-[Mon Apr 17 16:30:26.141731 2017] [:warn] [pid 4177] mod_wsgi (pid=4177): Callback registration for signal 14 ignored.
-[Mon Apr 17 16:30:26.142223 2017] [:warn] [pid 4177]   File "/usr/local/autotest/frontend/frontend.wsgi", line 23, in application
-[Mon Apr 17 16:30:26.142259 2017] [:warn] [pid 4177]     return _application(environ, start_response)
-[Mon Apr 17 16:30:26.142285 2017] [:warn] [pid 4177]   File "/usr/local/autotest/site-packages/django/core/handlers/wsgi.py", line 255, in __call__
-[Mon Apr 17 16:30:26.142323 2017] [:warn] [pid 4177]     response = self.get_response(request)
-[Tue May 09 16:14:34.418394 2017] [:error] [pid 21225:tid 140225955858176] WARNING:root:/usr/local/autotest/client/common_lib/site_utils.py:18: UserWarning: autotest_lib.client.common_lib.site_utils module is deprecated; use the equivalent utils module instead
-[Tue May 09 16:11:17.009727 2017] [:error] [pid 10631:tid 140225955858176] RuntimeError: module compiled against API version 9 but this version of numpy is 7
-[Tue May 09 16:10:58.871121 2017] [:error] [pid 21164:tid 140225955858176] [remote 127.0.0.1:37613] Traceback (most recent call last):
-
-
diff --git a/site_utils/stats/apache_error_log_metrics.py b/site_utils/stats/apache_error_log_metrics.py
deleted file mode 100755
index 3617ad9..0000000
--- a/site_utils/stats/apache_error_log_metrics.py
+++ /dev/null
@@ -1,161 +0,0 @@
-#!/usr/bin/env python2
-
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A script to parse apache error logs
-
-The script gets the contents of the log file through stdin, and emits a counter
-metric for the beginning of each error message it recognizes.
-"""
-from __future__ import print_function
-
-import argparse
-import re
-import sys
-
-import common
-
-from chromite.lib import metrics
-from chromite.lib import ts_mon_config
-# infra_libs comes from chromite's third_party modules.
-from infra_libs import ts_mon
-
-from autotest_lib.site_utils.stats import log_daemon_common
-
-
-LOOP_INTERVAL = 60
-ERROR_LOG_METRIC = '/chromeos/autotest/apache/error_log'
-ERROR_LOG_LINE_METRIC = '/chromeos/autotest/apache/error_log_line'
-SEGFAULT_METRIC = '/chromeos/autotest/apache/segfault_count'
-START_METRIC = '/chromeos/autotest/apache/start_count'
-STOP_METRIC = '/chromeos/autotest/apache/stop_count'
-
-ERROR_LOG_MATCHER = re.compile(
-    r'^\[[^]]+\] ' # The timestamp. We don't need this.
-    r'\[(mpm_event|core)?:(?P<log_level>\S+)\] '
-    r'\[pid \d+[^]]+\] ' # The PID, possibly followed by a task id.
-    # There may be other sections, such as [remote <ip>]
-    r'(?P<sections>\[[^]]+\] )*'
-    r'\S' # first character after pid must be non-space; otherwise it is
-          # indented, meaning it is just a continuation of a previous message.
-    r'(?P<mod_wsgi>od_wsgi)?' # Note: the 'm' of mod_wsgi was already matched.
-    r'(?P<rest>.*)'
-)
-
-def EmitSegfault(_m):
-    """Emits a Counter metric for segfaults.
-
-    @param _m: A regex match object
-    """
-    metrics.Counter(
-            SEGFAULT_METRIC,
-            description='A metric counting segfaults in apache',
-            field_spec=None,
-    ).increment()
-
-
-def EmitStart(_m):
-    """Emits a Counter metric for apache service starts.
-
-    @param _m: A regex match object
-    """
-
-    metrics.Counter(
-            START_METRIC,
-            description="A metric counting Apache service starts.",
-            field_spec=None,
-    ).increment()
-
-
-def EmitStop(_m, graceful):
-    """Emits a Counter metric for apache service stops
-
-    @param _m: A regex match object
-    @param graceful: Whether apache was stopped gracefully.
-    """
-    metrics.Counter(
-            STOP_METRIC,
-            description="A metric counting Apache service stops.",
-            field_spec=[ts_mon.BooleanField('graceful')]
-    ).increment(fields={
-        'graceful': graceful
-    })
-
-
-MESSAGE_PATTERNS = {
-        r'Segmentation fault': EmitSegfault,
-        r'configured -- resuming normal operations': EmitStart,
-        r'caught SIGTERM, shutting down': lambda m: EmitStop(m, graceful=True),
-        # TODO(phobbs) add log message for when Apache dies ungracefully
-}
-
-
-def EmitErrorLog(m):
-    """Emits a Counter metric for error log messages.
-
-    @param m: A regex match object
-    """
-    log_level = m.group('log_level') or ''
-    # It might be interesting to see whether the error/warning was emitted
-    # from python at the mod_wsgi process or not.
-    mod_wsgi_present = bool(m.group('mod_wsgi'))
-
-    metrics.Counter(ERROR_LOG_METRIC).increment(fields={
-        'log_level': log_level,
-        'mod_wsgi': mod_wsgi_present})
-
-    rest = m.group('rest')
-    for pattern, handler in MESSAGE_PATTERNS.iteritems():
-        if pattern in rest:
-            handler(m)
-
-
-def EmitErrorLogLine(_m):
-    """Emits a Counter metric for each error log line.
-
-    @param _m: A regex match object.
-    """
-    metrics.Counter(
-            ERROR_LOG_LINE_METRIC,
-            description="A count of lines emitted to the apache error log.",
-            field_spec=None,
-    ).increment()
-
-
-MATCHERS = [
-    (ERROR_LOG_MATCHER, EmitErrorLog),
-    (re.compile(r'.*'), EmitErrorLogLine),
-]
-
-
-def ParseArgs():
-    """Parses the command line arguments."""
-    p = argparse.ArgumentParser(
-        description='Parses apache logs and emits metrics to Monarch')
-    p.add_argument('--output-logfile')
-    p.add_argument('--debug-metrics-file',
-                   help='Output metrics to the given file instead of sending '
-                   'them to production.')
-    return p.parse_args()
-
-
-def Main():
-    """Sets up logging and runs matchers against stdin"""
-    args = ParseArgs()
-    log_daemon_common.SetupLogging(args)
-
-    # Set up metrics sending and go.
-    ts_mon_args = {}
-    if args.debug_metrics_file:
-        ts_mon_args['debug_file'] = args.debug_metrics_file
-
-    with ts_mon_config.SetupTsMonGlobalState('apache_error_log_metrics',
-                                             **ts_mon_args):
-        log_daemon_common.RunMatchers(sys.stdin, MATCHERS)
-        metrics.Flush()
-
-
-if __name__ == '__main__':
-    Main()
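
The deleted script above hands its `MATCHERS` list of `(regex, handler)` pairs to `log_daemon_common.RunMatchers()`, whose implementation is not part of this diff. Purely as an assumption about its behaviour, the dispatch presumably looks something like the sketch below, with each handler receiving the regex match object (as `EmitErrorLog` and `EmitErrorLogLine` expect):

    import re

    def run_matchers(lines, matchers):
        """Apply every (regex, handler) pair to each input line."""
        for line in lines:
            for matcher, handler in matchers:
                m = matcher.match(line)
                if m:
                    handler(m)

    # Trivial example only; the real ERROR_LOG_MATCHER is far stricter.
    sample = ['[foo] [:error] [pid 123] something happened\n']
    run_matchers(sample, [(re.compile(r'.*'), lambda m: print('line seen'))])
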
diff --git a/site_utils/stats/apache_error_log_metrics_unittest.py b/site_utils/stats/apache_error_log_metrics_unittest.py
deleted file mode 100644
index 2ef3b01..0000000
--- a/site_utils/stats/apache_error_log_metrics_unittest.py
+++ /dev/null
@@ -1,135 +0,0 @@
-"""Tests for apache_error_log_metrics."""
-
-import os
-import re
-import subprocess
-import tempfile
-import unittest
-
-import common
-
-import apache_error_log_metrics
-
-
-SCRIPT_PATH = os.path.abspath(
-    os.path.join(os.path.dirname(__file__),
-                 'apache_error_log_metrics.py'))
-
-
-class ApacheErrorTest(unittest.TestCase):
-    """Unittest for the apache error log regexp."""
-
-    def testNonMatchingLine(self):
-        """Test for log lines which don't match the expected format.."""
-        lines = [
-          '[] [] [] blank components',
-          '[] [:error] [] no "pid" section',
-          '[] [:error] [pid 1234] no timestamp',
-          '[hello world] [:] [pid 1234] no log level',
-          '[hello] [:error] [pid 42]     too far indented.'
-        ]
-        for line in lines:
-            self.assertEqual(
-                None, apache_error_log_metrics.ERROR_LOG_MATCHER.match(line))
-
-    def testMatchingLines(self):
-        """Test for lines that are expected to match the format."""
-        match = apache_error_log_metrics.ERROR_LOG_MATCHER.match(
-            "[foo] [:bar] [pid 123] WARNING")
-        self.assertEqual('bar', match.group('log_level'))
-        self.assertEqual(None, match.group('mod_wsgi'))
-
-        match = apache_error_log_metrics.ERROR_LOG_MATCHER.match(
-            "[foo] [mpm_event:bar] [pid 123] WARNING")
-        self.assertEqual('bar', match.group('log_level'))
-        self.assertEqual(None, match.group('mod_wsgi'))
-
-        match = apache_error_log_metrics.ERROR_LOG_MATCHER.match(
-            "[foo] [:bar] [pid 123] mod_wsgi (pid=123)")
-        self.assertEqual('bar', match.group('log_level'))
-        self.assertEqual('od_wsgi', match.group('mod_wsgi'))
-
-    def testExampleLog(self):
-        """Try on some example lines from a real apache error log."""
-        with open(os.path.join(os.path.dirname(__file__),
-                               'apache_error_log_example.txt')) as fh:
-            example_log = fh.readlines()
-        matcher_output = [apache_error_log_metrics.ERROR_LOG_MATCHER.match(line)
-                          for line in example_log]
-        matched = filter(bool, matcher_output)
-        self.assertEqual(5, len(matched))
-
-        self.assertEqual('error', matched[0].group('log_level'))
-        self.assertEqual(None, matched[0].group('mod_wsgi'))
-
-        self.assertEqual('warn', matched[1].group('log_level'))
-        self.assertEqual('od_wsgi', matched[1].group('mod_wsgi'))
-
-        self.assertEqual('error', matched[2].group('log_level'))
-        self.assertEqual(None, matched[2].group('mod_wsgi'))
-
-        self.assertEqual('error', matched[3].group('log_level'))
-        self.assertEqual(None, matched[3].group('mod_wsgi'))
-
-        self.assertEqual('error', matched[4].group('log_level'))
-        self.assertEqual(None, matched[4].group('mod_wsgi'))
-
-
-    def _ShellOutToScript(self, lines):
-        """Shells out to the script.
-
-        @param lines: Lines to feed to stdin."""
-        with tempfile.NamedTemporaryFile() as temp_file:
-            p = subprocess.Popen([SCRIPT_PATH,
-                                  '--debug-metrics-file', temp_file.name],
-                                 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-            p.communicate('\n'.join(lines))
-
-            with open(temp_file.name) as fh:
-                return fh.read()
-
-
-    def testApacheErrorLogScriptWithNonMatchingLine(self):
-        """Try shelling out the the script with --debug-file.
-
-        Sending it a non-matching line should result in no output from
-        ERROR_LOG_METRIC.
-        """
-        contents = self._ShellOutToScript(['an example log line'])
-
-        # We have to use re.search here with a word border character ('\b')
-        # because the ERROR_LOG_LINE_METRIC contains ERROR_LOG_METRIC as a
-        # substring.
-        self.assertTrue(re.search(
-            apache_error_log_metrics.ERROR_LOG_LINE_METRIC[1:] + r'\b',
-            contents))
-        self.assertFalse(re.search(
-            apache_error_log_metrics.ERROR_LOG_METRIC[1:] + r'\b',
-            contents))
-
-    def testApachErrorLogScriptWithMatchingLine(self):
-        """Try shelling out the the script with a matching line.
-
-        Sending it a line which matches the first-line regex should result in
-        output from ERROR_LOG_METRIC.
-        """
-        contents = self._ShellOutToScript(['[foo] [:bar] [pid 123] WARNING'])
-
-        self.assertTrue(re.search(
-            apache_error_log_metrics.ERROR_LOG_LINE_METRIC[1:] + r'\b',
-            contents))
-        self.assertTrue(re.search(
-            apache_error_log_metrics.ERROR_LOG_METRIC[1:] + r'\b',
-            contents))
-
-    def testApachErrorLogScriptWithSpecialLines(self):
-        """Sending lines with specific messages."""
-        contents = self._ShellOutToScript(['[foo] [:bar] [pid 123] WARNING Segmentation fault'])
-
-        self.assertTrue(re.search(
-            apache_error_log_metrics.SEGFAULT_METRIC[1:] + r'\b',
-            contents))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/site_utils/stats/mysql_stats.py b/site_utils/stats/mysql_stats.py
deleted file mode 100755
index 8c9e766..0000000
--- a/site_utils/stats/mysql_stats.py
+++ /dev/null
@@ -1,174 +0,0 @@
-#!/usr/bin/python2
-
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Queries a MySQL database and emits status metrics to Monarch.
-
-Note: confusingly, 'Innodb_buffer_pool_reads' is actually the cache-misses, not
-the number of reads to the buffer pool.  'Innodb_buffer_pool_read_requests'
-corresponds to the number of reads to the buffer pool.
-"""
-import logging
-import sys
-
-import MySQLdb
-import time
-
-import common
-
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib.cros import retry
-
-from chromite.lib import metrics
-from chromite.lib import ts_mon_config
-
-AT_DIR='/usr/local/autotest'
-DEFAULT_USER = global_config.global_config.get_config_value(
-        'CROS', 'db_backup_user', type=str, default='')
-DEFAULT_PASSWD = global_config.global_config.get_config_value(
-        'CROS', 'db_backup_password', type=str, default='')
-
-LOOP_INTERVAL = 60
-
-EMITTED_STATUSES_COUNTERS = [
-    'bytes_received',
-    'bytes_sent',
-    'connections',
-    'Innodb_buffer_pool_read_requests',
-    'Innodb_buffer_pool_reads',
-    'Innodb_row_lock_waits',
-    'questions',
-    'slow_queries',
-    'threads_created',
-]
-
-EMITTED_STATUS_GAUGES = [
-    'Innodb_row_lock_time_avg',
-    'Innodb_row_lock_current_waits',
-    'threads_running',
-    'threads_connected',
-]
-
-
-class RetryingConnection(object):
-    """Maintains a db connection and a cursor."""
-    INITIAL_SLEEP_SECONDS = 20
-    MAX_TIMEOUT_SECONDS = 60 * 60
-
-    def __init__(self, *args, **kwargs):
-        self.args = args
-        self.kwargs = kwargs
-        self.db = None
-        self.cursor = None
-
-    def Connect(self):
-        """Establishes a MySQL connection and creates a cursor."""
-        self.db = MySQLdb.connect(*self.args, **self.kwargs)
-        self.cursor = self.db.cursor()
-
-    def Reconnect(self):
-        """Attempts to close the connection, then reconnects."""
-        try:
-            self.cursor.close()
-            self.db.close()
-        except MySQLdb.Error:
-            pass
-        self.Connect()
-
-    def RetryWith(self, func):
-        """Run a function, retrying on OperationalError."""
-        return retry.retry(
-            MySQLdb.OperationalError,
-            delay_sec=self.INITIAL_SLEEP_SECONDS,
-            timeout_min=self.MAX_TIMEOUT_SECONDS,
-            callback=self.Reconnect
-        )(func)()
-
-    def Execute(self, *args, **kwargs):
-        """Runs .execute on the cursor, reconnecting on failure."""
-        def _Execute():
-            return self.cursor.execute(*args, **kwargs)
-        return self.RetryWith(_Execute)
-
-    def Fetchall(self):
-        """Runs .fetchall on the cursor."""
-        return self.cursor.fetchall()
-
-
-def GetStatus(connection, status):
-    """Get the status variable from the database, retrying on failure.
-
-    @param connection: MySQLdb cursor to query with.
-    @param status: Name of the status variable.
-    @returns The mysql query result.
-    """
-    connection.Execute('SHOW GLOBAL STATUS LIKE "%s";' % status)
-    output = connection.Fetchall()[0][1]
-
-    if not output:
-        logging.error('Cannot find any global status like %s', status)
-
-    return int(output)
-
-
-def QueryAndEmit(baselines, conn):
-    """Queries MySQL for important stats and emits Monarch metrics
-
-    @param baselines: A dict containing the initial values for the cumulative
-                      metrics.
-    @param conn: The mysql connection object.
-    """
-    for status in EMITTED_STATUSES_COUNTERS:
-        metric_name = 'chromeos/autotest/afe_db/%s' % status.lower()
-        delta = GetStatus(conn, status) - baselines[status]
-        metrics.Counter(metric_name).set(delta)
-
-    for status in EMITTED_STATUS_GAUGES:
-        metric_name = 'chromeos/autotest/afe_db/%s' % status.lower()
-        metrics.Gauge(metric_name).set(GetStatus(conn, status))
-
-    pages_free = GetStatus(conn, 'Innodb_buffer_pool_pages_free')
-    pages_total = GetStatus(conn, 'Innodb_buffer_pool_pages_total')
-
-    metrics.Gauge('chromeos/autotest/afe_db/buffer_pool_pages').set(
-        pages_free, fields={'used': False})
-
-    metrics.Gauge('chromeos/autotest/afe_db/buffer_pool_pages').set(
-        pages_total - pages_free, fields={'used': True})
-
-
-def main():
-    """Sets up ts_mon and repeatedly queries MySQL stats"""
-    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
-    conn = RetryingConnection('localhost', DEFAULT_USER, DEFAULT_PASSWD)
-    conn.Connect()
-
-    # TODO(crbug.com/803566) Use indirect=False to mitigate orphan mysql_stats
-    # processes overwhelming shards.
-    with ts_mon_config.SetupTsMonGlobalState('mysql_stats', indirect=False):
-        QueryLoop(conn)
-
-
-def QueryLoop(conn):
-    """Queries and emits metrics every LOOP_INTERVAL seconds.
-
-    @param conn: The mysql connection object.
-    """
-    # Get the baselines for cumulative metrics. Otherwise the windowed rate at
-    # the very beginning will be extremely high as it shoots up from 0 to its
-    # current value.
-    baselines = dict((s, GetStatus(conn, s))
-                     for s in EMITTED_STATUSES_COUNTERS)
-
-    while True:
-        now = time.time()
-        QueryAndEmit(baselines, conn)
-        time_spent = time.time() - now
-        sleep_duration = LOOP_INTERVAL - time_spent
-        time.sleep(max(0, sleep_duration))
-
-
-if __name__ == '__main__':
-    main()
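
`QueryLoop()` above records a baseline for every cumulative status counter before emitting metrics, so the first reported value is not a huge jump from zero up to the counter's lifetime total. A tiny illustration of that delta pattern with fabricated readings (no database or Monarch involved):

    # Fabricated successive readings of a cumulative MySQL status counter.
    readings = [105000, 105040, 105120]

    baseline = readings[0]
    for value in readings[1:]:
        # Like QueryAndEmit(): report growth since process start rather
        # than the raw lifetime counter value.
        print('delta since baseline:', value - baseline)
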
diff --git a/site_utils/stats/mysql_stats_unittest.py b/site_utils/stats/mysql_stats_unittest.py
deleted file mode 100644
index 970226c..0000000
--- a/site_utils/stats/mysql_stats_unittest.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Tests for mysql_stats."""
-
-import common
-
-import collections
-import mock
-import unittest
-
-import mysql_stats
-
-
-class MysqlStatsTest(unittest.TestCase):
-    """Unittest for mysql_stats."""
-
-    def testQueryAndEmit(self):
-        """Test for QueryAndEmit."""
-        connection = mock.Mock()
-        connection.Fetchall.return_value = [(
-            'Column-name', 0)]
-
-        # This shouldn't raise an exception.
-        mysql_stats.QueryAndEmit(collections.defaultdict(lambda: 0), connection)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/site_utils/suite_enumerator.py b/site_utils/suite_enumerator.py
index 37e949f..5b65ac4 100755
--- a/site_utils/suite_enumerator.py
+++ b/site_utils/suite_enumerator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -12,10 +12,9 @@
 Example:
 $ ./site_utils/suite_enumerator.py -a /usr/local/autotest bvt 2>/dev/null
 login_LoginSuccess
-logging_CrashSender
 login_BadAuthentication
 
-This is intended for use only with Chrome OS test suits that leverage the
+This is intended for use only with ChromeOS test suites that leverage the
 dynamic suite infrastructure in server/cros/dynamic_suite.py.
 """
 
diff --git a/site_utils/suite_preprocessor.py b/site_utils/suite_preprocessor.py
index 3f0452d..d263323 100755
--- a/site_utils/suite_preprocessor.py
+++ b/site_utils/suite_preprocessor.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/site_utils/sync_cloudsql_access.py b/site_utils/sync_cloudsql_access.py
index 3abb94b..e3ca786 100755
--- a/site_utils/sync_cloudsql_access.py
+++ b/site_utils/sync_cloudsql_access.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/site_utils/test_push.py b/site_utils/test_push.py
deleted file mode 100755
index e19f739..0000000
--- a/site_utils/test_push.py
+++ /dev/null
@@ -1,528 +0,0 @@
-#!/usr/bin/python2
-#
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tool to validate code in prod branch before pushing to lab.
-
-The script runs push_to_prod suite to verify code in prod branch is ready to be
-pushed. Link to design document:
-https://docs.google.com/a/google.com/document/d/1JMz0xS3fZRSHMpFkkKAL_rxsdbNZomhHbC3B8L71uuI/edit
-
-To verify that the prod branch can be pushed to the lab, run the following
-command on the chromeos-staging-master2.hot server:
-/usr/local/autotest/site_utils/test_push.py -e someone@company.com
-
-The script uses the latest stable gandof build as the test build by default.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import argparse
-import ast
-import datetime
-import getpass
-import multiprocessing
-import os
-import re
-import subprocess
-import sys
-import time
-import traceback
-from six.moves import urllib
-
-import common
-try:
-    from autotest_lib.frontend import setup_django_environment
-    from autotest_lib.frontend.afe import models
-    from autotest_lib.frontend.afe import rpc_utils
-except ImportError:
-    # Unittest may not have Django database configured and will fail to import.
-    pass
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.common_lib import priorities
-from autotest_lib.client.common_lib.cros import retry
-from autotest_lib.frontend.afe import rpc_client_lib
-from autotest_lib.server import constants
-from autotest_lib.server import site_utils
-from autotest_lib.server import utils
-from autotest_lib.server.cros import provision
-from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
-from autotest_lib.site_utils import test_push_common
-
-AUTOTEST_DIR=common.autotest_dir
-CONFIG = global_config.global_config
-
-AFE = frontend_wrappers.RetryingAFE(timeout_min=0.5, delay_sec=2)
-TKO = frontend_wrappers.RetryingTKO(timeout_min=0.1, delay_sec=10)
-
-MAIL_FROM = 'chromeos-test@google.com'
-BUILD_REGEX = 'R[\d]+-[\d]+\.[\d]+\.[\d]+'
-RUN_SUITE_COMMAND = 'run_suite.py'
-PUSH_TO_PROD_SUITE = 'push_to_prod'
-DUMMY_SUITE = 'dummy'
-DEFAULT_TIMEOUT_MIN_FOR_SUITE_JOB = 30
-IMAGE_BUCKET = CONFIG.get_config_value('CROS', 'image_storage_server')
-DEFAULT_NUM_DUTS = (
-        ('gandof', 4),
-        ('quawks', 2),
-)
-
-SUITE_JOB_START_INFO_REGEX = ('^.*Created suite job:.*'
-                              'tab_id=view_job&object_id=(\d+)$')
-
-URL_HOST = CONFIG.get_config_value('SERVER', 'hostname', type=str)
-URL_PATTERN = CONFIG.get_config_value('CROS', 'log_url_pattern', type=str)
-
-# Some tests could be extra / missing or have mismatched results for various
-# reasons. Add such tests to this list and explain the reason.
-_IGNORED_TESTS = [
-    # test_push uses a stable image build to test, which is quite behind ToT.
-    # The following expectations are correct at ToT, but need to be ignored
-    # until stable image is recent enough.
-
-    # TODO(pprabhu): Remove once R70 is stable.
-    'dummy_Fail.RetrySuccess',
-    'dummy_Fail.RetryFail',
-]
-
-# Multiprocessing proxy objects that are used to share data between background
-# suite-running processes and main process. The multiprocessing-compatible
-# versions are initialized in _main.
-_run_suite_output = []
-_all_suite_ids = []
-
-DEFAULT_SERVICE_RESPAWN_LIMIT = 2
-
-
-class TestPushException(Exception):
-    """Exception to be raised when the test to push to prod failed."""
-    pass
-
-@retry.retry(TestPushException, timeout_min=5, delay_sec=30)
-def check_dut_inventory(required_num_duts, pool):
-    """Check DUT inventory for each board in the pool specified..
-
-    @param required_num_duts: a dict specifying the number of DUTs each
-                              platform requires in order to finish push tests.
-    @param pool: the pool used by test_push.
-    @raise TestPushException: if the number of DUTs is less than required.
-    """
-    print('Checking DUT inventory...')
-    pool_label = constants.Labels.POOL_PREFIX + pool
-    hosts = AFE.run('get_hosts', status='Ready', locked=False)
-    hosts = [h for h in hosts if pool_label in h.get('labels', [])]
-    platforms = [host['platform'] for host in hosts]
-    current_inventory = {p : platforms.count(p) for p in platforms}
-    error_msg = ''
-    for platform, req_num in required_num_duts.items():
-        curr_num = current_inventory.get(platform, 0)
-        if curr_num < req_num:
-            error_msg += ('\nRequire %d %s DUTs in pool: %s, only %d are Ready'
-                          ' now' % (req_num, platform, pool, curr_num))
-    if error_msg:
-        raise TestPushException('Not enough DUTs to run push tests. %s' %
-                                error_msg)
-
-
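
check_dut_inventory above filters the AFE host list down to Ready, unlocked hosts carrying the pool label and then counts them per platform. A standalone sketch of that tally, assuming host records shaped like the get_hosts response (dicts with 'labels' and 'platform' keys):

from collections import Counter

def count_ready_per_platform(hosts, pool_label):
    # hosts: iterable of dicts with 'labels' and 'platform' keys (assumed shape).
    in_pool = [h for h in hosts if pool_label in h.get('labels', [])]
    return Counter(h['platform'] for h in in_pool)

# Example: Counter({'gandof': 4, 'quawks': 2}) would satisfy DEFAULT_NUM_DUTS.
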
-def powerwash_dut_to_test_repair(hostname, timeout):
-    """Powerwash dut to test repair workflow.
-
-    @param hostname: hostname of the dut.
-    @param timeout: seconds of the powerwash test to hit timeout.
-    @raise TestPushException: if DUT fail to run the test.
-    """
-    t = models.Test.objects.get(name='platform_Powerwash')
-    c = utils.read_file(os.path.join(AUTOTEST_DIR, t.path))
-    job_id = rpc_utils.create_job_common(
-             'powerwash', priority=priorities.Priority.SUPER,
-             control_type='Server', control_file=c, hosts=[hostname])
-
-    end = time.time() + timeout
-    while not TKO.get_job_test_statuses_from_db(job_id):
-        if time.time() >= end:
-            AFE.run('abort_host_queue_entries', job=job_id)
-            raise TestPushException(
-                'Powerwash test on %s timed out after %ds; aborting it.' %
-                (hostname, timeout))
-        time.sleep(10)
-    verify_test_results(job_id,
-                        test_push_common.EXPECTED_TEST_RESULTS_POWERWASH)
-    # Kick off verify, verify will fail and a repair should be triggered.
-    AFE.reverify_hosts(hostnames=[hostname])
-
-
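
powerwash_dut_to_test_repair polls TKO until the powerwash job reports results, aborting the job if the deadline passes. The same poll-with-deadline shape, reduced to hypothetical is_done()/abort() callables:

import time

def wait_or_abort(is_done, abort, timeout, poll_interval=10):
    deadline = time.time() + timeout
    while not is_done():
        if time.time() >= deadline:
            abort()
            raise RuntimeError('job did not finish within %ds' % timeout)
        time.sleep(poll_interval)
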
-def reverify_all_push_duts():
-    """Reverify all the push DUTs."""
-    print('Reverifying all DUTs.')
-    hosts = [h.hostname for h in AFE.get_hosts()]
-    AFE.reverify_hosts(hostnames=hosts)
-
-
-def parse_arguments(argv):
-    """Parse arguments for test_push tool.
-
-    @param argv   Argument vector, as for `sys.argv`, including the
-                  command name in `argv[0]`.
-    @return: Parsed arguments.
-
-    """
-    parser = argparse.ArgumentParser(prog=argv[0])
-    parser.add_argument('-b', '--board', dest='board', default='gandof',
-                        help='Default is gandof.')
-    parser.add_argument('-sb', '--shard_board', dest='shard_board',
-                        default='quawks',
-                        help='Default is quawks.')
-    parser.add_argument('-i', '--build', dest='build', default=None,
-                        help='Default is the latest stable build of the given '
-                             'board. Must be a stable build, otherwise the AU '
-                             'test will fail. (ex: gandof-release/R54-8743.25.0)')
-    parser.add_argument('-si', '--shard_build', dest='shard_build', default=None,
-                        help='Default is the latest stable build of the given '
-                             'board. Must be a stable build, otherwise the AU '
-                             'test will fail.')
-    parser.add_argument('-p', '--pool', dest='pool', default='bvt')
-    parser.add_argument('-t', '--timeout_min', dest='timeout_min', type=int,
-                        default=DEFAULT_TIMEOUT_MIN_FOR_SUITE_JOB,
-                        help='Time in mins to wait before aborting the jobs we '
-                             'are waiting on. Only for the asynchronous suites '
-                             'triggered by the create_and_return flag.')
-    parser.add_argument('-ud', '--num_duts', dest='num_duts',
-                        default=dict(DEFAULT_NUM_DUTS),
-                        type=ast.literal_eval,
-                        help="Python dict literal that specifies the required"
-                        " number of DUTs for each board. E.g {'gandof':4}")
-    parser.add_argument('-c', '--continue_on_failure', action='store_true',
-                        dest='continue_on_failure',
-                        help='All tests continue to run when there is a failure')
-    parser.add_argument('-sl', '--service_respawn_limit', type=int,
-                        default=DEFAULT_SERVICE_RESPAWN_LIMIT,
-                        help='If a service crashes more than this many times, '
-                             'the test push is considered failed.')
-
-    arguments = parser.parse_args(argv[1:])
-
-    # Get latest stable build as default build.
-    version_map = AFE.get_stable_version_map(AFE.CROS_IMAGE_TYPE)
-    if not arguments.build:
-        arguments.build = version_map.get_image_name(arguments.board)
-    if not arguments.shard_build:
-        arguments.shard_build = version_map.get_image_name(
-            arguments.shard_board)
-    return arguments
-
-
-def do_run_suite(suite_name, arguments, use_shard=False,
-                 create_and_return=False):
-    """Call run_suite to run a suite job, and return the suite job id.
-
-    The script waits for the suite job to finish before returning the suite job
-    id. It also echoes the run_suite output to stdout.
-
-    @param suite_name: Name of a suite, e.g., dummy.
-    @param arguments: Arguments for run_suite command.
-    @param use_shard: If true, suite is scheduled for shard board.
-    @param create_and_return: If True, run_suite just creates the suite, prints
-                              the job id, then finishes immediately.
-
-    @return: Suite job ID.
-
-    """
-    if use_shard:
-        board = arguments.shard_board
-        build = arguments.shard_build
-    else:
-        board = arguments.board
-        build = arguments.build
-
-    # Remove cros-version label to force provision.
-    hosts = AFE.get_hosts(label=constants.Labels.BOARD_PREFIX+board,
-                          locked=False)
-    for host in hosts:
-        labels_to_remove = [
-                l for l in host.labels
-                if l.startswith(provision.CROS_VERSION_PREFIX)]
-        if labels_to_remove:
-            AFE.run('host_remove_labels', id=host.id, labels=labels_to_remove)
-
-        # Test the repair workflow on shards; the powerwash test will time out after 7m.
-        if use_shard and not create_and_return:
-            powerwash_dut_to_test_repair(host.hostname, timeout=420)
-
-    current_dir = os.path.dirname(os.path.realpath(__file__))
-    cmd = [os.path.join(current_dir, RUN_SUITE_COMMAND),
-           '-s', suite_name,
-           '-b', board,
-           '-i', build,
-           '-p', arguments.pool,
-           '--minimum_duts', str(arguments.num_duts[board])]
-    if create_and_return:
-        cmd += ['-c']
-
-    suite_job_id = None
-
-    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
-                            stderr=subprocess.STDOUT)
-
-    while True:
-        line = proc.stdout.readline()
-
-        # Break when run_suite process completed.
-        if not line and proc.poll() != None:
-            break
-        print(line.rstrip())
-        _run_suite_output.append(line.rstrip())
-
-        if not suite_job_id:
-            m = re.match(SUITE_JOB_START_INFO_REGEX, line)
-            if m and m.group(1):
-                suite_job_id = int(m.group(1))
-                _all_suite_ids.append(suite_job_id)
-
-    if not suite_job_id:
-        raise TestPushException('Failed to retrieve suite job ID.')
-
-    # If create_and_return is specified, wait for the suite to finish.
-    if create_and_return:
-        end = time.time() + arguments.timeout_min * 60
-        while not AFE.get_jobs(id=suite_job_id, finished=True):
-            if time.time() < end:
-                time.sleep(10)
-            else:
-                AFE.run('abort_host_queue_entries', job=suite_job_id)
-                raise TestPushException(
-                        'Asynchronous suite triggered by create_and_return '
-                        'flag has timed out after %d mins. Aborting it.' %
-                        arguments.timeout_min)
-
-    print('Suite job %s is completed.' % suite_job_id)
-    return suite_job_id
-
-
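
do_run_suite streams run_suite's combined stdout/stderr line by line, echoing it while watching for the 'Created suite job' line that carries the job id. A reduced sketch of that pattern with a placeholder command and an illustrative regex (not the production SUITE_JOB_START_INFO_REGEX):

import re
import subprocess

JOB_ID_RE = r'object_id=(\d+)$'  # illustrative only

def run_and_find_job_id(cmd):
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, text=True)
    job_id = None
    for line in iter(proc.stdout.readline, ''):
        print(line.rstrip())
        match = re.search(JOB_ID_RE, line.rstrip())
        if job_id is None and match:
            job_id = int(match.group(1))
    proc.wait()
    return job_id
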
-def check_dut_image(build, suite_job_id):
-    """Confirm all DUTs used for the suite are imaged to expected build.
-
-    @param build: Expected build to be imaged.
-    @param suite_job_id: job ID of the suite job.
-    @raise TestPushException: If a DUT does not have expected build imaged.
-    """
-    print('Checking image installed in DUTs...')
-    job_ids = [job.id for job in
-               models.Job.objects.filter(parent_job_id=suite_job_id)]
-    hqes = [models.HostQueueEntry.objects.filter(job_id=job_id)[0]
-            for job_id in job_ids]
-    hostnames = set([hqe.host.hostname for hqe in hqes])
-    for hostname in hostnames:
-        found_build = site_utils.get_build_from_afe(hostname, AFE)
-        if found_build != build:
-            raise TestPushException('DUT is not imaged properly. Host %s has '
-                                    'build %s, while build %s is expected.' %
-                                    (hostname, found_build, build))
-
-
-def test_suite(suite_name, expected_results, arguments, use_shard=False,
-               create_and_return=False):
-    """Call run_suite to start a suite job and verify results.
-
-    @param suite_name: Name of a suite, e.g., dummy
-    @param expected_results: A dictionary of test name to test result.
-    @param arguments: Arguments for run_suite command.
-    @param use_shard: If true, suite is scheduled for shard board.
-    @param create_and_return: If True, run_suite just creates the suite, prints
-                              the job id, then finishes immediately.
-    """
-    suite_job_id = do_run_suite(suite_name, arguments, use_shard,
-                                create_and_return)
-
-    # Confirm all DUTs used for the suite are imaged to expected build.
-    # hqe.host_id for jobs running on a shard is not synced back to the master
-    # db; therefore, skip verifying the DUT build for jobs running on a shard.
-    build_expected = arguments.build
-    if not use_shard:
-        check_dut_image(build_expected, suite_job_id)
-
-    # Verify test results are the expected results.
-    verify_test_results(suite_job_id, expected_results)
-
-
-def verify_test_results(job_id, expected_results):
-    """Verify the test results with the expected results.
-
-    @param job_id: id of the running job. For a suite job, it is suite_job_id.
-    @param expected_results: A dictionary of test name to test result.
-    @raise TestPushException: If verification fails.
-    """
-    print('Comparing test results...')
-    test_views = site_utils.get_test_views_from_tko(job_id, TKO)
-    summary = test_push_common.summarize_push(test_views, expected_results,
-                                              _IGNORED_TESTS)
-
-    # Test that the link to the log can be loaded.
-    job_name = '%s-%s' % (job_id, getpass.getuser())
-    log_link = URL_PATTERN % (rpc_client_lib.add_protocol(URL_HOST), job_name)
-    try:
-        urllib.request.urlopen(log_link).read()
-    except urllib.error.URLError:
-        summary.append('Failed to load page for link to log: %s.' % log_link)
-
-    if summary:
-        raise TestPushException('\n'.join(summary))
-
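
Besides comparing statuses, verify_test_results confirms that the results page for the job actually loads, and records a summary line if it does not. The check in isolation, with the URL as a placeholder:

import urllib.error
import urllib.request

def check_log_link(url, summary):
    try:
        urllib.request.urlopen(url).read()
    except urllib.error.URLError:
        summary.append('Failed to load page for link to log: %s.' % url)
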
-def test_suite_wrapper(queue, suite_name, expected_results, arguments,
-                       use_shard=False, create_and_return=False):
-    """Wrapper to call test_suite. Handle exception and pipe it to parent
-    process.
-
-    @param queue: Queue to save exception to be accessed by parent process.
-    @param suite_name: Name of a suite, e.g., dummy
-    @param expected_results: A dictionary of test name to test result.
-    @param arguments: Arguments for run_suite command.
-    @param use_shard: If true, suite is scheduled for shard board.
-    @param create_and_return: If True, run_suite just creates the suite, prints
-                              the job id, then finishes immediately.
-    """
-    try:
-        test_suite(suite_name, expected_results, arguments, use_shard,
-                   create_and_return)
-    except Exception:
-        # Storing the whole exc_info leads to a PicklingError.
-        except_type, except_value, tb = sys.exc_info()
-        queue.put((except_type, except_value, traceback.extract_tb(tb)))
-
-
-def check_queue(queue):
-    """Check the queue for any exception being raised.
-
-    @param queue: Queue used to store exception for parent process to access.
-    @raise: Any exception found in the queue.
-    """
-    if queue.empty():
-        return
-    exc_info = queue.get()
-    # Raise the exception with original backtrace.
-    print('Original stack trace of the exception:\n%s' % exc_info[2])
-    raise exc_info[0](exc_info[1])
-
-
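
test_suite_wrapper and check_queue cooperate to surface a worker-process failure in the parent: the worker puts a picklable (type, value, extracted traceback) tuple on a Queue, and the parent reports and re-raises from it. A self-contained sketch of that handshake:

import multiprocessing
import sys
import traceback

def worker(queue):
    try:
        raise RuntimeError('boom')
    except Exception:
        # sys.exc_info() itself does not pickle; extract what we need.
        exc_type, exc_value, tb = sys.exc_info()
        queue.put((exc_type, exc_value, traceback.extract_tb(tb)))

if __name__ == '__main__':
    queue = multiprocessing.Queue()
    proc = multiprocessing.Process(target=worker, args=(queue,))
    proc.start()
    proc.join()
    if not queue.empty():
        exc_type, exc_value, tb_list = queue.get()
        print('worker failed:', exc_type.__name__, exc_value)
        print(''.join(traceback.format_list(tb_list)))
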
-def _run_test_suites(arguments):
-    """Run the actual tests that comprise the test_push."""
-    # Using the daemon flag will kill child processes when the parent process fails.
-    use_daemon = not arguments.continue_on_failure
-    queue = multiprocessing.Queue()
-
-    push_to_prod_suite = multiprocessing.Process(
-            target=test_suite_wrapper,
-            args=(queue, PUSH_TO_PROD_SUITE,
-                  test_push_common.EXPECTED_TEST_RESULTS, arguments))
-    push_to_prod_suite.daemon = use_daemon
-    push_to_prod_suite.start()
-
-    # suite test with --create_and_return flag
-    asynchronous_suite = multiprocessing.Process(
-            target=test_suite_wrapper,
-            args=(queue, DUMMY_SUITE,
-                  test_push_common.EXPECTED_TEST_RESULTS_DUMMY,
-                  arguments, True, True))
-    asynchronous_suite.daemon = True
-    asynchronous_suite.start()
-
-    while push_to_prod_suite.is_alive() or asynchronous_suite.is_alive():
-        check_queue(queue)
-        time.sleep(5)
-    check_queue(queue)
-    push_to_prod_suite.join()
-    asynchronous_suite.join()
-
-
-def check_service_crash(respawn_limit, start_time):
-  """Check whether scheduler or host_scheduler crash during testing.
-
-  Since the testing push is kicked off at the beginning of a given hour, a
-  service is considered crashed if the number of times it was respawned during
-  the testing push exceeds the respawn_limit.
-
-  @param respawn_limit: The maximum number of times the service is allowed to
-                        be respawned.
-  @param start_time: The time that testing push is kicked off.
-  """
-  def _parse(filename_prefix, filename):
-    """Helper method to parse the time of the log.
-
-    @param filename_prefix: The prefix of the filename.
-    @param filename: The name of the log file.
-    """
-    return datetime.datetime.strptime(filename[len(filename_prefix):],
-                                      "%Y-%m-%d-%H.%M.%S")
-
-  services = ['scheduler', 'host_scheduler']
-  logs = os.listdir('%s/logs/' % AUTOTEST_DIR)
-  curr_time = datetime.datetime.now()
-
-  error_msg = ''
-  for service in services:
-    log_prefix = '%s.log.' % service
-    respawn_count = sum(1 for l in logs if l.startswith(log_prefix)
-                        and start_time <= _parse(log_prefix, l) <= curr_time)
-
-    if respawn_count > respawn_limit:
-      error_msg += ('%s has been respawned %s times during the testing push at '
-                    '%s. It has very likely crashed. Please check!\n' %
-                    (service, respawn_count,
-                     start_time.strftime("%Y-%m-%d-%H")))
-  if error_msg:
-    raise TestPushException(error_msg)
-
-
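
check_service_crash infers respawns by counting rotated log files whose embedded timestamp falls inside the push window. The counting in isolation, assuming filenames like 'scheduler.log.2024-01-01-12.00.00':

import datetime

def count_respawns(log_names, prefix, start, end):
    def stamp(name):
        return datetime.datetime.strptime(name[len(prefix):],
                                          '%Y-%m-%d-%H.%M.%S')
    return sum(1 for name in log_names
               if name.startswith(prefix) and start <= stamp(name) <= end)
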
-_SUCCESS_MSG = """
-All staging tests completed successfully.
-
-Instructions for pushing to prod are available at
-https://goto.google.com/autotest-to-prod
-"""
-
-
-def _main(arguments):
-    """Run test and promote repo branches if tests succeed.
-
-    @param arguments: command line arguments.
-    """
-
-    # TODO: Use chromite.lib.parallel.Manager instead, to work around the
-    # too-long-tmp-path problem.
-    mpmanager = multiprocessing.Manager()
-    # These are globals used by other functions in this module to communicate
-    # back from worker processes.
-    global _run_suite_output
-    _run_suite_output = mpmanager.list()
-    global _all_suite_ids
-    _all_suite_ids = mpmanager.list()
-
-    try:
-        start_time = datetime.datetime.now()
-        reverify_all_push_duts()
-        time.sleep(15) # Wait for the verify test to start.
-        check_dut_inventory(arguments.num_duts, arguments.pool)
-        _run_test_suites(arguments)
-        check_service_crash(arguments.service_respawn_limit, start_time)
-        print(_SUCCESS_MSG)
-    except Exception:
-        # Abort running jobs unless flagged to continue when there is a failure.
-        if not arguments.continue_on_failure:
-            for suite_id in _all_suite_ids:
-                if AFE.get_jobs(id=suite_id, finished=False):
-                    AFE.run('abort_host_queue_entries', job=suite_id)
-        raise
-
-
-def main():
-    """Entry point."""
-    arguments = parse_arguments(sys.argv)
-    _main(arguments)
-
-
-if __name__ == '__main__':
-    main()
diff --git a/site_utils/test_push_common.py b/site_utils/test_push_common.py
deleted file mode 100644
index 715aa6b..0000000
--- a/site_utils/test_push_common.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Lint as: python2, python3
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Common file shared by test_push of autotest and skylab.
-
-autotest: site_utils/test_push.py
-skylab: venv/skylab_staging/test_push.py
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-import collections
-import re
-import six
-
-# Dictionary of test results keyed by test name regular expression.
-EXPECTED_TEST_RESULTS = {'^SERVER_JOB$':                 ['GOOD'],
-                         # This is related to dummy_Fail/control.dependency.
-                         'dummy_Fail.dependency$':       ['TEST_NA'],
-                         'login_LoginSuccess.*':         ['GOOD'],
-                         'dummy_Pass.*':                 ['GOOD'],
-                         'dummy_Fail.Fail$':             ['FAIL'],
-                         'dummy_Fail.Error$':            ['ERROR'],
-                         'dummy_Fail.Warn$':             ['WARN'],
-                         'dummy_Fail.NAError$':          ['TEST_NA'],
-                         'dummy_Fail.Crash$':            ['GOOD'],
-                         }
-
-EXPECTED_TEST_RESULTS_DUMMY = {'^SERVER_JOB$':       ['GOOD'],
-                               'dummy_Pass.*':       ['GOOD'],
-                               'dummy_Fail.Fail':    ['FAIL'],
-                               'dummy_Fail.Warn':    ['WARN'],
-                               'dummy_Fail.Crash':   ['GOOD'],
-                               'dummy_Fail.Error':   ['ERROR'],
-                               'dummy_Fail.NAError': ['TEST_NA'],
-                               }
-
-EXPECTED_TEST_RESULTS_POWERWASH = {'platform_Powerwash': ['GOOD'],
-                                   'SERVER_JOB':         ['GOOD'],
-                                   }
-
-_TestPushErrors = collections.namedtuple(
-        '_TestPushErrors',
-        [
-                'mismatch_errors',
-                'unknown_tests',
-                'missing_tests',
-        ]
-)
-
-
-def summarize_push(test_views, expected_results, ignored_tests=[]):
-    """Summarize the test push errors."""
-    test_push_errors = _match_test_results(test_views, expected_results,
-                                           ignored_tests)
-    return _generate_push_summary(test_push_errors)
-
-
-def _match_test_results(test_views, expected_results, ignored_tests):
-    """Match test results with expected results.
-
-    @param test_views: A defaultdict where keys are test names and values are
-                       lists of test statuses, e.g.,
-                       {'dummy_Fail.Error': ['ERROR', 'ERROR'],
-                        'dummy_Fail.NAError': ['TEST_NA'],
-                        'dummy_Fail.RetrySuccess': ['ERROR', 'GOOD'],
-                        }
-    @param expected_results: A dictionary of test name to expected test result.
-                             Has the same format as test_views.
-    @param ignored_tests: A list of test name patterns. Any mismatch between
-                          test results and expected test results that matches
-                          one of these patterns is ignored.
-
-    @return: A _TestPushErrors tuple.
-    """
-    mismatch_errors = []
-    unknown_tests = []
-    found_keys = set()
-    for test_name, test_status_list in six.iteritems(test_views):
-        test_found = False
-        for test_name_pattern, expected_result in expected_results.items():
-            if re.search(test_name_pattern, test_name):
-                test_found = True
-                found_keys.add(test_name_pattern)
-                if (sorted(expected_result) != sorted(test_status_list) and
-                    _is_significant(test_name, ignored_tests)):
-                    error = ('%s Expected: %s, Actual: %s' %
-                             (test_name, expected_result, test_status_list))
-                    mismatch_errors.append(error)
-
-        if not test_found and _is_significant(test_name, ignored_tests):
-            unknown_tests.append(test_name)
-
-    missing_tests = set(expected_results.keys()) - found_keys
-    missing_tests = [t for t in missing_tests
-                     if _is_significant(t, ignored_tests)]
-    return _TestPushErrors(mismatch_errors=mismatch_errors,
-                           unknown_tests=unknown_tests,
-                           missing_tests=missing_tests)
-
-
-def _is_significant(test, ignored_tests_patterns):
-    return all([test not in m for m in ignored_tests_patterns])
-
-
-def _generate_push_summary(test_push_errors):
-    """Generate a list of summary based on the test_push results."""
-    summary = []
-    if test_push_errors.mismatch_errors:
-        summary.append(('Results of %d test(s) do not match expected '
-                        'values:') % len(test_push_errors.mismatch_errors))
-        summary.extend(test_push_errors.mismatch_errors)
-        summary.append('\n')
-
-    if test_push_errors.unknown_tests:
-        summary.append('%d test(s) are not expected to be run:' %
-                       len(test_push_errors.unknown_tests))
-        summary.extend(test_push_errors.unknown_tests)
-        summary.append('\n')
-
-    if test_push_errors.missing_tests:
-        summary.append('%d test(s) are missing from the results:' %
-                       len(test_push_errors.missing_tests))
-        summary.extend(test_push_errors.missing_tests)
-        summary.append('\n')
-
-    return summary
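
_match_test_results treats the expected results as a map of name patterns to status lists, checks every actual test against every pattern, and buckets the outcome into mismatches, unknown tests, and missing patterns. The core of that comparison, minus the ignore-list handling:

import re

def diff_results(actual, expected):
    mismatches, unknown, seen = [], [], set()
    for name, statuses in actual.items():
        matched = False
        for pattern, want in expected.items():
            if re.search(pattern, name):
                matched = True
                seen.add(pattern)
                if sorted(want) != sorted(statuses):
                    mismatches.append((name, want, statuses))
        if not matched:
            unknown.append(name)
    missing = sorted(set(expected) - seen)
    return mismatches, unknown, missing

# diff_results({'dummy_Pass.bluetooth': ['GOOD']}, {'dummy_Pass.*': ['GOOD']})
# -> ([], [], [])
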
diff --git a/site_utils/test_push_unittest.py b/site_utils/test_push_unittest.py
deleted file mode 100755
index fbc3d31..0000000
--- a/site_utils/test_push_unittest.py
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/usr/bin/python2
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import six
-import mox
-import unittest
-from six.moves import urllib
-
-import mock
-
-import common
-from autotest_lib.client.common_lib.cros import retry
-from autotest_lib.server import site_utils
-
-
-# Mock retry.retry used in test_push before importing test_push.
-retry.retry = mock.create_autospec(retry.retry, return_value=lambda func: func)
-from autotest_lib.site_utils import test_push
-
-class TestPushUnittests(mox.MoxTestBase):
-    """Unittest for test_push script."""
-
-    _ARGV = [
-        'command',
-        '--build', 'stumpy-release/R36-5881-0.0',
-        '--shard_build', 'quawks-release/R36-5881-0.0'
-    ]
-
-    def setUp(self):
-        """Initialize the unittest."""
-        super(TestPushUnittests, self).setUp()
-        # Overwrite expected test results.
-        test_push.EXPECTED_TEST_RESULTS = {
-            '^SERVER_JOB$':                  ['GOOD'],
-            '.*control.dependency$':         ['TEST_NA'],
-            '.*dummy_Fail.RetryFail$':       ['FAIL', 'FAIL'],
-            }
-        test_push.TKO = None
-
-
-    def stub_out_methods(self, test_views):
-        """Stub out methods in test_push module with given test results.
-
-        @param test_views: Desired test result views.
-
-        """
-        self.mox.UnsetStubs()
-        response = six.StringIO('some_value')
-        self.mox.StubOutWithMock(urllib.request, 'urlopen')
-        urllib.request.urlopen(mox.IgnoreArg()).AndReturn(response)
-
-        self.mox.StubOutWithMock(test_push, 'check_dut_image')
-        test_push.check_dut_image(mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(
-                None)
-
-        self.mox.StubOutWithMock(test_push, 'do_run_suite')
-        test_push.do_run_suite(
-                test_push.PUSH_TO_PROD_SUITE, mox.IgnoreArg(), mox.IgnoreArg(),
-                mox.IgnoreArg()).AndReturn((1))
-
-        self.mox.StubOutWithMock(site_utils, 'get_test_views_from_tko')
-        site_utils.get_test_views_from_tko(1, None).AndReturn(test_views)
-
-
-    def test_suite_success(self):
-        """Test test_suite method with matching results."""
-        test_views = {'SERVER_JOB':                        ['GOOD'],
-                      'dummy_fail/control.dependency':     ['TEST_NA'],
-                      'dummy_Fail.RetryFail':              ['FAIL', 'FAIL'],
-                      }
-
-        self.stub_out_methods(test_views)
-        self.mox.ReplayAll()
-        test_push.test_suite(test_push.PUSH_TO_PROD_SUITE, test_views,
-                             arguments=test_push.parse_arguments(self._ARGV))
-        self.mox.VerifyAll()
-
-
-    def test_suite_fail_with_missing_test(self):
-        """Test test_suite method that should fail with missing test."""
-        test_views = {'SERVER_JOB':                        ['GOOD'],
-                      'dummy_fail/control.dependency':     ['TEST_NA'],
-                      }
-
-        self.stub_out_methods(test_views)
-        self.mox.ReplayAll()
-        test_push.test_suite(test_push.PUSH_TO_PROD_SUITE, test_views,
-                             arguments=test_push.parse_arguments(self._ARGV))
-        self.mox.VerifyAll()
-
-
-    def test_suite_fail_with_unexpected_test_results(self):
-        """Test test_suite method that should fail with unexpected test results.
-        """
-        test_views = {'SERVER_JOB':                        ['FAIL'],
-                      'dummy_fail/control.dependency':     ['TEST_NA'],
-                      'dummy_Fail.RetryFail':              ['FAIL', 'FAIL'],
-                      }
-
-        self.stub_out_methods(test_views)
-        self.mox.ReplayAll()
-        test_push.test_suite(test_push.PUSH_TO_PROD_SUITE, test_views,
-                             arguments=test_push.parse_arguments(self._ARGV))
-        self.mox.VerifyAll()
-
-
-    def test_suite_fail_with_extra_test(self):
-        """Test test_suite method that should fail with extra test."""
-        test_views = {'SERVER_JOB':                        ['GOOD'],
-                      'dummy_fail/control.dependency':     ['TEST_NA'],
-                      'dummy_Fail.RetryFail':              ['FAIL', 'FAIL'],
-                      'dummy_Fail.ExtraTest':              ['GOOD'],
-                      }
-
-        self.stub_out_methods(test_views)
-        self.mox.ReplayAll()
-        test_push.test_suite(test_push.PUSH_TO_PROD_SUITE, test_views,
-                             arguments=test_push.parse_arguments(self._ARGV))
-        self.mox.VerifyAll()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/site_utils/test_runner_utils.py b/site_utils/test_runner_utils.py
index 7d6f2d8..6370db3 100755
--- a/site_utils/test_runner_utils.py
+++ b/site_utils/test_runner_utils.py
@@ -25,18 +25,15 @@
 logging.basicConfig(level=logging.INFO)
 
 import common
-from autotest_lib.client.common_lib.cros import dev_server, retry
+from autotest_lib.client.common_lib.cros import retry
 from autotest_lib.client.common_lib import logging_manager
 from autotest_lib.server.cros.dynamic_suite import suite, constants
-from autotest_lib.server.cros import provision
 from autotest_lib.server.hosts import factory
 from autotest_lib.server.hosts import file_store
 from autotest_lib.server.hosts import host_info
 from autotest_lib.server import autoserv_utils
 from autotest_lib.server import server_logging_config
 from autotest_lib.server import utils
-from autotest_lib.utils import labellib
-from six.moves import range
 
 
 _autoserv_proc = None
@@ -64,6 +61,10 @@
     """Raised when it fails to provision the DUT to the requested build."""
 
 
+class TestThatControlError(Exception):
+    """Raise when there is an issue the specified test's control file."""
+
+
 def add_common_args(parser):
     """
     Add common arguments for both test_that and test_droid to their parser.
@@ -95,11 +96,11 @@
     parser.add_argument('--pretend', action='store_true', default=False,
                         help='Print autoserv commands that would be run, '
                              'rather than running them.')
-    parser.add_argument('--no-experimental', action='store_true',
-                        default=False, dest='no_experimental',
-                        help='When scheduling a suite, skip any tests marked '
-                             'as experimental. Applies only to tests scheduled'
-                             ' via suite:[SUITE].')
+    parser.add_argument('--no-experimental',
+                        action='store_true',
+                        default=False,
+                        dest='no_experimental',
+                        help='DEPRECATED DO NOT USE.')
     parser.add_argument('--enforce-deps', action='store_true',
                         default=False, dest='enforce_deps',
                         help='Skip tests whose DEPENDENCIES can not '
@@ -178,77 +179,6 @@
             return self._jobs_to_tests[job_id].name
 
 
-
-def fetch_local_suite(autotest_path, suite_predicate, afe, test_arg, remote,
-                      build=NO_BUILD, board=NO_BOARD,
-                      results_directory=None, no_experimental=False,
-                      ignore_deps=True, job_retry=True):
-    """Create a suite from the given suite predicate.
-
-    Satisfaction of dependencies is enforced by Suite.schedule() if
-    ignore_deps is False. Note that this method assumes only one host,
-    i.e. |remote|, was added to afe. Suite.schedule() will not
-    schedule a job if none of the hosts in the afe (in our case,
-    just one host |remote|) has a label that matches a requested
-    test dependency.
-
-    @param autotest_path: Absolute path to autotest (in sysroot or
-                          custom autotest directory set by --autotest_dir).
-    @param suite_predicate: callable that takes ControlData objects, and
-                            returns True on those that should be in suite
-    @param afe: afe object to schedule against (typically a directAFE)
-    @param test_arg: String. An individual TEST command line argument, e.g.
-                     'login_CryptohomeMounted' or 'suite:smoke'.
-    @param remote: String representing the IP of the remote host.
-    @param build: Build to schedule suite for.
-    @param board: Board to schedule suite for.
-    @param results_directory: Absolute path of directory to store results in.
-                              (results will be stored in subdirectory of this).
-    @param no_experimental: Skip experimental tests when scheduling a suite.
-    @param ignore_deps: If True, test dependencies will be ignored.
-    @param job_retry: If False, tests will not be retried at all.
-
-    @returns: A LocalSuite object.
-
-    """
-    fs_getter = suite.create_fs_getter(autotest_path)
-    devserver = dev_server.ImageServer('')
-    my_suite = LocalSuite.create_from_predicates(
-        [suite_predicate],
-        {provision.CROS_VERSION_PREFIX: build},
-        constants.BOARD_PREFIX + board,
-        devserver, fs_getter, afe=afe,
-        ignore_deps=ignore_deps,
-        results_dir=results_directory,
-        forgiving_parser=False,
-        job_retry=job_retry
-    )
-    if len(my_suite.tests) == 0:
-        (similarity_predicate, similarity_description) = (
-                get_predicate_for_possible_test_arg(test_arg))
-        logging.error('No test found, searching for possible tests with %s',
-                      similarity_description)
-        possible_tests = suite.find_possible_tests(fs_getter,
-                                                         similarity_predicate)
-        raise ValueError('Found no tests. Check your suite name, test name, '
-                         'or test matching wildcard.\nDid you mean any of '
-                         'following tests?\n  %s' % '\n  '.join(possible_tests))
-
-    if not ignore_deps:
-        # Log tests whose dependencies can't be satisfied.
-        labels = [label.name for label in
-                  afe.get_labels(host__hostname=remote)]
-        for test in my_suite.tests:
-            if test.experimental and no_experimental:
-                continue
-            unsatisfiable_deps = set(test.dependencies).difference(labels)
-            if unsatisfiable_deps:
-                logging.warning('%s will be skipped, unsatisfiable '
-                             'test dependencies: %s', test.name,
-                             unsatisfiable_deps)
-    return my_suite
-
-
 def _run_autoserv(command, pretend=False):
     """Run autoserv command.
 
@@ -269,8 +199,7 @@
         # so that autoserv output can be displayed to the user
         # immediately.
         for message in iter(_autoserv_proc.stdout.readline, b''):
-            logging.info('autoserv| %s', message.rstrip())
-
+            logging.info('autoserv| %s', message.rstrip().decode('utf-8'))
         _autoserv_proc.wait()
         returncode = _autoserv_proc.returncode
         _autoserv_proc = None
@@ -324,10 +253,22 @@
     return results_directory
 
 
-def run_job(job, host, info, autotest_path, results_directory, fast_mode,
-            id_digits=1, ssh_verbosity=0, ssh_options=None,
-            args=None, pretend=False,
-            autoserv_verbose=False):
+def run_job(job,
+            host,
+            info,
+            autotest_path,
+            results_directory,
+            fast_mode,
+            id_digits=1,
+            ssh_verbosity=0,
+            ssh_options=None,
+            args=None,
+            pretend=False,
+            autoserv_verbose=False,
+            companion_hosts=None,
+            dut_servers=None,
+            is_cft=False,
+            ch_info=None):
     """
     Shell out to autoserv to run an individual test job.
 
@@ -349,14 +290,18 @@
     @param pretend: If True, will print out autoserv commands rather than
                     running them.
     @param autoserv_verbose: If true, pass the --verbose flag to autoserv.
+    @param companion_hosts: Companion hosts for the test.
+    @param dut_servers: DUT servers for the test.
+    @param ch_info: hostinfo for companion hosts.
 
     @returns: a tuple, return code of the job and absolute path of directory
               where results were stored.
     """
     with tempfile.NamedTemporaryFile() as temp_file:
-        temp_file.write(job.control_file)
+        temp_file.write(job.control_file.encode())
         temp_file.flush()
-        name_tail = job.name.split('/')[-1]
+
+        name_tail = job.ctrlname.split('/')[-1]
         results_directory = os.path.join(results_directory,
                                          'results-%0*d-%s' % (id_digits, job.id,
                                                               name_tail))
@@ -366,21 +311,33 @@
                            {constants.JOB_EXPERIMENTAL_KEY: job.keyvals[
                                    constants.JOB_EXPERIMENTAL_KEY]})
         _write_host_info(results_directory, _HOST_INFO_SUBDIR, host, info)
+
+        if ch_info:
+            for chost in companion_hosts.split(" "):
+                _write_host_info(results_directory, _HOST_INFO_SUBDIR, chost,
+                                 ch_info[chost], False)
+
         extra_args = [temp_file.name]
         if args:
             extra_args.extend(['--args', args])
 
         command = autoserv_utils.autoserv_run_job_command(
                 os.path.join(autotest_path, 'server'),
-                machines=host, job=job, verbose=autoserv_verbose,
+                machines=host,
+                job=job,
+                verbose=autoserv_verbose,
                 results_directory=results_directory,
-                fast_mode=fast_mode, ssh_verbosity=ssh_verbosity,
+                fast_mode=fast_mode,
+                ssh_verbosity=ssh_verbosity,
                 ssh_options=ssh_options,
                 extra_args=extra_args,
                 no_console_prefix=True,
                 use_packaging=False,
                 host_attributes=info.attributes,
-                host_info_subdir=_HOST_INFO_SUBDIR)
+                host_info_subdir=_HOST_INFO_SUBDIR,
+                companion_hosts=companion_hosts,
+                dut_servers=dut_servers,
+                is_cft=is_cft)
 
         code = _run_autoserv(command, pretend)
         return code, results_directory
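
The .encode() added to the control-file write is a Python 3 requirement: tempfile.NamedTemporaryFile defaults to binary mode, so str content must be turned into bytes first. A quick demonstration:

import tempfile

control_text = "job.run_test('stub_Pass')\n"  # illustrative control file body
with tempfile.NamedTemporaryFile() as tmp:
    tmp.write(control_text.encode())  # bytes are required in binary mode
    tmp.flush()
    with open(tmp.name) as readable:
        print(readable.read())
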
@@ -463,7 +420,7 @@
     """Add an ssh identity to the agent.
 
     TODO (sbasi) b/26186193: Add support for test_droid and make TEST_KEY_PATH
-    not Chrome OS specific.
+    not ChromeOS specific.
 
     @param temp_directory: A directory to copy the |private key| into.
     @param ssh_private_key: Path to the ssh private key to use for testing.
@@ -487,13 +444,11 @@
                         'may fail.')
 
 
-def _auto_detect_labels(afe, remote):
-    """Automatically detect host labels and add them to the host in afe.
+def _auto_detect_labels(remote):
+    """Automatically detect host labels and return them.
 
     Note that the label of board will not be auto-detected.
-    This method assumes the host |remote| has already been added to afe.
 
-    @param afe: A direct_afe object used to interact with local afe database.
     @param remote: The hostname of the remote device.
 
     @returns: the detected labels as a list of strings.
@@ -501,21 +456,35 @@
     cros_host = factory.create_host(remote)
     labels_to_create = [label for label in cros_host.get_labels()
                         if not label.startswith(constants.BOARD_PREFIX)]
-    labels_to_add_to_afe_host = []
-    for label in labels_to_create:
-        new_label = afe.create_label(label)
-        labels_to_add_to_afe_host.append(new_label.name)
-    hosts = afe.get_hosts(hostname=remote)
-    if not hosts:
-        raise TestThatRunError('Unexpected error: %s has not '
-                               'been added to afe.' % remote)
-    afe_host = hosts[0]
-    afe_host.add_labels(labels_to_add_to_afe_host)
-    return labels_to_add_to_afe_host
+    return labels_to_create
 
 
-def perform_local_run(afe,
-                      autotest_path,
+def get_all_control_files(test, autotest_path):
+    """Get all control files for specified test in the given autotest_path.
+
+    @param test: name of the test or suite to fetch
+    @param autotest_path: Absolute path of autotest installed in sysroot.
+    """
+    (predicate, description) = get_predicate_for_test_arg(test)
+    logging.info('Fetching suite for %s...', description)
+    return get_control_files(autotest_path=autotest_path, pred=predicate)
+
+
+def get_possible_tests(test, autotest_path):
+    """Find tests similar to |test| and exit with a list of suggestions.
+
+    @param test: name of the test or suite that produced no matches.
+    @param autotest_path: Absolute path of autotest installed in sysroot.
+    """
+    fs_getter = suite.create_fs_getter(autotest_path)
+
+    (similarity_predicate,
+     similarity_description) = (get_predicate_for_possible_test_arg(test))
+
+    logging.error('No test found, searching for possible tests with %s',
+                  similarity_description)
+    possible_tests = suite.find_possible_tests(fs_getter, similarity_predicate)
+    raise SystemExit('Found no tests. Check your suite name, test name, '
+                     'or test matching wildcard.\nDid you mean any of '
+                     'following tests?\n  %s' % '\n  '.join(possible_tests))
+
+
+def perform_local_run(autotest_path,
                       tests,
                       remote,
                       fast_mode,
@@ -524,7 +493,6 @@
                       model=NO_MODEL,
                       args=None,
                       pretend=False,
-                      no_experimental=False,
                       ignore_deps=True,
                       results_directory=None,
                       ssh_verbosity=0,
@@ -532,13 +500,18 @@
                       autoserv_verbose=False,
                       iterations=1,
                       host_attributes={},
-                      job_retry=True):
+                      job_retry=True,
+                      companion_hosts=None,
+                      minus=[],
+                      dut_servers=None,
+                      is_cft=False,
+                      host_labels=None,
+                      label=None):
     """Perform local run of tests.
 
     This method enforces satisfaction of test dependencies for tests that are
     run as a part of a suite.
 
-    @param afe: A direct_afe object used to interact with local afe database.
     @param autotest_path: Absolute path of autotest installed in sysroot or
                           custom autotest path set by --autotest_dir.
     @param tests: List of strings naming tests and suites to run. Suite strings
@@ -552,8 +525,6 @@
                 and then ultimately to the test itself.
     @param pretend: If True, will print out autoserv commands rather than
                     running them.
-    @param no_experimental: Skip experimental tests when scheduling a suite.
-    @param ignore_deps: If True, test dependencies will be ignored.
     @param results_directory: Directory to store results in. Defaults to None,
                               in which case results will be stored in a new
                               subdirectory of /tmp
@@ -564,129 +535,106 @@
     @param iterations: int number of times to schedule tests.
     @param host_attributes: Dict of host attributes to pass into autoserv.
     @param job_retry: If False, tests will not be retried at all.
+    @param companion_hosts: companion hosts for the test.
+    @param dut_servers: dut servers for the test.
+    @param label: Optional label to use for the jobname. Will be appended to
+        the keyval file via server_job.
 
     @returns: A list of return codes each job that has run. Or [1] if
               provision failed prior to running any jobs.
     """
     args = _set_default_servo_args(args)
-    # Create host in afe, add board and build labels.
-    cros_version_label = labellib.format_keyval_label(
-        labellib.KeyvalLabel(labellib.Key.CROS_VERSION, build))
 
-    build_label = afe.create_label(cros_version_label)
-    board_label = afe.create_label(constants.BOARD_PREFIX + board)
-    model_label = afe.create_label(constants.MODEL_PREFIX + model)
-    labels = [build_label.name, board_label.name, model_label.name]
+    # version doesn't really matter for local runs...
+    if not host_labels:
+        host_labels = [
+                u'cros-version:ad_hoc_build',
+                u'board:%s' % board,
+                u'model:%s' % model
+        ]
+        if not ignore_deps:
+            logging.info('Auto-detecting labels for %s', remote)
+            # Auto-detected labels may duplicate explicitly set ones.
+            host_labels += list(set(_auto_detect_labels(remote)))
 
-    new_host = afe.create_host(remote)
-    new_host.add_labels(labels)
-    if not ignore_deps:
-        logging.info('Auto-detecting labels for %s', remote)
-        labels += _auto_detect_labels(afe, remote)
-        # Auto-detected labels may duplicate explicitly set ones.
-        labels = list(set(labels))
+    else:
+        host_labels = host_labels.split(" ")
+    info = host_info.HostInfo(host_labels, host_attributes)
 
-    info = host_info.HostInfo(labels, host_attributes)
+    # If using test_that, there needs to be a hostinfo file (even if blank)
+    # for each host (including companions).
+    # TODO: Determine if we want to auto-detect labels, and/or expose
+    # CLI options for them (which might be required in CFT)
+    ch_info = {}
+    if companion_hosts:
+        for chost in companion_hosts.split(" "):
+            chost_labels = []
+            if not ignore_deps:
+                logging.info('Auto-detecting labels for %s', chost)
+                # Auto-detected labels may duplicate explicitly set ones.
+                chost_labels += list(set(_auto_detect_labels(chost)))
+            ch_info[chost] = host_info.HostInfo(chost_labels, {})
 
-    # Provision the host to |build|.
-    if build != NO_BUILD:
-        logging.info('Provisioning %s...', cros_version_label)
-        try:
-            run_provisioning_job(
-                cros_version_label,
-                remote,
-                info,
-                autotest_path,
-                results_directory,
-                fast_mode,
-                ssh_verbosity,
-                ssh_options,
-                pretend,
-                autoserv_verbose,
-            )
-        except TestThatProvisioningError as e:
-            logging.error('Provisioning %s to %s failed, tests are aborted, '
-                          'failure reason: %s',
-                          remote, cros_version_label, e)
-            return [1]
+    job_queue = []
+    test_num = 0
 
-    # Create suites that will be scheduled.
-    suites_and_descriptions = []
-    for test in tests:
-        (predicate, description) = get_predicate_for_test_arg(test)
-        logging.info('Fetching suite for %s...', description)
-        suite = fetch_local_suite(autotest_path, predicate, afe, test_arg=test,
-                                  remote=remote,
-                                  build=build, board=board,
-                                  results_directory=results_directory,
-                                  no_experimental=no_experimental,
-                                  ignore_deps=ignore_deps,
-                                  job_retry=job_retry)
-        suites_and_descriptions.append((suite, description))
+    m_queue = []
+    for m in minus:
+        ctrl_files = get_all_control_files(m, autotest_path)
+        for ctrl in ctrl_files:
+            m_queue.append(ctrl)
 
-    jobs_to_suites = {}
-    null_logger = lambda log_entry, log_in_subdir=False: None
-    # Schedule the suites, looping over iterations if necessary.
-    for iteration in range(iterations):
-        if iteration > 0:
-            logging.info('Repeating scheduling for iteration %d:', iteration)
-
-        for suite, description in suites_and_descriptions:
-            logging.info('Scheduling suite for %s...', description)
-            ntests = suite.schedule(null_logger)
-            logging.debug('jobs: %s nonzero job_retries: %s',
-                          len(suite._jobs_to_tests),
-                          len([True for (job_id, test) in
-                               suite._jobs_to_tests.items()]))
-            logging.info('... scheduled %s job(s).', ntests)
-            for job in suite.jobs:
-                jobs_to_suites[job.id] = suite
-
-    if not afe.get_jobs():
-        logging.info('No jobs scheduled. End of local run.')
-        return []
-
-    last_job_id = afe.get_jobs()[-1].id
-    job_id_digits = len(str(last_job_id))
+    if iterations > 1:
+        logging.info("Scheduling for %s iterations", iterations)
+    for _ in range(iterations):
+        for test in tests:
+            ctrl_files = get_all_control_files(test, autotest_path)
+            if len(ctrl_files) == 0:
+                get_possible_tests(test, autotest_path)
+            for control in ctrl_files:
+                if any([control.name == no_run.name for no_run in m_queue]):
+                    continue
+                test_num += 1
+                if label:
+                    name = label
+                else:
+                    name = "adhoc/{}".format(control.name)
+                job = SimpleJob(name=name,
+                                owner='autotest_system',
+                                test_num=test_num,
+                                ctrlname=control.name)
+                job.set_control_file(control)
+                if ignore_deps:
+                    job_queue.append(job)
+                elif job.deps_satisfied(host_labels):
+                    job_queue.append(job)
+    _set_pyversion(job_queue)
     codes = []
-    job_queue = afe.get_jobs()
-    completed_job_ids = set()
-    while job_queue:
+    job_id_digits = 0
+    for job in job_queue:
         logging.info('%s jobs in job queue', len(job_queue))
-        for job in job_queue:
-            suite = jobs_to_suites.get(job.id)
-            if not suite:
-                logging.error('Job %s not run, no associated suite.', job.id)
-            else:
-                logging.debug('Running job %s of test %s', job.id,
-                              suite.test_name_from_job(job.id))
-                code, abs_dir = run_job(
-                        job,
-                        remote,
-                        info,
-                        autotest_path,
-                        results_directory,
-                        fast_mode,
-                        job_id_digits,
-                        ssh_verbosity,
-                        ssh_options,
-                        args,
-                        pretend,
-                        autoserv_verbose,
-                )
-                codes.append(code)
-                logging.debug("Code: %s, Results in %s", code, abs_dir)
-                new_id = suite.handle_local_result(job.id, abs_dir,
-                                                   null_logger)
-                if new_id:
-                    jobs_to_suites[new_id] = jobs_to_suites[job.id]
-            completed_job_ids.add(job.id)
-        all_jobs = afe.get_jobs(not_yet_run=True, running=True)
-        new_jobs = set(job for job in all_jobs
-                       if job.id not in completed_job_ids)
-        logging.debug('%s incomplete jobs, %s jobs total', len(new_jobs),
-                      len(all_jobs))
-        job_queue = list(new_jobs)
+        # Could also use math.log10, but for a single conversion it is not worth it.
+        job_id_digits = len(str(job.id))
+        logging.debug('Running job %s of test %s', job.id, (job.name))
+        code, abs_dir = run_job(job=job,
+                                host=remote,
+                                info=info,
+                                autotest_path=autotest_path,
+                                results_directory=results_directory,
+                                fast_mode=fast_mode,
+                                id_digits=job_id_digits,
+                                ssh_verbosity=ssh_verbosity,
+                                ssh_options=ssh_options,
+                                args=args,
+                                pretend=pretend,
+                                autoserv_verbose=autoserv_verbose,
+                                companion_hosts=companion_hosts,
+                                dut_servers=dut_servers,
+                                is_cft=is_cft,
+                                ch_info=ch_info)
+        codes.append(code)
+        logging.debug("Code: %s, Results in %s", code, abs_dir)
+
     return codes
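
The rewritten perform_local_run no longer schedules through a local AFE: it expands each test argument into control files, drops anything named by --minus, and keeps only jobs whose dependencies are covered by the host labels. A simplified sketch of that selection, with a hypothetical get_controls(test) helper returning objects that carry .name and .dependencies:

def build_job_queue(tests, minus, host_labels, get_controls, ignore_deps=False):
    # Control files named by any --minus argument are excluded outright.
    skip = {ctrl.name for m in minus for ctrl in get_controls(m)}
    queue = []
    for test in tests:
        for ctrl in get_controls(test):
            if ctrl.name in skip:
                continue
            if ignore_deps or set(ctrl.dependencies) <= set(host_labels):
                queue.append(ctrl)
    return queue
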
 
 
@@ -785,11 +733,13 @@
                 raise
     return results_directory
 
+
 def generate_report(directory,
                     allow_chrome_crashes=False,
                     just_status_code=False,
-                    html_report=False):
-    """Parse the test result files in the given directory into a report
+                    html_report=False,
+                    is_cft=False):
+    """Parse the test result files in the given directory into a report.
 
     @param directory: string, the absolute path of the directory to look in
     @param allow_chrome_crashes: boolean, ignore Chrome crashes in the
@@ -801,6 +751,8 @@
                                         'generate_test_report')]
     # Experimental test results do not influence the exit code.
     test_report_command.append('--ignore_experimental_tests')
+    if is_cft:
+        test_report_command.append('--cft')
     if html_report:
         test_report_command.append('--html')
         test_report_command.append('--html-report-dir=%s' % directory)
@@ -826,7 +778,6 @@
                                    model=NO_MODEL,
                                    args=None,
                                    pretend=False,
-                                   no_experimental=False,
                                    ignore_deps=True,
                                    results_directory=None,
                                    ssh_verbosity=0,
@@ -836,7 +787,13 @@
                                    debug=False,
                                    allow_chrome_crashes=False,
                                    host_attributes={},
-                                   job_retry=True):
+                                   job_retry=True,
+                                   companion_hosts=None,
+                                   minus=[],
+                                   dut_servers=None,
+                                   is_cft=False,
+                                   host_labels=None,
+                                   label=None):
     """
     Perform a test_that run, from the |autotest_path|.
 
@@ -856,7 +813,6 @@
                  and then ultimately to the test itself.
     @param pretend: If True, will print out autoserv commands rather than
                     running them.
-    @param no_experimental: Skip experimental tests when scheduling a suite.
     @param ignore_deps: If True, test dependencies will be ignored.
     @param results_directory: Directory to store results in. Defaults to None,
                               in which case results will be stored in a new
@@ -871,6 +827,10 @@
     @param allow_chrome_crashes: If True, allow chrome crashes.
     @param host_attributes: Dict of host attributes to pass into autoserv.
     @param job_retry: If False, tests will not be retried at all.
+    @param companion_hosts: Companion hosts for the test.
+    @param minus: List of tests to not run.
+    @param dut_servers: DUT servers for the test.
+    @param is_cft: If True, running in (or mocking) the CFT environment.
+    @param host_labels: Quoted, space-separated string of host labels to use
+        for the host info.
+    @param label: Optional label to use for the job name. Will be appended to
+        the keyval file via server_job.
 
     @return: A return code that test_that should exit with.
     """
@@ -891,9 +851,7 @@
     signal.signal(signal.SIGINT, sigint_handler)
     signal.signal(signal.SIGTERM, sigint_handler)
 
-    afe = setup_local_afe()
-    codes = perform_local_run(afe,
-                              autotest_path,
+    codes = perform_local_run(autotest_path,
                               tests,
                               remote,
                               fast_mode,
@@ -902,7 +860,6 @@
                               model,
                               args=args,
                               pretend=pretend,
-                              no_experimental=no_experimental,
                               ignore_deps=ignore_deps,
                               results_directory=results_directory,
                               ssh_verbosity=ssh_verbosity,
@@ -910,14 +867,21 @@
                               autoserv_verbose=debug,
                               iterations=iterations,
                               host_attributes=host_attributes,
-                              job_retry=job_retry)
+                              job_retry=job_retry,
+                              companion_hosts=companion_hosts,
+                              minus=minus,
+                              dut_servers=dut_servers,
+                              is_cft=is_cft,
+                              host_labels=host_labels,
+                              label=label)
     if pretend:
         logging.info('Finished pretend run. Exiting.')
         return 0
 
     final_result = generate_report(results_directory,
                                    allow_chrome_crashes=allow_chrome_crashes,
-                                   html_report=True)
+                                   html_report=True,
+                                   is_cft=is_cft)
     try:
         os.unlink(_LATEST_RESULTS_DIRECTORY)
     except OSError:
@@ -934,15 +898,82 @@
     return final_result
 
 
-def _write_host_info(results_dir, host_info_subdir, hostname, info):
+def _write_host_info(results_dir,
+                     host_info_subdir,
+                     hostname,
+                     info,
+                     new_dir=True):
     """ Write HostInfo to a FileStore to be used by autoserv.
 
-    @param results_dir: Path to he results directory.
+    @param results_dir: Path to the results directory.
     @param host_info_subdir: Subdirectory of results directory for host info.
     @param hostname: Hostname passed into autoserv.
     @param info: hosts.HostInfo to write.
     """
     d = os.path.join(results_dir, host_info_subdir)
-    os.makedirs(d)
+    if new_dir:
+        os.makedirs(d)
     store = file_store.FileStore(os.path.join(d, '%s.store' % hostname))
     store.commit(info)
+
+
+class SimpleJob(object):
+    """
+    A simple job for running autotests without an AFE.
+
+    The goal here is to remove the dependencies on frontend/afe and its
+    dependent libs. Going forward, autotests will be run in two ways: the
+    Skylab world and test_that. Skylab invokes autoserv directly, bypassing
+    all of this. test_that is a CLI, not a UI, and should be split free of
+    the AFE libs.
+    """
+
+    def __init__(self,
+                 owner,
+                 name,
+                 control_type='client',
+                 test_num=1,
+                 ctrlname=None):
+        self.owner = owner
+        self.name = name
+        self.control_type = control_type
+        self.id = test_num
+        self.keyvals = {'experimental': False}
+        self.dependencies = set()
+        self.py_version = None
+        self.ctrlname = ctrlname
+
+    def set_control_file(self, control):
+        self.control_file = control.text
+        self.control_type = control.test_type.capitalize()
+        if hasattr(control, 'dependencies'):
+            self.dependencies = set(control.dependencies)
+        if control.py_version and control.py_version not in (2, 3):
+            raise TestThatControlError(
+                    "Test py_version not compatible. Expected 2 or 3 got %s" %
+                    control.py_version)
+        self.py_version = control.py_version
+
+    def deps_satisfied(self, labels):
+        """Verify the deps for this job are satisfied on the given labels"""
+        return self.dependencies.issubset(labels)
+
+
+def _set_pyversion(tests):
+    """If there is a py_version specified, set it in the env.
+
+    If not, set it to 2. If 2 is set, lock the entire suite into 2.
+    Different versions in the same suite is *not* supported.
+    """
+    set2 = all(v.py_version == 2 for v in tests)
+    set3 = all(v.py_version == 3 for v in tests)
+    if not set2 and not set3:
+        return
+    if set2:
+        os.environ['PY_VERSION'] = "2"
+    elif set3:
+        os.environ['PY_VERSION'] = "3"
+
+
+def get_control_files(autotest_path, pred):
+    """Find and parse all control files under |autotest_path| matching |pred|."""
+    cf_getter = suite.create_fs_getter(autotest_path)
+    return list(suite.find_and_parse_tests(cf_getter, pred))
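
To make the intent of the new AFE-free helpers concrete, here is a minimal
sketch (not part of the change itself) of how get_control_files, SimpleJob,
deps_satisfied and _set_pyversion could be combined to build a local job list.
The predicate, path and label values are hypothetical placeholders, and it
assumes each parsed control file exposes name, text, test_type, dependencies
and py_version attributes, as the code above does.

    # Hypothetical sketch, written as if it lived alongside the helpers above.
    def build_local_jobs(autotest_path, pred, host_labels):
        """Build SimpleJobs for matching control files whose deps are met."""
        tests = get_control_files(autotest_path, pred)
        # Pin PY_VERSION only when every test agrees on a version.
        _set_pyversion(tests)
        jobs = []
        for test_num, control in enumerate(tests, start=1):
            job = SimpleJob(owner='test_that',
                            name=control.name,
                            test_num=test_num,
                            ctrlname=control.name)
            job.set_control_file(control)
            # Skip tests whose DEPENDENCIES are not covered by the host labels.
            if job.deps_satisfied(set(host_labels)):
                jobs.append(job)
        return jobs
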
diff --git a/site_utils/test_runner_utils_unittest.py b/site_utils/test_runner_utils_unittest.py
index 2c87bad..dd9155e 100755
--- a/site_utils/test_runner_utils_unittest.py
+++ b/site_utils/test_runner_utils_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright 2015 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,9 +7,12 @@
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
-import os, unittest
-import mox
+import os
+import unittest
+from unittest.mock import patch
+
 import common
+
 import shutil
 import tempfile
 import types
@@ -18,56 +21,90 @@
 from autotest_lib.server.cros.dynamic_suite import suite as suite_module
 from autotest_lib.server.hosts import host_info
 from autotest_lib.site_utils import test_runner_utils
-from six.moves import range
-from six.moves import zip
 
 
-class StartsWithList(mox.Comparator):
-    def __init__(self, start_of_list):
-        """Mox comparator which returns True if the argument
-        to the mocked function is a list that begins with the elements
-        in start_of_list.
-        """
-        self._lhs = start_of_list
+class TypeMatcher(object):
+    """Matcher for object is of type."""
 
-    def equals(self, rhs):
-        if len(rhs)<len(self._lhs):
+    def __init__(self, expected_type):
+        self.expected_type = expected_type
+
+    def __eq__(self, other):
+        return isinstance(other, self.expected_type)
+
+
+class JobMatcher(object):
+    """Matcher for JobObject + Name."""
+
+    def __init__(self, expected_type, name):
+        self.expected_type = expected_type
+        self.name = name
+
+    def __eq__(self, other):
+        return (isinstance(other, self.expected_type)
+                and self.name in other.name)
+
+
+class hostinfoMatcher(object):
+    """Match hostinfo stuff"""
+
+    def __init__(self, labels, attributes):
+        self.labels = labels.split(' ')
+        self.attributes = attributes
+
+    def __eq__(self, other):
+        return self.labels == other.labels and self.attributes == other.attributes
+
+
+class ContainsMatcher:
+    """Matcher for object contains attr."""
+
+    def __init__(self, key, value):
+        self.key = key
+        self.value = value
+
+    def __eq__(self, rhs):
+        try:
+            return getattr(rhs, self.key) == self.value
+        except Exception:
             return False
-        for (x, y) in zip(self._lhs, rhs):
-            if x != y:
-                return False
-        return True
 
 
-class ContainsSublist(mox.Comparator):
-    def __init__(self, sublist):
-        """Mox comparator which returns True if the argument
-        to the mocked function is a list that contains sublist
-        as a sub-list.
-        """
-        self._sublist = sublist
+class SampleJob(object):
+    """Sample to be used for mocks."""
 
-    def equals(self, rhs):
-        n = len(self._sublist)
-        if len(rhs)<n:
-            return False
-        return any((self._sublist == rhs[i:i+n])
-                   for i in range(len(rhs) - n + 1))
-
-class DummyJob(object):
     def __init__(self, id=1):
         self.id = id
 
-class TestRunnerUnittests(mox.MoxTestBase):
 
-    def setUp(self):
-        mox.MoxTestBase.setUp(self)
+class FakeTests(object):
+    """A fake test to be used for mocks."""
+
+    def __init__(self, text, deps=[], py_version=None):
+        self.text = text
+        self.test_type = 'client'
+        self.dependencies = deps
+        self.name = text
+        self.py_version = py_version
 
 
-    def test_fetch_local_suite(self):
-        # Deferred until fetch_local_suite knows about non-local builds.
-        pass
+class TestRunnerUnittests(unittest.TestCase):
+    """Test test_runner_utils."""
 
+    autotest_path = 'ottotest_path'
+    suite_name = 'sweet_name'
+    test_arg = 'suite:' + suite_name
+    remote = 'remoat'
+    build = 'bild'
+    board = 'bored'
+    fast_mode = False
+    suite_control_files = ['c1', 'c2', 'c3', 'c4']
+    results_dir = '/tmp/test_that_results_fake'
+    id_digits = 1
+    ssh_verbosity = 2
+    ssh_options = '-F /dev/null -i /dev/null'
+    args = 'matey'
+    retry = True
 
     def _results_directory_from_results_list(self, results_list):
         """Generate a temp directory filled with provided test results.
@@ -89,16 +126,18 @@
                 status.flush()
         return global_dir
 
-
     def test_handle_local_result_for_good_test(self):
-        getter = self.mox.CreateMock(control_file_getter.DevServerGetter)
-        getter.get_control_file_list(suite_name=mox.IgnoreArg()).AndReturn([])
-        job = DummyJob()
-        test = self.mox.CreateMock(control_data.ControlData)
+        patcher = patch.object(control_file_getter, 'DevServerGetter')
+        getter = patcher.start()
+        self.addCleanup(patcher.stop)
+        getter.get_control_file_list.return_value = []
+        job = SampleJob()
+
+        test_patcher = patch.object(control_data, 'ControlData')
+        test = test_patcher.start()
+        self.addCleanup(test_patcher.stop)
         test.job_retries = 5
-        self.mox.StubOutWithMock(test_runner_utils.LocalSuite,
-                                 '_retry_local_result')
-        self.mox.ReplayAll()
+
         suite = test_runner_utils.LocalSuite([], "tag", [], None, getter,
                                              job_retry=True)
         suite._retry_handler = suite_module.RetryHandler({job.id: test})
@@ -112,18 +151,25 @@
         self.assertIsNone(new_id)
         shutil.rmtree(directory)
 
-
     def test_handle_local_result_for_bad_test(self):
-        getter = self.mox.CreateMock(control_file_getter.DevServerGetter)
-        getter.get_control_file_list(suite_name=mox.IgnoreArg()).AndReturn([])
-        job = DummyJob()
-        test = self.mox.CreateMock(control_data.ControlData)
+        patcher = patch.object(control_file_getter, 'DevServerGetter')
+        getter = patcher.start()
+        self.addCleanup(patcher.stop)
+        getter.get_control_file_list.return_value = []
+
+        job = SampleJob()
+
+        test_patcher = patch.object(control_data, 'ControlData')
+        test = test_patcher.start()
+        self.addCleanup(test_patcher.stop)
         test.job_retries = 5
-        self.mox.StubOutWithMock(test_runner_utils.LocalSuite,
-                                 '_retry_local_result')
-        test_runner_utils.LocalSuite._retry_local_result(
-            job.id, mox.IgnoreArg()).AndReturn(42)
-        self.mox.ReplayAll()
+
+        utils_mock = patch.object(test_runner_utils.LocalSuite,
+                                  '_retry_local_result')
+        test_runner_utils_mock = utils_mock.start()
+        self.addCleanup(utils_mock.stop)
+        test_runner_utils_mock.return_value = 42
+
         suite = test_runner_utils.LocalSuite([], "tag", [], None, getter,
                                              job_retry=True)
         suite._retry_handler = suite_module.RetryHandler({job.id: test})
@@ -171,112 +217,244 @@
             self.assertTrue(isinstance(desc, str))
 
     def test_perform_local_run(self):
-        afe = test_runner_utils.setup_local_afe()
-        autotest_path = 'ottotest_path'
-        suite_name = 'sweet_name'
-        test_arg = 'suite:' + suite_name
-        remote = 'remoat'
-        build = 'bild'
-        board = 'bored'
-        fast_mode = False
-        suite_control_files = ['c1', 'c2', 'c3', 'c4']
-        results_dir = '/tmp/test_that_results_fake'
-        id_digits = 1
-        ssh_verbosity = 2
-        ssh_options = '-F /dev/null -i /dev/null'
-        args = 'matey'
-        ignore_deps = False
-        retry = True
+        """Test a local run that should pass."""
+        patcher = patch.object(test_runner_utils, '_auto_detect_labels')
+        _auto_detect_labels_mock = patcher.start()
+        self.addCleanup(patcher.stop)
 
-        # Fake suite objects that will be returned by fetch_local_suite
-        class fake_suite(object):
-            def __init__(self, suite_control_files, hosts):
-                self._suite_control_files = suite_control_files
-                self._hosts = hosts
-                self._jobs = []
-                self._jobs_to_tests = {}
-                self.retry_hack = True
+        patcher2 = patch.object(test_runner_utils, 'get_all_control_files')
+        get_all_control_files_mock = patcher2.start()
+        self.addCleanup(patcher2.stop)
 
-            def schedule(self, *args, **kwargs):
-                for control_file in self._suite_control_files:
-                    job_id = afe.create_job(control_file, hosts=self._hosts)
-                    self._jobs.append(job_id)
-                    self._jobs_to_tests[job_id] = control_file
+        _auto_detect_labels_mock.return_value = [
+                'os:cros', 'has_chameleon:True'
+        ]
 
-            def handle_local_result(self, job_id, results_dir, logger,
-                                    **kwargs):
-                if results_dir == "success_directory":
-                    return None
-                retries = True
-                if 'retries' in kwargs:
-                    retries = kwargs['retries']
-                if retries and self.retry_hack:
-                    self.retry_hack = False
-                else:
-                    return None
-                control_file = self._jobs_to_tests.get(job_id)
-                job_id = afe.create_job(control_file, hosts=self._hosts)
-                self._jobs.append(job_id)
-                self._jobs_to_tests[job_id] = control_file
-                return job_id
+        get_all_control_files_mock.return_value = [
+                FakeTests(test, deps=['has_chameleon:True'])
+                for test in self.suite_control_files
+        ]
 
-            @property
-            def jobs(self):
-                return self._jobs
+        patcher3 = patch.object(test_runner_utils, 'run_job')
+        run_job_mock = patcher3.start()
+        self.addCleanup(patcher3.stop)
 
-            def test_name_from_job(self, id):
-                return ""
+        run_job_mock.return_value = (0, '/fake/dir')
+        test_runner_utils.perform_local_run(self.autotest_path,
+                                            ['suite:' + self.suite_name],
+                                            self.remote,
+                                            self.fast_mode,
+                                            build=self.build,
+                                            board=self.board,
+                                            ssh_verbosity=self.ssh_verbosity,
+                                            ssh_options=self.ssh_options,
+                                            args=self.args,
+                                            results_directory=self.results_dir,
+                                            job_retry=self.retry,
+                                            ignore_deps=False,
+                                            minus=[])
 
-        # Mock out scheduling of suite and running of jobs.
-        self.mox.StubOutWithMock(test_runner_utils, 'fetch_local_suite')
-        test_runner_utils.fetch_local_suite(autotest_path, mox.IgnoreArg(),
-                afe, test_arg=test_arg, remote=remote, build=build,
-                board=board, results_directory=results_dir,
-                no_experimental=False,
-                ignore_deps=ignore_deps,
-                job_retry=retry
-                ).AndReturn(fake_suite(suite_control_files, [remote]))
-        self.mox.StubOutWithMock(test_runner_utils, 'run_job')
-        self.mox.StubOutWithMock(test_runner_utils, 'run_provisioning_job')
-        self.mox.StubOutWithMock(test_runner_utils, '_auto_detect_labels')
+        run_job_mock.assert_called_with(job=TypeMatcher(
+                test_runner_utils.SimpleJob),
+                                        host=self.remote,
+                                        info=TypeMatcher(host_info.HostInfo),
+                                        autotest_path=self.autotest_path,
+                                        results_directory=self.results_dir,
+                                        fast_mode=self.fast_mode,
+                                        id_digits=self.id_digits,
+                                        ssh_verbosity=self.ssh_verbosity,
+                                        ssh_options=self.ssh_options,
+                                        args=TypeMatcher(str),
+                                        pretend=False,
+                                        autoserv_verbose=False,
+                                        companion_hosts=None,
+                                        dut_servers=None,
+                                        is_cft=False,
+                                        ch_info={})
 
-        test_runner_utils._auto_detect_labels(afe, remote).AndReturn([])
-        # Test perform_local_run. Enforce that run_provisioning_job,
-        # run_job and _auto_detect_labels are called correctly.
-        test_runner_utils.run_provisioning_job(
-                'cros-version:' + build,
-                remote,
-                mox.IsA(host_info.HostInfo),
-                autotest_path,
-                results_dir,
-                fast_mode,
-                ssh_verbosity,
-                ssh_options,
-                False,
-                False,
-        )
+    def test_perform_local_run_missing_deps(self):
+        """Test a local run with missing dependencies. No tests should run."""
+        patcher = patch.object(test_runner_utils, '_auto_detect_labels')
+        getter = patcher.start()
+        self.addCleanup(patcher.stop)
 
-        for control_file in suite_control_files:
-            test_runner_utils.run_job(
-                    mox.ContainsAttributeValue('control_file', control_file),
-                    remote,
-                    mox.IsA(host_info.HostInfo),
-                    autotest_path,
-                    results_dir,
-                    fast_mode,
-                    id_digits,
-                    ssh_verbosity,
-                    ssh_options,
-                    mox.StrContains(args),
-                    False,
-                    False,
-            ).AndReturn((0, '/fake/dir'))
-        self.mox.ReplayAll()
-        test_runner_utils.perform_local_run(
-                afe, autotest_path, ['suite:'+suite_name], remote, fast_mode,
-                build=build, board=board, ignore_deps=False,
-                ssh_verbosity=ssh_verbosity, ssh_options=ssh_options,
-                args=args, results_directory=results_dir, job_retry=retry)
+        getter.return_value = ['os:cros', 'has_chameleon:True']
+
+        patcher2 = patch.object(test_runner_utils, 'get_all_control_files')
+        test_runner_utils_mock = patcher2.start()
+        self.addCleanup(patcher2.stop)
+        test_runner_utils_mock.return_value = [
+                FakeTests(test, deps=['has_chameleon:False'])
+                for test in self.suite_control_files
+        ]
+
+        res = test_runner_utils.perform_local_run(
+                self.autotest_path, ['suite:' + self.suite_name],
+                self.remote,
+                self.fast_mode,
+                build=self.build,
+                board=self.board,
+                ssh_verbosity=self.ssh_verbosity,
+                ssh_options=self.ssh_options,
+                args=self.args,
+                results_directory=self.results_dir,
+                job_retry=self.retry,
+                ignore_deps=False,
+                minus=[])
+
+        # Verify when the deps are not met, the tests are not run.
+        self.assertEqual(res, [])
+
+    def test_minus_flag(self):
+        """Verify the minus flag skips tests."""
+        patcher = patch.object(test_runner_utils, '_auto_detect_labels')
+        getter = patcher.start()
+        self.addCleanup(patcher.stop)
+
+        getter.return_value = ['os:cros', 'has_chameleon:True']
+
+        patcher2 = patch.object(test_runner_utils, 'get_all_control_files')
+        test_runner_utils_mock = patcher2.start()
+        self.addCleanup(patcher2.stop)
+
+        patcher3 = patch.object(test_runner_utils, 'run_job')
+        run_job_mock = patcher3.start()
+        self.addCleanup(patcher3.stop)
+
+        minus_tests = [FakeTests(self.suite_control_files[0])]
+        all_tests = [
+                FakeTests(test, deps=[]) for test in self.suite_control_files
+        ]
+
+        test_runner_utils_mock.side_effect = [minus_tests, all_tests]
+        run_job_mock.side_effect = [(0, 'fakedir') for _ in range(3)]
+        test_labels = "'a' 'test' 'label'"
+        test_attributes = {"servo": "yes"}
+
+        res = test_runner_utils.perform_local_run(
+                self.autotest_path, ['suite:' + self.suite_name],
+                self.remote,
+                self.fast_mode,
+                build=self.build,
+                board=self.board,
+                ssh_verbosity=self.ssh_verbosity,
+                ssh_options=self.ssh_options,
+                args=self.args,
+                results_directory=self.results_dir,
+                host_attributes=test_attributes,
+                job_retry=self.retry,
+                ignore_deps=False,
+                minus=[self.suite_control_files[0]],
+                is_cft=True,
+                host_labels=test_labels,
+                label=None)
+
+        from unittest.mock import call
+
+        calls = []
+        for name in self.suite_control_files[1:]:
+            calls.append(
+                    call(job=JobMatcher(test_runner_utils.SimpleJob,
+                                        name=name),
+                         host=self.remote,
+                         info=hostinfoMatcher(labels=test_labels,
+                                              attributes=test_attributes),
+                         autotest_path=self.autotest_path,
+                         results_directory=self.results_dir,
+                         fast_mode=self.fast_mode,
+                         id_digits=self.id_digits,
+                         ssh_verbosity=self.ssh_verbosity,
+                         ssh_options=self.ssh_options,
+                         args=TypeMatcher(str),
+                         pretend=False,
+                         autoserv_verbose=False,
+                         companion_hosts=None,
+                         dut_servers=None,
+                         is_cft=True,
+                         ch_info={}))
+
+        run_job_mock.assert_has_calls(calls, any_order=True)
+        assert run_job_mock.call_count == len(calls)
+
+    def test_set_pyversion(self):
+        """Test the tests can properly set the python version."""
+
+        # When a test is missing a version, use the current setting.
+        starting_version = os.getenv('PY_VERSION')
+
+        try:
+            fake_test1 = FakeTests('foo')
+            fake_test2 = FakeTests('foo', py_version=2)
+            fake_test3 = FakeTests('foo', py_version=3)
+
+            test_runner_utils._set_pyversion(
+                    [fake_test1, fake_test2, fake_test3])
+            self.assertEqual(os.getenv('PY_VERSION'), starting_version)
+
+            # When there is a mix, use the current setting.
+            starting_version = os.getenv('PY_VERSION')
+            fake_test1 = FakeTests('foo', py_version=2)
+            fake_test2 = FakeTests('foo', py_version=2)
+            fake_test3 = FakeTests('foo', py_version=3)
+
+            test_runner_utils._set_pyversion(
+                    [fake_test1, fake_test2, fake_test3])
+            self.assertEqual(os.getenv('PY_VERSION'), starting_version)
+
+            # When all agree, but still 1 missing, use the current setting.
+            fake_test1 = FakeTests('foo')
+            fake_test2 = FakeTests('foo', py_version=3)
+            fake_test3 = FakeTests('foo', py_version=3)
+
+            test_runner_utils._set_pyversion(
+                    [fake_test1, fake_test2, fake_test3])
+            self.assertEqual(os.getenv('PY_VERSION'), starting_version)
+
+            # When all are set to 3, use 3.
+            fake_test1 = FakeTests('foo', py_version=3)
+            fake_test2 = FakeTests('foo', py_version=3)
+            fake_test3 = FakeTests('foo', py_version=3)
+
+            test_runner_utils._set_pyversion(
+                    [fake_test1, fake_test2, fake_test3])
+            self.assertEqual(os.getenv('PY_VERSION'), '3')
+
+            # When all are set to 2, use 2.
+            fake_test1 = FakeTests('foo', py_version=2)
+            fake_test2 = FakeTests('foo', py_version=2)
+            fake_test3 = FakeTests('foo', py_version=2)
+
+            test_runner_utils._set_pyversion(
+                    [fake_test1, fake_test2, fake_test3])
+            self.assertEqual(os.getenv('PY_VERSION'), '2')
+        finally:
+            # In the event something breaks, reset the pre-test version.
+            if starting_version is None:
+                os.environ.pop('PY_VERSION', None)
+            else:
+                os.environ['PY_VERSION'] = starting_version
+
+    def test_host_info_write(self):
+        """Verify _write_host_info writes the expected HostInfo store file."""
+        dirpath = tempfile.mkdtemp()
+
+        info = host_info.HostInfo(['some', 'labels'], {'attrib1': '1'})
+        import pathlib
+        expected_path = os.path.join(
+                pathlib.Path(__file__).parent.absolute(),
+                'host_info_store_testfile')
+        try:
+
+            test_runner_utils._write_host_info(dirpath, 'host_info_store',
+                                               'localhost:1234', info)
+            test_path = os.path.join(dirpath, 'host_info_store',
+                                     'localhost:1234.store')
+            with open(test_path, 'r') as rf:
+                test_data = rf.read()
+            with open(expected_path, 'r') as rf:
+                expected_data = rf.read()
+            self.assertEqual(test_data, expected_data)
+
+        finally:
+            shutil.rmtree(dirpath)
 
 
 if __name__ == '__main__':
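
The conversions above follow a consistent pattern when moving from mox to the
standard library: patch.object(...).start() paired with addCleanup(patcher.stop),
and small __eq__-based matcher classes in place of mox comparators such as IsA
and IgnoreArg. A standalone sketch of that pattern is below; the patched target
(os.path.exists) is only an illustrative stand-in, not anything from this change.

    import os
    import unittest
    from unittest.mock import patch


    class TypeMatcher(object):
        """Compares equal to any object of the expected type (mox.IsA-style)."""

        def __init__(self, expected_type):
            self.expected_type = expected_type

        def __eq__(self, other):
            return isinstance(other, self.expected_type)


    class PatchPatternExample(unittest.TestCase):
        """Illustrates the patch.object/addCleanup pattern used above."""

        def test_patch_and_match(self):
            patcher = patch.object(os.path, 'exists')
            exists_mock = patcher.start()
            self.addCleanup(patcher.stop)  # replaces mox UnsetStubs/VerifyAll

            exists_mock.return_value = True
            self.assertTrue(os.path.exists('/some/fake/path'))

            # The __eq__-based matcher stands in for mox.IsA(str).
            exists_mock.assert_called_with(TypeMatcher(str))


    if __name__ == '__main__':
        unittest.main()
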
diff --git a/site_utils/test_that.py b/site_utils/test_that.py
index 64a01a3..371cdf3 100755
--- a/site_utils/test_that.py
+++ b/site_utils/test_that.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -8,6 +8,7 @@
 from __future__ import print_function
 
 import argparse
+import json
 import os
 import signal
 import subprocess
@@ -30,40 +31,43 @@
 _QUICKMERGE_SCRIPTNAME = '/mnt/host/source/chromite/bin/autotest_quickmerge'
 
 
-def _get_board_from_host(remote):
-    """Get the board of the remote host.
+def _get_info_from_host(remote, board=None, model=None, ssh_options=''):
+    """Get the info of the remote host if needed.
 
     @param remote: string representing the IP of the remote host.
+    @param board: board arg from CLI.
+    @param model: model arg from CLI.
 
-    @return: A string representing the board of the remote host.
+    @return: board, model string representing the board, model
+        of the remote host.
     """
-    logging.info('Board unspecified, attempting to determine board from host.')
-    host = factory.create_host(remote)
-    try:
-        board = host.get_board().replace(constants.BOARD_PREFIX, '')
-    except error.AutoservRunError:
-        raise test_runner_utils.TestThatRunError(
-                'Cannot determine board, please specify a --board option.')
-    logging.info('Detected host board: %s', board)
-    return board
 
+    if board and model:
+        return board, model
 
-def _get_model_from_host(remote):
-    """Get the model of the remote host.
+    host = factory.create_host(remote, ssh_options=ssh_options)
 
-    @param remote: string representing the IP of the remote host.
+    if not board:
+        logging.info(
+                'Board unspecified, attempting to determine board from host.')
+        try:
+            board = host.get_board().replace(constants.BOARD_PREFIX, '')
+        except error.AutoservRunError:
+            raise test_runner_utils.TestThatRunError(
+                    'Cannot determine board, please specify a --board option.')
+        logging.info('Detected host board: %s', board)
 
-    @return: A string representing the board of the remote host.
-   """
-    logging.info('Model unspecified, attempting to determine model from host.')
-    host = factory.create_host(remote)
-    try:
-        model = host.get_platform()
-    except error.AutoservRunError:
-        raise test_runner_utils.TestThatRunError(
-                'Cannot determine model, please specify a --model option.')
-    logging.info('Detected host model: %s', model)
-    return model
+    if not model:
+        logging.info(
+                'Model unspecified, attempting to determine model from host.')
+        try:
+            model = host.get_platform()
+        except error.AutoservRunError:
+            raise test_runner_utils.TestThatRunError(
+                    'Cannot determine model, please specify a --model option.')
+        logging.info('Detected host model: %s', model)
+
+    return board, model
 
 
 def validate_arguments(arguments):
@@ -90,6 +94,12 @@
         if arguments.web:
             raise ValueError('--web flag not supported when running locally')
 
+    try:
+        json.loads(arguments.host_attributes)
+    except (TypeError, ValueError):
+        raise ValueError("--host_attributes must be a quoted dict, got: %s" %
+                         arguments.host_attributes)
+
 
 def parse_arguments(argv):
     """
@@ -164,6 +174,42 @@
     parser.add_argument('--ssh_private_key', action='store',
                         default=test_runner_utils.TEST_KEY_PATH,
                         help='Path to the private ssh key.')
+    parser.add_argument(
+            '--companion_hosts',
+            action='store',
+            default=None,
+            help='Companion DUTs for the test, quoted space-separated strings')
+    parser.add_argument('--dut_servers',
+                        action='store',
+                        default=None,
+                        help='DUT servers for the test.')
+    parser.add_argument('--minus',
+                        dest='minus',
+                        nargs='*',
+                        help='List of tests to not use.',
+                        default=[''])
+    parser.add_argument('--py_version',
+                        dest='py_version',
+                        help='Python version to use, passed '
+                        'to Autotest modules, defaults to 2.',
+                        default=None)
+    parser.add_argument('--CFT',
+                        action='store_true',
+                        default=False,
+                        dest='CFT',
+                        help="If running in, or mocking, the CFT env.")
+    parser.add_argument('--host_attributes',
+                        action='store',
+                        default='{}',
+                        help='Quoted JSON dict of host attributes to pass '
+                        'into autoserv.')
+    parser.add_argument('--host_labels',
+                        action='store',
+                        default="",
+                        help='Host labels, quoted space-separated strings.')
+    parser.add_argument('--label',
+                        action='store',
+                        default="",
+                        help='Optional label to append to the job name.')
     return parser.parse_args(argv), remote_argv
 
 
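
Because --host_attributes is validated with json.loads above and deserialized
again before being handed to perform_run_from_autotest_dir below, the value
must be a quoted JSON object on the command line. A small sketch of that round
trip, reusing the {"servo": "yes"} attribute dict from the unit tests (the
key/value pair is only an example):

    import json

    # e.g.  test_that ... --host_attributes '{"servo": "yes"}'
    raw = '{"servo": "yes"}'           # value as argparse would deliver it
    host_attributes = json.loads(raw)  # same call used for validation and dispatch
    assert host_attributes == {'servo': 'yes'}
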
@@ -256,10 +302,6 @@
     @param argv: Script command line arguments.
     @param arguments: Parsed command line arguments.
     """
-    if not os.path.exists('/etc/cros_chroot_version'):
-        print('For local runs, script must be run inside chroot.', file=sys.stderr)
-        return 1
-
     results_directory = test_runner_utils.create_results_directory(
             arguments.results_dir, arguments.board)
     test_runner_utils.add_ssh_identity(results_directory,
@@ -270,13 +312,13 @@
     # --model, and is not set in the default_board file, determine the board by
     # ssh-ing into the host. Also prepend it to argv so we can re-use it when we
     # run test_that from the sysroot.
-    if arguments.board is None:
-        arguments.board = _get_board_from_host(arguments.remote)
-        argv = ['--board=%s' % (arguments.board,)] + argv
-
-    if arguments.model is None:
-        arguments.model = _get_model_from_host(arguments.remote)
-        argv = ['--model=%s' % (arguments.model, )] + argv
+    arguments.board, arguments.model = _get_info_from_host(
+            arguments.remote,
+            arguments.board,
+            arguments.model,
+            ssh_options=arguments.ssh_options)
+    argv = ['--board=%s' % (arguments.board, )] + argv
+    argv = ['--model=%s' % (arguments.model, )] + argv
 
     if arguments.autotest_dir:
         autotest_path = arguments.autotest_dir
@@ -329,7 +371,14 @@
                 debug=arguments.debug,
                 allow_chrome_crashes=arguments.allow_chrome_crashes,
                 pretend=arguments.pretend,
-                job_retry=arguments.retry)
+                job_retry=arguments.retry,
+                companion_hosts=arguments.companion_hosts,
+                minus=arguments.minus,
+                dut_servers=arguments.dut_servers,
+                is_cft=arguments.CFT,
+                host_attributes=json.loads(arguments.host_attributes),
+                host_labels=arguments.host_labels,
+                label=arguments.label)
 
 
 def _main_for_lab_run(argv, arguments):
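
The consolidated _get_info_from_host above only creates a host object when at
least one of board/model is missing, so callers that supply both skip the ssh
round trip entirely. A hedged usage sketch; the hostname, board and model
values are placeholders:

    # Both values supplied: returns immediately, no connection to the DUT.
    board, model = _get_info_from_host('chromeos-dut.local',
                                       board='octopus',
                                       model='phaser360',
                                       ssh_options='-F /dev/null')

    # Either value missing: the DUT is queried over ssh and the gap is filled.
    board, model = _get_info_from_host('chromeos-dut.local', board='octopus')
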
diff --git a/site_utils/test_that_unittest.py b/site_utils/test_that_unittest.py
index 5fdb086..6ba0db9 100755
--- a/site_utils/test_that_unittest.py
+++ b/site_utils/test_that_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/test_suites/OWNERS b/test_suites/OWNERS
new file mode 100644
index 0000000..28c710d
--- /dev/null
+++ b/test_suites/OWNERS
@@ -0,0 +1,4 @@
+include /HARNESS_OWNERS
+include /FIRMWARE_OWNERS
+include /ENGPROD_OWNERS
+*
diff --git a/test_suites/control.AFDO_record b/test_suites/control.AFDO_record
index 394c7ac..be8f31a 100644
--- a/test_suites/control.AFDO_record
+++ b/test_suites/control.AFDO_record
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "AFDO_record"
 PURPOSE = "Exercise Chrome, record profiling data using AFDO."
 
diff --git a/test_suites/control.appcompat b/test_suites/control.appcompat
index 30eeeb2..5a119db 100644
--- a/test_suites/control.appcompat
+++ b/test_suites/control.appcompat
@@ -19,7 +19,7 @@
 
 args_dict['name'] = NAME
 args_dict['add_experimental'] = True
-args_dict['max_runtime_mins'] = 180
+args_dict['max_runtime_mins'] = 270
 args_dict['job'] = job
 
 dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.appcompat_release b/test_suites/control.appcompat_release
new file mode 100644
index 0000000..377111c
--- /dev/null
+++ b/test_suites/control.appcompat_release
@@ -0,0 +1,25 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "Chromium OS team"
+NAME = "appcompat_release"
+PURPOSE = "Suite for app compat release testing."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite runs a few app compat tests on various devices in the lab.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 120
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.appcompat_smoke b/test_suites/control.appcompat_smoke
new file mode 100644
index 0000000..f2d2831
--- /dev/null
+++ b/test_suites/control.appcompat_smoke
@@ -0,0 +1,25 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "Chromium OS team"
+NAME = "appcompat_smoke"
+PURPOSE = "Suite for app compat smoke testing."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite runs a few app compat tests on various devices in the lab.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.arc-data-snapshot_per-build b/test_suites/control.arc-data-snapshot_per-build
new file mode 100644
index 0000000..543ec3a
--- /dev/null
+++ b/test_suites/control.arc-data-snapshot_per-build
@@ -0,0 +1,38 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team, arc-commercial@google.com"
+NAME = "arc-data-snapshot_per-build"
+PURPOSE = "ARC data/ snapshot tests"
+
+TIME = "LONG"
+TEST_CATEGORY = "Snapshot"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is the part of the ARC data snapshot test suite which should run once per
+build.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 960
+args_dict['timeout_mins'] = 1440
+args_dict['file_bugs'] = False
+args_dict['name'] = 'arc-data-snapshot_per-build'
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['bug_template'] = _BUG_TEMPLATE
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.arcless_smoke b/test_suites/control.arcless_smoke
index 870bbc9..87c603a 100644
--- a/test_suites/control.arcless_smoke
+++ b/test_suites/control.arcless_smoke
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "arcless_smoke"
 PURPOSE = "Basic non-ARC tests that are able to run in VMs"
 
diff --git a/test_suites/control.au-m2n b/test_suites/control.au-m2n
new file mode 100644
index 0000000..d5d8d75
--- /dev/null
+++ b/test_suites/control.au-m2n
@@ -0,0 +1,29 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "David Haddock <dhaddock@chromium.org>"
+NAME = "au-m2n"
+PURPOSE = "Suite for M->N autoupdate tests."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite runs various autoupdate tests M to N. This means the tests will
+update from a previous milestone to the current milestone.
+
+Most autoupdate tests go N->N and update from a version to itself. This M->N
+suite should catch regressions that are missed by N->N tests.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.au-p2p b/test_suites/control.au-p2p
new file mode 100644
index 0000000..d4ea3f9
--- /dev/null
+++ b/test_suites/control.au-p2p
@@ -0,0 +1,25 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "David Haddock <dhaddock@chromium.org>"
+NAME = "au-p2p"
+PURPOSE = "Suite for autoupdate P2P tests."
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite runs the P2P autoupdate tests.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.au-perbuild-tast b/test_suites/control.au-perbuild-tast
new file mode 100644
index 0000000..e895ced
--- /dev/null
+++ b/test_suites/control.au-perbuild-tast
@@ -0,0 +1,26 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "cros-engprod-muc"
+NAME = "au-perbuild-tast"
+PURPOSE = "Suite for autoupdate tests in Tast."
+TIME = "LONG"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite runs the autoupdate tests in Tast.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 960
+args_dict['timeout_mins'] = 1440
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.audio_essential b/test_suites/control.audio_essential
index 2c55135..34ba596 100644
--- a/test_suites/control.audio_essential
+++ b/test_suites/control.audio_essential
@@ -13,7 +13,7 @@
 
 DOC = """
 Audio tests that cover audio functionalities that are essential to the
-Chrome OS audio stack.
+ChromeOS audio stack.
 
 Generally the tests require chameleon and audio boards connected.
 Together with DUT and jack plugger bundled in audio-box environment for
diff --git a/test_suites/control.bluetooth b/test_suites/control.bluetooth
index e88726d..423fda4 100644
--- a/test_suites/control.bluetooth
+++ b/test_suites/control.bluetooth
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bluetooth"
 PURPOSE = "Automated testing of Bluetooth."
 
diff --git a/test_suites/control.bluetooth_e2e b/test_suites/control.bluetooth_e2e
index a28bf34..6ca8c01 100644
--- a/test_suites/control.bluetooth_e2e
+++ b/test_suites/control.bluetooth_e2e
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bluetooth_e2e"
 PURPOSE = "Automated testing of Bluetooth."
 
diff --git a/test_suites/control.bluetooth_e2e_cq b/test_suites/control.bluetooth_e2e_cq
new file mode 100644
index 0000000..c63debc
--- /dev/null
+++ b/test_suites/control.bluetooth_e2e_cq
@@ -0,0 +1,26 @@
+# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Bluetooth Team"
+NAME = "bluetooth_e2e_cq"
+PURPOSE = "Bluetooth tests to be included in rfcell CQ"
+
+TIME = "Medium"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite contains e2e bluetooth tests that require Bluetooth peers
+to be run in rfcell CQ
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bluetooth_flaky b/test_suites/control.bluetooth_flaky
index 80f0695..deb4d91 100644
--- a/test_suites/control.bluetooth_flaky
+++ b/test_suites/control.bluetooth_flaky
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bluetooth_flaky"
 PURPOSE = "Automated testing of Bluetooth."
 
diff --git a/test_suites/control.bluetooth_floss b/test_suites/control.bluetooth_floss
new file mode 100644
index 0000000..35d3647
--- /dev/null
+++ b/test_suites/control.bluetooth_floss
@@ -0,0 +1,26 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Bluetooth Team"
+NAME = "bluetooth_floss"
+PURPOSE = "Automated testing of Bluetooth Floss Stack"
+
+TIME = "LONG"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite consists of test cases from bluetooth_e2e and bluetooth_standalone
+which is modified run on Floss stack
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bluetooth_floss_cq b/test_suites/control.bluetooth_floss_cq
new file mode 100644
index 0000000..b5d22e7
--- /dev/null
+++ b/test_suites/control.bluetooth_floss_cq
@@ -0,0 +1,29 @@
+# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Bluetooth Team"
+NAME = "bluetooth_floss_cq"
+PURPOSE = "Bluetooth tests to be included in rfcell CQ for Project Floss"
+
+TIME = "Medium"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite contains Bluetooth tests that require Bluetooth peers and are
+meant to be run in the rfcell CQ for changes in the Floss project.
+
+Details of Bluetooth suites can be found at
+https://g3doc.corp.google.com/company/teams/chrome/ops/chromeos/engprod/connectivity/bluetooth/automated_tests_and_lab.md
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bluetooth_health b/test_suites/control.bluetooth_health
index 8db9012..0eaa799 100644
--- a/test_suites/control.bluetooth_health
+++ b/test_suites/control.bluetooth_health
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bluetooth_health"
 PURPOSE = "Automated health testing of Bluetooth."
 
diff --git a/test_suites/control.bluetooth_mtbf b/test_suites/control.bluetooth_mtbf
index 510488b..6018c21 100644
--- a/test_suites/control.bluetooth_mtbf
+++ b/test_suites/control.bluetooth_mtbf
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bluetooth_mtbf"
 PURPOSE = "Automated mtbf testing of Bluetooth."
 
diff --git a/test_suites/control.bluetooth_qualification b/test_suites/control.bluetooth_qualification
index 4df7f2d..e8ad1fd 100644
--- a/test_suites/control.bluetooth_qualification
+++ b/test_suites/control.bluetooth_qualification
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bluetooth_qualification"
 PURPOSE = "Automated qualification testing of Bluetooth."
 
diff --git a/test_suites/control.bluetooth_standalone b/test_suites/control.bluetooth_standalone
index 0252a71..3d520ef 100644
--- a/test_suites/control.bluetooth_standalone
+++ b/test_suites/control.bluetooth_standalone
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bluetooth_standalone"
 PURPOSE = "Automated testing of Bluetooth."
 
diff --git a/test_suites/control.bluetooth_standalone_cq b/test_suites/control.bluetooth_standalone_cq
index 281ae24..01e1961 100644
--- a/test_suites/control.bluetooth_standalone_cq
+++ b/test_suites/control.bluetooth_standalone_cq
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Bluetooth Team"
+AUTHOR = "ChromeOS Bluetooth Team"
 NAME = "bluetooth_standalone_cq"
 PURPOSE = "Standalone Bluetooth tests to be included in CQ"
 
diff --git a/test_suites/control.bluetooth_stress b/test_suites/control.bluetooth_stress
index 713d01c..4a91c97 100644
--- a/test_suites/control.bluetooth_stress
+++ b/test_suites/control.bluetooth_stress
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bluetooth_stress"
 PURPOSE = "Automated stress testing of Bluetooth."
 
diff --git a/test_suites/control.bluetooth_wifi_coex b/test_suites/control.bluetooth_wifi_coex
index 8314e66..dc9126e 100644
--- a/test_suites/control.bluetooth_wifi_coex
+++ b/test_suites/control.bluetooth_wifi_coex
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bluetooth_wifi_coex"
 PURPOSE = "Automated testing of Bluetooth."
 
diff --git a/test_suites/control.bluetooth_wifi_testbed_update b/test_suites/control.bluetooth_wifi_testbed_update
new file mode 100644
index 0000000..81b8047
--- /dev/null
+++ b/test_suites/control.bluetooth_wifi_testbed_update
@@ -0,0 +1,26 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Platform Engprod Team, cros-conn-test-team@google.com"
+NAME = "bluetooth_wifi_testbed_update"
+PURPOSE = "Tests that check and update bluetooth/WiFi peer in wificell test bed"
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite runs a set of tests that update and check the WiFi and Bluetooth
+peer devices present in wificell testbeds.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.borealis-cq b/test_suites/control.borealis-cq
new file mode 100644
index 0000000..1f4cb60
--- /dev/null
+++ b/test_suites/control.borealis-cq
@@ -0,0 +1,24 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Graphics Team"
+NAME = "borealis-cq"
+PURPOSE = "Run borealis-related critical tests."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """ This suite runs borealis-related critical tests. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 60
+args_dict['timeout_mins'] = 1440
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.borealis_per-build b/test_suites/control.borealis_per-build
new file mode 100644
index 0000000..76a665f
--- /dev/null
+++ b/test_suites/control.borealis_per-build
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "chromeos-gfx"
+NAME = "borealis_per-build"
+PURPOSE = "Borealis related tests to run per build"
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is the part of the borealis test suite which should run once per build.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 960
+args_dict['timeout_mins'] = 1440
+args_dict['file_bugs'] = False
+args_dict['name'] = 'borealis_per-build'
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['bug_template'] = _BUG_TEMPLATE
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.borealis_per-day b/test_suites/control.borealis_per-day
new file mode 100644
index 0000000..bb9e0c7
--- /dev/null
+++ b/test_suites/control.borealis_per-day
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "chromeos-gfx"
+NAME = "borealis_per-day"
+PURPOSE = "Borealis related tests to run per day"
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is the part of the borealis test suite which should run once every day.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 1440
+args_dict['timeout_mins'] = 2880
+args_dict['file_bugs'] = False
+args_dict['name'] = 'borealis_per-day'
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['bug_template'] = _BUG_TEMPLATE
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.borealis_per-week b/test_suites/control.borealis_per-week
new file mode 100644
index 0000000..5457860
--- /dev/null
+++ b/test_suites/control.borealis_per-week
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "chromeos-gfx"
+NAME = "borealis_per-week"
+PURPOSE = "Borealis related tests to run per week"
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is the part of the Borealis test suite which should run once every week.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 1440
+args_dict['timeout_mins'] = 4320
+args_dict['file_bugs'] = False
+args_dict['name'] = 'borealis_per-week'
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['bug_template'] = _BUG_TEMPLATE
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-arc b/test_suites/control.bvt-arc
index 67ced11..2e68fd7 100644
--- a/test_suites/control.bvt-arc
+++ b/test_suites/control.bvt-arc
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bvt-arc"
 PURPOSE = "Test basic functionality of ARC."
 
@@ -19,10 +19,10 @@
   * Failures in the canary turn the tree red, block lower priority
     tests for the build, and generally mean that QA cannot further
     evaluate the build's fitness for release.
-  * Chrome OS CLs must pass these tests in the Commit Queue prior to
+  * ChromeOS CLs must pass these tests in the Commit Queue prior to
     being accepted into the tree.
   * A new Chrome or Android build must pass these tests prior to the
-    build being included in a Chrome OS canary build.
+    build being included in a ChromeOS canary build.
 
 Requirements for a test to be in this suite:
  1. The test should be SHORT or MEDIUM, and should not require any
diff --git a/test_suites/control.bvt-cq b/test_suites/control.bvt-cq
index 29dbdc0..8b15984 100644
--- a/test_suites/control.bvt-cq
+++ b/test_suites/control.bvt-cq
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bvt-cq"
 PURPOSE = "Test functionality required by the Commit Queue."
 
@@ -13,12 +13,12 @@
 
 DOC = """
 This is the portion of the Build Verification Test suite required to
-pass the Chrome OS Commit Queue and Pre-Flight Queue.  Test failures
+pass the ChromeOS Commit Queue and Pre-Flight Queue.  Test failures
 prevent code from being included in the tree or in canary builds:
-  * Chrome OS CLs must pass these tests prior to being accepted into
+  * ChromeOS CLs must pass these tests prior to being accepted into
     the tree.
   * A new Chrome build must pass these tests prior to the build
-    being included in a Chrome OS canary build.
+    being included in a ChromeOS canary build.
 
 Requirements for a test to be in this suite:
  1. The test should be SHORT, and should not require any specialized
diff --git a/test_suites/control.bvt-faft b/test_suites/control.bvt-faft
deleted file mode 100644
index 0270bb6..0000000
--- a/test_suites/control.bvt-faft
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright (c) 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "bvt-faft"
-PURPOSE = "Run critical FAFT tests for BVT."
-
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """
-This is the FAFT (fully automated firmware test) portion of the Build
-Verification Test suite required to pass before any other tests may run.
-
-@param build: The name of the image to test.
-              Ex: x86-mario-release/R17-1412.33.0-a1-b29
-@param board: The board to test on. Ex: x86-mario
-@param pool: The pool of machines to utilize for scheduling. If pool=None
-             board is used.
-@param check_hosts: require appropriate live hosts to exist in the lab.
-@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
-"""
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-
-# Values specified in this bug template will override default values when
-# filing bugs on tests that are a part of this suite. If left unspecified
-# the bug filer will fallback to it's defaults.
-_BUG_TEMPLATE = {
-    'labels': ['FW-labblocker', 'Restrict-View-Google'],
-    'owner': '',
-    'status': None,
-    'summary': None,
-    'title': None,
-}
-
-args_dict['max_runtime_mins'] = 30
-args_dict['name'] = 'bvt-faft'
-args_dict['job'] = job
-args_dict['add_experimental'] = True
-args_dict['bug_template'] = _BUG_TEMPLATE
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-inline b/test_suites/control.bvt-inline
index c93df23..49297cb 100644
--- a/test_suites/control.bvt-inline
+++ b/test_suites/control.bvt-inline
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bvt-inline"
 PURPOSE = "Test critical functionality."
 
diff --git a/test_suites/control.bvt-installer b/test_suites/control.bvt-installer
index 143a6a6..7f6a76c 100644
--- a/test_suites/control.bvt-installer
+++ b/test_suites/control.bvt-installer
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bvt-installer"
 PURPOSE = "Build verification of ChromeOS pieces related to Installer."
 
diff --git a/test_suites/control.bvt-perbuild b/test_suites/control.bvt-perbuild
index fc4c19d..6134320 100644
--- a/test_suites/control.bvt-perbuild
+++ b/test_suites/control.bvt-perbuild
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bvt-perbuild"
 PURPOSE = "Test basic, required functionality."
 
diff --git a/test_suites/control.bvt-tast-android-pfq b/test_suites/control.bvt-tast-android-pfq
index 057a818..c411a04 100644
--- a/test_suites/control.bvt-tast-android-pfq
+++ b/test_suites/control.bvt-tast-android-pfq
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bvt-tast-android-pfq"
 PURPOSE = "Tests critical ARC functionality for Android PFQ using Tast."
 
@@ -16,10 +16,12 @@
 by running ARC-specific Tast integration tests that must always pass against a
 DUT. See http://go/tast for more information about Tast.
 
-The only Autotest test executed by this suite is tast.critical-android, which is
-a server test that executes the tast executable. The tast executable runs
-individual Tast tests. If any of these Tast tests fail, then
-tast.critical-android (and this suite) fail. """
+The following Autotest tests are executed by this suite:
+- tast.critical-android, which is a server test that executes the tast
+executable. The tast executable runs individual Tast tests. If any of these Tast
+tests fail, then tast.critical-android (and this suite) fail.
+- tast.arc-data-collector, which launches the arc.DataCollector Tast test to
+collect runtime artifacts from the DUT for future optimization."""
 
 import common
 from autotest_lib.server.cros.dynamic_suite import dynamic_suite
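(A minimal sketch of the kind of wrapper control file the DOC above refers to: a server test that shells out to the tast executable. The parameter values below are illustrative assumptions, not copied from the real tast.critical-android or tast.arc-data-collector control files.)

def run(machine):
    # Any individual Tast test failure fails this single Autotest test,
    # and therefore the suite that scheduled it.
    job.run_test('tast',
                 host=hosts.create_host(machine),
                 test_exprs=['("group:mainline" && !informational)'],  # illustrative expression
                 max_run_sec=3600)

parallel_simple(run, machines)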
diff --git a/test_suites/control.bvt-tast-arc b/test_suites/control.bvt-tast-arc
new file mode 100644
index 0000000..065bf19
--- /dev/null
+++ b/test_suites/control.bvt-tast-arc
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-arc"
+PURPOSE = "Tests critical ARC functionality for ChromeOS CQ using Tast."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ARC's basic functionality for ARC variant boards
+(e.g. *-arc-r) in the ChromeOS CQ, by running ARC-specific Tast integration
+tests that must always pass against a DUT. See http://go/tast for more
+information about Tast.
+
+The only Autotest test executed by this suite is tast.critical-android, which is
+a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then
+tast.critical-android (and this suite) fail. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 20
+args_dict['timeout_mins'] = 1440
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-chrome-pfq b/test_suites/control.bvt-tast-chrome-pfq
index 2a546b0..e24f7fb 100644
--- a/test_suites/control.bvt-tast-chrome-pfq
+++ b/test_suites/control.bvt-tast-chrome-pfq
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bvt-tast-chrome-pfq"
 PURPOSE = "Tests critical Chrome functionality for Chrome PFQ using Tast."
 
diff --git a/test_suites/control.bvt-tast-cq b/test_suites/control.bvt-tast-cq
index 9e13058..faf24e3 100644
--- a/test_suites/control.bvt-tast-cq
+++ b/test_suites/control.bvt-tast-cq
@@ -2,9 +2,9 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bvt-tast-cq"
-PURPOSE = "Tests all critical functionality for Chrome OS CQ using Tast."
+PURPOSE = "Tests all critical functionality for ChromeOS CQ using Tast."
 
 TIME = "SHORT"
 TEST_CATEGORY = "General"
@@ -12,7 +12,7 @@
 TEST_TYPE = "Server"
 
 DOC = """
-This suite verifies Chrome OS's basic functionality for the Chrome OS Commit
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
 Queue by running all Tast integration tests (exercising ARC, Chrome, and the OS
 itself) that must always pass against a DUT. See http://go/tast for more
 information about Tast.
diff --git a/test_suites/control.bvt-tast-cq-a11y b/test_suites/control.bvt-tast-cq-a11y
new file mode 100644
index 0000000..32e5293
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-a11y
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-a11y"
+PURPOSE = 'Tests the critical Tast tests in the "a11y" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "a11y" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-a11y, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-a11y test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
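(The remaining bvt-tast-cq-<category> control files added below are identical up to the category name. A sketch of the kind of template expansion that would produce them; this is purely illustrative, as the files in this change are checked in directly rather than generated:)

_CATEGORY_TEMPLATE = '''\
AUTHOR = "ChromeOS Team"
NAME = "bvt-tast-cq-{cat}"
PURPOSE = 'Tests the critical Tast tests in the "{cat}" category.'
# ... DOC, timeouts and the reimage_and_run() call follow the same pattern.
'''

def render_control(cat):
    # render_control('a11y') reproduces the header of the file above.
    return _CATEGORY_TEMPLATE.format(cat=cat)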
diff --git a/test_suites/control.bvt-tast-cq-ad b/test_suites/control.bvt-tast-cq-ad
new file mode 100644
index 0000000..250fcf2
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-ad
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-ad"
+PURPOSE = 'Tests the critical Tast tests in the "ad" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "ad" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-ad, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-ad test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-ambient b/test_suites/control.bvt-tast-cq-ambient
new file mode 100644
index 0000000..d431a6e
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-ambient
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-ambient"
+PURPOSE = 'Tests the critical Tast tests in the "ambient" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "ambient" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-ambient, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-ambient test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-apps b/test_suites/control.bvt-tast-cq-apps
new file mode 100644
index 0000000..c020e18
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-apps
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-apps"
+PURPOSE = 'Tests the critical Tast tests in the "apps" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "apps" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-apps, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-apps test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-arc b/test_suites/control.bvt-tast-cq-arc
new file mode 100644
index 0000000..4ad353f
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-arc
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-arc"
+PURPOSE = 'Tests the critical Tast tests in the "arc" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "arc" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-arc, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-arc test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-arcappcompat b/test_suites/control.bvt-tast-cq-arcappcompat
new file mode 100644
index 0000000..ead80bd
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-arcappcompat
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-arcappcompat"
+PURPOSE = 'Tests the critical Tast tests in the "arcappcompat" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "arcappcompat" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-arcappcompat, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-arcappcompat test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-assistant b/test_suites/control.bvt-tast-cq-assistant
new file mode 100644
index 0000000..dc33665
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-assistant
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-assistant"
+PURPOSE = 'Tests the critical Tast tests in the "assistant" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "assistant" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-assistant, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-assistant test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-audio b/test_suites/control.bvt-tast-cq-audio
new file mode 100644
index 0000000..91cc3b0
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-audio
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-audio"
+PURPOSE = 'Tests the critical Tast tests in the "audio" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "audio" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-audio, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-audio test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-baserpc b/test_suites/control.bvt-tast-cq-baserpc
new file mode 100644
index 0000000..a879e81
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-baserpc
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-baserpc"
+PURPOSE = 'Tests the critical Tast tests in the "baserpc" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "baserpc" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-baserpc, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-baserpc test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-biod b/test_suites/control.bvt-tast-cq-biod
new file mode 100644
index 0000000..9e98e98
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-biod
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-biod"
+PURPOSE = 'Tests the critical Tast tests in the "biod" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "biod" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-biod, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-biod test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-camera b/test_suites/control.bvt-tast-cq-camera
new file mode 100644
index 0000000..ff78082
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-camera
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-camera"
+PURPOSE = 'Tests the critical Tast tests in the "camera" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "camera" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-camera, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-camera test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-crash b/test_suites/control.bvt-tast-cq-crash
new file mode 100644
index 0000000..cbbe4cd
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-crash
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-crash"
+PURPOSE = 'Tests the critical Tast tests in the "crash" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "crash" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-crash, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-crash test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-crostini b/test_suites/control.bvt-tast-cq-crostini
new file mode 100644
index 0000000..33b9fa8
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-crostini
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-crostini"
+PURPOSE = 'Tests the critical Tast tests in the "crostini" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "crostini" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-crostini, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-crostini test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-cryptohome b/test_suites/control.bvt-tast-cq-cryptohome
new file mode 100644
index 0000000..fa7d8eb
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-cryptohome
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-cryptohome"
+PURPOSE = 'Tests the critical Tast tests in the "cryptohome" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "cryptohome" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-cryptohome, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-cryptohome test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-dbus b/test_suites/control.bvt-tast-cq-dbus
new file mode 100644
index 0000000..f6fdb85
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-dbus
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-dbus"
+PURPOSE = 'Tests the critical Tast tests in the "dbus" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "dbus" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-dbus, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-dbus test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-debugd b/test_suites/control.bvt-tast-cq-debugd
new file mode 100644
index 0000000..0c9c3d7
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-debugd
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-debugd"
+PURPOSE = 'Tests the critical Tast tests in the "debugd" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "debugd" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-debugd, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-debugd test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-dev b/test_suites/control.bvt-tast-cq-dev
new file mode 100644
index 0000000..9cdcb50
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-dev
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-dev"
+PURPOSE = 'Tests the critical Tast tests in the "dev" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "dev" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-dev, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-dev test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-diagnostics b/test_suites/control.bvt-tast-cq-diagnostics
new file mode 100644
index 0000000..d9c9879
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-diagnostics
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-diagnostics"
+PURPOSE = 'Tests the critical Tast tests in the "diagnostics" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "diagnostics" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-diagnostics, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-diagnostics test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-documentscanapi b/test_suites/control.bvt-tast-cq-documentscanapi
new file mode 100644
index 0000000..fa1c87a
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-documentscanapi
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-documentscanapi"
+PURPOSE = 'Tests the critical Tast tests in the "documentscanapi" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "documentscanapi" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-documentscanapi, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-documentscanapi test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-enterprise b/test_suites/control.bvt-tast-cq-enterprise
new file mode 100644
index 0000000..fb96e91
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-enterprise
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-enterprise"
+PURPOSE = 'Tests the critical Tast tests in the "enterprise" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "enterprise" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-enterprise, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-enterprise test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-example b/test_suites/control.bvt-tast-cq-example
new file mode 100644
index 0000000..d9837a8
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-example
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-example"
+PURPOSE = 'Tests the critical Tast tests in the "example" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "example" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-example, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-example test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-factory b/test_suites/control.bvt-tast-cq-factory
new file mode 100644
index 0000000..34425f9
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-factory
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-factory"
+PURPOSE = 'Tests the critical Tast tests in the "factory" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "factory" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-factory, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-factory test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-familylink b/test_suites/control.bvt-tast-cq-familylink
new file mode 100644
index 0000000..c2867d0
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-familylink
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-familylink"
+PURPOSE = 'Tests the critical Tast tests in the "familylink" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "familylink" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-familylink, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-familylink test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-feedback b/test_suites/control.bvt-tast-cq-feedback
new file mode 100644
index 0000000..7fbee8c
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-feedback
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-feedback"
+PURPOSE = 'Tests the critical Tast tests in the "feedback" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "feedback" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-feedback, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-feedback test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-filemanager b/test_suites/control.bvt-tast-cq-filemanager
new file mode 100644
index 0000000..a8b78ed
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-filemanager
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-filemanager"
+PURPOSE = 'Tests the critical Tast tests in the "filemanager" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "filemanager" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-filemanager, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-filemanager test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-firmware b/test_suites/control.bvt-tast-cq-firmware
new file mode 100644
index 0000000..e4b244c
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-firmware
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-firmware"
+PURPOSE = 'Tests the critical Tast tests in the "firmware" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "firmware" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-firmware, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-firmware test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-gamepad b/test_suites/control.bvt-tast-cq-gamepad
new file mode 100644
index 0000000..94cca38
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-gamepad
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-gamepad"
+PURPOSE = 'Tests the critical Tast tests in the "gamepad" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "gamepad" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-gamepad, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-gamepad test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-graphics b/test_suites/control.bvt-tast-cq-graphics
new file mode 100644
index 0000000..7668f5a
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-graphics
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-graphics"
+PURPOSE = 'Tests the critical Tast tests in the "graphics" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "graphics" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-graphics, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-graphics test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-hardware b/test_suites/control.bvt-tast-cq-hardware
new file mode 100644
index 0000000..dddda29
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-hardware
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-hardware"
+PURPOSE = 'Tests the critical Tast tests in the "hardware" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "hardware" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-hardware, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-hardware test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-health b/test_suites/control.bvt-tast-cq-health
new file mode 100644
index 0000000..08959a9
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-health
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-health"
+PURPOSE = 'Tests the critical Tast tests in the "health" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "health" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-health, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-health test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-hwsec b/test_suites/control.bvt-tast-cq-hwsec
new file mode 100644
index 0000000..22a690f
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-hwsec
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-hwsec"
+PURPOSE = 'Tests the critical Tast tests in the "hwsec" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "hwsec" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-hwsec, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-hwsec test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-inputs b/test_suites/control.bvt-tast-cq-inputs
new file mode 100644
index 0000000..93afd38
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-inputs
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-inputs"
+PURPOSE = 'Tests the critical Tast tests in the "inputs" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "inputs" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-inputs, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-inputs test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-kernel b/test_suites/control.bvt-tast-cq-kernel
new file mode 100644
index 0000000..27737f1
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-kernel
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-kernel"
+PURPOSE = 'Tests the critical Tast tests in the "kernel" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "kernel" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-kernel, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-kernel test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-lacros b/test_suites/control.bvt-tast-cq-lacros
new file mode 100644
index 0000000..a1e8142
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-lacros
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-lacros"
+PURPOSE = 'Tests the critical Tast tests in the "lacros" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "lacros" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-lacros, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-lacros test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-launcher b/test_suites/control.bvt-tast-cq-launcher
new file mode 100644
index 0000000..5950e53
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-launcher
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-launcher"
+PURPOSE = 'Tests the critical Tast tests in the "launcher" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "launcher" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-launcher, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-launcher test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-logs b/test_suites/control.bvt-tast-cq-logs
new file mode 100644
index 0000000..cb93f11
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-logs
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-logs"
+PURPOSE = 'Tests the critical Tast tests in the "logs" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "logs" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-logs, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-logs test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-meta b/test_suites/control.bvt-tast-cq-meta
new file mode 100644
index 0000000..9578651
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-meta
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-meta"
+PURPOSE = 'Tests the critical Tast tests in the "meta" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "meta" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-meta, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-meta test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-mlservice b/test_suites/control.bvt-tast-cq-mlservice
new file mode 100644
index 0000000..d84ab6a
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-mlservice
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-mlservice"
+PURPOSE = 'Tests the critical Tast tests in the "mlservice" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "mlservice" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-mlservice, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-mlservice test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-multivm b/test_suites/control.bvt-tast-cq-multivm
new file mode 100644
index 0000000..d662369
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-multivm
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-multivm"
+PURPOSE = 'Tests the critical Tast tests in the "multivm" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "multivm" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-multivm, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-multivm test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-nacl b/test_suites/control.bvt-tast-cq-nacl
new file mode 100644
index 0000000..8768a2b
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-nacl
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-nacl"
+PURPOSE = 'Tests the critical Tast tests in the "nacl" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "nacl" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-nacl, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-nacl test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-nearbyshare b/test_suites/control.bvt-tast-cq-nearbyshare
new file mode 100644
index 0000000..12de0cc
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-nearbyshare
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-nearbyshare"
+PURPOSE = 'Tests the critical Tast tests in the "nearbyshare" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "nearbyshare" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-nearbyshare, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-nearbyshare test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-network b/test_suites/control.bvt-tast-cq-network
new file mode 100644
index 0000000..f948b6f
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-network
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-network"
+PURPOSE = 'Tests the critical Tast tests in the "network" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "network" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-network, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-network test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-notifications b/test_suites/control.bvt-tast-cq-notifications
new file mode 100644
index 0000000..fb5cc3c
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-notifications
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-notifications"
+PURPOSE = 'Tests the critical Tast tests in the "notifications" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "notifications" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-notifications, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-notifications test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-ocr b/test_suites/control.bvt-tast-cq-ocr
new file mode 100644
index 0000000..445fcda
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-ocr
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-ocr"
+PURPOSE = 'Tests the critical Tast tests in the "ocr" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "ocr" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-ocr, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-ocr test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-peripherals b/test_suites/control.bvt-tast-cq-peripherals
new file mode 100644
index 0000000..f658096
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-peripherals
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-peripherals"
+PURPOSE = 'Tests the critical Tast tests in the "peripherals" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "peripherals" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-peripherals, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-peripherals test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-platform b/test_suites/control.bvt-tast-cq-platform
new file mode 100644
index 0000000..ba26056
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-platform
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-platform"
+PURPOSE = 'Tests the critical Tast tests in the "platform" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "platform" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-platform, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-platform test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-policy b/test_suites/control.bvt-tast-cq-policy
new file mode 100644
index 0000000..00ae450
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-policy
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-policy"
+PURPOSE = 'Tests the critical Tast tests in the "policy" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "policy" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-policy, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-policy test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-power b/test_suites/control.bvt-tast-cq-power
new file mode 100644
index 0000000..f38adc4
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-power
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-power"
+PURPOSE = 'Tests the critical Tast tests in the "power" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "power" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-power, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-power test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-printer b/test_suites/control.bvt-tast-cq-printer
new file mode 100644
index 0000000..136848d
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-printer
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-printer"
+PURPOSE = 'Tests the critical Tast tests in the "printer" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "printer" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-printer, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-printer test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-qemu b/test_suites/control.bvt-tast-cq-qemu
new file mode 100644
index 0000000..6b4cf13
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-qemu
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-qemu"
+PURPOSE = 'Tests the critical Tast tests in the "qemu" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "qemu" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-qemu, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-qemu test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-scanapp b/test_suites/control.bvt-tast-cq-scanapp
new file mode 100644
index 0000000..4e56c67
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-scanapp
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-scanapp"
+PURPOSE = 'Tests the critical Tast tests in the "scanapp" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "scanapp" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-scanapp, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-scanapp test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-scanner b/test_suites/control.bvt-tast-cq-scanner
new file mode 100644
index 0000000..1f4949b
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-scanner
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-scanner"
+PURPOSE = 'Tests the critical Tast tests in the "scanner" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "scanner" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-scanner, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-scanner test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-sched b/test_suites/control.bvt-tast-cq-sched
new file mode 100644
index 0000000..698948a
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-sched
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-sched"
+PURPOSE = 'Tests the critical Tast tests in the "sched" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "sched" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-sched, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-sched test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-screenshot b/test_suites/control.bvt-tast-cq-screenshot
new file mode 100644
index 0000000..7adf963
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-screenshot
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-screenshot"
+PURPOSE = 'Tests the critical Tast tests in the "screenshot" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "screenshot" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-screenshot, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-screenshot test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-security b/test_suites/control.bvt-tast-cq-security
new file mode 100644
index 0000000..8793ce0
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-security
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-security"
+PURPOSE = 'Tests the critical Tast tests in the "security" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "security" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-security, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-security test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-session b/test_suites/control.bvt-tast-cq-session
new file mode 100644
index 0000000..e583d38
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-session
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-session"
+PURPOSE = 'Tests the critical Tast tests in the "session" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "session" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-session, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-session test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-shelf b/test_suites/control.bvt-tast-cq-shelf
new file mode 100644
index 0000000..9f51b35
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-shelf
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-shelf"
+PURPOSE = 'Tests the critical Tast tests in the "shelf" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "shelf" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-shelf, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-shelf test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-storage b/test_suites/control.bvt-tast-cq-storage
new file mode 100644
index 0000000..984304c
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-storage
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-storage"
+PURPOSE = 'Tests the critical Tast tests in the "storage" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "storage" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-storage, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-storage test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-typec b/test_suites/control.bvt-tast-cq-typec
new file mode 100644
index 0000000..a67153d
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-typec
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-typec"
+PURPOSE = 'Tests the critical Tast tests in the "typec" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "typec" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-typec, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-typec test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-u2fd b/test_suites/control.bvt-tast-cq-u2fd
new file mode 100644
index 0000000..651681a
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-u2fd
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-u2fd"
+PURPOSE = 'Tests the critical Tast tests in the "u2fd" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "u2fd" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-u2fd, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-u2fd test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-ui b/test_suites/control.bvt-tast-cq-ui
new file mode 100644
index 0000000..50dca58
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-ui
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-ui"
+PURPOSE = 'Tests the critical Tast tests in the "ui" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "ui" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-ui, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-ui test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-video b/test_suites/control.bvt-tast-cq-video
new file mode 100644
index 0000000..5e61b07
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-video
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-video"
+PURPOSE = 'Tests the critical Tast tests in the "video" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "video" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-video, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-video test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-vm b/test_suites/control.bvt-tast-cq-vm
new file mode 100644
index 0000000..b23d7d4
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-vm
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-vm"
+PURPOSE = 'Tests the critical Tast tests in the "vm" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "vm" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-vm, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-vm test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-webrtc b/test_suites/control.bvt-tast-cq-webrtc
new file mode 100644
index 0000000..35049df
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-webrtc
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-webrtc"
+PURPOSE = 'Tests the critical Tast tests in the "webrtc" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "webrtc" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-webrtc, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-webrtc test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-wifi b/test_suites/control.bvt-tast-cq-wifi
new file mode 100644
index 0000000..6c0114c
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-wifi
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-wifi"
+PURPOSE = 'Tests the critical Tast tests in the "wifi" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "wifi" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-wifi, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-wifi test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-cq-wilco b/test_suites/control.bvt-tast-cq-wilco
new file mode 100644
index 0000000..4565b53
--- /dev/null
+++ b/test_suites/control.bvt-tast-cq-wilco
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-cq-wilco"
+PURPOSE = 'Tests the critical Tast tests in the "wilco" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies ChromeOS's basic functionality for the ChromeOS Commit
+Queue by running all Tast integration tests in the "wilco" category that must
+always pass against a DUT. See http://go/tast for more information about Tast.
+
+The only Autotest test executed by this suite is tast.category-wilco, which
+is a server test that executes the tast executable. The tast executable runs
+individual Tast tests. If any of these Tast tests fail, then the
+tast.category-wilco test (and this suite) fail.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-informational b/test_suites/control.bvt-tast-informational
index 9e222a5..595e0de 100644
--- a/test_suites/control.bvt-tast-informational
+++ b/test_suites/control.bvt-tast-informational
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "bvt-tast-informational"
 PURPOSE = "Runs informational Tast tests."
 
diff --git a/test_suites/control.bvt-tast-parallels-cq b/test_suites/control.bvt-tast-parallels-cq
new file mode 100644
index 0000000..9746af7
--- /dev/null
+++ b/test_suites/control.bvt-tast-parallels-cq
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-parallels-cq"
+PURPOSE = "Runs all critical Parallels Tast tests."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies basic Parallels functionality on ChromeOS for the ChromeOS
+Commit Queue by running all Parallels Tast integration tests that must
+always pass against a DUT.
+
+This suite should only be run against devices licensed for an appropriate
+version of Windows and Office. Contact parallels-cros@ for details about
+license specifics.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 20
+args_dict['timeout_mins'] = 1440
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.bvt-tast-parallels-informational b/test_suites/control.bvt-tast-parallels-informational
new file mode 100644
index 0000000..bf1a74f
--- /dev/null
+++ b/test_suites/control.bvt-tast-parallels-informational
@@ -0,0 +1,31 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-tast-parallels-informational"
+PURPOSE = "Runs informational Parallels Tast tests."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite is used to run all informational Parallels Tast integration tests
+on release (a.k.a. canary) builders.
+
+This suite should only be run against devices licensed for an appropriate
+version of Windows and Office. Contact parallels-cros@ for details about
+license specifics.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 60
+args_dict['timeout_mins'] = 1440
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.calibration b/test_suites/control.calibration
new file mode 100644
index 0000000..11e4fda
--- /dev/null
+++ b/test_suites/control.calibration
@@ -0,0 +1,30 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'chromeos-lab-infrastructure'
+NAME = 'calibration'
+PURPOSE = 'Calibration check run in buildbot to ensure the build will not break DUTs'
+
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'suite'
+TEST_TYPE = 'Server'
+
+DOC = """
+This suite is intended to be run by buildbot as a calibration check of the build.
+It currently only contains a stub_Pass test. It works by relying
+on the safety checks built into the provisioning code to identify bad builds.
+
+This suite is intended to block other bvt suites, and as such is highly
+restricted because of the potential performance costs.
+
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
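For context on why a single trivial test suffices for the calibration suite: an Autotest client test that always passes is just a test.test subclass whose run_once() body does nothing, so the only signal this suite produces comes from provisioning itself succeeding or failing. The sketch below is written in the spirit of stub_Pass and is only an illustration; the actual implementation under client/site_tests may differ in detail.

# Sketch of a minimal always-pass Autotest client test, in the spirit of
# the stub_Pass test this suite schedules; the real implementation under
# client/site_tests/stub_Pass may differ.
from autotest_lib.client.bin import test


class stub_Pass(test.test):
    """Trivial test that always passes; acts as a provisioning canary."""
    version = 1

    def run_once(self):
        # No checks here: getting this far means provisioning and the
        # client harness both worked, which is all calibration verifies.
        pass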
diff --git a/test_suites/control.camera-libcamera b/test_suites/control.camera-libcamera
index 76bf796..941e8d9 100644
--- a/test_suites/control.camera-libcamera
+++ b/test_suites/control.camera-libcamera
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "camera-libcamera"
 PURPOSE = "Suite to run camera tests in for libcamera build"
 TIME = "MEDIUM"
diff --git a/test_suites/control.camera-postsubmit b/test_suites/control.camera-postsubmit
index 90ae2db..b07017d 100644
--- a/test_suites/control.camera-postsubmit
+++ b/test_suites/control.camera-postsubmit
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "camera-postsubmit"
 PURPOSE = "Suite to run camera tests in postsubmit"
 TIME = "MEDIUM"
diff --git a/test_suites/control.camera-usb-qual b/test_suites/control.camera-usb-qual
new file mode 100644
index 0000000..36baeb9
--- /dev/null
+++ b/test_suites/control.camera-usb-qual
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "camera-usb-qual"
+PURPOSE = "Suite to run camera tests for USB camera qualification"
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is the camera-usb-qual test suite.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
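The @param entries in the DOC above describe keys that the suite scheduler is expected to seed into args_dict before this control file runs; the control file then adds 'name', 'add_experimental', and 'job' and hands everything to dynamic_suite.reimage_and_run(). The sketch below only illustrates the approximate shape of that dictionary: the values are made up from the DOC examples, and the exact keyword set accepted by reimage_and_run() is defined in dynamic_suite.py, not here.

# Illustrative only: approximate contents of args_dict for this suite once
# the scheduler has seeded the documented @param values and the control
# file has added its own keys. 'job' and the reimage_and_run() call exist
# only inside a real control-file environment, so they are omitted here.
args_dict = {
    'build': 'x86-mario-release/R17-1412.33.0-a1-b29',  # image under test
    'board': 'x86-mario',         # board to test on
    'pool': None,                 # None falls back to scheduling by board
    'check_hosts': True,          # require live DUTs in the lab
    'name': 'camera-usb-qual',    # suite name, from NAME above
    'add_experimental': True,
}

if __name__ == '__main__':
    for key, value in sorted(args_dict.items()):
        print('%s = %r' % (key, value))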
diff --git a/test_suites/control.cellular-cq b/test_suites/control.cellular-cq
new file mode 100644
index 0000000..5a6333c
--- /dev/null
+++ b/test_suites/control.cellular-cq
@@ -0,0 +1,25 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Cellular Team"
+NAME = "cellular-cq"
+PURPOSE = "Cellular tests to be included in cellular CQ"
+
+TIME = "Medium"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite contains cellular tests to be run in the cellular CQ.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.cellular_callbox b/test_suites/control.cellular_callbox
new file mode 100644
index 0000000..17c9f89
--- /dev/null
+++ b/test_suites/control.cellular_callbox
@@ -0,0 +1,26 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Cellular Team"
+NAME = "cellular_callbox"
+PURPOSE = "Cellular callbox tests to be included in cellular callbox CQ"
+
+TIME = "Medium"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite contains cellular callbox tests that need to be
+run in the cellular callbox CQ.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.cellular_mbim_compliance b/test_suites/control.cellular_mbim_compliance
index 20d10f2..c2cb351 100644
--- a/test_suites/control.cellular_mbim_compliance
+++ b/test_suites/control.cellular_mbim_compliance
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "cellular_mbim_compliance"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
diff --git a/test_suites/control.cellular_modem_fw b/test_suites/control.cellular_modem_fw
new file mode 100644
index 0000000..38acd1f
--- /dev/null
+++ b/test_suites/control.cellular_modem_fw
@@ -0,0 +1,31 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_modem_fw"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "suite"
+TEST_TYPE = "server"
+
+DOC = """
+Run automated cellular tests to ensure that cellular functions have not regressed.
+@param build: The name of the image to test.
+              Ex: octopus-release/R99-14469.12.0
+@param board: The board to test on.  Ex: octopus
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.cellular_ota b/test_suites/control.cellular_ota
index 8db7839..3130c46 100644
--- a/test_suites/control.cellular_ota
+++ b/test_suites/control.cellular_ota
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "cellular_ota"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
diff --git a/test_suites/control.cellular_ota_avl b/test_suites/control.cellular_ota_avl
new file mode 100644
index 0000000..a8cbf4a
--- /dev/null
+++ b/test_suites/control.cellular_ota_avl
@@ -0,0 +1,30 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "cellular_ota_avl"
+TIME = "SHORT"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "suite"
+TEST_TYPE = "server"
+
+DOC = """
+Run automated cellular tests to ensure that 4G/5G functionality has not regressed.
+@param build: The name of the image to test.
+              Ex: octopus-release/R99-14469.12.0
+@param board: The board to test on.  Ex: octopus
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 20
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['name'] = 'cellular_ota'
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.cellular_ota_flaky b/test_suites/control.cellular_ota_flaky
index 61e02ca..669e3a4 100644
--- a/test_suites/control.cellular_ota_flaky
+++ b/test_suites/control.cellular_ota_flaky
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "cellular_ota_flaky"
 TIME = "SHORT"
 TEST_CATEGORY = "Functional"
diff --git a/test_suites/control.cellular_pseudomodem b/test_suites/control.cellular_pseudomodem
index 5ca6b61..dcfdded 100644
--- a/test_suites/control.cellular_pseudomodem
+++ b/test_suites/control.cellular_pseudomodem
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "cellular_pseudomodem"
 PURPOSE = "Cellular tests that use a pseudomodem and are able to run in VMs"
 
diff --git a/test_suites/control.cft-beta b/test_suites/control.cft-beta
new file mode 100644
index 0000000..7344461
--- /dev/null
+++ b/test_suites/control.cft-beta
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dbeckett@"
+NAME = "cft-beta"
+PURPOSE = "Suite for CFT rollout in lab/CQ."
+TIME = "LONG"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite is used to flush out the CFT rollout in the lab.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 960
+args_dict['timeout_mins'] = 1440
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.chameleon_audio_perbuild b/test_suites/control.chameleon_audio_perbuild
index cabdcad..fb9fb7b 100644
--- a/test_suites/control.chameleon_audio_perbuild
+++ b/test_suites/control.chameleon_audio_perbuild
@@ -14,7 +14,7 @@
 DOC = """
 Audio tests which require chameleon and audio boards connected.
 The Audio and Chameleon boards can emulate audio jack audio activity
-in order to test the Chrome OS audio stack.
+in order to test the ChromeOS audio stack.
 
 Together with DUT and external peripherals they are bundled in
 audio-box environment for end-to-end testing. Details on go/audioboard
diff --git a/test_suites/control.chameleon_audio_unstable b/test_suites/control.chameleon_audio_unstable
index 6ae3198..1225192 100644
--- a/test_suites/control.chameleon_audio_unstable
+++ b/test_suites/control.chameleon_audio_unstable
@@ -14,7 +14,7 @@
 DOC = """
 Audio tests which require chameleon and audio boards connected.
 The Audio and Chameleon boards can emulate audio jack audio activity
-in order to test the Chrome OS audio stack.
+in order to test the ChromeOS audio stack.
 
 Together with DUT and external peripherals they are bundled in
 audio-box environment for end-to-end testing. Details on go/audioboard
diff --git a/test_suites/control.chameleon_hdmi_perbuild b/test_suites/control.chameleon_hdmi_perbuild
index 6cdc03d..0e099ab 100644
--- a/test_suites/control.chameleon_hdmi_perbuild
+++ b/test_suites/control.chameleon_hdmi_perbuild
@@ -14,7 +14,7 @@
 DOC = """
 Display tests which require Chameleon board connected via HDMI connection.
 The Chameleon board can emulate a monitor such that the test can control
-its behaviors in order to test the Chrome OS graphic stack.
+its behaviors in order to test the ChromeOS graphic stack.
 
 @param build: The name of the image to test.
               Ex: x86-mario-release/R17-1412.33.0-a1-b29
diff --git a/test_suites/control.chameleon_hdmi_stress b/test_suites/control.chameleon_hdmi_stress
index ebd7317..d837786 100644
--- a/test_suites/control.chameleon_hdmi_stress
+++ b/test_suites/control.chameleon_hdmi_stress
@@ -14,7 +14,7 @@
 DOC = """
 Display tests which require Chameleon board connected via HDMI connection.
 The Chameleon board can emulate a monitor such that the test can control
-its behaviors in order to test the Chrome OS graphic stack.
+its behaviors in order to test the ChromeOS graphic stack.
 
 @param build: The name of the image to test.
               Ex: x86-mario-release/R17-1412.33.0-a1-b29
diff --git a/test_suites/control.chameleon_hdmi_unstable b/test_suites/control.chameleon_hdmi_unstable
index fed5c98..e39f4f8 100644
--- a/test_suites/control.chameleon_hdmi_unstable
+++ b/test_suites/control.chameleon_hdmi_unstable
@@ -14,7 +14,7 @@
 DOC = """
 Display tests which require Chameleon board connected via HDMI connection.
 The Chameleon board can emulate a monitor such that the test can control
-its behaviors in order to test the Chrome OS graphic stack.
+its behaviors in order to test the ChromeOS graphic stack.
 
 @param build: The name of the image to test.
               Ex: x86-mario-release/R17-1412.33.0-a1-b29
diff --git a/test_suites/control.check_setup_cts_N b/test_suites/control.check_setup_cts_N
index 6ce8699..5310b58 100644
--- a/test_suites/control.check_setup_cts_N
+++ b/test_suites/control.check_setup_cts_N
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "check_setup_cts_N"
 PURPOSE = """
 Test moblab has correct DUT configuration for cts N suite
diff --git a/test_suites/control.check_setup_storage_qual b/test_suites/control.check_setup_storage_qual
index 84e0a4b..20d7f96 100644
--- a/test_suites/control.check_setup_storage_qual
+++ b/test_suites/control.check_setup_storage_qual
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "check_setup_storage_qual"
 PURPOSE = """
 Test moblab has correct DUT configuration for storage qual suite
diff --git a/test_suites/control.chrome-informational b/test_suites/control.chrome-informational
index a6901bd..033ce60 100644
--- a/test_suites/control.chrome-informational
+++ b/test_suites/control.chrome-informational
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "chrome-informational"
 PURPOSE = "Informational tests of chrome"
 
@@ -14,10 +14,10 @@
 DOC = """
 This suite contains informational tests that:
 
-* Test Chrome, not Chrome OS
+* Test Chrome, not ChromeOS
 * Require real hardware (for example, to test GPU process init
 or display management)
-* Should not block uprev of Chrome into Chrome OS
+* Should not block uprev of Chrome into ChromeOS
 
 Chrome tests that do not require real hardware should be
 implemented in the Chrome repository as unit_tests or browser_tests.
diff --git a/test_suites/control.cr50_stress b/test_suites/control.cr50_stress
index 6d29c7c..c65ff88 100644
--- a/test_suites/control.cr50_stress
+++ b/test_suites/control.cr50_stress
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "cr50_stress"
 PURPOSE = "Cr50 stress test functionality and devices"
 
diff --git a/test_suites/control.cros_test_platform b/test_suites/control.cros_test_platform
index 8938ca0..d316d24 100644
--- a/test_suites/control.cros_test_platform
+++ b/test_suites/control.cros_test_platform
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Test Platform Team"
+AUTHOR = "ChromeOS Test Platform Team"
 NAME = "cros_test_platform"
 PURPOSE = "Entry point for cros_test_platform execution of autotest tests."
 
diff --git a/test_suites/control.crosbolt_perf_parallels_nightly b/test_suites/control.crosbolt_perf_parallels_nightly
new file mode 100644
index 0000000..8f705e4
--- /dev/null
+++ b/test_suites/control.crosbolt_perf_parallels_nightly
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "meiring@google.com, cros-parallels@google.com"
+NAME = "crosbolt_perf_parallels_nightly"
+PURPOSE = "Runs nightly Parallels performance tests."
+
+TIME = "LONG"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+ChromeOS performance tests for Parallels.
+
+If a test takes too long, consider putting it in the
+crosbolt_perf_parallels_weekly suite.
+
+@param build: The name of the image to test.
+              E.g. x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. E.g. x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+
+# Values specified in this bug template will override default values when
+# filing bugs on tests that are a part of this suite. If left unspecified
+# the bug filer will fallback to its defaults.
+_BUG_TEMPLATE = {
+    'labels': ['Type-Bug', 'Cros-Perf-Test'],
+    'owner': 'rohitbm@chromium.org',
+    'cc': ['lafeenstra@chromium.org']
+}
+
+args_dict['max_runtime_mins'] = 1440
+args_dict['timeout_mins'] = 2880
+args_dict['name'] = NAME
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['bug_template'] = _BUG_TEMPLATE
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.crosbolt_perf_parallels_perbuild b/test_suites/control.crosbolt_perf_parallels_perbuild
new file mode 100644
index 0000000..5959107
--- /dev/null
+++ b/test_suites/control.crosbolt_perf_parallels_perbuild
@@ -0,0 +1,47 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "meiring@google.com, cros-parallels@google.com"
+NAME = "crosbolt_perf_parallels_perbuild"
+PURPOSE = "Runs per-build Parallels performance tests."
+
+TIME = "LONG"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+ChromeOS performance tests for Parallels.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+
+# Values specified in this bug template will override default values when
+# filing bugs on tests that are a part of this suite. If left unspecified
+# the bug filer will fallback to its defaults.
+_BUG_TEMPLATE = {
+    'labels': ['Type-Bug', 'Cros-Perf-Test'],
+    'owner': 'rohitbm@chromium.org',
+    'cc': []
+}
+
+args_dict['max_runtime_mins'] = 960
+args_dict['timeout_mins'] = 1440
+args_dict['file_bugs'] = False
+args_dict['name'] = NAME
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['bug_template'] = _BUG_TEMPLATE
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.crosbolt_perf_parallels_weekly b/test_suites/control.crosbolt_perf_parallels_weekly
new file mode 100644
index 0000000..99a1d08
--- /dev/null
+++ b/test_suites/control.crosbolt_perf_parallels_weekly
@@ -0,0 +1,49 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "meiring@google.com, cros-parallels@google.com"
+NAME = "crosbolt_perf_parallels_weekly"
+PURPOSE = "Runs weekly Parallels performance tests."
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+ChromeOS performance tests for Parallels.
+
+A test should be placed in the crosbolt_perf_nightly suite first, unless the
+test takes too long to run daily.
+
+@param build: The name of the image to test.
+              E.g. x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. E.g. x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+
+# Values specified in this bug template will override default values when
+# filing bugs on tests that are a part of this suite. If left unspecified
+# the bug filer will fallback to its defaults.
+_BUG_TEMPLATE = {
+    'labels': ['Type-Bug', 'Cros-Perf-Test'],
+    'owner': 'bccheng@chromium.org',
+    'cc': ['conradlo@chromium.org']
+}
+
+args_dict['max_runtime_mins'] = 1440
+args_dict['timeout_mins'] = 4320
+args_dict['name'] = NAME
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['bug_template'] = _BUG_TEMPLATE
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.cross-device b/test_suites/control.cross-device
new file mode 100644
index 0000000..56b6e1b
--- /dev/null
+++ b/test_suites/control.cross-device
@@ -0,0 +1,20 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)"
+NAME = "cross-device"
+PURPOSE = "Suite to run Cross Device tests on ChromeOS devices."
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+DOC = """ This suite runs Cross Device tests. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+dynamic_suite.reimage_and_run(**args_dict)
+
diff --git a/test_suites/control.cts b/test_suites/control.cts
index 589ab20..ce12544 100644
--- a/test_suites/control.cts
+++ b/test_suites/control.cts
@@ -27,7 +27,14 @@
     return False
   # Strip off the cheets_CTS. from the test name before comparing to args
   name = test.name[test.name.find('.') + 1:]
-  if 'tests' in args_dict and name not in args_dict['tests']:
+  # Typical examples:
+  #   name = 'arm.CtsDeqpTestCases.64'
+  #     or
+  #   name = 'x86.CtsDeqpTestCases.32'
+  # and
+  #   args_dict['tests'] = ['arm.CtsDeqpTestCases', 'x86.CtsDeqpTestCases']
+  # So, we want to include tests whose names contain any of the args as a substring.
+  if 'tests' in args_dict and all(arg not in name for arg in args_dict['tests']):
     return False
   return True
 
diff --git a/test_suites/control.cts_hardware b/test_suites/control.cts_hardware
new file mode 100644
index 0000000..0a52820
--- /dev/null
+++ b/test_suites/control.cts_hardware
@@ -0,0 +1,48 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "jaydeepmehta@chromium.org"
+NAME = "cts-hardware"
+PURPOSE = "Special hardware only test-suite for ARC++ CTS qualification."
+CRITERIA = "All tests with SUITE=cts-hardware must pass."
+
+TIME = "LONG"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+ChromeOS ARC++ tests.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+def predicate(test):
+  if not hasattr(test, 'suite') or not hasattr(test, 'name'):
+    return False
+  if NAME not in test.suite.split(','):
+    return False
+  # Strip off the cheets_CTS. from the test name before comparing to args
+  name = test.name[test.name.find('.') + 1:]
+  # Typical examples:
+  #   name = 'arm.CtsDeqpTestCases.64'
+  #     or
+  #   name = 'x86.CtsDeqpTestCases.32.ctshardware'
+  # and
+  #   args_dict['tests'] = ['arm.CtsDeqpTestCases', 'x86.CtsDeqpTestCases']
+  # So, we want to include tests whose names contain any of the args as a substring.
+  if 'tests' in args_dict and all(arg not in name for arg in args_dict['tests']):
+    return False
+  return True
+
+args_dict['name'] = NAME
+args_dict['job'] = job
+args_dict['file_bugs'] = False
+args_dict['max_runtime_mins'] = 10080
+args_dict['timeout_mins'] = 10080
+args_dict['add_experimental'] = True
+args_dict['predicate'] = predicate
+dynamic_suite.reimage_and_run(**args_dict)
+
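To make the substring matching in the predicates above concrete, here is a hedged, standalone sketch of the selection logic shared by control.cts, control.cts_hardware and control.gts in this patch. The Test class and the sample test names are hypothetical stand-ins for objects provided by the suite environment; only the filtering expression mirrors the control files.

# Standalone sketch of the substring-based test selection used by the updated
# cts/gts/cts_hardware predicates above (sample test names are hypothetical).
SUITE = 'cts-hardware'
args_dict = {'tests': ['arm.CtsDeqpTestCases', 'x86.CtsDeqpTestCases']}

class Test(object):
    def __init__(self, suite, name):
        self.suite = suite
        self.name = name

def predicate(test):
    if not hasattr(test, 'suite') or not hasattr(test, 'name'):
        return False
    if SUITE not in test.suite.split(','):
        return False
    # Strip the leading module prefix, e.g. 'cheets_CTS_R.arm.X.64' -> 'arm.X.64'.
    name = test.name[test.name.find('.') + 1:]
    # Keep the test if any requested arg is a substring of the stripped name.
    if 'tests' in args_dict and all(arg not in name for arg in args_dict['tests']):
        return False
    return True

tests = [Test('cts-hardware', 'cheets_CTS_R.arm.CtsDeqpTestCases.64.ctshardware'),
         Test('cts-hardware', 'cheets_CTS_R.arm.CtsMediaTestCases.32')]
print([t.name for t in tests if predicate(t)])
# Only the CtsDeqpTestCases entry passes the filter.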
diff --git a/test_suites/control.cuj b/test_suites/control.cuj
new file mode 100644
index 0000000..73cd464
--- /dev/null
+++ b/test_suites/control.cuj
@@ -0,0 +1,24 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Performance Metrics Team (chromeos-perfmetrics-eng@google.com)"
+NAME = "cuj"
+PURPOSE = "Suite to run CUJ tests regularly on ChromeOS devices."
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """ This suite runs CUJ tests. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 960
+args_dict['timeout_mins'] = 1440
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.debug_kernel_testing b/test_suites/control.debug_kernel_testing
index 44a9df6..c71673d 100644
--- a/test_suites/control.debug_kernel_testing
+++ b/test_suites/control.debug_kernel_testing
@@ -2,9 +2,9 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "mwiitala, Chrome OS Kernel Team"
+AUTHOR = "mwiitala, ChromeOS Kernel Team"
 NAME = "debug_kernel_testing"
-PURPOSE = "Suite to run Syzkaller tests on Chrome OS devices."
+PURPOSE = "Suite to run Syzkaller tests on ChromeOS devices."
 
 TIME = "SHORT"
 TEST_CATEGORY = "General"
diff --git a/test_suites/control.distributed_lab_qual_faft b/test_suites/control.distributed_lab_qual_faft
new file mode 100644
index 0000000..9598aae
--- /dev/null
+++ b/test_suites/control.distributed_lab_qual_faft
@@ -0,0 +1,26 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Distributed Fleet Platform"
+NAME = "distributed_lab_qual_faft"
+PURPOSE = "Test Distributed Lab stability with faft tests"
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite is used to qualify new Distributed Lab containers with faft tests.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 20
+args_dict['timeout_mins'] = 1440
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.dpanel-end2end_per-build b/test_suites/control.dpanel-end2end_per-build
new file mode 100644
index 0000000..62cbc53
--- /dev/null
+++ b/test_suites/control.dpanel-end2end_per-build
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "rzakarian"
+NAME = "dpanel-end2end_per-build"
+PURPOSE = "DPanel end2end tests"
+
+TIME = "LONG"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is the part of the DPanel end2end test suite which should run once per build.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 960
+args_dict['timeout_mins'] = 1440
+args_dict['file_bugs'] = False
+args_dict['name'] = 'dpanel-end2end_per-build'
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+# No bug template is set: _BUG_TEMPLATE is not defined here and file_bugs is False.
+
+dynamic_suite.reimage_and_run(**args_dict)
\ No newline at end of file
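If bug filing were later enabled for this suite, a _BUG_TEMPLATE definition would be needed before the reimage_and_run call. A hypothetical example, modeled on the crosbolt suites above; the values are placeholders, not part of the original change.

# Hypothetical bug template for control.dpanel-end2end_per-build, mirroring the
# crosbolt suites above; placeholder values only.
_BUG_TEMPLATE = {
    'labels': ['Type-Bug'],
    'owner': '',
    'cc': [],
}
args_dict['bug_template'] = _BUG_TEMPLATE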
diff --git a/test_suites/control.drivefs-cq b/test_suites/control.drivefs-cq
new file mode 100644
index 0000000..fdffe22
--- /dev/null
+++ b/test_suites/control.drivefs-cq
@@ -0,0 +1,36 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "chromeos-files-syd@google.com"
+NAME = "drivefs-cq"
+PURPOSE = "Suite to verify DriveFS functionality to ensure successful uprev"
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite verifies DriveFS functionality using tests that have a Tast
+group attribute of drivefs-cq. These tests should run as a presubmit
+check only when a new ebuild is uprevved in the drivefs and
+drivefs-ipc packages.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 20
+args_dict['timeout_mins'] = 1440
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.dummy b/test_suites/control.dummy
deleted file mode 100644
index 689adbf..0000000
--- a/test_suites/control.dummy
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "dummy"
-PURPOSE = "To be run while testing the infrastructure and test harness."
-
-TIME = "SHORT"
-TEST_CATEGORY = "Dummy"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """
-This is a dummy test suite.  It runs dummy tests that always pass or always fail
-so that we can test result gathering and reporting mechanisms.
-"""
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-args_dict['add_experimental'] = True
-args_dict['name'] = 'dummy'
-args_dict['job'] = job
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.dummy_server b/test_suites/control.dummy_server
index 5f8cc71..5e3ace8 100644
--- a/test_suites/control.dummy_server
+++ b/test_suites/control.dummy_server
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "dummy_server"
 PURPOSE = "To be run while testing the infrastructure and test harness."
 
diff --git a/test_suites/control.dummy_server_nossp b/test_suites/control.dummy_server_nossp
index fdb1ead..daa26e4 100644
--- a/test_suites/control.dummy_server_nossp
+++ b/test_suites/control.dummy_server_nossp
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "dummy_server_nossp"
 PURPOSE = "To be run while testing the infrastructure and test harness."
 
diff --git a/test_suites/control.dummyflake b/test_suites/control.dummyflake
index b149d76..5a010bf 100644
--- a/test_suites/control.dummyflake
+++ b/test_suites/control.dummyflake
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "dummyflake"
 PURPOSE = "Test basic, required functionality."
 
diff --git a/test_suites/control.experimental b/test_suites/control.experimental
index b418426..3beed33 100644
--- a/test_suites/control.experimental
+++ b/test_suites/control.experimental
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "experimental"
 PURPOSE = "Staging ground for BVT candidates"
 
diff --git a/test_suites/control.faft_bios b/test_suites/control.faft_bios
index 9f5e0c5..9408f4c 100644
--- a/test_suites/control.faft_bios
+++ b/test_suites/control.faft_bios
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_bios"
 PURPOSE = "Test hard-to-automate firmware scenarios."
 
@@ -44,10 +44,21 @@
     'title': None,
 }
 
+def predicate(test):
+  if not hasattr(test, 'suite') or not hasattr(test, 'name'):
+    return False
+  if NAME not in test.suite.split(','):
+    return False
+  name = test.name
+  if 'tests' in args_dict and name not in args_dict['tests']:
+    return False
+  return True
+
 args_dict['file_bugs'] = False
 args_dict['name'] = 'faft_bios'
 args_dict['job'] = job
 args_dict['add_experimental'] = True
 args_dict['bug_template'] = _BUG_TEMPLATE
+args_dict['predicate'] = predicate
 
 dynamic_suite.reimage_and_run(**args_dict)
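For contrast with the substring-based selection used by the cts/gts suites, the predicate added to control.faft_bios above keeps only tests whose full name matches one of the requested args exactly. A minimal sketch under the same assumptions as the earlier example; the sample test name is hypothetical, and `args_dict` comes from the control-file environment in the real suite.

# Sketch of the exact-name filtering added to control.faft_bios above
# (sample test name hypothetical).
args_dict = {'tests': ['firmware_CorruptFwSigA']}

def predicate(test):
    if not hasattr(test, 'suite') or not hasattr(test, 'name'):
        return False
    if 'faft_bios' not in test.suite.split(','):
        return False
    # Unlike the cts/gts predicates, the full test name must match an arg exactly.
    if 'tests' in args_dict and test.name not in args_dict['tests']:
        return False
    return True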
diff --git a/test_suites/control.faft_bios_ec3po b/test_suites/control.faft_bios_ec3po
deleted file mode 100644
index f9fc688..0000000
--- a/test_suites/control.faft_bios_ec3po
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "faft_bios_ec3po"
-PURPOSE = "Test hard-to-automate firmware scenarios using EC-3PO changes."
-
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """
-This is the same suite as faft_bios, but instead uses the EC-3PO console
-interpreter inside of servod.  This suite should be removed when satisfied with
-the results.
-
-This is a faft (FULLY AUTOMATED FIRMWARE TEST) suite.
-The tests verfiy that valid boot scenarios progress properly (with state
-progress checks) and that error scenarios (corrupted blobs) are caught as
-expected. Some of these test failures should close the tree as they may imply
-that the system is unbootable and further tests will only become hung or
-blocked. Other tests verify all of the features (some of them security
-related) are functioning.
-
-@param build: The name of the image to test.
-              Ex: x86-mario-release/R17-1412.33.0-a1-b29
-@param board: The board to test on.  Ex: x86-mario
-@param pool: The pool of machines to utilize for scheduling. If pool=None
-             board is used.
-@param check_hosts: require appropriate live hosts to exist in the lab.
-@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
-"""
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-# Values specified in this bug template will override default values when
-# filing bugs on tests that are a part of this suite. If left unspecified
-# the bug filer will fallback to it's defaults.
-_BUG_TEMPLATE = {
-    'labels': ['FW-labblocker', 'Restrict-View-Google'],
-    'owner': 'aaboagye@chromium.org',
-    'status': None,
-    'summary': None,
-    'title': None,
-}
-
-args_dict['file_bugs'] = True
-args_dict['name'] = 'faft_bios_ec3po'
-args_dict['job'] = job
-args_dict['add_experimental'] = True
-args_dict['bug_template'] = _BUG_TEMPLATE
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.faft_bios_tot b/test_suites/control.faft_bios_tot
index 6776143..484baa6 100644
--- a/test_suites/control.faft_bios_tot
+++ b/test_suites/control.faft_bios_tot
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_bios_tot"
 PURPOSE = "Test hard-to-automate firmware scenarios on Tot."
 
diff --git a/test_suites/control.faft_ccd b/test_suites/control.faft_ccd
new file mode 100644
index 0000000..6f251f2
--- /dev/null
+++ b/test_suites/control.faft_ccd
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "faft_ccd"
+PURPOSE = "Test suite to verify basic ccd functionality."
+
+TIME = "SHORT"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is the faft_ccd test suite.
+
+This will verify CCD functionality.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = 'faft_ccd'
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.faft_cr50_crypto b/test_suites/control.faft_cr50_crypto
index ad9d59f..aa5048c 100644
--- a/test_suites/control.faft_cr50_crypto
+++ b/test_suites/control.faft_cr50_crypto
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_cr50_crypto"
 PURPOSE = "Experimental verify cr50 crypto."
 
diff --git a/test_suites/control.faft_cr50_experimental b/test_suites/control.faft_cr50_experimental
index 6e0a859..3834ee1 100644
--- a/test_suites/control.faft_cr50_experimental
+++ b/test_suites/control.faft_cr50_experimental
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_cr50_experimental"
 PURPOSE = "Experimental suite to test cr50 stress tests."
 
diff --git a/test_suites/control.faft_cr50_prepvt b/test_suites/control.faft_cr50_prepvt
index cb1d016..73c2c1a 100644
--- a/test_suites/control.faft_cr50_prepvt
+++ b/test_suites/control.faft_cr50_prepvt
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_cr50_prepvt"
 PURPOSE = "Suite to test cr50 prePVT images."
 
diff --git a/test_suites/control.faft_cr50_prepvt_tast b/test_suites/control.faft_cr50_prepvt_tast
deleted file mode 100644
index 5a156e9..0000000
--- a/test_suites/control.faft_cr50_prepvt_tast
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "faft_cr50_prepvt_tast"
-PURPOSE = "Suite to test Cr50 (pvt) firmware functionality using Tast framework."
-
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """
-The suite is the Tast counterpart of suite:faft_cr50_prepvt.
-
-The reason why we need a separate suite is because "test_that" command
-cannot run Tast tests if devserver is not presented (crbug.com/948805).
-Placing Tast tests on this suite can avoid possible regression of
-suite:faft_cr50_prepvt.
-
-Note that the prevpt and pvt suites are identical.
-The two names are to make searches for faft_cr50_p* include Tast results.
-"""
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-args_dict['add_experimental'] = True
-args_dict['max_runtime_mins'] = 60
-args_dict['name'] = NAME
-args_dict['job'] = job
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.faft_cr50_pvt b/test_suites/control.faft_cr50_pvt
index 151029d..a7f70a2 100644
--- a/test_suites/control.faft_cr50_pvt
+++ b/test_suites/control.faft_cr50_pvt
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_cr50_pvt"
 PURPOSE = "Experimental suite to test cr50 stress tests."
 
diff --git a/test_suites/control.faft_cr50_pvt_tast b/test_suites/control.faft_cr50_pvt_tast
deleted file mode 100644
index 3c8a2bb..0000000
--- a/test_suites/control.faft_cr50_pvt_tast
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "faft_cr50_pvt_tast"
-PURPOSE = "Suite to test Cr50 (prepvt) firmware functionality using Tast framework."
-
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """
-The suite is the Tast counterpart of suite:faft_cr50_pvt.
-
-The reason why we need a separate suite is because "test_that" command
-cannot run Tast tests if devserver is not presented (crbug.com/948805).
-Placing Tast tests on this suite can avoid possible regression of
-suite:faft_cr50_pvt.
-
-Note that the prevpt and pvt suites are identical.
-The two names are to make searches for faft_cr50_p* include Tast results.
-"""
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-args_dict['add_experimental'] = True
-args_dict['max_runtime_mins'] = 60
-args_dict['name'] = NAME
-args_dict['job'] = job
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.faft_cr50_tot b/test_suites/control.faft_cr50_tot
index 73e6fc5..bcd6c72 100644
--- a/test_suites/control.faft_cr50_tot
+++ b/test_suites/control.faft_cr50_tot
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_cr50_tot"
 PURPOSE = "Suite to test TOT cr50 images."
 
diff --git a/test_suites/control.faft_detachable b/test_suites/control.faft_detachable
new file mode 100644
index 0000000..592b606
--- /dev/null
+++ b/test_suites/control.faft_detachable
@@ -0,0 +1,47 @@
+# Copyright 2022 The ChromiumOS Authors.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "faft_detachable"
+PURPOSE = "New experimental tests for detachables."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+All tests in this suite should also be in faft_unstable. The purpose of this
+suite is to allow a small set of tests to be scheduled into the DUT_QUOTA_POOL.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+# Values specified in this bug template will override default values when
+# filing bugs on tests that are a part of this suite. If left unspecified
+# the bug filer will fallback to it's defaults.
+_BUG_TEMPLATE = {
+    'labels': ['FW-labblocker', 'Restrict-View-Google'],
+    'owner': '',
+    'status': None,
+    'summary': None,
+    'title': None,
+}
+
+args_dict['file_bugs'] = False
+args_dict['name'] = 'faft_detachable'
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['bug_template'] = _BUG_TEMPLATE
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.faft_dev b/test_suites/control.faft_dev
index 3b1d4b5..0ba9878 100644
--- a/test_suites/control.faft_dev
+++ b/test_suites/control.faft_dev
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_dev"
 PURPOSE = "Test hard-to-automate firmware and ec scenarios."
 
diff --git a/test_suites/control.faft_ec b/test_suites/control.faft_ec
index 22042cb..d7482bb 100644
--- a/test_suites/control.faft_ec
+++ b/test_suites/control.faft_ec
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_ec"
 PURPOSE = "Test hard-to-automate firmware and ec scenarios."
 
@@ -13,13 +13,17 @@
 
 DOC = """
 This is a faft (FULLY AUTOMATED FIRMWARE TEST) suite. The tests in this suite
-verify the Chrome OS EC operation, that valid boot scenarios progress properly
+verify the ChromeOS EC operation, that valid boot scenarios progress properly
 (with state progress checks) and that error scenarios (corrupted blobs) are
 caught as expected. Some of these test failures should close the tree as they
 may imply that the system is unbootable and further tests will only become
 hung or blocked. Other tests verify all of the features (some of them security
 related) are functioning.
 
+Tests in the faft_ec suite are suitable for bring-up scenarios. Tests that
+are for production machines only should go in suite:faft_ec_fw_qual instead.
+Note that all tests in this suite should also be in suite:faft_ec_fw_qual.
+
 @param build: The name of the image to test.
               Ex: x86-mario-release/R17-1412.33.0-a1-b29
 @param board: The board to test on.  Ex: x86-mario
diff --git a/test_suites/control.faft_ec3po b/test_suites/control.faft_ec3po
deleted file mode 100644
index deed311..0000000
--- a/test_suites/control.faft_ec3po
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "faft_ec3po"
-PURPOSE = "Test hard-to-automate firmware and ec scenarios using EC-3PO."
-
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """
-This is the same suite as faft_ec, but instead uses the EC-3PO console
-interpreter inside of servod.  This suite should be removed when satisfied with
-the results.
-
-This is a faft (FULLY AUTOMATED FIRMWARE TEST) suite. The tests in this suite
-verify the Chrome OS EC operation, that valid boot scenarios progress properly
-(with state progress checks) and that error scenarios (corrupted blobs) are
-caught as expected. Some of these test failures should close the tree as they
-may imply that the system is unbootable and further tests will only become
-hung or blocked. Other tests verify all of the features (some of them security
-related) are functioning.
-
-@param build: The name of the image to test.
-              Ex: x86-mario-release/R17-1412.33.0-a1-b29
-@param board: The board to test on.  Ex: x86-mario
-@param pool: The pool of machines to utilize for scheduling. If pool=None
-             board is used.
-@param check_hosts: require appropriate live hosts to exist in the lab.
-@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
-"""
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-# Values specified in this bug template will override default values when
-# filing bugs on tests that are a part of this suite. If left unspecified
-# the bug filer will fallback to it's defaults.
-_BUG_TEMPLATE = {
-    'labels': ['FW-labblocker', 'Restrict-View-Google'],
-    'owner': 'aaboagye@chromium.org',
-    'status': None,
-    'summary': None,
-    'title': None,
-}
-
-args_dict['file_bugs'] = True
-args_dict['name'] = 'faft_ec3po'
-args_dict['job'] = job
-args_dict['add_experimental'] = True
-args_dict['bug_template'] = _BUG_TEMPLATE
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.faft_ec_fw_qual b/test_suites/control.faft_ec_fw_qual
index e3eccf2..6c96848 100644
--- a/test_suites/control.faft_ec_fw_qual
+++ b/test_suites/control.faft_ec_fw_qual
@@ -14,6 +14,9 @@
 DOC = """
 This test suite verifies if the EC firmware is ready for production
 release.
+
+Tests intended for both production release and bring-up should be in
+both suite:faft_ec and suite:faft_ec_fw_qual.
 """
 
 import common
diff --git a/test_suites/control.faft_ec_tot b/test_suites/control.faft_ec_tot
index 410c658..f21b5bf 100644
--- a/test_suites/control.faft_ec_tot
+++ b/test_suites/control.faft_ec_tot
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_ec_tot"
 PURPOSE = "Test hard-to-automate firmware and ec scenarios on Tot."
 
@@ -16,7 +16,7 @@
 Tot instead of from the firmware branches.
 
 This is a faft (FULLY AUTOMATED FIRMWARE TEST) suite. The tests in this suite
-verify the Chrome OS EC operation, that valid boot scenarios progress properly
+verify the ChromeOS EC operation, that valid boot scenarios progress properly
 (with state progress checks) and that error scenarios (corrupted blobs) are
 caught as expected. Some of these test failures should close the tree as they
 may imply that the system is unbootable and further tests will only become
diff --git a/test_suites/control.faft_experimental b/test_suites/control.faft_experimental
index 55d35a1..4d52097 100644
--- a/test_suites/control.faft_experimental
+++ b/test_suites/control.faft_experimental
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_experimental"
 PURPOSE = "Experimental suite to test faft_bios and faft_ec."
 
diff --git a/test_suites/control.faft_flashrom b/test_suites/control.faft_flashrom
index 24106c0..00cfae1 100644
--- a/test_suites/control.faft_flashrom
+++ b/test_suites/control.faft_flashrom
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_flashrom"
 PURPOSE = "Test flashrom and chromeos-firmwareupdate"
 
diff --git a/test_suites/control.faft_lv1 b/test_suites/control.faft_lv1
index 942a397..b991bc2 100644
--- a/test_suites/control.faft_lv1
+++ b/test_suites/control.faft_lv1
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_lv1"
 PURPOSE = "Test hard-to-automate 'normal' mode firmware scenarios."
 
@@ -14,7 +14,7 @@
 DOC = """
 
 This test suite runs FAFT (Fully Automated Firmware Test) for BIOS that should
-all pass and that verifies the BIOS fit Chrome OS verified-boot requirements.
+all pass and that verifies the BIOS fit ChromeOS verified-boot requirements.
 
 All BIOS tests are categorized into 5 test levels:
   Level-1: Basic BIOS tests which verify basic vboot functions.
diff --git a/test_suites/control.faft_lv2 b/test_suites/control.faft_lv2
index c72aea9..d856c5d 100644
--- a/test_suites/control.faft_lv2
+++ b/test_suites/control.faft_lv2
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_lv2"
 PURPOSE = "Test hard-to-automate 'normal' mode firmware scenarios."
 
@@ -14,7 +14,7 @@
 DOC = """
 
 This test suite runs FAFT (Fully Automated Firmware Test) for BIOS that should
-all pass and that verifies the BIOS fit Chrome OS verified-boot requirements.
+all pass and that verifies the BIOS fit ChromeOS verified-boot requirements.
 
 All BIOS tests are categorized into 5 test levels:
   Level-1: Basic BIOS tests which verify basic vboot functions.
diff --git a/test_suites/control.faft_lv3 b/test_suites/control.faft_lv3
index 84707e1..d493b3d 100644
--- a/test_suites/control.faft_lv3
+++ b/test_suites/control.faft_lv3
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_lv3"
 PURPOSE = "Test hard-to-automate 'normal' mode firmware scenarios."
 
@@ -14,7 +14,7 @@
 DOC = """
 
 This test suite runs FAFT (Fully Automated Firmware Test) for BIOS that should
-all pass and that verifies the BIOS fit Chrome OS verified-boot requirements.
+all pass and that verifies the BIOS fit ChromeOS verified-boot requirements.
 
 All BIOS tests are categorized into 5 test levels:
   Level-1: Basic BIOS tests which verify basic vboot functions.
diff --git a/test_suites/control.faft_lv4 b/test_suites/control.faft_lv4
index fb97155..0ac5555 100644
--- a/test_suites/control.faft_lv4
+++ b/test_suites/control.faft_lv4
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_lv4"
 PURPOSE = "Test hard-to-automate 'normal' mode firmware scenarios."
 
@@ -14,7 +14,7 @@
 DOC = """
 
 This test suite runs FAFT (Fully Automated Firmware Test) for BIOS that should
-all pass and that verifies the BIOS fit Chrome OS verified-boot requirements.
+all pass and that verifies the BIOS fit ChromeOS verified-boot requirements.
 
 All BIOS tests are categorized into 5 test levels:
   Level-1: Basic BIOS tests which verify basic vboot functions.
diff --git a/test_suites/control.faft_lv5 b/test_suites/control.faft_lv5
index 41cf6ee..b37729f 100644
--- a/test_suites/control.faft_lv5
+++ b/test_suites/control.faft_lv5
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_lv5"
 PURPOSE = "Test hard-to-automate 'normal' mode firmware scenarios."
 
@@ -14,7 +14,7 @@
 DOC = """
 
 This test suite runs FAFT (Fully Automated Firmware Test) for BIOS that should
-all pass and that verifies the BIOS fit Chrome OS verified-boot requirements.
+all pass and that verifies the BIOS fit ChromeOS verified-boot requirements.
 
 All BIOS tests are categorized into 5 test levels:
   Level-1: Basic BIOS tests which verify basic vboot functions.
diff --git a/test_suites/control.faft_normal b/test_suites/control.faft_normal
index aea72bd..6e1f78f 100644
--- a/test_suites/control.faft_normal
+++ b/test_suites/control.faft_normal
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_normal"
 PURPOSE = "Test hard-to-automate 'normal' mode firmware scenarios."
 
diff --git a/test_suites/control.faft_pd b/test_suites/control.faft_pd
index c3b3606..064786b 100644
--- a/test_suites/control.faft_pd
+++ b/test_suites/control.faft_pd
@@ -2,9 +2,9 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_pd"
-PURPOSE = "Stress test hard-to-automate USB PD behaviors."
+PURPOSE = "Stress test hard-to-automate USB PD behaviors and ULP mode."
 
 TIME = "LONG"
 TEST_CATEGORY = "Stress"
@@ -14,7 +14,9 @@
 DOC = """
 This is the faft_pd test suite. The tests in this suites verify the behaviors
 of the USB PD stack, like establishing a PD contract, hard and soft resets,
-data role swap, power role swap, VBUS voltage negotiation, etc.
+data role swap, power role swap, VBUS voltage negotiation, etc. In addition,
+the suite includes ULP mode related tests, since they have the same servo
+setup requirements.
 
 This suite only tests a single Type-C port connecting to the PDTester device
 (Plankton or Servo v4). For DUT with multiple Type-C ports, should run this
diff --git a/test_suites/control.faft_setup b/test_suites/control.faft_setup
index ed5f3b4..58816c9 100644
--- a/test_suites/control.faft_setup
+++ b/test_suites/control.faft_setup
@@ -2,9 +2,9 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_setup"
-PURPOSE = "Suite to setup and run sanity checks to ensure DUT is ready for FAFT"
+PURPOSE = "Suite to setup and run validity checks to ensure DUT is ready for FAFT"
 
 TIME = "SHORT"
 TEST_CATEGORY = "General"
diff --git a/test_suites/control.faft_smoke b/test_suites/control.faft_smoke
index 88c43d6..193d5f4 100644
--- a/test_suites/control.faft_smoke
+++ b/test_suites/control.faft_smoke
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_smoke"
 PURPOSE = "Smoke test of new firmware."""
 TIME = "SHORT"
diff --git a/test_suites/control.faft_unstable b/test_suites/control.faft_unstable
new file mode 100644
index 0000000..0887995
--- /dev/null
+++ b/test_suites/control.faft_unstable
@@ -0,0 +1,37 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "faft_unstable"
+PURPOSE = "Unstable suite to test faft_bios and faft_ec."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is the faft_unstable test suite, for AP (BIOS) and EC firmware.
+
+This suite is meant to be a proving ground for AP/EC firmware tests before they
+are promoted to the actual faft_bios / faft_ec suites.  Flaky tests should also
+be moved here while they're being worked on.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = 'faft_unstable'
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.faft_wilco b/test_suites/control.faft_wilco
index 28ce4df..e3b7ba8 100644
--- a/test_suites/control.faft_wilco
+++ b/test_suites/control.faft_wilco
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "faft_wilco"
 PURPOSE = "Test Wilco-specific firmware behavior."
 
diff --git a/test_suites/control.fingerprint b/test_suites/control.fingerprint
index 025a3a3..7e9e3f4 100644
--- a/test_suites/control.fingerprint
+++ b/test_suites/control.fingerprint
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "fingerprint"
 PURPOSE = "Runs fingerprint firmware tests."
 
diff --git a/test_suites/control.fingerprint-cq b/test_suites/control.fingerprint-cq
new file mode 100644
index 0000000..a4f8b2a
--- /dev/null
+++ b/test_suites/control.fingerprint-cq
@@ -0,0 +1,24 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Fingerprint Team"
+NAME = "fingerprint-cq"
+PURPOSE = "Run fingerprint-related critical tests."
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """ This suite runs fingerprint-related critical tests. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.fingerprint-mcu b/test_suites/control.fingerprint-mcu
deleted file mode 100644
index e32fefe..0000000
--- a/test_suites/control.fingerprint-mcu
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "yichengli@chromium.org"
-NAME = "fingerprint-mcu"
-PURPOSE = "Verify fingerprint MCU unittests pass on device."
-
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """ This suite runs fingerprint MCU unittests on board. """
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-args_dict['add_experimental'] = True
-args_dict['max_runtime_mins'] = 60
-args_dict['name'] = NAME
-args_dict['job'] = job
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.fingerprint-mcu-dragonclaw b/test_suites/control.fingerprint-mcu-dragonclaw
new file mode 100644
index 0000000..296e60e
--- /dev/null
+++ b/test_suites/control.fingerprint-mcu-dragonclaw
@@ -0,0 +1,24 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "yichengli@chromium.org"
+NAME = "fingerprint-mcu-dragonclaw"
+PURPOSE = "Verify fingerprint MCU unittests pass on device."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """ This suite runs fingerprint MCU unittests on Dragonclaw boards. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.fleet_firmware_update b/test_suites/control.fleet_firmware_update
new file mode 100644
index 0000000..7465b03
--- /dev/null
+++ b/test_suites/control.fleet_firmware_update
@@ -0,0 +1,23 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "xianuowang"
+NAME = "fleet_firmware_update"
+PURPOSE = "Test update OS bundled firmware for lab firmware uprev"
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "Infra"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """This test suite runs fleet_FirmwareUpdate test"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 60
+args_dict['timeout_mins'] = 120
+args_dict['name'] = NAME
+args_dict['job'] = job
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.graphics b/test_suites/control.graphics
index 451ca3b..b2123ac 100644
--- a/test_suites/control.graphics
+++ b/test_suites/control.graphics
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "graphics"
 PURPOSE = "Test graphics functionality."
 
@@ -19,7 +19,7 @@
 
 Tests should be run in order of increasing likeness of hanging the system
 graphics_GLAPICheck                     < 1 minute runtime
-graphics_Sanity                         < 1 minute
+graphics_Check                          < 1 minute
 graphics_SanAngeles                       3 minutes
 graphics_GLBench                          5 minutes
 graphics_Piglit                      10..15 minutes on OpenGL/Intel only
diff --git a/test_suites/control.gts b/test_suites/control.gts
index 60bea46..81054ab 100644
--- a/test_suites/control.gts
+++ b/test_suites/control.gts
@@ -27,7 +27,14 @@
     return False
   # Strip off the cheets_GTS. from the test name before comparing to args
   name = test.name[test.name.find('.') + 1:]
-  if 'tests' in args_dict and name not in args_dict['tests']:
+  # Typical examples:
+  #   name = 'arm.CtsDeqpTestCases.64'
+  #     or
+  #   name = 'x86.CtsDeqpTestCases.32'
+  # and
+  #   args_dict['tests'] = ['arm.CtsDeqpTestCases', 'x86.CtsDeqpTestCases']
+  # So, we want to include tests whose name contains any of the args as a subpart.
+  if 'tests' in args_dict and all(arg not in name for arg in args_dict['tests']):
     return False
   return True
 
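For reference, the control.gts change above relaxes the name filter: a test is now kept when any entry in args_dict['tests'] appears as a substring of the prefix-stripped test name, instead of requiring an exact match. A standalone sketch of that predicate (the helper name and sample names are illustrative only):

    def keep_test(full_name, requested):
        # Mirror the control.gts predicate: strip the 'cheets_GTS.' prefix,
        # then keep the test if any requested entry is contained in its name.
        name = full_name[full_name.find('.') + 1:]
        if requested and all(arg not in name for arg in requested):
            return False
        return True

    requested = ['arm.CtsDeqpTestCases', 'x86.CtsDeqpTestCases']
    assert keep_test('cheets_GTS.arm.CtsDeqpTestCases.64', requested)
    assert keep_test('cheets_GTS.x86.CtsDeqpTestCases.32', requested)
    assert not keep_test('cheets_GTS.arm.CtsGraphicsTestCases', requested)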
diff --git a/test_suites/control.hardware_storagequal_mini_soak b/test_suites/control.hardware_storagequal_mini_soak
new file mode 100644
index 0000000..8ee9180
--- /dev/null
+++ b/test_suites/control.hardware_storagequal_mini_soak
@@ -0,0 +1,35 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'abergman'
+NAME = 'storage_qual_mini_soak'
+TIME = 'LENGTHY'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'suite'
+TEST_TYPE = 'Server'
+
+DOC = """
+This dynamic suite runs the minimal storage qualification tests.
+This test suite is needed for comparison with storage qual v2 test.
+
+@param build: The name of the image to test.
+          Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+@param file_bugs: If True your suite will file bugs on failures.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['timeout_mins'] = 720 # entire suite timeout is 12 hours
+args_dict['max_runtime_mins'] = 720 # one test timeout is 12 hours
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
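Several suites in this change pair a *_mins setting with a human-readable comment (12 hours above, 2 days and 7 days further below). A small helper, illustrative only and not part of any control file, that sanity-checks those comments against the minute values:

    def minutes_to_human(mins):
        # Convert a timeout_mins/max_runtime_mins value to a short duration string.
        hours, mins = divmod(mins, 60)
        days, hours = divmod(hours, 24)
        parts = [('%dd' % days) if days else '',
                 ('%dh' % hours) if hours else '',
                 ('%dm' % mins) if mins else '']
        return ' '.join(p for p in parts if p) or '0m'

    assert minutes_to_human(720) == '12h'    # storage_qual_mini_soak above
    assert minutes_to_human(2880) == '2d'    # power_qual_fast / power_qual_full below
    assert minutes_to_human(10080) == '7d'   # performance_cuj below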
diff --git a/test_suites/control.hps_perf_perbuild b/test_suites/control.hps_perf_perbuild
new file mode 100644
index 0000000..b358204
--- /dev/null
+++ b/test_suites/control.hps_perf_perbuild
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "mblsha@chromium.org"
+NAME = "hps_perf_perbuild"
+PURPOSE = "Performance tests for HPS"
+TIME = "SHORT"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+Performance test suite for running on custom HPS dev board setup.
+
+@param build: The name of the image to test.
+@param board: The board to test on.
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+@param file_bugs: If True your suite will file bugs on failures.
+@param max_run_time: Amount of time each test should run, in minutes.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 120
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.hps_sweetberry_perf_perbuild b/test_suites/control.hps_sweetberry_perf_perbuild
new file mode 100644
index 0000000..09e87de
--- /dev/null
+++ b/test_suites/control.hps_sweetberry_perf_perbuild
@@ -0,0 +1,34 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "eunicesun@chromium.org"
+NAME = "hps_sweetberry_perf_perbuild"
+PURPOSE = "Performance tests for HPS (Sweetberry-specific)"
+TIME = "SHORT"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+Performance test suite for running on custom HPS dev board setup.
+Requires Sweetberry to be connected.
+
+@param build: The name of the image to test.
+@param board: The board to test on.
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+@param file_bugs: If True your suite will file bugs on failures.
+@param max_run_time: Amount of time each test should run, in minutes.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 120
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.hwqual b/test_suites/control.hwqual
index ac47032..8559e39 100644
--- a/test_suites/control.hwqual
+++ b/test_suites/control.hwqual
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Jean Zheng <jean@google.com>/Chrome OS PSO"
+AUTHOR = "Jean Zheng <jean@google.com>/ChromeOS PSO"
 NAME = "hwqual"
 TIME = "LONG"
 TEST_CATEGORY = "General"
diff --git a/test_suites/control.infra_multi_duts_with_android b/test_suites/control.infra_multi_duts_with_android
new file mode 100644
index 0000000..b2c4d3a
--- /dev/null
+++ b/test_suites/control.infra_multi_duts_with_android
@@ -0,0 +1,23 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "xianuowang"
+NAME = "infra_multi_duts_with_android"
+PURPOSE = "Test multi-DUTs infra support with Android devices."
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "Infra"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """This test suite runs infra_MultiDutsWithAndroid test"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 60
+args_dict['timeout_mins'] = 120
+args_dict['name'] = NAME
+args_dict['job'] = job
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.infra_qual_cellular b/test_suites/control.infra_qual_cellular
new file mode 100644
index 0000000..416d5fe
--- /dev/null
+++ b/test_suites/control.infra_qual_cellular
@@ -0,0 +1,26 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "andrewlamb"
+NAME = "infra_qual_cellular"
+PURPOSE = "Cellular suite to certify changes in autotest infra prior prod."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite is intended to run a set of cellular tests that will certify major
+autotest infrastructure changes/updates do not break major test libraries, such
+as servo, network, platform, bluetooth, etc.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.infra_qual_wifi b/test_suites/control.infra_qual_wifi
new file mode 100644
index 0000000..dda05e7
--- /dev/null
+++ b/test_suites/control.infra_qual_wifi
@@ -0,0 +1,26 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "andrewlamb"
+NAME = "infra_qual_wifi"
+PURPOSE = "Wifi suite to certify changes in autotest infra prior prod."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite is intended to run a set of wifi tests that will certify major
+autotest infrastructure changes/updates do not break major test libraries, such
+as servo, network, platform, bluetooth, etc.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.inputs-upstream b/test_suites/control.inputs-upstream
new file mode 100644
index 0000000..7af6401
--- /dev/null
+++ b/test_suites/control.inputs-upstream
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "shengjun, msta"
+NAME = "input-tools-upstream"
+PURPOSE = "This suite is a baseline set of tests that act as a pre-cq of Essential Inputs."
+TIME = "LONG"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite acts as a pre-cq specific to ChromeOS Essential Inputs related changes and is
+only invoked via a flag in CLs.
+
+@param build: The name of the image to test.
+@param board: The board to test on.
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+@param file_bugs: If True your suite will file bugs on failures.
+@param max_run_time: Amount of time each test should run, in minutes.
+
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.jailed_build b/test_suites/control.jailed_build
index 9917fff..04242c6 100644
--- a/test_suites/control.jailed_build
+++ b/test_suites/control.jailed_build
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "jailed_build"
 PURPOSE = "Quartine potentially dangerous tests."
 
diff --git a/test_suites/control.kernel_daily_benchmarks b/test_suites/control.kernel_daily_benchmarks
index 0569f37..a99d96d 100644
--- a/test_suites/control.kernel_daily_benchmarks
+++ b/test_suites/control.kernel_daily_benchmarks
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Mike Truty <truty@google.com>/Chrome OS Kernel"
+AUTHOR = "Mike Truty <truty@google.com>/ChromeOS Kernel"
 NAME = "kernel_daily_benchmarks"
 PURPOSE = "Gather benchmark/performance data for kernel evaluation."
 
diff --git a/test_suites/control.kernel_daily_regression b/test_suites/control.kernel_daily_regression
index 96c13c0..471abaf 100644
--- a/test_suites/control.kernel_daily_regression
+++ b/test_suites/control.kernel_daily_regression
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Mike Truty <truty@google.com>/Chrome OS Kernel"
+AUTHOR = "Mike Truty <truty@google.com>/ChromeOS Kernel"
 NAME = "kernel_daily_regression"
 PURPOSE = "Test required functionality - longer running than bvt tests usually."
 
diff --git a/test_suites/control.kernel_per-build_benchmarks b/test_suites/control.kernel_per-build_benchmarks
index 7449886..679a1e5 100644
--- a/test_suites/control.kernel_per-build_benchmarks
+++ b/test_suites/control.kernel_per-build_benchmarks
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "kernel_per-build_benchmarks"
 PURPOSE = "Gather benchmark/performance data for kernel evaluation."
 
diff --git a/test_suites/control.kernel_per-build_regression b/test_suites/control.kernel_per-build_regression
index 42bf62e..609b678 100644
--- a/test_suites/control.kernel_per-build_regression
+++ b/test_suites/control.kernel_per-build_regression
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "kernel_per-build_regression"
 PURPOSE = "Test required functionality - longer running than bvt tests usually."
 
diff --git a/test_suites/control.kernel_weekly_regression b/test_suites/control.kernel_weekly_regression
index 8bd12eb..3e6a78e 100644
--- a/test_suites/control.kernel_weekly_regression
+++ b/test_suites/control.kernel_weekly_regression
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "kernel_weekly_regression"
 PURPOSE = "Test required functionality - longer running than bvt tests usually."
 
diff --git a/test_suites/control.labqual b/test_suites/control.labqual
index f42a334..655b993 100644
--- a/test_suites/control.labqual
+++ b/test_suites/control.labqual
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "labqual"
 PURPOSE = "Suite to qualify firmware for low-touch lab"
 
diff --git a/test_suites/control.link_perf b/test_suites/control.link_perf
index 1b8256e..90c743f 100644
--- a/test_suites/control.link_perf
+++ b/test_suites/control.link_perf
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "link_perf"
 TIME = "SHORT"
 TEST_CATEGORY = "Servo"
diff --git a/test_suites/control.manual_platform_suite b/test_suites/control.manual_platform_suite
index 2494ddd..7d64fc6 100644
--- a/test_suites/control.manual_platform_suite
+++ b/test_suites/control.manual_platform_suite
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "platform_manual_suite"
 PURPOSE = "USB peripherals detection during stress procedure."
 
diff --git a/test_suites/control.moblab b/test_suites/control.moblab
index fafa4d4..b82b3ba 100644
--- a/test_suites/control.moblab
+++ b/test_suites/control.moblab
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "moblab"
 PURPOSE = "Test critical moblab functionality."
 
diff --git a/test_suites/control.moblab-cts-mini b/test_suites/control.moblab-cts-mini
index 8a73283..4249518 100644
--- a/test_suites/control.moblab-cts-mini
+++ b/test_suites/control.moblab-cts-mini
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "moblab-cts-mini"
 PURPOSE = "Ensure that CTS is running on moblab for every commit."
 
diff --git a/test_suites/control.moblab_quick b/test_suites/control.moblab_quick
index 8969e3a..3888838 100644
--- a/test_suites/control.moblab_quick
+++ b/test_suites/control.moblab_quick
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "moblab_quick"
 PURPOSE = "Quickly test basic moblab functionality."
 
diff --git a/test_suites/control.moblab_storage_qual b/test_suites/control.moblab_storage_qual
index 9413818..a1035d0 100644
--- a/test_suites/control.moblab_storage_qual
+++ b/test_suites/control.moblab_storage_qual
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "moblab_storage_qual"
 PURPOSE = "Quickly test basic moblab functionality."
 
diff --git a/test_suites/control.mtp b/test_suites/control.mtp
new file mode 100644
index 0000000..d1efba2
--- /dev/null
+++ b/test_suites/control.mtp
@@ -0,0 +1,20 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ARC Engprod Team"
+NAME = "mtp"
+PURPOSE = "Suite to run MTP(Media Transfer Protocol) tests on ChromeOS devices."
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+DOC = """ This suite runs MTP tests. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+args_dict['max_runtime_mins'] = 180
+args_dict['name'] = NAME
+args_dict['job'] = job
+dynamic_suite.reimage_and_run(**args_dict)
+
diff --git a/test_suites/control.nbr b/test_suites/control.nbr
new file mode 100644
index 0000000..6f10e60
--- /dev/null
+++ b/test_suites/control.nbr
@@ -0,0 +1,25 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "Henry Barnor <hbarnor@chromium.org>"
+NAME = "nbr"
+PURPOSE = "Runs nbr tests."
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite runs the nbr tests.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.nearby-share b/test_suites/control.nearby-share
index da46783..20eb10e6 100644
--- a/test_suites/control.nearby-share
+++ b/test_suites/control.nearby-share
@@ -4,7 +4,7 @@
 
 AUTHOR = "ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)"
 NAME = "nearby-share"
-PURPOSE = "Suite to run Nearby Share tests on Chrome OS devices."
+PURPOSE = "Suite to run Nearby Share tests on ChromeOS devices."
 TIME = "SHORT"
 TEST_CATEGORY = "General"
 TEST_CLASS = "suite"
diff --git a/test_suites/control.nearby-share-android b/test_suites/control.nearby-share-android
new file mode 100644
index 0000000..90dee46
--- /dev/null
+++ b/test_suites/control.nearby-share-android
@@ -0,0 +1,20 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)"
+NAME = "nearby-share-android"
+PURPOSE = "Suite to run Nearby Share tests on ChromeOS devices."
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+DOC = """ This suite runs Cross Device tests. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+dynamic_suite.reimage_and_run(**args_dict)
+
diff --git a/test_suites/control.nearby-share-arc b/test_suites/control.nearby-share-arc
new file mode 100644
index 0000000..0f5e6f3
--- /dev/null
+++ b/test_suites/control.nearby-share-arc
@@ -0,0 +1,20 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)"
+NAME = "nearby-share-arc"
+PURPOSE = "Suite to run Nearby Share tests on ChromeOS devices."
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+DOC = """ This suite runs Nearby Share tests. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+dynamic_suite.reimage_and_run(**args_dict)
+
diff --git a/test_suites/control.nearby-share-dev b/test_suites/control.nearby-share-dev
new file mode 100644
index 0000000..3cc873f
--- /dev/null
+++ b/test_suites/control.nearby-share-dev
@@ -0,0 +1,19 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)"
+NAME = "nearby-share-dev"
+PURPOSE = "Nearby Share tests on ChromeOS devices with dev Android Nearby."
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+DOC = """ This suite runs Nearby Share tests. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.nearby-share-prod b/test_suites/control.nearby-share-prod
new file mode 100644
index 0000000..e92cdb3
--- /dev/null
+++ b/test_suites/control.nearby-share-prod
@@ -0,0 +1,19 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)"
+NAME = "nearby-share-prod"
+PURPOSE = "Nearby Share tests on ChromeOS devices with prod Android Nearby."
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+DOC = """ This suite runs Nearby Share tests. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.nearby-share-remote b/test_suites/control.nearby-share-remote
new file mode 100644
index 0000000..82b812b
--- /dev/null
+++ b/test_suites/control.nearby-share-remote
@@ -0,0 +1,20 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS SW Engprod Team (chromeos-sw-engprod@google.com)"
+NAME = "nearby-share-remote"
+PURPOSE = "Suite to run Nearby Share remote tests on ChromeOS devices."
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+DOC = """ This suite runs Nearby Share remote tests. """
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+dynamic_suite.reimage_and_run(**args_dict)
+
diff --git a/test_suites/control.network_nightly b/test_suites/control.network_nightly
index 4d4e4ac..1196226 100644
--- a/test_suites/control.network_nightly
+++ b/test_suites/control.network_nightly
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS "
+AUTHOR = "ChromeOS "
 NAME = "network_nightly"
 PURPOSE = "A simple example suite."
 
diff --git a/test_suites/control.network_ui b/test_suites/control.network_ui
index dc80202..78fe561 100644
--- a/test_suites/control.network_ui
+++ b/test_suites/control.network_ui
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "network_ui"
 PURPOSE = "Software-only network UI tests for shill and Chrome integration."
 
@@ -13,7 +13,7 @@
 
 DOC = """
 Run automated tests backed up by a fake hardware simulator (e.g. pseudomodem)
-to test the Chrome OS network UI and the chrome.networkingPrivate extension
+to test the ChromeOS network UI and the chrome.networkingPrivate extension
 API layer.
 
 @param build: The name of the image to test.
diff --git a/test_suites/control.omaha_per-week b/test_suites/control.omaha_per-week
new file mode 100644
index 0000000..4158378
--- /dev/null
+++ b/test_suites/control.omaha_per-week
@@ -0,0 +1,30 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+AUTHOR = "chromeos-commercial-remote-management"
+NAME = "omaha_per-week"
+PURPOSE = "Omaha tests"
+TIME = "LONG"
+TEST_CATEGORY = "Omaha"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+DOC = """
+This is the part of the omaha test suite which should run once per week.
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+args_dict['max_runtime_mins'] = 960
+args_dict['timeout_mins'] = 1440
+args_dict['file_bugs'] = False
+args_dict['name'] = 'omaha_per-week'
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['bug_template'] = {}  # default bug template; no suite-specific overrides
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.perfalerts b/test_suites/control.perfalerts
index 606635a..e9f8f3e 100644
--- a/test_suites/control.perfalerts
+++ b/test_suites/control.perfalerts
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "perfalerts"
 TIME = "LONG"
 TEST_CATEGORY = "Functional"
diff --git a/test_suites/control.performance_cuj b/test_suites/control.performance_cuj
new file mode 100644
index 0000000..5a42318
--- /dev/null
+++ b/test_suites/control.performance_cuj
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "abergman, chromeos-engprod-platform-syd"
+NAME = "performance_cuj"
+PURPOSE = "Tast wrapper suite for running performance CUJ Tast tests on Moblab."
+TIME = "LENGTHY"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite executes a set of performance CUJ (critical user journey) test cases.
+It is intended to be run by partners (OEMs, ODMs) using Moblab.
+
+@param build: The name of the image to test.
+          Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param dry_run: simulate test execution without stressing the storage.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['job'] = job
+args_dict['timeout_mins'] = 10080 # entire suite timeout after 7 days
+args_dict['max_runtime_mins'] = 10080 # one test timeout after 7 days
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.performance_cuj_benchmarks b/test_suites/control.performance_cuj_benchmarks
new file mode 100644
index 0000000..0ece49a
--- /dev/null
+++ b/test_suites/control.performance_cuj_benchmarks
@@ -0,0 +1,33 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "abergman, chromeos-engprod-platform-syd"
+NAME = "performance_cuj_benchmarks"
+PURPOSE = "Tast wrapper suite for running public benchmarks performance CUJ Tast tests on Moblab."
+TIME = "LONG"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite executes a set of public benchmark performance CUJ (critical user
+journey) test cases. It is intended to be run by partners (OEMs, ODMs) using Moblab as
+a quick check that all included tests are running successfully.
+
+@param build: The name of the image to test.
+          Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.performance_cuj_experimental b/test_suites/control.performance_cuj_experimental
new file mode 100644
index 0000000..7e3cd42
--- /dev/null
+++ b/test_suites/control.performance_cuj_experimental
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "abergman, chromeos-engprod-platform-syd"
+NAME = "performance_cuj_experimental"
+PURPOSE = "Suite for running experimental performance CUJ tests on Moblab."
+TIME = "LONG"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite executes a set of experimental performance CUJ (critical user
+journey) test cases. It is intended to be run by test developers for
+verification purposes, to validate newly added tests before adding them to the
+production suite.
+
+@param build: The name of the image to test.
+          Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param dry_run: simulate test execution without stressing the storage.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.performance_cuj_quick b/test_suites/control.performance_cuj_quick
new file mode 100644
index 0000000..1f26dc5
--- /dev/null
+++ b/test_suites/control.performance_cuj_quick
@@ -0,0 +1,34 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "abergman, chromeos-engprod-platform-syd"
+NAME = "performance_cuj_quick"
+PURPOSE = "Tast wrapper suite for running minimal set of performance CUJ Tast tests on Moblab."
+TIME = "LONG"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite executes a minimal, stable set of performance CUJ (critical user
+journey) test cases. It is intended to be run by partners (OEMs, ODMs) using Moblab
+as a quick check that all included tests are running successfully. It should not
+repeat tests more than once and should include only the basic variant of each test.
+
+@param build: The name of the image to test.
+          Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.power_build b/test_suites/control.power_build
index e626cc6..b5f971f 100644
--- a/test_suites/control.power_build
+++ b/test_suites/control.power_build
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_build"
 TIME = "SHORT"
 TEST_CATEGORY = "General"
diff --git a/test_suites/control.power_check b/test_suites/control.power_check
new file mode 100644
index 0000000..5998536
--- /dev/null
+++ b/test_suites/control.power_check
@@ -0,0 +1,25 @@
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "power_check"
+PURPOSE = "Daily power check tests"
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+Daily power check tests.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.power_daily b/test_suites/control.power_daily
index 6b82fee..9af3068 100644
--- a/test_suites/control.power_daily
+++ b/test_suites/control.power_daily
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_daily"
 TIME = "SHORT"
 TEST_CATEGORY = "General"
diff --git a/test_suites/control.power_idle b/test_suites/control.power_idle
index 43df634..f39b19c 100644
--- a/test_suites/control.power_idle
+++ b/test_suites/control.power_idle
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_idle"
 PURPOSE = "Measure power usage when system is idle."
 
diff --git a/test_suites/control.power_loadtest b/test_suites/control.power_loadtest
index 7d770b0..91147c3 100644
--- a/test_suites/control.power_loadtest
+++ b/test_suites/control.power_loadtest
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_loadtest"
 PURPOSE = "Power load testing."
 
diff --git a/test_suites/control.power_loadtest_1hour b/test_suites/control.power_loadtest_1hour
index 55c63b6..43e6e64 100644
--- a/test_suites/control.power_loadtest_1hour
+++ b/test_suites/control.power_loadtest_1hour
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_loadtest_1hour"
 PURPOSE = "Power load testing."
 
diff --git a/test_suites/control.power_loadtest_fast b/test_suites/control.power_loadtest_fast
index b26d7c4..acd09a2 100644
--- a/test_suites/control.power_loadtest_fast
+++ b/test_suites/control.power_loadtest_fast
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_loadtest_fast"
 PURPOSE = "Power load testing."
 
diff --git a/test_suites/control.power_measurement_wrapper b/test_suites/control.power_measurement_wrapper
index c15689a..0cad379 100644
--- a/test_suites/control.power_measurement_wrapper
+++ b/test_suites/control.power_measurement_wrapper
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_measurement_wrapper"
 PURPOSE = "Power testing."
 
diff --git a/test_suites/control.power_qual_fast b/test_suites/control.power_qual_fast
new file mode 100644
index 0000000..b0b88a6
--- /dev/null
+++ b/test_suites/control.power_qual_fast
@@ -0,0 +1,35 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'mqg'
+NAME = 'power_qual_fast'
+TIME = 'LENGTHY'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'suite'
+TEST_TYPE = 'Server'
+
+DOC = """
+This dynamic suite runs the test version of the power qualification
+tests run by OEMs using Moblab.
+
+@param build: The name of the image to test.
+          Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+@param file_bugs: If True your suite will file bugs on failures.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['timeout_mins'] = 2880 # 2 days
+args_dict['max_runtime_mins'] = 2880 # 2 days
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.power_qual_full b/test_suites/control.power_qual_full
new file mode 100644
index 0000000..1a92877
--- /dev/null
+++ b/test_suites/control.power_qual_full
@@ -0,0 +1,35 @@
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'mqg'
+NAME = 'power_qual_full'
+TIME = 'LENGTHY'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'suite'
+TEST_TYPE = 'Server'
+
+DOC = """
+This dynamic suite runs the test version of the power qualification
+tests run by OEMs using Moblab.
+
+@param build: The name of the image to test.
+          Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+@param file_bugs: If True your suite will file bugs on failures.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['timeout_mins'] = 2880 # 2 days
+args_dict['max_runtime_mins'] = 2880 # 2 days
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.power_requirements b/test_suites/control.power_requirements
index a7f2be3..30b7f07 100644
--- a/test_suites/control.power_requirements
+++ b/test_suites/control.power_requirements
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "power_requirements"
 TIME = "LONG"
 TEST_CATEGORY = "General"
diff --git a/test_suites/control.power_sanity b/test_suites/control.power_sanity
deleted file mode 100644
index ea53880..0000000
--- a/test_suites/control.power_sanity
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "power_sanity"
-PURPOSE = "Daily power sanity tests"
-
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """
-Daily power sanity tests.
-"""
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-args_dict['add_experimental'] = True
-args_dict['name'] = NAME
-args_dict['job'] = job
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.printscan b/test_suites/control.printscan
new file mode 100644
index 0000000..0601430
--- /dev/null
+++ b/test_suites/control.printscan
@@ -0,0 +1,34 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "chromeos-test"
+NAME = "printscan"
+PURPOSE = "Tests printing and scanning on real MFPs."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+Printing and scanning tests with real MFPs.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = 'printscan'
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.provision b/test_suites/control.provision
index 95b28f1..c683acf 100644
--- a/test_suites/control.provision
+++ b/test_suites/control.provision
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 # This name is hard-coded in run_suite.py
 NAME = "provision"
 PURPOSE = "Provisioning suite job."
diff --git a/test_suites/control.push_to_prod b/test_suites/control.push_to_prod
index cfbb479..f0365d0 100644
--- a/test_suites/control.push_to_prod
+++ b/test_suites/control.push_to_prod
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "push_to_prod"
 PURPOSE = "Test Autotest before push to prod."
 
diff --git a/test_suites/control.pvs-audio b/test_suites/control.pvs-audio
new file mode 100644
index 0000000..e59dde2
--- /dev/null
+++ b/test_suites/control.pvs-audio
@@ -0,0 +1,28 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "pvs-audio"
+PURPOSE = "PVS Audio tests that require no special hardware to run"
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This test suite is part of the Platform Validation Suite (go/cros-pvs)
+and includes a collection of tast.audio tests that require no
+special hardware to run.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 60
+args_dict['add_experimental'] = True
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.pvs-bvt-inline b/test_suites/control.pvs-bvt-inline
new file mode 100644
index 0000000..948a04b
--- /dev/null
+++ b/test_suites/control.pvs-bvt-inline
@@ -0,0 +1,69 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS PVS Team"
+NAME = "pvs-bvt-inline"
+PURPOSE = "Test critical functionality."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is the portion of the Build Verification Test suite required
+to pass before any other tests may run.  Test failures in this
+suite trigger automatic actions:
+  * Failures in the Commit Queue or Pre-Flight Queue fail the build,
+    and block running tests from the bvt-cq suite.
+  * Failures in the canary turn the tree red, block all other tests
+    for the build, and generally mean that QA cannot further
+    evaluate the build's fitness for release.
+
+Requirements for a test to be in this suite:
+ 1. The test should be SHORT, and should not require any specialized
+    lab resources.
+ 2. A test failure should indicate one or more of the following
+    impacts is possible:
+     * The failure may impact the stability of the test lab.
+     * The failure may prevent discovery of other bugs.
+     * The failure may block ordinary development tasks.
+ 3. A test failure must reliably indicate a bug in the product, and
+    not a bug in the test.
+ 4. The test must be hermetic.  That is, the test should have no
+    dependencies on external network resources.
+ 5. (added for pvs-* suite) must not access internal resources or be
+    dependent on Moblab execution.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+
+# Values specified in this bug template will override default values when
+# filing bugs on tests that are a part of this suite. If left unspecified,
+# the bug filer will fall back to its defaults.
+_BUG_TEMPLATE = {
+    'labels': ['bvt'],
+    'owner': '',
+    'status': None,
+    'summary': None,
+    'title': None,
+}
+
+args_dict['max_runtime_mins'] = 30
+args_dict['name'] = 'pvs-bvt-inline'
+args_dict['job'] = job
+args_dict['add_experimental'] = True
+args_dict['bug_template'] = _BUG_TEMPLATE
+
+dynamic_suite.reimage_and_run(**args_dict)
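The _BUG_TEMPLATE dict above is passed through args_dict['bug_template'], and per its comment any field set there overrides the bug filer's defaults. A hedged sketch of that override behaviour; the assumed defaults and the merge helper are illustrative assumptions, not the actual bug-filer code:

    def apply_bug_template(assumed_defaults, template):
        # Overlay suite-specific fields onto assumed filer defaults; empty or
        # None values are treated as 'unspecified' and fall back to the defaults.
        merged = dict(assumed_defaults)
        for key, value in template.items():
            if value not in (None, '', []):
                merged[key] = value
        return merged

    assumed_defaults = {'labels': [], 'owner': '', 'status': 'Untriaged',
                        'summary': '', 'title': ''}
    suite_template = {'labels': ['bvt'], 'owner': '', 'status': None,
                      'summary': None, 'title': None}
    print(apply_bug_template(assumed_defaults, suite_template))
    # -> the assumed defaults with 'labels' replaced by ['bvt']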
diff --git a/test_suites/control.pvs-display b/test_suites/control.pvs-display
new file mode 100644
index 0000000..c8b83f5
--- /dev/null
+++ b/test_suites/control.pvs-display
@@ -0,0 +1,27 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'ChromeOS Team'
+NAME = 'pvs-display'
+PURPOSE = 'PVS Display tests that require no special hardware to run'
+
+TIME = 'MEDIUM'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'suite'
+TEST_TYPE = 'Server'
+
+DOC = '''
+This test suite is part of the Platform Validation Suite (go/cros-pvs)
+and includes a collection of Display tests that run without additional hardware.
+'''
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 240
+args_dict['add_experimental'] = True
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.pvs-graphics b/test_suites/control.pvs-graphics
new file mode 100644
index 0000000..c60a5ec
--- /dev/null
+++ b/test_suites/control.pvs-graphics
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "pvs-graphics"
+PURPOSE = "PVS graphics tests that require no special hardware to run"
+
+TIME = "LENGTHY"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This test suite is part of the Platform Validation Suite (go/cros-pvs)
+and includes a collection of tast and tauto graphics tests that require no
+special hardware to run.
+
+See go/cros-pvs (internal) or
+https://chromeos.google.com/partner/dlm/docs/PVS/pvs_partner_documentation.html
+for more information on PVS.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 90
+args_dict['add_experimental'] = True
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.pvs-kernel b/test_suites/control.pvs-kernel
new file mode 100644
index 0000000..aded6ae
--- /dev/null
+++ b/test_suites/control.pvs-kernel
@@ -0,0 +1,38 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "pvs-kernel"
+PURPOSE = "Run kernel tests that partners are able to run over moblab"
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This test suite is part of the Platform Validation Suite and contains all
+critical kernel tests that can run on Moblab with no special test bed
+requirements. It is intended to mirror the kernel_per-build_regression test
+suite as closely as possible.
+For more info on PVS, please check out go/cros-pvs
+For partner-facing docs, see https://chromeos.google.com/partner/dlm/docs/PVS/pvs_partner_documentation.html
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on.  Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['name'] = 'pvs-kernel'
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.pvs-quick-check b/test_suites/control.pvs-quick-check
new file mode 100644
index 0000000..ef32d9a
--- /dev/null
+++ b/test_suites/control.pvs-quick-check
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'chromeos-pvs-team'
+NAME = 'pvs-quick-check'
+PURPOSE = 'Quick check to download the autotest client to the DUT and run a stub test'
+
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'suite'
+TEST_TYPE = 'Server'
+
+DOC = """
+This suite is intended to be run by partners as a quick check of the build.
+It only contains a stub_Pass test. It works by relying
+on the safety checks built into the provisioning code to identify bad builds.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.pvs-staging b/test_suites/control.pvs-staging
new file mode 100644
index 0000000..337d4ce
--- /dev/null
+++ b/test_suites/control.pvs-staging
@@ -0,0 +1,37 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "mwiitala, ChromeOS Team"
+NAME = "pvs-staging"
+PURPOSE = "Verify that tests are stable and ready to be run by partners for PVS"
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This test suite is used to verify tests that will become part of the Platform Validation Suite.
+This test suite should not be used for any purposes apart from testing done by the PVS team.
+
+For more info on PVS, please check out go/cros-pvs-prd.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 60
+args_dict['add_experimental'] = True
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
\ No newline at end of file
diff --git a/test_suites/control.pvs-tast-cq b/test_suites/control.pvs-tast-cq
new file mode 100644
index 0000000..300a0bd
--- /dev/null
+++ b/test_suites/control.pvs-tast-cq
@@ -0,0 +1,39 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "pvs-tast-cq"
+PURPOSE = "Run critical (CQ-blocking) tast tests"
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This test suite is part of the Platform Validation Suite and contains all
+critical (CQ-blocking) tast tests that can run on Moblab with no special
+test bed requirements. It is intended to mirror the bvt-tast-cq test suite as
+closely as possible.
+For more info on PVS, please check out go/cros-pvs
+For partner-facing docs, see https://chromeos.google.com/partner/dlm/docs/PVS/pvs_partner_documentation.html
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 180
+args_dict['add_experimental'] = True
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.pvs-video b/test_suites/control.pvs-video
new file mode 100644
index 0000000..5986745
--- /dev/null
+++ b/test_suites/control.pvs-video
@@ -0,0 +1,27 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'ChromeOS Team'
+NAME = 'pvs-video'
+PURPOSE = 'PVS Video tests that require no special hardware to run'
+
+TIME = 'SHORT'
+TEST_CATEGORY = 'General'
+TEST_CLASS = 'suite'
+TEST_TYPE = 'Server'
+
+DOC = '''
+This test suite is part of the Platform Validation Suite (go/cros-pvs)
+and includes a collection of Video tests that run without additional hardware.
+'''
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 240
+args_dict['add_experimental'] = True
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
\ No newline at end of file
diff --git a/test_suites/control.py3-beta b/test_suites/control.py3-beta
new file mode 100644
index 0000000..8640fd4
--- /dev/null
+++ b/test_suites/control.py3-beta
@@ -0,0 +1,26 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "dbeckett@"
+NAME = "py3-beta"
+PURPOSE = "Suite for Python 3 rollout in lab/CQ."
+TIME = "LONG"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite is used to flush out the Python 3 rollout in the lab.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 960
+args_dict['timeout_mins'] = 1440
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.regression b/test_suites/control.regression
index d5733f7..e5b2fed 100644
--- a/test_suites/control.regression
+++ b/test_suites/control.regression
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "regression"
 TIME = "SHORT"
 TEST_CATEGORY = "General"
diff --git a/test_suites/control.sanity b/test_suites/control.sanity
index bbd65fc..1079aa7 100644
--- a/test_suites/control.sanity
+++ b/test_suites/control.sanity
@@ -13,7 +13,7 @@
 
 DOC = """
 This suite is intended to be run by buildbot as a sanity check of the build.
-It currently only contains a dummy_Pass test. It works by relying
+It currently only contains a stub_Pass test. It works by relying
 on the safety checks built into the provisioning code to identify bad builds.
 
 This suite is intended to block other bvt suites, and as such is highly
diff --git a/test_suites/control.satlab-qual-bvt-cq b/test_suites/control.satlab-qual-bvt-cq
new file mode 100644
index 0000000..aa65401
--- /dev/null
+++ b/test_suites/control.satlab-qual-bvt-cq
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "gregorynisbet@chromium.org"
+NAME = "satlab-qual-bvt-cq"
+PURPOSE = "Tests functionality required by the Commit Queue for Satlab"
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is a fork of control.bvt-cq for Satlab qualification.
+
+The original is control.bvt-cq in the same directory as this file.
+
+https://chromium.googlesource.com/chromiumos/third_party/autotest/+/main/test_suites/control.bvt-cq
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+args_dict['max_runtime_mins'] = 30
+args_dict['name'] = NAME
+# timeout_mins not present in original.
+args_dict['timeout_mins'] = 4 * 60
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.satlab-qual-bvt-inline b/test_suites/control.satlab-qual-bvt-inline
new file mode 100644
index 0000000..342b4a3
--- /dev/null
+++ b/test_suites/control.satlab-qual-bvt-inline
@@ -0,0 +1,32 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "gregorynisbet@chromium.org"
+NAME = "satlab-qual-bvt-cq"
+PURPOSE = "Test critical functionality for Satlab"
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is a fork of control.bvt-inline for Satlab qualification.
+
+The original is control.bvt-inline in the same directory as this file.
+
+https://chromium.googlesource.com/chromiumos/third_party/autotest/+/main/test_suites/control.bvt-inline
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['job'] = job
+args_dict['max_runtime_mins'] = 30
+args_dict['name'] = NAME
+# timeout_mins not present in original.
+args_dict['timeout_mins'] = 4 * 60
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.satlab-qual-bvt-installer b/test_suites/control.satlab-qual-bvt-installer
new file mode 100644
index 0000000..6ef0adc
--- /dev/null
+++ b/test_suites/control.satlab-qual-bvt-installer
@@ -0,0 +1,31 @@
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "bvt-installer"
+PURPOSE = "Build verification of ChromeOS pieces related to Installer for Satlab"
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is a fork of control.bvt-installer for Satlab qualification.
+
+The original is control.bvt-installer in the same directory as this file.
+
+https://chromium.googlesource.com/chromiumos/third_party/autotest/+/main/test_suites/control.bvt-installer
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['job'] = job
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = 'satlab-qual-bvt-installer'
+# timeout_mins not present in original.
+args_dict['timeout_mins'] = 4 * 60
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.security b/test_suites/control.security
index 3d4c8f8..e4c85cc 100644
--- a/test_suites/control.security
+++ b/test_suites/control.security
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "security"
 PURPOSE = "Test security related functionality."
 
diff --git a/test_suites/control.security_weekly b/test_suites/control.security_weekly
index 4447f41..365f7e6 100644
--- a/test_suites/control.security_weekly
+++ b/test_suites/control.security_weekly
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "security_weekly"
 PURPOSE = "Test long-running security related functionality each week."
 
diff --git a/test_suites/control.skylab_staging_test b/test_suites/control.skylab_staging_test
index 3334d5b..e813e20 100644
--- a/test_suites/control.skylab_staging_test
+++ b/test_suites/control.skylab_staging_test
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "skylab_staging_test"
 PURPOSE = "Test Skylab (and lucifer, autotest) before push to prod."
 
diff --git a/test_suites/control.smoke b/test_suites/control.smoke
index 28601ca..a51cb1c 100644
--- a/test_suites/control.smoke
+++ b/test_suites/control.smoke
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "smoke"
 PURPOSE = "Basic tests that are able to run in VMs"
 
diff --git a/test_suites/control.storage_qual_bringup b/test_suites/control.storage_qual_bringup
new file mode 100644
index 0000000..06f08f8
--- /dev/null
+++ b/test_suites/control.storage_qual_bringup
@@ -0,0 +1,32 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "mblsha@chromium.org"
+NAME = "storage_qual_bringup"
+PURPOSE = "Subset of StorageQual tests to be run on Satlab during bringup"
+TIME = "SHORT"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+Storage qualification test suite to run on Satlab during bringup.
+
+@param build: The name of the image to test.
+@param board: The board to test on.
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+@param file_bugs: If True your suite will file bugs on failures.
+@param max_run_time: Amount of time each test should run in minutes.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 120
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.stress b/test_suites/control.stress
index 96f681d..121e0af 100644
--- a/test_suites/control.stress
+++ b/test_suites/control.stress
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "stress"
 PURPOSE = "Stress test functionality and devices."
 
diff --git a/test_suites/control.stub b/test_suites/control.stub
new file mode 100644
index 0000000..ea4a54b
--- /dev/null
+++ b/test_suites/control.stub
@@ -0,0 +1,26 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "stub"
+PURPOSE = "To be run while testing the infrastructure and test harness."
+
+TIME = "SHORT"
+TEST_CATEGORY = "Stub"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is a stub test suite. It runs stub tests that always pass or always fail
+so that we can test result gathering and reporting mechanisms.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['name'] = 'stub'
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.syzcorpus b/test_suites/control.syzcorpus
new file mode 100644
index 0000000..b5a986a
--- /dev/null
+++ b/test_suites/control.syzcorpus
@@ -0,0 +1,27 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "zsm, ChromeOS Kernel Team"
+NAME = "syzcorpus"
+PURPOSE = "Suite to run Syzkaller repros on ChromeOS devices."
+
+TIME = "MEDIUM"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This suite runs Syzkaller reproducers to test the ChromeOS Kernel.
+This is a regression test suite for the kernel (both core and drivers).
+See https://github.com/dvyukov/syzkaller-repros for more information.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.syzkaller b/test_suites/control.syzkaller
index 9ab029b..7c77866 100644
--- a/test_suites/control.syzkaller
+++ b/test_suites/control.syzkaller
@@ -2,9 +2,9 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "mwiitala, Chrome OS Kernel Team"
+AUTHOR = "mwiitala, ChromeOS Kernel Team"
 NAME = "syzkaller"
-PURPOSE = "Suite to run Syzkaller tests on Chrome OS devices."
+PURPOSE = "Suite to run Syzkaller tests on ChromeOS devices."
 
 TIME = "SHORT"
 TEST_CATEGORY = "General"
@@ -12,7 +12,7 @@
 TEST_TYPE = "Server"
 
 DOC = """
-This suite utilizes Syzkaller to fuzz the Chrome OS kernel.
+This suite utilizes Syzkaller to fuzz the ChromeOS Kernel.
 See go/ctp-syzkaller for more details.
 """
 
diff --git a/test_suites/control.telemetry_unit b/test_suites/control.telemetry_unit
index 47f88cb..df03e30 100644
--- a/test_suites/control.telemetry_unit
+++ b/test_suites/control.telemetry_unit
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "telemetry_unit"
 PURPOSE = "Telemetry unit tests."
 
diff --git a/test_suites/control.telemetry_unit_server b/test_suites/control.telemetry_unit_server
index 178af7e..287aac6 100644
--- a/test_suites/control.telemetry_unit_server
+++ b/test_suites/control.telemetry_unit_server
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "telemetry_unit_server"
 PURPOSE = "Telemetry unit tests."
 
diff --git a/test_suites/control.hardware_thermalqual_fast b/test_suites/control.thermal_qual_fast
similarity index 100%
rename from test_suites/control.hardware_thermalqual_fast
rename to test_suites/control.thermal_qual_fast
diff --git a/test_suites/control.hardware_thermalqual_full b/test_suites/control.thermal_qual_full
similarity index 100%
rename from test_suites/control.hardware_thermalqual_full
rename to test_suites/control.thermal_qual_full
diff --git a/test_suites/control.toolchain-tests b/test_suites/control.toolchain-tests
index 3f5d756..327012e 100644
--- a/test_suites/control.toolchain-tests
+++ b/test_suites/control.toolchain-tests
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "toolchain-tests"
 PURPOSE = "Test basic, required functionality for the toolchain team."
 
diff --git a/test_suites/control.touch_replay b/test_suites/control.touch_replay
new file mode 100644
index 0000000..aac253a
--- /dev/null
+++ b/test_suites/control.touch_replay
@@ -0,0 +1,34 @@
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "chromeos-test"
+NAME = "touch_replay"
+PURPOSE = "Tests basic touch input functionality."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+Touch input playback based tests.
+
+@param build: The name of the image to test.
+              Ex: x86-mario-release/R17-1412.33.0-a1-b29
+@param board: The board to test on. Ex: x86-mario
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = 'touch_replay'
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.typec_lab b/test_suites/control.typec_lab
new file mode 100644
index 0000000..e21b3e2
--- /dev/null
+++ b/test_suites/control.typec_lab
@@ -0,0 +1,35 @@
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "typec_lab"
+PURPOSE = 'Runs the end-to-end Tast tests in the "typec-lab" category.'
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+Run Tast tests for basic typec/TBT functionality.
+
+The tests are part of 'group:typec'. The 'typec_lab' sub-attribute
+limits the run to the typec lab tests.
+
+Tast is an integration-testing framework analogous to the test-running portion
+of Autotest. See https://chromium.googlesource.com/chromiumos/platform/tast/
+for more information.
+
+See http://go/tast-failures for information about investigating failures.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['name'] = NAME
+args_dict['max_runtime_mins'] = 30
+args_dict['timeout_mins'] = 60
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.usb-camera b/test_suites/control.usb-camera
deleted file mode 100644
index eb74275..0000000
--- a/test_suites/control.usb-camera
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "ChromeOS Video and Camera Team/chromeos-video-eng@google.com"
-NAME = "usb-camera"
-PURPOSE = "Test USB camera required functionality."
-
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """
-This is USB camera test suite.
-
-The suite is used to test USB cameras to make sure cameras meet ChromeOS
-requirements.
-
-@param build: The name of the image to test.
-              Ex: x86-mario-release/R17-1412.33.0-a1-b29
-@param board: The board to test on. Ex: x86-mario
-@param pool: The pool of machines to utilize for scheduling. If pool=None
-             board is used.
-@param check_hosts: require appropriate live hosts to exist in the lab.
-@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
-"""
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-args_dict['name'] = NAME
-args_dict['add_experimental'] = True
-args_dict['max_runtime_mins'] = 60
-args_dict['job'] = job
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.usb_detect b/test_suites/control.usb_detect
index 1532d46..6b97269 100644
--- a/test_suites/control.usb_detect
+++ b/test_suites/control.usb_detect
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "usb_detect"
 PURPOSE = "USB peripherals detection."
 
diff --git a/test_suites/control.usb_detect_stress b/test_suites/control.usb_detect_stress
index 8bc9f8b..67ee4ad 100644
--- a/test_suites/control.usb_detect_stress
+++ b/test_suites/control.usb_detect_stress
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "usb_detect_stress"
 PURPOSE = "USB peripherals detection during stress procedure."
 
diff --git a/test_suites/control.wifi_matfunc_ax b/test_suites/control.wifi_matfunc_ax
new file mode 100644
index 0000000..3704e9d
--- /dev/null
+++ b/test_suites/control.wifi_matfunc_ax
@@ -0,0 +1,32 @@
+AUTHOR = "chromeos-kernel-wifi@google.com"
+NAME = "wifi_matfunc_ax"
+PURPOSE = "Run WiFi 802.11ax tests with APs that support wifi6 & wifi6e."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is an example of a dynamic test suite.
+
+@param build: The name of the image to test.
+          Ex: octopus-release/R99-14469.12.0
+@param board: The board to test on. Ex: octopus
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+@param file_bugs: If True your suite will file bugs on failures.
+@param max_run_time: Amount of time each test should run in minutes.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.wifi_update_router b/test_suites/control.wifi_update_router
deleted file mode 100644
index d254cc4..0000000
--- a/test_suites/control.wifi_update_router
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "krisr@chromium.org"
-NAME = "wifi_update_router"
-PURPOSE = "Update the wificell routers."
-
-TIME = "SHORT"
-TEST_CATEGORY = "General"
-TEST_CLASS = "suite"
-TEST_TYPE = "Server"
-
-DOC = """
-This is an example of a dynamic test suite.
-
-@param build: The name of the image to test.
-          Ex: x86-mario-release/R17-1412.33.0-a1-b29
-@param board: The board to test on. Ex: x86-mario
-@param pool: The pool of machines to utilize for scheduling. If pool=None
-             board is used.
-@param check_hosts: require appropriate live hosts to exist in the lab.
-@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
-@param file_bugs: If True your suite will file bugs on failures.
-@param max_run_time: Amount of time each test shoud run in minutes.
-"""
-
-import common
-from autotest_lib.server.cros.dynamic_suite import dynamic_suite
-
-args_dict['add_experimental'] = True
-args_dict['max_runtime_mins'] = 60
-args_dict['name'] = NAME
-args_dict['job'] = job
-
-dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.wificell_dut_validation b/test_suites/control.wificell_dut_validation
new file mode 100644
index 0000000..3727ee6
--- /dev/null
+++ b/test_suites/control.wificell_dut_validation
@@ -0,0 +1,36 @@
+# Copyright (c) 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = "ChromeOS Team"
+NAME = "wificell_dut_validation"
+PURPOSE = "Group of tests run by lab team to validate AP, PCAP, BT-Peers & DUT during deployment."
+
+TIME = "SHORT"
+TEST_CATEGORY = "General"
+TEST_CLASS = "suite"
+TEST_TYPE = "Server"
+
+DOC = """
+This is an example of a dynamic test suite.
+
+@param build: The name of the image to test.
+          Ex: octopus-release/R99-14469.12.0
+@param board: The board to test on. Ex: octopus
+@param pool: The pool of machines to utilize for scheduling. If pool=None
+             board is used.
+@param check_hosts: require appropriate live hosts to exist in the lab.
+@param SKIP_IMAGE: (optional) If present and True, don't re-image devices.
+@param file_bugs: If True your suite will file bugs on failures.
+@param max_run_time: Amount of time each test should run in minutes.
+"""
+
+import common
+from autotest_lib.server.cros.dynamic_suite import dynamic_suite
+
+args_dict['add_experimental'] = True
+args_dict['max_runtime_mins'] = 60
+args_dict['name'] = NAME
+args_dict['job'] = job
+
+dynamic_suite.reimage_and_run(**args_dict)
diff --git a/test_suites/control.wifichaos b/test_suites/control.wifichaos
index e5f0151..d1d3973 100644
--- a/test_suites/control.wifichaos
+++ b/test_suites/control.wifichaos
@@ -2,7 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-AUTHOR = "Chrome OS Team"
+AUTHOR = "ChromeOS Team"
 NAME = "wifichaos"
 TIME = "SHORT"
 TEST_CATEGORY = "General"
diff --git a/tko/OWNERS b/tko/OWNERS
index ab758b3..f79a524 100644
--- a/tko/OWNERS
+++ b/tko/OWNERS
@@ -1 +1,2 @@
 include /INFRA_OWNERS
+include /HARNESS_OWNERS
\ No newline at end of file
diff --git a/tko/compose_query.cgi b/tko/compose_query.cgi
deleted file mode 100755
index 4752eea..0000000
--- a/tko/compose_query.cgi
+++ /dev/null
@@ -1,378 +0,0 @@
-#!/usr/bin/python2
-
-"""
-Selects all rows and columns that satisfy the condition specified
-and draws the matrix. There is a seperate SQL query made for every (x,y)
-in the matrix.
-"""
-
-print "Content-type: text/html\n"
-
-import sys, os, urllib, cgi, cgitb, re, datetime, time
-
-total_wall_time_start = time.time()
-
-import common
-from autotest_lib.tko import display, frontend, db, query_lib
-from autotest_lib.client.common_lib import kernel_versions
-
-html_header = """\
-<form action="/tko/compose_query.cgi" method="get">
-<table border="0">
-<tr>
-  <td>Column: </td>
-  <td>Row: </td>
-  <td>Condition: </td>
-  <td align="center">
-  <a href="http://autotest.kernel.org/wiki/AutotestTKOCondition">Help</a>
-  </td>
-</tr>
-<tr>
-  <td>
-  <SELECT NAME="columns">
-  %s
- </SELECT>
-  </td>
-  <td>
-  <SELECT NAME="rows">
-  %s
-  </SELECT>
-  </td>
-  <td>
-    <input type="text" name="condition" size="30" value="%s">
-    <input type="hidden" name="title" value="%s">
-  </td>
-  <td align="center"><input type="submit" value="Submit">
-  </td>
-</tr>
-</table>
-</form>
-<form action="/tko/save_query.cgi" method="get">
-<table border="0">
-<tr>
- <td>Name your query:&nbsp;&nbsp;</td>
-  <td>
-    <input type="text" name="label" size="15" value="">
-  </td>
-  <td align="center">&nbsp;<input type="submit" value="Save Query">
-  </td>
- <td>&nbsp;&nbsp;<a href="/tko/query_history.cgi">View saved queries</a></td>
-  <td>
-    <input type="hidden" name="columns" value="%s">
-    <input type="hidden" name="rows" value="%s">
-    <input type="hidden" name="condition" value="%s">
-  </td>
-</tr>
-</table>
-</form>
-"""
-
-
-next_field = {
-    'machine_group': 'hostname',
-    'hostname': 'tag',
-    'tag': 'tag',
-
-    'kernel': 'test',
-    'test': 'label',
-    'label': 'tag',
-
-    'reason': 'tag',
-    'user': 'tag',
-    'status': 'tag',
-   
-    'time': 'tag',
-    'time_daily': 'time',
-}
-
-
-def parse_field(form, form_field, field_default):
-    if not form_field in form:
-        return field_default
-    field_input = form[form_field].value.lower()
-    if field_input and field_input in frontend.test_view_field_dict:
-        return field_input
-    return field_default
-
-
-def parse_condition(form, form_field, field_default):
-    if not form_field in form:
-        return field_default
-    return form[form_field].value
-
-
-form = cgi.FieldStorage()
-
-title_field = parse_condition(form, 'title', '')
-row = parse_field(form, 'rows', 'kernel')
-column = parse_field(form, 'columns', 'machine_group')
-condition_field = parse_condition(form, 'condition', '')
-
-if 'brief' in form.keys() and form['brief'].value <> '0':
-    display.set_brief_mode()
-
-## caller can specify rows and columns that shall be included into the report
-## regardless of whether actual test data is available yet
-force_row_field = parse_condition(form,'force_row','')
-force_column_field = parse_condition(form,'force_column','')
-
-
-def split_forced_fields(force_field):
-    if force_field:
-        return force_field.split()
-    else:
-        return []
-
-force_row =  split_forced_fields(force_row_field)
-force_column =  split_forced_fields(force_column_field)
-  
-cgitb.enable()
-db_obj = db.db()
-
-
-def construct_link(x, y):
-    next_row = row
-    next_column = column
-    condition_list = []
-    if condition_field != '':
-        condition_list.append(condition_field)
-    if y:
-        next_row = next_field[row]
-        condition_list.append("%s='%s'" % (row, y))
-    if x:
-        next_column = next_field[column]
-        condition_list.append("%s='%s'" % (column, x))
-    next_condition = '&'.join(condition_list)
-    link = '/tko/compose_query.cgi?' + urllib.urlencode({'columns': next_column,
-               'rows': next_row, 'condition': next_condition,
-               'title': title_field})
-    return link
-
-
-def construct_logs_link(x, y, job_tag):
-    job_path = frontend.html_root + job_tag + '/'
-    test = ''
-    if (row == 'test' and
-        not y.split('.')[0] in ('boot', 'build', 'install')):
-        test = y
-    if (column == 'test' and
-        not x.split('.')[0] in ('boot', 'build', 'install')):
-        test = x
-    return '/tko/retrieve_logs.cgi?' + urllib.urlencode({'job' : job_path,
-         'test' : test})
-
-
-def create_select_options(selected_val):
-    ret = ""
-    for option in sorted(frontend.test_view_field_dict.keys()):
-        if selected_val == option:
-            selected = " SELECTED"
-        else:
-            selected = ""
-
-        ret += '<OPTION VALUE="%s"%s>%s</OPTION>\n' % \
-                        (option, selected, option)
-    return ret
-
-
-def map_kernel_base(kernel_name):
-    ## insert <br> after each / in kernel name
-    ## but spare consequtive //
-    kernel_name = kernel_name.replace('/','/<br>')
-    kernel_name = kernel_name.replace('/<br>/<br>','//')
-    return kernel_name
-
-
-def header_tuneup(field_name, header):
-        ## header tune up depends on particular field name and may include:
-        ## - breaking header into several strings if it is long url
-        ## - creating date from datetime stamp
-        ## - possibly, expect more various refinements for different fields
-        if field_name == 'kernel':
-                return  map_kernel_base(header)
-        else:
-                return header
-
-
-# Kernel name mappings -- the kernels table 'printable' field is
-# effectively a sortable identifier for the kernel It encodes the base
-# release which is used for overall sorting, plus where patches are
-# applied it adds an increasing pNNN patch combination identifier
-# (actually the kernel_idx for the entry).  This allows sorting
-# as normal by the base kernel version and then sub-sorting by the
-# "first time we saw" a patch combination which should keep them in
-# approximatly date order.  This patch identifier is not suitable
-# for display, so we have to map it to a suitable html fragment for
-# display.  This contains the kernel base version plus the truncated
-# names of all the patches,
-#
-#     2.6.24-mm1 p112
-#     +add-new-string-functions-
-#     +x86-amd-thermal-interrupt
-# 
-# This mapping is produced when the first mapping is request, with
-# a single query over the patches table; the result is then cached.
-#
-# Note: that we only count a base version as patched if it contains
-# patches which are not already "expressed" in the base version.
-# This includes both -gitN and -mmN kernels.
-map_kernel_map = None
-
-
-def map_kernel_init():
-    fields = ['base', 'k.kernel_idx', 'name', 'url']
-    map = {}
-    for (base, idx, name, url) in db_obj.select(','.join(fields),
-            'tko_kernels k, tko_patches p', 'k.kernel_idx=p.kernel_idx'):
-        match = re.match(r'.*(-mm[0-9]+|-git[0-9]+)\.(bz2|gz)$', url)
-        if match:
-            continue
-
-        key = base + ' p%d' % (idx)
-        if not map.has_key(key):
-            map[key] = map_kernel_base(base) + ' p%d' % (idx)
-        map[key] += ('<br>+<span title="' + name + '">' +
-                 name[0:25] + '</span>')
-
-    return map
-
-
-def map_kernel(name):
-    global map_kernel_map
-    if map_kernel_map is None:
-        map_kernel_map = map_kernel_init()
-
-    if map_kernel_map.has_key(name):
-        return map_kernel_map[name]
-
-    return map_kernel_base(name.split(' ')[0])
-
-
-field_map = {
-    'kernel':map_kernel
-}
-
-sql_wall_time = 0
-
-def gen_matrix():
-    where = None
-    if condition_field.strip() != '':
-        try:
-            where = query_lib.parse_scrub_and_gen_condition(
-                condition_field, frontend.test_view_field_dict)
-            print "<!-- where clause: %s -->" % (where,)
-        except:
-            msg = "Unspecified error when parsing condition"
-            return [[display.box(msg)]]
-
-    wall_time_start = time.time()
-    try:
-        ## Unfortunately, we can not request reasons of failure always
-        ## because it may result in an inflated size of data transfer
-        ## (at the moment we fetch 500 bytes of reason descriptions into
-        ## each cell )
-        ## If 'status' in [row,column] then either width or height
-        ## of the table <=7, hence table is not really 2D, and
-        ## query_reason is relatively save.
-        ## At the same time view when either rows or columns grouped
-        ## by status is when users need reasons of failures the most.
-        
-        ## TO DO: implement [Show/Hide reasons] button or link in
-        ## all views and make thorough performance testing 
-        test_data = frontend.get_matrix_data(db_obj, column, row, where,
-                query_reasons = ('status' in [row,column])
-                )
-        global sql_wall_time
-        sql_wall_time = time.time() - wall_time_start
-
-    except db.MySQLTooManyRows, error:
-        return [[display.box(str(error))]]            
-    
-    for f_row in force_row:
-        if not f_row in test_data.y_values:
-            test_data.y_values.append(f_row)
-    for f_column in force_column:
-        if not f_column in test_data.x_values:
-            test_data.x_values.append(f_column)
-
-    if not test_data.y_values:
-        msg = "There are no results for this query (yet?)."
-        return [[display.box(msg)]]
-
-    dict_url = {'columns': row,
-               'rows': column, 'condition': condition_field,
-               'title': title_field}
-    link = '/tko/compose_query.cgi?' + urllib.urlencode(dict_url)
-    header_row = [display.box("<center>(Flip Axis)</center>", link=link)]
-
-    for x in test_data.x_values:
-        dx = x
-        if field_map.has_key(column):
-            dx = field_map[column](x)
-        x_header = header_tuneup(column, dx)
-        link = construct_link(x, None)
-        header_row.append(display.box(x_header,header=True,link=link))
-
-    matrix = [header_row]
-    # For each row, we are looping horizontally over the columns.
-    for y in test_data.y_values:
-        dy = y
-        if field_map.has_key(row):
-            dy = field_map[row](y)
-        y_header = header_tuneup(row, dy)
-        link = construct_link(None, y)
-        cur_row = [display.box(y_header, header=True, link=link)]
-        for x in test_data.x_values:
-            ## next 2 lines: temporary, until non timestamped
-            ## records are in the database
-            if x==datetime.datetime(1970,1,1): x = None
-            if y==datetime.datetime(1970,1,1): y = None
-            try:
-                box_data = test_data.data[x][y]
-            except:
-                cur_row.append(display.box(None, None, 
-                           row_label=y, column_label=x))
-                continue
-            job_tag = test_data.data[x][y].job_tag
-            if job_tag:
-                link = construct_logs_link(x, y, job_tag)
-            else:
-                link = construct_link(x, y)
-
-            apnd = display.status_precounted_box(db_obj, box_data,
-                                 link, y, x)
-            cur_row.append(apnd)
-        matrix.append(cur_row)
-    return matrix
-
-
-def main():
-    if display.is_brief_mode():
-        ## create main grid table only as provided by gen_matrix()
-        display.print_table(gen_matrix())
-    else:
-        # create the actual page 
-        print '<html><head><title>'
-        print 'Filtered Autotest Results'
-        print '</title></head><body>'
-        display.print_main_header()
-        print html_header % (create_select_options(column),
-                     create_select_options(row),
-                     condition_field, title_field,
-                     ## history form
-                     column,row,condition_field)
-        if title_field:
-            print '<h1> %s </h1>' % (title_field)
-        print display.color_keys_row()
-        display.print_table(gen_matrix())
-        print display.color_keys_row()
-        total_wall_time = time.time() - total_wall_time_start
-        
-        perf_info = '<p style="font-size:x-small;">'
-        perf_info += 'sql access wall time = %s secs,' % sql_wall_time
-        perf_info += 'total wall time = %s secs</p>' % total_wall_time
-        print perf_info
-        print '</body></html>'
-
-
-main()
diff --git a/tko/db.py b/tko/db.py
index 6d34c53..aa16ca5 100644
--- a/tko/db.py
+++ b/tko/db.py
@@ -1,7 +1,11 @@
+# Lint as: python2, python3
 # Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+from __future__ import division
+from __future__ import print_function
+
 try:
     import MySQLdb as driver
 except ImportError:
@@ -28,9 +32,10 @@
 from autotest_lib.client.common_lib import utils
 from autotest_lib.client.common_lib.cros import retry
 from autotest_lib.frontend import database_settings_helper
+import six
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -40,7 +45,7 @@
 
     @param msg: Message string
     """
-    print >> sys.stderr, msg
+    print(msg, file=sys.stderr)
     sys.stderr.flush()  # we want these msgs to show up immediately
 
 
@@ -144,7 +149,7 @@
         # we have database routers in place any django settings will apply to
         # both tko and afe.
         # The intended use of this port is to allow a testing shard vm to
-        # update the master vm's database with test results. Specifying
+        # update the vm's database with test results. Specifying
         # and empty string will fallback to not even specifying the port
         # to the backend in tko/db.py. Unfortunately this means retries
         # won't work on the test cluster till we've migrated to routers.
@@ -207,7 +212,7 @@
         while not success:
             try:
                 result = function(*args, **dargs)
-            except driver.OperationalError, e:
+            except driver.OperationalError as e:
                 _log_error("%s; retrying, don't panic yet"
                            % _format_operational_error(e))
                 stop_time = time.time()
@@ -218,7 +223,7 @@
                     try:
                         self._random_delay()
                         self._init_db()
-                    except driver.OperationalError, e:
+                    except driver.OperationalError as e:
                         _log_error('%s; panic now'
                                    % _format_operational_error(e))
             else:
@@ -274,7 +279,7 @@
         if isinstance(where, dict):
             # key/value pairs (which should be equal, or None for null)
             keys, values = [], []
-            for field, value in where.iteritems():
+            for field, value in six.iteritems(where):
                 quoted_field = self._quote(field)
                 if value is None:
                     keys.append(quoted_field + ' is null')
@@ -282,7 +287,7 @@
                     keys.append(quoted_field + '=%s')
                     values.append(value)
             where_clause = ' and '.join(keys)
-        elif isinstance(where, basestring):
+        elif isinstance(where, six.string_types):
             # the exact string
             where_clause = where
             values = []
@@ -401,7 +406,7 @@
         @param data: The insert data.
         @param commit: If commit the transaction .
         """
-        fields = data.keys()
+        fields = list(data.keys())
         refs = ['%s' for field in fields]
         values = [data[field] for field in fields]
         cmd = ('insert into %s (%s) values (%s)' %
@@ -445,7 +450,7 @@
         if commit is None:
             commit = self.autocommit
         cmd = 'update %s ' % table
-        fields = data.keys()
+        fields = list(data.keys())
         data_refs = [self._quote(field) + '=%s' for field in fields]
         data_values = [data[field] for field in fields]
         cmd += ' set ' + ', '.join(data_refs)
@@ -556,7 +561,7 @@
         @param job: The job object.
         @param commit: If commit the transaction .
         """
-        for key, value in job.keyval_dict.iteritems():
+        for key, value in six.iteritems(job.keyval_dict):
             where = {'job_id': job.job_idx, 'key': key}
             data = dict(where, value=value)
             exists = self.select('id', 'tko_job_keyvals', where=where)
@@ -599,12 +604,12 @@
 
         for i in test.iterations:
             data['iteration'] = i.index
-            for key, value in i.attr_keyval.iteritems():
+            for key, value in six.iteritems(i.attr_keyval):
                 data['attribute'] = key
                 data['value'] = value
                 self.insert('tko_iteration_attributes', data,
                             commit=commit)
-            for key, value in i.perf_keyval.iteritems():
+            for key, value in six.iteritems(i.perf_keyval):
                 data['attribute'] = key
                 if math.isnan(value) or math.isinf(value):
                     data['value'] = None
@@ -615,7 +620,7 @@
 
         data = {'test_idx': test_idx}
 
-        for key, value in test.attributes.iteritems():
+        for key, value in six.iteritems(test.attributes):
             data = {'test_idx': test_idx, 'attribute': key,
                     'value': value}
             try:
@@ -787,7 +792,7 @@
         @param patch: The kernel patch object.
         @param commit: If commit the transaction .
         """
-        print patch.reference
+        print(patch.reference)
         name = os.path.basename(patch.reference)[:80]
         self.insert('tko_patches',
                     {'kernel_idx': kver,
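The tko/db.py hunks above are part of the Python 2/3 migration: py2-only idioms are swapped for forms that run under both interpreters via six. A standalone sketch of the idioms this diff relies on (illustrative, not code from the repository):

from __future__ import print_function
import sys
import six

row = {'hostname': 'chromeos1-row1-rack1-host1', 'status': 'GOOD'}

# dict.iteritems() is gone in python3; six.iteritems() works on both.
for field, value in six.iteritems(row):
    print('%s=%s' % (field, value))

# basestring is gone in python3; six.string_types covers str/unicode (py2)
# and str (py3).
where = "status='GOOD'"
assert isinstance(where, six.string_types)

# "except Error, e" is a syntax error in python3; "as e" works on both.
try:
    raise ValueError('lost connection')
except ValueError as e:
    # "print >> sys.stderr, msg" becomes the print() function.
    print('retrying after: %s' % e, file=sys.stderr)

# dict.keys() returns a view in python3; wrap it in list() when an indexable
# sequence is needed, as insert()/update() do with their field lists.
fields = list(row.keys())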
diff --git a/tko/db_unittest.py b/tko/db_unittest.py
index deda51d..c5c4312 100755
--- a/tko/db_unittest.py
+++ b/tko/db_unittest.py
@@ -1,9 +1,9 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import sys
 import unittest
 
-from cStringIO import StringIO
+from six import StringIO
 
 import common
 from autotest_lib.tko import db
diff --git a/tko/delete_job_results b/tko/delete_job_results
index 8d301bf..03af00f 100755
--- a/tko/delete_job_results
+++ b/tko/delete_job_results
@@ -1,4 +1,7 @@
 #!/usr/bin/python2
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import os, sys, shutil
 thisdir = os.path.dirname(os.path.abspath(sys.argv[0]))
 sys.path.insert(0, os.path.abspath(os.path.join(thisdir, '../tko')))
@@ -7,7 +10,7 @@
 usage = "usage: delete_job_results <job tag>"
 
 if len(sys.argv) < 2:
-    print usage
+    print(usage)
     sys.exit(2)
 tag = sys.argv[1]
 resultsdir = os.path.abspath(os.path.join(thisdir, '../results', tag))
diff --git a/tko/display.py b/tko/display.py
index 623596a..6b10b8f 100644
--- a/tko/display.py
+++ b/tko/display.py
@@ -1,3 +1,7 @@
+# Lint as: python2, python3
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import re
 import reason_qualifier
 
@@ -44,7 +48,7 @@
     ### This function does not require maintenance in case of
     ### color_map augmenting - as long as
     ### color keys for box shading have names that end with 'pct'
-    keys = filter(lambda key: key.endswith('pct'), color_map.keys())
+    keys = [key for key in color_map.keys() if key.endswith('pct')]
     def num_pct(key):
         return int(key.replace('pct',''))
     keys.sort(key=num_pct)
@@ -53,7 +57,7 @@
         html+= "\t\t\t<td bgcolor =%s>&nbsp;&nbsp;&nbsp;</td>\n"\
                         % color_map[key]
         hint = key.replace('pct',' %')
-        if hint[0]<>'0': ## anything but 0 %
+        if hint[0]!='0': ## anything but 0 %
             hint = 'to ' + hint
         html+= "\t\t\t<td> %s </td>\n" % hint
 
@@ -114,7 +118,7 @@
             self.data = calculate_html(link, data, tooltip,
                                        row_label, column_label)
 
-        if color_map.has_key(color_key):
+        if color_key in color_map:
             self.color = color_map[color_key]
         elif header:
             self.color = color_map['header']
@@ -278,15 +282,15 @@
     Display the given matrix of data as a table.
     """
 
-    print ('<table bgcolor="%s" cellspacing="1" cellpadding="5" '
+    print(('<table bgcolor="%s" cellspacing="1" cellpadding="5" '
            'style="margin-right: 200px;">') % (
-           color_map['borders'])
+           color_map['borders']))
     for row in matrix:
-        print '<tr>'
+        print('<tr>')
         for element in row:
-            print element.html()
-        print '</tr>'
-    print '</table>'
+            print(element.html())
+        print('</tr>')
+    print('</table>')
 
 
 def print_main_header():
@@ -312,13 +316,13 @@
 text-align: left
 }
 """
-    print '<head><style type="text/css">'
-    print 'a { text-decoration: none }'
-    print hover_css
-    print '</style></head>'
-    print '<h2>'
-    print '<a href="compose_query.cgi">Functional</a>'
-    print '&nbsp&nbsp&nbsp'
+    print('<head><style type="text/css">')
+    print('a { text-decoration: none }')
+    print(hover_css)
+    print('</style></head>')
+    print('<h2>')
+    print('<a href="compose_query.cgi">Functional</a>')
+    print('&nbsp&nbsp&nbsp')
 
 
 def group_name(group):
diff --git a/tko/frontend.py b/tko/frontend.py
index 9033c20..adea771 100644
--- a/tko/frontend.py
+++ b/tko/frontend.py
@@ -1,9 +1,14 @@
+# Lint as: python2, python3
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import os, re, db, sys, datetime
 import common
 from autotest_lib.client.common_lib import kernel_versions
+from six.moves import map
 
-MAX_RECORDS = 50000L
-MAX_CELLS = 500000L
+MAX_RECORDS = 50000
+MAX_CELLS = 500000
 
 tko = os.path.dirname(os.path.realpath(os.path.abspath(__file__)))
 root_url_file = os.path.join(tko, '.root_url')
@@ -51,9 +56,9 @@
             else:
                 (x,y, status, count, job_tags) = row
                 reasons = None
-            if not data.has_key(x):
+            if x not in data:
                 data[x] = {}
-            if not data[x].has_key(y):
+            if y not in data[x]:
                 y_values.add(y)
                 data[x][y] = status_cell()
             data[x][y].add(status, count, job_tags, reasons)
@@ -61,7 +66,7 @@
         # 2-d hash of data - [x-value][y-value]
         self.data = data
         # List of possible columns (x-values)
-        self.x_values = smart_sort(data.keys(), x_field)
+        self.x_values = smart_sort(list(data.keys()), x_field)
         # List of rows columns (y-values)
         self.y_values = smart_sort(list(y_values), y_field)
         nCells = len(self.y_values)*len(self.x_values)
@@ -122,14 +127,14 @@
                 return datetime.datetime(1970, 1, 1, 0, 0, 0)
             else:
                 return date_time
-        list = map(convert_None_to_datetime, list)
+        list = [convert_None_to_datetime(date_time) for date_time in list]
     elif field == 'DATE(test_finished_time)':
         def convert_None_to_date(date):
             if not date:
                 return datetime.date(1970, 1, 1)
             else:
                 return date
-        list = map(convert_None_to_date, list)
+        list = [convert_None_to_date(date) for date in list]
     list.sort()
     return list
 
@@ -249,7 +254,7 @@
             # A dictionary - dict{key} = [value1, value2, ....]
             where = {'test_idx' : self.idx}
             for i in iteration.select(self.db, where):
-                if self.__iterations.has_key(i.key):
+                if i.key in self.__iterations:
                     self.__iterations[i.key].append(i.value)
                 else:
                     self.__iterations[i.key] = [i.value]
diff --git a/tko/job_serializer.py b/tko/job_serializer.py
index 1d975f8..c121dd9 100755
--- a/tko/job_serializer.py
+++ b/tko/job_serializer.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 """A script that provides convertion between models.job and a protocol
 buffer object.
@@ -10,6 +10,9 @@
 """
 
 # import python libraries
+from __future__ import division
+from __future__ import print_function
+
 import datetime
 import time
 import logging
@@ -18,6 +21,7 @@
 from autotest_lib.tko import models
 from autotest_lib.tko import tko_pb2
 from autotest_lib.tko import utils
+import six
 
 __author__ = 'darrenkuo@google.com (Darren Kuo)'
 
@@ -93,7 +97,7 @@
         protocol buffer.
 
         The method takes a tko job object and constructs a protocol
-        buffer job object. Then invokes the native serializing
+        buffer job object. Then invokes the built-in serializing
         function on the object to get a binary string. The string is
         then written to outfile.
 
@@ -194,7 +198,7 @@
             newtest = pb_job.tests.add()
             self.set_pb_test(test, newtest)
 
-        for key, val in tko_job.keyval_dict.iteritems():
+        for key, val in six.iteritems(tko_job.keyval_dict):
             newkeyval = pb_job.keyval_dict.add()
             newkeyval.name = key
             newkeyval.value = str(val)
@@ -269,7 +273,7 @@
             pb_iteration = pb_test.iterations.add()
             self.set_pb_iteration(current_iteration, pb_iteration)
 
-        for key, val in tko_test.attributes.iteritems():
+        for key, val in six.iteritems(tko_test.attributes):
             newkeyval = pb_test.attributes.add()
             newkeyval.name = key
             newkeyval.value = str(val)
@@ -354,12 +358,12 @@
         self.set_trivial_attr(tko_iteration, pb_iteration,
                               self.iteration_type_dict)
 
-        for key, val in tko_iteration.attr_keyval.iteritems():
+        for key, val in six.iteritems(tko_iteration.attr_keyval):
             newkeyval = pb_iteration.attr_keyval.add()
             newkeyval.name = key
             newkeyval.value = str(val)
 
-        for key, val in tko_iteration.perf_keyval.iteritems():
+        for key, val in six.iteritems(tko_iteration.perf_keyval):
             newkeyval = pb_iteration.perf_keyval.add()
             newkeyval.name = key
             newkeyval.value = str(val)
@@ -382,7 +386,9 @@
         resultdict = {}
         for field, field_type in objdict.items():
             value = getattr(obj, field)
-            if field_type in (str, int, long):
+            # six.integer_types is a tuple, so we can't check
+            # "if field_type in (str, six.integer_types)"
+            if field_type == str or field_type in six.integer_types:
                 resultdict[field] = field_type(value)
             elif field_type == datetime:
                 resultdict[field] = (
@@ -406,14 +412,17 @@
         are working with.
 
         """
-        for attr, attr_type in objdict.iteritems():
+        for attr, attr_type in six.iteritems(objdict):
             if attr_type == datetime:
                 t = getattr(tko_obj, attr)
                 if not t:
                     self.set_attr_safely(pb_obj, attr, t, int)
                 else:
                     t = mktime(t.timetuple()) + 1e-6 * t.microsecond
-                    setattr(pb_obj, attr, long(t*1000))
+                    if six.PY2:
+                        setattr(pb_obj, attr, long(t*1000))
+                    else:
+                        setattr(pb_obj, attr, int(t*1000))
             else:
                 value = getattr(tko_obj, attr)
                 self.set_attr_safely(pb_obj, attr, value, attr_type)
@@ -434,8 +443,8 @@
         vartype: the expected type of the attr
 
         """
-
-        supported_types = [int, long, str]
+        # In py2, there is int and long; in py3 it's only int.
+        supported_types = six.integer_types + (str,)
         if vartype in supported_types:
             if value is None:
                 value = vartype()
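For the timestamp fields, job_serializer stores integer milliseconds since the epoch in the protocol buffer; long() only exists on Python 2, hence the six.PY2 branch above. A self-contained sketch of the conversion:

import datetime
from time import mktime

dt = datetime.datetime(2022, 1, 2, 3, 4, 5, 678900)
# Seconds since the epoch, including the sub-second part.
t = mktime(dt.timetuple()) + 1e-6 * dt.microsecond
millis = int(t * 1000)  # value written to the pb field (long(t * 1000) on py2)
print(millis)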
diff --git a/tko/job_serializer_unittest.py b/tko/job_serializer_unittest.py
index ddbb5b5..6c0a909 100755
--- a/tko/job_serializer_unittest.py
+++ b/tko/job_serializer_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 """Unittests for the JobSerializer class.
 
@@ -6,6 +6,9 @@
 
 """
 
+from __future__ import division
+from __future__ import print_function
+
 import datetime
 import tempfile
 import time
@@ -15,6 +18,8 @@
 from autotest_lib.tko import tko_pb2
 from autotest_lib.tko import job_serializer
 from autotest_lib.tko import models
+import six
+from six.moves import zip
 
 NamedTemporaryFile = tempfile.NamedTemporaryFile
 datetime = datetime.datetime
@@ -222,7 +227,10 @@
         @param stime: The original time.
         """
         t = mktime(dTime.timetuple()) + 1e-6 * dTime.microsecond
-        self.assertEqual(long(t), stime/1000)
+        if six.PY2:
+            self.assertEqual(long(t), stime/1000)
+        else:
+            self.assertEqual(int(t), stime/1000)
 
 
     def check_iteration(self, tko_iterations, pb_iterations):
@@ -264,7 +272,7 @@
         @param dictionary: The dict object.
         @param keyval: The keyval object.
         """
-        for key, value in dictionary.iteritems():
+        for key, value in six.iteritems(dictionary):
             self.assertTrue(key in keyval);
             self.assertEqual(str(value), keyval[key])
 
@@ -367,11 +375,18 @@
         """
 
         t = mktime(dTime.timetuple()) + 1e-6 * dTime.microsecond
-        if isinstance(sTime, (int, long)):
-            self.assertEqual(long(t*1000), sTime)
+        if isinstance(sTime, six.integer_types):
+            if six.PY2:
+                self.assertEqual(long(t*1000), sTime)
+            else:
+                self.assertEqual(int(t*1000), sTime)
+
         else:
             t1 = mktime(sTime.timetuple()) + 1e-6 * sTime.microsecond
-            self.assertEqual(long(t*1000), long(t1*1000))
+            if six.PY2:
+                self.assertEqual(long(t*1000), long(t1*1000))
+            else:
+                self.assertEqual(int(t*1000), int(t1*1000))
 
 
     def check_iteration(self, iterations, newiterations):
diff --git a/tko/jsonp_fetcher.cgi b/tko/jsonp_fetcher.cgi
deleted file mode 100755
index 68267c5..0000000
--- a/tko/jsonp_fetcher.cgi
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python2
-
-import cgi, traceback, urllib2
-import common
-from autotest_lib.frontend.afe.json_rpc import serviceHandler
-
-script = """\
-Content-Type: text/javascript
-
-%(callback)s(%(result)s);
-"""
-
-class LogFileNotFound(Exception):
-    pass
-
-form = cgi.FieldStorage(keep_blank_values=True)
-encoded_request = form['request'].value
-callback = form['callback'].value
-
-request = serviceHandler.ServiceHandler.translateRequest(encoded_request)
-parameters = request['params'][0]
-path = parameters['path']
-
-result_dict = serviceHandler.ServiceHandler.blank_result_dict()
-try:
-    file_contents = urllib2.urlopen('http://localhost' + path).read()
-    result_dict['result'] = file_contents
-except urllib2.HTTPError:
-    result_dict['err'] = LogFileNotFound('%s not found' % path)
-    result_dict['err_traceback'] = traceback.format_exc()
-
-encoded_result = serviceHandler.ServiceHandler.translateResult(result_dict)
-print script % dict(callback=callback, result=encoded_result)
diff --git a/tko/migrations/031_rename_tko_tables.py b/tko/migrations/031_rename_tko_tables.py
index 127ef43..8ec6ec9 100644
--- a/tko/migrations/031_rename_tko_tables.py
+++ b/tko/migrations/031_rename_tko_tables.py
@@ -1,5 +1,10 @@
+# Lint as: python2, python3
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import common
 from autotest_lib.database import db_utils
+import six
 
 
 RECREATE_VIEWS_UP = """
@@ -362,7 +367,7 @@
         'perf_view_2',
         )
 
-RENAMES_DOWN = dict((value, key) for key, value in RENAMES_UP.iteritems())
+RENAMES_DOWN = dict((value, key) for key, value in six.iteritems(RENAMES_UP))
 VIEWS_TO_DROP_DOWN = ['tko_' + view for view in VIEWS_TO_DROP_UP]
 
 
diff --git a/tko/models.py b/tko/models.py
index 053fdc0..b4129f8 100644
--- a/tko/models.py
+++ b/tko/models.py
@@ -1,3 +1,7 @@
+# Lint as: python2, python3
+from __future__ import division
+from __future__ import print_function
+
 import json
 import os
 
@@ -5,6 +9,7 @@
 from autotest_lib.client.common_lib import utils
 from autotest_lib.tko import tast
 from autotest_lib.tko import utils as tko_utils
+import six
 
 
 class HostKeyvalError(Exception):
@@ -190,7 +195,7 @@
         # Grab test+host attributes from the host keyval.
         host_keyval = cls.parse_host_keyval(job.dir, job.machine)
         attributes.update(dict(('host-%s' % k, v)
-                               for k, v in host_keyval.iteritems()))
+                               for k, v in six.iteritems(host_keyval)))
 
         if existing_instance:
             def constructor(*args, **dargs):
@@ -352,7 +357,11 @@
     # TODO(ayatane): Investigate if urllib.quote is better.
     label_string = ','.join(label.replace(':', '%3A')
                             for label in hostinfo.labels)
-    return {'labels': label_string, 'platform': hostinfo.model}
+    return {
+            'labels': label_string,
+            'platform': hostinfo.model,
+            'board': hostinfo.board
+    }
 
 
 class patch(object):
@@ -406,7 +415,9 @@
         iterations = []
         index = 1
         attr, perf = {}, {}
-        for line in file(keyval_path):
+        with open(keyval_path, 'r') as kp:
+            lines = kp.readlines()
+        for line in lines:
             line = line.strip()
             if line:
                 cls.parse_line_into_dicts(line, attr, perf)
diff --git a/tko/parse b/tko/parse
index 5e92766..aff6f74 100755
--- a/tko/parse
+++ b/tko/parse
@@ -1,4 +1,4 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 
 import common
 from autotest_lib.tko import parse
diff --git a/tko/parse.py b/tko/parse.py
index 3d88b26..da1d408 100755
--- a/tko/parse.py
+++ b/tko/parse.py
@@ -1,4 +1,8 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 
 import collections
 import errno
@@ -7,7 +11,6 @@
 import optparse
 import os
 import socket
-import subprocess
 import sys
 import time
 import traceback
@@ -24,14 +27,14 @@
 from autotest_lib.frontend.tko import models as tko_models
 from autotest_lib.server import site_utils
 from autotest_lib.server.cros.dynamic_suite import constants
-from autotest_lib.site_utils.sponge_lib import sponge_utils
 from autotest_lib.tko import db as tko_db, utils as tko_utils
 from autotest_lib.tko import models, parser_lib
 from autotest_lib.tko.perf_upload import perf_uploader
 from autotest_lib.utils.side_effects import config_loader
+import six
 
 try:
-    from chromite.lib import metrics
+    from autotest_lib.utils.frozen_chromite.lib import metrics
 except ImportError:
     metrics = utils.metrics_mock
 
@@ -50,6 +53,9 @@
         'control.from_control_name',
 )
 
+# Max result size for the parser is 350 MB; large suites were getting throttled.
+DEFAULT_MAX_RESULT_SIZE_KB = 350000
+
 
 def parse_args():
     """Parse args."""
@@ -96,17 +102,22 @@
                       dest="suite_report", action="store_true",
                       default=False)
     parser.add_option("--datastore-creds",
-                      help=("The path to gcloud datastore credentials file, "
+                      help=("[DEPRECATED] "
+                            "The path to gcloud datastore credentials file, "
                             "which will be used to upload suite timeline "
-                            "report to gcloud. If not specified, the one "
-                            "defined in shadow_config will be used."),
-                      dest="datastore_creds", action="store", default=None)
-    parser.add_option("--export-to-gcloud-path",
-                      help=("The path to export_to_gcloud script. Please find "
-                            "chromite path on your server. The script is under "
-                            "chromite/bin/."),
-                      dest="export_to_gcloud_path", action="store",
+                            "report to gcloud."),
+                      dest="datastore_creds",
+                      action="store",
                       default=None)
+    parser.add_option(
+            "--export-to-gcloud-path",
+            help=("[DEPRECATED] "
+                  "The path to export_to_gcloud script. Please find "
+                  "chromite path on your server. The script is under "
+                  "chromite/bin/."),
+            dest="export_to_gcloud_path",
+            action="store",
+            default=None)
     parser.add_option("--disable-perf-upload",
                       help=("Do not upload perf results to chrome perf."),
                       dest="disable_perf_upload", action="store_true",
@@ -120,26 +131,6 @@
         parser.print_help()
         sys.exit(1)
 
-    if not options.datastore_creds:
-        gcloud_creds = global_config.global_config.get_config_value(
-            'GCLOUD', 'cidb_datastore_writer_creds', default=None)
-        options.datastore_creds = (site_utils.get_creds_abspath(gcloud_creds)
-                                   if gcloud_creds else None)
-
-    if not options.export_to_gcloud_path:
-        export_script = 'chromiumos/chromite/bin/export_to_gcloud'
-        # If it is a lab server, the script is under ~chromeos-test/
-        if os.path.exists(os.path.expanduser('~chromeos-test/%s' %
-                                             export_script)):
-            path = os.path.expanduser('~chromeos-test/%s' % export_script)
-        # If it is a local workstation, it is probably under ~/
-        elif os.path.exists(os.path.expanduser('~/%s' % export_script)):
-            path = os.path.expanduser('~/%s' % export_script)
-        # If it is not found anywhere, the default will be set to None.
-        else:
-            path = None
-        options.export_to_gcloud_path = path
-
     # pass the options back
     return options, args
 
@@ -204,12 +195,12 @@
     For example, assume Job(job_idx=105) are retried by Job(job_idx=108), after
     this method is run, their tko_tests rows will look like:
     __________________________________________________________________________
-    test_idx| job_idx | test            | ... | invalid | invalidates_test_idx
-    10      | 105     | dummy_Fail.Error| ... | 1       | NULL
-    11      | 105     | dummy_Fail.Fail | ... | 1       | NULL
+    test_idx| job_idx | test              | ... | invalid | invalidates_test_idx
+    10      | 105     | example_Fail.Error| ... | 1       | NULL
+    11      | 105     | example_Fail.Fail | ... | 1       | NULL
     ...
-    20      | 108     | dummy_Fail.Error| ... | 0       | 10
-    21      | 108     | dummy_Fail.Fail | ... | 0       | 11
+    20      | 108     | example_Fail.Error| ... | 0       | 10
+    21      | 108     | example_Fail.Fail | ... | 0       | 11
     __________________________________________________________________________
     Note the invalid bits of the rows for Job(job_idx=105) are set to '1'.
     And the 'invalidates_test_idx' fields of the rows for Job(job_idx=108)
@@ -241,11 +232,11 @@
     # identifies a test run, but 'test' does not.
     # In a control file, one could run the same test with different
     # 'subdir_tag', for example,
-    #     job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_1')
-    #     job.run_test('dummy_Fail', tag='Error', subdir_tag='subdir_2')
+    #     job.run_test('example_Fail', tag='Error', subdir_tag='subdir_1')
+    #     job.run_test('example_Fail', tag='Error', subdir_tag='subdir_2')
     # In tko, we will get
-    #    (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_1')
-    #    (test='dummy_Fail.Error', subdir='dummy_Fail.Error.subdir_2')
+    #    (test='example_Fail.Error', subdir='example_Fail.Error.subdir_1')
+    #    (test='example_Fail.Error', subdir='example_Fail.Error.subdir_2')
     invalidated_tests = {(orig_test.test, orig_test.subdir): orig_test
                          for orig_test in orig_tests}
     for retry in retry_tests:
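As the docstring and comments above describe, a retry is matched to its original run by the (test, subdir) pair, the original row is marked invalid, and the retry points back at it via invalidates_test_idx. A minimal sketch of that pairing, using simple stand-in test objects rather than the real tko models:

    def link_retries(orig_tests, retry_tests):
        """Mark original runs invalid and point each retry at the run it replaces."""
        # (test, subdir) uniquely identifies a run; 'test' alone does not,
        # because the same test can be run with different subdir_tag values.
        invalidated_tests = {(t.test, t.subdir): t for t in orig_tests}
        for retry in retry_tests:
            orig = invalidated_tests.get((retry.test, retry.subdir))
            if orig is not None:
                orig.invalid = 1
                retry.invalidates_test_idx = orig.test_idx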
@@ -274,7 +265,7 @@
 
     max_result_size_KB = _max_result_size_from_control(path)
     if max_result_size_KB is None:
-        max_result_size_KB = control_data.DEFAULT_MAX_RESULT_SIZE_KB
+        max_result_size_KB = DEFAULT_MAX_RESULT_SIZE_KB
 
     try:
         result_utils.execute(path, max_result_size_KB)
@@ -297,7 +288,7 @@
         try:
             max_result_size_KB = control_data.parse_control(
                     control, raise_warnings=False).max_result_size_KB
-            if max_result_size_KB != control_data.DEFAULT_MAX_RESULT_SIZE_KB:
+            if max_result_size_KB != DEFAULT_MAX_RESULT_SIZE_KB:
                 return max_result_size_KB
         except IOError as e:
             tko_utils.dprint(
@@ -315,7 +306,7 @@
 
     @param job: database object.
     @param jobname: the job name as string.
-    @param filename: The path to the results to be parsed.
+    @param filename: the destination path for the serialized binary results.
     """
     from autotest_lib.tko import job_serializer
 
@@ -332,19 +323,19 @@
                     e.g. '1234-chromeos-test/host1'
     @param path: The path to the results to be parsed.
     @param parse_options: _ParseOptions instance.
+
+    @return job: the parsed job object
     """
     reparse = parse_options.reparse
     mail_on_failure = parse_options.mail_on_failure
     dry_run = parse_options.dry_run
     suite_report = parse_options.suite_report
-    datastore_creds = parse_options.datastore_creds
-    export_to_gcloud_path = parse_options.export_to_gcloud_path
 
     tko_utils.dprint("\nScanning %s (%s)" % (jobname, path))
     old_job_idx = db.find_job(jobname)
     if old_job_idx is not None and not reparse:
         tko_utils.dprint("! Job is already parsed, done")
-        return
+        return None
 
     # look up the status version
     job_keyval = models.job.read_keyval(path)
@@ -356,7 +347,7 @@
     status_log_path = _find_status_log_path(path)
     if not status_log_path:
         tko_utils.dprint("! Unable to parse job, no status file")
-        return
+        return None
     _parse_status_log(parser, job, status_log_path)
 
     if old_job_idx is not None:
@@ -382,10 +373,12 @@
             job.suite = label_info.get('suite', None)
 
     if 'suite' in job.keyval_dict:
-      job.suite = job.keyval_dict['suite']
+        job.suite = job.keyval_dict['suite']
 
     result_utils_lib.LOG =  tko_utils.dprint
-    _throttle_result_size(path)
+
+    # Do not throttle results for now (b/207409280)
+    # _throttle_result_size(path)
 
     # Record test result size to job_keyvals
     start_time = time.time()
@@ -393,7 +386,7 @@
             path, log=tko_utils.dprint)
     tko_utils.dprint('Finished collecting result sizes after %s seconds' %
                      (time.time()-start_time))
-    job.keyval_dict.update(result_size_info.__dict__)
+    job.keyval_dict.update(result_size_info._asdict())
 
     # TODO(dshi): Update sizes with sponge_invocation.xml and throttle it.
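The switch from result_size_info.__dict__ to result_size_info._asdict() reflects that the result-size info is a namedtuple: namedtuple instances do not reliably expose a per-instance __dict__ on Python 3, while _asdict() always returns their fields as a dict. A small illustration (the field names below are invented, not the real result-size fields):

    import collections

    # Hypothetical namedtuple standing in for the real result size info.
    ResultSizeInfo = collections.namedtuple(
            'ResultSizeInfo', ['client_result_collected_KB', 'result_throttled'])

    info = ResultSizeInfo(client_result_collected_KB=1024, result_throttled=0)
    keyval_dict = {}
    # {'client_result_collected_KB': 1024, 'result_throttled': 0}
    keyval_dict.update(info._asdict())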
 
@@ -430,11 +423,6 @@
             for test in job.tests:
                 perf_uploader.upload_test(job, test, jobname)
 
-        # Upload job details to Sponge.
-        sponge_url = sponge_utils.upload_results(job, log=tko_utils.dprint)
-        if sponge_url:
-            job.keyval_dict['sponge_url'] = sponge_url
-
         _write_job_to_db(db, jobname, job)
 
         # Verify the job data is written to the database.
@@ -476,41 +464,6 @@
     if not dry_run:
         db.commit()
 
-    # Generate a suite report.
-    # Check whether this is a suite job, a suite job will be a hostless job, its
-    # jobname will be <JOB_ID>-<USERNAME>/hostless, the suite field will not be
-    # NULL. Only generate timeline report when datastore_parent_key is given.
-    datastore_parent_key = job_keyval.get('datastore_parent_key', None)
-    provision_job_id = job_keyval.get('provision_job_id', None)
-    if (suite_report and jobname.endswith('/hostless')
-        and job.suite and datastore_parent_key):
-        tko_utils.dprint('Start dumping suite timing report...')
-        timing_log = os.path.join(path, 'suite_timing.log')
-        dump_cmd = ("%s/site_utils/dump_suite_report.py %s "
-                    "--output='%s' --debug" %
-                    (common.autotest_dir, job.afe_job_id,
-                        timing_log))
-
-        if provision_job_id is not None:
-            dump_cmd += " --provision_job_id=%d" % int(provision_job_id)
-
-        subprocess.check_output(dump_cmd, shell=True)
-        tko_utils.dprint('Successfully finish dumping suite timing report')
-
-        if (datastore_creds and export_to_gcloud_path
-            and os.path.exists(export_to_gcloud_path)):
-            upload_cmd = [export_to_gcloud_path, datastore_creds,
-                            timing_log, '--parent_key',
-                            datastore_parent_key]
-            tko_utils.dprint('Start exporting timeline report to gcloud')
-            subprocess.check_output(upload_cmd)
-            tko_utils.dprint('Successfully export timeline report to '
-                                'gcloud')
-        else:
-            tko_utils.dprint('DEBUG: skip exporting suite timeline to '
-                                'gcloud, because either gcloud creds or '
-                                'export_to_gcloud script is not found.')
-
     # Mark GS_OFFLOADER_NO_OFFLOAD in gs_offloader_instructions at the end of
     # the function, so any failure, e.g., db connection error, will stop
     # gs_offloader_instructions being updated, and logs can be uploaded for
@@ -529,6 +482,7 @@
             gs_offloader_instructions[constants.GS_OFFLOADER_NO_OFFLOAD] = True
             with open(gs_instructions_file, 'w') as f:
                 json.dump(gs_offloader_instructions, f)
+    return job
 
 
 def _write_job_to_db(db, jobname, job):
@@ -599,7 +553,7 @@
 
 
 def _delete_tests_from_db(db, tests):
-    for test_idx in tests.itervalues():
+    for test_idx in six.itervalues(tests):
         where = {'test_idx' : test_idx}
         db.delete('tko_iteration_result', where)
         db.delete('tko_iteration_perf_value', where)
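six.itervalues(tests) replaces the Python-2-only dict.itervalues() so the same code runs under either interpreter; the same pattern (six.iteritems, six.itervalues) is used throughout this change. A minimal sketch:

    import six

    tests = {'example_Fail.Error': 10, 'example_Fail.Fail': 11}

    # Dispatches to dict.itervalues() on Python 2 and iter(dict.values()) on Python 3.
    for test_idx in six.itervalues(tests):
        print(test_idx)

    for name, test_idx in six.iteritems(tests):
        print(name, test_idx)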
@@ -617,7 +571,8 @@
     # if there's a .machines file, use it to get the subdirs
     machine_list = os.path.join(path, ".machines")
     if os.path.exists(machine_list):
-        subdirs = set(line.strip() for line in file(machine_list))
+        with open(machine_list, 'r') as ml:
+            subdirs = set(line.strip() for line in ml.readlines())
         existing_subdirs = set(subdir for subdir in subdirs
                                if os.path.exists(os.path.join(path, subdir)))
         if len(existing_subdirs) != 0:
@@ -763,7 +718,7 @@
                 flags |= fcntl.LOCK_NB
             try:
                 fcntl.flock(lockfile, flags)
-            except IOError, e:
+            except IOError as e:
                 # lock is not available and nonblock has been requested
                 if e.errno == errno.EWOULDBLOCK:
                     lockfile.close()
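The except IOError as e form keeps the non-blocking lock handling working on Python 3 (where IOError is an alias of OSError). The surrounding code takes an exclusive flock on a lock file and treats EWOULDBLOCK as "another parser already holds the lock". A compact sketch of that pattern, with a hypothetical lock-file path:

    import errno
    import fcntl

    def try_lock(lockfile_path='/tmp/parse.lock', nonblock=True):
        """Return a locked file object, or None if the lock is already held."""
        lockfile = open(lockfile_path, 'w')
        flags = fcntl.LOCK_EX
        if nonblock:
            flags |= fcntl.LOCK_NB
        try:
            fcntl.flock(lockfile, flags)
        except IOError as e:
            # Lock is not available and non-blocking mode was requested.
            if e.errno == errno.EWOULDBLOCK:
                lockfile.close()
                return None
            raise
        return lockfile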
diff --git a/tko/parsers/test/execute_parser.py b/tko/parsers/test/execute_parser.py
index 2bdd84e..eafa878 100755
--- a/tko/parsers/test/execute_parser.py
+++ b/tko/parsers/test/execute_parser.py
@@ -1,7 +1,10 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 """Reexecute parser in scenario and store the result at specified tag.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import optparse, sys
 from os import path
 import common
@@ -21,7 +24,7 @@
     parser_result_tag = args[1]
 
     if not path.exists(scenario_dirpath) or not path.isdir(scenario_dirpath):
-        print 'Invalid scenarios_dirpath:', scenario_dirpath
+        print('Invalid scenarios_dirpath:', scenario_dirpath)
         parser.print_help()
         sys.exit(1)
 
@@ -29,7 +32,7 @@
     harness = scenario_base.new_parser_harness(results_dirpath)
     try:
         parser_result = harness.execute()
-    except Exception, e:
+    except Exception as e:
         parser_result = e
     scenario_base.store_parser_result(
         scenario_dirpath, parser_result, parser_result_tag)
diff --git a/tko/parsers/test/inspect_parser_result_store.py b/tko/parsers/test/inspect_parser_result_store.py
index b6b572f..3709f8d 100755
--- a/tko/parsers/test/inspect_parser_result_store.py
+++ b/tko/parsers/test/inspect_parser_result_store.py
@@ -1,10 +1,13 @@
-#!/usr/bin/python2 -i
+#!/usr/bin/python3 -i
 """Inspector for parser_result.store from specified scenerio package.
 
 Load in parser_result.store as 'sto' and launch interactive interp.
 Define some helper functions as required.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import optparse, os, sys
 from os import path
 import common
@@ -22,7 +25,7 @@
 
 scenario_dirpath = path.normpath(args[0])
 if not path.exists(scenario_dirpath) or not path.isdir(scenario_dirpath):
-    print 'Invalid scenarios_dirpath:', scenario_dirpath
+    print('Invalid scenarios_dirpath:', scenario_dirpath)
     parser.print_help()
     sys.exit(1)
 
@@ -31,12 +34,12 @@
 
 
 def compare(left_tag, right_tag):
-    missing = set([left_tag, right_tag]).difference(sto.keys())
+    missing = set([left_tag, right_tag]).difference(list(sto.keys()))
     if missing:
-        print 'Store does not have the following tag(s): ', ','.join(missing)
-        print 'Doing nothing.'
+        print('Store does not have the following tag(s): ', ','.join(missing))
+        print('Doing nothing.')
         return
 
     for diffline in scenario_base.compare_parser_results(
         sto[left_tag], sto[right_tag]):
-        print diffline
+        print(diffline)
diff --git a/tko/parsers/test/new_scenario.py b/tko/parsers/test/new_scenario.py
index f72d38c..f58b8b5 100755
--- a/tko/parsers/test/new_scenario.py
+++ b/tko/parsers/test/new_scenario.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 """Create new scenario test instance from an existing results directory.
 
 This automates creation of regression tests for the results parsers.
@@ -17,6 +17,10 @@
 be easily extended and configured once generated.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import optparse, os, shutil, sys
 from os import path
 
@@ -47,13 +51,13 @@
 
     results_dirpath = path.normpath(args[0])
     if not path.exists(results_dirpath) or not path.isdir(results_dirpath):
-        print 'Invalid results_dirpath:', results_dirpath
+        print('Invalid results_dirpath:', results_dirpath)
         parser.print_help()
         sys.exit(1)
 
     scenarios_dirpath = path.normpath(args[1])
     if not path.exists(scenarios_dirpath) or not path.isdir(scenarios_dirpath):
-        print 'Invalid scenarios_dirpath:', scenarios_dirpath
+        print('Invalid scenarios_dirpath:', scenarios_dirpath)
         parser.print_help()
         sys.exit(1)
 
@@ -65,7 +69,7 @@
     scenario_package_dirpath = path.join(
         scenarios_dirpath, package_dirname)
     if path.exists(scenario_package_dirpath):
-        print (
+        print(
             'Scenario package already exists at path: %s' %
             scenario_package_dirpath)
         parser.print_help()
@@ -86,7 +90,7 @@
     harness = scenario_base.new_parser_harness(copied_dirpath)
     try:
         parser_result = harness.execute()
-    except Exception, e:
+    except Exception as e:
         parser_result = e
 
     scenario_base.store_parser_result(
diff --git a/tko/parsers/test/scenario_base.py b/tko/parsers/test/scenario_base.py
index 3732259..120ca92 100644
--- a/tko/parsers/test/scenario_base.py
+++ b/tko/parsers/test/scenario_base.py
@@ -1,14 +1,20 @@
+# Lint as: python2, python3
 """Base support for parser scenario testing.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 from os import path
-import ConfigParser, os, shelve, shutil, sys, tarfile, time
+import six.moves.configparser, os, shelve, shutil, sys, tarfile, time
 import difflib, itertools
 import common
 from autotest_lib.client.common_lib import utils, autotemp
 from autotest_lib.tko import parser_lib
 from autotest_lib.tko.parsers.test import templates
 from autotest_lib.tko.parsers.test import unittest_hotfix
+import six
+from six.moves import zip
 
 TEMPLATES_DIRPATH = templates.__path__[0]
 # Set TZ used to UTC
@@ -55,8 +61,8 @@
           orig: Exception; To copy
         """
         self.classname = orig.__class__.__name__
-        print "Copying exception:", self.classname
-        for key, val in orig.__dict__.iteritems():
+        print("Copying exception:", self.classname)
+        for key, val in six.iteritems(orig.__dict__):
             setattr(self, key, val)
 
 
@@ -93,7 +99,7 @@
         Args:
             orig: testobj; Framework test result instance to copy.
         """
-        for key, val in orig.__dict__.iteritems():
+        for key, val in six.iteritems(orig.__dict__):
             if key == 'kernel':
                 setattr(self, key, dict(val.__dict__))
             elif key == 'iterations':
@@ -157,7 +163,7 @@
         if type(obj) is list:
             return [
                 '%d) %s' % pair
-                for pair in itertools.izip(itertools.count(), obj)]
+                for pair in zip(itertools.count(), obj)]
         else:
             return ['i) %s' % obj]
 
@@ -231,7 +237,7 @@
 
 
     def test_status_version(self):
-        """Ensure basic sanity."""
+        """Ensure basic functionality."""
         self.skipIf(not self.harness)
         self.assertEquals(
             self.harness.status_version, self.expected_status_version)
@@ -245,7 +251,7 @@
 
 
 def new_parser_harness(results_dirpath):
-    """Ensure sane environment and create new parser with wrapper.
+    """Ensure valid environment and create new parser with wrapper.
 
     Args:
       results_dirpath: str; Path to job results directory
@@ -347,9 +353,9 @@
       package_dirpath: str; Path to scenario package directory.
       properties: dict; Key value entries to write to to config file.
     """
-    config = ConfigParser.RawConfigParser()
+    config = six.moves.configparser.RawConfigParser()
     config.add_section(TEST)
-    for key, val in properties.iteritems():
+    for key, val in six.iteritems(properties):
         config.set(TEST, key, val)
 
     config_filepath = path.join(package_dirpath, CONFIG_FILENAME)
@@ -367,7 +373,7 @@
     Returns:
       ConfigParser.RawConfigParser;
     """
-    config = ConfigParser.RawConfigParser()
+    config = six.moves.configparser.RawConfigParser()
     config_filepath = path.join(package_dirpath, CONFIG_FILENAME)
     config.read(config_filepath)
     return config
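six.moves.configparser resolves to the ConfigParser module on Python 2 and configparser on Python 3, so store_config and load_config keep a single code path. A minimal round-trip sketch with a throwaway section and file name:

    import six.moves.configparser as configparser

    config = configparser.RawConfigParser()
    config.add_section('test')
    config.set('test', 'status_version', '1')

    with open('scenario.cfg', 'w') as f:  # hypothetical file name
        config.write(f)

    loaded = configparser.RawConfigParser()
    loaded.read('scenario.cfg')
    print(loaded.get('test', 'status_version'))  # -> '1'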
diff --git a/tko/parsers/test/templates/base.py b/tko/parsers/test/templates/base.py
index 1a8a80a..a5442d6 100755
--- a/tko/parsers/test/templates/base.py
+++ b/tko/parsers/test/templates/base.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 """
 This is not meant to be executed unless copied into a
 scenario package and renamed with a _unittest suffix.
diff --git a/tko/parsers/test/unittest_hotfix.py b/tko/parsers/test/unittest_hotfix.py
index 206ff37..6eda4d2 100644
--- a/tko/parsers/test/unittest_hotfix.py
+++ b/tko/parsers/test/unittest_hotfix.py
@@ -1,10 +1,15 @@
+# Lint as: python2, python3
 """Monkey patch lame-o vanilla unittest with test skip feature.
 
 From the patch that was never applied (shameful!):
 http://bugs.python.org/issue1034053
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import time, unittest
+from six.moves import map
 
 
 class SkipException(Exception):
@@ -130,8 +135,8 @@
     self.stream.writeln()
     if not result.wasSuccessful():
         self.stream.write("FAILED (")
-        failed, errored, skipped = map(
-            len, (result.failures, result.errors, result.skipped))
+        failed, errored, skipped = list(map(
+            len, (result.failures, result.errors, result.skipped)))
         if failed:
             self.stream.write("failures=%d" % failed)
         if errored:
diff --git a/tko/parsers/version_0.py b/tko/parsers/version_0.py
index cfcac42..b67d737 100644
--- a/tko/parsers/version_0.py
+++ b/tko/parsers/version_0.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # pylint: disable=missing-docstring
 import os
 import re
@@ -92,7 +93,7 @@
                                if l.startswith('board%3A')]
                 if board_labels:
                     # Multiple board/model labels aren't supposed to
-                    # happen, but let's report something sane rather
+                    # happen, but let's report something valid rather
                     # than just failing.
                     machine_groups.add(','.join(board_labels))
                 else:
@@ -111,7 +112,8 @@
     def find_hostname(path):
         hostname = os.path.join(path, "sysinfo", "hostname")
         try:
-            machine = open(hostname).readline().rstrip()
+            with open(hostname) as rf:
+                machine = rf.readline().rstrip()
             return machine
         except Exception:
             tko_utils.dprint("Could not read a hostname from "
@@ -190,7 +192,9 @@
             return None
 
         base, patches, hashes = "UNKNOWN", [], []
-        for line in file(path):
+        with open(path) as rf:
+            lines = rf.readlines()
+        for line in lines:
             head, rest = line.split(": ", 1)
             rest = rest.split()
             if head == "BASE":
@@ -285,7 +289,7 @@
         if not match:
             # A more useful error message than:
             #  AttributeError: 'NoneType' object has no attribute 'groups'
-            # to help us debug WTF happens on occasion here.
+            # to help us debug what happens on occasion here.
             raise RuntimeError("line %r could not be parsed." % line)
         indent, line = match.groups()
         indent = len(indent)
diff --git a/tko/parsers/version_0_unittest.py b/tko/parsers/version_0_unittest.py
index 0490b72..13a084f 100755
--- a/tko/parsers/version_0_unittest.py
+++ b/tko/parsers/version_0_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 import unittest
 
diff --git a/tko/parsers/version_1.py b/tko/parsers/version_1.py
index 6e6c420..373a849 100644
--- a/tko/parsers/version_1.py
+++ b/tko/parsers/version_1.py
@@ -1,3 +1,8 @@
+# Lint as: python2, python3
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import json
 import math
 import os
@@ -9,6 +14,8 @@
 from autotest_lib.tko import utils as tko_utils
 from autotest_lib.tko.parsers import base
 from autotest_lib.tko.parsers import version_0
+from six.moves import map
+from six.moves import range
 
 
 class job(version_0.job):
@@ -173,7 +180,9 @@
         value = perf_dict['value']
         perf_dict['stddev'] = 0.0
         if isinstance(value, list):
-            value, stddev = mean_and_standard_deviation(map(float, value))
+            # list() is needed: map() returns a one-shot iterator on Python 3.
+            value, stddev = mean_and_standard_deviation(list(map(float,
+                                                                 value)))
             perf_dict['value'] = value
             perf_dict['stddev'] = stddev
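The list() around map(float, value) matters because map() returns a one-shot iterator on Python 3, and computing both a mean and a standard deviation requires walking the values more than once. A self-contained illustration, with a simplified stand-in for the real mean_and_standard_deviation helper:

    import math

    def mean_and_standard_deviation(data):
        # Simplified stand-in; the real helper lives elsewhere in autotest.
        n = len(data)                  # len() would fail on a bare map object
        mean = sum(data) / n
        variance = sum((x - mean) ** 2 for x in data) / n
        return mean, math.sqrt(variance)

    raw = ['1.0', '2.0', '3.0']
    mean, stddev = mean_and_standard_deviation(list(map(float, raw)))
    print(mean, stddev)                # 2.0 0.816...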
 
@@ -251,7 +260,7 @@
 
 
     @staticmethod
-    def make_dummy_abort(indent, subdir, testname, timestamp, reason):
+    def make_stub_abort(indent, subdir, testname, timestamp, reason):
         """
         Creates an abort string.
 
@@ -296,7 +305,7 @@
         """
         tko_utils.dprint('Unexpected indent: aborting log parse')
         line_buffer.put_back(line)
-        abort = parser.make_dummy_abort(
+        abort = parser.make_stub_abort(
             indent, subdir, testname, timestamp, reason)
         line_buffer.put_back(abort)
 
@@ -319,6 +328,7 @@
         started_time_stack = [None]
         subdir_stack = [None]
         testname_stack = [None]
+        running_client = None
         running_test = None
         running_reasons = set()
         ignored_lines = []
@@ -370,12 +380,12 @@
                     reason = 'Job aborted unexpectedly'
 
                 timestamp = line.optional_fields.get('timestamp')
-                for i in reversed(xrange(stack.size())):
+                for i in reversed(range(stack.size())):
                     if abort_subdir_stack:
                         subdir = abort_subdir_stack.pop()
                     else:
                         subdir = None
-                    abort = self.make_dummy_abort(
+                    abort = self.make_stub_abort(
                         i, subdir, subdir, timestamp, reason)
                     buffer.put(abort)
 
@@ -399,7 +409,7 @@
                 print_ignored_lines()
                 ignored_lines = []
 
-            # Do an initial sanity check of the indentation.
+            # Do an initial check of the indentation.
             expected_indent = stack.size()
             if line.type == 'END':
                 expected_indent -= 1
@@ -563,6 +573,8 @@
                 tko_utils.dprint(msg)
                 new_tests.append(new_test)
 
+        if current_reason and not running_job.reason:
+            running_job.reason = current_reason
         # The job is finished; produce the final SERVER_JOB entry and exit.
         final_job = test.parse_test(self.job, '----', 'SERVER_JOB',
                                     self.job.exit_status(), running_job.reason,
diff --git a/tko/parsers/version_1_unittest.py b/tko/parsers/version_1_unittest.py
index 8477573..037b574 100755
--- a/tko/parsers/version_1_unittest.py
+++ b/tko/parsers/version_1_unittest.py
@@ -1,6 +1,7 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
-import datetime, time, unittest
+from autotest_lib.tko import models
+import datetime, time, unittest, mock
 
 import common
 from autotest_lib.client.common_lib import utils
@@ -321,7 +322,7 @@
 
 
 class DummyAbortTestCase(unittest.TestCase):
-    """Tests for the make_dummy_abort function."""
+    """Tests for the make_stub_abort function."""
 
     def setUp(self):
         self.indent = 3
@@ -331,9 +332,9 @@
         self.reason = 'Job aborted unexpectedly'
 
 
-    def test_make_dummy_abort_with_timestamp(self):
-        """Tests make_dummy_abort with a timestamp specified."""
-        abort = version_1.parser.make_dummy_abort(
+    def test_make_stub_abort_with_timestamp(self):
+        """Tests make_stub_abort with a timestamp specified."""
+        abort = version_1.parser.make_stub_abort(
             self.indent, self.subdir, self.testname, self.timestamp,
             self.reason)
         self.assertEquals(
@@ -341,30 +342,58 @@
             '\t' * self.indent, self.subdir, self.testname, self.timestamp,
             self.reason))
 
-    def test_make_dummy_abort_with_no_subdir(self):
-        """Tests make_dummy_abort with no subdir specified."""
-        abort= version_1.parser.make_dummy_abort(
+    def test_make_stub_abort_with_no_subdir(self):
+        """Tests make_stub_abort with no subdir specified."""
+    abort = version_1.parser.make_stub_abort(
             self.indent, None, self.testname, self.timestamp, self.reason)
         self.assertEquals(
             abort, '%sEND ABORT\t----\t%s\ttimestamp=%d\t%s' % (
             '\t' * self.indent, self.testname, self.timestamp, self.reason))
 
-    def test_make_dummy_abort_with_no_testname(self):
-        """Tests make_dummy_abort with no testname specified."""
-        abort= version_1.parser.make_dummy_abort(
+    def test_make_stub_abort_with_no_testname(self):
+        """Tests make_stub_abort with no testname specified."""
+    abort = version_1.parser.make_stub_abort(
         self.indent, self.subdir, None, self.timestamp, self.reason)
         self.assertEquals(
             abort, '%sEND ABORT\t%s\t----\ttimestamp=%d\t%s' % (
             '\t' * self.indent, self.subdir, self.timestamp, self.reason))
 
-    def test_make_dummy_abort_no_timestamp(self):
-        """Tests make_dummy_abort with no timestamp specified."""
-        abort = version_1.parser.make_dummy_abort(
+    def test_make_stub_abort_no_timestamp(self):
+        """Tests make_stub_abort with no timestamp specified."""
+        abort = version_1.parser.make_stub_abort(
             self.indent, self.subdir, self.testname, None, self.reason)
         self.assertEquals(
             abort, '%sEND ABORT\t%s\t%s\t%s' % (
             '\t' * self.indent, self.subdir, self.testname, self.reason))
 
 
+class test_parse_file(unittest.TestCase):
+    """Tests for parsing a status.log file."""
+
+    class fake_job(models.job):
+        """Fake job object."""
+
+        def exit_status(self):
+            """Fake exit_status method."""
+            return 'FAIL'
+
+    @staticmethod
+    def _parse_host_keyval(job_dir, hostname):
+        return {}
+
+    @mock.patch.object(models.test, 'parse_host_keyval', _parse_host_keyval)
+    def test_top_level_fail_with_reason(self):
+        """Tests that a status.log with a FAIL keeps the reason."""
+        job = self.fake_job('dir', 'user', 'label', 'machine', None, None,
+                            None, None, None, None, None, None)
+        parser = version_1.parser()
+        parser.start(job)
+        tests = parser.end([
+                'FAIL\t----\t----\ttimestamp=1615249387\tlocaltime=Mar 09 00:23:07\tThis is the reason.'
+        ])
+        self.assertEquals(tests[0].status, 'FAIL')
+        self.assertEquals(tests[0].reason, 'This is the reason.')
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/tko/perf_upload/perf_dashboard_config.json b/tko/perf_upload/perf_dashboard_config.json
index da32175..b1933a2 100644
--- a/tko/perf_upload/perf_dashboard_config.json
+++ b/tko/perf_upload/perf_dashboard_config.json
@@ -1,603 +1,628 @@
 [
   {
     "autotest_name": "audio_PlaybackPower",
-    "master_name": "ChromeOS_Audio"
+    "main_name": "ChromeOS_Audio"
   },
   {
     "autotest_name": "audio_WebRtcAudioLoopback",
-    "master_name": "ChromeOSWebRtcAudio"
+    "main_name": "ChromeOSWebRtcAudio"
   },
   {
     "autotest_name": "audiovideo_AVSync",
-    "master_name": "ChromeOS_Audio"
+    "main_name": "ChromeOS_Audio"
   },
   {
     "autotest_name": "autoupdate_EndToEndTest.paygen_au_beta_delta",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "autoupdate_EndToEndTest.paygen_au_beta_full",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "autoupdate_EndToEndTest.paygen_au_canary_delta",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "autoupdate_EndToEndTest.paygen_au_canary_full",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "autoupdate_EndToEndTest.paygen_au_dev_delta",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "autoupdate_EndToEndTest.paygen_au_dev_full",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "autoupdate_EndToEndTest.paygen_au_stable_delta",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "autoupdate_EndToEndTest.paygen_au_stable_full",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "bluetooth_Sanity_Discoverable",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "bluetooth_Sanity_Discovery",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "camera_HAL3Perf",
-    "master_name": "ChromeOSVideo"
+    "main_name": "ChromeOSVideo"
   },
   {
     "autotest_name": "cheets_StartAndroid.stress",
     "dashboard_test_name": "cheets_StartAndroid",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_regex": "cheets_CTS_.*\\.CtsDeqpTestCases$",
     "dashboard_test_name": "cheets_CTS",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_regex": "cheets_CTS_.*\\.CtsGraphicsTestCases$",
     "dashboard_test_name": "cheets_CTS",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_regex": "cheets_CTS_.*\\.CtsGraphics2TestCases$",
     "dashboard_test_name": "cheets_CTS",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_regex": "cheets_CTS_.*\\.CtsSkQPTestCases$",
     "dashboard_test_name": "cheets_CTS",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_regex": "cheets_CTS_.*CtsViewTestCases$",
     "dashboard_test_name": "cheets_CTS",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "enterprise_CFM_MeetingRoomScenario.idle_in_meeting_with_bots",
-    "master_name": "ChromeOS_Enterprise"
+    "main_name": "ChromeOS_Enterprise"
   },
   {
     "autotest_name": "enterprise_CFM_MeetingRoomScenario.idle_on_landing_page",
-    "master_name": "ChromeOS_Enterprise"
+    "main_name": "ChromeOS_Enterprise"
   },
   {
     "autotest_name": "enterprise_CFM_MeetingRoomScenario.join_leave_meeting_with_bots",
-    "master_name": "ChromeOS_Enterprise"
+    "main_name": "ChromeOS_Enterprise"
   },
   {
     "autotest_name": "enterprise_CFM_Perf.meet_app",
-    "master_name": "ChromeOS_Enterprise"
+    "main_name": "ChromeOS_Enterprise"
   },
   {
     "autotest_name": "firmware_StandbyPowerConsumption.30min",
-    "master_name": "ChromeOSPower"
+    "main_name": "ChromeOSPower"
   },
   {
     "autotest_name": "firmware_StandbyPowerConsumption.4hr",
-    "master_name": "ChromeOSPower"
+    "main_name": "ChromeOSPower"
   },
   {
     "autotest_name": "graphics_Chrome",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_dEQP.bvt",
     "dashboard_test_name": "graphics_dEQP",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_dEQP.gles2-master",
     "dashboard_test_name": "graphics_dEQP",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_dEQP.gles3-master",
     "dashboard_test_name": "graphics_dEQP",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_dEQP.gles31-master",
     "dashboard_test_name": "graphics_dEQP",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_dEQP.vk-master",
     "dashboard_test_name": "graphics_dEQP",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_Gbm",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_GLAPICheck",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_GLBench",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_GLMark2",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_KernelMemory",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_KernelConfig",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_Idle",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_LibDRM",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_SanAngeles",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
-    "autotest_name": "graphics_Sanity",
-    "master_name": "ChromeOS_Graphics"
+    "autotest_name": "graphics_Check",
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_Stress",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_PerfControl",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_VideoRenderingPower",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_WebGLAquarium",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_WebGLManyPlanetsDeep",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "graphics_VTSwitch",
-    "master_name": "ChromeOS_Graphics"
+    "main_name": "ChromeOS_Graphics"
   },
   {
     "autotest_name": "hardware_MemoryZRAMThroughput",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "hardware_RamFio",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "hardware_StorageFio",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "hardware_StorageFio.bvt",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "enterprise_LongevityTrackerServer.chromesign_20min",
-    "master_name": "ChromeOS_Enterprise"
+    "main_name": "ChromeOS_Enterprise"
   },
   {
     "autotest_name": "enterprise_LongevityTrackerServer.chromesign_3hr",
-    "master_name": "ChromeOS_Enterprise"
+    "main_name": "ChromeOS_Enterprise"
   },
   {
     "autotest_name": "enterprise_LongevityTrackerServer.riseplayer",
-    "master_name": "ChromeOS_Enterprise"
+    "main_name": "ChromeOS_Enterprise"
   },
   {
     "autotest_name": "network_WiFi_AssocConfigPerformance",
     "dashboard_test_name": "network_WiFi_AssocConfig",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.ht40_ch001",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.ht40_ch006",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.ht40_ch011",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.ht40_ch044",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.ht40_ch153",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.vht40_ch036",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.vht40_ch060",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.vht40_ch149",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.vht40_ch157",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.vht80_ch042",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_AttenuatedPerf.vht80_ch155",
     "dashboard_test_name": "network_WiFi_AttenuatedPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_BluetoothStreamPerf.11a",
     "dashboard_test_name": "network_WiFi_BluetoothStreamPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_BluetoothStreamPerf.11b",
     "dashboard_test_name": "network_WiFi_BluetoothStreamPerf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_Perf.11g",
     "dashboard_test_name": "network_WiFi_Perf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_Perf.11g_aes",
     "dashboard_test_name": "network_WiFi_Perf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_Perf.11g_tkip",
     "dashboard_test_name": "network_WiFi_Perf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_Perf.ht20",
     "dashboard_test_name": "network_WiFi_Perf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_Perf.ht20_aes",
     "dashboard_test_name": "network_WiFi_Perf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_Perf.ht40",
     "dashboard_test_name": "network_WiFi_Perf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_Perf.ht40_aes",
     "dashboard_test_name": "network_WiFi_Perf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
+  },
+  {
+    "autotest_name": "network_WiFi_Perf.vht20",
+    "dashboard_test_name": "network_WiFi_Perf",
+    "main_name": "ChromeOSWiFi"
+  },
+  {
+    "autotest_name": "network_WiFi_Perf.vht20_aes",
+    "dashboard_test_name": "network_WiFi_Perf",
+    "main_name": "ChromeOSWiFi"
+  },
+  {
+    "autotest_name": "network_WiFi_Perf.vht40",
+    "dashboard_test_name": "network_WiFi_Perf",
+    "main_name": "ChromeOSWiFi"
+  },
+  {
+    "autotest_name": "network_WiFi_Perf.vht40_aes",
+    "dashboard_test_name": "network_WiFi_Perf",
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_Perf.vht80",
     "dashboard_test_name": "network_WiFi_Perf",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
+  },
+  {
+    "autotest_name": "network_WiFi_Perf.vht80_pmf",
+    "dashboard_test_name": "network_WiFi_Perf",
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_PTK",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check1x_PEAP",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check1x_TTLS",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check1x_WEP",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check1x_WPA",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check5HT20",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check5HT40",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check5VHT80_pure",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check5VHT80_mixed",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check11a",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check11b",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check11g",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check24HT20",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_check24HT40",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkDFS",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkHidden",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkHiddenWEP",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkHiddenWPA",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkMixedWPA",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkWEP40",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkWEP104",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkWPA2",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkWPA2_PMF",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkWPA2_TKIP",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkWPA_CCMP",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkWPA_TKIP",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SimpleConnect.wifi_checkWPA_multi",
     "dashboard_test_name": "network_WiFi_SimpleConnect",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SuspendStress.5HT40",
     "dashboard_test_name": "network_WiFi_SuspendStress",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SuspendStress.11g",
     "dashboard_test_name": "network_WiFi_SuspendStress",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SuspendStress.24HT40",
     "dashboard_test_name": "network_WiFi_SuspendStress",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SuspendStress.Hidden",
     "dashboard_test_name": "network_WiFi_SuspendStress",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "network_WiFi_SuspendStress.WPA2",
     "dashboard_test_name": "network_WiFi_SuspendStress",
-    "master_name": "ChromeOSWiFi"
+    "main_name": "ChromeOSWiFi"
   },
   {
     "autotest_name": "platform_BootPerfServer",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "platform_GesturesRegressionTest",
-    "master_name": "ChromeOSTouchpad"
+    "main_name": "ChromeOSTouchpad"
   },
   {
     "autotest_name": "platform_CryptohomeFio.16k_write",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "platform_CryptohomeFio.dirty_setting",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "platform_CryptohomeFio.surfing",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "platform_InitLoginPerfServer",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "platform_LogoutPerf",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "platform_Pkcs11LoadPerf",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "power_Consumption",
-    "master_name": "ChromeOSPower"
+    "main_name": "ChromeOSPower"
   },
   {
     "autotest_name": "power_Idle",
-    "master_name": "ChromeOSPower"
+    "main_name": "ChromeOSPower"
   },
   {
     "autotest_name": "power_LoadTest.WIFI_1hr",
-    "master_name": "ChromeOSPower"
+    "main_name": "ChromeOSPower"
   },
   {
     "autotest_name": "power_LoadTest.WIRED_1hr",
-    "master_name": "ChromeOSPower"
+    "main_name": "ChromeOSPower"
   },
   {
     "autotest_name": "power_Resume",
-    "master_name": "ChromeOSPower"
+    "main_name": "ChromeOSPower"
   },
   {
     "autotest_name": "power_UiResume",
-    "master_name": "ChromeOSPower"
+    "main_name": "ChromeOSPower"
   },
   {
     "autotest_regex": "tast\\..*",
-    "master_name": "ChromeOSTast"
+    "main_name": "ChromeOSTast"
   },
   {
     "autotest_name": "telemetry_Benchmarks.backdrop_filter.rendering.desktop",
     "dashboard_test_name": "rendering.desktop",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "telemetry_Benchmarks.blink_perf.image_decoder",
     "dashboard_test_name": "blink_perf.image_decoder",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "telemetry_Benchmarks.loading.desktop",
     "dashboard_test_name": "loading.desktop",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "telemetry_Benchmarks.rendering.desktop",
     "dashboard_test_name": "rendering.desktop",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "telemetry_Benchmarks.speedometer2",
     "dashboard_test_name": "speedometer2",
-    "master_name": "ChromeOSPerf"
+    "main_name": "ChromeOSPerf"
   },
   {
     "autotest_name": "video_WebRtcMainFeedSwitching.perf",
-    "master_name": "ChromeOSWebRtcVideo"
+    "main_name": "ChromeOSWebRtcVideo"
   },
   {
     "autotest_name": "video_WebRtcResolutionSwitching.perf",
-    "master_name": "ChromeOSWebRtcVideo"
+    "main_name": "ChromeOSWebRtcVideo"
   },
   {
     "autotest_name": "webrtc_PausePlayPeerConnections.audio_perf",
-    "master_name": "ChromeOSWebRtcVideo"
+    "main_name": "ChromeOSWebRtcVideo"
   },
   {
     "autotest_name": "webrtc_PausePlayPeerConnections.video_perf",
-    "master_name": "ChromeOSWebRtcVideo"
+    "main_name": "ChromeOSWebRtcVideo"
   }
 ]
diff --git a/tko/perf_upload/perf_uploader.py b/tko/perf_upload/perf_uploader.py
index 7834da3..c9e8802 100644
--- a/tko/perf_upload/perf_uploader.py
+++ b/tko/perf_upload/perf_uploader.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -13,12 +14,11 @@
 
 """
 
-import httplib
+import six.moves.http_client
 import json
 import os
 import re
-import urllib
-import urllib2
+from six.moves import urllib
 
 import common
 from autotest_lib.tko import utils as tko_utils
@@ -43,7 +43,7 @@
         _ROOT_DIR, 'perf_dashboard_shadow_config.json')
 _SERVICE_ACCOUNT_FILE = '/creds/service_accounts/skylab-drone.json'
 
-# Format for Chrome and Chrome OS version strings.
+# Format for Chrome and ChromeOS version strings.
 VERSION_REGEXP = r'^(\d+)\.(\d+)\.(\d+)\.(\d+)$'
 
 
@@ -64,7 +64,7 @@
     @returns A dictionary mapping each unique autotest name to a dictionary
         of presentation config information.
 
-    @raises PerfUploadingError if config data or master name for the test
+    @raises PerfUploadingError if config data or main name for the test
         is missing from the config file.
 
     """
@@ -108,14 +108,14 @@
                 'No config data is specified for test %s in %s.' %
                 (test_name, _PRESENTATION_CONFIG_FILE))
     try:
-        master_name = presentation_dict['master_name']
+        main_name = presentation_dict['main_name']
     except KeyError:
         raise PerfUploadingError(
-                'No master name is specified for test %s in %s.' %
+                'No main name is specified for test %s in %s.' %
                 (test_name, _PRESENTATION_CONFIG_FILE))
     if 'dashboard_test_name' in presentation_dict:
         test_name = presentation_dict['dashboard_test_name']
-    return {'master_name': master_name, 'test_name': test_name}
+    return {'main_name': main_name, 'test_name': test_name}
 
 
 def _format_for_upload(board_name, cros_version, chrome_version,
@@ -127,7 +127,7 @@
     specially-formatted JSON string.  In particular, the JSON object must be a
     dictionary with key "data", and value being a list of dictionaries where
     each dictionary contains all the information associated with a single
-    measured perf value: master name, bot name, test name, perf value, error
+    measured perf value: main name, bot name, test name, perf value, error
     value, units, and build version numbers.
 
     @param board_name: The string name of the image board name.
@@ -156,21 +156,24 @@
           'charts': perf_values,
         }
 
+    # TODO b:169251326 terms below are set outside of this codebase and
+    # should be updated when possible ("master" -> "main"). # nocheck
+    # see catapult-project/catapult/dashboard/dashboard/add_point.py
     dash_entry = {
-        'master': presentation_info['master_name'],
-        'bot': 'cros-' + board_name,  # Prefix to clarify it's ChromeOS.
-        'point_id': _get_id_from_version(chrome_version, cros_version),
-        'versions': {
-            'cros_version': cros_version,
-            'chrome_version': chrome_version,
-        },
-        'supplemental': {
-            'default_rev': 'r_cros_version',
-            'hardware_identifier': hardware_id,
-            'hardware_hostname': hardware_hostname,
-            'jobname': jobname,
-        },
-        'chart_data': perf_values,
+            'master': presentation_info['main_name'],  # nocheck
+            'bot': 'cros-' + board_name,  # Prefix to clarify it's ChromeOS.
+            'point_id': _get_id_from_version(chrome_version, cros_version),
+            'versions': {
+                    'cros_version': cros_version,
+                    'chrome_version': chrome_version,
+            },
+            'supplemental': {
+                    'default_rev': 'r_cros_version',
+                    'hardware_identifier': hardware_id,
+                    'hardware_hostname': hardware_hostname,
+                    'jobname': jobname,
+            },
+            'chart_data': perf_values,
     }
     return {'data': json.dumps(dash_entry)}
 
@@ -181,7 +184,7 @@
     @param test_attributes: The attributes property (which is a dict) of an
         autotest tko.models.test object.
 
-    @return A pair of strings (Chrome OS version, Chrome version).
+    @return A pair of strings (ChromeOS version, Chrome version).
 
     @raises PerfUploadingError if a version isn't formatted as expected.
     """
@@ -191,9 +194,10 @@
     # Use the release milestone as the milestone if present, otherwise prefix
     # the cros version with the Chrome browser milestone.
     if cros_milestone:
-      cros_version = "%s.%s" % (cros_milestone, cros_version)
+        cros_version = "%s.%s" % (cros_milestone, cros_version)
     else:
-      cros_version = chrome_version[:chrome_version.find('.') + 1] + cros_version
+        cros_version = chrome_version[:chrome_version.find('.') +
+                                      1] + cros_version
     if not re.match(VERSION_REGEXP, cros_version):
         raise PerfUploadingError('CrOS version "%s" does not match expected '
                                  'format.' % cros_version)
@@ -239,7 +243,7 @@
     """
 
     # Number of digits to use from each part of the version string for Chrome
-    # and Chrome OS versions when building a point ID out of these two versions.
+    # and ChromeOS versions when building a point ID out of these two versions.
     chrome_version_col_widths = [0, 0, 5, 3]
     cros_version_col_widths = [0, 5, 3, 2]
 
@@ -316,19 +320,19 @@
     @raises PerfUploadingError if an exception was raised when uploading.
 
     """
-    encoded = urllib.urlencode(data_obj)
-    req = urllib2.Request(_DASHBOARD_UPLOAD_URL, encoded)
+    encoded = urllib.parse.urlencode(data_obj)
+    req = urllib.request.Request(_DASHBOARD_UPLOAD_URL, encoded)
     _add_oauth_token(req.headers)
     try:
-        urllib2.urlopen(req)
-    except urllib2.HTTPError as e:
+        urllib.request.urlopen(req)
+    except urllib.error.HTTPError as e:
         raise PerfUploadingError('HTTPError: %d %s for JSON %s\n' % (
                 e.code, e.msg, data_obj['data']))
-    except urllib2.URLError as e:
+    except urllib.error.URLError as e:
         raise PerfUploadingError(
                 'URLError: %s for JSON %s\n' %
                 (str(e.reason), data_obj['data']))
-    except httplib.HTTPException:
+    except six.moves.http_client.HTTPException:
         raise PerfUploadingError(
                 'HTTPException for JSON %s\n' % data_obj['data'])
 
@@ -399,4 +403,3 @@
     else:
         tko_utils.dprint('Successfully uploaded perf data to the perf '
                          'dashboard for test %s.' % test_name)
-
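For reference, a minimal sketch of the six-based request pattern this file moves to; post_form, the URL, and the field names are placeholders, and the explicit encode() is only needed because Python 3 expects bytes for the request body.

    import six.moves.http_client
    from six.moves import urllib

    def post_form(url, fields):
        """POST url-encoded fields; runs unchanged on Python 2 and 3."""
        encoded = urllib.parse.urlencode(fields).encode('utf-8')
        req = urllib.request.Request(url, encoded)
        try:
            return urllib.request.urlopen(req).read()
        except urllib.error.HTTPError as e:
            raise RuntimeError('HTTP %d: %s' % (e.code, e.msg))
        except urllib.error.URLError as e:
            raise RuntimeError('URL error: %s' % e.reason)
        except six.moves.http_client.HTTPException:
            raise RuntimeError('HTTP protocol error')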
diff --git a/tko/perf_upload/perf_uploader_unittest.py b/tko/perf_upload/perf_uploader_unittest.py
index 190249b..966f2eb 100755
--- a/tko/perf_upload/perf_uploader_unittest.py
+++ b/tko/perf_upload/perf_uploader_unittest.py
@@ -1,9 +1,13 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 """Unit tests for the perf_uploader.py module.
 
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import json
 import os
 import tempfile
@@ -12,6 +16,7 @@
 import common
 from autotest_lib.tko import models as tko_models
 from autotest_lib.tko.perf_upload import perf_uploader
+import six
 
 
 class test_aggregate_iterations(unittest.TestCase):
@@ -76,14 +81,14 @@
     def setUp(self):
         """Sets up for each test case."""
         self._perf_values = []
-        for iter_num, iter_data in self._PERF_ITERATION_DATA.iteritems():
+        for iter_num, iter_data in six.iteritems(self._PERF_ITERATION_DATA):
             self._perf_values.append(
                     tko_models.perf_value_iteration(iter_num, iter_data))
 
 
 
-class test_json_config_file_sanity(unittest.TestCase):
-    """Sanity tests for the JSON-formatted presentation config file."""
+class test_json_config_file(unittest.TestCase):
+    """Tests for the JSON-formatted presentation config file."""
 
     def test_parse_json(self):
         """Verifies _parse_config_file function."""
@@ -114,22 +119,21 @@
             self.fail('Presentation config file could not be parsed as JSON.')
 
 
-    def test_required_master_name(self):
-        """Verifies that master name must be specified."""
+    def test_required_main_name(self):
+        """Verifies that main name must be specified."""
         json_obj = []
         try:
             with open(perf_uploader._PRESENTATION_CONFIG_FILE, 'r') as fp:
                 json_obj = json.load(fp)
         except:
             self.fail('Presentation config file could not be parsed as JSON.')
-
         for entry in json_obj:
-            if not 'master_name' in entry:
-                self.fail('Missing master field for test %s.' %
+            if not 'main_name' in entry:
+                self.fail('Missing main field for test %s.' %
                           entry['autotest_name'])
 
 class test_get_image_board_name(unittest.TestCase):
-    """Sanity tests for retrieving the image board name."""
+    """Tests for retrieving the image board name."""
     def test_normal_platform(self):
         """Verify image board name is equal to the platform in normal image."""
         platform = 'veyron_jerry'
@@ -138,7 +142,7 @@
                          'veyron_jerry')
 
     def test_empty_platform(self):
-        """Sanity Verify image board name is equal to the platform."""
+        """Verify image board name is equal to the platform."""
         platform = ''
         image = '-release/R78-12428.0.0'
         self.assertEqual(perf_uploader._get_image_board_name(platform, image),
@@ -163,12 +167,12 @@
 
     _PRESENT_INFO = {
         'test_name': {
-            'master_name': 'new_master_name',
+            'main_name': 'new_main_name',
             'dashboard_test_name': 'new_test_name',
         }
     }
 
-    _PRESENT_INFO_MISSING_MASTER = {
+    _PRESENT_INFO_MISSING_MAIN = {
         'test_name': {
             'dashboard_test_name': 'new_test_name',
         }
@@ -176,18 +180,18 @@
 
     _PRESENT_INFO_REGEX = {
         'test_name.*': {
-            'master_name': 'new_master_name',
+            'main_name': 'new_main_name',
             'dashboard_test_name': 'new_test_name',
         }
     }
 
     _PRESENT_INFO_COLLISION = {
         'test_name.*': {
-            'master_name': 'new_master_name',
+            'main_name': 'new_main_name',
             'dashboard_test_name': 'new_test_name',
         },
         'test_name-test.*': {
-            'master_name': 'new_master_name',
+            'main_name': 'new_main_name',
             'dashboard_test_name': 'new_test_name',
         },
     }
@@ -209,11 +213,11 @@
                     self._PRESENT_INFO, 'test_name_P')
             self.assertTrue(
                     all([key in result for key in
-                         ['test_name', 'master_name']]),
+                         ['test_name', 'main_name']]),
                     msg='Unexpected keys in resulting dictionary: %s' % result)
-            self.assertEqual(result['master_name'], 'new_master_name',
-                             msg='Unexpected "master_name" value: %s' %
-                                 result['master_name'])
+            self.assertEqual(result['main_name'], 'new_main_name',
+                             msg='Unexpected "main_name" value: %s' %
+                                 result['main_name'])
             self.assertEqual(result['test_name'], 'new_test_name',
                              msg='Unexpected "test_name" value: %s' %
                                  result['test_name'])
@@ -224,11 +228,11 @@
                 self._PRESENT_INFO, 'test_name')
         self.assertTrue(
                 all([key in result for key in
-                     ['test_name', 'master_name']]),
+                     ['test_name', 'main_name']]),
                 msg='Unexpected keys in resulting dictionary: %s' % result)
-        self.assertEqual(result['master_name'], 'new_master_name',
-                         msg='Unexpected "master_name" value: %s' %
-                             result['master_name'])
+        self.assertEqual(result['main_name'], 'new_main_name',
+                         msg='Unexpected "main_name" value: %s' %
+                             result['main_name'])
         self.assertEqual(result['test_name'], 'new_test_name',
                          msg='Unexpected "test_name" value: %s' %
                              result['test_name'])
@@ -242,24 +246,24 @@
                         self._PRESENT_INFO, 'other_test_name')
 
 
-    def test_master_not_specified(self):
-        """Verifies exception raised if master is not there."""
+    def test_main_not_specified(self):
+        """Verifies exception raised if main is not there."""
         self.assertRaises(
                 perf_uploader.PerfUploadingError,
                 perf_uploader._gather_presentation_info,
-                    self._PRESENT_INFO_MISSING_MASTER, 'test_name')
+                    self._PRESENT_INFO_MISSING_MAIN, 'test_name')
 
 
 class test_parse_and_gather_presentation(unittest.TestCase):
     """Tests for _parse_config_file and then_gather_presentation_info."""
     _AUTOTEST_NAME_CONFIG = """[{
         "autotest_name": "test.test.VM",
-        "master_name": "ChromeOSPerf"
+        "main_name": "ChromeOSPerf"
     }]"""
 
     _AUTOTEST_REGEX_CONFIG = r"""[{
         "autotest_regex": "test\\.test\\.VM.*",
-        "master_name": "ChromeOSPerf"
+        "main_name": "ChromeOSPerf"
     }]"""
 
     def setUp(self):
@@ -277,7 +281,7 @@
         result = perf_uploader._gather_presentation_info(config, test_name)
         self.assertEqual(result, {
             'test_name': test_name,
-            'master_name': 'ChromeOSPerf'
+            'main_name': 'ChromeOSPerf'
         })
 
     def test_autotest_name_is_exact_matched(self):
@@ -318,7 +322,7 @@
             result = perf_uploader._gather_presentation_info(config, test_name)
             self.assertEqual(result, {
                 'test_name': test_name,
-                'master_name': 'ChromeOSPerf'
+                'main_name': 'ChromeOSPerf'
             })
 
     def test_autotest_regex_is_not_matched(self):
@@ -371,42 +375,44 @@
     """Tests for the _get_version_numbers function."""
 
     def test_with_valid_versions(self):
-      """Checks the version numbers used when data is formatted as expected."""
-      self.assertEqual(
-              ('34.5678.9.0', '34.5.678.9'),
-              perf_uploader._get_version_numbers(
-                  {
-                      'CHROME_VERSION': '34.5.678.9',
-                      'CHROMEOS_RELEASE_VERSION': '5678.9.0',
-                  }))
+        """Checks the version numbers used when data is formatted as expected."""
+        self.assertEqual(('34.5678.9.0', '34.5.678.9'),
+                         perf_uploader._get_version_numbers({
+                                 'CHROME_VERSION':
+                                 '34.5.678.9',
+                                 'CHROMEOS_RELEASE_VERSION':
+                                 '5678.9.0',
+                         }))
 
     def test_with_missing_version_raises_error(self):
-      """Checks that an error is raised when a version is missing."""
-      with self.assertRaises(perf_uploader.PerfUploadingError):
-          perf_uploader._get_version_numbers(
-              {
-                  'CHROMEOS_RELEASE_VERSION': '5678.9.0',
-              })
+        """Checks that an error is raised when a version is missing."""
+        with self.assertRaises(perf_uploader.PerfUploadingError):
+            perf_uploader._get_version_numbers({
+                    'CHROMEOS_RELEASE_VERSION':
+                    '5678.9.0',
+            })
 
     def test_with_unexpected_version_format_raises_error(self):
-      """Checks that an error is raised when there's a rN suffix."""
-      with self.assertRaises(perf_uploader.PerfUploadingError):
-          perf_uploader._get_version_numbers(
-              {
-                  'CHROME_VERSION': '34.5.678.9',
-                  'CHROMEOS_RELEASE_VERSION': '5678.9.0r1',
-              })
+        """Checks that an error is raised when there's a rN suffix."""
+        with self.assertRaises(perf_uploader.PerfUploadingError):
+            perf_uploader._get_version_numbers({
+                    'CHROME_VERSION':
+                    '34.5.678.9',
+                    'CHROMEOS_RELEASE_VERSION':
+                    '5678.9.0r1',
+            })
 
     def test_with_valid_release_milestone(self):
-      """Checks the version numbers used when data is formatted as expected."""
-      self.assertEqual(
-              ('54.5678.9.0', '34.5.678.9'),
-              perf_uploader._get_version_numbers(
-                  {
-                      'CHROME_VERSION': '34.5.678.9',
-                      'CHROMEOS_RELEASE_VERSION': '5678.9.0',
-                      'CHROMEOS_RELEASE_CHROME_MILESTONE': '54',
-                  }))
+        """Checks the version numbers used when data is formatted as expected."""
+        self.assertEqual(('54.5678.9.0', '34.5.678.9'),
+                         perf_uploader._get_version_numbers({
+                                 'CHROME_VERSION':
+                                 '34.5.678.9',
+                                 'CHROMEOS_RELEASE_VERSION':
+                                 '5678.9.0',
+                                 'CHROMEOS_RELEASE_CHROME_MILESTONE':
+                                 '54',
+                         }))
 
 
 class test_format_for_upload(unittest.TestCase):
@@ -433,7 +439,7 @@
         },
     }
     _PRESENT_INFO = {
-        'master_name': 'new_master_name',
+        'main_name': 'new_main_name',
         'test_name': 'new_test_name',
     }
 
@@ -458,11 +464,12 @@
         def ordered(obj):
             """Return the sorted obj."""
             if isinstance(obj, dict):
-               return sorted((k, ordered(v)) for k, v in obj.items())
+                return sorted((k, ordered(v)) for k, v in obj.items())
             if isinstance(obj, list):
-               return sorted(ordered(x) for x in obj)
+                return sorted(ordered(x) for x in obj)
             else:
-               return obj
+                return obj
+
         fail_msg = 'Unexpected result string: %s' % actual_result
         self.assertEqual(ordered(expected), ordered(actual), msg=fail_msg)
 
@@ -473,39 +480,42 @@
                 'platform', '25.1200.0.0', '25.10.1000.0', 'WINKY E2A-F2K-Q35',
                 'test_machine', self._perf_data, self._PRESENT_INFO,
                 '52926644-username/hostname')
+        # TODO b:169251326 terms below are set outside of this codebase and
+        # should be updated when possible ("master" -> "main"). # nocheck
+        # see catapult-project/catapult/dashboard/dashboard/add_point.py
         expected_result_string = (
-          '{"versions":  {'
-             '"cros_version": "25.1200.0.0",'
-             '"chrome_version": "25.10.1000.0"'
-          '},'
-          '"point_id": 10000000120000000,'
-          '"bot": "cros-platform",'
-          '"chart_data": {'
-             '"charts": {'
-               '"metric2": {'
-                 '"summary": {'
-                   '"units": "frames_per_sec",'
-                   '"type": "scalar",'
-                   '"value": 101.35,'
-                   '"improvement_direction": "up"'
-                 '}'
-               '},'
-               '"metric1": {'
-                 '"summary": {'
-                 '"units": "msec",'
-                 '"type": "scalar",'
-                 '"value": 2.7,'
-                 '"improvement_direction": "down"}'
-               '}'
-             '}'
-          '},'
-          '"master": "new_master_name",'
-          '"supplemental": {'
-             '"hardware_identifier": "WINKY E2A-F2K-Q35",'
-             '"jobname": "52926644-username/hostname",'
-             '"hardware_hostname": "test_machine",'
-             '"default_rev": "r_cros_version"}'
-           '}')
+                '{"versions":  {'
+                '"cros_version": "25.1200.0.0",'
+                '"chrome_version": "25.10.1000.0"'
+                '},'
+                '"point_id": 10000000120000000,'
+                '"bot": "cros-platform",'
+                '"chart_data": {'
+                '"charts": {'
+                '"metric2": {'
+                '"summary": {'
+                '"units": "frames_per_sec",'
+                '"type": "scalar",'
+                '"value": 101.35,'
+                '"improvement_direction": "up"'
+                '}'
+                '},'
+                '"metric1": {'
+                '"summary": {'
+                '"units": "msec",'
+                '"type": "scalar",'
+                '"value": 2.7,'
+                '"improvement_direction": "down"}'
+                '}'
+                '}'
+                '},'
+                '"master": "new_main_name",'  # nocheck
+                '"supplemental": {'
+                '"hardware_identifier": "WINKY E2A-F2K-Q35",'
+                '"jobname": "52926644-username/hostname",'
+                '"hardware_hostname": "test_machine",'
+                '"default_rev": "r_cros_version"}'
+                '}')
         self._verify_result_string(result['data'], expected_result_string)
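The ordered() helper above makes the JSON comparison order-insensitive by recursively sorting dicts and lists; a standalone illustration with made-up data:

    def ordered(obj):
        """Return obj with dicts and lists recursively sorted."""
        if isinstance(obj, dict):
            return sorted((k, ordered(v)) for k, v in obj.items())
        if isinstance(obj, list):
            return sorted(ordered(x) for x in obj)
        return obj

    a = {'charts': ['metric1', 'metric2']}
    b = {'charts': ['metric2', 'metric1']}
    assert a != b
    assert ordered(a) == ordered(b)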
 
 
diff --git a/tko/query_history.cgi b/tko/query_history.cgi
deleted file mode 100755
index bfc90f1..0000000
--- a/tko/query_history.cgi
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/python2
-
-import sys, os
-import common
-import MySQLdb
-import urllib, db, unique_cookie
-
-uid = unique_cookie.unique_id('tko_history')
-
-
-def body():
-    db_obj = db.db()
-    condition = "uid='%s'" % uid
-    where = (condition,[])
-    try:
-        rows = db_obj.select("time_created,user_comment,url",
-                     "tko_query_history", where)
-    except MySQLdb.ProgrammingError, err:
-        print err
-        rows = ()
-    print '<table border="1">'
-    ##  Display history starting with the most recent queries
-    for row in reversed(rows):
-        (time_created, user_comment, tko_url) = row
-        print '<tr>'
-        print '<td>&nbsp;%s&nbsp;</td>' % time_created
-        print '<td>&nbsp;%s&nbsp;</td>' % user_comment
-        dict_url = {'delete':time_created}
-        link = 'save_query.cgi?' + urllib.urlencode(dict_url)
-        print '<td>&nbsp;<a href="%s">Delete</a>&nbsp;</td>' % link
-        print '<td><a href="%s">%s</a></td>' % (tko_url, tko_url)
-        print '</tr>'
-    print '</table>'
-
-    last_recorded_query = ''
-    if rows:
-        (time_created, user_comment, last_recorded_query) = rows[-1]
-    ## Link "Back to Autotest" on query history page
-    back_link = os.environ.get('HTTP_REFERER')
-    ## possible complications:
-    ## a) HTTP_REFERER = None
-    ## b) HTTP_REFERER is save_query page
-    ## In both cases we still want to get to tko results.
-    ## primary fall back: link to last_recorded_query
-    ## secondary fall back: link to opening tko page
-    if not "compose_query.cgi" in str(back_link):
-        back_link = last_recorded_query
-    if not back_link: ## e.g. history is empty and/or HTTP_REFERER unknown
-        back_link = "compose_query.cgi"
-    print '<br><a href="%s">Autotest Results</a><br>' % back_link
-
-
-def main():
-    print "Content-type: text/html\n"
-    print
-    # create the actual page
-    print '<html><head><title>'
-    print 'History of TKO usage'
-    print '</title></head><body>'
-    body()
-    print '</body></html>'
-
-
-main()
diff --git a/tko/query_lib.py b/tko/query_lib.py
index 0843b89..27c7c22 100644
--- a/tko/query_lib.py
+++ b/tko/query_lib.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 """
 This library provides a bunch of miscellaneous parameter parsing,
 sql generating and list cleanup library functions that are used
diff --git a/tko/reason_qualifier.py b/tko/reason_qualifier.py
index 6347a4f..240fa20 100644
--- a/tko/reason_qualifier.py
+++ b/tko/reason_qualifier.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 import re,string
 
 
@@ -54,8 +55,7 @@
             ## will keep it in reduced/generalized form
             reason_htable[reason_reduced].update(reason_reduced)
 
-    generic_reasons = reason_htable.keys()
+    generic_reasons = list(reason_htable.keys())
     generic_reasons.sort(key = (lambda k: reason_htable[k].num),
                          reverse = True)
-    return map(lambda generic_reason: reason_htable[generic_reason].html(),
-                            generic_reasons)
+    return [reason_htable[generic_reason].html() for generic_reason in generic_reasons]
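The two changes above follow the standard Python 3 pattern: dict.keys() returns a view with no sort(), and map() returns a lazy iterator, so both are materialized into lists. A minimal sketch with a made-up table:

    table = {'error b': 3, 'error a': 5}

    keys = list(table.keys())              # view -> list before in-place sort
    keys.sort(key=lambda k: table[k], reverse=True)

    rendered = [k.upper() for k in keys]   # list comprehension instead of map()
    assert rendered == ['ERROR A', 'ERROR B']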
diff --git a/tko/retrieve_logs.cgi b/tko/retrieve_logs.cgi
deleted file mode 100755
index 1e488de..0000000
--- a/tko/retrieve_logs.cgi
+++ /dev/null
@@ -1,172 +0,0 @@
-#!/usr/bin/python2
-
-import cgi, os, socket, sys, urllib2
-import common
-from multiprocessing import pool
-from autotest_lib.frontend import setup_django_environment
-
-from autotest_lib.client.common_lib import global_config
-from autotest_lib.client.bin import utils
-from autotest_lib.frontend.afe.json_rpc import serviceHandler
-from autotest_lib.server import system_utils
-from autotest_lib.server import utils as server_utils
-
-
-_PAGE = """\
-Status: 302 Found
-Content-Type: text/plain
-Location: %s\r\n\r
-"""
-
-VIEWER_PREFIX = 'stainless.corp.google.com/browse/'
-
-# Define function for retrieving logs
-def _retrieve_logs_dummy(job_path):
-    pass
-
-site_retrieve_logs = utils.import_site_function(__file__,
-    "autotest_lib.tko.site_retrieve_logs", "site_retrieve_logs",
-    _retrieve_logs_dummy)
-
-site_find_repository_host = utils.import_site_function(__file__,
-    "autotest_lib.tko.site_retrieve_logs", "site_find_repository_host",
-    _retrieve_logs_dummy)
-
-form = cgi.FieldStorage(keep_blank_values=True)
-# determine if this is a JSON-RPC request. we support both so that the new TKO
-# client can use its RPC client code, but the old TKO can still use simple GET
-# params.
-_is_json_request = form.has_key('callback')
-
-# if this key exists, we check if requested log exists in local machine,
-# and do not return Google Storage URL when the log doesn't exist.
-_local_only = form.has_key('localonly')
-
-
-def _get_requested_path():
-    if _is_json_request:
-        request_data = form['request'].value
-        request = serviceHandler.ServiceHandler.translateRequest(request_data)
-        parameters = request['params'][0]
-        return parameters['path']
-
-    return form['job'].value
-
-
-def _check_result(args):
-    host = args['host']
-    job_path = args['job_path']
-    shard = args['shard']
-    if shard:
-        http_path = 'http://%s/tko/retrieve_logs.cgi?localonly&job=%s' % (
-                host, job_path)
-    else:
-        http_path = 'http://%s%s' % (host, job_path)
-
-    try:
-        # HACK: This urlopen call isn't forwarding HTTP headers correctly. This
-        # leads to uberproxy sitting between master (orignator of this request)
-        # and shard (target of the request) to redirect to the the login page.
-        # We detect this condition and reject the target shard as a viable
-        # redirect. The implication is that we will not redirect to the shard
-        # even if the user could themselves access the shard with the correct
-        # credentials.
-        u = utils.urlopen(http_path)
-        redirected_url = u.geturl()
-        if 'accounts.google.com' in redirected_url:
-            return None
-
-        # On Vms the shard name is set to the default gateway but the
-        # browser used to navigate frontends (that runs on the host of
-        # the vms) is immune to the same NAT routing the vms have, so we
-        # need to replace the gateway with 'localhost'.
-        if utils.DEFAULT_VM_GATEWAY in host:
-            normalized_host = host.replace(utils.DEFAULT_VM_GATEWAY, 'localhost')
-        else:
-            try:
-                normalized_host = utils.normalize_hostname(host)
-            except socket.herror:
-                # Ignore error: 'socket.herror: [Errno 1] Unknown host'
-                # This can happen when reverse name lookup is not stable.
-                normalized_host = host
-        return 'http', normalized_host, job_path
-    except urllib2.URLError:
-        return None
-
-
-def _get_tpool_args(hosts, job_path, is_shard, host_set):
-    """Get a list of arguments to be passed to multiprocessing.pool.ThreadPool.
-
-    @param hosts: a list of host names.
-    @param job_path: a requested job path.
-    @param is_shard: True if hosts are shards, False otherwise.
-    @param host_set: a Set to filter out duplicated hosts.
-
-    @return: a list of dictionaries to be used as input of _check_result().
-    """
-    args = []
-    for host in hosts:
-        host = host.strip()
-        if host and host != 'localhost' and host not in host_set:
-            host_set.add(host)
-            arg = {'host': host, 'job_path': job_path, 'shard': is_shard}
-            args.append(arg)
-    return args
-
-
-def find_repository_host(job_path):
-    """Find the machine holding the given logs and return a URL to the logs"""
-    site_repo_info = site_find_repository_host(job_path)
-    if site_repo_info is not None:
-        return site_repo_info
-
-    # This cgi script is run only in master (cautotest) and shards.
-    # Drones do not run this script when receiving '/results/...' request.
-    # Only master should check drones and shards for the requested log.
-    # Also restricted users do not have access to drones or shards,
-    # always point them to localhost or google storage.
-    if (not server_utils.is_shard() and
-        not server_utils.is_restricted_user(os.environ.get('REMOTE_USER'))):
-        drones = system_utils.get_drones()
-        shards = system_utils.get_shards()
-
-        host_set = set()
-        tpool_args = _get_tpool_args(drones, job_path, False, host_set)
-        tpool_args += _get_tpool_args(shards, job_path, True, host_set)
-
-        tpool = pool.ThreadPool()
-        for result_path in tpool.imap_unordered(_check_result, tpool_args):
-            if result_path:
-                return result_path
-
-    # If the URL requested is a test result, it is now either on the local
-    # host or in Google Storage.
-    if job_path.startswith('/results/'):
-        # We only care about the path after '/results/'.
-        job_relative_path = job_path[9:]
-        if not _local_only and not os.path.exists(
-                    os.path.join('/usr/local/autotest/results',
-                                 job_relative_path)):
-            gsuri = utils.get_offload_gsuri().split('gs://')[1]
-            return ['https', VIEWER_PREFIX, gsuri + job_relative_path]
-
-
-def get_full_url(info, log_path):
-    if info is not None:
-        protocol, host, path = info
-        prefix = '%s://%s' % (protocol, host)
-    else:
-        prefix = ''
-        path = log_path
-
-    if _is_json_request:
-        return '%s/tko/jsonp_fetcher.cgi?%s' % (prefix,
-                                                os.environ['QUERY_STRING'])
-    else:
-        return prefix + path
-
-
-log_path = _get_requested_path()
-info = find_repository_host(log_path)
-site_retrieve_logs(log_path)
-print _PAGE % get_full_url(info, log_path)
diff --git a/tko/save_query.cgi b/tko/save_query.cgi
deleted file mode 100755
index 0c1bcc1..0000000
--- a/tko/save_query.cgi
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/python2
-
-import os, cgi, cgitb, time, urllib
-import db, unique_cookie
-
-## setting script globals
-form = cgi.FieldStorage()
-if 'label' in form.keys():
-    comment = form['label'].value
-else:
-    comment = ''
-dict_url = {}
-for key in form.keys():
-    dict_url[key] = form[key].value
-
-tm = time.asctime()
-uid = unique_cookie.unique_id('tko_history')
-HTTP_REFERER = os.environ.get('HTTP_REFERER')
-if HTTP_REFERER is None:
-    ## fall back strategy for proxy connection
-    ## substitute relative url
-    HTTP_REFERER = 'compose_query.cgi?' + urllib.urlencode(dict_url)    
-
-
-class QueryHistoryError(Exception):
-    pass
-
-
-def log_query():
-    db_obj = db.db()
-    data_to_insert = {'uid':uid, 'time_created':tm,
-              'user_comment':comment, 'url':HTTP_REFERER }
-    try:
-        db_obj.insert('tko_query_history', data_to_insert)
-    except:
-        raise QueryHistoryError("Could not save query")
-
-
-def delete_query(time_stamp):
-    ## query is marked for delete by time stamp
-    db_obj = db.db()
-    data_to_delete = {'time_created':time_stamp}
-    try:
-        db_obj.delete('tko_query_history', data_to_delete)
-    except Exception:
-        raise QueryHistoryError("Could not delete query")
-    
-
-def body():
-    if not 'delete' in dict_url.keys():
-        log_query()
-        print '<b>%s</b><br><br>' % "Your query has been saved"
-        print 'time: %s<br>' % tm
-        print 'comments: %s<br><br>' % comment
-    else:
-        ## key 'delete' has arg value of time_stamp
-        ## which identifies the query to be deleted
-        time_stamp = dict_url['delete']
-        delete_query(time_stamp)
-        print '<b>%s</b><br><br>' % "Your query has been deleted"
-
-    print '<a href="query_history.cgi">View saved queries</a>&nbsp;&nbsp;'
-    print '<br><br>'
-    if not 'delete' in dict_url.keys():
-        print '<a href="%s">Back to Autotest</a><br>' % HTTP_REFERER
-    else:
-        print '<a href="compose_query.cgi">Autotest Results</a><br>'
-
-
-def main():
-    print "Content-type: text/html\n"
-    print '<html><head><title>'
-    print '</title></head>'
-    print '<body>'
-    body()
-    print '</body>'
-    print '</html>'
-
-
-main()
diff --git a/tko/site_parse b/tko/site_parse
index efef5ce..b15aefe 100755
--- a/tko/site_parse
+++ b/tko/site_parse
@@ -1,4 +1,4 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 
 import common
 from autotest_lib.tko import site_parse
diff --git a/tko/site_parse.py b/tko/site_parse.py
index 40ff098..23e6ea7 100755
--- a/tko/site_parse.py
+++ b/tko/site_parse.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -13,13 +13,13 @@
 #
 # The parser uses the test report generator which comes bundled with the Chrome
 # OS source tree in order to maintain consistency. As well as not having to keep
-# track of any secondary failure white lists.
+# track of any secondary failure allow lists.
 #
 # Stack trace generation is done by the minidump_stackwalk utility which is also
-# bundled with the Chrome OS source tree. Requires gsutil and cros_sdk utilties
+# bundled with the ChromeOS source tree. Requires gsutil and cros_sdk utilties
 # be present in the path.
 #
-# The path to the Chrome OS source tree is defined in global_config under the
+# The path to the ChromeOS source tree is defined in global_config under the
 # CROS section as 'source_tree'.
 #
 # Existing parse behavior is kept completely intact. If the site parser is not
@@ -27,6 +27,10 @@
 # called.
 #
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import errno
 import json
 import os
@@ -39,6 +43,7 @@
 from autotest_lib.tko import parse
 from autotest_lib.tko import utils as tko_utils
 from autotest_lib.tko.parsers import version_0
+import six
 
 
 # Name of the report file to produce upon completion.
@@ -76,7 +81,7 @@
 
         Args:
             results_dir: Full path to the results directory to process.
-            cros_src_dir: Full path to Chrome OS source tree. Must have a
+            cros_src_dir: Full path to ChromeOS source tree. Must have a
                 working chroot.
         """
         self._results_dir = results_dir
@@ -97,7 +102,7 @@
         if not os.path.exists(cache_dir):
             try:
                 os.makedirs(cache_dir)
-            except OSError, e:
+            except OSError as e:
                 if e.errno != errno.EEXIST:
                     raise
         return cache_dir
@@ -171,7 +176,7 @@
     # Results directory should be the last argument passed in.
     results_dir = sys.argv[-1]
 
-    # Load the Chrome OS source tree location.
+    # Load the ChromeOS source tree location.
     cros_src_dir = global_config.global_config.get_config_value(
         'CROS', 'source_tree', default='')
 
@@ -183,13 +188,13 @@
             ' to default parser.')
         return
 
-    # Load ResultCollector from the Chrome OS source tree.
+    # Load ResultCollector from the ChromeOS source tree.
     sys.path.append(os.path.join(
         cros_src_dir, 'src/platform/crostestutils/utils_py'))
     from generate_test_report import ResultCollector
 
-    # Collect results using the standard Chrome OS test report generator. Doing
-    # so allows us to use the same crash white list and reporting standards the
+    # Collect results using the standard ChromeOS test report generator. Doing
+    # so allows us to use the same crash allow list and reporting standards the
     # VM based test instances use.
     # TODO(scottz): Reevaluate this code usage. crosbug.com/35282
     results = ResultCollector().RecursivelyCollectResults(results_dir)
@@ -215,7 +220,7 @@
             continue
 
         # Parse failure reason for this test.
-        for t, r in parse_reason(test_dict['testdir']).iteritems():
+        for t, r in six.iteritems(parse_reason(test_dict['testdir'])):
             # Server tests may have subtests which will each have their own
             # reason, so display the test name for the subtest in that case.
             if t != test_name:
diff --git a/tko/site_parse_unittest.py b/tko/site_parse_unittest.py
index 1ad3694..bb9bb21 100755
--- a/tko/site_parse_unittest.py
+++ b/tko/site_parse_unittest.py
@@ -1,5 +1,3 @@
-#!/usr/bin/python2 -u
-#
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -7,9 +5,15 @@
 
 #pylint: disable-msg=C0111
 
-import mox, os, shutil, tempfile, unittest
+import os
+import shutil
+import tempfile
+import unittest
+from unittest.mock import patch
+from six.moves import reload_module as reload
 
 import common
+
 from django.conf import settings
 from autotest_lib.client.common_lib import global_config
 from autotest_lib.frontend import database_settings_helper
@@ -35,7 +39,7 @@
             'CROS', 'source_tree', default=None)
 
         if not self._cros_src_dir:
-            self.fail('No Chrome OS source tree defined in global_config.ini')
+            self.fail('No ChromeOS source tree defined in global_config.ini')
 
         self._stack_trace = StackTrace(
             self._fake_results, self._cros_src_dir)
@@ -115,7 +119,7 @@
         self.assertEqual(version, '1166.0.0')
 
 
-class database_selection_test(mox.MoxTestBase,
+class database_selection_test(unittest.TestCase,
                               frontend_test_utils.FrontendTestMixin):
 
     def setUp(self):
@@ -170,7 +174,7 @@
                           reload, settings)
 
 
-    def testRunOnMasterWithoutGlobalConfigsWorks(self):
+    def testRunOnMainWithoutGlobalConfigsWorks(self):
         global_config.global_config.override_config_value(
                 'SHARD', 'shard_hostname', '')
         from autotest_lib.frontend import settings
@@ -212,15 +216,16 @@
             raise ConnectCalledException
 
         tko_db.db_sql.connect = None
-        self.mox.StubOutWithMock(tko_db.db_sql, 'connect')
-        tko_db.db_sql.connect(
-                global_host, global_db, global_user, global_pw,
-                global_port).WithSideEffects(fake_connect)
+        patcher = patch.object(tko_db.db_sql, 'connect')
+        mock = patcher.start()
+        self.addCleanup(patcher.stop)
 
-        self.mox.ReplayAll()
-
+        mock.side_effect = fake_connect
         self.assertRaises(ConnectCalledException, tko_db.db_sql)
 
+        mock.assert_called_with(global_host, global_db, global_user, global_pw,
+                                global_port)
+
 
 if __name__ == "__main__":
     unittest.main()
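The mox-to-unittest.mock conversion above boils down to patch.object plus side_effect and assert_called_with; the same shape against a throwaway class (Db here is hypothetical, not tko_db):

    import unittest
    from unittest.mock import patch

    class Db(object):
        def connect(self, host):
            raise RuntimeError('real connection not wanted in tests')

    class ConnectTest(unittest.TestCase):
        def test_connect_is_stubbed(self):
            patcher = patch.object(Db, 'connect')
            mock = patcher.start()
            self.addCleanup(patcher.stop)
            mock.side_effect = ValueError('boom')

            with self.assertRaises(ValueError):
                Db().connect('localhost')
            mock.assert_called_with('localhost')

    if __name__ == '__main__':
        unittest.main()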
diff --git a/tko/status_lib_unittest.py b/tko/status_lib_unittest.py
index d36b3ef..d80387e 100755
--- a/tko/status_lib_unittest.py
+++ b/tko/status_lib_unittest.py
@@ -1,9 +1,13 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import unittest
 import common
 from autotest_lib.tko import status_lib, parser_lib
 from autotest_lib.client.common_lib import log
+from six.moves import range
 
 
 class clean_raw_line_test(unittest.TestCase):
@@ -38,7 +42,7 @@
 
     def test_is_fifo(self):
         buf = status_lib.line_buffer()
-        lines = ["line #%d" for x in xrange(10)]
+        lines = ["line #%d" for x in range(10)]
         for line in lines:
             buf.put(line)
         results = []
@@ -49,8 +53,8 @@
 
     def test_put_multiple_same_as_multiple_puts(self):
         buf_put, buf_multi = [status_lib.line_buffer()
-                              for x in xrange(2)]
-        lines = ["line #%d" % x for x in xrange(10)]
+                              for x in range(2)]
+        lines = ["line #%d" % x for x in range(10)]
         for line in lines:
             buf_put.put(line)
         buf_multi.put_multiple(lines)
@@ -133,9 +137,9 @@
 
 
     def test_worse_overrides_better(self):
-        for i in xrange(len(self.statuses)):
+        for i in range(len(self.statuses)):
             worse_status = self.statuses[i]
-            for j in xrange(i + 1, len(self.statuses)):
+            for j in range(i + 1, len(self.statuses)):
                 stack = status_lib.status_stack()
                 better_status = self.statuses[j]
                 stack.update(better_status)
@@ -145,9 +149,9 @@
 
 
     def test_better_never_overrides_better(self):
-        for i in xrange(len(self.statuses)):
+        for i in range(len(self.statuses)):
             better_status = self.statuses[i]
-            for j in xrange(i):
+            for j in range(i):
                 stack = status_lib.status_stack()
                 worse_status = self.statuses[j]
                 stack.update(worse_status)
diff --git a/tko/test.cgi b/tko/test.cgi
deleted file mode 100755
index 4fbee9f..0000000
--- a/tko/test.cgi
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/python2
-"""
-Further display the tests in a matrix of the form tests X machines
-to help understand the results selected from the previous form.
-"""
-
-print "Content-type: text/html\n"
-import cgi, cgitb, os, sys, re
-sys.stdout.flush()
-cgitb.enable()
-
-import common
-from autotest_lib.tko import db, display, frontend
-
-db = db.db()
-
-def main():
-    display.print_main_header()
-    
-    form = cgi.FieldStorage()
-
-    if form.has_key('sql'):
-        sql = form['sql'].value
-
-    if form.has_key('values'):
-        values = [val for val in form['values'].value.split(',')]
-
-    if not sql:
-        return
-    if not values:
-        return
-
-    tests = frontend.test.select_sql(db, sql, values)
-
-    # get the list of tests/machines to populate the row and column header.
-    testname = [test.testname for test in tests]
-    machine_idx = [test.machine_idx for test in tests]
-
-    # We dont want repetitions in the table row/column headers,
-    # so eliminate the dups.
-    uniq_test = list(set(testname))
-    uniq_machine_idx = list(set(machine_idx))
-
-    header_row = [ display.box('', header = True) ]
-    for test_name in uniq_test:
-        header_row.append(display.box(test_name, header=True))
-    matrix = [header_row]
-    for machine in uniq_machine_idx:
-        mach_name = db.select_sql('hostname', 'machines',
-                 ' where machine_idx=%s', [str(machine)])
-        row = [display.box(mach_name[0][0])]
-        for test_name in uniq_test:
-            testlist = [test for test in tests
-                     if test.machine_idx == machine
-                     and test.testname == test_name]
-            # url link to the first test.
-            # TODO: provide another level to show the different
-            #    test results.
-            link = None
-            if testlist and testlist[0]:
-                link = testlist[0].url
-            box = display.status_count_box(db, testlist, link=link)
-            row.append(box)
-        matrix.append(row)
-    display.print_table(matrix)
-
-main()
diff --git a/tko/unique_cookie.py b/tko/unique_cookie.py
index 112acdc..c7da626 100644
--- a/tko/unique_cookie.py
+++ b/tko/unique_cookie.py
@@ -1,3 +1,6 @@
+# Lint as: python2, python3
+from __future__ import division
+from __future__ import print_function
 import os, random
 
 
@@ -25,6 +28,6 @@
         uid = str(random.random())[2:16] # random string of 14 digits
         set_cookie_statement = 'Set-Cookie:%s=%s;' % (cookie_key, uid)
         set_cookie_statement += 'expires=Thu, 26-Dec-2013 22:03:25 GMT;'
-        print set_cookie_statement
+        print(set_cookie_statement)
 
     return uid
diff --git a/tko/utils_unittest.py b/tko/utils_unittest.py
index 790d7fb..da82014 100755
--- a/tko/utils_unittest.py
+++ b/tko/utils_unittest.py
@@ -1,10 +1,15 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 
 import os, unittest, time, datetime, itertools
 
 import common
 from autotest_lib.client.common_lib.test_utils import mock
 from autotest_lib.tko import utils
+from six.moves import zip
 
 
 class get_timestamp_test(unittest.TestCase):
@@ -35,7 +40,7 @@
     def test_date_can_be_string_or_integer(self):
         int_times = [1, 12, 123, 1234, 12345, 123456]
         str_times = [str(t) for t in int_times]
-        for int_t, str_t in itertools.izip(int_times, str_times):
+        for int_t, str_t in zip(int_times, str_times):
             date_int = utils.get_timestamp({"key": int_t}, "key")
             date_str = utils.get_timestamp({"key": str_t}, "key")
             self.assertEquals(date_int, date_str)
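As in the other unit-test files, the izip/xrange spellings are swapped for six.moves aliases, which resolve to the lazy built-ins on Python 3 and to the old names on Python 2; for example:

    from six.moves import range, zip

    pairs = list(zip(range(3), ['a', 'b', 'c']))
    assert pairs == [(0, 'a'), (1, 'b'), (2, 'c')]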
diff --git a/unblocked_terms.txt b/unblocked_terms.txt
new file mode 100644
index 0000000..2356098
--- /dev/null
+++ b/unblocked_terms.txt
@@ -0,0 +1,35 @@
+# This is the global list of words which are still too commonly used in the
+# codebase to be blocked outright. This list will continue to shrink as the
+# codebase moves away from the use of these terms.
+#
+# Copy this file to your project and comment more words to enable the repo hook
+# to block the words.
+# KEEP THIS COMMENT IN YOUR COPY.
+#
+# Don't delete this file if you want to keep keyword_check enabled even if it's
+# empty.
+#
+# See repohooks/README.md for more details.
+# black.?hat
+# black.?list
+# build.?cop
+dummy
+# first.?class.?citizen
+# gr[ae]y.?hat
+gr[ae]y.?list
+\bhe\b
+\bshe\b
+\bhim\b
+\bher\b
+\bhis\b
+\bhers\b
+man.?in.?the.?middle
+# master
+\bnative
+\bred.?line
+sanity
+# slave
+# white.?glove
+# white.?hat
+white.?label
+# white.?list
\ No newline at end of file
diff --git a/utils/OWNERS b/utils/OWNERS
new file mode 100644
index 0000000..ab758b3
--- /dev/null
+++ b/utils/OWNERS
@@ -0,0 +1 @@
+include /INFRA_OWNERS
diff --git a/utils/Python2MigrationFinder.py b/utils/Python2MigrationFinder.py
new file mode 100644
index 0000000..0318ad6
--- /dev/null
+++ b/utils/Python2MigrationFinder.py
@@ -0,0 +1,95 @@
+#!/usr/bin/python3
+
+import os
+
+
+def has_match(line):
+    """Check whether the file's current line matches py3_strs.
+
+    Args:
+            line: Current line to check.
+
+    return:
+            Boolean True or False.
+    """
+    py3_strs = [
+            "#!/usr/bin/python3", "#!/usr/bin/env python3",
+            "# lint as: python2, python3", "# lint as: python3"
+    ]
+    for match in py3_strs:
+        if match in line:
+            return True
+    return False
+
+
+def need_to_skip(fullname):
+    """Check whether this file or folder needs to be skipped, per skip_strs.
+
+    Args:
+            fullname: Current file or folder name.
+
+    return:
+            Boolean True or False.
+    """
+    skip_strs = ["__init__.py", "autotest_lib", "common.py", "site_tests"]
+    for match in skip_strs:
+        if match in fullname:
+            return True
+    return False
+
+
+def list_files_to_txt(upper_dir, file, suffix, nums_line_to_check):
+    """List results to a .txt file by checking all target files
+    under the folder and its subfolders.
+
+    Args:
+            upper_dir: The folder path to check. The default
+                    is the parent directory of this script.
+            file: Output .txt file. The default is Python2MigrationTarget.txt.
+            suffix: File extensions that need to be checked.
+            nums_line_to_check: The number of lines to check.
+
+    return:
+            All file names and paths that meet the standard.
+    """
+    exts = suffix.split(" ")
+    files = os.listdir(upper_dir)
+    for filename in files:
+        fullname = os.path.join(upper_dir, filename)
+        if need_to_skip(fullname):
+            continue
+        if os.path.isdir(fullname):
+            list_files_to_txt(fullname, file, suffix, nums_line_to_check)
+        else:
+            for ext in exts:
+                if filename.endswith(ext):
+                    filename = fullname
+                    with open(filename, "r") as f:
+                        for i in range(nums_line_to_check):
+                            line = str(f.readline().strip()).lower()
+                            if has_match(line):
+                                tail = filename.split("third_party")[-1]
+                                file.write("%s, 3\n" % tail)
+                            else:
+                                tail = filename.split("third_party")[-1]
+                                file.write("%s, 2\n" % tail)
+                            break
+
+
+def main():
+    """This is the main function."""
+    upper_dir = os.path.abspath(
+            os.path.join(os.path.dirname("__file__"), os.path.pardir))
+    outfile = "Python2MigrationTarget.txt"
+    suffix = ".py"
+    nums_line_to_check = 20
+    file = open(outfile, "w")
+    if not file:
+        print("cannot open the file %s " % outfile)
+    list_files_to_txt(upper_dir, file, suffix, nums_line_to_check)
+    file.close()
+
+
+if __name__ == "__main__":
+
+    main()
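For context, the report this script writes is a flat list of '<path>, 2' or '<path>, 3' lines; a small sketch of consuming it, assuming the script has already been run and Python2MigrationTarget.txt sits in the current directory:

    from collections import Counter

    counts = Counter()
    with open('Python2MigrationTarget.txt') as report:
        for entry in report:
            path, _, version = entry.rpartition(',')
            counts[version.strip()] += 1

    print('files already marked python3:', counts.get('3', 0))
    print('files still python2-only:', counts.get('2', 0))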
diff --git a/utils/build_externals.py b/utils/build_externals.py
index c373139..79c2ce2 100755
--- a/utils/build_externals.py
+++ b/utils/build_externals.py
@@ -58,7 +58,7 @@
     options = parse_arguments(sys.argv[1:])
     logging_manager.configure_logging(BuildExternalsLoggingConfig(),
                                       verbose=True)
-    os.umask(022)
+    os.umask(0o22)
 
     top_of_tree = external_packages.find_top_of_autotest_tree()
     package_dir = os.path.join(top_of_tree, PACKAGE_DIR)
@@ -78,7 +78,7 @@
     fetched_packages, fetch_errors = fetch_necessary_packages(
         package_dir, install_dir, set(options.names_to_check))
     install_errors = build_and_install_packages(fetched_packages, install_dir,
-                                                options.use_chromite_master)
+                                                options.use_chromite_main)
 
     # Byte compile the code after it has been installed in its final
     # location as .pyc files contain the path passed to compile_dir().
@@ -113,8 +113,8 @@
     parser = argparse.ArgumentParser(
             description='Command to build third party dependencies required '
                         'for autotest.')
-    parser.add_argument('--use_chromite_master', action='store_true',
-                        help='Update chromite to master branch, rather than '
+    parser.add_argument('--use_chromite_main', action='store_true',
+                        help='Update chromite to main branch, rather than '
                              'prod.')
     parser.add_argument('--names_to_check', nargs='*', type=str, default=set(),
                         help='Package names to check whether they are needed '
@@ -160,13 +160,13 @@
 
 
 def build_and_install_packages(packages, install_dir,
-                               use_chromite_master=True):
+                               use_chromite_main=True):
     """
     Builds and installs all packages into install_dir.
 
     @param packages - A list of already fetched ExternalPackage instances.
     @param install_dir - Directory the packages should be installed into.
-    @param use_chromite_master: True if updating chromite to master branch. Due
+    @param use_chromite_main: True if updating chromite to main branch. Due
                                 to the non-usage of origin/prod tag, the default
                                 value for this argument has been set to True.
                                 This argument has not been removed for backward
@@ -177,14 +177,14 @@
     errors = []
     for package in packages:
         if package.name.lower() == 'chromiterepo':
-            if not use_chromite_master:
+            if not use_chromite_main:
                 logging.warning(
-                    'Even though use_chromite_master has been set to %s, it '
-                    'will be checked out to master as the origin/prod tag '
-                    'carries little value now.', use_chromite_master)
-            logging.info('Checking out autotest-chromite to master branch.')
+                    'Even though use_chromite_main has been set to %s, it '
+                    'will be checked out to main as the origin/prod tag '
+                    'carries little value now.', use_chromite_main)
+            logging.info('Checking out autotest-chromite to main branch.')
             result = package.build_and_install(
-                install_dir, master_branch=True)
+                install_dir, main_branch=True)
         else:
             result = package.build_and_install(install_dir)
         if isinstance(result, bool):
diff --git a/utils/compile_gwt_clients.py b/utils/compile_gwt_clients.py
index be12f37..1815073 100755
--- a/utils/compile_gwt_clients.py
+++ b/utils/compile_gwt_clients.py
@@ -1,12 +1,23 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
+
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
 import common
 import sys, os, shutil, optparse, logging
+import six
+
 from autotest_lib.client.common_lib import error, utils
 from autotest_lib.client.common_lib import logging_config, logging_manager
+
+
 """
 Compile All Autotest GWT Clients Living in autotest/frontend/client/src
 """
 
+
+
 _AUTOTEST_DIR = common.autotest_dir
 _DEFAULT_GWT_DIRS = ['/usr/local/lib/gwt', '/opt/google-web-toolkit']
 _DEFAULT_APP_DIR = os.path.join(_AUTOTEST_DIR, 'frontend/client')
@@ -76,7 +87,7 @@
         try:
             os.rename(tmp_client_dir, install_dir)
             return True
-        except Exception, err:
+        except Exception as err:
             # If we can't rename the client raise an exception
             # and put the old client back
             shutil.rmtree(install_dir)
@@ -122,7 +133,7 @@
        @returns list of failed client installations
     """
     failed_clients = []
-    for project,clients in enumerate_projects().iteritems():
+    for project,clients in six.iteritems(enumerate_projects()):
         for client in clients:
             project_client = '%s.%s' % (project, client)
             if not compile_and_install_client(project_client, extra_args):
@@ -133,7 +144,7 @@
 
 def print_projects():
     logging.info('Projects that can be compiled:')
-    for project,clients in enumerate_projects().iteritems():
+    for project,clients in six.iteritems(enumerate_projects()):
         for client in clients:
             logging.info('%s.%s', project, client)
 
diff --git a/utils/external_packages.py b/utils/external_packages.py
index 92371eb..3ca6df0 100644
--- a/utils/external_packages.py
+++ b/utils/external_packages.py
@@ -1,10 +1,17 @@
+# Lint as: python2, python3
 # Please keep this code python 2.4 compatible and stand alone.
 
-import logging, os, shutil, sys, tempfile, time, urllib2
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import logging, os, shutil, sys, tempfile, time
+from six.moves import urllib
 import subprocess, re
 from distutils.version import LooseVersion
 
 from autotest_lib.client.common_lib import autotemp, revision_control, utils
+import six
 
 _READ_SIZE = 64*1024
 _MAX_PACKAGE_SIZE = 100*1024*1024
@@ -161,7 +168,7 @@
             return True
         try:
             module = __import__(self.module_name)
-        except ImportError, e:
+        except ImportError as e:
             logging.info("%s isn't present. Will install.", self.module_name)
             return True
         # Check if we're getting a module installed somewhere else,
@@ -235,7 +242,7 @@
         if not self.os_requirements:
             return
         failed = False
-        for file_names, package_name in self.os_requirements.iteritems():
+        for file_names, package_name in six.iteritems(self.os_requirements):
             if not any(os.path.exists(file_name) for file_name in file_names):
                 failed = True
                 logging.error('Can\'t find %s, %s probably needs it.',
@@ -553,8 +560,8 @@
         for url in self.urls:
             logging.info('Fetching %s', url)
             try:
-                url_file = urllib2.urlopen(url)
-            except (urllib2.URLError, EnvironmentError):
+                url_file = urllib.request.urlopen(url)
+            except (urllib.error.URLError, EnvironmentError):
                 logging.warning('Could not fetch %s package from %s.',
                                 self.name, url)
                 continue
@@ -563,7 +570,7 @@
                                                   _MAX_PACKAGE_SIZE))
             if data_length <= 0 or data_length > _MAX_PACKAGE_SIZE:
                 raise FetchError('%s from %s fails Content-Length %d '
-                                 'sanity check.' % (self.name, url,
+                                 'validity check.' % (self.name, url,
                                                     data_length))
             checksum = utils.hash('sha1')
             total_read = 0
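The hunk above swaps urllib2 for the six.moves.urllib compatibility layer: urllib2.urlopen becomes urllib.request.urlopen and urllib2.URLError becomes urllib.error.URLError. A hedged sketch of the fetch-and-size-check shape, with an illustrative URL and the same _MAX_PACKAGE_SIZE idea:

from six.moves import urllib

_MAX_PACKAGE_SIZE = 100 * 1024 * 1024
url = 'https://example.com/package-1.0.tar.gz'  # illustrative URL only

try:
    url_file = urllib.request.urlopen(url)
except (urllib.error.URLError, EnvironmentError):
    url_file = None  # the real code logs a warning and tries the next mirror

if url_file is not None:
    data_length = int(url_file.info().get('Content-Length', _MAX_PACKAGE_SIZE))
    if data_length <= 0 or data_length > _MAX_PACKAGE_SIZE:
        raise ValueError('Content-Length %d fails the validity check.' % data_length)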
@@ -622,10 +629,10 @@
         if not egg_path:
             return False
 
-        print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n'
-        print 'About to run sudo to install setuptools', self.version
-        print 'on your system for use by', sys.executable, '\n'
-        print '!! ^C within', self.SUDO_SLEEP_DELAY, 'seconds to abort.\n'
+        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
+        print('About to run sudo to install setuptools', self.version)
+        print('on your system for use by', sys.executable, '\n')
+        print('!! ^C within', self.SUDO_SLEEP_DELAY, 'seconds to abort.\n')
         time.sleep(self.SUDO_SLEEP_DELAY)
 
         # Copy the egg to the local filesystem /var/tmp so that root can
@@ -638,7 +645,7 @@
             p = subprocess.Popen(['sudo', '/bin/sh', temp_egg],
                                  stdout=subprocess.PIPE)
             regex = re.compile('Copying (.*?) to (.*?)\n')
-            match = regex.search(p.communicate()[0])
+            match = regex.search(p.communicate()[0].decode('utf-8'))
             status = p.wait()
 
             if match:
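The .decode('utf-8') added above matters because Popen.communicate() returns bytes on Python 3, while the surrounding regex expects str. A small self-contained illustration of the same decode-then-match step (the echoed text is invented):

import re
import subprocess

# Invented output that mimics the "Copying ... to ..." line being parsed.
p = subprocess.Popen(['echo', 'Copying example.egg to /tmp/site-packages'],
                     stdout=subprocess.PIPE)
output = p.communicate()[0].decode('utf-8')  # bytes -> str before regex use
match = re.search(r'Copying (.*?) to (.*)', output)
if match:
    print(match.group(1), '->', match.group(2))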
@@ -711,24 +718,6 @@
             ExternalPackage._build_and_install_current_dir_setupegg_py)
 
 
-
-class JsonRPCLib(ExternalPackage):
-    """jsonrpclib package"""
-    version = '0.1.3'
-    module_name = 'jsonrpclib'
-    local_filename = '%s-%s.tar.gz' % (module_name, version)
-    urls = (_CHROMEOS_MIRROR + local_filename,)
-    hex_sum = '431714ed19ab677f641ce5d678a6a95016f5c452'
-
-    def _get_installed_version_from_module(self, module):
-        # jsonrpclib doesn't contain a proper version
-        return self.version
-
-    _build_and_install = ExternalPackage._build_and_install_from_package
-    _build_and_install_current_dir = (
-                        ExternalPackage._build_and_install_current_dir_noegg)
-
-
 class GwtPackage(ExternalPackage):
     """Fetch and extract a local copy of GWT used to build the frontend."""
 
@@ -864,34 +853,13 @@
                         ExternalPackage._build_and_install_current_dir_setup_py)
 
 
-class ElasticSearchPackage(ExternalPackage):
-    """elasticsearch-py package."""
-    version = '1.6.0'
-    url_filename = 'elasticsearch-%s.tar.gz' % version
-    local_filename = url_filename
-    urls = ('https://pypi.python.org/packages/source/e/elasticsearch/%s' %
-            (url_filename),)
-    hex_sum = '3e676c96f47935b1f52df82df3969564bd356b1c'
-    _build_and_install = ExternalPackage._build_and_install_from_package
-    _build_and_install_current_dir = (
-            ExternalPackage._build_and_install_current_dir_setup_py)
-
-    def _get_installed_version_from_module(self, module):
-        # Elastic's version format is like tuple (1, 6, 0), which needs to be
-        # transferred to 1.6.0.
-        try:
-            return '.'.join(str(i) for i in module.__version__)
-        except:
-            return self.version
-
-
 class Urllib3Package(ExternalPackage):
     """elasticsearch-py package."""
-    version = '1.9'
+    version = '1.23'
     url_filename = 'urllib3-%s.tar.gz' % version
     local_filename = url_filename
     urls = (_CHROMEOS_MIRROR + local_filename,)
-    hex_sum = '9522197efb2a2b49ce804de3a515f06d97b6602f'
+    hex_sum = '0c54209c397958a7cebe13cb453ec8ef5833998d'
     _build_and_install = ExternalPackage._build_and_install_from_package
     _build_and_install_current_dir = (
             ExternalPackage._build_and_install_current_dir_setup_py)
@@ -1000,6 +968,18 @@
             ExternalPackage._build_and_install_current_dir_setup_py)
 
 
+class SetuptoolsScmPackage(ExternalPackage):
+    """setuptools_scm package."""
+    version = '5.0.2'
+    url_filename = 'setuptools_scm-%s.tar.gz' % version
+    local_filename = url_filename
+    urls = (_CHROMEOS_MIRROR + local_filename, )
+    hex_sum = '28ec9ce4a5270f82f07e919398c74221da67a8bb'
+    _build_and_install = ExternalPackage._build_and_install_from_package
+    _build_and_install_current_dir = (
+            ExternalPackage._build_and_install_current_dir_setup_py)
+
+
 class LruCachePackage(ExternalPackage):
     """backports.functools_lru_cache package (dependency for astroid)."""
     version = '1.4'
@@ -1025,6 +1005,18 @@
             ExternalPackage._build_and_install_current_dir_setup_py)
 
 
+class PytestRunnerPackage(ExternalPackage):
+    """pytest-runner package."""
+    version = '5.2'
+    url_filename = 'pytest-runner-%s.tar.gz' % version
+    local_filename = url_filename
+    urls = (_CHROMEOS_MIRROR + local_filename,)
+    hex_sum = '3427663b575c5d885ea3869a1be09aca36517f74'
+    _build_and_install = ExternalPackage._build_and_install_from_package
+    _build_and_install_current_dir = (
+            ExternalPackage._build_and_install_current_dir_setup_py)
+
+
 class PyLintPackage(ExternalPackage):
     """pylint package."""
     version = '1.7.2'
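SetuptoolsScmPackage and PytestRunnerPackage, added above, follow the declarative pattern the rest of this file uses: a subclass supplies only version, filename, and checksum metadata and points its build hooks at the shared ExternalPackage helpers. A hypothetical sketch of that shape (name, version, and sha1 are placeholders, not a real package in this change):

class ExamplePackage(ExternalPackage):  # hypothetical, for illustration only
    """example package."""
    version = '1.0.0'
    url_filename = 'example-%s.tar.gz' % version
    local_filename = url_filename
    urls = (_CHROMEOS_MIRROR + local_filename,)
    hex_sum = '0000000000000000000000000000000000000000'  # placeholder sha1
    _build_and_install = ExternalPackage._build_and_install_from_package
    _build_and_install_current_dir = (
            ExternalPackage._build_and_install_current_dir_setup_py)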
@@ -1073,33 +1065,6 @@
             ExternalPackage._build_and_install_current_dir_setup_py)
 
 
-class Pytz(ExternalPackage):
-    """Pytz package."""
-    version = '2016.10'
-    url_filename = 'pytz-%s.tar.gz' % version
-    local_filename = url_filename
-    #md5=cc9f16ba436efabdcef3c4d32ae4919c
-    urls = ('https://pypi.python.org/packages/42/00/'
-            '5c89fc6c9b305df84def61863528e899e9dccb196f8438f6cbe960758fc5/%s' %
-            (url_filename),)
-    hex_sum = '8d63f1e9b1ee862841b990a7d8ad1d4508d9f0be'
-    _build_and_install = ExternalPackage._build_and_install_from_package
-    _build_and_install_current_dir = (
-            ExternalPackage._build_and_install_current_dir_setup_py)
-
-
-class Tzlocal(ExternalPackage):
-    """Tzlocal package."""
-    version = '1.3'
-    url_filename = 'tzlocal-%s.tar.gz' % version
-    local_filename = url_filename
-    urls = (_CHROMEOS_MIRROR + local_filename,)
-    hex_sum = '730e9d7112335865a1dcfabec69c8c3086be424f'
-    _build_and_install = ExternalPackage._build_and_install_from_package
-    _build_and_install_current_dir = (
-            ExternalPackage._build_and_install_current_dir_setup_py)
-
-
 class PyYAMLPackage(ExternalPackage):
     """pyyaml package."""
     version = '3.12'
@@ -1122,17 +1087,6 @@
             ExternalPackage._build_and_install_current_dir_setup_py)
 
 
-class CachetoolsPackage(ExternalPackage):
-    """Cachetools package."""
-    version = '3.1.1'
-    local_filename = 'cachetools-%s.tar.gz' % version
-    urls = (_CHROMEOS_MIRROR + local_filename,)
-    hex_sum = 'd030bfdfa91b0b1188993f5e8d7da077308c1eaf'
-    _build_and_install = ExternalPackage._build_and_install_from_package
-    _build_and_install_current_dir = (
-            ExternalPackage._build_and_install_current_dir_setup_py)
-
-
 class GrpcioPackage(ExternalPackage):
     """GrpcioPackage package."""
     version = '1.26.0'
@@ -1208,7 +1162,6 @@
     # All the chromiumos projects used on the lab servers should have a 'prod'
     # branch used to track the software version deployed in prod.
     PROD_BRANCH = 'prod'
-    MASTER_BRANCH = 'master'
 
     def is_needed(self, unused_install_dir):
         """Tell build_externals that we need to fetch."""
@@ -1237,6 +1190,7 @@
     temp_hdctools_dir = tempfile.mktemp(suffix='hdctools')
     _GIT_URL = ('https://chromium.googlesource.com/'
                 'chromiumos/third_party/hdctools')
+    MAIN_BRANCH = 'main'
 
     def fetch(self, unused_dest_dir):
         """
@@ -1281,20 +1235,21 @@
     """Clones or updates the chromite repo."""
 
     _GIT_URL = ('https://chromium.googlesource.com/chromiumos/chromite')
+    MAIN_BRANCH = 'main'
 
-    def build_and_install(self, install_dir, master_branch=False):
+    def build_and_install(self, install_dir, main_branch=False):
         """
         Clone if the repo isn't initialized, pull clean bits if it is.
 
-        Unlike it's hdctools counterpart the chromite repo clones master
+        Unlike its hdctools counterpart, the chromite repo clones main
         directly into site-packages. It doesn't use an intermediate temp
         directory because it doesn't need installation.
 
         @param install_dir: destination directory for chromite installation.
-        @param master_branch: if True, install master branch. Otherwise,
+        @param main_branch: if True, install main branch. Otherwise,
                               install prod branch.
         """
-        init_branch = (self.MASTER_BRANCH if master_branch
+        init_branch = (self.MAIN_BRANCH if main_branch
                        else self.PROD_BRANCH)
         local_chromite_dir = os.path.join(install_dir, 'chromite')
         git_repo = revision_control.GitRepo(
@@ -1309,38 +1264,14 @@
         return False
 
 
-class SuiteSchedulerRepo(_ExternalGitRepo):
-    """Clones or updates the suite_scheduler repo."""
-
-    _GIT_URL = ('https://chromium.googlesource.com/chromiumos/'
-                'infra/suite_scheduler')
-
-    def build_and_install(self, install_dir):
-        """
-        Clone if the repo isn't initialized, pull clean bits if it is.
-
-        @param install_dir: destination directory for suite_scheduler
-                            installation.
-        @param master_branch: if True, install master branch. Otherwise,
-                              install prod branch.
-        """
-        local_dir = os.path.join(install_dir, 'suite_scheduler')
-        git_repo = revision_control.GitRepo(
-                local_dir,
-                self._GIT_URL,
-                abs_work_tree=local_dir)
-        git_repo.reinit_repo_at(self.MASTER_BRANCH)
-
-        if git_repo.get_latest_commit_hash():
-            return True
-        return False
-
-
 class BtsocketRepo(_ExternalGitRepo):
     """Clones or updates the btsocket repo."""
 
     _GIT_URL = ('https://chromium.googlesource.com/'
                 'chromiumos/platform/btsocket')
+    # TODO b:169251326 terms below are set outside of this codebase and should
+    # be updated when possible ("master" -> "main").
+    MAIN_BRANCH = 'master'
 
     def fetch(self, unused_dest_dir):
         """
@@ -1398,6 +1329,9 @@
 
     _GIT_URL = ('https://chromium.googlesource.com/chromiumos/infra/'
                 'skylab_inventory')
+    # TODO b:169251326 terms below are set outside of this codebase and should
+    # be updated when possible ("master" -> "main").
+    MAIN_BRANCH = 'master'
 
     # TODO(nxia): create a prod branch for skylab_inventory.
     def build_and_install(self, install_dir):
@@ -1410,7 +1344,7 @@
                 local_skylab_dir,
                 self._GIT_URL,
                 abs_work_tree=local_skylab_dir)
-        git_repo.reinit_repo_at(self.MASTER_BRANCH)
+        git_repo.reinit_repo_at(self.MAIN_BRANCH)
 
         # The top-level __init__.py for skylab is at venv/skylab_inventory.
         source = os.path.join(local_skylab_dir, 'venv', 'skylab_inventory')
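The master-to-main rename threads through the _ExternalGitRepo subclasses above: each repo now declares its own MAIN_BRANCH (still 'master' for the two externally hosted repos flagged with TODOs), and callers choose between it and PROD_BRANCH. A condensed, hedged sketch of that selection, with illustrative values:

# Hedged sketch of the branch choice after the rename; values are illustrative.
PROD_BRANCH = 'prod'
MAIN_BRANCH = 'main'

def pick_branch(main_branch=False):
    """Return the branch to install: tip-of-tree if main_branch, else prod."""
    return MAIN_BRANCH if main_branch else PROD_BRANCH

print(pick_branch())                   # 'prod'
print(pick_branch(main_branch=True))   # 'main'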
diff --git a/utils/frozen_chromite/README.md b/utils/frozen_chromite/README.md
new file mode 100644
index 0000000..17cb7df
--- /dev/null
+++ b/utils/frozen_chromite/README.md
@@ -0,0 +1,5 @@
+This is a fork of chromite code used by autotest.
+Since autotest is stuck on Python 2 and is holding back chromite,
+we've pulled out the code that autotest cares about here.
+If/when autotest supports Python 3.6+ only, it can migrate back
+to using chromite directly.
diff --git a/utils/frozen_chromite/__init__.py b/utils/frozen_chromite/__init__.py
new file mode 100644
index 0000000..f356b9b
--- /dev/null
+++ b/utils/frozen_chromite/__init__.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import os
+import sys
+
+# Add the third_party/ dir to our search path so that we can find the
+# modules in there automatically.  This isn't normal, so don't replicate
+# this pattern elsewhere.
+_chromite_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__)))
+_third_party_dir = os.path.join(_chromite_dir, 'third_party')
+sys.path.insert(0, _third_party_dir)
+sys.path.insert(
+    # Allow Python 2 or 3 specific modules under a separate subpath.
+    1, os.path.join(_third_party_dir, 'python%s' % sys.version_info.major))
diff --git a/client/common_lib/cros/fake_device_server/__init__.py b/utils/frozen_chromite/cli/__init__.py
similarity index 100%
rename from client/common_lib/cros/fake_device_server/__init__.py
rename to utils/frozen_chromite/cli/__init__.py
diff --git a/utils/frozen_chromite/cli/command.py b/utils/frozen_chromite/cli/command.py
new file mode 100644
index 0000000..4f84af4
--- /dev/null
+++ b/utils/frozen_chromite/cli/command.py
@@ -0,0 +1,171 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module that contains meta-logic related to CLI commands.
+
+This module contains two important definitions used by all commands:
+  CliCommand: The parent class of all CLI commands.
+  CommandDecorator: Decorator that must be used to ensure that the command shows
+    up in |_commands| and is discoverable.
+
+Commands can be either imported directly or looked up using this module's
+ListCommands() function.
+"""
+
+from __future__ import print_function
+
+import importlib
+import os
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import commandline
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+
+
+# Paths for finding and importing subcommand modules.
+_SUBCOMMAND_MODULE_DIRECTORY = os.path.join(os.path.dirname(__file__), 'cros')
+_SUBCOMMAND_MODULE_PREFIX = 'cros_'
+
+
+_commands = dict()
+
+
+def UseProgressBar():
+  """Determine whether the progress bar is to be used or not.
+
+  We only want the progress bar to display for the brillo commands which operate
+  at logging level NOTICE. If the user wants to see the noisy output, then they
+  can execute the command at logging level INFO or DEBUG.
+  """
+  return logging.getLogger().getEffectiveLevel() == logging.NOTICE
+
+
+def ImportCommand(name):
+  """Directly import the specified subcommand.
+
+  This method imports the module which must contain the single subcommand.  When
+  the module is loaded, the declared command (those that use CommandDecorator)
+  will automatically get added to |_commands|.
+
+  Args:
+    name: The subcommand to load.
+
+  Returns:
+    A reference to the subcommand class.
+  """
+  module_path = os.path.join(_SUBCOMMAND_MODULE_DIRECTORY,
+                             'cros_%s' % (name.replace('-', '_'),))
+  import_path = os.path.relpath(os.path.realpath(module_path),
+                                os.path.dirname(constants.CHROMITE_DIR))
+  module_parts = import_path.split(os.path.sep)
+  importlib.import_module('.'.join(module_parts))
+  return _commands[name]
+
+
+def ListCommands():
+  """Return the set of available subcommands.
+
+  We assume that there is a direct one-to-one relationship between the module
+  name on disk and the command that module implements.  We assume this as a
+  performance requirement (to avoid importing every subcommand every time even
+  though we'd only ever run a single one), and to avoid 3rd party module usage
+  in one subcommand breaking all other subcommands (not a great solution).
+  """
+  # Filenames use underscores due to python naming limitations, but subcommands
+  # use dashes as they're easier for humans to type.
+  # Strip off the leading "cros_" and the trailing ".py".
+  return set(x[5:-3].replace('_', '-')
+             for x in os.listdir(_SUBCOMMAND_MODULE_DIRECTORY)
+             if (x.startswith(_SUBCOMMAND_MODULE_PREFIX) and x.endswith('.py')
+                 and not x.endswith('_unittest.py')))
+
+
+class InvalidCommandError(Exception):
+  """Error that occurs when command class fails sanity checks."""
+
+
+def CommandDecorator(command_name):
+  """Decorator that sanity checks and adds class to list of usable commands."""
+
+  def InnerCommandDecorator(original_class):
+    """Inner Decorator that actually wraps the class."""
+    if not hasattr(original_class, '__doc__'):
+      raise InvalidCommandError('All handlers must have docstrings: %s' %
+                                original_class)
+
+    if not issubclass(original_class, CliCommand):
+      raise InvalidCommandError('All Commands must derive from CliCommand: %s' %
+                                original_class)
+
+    _commands[command_name] = original_class
+    original_class.command_name = command_name
+
+    return original_class
+
+  return InnerCommandDecorator
+
+
+class CliCommand(object):
+  """All CLI commands must derive from this class.
+
+  This class provides the abstract interface for all CLI commands. When
+  designing a new command, you must sub-class from this class and use the
+  CommandDecorator decorator. You must specify a class docstring as that will be
+  used as the usage for the sub-command.
+
+  In addition your command should implement AddParser which is passed in a
+  parser that you can add your own custom arguments. See argparse for more
+  information.
+  """
+  # Indicates whether command uses cache related commandline options.
+  use_caching_options = False
+
+  def __init__(self, options):
+    self.options = options
+
+  @classmethod
+  def AddParser(cls, parser):
+    """Add arguments for this command to the parser."""
+    parser.set_defaults(command_class=cls)
+
+  @classmethod
+  def AddDeviceArgument(cls, parser, schemes=commandline.DEVICE_SCHEME_SSH,
+                        positional=False):
+    """Add a device argument to the parser.
+
+    This standardizes the help message across all subcommands.
+
+    Args:
+      parser: The parser to add the device argument to.
+      schemes: List of device schemes or single scheme to allow.
+      positional: Whether it should be a positional or named argument.
+    """
+    help_strings = []
+    schemes = list(cros_build_lib.iflatten_instance(schemes))
+    if commandline.DEVICE_SCHEME_SSH in schemes:
+      help_strings.append('Target a device with [user@]hostname[:port]. '
+                          'IPv4/IPv6 addresses are allowed, but IPv6 must '
+                          'use brackets (e.g. [::1]).')
+    if commandline.DEVICE_SCHEME_USB in schemes:
+      help_strings.append('Target removable media with usb://[path].')
+    if commandline.DEVICE_SCHEME_SERVO in schemes:
+      help_strings.append('Target a servo by port or serial number with '
+                          'servo:port[:port] or servo:serial:serial-number. '
+                          'e.g. servo:port:1234 or servo:serial:C1230024192.')
+    if commandline.DEVICE_SCHEME_FILE in schemes:
+      help_strings.append('Target a local file with file://path.')
+    if positional:
+      parser.add_argument('device',
+                          type=commandline.DeviceParser(schemes),
+                          help=' '.join(help_strings))
+    else:
+      parser.add_argument('-d', '--device',
+                          type=commandline.DeviceParser(schemes),
+                          help=' '.join(help_strings))
+
+  def Run(self):
+    """The command to run."""
+    raise NotImplementedError()
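Per the docstrings above, a concrete subcommand subclasses CliCommand and registers itself with CommandDecorator so that it lands in |_commands|. A hypothetical 'example' command might look like the sketch below; the command name and behavior are invented for illustration:

# Hypothetical subcommand sketch; 'example' is not a real command here.
from autotest_lib.utils.frozen_chromite.cli import command


@command.CommandDecorator('example')
class ExampleCommand(command.CliCommand):
  """Print a greeting (illustration only)."""

  @classmethod
  def AddParser(cls, parser):
    super(ExampleCommand, cls).AddParser(parser)
    parser.add_argument('--name', default='world', help='Who to greet.')

  def Run(self):
    print('Hello, %s!' % self.options.name)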
diff --git a/utils/frozen_chromite/lib/__init__.py b/utils/frozen_chromite/lib/__init__.py
new file mode 100644
index 0000000..4143aec
--- /dev/null
+++ b/utils/frozen_chromite/lib/__init__.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import sys
+
+
+# This is to work around a Python bug:  The first call to
+# datetime.datetime.strptime() within the Python VM can fail if it
+# happens in a multi-threaded context.  To work around that, we force a
+# "safe" call here.  For more details, see:
+#     https://bugs.python.org/issue7980
+#     https://crbug.com/710182
+if sys.version_info.major < 3:
+  import datetime
+  datetime.datetime.strptime(datetime.datetime.now().strftime('%Y'), '%Y')
diff --git a/utils/frozen_chromite/lib/auth.py b/utils/frozen_chromite/lib/auth.py
new file mode 100644
index 0000000..1cf08a1
--- /dev/null
+++ b/utils/frozen_chromite/lib/auth.py
@@ -0,0 +1,267 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Functions for authenticating httplib2 requests with OAuth2 tokens."""
+
+from __future__ import print_function
+
+import os
+
+import httplib2
+
+from autotest_lib.utils.frozen_chromite.lib import cipd
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import retry_util
+from autotest_lib.utils.frozen_chromite.lib import path_util
+
+
+REFRESH_STATUS_CODES = [401]
+
+# Retry times on get_access_token
+RETRY_GET_ACCESS_TOKEN = 3
+
+
+class AccessTokenError(Exception):
+  """Error accessing the token."""
+
+
+def _GetCipdBinary(pkg_name, bin_name, instance_id):
+  """Returns a local path to the given binary fetched from cipd."""
+  cache_dir = os.path.join(path_util.GetCacheDir(), 'cipd', 'packages')
+  path = cipd.InstallPackage(
+      cipd.GetCIPDFromCache(),
+      pkg_name,
+      instance_id,
+      destination=cache_dir)
+
+  return os.path.join(path, bin_name)
+
+
+# crbug:871831 default to last sha1 version.
+def GetLuciAuth(
+    instance_id='git_revision:fd059ace316e4dbcaa5afdcec9ed4a855c4f3c65'):
+  """Returns a path to the luci-auth binary.
+
+  This will download and install the luci-auth package if it is not already
+  deployed.
+
+  Args:
+    instance_id: The instance-id of the package to install.
+
+  Returns:
+    the path to the luci-auth binary.
+  """
+  return _GetCipdBinary(
+      'infra/tools/luci-auth/linux-amd64',
+      'luci-auth',
+      instance_id)
+
+
+# crbug:871831 default to last sha1 version.
+def GetLuciGitCreds(
+    instance_id='git_revision:fd059ace316e4dbcaa5afdcec9ed4a855c4f3c65'):
+  """Returns a path to the git-credential-luci binary.
+
+  This will download and install the git-credential-luci package if it is not
+  already deployed.
+
+  Args:
+    instance_id: The instance-id of the package to install.
+
+  Returns:
+    the path to the git-credential-luci binary.
+  """
+  return _GetCipdBinary(
+      'infra/tools/luci/git-credential-luci/linux-amd64',
+      'git-credential-luci',
+      instance_id)
+
+
+def Login(service_account_json=None):
+  """Logs a user into chrome-infra-auth using luci-auth.
+
+  Runs 'luci-auth login' to get a OAuth2 refresh token.
+
+  Args:
+    service_account_json: An optional path to a service account.
+
+  Raises:
+    AccessTokenError if login command failed.
+  """
+  logging.info('Logging into chrome-infra-auth with service_account %s',
+               service_account_json)
+
+  cmd = [GetLuciAuth(), 'login']
+  if service_account_json and os.path.isfile(service_account_json):
+    cmd += ['-service-account-json=%s' % service_account_json]
+
+  result = cros_build_lib.run(
+      cmd,
+      print_cmd=True,
+      check=False)
+
+  if result.returncode:
+    raise AccessTokenError('Failed at logging in to chrome-infra-auth: %s,'
+                           ' may retry.')
+
+
+def Token(service_account_json=None):
+  """Get the token using luci-auth.
+
+  Runs 'luci-auth token' to get the OAuth2 token.
+
+  Args:
+    service_account_json: An optional path to a service account.
+
+  Returns:
+    The token string if the command succeeded;
+
+  Raises:
+    AccessTokenError if token command failed.
+  """
+  cmd = [GetLuciAuth(), 'token']
+  if service_account_json and os.path.isfile(service_account_json):
+    cmd += ['-service-account-json=%s' % service_account_json]
+
+  result = cros_build_lib.run(
+      cmd,
+      print_cmd=False,
+      capture_output=True,
+      check=False,
+      encoding='utf-8')
+
+  if result.returncode:
+    raise AccessTokenError('Failed at getting the access token, may retry.')
+
+  return result.output.strip()
+
+
+def _TokenAndLoginIfNeed(service_account_json=None, force_token_renew=False):
+  """Run Token and Login opertions.
+
+  If force_token_renew is on, run Login operation first to force token renew,
+  then run Token operation to return token string.
+  If force_token_renew is off, run Token operation first. If no token found,
+  run Login operation to refresh the token. Throw an AccessTokenError after
+  running the Login operation, so that GetAccessToken can retry on
+  _TokenAndLoginIfNeed.
+
+  Args:
+    service_account_json: An optional path to a service account.
+    force_token_renew: Boolean indicating whether to force login to renew token
+      before returning a token. Default to False.
+
+  Returns:
+    The token string if the command succeeded; else, None.
+
+  Raises:
+    AccessTokenError if the Token operation failed.
+  """
+  if force_token_renew:
+    Login(service_account_json=service_account_json)
+    return Token(service_account_json=service_account_json)
+  else:
+    try:
+      return Token(service_account_json=service_account_json)
+    except AccessTokenError as e:
+      Login(service_account_json=service_account_json)
+      # Raise the error and let the caller decide whether to retry.
+      raise e
+
+
+def GetAccessToken(**kwargs):
+  """Returns an OAuth2 access token using luci-auth.
+
+  Retry the _TokenAndLoginIfNeed function when the error thrown is an
+  AccessTokenError.
+
+  Args:
+    kwargs: A list of keyword arguments to pass to _TokenAndLoginIfNeed.
+
+  Returns:
+    The access token string or None if failed to get access token.
+  """
+  service_account_json = kwargs.get('service_account_json')
+  force_token_renew = kwargs.get('force_token_renew', False)
+  retry = lambda e: isinstance(e, AccessTokenError)
+  try:
+    result = retry_util.GenericRetry(
+        retry, RETRY_GET_ACCESS_TOKEN,
+        _TokenAndLoginIfNeed,
+        service_account_json=service_account_json,
+        force_token_renew=force_token_renew,
+        sleep=3)
+    return result
+  except AccessTokenError as e:
+    logging.error('Failed at getting the access token: %s ', e)
+    # Do not raise the AccessTokenError here.
+    # Let the response returned by the request handler
+    # tell the status and errors.
+    return
+
+
+def GitCreds(service_account_json=None):
+  """Get the git credential using git-credential-luci.
+
+  Args:
+    service_account_json: An optional path to a service account.
+
+  Returns:
+    The git credential if the command succeeded;
+
+  Raises:
+    AccessTokenError if token command failed.
+  """
+  cmd = [GetLuciGitCreds(), 'get']
+  if service_account_json and os.path.isfile(service_account_json):
+    cmd += ['-service-account-json=%s' % service_account_json]
+
+  result = cros_build_lib.run(
+      cmd,
+      print_cmd=False,
+      capture_output=True,
+      check=False,
+      encoding='utf-8')
+
+  if result.returncode:
+    raise AccessTokenError('Unable to fetch git credential.')
+
+  for line in result.stdout.splitlines():
+    if line.startswith('password='):
+      return line.split('password=')[1].strip()
+
+  raise AccessTokenError('Unable to fetch git credential.')
+
+
+class AuthorizedHttp(object):
+  """Authorized http instance"""
+
+  def __init__(self, get_access_token, http, **kwargs):
+    self.get_access_token = get_access_token
+    self.http = http if http is not None else httplib2.Http()
+    self.token = self.get_access_token(**kwargs)
+    self.kwargs = kwargs
+
+  # Adapted from oauth2client.OAuth2Credentials.authorize.
+  # We can't use oauth2client because the import will fail on slaves due to
+  # missing PyOpenSSL (crbug.com/498467).
+  def request(self, *args, **kwargs):
+    headers = kwargs.get('headers', {}).copy()
+    headers['Authorization'] = 'Bearer %s' % self.token
+    kwargs['headers'] = headers
+
+    resp, content = self.http.request(*args, **kwargs)
+    if resp.status in REFRESH_STATUS_CODES:
+      logging.info('OAuth token TTL expired, auto-refreshing')
+
+      # Token expired, force token renew
+      kwargs_copy = dict(self.kwargs, force_token_renew=True)
+      self.token = self.get_access_token(**kwargs_copy)
+
+      # TODO(phobbs): delete the "access_token" key from the token file used.
+      headers['Authorization'] = 'Bearer %s' % self.token
+      resp, content = self.http.request(*args, **kwargs)
+
+    return resp, content
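A minimal usage sketch tying the helpers above together, assuming luci-auth is installed and logged in on the host; the request URL is illustrative only:

import httplib2

from autotest_lib.utils.frozen_chromite.lib import auth

# Wrap an httplib2 client so every request carries a Bearer token and is
# retried once with a force-renewed token if the server answers 401.
http = auth.AuthorizedHttp(auth.GetAccessToken, httplib2.Http())
resp, content = http.request('https://example.googleapis.com/endpoint')  # illustrative
print(resp.status)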
diff --git a/utils/frozen_chromite/lib/auto_update_util.py b/utils/frozen_chromite/lib/auto_update_util.py
new file mode 100644
index 0000000..dc7b0af
--- /dev/null
+++ b/utils/frozen_chromite/lib/auto_update_util.py
@@ -0,0 +1,136 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This file contains util functions for the auto-update lib."""
+
+from __future__ import print_function
+
+import re
+
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+
+
+LSB_RELEASE = '/etc/lsb-release'
+
+
+def GetChromeosBuildInfo(lsb_release_content=None, regex=None):
+  """Get chromeos build info in device under test as string. None on fail.
+
+  Args:
+    lsb_release_content: A string represents the content of lsb-release.
+        If the caller is from drone, it can pass in the file content here.
+    regex: A regular expression, refers to which line this func tries to fetch
+        from lsb_release_content.
+
+  Returns:
+    A kind of chromeos build info in device under test as string. None on fail.
+  """
+  if not lsb_release_content or not regex:
+    return None
+
+  for line in lsb_release_content.split('\n'):
+    m = re.match(regex, line)
+    if m:
+      return m.group(1)
+
+  return None
+
+def VersionMatch(build_version, release_version):
+  """Compare release version from lsb-release with cros-version label.
+
+  build_version is a string based on build name. It is prefixed with builder
+  info and branch ID, e.g., lumpy-release/R43-6809.0.0.
+  release_version is retrieved from lsb-release.
+  These two values might not match exactly.
+
+  The method is designed to compare versions for the following 6 scenarios with
+  samples of build version and expected release version:
+  1. trybot non-release build (paladin, pre-cq or test-ap build).
+  build version:   trybot-lumpy-paladin/R27-3837.0.0-b123
+  release version: 3837.0.2013_03_21_1340
+
+  2. trybot release build.
+  build version:   trybot-lumpy-release/R27-3837.0.0-b456
+  release version: 3837.0.0
+
+  3. buildbot official release build.
+  build version:   lumpy-release/R27-3837.0.0
+  release version: 3837.0.0
+
+  4. non-official paladin rc build.
+  build version:   lumpy-paladin/R27-3878.0.0-rc7
+  release version: 3837.0.0-rc7
+
+  5. chrome-perf build.
+  build version:   lumpy-chrome-perf/R28-3837.0.0-b2996
+  release version: 3837.0.0
+
+  6. pgo-generate build.
+  build version:   lumpy-release-pgo-generate/R28-3837.0.0-b2996
+  release version: 3837.0.0-pgo-generate
+
+  TODO: This logic has a bug if a trybot paladin build failed to be
+  installed in a DUT running an older trybot paladin build with same
+  platform number, but different build number (-b###). So to conclusively
+  determine if a tryjob paladin build is imaged successfully, we may need
+  to find out the date string from update url.
+
+  Args:
+    build_version: Build name for cros version, e.g.
+        peppy-release/R43-6809.0.0 or R43-6809.0.0
+    release_version: Release version retrieved from lsb-release,
+        e.g., 6809.0.0
+
+  Returns:
+    True if the values match, otherwise returns False.
+  """
+  # If the build is from release, CQ or PFQ builder, cros-version label must
+  # be ended with release version in lsb-release.
+
+  if build_version.endswith(release_version):
+    return True
+
+  # Remove R#- and -b# at the end of build version
+  stripped_version = re.sub(r'(R\d+-|-b\d+)', '', build_version)
+  # Trim the builder info, e.g., trybot-lumpy-paladin/
+  stripped_version = stripped_version.split('/')[-1]
+
+  # Add toolchain here since is_trybot_non_release_build cannot detect build
+  # like 'trybot-sentry-llvm-toolchain/R56-8885.0.0-b943'.
+  is_trybot_non_release_build = re.match(
+      r'.*trybot-.+-(paladin|pre-cq|test-ap|toolchain)', build_version)
+
+  # Replace date string with 0 in release_version
+  release_version_no_date = re.sub(r'\d{4}_\d{2}_\d{2}_\d+', '0',
+                                   release_version)
+  has_date_string = release_version != release_version_no_date
+
+  is_pgo_generate_build = re.match(r'.+-pgo-generate', build_version)
+
+  # Remove |-pgo-generate| in release_version
+  release_version_no_pgo = release_version.replace('-pgo-generate', '')
+  has_pgo_generate = release_version != release_version_no_pgo
+
+  if is_trybot_non_release_build:
+    if not has_date_string:
+      logging.error('A trybot paladin or pre-cq build is expected. '
+                    'Version "%s" is not a paladin or pre-cq build.',
+                    release_version)
+      return False
+    return stripped_version == release_version_no_date
+  elif is_pgo_generate_build:
+    if not has_pgo_generate:
+      logging.error('A pgo-generate build is expected. Version '
+                    '"%s" is not a pgo-generate build.',
+                    release_version)
+      return False
+    return stripped_version == release_version_no_pgo
+  else:
+    if has_date_string:
+      logging.error('Unexpected date found in a non trybot paladin or '
+                    'pre-cq build.')
+      return False
+    # Versioned build, i.e., rc or release build.
+    return stripped_version == release_version
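The scenarios enumerated in the VersionMatch docstring can be exercised directly; a short illustrative run using the sample values quoted there:

from autotest_lib.utils.frozen_chromite.lib import auto_update_util

# Sample pairs taken from the VersionMatch docstring above.
print(auto_update_util.VersionMatch(
    'lumpy-release/R27-3837.0.0', '3837.0.0'))                             # True
print(auto_update_util.VersionMatch(
    'trybot-lumpy-paladin/R27-3837.0.0-b123', '3837.0.2013_03_21_1340'))   # True
print(auto_update_util.VersionMatch(
    'lumpy-release/R27-3837.0.0', '3838.0.0'))                             # False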
diff --git a/utils/frozen_chromite/lib/auto_updater.py b/utils/frozen_chromite/lib/auto_updater.py
new file mode 100644
index 0000000..fcfe76b
--- /dev/null
+++ b/utils/frozen_chromite/lib/auto_updater.py
@@ -0,0 +1,1040 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library containing functions to execute auto-update on a remote device.
+
+ChromiumOSUpdater includes:
+  ----Check-----
+  * Check functions, including kernel/version/cgpt check.
+
+  ----Precheck---
+  * Pre-check if the device can run its nebraska.
+  * Pre-check for stateful/rootfs update/whole update.
+
+  ----Transfer----
+  * This step is carried out by Transfer subclasses in
+    auto_updater_transfer.py.
+
+  ----Auto-Update---
+  * Do rootfs partition update if it's required.
+  * Do stateful partition update if it's required.
+  * Do reboot for device if it's required.
+
+  ----Verify----
+  * Do verification if it's required.
+  * Disable rootfs verification in device if it's required.
+  * Post-check stateful/rootfs update/whole update.
+"""
+
+from __future__ import print_function
+
+import json
+import os
+import re
+import subprocess
+import tempfile
+import time
+
+import six
+
+from autotest_lib.utils.frozen_chromite.cli import command
+from autotest_lib.utils.frozen_chromite.lib import auto_update_util
+from autotest_lib.utils.frozen_chromite.lib import auto_updater_transfer
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import nebraska_wrapper
+from autotest_lib.utils.frozen_chromite.lib import operation
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import remote_access
+from autotest_lib.utils.frozen_chromite.lib import retry_util
+from autotest_lib.utils.frozen_chromite.lib import stateful_updater
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
+
+from autotest_lib.utils.frozen_chromite.utils import key_value_store
+
+# Naming conventions for global variables:
+#   File on remote host without slash: REMOTE_XXX_FILENAME
+#   File on remote host with slash: REMOTE_XXX_FILE_PATH
+#   Path on remote host with slash: REMOTE_XXX_PATH
+#   File on local server without slash: LOCAL_XXX_FILENAME
+
+# Update Status for remote device.
+UPDATE_STATUS_IDLE = 'UPDATE_STATUS_IDLE'
+UPDATE_STATUS_DOWNLOADING = 'UPDATE_STATUS_DOWNLOADING'
+UPDATE_STATUS_FINALIZING = 'UPDATE_STATUS_FINALIZING'
+UPDATE_STATUS_UPDATED_NEED_REBOOT = 'UPDATE_STATUS_UPDATED_NEED_REBOOT'
+
+# Max number of the times for retry:
+# 1. for transfer functions to be retried.
+# 2. for some retriable commands to be retried.
+MAX_RETRY = 5
+
+# The delay between retriable tasks.
+DELAY_SEC_FOR_RETRY = 5
+
+# Number of seconds to wait for the post check version to settle.
+POST_CHECK_SETTLE_SECONDS = 15
+
+# Number of seconds to delay between post check retries.
+POST_CHECK_RETRY_SECONDS = 5
+
+
+class ChromiumOSUpdateError(Exception):
+  """Thrown when there is a general ChromiumOS-specific update error."""
+
+
+class PreSetupUpdateError(ChromiumOSUpdateError):
+  """Raised for the rootfs/stateful update pre-setup failures."""
+
+
+class RootfsUpdateError(ChromiumOSUpdateError):
+  """Raised for the Rootfs partition update failures."""
+
+
+class StatefulUpdateError(ChromiumOSUpdateError):
+  """Raised for the stateful partition update failures."""
+
+
+class AutoUpdateVerifyError(ChromiumOSUpdateError):
+  """Raised for verification failures after auto-update."""
+
+
+class RebootVerificationError(ChromiumOSUpdateError):
+  """Raised for failing to reboot errors."""
+
+
+class BaseUpdater(object):
+  """The base updater class."""
+
+  def __init__(self, device, payload_dir):
+    self.device = device
+    self.payload_dir = payload_dir
+
+
+class ChromiumOSUpdater(BaseUpdater):
+  """Used to update DUT with image."""
+
+  # Nebraska files.
+  LOCAL_NEBRASKA_LOG_FILENAME = 'nebraska.log'
+  REMOTE_NEBRASKA_FILENAME = 'nebraska.py'
+
+  # rootfs update files.
+  REMOTE_UPDATE_ENGINE_BIN_FILENAME = 'update_engine_client'
+  REMOTE_UPDATE_ENGINE_LOGFILE_PATH = '/var/log/update_engine.log'
+
+  UPDATE_CHECK_INTERVAL_PROGRESSBAR = 0.5
+  UPDATE_CHECK_INTERVAL_NORMAL = 10
+
+  # `mode` parameter when copying payload files to the DUT.
+  PAYLOAD_MODE_PARALLEL = 'parallel'
+  PAYLOAD_MODE_SCP = 'scp'
+
+  # Related to crbug.com/276094: Restore to 5 mins once the 'host did not
+  # return from reboot' bug is solved.
+  REBOOT_TIMEOUT = 480
+
+  REMOTE_STATEFUL_PATH_TO_CHECK = ('/var', '/home', '/mnt/stateful_partition')
+  REMOTE_STATEFUL_TEST_FILENAME = '.test_file_to_be_deleted'
+  REMOTE_UPDATED_MARKERFILE_PATH = '/run/update_engine_autoupdate_completed'
+  REMOTE_LAB_MACHINE_FILE_PATH = '/mnt/stateful_partition/.labmachine'
+  KERNEL_A = {'name': 'KERN-A', 'kernel': 2, 'root': 3}
+  KERNEL_B = {'name': 'KERN-B', 'kernel': 4, 'root': 5}
+  KERNEL_UPDATE_TIMEOUT = 180
+
+  def __init__(self, device, build_name, payload_dir, transfer_class,
+               log_file=None, tempdir=None, clobber_stateful=True,
+               yes=False, do_rootfs_update=True, do_stateful_update=True,
+               reboot=True, disable_verification=False,
+               send_payload_in_parallel=False, payload_filename=None,
+               staging_server=None, clear_tpm_owner=False, ignore_appid=False):
+    """Initialize a ChromiumOSUpdater for auto-update a chromium OS device.
+
+    Args:
+      device: the ChromiumOSDevice to be updated.
+      build_name: the target update version for the device.
+      payload_dir: the directory of payload(s).
+      transfer_class: A reference to any subclass of
+          auto_updater_transfer.Transfer class.
+      log_file: The file to save running logs.
+      tempdir: the temp directory in caller, not in the device. For example,
+          the tempdir for cros flash is /tmp/cros-flash****/, used to
+          temporarily keep files when transferring update-utils package, and
+          reserve nebraska and update engine logs.
+      do_rootfs_update: whether to do rootfs partition update. The default is
+          True.
+      do_stateful_update: whether to do stateful partition update. The default
+          is True.
+      reboot: whether to reboot device after update. The default is True.
+      disable_verification: whether to disabling rootfs verification on the
+          device. The default is False.
+      clobber_stateful: whether to do a clean stateful update. The default is
+          True.
+      yes: Assume "yes" (True) for any prompt. The default is False. However,
+          it should be set as True if we want to disable all the prompts for
+          auto-update.
+      payload_filename: Filename of exact payload file to use for
+          update instead of the default: update.gz. Defaults to None. Use
+          only if you staged a payload by filename (i.e not artifact) first.
+      send_payload_in_parallel: whether to transfer payload in chunks
+          in parallel. The default is False.
+      staging_server: URL (str) of the server that's staging the payload files.
+          Assuming transfer_class is None, if value for staging_server is None
+          or empty, an auto_updater_transfer.LocalTransfer reference must be
+          passed through the transfer_class parameter.
+      clear_tpm_owner: If true, it will clear the TPM owner on reboot.
+      ignore_appid: True to tell Nebraska to ignore the update request's
+          App ID. This allows mismatching the source and target version boards.
+          One specific use case is updating between <board> and
+          <board>-kernelnext images which have different App IDs.
+    """
+    super(ChromiumOSUpdater, self).__init__(device, payload_dir)
+
+    self.tempdir = (tempdir if tempdir is not None
+                    else tempfile.mkdtemp(prefix='cros-update'))
+    self.inactive_kernel = None
+    self.update_version = build_name
+
+    # Update setting
+    self._cmd_kwargs = {}
+    self._cmd_kwargs_omit_error = {'check': False}
+    self._do_stateful_update = do_stateful_update
+    self._do_rootfs_update = do_rootfs_update
+    self._disable_verification = disable_verification
+    self._clobber_stateful = clobber_stateful
+    self._reboot = reboot
+    self._yes = yes
+    # Device's directories
+    self.device_dev_dir = os.path.join(self.device.work_dir, 'src')
+    self.device_payload_dir = os.path.join(
+        self.device.work_dir,
+        auto_updater_transfer.Transfer.PAYLOAD_DIR_NAME)
+    # autoupdate_EndToEndTest uses exact payload filename for update
+    self.payload_filename = payload_filename
+    if send_payload_in_parallel:
+      self.payload_mode = self.PAYLOAD_MODE_PARALLEL
+    else:
+      self.payload_mode = self.PAYLOAD_MODE_SCP
+    self.perf_id = None
+
+    if log_file:
+      log_kwargs = {
+          'stdout': log_file,
+          'append_to_file': True,
+          'stderr': subprocess.STDOUT,
+      }
+      self._cmd_kwargs.update(log_kwargs)
+      self._cmd_kwargs_omit_error.update(log_kwargs)
+
+    self._staging_server = staging_server
+    self._transfer_obj = self._CreateTransferObject(transfer_class)
+
+    self._clear_tpm_owner = clear_tpm_owner
+    self._ignore_appid = ignore_appid
+
+  @property
+  def is_au_endtoendtest(self):
+    return self.payload_filename is not None
+
+  @property
+  def request_logs_dir(self):
+    """Returns path to the nebraska request logfiles directory.
+
+    Returns:
+      A complete path to the logfiles directory.
+    """
+    return self.tempdir
+
+  def _CreateTransferObject(self, transfer_class):
+    """Create the correct Transfer class.
+
+    Args:
+      transfer_class: A variable that contains a reference to one of the
+          Transfer classes in auto_updater_transfer.
+    """
+    assert issubclass(transfer_class, auto_updater_transfer.Transfer)
+
+    # Determine if staging_server needs to be passed as an argument to
+    # class_ref.
+    cls_kwargs = {}
+    if self._staging_server:
+      cls_kwargs['staging_server'] = self._staging_server
+
+    return transfer_class(
+        device=self.device, payload_dir=self.payload_dir,
+        payload_name=self._GetRootFsPayloadFileName(),
+        cmd_kwargs=self._cmd_kwargs,
+        transfer_rootfs_update=self._do_rootfs_update,
+        transfer_stateful_update=self._do_rootfs_update,
+        device_payload_dir=self.device_payload_dir, tempdir=self.tempdir,
+        payload_mode=self.payload_mode, **cls_kwargs)
+
+  def CheckRestoreStateful(self):
+    """Check whether to restore stateful."""
+    logging.debug('Checking whether to restore stateful...')
+    restore_stateful = False
+    try:
+      self._CheckNebraskaCanRun()
+      return restore_stateful
+    except nebraska_wrapper.NebraskaStartupError as e:
+      if self._do_rootfs_update:
+        msg = ('Cannot start nebraska! The stateful partition may be '
+               'corrupted: %s' % e)
+        prompt = 'Attempt to restore the stateful partition?'
+        restore_stateful = self._yes or cros_build_lib.BooleanPrompt(
+            prompt=prompt, default=False, prolog=msg)
+        if not restore_stateful:
+          raise ChromiumOSUpdateError(
+              'Cannot continue to perform rootfs update!')
+
+    logging.debug('Restore stateful partition is%s required.',
+                  ('' if restore_stateful else ' not'))
+    return restore_stateful
+
+  def _CheckNebraskaCanRun(self):
+    """We can run Nebraska on |device|."""
+    nebraska_bin = os.path.join(self.device_dev_dir,
+                                self.REMOTE_NEBRASKA_FILENAME)
+    nebraska = nebraska_wrapper.RemoteNebraskaWrapper(
+        self.device, nebraska_bin=nebraska_bin)
+    nebraska.CheckNebraskaCanRun()
+
+  @classmethod
+  def GetUpdateStatus(cls, device, keys=None):
+    """Returns the status of the update engine on the |device|.
+
+    Retrieves the status from update engine and confirms all keys are
+    in the status.
+
+    Args:
+      device: A ChromiumOSDevice object.
+      keys: the keys to look for in the status result (defaults to
+          ['CURRENT_OP']).
+
+    Returns:
+      A list of values in the order of |keys|.
+    """
+    keys = keys or ['CURRENT_OP']
+    result = device.run([cls.REMOTE_UPDATE_ENGINE_BIN_FILENAME, '--status'],
+                        capture_output=True, log_output=True)
+
+    if not result.output:
+      raise Exception('Cannot get update status')
+
+    try:
+      status = key_value_store.LoadData(result.output)
+    except ValueError:
+      raise ValueError('Cannot parse update status')
+
+    values = []
+    for key in keys:
+      if key not in status:
+        raise ValueError('Missing "%s" in the update engine status' % key)
+
+      values.append(status.get(key))
+
+    return values
+
+  @classmethod
+  def GetRootDev(cls, device):
+    """Get the current root device on |device|.
+
+    Args:
+      device: a ChromiumOSDevice object, defines whose root device we
+          want to fetch.
+    """
+    rootdev = device.run(
+        ['rootdev', '-s'], capture_output=True).output.strip()
+    logging.debug('Current root device is %s', rootdev)
+    return rootdev
+
+  def _StartUpdateEngineIfNotRunning(self, device):
+    """Starts update-engine service if it is not running.
+
+    Args:
+      device: a ChromiumOSDevice object, defines the target root device.
+    """
+    try:
+      result = device.run(['start', 'update-engine'],
+                          capture_output=True, log_output=True).stdout
+      if 'start/running' in result:
+        logging.info('update engine was not running, so we started it.')
+    except cros_build_lib.RunCommandError as e:
+      if e.result.returncode != 1 or 'is already running' not in e.result.error:
+        raise e
+
+  def SetupRootfsUpdate(self):
+    """Makes sure |device| is ready for rootfs update."""
+    logging.info('Checking if update engine is idle...')
+    self._StartUpdateEngineIfNotRunning(self.device)
+    status = self.GetUpdateStatus(self.device)[0]
+    if status == UPDATE_STATUS_UPDATED_NEED_REBOOT:
+      logging.info('Device needs to reboot before updating...')
+      self._Reboot('setup of Rootfs Update')
+      status = self.GetUpdateStatus(self.device)[0]
+
+    if status != UPDATE_STATUS_IDLE:
+      raise RootfsUpdateError('Update engine is not idle. Status: %s' % status)
+
+    if self.is_au_endtoendtest:
+      # TODO(ahassani): This should only be done for jetsteam devices.
+      self._RetryCommand(['sudo', 'stop', 'ap-update-manager'],
+                         **self._cmd_kwargs_omit_error)
+
+      self._RetryCommand(['rm', '-f', self.REMOTE_UPDATED_MARKERFILE_PATH],
+                         **self._cmd_kwargs)
+      self._RetryCommand(['stop', 'ui'], **self._cmd_kwargs_omit_error)
+
+
+  def _GetDevicePythonSysPath(self):
+    """Get python sys.path of the given |device|."""
+    sys_path = self.device.run(
+        ['python', '-c', '"import json, sys; json.dump(sys.path, sys.stdout)"'],
+        capture_output=True, log_output=True).output
+    return json.loads(sys_path)
+
+  def _FindDevicePythonPackagesDir(self):
+    """Find the python packages directory for the given |device|."""
+    third_party_host_dir = ''
+    sys_path = self._GetDevicePythonSysPath()
+    for p in sys_path:
+      if p.endswith('site-packages') or p.endswith('dist-packages'):
+        third_party_host_dir = p
+        break
+
+    if not third_party_host_dir:
+      raise ChromiumOSUpdateError(
+          'Cannot find proper site-packages/dist-packages directory from '
+          'sys.path for storing packages: %s' % sys_path)
+
+    return third_party_host_dir
+
+  def _GetRootFsPayloadFileName(self):
+    """Get the correct RootFs payload filename.
+
+    Returns:
+      The payload filename. (update.gz or a custom payload filename).
+    """
+    if self.is_au_endtoendtest:
+      return self.payload_filename
+    else:
+      return auto_updater_transfer.ROOTFS_FILENAME
+
+  def ResetStatefulPartition(self):
+    """Clear any pending stateful update request."""
+    logging.debug('Resetting stateful partition...')
+    try:
+      stateful_updater.StatefulUpdater(self.device).Reset()
+    except stateful_updater.Error as e:
+      raise StatefulUpdateError(e)
+
+  def RevertBootPartition(self):
+    """Revert the boot partition."""
+    part = self.GetRootDev(self.device)
+    logging.warning('Reverting update; Boot partition will be %s', part)
+    try:
+      self.device.run(['/postinst', part], **self._cmd_kwargs)
+    except cros_build_lib.RunCommandError as e:
+      logging.warning('Reverting the boot partition failed: %s', e)
+
+  def UpdateRootfs(self):
+    """Update the rootfs partition of the device (utilizing nebraska)."""
+    logging.notice('Updating rootfs partition...')
+    nebraska_bin = os.path.join(self.device_dev_dir,
+                                self.REMOTE_NEBRASKA_FILENAME)
+
+    nebraska = nebraska_wrapper.RemoteNebraskaWrapper(
+        self.device, nebraska_bin=nebraska_bin,
+        update_payloads_address='file://' + self.device_payload_dir,
+        update_metadata_dir=self.device_payload_dir,
+        ignore_appid=self._ignore_appid)
+
+    try:
+      nebraska.Start()
+
+      # Use the localhost IP address (default) to ensure that update engine
+      # client can connect to the nebraska.
+      nebraska_url = nebraska.GetURL(critical_update=True)
+      cmd = [self.REMOTE_UPDATE_ENGINE_BIN_FILENAME, '--check_for_update',
+             '--omaha_url="%s"' % nebraska_url]
+
+      self.device.run(cmd, **self._cmd_kwargs)
+
+      # If we are using a progress bar, update it every 0.5s instead of 10s.
+      if command.UseProgressBar():
+        update_check_interval = self.UPDATE_CHECK_INTERVAL_PROGRESSBAR
+        oper = operation.ProgressBarOperation()
+      else:
+        update_check_interval = self.UPDATE_CHECK_INTERVAL_NORMAL
+        oper = None
+      end_message_not_printed = True
+
+      # Loop until update is complete.
+      while True:
+        # Number of times to retry `update_engine_client --status`. See
+        # crbug.com/744212.
+        update_engine_status_retry = 30
+        op, progress = retry_util.RetryException(
+            cros_build_lib.RunCommandError,
+            update_engine_status_retry,
+            self.GetUpdateStatus,
+            self.device,
+            ['CURRENT_OP', 'PROGRESS'],
+            delay_sec=DELAY_SEC_FOR_RETRY)[0:2]
+        logging.info('Waiting for update...status: %s at progress %s',
+                     op, progress)
+
+        if op == UPDATE_STATUS_UPDATED_NEED_REBOOT:
+          logging.info('Update completed.')
+          break
+
+        if op == UPDATE_STATUS_IDLE:
+          # Something went wrong. Try to get last error code.
+          cmd = ['cat', self.REMOTE_UPDATE_ENGINE_LOGFILE_PATH]
+          log = self.device.run(cmd).stdout.strip().splitlines()
+          err_str = 'Updating payload state for error code: '
+          targets = [line for line in log if err_str in line]
+          logging.debug('Error lines found: %s', targets)
+          if not targets:
+            raise RootfsUpdateError(
+                'Update failed with unexpected update status: %s' % op)
+          else:
+            # e.g 20 (ErrorCode::kDownloadStateInitializationError)
+            raise RootfsUpdateError(targets[-1].rpartition(err_str)[2])
+
+        if oper is not None:
+          if op == UPDATE_STATUS_DOWNLOADING:
+            oper.ProgressBar(float(progress))
+          elif end_message_not_printed and op == UPDATE_STATUS_FINALIZING:
+            oper.Cleanup()
+            logging.info('Finalizing image.')
+            end_message_not_printed = False
+
+        time.sleep(update_check_interval)
+    # TODO(ahassani): Scope the Exception to finer levels. For example we don't
+    # need to revert the boot partition if the Nebraska fails to start, etc.
+    except Exception as e:
+      logging.error('Rootfs update failed %s', e)
+      self.RevertBootPartition()
+      logging.warning(nebraska.PrintLog() or 'No nebraska log is available.')
+      raise RootfsUpdateError('Failed to perform rootfs update: %r' % e)
+    finally:
+      nebraska.Stop()
+
+      nebraska.CollectLogs(os.path.join(self.tempdir,
+                                        self.LOCAL_NEBRASKA_LOG_FILENAME))
+      self.device.CopyFromDevice(
+          self.REMOTE_UPDATE_ENGINE_LOGFILE_PATH,
+          os.path.join(self.tempdir, os.path.basename(
+              self.REMOTE_UPDATE_ENGINE_LOGFILE_PATH)),
+          follow_symlinks=True, **self._cmd_kwargs_omit_error)
+
+  def UpdateStateful(self):
+    """Update the stateful partition of the device."""
+    try:
+      stateful_update_payload = os.path.join(
+          self.device.work_dir, auto_updater_transfer.STATEFUL_FILENAME)
+
+      updater = stateful_updater.StatefulUpdater(self.device)
+      updater.Update(
+          stateful_update_payload,
+          update_type=(stateful_updater.StatefulUpdater.UPDATE_TYPE_CLOBBER if
+                       self._clobber_stateful else None))
+
+      # Delete the stateful update file on success so it doesn't occupy extra
+      # disk space. On failure it will get cleaned up.
+      self.device.DeletePath(stateful_update_payload)
+    except stateful_updater.Error as e:
+      error_msg = 'Stateful update failed with error: %s' % str(e)
+      logging.exception(error_msg)
+      self.ResetStatefulPartition()
+      raise StatefulUpdateError(error_msg)
+
+  def _FixPayloadPropertiesFile(self):
+    """Fix the update payload properties file so nebraska can use it.
+
+    Update the payload properties file to make sure that nebraska can use it.
+    The reason is that very old payloads are still being used for provisioning
+    the AU tests, but those properties files are not compatible with recent
+    nebraska protocols.
+
+    TODO(ahassani): Once we only test delta or full payload with
+    source image of M77 or higher, this function can be deprecated.
+    """
+    logging.info('Fixing payload properties file.')
+    payload_properties_path = self._transfer_obj.GetPayloadPropsFile()
+    props = json.loads(osutils.ReadFile(payload_properties_path))
+    props['appid'] = self.ResolveAPPIDMismatchIfAny(props.get('appid'))
+    values = self._transfer_obj.GetPayloadProps()
+
+    # TODO(ahassani): Use the keys from nebraska.py once it is moved to
+    # chromite.
+    valid_entries = {
+        # Only old payloads lack this key, and since those are only used for
+        # provisioning, they will be full payloads.
+        'is_delta': False,
+        'size': values['size'],
+        'target_version': values['image_version'],
+    }
+
+    for key, value in valid_entries.items():
+      if props.get(key) is None:
+        props[key] = value
+
+    with open(payload_properties_path, 'w') as fp:
+      json.dump(props, fp)
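+    # Illustrative sketch (values are hypothetical): an old properties file
+    # like {"appid": "{SOME-APP-ID}"} would end up as something like
+    #   {"appid": "", "is_delta": false, "size": 123456789,
+    #    "target_version": "99999.0.0"}
+    # with the concrete values coming from GetPayloadProps() and
+    # ResolveAPPIDMismatchIfAny().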
+
+  def RunUpdateRootfs(self):
+    """Run all processes needed by updating rootfs.
+
+    1. Check device's status to make sure it can be updated.
+    2. Copy files to remote device needed for rootfs update.
+    3. Do root updating.
+    """
+
+    # Any call to self._transfer_obj.TransferRootfsUpdate() must be preceded
+    # by a conditional call to self._FixPayloadPropertiesFile() as this
+    # handles the use case reported in crbug.com/1012520. Whenever
+    # self._FixPayloadPropertiesFile() gets deprecated, this call can be
+    # safely removed. For more details on TODOs, refer to
+    # self.TransferRootfsUpdate() docstrings.
+
+    self._FixPayloadPropertiesFile()
+
+    # SetupRootfsUpdate() may reboot the device and therefore should be called
+    # before any payloads are transferred to the device and only if rootfs
+    # update is required.
+    self.SetupRootfsUpdate()
+
+    # Copy payload for rootfs update.
+    self._transfer_obj.TransferRootfsUpdate()
+
+    self.UpdateRootfs()
+
+    if self.is_au_endtoendtest:
+      self.PostCheckRootfsUpdate()
+
+    # Delete the update file so it doesn't take much space on disk for the
+    # remainder of the update process.
+    self.device.DeletePath(self.device_payload_dir, recursive=True)
+
+  def RunUpdateStateful(self):
+    """Run all processes needed by updating stateful.
+
+    1. Copy files to remote device needed by stateful update.
+    2. Do stateful update.
+    """
+    self._transfer_obj.TransferStatefulUpdate()
+    self.UpdateStateful()
+
+  def RebootAndVerify(self):
+    """Reboot and verify the remote device.
+
+    1. Reboot the remote device. If _clobber_stateful (--clobber-stateful)
+    is set, the stateful partition is wiped and the working directory on the
+    remote device no longer exists, so recreate the working directory for
+    this remote device.
+    2. Verify the remote device by checking whether the root device changed
+    after reboot.
+    """
+    logging.notice('Rebooting device...')
+    # Record the current root device. This must be done after SetupRootfsUpdate
+    # and before reboot, since SetupRootfsUpdate may reboot the device if there
+    # is a pending update, which changes the root device, and reboot will
+    # definitely change the root device if update successfully finishes.
+    old_root_dev = self.GetRootDev(self.device)
+    self.device.Reboot()
+    if self._clobber_stateful:
+      self.device.run(['mkdir', '-p', self.device.work_dir])
+
+    if self._do_rootfs_update:
+      logging.notice('Verifying that the device has been updated...')
+      new_root_dev = self.GetRootDev(self.device)
+      if old_root_dev is None:
+        raise AutoUpdateVerifyError(
+            'Failed to locate root device before update.')
+
+      if new_root_dev is None:
+        raise AutoUpdateVerifyError(
+            'Failed to locate root device after update.')
+
+      if new_root_dev == old_root_dev:
+        raise AutoUpdateVerifyError(
+            'Failed to boot into the new version. Possibly there was a '
+            'signing problem, or an automated rollback occurred because '
+            'your new image failed to boot.')
+
+  def ResolveAPPIDMismatchIfAny(self, payload_app_id):
+    """Resolves and APP ID mismatch between the payload and device.
+
+    If the APP ID of the payload is different than the device, then the nebraska
+    will fail. We empty the payload's AppID so nebraska can do partial APP ID
+    matching.
+    """
+    if ((self.device.app_id and self.device.app_id == payload_app_id) or
+        payload_app_id == ''):
+      return payload_app_id
+
+    logging.warning('You are installing an image with a different release '
+                    'App ID than the device (%s vs %s), we are forcing the '
+                    'install!', payload_app_id, self.device.app_id)
+    return ''
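+  # Behavior sketch (hypothetical IDs): if the device reports app_id 'X' and
+  # the payload also carries 'X', 'X' is returned unchanged; if the payload
+  # carries 'Y' instead, a warning is logged and '' is returned so nebraska
+  # can fall back to partial APP ID matching.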
+
+  def RunUpdate(self):
+    """Update the device with image of specific version."""
+    self._transfer_obj.CheckPayloads()
+
+    self._transfer_obj.TransferUpdateUtilsPackage()
+
+    restore_stateful = self.CheckRestoreStateful()
+    if restore_stateful:
+      self.RestoreStateful()
+
+    # Perform device updates.
+    if self._do_rootfs_update:
+      self.RunUpdateRootfs()
+      logging.info('Rootfs update completed.')
+
+    if self._do_stateful_update and not restore_stateful:
+      self.RunUpdateStateful()
+      logging.info('Stateful update completed.')
+
+    if self._clear_tpm_owner:
+      self.SetClearTpmOwnerRequest()
+
+    if self._reboot:
+      self.RebootAndVerify()
+
+    if self.is_au_endtoendtest:
+      self.PostCheckCrOSUpdate()
+
+    if self._disable_verification:
+      logging.info('Disabling rootfs verification on the device...')
+      self.device.DisableRootfsVerification()
+
+  def _Reboot(self, error_stage, timeout=None):
+    try:
+      if timeout is None:
+        timeout = self.REBOOT_TIMEOUT
+      self.device.Reboot(timeout_sec=timeout)
+    except cros_build_lib.DieSystemExit:
+      raise ChromiumOSUpdateError('Could not recover from reboot at %s' %
+                                  error_stage)
+    except remote_access.SSHConnectionError:
+      raise ChromiumOSUpdateError('Failed to connect at %s' % error_stage)
+
+  def _cgpt(self, flag, kernel, dev='$(rootdev -s -d)'):
+    """Return numeric cgpt value for the specified flag, kernel, device."""
+    cmd = ['cgpt', 'show', '-n', '-i', '%d' % kernel['kernel'], flag, dev]
+    return int(self._RetryCommand(
+        cmd, capture_output=True, log_output=True).output.strip())
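+  # Example invocation (KERNEL_A contents are an assumption; it is defined
+  # earlier in this class): self._cgpt('-P', self.KERNEL_A) would run
+  #   cgpt show -n -i <KERNEL_A['kernel']> -P $(rootdev -s -d)
+  # on the device and return the kernel priority as an int.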
+
+  def _GetKernelPriority(self, kernel):
+    """Return numeric priority for the specified kernel.
+
+    Args:
+      kernel: information of the given kernel, KERNEL_A or KERNEL_B.
+    """
+    return self._cgpt('-P', kernel)
+
+  def _GetKernelSuccess(self, kernel):
+    """Return boolean success flag for the specified kernel.
+
+    Args:
+      kernel: information of the given kernel, KERNEL_A or KERNEL_B.
+    """
+    return self._cgpt('-S', kernel) != 0
+
+  def _GetKernelTries(self, kernel):
+    """Return tries count for the specified kernel.
+
+    Args:
+      kernel: information of the given kernel, KERNEL_A or KERNEL_B.
+    """
+    return self._cgpt('-T', kernel)
+
+  def _GetKernelState(self):
+    """Returns the (<active>, <inactive>) kernel state as a pair."""
+    active_root = int(re.findall(r'(\d+\Z)', self.GetRootDev(self.device))[0])
+    if active_root == self.KERNEL_A['root']:
+      return self.KERNEL_A, self.KERNEL_B
+    elif active_root == self.KERNEL_B['root']:
+      return self.KERNEL_B, self.KERNEL_A
+    else:
+      raise ChromiumOSUpdateError('Encountered unknown root partition: %s' %
+                                  active_root)
+
+  def _GetReleaseVersion(self):
+    """Get release version of the device."""
+    lsb_release_content = self._RetryCommand(
+        ['cat', '/etc/lsb-release'],
+        capture_output=True, log_output=True).output.strip()
+    regex = r'^CHROMEOS_RELEASE_VERSION=(.+)$'
+    return auto_update_util.GetChromeosBuildInfo(
+        lsb_release_content=lsb_release_content, regex=regex)
+
+  def _GetReleaseBuilderPath(self):
+    """Get release version of the device."""
+    lsb_release_content = self._RetryCommand(
+        ['cat', '/etc/lsb-release'],
+        capture_output=True, log_output=True).output.strip()
+    regex = r'^CHROMEOS_RELEASE_BUILDER_PATH=(.+)$'
+    return auto_update_util.GetChromeosBuildInfo(
+        lsb_release_content=lsb_release_content, regex=regex)
+
+  def CheckVersion(self):
+    """Check the image running in DUT has the expected version.
+
+    Returns:
+      True if the DUT's image version matches the version that the
+      ChromiumOSUpdater tries to update to.
+    """
+    if not self.update_version:
+      return False
+
+    # Use CHROMEOS_RELEASE_BUILDER_PATH to match the build version if it exists
+    # in lsb-release, otherwise, continue using CHROMEOS_RELEASE_VERSION.
+    release_builder_path = self._GetReleaseBuilderPath()
+    if release_builder_path:
+      return self.update_version == release_builder_path
+
+    return self.update_version.endswith(self._GetReleaseVersion())
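+  # Illustrative example (hypothetical values): with
+  # CHROMEOS_RELEASE_BUILDER_PATH=eve-release/R79-12345.6.0 present in
+  # /etc/lsb-release, CheckVersion() returns True only if update_version
+  # equals that exact builder path; otherwise update_version must end with
+  # CHROMEOS_RELEASE_VERSION (e.g. '12345.6.0').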
+
+  def _VerifyBootExpectations(self, expected_kernel_state, rollback_message):
+    """Verify that we fully booted given expected kernel state.
+
+    It verifies that we booted using the correct kernel state, and that the
+    OS has marked the kernel as good.
+
+    Args:
+      expected_kernel_state: kernel state that we're verifying against, i.e.
+        we expect to be booted onto partition 4, etc. See output of
+        _GetKernelState.
+      rollback_message: string to raise as a RootfsUpdateError if we booted
+        with the wrong partition.
+    """
+    logging.debug('Start verifying boot expectations...')
+    # Figure out the newly active kernel
+    active_kernel_state = self._GetKernelState()[0]
+
+    # Rollback
+    if (expected_kernel_state and
+        active_kernel_state != expected_kernel_state):
+      logging.debug('Dumping partition table.')
+      self.device.run(['cgpt', 'show', '$(rootdev -s -d)'],
+                      **self._cmd_kwargs)
+      logging.debug('Dumping crossystem for firmware debugging.')
+      self.device.run(['crossystem', '--all'], **self._cmd_kwargs)
+      raise RootfsUpdateError(rollback_message)
+
+    # Make sure chromeos-setgoodkernel runs
+    try:
+      timeout_util.WaitForReturnTrue(
+          lambda: (self._GetKernelTries(active_kernel_state) == 0
+                   and self._GetKernelSuccess(active_kernel_state)),
+          self.KERNEL_UPDATE_TIMEOUT,
+          period=5)
+    except timeout_util.TimeoutError:
+      services_status = self.device.run(
+          ['status', 'system-services'], capture_output=True,
+          log_output=True).output
+      logging.debug('System services_status: %r', services_status)
+      if services_status != 'system-services start/running\n':
+        event = ('Chrome failed to reach login screen')
+      else:
+        event = ('update-engine failed to call '
+                 'chromeos-setgoodkernel')
+      raise RootfsUpdateError(
+          'After update and reboot, %s '
+          'within %d seconds' % (event, self.KERNEL_UPDATE_TIMEOUT))
+
+  def _CheckVersionToConfirmInstall(self):
+    logging.debug('Checking whether the new build is successfully installed...')
+    if not self.update_version:
+      logging.debug('No update_version is provided if test is executed with '
+                    'local nebraska.')
+      return True
+
+    # Always try the default check_version method first, this prevents
+    # any backward compatibility issue.
+    if self.CheckVersion():
+      return True
+
+    return auto_update_util.VersionMatch(
+        self.update_version, self._GetReleaseVersion())
+
+  def _RetryCommand(self, cmd, **kwargs):
+    """Retry commands if SSHConnectionError happens.
+
+    Args:
+      cmd: the command to be run by device.
+      kwargs: the parameters for device to run the command.
+
+    Returns:
+      the output of running the command.
+    """
+    return retry_util.RetryException(
+        remote_access.SSHConnectionError,
+        MAX_RETRY,
+        self.device.run,
+        cmd, delay_sec=DELAY_SEC_FOR_RETRY,
+        shell=isinstance(cmd, six.string_types),
+        **kwargs)
+
+  def PreSetupStatefulUpdate(self):
+    """Pre-setup for stateful update for CrOS host."""
+    logging.debug('Start pre-setup for stateful update...')
+    if self._clobber_stateful:
+      for folder in self.REMOTE_STATEFUL_PATH_TO_CHECK:
+        touch_path = os.path.join(folder, self.REMOTE_STATEFUL_TEST_FILENAME)
+        self._RetryCommand(['touch', touch_path], **self._cmd_kwargs)
+
+  def PostCheckStatefulUpdate(self):
+    """Post-check for stateful update for CrOS host."""
+    logging.debug('Start post check for stateful update...')
+    self._Reboot('post check of stateful update')
+    if self._clobber_stateful:
+      for folder in self.REMOTE_STATEFUL_PATH_TO_CHECK:
+        test_file_path = os.path.join(folder,
+                                      self.REMOTE_STATEFUL_TEST_FILENAME)
+        # If stateful update succeeds, these test files should not exist.
+        if self.device.IfFileExists(test_file_path,
+                                    **self._cmd_kwargs_omit_error):
+          raise StatefulUpdateError('failed to post-check stateful update.')
+
+  def _IsUpdateUtilsPackageInstalled(self):
+    """Check whether update-utils package is well installed.
+
+    There's a chance that the nebraska package is removed in the middle of
+    the auto-update process. This function double-checks it and transfers it
+    again if it has been removed.
+    """
+    logging.info('Checking whether nebraska files are still on the device...')
+    try:
+      nebraska_bin = os.path.join(self.device_dev_dir,
+                                  self.REMOTE_NEBRASKA_FILENAME)
+      if not self.device.IfFileExists(
+          nebraska_bin, **self._cmd_kwargs_omit_error):
+        logging.info('Nebraska files not found on device. Resending them...')
+
+        self._transfer_obj.TransferUpdateUtilsPackage()
+
+      return True
+    except cros_build_lib.RunCommandError as e:
+      logging.warning('Failed to verify whether packages still exist: %s', e)
+      return False
+
+  def CheckNebraskaCanRun(self):
+    """Check if nebraska can successfully run for ChromiumOSUpdater."""
+    self._IsUpdateUtilsPackageInstalled()
+    self._CheckNebraskaCanRun()
+
+  def RestoreStateful(self):
+    """Restore stateful partition for device."""
+    logging.warning('Restoring the stateful partition.')
+    self.PreSetupStatefulUpdate()
+    self._transfer_obj.TransferStatefulUpdate()
+    self.ResetStatefulPartition()
+    self.UpdateStateful()
+    self.PostCheckStatefulUpdate()
+    try:
+      self.CheckNebraskaCanRun()
+      logging.info('Stateful partition restored.')
+    except nebraska_wrapper.NebraskaStartupError as e:
+      raise ChromiumOSUpdateError(
+          'Unable to restore stateful partition: %s' % e)
+
+  def SetClearTpmOwnerRequest(self):
+    """Set clear_tpm_owner_request flag."""
+    # The issue is that certain AU tests leave the TPM in a bad state which
+    # most commonly shows up in provisioning.  Executing this 'crossystem'
+    # command before rebooting clears the problem state during the reboot.
+    # It's also worth mentioning that this isn't a complete fix:  The bad
+    # TPM state in theory might happen some time other than during
+    # provisioning.  Also, the bad TPM state isn't supposed to happen at
+    # all; this change is just papering over the real bug.
+    logging.info('Setting clear_tpm_owner_request to 1.')
+    self._RetryCommand('crossystem clear_tpm_owner_request=1',
+                       **self._cmd_kwargs_omit_error)
+
+  def PostCheckRootfsUpdate(self):
+    """Post-check for rootfs update for CrOS host."""
+    logging.debug('Start post check for rootfs update...')
+    active_kernel, inactive_kernel = self._GetKernelState()
+    logging.debug('active_kernel= %s, inactive_kernel=%s',
+                  active_kernel, inactive_kernel)
+    if (self._GetKernelPriority(inactive_kernel) <
+        self._GetKernelPriority(active_kernel)):
+      raise RootfsUpdateError('Update failed. The priority of the inactive '
+                              'kernel partition is less than that of the '
+                              'active kernel partition.')
+    self.inactive_kernel = inactive_kernel
+
+  def PostCheckCrOSUpdate(self):
+    """Post check for the whole auto-update process."""
+    logging.debug('Post check for the whole CrOS update...')
+    start_time = time.time()
+    # Do not use 'sh' here since the current device.run cannot recognize
+    # the content of $FILE.
+    autoreboot_cmd = ('FILE="%s" ; [ -f "$FILE" ] || '
+                      '( touch "$FILE" ; start autoreboot )')
+    self._RetryCommand(autoreboot_cmd % self.REMOTE_LAB_MACHINE_FILE_PATH,
+                       **self._cmd_kwargs)
+
+    # Loop in case the initial check happens before the reboot.
+    while True:
+      try:
+        start_verify_time = time.time()
+        self._VerifyBootExpectations(
+            self.inactive_kernel, rollback_message=
+            'Build %s failed to boot on %s; system rolled back to previous '
+            'build' % (self.update_version, self.device.hostname))
+
+        # Check that we've got the build we meant to install.
+        if not self._CheckVersionToConfirmInstall():
+          raise ChromiumOSUpdateError(
+              'Failed to update %s to build %s; found build '
+              '%s instead' % (self.device.hostname,
+                              self.update_version,
+                              self._GetReleaseVersion()))
+      except RebootVerificationError as e:
+        # If a minimum amount of time since starting the check has not
+        # elapsed, wait and retry.  Use the start of the verification
+        # time in case an SSH call takes a long time to return/fail.
+        if start_verify_time - start_time < POST_CHECK_SETTLE_SECONDS:
+          logging.warning('Delaying for re-check of %s to update to %s (%s)',
+                          self.device.hostname, self.update_version, e)
+          time.sleep(POST_CHECK_RETRY_SECONDS)
+          continue
+        raise
+      break
+
+    if not self._clobber_stateful:
+      self.PostRebootUpdateCheckForAUTest()
+
+  def PostRebootUpdateCheckForAUTest(self):
+    """Do another update check after reboot to get the post update hostlog.
+
+    This is only done with autoupdate_EndToEndTest.
+    """
+    logging.debug('Doing one final update check to get post update hostlog.')
+    nebraska_bin = os.path.join(self.device_dev_dir,
+                                self.REMOTE_NEBRASKA_FILENAME)
+    nebraska = nebraska_wrapper.RemoteNebraskaWrapper(
+        self.device, nebraska_bin=nebraska_bin,
+        update_metadata_dir=self.device.work_dir)
+
+    try:
+      nebraska.Start()
+
+      nebraska_url = nebraska.GetURL(critical_update=True, no_update=True)
+      cmd = [self.REMOTE_UPDATE_ENGINE_BIN_FILENAME, '--check_for_update',
+             '--omaha_url="%s"' % nebraska_url]
+      self.device.run(cmd, **self._cmd_kwargs)
+      op = self.GetUpdateStatus(self.device)[0]
+      logging.info('Post update check status: %s', op)
+    except Exception as err:
+      logging.error('Post reboot update check failed: %s', str(err))
+      logging.warning(nebraska.PrintLog() or 'No nebraska log is available.')
+    finally:
+      nebraska.Stop()
diff --git a/utils/frozen_chromite/lib/auto_updater_transfer.py b/utils/frozen_chromite/lib/auto_updater_transfer.py
new file mode 100644
index 0000000..66cd67b
--- /dev/null
+++ b/utils/frozen_chromite/lib/auto_updater_transfer.py
@@ -0,0 +1,642 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library containing functions to transfer files onto a remote device.
+
+Transfer Base class includes:
+
+  ----Transfer----
+  * @retry functionality for all public transfer functions.
+
+LocalTransfer includes:
+
+  ----Precheck---
+  * Pre-check payload's existence before auto-update.
+
+  ----Transfer----
+  * Transfer the update-utils (nebraska, et al.) package first.
+  * Transfer rootfs update files if rootfs update is required.
+  * Transfer stateful update files if stateful update is required.
+
+LabTransfer includes:
+
+  ----Precheck---
+  * Pre-check payload's existence on the staging server before auto-update.
+
+  ----Transfer----
+  * Download the update-utils (nebraska, et al.) package onto the DUT directly
+    from the staging server at first.
+  * Download rootfs update files onto the DUT directly from the staging server
+    if rootfs update is required.
+  * Download stateful update files onto the DUT directly from the staging server
+    if stateful update is required.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import abc
+import json
+import os
+import re
+
+import six
+from six.moves import urllib
+
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import nebraska_wrapper
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import retry_util
+
+# Naming conventions for global variables:
+#   Path on remote host with slash: REMOTE_XXX_PATH
+#   File on local server without slash: LOCAL_XXX_FILENAME
+#   Path on local server: LOCAL_XXX_PATH
+
+# Max number of the times for retry:
+# 1. for transfer functions to be retried.
+# 2. for some retriable commands to be retried.
+_MAX_RETRY = 5
+
+# The delay between retriable tasks.
+_DELAY_SEC_FOR_RETRY = 5
+
+# Update file names for rootfs+kernel and stateful partitions.
+ROOTFS_FILENAME = 'update.gz'
+STATEFUL_FILENAME = 'stateful.tgz'
+
+# Regular expression that is used to evaluate payload names to determine payload
+# validity.
+_PAYLOAD_PATTERN = r'payloads/chromeos_(?P<image_version>[^_]+)_.*'
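+# Illustrative match: a payload name such as
+# 'payloads/chromeos_14698.0.0_octopus_dev-channel_full_test.bin-...' yields
+# image_version '14698.0.0' (see the example in LabEndToEndPayloadTransfer).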
+
+# File copying modes.
+_SCP = 'scp'
+
+
+class Error(Exception):
+  """A generic auto updater transfer error."""
+
+
+class ChromiumOSTransferError(Error):
+  """Thrown when there is a general transfer specific error."""
+
+
+def GetPayloadPropertiesFileName(payload):
+  """Returns the payload properties file given the path to the payload."""
+  return payload + '.json'
+
+
+class Transfer(six.with_metaclass(abc.ABCMeta, object)):
+  """Abstract Base Class that handles payload precheck and transfer."""
+
+  PAYLOAD_DIR_NAME = 'payloads'
+
+  def __init__(self, device, payload_dir, tempdir,
+               payload_name, cmd_kwargs, device_payload_dir,
+               payload_mode='scp', transfer_stateful_update=True,
+               transfer_rootfs_update=True):
+    """Initialize Base Class for transferring payloads functionality.
+
+    Args:
+      device: The ChromiumOSDevice to be updated.
+      payload_dir: The directory of payload(s).
+      tempdir: The temp directory in caller, not in the device. For example,
+          the tempdir for cros flash is /tmp/cros-flash****/, used to
+          temporarily keep files when transferring update-utils package, and
+          reserve nebraska and update engine logs.
+      payload_name: Filename of exact payload file to use for update.
+      cmd_kwargs: Keyword arguments that are sent along with the commands that
+          are run on the device.
+      device_payload_dir: Path to the payload directory in the device's work
+          directory.
+      payload_mode: The payload mode - it can be 'parallel' or 'scp'.
+      transfer_stateful_update: Whether to transfer payloads necessary for
+          stateful update. The default is True.
+      transfer_rootfs_update: Whether to transfer payloads necessary for
+          rootfs update. The default is True.
+    """
+    self._device = device
+    self._payload_dir = payload_dir
+    self._tempdir = tempdir
+    self._payload_name = payload_name
+    self._cmd_kwargs = cmd_kwargs
+    self._device_payload_dir = device_payload_dir
+    if payload_mode not in ('scp', 'parallel'):
+      raise ValueError('The given value %s for payload mode is not valid.' %
+                       payload_mode)
+    self._payload_mode = payload_mode
+    self._transfer_stateful_update = transfer_stateful_update
+    self._transfer_rootfs_update = transfer_rootfs_update
+    self._local_payload_props_path = None
+
+  @abc.abstractmethod
+  def CheckPayloads(self):
+    """Verify that all required payloads are in |self.payload_dir|."""
+
+  def TransferUpdateUtilsPackage(self):
+    """Transfer update-utils package to work directory of the remote device."""
+    retry_util.RetryException(
+        cros_build_lib.RunCommandError,
+        _MAX_RETRY,
+        self._TransferUpdateUtilsPackage,
+        delay_sec=_DELAY_SEC_FOR_RETRY)
+
+  def TransferRootfsUpdate(self):
+    """Transfer files for rootfs update.
+
+    The corresponding payloads are copied to the remote device for rootfs
+    update.
+    """
+    retry_util.RetryException(
+        cros_build_lib.RunCommandError,
+        _MAX_RETRY,
+        self._TransferRootfsUpdate,
+        delay_sec=_DELAY_SEC_FOR_RETRY)
+
+  def TransferStatefulUpdate(self):
+    """Transfer files for stateful update.
+
+    The stateful update bin and the corresponding payloads are copied to the
+    target remote device for stateful update.
+    """
+    retry_util.RetryException(
+        cros_build_lib.RunCommandError,
+        _MAX_RETRY,
+        self._TransferStatefulUpdate,
+        delay_sec=_DELAY_SEC_FOR_RETRY)
+
+  def _EnsureDeviceDirectory(self, directory):
+    """Mkdir the directory no matther whether this directory exists on host.
+
+    Args:
+      directory: The directory to be made on the device.
+    """
+    self._device.run(['mkdir', '-p', directory], **self._cmd_kwargs)
+
+  @abc.abstractmethod
+  def GetPayloadPropsFile(self):
+    """Get the payload properties file path."""
+
+  @abc.abstractmethod
+  def GetPayloadProps(self):
+    """Gets properties necessary to fix the payload properties file.
+
+    Returns:
+      Dict in the format: {'image_version': 12345.0.0, 'size': 123456789}.
+    """
+
+  def _GetPayloadFormat(self):
+    """Gets the payload format that should be evaluated.
+
+    Returns:
+      The payload name as a string.
+    """
+    return self._payload_name
+
+  def _GetPayloadPattern(self):
+    """The regex pattern that the payload format must match.
+
+    Returns:
+      Regular expression.
+    """
+    return _PAYLOAD_PATTERN
+
+
+class LocalTransfer(Transfer):
+  """Abstracts logic that handles transferring local files to the DUT."""
+
+  def __init__(self, *args, **kwargs):
+    """Initialize LocalTransfer to handle transferring files from local to DUT.
+
+    Args:
+      *args: The list of arguments to be passed. See Base class for a complete
+          list of accepted arguments.
+      **kwargs: Any keyword arguments to be passed. See Base class for a
+          complete list of accepted keyword arguments.
+    """
+    super(LocalTransfer, self).__init__(*args, **kwargs)
+
+  def CheckPayloads(self):
+    """Verify that all required payloads are in |self.payload_dir|."""
+    logging.debug('Checking if payloads have been stored in directory %s...',
+                  self._payload_dir)
+    filenames = []
+
+    if self._transfer_rootfs_update:
+      filenames += [self._payload_name,
+                    GetPayloadPropertiesFileName(self._payload_name)]
+
+    if self._transfer_stateful_update:
+      filenames += [STATEFUL_FILENAME]
+
+    for fname in filenames:
+      payload = os.path.join(self._payload_dir, fname)
+      if not os.path.exists(payload):
+        raise ChromiumOSTransferError('Payload %s does not exist!' % payload)
+
+  def _TransferUpdateUtilsPackage(self):
+    """Transfer update-utils package to work directory of the remote device."""
+    logging.notice('Copying update script to device...')
+    source_dir = os.path.join(self._tempdir, 'src')
+    osutils.SafeMakedirs(source_dir)
+    nebraska_wrapper.RemoteNebraskaWrapper.GetNebraskaSrcFile(source_dir)
+
+    # Make sure the device.work_dir exists after any installation and reboot.
+    self._EnsureDeviceDirectory(self._device.work_dir)
+    # Python packages are plain text files.
+    self._device.CopyToWorkDir(source_dir, mode=_SCP, log_output=True,
+                               **self._cmd_kwargs)
+
+  def _TransferRootfsUpdate(self):
+    """Transfer files for rootfs update.
+
+    Copy the update payload to the remote device for rootfs update.
+    """
+    self._EnsureDeviceDirectory(self._device_payload_dir)
+    logging.notice('Copying rootfs payload to device...')
+    payload = os.path.join(self._payload_dir, self._payload_name)
+    self._device.CopyToWorkDir(payload, self.PAYLOAD_DIR_NAME,
+                               mode=self._payload_mode,
+                               log_output=True, **self._cmd_kwargs)
+    payload_properties_path = GetPayloadPropertiesFileName(payload)
+    self._device.CopyToWorkDir(payload_properties_path, self.PAYLOAD_DIR_NAME,
+                               mode=self._payload_mode,
+                               log_output=True, **self._cmd_kwargs)
+
+  def _TransferStatefulUpdate(self):
+    """Transfer files for stateful update.
+
+    The stateful update payloads are copied to the target remote device for
+    stateful update.
+    """
+    logging.notice('Copying target stateful payload to device...')
+    payload = os.path.join(self._payload_dir, STATEFUL_FILENAME)
+    self._device.CopyToWorkDir(payload, mode=self._payload_mode,
+                               log_output=True, **self._cmd_kwargs)
+
+  def GetPayloadPropsFile(self):
+    """Finds the local payload properties file."""
+    # Payload properties file is available locally so just catch it next to the
+    # payload file.
+    if self._local_payload_props_path is None:
+      self._local_payload_props_path = os.path.join(
+          self._payload_dir, GetPayloadPropertiesFileName(self._payload_name))
+    return self._local_payload_props_path
+
+  def GetPayloadProps(self):
+    """Gets image_version from the payload_name and size of the payload.
+
+    The payload name must match the expected payload pattern (see
+    _PAYLOAD_PATTERN) for a complete match; otherwise a ValueError is raised.
+    If the payload filename is update.gz, image_version cannot be extracted
+    from its name, so image_version is set to a dummy 99999.0.0.
+
+    Returns:
+      Dict - See parent class's function for full details.
+    """
+    payload_filepath = os.path.join(self._payload_dir, self._payload_name)
+    values = {
+        'image_version': '99999.0.0',
+        'size': os.path.getsize(payload_filepath)
+    }
+    if self._payload_name != ROOTFS_FILENAME:
+      payload_format = self._GetPayloadFormat()
+      payload_pattern = self._GetPayloadPattern()
+      m = re.match(payload_pattern, payload_format)
+      if not m:
+        raise ValueError(
+            'Regular expression %r did not match the expected payload format '
+            '%s' % (payload_pattern, payload_format))
+      values.update(m.groupdict())
+    return values
+
+
+class LabTransfer(Transfer):
+  """Abstracts logic that transfers files from staging server to the DUT."""
+
+  def __init__(self, staging_server, *args, **kwargs):
+    """Initialize LabTransfer to transfer files from staging server to DUT.
+
+    Args:
+      staging_server: Url of the server that's staging the payload files.
+      *args: The list of arguments to be passed. See Base class for a complete
+          list of accepted arguments.
+      **kwargs: Any keyword arguments to be passed. See Base class for a
+          complete list of accepted keyword arguments.
+    """
+    self._staging_server = staging_server
+    super(LabTransfer, self).__init__(*args, **kwargs)
+
+  def _GetPayloadFormat(self):
+    """Gets the payload format that should be evaluated.
+
+    Returns:
+      The payload dir as a string.
+    """
+    return self._payload_dir
+
+  def _GetPayloadPattern(self):
+    """The regex pattern that the payload format must match.
+
+    Returns:
+      Regular expression.
+    """
+    return r'.*/(R[0-9]+-)(?P<image_version>.+)'
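+  # Illustrative match (board name is hypothetical): a payload_dir such as
+  # 'eve-release/R79-12345.6.0' yields image_version '12345.6.0'.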
+
+  def _RemoteDevserverCall(self, cmd, stdout=False):
+    """Runs a command on a remote devserver by sshing into it.
+
+    Raises cros_build_lib.RunCommandError() if the command could not be run
+    successfully.
+
+    Args:
+      cmd: (list) the command to be run.
+      stdout: True if the stdout of the command should be captured.
+    """
+    ip = urllib.parse.urlparse(self._staging_server).hostname
+    return cros_build_lib.run(['ssh', ip] + cmd, log_output=True, stdout=stdout)
+
+  def _CheckPayloads(self, payload_name):
+    """Runs the curl command that checks if payloads have been staged."""
+    payload_url = self._GetStagedUrl(staged_filename=payload_name,
+                                     build_id=self._payload_dir)
+    cmd = ['curl', '-I', payload_url, '--fail']
+    try:
+      self._RemoteDevserverCall(cmd)
+    except cros_build_lib.RunCommandError as e:
+      raise ChromiumOSTransferError(
+          'Could not verify if %s was staged at %s. Received exception: %s' %
+          (payload_name, payload_url, e))
+
+  def CheckPayloads(self):
+    """Verify that all required payloads are staged on staging server."""
+    logging.debug('Checking if payloads have been staged on server %s...',
+                  self._staging_server)
+
+    if self._transfer_rootfs_update:
+      self._CheckPayloads(self._payload_name)
+      self._CheckPayloads(GetPayloadPropertiesFileName(self._payload_name))
+
+    if self._transfer_stateful_update:
+      self._CheckPayloads(STATEFUL_FILENAME)
+
+  def _GetStagedUrl(self, staged_filename, build_id=None):
+    """Returns a valid url to check availability of staged files.
+
+    Args:
+      staged_filename: Name of the staged file.
+      build_id: This is the path at which the needed file can be found. It
+        is usually of the format <board_name>-release/R79-12345.6.0. By default,
+        the path is set to None.
+
+    Returns:
+      A URL in the format:
+        http://<ip>:<port>/static/<board>-release/<version>/<staged_filename>
+    """
+    # Formulate the download URL out of components.
+    url = urllib.parse.urljoin(self._staging_server, 'static/')
+    if build_id:
+      # Add slash at the end of image_name if necessary.
+      if not build_id.endswith('/'):
+        build_id = build_id + '/'
+      url = urllib.parse.urljoin(url, build_id)
+    return urllib.parse.urljoin(url, staged_filename)
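+  # Illustrative example (host and board are hypothetical): with
+  # staging_server 'http://100.0.0.1:8082' and build_id
+  # 'eve-release/R79-12345.6.0', _GetStagedUrl('stateful.tgz', build_id)
+  # returns
+  # 'http://100.0.0.1:8082/static/eve-release/R79-12345.6.0/stateful.tgz'.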
+
+  def _GetCurlCmdForPayloadDownload(self, payload_dir, payload_filename,
+                                    build_id=None):
+    """Returns a valid curl command to download payloads into device tmp dir.
+
+    Args:
+      payload_dir: Path to the payload directory on the device.
+      payload_filename: Name of the file by which the downloaded payload should
+        be saved. This is assumed to be the same as the name of the payload.
+      build_id: This is the path at which the needed payload can be found. It
+        is usually of the format <board_name>-release/R79-12345.6.0. By default,
+        the path is set to None.
+
+    Returns:
+      A fully formed curl command in the format:
+        ['curl', '-o', '<path where payload should be saved>',
+         '<payload download URL>']
+    """
+    return ['curl', '-o', os.path.join(payload_dir, payload_filename),
+            self._GetStagedUrl(payload_filename, build_id)]
+
+  def _TransferUpdateUtilsPackage(self):
+    """Transfer update-utils package to work directory of the remote device.
+
+    The update-utils package will be transferred to the device from the
+    staging server via curl.
+    """
+    logging.notice('Copying update script to device...')
+    source_dir = os.path.join(self._device.work_dir, 'src')
+    self._EnsureDeviceDirectory(source_dir)
+
+    self._device.run(self._GetCurlCmdForPayloadDownload(
+        payload_dir=source_dir,
+        payload_filename=nebraska_wrapper.NEBRASKA_FILENAME))
+
+    # Make sure the device.work_dir exists after any installation and reboot.
+    self._EnsureDeviceDirectory(self._device.work_dir)
+
+  def _TransferStatefulUpdate(self):
+    """Transfer files for stateful update.
+
+    The stateful update bin and the corresponding payloads are copied to the
+    target remote device for stateful update from the staging server via curl.
+    """
+    self._EnsureDeviceDirectory(self._device_payload_dir)
+
+    # TODO(crbug.com/1024639): Another way to make the payloads available is
+    # to make update_engine download it directly from the staging_server. This
+    # will avoid a disk copy but has the potential to be harder to debug if
+    # update engine does not report the error clearly.
+
+    logging.notice('Copying target stateful payload to device...')
+    self._device.run(self._GetCurlCmdForPayloadDownload(
+        payload_dir=self._device.work_dir, build_id=self._payload_dir,
+        payload_filename=STATEFUL_FILENAME))
+
+  def _TransferRootfsUpdate(self):
+    """Transfer files for rootfs update.
+
+    Copy the update payload to the remote device for rootfs update from the
+    staging server via curl.
+    """
+    self._EnsureDeviceDirectory(self._device_payload_dir)
+
+    logging.notice('Copying rootfs payload to device...')
+
+    # TODO(crbug.com/1024639): Another way to make the payloads available is
+    # to make update_engine download it directly from the staging_server. This
+    # will avoid a disk copy but has the potential to be harder to debug if
+    # update engine does not report the error clearly.
+
+    self._device.run(self._GetCurlCmdForPayloadDownload(
+        payload_dir=self._device_payload_dir, build_id=self._payload_dir,
+        payload_filename=self._payload_name))
+
+    self._device.CopyToWorkDir(src=self._local_payload_props_path,
+                               dest=self.PAYLOAD_DIR_NAME,
+                               mode=self._payload_mode,
+                               log_output=True, **self._cmd_kwargs)
+
+  def GetPayloadPropsFile(self):
+    """Downloads the PayloadProperties file onto the drone.
+
+    The payload properties file may be required to be updated in
+    auto_updater.ResolveAPPIDMismatchIfAny(). Download the file from where it
+    has been staged on the staging server into the tempdir of the drone, so that
+    the file is available locally for any updates.
+    """
+    if self._local_payload_props_path is None:
+      payload_props_filename = GetPayloadPropertiesFileName(self._payload_name)
+      payload_props_path = os.path.join(self._tempdir, payload_props_filename)
+
+      # Get command to retrieve contents of the properties file.
+      cmd = ['curl',
+             self._GetStagedUrl(payload_props_filename, self._payload_dir)]
+      try:
+        result = self._RemoteDevserverCall(cmd, stdout=True)
+        json.loads(result.output)
+        osutils.WriteFile(payload_props_path, result.output, 'wb',
+                          makedirs=True)
+      except cros_build_lib.RunCommandError as e:
+        raise ChromiumOSTransferError(
+            'Unable to get payload properties file by running %s due to '
+            'exception: %s.' % (' '.join(cmd), e))
+      except ValueError:
+        raise ChromiumOSTransferError(
+            'Could not create %s as %s not valid json.' %
+            (payload_props_path, result.output))
+
+      self._local_payload_props_path = payload_props_path
+    return self._local_payload_props_path
+
+  def _GetPayloadSize(self):
+    """Returns the size of the payload by running a curl -I command.
+
+    Returns:
+      Payload size in bytes.
+    """
+    payload_url = self._GetStagedUrl(staged_filename=self._payload_name,
+                                     build_id=self._payload_dir)
+    cmd = ['curl', '-I', payload_url, '--fail']
+    try:
+      proc = self._RemoteDevserverCall(cmd, stdout=True)
+    except cros_build_lib.RunCommandError as e:
+      raise ChromiumOSTransferError(
+          'Unable to get payload size by running command %s due to exception: '
+          '%s.' % (' '.join(cmd), e))
+
+    pattern = re.compile(r'Content-Length: [0-9]+', re.I)
+    match = pattern.findall(str(proc.output))
+    if not match:
+      raise ChromiumOSTransferError('Could not get payload size from output: '
+                                    '%s ' % proc.output)
+    return int(match[0].split()[1].strip())
+
+  def GetPayloadProps(self):
+    """Gets image_version from the payload_dir name and gets payload size.
+
+    The payload_dir must be in the format <board>/Rxx-12345.0.0 for a complete
+    match; else a ValueError will be raised.
+
+    Returns:
+      Dict - See parent class's function for full details.
+    """
+    values = {'size': self._GetPayloadSize()}
+    payload_format = self._GetPayloadFormat()
+    payload_pattern = self._GetPayloadPattern()
+    m = re.match(payload_pattern, payload_format)
+    if not m:
+      raise ValueError('Regular expression %r did not match the expected '
+                       'payload format %s' % (payload_pattern, payload_format))
+    values.update(m.groupdict())
+    return values
+
+
+class LabEndToEndPayloadTransfer(LabTransfer):
+  """Abstracts logic that transfers files from staging server to the DUT.
+
+  TODO(crbug.com/1061570): AutoUpdate_endToEnd tests stage their payloads in a
+  different location on the devserver in comparison to the provision_AutoUpdate
+  test. Since we are removing the use of the cros_au RPC (see crbug.com/1049708
+  and go/devserver-deprecation) from the EndToEnd tests, it is necessary to
+  extend LabTransfer class to support this new payload staging location.
+  Ideally, the URL at which the payload is staged should be abstracted from the
+  actual transfer of payloads.
+  """
+
+  def _GetPayloadFormat(self):
+    """Gets the payload format that should be evaluated.
+
+    Returns:
+      The payload name as a string.
+    """
+    return self._payload_name
+
+  def _GetPayloadPattern(self):
+    """The regex pattern that the payload format must match.
+
+    Returns:
+      Regular expression.
+    """
+    if "payloads/" in self._GetPayloadFormat():
+      # Ex: payloads/chromeos_14698.0.0_octopus_dev-channel_full_test.bin-gyzdkobygyzdck3swpkou632wan55vgx
+      return _PAYLOAD_PATTERN
+    else:
+      # Ex: chromeos_R102-14692.0.0_octopus_full_dev.bin
+      return r'.*(R[0-9]+-)(?P<image_version>.+)'
+
+  def _GetCurlCmdForPayloadDownload(self, payload_dir, payload_filename,
+                                    build_id=None):
+    """Returns a valid curl command to download payloads into device tmp dir.
+
+    Args:
+      payload_dir: Path to the payload directory on the device.
+      payload_filename: Name of the file by which the downloaded payload should
+        be saved. This is assumed to be the same as the name of the payload.
+        If the payload_filename is in the format payloads/<file_name>, the
+        'payloads/' prefix is removed while saving the file, since the files
+        need to be saved in specific directories for their subsequent
+        installation. Keeping 'payloads/' at the beginning of the
+        payload_filename adds an extra directory that breaks the installation.
+      build_id: This is the path at which the needed payload can be found. It
+        is usually of the format <board_name>-release/R79-12345.6.0. By default,
+        the path is set to None.
+
+    Returns:
+      A fully formed curl command in the format:
+      ['curl', '-o', '<path where payload should be saved>',
+      '<payload download URL>']
+    """
+    saved_filename = payload_filename
+    if saved_filename.startswith('payloads/'):
+      saved_filename = '/'.join(saved_filename.split('/')[1:])
+    cmd = ['curl', '-o', os.path.join(payload_dir, saved_filename),
+           self._GetStagedUrl(payload_filename, build_id)]
+    return cmd
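+  # Illustrative example: a payload_filename of
+  # 'payloads/chromeos_14698.0.0_octopus_dev-channel_full_test.bin-...' is
+  # fetched from its staged URL but saved, without the 'payloads/' prefix, as
+  # 'chromeos_14698.0.0_octopus_dev-channel_full_test.bin-...' in payload_dir.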
+
+  def _TransferUpdateUtilsPackage(self):
+    """Transfer update-utils package to work directory of the remote device."""
+    try:
+      logging.notice('Copying update script to device from googlesource...')
+      source_dir = os.path.join(self._tempdir, 'src')
+      osutils.SafeMakedirs(source_dir)
+      nebraska_wrapper.RemoteNebraskaWrapper.GetNebraskaSrcFile(
+          source_dir, force_download=True)
+
+      # Make sure the device.work_dir exists after any installation and reboot.
+      self._EnsureDeviceDirectory(self._device.work_dir)
+      # Python packages are plain text files.
+      self._device.CopyToWorkDir(source_dir, mode=_SCP, log_output=True,
+                                 **self._cmd_kwargs)
+    except Exception as e:
+      logging.exception('Falling back to getting nebraska from devserver')
+      super(LabEndToEndPayloadTransfer, self)._TransferUpdateUtilsPackage()
diff --git a/utils/frozen_chromite/lib/buildbot_annotations.py b/utils/frozen_chromite/lib/buildbot_annotations.py
new file mode 100644
index 0000000..079f5ab
--- /dev/null
+++ b/utils/frozen_chromite/lib/buildbot_annotations.py
@@ -0,0 +1,119 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions and classes for formatting buildbot stage annotations."""
+
+from __future__ import print_function
+
+import abc
+import itertools
+import json
+
+import six
+
+
+class Annotation(object):
+  """Formatted annotation for buildbot."""
+
+  def __init__(self, name, args):
+    """Initialize instance.
+
+    Args:
+      name: Annotation name.
+      args: A sequence of string arguments.
+    """
+    self.name = name
+    self.args = args
+
+  def __str__(self):
+    inner_text = '@'.join(
+        _EscapeArgText(text)
+        for text in itertools.chain([self.name], self.args)
+    )
+    return '@@@%s@@@' % (inner_text,)
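+  # For example,
+  #   str(Annotation('STEP_LINK', ['results', 'http://example.com']))
+  # renders as '@@@STEP_LINK@results@http://example.com@@@'.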
+
+  @property
+  def human_friendly(self):
+    """Human-friendly format."""
+    if self.args:
+      return '%s: %s' % (self.name, '; '.join(self.args))
+    else:
+      return self.name
+
+
+@six.add_metaclass(abc.ABCMeta)
+class _NamedAnnotation(Annotation):
+  """Abstract subclass for creating named annotations.
+
+  Concrete subclasses should define the ANNOTATION_NAME class attribute.
+  """
+
+  def __init__(self, *args):
+    super(_NamedAnnotation, self).__init__(self.ANNOTATION_NAME, args)
+
+  @abc.abstractproperty
+  def ANNOTATION_NAME(self):
+    raise NotImplementedError()
+
+
+class StepLink(_NamedAnnotation):
+  """STEP_LINK annotation."""
+  ANNOTATION_NAME = 'STEP_LINK'
+
+  # Some callers pass in text/url by kwarg.  We leave the full signature here
+  # so the API is a bit cleaner/more obvious.
+  # pylint: disable=useless-super-delegation
+  def __init__(self, text, url):
+    super(StepLink, self).__init__(text, url)
+
+
+class StepText(_NamedAnnotation):
+  """STEP_TEXT annotation."""
+  ANNOTATION_NAME = 'STEP_TEXT'
+
+
+class StepWarnings(_NamedAnnotation):
+  """STEP_WARNINGS annotation."""
+  ANNOTATION_NAME = 'STEP_WARNINGS'
+
+
+class StepFailure(_NamedAnnotation):
+  """STEP_FAILURE annotation."""
+  ANNOTATION_NAME = 'STEP_FAILURE'
+
+
+class BuildStep(_NamedAnnotation):
+  """BUILD_STEP annotation."""
+  ANNOTATION_NAME = 'BUILD_STEP'
+
+
+class SetBuildProperty(_NamedAnnotation):
+  """SET_BUILD_PROPERTY annotation."""
+  ANNOTATION_NAME = 'SET_BUILD_PROPERTY'
+
+  def __init__(self, name, value):
+    super(SetBuildProperty, self).__init__(name, json.dumps(value))
+
+
+class SetEmailNotifyProperty(_NamedAnnotation):
+  """SET_BUILD_PROPERTY annotation for email_notify."""
+  ANNOTATION_NAME = 'SET_BUILD_PROPERTY'
+
+  def __init__(self, name, value):
+    super(SetEmailNotifyProperty, self).__init__(name, json.dumps(value))
+
+  def __str__(self):
+    inner_text = '@'.join(
+        text for text in itertools.chain([self.name], self.args))
+    return '@@@%s@@@' % (inner_text)
+
+
+def _EscapeArgText(text):
+  """Escape annotation argument text.
+
+  Args:
+    text: String to escape.
+  """
+  return text.replace('@', '-AT-')
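+# For example, _EscapeArgText('user@host') returns 'user-AT-host', keeping
+# literal '@' characters out of the @@@...@@@ annotation framing.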
diff --git a/utils/frozen_chromite/lib/cache.py b/utils/frozen_chromite/lib/cache.py
new file mode 100644
index 0000000..a34175e
--- /dev/null
+++ b/utils/frozen_chromite/lib/cache.py
@@ -0,0 +1,375 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains on-disk caching functionality."""
+
+from __future__ import print_function
+
+import datetime
+import errno
+import os
+import shutil
+import tempfile
+
+from six.moves import urllib
+
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import locking
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import retry_util
+
+
+# pylint: disable=protected-access
+
+
+def EntryLock(f):
+  """Decorator that provides monitor access control."""
+
+  def new_f(self, *args, **kwargs):
+    # Ensure we don't have a read lock before potentially blocking while trying
+    # to access the monitor.
+    if self.read_locked:
+      raise AssertionError(
+          'Cannot call %s while holding a read lock.' % f.__name__)
+
+    with self._entry_lock:
+      self._entry_lock.write_lock()
+      return f(self, *args, **kwargs)
+  return new_f
+
+
+def WriteLock(f):
+  """Decorator that takes a write lock."""
+
+  def new_f(self, *args, **kwargs):
+    with self._lock.write_lock():
+      return f(self, *args, **kwargs)
+  return new_f
+
+
+class CacheReference(object):
+  """Encapsulates operations on a cache key reference.
+
+  CacheReferences are returned by the DiskCache.Lookup() function.  They are
+  used to read from and insert into the cache.
+
+  A typical example of using a CacheReference:
+
+  @contextlib.contextmanager
+  def FetchFromCache()
+    with cache.Lookup(key) as ref:
+       # If entry doesn't exist in cache already, generate it ourselves, and
+       # insert it into the cache, acquiring a read lock on it in the process.
+       # If the entry does exist, we grab a read lock on it.
+      if not ref.Exists(lock=True):
+        path = PrepareItem()
+        ref.SetDefault(path, lock=True)
+
+      # yield the path to the cached entry to consuming code.
+      yield ref.path
+  """
+
+  def __init__(self, cache, key):
+    self._cache = cache
+    self.key = key
+    self.acquired = False
+    self.read_locked = False
+    self._lock = cache._LockForKey(key)
+    self._entry_lock = cache._LockForKey(key, suffix='.entry_lock')
+
+  @property
+  def path(self):
+    """Returns on-disk path to the cached item."""
+    return self._cache.GetKeyPath(self.key)
+
+  def Acquire(self):
+    """Prepare the cache reference for operation.
+
+    This must be called (either explicitly or through entering a 'with'
+    context) before calling any methods that acquire locks, or mutates
+    reference.
+    """
+    if self.acquired:
+      raise AssertionError(
+          'Attempting to acquire an already acquired reference.')
+
+    self.acquired = True
+    self._lock.__enter__()
+
+  def Release(self):
+    """Release the cache reference.  Causes any held locks to be released."""
+    if not self.acquired:
+      raise AssertionError(
+          'Attempting to release an unacquired reference.')
+
+    self.acquired = False
+    self._lock.__exit__(None, None, None)
+    self.read_locked = False
+
+  def __enter__(self):
+    self.Acquire()
+    return self
+
+  def __exit__(self, *args):
+    self.Release()
+
+  def _ReadLock(self):
+    self._lock.read_lock()
+    self.read_locked = True
+
+  @WriteLock
+  def _Assign(self, path):
+    self._cache._Insert(self.key, path)
+
+  @WriteLock
+  def _AssignText(self, text):
+    self._cache._InsertText(self.key, text)
+
+  @WriteLock
+  def _Remove(self):
+    self._cache._Remove(self.key)
+    osutils.SafeUnlink(self._lock.path)
+    osutils.SafeUnlink(self._entry_lock.path)
+
+  def _Exists(self):
+    return self._cache._KeyExists(self.key)
+
+  @EntryLock
+  def Assign(self, path):
+    """Insert a file or a directory into the cache at the referenced key."""
+    self._Assign(path)
+
+  @EntryLock
+  def AssignText(self, text):
+    """Create a file containing |text| and assign it to the key.
+
+    Args:
+      text: Can be a string or an iterable.
+    """
+    self._AssignText(text)
+
+  @EntryLock
+  def Remove(self):
+    """Removes the entry from the cache."""
+    self._Remove()
+
+  @EntryLock
+  def Exists(self, lock=False):
+    """Tests for existence of entry.
+
+    Args:
+      lock: If the entry exists, acquire and maintain a read lock on it.
+    """
+    if self._Exists():
+      if lock:
+        self._ReadLock()
+      return True
+    return False
+
+  @EntryLock
+  def SetDefault(self, default_path, lock=False):
+    """Assigns default_path if the entry doesn't exist.
+
+    Args:
+      default_path: The path to assign if the entry doesn't exist.
+      lock: Acquire and maintain a read lock on the entry.
+    """
+    if not self._Exists():
+      self._Assign(default_path)
+    if lock:
+      self._ReadLock()
+
+
+class DiskCache(object):
+  """Locked file system cache keyed by tuples.
+
+  Key entries can be files or directories.  Access to the cache is provided
+  through CacheReferences, which are retrieved by using the cache Lookup()
+  method.
+  """
+  _STAGING_DIR = 'staging'
+
+  def __init__(self, cache_dir, cache_user=None, lock_suffix='.lock'):
+    self._cache_dir = cache_dir
+    self._cache_user = cache_user
+    self._lock_suffix = lock_suffix
+    self.staging_dir = os.path.join(cache_dir, self._STAGING_DIR)
+
+    osutils.SafeMakedirsNonRoot(self._cache_dir, user=self._cache_user)
+    osutils.SafeMakedirsNonRoot(self.staging_dir, user=self._cache_user)
+
+  def _KeyExists(self, key):
+    return os.path.lexists(self.GetKeyPath(key))
+
+  def GetKeyPath(self, key):
+    """Get the on-disk path of a key."""
+    return os.path.join(self._cache_dir, '+'.join(key))
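+  # For example, with a cache_dir of '/tmp/cache' (hypothetical), the key
+  # ('gsutil', '4.9') maps to '/tmp/cache/gsutil+4.9'.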
+
+  def _LockForKey(self, key, suffix=None):
+    """Returns an unacquired lock associated with a key."""
+    suffix = suffix or self._lock_suffix
+    key_path = self.GetKeyPath(key)
+    osutils.SafeMakedirsNonRoot(os.path.dirname(key_path),
+                                user=self._cache_user)
+    lock_path = os.path.join(self._cache_dir, os.path.dirname(key_path),
+                             os.path.basename(key_path) + suffix)
+    return locking.FileLock(lock_path)
+
+  def _TempDirContext(self):
+    return osutils.TempDir(base_dir=self.staging_dir)
+
+  def _Insert(self, key, path):
+    """Insert a file or a directory into the cache at a given key."""
+    self._Remove(key)
+    key_path = self.GetKeyPath(key)
+    osutils.SafeMakedirsNonRoot(os.path.dirname(key_path),
+                                user=self._cache_user)
+    shutil.move(path, key_path)
+
+  def _InsertText(self, key, text):
+    """Inserts a file containing |text| into the cache."""
+    with self._TempDirContext() as tempdir:
+      file_path = os.path.join(tempdir, 'tempfile')
+      osutils.WriteFile(file_path, text)
+      self._Insert(key, file_path)
+
+  def _Remove(self, key):
+    """Remove a key from the cache."""
+    if self._KeyExists(key):
+      with self._TempDirContext() as tempdir:
+        shutil.move(self.GetKeyPath(key), tempdir)
+
+  def GetKey(self, path):
+    """Returns the key for an item's path in the cache."""
+    if self._cache_dir in path:
+      path = os.path.relpath(path, self._cache_dir)
+    return tuple(path.split('+'))
+
+  def ListKeys(self):
+    """Returns a list of keys for every item present in the cache."""
+    keys = []
+    for root, dirs, files in os.walk(self._cache_dir):
+      for f in dirs + files:
+        key_path = os.path.join(root, f)
+        if os.path.exists(key_path + self._lock_suffix):
+          # Test for the presence of the key's lock file to determine if this
+          # is the root key path, or some file nested within a key's dir.
+          keys.append(self.GetKey(key_path))
+    return keys
+
+  def Lookup(self, key):
+    """Get a reference to a given key."""
+    return CacheReference(self, key)
+
+  def DeleteStale(self, max_age):
+    """Removes any item from the cache that was modified after a given lifetime.
+
+    Args:
+      max_age: An instance of datetime.timedelta. Any item not modified within
+          this amount of time will be removed.
+
+    Returns:
+      List of keys removed.
+    """
+    if not isinstance(max_age, datetime.timedelta):
+      raise TypeError('max_age must be an instance of datetime.timedelta.')
+    keys_removed = []
+    for key in self.ListKeys():
+      path = self.GetKeyPath(key)
+      mtime = max(os.path.getmtime(path), os.path.getctime(path))
+      time_since_last_modify = (
+          datetime.datetime.now() - datetime.datetime.fromtimestamp(mtime))
+      if time_since_last_modify > max_age:
+        self.Lookup(key).Remove()
+        keys_removed.append(key)
+    return keys_removed
+
+
+class RemoteCache(DiskCache):
+  """Supports caching of remote objects via URI."""
+
+  def _Fetch(self, url, local_path):
+    """Fetch a remote file."""
+    # We have to nest the import because gs.GSContext uses us to cache its own
+    # gsutil tarball.  We know we won't get into a recursive loop though as it
+    # only fetches files via non-gs URIs.
+    from autotest_lib.utils.frozen_chromite.lib import gs
+
+    if gs.PathIsGs(url):
+      ctx = gs.GSContext()
+      ctx.Copy(url, local_path)
+    else:
+      # Note: unittests assume local_path is at the end.
+      retry_util.RunCurl(['--fail', url, '-o', local_path],
+                         debug_level=logging.DEBUG, capture_output=True)
+
+  def _Insert(self, key, url):  # pylint: disable=arguments-differ
+    """Insert a remote file into the cache."""
+    o = urllib.parse.urlparse(url)
+    if o.scheme in ('file', ''):
+      DiskCache._Insert(self, key, o.path)
+      return
+
+    with tempfile.NamedTemporaryFile(dir=self.staging_dir,
+                                     delete=False) as local_path:
+      self._Fetch(url, local_path.name)
+      DiskCache._Insert(self, key, local_path.name)
+
+
+def Untar(path, cwd, sudo=False):
+  """Untar a tarball."""
+  functor = cros_build_lib.sudo_run if sudo else cros_build_lib.run
+  comp = cros_build_lib.CompressionExtToType(path)
+  cmd = ['tar']
+  if comp != cros_build_lib.COMP_NONE:
+    cmd += ['-I', cros_build_lib.FindCompressor(comp)]
+  functor(cmd + ['-xpf', path], cwd=cwd, debug_level=logging.DEBUG, quiet=True)
+
+
+class TarballCache(RemoteCache):
+  """Supports caching of extracted tarball contents."""
+
+  def _Insert(self, key, tarball_path):  # pylint: disable=arguments-differ
+    """Insert a tarball and its extracted contents into the cache.
+
+    Download the tarball first if a URL is provided as tarball_path.
+    """
+    with osutils.TempDir(prefix='tarball-cache',
+                         base_dir=self.staging_dir) as tempdir:
+
+      o = urllib.parse.urlsplit(tarball_path)
+      if o.scheme == 'file':
+        tarball_path = o.path
+      elif o.scheme:
+        url = tarball_path
+        tarball_path = os.path.join(tempdir, os.path.basename(o.path))
+        self._Fetch(url, tarball_path)
+
+      extract_path = os.path.join(tempdir, 'extract')
+      os.mkdir(extract_path)
+      Untar(tarball_path, extract_path)
+      DiskCache._Insert(self, key, extract_path)
+
+  def _KeyExists(self, key):
+    """Specialized DiskCache._KeyExits that ignores empty directories.
+
+    The normal _KeyExists just checks to see if the key path exists in the cache
+    directory. Many tests mock out run then fetch a tarball. The mock
+    blocks untarring into it. This leaves behind an empty dir which blocks
+    future untarring in non-test scripts.
+
+    See crbug.com/468838
+    """
+    # Wipe out empty directories before testing for existence.
+    key_path = self.GetKeyPath(key)
+
+    try:
+      os.rmdir(key_path)
+    except OSError as ex:
+      if ex.errno not in (errno.ENOTEMPTY, errno.ENOENT):
+        raise
+
+    return os.path.exists(key_path)
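+
+
+# The function below is a minimal usage sketch of the cache API above.  The
+# cache directory, key, and contents are illustrative assumptions, not part of
+# the original chromite module.
+def _ExampleDiskCacheUsage():  # pragma: no cover
+  """Sketch: populate a DiskCache entry and age out stale entries."""
+  disk_cache = DiskCache(tempfile.mkdtemp(prefix='cache-example'))
+  ref = disk_cache.Lookup(('example', 'v1'))
+  if not ref.Exists():
+    # Keys map to files or directories; AssignText writes a small file.
+    ref.AssignText('hello cache\n')
+  # Purge anything untouched for more than a week.
+  disk_cache.DeleteStale(datetime.timedelta(days=7))
+  return ref.path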
diff --git a/utils/frozen_chromite/lib/cipd.py b/utils/frozen_chromite/lib/cipd.py
new file mode 100644
index 0000000..9ab375f
--- /dev/null
+++ b/utils/frozen_chromite/lib/cipd.py
@@ -0,0 +1,266 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module to download and run the CIPD client.
+
+CIPD is the Chrome Infra Package Deployer, a simple method of resolving a
+package/version into a Google Storage link and installing it.
+"""
+
+from __future__ import print_function
+
+import hashlib
+import json
+import os
+import pprint
+import tempfile
+
+import httplib2
+from six.moves import urllib
+
+import autotest_lib.utils.frozen_chromite.lib.cros_logging as log
+from autotest_lib.utils.frozen_chromite.lib import cache
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import path_util
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.utils import memoize
+
+# pylint: disable=line-too-long
+# CIPD client to download.
+#
+# This is version "git_revision:db7a486094873e3944b8e27ab5b23a3ae3c401e7".
+#
+# To switch to another version:
+#   1. Find it in CIPD Web UI, e.g.
+#      https://chrome-infra-packages.appspot.com/p/infra/tools/cipd/linux-amd64/+/latest
+#   2. Look up SHA256 there.
+# pylint: enable=line-too-long
+CIPD_CLIENT_PACKAGE = 'infra/tools/cipd/linux-amd64'
+CIPD_CLIENT_SHA256 = (
+    'ea6b7547ddd316f32fd9974f598949c3f8f22f6beb8c260370242d0d84825162')
+
+CHROME_INFRA_PACKAGES_API_BASE = (
+    'https://chrome-infra-packages.appspot.com/prpc/cipd.Repository/')
+
+
+class Error(Exception):
+  """Raised on fatal errors."""
+
+
+def _ChromeInfraRequest(method, request):
+  """Makes a request to the Chrome Infra Packages API with httplib2.
+
+  Args:
+    method: Name of RPC method to call.
+    request: RPC request body.
+
+  Returns:
+    Deserialized RPC response body.
+  """
+  resp, body = httplib2.Http().request(
+      uri=CHROME_INFRA_PACKAGES_API_BASE+method,
+      method='POST',
+      headers={
+          'Accept': 'application/json',
+          'Content-Type': 'application/json',
+          'User-Agent': 'chromite',
+      },
+      body=json.dumps(request))
+  if resp.status != 200:
+    raise Error('Got HTTP %d from CIPD %r: %s' % (resp.status, method, body))
+  try:
+    return json.loads(body.lstrip(b")]}'\n"))
+  except ValueError:
+    raise Error('Bad response from CIPD server:\n%s' % (body,))
+
+
+def _DownloadCIPD(instance_sha256):
+  """Finds the CIPD download link and requests the binary.
+
+  Args:
+    instance_sha256: The version of CIPD client to download.
+
+  Returns:
+    The CIPD binary as a string.
+  """
+  # Grab the signed URL to fetch the client binary from.
+  resp = _ChromeInfraRequest('DescribeClient', {
+      'package': CIPD_CLIENT_PACKAGE,
+      'instance': {
+          'hashAlgo': 'SHA256',
+          'hexDigest': instance_sha256,
+      },
+  })
+  if 'clientBinary' not in resp:
+    log.error(
+        'Error requesting the link to download CIPD from. Got:\n%s',
+        pprint.pformat(resp))
+    raise Error('Failed to bootstrap CIPD client')
+
+  # Download the actual binary.
+  http = httplib2.Http(cache=None)
+  response, binary = http.request(uri=resp['clientBinary']['signedUrl'])
+  if response.status != 200:
+    raise Error('Got a %d response from Google Storage.' % response.status)
+
+  # Check SHA256 matches what server expects.
+  digest = hashlib.sha256(binary).hexdigest()
+  for alias in resp['clientRefAliases']:
+    if alias['hashAlgo'] == 'SHA256':
+      if digest != alias['hexDigest']:
+        raise Error(
+            'Unexpected CIPD client SHA256: got %s, want %s' %
+            (digest, alias['hexDigest']))
+      break
+  else:
+    raise Error("CIPD server didn't provide expected SHA256")
+
+  return binary
+
+
+class CipdCache(cache.RemoteCache):
+  """Supports caching of the CIPD download."""
+  def _Fetch(self, url, local_path):
+    instance_sha256 = urllib.parse.urlparse(url).netloc
+    binary = _DownloadCIPD(instance_sha256)
+    log.info('Fetched CIPD package %s:%s', CIPD_CLIENT_PACKAGE, instance_sha256)
+    osutils.WriteFile(local_path, binary, mode='wb')
+    os.chmod(local_path, 0o755)
+
+
+def GetCIPDFromCache():
+  """Checks the cache, downloading CIPD if it is missing.
+
+  Returns:
+    Path to the CIPD binary.
+  """
+  cache_dir = os.path.join(path_util.GetCacheDir(), 'cipd')
+  bin_cache = CipdCache(cache_dir)
+  key = (CIPD_CLIENT_SHA256,)
+  ref = bin_cache.Lookup(key)
+  ref.SetDefault('cipd://' + CIPD_CLIENT_SHA256)
+  return ref.path
+
+
+def GetInstanceID(cipd_path, package, version, service_account_json=None):
+  """Get the latest instance ID for ref latest.
+
+  Args:
+    cipd_path: The path to a cipd executable. GetCIPDFromCache can give this.
+    package: A string package name.
+    version: A string version of package.
+    service_account_json: The path of the service account credentials.
+
+  Returns:
+    A string instance ID.
+  """
+  service_account_flag = []
+  if service_account_json:
+    service_account_flag = ['-service-account-json', service_account_json]
+
+  result = cros_build_lib.run(
+      [cipd_path, 'resolve', package, '-version', version] +
+      service_account_flag, capture_output=True, encoding='utf-8')
+  # An example output of resolve is like:
+  #   Packages:\n package:instance_id
+  return result.output.splitlines()[-1].split(':')[-1]
+
+
+@memoize.Memoize
+def InstallPackage(cipd_path, package, instance_id, destination,
+                   service_account_json=None):
+  """Installs a package at a given destination using cipd.
+
+  Args:
+    cipd_path: The path to a cipd executable. GetCIPDFromCache can give this.
+    package: A package name.
+    instance_id: The version of the package to install.
+    destination: The folder to install the package under.
+    service_account_json: The path of the service account credentials.
+
+  Returns:
+    The path of the package.
+  """
+  destination = os.path.join(destination, package)
+
+  service_account_flag = []
+  if service_account_json:
+    service_account_flag = ['-service-account-json', service_account_json]
+
+  with tempfile.NamedTemporaryFile() as f:
+    f.write(('%s %s' % (package, instance_id)).encode('utf-8'))
+    f.flush()
+
+    cros_build_lib.run(
+        [cipd_path, 'ensure', '-root', destination, '-list', f.name]
+        + service_account_flag,
+        capture_output=True)
+
+  return destination
+
+
+def CreatePackage(cipd_path, package, in_dir, tags, refs,
+                  cred_path=None):
+  """Create (build and register) a package using cipd.
+
+  Args:
+    cipd_path: The path to a cipd executable. GetCIPDFromCache can give this.
+    package: A package name.
+    in_dir: The directory to create the package from.
+    tags: A mapping of tags to apply to the package.
+    refs: An Iterable of refs to apply to the package.
+    cred_path: The path of the service account credentials.
+  """
+  args = [
+      cipd_path, 'create',
+      '-name', package,
+      '-in', in_dir,
+  ]
+  for key, value in tags.items():
+    args.extend(['-tag', '%s:%s' % (key, value)])
+  for ref in refs:
+    args.extend(['-ref', ref])
+  if cred_path:
+    args.extend(['-service-account-json', cred_path])
+
+  cros_build_lib.run(args, capture_output=True)
+
+
+def BuildPackage(cipd_path, package, in_dir, outfile):
+  """Build a package using cipd.
+
+  Args:
+    cipd_path: The path to a cipd executable. GetCIPDFromCache can give this.
+    package: A package name.
+    in_dir: The directory to create the package from.
+    outfile: Output file.  Should have extension .cipd
+  """
+  args = [
+      cipd_path, 'pkg-build',
+      '-name', package,
+      '-in', in_dir,
+      '-out', outfile,
+  ]
+  cros_build_lib.run(args, capture_output=True)
+
+
+def RegisterPackage(cipd_path, package_file, tags, refs, cred_path=None):
+  """Register and upload a package using cipd.
+
+  Args:
+    cipd_path: The path to a cipd executable. GetCIPDFromCache can give this.
+    package_file: The path to a .cipd package file.
+    tags: A mapping of tags to apply to the package.
+    refs: An Iterable of refs to apply to the package.
+    cred_path: The path of the service account credentials.
+  """
+  args = [cipd_path, 'pkg-register', package_file]
+  for key, value in tags.items():
+    args.extend(['-tag', '%s:%s' % (key, value)])
+  for ref in refs:
+    args.extend(['-ref', ref])
+  if cred_path:
+    args.extend(['-service-account-json', cred_path])
+  cros_build_lib.run(args, capture_output=True)
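+
+
+# The function below is a minimal usage sketch chaining the helpers above; the
+# package name and destination directory are illustrative assumptions, not
+# part of the original chromite module.
+def _ExampleInstallFlow():  # pragma: no cover
+  """Sketch: bootstrap cipd, resolve a version, then install the instance."""
+  cipd_path = GetCIPDFromCache()
+  package = 'infra/tools/luci/vpython/linux-amd64'  # Example package name.
+  instance_id = GetInstanceID(cipd_path, package, 'latest')
+  return InstallPackage(cipd_path, package, instance_id,
+                        destination='/tmp/cipd-example')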
diff --git a/utils/frozen_chromite/lib/cloud_trace.py b/utils/frozen_chromite/lib/cloud_trace.py
new file mode 100644
index 0000000..9997c075
--- /dev/null
+++ b/utils/frozen_chromite/lib/cloud_trace.py
@@ -0,0 +1,320 @@
+# -*- coding: utf-8 -*-
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A library for emitting traces and spans to Google Cloud trace."""
+from __future__ import print_function
+
+import contextlib
+import errno
+import functools
+import json
+import os
+import random
+import re
+
+try:
+  from google.protobuf import timestamp_pb2
+except ImportError:
+  import google.protobuf.internal.well_known_types as timestamp_pb2
+
+from infra_libs import ts_mon
+
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as log
+from autotest_lib.utils.frozen_chromite.lib import metrics
+from autotest_lib.utils.frozen_chromite.lib import structured
+
+
+SPANS_LOG = '/var/log/trace/{pid}-{span_id}.json'
+_SPAN_COUNT_METRIC = 'chromeos/trace/client/logged_count'
+
+
+# --- Code for logging spans to a file for later processing. -------------------
+def GetSpanLogFilePath(span):
+  """Gets the path to write a span to.
+
+  Args:
+    span: The span to write.
+  """
+  return SPANS_LOG.format(pid=os.getpid(), span_id=span.spanId)
+
+
+def LogSpan(span):
+  """Serializes and logs a Span to a file.
+
+  Args:
+    span: A Span instance to serialize.
+  """
+  _RecordSpanMetrics(span)
+  try:
+    with open(GetSpanLogFilePath(span), 'w') as fh:
+      fh.write(json.dumps(span.ToDict()))
+  # Catch various configuration errors.
+  # TODO(vapier): Drop IOError when we're Python 3-only.
+  # pylint: disable=overlapping-except
+  except (OSError, IOError) as error:
+    if error.errno == errno.EPERM:
+      log.warning(
+          'Received permissions error while trying to open the span log file.')
+      return None
+    elif error.errno == errno.ENOENT:
+      log.warning('/var/log/trace does not exist; skipping trace log.')
+      return None
+    else:
+      raise
+
+
+def _RecordSpanMetrics(span):
+  """Increments the count of spans logged.
+
+  Args:
+    span: The span to record.
+  """
+  m = metrics.Counter(
+      _SPAN_COUNT_METRIC,
+      description='A count of spans logged by a client.',
+      field_spec=[ts_mon.StringField('name')])
+  m.increment(fields={'name': span.name})
+
+
+# -- User-facing API -----------------------------------------------------------
+class Span(structured.Structured):
+  """An object corresponding to a cloud trace Span."""
+
+  VISIBLE_KEYS = (
+      'name', 'spanId', 'parentSpanId', 'labels',
+      'startTime', 'endTime', 'status')
+
+  def __init__(self, name, spanId=None, labels=None, parentSpanId=None,
+               traceId=None):
+    """Creates a Span object.
+
+    Args:
+      name: The name of the span
+      spanId: (optional) A 64-bit number as a string. If not provided, it will
+          be generated randomly with .GenerateSpanId().
+      labels: (optional) a dict<string, string> of key/values
+      traceId: (optional) A 32 hex digit string referring to the trace
+          containing this span. If not provided, a new trace will be created
+          with a random id.
+      parentSpanId: (optional) The spanId of the parent.
+    """
+    # Visible attributes
+    self.name = name
+    self.spanId = spanId or Span.GenerateSpanId()
+    self.parentSpanId = parentSpanId
+    self.labels = labels or {}
+    self.startTime = None
+    self.endTime = None
+    # Non-visible attributes
+    self.traceId = traceId or Span.GenerateTraceId()
+
+  @staticmethod
+  def GenerateSpanId():
+    """Returns a random 64-bit number as a string."""
+    return str(random.randint(0, 2**64 - 1))
+
+  @staticmethod
+  def GenerateTraceId():
+    """Returns a random 128-bit number as a 32-byte hex string."""
+    id_number = random.randint(0, 2**128)
+    return '%0.32X' % id_number
+
+  def __enter__(self):
+    """Enters the span context.
+
+    Side effect: Records the start time as a Timestamp.
+    """
+    start = timestamp_pb2.Timestamp()
+    start.GetCurrentTime()
+    self.startTime = start.ToJsonString()
+    return self
+
+  def __exit__(self, _type, _value, _traceback):
+    """Exits the span context.
+
+    Side-effect:
+      Record the end Timestamp.
+    """
+    end = timestamp_pb2.Timestamp()
+    end.GetCurrentTime()
+    self.endTime = end.ToJsonString()
+
+
+class SpanStack(object):
+  """A stack of Span contexts."""
+
+  CLOUD_TRACE_CONTEXT_ENV = 'CLOUD_TRACE_CONTEXT'
+  CLOUD_TRACE_CONTEXT_PATTERN = re.compile(
+      r'(?P<traceId>[0-9A-Fa-f]+)'
+      r'/'
+      r'(?P<parentSpanId>\d+)'
+      r';o=(?P<options>\d+)'
+  )
+
+  def __init__(self, global_context=None, traceId=None, parentSpanId=None,
+               labels=None, enabled=True):
+    """Initializes the Span.
+
+      global_context: (optional) A global context str, perhaps read from the
+          X-Cloud-Trace-Context header.
+      traceId: (optional) A 32 hex digit string referring to the trace
+          containing this span. If not provided, a new trace will be created
+          with a random id.
+      parentSpanId: (optional) The spanId of the parent.
+      labels: (optional) a dict<string, string> of key/values to attach to
+          each Span created, or None.
+      enabled: (optional) a bool indicating whether we should log the spans
+          to a file for later uploading by the cloud trace log consumer daemon.
+    """
+    self.traceId = traceId
+    self.spans = []
+    self.last_span_id = parentSpanId
+    self.labels = labels
+    self.enabled = enabled
+
+    global_context = (global_context or
+                      os.environ.get(self.CLOUD_TRACE_CONTEXT_ENV, ''))
+    context = SpanStack._ParseCloudTraceContext(global_context)
+
+    if traceId is None:
+      self.traceId = context.get('traceId')
+    if parentSpanId is None:
+      self.last_span_id = context.get('parentSpanId')
+    if context.get('options') == '0':
+      self.enabled = False
+
+  def _CreateSpan(self, name, **kwargs):
+    """Creates a span instance, setting certain defaults.
+
+    Args:
+      name: The name of the span
+      **kwargs: The keyword arguments to configure the span with.
+    """
+    kwargs.setdefault('traceId', self.traceId)
+    kwargs.setdefault('labels', self.labels)
+    kwargs.setdefault('parentSpanId', self.last_span_id)
+    span = Span(name, **kwargs)
+    if self.traceId is None:
+      self.traceId = span.traceId
+    return span
+
+  @contextlib.contextmanager
+  def Span(self, name, **kwargs):
+    """Enter a new Span context contained within the top Span of the stack.
+
+    Args:
+      name: The name of the span to enter
+      **kwargs: The kwargs to construct the span with.
+
+    Side effect:
+      Appends the new span object to |spans|, and yields span while in its
+      context. Pops the span object when exiting the context.
+
+    Returns:
+      A contextmanager whose __enter__() returns the new Span.
+    """
+    span = self._CreateSpan(name, **kwargs)
+    old_span_id, self.last_span_id = self.last_span_id, span.spanId
+    self.spans.append(span)
+
+    with span:
+      with self.EnvironmentContext():
+        yield span
+
+    self.spans.pop()
+    self.last_span_id = old_span_id
+
+    # Log each span to a file for later processing.
+    if self.enabled:
+      LogSpan(span)
+
+  # pylint: disable=docstring-misnamed-args
+  def Spanned(self, *span_args, **span_kwargs):
+    """A decorator equivalent of 'with span_stack.Span(...)'
+
+    Args:
+      *span_args: *args to use with the .Span
+      **span_kwargs: **kwargs to use with the .Span
+
+    Returns:
+      A decorator to wrap the body of a function in a span block.
+    """
+    def SpannedDecorator(f):
+      """Wraps the body of |f| with a .Span block."""
+      @functools.wraps(f)
+      def inner(*args, **kwargs):
+        with self.Span(*span_args, **span_kwargs):
+          f(*args, **kwargs)
+      return inner
+    return SpannedDecorator
+
+  def _GetCloudTraceContextHeader(self):
+    """Gets the Cloud Trace HTTP header context.
+
+    From the cloud trace doc explaining this
+    (https://cloud.google.com/trace/docs/support?hl=bg):
+
+      'X-Cloud-Trace-Context: TRACE_ID/SPAN_ID;o=TRACE_TRUE'
+      Where:
+        - TRACE_ID is a 32-character hex value representing a 128-bit number.
+        It should be unique between your requests, unless you intentionally
+        want to bundle the requests together. You can use UUIDs.
+        - SPAN_ID should be 0 for the first span in your trace. For
+        subsequent requests, set SPAN_ID to the span ID of the parent
+        request. See the description of TraceSpan (REST, RPC) for more
+        information about nested traces.
+        - TRACE_TRUE must be 1 to trace this request. Specify 0 to not trace
+        the request. For example, to force a trace with cURL:
+          curl 'http://www.example.com' --header 'X-Cloud-Trace-Context:
+            105445aa7843bc8bf206b120001000/0;o=1'
+    """
+    if not self.traceId:
+      return ''
+    span_postfix = '/%s' % self.spans[-1].spanId if self.spans else ''
+    enabled = '1' if self.enabled else '0'
+    return '{trace_id}{span_postfix};o={enabled}'.format(
+        trace_id=self.traceId,
+        span_postfix=span_postfix,
+        enabled=enabled)
+
+  @contextlib.contextmanager
+  def EnvironmentContext(self):
+    """Sets CLOUD_TRACE_CONTEXT to the value of X-Cloud-Trace-Context.
+
+    Cloud Trace uses an HTTP header to propagate trace context across RPC
+    boundaries. This method does the same across process boundaries using an
+    environment variable.
+    """
+    old_value = os.environ.get(self.CLOUD_TRACE_CONTEXT_ENV)
+    try:
+      os.environ[self.CLOUD_TRACE_CONTEXT_ENV] = (
+          self._GetCloudTraceContextHeader())
+      yield
+    finally:
+      if old_value is not None:
+        os.environ[self.CLOUD_TRACE_CONTEXT_ENV] = old_value
+      elif self.CLOUD_TRACE_CONTEXT_ENV in os.environ:
+        del os.environ[self.CLOUD_TRACE_CONTEXT_ENV]
+
+  @staticmethod
+  def _ParseCloudTraceContext(context):
+    """Sets current_span_id and trace_id from the |context|.
+
+    See _GetCloudTraceContextHeader.
+
+    Args:
+      context: The context variable, either from X-Cloud-Trace-Context
+          or from the CLOUD_TRACE_CONTEXT environment variable.
+
+    Returns:
+      A dictionary, which if the context string matches
+      CLOUD_TRACE_CONTEXT_PATTERN, contains the matched groups. If not matched,
+      returns an empty dictionary.
+    """
+    m = SpanStack.CLOUD_TRACE_CONTEXT_PATTERN.match(context)
+    if m:
+      return m.groupdict()
+    else:
+      return {}
\ No newline at end of file
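+
+
+# The function below is a minimal usage sketch of SpanStack with made-up span
+# names and labels; enabled=False keeps it from writing span log files.  It is
+# illustrative only and not part of the original chromite module.
+def _ExampleTrace():  # pragma: no cover
+  """Sketch: nest spans via the context manager and wrap a function."""
+  stack = SpanStack(labels={'service': 'example'}, enabled=False)
+  with stack.Span('outer-step'):
+    with stack.Span('inner-step'):
+      pass
+
+  @stack.Spanned('decorated-step')
+  def _DoWork():
+    pass
+
+  _DoWork()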
diff --git a/utils/frozen_chromite/lib/commandline.py b/utils/frozen_chromite/lib/commandline.py
new file mode 100644
index 0000000..7292846
--- /dev/null
+++ b/utils/frozen_chromite/lib/commandline.py
@@ -0,0 +1,1064 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Purpose of this module is to hold common script/commandline functionality.
+
+This ranges from optparse to a basic script wrapper setup (much like
+what is used for chromite.bin.*).
+"""
+
+from __future__ import print_function
+
+import argparse
+import collections
+import datetime
+import functools
+import os
+import optparse  # pylint: disable=deprecated-module
+import signal
+import sys
+
+import six
+from six.moves import urllib
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_collections
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import gs
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import path_util
+from autotest_lib.utils.frozen_chromite.lib import terminal
+from autotest_lib.utils.frozen_chromite.utils import attrs_freezer
+
+
+DEVICE_SCHEME_FILE = 'file'
+DEVICE_SCHEME_SERVO = 'servo'
+DEVICE_SCHEME_SSH = 'ssh'
+DEVICE_SCHEME_USB = 'usb'
+
+
+class ChrootRequiredError(Exception):
+  """Raised when a command must be run in the chroot
+
+  This exception is intended to be caught by code which will restart execution
+  in the chroot. Throwing this exception allows contexts to be exited and
+  general cleanup to happen before we exec an external binary.
+
+  The command to run inside the chroot, and (optionally) special cros_sdk
+  arguments are attached to the exception. Any adjustments to the arguments
+  should be done before raising the exception.
+  """
+  def __init__(self, cmd, chroot_args=None, extra_env=None):
+    """Constructor for ChrootRequiredError.
+
+    Args:
+      cmd: Command line to run inside the chroot as a list of strings.
+      chroot_args: Arguments to pass directly to cros_sdk.
+      extra_env: Environmental variables to set in the chroot.
+    """
+    super(ChrootRequiredError, self).__init__()
+    self.cmd = cmd
+    self.chroot_args = chroot_args
+    self.extra_env = extra_env
+
+
+class ExecRequiredError(Exception):
+  """Raised when a command needs to exec, after cleanup.
+
+  This exception is intended to be caught by code which will exec another
+  command. Throwing this exception allows contexts to be exited and general
+  cleanup to happen before we exec an external binary.
+
+  The command to run is attached to the exception. Any adjustments to the
+  arguments should be done before raising the exception.
+  """
+  def __init__(self, cmd):
+    """Constructor for ExecRequiredError.
+
+    Args:
+      cmd: Command line to run inside the chroot as a list of strings.
+    """
+    super(ExecRequiredError, self).__init__()
+    self.cmd = cmd
+
+
+def AbsolutePath(_option, _opt, value):
+  """Expand paths and make them absolute."""
+  return osutils.ExpandPath(value)
+
+
+def NormalizeGSPath(value):
+  """Normalize GS paths."""
+  url = gs.CanonicalizeURL(value, strict=True)
+  return '%s%s' % (gs.BASE_GS_URL, os.path.normpath(url[len(gs.BASE_GS_URL):]))
+
+
+def NormalizeLocalOrGSPath(value):
+  """Normalize a local or GS path."""
+  ptype = 'gs_path' if gs.PathIsGs(value) else 'path'
+  return VALID_TYPES[ptype](value)
+
+
+def NormalizeAbUrl(value):
+  """Normalize an androidbuild URL."""
+  if not value.startswith('ab://'):
+    # Give a helpful error message about the format expected.  Putting this
+    # message in the exception is useless because argparse ignores the
+    # exception message and just says the value is invalid.
+    msg = 'Invalid ab:// URL format: [%s].' % value
+    logging.error(msg)
+    raise ValueError(msg)
+
+  # If no errors, just return the unmodified value.
+  return value
+
+
+def ValidateCipdURL(value):
+  """Return plain string."""
+  if not value.startswith('cipd://'):
+    msg = 'Invalid cipd:// URL format: %s' % value
+    logging.error(msg)
+    raise ValueError(msg)
+  return value
+
+
+def ParseBool(value):
+  """Parse bool argument into a bool value.
+
+  For the existing type=bool functionality, the parser uses the built-in bool(x)
+  function to determine the value.  That function only returns False if x is
+  False or omitted.  However, arguments coming from the command line are
+  initially parsed as strings, and bool(x) returns True for any non-empty
+  string.
+
+  Args:
+    value: String representing a boolean value.
+
+  Returns:
+    True or False.
+  """
+  return cros_build_lib.BooleanShellValue(value, False)
+
+
+def ParseDate(value):
+  """Parse date argument into a datetime.date object.
+
+  Args:
+    value: String representing a single date in "YYYY-MM-DD" format.
+
+  Returns:
+    A datetime.date object.
+  """
+  try:
+    return datetime.datetime.strptime(value, '%Y-%m-%d').date()
+  except ValueError:
+    # Give a helpful error message about the format expected.  Putting this
+    # message in the exception is useless because argparse ignores the
+    # exception message and just says the value is invalid.
+    logging.error('Date is expected to be in format YYYY-MM-DD.')
+    raise
+
+
+def NormalizeUri(value):
+  """Normalize a local path or URI."""
+  o = urllib.parse.urlparse(value)
+  if o.scheme == 'file':
+    # Trim off the file:// prefix.
+    return VALID_TYPES['path'](value[7:])
+  elif o.scheme not in ('', 'gs'):
+    o = list(o)
+    o[2] = os.path.normpath(o[2])
+    return urllib.parse.urlunparse(o)
+  else:
+    return NormalizeLocalOrGSPath(value)
+
+
+# A Device object holds information parsed from the command line input:
+#   scheme: DEVICE_SCHEME_SSH, DEVICE_SCHEME_USB, DEVICE_SCHEME_SERVO,
+#     or DEVICE_SCHEME_FILE.
+#   username: String SSH username or None.
+#   hostname: String SSH hostname or None.
+#   port: Int SSH or Servo port or None.
+#   path: String USB/file path or None.
+#   raw: String raw input from the command line.
+#   serial_number: String Servo serial number or None.
+# For now this is a superset of all information for USB, SSH, or file devices.
+# If functionality diverges based on type, it may be useful to split this into
+# separate device classes instead.
+Device = cros_collections.Collection(
+    'Device', scheme=None, username=None, hostname=None, port=None, path=None,
+    raw=None, serial_number=None)
+
+
+class DeviceParser(object):
+  """Parses devices as an argparse argument type.
+
+  In addition to parsing user input, this class will also ensure that only
+  supported device schemes are accepted by the parser. For example,
+  `cros deploy` only makes sense with an SSH device, but `cros flash` can use
+  SSH, USB, or file device schemes.
+
+  If the device input is malformed or the scheme is wrong, an error message will
+  be printed and the program will exit.
+
+  Valid device inputs are:
+    - [ssh://][username@]hostname[:port].
+    - usb://[path].
+    - file://path or /absolute_path.
+    - servo:port[:port] to use a port via dut-control, e.g. servo:port:1234.
+    - servo:serial:serial-number to use the servo's serial number,
+        e.g. servo:serial:641220-00057 or servo:serial:C1230024192.
+    - [ssh://]:vm:.
+
+  The last item above is an alias for ssh'ing into a virtual machine on
+  localhost.  It gets translated into 'localhost:9222'.
+
+  Examples:
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument(
+      'ssh_device',
+      type=commandline.DeviceParser(commandline.DEVICE_SCHEME_SSH))
+
+    parser.add_argument(
+      'usb_or_file_device',
+      type=commandline.DeviceParser([commandline.DEVICE_SCHEME_USB,
+                                     commandline.DEVICE_SCHEME_FILE]))
+  """
+
+  def __init__(self, schemes):
+    """Initializes the parser.
+
+    See the class comments for usage examples.
+
+    Args:
+      schemes: A scheme or list of schemes to accept.
+    """
+    self.schemes = ([schemes] if isinstance(schemes, six.string_types)
+                    else schemes)
+    # Provide __name__ for argparse to print on failure, or else it will use
+    # repr() which creates a confusing error message.
+    self.__name__ = type(self).__name__
+
+  def __call__(self, value):
+    """Parses a device input and enforces constraints.
+
+    DeviceParser is an object so that a set of valid schemes can be specified,
+    but argparse expects a parsing function, so we overload __call__() for
+    argparse to use.
+
+    Args:
+      value: String representing a device target. See class comments for
+        valid device input formats.
+
+    Returns:
+      A Device object.
+
+    Raises:
+      ValueError: |value| is not a valid device specifier or doesn't
+        match the supported list of schemes.
+    """
+    try:
+      device = self._ParseDevice(value)
+      self._EnforceConstraints(device, value)
+      return device
+    except ValueError as e:
+      # argparse ignores exception messages, so print the message manually.
+      logging.error(e)
+      raise
+    except Exception as e:
+      logging.error('Internal error while parsing device input: %s', e)
+      raise
+
+  def _EnforceConstraints(self, device, value):
+    """Verifies that user-specified constraints are upheld.
+
+    Checks that the parsed device has a scheme that matches what the user
+    expects. Additional constraints can be added if needed.
+
+    Args:
+      device: Device object.
+      value: String representing a device target.
+
+    Raises:
+      ValueError: |device| has the wrong scheme.
+    """
+    if device.scheme not in self.schemes:
+      raise ValueError('Unsupported scheme "%s" for device "%s"' %
+                       (device.scheme, value))
+
+  def _ParseDevice(self, value):
+    """Parse a device argument.
+
+    Args:
+      value: String representing a device target.
+
+    Returns:
+      A Device object.
+
+    Raises:
+      ValueError: |value| is not a valid device specifier.
+    """
+    # ':vm:' is an alias for ssh'ing into a virtual machine on localhost;
+    # translate it appropriately.
+    if value.strip().lower() == ':vm:':
+      value = 'localhost:9222'
+    elif value.strip().lower() == 'ssh://:vm:':
+      value = 'ssh://localhost:9222'
+    parsed = urllib.parse.urlparse(value)
+
+    # crbug.com/1069325: Starting in python 3.7 urllib has different parsing
+    # results. 127.0.0.1:9999 parses as scheme='127.0.0.1' path='9999'
+    # instead of scheme='' path='127.0.0.1:9999'. We want that parsed as ssh.
+    # Check for '.' or 'localhost' in the scheme to catch the most common cases
+    # for this result.
+    if (not parsed.scheme or '.' in parsed.scheme or
+        parsed.scheme == 'localhost'):
+      # Default to a file scheme for absolute paths, SSH scheme otherwise.
+      if value and value[0] == '/':
+        scheme = DEVICE_SCHEME_FILE
+      else:
+        # urlparse won't provide hostname/username/port unless a scheme is
+        # specified so we need to re-parse.
+        parsed = urllib.parse.urlparse('%s://%s' % (DEVICE_SCHEME_SSH, value))
+        scheme = DEVICE_SCHEME_SSH
+    else:
+      scheme = parsed.scheme.lower()
+
+    if scheme == DEVICE_SCHEME_SSH:
+      hostname = parsed.hostname
+      port = parsed.port
+      if hostname == 'localhost' and not port:
+        # Use of localhost as the actual machine is uncommon enough relative to
+        # the use of KVM that we require users to specify localhost:22 if they
+        # actually want to connect to the localhost.  Otherwise the expectation
+        # is that they intend to access the VM but forget or didn't know to use
+        # port 9222.
+        raise ValueError('To connect to localhost, use ssh://localhost:22 '
+                         'explicitly, or use ssh://localhost:9222 for the local'
+                         ' VM.')
+      if not hostname:
+        raise ValueError('Hostname is required for device "%s"' % value)
+      return Device(scheme=scheme, username=parsed.username, hostname=hostname,
+                    port=port, raw=value)
+    elif scheme == DEVICE_SCHEME_USB:
+      path = parsed.netloc + parsed.path
+      # Change path '' to None for consistency.
+      return Device(scheme=scheme, path=path if path else None, raw=value)
+    elif scheme == DEVICE_SCHEME_FILE:
+      path = parsed.netloc + parsed.path
+      if not path:
+        raise ValueError('Path is required for "%s"' % value)
+      return Device(scheme=scheme, path=path, raw=value)
+    elif scheme == DEVICE_SCHEME_SERVO:
+      # Parse the identifier type and value.
+      servo_type, _, servo_id = parsed.path.partition(':')
+      # Don't want to use the netloc before the split, in case of a serial
+      # number (serial numbers are case-sensitive).
+      servo_type = servo_type.lower()
+
+      return self._parse_servo(servo_type, servo_id)
+    else:
+      raise ValueError('Unknown device scheme "%s" in "%s"' % (scheme, value))
+
+  @staticmethod
+  def _parse_servo(servo_type, servo_id):
+    """Parse a servo device from the parsed servo uri info.
+
+    Args:
+      servo_type: The servo identifier type, either port or serial.
+      servo_id: The servo identifier, either the port number it is
+        communicating through or its serial number.
+    """
+    servo_port = None
+    serial_number = None
+    if servo_type == 'serial':
+      if servo_id:
+        serial_number = servo_id
+      else:
+        raise ValueError('No serial number given.')
+    elif servo_type == 'port':
+      if servo_id:
+        # Parse and validate when given.
+        try:
+          servo_port = int(servo_id)
+        except ValueError:
+          raise ValueError('Invalid servo port value: %s' % servo_id)
+        if servo_port <= 0 or servo_port > 65535:
+          raise ValueError(
+              'Invalid port, must be 1-65535: %d given.' % servo_port)
+    else:
+      raise ValueError('Invalid servo type given: %s' % servo_type)
+
+    return Device(
+        scheme=DEVICE_SCHEME_SERVO,
+        port=servo_port,
+        serial_number=serial_number)
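+
+
+# The function below is a minimal usage sketch of DeviceParser called directly
+# (outside argparse); the hostname and port numbers are illustrative
+# assumptions.  See the class docstring for the argparse integration.
+def _ExampleParseDevices():  # pragma: no cover
+  """Sketch: parse an SSH device and a servo device."""
+  parse = DeviceParser([DEVICE_SCHEME_SSH, DEVICE_SCHEME_SERVO])
+  dut = parse('ssh://root@chromeos-dut:2222')
+  servo = parse('servo:port:9999')
+  return dut.hostname, dut.port, servo.port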
+
+
+class _AppendOption(argparse.Action):
+  """Append the command line option (with no arguments) to dest.
+
+  parser.add_argument('-b', '--barg', dest='out', action='append_option')
+  options = parser.parse_args(['-b', '--barg'])
+  options.out == ['-b', '--barg']
+  """
+  def __init__(self, option_strings, dest, **kwargs):
+    if 'nargs' in kwargs:
+      raise ValueError('nargs is not supported for append_option action')
+    super(_AppendOption, self).__init__(
+        option_strings, dest, nargs=0, **kwargs)
+
+  def __call__(self, parser, namespace, values, option_string=None):
+    if getattr(namespace, self.dest, None) is None:
+      setattr(namespace, self.dest, [])
+    getattr(namespace, self.dest).append(option_string)
+
+
+class _AppendOptionValue(argparse.Action):
+  """Append the command line option to dest. Useful for pass along arguments.
+
+  parser.add_argument('-b', '--barg', dest='out', action='append_option_value')
+  options = parser.parse_args(['--barg', 'foo', '-b', 'bar'])
+  options.out == ['--barg', 'foo', '-b', 'bar']
+  """
+  def __call__(self, parser, namespace, values, option_string=None):
+    if getattr(namespace, self.dest, None) is None:
+      setattr(namespace, self.dest, [])
+    getattr(namespace, self.dest).extend([option_string, str(values)])
+
+
+class _SplitExtendAction(argparse.Action):
+  """Callback to split the argument and extend existing value.
+
+  We normalize whitespace before splitting.  This is to support the forms:
+    cbuildbot -p 'proj:branch ' ...
+    cbuildbot -p ' proj:branch' ...
+    cbuildbot -p 'proj:branch  proj2:branch' ...
+    cbuildbot -p "$(some_command_that_returns_nothing)" ...
+  """
+  def __call__(self, parser, namespace, values, option_string=None):
+    if getattr(namespace, self.dest, None) is None:
+      setattr(namespace, self.dest, [])
+    getattr(namespace, self.dest).extend(values.split())
+
+
+VALID_TYPES = {
+    'ab_url': NormalizeAbUrl,
+    'bool': ParseBool,
+    'cipd': ValidateCipdURL,
+    'date': ParseDate,
+    'path': osutils.ExpandPath,
+    'gs_path': NormalizeGSPath,
+    'local_or_gs_path': NormalizeLocalOrGSPath,
+    'path_or_uri': NormalizeUri,
+}
+
+VALID_ACTIONS = {
+    'append_option': _AppendOption,
+    'append_option_value': _AppendOptionValue,
+    'split_extend': _SplitExtendAction,
+}
+
+_DEPRECATE_ACTIONS = [None, 'store', 'store_const', 'store_true', 'store_false',
+                      'append', 'append_const', 'count'] + list(VALID_ACTIONS)
+
+
+class _DeprecatedAction(object):
+  """Base functionality to allow adding warnings for deprecated arguments.
+
+  To add a deprecated warning, simply include a deprecated=message argument
+  to the add_argument call for the deprecated argument. Beside logging the
+  deprecation warning, the argument will behave as normal.
+  """
+
+  def __init__(self, *args, **kwargs):
+    """Init override to extract the deprecated argument when it exists."""
+    self.deprecated_message = kwargs.pop('deprecated', None)
+    super(_DeprecatedAction, self).__init__(*args, **kwargs)
+
+  def __call__(self, parser, namespace, values, option_string=None):
+    """Log the message then defer to the parent action."""
+    if self.deprecated_message:
+      logging.warning('Argument %s is deprecated: %s', option_string,
+                      self.deprecated_message)
+    return super(_DeprecatedAction, self).__call__(
+        parser, namespace, values, option_string=option_string)
+
+
+def OptparseWrapCheck(desc, check_f, _option, opt, value):
+  """Optparse adapter for type checking functionality."""
+  try:
+    return check_f(value)
+  except ValueError:
+    raise optparse.OptionValueError(
+        'Invalid %s given: --%s=%s' % (desc, opt, value))
+
+
+class Option(optparse.Option):
+  """Subclass to implement path evaluation & other useful types."""
+
+  _EXTRA_TYPES = ('path', 'gs_path')
+  TYPES = optparse.Option.TYPES + _EXTRA_TYPES
+  TYPE_CHECKER = optparse.Option.TYPE_CHECKER.copy()
+  for t in _EXTRA_TYPES:
+    TYPE_CHECKER[t] = functools.partial(OptparseWrapCheck, t, VALID_TYPES[t])
+
+
+class FilteringOption(Option):
+  """Subclass that supports Option filtering for FilteringOptionParser"""
+
+  _EXTRA_ACTIONS = ('split_extend',)
+  ACTIONS = Option.ACTIONS + _EXTRA_ACTIONS
+  STORE_ACTIONS = Option.STORE_ACTIONS + _EXTRA_ACTIONS
+  TYPED_ACTIONS = Option.TYPED_ACTIONS + _EXTRA_ACTIONS
+  ALWAYS_TYPED_ACTIONS = (Option.ALWAYS_TYPED_ACTIONS + _EXTRA_ACTIONS)
+
+  def take_action(self, action, dest, opt, value, values, parser):
+    if action == 'split_extend':
+      lvalue = value.split()
+      values.ensure_value(dest, []).extend(lvalue)
+    else:
+      Option.take_action(self, action, dest, opt, value, values, parser)
+
+    if value is None:
+      value = []
+    elif not self.nargs or self.nargs <= 1:
+      value = [value]
+
+    parser.AddParsedArg(self, opt, [str(v) for v in value])
+
+
+class ColoredFormatter(logging.Formatter):
+  """A logging formatter that can color the messages."""
+
+  _COLOR_MAPPING = {
+      'WARNING': terminal.Color.YELLOW,
+      'ERROR': terminal.Color.RED,
+  }
+
+  def __init__(self, *args, **kwargs):
+    """Initializes the formatter.
+
+    Args:
+      args: See logging.Formatter for specifics.
+      kwargs: See logging.Formatter for specifics.
+      enable_color: Whether to enable colored logging. Defaults
+        to None, where terminal.Color will set to a sane default.
+    """
+    self.color = terminal.Color(enabled=kwargs.pop('enable_color', None))
+    super(ColoredFormatter, self).__init__(*args, **kwargs)
+
+  def format(self, record):
+    """Formats |record| with color."""
+    msg = super(ColoredFormatter, self).format(record)
+    color = self._COLOR_MAPPING.get(record.levelname)
+    return msg if not color else self.color.Color(color, msg)
+
+
+class ChromiteStreamHandler(logging.StreamHandler):
+  """A stream handler for logging."""
+
+
+class BaseParser(object):
+  """Base parser class that includes the logic to add logging controls."""
+
+  DEFAULT_LOG_LEVELS = ('fatal', 'critical', 'error', 'warning', 'notice',
+                        'info', 'debug')
+
+  DEFAULT_LOG_LEVEL = 'info'
+  ALLOW_LOGGING = True
+
+  def __init__(self, **kwargs):
+    """Initialize this parser instance.
+
+    kwargs:
+      logging: Defaults to ALLOW_LOGGING from the class; if given,
+        add --log-level.
+      default_log_level: If logging is enabled, override the default logging
+        level.  Defaults to the class's DEFAULT_LOG_LEVEL value.
+      log_levels: If logging is enabled, this overrides the enumeration of
+        allowed logging levels.  If not given, defaults to the class's
+        DEFAULT_LOG_LEVELS value.
+      manual_debug: If logging is enabled and this is True, suppress addition
+        of a --debug alias.  This option defaults to True unless 'debug' has
+        been exempted from the allowed logging level targets.
+      caching: If given, must be either a callable that discerns the cache
+        location if it wasn't specified (the prototype must be akin to
+        lambda parser, values:calculated_cache_dir_path; it may return None to
+        indicate that it handles setting the value on its own later in the
+        parsing including setting the env), or True; if True, the
+        machinery defaults to invoking the class's FindCacheDir method
+        (which can be overridden).  FindCacheDir checks $CROS_CACHEDIR, falling
+        back to $REPO/.cache, and finally falls back to $TMP.
+        Note that the cache_dir is not created, just discerned where it
+        should live.
+        If False, or caching is not given, then no --cache-dir option will be
+        added.
+    """
+    self.debug_enabled = False
+    self.caching_group = None
+    self.debug_group = None
+    self.default_log_level = None
+    self.log_levels = None
+    self.logging_enabled = kwargs.get('logging', self.ALLOW_LOGGING)
+    self.default_log_level = kwargs.get('default_log_level',
+                                        self.DEFAULT_LOG_LEVEL)
+    self.log_levels = tuple(x.lower() for x in
+                            kwargs.get('log_levels', self.DEFAULT_LOG_LEVELS))
+    self.debug_enabled = (not kwargs.get('manual_debug', False)
+                          and 'debug' in self.log_levels)
+    self.caching = kwargs.get('caching', False)
+    self._cros_defaults = {}
+
+  @staticmethod
+  def PopUsedArgs(kwarg_dict):
+    """Removes keys used by the base parser from the kwarg namespace."""
+    parser_keys = ['logging', 'default_log_level', 'log_levels', 'manual_debug',
+                   'caching']
+    for key in parser_keys:
+      kwarg_dict.pop(key, None)
+
+  def SetupOptions(self):
+    """Sets up standard chromite options."""
+    # NB: All options here must go through add_common_argument_to_group.
+    # You cannot use add_argument or such helpers directly.  This is to
+    # support default values with subparsers.
+    #
+    # You should also explicitly add default=None here when you want the
+    # default to be set up in the parsed option namespace.
+    if self.logging_enabled:
+      self.debug_group = self.add_argument_group('Debug options')
+      self.add_common_argument_to_group(
+          self.debug_group, '--log-level', choices=self.log_levels,
+          default=self.default_log_level,
+          help='Set logging level to report at.')
+      self.add_common_argument_to_group(
+          self.debug_group, '--log-format', action='store',
+          default=constants.LOGGER_FMT,
+          help='Set logging format to use.')
+      # Backwards compat name.  We should delete this at some point.
+      self.add_common_argument_to_group(
+          self.debug_group, '--log_format', action='store',
+          default=constants.LOGGER_FMT,
+          help=argparse.SUPPRESS)
+      self.add_common_argument_to_group(
+          self.debug_group,
+          '-v',
+          '--verbose',
+          action='store_const',
+          const='info',
+          dest='log_level',
+          help='Alias for `--log-level=info`.')
+      if self.debug_enabled:
+        self.add_common_argument_to_group(
+            self.debug_group, '--debug', action='store_const', const='debug',
+            dest='log_level', help='Alias for `--log-level=debug`. '
+            'Useful for debugging bugs/failures.')
+      self.add_common_argument_to_group(
+          self.debug_group, '--nocolor', action='store_false', dest='color',
+          default=None,
+          help='Do not use colorized output (or `export NOCOLOR=true`)')
+
+    if self.caching:
+      self.caching_group = self.add_argument_group('Caching Options')
+      self.add_common_argument_to_group(
+          self.caching_group, '--cache-dir', default=None, type='path',
+          help='Override the calculated chromeos cache directory; '
+          "typically defaults to '$REPO/.cache' .")
+
+  def SetupLogging(self, opts):
+    """Sets up logging based on |opts|."""
+    value = opts.log_level.upper()
+    logger = logging.getLogger()
+    logger.setLevel(getattr(logging, value))
+    formatter = ColoredFormatter(fmt=opts.log_format,
+                                 datefmt=constants.LOGGER_DATE_FMT,
+                                 enable_color=opts.color)
+
+    # Only set colored formatter for ChromiteStreamHandler instances,
+    # which could have been added by ScriptWrapperMain() below.
+    chromite_handlers = [x for x in logger.handlers if
+                         isinstance(x, ChromiteStreamHandler)]
+    for handler in chromite_handlers:
+      handler.setFormatter(formatter)
+
+    logging.captureWarnings(True)
+
+    return value
+
+  def DoPostParseSetup(self, opts, args):
+    """Method called to handle post opts/args setup.
+
+    This can be anything from logging setup to positional arg count validation.
+
+    Args:
+      opts: optparse.Values or argparse.Namespace instance
+      args: positional arguments unconsumed from parsing.
+
+    Returns:
+      (opts, args), w/ whatever modification done.
+    """
+    for dest, default in self._cros_defaults.items():
+      if not hasattr(opts, dest):
+        setattr(opts, dest, default)
+
+    if self.logging_enabled:
+      value = self.SetupLogging(opts)
+      if self.debug_enabled:
+        opts.debug = (value == 'DEBUG')
+      opts.verbose = value in ('INFO', 'DEBUG')
+
+    if self.caching:
+      path = os.environ.get(constants.SHARED_CACHE_ENVVAR)
+      if path is not None and opts.cache_dir is None:
+        opts.cache_dir = os.path.abspath(path)
+
+      opts.cache_dir_specified = opts.cache_dir is not None
+      if not opts.cache_dir_specified:
+        func = self.FindCacheDir if not callable(self.caching) else self.caching
+        opts.cache_dir = func(self, opts)
+      if opts.cache_dir is not None:
+        self.ConfigureCacheDir(opts.cache_dir)
+
+    return opts, args
+
+  @staticmethod
+  def ConfigureCacheDir(cache_dir):
+    if cache_dir is None:
+      os.environ.pop(constants.SHARED_CACHE_ENVVAR, None)
+      logging.debug('Removed cache_dir setting')
+    else:
+      os.environ[constants.SHARED_CACHE_ENVVAR] = cache_dir
+      logging.debug('Configured cache_dir to %r', cache_dir)
+
+  @classmethod
+  def FindCacheDir(cls, _parser, _opts):
+    logging.debug('Cache dir lookup.')
+    return path_util.FindCacheDir()
+
+
+@six.add_metaclass(attrs_freezer.Class)
+class ArgumentNamespace(argparse.Namespace):
+  """Class to mimic argparse.Namespace with value freezing support."""
+  _FROZEN_ERR_MSG = 'Option values are frozen, cannot alter %s.'
+
+
+# Note that because optparse.Values is not a new-style class this class
+# must use the mixin rather than the metaclass.
+class OptionValues(attrs_freezer.Mixin, optparse.Values):
+  """Class to mimic optparse.Values with value freezing support."""
+  _FROZEN_ERR_MSG = 'Option values are frozen, cannot alter %s.'
+
+  def __init__(self, defaults, *args, **kwargs):
+    attrs_freezer.Mixin.__init__(self)
+    optparse.Values.__init__(self, defaults, *args, **kwargs)
+
+    # Used by FilteringParser.
+    self.parsed_args = None
+
+
+PassedOption = collections.namedtuple(
+    'PassedOption', ['opt_inst', 'opt_str', 'value_str'])
+
+
+class FilteringParser(optparse.OptionParser, BaseParser):
+  """Custom option parser for filtering options.
+
+  Aside from adding a couple of types (path for absolute paths,
+  gs_path for Google Storage URLs, and log_level for logging level control),
+  this additionally exposes logging control by default; if undesired,
+  either derive from this class setting ALLOW_LOGGING to False, or
+  pass in logging=False to the constructor.
+  """
+
+  DEFAULT_OPTION_CLASS = FilteringOption
+
+  def __init__(self, usage=None, **kwargs):
+    BaseParser.__init__(self, **kwargs)
+    self.PopUsedArgs(kwargs)
+    kwargs.setdefault('option_class', self.DEFAULT_OPTION_CLASS)
+    optparse.OptionParser.__init__(self, usage=usage, **kwargs)
+    self.SetupOptions()
+
+  def add_common_argument_to_group(self, group, *args, **kwargs):
+    """Adds the given option defined by args and kwargs to group."""
+    return group.add_option(*args, **kwargs)
+
+  def add_argument_group(self, *args, **kwargs):
+    """Return an option group rather than an argument group."""
+    return self.add_option_group(*args, **kwargs)
+
+  def parse_args(self, args=None, values=None):
+    # If no Values object is specified then use our custom OptionValues.
+    if values is None:
+      values = OptionValues(defaults=self.defaults)
+
+    values.parsed_args = []
+
+    opts, remaining = optparse.OptionParser.parse_args(
+        self, args=args, values=values)
+    return self.DoPostParseSetup(opts, remaining)
+
+  def AddParsedArg(self, opt_inst, opt_str, value_str):
+    """Add a parsed argument with attributes.
+
+    Args:
+      opt_inst: An instance of a raw optparse.Option object that represents the
+                option.
+      opt_str: The option string.
+      value_str: A list of string-ified values identified by optparse.
+    """
+    self.values.parsed_args.append(PassedOption(opt_inst, opt_str, value_str))
+
+  @staticmethod
+  def FilterArgs(parsed_args, filter_fn):
+    """Filter the argument by passing it through a function.
+
+    Args:
+      parsed_args: The list of parsed argument namedtuples to filter.  Tuples
+        are of the form (opt_inst, opt_str, value_str).
+      filter_fn: A function with signature f(PassedOption), and returns True if
+        the argument is to be passed through.  False if not.
+
+    Returns:
+      A tuple containing two lists - one of accepted arguments and one of
+      removed arguments.
+    """
+    removed = []
+    accepted = []
+    for arg in parsed_args:
+      target = accepted if filter_fn(arg) else removed
+      target.append(arg.opt_str)
+      target.extend(arg.value_str)
+
+    return accepted, removed
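+
+
+# The function below is a minimal usage sketch of FilteringParser with made-up
+# option names; it shows how the recorded parsed_args feed FilterArgs().  It
+# is illustrative only and not part of the original chromite module.
+def _ExampleFilterArgs():  # pragma: no cover
+  """Sketch: split a parsed command line into kept and dropped options."""
+  parser = FilteringParser(usage='%prog [options]')
+  parser.add_option('--keep-me', action='store_true', default=False)
+  parser.add_option('--drop-me', action='store', type='string')
+  opts, _ = parser.parse_args(['--keep-me', '--drop-me', 'value'])
+  return FilteringParser.FilterArgs(
+      opts.parsed_args, lambda arg: arg.opt_str == '--keep-me')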
+
+
+class ArgumentParser(BaseParser, argparse.ArgumentParser):
+  """Custom argument parser for use by chromite.
+
+  This class additionally exposes logging control by default; if undesired,
+  either derive from this class setting ALLOW_LOGGING to False, or
+  pass in logging=False to the constructor.
+  """
+
+  def __init__(self, usage=None, **kwargs):
+    kwargs.setdefault('formatter_class', argparse.RawDescriptionHelpFormatter)
+    BaseParser.__init__(self, **kwargs)
+    self.PopUsedArgs(kwargs)
+    argparse.ArgumentParser.__init__(self, usage=usage, **kwargs)
+    self._SetupTypes()
+    self.SetupOptions()
+    self._RegisterActions()
+
+  def _SetupTypes(self):
+    """Register types with ArgumentParser."""
+    for t, check_f in VALID_TYPES.items():
+      self.register('type', t, check_f)
+    for a, class_a in VALID_ACTIONS.items():
+      self.register('action', a, class_a)
+
+  def _RegisterActions(self):
+    """Update the container's actions.
+
+    This method builds out a new action class to register for each action type.
+    The new action class allows handling the deprecated argument without any
+    other changes to the argument parser logic. See _DeprecatedAction.
+    """
+    for action in _DEPRECATE_ACTIONS:
+      current_class = self._registry_get('action', action, object)
+      # Base classes for the new class. The _DeprecatedAction must be first to
+      # ensure its method overrides are called first.
+      bases = (_DeprecatedAction, current_class)
+      try:
+        self.register('action', action, type('deprecated-wrapper', bases, {}))
+      except TypeError:
+        # Method resolution order error. This occurs when the _DeprecatedAction
+        # class is inherited multiple times, so we've already registered the
+        # replacement class. The underlying _ActionsContainer gets passed
+        # around, so this may get triggered in non-obvious ways.
+        continue
+
+  def add_common_argument_to_group(self, group, *args, **kwargs):
+    """Adds the given argument to the group.
+
+    This argument is expected to show up across the base parser and subparsers
+    that might be added later on.  The default argparse module does not handle
+    this scenario well -- it processes the base parser first (defaults and the
+    user arguments), then it processes the subparser (defaults and arguments).
+    That means defaults in the subparser will clobber user arguments passed in
+    to the base parser!
+    """
+    default = kwargs.pop('default', None)
+    kwargs['default'] = argparse.SUPPRESS
+    action = group.add_argument(*args, **kwargs)
+    self._cros_defaults.setdefault(action.dest, default)
+    return action
+
+  def parse_args(self, args=None, namespace=None):
+    """Translates OptionParser call to equivalent ArgumentParser call."""
+    # If no Namespace object is specified then use our custom ArgumentNamespace.
+    if namespace is None:
+      namespace = ArgumentNamespace()
+
+    # Unlike OptionParser, ArgParser works only with a single namespace and no
+    # args. Re-use BaseParser DoPostParseSetup but only take the namespace.
+    namespace = argparse.ArgumentParser.parse_args(
+        self, args=args, namespace=namespace)
+    return self.DoPostParseSetup(namespace, None)[0]
+
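+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  It demonstrates the point of
+# add_common_argument_to_group(): the real default is withheld from argparse
+# (SUPPRESS) and recorded in _cros_defaults so the parser can re-apply it
+# after parsing, which keeps a later subparser pass from clobbering a
+# user-supplied value.  '--board' and its values are hypothetical.
+def _ExampleCommonArgument():
+  """Sketch: a shared argument whose default is applied after parsing."""
+  parser = ArgumentParser(description='sketch')
+  group = parser.add_argument_group('common')
+  parser.add_common_argument_to_group(group, '--board', default='betty')
+  defaulted = parser.parse_args([]).board
+  overridden = parser.parse_args(['--board', 'eve']).board
+  # Expected: defaulted == 'betty', overridden == 'eve'.
+  return defaulted, overridden
+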
+
+class _ShutDownException(SystemExit):
+  """Exception raised when user hits CTRL+C."""
+
+  def __init__(self, sig_num, message):
+    self.signal = sig_num
+    # Set up a message primarily for any code that may intercept this
+    # exception while it is crashing back up the stack to us.
+    SystemExit.__init__(self, 128 + sig_num)
+    self.args = (sig_num, message)
+
+  def __str__(self):
+    """Stringify this exception."""
+    return self.args[1]
+
+
+def _DefaultHandler(signum, _frame):
+  # Don't double process sigterms; just trigger shutdown from the first
+  # exception.
+  signal.signal(signum, signal.SIG_IGN)
+  raise _ShutDownException(
+      signum, 'Received signal %i; shutting down' % (signum,))
+
+
+def _RestartInChroot(cmd, chroot_args, extra_env):
+  """Rerun inside the chroot.
+
+  Args:
+    cmd: Command line to run inside the chroot as a list of strings.
+    chroot_args: Arguments to pass directly to cros_sdk (or None).
+    extra_env: Dictionary of environmental variables to set inside the
+        chroot (or None).
+  """
+  return cros_build_lib.run(cmd, check=False, enter_chroot=True,
+                            chroot_args=chroot_args, extra_env=extra_env,
+                            cwd=constants.SOURCE_ROOT).returncode
+
+
+def RunInsideChroot(command=None, chroot_args=None):
+  """Restart the current command inside the chroot.
+
+  This method is only valid for any code that is run via ScriptWrapperMain.
+  It allows proper cleanup of the local context by raising an exception handled
+  in ScriptWrapperMain.
+
+  Args:
+    command: An instance of CliCommand to be restarted inside the chroot.
+             |command| can be None if you do not wish to modify the log_level.
+    chroot_args: List of command-line arguments to pass to cros_sdk, if invoked.
+  """
+  if cros_build_lib.IsInsideChroot():
+    return
+
+  # Produce the command line to execute inside the chroot.
+  argv = sys.argv[:]
+  argv[0] = path_util.ToChrootPath(argv[0])
+
+  # Set log-level of cros_sdk to be same as log-level of command entering the
+  # chroot.
+  if chroot_args is None:
+    chroot_args = []
+  if command is not None:
+    chroot_args += ['--log-level', command.options.log_level]
+
+  raise ChrootRequiredError(argv, chroot_args)
+
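+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  A chroot-only entry point typically calls
+# RunInsideChroot() first: inside the chroot it is a no-op, while outside it
+# raises ChrootRequiredError, which ScriptWrapperMain() turns into a cros_sdk
+# re-invocation of the same command line.
+def _ExampleChrootOnlyMain(_argv):
+  """Sketch: a main() functor that re-executes itself inside the chroot."""
+  RunInsideChroot()
+  # ... chroot-only work would go here ...
+  return 0
+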
+
+def ReExec():
+  """Restart the current command.
+
+  This method is only valid for any code that is run via ScriptWrapperMain.
+  It allows proper cleanup of the local context by raising an exception handled
+  in ScriptWrapperMain.
+  """
+  # The command to exec.
+  raise ExecRequiredError(sys.argv[:])
+
+
+def ScriptWrapperMain(find_target_func, argv=None,
+                      log_level=logging.DEBUG,
+                      log_format=constants.LOGGER_FMT):
+  """Function usable for chromite.script.* style wrapping.
+
+  Note that this function invokes sys.exit on the way out by default.
+
+  Args:
+    find_target_func: a function which, when given the absolute
+      path the script was invoked via (for example,
+      /home/ferringb/cros/trunk/chromite/bin/cros_sdk; note that any
+      trailing .py from the path name will be removed),
+      will return the main function to invoke (that functor will take
+      a single arg -- a list of arguments -- and shall return either None
+      or an integer to indicate the exit code).
+    argv: sys.argv, or an equivalent tuple for testing.  If nothing is
+      given, sys.argv is defaulted to.
+    log_level: Default logging level to start at.
+    log_format: Default logging format to use.
+  """
+  if argv is None:
+    argv = sys.argv[:]
+  target = os.path.abspath(argv[0])
+  name = os.path.basename(target)
+  if target.endswith('.py'):
+    target = os.path.splitext(target)[0]
+  target = find_target_func(target)
+  if target is None:
+    print('Internal error detected- no main functor found in module %r.' %
+          (name,), file=sys.stderr)
+    sys.exit(100)
+
+  # Set up basic logging information for all modules that use logging.
+  # Note a script target may setup default logging in its module namespace
+  # which will take precedence over this.
+  logger = logging.getLogger()
+  logger.setLevel(log_level)
+  logger_handler = ChromiteStreamHandler()
+  logger_handler.setFormatter(
+      logging.Formatter(fmt=log_format, datefmt=constants.LOGGER_DATE_FMT))
+  logger.addHandler(logger_handler)
+  logging.captureWarnings(True)
+
+  signal.signal(signal.SIGTERM, _DefaultHandler)
+
+  ret = 1
+  try:
+    ret = target(argv[1:])
+  except _ShutDownException as e:
+    sys.stdout.flush()
+    print('%s: Signaled to shutdown: caught %i signal.' % (name, e.signal),
+          file=sys.stderr)
+    sys.stderr.flush()
+  except SystemExit as e:
+    # Right now, let this crash through- longer term, we'll update the scripts
+    # in question to not use sys.exit, and make this into a flagged error.
+    raise
+  except ChrootRequiredError as e:
+    ret = _RestartInChroot(e.cmd, e.chroot_args, e.extra_env)
+  except ExecRequiredError as e:
+    logging.shutdown()
+    # This does not return.
+    os.execv(e.cmd[0], e.cmd)
+  except Exception as e:
+    sys.stdout.flush()
+    print('%s: Unhandled exception:' % (name,), file=sys.stderr)
+    sys.stderr.flush()
+    raise
+  finally:
+    logging.shutdown()
+
+  if ret is None:
+    ret = 0
+  sys.exit(ret)
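+
+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  It shows the shape of the |find_target_func|
+# argument to ScriptWrapperMain(); the 'chromite.scripts' module path is
+# hypothetical.
+def _ExampleFindTarget(target):
+  """Sketch: map an invoked script path to its main() functor.
+
+  |target| is the absolute path the script was invoked via, with any trailing
+  '.py' already stripped, e.g. '/path/to/chromite/bin/cros_sdk'.
+  """
+  module_name = os.path.basename(target)
+  module = __import__('chromite.scripts.%s' % module_name,
+                      fromlist=[module_name])
+  # ScriptWrapperMain() calls the returned functor with argv[1:] and
+  # sys.exit()s with whatever it returns (None is treated as 0).
+  return getattr(module, 'main', None)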
diff --git a/utils/frozen_chromite/lib/config_lib.py b/utils/frozen_chromite/lib/config_lib.py
new file mode 100644
index 0000000..0a47990
--- /dev/null
+++ b/utils/frozen_chromite/lib/config_lib.py
@@ -0,0 +1,2134 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration options for various cbuildbot builders."""
+
+from __future__ import print_function
+
+import copy
+import itertools
+import json
+import numbers
+import os
+import re
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.utils import memoize
+
+GS_PATH_DEFAULT = 'default'  # Means gs://chromeos-image-archive/ + bot_id
+
+# Contains the valid build config suffixes.
+CONFIG_TYPE_RELEASE = 'release'
+CONFIG_TYPE_FULL = 'full'
+CONFIG_TYPE_FIRMWARE = 'firmware'
+CONFIG_TYPE_FACTORY = 'factory'
+CONFIG_TYPE_TOOLCHAIN = 'toolchain'
+
+# DISPLAY labels are used to group related builds together in the GE UI.
+
+DISPLAY_LABEL_TRYJOB = 'tryjob'
+DISPLAY_LABEL_INCREMENATAL = 'incremental'
+DISPLAY_LABEL_FULL = 'full'
+DISPLAY_LABEL_CHROME_INFORMATIONAL = 'chrome_informational'
+DISPLAY_LABEL_INFORMATIONAL = 'informational'
+DISPLAY_LABEL_RELEASE = 'release'
+DISPLAY_LABEL_CHROME_PFQ = 'chrome_pfq'
+DISPLAY_LABEL_MST_ANDROID_PFQ = 'mst_android_pfq'
+DISPLAY_LABEL_VMMST_ANDROID_PFQ = 'vmmst_android_pfq'
+DISPLAY_LABEL_PI_ANDROID_PFQ = 'pi_android_pfq'
+DISPLAY_LABEL_QT_ANDROID_PFQ = 'qt_android_pfq'
+DISPLAY_LABEL_RVC_ANDROID_PFQ = 'rvc_android_pfq'
+DISPLAY_LABEL_VMRVC_ANDROID_PFQ = 'vmrvc_android_pfq'
+DISPLAY_LABEL_FIRMWARE = 'firmware'
+DISPLAY_LABEL_FACTORY = 'factory'
+DISPLAY_LABEL_TOOLCHAIN = 'toolchain'
+DISPLAY_LABEL_UTILITY = 'utility'
+DISPLAY_LABEL_PRODUCTION_TRYJOB = 'production_tryjob'
+
+# This list of constants should be kept in sync with GoldenEye code.
+ALL_DISPLAY_LABEL = {
+    DISPLAY_LABEL_TRYJOB,
+    DISPLAY_LABEL_INCREMENATAL,
+    DISPLAY_LABEL_FULL,
+    DISPLAY_LABEL_CHROME_INFORMATIONAL,
+    DISPLAY_LABEL_INFORMATIONAL,
+    DISPLAY_LABEL_RELEASE,
+    DISPLAY_LABEL_CHROME_PFQ,
+    DISPLAY_LABEL_MST_ANDROID_PFQ,
+    DISPLAY_LABEL_VMMST_ANDROID_PFQ,
+    DISPLAY_LABEL_PI_ANDROID_PFQ,
+    DISPLAY_LABEL_QT_ANDROID_PFQ,
+    DISPLAY_LABEL_RVC_ANDROID_PFQ,
+    DISPLAY_LABEL_VMRVC_ANDROID_PFQ,
+    DISPLAY_LABEL_FIRMWARE,
+    DISPLAY_LABEL_FACTORY,
+    DISPLAY_LABEL_TOOLCHAIN,
+    DISPLAY_LABEL_UTILITY,
+    DISPLAY_LABEL_PRODUCTION_TRYJOB,
+}
+
+# These values must be kept in sync with the ChromeOS LUCI builders.
+#
+# https://chrome-internal.googlesource.com/chromeos/
+#     infra/config/+/refs/heads/master/luci/cr-buildbucket.cfg
+LUCI_BUILDER_FACTORY = 'Factory'
+LUCI_BUILDER_FULL = 'Full'
+LUCI_BUILDER_INCREMENTAL = 'Incremental'
+LUCI_BUILDER_INFORMATIONAL = 'Informational'
+LUCI_BUILDER_INFRA = 'Infra'
+LUCI_BUILDER_LEGACY_RELEASE = 'LegacyRelease'
+LUCI_BUILDER_PFQ = 'PFQ'
+LUCI_BUILDER_RAPID = 'Rapid'
+LUCI_BUILDER_RELEASE = 'Release'
+LUCI_BUILDER_STAGING = 'Staging'
+LUCI_BUILDER_TRY = 'Try'
+
+ALL_LUCI_BUILDER = {
+    LUCI_BUILDER_FACTORY,
+    LUCI_BUILDER_FULL,
+    LUCI_BUILDER_INCREMENTAL,
+    LUCI_BUILDER_INFORMATIONAL,
+    LUCI_BUILDER_INFRA,
+    LUCI_BUILDER_LEGACY_RELEASE,
+    LUCI_BUILDER_PFQ,
+    LUCI_BUILDER_RAPID,
+    LUCI_BUILDER_RELEASE,
+    LUCI_BUILDER_STAGING,
+    LUCI_BUILDER_TRY,
+}
+
+
+def isTryjobConfig(build_config):
+    """Is a given build config a tryjob config, or a production config?
+
+  Args:
+    build_config: A fully populated instance of BuildConfig.
+
+  Returns:
+    Boolean. True if it's a tryjob config.
+  """
+    return build_config.luci_builder in [LUCI_BUILDER_RAPID, LUCI_BUILDER_TRY]
+
+# In the Json, this special build config holds the default values for all
+# other configs.
+DEFAULT_BUILD_CONFIG = '_default'
+
+# Constants for config template file
+CONFIG_TEMPLATE_BOARDS = 'boards'
+CONFIG_TEMPLATE_NAME = 'name'
+CONFIG_TEMPLATE_EXPERIMENTAL = 'experimental'
+CONFIG_TEMPLATE_LEADER_BOARD = 'leader_board'
+CONFIG_TEMPLATE_BOARD_GROUP = 'board_group'
+CONFIG_TEMPLATE_BUILDER = 'builder'
+CONFIG_TEMPLATE_RELEASE = 'RELEASE'
+CONFIG_TEMPLATE_CONFIGS = 'configs'
+CONFIG_TEMPLATE_ARCH = 'arch'
+CONFIG_TEMPLATE_RELEASE_BRANCH = 'release_branch'
+CONFIG_TEMPLATE_REFERENCE_BOARD_NAME = 'reference_board_name'
+CONFIG_TEMPLATE_MODELS = 'models'
+CONFIG_TEMPLATE_MODEL_NAME = 'name'
+CONFIG_TEMPLATE_MODEL_BOARD_NAME = 'board_name'
+CONFIG_TEMPLATE_MODEL_TEST_SUITES = 'test_suites'
+CONFIG_TEMPLATE_MODEL_CQ_TEST_ENABLED = 'cq_test_enabled'
+
+CONFIG_X86_INTERNAL = 'X86_INTERNAL'
+CONFIG_X86_EXTERNAL = 'X86_EXTERNAL'
+CONFIG_ARM_INTERNAL = 'ARM_INTERNAL'
+CONFIG_ARM_EXTERNAL = 'ARM_EXTERNAL'
+
+
+def IsCanaryMaster(builder_run):
+    """Returns True if this build type is master-release"""
+    return (builder_run.config.build_type == constants.CANARY_TYPE
+            and builder_run.config.master
+            and builder_run.manifest_branch == 'master')
+
+
+def IsPFQType(b_type):
+    """Returns True if this build type is a PFQ."""
+    return b_type in (constants.PFQ_TYPE, constants.ANDROID_PFQ_TYPE)
+
+
+def IsCanaryType(b_type):
+    """Returns True if this build type is a Canary."""
+    return b_type == constants.CANARY_TYPE
+
+
+def IsMasterAndroidPFQ(config):
+    """Returns True if this build is master Android PFQ type."""
+    return config.build_type == constants.ANDROID_PFQ_TYPE and config.master
+
+
+def GetHWTestEnv(builder_run_config, model_config=None, suite_config=None):
+    """Return the env of a suite to run for a given build/model.
+
+  Args:
+    builder_run_config: The BuildConfig object inside a BuilderRun object.
+    model_config: A ModelTestConfig object to test against.
+    suite_config: A HWTestConfig object to test against.
+
+  Returns:
+    A string variable to indicate the hwtest environment.
+  """
+    enable_suite = True if suite_config is None else suite_config.enable_skylab
+    enable_model = True if model_config is None else model_config.enable_skylab
+    if (builder_run_config.enable_skylab_hw_tests and enable_suite
+                and enable_model):
+        return constants.ENV_SKYLAB
+
+    return constants.ENV_AUTOTEST
+
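+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  Skylab is selected only when the build config, the
+# suite and the model all allow it; any one of them opting out falls back to
+# autotest.  The suite name is hypothetical.
+def _ExampleGetHWTestEnv():
+    """Sketch: how GetHWTestEnv() combines the three opt-in flags."""
+    build_config = BuildConfig(DefaultSettings(),
+                               enable_skylab_hw_tests=True)
+    suite = HWTestConfig('bvt-inline', enable_skylab=False)
+    # The suite opts out of skylab, so despite the build config opting in
+    # this is expected to return constants.ENV_AUTOTEST.
+    return GetHWTestEnv(build_config, suite_config=suite)
+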
+
+class AttrDict(dict):
+    """Dictionary with 'attribute' access.
+
+  This is identical to a dictionary, except that string keys can be addressed as
+  read-only attributes.
+  """
+
+    def __getattr__(self, name):
+        """Support attribute-like access to each dict entry."""
+        if name in self:
+            return self[name]
+
+        # Super class (dict) has no __getattr__ method, so use __getattribute__.
+        return super(AttrDict, self).__getattribute__(name)
+
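+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  AttrDict simply lets string keys be read as
+# attributes, which is how BuildConfig settings are accessed throughout
+# cbuildbot; the keys shown are hypothetical.
+def _ExampleAttrDict():
+    """Sketch: attribute-style reads on an AttrDict."""
+    d = AttrDict(board='eve', important=True)
+    # Both spellings read the same entry; writes still use dict syntax.
+    return d.board == d['board'] and d.important
+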
+
+class BuildConfig(AttrDict):
+    """Dictionary of explicit configuration settings for a cbuildbot config
+
+  Each dictionary entry is in turn a dictionary of config_param->value.
+
+  See DefaultSettings for details on known configurations, and their
+  documentation.
+  """
+
+    def deepcopy(self):
+        """Create a deep copy of this object.
+
+    This is a specialized version of copy.deepcopy() for BuildConfig objects. It
+    speeds up deep copies by 10x because we know in advance what is stored
+    inside a BuildConfig object and don't have to do as much introspection. This
+    function is called a lot during setup of the config objects so optimizing it
+    makes a big difference. (It saves seconds off the load time of this module!)
+    """
+        result = BuildConfig(self)
+
+        # Here is where we handle all values that need deepcopy instead of shallow.
+        for k, v in result.items():
+            if v is not None:
+                if k == 'child_configs':
+                    result[k] = [x.deepcopy() for x in v]
+                elif k in ('vm_tests', 'vm_tests_override', 'hw_tests',
+                           'hw_tests_override', 'tast_vm_tests'):
+                    result[k] = [copy.copy(x) for x in v]
+                # type(v) is faster than isinstance.
+                elif type(v) is list:  # pylint: disable=unidiomatic-typecheck
+                    result[k] = v[:]
+
+        return result
+
+    def apply(self, *args, **kwargs):
+        """Apply changes to this BuildConfig.
+
+    Note: If an override is callable, it will be called and passed the prior
+    value for the given key (or None) to compute the new value.
+
+    Args:
+      args: Dictionaries or templates to update this config with.
+      kwargs: Settings to inject; see DefaultSettings for valid values.
+
+    Returns:
+      self after changes are applied.
+    """
+        inherits = list(args)
+        inherits.append(kwargs)
+
+        for update_config in inherits:
+            for name, value in update_config.items():
+                if callable(value):
+                    # If we are applying to a fixed value, we resolve to a fixed value.
+                    # Otherwise, we save off a callable to apply later, perhaps with
+                    # nested callables (IE: we curry them). This allows us to use
+                    # callables in templates, and apply templates to each other and still
+                    # get the expected result when we use them later on.
+                    #
+                    # Delaying the resolution of callables is safe, because "Add()" always
+                    # applies against the default, which has fixed values for everything.
+
+                    if name in self:
+                        # apply it to the current value.
+                        if callable(self[name]):
+                            # If we have no fixed value to resolve with, stack the callables.
+                            def stack(new_callable, old_callable):
+                                """Helper method to isolate namespace for closure."""
+                                return lambda fixed: new_callable(
+                                        old_callable(fixed))
+
+                            self[name] = stack(value, self[name])
+                        else:
+                            # If the current value was a fixed value, apply the callable.
+                            self[name] = value(self[name])
+                    else:
+                        # If we had no value to apply it to, save it for later.
+                        self[name] = value
+
+                elif name == '_template':
+                    # We never apply _template. You have to set it through Add.
+                    pass
+
+                else:
+                    # Simple values overwrite whatever we do or don't have.
+                    self[name] = value
+
+        return self
+
+    def derive(self, *args, **kwargs):
+        """Create a new config derived from this one.
+
+    Note: If an override is callable, it will be called and passed the prior
+    value for the given key (or None) to compute the new value.
+
+    Args:
+      args: Mapping instances to mixin.
+      kwargs: Settings to inject; see DefaultSettings for valid values.
+
+    Returns:
+      A new _config instance.
+    """
+        return self.deepcopy().apply(*args, **kwargs)
+
+    def AddSlave(self, slave):
+        """Assign slave config(s) to a build master.
+
+    A helper for adding slave configs to a master config.
+    """
+        assert self.master
+        if self['slave_configs'] is None:
+            self['slave_configs'] = []
+        self.slave_configs.append(slave.name)
+        self.slave_configs.sort()
+
+    def AddSlaves(self, slaves):
+        """Assign slave config(s) to a build master.
+
+    A helper for adding slave configs to a master config.
+    """
+        assert self.master
+        if self['slave_configs'] is None:
+            self['slave_configs'] = []
+        self.slave_configs.extend(slave_config.name for slave_config in slaves)
+        self.slave_configs.sort()
+
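+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  derive() deep-copies this config and then apply()s
+# the overrides; a callable override receives the prior value, which is how
+# templates extend list-valued settings instead of replacing them.  The
+# config name and useflags are hypothetical.
+def _ExampleDeriveConfig():
+    """Sketch: derive a config with fixed and callable overrides."""
+    base = BuildConfig(DefaultSettings(), name='base',
+                       useflags=['-cros-debug'])
+    derived = base.derive(
+            name='base-tryjob',
+            useflags=lambda prior: prior + ['chrome_internal'])
+    # derived.useflags is expected to be ['-cros-debug', 'chrome_internal'],
+    # while |base| is left unchanged.
+    return derived
+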
+
+class VMTestConfig(object):
+    """Config object for virtual machine tests suites.
+
+  Attributes:
+    test_type: Test type to be run.
+    test_suite: Test suite to be run in VMTest.
+    timeout: Number of seconds to wait before timing out waiting for
+             results.
+    retry: Whether we should retry tests that fail in a suite run.
+    max_retries: Integer, maximum job retries allowed at suite level.
+                 None for no max.
+    warn_only: Boolean, failure on VM tests warns only.
+    use_ctest: Use the old ctest code path rather than the new chromite one.
+  """
+    DEFAULT_TEST_TIMEOUT = 90 * 60
+
+    def __init__(self,
+                 test_type,
+                 test_suite=None,
+                 timeout=DEFAULT_TEST_TIMEOUT,
+                 retry=False,
+                 max_retries=constants.VM_TEST_MAX_RETRIES,
+                 warn_only=False,
+                 use_ctest=True):
+        """Constructor -- see members above."""
+        self.test_type = test_type
+        self.test_suite = test_suite
+        self.timeout = timeout
+        self.retry = retry
+        self.max_retries = max_retries
+        self.warn_only = warn_only
+        self.use_ctest = use_ctest
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
+
+class GCETestConfig(object):
+    """Config object for GCE tests suites.
+
+  Attributes:
+    test_type: Test type to be run.
+    test_suite: Test suite to be run in GCETest.
+    timeout: Number of seconds to wait before timing out waiting for
+             results.
+    use_ctest: Use the old ctest code path rather than the new chromite one.
+  """
+    DEFAULT_TEST_TIMEOUT = 60 * 60
+
+    def __init__(self,
+                 test_type,
+                 test_suite=None,
+                 timeout=DEFAULT_TEST_TIMEOUT,
+                 use_ctest=True):
+        """Constructor -- see members above."""
+        self.test_type = test_type
+        self.test_suite = test_suite
+        self.timeout = timeout
+        self.use_ctest = use_ctest
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
+
+class TastVMTestConfig(object):
+    """Config object for a Tast virtual-machine-based test suite.
+
+  Attributes:
+    name: String containing short human-readable name describing test suite.
+    test_exprs: List of string expressions describing which tests to run; this
+                is passed directly to the 'tast run' command. See
+                https://goo.gl/UPNEgT for info about test expressions.
+    timeout: Number of seconds to wait before timing out waiting for
+             results.
+  """
+    DEFAULT_TEST_TIMEOUT = 60 * 60
+
+    def __init__(self, suite_name, test_exprs, timeout=DEFAULT_TEST_TIMEOUT):
+        """Constructor -- see members above."""
+        # This is an easy mistake to make and results in confusing errors later when
+        # a list of one-character strings gets passed to the tast command.
+        if not isinstance(test_exprs, list):
+            raise TypeError('test_exprs must be list of strings')
+        self.suite_name = suite_name
+        self.test_exprs = test_exprs
+        self.timeout = timeout
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
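+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  It shows the mistake the TypeError above guards
+# against; the suite name and test expression are hypothetical.
+def _ExampleTastVMTest():
+    """Sketch: a correctly specified Tast VM suite."""
+    # Right: a list of test expressions, even when there is only one.
+    config = TastVMTestConfig('tast_vm_example', ['(bvt && !informational)'])
+    # Wrong: TastVMTestConfig('tast_vm_example', '(bvt && !informational)')
+    # would raise TypeError rather than silently treating the string as a
+    # list of one-character "tests".
+    return config
+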
+
+class MoblabVMTestConfig(object):
+    """Config object for moblab tests suites.
+
+  Attributes:
+    test_type: Test type to be run.
+    timeout: Number of seconds to wait before timing out waiting for
+             results.
+  """
+    DEFAULT_TEST_TIMEOUT = 60 * 60
+
+    def __init__(self, test_type, timeout=DEFAULT_TEST_TIMEOUT):
+        """Constructor -- see members above."""
+        self.test_type = test_type
+        self.timeout = timeout
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
+
+class ModelTestConfig(object):
+    """Model specific config that controls which test suites are executed.
+
+  Attributes:
+    name: The name of the model that will be tested (matches model label)
+    lab_board_name: The name of the board in the lab (matches board label)
+    test_suites: List of hardware test suites that will be executed.
+  """
+
+    def __init__(self,
+                 name,
+                 lab_board_name,
+                 test_suites=None,
+                 enable_skylab=True):
+        """Constructor -- see members above."""
+        self.name = name
+        self.lab_board_name = lab_board_name
+        self.test_suites = test_suites
+        self.enable_skylab = enable_skylab
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
+
+class HWTestConfig(object):
+    """Config object for hardware tests suites.
+
+  Attributes:
+    suite: Name of the test suite to run.
+    timeout: Number of seconds to wait before timing out waiting for
+             results.
+    pool: Pool to use for hw testing.
+    blocking: Setting this to true requires that this suite PASS for suites
+              scheduled after it to run. This also means any suites scheduled
+              before a blocking one effectively block the suites scheduled
+              after it. Use this when you want some suites to gate whether
+              others run, e.g. only run longer-running suites if some core
+              ones pass first.
+
+              Note, if you want multiple suites to block other suites but run
+              in parallel, you should only mark the last one scheduled as
+              blocking (it effectively serves as a thread/process join).
+    async: Fire-and-forget suite.
+    warn_only: Failure on HW tests warns only (does not generate error).
+    critical: Usually we consider structural failures here as OK.
+    priority:  Priority at which tests in the suite will be scheduled in
+               the hw lab.
+    file_bugs: Should we file bugs if a test fails in a suite run.
+    minimum_duts: minimum number of DUTs required for testing in the hw lab.
+    retry: Whether we should retry tests that fail in a suite run.
+    max_retries: Integer, maximum job retries allowed at suite level.
+                 None for no max.
+    suite_min_duts: Preferred minimum duts. Lab will prioritize on getting such
+                    number of duts even if the suite is competing with
+                    other suites that have higher priority.
+    suite_args: Arguments passed to the suite.  This should be a dict
+                representing keyword arguments.  The value is marshalled
+                using repr(), so the dict values should be basic types.
+    quota_account: The quotascheduler account to use for all tests in this
+                   suite.
+
+  Some combinations of member settings are invalid:
+    * A suite config may not specify both blocking and async.
+    * A suite config may not specify both warn_only and critical.
+  """
+    _MINUTE = 60
+    _HOUR = 60 * _MINUTE
+    _DAY = 24 * _HOUR
+    # CTS timeout ~ 2 * expected runtime in case other tests are using the CTS
+    # pool.
+    # Must not exceed the buildbucket build timeout set at
+    # https://chrome-internal.googlesource.com/chromeos/infra/config/+/8f12edac54383831aaed9ed1819ef909a66ecc97/testplatform/main.star#90
+    CTS_QUAL_HW_TEST_TIMEOUT = int(1 * _DAY + 18 * _HOUR)
+    # GTS runs faster than CTS. But to avoid starving GTS by CTS we set both
+    # timeouts equal.
+    GTS_QUAL_HW_TEST_TIMEOUT = CTS_QUAL_HW_TEST_TIMEOUT
+    SHARED_HW_TEST_TIMEOUT = int(3.0 * _HOUR)
+    PALADIN_HW_TEST_TIMEOUT = int(2.0 * _HOUR)
+    BRANCHED_HW_TEST_TIMEOUT = int(10.0 * _HOUR)
+
+    # TODO(jrbarnette) Async HW test phases complete within seconds;
+    # however, the tests they start can require hours to complete.
+    # Chromite code doesn't distinguish "timeout for Autotest" from
+    # timeout in the builder.  This is WRONG WRONG WRONG.  But, until
+    # there's a better fix, we'll allow these phases hours to fail.
+    ASYNC_HW_TEST_TIMEOUT = int(250.0 * _MINUTE)
+
+    def __init__(self,
+                 suite,
+                 pool=constants.HWTEST_QUOTA_POOL,
+                 timeout=SHARED_HW_TEST_TIMEOUT,
+                 warn_only=False,
+                 critical=False,
+                 blocking=False,
+                 file_bugs=False,
+                 priority=constants.HWTEST_BUILD_PRIORITY,
+                 retry=True,
+                 max_retries=constants.HWTEST_MAX_RETRIES,
+                 minimum_duts=0,
+                 suite_min_duts=0,
+                 suite_args=None,
+                 offload_failures_only=False,
+                 enable_skylab=True,
+                 quota_account=constants.HWTEST_QUOTA_ACCOUNT_BVT,
+                 **kwargs):
+        """Constructor -- see members above."""
+        # Python 3.7+ made async a reserved keyword.
+        asynchronous = kwargs.pop('async', False)
+        setattr(self, 'async', asynchronous)
+        assert not kwargs, 'Excess kwargs found: %s' % (kwargs, )
+
+        assert not asynchronous or not blocking, '%s is async and blocking' % suite
+        assert not warn_only or not critical
+        self.suite = suite
+        self.pool = pool
+        self.timeout = timeout
+        self.blocking = blocking
+        self.warn_only = warn_only
+        self.critical = critical
+        self.file_bugs = file_bugs
+        self.priority = priority
+        self.retry = retry
+        self.max_retries = max_retries
+        self.minimum_duts = minimum_duts
+        self.suite_min_duts = suite_min_duts
+        self.suite_args = suite_args
+        self.offload_failures_only = offload_failures_only
+        # Usually whether to run in skylab is controlled by
+        # 'enable_skylab_hw_tests' in the build config. But for some
+        # particular suites, we want to exclude them from Skylab even if the
+        # build config is migrated to Skylab.
+        self.enable_skylab = enable_skylab
+        self.quota_account = quota_account
+
+    def _SetCommonBranchedValues(self):
+        """Set the common values for branched builds."""
+        self.timeout = max(HWTestConfig.BRANCHED_HW_TEST_TIMEOUT, self.timeout)
+
+        # Set minimum_duts default to 0, which means that lab will not check the
+        # number of available duts to meet the minimum requirement before creating
+        # a suite job for branched build.
+        self.minimum_duts = 0
+
+    def SetBranchedValuesForSkylab(self):
+        """Set suite values for branched builds for skylab."""
+        self._SetCommonBranchedValues()
+
+        if (constants.SKYLAB_HWTEST_PRIORITIES_MAP[self.priority] <
+                    constants.SKYLAB_HWTEST_PRIORITIES_MAP[
+                            constants.HWTEST_DEFAULT_PRIORITY]):
+            self.priority = constants.HWTEST_DEFAULT_PRIORITY
+
+    def SetBranchedValues(self):
+        """Changes the HW Test timeout/priority values to branched values."""
+        self._SetCommonBranchedValues()
+
+        # Only reduce priority if it's lower.
+        new_priority = constants.HWTEST_PRIORITIES_MAP[
+                constants.HWTEST_DEFAULT_PRIORITY]
+        if isinstance(self.priority, numbers.Integral):
+            self.priority = min(self.priority, new_priority)
+        elif constants.HWTEST_PRIORITIES_MAP[self.priority] > new_priority:
+            self.priority = new_priority
+
+    @property
+    def timeout_mins(self):
+        return self.timeout // 60
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
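+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  Because 'async' became a reserved keyword in
+# Python 3.7, it can only be passed via **kwargs (the constructor pops it)
+# and it is mutually exclusive with 'blocking'.  The suite names are
+# hypothetical.
+def _ExampleHWTestConfigs():
+    """Sketch: a blocking gate suite and a fire-and-forget follow-up."""
+    gate = HWTestConfig('bvt-inline', blocking=True)
+    followup = HWTestConfig('bvt-perbuild', **{'async': True})
+    # Reading the flag back needs getattr() for the same keyword reason.
+    return gate.blocking, getattr(followup, 'async')
+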
+
+class NotificationConfig(object):
+    """Config object for defining notification settings.
+
+  Attributes:
+    email: Email address that receives failure notifications.
+    threshold: Number of consecutive failures that should occur in order to
+              be notified. This number should be greater than or equal to 1. If
+              none is specified, default is 1.
+    template: Email template luci-notify should use when sending the email
+              notification. If none is specified, uses the default template.
+  """
+    DEFAULT_TEMPLATE = 'legacy_release'
+    DEFAULT_THRESHOLD = 1
+
+    def __init__(self,
+                 email,
+                 threshold=DEFAULT_THRESHOLD,
+                 template=DEFAULT_TEMPLATE):
+        """Constructor -- see members above."""
+        self.email = email
+        self.threshold = threshold
+        self.template = template
+
+    @property
+    def email_notify(self):
+        return {'email': self.email, 'template': self.template}
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
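+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  email_notify packages one recipient into the dict
+# form used for the luci-notify email properties; the address is
+# hypothetical.
+def _ExampleNotificationConfig():
+    """Sketch: the notification property emitted for one recipient."""
+    config = NotificationConfig('build-sheriffs@example.com', threshold=2)
+    # Expected: {'email': 'build-sheriffs@example.com',
+    #            'template': 'legacy_release'}
+    return config.email_notify
+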
+
+def DefaultSettings():
+    # Enumeration of valid settings; any/all config settings must be in this.
+    # All settings must be documented.
+    return dict(
+            # The name of the template we inherit settings from.
+            _template=None,
+
+            # The name of the config.
+            name=None,
+
+            # A list of boards to build.
+            boards=None,
+
+            # A list of ModelTestConfig objects that represent all of the models
+            # supported by a given unified build and their corresponding test config.
+            models=[],
+
+            # This value defines what part of the Golden Eye UI is responsible for
+            # displaying builds of this build config. The value is required, and
+            # must be in ALL_DISPLAY_LABEL.
+            # TODO: Make the value required after crbug.com/776955 is finished.
+            display_label=None,
+
+            # This defines which LUCI Builder to use. It must match an entry in:
+            #
+            # https://chrome-internal.git.corp.google.com/chromeos/
+            #    manifest-internal/+/infra/config/cr-buildbucket.cfg
+            #
+            luci_builder=LUCI_BUILDER_LEGACY_RELEASE,
+
+            # The profile of the variant to set up and build.
+            profile=None,
+
+            # This bot pushes changes to the overlays.
+            master=False,
+
+            # A basic_builder is a special configuration which does not perform tests
+            # or mutate external config.
+            basic_builder=False,
+
+            # If this bot triggers slave builds, this will contain a list of
+            # slave config names.
+            slave_configs=None,
+
+            # If False, this flag indicates that the CQ should not check whether
+            # this bot passed or failed. Set this to False if you are setting up a
+            # new bot. Once the bot is on the waterfall and is consistently green,
+            # mark the builder as important=True.
+            important=True,
+
+            # If True, build config should always be run as if --debug was set
+            # on the cbuildbot command line. This is different from 'important'
+            # and is usually correlated with tryjob build configs.
+            debug=False,
+
+            # If True, use the debug instance of CIDB instead of prod.
+            debug_cidb=False,
+
+            # Timeout for the build as a whole (in seconds).
+            build_timeout=(5 * 60 + 30) * 60,
+
+            # A list of NotificationConfig objects describing who to notify of builder
+            # failures.
+            notification_configs=[],
+
+            # An integer. If this builder fails this many times consecutively, send
+            # an alert email to the recipients health_alert_recipients. This does
+            # not apply to tryjobs. This feature is similar to the ERROR_WATERMARK
+            # feature of upload_symbols, and it may make sense to merge the features
+            # at some point.
+            health_threshold=0,
+
+            # List of email addresses to send health alerts to for this builder. It
+            # supports automatic email address lookup for the following sheriff
+            # types:
+            #     'tree': tree sheriffs
+            #     'chrome': chrome gardeners
+            health_alert_recipients=[],
+
+            # Whether this is an internal build config.
+            internal=False,
+
+            # Whether this is a branched build config. Used for pfq logic.
+            branch=False,
+
+            # The name of the manifest to use. E.g., to use the buildtools manifest,
+            # specify 'buildtools'.
+            manifest=constants.DEFAULT_MANIFEST,
+
+            # emerge use flags to use while setting up the board, building packages,
+            # making images, etc.
+            useflags=[],
+
+            # Set the variable CHROMEOS_OFFICIAL for the build. Known to affect
+            # parallel_emerge, cros_set_lsb_release, and chromeos_version.sh. See
+            # bug chromium-os:14649
+            chromeos_official=False,
+
+            # Use binary packages for building the toolchain. (emerge --getbinpkg)
+            usepkg_toolchain=True,
+
+            # Use binary packages for build_packages and setup_board.
+            usepkg_build_packages=True,
+
+            # Does this profile need to sync chrome?  If None, we guess based on
+            # other factors.  If True/False, we always do that.
+            sync_chrome=None,
+
+            # Use the newest ebuilds for all the toolchain packages.
+            latest_toolchain=False,
+
+            # This is only valid when latest_toolchain is True. If you set this to a
+            # commit-ish, the gcc ebuild will use it to build the toolchain
+            # compiler.
+            gcc_githash=None,
+
+            # Wipe and replace the board inside the chroot.
+            board_replace=False,
+
+            # Wipe and replace chroot, but not source.
+            chroot_replace=True,
+
+            # Create the chroot on a loopback-mounted chroot.img instead of a bare
+            # directory.  Required for snapshots; otherwise optional.
+            chroot_use_image=True,
+
+            # Uprevs the local ebuilds to build new changes since the last stable
+            # build.  If master then also pushes these changes on success. Note that
+            # we uprev on just about every bot config because it gives us a more
+            # deterministic build system (the tradeoff being that some bots build
+            # from source more frequently than if they never did an uprev). This way
+            # the release/factory/etc... builders will pick up changes that devs
+            # pushed before it runs, but after the corresponding PFQ bot ran (which
+            # is what creates+uploads binpkgs).  The incremental bots are about the
+            # only ones that don't uprev because they mimic the flow a developer
+            # goes through on their own local systems.
+            uprev=True,
+
+            # Select what overlays to look at for revving and prebuilts. This can be
+            # any constants.VALID_OVERLAYS.
+            overlays=constants.PUBLIC_OVERLAYS,
+
+            # Select what overlays to push at. This should be a subset of overlays
+            # for the particular builder.  Must be None if not a master.  There
+            # should only be one master bot pushing changes to each overlay per
+            # branch.
+            push_overlays=None,
+
+            # Uprev Android, values of 'latest_release', or None.
+            android_rev=None,
+
+            # Which Android branch build do we try to uprev from.
+            android_import_branch=None,
+
+            # Android package name.
+            android_package=None,
+
+            # Uprev Chrome, values of 'tot', 'stable_release', or None.
+            chrome_rev=None,
+
+            # Exit the builder right after checking compilation.
+            # TODO(mtennant): Should be something like "compile_check_only".
+            compilecheck=False,
+
+            # If True, run DebugInfoTest stage.
+            debuginfo_test=False,
+
+            # Runs the tests that the signer would run. This should only be set if
+            # 'recovery' is in images.
+            signer_tests=False,
+
+            # Runs unittests for packages.
+            unittests=True,
+
+            # A list of the packages to blacklist from unittests.
+            unittest_blacklist=[],
+
+            # Generates AFDO data. Will capture a profile of chrome using a hwtest
+            # to run a predetermined set of benchmarks.
+            # FIXME(tcwang): Keep this config during transition to async AFDO
+            afdo_generate=False,
+
+            # Generates AFDO data asynchronously. Will capture a profile of chrome
+            # using a hwtest to run a predetermined set of benchmarks.
+            afdo_generate_async=False,
+
+            # Verify and publish kernel profiles.
+            kernel_afdo_verify=False,
+
+            # Verify and publish chrome profiles.
+            chrome_afdo_verify=False,
+
+            # Generate Chrome orderfile. Will build Chrome with C3 ordering and
+            # generate an orderfile for uploading as a result.
+            orderfile_generate=False,
+
+            # Verify unvetted Chrome orderfile. Will use the most recent unvetted
+            # orderfile and build Chrome. Upload the orderfile to vetted bucket
+            # as a result.
+            orderfile_verify=False,
+
+            # Generates AFDO data, builds the minimum amount of artifacts and
+            # assumes a non-distributed builder (i.e.: the whole process in a single
+            # builder).
+            afdo_generate_min=False,
+
+            # Update the Chrome ebuild with the AFDO profile info.
+            afdo_update_chrome_ebuild=False,
+
+            # Update the kernel ebuild with the AFDO profile info.
+            afdo_update_kernel_ebuild=False,
+
+            # Uses AFDO data. The Chrome build will be optimized using the AFDO
+            # profile information found in Chrome's source tree.
+            afdo_use=True,
+
+            # A list of VMTestConfig objects to run by default.
+            vm_tests=[
+                    VMTestConfig(constants.VM_SUITE_TEST_TYPE,
+                                 test_suite='smoke'),
+                    VMTestConfig(constants.SIMPLE_AU_TEST_TYPE)
+            ],
+
+            # A list of all VMTestConfig objects to use if VM Tests are forced on
+            # (--vmtest command line or trybot). None means no override.
+            vm_tests_override=None,
+
+            # If true, in addition to uploading vm test results to the artifact
+            # folder, report results to other dashboards as well.
+            vm_test_report_to_dashboards=False,
+
+            # The number of times to run the VMTest stage. If this is >1, then we
+            # will run the stage this many times, stopping if we encounter any
+            # failures.
+            vm_test_runs=1,
+
+            # If True, run SkylabHWTestStage instead of HWTestStage for suites that
+            # use pools other than pool:cts.
+            enable_skylab_hw_tests=False,
+
+            # If set, this is the URL of the bug justifying why hw_tests are disabled
+            # on a builder that should always have hw_tests.
+            hw_tests_disabled_bug='',
+
+            # If True, run SkylabHWTestStage instead of HWTestStage for suites that
+            # use pool:cts.
+            enable_skylab_cts_hw_tests=False,
+
+            # A list of HWTestConfig objects to run.
+            hw_tests=[],
+
+            # A list of all HWTestConfig objects to use if HW Tests are forced on
+            # (--hwtest command line or trybot). None means no override.
+            hw_tests_override=None,
+
+            # If true, uploads artifacts for hw testing. Upload payloads for test
+            # image if the image is built. If not, dev image is used and then base
+            # image.
+            upload_hw_test_artifacts=True,
+
+            # If true, uploads individual image tarballs.
+            upload_standalone_images=True,
+
+            # A list of GCETestConfig objects to use. Currently only some lakitu
+            # builders run gce tests.
+            gce_tests=[],
+
+            # Whether to run CPEExport stage. This stage generates portage depgraph
+            # data that is used for bugs reporting (see go/why-cpeexport). Only
+            # release builders should run this stage.
+            run_cpeexport=False,
+
+            # Whether to run BuildConfigsExport stage. This stage generates build
+            # configs (see crbug.com/974795 project). Only release builders should
+            # run this stage.
+            run_build_configs_export=False,
+
+            # A list of TastVMTestConfig objects describing Tast-based test suites
+            # that should be run in a VM.
+            tast_vm_tests=[],
+
+            # Default to not run moblab tests. Currently the blessed moblab board runs
+            # these tests.
+            moblab_vm_tests=[],
+
+            # List of patterns for portage packages for which stripped binpackages
+            # should be uploaded to GS. The patterns are used to search for packages
+            # via `equery list`.
+            upload_stripped_packages=[
+                    # Used by SimpleChrome workflow.
+                    'chromeos-base/chromeos-chrome',
+                    'sys-kernel/*kernel*',
+            ],
+
+            # Google Storage path to offload files to.
+            #   None - No upload
+            #   GS_PATH_DEFAULT - 'gs://chromeos-image-archive/' + bot_id
+            #   value - Upload to explicit path
+            gs_path=GS_PATH_DEFAULT,
+
+            # TODO(sosa): Deprecate binary.
+            # Type of builder.  Check constants.VALID_BUILD_TYPES.
+            build_type=constants.PFQ_TYPE,
+
+            # Whether to schedule test suites by suite_scheduler. Generally only
+            # True for "release" builders.
+            suite_scheduling=False,
+
+            # The class name used to build this config.  See the modules in
+            # cbuildbot / builders/*_builders.py for possible values.  This should
+            # be the name in string form -- e.g. "simple_builders.SimpleBuilder" to
+            # get the SimpleBuilder class in the simple_builders module.  If not
+            # specified, we'll fallback to legacy probing behavior until everyone
+            # has been converted (see the scripts/cbuildbot.py file for details).
+            builder_class_name=None,
+
+            # List of images we want to build -- see build_image for more details.
+            images=['test'],
+
+            # Image from which we will build update payloads.  Must either be None
+            # or name one of the images in the 'images' list, above.
+            payload_image=None,
+
+            # Whether to build a netboot image.
+            factory_install_netboot=True,
+
+            # Whether to build the factory toolkit.
+            factory_toolkit=True,
+
+            # Whether to build factory packages in BuildPackages.
+            factory=True,
+
+            # Flag to control if all packages for the target are built. If disabled
+            # and unittests are enabled, the unit tests and their dependencies
+            # will still be built during the testing stage.
+            build_packages=True,
+
+            # Tuple of specific packages we want to build.  Most configs won't
+            # specify anything here and instead let build_packages calculate.
+            packages=[],
+
+            # Do we push a final release image to chromeos-images.
+            push_image=False,
+
+            # Do we upload debug symbols.
+            upload_symbols=False,
+
+            # Whether we upload a hwqual tarball.
+            hwqual=False,
+
+            # Run a stage that generates release payloads for signed images.
+            paygen=False,
+
+            # If the paygen stage runs, generate tests, and schedule auto-tests for
+            # them.
+            paygen_skip_testing=False,
+
+            # If the paygen stage runs, don't generate any delta payloads. This is
+            # only done if deltas are broken for a given board.
+            paygen_skip_delta_payloads=False,
+
+            # Run a stage that generates and uploads package CPE information.
+            cpe_export=True,
+
+            # Run a stage that generates and uploads debug symbols.
+            debug_symbols=True,
+
+            # Do not package the debug symbols in the binary package. The debug
+            # symbols will be in an archive with the name cpv.debug.tbz2 in
+            # /build/${BOARD}/packages and uploaded with the prebuilt.
+            separate_debug_symbols=True,
+
+            # Include *.debug files for debugging core files with gdb in debug.tgz.
+            # These are very large. This option only has an effect if debug_symbols
+            # and archive are set.
+            archive_build_debug=False,
+
+            # Run a stage that archives build and test artifacts for developer
+            # consumption.
+            archive=True,
+
+            # Git repository URL for our manifests.
+            #  https://chromium.googlesource.com/chromiumos/manifest
+            #  https://chrome-internal.googlesource.com/chromeos/manifest-internal
+            manifest_repo_url=None,
+
+            # Whether we are using the manifest_version repo that stores per-build
+            # manifests.
+            manifest_version=False,
+
+            # Use a different branch of the project manifest for the build.
+            manifest_branch=None,
+
+            # LKGM for ChromeOS generated for Chrome builds that are blessed from
+            # canary runs.
+            use_chrome_lkgm=False,
+
+            # Upload prebuilts for this build. Valid values are PUBLIC, PRIVATE, or
+            # False.
+            prebuilts=False,
+
+            # Use SDK as opposed to building the chroot from source.
+            use_sdk=True,
+
+            # The description string to print out for config when user runs --list.
+            description=None,
+
+            # Boolean that enables parameter --git-sync for upload_prebuilts.
+            git_sync=False,
+
+            # A list of the child config groups, if applicable. See the AddGroup
+            # method.
+            child_configs=[],
+
+            # Whether this config belongs to a config group.
+            grouped=False,
+
+            # layout of build_image resulting image. See
+            # scripts/build_library/legacy_disk_layout.json or
+            # overlay-<board>/scripts/disk_layout.json for possible values.
+            disk_layout=None,
+
+            # If enabled, run the PatchChanges stage.  Enabled by default. Can be
+            # overridden by the --nopatch flag.
+            postsync_patch=True,
+
+            # Reexec into the buildroot after syncing.  Enabled by default.
+            postsync_reexec=True,
+
+            # Run the binhost_test stage. Only makes sense for builders that have no
+            # boards.
+            binhost_test=False,
+
+            # If specified, it is passed on to the PushImage script as '--sign-types'
+            # commandline argument.  Must be either None or a list of image types.
+            sign_types=None,
+
+            # TODO(sosa): Collapse to one option.
+            # ========== Dev installer prebuilts options =======================
+
+            # Upload prebuilts for this build to this bucket. If it equals None the
+            # default buckets are used.
+            binhost_bucket=None,
+
+            # Parameter --key for upload_prebuilts. If it equals None, the default
+            # values are used, which depend on the build type.
+            binhost_key=None,
+
+            # Parameter --binhost-base-url for upload_prebuilts. If it equals None,
+            # the default value is used.
+            binhost_base_url=None,
+
+            # Upload dev installer prebuilts.
+            dev_installer_prebuilts=False,
+
+            # Enable rootfs verification on the image.
+            rootfs_verification=True,
+
+            # Build the Chrome SDK.
+            chrome_sdk=False,
+
+            # If chrome_sdk is set to True, this determines whether we attempt to
+            # build Chrome itself with the generated SDK.
+            chrome_sdk_build_chrome=True,
+
+            # If chrome_sdk is set to True, this determines whether we use goma to
+            # build chrome.
+            chrome_sdk_goma=True,
+
+            # Run image tests. This should only be set if 'base' is in our list of
+            # images.
+            image_test=False,
+
+            # ==================================================================
+            # Workspace related options.
+
+            # Which branch should WorkspaceSyncStage checkout, if run.
+            workspace_branch=None,
+
+            # ==================================================================
+            # The documentation associated with the config.
+            doc=None,
+
+            # ==================================================================
+            # The goma related options.
+
+            # Which goma client to use.
+            goma_client_type=None,
+
+            # Try to use goma to build all packages.
+            build_all_with_goma=False,
+
+            # This is a LUCI Scheduler schedule string. Setting this will create
+            # a LUCI Scheduler for this build on swarming (not buildbot).
+            # See: https://goo.gl/VxSzFf
+            schedule=None,
+
+            # This is the list of git repos which can trigger this build in swarming.
+            # Implies that schedule is set, to "triggered".
+            # The format is of the form:
+            #   [ (<git repo url>, (<ref1>, <ref2>, …)),
+            #    …]
+            triggered_gitiles=None,
+
+            # If true, skip package retries in BuildPackages step.
+            nobuildretry=False,
+
+            # Attempt to run this build on the same bot each time it builds.
+            # This is only meaningful for slave builds run on swarming. This
+            # should only be used with LUCI Builders that use a reserved
+            # role to avoid having bots stolen by other builds while
+            # waiting on a new master build.
+            build_affinity=False,
+    )
+
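+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  Since DefaultSettings() enumerates every valid
+# setting, it can be used to reject overrides that are not known config keys
+# before folding them into a BuildConfig.
+def _ExampleBuildConfigFromDefaults(**overrides):
+    """Sketch: build a config from the defaults, rejecting unknown keys."""
+    defaults = DefaultSettings()
+    unknown = sorted(set(overrides) - set(defaults))
+    if unknown:
+        raise ValueError('Unknown config settings: %s' % (unknown,))
+    return BuildConfig(defaults, **overrides)
+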
+
+def GerritInstanceParameters(name, instance):
+    param_names = [
+            '_GOB_INSTANCE', '_GERRIT_INSTANCE', '_GOB_HOST', '_GERRIT_HOST',
+            '_GOB_URL', '_GERRIT_URL'
+    ]
+
+    gob_instance = instance
+    gerrit_instance = '%s-review' % instance
+    gob_host = constants.GOB_HOST % gob_instance
+    gerrit_host = constants.GOB_HOST % gerrit_instance
+    gob_url = 'https://%s' % gob_host
+    gerrit_url = 'https://%s' % gerrit_host
+
+    params = [
+            gob_instance, gerrit_instance, gob_host, gerrit_host, gob_url,
+            gerrit_url
+    ]
+
+    return dict([('%s%s' % (name, pn), p)
+                 for pn, p in zip(param_names, params)])
+
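+
+# Editor's note: the helper below is an illustrative sketch and is not part
+# of upstream chromite.  For one name/instance pair, GerritInstanceParameters
+# derives six parameters whose keys are the name joined with each suffix in
+# |param_names|.
+def _ExampleGerritParameters():
+    """Sketch: the keys produced for the external 'chromium' instance."""
+    params = GerritInstanceParameters('EXTERNAL', 'chromium')
+    # Expected keys: EXTERNAL_GOB_INSTANCE ('chromium'),
+    # EXTERNAL_GERRIT_INSTANCE ('chromium-review'), EXTERNAL_GOB_HOST,
+    # EXTERNAL_GERRIT_HOST, EXTERNAL_GOB_URL and EXTERNAL_GERRIT_URL.
+    return sorted(params)
+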
+
+def DefaultSiteParameters():
+    # Enumeration of valid site parameters; any/all site parameters must be here.
+    # All site parameters should be documented.
+    default_site_params = {}
+
+    manifest_project = 'chromiumos/manifest'
+    manifest_int_project = 'chromeos/manifest-internal'
+    external_remote = 'cros'
+    internal_remote = 'cros-internal'
+    chromium_remote = 'chromium'
+    chrome_remote = 'chrome'
+    aosp_remote = 'aosp'
+    weave_remote = 'weave'
+
+    internal_change_prefix = 'chrome-internal:'
+    external_change_prefix = 'chromium:'
+
+    # Gerrit instance site parameters.
+    default_site_params.update(GerritInstanceParameters(
+            'EXTERNAL', 'chromium'))
+    default_site_params.update(
+            GerritInstanceParameters('INTERNAL', 'chrome-internal'))
+    default_site_params.update(GerritInstanceParameters('AOSP', 'android'))
+    default_site_params.update(GerritInstanceParameters('WEAVE', 'weave'))
+
+    default_site_params.update(
+            # Parameters to define which manifests to use.
+            MANIFEST_PROJECT=manifest_project,
+            MANIFEST_INT_PROJECT=manifest_int_project,
+            MANIFEST_PROJECTS=(manifest_project, manifest_int_project),
+            MANIFEST_URL=os.path.join(default_site_params['EXTERNAL_GOB_URL'],
+                                      manifest_project),
+            MANIFEST_INT_URL=os.path.join(
+                    default_site_params['INTERNAL_GERRIT_URL'],
+                    manifest_int_project),
+
+            # CrOS remotes specified in the manifests.
+            EXTERNAL_REMOTE=external_remote,
+            INTERNAL_REMOTE=internal_remote,
+            GOB_REMOTES={
+                    default_site_params['EXTERNAL_GOB_INSTANCE']:
+                    external_remote,
+                    default_site_params['INTERNAL_GOB_INSTANCE']:
+                    internal_remote,
+            },
+            CHROMIUM_REMOTE=chromium_remote,
+            CHROME_REMOTE=chrome_remote,
+            AOSP_REMOTE=aosp_remote,
+            WEAVE_REMOTE=weave_remote,
+
+            # Only remotes listed in CROS_REMOTES are considered branchable.
+            # CROS_REMOTES and BRANCHABLE_PROJECTS must be kept in sync.
+            GERRIT_HOSTS={
+                    external_remote:
+                    default_site_params['EXTERNAL_GERRIT_HOST'],
+                    internal_remote:
+                    default_site_params['INTERNAL_GERRIT_HOST'],
+                    aosp_remote: default_site_params['AOSP_GERRIT_HOST'],
+                    weave_remote: default_site_params['WEAVE_GERRIT_HOST'],
+            },
+            CROS_REMOTES={
+                    external_remote: default_site_params['EXTERNAL_GOB_URL'],
+                    internal_remote: default_site_params['INTERNAL_GOB_URL'],
+                    aosp_remote: default_site_params['AOSP_GOB_URL'],
+                    weave_remote: default_site_params['WEAVE_GOB_URL'],
+            },
+            GIT_REMOTES={
+                    chromium_remote: default_site_params['EXTERNAL_GOB_URL'],
+                    chrome_remote: default_site_params['INTERNAL_GOB_URL'],
+                    external_remote: default_site_params['EXTERNAL_GOB_URL'],
+                    internal_remote: default_site_params['INTERNAL_GOB_URL'],
+                    aosp_remote: default_site_params['AOSP_GOB_URL'],
+                    weave_remote: default_site_params['WEAVE_GOB_URL'],
+            },
+
+            # Prefix to distinguish internal and external changes. This is used
+            # when a user specifies a patch with "-g", when generating a key for
+            # a patch to use in our PatchCache, and when displaying a custom
+            # string for the patch.
+            INTERNAL_CHANGE_PREFIX=internal_change_prefix,
+            EXTERNAL_CHANGE_PREFIX=external_change_prefix,
+            CHANGE_PREFIX={
+                    external_remote: external_change_prefix,
+                    internal_remote: internal_change_prefix,
+            },
+
+            # List of remotes that are okay to include in the external manifest.
+            EXTERNAL_REMOTES=(
+                    external_remote,
+                    chromium_remote,
+                    aosp_remote,
+                    weave_remote,
+            ),
+
+            # Mapping 'remote name' -> regexp that matches names of repositories on
+            # that remote that can be branched when creating a CrOS branch.
+            # The branching script will actually create a new git ref when branching
+            # these projects. It won't attempt to create a git ref for other projects
+            # that may be mentioned in a manifest. If a remote is missing from this
+            # dictionary, all projects on that remote are considered to not be
+            # branchable.
+            BRANCHABLE_PROJECTS={
+                    external_remote: r'(chromiumos|aosp)/(.+)',
+                    internal_remote: r'chromeos/(.+)',
+            },
+
+            # Additional parameters used to filter manifests, create modified
+            # manifests, and to branch manifests.
+            MANIFEST_VERSIONS_GOB_URL=(
+                    '%s/chromiumos/manifest-versions' %
+                    default_site_params['EXTERNAL_GOB_URL']),
+            MANIFEST_VERSIONS_GOB_URL_TEST=(
+                    '%s/chromiumos/manifest-versions-test' %
+                    default_site_params['EXTERNAL_GOB_URL']),
+            MANIFEST_VERSIONS_INT_GOB_URL=(
+                    '%s/chromeos/manifest-versions' %
+                    default_site_params['INTERNAL_GOB_URL']),
+            MANIFEST_VERSIONS_INT_GOB_URL_TEST=(
+                    '%s/chromeos/manifest-versions-test' %
+                    default_site_params['INTERNAL_GOB_URL']),
+            MANIFEST_VERSIONS_GS_URL='gs://chromeos-manifest-versions',
+
+            # Standard directories under buildroot for cloning these repos.
+            EXTERNAL_MANIFEST_VERSIONS_PATH='manifest-versions',
+            INTERNAL_MANIFEST_VERSIONS_PATH='manifest-versions-internal',
+
+            # GS URL in which to archive build artifacts.
+            ARCHIVE_URL='gs://chromeos-image-archive',
+    )
+
+    return default_site_params
+
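+# Illustrative values for a few of the derived defaults above (they follow from
+# the GerritInstanceParameters() results for the 'chromium' and
+# 'chrome-internal' instances):
+#   DefaultSiteParameters()['MANIFEST_URL'] ==
+#       'https://chromium.googlesource.com/chromiumos/manifest'
+#   DefaultSiteParameters()['MANIFEST_INT_URL'] ==
+#       'https://chrome-internal-review.googlesource.com/chromeos/manifest-internal'
+#   DefaultSiteParameters()['EXTERNAL_CHANGE_PREFIX'] == 'chromium:'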
+
+class SiteConfig(dict):
+    """This holds a set of named BuildConfig values."""
+
+    def __init__(self, defaults=None, templates=None):
+        """Init.
+
+    Args:
+      defaults: Dictionary of key value pairs to use as BuildConfig values.
+                All BuildConfig values should be defined here. If None,
+                the DefaultSettings() is used. Most sites should use
+                DefaultSettings(), and then update to add any site specific
+                values needed.
+      templates: Dictionary of template names to partial BuildConfigs
+                 other BuildConfigs can be based on. Mostly used to reduce
+                 verbosity of the config dump file format.
+    """
+        super(SiteConfig, self).__init__()
+        self._defaults = DefaultSettings()
+        if defaults:
+            self._defaults.update(defaults)
+        self._templates = AttrDict() if templates is None else AttrDict(
+                templates)
+
+    def GetDefault(self):
+        """Create the canonical default build configuration."""
+        # Enumeration of valid settings; any/all config settings must be in this.
+        # All settings must be documented.
+        return BuildConfig(**self._defaults)
+
+    def GetTemplates(self):
+        """Get the templates of the build configs"""
+        return self._templates
+
+    @property
+    def templates(self):
+        return self._templates
+
+    #
+    # Methods for searching a SiteConfig's contents.
+    #
+    def GetBoards(self):
+        """Return an iterable of all boards in the SiteConfig."""
+        return set(
+                itertools.chain.from_iterable(x.boards for x in self.values()
+                                              if x.boards))
+
+    def FindFullConfigsForBoard(self, board=None):
+        """Returns full builder configs for a board.
+
+    Args:
+      board: The board to match. By default, match all boards.
+
+    Returns:
+      A tuple containing a list of matching external configs and a list of
+      matching internal release configs for a board.
+    """
+        ext_cfgs = []
+        int_cfgs = []
+
+        for name, c in self.items():
+            if c['boards'] and (board is None or board in c['boards']):
+                if name.endswith(
+                        '-%s' % CONFIG_TYPE_RELEASE) and c['internal']:
+                    int_cfgs.append(c.deepcopy())
+                elif name.endswith(
+                        '-%s' % CONFIG_TYPE_FULL) and not c['internal']:
+                    ext_cfgs.append(c.deepcopy())
+
+        return ext_cfgs, int_cfgs
+
+    def FindCanonicalConfigForBoard(self, board, allow_internal=True):
+        """Get the canonical cbuildbot builder config for a board."""
+        ext_cfgs, int_cfgs = self.FindFullConfigsForBoard(board)
+        # If both external and internal builds exist for this board, prefer the
+        # internal one unless instructed otherwise.
+        both = (int_cfgs if allow_internal else []) + ext_cfgs
+
+        if not both:
+            raise ValueError('Invalid board specified: %s.' % board)
+        return both[0]
+
+    def GetSlaveConfigMapForMaster(self,
+                                   master_config,
+                                   options=None,
+                                   important_only=True):
+        """Gets the slave builds triggered by a master config.
+
+    If a master builder also performs a build, it can (incorrectly) return
+    itself.
+
+    Args:
+      master_config: A build config for a master builder.
+      options: The options passed on the commandline. This argument is required
+               for normal operation, but we accept None to assist with testing.
+      important_only: If True, only get the important slaves.
+
+    Returns:
+      A slave_name to slave_config map, corresponding to the slaves for the
+      master represented by master_config.
+
+    Raises:
+      AssertionError if the given config is not a master config or does not
+        specify slave_configs.
+    """
+        assert master_config.master
+        assert master_config.slave_configs is not None
+
+        slave_name_config_map = {}
+        if options is not None and options.remote_trybot:
+            return {}
+
+        # Look up the build configs for all slaves named by the master.
+        slave_name_config_map = {
+                name: self[name]
+                for name in master_config.slave_configs
+        }
+
+        if important_only:
+            # Remove unimportant configs from the result.
+            slave_name_config_map = {
+                    k: v
+                    for k, v in slave_name_config_map.items() if v.important
+            }
+
+        return slave_name_config_map
+
+    def GetSlavesForMaster(self,
+                           master_config,
+                           options=None,
+                           important_only=True):
+        """Get a list of qualified build slave configs given the master_config.
+
+    Args:
+      master_config: A build config for a master builder.
+      options: The options passed on the commandline. This argument is optional,
+               and only makes sense when called from cbuildbot.
+      important_only: If True, only get the important slaves.
+    """
+        slave_map = self.GetSlaveConfigMapForMaster(
+                master_config, options=options, important_only=important_only)
+        return list(slave_map.values())
+
+    #
+    # Methods used when creating a Config programmatically.
+    #
+    def Add(self, name, template=None, *args, **kwargs):
+        """Add a new BuildConfig to the SiteConfig.
+
+    Examples:
+      # Creates default build named foo.
+      site_config.Add('foo')
+
+      # Creates default build with board 'foo_board'
+      site_config.Add('foo',
+                      boards=['foo_board'])
+
+      # Creates build based on template_build for 'foo_board'.
+      site_config.Add('foo',
+                      template_build,
+                      boards=['foo_board'])
+
+      # Creates build based on template for 'foo_board', with mixin.
+      # Inheritance order is default, template, mixin, arguments.
+      site_config.Add('foo',
+                      template_build,
+                      mixin_build_config,
+                      boards=['foo_board'])
+
+      # Creates build without a template but with mixin.
+      # Inheritance order is default, template, mixin, arguments.
+      site_config.Add('foo',
+                      None,
+                      mixin_build_config,
+                      boards=['foo_board'])
+
+    Args:
+      name: The name to label this configuration; this is what cbuildbot
+            would see.
+      template: BuildConfig to use as a template for this build.
+      args: BuildConfigs to patch into this config. First one (if present) is
+            considered the template. See AddTemplate for help on templates.
+      kwargs: BuildConfig values to explicitly set on this config.
+
+    Returns:
+      The BuildConfig just added to the SiteConfig.
+    """
+        assert name not in self, ('%s already exists.' % name)
+
+        inherits, overrides = args, kwargs
+        if template:
+            inherits = (template, ) + inherits
+
+        # Make sure we don't ignore that argument silently.
+        if '_template' in overrides:
+            raise ValueError('_template cannot be explicitly set.')
+
+        result = self.GetDefault()
+        result.apply(*inherits, **overrides)
+
+        # Select the template name based on template argument, or nothing.
+        resolved_template = template.get('_template') if template else None
+        assert not resolved_template or resolved_template in self.templates, \
+            '%s inherits from non-template %s' % (name, resolved_template)
+
+        # Our name is passed as an explicit argument. We use the first build
+        # config as our template, or nothing.
+        result['name'] = name
+        result['_template'] = resolved_template
+        self[name] = result
+        return result
+
+    def AddWithoutTemplate(self, name, *args, **kwargs):
+        """Add a config containing only explicitly listed values (no defaults)."""
+        self.Add(name, None, *args, **kwargs)
+
+    def AddGroup(self, name, *args, **kwargs):
+        """Create a new group of build configurations.
+
+    Args:
+      name: The name to label this configuration; this is what cbuildbot
+            would see.
+      args: Configurations to build in this group. The first config in
+            the group is considered the primary configuration and is used
+            for syncing and creating the chroot.
+      kwargs: Override values to use for the parent config.
+
+    Returns:
+      A new BuildConfig instance.
+    """
+        child_configs = [x.deepcopy().apply(grouped=True) for x in args]
+        return self.Add(name, args[0], child_configs=child_configs, **kwargs)
+
+    def AddForBoards(self,
+                     suffix,
+                     boards,
+                     per_board=None,
+                     template=None,
+                     *args,
+                     **kwargs):
+        """Create configs for all boards in |boards|.
+
+    Args:
+      suffix: Config name is <board>-<suffix>.
+      boards: A list of board names as strings.
+      per_board: A dictionary of board names to BuildConfigs, or None.
+      template: The template to use for all configs created.
+      *args: Mixin templates to apply.
+      **kwargs: Additional keyword arguments to be used in Add.
+
+    Returns:
+      List of the configs created.
+    """
+        result = []
+
+        for board in boards:
+            config_name = '%s-%s' % (board, suffix)
+
+            # Insert the per_board value as the last mixin, if it exists.
+            mixins = args + (dict(boards=[board]), )
+            if per_board and board in per_board:
+                mixins = mixins + (per_board[board], )
+
+            # Create the new config for this board.
+            result.append(self.Add(config_name, template, *mixins, **kwargs))
+
+        return result
+
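+    # Illustrative usage sketch (the names here are hypothetical):
+    #   site_config.AddForBoards('release', ['foo_board', 'bar_board'],
+    #                            template=some_template)
+    # creates the configs 'foo_board-release' and 'bar_board-release', with
+    # boards=[<board>] added as a mixin for each.
+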
+    def ApplyForBoards(self, suffix, boards, *args, **kwargs):
+        """Update configs for all boards in |boards|.
+
+    Args:
+      suffix: Config name is <board>-<suffix>.
+      boards: A list of board names as strings.
+      *args: Mixin templates to apply.
+      **kwargs: Additional keyword arguments to be used in Add.
+
+    Returns:
+      List of the configs updated.
+    """
+        result = []
+
+        for board in boards:
+            config_name = '%s-%s' % (board, suffix)
+            assert config_name in self, ('%s does not exist.' % config_name)
+
+            # Update the config for this board.
+            result.append(self[config_name].apply(*args, **kwargs))
+
+        return result
+
+    def AddTemplate(self, name, *args, **kwargs):
+        """Create a template named |name|.
+
+    Templates are used to define common settings that are shared across types
+    of builders. They help reduce duplication in config_dump.json, because we
+    only define the template and its settings once.
+
+    Args:
+      name: The name of the template.
+      args: See the docstring of BuildConfig.derive.
+      kwargs: See the docstring of BuildConfig.derive.
+    """
+        assert name not in self._templates, ('Template %s already exists.' %
+                                             name)
+
+        template = BuildConfig()
+        template.apply(*args, **kwargs)
+        template['_template'] = name
+        self._templates[name] = template
+
+        return template
+
+    def _MarshalBuildConfig(self, name, config):
+        """Hide the defaults from a given config entry.
+
+    Args:
+      name: Default build name (usually dictionary key).
+      config: A config entry.
+
+    Returns:
+      The same config entry, but without any defaults.
+    """
+        defaults = self.GetDefault()
+        defaults['name'] = name
+
+        template = config.get('_template')
+        if template:
+            defaults.apply(self._templates[template])
+            defaults['_template'] = None
+
+        result = {}
+        for k, v in config.items():
+            if defaults.get(k) != v:
+                if k == 'child_configs':
+                    result['child_configs'] = [
+                            self._MarshalBuildConfig(name, child)
+                            for child in v
+                    ]
+                else:
+                    result[k] = v
+
+        return result
+
+    def _MarshalTemplates(self):
+        """Return a version of self._templates with only used templates.
+
+    Templates have callables/delete keys resolved against GetDefault() to
+    ensure they can be safely saved to json.
+
+    Returns:
+      Dict copy of self._templates with all unreferenced templates removed.
+    """
+        defaults = self.GetDefault()
+
+        # All templates used. We ignore child configs since they
+        # should exist at top level.
+        used = set(c.get('_template', None) for c in self.values())
+        used.discard(None)
+
+        result = {}
+
+        for name in used:
+            # Expand any special values (callables, etc)
+            expanded = defaults.derive(self._templates[name])
+            # Recover the '_template' value which is filtered out by derive.
+            expanded['_template'] = name
+            # Hide anything that matches the default.
+            save = {k: v for k, v in expanded.items() if defaults.get(k) != v}
+            result[name] = save
+
+        return result
+
+    def SaveConfigToString(self):
+        """Save this Config object to a Json format string."""
+        default = self.GetDefault()
+
+        config_dict = {}
+        config_dict['_default'] = default
+        config_dict['_templates'] = self._MarshalTemplates()
+        for k, v in self.items():
+            config_dict[k] = self._MarshalBuildConfig(k, v)
+
+        return PrettyJsonDict(config_dict)
+
+    def SaveConfigToFile(self, config_file):
+        """Save this Config to a Json file.
+
+    Args:
+      config_file: The file to write to.
+    """
+        json_string = self.SaveConfigToString()
+        osutils.WriteFile(config_file, json_string)
+
+    def DumpExpandedConfigToString(self):
+        """Dump the SiteConfig to Json with all configs full expanded.
+
+    This is intended for debugging default/template behavior. The dumped JSON
+    can't be reloaded (at least not reliably).
+    """
+        return PrettyJsonDict(self)
+
+    def DumpConfigCsv(self):
+        """Dump the SiteConfig to CSV with all configs fully expanded.
+
+    This supports configuration analysis and debugging.
+    """
+        raw_config = json.loads(self.DumpExpandedConfigToString())
+        header_keys = {'builder_name', 'test_type', 'device'}
+        csv_rows = []
+        for builder_name, values in raw_config.items():
+            row = {'builder_name': builder_name}
+            tests = {}
+            raw_devices = []
+            for key, value in values.items():
+                header_keys.add(key)
+                if value:
+                    if isinstance(value, list):
+                        if '_tests' in key:
+                            tests[key] = value
+                        elif key == 'models':
+                            raw_devices = value
+                        else:
+                            # Ignoring this for now for test analysis.
+                            if key != 'child_configs':
+                                row[key] = ' | '.join(
+                                        str(array_val) for array_val in value)
+                    else:
+                        row[key] = value
+
+            if tests:
+                for test_type, test_entries in tests.items():
+                    for test_entry in test_entries:
+                        test_row = copy.deepcopy(row)
+                        test_row['test_type'] = test_type
+                        raw_test = json.loads(test_entry)
+                        for test_key, test_value in raw_test.items():
+                            if test_value:
+                                header_keys.add(test_key)
+                                test_row[test_key] = test_value
+                        csv_rows.append(test_row)
+                        if raw_devices:
+                            for raw_device in raw_devices:
+                                device = json.loads(raw_device)
+                                test_suite = test_row.get('suite', '')
+                                test_suites = device.get('test_suites', [])
+                                if test_suite and test_suites and test_suite in test_suites:
+                                    device_row = copy.deepcopy(test_row)
+                                    device_row['device'] = device['name']
+                                    csv_rows.append(device_row)
+            else:
+                csv_rows.append(row)
+
+        csv_result = [','.join(header_keys)]
+        for csv_row in csv_rows:
+            row_values = []
+            for header_key in header_keys:
+                row_values.append('"%s"' % str(csv_row.get(header_key, '')))
+            csv_result.append(','.join(row_values))
+
+        return '\n'.join(csv_result)
+
+
+#
+# Functions related to working with GE Data.
+#
+
+
+def LoadGEBuildConfigFromFile(
+    build_settings_file=constants.GE_BUILD_CONFIG_FILE):
+    """Load template config dict from a Json encoded file."""
+    json_string = osutils.ReadFile(build_settings_file)
+    return json.loads(json_string)
+
+
+def GeBuildConfigAllBoards(ge_build_config):
+    """Extract a list of board names from the GE Build Config.
+
+  Args:
+    ge_build_config: Dictionary containing the decoded GE configuration file.
+
+  Returns:
+    A list of board names as strings.
+  """
+    return [b['name'] for b in ge_build_config['boards']]
+
+
+def GetUnifiedBuildConfigAllBuilds(ge_build_config):
+    """Extract a list of all unified build configurations.
+
+  This dictionary is based on the JSON defined by the proto generated from
+  GoldenEye.  See cs/crosbuilds.proto
+
+  Args:
+    ge_build_config: Dictionary containing the decoded GE configuration file.
+
+  Returns:
+    A list of unified build configurations (json configs)
+  """
+    return ge_build_config.get('reference_board_unified_builds', [])
+
+
+class BoardGroup(object):
+    """Class holds leader_boards and follower_boards for grouped boards"""
+
+    def __init__(self):
+        self.leader_boards = []
+        self.follower_boards = []
+
+    def AddLeaderBoard(self, board):
+        self.leader_boards.append(board)
+
+    def AddFollowerBoard(self, board):
+        self.follower_boards.append(board)
+
+    def __str__(self):
+        return ('Leader_boards: %s Follower_boards: %s' %
+                (self.leader_boards, self.follower_boards))
+
+
+def GroupBoardsByBuilderAndBoardGroup(board_list):
+    """Group boards by builder and board_group.
+
+  Args:
+    board_list: board list from the template file.
+
+  Returns:
+    builder_group_dict: maps builder to {group_n: board_group_n}
+    builder_ungrouped_dict: maps builder to a list of ungrouped boards
+  """
+    builder_group_dict = {}
+    builder_ungrouped_dict = {}
+
+    for b in board_list:
+        name = b[CONFIG_TEMPLATE_NAME]
+        # Invalid build configs are being written out with no config templates,
+        # hence the default. See https://crbug.com/1012278.
+        for config in b.get(CONFIG_TEMPLATE_CONFIGS, []):
+            board = {'name': name}
+            board.update(config)
+
+            builder = config[CONFIG_TEMPLATE_BUILDER]
+            if builder not in builder_group_dict:
+                builder_group_dict[builder] = {}
+            if builder not in builder_ungrouped_dict:
+                builder_ungrouped_dict[builder] = []
+
+            board_group = config[CONFIG_TEMPLATE_BOARD_GROUP]
+            if not board_group:
+                builder_ungrouped_dict[builder].append(board)
+                continue
+            if board_group not in builder_group_dict[builder]:
+                builder_group_dict[builder][board_group] = BoardGroup()
+            if config[CONFIG_TEMPLATE_LEADER_BOARD]:
+                builder_group_dict[builder][board_group].AddLeaderBoard(board)
+            else:
+                builder_group_dict[builder][board_group].AddFollowerBoard(
+                        board)
+
+    return (builder_group_dict, builder_ungrouped_dict)
+
+
+def GroupBoardsByBuilder(board_list):
+    """Group boards by the 'builder' flag."""
+    builder_to_boards_dict = {}
+
+    for b in board_list:
+        # Invalid build configs are being written out with no configs array,
+        # hence the default. See https://crbug.com/1005803.
+        for config in b.get(CONFIG_TEMPLATE_CONFIGS, []):
+            builder = config[CONFIG_TEMPLATE_BUILDER]
+            if builder not in builder_to_boards_dict:
+                builder_to_boards_dict[builder] = set()
+            builder_to_boards_dict[builder].add(b[CONFIG_TEMPLATE_NAME])
+
+    return builder_to_boards_dict
+
+
+def GetNonUniBuildLabBoardName(board):
+    """Return the board name labeled in the lab for non-unibuild."""
+    # These special strings represent special configurations used in the image;
+    # the tests should run on a DUT without those strings.
+    # We strip those strings from the board so that the lab can handle it
+    # correctly.
+    SPECIAL_SUFFIX = [
+            '-arcnext$',
+            '-arcvm$',
+            '-arc-r$',
+            '-arc-r-userdebug$',
+            '-connectivitynext$',
+            '-kernelnext$',
+            '-kvm$',
+            '-ndktranslation$',
+            '-cfm$',
+            '-campfire$',
+            '-borealis$',
+    ]
+    # ARM64 userspace boards use a '64' suffix, but it can't be put in the list
+    # above because of collisions with boards like kevin-arc64.
+    ARM64_BOARDS = ['cheza64', 'kevin64']
+    for suffix in SPECIAL_SUFFIX:
+        board = re.sub(suffix, '', board)
+    if board in ARM64_BOARDS:
+        # Remove '64' suffix from the board name.
+        board = board[:-2]
+    return board
+
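+# Illustrative examples (the board names are hypothetical; the results follow
+# from the suffix/ARM64 handling above):
+#   GetNonUniBuildLabBoardName('foo-kernelnext') == 'foo'
+#   GetNonUniBuildLabBoardName('foo-arc-r-userdebug') == 'foo'
+#   GetNonUniBuildLabBoardName('kevin64') == 'kevin'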
+
+def GetArchBoardDict(ge_build_config):
+    """Get a dict mapping arch types to board names.
+
+  Args:
+    ge_build_config: Dictionary containing the decoded GE configuration file.
+
+  Returns:
+    A dict mapping arch types to board names.
+  """
+    arch_board_dict = {}
+
+    for b in ge_build_config[CONFIG_TEMPLATE_BOARDS]:
+        board_name = b[CONFIG_TEMPLATE_NAME]
+        # Invalid build configs are being written out with no configs array,
+        # hence the default. See https://crbug.com/947712.
+        for config in b.get(CONFIG_TEMPLATE_CONFIGS, []):
+            arch = config[CONFIG_TEMPLATE_ARCH]
+            arch_board_dict.setdefault(arch, set()).add(board_name)
+
+    for b in GetUnifiedBuildConfigAllBuilds(ge_build_config):
+        board_name = b[CONFIG_TEMPLATE_REFERENCE_BOARD_NAME]
+        arch = b[CONFIG_TEMPLATE_ARCH]
+        arch_board_dict.setdefault(arch, set()).add(board_name)
+
+    return arch_board_dict
+
+
+#
+# Functions related to loading/saving Json.
+#
+class ObjectJSONEncoder(json.JSONEncoder):
+    """Json Encoder that encodes objects as their dictionaries."""
+
+    # pylint: disable=method-hidden
+    def default(self, o):
+        return self.encode(o.__dict__)
+
+
+def PrettyJsonDict(dictionary):
+    """Returns a pretty-ified json dump of a dictionary."""
+    return json.dumps(dictionary,
+                      cls=ObjectJSONEncoder,
+                      sort_keys=True,
+                      indent=4,
+                      separators=(',', ': ')) + '\n'
+
+
+def LoadConfigFromFile(config_file=constants.CHROMEOS_CONFIG_FILE):
+    """Load a Config a Json encoded file."""
+    json_string = osutils.ReadFile(config_file)
+    return LoadConfigFromString(json_string)
+
+
+def LoadConfigFromString(json_string):
+    """Load a cbuildbot config from it's Json encoded string."""
+    config_dict = json.loads(json_string)
+
+    # Use standard defaults, but allow the config to override.
+    defaults = DefaultSettings()
+    defaults.update(config_dict.pop(DEFAULT_BUILD_CONFIG))
+    _DeserializeConfigs(defaults)
+
+    templates = config_dict.pop('_templates', {})
+    for t in templates.values():
+        _DeserializeConfigs(t)
+
+    defaultBuildConfig = BuildConfig(**defaults)
+
+    builds = {
+            n: _CreateBuildConfig(n, defaultBuildConfig, v, templates)
+            for n, v in config_dict.items()
+    }
+
+    # config is the struct that holds the complete cbuildbot config.
+    result = SiteConfig(defaults=defaults, templates=templates)
+    result.update(builds)
+
+    return result
+
+
+def _DeserializeConfig(build_dict,
+                       config_key,
+                       config_class,
+                       preserve_none=False):
+    """Deserialize config of given type inside build_dict.
+
+  Args:
+    build_dict: The build_dict to update (in place)
+    config_key: Key for the config inside build_dict.
+    config_class: The class to instantiate for the config.
+    preserve_none: If True, None values are preserved as is. By default, they
+        are dropped.
+  """
+    serialized_configs = build_dict.pop(config_key, None)
+    if serialized_configs is None:
+        if preserve_none:
+            build_dict[config_key] = None
+        return
+
+    deserialized_configs = []
+    for config_string in serialized_configs:
+        if isinstance(config_string, config_class):
+            deserialized_config = config_string
+        else:
+            # Each test config is dumped as a json string embedded in json.
+            embedded_configs = json.loads(config_string)
+            deserialized_config = config_class(**embedded_configs)
+        deserialized_configs.append(deserialized_config)
+    build_dict[config_key] = deserialized_configs
+
+
+def _DeserializeConfigs(build_dict):
+    """Updates a config dictionary with recreated objects.
+
+  Notification configs and various test configs are serialized as strings
+  (rather than JSON objects), so we need to turn them into real objects before
+  they can be consumed.
+
+  Args:
+    build_dict: The config dictionary to update (in place).
+  """
+    _DeserializeConfig(build_dict, 'vm_tests', VMTestConfig)
+    _DeserializeConfig(build_dict,
+                       'vm_tests_override',
+                       VMTestConfig,
+                       preserve_none=True)
+    _DeserializeConfig(build_dict, 'models', ModelTestConfig)
+    _DeserializeConfig(build_dict, 'hw_tests', HWTestConfig)
+    _DeserializeConfig(build_dict,
+                       'hw_tests_override',
+                       HWTestConfig,
+                       preserve_none=True)
+    _DeserializeConfig(build_dict, 'gce_tests', GCETestConfig)
+    _DeserializeConfig(build_dict, 'tast_vm_tests', TastVMTestConfig)
+    _DeserializeConfig(build_dict, 'moblab_vm_tests', MoblabVMTestConfig)
+    _DeserializeConfig(build_dict, 'notification_configs', NotificationConfig)
+
+
+def _CreateBuildConfig(name, default, build_dict, templates):
+    """Create a BuildConfig object from it's parsed JSON dictionary encoding."""
+    # These build config values need special handling.
+    child_configs = build_dict.pop('child_configs', None)
+    template = build_dict.get('_template')
+
+    # Use the name passed in as the default build name.
+    build_dict.setdefault('name', name)
+
+    result = default.deepcopy()
+    # Use update to explicitly avoid apply's special handling.
+    if template:
+        result.update(templates[template])
+    result.update(build_dict)
+
+    _DeserializeConfigs(result)
+
+    if child_configs is not None:
+        result['child_configs'] = [
+                _CreateBuildConfig(name, default, child, templates)
+                for child in child_configs
+        ]
+
+    return result
+
+
+@memoize.Memoize
+def GetConfig():
+    """Load the current SiteConfig.
+
+  Returns:
+    SiteConfig instance to use for this build.
+  """
+    return LoadConfigFromFile(constants.CHROMEOS_CONFIG_FILE)
+
+
+@memoize.Memoize
+def GetSiteParams():
+    """Get the site parameter configs.
+
+  This is the new, preferred method of accessing the site parameters, instead of
+  SiteConfig.params.
+
+  Returns:
+    AttrDict of site parameters
+  """
+    site_params = AttrDict()
+    site_params.update(DefaultSiteParameters())
+    return site_params
+
+
+def append_useflags(useflags):
+    """Used to append a set of useflags to existing useflags.
+
+  Useflags that shadow prior use flags will cause the prior flag to be removed.
+  (e.g. appending '-foo' to 'foo' will cause 'foo' to be removed)
+
+  Examples:
+    new_config = base_config.derive(useflags=append_useflags(['foo', '-bar']))
+
+  Args:
+    useflags: List of string useflags to append.
+  """
+    assert isinstance(useflags, (list, set))
+    shadowed_useflags = {
+            '-' + flag
+            for flag in useflags if not flag.startswith('-')
+    }
+    shadowed_useflags.update(
+            {flag[1:]
+             for flag in useflags if flag.startswith('-')})
+
+    def handler(old_useflags):
+        new_useflags = set(old_useflags or [])
+        new_useflags.update(useflags)
+        new_useflags.difference_update(shadowed_useflags)
+        return sorted(list(new_useflags))
+
+    return handler
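+
+# Illustrative sketch of the shadowing behavior described above:
+#   handler = append_useflags(['-foo'])
+#   handler(['foo', 'bar']) == ['-foo', 'bar']  # 'foo' is shadowed by '-foo'.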
diff --git a/utils/frozen_chromite/lib/constants.py b/utils/frozen_chromite/lib/constants.py
new file mode 100644
index 0000000..09e5a6e
--- /dev/null
+++ b/utils/frozen_chromite/lib/constants.py
@@ -0,0 +1,981 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module contains constants used by cbuildbot and related code."""
+
+from __future__ import print_function
+
+import itertools
+import os
+
+def _FindSourceRoot():
+    """Try and find the root check out of the chromiumos tree"""
+    source_root = path = os.path.realpath(
+            os.path.join(os.path.abspath(__file__), '..', '..', '..'))
+    while True:
+        if os.path.isdir(os.path.join(path, '.repo')):
+            return path
+        elif path == '/':
+            break
+        path = os.path.dirname(path)
+    return source_root
+
+
+SOURCE_ROOT = _FindSourceRoot()
+CHROOT_SOURCE_ROOT = '/mnt/host/source'
+CHROOT_CACHE_ROOT = '/var/cache/chromeos-cache'
+DEPOT_TOOLS_SUBPATH = 'src/chromium/depot_tools'
+
+CROSUTILS_DIR = os.path.join(SOURCE_ROOT, 'src/scripts')
+CHROMITE_DIR = os.path.realpath(os.path.join(
+    os.path.abspath(__file__), '..', '..'))
+BOOTSTRAP_DIR = os.path.join(CHROMITE_DIR, 'bootstrap')
+DEPOT_TOOLS_DIR = os.path.join(SOURCE_ROOT, DEPOT_TOOLS_SUBPATH)
+CHROMITE_BIN_SUBDIR = 'chromite/bin'
+CHROMITE_BIN_DIR = os.path.join(CHROMITE_DIR, 'bin')
+CHROMITE_SCRIPTS_DIR = os.path.join(CHROMITE_DIR, 'scripts')
+PATH_TO_CBUILDBOT = os.path.join(CHROMITE_BIN_SUBDIR, 'cbuildbot')
+DEFAULT_CHROOT_DIR = 'chroot'
+DEFAULT_CHROOT_PATH = os.path.join(SOURCE_ROOT, DEFAULT_CHROOT_DIR)
+TERMINA_TOOLS_DIR = os.path.join(
+    SOURCE_ROOT, 'src/platform/container-guest-tools/termina')
+
+STATEFUL_DIR = '/mnt/stateful_partition'
+
+# These constants are defined and used in the die_hook that logs failed
+# packages: 'cros_log_failed_packages' in profiles/base/profile.bashrc in
+# chromiumos-overlay. The status file is generated in CROS_METRICS_DIR, and
+# only if that environment variable is defined.
+CROS_METRICS_DIR_ENVVAR = 'CROS_METRICS_DIR'
+DIE_HOOK_STATUS_FILE_NAME = 'FAILED_PACKAGES'
+
+CHROMEOS_CONFIG_FILE = os.path.join(CHROMITE_DIR, 'config', 'config_dump.json')
+WATERFALL_CONFIG_FILE = os.path.join(
+    CHROMITE_DIR, 'config', 'waterfall_layout_dump.txt')
+LUCI_SCHEDULER_CONFIG_FILE = os.path.join(
+    CHROMITE_DIR, 'config', 'luci-scheduler.cfg')
+
+GE_BUILD_CONFIG_FILE = os.path.join(
+    CHROMITE_DIR, 'config', 'ge_build_config.json')
+
+# The following define the location for storing toolchain packages and
+# SDK overlay tarballs created during SDK builder runs. The paths are relative
+# to the build root's chroot, which guarantees that they are reachable from it
+# and get cleaned up when it is removed.
+SDK_TOOLCHAINS_OUTPUT = 'tmp/toolchain-pkgs'
+SDK_OVERLAYS_OUTPUT = 'tmp/sdk-overlays'
+
+AUTOTEST_BUILD_PATH = 'usr/local/build/autotest'
+UNITTEST_PKG_PATH = 'test-packages'
+
+# Only used for testing pinned images on test images.
+GUEST_IMAGES_PINS_PATH = 'usr/local/opt/google/containers/pins'
+PIN_KEY_FILENAME = 'filename'
+PIN_KEY_GSURI = 'gsuri'
+
+# Path to the lsb-release file on the device.
+LSB_RELEASE_PATH = '/etc/lsb-release'
+
+HOME_DIRECTORY = os.path.expanduser('~')
+
+# If cbuildbot is running on a bot, then the cidb access credentials will be
+# available here. This directory will not exist otherwise.
+CIDB_PROD_BOT_CREDS = os.path.join(HOME_DIRECTORY, '.cidb_creds',
+                                   'prod_cidb_bot')
+CIDB_DEBUG_BOT_CREDS = os.path.join(HOME_DIRECTORY, '.cidb_creds',
+                                    'debug_cidb_bot')
+
+# Crash Server upload API key.
+CRASH_API_KEY = os.path.join('/', 'creds', 'api_keys',
+                             'api_key-chromeos-crash-uploader')
+
+# Buildbucket build status
+BUILDBUCKET_BUILDER_STATUS_SCHEDULED = 'SCHEDULED'
+BUILDBUCKET_BUILDER_STATUS_STARTED = 'STARTED'
+BUILDBUCKET_BUILDER_STATUS_COMPLETED = 'COMPLETED'
+
+BUILDBUCKET_BUILDER_STATUSES = (BUILDBUCKET_BUILDER_STATUS_SCHEDULED,
+                                BUILDBUCKET_BUILDER_STATUS_STARTED,
+                                BUILDBUCKET_BUILDER_STATUS_COMPLETED)
+
+BUILDBUCKET_BUILDER_RESULT_SUCCESS = 'SUCCESS'
+BUILDBUCKET_BUILDER_RESULT_FAILURE = 'FAILURE'
+BUILDBUCKET_BUILDER_RESULT_CANCELED = 'CANCELED'
+
+# Builder status strings
+BUILDER_STATUS_FAILED = 'fail'
+BUILDER_STATUS_PASSED = 'pass'
+BUILDER_STATUS_INFLIGHT = 'inflight'
+BUILDER_STATUS_MISSING = 'missing'
+BUILDER_STATUS_ABORTED = 'aborted'
+# The following statuses are currently only used for build stages.
+BUILDER_STATUS_PLANNED = 'planned'
+BUILDER_STATUS_WAITING = 'waiting'
+BUILDER_STATUS_SKIPPED = 'skipped'
+BUILDER_STATUS_FORGIVEN = 'forgiven'
+BUILDER_COMPLETED_STATUSES = (BUILDER_STATUS_PASSED,
+                              BUILDER_STATUS_FAILED,
+                              BUILDER_STATUS_ABORTED,
+                              BUILDER_STATUS_SKIPPED,
+                              BUILDER_STATUS_FORGIVEN)
+BUILDER_ALL_STATUSES = (BUILDER_STATUS_FAILED,
+                        BUILDER_STATUS_PASSED,
+                        BUILDER_STATUS_INFLIGHT,
+                        BUILDER_STATUS_MISSING,
+                        BUILDER_STATUS_ABORTED,
+                        BUILDER_STATUS_WAITING,
+                        BUILDER_STATUS_PLANNED,
+                        BUILDER_STATUS_SKIPPED,
+                        BUILDER_STATUS_FORGIVEN)
+BUILDER_NON_FAILURE_STATUSES = (BUILDER_STATUS_PLANNED,
+                                BUILDER_STATUS_PASSED,
+                                BUILDER_STATUS_SKIPPED,
+                                # Quick fix for Buildbucket race problems.
+                                BUILDER_STATUS_INFLIGHT,
+                                BUILDER_STATUS_FORGIVEN)
+
+# Partition labels
+CROS_PART_STATEFUL = 'STATE'
+
+# Signer status strings
+SIGNER_STATUS_PASSED = 'passed'
+SIGNER_STATUS_FAILED = 'failed'
+
+# Change sources
+CHANGE_SOURCE_INTERNAL = 'internal'
+CHANGE_SOURCE_EXTERNAL = 'external'
+
+# Exception categories, as recorded in cidb
+EXCEPTION_CATEGORY_UNKNOWN = 'unknown'
+EXCEPTION_CATEGORY_BUILD = 'build'
+EXCEPTION_CATEGORY_TEST = 'test'
+EXCEPTION_CATEGORY_INFRA = 'infra'
+EXCEPTION_CATEGORY_LAB = 'lab'
+
+EXCEPTION_CATEGORY_ALL_CATEGORIES = (
+    EXCEPTION_CATEGORY_UNKNOWN,
+    EXCEPTION_CATEGORY_BUILD,
+    EXCEPTION_CATEGORY_TEST,
+    EXCEPTION_CATEGORY_INFRA,
+    EXCEPTION_CATEGORY_LAB,
+)
+
+# Monarch metric names
+MON_LAST_SLAVE = 'chromeos/cbuildbot/last_completed_slave'
+MON_BUILD_COMP_COUNT = 'chromeos/cbuildbot/build/completed_count'
+MON_BUILD_DURATION = 'chromeos/cbuildbot/build/durations'
+MON_STAGE_COMP_COUNT = 'chromeos/cbuildbot/stage/completed_count'
+MON_STAGE_DURATION = 'chromeos/cbuildbot/stage/durations'
+MON_STAGE_INSTANCE_DURATION = 'chromeos/cbuildbot/stage/instance_durations'
+MON_STAGE_FAILURE_COUNT = 'chromeos/cbuildbot/stage/failure_count'
+MON_FAILED_STAGE = 'chromeos/chromite/cbuildbot_launch/failed_stage'
+MON_CHROOT_USED = 'chromeos/cbuildbot/chroot_at_version'
+MON_REPO_SYNC_COUNT = 'chromeos/cbuildbot/repo/sync_count'
+MON_REPO_SYNC_RETRY_COUNT = 'chromeos/cbuildbot/repo/sync_retry_count'
+MON_REPO_SELFUPDATE_FAILURE_COUNT = ('chromeos/cbuildbot/repo/'
+                                     'selfupdate_failure_count')
+MON_REPO_INIT_RETRY_COUNT = 'chromeos/cbuildbot/repo/init_retry_count'
+MON_REPO_MANIFEST_FAILURE_COUNT = ('chromeos/cbuildbot/repo/'
+                                   'manifest_failure_count')
+MON_BB_RETRY_BUILD_COUNT = ('chromeos/cbuildbot/buildbucket/'
+                            'retry_build_count')
+MON_BB_CANCEL_BATCH_BUILDS_COUNT = ('chromeos/cbuildbot/buildbucket/'
+                                    'cancel_batch_builds_count')
+MON_EXPORT_TO_GCLOUD = 'chromeos/cbuildbot/export_to_gcloud'
+
+# Stage Categorization for failed stages metric.
+UNCATEGORIZED_STAGE = 'Uncategorized'
+CI_INFRA_STAGE = 'CI-Infra'
+TEST_INFRA_STAGE = 'Test-Infra'
+PRODUCT_OS_STAGE = 'Product-OS'
+PRODUCT_ANDROID_STAGE = 'Product-Android'
+PRODUCT_CHROME_STAGE = 'Product-Chrome'
+PRODUCT_TOOLCHAIN_STAGE = 'Product-Toolchain'
+
+
+# Re-execution API constants.
+# Used by --resume and --bootstrap to decipher which options they
+# can pass to the target cbuildbot (since it may not have that
+# option).
+# Format is Major.Minor.  Minor is used for tracking new options added
+# that aren't critical to the older version if it's not run.
+# Major is used for tracking heavy API breakage - for example, no longer
+# supporting the --resume option.
+REEXEC_API_MAJOR = 0
+REEXEC_API_MINOR = 12
+REEXEC_API_VERSION = '%i.%i' % (REEXEC_API_MAJOR, REEXEC_API_MINOR)
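+# With the values above, this evaluates to REEXEC_API_VERSION == '0.12'
+# (the Major.Minor format described in the comment block above).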
+
+# Support --master-build-id
+REEXEC_API_MASTER_BUILD_ID = 3
+# Support --git-cache-dir
+REEXEC_API_GIT_CACHE_DIR = 4
+# Support --goma_dir and --goma_client_json
+REEXEC_API_GOMA = 5
+# Support --ts-mon-task-num
+REEXEC_API_TSMON_TASK_NUM = 6
+# Support --sanity-check-build
+REEXEC_API_SANITY_CHECK_BUILD = 7
+# Support --previous-build-state
+REEXEC_API_PREVIOUS_BUILD_STATE = 8
+# Support --workspace
+REEXEC_API_WORKSPACE = 9
+# Support --master-buildbucket-id
+REEXEC_API_MASTER_BUILDBUCKET_ID = 10
+# Support --chromeos_goma_dir
+REEXEC_API_CHROMEOS_GOMA_DIR = 11
+# Support --chrome-preload-dir
+REEXEC_API_CHROME_PRELOAD_DIR = 12
+
+# We rely on the (waterfall, builder name, build number) to uniquely identify
+# a build. However, future migrations or state wipes of the buildbot master may
+# cause it to reset its build number counter. When that happens, this value
+# should be incremented, ensuring that (waterfall, builder name, build number,
+# buildbot generation) is a unique identifier of builds.
+BUILDBOT_GENERATION = 1
+
+ISOLATESERVER = 'https://isolateserver.appspot.com'
+
+GOOGLE_EMAIL = '@google.com'
+CHROMIUM_EMAIL = '@chromium.org'
+
+CORP_DOMAIN = 'corp.google.com'
+GOLO_DOMAIN = 'golo.chromium.org'
+CHROME_DOMAIN = 'chrome.' + CORP_DOMAIN
+CHROMEOS_BOT_INTERNAL = 'chromeos-bot.internal'
+
+GOB_HOST = '%s.googlesource.com'
+
+EXTERNAL_GOB_INSTANCE = 'chromium'
+EXTERNAL_GERRIT_INSTANCE = 'chromium-review'
+EXTERNAL_GOB_HOST = GOB_HOST % EXTERNAL_GOB_INSTANCE
+EXTERNAL_GERRIT_HOST = GOB_HOST % EXTERNAL_GERRIT_INSTANCE
+EXTERNAL_GOB_URL = 'https://%s' % EXTERNAL_GOB_HOST
+EXTERNAL_GERRIT_URL = 'https://%s' % EXTERNAL_GERRIT_HOST
+
+INTERNAL_GOB_INSTANCE = 'chrome-internal'
+INTERNAL_GERRIT_INSTANCE = 'chrome-internal-review'
+INTERNAL_GOB_HOST = GOB_HOST % INTERNAL_GOB_INSTANCE
+INTERNAL_GERRIT_HOST = GOB_HOST % INTERNAL_GERRIT_INSTANCE
+INTERNAL_GOB_URL = 'https://%s' % INTERNAL_GOB_HOST
+INTERNAL_GERRIT_URL = 'https://%s' % INTERNAL_GERRIT_HOST
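+
+# For reference, the values above expand to, e.g.:
+#   EXTERNAL_GERRIT_URL == 'https://chromium-review.googlesource.com'
+#   INTERNAL_GERRIT_URL == 'https://chrome-internal-review.googlesource.com'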
+
+# Tests without a 'cheets_CTS_' or 'cheets_GTS.' prefix will not be considered
+# CTS/GTS tests in chromite.lib.cts_helper.
+DEFAULT_CTS_TEST_XML_MAP = {
+    'cheets_CTS_': 'test_result.xml',
+    'cheets_GTS.': 'test_result.xml',
+    'cheets_GTS_': 'test_result.xml',
+}
+# Google Storage bucket URI to store results in.
+DEFAULT_CTS_RESULTS_GSURI = 'gs://chromeos-cts-results/'
+DEFAULT_CTS_APFE_GSURI = 'gs://chromeos-cts-apfe/'
+
+ANDROID_CONTAINER_PACKAGE_KEYWORD = 'android-container'
+ANDROID_VM_PACKAGE_KEYWORD = 'android-vm'
+
+ANDROID_BUCKET_URL = 'gs://android-build-chromeos/builds'
+ANDROID_PI_BUILD_BRANCH = 'git_pi-arc'
+ANDROID_VMRVC_BUILD_BRANCH = 'git_rvc-arc-dev'
+ANDROID_VMMST_BUILD_BRANCH = 'git_master-arc-dev'
+
+ANDROID_PI_BUILD_TARGETS = {
+    # Roll XkbToKcmConverter with system image. It's a host executable and
+    # doesn't depend on the target as long as it's the pi-arc branch. The
+    # converter is ARC specific and not a part of the Android SDK. Having a
+    # custom target like SDK_TOOLS might be better in the long term, but let's
+    # use one from the ARM or X86 target as there are no other similar
+    # executables right now.
+    # We put it in two buckets because we have separate ACLs for arm and x86.
+    # http://b/128405786
+    'APPS': ('linux-apps', 'org.chromium.arc.cachebuilder.jar'),
+    'ARM': ('linux-cheets_arm-user', r'(\.zip|/XkbToKcmConverter)$'),
+    'ARM64': ('linux-cheets_arm64-user', r'(\.zip|/XkbToKcmConverter)$'),
+    'X86': ('linux-cheets_x86-user', r'(\.zip|/XkbToKcmConverter)$'),
+    'X86_64': ('linux-cheets_x86_64-user', r'\.zip$'),
+    'ARM_USERDEBUG': ('linux-cheets_arm-userdebug', r'\.zip$'),
+    'ARM64_USERDEBUG': ('linux-cheets_arm64-userdebug', r'\.zip$'),
+    'X86_USERDEBUG': ('linux-cheets_x86-userdebug', r'\.zip$'),
+    'X86_64_USERDEBUG': ('linux-cheets_x86_64-userdebug', r'\.zip$'),
+    'SDK_GOOGLE_X86_USERDEBUG': ('linux-sdk_cheets_x86-userdebug', r'\.zip$'),
+    'SDK_GOOGLE_X86_64_USERDEBUG': ('linux-sdk_cheets_x86_64-userdebug',
+                                    r'\.zip$'),
+}
+ANDROID_VMMST_BUILD_TARGETS = {
+    # For XkbToKcmConverter, see the comment in ANDROID_PI_BUILD_TARGETS.
+    'X86_64_USERDEBUG': ('linux-bertha_x86_64-userdebug',
+                         r'(\.zip|/XkbToKcmConverter)$'),
+}
+ANDROID_VMRVC_BUILD_TARGETS = {
+    # For XkbToKcmConverter, see the comment in ANDROID_PI_BUILD_TARGETS.
+    'APPS': ('linux-apps', 'org.chromium.arc.cachebuilder.jar'),
+    'ARM64': ('linux-bertha_arm64-user', r'(\.zip|/XkbToKcmConverter)$'),
+    'X86_64': ('linux-bertha_x86_64-user', r'(\.zip|/XkbToKcmConverter)$'),
+    'ARM64_USERDEBUG': ('linux-bertha_arm64-userdebug',
+                        r'(\.zip|/XkbToKcmConverter)$'),
+    'X86_64_USERDEBUG': ('linux-bertha_x86_64-userdebug',
+                         r'(\.zip|/XkbToKcmConverter)$'),
+}
+
+# These refer to *_TARGET variables in Android ebuild files, used when
+# parsing ebuilds to determine the corresponding Android branch.
+# NOTE: We may use the `|` operator to union dict keys after we completely
+# move to Python 3.
+ANDROID_ALL_BUILD_TARGETS = frozenset(
+    x + '_TARGET' for x in itertools.chain(
+        ANDROID_PI_BUILD_TARGETS,
+        ANDROID_VMMST_BUILD_TARGETS,
+        ANDROID_VMRVC_BUILD_TARGETS,
+    )
+)
+
+ARC_BUCKET_URL = 'gs://chromeos-arc-images/builds'
+ARC_BUCKET_ACLS = {
+    'APPS': 'googlestorage_acl_public.txt',
+    'ARM': 'googlestorage_acl_arm.txt',
+    'ARM64': 'googlestorage_acl_arm.txt',
+    'X86': 'googlestorage_acl_x86.txt',
+    'X86_64': 'googlestorage_acl_x86.txt',
+    'ARM_USERDEBUG': 'googlestorage_acl_arm.txt',
+    'ARM64_USERDEBUG': 'googlestorage_acl_arm.txt',
+    'X86_USERDEBUG': 'googlestorage_acl_x86.txt',
+    'X86_64_USERDEBUG': 'googlestorage_acl_x86.txt',
+    'SDK_GOOGLE_X86_USERDEBUG': 'googlestorage_acl_x86.txt',
+    'SDK_GOOGLE_X86_64_USERDEBUG': 'googlestorage_acl_x86.txt',
+}
+ANDROID_SYMBOLS_URL_TEMPLATE = (
+    ARC_BUCKET_URL +
+    '/%(branch)s-linux-%(target)s_%(arch)s-%(variant)s/%(version)s'
+    '/%(target)s_%(arch)s%(suffix)s-symbols-%(version)s.zip')
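+# Illustrative expansion of the template above (the version and the empty
+# suffix are hypothetical placeholders):
+#   ANDROID_SYMBOLS_URL_TEMPLATE % dict(
+#       branch='git_pi-arc', target='cheets', arch='x86_64', variant='user',
+#       version='1234', suffix='')
+#   == ('gs://chromeos-arc-images/builds/git_pi-arc-linux-cheets_x86_64-user'
+#       '/1234/cheets_x86_64-symbols-1234.zip')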
+ANDROID_SYMBOLS_FILE = 'android-symbols.zip'
+# x86-user, x86-userdebug and x86-eng builders create build artifacts with the
+# same name, e.g. cheets_x86-target_files-${VERSION}.zip. ChromeOS builders
+# that need to select x86-user or x86-userdebug artifacts at emerge time need
+# the artifacts to have different filenames to avoid checksum failures. These
+# targets will have their artifacts renamed when the PFQ copies them from the
+# Android bucket to the ARC++ bucket (b/33072485).
+ARC_BUILDS_NEED_ARTIFACTS_RENAMED = {
+    'ARM_USERDEBUG',
+    'ARM64_USERDEBUG',
+    'X86_USERDEBUG',
+    'X86_64_USERDEBUG',
+    'SDK_GOOGLE_X86_USERDEBUG',
+    'SDK_GOOGLE_X86_64_USERDEBUG',
+}
+# All builds will have the same name without a target prefix.
+# Emerge checksum failures will be worked around by the ebuild rename
+# symbol (->).
+ARC_ARTIFACTS_RENAME_NOT_NEEDED = [
+    'push_to_device.zip',
+    'sepolicy.zip',
+    'XkbToKcmConverter',
+]
+
+GOB_COOKIE_PATH = os.path.expanduser('~/.git-credential-cache/cookie')
+GITCOOKIES_PATH = os.path.expanduser('~/.gitcookies')
+
+# Timestamps in the JSON from GoB's web interface are of the form 'Tue
+# Dec 02 17:48:06 2014' and are assumed to be in UTC.
+GOB_COMMIT_TIME_FORMAT = '%a %b %d %H:%M:%S %Y'
+
+CHROMITE_PROJECT = 'chromiumos/chromite'
+CHROMITE_URL = '%s/%s' % (EXTERNAL_GOB_URL, CHROMITE_PROJECT)
+CHROMIUM_SRC_PROJECT = 'chromium/src'
+CHROMIUM_GOB_URL = '%s/%s.git' % (EXTERNAL_GOB_URL, CHROMIUM_SRC_PROJECT)
+CHROME_INTERNAL_PROJECT = 'chrome/src-internal'
+CHROME_INTERNAL_GOB_URL = '%s/%s.git' % (
+    INTERNAL_GOB_URL, CHROME_INTERNAL_PROJECT)
+
+DEFAULT_MANIFEST = 'default.xml'
+OFFICIAL_MANIFEST = 'official.xml'
+LKGM_MANIFEST = 'LKGM/lkgm.xml'
+
+SHARED_CACHE_ENVVAR = 'CROS_CACHEDIR'
+PARALLEL_EMERGE_STATUS_FILE_ENVVAR = 'PARALLEL_EMERGE_STATUS_FILE'
+
+# These projects can be responsible for infra failures.
+INFRA_PROJECTS = (CHROMITE_PROJECT,)
+
+# The manifest contains extra attributes in the 'project' nodes to determine our
+# branching strategy for the project.
+#   create: Create a new branch on the project repo for the new CrOS branch.
+#           This is the default.
+#   pin: On the CrOS branch, pin the project to the current revision.
+#   tot: On the CrOS branch, the project still tracks ToT.
+MANIFEST_ATTR_BRANCHING = 'branch-mode'
+MANIFEST_ATTR_BRANCHING_CREATE = 'create'
+MANIFEST_ATTR_BRANCHING_PIN = 'pin'
+MANIFEST_ATTR_BRANCHING_TOT = 'tot'
+MANIFEST_ATTR_BRANCHING_ALL = (
+    MANIFEST_ATTR_BRANCHING_CREATE,
+    MANIFEST_ATTR_BRANCHING_PIN,
+    MANIFEST_ATTR_BRANCHING_TOT,
+)
+
+STREAK_COUNTERS = 'streak_counters'
+
+PATCH_BRANCH = 'patch_branch'
+STABLE_EBUILD_BRANCH = 'stabilizing_branch'
+MERGE_BRANCH = 'merge_branch'
+
+# These branches are deleted at the beginning of every buildbot run.
+CREATED_BRANCHES = [
+    PATCH_BRANCH,
+    STABLE_EBUILD_BRANCH,
+    MERGE_BRANCH
+]
+
+# Default OS target packages.
+TARGET_OS_PKG = 'virtual/target-os'
+TARGET_OS_DEV_PKG = 'virtual/target-os-dev'
+TARGET_OS_TEST_PKG = 'virtual/target-os-test'
+TARGET_OS_FACTORY_PKG = 'virtual/target-os-factory'
+
+# Constants for uprevving Chrome
+
+CHROMEOS_BASE = 'chromeos-base'
+
+# Portage category and package name for Chrome.
+CHROME_CN = CHROMEOS_BASE
+CHROME_PN = 'chromeos-chrome'
+CHROME_CP = '%s/%s' % (CHROME_CN, CHROME_PN)
+
+# Other packages to uprev while uprevving Chrome.
+OTHER_CHROME_PACKAGES = ['chromeos-base/chromium-source',
+                         'chromeos-base/chrome-icu']
+
+# Chrome use flags
+USE_CHROME_INTERNAL = 'chrome_internal'
+USE_AFDO_USE = 'afdo_use'
+
+
+# Builds and validates _alpha ebuilds.  These builds sync to the latest
+# revision of the Chromium src tree and build with that checkout.
+CHROME_REV_TOT = 'tot'
+
+# Builds and validates chrome at a given revision through cbuildbot
+# --chrome_version
+CHROME_REV_SPEC = 'spec'
+
+# Builds and validates the latest Chromium release as defined by
+# ~/trunk/releases in the Chrome src tree.  These ebuilds are suffixed with rc.
+CHROME_REV_LATEST = 'latest_release'
+
+# Builds and validates the latest Chromium release for a specific Chromium
+# branch that we want to watch.  These ebuilds are suffixed with rc.
+CHROME_REV_STICKY = 'stable_release'
+
+# Builds and validates Chromium for a pre-populated directory.
+# Also uses _alpha, since portage doesn't have anything lower.
+CHROME_REV_LOCAL = 'local'
+VALID_CHROME_REVISIONS = [CHROME_REV_TOT, CHROME_REV_LATEST,
+                          CHROME_REV_STICKY, CHROME_REV_LOCAL, CHROME_REV_SPEC]
+
+
+# Constants for uprevving Android.
+
+# Portage package name for Android container.
+ANDROID_PACKAGE_NAME = 'android-container'
+
+# Builds and validates the latest Android release.
+ANDROID_REV_LATEST = 'latest_release'
+VALID_ANDROID_REVISIONS = [ANDROID_REV_LATEST]
+
+# Build types supported.
+
+# TODO(sosa): Deprecate PFQ type.
+# Incremental builds that are built using binary packages when available.
+# These builds have less validation than other build types.
+INCREMENTAL_TYPE = 'binary'
+
+# These builds serve as PFQ builders.  This is being deprecated.
+PFQ_TYPE = 'pfq'
+
+# Android PFQ type.  Builds and validates new versions of Android.
+ANDROID_PFQ_TYPE = 'android'
+
+# Builds from source and non-incremental.  These builds fully wipe their
+# chroot before the start of every build and do not use a BINHOST.
+FULL_TYPE = 'full'
+
+# Full but with versioned logic.
+CANARY_TYPE = 'canary'
+
+# Generate payloads for an already built build/version.
+PAYLOADS_TYPE = 'payloads'
+
+# Similar behavior to canary, but used to validate toolchain changes.
+TOOLCHAIN_TYPE = 'toolchain'
+
+# Generic type of tryjob only build configs.
+TRYJOB_TYPE = 'tryjob'
+
+# Special build type for Chroot builders.  These builds focus on building
+# toolchains and validate that they work.
+CHROOT_BUILDER_TYPE = 'chroot'
+CHROOT_BUILDER_BOARD = 'amd64-host'
+
+# Used for builds that don't require a type.
+GENERIC_TYPE = 'generic'
+
+VALID_BUILD_TYPES = (
+    INCREMENTAL_TYPE,
+    FULL_TYPE,
+    CANARY_TYPE,
+    CHROOT_BUILDER_TYPE,
+    CHROOT_BUILDER_BOARD,
+    ANDROID_PFQ_TYPE,
+    PFQ_TYPE,
+    PAYLOADS_TYPE,
+    TOOLCHAIN_TYPE,
+    TRYJOB_TYPE,
+    GENERIC_TYPE,
+)
+
+HWTEST_TRYBOT_NUM = 3
+HWTEST_QUOTA_POOL = 'quota'
+
+HWTEST_QUOTA_ACCOUNT_BVT = 'legacypool-bvt'
+HWTEST_QUOTA_ACCOUNT_BVT_SYNC = 'bvt-sync'
+HWTEST_QUOTA_ACCOUNT_PFQ = 'pfq'
+HWTEST_QUOTA_ACCOUNT_SUITES = 'legacypool-suites'
+HWTEST_QUOTA_ACCOUNT_TOOLCHAIN = 'toolchain'
+
+# How many total test retries should be done for a suite.
+HWTEST_MAX_RETRIES = 5
+
+# Defines for the various hardware test suites:
+#   BVT:  Basic blocking suite to be run against any build that
+#       requires a HWTest phase.
+#   COMMIT:  Suite of basic tests required for commits to the source
+#       tree.  Runs as a blocking suite on the CQ and PFQ; runs as
+#       a non-blocking suite on canaries.
+#   CANARY:  Non-blocking suite run only against the canaries.
+#   AFDO:  Non-blocking suite run only on AFDO builders.
+#   MOBLAB: Blocking Suite run only on *_moblab builders.
+#   INSTALLER: Blocking suite run against all canaries; tests basic installer
+#              functionality.
+HWTEST_ARC_COMMIT_SUITE = 'bvt-arc'
+HWTEST_BVT_SUITE = 'bvt-inline'
+HWTEST_COMMIT_SUITE = 'bvt-cq'
+HWTEST_CANARY_SUITE = 'bvt-perbuild'
+HWTEST_INSTALLER_SUITE = 'bvt-installer'
+# Runs all non-informational Tast tests (exercising any of OS, Chrome, and ARC).
+HWTEST_TAST_CQ_SUITE = 'bvt-tast-cq'
+# Runs non-informational Tast tests exercising either Chrome or ARC.
+HWTEST_TAST_CHROME_PFQ_SUITE = 'bvt-tast-chrome-pfq'
+# Runs non-informational Tast tests exercising ARC.
+HWTEST_TAST_ANDROID_PFQ_SUITE = 'bvt-tast-android-pfq'
+# Runs all Tast informational tests.
+HWTEST_TAST_INFORMATIONAL_SUITE = 'bvt-tast-informational'
+HWTEST_AFDO_SUITE = 'AFDO_record'
+HWTEST_JETSTREAM_COMMIT_SUITE = 'jetstream_cq'
+HWTEST_MOBLAB_SUITE = 'moblab'
+HWTEST_MOBLAB_QUICK_SUITE = 'moblab_quick'
+HWTEST_SANITY_SUITE = 'sanity'
+HWTEST_TOOLCHAIN_SUITE = 'toolchain-tests'
+# Non-blocking informational hardware tests for Chrome, run throughout the
+# day on tip-of-trunk Chrome rather than on the daily Chrome branch.
+HWTEST_CHROME_INFORMATIONAL = 'chrome-informational'
+
+# Additional timeout to wait for autotest to abort a suite if the test takes
+# too long to run. This is meant to be overly conservative as a timeout may
+# indicate that autotest is at capacity.
+HWTEST_TIMEOUT_EXTENSION = 10 * 60
+
+HWTEST_WEEKLY_PRIORITY = 'Weekly'
+HWTEST_CTS_PRIORITY = 'CTS'
+HWTEST_GTS_PRIORITY = HWTEST_CTS_PRIORITY
+HWTEST_DAILY_PRIORITY = 'Daily'
+HWTEST_DEFAULT_PRIORITY = 'DEFAULT'
+HWTEST_CQ_PRIORITY = 'CQ'
+HWTEST_BUILD_PRIORITY = 'Build'
+HWTEST_PFQ_PRIORITY = 'PFQ'
+HWTEST_POST_BUILD_PRIORITY = 'PostBuild'
+
+# Ordered by priority (first item being lowest).
+HWTEST_VALID_PRIORITIES = [HWTEST_WEEKLY_PRIORITY,
+                           HWTEST_CTS_PRIORITY,
+                           HWTEST_DAILY_PRIORITY,
+                           HWTEST_POST_BUILD_PRIORITY,
+                           HWTEST_DEFAULT_PRIORITY,
+                           HWTEST_BUILD_PRIORITY,
+                           HWTEST_PFQ_PRIORITY,
+                           HWTEST_CQ_PRIORITY]
+
+# Creates a mapping of priorities to make comparisons easy.
+# Use the same priorities mapping as autotest/client/common_lib/priorities.py
+HWTEST_PRIORITIES_MAP = {
+    HWTEST_WEEKLY_PRIORITY: 10,
+    HWTEST_CTS_PRIORITY: 11,
+    HWTEST_DAILY_PRIORITY: 20,
+    HWTEST_POST_BUILD_PRIORITY: 30,
+    HWTEST_DEFAULT_PRIORITY: 40,
+    HWTEST_BUILD_PRIORITY: 50,
+    HWTEST_PFQ_PRIORITY: 60,
+    HWTEST_CQ_PRIORITY: 70}
+
+# Creates a mapping of priorities for skylab hwtest tasks. In swarming,
+# a lower number means a higher priority. Priorities lower than 48 are
+# reserved for special tasks. The upper bound of priority is 255.
+# Use the same priorities mapping as autotest/venv/skylab_suite/swarming_lib.py
+SKYLAB_HWTEST_PRIORITIES_MAP = {
+    HWTEST_WEEKLY_PRIORITY: 230,
+    HWTEST_CTS_PRIORITY: 215,
+    HWTEST_DAILY_PRIORITY: 200,
+    HWTEST_POST_BUILD_PRIORITY: 170,
+    HWTEST_DEFAULT_PRIORITY: 140,
+    HWTEST_BUILD_PRIORITY: 110,
+    HWTEST_PFQ_PRIORITY: 80,
+    HWTEST_CQ_PRIORITY: 50,
+}
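+# Illustration (derived from the two maps above): HWTEST_CQ_PRIORITY maps to
+# 70 in HWTEST_PRIORITIES_MAP (highest autotest priority) but to 50 in
+# SKYLAB_HWTEST_PRIORITIES_MAP (most urgent value used here, since lower
+# swarming numbers run first).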
+
+# The environment for executing tests.
+ENV_SKYLAB = 'skylab'
+ENV_AUTOTEST = 'autotest'
+
+# The cipd package for skylab tool
+CIPD_SKYLAB_PACKAGE = 'chromiumos/infra/skylab/linux-amd64'
+# crbug.com/1108489: The skylab tool CIPD package is pinned to a specific
+# version to avoid uncontrolled tool release and so that the tool is effectively
+# branched with cbuildbot.
+CIPD_SKYLAB_INSTANCE_ID = 'LU2Xmdk1oXyZPuiEfzDQhUWFMXY3jYQNPOzHRkRkZBEC'
+
+# HWTest result statuses
+HWTEST_STATUS_PASS = 'pass'
+HWTEST_STATUS_FAIL = 'fail'
+HWTEST_STATUS_ABORT = 'abort'
+HWTEST_STATUS_OTHER = 'other'
+HWTEST_STATUES_NOT_PASSED = frozenset([HWTEST_STATUS_FAIL,
+                                       HWTEST_STATUS_ABORT,
+                                       HWTEST_STATUS_OTHER])
+
+# Build messages
+MESSAGE_TYPE_IGNORED_REASON = 'ignored_reason'
+MESSAGE_TYPE_ANNOTATIONS_FINALIZED = 'annotations_finalized'
+# MESSAGE_TYPE_IGNORED_REASON messages store the affected build as
+# the CIDB column message_value.
+MESSAGE_SUBTYPE_SELF_DESTRUCTION = 'self_destruction'
+
+# Define HWTEST job_keyvals
+JOB_KEYVAL_DATASTORE_PARENT_KEY = 'datastore_parent_key'
+JOB_KEYVAL_CIDB_BUILD_ID = 'cidb_build_id'
+JOB_KEYVAL_CIDB_BUILD_STAGE_ID = 'cidb_build_stage_id'
+JOB_KEYVAL_BUILD_CONFIG = 'build_config'
+JOB_KEYVAL_MASTER_BUILD_CONFIG = 'master_build_config'
+JOB_KEYVAL_BRANCH = 'branch'
+
+
+# How many total test retries should be done for a suite.
+VM_TEST_MAX_RETRIES = 5
+# Defines VM Test types.
+SIMPLE_AU_TEST_TYPE = 'pfq_suite'
+VM_SUITE_TEST_TYPE = 'vm_suite'
+GCE_SUITE_TEST_TYPE = 'gce_suite'
+CROS_VM_TEST_TYPE = 'cros_vm_test'
+DEV_MODE_TEST_TYPE = 'dev_mode_test'
+VALID_VM_TEST_TYPES = [
+    SIMPLE_AU_TEST_TYPE,
+    VM_SUITE_TEST_TYPE,
+    GCE_SUITE_TEST_TYPE,
+    CROS_VM_TEST_TYPE,
+    DEV_MODE_TEST_TYPE
+]
+VALID_GCE_TEST_SUITES = ['gce-smoke', 'gce-sanity']
+# MoblabVM tests are suites of tests used to validate a moblab image via
+# VMTests.
+MOBLAB_VM_SMOKE_TEST_TYPE = 'moblab_smoke_test'
+
+CHROMIUMOS_OVERLAY_DIR = 'src/third_party/chromiumos-overlay'
+PORTAGE_STABLE_OVERLAY_DIR = 'src/third_party/portage-stable'
+ECLASS_OVERLAY_DIR = 'src/third_party/eclass-overlay'
+CHROMEOS_PARTNER_OVERLAY_DIR = 'src/private-overlays/chromeos-partner-overlay/'
+PUBLIC_BINHOST_CONF_DIR = os.path.join(CHROMIUMOS_OVERLAY_DIR,
+                                       'chromeos/binhost')
+PRIVATE_BINHOST_CONF_DIR = os.path.join(CHROMEOS_PARTNER_OVERLAY_DIR,
+                                        'chromeos/binhost')
+
+VERSION_FILE = os.path.join(CHROMIUMOS_OVERLAY_DIR,
+                            'chromeos/config/chromeos_version.sh')
+SDK_VERSION_FILE = os.path.join(PUBLIC_BINHOST_CONF_DIR,
+                                'host/sdk_version.conf')
+SDK_GS_BUCKET = 'chromiumos-sdk'
+
+PUBLIC = 'public'
+PRIVATE = 'private'
+
+BOTH_OVERLAYS = 'both'
+PUBLIC_OVERLAYS = PUBLIC
+PRIVATE_OVERLAYS = PRIVATE
+VALID_OVERLAYS = [BOTH_OVERLAYS, PUBLIC_OVERLAYS, PRIVATE_OVERLAYS, None]
+
+# Common default logging settings for use with the logging module.
+LOGGER_FMT = '%(asctime)s: %(levelname)s: %(message)s'
+LOGGER_DATE_FMT = '%H:%M:%S'
+
+# Used by remote patch serialization/deserialization.
+INTERNAL_PATCH_TAG = 'i'
+EXTERNAL_PATCH_TAG = 'e'
+PATCH_TAGS = (INTERNAL_PATCH_TAG, EXTERNAL_PATCH_TAG)
+
+GERRIT_ON_BORG_LABELS = {
+    'Code-Review': 'CRVW',
+    'Commit-Queue': 'COMR',
+    'Verified': 'VRIF',
+}
+
+# Environment variables that should be exposed to all children processes
+# invoked via cros_build_lib.run.
+ENV_PASSTHRU = ('CROS_SUDO_KEEP_ALIVE', SHARED_CACHE_ENVVAR,
+                PARALLEL_EMERGE_STATUS_FILE_ENVVAR)
+
+# List of variables to proxy into the chroot from the host, and to
+# have sudo export if existent. Anytime this list is modified, a new
+# chroot_version_hooks.d upgrade script that symlinks to 153_rewrite_sudoers.d
+# should be created.
+CHROOT_ENVIRONMENT_WHITELIST = (
+    'CHROMEOS_OFFICIAL',
+    'CHROMEOS_VERSION_AUSERVER',
+    'CHROMEOS_VERSION_DEVSERVER',
+    'CHROMEOS_VERSION_TRACK',
+    'GCC_GITHASH',
+    'GIT_AUTHOR_EMAIL',
+    'GIT_AUTHOR_NAME',
+    'GIT_COMMITTER_EMAIL',
+    'GIT_COMMITTER_NAME',
+    'GIT_PROXY_COMMAND',
+    'GIT_SSH',
+    'RSYNC_PROXY',
+    'SSH_AGENT_PID',
+    'SSH_AUTH_SOCK',
+    'TMUX',
+    'USE',
+    'all_proxy',
+    'ftp_proxy',
+    'http_proxy',
+    'https_proxy',
+    'no_proxy',
+)
+
+# Paths for Chrome LKGM which are relative to the Chromium base url.
+CHROME_LKGM_FILE = 'CHROMEOS_LKGM'
+PATH_TO_CHROME_LKGM = 'chromeos/%s' % CHROME_LKGM_FILE
+# Path for the Chrome LKGM's closest OWNERS file.
+PATH_TO_CHROME_CHROMEOS_OWNERS = 'chromeos/OWNERS'
+
+# Cache constants.
+COMMON_CACHE = 'common'
+
+def _SlashToUnderscore(string):
+    """Artifact constants."""
+    return string.replace('/', '_')
+
+
+def ImageBinToGceTar(image_bin):
+    """GCE tar ball constants."""
+    assert image_bin.endswith('.bin'), (
+            'Filename %s does not end with ".bin"' % image_bin)
+    return '%s_gce.tar.gz' % os.path.splitext(image_bin)[0]
+
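+# Illustration: ImageBinToGceTar('chromiumos_test_image.bin') yields
+# 'chromiumos_test_image_gce.tar.gz'.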
+
+RELEASE_BUCKET = 'gs://chromeos-releases'
+TRASH_BUCKET = 'gs://chromeos-throw-away-bucket'
+CHROME_SYSROOT_TAR = 'sysroot_%s.tar.xz' % _SlashToUnderscore(CHROME_CP)
+CHROME_ENV_TAR = 'environment_%s.tar.xz' % _SlashToUnderscore(CHROME_CP)
+CHROME_ENV_FILE = 'environment'
+BASE_IMAGE_NAME = 'chromiumos_base_image'
+BASE_IMAGE_TAR = '%s.tar.xz' % BASE_IMAGE_NAME
+BASE_IMAGE_BIN = '%s.bin' % BASE_IMAGE_NAME
+BASE_IMAGE_GCE_TAR = ImageBinToGceTar(BASE_IMAGE_BIN)
+IMAGE_SCRIPTS_NAME = 'image_scripts'
+IMAGE_SCRIPTS_TAR = '%s.tar.xz' % IMAGE_SCRIPTS_NAME
+TARGET_SYSROOT_TAR = 'sysroot_%s.tar.xz' % _SlashToUnderscore(TARGET_OS_PKG)
+VM_IMAGE_NAME = 'chromiumos_qemu_image'
+VM_IMAGE_BIN = '%s.bin' % VM_IMAGE_NAME
+VM_IMAGE_TAR = '%s.tar.xz' % VM_IMAGE_NAME
+VM_DISK_PREFIX = 'chromiumos_qemu_disk.bin'
+VM_MEM_PREFIX = 'chromiumos_qemu_mem.bin'
+VM_NUM_RETRIES = 0
+# Disabling Tast VM retries because of https://crbug.com/1098346.
+TAST_VM_NUM_RETRIES = 0
+TAST_VM_TEST_RESULTS = 'tast_vm_test_results_%(attempt)s'
+BASE_GUEST_VM_DIR = 'guest-vm-base'
+TEST_GUEST_VM_DIR = 'guest-vm-test'
+BASE_GUEST_VM_TAR = '%s.tar.xz' % BASE_GUEST_VM_DIR
+TEST_GUEST_VM_TAR = '%s.tar.xz' % TEST_GUEST_VM_DIR
+
+TEST_IMAGE_NAME = 'chromiumos_test_image'
+TEST_IMAGE_TAR = '%s.tar.xz' % TEST_IMAGE_NAME
+TEST_IMAGE_BIN = '%s.bin' % TEST_IMAGE_NAME
+TEST_IMAGE_GCE_TAR = ImageBinToGceTar(TEST_IMAGE_BIN)
+TEST_KEY_PRIVATE = 'id_rsa'
+TEST_KEY_PUBLIC = 'id_rsa.pub'
+
+DEV_IMAGE_NAME = 'chromiumos_image'
+DEV_IMAGE_BIN = '%s.bin' % DEV_IMAGE_NAME
+
+RECOVERY_IMAGE_NAME = 'recovery_image'
+RECOVERY_IMAGE_BIN = '%s.bin' % RECOVERY_IMAGE_NAME
+RECOVERY_IMAGE_TAR = '%s.tar.xz' % RECOVERY_IMAGE_NAME
+
+# Image type constants.
+IMAGE_TYPE_BASE = 'base'
+IMAGE_TYPE_DEV = 'dev'
+IMAGE_TYPE_TEST = 'test'
+IMAGE_TYPE_RECOVERY = 'recovery'
+IMAGE_TYPE_FACTORY = 'factory'
+IMAGE_TYPE_FIRMWARE = 'firmware'
+# USB PD accessory microcontroller firmware (e.g. power brick, display dongle).
+IMAGE_TYPE_ACCESSORY_USBPD = 'accessory_usbpd'
+# Standalone accessory microcontroller firmware (e.g. wireless keyboard).
+IMAGE_TYPE_ACCESSORY_RWSIG = 'accessory_rwsig'
+# Cr50 Firmware.
+IMAGE_TYPE_CR50_FIRMWARE = 'cr50_firmware'
+
+IMAGE_TYPE_TO_NAME = {
+    IMAGE_TYPE_BASE: BASE_IMAGE_BIN,
+    IMAGE_TYPE_DEV: DEV_IMAGE_BIN,
+    IMAGE_TYPE_RECOVERY: RECOVERY_IMAGE_BIN,
+    IMAGE_TYPE_TEST: TEST_IMAGE_BIN,
+}
+IMAGE_NAME_TO_TYPE = dict((v, k) for k, v in IMAGE_TYPE_TO_NAME.items())
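+# Illustration: IMAGE_NAME_TO_TYPE['chromiumos_test_image.bin'] yields
+# IMAGE_TYPE_TEST, the inverse of IMAGE_TYPE_TO_NAME[IMAGE_TYPE_TEST].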
+
+METADATA_JSON = 'metadata.json'
+PARTIAL_METADATA_JSON = 'partial-metadata.json'
+METADATA_TAGS = 'tags'
+DELTA_SYSROOT_TAR = 'delta_sysroot.tar.xz'
+DELTA_SYSROOT_BATCH = 'batch'
+
+FIRMWARE_ARCHIVE_NAME = 'firmware_from_source.tar.bz2'
+
+# Global configuration constants.
+CHROMITE_CONFIG_DIR = os.path.expanduser('~/.chromite')
+CHROME_SDK_BASHRC = os.path.join(CHROMITE_CONFIG_DIR, 'chrome_sdk.bashrc')
+SYNC_RETRIES = 4
+SLEEP_TIMEOUT = 30
+
+# Lab status url.
+LAB_STATUS_URL = 'http://chromiumos-lab.appspot.com/current?format=json'
+
+GOLO_SMTP_SERVER = 'mail.golo.chromium.org'
+
+CHROME_GARDENER = 'chrome'
+# Email alias to add as reviewer in Gerrit, which GWSQ will then automatically
+# assign to the current gardener.
+CHROME_GARDENER_REVIEW_EMAIL = 'chrome-os-gardeners@google.com'
+
+# Useful config targets.
+CANARY_MASTER = 'master-release'
+PFQ_MASTER = 'master-chromium-pfq'
+VMMST_ANDROID_PFQ_MASTER = 'master-vmmst-android-pfq'
+PI_ANDROID_PFQ_MASTER = 'master-pi-android-pfq'
+VMRVC_ANDROID_PFQ_MASTER = 'master-vmrvc-android-pfq'
+TOOLCHAIN_MASTTER = 'master-toolchain'
+
+
+# Email validation regex. Not quite fully compliant with RFC 2822, but a good
+# approximation.
+EMAIL_REGEX = r'[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}'
+
+# Blacklist of files not allowed to be uploaded into the Partner Project Google
+# Storage Buckets:
+# debug.tgz contains debug symbols.
+# manifest.xml exposes all of our repo names.
+# vm_test_results can contain symbolicated crash dumps.
+EXTRA_BUCKETS_FILES_BLACKLIST = [
+    'debug.tgz',
+    'manifest.xml',
+    'vm_test_results_*'
+]
+
+# AFDO common constants.
+# How long does the AFDO_record autotest have to generate the AFDO perf data.
+AFDO_GENERATE_TIMEOUT = 120 * 60
+
+# Gmail Credentials.
+GMAIL_TOKEN_CACHE_FILE = os.path.expanduser('~/.gmail_credentials')
+GMAIL_TOKEN_JSON_FILE = '/creds/refresh_tokens/chromeos_gmail_alerts'
+
+# Maximum number of boards per release group builder. This should be
+# chosen/adjusted based on expected release build times such that successive
+# builds don't overlap and create a backlog.
+MAX_RELEASE_GROUP_BOARDS = 4
+
+CHROMEOS_SERVICE_ACCOUNT = os.path.join('/', 'creds', 'service_accounts',
+                                        'service-account-chromeos.json')
+
+# Buildbucket buckets
+CHROMEOS_RELEASE_BUILDBUCKET_BUCKET = 'master.chromeos_release'
+CHROMEOS_BUILDBUCKET_BUCKET = 'master.chromeos'
+INTERNAL_SWARMING_BUILDBUCKET_BUCKET = 'luci.chromeos.general'
+
+ACTIVE_BUCKETS = [
+    CHROMEOS_RELEASE_BUILDBUCKET_BUCKET,
+    CHROMEOS_BUILDBUCKET_BUCKET,
+    INTERNAL_SWARMING_BUILDBUCKET_BUCKET,
+]
+
+# Build retry limit on buildbucket
+#
+# 2020-05-13 by engeg@: This is rarely effective; it causes confusion and
+# higher bot utilization, and if the initial try got past uploading artifacts
+# then the retry is destined to fail with a difficult-to-parse error.
+# 2020-05-19 by seanabraham@: Leave this at zero. These retries can break
+# Chrome-wide profiling. http://b/156994019
+BUILDBUCKET_BUILD_RETRY_LIMIT = 0  # Do not change. Read the above.
+
+# TODO(nxia): consolidate all run.metadata key constants,
+# add a unit test to avoid duplicated keys in run_metadata
+
+# Builder_run metadata keys
+METADATA_SCHEDULED_IMPORTANT_SLAVES = 'scheduled_important_slaves'
+METADATA_SCHEDULED_EXPERIMENTAL_SLAVES = 'scheduled_experimental_slaves'
+METADATA_UNSCHEDULED_SLAVES = 'unscheduled_slaves'
+# List of builders marked as experimental through the tree status, not all the
+# experimental builders for a run.
+METADATA_EXPERIMENTAL_BUILDERS = 'experimental_builders'
+
+# Metadata key to indicate whether a build is self-destructed.
+SELF_DESTRUCTED_BUILD = 'self_destructed_build'
+
+# Metadata key to indicate whether a build is self-destructed with success.
+SELF_DESTRUCTED_WITH_SUCCESS_BUILD = 'self_destructed_with_success_build'
+
+# Chroot snapshot names
+CHROOT_SNAPSHOT_CLEAN = 'clean-chroot'
+
+# Partition labels.
+PART_STATE = 'STATE'
+PART_ROOT_A = 'ROOT-A'
+PART_ROOT_B = 'ROOT-B'
+PART_KERN_A = 'KERN-A'
+PART_KERN_B = 'KERN-B'
+
+# Quick provision payloads. These file names should never be changed, otherwise
+# very bad things can happen :). The reason is we have already uploaded these
+# files with these names for all boards. So if the name changes, all scripts
+# that have been using this need to handle both cases to be backward compatible.
+QUICK_PROVISION_PAYLOAD_KERNEL = 'full_dev_part_KERN.bin.gz'
+QUICK_PROVISION_PAYLOAD_ROOTFS = 'full_dev_part_ROOT.bin.gz'
+
+# Mock build and stage IDs.
+MOCK_STAGE_ID = 313377
+MOCK_BUILD_ID = 31337
+
+# Topology dictionary copied from CIDB.
+TOPOLOGY_DICT = {
+    '/buildbucket/host':
+        'cr-buildbucket.appspot.com',
+    '/chrome_swarming_proxy/host':
+        'chromeos-swarming.appspot.com',
+    '/datastore/creds_file': ('/creds/service_accounts/service-account-chromeos'
+                              '-datastore-writer-prod.json'),
+    '/sheriffomatic/host':
+        'sheriff-o-matic.appspot.com',
+    '/statsd/es_host':
+        '104.154.79.237',
+    '/statsd/host':
+        '104.154.79.237',
+}
+
+# Percentage of child builders that need to complete to update LKGM
+LKGM_THRESHOLD = 80
diff --git a/utils/frozen_chromite/lib/cros_build_lib.py b/utils/frozen_chromite/lib/cros_build_lib.py
new file mode 100644
index 0000000..72c0a42
--- /dev/null
+++ b/utils/frozen_chromite/lib/cros_build_lib.py
@@ -0,0 +1,1827 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common python commands used by various build scripts."""
+
+from __future__ import print_function
+
+import base64
+import contextlib
+from datetime import datetime
+import email.utils
+import errno
+import functools
+import getpass
+import inspect
+import operator
+import os
+import re
+import signal
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+import six
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import cros_collections
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import signals
+
+
+STRICT_SUDO = False
+
+# For use by ShellQuote.  Match all characters that the shell might treat
+# specially.  This means a number of things:
+#  - Reserved characters.
+#  - Characters used in expansions (brace, variable, path, globs, etc...).
+#  - Characters that an interactive shell might use (like !).
+#  - Whitespace so that one arg turns into multiple.
+# See the bash man page as well as the POSIX shell documentation for more info:
+#   http://www.gnu.org/software/bash/manual/bashref.html
+#   http://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html
+_SHELL_QUOTABLE_CHARS = frozenset('[|&;()<> \t!{}[]=*?~$"\'\\#^')
+# The chars that, when used inside of double quotes, need escaping.
+# Order here matters as we need to escape backslashes first.
+_SHELL_ESCAPE_CHARS = r'\"`$'
+
+# If the number of files is larger than this, we will use the -T option,
+# and the files to be added may not show up on the command line.
+_THRESHOLD_TO_USE_T_FOR_TAR = 50
+
+
+def ShellQuote(s):
+  """Quote |s| in a way that is safe for use in a shell.
+
+  We aim to be safe, but also to produce "nice" output.  That means we don't
+  use quotes when we don't need to, and we prefer to use fewer quotes (like
+  putting it all in single quotes) than more (using double quotes and escaping
+  a bunch of stuff, or mixing the quotes).
+
+  While python does provide a number of alternatives like:
+   - pipes.quote
+   - shlex.quote
+  They suffer from various problems like:
+   - Not widely available in different python versions.
+   - Do not produce pretty output in many cases.
+   - Are in modules that rarely otherwise get used.
+
+  Note: We don't handle reserved shell words like "for" or "case".  This is
+  because those only matter when they're the first element in a command, and
+  there is no use case for that.  When we want to run commands, we tend to
+  run real programs and not shell ones.
+
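+  Examples (illustrative of the rules above):
+    ShellQuote('abc')    ==> abc
+    ShellQuote('a b c')  ==> 'a b c'
+    ShellQuote("a'b")    ==> "a'b"
+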
+  Args:
+    s: The string to quote.
+
+  Returns:
+    A safe (possibly quoted) version of the string.
+  """
+  if sys.version_info.major < 3:
+    # This is a bit of a hack.  Python 2 will display strings with u prefixes
+    # when logging, which makes things harder to work with.  Writing bytes to
+    # stdout will be interpreted as UTF-8 content implicitly.
+    if isinstance(s, six.string_types):
+      try:
+        s = s.encode('utf-8')
+      except UnicodeDecodeError:
+        # We tried our best.  Let Python's automatic mixed encoding kick in.
+        pass
+    else:
+      return repr(s)
+  else:
+    # If callers pass down bad types, don't blow up.
+    if isinstance(s, six.binary_type):
+      s = s.decode('utf-8', 'backslashreplace')
+    elif not isinstance(s, six.string_types):
+      return repr(s)
+
+  # See if no quoting is needed so we can return the string as-is.
+  for c in s:
+    if c in _SHELL_QUOTABLE_CHARS:
+      break
+  else:
+    if not s:
+      return "''"
+    else:
+      return s
+
+  # See if we can use single quotes first.  Output is nicer.
+  if "'" not in s:
+    return "'%s'" % s
+
+  # Have to use double quotes.  Escape the few chars that still expand when
+  # used inside of double quotes.
+  for c in _SHELL_ESCAPE_CHARS:
+    if c in s:
+      s = s.replace(c, r'\%s' % c)
+  return '"%s"' % s
+
+
+def TruncateStringToLine(s, maxlen=80):
+  """Truncate |s| to a maximum length of |maxlen| including elipsis (...)
+
+  Args:
+    s: A string.
+    maxlen: Maximum length of desired returned string. Must be at least 3.
+
+  Returns:
+    s if len(s) <= maxlen already and s has no newline in it.
+    Otherwise, a single line truncation that ends with '...' and is of
+    length |maxlen|.
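+
+  Example (illustrative): TruncateStringToLine('abcdefghij', maxlen=8)
+  returns 'abcde...'.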
+  """
+  assert maxlen >= 3
+  line = s.splitlines()[0]
+  if len(line) <= maxlen:
+    return line
+  else:
+    return line[:maxlen-3] + '...'
+
+
+def ShellUnquote(s):
+  """Do the opposite of ShellQuote.
+
+  This function assumes that the input is a valid escaped string. The behaviour
+  is undefined on malformed strings.
+
+  Args:
+    s: An escaped string.
+
+  Returns:
+    The unescaped version of the string.
+  """
+  if not s:
+    return ''
+
+  if s[0] == "'":
+    return s[1:-1]
+
+  if s[0] != '"':
+    return s
+
+  s = s[1:-1]
+  output = ''
+  i = 0
+  while i < len(s) - 1:
+    # Skip the backslash when it makes sense.
+    if s[i] == '\\' and s[i + 1] in _SHELL_ESCAPE_CHARS:
+      i += 1
+    output += s[i]
+    i += 1
+  return output + s[i] if i < len(s) else output
+
+
+def CmdToStr(cmd):
+  """Translate a command list into a space-separated string.
+
+  The resulting string should be suitable for logging messages and for
+  pasting into a terminal to run.  Command arguments are surrounded by
+  quotes to keep them grouped, even if an argument has spaces in it.
+
+  Examples:
+    ['a', 'b'] ==> "'a' 'b'"
+    ['a b', 'c'] ==> "'a b' 'c'"
+    ['a', 'b\'c'] ==> '\'a\' "b\'c"'
+    [u'a', "/'$b"] ==> '\'a\' "/\'$b"'
+    [] ==> ''
+    See unittest for additional (tested) examples.
+
+  Args:
+    cmd: List of command arguments.
+
+  Returns:
+    String representing full command.
+  """
+  # If callers pass down bad types, triage it a bit.
+  if isinstance(cmd, (list, tuple)):
+    return ' '.join(ShellQuote(arg) for arg in cmd)
+  else:
+    raise ValueError('cmd must be list or tuple, not %s: %r' %
+                     (type(cmd), repr(cmd)))
+
+
+class CompletedProcess(getattr(subprocess, 'CompletedProcess', object)):
+  """An object to store various attributes of a child process.
+
+  This is akin to subprocess.CompletedProcess.
+  """
+
+  # The linter is confused by the getattr usage above.
+  # TODO(vapier): Drop this once we're Python 3-only and we drop getattr.
+  # pylint: disable=bad-option-value,super-on-old-class
+  def __init__(self, args=None, returncode=None, stdout=None, stderr=None):
+    if sys.version_info.major < 3:
+      self.args = args
+      self.stdout = stdout
+      self.stderr = stderr
+      self.returncode = returncode
+    else:
+      super(CompletedProcess, self).__init__(
+          args=args, returncode=returncode, stdout=stdout, stderr=stderr)
+
+  @property
+  def cmd(self):
+    """Alias to self.args to better match other subprocess APIs."""
+    return self.args
+
+  @property
+  def cmdstr(self):
+    """Return self.cmd as a well shell-quoted string useful for log messages."""
+    if self.args is None:
+      return ''
+    else:
+      return CmdToStr(self.args)
+
+  def check_returncode(self):
+    """Raise CalledProcessError if the exit code is non-zero."""
+    if self.returncode:
+      raise CalledProcessError(
+          returncode=self.returncode, cmd=self.args, stdout=self.stdout,
+          stderr=self.stderr, msg='check_returncode failed')
+
+
+# TODO(crbug.com/1006587): Migrate users to CompletedProcess and drop this.
+class CommandResult(CompletedProcess):
+  """An object to store various attributes of a child process.
+
+  This is akin to subprocess.CompletedProcess.
+  """
+
+  # The linter is confused by the getattr usage above.
+  # TODO(vapier): Drop this once we're Python 3-only and we drop getattr.
+  # pylint: disable=bad-option-value,super-on-old-class
+  def __init__(self, cmd=None, error=None, output=None, returncode=None,
+               args=None, stdout=None, stderr=None):
+    if args is None:
+      args = cmd
+    elif cmd is not None:
+      raise TypeError('Only specify |args|, not |cmd|')
+    if stdout is None:
+      stdout = output
+    elif output is not None:
+      raise TypeError('Only specify |stdout|, not |output|')
+    if stderr is None:
+      stderr = error
+    elif error is not None:
+      raise TypeError('Only specify |stderr|, not |error|')
+
+    super(CommandResult, self).__init__(args=args, stdout=stdout, stderr=stderr,
+                                        returncode=returncode)
+
+  @property
+  def output(self):
+    """Backwards compat API."""
+    return self.stdout
+
+  @property
+  def error(self):
+    """Backwards compat API."""
+    return self.stderr
+
+
+class CalledProcessError(subprocess.CalledProcessError):
+  """Error caught in run() function.
+
+  This is akin to subprocess.CalledProcessError.  We do not support |output|,
+  only |stdout|.
+
+  Attributes:
+    returncode: The exit code of the process.
+    cmd: The command that triggered this exception.
+    msg: Short explanation of the error.
+    exception: The underlying Exception if available.
+  """
+
+  def __init__(self, returncode, cmd, stdout=None, stderr=None, msg=None,
+               exception=None):
+    if exception is not None and not isinstance(exception, Exception):
+      raise TypeError('exception must be an exception instance; got %r'
+                      % (exception,))
+
+    super(CalledProcessError, self).__init__(returncode, cmd, stdout)
+    # The parent class will set |output|, so delete it.
+    del self.output
+    # TODO(vapier): When we're Python 3-only, delete this assignment as the
+    # parent handles it for us.
+    self.stdout = stdout
+    # TODO(vapier): When we're Python 3-only, move stderr to the init above.
+    self.stderr = stderr
+    self.msg = msg
+    self.exception = exception
+
+  @property
+  def cmdstr(self):
+    """Return self.cmd as a well shell-quoted string useful for log messages."""
+    if self.cmd is None:
+      return ''
+    else:
+      return CmdToStr(self.cmd)
+
+  def Stringify(self, stdout=True, stderr=True):
+    """Custom method for controlling what is included in stringifying this.
+
+    Args:
+      stdout: Whether to include captured stdout in the return value.
+      stderr: Whether to include captured stderr in the return value.
+
+    Returns:
+      A summary string for this result.
+    """
+    items = [
+        u'return code: %s; command: %s' % (
+            self.returncode, self.cmdstr),
+    ]
+    if stderr and self.stderr:
+      stderr = self.stderr
+      if isinstance(stderr, six.binary_type):
+        stderr = stderr.decode('utf-8', 'replace')
+      items.append(stderr)
+    if stdout and self.stdout:
+      stdout = self.stdout
+      if isinstance(stdout, six.binary_type):
+        stdout = stdout.decode('utf-8', 'replace')
+      items.append(stdout)
+    if self.msg:
+      msg = self.msg
+      if isinstance(msg, six.binary_type):
+        msg = msg.decode('utf-8', 'replace')
+      items.append(msg)
+    return u'\n'.join(items)
+
+  def __str__(self):
+    if sys.version_info.major < 3:
+      # __str__ needs to return ascii, thus force a conversion to be safe.
+      return self.Stringify().encode('ascii', 'xmlcharrefreplace')
+    else:
+      return self.Stringify()
+
+  def __eq__(self, other):
+    return (isinstance(other, type(self)) and
+            self.returncode == other.returncode and
+            self.cmd == other.cmd and
+            self.stdout == other.stdout and
+            self.stderr == other.stderr and
+            self.msg == other.msg and
+            self.exception == other.exception)
+
+  def __ne__(self, other):
+    return not self.__eq__(other)
+
+
+# TODO(crbug.com/1006587): Migrate users to CompletedProcess and drop this.
+class RunCommandError(CalledProcessError):
+  """Error caught in run() method.
+
+  Attributes:
+    args: Tuple of the attributes below.
+    msg: Short explanation of the error.
+    result: The CommandResult that triggered this error, if available.
+    exception: The underlying Exception if available.
+  """
+
+  def __init__(self, msg, result=None, exception=None):
+    # This makes mocking tests easier.
+    if result is None:
+      result = CommandResult()
+    elif not isinstance(result, CommandResult):
+      raise TypeError('result must be a CommandResult instance; got %r'
+                      % (result,))
+
+    self.args = (msg, result, exception)
+    self.result = result
+    super(RunCommandError, self).__init__(
+        returncode=result.returncode, cmd=result.args, stdout=result.stdout,
+        stderr=result.stderr, msg=msg, exception=exception)
+
+
+class TerminateRunCommandError(RunCommandError):
+  """We were signaled to shutdown while running a command.
+
+  Client code shouldn't generally know, nor care about this class.  It's
+  used internally to suppress retry attempts when we're signaled to die.
+  """
+
+
+def sudo_run(cmd, user='root', preserve_env=False, **kwargs):
+  """Run a command via sudo.
+
+  Client code must use this rather than coming up with its own run
+  invocation that jams sudo in; this function is used to enforce certain
+  rules in our code about sudo usage, and as a potential auditing point.
+
+  Args:
+    cmd: The command to run.  See run for rules of this argument: sudo_run
+         purely prefixes it with sudo.
+    user: The user to run the command as.
+    preserve_env (bool): Whether to preserve the environment.
+    kwargs: See run() options, it's a direct pass thru to it.
+          Note that this supports a 'strict' keyword that defaults to True.
+          If set to False, it'll suppress strict sudo behavior.
+
+  Returns:
+    See run documentation.
+
+  Raises:
+    This function may immediately raise RunCommandError if we're operating
+    in a strict sudo context and the API is being misused.
+    Barring that, see run's documentation: it can raise the same things run
+    does.
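+
+  Examples (illustrative):
+    sudo_run(['ls', '/root'], stdout=True)
+    sudo_run(['true'], user='nobody')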
+  """
+  sudo_cmd = ['sudo']
+
+  strict = kwargs.pop('strict', True)
+
+  if user == 'root' and os.geteuid() == 0:
+    return run(cmd, **kwargs)
+
+  if strict and STRICT_SUDO:
+    if 'CROS_SUDO_KEEP_ALIVE' not in os.environ:
+      raise RunCommandError(
+          'We were invoked in a strict sudo non-interactive context, but no '
+          'sudo keep alive daemon is running.  This is a bug in the code.',
+          CommandResult(args=cmd, returncode=126))
+    sudo_cmd += ['-n']
+
+  if user != 'root':
+    sudo_cmd += ['-u', user]
+
+  if preserve_env:
+    sudo_cmd += ['--preserve-env']
+
+  # Pass these values down into the sudo environment, since sudo will
+  # just strip them normally.
+  extra_env = kwargs.pop('extra_env', None)
+  extra_env = {} if extra_env is None else extra_env.copy()
+
+  for var in constants.ENV_PASSTHRU:
+    if var not in extra_env and var in os.environ:
+      extra_env[var] = os.environ[var]
+
+  sudo_cmd.extend('%s=%s' % (k, v) for k, v in extra_env.items())
+
+  # Finally, block people from passing options to sudo.
+  sudo_cmd.append('--')
+
+  if isinstance(cmd, six.string_types):
+    # We need to handle shell ourselves so the order is correct:
+    #  $ sudo [sudo args] -- bash -c '[shell command]'
+    # If we let run take care of it, we'd end up with:
+    #  $ bash -c 'sudo [sudo args] -- [shell command]'
+    shell = kwargs.pop('shell', False)
+    if not shell:
+      raise Exception('Cannot run a string command without a shell')
+    sudo_cmd.extend(['/bin/bash', '-c', cmd])
+  else:
+    sudo_cmd.extend(cmd)
+
+  return run(sudo_cmd, **kwargs)
+
+
+def _KillChildProcess(proc, int_timeout, kill_timeout, cmd, original_handler,
+                      signum, frame):
+  """Used as a signal handler by run.
+
+  This is internal to run.  No other code should use this.
+  """
+  if signum:
+    # If we've been invoked because of a signal, ignore delivery of that signal
+    # from this point forward.  The invoking context of _KillChildProcess
+    # restores signal delivery to what it was prior; we suppress future delivery
+    # till then since this code handles SIGINT/SIGTERM fully including
+    # delivering the signal to the original handler on the way out.
+    signal.signal(signum, signal.SIG_IGN)
+
+  # Do not trust Popen's returncode alone; we can be invoked from contexts where
+  # the Popen instance was created, but no process was generated.
+  if proc.returncode is None and proc.pid is not None:
+    try:
+      while proc.poll_lock_breaker() is None and int_timeout >= 0:
+        time.sleep(0.1)
+        int_timeout -= 0.1
+
+      proc.terminate()
+      while proc.poll_lock_breaker() is None and kill_timeout >= 0:
+        time.sleep(0.1)
+        kill_timeout -= 0.1
+
+      if proc.poll_lock_breaker() is None:
+        # Still doesn't want to die.  Too bad, so sad, time to die.
+        proc.kill()
+    except EnvironmentError as e:
+      logging.warning('Ignoring unhandled exception in _KillChildProcess: %s',
+                      e)
+
+    # Ensure our child process has been reaped.
+    kwargs = {}
+    if sys.version_info.major >= 3:
+      # ... but don't wait forever.
+      kwargs['timeout'] = 60
+    proc.wait_lock_breaker(**kwargs)
+
+  if not signals.RelaySignal(original_handler, signum, frame):
+    # Mock up our own, matching exit code for signaling.
+    cmd_result = CommandResult(args=cmd, returncode=signum << 8)
+    raise TerminateRunCommandError('Received signal %i' % signum, cmd_result)
+
+
+class _Popen(subprocess.Popen):
+  """subprocess.Popen derivative customized for our usage.
+
+  Specifically, we fix terminate/send_signal/kill to work if the child process
+  was a setuid binary; on vanilla kernels, the parent can wax the child
+  regardless, but on goobuntu this apparently isn't allowed, so we fall back
+  to the sudo machinery we have.
+
+  While we're overriding send_signal, we also suppress ESRCH being raised
+  if the process has exited, and suppress signaling altogether if the process
+  has knowingly been waitpid'd already.
+  """
+
+  # Pylint seems to be buggy with the send_signal signature detection.
+  # pylint: disable=arguments-differ
+  def send_signal(self, sig):
+    if self.returncode is not None:
+      # The original implementation in Popen would allow signaling whatever
+      # process now occupies this pid, even if the Popen object had waitpid'd.
+      # Since we can escalate to sudo kill, we do not want to allow that.
+      # Fixing this addresses that angle, and makes the API less sucky in the
+      # process.
+      return
+
+    try:
+      os.kill(self.pid, sig)
+    except EnvironmentError as e:
+      if e.errno == errno.EPERM:
+        # Kill returns either 0 (signal delivered), or 1 (signal wasn't
+        # delivered).  This isn't particularly informative, but we still
+        # need that info to decide what to do, thus the check=False.
+        ret = sudo_run(['kill', '-%i' % sig, str(self.pid)],
+                       print_cmd=False, stdout=True,
+                       stderr=True, check=False)
+        if ret.returncode == 1:
+          # The kill binary doesn't distinguish between permission denied,
+          # and the pid is missing.  Denied can only occur under weird
+          # grsec/selinux policies.  We ignore that potential and just
+          # assume the pid was already dead and try to reap it.
+          self.poll()
+      elif e.errno == errno.ESRCH:
+        # Since we know the process is dead, reap it now.
+        # Normally Popen would throw this error; we suppress it since frankly
+        # that's a misfeature and we're already overriding this method.
+        self.poll()
+      else:
+        raise
+
+  def _lock_breaker(self, func, *args, **kwargs):
+    """Helper to manage the waitpid lock.
+
+    Workaround https://bugs.python.org/issue25960.
+    """
+    # If the lock doesn't exist, or is not locked, call the func directly.
+    lock = getattr(self, '_waitpid_lock', None)
+    if lock is not None and lock.locked():
+      try:
+        lock.release()
+        return func(*args, **kwargs)
+      finally:
+        if not lock.locked():
+          lock.acquire()
+    else:
+      return func(*args, **kwargs)
+
+  def poll_lock_breaker(self, *args, **kwargs):
+    """Wrapper around poll() to break locks if needed."""
+    return self._lock_breaker(self.poll, *args, **kwargs)
+
+  def wait_lock_breaker(self, *args, **kwargs):
+    """Wrapper around wait() to break locks if needed."""
+    return self._lock_breaker(self.wait, *args, **kwargs)
+
+
+# pylint: disable=redefined-builtin
+def run(cmd, print_cmd=True, stdout=None, stderr=None,
+        cwd=None, input=None, enter_chroot=False,
+        shell=False, env=None, extra_env=None, ignore_sigint=False,
+        chroot_args=None, debug_level=logging.INFO,
+        check=True, int_timeout=1, kill_timeout=1,
+        log_output=False, capture_output=False,
+        quiet=False, encoding=None, errors=None, dryrun=False,
+        **kwargs):
+  """Runs a command.
+
+  Args:
+    cmd: cmd to run.  Should be input to subprocess.Popen. If a string, shell
+      must be true. Otherwise the command must be an array of arguments, and
+      shell must be false.
+    print_cmd: prints the command before running it.
+    stdout: Where to send stdout.  This may be many things to control
+      redirection:
+        * None is the default; the existing stdout is used.
+        * An existing file object (must be opened with mode 'w' or 'wb').
+        * A string path to a file (will be truncated & opened automatically).
+        * subprocess.PIPE to capture & return the output.
+        * A boolean to indicate whether to capture the output.
+          True will capture the output via a tempfile (good for large output).
+        * An open file descriptor (as a positive integer).
+    stderr: Where to send stderr.  See |stdout| for possible values.  This also
+      may be subprocess.STDOUT to indicate stderr & stdout should be combined.
+    cwd: the working directory to run this cmd.
+    input: The data to pipe into this command through stdin.  If a file object
+      or file descriptor, stdin will be connected directly to that.
+    enter_chroot: this command should be run from within the chroot.  If set,
+      cwd must point to the scripts directory. If we are already inside the
+      chroot, this command will be run as if |enter_chroot| is False.
+    shell: Controls whether we add a shell as a command interpreter.  See cmd
+      since it has to agree as to the type.
+    env: If non-None, this is the environment for the new process.  If
+      enter_chroot is true then this is the environment of the enter_chroot,
+      most of which gets removed from the cmd run.
+    extra_env: If set, this is added to the environment for the new process.
+      In the enter_chroot=True case, these are specified on the post-entry
+      side, and so are often more useful.  This dictionary is not used to
+      clear any entries though.
+    ignore_sigint: If True, we'll ignore signal.SIGINT before calling the
+      child.  This is the desired behavior if we know our child will handle
+      Ctrl-C.  If we don't do this, I think we and the child will both get
+      Ctrl-C at the same time, which means we'll forcefully kill the child.
+    chroot_args: An array of arguments for the chroot environment wrapper.
+    debug_level: The debug level of run's output.
+    check: Whether to raise an exception when command returns a non-zero exit
+      code, or return the CommandResult object containing the exit code.
+      Note: will still raise an exception if the cmd file does not exist.
+    int_timeout: If we're interrupted, how long (in seconds) should we give the
+      invoked process to clean up before we send a SIGTERM.
+    kill_timeout: If we're interrupted, how long (in seconds) should we give the
+      invoked process to shutdown from a SIGTERM before we SIGKILL it.
+    log_output: Log the command and its output automatically.
+    capture_output: Set |stdout| and |stderr| to True.
+    quiet: Set |print_cmd| to False, and |capture_output| to True.
+    encoding: Encoding for stdin/stdout/stderr, otherwise bytes are used.  Most
+      users want 'utf-8' here for string data.
+    errors: How to handle errors when |encoding| is used.  Defaults to 'strict',
+      but 'ignore' and 'replace' are common settings.
+    dryrun: Only log the command, and return a stub result.
+
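+  Examples (illustrative usage, not from the original docstring):
+    result = run(['uname', '-r'], stdout=True, encoding='utf-8')
+    kernel_version = result.stdout.strip()
+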
+  Returns:
+    A CommandResult object.
+
+  Raises:
+    RunCommandError: Raised on error.
+  """
+  # Hide this function in pytest tracebacks when a RunCommandError is raised,
+  # as seeing the contents of this function when a command fails is not helpful.
+  # https://docs.pytest.org/en/latest/example/simple.html#writing-well-integrated-assertion-helpers
+  __tracebackhide__ = operator.methodcaller('errisinstance', RunCommandError)
+
+  # Handle backwards compatible settings.
+  if 'log_stdout_to_file' in kwargs:
+    logging.warning('run: log_stdout_to_file=X is now stdout=X')
+    log_stdout_to_file = kwargs.pop('log_stdout_to_file')
+    if log_stdout_to_file is not None:
+      stdout = log_stdout_to_file
+  stdout_file_mode = 'w+b'
+  if 'append_to_file' in kwargs:
+    # TODO(vapier): Enable this warning once chromite & users migrate.
+    # logging.warning('run: append_to_file is now part of stdout')
+    if kwargs.pop('append_to_file'):
+      stdout_file_mode = 'a+b'
+  assert not kwargs, 'Unknown arguments to run: %s' % (list(kwargs),)
+
+  if quiet:
+    print_cmd = False
+    capture_output = True
+
+  if capture_output:
+    # TODO(vapier): Enable this once we migrate all the legacy arguments above.
+    # if stdout is not None or stderr is not None:
+    #   raise ValueError('capture_output may not be used with stdout & stderr')
+    # TODO(vapier): Drop this specialization once we're Python 3-only as we can
+    # pass this argument down to Popen directly.
+    if stdout is None:
+      stdout = True
+    if stderr is None:
+      stderr = True
+
+  if encoding is not None and errors is None:
+    errors = 'strict'
+
+  # Set default for variables.
+  popen_stdout = None
+  popen_stderr = None
+  stdin = None
+  cmd_result = CommandResult()
+
+  # Force the timeout to float; in the process, if it's not convertible,
+  # a self-explanatory exception will be thrown.
+  kill_timeout = float(kill_timeout)
+
+  def _get_tempfile():
+    try:
+      return UnbufferedTemporaryFile()
+    except EnvironmentError as e:
+      if e.errno != errno.ENOENT:
+        raise
+      # This can occur if we were pointed at a specific location for our
+      # TMP, but that location has since been deleted.  Suppress that issue
+      # in this particular case since our usage guarantees deletion,
+      # and since this is primarily triggered during hard cgroups shutdown.
+      return UnbufferedTemporaryFile(dir='/tmp')
+
+  # Modify defaults based on parameters.
+  # Note that tempfiles must be unbuffered else attempts to read
+  # what a separate process did to that file can result in a bad
+  # view of the file.
+  log_stdout_to_file = False
+  if isinstance(stdout, six.string_types):
+    popen_stdout = open(stdout, stdout_file_mode)
+    log_stdout_to_file = True
+  elif hasattr(stdout, 'fileno'):
+    popen_stdout = stdout
+    log_stdout_to_file = True
+  elif isinstance(stdout, bool):
+    # This check must come before isinstance(int) because bool subclasses int.
+    if stdout:
+      popen_stdout = _get_tempfile()
+  elif isinstance(stdout, int):
+    popen_stdout = stdout
+  elif log_output:
+    popen_stdout = _get_tempfile()
+
+  log_stderr_to_file = False
+  if hasattr(stderr, 'fileno'):
+    popen_stderr = stderr
+    log_stderr_to_file = True
+  elif isinstance(stderr, bool):
+    # This check must come before isinstance(int) because bool subclasses int.
+    if stderr:
+      popen_stderr = _get_tempfile()
+  elif isinstance(stderr, int):
+    popen_stderr = stderr
+  elif log_output:
+    popen_stderr = _get_tempfile()
+
+  # If subprocesses have direct access to stdout or stderr, they can bypass
+  # our buffers, so we need to flush to ensure that output is not interleaved.
+  if popen_stdout is None or popen_stderr is None:
+    sys.stdout.flush()
+    sys.stderr.flush()
+
+  # If input is a string, we'll create a pipe and send it through that.
+  # Otherwise we assume it's a file object that can be read from directly.
+  if isinstance(input, (six.string_types, six.binary_type)):
+    stdin = subprocess.PIPE
+    # Allow people to always pass in bytes or strings regardless of encoding.
+    # Our Popen usage takes care of converting everything to bytes first.
+    #
+    # Linter can't see that we're using |input| as a var, not a builtin.
+    # pylint: disable=input-builtin
+    if encoding and isinstance(input, six.text_type):
+      input = input.encode(encoding, errors)
+    elif not encoding and isinstance(input, six.text_type):
+      input = input.encode('utf-8')
+  elif input is not None:
+    stdin = input
+    input = None
+
+  # Sanity check the command.  This helps when RunCommand is deep in the call
+  # chain, but the command itself was constructed along the way.
+  if isinstance(cmd, (six.string_types, six.binary_type)):
+    if not shell:
+      raise ValueError('Cannot run a string command without a shell')
+    cmd = ['/bin/bash', '-c', cmd]
+    shell = False
+  elif shell:
+    raise ValueError('Cannot run an array command with a shell')
+  elif not cmd:
+    raise ValueError('Missing command to run')
+  elif not isinstance(cmd, (list, tuple)):
+    raise TypeError('cmd must be list or tuple, not %s: %r' %
+                    (type(cmd), repr(cmd)))
+  elif not all(isinstance(x, (six.binary_type, six.string_types)) for x in cmd):
+    raise TypeError('All command elements must be bytes/strings: %r' % (cmd,))
+
+  # If we are using enter_chroot, we need the chroot-entry wrapper to pass env
+  # to the final command.
+  env = env.copy() if env is not None else os.environ.copy()
+  # Looking at localized error messages may be unexpectedly dangerous, so we
+  # set LC_MESSAGES=C to make sure the output of commands is safe to inspect.
+  env['LC_MESSAGES'] = 'C'
+  env.update(extra_env if extra_env else {})
+
+  if enter_chroot and not IsInsideChroot():
+    wrapper = ['cros_sdk']
+    if cwd:
+      # If the current working directory is set, try to find cros_sdk relative
+      # to cwd. Generally cwd will be the buildroot therefore we want to use
+      # {cwd}/chromite/bin/cros_sdk. For more info PTAL at crbug.com/432620
+      path = os.path.join(cwd, constants.CHROMITE_BIN_SUBDIR, 'cros_sdk')
+      if os.path.exists(path):
+        wrapper = [path]
+
+    if chroot_args:
+      wrapper += chroot_args
+
+    if extra_env:
+      wrapper.extend('%s=%s' % (k, v) for k, v in extra_env.items())
+
+    cmd = wrapper + ['--'] + cmd
+
+  for var in constants.ENV_PASSTHRU:
+    if var not in env and var in os.environ:
+      env[var] = os.environ[var]
+
+  # Print out the command before running.
+  if dryrun or print_cmd or log_output:
+    log = ''
+    if dryrun:
+      log += '(dryrun) '
+    log += 'run: %s' % (CmdToStr(cmd),)
+    if cwd:
+      log += ' in %s' % (cwd,)
+    logging.log(debug_level, '%s', log)
+
+  cmd_result.args = cmd
+
+  # We still want to do something in dryrun mode, so we process all the options
+  # and return appropriate values (e.g. output with correct encoding).
+  popen_cmd = ['true'] if dryrun else cmd
+
+  proc = None
+  # Verify that the signals module is actually usable, and won't segfault
+  # upon invocation of getsignal.  See signals.SignalModuleUsable for the
+  # details and upstream python bug.
+  use_signals = signals.SignalModuleUsable()
+  try:
+    proc = _Popen(popen_cmd, cwd=cwd, stdin=stdin, stdout=popen_stdout,
+                  stderr=popen_stderr, shell=False, env=env,
+                  close_fds=True)
+
+    if use_signals:
+      if ignore_sigint:
+        old_sigint = signal.signal(signal.SIGINT, signal.SIG_IGN)
+      else:
+        old_sigint = signal.getsignal(signal.SIGINT)
+        signal.signal(signal.SIGINT,
+                      functools.partial(_KillChildProcess, proc, int_timeout,
+                                        kill_timeout, cmd, old_sigint))
+
+      old_sigterm = signal.getsignal(signal.SIGTERM)
+      signal.signal(signal.SIGTERM,
+                    functools.partial(_KillChildProcess, proc, int_timeout,
+                                      kill_timeout, cmd, old_sigterm))
+
+    try:
+      (cmd_result.stdout, cmd_result.stderr) = proc.communicate(input)
+    finally:
+      if use_signals:
+        signal.signal(signal.SIGINT, old_sigint)
+        signal.signal(signal.SIGTERM, old_sigterm)
+
+      if (popen_stdout and not isinstance(popen_stdout, int) and
+          not log_stdout_to_file):
+        popen_stdout.seek(0)
+        cmd_result.stdout = popen_stdout.read()
+        popen_stdout.close()
+      elif log_stdout_to_file:
+        popen_stdout.close()
+
+      if (popen_stderr and not isinstance(popen_stderr, int) and
+          not log_stderr_to_file):
+        popen_stderr.seek(0)
+        cmd_result.stderr = popen_stderr.read()
+        popen_stderr.close()
+
+    cmd_result.returncode = proc.returncode
+
+    # The try/finally block is a bit hairy.  We normally want the logged
+    # output to be what gets passed back up.  But if there's a decode error,
+    # we don't want it to break logging entirely.  If the output had a lot of
+    # newlines, always logging it as bytes wouldn't be human readable.
+    try:
+      if encoding:
+        if cmd_result.stdout is not None:
+          cmd_result.stdout = cmd_result.stdout.decode(encoding, errors)
+        if cmd_result.stderr is not None:
+          cmd_result.stderr = cmd_result.stderr.decode(encoding, errors)
+    finally:
+      if log_output:
+        if cmd_result.stdout:
+          logging.log(debug_level, '(stdout):\n%s', cmd_result.stdout)
+        if cmd_result.stderr:
+          logging.log(debug_level, '(stderr):\n%s', cmd_result.stderr)
+
+    if check and proc.returncode:
+      msg = 'cmd=%s' % cmd
+      if cwd:
+        msg += ', cwd=%s' % cwd
+      if extra_env:
+        msg += ', extra env=%s' % extra_env
+      raise RunCommandError(msg, cmd_result)
+  except OSError as e:
+    estr = str(e)
+    if e.errno == errno.EACCES:
+      estr += '; does the program need `chmod a+x`?'
+    raise RunCommandError(estr, CommandResult(args=cmd), exception=e)
+  finally:
+    if proc is not None:
+      # Ensure the process is dead.
+      _KillChildProcess(proc, int_timeout, kill_timeout, cmd, None, None, None)
+
+  # We might capture stdout/stderr for internal reasons (like logging), but we
+  # don't want to let it leak back out to the callers.  They only get output if
+  # they explicitly requested it.
+  if stdout is None:
+    cmd_result.stdout = None
+  if stderr is None:
+    cmd_result.stderr = None
+
+  return cmd_result
+# pylint: enable=redefined-builtin
+
+
+# Convenience run methods.
+#
+# We don't use functools.partial because it binds the methods at import time,
+# which doesn't work well with unit tests, since it bypasses the mock that may
+# be set up for run.
+
+def dbg_run(*args, **kwargs):
+  kwargs.setdefault('debug_level', logging.DEBUG)
+  return run(*args, **kwargs)
+
+
+class DieSystemExit(SystemExit):
+  """Custom Exception used so we can intercept this if necessary."""
+
+
+def Die(message, *args, **kwargs):
+  """Emits an error message with a stack trace and halts execution.
+
+  Args:
+    message: The message to be emitted before exiting.
+  """
+  logging.error(message, *args, **kwargs)
+  raise DieSystemExit(1)
+
+
+def GetSysrootToolPath(sysroot, tool_name):
+  """Returns the path to the sysroot specific version of a tool.
+
+  Does not check that the tool actually exists.
+
+  Args:
+    sysroot: build root of the system in question.
+    tool_name: string name of tool desired (e.g. 'equery').
+
+  Returns:
+    string path to tool inside the sysroot.
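+
+  Example (illustrative; 'eve' is a hypothetical board name):
+    GetSysrootToolPath('/build/eve', 'equery')
+        ==> '/build/eve/build/bin/equery'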
+  """
+  if sysroot == '/':
+    return os.path.join(sysroot, 'usr', 'bin', tool_name)
+
+  return os.path.join(sysroot, 'build', 'bin', tool_name)
+
+
+def IsInsideChroot():
+  """Returns True if we are inside chroot."""
+  return os.path.exists('/etc/cros_chroot_version')
+
+
+def IsOutsideChroot():
+  """Returns True if we are outside chroot."""
+  return not IsInsideChroot()
+
+
+def AssertInsideChroot():
+  """Die if we are outside the chroot"""
+  if not IsInsideChroot():
+    Die('%s: please run inside the chroot', os.path.basename(sys.argv[0]))
+
+
+def AssertOutsideChroot():
+  """Die if we are inside the chroot"""
+  if IsInsideChroot():
+    Die('%s: please run outside the chroot', os.path.basename(sys.argv[0]))
+
+
+def GetHostName(fully_qualified=False):
+  """Return hostname of current machine, with domain if |fully_qualified|."""
+  hostname = socket.gethostname()
+  try:
+    hostname = socket.gethostbyaddr(hostname)[0]
+  except (socket.gaierror, socket.herror) as e:
+    logging.warning('please check your /etc/hosts file; resolving your hostname'
+                    ' (%s) failed: %s', hostname, e)
+
+  if fully_qualified:
+    return hostname
+  else:
+    return hostname.partition('.')[0]
+
+
+def GetHostDomain():
+  """Return domain of current machine.
+
+  If there is no domain, return 'localdomain'.
+  """
+
+  hostname = GetHostName(fully_qualified=True)
+  domain = hostname.partition('.')[2]
+  return domain if domain else 'localdomain'
+
+
+def HostIsCIBuilder(fq_hostname=None, golo_only=False, gce_only=False):
+  """Return True iff a host is a continuous-integration builder.
+
+  Args:
+    fq_hostname: The fully qualified hostname. By default, we fetch it for you.
+    golo_only: Only return True if the host is in the Chrome Golo. Defaults to
+      False.
+    gce_only: Only return True if the host is in the Chrome GCE block. Defaults
+      to False.
+  """
+  if not fq_hostname:
+    fq_hostname = GetHostName(fully_qualified=True)
+  in_golo = fq_hostname.endswith('.' + constants.GOLO_DOMAIN)
+  in_gce = (fq_hostname.endswith('.' + constants.CHROME_DOMAIN) or
+            fq_hostname.endswith('.' + constants.CHROMEOS_BOT_INTERNAL))
+  if golo_only:
+    return in_golo
+  elif gce_only:
+    return in_gce
+  else:
+    return in_golo or in_gce
+
+
+COMP_NONE = 0
+COMP_GZIP = 1
+COMP_BZIP2 = 2
+COMP_XZ = 3
+
+
+def FindCompressor(compression, chroot=None):
+  """Locate a compressor utility program (possibly in a chroot).
+
+  Since we compress/decompress a lot, make it easy to locate a
+  suitable utility program in a variety of locations.  We favor
+  the one in the chroot over /, and the parallel implementation
+  over the single threaded one.
+
+  Args:
+    compression: The type of compression desired.
+    chroot: Optional path to a chroot to search.
+
+  Returns:
+    Path to a compressor.
+
+  Raises:
+    ValueError: If compression is unknown.
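+
+  Example (illustrative): FindCompressor(COMP_GZIP) prefers 'pigz' when it
+  can be found, otherwise it falls back to 'gzip'.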
+  """
+  if compression == COMP_XZ:
+    return os.path.join(constants.CHROMITE_SCRIPTS_DIR, 'xz_auto')
+  elif compression == COMP_GZIP:
+    std = 'gzip'
+    para = 'pigz'
+  elif compression == COMP_BZIP2:
+    std = 'bzip2'
+    para = 'pbzip2'
+  elif compression == COMP_NONE:
+    return 'cat'
+  else:
+    raise ValueError('unknown compression')
+
+  roots = []
+  if chroot:
+    roots.append(chroot)
+  roots.append('/')
+
+  for prog in [para, std]:
+    for root in roots:
+      for subdir in ['', 'usr']:
+        path = os.path.join(root, subdir, 'bin', prog)
+        if os.path.exists(path):
+          return path
+
+  return std
+
+
+def CompressionStrToType(s):
+  """Convert a compression string type to a constant.
+
+  Args:
+    s: string to check
+
+  Returns:
+    A constant, or None if the compression type is unknown.
+  """
+  _COMP_STR = {
+      'gz': COMP_GZIP,
+      'bz2': COMP_BZIP2,
+      'xz': COMP_XZ,
+  }
+  if s:
+    return _COMP_STR.get(s)
+  else:
+    return COMP_NONE
+
+
+def CompressionExtToType(file_name):
+  """Retrieve a compression type constant from a compression file's name.
+
+  Args:
+    file_name: Name of a compression file.
+
+  Returns:
+    A compression type constant; COMP_NONE if the extension is unknown.
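+
+  Example (illustrative): CompressionExtToType('image.tar.xz') ==> COMP_XZ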
+  """
+  ext = os.path.splitext(file_name)[-1]
+  _COMP_EXT = {
+      '.tgz': COMP_GZIP,
+      '.gz': COMP_GZIP,
+      '.tbz2': COMP_BZIP2,
+      '.bz2': COMP_BZIP2,
+      '.txz': COMP_XZ,
+      '.xz': COMP_XZ,
+  }
+  return _COMP_EXT.get(ext, COMP_NONE)
+
+
+def CompressFile(infile, outfile):
+  """Compress a file using compressor specified by |outfile| suffix.
+
+  Args:
+    infile: File to compress.
+    outfile: Name of output file. Compression used is based on the
+             type of suffix of the name specified (e.g.: .bz2).
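+
+  Example (illustrative; paths are hypothetical):
+    CompressFile('/tmp/report.txt', '/tmp/report.txt.bz2')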
+  """
+  comp_type = CompressionExtToType(outfile)
+  assert comp_type and comp_type != COMP_NONE
+  comp = FindCompressor(comp_type)
+  if os.path.basename(comp) == 'pixz':
+    # pixz does not accept '-c'; instead an explicit '-i' indicates input file
+    # should not be deleted, and '-o' specifies output file.
+    cmd = [comp, '-i', infile, '-o', outfile]
+    run(cmd)
+  else:
+    cmd = [comp, '-c', infile]
+    run(cmd, stdout=outfile)
+
+
+def UncompressFile(infile, outfile):
+  """Uncompress a file using compressor specified by |infile| suffix.
+
+  Args:
+    infile: File to uncompress. Compression used is based on the
+            type of suffix of the name specified (e.g.: .bz2).
+    outfile: Name of output file.
+  """
+  comp_type = CompressionExtToType(infile)
+  assert comp_type and comp_type != COMP_NONE
+  comp = FindCompressor(comp_type)
+  if os.path.basename(comp) == 'pixz':
+    # pixz does not accept '-c'; instead an explicit '-i' indicates input file
+    # should not be deleted, and '-o' specifies output file.
+    cmd = [comp, '-d', '-i', infile, '-o', outfile]
+    run(cmd)
+  else:
+    cmd = [comp, '-dc', infile]
+    run(cmd, stdout=outfile)
+
+
+class CreateTarballError(RunCommandError):
+  """Error while running tar.
+
+  We may run tar multiple times because of "soft" errors.  The result is from
+  the last run instance.
+  """
+
+
+def CreateTarball(target, cwd, sudo=False, compression=COMP_XZ, chroot=None,
+                  inputs=None, timeout=300, extra_args=None, **kwargs):
+  """Create a tarball.  Executes 'tar' on the commandline.
+
+  Args:
+    target: The path of the tar file to generate.
+    cwd: The directory to run the tar command.
+    sudo: Whether to run with "sudo".
+    compression: The type of compression desired.  See the FindCompressor
+      function for details.
+    chroot: See FindCompressor().
+    inputs: A list of files or directories to add to the tarball.  If unset,
+      defaults to ".".
+    timeout: The number of seconds to wait on soft failure.
+    extra_args: A list of extra args to pass to "tar".
+    kwargs: Any run options/overrides to use.
+
+  Returns:
+    The cmd_result object returned by the run invocation.
+
+  Raises:
+    CreateTarballError: if the tar command failed, possibly after retry.
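+
+  Examples:
+    A minimal sketch (paths and inputs are illustrative):
+      CreateTarball('/tmp/results.tar.xz', '/tmp/results',
+                    inputs=['logs', 'summary.txt'])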
+  """
+  if inputs is None:
+    inputs = ['.']
+
+  if extra_args is None:
+    extra_args = []
+  kwargs.setdefault('debug_level', logging.INFO)
+
+  comp = FindCompressor(compression, chroot=chroot)
+  cmd = (['tar'] +
+         extra_args +
+         ['--sparse', '-I', comp, '-cf', target])
+  if len(inputs) > _THRESHOLD_TO_USE_T_FOR_TAR:
+    cmd += ['--null', '-T', '/dev/stdin']
+    rc_input = b'\0'.join(x.encode('utf-8') for x in inputs)
+  else:
+    cmd += list(inputs)
+    rc_input = None
+
+  rc_func = sudo_run if sudo else run
+
+  # If tar fails with status 1, retry twice. Once after timeout seconds and
+  # again 2*timeout seconds after that.
+  for try_count in range(3):
+    try:
+      result = rc_func(cmd, cwd=cwd, **dict(kwargs, check=False,
+                                            input=rc_input))
+    except RunCommandError as rce:
+      # There are cases where run never executes the command (cannot find tar,
+      # cannot execute tar, such as when cwd does not exist). Although the run
+      # command will show low-level problems, we also want to log the context
+      # of what CreateTarball was trying to do.
+      logging.error('CreateTarball unable to run tar for %s in %s. cmd={%s}',
+                    target, cwd, cmd)
+      raise rce
+    if result.returncode == 0:
+      return result
+    if result.returncode != 1 or try_count > 1:
+      # Since the build is abandoned at this point, we will take 5
+      # entire minutes to track down the competing process.
+      # Error will have the low-level tar command error, so log the context
+      # of the tar command (target file, current working dir).
+      logging.error('CreateTarball failed creating %s in %s. cmd={%s}',
+                    target, cwd, cmd)
+      raise CreateTarballError('CreateTarball', result)
+
+    assert result.returncode == 1
+    time.sleep(timeout * (try_count + 1))
+    logging.warning('CreateTarball: tar: source modification time changed '
+                    '(see crbug.com/547055), retrying')
+    logging.PrintBuildbotStepWarnings()
+
+
+def GetInput(prompt):
+  """Helper function to grab input from a user.   Makes testing easier."""
+  # We have people use GetInput() so they don't have to use these bad builtins
+  # themselves or deal with version skews.
+  # pylint: disable=bad-builtin,input-builtin,raw_input-builtin,undefined-variable
+  if sys.version_info.major < 3:
+    return raw_input(prompt)
+  else:
+    return input(prompt)
+
+
+def GetChoice(title, options, group_size=0):
+  """Ask user to choose an option from the list.
+
+  When |group_size| is 0, all items in |options| will be extracted and
+  shown at the same time.  Otherwise, the items will be extracted |group_size|
+  at a time, and then shown to the user.  This makes it easier to support
+  generators that are slow or extremely large, or cases where people usually
+  want to pick from the first few choices.
+
+  Args:
+    title: The text to display before listing options.
+    options: Iterable which provides options to display.
+    group_size: How many options to show before asking the user to choose.
+
+  Returns:
+    An integer of the index in |options| the user picked.
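+
+  Examples:
+    A minimal sketch (the option list is illustrative):
+      idx = GetChoice('Which board?', ['eve', 'octopus', 'zork'])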
+  """
+  def PromptForChoice(max_choice, more):
+    prompt = 'Please choose an option [0-%d]' % max_choice
+    if more:
+      prompt += ' (Enter for more options)'
+    prompt += ': '
+
+    while True:
+      choice = GetInput(prompt)
+      if more and not choice.strip():
+        return None
+      try:
+        choice = int(choice)
+      except ValueError:
+        print('Input is not an integer')
+        continue
+      if choice < 0 or choice > max_choice:
+        print('Choice %d out of range (0-%d)' % (choice, max_choice))
+        continue
+      return choice
+
+  print(title)
+  max_choice = 0
+  for i, opt in enumerate(options):
+    if i and group_size and not i % group_size:
+      choice = PromptForChoice(i - 1, True)
+      if choice is not None:
+        return choice
+    print('  [%d]: %s' % (i, opt))
+    max_choice = i
+
+  return PromptForChoice(max_choice, False)
+
+
+def BooleanPrompt(prompt='Do you want to continue?', default=True,
+                  true_value='yes', false_value='no', prolog=None):
+  """Helper function for processing boolean choice prompts.
+
+  Args:
+    prompt: The question to present to the user.
+    default: Boolean to return if the user just presses enter.
+    true_value: The text to display that represents a True returned.
+    false_value: The text to display that represents a False returned.
+    prolog: The text to display before prompt.
+
+  Returns:
+    True or False.
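+
+  Examples:
+    A minimal sketch:
+      proceed = BooleanPrompt(prompt='Erase the device?', default=False)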
+  """
+  true_value, false_value = true_value.lower(), false_value.lower()
+  true_text, false_text = true_value, false_value
+  if true_value == false_value:
+    raise ValueError('true_value and false_value must differ: got %r'
+                     % true_value)
+
+  if default:
+    true_text = true_text[0].upper() + true_text[1:]
+  else:
+    false_text = false_text[0].upper() + false_text[1:]
+
+  prompt = ('\n%s (%s/%s)? ' % (prompt, true_text, false_text))
+
+  if prolog:
+    prompt = ('\n%s\n%s' % (prolog, prompt))
+
+  while True:
+    try:
+      response = GetInput(prompt).lower()
+    except EOFError:
+      # If the user hits CTRL+D, or stdin is disabled, use the default.
+      print()
+      response = None
+    except KeyboardInterrupt:
+      # If the user hits CTRL+C, just exit the process.
+      print()
+      Die('CTRL+C detected; exiting')
+
+    if not response:
+      return default
+    if true_value.startswith(response):
+      if not false_value.startswith(response):
+        return True
+      # common prefix between the two...
+    elif false_value.startswith(response):
+      return False
+
+
+def BooleanShellValue(sval, default, msg=None):
+  """See if the string value is a value users typically consider as boolean
+
+  Often times people set shell variables to different values to mean "true"
+  or "false".  For example, they can do:
+    export FOO=yes
+    export BLAH=1
+    export MOO=true
+  Handle all that user ugliness here.
+
+  If the user picks an invalid value, you can use |msg| to display a non-fatal
+  warning rather than raising an exception.
+
+  Args:
+    sval: The string value we got from the user.
+    default: If we can't figure out if the value is true or false, use this.
+    msg: If |sval| is an unknown value, use |msg| to warn the user that we
+         could not decode the input.  Otherwise, raise ValueError().
+
+  Returns:
+    The interpreted boolean value of |sval|.
+
+  Raises:
+    ValueError() if |sval| is an unknown value and |msg| is not set.
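+
+  Examples:
+    A few illustrative inputs:
+      BooleanShellValue('YES', False)                  # -> True
+      BooleanShellValue('0', True)                     # -> False
+      BooleanShellValue('junk', True, msg='bad $FOO')  # -> True (logs warning)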
+  """
+  if sval is None:
+    return default
+
+  if isinstance(sval, six.string_types):
+    s = sval.lower()
+    if s in ('yes', 'y', '1', 'true'):
+      return True
+    elif s in ('no', 'n', '0', 'false'):
+      return False
+
+  if msg is not None:
+    logging.warning('%s: %r', msg, sval)
+    return default
+  else:
+    raise ValueError('Could not decode as a boolean value: %r' % sval)
+
+
+# Suppress whacked complaints about abstract class being unused.
+class MasterPidContextManager(object):
+  """Allow context managers to restrict their exit to within the same PID."""
+
+  # In certain cases we actually want this run outside
+  # of the main pid, specifically in backup processes
+  # doing cleanup.
+  ALTERNATE_MASTER_PID = None
+
+  def __init__(self):
+    self._invoking_pid = None
+
+  def __enter__(self):
+    self._invoking_pid = os.getpid()
+    return self._enter()
+
+  def __exit__(self, exc_type, exc, exc_tb):
+    curpid = os.getpid()
+    if curpid == self.ALTERNATE_MASTER_PID:
+      self._invoking_pid = curpid
+    if curpid == self._invoking_pid:
+      return self._exit(exc_type, exc, exc_tb)
+
+  def _enter(self):
+    raise NotImplementedError(self, '_enter')
+
+  def _exit(self, exc_type, exc, exc_tb):
+    raise NotImplementedError(self, '_exit')
+
+
+class ContextManagerStack(object):
+  """Context manager that is designed to safely allow nesting and stacking.
+
+  Python2.7 directly supports a with syntax that generally removes the need for
+  this, although this form avoids indentation hell if there are a lot of
+  context managers.  It also permits more programmatic control and allows
+  conditional usage.
+
+  For Python2.6, see http://docs.python.org/library/contextlib.html; the short
+  version is that there is a race in the available stdlib/language rules under
+  2.6 when dealing with multiple context managers, thus this safe version was
+  added.
+
+  For each context manager added to this instance, it will unwind them,
+  invoking them as if it had been constructed as a set of manually nested
+  with statements.
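+
+  Examples:
+    A minimal sketch (Timeout and osutils.TempDir are illustrative managers):
+      with ContextManagerStack() as stack:
+        stack.Add(Timeout, 60)
+        if need_tempdir:  # conditional usage is the main win over "with"
+          stack.Add(osutils.TempDir)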
+  """
+
+  def __init__(self):
+    self._stack = []
+
+  def Add(self, functor, *args, **kwargs):
+    """Add a context manager onto the stack.
+
+    Usage of this is essentially the following:
+    >>> stack.add(Timeout, 60)
+
+    It must be done in this fashion, else there is a mild race that exists
+    between context manager instantiation and initial __enter__.
+
+    Invoking it in the form specified eliminates that race.
+
+    Args:
+      functor: A callable to instantiate a context manager.
+      args and kwargs: positional and optional args to functor.
+
+    Returns:
+      The newly created (and __enter__'d) context manager.
+      Note: This is not the same value as the "with" statement -- that returns
+      the value from the __enter__ function while this is the manager itself.
+    """
+    obj = None
+    try:
+      obj = functor(*args, **kwargs)
+      return obj
+    finally:
+      if obj is not None:
+        obj.__enter__()
+        self._stack.append(obj)
+
+  def __enter__(self):
+    # Nothing to do in this case.  The individual __enter__'s are done
+    # when the context managers are added, which will likely be after
+    # the __enter__ method of this stack is called.
+    return self
+
+  def __exit__(self, exc_type, exc, exc_tb):
+    # Exit each context manager in stack in reverse order, tracking the results
+    # to know whether or not to suppress the exception raised (or to switch that
+    # exception to a new one triggered by an individual handler's __exit__).
+    for handler in reversed(self._stack):
+      # pylint: disable=bare-except
+      try:
+        if handler.__exit__(exc_type, exc, exc_tb):
+          exc_type = exc = exc_tb = None
+      except:
+        exc_type, exc, exc_tb = sys.exc_info()
+
+    self._stack = []
+
+    # Return True if any exception was handled.
+    if all(x is None for x in (exc_type, exc, exc_tb)):
+      return True
+
+    # Raise any exception that is left over from exiting all context managers.
+    # Normally a single context manager would return False to allow caller to
+    # re-raise the exception itself, but here the exception might have been
+    # raised during the exiting of one of the individual context managers.
+    six.reraise(exc_type, exc, exc_tb)
+
+
+def iflatten_instance(iterable,
+                      terminate_on_kls=(six.string_types, six.binary_type)):
+  """Derivative of snakeoil.lists.iflatten_instance; flatten an object.
+
+  Given an object, flatten it into a single depth iterable,
+  stopping descent on objects that either aren't iterable, or match
+  isinstance(obj, terminate_on_kls).
+
+  Examples:
+    >>> list(iflatten_instance([1, 2, 'as', ['4', 5]]))
+    [1, 2, 'as', '4', 5]
+  """
+  def descend_into(item):
+    if isinstance(item, terminate_on_kls):
+      return False
+    try:
+      iter(item)
+    except TypeError:
+      return False
+    # Note that strings can be infinitely descended through, thus this
+    # recursion limiter.
+    return not isinstance(item, six.string_types) or len(item) > 1
+
+  if not descend_into(iterable):
+    yield iterable
+    return
+  for item in iterable:
+    if not descend_into(item):
+      yield item
+    else:
+      for subitem in iflatten_instance(item, terminate_on_kls):
+        yield subitem
+
+
+@contextlib.contextmanager
+def Open(obj, mode='r'):
+  """Convenience ctx that accepts a file path or an already open file object."""
+  if isinstance(obj, six.string_types):
+    with open(obj, mode=mode) as f:
+      yield f
+  else:
+    yield obj
+
+
+def SafeRun(functors, combine_exceptions=False):
+  """Executes a list of functors, continuing on exceptions.
+
+  Args:
+    functors: An iterable of functors to call.
+    combine_exceptions: If set, and multiple exceptions are encountered,
+      SafeRun will raise a RuntimeError containing a list of all the exceptions.
+      If only one exception is encountered, then the default behavior of
+      re-raising the original exception with unmodified stack trace will be
+      kept.
+
+  Raises:
+    The first exception encountered, with corresponding backtrace, unless
+    |combine_exceptions| is specified and there is more than one exception
+    encountered, in which case a RuntimeError containing a list of all the
+    exceptions that were encountered is raised.
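+
+  Examples:
+    A minimal sketch (functools/shutil and the dirs are illustrative):
+      SafeRun([functools.partial(shutil.rmtree, d) for d in scratch_dirs],
+              combine_exceptions=True)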
+  """
+  errors = []
+
+  for f in functors:
+    try:
+      f()
+    except Exception as e:
+      # Append the exception object and the traceback.
+      errors.append((e, sys.exc_info()[2]))
+
+  if errors:
+    if len(errors) == 1 or not combine_exceptions:
+      # To preserve the traceback.
+      inst, tb = errors[0]
+      six.reraise(type(inst), inst, tb)
+    else:
+      raise RuntimeError([e[0] for e in errors])
+
+
+def UserDateTimeFormat(timeval=None):
+  """Format a date meant to be viewed by a user
+
+  The focus here is to have a format that is easily readable by humans,
+  but still easy (and unambiguous) for a machine to parse.  Hence, we
+  use the RFC 2822 date format (with timezone name appended).
+
+  Args:
+    timeval: Either a datetime object or a floating point time value as accepted
+             by gmtime()/localtime().  If None, the current time is used.
+
+  Returns:
+    A string format such as 'Wed, 20 Feb 2013 15:25:15 -0500 (EST)'
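+
+  Examples:
+    stamp = UserDateTimeFormat()           # current time, RFC 2822 + tz name
+    secs = ParseUserDateTimeFormat(stamp)  # round-trips to a Unix timestamp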
+  """
+  if isinstance(timeval, datetime):
+    timeval = time.mktime(timeval.timetuple())
+  return '%s (%s)' % (email.utils.formatdate(timeval=timeval, localtime=True),
+                      time.strftime('%Z', time.localtime(timeval)))
+
+
+def ParseUserDateTimeFormat(time_string):
+  """Parse a time string into a floating point time value.
+
+  This function is essentially the inverse of UserDateTimeFormat.
+
+  Args:
+    time_string: A string datetime representation in RFC 2822 format, such as
+                 'Wed, 20 Feb 2013 15:25:15 -0500 (EST)'.
+
+  Returns:
+    Floating point Unix timestamp (seconds since epoch).
+  """
+  return email.utils.mktime_tz(email.utils.parsedate_tz(time_string))
+
+
+def GetDefaultBoard():
+  """Gets the default board.
+
+  Returns:
+    The default board (as a string), or None if either the default board
+    file was missing or malformed.
+  """
+  default_board_file_name = os.path.join(constants.SOURCE_ROOT, 'src',
+                                         'scripts', '.default_board')
+  try:
+    with open(default_board_file_name) as default_board_file:
+      default_board = default_board_file.read().strip()
+      # Check for user typos like whitespace
+      if not re.match('[a-zA-Z0-9-_]*$', default_board):
+        logging.warning('Noticed invalid default board: |%s|. Ignoring this '
+                        'default.', default_board)
+        default_board = None
+  except IOError:
+    return None
+
+  return default_board
+
+
+def SetDefaultBoard(board):
+  """Set the default board.
+
+  Args:
+    board (str): The name of the board to save as the default.
+
+  Returns:
+    bool - True if successfully wrote default, False otherwise.
+  """
+  config_path = os.path.join(constants.CROSUTILS_DIR, '.default_board')
+  try:
+    with open(config_path, 'w') as f:
+      f.write(board)
+  except IOError as e:
+    logging.error('Unable to write default board: %s', e)
+    return False
+
+  return True
+
+
+def GetBoard(device_board, override_board=None, force=False, strict=False):
+  """Gets the board name to use.
+
+  Ask user to confirm when |override_board| and |device_board| are
+  both None.
+
+  Args:
+    device_board: The board detected on the device.
+    override_board: Overrides the board.
+    force: Force using the default board if |device_board| is None.
+    strict: If True, abort if no valid board can be found.
+
+  Returns:
+    Returns the first non-None board in the following order:
+    |override_board|, |device_board|, and GetDefaultBoard().
+
+  Raises:
+    DieSystemExit: If board is not set or user enters no.
+  """
+  if override_board:
+    return override_board
+
+  board = device_board or GetDefaultBoard()
+  if not device_board:
+    if not board and strict:
+      Die('No board specified and no default board found.')
+    msg = 'Cannot detect board name; using default board %s.' % board
+    if not force and not BooleanPrompt(default=False, prolog=msg):
+      Die('Exiting...')
+
+    logging.warning(msg)
+
+  return board
+
+
+# Structure to hold the values produced by TimedSection.
+#
+#  Attributes:
+#    start: The absolute start time as a datetime.
+#    finish: The absolute finish time as a datetime, or None if in progress.
+#    delta: The runtime as a timedelta, or None if in progress.
+TimedResults = cros_collections.Collection(
+    'TimedResults', start=None, finish=None, delta=None)
+
+
+@contextlib.contextmanager
+def TimedSection():
+  """Context manager to time how long a code block takes.
+
+  Examples:
+    with cros_build_lib.TimedSection() as timer:
+      DoWork()
+    logging.info('DoWork took %s', timer.delta)
+
+  Context manager value will be a TimedResults instance.
+  """
+  # Create our context manager value.
+  times = TimedResults(start=datetime.now())
+  try:
+    yield times
+  finally:
+    times.finish = datetime.now()
+    times.delta = times.finish - times.start
+
+
+def GetRandomString():
+  """Returns a random string.
+
+  It will be 32 characters long, although callers shouldn't rely on this.
+  Only lowercase & numbers are used to avoid case-insensitive collisions.
+  """
+  # Start with current time.  This "scopes" the following random data.
+  stamp = b'%x' % int(time.time())
+  # Add in some entropy.  This reads more bytes than strictly necessary, but
+  # it guarantees that we always have enough bytes below.
+  data = os.urandom(16)
+  # Then convert it to a lowercase base32 string of 32 characters.
+  return base64.b32encode(stamp + data).decode('utf-8')[0:32].lower()
+
+
+def MachineDetails():
+  """Returns a string to help identify the source of a job.
+
+  This is not meant for machines to parse; instead, we want content that is easy
+  for humans to read when trying to figure out where "something" is coming from.
+  For example, when a service has grabbed a lock in Google Storage, and we want
+  to see what process actually triggered that (in case it is a test gone rogue),
+  the content in here should help triage.
+
+  Note: none of the details included may be secret so they can be freely pasted
+  into bug reports/chats/logs/etc...
+
+  Note: this content should not be large.
+
+  Returns:
+    A string with content that helps identify this system/process/etc...
+  """
+  return '\n'.join((
+      'PROG=%s' % inspect.stack()[-1][1],
+      'USER=%s' % getpass.getuser(),
+      'HOSTNAME=%s' % GetHostName(fully_qualified=True),
+      'PID=%s' % os.getpid(),
+      'TIMESTAMP=%s' % UserDateTimeFormat(),
+      'RANDOM_JUNK=%s' % GetRandomString(),
+  )) + '\n'
+
+
+def UnbufferedTemporaryFile(**kwargs):
+  """Handle buffering changes in tempfile.TemporaryFile."""
+  assert 'bufsize' not in kwargs
+  assert 'buffering' not in kwargs
+  if sys.version_info.major < 3:
+    kwargs['bufsize'] = 0
+  else:
+    kwargs['buffering'] = 0
+  return tempfile.TemporaryFile(**kwargs)
+
+
+def UnbufferedNamedTemporaryFile(**kwargs):
+  """Handle buffering changes in tempfile.NamedTemporaryFile."""
+  assert 'bufsize' not in kwargs
+  assert 'buffering' not in kwargs
+  if sys.version_info.major < 3:
+    kwargs['bufsize'] = 0
+  else:
+    kwargs['buffering'] = 0
+  return tempfile.NamedTemporaryFile(**kwargs)
diff --git a/utils/frozen_chromite/lib/cros_collections.py b/utils/frozen_chromite/lib/cros_collections.py
new file mode 100644
index 0000000..d63c6db
--- /dev/null
+++ b/utils/frozen_chromite/lib/cros_collections.py
@@ -0,0 +1,137 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Chromite extensions on top of the collections module."""
+
+from __future__ import print_function
+
+
+def _CollectionExec(expr, classname):
+  """Hack to workaround <=Python-2.7.8 exec bug.
+
+  See https://bugs.python.org/issue21591 for details.
+
+  TODO(crbug.com/998624): Drop this in Jan 2020.
+  """
+  namespace = {}
+  exec(expr, {}, namespace)  # pylint: disable=exec-used
+  return namespace[classname]
+
+
+# We have nested kwargs below, so disable the |kwargs| naming here.
+# pylint: disable=docstring-misnamed-args
+def Collection(classname, **default_kwargs):
+  """Create a new class with mutable named members.
+
+  This is like collections.namedtuple, but mutable.  Also similar to the
+  python 3.3 types.SimpleNamespace.
+
+  Examples:
+    # Declare default values for this new class.
+    Foo = cros_collections.Collection('Foo', a=0, b=10)
+    # Create an instance of the new class, but set b to 4.
+    foo = Foo(b=4)
+    # Print out a (will be the default 0) and b (will be 4).
+    print('a = %i, b = %i' % (foo.a, foo.b))
+  """
+
+  def sn_init(self, **kwargs):
+    """The new class's __init__ function."""
+    # First verify the kwargs don't have excess settings.
+    valid_keys = set(self.__slots__)
+    these_keys = set(kwargs.keys())
+    invalid_keys = these_keys - valid_keys
+    if invalid_keys:
+      raise TypeError('invalid keyword arguments for this object: %r' %
+                      invalid_keys)
+
+    # Now initialize this object.
+    for k in valid_keys:
+      setattr(self, k, kwargs.get(k, default_kwargs[k]))
+
+  def sn_repr(self):
+    """The new class's __repr__ function."""
+    return '%s(%s)' % (classname, ', '.join(
+        '%s=%r' % (k, getattr(self, k)) for k in self.__slots__))
+
+  # Give the new class a unique name and then generate the code for it.
+  classname = 'Collection_%s' % classname
+  expr = '\n'.join((
+      'class %(classname)s(object):',
+      '  __slots__ = ["%(slots)s"]',
+  )) % {
+      'classname': classname,
+      'slots': '", "'.join(sorted(default_kwargs)),
+  }
+
+  # Create the class in a local namespace as exec requires.
+  new_class = _CollectionExec(expr, classname)
+
+  # Bind the helpers.
+  new_class.__init__ = sn_init
+  new_class.__repr__ = sn_repr
+
+  return new_class
+# pylint: enable=docstring-misnamed-args
+
+
+def GroupByKey(input_iter, key):
+  """Split an iterable of dicts, based on value of a key.
+
+  GroupByKey([{'a': 1}, {'a': 2}, {'a': 1, 'b': 2}], 'a') =>
+    {1: [{'a': 1}, {'a': 1, 'b': 2}], 2: [{'a': 2}]}
+
+  Args:
+    input_iter: An iterable of dicts.
+    key: A string specifying the key name to split by.
+
+  Returns:
+    A dictionary, mapping from each unique value for |key| that
+    was encountered in |input_iter| to a list of entries that had
+    that value.
+  """
+  split_dict = dict()
+  for entry in input_iter:
+    split_dict.setdefault(entry.get(key), []).append(entry)
+  return split_dict
+
+
+def GroupNamedtuplesByKey(input_iter, key):
+  """Split an iterable of namedtuples, based on value of a key.
+
+  Args:
+    input_iter: An iterable of namedtuples.
+    key: A string specifying the key name to split by.
+
+  Returns:
+    A dictionary, mapping from each unique value for |key| that
+    was encountered in |input_iter| to a list of entries that had
+    that value.
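+
+  Examples:
+    An illustrative run (Point is a stand-in namedtuple):
+      import collections
+      Point = collections.namedtuple('Point', ['x', 'y'])
+      GroupNamedtuplesByKey([Point(1, 2), Point(1, 3), Point(2, 2)], 'x')
+      # => {1: [Point(x=1, y=2), Point(x=1, y=3)], 2: [Point(x=2, y=2)]}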
+  """
+  split_dict = {}
+  for entry in input_iter:
+    split_dict.setdefault(getattr(entry, key, None), []).append(entry)
+  return split_dict
+
+
+def InvertDictionary(origin_dict):
+  """Invert the key value mapping in the origin_dict.
+
+  Given an origin_dict {'key1': {'val1', 'val2'}, 'key2': {'val1', 'val3'},
+  'key3': {'val3'}}, the returned inverted dict will be
+  {'val1': {'key1', 'key2'}, 'val2': {'key1'}, 'val3': {'key2', 'key3'}}
+
+  Args:
+    origin_dict: A dict mapping each key to a group (collection) of values.
+
+  Returns:
+    An inverted dict mapping each value to the set of keys that contained it.
+  """
+  new_dict = {}
+  for origin_key, origin_values in origin_dict.items():
+    for origin_value in origin_values:
+      new_dict.setdefault(origin_value, set()).add(origin_key)
+
+  return new_dict
diff --git a/utils/frozen_chromite/lib/cros_logging.py b/utils/frozen_chromite/lib/cros_logging.py
new file mode 100644
index 0000000..778eef9
--- /dev/null
+++ b/utils/frozen_chromite/lib/cros_logging.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Logging module to be used by all scripts.
+
+cros_logging is a wrapper around logging with additional support for NOTICE
+level. This is to be used instead of the default logging module. The new
+logging level can only be used from here.
+
+The log levels should be used as follows:
+
+DEBUG: Enabled on the CLI with --debug. This is the noisiest logging level.
+Often, as the name suggests, it may contain debugging information you wouldn't
+otherwise need.
+
+INFO: Enabled on the CLI with --verbose. Logging at this level should contain
+relatively fine-grained info about the steps the process is performing, but
+should be light on details (which should be in debug).
+
+NOTICE: The default log level. It should relay a high level overview of what
+the process is doing. It should NOT be a noisy output.
+
+WARNING: Unexpected scenarios that are well handled and do not interrupt the
+process, things like retrying an operation or missing optional information
+needed to complete a portion of a process.
+
+ERROR: Problems that are fatal to a specific operation or script, e.g.
+unable to read a file or invalid arguments.
+
+CRITICAL/FATAL: Rarely needed. These should reflect an extraordinary error that
+might require the shutdown of an application or lead to data loss.
+
+WARNING, ERROR, CRITICAL/FATAL: These levels are always included in the above
+levels as one would expect. Limiting the output of a script to just these log
+levels is rarely desirable, but the --log-level argument can be used to do so.
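+
+Typical usage is as a drop-in for the stdlib logging module; a minimal sketch
+(the message and argument are illustrative):
+
+  from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+  logging.notice('Flashing image %s', image_path)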
+"""
+
+from __future__ import print_function
+
+import sys
+# pylint: disable=unused-wildcard-import, wildcard-import
+from logging import *
+# pylint: enable=unused-wildcard-import, wildcard-import
+
+# Have to import shutdown explicitly from logging because it is not included
+# in logging's __all__.
+# pylint: disable=unused-import
+from logging import shutdown
+# pylint: enable=unused-import
+
+# Import as private to avoid polluting module namespace.
+from autotest_lib.utils.frozen_chromite.lib import buildbot_annotations as _annotations
+
+
+# Remove deprecated APIs to force use of new ones.
+del WARN
+del warn
+
+
+# Notice Level.
+NOTICE = 25
+addLevelName(NOTICE, 'NOTICE')
+
+
+# Notice implementation.
+def notice(message, *args, **kwargs):
+  """Log 'msg % args' with severity 'NOTICE'."""
+  log(NOTICE, message, *args, **kwargs)
+
+
+# Only buildbot aware entry-points need to spew buildbot specific logs. Require
+# user action for the special log lines.
+_buildbot_markers_enabled = False
+def EnableBuildbotMarkers():
+  # pylint: disable=global-statement
+  global _buildbot_markers_enabled
+  _buildbot_markers_enabled = True
+
+
+def _PrintForBuildbot(handle, annotation_class, *args):
+  """Log a line for buildbot.
+
+  This function dumps a line to log recognizable by buildbot if
+  EnableBuildbotMarkers has been called. Otherwise, it dumps the same line in a
+  human friendly way that buildbot ignores.
+
+  Args:
+    handle: The pipe to dump the log to. If None, log to sys.stderr.
+    annotation_class: Annotation subclass for the type of buildbot log.
+    *args: The rest of the str arguments to be dumped to the log.
+  """
+  if handle is None:
+    handle = sys.stderr
+  if annotation_class == _annotations.SetEmailNotifyProperty:
+    annotation = annotation_class(*args)
+  else:
+    # Cast each argument, because we end up getting all sorts of objects from
+    # callers.
+    str_args = [str(x) for x in args]
+    annotation = annotation_class(*str_args)
+  if _buildbot_markers_enabled:
+    line = str(annotation)
+  else:
+    line = annotation.human_friendly
+  handle.write('\n' + line + '\n')
+
+
+def PrintBuildbotLink(text, url, handle=None):
+  """Prints out a link to buildbot."""
+  _PrintForBuildbot(handle, _annotations.StepLink, text, url)
+
+
+def PrintKitchenSetBuildProperty(name, data, handle=None):
+  """Prints out a request to set a build property to a JSON value."""
+  _PrintForBuildbot(handle, _annotations.SetBuildProperty, name, data)
+
+
+def PrintKitchenSetEmailNotifyProperty(name, data, handle=None):
+  """Prints out a request to set an email_notify build property."""
+  _PrintForBuildbot(handle, _annotations.SetEmailNotifyProperty, name, data)
+
+
+def PrintBuildbotStepText(text, handle=None):
+  """Prints out stage text to buildbot."""
+  _PrintForBuildbot(handle, _annotations.StepText, text)
+
+
+def PrintBuildbotStepWarnings(handle=None):
+  """Marks a stage as having warnings."""
+  PrintBuildbotStepText('[FAILED BUT FORGIVEN]', handle=handle)
+  # Warnings not supported by LUCI, so working around until re-added.
+  _PrintForBuildbot(handle, _annotations.StepWarnings)
+
+
+def PrintBuildbotStepFailure(handle=None):
+  """Marks a stage as having failures."""
+  _PrintForBuildbot(handle, _annotations.StepFailure)
+
+
+def PrintBuildbotStepName(name, handle=None):
+  """Marks a step name for buildbot to display."""
+  _PrintForBuildbot(handle, _annotations.BuildStep, name)
diff --git a/utils/frozen_chromite/lib/failure_message_lib.py b/utils/frozen_chromite/lib/failure_message_lib.py
new file mode 100644
index 0000000..6270b65
--- /dev/null
+++ b/utils/frozen_chromite/lib/failure_message_lib.py
@@ -0,0 +1,365 @@
+# -*- coding: utf-8 -*-
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module to manage stage failure messages."""
+
+from __future__ import print_function
+
+import collections
+import json
+import re
+
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+
+# Currently, an exception is reported to CIDB failureTable using the exception
+# class name as the exception_type. failure_message_lib.FailureMessageManager
+# uses the exception_type to decide which StageFailureMessage class to use
+# to rebuild the failure message. Whenever you need to change the names of these
+# classes, please add the new class names to their corresponding type lists,
+# and DO NOT remove the old class names from the type lists.
+# TODO (nxia): instead of using the class name as the exception type when
+# reporting an exception to CIDB, we need to have an attribute like
+# EXCEPTION_CATEGORY (say EXCEPTION_TYPE) and this type cannot be changed or
+# removed from EXCEPTION_TYPE_LIST. But we can add new types to the list.
+BUILD_SCRIPT_FAILURE_TYPES = ('BuildScriptFailure',)
+PACKAGE_BUILD_FAILURE_TYPES = ('PackageBuildFailure',)
+
+
+# These keys must exist as column names from failureView in cidb.
+FAILURE_KEYS = (
+    'id', 'build_stage_id', 'outer_failure_id', 'exception_type',
+    'exception_message', 'exception_category', 'extra_info',
+    'timestamp', 'stage_name', 'board', 'stage_status', 'build_id',
+    'master_build_id', 'builder_name', 'build_number',
+    'build_config', 'build_status', 'important', 'buildbucket_id')
+
+
+# A namedtuple containing values fetched from CIDB failureView.
+_StageFailure = collections.namedtuple('_StageFailure', FAILURE_KEYS)
+
+
+class StageFailure(_StageFailure):
+  """A class presenting values of a failure fetched from CIDB failureView."""
+
+  @classmethod
+  def GetStageFailureFromMessage(cls, stage_failure_message):
+    """Create StageFailure from a StageFailureMessage instance.
+
+    Args:
+      stage_failure_message: An instance of StageFailureMessage.
+
+    Returns:
+      An instance of StageFailure.
+    """
+    return StageFailure(
+        stage_failure_message.failure_id,
+        stage_failure_message.build_stage_id,
+        stage_failure_message.outer_failure_id,
+        stage_failure_message.exception_type,
+        stage_failure_message.exception_message,
+        stage_failure_message.exception_category,
+        stage_failure_message.extra_info, None,
+        stage_failure_message.stage_name, None, None, None, None, None, None,
+        None, None, None, None)
+
+  @classmethod
+  def GetStageFailureFromDicts(cls, failure_dict, stage_dict, build_dict):
+    """Get StageFailure from value dictionaries.
+
+    Args:
+      failure_dict: A dict presenting values of a tuple from failureTable.
+      stage_dict: A dict presenting values of a tuple from buildStageTable.
+      build_dict: A dict presenting values of a tuple from buildTable.
+
+    Returns:
+      An instance of StageFailure.
+    """
+    return StageFailure(
+        failure_dict['id'], failure_dict['build_stage_id'],
+        failure_dict['outer_failure_id'], failure_dict['exception_type'],
+        failure_dict['exception_message'], failure_dict['exception_category'],
+        failure_dict['extra_info'], failure_dict['timestamp'],
+        stage_dict['name'], stage_dict['board'], stage_dict['status'],
+        build_dict['id'], build_dict['master_build_id'],
+        build_dict['builder_name'],
+        build_dict['build_number'], build_dict['build_config'],
+        build_dict['status'], build_dict['important'],
+        build_dict['buildbucket_id'])
+
+
+class StageFailureMessage(object):
+  """Message class contains information of a general stage failure.
+
+  Failed stages report stage failures to CIDB failureTable (see more details
+  in failures_lib.ReportStageFailure). This class constructs a failure
+  message instance from the stage failure information stored in CIDB.
+  """
+
+  def __init__(self, stage_failure, extra_info=None, stage_prefix_name=None):
+    """Construct a StageFailureMessage instance.
+
+    Args:
+      stage_failure: An instance of StageFailure.
+      extra_info: The extra info of the origin failure, default to None.
+      stage_prefix_name: The prefix name (string) of the failed stage,
+        default to None.
+    """
+    self.failure_id = stage_failure.id
+    self.build_stage_id = stage_failure.build_stage_id
+    self.stage_name = stage_failure.stage_name
+    self.exception_type = stage_failure.exception_type
+    self.exception_message = stage_failure.exception_message
+    self.exception_category = stage_failure.exception_category
+    self.outer_failure_id = stage_failure.outer_failure_id
+
+    if extra_info is not None:
+      self.extra_info = extra_info
+    else:
+      # No extra_info provided, decode extra_info from stage_failure.
+      self.extra_info = self._DecodeExtraInfo(stage_failure.extra_info)
+
+    if stage_prefix_name is not None:
+      self.stage_prefix_name = stage_prefix_name
+    else:
+      # No stage_prefix_name provided, extract prefix name from stage_failure.
+      self.stage_prefix_name = self._ExtractStagePrefixName(self.stage_name)
+
+  def __str__(self):
+    return ('[failure id] %s [stage name] %s [stage prefix name] %s '
+            '[exception type] %s [exception category] %s [exception message] %s'
+            ' [extra info] %s' %
+            (self.failure_id, self.stage_name, self.stage_prefix_name,
+             self.exception_type, self.exception_category,
+             self.exception_message, self.extra_info))
+
+  def _DecodeExtraInfo(self, extra_info):
+    """Decode extra info json into dict.
+
+    Args:
+      extra_info: The extra_info of the origin exception, default to None.
+
+    Returns:
+      An empty dict if extra_info is None; extra_info itself if extra_info is
+      a dict; else, load the json string into a dict and return it.
+    """
+    if not extra_info:
+      return {}
+    elif isinstance(extra_info, dict):
+      return extra_info
+    else:
+      try:
+        return json.loads(extra_info)
+      except ValueError as e:
+        logging.error('Cannot decode extra_info: %s', e)
+        return {}
+
+  # TODO(nxia): Force format checking on stage names when they're created
+  def _ExtractStagePrefixName(self, stage_name):
+    """Extract stage prefix name given a full stage name.
+
+    Format examples in our current CIDB buildStageTable:
+      HWTest [bvt-arc] -> HWTest
+      HWTest -> HWTest
+      ImageTest -> ImageTest
+      ImageTest [amd64-generic] -> ImageTest
+      VMTest (attempt 1) -> VMTest
+      VMTest [amd64-generic] (attempt 1) -> VMTest
+
+    Args:
+      stage_name: The full stage name (string) recorded in CIDB.
+
+    Returns:
+      The prefix stage name (string).
+    """
+    pattern = r'([^ ]+)( +\[([^]]+)\])?( +\(([^)]+)\))?'
+    m = re.compile(pattern).match(stage_name)
+    if m is not None:
+      return m.group(1)
+    else:
+      return stage_name
+
+
+class BuildScriptFailureMessage(StageFailureMessage):
+  """Message class contains information of a BuildScriptFailure."""
+
+  def GetShortname(self):
+    """Return the short name (string) of the run command."""
+    return self.extra_info.get('shortname')
+
+
+class PackageBuildFailureMessage(StageFailureMessage):
+  """Message class contains information of a PackagebuildFailure."""
+
+  def GetShortname(self):
+    """Return the short name (string) of the run command."""
+    return self.extra_info.get('shortname')
+
+  def GetFailedPackages(self):
+    """Return a list of packages (strings) that failed to build."""
+    return self.extra_info.get('failed_packages', [])
+
+
+class CompoundFailureMessage(StageFailureMessage):
+  """Message class contains information of a CompoundFailureMessage."""
+
+  def __init__(self, stage_failure, **kwargs):
+    """Construct a CompoundFailureMessage instance.
+
+    Args:
+      stage_failure: An instance of StageFailure.
+      kwargs: Extra message information to pass to StageFailureMessage.
+    """
+    super(CompoundFailureMessage, self).__init__(stage_failure, **kwargs)
+
+    self.inner_failures = []
+
+  def __str__(self):
+    msg_str = super(CompoundFailureMessage, self).__str__()
+
+    for failure in self.inner_failures:
+      msg_str += ('(Inner Stage Failure Message) %s' % str(failure))
+
+    return msg_str
+
+  @staticmethod
+  def GetFailureMessage(failure_message):
+    """Convert a regular failure message instance to CompoundFailureMessage.
+
+    Args:
+      failure_message: An instance of StageFailureMessage.
+
+    Returns:
+      A CompoundFailureMessage instance.
+    """
+    return CompoundFailureMessage(
+        StageFailure.GetStageFailureFromMessage(failure_message),
+        extra_info=failure_message.extra_info,
+        stage_prefix_name=failure_message.stage_prefix_name)
+
+  def HasEmptyList(self):
+    """Check whether the inner failure list is empty.
+
+    Returns:
+      True if self.inner_failures is empty; else, False.
+    """
+    return not bool(self.inner_failures)
+
+  def HasExceptionCategories(self, exception_categories):
+    """Check whether any of the inner failures matches the exception categories.
+
+    Args:
+      exception_categories: A set of exception categories (members of
+        constants.EXCEPTION_CATEGORY_ALL_CATEGORIES).
+
+    Returns:
+      True if any of the inner failures matches a member in
+      exception_categories; else, False.
+    """
+    return any(x.exception_category in exception_categories
+               for x in self.inner_failures)
+
+  def MatchesExceptionCategories(self, exception_categories):
+    """Check whether all of the inner failures matches the exception categories.
+
+    Args:
+      exception_categories: A set of exception categories (members of
+        constants.EXCEPTION_CATEGORY_ALL_CATEGORIES).
+
+    Returns:
+      True if all of the inner failures match a member in
+      exception_categories; else, False.
+    """
+    return (not self.HasEmptyList() and
+            all(x.exception_category in exception_categories
+                for x in self.inner_failures))
+
+
+class FailureMessageManager(object):
+  """Manager class to create a failure message or reconstruct messages."""
+
+  @classmethod
+  def CreateMessage(cls, stage_failure, **kwargs):
+    """Create a failure message instance depending on the exception type.
+
+    Args:
+      stage_failure: An instance of StageFailure.
+      kwargs: Extra message information to pass to StageFailureMessage.
+
+    Returns:
+      A failure message instance of StageFailureMessage class (or its
+        sub-class)
+    """
+    if stage_failure.exception_type in BUILD_SCRIPT_FAILURE_TYPES:
+      return BuildScriptFailureMessage(stage_failure, **kwargs)
+    elif stage_failure.exception_type in PACKAGE_BUILD_FAILURE_TYPES:
+      return PackageBuildFailureMessage(stage_failure, **kwargs)
+    else:
+      return StageFailureMessage(stage_failure, **kwargs)
+
+  @classmethod
+  def ReconstructMessages(cls, failure_messages):
+    """Reconstruct failure messages by nesting messages.
+
+    A failure message with a non-None outer_failure_id is an inner failure of
+    its outer failure message (failure_id == outer_failure_id). This method
+    takes a list of failure messages and reconstructs the list by 1) converting
+    each outer failure message into a CompoundFailureMessage instance and
+    2) inserting the inner failure messages into the inner_failures list of
+    their outer failure messages. CompoundFailures in CIDB aren't nested
+    (see failures_lib.ReportStageFailure), so there isn't another inner failure
+    list layer in an inner failure message and there are no circular
+    dependencies.
+
+    For example, given failure_messages list
+      [A(failure_id=1),
+       B(failure_id=2, outer_failure_id=1),
+       C(failure_id=3, outer_failure_id=1),
+       D(failure_id=4),
+       E(failure_id=5, outer_failure_id=4),
+       F(failure_id=6)]
+    this method returns a reconstructed list:
+      [A(failure_id=1, inner_failures=[B(failure_id=2, outer_failure_id=1),
+                                       C(failure_id=3, outer_failure_id=1)]),
+       D(failure_id=4, inner_failures=[E(failure_id=5, outer_failure_id=4)]),
+       F(failure_id=6)]
+
+    Args:
+      failure_messages: A list of failure message instances, not nested.
+
+    Returns:
+      A list of failure message instances of StageFailureMessage class (or its
+        sub-class). Failure messages with a non-None outer_failure_id are nested
+        into the inner_failures list of their outer failure messages.
+    """
+    failure_message_dict = {x.failure_id: x for x in failure_messages}
+
+    for failure in failure_messages:
+      if failure.outer_failure_id is not None:
+        assert failure.outer_failure_id in failure_message_dict
+        outer_failure = failure_message_dict[failure.outer_failure_id]
+        if not isinstance(outer_failure, CompoundFailureMessage):
+          outer_failure = CompoundFailureMessage.GetFailureMessage(
+              outer_failure)
+          failure_message_dict[outer_failure.failure_id] = outer_failure
+
+        outer_failure.inner_failures.append(failure)
+        del failure_message_dict[failure.failure_id]
+
+    return list(failure_message_dict.values())
+
+  @classmethod
+  def ConstructStageFailureMessages(cls, stage_failures):
+    """Construct stage failure messages from failure entries from CIDB.
+
+    Args:
+      stage_failures: A list of StageFailure instances.
+
+    Returns:
+      A list of stage failure message instances of StageFailureMessage class
+      (or its sub-class). See return type of ReconstructMessages().
+    """
+    failure_messages = [cls.CreateMessage(f) for f in stage_failures]
+
+    return cls.ReconstructMessages(failure_messages)
diff --git a/utils/frozen_chromite/lib/failures_lib.py b/utils/frozen_chromite/lib/failures_lib.py
new file mode 100644
index 0000000..2db1c1c
--- /dev/null
+++ b/utils/frozen_chromite/lib/failures_lib.py
@@ -0,0 +1,481 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Classes of failure types."""
+
+from __future__ import print_function
+
+import collections
+import json
+import sys
+import traceback
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import failure_message_lib
+from autotest_lib.utils.frozen_chromite.lib import metrics
+
+
+class StepFailure(Exception):
+  """StepFailure exceptions indicate that a cbuildbot step failed.
+
+  Exceptions that derive from StepFailure should meet the following
+  criteria:
+    1) The failure indicates that a cbuildbot step failed.
+    2) The necessary information to debug the problem has already been
+       printed in the logs for the stage that failed.
+    3) __str__() should be brief enough to include in a Commit Queue
+       failure message.
+  """
+
+  # The constants.EXCEPTION_CATEGORY_ALL_CATEGORIES values that this exception
+  # maps to. Subclasses should redefine this class constant to map to a
+  # different category.
+  EXCEPTION_CATEGORY = constants.EXCEPTION_CATEGORY_UNKNOWN
+
+  def EncodeExtraInfo(self):
+    """Encode extra_info into a json string, can be overwritten by subclasses"""
+
+  def ConvertToStageFailureMessage(self, build_stage_id, stage_name,
+                                   stage_prefix_name=None):
+    """Convert StepFailure to StageFailureMessage.
+
+    Args:
+      build_stage_id: The id of the build stage.
+      stage_name: The name (string) of the failed stage.
+      stage_prefix_name: The prefix name (string) of the failed stage,
+          default to None.
+
+    Returns:
+      An instance of failure_message_lib.StageFailureMessage.
+    """
+    stage_failure = failure_message_lib.StageFailure(
+        None, build_stage_id, None, self.__class__.__name__, str(self),
+        self.EXCEPTION_CATEGORY, self.EncodeExtraInfo(), None, stage_name,
+        None, None, None, None, None, None, None, None, None, None)
+    return failure_message_lib.StageFailureMessage(
+        stage_failure, stage_prefix_name=stage_prefix_name)
+
+
+# A namedtuple to hold information of an exception.
+ExceptInfo = collections.namedtuple(
+    'ExceptInfo', ['type', 'str', 'traceback'])
+
+
+def CreateExceptInfo(exception, tb):
+  """Creates a list of ExceptInfo objects from |exception| and |tb|.
+
+  Creates an ExceptInfo object from |exception| and |tb|. If
+  |exception| is a CompoundFailure with a non-empty list of exc_infos,
+  simply returns exception.exc_infos. Note that we do not preserve the type
+  of |exception| in this case.
+
+  Args:
+    exception: The exception.
+    tb: The textual traceback.
+
+  Returns:
+    A list of ExceptInfo objects.
+  """
+  if isinstance(exception, CompoundFailure) and exception.exc_infos:
+    return exception.exc_infos
+
+  return [ExceptInfo(exception.__class__, str(exception), tb)]
+
+
+class CompoundFailure(StepFailure):
+  """An exception that contains a list of ExceptInfo objects."""
+
+  def __init__(self, message='', exc_infos=None):
+    """Initializes an CompoundFailure instance.
+
+    Args:
+      message: A string describing the failure.
+      exc_infos: A list of ExceptInfo objects.
+    """
+    self.exc_infos = exc_infos if exc_infos else []
+    if not message:
+      # By default, print all stored ExceptInfo objects. This is the
+      # preferred behavior because we'd always have the full
+      # tracebacks to debug the failure.
+      message = '\n'.join('{e.type}: {e.str}\n{e.traceback}'.format(e=ex)
+                          for ex in self.exc_infos)
+    self.msg = message
+
+    super(CompoundFailure, self).__init__(message)
+
+  def ToSummaryString(self):
+    """Returns a string with type and string of each ExceptInfo object.
+
+    This does not include the textual tracebacks on purpose, so the
+    message is more readable on the waterfall.
+    """
+    if self.HasEmptyList():
+      # Fall back to returning self.msg if the list is empty.
+      return self.msg
+    else:
+      return '\n'.join(['%s: %s' % (e.type, e.str) for e in self.exc_infos])
+
+  def HasEmptyList(self):
+    """Returns True if self.exc_infos is empty."""
+    return not bool(self.exc_infos)
+
+  def HasFailureType(self, cls):
+    """Returns True if any of the failures matches |cls|."""
+    return any(issubclass(x.type, cls) for x in self.exc_infos)
+
+  def MatchesFailureType(self, cls):
+    """Returns True if all failures matches |cls|."""
+    return (not self.HasEmptyList() and
+            all(issubclass(x.type, cls) for x in self.exc_infos))
+
+  def HasFatalFailure(self, whitelist=None):
+    """Determine if there are non-whitlisted failures.
+
+    Args:
+      whitelist: A list of whitelisted exception types.
+
+    Returns:
+      Returns True if any failure is not in |whitelist|.
+    """
+    if not whitelist:
+      return not self.HasEmptyList()
+
+    for ex in self.exc_infos:
+      if all(not issubclass(ex.type, cls) for cls in whitelist):
+        return True
+
+    return False
+
+  def ConvertToStageFailureMessage(self, build_stage_id, stage_name,
+                                   stage_prefix_name=None):
+    """Convert CompoundFailure to StageFailureMessage.
+
+    Args:
+      build_stage_id: The id of the build stage.
+      stage_name: The name (string) of the failed stage.
+      stage_prefix_name: The prefix name (string) of the failed stage,
+          default to None.
+
+    Returns:
+      An instance of failure_message_lib.StageFailureMessage.
+    """
+    stage_failure = failure_message_lib.StageFailure(
+        None, build_stage_id, None, self.__class__.__name__, str(self),
+        self.EXCEPTION_CATEGORY, self.EncodeExtraInfo(), None, stage_name,
+        None, None, None, None, None, None, None, None, None, None)
+    compound_failure_message = failure_message_lib.CompoundFailureMessage(
+        stage_failure, stage_prefix_name=stage_prefix_name)
+
+    for exc_class, exc_str, _ in self.exc_infos:
+      inner_failure = failure_message_lib.StageFailure(
+          None, build_stage_id, None, exc_class.__name__, exc_str,
+          _GetExceptionCategory(exc_class), None, None, stage_name,
+          None, None, None, None, None, None, None, None, None, None)
+      inner_failure_message = failure_message_lib.StageFailureMessage(
+          inner_failure, stage_prefix_name=stage_prefix_name)
+      compound_failure_message.inner_failures.append(inner_failure_message)
+
+    return compound_failure_message
+
+
+class ExitEarlyException(Exception):
+  """Exception when a stage finishes and exits early."""
+
+# ExitEarlyException is used to simulate sys.exit(0). Like SystemExit, which
+# derives from BaseException, it should not be caught as Exception and have
+# its type reset when re-raised.
+EXCEPTIONS_TO_EXCLUDE = (ExitEarlyException,)
+
+class SetFailureType(object):
+  """A wrapper to re-raise the exception as the pre-set type."""
+
+  def __init__(self, category_exception, source_exception=None,
+               exclude_exceptions=EXCEPTIONS_TO_EXCLUDE):
+    """Initializes the decorator.
+
+    Args:
+      category_exception: The exception type to re-raise as. It must be
+        a subclass of CompoundFailure.
+      source_exception: The exception types to re-raise. By default, re-raise
+        all Exception classes.
+      exclude_exceptions: Do not set the type of the exception if it is a
+        subclass of one of the exceptions in exclude_exceptions. Defaults to
+        EXCEPTIONS_TO_EXCLUDE.
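+
+    Examples:
+      A minimal sketch (the decorated function is illustrative):
+        @SetFailureType(InfrastructureFailure)
+        def FetchFromGoB(query):
+          ...  # exceptions raised here are re-raised as InfrastructureFailure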
+    """
+    assert issubclass(category_exception, CompoundFailure)
+    self.category_exception = category_exception
+    self.source_exception = source_exception
+    if self.source_exception is None:
+      self.source_exception = Exception
+    self.exclude_exceptions = exclude_exceptions
+
+  def __call__(self, functor):
+    """Returns a wrapped function."""
+    def wrapped_functor(*args, **kwargs):
+      try:
+        return functor(*args, **kwargs)
+      except self.source_exception:
+        # Get the information about the original exception.
+        exc_type, exc_value, _ = sys.exc_info()
+        exc_traceback = traceback.format_exc()
+        if self.exclude_exceptions is not None:
+          for exclude_exception in self.exclude_exceptions:
+            if issubclass(exc_type, exclude_exception):
+              raise
+        if issubclass(exc_type, self.category_exception):
+          # Do not re-raise if the exception is a subclass of the set
+          # exception type because it offers more information.
+          raise
+        else:
+          exc_infos = CreateExceptInfo(exc_value, exc_traceback)
+          raise self.category_exception(exc_infos=exc_infos)
+
+    return wrapped_functor
+
+
+class RetriableStepFailure(StepFailure):
+  """This exception is thrown when a step failed, but should be retried."""
+
+
+# TODO(nxia): Everytime the class name is changed, add the new class name to
+# BUILD_SCRIPT_FAILURE_TYPES.
+class BuildScriptFailure(StepFailure):
+  """This exception is thrown when a build command failed.
+
+  It is intended to provide a shorter summary of what command failed,
+  for usage in failure messages from the Commit Queue, so as to ensure
+  that developers aren't spammed with giant error messages when common
+  commands (e.g. build_packages) fail.
+  """
+
+  EXCEPTION_CATEGORY = constants.EXCEPTION_CATEGORY_BUILD
+
+  def __init__(self, exception, shortname):
+    """Construct a BuildScriptFailure object.
+
+    Args:
+      exception: A RunCommandError object.
+      shortname: Short name for the command we're running.
+    """
+    StepFailure.__init__(self)
+    assert isinstance(exception, cros_build_lib.RunCommandError)
+    self.exception = exception
+    self.shortname = shortname
+    self.args = (exception, shortname)
+
+  def __str__(self):
+    """Summarize a build command failure briefly."""
+    result = self.exception.result
+    if result.returncode:
+      return '%s failed (code=%s)' % (self.shortname, result.returncode)
+    else:
+      return self.exception.msg
+
+  def EncodeExtraInfo(self):
+    """Encode extra_info into a json string.
+
+    Returns:
+      A json string containing shortname.
+    """
+    extra_info_dict = {
+        'shortname': self.shortname,
+    }
+    return json.dumps(extra_info_dict)
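+
+  # Illustrative usage (the command name is a placeholder): a failing build
+  # command can be reported with a short summary instead of its full output.
+  #
+  #   try:
+  #     cros_build_lib.run(['./build_packages'])
+  #   except cros_build_lib.RunCommandError as e:
+  #     raise BuildScriptFailure(e, 'build_packages')
+  #
+  # str() on the raised failure then reads like
+  # 'build_packages failed (code=1)'.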
+
+
+# TODO(nxia): Every time the class name is changed, add the new class name to
+# PACKAGE_BUILD_FAILURE_TYPES.
+class PackageBuildFailure(BuildScriptFailure):
+  """This exception is thrown when packages fail to build."""
+
+  def __init__(self, exception, shortname, failed_packages):
+    """Construct a PackageBuildFailure object.
+
+    Args:
+      exception: The underlying exception.
+      shortname: Short name for the command we're running.
+      failed_packages: List of packages that failed to build.
+    """
+    BuildScriptFailure.__init__(self, exception, shortname)
+    self.failed_packages = set(failed_packages)
+    self.args = (exception, shortname, failed_packages)
+
+  def __str__(self):
+    return ('Packages failed in %s: %s'
+            % (self.shortname, ' '.join(sorted(self.failed_packages))))
+
+  def EncodeExtraInfo(self):
+    """Encode extra_info into a json string.
+
+    Returns:
+      A json string containing shortname and failed_packages.
+    """
+    extra_info_dict = {
+        'shortname': self.shortname,
+        'failed_packages': list(self.failed_packages)
+    }
+    return json.dumps(extra_info_dict)
+
+  def BuildCompileFailureOutputJson(self):
+    """Build proto BuildCompileFailureOutput compatible JSON output.
+
+    Returns:
+      A json string with BuildCompileFailureOutput proto as json.
+    """
+    failures = []
+    for pkg in self.failed_packages:
+      failures.append({'rule': 'emerge', 'output_targets': pkg})
+    wrapper = {'failures': failures}
+    return json.dumps(wrapper, indent=2)
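+
+  # Sample output for failed_packages == {'chromeos-base/foo'} (the package
+  # name is a placeholder):
+  #
+  #   {
+  #     "failures": [
+  #       {
+  #         "rule": "emerge",
+  #         "output_targets": "chromeos-base/foo"
+  #       }
+  #     ]
+  #   }
+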
+
+class InfrastructureFailure(CompoundFailure):
+  """Raised if a stage fails due to infrastructure issues."""
+
+  EXCEPTION_CATEGORY = constants.EXCEPTION_CATEGORY_INFRA
+
+
+# ChromeOS Test Lab failures.
+class TestLabFailure(InfrastructureFailure):
+  """Raised if a stage fails due to hardware lab infrastructure issues."""
+
+  EXCEPTION_CATEGORY = constants.EXCEPTION_CATEGORY_LAB
+
+
+class SuiteTimedOut(TestLabFailure):
+  """Raised if a test suite timed out with no test failures."""
+
+
+class BoardNotAvailable(TestLabFailure):
+  """Raised if the board is not available in the lab."""
+
+
+class SwarmingProxyFailure(TestLabFailure):
+  """Raised when error related to swarming proxy occurs."""
+
+
+# Gerrit-on-Borg failures.
+class GoBFailure(InfrastructureFailure):
+  """Raised if a stage fails due to Gerrit-on-Borg (GoB) issues."""
+
+
+class GoBQueryFailure(GoBFailure):
+  """Raised if a stage fails due to Gerrit-on-Borg (GoB) query errors."""
+
+
+class GoBSubmitFailure(GoBFailure):
+  """Raised if a stage fails due to Gerrit-on-Borg (GoB) submission errors."""
+
+
+class GoBFetchFailure(GoBFailure):
+  """Raised if a stage fails due to Gerrit-on-Borg (GoB) fetch errors."""
+
+
+# Google Storage failures.
+class GSFailure(InfrastructureFailure):
+  """Raised if a stage fails due to Google Storage (GS) issues."""
+
+
+class GSUploadFailure(GSFailure):
+  """Raised if a stage fails due to Google Storage (GS) upload issues."""
+
+
+class GSDownloadFailure(GSFailure):
+  """Raised if a stage fails due to Google Storage (GS) download issues."""
+
+
+# Builder failures.
+class BuilderFailure(InfrastructureFailure):
+  """Raised if a stage fails due to builder issues."""
+
+
+class MasterSlaveVersionMismatchFailure(BuilderFailure):
+  """Raised if a slave build has a different full_version than its master."""
+
+# Crash collection service failures.
+class CrashCollectionFailure(InfrastructureFailure):
+  """Raised if a stage fails due to crash collection services."""
+
+
+class TestFailure(StepFailure):
+  """Raised if a test stage (e.g. VMTest) fails."""
+
+  EXCEPTION_CATEGORY = constants.EXCEPTION_CATEGORY_TEST
+
+
+class TestWarning(StepFailure):
+  """Raised if a test stage (e.g. VMTest) returns a warning code."""
+
+
+def ReportStageFailure(exception, metrics_fields=None):
+  """Reports stage failure to Mornach along with inner exceptions.
+
+  Args:
+    exception: The failure exception to report.
+    metrics_fields: (Optional) Fields for ts_mon metric.
+  """
+  _InsertFailureToMonarch(
+      exception_category=_GetExceptionCategory(type(exception)),
+      metrics_fields=metrics_fields)
+
+  # This assumes that CompoundFailure can't be nested.
+  if isinstance(exception, CompoundFailure):
+    for exc_class, _, _ in exception.exc_infos:
+      _InsertFailureToMonarch(
+          exception_category=_GetExceptionCategory(exc_class),
+          metrics_fields=metrics_fields)
+
+
+def _InsertFailureToMonarch(
+    exception_category=constants.EXCEPTION_CATEGORY_UNKNOWN,
+    metrics_fields=None):
+  """Report a single stage failure to Mornach if needed.
+
+  Args:
+    exception_category: (Optional) One of
+                        constants.EXCEPTION_CATEGORY_ALL_CATEGORIES.
+                        Defaults to 'unknown'.
+    metrics_fields: (Optional) Fields for ts_mon metric.
+  """
+  if (metrics_fields is not None and
+      exception_category != constants.EXCEPTION_CATEGORY_UNKNOWN):
+    counter = metrics.Counter(constants.MON_STAGE_FAILURE_COUNT)
+    metrics_fields['exception_category'] = exception_category
+    counter.increment(fields=metrics_fields)
+
+
+def GetStageFailureMessageFromException(stage_name, build_stage_id,
+                                        exception, stage_prefix_name=None):
+  """Get StageFailureMessage from an exception.
+
+  Args:
+    stage_name: The name (string) of the failed stage.
+    build_stage_id: The id of the failed build stage.
+    exception: The BaseException instance to convert to StageFailureMessage.
+    stage_prefix_name: The prefix name (string) of the failed stage,
+        defaults to None.
+
+  Returns:
+    An instance of failure_message_lib.StageFailureMessage.
+  """
+  if isinstance(exception, StepFailure):
+    return exception.ConvertToStageFailureMessage(
+        build_stage_id, stage_name, stage_prefix_name=stage_prefix_name)
+  else:
+    stage_failure = failure_message_lib.StageFailure(
+        None, build_stage_id, None, type(exception).__name__, str(exception),
+        _GetExceptionCategory(type(exception)), None, None, stage_name,
+        None, None, None, None, None, None, None, None, None, None)
+
+    return failure_message_lib.StageFailureMessage(
+        stage_failure, stage_prefix_name=stage_prefix_name)
+
+
+def _GetExceptionCategory(exception_class):
+  # Do not use try/except here. If a subclass of StepFailure does not have a
+  # valid EXCEPTION_CATEGORY, it is a programming error, not a runtime error.
+  if issubclass(exception_class, StepFailure):
+    return exception_class.EXCEPTION_CATEGORY
+  else:
+    return constants.EXCEPTION_CATEGORY_UNKNOWN
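+
+
+# Illustrative category resolution for a few exception types:
+#
+#   _GetExceptionCategory(PackageBuildFailure)  # -> EXCEPTION_CATEGORY_BUILD
+#   _GetExceptionCategory(SuiteTimedOut)        # -> EXCEPTION_CATEGORY_LAB
+#   _GetExceptionCategory(ValueError)           # -> EXCEPTION_CATEGORY_UNKNOWN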
diff --git a/utils/frozen_chromite/lib/gce.py b/utils/frozen_chromite/lib/gce.py
new file mode 100644
index 0000000..f7bdac9
--- /dev/null
+++ b/utils/frozen_chromite/lib/gce.py
@@ -0,0 +1,714 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A convinient wrapper of the GCE python API.
+
+Public methods in class GceContext raise HttpError when the underlining call to
+Google API fails, or gce.Error on other failures.
+"""
+
+from __future__ import print_function
+
+from googleapiclient.discovery import build
+from googleapiclient.errors import HttpError
+from googleapiclient.http import HttpRequest
+import httplib2
+from oauth2client.client import GoogleCredentials
+
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
+
+
+class Error(Exception):
+  """Base exception for this module."""
+
+
+class ResourceNotFoundError(Error):
+  """Exceptions raised when requested GCE resource was not found."""
+
+
+class RetryOnServerErrorHttpRequest(HttpRequest):
+  """A HttpRequest that will be retried on server errors automatically."""
+
+  def __init__(self, num_retries, *args, **kwargs):
+    """Constructor for RetryOnServerErrorHttpRequest."""
+    self.num_retries = num_retries
+    super(RetryOnServerErrorHttpRequest, self).__init__(*args, **kwargs)
+
+  def execute(self, http=None, num_retries=None):
+    """Excutes a RetryOnServerErrorHttpRequest.
+
+    HttpRequest.execute() has the option of automatically retrying on server
+    errors, i.e., 500 status codes. Call it with a non-zero value of
+    |num_retries| will cause failed requests to be retried.
+
+    Args:
+      http: The httplib2.http to send this request through.
+      num_retries: Number of retries. Class default value will be used if
+          omitted.
+
+    Returns:
+      A deserialized object model of the response body as determined
+          by the postproc. See HttpRequest.execute().
+    """
+    return super(RetryOnServerErrorHttpRequest, self).execute(
+        http=http, num_retries=num_retries or self.num_retries)
+
+
+def _GetMetdataValue(metadata, key):
+  """Finds a value corresponding to a given metadata key.
+
+  Args:
+    metadata: The metadata object, i.e. a dict containing 'items', a list of
+      key-value pairs.
+    key: name of the key.
+
+  Returns:
+    Corresponding value or None if it was not found.
+  """
+  for item in metadata['items']:
+    if item['key'] == key:
+      return item['value']
+  return None
+
+
+def _UpdateMetadataValue(metadata, key, value):
+  """Updates a single key-value pair in a metadata object.
+
+  Args:
+    metadata: The metadata object, i.e. a dict containing 'items', a list of
+      key-value pairs.
+    key: name of the key.
+    value: new value for the key, or None if it should be removed.
+  """
+  items = metadata.setdefault('items', [])
+  for item in items:
+    if item['key'] == key:
+      if value is None:
+        items.remove(item)
+      else:
+        item['value'] = value
+      return
+
+  if value is not None:
+    items.append({
+        'key': key,
+        'value': value,
+    })
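+
+# The metadata objects handled by the helpers above follow the GCE resource
+# shape, e.g. (illustrative key and value):
+#
+#   {'items': [{'key': 'ssh-keys', 'value': 'user:ssh-rsa AAAA...'}]}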
+
+
+class GceContext(object):
+  """A convinient wrapper around the GCE Python API."""
+
+  # These constants are made public so that users can customize as they need.
+  DEFAULT_TIMEOUT_SEC = 5 * 60
+  INSTANCE_OPERATIONS_TIMEOUT_SEC = 10 * 60
+  IMAGE_OPERATIONS_TIMEOUT_SEC = 10 * 60
+
+  _GCE_SCOPES = (
+      'https://www.googleapis.com/auth/compute',  # CreateInstance, CreateImage
+      'https://www.googleapis.com/auth/devstorage.full_control', # CreateImage
+  )
+  _DEFAULT_NETWORK = 'default'
+  _DEFAULT_MACHINE_TYPE = 'n1-standard-8'
+
+  # Project default service account and scopes.
+  _DEFAULT_SERVICE_ACCOUNT_EMAIL = 'default'
+  # The list is in line with what the gcloud cli uses.
+  # https://cloud.google.com/sdk/gcloud/reference/compute/instances/create
+  _DEFAULT_INSTANCE_SCOPES = [
+      'https://www.googleapis.com/auth/cloud.useraccounts.readonly',
+      'https://www.googleapis.com/auth/devstorage.read_only',
+      'https://www.googleapis.com/auth/logging.write',
+  ]
+
+  # This is made public to allow easy customization of the retry behavior.
+  RETRIES = 2
+
+  def __init__(self, project, zone, credentials, thread_safe=False):
+    """Initializes GceContext.
+
+    Args:
+      project: The GCP project to create instances in.
+      zone: The default zone to create instances in.
+      credentials: The credentials used to call the GCE API.
+      thread_safe: Whether the client is expected to be thread safe.
+    """
+    self.project = project
+    self.zone = zone
+
+    def _BuildRequest(http, *args, **kwargs):
+      """Custom request builder."""
+      return self._BuildRetriableRequest(self.RETRIES, http, thread_safe,
+                                         credentials, *args, **kwargs)
+
+    self.gce_client = build('compute', 'v1', credentials=credentials,
+                            requestBuilder=_BuildRequest)
+
+    self.region = self.GetZoneRegion(zone)
+
+  @classmethod
+  def ForServiceAccount(cls, project, zone, json_key_file):
+    """Creates a GceContext using service account credentials.
+
+    About service account:
+    https://developers.google.com/api-client-library/python/auth/service-accounts
+
+    Args:
+      project: The GCP project to create images and instances in.
+      zone: The default zone to create instances in.
+      json_key_file: Path to the service account JSON key.
+
+    Returns:
+      GceContext.
+    """
+    credentials = GoogleCredentials.from_stream(json_key_file).create_scoped(
+        cls._GCE_SCOPES)
+    return GceContext(project, zone, credentials)
+
+  @classmethod
+  def ForServiceAccountThreadSafe(cls, project, zone, json_key_file):
+    """Creates a thread-safe GceContext using service account credentials.
+
+    About service account:
+    https://developers.google.com/api-client-library/python/auth/service-accounts
+
+    Args:
+      project: The GCP project to create images and instances in.
+      zone: The default zone to create instances in.
+      json_key_file: Path to the service account JSON key.
+
+    Returns:
+      GceContext.
+    """
+    credentials = GoogleCredentials.from_stream(json_key_file).create_scoped(
+        cls._GCE_SCOPES)
+    return GceContext(project, zone, credentials, thread_safe=True)
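+
+  # Illustrative construction (project, zone and key path are placeholders):
+  #
+  #   gce = GceContext.ForServiceAccount(
+  #       'my-gcp-project', 'us-central1-a', '/path/to/key.json')
+  #   print(gce.ListInstances())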
+
+  def CreateAddress(self, name, region=None):
+    """Reserves an external IP address.
+
+    Args:
+      name: The name to assign to the address.
+      region: Region to reserve the address in.
+
+    Returns:
+      The reserved address as a string.
+    """
+    body = {
+        'name': name,
+    }
+    operation = self.gce_client.addresses().insert(
+        project=self.project,
+        region=region or self.region,
+        body=body).execute()
+    self._WaitForRegionOperation(
+        operation['name'], region,
+        timeout_sec=self.INSTANCE_OPERATIONS_TIMEOUT_SEC)
+
+    address = self.gce_client.addresses().get(
+        project=self.project,
+        region=region or self.region,
+        address=name).execute()
+
+    return address['address']
+
+  def DeleteAddress(self, name, region=None):
+    """Frees up an external IP address.
+
+    Args:
+      name: The name of the address.
+      region: Region of the address.
+    """
+    operation = self.gce_client.addresses().delete(
+        project=self.project,
+        region=region or self.region,
+        address=name).execute()
+    self._WaitForRegionOperation(
+        operation['name'], region=region or self.region,
+        timeout_sec=self.INSTANCE_OPERATIONS_TIMEOUT_SEC)
+
+  def GetZoneRegion(self, zone=None):
+    """Resolves name of the region that a zone belongs to.
+
+    Args:
+      zone: The zone to resolve.
+
+    Returns:
+      Name of the region corresponding to the zone.
+    """
+    zone_resource = self.gce_client.zones().get(
+        project=self.project,
+        zone=zone or self.zone).execute()
+    return zone_resource['region'].split('/')[-1]
+
+  def CreateInstance(self, name, image, zone=None, network=None, subnet=None,
+                     machine_type=None, default_scopes=True,
+                     static_address=None, **kwargs):
+    """Creates an instance with the given image and waits until it's ready.
+
+    Args:
+      name: Instance name.
+      image: Fully spelled URL of the image, e.g., for private images,
+          'global/images/my-private-image', or for images from a
+          publicly-available project,
+          'projects/debian-cloud/global/images/debian-7-wheezy-vYYYYMMDD'.
+          Details:
+          https://cloud.google.com/compute/docs/reference/latest/instances/insert
+      zone: The zone to create the instance in. Default zone will be used if
+          omitted.
+      network: An existing network to create the instance in. Default network
+          will be used if omitted.
+      subnet: The subnet to create the instance in.
+      machine_type: The machine type to use. Default machine type will be used
+          if omitted.
+      default_scopes: If true, the default scopes are added to the instances.
+      static_address: External IP address to assign to the instance as a string.
+          If None, an ephemeral address will be used.
+      kwargs: Other possible Instance Resource properties.
+          https://cloud.google.com/compute/docs/reference/latest/instances#resource
+          Note that values from kwargs will override properties constructed
+          from positional arguments, i.e., name, image, zone, network and
+          machine_type.
+
+    Returns:
+      URL to the created instance.
+    """
+    logging.info('Creating instance "%s" with image "%s" ...', name, image)
+    network = 'global/networks/%s' % (network or self._DEFAULT_NETWORK)
+    machine_type = 'zones/%s/machineTypes/%s' % (
+        zone or self.zone, machine_type or self._DEFAULT_MACHINE_TYPE)
+    service_accounts = (
+        {
+            'email': self._DEFAULT_SERVICE_ACCOUNT_EMAIL,
+            'scopes': self._DEFAULT_INSTANCE_SCOPES,
+        },
+    ) if default_scopes else ()
+
+    config = {
+        'name': name,
+        'machineType': machine_type,
+        'disks': (
+            {
+                'boot': True,
+                'autoDelete': True,
+                'initializeParams': {
+                    'sourceImage': image,
+                },
+            },
+        ),
+        'networkInterfaces': (
+            {
+                'network': network,
+                'accessConfigs': (
+                    {
+                        'type': 'ONE_TO_ONE_NAT',
+                        'name': 'External NAT',
+                    },
+                ),
+            },
+        ),
+        'serviceAccounts': service_accounts,
+    }
+    config.update(**kwargs)
+    if static_address is not None:
+      config['networkInterfaces'][0]['accessConfigs'][0]['natIP'] = (
+          static_address)
+    if subnet is not None:
+      region = self.GetZoneRegion(zone)
+      config['networkInterfaces'][0]['subnetwork'] = (
+          'regions/%s/subnetworks/%s' % (region, subnet)
+      )
+    operation = self.gce_client.instances().insert(
+        project=self.project,
+        zone=zone or self.zone,
+        body=config).execute()
+    self._WaitForZoneOperation(
+        operation['name'],
+        timeout_sec=self.INSTANCE_OPERATIONS_TIMEOUT_SEC,
+        timeout_handler=lambda: self.DeleteInstance(name))
+    return operation['targetLink']
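+
+  # Illustrative call (image, subnet and address values are placeholders):
+  #
+  #   url = gce.CreateInstance(
+  #       'builder-1', 'global/images/my-private-image',
+  #       machine_type='n1-standard-4', subnet='builders',
+  #       static_address='203.0.113.7')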
+
+  def DeleteInstance(self, name, zone=None):
+    """Deletes an instance with the name and waits until it's done.
+
+    Args:
+      name: Name of the instance to delete.
+      zone: Zone the instance is in. Default zone will be used if omitted.
+    """
+    logging.info('Deleting instance "%s" ...', name)
+    operation = self.gce_client.instances().delete(
+        project=self.project,
+        zone=zone or self.zone,
+        instance=name).execute()
+    self._WaitForZoneOperation(
+        operation['name'], timeout_sec=self.INSTANCE_OPERATIONS_TIMEOUT_SEC)
+
+  def StartInstance(self, name, zone=None):
+    """Starts an instance with the name and waits until it's done.
+
+    Args:
+      name: Name of the instance to start.
+      zone: Zone the instance is in. Default zone will be used if omitted.
+    """
+    logging.info('Starting instance "%s" ...', name)
+    operation = self.gce_client.instances().start(
+        project=self.project,
+        zone=zone or self.zone,
+        instance=name).execute()
+    self._WaitForZoneOperation(
+        operation['name'], timeout_sec=self.INSTANCE_OPERATIONS_TIMEOUT_SEC)
+
+  def StopInstance(self, name, zone=None):
+    """Stops an instance with the name and waits until it's done.
+
+    Args:
+      name: Name of the instance to stop.
+      zone: Zone the instance is in. Default zone will be used if omitted.
+    """
+    logging.info('Stopping instance "%s" ...', name)
+    operation = self.gce_client.instances().stop(
+        project=self.project,
+        zone=zone or self.zone,
+        instance=name).execute()
+    self._WaitForZoneOperation(
+        operation['name'], timeout_sec=self.INSTANCE_OPERATIONS_TIMEOUT_SEC)
+
+  def CreateImage(self, name, source):
+    """Creates an image with the given |source|.
+
+    Args:
+      name: Name of the image to be created.
+      source:
+        Google Cloud Storage object of the source disk, e.g.,
+        'https://storage.googleapis.com/my-gcs-bucket/test_image.tar.gz'.
+
+    Returns:
+      URL to the created image.
+    """
+    logging.info('Creating image "%s" with source "%s" ...', name, source)
+    config = {
+        'name': name,
+        'rawDisk': {
+            'source': source,
+        },
+    }
+    operation = self.gce_client.images().insert(
+        project=self.project,
+        body=config).execute()
+    self._WaitForGlobalOperation(operation['name'],
+                                 timeout_sec=self.IMAGE_OPERATIONS_TIMEOUT_SEC,
+                                 timeout_handler=lambda: self.DeleteImage(name))
+    return operation['targetLink']
+
+  def DeleteImage(self, name):
+    """Deletes an image and waits until it's deleted.
+
+    Args:
+      name: Name of the image to delete.
+    """
+    logging.info('Deleting image "%s" ...', name)
+    operation = self.gce_client.images().delete(
+        project=self.project,
+        image=name).execute()
+    self._WaitForGlobalOperation(operation['name'],
+                                 timeout_sec=self.IMAGE_OPERATIONS_TIMEOUT_SEC)
+
+  def ListInstances(self, zone=None):
+    """Lists all instances.
+
+    Args:
+      zone: Zone the instances are in. Default zone will be used if
+            omitted.
+
+    Returns:
+      A list of Instance Resources if found, or an empty list otherwise.
+    """
+    result = self.gce_client.instances().list(project=self.project,
+                                              zone=zone or self.zone).execute()
+    return result.get('items', [])
+
+  def ListImages(self):
+    """Lists all images.
+
+    Returns:
+      A list of Image Resources if found, or an empty list otherwise.
+    """
+    result = self.gce_client.images().list(project=self.project).execute()
+    return result.get('items', [])
+
+  def GetInstance(self, instance, zone=None):
+    """Gets an Instance Resource by name and zone.
+
+    Args:
+      instance: Name of the instance.
+      zone: Zone the instance is in. Default zone will be used if omitted.
+
+    Returns:
+      An Instance Resource.
+
+    Raises:
+      ResourceNotFoundError if instance was not found, or HttpError on other
+      HTTP failures.
+    """
+    try:
+      return self.gce_client.instances().get(project=self.project,
+                                             zone=zone or self.zone,
+                                             instance=instance).execute()
+    except HttpError as e:
+      if e.resp.status == 404:
+        raise ResourceNotFoundError(
+            'Instance "%s" for project "%s" in zone "%s" was not found.' %
+            (instance, self.project, zone or self.zone))
+      else:
+        raise
+
+  def GetInstanceIP(self, instance, zone=None):
+    """Gets the external IP of an instance.
+
+    Args:
+      instance: Name of the instance to get IP for.
+      zone: Zone the instance is in. Default zone will be used if omitted.
+
+    Returns:
+      External IP address of the instance.
+
+    Raises:
+      Error: Something went wrong when trying to get IP for the instance.
+    """
+    result = self.GetInstance(instance, zone)
+    try:
+      return result['networkInterfaces'][0]['accessConfigs'][0]['natIP']
+    except (KeyError, IndexError):
+      raise Error('Failed to get IP address for instance %s' % instance)
+
+  def GetInstanceInternalIP(self, instance, zone=None):
+    """Gets the internal IP of an instance."""
+    result = self.GetInstance(instance, zone)
+    try:
+      return result['networkInterfaces'][0]['networkIP']
+    except (KeyError, IndexError):
+      raise Error('Failed to get internal IP for instance %s' % instance)
+
+  def GetImage(self, image):
+    """Gets an Image Resource by name.
+
+    Args:
+      image: Name of the image to look for.
+
+    Returns:
+      An Image Resource.
+
+    Raises:
+      ResourceNotFoundError: The requested image was not found.
+    """
+    try:
+      return self.gce_client.images().get(project=self.project,
+                                          image=image).execute()
+    except HttpError as e:
+      if e.resp.status == 404:
+        raise ResourceNotFoundError('Image "%s" for project "%s" was not found.'
+                                    % (image, self.project))
+      else:
+        raise
+
+  def InstanceExists(self, instance, zone=None):
+    """Checks if an instance exists in the current project.
+
+    Args:
+      instance: Name of the instance to check existence of.
+      zone: Zone the instance is in. Default zone will be used if omitted.
+
+    Returns:
+      True if the instance exists or False otherwise.
+    """
+    try:
+      return self.GetInstance(instance, zone) is not None
+    except ResourceNotFoundError:
+      return False
+
+  def ImageExists(self, image):
+    """Checks if an image exists in the current project.
+
+    Args:
+      image: Name of the image to check existence of.
+
+    Returns:
+      True if the image exists or False otherwise.
+    """
+    try:
+      return self.GetImage(image) is not None
+    except ResourceNotFoundError:
+      return False
+
+  def GetCommonInstanceMetadata(self, key):
+    """Looks up a single project metadata value.
+
+    Args:
+      key: Metadata key name.
+
+    Returns:
+      Metadata value corresponding to the key, or None if it was not found.
+    """
+    projects_data = self.gce_client.projects().get(
+        project=self.project).execute()
+    metadata = projects_data['commonInstanceMetadata']
+    return _GetMetdataValue(metadata, key)
+
+  def SetCommonInstanceMetadata(self, key, value):
+    """Sets a single project metadata value.
+
+    Args:
+      key: Metadata key to be set.
+      value: New value, or None if the given key should be removed.
+    """
+    projects_data = self.gce_client.projects().get(
+        project=self.project).execute()
+    metadata = projects_data['commonInstanceMetadata']
+    _UpdateMetadataValue(metadata, key, value)
+    operation = self.gce_client.projects().setCommonInstanceMetadata(
+        project=self.project,
+        body=metadata).execute()
+    self._WaitForGlobalOperation(operation['name'])
+
+  def GetInstanceMetadata(self, instance, key):
+    """Looks up instance's metadata value.
+
+    Args:
+      instance: Name of the instance.
+      key: Metadata key name.
+
+    Returns:
+      Metadata value corresponding to the key, or None if it was not found.
+    """
+    instance_data = self.GetInstance(instance)
+    metadata = instance_data['metadata']
+    return _GetMetdataValue(metadata, key)
+
+  def SetInstanceMetadata(self, instance, key, value):
+    """Sets a single instance metadata value.
+
+    Args:
+      instance: Name of the instance.
+      key: Metadata key to be set.
+      value: New value, or None if the given key should be removed.
+    """
+    instance_data = self.GetInstance(instance)
+    metadata = instance_data['metadata']
+    _UpdateMetadataValue(metadata, key, value)
+    operation = self.gce_client.instances().setMetadata(
+        project=self.project,
+        zone=self.zone,
+        instance=instance,
+        body=metadata).execute()
+    self._WaitForZoneOperation(operation['name'])
+
+  def _WaitForZoneOperation(self, operation, zone=None, timeout_sec=None,
+                            timeout_handler=None):
+    """Waits until a GCE ZoneOperation is finished or timed out.
+
+    Args:
+      operation: The GCE operation to wait for.
+      zone: The zone that |operation| belongs to.
+      timeout_sec: The maximum number of seconds to wait for.
+      timeout_handler: A callable to be executed when timeout happens.
+
+    Raises:
+      Error when timeout happens or the operation fails.
+    """
+    get_request = self.gce_client.zoneOperations().get(
+        project=self.project, zone=zone or self.zone, operation=operation)
+    self._WaitForOperation(operation, get_request, timeout_sec,
+                           timeout_handler=timeout_handler)
+
+  def _WaitForRegionOperation(self, operation, region, timeout_sec=None,
+                              timeout_handler=None):
+    """Waits until a GCE RegionOperation is finished or timed out.
+
+    Args:
+      operation: The GCE operation to wait for.
+      region: The region that |operation| belongs to.
+      timeout_sec: The maximum number of seconds to wait for.
+      timeout_handler: A callable to be executed when timeout happens.
+
+    Raises:
+      Error when timeout happens or the operation fails.
+    """
+    get_request = self.gce_client.regionOperations().get(
+        project=self.project, region=region or self.region, operation=operation)
+    self._WaitForOperation(operation, get_request, timeout_sec,
+                           timeout_handler=timeout_handler)
+
+  def _WaitForGlobalOperation(self, operation, timeout_sec=None,
+                              timeout_handler=None):
+    """Waits until a GCE GlobalOperation is finished or timed out.
+
+    Args:
+      operation: The GCE operation to wait for.
+      timeout_sec: The maximum number of seconds to wait for.
+      timeout_handler: A callable to be executed when timeout happens.
+
+    Raises:
+      Error when timeout happens or the operation fails.
+    """
+    get_request = self.gce_client.globalOperations().get(project=self.project,
+                                                         operation=operation)
+    self._WaitForOperation(operation, get_request, timeout_sec=timeout_sec,
+                           timeout_handler=timeout_handler)
+
+  def _WaitForOperation(self, operation, get_operation_request,
+                        timeout_sec=None, timeout_handler=None):
+    """Waits until timeout or the request gets a response with a 'DONE' status.
+
+    Args:
+      operation: The GCE operation to wait for.
+      get_operation_request:
+        The HTTP request to get the operation's status.
+        This request will be executed periodically until it returns a status
+        'DONE'.
+      timeout_sec: The maximum number of seconds to wait for.
+      timeout_handler: A callable to be executed when timeout happens.
+
+    Raises:
+      Error when timeout happens or the operation fails.
+    """
+    def _IsDone():
+      result = get_operation_request.execute()
+      if result['status'] == 'DONE':
+        if 'error' in result:
+          raise Error(result['error'])
+        return True
+      return False
+
+    try:
+      timeout = timeout_sec or self.DEFAULT_TIMEOUT_SEC
+      logging.info('Waiting up to %d seconds for operation [%s] to complete...',
+                   timeout, operation)
+      timeout_util.WaitForReturnTrue(_IsDone, timeout, period=1)
+    except timeout_util.TimeoutError:
+      if timeout_handler:
+        timeout_handler()
+      raise Error('Timeout waiting for operation [%s] to complete' % operation)
+
+  def _BuildRetriableRequest(self, num_retries, http, thread_safe=False,
+                             credentials=None, *args, **kwargs):
+    """Builds a request that will be automatically retried on server errors.
+
+    Args:
+      num_retries: The maximum number of times to retry until giving up.
+      http: An httplib2.Http object that this request will be executed through.
+      thread_safe: Whether or not the request needs to be thread-safe.
+      credentials: Credentials to apply to the request.
+      *args: Optional positional arguments.
+      **kwargs: Optional keyword arguments.
+
+    Returns:
+      RetryOnServerErrorHttpRequest: A request that will automatically be
+          retried on server errors.
+    """
+    if thread_safe:
+      # Create a new http object for every request.
+      http = credentials.authorize(httplib2.Http())
+    return RetryOnServerErrorHttpRequest(num_retries, http, *args, **kwargs)
diff --git a/utils/frozen_chromite/lib/git.py b/utils/frozen_chromite/lib/git.py
new file mode 100644
index 0000000..9e578ec
--- /dev/null
+++ b/utils/frozen_chromite/lib/git.py
@@ -0,0 +1,1567 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common functions for interacting with git and repo."""
+
+from __future__ import print_function
+
+import collections
+import datetime
+import errno
+import fnmatch
+import hashlib
+import os
+import re
+import string
+import subprocess
+from xml import sax
+
+import six
+
+from autotest_lib.utils.frozen_chromite.lib import config_lib
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import osutils
+
+
+class GitException(Exception):
+  """An exception related to git."""
+
+
+# remote: git remote name (e.g., 'origin',
+#   'https://chromium.googlesource.com/chromiumos/chromite.git', etc.).
+# ref: git remote/local ref name (e.g., 'refs/heads/master').
+# project_name: git project name (e.g., 'chromiumos/chromite').
+_RemoteRef = collections.namedtuple(
+    '_RemoteRef', ('remote', 'ref', 'project_name'))
+
+
+class RemoteRef(_RemoteRef):
+  """Object representing a remote ref."""
+
+  def __new__(cls, remote, ref, project_name=None):
+    return super(RemoteRef, cls).__new__(cls, remote, ref, project_name)
+
+
+def FindRepoDir(path):
+  """Returns the nearest higher-level repo dir from the specified path.
+
+  Args:
+    path: The path to start searching upward from.
+  """
+  return osutils.FindInPathParents(
+      '.repo', path, test_func=os.path.isdir)
+
+
+def FindRepoCheckoutRoot(path):
+  """Get the root of your repo managed checkout."""
+  repo_dir = FindRepoDir(path)
+  if repo_dir:
+    return os.path.dirname(repo_dir)
+  else:
+    return None
+
+
+def IsSubmoduleCheckoutRoot(path, remote, url):
+  """Tests to see if a directory is the root of a git submodule checkout.
+
+  Args:
+    path: The directory to test.
+    remote: The remote to compare the |url| with.
+    url: The exact URL the |remote| needs to be pointed at.
+  """
+  if os.path.isdir(path):
+    remote_url = cros_build_lib.run(
+        ['git', '--git-dir', path, 'config', 'remote.%s.url' % remote],
+        stdout=True, debug_level=logging.DEBUG,
+        check=False, encoding='utf-8').output.strip()
+    if remote_url == url:
+      return True
+  return False
+
+
+def GetGitGitdir(pwd):
+  """Probes for a git gitdir directory rooted at a directory.
+
+  Args:
+    pwd: Directory to probe. If a checkout, should be the root.
+
+  Returns:
+    Path of the gitdir directory. None if the directory is not a git repo.
+  """
+  if os.path.isdir(os.path.join(pwd, '.git')):
+    return os.path.join(pwd, '.git')
+  # Is this directory a bare repo with no checkout?
+  if os.path.isdir(os.path.join(
+      pwd, 'objects')) and os.path.isdir(os.path.join(pwd, 'refs')):
+    return pwd
+  return None
+
+
+def IsGitRepositoryCorrupted(cwd):
+  """Verify that the specified git repository is not corrupted.
+
+  Args:
+    cwd: The git repository to verify.
+
+  Returns:
+    True if the repository is corrupted.
+  """
+  cmd = ['fsck', '--no-progress', '--no-dangling']
+  try:
+    GarbageCollection(cwd)
+    RunGit(cwd, cmd)
+    return False
+  except cros_build_lib.RunCommandError as ex:
+    logging.warning(str(ex))
+    return True
+
+
+_HEX_CHARS = frozenset(string.hexdigits)
+
+
+def IsSHA1(value, full=True):
+  """Returns True if the given value looks like a sha1.
+
+  If full is True, then it must be the full length - 40 chars. If False, the
+  length must be between 6 and 40 chars, inclusive.
+  """
+  if not all(x in _HEX_CHARS for x in value):
+    return False
+  l = len(value)
+  if full:
+    return l == 40
+  return l >= 6 and l <= 40
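+
+# For example: IsSHA1('deadbeef', full=False) is True, IsSHA1('deadbeef') is
+# False (only 8 of the required 40 chars), and IsSHA1('refs/heads/main') is
+# False ('/' is not a hex digit).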
+
+
+def IsRefsTags(value):
+  """Return True if the given value looks like a tag.
+
+  Currently this is identified via refs/tags/ prefixing.
+  """
+  return value.startswith('refs/tags/')
+
+
+def GetGitRepoRevision(cwd, branch='HEAD', short=False):
+  """Find the revision of a branch.
+
+  Args:
+    cwd: The git repository to work with.
+    branch: Branch name. Defaults to current branch.
+    short: If set, output shorter unique SHA-1.
+
+  Returns:
+    Revision SHA-1.
+  """
+  cmd = ['rev-parse', branch]
+  if short:
+    cmd.insert(1, '--short')
+  return RunGit(cwd, cmd).output.strip()
+
+
+def IsReachable(cwd, to_ref, from_ref):
+  """Determine whether one commit ref is reachable from another.
+
+  Args:
+    cwd: The git repository to work with.
+    to_ref: The commit ref that may be reachable.
+    from_ref: The commit ref that |to_ref| may be reachable from.
+
+  Returns:
+    True if |to_ref| is reachable from |from_ref|.
+
+  Raises:
+    RunCommandError: if some error occurs, such as a commit ref not existing.
+  """
+  try:
+    RunGit(cwd, ['merge-base', '--is-ancestor', to_ref, from_ref])
+  except cros_build_lib.RunCommandError as e:
+    if e.result.returncode == 1:
+      return False
+    raise
+  return True
+
+
+def DoesCommitExistInRepo(cwd, commit):
+  """Determine whether a commit (SHA1 or ref) exists in a repo.
+
+  Args:
+    cwd: A directory within the project repo.
+    commit: The commit to look for. This can be a SHA1 or it can be a ref.
+
+  Returns:
+    True if the commit exists in the repo.
+  """
+  try:
+    RunGit(cwd, ['rev-list', '-n1', commit, '--'])
+  except cros_build_lib.RunCommandError as e:
+    if e.result.returncode == 128:
+      return False
+    raise
+  return True
+
+
+def GetCurrentBranch(cwd):
+  """Returns current branch of a repo, and None if repo is on detached HEAD."""
+  try:
+    ret = RunGit(cwd, ['symbolic-ref', '-q', 'HEAD'])
+    return StripRefsHeads(ret.output.strip(), False)
+  except cros_build_lib.RunCommandError as e:
+    if e.result.returncode != 1:
+      raise
+    return None
+
+
+def StripRefsHeads(ref, strict=True):
+  """Remove leading 'refs/heads/' from a ref name.
+
+  If strict is True, an Exception is thrown if the ref doesn't start with
+  refs/heads.  If strict is False, the original ref is returned.
+  """
+  if not ref.startswith('refs/heads/') and strict:
+    raise Exception('Ref name %s does not start with refs/heads/' % ref)
+
+  return ref.replace('refs/heads/', '')
+
+
+def StripRefs(ref):
+  """Remove leading 'refs/heads', 'refs/remotes/[^/]+/' from a ref name."""
+  ref = StripRefsHeads(ref, False)
+  if ref.startswith('refs/remotes/'):
+    return ref.split('/', 3)[-1]
+  return ref
+
+
+def NormalizeRef(ref):
+  """Convert git branch refs into fully qualified form."""
+  if ref and not ref.startswith('refs/'):
+    ref = 'refs/heads/%s' % ref
+  return ref
+
+
+def NormalizeRemoteRef(remote, ref):
+  """Convert git branch refs into fully qualified remote form."""
+  if ref:
+    # Support changing local ref to remote ref, or changing the remote
+    # for a remote ref.
+    ref = StripRefs(ref)
+
+    if not ref.startswith('refs/'):
+      ref = 'refs/remotes/%s/%s' % (remote, ref)
+
+  return ref
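+
+# Illustrative transformations ('cros' is an example remote name):
+#
+#   StripRefs('refs/heads/main')          # -> 'main'
+#   StripRefs('refs/remotes/cros/main')   # -> 'main'
+#   NormalizeRef('main')                  # -> 'refs/heads/main'
+#   NormalizeRemoteRef('cros', 'main')    # -> 'refs/remotes/cros/main'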
+
+
+class ProjectCheckout(dict):
+  """Attributes of a given project in the manifest checkout.
+
+  TODO(davidjames): Convert this into an ordinary object instead of a dict.
+  """
+
+  def __init__(self, attrs):
+    """Constructor.
+
+    Args:
+      attrs: The attributes associated with this checkout, as a dictionary.
+    """
+    dict.__init__(self, attrs)
+
+  def AssertPushable(self):
+    """Verify that it is safe to push changes to this repository."""
+    if not self['pushable']:
+      remote = self['remote']
+      raise AssertionError('Remote %s is not pushable.' % (remote,))
+
+  def IsBranchableProject(self):
+    """Return whether we can create a branch in the repo for this project."""
+    # Backwards compatibility is an issue here. Older manifests used a heuristic
+    # based on where the project is hosted. We must continue supporting it.
+    # (crbug.com/470690)
+    # Prefer explicit tagging.
+    if (self[constants.MANIFEST_ATTR_BRANCHING] ==
+        constants.MANIFEST_ATTR_BRANCHING_CREATE):
+      return True
+    if self[constants.MANIFEST_ATTR_BRANCHING] in (
+        constants.MANIFEST_ATTR_BRANCHING_PIN,
+        constants.MANIFEST_ATTR_BRANCHING_TOT):
+      return False
+
+    # Old heuristic.
+    site_params = config_lib.GetSiteParams()
+    if (self['remote'] not in site_params.CROS_REMOTES or
+        self['remote'] not in site_params.BRANCHABLE_PROJECTS):
+      return False
+    return re.match(site_params.BRANCHABLE_PROJECTS[self['remote']],
+                    self['name'])
+
+  def IsPinnableProject(self):
+    """Return whether we should pin to a revision on the CrOS branch."""
+    # Backwards compatibility is an issue here. Older manifests used a different
+    # tag to specify pinning behaviour. Support both for now. (crbug.com/470690)
+    # Prefer explicit tagging.
+    if self[constants.MANIFEST_ATTR_BRANCHING] != '':
+      return (self[constants.MANIFEST_ATTR_BRANCHING] ==
+              constants.MANIFEST_ATTR_BRANCHING_PIN)
+
+    # Old heuristic.
+    return cros_build_lib.BooleanShellValue(self.get('pin'), True)
+
+  def GetPath(self, absolute=False):
+    """Get the path to the checkout.
+
+    Args:
+      absolute: If True, return an absolute path. If False,
+        return a path relative to the repo root.
+    """
+    return self['local_path'] if absolute else self['path']
+
+
+class Manifest(object):
+  """SAX handler that parses the manifest document.
+
+  Attributes:
+    checkouts_by_name: A dictionary mapping the names for <project> tags to a
+      list of ProjectCheckout objects.
+    checkouts_by_path: A dictionary mapping paths for <project> tags to a single
+      ProjectCheckout object.
+    default: The attributes of the <default> tag.
+    includes: A list of XML files that should be pulled in to the manifest.
+      These includes are represented as a list of (name, path) tuples.
+    manifest_include_dir: If given, this is where to start looking for
+      include targets.
+    projects: DEPRECATED. A dictionary mapping the names for <project> tags to
+      a single ProjectCheckout object. This is now deprecated, since each
+      project can map to multiple ProjectCheckout objects.
+    remotes: A dictionary mapping <remote> tags to the associated attributes.
+    revision: The revision of the manifest repository. If not specified, this
+      will be TOT.
+  """
+
+  _instance_cache = {}
+
+  def __init__(self, source, manifest_include_dir=None):
+    """Initialize this instance.
+
+    Args:
+      source: The path to the manifest to parse.  May be a file handle.
+      manifest_include_dir: If given, this is where to start looking for
+        include targets.
+    """
+    self.source = source
+    self.default = {}
+    self._current_project_path = None
+    self._current_project_name = None
+    self._annotations = {}
+    self.checkouts_by_path = {}
+    self.checkouts_by_name = {}
+    self.remotes = {}
+    self.includes = []
+    self.revision = None
+    self.manifest_include_dir = manifest_include_dir
+    self._RunParser(source)
+    self.includes = tuple(self.includes)
+
+  def _RequireAttr(self, attr, attrs):
+    name = attrs.get('name')
+    assert attr in attrs, ('%s is missing a "%s" attribute; attrs: %r' %
+                           (name, attr, attrs))
+
+  def _RunParser(self, source, finalize=True):
+    parser = sax.make_parser()
+    handler = sax.handler.ContentHandler()
+    handler.startElement = self._StartElement
+    handler.endElement = self._EndElement
+    parser.setContentHandler(handler)
+
+    # Python 2 seems to expect either a file name (as a string) or an
+    # opened file as the parameter to parser.parse, whereas Python 3
+    # seems to expect a URL (as a string) or opened file. Make it
+    # compatible with both by opening files first.
+    with cros_build_lib.Open(source) as f:
+      parser.parse(f)
+
+    if finalize:
+      self._FinalizeAllProjectData()
+
+  def _StartElement(self, name, attrs):
+    """Stores the default manifest properties and per-project overrides."""
+    attrs = dict(attrs.items())
+    if name == 'default':
+      self.default = attrs
+    elif name == 'remote':
+      self._RequireAttr('name', attrs)
+      attrs.setdefault('alias', attrs['name'])
+      self.remotes[attrs['name']] = attrs
+    elif name == 'project':
+      self._RequireAttr('name', attrs)
+      self._current_project_path = attrs.get('path', attrs['name'])
+      self._current_project_name = attrs['name']
+      self.checkouts_by_path[self._current_project_path] = attrs
+      checkout = self.checkouts_by_name.setdefault(self._current_project_name,
+                                                   [])
+      checkout.append(attrs)
+      self._annotations = {}
+    elif name == 'annotation':
+      self._RequireAttr('name', attrs)
+      self._RequireAttr('value', attrs)
+      self._annotations[attrs['name']] = attrs['value']
+    elif name == 'manifest':
+      self.revision = attrs.get('revision')
+    elif name == 'include':
+      if self.manifest_include_dir is None:
+        raise OSError(
+            errno.ENOENT, 'No manifest_include_dir given, but an include was '
+            'encountered; attrs=%r' % (attrs,))
+      # Include is calculated relative to the manifest that has the include;
+      # thus set the path temporarily to the dirname of the target.
+      original_include_dir = self.manifest_include_dir
+      include_path = os.path.realpath(
+          os.path.join(original_include_dir, attrs['name']))
+      self.includes.append((attrs['name'], include_path))
+      self._RunParser(include_path, finalize=False)
+
+  def _EndElement(self, name):
+    """Store any child element properties into the parent element."""
+    if name == 'project':
+      assert (self._current_project_name is not None and
+              self._current_project_path is not None), (
+                  'Malformed xml: Encountered unmatched </project>')
+      self.checkouts_by_path[self._current_project_path].update(
+          self._annotations)
+      for checkout in self.checkouts_by_name[self._current_project_name]:
+        checkout.update(self._annotations)
+      self._current_project_path = None
+      self._current_project_name = None
+
+  def _FinalizeAllProjectData(self):
+    """Rewrite projects mixing defaults in and adding our attributes."""
+    for path_data in self.checkouts_by_path.values():
+      self._FinalizeProjectData(path_data)
+
+  def _FinalizeProjectData(self, attrs):
+    """Sets up useful properties for a project.
+
+    Args:
+      attrs: The attribute dictionary of a <project> tag.
+    """
+    for key in ('remote', 'revision'):
+      attrs.setdefault(key, self.default.get(key))
+
+    remote = attrs['remote']
+    assert remote in self.remotes, ('%s: %s not in %s' %
+                                    (self.source, remote, self.remotes))
+    remote_name = attrs['remote_alias'] = self.remotes[remote]['alias']
+
+    # 'repo manifest -r' adds an 'upstream' attribute to the project tag for the
+    # manifests it generates.  We can use the attribute to get a valid branch
+    # instead of a sha1 for these types of manifests.
+    upstream = attrs.get('upstream', attrs['revision'])
+    if IsSHA1(upstream):
+      # The current version of repo we use has a bug: When you create a new
+      # repo checkout from a revlocked manifest, the 'upstream' attribute will
+      # just point at a SHA1. The default revision will still be correct,
+      # however. For now, return the default revision as our best guess as to
+      # what the upstream branch for this repository would be. This guess may
+      # sometimes be wrong, but it's correct for all of the repositories where
+      # we need to push changes (e.g., the overlays).
+      # TODO(davidjames): Either fix the repo bug, or update our logic here to
+      # check the manifest repository to find the right tracking branch.
+      upstream = self.default.get('revision', 'refs/heads/master')
+
+    attrs['tracking_branch'] = 'refs/remotes/%s/%s' % (
+        remote_name, StripRefs(upstream),
+    )
+
+    site_params = config_lib.GetSiteParams()
+    attrs['pushable'] = remote in site_params.GIT_REMOTES
+    if attrs['pushable']:
+      attrs['push_remote'] = remote
+      attrs['push_remote_url'] = site_params.GIT_REMOTES[remote]
+      attrs['push_url'] = '%s/%s' % (attrs['push_remote_url'], attrs['name'])
+    groups = set(attrs.get('groups', 'default').replace(',', ' ').split())
+    groups.add('default')
+    attrs['groups'] = frozenset(groups)
+
+    # Compute the local ref space.
+    # Sanitize a couple path fragments to simplify assumptions in this
+    # class, and in consuming code.
+    attrs.setdefault('path', attrs['name'])
+    for key in ('name', 'path'):
+      attrs[key] = os.path.normpath(attrs[key])
+
+    if constants.MANIFEST_ATTR_BRANCHING in attrs:
+      assert (attrs[constants.MANIFEST_ATTR_BRANCHING] in
+              constants.MANIFEST_ATTR_BRANCHING_ALL)
+    else:
+      attrs[constants.MANIFEST_ATTR_BRANCHING] = ''
+
+  @staticmethod
+  def _GetManifestHash(source, ignore_missing=False):
+    if isinstance(source, six.string_types):
+      try:
+        # TODO(build): convert this to osutils.ReadFile once these
+        # classes are moved out into their own module (if possible;
+        # may still be cyclic).
+        with open(source, 'rb') as f:
+          return hashlib.md5(f.read()).hexdigest()
+      except EnvironmentError as e:
+        if e.errno != errno.ENOENT or not ignore_missing:
+          raise
+    source.seek(0)
+    md5 = hashlib.md5(source.read()).hexdigest()
+    source.seek(0)
+    return md5
+
+  @classmethod
+  def Cached(cls, source, manifest_include_dir=None):
+    """Return an instance, reusing an existing one if possible.
+
+    |source| may be a seekable filehandle, or a filepath.
+    See __init__ for an explanation of these arguments.
+    """
+
+    md5 = cls._GetManifestHash(source)
+    obj, sources = cls._instance_cache.get(md5, (None, ()))
+    if manifest_include_dir is None and sources:
+      # We're being invoked in a different way than the original
+      # caching; disregard the cached entry.
+      # Most likely, the instantiation will explode; let it fly.
+      obj, sources = None, ()
+    for include_target, target_md5 in sources:
+      if cls._GetManifestHash(include_target, True) != target_md5:
+        obj = None
+        break
+    if obj is None:
+      obj = cls(source, manifest_include_dir=manifest_include_dir)
+      sources = tuple((abspath, cls._GetManifestHash(abspath))
+                      for (target, abspath) in obj.includes)
+      cls._instance_cache[md5] = (obj, sources)
+
+    return obj
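+
+  # Illustrative use (the path is a placeholder):
+  #
+  #   manifest = Manifest.Cached('/path/to/checkout/.repo/manifest.xml')
+  #   default_revision = manifest.default.get('revision')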
+
+
+class ManifestCheckout(Manifest):
+  """A Manifest Handler for a specific manifest checkout."""
+
+  _instance_cache = {}
+
+  def __init__(self, path, manifest_path=None, search=True):
+    """Initialize this instance.
+
+    Args:
+      path: Path into a manifest checkout (doesn't have to be the root).
+      manifest_path: If supplied, the manifest to use.  Else the manifest
+        in the root of the checkout is used.  May be a seekable file handle.
+      search: If True, the path can point into the repo, and the root will
+        be found automatically.  If False, the path *must* be the root, else
+        an OSError ENOENT will be thrown.
+
+    Raises:
+      OSError: if a failure occurs.
+    """
+    self.root, manifest_path = self._NormalizeArgs(
+        path, manifest_path, search=search)
+
+    self.manifest_path = os.path.realpath(manifest_path)
+    # The include dir is always the manifest repo, not where the manifest file
+    # happens to live.
+    manifest_include_dir = os.path.join(self.root, '.repo', 'manifests')
+    self.manifest_branch = self._GetManifestsBranch(self.root)
+    self._content_merging = {}
+    Manifest.__init__(self, self.manifest_path,
+                      manifest_include_dir=manifest_include_dir)
+
+  @staticmethod
+  def _NormalizeArgs(path, manifest_path=None, search=True):
+    root = FindRepoCheckoutRoot(path)
+    if root is None:
+      raise OSError(errno.ENOENT, "Couldn't find repo root: %s" % (path,))
+    root = os.path.normpath(os.path.realpath(root))
+    if not search:
+      if os.path.normpath(os.path.realpath(path)) != root:
+        raise OSError(errno.ENOENT, 'Path %s is not a repo root, and search '
+                      'is disabled.' % path)
+    if manifest_path is None:
+      manifest_path = os.path.join(root, '.repo', 'manifest.xml')
+    return root, manifest_path
+
+  @staticmethod
+  def IsFullManifest(checkout_root):
+    """Returns True iff the given checkout is using a full manifest.
+
+    This method should go away as part of the cleanup related to brbug.com/854.
+
+    Args:
+      checkout_root: path to the root of an SDK checkout.
+
+    Returns:
+      True iff the manifest selected for the given SDK is a full manifest.
+      In this context we'll accept any manifest for which there are no groups
+      defined.
+    """
+    manifests_git_repo = os.path.join(checkout_root, '.repo', 'manifests.git')
+    cmd = ['config', '--local', '--get', 'manifest.groups']
+    result = RunGit(manifests_git_repo, cmd, check=False)
+
+    if result.output.strip():
+      # Full layouts don't define groups.
+      return False
+
+    return True
+
+  def FindCheckouts(self, project, branch=None):
+    """Returns the list of checkouts for a given |project|/|branch|.
+
+    Args:
+      project: Project name to search for.
+      branch: Branch to use.
+
+    Returns:
+      A list of ProjectCheckout objects.
+    """
+    checkouts = []
+    for checkout in self.checkouts_by_name.get(project, []):
+      tracking_branch = checkout['tracking_branch']
+      if branch is None or StripRefs(branch) == StripRefs(tracking_branch):
+        checkouts.append(checkout)
+    return checkouts
+
+  def FindCheckout(self, project, branch=None, strict=True):
+    """Returns the checkout associated with a given project/branch.
+
+    Args:
+      project: The project to look for.
+      branch: The branch that the project is tracking.
+      strict: Raise AssertionError if a checkout cannot be found.
+
+    Returns:
+      A ProjectCheckout object.
+
+    Raises:
+      AssertionError if there is more than one checkout associated with the
+      given project/branch combination.
+    """
+    checkouts = self.FindCheckouts(project, branch)
+    if len(checkouts) < 1:
+      if strict:
+        raise AssertionError('Could not find checkout of %s' % (project,))
+      return None
+    elif len(checkouts) > 1:
+      raise AssertionError('Too many checkouts found for %s' % project)
+    return checkouts[0]
+
+  def ListCheckouts(self):
+    """List the checkouts in the manifest.
+
+    Returns:
+      A list of ProjectCheckout objects.
+    """
+    return list(self.checkouts_by_path.values())
+
+  def FindCheckoutFromPath(self, path, strict=True):
+    """Find the associated checkouts for a given |path|.
+
+    The |path| can either be to the root of a project, or within the
+    project itself (chromite.cbuildbot for example).  It may be relative
+    to the repo root, or an absolute path.  If |path| is not within a
+    checkout, return None.
+
+    Args:
+      path: Path to examine.
+      strict: If True, fail when no checkout is found.
+
+    Returns:
+      None if no checkout is found, else the checkout.
+    """
+    # Realpath everything sans the target to keep people happy about
+    # how symlinks are handled; exempt the final node since following
+    # through that is unlikely even remotely desired.
+    tmp = os.path.join(self.root, os.path.dirname(path))
+    path = os.path.join(os.path.realpath(tmp), os.path.basename(path))
+    path = os.path.normpath(path) + '/'
+    candidates = []
+    for checkout in self.ListCheckouts():
+      if path.startswith(checkout['local_path'] + '/'):
+        candidates.append((checkout['path'], checkout))
+
+    if not candidates:
+      if strict:
+        raise AssertionError('Could not find repo project at %s' % (path,))
+      return None
+
+    # The checkout with the greatest common path prefix is the owner of
+    # the given pathway. Return that.
+    return max(candidates)[1]
+
+  def _FinalizeAllProjectData(self):
+    """Rewrite projects mixing defaults in and adding our attributes."""
+    Manifest._FinalizeAllProjectData(self)
+    for key, value in self.checkouts_by_path.items():
+      self.checkouts_by_path[key] = ProjectCheckout(value)
+    for key, value in self.checkouts_by_name.items():
+      self.checkouts_by_name[key] = \
+          [ProjectCheckout(x) for x in value]
+
+  def _FinalizeProjectData(self, attrs):
+    Manifest._FinalizeProjectData(self, attrs)
+    attrs['local_path'] = os.path.join(self.root, attrs['path'])
+
+  @staticmethod
+  def _GetManifestsBranch(root):
+    """Get the tracking branch of the manifest repository.
+
+    Returns:
+      The branch name.
+    """
+    # Suppress the normal "if it ain't refs/heads, we don't want none o' that"
+    # check for the merge target; repo writes the ambigious form of the branch
+    # target for `repo init -u url -b some-branch` usages (aka, 'master'
+    # instead of 'refs/heads/master').
+    path = os.path.join(root, '.repo', 'manifests')
+    current_branch = GetCurrentBranch(path)
+    if current_branch != 'default':
+      raise OSError(errno.ENOENT,
+                    'Manifest repository at %s is checked out to %s.  '
+                    "It should be checked out to 'default'."
+                    % (root, 'detached HEAD' if current_branch is None
+                       else current_branch))
+
+    result = GetTrackingBranchViaGitConfig(
+        path, 'default', allow_broken_merge_settings=True, for_checkout=False)
+
+    if result is not None:
+      return StripRefsHeads(result.ref, False)
+
+    raise OSError(errno.ENOENT,
+                  "Manifest repository at %s is checked out to 'default', but "
+                  'the git tracking configuration for that branch is broken; '
+                  'failing due to that.' % (root,))
+
+  # pylint: disable=arguments-differ
+  @classmethod
+  def Cached(cls, path, manifest_path=None, search=True):
+    """Return an instance, reusing an existing one if possible.
+
+    Args:
+      path: The pathway into a checkout; the root will be found automatically.
+      manifest_path: If given, the manifest.xml to use instead of the
+        checkout's internal manifest.  Use with care.
+      search: If True, the path can point into the repo, and the root will
+        be found automatically.  If False, the path *must* be the root, else
+        an OSError ENOENT will be thrown.
+    """
+    root, manifest_path = cls._NormalizeArgs(path, manifest_path,
+                                             search=search)
+
+    md5 = cls._GetManifestHash(manifest_path)
+    obj, sources = cls._instance_cache.get((root, md5), (None, ()))
+    for include_target, target_md5 in sources:
+      if cls._GetManifestHash(include_target, True) != target_md5:
+        obj = None
+        break
+    if obj is None:
+      obj = cls(root, manifest_path=manifest_path)
+      sources = tuple((abspath, cls._GetManifestHash(abspath))
+                      for (target, abspath) in obj.includes)
+      cls._instance_cache[(root, md5)] = (obj, sources)
+    return obj
+
+
+def RunGit(git_repo, cmd, **kwargs):
+  """Wrapper for git commands.
+
+  This suppresses print_cmd, and suppresses output by default.  Git
+  functionality w/in this module should use this unless otherwise
+  warranted, to standardize git output (primarily, keeping it quiet
+  and being able to throw useful errors for it).
+
+  Args:
+    git_repo: Pathway to the git repo to operate on.
+    cmd: A sequence of the git subcommand to run.  The 'git' prefix is
+      added automatically.  If you wished to run 'git remote update',
+      this would be ['remote', 'update'] for example.
+    kwargs: Any run or GenericRetry options/overrides to use.
+
+  Returns:
+    A CommandResult object.
+  """
+  kwargs.setdefault('print_cmd', False)
+  kwargs.setdefault('cwd', git_repo)
+  kwargs.setdefault('capture_output', True)
+  kwargs.setdefault('encoding', 'utf-8')
+  return cros_build_lib.run(['git'] + cmd, **kwargs)
+
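+# Example (illustrative; the repository path below is a placeholder):
+#
+#   result = RunGit('/path/to/repo', ['status', '--short'], check=False)
+#   if result.returncode == 0:
+#     print(result.output)
+#
+# Since capture_output and encoding default to True/'utf-8', result.output is
+# a decoded string rather than raw bytes.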
+
+def Init(git_repo):
+  """Create a new git repository, in the given location.
+
+  Args:
+    git_repo: Path for where to create a git repo. Directory will be created if
+              it doesn't exist.
+  """
+  osutils.SafeMakedirs(git_repo)
+  RunGit(git_repo, ['init'])
+
+
+def Clone(dest_path, git_url, reference=None, depth=None, branch=None,
+          single_branch=False):
+  """Clone a git repository, into the given directory.
+
+  Args:
+    dest_path: Path to clone into. Will be created if it doesn't exist.
+    git_url: Git URL to clone from.
+    reference: Path to a git repository to reference in the clone. See
+      documentation for `git clone --reference`.
+    depth: Create a shallow clone with the given history depth. Cannot be used
+      with 'reference'.
+    branch: Branch to use for the initial HEAD. Defaults to the remote's HEAD.
+    single_branch: Clone only the requested branch.
+  """
+  if reference and depth:
+    raise ValueError('reference and depth are mutually exclusive')
+  osutils.SafeMakedirs(dest_path)
+  cmd = ['clone', git_url, dest_path]
+  if reference:
+    cmd += ['--reference', reference]
+  if depth:
+    cmd += ['--depth', str(int(depth))]
+  if branch:
+    cmd += ['--branch', branch]
+  if single_branch:
+    cmd += ['--single-branch']
+  RunGit(dest_path, cmd, print_cmd=True)
+
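+# Example sketch of a shallow, single-branch clone (destination and URL are
+# placeholders).  reference and depth are mutually exclusive, so only depth is
+# passed here:
+#
+#   Clone('/tmp/my-checkout', 'https://example.googlesource.com/some/project',
+#         depth=1, branch='master', single_branch=True)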
+
+def ShallowFetch(git_repo, git_url, sparse_checkout=None):
+  """Fetch a shallow git repository.
+
+  Args:
+    git_repo: Path of the git repo.
+    git_url: URL to fetch the git repository from.
+    sparse_checkout: List of file paths to fetch.
+  """
+  Init(git_repo)
+  RunGit(git_repo, ['remote', 'add', 'origin', git_url])
+  if sparse_checkout is not None:
+    assert isinstance(sparse_checkout, list)
+    RunGit(git_repo, ['config', 'core.sparsecheckout', 'true'])
+    osutils.WriteFile(os.path.join(git_repo, '.git/info/sparse-checkout'),
+                      '\n'.join(sparse_checkout))
+    logging.info('Sparse checkout: %s', sparse_checkout)
+
+  utcnow = datetime.datetime.utcnow
+  start = utcnow()
+  # Only fetch TOT git metadata without revision history.
+  RunGit(git_repo, ['fetch', '--depth=1'],
+         print_cmd=True, stderr=True, capture_output=False)
+  # Pull the files in sparse_checkout.
+  RunGit(git_repo, ['pull', 'origin', 'master'],
+         print_cmd=True, stderr=True, capture_output=False)
+  logging.info('ShallowFetch completed in %s.', utcnow() - start)
+
+
+def FindGitTopLevel(path):
+  """Returns the top-level directory of the given git working tree path."""
+  try:
+    ret = RunGit(path, ['rev-parse', '--show-toplevel'])
+    return ret.output.strip()
+  except cros_build_lib.RunCommandError:
+    return None
+
+
+def GetProjectUserEmail(git_repo):
+  """Get the email configured for the project."""
+  output = RunGit(git_repo, ['var', 'GIT_COMMITTER_IDENT']).output
+  m = re.search(r'<([^>]*)>', output.strip())
+  return m.group(1) if m else None
+
+
+def MatchBranchName(git_repo, pattern, namespace=''):
+  """Return branches who match the specified regular expression.
+
+  Args:
+    git_repo: The git repository to operate upon.
+    pattern: The regexp to search with.
+    namespace: The namespace to restrict search to (e.g. 'refs/heads/').
+
+  Returns:
+    List of matching branch names (with |namespace| trimmed).
+  """
+  output = RunGit(git_repo, ['ls-remote', git_repo, namespace + '*']).output
+  branches = [x.split()[1] for x in output.splitlines()]
+  branches = [x[len(namespace):] for x in branches if x.startswith(namespace)]
+
+  # Try exact match first.
+  match = re.compile(r'(^|/)%s$' % (pattern,), flags=re.I)
+  ret = [x for x in branches if match.search(x)]
+  if ret:
+    return ret
+
+  # Fall back to regex match if no exact match.
+  match = re.compile(pattern, flags=re.I)
+  return [x for x in branches if match.search(x)]
+
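+# Example (repository path is a placeholder): list remote heads whose name
+# contains 'release', restricted to refs/heads/ so tags are ignored.  An exact
+# name match is preferred; the regex match is only the fallback.
+#
+#   branches = MatchBranchName('/path/to/repo', 'release',
+#                              namespace='refs/heads/')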
+
+class AmbiguousBranchName(Exception):
+  """Error if given branch name matches too many branches."""
+
+
+def MatchSingleBranchName(*args, **kwargs):
+  """Match exactly one branch name, else throw an exception.
+
+  Args:
+    See MatchBranchName for more details; all args are passed on.
+
+  Returns:
+    The branch name.
+
+  Raises:
+    AmbiguousBranchName: If we did not match exactly one branch.
+  """
+  ret = MatchBranchName(*args, **kwargs)
+  if len(ret) != 1:
+    raise AmbiguousBranchName('Did not match exactly 1 branch: %r' % ret)
+  return ret[0]
+
+
+def GetTrackingBranchViaGitConfig(git_repo, branch, for_checkout=True,
+                                  allow_broken_merge_settings=False,
+                                  recurse=10):
+  """Pull the remote and upstream branch of a local branch
+
+  Args:
+    git_repo: The git repository to operate upon.
+    branch: The branch to inspect.
+    for_checkout: Whether to return localized refspecs, or the remote's
+      view of it.
+    allow_broken_merge_settings: Repo in a couple of spots writes invalid
+      branch.mybranch.merge settings; if these are encountered, they're
+      normally treated as an error and this function returns None.  If
+      this option is set to True, it suppresses this check.
+    recurse: If given and the target is local, then recurse through any
+      remote=. (aka locals).  This is enabled by default, and is what allows
+      developers to have multiple local branches of development dependent
+      on one another; disabling this makes that work flow impossible,
+      thus disable it only with good reason.  The value given controls how
+      deeply to recurse.  Defaults to tracing through 10 levels of local
+      remotes. Disabling it is a matter of passing 0.
+
+  Returns:
+    A RemoteRef, or None.  If for_checkout, then it returns the localized
+    version of it.
+  """
+  try:
+    cmd = ['config', '--get-regexp',
+           r'branch\.%s\.(remote|merge)' % re.escape(branch)]
+    data = RunGit(git_repo, cmd).output.splitlines()
+
+    prefix = 'branch.%s.' % (branch,)
+    data = [x.split() for x in data]
+    vals = dict((x[0][len(prefix):], x[1]) for x in data)
+    if len(vals) != 2:
+      if not allow_broken_merge_settings:
+        return None
+      elif 'merge' not in vals:
+        # There isn't anything we can do here.
+        return None
+      elif 'remote' not in vals:
+        # Repo v1.9.4 and up occasionally invalidly leave the remote out.
+        # Only occurs for the manifest repo fortunately.
+        vals['remote'] = 'origin'
+    remote, rev = vals['remote'], vals['merge']
+    # Suppress non branches; repo likes to write revisions and tags here,
+    # which is wrong (git will not honor it).
+    if rev.startswith('refs/remotes/'):
+      if for_checkout:
+        return RemoteRef(remote, rev)
+      # We can't backtrack from here, or at least don't want to.
+      # This is likely refs/remotes/m/ which repo writes when dealing
+      # with a revision locked manifest.
+      return None
+    if not rev.startswith('refs/heads/'):
+      # We explicitly don't allow pushing to tags, nor can one push
+      # to a sha1 remotely (makes no sense).
+      if not allow_broken_merge_settings:
+        return None
+    elif remote == '.':
+      if recurse == 0:
+        raise Exception(
+            'While tracing out tracking branches, we recursed too deeply: '
+            'bailing at %s' % branch)
+      return GetTrackingBranchViaGitConfig(
+          git_repo, StripRefsHeads(rev), for_checkout=for_checkout,
+          allow_broken_merge_settings=allow_broken_merge_settings,
+          recurse=recurse - 1)
+    elif for_checkout:
+      rev = 'refs/remotes/%s/%s' % (remote, StripRefsHeads(rev))
+    return RemoteRef(remote, rev)
+  except cros_build_lib.RunCommandError as e:
+    # 1 is the retcode for no matches.
+    if e.result.returncode != 1:
+      raise
+  return None
+
+
+def GetTrackingBranchViaManifest(git_repo, for_checkout=True, for_push=False,
+                                 manifest=None):
+  """Gets the appropriate push branch via the manifest if possible.
+
+  Args:
+    git_repo: The git repo to operate upon.
+    for_checkout: Whether to return localized refspecs, or the remote's
+      view of it.  Note that depending on the remote, the remote may differ
+      if for_push is True or set to False.
+    for_push: Controls whether the remote and refspec returned is explicitly
+      for pushing.
+    manifest: A Manifest instance if one is available, else a
+      ManifestCheckout is created and used.
+
+  Returns:
+    A RemoteRef, or None.  If for_checkout, then it returns the localized
+    version of it.
+  """
+  try:
+    if manifest is None:
+      manifest = ManifestCheckout.Cached(git_repo)
+
+    checkout = manifest.FindCheckoutFromPath(git_repo, strict=False)
+
+    if checkout is None:
+      return None
+
+    if for_push:
+      checkout.AssertPushable()
+
+    if for_push:
+      remote = checkout['push_remote']
+    else:
+      remote = checkout['remote']
+
+    if for_checkout:
+      revision = checkout['tracking_branch']
+    else:
+      revision = checkout['revision']
+      if not revision.startswith('refs/heads/'):
+        return None
+
+    project_name = checkout.get('name', None)
+
+    return RemoteRef(remote, revision, project_name=project_name)
+  except EnvironmentError as e:
+    if e.errno != errno.ENOENT:
+      raise
+  return None
+
+
+def GetTrackingBranch(git_repo, branch=None, for_checkout=True, fallback=True,
+                      manifest=None, for_push=False):
+  """Gets the appropriate push branch for the specified directory.
+
+  This function works on both repo projects and regular git checkouts.
+
+  Assumptions:
+   1. We assume the manifest defined upstream is desirable.
+   2. No manifest?  Assume the tracking configuration, if present, is accurate.
+   3. If none of the above apply, you get 'origin', 'master' or None,
+      depending on fallback.
+
+  Args:
+    git_repo: Git repository to operate upon.
+    branch: Find the tracking branch for this branch.  Defaults to the
+      current branch for |git_repo|.
+    for_checkout: Whether to return localized refspecs, or the remote's
+      view of it.
+    fallback: If true and no remote/branch could be discerned, return
+      'origin', 'master'.  If False, you get None.
+      Note that depending on the remote, the remote may differ
+      if for_push is True or set to False.
+    for_push: Controls whether the remote and refspec returned is explicitly
+      for pushing.
+    manifest: A Manifest instance if one is available, else a
+      ManifestCheckout is created and used.
+
+  Returns:
+    A RemoteRef, or None.
+  """
+  result = GetTrackingBranchViaManifest(git_repo, for_checkout=for_checkout,
+                                        manifest=manifest, for_push=for_push)
+  if result is not None:
+    return result
+
+  if branch is None:
+    branch = GetCurrentBranch(git_repo)
+  if branch:
+    result = GetTrackingBranchViaGitConfig(git_repo, branch,
+                                           for_checkout=for_checkout)
+    if result is not None:
+      if (result.ref.startswith('refs/heads/') or
+          result.ref.startswith('refs/remotes/')):
+        return result
+
+  if not fallback:
+    return None
+  if for_checkout:
+    return RemoteRef('origin', 'refs/remotes/origin/master')
+  return RemoteRef('origin', 'master')
+
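+# Example sketch (repository path is a placeholder): resolve where the current
+# branch would push to.  With fallback left at its default, a RemoteRef is
+# always returned, e.g. RemoteRef('origin', 'master') when nothing better can
+# be discerned.
+#
+#   ref = GetTrackingBranch('/path/to/repo', for_checkout=False, for_push=True)
+#   print(ref.remote, ref.ref)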
+
+def CreateBranch(git_repo, branch, branch_point='HEAD', track=False):
+  """Create a branch.
+
+  Args:
+    git_repo: Git repository to act on.
+    branch: Name of the branch to create.
+    branch_point: The ref to branch from.  Defaults to 'HEAD'.
+    track: Whether to setup the branch to track its starting ref.
+  """
+  cmd = ['checkout', '-B', branch, branch_point]
+  if track:
+    cmd.append('--track')
+  RunGit(git_repo, cmd)
+
+
+def AddPath(path):
+  """Use 'git add' on a path.
+
+  Args:
+    path: Path to the file to add; its directory is used as the git repo.
+  """
+  dirname, filename = os.path.split(path)
+  RunGit(dirname, ['add', '--', filename])
+
+
+def RmPath(path):
+  """Use 'git rm' on a file.
+
+  Args:
+    path: Path to the file to remove; its directory is used as the git repo.
+  """
+  dirname, filename = os.path.split(path)
+  RunGit(dirname, ['rm', '--', filename])
+
+
+def GetObjectAtRev(git_repo, obj, rev, binary=False):
+  """Return the contents of a git object at a particular revision.
+
+  This could be used to look at an old version of a file or directory, for
+  instance, without modifying the working directory.
+
+  Args:
+    git_repo: Path to a directory in the git repository to query.
+    obj: The name of the object to read.
+    rev: The revision to retrieve.
+    binary: If true, return bytes instead of decoding as a UTF-8 string.
+
+  Returns:
+    The content of the object.
+  """
+  rev_obj = '%s:%s' % (rev, obj)
+  encoding = None if binary else 'utf-8'
+  return RunGit(git_repo, ['show', rev_obj], encoding=encoding).output
+
+
+def RevertPath(git_repo, filename, rev):
+  """Revert a single file back to a particular revision and 'add' it with git.
+
+  Args:
+    git_repo: Path to the directory holding the file.
+    filename: Name of the file to revert.
+    rev: Revision to revert the file to.
+  """
+  RunGit(git_repo, ['checkout', rev, '--', filename])
+
+
+# In Log, we use "format" to refer to the --format flag to
+# git. Disable the nags from pylint.
+# pylint: disable=redefined-builtin
+def Log(git_repo, format=None, after=None, until=None,
+        reverse=False, date=None, max_count=None, grep=None,
+        rev='HEAD', paths=None):
+  """Return git log output for the given arguments.
+
+  For more detailed description of the parameters, run `git help log`.
+
+  Args:
+    git_repo: Path to a directory in the git repository.
+    format: Passed directly to the --format flag.
+    after: Passed directly to --after flag.
+    until: Passed directly to --until flag.
+    reverse: If true, set --reverse flag.
+    date: Passed directly to --date flag.
+    max_count: Passed directly to --max-count flag.
+    grep: Passed directly to --grep flag.
+    rev: Commit (or revision range) to log.
+    paths: List of paths to log commits for (enumerated after final -- ).
+
+  Returns:
+    The raw log output as a string.
+  """
+  cmd = ['log']
+  if format:
+    cmd.append('--format=%s' % format)
+  if after:
+    cmd.append('--after=%s' % after)
+  if until:
+    cmd.append('--until=%s' % until)
+  if reverse:
+    cmd.append('--reverse')
+  if date:
+    cmd.append('--date=%s' % date)
+  if max_count:
+    cmd.append('--max-count=%s' % max_count)
+  if grep:
+    cmd.append('--grep=%s' % grep)
+  cmd.append(rev)
+  if paths:
+    cmd.append('--')
+    cmd.extend(paths)
+  return RunGit(git_repo, cmd, errors='replace').stdout
+# pylint: enable=redefined-builtin
+
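+# Example (repository path is a placeholder): subjects of the five most recent
+# commits touching a subdirectory, printed oldest first.
+#
+#   text = Log('/path/to/repo', format='format:%s', max_count=5,
+#              reverse=True, paths=['some/subdir'])
+#   for subject in text.splitlines():
+#     print(subject)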
+
+def GetChangeId(git_repo, rev='HEAD'):
+  """Retrieve the Change-Id from the commit message
+
+  Args:
+    git_repo: Path to the git repository where the commit is
+    rev: Commit to inspect, defaults to HEAD
+
+  Returns:
+    The Gerrit Change-Id assigned to the commit if it exists.
+  """
+  log = Log(git_repo, max_count=1, format='format:%B', rev=rev)
+  m = re.findall(r'^Change-Id: (I[a-fA-F0-9]{40})$', log, flags=re.M)
+  if not m:
+    return None
+  elif len(m) > 1:
+    raise ValueError('Too many Change-Ids found')
+  else:
+    return m[0]
+
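+# Example (repository path is a placeholder): GetChangeId returns None when
+# HEAD carries no Change-Id trailer, so callers should handle both outcomes.
+#
+#   change_id = GetChangeId('/path/to/repo')
+#   if change_id is None:
+#     print('HEAD has no Change-Id; the Gerrit commit-msg hook adds one.')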
+
+def Commit(git_repo, message, amend=False, allow_empty=False,
+           reset_author=False):
+  """Commit with git.
+
+  Args:
+    git_repo: Path to the git repository to commit in.
+    message: Commit message to use.
+    amend: Whether to 'amend' the CL, default False
+    allow_empty: Whether to allow an empty commit. Default False.
+    reset_author: Whether to reset author according to current config.
+
+  Returns:
+    The Gerrit Change-ID assigned to the CL if it exists.
+  """
+  cmd = ['commit', '-m', message]
+  if amend:
+    cmd.append('--amend')
+  if allow_empty:
+    cmd.append('--allow-empty')
+  if reset_author:
+    cmd.append('--reset-author')
+  RunGit(git_repo, cmd)
+  return GetChangeId(git_repo)
+
+
+_raw_diff_components = ('src_mode', 'dst_mode', 'src_sha', 'dst_sha',
+                        'status', 'score', 'src_file', 'dst_file')
+# RawDiffEntry represents a line of raw formatted git diff output.
+RawDiffEntry = collections.namedtuple('RawDiffEntry', _raw_diff_components)
+
+
+# This regular expression pulls apart a line of raw formatted git diff output.
+DIFF_RE = re.compile(
+    r':(?P<src_mode>[0-7]*) (?P<dst_mode>[0-7]*) '
+    r'(?P<src_sha>[0-9a-f]*)(\.)* (?P<dst_sha>[0-9a-f]*)(\.)* '
+    r'(?P<status>[ACDMRTUX])(?P<score>[0-9]+)?\t'
+    r'(?P<src_file>[^\t]+)\t?(?P<dst_file>[^\t]+)?')
+
+
+def RawDiff(path, target):
+  """Return the parsed raw format diff of target
+
+  Args:
+    path: Path to the git repository to diff in.
+    target: The target to diff.
+
+  Returns:
+    A list of RawDiffEntry's.
+  """
+  entries = []
+
+  cmd = ['diff', '-M', '--raw', target]
+  diff = RunGit(path, cmd).output
+  diff_lines = diff.strip().splitlines()
+  for line in diff_lines:
+    match = DIFF_RE.match(line)
+    if not match:
+      raise GitException('Failed to parse diff output: %s' % line)
+    entries.append(RawDiffEntry(*match.group(*_raw_diff_components)))
+
+  return entries
+
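+# Example (repository path and range are placeholders): each entry exposes the
+# raw-diff columns as named fields, so a rename appears with status 'R', a
+# similarity score, and both src_file and dst_file set.
+#
+#   for entry in RawDiff('/path/to/repo', 'HEAD~1..HEAD'):
+#     print(entry.status, entry.src_file, entry.dst_file)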
+
+def UploadCL(git_repo, remote, branch, local_branch='HEAD', draft=False,
+             reviewers=None, **kwargs):
+  """Upload a CL to gerrit. The CL should be checked out currently.
+
+  Args:
+    git_repo: Path to the git repository with the CL to upload checked out.
+    remote: The remote to upload the CL to.
+    branch: Branch to upload to.
+    local_branch: Branch to upload.
+    draft: Whether to upload as a draft.
+    reviewers: Add the reviewers to the CL.
+    kwargs: Extra options for GitPush. capture_output defaults to False so
+      that the URL for new or updated CLs is shown to the user.
+  """
+  ref = ('refs/drafts/%s' if draft else 'refs/for/%s') % branch
+  if reviewers:
+    reviewer_list = ['r=%s' % i for i in reviewers]
+    ref = ref + '%'+ ','.join(reviewer_list)
+  remote_ref = RemoteRef(remote, ref)
+  kwargs.setdefault('capture_output', False)
+  kwargs.setdefault('stderr', subprocess.STDOUT)
+  return GitPush(git_repo, local_branch, remote_ref, **kwargs)
+
+
+def GitPush(git_repo, refspec, push_to, force=False, dry_run=False,
+            capture_output=True, skip=False, **kwargs):
+  """Wrapper for pushing to a branch.
+
+  Args:
+    git_repo: Git repository to act on.
+    refspec: The local ref to push to the remote.
+    push_to: A RemoteRef object representing the remote ref to push to.
+    force: Whether to bypass non-fastforward checks.
+    dry_run: If True, do everything except actually push the remote ref.
+    capture_output: Whether to capture output for this command.
+    skip: Log the git command that would have been run, but don't run it; this
+      avoids e.g. remote access checks that still apply to |dry_run|.
+  """
+  cmd = ['push', push_to.remote, '%s:%s' % (refspec, push_to.ref)]
+  if force:
+    cmd.append('--force')
+  if dry_run:
+    cmd.append('--dry-run')
+
+  if skip:
+    logging.info('Would have run "%s"', cmd)
+    return
+
+  return RunGit(git_repo, cmd, capture_output=capture_output,
+                **kwargs)
+
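+# Example sketch (repository path is a placeholder): push local branch
+# 'my-branch' to refs/heads/master on the 'cros' remote as a dry run, so the
+# server still validates the ref but nothing is updated.
+#
+#   GitPush('/path/to/repo', 'my-branch',
+#           RemoteRef('cros', 'refs/heads/master'), dry_run=True)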
+
+# TODO(build): Switch callers of this function to use CreateBranch instead.
+def CreatePushBranch(branch, git_repo, sync=True, remote_push_branch=None):
+  """Create a local branch for pushing changes inside a repo repository.
+
+  Args:
+    branch: Local branch to create.
+    git_repo: Git repository to create the branch in.
+    sync: Update remote before creating push branch.
+    remote_push_branch: A RemoteRef to push to. i.e.,
+                        RemoteRef('cros', 'master').  By default it tries to
+                        automatically determine which tracking branch to use
+                        (see GetTrackingBranch()).
+  """
+  if not remote_push_branch:
+    remote_push_branch = GetTrackingBranch(git_repo, for_push=True)
+
+  if sync:
+    cmd = ['remote', 'update', remote_push_branch.remote]
+    RunGit(git_repo, cmd)
+
+  RunGit(git_repo, ['checkout', '-B', branch, '-t', remote_push_branch.ref])
+
+
+def SyncPushBranch(git_repo, remote, target, use_merge=False, **kwargs):
+  """Sync and rebase or merge a local push branch to the latest remote version.
+
+  Args:
+    git_repo: Git repository to rebase in.
+    remote: The remote returned by GetTrackingBranch(for_push=True)
+    target: The branch name returned by GetTrackingBranch().  Must
+      start with refs/remotes/ (specifically must be a proper remote
+      target rather than an ambiguous name).
+    use_merge: Default: False. If True, use merge to bring local branch up to
+      date with remote branch. Otherwise, use rebase.
+    kwargs: Arguments passed through to RunGit.
+  """
+  subcommand = 'merge' if use_merge else 'rebase'
+
+  if not target.startswith('refs/remotes/'):
+    raise Exception(
+        'Was asked to %s to a non-branch target within the push pathways.  '
+        'This is highly indicative of an internal bug.  remote %s, %s %s'
+        % (subcommand, remote, subcommand, target))
+
+  cmd = ['remote', 'update', remote]
+  RunGit(git_repo, cmd, **kwargs)
+
+  try:
+    RunGit(git_repo, [subcommand, target], **kwargs)
+  except cros_build_lib.RunCommandError:
+    # Looks like our change conflicts with upstream. Cleanup our failed
+    # rebase.
+    RunGit(git_repo, [subcommand, '--abort'], check=False, **kwargs)
+    raise
+
+
+def PushBranch(branch, git_repo, dryrun=False,
+               staging_branch=None, auto_merge=False):
+  """General method to push local git changes.
+
+  This method only works with branches created via the CreatePushBranch
+  function.
+
+  Args:
+    branch: Local branch to push.  Branch should have already been created
+      with a local change committed ready to push to the remote branch.  Must
+      also already be checked out to that branch.
+    git_repo: Git repository to push from.
+    dryrun: Git push --dry-run if set to True.
+    staging_branch: Push change commits to the staging_branch if it's not None
+    auto_merge: Enable Gerrit's auto-merge feature. See here for more info:
+      https://gerrit-review.googlesource.com/Documentation/user-upload.html#auto_merge
+      Note: The setting must be enabled in Gerrit UI for the specific repo.
+
+  Raises:
+    GitPushFailed if push was unsuccessful after retries
+  """
+  remote_ref = GetTrackingBranch(git_repo, branch, for_checkout=False,
+                                 for_push=True)
+  # Don't like invoking this twice, but there is a bit of API
+  # impedance here; cros_mark_as_stable
+  local_ref = GetTrackingBranch(git_repo, branch, for_push=True)
+
+  if not remote_ref.ref.startswith('refs/heads/'):
+    raise Exception('Was asked to push to a non-branch namespace: %s' %
+                    remote_ref.ref)
+
+  if auto_merge:
+    remote_ref = RemoteRef(remote=remote_ref.remote,
+                           ref=remote_ref.ref.replace(
+                               'heads', 'for', 1) + '%notify=NONE,submit',
+                           project_name=remote_ref.project_name)
+  # reference = staging_branch if staging_branch is not None else remote_ref.ref
+  if staging_branch is not None:
+    remote_ref = remote_ref._replace(ref=staging_branch)
+
+  logging.debug('Trying to push %s to %s:%s',
+                git_repo, branch, remote_ref.ref)
+
+  SyncPushBranch(git_repo, remote_ref.remote, local_ref.ref)
+
+  GitPush(git_repo, branch, remote_ref, skip=dryrun, print_cmd=True,
+          debug_level=logging.DEBUG)
+
+  logging.info('Successfully pushed %s to %s %s:%s',
+               git_repo, remote_ref.remote, branch, remote_ref.ref)
+
+
+def CleanAndDetachHead(git_repo):
+  """Remove all local changes and checkout a detached head.
+
+  Args:
+    git_repo: Directory of git repository.
+  """
+  RunGit(git_repo, ['am', '--abort'], check=False)
+  RunGit(git_repo, ['rebase', '--abort'], check=False)
+  RunGit(git_repo, ['clean', '-dfx'])
+  RunGit(git_repo, ['checkout', '--detach', '-f', 'HEAD'])
+
+
+def CleanAndCheckoutUpstream(git_repo, refresh_upstream=True):
+  """Remove all local changes and checkout the latest origin.
+
+  All local changes in the supplied repo will be removed. The branch will
+  also be switched to a detached head pointing at the latest origin.
+
+  Args:
+    git_repo: Directory of git repository.
+    refresh_upstream: If True, run a remote update prior to checking it out.
+  """
+  remote_ref = GetTrackingBranch(git_repo, for_push=refresh_upstream)
+  CleanAndDetachHead(git_repo)
+  if refresh_upstream:
+    RunGit(git_repo, ['remote', 'update', remote_ref.remote])
+  RunGit(git_repo, ['checkout', remote_ref.ref])
+
+
+def GetChromiteTrackingBranch():
+  """Returns the remote branch associated with chromite."""
+  cwd = os.path.dirname(os.path.realpath(__file__))
+  result_ref = GetTrackingBranch(cwd, for_checkout=False, fallback=False)
+  if result_ref:
+    branch = result_ref.ref
+    if branch.startswith('refs/heads/'):
+      # Normal scenario.
+      return StripRefsHeads(branch)
+    # Reaching here means it was refs/remotes/m/blah, or just plain invalid,
+    # or that we're on a detached head in a repo not managed by chromite.
+
+  # Manually try the manifest next.
+  try:
+    manifest = ManifestCheckout.Cached(cwd)
+    # Ensure the manifest knows of this checkout.
+    if manifest.FindCheckoutFromPath(cwd, strict=False):
+      return manifest.manifest_branch
+  except EnvironmentError as e:
+    if e.errno != errno.ENOENT:
+      raise
+
+  # Not a manifest checkout.
+  logging.notice(
+      "Chromite checkout at %s isn't controlled by repo, nor is it on a "
+      'branch (or if it is, the tracking configuration is missing or broken).  '
+      'Falling back to assuming the chromite checkout is derived from '
+      "'master'; this *may* result in breakage." % cwd)
+  return 'master'
+
+
+def GarbageCollection(git_repo, prune_all=False):
+  """Cleanup unnecessary files and optimize the local repository.
+
+  Args:
+    git_repo: Directory of git repository.
+    prune_all: If True, prune all loose objects regardless of gc.pruneExpire.
+  """
+  # Use --auto so it only runs if housekeeping is necessary.
+  cmd = ['gc', '--auto']
+  if prune_all:
+    cmd.append('--prune=all')
+  RunGit(git_repo, cmd)
+
+
+def DeleteStaleLocks(git_repo):
+  """Clean up stale locks left behind in a git repo.
+
+  This might occur if an earlier git command was killed during an operation.
+  Warning: This is dangerous because these locks are intended to prevent
+  corruption. Only use this if you are sure that no other git process is
+  accessing the repo (such as at the beginning of a fresh build).
+
+  Args"
+    git_repo: Directory of git repository.
+  """
+  git_gitdir = GetGitGitdir(git_repo)
+  if not git_gitdir:
+    raise GitException('Not a valid git repo: %s' % git_repo)
+
+  for root, _, filenames in os.walk(git_gitdir):
+    for filename in fnmatch.filter(filenames, '*.lock'):
+      p = os.path.join(root, filename)
+      logging.info('Found stale git lock, removing: %s', p)
+      os.remove(p)
diff --git a/utils/frozen_chromite/lib/gob_util.py b/utils/frozen_chromite/lib/gob_util.py
new file mode 100644
index 0000000..eb33abb
--- /dev/null
+++ b/utils/frozen_chromite/lib/gob_util.py
@@ -0,0 +1,856 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for requesting information for a gerrit server via https.
+
+https://gerrit-review.googlesource.com/Documentation/rest-api.html
+"""
+
+from __future__ import print_function
+
+import datetime
+import json
+import os
+import re
+import socket
+import sys
+import warnings
+
+import httplib2
+try:
+  from oauth2client import gce
+except ImportError:  # Newer oauth2client versions put it in .contrib
+  # pylint: disable=import-error,no-name-in-module
+  from oauth2client.contrib import gce
+import six
+from six.moves import html_parser as HTMLParser
+from six.moves import http_client as httplib
+from six.moves import http_cookiejar as cookielib
+from six.moves import urllib
+
+from autotest_lib.utils.frozen_chromite.lib import auth
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import git
+from autotest_lib.utils.frozen_chromite.lib import retry_util
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.utils import memoize
+
+
+_GAE_VERSION = 'GAE_VERSION'
+
+
+class ErrorParser(HTMLParser.HTMLParser):
+  """Class to parse GOB error message reported as HTML.
+
+  Only data inside <div id='af-error-container'> section is retrieved from the
+  GOB error message. Retrieved data is processed as follows:
+
+  - newlines are removed
+  - each <br> tag is replaced with '\n'
+  - each <p> tag is replaced with '\n\n'
+  """
+
+  def __init__(self):
+    HTMLParser.HTMLParser.__init__(self)
+    self.in_div = False
+    self.err_data = ''
+
+  def handle_starttag(self, tag, attrs):
+    tag_id = [x[1] for x in attrs if x[0] == 'id']
+    if tag == 'div' and tag_id and tag_id[0] == 'af-error-container':
+      self.in_div = True
+      return
+
+    if self.in_div:
+      if tag == 'p':
+        self.err_data += '\n\n'
+        return
+
+      if tag == 'br':
+        self.err_data += '\n'
+        return
+
+  def handle_endtag(self, tag):
+    if tag == 'div':
+      self.in_div = False
+
+  def handle_data(self, data):
+    if self.in_div:
+      self.err_data += data.replace('\n', '')
+
+  def ParsedDiv(self):
+    return self.err_data.strip()
+
+
+@memoize.Memoize
+def _GetAppCredentials():
+  """Returns the singleton Appengine credentials for gerrit code review."""
+  return gce.AppAssertionCredentials(
+      scope='https://www.googleapis.com/auth/gerritcodereview')
+
+
+TRY_LIMIT = 11
+SLEEP = 0.5
+REQUEST_TIMEOUT_SECONDS = 120  # 2 minutes.
+
+# Controls the transport protocol used to communicate with Gerrit servers using
+# git. This is parameterized primarily to enable cros_test_lib.GerritTestCase.
+GIT_PROTOCOL = 'https'
+
+# The GOB conflict errors which could be ignorable.
+GOB_CONFLICT_ERRORS = (
+    br'change is closed',
+    br'Cannot reduce vote on labels for closed change',
+)
+
+GOB_CONFLICT_ERRORS_RE = re.compile(br'|'.join(GOB_CONFLICT_ERRORS),
+                                    re.IGNORECASE)
+
+GOB_ERROR_REASON_CLOSED_CHANGE = 'CLOSED CHANGE'
+
+
+class GOBError(Exception):
+  """Exception class for errors commuicating with the gerrit-on-borg service."""
+  def __init__(self, http_status=None, reason=None):
+    self.http_status = http_status
+    self.reason = reason
+
+    message = ''
+    if http_status is not None:
+      message += '(http_status): %d' % (http_status,)
+    if reason is not None:
+      message += '(reason): %s' % (reason,)
+    if not message:
+      message = 'Unknown error'
+
+    super(GOBError, self).__init__(message)
+
+
+class InternalGOBError(GOBError):
+  """Exception class for GOB errors with status >= 500"""
+
+
+def _QueryString(param_dict, first_param=None):
+  """Encodes query parameters in the key:val[+key:val...] format specified here:
+
+  https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
+  """
+  q = [urllib.parse.quote(first_param)] if first_param else []
+  q.extend(['%s:%s' % (key, val) for key, val in param_dict.items()])
+  return '+'.join(q)
+
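+# Example: the call below encodes to something like
+# 'open+owner:self+project:foo' (dict ordering determines the order of the
+# key:val terms; values here are placeholders).  No escaping is applied to the
+# dict entries, only to first_param.
+#
+#   _QueryString({'owner': 'self', 'project': 'foo'}, first_param='open')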
+
+def GetCookies(host, path, cookie_paths=None):
+  """Returns cookies that should be set on a request.
+
+  Used by CreateHttpConn for any requests that do not already specify a Cookie
+  header. All requests made by this library are HTTPS.
+
+  Args:
+    host: The hostname of the Gerrit service.
+    path: The path on the Gerrit service, already including /a/ if applicable.
+    cookie_paths: Files to look in for cookies. Defaults to looking in the
+      standard places where GoB places cookies.
+
+  Returns:
+    A dict of cookie name to value, with no URL encoding applied.
+  """
+  cookies = {}
+  if cookie_paths is None:
+    cookie_paths = (constants.GOB_COOKIE_PATH, constants.GITCOOKIES_PATH)
+  for cookie_path in cookie_paths:
+    if os.path.isfile(cookie_path):
+      with open(cookie_path) as f:
+        for line in f:
+          fields = line.strip().split('\t')
+          if line.strip().startswith('#') or len(fields) != 7:
+            continue
+          domain, xpath, key, value = fields[0], fields[2], fields[5], fields[6]
+          if cookielib.domain_match(host, domain) and path.startswith(xpath):
+            cookies[key] = value
+  return cookies
+
+
+def CreateHttpConn(host, path, reqtype='GET', headers=None, body=None):
+  """Opens an https connection to a gerrit service, and sends a request."""
+  path = '/a/' + path.lstrip('/')
+  headers = headers or {}
+  if _InAppengine():
+    # TODO(phobbs) how can we choose to only run this on GCE / AppEngine?
+    credentials = _GetAppCredentials()
+    try:
+      headers.setdefault(
+          'Authorization',
+          'Bearer %s' % credentials.get_access_token().access_token)
+    except gce.HttpAccessTokenRefreshError as e:
+      logging.debug('Failed to retrieve gce access token: %s', e)
+    except httplib2.ServerNotFoundError:
+      # Not in an Appengine or GCE environment.
+      pass
+
+  cookies = GetCookies(host, path)
+  if 'Cookie' not in headers and cookies:
+    headers['Cookie'] = '; '.join('%s=%s' % (n, v) for n, v in cookies.items())
+  elif 'Authorization' not in headers:
+    try:
+      git_creds = auth.GitCreds()
+    except auth.AccessTokenError:
+      git_creds = None
+    if git_creds:
+      headers.setdefault('Authorization', 'Bearer %s' % git_creds)
+    else:
+      logging.debug(
+          'No gitcookies file, Appengine credentials, or LUCI git creds found.')
+
+  if 'User-Agent' not in headers:
+    # We may not be in a git repository.
+    try:
+      version = git.GetGitRepoRevision(
+          os.path.dirname(os.path.realpath(__file__)))
+    except cros_build_lib.RunCommandError:
+      version = 'unknown'
+    headers['User-Agent'] = ' '.join((
+        'autotest.chromite.lib.gob_util',
+        os.path.basename(sys.argv[0]),
+        version,
+    ))
+
+  if body:
+    body = json.JSONEncoder().encode(body)
+    headers.setdefault('Content-Type', 'application/json')
+  if logging.getLogger().isEnabledFor(logging.DEBUG):
+    logging.debug('%s https://%s%s', reqtype, host, path)
+    for key, val in headers.items():
+      if key.lower() in ('authorization', 'cookie'):
+        val = 'HIDDEN'
+      logging.debug('%s: %s', key, val)
+    if body:
+      logging.debug(body)
+  conn = httplib.HTTPSConnection(host)
+  conn.req_host = host
+  conn.req_params = {
+      'url': path,
+      'method': reqtype,
+      'headers': headers,
+      'body': body,
+  }
+  conn.request(**conn.req_params)
+  return conn
+
+
+def _InAppengine():
+  """Returns whether we're in the Appengine environment."""
+  return _GAE_VERSION in os.environ
+
+
+def FetchUrl(host, path, reqtype='GET', headers=None, body=None,
+             ignore_204=False, ignore_404=True):
+  """Fetches the http response from the specified URL.
+
+  Args:
+    host: The hostname of the Gerrit service.
+    path: The path on the Gerrit service. This will be prefixed with '/a'
+          automatically.
+    reqtype: The request type. Can be GET or POST.
+    headers: A mapping of extra HTTP headers to pass in with the request.
+    body: A string of data to send after the headers are finished.
+    ignore_204: for some requests gerrit-on-borg will return 204 to confirm
+                proper processing of the request. When processing responses to
+                these requests we should expect this status.
+    ignore_404: For many requests, gerrit-on-borg will return 404 if the request
+                doesn't match the database contents.  In most such cases, we
+                want the API to return None rather than raise an Exception.
+
+  Returns:
+    The connection's reply, as bytes.
+  """
+  @timeout_util.TimeoutDecorator(REQUEST_TIMEOUT_SECONDS)
+  def _FetchUrlHelper():
+    err_prefix = 'A transient error occurred while querying %s:\n' % (host,)
+    try:
+      conn = CreateHttpConn(host, path, reqtype=reqtype, headers=headers,
+                            body=body)
+      response = conn.getresponse()
+    except socket.error as ex:
+      logging.warning('%s%s', err_prefix, str(ex))
+      raise
+
+    # Normal/good responses.
+    response_body = response.read()
+    if response.status == 204 and ignore_204:
+      # This exception is used to confirm expected response status.
+      raise GOBError(http_status=response.status, reason=response.reason)
+    if response.status == 404 and ignore_404:
+      return b''
+    elif response.status == 200:
+      return response_body
+
+    # Bad responses.
+    logging.debug('response msg:\n%s', response.msg)
+    http_version = 'HTTP/%s' % ('1.1' if response.version == 11 else '1.0')
+    msg = ('%s %s %s\n%s %d %s\nResponse body: %r' %
+           (reqtype, conn.req_params['url'], http_version,
+            http_version, response.status, response.reason,
+            response_body))
+
+    # Ones we can retry.
+    if response.status >= 500:
+      # A status >=500 is assumed to be a possible transient error; retry.
+      logging.warning('%s%s', err_prefix, msg)
+      raise InternalGOBError(
+          http_status=response.status,
+          reason=response.reason)
+
+    # Ones we cannot retry.
+    home = os.environ.get('HOME', '~')
+    url = 'https://%s/new-password' % host
+    if response.status in (302, 303, 307):
+      err_prefix = ('Redirect found; missing/bad %s/.gitcookies credentials or '
+                    'permissions (0600)?\n See %s' % (home, url))
+    elif response.status in (400,):
+      err_prefix = 'Permission error; talk to the admins of the GoB instance'
+    elif response.status in (401,):
+      err_prefix = ('Authorization error; missing/bad %s/.gitcookies '
+                    'credentials or permissions (0600)?\n See %s' % (home, url))
+    elif response.status in (422,):
+      err_prefix = ('Bad request body?')
+
+    logging.warning(err_prefix)
+
+    # If GOB output contained expected error message, reduce log visibility of
+    # raw GOB output reported below.
+    ep = ErrorParser()
+    ep.feed(response_body.decode('utf-8'))
+    ep.close()
+    parsed_div = ep.ParsedDiv()
+    if parsed_div:
+      logging.warning('GOB Error:\n%s', parsed_div)
+      logging_function = logging.debug
+    else:
+      logging_function = logging.warning
+
+    logging_function(msg)
+    if response.status >= 400:
+      # The 'X-ErrorId' header is set only on >= 400 response code.
+      logging_function('X-ErrorId: %s', response.getheader('X-ErrorId'))
+
+    try:
+      logging.warning('conn.sock.getpeername(): %s', conn.sock.getpeername())
+    except AttributeError:
+      logging.warning('peer name unavailable')
+
+    if response.status == httplib.CONFLICT:
+      # 409 conflict
+      if GOB_CONFLICT_ERRORS_RE.search(response_body):
+        raise GOBError(
+            http_status=response.status,
+            reason=GOB_ERROR_REASON_CLOSED_CHANGE)
+      else:
+        raise GOBError(http_status=response.status, reason=response.reason)
+    else:
+      raise GOBError(http_status=response.status, reason=response.reason)
+
+  return retry_util.RetryException(
+      (socket.error, InternalGOBError, timeout_util.TimeoutError),
+      TRY_LIMIT,
+      _FetchUrlHelper, sleep=SLEEP, backoff_factor=2)
+
+
+def FetchUrlJson(*args, **kwargs):
+  """Fetch the specified URL and parse it as JSON.
+
+  See FetchUrl for arguments.
+  """
+  fh = FetchUrl(*args, **kwargs)
+
+  # In case ignore_404 is True, we want to return None instead of
+  # raising an exception.
+  if not fh:
+    return None
+
+  # The first line of the response should always be: )]}'
+  if not fh.startswith(b")]}'"):
+    raise GOBError(http_status=200, reason='Unexpected json output: %r' % fh)
+
+  _, _, json_data = fh.partition(b'\n')
+  return json.loads(json_data)
+
+
+def QueryChanges(host, param_dict, first_param=None, limit=None, o_params=None,
+                 start=None):
+  """Queries a gerrit-on-borg server for changes matching query terms.
+
+  Args:
+    host: The Gerrit server hostname.
+    param_dict: A dictionary of search parameters, as documented here:
+        https://gerrit-review.googlesource.com/Documentation/user-search.html
+    first_param: A change identifier
+    limit: Maximum number of results to return.
+    o_params: A list of additional output specifiers, as documented here:
+        https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
+    start: Offset in the result set to start at.
+
+  Returns:
+    A list of json-decoded query results.
+  """
+  # Note that no attempt is made to escape special characters; YMMV.
+  if not param_dict and not first_param:
+    raise RuntimeError('QueryChanges requires search parameters')
+  path = 'changes/?q=%s' % _QueryString(param_dict, first_param)
+  if start:
+    path = '%s&S=%d' % (path, start)
+  if limit:
+    path = '%s&n=%d' % (path, limit)
+  if o_params:
+    path = '%s&%s' % (path, '&'.join(['o=%s' % p for p in o_params]))
+  # Don't ignore 404; a query should always return a list, even if it's empty.
+  return FetchUrlJson(host, path, ignore_404=False)
+
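+# Example sketch (hostname and project are placeholders): fetch up to 10 open
+# changes for a project, asking Gerrit to include the current revision of each
+# result.
+#
+#   changes = QueryChanges('gerrit-review.example.com',
+#                          {'project': 'foo', 'status': 'open'},
+#                          limit=10, o_params=['CURRENT_REVISION'])
+#   for c in changes:
+#     print(c['_number'], c.get('subject'))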
+
+def MultiQueryChanges(host, param_dict, change_list, limit=None, o_params=None,
+                      start=None):
+  """Initiate a query composed of multiple sets of query parameters."""
+  if not change_list:
+    raise RuntimeError(
+        "MultiQueryChanges requires a list of change numbers/id's")
+  q = ['q=%s' % '+OR+'.join(urllib.parse.quote(str(x)) for x in change_list)]
+  if param_dict:
+    q.append(_QueryString(param_dict))
+  if limit:
+    q.append('n=%d' % limit)
+  if start:
+    q.append('S=%s' % start)
+  if o_params:
+    q.extend(['o=%s' % p for p in o_params])
+  path = 'changes/?%s' % '&'.join(q)
+  try:
+    result = FetchUrlJson(host, path, ignore_404=False)
+  except GOBError as e:
+    msg = '%s:\n%s' % (e, path)
+    raise GOBError(http_status=e.http_status, reason=msg)
+  return result
+
+
+def GetGerritFetchUrl(host):
+  """Given a gerrit host name returns URL of a gerrit instance to fetch from."""
+  return 'https://%s/' % host
+
+
+def GetChangePageUrl(host, change_number):
+  """Given a gerrit host name and change number, return change page url."""
+  return 'https://%s/#/c/%d/' % (host, change_number)
+
+
+def _GetChangePath(change):
+  """Given a change id, return a path prefix for the change."""
+  return 'changes/%s' % str(change).replace('/', '%2F')
+
+
+def GetChangeUrl(host, change):
+  """Given a gerrit host name and change id, return an url for the change."""
+  return 'https://%s/a/%s' % (host, _GetChangePath(change))
+
+
+def GetChange(host, change):
+  """Query a gerrit server for information about a single change."""
+  return FetchUrlJson(host, _GetChangePath(change))
+
+
+def GetChangeReview(host, change, revision=None):
+  """Get the current review information for a change."""
+  if revision is None:
+    revision = 'current'
+  path = '%s/revisions/%s/review' % (_GetChangePath(change), revision)
+  return FetchUrlJson(host, path)
+
+
+def GetChangeCommit(host, change, revision=None):
+  """Get the current review information for a change."""
+  if revision is None:
+    revision = 'current'
+  path = '%s/revisions/%s/commit' % (_GetChangePath(change), revision)
+  return FetchUrlJson(host, path)
+
+
+def GetChangeCurrentRevision(host, change):
+  """Get information about the latest revision for a given change."""
+  jmsg = GetChangeReview(host, change)
+  if jmsg:
+    return jmsg.get('current_revision')
+
+
+def GetChangeDetail(host, change, o_params=None):
+  """Query a gerrit server for extended information about a single change."""
+  path = '%s/detail' % _GetChangePath(change)
+  if o_params:
+    path = '%s?%s' % (path, '&'.join(['o=%s' % p for p in o_params]))
+  return FetchUrlJson(host, path)
+
+
+def GetChangeReviewers(host, change):
+  """Get information about all reviewers attached to a change.
+
+  Args:
+    host: The Gerrit host to interact with.
+    change: The Gerrit change ID.
+  """
+  warnings.warn('GetChangeReviewers is deprecated; use GetReviewers instead.')
+  return GetReviewers(host, change)
+
+
+def ReviewedChange(host, change):
+  """Mark a gerrit change as reviewed."""
+  path = '%s/reviewed' % _GetChangePath(change)
+  return FetchUrlJson(host, path, reqtype='PUT', ignore_404=False)
+
+
+def UnreviewedChange(host, change):
+  """Mark a gerrit change as unreviewed."""
+  path = '%s/unreviewed' % _GetChangePath(change)
+  return FetchUrlJson(host, path, reqtype='PUT', ignore_404=False)
+
+
+def IgnoreChange(host, change):
+  """Ignore a gerrit change."""
+  path = '%s/ignore' % _GetChangePath(change)
+  return FetchUrlJson(host, path, reqtype='PUT', ignore_404=False)
+
+
+def UnignoreChange(host, change):
+  """Unignore a gerrit change."""
+  path = '%s/unignore' % _GetChangePath(change)
+  return FetchUrlJson(host, path, reqtype='PUT', ignore_404=False)
+
+
+def AbandonChange(host, change, msg=''):
+  """Abandon a gerrit change."""
+  path = '%s/abandon' % _GetChangePath(change)
+  body = {'message': msg}
+  return FetchUrlJson(host, path, reqtype='POST', body=body, ignore_404=False)
+
+
+def RestoreChange(host, change, msg=''):
+  """Restore a previously abandoned change."""
+  path = '%s/restore' % _GetChangePath(change)
+  body = {'message': msg}
+  return FetchUrlJson(host, path, reqtype='POST', body=body, ignore_404=False)
+
+
+def DeleteDraft(host, change):
+  """Delete a gerrit draft change."""
+  path = _GetChangePath(change)
+  try:
+    FetchUrl(host, path, reqtype='DELETE', ignore_204=True, ignore_404=False)
+  except GOBError as e:
+    # On success, gerrit returns status 204; anything else is an error.
+    if e.http_status != 204:
+      raise
+  else:
+    raise GOBError(
+        http_status=200,
+        reason='Unexpectedly received a 200 http status while deleting draft '
+               '%r' % change)
+
+
+def SubmitChange(host, change, revision=None, wait_for_merge=True):
+  """Submits a gerrit change via Gerrit."""
+  if revision is None:
+    revision = 'current'
+  path = '%s/revisions/%s/submit' % (_GetChangePath(change), revision)
+  body = {'wait_for_merge': wait_for_merge}
+  return FetchUrlJson(host, path, reqtype='POST', body=body, ignore_404=False)
+
+
+def CheckChange(host, change, sha1=None):
+  """Performs consistency checks on the change, and fixes inconsistencies.
+
+  This is useful for forcing Gerrit to check whether a change has already been
+  merged into the git repo. Namely, if |sha1| is provided and the change is in
+  'NEW' status, Gerrit will check if a change with that |sha1| is in the repo
+  and mark the change as 'MERGED' if it exists.
+
+  Args:
+    host: The Gerrit host to interact with.
+    change: The Gerrit change ID.
+    sha1: An optional hint of the commit's SHA1 in Git.
+  """
+  path = '%s/check' % (_GetChangePath(change),)
+  if sha1:
+    body, headers = {'expect_merged_as': sha1}, {}
+  else:
+    body, headers = {}, {'Content-Length': '0'}
+
+  return FetchUrlJson(host, path, reqtype='POST',
+                      body=body, ignore_404=False,
+                      headers=headers)
+
+
+def GetAssignee(host, change):
+  """Get assignee for a change."""
+  path = '%s/assignee' % _GetChangePath(change)
+  return FetchUrlJson(host, path)
+
+
+def AddAssignee(host, change, assignee):
+  """Add reviewers to a change.
+
+  Args:
+    host: The Gerrit host to interact with.
+    change: The Gerrit change ID.
+    assignee: Gerrit account email as a string
+  """
+  path = '%s/assignee' % _GetChangePath(change)
+  body = {'assignee': assignee}
+  return FetchUrlJson(host, path, reqtype='PUT', body=body, ignore_404=False)
+
+
+def MarkPrivate(host, change):
+  """Marks the given CL as private.
+
+  Args:
+    host: The gob host to interact with.
+    change: CL number on the given host.
+  """
+  path = '%s/private' % _GetChangePath(change)
+  try:
+    FetchUrlJson(host, path, reqtype='POST', ignore_404=False)
+  except GOBError as e:
+    # 201: created -- change was successfully marked private.
+    if e.http_status != 201:
+      raise
+  else:
+    raise GOBError(
+        http_status=200,
+        reason='Change was already marked private',
+    )
+
+
+def MarkNotPrivate(host, change):
+  """Sets the private bit on given CL to False.
+
+  Args:
+    host: The gob host to interact with.
+    change: CL number on the given host.
+  """
+  path = '%s/private.delete' % _GetChangePath(change)
+  try:
+    FetchUrlJson(host, path, reqtype='POST', ignore_404=False, ignore_204=True)
+  except GOBError as e:
+    if e.http_status == 204:
+      # 204: no content -- change was successfully marked not private.
+      pass
+    elif e.http_status == 409:
+      raise GOBError(
+          http_status=e.http_status,
+          reason='Change was already marked not private',
+      )
+    else:
+      raise
+  else:
+    raise GOBError(
+        http_status=200,
+        reason='Got unexpected 200 when marking change not private.',
+    )
+
+
+def GetReviewers(host, change):
+  """Get information about all reviewers attached to a change.
+
+  Args:
+    host: The Gerrit host to interact with.
+    change: The Gerrit change ID.
+  """
+  path = '%s/reviewers' % _GetChangePath(change)
+  return FetchUrlJson(host, path)
+
+
+def AddReviewers(host, change, add=None, notify=None):
+  """Add reviewers to a change."""
+  if not add:
+    return
+  if isinstance(add, six.string_types):
+    add = (add,)
+  body = {}
+  if notify:
+    body['notify'] = notify
+  path = '%s/reviewers' % _GetChangePath(change)
+  for r in add:
+    body['reviewer'] = r
+    jmsg = FetchUrlJson(host, path, reqtype='POST', body=body, ignore_404=False)
+  return jmsg
+
+
+def RemoveReviewers(host, change, remove=None, notify=None):
+  """Remove reveiewers from a change."""
+  if not remove:
+    return
+  if isinstance(remove, six.string_types):
+    remove = (remove,)
+  body = {}
+  if notify:
+    body['notify'] = notify
+  for r in remove:
+    path = '%s/reviewers/%s/delete' % (_GetChangePath(change), r)
+    try:
+      FetchUrl(host, path, reqtype='POST', body=body, ignore_204=True)
+    except GOBError as e:
+      # On success, gerrit returns status 204; anything else is an error.
+      if e.http_status != 204:
+        raise
+
+
+def SetReview(host, change, revision=None, msg=None, labels=None, notify=None):
+  """Set labels and/or add a message to a code review."""
+  if revision is None:
+    revision = 'current'
+  if not msg and not labels:
+    return
+  path = '%s/revisions/%s/review' % (_GetChangePath(change), revision)
+  body = {}
+  if msg:
+    body['message'] = msg
+  if labels:
+    body['labels'] = labels
+  if notify:
+    body['notify'] = notify
+  response = FetchUrlJson(host, path, reqtype='POST', body=body)
+  if response is None:
+    raise GOBError(
+        http_status=404,
+        reason='CL %s not found in %s' % (change, host))
+  if labels:
+    for key, val in labels.items():
+      if ('labels' not in response or key not in response['labels'] or
+          int(response['labels'][key]) != int(val)):
+        raise GOBError(
+            http_status=200,
+            reason='Unable to set "%s" label on change %s.' % (key, change))
+
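+# Example usage of SetReview (illustrative only; the host, change number and
+# label below are hypothetical):
+#
+#   SetReview('gerrit-review.example.com', '12345',
+#             msg='Autotest: build verified.',
+#             labels={'Verified': 1})
+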
+
+def SetTopic(host, change, topic):
+  """Set |topic| for a change. If |topic| is empty, it will be deleted"""
+  path = '%s/topic' % _GetChangePath(change)
+  body = {'topic': topic}
+  return FetchUrlJson(host, path, reqtype='PUT', body=body, ignore_404=False)
+
+
+def SetHashtags(host, change, add, remove):
+  """Adds and / or removes hashtags from a change.
+
+  Args:
+    host: Hostname (without protocol prefix) of the gerrit server.
+    change: A gerrit change number.
+    add: a list of hashtags to be added.
+    remove: a list of hashtags to be removed.
+  """
+  path = '%s/hashtags' % _GetChangePath(change)
+  return FetchUrlJson(host, path, reqtype='POST',
+                      body={'add': add, 'remove': remove},
+                      ignore_404=False)
+
+
+def ResetReviewLabels(host, change, label, value='0', revision=None,
+                      message=None, notify=None):
+  """Reset the value of a given label for all reviewers on a change."""
+  if revision is None:
+    revision = 'current'
+  # This is tricky when working on the "current" revision, because there's
+  # always the risk that the "current" revision will change in between API
+  # calls.  So, the code dereferences the "current" revision down to a literal
+  # sha1 at the beginning and uses it for all subsequent calls.  As a sanity
+  # check, the "current" revision is dereferenced again at the end, and if it
+  # differs from the previous "current" revision, an exception is raised.
+  current = (revision == 'current')
+  jmsg = GetChangeDetail(
+      host, change, o_params=['CURRENT_REVISION', 'CURRENT_COMMIT'])
+  if current:
+    revision = jmsg['current_revision']
+  value = str(value)
+  path = '%s/revisions/%s/review' % (_GetChangePath(change), revision)
+  message = message or (
+      '%s label set to %s programmatically by chromite.' % (label, value))
+  for review in jmsg.get('labels', {}).get(label, {}).get('all', []):
+    if str(review.get('value', value)) != value:
+      body = {
+          'message': message,
+          'labels': {label: value},
+          'on_behalf_of': review['_account_id'],
+      }
+      if notify:
+        body['notify'] = notify
+      response = FetchUrlJson(host, path, reqtype='POST', body=body)
+      if str(response['labels'][label]) != value:
+        username = review.get('email', jmsg.get('name', ''))
+        raise GOBError(
+            http_status=200,
+            reason='Unable to set %s label for user "%s" on change %s.' % (
+                label, username, change))
+  if current:
+    new_revision = GetChangeCurrentRevision(host, change)
+    if not new_revision:
+      raise GOBError(
+          http_status=200,
+          reason='Could not get review information for change "%s"' % change)
+    elif new_revision != revision:
+      raise GOBError(
+          http_status=200,
+          reason='While resetting labels on change "%s", a new patchset was '
+                 'uploaded.' % change)
+
+
+def GetTipOfTrunkRevision(git_url):
+  """Returns the current git revision on the master branch."""
+  parsed_url = urllib.parse.urlparse(git_url)
+  path = parsed_url[2].rstrip('/') + '/+log/master?n=1&format=JSON'
+  j = FetchUrlJson(parsed_url[1], path, ignore_404=False)
+  if not j:
+    raise GOBError(
+        reason='Could not find revision information from %s' % git_url)
+  try:
+    return j['log'][0]['commit']
+  except (IndexError, KeyError, TypeError):
+    msg = ('The json returned by https://%s%s has an unfamiliar structure:\n'
+           '%s\n' % (parsed_url[1], path, j))
+    raise GOBError(reason=msg)
+
+
+def GetCommitDate(git_url, commit):
+  """Returns the date of a particular git commit.
+
+  The returned object is naive in the sense that it doesn't carry any timezone
+  information - you should assume UTC.
+
+  Args:
+    git_url: URL for the repository to get the commit date from.
+    commit: A git commit identifier (e.g. a sha1).
+
+  Returns:
+    A datetime object.
+  """
+  parsed_url = urllib.parse.urlparse(git_url)
+  path = '%s/+log/%s?n=1&format=JSON' % (parsed_url.path.rstrip('/'), commit)
+  j = FetchUrlJson(parsed_url.netloc, path, ignore_404=False)
+  if not j:
+    raise GOBError(
+        reason='Could not find revision information from %s' % git_url)
+  try:
+    commit_timestr = j['log'][0]['committer']['time']
+  except (IndexError, KeyError, TypeError):
+    msg = ('The json returned by https://%s%s has an unfamiliar structure:\n'
+           '%s\n' % (parsed_url.netloc, path, j))
+    raise GOBError(reason=msg)
+  try:
+    # We're parsing a string of the form 'Tue Dec 02 17:48:06 2014'.
+    return datetime.datetime.strptime(commit_timestr,
+                                      constants.GOB_COMMIT_TIME_FORMAT)
+  except ValueError:
+    raise GOBError(reason='Failed parsing commit time "%s"' % commit_timestr)
+
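+# Example usage of GetTipOfTrunkRevision / GetCommitDate (illustrative only;
+# the repository URL is hypothetical):
+#
+#   rev = GetTipOfTrunkRevision('https://chromium.googlesource.com/example/repo')
+#   when = GetCommitDate('https://chromium.googlesource.com/example/repo', rev)
+#   # |when| is a naive datetime.datetime; treat it as UTC.
+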
+
+def GetAccount(host):
+  """Get information about the user account."""
+  return FetchUrlJson(host, 'accounts/self')
diff --git a/utils/frozen_chromite/lib/gs.py b/utils/frozen_chromite/lib/gs.py
new file mode 100644
index 0000000..f97f4d5
--- /dev/null
+++ b/utils/frozen_chromite/lib/gs.py
@@ -0,0 +1,1417 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library to make common google storage operations more reliable."""
+
+from __future__ import print_function
+
+import collections
+import contextlib
+import datetime
+import errno
+import fnmatch
+import getpass
+import glob
+import hashlib
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+
+import six
+from six.moves import urllib
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import cache
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_collections
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import path_util
+from autotest_lib.utils.frozen_chromite.lib import retry_stats
+from autotest_lib.utils.frozen_chromite.lib import retry_util
+from autotest_lib.utils.frozen_chromite.lib import signals
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
+
+
+# This bucket has the allAuthenticatedUsers:READER ACL.
+AUTHENTICATION_BUCKET = 'gs://chromeos-authentication-bucket/'
+
+# Public path, only really works for files.
+PUBLIC_BASE_HTTPS_URL = 'https://storage.googleapis.com/'
+
+# Private path for files.
+PRIVATE_BASE_HTTPS_URL = 'https://storage.cloud.google.com/'
+
+# Private path for directories.
+# TODO(akeshet): this is a workaround for b/27653354. If that is ultimately
+# fixed, revisit this workaround.
+PRIVATE_BASE_HTTPS_DOWNLOAD_URL = 'https://stainless.corp.google.com/browse/'
+BASE_GS_URL = 'gs://'
+
+# Format used by "gsutil ls -l" when reporting modified time.
+DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
+
+# Regexp for parsing each line of output from "gsutil ls -l".
+# This regexp is prepared for the generation and meta_generation values,
+# too, even though they are not expected until we use "-a".
+#
+# A detailed listing looks like:
+#    99908  2014-03-01T05:50:08Z  gs://bucket/foo/abc#1234  metageneration=1
+#                                 gs://bucket/foo/adir/
+#    99908  2014-03-04T01:16:55Z  gs://bucket/foo/def#5678  metageneration=1
+# TOTAL: 2 objects, 199816 bytes (495.36 KB)
+LS_LA_RE = re.compile(
+    r'^\s*(?P<content_length>\d*?)\s+'
+    r'(?P<creation_time>\S*?)\s+'
+    r'(?P<url>[^#$]+).*?'
+    r'('
+    r'#(?P<generation>\d+)\s+'
+    r'meta_?generation=(?P<metageneration>\d+)'
+    r')?\s*$')
+LS_RE = re.compile(r'^\s*(?P<content_length>)(?P<creation_time>)(?P<url>.*)'
+                   r'(?P<generation>)(?P<metageneration>)\s*$')
+
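+# Example of what LS_LA_RE extracts from one detailed listing line
+# (illustrative; the bucket and object names are hypothetical):
+#
+#   m = LS_LA_RE.search(
+#       '   99908  2014-03-01T05:50:08Z  gs://bucket/foo/abc#1234  '
+#       'metageneration=1')
+#   # m.group('content_length') == '99908'
+#   # m.group('creation_time') == '2014-03-01T05:50:08Z'
+#   # m.group('url') == 'gs://bucket/foo/abc'
+#   # m.group('generation') == '1234', m.group('metageneration') == '1'
+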
+# Format used by ContainsWildCard, which is duplicated from
+# https://github.com/GoogleCloudPlatform/gsutil/blob/v4.21/gslib/storage_url.py#L307.
+WILDCARD_REGEX = re.compile(r'[*?\[\]]')
+
+
+def PathIsGs(path):
+  """Determine if a path is a Google Storage URI."""
+  return path.startswith(BASE_GS_URL)
+
+
+def CanonicalizeURL(url, strict=False):
+  """Convert provided URL to gs:// URL, if it follows a known format.
+
+  Args:
+    url: URL to canonicalize.
+    strict: Raises exception if URL cannot be canonicalized.
+  """
+  for prefix in (PUBLIC_BASE_HTTPS_URL,
+                 PRIVATE_BASE_HTTPS_URL,
+                 PRIVATE_BASE_HTTPS_DOWNLOAD_URL,
+                 'https://pantheon.corp.google.com/storage/browser/',
+                 'https://commondatastorage.googleapis.com/'):
+    if url.startswith(prefix):
+      return url.replace(prefix, BASE_GS_URL, 1)
+
+  if not PathIsGs(url) and strict:
+    raise ValueError('Url %r cannot be canonicalized.' % url)
+
+  return url
+
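+# Example (illustrative; the bucket and object names are hypothetical):
+#
+#   CanonicalizeURL('https://storage.googleapis.com/some-bucket/file.bin')
+#   # -> 'gs://some-bucket/file.bin'
+#   CanonicalizeURL('ftp://example.com/file.bin', strict=True)
+#   # -> raises ValueError, since the URL has no known gs:// form.
+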
+
+def GetGsURL(bucket, for_gsutil=False, public=True, suburl=''):
+  """Construct a Google Storage URL
+
+  Args:
+    bucket: The Google Storage bucket to use
+    for_gsutil: Do you want a URL for passing to `gsutil`?
+    public: Do we want the public or private url
+    suburl: A url fragment to tack onto the end
+
+  Returns:
+    The fully constructed URL
+  """
+  url = 'gs://%s/%s' % (bucket, suburl)
+
+  if for_gsutil:
+    return url
+  else:
+    return GsUrlToHttp(url, public=public)
+
+
+def GsUrlToHttp(path, public=True, directory=False):
+  """Convert a GS URL to a HTTP URL for the same resource.
+
+  Because the HTTP URLs are not fixed (and may not always be simple prefix
+  replacements), use this method to centralize the conversion.
+
+  Directories need different URLs from files because the GS web UIs handle
+  them inconsistently. Public HTTP URLs for directories generally don't work,
+  and probably never will (for both permissions and UI reasons).
+
+  e.g. 'gs://chromeos-image-archive/path/file' ->
+       'https://pantheon/path/file'
+
+  Args:
+    path: GS URL to convert.
+    public: Is this URL for Googler access, or publicly visible?
+    directory: Force this URL to be treated as a directory?
+               We try to autodetect on False.
+
+  Returns:
+    https URL as a string.
+  """
+  assert PathIsGs(path)
+  directory = directory or path.endswith('/')
+
+  # Public HTTP URLs for directories don't work, so ideally this would assert:
+  #   assert not public or not directory
+
+  if public:
+    return path.replace(BASE_GS_URL, PUBLIC_BASE_HTTPS_URL, 1)
+  else:
+    if directory:
+      return path.replace(BASE_GS_URL, PRIVATE_BASE_HTTPS_DOWNLOAD_URL, 1)
+    else:
+      return path.replace(BASE_GS_URL, PRIVATE_BASE_HTTPS_URL, 1)
+
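+# Example (illustrative; the bucket and paths are hypothetical):
+#
+#   GsUrlToHttp('gs://some-bucket/build/image.bin')
+#   # -> 'https://storage.googleapis.com/some-bucket/build/image.bin'
+#   GsUrlToHttp('gs://some-bucket/build/', public=False)
+#   # -> 'https://stainless.corp.google.com/browse/some-bucket/build/'
+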
+
+class GSContextException(Exception):
+  """Base exception for all exceptions thrown by GSContext."""
+
+
+# Since the underlying code uses run, some callers might be trying to
+# catch cros_build_lib.RunCommandError themselves.  Extend that class so that
+# code continues to work.
+class GSCommandError(GSContextException, cros_build_lib.RunCommandError):
+  """Thrown when an error happened we couldn't decode."""
+
+
+class GSContextPreconditionFailed(GSContextException):
+  """Thrown when google storage returns code=PreconditionFailed."""
+
+
+class GSNoSuchKey(GSContextException):
+  """Thrown when google storage returns code=NoSuchKey."""
+
+
+# Detailed results of GSContext.Stat.
+#
+# The fields directly correspond to gsutil stat results.
+#
+#  Field name        Type         Example
+#   creation_time     datetime     Sat, 23 Aug 2014 06:53:20 GMT
+#   content_length    int          74
+#   content_type      string       application/octet-stream
+#   hash_crc32c       string       BBPMPA==
+#   hash_md5          string       ms+qSYvgI9SjXn8tW/5UpQ==
+#   etag              string       CNCgocbmqMACEAE=
+#   generation        int          1408776800850000
+#   metageneration    int          1
+#
+# Note: We omit a few stat fields as they are not always available, and we
+# have no callers that want this currently.
+#
+#   content_language  string/None  en   # This field may be None.
+GSStatResult = collections.namedtuple(
+    'GSStatResult',
+    ('creation_time', 'content_length', 'content_type', 'hash_crc32c',
+     'hash_md5', 'etag', 'generation', 'metageneration'))
+
+
+# Detailed results of GSContext.List.
+GSListResult = collections.namedtuple(
+    'GSListResult',
+    ('url', 'creation_time', 'content_length', 'generation', 'metageneration'))
+
+
+ErrorDetails = cros_collections.Collection(
+    'ErrorDetails',
+    type=None, message_pattern='', retriable=None, exception=None)
+
+
+class GSCounter(object):
+  """A counter class for Google Storage."""
+
+  def __init__(self, ctx, path):
+    """Create a counter object.
+
+    Args:
+      ctx: A GSContext object.
+      path: The path to the counter in Google Storage.
+    """
+    self.ctx = ctx
+    self.path = path
+
+  def Get(self):
+    """Get the current value of a counter."""
+    try:
+      return int(self.ctx.Cat(self.path))
+    except GSNoSuchKey:
+      return 0
+
+  def AtomicCounterOperation(self, default_value, operation):
+    """Atomically set the counter value using |operation|.
+
+    Args:
+      default_value: Default value to use for counter, if counter
+                     does not exist.
+      operation: Function that takes the current counter value as a
+                 parameter, and returns the new desired value.
+
+    Returns:
+      The new counter value. None if value could not be set.
+    """
+    generation, _ = self.ctx.GetGeneration(self.path)
+    for _ in range(self.ctx.retries + 1):
+      try:
+        value = default_value if generation == 0 else operation(self.Get())
+        self.ctx.Copy('-', self.path, input=str(value), version=generation)
+        return value
+      except (GSContextPreconditionFailed, GSNoSuchKey):
+        # GSContextPreconditionFailed is thrown if another builder is also
+        # trying to update the counter and we lost the race. GSNoSuchKey is
+        # thrown if another builder deleted the counter. In either case, fetch
+        # the generation again, and, if it has changed, try the copy again.
+        new_generation, _ = self.ctx.GetGeneration(self.path)
+        if new_generation == generation:
+          raise
+        generation = new_generation
+
+  def Increment(self):
+    """Increment the counter.
+
+    Returns:
+      The new counter value. None if value could not be set.
+    """
+    return self.AtomicCounterOperation(1, lambda x: x + 1)
+
+  def Decrement(self):
+    """Decrement the counter.
+
+    Returns:
+      The new counter value. None if value could not be set.
+    """
+    return self.AtomicCounterOperation(-1, lambda x: x - 1)
+
+  def Reset(self):
+    """Reset the counter to zero.
+
+    Returns:
+      The new counter value. None if value could not be set.
+    """
+    return self.AtomicCounterOperation(0, lambda x: 0)
+
+  def StreakIncrement(self):
+    """Increment the counter if it is positive, otherwise set it to 1.
+
+    Returns:
+      The new counter value. None if value could not be set.
+    """
+    return self.AtomicCounterOperation(1, lambda x: x + 1 if x > 0 else 1)
+
+  def StreakDecrement(self):
+    """Decrement the counter if it is negative, otherwise set it to -1.
+
+    Returns:
+      The new counter value. None if value could not be set.
+    """
+    return self.AtomicCounterOperation(-1, lambda x: x - 1 if x < 0 else -1)
+
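+# Example usage of GSCounter (illustrative; the counter path is hypothetical):
+#
+#   ctx = GSContext()
+#   counter = GSCounter(ctx, 'gs://some-bucket/counters/build-attempts')
+#   counter.Increment()  # Returns the new value, or None if the
+#                        # generation-conditioned copy kept losing the race.
+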
+
+class GSContext(object):
+  """A class to wrap common google storage operations."""
+
+  # Error messages that indicate an invalid BOTO config.
+  AUTHORIZATION_ERRORS = ('no configured', 'none configured',
+                          'detail=Authorization', '401 Anonymous caller')
+
+  DEFAULT_BOTO_FILE = os.path.expanduser('~/.boto')
+  DEFAULT_GSUTIL_TRACKER_DIR = os.path.expanduser('~/.gsutil/tracker-files')
+  # This is set for ease of testing.
+  DEFAULT_GSUTIL_BIN = None
+  DEFAULT_GSUTIL_BUILDER_BIN = '/b/build/third_party/gsutil/gsutil'
+  # How many times to retry uploads.
+  DEFAULT_RETRIES = 3
+
+  # Multiplier for how long to sleep (in seconds) between retries; will delay
+  # (1*sleep) the first time, then (2*sleep), continuing via attempt * sleep.
+  DEFAULT_SLEEP_TIME = 60
+
+  GSUTIL_VERSION = '4.51'
+  GSUTIL_TAR = 'gsutil_%s.tar.gz' % GSUTIL_VERSION
+  GSUTIL_URL = (PUBLIC_BASE_HTTPS_URL +
+                'chromeos-mirror/gentoo/distfiles/%s' % GSUTIL_TAR)
+  GSUTIL_API_SELECTOR = 'JSON'
+
+  RESUMABLE_UPLOAD_ERROR = (b'Too many resumable upload attempts failed '
+                            b'without progress')
+  RESUMABLE_DOWNLOAD_ERROR = (b'Too many resumable download attempts failed '
+                              b'without progress')
+
+  # TODO: Below is a list of known flaky errors that we should
+  # retry. The list needs to be extended.
+  RESUMABLE_ERROR_MESSAGE = (
+      RESUMABLE_DOWNLOAD_ERROR,
+      RESUMABLE_UPLOAD_ERROR,
+      b'ResumableUploadException',
+      b'ResumableUploadAbortException',
+      b'ResumableDownloadException',
+      b'ssl.SSLError: The read operation timed out',
+      # TODO: Error messages may change in different library versions,
+      # use regexes to match resumable error messages.
+      b"ssl.SSLError: ('The read operation timed out',)",
+      b'ssl.SSLError: _ssl.c:495: The handshake operation timed out',
+      b'Unable to find the server',
+      b"doesn't match cloud-supplied digest",
+      b'ssl.SSLError: [Errno 8]',
+      b'EOF occurred in violation of protocol',
+      # TODO(nxia): crbug.com/775330 narrow down the criteria for retrying
+      b'AccessDeniedException',
+  )
+
+  # We have seen flaky errors with 5xx return codes
+  # See b/17376491 for the "JSON decoding" error.
+  # We have seen transient Oauth 2.0 credential errors (crbug.com/414345).
+  TRANSIENT_ERROR_MESSAGE = (
+      b'ServiceException: 5',
+      b'Failure: No JSON object could be decoded',
+      b'Oauth 2.0 User Account',
+      b'InvalidAccessKeyId',
+      b'socket.error: [Errno 104] Connection reset by peer',
+      b'Received bad request from server',
+      b"can't start new thread",
+  )
+
+  @classmethod
+  def GetDefaultGSUtilBin(cls, cache_dir=None, cache_user=None):
+    if cls.DEFAULT_GSUTIL_BIN is None:
+      if cache_dir is None:
+        cache_dir = path_util.GetCacheDir()
+      if cache_dir is not None:
+        common_path = os.path.join(cache_dir, constants.COMMON_CACHE)
+        tar_cache = cache.TarballCache(common_path, cache_user=cache_user)
+        key = (cls.GSUTIL_TAR,)
+        # The common cache will not be LRU, removing the need to hold a read
+        # lock on the cached gsutil.
+        ref = tar_cache.Lookup(key)
+        ref.SetDefault(cls.GSUTIL_URL)
+        cls.DEFAULT_GSUTIL_BIN = os.path.join(ref.path, 'gsutil', 'gsutil')
+        cls._CompileCrcmod(ref.path)
+      else:
+        # Check if the default gsutil path for builders exists. If
+        # not, try locating gsutil. If none exists, simply use 'gsutil'.
+        gsutil_bin = cls.DEFAULT_GSUTIL_BUILDER_BIN
+        if not os.path.exists(gsutil_bin):
+          gsutil_bin = osutils.Which('gsutil')
+        if gsutil_bin is None:
+          gsutil_bin = 'gsutil'
+        cls.DEFAULT_GSUTIL_BIN = gsutil_bin
+
+    return cls.DEFAULT_GSUTIL_BIN
+
+  @classmethod
+  def _CompileCrcmod(cls, path):
+    """Try to setup a compiled crcmod for gsutil.
+
+    The native crcmod code is much faster than the python implementation, and
+    enables some more features (otherwise gsutil internally disables them).
+    Try to compile the module on demand in the crcmod tree bundled with gsutil.
+
+    For more details, see:
+    https://cloud.google.com/storage/docs/gsutil/addlhelp/CRC32CandInstallingcrcmod
+    """
+    src_root = os.path.join(path, 'gsutil', 'third_party', 'crcmod')
+
+    # Try to build it once.
+    flag = os.path.join(src_root, '.chromite.tried.build')
+    if os.path.exists(flag):
+      return
+    # Flag things now regardless of how the attempt below works out.
+    try:
+      osutils.Touch(flag)
+    except IOError as e:
+      # If the gsutil dir was cached previously as root, but now we're
+      # non-root, just flag it and return.
+      if e.errno == errno.EACCES:
+        logging.debug('Skipping gsutil crcmod compile due to permissions')
+        cros_build_lib.sudo_run(['touch', flag], debug_level=logging.DEBUG)
+        return
+      else:
+        raise
+
+    # See if the system includes one in which case we're done.
+    # We probe `python` as that's what gsutil uses for its shebang.
+    result = cros_build_lib.run(
+        ['python', '-c', 'from crcmod.crcmod import _usingExtension; '
+         'exit(0 if _usingExtension else 1)'], check=False, capture_output=True)
+    if result.returncode == 0:
+      return
+
+    # See if the local copy has one.
+    for pyver in ('python2', 'python3'):
+      logging.debug('Attempting to compile local crcmod for %s gsutil', pyver)
+      with osutils.TempDir(prefix='chromite.gsutil.crcmod') as tempdir:
+        result = cros_build_lib.run(
+            [pyver, 'setup.py', 'build', '--build-base', tempdir,
+             '--build-platlib', tempdir],
+            cwd=src_root, capture_output=True, check=False,
+            debug_level=logging.DEBUG)
+        if result.returncode:
+          continue
+
+        # Locate the module in the build dir.
+        copied = False
+        for mod_path in glob.glob(
+            os.path.join(tempdir, 'crcmod', '_crcfunext*.so')):
+          dst_mod_path = os.path.join(src_root, pyver, 'crcmod',
+                                      os.path.basename(mod_path))
+          try:
+            shutil.copy2(mod_path, dst_mod_path)
+            copied = True
+          except shutil.Error:
+            pass
+
+        if not copied:
+          # If the module compile failed (missing compiler/headers/whatever),
+          # then the setup.py build command above would have passed, but there
+          # won't actually be a _crcfunext.so module.  Check for it here to
+          # disambiguate other errors from shutil.copy2.
+          logging.debug('No crcmod module produced (missing host compiler?)')
+          continue
+
+  def __init__(self, boto_file=None, cache_dir=None, acl=None,
+               dry_run=False, gsutil_bin=None, init_boto=False, retries=None,
+               sleep=None, cache_user=None):
+    """Constructor.
+
+    Args:
+      boto_file: Fully qualified path to user's .boto credential file.
+      cache_dir: The absolute path to the cache directory. Use the default
+        fallback if not given.
+      acl: If given, a canned ACL. It is not valid to pass in an ACL file
+        here, because most gsutil commands do not accept ACL files. If you
+        would like to use an ACL file, use the SetACL command instead.
+      dry_run: Testing mode that prints commands that would be run.
+      gsutil_bin: If given, the absolute path to the gsutil binary.  Else
+        the default fallback will be used.
+      init_boto: If set to True, GSContext will check during __init__ if a
+        valid boto config is configured, and if not, will attempt to ask the
+        user to interactively set up the boto config.
+      retries: Number of times to retry a command before failing.
+      sleep: Amount of time to sleep between failures.
+      cache_user: user for creating cache_dir for gsutil. Default is None.
+    """
+    if gsutil_bin is None:
+      gsutil_bin = self.GetDefaultGSUtilBin(cache_dir, cache_user=cache_user)
+    else:
+      self._CheckFile('gsutil not found', gsutil_bin)
+    self.gsutil_bin = gsutil_bin
+
+    # The version of gsutil is retrieved on demand and cached here.
+    self._gsutil_version = None
+
+    # Increase the number of retries. With 10 retries, Boto will try a total of
+    # 11 times and wait up to 2**11 seconds (~30 minutes) in total, not
+    # including the time spent actually uploading or downloading.
+    self.gsutil_flags = ['-o', 'Boto:num_retries=10']
+
+    # Set HTTP proxy if environment variable http_proxy is set
+    # (crbug.com/325032).
+    if 'http_proxy' in os.environ:
+      url = urllib.parse.urlparse(os.environ['http_proxy'])
+      if not url.hostname or (not url.username and url.password):
+        logging.warning('GS_ERROR: Ignoring env variable http_proxy because it '
+                        'is not properly set: %s', os.environ['http_proxy'])
+      else:
+        self.gsutil_flags += ['-o', 'Boto:proxy=%s' % url.hostname]
+        if url.username:
+          self.gsutil_flags += ['-o', 'Boto:proxy_user=%s' % url.username]
+        if url.password:
+          self.gsutil_flags += ['-o', 'Boto:proxy_pass=%s' % url.password]
+        if url.port:
+          self.gsutil_flags += ['-o', 'Boto:proxy_port=%d' % url.port]
+
+    # Prefer boto_file if specified, else prefer the env then the default.
+    if boto_file is None:
+      boto_file = os.environ.get('BOTO_CONFIG')
+    if boto_file is None and os.path.isfile(self.DEFAULT_BOTO_FILE):
+      # Only set boto file to DEFAULT_BOTO_FILE if it exists.
+      boto_file = self.DEFAULT_BOTO_FILE
+
+    self.boto_file = boto_file
+
+    self.acl = acl
+
+    self.dry_run = dry_run
+    self.retries = self.DEFAULT_RETRIES if retries is None else int(retries)
+    self._sleep_time = self.DEFAULT_SLEEP_TIME if sleep is None else int(sleep)
+
+    if init_boto and not dry_run:
+      # We can't really expect gsutil to even be present in dry_run mode.
+      self._InitBoto()
+
+  @property
+  def gsutil_version(self):
+    """Return the version of the gsutil in this context."""
+    if not self._gsutil_version:
+      if self.dry_run:
+        self._gsutil_version = self.GSUTIL_VERSION
+      else:
+        cmd = ['-q', 'version']
+
+        # gsutil has been known to return version to stderr in the past, so
+        # use stderr=subprocess.STDOUT.
+        result = self.DoCommand(cmd, stdout=True, stderr=subprocess.STDOUT)
+
+        # Expect output like: 'gsutil version 3.35' or 'gsutil version: 4.5'.
+        match = re.search(r'^\s*gsutil\s+version:?\s+([\d.]+)', result.output,
+                          re.IGNORECASE)
+        if match:
+          self._gsutil_version = match.group(1)
+        else:
+          raise GSContextException('Unexpected output format from "%s":\n%s.' %
+                                   (result.cmdstr, result.output))
+
+    return self._gsutil_version
+
+  def _CheckFile(self, errmsg, afile):
+    """Pre-flight check for valid inputs.
+
+    Args:
+      errmsg: Error message to display.
+      afile: Fully qualified path to test for file existence.
+    """
+    if not os.path.isfile(afile):
+      raise GSContextException('%s, %s is not a file' % (errmsg, afile))
+
+  def _TestGSLs(self):
+    """Quick test of gsutil functionality."""
+    # The bucket in question is readable by any authenticated account.
+    # If we can list its contents, we have valid authentication.
+    cmd = ['ls', AUTHENTICATION_BUCKET]
+    result = self.DoCommand(cmd, retries=0, debug_level=logging.DEBUG,
+                            stderr=True, check=False)
+
+    # Did we fail with an authentication error?
+    if (result.returncode == 1 and
+        any(e in result.error for e in self.AUTHORIZATION_ERRORS)):
+      logging.warning('gsutil authentication failure msg: %s', result.error)
+      return False
+
+    return True
+
+  def _ConfigureBotoConfig(self):
+    """Make sure we can access protected bits in GS."""
+    print('Configuring gsutil. **Please use your @google.com account.**')
+    try:
+      if not self.boto_file:
+        self.boto_file = self.DEFAULT_BOTO_FILE
+      self.DoCommand(['config'], retries=0, debug_level=logging.CRITICAL,
+                     print_cmd=False)
+    finally:
+      if (os.path.exists(self.boto_file) and not
+          os.path.getsize(self.boto_file)):
+        os.remove(self.boto_file)
+        raise GSContextException('GS config could not be set up.')
+
+  def _InitBoto(self):
+    if not self._TestGSLs():
+      self._ConfigureBotoConfig()
+
+  def Cat(self, path, **kwargs):
+    """Returns the contents of a GS object."""
+    kwargs.setdefault('stdout', True)
+    encoding = kwargs.setdefault('encoding', None)
+    errors = kwargs.setdefault('errors', None)
+    if not PathIsGs(path):
+      # gsutil doesn't support cat-ting a local path, so read it ourselves.
+      mode = 'rb' if encoding is None else 'r'
+      try:
+        return osutils.ReadFile(path, mode=mode, encoding=encoding,
+                                errors=errors)
+      except Exception as e:
+        if getattr(e, 'errno', None) == errno.ENOENT:
+          raise GSNoSuchKey('Cat Error: file %s does not exist' % path)
+        else:
+          raise GSContextException(str(e))
+    elif self.dry_run:
+      return b'' if encoding is None else ''
+    else:
+      return self.DoCommand(['cat', path], **kwargs).output
+
+  def StreamingCat(self, path, chunksize=0x100000):
+    """Returns the content of a GS file as a stream.
+
+    Unlike Cat or Copy, this function doesn't support any internal retry or
+    validation by computing checksum of downloaded data. Users should perform
+    their own validation, or use Cat() instead.
+
+    Args:
+      path: Full gs:// path of the src file.
+      chunksize: At most how much data read from upstream and yield to callers
+        at a time. The default value is 1 MB.
+
+    Yields:
+      The file content, chunk by chunk, as bytes.
+    """
+    assert PathIsGs(path)
+
+    if self.dry_run:
+      return (lambda: (yield b''))()
+
+    cmd = [self.gsutil_bin] + self.gsutil_flags + ['cat', path]
+    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+
+    def read_content():
+      try:
+        while True:
+          data = proc.stdout.read(chunksize)
+          if not data and proc.poll() is not None:
+            break
+          if data:
+            yield data
+
+        rc = proc.poll()
+        if rc:
+          raise GSCommandError(
+              'Cannot stream cat %s from Google Storage!' % path, rc, None)
+      finally:
+        if proc.returncode is None:
+          proc.stdout.close()
+          proc.terminate()
+
+    return read_content()
+
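+  # Example use of StreamingCat (illustrative; the paths are hypothetical):
+  #
+  #   ctx = GSContext()
+  #   with open('/tmp/image.bin', 'wb') as f:
+  #     for chunk in ctx.StreamingCat('gs://some-bucket/image.bin'):
+  #       f.write(chunk)
+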
+  def CopyInto(self, local_path, remote_dir, filename=None, **kwargs):
+    """Upload a local file into a directory in google storage.
+
+    Args:
+      local_path: Local file path to copy.
+      remote_dir: Full gs:// url of the directory to transfer the file into.
+      filename: If given, the filename to place the content at; if not given,
+        it's discerned from basename(local_path).
+      **kwargs: See Copy() for documentation.
+
+    Returns:
+      The generation of the remote file.
+    """
+    filename = filename if filename is not None else local_path
+    # Basename it even if an explicit filename was given; we don't want
+    # people using filename as a multi-directory path fragment.
+    return self.Copy(local_path,
+                     '%s/%s' % (remote_dir, os.path.basename(filename)),
+                     **kwargs)
+
+  @staticmethod
+  def GetTrackerFilenames(dest_path):
+    """Returns a list of gsutil tracker filenames.
+
+    Tracker files are used by gsutil to resume downloads/uploads. This
+    function does not handle parallel uploads.
+
+    Args:
+      dest_path: Either a GS path or an absolute local path.
+
+    Returns:
+      The list of potential tracker filenames.
+    """
+    dest = urllib.parse.urlsplit(dest_path)
+    filenames = []
+    if dest.scheme == 'gs':
+      prefix = 'upload'
+      bucket_name = dest.netloc
+      object_name = dest.path.lstrip('/')
+      filenames.append(
+          re.sub(r'[/\\]', '_', 'resumable_upload__%s__%s__%s.url' %
+                 (bucket_name, object_name, GSContext.GSUTIL_API_SELECTOR)))
+    else:
+      prefix = 'download'
+      filenames.append(
+          re.sub(r'[/\\]', '_', 'resumable_download__%s__%s.etag' %
+                 (dest.path, GSContext.GSUTIL_API_SELECTOR)))
+
+    hashed_filenames = []
+    for filename in filenames:
+      m = hashlib.sha1(filename.encode())
+      hashed_filenames.append('%s_TRACKER_%s.%s' %
+                              (prefix, m.hexdigest(), filename[-16:]))
+
+    return hashed_filenames
+
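+  # For example (illustrative), a resumable upload to
+  # 'gs://some-bucket/dir/file' produces a candidate name of the form
+  # 'upload_TRACKER_<sha1>.<last 16 chars of the mangled name>', where the
+  # mangled name is 'resumable_upload__some-bucket__dir_file__JSON.url'.
+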
+  def _RetryFilter(self, e):
+    """Returns whether to retry RunCommandError exception |e|.
+
+    Args:
+      e: Exception object to filter. Exception may be re-raised as
+         as different type, if _RetryFilter determines a more appropriate
+         exception type based on the contents of |e|.
+    """
+    error_details = self._MatchKnownError(e)
+    if error_details.exception:
+      raise error_details.exception
+    return error_details.retriable
+
+  def _MatchKnownError(self, e):
+    """Function to match known RunCommandError exceptions.
+
+    Args:
+      e: Exception object to filter.
+
+    Returns:
+      An ErrorDetails instance with details about the message pattern found.
+    """
+    if not retry_util.ShouldRetryCommandCommon(e):
+      if not isinstance(e, cros_build_lib.RunCommandError):
+        error_type = 'unknown'
+      else:
+        error_type = 'failed_to_launch'
+      return ErrorDetails(type=error_type, retriable=False)
+
+    # e is guaranteed by above filter to be a RunCommandError
+    if e.result.returncode < 0:
+      sig_name = signals.StrSignal(-e.result.returncode)
+      logging.info('Child process received signal %s; not retrying.', sig_name)
+      return ErrorDetails(type='received_signal', message_pattern=sig_name,
+                          retriable=False)
+
+    error = e.result.error
+    if error:
+      # Since the captured error will use the encoding the user requested,
+      # normalize to bytes for testing below.
+      if isinstance(error, six.text_type):
+        error = error.encode('utf-8')
+
+      # gsutil usually prints PreconditionException when a precondition fails.
+      # It may also print "ResumableUploadAbortException: 412 Precondition
+      # Failed", so the logic needs to be a little more general.
+      if (b'PreconditionException' in error or
+          b'412 Precondition Failed' in error):
+        return ErrorDetails(type='precondition_exception', retriable=False,
+                            exception=GSContextPreconditionFailed(e))
+
+      # If the file does not exist, one of the following errors occurs. The
+      # "stat" command leaves off the "CommandException: " prefix, but it also
+      # outputs to stdout instead of stderr and so will not be caught here
+      # regardless.
+      if (b'CommandException: No URLs matched' in error or
+          b'NotFoundException:' in error or
+          b'One or more URLs matched no objects' in error):
+        return ErrorDetails(type='no_such_key', retriable=False,
+                            exception=GSNoSuchKey(e))
+
+      logging.warning('GS_ERROR: %s ', error)
+
+      # Temporary fix: remove the gsutil tracker files so that our retry
+      # can hit a different backend. This should be removed after the
+      # bug is fixed by the Google Storage team (see crbug.com/308300).
+      resumable_error = _FirstSubstring(error, self.RESUMABLE_ERROR_MESSAGE)
+      if resumable_error:
+        # Only remove the tracker files if we try to upload/download a file.
+        if 'cp' in e.result.cmd[:-2]:
+          # Assume a command: gsutil [options] cp [options] src_path dest_path
+          # dest_path needs to be a fully qualified local path, which is already
+          # required for GSContext.Copy().
+          tracker_filenames = self.GetTrackerFilenames(e.result.cmd[-1])
+          logging.info('Potential list of tracker files: %s',
+                       tracker_filenames)
+          for tracker_filename in tracker_filenames:
+            tracker_file_path = os.path.join(self.DEFAULT_GSUTIL_TRACKER_DIR,
+                                             tracker_filename)
+            if os.path.exists(tracker_file_path):
+              logging.info('Deleting gsutil tracker file %s before retrying.',
+                           tracker_file_path)
+              logging.info('The content of the tracker file: %s',
+                           osutils.ReadFile(tracker_file_path))
+              osutils.SafeUnlink(tracker_file_path)
+        return ErrorDetails(type='resumable',
+                            message_pattern=resumable_error.decode('utf-8'),
+                            retriable=True)
+
+      transient_error = _FirstSubstring(error, self.TRANSIENT_ERROR_MESSAGE)
+      if transient_error:
+        return ErrorDetails(type='transient',
+                            message_pattern=transient_error.decode('utf-8'),
+                            retriable=True)
+
+    return ErrorDetails(type='unknown', retriable=False)
+
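+  # For example (illustrative), a RunCommandError whose stderr contains
+  # 'ResumableUploadException' comes back from _MatchKnownError as
+  # ErrorDetails(type='resumable', retriable=True) and is retried, while
+  # 'NotFoundException:' is converted into a GSNoSuchKey via _RetryFilter.
+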
+  # TODO(mtennant): Make a private method.
+  def DoCommand(self, gsutil_cmd, headers=(), retries=None, version=None,
+                parallel=False, **kwargs):
+    """Run a gsutil command, suppressing output, and setting retry/sleep.
+
+    Args:
+      gsutil_cmd: The (mostly) constructed gsutil subcommand to run.
+      headers: A list of raw headers to pass down.
+      parallel: Whether gsutil should enable parallel copy/update of multiple
+        files. NOTE: This option causes gsutil to use significantly more
+        memory, even if gsutil is only uploading one file.
+      retries: How many times to retry this command (defaults to setting given
+        at object creation).
+      version: If given, the generation; essentially the timestamp of the last
+        update.  Note this is not the same as sequence-number; it's
+        monotonically increasing bucket wide rather than reset per file.
+        The usage of this is if we intend to replace/update only if the version
+        is what we expect.  This is useful for distributed reasons -- for example,
+        to ensure you don't overwrite someone else's creation, a version of
+        0 states "only update if no version exists".
+
+    Returns:
+      A RunCommandResult object.
+    """
+    kwargs = kwargs.copy()
+    kwargs.setdefault('stderr', True)
+    kwargs.setdefault('encoding', 'utf-8')
+
+    cmd = [self.gsutil_bin]
+    cmd += self.gsutil_flags
+    for header in headers:
+      cmd += ['-h', header]
+    if version is not None:
+      cmd += ['-h', 'x-goog-if-generation-match:%d' % int(version)]
+
+    # Enable parallel copy/update of multiple files if stdin is not to
+    # be piped to the command. This does not split a single file into
+    # smaller components for upload.
+    if parallel and kwargs.get('input') is None:
+      cmd += ['-m']
+
+    cmd.extend(gsutil_cmd)
+
+    if retries is None:
+      retries = self.retries
+
+    extra_env = kwargs.pop('extra_env', {})
+    if self.boto_file and os.path.isfile(self.boto_file):
+      extra_env.setdefault('BOTO_CONFIG', self.boto_file)
+
+    if self.dry_run:
+      logging.debug("%s: would've run: %s", self.__class__.__name__,
+                    cros_build_lib.CmdToStr(cmd))
+    else:
+      try:
+        return retry_stats.RetryWithStats(retry_stats.GSUTIL,
+                                          self._RetryFilter,
+                                          retries, cros_build_lib.run,
+                                          cmd, sleep=self._sleep_time,
+                                          extra_env=extra_env, **kwargs)
+      except cros_build_lib.RunCommandError as e:
+        raise GSCommandError(e.msg, e.result, e.exception)
+
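+  # Example of the |version| argument (illustrative; the path is
+  # hypothetical).  A generation of 0 means "only write if the object does
+  # not exist yet":
+  #
+  #   ctx.Copy('-', 'gs://some-bucket/flag', input='1', version=0)
+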
+  def Copy(self, src_path, dest_path, acl=None, recursive=False,
+           skip_symlinks=True, auto_compress=False, **kwargs):
+    """Copy to/from GS bucket.
+
+    Canned ACL permissions can be specified on the gsutil cp command line.
+
+    More info:
+    https://developers.google.com/storage/docs/accesscontrol#applyacls
+
+    Args:
+      src_path: Fully qualified local path or full gs:// path of the src file.
+      dest_path: Fully qualified local path or full gs:// path of the dest
+                 file.
+      acl: One of the google storage canned_acls to apply.
+      recursive: Whether to copy recursively.
+      skip_symlinks: Skip symbolic links when copying recursively.
+      auto_compress: Automatically compress with gzip when uploading.
+
+    Returns:
+      The generation of the remote file.
+
+    Raises:
+      RunCommandError if the command failed despite retries.
+    """
+    # -v causes gs://bucket/path#generation to be listed in output.
+    cmd = ['cp', '-v']
+
+    # Certain versions of gsutil (at least 4.3) assume the source of a copy is
+    # a directory if the -r option is used. If it's really a file, gsutil will
+    # look like it's uploading it but not actually do anything. We'll work
+    # around that problem by suppressing the -r flag if we detect the source
+    # is a local file.
+    if recursive and not os.path.isfile(src_path):
+      cmd.append('-r')
+      if skip_symlinks:
+        cmd.append('-e')
+
+    if auto_compress:
+      cmd.append('-Z')
+
+    acl = self.acl if acl is None else acl
+    if acl is not None:
+      cmd += ['-a', acl]
+
+    with cros_build_lib.ContextManagerStack() as stack:
+      # Write the input into a tempfile if possible. This is needed so that
+      # gsutil can retry failed requests.  We allow the input to be a string
+      # or bytes regardless of the output encoding.
+      if src_path == '-' and kwargs.get('input') is not None:
+        f = stack.Add(tempfile.NamedTemporaryFile, mode='wb')
+        data = kwargs['input']
+        if isinstance(data, six.text_type):
+          data = data.encode('utf-8')
+        f.write(data)
+        f.flush()
+        del kwargs['input']
+        src_path = f.name
+
+      cmd += ['--', src_path, dest_path]
+
+      if not (PathIsGs(src_path) or PathIsGs(dest_path)):
+        # Don't retry on local copies.
+        kwargs.setdefault('retries', 0)
+
+      kwargs['capture_output'] = True
+      try:
+        result = self.DoCommand(cmd, **kwargs)
+        if self.dry_run:
+          return None
+
+        # Now we parse the output for the current generation number.  Example:
+        #   Created: gs://chromeos-throw-away-bucket/foo#1360630664537000.1
+        m = re.search(r'Created: .*#(\d+)([.](\d+))?\n', result.error)
+        if m:
+          return int(m.group(1))
+        else:
+          return None
+      except GSNoSuchKey as e:
+        # If the source was a local file, the error is a quirk of gsutil 4.5
+        # and should be ignored. If the source was remote, there might
+        # legitimately be no such file. See crbug.com/393419.
+        if os.path.isfile(src_path):
+          return None
+
+        # Temp log for crbug.com/642986, should be removed when the bug
+        # is fixed.
+        logging.warning('Copy Error: src %s dest %s: %s '
+                        '(Temp log for crbug.com/642986)',
+                        src_path, dest_path, e)
+        raise
+
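+  # Example use of Copy (illustrative; the local file and bucket are
+  # hypothetical):
+  #
+  #   gen = ctx.Copy('/tmp/payload.bin', 'gs://some-bucket/payloads/payload.bin',
+  #                  acl='public-read')
+  #   # |gen| is the generation parsed from the 'Created: ...#<generation>'
+  #   # line, or None in dry-run mode.
+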
+  def CreateWithContents(self, gs_uri, contents, **kwargs):
+    """Creates the specified file with specified contents.
+
+    Args:
+      gs_uri: The URI of a file on Google Storage.
+      contents: String or bytes with contents to write to the file.
+      kwargs: See additional options that Copy takes.
+
+    Raises:
+      See Copy.
+    """
+    self.Copy('-', gs_uri, input=contents, **kwargs)
+
+  # TODO: Merge LS() and List()?
+  def LS(self, path, **kwargs):
+    """Does a directory listing of the given gs path.
+
+    Args:
+      path: The path to get a listing of.
+      kwargs: See options that DoCommand takes.
+
+    Returns:
+      A list of paths that matched |path|.  Might be more than one if the
+      path is a directory or includes wildcards, etc.
+    """
+    if self.dry_run:
+      return []
+
+    if not PathIsGs(path):
+      # gsutil doesn't support listing a local path, so just run 'ls'.
+      kwargs.pop('retries', None)
+      kwargs.pop('headers', None)
+      kwargs['capture_output'] = True
+      kwargs.setdefault('encoding', 'utf-8')
+      result = cros_build_lib.run(['ls', path], **kwargs)
+      return result.output.splitlines()
+    else:
+      return [x.url for x in self.List(path, **kwargs)]
+
+  def List(self, path, details=False, **kwargs):
+    """Does a directory listing of the given gs path.
+
+    Args:
+      path: The path to get a listing of.
+      details: Whether to include size/timestamp info.
+      kwargs: See options that DoCommand takes.
+
+    Returns:
+      A list of GSListResult objects that matched |path|.  Might be more
+      than one if the path is a directory or includes wildcards, etc.
+    """
+    ret = []
+    if self.dry_run:
+      return ret
+
+    cmd = ['ls']
+    if details:
+      cmd += ['-l']
+    cmd += ['--', path]
+
+    # We always request the extended details as the overhead compared to a plain
+    # listing is negligible.
+    kwargs['stdout'] = True
+    lines = self.DoCommand(cmd, **kwargs).output.splitlines()
+
+    if details:
+      # The last line is expected to be a summary line.  Ignore it.
+      lines = lines[:-1]
+      ls_re = LS_LA_RE
+    else:
+      ls_re = LS_RE
+
+    # Handle optional fields.
+    intify = lambda x: int(x) if x else None
+
+    # Parse out each result and build up the results list.
+    for line in lines:
+      match = ls_re.search(line)
+      if not match:
+        raise GSContextException('unable to parse line: %s' % line)
+      if match.group('creation_time'):
+        timestamp = datetime.datetime.strptime(match.group('creation_time'),
+                                               DATETIME_FORMAT)
+      else:
+        timestamp = None
+
+      ret.append(GSListResult(
+          content_length=intify(match.group('content_length')),
+          creation_time=timestamp,
+          url=match.group('url'),
+          generation=intify(match.group('generation')),
+          metageneration=intify(match.group('metageneration'))))
+
+    return ret
+
+  def GetSize(self, path, **kwargs):
+    """Returns size of a single object (local or GS)."""
+    if not PathIsGs(path):
+      return os.path.getsize(path)
+    else:
+      return self.Stat(path, **kwargs).content_length
+
+  def Move(self, src_path, dest_path, **kwargs):
+    """Move/rename to/from GS bucket.
+
+    Args:
+      src_path: Fully qualified local path or full gs:// path of the src file.
+      dest_path: Fully qualified local path or full gs:// path of the dest file.
+      kwargs: See options that DoCommand takes.
+    """
+    cmd = ['mv', '--', src_path, dest_path]
+    return self.DoCommand(cmd, **kwargs)
+
+  def SetACL(self, upload_url, acl=None, **kwargs):
+    """Set access on a file already in google storage.
+
+    Args:
+      upload_url: gs:// url that will have acl applied to it.
+      acl: An ACL permissions file or canned ACL.
+      kwargs: See options that DoCommand takes.
+    """
+    if acl is None:
+      if not self.acl:
+        raise GSContextException(
+            'SetACL invoked without an acl specified and no default acl set.')
+      acl = self.acl
+
+    self.DoCommand(['acl', 'set', acl, upload_url], **kwargs)
+
+  def ChangeACL(self, upload_url, acl_args_file=None, acl_args=None, **kwargs):
+    """Change access on a file already in google storage with "acl ch".
+
+    Args:
+      upload_url: gs:// url that will have acl applied to it.
+      acl_args_file: A file with arguments to the gsutil acl ch command. The
+                     arguments can be spread across multiple lines. Comments
+                     start with a # character and extend to the end of the
+                     line. Exactly one of this argument or acl_args must be
+                     set.
+      acl_args: A list of arguments for the gsutil acl ch command. Exactly
+                one of this argument or acl_args_file must be set.
+      kwargs: See options that DoCommand takes.
+    """
+    if acl_args_file and acl_args:
+      raise GSContextException(
+          'ChangeACL invoked with both acl_args_file and acl_args set.')
+    if not acl_args_file and not acl_args:
+      raise GSContextException(
+          'ChangeACL invoked with neither acl_args_file nor acl_args set.')
+
+    if acl_args_file:
+      lines = osutils.ReadFile(acl_args_file).splitlines()
+      # Strip out comments.
+      lines = [x.split('#', 1)[0].strip() for x in lines]
+      acl_args = ' '.join([x for x in lines if x]).split()
+
+    # Some versions of gsutil bubble up precondition failures even when we
+    # didn't request it due to how ACL changes happen internally to gsutil.
+    # https://crbug.com/763450
+    # We keep the retry limit a bit low because DoCommand already has its
+    # own level of retries.
+    retry_util.RetryException(
+        GSContextPreconditionFailed, 3, self.DoCommand,
+        ['acl', 'ch'] + acl_args + [upload_url], **kwargs)
+
+  def Exists(self, path, **kwargs):
+    """Checks whether the given object exists.
+
+    Args:
+      path: Local path or gs:// url to check.
+      kwargs: Flags to pass to DoCommand.
+
+    Returns:
+      True if the path exists; otherwise returns False.
+    """
+    if not PathIsGs(path):
+      return os.path.exists(path)
+
+    try:
+      self.Stat(path, **kwargs)
+    except GSNoSuchKey:
+      return False
+
+    return True
+
+  def Remove(self, path, recursive=False, ignore_missing=False, **kwargs):
+    """Remove the specified file.
+
+    Args:
+      path: Full gs:// url of the file to delete.
+      recursive: Remove recursively starting at path.
+      ignore_missing: Whether to suppress errors about missing files.
+      kwargs: Flags to pass to DoCommand.
+    """
+    cmd = ['rm']
+    if 'recurse' in kwargs:
+      raise TypeError('"recurse" has been renamed to "recursive"')
+    if recursive:
+      cmd.append('-R')
+    cmd.append('--')
+    cmd.append(path)
+    try:
+      self.DoCommand(cmd, **kwargs)
+    except GSNoSuchKey:
+      if not ignore_missing:
+        raise
+
+  def GetGeneration(self, path):
+    """Get the generation and metageneration of the given |path|.
+
+    Returns:
+      A tuple of the generation and metageneration.
+    """
+    try:
+      res = self.Stat(path)
+    except GSNoSuchKey:
+      return 0, 0
+
+    return res.generation, res.metageneration
+
+  def Stat(self, path, **kwargs):
+    """Stat a GS file, and get detailed information.
+
+    Args:
+      path: A GS path for files to Stat. Wildcards are NOT supported.
+      kwargs: Flags to pass to DoCommand.
+
+    Returns:
+      A GSStatResult object with all fields populated.
+
+    Raises:
+      Assorted GSContextException exceptions.
+    """
+    try:
+      res = self.DoCommand(['stat', '--', path], stdout=True, **kwargs)
+    except GSCommandError as e:
+      # Because the 'gsutil stat' command logs errors itself (instead of
+      # raising errors internally like other commands), we have to look
+      # for errors ourselves.  See the related bug report here:
+      # https://github.com/GoogleCloudPlatform/gsutil/issues/288
+      # Example line:
+      # No URLs matched gs://bucket/file
+      if e.result.error and e.result.error.startswith('No URLs matched'):
+        raise GSNoSuchKey('Stat Error: No URLs matched %s.' % path)
+
+      # No idea what this is, so just choke.
+      raise
+
+    # In dryrun mode, DoCommand doesn't return an object, so we need to fake
+    # out the behavior ourselves.
+    if self.dry_run:
+      return GSStatResult(
+          creation_time=datetime.datetime.now(),
+          content_length=0,
+          content_type='application/octet-stream',
+          hash_crc32c='AAAAAA==',
+          hash_md5='',
+          etag='',
+          generation=0,
+          metageneration=0)
+
+    # We expect Stat output like the following. However, the Content-Language
+    # line appears to be optional based on how the file in question was
+    # created.
+    #
+    # gs://bucket/path/file:
+    #     Creation time:      Sat, 23 Aug 2014 06:53:20 GMT
+    #     Content-Language:   en
+    #     Content-Length:     74
+    #     Content-Type:       application/octet-stream
+    #     Hash (crc32c):      BBPMPA==
+    #     Hash (md5):         ms+qSYvgI9SjXn8tW/5UpQ==
+    #     ETag:               CNCgocbmqMACEAE=
+    #     Generation:         1408776800850000
+    #     Metageneration:     1
+
+    if not res.output.startswith('gs://'):
+      raise GSContextException('Unexpected stat output: %s' % res.output)
+
+    def _GetField(name, optional=False):
+      m = re.search(r'%s:\s*(.+)' % re.escape(name), res.output)
+      if m:
+        return m.group(1)
+      elif optional:
+        return None
+      else:
+        raise GSContextException('Field "%s" missing in "%s"' %
+                                 (name, res.output))
+
+    return GSStatResult(
+        creation_time=datetime.datetime.strptime(
+            _GetField('Creation time'), '%a, %d %b %Y %H:%M:%S %Z'),
+        content_length=int(_GetField('Content-Length')),
+        content_type=_GetField('Content-Type'),
+        hash_crc32c=_GetField('Hash (crc32c)'),
+        hash_md5=_GetField('Hash (md5)', optional=True),
+        etag=_GetField('ETag'),
+        generation=int(_GetField('Generation')),
+        metageneration=int(_GetField('Metageneration')))
+
+  def Counter(self, path):
+    """Return a GSCounter object pointing at a |path| in Google Storage.
+
+    Args:
+      path: The path to the counter in Google Storage.
+    """
+    return GSCounter(self, path)
+
+  def WaitForGsPaths(self, paths, timeout, period=10):
+    """Wait until a list of files exist in GS.
+
+    Args:
+      paths: The list of files to wait for.
+      timeout: Max seconds to wait for file to appear.
+      period: How often to check for files while waiting.
+
+    Raises:
+      timeout_util.TimeoutError if the timeout is reached.
+    """
+    # Copy the list of URIs to wait for, so we don't modify the caller's context.
+    pending_paths = paths[:]
+
+    def _CheckForExistence():
+      pending_paths[:] = [x for x in pending_paths if not self.Exists(x)]
+
+    def _Retry(_return_value):
+      # Retry, if there are any pending paths left.
+      return pending_paths
+
+    timeout_util.WaitForSuccess(_Retry, _CheckForExistence,
+                                timeout=timeout, period=period)
+
+  def ContainsWildcard(self, url):
+    """Checks whether url_string contains a wildcard.
+
+    Args:
+      url: URL string to check.
+
+    Returns:
+      True if |url| contains a wildcard.
+    """
+    return bool(WILDCARD_REGEX.search(url))
+
+  def GetGsNamesWithWait(self, pattern, url, timeout=600, period=10,
+                         is_regex_pattern=False):
+    """Returns the google storage names specified by the given pattern.
+
+    This method polls Google Storage until the target files specified by the
+    pattern are available or until the timeout occurs. Because we may not know
+    the exact names of the target files, the method accepts a filename pattern
+    to identify whether a file whose name matches the pattern exists
+    (e.g. use pattern '*_full_*' to search for the full payload
+    'chromeos_R17-1413.0.0-a1_x86-mario_full_dev.bin'). Returns the names only
+    if found before the timeout.
+
+    Warning: GS listings are not perfect and are only eventually consistent,
+    so searching for file existence is 'best effort'. Calling code should be
+    aware of this and ready to handle it.
+
+    Args:
+      pattern: a path pattern (glob or regex) identifying the files we need.
+      url: URL of the Google Storage bucket.
+      timeout: how many seconds are we allowed to keep trying.
+      period: how many seconds to wait between attempts.
+      is_regex_pattern: Whether the pattern is a regex (otherwise a glob).
+
+    Returns:
+      The list of files matching the pattern in Google Storage bucket or None
+      if the files are not found and hit the timeout_util.TimeoutError.
+    """
+    def _GetGsName():
+      uploaded_list = [os.path.basename(p.url) for p in self.List(url)]
+
+      if is_regex_pattern:
+        filter_re = re.compile(pattern)
+        matching_names = [f for f in uploaded_list
+                          if filter_re.search(f) is not None]
+      else:
+        matching_names = fnmatch.filter(uploaded_list, pattern)
+
+      return matching_names
+
+    try:
+      matching_names = None
+      if not (is_regex_pattern or self.ContainsWildcard(pattern)):
+        try:
+          self.WaitForGsPaths(['%s/%s' % (url, pattern)], timeout)
+          return [os.path.basename(pattern)]
+        except GSCommandError:
+          pass
+
+      if not matching_names:
+        matching_names = timeout_util.WaitForSuccess(
+            lambda x: not x, _GetGsName, timeout=timeout, period=period)
+
+      logging.debug('matching_names=%s, is_regex_pattern=%r',
+                    matching_names, is_regex_pattern)
+      return matching_names
+    except timeout_util.TimeoutError:
+      return None
+
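+# --- Illustrative usage sketch (editor's addition, not part of the upstream
+# chromite file). It shows how the polling helpers above are typically used;
+# the bucket URL and payload pattern below are hypothetical.
+def _example_wait_for_payload():
+  ctx = GSContext()
+  # Block until a matching full payload shows up, or give up after 5 minutes.
+  names = ctx.GetGsNamesWithWait(
+      '*_full_*',
+      'gs://some-bucket/some-board-release/R100-1234.0.0',
+      timeout=300, period=15)
+  if names is None:
+    raise GSContextException('payload never appeared')
+  return names
+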
+
+def _FirstMatch(predicate, elems):
+  """Returns the first element matching the given |predicate|.
+
+  Args:
+    predicate: A function which takes an element and returns a bool
+    elems: A sequence of elements.
+  """
+  matches = [x for x in elems if predicate(x)]
+  return matches[0] if matches else None
+
+
+def _FirstSubstring(superstring, haystack):
+  """Returns the first elem of |haystack| which is a substring of |superstring|.
+
+  Args:
+    superstring: A string to search for substrings of.
+    haystack: A sequence of strings to search through.
+  """
+  return _FirstMatch(lambda s: s in superstring, haystack)
+
+
+@contextlib.contextmanager
+def TemporaryURL(prefix):
+  """Context manager to generate a random URL.
+
+  At the end, the URL will be deleted.
+  """
+  url = '%s/chromite-temp/%s/%s/%s' % (constants.TRASH_BUCKET, prefix,
+                                       getpass.getuser(),
+                                       cros_build_lib.GetRandomString())
+  ctx = GSContext()
+  ctx.Remove(url, ignore_missing=True, recursive=True)
+  try:
+    yield url
+  finally:
+    ctx.Remove(url, ignore_missing=True, recursive=True)
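+
+
+# --- Illustrative usage sketch (editor's addition, not part of the upstream
+# chromite file). TemporaryURL yields a unique gs:// URL under the trash
+# bucket and removes it again on exit; the Copy call assumes the GSContext
+# helpers defined earlier in this module, and the local path is hypothetical.
+def _example_temporary_url():
+  ctx = GSContext()
+  with TemporaryURL('example') as url:
+    # Anything uploaded under |url| is deleted when the block exits.
+    ctx.Copy('/tmp/some_local_file', '%s/some_local_file' % url)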
diff --git a/utils/frozen_chromite/lib/locking.py b/utils/frozen_chromite/lib/locking.py
new file mode 100644
index 0000000..26e6ad4
--- /dev/null
+++ b/utils/frozen_chromite/lib/locking.py
@@ -0,0 +1,407 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Basic locking functionality."""
+
+from __future__ import print_function
+
+import contextlib
+import os
+import errno
+import fcntl
+import stat
+import tempfile
+
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import retry_util
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
+
+
+LOCKF = 'lockf'
+FLOCK = 'flock'
+
+
+class LockNotAcquiredError(Exception):
+  """Signals that the lock was not acquired."""
+
+
+class LockingError(Exception):
+  """Signals miscellaneous problems in the locking process."""
+
+
+@contextlib.contextmanager
+def _optional_timer_context(timeout):
+  """Use the timeout_util.Timeout contextmanager if timeout is set."""
+  if timeout:
+    with timeout_util.Timeout(timeout):
+      yield
+  else:
+    yield
+
+
+class _Lock(cros_build_lib.MasterPidContextManager):
+  """Base lockf based locking.  Derivatives need to override _GetFd"""
+
+  def __init__(self, description=None, verbose=True, locktype=LOCKF,
+               blocking=True, blocking_timeout=None):
+    """Initialize this instance.
+
+    Two types of locks are available: LOCKF and FLOCK.
+
+    Use LOCKF (POSIX locks) if:
+      - you need to lock a file between processes created by the
+        parallel/multiprocess libraries
+
+    Use FLOCK (BSD locks) if these scenarios apply:
+      - you need to lock a file between shell scripts running the flock program
+      - you need the lock to be bound to the fd and thus inheritable across
+        execs
+
+    Note: These two locks are completely independent; using one on a path will
+          not block using the other on the same path.
+
+    Args:
+      path: On disk pathway to lock.  Can be a directory or a file.
+      description: A description for this lock- what is it protecting?
+      verbose: Verbose logging?
+      locktype: Type of lock to use (lockf or flock).
+      blocking: If True, use a blocking lock.
+      blocking_timeout: If not None, time in seconds to wait on blocking calls.
+    """
+    cros_build_lib.MasterPidContextManager.__init__(self)
+    self._verbose = verbose
+    self.description = description
+    self._fd = None
+    self.locking_mechanism = fcntl.flock if locktype == FLOCK else fcntl.lockf
+    # Store (to log) the locktype string.
+    self.locktype = locktype
+    self.blocking = blocking
+    self.blocking_timeout = blocking_timeout
+
+  @property
+  def fd(self):
+    if self._fd is None:
+      self._fd = self._GetFd()
+      # Ensure that all derivatives of this lock can't bleed the fd
+      # across execs.
+      fcntl.fcntl(self._fd, fcntl.F_SETFD,
+                  fcntl.fcntl(self._fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
+    return self._fd
+
+  def _GetFd(self):
+    raise NotImplementedError(self, '_GetFd')
+
+  def _enforce_lock(self, flags, message):
+    # Try nonblocking first, if it fails, display the context/message,
+    # and then wait on the lock.
+    try:
+      self.locking_mechanism(self.fd, flags|fcntl.LOCK_NB)
+      return
+    except EnvironmentError as e:
+      if e.errno == errno.EDEADLK:
+        self.unlock()
+      elif e.errno != errno.EAGAIN:
+        raise
+    if self.description:
+      message = '%s: blocking (LOCK_NB) (%s) while %s' % (self.description,
+                                                          self.locktype,
+                                                          message)
+    if not self.blocking:
+      self.close()
+      raise LockNotAcquiredError(message)
+    if self._verbose:
+      logging.info(message)
+
+    try:
+      with _optional_timer_context(self.blocking_timeout):
+        self.locking_mechanism(self.fd, flags)
+    except timeout_util.TimeoutError:
+      description = self.description or 'locking._enforce_lock'
+      logging.error(
+          'Timed out after waiting %d seconds for blocking lock (%s): %s',
+          self.blocking_timeout, self.locktype, description)
+      raise
+    except EnvironmentError as e:
+      if e.errno != errno.EDEADLK:
+        message = ('%s: blocking wait failed errno %s'
+                   % (self.description, e))
+        raise
+      self.unlock()
+      self.locking_mechanism(self.fd, flags)
+    logging.debug('%s: lock has been acquired (%s), continuing.',
+                  self.description, self.locktype)
+
+  def lock(self, shared=False):
+    """Take a lock of type |shared|.
+
+    Any existing lock will be updated if need be.
+
+    Args:
+      shared: If True make the lock shared.
+
+    Returns:
+      self, allowing it to be used as a `with` target.
+
+    Raises:
+      IOError if the operation fails in some way.
+      LockNotAcquiredError if the lock couldn't be acquired (non-blocking
+        mode only).
+    """
+    self._enforce_lock(
+        fcntl.LOCK_SH if shared else fcntl.LOCK_EX,
+        'taking a %s lock' % ('shared' if shared else 'exclusive'))
+    return self
+
+  def read_lock(self, message='taking read lock'):
+    """Take a read lock (shared), downgrading from write if required.
+
+    Args:
+      message: A description of what/why this lock is being taken.
+
+    Returns:
+      self, allowing it to be used as a `with` target.
+
+    Raises:
+      IOError if the operation fails in some way.
+    """
+    self._enforce_lock(fcntl.LOCK_SH, message)
+    return self
+
+  def write_lock(self, message='taking write lock'):
+    """Take a write lock (exclusive), upgrading from read if required.
+
+    Note that if the lock state is being upgraded from read to write,
+    a deadlock potential exists- as such we *will* release the lock
+    to work around it.  Any consuming code should not assume that
+    transitioning from shared to exclusive means no one else has
+    gotten at the critical resource in between for this reason.
+
+    Args:
+      message: A description of what/why this lock is being taken.
+
+    Returns:
+      self, allowing it to be used as a `with` target.
+
+    Raises:
+      IOError if the operation fails in some way.
+    """
+    self._enforce_lock(fcntl.LOCK_EX, message)
+    return self
+
+  def unlock(self):
+    """Release any locks held.  Noop if no locks are held.
+
+    Raises:
+      IOError if the operation fails in some way.
+    """
+    if self._fd is not None:
+      logging.debug('%s: lock is being released (%s).',
+                    self.description, self.locktype)
+      self.locking_mechanism(self._fd, fcntl.LOCK_UN)
+
+  def __del__(self):
+    # TODO(ferringb): Convert this to snakeoil.weakref.WeakRefFinalizer
+    # if/when that rebasing occurs.
+    self.close()
+
+  def close(self):
+    """Release the underlying lock and close the fd."""
+    if self._fd is not None:
+      self.unlock()
+      os.close(self._fd)
+      self._fd = None
+
+  def _enter(self):
+    # Force the fd to be opened via touching the property.
+    # We do this to ensure that even if entering a context w/out a lock
+    # held, we can do locking in that critical section if the code requests it.
+    # pylint: disable=pointless-statement
+    self.fd
+    return self
+
+  def _exit(self, exc_type, exc, exc_tb):
+    try:
+      self.unlock()
+    finally:
+      self.close()
+
+  def IsLocked(self):
+    """Return True if the lock is grabbed."""
+    return bool(self._fd)
+
+
+class FileLock(_Lock):
+  """Use a specified file as a locking mechanism."""
+
+  def __init__(self, path, description=None, verbose=True,
+               locktype=LOCKF, world_writable=False, blocking=True,
+               blocking_timeout=None):
+    """Initializer for FileLock.
+
+    Args:
+      path: On disk pathway to lock.  Can be a directory or a file.
+      description: A description for this lock- what is it protecting?
+      verbose: Verbose logging?
+      locktype: Type of lock to use (lockf or flock).
+      world_writable: If true, the lock file will be created as root and be made
+        writable to all users.
+      blocking: If True, use a blocking lock.
+      blocking_timeout: If not None, time in seconds to wait on blocking calls.
+    """
+    if description is None:
+      description = 'lock %s' % (path,)
+    _Lock.__init__(self, description=description, verbose=verbose,
+                   locktype=locktype, blocking=blocking,
+                   blocking_timeout=blocking_timeout)
+    self.path = os.path.abspath(path)
+    self.world_writable = world_writable
+
+  def _GetFd(self):
+    if self.world_writable:
+      create = True
+      try:
+        create = stat.S_IMODE(os.stat(self.path).st_mode) != 0o666
+      except OSError as e:
+        if e.errno != errno.ENOENT:
+          raise
+      if create:
+        osutils.SafeMakedirs(os.path.dirname(self.path), sudo=True)
+        cros_build_lib.sudo_run(['touch', self.path], print_cmd=False)
+        cros_build_lib.sudo_run(['chmod', '666', self.path], print_cmd=False)
+
+    # If we're on py3.4 and this attribute is exposed, use it to close
+    # the threading race between open and fcntl setting; this is
+    # extremely paranoid code, but might as well.
+    cloexec = getattr(os, 'O_CLOEXEC', 0)
+    # There exist race conditions where the lock may be created by
+    # root, thus denying subsequent accesses from others. To prevent
+    # this, we create the lock with mode 0o666.
+    try:
+      value = os.umask(000)
+      fd = os.open(self.path, os.W_OK|os.O_CREAT|cloexec, 0o666)
+    finally:
+      os.umask(value)
+    return fd
+
+
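+# --- Illustrative usage sketch (editor's addition, not part of the upstream
+# chromite file). It shows the FLOCK flavor described in _Lock.__init__, so
+# shell scripts using flock(1) on the same (hypothetical) path cooperate with
+# this process.
+def _example_file_lock():
+  lock = FileLock('/tmp/example.lock', locktype=FLOCK, blocking_timeout=30)
+  with lock.write_lock('updating the shared resource'):
+    pass  # ... mutate the protected resource under the exclusive lock ...
+
+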
+class ProcessLock(_Lock):
+  """Process level locking visible to parent/child only.
+
+  This lock is basically a more robust version of what
+  multiprocessing.Lock does.  That implementation uses semaphores
+  internally which require cleanup/deallocation code to run to release
+  the lock; a SIGKILL hitting the process holding the lock violates those
+  assumptions leading to a stuck lock.
+
+  Thus this implementation is based around locking of a deleted tempfile;
+  lockf locks are guaranteed to be released once the process/fd is closed.
+  """
+
+  def _GetFd(self):
+    with tempfile.TemporaryFile() as f:
+      # We don't want to hold onto the object indefinitely; we just want
+      # the fd to a temporary inode, preferably one that isn't vfs accessible.
+      # Since TemporaryFile closes the fd once the object is GC'd, we just
+      # dupe the fd so we retain a copy, while the original TemporaryFile
+      # goes away.
+      return os.dup(f.fileno())
+
+
+class PortableLinkLock(object):
+  """A more primitive lock that relies on the atomicity of creating hardlinks.
+
+  Use this lock if you need to be compatible with shadow utils like groupadd
+  or useradd.
+  """
+
+  def __init__(self, path, max_retry=0, sleep=1):
+    """Construct an instance.
+
+    Args:
+      path: path to file to lock on.  Multiple processes attempting to lock the
+        same path will compete for a system wide lock.
+      max_retry: maximum number of times to attempt to acquire the lock.
+      sleep: See retry_util.GenericRetry's sleep parameter.
+    """
+    self._path = path
+    self._target_path = None
+    # These two poorly named variables are just passed straight through to
+    # retry_util.RetryException.
+    self._max_retry = max_retry
+    self._sleep = sleep
+
+  def __enter__(self):
+    fd, self._target_path = tempfile.mkstemp(
+        prefix=self._path + '.chromite.portablelock.')
+    os.close(fd)
+    try:
+      retry_util.RetryException(OSError, self._max_retry,
+                                os.link, self._target_path, self._path,
+                                sleep=self._sleep)
+    except OSError:
+      raise LockNotAcquiredError('Timeout while trying to lock %s' % self._path)
+    finally:
+      osutils.SafeUnlink(self._target_path)
+
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    try:
+      if self._target_path:
+        osutils.SafeUnlink(self._target_path)
+    finally:
+      osutils.SafeUnlink(self._path)
+
+
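+# --- Illustrative usage sketch (editor's addition, not part of the upstream
+# chromite file). PortableLinkLock relies only on the atomicity of hard-link
+# creation, so it can share a lock file with shadow-utils style tooling; the
+# path below is hypothetical.
+def _example_portable_link_lock():
+  with PortableLinkLock('/tmp/example-passwd.lock', max_retry=5, sleep=2):
+    pass  # ... run groupadd/useradd style edits while holding the lock ...
+
+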
+class PipeLock(object):
+  """A simple one-way lock based on pipe().
+
+  This is used when code is calling os.fork() directly and needs to synchronize
+  behavior between the parent and the child.  The same process should not try
+  to use Wait/Post as it will just see its own results.  If you need
+  bidirectional locks, you'll need to create two yourself.
+
+  Be sure to delete the lock when you're done to prevent fd leakage.
+  """
+
+  def __init__(self):
+    # TODO(vapier): Simplify this when we're Python 3 only.
+    # pylint: disable=using-constant-test
+    pipe2 = getattr(os, 'pipe2', None)
+    if pipe2:
+      cloexec = getattr(os, 'O_CLOEXEC', 0)
+      # Pylint-1.7 is unable to handle this conditional logic.
+      # pylint: disable=not-callable
+      pipes = pipe2(cloexec)
+    else:
+      pipes = os.pipe()
+    self.read_fd, self.write_fd = pipes
+
+  def Wait(self, size=1):
+    """Read |size| bytes from the pipe.
+
+    Args:
+      size: How many bytes to read.  It must match the length of |data| passed
+        by the other end during its call to Post.
+
+    Returns:
+      The data read back.
+    """
+    return os.read(self.read_fd, size)
+
+  def Post(self, data=b'!'):
+    """Write |data| to the pipe.
+
+    Args:
+      data: The data to send to the other side calling Wait.  It must be of the
+        exact length that is passed to Wait.
+    """
+    os.write(self.write_fd, data)
+
+  def __del__(self):
+    os.close(self.read_fd)
+    os.close(self.write_fd)
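+
+
+# --- Illustrative usage sketch (editor's addition, not part of the upstream
+# chromite file). It shows the intended os.fork() handshake: the child blocks
+# in Wait() until the parent calls Post().
+def _example_pipe_lock():
+  lock = PipeLock()
+  pid = os.fork()
+  if pid == 0:
+    # Child: wait for the parent's go-ahead, then do the child-side work.
+    lock.Wait()
+    os._exit(0)
+  # Parent: finish setup before releasing the child, then reap it.
+  lock.Post()
+  os.waitpid(pid, 0)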
diff --git a/utils/frozen_chromite/lib/metrics.py b/utils/frozen_chromite/lib/metrics.py
new file mode 100644
index 0000000..af47921
--- /dev/null
+++ b/utils/frozen_chromite/lib/metrics.py
@@ -0,0 +1,861 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper library around ts_mon.
+
+This library provides some wrapper functionality around ts_mon, to make it more
+friendly to developers. It also provides import safety, in case ts_mon is not
+deployed with your code.
+"""
+
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import contextlib
+import ssl
+import time
+from functools import wraps
+
+import six
+from six.moves import queue as Queue
+
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+
+try:
+  from infra_libs import ts_mon
+except (ImportError, RuntimeError):
+  ts_mon = None
+
+
+# This number is chosen because 1.16^100 seconds is about
+# 32 days. This is a good compromise between bucket size
+# and dynamic range.
+_SECONDS_BUCKET_FACTOR = 1.16
+
+# If None, we create metrics in this process. Otherwise, we send metrics via
+# this Queue to a dedicated flushing process.
+# These attributes are set by chromite.lib.ts_mon_config.SetupTsMonGlobalState.
+FLUSHING_PROCESS = None
+MESSAGE_QUEUE = None
+
+_MISSING = object()
+
+MetricCall = collections.namedtuple('MetricCall', [
+    'metric_name', 'metric_args', 'metric_kwargs',
+    'method', 'method_args', 'method_kwargs',
+    'reset_after'
+])
+
+
+def _FlushingProcessClosed():
+  """Returns whether the metrics flushing process has been closed."""
+  return (FLUSHING_PROCESS is not None and
+          FLUSHING_PROCESS.exitcode is not None)
+
+
+class ProxyMetric(object):
+  """Redirects any method calls to the message queue."""
+  def __init__(self, metric, metric_args, metric_kwargs):
+    self.metric = metric
+    self.metric_args = metric_args
+    self.reset_after = metric_kwargs.pop('reset_after', False)
+    self.metric_kwargs = metric_kwargs
+
+  def __getattr__(self, method_name):
+    """Redirects all method calls to the MESSAGE_QUEUE."""
+    def enqueue(*args, **kwargs):
+      if not _FlushingProcessClosed():
+        try:
+          MESSAGE_QUEUE.put_nowait(
+              MetricCall(
+                  metric_name=self.metric,
+                  metric_args=self.metric_args,
+                  metric_kwargs=self.metric_kwargs,
+                  method=method_name,
+                  method_args=args,
+                  method_kwargs=kwargs,
+                  reset_after=self.reset_after))
+        except Queue.Full:
+          logging.warning(
+              "Metrics queue is full; skipped sending metric '%s'",
+              self.metric)
+      else:
+        try:
+          exit_code = FLUSHING_PROCESS.exitcode
+        except AttributeError:
+          exit_code = None
+        logging.warning(
+            'Flushing process has been closed (exit code %s),'
+            " skipped sending metric '%s'",
+            exit_code,
+            self.metric)
+
+    return enqueue
+
+
+def _Indirect(fn):
+  """Decorates a function to be indirect If MESSAGE_QUEUE is set.
+
+  If MESSAGE_QUEUE is set, the indirect function will return a proxy metrics
+  object; otherwise, it behaves normally.
+  """
+  @wraps(fn)
+  def AddToQueueIfPresent(*args, **kwargs):
+    if MESSAGE_QUEUE:
+      return ProxyMetric(fn.__name__, args, kwargs)
+    else:
+      # Whether to reset the metric after the flush; this is only used by
+      # |ProxyMetric|, so remove this from the kwargs.
+      kwargs.pop('reset_after', None)
+      return fn(*args, **kwargs)
+  return AddToQueueIfPresent
+
+
+class MockMetric(object):
+  """Mock metric object, to be returned if ts_mon is not set up."""
+
+  def _mock_method(self, *args, **kwargs):
+    pass
+
+  def __getattr__(self, _):
+    return self._mock_method
+
+
+def _ImportSafe(fn):
+  """Decorator which causes |fn| to return MockMetric if ts_mon not imported."""
+  @wraps(fn)
+  def wrapper(*args, **kwargs):
+    if ts_mon:
+      return fn(*args, **kwargs)
+    else:
+      return MockMetric()
+
+  return wrapper
+
+
+class FieldSpecAdapter(object):
+  """Infers the types of fields values to work around field_spec requirement.
+
+  See: https://chromium-review.googlesource.com/c/432120/ for the change
+  which added a required field_spec argument. This class is a temporary
+  workaround to allow inferring the field_spec if is not provided.
+  """
+  FIELD_CLASSES = {} if ts_mon is None else {
+      bool: ts_mon.BooleanField,
+      int: ts_mon.IntegerField,
+      str: ts_mon.StringField,
+      six.text_type: ts_mon.StringField,
+  }
+
+  def __init__(self, metric_cls, *args, **kwargs):
+    self._metric_cls = metric_cls
+    self._args = args
+    self._kwargs = kwargs
+    self._instance = _MISSING
+
+  def __getattr__(self, prop):
+    """Return a wrapper which constructs the metric object on demand.
+
+    Args:
+      prop: The property name
+
+    Returns:
+      If self._instance has been created, the instance's .|prop| property,
+      otherwise, a wrapper function which creates the ._instance and then
+      calls the |prop| method on the instance.
+    """
+    if self._instance is not _MISSING:
+      return getattr(self._instance, prop)
+
+    def func(*args, **kwargs):
+      if self._instance is not _MISSING:
+        return getattr(self._instance, prop)(*args, **kwargs)
+      fields = FieldSpecAdapter._InferFields(prop, args, kwargs)
+      self._kwargs['field_spec'] = FieldSpecAdapter._InferFieldSpec(fields)
+      self._instance = self._metric_cls(*self._args, **self._kwargs)
+      return getattr(self._instance, prop)(*args, **kwargs)
+
+    func.__name__ = prop
+    return func
+
+  @staticmethod
+  def _InferFields(method_name, args, kwargs):
+    """Infers the fields argument.
+
+    Args:
+      method_name: The method called.
+      args: The args list
+      kwargs: The keyword args
+    """
+    if 'fields' in kwargs:
+      return kwargs['fields']
+
+    if method_name == 'increment' and args:
+      return args[0]
+
+    if len(args) >= 2:
+      return args[1]
+
+  @staticmethod
+  def _InferFieldSpec(fields):
+    """Infers the fields types from the given fields.
+
+    Args:
+      fields: A dictionary with metric fields.
+    """
+    if not fields or not ts_mon:
+      return None
+
+    return [FieldSpecAdapter.FIELD_CLASSES[type(v)](field)
+            for (field, v) in sorted(fields.items())]
+
+
+def _OptionalFieldSpec(fn):
+  """Decorates a function to allow an optional description and field_spec."""
+  @wraps(fn)
+  def wrapper(*args, **kwargs):
+    kwargs = dict(**kwargs)  # It's bad practice to mutate **kwargs
+    # Slightly different than .setdefault, this line sets a default even when
+    # the key is present (as long as the value is not truthy). Empty or None is
+    # not allowed for descriptions.
+    kwargs['description'] = kwargs.get('description') or 'No description.'
+    if 'field_spec' in kwargs and kwargs['field_spec'] is not _MISSING:
+      return fn(*args, **kwargs)
+    else:
+      return FieldSpecAdapter(fn, *args, **kwargs)
+  return wrapper
+
+
+def _Metric(fn):
+  """A pipeline of decorators to apply to our metric constructors."""
+  return _OptionalFieldSpec(_ImportSafe(_Indirect(fn)))
+
+
+# This is needed for the reset_after flag used by @Indirect.
+# pylint: disable=unused-argument
+
+@_Metric
+def CounterMetric(name, reset_after=False, description=None,
+                  field_spec=_MISSING, start_time=None):
+  """Returns a metric handle for a counter named |name|."""
+  return ts_mon.CounterMetric(name,
+                              description=description, field_spec=field_spec,
+                              start_time=start_time)
+Counter = CounterMetric
+
+
+@_Metric
+def GaugeMetric(name, reset_after=False, description=None, field_spec=_MISSING):
+  """Returns a metric handle for a gauge named |name|."""
+  return ts_mon.GaugeMetric(name, description=description,
+                            field_spec=field_spec)
+Gauge = GaugeMetric
+
+
+@_Metric
+def CumulativeMetric(name, reset_after=False, description=None,
+                     field_spec=_MISSING):
+  """Returns a metric handle for a cumulative float named |name|."""
+  return ts_mon.CumulativeMetric(name, description=description,
+                                 field_spec=field_spec)
+
+
+@_Metric
+def StringMetric(name, reset_after=False, description=None,
+                 field_spec=_MISSING):
+  """Returns a metric handle for a string named |name|."""
+  return ts_mon.StringMetric(name, description=description,
+                             field_spec=field_spec)
+String = StringMetric
+
+
+@_Metric
+def BooleanMetric(name, reset_after=False, description=None,
+                  field_spec=_MISSING):
+  """Returns a metric handle for a boolean named |name|."""
+  return ts_mon.BooleanMetric(name, description=description,
+                              field_spec=field_spec)
+Boolean = BooleanMetric
+
+
+@_Metric
+def FloatMetric(name, reset_after=False, description=None, field_spec=_MISSING):
+  """Returns a metric handle for a float named |name|."""
+  return ts_mon.FloatMetric(name, description=description,
+                            field_spec=field_spec)
+Float = FloatMetric
+
+
+@_Metric
+def CumulativeDistributionMetric(name, reset_after=False, description=None,
+                                 bucketer=None, field_spec=_MISSING):
+  """Returns a metric handle for a cumulative distribution named |name|."""
+  return ts_mon.CumulativeDistributionMetric(
+      name, description=description, bucketer=bucketer, field_spec=field_spec)
+CumulativeDistribution = CumulativeDistributionMetric
+
+
+@_Metric
+def DistributionMetric(name, reset_after=False, description=None,
+                       bucketer=None, field_spec=_MISSING):
+  """Returns a metric handle for a distribution named |name|."""
+  return ts_mon.NonCumulativeDistributionMetric(
+      name, description=description, bucketer=bucketer, field_spec=field_spec)
+Distribution = DistributionMetric
+
+
+@_Metric
+def CumulativeSmallIntegerDistribution(name, reset_after=False,
+                                       description=None, field_spec=_MISSING):
+  """Returns a metric handle for a cumulative distribution named |name|.
+
+  This differs slightly from CumulativeDistribution, in that the underlying
+  metric uses a uniform bucketer rather than a geometric one.
+
+  This metric type is suitable for holding a distribution of numbers that are
+  nonnegative integers in the range of 0 to 100.
+  """
+  return ts_mon.CumulativeDistributionMetric(
+      name,
+      bucketer=ts_mon.FixedWidthBucketer(1),
+      description=description,
+      field_spec=field_spec)
+
+
+@_Metric
+def CumulativeSecondsDistribution(name, scale=1, reset_after=False,
+                                  description=None, field_spec=_MISSING):
+  """Returns a metric handle for a cumulative distribution named |name|.
+
+  The distribution handle returned by this method is better suited than the
+  default one for recording handling times, in seconds.
+
+  This metric handle has bucketing that is optimized for time intervals
+  (in seconds) in the range of 1 second to 32 days. Use |scale| to adjust this
+  (e.g. scale=0.1 covers a range from .1 seconds to 3.2 days).
+
+  Args:
+    name: string name of metric
+    scale: scaling factor of buckets, and size of the first bucket. default: 1
+    reset_after: Should the metric be reset after reporting.
+    description: A string description of the metric.
+    field_spec: A sequence of ts_mon.Field objects to specify the field schema.
+  """
+  b = ts_mon.GeometricBucketer(growth_factor=_SECONDS_BUCKET_FACTOR,
+                               scale=scale)
+  return ts_mon.CumulativeDistributionMetric(
+      name, bucketer=b, units=ts_mon.MetricsDataUnits.SECONDS,
+      description=description, field_spec=field_spec)
+
+SecondsDistribution = CumulativeSecondsDistribution
+
+
+@_Metric
+def PercentageDistribution(
+    name, num_buckets=1000, reset_after=False,
+    description=None, field_spec=_MISSING):
+  """Returns a metric handle for a cumulative distribution for percentage.
+
+  The distribution handle returned by this method is better suited for reporting
+  percentage values than the default one. The bucketing is optimized for values
+  in [0,100].
+
+  Args:
+    name: The name of this metric.
+    num_buckets: This metric buckets the percentage values before
+        reporting. This argument controls the number of buckets that the range
+        [0,100] is divided into. The default gives you 0.1% resolution.
+    reset_after: Should the metric be reset after reporting.
+    description: A string description of the metric.
+    field_spec: A sequence of ts_mon.Field objects to specify the field schema.
+  """
+  # The last bucket actually covers [100, 100 + 1.0/num_buckets), so it
+  # corresponds to values that exactly match 100%.
+  bucket_width = 100 / num_buckets
+  b = ts_mon.FixedWidthBucketer(bucket_width, num_buckets)
+  return ts_mon.CumulativeDistributionMetric(
+      name, bucketer=b,
+      description=description, field_spec=field_spec)
+
+
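+# --- Illustrative usage sketch (editor's addition, not part of the upstream
+# chromite file). The constructors above all return a handle whose methods
+# (.increment(), .set(), .add(), ...) are safe to call even when ts_mon is
+# unavailable; the metric names and fields below are hypothetical.
+def _example_basic_metrics():
+  attempts = Counter('chromeos/example/attempts',
+                     description='Number of example attempts.')
+  attempts.increment(fields={'success': True})
+
+  depth = Gauge('chromeos/example/queue_depth',
+                description='Current depth of the example queue.')
+  depth.set(42)
+
+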
+@contextlib.contextmanager
+def SecondsTimer(name, fields=None, description=None, field_spec=_MISSING,
+                 scale=1, record_on_exception=True, add_exception_field=False):
+  """Record the time of an operation to a CumulativeSecondsDistributionMetric.
+
+  Records the time taken inside of the context block, to the
+  CumulativeSecondsDistribution named |name|, with the given fields.
+
+  Examples:
+    # Time the doSomething() call, with field values that are independent of the
+    # results of the operation.
+    with SecondsTimer('timer/name', fields={'foo': 'bar'},
+                      description='My timer',
+                      field_spec=[ts_mon.StringField('foo'),
+                                  ts_mon.BooleanField('success')]):
+      doSomething()
+
+    # Time the doSomethingElse call, with field values that depend on the
+    # results of that operation. Note that it is important that a default value
+    # is specified for these fields, in case an exception is thrown by
+    # doSomethingElse()
+    f = {'success': False, 'foo': 'bar'}
+    with SecondsTimer('timer/name', fields=f, description='My timer',
+                      field_spec=[ts_mon.StringField('foo')]) as c:
+      doSomethingElse()
+      c['success'] = True
+
+    # Incorrect Usage!
+    with SecondsTimer('timer/name', description='My timer') as c:
+      doSomething()
+      c['foo'] = bar # 'foo' is not a valid field, because no default
+                     # value for it was specified in the context constructor.
+                     # It will be silently ignored.
+
+  Args:
+    name: The name of the metric to create
+    fields: The fields of the metric to create.
+    description: A string description of the metric.
+    field_spec: A sequence of ts_mon.Field objects to specify the field schema.
+    scale: A float to scale the CumulativeSecondsDistribution buckets by.
+    record_on_exception: Whether to record metrics if an exception is raised.
+    add_exception_field: Whether to add a BooleanField('encountered_exception')
+        to the FieldSpec provided, and set its value to True iff an exception
+        was raised in the context.
+  """
+  if field_spec is not None and field_spec is not _MISSING:
+    field_spec.append(ts_mon.BooleanField('encountered_exception'))
+
+  m = CumulativeSecondsDistribution(
+      name, scale=scale, description=description, field_spec=field_spec)
+  f = fields or {}
+  f = dict(f)
+  keys = list(f)
+  t0 = _GetSystemClock()
+
+  error = True
+  try:
+    yield f
+    error = False
+  finally:
+    if record_on_exception and add_exception_field:
+      keys.append('encountered_exception')
+      f.setdefault('encountered_exception', error)
+    # Filter out keys that were not part of the initial key set. This is to
+    # avoid inconsistent fields.
+    # TODO(akeshet): Doing this filtering isn't super efficient. Would be better
+    # to implement some key-restricted subclass or wrapper around dict, and just
+    # yield that above rather than yielding a regular dict.
+    if record_on_exception or not error:
+      dt = _GetSystemClock() - t0
+      # TODO(ayatane): Handle backward clock jumps.  See _GetSystemClock.
+      if dt >= 0:
+        m.add(dt, fields={k: f[k] for k in keys})
+
+
+def SecondsTimerDecorator(name, fields=None, description=None,
+                          field_spec=_MISSING, scale=1,
+                          record_on_exception=True, add_exception_field=False):
+  """Decorator to time the duration of function calls.
+
+  Examples:
+    @SecondsTimerDecorator('timer/name', fields={'foo': 'bar'},
+                           description='My timer',
+                           field_spec=[ts_mon.StringField('foo')])
+    def Foo(bar):
+      return doStuff()
+
+    is equivalent to
+
+    def Foo(bar):
+      with SecondsTimer('timer/name', fields={'foo': 'bar'},
+                        description='My timer',
+                        field_spec=[ts_mon.StringField('foo')])
+        return doStuff()
+
+  Args:
+    name: The name of the metric to create
+    fields: The fields of the metric to create
+    description: A string description of the metric.
+    field_spec: A sequence of ts_mon.Field objects to specify the field schema.
+    scale: A float to scale the distribution by.
+    record_on_exception: Whether to record metrics if an exception is raised.
+    add_exception_field: Whether to add a BooleanField('encountered_exception')
+        to the FieldSpec provided, and set its value to True iff an exception
+        was raised in the context.
+  """
+  def decorator(fn):
+    @wraps(fn)
+    def wrapper(*args, **kwargs):
+      with SecondsTimer(name, fields=fields, description=description,
+                        field_spec=field_spec, scale=scale,
+                        record_on_exception=record_on_exception,
+                        add_exception_field=add_exception_field):
+        return fn(*args, **kwargs)
+
+    return wrapper
+
+  return decorator
+
+
+@contextlib.contextmanager
+def SecondsInstanceTimer(name, fields=None, description=None,
+                         field_spec=_MISSING, record_on_exception=True,
+                         add_exception_field=False):
+  """Record the time of an operation to a FloatMetric.
+
+  Records the time taken inside of the context block, to the
+  Float metric named |name|, with the given fields.  This is
+  a non-cumulative metric; this represents the absolute time
+  taken for a specific block.  The duration is stored in a float
+  to provide flexibility in the future for higher accuracy.
+
+  Examples:
+    # Time the doSomething() call, with field values that are independent of the
+    # results of the operation.
+    with SecondsInstanceTimer('timer/name', fields={'foo': 'bar'},
+                              description='My timer',
+                              field_spec=[ts_mon.StringField('foo'),
+                                          ts_mon.BooleanField('success')]):
+      doSomething()
+
+    # Time the doSomethingElse call, with field values that depend on the
+    # results of that operation. Note that it is important that a default value
+    # is specified for these fields, in case an exception is thrown by
+    # doSomethingElse()
+    f = {'success': False, 'foo': 'bar'}
+    with SecondsInstanceTimer('timer/name', fields=f, description='My timer',
+                              field_spec=[ts_mon.StringField('foo')]) as c:
+      doSomethingElse()
+      c['success'] = True
+
+    # Incorrect Usage!
+    with SecondsInstanceTimer('timer/name', description='My timer') as c:
+      doSomething()
+      c['foo'] = bar # 'foo' is not a valid field, because no default
+                     # value for it was specified in the context constructor.
+                     # It will be silently ignored.
+
+  Args:
+    name: The name of the metric to create
+    fields: The fields of the metric to create.
+    description: A string description of the metric.
+    field_spec: A sequence of ts_mon.Field objects to specify the field schema.
+    record_on_exception: Whether to record metrics if an exception is raised.
+    add_exception_field: Whether to add a BooleanField('encountered_exception')
+        to the FieldSpec provided, and set its value to True iff an exception
+        was raised in the context.
+
+  Yields:
+    Float-based metric measuring the duration of execution.
+  """
+  if field_spec is not None and field_spec is not _MISSING:
+    field_spec.append(ts_mon.BooleanField('encountered_exception'))
+
+  m = FloatMetric(name, description=description, field_spec=field_spec)
+  f = dict(fields or {})
+  keys = list(f)
+  t0 = _GetSystemClock()
+
+  error = True
+  try:
+    yield f
+    error = False
+  finally:
+    if record_on_exception and add_exception_field:
+      keys.append('encountered_exception')
+      f.setdefault('encountered_exception', error)
+    # Filter out keys that were not part of the initial key set. This is to
+    # avoid inconsistent fields.
+    # TODO(akeshet): Doing this filtering isn't super efficient. Would be better
+    # to implement some key-restricted subclass or wrapper around dict, and just
+    # yield that above rather than yielding a regular dict.
+    if record_on_exception or not error:
+      dt = _GetSystemClock() - t0
+      m.set(dt, fields={k: f[k] for k in keys})
+
+
+def SecondsInstanceTimerDecorator(name, fields=None, description=None,
+                                  field_spec=_MISSING,
+                                  record_on_exception=True,
+                                  add_exception_field=False):
+  """Decorator to time the gauge duration of function calls.
+
+  Examples:
+    @SecondsInstanceTimerDecorator('timer/name', fields={'foo': 'bar'},
+                                   description='My timer',
+                                   field_spec=[ts_mon.StringField('foo'),
+                                               ts_mon.BooleanField('success')]):
+
+    def Foo(bar):
+      return doStuff()
+
+    is equivalent to
+
+    def Foo(bar):
+      with SecondsInstanceTimer('timer/name', fields={'foo': 'bar'},
+                                description='My timer',
+                                field_spec=[ts_mon.StringField('foo'),
+                                            ts_mon.BooleanField('success')]):
+        return doStuff()
+
+  Args:
+    name: The name of the metric to create
+    fields: The fields of the metric to create
+    description: A string description of the metric.
+    field_spec: A sequence of ts_mon.Field objects to specify the field schema.
+    record_on_exception: Whether to record metrics if an exception is raised.
+    add_exception_field: Whether to add a BooleanField('encountered_exception')
+        to the FieldSpec provided, and set its value to True iff an exception
+        was raised in the context.
+
+  Returns:
+    A SecondsInstanceTimer metric decorator.
+  """
+  def decorator(fn):
+    @wraps(fn)
+    def wrapper(*args, **kwargs):
+      with SecondsInstanceTimer(name, fields=fields, description=description,
+                                field_spec=field_spec,
+                                record_on_exception=record_on_exception,
+                                add_exception_field=add_exception_field):
+        return fn(*args, **kwargs)
+
+    return wrapper
+
+  return decorator
+
+
+@contextlib.contextmanager
+def SuccessCounter(name, fields=None, description=None, field_spec=_MISSING):
+  """Create a counter that tracks if something succeeds.
+
+  Args:
+    name: The name of the metric to create
+    fields: The fields of the metric
+    description: A string description of the metric.
+    field_spec: A sequence of ts_mon.Field objects to specify the field schema.
+  """
+  c = Counter(name)
+  f = fields or {}
+  f = f.copy()
+  # We add in the additional field success.
+  keys = list(f) + ['success']
+  success = False
+  try:
+    yield f
+    success = True
+  finally:
+    f.setdefault('success', success)
+    f = {k: f[k] for k in keys}
+    c.increment(fields=f)
+
+
+@contextlib.contextmanager
+def Presence(name, fields=None, description=None, field_spec=_MISSING):
+  """A counter of 'active' things.
+
+  This keeps track of how many instances of |name| are active at any given
+  time. However, it's only suitable for long-running tasks, since the initial
+  True value may never be written out if the task doesn't run for at least a
+  minute.
+  """
+  b = Boolean(name, description=None, field_spec=field_spec)
+  b.set(True, fields=fields)
+  try:
+    yield
+  finally:
+    b.set(False, fields=fields)
+
+
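+# --- Illustrative usage sketch (editor's addition, not part of the upstream
+# chromite file). Presence keeps a boolean set to True for the lifetime of a
+# long-running task, while SuccessCounter bumps a counter with a success field
+# when its block exits; the metric names are hypothetical.
+def _example_success_and_presence(task):
+  with Presence('chromeos/example/task_running'):
+    with SuccessCounter('chromeos/example/task_completions'):
+      task()  # success=False is recorded if this raises, True otherwise.
+
+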
+class RuntimeBreakdownTimer(object):
+  """Record the time of an operation and the breakdown into sub-steps.
+
+  Examples:
+    with RuntimeBreakdownTimer('timer/name', fields={'foo':'bar'},
+                               description='My timer',
+                               field_spec=[ts_mon.StringField('foo')]) as timer:
+      with timer.Step('first_step'):
+        doFirstStep()
+      with timer.Step('second_step'):
+        doSecondStep()
+      # The time spent next will show up under .../timer/name/breakdown_unaccounted
+      doSomeNonStepWork()
+
+  This will emit the following metrics:
+  - .../timer/name/total_duration - A CumulativeSecondsDistribution metric for
+        the time spent inside the outer with block.
+  - .../timer/name/breakdown/first_step and
+    .../timer/name/breakdown/second_step - PercentageDistribution metrics for
+        the fraction of time devoted to each substep.
+  - .../timer/name/breakdown_unaccounted - PercentageDistribution metric for the
+        fraction of time that is not accounted for in any of the substeps.
+  - .../timer/name/bucketing_loss - The PercentageDistribution metrics bucket
+        values before reporting them, which introduces small errors because
+        each value is rounded down to its bucket's lower bound. This is a
+        CumulativeMetric measuring the total rounding error accrued in
+        reporting all the percentages. The worst case bucketing loss for x
+        steps is (x+1)/10, so if you time across 9 steps, you should expect no
+        more than 1% rounding error.
+  [experimental]
+  - .../timer/name/duration_breakdown - A Float metric, with one stream per Step
+        indicating the ratio of time spent in that step. The different steps are
+        differentiated via a field with key 'step_name'. Since some of the time
+        can be spent outside any steps, these ratios will sum to <= 1.
+
+  NB: This helper can only be used if the field values are known at the
+  beginning of the outer context and do not change as a result of any of the
+  operations timed.
+  """
+
+  PERCENT_BUCKET_COUNT = 1000
+
+  _StepMetrics = collections.namedtuple('_StepMetrics', ['name', 'time_s'])
+
+  def __init__(self, name, fields=None, description=None, field_spec=_MISSING):
+    self._name = name
+    self._fields = fields
+    self._field_spec = field_spec
+    self._description = description
+    self._outer_t0 = None
+    self._total_time_s = 0
+    self._inside_step = False
+    self._step_metrics = []
+
+  def __enter__(self):
+    self._outer_t0 = _GetSystemClock()
+    return self
+
+  def __exit__(self, _type, _value, _traceback):
+    self._RecordTotalTime()
+
+    outer_timer = CumulativeSecondsDistribution(
+        '%s/total_duration' % (self._name,),
+        field_spec=self._field_spec,
+        description=self._description)
+    outer_timer.add(self._total_time_s, fields=self._fields)
+
+    for name, percent in self._GetStepBreakdowns().items():
+      step_metric = PercentageDistribution(
+          '%s/breakdown/%s' % (self._name, name),
+          num_buckets=self.PERCENT_BUCKET_COUNT,
+          field_spec=self._field_spec,
+          description=self._description)
+      step_metric.add(percent, fields=self._fields)
+
+      fields = dict(self._fields) if self._fields is not None else dict()
+      fields['step_name'] = name
+      # TODO(pprabhu): Convert _GetStepBreakdowns() to return ratios instead of
+      # percentage when the old PercentageDistribution reporting is deleted.
+      Float('%s/duration_breakdown' % self._name).set(percent / 100,
+                                                      fields=fields)
+
+    unaccounted_metric = PercentageDistribution(
+        '%s/breakdown_unaccounted' % self._name,
+        num_buckets=self.PERCENT_BUCKET_COUNT,
+        field_spec=self._field_spec,
+        description=self._description)
+    unaccounted_metric.add(self._GetUnaccountedBreakdown(), fields=self._fields)
+
+    bucketing_loss_metric = CumulativeMetric(
+        '%s/bucketing_loss' % self._name,
+        field_spec=self._field_spec,
+        description=self._description)
+    bucketing_loss_metric.increment_by(self._GetBucketingLoss(),
+                                       fields=self._fields)
+
+  @contextlib.contextmanager
+  def Step(self, step_name):
+    """Start a new step named step_name in the timed operation.
+
+    Note that it is not possible to start a step inside a step. i.e.,
+
+    with RuntimeBreakdownTimer('timer') as timer:
+      with timer.Step('outer_step'):
+        with timer.Step('inner_step'):
+          # is not supported; the inner step is dropped and an error logged.
+
+    Args:
+      step_name: The name of the step being timed.
+    """
+    if self._inside_step:
+      logging.error('RuntimeBreakdownTimer.Step is not reentrant. '
+                    'Dropping step: %s', step_name)
+      yield
+      return
+
+    self._inside_step = True
+    t0 = _GetSystemClock()
+    try:
+      yield
+    finally:
+      self._inside_step = False
+      step_time_s = _GetSystemClock() - t0
+      # TODO(ayatane): Handle backward clock jumps.  See _GetSystemClock.
+      step_time_s = max(0, step_time_s)
+      self._step_metrics.append(self._StepMetrics(step_name, step_time_s))
+
+  def _GetStepBreakdowns(self):
+    """Returns percentage of time spent in each step.
+
+    Must be called after |_RecordTotalTime|.
+    """
+    if not self._total_time_s:
+      return {}
+    return {x.name: (x.time_s * 100) / self._total_time_s
+            for x in self._step_metrics}
+
+  def _GetUnaccountedBreakdown(self):
+    """Returns the percentage time spent outside of all steps.
+
+    Must be called after |_RecordTotalTime|.
+    """
+    breakdown_percentages = sum(self._GetStepBreakdowns().values())
+    return max(0, 100 - breakdown_percentages)
+
+  def _GetBucketingLoss(self):
+    """Compute the actual loss in reported percentages due to bucketing.
+
+    Must be called after |_RecordTotalTime|.
+    """
+    reported = list(self._GetStepBreakdowns().values())
+    reported.append(self._GetUnaccountedBreakdown())
+    bucket_width = 100 / self.PERCENT_BUCKET_COUNT
+    return sum(x % bucket_width for x in reported)
+
+  def _RecordTotalTime(self):
+    self._total_time_s = _GetSystemClock() - self._outer_t0
+    # TODO(ayatane): Handle backward clock jumps.  See _GetSystemClock.
+    self._total_time_s = max(0, self._total_time_s)
+
+
+def _GetSystemClock():
+  """Return a clock time.
+
+  The only thing that the return value can be used for is to subtract from
+  other instances to determine time elapsed.
+  """
+  # TODO(ayatane): We should use a monotonic clock to measure this,
+  # but Python 2 does not have one.
+  return time.time()
+
+
+def Flush(reset_after=()):
+  """Flushes metrics, but warns on transient errors.
+
+  Args:
+    reset_after: A list of metrics to reset after flushing.
+  """
+  if not ts_mon:
+    return
+
+  try:
+    ts_mon.flush()
+    while reset_after:
+      reset_after.pop().reset()
+  except ssl.SSLError as e:
+    logging.warning('Caught transient network error while flushing: %s', e)
+  except Exception as e:
+    logging.error('Caught exception while flushing: %s', e)
diff --git a/utils/frozen_chromite/lib/nebraska_wrapper.py b/utils/frozen_chromite/lib/nebraska_wrapper.py
new file mode 100644
index 0000000..71240cd
--- /dev/null
+++ b/utils/frozen_chromite/lib/nebraska_wrapper.py
@@ -0,0 +1,372 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing methods and classes to interact with a nebraska instance.
+"""
+
+from __future__ import print_function
+
+import base64
+import os
+import shutil
+import multiprocessing
+import subprocess
+
+from six.moves import urllib
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import gob_util
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import path_util
+from autotest_lib.utils.frozen_chromite.lib import remote_access
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
+
+
+NEBRASKA_FILENAME = 'nebraska.py'
+
+# Error msg in loading shared libraries when running python command.
+ERROR_MSG_IN_LOADING_LIB = 'error while loading shared libraries'
+
+
+class Error(Exception):
+  """Base exception class of nebraska errors."""
+
+
+class NebraskaStartupError(Error):
+  """Thrown when the nebraska fails to start up."""
+
+
+class NebraskaStopError(Error):
+  """Thrown when the nebraska fails to stop."""
+
+
+class RemoteNebraskaWrapper(multiprocessing.Process):
+  """A wrapper for nebraska.py on a remote device.
+
+  We assume there is no chroot on the device, thus we do not launch
+  nebraska inside chroot.
+  """
+  NEBRASKA_TIMEOUT = 30
+  KILL_TIMEOUT = 10
+
+  # Keep in sync with nebraska.py if not passing these directly to nebraska.
+  RUNTIME_ROOT = '/run/nebraska'
+  PID_FILE_PATH = os.path.join(RUNTIME_ROOT, 'pid')
+  PORT_FILE_PATH = os.path.join(RUNTIME_ROOT, 'port')
+  LOG_FILE_PATH = '/tmp/nebraska.log'
+  REQUEST_LOG_FILE_PATH = '/tmp/nebraska_request_log.json'
+
+  NEBRASKA_PATH = os.path.join('/usr/local/bin', NEBRASKA_FILENAME)
+
+  def __init__(self, remote_device, nebraska_bin=None,
+               update_payloads_address=None, update_metadata_dir=None,
+               install_payloads_address=None, install_metadata_dir=None,
+               ignore_appid=False):
+    """Initializes the nebraska wrapper.
+
+    Args:
+      remote_device: A remote_access.RemoteDevice object.
+      nebraska_bin: The path to the nebraska binary.
+      update_payloads_address: The root address where the payloads will be
+          served.  It can either be a local address (file://) or a remote
+          address (http://).
+      update_metadata_dir: A directory where json files for payloads required
+          for update are located.
+      install_payloads_address: Same as update_payloads_address for install
+          operations.
+      install_metadata_dir: Similar to update_metadata_dir but for install
+          payloads.
+      ignore_appid: True to tell Nebraska to ignore the update request's
+          App ID. This allows mismatching the source and target version boards.
+          One specific use case is updating between <board> and
+          <board>-kernelnext images.
+    """
+    super(RemoteNebraskaWrapper, self).__init__()
+
+    self._device = remote_device
+    self._hostname = remote_device.hostname
+
+    self._update_payloads_address = update_payloads_address
+    self._update_metadata_dir = update_metadata_dir
+    self._install_payloads_address = install_payloads_address
+    self._install_metadata_dir = install_metadata_dir
+    self._ignore_appid = ignore_appid
+
+    self._nebraska_bin = nebraska_bin or self.NEBRASKA_PATH
+
+    self._port_file = self.PORT_FILE_PATH
+    self._pid_file = self.PID_FILE_PATH
+    self._log_file = self.LOG_FILE_PATH
+
+    self._port = None
+    self._pid = None
+
+  def _RemoteCommand(self, *args, **kwargs):
+    """Runs a remote shell command.
+
+    Args:
+      *args: See remote_access.RemoteDevice documentation.
+      **kwargs: See remote_access.RemoteDevice documentation.
+    """
+    kwargs.setdefault('debug_level', logging.DEBUG)
+    return self._device.run(*args, **kwargs)
+
+  def _PortFileExists(self):
+    """Checks whether the port file exists in the remove device or not."""
+    result = self._RemoteCommand(
+        ['test', '-f', self._port_file], check=False)
+    return result.returncode == 0
+
+  def _ReadPortNumber(self):
+    """Reads the port number from the port file on the remote device."""
+    if not self.is_alive():
+      raise NebraskaStartupError('Nebraska is not alive, so no port file yet!')
+
+    try:
+      timeout_util.WaitForReturnTrue(self._PortFileExists, period=5,
+                                     timeout=self.NEBRASKA_TIMEOUT)
+    except timeout_util.TimeoutError:
+      self.terminate()
+      raise NebraskaStartupError('Timeout (%s) waiting for remote nebraska'
+                                 ' port_file' % self.NEBRASKA_TIMEOUT)
+
+    self._port = int(self._RemoteCommand(
+        ['cat', self._port_file], capture_output=True).output.strip())
+
+  def IsReady(self):
+    """Returns True if nebraska is ready to accept requests."""
+    if not self.is_alive():
+      raise NebraskaStartupError('Nebraska is not alive, so not ready!')
+
+    url = 'http://%s:%d/%s' % (remote_access.LOCALHOST_IP, self._port,
+                               'health_check')
+    # Running curl through SSH because the port on the device is not accessible
+    # by default.
+    result = self._RemoteCommand(
+        ['curl', url, '-o', '/dev/null'], check=False)
+    return result.returncode == 0
+
+  def _WaitUntilStarted(self):
+    """Wait until the nebraska has started."""
+    if not self._port:
+      self._ReadPortNumber()
+
+    try:
+      timeout_util.WaitForReturnTrue(self.IsReady,
+                                     timeout=self.NEBRASKA_TIMEOUT,
+                                     period=5)
+    except timeout_util.TimeoutError:
+      raise NebraskaStartupError('Nebraska did not start.')
+
+    self._pid = int(self._RemoteCommand(
+        ['cat', self._pid_file], capture_output=True).output.strip())
+    logging.info('Started nebraska with pid %s', self._pid)
+
+  def run(self):
+    """Launches a nebraska process on the device.
+
+    Starts a background nebraska and waits for it to finish.
+    """
+    logging.info('Starting nebraska on %s', self._hostname)
+
+    if not self._update_metadata_dir:
+      raise NebraskaStartupError(
+          'Update metadata directory location is not passed.')
+
+    cmd = [
+        'python', self._nebraska_bin,
+        '--update-metadata', self._update_metadata_dir,
+    ]
+
+    if self._update_payloads_address:
+      cmd += ['--update-payloads-address', self._update_payloads_address]
+    if self._install_metadata_dir:
+      cmd += ['--install-metadata', self._install_metadata_dir]
+    if self._install_payloads_address:
+      cmd += ['--install-payloads-address', self._install_payloads_address]
+    if self._ignore_appid:
+      cmd += ['--ignore-appid']
+
+    try:
+      self._RemoteCommand(cmd, stdout=True, stderr=subprocess.STDOUT)
+    except cros_build_lib.RunCommandError as err:
+      msg = 'Remote nebraska failed (to start): %s' % str(err)
+      logging.error(msg)
+      raise NebraskaStartupError(msg)
+
+  def Start(self):
+    """Starts the nebraska process remotely on the remote device."""
+    if self.is_alive():
+      logging.warning('Nebraska is already running, not running again.')
+      return
+
+    self.start()
+    self._WaitUntilStarted()
+
+  def Stop(self):
+    """Stops the nebraska instance if its running.
+
+    Kills the nebraska instance with SIGTERM (and SIGKILL if SIGTERM fails).
+    """
+    logging.debug('Stopping nebraska instance with pid %s', self._pid)
+    if self.is_alive():
+      self._RemoteCommand(['kill', str(self._pid)], check=False)
+    else:
+      logging.debug('Nebraska is not running, stopping nothing!')
+      return
+
+    self.join(self.KILL_TIMEOUT)
+    if self.is_alive():
+      logging.warning('Nebraska is unstoppable. Killing with SIGKILL.')
+      try:
+        self._RemoteCommand(['kill', '-9', str(self._pid)])
+      except cros_build_lib.RunCommandError as e:
+        raise NebraskaStopError('Unable to stop Nebraska: %s' % e)
+
+  def GetURL(self, ip=remote_access.LOCALHOST_IP,
+             critical_update=False, no_update=False):
+    """Returns the URL which the devserver is running on.
+
+    Args:
+      ip: The ip of running nebraska if different than localhost.
+      critical_update: Whether nebraska has to instruct the update_engine that
+          the update is a critical one or not.
+      no_update: Whether nebraska has to give a noupdate response even if it
+          detected an update.
+
+    Returns:
+      An HTTP URL that can be passed to the update_engine_client in --omaha_url
+          flag.
+    """
+    query_dict = {}
+    if critical_update:
+      query_dict['critical_update'] = True
+    if no_update:
+      query_dict['no_update'] = True
+    query_string = urllib.parse.urlencode(query_dict)
+
+    return ('http://%s:%d/update/%s' %
+            (ip, self._port, (('?%s' % query_string) if query_string else '')))
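As a usage sketch only (the |nebraska| and |device| names and the update_engine_client invocation are assumptions, not part of this module), the intended flow is to start the service, hand GetURL()'s result to update_engine_client through --omaha_url, and stop the service afterwards:

# Hypothetical usage sketch: |nebraska| is an already-constructed instance of
# this wrapper and |device| is the corresponding remote-device handle.
nebraska.Start()
try:
  url = nebraska.GetURL(critical_update=True)
  # e.g. 'http://127.0.0.1:<port>/update/?critical_update=True'
  device.run(['update_engine_client', '--omaha_url', url, '--update'])
finally:
  nebraska.Stop()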
+
+  def PrintLog(self):
+    """Print Nebraska log to stdout."""
+    if self._RemoteCommand(
+        ['test', '-f', self._log_file], check=False).returncode != 0:
+      logging.error('Nebraska log file %s does not exist on the device.',
+                    self._log_file)
+      return
+
+    result = self._RemoteCommand(['cat', self._log_file], capture_output=True)
+    output = '--- Start output from %s ---\n' % self._log_file
+    output += result.output
+    output += '--- End output from %s ---' % self._log_file
+    return output
+
+  def CollectLogs(self, target_log):
+    """Copies the nebraska logs from the device.
+
+    Args:
+      target_log: The file to copy the log to from the device.
+    """
+    try:
+      self._device.CopyFromDevice(self._log_file, target_log)
+    except (remote_access.RemoteAccessException,
+            cros_build_lib.RunCommandError) as err:
+      logging.error('Failed to copy nebraska logs from device, ignoring: %s',
+                    str(err))
+
+  def CollectRequestLogs(self, target_log):
+    """Copies the nebraska logs from the device.
+
+    Args:
+      target_log: The file to write the log to.
+    """
+    if not self.is_alive():
+      return
+
+    request_log_url = 'http://%s:%d/requestlog' % (remote_access.LOCALHOST_IP,
+                                                   self._port)
+    try:
+      self._RemoteCommand(
+          ['curl', request_log_url, '-o', self.REQUEST_LOG_FILE_PATH])
+      self._device.CopyFromDevice(self.REQUEST_LOG_FILE_PATH, target_log)
+    except (remote_access.RemoteAccessException,
+            cros_build_lib.RunCommandError) as err:
+      logging.error('Failed to get requestlog from nebraska, ignoring: %s',
+                    str(err))
+
+  def CheckNebraskaCanRun(self):
+    """Checks to see if we can start nebraska.
+
+    If the stateful partition is corrupted, Python or other packages needed for
+    rootfs update may be missing on |device|.
+
+    This will also use `ldconfig` to update library paths on the target
+    device if it looks like that's causing problems, which is necessary
+    for base images.
+
+    Raise NebraskaStartupError if nebraska cannot start.
+    """
+
+    # Try to capture the output from the command so we can dump it in the case
+    # of errors. Note that this will not work if we were requested to redirect
+    # logs to a |log_file|.
+    cmd_kwargs = {'capture_output': True, 'stderr': subprocess.STDOUT}
+    cmd = ['python', self._nebraska_bin, '--help']
+    logging.info('Checking if we can run nebraska on the device...')
+    try:
+      self._RemoteCommand(cmd, **cmd_kwargs)
+    except cros_build_lib.RunCommandError as e:
+      logging.warning('Cannot start nebraska.')
+      logging.warning(e.result.error)
+      if ERROR_MSG_IN_LOADING_LIB in str(e):
+        logging.info('Attempting to correct device library paths...')
+        try:
+          self._RemoteCommand(['ldconfig'], **cmd_kwargs)
+          self._RemoteCommand(cmd, **cmd_kwargs)
+          logging.info('Library path correction successful.')
+          return
+        except cros_build_lib.RunCommandError as e2:
+          logging.warning('Library path correction failed:')
+          logging.warning(e2.result.error)
+          raise NebraskaStartupError(e.result.error)
+
+      raise NebraskaStartupError(str(e))
+
+  @staticmethod
+  def GetNebraskaSrcFile(source_dir, force_download=False):
+    """Returns path to nebraska source file.
+
+    nebraska is copied to source_dir, either from a local file or by
+    downloading from googlesource.com.
+
+    Args:
+      source_dir: The directory to place the nebraska source file in.
+      force_download: True to always download nebraska from googlesource.com.
+    """
+    assert os.path.isdir(source_dir), ('%s must be a valid directory.'
+                                       % source_dir)
+
+    nebraska_path = os.path.join(source_dir, NEBRASKA_FILENAME)
+    checkout = path_util.DetermineCheckout()
+    if checkout.type == path_util.CHECKOUT_TYPE_REPO and not force_download:
+      # ChromeOS checkout. Copy existing file to destination.
+      local_src = os.path.join(constants.SOURCE_ROOT, 'src', 'platform',
+                               'dev', 'nebraska', NEBRASKA_FILENAME)
+      assert os.path.isfile(local_src), "%s doesn't exist" % local_src
+      shutil.copy2(local_src, source_dir)
+    else:
+      # Download from googlesource.
+      logging.info('Downloading nebraska from googlesource')
+      nebraska_url_path = '%s/+/%s/%s?format=text' % (
+          'chromiumos/platform/dev-util', 'refs/heads/main',
+          'nebraska/nebraska.py')
+      contents_b64 = gob_util.FetchUrl(constants.EXTERNAL_GOB_HOST,
+                                       nebraska_url_path)
+      osutils.WriteFile(nebraska_path,
+                        base64.b64decode(contents_b64).decode('utf-8'))
+
+    return nebraska_path
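GetNebraskaSrcFile relies on gitiles returning file contents base64-encoded when ?format=text is requested. A stand-alone sketch of that fetch (the full URL is an assumption pieced together from the constants used above; the wrapper itself goes through gob_util instead):

import base64
import urllib.request

url = ('https://chromium.googlesource.com/chromiumos/platform/dev-util/'
       '+/refs/heads/main/nebraska/nebraska.py?format=text')
with urllib.request.urlopen(url) as response:
  contents = base64.b64decode(response.read()).decode('utf-8')
print(contents[:80])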
diff --git a/utils/frozen_chromite/lib/operation.py b/utils/frozen_chromite/lib/operation.py
new file mode 100644
index 0000000..c3595c4
--- /dev/null
+++ b/utils/frozen_chromite/lib/operation.py
@@ -0,0 +1,689 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Operation, including output and progress display
+
+This module implements the concept of an operation, which has regular progress
+updates, verbose text display and perhaps some errors.
+"""
+
+from __future__ import division
+from __future__ import print_function
+
+import collections
+import contextlib
+import fcntl
+import multiprocessing
+import os
+import pty
+import re
+import struct
+import sys
+import termios
+
+from six.moves import queue as Queue
+
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import parallel
+from autotest_lib.utils.frozen_chromite.lib.terminal import Color
+from autotest_lib.utils.frozen_chromite.utils import outcap
+
+
+# Define filenames for captured stdout and stderr.
+STDOUT_FILE = 'stdout'
+STDERR_FILE = 'stderr'
+
+_TerminalSize = collections.namedtuple('_TerminalSize', ('lines', 'columns'))
+
+
+class _BackgroundTaskComplete(object):
+  """Sentinal object to indicate that the background task is complete."""
+
+
+class ProgressBarOperation(object):
+  """Wrapper around long running functions to show progress.
+
+  This class is intended to capture the output of a long running function, parse
+  the output, and display a progress bar.
+
+  To display a progress bar for a function foo with argument foo_args, this is
+  the usage case:
+    1) Create a class that inherits from ProgressBarOperation (e.g.
+    FooTypeOperation. In this class, override the ParseOutput method to parse
+    the output of foo.
+    2) op = operation.FooTypeOperation()
+       op.Run(foo, foo_args)
+  """
+
+  # Subtract 10 characters from the width of the terminal because these are
+  # used to display the percentage as well as other spaces.
+  _PROGRESS_BAR_BORDER_SIZE = 10
+
+  # By default, update the progress bar every 100 ms.
+  _PROGRESS_BAR_UPDATE_INTERVAL = 0.1
+
+  def __init__(self):
+    self._queue = multiprocessing.Queue()
+    self._stderr = None
+    self._stdout = None
+    self._stdout_path = None
+    self._stderr_path = None
+    self._progress_bar_displayed = False
+    self._isatty = os.isatty(sys.stdout.fileno())
+
+  def _GetTerminalSize(self, fd=pty.STDOUT_FILENO):
+    """Return a terminal size object for |fd|.
+
+    Note: Replace with os.get_terminal_size() in Python 3.3+.
+    """
+    winsize = struct.pack('HHHH', 0, 0, 0, 0)
+    data = fcntl.ioctl(fd, termios.TIOCGWINSZ, winsize)
+    winsize = struct.unpack('HHHH', data)
+    return _TerminalSize(int(winsize[0]), int(winsize[1]))
+
+  def ProgressBar(self, progress):
+    """This method creates and displays a progress bar.
+
+    If not in a terminal, we do not display a progress bar.
+
+    Args:
+      progress: a float between 0 and 1 that represents the fraction of the
+        current progress.
+    """
+    if not self._isatty:
+      return
+    self._progress_bar_displayed = True
+    progress = max(0.0, min(1.0, progress))
+    width = max(1, self._GetTerminalSize().columns -
+                self._PROGRESS_BAR_BORDER_SIZE)
+    block = int(width * progress)
+    shaded = '#' * block
+    unshaded = '-' * (width - block)
+    text = '\r [%s%s] %d%%' % (shaded, unshaded, progress * 100)
+    sys.stdout.write(text)
+    sys.stdout.flush()
+
+  def OpenStdoutStderr(self):
+    """Open the stdout and stderr streams."""
+    if self._stdout is None and self._stderr is None:
+      self._stdout = open(self._stdout_path, 'r')
+      self._stderr = open(self._stderr_path, 'r')
+
+  def Cleanup(self):
+    """Method to cleanup progress bar.
+
+    If progress bar has been printed, then we make sure it displays 100% before
+    exiting.
+    """
+    if self._progress_bar_displayed:
+      self.ProgressBar(1)
+      sys.stdout.write('\n')
+      sys.stdout.flush()
+
+  def ParseOutput(self, output=None):
+    """Method to parse output and update progress bar.
+
+    This method should be overridden to read and parse the lines in _stdout and
+    _stderr.
+
+    One example use of this method could be to detect 'foo' in stdout and
+    increment the progress bar every time foo is seen.
+
+    def ParseOutput(self):
+      stdout = self._stdout.read()
+      if 'foo' in stdout:
+        # Increment progress bar.
+
+    Args:
+      output: Pass in output to parse instead of reading from self._stdout and
+        self._stderr.
+    """
+    raise NotImplementedError('Subclass must override this method.')
+
+  # TODO(ralphnathan): Deprecate this function and use parallel._BackgroundTask
+  # instead (brbug.com/863)
+  def WaitUntilComplete(self, update_period):
+    """Return True if running background task has completed."""
+    try:
+      x = self._queue.get(timeout=update_period)
+      if isinstance(x, _BackgroundTaskComplete):
+        return True
+    except Queue.Empty:
+      return False
+
+  def CaptureOutputInBackground(self, func, *args, **kwargs):
+    """Launch func in background and capture its output.
+
+    Args:
+      func: Function to execute in the background and whose output is to be
+        captured.
+      log_level: Logging level to run the func at. By default, it runs at log
+        level info.
+    """
+    log_level = kwargs.pop('log_level', logging.INFO)
+    restore_log_level = logging.getLogger().getEffectiveLevel()
+    logging.getLogger().setLevel(log_level)
+    try:
+      with outcap.OutputCapturer(
+          stdout_path=self._stdout_path, stderr_path=self._stderr_path,
+          quiet_fail=False):
+        func(*args, **kwargs)
+    finally:
+      self._queue.put(_BackgroundTaskComplete())
+      logging.getLogger().setLevel(restore_log_level)
+
+  # TODO (ralphnathan): Store PID of spawned process.
+  def Run(self, func, *args, **kwargs):
+    """Run func, parse its output, and update the progress bar.
+
+    Args:
+      func: Function to execute in the background and whose output is to be
+        captured.
+      update_period: Optional argument to specify the period that output should
+        be read.
+      log_level: Logging level to run the func at. By default, it runs at log
+        level info.
+    """
+    update_period = kwargs.pop('update_period',
+                               self._PROGRESS_BAR_UPDATE_INTERVAL)
+
+    # If we are not running in a terminal device, do not display the progress
+    # bar.
+    if not self._isatty:
+      log_level = kwargs.pop('log_level', logging.INFO)
+      restore_log_level = logging.getLogger().getEffectiveLevel()
+      logging.getLogger().setLevel(log_level)
+      try:
+        func(*args, **kwargs)
+      finally:
+        logging.getLogger().setLevel(restore_log_level)
+      return
+
+    with osutils.TempDir() as tempdir:
+      self._stdout_path = os.path.join(tempdir, STDOUT_FILE)
+      self._stderr_path = os.path.join(tempdir, STDERR_FILE)
+      osutils.Touch(self._stdout_path)
+      osutils.Touch(self._stderr_path)
+      try:
+        with parallel.BackgroundTaskRunner(
+            self.CaptureOutputInBackground, func, *args, **kwargs) as queue:
+          queue.put([])
+          self.OpenStdoutStderr()
+          while True:
+            self.ParseOutput()
+            if self.WaitUntilComplete(update_period):
+              break
+        # Before we exit, parse the output again to update progress bar.
+        self.ParseOutput()
+        # Final sanity check to update the progress bar to 100% if it was used
+        # by ParseOutput
+        self.Cleanup()
+      except:
+        # Add a blank line before the logging message so the message isn't
+        # touching the progress bar.
+        sys.stdout.write('\n')
+        logging.error('Oops. Something went wrong.')
+        # Raise the exception so it can be caught again.
+        raise
+
+
+class ParallelEmergeOperation(ProgressBarOperation):
+  """ProgressBarOperation specific for scripts/parallel_emerge.py."""
+
+  def __init__(self):
+    super(ParallelEmergeOperation, self).__init__()
+    self._total = None
+    self._completed = 0
+    self._printed_no_packages = False
+    self._events = ['Fetched ', 'Completed ']
+    self._msg = None
+
+  def _GetTotal(self, output):
+    """Get total packages by looking for Total: digits packages."""
+    match = re.search(r'Total: (\d+) packages', output)
+    return int(match.group(1)) if match else None
+
+  def SetProgressBarMessage(self, msg):
+    """Message to be shown before the progress bar is displayed with 0%.
+
+    The message is not displayed if the progress bar is not going to be
+    displayed.
+    """
+    self._msg = msg
+
+  def ParseOutput(self, output=None):
+    """Parse the output of emerge to determine how to update progress bar.
+
+    1) Figure out how many packages exist. If the total number of packages to be
+    built is zero, then we do not display the progress bar.
+    2) Whenever a package is downloaded or built, 'Fetched' and 'Completed' are
+    printed respectively. By counting 'Fetched's and 'Completed's, we
+    can determine how much to update the progress bar by.
+
+    Args:
+      output: Pass in output to parse instead of reading from self._stdout and
+        self._stderr.
+
+    Returns:
+      A fraction between 0 and 1 indicating the level of the progress bar. If
+      the progress bar isn't displayed, then the return value is -1.
+    """
+    if output is None:
+      stdout = self._stdout.read()
+      stderr = self._stderr.read()
+      output = stdout + stderr
+
+    if self._total is None:
+      temp = self._GetTotal(output)
+      if temp is not None:
+        self._total = temp * len(self._events)
+        if self._msg is not None:
+          logging.notice(self._msg)
+
+    for event in self._events:
+      self._completed += output.count(event)
+
+    if not self._printed_no_packages and self._total == 0:
+      logging.notice('No packages to build.')
+      self._printed_no_packages = True
+
+    if self._total:
+      progress = self._completed / self._total
+      self.ProgressBar(progress)
+      return progress
+    else:
+      return -1
+
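ParallelEmergeOperation above is one concrete ParseOutput implementation. A hypothetical, stripped-down subclass following the same pattern from the ProgressBarOperation docstring might look like this (the class name, the 'done' marker, and the driven function are illustrative, not part of this module):

from autotest_lib.utils.frozen_chromite.lib import operation


class CountingOperation(operation.ProgressBarOperation):
  """Advances the progress bar each time the child prints 'done'."""

  def __init__(self, total_steps):
    super(CountingOperation, self).__init__()
    self._total_steps = total_steps
    self._seen = 0

  def ParseOutput(self, output=None):
    # Read whatever the captured child has produced so far and count markers.
    if output is None:
      output = self._stdout.read() + self._stderr.read()
    self._seen += output.count('done')
    self.ProgressBar(self._seen / self._total_steps)


# CountingOperation(total_steps=10).Run(some_long_running_func, arg)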
+
+# TODO(sjg): When !isatty(), keep stdout and stderr separate so they can be
+# redirected separately
+# TODO(sjg): Add proper docs to this file
+# TODO(sjg): Handle stdin wait in quiet mode, rather than silently stalling
+
+class Operation(object):
+  """Class which controls stdio and progress of an operation in progress.
+
+  This class is created to handle stdio for a running subprocess. It filters
+  the output, looking for errors and progress information. Optionally it can
+  echo stderr and stdout to the terminal, but they are normally suppressed.
+
+  Progress information is garnered from the subprocess output based on
+  knowledge of the legacy scripts, but at some point will move over to using
+  real progress information reported through new python methods which will
+  replace the scripts.
+
+  Each operation has a name, and this class handles displaying this name
+  as it reports progress.
+
+  Operation Objects
+  =================
+
+  verbose: True / False
+    In verbose mode all output from subprocesses is displayed; otherwise
+    this output is normally suppressed, unless we think it indicates an error.
+
+  progress: True / False
+    The output from subprocesses can be analysed in a very basic manner to
+    try to present progress information to the user.
+
+  explicit_verbose: True / False
+    True if the user explicitly chose the verbosity level. When False we are
+    just using the default verbosity, so verbosity may still be enabled on
+    request, since the user has not explicitly disabled it. This is used by
+    commands that the user issues with the expectation that output would
+    ordinarily be visible.
+  """
+
+  def __init__(self, name, color=None):
+    """Create a new operation.
+
+    Args:
+      name: Operation name in a form to be displayed for the user.
+      color: Determines policy for sending color to stdout; see terminal.Color
+        for details on interpretation on the value.
+    """
+    self._name = name   # Operation name.
+    self.verbose = False   # True to echo subprocess output.
+    self.progress = True   # True to report progress of the operation
+    self._column = 0    # Current output column (always 0 unless verbose).
+    self._update_len = 0    # Length of last progress update message.
+    self._line = ''   # text of current line, so far
+    self.explicit_verbose = False
+
+    self._color = Color(enabled=color)
+
+    # -1 = no newline pending
+    #  n = newline pending, and line length of last line was n
+    self._pending_nl = -1
+
+    # The type of the last stream to emit data on the current line:
+    # can be sys.stdout, sys.stderr (both from the subprocess), or None
+    # for our own messages.
+    self._cur_stream = None
+
+    self._error_count = 0   # number of error lines we have reported
+
+  def __del__(self):
+    """Object is about to be destroyed, so finish out output cleanly."""
+    self.FinishOutput()
+
+  def FinishOutput(self):
+    """Finish off any pending output.
+
+    This finishes any output line currently in progress and resets the color
+    back to normal.
+    """
+    self._FinishLine(self.verbose, final=True)
+    if self._column and self.verbose:
+      print(self._color.Stop())
+      self._column = 0
+
+  def WereErrorsDetected(self):
+    """Returns whether any errors have been detected.
+
+    Returns:
+      True if any errors have been detected in subprocess output so far.
+      False otherwise
+    """
+    return self._error_count > 0
+
+  def SetName(self, name):
+    """Set the name of the operation as displayed to the user.
+
+    Args:
+      name: Operation name.
+    """
+    self._name = name
+
+  def _FilterOutputForErrors(self, line, print_error):
+    """Filter a line of output to look for and display errors.
+
+    This uses a few regular expression searches to spot common error reports
+    from subprocesses. A count of these is kept so we know how many occurred.
+    Optionally they are displayed in red on the terminal.
+
+    Args:
+      line: the output line to filter, as a string.
+      print_error: True to print the error, False to just record it.
+    """
+    bad_things = ['Cannot GET', 'ERROR', '!!!', 'FAILED']
+    for bad_thing in bad_things:
+      if re.search(bad_thing, line, flags=re.IGNORECASE):
+        self._error_count += 1
+        if print_error:
+          print(self._color.Color(self._color.RED, line))
+          break
+
+  def _FilterOutputForProgress(self, line):
+    """Filter a line of output to look for and dispay progress information.
+
+    This uses a simple regular expression search to spot progress information
+    coming from subprocesses. This is sent to the _Progress() method.
+
+    Args:
+      line: the output line to filter, as a string.
+    """
+    match = re.match(r'Pending (\d+).*Total (\d+)', line)
+    if match:
+      pending = int(match.group(1))
+      total = int(match.group(2))
+      self._Progress(total - pending, total)
+
+  def _Progress(self, upto, total):
+    """Record and optionally display progress information.
+
+    Args:
+      upto: which step we are up to in the operation (integer, from 0).
+      total: total number of steps in the operation.
+    """
+    if total > 0:
+      update_str = '%s...%d%% (%d of %d)' % (self._name,
+                                             upto * 100 // total, upto, total)
+      if self.progress:
+        # Finish the current line, print progress, and remember its length.
+        self._FinishLine(self.verbose)
+
+        # Sometimes the progress string shrinks and in this case we need to
+        # blank out the characters at the end of the line that will not be
+        # overwritten by the new line
+        pad = max(self._update_len - len(update_str), 0)
+        sys.stdout.write(update_str + (' ' * pad) + '\r')
+        self._update_len = len(update_str)
+
+  def _FinishLine(self, display, final=False):
+    """Finish off the current line and prepare to start a new one.
+
+    If a new line is pending from the previous line, then this will be output,
+    along with a color reset if needed.
+
+    We also handle removing progress messages from the output. This is done
+    using a carriage return character, followed by spaces.
+
+    Args:
+      display: True to display output, False to suppress it
+      final: True if this is the final output before we exit, in which case
+          we must clean up any remaining progress message by overwriting
+          it with spaces, then carriage return
+    """
+    if display:
+      if self._pending_nl != -1:
+        # If our last output line was shorter than the progress info,
+        # add spaces.
+        if self._pending_nl < self._update_len:
+          print(' ' * (self._update_len - self._pending_nl), end='')
+
+        # Output the newline, and reset our counter.
+        sys.stdout.write(self._color.Stop())
+        print()
+
+    # If this is the last thing that this operation will print, we need to
+    # close things off. So if there is some text on the current line but not
+    # enough to overwrite all the progress information we have sent, add some
+    # more spaces.
+    if final and self._update_len:
+      print(' ' * self._update_len, '\r', end='')
+
+    self._pending_nl = -1
+
+  def _CheckStreamAndColor(self, stream, display):
+    """Check that we're writing to the same stream as last call.  No?  New line.
+
+    If starting a new line, set the color correctly:
+      stdout  Magenta
+      stderr  Red
+      other   White / no colors
+
+    Args:
+      stream: The stream we're going to write to.
+      display: True to display it on terms, False to suppress it.
+    """
+    if self._column > 0 and stream != self._cur_stream:
+      self._FinishLine(display)
+      if display:
+        print(self._color.Stop())
+
+      self._column = 0
+      self._line = ''
+
+    # Use colors for child output.
+    if self._column == 0:
+      self._FinishLine(display)
+      if display:
+        color = None
+        if stream == sys.stdout:
+          color = self._color.MAGENTA
+        elif stream == sys.stderr:
+          color = self._color.RED
+        if color:
+          sys.stdout.write(self._color.Start(color))
+
+      self._cur_stream = stream
+
+  def _Out(self, stream, text, display, newline=False, do_output_filter=True):
+    """Output some text received from a child, or generated internally.
+
+    This method is the guts of the Operation class since it understands how to
+    convert a series of output requests on different streams into something
+    coherent for the user.
+
+    If the stream has changed, then a new line is started even if we were
+    still halfway through the previous line. This prevents stdout and stderr
+    becoming mixed up quite so badly.
+
+    We use color to indicate lines which are stdout and stderr. If the output
+    received from the child has color codes in it already, we pass these
+    through, so our colors can be overridden. If output is redirected then we
+    do not add color by default. Note that nothing stops the child from adding
+    it, but since we present ourselves as a terminal to the child, one might
+    hope that the child will not generate color.
+
+    If display is False, then we will not actually send this text to the
+    terminal. This is used when verbose is required to be False.
+
+    Args:
+      stream: stream on which the text was received:
+        sys.stdout    - received on stdout
+        sys.stderr    - received on stderr
+        None          - generated by us / internally
+      text: text to output
+      display: True to display it on terms, False to suppress it
+      newline: True to start a new line after this text, False to put the next
+        lot of output immediately after this.
+      do_output_filter: True to look through output for errors and progress.
+    """
+    self._CheckStreamAndColor(stream, display)
+
+    # Output what we have, and remember what column we are up to.
+    if display:
+      sys.stdout.write(text)
+      self._column += len(text)
+      # If a newline is required, remember to output it later.
+      if newline:
+        self._pending_nl = self._column
+        self._column = 0
+
+    self._line += text
+
+    # If we now have a whole line, check it for errors and progress.
+    if newline:
+      if do_output_filter:
+        self._FilterOutputForErrors(self._line, print_error=not display)
+        self._FilterOutputForProgress(self._line)
+      self._line = ''
+
+  def Output(self, stream, data):
+    r"""Handle the output of a block of text from the subprocess.
+
+    All subprocess output should be sent through this method. It is split into
+    lines which are processed separately using the _Out() method.
+
+    Args:
+      stream: Which file the output came in on:
+        sys.stdout: stdout
+        sys.stderr: stderr
+        None: Our own internal output
+      data: Output data as a big string, potentially containing many lines of
+        text. Each line should end with \r\n. There is no requirement to send
+        whole lines - this method happily handles fragments and tries to
+        present them to the user as early as possible.
+
+    #TODO(sjg): Just use a list as the input parameter to avoid the split.
+    """
+    # We cannot use splitlines() here as we need this exact behavior
+    lines = data.split('\r\n')
+
+    # Output each full line, with a \n after it.
+    for line in lines[:-1]:
+      self._Out(stream, line, display=self.verbose, newline=True)
+
+    # If we have a partial line at the end, output what we have.
+    # We will continue it later.
+    if lines[-1]:
+      self._Out(stream, lines[-1], display=self.verbose)
+
+    # Flush so that the terminal will receive partial line output (now!)
+    sys.stdout.flush()
+
+  def Outline(self, line):
+    r"""Output a line of text to the display.
+
+    This outputs text generated internally, such as a warning message or error
+    summary. It ensures that our message plays nicely with child output if
+    any.
+
+    Args:
+      line: text to output (without \n on the end)
+    """
+    self._Out(None, line, display=True, newline=True)
+    self._FinishLine(display=True)
+
+  def Info(self, line):
+    r"""Output a line of information text to the display in verbose mode.
+
+    Args:
+      line: text to output (without \n on the end)
+    """
+    self._Out(None, self._color.Color(self._color.BLUE, line),
+              display=self.verbose, newline=True, do_output_filter=False)
+    self._FinishLine(display=True)
+
+  def Notice(self, line):
+    r"""Output a line of notification text to the display.
+
+    Args:
+      line: text to output (without \n on the end)
+    """
+    self._Out(None, self._color.Color(self._color.GREEN, line),
+              display=True, newline=True, do_output_filter=False)
+    self._FinishLine(display=True)
+
+  def Warning(self, line):
+    r"""Output a line of warning text to the display.
+
+    Args:
+      line: text to output (without \n on the end)
+    """
+    self._Out(None, self._color.Color(self._color.YELLOW, line),
+              display=True, newline=True, do_output_filter=False)
+    self._FinishLine(display=True)
+
+  def Error(self, line):
+    r"""Output a line of error text to the display.
+
+    Args:
+      line: text to output (without \n on the end)
+    """
+    self._Out(None, self._color.Color(self._color.RED, line),
+              display=True, newline=True, do_output_filter=False)
+    self._FinishLine(display=True)
+
+  def Die(self, line):
+    r"""Output a line of error text to the display and die.
+
+    Args:
+      line: text to output (without \n on the end)
+    """
+    self.Error(line)
+    sys.exit(1)
+
+  @contextlib.contextmanager
+  def RequestVerbose(self, request):
+    """Perform something in verbose mode if the user hasn't disallowed it
+
+    This is intended to be used with something like:
+
+      with oper.RequestVerbose(True):
+        ... do some things that generate output
+
+    Args:
+      request: True to request verbose mode if available, False to do nothing.
+    """
+    old_verbose = self.verbose
+    if request and not self.explicit_verbose:
+      self.verbose = True
+    try:
+      yield
+    finally:
+      self.verbose = old_verbose
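As a sketch of how Operation is meant to be driven (the operation name and the sample lines are assumptions), subprocess output is fed through Output(), which filters each complete line for errors and for 'Pending N ... Total M' progress information:

import sys

from autotest_lib.utils.frozen_chromite.lib import operation

oper = operation.Operation('build packages')
oper.Output(sys.stdout, 'Pending 3 of Total 10\r\n')   # shown as 70% progress
oper.Output(sys.stderr, 'ERROR: something broke\r\n')  # counted as an error
oper.Outline('subprocess finished')
if oper.WereErrorsDetected():
  oper.Error('errors were detected in the subprocess output')
oper.FinishOutput()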
diff --git a/utils/frozen_chromite/lib/osutils.py b/utils/frozen_chromite/lib/osutils.py
new file mode 100644
index 0000000..dc5b1de
--- /dev/null
+++ b/utils/frozen_chromite/lib/osutils.py
@@ -0,0 +1,1414 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common file and os related utilities, including tempdir manipulation."""
+
+from __future__ import print_function
+
+import collections
+import contextlib
+import ctypes
+import ctypes.util
+import datetime
+import errno
+import glob
+import hashlib
+import os
+import pwd
+import re
+import shutil
+import stat
+import subprocess
+import tempfile
+
+import six
+
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import retry_util
+from autotest_lib.utils.frozen_chromite.utils import key_value_store
+
+
+# Env vars that tempdir can be gotten from; minimally, this
+# needs to match python's tempfile module and match normal
+# unix standards.
+_TEMPDIR_ENV_VARS = ('TMPDIR', 'TEMP', 'TMP')
+
+
+def GetNonRootUser():
+  """Returns a non-root user. Defaults to the current user.
+
+  If the current user is root, returns the username of the person who
+  ran the emerge command. If running using sudo, returns the username
+  of the person who ran the sudo command. If no non-root user is
+  found, returns None.
+  """
+  uid = os.getuid()
+  if uid == 0:
+    user = os.environ.get('PORTAGE_USERNAME', os.environ.get('SUDO_USER'))
+  else:
+    user = pwd.getpwuid(os.getuid()).pw_name
+
+  if user == 'root':
+    return None
+  else:
+    return user
+
+
+def IsChildProcess(pid, name=None):
+  """Return True if pid is a child of the current process.
+
+  Args:
+    pid: Child pid to search for in current process's pstree.
+    name: Name of the child process.
+
+  Note:
+    This function is not foolproof. If the process tree contains weird names,
+    an incorrect match might be possible.
+  """
+  cmd = ['pstree', '-Ap', str(os.getpid())]
+  pstree = cros_build_lib.run(cmd, capture_output=True, print_cmd=False,
+                              encoding='utf-8').stdout
+  if name is None:
+    match = '(%d)' % pid
+  else:
+    match = '-%s(%d)' % (name, pid)
+  return match in pstree
+
+
+def ExpandPath(path):
+  """Returns path after passing through realpath and expanduser."""
+  return os.path.realpath(os.path.expanduser(path))
+
+
+def IsSubPath(path, other):
+  """Returns whether |path| is a sub path of |other|."""
+  path = os.path.abspath(path)
+  other = os.path.abspath(other)
+  if path == other:
+    return True
+  return path.startswith(other + os.sep)
+
+
+def AllocateFile(path, size, makedirs=False):
+  """Allocates a file of a certain |size| in |path|.
+
+  Args:
+    path: Path to allocate the file.
+    size: The length, in bytes, of the desired file.
+    makedirs: If True, create missing leading directories in the path.
+  """
+  if makedirs:
+    SafeMakedirs(os.path.dirname(path))
+
+  with open(path, 'w') as out:
+    out.truncate(size)
+
+
+# All the modes that we allow people to pass to WriteFile.  This allows us to
+# make assumptions about the input so we can update it if needed.
+_VALID_WRITE_MODES = {
+    # Read & write, but no truncation, and file offset is 0.
+    'r+', 'r+b',
+    # Writing (and maybe reading) with truncation.
+    'w', 'wb', 'w+', 'w+b',
+    # Writing (and maybe reading), but no truncation, and file offset is at end.
+    'a', 'ab', 'a+', 'a+b',
+}
+
+
+def WriteFile(path, content, mode='w', encoding=None, errors=None, atomic=False,
+              makedirs=False, sudo=False):
+  """Write the given content to disk.
+
+  Args:
+    path: Pathway to write the content to.
+    content: Content to write.  May be either an iterable, or a string.
+    mode: The mode to use when opening the file.  'w' is for text files (see the
+      following settings) and 'wb' is for binary files.  If appending, pass
+      'a', etc...
+    encoding: The encoding of the file content.  Text files default to 'utf-8'.
+    errors: How to handle encoding errors.  Text files default to 'strict'.
+    atomic: If the updating of the file should be done atomically.  Note this
+            option is incompatible w/ append mode.
+    makedirs: If True, create missing leading directories in the path.
+    sudo: If True, write the file as root.
+  """
+  if mode not in _VALID_WRITE_MODES:
+    raise ValueError('mode must be one of {"%s"}, not %r' %
+                     ('", "'.join(sorted(_VALID_WRITE_MODES)), mode))
+
+  if sudo and atomic and ('a' in mode or '+' in mode):
+    raise ValueError('append mode does not work in sudo+atomic mode')
+
+  if 'b' in mode:
+    if encoding is not None or errors is not None:
+      raise ValueError('binary mode does not use encoding/errors')
+  else:
+    if encoding is None:
+      encoding = 'utf-8'
+    if errors is None:
+      errors = 'strict'
+
+  if makedirs:
+    SafeMakedirs(os.path.dirname(path), sudo=sudo)
+
+  # TODO(vapier): We can merge encoding/errors into the open call once we are
+  # Python 3 only.  Until then, we have to handle it ourselves.
+  if 'b' in mode:
+    write_wrapper = lambda x: x
+  else:
+    mode += 'b'
+    def write_wrapper(iterable):
+      for item in iterable:
+        yield item.encode(encoding, errors)
+
+  # If the file needs to be written as root and we are not root, write to a temp
+  # file, move it and change the permission.
+  if sudo and os.getuid() != 0:
+    if 'a' in mode or '+' in mode:
+      # Use dd to run through sudo & append the output, and write the new data
+      # to it through stdin.
+      cros_build_lib.sudo_run(
+          ['dd', 'conv=notrunc', 'oflag=append', 'status=none',
+           'of=%s' % (path,)], print_cmd=False, input=content)
+
+    else:
+      with tempfile.NamedTemporaryFile(mode=mode, delete=False) as temp:
+        write_path = temp.name
+        temp.writelines(write_wrapper(
+            cros_build_lib.iflatten_instance(content)))
+      os.chmod(write_path, 0o644)
+
+      try:
+        mv_target = path if not atomic else path + '.tmp'
+        cros_build_lib.sudo_run(['mv', write_path, mv_target],
+                                print_cmd=False, stderr=True)
+        Chown(mv_target, user='root', group='root')
+        if atomic:
+          cros_build_lib.sudo_run(['mv', mv_target, path],
+                                  print_cmd=False, stderr=True)
+
+      except cros_build_lib.RunCommandError:
+        SafeUnlink(write_path)
+        SafeUnlink(mv_target)
+        raise
+
+  else:
+    # We have the right permissions, simply write the file in python.
+    write_path = path
+    if atomic:
+      write_path = path + '.tmp'
+    with open(write_path, mode) as f:
+      f.writelines(write_wrapper(cros_build_lib.iflatten_instance(content)))
+
+    if not atomic:
+      return
+
+    try:
+      os.rename(write_path, path)
+    except EnvironmentError:
+      SafeUnlink(write_path)
+      raise
+
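A short WriteFile usage sketch (the paths and contents are arbitrary examples): text mode defaults to UTF-8, atomic=True writes '<path>.tmp' and renames it over the destination, and the 'wb'/'ab' modes take bytes:

from autotest_lib.utils.frozen_chromite.lib import osutils

osutils.WriteFile('/tmp/example.conf', 'key=value\n', atomic=True,
                  makedirs=True)
osutils.WriteFile('/tmp/example.conf', 'more=data\n', mode='a')  # append
osutils.WriteFile('/tmp/example.bin', b'\x00\x01', mode='wb')    # binary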
+
+def Touch(path, makedirs=False, mode=None):
+  """Simulate unix touch. Create if doesn't exist and update its timestamp.
+
+  Args:
+    path: a string, file name of the file to touch (creating if not present).
+    makedirs: If True, create missing leading directories in the path.
+    mode: The access permissions to set.  In the style of chmod.  Defaults to
+          using the umask.
+  """
+  if makedirs:
+    SafeMakedirs(os.path.dirname(path))
+
+  # Create the file if nonexistent.
+  open(path, 'a').close()
+  if mode is not None:
+    os.chmod(path, mode)
+  # Update timestamp to right now.
+  os.utime(path, None)
+
+
+def Chown(path, user=None, group=None, recursive=False):
+  """Simple sudo chown path to the user.
+
+  Defaults to user running command. Does nothing if run as root user unless
+  a new owner is provided.
+
+  Args:
+    path: str - File/directory to chown.
+    user: str|int|None - User to chown the file to. Defaults to current user.
+    group: str|int|None - Group to assign the file to.
+    recursive: Also chown child files/directories recursively.
+  """
+  if user is None:
+    user = GetNonRootUser() or ''
+  else:
+    user = str(user)
+
+  group = '' if group is None else str(group)
+
+  if user or group:
+    cmd = ['chown']
+    if recursive:
+      cmd += ['-R']
+    cmd += ['%s:%s' % (user, group), path]
+    cros_build_lib.sudo_run(cmd, print_cmd=False,
+                            stderr=True, stdout=True)
+
+
+def ReadFile(path, mode='r', encoding=None, errors=None):
+  """Read a given file on disk.  Primarily useful for one off small files.
+
+  The defaults are geared towards reading UTF-8 encoded text.
+
+  Args:
+    path: The file to read.
+    mode: The mode to use when opening the file.  'r' is for text files (see the
+      following settings) and 'rb' is for binary files.
+    encoding: The encoding of the file content.  Text files default to 'utf-8'.
+    errors: How to handle encoding errors.  Text files default to 'strict'.
+
+  Returns:
+    The content of the file, either as bytes or a string (with the specified
+    encoding).
+  """
+  if mode not in ('r', 'rb'):
+    raise ValueError('mode may only be "r" or "rb", not %r' % (mode,))
+
+  if 'b' in mode:
+    if encoding is not None or errors is not None:
+      raise ValueError('binary mode does not use encoding/errors')
+  else:
+    if encoding is None:
+      encoding = 'utf-8'
+    if errors is None:
+      errors = 'strict'
+
+  with open(path, 'rb') as f:
+    # TODO(vapier): We can merge encoding/errors into the open call once we are
+    # Python 3 only.  Until then, we have to handle it ourselves.
+    ret = f.read()
+    if 'b' not in mode:
+      ret = ret.decode(encoding, errors)
+    return ret
+
+
+def MD5HashFile(path):
+  """Calculate the md5 hash of a given file path.
+
+  Args:
+    path: The path of the file to hash.
+
+  Returns:
+    The hex digest of the md5 hash of the file.
+  """
+  contents = ReadFile(path, mode='rb')
+  return hashlib.md5(contents).hexdigest()
+
+
+def SafeSymlink(source, dest, sudo=False):
+  """Create a symlink at |dest| pointing to |source|.
+
+  This will overwrite |dest| if it already exists. This operation is not
+  atomic.
+
+  Args:
+    source: source path.
+    dest: destination path.
+    sudo: If True, create the link as root.
+  """
+  if sudo and os.getuid() != 0:
+    cros_build_lib.sudo_run(['ln', '-sfT', source, dest],
+                            print_cmd=False, stderr=True)
+  else:
+    SafeUnlink(dest)
+    os.symlink(source, dest)
+
+
+def SafeUnlink(path, sudo=False):
+  """Unlink a file from disk, ignoring if it doesn't exist.
+
+  Returns:
+    True if the file existed and was removed, False if it didn't exist.
+  """
+  try:
+    os.unlink(path)
+    return True
+  except EnvironmentError as e:
+    if e.errno == errno.ENOENT:
+      return False
+
+    if not sudo:
+      raise
+
+  # If we're still here, we're falling back to sudo.
+  cros_build_lib.sudo_run(['rm', '--', path], print_cmd=False, stderr=True)
+  return True
+
+
+def SafeMakedirs(path, mode=0o775, sudo=False, user='root'):
+  """Make parent directories if needed.  Ignore if existing.
+
+  Args:
+    path: The path to create.  Intermediate directories will be created as
+          needed. This can be either a |Path| or |str|.
+    mode: The access permissions in the style of chmod.
+    sudo: If True, create it via sudo, thus root owned.
+    user: If |sudo| is True, run sudo as |user|.
+
+  Returns:
+    True if the directory had to be created, False if otherwise.
+
+  Raises:
+    EnvironmentError: If the makedir failed.
+    RunCommandError: If using run and the command failed for any reason.
+  """
+  if sudo and not (os.getuid() == 0 and user == 'root'):
+    if os.path.isdir(path):
+      return False
+    cros_build_lib.sudo_run(
+        ['mkdir', '-p', '--mode', '%o' % mode, str(path)], user=user,
+        print_cmd=False, stderr=True, stdout=True)
+    cros_build_lib.sudo_run(
+        ['chmod', '%o' % mode, str(path)],
+        print_cmd=False, stderr=True, stdout=True)
+    return True
+
+  try:
+    os.makedirs(path, mode)
+    # If we made the directory, force the mode.
+    os.chmod(path, mode)
+    return True
+  except EnvironmentError as e:
+    if e.errno != errno.EEXIST or not os.path.isdir(path):
+      raise
+
+  # If the mode on the directory does not match the request, log it.
+  # It is the caller's responsibility to coordinate mode values if there is a
+  # need for that.
+  if stat.S_IMODE(os.stat(path).st_mode) != mode:
+    try:
+      os.chmod(path, mode)
+    except EnvironmentError:
+      # Just make sure it's a directory.
+      if not os.path.isdir(path):
+        raise
+  return False
+
+
+class MakingDirsAsRoot(Exception):
+  """Raised when creating directories as root."""
+
+
+def SafeMakedirsNonRoot(path, mode=0o775, user=None):
+  """Create directories and make sure they are not owned by root.
+
+  See SafeMakedirs for the arguments and returns.
+  """
+  if user is None:
+    user = GetNonRootUser()
+
+  if user is None or user == 'root':
+    raise MakingDirsAsRoot('Refusing to create %s as user %s!' % (path, user))
+
+  created = False
+  should_chown = False
+  try:
+    created = SafeMakedirs(path, mode=mode, user=user)
+    if not created:
+      # Sometimes, the directory exists, but is owned by root. As a HACK, we
+      # will chown it to the requested user.
+      stat_info = os.stat(path)
+      should_chown = (stat_info.st_uid == 0)
+  except OSError as e:
+    if e.errno == errno.EACCES:
+      # Sometimes, (a prefix of the) path we're making the directory in may be
+      # owned by root, and so we fail. As a HACK, use sudo to create the
+      # directory and then chown it.
+      created = should_chown = SafeMakedirs(path, mode=mode, sudo=True)
+
+  if should_chown:
+    Chown(path, user=user)
+
+  return created
+
+
+class BadPathsException(Exception):
+  """Raised by various osutils path manipulation functions on bad input."""
+
+
+def CopyDirContents(from_dir, to_dir, symlinks=False, allow_nonempty=False):
+  """Copy contents of from_dir to to_dir. Both should exist.
+
+  shutil.copytree allows one to copy a rooted directory tree along with the
+  containing directory. OTOH, this function copies the contents of from_dir to
+  an existing directory. For example, for the given paths:
+
+  from/
+    inside/x.py
+    y.py
+  to/
+
+  shutil.copytree('from', 'to')
+  # Raises because 'to' already exists.
+
+  shutil.copytree('from', 'to/non_existent_dir')
+  to/non_existent_dir/
+    inside/x.py
+    y.py
+
+  CopyDirContents('from', 'to')
+  to/
+    inside/x.py
+    y.py
+
+  Args:
+    from_dir: The directory whose contents should be copied. Must exist. Either
+      a |Path| or a |str|.
+    to_dir: The directory to which contents should be copied. Must exist.
+      Either a |Path| or a |str|.
+    symlinks: Whether symlinks should be copied or dereferenced. When True, all
+        symlinks will be copied as symlinks into the destination. When False,
+        the symlinks will be dereferenced and the contents copied over.
+    allow_nonempty: If True, do not die when to_dir is nonempty.
+
+  Raises:
+    BadPathsException: if the source / target directories don't exist, or if
+        target directory is non-empty when allow_nonempty=False.
+    OSError: on esoteric permission errors.
+  """
+  if not os.path.isdir(from_dir):
+    raise BadPathsException('Source directory %s does not exist.' % from_dir)
+  if not os.path.isdir(to_dir):
+    raise BadPathsException('Destination directory %s does not exist.' % to_dir)
+  if os.listdir(to_dir) and not allow_nonempty:
+    raise BadPathsException('Destination directory %s is not empty.' % to_dir)
+
+  for name in os.listdir(from_dir):
+    from_path = os.path.join(from_dir, name)
+    to_path = os.path.join(to_dir, name)
+    if symlinks and os.path.islink(from_path):
+      os.symlink(os.readlink(from_path), to_path)
+    elif os.path.isdir(from_path):
+      shutil.copytree(from_path, to_path, symlinks=symlinks)
+    elif os.path.isfile(from_path):
+      shutil.copy2(from_path, to_path)
+
+
+def RmDir(path, ignore_missing=False, sudo=False):
+  """Recursively remove a directory.
+
+  Args:
+    path: Path of directory to remove. Either a |Path| or |str|.
+    ignore_missing: Do not error when path does not exist.
+    sudo: Remove directories as root.
+  """
+  # Using `sudo` is a bit expensive, so try to delete everything natively first.
+  try:
+    shutil.rmtree(path)
+    return
+  except EnvironmentError as e:
+    if ignore_missing and e.errno == errno.ENOENT:
+      return
+
+    if not sudo:
+      raise
+
+  # If we're still here, we're falling back to sudo.
+  try:
+    cros_build_lib.sudo_run(
+        ['rm', '-r%s' % ('f' if ignore_missing else '',), '--', str(path)],
+        debug_level=logging.DEBUG, stdout=True, stderr=True)
+  except cros_build_lib.RunCommandError:
+    if not ignore_missing or os.path.exists(path):
+      # If we're not ignoring the rm ENOENT equivalent, throw it;
+      # if the pathway still exists, something failed, thus throw it.
+      raise
+
+
+class EmptyDirNonExistentException(BadPathsException):
+  """EmptyDir was called on a non-existent directory without ignore_missing."""
+
+
+def EmptyDir(path, ignore_missing=False, sudo=False, exclude=()):
+  """Remove all files inside a directory, including subdirs.
+
+  Args:
+    path: Path of directory to empty.
+    ignore_missing: Do not error when path does not exist.
+    sudo: Remove directories as root.
+    exclude: Iterable of file names to exclude from the cleanup. They should
+             exactly match the file or directory name in path.
+             e.g. ['foo', 'bar']
+
+  Raises:
+    EmptyDirNonExistentException: if ignore_missing false, and dir is missing.
+    OSError: If the directory is not user writable.
+  """
+  path = ExpandPath(path)
+  exclude = set(exclude)
+
+  if not os.path.exists(path):
+    if ignore_missing:
+      return
+    raise EmptyDirNonExistentException(
+        'EmptyDir called non-existent: %s' % path)
+
+  # We don't catch OSError if path is not a directory.
+  for candidate in os.listdir(path):
+    if candidate not in exclude:
+      subpath = os.path.join(path, candidate)
+      # Both options can throw OSError if there is a permission problem.
+      if os.path.isdir(subpath):
+        RmDir(subpath, ignore_missing=ignore_missing, sudo=sudo)
+      else:
+        SafeUnlink(subpath, sudo)
+
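A sketch of the directory-cleanup helpers (the path and the excluded name are examples): EmptyDir clears a directory's contents except for the excluded entries, and RmDir removes a directory tree, with both falling back to sudo when asked:

from autotest_lib.utils.frozen_chromite.lib import osutils

osutils.EmptyDir('/tmp/scratch', ignore_missing=True, sudo=True,
                 exclude=('cache',))
osutils.RmDir('/tmp/scratch', ignore_missing=True, sudo=True)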
+
+def Which(binary, path=None, mode=os.X_OK, root=None):
+  """Return the absolute path to the specified binary.
+
+  Args:
+    binary: The binary to look for.
+    path: Search path. Defaults to os.environ['PATH'].
+    mode: File mode to check on the binary.
+    root: Path to automatically prefix to every element of |path|.
+
+  Returns:
+    The full path to |binary| if found (with the right mode). Otherwise, None.
+  """
+  if path is None:
+    path = os.environ.get('PATH', '')
+  for p in path.split(os.pathsep):
+    if root and p.startswith('/'):
+      # Don't prefix relative paths.  We might want to support this at some
+      # point, but it's not worth the coding hassle currently.
+      p = os.path.join(root, p.lstrip('/'))
+    p = os.path.join(p, binary)
+    if os.path.isfile(p) and os.access(p, mode):
+      return p
+  return None
+
+
+def FindMissingBinaries(needed_tools):
+  """Verifies that the required tools are present on the system.
+
+  This is especially important for scripts that are intended to run
+  outside the chroot.
+
+  Args:
+    needed_tools: an array of string specified binaries to look for.
+
+  Returns:
+    If all tools are found, returns the empty list. Otherwise, returns the
+    list of missing tools.
+  """
+  return [binary for binary in needed_tools if Which(binary) is None]
+
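For example (the tool names are arbitrary), a script meant to run outside the chroot can verify its dependencies up front:

from autotest_lib.utils.frozen_chromite.lib import osutils

missing = osutils.FindMissingBinaries(['curl', 'rsync', 'no-such-tool'])
if missing:
  raise SystemExit('Missing required tools: %s' % ', '.join(missing))
print(osutils.Which('sh'))  # e.g. '/bin/sh'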
+
+def DirectoryIterator(base_path):
+  """Iterates through the files and subdirs of a directory."""
+  for root, dirs, files in os.walk(base_path):
+    for e in [d + os.sep for d in dirs] + files:
+      yield os.path.join(root, e)
+
+
+def IteratePaths(end_path):
+  """Generator that iterates down to |end_path| from root /.
+
+  Args:
+    end_path: The destination. If this is a relative path, it will be resolved
+        to absolute path. In all cases, it will be normalized.
+
+  Yields:
+    All the paths gradually constructed from / to |end_path|. For example:
+    IteratePaths("/this/path") yields "/", "/this", and "/this/path".
+  """
+  return reversed(list(IteratePathParents(end_path)))
+
+
+def IteratePathParents(start_path):
+  """Generator that iterates through a directory's parents.
+
+  Args:
+    start_path: The path to start from.
+
+  Yields:
+    The passed-in path, along with its parents.  i.e.,
+    IteratePathParents('/usr/local') would yield '/usr/local', '/usr', and '/'.
+  """
+  path = os.path.abspath(start_path)
+  # There's a bug that abspath('//') returns '//'. We need to renormalize it.
+  if path == '//':
+    path = '/'
+  yield path
+  while path.strip('/'):
+    path = os.path.dirname(path)
+    yield path
+
+
+def FindInPathParents(path_to_find, start_path, test_func=None, end_path=None):
+  """Look for a relative path, ascending through parent directories.
+
+  Ascend through parent directories of current path looking for a relative
+  path.  I.e., given a directory structure like:
+  -/
+   |
+   --usr
+     |
+     --bin
+     |
+     --local
+       |
+       --google
+
+  the call FindInPathParents('bin', '/usr/local') would return '/usr/bin', and
+  the call FindInPathParents('google', '/usr/local') would return
+  '/usr/local/google'.
+
+  Args:
+    path_to_find: The relative path to look for.
+    start_path: The path to start the search from.  If |start_path| is a
+      directory, it will be included in the directories that are searched.
+    test_func: The function to use to verify the relative path.  Defaults to
+      os.path.exists.  The function will be passed one argument - the target
+      path to test.  A True return value will cause FindInPathParents to return
+      the target.
+    end_path: The path to stop searching.
+  """
+  if end_path is not None:
+    end_path = os.path.abspath(end_path)
+  if test_func is None:
+    test_func = os.path.exists
+  for path in IteratePathParents(start_path):
+    if path == end_path:
+      return None
+    target = os.path.join(path, path_to_find)
+    if test_func(target):
+      return target
+  return None
+
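A small sketch of the path-walking helpers (the '.repo' marker is an illustrative choice): IteratePathParents walks from a path up to '/', and FindInPathParents uses that walk to locate a marker relative to an ancestor directory:

import os

from autotest_lib.utils.frozen_chromite.lib import osutils

print(list(osutils.IteratePathParents('/usr/local')))
# ['/usr/local', '/usr', '/']
repo_marker = osutils.FindInPathParents('.repo', os.getcwd(),
                                        test_func=os.path.isdir)
print(repo_marker)  # e.g. '/path/to/checkout/.repo', or None if not found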
+
+def SetGlobalTempDir(tempdir_value, tempdir_env=None):
+  """Set the global temp directory to the specified |tempdir_value|
+
+  Args:
+    tempdir_value: The new location for the global temp directory.
+    tempdir_env: Optional. A list of key/value pairs to set in the
+      environment. If not provided, set all global tempdir environment
+      variables to point at |tempdir_value|.
+
+  Returns:
+    Returns (old_tempdir_value, old_tempdir_env).
+
+    old_tempdir_value: The old value of the global temp directory.
+    old_tempdir_env: A list of the key/value pairs that control the tempdir
+      environment and were set prior to this function. If the environment
+      variable was not set, it is recorded as None.
+  """
+  # pylint: disable=protected-access
+  with tempfile._once_lock:
+    old_tempdir_value = GetGlobalTempDir()
+    old_tempdir_env = tuple((x, os.environ.get(x)) for x in _TEMPDIR_ENV_VARS)
+
+    # Now update TMPDIR/TEMP/TMP, and poke the python
+    # internals to ensure all subprocess/raw tempfile
+    # access goes into this location.
+    if tempdir_env is None:
+      os.environ.update((x, tempdir_value) for x in _TEMPDIR_ENV_VARS)
+    else:
+      for key, value in tempdir_env:
+        if value is None:
+          os.environ.pop(key, None)
+        else:
+          os.environ[key] = value
+
+    # Finally, adjust python's cached value (we know it's cached by here
+    # since we invoked _get_default_tempdir from above).  Note this
+    # is necessary since we want *all* output from that point
+    # forward to go to this location.
+    tempfile.tempdir = tempdir_value
+
+  return (old_tempdir_value, old_tempdir_env)
+
+
+def GetGlobalTempDir():
+  """Get the path to the current global tempdir.
+
+  The global tempdir path can be modified through calls to SetGlobalTempDir.
+  """
+  # pylint: disable=protected-access
+  return tempfile._get_default_tempdir()
+
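A sketch of temporarily redirecting the global tempdir (the directory is an example and is assumed to already exist); SetGlobalTempDir returns the previous value and environment so they can be restored afterwards:

import tempfile

from autotest_lib.utils.frozen_chromite.lib import osutils

old_dir, old_env = osutils.SetGlobalTempDir('/tmp/my-scratch')
try:
  print(tempfile.gettempdir())  # '/tmp/my-scratch'
finally:
  osutils.SetGlobalTempDir(old_dir, old_env)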
+
+def _TempDirSetup(self, prefix='tmp', set_global=False, base_dir=None):
+  """Generate a tempdir, modifying the object, and env to use it.
+
+  Specifically, if set_global is True, then from this invocation forward,
+  python and all subprocesses will use this location for their tempdir.
+
+  The matching _TempDirTearDown restores the env to what it was.
+  """
+  # Stash the old tempdir that was used so we can
+  # switch it back on the way out.
+  self.tempdir = tempfile.mkdtemp(prefix=prefix, dir=base_dir)
+  os.chmod(self.tempdir, 0o700)
+
+  if set_global:
+    self._orig_tempdir_value, self._orig_tempdir_env = \
+        SetGlobalTempDir(self.tempdir)
+
+
+def _TempDirTearDown(self, force_sudo, delete=True):
+  # Note that _TempDirSetup may have failed, resulting in these attributes
+  # not being set; this is why we use getattr here (and must).
+  tempdir = getattr(self, 'tempdir', None)
+  try:
+    if tempdir is not None and delete:
+      RmDir(tempdir, ignore_missing=True, sudo=force_sudo)
+  except EnvironmentError as e:
+    # Suppress ENOENT since we may be invoked
+    # in a context where parallel wipes of the tempdir
+    # may be occurring; primarily during hard shutdowns.
+    if e.errno != errno.ENOENT:
+      raise
+
+  # Restore environment modification if necessary.
+  orig_tempdir_value = getattr(self, '_orig_tempdir_value', None)
+  if orig_tempdir_value is not None:
+    # pylint: disable=protected-access
+    SetGlobalTempDir(orig_tempdir_value, self._orig_tempdir_env)
+
+
+class TempDir(object):
+  """Object that creates a temporary directory.
+
+  This object can either be used as a context manager or just as a simple
+  object. The temporary directory is stored as self.tempdir in the object, and
+  is returned as a string by a 'with' statement.
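+
+  Examples:
+    # Illustrative sketch only; the prefix is arbitrary.
+    with TempDir(prefix='example') as tempdir:
+      ...  # |tempdir| is a string path that is removed on exit.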
+  """
+
+  def __init__(self, **kwargs):
+    """Constructor. Creates the temporary directory.
+
+    Args:
+      prefix: See tempfile.mkdtemp documentation.
+      base_dir: The directory to place the temporary directory.
+      set_global: Set this directory as the global temporary directory.
+      delete: Whether the temporary dir should be deleted as part of cleanup.
+          (default: True)
+      sudo_rm: Whether the temporary dir will need root privileges to remove.
+          (default: False)
+    """
+    self.kwargs = kwargs.copy()
+    self.delete = kwargs.pop('delete', True)
+    self.sudo_rm = kwargs.pop('sudo_rm', False)
+    self.tempdir = None
+    _TempDirSetup(self, **kwargs)
+
+  def SetSudoRm(self, enable=True):
+    """Sets |sudo_rm|, which forces us to delete temporary files as root."""
+    self.sudo_rm = enable
+
+  def Cleanup(self):
+    """Clean up the temporary directory."""
+    if self.tempdir is not None:
+      try:
+        _TempDirTearDown(self, self.sudo_rm, delete=self.delete)
+      finally:
+        self.tempdir = None
+
+  def __enter__(self):
+    """Return the temporary directory."""
+    return self.tempdir
+
+  def __exit__(self, exc_type, exc_value, exc_traceback):
+    try:
+      self.Cleanup()
+    except Exception:
+      if exc_type:
+        # If an exception from inside the context was already in progress,
+        # log our cleanup exception, then allow the original to resume.
+        logging.error('While exiting %s:', self, exc_info=True)
+
+        if self.tempdir:
+          # Log all files in tempdir at the time of the failure.
+          try:
+            logging.error('Directory contents were:')
+            for name in os.listdir(self.tempdir):
+              logging.error('  %s', name)
+          except OSError:
+            logging.error('  Directory did not exist.')
+
+          # Log all mounts at the time of the failure, since that's the most
+          # common cause.
+          mount_results = cros_build_lib.run(
+              ['mount'], stdout=True, stderr=subprocess.STDOUT,
+              check=False)
+          logging.error('Mounts were:')
+          logging.error('  %s', mount_results.output)
+
+      else:
+        # If there was not an exception from the context, raise ours.
+        raise
+
+  def __del__(self):
+    self.Cleanup()
+
+  def __str__(self):
+    return self.tempdir if self.tempdir else ''
+
+
+def TempDirDecorator(func):
+  """Populates self.tempdir with path to a temporary writeable directory."""
+  def f(self, *args, **kwargs):
+    with TempDir() as tempdir:
+      self.tempdir = tempdir
+      return func(self, *args, **kwargs)
+
+  f.__name__ = func.__name__
+  f.__doc__ = func.__doc__
+  f.__module__ = func.__module__
+  return f
+
+
+def TempFileDecorator(func):
+  """Populates self.tempfile with path to a temporary writeable file"""
+  def f(self, *args, **kwargs):
+    with tempfile.NamedTemporaryFile(dir=self.tempdir, delete=False) as f:
+      self.tempfile = f.name
+    return func(self, *args, **kwargs)
+
+  f.__name__ = func.__name__
+  f.__doc__ = func.__doc__
+  f.__module__ = func.__module__
+  return TempDirDecorator(f)
+
+
+# Flags synced from sys/mount.h.  See mount(2) for details.
+MS_RDONLY = 1
+MS_NOSUID = 2
+MS_NODEV = 4
+MS_NOEXEC = 8
+MS_SYNCHRONOUS = 16
+MS_REMOUNT = 32
+MS_MANDLOCK = 64
+MS_DIRSYNC = 128
+MS_NOATIME = 1024
+MS_NODIRATIME = 2048
+MS_BIND = 4096
+MS_MOVE = 8192
+MS_REC = 16384
+MS_SILENT = 32768
+MS_POSIXACL = 1 << 16
+MS_UNBINDABLE = 1 << 17
+MS_PRIVATE = 1 << 18
+MS_SLAVE = 1 << 19
+MS_SHARED = 1 << 20
+MS_RELATIME = 1 << 21
+MS_KERNMOUNT = 1 << 22
+MS_I_VERSION = 1 << 23
+MS_STRICTATIME = 1 << 24
+MS_ACTIVE = 1 << 30
+MS_NOUSER = 1 << 31
+
+
+def Mount(source, target, fstype, flags, data=''):
+  """Call the mount(2) func; see the man page for details."""
+  libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
+  # These fields might be a string or 0 (for NULL).  Convert to bytes.
+  def _MaybeEncode(s):
+    return s.encode('utf-8') if isinstance(s, six.string_types) else s
+  if libc.mount(_MaybeEncode(source), _MaybeEncode(target),
+                _MaybeEncode(fstype), ctypes.c_int(flags),
+                _MaybeEncode(data)) != 0:
+    e = ctypes.get_errno()
+    raise OSError(e, os.strerror(e))
+
+
+def MountDir(src_path, dst_path, fs_type=None, sudo=True, makedirs=True,
+             mount_opts=('nodev', 'noexec', 'nosuid'), skip_mtab=False,
+             **kwargs):
+  """Mount |src_path| at |dst_path|
+
+  Args:
+    src_path: Source of the new mount.
+    dst_path: Where to mount things.
+    fs_type: Specify the filesystem type to use.  Defaults to autodetect.
+    sudo: Run through sudo.
+    makedirs: Create |dst_path| if it doesn't exist.
+    mount_opts: List of options to pass to `mount`.
+    skip_mtab: Whether to write new entries to /etc/mtab.
+    kwargs: Pass all other args to run.
+  """
+  if sudo:
+    runcmd = cros_build_lib.sudo_run
+  else:
+    runcmd = cros_build_lib.run
+
+  if makedirs:
+    SafeMakedirs(dst_path, sudo=sudo)
+
+  cmd = ['mount', src_path, dst_path]
+  if skip_mtab:
+    cmd += ['-n']
+  if fs_type:
+    cmd += ['-t', fs_type]
+  if mount_opts:
+    cmd += ['-o', ','.join(mount_opts)]
+  runcmd(cmd, **kwargs)
+
+
+def MountTmpfsDir(path, name='osutils.tmpfs', size='5G',
+                  mount_opts=('nodev', 'noexec', 'nosuid'), **kwargs):
+  """Mount a tmpfs at |path|
+
+  Args:
+    path: Directory to mount the tmpfs.
+    name: Friendly name to include in mount output.
+    size: Size of the temp fs.
+    mount_opts: List of options to pass to `mount`.
+    kwargs: Pass all other args to MountDir.
+  """
+  mount_opts = list(mount_opts) + ['size=%s' % size]
+  MountDir(name, path, fs_type='tmpfs', mount_opts=mount_opts, **kwargs)
+
+
+def UmountDir(path, lazy=True, sudo=True, cleanup=True):
+  """Unmount a previously mounted temp fs mount.
+
+  Args:
+    path: Directory to unmount.
+    lazy: Whether to do a lazy unmount.
+    sudo: Run through sudo.
+    cleanup: Whether to delete the |path| after unmounting.
+             Note: Does not work when |lazy| is set.
+  """
+  if sudo:
+    runcmd = cros_build_lib.sudo_run
+  else:
+    runcmd = cros_build_lib.run
+
+  cmd = ['umount', '-d', path]
+  if lazy:
+    cmd += ['-l']
+  runcmd(cmd, debug_level=logging.DEBUG)
+
+  if cleanup:
+    # We will randomly get EBUSY here even when the umount worked.  Suspect
+    # this is due to the host distro doing stupid crap on us like autoscanning
+    # directories when they get mounted.
+    def _retry(e):
+      # When we're using `rm` (which is required for sudo), we can't cleanly
+      # detect the aforementioned failure.  This is because `rm` will see the
+      # errno, handle itself, and then do exit(1).  Which means all we see is
+      # that rm failed.  Assume it's this issue as -rf will ignore most things.
+      if isinstance(e, cros_build_lib.RunCommandError):
+        return True
+      elif isinstance(e, OSError):
+        # When we aren't using sudo, we do the unlink ourselves, so the exact
+        # errno is bubbled up to us and we can detect it specifically without
+        # potentially ignoring all other possible failures.
+        return e.errno == errno.EBUSY
+      else:
+        # Something else, we don't know so do not retry.
+        return False
+    retry_util.GenericRetry(_retry, 60, RmDir, path, sudo=sudo, sleep=1)
+
+
+def UmountTree(path):
+  """Unmounts |path| and any submounts under it."""
+  # Scrape it from /proc/mounts since it's easily accessible;
+  # additionally, unmount in reverse order of what's listed there
+  # rather than trying a reverse sorting; it's possible for
+  # mount /z /foon
+  # mount /foon/blah -o loop /a
+  # which reverse sorting cannot handle.
+  path = os.path.realpath(path).rstrip('/') + '/'
+  mounts = [mtab.destination for mtab in IterateMountPoints() if
+            mtab.destination.startswith(path) or
+            mtab.destination == path.rstrip('/')]
+
+  for mount_pt in reversed(mounts):
+    UmountDir(mount_pt, lazy=False, cleanup=False)
+
+
+def SetEnvironment(env):
+  """Restore the environment variables to that of passed in dictionary."""
+  os.environ.clear()
+  os.environ.update(env)
+
+
+def SourceEnvironment(script, whitelist, ifs=',', env=None, multiline=False):
+  """Returns the environment exported by a shell script.
+
+  Note that the script is actually executed (sourced), so do not use this on
+  files that have side effects (such as modifying the file system).  Stdout will
+  be sent to /dev/null, so just echoing is OK.
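+
+  Examples:
+    # Illustrative sketch only; the script path and variable names are
+    # hypothetical.
+    env = SourceEnvironment('/path/to/config.sh', ('FOO', 'BAR'))
+    foo_value = env.get('FOO')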
+
+  Args:
+    script: The shell script to 'source'.
+    whitelist: An iterable of environment variables to retrieve values for.
+    ifs: When showing arrays, what separator to use.
+    env: A dict of the initial env to pass down.  You can also pass it None
+         (to clear the env) or True (to preserve the current env).
+    multiline: Allow a variable to span multiple lines.
+
+  Returns:
+    A dictionary containing the values of the whitelisted environment
+    variables that are set.
+  """
+  dump_script = ['source "%s" >/dev/null' % script,
+                 'IFS="%s"' % ifs]
+  for var in whitelist:
+    # Note: If we want to get more exact results out of bash, we should switch
+    # to using `declare -p "${var}"`.  It would require writing a custom parser
+    # here, but it would be more robust.
+    dump_script.append(
+        '[[ "${%(var)s+set}" == "set" ]] && echo "%(var)s=\\"${%(var)s[*]}\\""'
+        % {'var': var})
+  dump_script.append('exit 0')
+
+  if env is None:
+    env = {}
+  elif env is True:
+    env = None
+  output = cros_build_lib.run(['bash'], env=env, capture_output=True,
+                              print_cmd=False, encoding='utf-8',
+                              input='\n'.join(dump_script)).output
+  return key_value_store.LoadData(output, multiline=multiline)
+
+
+def ListBlockDevices(device_path=None, in_bytes=False):
+  """Lists all block devices.
+
+  Args:
+    device_path: device path (e.g. /dev/sdc).
+    in_bytes: whether to display size in bytes.
+
+  Returns:
+    A list of BlockDevice items with attributes 'NAME', 'RM', 'TYPE',
+    'SIZE' (RM stands for removable).
+  """
+  keys = ['NAME', 'RM', 'TYPE', 'SIZE']
+  BlockDevice = collections.namedtuple('BlockDevice', keys)
+
+  cmd = ['lsblk', '--pairs']
+  if in_bytes:
+    cmd.append('--bytes')
+
+  if device_path:
+    cmd.append(device_path)
+
+  cmd += ['--output', ','.join(keys)]
+  result = cros_build_lib.dbg_run(cmd, capture_output=True, encoding='utf-8')
+  devices = []
+  for line in result.stdout.strip().splitlines():
+    d = {}
+    for k, v in re.findall(r'(\S+?)=\"(.+?)\"', line):
+      d[k] = v
+
+    devices.append(BlockDevice(**d))
+
+  return devices
+
+
+def GetDeviceInfo(device, keyword='model'):
+  """Get information of |device| by searching through device path.
+
+    Looks for the file named |keyword| in the path upwards from
+    /sys/block/|device|/device. This path is a symlink and will be fully
+    expanded when searching.
+
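+  Examples:
+    # Illustrative sketch only; 'sdc' is a hypothetical device.
+    model = GetDeviceInfo('sdc', keyword='model')
+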
+  Args:
+    device: Device name (e.g. 'sdc').
+    keyword: The filename to look for (e.g. product, model).
+
+  Returns:
+    The content of the |keyword| file.
+  """
+  device_path = os.path.join('/sys', 'block', device)
+  if not os.path.isdir(device_path):
+    raise ValueError('%s is not a valid device path.' % device_path)
+
+  path_list = ExpandPath(os.path.join(device_path, 'device')).split(os.path.sep)
+  while len(path_list) > 2:
+    target = os.path.join(os.path.sep.join(path_list), keyword)
+    if os.path.isfile(target):
+      return ReadFile(target).strip()
+
+    path_list = path_list[:-1]
+
+
+def GetDeviceSize(device_path, in_bytes=False):
+  """Returns the size of |device|.
+
+  Args:
+    device_path: Device path (e.g. '/dev/sdc').
+    in_bytes: If set True, returns the size in bytes.
+
+  Returns:
+    Size of the device in human readable format unless |in_bytes| is set.
+  """
+  devices = ListBlockDevices(device_path=device_path, in_bytes=in_bytes)
+  for d in devices:
+    if d.TYPE == 'disk':
+      return int(d.SIZE) if in_bytes else d.SIZE
+
+  raise ValueError('No size info of %s is found.' % device_path)
+
+
+FileInfo = collections.namedtuple(
+    'FileInfo', ['path', 'owner', 'size', 'atime', 'mtime'])
+
+
+def StatFilesInDirectory(path, recursive=False, to_string=False):
+  """Stat files in the directory |path|.
+
+  Args:
+    path: Path to the target directory.
+    recursive: Whether to recursively list all files in |path|.
+    to_string: Whether to return a string containing the metadata of the
+      files.
+
+  Returns:
+    If |to_string| is False, returns a list of FileInfo objects. Otherwise,
+    returns a string of metadata of the files.
+  """
+  path = ExpandPath(path)
+  def ToFileInfo(path, stat_val):
+    return FileInfo(path,
+                    pwd.getpwuid(stat_val.st_uid)[0],
+                    stat_val.st_size,
+                    datetime.datetime.fromtimestamp(stat_val.st_atime),
+                    datetime.datetime.fromtimestamp(stat_val.st_mtime))
+
+  file_infos = []
+  for root, dirs, files in os.walk(path, topdown=True):
+    for filename in dirs + files:
+      filepath = os.path.join(root, filename)
+      file_infos.append(ToFileInfo(filepath, os.lstat(filepath)))
+
+    if not recursive:
+      # Process only the top-most directory.
+      break
+
+  if not to_string:
+    return file_infos
+
+  msg = 'Listing the content of %s' % path
+  msg_format = ('Path: {x.path}, Owner: {x.owner}, Size: {x.size} bytes, '
+                'Accessed: {x.atime}, Modified: {x.mtime}')
+  msg = '%s\n%s' % (msg,
+                    '\n'.join([msg_format.format(x=x) for x in file_infos]))
+  return msg
+
+
+@contextlib.contextmanager
+def ChdirContext(target_dir):
+  """A context manager to chdir() into |target_dir| and back out on exit.
+
+  Args:
+    target_dir: A target directory to chdir into.
+  """
+
+  cwd = os.getcwd()
+  os.chdir(target_dir)
+  try:
+    yield
+  finally:
+    os.chdir(cwd)
+
+
+def _SameFileSystem(path1, path2):
+  """Determine whether two paths are on the same filesystem.
+
+  Be resilient to nonsense paths. Return False instead of blowing up.
+  """
+  try:
+    return os.stat(path1).st_dev == os.stat(path2).st_dev
+  except OSError:
+    return False
+
+
+class MountOverlayContext(object):
+  """A context manager for mounting an OverlayFS directory.
+
+  An overlay filesystem will be mounted at |mount_dir|, and will be unmounted
+  when the context exits.
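+
+  Examples:
+    # Illustrative sketch only; all three paths are hypothetical.
+    with MountOverlayContext('/ro/lower', '/rw/upper', '/mnt/merged'):
+      ...  # /mnt/merged presents the merged overlay view.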
+  """
+
+  OVERLAY_FS_MOUNT_ERRORS = (32,)
+  def __init__(self, lower_dir, upper_dir, mount_dir, cleanup=False):
+    """Initialize.
+
+    Args:
+      lower_dir: The lower directory (read-only).
+      upper_dir: The upper directory (read-write).
+      mount_dir: The mount point for the merged overlay.
+      cleanup: Whether to remove the mount point after unmounting. This uses an
+          internal retry logic for cases where unmount is successful but the
+          directory still appears busy, and is generally more resilient than
+          removing it independently.
+    """
+    self._lower_dir = lower_dir
+    self._upper_dir = upper_dir
+    self._mount_dir = mount_dir
+    self._cleanup = cleanup
+    self.tempdir = None
+
+  def __enter__(self):
+    # Upstream Kernel 3.18 and the ubuntu backport of overlayfs have different
+    # APIs. We must support both.
+    try_legacy = False
+    stashed_e_overlay_str = None
+
+    # We must ensure that upperdir and workdir are on the same filesystem.
+    if _SameFileSystem(self._upper_dir, GetGlobalTempDir()):
+      _TempDirSetup(self)
+    elif _SameFileSystem(self._upper_dir, os.path.dirname(self._upper_dir)):
+      _TempDirSetup(self, base_dir=os.path.dirname(self._upper_dir))
+    else:
+      logging.debug('Could not find a workdir on the same filesystem as %s. '
+                    'Trying legacy API instead.',
+                    self._upper_dir)
+      try_legacy = True
+
+    if not try_legacy:
+      try:
+        MountDir('overlay', self._mount_dir, fs_type='overlay', makedirs=False,
+                 mount_opts=('lowerdir=%s' % self._lower_dir,
+                             'upperdir=%s' % self._upper_dir,
+                             'workdir=%s' % self.tempdir),
+                 quiet=True)
+      except cros_build_lib.RunCommandError as e_overlay:
+        if e_overlay.result.returncode not in self.OVERLAY_FS_MOUNT_ERRORS:
+          raise
+        logging.debug('Failed to mount overlay filesystem. Trying legacy API.')
+        stashed_e_overlay_str = str(e_overlay)
+        try_legacy = True
+
+    if try_legacy:
+      try:
+        MountDir('overlayfs', self._mount_dir, fs_type='overlayfs',
+                 makedirs=False,
+                 mount_opts=('lowerdir=%s' % self._lower_dir,
+                             'upperdir=%s' % self._upper_dir),
+                 quiet=True)
+      except cros_build_lib.RunCommandError as e_overlayfs:
+        logging.error('All attempts at mounting overlay filesystem failed.')
+        if stashed_e_overlay_str is not None:
+          logging.error('overlay: %s', stashed_e_overlay_str)
+        logging.error('overlayfs: %s', str(e_overlayfs))
+        raise
+
+    return self
+
+  def __exit__(self, exc_type, exc_value, traceback):
+    UmountDir(self._mount_dir, cleanup=self._cleanup)
+    _TempDirTearDown(self, force_sudo=True)
+
+
+MountInfo = collections.namedtuple(
+    'MountInfo',
+    'source destination filesystem options')
+
+
+def IterateMountPoints(proc_file='/proc/mounts'):
+  """Iterate over all mounts as reported by "/proc/mounts".
+
+  Args:
+    proc_file: A path to a file whose content is similar to /proc/mounts.
+      Default to "/proc/mounts" itself.
+
+  Returns:
+    A generator that yields MountInfo objects.
+  """
+  with open(proc_file) as f:
+    for line in f:
+      # Escape any \xxx to a char.
+      source, destination, filesystem, options, _, _ = [
+          re.sub(r'\\([0-7]{3})', lambda m: chr(int(m.group(1), 8)), x)
+          for x in line.split()
+      ]
+      mtab = MountInfo(source, destination, filesystem, options)
+      yield mtab
+
+
+def IsMounted(path):
+  """Determine if |path| is already mounted or not."""
+  path = os.path.realpath(path).rstrip('/')
+  mounts = [mtab.destination for mtab in IterateMountPoints()]
+  if path in mounts:
+    return True
+
+  return False
+
+
+def ResolveSymlinkInRoot(file_name, root):
+  """Resolve a symlink |file_name| relative to |root|.
+
+  This can be used to resolve absolute symlinks within an alternative root
+  path (i.e. chroot). For example:
+
+    ROOT-A/absolute_symlink --> /an/abs/path
+    ROOT-A/relative_symlink --> a/relative/path
+
+    absolute_symlink will be resolved to ROOT-A/an/abs/path
+    relative_symlink will be resolved to ROOT-A/a/relative/path
+
+  Args:
+    file_name (str): A path to the file.
+    root (str|None): A path to the root directory.
+
+  Returns:
+    |file_name| if |file_name| is not a symlink. Otherwise, the ultimate path
+    that |file_name| points to, with links resolved relative to |root|.
+  """
+  count = 0
+  while os.path.islink(file_name):
+    count += 1
+    if count > 128:
+      raise ValueError('Too many link levels for %s.' % file_name)
+    link = os.readlink(file_name)
+    if link.startswith('/'):
+      file_name = os.path.join(root, link[1:]) if root else link
+    else:
+      file_name = os.path.join(os.path.dirname(file_name), link)
+  return file_name
+
+
+def ResolveSymlink(file_name):
+  """Resolve a symlink |file_name| to an absolute path.
+
+  This is similar to ResolveSymlinkInRoot, but does not resolve absolute
+  symlinks to an alternative root, and normalizes the path before returning.
+
+  Args:
+    file_name (str): The symlink.
+
+  Returns:
+    str - |file_name| if |file_name| is not a symlink. Otherwise, the ultimate
+    path that |file_name| points to.
+  """
+  return os.path.realpath(ResolveSymlinkInRoot(file_name, None))
+
+
+def IsInsideVm():
+  """Return True if we are running inside a virtual machine.
+
+  The detection is based on the model of the hard drive.
+  """
+  for blk_model in glob.glob('/sys/block/*/device/model'):
+    if os.path.isfile(blk_model):
+      model = ReadFile(blk_model)
+      if model.startswith('VBOX') or model.startswith('VMware'):
+        return True
+
+  return False
diff --git a/utils/frozen_chromite/lib/parallel.py b/utils/frozen_chromite/lib/parallel.py
new file mode 100644
index 0000000..bc894f3
--- /dev/null
+++ b/utils/frozen_chromite/lib/parallel.py
@@ -0,0 +1,848 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for running cbuildbot stages in the background."""
+
+from __future__ import print_function
+
+import collections
+import contextlib
+import ctypes
+import errno
+import functools
+import multiprocessing
+from multiprocessing.managers import SyncManager
+import os
+import signal
+import sys
+import time
+import traceback
+
+import six
+from six.moves import queue as Queue
+
+from autotest_lib.utils.frozen_chromite.lib import failures_lib
+from autotest_lib.utils.frozen_chromite.lib import results_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import signals
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
+
+
+_BUFSIZE = 1024
+
+
+class HackTimeoutSyncManager(SyncManager):
+  """Increase the process join timeout in SyncManager.
+
+  The timeout for the manager process to join in the core library is
+  too low. The process is often killed before shutting down properly,
+  resulting in temporary directories (pymp-xxx) not being cleaned
+  up. This class increases the default timeout.
+  """
+
+  @staticmethod
+  def _finalize_manager(process, *args, **kwargs):
+    """Shutdown the manager process."""
+
+    def _join(functor, *args, **kwargs):
+      timeout = kwargs.get('timeout')
+      if timeout is not None and timeout < 1:
+        kwargs['timeout'] = 1
+
+      functor(*args, **kwargs)
+
+    process.join = functools.partial(_join, process.join)
+    SyncManager._finalize_manager(process, *args, **kwargs)
+
+
+def IgnoreSigintAndSigterm():
+  """Ignores any future SIGINTs and SIGTERMs."""
+  signal.signal(signal.SIGINT, signal.SIG_IGN)
+  signal.signal(signal.SIGTERM, signal.SIG_IGN)
+
+
+def Manager():
+  """Create a background process for managing interprocess communication.
+
+  This manager wraps multiprocessing.Manager() and ensures that any sockets
+  created during initialization are created under the /tmp tree rather than in a
+  custom temp directory. This is needed because TMPDIR might be really long, and
+  named sockets are limited to 108 characters.
+
+  Examples:
+    with Manager() as manager:
+      queue = manager.Queue()
+      ...
+
+  Returns:
+    The return value of multiprocessing.Manager()
+  """
+  # Use a short directory in /tmp. Do not use /tmp directly to keep these
+  # temporary files together and because certain environments do not like too
+  # many top-level paths in /tmp (see crbug.com/945523).
+  # Make it mode 1777 to mirror /tmp, so that we don't have failures when root
+  # calls parallel first, and some other user calls it later.
+  tmp_dir = '/tmp/chromite.parallel.%d' % os.geteuid()
+  osutils.SafeMakedirs(tmp_dir, mode=0o1777)
+  old_tempdir_value, old_tempdir_env = osutils.SetGlobalTempDir(tmp_dir)
+  try:
+    m = HackTimeoutSyncManager()
+    # SyncManager doesn't handle KeyboardInterrupt exceptions well; pipes get
+    # broken and E_NOENT or E_PIPE errors are thrown from various places. We
+    # can just ignore SIGINT in the SyncManager and things will close properly
+    # when the enclosing with-statement exits.
+    m.start(IgnoreSigintAndSigterm)
+    return m
+  finally:
+    osutils.SetGlobalTempDir(old_tempdir_value, old_tempdir_env)
+
+
+class BackgroundFailure(failures_lib.CompoundFailure):
+  """Exception to show a step failed while running in a background process."""
+
+
+class ProcessExitTimeout(Exception):
+  """Raised if a process cannot exit within the timeout."""
+
+
+class ProcessUnexpectedExit(Exception):
+  """Raised if a process exits unexpectedly."""
+
+
+class ProcessSilentTimeout(Exception):
+  """Raised when there is no output for a prolonged period of time."""
+
+
+class UnexpectedException(Exception):
+  """Raised when exception occurs at an unexpected place."""
+
+
+class _BackgroundTask(multiprocessing.Process):
+  """Run a task in the background.
+
+  This task may be the 'Run' function from a buildbot stage or just a plain
+  function. It will be run in the background. Output from this task is saved
+  to a temporary file and is printed when the 'Wait' function is called.
+  """
+
+  # The time we give Python to startup and exit.
+  STARTUP_TIMEOUT = 60 * 5
+  EXIT_TIMEOUT = 60 * 10
+
+  # The time we allow processes to be silent. This is in place so that we
+  # eventually catch hanging processes, and print the remainder of our output.
+  # Do not increase this. Instead, adjust your program to print regular progress
+  # updates, so that cbuildbot (and buildbot) can know that it has not hung.
+  SILENT_TIMEOUT = 60 * 145
+
+  # The amount by which we reduce the SILENT_TIMEOUT every time we launch
+  # a subprocess. This helps ensure that children get a chance to enforce the
+  # SILENT_TIMEOUT prior to the parents enforcing it.
+  SILENT_TIMEOUT_STEP = 30
+  MINIMUM_SILENT_TIMEOUT = 60 * 135
+
+  # The time before terminating or killing a task.
+  SIGTERM_TIMEOUT = 30
+  SIGKILL_TIMEOUT = 60
+
+  # How long we allow debug commands to run (so we don't hang while trying to
+  # recover from a hang).
+  DEBUG_CMD_TIMEOUT = 60
+
+  # Interval we check for updates from print statements.
+  PRINT_INTERVAL = 1
+
+  def __init__(self, task, queue, semaphore=None, task_args=None,
+               task_kwargs=None):
+    """Create a new _BackgroundTask object.
+
+    If semaphore is supplied, it will be acquired for the duration of the
+    steps that are run in the background. This can be used to limit the
+    number of simultaneous parallel tasks.
+
+    Args:
+      task: The task (a functor) to run in the background.
+      queue: A queue to be used for managing communication between the parent
+        and child process. This queue must be valid for the length of the
+        life of the child process, until the parent has collected its status.
+      semaphore: The lock to hold while |task| runs.
+      task_args: A list of args to pass to the |task|.
+      task_kwargs: A dict of optional args to pass to the |task|.
+    """
+    multiprocessing.Process.__init__(self)
+    self._task = task
+    self._queue = queue
+    self._semaphore = semaphore
+    self._started = multiprocessing.Event()
+    self._killing = multiprocessing.Event()
+    self._output = None
+    self._parent_pid = None
+    self._task_args = task_args if task_args else ()
+    self._task_kwargs = task_kwargs if task_kwargs else {}
+
+  def _WaitForStartup(self):
+    # TODO(davidjames): Use python-2.7 syntax to simplify this.
+    self._started.wait(self.STARTUP_TIMEOUT)
+    msg = 'Process failed to start in %d seconds' % self.STARTUP_TIMEOUT
+    assert self._started.is_set(), msg
+
+  @classmethod
+  def _DebugRunCommand(cls, cmd, **kwargs):
+    """Swallow any exception run raises.
+
+    Since these commands are for purely informational purposes, we don't want
+    random issues causing the bot to die.
+
+    Returns:
+      Stdout on success
+    """
+    log_level = kwargs['debug_level']
+    try:
+      with timeout_util.Timeout(cls.DEBUG_CMD_TIMEOUT):
+        return cros_build_lib.run(cmd, **kwargs).output
+    except (cros_build_lib.RunCommandError, timeout_util.TimeoutError) as e:
+      logging.log(log_level, 'Running %s failed: %s', cmd[0], str(e))
+      return ''
+
+  # Debug commands to run in gdb.  A class member so tests can stub it out.
+  GDB_COMMANDS = (
+      'info proc all',
+      'info threads',
+      'thread apply all py-list',
+      'thread apply all py-bt',
+      'thread apply all bt',
+      'detach',
+  )
+
+  @classmethod
+  def _DumpDebugPid(cls, log_level, pid):
+    """Dump debug info about the hanging |pid|."""
+    pid = str(pid)
+    commands = (
+        ('pstree', '-Apals', pid),
+        ('lsof', '-p', pid),
+    )
+    for cmd in commands:
+      cls._DebugRunCommand(cmd, debug_level=log_level, check=False,
+                           log_output=True, encoding='utf-8')
+
+    stdin = '\n'.join(['echo \\n>>> %s\\n\n%s' % (x, x)
+                       for x in cls.GDB_COMMANDS])
+    cmd = ('gdb', '--nx', '-q', '-p', pid, '-ex', 'set prompt',)
+    cls._DebugRunCommand(cmd, debug_level=log_level, check=False,
+                         log_output=True, input=stdin, encoding='utf-8')
+
+  def Kill(self, sig, log_level, first=False):
+    """Kill process with signal, ignoring if the process is dead.
+
+    Args:
+      sig: Signal to send.
+      log_level: The log level of log messages.
+      first: Whether this is the first signal we've sent.
+    """
+    self._killing.set()
+    self._WaitForStartup()
+    if logging.getLogger().isEnabledFor(log_level):
+      # Dump debug information about the hanging process.
+      logging.log(log_level, 'Killing %r (sig=%r %s)', self.pid, sig,
+                  signals.StrSignal(sig))
+
+      if first:
+        ppid = str(self.pid)
+        output = self._DebugRunCommand(
+            ('pgrep', '-P', ppid), debug_level=log_level, print_cmd=False,
+            check=False, capture_output=True)
+        for pid in [ppid] + output.splitlines():
+          self._DumpDebugPid(log_level, pid)
+
+    try:
+      os.kill(self.pid, sig)
+    except OSError as ex:
+      if ex.errno != errno.ESRCH:
+        raise
+
+  def Cleanup(self, silent=False):
+    """Wait for a process to exit."""
+    if os.getpid() != self._parent_pid or self._output is None:
+      return
+    try:
+      # Print output from subprocess.
+      if not silent and logging.getLogger().isEnabledFor(logging.DEBUG):
+        with open(self._output.name, 'r') as f:
+          for line in f:
+            logging.debug(line.rstrip('\n'))
+    finally:
+      # Clean up our temporary file.
+      osutils.SafeUnlink(self._output.name)
+      self._output.close()
+      self._output = None
+
+  def Wait(self):
+    """Wait for the task to complete.
+
+    Output from the task is printed as it runs.
+
+    If an exception occurs, return a string containing the traceback.
+    """
+    try:
+      # Flush stdout and stderr to be sure no output is interleaved.
+      sys.stdout.flush()
+      sys.stderr.flush()
+
+      # File position pointers are shared across processes, so we must open
+      # our own file descriptor to ensure output is not lost.
+      self._WaitForStartup()
+      silent_death_time = time.time() + self.SILENT_TIMEOUT
+      results = []
+      with open(self._output.name, 'r') as output:
+        pos = 0
+        running, exited_cleanly, task_errors, run_errors = (True, False, [], [])
+        while running:
+          # Check whether the process is still alive.
+          running = self.is_alive()
+
+          try:
+            errors, results = \
+                self._queue.get(True, self.PRINT_INTERVAL)
+            if errors:
+              task_errors.extend(errors)
+
+            running = False
+            exited_cleanly = True
+          except Queue.Empty:
+            pass
+
+          if not running:
+            # Wait for the process to actually exit. If the child doesn't exit
+            # in a timely fashion, kill it.
+            self.join(self.EXIT_TIMEOUT)
+            if self.exitcode is None:
+              msg = '%r hung for %r seconds' % (self, self.EXIT_TIMEOUT)
+              run_errors.extend(
+                  failures_lib.CreateExceptInfo(ProcessExitTimeout(msg), ''))
+              self._KillChildren([self])
+            elif not exited_cleanly:
+              msg = ('%r exited unexpectedly with code %s'
+                     % (self, self.exitcode))
+              run_errors.extend(
+                  failures_lib.CreateExceptInfo(ProcessUnexpectedExit(msg), ''))
+
+          # Read output from process.
+          output.seek(pos)
+          buf = output.read(_BUFSIZE)
+
+          if buf:
+            silent_death_time = time.time() + self.SILENT_TIMEOUT
+          elif running and time.time() > silent_death_time:
+            msg = ('No output from %r for %r seconds' %
+                   (self, self.SILENT_TIMEOUT))
+            run_errors.extend(
+                failures_lib.CreateExceptInfo(ProcessSilentTimeout(msg), ''))
+            self._KillChildren([self])
+
+            # Read remaining output from the process.
+            output.seek(pos)
+            buf = output.read(_BUFSIZE)
+            running = False
+
+          # Print output so far.
+          while buf:
+            sys.stdout.write(buf)
+            pos += len(buf)
+            if len(buf) < _BUFSIZE:
+              break
+            buf = output.read(_BUFSIZE)
+
+          # Print error messages if anything exceptional occurred.
+          if run_errors:
+            logging.PrintBuildbotStepFailure()
+            traceback.print_stack()
+            logging.warning('\n'.join(x.str for x in run_errors if x))
+            logging.info('\n'.join(x.str for x in task_errors if x))
+
+          sys.stdout.flush()
+          sys.stderr.flush()
+
+      # Propagate any results.
+      for result in results:
+        results_lib.Results.Record(*result)
+
+    finally:
+      self.Cleanup(silent=True)
+
+    # If an error occurred, return it.
+    return run_errors + task_errors
+
+  def start(self):
+    """Invoke multiprocessing.Process.start after flushing output/err."""
+    if self.SILENT_TIMEOUT < self.MINIMUM_SILENT_TIMEOUT:
+      raise AssertionError('Maximum recursion depth exceeded in %r' % self)
+
+    sys.stdout.flush()
+    sys.stderr.flush()
+    tmp_dir = '/tmp/chromite.parallel.%d' % os.geteuid()
+    osutils.SafeMakedirs(tmp_dir, mode=0o1777)
+    self._output = cros_build_lib.UnbufferedNamedTemporaryFile(
+        delete=False, dir=tmp_dir, prefix='chromite-parallel-')
+    self._parent_pid = os.getpid()
+    return multiprocessing.Process.start(self)
+
+  def run(self):
+    """Run the list of steps."""
+    if self._semaphore is not None:
+      self._semaphore.acquire()
+
+    errors = failures_lib.CreateExceptInfo(
+        UnexpectedException('Unexpected exception in %r' % self), '')
+    pid = os.getpid()
+    try:
+      errors = self._Run()
+    finally:
+      if not self._killing.is_set() and os.getpid() == pid:
+        results = results_lib.Results.Get()
+        self._queue.put((errors, results))
+        if self._semaphore is not None:
+          self._semaphore.release()
+
+  def _Run(self):
+    """Internal method for running the list of steps."""
+    # Register a handler for a signal that is rarely used.
+    def trigger_bt(_sig_num, frame):
+      logging.error('pre-kill notification (SIGXCPU); traceback:\n%s',
+                    ''.join(traceback.format_stack(frame)))
+    signal.signal(signal.SIGXCPU, trigger_bt)
+
+    sys.stdout.flush()
+    sys.stderr.flush()
+    errors = []
+    # Send all output to a named temporary file.
+    with open(self._output.name, 'wb', 0) as output:
+      # Back up sys.std{err,out}. These aren't used, but we keep a copy so
+      # that they aren't garbage collected. We intentionally don't restore
+      # the old stdout and stderr at the end, because we want shutdown errors
+      # to also be sent to the same log file.
+      _orig_stdout, _orig_stderr = sys.stdout, sys.stderr
+
+      # Replace std{out,err} with unbuffered file objects.
+      os.dup2(output.fileno(), sys.__stdout__.fileno())
+      os.dup2(output.fileno(), sys.__stderr__.fileno())
+      # The API of these funcs changed between versions.
+      if sys.version_info.major < 3:
+        sys.stdout = os.fdopen(sys.__stdout__.fileno(), 'w', 0)
+        sys.stderr = os.fdopen(sys.__stderr__.fileno(), 'w', 0)
+      else:
+        sys.stdout = os.fdopen(sys.__stdout__.fileno(), 'w', closefd=False)
+        sys.stderr = os.fdopen(sys.__stderr__.fileno(), 'w', closefd=False)
+
+      try:
+        self._started.set()
+        results_lib.Results.Clear()
+
+        # Reduce the silent timeout by the prescribed amount.
+        cls = self.__class__
+        cls.SILENT_TIMEOUT -= cls.SILENT_TIMEOUT_STEP
+
+        # Actually launch the task.
+        self._task(*self._task_args, **self._task_kwargs)
+      except failures_lib.StepFailure as ex:
+        errors.extend(failures_lib.CreateExceptInfo(
+            ex, traceback.format_exc()))
+      except BaseException as ex:
+        errors.extend(failures_lib.CreateExceptInfo(
+            ex, traceback.format_exc()))
+        if self._killing.is_set():
+          traceback.print_exc()
+      finally:
+        sys.stdout.flush()
+        sys.stderr.flush()
+
+    return errors
+
+  @classmethod
+  def _KillChildren(cls, bg_tasks, log_level=logging.WARNING):
+    """Kill a deque of background tasks.
+
+    This is needed to prevent hangs in the case where child processes refuse
+    to exit.
+
+    Args:
+      bg_tasks: A list filled with _BackgroundTask objects.
+      log_level: The log level of log messages.
+    """
+    logging.log(log_level, 'Killing tasks: %r', bg_tasks)
+    siglist = (
+        (signal.SIGXCPU, cls.SIGTERM_TIMEOUT),
+        (signal.SIGTERM, cls.SIGKILL_TIMEOUT),
+        (signal.SIGKILL, None),
+    )
+    first = True
+    for sig, timeout in siglist:
+      # Send signal to all tasks.
+      for task in bg_tasks:
+        task.Kill(sig, log_level, first)
+      first = False
+
+      # Wait for all tasks to exit, if requested.
+      if timeout is None:
+        for task in bg_tasks:
+          task.join()
+          task.Cleanup()
+        break
+
+      # Wait until timeout expires.
+      end_time = time.time() + timeout
+      while bg_tasks:
+        time_left = end_time - time.time()
+        if time_left <= 0:
+          break
+        task = bg_tasks[-1]
+        task.join(time_left)
+        if task.exitcode is not None:
+          task.Cleanup()
+          bg_tasks.pop()
+
+  @classmethod
+  @contextlib.contextmanager
+  def ParallelTasks(cls, steps, max_parallel=None, halt_on_error=False):
+    """Run a list of functions in parallel.
+
+    This function launches the provided functions in the background, yields,
+    and then waits for the functions to exit.
+
+    The output from the functions is saved to a temporary file and printed as if
+    they were run in sequence.
+
+    If exceptions occur in the steps, we join together the tracebacks and print
+    them after all parallel tasks have finished running. Further, a
+    BackgroundFailure is raised with full stack traces of all exceptions.
+
+    Args:
+      steps: A list of functions to run.
+      max_parallel: The maximum number of simultaneous tasks to run in parallel.
+        By default, run all tasks in parallel.
+      halt_on_error: After the first exception occurs, halt any running steps,
+        and squelch any further output, including any exceptions that might
+        occur.
+    """
+
+    semaphore = None
+    if max_parallel is not None:
+      semaphore = multiprocessing.Semaphore(max_parallel)
+
+    # First, start all the steps.
+    with Manager() as manager:
+      bg_tasks = collections.deque()
+      for step in steps:
+        task = cls(step, queue=manager.Queue(), semaphore=semaphore)
+        task.start()
+        bg_tasks.append(task)
+
+      foreground_except = None
+      try:
+        yield
+      except BaseException:
+        foreground_except = sys.exc_info()
+      finally:
+        errors = []
+        skip_bg_wait = halt_on_error and foreground_except is not None
+        # Wait for each step to complete.
+        while not skip_bg_wait and bg_tasks:
+          task = bg_tasks.popleft()
+          task_errors = task.Wait()
+          if task_errors:
+            errors.extend(task_errors)
+            if halt_on_error:
+              break
+
+        # If there are still tasks left, kill them.
+        if bg_tasks:
+          cls._KillChildren(bg_tasks, log_level=logging.DEBUG)
+
+        # Propagate any exceptions; foreground exceptions take precedence.
+        if foreground_except is not None:
+          # contextlib ignores caught exceptions unless explicitly re-raised.
+          six.reraise(foreground_except[0], foreground_except[1],
+                      foreground_except[2])
+        if errors:
+          raise BackgroundFailure(exc_infos=errors)
+
+  @staticmethod
+  def TaskRunner(queue, task, onexit=None, task_args=None, task_kwargs=None):
+    """Run task(*input) for each input in the queue.
+
+    Returns when it encounters an _AllTasksComplete object on the queue.
+    If exceptions occur, save them off and re-raise them as a
+    BackgroundFailure once we've finished processing the items in the queue.
+
+    Args:
+      queue: A queue of tasks to run. Add tasks to this queue, and they will
+        be run.
+      task: Function to run on each queued input.
+      onexit: Function to run after all inputs are processed.
+      task_args: A list of args to pass to the |task|.
+      task_kwargs: A dict of optional args to pass to the |task|.
+    """
+    if task_args is None:
+      task_args = []
+    elif not isinstance(task_args, list):
+      task_args = list(task_args)
+    if task_kwargs is None:
+      task_kwargs = {}
+
+    errors = []
+    while True:
+      # Wait for a new item to show up on the queue. This is a blocking wait,
+      # so if there's nothing to do, we just sit here.
+      x = queue.get()
+      if isinstance(x, _AllTasksComplete):
+        # All tasks are complete, so we should exit.
+        break
+      elif not isinstance(x, list):
+        x = task_args + list(x)
+      else:
+        x = task_args + x
+
+      # If no tasks failed yet, process the remaining tasks.
+      if not errors:
+        try:
+          task(*x, **task_kwargs)
+        except BaseException as ex:
+          errors.extend(
+              failures_lib.CreateExceptInfo(ex, traceback.format_exc()))
+
+    # Run exit handlers.
+    if onexit:
+      onexit()
+
+    # Propagate any exceptions.
+    if errors:
+      raise BackgroundFailure(exc_infos=errors)
+
+
+def RunParallelSteps(steps, max_parallel=None, halt_on_error=False,
+                     return_values=False):
+  """Run a list of functions in parallel.
+
+  This function blocks until all steps are completed.
+
+  The output from the functions is saved to a temporary file and printed as if
+  they were run in sequence.
+
+  If exceptions occur in the steps, we join together the tracebacks and print
+  them after all parallel tasks have finished running. Further, a
+  BackgroundFailure is raised with full stack traces of all exceptions.
+
+  Examples:
+    # This snippet will execute in parallel:
+    #   somefunc()
+    #   anotherfunc()
+    #   funcfunc()
+    steps = [somefunc, anotherfunc, funcfunc]
+    RunParallelSteps(steps)
+    # Blocks until all calls have completed.
+
+  Args:
+    steps: A list of functions to run.
+    max_parallel: The maximum number of simultaneous tasks to run in parallel.
+      By default, run all tasks in parallel.
+    halt_on_error: After the first exception occurs, halt any running steps,
+      and squelch any further output, including any exceptions that might occur.
+    return_values: If set to True, RunParallelSteps returns a list containing
+      the return values of the steps.  Defaults to False.
+
+  Returns:
+    If |return_values| is True, the function will return a list containing the
+    return values of the steps.
+  """
+  def ReturnWrapper(queue, fn):
+    """Put the return value of |fn| into |queue|."""
+    queue.put(fn())
+
+  full_steps = []
+  queues = []
+  with cros_build_lib.ContextManagerStack() as stack:
+    if return_values:
+      # We use a managed queue here, because the child process will wait for the
+      # queue(pipe) to be flushed (i.e., when items are read from the queue)
+      # before exiting, and with a regular queue this may result in hangs for
+      # large return values.  But with a managed queue, the manager process will
+      # read the items and hold on to them until the managed queue goes out of
+      # scope and is cleaned up.
+      manager = stack.Add(Manager)
+      for step in steps:
+        queue = manager.Queue()
+        queues.append(queue)
+        full_steps.append(functools.partial(ReturnWrapper, queue, step))
+    else:
+      full_steps = steps
+
+    with _BackgroundTask.ParallelTasks(full_steps, max_parallel=max_parallel,
+                                       halt_on_error=halt_on_error):
+      pass
+
+    if return_values:
+      return [queue.get_nowait() for queue in queues]
+
+
+class _AllTasksComplete(object):
+  """Sentinel object to indicate that all tasks are complete."""
+
+
+@contextlib.contextmanager
+def BackgroundTaskRunner(task, *args, **kwargs):
+  """Run the specified task on each queued input in a pool of processes.
+
+  This context manager starts a set of workers in the background, who each
+  wait for input on the specified queue. For each input on the queue, these
+  workers run task(*args + *input, **kwargs). Note that certain kwargs will
+  not pass through to the task (see Args below for the list).
+
+  The output from these tasks is saved to a temporary file. When control
+  returns to the context manager, the background output is printed in order,
+  as if the tasks were run in sequence.
+
+  If exceptions occur in the steps, we join together the tracebacks and print
+  them after all parallel tasks have finished running. Further, a
+  BackgroundFailure is raised with full stack traces of all exceptions.
+
+  Examples:
+    # This will run somefunc(1, 'small', 'cow', foo='bar') in the background
+    # as soon as data is added to the queue (i.e. queue.put() is called).
+
+    def somefunc(arg1, arg2, arg3, foo=None):
+      ...
+
+    with BackgroundTaskRunner(somefunc, 1, foo='bar') as queue:
+      ... do random stuff ...
+      queue.put(['small', 'cow'])
+      ... do more random stuff while somefunc() runs ...
+    # Exiting the with statement will block until all calls have completed.
+
+  Args:
+    task: Function to run on each queued input.
+    queue: A queue of tasks to run. Add tasks to this queue, and they will
+      be run in the background.  If None, one will be created on the fly.
+    processes: Number of processes to launch.
+    onexit: Function to run in each background process after all inputs are
+      processed.
+    halt_on_error: After the first exception occurs, halt any running steps, and
+      squelch any further output, including any exceptions that might occur.
+      Halts on exceptions in any of the background processes, or in the
+      foreground process using the BackgroundTaskRunner.
+  """
+
+  queue = kwargs.pop('queue', None)
+  processes = kwargs.pop('processes', None)
+  onexit = kwargs.pop('onexit', None)
+  halt_on_error = kwargs.pop('halt_on_error', False)
+
+  with cros_build_lib.ContextManagerStack() as stack:
+    if queue is None:
+      manager = stack.Add(Manager)
+      queue = manager.Queue()
+
+    if not processes:
+      processes = multiprocessing.cpu_count()
+
+    child = functools.partial(_BackgroundTask.TaskRunner, queue, task,
+                              onexit=onexit, task_args=args,
+                              task_kwargs=kwargs)
+    steps = [child] * processes
+    with _BackgroundTask.ParallelTasks(steps, halt_on_error=halt_on_error):
+      try:
+        yield queue
+      finally:
+        for _ in range(processes):
+          queue.put(_AllTasksComplete())
+
+
+def RunTasksInProcessPool(task, inputs, processes=None, onexit=None):
+  """Run the specified function with each supplied input in a pool of processes.
+
+  This function runs task(*x) for x in inputs in a pool of processes. This
+  function blocks until all tasks are completed.
+
+  The output from these tasks is saved to a temporary file. When control
+  returns to the context manager, the background output is printed in order,
+  as if the tasks were run in sequence.
+
+  If exceptions occur in the steps, we join together the tracebacks and print
+  them after all parallel tasks have finished running. Further, a
+  BackgroundFailure is raised with full stack traces of all exceptions.
+
+  Examples:
+    # This snippet will execute in parallel:
+    #   somefunc('hi', 'fat', 'code')
+    #   somefunc('foo', 'bar', 'cow')
+
+    def somefunc(arg1, arg2, arg3):
+      ...
+    ...
+    inputs = [
+      ['hi', 'fat', 'code'],
+      ['foo', 'bar', 'cow'],
+    ]
+    RunTasksInProcessPool(somefunc, inputs)
+    # Blocks until all calls have completed.
+
+  Args:
+    task: Function to run on each input.
+    inputs: List of inputs.
+    processes: Number of processes, at most, to launch.
+    onexit: Function to run in each background process after all inputs are
+      processed.
+
+  Returns:
+    Returns a list containing the return values of the task for each input.
+  """
+  if not processes:
+    # - Use >=16 processes by default, in case it's a network-bound operation.
+    # - Try to use all of the CPUs, in case it's a CPU-bound operation.
+    processes = min(max(16, multiprocessing.cpu_count()), len(inputs))
+
+  with Manager() as manager:
+    # Set up output queue.
+    out_queue = manager.Queue()
+    fn = lambda idx, task_args: out_queue.put((idx, task(*task_args)))
+
+    # Micro-optimization: Setup the queue so that BackgroundTaskRunner
+    # doesn't have to set up another Manager process.
+    queue = manager.Queue()
+
+    with BackgroundTaskRunner(fn, queue=queue, processes=processes,
+                              onexit=onexit) as queue:
+      for idx, input_args in enumerate(inputs):
+        queue.put((idx, input_args))
+
+    return [x[1] for x in sorted(out_queue.get() for _ in range(len(inputs)))]
+
+
+PR_SET_PDEATHSIG = 1
+
+
+def ExitWithParent(sig=signal.SIGHUP):
+  """Sets this process to receive |sig| when the parent dies.
+
+  Note: this uses libc, so it only works on linux.
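+
+  Examples:
+    # Illustrative sketch only.
+    if ExitWithParent(signal.SIGTERM):
+      ...  # This process will receive SIGTERM when its parent dies.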
+
+  Args:
+    sig: Signal to receive. Defaults to SIGHUP.
+
+  Returns:
+    Whether we were successful in setting the death signal flag.
+  """
+  libc_name = ctypes.util.find_library('c')
+  if not libc_name:
+    return False
+  try:
+    libc = ctypes.CDLL(libc_name)
+    libc.prctl(PR_SET_PDEATHSIG, sig)
+    return True
+  # We might not be able to load the library (OSError), or prctl might be
+  # missing (AttributeError)
+  except (OSError, AttributeError):
+    return False
diff --git a/utils/frozen_chromite/lib/path_util.py b/utils/frozen_chromite/lib/path_util.py
new file mode 100644
index 0000000..ef2a178
--- /dev/null
+++ b/utils/frozen_chromite/lib/path_util.py
@@ -0,0 +1,365 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Handle path inference and translation."""
+
+from __future__ import print_function
+
+import collections
+import os
+import tempfile
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import git
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.utils import memoize
+
+
+GENERAL_CACHE_DIR = '.cache'
+CHROME_CACHE_DIR = 'cros_cache'
+
+CHECKOUT_TYPE_UNKNOWN = 'unknown'
+CHECKOUT_TYPE_GCLIENT = 'gclient'
+CHECKOUT_TYPE_REPO = 'repo'
+
+CheckoutInfo = collections.namedtuple(
+    'CheckoutInfo', ['type', 'root', 'chrome_src_dir'])
+
+
+class ChrootPathResolver(object):
+  """Perform path resolution to/from the chroot.
+
+  Attributes:
+    source_path: Value to override default source root inference.
+    source_from_path_repo: Whether to infer the source root from the converted
+      path's repo parent during inbound translation; overrides |source_path|.
+  """
+
+  # TODO(garnold) We currently infer the source root based on the path's own
+  # encapsulating repository. This is a heuristic catering to paths being
+  # translated to be used in a chroot that's not associated with the currently
+  # executing code (for example, cbuildbot run on a build root or a foreign
+  # tree checkout). This approach might result in arbitrary repo-contained
+  # paths being translated to invalid chroot paths where they actually should
+  # not, and other valid source paths failing to translate because they are not
+  # repo-contained. Eventually we'll want to make this behavior explicit, by
+  # either passing a source_root value, or requesting to infer it from the path
+  # (source_from_path_repo=True), but otherwise defaulting to the executing
+  # code's source root in the normal case. When that happens, we'll be
+  # switching source_from_path_repo to False by default. See chromium:485746.
+
+  def __init__(self, source_path=None, source_from_path_repo=True):
+    self._inside_chroot = cros_build_lib.IsInsideChroot()
+    self._source_path = (constants.SOURCE_ROOT if source_path is None
+                         else source_path)
+    self._source_from_path_repo = source_from_path_repo
+
+    # The following are only needed if outside the chroot.
+    if self._inside_chroot:
+      self._chroot_path = None
+      self._chroot_link = None
+      self._chroot_to_host_roots = None
+    else:
+      self._chroot_path = self._GetSourcePathChroot(self._source_path)
+      # The chroot link allows us to resolve paths when the chroot is symlinked
+      # to the default location. This is generally not used, but it is useful
+      # for CI for optimization purposes. We will trust them not to do something
+      # dumb, like symlink to /, but this doesn't enable that kind of behavior
+      # anyway, just allows resolving paths correctly from outside the chroot.
+      self._chroot_link = self._ReadChrootLink(self._chroot_path)
+
+      # Initialize mapping of known root bind mounts.
+      self._chroot_to_host_roots = (
+          (constants.CHROOT_SOURCE_ROOT, self._source_path),
+          (constants.CHROOT_CACHE_ROOT, self._GetCachePath),
+      )
+
+  @classmethod
+  @memoize.MemoizedSingleCall
+  def _GetCachePath(cls):
+    """Returns the cache directory."""
+    return os.path.realpath(GetCacheDir())
+
+  def _GetSourcePathChroot(self, source_path):
+    """Returns path to the chroot directory of a given source root."""
+    if source_path is None:
+      return None
+    return os.path.join(source_path, constants.DEFAULT_CHROOT_DIR)
+
+  def _ReadChrootLink(self, path):
+    """Convert a chroot symlink to its absolute path.
+
+    This contains defaults/edge-case assumptions for chroot paths. Not
+    recommended for non-chroot paths.
+
+    Args:
+      path (str|None): The path to resolve.
+
+    Returns:
+      str|None: The resolved path if the provided path is a symlink, None
+        otherwise.
+    """
+    # Mainly for the "if self._source_from_path_repo:" branch in _GetChrootPath.
+    # _GetSourcePathChroot can return None, so double check it here.
+    if not path:
+      return None
+
+    abs_path = os.path.abspath(path)
+    link = osutils.ResolveSymlink(abs_path)
+
+    # ResolveSymlink returns the passed path when the path isn't a symlink. We
+    # can skip some redundant work by not falling back on the link when the
+    # chroot is not a symlink.
+    if link == abs_path:
+      return None
+
+    return link
+
+  def _TranslatePath(self, path, src_root, dst_root_input):
+    """If |path| starts with |src_root|, replace it using |dst_root_input|.
+
+    Args:
+      path: An absolute path we want to convert to a destination equivalent.
+      src_root: The root that path needs to be contained in.
+      dst_root_input: The root we want to relocate the relative path into, or a
+        function returning this value.
+
+    Returns:
+      A translated path, or None if |src_root| is not a prefix of |path|.
+
+    Raises:
+      ValueError: If |src_root| is a prefix but |dst_root_input| yields None,
+        which means we don't have sufficient information to do the translation.
+    """
+    if not path.startswith(os.path.join(src_root, '')) and path != src_root:
+      return None
+    dst_root = dst_root_input() if callable(dst_root_input) else dst_root_input
+    if dst_root is None:
+      raise ValueError('No target root to translate path to')
+    return os.path.join(dst_root, path[len(src_root):].lstrip(os.path.sep))
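+
+  # Illustrative sketch (not part of the upstream module): _TranslatePath is a
+  # plain prefix swap over hypothetical roots.
+  #
+  #   self._TranslatePath('/cache/foo', '/cache', '/path/to/.cache')
+  #   # -> '/path/to/.cache/foo'
+  #   self._TranslatePath('/elsewhere/foo', '/cache', '/path/to/.cache')
+  #   # -> None (|src_root| is not a prefix of |path|)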
+
+  def _GetChrootPath(self, path):
+    """Translates a fully-expanded host |path| into a chroot equivalent.
+
+    This checks path prefixes in order from the most to least "contained": the
+    chroot itself, then the cache directory, and finally the source tree. The
+    idea is to return the shortest possible chroot equivalent.
+
+    Args:
+      path: A host path to translate.
+
+    Returns:
+      An equivalent chroot path.
+
+    Raises:
+      ValueError: If |path| is not reachable from the chroot.
+    """
+    new_path = None
+
+    # Preliminary: compute the actual source and chroot paths to use. These are
+    # generally the precomputed values, unless we're inferring the source root
+    # from the path itself.
+    source_path = self._source_path
+    chroot_path = self._chroot_path
+    chroot_link = self._chroot_link
+
+    if self._source_from_path_repo:
+      path_repo_dir = git.FindRepoDir(path)
+      if path_repo_dir is not None:
+        source_path = os.path.abspath(os.path.join(path_repo_dir, '..'))
+      chroot_path = self._GetSourcePathChroot(source_path)
+      chroot_link = self._ReadChrootLink(chroot_path)
+
+    # First, check if the path happens to be in the chroot already.
+    if chroot_path is not None:
+      new_path = self._TranslatePath(path, chroot_path, '/')
+      # Or in the symlinked dir.
+      if new_path is None and chroot_link is not None:
+        new_path = self._TranslatePath(path, chroot_link, '/')
+
+    # Second, check the cache directory.
+    if new_path is None:
+      new_path = self._TranslatePath(path, self._GetCachePath(),
+                                     constants.CHROOT_CACHE_ROOT)
+
+    # Finally, check the current SDK checkout tree.
+    if new_path is None and source_path is not None:
+      new_path = self._TranslatePath(path, source_path,
+                                     constants.CHROOT_SOURCE_ROOT)
+
+    if new_path is None:
+      raise ValueError('Path is not reachable from the chroot')
+
+    return new_path
+
+  def _GetHostPath(self, path):
+    """Translates a fully-expanded chroot |path| into a host equivalent.
+
+    We first attempt translation of the known roots (source, cache). If any is
+    successful, we check whether the result happens to point back to the
+    chroot, in which case we trim the chroot path prefix and recurse. If none
+    was successful, just prepend the chroot path.
+
+    Args:
+      path: A chroot path to translate.
+
+    Returns:
+      An equivalent host path.
+
+    Raises:
+      ValueError: If |path| could not be mapped to a proper host destination.
+    """
+    new_path = None
+
+    # Attempt resolution of known roots.
+    for src_root, dst_root in self._chroot_to_host_roots:
+      new_path = self._TranslatePath(path, src_root, dst_root)
+      if new_path is not None:
+        break
+
+    if new_path is None:
+      # If no known root was identified, just prepend the chroot path.
+      new_path = self._TranslatePath(path, '', self._chroot_path)
+    else:
+      # Check whether the resolved path happens to point back at the chroot, in
+      # which case trim the chroot path or link prefix and continue recursively.
+      path = self._TranslatePath(new_path, self._chroot_path, '/')
+      if path is None and self._chroot_link:
+        path = self._TranslatePath(new_path, self._chroot_link, '/')
+
+      if path is not None:
+        new_path = self._GetHostPath(path)
+
+    return new_path
+
+  def _ConvertPath(self, path, get_converted_path):
+    """Expands |path|; if outside the chroot, applies |get_converted_path|.
+
+    Args:
+      path: A path to be converted.
+      get_converted_path: A conversion function.
+
+    Returns:
+      An expanded and (if needed) converted path.
+
+    Raises:
+      ValueError: If path conversion failed.
+    """
+    # NOTE: We do not want to expand wrapper script symlinks because this
+    # prevents them from working. Therefore, if the path points to a file we
+    # only resolve its dirname but leave the basename intact. This means our
+    # path resolution might return unusable results for file symlinks that
+    # point outside the reachable space. These are edge cases in which the user
+    # is expected to resolve the realpath themselves in advance.
+    expanded_path = os.path.expanduser(path)
+    if os.path.isfile(expanded_path):
+      expanded_path = os.path.join(
+          os.path.realpath(os.path.dirname(expanded_path)),
+          os.path.basename(expanded_path))
+    else:
+      expanded_path = os.path.realpath(expanded_path)
+
+    if self._inside_chroot:
+      return expanded_path
+
+    try:
+      return get_converted_path(expanded_path)
+    except ValueError as e:
+      raise ValueError('%s: %s' % (e, path))
+
+  def ToChroot(self, path):
+    """Resolves current environment |path| for use in the chroot."""
+    return self._ConvertPath(path, self._GetChrootPath)
+
+  def FromChroot(self, path):
+    """Resolves chroot |path| for use in the current environment."""
+    return self._ConvertPath(path, self._GetHostPath)
+
+
+def DetermineCheckout(cwd=None):
+  """Gather information on the checkout we are in.
+
+  There are several checkout types, as defined by CHECKOUT_TYPE_XXX variables.
+  This function determines what checkout type |cwd| is in, for example, if |cwd|
+  belongs to a `repo` checkout.
+
+  Returns:
+    A CheckoutInfo object with these attributes:
+      type: The type of checkout.  Valid values are CHECKOUT_TYPE_*.
+      root: The root of the checkout.
+      chrome_src_dir: If the checkout is a Chrome checkout, the path to the
+        Chrome src/ directory.
+  """
+  checkout_type = CHECKOUT_TYPE_UNKNOWN
+  root, path = None, None
+
+  cwd = cwd or os.getcwd()
+  for path in osutils.IteratePathParents(cwd):
+    gclient_file = os.path.join(path, '.gclient')
+    if os.path.exists(gclient_file):
+      checkout_type = CHECKOUT_TYPE_GCLIENT
+      break
+    repo_dir = os.path.join(path, '.repo')
+    if os.path.isdir(repo_dir):
+      checkout_type = CHECKOUT_TYPE_REPO
+      break
+
+  if checkout_type != CHECKOUT_TYPE_UNKNOWN:
+    root = path
+
+  # Determine the chrome src directory.
+  chrome_src_dir = None
+  if checkout_type == CHECKOUT_TYPE_GCLIENT:
+    chrome_src_dir = os.path.join(root, 'src')
+
+  return CheckoutInfo(checkout_type, root, chrome_src_dir)
+
+
+def FindCacheDir():
+  """Returns the cache directory location based on the checkout type."""
+  checkout = DetermineCheckout()
+  if checkout.type == CHECKOUT_TYPE_REPO:
+    return os.path.join(checkout.root, GENERAL_CACHE_DIR)
+  elif checkout.type == CHECKOUT_TYPE_GCLIENT:
+    return os.path.join(checkout.chrome_src_dir, 'build', CHROME_CACHE_DIR)
+  elif checkout.type == CHECKOUT_TYPE_UNKNOWN:
+    return os.path.join(tempfile.gettempdir(), 'chromeos-cache')
+  else:
+    raise AssertionError('Unexpected type %s' % checkout.type)
+
+
+def GetCacheDir():
+  """Returns the current cache dir."""
+  return os.environ.get(constants.SHARED_CACHE_ENVVAR, FindCacheDir())
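+
+# Illustrative sketch (not part of the upstream module): the environment
+# override wins, otherwise the checkout-derived default is used. The path
+# below is hypothetical:
+#
+#   os.environ[constants.SHARED_CACHE_ENVVAR] = '/tmp/my-cros-cache'
+#   GetCacheDir()  # -> '/tmp/my-cros-cache'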
+
+
+def ToChrootPath(path, source_path=None):
+  """Resolves current environment |path| for use in the chroot.
+
+  Args:
+    path: string path to translate into chroot namespace.
+    source_path: string path to root of source checkout with chroot in it.
+
+  Returns:
+    The same path converted to "inside chroot" namespace.
+
+  Raises:
+    ValueError: If the path references a location not available in the chroot.
+  """
+  return ChrootPathResolver(source_path=source_path).ToChroot(path)
+
+
+def FromChrootPath(path, source_path=None):
+  """Resolves chroot |path| for use in the current environment.
+
+  Args:
+    path: string path to translate out of chroot namespace.
+    source_path: string path to root of source checkout with chroot in it.
+
+  Returns:
+    The same path converted to "outside chroot" namespace.
+  """
+  return ChrootPathResolver(source_path=source_path).FromChroot(path)
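+
+
+# Illustrative usage sketch (not part of the upstream module; the paths below
+# are hypothetical). Inside the chroot both helpers just expand the path;
+# outside the chroot they translate between the two namespaces:
+#
+#   ToChrootPath('/home/user/chromiumos/src/scripts')
+#   # -> path under constants.CHROOT_SOURCE_ROOT (typically /mnt/host/source)
+#
+#   FromChrootPath('/mnt/host/source/chromite')
+#   # -> os.path.join(<source checkout root>, 'chromite')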
diff --git a/utils/frozen_chromite/lib/remote_access.py b/utils/frozen_chromite/lib/remote_access.py
new file mode 100644
index 0000000..e59b5a0
--- /dev/null
+++ b/utils/frozen_chromite/lib/remote_access.py
@@ -0,0 +1,1389 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library containing functions to access a remote test device."""
+
+from __future__ import print_function
+
+import glob
+import os
+import re
+import shutil
+import socket
+import stat
+import subprocess
+import tempfile
+import time
+
+import six
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import osutils
+from autotest_lib.utils.frozen_chromite.lib import parallel
+from autotest_lib.utils.frozen_chromite.lib import timeout_util
+from autotest_lib.utils.frozen_chromite.scripts import cros_set_lsb_release
+from autotest_lib.utils.frozen_chromite.utils import memoize
+
+
+_path = os.path.dirname(os.path.realpath(__file__))
+TEST_PRIVATE_KEY = os.path.normpath(
+    os.path.join(_path, '../ssh_keys/testing_rsa'))
+del _path
+
+CHUNK_SIZE = 50 * 1024 * 1024
+DEGREE_OF_PARALLELISM = 8
+LOCALHOST = 'localhost'
+LOCALHOST_IP = '127.0.0.1'
+ROOT_ACCOUNT = 'root'
+
+# IP used for testing that is a valid IP address, but would fail quickly if
+# actually used for any real operation (e.g. pinging or making connections).
+# https://en.wikipedia.org/wiki/IPv4#Special-use_addresses
+TEST_IP = '0.1.2.3'
+
+REBOOT_MAX_WAIT = 180
+REBOOT_SSH_CONNECT_TIMEOUT = 2
+REBOOT_SSH_CONNECT_ATTEMPTS = 2
+CHECK_INTERVAL = 5
+DEFAULT_SSH_PORT = 22
+# Ssh returns status 255 when it encounters errors in its own code.  Otherwise
+# it returns the status of the command that it ran on the host, including
+# possibly 255.  Here we assume that 255 indicates only ssh errors.  This may
+# be a reasonable guess for our purposes.
+SSH_ERROR_CODE = 255
+
+# SSH default known_hosts filepath.
+KNOWN_HOSTS_PATH = os.path.expanduser('~/.ssh/known_hosts')
+
+# Dev/test packages are installed in these paths.
+DEV_BIN_PATHS = '/usr/local/bin:/usr/local/sbin'
+
+
+class RemoteAccessException(Exception):
+  """Base exception for this module."""
+
+
+class SSHConnectionError(RemoteAccessException):
+  """Raised when SSH connection has failed."""
+
+  def IsKnownHostsMismatch(self):
+    """Returns True if this error was caused by a known_hosts mismatch.
+
+    This only checks for a mismatch; it returns False if the host
+    didn't exist in known_hosts at all.
+    """
+    # Checking for string output is brittle, but there's no exit code that
+    # indicates why SSH failed so this might be the best we can do.
+    # RemoteAccess.RemoteSh() sets LC_MESSAGES=C so we only need to check for
+    # the English error message.
+    # Verified for OpenSSH_6.6.1p1.
+    return 'REMOTE HOST IDENTIFICATION HAS CHANGED' in str(self)
+
+
+class DeviceNotPingableError(RemoteAccessException):
+  """Raised when device is not pingable."""
+
+
+class DefaultDeviceError(RemoteAccessException):
+  """Raised when a default ChromiumOSDevice can't be found."""
+
+
+class CatFileError(RemoteAccessException):
+  """Raised when error occurs while trying to cat a remote file."""
+
+
+class RunningPidsError(RemoteAccessException):
+  """Raised when unable to get running pids on the device."""
+
+
+def NormalizePort(port, str_ok=True):
+  """Checks if |port| is a valid port number and returns the number.
+
+  Args:
+    port: The port to normalize.
+    str_ok: Accept |port| as a string. If set to False, only accept
+      an integer. Defaults to True.
+
+  Returns:
+    A port number (integer).
+  """
+  err_msg = '%s is not a valid port number.' % port
+
+  if not str_ok and not isinstance(port, int):
+    raise ValueError(err_msg)
+
+  port = int(port)
+  if port <= 0 or port >= 65536:
+    raise ValueError(err_msg)
+
+  return port
+
+
+def GetUnusedPort(ip=LOCALHOST, family=socket.AF_INET,
+                  stype=socket.SOCK_STREAM):
+  """Returns a currently unused port.
+
+  Examples:
+    Note: Since this does not guarantee the port remains unused when you
+    attempt to bind it, your code should retry in a loop like so:
+    while True:
+      try:
+        port = remote_access.GetUnusedPort()
+        <attempt to bind the port>
+        break
+      except socket.error as e:
+        if e.errno == errno.EADDRINUSE:
+          continue
+        <fallback/raise>
+
+  Args:
+    ip: IP to use to bind the port.
+    family: Address family.
+    stype: Socket type.
+
+  Returns:
+    A port number (integer).
+  """
+  s = None
+  try:
+    s = socket.socket(family, stype)
+    s.bind((ip, 0))
+    return s.getsockname()[1]
+  # TODO(vapier): Drop socket.error when we're Python 3-only.
+  # pylint: disable=overlapping-except
+  except (socket.error, OSError):
+    pass
+  finally:
+    if s is not None:
+      s.close()
+
+
+def RunCommandFuncWrapper(func, msg, *args, **kwargs):
+  """Wraps a function that invokes cros_build_lib.run.
+
+  If the command failed, logs warning |msg| if check is not set; raises
+  cros_build_lib.RunCommandError with |msg| if check is set.
+
+  Args:
+    func: The function to call.
+    msg: The message to display if the command failed.
+    ignore_failures: If True, ignore failures during the command.
+    *args: Arguments to pass to |func|.
+    **kwargs: Keyword arguments to pass to |func|.
+
+  Returns:
+    The result of |func|.
+
+  Raises:
+    cros_build_lib.RunCommandError if the command failed and check is set.
+  """
+  check = kwargs.pop('check', True)
+  ignore_failures = kwargs.pop('ignore_failures', False)
+  result = func(*args, check=False, **kwargs)
+
+  if not ignore_failures:
+    if result.returncode != 0 and check:
+      raise cros_build_lib.RunCommandError(msg, result)
+
+    if result.returncode != 0:
+      logging.warning(msg)
+
+  return result
+
+
+def CompileSSHConnectSettings(**kwargs):
+  """Creates a list of SSH connection options.
+
+  Any ssh_config option can be specified in |kwargs|, in addition,
+  several options are set to default values if not specified. Any
+  option can be set to None to prevent this function from assigning
+  a value so that the SSH default value will be used.
+
+  This function doesn't check to make sure the |kwargs| options are
+  valid, so a typo or invalid setting won't be caught until the
+  resulting arguments are passed into an SSH call.
+
+  Args:
+    kwargs: A dictionary of ssh_config settings.
+
+  Returns:
+    A list of arguments to pass to SSH.
+  """
+  settings = {
+      'ConnectTimeout': 30,
+      'ConnectionAttempts': 4,
+      'NumberOfPasswordPrompts': 0,
+      'Protocol': 2,
+      'ServerAliveInterval': 10,
+      'ServerAliveCountMax': 3,
+      'StrictHostKeyChecking': 'no',
+      'UserKnownHostsFile': '/dev/null',
+  }
+  settings.update(kwargs)
+  return ['-o%s=%s' % (k, v) for k, v in settings.items() if v is not None]
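+
+# Illustrative sketch (not part of the upstream module): each setting is
+# rendered as a '-o' flag, and a value of None drops the option so the SSH
+# default applies:
+#
+#   CompileSSHConnectSettings(ConnectTimeout=10, Protocol=None)
+#   # -> ['-oConnectTimeout=10', '-oConnectionAttempts=4', ...]
+#   #    (no '-oProtocol=...' entry)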
+
+
+def RemoveKnownHost(host, known_hosts_path=KNOWN_HOSTS_PATH):
+  """Removes |host| from a known_hosts file.
+
+  `ssh-keygen -R` doesn't work on bind mounted files as they can only
+  be updated in place. Since we bind mount the default known_hosts file
+  when entering the chroot, this function provides an alternate way
+  to remove hosts from the file.
+
+  Args:
+    host: The host name to remove from the known_hosts file.
+    known_hosts_path: Path to the known_hosts file to change. Defaults
+                      to the standard SSH known_hosts file path.
+
+  Raises:
+    cros_build_lib.RunCommandError if ssh-keygen fails.
+  """
+  # `ssh-keygen -R` creates a backup file to retain the old 'known_hosts'
+  # content and never deletes it. Using TempDir here to make sure both the temp
+  # files created by us and `ssh-keygen -R` are deleted afterwards.
+  with osutils.TempDir(prefix='remote-access-') as tempdir:
+    temp_file = os.path.join(tempdir, 'temp_known_hosts')
+    try:
+      # Using shutil.copy2 to preserve the file ownership and permissions.
+      shutil.copy2(known_hosts_path, temp_file)
+    except IOError:
+      # If |known_hosts_path| doesn't exist neither does |host| so we're done.
+      return
+    cros_build_lib.run(['ssh-keygen', '-R', host, '-f', temp_file], quiet=True)
+    shutil.copy2(temp_file, known_hosts_path)
+
+
+class PortForwardSpec(object):
+  """Represent the information required to define an SSH tunnel."""
+
+  def __init__(self, local_port, remote_host='localhost', remote_port=None,
+               local_host='localhost'):
+    if remote_port is None:
+      remote_port = local_port
+    self.local_port = NormalizePort(local_port)
+    self.remote_port = NormalizePort(remote_port)
+    self.local_host = local_host
+    self.remote_host = remote_host
+
+  @property
+  def command_line_spec(self):
+    """Return the port forwarding spec for the `ssh` command."""
+    if not self.remote_host:
+      return '%d:%s:%d' % (self.remote_port, self.local_host, self.local_port)
+    return '%s:%d:%s:%d' % (self.remote_host, self.remote_port, self.local_host,
+                            self.local_port)
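+
+  # Illustrative sketch (not part of the upstream module): forwarding port 8080
+  # on both ends renders as an `ssh -L`/`-R` argument like so:
+  #
+  #   PortForwardSpec(local_port=8080).command_line_spec
+  #   # -> 'localhost:8080:localhost:8080'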
+
+
+class RemoteAccess(object):
+  """Provides access to a remote test machine."""
+
+  DEFAULT_USERNAME = ROOT_ACCOUNT
+
+  def __init__(self, remote_host, tempdir, port=None, username=None,
+               private_key=None, debug_level=logging.DEBUG, interactive=True):
+    """Construct the object.
+
+    Args:
+      remote_host: The ip or hostname of the remote test machine.  The test
+                   machine should be running a ChromeOS test image.
+      tempdir: A directory that RemoteAccess can use to store temporary files.
+               It's the responsibility of the caller to remove it.
+      port: The ssh port of the test machine to connect to.
+      username: The ssh login username (default: root).
+      private_key: The identity file to pass to `ssh -i` (default: testing_rsa).
+      debug_level: Logging level to use for all run invocations.
+      interactive: If set to False, pass /dev/null into stdin for the sh cmd.
+    """
+    self.tempdir = tempdir
+    self.remote_host = remote_host
+    self.port = port
+    self.username = username if username else self.DEFAULT_USERNAME
+    self.debug_level = debug_level
+    private_key_src = private_key if private_key else TEST_PRIVATE_KEY
+    self.private_key = os.path.join(
+        tempdir, os.path.basename(private_key_src))
+
+    self.interactive = interactive
+    shutil.copyfile(private_key_src, self.private_key)
+    os.chmod(self.private_key, stat.S_IRUSR)
+
+  @staticmethod
+  def _mockable_popen(*args, **kwargs):
+    """This wraps subprocess.Popen so it can be mocked in unit tests."""
+    return subprocess.Popen(*args, **kwargs)
+
+  @property
+  def target_ssh_url(self):
+    return '%s@%s' % (self.username, self.remote_host)
+
+  def _GetSSHCmd(self, connect_settings=None):
+    if connect_settings is None:
+      connect_settings = CompileSSHConnectSettings()
+
+    cmd = ['ssh']
+    if self.port:
+      cmd += ['-p', str(self.port)]
+    cmd += connect_settings
+    cmd += ['-oIdentitiesOnly=yes', '-i', self.private_key]
+    if not self.interactive:
+      cmd.append('-n')
+
+    return cmd
+
+  def GetSSHCommand(self, connect_settings=None):
+    """Returns the ssh command that can be used to connect to the device
+
+    Args:
+      connect_settings: dict of additional ssh options
+
+    Returns:
+      ['ssh', '...', 'user@host']
+    """
+    ssh_cmd = self._GetSSHCmd(connect_settings=connect_settings)
+    ssh_cmd.append(self.target_ssh_url)
+
+    return ssh_cmd
+
+  def RemoteSh(self, cmd, connect_settings=None, check=True,
+               remote_sudo=False, remote_user=None, ssh_error_ok=False,
+               **kwargs):
+    """Run a sh command on the remote device through ssh.
+
+    Args:
+      cmd: The command string or list to run. None or empty string/list will
+           start an interactive session.
+      connect_settings: The SSH connect settings to use.
+      check: Throw an exception when the command exits with a non-zero
+             returncode.  This does not cover the case where the ssh command
+             itself fails (return code 255).  See ssh_error_ok.
+      ssh_error_ok: Does not throw an exception when the ssh command itself
+                    fails (return code 255).
+      remote_sudo: If set, run the command in remote shell with sudo.
+      remote_user: If set, run the command as the specified user.
+      **kwargs: See cros_build_lib.run documentation.
+
+    Returns:
+      A CommandResult object.  The returncode is the returncode of the command,
+      or 255 if ssh encountered an error (could not connect, connection
+      interrupted, etc.)
+
+    Raises:
+      RunCommandError when error is not ignored through the check flag.
+      SSHConnectionError when ssh command error is not ignored through
+      the ssh_error_ok flag.
+    """
+    kwargs.setdefault('capture_output', True)
+    kwargs.setdefault('encoding', 'utf-8')
+    kwargs.setdefault('debug_level', self.debug_level)
+    # Force English SSH messages. SSHConnectionError.IsKnownHostsMismatch()
+    # requires English errors to detect a known_hosts key mismatch error.
+    kwargs.setdefault('extra_env', {})['LC_MESSAGES'] = 'C'
+
+    prev_user = self.username
+    if remote_user:
+      self.username = remote_user
+
+    ssh_cmd = self.GetSSHCommand(connect_settings=connect_settings)
+
+    if cmd:
+      ssh_cmd.append('--')
+
+      if remote_sudo and self.username != ROOT_ACCOUNT:
+        # Prepend sudo to cmd.
+        ssh_cmd.append('sudo')
+
+      if isinstance(cmd, six.string_types):
+        if kwargs.get('shell'):
+          ssh_cmd = '%s %s' % (' '.join(ssh_cmd),
+                               cros_build_lib.ShellQuote(cmd))
+        else:
+          ssh_cmd += [cmd]
+      else:
+        ssh_cmd += cmd
+
+    try:
+      return cros_build_lib.run(ssh_cmd, **kwargs)
+    except cros_build_lib.RunCommandError as e:
+      if ((e.result.returncode == SSH_ERROR_CODE and ssh_error_ok) or
+          (e.result.returncode and e.result.returncode != SSH_ERROR_CODE
+           and not check)):
+        return e.result
+      elif e.result.returncode == SSH_ERROR_CODE:
+        raise SSHConnectionError(e.result.error)
+      else:
+        raise
+    finally:
+      # Restore the previous user if we temporarily changed it earlier.
+      self.username = prev_user
+
+  def CreateTunnel(self, to_local=None, to_remote=None, connect_settings=None):
+    """Establishes an SSH tunnel to the remote device as a background process.
+
+    Args:
+      to_local: A list of PortForwardSpec objects to forward from the local
+          machine to the remote machine.
+      to_remote: A list of PortForwardSpec to forward from the remote machine
+          to the local machine.
+      connect_settings: The SSH connect settings to use.
+
+    Returns:
+      A Popen object. Note that it represents an already started background
+      process. Calling poll() on the return value can be used to check that
+      the tunnel is still running. To close the tunnel call terminate().
+    """
+
+    ssh_cmd = self._GetSSHCmd(connect_settings=connect_settings)
+    if to_local is not None:
+      ssh_cmd.extend(
+          token for spec in to_local for token in ('-L',
+                                                   spec.command_line_spec))
+    if to_remote is not None:
+      ssh_cmd.extend(
+          token for spec in to_remote for token in ('-R',
+                                                    spec.command_line_spec))
+    ssh_cmd.append('-N')
+    ssh_cmd.append(self.target_ssh_url)
+
+    logging.log(self.debug_level, '%s', cros_build_lib.CmdToStr(ssh_cmd))
+
+    return RemoteAccess._mockable_popen(ssh_cmd)
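+
+  # Illustrative sketch (not part of the upstream module; hostname, tempdir and
+  # port are hypothetical): forward remote port 9222 back to this machine, then
+  # tear the tunnel down.
+  #
+  #   agent = RemoteAccess('dut-hostname', tempdir)
+  #   proc = agent.CreateTunnel(to_remote=[PortForwardSpec(local_port=9222)])
+  #   # ... use the tunnel; proc.poll() is None while it is still up ...
+  #   proc.terminate()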
+
+  def _GetBootId(self, rebooting=False):
+    """Obtains unique boot session identifier.
+
+    If rebooting is True, uses an SSH connection with a short timeout,
+    which will wait for at most about ten seconds. If the network returns
+    an error (e.g. host unreachable) the delay can be shorter.
+    If rebooting is True and an ssh error occurs, None is returned.
+    """
+    if rebooting:
+      # In tests SSH seems to be waiting rather longer than would be expected
+      # from these parameters. These values produce a ~5 second wait.
+      connect_settings = CompileSSHConnectSettings(
+          ConnectTimeout=REBOOT_SSH_CONNECT_TIMEOUT,
+          ConnectionAttempts=REBOOT_SSH_CONNECT_ATTEMPTS)
+      result = self.RemoteSh(['cat', '/proc/sys/kernel/random/boot_id'],
+                             connect_settings=connect_settings,
+                             check=False, ssh_error_ok=True,
+                             log_output=True)
+      if result.returncode == SSH_ERROR_CODE:
+        return None
+      elif result.returncode == 0:
+        return result.output.rstrip()
+      else:
+        raise Exception('Unexpected error code %s getting boot ID.'
+                        % result.returncode)
+    else:
+      result = self.RemoteSh(['cat', '/proc/sys/kernel/random/boot_id'],
+                             log_output=True)
+      return result.output.rstrip()
+
+
+  def CheckIfRebooted(self, old_boot_id):
+    """Checks if the remote device has successfully rebooted
+
+    This compares the remote device old and current boot IDs.  If
+    ssh errors occur, the device has likely not booted and False is
+    returned.  Basically only returns True if it is proven that the
+    device has rebooted.  May throw exceptions.
+
+    Returns:
+      True if the device has successfully rebooted, False otherwise.
+    """
+    new_boot_id = self._GetBootId(rebooting=True)
+    if new_boot_id is None:
+      logging.debug('Unable to get new boot_id after reboot from boot_id %s',
+                    old_boot_id)
+      return False
+    elif new_boot_id == old_boot_id:
+      logging.debug('Checking if rebooted from boot_id %s, still running %s',
+                    old_boot_id, new_boot_id)
+      return False
+    else:
+      logging.debug('Checking if rebooted from boot_id %s, now running %s',
+                    old_boot_id, new_boot_id)
+      return True
+
+  def AwaitReboot(self, old_boot_id, timeout_sec=REBOOT_MAX_WAIT):
+    """Await reboot away from old_boot_id.
+
+    Args:
+      old_boot_id: The boot_id that must be transitioned away from for success.
+      timeout_sec: How long to wait for reboot.
+
+    Returns:
+      True if the device has successfully rebooted.
+    """
+    try:
+      timeout_util.WaitForReturnTrue(lambda: self.CheckIfRebooted(old_boot_id),
+                                     timeout_sec, period=CHECK_INTERVAL)
+    except timeout_util.TimeoutError:
+      return False
+    return True
+
+  def RemoteReboot(self, timeout_sec=REBOOT_MAX_WAIT):
+    """Reboot the remote device."""
+    logging.info('Rebooting %s...', self.remote_host)
+    old_boot_id = self._GetBootId()
+    # Use ssh_error_ok=True in the remote shell invocations because the reboot
+    # might kill sshd before the connection completes normally.
+    self.RemoteSh(['reboot'], ssh_error_ok=True, remote_sudo=True)
+    time.sleep(CHECK_INTERVAL)
+    if not self.AwaitReboot(old_boot_id, timeout_sec):
+      cros_build_lib.Die('Reboot has not completed after %s seconds; giving up.'
+                         % (timeout_sec,))
+
+  def Rsync(self, src, dest, to_local=False, follow_symlinks=False,
+            recursive=True, inplace=False, verbose=False, sudo=False,
+            remote_sudo=False, compress=True, **kwargs):
+    """Rsync a path to the remote device.
+
+    Rsync a path to the remote device. If |to_local| is set True, it
+    rsyncs the path from the remote device to the local machine.
+
+    Args:
+      src: The local src directory.
+      dest: The remote dest directory.
+      to_local: If set, rsync remote path to local path.
+      follow_symlinks: If set, transform symlinks into referent
+        path. Otherwise, copy symlinks as symlinks.
+      recursive: Whether to recursively copy entire directories.
+      inplace: If set, cause rsync to overwrite the dest files in place.  This
+        conserves space, but has some side effects - see rsync man page.
+      verbose: If set, print more verbose output during rsync file transfer.
+      sudo: If set, invoke the command via sudo.
+      remote_sudo: If set, run the command in remote shell with sudo.
+      compress: If set, compress file data during the transfer.
+      **kwargs: See cros_build_lib.run documentation.
+    """
+    kwargs.setdefault('debug_level', self.debug_level)
+
+    ssh_cmd = ' '.join(self._GetSSHCmd())
+    rsync_cmd = ['rsync', '--perms', '--verbose', '--times',
+                 '--omit-dir-times', '--exclude', '.svn']
+    rsync_cmd.append('--copy-links' if follow_symlinks else '--links')
+    rsync_sudo = 'sudo' if (
+        remote_sudo and self.username != ROOT_ACCOUNT) else ''
+    rsync_cmd += ['--rsync-path',
+                  'PATH=%s:$PATH %s rsync' % (DEV_BIN_PATHS, rsync_sudo)]
+
+    if verbose:
+      rsync_cmd.append('--progress')
+    if recursive:
+      rsync_cmd.append('--recursive')
+    if inplace:
+      rsync_cmd.append('--inplace')
+    if compress:
+      rsync_cmd.append('--compress')
+    logging.info('Using rsync compression: %s', compress)
+
+    if to_local:
+      rsync_cmd += ['--rsh', ssh_cmd,
+                    '[%s]:%s' % (self.target_ssh_url, src), dest]
+    else:
+      rsync_cmd += ['--rsh', ssh_cmd, src,
+                    '[%s]:%s' % (self.target_ssh_url, dest)]
+
+    rc_func = cros_build_lib.run
+    if sudo:
+      rc_func = cros_build_lib.sudo_run
+    return rc_func(rsync_cmd, print_cmd=verbose, **kwargs)
+
+  def RsyncToLocal(self, *args, **kwargs):
+    """Rsync a path from the remote device to the local machine."""
+    return self.Rsync(*args, to_local=kwargs.pop('to_local', True), **kwargs)
+
+  def Scp(self, src, dest, to_local=False, recursive=True, verbose=False,
+          sudo=False, **kwargs):
+    """Scp a file or directory to the remote device.
+
+    Args:
+      src: The local src file or directory.
+      dest: The remote dest location.
+      to_local: If set, scp remote path to local path.
+      recursive: Whether to recursively copy entire directories.
+      verbose: If set, print more verbose output during scp file transfer.
+      sudo: If set, invoke the command via sudo.
+      remote_sudo: If set, run the command in remote shell with sudo.
+      **kwargs: See cros_build_lib.run documentation.
+
+    Returns:
+      A CommandResult object containing the information and return code of
+      the scp command.
+    """
+    remote_sudo = kwargs.pop('remote_sudo', False)
+    if remote_sudo and self.username != ROOT_ACCOUNT:
+      # TODO: Implement scp with remote sudo.
+      raise NotImplementedError('Cannot run scp with sudo!')
+
+    kwargs.setdefault('debug_level', self.debug_level)
+    # scp relies on 'scp' being in the $PATH of the non-interactive
+    # SSH login shell.
+    scp_cmd = ['scp']
+    if self.port:
+      scp_cmd += ['-P', str(self.port)]
+    scp_cmd += CompileSSHConnectSettings(ConnectTimeout=60)
+    scp_cmd += ['-i', self.private_key]
+
+    if not self.interactive:
+      scp_cmd.append('-n')
+
+    if recursive:
+      scp_cmd.append('-r')
+    if verbose:
+      scp_cmd.append('-v')
+
+    # Check for an IPv6 address
+    if ':' in self.remote_host:
+      target_ssh_url = '%s@[%s]' % (self.username, self.remote_host)
+    else:
+      target_ssh_url = self.target_ssh_url
+
+    if to_local:
+      scp_cmd += ['%s:%s' % (target_ssh_url, src), dest]
+    else:
+      scp_cmd += glob.glob(src) + ['%s:%s' % (target_ssh_url, dest)]
+
+    rc_func = cros_build_lib.run
+    if sudo:
+      rc_func = cros_build_lib.sudo_run
+
+    return rc_func(scp_cmd, print_cmd=verbose, **kwargs)
+
+  def ScpToLocal(self, *args, **kwargs):
+    """Scp a path from the remote device to the local machine."""
+    return self.Scp(*args, to_local=kwargs.pop('to_local', True), **kwargs)
+
+  def PipeToRemoteSh(self, producer_cmd, cmd, **kwargs):
+    """Run a local command and pipe it to a remote sh command over ssh.
+
+    Args:
+      producer_cmd: Command to run locally with its results piped to |cmd|.
+      cmd: Command to run on the remote device.
+      **kwargs: See RemoteSh for documentation.
+    """
+    result = cros_build_lib.run(producer_cmd, print_cmd=False,
+                                capture_output=True)
+    return self.RemoteSh(cmd, input=kwargs.pop('input', result.output),
+                         **kwargs)
+
+
+class RemoteDeviceHandler(object):
+  """A wrapper of RemoteDevice."""
+
+  def __init__(self, *args, **kwargs):
+    """Creates a RemoteDevice object."""
+    self.device = RemoteDevice(*args, **kwargs)
+
+  def __enter__(self):
+    """Return the temporary directory."""
+    return self.device
+
+  def __exit__(self, _type, _value, _traceback):
+    """Cleans up the device."""
+    self.device.Cleanup()
+
+
+class ChromiumOSDeviceHandler(object):
+  """A wrapper of ChromiumOSDevice."""
+
+  def __init__(self, *args, **kwargs):
+    """Creates a RemoteDevice object."""
+    self.device = ChromiumOSDevice(*args, **kwargs)
+
+  def __enter__(self):
+    """Return the temporary directory."""
+    return self.device
+
+  def __exit__(self, _type, _value, _traceback):
+    """Cleans up the device."""
+    self.device.Cleanup()
+
+
+class RemoteDevice(object):
+  """Handling basic SSH communication with a remote device."""
+
+  DEFAULT_BASE_DIR = '/tmp/remote-access'
+
+  def __init__(self, hostname, port=None, username=None,
+               base_dir=DEFAULT_BASE_DIR, connect_settings=None,
+               private_key=None, debug_level=logging.DEBUG, ping=False,
+               connect=True):
+    """Initializes a RemoteDevice object.
+
+    Args:
+      hostname: The hostname of the device.
+      port: The ssh port of the device.
+      username: The ssh login username.
+      base_dir: The base work directory to create on the device, or
+        None. Required in order to use run(), but
+        BaseRunCommand() will be available in either case.
+      connect_settings: Default SSH connection settings.
+      private_key: The identity file to pass to `ssh -i`.
+      debug_level: Setting debug level for logging.
+      ping: Whether to ping the device before attempting to connect.
+      connect: True to set up the connection, otherwise set up will
+        be automatically deferred until device use.
+    """
+    self.hostname = hostname
+    self.port = port
+    self.username = username
+    # The tempdir is for storing the rsa key and/or some temp files.
+    self.tempdir = osutils.TempDir(prefix='ssh-tmp')
+    self.connect_settings = (connect_settings if connect_settings else
+                             CompileSSHConnectSettings())
+    self.private_key = private_key
+    self.debug_level = debug_level
+    # The temporary work directories on the device.
+    self._base_dir = base_dir
+    self._work_dir = None
+    # Use GetAgent() instead of accessing this directly for deferred connect.
+    self._agent = None
+    self.cleanup_cmds = []
+
+    if ping and not self.Pingable():
+      raise DeviceNotPingableError('Device %s is not pingable.' % self.hostname)
+
+    if connect:
+      self._Connect()
+
+  def Pingable(self, timeout=20):
+    """Returns True if the device is pingable.
+
+    Args:
+      timeout: Timeout in seconds (default: 20 seconds).
+
+    Returns:
+      True if the device responded to the ping before |timeout|.
+    """
+    try:
+      addrlist = socket.getaddrinfo(self.hostname, 22)
+    except socket.gaierror:
+      # If the hostname is the name of a "Host" entry in ~/.ssh/config,
+      # it might be ssh-able but not pingable.
+      # If the hostname is truly bogus, ssh will fail immediately, so
+      # we can safely skip the ping step.
+      logging.info('Hostname "%s" not found, falling through to ssh',
+                   self.hostname)
+      return True
+
+    if addrlist[0][0] == socket.AF_INET6:
+      ping_command = 'ping6'
+    else:
+      ping_command = 'ping'
+
+    result = cros_build_lib.run(
+        [ping_command, '-c', '1', '-w', str(timeout), self.hostname],
+        check=False,
+        capture_output=True)
+    return result.returncode == 0
+
+  def GetAgent(self):
+    """Agent accessor; connects the agent if necessary."""
+    if not self._agent:
+      self._Connect()
+    return self._agent
+
+  def _Connect(self):
+    """Sets up the SSH connection and internal state."""
+    self._agent = RemoteAccess(self.hostname, self.tempdir.tempdir,
+                               port=self.port, username=self.username,
+                               private_key=self.private_key)
+
+  @property
+  def work_dir(self):
+    """The work directory to create on the device.
+
+    This property exists so we can create the remote paths on demand.  For
+    some use cases, it'll never be needed, so skipping creation is faster.
+    """
+    if self._base_dir is None:
+      return None
+
+    if self._work_dir is None:
+      self._work_dir = self.BaseRunCommand(
+          ['mkdir', '-p', self._base_dir, '&&',
+           'mktemp', '-d', '--tmpdir=%s' % self._base_dir],
+          capture_output=True).output.strip()
+      logging.debug('The temporary working directory on the device is %s',
+                    self._work_dir)
+      self.RegisterCleanupCmd(['rm', '-rf', self._work_dir])
+
+    return self._work_dir
+
+  def HasProgramInPath(self, binary):
+    """Checks if the given binary exists on the device."""
+    result = self.GetAgent().RemoteSh(
+        ['PATH=%s:$PATH which' % DEV_BIN_PATHS, binary], check=False)
+    return result.returncode == 0
+
+  def HasRsync(self):
+    """Checks if rsync exists on the device."""
+    return self.HasProgramInPath('rsync')
+
+  @memoize.MemoizedSingleCall
+  def HasGigabitEthernet(self):
+    """Checks if the device has a gigabit ethernet port.
+
+    This function checks the device's first ethernet interface (eth0).
+    """
+    result = self.GetAgent().RemoteSh(['ethtool', 'eth0'], check=False,
+                                      capture_output=True)
+    return re.search(r'Speed: \d+000Mb/s', result.output)
+
+  def IsSELinuxAvailable(self):
+    """Check whether the device has SELinux compiled in."""
+    # Note that SELinux can be enabled for some devices that lack SELinux
+    # tools, so we need to check for the existence of the restorecon bin along
+    # with the sysfs check.
+    return (self.HasProgramInPath('restorecon') and
+            self.IfFileExists('/sys/fs/selinux/enforce'))
+
+  def IsSELinuxEnforced(self):
+    """Check whether the device has SELinux-enforced."""
+    if not self.IsSELinuxAvailable():
+      return False
+    return self.CatFile('/sys/fs/selinux/enforce', max_size=None).strip() == '1'
+
+  def RegisterCleanupCmd(self, cmd, **kwargs):
+    """Register a cleanup command to be run on the device in Cleanup().
+
+    Args:
+      cmd: command to run. See RemoteAccess.RemoteSh documentation.
+      **kwargs: keyword arguments to pass along with cmd. See
+        RemoteAccess.RemoteSh documentation.
+    """
+    self.cleanup_cmds.append((cmd, kwargs))
+
+  def Cleanup(self):
+    """Remove work/temp directories and run all registered cleanup commands."""
+    for cmd, kwargs in self.cleanup_cmds:
+      # We want to run through all cleanup commands even if there are errors.
+      kwargs.setdefault('check', False)
+      try:
+        self.BaseRunCommand(cmd, **kwargs)
+      except SSHConnectionError:
+        logging.error('Failed to connect to host in Cleanup, so '
+                      'SSHConnectionError will not be raised.')
+
+    self.tempdir.Cleanup()
+
+  def _CopyToDeviceInParallel(self, src, dest):
+    """Chop source file in chunks, send them to destination in parallel.
+
+    Transfers chunks of the file in parallel and assembles them at the
+    destination if the file size is larger than the chunk size. Falls back to
+    scp mode otherwise.
+
+    Args:
+      src: Local path as a string.
+      dest: rsync/scp path of the form <host>:/<path> as a string.
+    """
+    src_filename = os.path.basename(src)
+    chunk_prefix = src_filename + '_'
+    with osutils.TempDir() as tempdir:
+      chunk_path = os.path.join(tempdir, chunk_prefix)
+      try:
+        cmd = ['split', '-b', str(CHUNK_SIZE), src, chunk_path]
+        cros_build_lib.run(cmd)
+        input_list = [[chunk_file, dest, 'scp']
+                      for chunk_file in glob.glob(chunk_path + '*')]
+        parallel.RunTasksInProcessPool(self.CopyToDevice,
+                                       input_list,
+                                       processes=DEGREE_OF_PARALLELISM)
+        logging.info('Assembling these chunks now.....')
+        chunks = '%s/%s*' % (dest, chunk_prefix)
+        final_dest = '%s/%s' % (dest, src_filename)
+        assemble_cmd = ['cat', chunks, '>', final_dest]
+        self.run(assemble_cmd)
+        cleanup_cmd = ['rm', '-f', chunks]
+        self.run(cleanup_cmd)
+      except IOError:
+        logging.error('Could not complete the payload transfer...')
+        raise
+    logging.info('Successfully copied %s to %s in parallel chunks', src, dest)
+
+  def CopyToDevice(self, src, dest, mode, **kwargs):
+    """Copy path to device.
+
+    Args:
+      src: Local path as a string.
+      dest: rsync/scp path of the form <host>:/<path> as a string.
+      mode: must be one of 'rsync', 'scp', or 'parallel'.
+        * Use rsync --compress when copying compressible (factor > 2, text/log)
+        files. This uses quite a bit of CPU but preserves bandwidth.
+        * Use rsync without compression when delta transferring a whole directory
+        tree which exists at the destination and changed very little (say
+        telemetry directory or unpacked stateful or unpacked rootfs). It also
+        often works well for an uncompressed archive, copied over a previous
+        copy (which must exist at the destination) needing minor updates.
+        * Use scp when we have incompressible files (say already compressed),
+        especially if we know no previous version exists at the destination.
+        * Use parallel when we want to split a large file into chunks and
+        transfer them in parallel for speed, especially over slow networks
+        (congested, long haul, poor SNR). See the illustrative comment after
+        this method.
+    """
+    assert mode in ['rsync', 'scp', 'parallel']
+    logging.info('[mode:%s] copy: %s -> %s:%s', mode, src, self.hostname, dest)
+    if mode == 'parallel':
+      # Chop and send chunks in parallel only if the file size is larger than
+      # CHUNK_SIZE.
+      if os.stat(src).st_size > CHUNK_SIZE:
+        self._CopyToDeviceInParallel(src, dest)
+        return
+      else:
+        logging.info('%s is too small for parallelism, fall back to scp', src)
+        mode = 'scp'
+    msg = 'Could not copy %s to device.' % src
+    # Fall back to scp if device has no rsync. Happens when stateful is cleaned.
+    if mode == 'scp' or not self.HasRsync():
+      # scp always follows symlinks
+      kwargs.pop('follow_symlinks', None)
+      func = self.GetAgent().Scp
+    else:
+      func = self.GetAgent().Rsync
+
+    return RunCommandFuncWrapper(func, msg, src, dest, **kwargs)
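+
+  # Illustrative sketch (not part of the upstream module; paths are
+  # hypothetical), matching the mode guidance above:
+  #
+  #   device.CopyToDevice('/tmp/logs', '/tmp', mode='rsync', compress=True)
+  #   device.CopyToDevice('payload.bin.xz', '/usr/local', mode='scp')
+  #   device.CopyToDevice('huge_image.bin', '/usr/local', mode='parallel')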
+
+  def CopyFromDevice(self, src, dest, mode='scp', **kwargs):
+    """Copy path from device.
+
+    Adding --compress recommended for text like log files.
+
+    Args:
+      src: rsync/scp path of the form <host>:/<path> as a string.
+      dest: Local path as a string.
+      mode: See mode on CopyToDevice.
+    """
+    msg = 'Could not copy %s from device.' % src
+    # Fall back to scp if device has no rsync. Happens when stateful is cleaned.
+    if mode == 'scp' or not self.HasRsync():
+      # scp always follows symlinks
+      kwargs.pop('follow_symlinks', None)
+      func = self.GetAgent().ScpToLocal
+    else:
+      func = self.GetAgent().RsyncToLocal
+
+    return RunCommandFuncWrapper(func, msg, src, dest, **kwargs)
+
+  def CopyFromWorkDir(self, src, dest, **kwargs):
+    """Copy path from working directory on the device."""
+    return self.CopyFromDevice(os.path.join(self.work_dir, src), dest, **kwargs)
+
+  def CopyToWorkDir(self, src, dest='', **kwargs):
+    """Copy path to working directory on the device."""
+    return self.CopyToDevice(src, os.path.join(self.work_dir, dest), **kwargs)
+
+  def _TestPath(self, path, option, **kwargs):
+    """Tests a given path for specific options."""
+    kwargs.setdefault('check', False)
+    result = self.run(['test', option, path], **kwargs)
+    return result.returncode == 0
+
+  def IfFileExists(self, path, **kwargs):
+    """Check if the given file exists on the device."""
+    return self._TestPath(path, '-f', **kwargs)
+
+  def IfPathExists(self, path, **kwargs):
+    """Check if the given path exists on the device."""
+    return self._TestPath(path, '-e', **kwargs)
+
+  def IsDirWritable(self, path):
+    """Checks if the given directory is writable on the device.
+
+    Args:
+      path: Directory on the device to check.
+    """
+    tmp_file = os.path.join(path, '.tmp.remote_access.is.writable')
+    result = self.GetAgent().RemoteSh(
+        ['touch', tmp_file, '&&', 'rm', tmp_file],
+        check=False, remote_sudo=True, capture_output=True)
+    return result.returncode == 0
+
+  def IsFileExecutable(self, path):
+    """Check if the given file is executable on the device.
+
+    Args:
+      path: full path to the file on the device to check.
+
+    Returns:
+      True if the file is executable, and False if the file does not exist or is
+      not executable.
+    """
+    cmd = ['test', '-f', path, '-a', '-x', path,]
+    result = self.GetAgent().RemoteSh(cmd, remote_sudo=True, check=False,
+                                      capture_output=True)
+    return result.returncode == 0
+
+  def GetSize(self, path):
+    """Gets the size of the given file on the device.
+
+    Args:
+      path: full path to the file on the device.
+
+    Returns:
+      Size of the file in number of bytes.
+
+    Raises:
+      ValueError if failed to get file size from the remote output.
+      cros_build_lib.RunCommandError if |path| does not exist or the remote
+      command to get file size has failed.
+    """
+    cmd = ['du', '-Lb', '--max-depth=0', path]
+    result = self.BaseRunCommand(cmd, remote_sudo=True, capture_output=True)
+    return int(result.output.split()[0])
+
+  def CatFile(self, path, max_size=1000000):
+    """Reads the file on device to string if its size is less than |max_size|.
+
+    Args:
+      path: The full path to the file on the device to read.
+      max_size: Read the file only if its size is less than |max_size| in bytes.
+        If None, do not check its size and always cat the path.
+
+    Returns:
+      A string of the file content.
+
+    Raises:
+      CatFileError if failed to read the remote file or the file size is larger
+      than |max_size|.
+    """
+    if max_size is not None:
+      try:
+        file_size = self.GetSize(path)
+      except (ValueError, cros_build_lib.RunCommandError) as e:
+        raise CatFileError('Failed to get size of file "%s": %s' % (path, e))
+      if file_size > max_size:
+        raise CatFileError('File "%s" is larger than %d bytes' %
+                           (path, max_size))
+
+    result = self.BaseRunCommand(['cat', path], remote_sudo=True,
+                                 check=False, capture_output=True)
+    if result.returncode:
+      raise CatFileError('Failed to read file "%s" on the device' % path)
+    return result.output
+
+  def DeletePath(self, path, relative_to_work_dir=False, recursive=False):
+    """Deletes a path on the remote device.
+
+    Args:
+      path: The path on the remote device that should be deleted.
+      relative_to_work_dir: If true, the path is relative to |self.work_dir|.
+      recursive: If true, the |path| is deleted recursively.
+
+    Raises:
+      cros_build_lib.RunCommandError if |path| does not exist or the remote
+      command to delete the |path| has failed.
+    """
+    if relative_to_work_dir:
+      path = os.path.join(self.work_dir, path)
+
+    cmd = ['rm', '-f']
+    if recursive:
+      cmd += ['-r']
+    cmd += [path]
+
+    self.run(cmd)
+
+  def PipeOverSSH(self, filepath, cmd, **kwargs):
+    """Cat a file and pipe over SSH."""
+    producer_cmd = ['cat', filepath]
+    return self.GetAgent().PipeToRemoteSh(producer_cmd, cmd, **kwargs)
+
+  def GetRunningPids(self, exe, full_path=True):
+    """Get all the running pids on the device with the executable path.
+
+    Args:
+      exe: The executable path to get pids for.
+      full_path: Whether |exe| is a full executable path.
+
+    Raises:
+      RunningPidsError when failing to parse out pids from command output.
+      SSHConnectionError when error occurs during SSH connection.
+    """
+    try:
+      cmd = ['pgrep', exe]
+      if full_path:
+        cmd.append('-f')
+      result = self.GetAgent().RemoteSh(cmd, check=False,
+                                        capture_output=True)
+      try:
+        return [int(pid) for pid in result.output.splitlines()]
+      except ValueError:
+        logging.error('Parsing output failed:\n%s', result.output)
+        raise RunningPidsError('Unable to get running pids of %s' % exe)
+    except SSHConnectionError:
+      logging.error('Error connecting to device %s', self.hostname)
+      raise
+
+  def Reboot(self, timeout_sec=REBOOT_MAX_WAIT):
+    """Reboot the device."""
+    return self.GetAgent().RemoteReboot(timeout_sec=timeout_sec)
+
+  # TODO(vapier): Delete this shim once chromite & users migrate.
+  def BaseRunCommand(self, cmd, **kwargs):
+    """Backwards compat API."""
+    return self.base_run(cmd, **kwargs)
+
+  def base_run(self, cmd, **kwargs):
+    """Executes a shell command on the device with output captured by default.
+
+    Args:
+      cmd: command to run. See RemoteAccess.RemoteSh documentation.
+      **kwargs: keyword arguments to pass along with cmd. See
+        RemoteAccess.RemoteSh documentation.
+    """
+    kwargs.setdefault('debug_level', self.debug_level)
+    kwargs.setdefault('connect_settings', self.connect_settings)
+    try:
+      return self.GetAgent().RemoteSh(cmd, **kwargs)
+    except SSHConnectionError:
+      logging.error('Error connecting to device %s', self.hostname)
+      raise
+
+  def run(self, cmd, **kwargs):
+    """Executes a shell command on the device with output captured by default.
+
+    Also sets environment variables using dictionary provided by
+    keyword argument |extra_env|.
+
+    Args:
+      cmd: command to run. See RemoteAccess.RemoteSh documentation.
+      **kwargs: keyword arguments to pass along with cmd. See
+        RemoteAccess.RemoteSh documentation.
+    """
+    # Handle setting environment variables on the device by copying
+    # and sourcing a temporary environment file.
+    extra_env = kwargs.pop('extra_env', None)
+    if extra_env:
+      remote_sudo = kwargs.pop('remote_sudo', False)
+      if remote_sudo and self.GetAgent().username == ROOT_ACCOUNT:
+        remote_sudo = False
+
+      new_cmd = []
+      flat_vars = ['%s=%s' % (k, cros_build_lib.ShellQuote(v))
+                   for k, v in extra_env.items()]
+
+      # If the vars are too large for the command line, do it indirectly.
+      # We pick 32k somewhat arbitrarily -- the kernel should accept this
+      # and rarely should remote commands get near that size.
+      ARG_MAX = 32 * 1024
+
+      # What the command line would generally look like on the remote.
+      if isinstance(cmd, six.string_types):
+        if not kwargs.get('shell', False):
+          raise ValueError("'shell' must be True when 'cmd' is a string.")
+        cmdline = ' '.join(flat_vars) + ' ' + cmd
+      else:
+        if kwargs.get('shell', False):
+          raise ValueError("'shell' must be False when 'cmd' is a list.")
+        cmdline = ' '.join(flat_vars + cmd)
+      if len(cmdline) > ARG_MAX:
+        env_list = ['export %s' % x for x in flat_vars]
+        with tempfile.NamedTemporaryFile(dir=self.tempdir.tempdir,
+                                         prefix='env') as f:
+          logging.debug('Environment variables: %s', ' '.join(env_list))
+          osutils.WriteFile(f.name, '\n'.join(env_list))
+          self.CopyToWorkDir(f.name)
+          env_file = os.path.join(self.work_dir, os.path.basename(f.name))
+          new_cmd += ['.', '%s;' % env_file]
+          if remote_sudo:
+            new_cmd += ['sudo', '-E']
+      else:
+        if remote_sudo:
+          new_cmd += ['sudo']
+        new_cmd += flat_vars
+
+      if isinstance(cmd, six.string_types):
+        cmd = ' '.join(new_cmd) + ' ' + cmd
+      else:
+        cmd = new_cmd + cmd
+
+    return self.BaseRunCommand(cmd, **kwargs)
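+
+  # Illustrative sketch (not part of the upstream module; values are
+  # hypothetical): small |extra_env| dicts are simply prepended to the remote
+  # command line.
+  #
+  #   device.run(['printenv', 'FOO'], extra_env={'FOO': 'bar'})
+  #   # effectively runs: FOO=bar printenv FOO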
+
+  def CheckIfRebooted(self, old_boot_id):
+    """Checks if the remote device has successfully rebooted
+
+    This compares the remote device old and current boot IDs.  If
+    ssh errors occur, the device has likely not booted and False is
+    returned.  Basically only returns True if it is proven that the
+    device has rebooted.  May throw exceptions.
+
+    Returns:
+      True if the device has successfully rebooted, False otherwise.
+    """
+    return self.GetAgent().CheckIfRebooted(old_boot_id)
+
+  def AwaitReboot(self, old_boot_id):
+    """Await reboot away from old_boot_id.
+
+    Args:
+      old_boot_id: The boot_id that must be transitioned away from for success.
+
+    Returns:
+      True if the device has successfully rebooted.
+    """
+    return self.GetAgent().AwaitReboot(old_boot_id)
+
+
+class ChromiumOSDevice(RemoteDevice):
+  """Basic commands to interact with a ChromiumOS device over SSH connection."""
+
+  MAKE_DEV_SSD_BIN = '/usr/share/vboot/bin/make_dev_ssd.sh'
+  MOUNT_ROOTFS_RW_CMD = ['mount', '-o', 'remount,rw', '/']
+  LIST_MOUNTS_CMD = ['cat', '/proc/mounts']
+
+  def __init__(self, hostname, include_dev_paths=True, **kwargs):
+    """Initializes this object.
+
+    Args:
+      hostname: A network hostname.
+      include_dev_paths: If true, add DEV_BIN_PATHS to $PATH for all commands.
+      kwargs: Args to pass to the parent constructor.
+    """
+    super(ChromiumOSDevice, self).__init__(hostname, **kwargs)
+    self._orig_path = None
+    self._path = None
+    self._include_dev_paths = include_dev_paths
+    self._lsb_release = {}
+
+  @property
+  def orig_path(self):
+    """The $PATH variable on the device."""
+    if not self._orig_path:
+      try:
+        result = self.BaseRunCommand(['echo', '${PATH}'])
+      except cros_build_lib.RunCommandError as e:
+        logging.error('Failed to get $PATH on the device: %s', e.result.error)
+        raise
+
+      self._orig_path = result.output.strip()
+
+    return self._orig_path
+
+  @property
+  def path(self):
+    """The $PATH variable on the device prepended with DEV_BIN_PATHS."""
+    if not self._path:
+      # If the remote path already has our dev paths (which is common), then
+      # there is no need for us to prepend.
+      orig_paths = self.orig_path.split(':')
+      for path in reversed(DEV_BIN_PATHS.split(':')):
+        if path not in orig_paths:
+          orig_paths.insert(0, path)
+
+      self._path = ':'.join(orig_paths)
+
+    return self._path
+
+  @property
+  def lsb_release(self):
+    """The /etc/lsb-release content on the device.
+
+    Returns a dict of the entries in the /etc/lsb-release file. If multiple
+    entries have the same key, only the first entry is recorded. Returns an
+    empty dict if the reading command failed or the file is corrupted (i.e.,
+    does not have the format of <key>=<value> for every line).
+    """
+    if not self._lsb_release:
+      try:
+        content = self.CatFile(constants.LSB_RELEASE_PATH, max_size=None)
+      except CatFileError as e:
+        logging.debug(
+            'Failed to read "%s" on the device: %s',
+            constants.LSB_RELEASE_PATH, e)
+      else:
+        try:
+          self._lsb_release = dict(e.split('=', 1)
+                                   for e in reversed(content.splitlines()))
+        except ValueError:
+          logging.error('File "%s" on the device is malformed.',
+                        constants.LSB_RELEASE_PATH)
+
+    return self._lsb_release
+
+  @property
+  def board(self):
+    """The board name of the device."""
+    return self.lsb_release.get(cros_set_lsb_release.LSB_KEY_BOARD, '')
+
+  @property
+  def version(self):
+    """The OS version of the device."""
+    return self.lsb_release.get(cros_set_lsb_release.LSB_KEY_VERSION, '')
+
+  @property
+  def app_id(self):
+    """The App ID of the device."""
+    return self.lsb_release.get(cros_set_lsb_release.LSB_KEY_APPID_RELEASE, '')
+
+  def _RemountRootfsAsWritable(self):
+    """Attempts to Remount the root partition."""
+    logging.info("Remounting '/' with rw...")
+    self.run(self.MOUNT_ROOTFS_RW_CMD, check=False, remote_sudo=True)
+
+  def _RootfsIsReadOnly(self):
+    """Returns True if rootfs on is mounted as read-only."""
+    r = self.run(self.LIST_MOUNTS_CMD, capture_output=True)
+    for line in r.output.splitlines():
+      if not line:
+        continue
+
+      chunks = line.split()
+      if chunks[1] == '/' and 'ro' in chunks[3].split(','):
+        return True
+
+    return False
+
+  def DisableRootfsVerification(self):
+    """Disables device rootfs verification."""
+    logging.info('Disabling rootfs verification on device...')
+    self.run(
+        [self.MAKE_DEV_SSD_BIN, '--remove_rootfs_verification', '--force'],
+        check=False, remote_sudo=True)
+    # TODO(yjhong): Make sure an update is not pending.
+    logging.info('Need to reboot to actually disable the verification.')
+    self.Reboot()
+    # After reboot, the rootfs is mounted read-only, so remount as read-write.
+    self._RemountRootfsAsWritable()
+
+  def MountRootfsReadWrite(self):
+    """Checks mount types and remounts them as read-write if needed.
+
+    Returns:
+      True if rootfs is mounted as read-write. False otherwise.
+    """
+    if not self._RootfsIsReadOnly():
+      return True
+
+    # If the image on the device is built with rootfs verification
+    # disabled, we can simply remount '/' as read-write.
+    self._RemountRootfsAsWritable()
+
+    if not self._RootfsIsReadOnly():
+      return True
+
+    logging.info('Unable to remount rootfs as rw (normal w/verified rootfs).')
+    # If the image is built with rootfs verification, turn it off.
+    self.DisableRootfsVerification()
+
+    return not self._RootfsIsReadOnly()
+
+  def run(self, cmd, **kwargs):
+    """Executes a shell command on the device with output captured by default.
+
+    Also makes sure $PATH is set correctly by adding DEV_BIN_PATHS to
+    'PATH' in |extra_env| if self._include_dev_paths is True.
+
+    Args:
+      cmd: command to run. See RemoteAccess.RemoteSh documentation.
+      **kwargs: keyword arguments to pass along with cmd. See
+        RemoteAccess.RemoteSh documentation.
+    """
+    if self._include_dev_paths:
+      extra_env = kwargs.pop('extra_env', {})
+      extra_env.setdefault('PATH', self.path)
+      kwargs['extra_env'] = extra_env
+    return super(ChromiumOSDevice, self).run(cmd, **kwargs)
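+
+
+# Usage sketch (illustrative only): how the properties and helpers above might
+# be combined; 'dut-hostname' and the touched path are placeholders.
+#
+#   device = ChromiumOSDevice('dut-hostname', include_dev_paths=True)
+#   logging.info('Board %s, version %s', device.board, device.version)
+#   if device.MountRootfsReadWrite():
+#     device.run(['touch', '/usr/local/.example'], remote_sudo=True)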
diff --git a/utils/frozen_chromite/lib/results_lib.py b/utils/frozen_chromite/lib/results_lib.py
new file mode 100644
index 0000000..8921009
--- /dev/null
+++ b/utils/frozen_chromite/lib/results_lib.py
@@ -0,0 +1,313 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Classes for collecting results of our BuildStages as they run."""
+
+from __future__ import print_function
+
+import collections
+import datetime
+import math
+import os
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import failures_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+
+
+def _GetCheckpointFile(buildroot):
+  return os.path.join(buildroot, '.completed_stages')
+
+
+def WriteCheckpoint(buildroot):
+  """Drops a completed stages file with current state."""
+  completed_stages_file = _GetCheckpointFile(buildroot)
+  with open(completed_stages_file, 'w+') as save_file:
+    Results.SaveCompletedStages(save_file)
+
+
+def LoadCheckpoint(buildroot):
+  """Restore completed stage info from checkpoint file."""
+  completed_stages_file = _GetCheckpointFile(buildroot)
+  if not os.path.exists(completed_stages_file):
+    logging.warning('Checkpoint file not found in buildroot %s', buildroot)
+    return
+
+  with open(completed_stages_file, 'r') as load_file:
+    Results.RestoreCompletedStages(load_file)
+
+
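+# Usage sketch (illustrative only; |buildroot| is assumed to be a writable
+# build root directory):
+#
+#   Results.Record('BuildPackages', Results.SUCCESS, time=42)
+#   WriteCheckpoint(buildroot)    # writes <buildroot>/.completed_stages
+#   Results.Clear()
+#   LoadCheckpoint(buildroot)     # repopulates Results.GetPrevious()
+
+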
+class RecordedTraceback(object):
+  """This class represents a traceback recorded in the list of results."""
+
+  def __init__(self, failed_stage, failed_prefix, exception, traceback):
+    """Construct a RecordedTraceback object.
+
+    Args:
+      failed_stage: The stage that failed during the build. E.g., HWTest [bvt]
+      failed_prefix: The prefix of the stage that failed. E.g., HWTest
+      exception: The raw exception object.
+      traceback: The full stack trace for the failure, as a string.
+    """
+    self.failed_stage = failed_stage
+    self.failed_prefix = failed_prefix
+    self.exception = exception
+    self.traceback = traceback
+
+
+_result_fields = ['name', 'result', 'description', 'prefix', 'board', 'time']
+Result = collections.namedtuple('Result', _result_fields)
+
+
+class _Results(object):
+  """Static class that collects the results of our BuildStages as they run."""
+
+  SUCCESS = 'Stage was successful'
+  FORGIVEN = 'Stage failed but was optional'
+  SKIPPED = 'Stage was skipped'
+  NON_FAILURE_TYPES = (SUCCESS, FORGIVEN, SKIPPED)
+
+  SPLIT_TOKEN = r'\_O_/'
+
+  def __init__(self):
+    # List of results for all stages that's built up as we run. Members are of
+    #  the form ('name', SUCCESS | FORGIVEN | Exception, None | description)
+    self._results_log = []
+
+    # A list of instances of failure_message_lib.StageFailureMessage
+    # representing the exceptions thrown by failed stages.
+    self._failure_message_results = []
+
+    # Stages run in a previous run and restored. Stored as a dictionary of
+    # names to previous records.
+    self._previous = {}
+
+    self.start_time = datetime.datetime.now()
+
+  def Clear(self):
+    """Clear existing stage results."""
+    self.__init__()
+
+  def PreviouslyCompletedRecord(self, name):
+    """Check to see if this stage was previously completed.
+
+    Returns:
+      The stage's previous Result record if it completed successfully in a
+      previous run, or None otherwise.
+    """
+    return self._previous.get(name)
+
+  def BuildSucceededSoFar(self, buildstore=None, buildbucket_id=None,
+                          name=None):
+    """Return true if all stages so far have passing states.
+
+    This method returns true if all was successful or forgiven or skipped.
+
+    Args:
+      buildstore: A BuildStore instance to make DB calls.
+      buildbucket_id: buildbucket_id of the build to check.
+      name: stage name of current stage.
+    """
+    build_success = all(entry.result in self.NON_FAILURE_TYPES
+                        for entry in self._results_log)
+
+    # When a timeout happens and background tasks are killed, the statuses
+    # of the background stage tasks may get lost. BuildSucceededSoFar may
+    # still return build_success = True even though the killed stage tasks
+    # failed. Add one more verification step in _BuildSucceededFromCIDB to
+    # check the stage status in CIDB.
+    return (build_success and
+            self._BuildSucceededFromCIDB(buildstore=buildstore,
+                                         buildbucket_id=buildbucket_id,
+                                         name=name))
+
+  def _BuildSucceededFromCIDB(self, buildstore=None, buildbucket_id=None,
+                              name=None):
+    """Return True if all stages recorded in buildbucket have passing states.
+
+    Args:
+      buildstore: A BuildStore instance to make DB calls.
+      buildbucket_id: buildbucket_id of the build to check.
+      name: stage name of current stage.
+    """
+    if (buildstore is not None and buildstore.AreClientsReady()
+        and buildbucket_id is not None):
+      stages = buildstore.GetBuildsStages(buildbucket_ids=[buildbucket_id])
+      for stage in stages:
+        if name is not None and stage['name'] == name:
+          logging.info("Ignore status of %s as it's the current stage.",
+                       stage['name'])
+          continue
+        if stage['status'] not in constants.BUILDER_NON_FAILURE_STATUSES:
+          logging.warning('Failure in previous stage %s with status %s.',
+                          stage['name'], stage['status'])
+          return False
+
+    return True
+
+  def StageHasResults(self, name):
+    """Return true if stage has posted results."""
+    return name in [entry.name for entry in self._results_log]
+
+  def _RecordStageFailureMessage(self, name, exception, prefix=None,
+                                 build_stage_id=None):
+    self._failure_message_results.append(
+        failures_lib.GetStageFailureMessageFromException(
+            name, build_stage_id, exception, stage_prefix_name=prefix))
+
+  def Record(self, name, result, description=None, prefix=None, board='',
+             time=0, build_stage_id=None):
+    """Store off an additional stage result.
+
+    Args:
+      name: The name of the stage (e.g. HWTest [bvt])
+      result:
+        Result should be one of:
+          Results.SUCCESS if the stage was successful.
+          Results.SKIPPED if the stage was skipped.
+          Results.FORGIVEN if the stage had warnings.
+          Otherwise, it should be the exception the stage errored with.
+      description:
+        The textual backtrace of the exception, or None
+      prefix: The prefix of the stage (e.g. HWTest). Defaults to
+        the value of name.
+      board: The board associated with the stage, if any. Defaults to ''.
+      time: How long the result took to complete.
+      build_stage_id: The id of the failed build stage to record, default to
+        None.
+    """
+    if prefix is None:
+      prefix = name
+
+    # Convert exception to stage_failure_message and record it.
+    if isinstance(result, BaseException):
+      self._RecordStageFailureMessage(name, result, prefix=prefix,
+                                      build_stage_id=build_stage_id)
+
+    result = Result(name, result, description, prefix, board, time)
+    self._results_log.append(result)
+
+  def GetStageFailureMessage(self):
+    return self._failure_message_results
+
+  def Get(self):
+    """Fetch stage results.
+
+    Returns:
+      A list with one entry per stage run with a result.
+    """
+    return self._results_log
+
+  def GetPrevious(self):
+    """Fetch stage results.
+
+    Returns:
+      A list of stages names that were completed in a previous run.
+    """
+    return self._previous
+
+  def SaveCompletedStages(self, out):
+    """Save the successfully completed stages to the provided file |out|."""
+    for entry in self._results_log:
+      if entry.result != self.SUCCESS:
+        break
+      out.write(self.SPLIT_TOKEN.join(str(x) for x in entry) + '\n')
+
+  def RestoreCompletedStages(self, out):
+    """Load the successfully completed stages from the provided file |out|."""
+    # Read the file, and strip off the newlines.
+    for line in out:
+      record = line.strip().split(self.SPLIT_TOKEN)
+      if len(record) != len(_result_fields):
+        logging.warning('State file does not match expected format, ignoring.')
+        # Wipe any partial state.
+        self._previous = {}
+        break
+
+      self._previous[record[0]] = Result(*record)
+
+  def GetTracebacks(self):
+    """Get a list of the exceptions that failed the build.
+
+    Returns:
+      A list of RecordedTraceback objects.
+    """
+    tracebacks = []
+    for entry in self._results_log:
+      # If entry.result is not in NON_FAILURE_TYPES, then the stage failed, and
+      # entry.result is the exception object and entry.description is a string
+      # containing the full traceback.
+      if entry.result not in self.NON_FAILURE_TYPES:
+        traceback = RecordedTraceback(entry.name, entry.prefix, entry.result,
+                                      entry.description)
+        tracebacks.append(traceback)
+    return tracebacks
+
+  def Report(self, out, current_version=None):
+    """Generate a user friendly text display of the results data.
+
+    Args:
+      out: Output stream to write to (e.g. sys.stdout).
+      current_version: ChromeOS version associated with this report.
+    """
+    results = self._results_log
+
+    line = '*' * 60 + '\n'
+    edge = '*' * 2
+
+    if current_version:
+      out.write(line)
+      out.write(edge +
+                ' RELEASE VERSION: ' +
+                current_version +
+                '\n')
+
+    out.write(line)
+    out.write(edge + ' Stage Results\n')
+    warnings = False
+
+    for entry in results:
+      name, result, run_time = (entry.name, entry.result, entry.time)
+      timestr = datetime.timedelta(seconds=math.ceil(run_time))
+
+      # Don't print data on skipped stages.
+      if result == self.SKIPPED:
+        continue
+
+      out.write(line)
+      details = ''
+      if result == self.SUCCESS:
+        status = 'PASS'
+      elif result == self.FORGIVEN:
+        status = 'FAILED BUT FORGIVEN'
+        warnings = True
+      else:
+        status = 'FAIL'
+        if isinstance(result, cros_build_lib.RunCommandError):
+          # If there was a run error, give just the command that failed, not
+          # its full argument list, since those are usually too long.
+          details = ' in %s' % result.result.cmd[0]
+        elif isinstance(result, failures_lib.BuildScriptFailure):
+          # BuildScriptFailure errors publish a 'short' name of the
+          # command that failed.
+          details = ' in %s' % result.shortname
+        else:
+          # There was a normal error. Give the type of exception.
+          details = ' with %s' % type(result).__name__
+
+      out.write('%s %s %s (%s)%s\n' % (edge, status, name, timestr, details))
+
+    out.write(line)
+
+    for x in self.GetTracebacks():
+      if x.failed_stage and x.traceback:
+        out.write('\nFailed in stage %s:\n\n' % x.failed_stage)
+        out.write(x.traceback)
+        out.write('\n')
+
+    if warnings:
+      logging.PrintBuildbotStepWarnings(out)
+
+
+Results = _Results()
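+
+
+# Usage sketch (illustrative only; stage names and version string are made
+# up): record a couple of stage results on the module-level |Results|
+# singleton and print the summary report.
+#
+#   import sys
+#   Results.Record('BuildPackages', Results.SUCCESS, time=120)
+#   Results.Record('UnitTest', Results.FORGIVEN,
+#                  description='flaky suite forgiven', time=30)
+#   Results.Report(sys.stdout, current_version='R99-1234.0.0')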
diff --git a/utils/frozen_chromite/lib/retry_stats.py b/utils/frozen_chromite/lib/retry_stats.py
new file mode 100644
index 0000000..52aab97
--- /dev/null
+++ b/utils/frozen_chromite/lib/retry_stats.py
@@ -0,0 +1,183 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Infrastructure for collecting statistics about retries."""
+
+from __future__ import print_function
+
+import collections
+import datetime
+
+from autotest_lib.utils.frozen_chromite.lib import parallel
+from autotest_lib.utils.frozen_chromite.lib import retry_util
+
+
+# Well known categories we gather stats for.
+CIDB = 'CIDB'
+GSUTIL = 'Google Storage'
+
+
+class UnconfiguredStatsCategory(Exception):
+  """We tried to use a Stats Category without configuring it."""
+
+
+# Create one of these for each retry call.
+#   attempts: a list of all attempts to perform the action.
+StatEntry = collections.namedtuple(
+    'StatEntry',
+    ('category', 'attempts'))
+
+# Create one of these for each attempt to call the function.
+#  time: The time for this attempt in seconds.
+#  exception: None for a successful attempt, or a string exception description.
+Attempt = collections.namedtuple(
+    'Attempt',
+    ('time', 'exception'))
+
+
+# After Setup, contains a multiprocess proxy array.
+# The array holds StatEntry values for each event seen.
+_STATS_COLLECTION = None
+
+
+def SetupStats():
+  """Prepare a given category to collect stats.
+
+  This must be called BEFORE any new processes that might read or write to
+  these stat values are created. It is safe to call this more than once,
+  but most efficient to only make a single call.
+  """
+  # Pylint thinks our manager has no members.
+  m = parallel.Manager()
+
+  # pylint: disable=global-statement
+  # Create a new stats collection structure that is multiprocess usable.
+  global _STATS_COLLECTION
+  _STATS_COLLECTION = m.list()
+
+
+def _SuccessFilter(entry):
+  """Returns True if the StatEntry succeeded (perhaps after retries)."""
+  # If all attempts contain an exception, they all failed.
+  return not all(a.exception for a in entry.attempts)
+
+
+def _RetryCount(entry):
+  """Returns the number of retries in this StatEntry."""
+  # Every attempt beyond the first one is a retry.
+  return max(len(entry.attempts) - 1, 0)
+
+
+def CategoryStats(category):
+  """Return stats numbers for a given category.
+
+  success is the number of times a given command succeeded, even if it had to be
+  retried.
+
+  failure is the number of times we exhausted all retries without success.
+
+  retry is the total number of times we retried a command, unrelated to eventual
+  success or failure.
+
+  Args:
+    category: A string that defines the 'namespace' for these stats.
+
+  Returns:
+    success, failure, retry values as integers.
+  """
+  # Convert the multiprocess proxy list into a local simple list.
+  local_stats_collection = list(_STATS_COLLECTION)
+
+  # Extract the values for the category we care about.
+  stats = [e for e in local_stats_collection if e.category == category]
+
+  success = len([e for e in stats if _SuccessFilter(e)])
+  failure = len(stats) - success
+  retry = sum([_RetryCount(e) for e in stats])
+
+  return success, failure, retry
+
+
+def ReportCategoryStats(out, category):
+  """Dump stats reports for a given category.
+
+  Args:
+    out: Output stream to write to (e.g. sys.stdout).
+    category: A string that defines the 'namespace' for these stats.
+  """
+  success, failure, retry = CategoryStats(category)
+
+  line = '*' * 60 + '\n'
+  edge = '*' * 2
+
+  out.write(line)
+  out.write(edge + ' Performance Statistics for %s' % category + '\n')
+  out.write(edge + '\n')
+  out.write(edge + ' Success: %d' % success + '\n')
+  out.write(edge + ' Failure: %d' % failure + '\n')
+  out.write(edge + ' Retries: %d' % retry + '\n')
+  out.write(edge + ' Total: %d' % (success + failure) + '\n')
+  out.write(line)
+
+
+def ReportStats(out):
+  """Dump stats reports for a given category.
+
+  Args:
+    out: Output stream to write to (e.g. sys.stdout).
+    category: A string that defines the 'namespace' for these stats.
+  """
+  categories = sorted(set(e.category for e in _STATS_COLLECTION))
+
+  for category in categories:
+    ReportCategoryStats(out, category)
+
+
+def RetryWithStats(category, handler, max_retry, functor, *args, **kwargs):
+  """Wrapper around retry_util.GenericRetry that collects stats.
+
+  This wrapper collects statistics about each failure or retry. Each
+  category is defined by a unique string. Each category should be setup
+  before use (actually, before processes are forked).
+
+  All other arguments are blindly passed to retry_util.GenericRetry.
+
+  Args:
+    category: A string that defines the 'namespace' for these stats.
+    handler: See retry_util.GenericRetry.
+    max_retry: See retry_util.GenericRetry.
+    functor: See retry_util.GenericRetry.
+    args: See retry_util.GenericRetry.
+    kwargs: See retry_util.GenericRetry.
+
+  Returns:
+    See retry_util.GenericRetry.
+
+  Raises:
+    See retry_util.GenericRetry raises.
+  """
+  statEntry = StatEntry(category, attempts=[])
+
+  # Wrap the work method, so we can gather info.
+  def wrapper(*args, **kwargs):
+    start = datetime.datetime.now()
+
+    try:
+      result = functor(*args, **kwargs)
+    except Exception as e:
+      end = datetime.datetime.now()
+      e_description = '%s: %s' % (type(e).__name__, e)
+      statEntry.attempts.append(Attempt(end - start, e_description))
+      raise
+
+    end = datetime.datetime.now()
+    statEntry.attempts.append(Attempt(end - start, None))
+    return result
+
+  try:
+    return retry_util.GenericRetry(handler, max_retry, wrapper,
+                                   *args, **kwargs)
+  finally:
+    if _STATS_COLLECTION is not None:
+      _STATS_COLLECTION.append(statEntry)
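+
+
+# Usage sketch (illustrative only; |_FlakyDownload| is a hypothetical callable
+# and any exception it raises is treated as retriable here):
+#
+#   import sys
+#   SetupStats()                  # before any worker processes are forked
+#   RetryWithStats(GSUTIL, lambda _exc: True, 3, _FlakyDownload)
+#   ReportStats(sys.stdout)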
diff --git a/utils/frozen_chromite/lib/retry_util.py b/utils/frozen_chromite/lib/retry_util.py
new file mode 100644
index 0000000..7e9f336
--- /dev/null
+++ b/utils/frozen_chromite/lib/retry_util.py
@@ -0,0 +1,430 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Basic infrastructure for implementing retries."""
+
+from __future__ import print_function
+
+import functools
+import random
+import re
+import sys
+import time
+
+import six
+
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+
+
+# Match stderr of curl's --fail option to see HTTP status code.
+CURL_STATUS_RE = re.compile(br'The requested URL returned error: (\d+) ')
+
+
+def _CreateExceptionRetryHandler(exception):
+  """Returns a retry handler for given exception(s).
+
+  Please see WithRetry class document for details.
+  """
+  if not (isinstance(exception, type) and issubclass(exception, Exception) or
+          (isinstance(exception, tuple) and
+           all(issubclass(e, Exception) for e in exception))):
+    raise TypeError('exceptions should be an exception (or tuple), not %r' %
+                    exception)
+  return lambda exc: isinstance(exc, exception)
+
+
+class _RetryDelayStrategy(object):
+  """The strategy of the delay between each retry attempts.
+
+  Please see WithRetry class document for details.
+  """
+
+  def __init__(self, sleep=0, backoff_factor=1, jitter=0):
+    if sleep < 0:
+      raise ValueError('sleep must be >= 0: %s' % sleep)
+
+    if backoff_factor < 1:
+      raise ValueError('backoff_factor must be 1 or greater: %s'
+                       % backoff_factor)
+
+    if jitter < 0:
+      raise ValueError('jitter must be >= 0: %s' % jitter)
+
+    self._sleep = sleep
+    self._backoff_factor = backoff_factor
+    self._jitter = jitter
+
+  def Sleep(self, attempt):
+    """Sleep to delay the current retry."""
+    assert attempt >= 1, 'Expect attempt is always positive: %s' % attempt
+    if self._backoff_factor > 1:
+      sleep_duration = self._sleep * self._backoff_factor ** (attempt - 1)
+    else:
+      sleep_duration = self._sleep * attempt
+
+    # If |jitter| is set, add a random jitter sleep.
+    jitter = random.uniform(.5 * self._jitter, 1.5 * self._jitter)
+    total = sleep_duration + jitter
+    if total:
+      logging.debug('Retrying in %f (%f + jitter %f) seconds ...',
+                    total, sleep_duration, jitter)
+      time.sleep(total)
+
+
+class WithRetry(object):
+  """Decorator to handle retry on exception.
+
+  Examples:
+    @WithRetry(max_retry=3)
+    def _run():
+      ... do something ...
+    _run()
+
+    If _run() raises an exception, it retries at most three times.
+
+  Retrying strategy.
+
+  If the decorated function throws an Exception instance, then this class
+  checks whether the retry should be continued or not based on the given
+  |handler| or |exception| as follows.
+  - If |handler| is given, which should be a callback that takes an exception
+    and returns a bool, it is called with the thrown exception.
+    If the |handler| returns True, retry will be continued. Otherwise no
+    further retry will be made, and an exception will be raised.
+  - If |exception| is given, which is an exception class or a tuple of
+    exception classes, retrying continues iff the thrown exception is an
+    instance of the given exception class(es) (or a subclass). Otherwise no
+    further retry will be made, and an exception will be raised.
+  - If neither is given, just continues to retry on any Exception instance.
+  - Note: it is not allowed to specify both |handler| and |exception| at once.
+
+  Delay strategy.
+
+  Between attempts, a delay can be set, as follows.
+  - If |sleep| is given, the delay between the first and second attempts is
+    |sleep| secs.
+  - The delay between the second and third attempts, and later, depends on
+    |sleep| and |backoff_factor|.
+    - If |backoff_factor| is not given, the delay will be linearly increased,
+      as |sleep| * (number of attempts). E.g., if |sleep| is 1, the delays
+      will be 1, 2, 3, 4, 5, ... and so on.
+    - If |backoff_factor| is given, the delay will be exponentially increased,
+      as |sleep| * |backoff_factor| ** (number of attempts - 1). E.g., if
+      |sleep| is 1, and |backoff_factor| is 2, the delay will be,
+      1, 2, 4, 8, 16, ... and so on
+  - Note: Keep in mind that, if |backoff_factor| is not given, the total
+    delay time will be the triangular number of |max_retry| multiplied by the
+    |sleep| value. E.g., with |max_retry| = 5 and |sleep| = 10, the total is
+    T5 (i.e. 5 + 4 + 3 + 2 + 1) times 10 = 150 seconds. Rather than using a
+    large sleep value, lean towards more retries and lower sleep intervals,
+    or utilize |backoff_factor|.
+  - In addition, a random duration can be added to each delay, as 'jitter'.
+    (Often, this helps to avoid repeated conflicts between concurrent
+    callers.) |jitter| specifies the base duration of the jitter delay,
+    randomized up to 50% in either direction.
+  """
+
+  def __init__(self,
+               max_retry, handler=None, exception=None, log_all_retries=False,
+               sleep=0, backoff_factor=1, jitter=0,
+               raise_first_exception_on_failure=True, exception_to_raise=None,
+               status_callback=None):
+    """Initialize.
+
+    Args:
+      max_retry: A positive integer representing how many times to retry the
+          command before giving up.  Worst case, the command is invoked
+          (max_retry + 1) times before failing.
+      handler: Please see above for details.
+      exception: Please see above for details.
+      log_all_retries: when True, logs all retries.
+      sleep: Please see above for details.
+      backoff_factor: Please see above for details.
+      jitter: Please see above for details.
+      raise_first_exception_on_failure: determines which exception is raised
+          upon failure after retries. If True, the first exception that was
+          encountered. Otherwise, the final one.
+      exception_to_raise: Optional exception type. If given, raises its
+          instance, instead of the one raised from the retry body.
+      status_callback: Optional callback invoked after each call of |functor|.
+          It takes two arguments: |attempt| which is the index of the last
+          attempt (0-based), and |success| representing whether the last attempt
+          was successfully done or not. If the callback raises an exception, no
+          further retry will be made, and the exception will be propagated to
+          the caller.
+    """
+    if max_retry < 0:
+      raise ValueError('max_retry needs to be zero or more: %d' % max_retry)
+    self._max_retry = max_retry
+
+    if handler is not None and exception is not None:
+      raise ValueError('handler and exception cannot be specified at once')
+    self._handler = (
+        handler or _CreateExceptionRetryHandler(exception or Exception))
+
+    self._log_all_retries = log_all_retries
+    self._retry_delay = _RetryDelayStrategy(sleep, backoff_factor, jitter)
+    self._raise_first_exception_on_failure = raise_first_exception_on_failure
+    self._exception_to_raise = exception_to_raise
+    self._status_callback = status_callback or (lambda attempt, success: None)
+
+  def __call__(self, func):
+    @functools.wraps(func)
+    def _Wrapper(*args, **kwargs):
+      fname = getattr(func, '__qualname__',
+                      getattr(func, '__name__', '<nameless>'))
+      exc_info = None
+      for attempt in range(self._max_retry + 1):
+        if attempt:
+          self._retry_delay.Sleep(attempt)
+
+        if attempt and self._log_all_retries:
+          logging.debug('Retrying %s (attempt %d)', fname, attempt + 1)
+
+        try:
+          ret = func(*args, **kwargs)
+        except Exception as e:
+          # Note we're not snagging BaseException, so
+          # MemoryError/KeyboardInterrupt and friends don't enter this except
+          # block.
+
+          # If raise_first_exception_on_failure, we intentionally ignore
+          # any failures in later attempts since we'll throw the original
+          # failure if all retries fail.
+          if exc_info is None or not self._raise_first_exception_on_failure:
+            exc_info = sys.exc_info()
+
+          try:
+            self._status_callback(attempt, False)
+          except Exception:
+            # In case callback raises an exception, quit the retry.
+            # For further investigation, log the original exception here.
+            logging.error('Ending retry due to Exception raised by a callback. '
+                          'Original exception raised during the attempt is '
+                          'as follows: ',
+                          exc_info=exc_info)
+            # Reraise the exception raised from the status_callback.
+            raise
+
+          if not self._handler(e):
+            logging.debug('ending retries with error: %s(%s)', e.__class__, e)
+            break
+          logging.exception('func call has failed')
+        else:
+          # Run the callback outside of the try's main block, in order to
+          # avoid accidentally capturing an Exception which may be raised in
+          # the callback.
+          self._status_callback(attempt, True)
+          return ret
+
+      # Did not return, meaning all attempts failed. Raise the exception.
+      if self._exception_to_raise:
+        raise self._exception_to_raise('%s: %s' % (exc_info[0], exc_info[1]))
+      six.reraise(exc_info[0], exc_info[1], exc_info[2])
+    return _Wrapper
+
+
+def GenericRetry(handler, max_retry, functor, *args, **kwargs):
+  """Generic retry loop w/ optional break out depending on exceptions.
+
+  Runs functor(*args, **(kwargs excluding params for retry)) as a retry body.
+
+  Please see WithRetry for details about retrying parameters.
+  """
+  # Note: the default values need to match those of WithRetry's ctor.
+  log_all_retries = kwargs.pop('log_all_retries', False)
+  delay_sec = kwargs.pop('delay_sec', 0)
+  sleep = kwargs.pop('sleep', 0)
+  backoff_factor = kwargs.pop('backoff_factor', 1)
+  status_callback = kwargs.pop('status_callback', None)
+  raise_first_exception_on_failure = kwargs.pop(
+      'raise_first_exception_on_failure', True)
+  exception_to_raise = kwargs.pop('exception_to_raise', None)
+
+  @WithRetry(
+      max_retry=max_retry, handler=handler, log_all_retries=log_all_retries,
+      sleep=sleep, backoff_factor=backoff_factor, jitter=delay_sec,
+      raise_first_exception_on_failure=raise_first_exception_on_failure,
+      exception_to_raise=exception_to_raise,
+      status_callback=status_callback)
+  def _run():
+    return functor(*args, **kwargs)
+  return _run()
+
+
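+# Usage sketch (illustrative only; |_FetchArtifact| is a hypothetical
+# callable): retry up to three times, with exponential backoff, only on
+# IOError.
+#
+#   def _ShouldRetry(exc):
+#     return isinstance(exc, IOError)
+#
+#   GenericRetry(_ShouldRetry, 3, _FetchArtifact, 'gs://bucket/artifact',
+#                sleep=1, backoff_factor=2, log_all_retries=True)
+
+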
+def RetryException(exception, max_retry, functor, *args, **kwargs):
+  """Convenience wrapper for GenericRetry based on exceptions.
+
+  Runs functor(*args, **(kwargs excluding params for retry)) as a retry body.
+
+  Please see WithRetry for details about retrying parameters.
+  """
+  log_all_retries = kwargs.pop('log_all_retries', False)
+  delay_sec = kwargs.pop('delay_sec', 0)
+  sleep = kwargs.pop('sleep', 0)
+  backoff_factor = kwargs.pop('backoff_factor', 1)
+  status_callback = kwargs.pop('status_callback', None)
+  raise_first_exception_on_failure = kwargs.pop(
+      'raise_first_exception_on_failure', True)
+  exception_to_raise = kwargs.pop('exception_to_raise', None)
+
+  @WithRetry(
+      max_retry=max_retry, exception=exception,
+      log_all_retries=log_all_retries,
+      sleep=sleep, backoff_factor=backoff_factor, jitter=delay_sec,
+      raise_first_exception_on_failure=raise_first_exception_on_failure,
+      exception_to_raise=exception_to_raise,
+      status_callback=status_callback)
+  def _run():
+    return functor(*args, **kwargs)
+  return _run()
+
+
+def RetryCommand(functor, max_retry, *args, **kwargs):
+  """Wrapper for run that will retry a command.
+
+  Args:
+    functor: run function to run; retries will only occur on
+      RunCommandError exceptions being thrown.
+    max_retry: A positive integer representing how many times to retry
+      the command before giving up.  Worst case, the command is invoked
+      (max_retry + 1) times before failing.
+    sleep: Optional keyword.  Multiplier for how long to sleep between
+      retries; will delay (1*sleep) the first time, then (2*sleep),
+      continuing via attempt * sleep.
+    retry_on: If provided, we will retry on any exit codes in the given list.
+      Note: A process will exit with a negative exit code if it is killed by a
+      signal. By default, we retry on all non-negative exit codes.
+    error_check: Optional callback to check the error output.  Return None to
+      fall back to |retry_on|, or True/False to set the retry directly.
+    log_retries: Whether to log a warning when retriable errors occur.
+    args: Positional args passed to run; see run for specifics.
+    kwargs: Optional args passed to run; see run for specifics.
+
+  Returns:
+    A CommandResult object.
+
+  Raises:
+    RunCommandError: Raised on error.
+  """
+  values = kwargs.pop('retry_on', None)
+  error_check = kwargs.pop('error_check', lambda x: None)
+  log_retries = kwargs.pop('log_retries', True)
+
+  def ShouldRetry(exc):
+    """Return whether we should retry on a given exception."""
+    if not ShouldRetryCommandCommon(exc):
+      return False
+    if values is None and exc.result.returncode < 0:
+      logging.info('Child process received signal %d; not retrying.',
+                   -exc.result.returncode)
+      return False
+
+    ret = error_check(exc)
+    if ret is not None:
+      return ret
+
+    if values is None or exc.result.returncode in values:
+      if log_retries:
+        logging.warning('Command failed with retriable error.\n%s', exc)
+      return True
+    return False
+
+  return GenericRetry(ShouldRetry, max_retry, functor, *args, **kwargs)
+
+
+def ShouldRetryCommandCommon(exc):
+  """Returns whether any run should retry on a given exception."""
+  if not isinstance(exc, cros_build_lib.RunCommandError):
+    return False
+  if exc.result.returncode is None:
+    logging.error('Child process failed to launch; not retrying:\n'
+                  'command: %s', exc.result.cmdstr)
+    return False
+  return True
+
+
+def RunCommandWithRetries(max_retry, *args, **kwargs):
+  """Wrapper for run that will retry a command
+
+  Args:
+    max_retry: See RetryCommand and run.
+    *args: See RetryCommand and run.
+    **kwargs: See RetryCommand and run.
+
+  Returns:
+    A CommandResult object.
+
+  Raises:
+    RunCommandError: Raised on error.
+  """
+  return RetryCommand(cros_build_lib.run, max_retry, *args, **kwargs)
+
+
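+# Usage sketch (illustrative only; the gsutil invocation is made up): retry a
+# command that occasionally exits 1, sleeping between attempts.
+#
+#   RunCommandWithRetries(2, ['gsutil', 'cp', 'gs://bucket/file', '/tmp/file'],
+#                         retry_on=[1], sleep=5, log_retries=True)
+
+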
+class DownloadError(Exception):
+  """Fetching file via curl failed"""
+
+
+def RunCurl(curl_args, *args, **kwargs):
+  """Runs curl and wraps around all necessary hacks.
+
+  Args:
+    curl_args: Command line to pass to curl. Must be list of str.
+    *args, **kwargs: See RunCommandWithRetries and run.
+      Note that retry_on, error_check, sleep, backoff_factor cannot be
+      overwritten.
+
+  Returns:
+    A CommandResult object.
+
+  Raises:
+    DownloadError: Whenever curl fails for any reason.
+  """
+  cmd = ['curl'] + curl_args
+
+  # These values were discerned via scraping the curl manpage; they're all
+  # retry related (dns failed, timeout occurred, etc.; see the manpage for
+  # exact specifics of each).
+  # Note we allow 22 to deal w/ 500's- they're thrown by google storage
+  # occasionally.  This is also thrown when getting 4xx, but curl doesn't
+  # make it easy to differentiate between them.
+  # Note we allow 35 to deal w/ Unknown SSL Protocol error, thrown by
+  # google storage occasionally.
+  # Finally, we do not use curl's --retry option since it generally doesn't
+  # actually retry anything; it will not retry on code 18, for example.
+  retriable_exits = frozenset([5, 6, 7, 15, 18, 22, 26, 28, 35, 52, 56])
+
+  def _CheckExit(exc):
+    """Filter out specific error codes when getting exit 22
+
+    Curl will exit(22) for a wide range of HTTP codes -- both the 4xx and 5xx
+    set.  For the 4xx, we don't want to retry.  We have to look at the output.
+    """
+    assert isinstance(exc, cros_build_lib.RunCommandError)
+    if exc.result.returncode == 22:
+      logging.debug('curl stderr %s', exc.result.error)
+      matched = CURL_STATUS_RE.search(exc.result.error)
+      if not matched:
+        # Unexpected stderr.  It may not be error output from --fail.
+        return True
+      status_code = matched.group(1)
+      return not status_code.startswith(b'4')
+
+    # We'll let the common exit code filter do the right thing.
+    return None
+
+  try:
+    return RunCommandWithRetries(
+        10, cmd, retry_on=retriable_exits, error_check=_CheckExit,
+        sleep=3, backoff_factor=1.6,
+        stderr=True, extra_env={'LC_MESSAGES': 'C'}, *args, **kwargs)
+  except cros_build_lib.RunCommandError as e:
+    if e.result.returncode in (51, 58, 60):
+      # These are the return codes of failing certs as per 'man curl'.
+      raise DownloadError(
+          'Download failed with certificate error? Try "sudo c_rehash".')
+    raise DownloadError('Curl failed w/ exit code %i: %s' %
+                        (e.result.returncode, e.result.error))
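+
+
+# Usage sketch (illustrative only; the URL and output path are placeholders):
+#
+#   RunCurl(['--fail', '-o', '/tmp/payload.bin',
+#            'https://example.com/payload.bin'])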
diff --git a/utils/frozen_chromite/lib/signals.py b/utils/frozen_chromite/lib/signals.py
new file mode 100644
index 0000000..4622484
--- /dev/null
+++ b/utils/frozen_chromite/lib/signals.py
@@ -0,0 +1,139 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2011-2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Signal related functionality."""
+
+from __future__ import print_function
+
+import signal
+import contextlib
+
+
+def RelaySignal(handler, signum, frame):
+  """Notify a listener returned from getsignal of receipt of a signal.
+
+  Returns:
+    True if it was relayed to the target, False otherwise.
+    False in particular occurs if the target isn't relayable.
+  """
+  if handler in (None, signal.SIG_IGN):
+    return True
+  elif handler == signal.SIG_DFL:
+    # This scenario is fairly painful to handle fully, thus we just
+    # state we couldn't handle it and leave it to client code.
+    return False
+  handler(signum, frame)
+  return True
+
+
+def SignalModuleUsable(_signal=signal.signal, _SIGUSR1=signal.SIGUSR1):
+  """Verify that the signal module is usable and won't segfault on us.
+
+  See http://bugs.python.org/issue14173.  This function detects if the
+  signals module is no longer safe to use (which only occurs during
+  final stages of the interpreter shutdown) and heads off a segfault
+  if signal.* was accessed.
+
+  This shouldn't be used by anything other than functionality that is
+  known and unavoidably invoked by finalizer code during python shutdown.
+
+  Finally, the default args here are intentionally binding what we need
+  from the signal module to do the necessary test; invoking code shouldn't
+  pass any options, nor should any developer ever remove those default
+  options.
+
+  Note that this functionality is intended to be removed just as soon
+  as all consuming code installs their own SIGTERM handlers.
+  """
+  # Track any signals we receive while doing the check.
+  received, actual = [], None
+  def handler(signum, frame):
+    received.append([signum, frame])
+  try:
+    # Play with sigusr1, since it's not particularly used.
+    actual = _signal(_SIGUSR1, handler)
+    _signal(_SIGUSR1, actual)
+    return True
+  except (TypeError, AttributeError, SystemError, ValueError):
+    # The first three exceptions can be thrown depending on the state of the
+    # signal module internal Handlers array; we catch all, and interpret it
+    # as if we were invoked during sys.exit cleanup.
+    # The last exception can be thrown if we're trying to be used in a thread
+    # which is not the main one.  This can come up with standard python modules
+    # such as BaseHTTPServer.HTTPServer.
+    return False
+  finally:
+    # And now relay those signals to the original handler.  Not all may
+    # be delivered; the first may throw an exception, for example.  Not our
+    # problem, however.
+    for signum, frame in received:
+      actual(signum, frame)
+
+
+@contextlib.contextmanager
+def DeferSignals(*args):
+  """Context Manger to defer signals during a critical block.
+
+  If a signal comes in for the masked signals, the original handler
+  is run after the critical block has exited.
+
+  Args:
+    args: Which signals to defer.  If none are given, defaults to
+      SIGINT, SIGTERM, and SIGALRM.
+  """
+  signals = args
+  if not signals:
+    signals = [signal.SIGINT, signal.SIGTERM, signal.SIGALRM]
+
+  # Rather than directly setting the handler, we first pull the handlers, then
+  # set the new handler.  The ordering has to be done this way to ensure that
+  # if someone passes in a bad signum (or a signal lands prior to starting the
+  # critical block), we can restore things to pristine state.
+  handlers = dict((signum, signal.getsignal(signum)) for signum in signals)
+
+  received = []
+  def handler(signum, frame):
+    received.append((signum, frame))
+
+  try:
+    for signum in signals:
+      signal.signal(signum, handler)
+
+    yield
+
+  finally:
+    for signum, original in handlers.items():
+      signal.signal(signum, original)
+
+    for signum, frame in received:
+      RelaySignal(handlers[signum], signum, frame)
+
+
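+# Usage sketch (illustrative only; |_WriteStateFile| is hypothetical): defer
+# interrupts around a critical section.
+#
+#   with DeferSignals(signal.SIGINT, signal.SIGTERM):
+#     _WriteStateFile()
+#   # Any deferred signal is re-delivered to its original handler here.
+
+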
+def StrSignal(sig_num):
+  """Convert a signal number to the symbolic name
+
+  Note: Some signal numbers have multiple names, so you might get
+  back a confusing result like "SIGIOT|SIGABRT".  Since they have
+  the same signal number, it's impossible to say which one is right.
+
+  Args:
+    sig_num: The numeric signal you wish to convert
+
+  Returns:
+    A string of the signal name(s)
+  """
+  # Handle realtime signals first since they are unnamed.
+  if sig_num >= signal.SIGRTMIN and sig_num < signal.SIGRTMAX:
+    return 'SIGRT_%i' % sig_num
+
+  # Probe the module looking for matching signal constant.
+  sig_names = []
+  for name, num in signal.__dict__.items():
+    if name.startswith('SIG') and num == sig_num:
+      sig_names.append(name)
+  if sig_names:
+    return '|'.join(sig_names)
+  else:
+    return 'SIG_%i' % sig_num
diff --git a/utils/frozen_chromite/lib/stateful_updater.py b/utils/frozen_chromite/lib/stateful_updater.py
new file mode 100644
index 0000000..5c09a88
--- /dev/null
+++ b/utils/frozen_chromite/lib/stateful_updater.py
@@ -0,0 +1,113 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for updating the stateful partition on the device.
+
+Use this module to update the stateful partition given a stateful payload
+(e.g. stateful.tgz) on the device. This module untars/uncompresses the payload
+on the device into var_new and dev_image_new directories. Optionally, you can
+ask this module to reset a stateful partition by preparing it to be clobbered on
+reboot.
+"""
+
+from __future__ import print_function
+
+import os
+import tempfile
+
+from autotest_lib.utils.frozen_chromite.lib import constants
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import osutils
+
+
+class Error(Exception):
+  """Base exception class of StatefulUpdater errors."""
+
+
+class StatefulUpdater(object):
+  """The module for updating the stateful partition."""
+
+  UPDATE_TYPE_STANDARD = 'standard'
+  UPDATE_TYPE_CLOBBER = 'clobber'
+
+  _VAR_DIR = 'var_new'
+  _DEV_IMAGE_DIR = 'dev_image_new'
+  _UPDATE_TYPE_FILE = '.update_available'
+
+  def __init__(self, device, stateful_dir=constants.STATEFUL_DIR):
+    """Initializes the module.
+
+    Args:
+      device: The ChromiumOSDevice to be updated.
+      stateful_dir: The stateful directory on the Chromium OS device.
+    """
+    self._device = device
+    self._stateful_dir = stateful_dir
+    self._var_dir = os.path.join(self._stateful_dir, self._VAR_DIR)
+    self._dev_image_dir = os.path.join(self._stateful_dir, self._DEV_IMAGE_DIR)
+    self._update_type_file = os.path.join(self._stateful_dir,
+                                          self._UPDATE_TYPE_FILE)
+
+  def Update(self, payload_path_on_device, update_type=None):
+    """Updates the stateful partition given the update file.
+
+    Args:
+      payload_path_on_device: The path to the stateful update (stateful.tgz)
+        on the DUT.
+      update_type: The type of the stateful update to be marked. Accepted
+        values: 'standard' (default) and 'clobber'.
+    """
+    if not self._device.IfPathExists(payload_path_on_device):
+      raise Error('Missing the file: %s' % payload_path_on_device)
+
+    try:
+      cmd = ['tar', '--ignore-command-error', '--overwrite',
+             '--directory', self._stateful_dir, '-xzf', payload_path_on_device]
+      self._device.run(cmd)
+    except cros_build_lib.RunCommandError as e:
+      raise Error('Failed to untar the stateful update with error %s' % e)
+
+    # Make sure target directories are generated on the device.
+    if (not self._device.IfPathExists(self._var_dir) or
+        not self._device.IfPathExists(self._dev_image_dir)):
+      raise Error('Missing var or dev_image in stateful payload.')
+
+    self._MarkUpdateType(update_type if update_type is not None
+                         else self.UPDATE_TYPE_STANDARD)
+
+  def _MarkUpdateType(self, update_type):
+    """Marks the type of the update.
+
+    Args:
+      update_type: The type of the update to be marked. See Update()
+    """
+    if update_type not in (self.UPDATE_TYPE_CLOBBER, self.UPDATE_TYPE_STANDARD):
+      raise Error('Invalid update type %s' % update_type)
+
+    with tempfile.NamedTemporaryFile() as f:
+      if update_type == self.UPDATE_TYPE_STANDARD:
+        logging.notice('Performing standard stateful update...')
+      elif update_type == self.UPDATE_TYPE_CLOBBER:
+        logging.notice('Restoring stateful to factory_install '
+                       'with dev_image...')
+        osutils.WriteFile(f.name, 'clobber')
+
+      try:
+        self._device.CopyToDevice(f.name, self._update_type_file, 'scp')
+      except cros_build_lib.RunCommandError as e:
+        raise Error('Failed to copy update type file to device with error %s' %
+                    e)
+
+  def Reset(self):
+    """Resets the stateful partition."""
+    logging.info('Resetting stateful update state.')
+
+    try:
+      self._device.run(['rm', '-rf', self._update_type_file,
+                        self._var_dir, self._dev_image_dir])
+    except cros_build_lib.RunCommandError as e:
+      logging.warning('(ignoring) Failed to delete stateful update paths with'
+                      ' error: %s', e)
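+
+
+# Usage sketch (illustrative only; |device| is assumed to be a connected
+# ChromiumOSDevice and the payload has already been copied to the DUT):
+#
+#   updater = StatefulUpdater(device)
+#   updater.Update('/tmp/stateful.tgz',
+#                  update_type=StatefulUpdater.UPDATE_TYPE_STANDARD)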
diff --git a/utils/frozen_chromite/lib/structured.py b/utils/frozen_chromite/lib/structured.py
new file mode 100644
index 0000000..a43a2c7
--- /dev/null
+++ b/utils/frozen_chromite/lib/structured.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A convenience class for objects which can be converted to JSON."""
+
+from __future__ import print_function
+
+_BUILTINS = (str, int, float, bool, type(None))
+
+
+class Structured(object):
+  """An object with a approved set of public properties (.VISIBLE_KEYS)"""
+
+  def _Keys(self):
+    seen = set()
+    for cls in type(self).mro():
+      for k in getattr(cls, 'VISIBLE_KEYS', ()):
+        if k not in seen:
+          yield k
+          seen.add(k)
+
+  def ToDict(self):
+    return ToStructure(self)
+
+
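+# Usage sketch (illustrative only): a hypothetical Structured subclass; only
+# the keys listed in VISIBLE_KEYS survive ToDict().
+#
+#   class Build(Structured):
+#     VISIBLE_KEYS = ('name', 'board')
+#
+#     def __init__(self, name, board):
+#       self.name = name
+#       self.board = board
+#       self.internal = 'not exported'
+#
+#   Build('release', 'eve').ToDict()  # -> {'name': 'release', 'board': 'eve'}
+
+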
+def ToStructure(value):
+  """Makes an object JSON-encodable.
+
+  Args:
+    value: An object which can be converted to a JSON-encodable object.
+
+  Returns:
+    An object which is legal to pass into json.dumps(obj), namely
+    type JSONEncodable = (
+        str | int | float | bool | NoneType | dict<str, JSONEncodable> |
+        list<JSONEncodable>)
+
+  Raises:
+    StackOverflow if the object has circular references (parent -> child ->
+    parent).
+  """
+  if isinstance(value, _BUILTINS):
+    return value
+
+  elif hasattr(value, '_Keys'):
+    ret = {}
+    for k in value._Keys():  # pylint: disable=protected-access
+      v = ToStructure(getattr(value, k, None))
+      if v is not None:
+        ret[k] = v
+    return ret
+
+  elif isinstance(value, dict):
+    ret = {}
+    for k, v in value.items():
+      v = ToStructure(v)
+      if v is not None:
+        ret[k] = v
+    return ret
+
+  else:
+    try:
+      iterator = iter(value)
+    except TypeError:
+      return None
+
+    ret = []
+    for element in iterator:
+      v = ToStructure(element)
+      if v is not None:
+        ret.append(v)
+
+    return ret
\ No newline at end of file
diff --git a/utils/frozen_chromite/lib/terminal.py b/utils/frozen_chromite/lib/terminal.py
new file mode 100644
index 0000000..38a0195
--- /dev/null
+++ b/utils/frozen_chromite/lib/terminal.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Terminal utilities
+
+This module handles terminal interaction including ANSI color codes.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+from autotest_lib.utils.frozen_chromite.lib import cros_build_lib
+
+
+class Color(object):
+  """Conditionally wraps text in ANSI color escape sequences."""
+  BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
+  BOLD = -1
+  COLOR_START = '\033[1;%dm'
+  BOLD_START = '\033[1m'
+  RESET = '\033[0m'
+
+  def __init__(self, enabled=None):
+    """Create a new Color object, optionally disabling color output.
+
+    Args:
+      enabled: True if color output should be enabled. If False then this
+        class will not add color codes at all.
+    """
+    self._enabled = enabled
+    if self._enabled is None:
+      self._enabled = self.UserEnabled()
+      if self._enabled is None:
+        self._enabled = sys.stdout.isatty()
+
+  def Start(self, color):
+    """Returns a start color code.
+
+    Args:
+      color: Color to use, e.g. BLACK, RED, etc.
+
+    Returns:
+      If color is enabled, returns an ANSI sequence to start the given color,
+      otherwise returns empty string
+    """
+    if self._enabled:
+      return self.COLOR_START % (color + 30)
+    return ''
+
+  def Stop(self):
+    """Returns a stop color code.
+
+    Returns:
+      If color is enabled, returns an ANSI color reset sequence, otherwise
+      returns empty string
+    """
+    if self._enabled:
+      return self.RESET
+    return ''
+
+  def Color(self, color, text):
+    """Returns text with conditionally added color escape sequences.
+
+    Args:
+      color: Text color -- one of the color constants defined in this class.
+      text: The text to color.
+
+    Returns:
+      If self._enabled is False, returns the original text. If it's True,
+      returns text with color escape sequences based on the value of color.
+    """
+    if not self._enabled:
+      return text
+    if color == self.BOLD:
+      start = self.BOLD_START
+    else:
+      start = self.COLOR_START % (color + 30)
+    return start + text + self.RESET
+
+  @staticmethod
+  def UserEnabled():
+    """See if the global colorization preference is enabled ($NOCOLOR env)"""
+    is_disabled = cros_build_lib.BooleanShellValue(
+        os.environ.get('NOCOLOR'), msg='$NOCOLOR env var is invalid',
+        default=None)
+    return not is_disabled if is_disabled is not None else None
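+
+
+# Usage sketch (illustrative only): conditional colorizing that honors
+# $NOCOLOR and non-tty output, as implemented above.
+#
+#   color = Color()
+#   print(color.Color(Color.GREEN, 'PASS'))
+#   print(color.Start(Color.RED) + 'FAIL' + color.Stop())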
diff --git a/utils/frozen_chromite/lib/timeout_util.py b/utils/frozen_chromite/lib/timeout_util.py
new file mode 100644
index 0000000..4191597
--- /dev/null
+++ b/utils/frozen_chromite/lib/timeout_util.py
@@ -0,0 +1,317 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions for implementing timeouts."""
+
+from __future__ import print_function
+
+import contextlib
+import datetime
+import functools
+import signal
+import threading
+import time
+
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+
+
+class TimeoutError(Exception):  # pylint: disable=redefined-builtin
+  """Raises when code within Timeout has been run too long."""
+
+
+def Timedelta(num, zero_ok=False):
+  """Normalize |num| (in seconds) into a datetime.timedelta."""
+  if not isinstance(num, datetime.timedelta):
+    num = datetime.timedelta(seconds=num)
+  if zero_ok:
+    if num.total_seconds() < 0:
+      raise ValueError('timing must be >= 0, not %s' % (num,))
+  else:
+    if num.total_seconds() <= 0:
+      raise ValueError('timing must be greater than 0, not %s' % (num,))
+  return num
+
+
+def _ScheduleTimer(seconds, interval=0):
+  """Schedules the timer to raise SIGALRM.
+
+  If |seconds| is less than the minimum resolution, it will be rounded up to
+  the resolution.
+  Note: if |seconds| is very short, the signal can be delivered almost
+  immediately, so the handler may be called even within this stack frame.
+
+  Args:
+    seconds: How long to wait before sending SIGALRM, in seconds.
+    interval: (Optional) interval schedule for the timer.
+  """
+  # Min resolution of itimer. See man setitimer(2) for details.
+  MIN_SECONDS = 0.000001
+  signal.setitimer(signal.ITIMER_REAL, max(seconds, MIN_SECONDS), interval)
+
+
+def _CancelTimer():
+  """Cancels the currently scheduled SIGALRM timer.
+
+  Returns:
+    Previous timer, which is a pair of scheduled timeout and interval.
+  """
+  return signal.setitimer(signal.ITIMER_REAL, 0)
+
+
+@contextlib.contextmanager
+def Timeout(max_run_time,
+            error_message='Timeout occurred- waited %(time)s seconds.',
+            reason_message=None):
+  """ContextManager that alarms if code is ran for too long.
+
+  Timeout can be nested and raises a TimeoutError if the timeout
+  is reached. Timeout can also nest underneath FatalTimeout.
+
+  Args:
+    max_run_time: How long to wait before sending SIGALRM.  May be a number
+      (in seconds, can be fractional) or a datetime.timedelta object.
+    error_message: Optional string to wrap in the TimeoutError exception on
+      timeout. If not provided, the default template will be used.
+    reason_message: Optional string to be appended to the TimeoutError
+      error_message string. Provide a custom message here if you want to have
+      a purpose-specific message without overriding the default template in
+      |error_message|.
+  """
+  max_run_time = Timedelta(max_run_time).total_seconds()
+  if reason_message:
+    error_message += reason_message
+
+  # pylint: disable=unused-argument
+  def kill_us(sig_num, frame):
+    raise TimeoutError(error_message % {'time': max_run_time})
+
+  previous_time = time.time()
+  previous_timeout, previous_interval = _CancelTimer()
+  original_handler = signal.signal(signal.SIGALRM, kill_us)
+
+  try:
+    # Schedule the smaller of the leftover time and this timeout.
+    # This must be called inside the try block; otherwise the finally block
+    # may not run if the timeout duration is very short.
+    _ScheduleTimer(min(previous_timeout or float('inf'), max_run_time))
+    yield
+  finally:
+    # Cancel the alarm request and restore the original handler.
+    _CancelTimer()
+    signal.signal(signal.SIGALRM, original_handler)
+
+    # Ensure the previous handler will fire if it was meant to.
+    if previous_timeout:
+      remaining_timeout = previous_timeout - (time.time() - previous_time)
+      # It is OK to pass a negative remaining_timeout; see the comments in
+      # _ScheduleTimer for details.
+      _ScheduleTimer(remaining_timeout, previous_interval)
+
+
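+# Illustrative usage sketch (not part of the upstream module): Timeout context
+# managers may be nested; on exit the inner alarm is cancelled and whatever is
+# left of the outer timeout is rescheduled.  The callables below are
+# placeholders supplied by the caller.
+def _example_nested_timeout(prepare, poll_once):
+  """Run |prepare| under a 5-minute budget, then |poll_once| under 30s."""
+  with Timeout(300):
+    prepare()
+    # The inner timeout fires first; the outer alarm is restored on exit.
+    with Timeout(30, reason_message=' while polling the device'):
+      poll_once()
+
+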
+@contextlib.contextmanager
+def FatalTimeout(max_run_time, display_message=None):
+  """ContextManager that exits the program if code is run for too long.
+
+  This implementation is fairly simple, thus multiple timeouts
+  cannot be active at the same time.
+
+  Additionally, when the timeout elapses it triggers a SystemExit exception
+  within the invoking code, ultimately propagating it past itself.  If the
+  underlying code tries to suppress the SystemExit, it is re-raised once a
+  minute until control is returned to this manager.
+
+  Args:
+    max_run_time: How long to wait.  May be a number (in seconds, can be
+      fractional) or a datetime.timedelta object.
+    display_message: Optional string message to be included in timeout
+      error message, if the timeout occurs.
+  """
+  max_run_time = Timedelta(max_run_time).total_seconds()
+
+  # pylint: disable=unused-argument
+  def kill_us(sig_num, frame):
+    # While this SystemExit *should* crash its way back up the
+    # stack to our exit handler, we do have live/production code
+    # that uses blanket except statements which could suppress this.
+    # As such, keep scheduling alarms until our exit handler runs.
+    # Note that there is a potential conflict between this code and
+    # run's kill_timeout; thus we set the alarm interval fairly high.
+    _ScheduleTimer(60)
+
+    # The cbuildbot stage that gets aborted by this timeout should be treated as
+    # failed by buildbot.
+    error_message = ('Timeout occurred - waited %i seconds, failing.' %
+                     max_run_time)
+    if display_message:
+      error_message += ' Timeout reason: %s' % display_message
+    logging.PrintBuildbotStepFailure()
+    logging.error(error_message)
+    raise SystemExit(error_message)
+
+  if signal.getitimer(signal.ITIMER_REAL)[0]:
+    raise Exception('FatalTimeout cannot be used in parallel to other alarm '
+                    'handling code; failing')
+
+  original_handler = signal.signal(signal.SIGALRM, kill_us)
+  try:
+    _ScheduleTimer(max_run_time)
+    yield
+  finally:
+    # Cancel the alarm request and restore the original handler.
+    _CancelTimer()
+    signal.signal(signal.SIGALRM, original_handler)
+
+
+def TimeoutDecorator(max_time):
+  """Decorator used to ensure a func is interrupted if it's running too long."""
+  # Save off the built-in versions of time.time and the signal timer
+  # functions, in case they get mocked out later. We want to ensure that
+  # tests don't accidentally mock out the functions used by Timeout.
+  def _Save():
+    return (time.time, signal.signal, signal.setitimer, signal.getitimer,
+            signal.SIGALRM, signal.ITIMER_REAL)
+  def _Restore(values):
+    (time.time, signal.signal, signal.setitimer, signal.getitimer,
+     signal.SIGALRM, signal.ITIMER_REAL) = values
+  builtins = _Save()
+
+  def NestedTimeoutDecorator(func):
+    @functools.wraps(func)
+    def TimeoutWrapper(*args, **kwargs):
+      new = _Save()
+      try:
+        _Restore(builtins)
+        with Timeout(max_time):
+          _Restore(new)
+          try:
+            return func(*args, **kwargs)
+          finally:
+            _Restore(builtins)
+      finally:
+        _Restore(new)
+
+    return TimeoutWrapper
+
+  return NestedTimeoutDecorator
+
+
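+# Illustrative usage sketch (not part of the upstream module): TimeoutDecorator
+# wraps a function so it is interrupted with TimeoutError when it runs longer
+# than the given budget.  The sleep below exists only to demonstrate the
+# interruption.
+@TimeoutDecorator(5)
+def _example_bounded_sleep(seconds):
+  """Sleep for |seconds|, raising TimeoutError after 5 seconds."""
+  time.sleep(seconds)
+
+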
+def WaitForReturnTrue(*args, **kwargs):
+  """Periodically run a function, waiting in between runs.
+
+  Continues to run until the function returns True.
+
+  Args:
+    See WaitForReturnValue([True], ...)
+
+  Raises:
+    TimeoutError when the timeout is exceeded.
+  """
+  WaitForReturnValue([True], *args, **kwargs)
+
+
+def WaitForReturnValue(values, *args, **kwargs):
+  """Periodically run a function, waiting in between runs.
+
+  Continues to run until the function return value is in the list
+  of accepted |values|.  See WaitForSuccess for more details.
+
+  Args:
+    values: A list or set of acceptable return values.
+    *args, **kwargs: See WaitForSuccess for remaining arguments.
+
+  Returns:
+    The value most recently returned by |func|.
+
+  Raises:
+    TimeoutError when the timeout is exceeded.
+  """
+  def _Retry(return_value):
+    return return_value not in values
+
+  return WaitForSuccess(_Retry, *args, **kwargs)
+
+
+def WaitForSuccess(retry_check, func, timeout, period=1, side_effect_func=None,
+                   func_args=None, func_kwargs=None, fallback_timeout=10):
+  """Periodically run a function, waiting in between runs.
+
+  Continues to run the given function until its return value is accepted by
+  the retry check.
+
+  To retry based on raised exceptions see GenericRetry in retry_util.
+
+  Args:
+    retry_check: A functor that will be passed the return value of |func| as
+      the only argument.  If |func| should be retried |retry_check| should
+      return True.
+    func: The function to run to test for a value.
+    timeout: The maximum amount of time to wait.  May be a number (in seconds)
+      or a datetime.timedelta object.
+    period: How long between calls to |func|.  May be a number (in seconds) or
+      a datetime.timedelta object.
+    side_effect_func: Optional function to be called between polls of func,
+      typically to output logging messages. The remaining time will be passed
+      as a datetime.timedelta object.
+    func_args: Optional list of positional arguments to be passed to |func|.
+    func_kwargs: Optional dictionary of keyword arguments to be passed to
+                 |func|.
+    fallback_timeout: We set a secondary timeout based on SIGALRM this many
+                      seconds after the initial timeout. This should NOT be
+                      considered robust, but it can allow timeouts inside
+                      blocking methods.
+
+  Returns:
+    The value most recently returned by |func| that was not flagged for retry.
+
+  Raises:
+    TimeoutError when the timeout is exceeded.
+  """
+  timeout = Timedelta(timeout, zero_ok=True)
+  period = Timedelta(period, zero_ok=True)
+  fallback_timeout = Timedelta(fallback_timeout)
+  func_args = func_args or []
+  func_kwargs = func_kwargs or {}
+
+  end = datetime.datetime.now() + timeout
+
+  # pylint: disable=protected-access
+  # This is used to detect whether we are in the main thread ('_MainThread').
+  # Before Python 3.4 there is no perfect solution. See this discussion for
+  # details: http://stackoverflow.com/questions/23206787.
+  is_main_thread = isinstance(threading.current_thread(),
+                              threading._MainThread)
+  # pylint: enable=protected-access
+  def retry():
+    while True:
+      # Guarantee we always run at least once.
+      value = func(*func_args, **func_kwargs)
+      if not retry_check(value):
+        return value
+
+      # Run the user's callback func if available.
+      if side_effect_func:
+        delta = end - datetime.datetime.now()
+        if delta.total_seconds() < 0:
+          delta = datetime.timedelta(seconds=0)
+        side_effect_func(delta)
+
+      # If we're just going to sleep past the timeout period, abort now.
+      delta = end - datetime.datetime.now()
+      if delta <= period:
+        raise TimeoutError('Timed out after %s' % timeout)
+
+      time.sleep(period.total_seconds())
+
+  if not is_main_thread:
+    # Warning: we are not running in the main thread. Since signals only work
+    # in the main thread, this function may run longer than the timeout or
+    # even hang.
+    return retry()
+  else:
+    # Use a sigalarm after an extra delay, in case a function we call is
+    # blocking for some reason. This should NOT be considered reliable.
+    with Timeout(timeout + fallback_timeout):
+      return retry()
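+
+
+# Illustrative usage sketch (not part of the upstream module): WaitForSuccess
+# polls |func| until |retry_check| no longer asks for a retry or |timeout|
+# elapses.  Here a caller-supplied |is_ready| callable is polled every 2
+# seconds for up to a minute, logging the remaining time between attempts.
+def _example_wait_until_ready(is_ready):
+  def _log_remaining(remaining):
+    logging.info('Still waiting; %s left before timing out.', remaining)
+
+  return WaitForSuccess(lambda ready: not ready,
+                        is_ready,
+                        timeout=60,
+                        period=2,
+                        side_effect_func=_log_remaining)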
diff --git a/utils/frozen_chromite/lib/ts_mon_config.py b/utils/frozen_chromite/lib/ts_mon_config.py
new file mode 100644
index 0000000..c9cc9a4
--- /dev/null
+++ b/utils/frozen_chromite/lib/ts_mon_config.py
@@ -0,0 +1,397 @@
+# -*- coding: utf-8 -*-
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper for inframon's command-line flag based configuration."""
+
+from __future__ import print_function
+
+import argparse
+import contextlib
+import multiprocessing
+import os
+import socket
+import signal
+import time
+
+from six.moves import queue as Queue
+
+import six
+
+from autotest_lib.utils.frozen_chromite.lib import cros_logging as logging
+from autotest_lib.utils.frozen_chromite.lib import metrics
+from autotest_lib.utils.frozen_chromite.lib import parallel
+
+try:
+  from infra_libs.ts_mon import config
+  from infra_libs.ts_mon import BooleanField
+  from infra_libs.ts_mon import IntegerField
+  from infra_libs.ts_mon import StringField
+  import googleapiclient.discovery
+except (ImportError, RuntimeError) as e:
+  config = None
+  logging.warning('Failed to import ts_mon, monitoring is disabled: %s', e)
+
+
+_WasSetup = False
+_CommonMetricFields = {}
+
+FLUSH_INTERVAL = 60
+
+
+@contextlib.contextmanager
+def TrivialContextManager():
+  """Context manager with no side effects."""
+  yield
+
+
+def GetMetricFieldSpec(fields=None):
+  """Return the corresponding field_spec for metric fields.
+
+  Args:
+    fields: Dictionary containing metric fields.
+
+  Returns:
+    field_spec: List containing any *Field object associated with metric.
+  """
+  field_spec = []
+  if fields:
+    for key, val in fields.items():
+      if isinstance(val, bool):
+        field_spec.append(BooleanField(key))
+      elif isinstance(val, int):
+        field_spec.append(IntegerField(key))
+      elif isinstance(val, six.string_types):
+        field_spec.append(StringField(key))
+      else:
+        logging.error("Couldn't classify the metric field %s:%s",
+                      key, val)
+
+  return field_spec
+
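+
+# Illustrative usage sketch (not part of the upstream module): metric field
+# values are mapped to ts_mon field specs by type, so the dict below yields a
+# BooleanField, an IntegerField and a StringField (in dict iteration order).
+def _example_field_spec():
+  return GetMetricFieldSpec({'success': True, 'attempt': 3, 'board': 'eve'})
+
+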
+def AddCommonFields(fields=None, field_spec=None):
+  """Add cbuildbot-wide common fields to a given field set.
+
+  Args:
+    fields: Dictionary containing metric fields to which common metric fields
+            will be added.
+    field_spec: List containing any *Field object associated with metric.
+
+  Returns:
+    Dictionary containing complete set of metric fields to be applied to
+    metric and a list of corresponding field_spec.
+  """
+  metric_fields = (dict(_CommonMetricFields) if _CommonMetricFields
+                   else {})
+
+  if metric_fields:
+    metric_fields.update(fields or {})
+    return metric_fields, GetMetricFieldSpec(metric_fields)
+  else:
+    return fields, field_spec
+
+
+def SetupTsMonGlobalState(service_name,
+                          indirect=False,
+                          suppress_exception=True,
+                          short_lived=False,
+                          auto_flush=True,
+                          common_metric_fields=None,
+                          debug_file=None,
+                          task_num=0):
+  """Uses a dummy argument parser to get the default behavior from ts-mon.
+
+  Args:
+    service_name: The name of the task we are sending metrics from.
+    indirect: Whether to create a metrics.MESSAGE_QUEUE object and a separate
+              process for indirect metrics flushing. Useful for forking,
+              because forking would normally create a duplicate ts_mon thread.
+    suppress_exception: True to silence any exception during the setup. Default
+              is set to True.
+    short_lived: Whether this process is short-lived and should use the autogen
+              hostname prefix.
+    auto_flush: Whether to create a thread to automatically flush metrics every
+              minute.
+    common_metric_fields: Dictionary containing the metric fields that will be
+              added to all metrics.
+    debug_file: If non-none, send metrics to this path instead of to PubSub.
+    task_num: (Default 0) The task_num target field of the metrics to emit.
+  """
+  if not config:
+    return TrivialContextManager()
+
+  # The flushing subprocess calls .flush manually.
+  if indirect:
+    auto_flush = False
+
+  if common_metric_fields:
+    _CommonMetricFields.update(common_metric_fields)
+
+  # google-api-client has too much noisy logging.
+  options = _GenerateTsMonArgparseOptions(
+      service_name, short_lived, auto_flush, debug_file, task_num)
+
+  if indirect:
+    return _CreateTsMonFlushingProcess(options)
+  else:
+    _SetupTsMonFromOptions(options, suppress_exception)
+    return TrivialContextManager()
+
+
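+# Illustrative usage sketch (not part of the upstream module): a process that
+# forks workers would typically set up ts-mon with indirect=True so a single
+# flushing subprocess owns the ts_mon thread.  'my_service' and |main| are
+# placeholders for the caller's own entry point.
+def _example_indirect_setup(main):
+  with SetupTsMonGlobalState('my_service', indirect=True):
+    return main()
+
+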
+def _SetupTsMonFromOptions(options, suppress_exception):
+  """Sets up ts-mon global state given parsed argparse options.
+
+  Args:
+    options: An argparse options object containing ts-mon flags.
+    suppress_exception: True to silence any exception during the setup. Default
+                        is set to True.
+  """
+  googleapiclient.discovery.logger.setLevel(logging.WARNING)
+  try:
+    config.process_argparse_options(options)
+    logging.notice('ts_mon was set up.')
+    global _WasSetup  # pylint: disable=global-statement
+    _WasSetup = True
+  except Exception as e:
+    logging.warning('Failed to configure ts_mon, monitoring is disabled: %s', e,
+                    exc_info=True)
+    if not suppress_exception:
+      raise
+
+
+def _GenerateTsMonArgparseOptions(service_name, short_lived,
+                                  auto_flush, debug_file, task_num):
+  """Generates an arg list for ts-mon to consume.
+
+  Args:
+    service_name: The name of the task we are sending metrics from.
+    short_lived: Whether this process is short-lived and should use the autogen
+                 hostname prefix.
+    auto_flush: Whether to create a thread to automatically flush metrics every
+                minute.
+    debug_file: If non-none, send metrics to this path instead of to PubSub.
+    task_num: Override the default task num of 0.
+  """
+  parser = argparse.ArgumentParser()
+  config.add_argparse_options(parser)
+
+  args = [
+      '--ts-mon-target-type', 'task',
+      '--ts-mon-task-service-name', service_name,
+      '--ts-mon-task-job-name', service_name,
+  ]
+
+  if debug_file:
+    args.extend(['--ts-mon-endpoint', 'file://' + debug_file])
+
+  # Short-lived processes have 'autogen:' prepended to their hostname and use
+  # task-number=PID to trigger shorter retention policies under chrome-infra@;
+  # a Monarch precomputation then groups across the task number.
+  # Furthermore, we assume they manually call ts_mon.Flush(), because the
+  # ts_mon thread will drop messages if the process exits before it flushes.
+  if short_lived:
+    auto_flush = False
+    fqdn = socket.getfqdn().lower()
+    host = fqdn.split('.')[0]
+    args.extend(['--ts-mon-task-hostname', 'autogen:' + host,
+                 '--ts-mon-task-number', str(os.getpid())])
+  elif task_num:
+    args.extend(['--ts-mon-task-number', str(task_num)])
+
+  args.extend(['--ts-mon-flush', 'auto' if auto_flush else 'manual'])
+  return parser.parse_args(args=args)
+
+
+@contextlib.contextmanager
+def _CreateTsMonFlushingProcess(options):
+  """Creates a separate process to flush ts_mon metrics.
+
+  Useful for multiprocessing scenarios where we don't want multiple ts-mon
+  threads sending contradictory metrics. Instead, functions in
+  chromite.lib.metrics will send their calls to a Queue, which is consumed by a
+  dedicated flushing process.
+
+  Args:
+    options: An argparse options object to configure ts-mon with.
+
+  Side effects:
+    Sets chromite.lib.metrics.MESSAGE_QUEUE, which causes the metric functions
+    to send their calls to the Queue instead of creating the metrics.
+  """
+  # If this is nested, we don't need to create another queue and another
+  # message consumer. Do nothing to continue to use the existing queue.
+  if metrics.MESSAGE_QUEUE or metrics.FLUSHING_PROCESS:
+    return
+
+  with parallel.Manager() as manager:
+    message_q = manager.Queue()
+
+    metrics.FLUSHING_PROCESS = multiprocessing.Process(
+        target=lambda: _SetupAndConsumeMessages(message_q, options))
+    metrics.FLUSHING_PROCESS.start()
+
+    # This makes the chromite.lib.metrics functions use the queue.
+    # Note: we have to do this *after* forking the ConsumeMessages process.
+    metrics.MESSAGE_QUEUE = message_q
+
+    try:
+      yield message_q
+    finally:
+      _CleanupMetricsFlushingProcess()
+
+
+def _CleanupMetricsFlushingProcess():
+  """Sends sentinal value to flushing process and .joins it."""
+  # Now that there is no longer a process to listen to the Queue, re-set it
+  # to None so that any future metrics are created within this process.
+  message_q = metrics.MESSAGE_QUEUE
+  flushing_process = metrics.FLUSHING_PROCESS
+  metrics.MESSAGE_QUEUE = None
+  metrics.FLUSHING_PROCESS = None
+
+  # If the process has already died, we don't need to try to clean it up.
+  if not flushing_process.is_alive():
+    return
+
+  # Send the sentinel value for "flush one more time and exit".
+  try:
+    message_q.put(None)
+  # If the flushing process quits, the message Queue can become full.
+  except IOError:
+    if not flushing_process.is_alive():
+      return
+
+  logging.info('Waiting for ts_mon flushing process to finish...')
+  flushing_process.join(timeout=FLUSH_INTERVAL*2)
+  if flushing_process.is_alive():
+    flushing_process.terminate()
+  if flushing_process.exitcode:
+    logging.warning('ts_mon_config flushing process did not exit cleanly.')
+  logging.info('Finished waiting for ts_mon process.')
+
+
+def _SetupAndConsumeMessages(message_q, options):
+  """Sets up ts-mon, and starts a MetricConsumer loop.
+
+  Args:
+    message_q: The metric multiprocessing.Queue to read from.
+    options: An argparse options object to configure ts-mon with.
+  """
+  # Configure ts-mon, but don't start up a sending thread.
+  _SetupTsMonFromOptions(options, suppress_exception=True)
+  if not _WasSetup:
+    return
+
+  return MetricConsumer(message_q).Consume()
+
+
+class MetricConsumer(object):
+  """Configures ts_mon and gets metrics from a message queue.
+
+  This class is meant to be used in a subprocess. It configures itself
+  to receive a SIGHUP signal when the parent process dies, and catches the
+  signal in order to have a chance to flush any pending metrics one more time
+  before quitting.
+  """
+  def __init__(self, message_q):
+    # If our parent dies, finish flushing before exiting.
+    self.reset_after_flush = []
+    self.last_flush = 0
+    self.pending = False
+    self.message_q = message_q
+
+    if parallel.ExitWithParent(signal.SIGHUP):
+      signal.signal(signal.SIGHUP, lambda _sig, _stack: self._WaitToFlush())
+
+
+  def Consume(self):
+    """Emits metrics from self.message_q, flushing periodically.
+
+    The loop is terminated by a None entry on the Queue, which is a friendly
+    signal from the parent process that it's time to shut down. Before
+    returning, we wait to flush one more time to make sure that all the
+    metrics were sent.
+    """
+    message = self.message_q.get()
+    while message:
+      self._CallMetric(message)
+      message = self._WaitForNextMessage()
+
+    if self.pending:
+      self._WaitToFlush()
+
+
+  def _CallMetric(self, message):
+    """Calls the metric method from |message|, ignoring exceptions."""
+    try:
+      cls = getattr(metrics, message.metric_name)
+      message.method_kwargs.setdefault('fields', {})
+      message.metric_kwargs.setdefault('field_spec', [])
+      message.method_kwargs['fields'], message.metric_kwargs['field_spec'] = (
+          AddCommonFields(message.method_kwargs['fields'],
+                          message.metric_kwargs['field_spec']))
+      metric = cls(*message.metric_args, **message.metric_kwargs)
+      if message.reset_after:
+        self.reset_after_flush.append(metric)
+      getattr(metric, message.method)(
+          *message.method_args,
+          **message.method_kwargs)
+      self.pending = True
+    except Exception:
+      logging.exception('Caught an exception while running %s',
+                        _MethodCallRepr(message))
+
+
+  def _WaitForNextMessage(self):
+    """Waits for a new message, flushing every |FLUSH_INTERVAL| seconds."""
+    while True:
+      time_delta = self._FlushIfReady()
+      try:
+        timeout = FLUSH_INTERVAL - time_delta
+        message = self.message_q.get(timeout=timeout)
+        return message
+      except Queue.Empty:
+        pass
+
+
+  def _WaitToFlush(self):
+    """Sleeps until the next time we can call metrics.Flush(), then flushes."""
+    time_delta = time.time() - self.last_flush
+    time.sleep(max(0, FLUSH_INTERVAL - time_delta))
+    metrics.Flush(reset_after=self.reset_after_flush)
+
+
+  def _FlushIfReady(self):
+    """Call metrics.Flush() if we are ready and have pending metrics.
+
+    This allows us to only call flush every FLUSH_INTERVAL seconds.
+    """
+    now = time.time()
+    time_delta = now - self.last_flush
+    if time_delta > FLUSH_INTERVAL:
+      self.last_flush = now
+      time_delta = 0
+      metrics.Flush(reset_after=self.reset_after_flush)
+      self.pending = False
+    return time_delta
+
+
+def _MethodCallRepr(message):
+  """Gives a string representation of |obj|.|method|(*|args|, **|kwargs|)
+
+  Args:
+    message: A MetricCall object.
+  """
+  if not message:
+    return repr(message)
+  obj = message.metric_name
+  method = message.method
+  args = message.method_args
+  kwargs = message.method_kwargs
+
+  args_strings = ([repr(x) for x in args] +
+                  [(str(k) + '=' + repr(v))
+                   for k, v in kwargs.items()])
+  return '%s.%s(%s)' % (repr(obj), method, ', '.join(args_strings))
diff --git a/client/common_lib/perf_expectations/__init__.py b/utils/frozen_chromite/scripts/__init__.py
similarity index 100%
rename from client/common_lib/perf_expectations/__init__.py
rename to utils/frozen_chromite/scripts/__init__.py
diff --git a/utils/frozen_chromite/scripts/cros_set_lsb_release.py b/utils/frozen_chromite/scripts/cros_set_lsb_release.py
new file mode 100644
index 0000000..c9f9efa
--- /dev/null
+++ b/utils/frozen_chromite/scripts/cros_set_lsb_release.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility for setting the /etc/lsb-release file of an image."""
+
+from __future__ import print_function
+
+
+# LSB keys:
+# Set google-specific version numbers:
+# CHROMEOS_RELEASE_BOARD is the target board identifier.
+# CHROMEOS_RELEASE_BRANCH_NUMBER is the ChromeOS branch number.
+# CHROMEOS_RELEASE_BUILD_NUMBER is the ChromeOS build number.
+# CHROMEOS_RELEASE_BUILD_TYPE is the type of build (official, from developers,
+#   etc.).
+# CHROMEOS_RELEASE_CHROME_MILESTONE is the Chrome milestone (also named Chrome
+#   branch).
+# CHROMEOS_RELEASE_DESCRIPTION is the version displayed by Chrome; see
+#   chrome/browser/chromeos/chromeos_version_loader.cc.
+# CHROMEOS_RELEASE_NAME is a human readable name for the build.
+# CHROMEOS_RELEASE_PATCH_NUMBER is the patch number for the current branch.
+# CHROMEOS_RELEASE_TRACK and CHROMEOS_RELEASE_VERSION are used by the software
+#   update service.
+# CHROMEOS_RELEASE_KEYSET is the name of the keyset used to sign this build.
+# TODO(skrul):  Remove GOOGLE_RELEASE once Chromium is updated to look at
+#   CHROMEOS_RELEASE_VERSION for UserAgent data.
+LSB_KEY_NAME = 'CHROMEOS_RELEASE_NAME'
+LSB_KEY_AUSERVER = 'CHROMEOS_AUSERVER'
+LSB_KEY_DEVSERVER = 'CHROMEOS_DEVSERVER'
+LSB_KEY_TRACK = 'CHROMEOS_RELEASE_TRACK'
+LSB_KEY_BUILD_TYPE = 'CHROMEOS_RELEASE_BUILD_TYPE'
+LSB_KEY_DESCRIPTION = 'CHROMEOS_RELEASE_DESCRIPTION'
+LSB_KEY_BOARD = 'CHROMEOS_RELEASE_BOARD'
+LSB_KEY_KEYSET = 'CHROMEOS_RELEASE_KEYSET'
+LSB_KEY_UNIBUILD = 'CHROMEOS_RELEASE_UNIBUILD'
+LSB_KEY_BRANCH_NUMBER = 'CHROMEOS_RELEASE_BRANCH_NUMBER'
+LSB_KEY_BUILD_NUMBER = 'CHROMEOS_RELEASE_BUILD_NUMBER'
+LSB_KEY_CHROME_MILESTONE = 'CHROMEOS_RELEASE_CHROME_MILESTONE'
+LSB_KEY_PATCH_NUMBER = 'CHROMEOS_RELEASE_PATCH_NUMBER'
+LSB_KEY_VERSION = 'CHROMEOS_RELEASE_VERSION'
+LSB_KEY_BUILDER_PATH = 'CHROMEOS_RELEASE_BUILDER_PATH'
+LSB_KEY_GOOGLE_RELEASE = 'GOOGLE_RELEASE'
+LSB_KEY_APPID_RELEASE = 'CHROMEOS_RELEASE_APPID'
+LSB_KEY_APPID_BOARD = 'CHROMEOS_BOARD_APPID'
+LSB_KEY_APPID_CANARY = 'CHROMEOS_CANARY_APPID'
+LSB_KEY_ARC_VERSION = 'CHROMEOS_ARC_VERSION'
+LSB_KEY_ARC_ANDROID_SDK_VERSION = 'CHROMEOS_ARC_ANDROID_SDK_VERSION'
+
+CANARY_APP_ID = '{90F229CE-83E2-4FAF-8479-E368A34938B1}'
diff --git a/utils/frozen_chromite/ssh_keys/testing_rsa b/utils/frozen_chromite/ssh_keys/testing_rsa
new file mode 100644
index 0000000..d50a630
--- /dev/null
+++ b/utils/frozen_chromite/ssh_keys/testing_rsa
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEoAIBAAKCAQEAvsNpFdK5lb0GfKx+FgsrsM/2+aZVFYXHMPdvGtTz63ciRhq0
+Jnw7nln1SOcHraSz3/imECBg8NHIKV6rA+B9zbf7pZXEv20x5Ul0vrcPqYWC44PT
+tgsgvi8s0KZUZN93YlcjZ+Q7BjQ/tuwGSaLWLqJ7hnHALMJ3dbEM9fKBHQBCrG5H
+OaWD2gtXj7jp04M/WUnDDdemq/KMg6E9jcrJOiQ39IuTpas4hLQzVkKAKSrpl6MY
+2etHyoNarlWhcOwitArEDwf3WgnctwKstI/MTKB5BTpO2WXUNUv4kXzA+g8/l1al
+jIG13vtd9A/IV3KFVx/sLkkjuZ7z2rQXyNKuJwIBIwKCAQA79EWZJPh/hI0CnJyn
+16AEXp4T8nKDG2p9GpCiCGnq6u2Dvz/u1pZk97N9T+x4Zva0GvJc1vnlST7objW/
+Y8/ET8QeGSCT7x5PYDqiVspoemr3DCyYTKPkADKn+cLAngDzBXGHDTcfNP4U6xfr
+Qc5JK8BsFR8kApqSs/zCU4eqBtp2FVvPbgUOv3uUrFnjEuGs9rb1QZ0K6o08L4Cq
+N+e2nTysjp78blakZfqlurqTY6iJb0ImU2W3T8sV6w5GP1NT7eicXLO3WdIRB15a
+evogPeqtMo8GcO62wU/D4UCvq4GNEjvYOvFmPzXHvhTxsiWv5KEACtleBIEYmWHA
+POwrAoGBAOKgNRgxHL7r4bOmpLQcYK7xgA49OpikmrebXCQnZ/kZ3QsLVv1QdNMH
+Rx/ex7721g8R0oWslM14otZSMITCDCMWTYVBNM1bqYnUeEu5HagFwxjQ2tLuSs8E
+SBzEr96JLfhwuBhDH10sQqn+OQG1yj5acs4Pt3L4wlYwMx0vs1BxAoGBANd9Owro
+5ONiJXfKNaNY/cJYuLR+bzGeyp8oxToxgmM4UuA4hhDU7peg4sdoKJ4XjB9cKMCz
+ZGU5KHKKxNf95/Z7aywiIJEUE/xPRGNP6tngRunevp2QyvZf4pgvACvk1tl9B3HH
+7J5tY/GRkT4sQuZYpx3YnbdP5Y6Kx33BF7QXAoGAVCzghVQR/cVT1QNhvz29gs66
+iPIrtQnwUtNOHA6i9h+MnbPBOYRIpidGTaqEtKTTKisw79JjJ78X6TR4a9ML0oSg
+c1K71z9NmZgPbJU25qMN80ZCph3+h2f9hwc6AjLz0U5wQ4alP909VRVIX7iM8paf
+q59wBiHhyD3J16QAxhsCgYBu0rCmhmcV2rQu+kd4lCq7uJmBZZhFZ5tny9MlPgiK
+zIJkr1rkFbyIfqCDzyrU9irOTKc+iCUA25Ek9ujkHC4m/aTU3lnkNjYp/OFXpXF3
+XWZMY+0Ak5uUpldG85mwLIvATu3ivpbyZCTFYM5afSm4StmaUiU5tA+oZKEcGily
+jwKBgBdFLg+kTm877lcybQ04G1kIRMf5vAXcConzBt8ry9J+2iX1ddlu2K2vMroD
+1cP/U/EmvoCXSOGuetaI4UNQwE/rGCtkpvNj5y4twVLh5QufSOl49V0Ut0mwjPXw
+HfN/2MoO07vQrjgsFylvrw9A79xItABaqKndlmqlwMZWc9Ne
+-----END RSA PRIVATE KEY-----
diff --git a/client/common_lib/cros/fake_device_server/__init__.py b/utils/frozen_chromite/third_party/__init__.py
similarity index 100%
copy from client/common_lib/cros/fake_device_server/__init__.py
copy to utils/frozen_chromite/third_party/__init__.py
diff --git a/utils/frozen_chromite/third_party/apiclient/__init__.py b/utils/frozen_chromite/third_party/apiclient/__init__.py
new file mode 100644
index 0000000..d75e7a1
--- /dev/null
+++ b/utils/frozen_chromite/third_party/apiclient/__init__.py
@@ -0,0 +1,42 @@
+"""Retain apiclient as an alias for googleapiclient."""
+
+from six import iteritems
+
+import googleapiclient
+
+try:
+  import oauth2client
+except ImportError:
+  raise RuntimeError(
+      'Previous version of google-api-python-client detected; due to a '
+      'packaging issue, we cannot perform an in-place upgrade. To repair, '
+      'remove and reinstall this package, along with oauth2client and '
+      'uritemplate. One can do this with pip via\n'
+      '  pip install -I google-api-python-client'
+  )
+
+from googleapiclient import channel
+from googleapiclient import discovery
+from googleapiclient import errors
+from googleapiclient import http
+from googleapiclient import mimeparse
+from googleapiclient import model
+from googleapiclient import sample_tools
+from googleapiclient import schema
+
+__version__ = googleapiclient.__version__
+
+_SUBMODULES = {
+    'channel': channel,
+    'discovery': discovery,
+    'errors': errors,
+    'http': http,
+    'mimeparse': mimeparse,
+    'model': model,
+    'sample_tools': sample_tools,
+    'schema': schema,
+}
+
+import sys
+for module_name, module in iteritems(_SUBMODULES):
+  sys.modules['apiclient.%s' % module_name] = module
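+
+
+# Illustrative usage sketch (not part of the upstream shim): after the loop
+# above, legacy dotted names such as 'apiclient.discovery' resolve to the
+# corresponding googleapiclient modules registered in sys.modules.
+def _example_alias_lookup(name='discovery'):
+  """Return the module registered under the legacy 'apiclient.<name>' alias."""
+  return sys.modules['apiclient.%s' % name]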
diff --git a/utils/frozen_chromite/third_party/googleapiclient/__init__.py b/utils/frozen_chromite/third_party/googleapiclient/__init__.py
new file mode 100644
index 0000000..0753586
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "1.5.3"
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+
+try:  # Python 2.7+
+    from logging import NullHandler
+except ImportError:
+    class NullHandler(logging.Handler):
+        def emit(self, record):
+            pass
+
+logging.getLogger(__name__).addHandler(NullHandler())
diff --git a/utils/frozen_chromite/third_party/googleapiclient/channel.py b/utils/frozen_chromite/third_party/googleapiclient/channel.py
new file mode 100644
index 0000000..a38b4ff
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/channel.py
@@ -0,0 +1,293 @@
+"""Channel notifications support.
+
+Classes and functions to support channel subscriptions and notifications
+on those channels.
+
+Notes:
+  - This code is based on experimental APIs and is subject to change.
+  - Notification does not deduplicate notification ids; that's up to
+    the receiver.
+  - Storing the Channel between calls is up to the caller.
+
+
+Example setting up a channel:
+
+  # Create a new channel that gets notifications via webhook.
+  channel = new_webhook_channel("https://example.com/my_web_hook")
+
+  # Store the channel, keyed by 'channel.id'. Store it before calling the
+  # watch method because notifications may start arriving before the watch
+  # method returns.
+  ...
+
+  resp = service.objects().watchAll(
+    bucket="some_bucket_id", body=channel.body()).execute()
+  channel.update(resp)
+
+  # Store the channel, keyed by 'channel.id'. Store it after being updated
+  # since the resource_id value will now be correct, and that's needed to
+  # stop a subscription.
+  ...
+
+
+An example Webhook implementation using webapp2. Note that webapp2 puts
+headers in a case-insensitive dictionary, as headers aren't guaranteed to
+always be upper case.
+
+  id = self.request.headers[X_GOOG_CHANNEL_ID]
+
+  # Retrieve the channel by id.
+  channel = ...
+
+  # Parse notification from the headers, including validating the id.
+  n = notification_from_headers(channel, self.request.headers)
+
+  # Do app specific stuff with the notification here.
+  if n.resource_state == 'sync':
+    # Code to handle sync state.
+  elif n.resource_state == 'exists':
+    # Code to handle the exists state.
+  elif n.resource_state == 'not_exists':
+    # Code to handle the not exists state.
+
+
+Example of unsubscribing.
+
+  service.channels().stop(channel.body())
+"""
+from __future__ import absolute_import
+
+import datetime
+import uuid
+
+from googleapiclient import errors
+import six
+
+# Oauth2client < 3 has the positional helper in 'util', >= 3 has it
+# in '_helpers'.
+try:
+  from oauth2client import util
+except ImportError:
+  from oauth2client import _helpers as util
+
+
+# The unix time epoch starts at midnight 1970.
+EPOCH = datetime.datetime.utcfromtimestamp(0)
+
+# Map the names of the parameters in the JSON channel description to
+# the parameter names we use in the Channel class.
+CHANNEL_PARAMS = {
+    'address': 'address',
+    'id': 'id',
+    'expiration': 'expiration',
+    'params': 'params',
+    'resourceId': 'resource_id',
+    'resourceUri': 'resource_uri',
+    'type': 'type',
+    'token': 'token',
+    }
+
+X_GOOG_CHANNEL_ID     = 'X-GOOG-CHANNEL-ID'
+X_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER'
+X_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE'
+X_GOOG_RESOURCE_URI   = 'X-GOOG-RESOURCE-URI'
+X_GOOG_RESOURCE_ID    = 'X-GOOG-RESOURCE-ID'
+
+
+def _upper_header_keys(headers):
+  new_headers = {}
+  for k, v in six.iteritems(headers):
+    new_headers[k.upper()] = v
+  return new_headers
+
+
+class Notification(object):
+  """A Notification from a Channel.
+
+  Notifications are not usually constructed directly, but are returned
+  from functions like notification_from_headers().
+
+  Attributes:
+    message_number: int, The unique id number of this notification.
+    state: str, The state of the resource being monitored.
+    uri: str, The address of the resource being monitored.
+    resource_id: str, The unique identifier of the version of the resource at
+      this event.
+  """
+  @util.positional(5)
+  def __init__(self, message_number, state, resource_uri, resource_id):
+    """Notification constructor.
+
+    Args:
+      message_number: int, The unique id number of this notification.
+      state: str, The state of the resource being monitored. Can be one
+        of "exists", "not_exists", or "sync".
+      resource_uri: str, The address of the resource being monitored.
+      resource_id: str, The identifier of the watched resource.
+    """
+    self.message_number = message_number
+    self.state = state
+    self.resource_uri = resource_uri
+    self.resource_id = resource_id
+
+
+class Channel(object):
+  """A Channel for notifications.
+
+  Usually not constructed directly; instead it is returned from helper
+  functions like new_webhook_channel().
+
+  Attributes:
+    type: str, The type of delivery mechanism used by this channel. For
+      example, 'web_hook'.
+    id: str, A UUID for the channel.
+    token: str, An arbitrary string associated with the channel that
+      is delivered to the target address with each event delivered
+      over this channel.
+    address: str, The address of the receiving entity where events are
+      delivered. Specific to the channel type.
+    expiration: int, The time, in milliseconds from the epoch, when this
+      channel will expire.
+    params: dict, A dictionary of string to string, with additional parameters
+      controlling delivery channel behavior.
+    resource_id: str, An opaque id that identifies the resource that is
+      being watched. Stable across different API versions.
+    resource_uri: str, The canonicalized ID of the watched resource.
+  """
+
+  @util.positional(5)
+  def __init__(self, type, id, token, address, expiration=None,
+               params=None, resource_id="", resource_uri=""):
+    """Create a new Channel.
+
+    In user code, this Channel constructor will not typically be called
+    manually since there are functions for creating channels for each specific
+    type with a more customized set of arguments to pass.
+
+    Args:
+      type: str, The type of delivery mechanism used by this channel. For
+        example, 'web_hook'.
+      id: str, A UUID for the channel.
+      token: str, An arbitrary string associated with the channel that
+        is delivered to the target address with each event delivered
+        over this channel.
+      address: str,  The address of the receiving entity where events are
+        delivered. Specific to the channel type.
+      expiration: int, The time, in milliseconds from the epoch, when this
+        channel will expire.
+      params: dict, A dictionary of string to string, with additional parameters
+        controlling delivery channel behavior.
+      resource_id: str, An opaque id that identifies the resource that is
+        being watched. Stable across different API versions.
+      resource_uri: str, The canonicalized ID of the watched resource.
+    """
+    self.type = type
+    self.id = id
+    self.token = token
+    self.address = address
+    self.expiration = expiration
+    self.params = params
+    self.resource_id = resource_id
+    self.resource_uri = resource_uri
+
+  def body(self):
+    """Build a body from the Channel.
+
+    Constructs a dictionary that's appropriate for passing into watch()
+    methods as the value of body argument.
+
+    Returns:
+      A dictionary representation of the channel.
+    """
+    result = {
+        'id': self.id,
+        'token': self.token,
+        'type': self.type,
+        'address': self.address
+        }
+    if self.params:
+      result['params'] = self.params
+    if self.resource_id:
+      result['resourceId'] = self.resource_id
+    if self.resource_uri:
+      result['resourceUri'] = self.resource_uri
+    if self.expiration:
+      result['expiration'] = self.expiration
+
+    return result
+
+  def update(self, resp):
+    """Update a channel with information from the response of watch().
+
+    When a request is sent to watch() a resource, the response returned
+    from the watch() request is a dictionary with updated channel information,
+    such as the resource_id, which is needed when stopping a subscription.
+
+    Args:
+      resp: dict, The response from a watch() method.
+    """
+    for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
+      value = resp.get(json_name)
+      if value is not None:
+        setattr(self, param_name, value)
+
+
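+# Illustrative usage sketch (not part of the upstream module): body() builds
+# the dictionary handed to a watch() call, and update() copies the interesting
+# parts of the watch() response (such as resourceId) back onto the channel.
+# The response dictionary below is made up for the example.
+def _example_update_channel(channel):
+  body = channel.body()
+  fake_watch_response = {'resourceId': 'opaque-resource-id',
+                         'expiration': 1426325213000}
+  channel.update(fake_watch_response)
+  return body, channel.resource_id
+
+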
+def notification_from_headers(channel, headers):
+  """Parse a notification from the webhook request headers, validate
+    the notification, and return a Notification object.
+
+  Args:
+    channel: Channel, The channel that the notification is associated with.
+    headers: dict, A dictionary like object that contains the request headers
+      from the webhook HTTP request.
+
+  Returns:
+    A Notification object.
+
+  Raises:
+    errors.InvalidNotificationError if the notification is invalid.
+    ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
+  """
+  headers = _upper_header_keys(headers)
+  channel_id = headers[X_GOOG_CHANNEL_ID]
+  if channel.id != channel_id:
+    raise errors.InvalidNotificationError(
+        'Channel id mismatch: %s != %s' % (channel.id, channel_id))
+  else:
+    message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
+    state = headers[X_GOOG_RESOURCE_STATE]
+    resource_uri = headers[X_GOOG_RESOURCE_URI]
+    resource_id = headers[X_GOOG_RESOURCE_ID]
+    return Notification(message_number, state, resource_uri, resource_id)
+
+
+@util.positional(2)
+def new_webhook_channel(url, token=None, expiration=None, params=None):
+    """Create a new webhook Channel.
+
+    Args:
+      url: str, URL to post notifications to.
+      token: str, An arbitrary string associated with the channel that
+        is delivered to the target address with each notification delivered
+        over this channel.
+      expiration: datetime.datetime, A time in the future when the channel
+        should expire. Can also be None if the subscription should use the
+        default expiration. Note that different services may have different
+        limits on how long a subscription lasts. Check the response from the
+        watch() method to see the value the service has set for an expiration
+        time.
+      params: dict, Extra parameters to pass on channel creation. Currently
+        not used for webhook channels.
+    """
+    expiration_ms = 0
+    if expiration:
+      delta = expiration - EPOCH
+      expiration_ms = delta.microseconds/1000 + (
+          delta.seconds + delta.days*24*3600)*1000
+      if expiration_ms < 0:
+        expiration_ms = 0
+
+    return Channel('web_hook', str(uuid.uuid4()),
+                   token, url, expiration=expiration_ms,
+                   params=params)
+
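+
+# Illustrative usage sketch (not part of the upstream module): create a
+# webhook channel that expires roughly an hour from now; the datetime is
+# converted to milliseconds since the epoch by the code above.  The URL and
+# token are placeholders.
+def _example_webhook_channel():
+  expiration = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
+  return new_webhook_channel('https://example.com/notifications',
+                             token='opaque-channel-token',
+                             expiration=expiration)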
diff --git a/utils/frozen_chromite/third_party/googleapiclient/discovery.py b/utils/frozen_chromite/third_party/googleapiclient/discovery.py
new file mode 100644
index 0000000..598b222
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/discovery.py
@@ -0,0 +1,1109 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for discovery based APIs.
+
+A client library for Google's discovery based APIs.
+"""
+from __future__ import absolute_import
+import six
+from six.moves import zip
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__all__ = [
+    'build',
+    'build_from_document',
+    'fix_method_name',
+    'key2param',
+    ]
+
+from six import BytesIO
+from six.moves import http_client
+from six.moves.urllib.parse import urlencode, urlparse, urljoin, \
+  urlunparse, parse_qsl
+
+# Standard library imports
+import copy
+try:
+  from email.generator import BytesGenerator
+except ImportError:
+  from email.generator import Generator as BytesGenerator
+from email.mime.multipart import MIMEMultipart
+from email.mime.nonmultipart import MIMENonMultipart
+import json
+import keyword
+import logging
+import mimetypes
+import os
+import re
+
+# Third-party imports
+import httplib2
+import uritemplate
+
+# Local imports
+from googleapiclient import mimeparse
+from googleapiclient.errors import HttpError
+from googleapiclient.errors import InvalidJsonError
+from googleapiclient.errors import MediaUploadSizeError
+from googleapiclient.errors import UnacceptableMimeTypeError
+from googleapiclient.errors import UnknownApiNameOrVersion
+from googleapiclient.errors import UnknownFileType
+from googleapiclient.http import BatchHttpRequest
+from googleapiclient.http import HttpMock
+from googleapiclient.http import HttpMockSequence
+from googleapiclient.http import HttpRequest
+from googleapiclient.http import MediaFileUpload
+from googleapiclient.http import MediaUpload
+from googleapiclient.model import JsonModel
+from googleapiclient.model import MediaModel
+from googleapiclient.model import RawModel
+from googleapiclient.schema import Schemas
+from oauth2client.client import GoogleCredentials
+
+# Oauth2client < 3 has the positional helper in 'util', >= 3 has it
+# in '_helpers'.
+try:
+  from oauth2client.util import _add_query_parameter
+  from oauth2client.util import positional
+except ImportError:
+  from oauth2client._helpers import _add_query_parameter
+  from oauth2client._helpers import positional
+
+
+# The client library requires a version of httplib2 that supports RETRIES.
+httplib2.RETRIES = 1
+
+logger = logging.getLogger(__name__)
+
+URITEMPLATE = re.compile('{[^}]*}')
+VARNAME = re.compile('[a-zA-Z0-9_-]+')
+DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
+                 '{api}/{apiVersion}/rest')
+V1_DISCOVERY_URI = DISCOVERY_URI
+V2_DISCOVERY_URI = ('https://{api}.googleapis.com/$discovery/rest?'
+                    'version={apiVersion}')
+DEFAULT_METHOD_DOC = 'A description of how to use this function'
+HTTP_PAYLOAD_METHODS = frozenset(['PUT', 'POST', 'PATCH'])
+_MEDIA_SIZE_BIT_SHIFTS = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
+BODY_PARAMETER_DEFAULT_VALUE = {
+    'description': 'The request body.',
+    'type': 'object',
+    'required': True,
+}
+MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
+    'description': ('The filename of the media request body, or an instance '
+                    'of a MediaUpload object.'),
+    'type': 'string',
+    'required': False,
+}
+
+# Parameters accepted by the stack, but not visible via discovery.
+# TODO(dhermes): Remove 'userip' in 'v2'.
+STACK_QUERY_PARAMETERS = frozenset(['trace', 'pp', 'userip', 'strict'])
+STACK_QUERY_PARAMETER_DEFAULT_VALUE = {'type': 'string', 'location': 'query'}
+
+# Library-specific reserved words beyond Python keywords.
+RESERVED_WORDS = frozenset(['body'])
+
+# patch _write_lines to avoid munging '\r' into '\n'
+# ( https://bugs.python.org/issue18886 https://bugs.python.org/issue19003 )
+class _BytesGenerator(BytesGenerator):
+  _write_lines = BytesGenerator.write
+
+def fix_method_name(name):
+  """Fix method names to avoid reserved word conflicts.
+
+  Args:
+    name: string, method name.
+
+  Returns:
+    The name with a '_' prefixed if the name is a reserved word.
+  """
+  if keyword.iskeyword(name) or name in RESERVED_WORDS:
+    return name + '_'
+  else:
+    return name
+
+
+def key2param(key):
+  """Converts key names into parameter names.
+
+  For example, converting "max-results" -> "max_results"
+
+  Args:
+    key: string, the method key name.
+
+  Returns:
+    A safe method name based on the key name.
+  """
+  result = []
+  key = list(key)
+  if not key[0].isalpha():
+    result.append('x')
+  for c in key:
+    if c.isalnum():
+      result.append(c)
+    else:
+      result.append('_')
+
+  return ''.join(result)
+
+
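+# Illustrative usage sketch (not part of the upstream module): method and
+# parameter names taken from a discovery document are sanitized before they
+# become Python attribute names, e.g. 'import' -> 'import_' and
+# 'max-results' -> 'max_results'.
+def _example_sanitized_names():
+  return fix_method_name('import'), key2param('max-results')
+
+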
+@positional(2)
+def build(serviceName,
+          version,
+          http=None,
+          discoveryServiceUrl=DISCOVERY_URI,
+          developerKey=None,
+          model=None,
+          requestBuilder=HttpRequest,
+          credentials=None,
+          cache_discovery=True,
+          cache=None):
+  """Construct a Resource for interacting with an API.
+
+  Construct a Resource object for interacting with an API. The serviceName and
+  version are the names from the Discovery service.
+
+  Args:
+    serviceName: string, name of the service.
+    version: string, the version of the service.
+    http: httplib2.Http, An instance of httplib2.Http or something that acts
+      like it through which HTTP requests will be made.
+    discoveryServiceUrl: string, a URI Template that points to the location of
+      the discovery service. It should have two parameters {api} and
+      {apiVersion} that when filled in produce an absolute URI to the discovery
+      document for that service.
+    developerKey: string, key obtained from
+      https://code.google.com/apis/console.
+    model: googleapiclient.Model, converts to and from the wire format.
+    requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP
+      request.
+    credentials: oauth2client.Credentials, credentials to be used for
+      authentication.
+    cache_discovery: Boolean, whether or not to cache the discovery doc.
+    cache: googleapiclient.discovery_cache.base.CacheBase, an optional
+      cache object for the discovery documents.
+
+  Returns:
+    A Resource object with methods for interacting with the service.
+  """
+  params = {
+      'api': serviceName,
+      'apiVersion': version
+      }
+
+  if http is None:
+    http = httplib2.Http()
+
+  for discovery_url in (discoveryServiceUrl, V2_DISCOVERY_URI,):
+    requested_url = uritemplate.expand(discovery_url, params)
+
+    try:
+      content = _retrieve_discovery_doc(requested_url, http, cache_discovery,
+                                        cache)
+      return build_from_document(content, base=discovery_url, http=http,
+          developerKey=developerKey, model=model, requestBuilder=requestBuilder,
+          credentials=credentials)
+    except HttpError as e:
+      if e.resp.status == http_client.NOT_FOUND:
+        continue
+      else:
+        raise e
+
+  raise UnknownApiNameOrVersion(
+        "name: %s  version: %s" % (serviceName, version))
+
+
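+# Illustrative usage sketch (not part of the upstream module): a typical
+# caller builds a service object by API name and version; the returned
+# Resource exposes methods generated from the discovery document.  The
+# 'storage'/'v1' pair is just an example API.
+def _example_build_service(credentials=None):
+  # Fetches and caches the discovery document, then generates the Resource.
+  return build('storage', 'v1', credentials=credentials)
+
+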
+def _retrieve_discovery_doc(url, http, cache_discovery, cache=None):
+  """Retrieves the discovery_doc from cache or the internet.
+
+  Args:
+    url: string, the URL of the discovery document.
+    http: httplib2.Http, An instance of httplib2.Http or something that acts
+      like it through which HTTP requests will be made.
+    cache_discovery: Boolean, whether or not to cache the discovery doc.
+    cache: googleapiclient.discovery_cache.base.Cache, an optional cache
+      object for the discovery documents.
+
+  Returns:
+    A unicode string representation of the discovery document.
+  """
+  if cache_discovery:
+    from . import discovery_cache
+    from .discovery_cache import base
+    if cache is None:
+      cache = discovery_cache.autodetect()
+    if cache:
+      content = cache.get(url)
+      if content:
+        return content
+
+  actual_url = url
+  # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
+  # variable that contains the network address of the client sending the
+  # request. If it exists then add that to the request for the discovery
+  # document to avoid exceeding the quota on discovery requests.
+  if 'REMOTE_ADDR' in os.environ:
+    actual_url = _add_query_parameter(url, 'userIp', os.environ['REMOTE_ADDR'])
+  logger.info('URL being requested: GET %s', actual_url)
+
+  resp, content = http.request(actual_url)
+
+  if resp.status >= 400:
+    raise HttpError(resp, content, uri=actual_url)
+
+  try:
+    content = content.decode('utf-8')
+  except AttributeError:
+    pass
+
+  try:
+    service = json.loads(content)
+  except ValueError as e:
+    logger.error('Failed to parse as JSON: ' + content)
+    raise InvalidJsonError()
+  if cache_discovery and cache:
+    cache.set(url, content)
+  return content
+
+
+@positional(1)
+def build_from_document(
+    service,
+    base=None,
+    future=None,
+    http=None,
+    developerKey=None,
+    model=None,
+    requestBuilder=HttpRequest,
+    credentials=None):
+  """Create a Resource for interacting with an API.
+
+  Same as `build()`, but constructs the Resource object from a discovery
+  document that it is given, as opposed to retrieving one over HTTP.
+
+  Args:
+    service: string or object, the JSON discovery document describing the API.
+      The value passed in may either be the JSON string or the deserialized
+      JSON.
+    base: string, base URI for all HTTP requests, usually the discovery URI.
+      This parameter is no longer used as rootUrl and servicePath are included
+      within the discovery document. (deprecated)
+    future: string, discovery document with future capabilities (deprecated).
+    http: httplib2.Http, An instance of httplib2.Http or something that acts
+      like it through which HTTP requests will be made.
+    developerKey: string, Key for controlling API usage, generated
+      from the API Console.
+    model: Model class instance that serializes and de-serializes requests and
+      responses.
+    requestBuilder: Takes an http request and packages it up to be executed.
+    credentials: object, credentials to be used for authentication.
+
+  Returns:
+    A Resource object with methods for interacting with the service.
+  """
+
+  if http is None:
+    http = httplib2.Http()
+
+  # future is no longer used.
+  future = {}
+
+  if isinstance(service, six.string_types):
+    service = json.loads(service)
+
+  if 'rootUrl' not in service and isinstance(http, (HttpMock,
+                                                    HttpMockSequence)):
+    logger.error("You are using HttpMock or HttpMockSequence without "
+                 "having the service discovery doc in cache. Try calling "
+                 "build() without mocking once first to populate the "
+                 "cache.")
+    raise InvalidJsonError()
+
+  base = urljoin(service['rootUrl'], service['servicePath'])
+  schema = Schemas(service)
+
+  if credentials:
+    # If credentials were passed in, we could have two cases:
+    # 1. the scopes were specified, in which case the given credentials
+    #    are used for authorizing the http;
+    # 2. the scopes were not provided (meaning the Application Default
+    #    Credentials are to be used). In this case, the Application Default
+    #    Credentials are built and used instead of the original credentials.
+    #    If there are no scopes found (meaning the given service requires no
+    #    authentication), there is no authorization of the http.
+    if (isinstance(credentials, GoogleCredentials) and
+        credentials.create_scoped_required()):
+      scopes = service.get('auth', {}).get('oauth2', {}).get('scopes', {})
+      if scopes:
+        credentials = credentials.create_scoped(list(scopes.keys()))
+      else:
+        # No need to authorize the http object
+        # if the service does not require authentication.
+        credentials = None
+
+    if credentials:
+      http = credentials.authorize(http)
+
+  if model is None:
+    features = service.get('features', [])
+    model = JsonModel('dataWrapper' in features)
+  return Resource(http=http, baseUrl=base, model=model,
+                  developerKey=developerKey, requestBuilder=requestBuilder,
+                  resourceDesc=service, rootDesc=service, schema=schema)
+
+
+def _cast(value, schema_type):
+  """Convert value to a string based on JSON Schema type.
+
+  See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
+  JSON Schema.
+
+  Args:
+    value: any, the value to convert
+    schema_type: string, the type that value should be interpreted as
+
+  Returns:
+    A string representation of 'value' based on the schema_type.
+  """
+  if schema_type == 'string':
+    if type(value) == type('') or type(value) == type(u''):
+      return value
+    else:
+      return str(value)
+  elif schema_type == 'integer':
+    return str(int(value))
+  elif schema_type == 'number':
+    return str(float(value))
+  elif schema_type == 'boolean':
+    return str(bool(value)).lower()
+  else:
+    if type(value) == type('') or type(value) == type(u''):
+      return value
+    else:
+      return str(value)
+
+
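+# Illustrative usage sketch (not part of the upstream module): query parameter
+# values are serialized according to the JSON Schema type declared in the
+# discovery document, e.g. booleans become 'true'/'false' and numbers become
+# their decimal string form.
+def _example_cast_values():
+  return _cast(True, 'boolean'), _cast(10, 'string'), _cast('7', 'integer')
+
+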
+def _media_size_to_long(maxSize):
+  """Convert a string media size, such as 10GB or 3TB into an integer.
+
+  Args:
+    maxSize: string, size as a string, such as 2MB or 7GB.
+
+  Returns:
+    The size as an integer value.
+  """
+  if len(maxSize) < 2:
+    return 0
+  units = maxSize[-2:].upper()
+  bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
+  if bit_shift is not None:
+    return int(maxSize[:-2]) << bit_shift
+  else:
+    return int(maxSize)
+
+
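+# Illustrative usage sketch (not part of the upstream module): 'maxSize'
+# strings from a discovery document are converted to byte counts with a bit
+# shift, so '10GB' becomes 10 << 30 and '5MB' becomes 5 << 20.
+def _example_media_sizes():
+  return _media_size_to_long('10GB'), _media_size_to_long('5MB')
+
+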
+def _media_path_url_from_info(root_desc, path_url):
+  """Creates an absolute media path URL.
+
+  Constructed using the API root URI and service path from the discovery
+  document and the relative path for the API method.
+
+  Args:
+    root_desc: Dictionary; the entire original deserialized discovery document.
+    path_url: String; the relative URL for the API method. Relative to the API
+        root, which is specified in the discovery document.
+
+  Returns:
+    String; the absolute URI for media upload for the API method.
+  """
+  return '%(root)supload/%(service_path)s%(path)s' % {
+      'root': root_desc['rootUrl'],
+      'service_path': root_desc['servicePath'],
+      'path': path_url,
+  }
+
+
+def _fix_up_parameters(method_desc, root_desc, http_method):
+  """Updates parameters of an API method with values specific to this library.
+
+  Specifically, adds whatever global parameters are specified by the API to the
+  parameters for the individual method. Also adds parameters which don't
+  appear in the discovery document, but are available to all discovery based
+  APIs (these are listed in STACK_QUERY_PARAMETERS).
+
+  SIDE EFFECTS: This updates the parameters dictionary object in the method
+  description.
+
+  Args:
+    method_desc: Dictionary with metadata describing an API method. Value comes
+        from the dictionary of methods stored in the 'methods' key in the
+        deserialized discovery document.
+    root_desc: Dictionary; the entire original deserialized discovery document.
+    http_method: String; the HTTP method used to call the API method described
+        in method_desc.
+
+  Returns:
+    The updated Dictionary stored in the 'parameters' key of the method
+        description dictionary.
+  """
+  parameters = method_desc.setdefault('parameters', {})
+
+  # Add in the parameters common to all methods.
+  for name, description in six.iteritems(root_desc.get('parameters', {})):
+    parameters[name] = description
+
+  # Add in undocumented query parameters.
+  for name in STACK_QUERY_PARAMETERS:
+    parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()
+
+  # Add 'body' (our own reserved word) to parameters if the method supports
+  # a request payload.
+  if http_method in HTTP_PAYLOAD_METHODS and 'request' in method_desc:
+    body = BODY_PARAMETER_DEFAULT_VALUE.copy()
+    body.update(method_desc['request'])
+    parameters['body'] = body
+
+  return parameters
+
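+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# For a payload-carrying HTTP method, `_fix_up_parameters` merges the API-wide
+# parameters and the stack query parameters into the method's parameters and
+# adds a 'body' entry derived from the method's 'request' description. The
+# 'Thing' schema reference below is hypothetical.
+def _example_fix_up_parameters():
+  """Show the side effect of `_fix_up_parameters` (illustration only)."""
+  method_desc = {
+      'parameters': {'name': {'type': 'string', 'location': 'path'}},
+      'request': {'$ref': 'Thing'},
+  }
+  root_desc = {'parameters': {'alt': {'type': 'string', 'location': 'query'}}}
+  params = _fix_up_parameters(method_desc, root_desc, 'POST')
+  # `params` now holds 'name', 'alt', every STACK_QUERY_PARAMETERS entry, and
+  # a 'body' parameter; it is the same object as method_desc['parameters'].
+  return params
+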
+
+def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
+  """Updates parameters of API by adding 'media_body' if supported by method.
+
+  SIDE EFFECTS: If the method supports media upload and has a required body,
+  sets body to be optional (required=False) instead. Also, if there is a
+  'mediaUpload' in the method description, adds 'media_upload' key to
+  parameters.
+
+  Args:
+    method_desc: Dictionary with metadata describing an API method. Value comes
+        from the dictionary of methods stored in the 'methods' key in the
+        deserialized discovery document.
+    root_desc: Dictionary; the entire original deserialized discovery document.
+    path_url: String; the relative URL for the API method. Relative to the API
+        root, which is specified in the discovery document.
+    parameters: A dictionary describing method parameters for method described
+        in method_desc.
+
+  Returns:
+    Triple (accept, max_size, media_path_url) where:
+      - accept is a list of strings representing what content types are
+        accepted for media upload. Defaults to empty list if not in the
+        discovery document.
+      - max_size is a long representing the max size in bytes allowed for a
+        media upload. Defaults to 0L if not in the discovery document.
+      - media_path_url is a String; the absolute URI for media upload for the
+        API method. Constructed using the API root URI and service path from
+        the discovery document and the relative path for the API method. If
+        media upload is not supported, this is None.
+  """
+  media_upload = method_desc.get('mediaUpload', {})
+  accept = media_upload.get('accept', [])
+  max_size = _media_size_to_long(media_upload.get('maxSize', ''))
+  media_path_url = None
+
+  if media_upload:
+    media_path_url = _media_path_url_from_info(root_desc, path_url)
+    parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
+    if 'body' in parameters:
+      parameters['body']['required'] = False
+
+  return accept, max_size, media_path_url
+
+
+def _fix_up_method_description(method_desc, root_desc):
+  """Updates a method description in a discovery document.
+
+  SIDE EFFECTS: Changes the parameters dictionary in the method description with
+  extra parameters which are used locally.
+
+  Args:
+    method_desc: Dictionary with metadata describing an API method. Value comes
+        from the dictionary of methods stored in the 'methods' key in the
+        deserialized discovery document.
+    root_desc: Dictionary; the entire original deserialized discovery document.
+
+  Returns:
+    Tuple (path_url, http_method, method_id, accept, max_size, media_path_url)
+    where:
+      - path_url is a String; the relative URL for the API method. Relative to
+        the API root, which is specified in the discovery document.
+      - http_method is a String; the HTTP method used to call the API method
+        described in the method description.
+      - method_id is a String; the name of the RPC method associated with the
+        API method, and is in the method description in the 'id' key.
+      - accept is a list of strings representing what content types are
+        accepted for media upload. Defaults to empty list if not in the
+        discovery document.
+      - max_size is a long representing the max size in bytes allowed for a
+        media upload. Defaults to 0L if not in the discovery document.
+      - media_path_url is a String; the absolute URI for media upload for the
+        API method. Constructed using the API root URI and service path from
+        the discovery document and the relative path for the API method. If
+        media upload is not supported, this is None.
+  """
+  path_url = method_desc['path']
+  http_method = method_desc['httpMethod']
+  method_id = method_desc['id']
+
+  parameters = _fix_up_parameters(method_desc, root_desc, http_method)
+  # Order is important. `_fix_up_media_upload` needs `method_desc` to have a
+  # 'parameters' key and needs to know if there is a 'body' parameter because it
+  # also sets a 'media_body' parameter.
+  accept, max_size, media_path_url = _fix_up_media_upload(
+      method_desc, root_desc, path_url, parameters)
+
+  return path_url, http_method, method_id, accept, max_size, media_path_url
+
+
+def _urljoin(base, url):
+  """Custom urljoin replacement supporting : before / in url."""
+  # In general, it's unsafe to simply join base and url. However, for
+  # the case of discovery documents, we know:
+  #  * base will never contain params, query, or fragment
+  #  * url will never contain a scheme or net_loc.
+  # In general, this means we can safely join on /; we just need to
+  # ensure we end up with precisely one / joining base and url. The
+  # exception here is the case of media uploads, where url will be an
+  # absolute url.
+  if url.startswith('http://') or url.startswith('https://'):
+    return urljoin(base, url)
+  new_base = base if base.endswith('/') else base + '/'
+  new_url = url[1:] if url.startswith('/') else url
+  return new_base + new_url
+
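+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# Unlike the standard urljoin, a leading '/' on the relative URL does not
+# reset the base path here; the helper only normalizes the separator, and
+# absolute media-upload URLs are delegated to the standard urljoin.
+def _example_urljoin():
+  """Return a few representative joins (illustration only)."""
+  return [
+      _urljoin('https://www.example.com/api/v1/', 'items/list'),
+      # -> 'https://www.example.com/api/v1/items/list'
+      _urljoin('https://www.example.com/api/v1', '/items/list'),
+      # -> 'https://www.example.com/api/v1/items/list'
+      _urljoin('https://www.example.com/api/v1/', 'https://media.example.com/u'),
+      # absolute URL -> 'https://media.example.com/u'
+  ]
+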
+
+# TODO(dhermes): Convert this class to ResourceMethod and make it callable
+class ResourceMethodParameters(object):
+  """Represents the parameters associated with a method.
+
+  Attributes:
+    argmap: Map from method parameter name (string) to query parameter name
+        (string).
+    required_params: List of required parameters (represented by parameter
+        name as string).
+    repeated_params: List of repeated parameters (represented by parameter
+        name as string).
+    pattern_params: Map from method parameter name (string) to regular
+        expression (as a string). If the pattern is set for a parameter, the
+        value for that parameter must match the regular expression.
+    query_params: List of parameters (represented by parameter name as string)
+        that will be used in the query string.
+    path_params: Set of parameters (represented by parameter name as string)
+        that will be used in the base URL path.
+    param_types: Map from method parameter name (string) to parameter type. Type
+        can be any valid JSON schema type; valid values are 'any', 'array',
+        'boolean', 'integer', 'number', 'object', or 'string'. Reference:
+        http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1
+    enum_params: Map from method parameter name (string) to list of strings,
+       where each list of strings is the list of acceptable enum values.
+  """
+
+  def __init__(self, method_desc):
+    """Constructor for ResourceMethodParameters.
+
+    Sets default values and defers to set_parameters to populate.
+
+    Args:
+      method_desc: Dictionary with metadata describing an API method. Value
+          comes from the dictionary of methods stored in the 'methods' key in
+          the deserialized discovery document.
+    """
+    self.argmap = {}
+    self.required_params = []
+    self.repeated_params = []
+    self.pattern_params = {}
+    self.query_params = []
+    # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
+    #                parsing is removed.
+    self.path_params = set()
+    self.param_types = {}
+    self.enum_params = {}
+
+    self.set_parameters(method_desc)
+
+  def set_parameters(self, method_desc):
+    """Populates maps and lists based on method description.
+
+    Iterates through each parameter for the method and parses the values from
+    the parameter dictionary.
+
+    Args:
+      method_desc: Dictionary with metadata describing an API method. Value
+          comes from the dictionary of methods stored in the 'methods' key in
+          the deserialized discovery document.
+    """
+    for arg, desc in six.iteritems(method_desc.get('parameters', {})):
+      param = key2param(arg)
+      self.argmap[param] = arg
+
+      if desc.get('pattern'):
+        self.pattern_params[param] = desc['pattern']
+      if desc.get('enum'):
+        self.enum_params[param] = desc['enum']
+      if desc.get('required'):
+        self.required_params.append(param)
+      if desc.get('repeated'):
+        self.repeated_params.append(param)
+      if desc.get('location') == 'query':
+        self.query_params.append(param)
+      if desc.get('location') == 'path':
+        self.path_params.add(param)
+      self.param_types[param] = desc.get('type', 'string')
+
+    # TODO(dhermes): Determine if this is still necessary. Discovery based APIs
+    #                should have all path parameters already marked with
+    #                'location: path'.
+    for match in URITEMPLATE.finditer(method_desc['path']):
+      for namematch in VARNAME.finditer(match.group(0)):
+        name = key2param(namematch.group(0))
+        self.path_params.add(name)
+        if name in self.query_params:
+          self.query_params.remove(name)
+
+
+def createMethod(methodName, methodDesc, rootDesc, schema):
+  """Creates a method for attaching to a Resource.
+
+  Args:
+    methodName: string, name of the method to use.
+    methodDesc: object, fragment of deserialized discovery document that
+      describes the method.
+    rootDesc: object, the entire deserialized discovery document.
+    schema: object, mapping of schema names to schema descriptions.
+  """
+  methodName = fix_method_name(methodName)
+  (pathUrl, httpMethod, methodId, accept,
+   maxSize, mediaPathUrl) = _fix_up_method_description(methodDesc, rootDesc)
+
+  parameters = ResourceMethodParameters(methodDesc)
+
+  def method(self, **kwargs):
+    # Don't bother with doc string, it will be over-written by createMethod.
+
+    for name in six.iterkeys(kwargs):
+      if name not in parameters.argmap:
+        raise TypeError('Got an unexpected keyword argument "%s"' % name)
+
+    # Remove args that have a value of None.
+    keys = list(kwargs.keys())
+    for name in keys:
+      if kwargs[name] is None:
+        del kwargs[name]
+
+    for name in parameters.required_params:
+      if name not in kwargs:
+        raise TypeError('Missing required parameter "%s"' % name)
+
+    for name, regex in six.iteritems(parameters.pattern_params):
+      if name in kwargs:
+        if isinstance(kwargs[name], six.string_types):
+          pvalues = [kwargs[name]]
+        else:
+          pvalues = kwargs[name]
+        for pvalue in pvalues:
+          if re.match(regex, pvalue) is None:
+            raise TypeError(
+                'Parameter "%s" value "%s" does not match the pattern "%s"' %
+                (name, pvalue, regex))
+
+    for name, enums in six.iteritems(parameters.enum_params):
+      if name in kwargs:
+        # We need to handle the case of a repeated enum
+        # name differently, since we want to handle both
+        # arg='value' and arg=['value1', 'value2']
+        if (name in parameters.repeated_params and
+            not isinstance(kwargs[name], six.string_types)):
+          values = kwargs[name]
+        else:
+          values = [kwargs[name]]
+        for value in values:
+          if value not in enums:
+            raise TypeError(
+                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
+                (name, value, str(enums)))
+
+    actual_query_params = {}
+    actual_path_params = {}
+    for key, value in six.iteritems(kwargs):
+      to_type = parameters.param_types.get(key, 'string')
+      # For repeated parameters we cast each member of the list.
+      if key in parameters.repeated_params and type(value) == type([]):
+        cast_value = [_cast(x, to_type) for x in value]
+      else:
+        cast_value = _cast(value, to_type)
+      if key in parameters.query_params:
+        actual_query_params[parameters.argmap[key]] = cast_value
+      if key in parameters.path_params:
+        actual_path_params[parameters.argmap[key]] = cast_value
+    body_value = kwargs.get('body', None)
+    media_filename = kwargs.get('media_body', None)
+
+    if self._developerKey:
+      actual_query_params['key'] = self._developerKey
+
+    model = self._model
+    if methodName.endswith('_media'):
+      model = MediaModel()
+    elif 'response' not in methodDesc:
+      model = RawModel()
+
+    headers = {}
+    headers, params, query, body = model.request(headers,
+        actual_path_params, actual_query_params, body_value)
+
+    expanded_url = uritemplate.expand(pathUrl, params)
+    url = _urljoin(self._baseUrl, expanded_url + query)
+
+    resumable = None
+    multipart_boundary = ''
+
+    if media_filename:
+      # Ensure we end up with a valid MediaUpload object.
+      if isinstance(media_filename, six.string_types):
+        (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
+        if media_mime_type is None:
+          raise UnknownFileType(media_filename)
+        if not mimeparse.best_match([media_mime_type], ','.join(accept)):
+          raise UnacceptableMimeTypeError(media_mime_type)
+        media_upload = MediaFileUpload(media_filename,
+                                       mimetype=media_mime_type)
+      elif isinstance(media_filename, MediaUpload):
+        media_upload = media_filename
+      else:
+        raise TypeError('media_filename must be str or MediaUpload.')
+
+      # Check the maxSize
+      if media_upload.size() is not None and media_upload.size() > maxSize > 0:
+        raise MediaUploadSizeError("Media larger than: %s" % maxSize)
+
+      # Use the media path uri for media uploads
+      expanded_url = uritemplate.expand(mediaPathUrl, params)
+      url = _urljoin(self._baseUrl, expanded_url + query)
+      if media_upload.resumable():
+        url = _add_query_parameter(url, 'uploadType', 'resumable')
+
+      if media_upload.resumable():
+        # This is all we need to do for resumable, if the body exists it gets
+        # sent in the first request, otherwise an empty body is sent.
+        resumable = media_upload
+      else:
+        # A non-resumable upload
+        if body is None:
+          # This is a simple media upload
+          headers['content-type'] = media_upload.mimetype()
+          body = media_upload.getbytes(0, media_upload.size())
+          url = _add_query_parameter(url, 'uploadType', 'media')
+        else:
+          # This is a multipart/related upload.
+          msgRoot = MIMEMultipart('related')
+          # msgRoot should not write out its own headers.
+          setattr(msgRoot, '_write_headers', lambda self: None)
+
+          # attach the body as one part
+          msg = MIMENonMultipart(*headers['content-type'].split('/'))
+          msg.set_payload(body)
+          msgRoot.attach(msg)
+
+          # attach the media as the second part
+          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
+          msg['Content-Transfer-Encoding'] = 'binary'
+
+          payload = media_upload.getbytes(0, media_upload.size())
+          msg.set_payload(payload)
+          msgRoot.attach(msg)
+          # encode the body: note that we can't use `as_string`, because
+          # it plays games with `From ` lines.
+          fp = BytesIO()
+          g = _BytesGenerator(fp, mangle_from_=False)
+          g.flatten(msgRoot, unixfrom=False)
+          body = fp.getvalue()
+
+          multipart_boundary = msgRoot.get_boundary()
+          headers['content-type'] = ('multipart/related; '
+                                     'boundary="%s"') % multipart_boundary
+          url = _add_query_parameter(url, 'uploadType', 'multipart')
+
+    logger.info('URL being requested: %s %s' % (httpMethod, url))
+    return self._requestBuilder(self._http,
+                                model.response,
+                                url,
+                                method=httpMethod,
+                                body=body,
+                                headers=headers,
+                                methodId=methodId,
+                                resumable=resumable)
+
+  docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
+  if len(parameters.argmap) > 0:
+    docs.append('Args:\n')
+
+  # Skip undocumented params and params common to all methods.
+  skip_parameters = list(rootDesc.get('parameters', {}).keys())
+  skip_parameters.extend(STACK_QUERY_PARAMETERS)
+
+  all_args = list(parameters.argmap.keys())
+  args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])]
+
+  # Move body to the front of the line.
+  if 'body' in all_args:
+    args_ordered.append('body')
+
+  for name in all_args:
+    if name not in args_ordered:
+      args_ordered.append(name)
+
+  for arg in args_ordered:
+    if arg in skip_parameters:
+      continue
+
+    repeated = ''
+    if arg in parameters.repeated_params:
+      repeated = ' (repeated)'
+    required = ''
+    if arg in parameters.required_params:
+      required = ' (required)'
+    paramdesc = methodDesc['parameters'][parameters.argmap[arg]]
+    paramdoc = paramdesc.get('description', 'A parameter')
+    if '$ref' in paramdesc:
+      docs.append(
+          ('  %s: object, %s%s%s\n    The object takes the'
+          ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
+            schema.prettyPrintByName(paramdesc['$ref'])))
+    else:
+      paramtype = paramdesc.get('type', 'string')
+      docs.append('  %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
+                                          repeated))
+    enum = paramdesc.get('enum', [])
+    enumDesc = paramdesc.get('enumDescriptions', [])
+    if enum and enumDesc:
+      docs.append('    Allowed values\n')
+      for (name, desc) in zip(enum, enumDesc):
+        docs.append('      %s - %s\n' % (name, desc))
+  if 'response' in methodDesc:
+    if methodName.endswith('_media'):
+      docs.append('\nReturns:\n  The media object as a string.\n\n    ')
+    else:
+      docs.append('\nReturns:\n  An object of the form:\n\n    ')
+      docs.append(schema.prettyPrintSchema(methodDesc['response']))
+
+  setattr(method, '__doc__', ''.join(docs))
+  return (methodName, method)
+
+
+def createNextMethod(methodName):
+  """Creates any _next methods for attaching to a Resource.
+
+  The _next methods allow for easy iteration through list() responses.
+
+  Args:
+    methodName: string, name of the method to use.
+  """
+  methodName = fix_method_name(methodName)
+
+  def methodNext(self, previous_request, previous_response):
+    """Retrieves the next page of results.
+
+Args:
+  previous_request: The request for the previous page. (required)
+  previous_response: The response from the request for the previous page. (required)
+
+Returns:
+  A request object that you can call 'execute()' on to request the next
+  page. Returns None if there are no more items in the collection.
+    """
+    # Retrieve nextPageToken from previous_response
+    # Use as pageToken in previous_request to create new request.
+
+    if 'nextPageToken' not in previous_response or not previous_response['nextPageToken']:
+      return None
+
+    request = copy.copy(previous_request)
+
+    pageToken = previous_response['nextPageToken']
+    parsed = list(urlparse(request.uri))
+    q = parse_qsl(parsed[4])
+
+    # Find and remove old 'pageToken' value from URI
+    newq = [(key, value) for (key, value) in q if key != 'pageToken']
+    newq.append(('pageToken', pageToken))
+    parsed[4] = urlencode(newq)
+    uri = urlunparse(parsed)
+
+    request.uri = uri
+
+    logger.info('URL being requested: %s %s' % (methodName, uri))
+
+    return request
+
+  return (methodName, methodNext)
+
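+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# The generated `<method>_next` helpers support the usual pagination loop;
+# `collection` stands for any resource whose list() response carries a
+# nextPageToken, and the 'items' key is a common but not universal convention.
+def _example_paginate(collection, **list_kwargs):
+  """Yield every item across all pages of collection.list() (illustration)."""
+  request = collection.list(**list_kwargs)
+  while request is not None:
+    response = request.execute()
+    for item in response.get('items', []):
+      yield item
+    request = collection.list_next(request, response)
+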
+
+class Resource(object):
+  """A class for interacting with a resource."""
+
+  def __init__(self, http, baseUrl, model, requestBuilder, developerKey,
+               resourceDesc, rootDesc, schema):
+    """Build a Resource from the API description.
+
+    Args:
+      http: httplib2.Http, Object to make http requests with.
+      baseUrl: string, base URL for the API. All requests are relative to this
+          URI.
+      model: googleapiclient.Model, converts to and from the wire format.
+      requestBuilder: class or callable that instantiates an
+          googleapiclient.HttpRequest object.
+      developerKey: string, key obtained from
+          https://code.google.com/apis/console
+      resourceDesc: object, section of deserialized discovery document that
+          describes a resource. Note that the top level discovery document
+          is considered a resource.
+      rootDesc: object, the entire deserialized discovery document.
+      schema: object, mapping of schema names to schema descriptions.
+    """
+    self._dynamic_attrs = []
+
+    self._http = http
+    self._baseUrl = baseUrl
+    self._model = model
+    self._developerKey = developerKey
+    self._requestBuilder = requestBuilder
+    self._resourceDesc = resourceDesc
+    self._rootDesc = rootDesc
+    self._schema = schema
+
+    self._set_service_methods()
+
+  def _set_dynamic_attr(self, attr_name, value):
+    """Sets an instance attribute and tracks it in a list of dynamic attributes.
+
+    Args:
+      attr_name: string; The name of the attribute to be set
+      value: The value being set on the object and tracked in the dynamic cache.
+    """
+    self._dynamic_attrs.append(attr_name)
+    self.__dict__[attr_name] = value
+
+  def __getstate__(self):
+    """Trim the state down to something that can be pickled.
+
+    Uses the fact that the instance variable _dynamic_attrs holds attrs that
+    will be wiped and restored on pickle serialization.
+    """
+    state_dict = copy.copy(self.__dict__)
+    for dynamic_attr in self._dynamic_attrs:
+      del state_dict[dynamic_attr]
+    del state_dict['_dynamic_attrs']
+    return state_dict
+
+  def __setstate__(self, state):
+    """Reconstitute the state of the object from being pickled.
+
+    Uses the fact that the instance variable _dynamic_attrs holds attrs that
+    will be wiped and restored on pickle serialization.
+    """
+    self.__dict__.update(state)
+    self._dynamic_attrs = []
+    self._set_service_methods()
+
+  def _set_service_methods(self):
+    self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
+    self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
+    self._add_next_methods(self._resourceDesc, self._schema)
+
+  def _add_basic_methods(self, resourceDesc, rootDesc, schema):
+    # If this is the root Resource, add a new_batch_http_request() method.
+    if resourceDesc == rootDesc:
+      batch_uri = '%s%s' % (
+        rootDesc['rootUrl'], rootDesc.get('batchPath', 'batch'))
+      def new_batch_http_request(callback=None):
+        """Create a BatchHttpRequest object based on the discovery document.
+
+        Args:
+          callback: callable, A callback to be called for each response, of the
+            form callback(id, response, exception). The first parameter is the
+            request id, and the second is the deserialized response object. The
+            third is an apiclient.errors.HttpError exception object if an HTTP
+            error occurred while processing the request, or None if no error
+            occurred.
+
+        Returns:
+          A BatchHttpRequest object based on the discovery document.
+        """
+        return BatchHttpRequest(callback=callback, batch_uri=batch_uri)
+      self._set_dynamic_attr('new_batch_http_request', new_batch_http_request)
+
+    # Add basic methods to Resource
+    if 'methods' in resourceDesc:
+      for methodName, methodDesc in six.iteritems(resourceDesc['methods']):
+        fixedMethodName, method = createMethod(
+            methodName, methodDesc, rootDesc, schema)
+        self._set_dynamic_attr(fixedMethodName,
+                               method.__get__(self, self.__class__))
+        # Add in _media methods. The functionality of the attached method will
+        # change when it sees that the method name ends in _media.
+        if methodDesc.get('supportsMediaDownload', False):
+          fixedMethodName, method = createMethod(
+              methodName + '_media', methodDesc, rootDesc, schema)
+          self._set_dynamic_attr(fixedMethodName,
+                                 method.__get__(self, self.__class__))
+
+  def _add_nested_resources(self, resourceDesc, rootDesc, schema):
+    # Add in nested resources
+    if 'resources' in resourceDesc:
+
+      def createResourceMethod(methodName, methodDesc):
+        """Create a method on the Resource to access a nested Resource.
+
+        Args:
+          methodName: string, name of the method to use.
+          methodDesc: object, fragment of deserialized discovery document that
+            describes the method.
+        """
+        methodName = fix_method_name(methodName)
+
+        def methodResource(self):
+          return Resource(http=self._http, baseUrl=self._baseUrl,
+                          model=self._model, developerKey=self._developerKey,
+                          requestBuilder=self._requestBuilder,
+                          resourceDesc=methodDesc, rootDesc=rootDesc,
+                          schema=schema)
+
+        setattr(methodResource, '__doc__', 'A collection resource.')
+        setattr(methodResource, '__is_resource__', True)
+
+        return (methodName, methodResource)
+
+      for methodName, methodDesc in six.iteritems(resourceDesc['resources']):
+        fixedMethodName, method = createResourceMethod(methodName, methodDesc)
+        self._set_dynamic_attr(fixedMethodName,
+                               method.__get__(self, self.__class__))
+
+  def _add_next_methods(self, resourceDesc, schema):
+    # Add _next() methods
+    # Look for response bodies in schema that contain nextPageToken, and methods
+    # that take a pageToken parameter.
+    if 'methods' in resourceDesc:
+      for methodName, methodDesc in six.iteritems(resourceDesc['methods']):
+        if 'response' in methodDesc:
+          responseSchema = methodDesc['response']
+          if '$ref' in responseSchema:
+            responseSchema = schema.get(responseSchema['$ref'])
+          hasNextPageToken = 'nextPageToken' in responseSchema.get('properties',
+                                                                   {})
+          hasPageToken = 'pageToken' in methodDesc.get('parameters', {})
+          if hasNextPageToken and hasPageToken:
+            fixedMethodName, method = createNextMethod(methodName + '_next')
+            self._set_dynamic_attr(fixedMethodName,
+                                   method.__get__(self, self.__class__))
diff --git a/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/__init__.py b/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/__init__.py
new file mode 100644
index 0000000..f86a06d
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/__init__.py
@@ -0,0 +1,45 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Caching utility for the discovery document."""
+
+from __future__ import absolute_import
+
+import logging
+import datetime
+
+
+LOGGER = logging.getLogger(__name__)
+
+DISCOVERY_DOC_MAX_AGE = 60 * 60 * 24  # 1 day
+
+
+def autodetect():
+  """Detects an appropriate cache module and returns it.
+
+  Returns:
+    googleapiclient.discovery_cache.base.Cache, a cache object which
+    is auto detected, or None if no cache object is available.
+  """
+  try:
+    from google.appengine.api import memcache
+    from . import appengine_memcache
+    return appengine_memcache.cache
+  except Exception:
+    try:
+      from . import file_cache
+      return file_cache.cache
+    except Exception as e:
+      LOGGER.warning(e, exc_info=True)
+      return None
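+
+
+# Editor's note: the sketch below is illustrative only and not part of the
+# upstream module. It shows one way a caller could route a discovery-document
+# fetch through the auto-detected cache; `fetch_discovery_doc` is a
+# hypothetical callable supplied by the caller.
+def _example_cached_fetch(url, fetch_discovery_doc):
+  """Fetch `url` through the auto-detected cache when one is available."""
+  cache = autodetect()
+  content = cache.get(url) if cache else None
+  if content is None:
+    content = fetch_discovery_doc(url)
+    if cache is not None:
+      cache.set(url, content)
+  return content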
diff --git a/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/appengine_memcache.py b/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/appengine_memcache.py
new file mode 100644
index 0000000..7e43e66
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/appengine_memcache.py
@@ -0,0 +1,55 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""App Engine memcache based cache for the discovery document."""
+
+import logging
+
+# This is only an optional dependency because we only import this
+# module when google.appengine.api.memcache is available.
+from google.appengine.api import memcache
+
+from . import base
+from ..discovery_cache import DISCOVERY_DOC_MAX_AGE
+
+
+LOGGER = logging.getLogger(__name__)
+
+NAMESPACE = 'google-api-client'
+
+
+class Cache(base.Cache):
+  """A cache with app engine memcache API."""
+
+  def __init__(self, max_age):
+      """Constructor.
+
+      Args:
+        max_age: Cache expiration in seconds.
+      """
+      self._max_age = max_age
+
+  def get(self, url):
+    try:
+      return memcache.get(url, namespace=NAMESPACE)
+    except Exception as e:
+      LOGGER.warning(e, exc_info=True)
+
+  def set(self, url, content):
+    try:
+      memcache.set(url, content, time=int(self._max_age), namespace=NAMESPACE)
+    except Exception as e:
+      LOGGER.warning(e, exc_info=True)
+
+cache = Cache(max_age=DISCOVERY_DOC_MAX_AGE)
diff --git a/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/base.py b/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/base.py
new file mode 100644
index 0000000..00e466d
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/base.py
@@ -0,0 +1,45 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An abstract class for caching the discovery document."""
+
+import abc
+
+
+class Cache(object):
+  """A base abstract cache class."""
+  __metaclass__ = abc.ABCMeta
+
+  @abc.abstractmethod
+  def get(self, url):
+    """Gets the content from the memcache with a given key.
+
+    Args:
+      url: string, the key for the cache.
+
+    Returns:
+      object, the value in the cache for the given key, or None if the key is
+      not in the cache.
+    """
+    raise NotImplementedError()
+
+  @abc.abstractmethod
+  def set(self, url, content):
+    """Sets the given key and content in the cache.
+
+    Args:
+      url: string, the key for the cache.
+      content: string, the discovery document.
+    """
+    raise NotImplementedError()
diff --git a/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/file_cache.py b/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/file_cache.py
new file mode 100644
index 0000000..31434db
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/discovery_cache/file_cache.py
@@ -0,0 +1,136 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""File based cache for the discovery document.
+
+The cache is stored in a single file so that multiple processes can
+share the same cache. It locks the file whenever it accesses the file.
+If the cache content is corrupted, it is reinitialized as an empty
+cache.
+"""
+
+from __future__ import division
+
+import datetime
+import json
+import logging
+import os
+import tempfile
+import threading
+
+try:
+  from oauth2client.contrib.locked_file import LockedFile
+except ImportError:
+  # oauth2client < 2.0.0
+  from oauth2client.locked_file import LockedFile
+
+from . import base
+from ..discovery_cache import DISCOVERY_DOC_MAX_AGE
+
+LOGGER = logging.getLogger(__name__)
+
+FILENAME = 'google-api-python-client-discovery-doc.cache'
+EPOCH = datetime.datetime.utcfromtimestamp(0)
+
+
+def _to_timestamp(date):
+  try:
+    return (date - EPOCH).total_seconds()
+  except AttributeError:
+    # The following is the equivalent of total_seconds() in Python2.6.
+    # See also: https://docs.python.org/2/library/datetime.html
+    delta = date - EPOCH
+    return ((delta.microseconds + (delta.seconds + delta.days * 24 * 3600)
+             * 10**6) / 10**6)
+
+
+def _read_or_initialize_cache(f):
+  f.file_handle().seek(0)
+  try:
+    cache = json.load(f.file_handle())
+  except Exception:
+    # Either the file is being opened for the first time or the cache is
+    # corrupted, so initialize the file with an empty dict.
+    cache = {}
+    f.file_handle().truncate(0)
+    f.file_handle().seek(0)
+    json.dump(cache, f.file_handle())
+  return cache
+
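+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# Entries are stored as (content, timestamp) pairs keyed by URL, and an entry
+# is fresh while now < timestamp + max_age; this mirrors the freshness check
+# in the Cache class defined below.
+def _example_is_fresh(entry, max_age):
+  """Return True if a (content, timestamp) cache entry is still fresh."""
+  _, timestamp = entry
+  return _to_timestamp(datetime.datetime.now()) < timestamp + max_age
+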
+
+class Cache(base.Cache):
+  """A file based cache for the discovery documents."""
+
+  def __init__(self, max_age):
+      """Constructor.
+
+      Args:
+        max_age: Cache expiration in seconds.
+      """
+      self._max_age = max_age
+      self._file = os.path.join(tempfile.gettempdir(), FILENAME)
+      f = LockedFile(self._file, 'a+', 'r')
+      try:
+        f.open_and_lock()
+        if f.is_locked():
+          _read_or_initialize_cache(f)
+        # If we can not obtain the lock, other process or thread must
+        # have initialized the file.
+      except Exception as e:
+        LOGGER.warning(e, exc_info=True)
+      finally:
+        f.unlock_and_close()
+
+  def get(self, url):
+    f = LockedFile(self._file, 'r+', 'r')
+    try:
+      f.open_and_lock()
+      if f.is_locked():
+        cache = _read_or_initialize_cache(f)
+        if url in cache:
+          content, t = cache.get(url, (None, 0))
+          if _to_timestamp(datetime.datetime.now()) < t + self._max_age:
+            return content
+        return None
+      else:
+        LOGGER.debug('Could not obtain a lock for the cache file.')
+        return None
+    except Exception as e:
+      LOGGER.warning(e, exc_info=True)
+    finally:
+      f.unlock_and_close()
+
+  def set(self, url, content):
+    f = LockedFile(self._file, 'r+', 'r')
+    try:
+      f.open_and_lock()
+      if f.is_locked():
+        cache = _read_or_initialize_cache(f)
+        cache[url] = (content, _to_timestamp(datetime.datetime.now()))
+        # Remove stale cache.
+        for k, (_, timestamp) in list(cache.items()):
+          if _to_timestamp(datetime.datetime.now()) >= timestamp + self._max_age:
+            del cache[k]
+        f.file_handle().truncate(0)
+        f.file_handle().seek(0)
+        json.dump(cache, f.file_handle())
+      else:
+        LOGGER.debug('Could not obtain a lock for the cache file.')
+    except Exception as e:
+      LOGGER.warning(e, exc_info=True)
+    finally:
+      f.unlock_and_close()
+
+
+cache = Cache(max_age=DISCOVERY_DOC_MAX_AGE)
diff --git a/utils/frozen_chromite/third_party/googleapiclient/errors.py b/utils/frozen_chromite/third_party/googleapiclient/errors.py
new file mode 100644
index 0000000..1b79d2f
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/errors.py
@@ -0,0 +1,146 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Errors for the library.
+
+All exceptions defined by the library
+should be defined in this file.
+"""
+from __future__ import absolute_import
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import json
+
+# Oauth2client < 3 has the positional helper in 'util', >= 3 has it
+# in '_helpers'.
+try:
+  from oauth2client import util
+except ImportError:
+  from oauth2client import _helpers as util
+
+
+class Error(Exception):
+  """Base error for this module."""
+  pass
+
+
+class HttpError(Error):
+  """HTTP data was invalid or unexpected."""
+
+  @util.positional(3)
+  def __init__(self, resp, content, uri=None):
+    self.resp = resp
+    if not isinstance(content, bytes):
+        raise TypeError("HTTP content should be bytes")
+    self.content = content
+    self.uri = uri
+
+  def _get_reason(self):
+    """Calculate the reason for the error from the response content."""
+    reason = self.resp.reason
+    try:
+      data = json.loads(self.content.decode('utf-8'))
+      reason = data['error']['message']
+    except (ValueError, KeyError):
+      pass
+    if reason is None:
+      reason = ''
+    return reason
+
+  def __repr__(self):
+    if self.uri:
+      return '<HttpError %s when requesting %s returned "%s">' % (
+          self.resp.status, self.uri, self._get_reason().strip())
+    else:
+      return '<HttpError %s "%s">' % (self.resp.status, self._get_reason())
+
+  __str__ = __repr__
+
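+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# HttpError expects byte content and extracts a human-readable reason from the
+# standard error envelope; FakeResponse is a hypothetical stand-in for an
+# httplib2 response object.
+def _example_http_error_repr():
+  """Build an HttpError from a canned 404 body (illustration only)."""
+  class FakeResponse(object):
+    status = 404
+    reason = 'Not Found'
+  content = b'{"error": {"message": "File not found: abc123"}}'
+  err = HttpError(FakeResponse(), content,
+                  uri='https://www.example.com/f/abc123')
+  return repr(err)
+  # -> '<HttpError 404 when requesting https://www.example.com/f/abc123
+  #      returned "File not found: abc123">'
+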
+
+class InvalidJsonError(Error):
+  """The JSON returned could not be parsed."""
+  pass
+
+
+class UnknownFileType(Error):
+  """File type unknown or unexpected."""
+  pass
+
+
+class UnknownLinkType(Error):
+  """Link type unknown or unexpected."""
+  pass
+
+
+class UnknownApiNameOrVersion(Error):
+  """No API with that name and version exists."""
+  pass
+
+
+class UnacceptableMimeTypeError(Error):
+  """That is an unacceptable mimetype for this operation."""
+  pass
+
+
+class MediaUploadSizeError(Error):
+  """Media is larger than the method can accept."""
+  pass
+
+
+class ResumableUploadError(HttpError):
+  """Error occured during resumable upload."""
+  pass
+
+
+class InvalidChunkSizeError(Error):
+  """The given chunksize is not valid."""
+  pass
+
+class InvalidNotificationError(Error):
+  """The channel Notification is invalid."""
+  pass
+
+class BatchError(HttpError):
+  """Error occured during batch operations."""
+
+  @util.positional(2)
+  def __init__(self, reason, resp=None, content=None):
+    self.resp = resp
+    self.content = content
+    self.reason = reason
+
+  def __repr__(self):
+      return '<BatchError %s "%s">' % (self.resp.status, self.reason)
+
+  __str__ = __repr__
+
+
+class UnexpectedMethodError(Error):
+  """Exception raised by RequestMockBuilder on unexpected calls."""
+
+  @util.positional(1)
+  def __init__(self, methodId=None):
+    """Constructor for an UnexpectedMethodError."""
+    super(UnexpectedMethodError, self).__init__(
+        'Received unexpected call %s' % methodId)
+
+
+class UnexpectedBodyError(Error):
+  """Exception raised by RequestMockBuilder on unexpected bodies."""
+
+  def __init__(self, expected, provided):
+    """Constructor for an UnexpectedMethodError."""
+    super(UnexpectedBodyError, self).__init__(
+        'Expected: [%s] - Provided: [%s]' % (expected, provided))
diff --git a/utils/frozen_chromite/third_party/googleapiclient/http.py b/utils/frozen_chromite/third_party/googleapiclient/http.py
new file mode 100644
index 0000000..34f1081
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/http.py
@@ -0,0 +1,1730 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes to encapsulate a single HTTP request.
+
+The classes implement a command pattern, with every
+object supporting an execute() method that does the
+actual HTTP request.
+"""
+from __future__ import absolute_import
+import six
+from six.moves import http_client
+from six.moves import range
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+from six import BytesIO, StringIO
+from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote
+
+import base64
+import copy
+import gzip
+import httplib2
+import json
+import logging
+import mimetypes
+import os
+import random
+import socket
+import sys
+import time
+import uuid
+
+# TODO(issue 221): Remove this conditional import.
+try:
+  import ssl
+except ImportError:
+  _ssl_SSLError = object()
+else:
+  _ssl_SSLError = ssl.SSLError
+
+from email.generator import Generator
+from email.mime.multipart import MIMEMultipart
+from email.mime.nonmultipart import MIMENonMultipart
+from email.parser import FeedParser
+
+# Oauth2client < 3 has the positional helper in 'util', >= 3 has it
+# in '_helpers'.
+try:
+  from oauth2client import util
+except ImportError:
+  from oauth2client import _helpers as util
+
+from googleapiclient import mimeparse
+from googleapiclient.errors import BatchError
+from googleapiclient.errors import HttpError
+from googleapiclient.errors import InvalidChunkSizeError
+from googleapiclient.errors import ResumableUploadError
+from googleapiclient.errors import UnexpectedBodyError
+from googleapiclient.errors import UnexpectedMethodError
+from googleapiclient.model import JsonModel
+
+
+LOGGER = logging.getLogger(__name__)
+
+DEFAULT_CHUNK_SIZE = 512*1024
+
+MAX_URI_LENGTH = 2048
+
+_TOO_MANY_REQUESTS = 429
+
+
+def _should_retry_response(resp_status, content):
+  """Determines whether a response should be retried.
+
+  Args:
+    resp_status: The response status received.
+    content: The response content body.
+
+  Returns:
+    True if the response should be retried, otherwise False.
+  """
+  # Retry on 5xx errors.
+  if resp_status >= 500:
+    return True
+
+  # Retry on 429 errors.
+  if resp_status == _TOO_MANY_REQUESTS:
+    return True
+
+  # For 403 errors, we have to check for the `reason` in the response to
+  # determine if we should retry.
+  if resp_status == six.moves.http_client.FORBIDDEN:
+    # If there are no details about the 403 type, don't retry.
+    if not content:
+      return False
+
+    # Content is in JSON format.
+    try:
+      data = json.loads(content.decode('utf-8'))
+      reason = data['error']['errors'][0]['reason']
+    except (UnicodeDecodeError, ValueError, KeyError):
+      LOGGER.warning('Invalid JSON content from response: %s', content)
+      return False
+
+    LOGGER.warning('Encountered 403 Forbidden with reason "%s"', reason)
+
+    # Only retry on rate limit related failures.
+    if reason in ('userRateLimitExceeded', 'rateLimitExceeded', ):
+      return True
+
+  # Everything else is a success or non-retriable so break.
+  return False
+
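+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# The predicate above treats 5xx and 429 responses as always retriable, 403
+# responses as retriable only for rate-limit reasons, and everything else as
+# final.
+def _example_retry_decisions():
+  """Return a few representative retry decisions (illustration only)."""
+  rate_limited = b'{"error": {"errors": [{"reason": "rateLimitExceeded"}]}}'
+  forbidden = b'{"error": {"errors": [{"reason": "insufficientPermissions"}]}}'
+  return [
+      _should_retry_response(503, b''),           # True  (5xx)
+      _should_retry_response(429, b''),           # True  (too many requests)
+      _should_retry_response(403, rate_limited),  # True  (rate limited)
+      _should_retry_response(403, forbidden),     # False (other 403)
+      _should_retry_response(404, b''),           # False
+  ]
+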
+
+def _retry_request(http, num_retries, req_type, sleep, rand, uri, method, *args,
+                   **kwargs):
+  """Retries an HTTP request multiple times while handling errors.
+
+  If after all retries the request still fails, last error is either returned as
+  return value (for HTTP 5xx errors) or thrown (for ssl.SSLError).
+
+  Args:
+    http: Http object to be used to execute request.
+    num_retries: Maximum number of retries.
+    req_type: Type of the request (used for logging retries).
+    sleep, rand: Functions to sleep for random time between retries.
+    uri: URI to be requested.
+    method: HTTP method to be used.
+    args, kwargs: Additional arguments passed to http.request.
+
+  Returns:
+    resp, content - Response from the http request (may be HTTP 5xx).
+  """
+  resp = None
+  content = None
+  for retry_num in range(num_retries + 1):
+    if retry_num > 0:
+      # Sleep before retrying.
+      sleep_time = rand() * 2 ** retry_num
+      LOGGER.warning(
+          'Sleeping %.2f seconds before retry %d of %d for %s: %s %s, after %s',
+          sleep_time, retry_num, num_retries, req_type, method, uri,
+          resp.status if resp else exception)
+      sleep(sleep_time)
+
+    try:
+      exception = None
+      resp, content = http.request(uri, method, *args, **kwargs)
+    # Retry on SSL errors and socket timeout errors.
+    except _ssl_SSLError as ssl_error:
+      exception = ssl_error
+    except socket.error as socket_error:
+      # errno's contents differ by platform, so we have to match by name.
+      if socket.errno.errorcode.get(socket_error.errno) not in (
+          'WSAETIMEDOUT', 'ETIMEDOUT', 'EPIPE', 'ECONNABORTED', ):
+        raise
+      exception = socket_error
+
+    if exception:
+      if retry_num == num_retries:
+        raise exception
+      else:
+        continue
+
+    if not _should_retry_response(resp.status, content):
+      break
+
+  return resp, content
+
+
+class MediaUploadProgress(object):
+  """Status of a resumable upload."""
+
+  def __init__(self, resumable_progress, total_size):
+    """Constructor.
+
+    Args:
+      resumable_progress: int, bytes sent so far.
+      total_size: int, total bytes in complete upload, or None if the total
+        upload size isn't known ahead of time.
+    """
+    self.resumable_progress = resumable_progress
+    self.total_size = total_size
+
+  def progress(self):
+    """Percent of upload completed, as a float.
+
+    Returns:
+      the percentage complete as a float, returning 0.0 if the total size of
+      the upload is unknown.
+    """
+    if self.total_size is not None:
+      return float(self.resumable_progress) / float(self.total_size)
+    else:
+      return 0.0
+
+
+class MediaDownloadProgress(object):
+  """Status of a resumable download."""
+
+  def __init__(self, resumable_progress, total_size):
+    """Constructor.
+
+    Args:
+      resumable_progress: int, bytes received so far.
+      total_size: int, total bytes in complete download.
+    """
+    self.resumable_progress = resumable_progress
+    self.total_size = total_size
+
+  def progress(self):
+    """Percent of download completed, as a float.
+
+    Returns:
+      the percentage complete as a float, returning 0.0 if the total size of
+      the download is unknown.
+    """
+    if self.total_size is not None:
+      return float(self.resumable_progress) / float(self.total_size)
+    else:
+      return 0.0
+
+
+class MediaUpload(object):
+  """Describes a media object to upload.
+
+  Base class that defines the interface of MediaUpload subclasses.
+
+  Note that subclasses of MediaUpload may allow you to control the chunksize
+  when uploading a media object. It is important to keep the size of the chunk
+  as large as possible to keep the upload efficient. Other factors may influence
+  the size of the chunk you use, particularly if you are working in an
+  environment where individual HTTP requests may have a hardcoded time limit,
+  such as for certain classes of requests on Google App Engine.
+
+  Streams are io.Base compatible objects that support seek(). Some MediaUpload
+  subclasses support using streams directly to upload data. Support for
+  streaming may be indicated by a MediaUpload sub-class and if appropriate for a
+  platform that stream will be used for uploading the media object. The support
+  for streaming is indicated by has_stream() returning True. The stream() method
+  should return an io.Base object that supports seek(). On platforms where the
+  underlying httplib module supports streaming, for example Python 2.6 and
+  later, the stream will be passed into the http library which will result in
+  less memory being used and possibly faster uploads.
+
+  If you need to upload media that can't be uploaded using any of the existing
+  MediaUpload sub-class then you can sub-class MediaUpload for your particular
+  needs.
+  """
+
+  def chunksize(self):
+    """Chunk size for resumable uploads.
+
+    Returns:
+      Chunk size in bytes.
+    """
+    raise NotImplementedError()
+
+  def mimetype(self):
+    """Mime type of the body.
+
+    Returns:
+      Mime type.
+    """
+    return 'application/octet-stream'
+
+  def size(self):
+    """Size of upload.
+
+    Returns:
+      Size of the body, or None if the size is unknown.
+    """
+    return None
+
+  def resumable(self):
+    """Whether this upload is resumable.
+
+    Returns:
+      True if resumable upload or False.
+    """
+    return False
+
+  def getbytes(self, begin, length):
+    """Get bytes from the media.
+
+    Args:
+      begin: int, offset from beginning of file.
+      length: int, number of bytes to read, starting at begin.
+
+    Returns:
+      A string of bytes read. May be shorter than length if EOF was reached
+      first.
+    """
+    raise NotImplementedError()
+
+  def has_stream(self):
+    """Does the underlying upload support a streaming interface.
+
+    Streaming means it is an io.IOBase subclass that supports seek, i.e.
+    seekable() returns True.
+
+    Returns:
+      True if the call to stream() will return an instance of a seekable io.Base
+      subclass.
+    """
+    return False
+
+  def stream(self):
+    """A stream interface to the data being uploaded.
+
+    Returns:
+      The returned value is an io.IOBase subclass that supports seek, i.e.
+      seekable() returns True.
+    """
+    raise NotImplementedError()
+
+  @util.positional(1)
+  def _to_json(self, strip=None):
+    """Utility function for creating a JSON representation of a MediaUpload.
+
+    Args:
+      strip: array, An array of names of members to not include in the JSON.
+
+    Returns:
+       string, a JSON representation of this instance, suitable to pass to
+       from_json().
+    """
+    t = type(self)
+    d = copy.copy(self.__dict__)
+    if strip is not None:
+      for member in strip:
+        del d[member]
+    d['_class'] = t.__name__
+    d['_module'] = t.__module__
+    return json.dumps(d)
+
+  def to_json(self):
+    """Create a JSON representation of an instance of MediaUpload.
+
+    Returns:
+       string, a JSON representation of this instance, suitable to pass to
+       from_json().
+    """
+    return self._to_json()
+
+  @classmethod
+  def new_from_json(cls, s):
+    """Utility class method to instantiate a MediaUpload subclass from a JSON
+    representation produced by to_json().
+
+    Args:
+      s: string, JSON from to_json().
+
+    Returns:
+      An instance of the subclass of MediaUpload that was serialized with
+      to_json().
+    """
+    data = json.loads(s)
+    # Find and call the right classmethod from_json() to restore the object.
+    module = data['_module']
+    m = __import__(module, fromlist=module.split('.')[:-1])
+    kls = getattr(m, data['_class'])
+    from_json = getattr(kls, 'from_json')
+    return from_json(s)
+
+
+class MediaIoBaseUpload(MediaUpload):
+  """A MediaUpload for a io.Base objects.
+
+  Note that the Python file object is compatible with io.Base and can be used
+  with this class also.
+
+    fh = BytesIO(b'...Some data to upload...')
+    media = MediaIoBaseUpload(fh, mimetype='image/png',
+      chunksize=1024*1024, resumable=True)
+    farm.animals().insert(
+        id='cow',
+        name='cow.png',
+        media_body=media).execute()
+
+  Depending on the platform you are working on, you may pass -1 as the
+  chunksize, which indicates that the entire file should be uploaded in a single
+  request. If the underlying platform supports streams, such as Python 2.6 or
+  later, then this can be very efficient as it avoids multiple connections, and
+  also avoids loading the entire file into memory before sending it. Note that
+  Google App Engine has a 5MB limit on request size, so you should never set
+  your chunksize larger than 5MB, or to -1.
+  """
+
+  @util.positional(3)
+  def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE,
+      resumable=False):
+    """Constructor.
+
+    Args:
+      fd: io.Base or file object, The source of the bytes to upload. MUST be
+        opened in blocking mode, do not use streams opened in non-blocking mode.
+        The given stream must be seekable, that is, it must be able to call
+        seek() on fd.
+      mimetype: string, Mime-type of the file.
+      chunksize: int, File will be uploaded in chunks of this many bytes. Only
+        used if resumable=True. Pass in a value of -1 if the file is to be
+        uploaded as a single chunk. Note that Google App Engine has a 5MB limit
+        on request size, so you should never set your chunksize larger than 5MB,
+        or to -1.
+      resumable: bool, True if this is a resumable upload. False means upload
+        in a single request.
+    """
+    super(MediaIoBaseUpload, self).__init__()
+    self._fd = fd
+    self._mimetype = mimetype
+    if not (chunksize == -1 or chunksize > 0):
+      raise InvalidChunkSizeError()
+    self._chunksize = chunksize
+    self._resumable = resumable
+
+    self._fd.seek(0, os.SEEK_END)
+    self._size = self._fd.tell()
+
+  def chunksize(self):
+    """Chunk size for resumable uploads.
+
+    Returns:
+      Chunk size in bytes.
+    """
+    return self._chunksize
+
+  def mimetype(self):
+    """Mime type of the body.
+
+    Returns:
+      Mime type.
+    """
+    return self._mimetype
+
+  def size(self):
+    """Size of upload.
+
+    Returns:
+      Size of the body, or None if the size is unknown.
+    """
+    return self._size
+
+  def resumable(self):
+    """Whether this upload is resumable.
+
+    Returns:
+      True if resumable upload or False.
+    """
+    return self._resumable
+
+  def getbytes(self, begin, length):
+    """Get bytes from the media.
+
+    Args:
+      begin: int, offset from beginning of file.
+      length: int, number of bytes to read, starting at begin.
+
+    Returns:
+      A string of bytes read. May be shorter than length if EOF was reached
+      first.
+    """
+    self._fd.seek(begin)
+    return self._fd.read(length)
+
+  def has_stream(self):
+    """Does the underlying upload support a streaming interface.
+
+    Streaming means it is an io.IOBase subclass that supports seek, i.e.
+    seekable() returns True.
+
+    Returns:
+      True if the call to stream() will return an instance of a seekable io.Base
+      subclass.
+    """
+    return True
+
+  def stream(self):
+    """A stream interface to the data being uploaded.
+
+    Returns:
+      The returned value is an io.IOBase subclass that supports seek, i.e.
+      seekable() returns True.
+    """
+    return self._fd
+
+  def to_json(self):
+    """This upload type is not serializable."""
+    raise NotImplementedError('MediaIoBaseUpload is not serializable.')
+
+
+class MediaFileUpload(MediaIoBaseUpload):
+  """A MediaUpload for a file.
+
+  Construct a MediaFileUpload and pass as the media_body parameter of the
+  method. For example, if we had a service that allowed uploading images:
+
+
+    media = MediaFileUpload('cow.png', mimetype='image/png',
+      chunksize=1024*1024, resumable=True)
+    farm.animals().insert(
+        id='cow',
+        name='cow.png',
+        media_body=media).execute()
+
+  Depending on the platform you are working on, you may pass -1 as the
+  chunksize, which indicates that the entire file should be uploaded in a single
+  request. If the underlying platform supports streams, such as Python 2.6 or
+  later, then this can be very efficient as it avoids multiple connections, and
+  also avoids loading the entire file into memory before sending it. Note that
+  Google App Engine has a 5MB limit on request size, so you should never set
+  your chunksize larger than 5MB, or to -1.
+  """
+
+  @util.positional(2)
+  def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE,
+               resumable=False):
+    """Constructor.
+
+    Args:
+      filename: string, Name of the file.
+      mimetype: string, Mime-type of the file. If None then a mime-type will be
+        guessed from the file extension.
+      chunksize: int, File will be uploaded in chunks of this many bytes. Only
+        used if resumable=True. Pass in a value of -1 if the file is to be
+        uploaded in a single chunk. Note that Google App Engine has a 5MB limit
+        on request size, so you should never set your chunksize larger than 5MB,
+        or to -1.
+      resumable: bool, True if this is a resumable upload. False means upload
+        in a single request.
+    """
+    self._filename = filename
+    fd = open(self._filename, 'rb')
+    if mimetype is None:
+      # No mimetype provided, make a guess.
+      mimetype, _ = mimetypes.guess_type(filename)
+      if mimetype is None:
+        # Guess failed, use octet-stream.
+        mimetype = 'application/octet-stream'
+    super(MediaFileUpload, self).__init__(fd, mimetype, chunksize=chunksize,
+                                          resumable=resumable)
+
+  def to_json(self):
+    """Creating a JSON representation of an instance of MediaFileUpload.
+
+    Returns:
+       string, a JSON representation of this instance, suitable to pass to
+       from_json().
+    """
+    return self._to_json(strip=['_fd'])
+
+  @staticmethod
+  def from_json(s):
+    d = json.loads(s)
+    return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'],
+                           chunksize=d['_chunksize'], resumable=d['_resumable'])
+
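+
+# Editorial sketch, not part of the upstream module: round-tripping a
+# MediaFileUpload through to_json()/new_from_json(). The file name 'cow.png'
+# is hypothetical and must exist on disk for the restore to reopen it.
+def _example_media_upload_roundtrip():
+  upload = MediaFileUpload('cow.png', mimetype='image/png', resumable=True)
+  serialized = upload.to_json()                      # the open file handle is dropped
+  restored = MediaUpload.new_from_json(serialized)   # reopened by filename
+  return restored.mimetype(), restored.size()
+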
+
+class MediaInMemoryUpload(MediaIoBaseUpload):
+  """MediaUpload for a chunk of bytes.
+
+  DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
+  the stream.
+  """
+
+  @util.positional(2)
+  def __init__(self, body, mimetype='application/octet-stream',
+               chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
+    """Create a new MediaInMemoryUpload.
+
+  DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
+  the stream.
+
+  Args:
+    body: string, Bytes of body content.
+    mimetype: string, Mime-type of the file or default of
+      'application/octet-stream'.
+    chunksize: int, File will be uploaded in chunks of this many bytes. Only
+      used if resumable=True.
+    resumable: bool, True if this is a resumable upload. False means upload
+      in a single request.
+    """
+    fd = BytesIO(body)
+    super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize,
+                                              resumable=resumable)
+
+
+class MediaIoBaseDownload(object):
+  """"Download media resources.
+
+  Note that the Python file object is compatible with io.Base and can be used
+  with this class also.
+
+
+  Example:
+    request = farms.animals().get_media(id='cow')
+    fh = io.FileIO('cow.png', mode='wb')
+    downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)
+
+    done = False
+    while done is False:
+      status, done = downloader.next_chunk()
+      if status:
+        print "Download %d%%." % int(status.progress() * 100)
+    print "Download Complete!"
+  """
+
+  @util.positional(3)
+  def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
+    """Constructor.
+
+    Args:
+      fd: io.Base or file object, The stream in which to write the downloaded
+        bytes.
+      request: googleapiclient.http.HttpRequest, the media request to perform in
+        chunks.
+      chunksize: int, File will be downloaded in chunks of this many bytes.
+    """
+    self._fd = fd
+    self._request = request
+    self._uri = request.uri
+    self._chunksize = chunksize
+    self._progress = 0
+    self._total_size = None
+    self._done = False
+
+    # Stubs for testing.
+    self._sleep = time.sleep
+    self._rand = random.random
+
+  @util.positional(1)
+  def next_chunk(self, num_retries=0):
+    """Get the next chunk of the download.
+
+    Args:
+      num_retries: Integer, number of times to retry with randomized
+            exponential backoff. If all retries fail, the raised HttpError
+            represents the last request. If zero (default), we attempt the
+            request only once.
+
+    Returns:
+      (status, done): (MediaDownloadStatus, boolean)
+         The value of 'done' will be True when the media has been fully
+         downloaded.
+
+    Raises:
+      googleapiclient.errors.HttpError if the response was not a 2xx.
+      httplib2.HttpLib2Error if a transport error has occurred.
+    """
+    headers = {
+        'range': 'bytes=%d-%d' % (
+            self._progress, self._progress + self._chunksize)
+        }
+    http = self._request.http
+
+    resp, content = _retry_request(
+        http, num_retries, 'media download', self._sleep, self._rand, self._uri,
+        'GET', headers=headers)
+
+    if resp.status in [200, 206]:
+      if 'content-location' in resp and resp['content-location'] != self._uri:
+        self._uri = resp['content-location']
+      self._progress += len(content)
+      self._fd.write(content)
+
+      if 'content-range' in resp:
+        content_range = resp['content-range']
+        length = content_range.rsplit('/', 1)[1]
+        self._total_size = int(length)
+      elif 'content-length' in resp:
+        self._total_size = int(resp['content-length'])
+
+      if self._progress == self._total_size:
+        self._done = True
+      return MediaDownloadProgress(self._progress, self._total_size), self._done
+    else:
+      raise HttpError(resp, content, uri=self._uri)
+
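+
+# Editorial sketch, not part of the upstream module: exercising the download
+# loop from the class docstring against HttpMockSequence (defined later in
+# this module), so no network access is needed.
+def _example_mock_download():
+  from io import BytesIO
+  http = HttpMockSequence([
+      ({'status': '200', 'content-range': 'bytes 0-2/6'}, b'abc'),
+      ({'status': '200', 'content-range': 'bytes 3-5/6'}, b'def'),
+  ])
+  request = HttpRequest(http, lambda resp, content: content,
+                        'http://example.com/media')
+  fh = BytesIO()
+  downloader = MediaIoBaseDownload(fh, request, chunksize=3)
+  done = False
+  while not done:
+    status, done = downloader.next_chunk()
+  return fh.getvalue()  # b'abcdef'
+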
+
+class _StreamSlice(object):
+  """Truncated stream.
+
+  Takes a stream and presents a stream that is a slice of the original stream.
+  This is used when uploading media in chunks. In later versions of Python a
+  stream can be passed to httplib in place of the string of data to send. The
+  problem is that httplib just blindly reads to the end of the stream. This
+  wrapper presents a virtual stream that only reads to the end of the chunk.
+  """
+
+  def __init__(self, stream, begin, chunksize):
+    """Constructor.
+
+    Args:
+      stream: (io.Base, file object), the stream to wrap.
+      begin: int, the seek position the chunk begins at.
+      chunksize: int, the size of the chunk.
+    """
+    self._stream = stream
+    self._begin = begin
+    self._chunksize = chunksize
+    self._stream.seek(begin)
+
+  def read(self, n=-1):
+    """Read n bytes.
+
+    Args:
+      n: int, the number of bytes to read.
+
+    Returns:
+      A string of length 'n', or less if EOF is reached.
+    """
+    # The data left available to read sits in [cur, end)
+    cur = self._stream.tell()
+    end = self._begin + self._chunksize
+    if n == -1 or cur + n > end:
+      n = end - cur
+    return self._stream.read(n)
+
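+
+# Editorial sketch, not part of the upstream module: a _StreamSlice never
+# reads past the end of its chunk, even when asked for more.
+def _example_stream_slice():
+  from io import BytesIO
+  stream = BytesIO(b'0123456789')
+  chunk = _StreamSlice(stream, 4, 3)  # covers bytes 4, 5 and 6
+  assert chunk.read() == b'456'       # read(-1) stops at the chunk boundary
+  assert chunk.read() == b''          # the slice is exhausted
+  return chunk
+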
+
+class HttpRequest(object):
+  """Encapsulates a single HTTP request."""
+
+  @util.positional(4)
+  def __init__(self, http, postproc, uri,
+               method='GET',
+               body=None,
+               headers=None,
+               methodId=None,
+               resumable=None):
+    """Constructor for an HttpRequest.
+
+    Args:
+      http: httplib2.Http, the transport object to use to make a request
+      postproc: callable, called on the HTTP response and content to transform
+                it into a data object before returning, or raising an exception
+                on an error.
+      uri: string, the absolute URI to send the request to
+      method: string, the HTTP method to use
+      body: string, the request body of the HTTP request,
+      headers: dict, the HTTP request headers
+      methodId: string, a unique identifier for the API method being called.
+      resumable: MediaUpload, None if this is not a resumable request.
+    """
+    self.uri = uri
+    self.method = method
+    self.body = body
+    self.headers = headers or {}
+    self.methodId = methodId
+    self.http = http
+    self.postproc = postproc
+    self.resumable = resumable
+    self.response_callbacks = []
+    self._in_error_state = False
+
+    # Pull the multipart boundary out of the content-type header.
+    major, minor, params = mimeparse.parse_mime_type(
+        self.headers.get('content-type', 'application/json'))
+
+    # The size of the non-media part of the request.
+    self.body_size = len(self.body or '')
+
+    # The resumable URI to send chunks to.
+    self.resumable_uri = None
+
+    # The bytes that have been uploaded.
+    self.resumable_progress = 0
+
+    # Stubs for testing.
+    self._rand = random.random
+    self._sleep = time.sleep
+
+  @util.positional(1)
+  def execute(self, http=None, num_retries=0):
+    """Execute the request.
+
+    Args:
+      http: httplib2.Http, an http object to be used in place of the
+            one the HttpRequest request object was constructed with.
+      num_retries: Integer, number of times to retry with randomized
+            exponential backoff. If all retries fail, the raised HttpError
+            represents the last request. If zero (default), we attempt the
+            request only once.
+
+    Returns:
+      A deserialized object model of the response body as determined
+      by the postproc.
+
+    Raises:
+      googleapiclient.errors.HttpError if the response was not a 2xx.
+      httplib2.HttpLib2Error if a transport error has occurred.
+    """
+    if http is None:
+      http = self.http
+
+    if self.resumable:
+      body = None
+      while body is None:
+        _, body = self.next_chunk(http=http, num_retries=num_retries)
+      return body
+
+    # Non-resumable case.
+
+    if 'content-length' not in self.headers:
+      self.headers['content-length'] = str(self.body_size)
+    # If the request URI is too long then turn it into a POST request.
+    if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET':
+      self.method = 'POST'
+      self.headers['x-http-method-override'] = 'GET'
+      self.headers['content-type'] = 'application/x-www-form-urlencoded'
+      parsed = urlparse(self.uri)
+      self.uri = urlunparse(
+          (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
+           None)
+          )
+      self.body = parsed.query
+      self.headers['content-length'] = str(len(self.body))
+
+    # Handle retries for server-side errors.
+    resp, content = _retry_request(
+          http, num_retries, 'request', self._sleep, self._rand, str(self.uri),
+          method=str(self.method), body=self.body, headers=self.headers)
+
+    for callback in self.response_callbacks:
+      callback(resp)
+    if resp.status >= 300:
+      raise HttpError(resp, content, uri=self.uri)
+    return self.postproc(resp, content)
+
+  @util.positional(2)
+  def add_response_callback(self, cb):
+    """add_response_headers_callback
+
+    Args:
+      cb: Callback to be called on receiving the response headers, of signature:
+
+      def cb(resp):
+        # Where resp is an instance of httplib2.Response
+    """
+    self.response_callbacks.append(cb)
+
+  @util.positional(1)
+  def next_chunk(self, http=None, num_retries=0):
+    """Execute the next step of a resumable upload.
+
+    Can only be used if the method being executed supports media uploads and
+    the MediaUpload object passed in was flagged as using resumable upload.
+
+    Example:
+
+      media = MediaFileUpload('cow.png', mimetype='image/png',
+                              chunksize=1000, resumable=True)
+      request = farm.animals().insert(
+          id='cow',
+          name='cow.png',
+          media_body=media)
+
+      response = None
+      while response is None:
+        status, response = request.next_chunk()
+        if status:
+          print "Upload %d%% complete." % int(status.progress() * 100)
+
+
+    Args:
+      http: httplib2.Http, an http object to be used in place of the
+            one the HttpRequest request object was constructed with.
+      num_retries: Integer, number of times to retry with randomized
+            exponential backoff. If all retries fail, the raised HttpError
+            represents the last request. If zero (default), we attempt the
+            request only once.
+
+    Returns:
+      (status, body): (ResumableMediaStatus, object)
+         The body will be None until the resumable media is fully uploaded.
+
+    Raises:
+      googleapiclient.errors.HttpError if the response was not a 2xx.
+      httplib2.HttpLib2Error if a transport error has occurred.
+    """
+    if http is None:
+      http = self.http
+
+    if self.resumable.size() is None:
+      size = '*'
+    else:
+      size = str(self.resumable.size())
+
+    if self.resumable_uri is None:
+      start_headers = copy.copy(self.headers)
+      start_headers['X-Upload-Content-Type'] = self.resumable.mimetype()
+      if size != '*':
+        start_headers['X-Upload-Content-Length'] = size
+      start_headers['content-length'] = str(self.body_size)
+
+      resp, content = _retry_request(
+          http, num_retries, 'resumable URI request', self._sleep, self._rand,
+          self.uri, method=self.method, body=self.body, headers=start_headers)
+
+      if resp.status == 200 and 'location' in resp:
+        self.resumable_uri = resp['location']
+      else:
+        raise ResumableUploadError(resp, content)
+    elif self._in_error_state:
+      # If we are in an error state then query the server for current state of
+      # the upload by sending an empty PUT and reading the 'range' header in
+      # the response.
+      headers = {
+          'Content-Range': 'bytes */%s' % size,
+          'content-length': '0'
+          }
+      resp, content = http.request(self.resumable_uri, 'PUT',
+                                   headers=headers)
+      status, body = self._process_response(resp, content)
+      if body:
+        # The upload was complete.
+        return (status, body)
+
+    if self.resumable.has_stream():
+      data = self.resumable.stream()
+      if self.resumable.chunksize() == -1:
+        data.seek(self.resumable_progress)
+        chunk_end = self.resumable.size() - self.resumable_progress - 1
+      else:
+        # Doing chunking with a stream, so wrap a slice of the stream.
+        data = _StreamSlice(data, self.resumable_progress,
+                            self.resumable.chunksize())
+        chunk_end = min(
+            self.resumable_progress + self.resumable.chunksize() - 1,
+            self.resumable.size() - 1)
+    else:
+      data = self.resumable.getbytes(
+          self.resumable_progress, self.resumable.chunksize())
+
+      # A short read implies that we are at EOF, so finish the upload.
+      if len(data) < self.resumable.chunksize():
+        size = str(self.resumable_progress + len(data))
+
+      chunk_end = self.resumable_progress + len(data) - 1
+
+    headers = {
+        'Content-Range': 'bytes %d-%d/%s' % (
+            self.resumable_progress, chunk_end, size),
+        # Must set the content-length header here because httplib can't
+        # calculate the size when working with _StreamSlice.
+        'Content-Length': str(chunk_end - self.resumable_progress + 1)
+        }
+
+    for retry_num in range(num_retries + 1):
+      if retry_num > 0:
+        self._sleep(self._rand() * 2**retry_num)
+        LOGGER.warning(
+            'Retry #%d for media upload: %s %s, following status: %d'
+            % (retry_num, self.method, self.uri, resp.status))
+
+      try:
+        resp, content = http.request(self.resumable_uri, method='PUT',
+                                     body=data,
+                                     headers=headers)
+      except:
+        self._in_error_state = True
+        raise
+      if not _should_retry_response(resp.status, content):
+        break
+
+    return self._process_response(resp, content)
+
+  def _process_response(self, resp, content):
+    """Process the response from a single chunk upload.
+
+    Args:
+      resp: httplib2.Response, the response object.
+      content: string, the content of the response.
+
+    Returns:
+      (status, body): (ResumableMediaStatus, object)
+         The body will be None until the resumable media is fully uploaded.
+
+    Raises:
+      googleapiclient.errors.HttpError if the response was not a 2xx or a 308.
+    """
+    if resp.status in [200, 201]:
+      self._in_error_state = False
+      return None, self.postproc(resp, content)
+    elif resp.status == 308:
+      self._in_error_state = False
+      # A "308 Resume Incomplete" indicates we are not done.
+      self.resumable_progress = int(resp['range'].split('-')[1]) + 1
+      if 'location' in resp:
+        self.resumable_uri = resp['location']
+    else:
+      self._in_error_state = True
+      raise HttpError(resp, content, uri=self.uri)
+
+    return (MediaUploadProgress(self.resumable_progress, self.resumable.size()),
+            None)
+
+  def to_json(self):
+    """Returns a JSON representation of the HttpRequest."""
+    d = copy.copy(self.__dict__)
+    if d['resumable'] is not None:
+      d['resumable'] = self.resumable.to_json()
+    del d['http']
+    del d['postproc']
+    del d['_sleep']
+    del d['_rand']
+
+    return json.dumps(d)
+
+  @staticmethod
+  def from_json(s, http, postproc):
+    """Returns an HttpRequest populated with info from a JSON object."""
+    d = json.loads(s)
+    if d['resumable'] is not None:
+      d['resumable'] = MediaUpload.new_from_json(d['resumable'])
+    return HttpRequest(
+        http,
+        postproc,
+        uri=d['uri'],
+        method=d['method'],
+        body=d['body'],
+        headers=d['headers'],
+        methodId=d['methodId'],
+        resumable=d['resumable'])
+
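+
+# Editorial sketch, not part of the upstream module: persisting an HttpRequest
+# with to_json()/from_json(). The transport and postproc are not serialized
+# and must be supplied again when restoring.
+def _example_request_roundtrip():
+  http = HttpMock(headers={'status': '200'})
+  postproc = JsonModel(False).response
+  original = HttpRequest(http, postproc, 'http://example.com/items',
+                         method='POST', body='{}',
+                         headers={'content-type': 'application/json'})
+  restored = HttpRequest.from_json(original.to_json(), http, postproc)
+  return restored.uri, restored.method, restored.headers
+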
+
+class BatchHttpRequest(object):
+  """Batches multiple HttpRequest objects into a single HTTP request.
+
+  Example:
+    from googleapiclient.http import BatchHttpRequest
+
+    def list_animals(request_id, response, exception):
+      \"\"\"Do something with the animals list response.\"\"\"
+      if exception is not None:
+        # Do something with the exception.
+        pass
+      else:
+        # Do something with the response.
+        pass
+
+    def list_farmers(request_id, response, exception):
+      \"\"\"Do something with the farmers list response.\"\"\"
+      if exception is not None:
+        # Do something with the exception.
+        pass
+      else:
+        # Do something with the response.
+        pass
+
+    service = build('farm', 'v2')
+
+    batch = BatchHttpRequest()
+
+    batch.add(service.animals().list(), list_animals)
+    batch.add(service.farmers().list(), list_farmers)
+    batch.execute(http=http)
+  """
+
+  @util.positional(1)
+  def __init__(self, callback=None, batch_uri=None):
+    """Constructor for a BatchHttpRequest.
+
+    Args:
+      callback: callable, A callback to be called for each response, of the
+        form callback(id, response, exception). The first parameter is the
+        request id, and the second is the deserialized response object. The
+        third is a googleapiclient.errors.HttpError exception object if an HTTP error
+        occurred while processing the request, or None if no error occurred.
+      batch_uri: string, URI to send batch requests to.
+    """
+    if batch_uri is None:
+      batch_uri = 'https://www.googleapis.com/batch'
+    self._batch_uri = batch_uri
+
+    # Global callback to be called for each individual response in the batch.
+    self._callback = callback
+
+    # A map from id to request.
+    self._requests = {}
+
+    # A map from id to callback.
+    self._callbacks = {}
+
+    # List of request ids, in the order in which they were added.
+    self._order = []
+
+    # The last auto generated id.
+    self._last_auto_id = 0
+
+    # Unique ID on which to base the Content-ID headers.
+    self._base_id = None
+
+    # A map from request id to (httplib2.Response, content) response pairs
+    self._responses = {}
+
+    # A map of id(Credentials) that have been refreshed.
+    self._refreshed_credentials = {}
+
+  def _refresh_and_apply_credentials(self, request, http):
+    """Refresh the credentials and apply to the request.
+
+    Args:
+      request: HttpRequest, the request.
+      http: httplib2.Http, the global http object for the batch.
+    """
+    # Ask the credentials to refresh, but only once per refresh_token.
+    # If the request does not carry its own http object, refresh the
+    # credentials of the http object passed in via execute().
+    creds = None
+    if request.http is not None and hasattr(request.http.request,
+        'credentials'):
+      creds = request.http.request.credentials
+    elif http is not None and hasattr(http.request, 'credentials'):
+      creds = http.request.credentials
+    if creds is not None:
+      if id(creds) not in self._refreshed_credentials:
+        creds.refresh(http)
+        self._refreshed_credentials[id(creds)] = 1
+
+    # Only apply the credentials if we are using the http object passed in,
+    # otherwise apply() will get called during _serialize_request().
+    if request.http is None or not hasattr(request.http.request,
+        'credentials'):
+      creds.apply(request.headers)
+
+  def _id_to_header(self, id_):
+    """Convert an id to a Content-ID header value.
+
+    Args:
+      id_: string, identifier of individual request.
+
+    Returns:
+      A Content-ID header with the id_ encoded into it. A UUID is prepended to
+      the value because Content-ID headers are supposed to be universally
+      unique.
+    """
+    if self._base_id is None:
+      self._base_id = uuid.uuid4()
+
+    return '<%s+%s>' % (self._base_id, quote(id_))
+
+  def _header_to_id(self, header):
+    """Convert a Content-ID header value to an id.
+
+    Presumes the Content-ID header conforms to the format that _id_to_header()
+    returns.
+
+    Args:
+      header: string, Content-ID header value.
+
+    Returns:
+      The extracted id value.
+
+    Raises:
+      BatchError if the header is not in the expected format.
+    """
+    if header[0] != '<' or header[-1] != '>':
+      raise BatchError("Invalid value for Content-ID: %s" % header)
+    if '+' not in header:
+      raise BatchError("Invalid value for Content-ID: %s" % header)
+    base, id_ = header[1:-1].rsplit('+', 1)
+
+    return unquote(id_)
+
+  def _serialize_request(self, request):
+    """Convert an HttpRequest object into a string.
+
+    Args:
+      request: HttpRequest, the request to serialize.
+
+    Returns:
+      The request as a string in application/http format.
+    """
+    # Construct status line
+    parsed = urlparse(request.uri)
+    request_line = urlunparse(
+        ('', '', parsed.path, parsed.params, parsed.query, '')
+        )
+    status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
+    major, minor = request.headers.get('content-type', 'application/json').split('/')
+    msg = MIMENonMultipart(major, minor)
+    headers = request.headers.copy()
+
+    if request.http is not None and hasattr(request.http.request,
+        'credentials'):
+      request.http.request.credentials.apply(headers)
+
+    # MIMENonMultipart adds its own Content-Type header.
+    if 'content-type' in headers:
+      del headers['content-type']
+
+    for key, value in six.iteritems(headers):
+      msg[key] = value
+    msg['Host'] = parsed.netloc
+    msg.set_unixfrom(None)
+
+    if request.body is not None:
+      msg.set_payload(request.body)
+      msg['content-length'] = str(len(request.body))
+
+    # Serialize the mime message.
+    fp = StringIO()
+    # maxheaderlen=0 means don't line wrap headers.
+    g = Generator(fp, maxheaderlen=0)
+    g.flatten(msg, unixfrom=False)
+    body = fp.getvalue()
+
+    return status_line + body
+
+  def _deserialize_response(self, payload):
+    """Convert string into httplib2 response and content.
+
+    Args:
+      payload: string, headers and body as a string.
+
+    Returns:
+      A pair (resp, content), such as would be returned from httplib2.request.
+    """
+    # Strip off the status line
+    status_line, payload = payload.split('\n', 1)
+    protocol, status, reason = status_line.split(' ', 2)
+
+    # Parse the rest of the response
+    parser = FeedParser()
+    parser.feed(payload)
+    msg = parser.close()
+    msg['status'] = status
+
+    # Create httplib2.Response from the parsed headers.
+    resp = httplib2.Response(msg)
+    resp.reason = reason
+    resp.version = int(protocol.split('/', 1)[1].replace('.', ''))
+
+    content = payload.split('\r\n\r\n', 1)[1]
+
+    return resp, content
+
+  def _new_id(self):
+    """Create a new id.
+
+    Auto incrementing number that avoids conflicts with ids already used.
+
+    Returns:
+       string, a new unique id.
+    """
+    self._last_auto_id += 1
+    while str(self._last_auto_id) in self._requests:
+      self._last_auto_id += 1
+    return str(self._last_auto_id)
+
+  @util.positional(2)
+  def add(self, request, callback=None, request_id=None):
+    """Add a new request.
+
+    Every callback added will be paired with a unique id, the request_id. That
+    unique id will be passed back to the callback when the response comes back
+    from the server. The default behavior is to have the library generate its
+    own unique id. If the caller passes in a request_id then they must ensure
+    uniqueness for each request_id; if the ids are not unique, an exception is
+    raised. Callers should either supply all request_ids or never supply a
+    request id, to avoid such an error.
+
+    Args:
+      request: HttpRequest, Request to add to the batch.
+      callback: callable, A callback to be called for this response, of the
+        form callback(id, response, exception). The first parameter is the
+        request id, and the second is the deserialized response object. The
+        third is a googleapiclient.errors.HttpError exception object if an HTTP error
+        occurred while processing the request, or None if no errors occurred.
+      request_id: string, A unique id for the request. The id will be passed to
+        the callback with the response.
+
+    Returns:
+      None
+
+    Raises:
+      BatchError if a media request is added to a batch.
+      KeyError if the request_id is not unique.
+    """
+    if request_id is None:
+      request_id = self._new_id()
+    if request.resumable is not None:
+      raise BatchError("Media requests cannot be used in a batch request.")
+    if request_id in self._requests:
+      raise KeyError("A request with this ID already exists: %s" % request_id)
+    self._requests[request_id] = request
+    self._callbacks[request_id] = callback
+    self._order.append(request_id)
+
+  def _execute(self, http, order, requests):
+    """Serialize batch request, send to server, process response.
+
+    Args:
+      http: httplib2.Http, an http object to be used to make the request with.
+      order: list, list of request ids in the order they were added to the
+        batch.
+      requests: dict, mapping of request id to request objects to send.
+
+    Raises:
+      httplib2.HttpLib2Error if a transport error has occurred.
+      googleapiclient.errors.BatchError if the response is the wrong format.
+    """
+    message = MIMEMultipart('mixed')
+    # Message should not write out its own headers.
+    setattr(message, '_write_headers', lambda self: None)
+
+    # Add all the individual requests.
+    for request_id in order:
+      request = requests[request_id]
+
+      msg = MIMENonMultipart('application', 'http')
+      msg['Content-Transfer-Encoding'] = 'binary'
+      msg['Content-ID'] = self._id_to_header(request_id)
+
+      body = self._serialize_request(request)
+      msg.set_payload(body)
+      message.attach(msg)
+
+    # encode the body: note that we can't use `as_string`, because
+    # it plays games with `From ` lines.
+    fp = StringIO()
+    g = Generator(fp, mangle_from_=False)
+    g.flatten(message, unixfrom=False)
+    body = fp.getvalue()
+
+    headers = {}
+    headers['content-type'] = ('multipart/mixed; '
+                               'boundary="%s"') % message.get_boundary()
+
+    resp, content = http.request(self._batch_uri, method='POST', body=body,
+                                 headers=headers)
+
+    if resp.status >= 300:
+      raise HttpError(resp, content, uri=self._batch_uri)
+
+    # Prepend with a content-type header so FeedParser can handle it.
+    header = 'content-type: %s\r\n\r\n' % resp['content-type']
+    # PY3's FeedParser only accepts unicode. So we should decode content
+    # here, and encode each payload again.
+    if six.PY3:
+      content = content.decode('utf-8')
+    for_parser = header + content
+
+    parser = FeedParser()
+    parser.feed(for_parser)
+    mime_response = parser.close()
+
+    if not mime_response.is_multipart():
+      raise BatchError("Response not in multipart/mixed format.", resp=resp,
+                       content=content)
+
+    for part in mime_response.get_payload():
+      request_id = self._header_to_id(part['Content-ID'])
+      response, content = self._deserialize_response(part.get_payload())
+      # We encode content here to emulate normal http response.
+      if isinstance(content, six.text_type):
+        content = content.encode('utf-8')
+      self._responses[request_id] = (response, content)
+
+  @util.positional(1)
+  def execute(self, http=None):
+    """Execute all the requests as a single batched HTTP request.
+
+    Args:
+      http: httplib2.Http, an http object to be used in place of the one the
+        HttpRequest request object was constructed with. If one isn't supplied
+        then use a http object from the requests in this batch.
+
+    Returns:
+      None
+
+    Raises:
+      httplib2.HttpLib2Error if a transport error has occurred.
+      googleapiclient.errors.BatchError if the response is the wrong format.
+    """
+    # If we have no requests return
+    if len(self._order) == 0:
+      return None
+
+    # If http is not supplied use the first valid one given in the requests.
+    if http is None:
+      for request_id in self._order:
+        request = self._requests[request_id]
+        if request is not None:
+          http = request.http
+          break
+
+    if http is None:
+      raise ValueError("Missing a valid http object.")
+
+    # Special case for OAuth2Credentials-style objects which have not yet been
+    # refreshed with an initial access_token.
+    if getattr(http.request, 'credentials', None) is not None:
+      creds = http.request.credentials
+      if not getattr(creds, 'access_token', None):
+        LOGGER.info('Attempting refresh to obtain initial access_token')
+        creds.refresh(http)
+
+    self._execute(http, self._order, self._requests)
+
+    # Loop over all the requests and check for 401s. For each 401 request the
+    # credentials should be refreshed and then sent again in a separate batch.
+    redo_requests = {}
+    redo_order = []
+
+    for request_id in self._order:
+      resp, content = self._responses[request_id]
+      if resp['status'] == '401':
+        redo_order.append(request_id)
+        request = self._requests[request_id]
+        self._refresh_and_apply_credentials(request, http)
+        redo_requests[request_id] = request
+
+    if redo_requests:
+      self._execute(http, redo_order, redo_requests)
+
+    # Now process all callbacks that are erroring, and raise an exception for
+    # ones that return a non-2xx response? Or add extra parameter to callback
+    # that contains an HttpError?
+
+    for request_id in self._order:
+      resp, content = self._responses[request_id]
+
+      request = self._requests[request_id]
+      callback = self._callbacks[request_id]
+
+      response = None
+      exception = None
+      try:
+        if resp.status >= 300:
+          raise HttpError(resp, content, uri=request.uri)
+        response = request.postproc(resp, content)
+      except HttpError as e:
+        exception = e
+
+      if callback is not None:
+        callback(request_id, response, exception)
+      if self._callback is not None:
+        self._callback(request_id, response, exception)
+
+
+class HttpRequestMock(object):
+  """Mock of HttpRequest.
+
+  Do not construct directly, instead use RequestMockBuilder.
+  """
+
+  def __init__(self, resp, content, postproc):
+    """Constructor for HttpRequestMock
+
+    Args:
+      resp: httplib2.Response, the response to emulate coming from the request
+      content: string, the response body
+      postproc: callable, the post processing function usually supplied by
+                the model class. See model.JsonModel.response() as an example.
+    """
+    self.resp = resp
+    self.content = content
+    self.postproc = postproc
+    if resp is None:
+      self.resp = httplib2.Response({'status': 200, 'reason': 'OK'})
+    if 'reason' in self.resp:
+      self.resp.reason = self.resp['reason']
+
+  def execute(self, http=None):
+    """Execute the request.
+
+    Same behavior as HttpRequest.execute(), but the response is
+    mocked and not really from an HTTP request/response.
+    """
+    return self.postproc(self.resp, self.content)
+
+
+class RequestMockBuilder(object):
+  """A simple mock of HttpRequest
+
+    Pass in a dictionary to the constructor that maps request methodIds to
+    tuples of (httplib2.Response, content, opt_expected_body) that should be
+    returned when that method is called. None may also be passed in for the
+    httplib2.Response, in which case a 200 OK response will be generated.
+    If an opt_expected_body (str or dict) is provided, it will be compared to
+    the body and UnexpectedBodyError will be raised on inequality.
+
+    Example:
+      response = '{"data": {"id": "tag:google.c...'
+      requestBuilder = RequestMockBuilder(
+        {
+          'plus.activities.get': (None, response),
+        }
+      )
+      googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)
+
+    Methods that you do not supply a response for will return a
+    200 OK with an empty JSON object as the response content or raise an exception
+    if check_unexpected is set to True. The methodId is taken from the rpcName
+    in the discovery document.
+
+    For more details see the project wiki.
+  """
+
+  def __init__(self, responses, check_unexpected=False):
+    """Constructor for RequestMockBuilder
+
+    The constructed object should be a callable object
+    that can replace the class HttpRequest.
+
+    responses - A dictionary that maps methodIds into tuples
+                of (httplib2.Response, content). The methodId
+                comes from the 'rpcName' field in the discovery
+                document.
+    check_unexpected - A boolean setting whether or not UnexpectedMethodError
+                       should be raised on unsupplied method.
+    """
+    self.responses = responses
+    self.check_unexpected = check_unexpected
+
+  def __call__(self, http, postproc, uri, method='GET', body=None,
+               headers=None, methodId=None, resumable=None):
+    """Implements the callable interface that discovery.build() expects
+    of requestBuilder, which is to build an object compatible with
+    HttpRequest.execute(). See that method for the description of the
+    parameters and the expected response.
+    """
+    if methodId in self.responses:
+      response = self.responses[methodId]
+      resp, content = response[:2]
+      if len(response) > 2:
+        # Test the body against the supplied expected_body.
+        expected_body = response[2]
+        if bool(expected_body) != bool(body):
+          # Not expecting a body and provided one
+          # or expecting a body and not provided one.
+          raise UnexpectedBodyError(expected_body, body)
+        if isinstance(expected_body, str):
+          expected_body = json.loads(expected_body)
+        body = json.loads(body)
+        if body != expected_body:
+          raise UnexpectedBodyError(expected_body, body)
+      return HttpRequestMock(resp, content, postproc)
+    elif self.check_unexpected:
+      raise UnexpectedMethodError(methodId=methodId)
+    else:
+      model = JsonModel(False)
+      return HttpRequestMock(None, '{}', model.response)
+
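+
+# Editorial sketch, not part of the upstream module: calling a
+# RequestMockBuilder directly, outside discovery.build(), with a canned
+# response for a hypothetical 'zoo.animals.get' method.
+def _example_request_mock_builder():
+  builder = RequestMockBuilder({
+      'zoo.animals.get': (None, '{"name": "Lion"}'),
+  })
+  request = builder(None, JsonModel(False).response,
+                    'http://example.com/animals/lion',
+                    methodId='zoo.animals.get')
+  return request.execute()  # {'name': 'Lion'}
+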
+
+class HttpMock(object):
+  """Mock of httplib2.Http"""
+
+  def __init__(self, filename=None, headers=None):
+    """
+    Args:
+      filename: string, absolute filename to read response from
+      headers: dict, header to return with response
+    """
+    if headers is None:
+      headers = {'status': '200'}
+    if filename:
+      f = open(filename, 'rb')
+      self.data = f.read()
+      f.close()
+    else:
+      self.data = None
+    self.response_headers = headers
+    self.headers = None
+    self.uri = None
+    self.method = None
+    self.body = None
+
+
+  def request(self, uri,
+              method='GET',
+              body=None,
+              headers=None,
+              redirections=1,
+              connection_type=None):
+    self.uri = uri
+    self.method = method
+    self.body = body
+    self.headers = headers
+    return httplib2.Response(self.response_headers), self.data
+
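+
+# Editorial sketch, not part of the upstream module: HttpMock records the last
+# request it received, which keeps test assertions simple.
+def _example_http_mock():
+  http = HttpMock(headers={'status': '200'})
+  http.request('http://example.com/ping', method='POST', body='{}',
+               headers={'content-type': 'application/json'})
+  assert http.method == 'POST'
+  assert http.uri == 'http://example.com/ping'
+  return http
+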
+
+class HttpMockSequence(object):
+  """Mock of httplib2.Http
+
+  Mocks a sequence of calls to request returning different responses for each
+  call. Create an instance initialized with the desired response headers
+  and content and then use as if an httplib2.Http instance.
+
+    http = HttpMockSequence([
+      ({'status': '401'}, ''),
+      ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
+      ({'status': '200'}, 'echo_request_headers'),
+      ])
+    resp, content = http.request("http://examples.com")
+
+  There are special values you can pass in for content to trigger
+  behaviours that are helpful in testing.
+
+  'echo_request_headers' means return the request headers in the response body
+  'echo_request_headers_as_json' means return the request headers, encoded
+     as JSON, in the response body
+  'echo_request_body' means return the request body in the response body
+  'echo_request_uri' means return the request uri in the response body
+  """
+
+  def __init__(self, iterable):
+    """
+    Args:
+      iterable: iterable, a sequence of pairs of (headers, body)
+    """
+    self._iterable = iterable
+    self.follow_redirects = True
+
+  def request(self, uri,
+              method='GET',
+              body=None,
+              headers=None,
+              redirections=1,
+              connection_type=None):
+    resp, content = self._iterable.pop(0)
+    if content == 'echo_request_headers':
+      content = headers
+    elif content == 'echo_request_headers_as_json':
+      content = json.dumps(headers)
+    elif content == 'echo_request_body':
+      if hasattr(body, 'read'):
+        content = body.read()
+      else:
+        content = body
+    elif content == 'echo_request_uri':
+      content = uri
+    if isinstance(content, six.text_type):
+      content = content.encode('utf-8')
+    return httplib2.Response(resp), content
+
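+
+# Editorial sketch, not part of the upstream module: driving
+# HttpRequest.execute() against an HttpMockSequence, so no real network
+# traffic is involved; the postproc simply decodes the JSON body.
+def _example_http_mock_sequence():
+  http = HttpMockSequence([
+      ({'status': '200'}, '{"answer": 42}'),
+  ])
+  request = HttpRequest(http, lambda resp, content: json.loads(content),
+                        'http://example.com/answer')
+  return request.execute()  # {'answer': 42}
+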
+
+def set_user_agent(http, user_agent):
+  """Set the user-agent on every request.
+
+  Args:
+     http - An instance of httplib2.Http
+         or something that acts like it.
+     user_agent: string, the value for the user-agent header.
+
+  Returns:
+     A modified instance of http that was passed in.
+
+  Example:
+
+    h = httplib2.Http()
+    h = set_user_agent(h, "my-app-name/6.0")
+
+  Most of the time the user-agent will be set when doing auth; this is for the rare
+  cases where you are accessing an unauthenticated endpoint.
+  """
+  request_orig = http.request
+
+  # The closure that will replace 'httplib2.Http.request'.
+  def new_request(uri, method='GET', body=None, headers=None,
+                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+                  connection_type=None):
+    """Modify the request headers to add the user-agent."""
+    if headers is None:
+      headers = {}
+    if 'user-agent' in headers:
+      headers['user-agent'] = user_agent + ' ' + headers['user-agent']
+    else:
+      headers['user-agent'] = user_agent
+    resp, content = request_orig(uri, method, body, headers,
+                        redirections, connection_type)
+    return resp, content
+
+  http.request = new_request
+  return http
+
+
+def tunnel_patch(http):
+  """Tunnel PATCH requests over POST.
+  Args:
+     http - An instance of httplib2.Http
+         or something that acts like it.
+
+  Returns:
+     A modified instance of http that was passed in.
+
+  Example:
+
+    h = httplib2.Http()
+    h = tunnel_patch(h, "my-app-name/6.0")
+
+  Useful if you are running on a platform that doesn't support PATCH.
+  Apply this last if you are using OAuth 1.0, as changing the method
+  will result in a different signature.
+  """
+  request_orig = http.request
+
+  # The closure that will replace 'httplib2.Http.request'.
+  def new_request(uri, method='GET', body=None, headers=None,
+                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+                  connection_type=None):
+    """Modify the request headers to add the user-agent."""
+    if headers is None:
+      headers = {}
+    if method == 'PATCH':
+      if 'oauth_token' in headers.get('authorization', ''):
+        LOGGER.warning(
+            'OAuth 1.0 request made with Credentials after tunnel_patch.')
+      headers['x-http-method-override'] = "PATCH"
+      method = 'POST'
+    resp, content = request_orig(uri, method, body, headers,
+                        redirections, connection_type)
+    return resp, content
+
+  http.request = new_request
+  return http
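+
+
+# Editorial sketch, not part of the upstream module: stacking both wrappers
+# and observing the method override with HttpMockSequence.
+def _example_wrapped_http():
+  http = HttpMockSequence([({'status': '200'}, 'echo_request_headers')])
+  http = set_user_agent(http, 'my-app/1.0')
+  http = tunnel_patch(http)
+  resp, headers = http.request('http://example.com', method='PATCH')
+  # The echoed headers carry 'user-agent': 'my-app/1.0' and
+  # 'x-http-method-override': 'PATCH'; the method actually sent was POST.
+  return headers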
diff --git a/utils/frozen_chromite/third_party/googleapiclient/mimeparse.py b/utils/frozen_chromite/third_party/googleapiclient/mimeparse.py
new file mode 100644
index 0000000..bc9ad09
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/mimeparse.py
@@ -0,0 +1,175 @@
+# Copyright 2014 Joe Gregorio
+#
+# Licensed under the MIT License
+
+"""MIME-Type Parser
+
+This module provides basic functions for handling mime-types. It can handle
+matching mime-types against a list of media-ranges. See section 14.1 of the
+HTTP specification [RFC 2616] for a complete explanation.
+
+   http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
+
+Contents:
+ - parse_mime_type():   Parses a mime-type into its component parts.
+ - parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q'
+                          quality parameter.
+ - quality():           Determines the quality ('q') of a mime-type when
+                          compared against a list of media-ranges.
+ - quality_parsed():    Just like quality() except the second parameter must be
+                          pre-parsed.
+ - best_match():        Choose the mime-type with the highest quality ('q')
+                          from a list of candidates.
+"""
+from __future__ import absolute_import
+from functools import reduce
+import six
+
+__version__ = '0.1.3'
+__author__ = 'Joe Gregorio'
+__email__ = 'joe@bitworking.org'
+__license__ = 'MIT License'
+__credits__ = ''
+
+
+def parse_mime_type(mime_type):
+    """Parses a mime-type into its component parts.
+
+    Carves up a mime-type and returns a tuple of the (type, subtype, params)
+    where 'params' is a dictionary of all the parameters for the media range.
+    For example, the media range 'application/xhtml;q=0.5' would get parsed
+    into:
+
+       ('application', 'xhtml', {'q': '0.5'})
+       """
+    parts = mime_type.split(';')
+    params = dict([tuple([s.strip() for s in param.split('=', 1)])\
+            for param in parts[1:]
+                  ])
+    full_type = parts[0].strip()
+    # Java URLConnection class sends an Accept header that includes a
+    # single '*'. Turn it into a legal wildcard.
+    if full_type == '*':
+        full_type = '*/*'
+    (type, subtype) = full_type.split('/')
+
+    return (type.strip(), subtype.strip(), params)
+
+
+def parse_media_range(range):
+    """Parse a media-range into its component parts.
+
+    Carves up a media range and returns a tuple of the (type, subtype,
+    params) where 'params' is a dictionary of all the parameters for the media
+    range.  For example, the media range 'application/*;q=0.5' would get parsed
+    into:
+
+       ('application', '*', {'q': '0.5'})
+
+    In addition this function also guarantees that there is a value for 'q'
+    in the params dictionary, filling it in with a proper default if
+    necessary.
+    """
+    (type, subtype, params) = parse_mime_type(range)
+    if 'q' not in params or not params['q'] or \
+            not float(params['q']) or float(params['q']) > 1\
+            or float(params['q']) < 0:
+        params['q'] = '1'
+
+    return (type, subtype, params)
+
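+
+# Editorial sketch, not part of the upstream module: parse_media_range() fills
+# in a default quality of '1' when 'q' is missing or out of range.
+def _example_parse_media_range():
+    assert parse_media_range('text/*;q=0.5') == ('text', '*', {'q': '0.5'})
+    assert parse_media_range('text/html') == ('text', 'html', {'q': '1'})
+    return parse_media_range('application/xhtml+xml;q=2')  # 'q' reset to '1'
+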
+
+def fitness_and_quality_parsed(mime_type, parsed_ranges):
+    """Find the best match for a mime-type amongst parsed media-ranges.
+
+    Find the best match for a given mime-type against a list of media_ranges
+    that have already been parsed by parse_media_range(). Returns a tuple of
+    the fitness value and the value of the 'q' quality parameter of the best
+    match, or (-1, 0) if no match was found. Just as for quality_parsed(),
+    'parsed_ranges' must be a list of parsed media ranges.
+    """
+    best_fitness = -1
+    best_fit_q = 0
+    (target_type, target_subtype, target_params) =\
+            parse_media_range(mime_type)
+    for (type, subtype, params) in parsed_ranges:
+        type_match = (type == target_type or\
+                      type == '*' or\
+                      target_type == '*')
+        subtype_match = (subtype == target_subtype or\
+                         subtype == '*' or\
+                         target_subtype == '*')
+        if type_match and subtype_match:
+            param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in \
+                    six.iteritems(target_params) if key != 'q' and \
+                    key in params and value == params[key]], 0)
+            fitness = (type == target_type) and 100 or 0
+            fitness += (subtype == target_subtype) and 10 or 0
+            fitness += param_matches
+            if fitness > best_fitness:
+                best_fitness = fitness
+                best_fit_q = params['q']
+
+    return best_fitness, float(best_fit_q)
+
+
+def quality_parsed(mime_type, parsed_ranges):
+    """Find the best match for a mime-type amongst parsed media-ranges.
+
+    Find the best match for a given mime-type against a list of media_ranges
+    that have already been parsed by parse_media_range(). Returns the 'q'
+    quality parameter of the best match, 0 if no match was found. This function
+    behaves the same as quality() except that 'parsed_ranges' must be a list of
+    parsed media ranges.
+    """
+
+    return fitness_and_quality_parsed(mime_type, parsed_ranges)[1]
+
+
+def quality(mime_type, ranges):
+    """Return the quality ('q') of a mime-type against a list of media-ranges.
+
+    Returns the quality 'q' of a mime-type when compared against the
+    media-ranges in ranges. For example:
+
+    >>> quality('text/html', 'text/*;q=0.3, text/html;q=0.7, '
+    ...         'text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
+    0.7
+
+    """
+    parsed_ranges = [parse_media_range(r) for r in ranges.split(',')]
+
+    return quality_parsed(mime_type, parsed_ranges)
+
+
+def best_match(supported, header):
+    """Return mime-type with the highest quality ('q') from list of candidates.
+
+    Takes a list of supported mime-types and finds the best match for all the
+    media-ranges listed in header. The value of header must be a string that
+    conforms to the format of the HTTP Accept: header. The value of 'supported'
+    is a list of mime-types. The list of supported mime-types should be sorted
+    in order of increasing desirability, in case of a situation where there is
+    a tie.
+
+    >>> best_match(['application/xbel+xml', 'text/xml'],
+    ...            'text/*;q=0.5,*/*; q=0.1')
+    'text/xml'
+    """
+    split_header = _filter_blank(header.split(','))
+    parsed_header = [parse_media_range(r) for r in split_header]
+    weighted_matches = []
+    pos = 0
+    for mime_type in supported:
+        weighted_matches.append((fitness_and_quality_parsed(mime_type,
+                                 parsed_header), pos, mime_type))
+        pos += 1
+    weighted_matches.sort()
+
+    return weighted_matches[-1][0][1] and weighted_matches[-1][2] or ''
+
+
+def _filter_blank(i):
+    for s in i:
+        if s.strip():
+            yield s
diff --git a/utils/frozen_chromite/third_party/googleapiclient/model.py b/utils/frozen_chromite/third_party/googleapiclient/model.py
new file mode 100644
index 0000000..dded04e
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/model.py
@@ -0,0 +1,389 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Model objects for requests and responses.
+
+Each API may support one or more serializations, such
+as JSON, Atom, etc. The model classes are responsible
+for converting between the wire format and the Python
+object representation.
+"""
+from __future__ import absolute_import
+import six
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import json
+import logging
+
+from six.moves.urllib.parse import urlencode
+
+from googleapiclient import __version__
+from googleapiclient.errors import HttpError
+
+
+LOGGER = logging.getLogger(__name__)
+
+dump_request_response = False
+
+
+def _abstract():
+  raise NotImplementedError('You need to override this function')
+
+
+class Model(object):
+  """Model base class.
+
+  All Model classes should implement this interface.
+  The Model serializes and de-serializes between a wire
+  format such as JSON and a Python object representation.
+  """
+
+  def request(self, headers, path_params, query_params, body_value):
+    """Updates outgoing requests with a serialized body.
+
+    Args:
+      headers: dict, request headers
+      path_params: dict, parameters that appear in the request path
+      query_params: dict, parameters that appear in the query
+      body_value: object, the request body as a Python object, which must be
+                  serializable.
+    Returns:
+      A tuple of (headers, path_params, query, body)
+
+      headers: dict, request headers
+      path_params: dict, parameters that appear in the request path
+      query: string, query part of the request URI
+      body: string, the body serialized in the desired wire format.
+    """
+    _abstract()
+
+  def response(self, resp, content):
+    """Convert the response wire format into a Python object.
+
+    Args:
+      resp: httplib2.Response, the HTTP response headers and status
+      content: string, the body of the HTTP response
+
+    Returns:
+      The body de-serialized as a Python object.
+
+    Raises:
+      googleapiclient.errors.HttpError if a non 2xx response is received.
+    """
+    _abstract()
+
+
+class BaseModel(Model):
+  """Base model class.
+
+  Subclasses should provide implementations for the "serialize" and
+  "deserialize" methods, as well as values for the following class attributes.
+
+  Attributes:
+    accept: The value to use for the HTTP Accept header.
+    content_type: The value to use for the HTTP Content-type header.
+    no_content_response: The value to return when deserializing a 204 "No
+        Content" response.
+    alt_param: The value to supply as the "alt" query parameter for requests.
+  """
+
+  accept = None
+  content_type = None
+  no_content_response = None
+  alt_param = None
+
+  def _log_request(self, headers, path_params, query, body):
+    """Logs debugging information about the request if requested."""
+    if dump_request_response:
+      LOGGER.info('--request-start--')
+      LOGGER.info('-headers-start-')
+      for h, v in six.iteritems(headers):
+        LOGGER.info('%s: %s', h, v)
+      LOGGER.info('-headers-end-')
+      LOGGER.info('-path-parameters-start-')
+      for h, v in six.iteritems(path_params):
+        LOGGER.info('%s: %s', h, v)
+      LOGGER.info('-path-parameters-end-')
+      LOGGER.info('body: %s', body)
+      LOGGER.info('query: %s', query)
+      LOGGER.info('--request-end--')
+
+  def request(self, headers, path_params, query_params, body_value):
+    """Updates outgoing requests with a serialized body.
+
+    Args:
+      headers: dict, request headers
+      path_params: dict, parameters that appear in the request path
+      query_params: dict, parameters that appear in the query
+      body_value: object, the request body as a Python object, which must be
+                  serializable by json.
+    Returns:
+      A tuple of (headers, path_params, query, body)
+
+      headers: dict, request headers
+      path_params: dict, parameters that appear in the request path
+      query: string, query part of the request URI
+      body: string, the body serialized as JSON
+    """
+    query = self._build_query(query_params)
+    headers['accept'] = self.accept
+    headers['accept-encoding'] = 'gzip, deflate'
+    if 'user-agent' in headers:
+      headers['user-agent'] += ' '
+    else:
+      headers['user-agent'] = ''
+    headers['user-agent'] += 'google-api-python-client/%s (gzip)' % __version__
+
+    if body_value is not None:
+      headers['content-type'] = self.content_type
+      body_value = self.serialize(body_value)
+    self._log_request(headers, path_params, query, body_value)
+    return (headers, path_params, query, body_value)
+
+  def _build_query(self, params):
+    """Builds a query string.
+
+    Args:
+      params: dict, the query parameters
+
+    Returns:
+      The query parameters properly encoded into an HTTP URI query string.
+    """
+    if self.alt_param is not None:
+      params.update({'alt': self.alt_param})
+    astuples = []
+    for key, value in six.iteritems(params):
+      if type(value) == type([]):
+        for x in value:
+          x = x.encode('utf-8')
+          astuples.append((key, x))
+      else:
+        if isinstance(value, six.text_type) and callable(value.encode):
+          value = value.encode('utf-8')
+        astuples.append((key, value))
+    return '?' + urlencode(astuples)
+
+  def _log_response(self, resp, content):
+    """Logs debugging information about the response if requested."""
+    if dump_request_response:
+      LOGGER.info('--response-start--')
+      for h, v in six.iteritems(resp):
+        LOGGER.info('%s: %s', h, v)
+      if content:
+        LOGGER.info(content)
+      LOGGER.info('--response-end--')
+
+  def response(self, resp, content):
+    """Convert the response wire format into a Python object.
+
+    Args:
+      resp: httplib2.Response, the HTTP response headers and status
+      content: string, the body of the HTTP response
+
+    Returns:
+      The body de-serialized as a Python object.
+
+    Raises:
+      googleapiclient.errors.HttpError if a non 2xx response is received.
+    """
+    self._log_response(resp, content)
+    # Error handling is TBD, for example, do we retry
+    # for some operation/error combinations?
+    if resp.status < 300:
+      if resp.status == 204:
+        # A 204: No Content response should be treated differently
+        # to all the other success states
+        return self.no_content_response
+      return self.deserialize(content)
+    else:
+      LOGGER.debug('Content from bad request was: %s' % content)
+      raise HttpError(resp, content)
+
+  def serialize(self, body_value):
+    """Perform the actual Python object serialization.
+
+    Args:
+      body_value: object, the request body as a Python object.
+
+    Returns:
+      string, the body in serialized form.
+    """
+    _abstract()
+
+  def deserialize(self, content):
+    """Perform the actual deserialization from response string to Python
+    object.
+
+    Args:
+      content: string, the body of the HTTP response
+
+    Returns:
+      The body de-serialized as a Python object.
+    """
+    _abstract()
+
+
+class JsonModel(BaseModel):
+  """Model class for JSON.
+
+  Serializes and de-serializes between JSON and the Python
+  object representation of HTTP request and response bodies.
+  """
+  accept = 'application/json'
+  content_type = 'application/json'
+  alt_param = 'json'
+
+  def __init__(self, data_wrapper=False):
+    """Construct a JsonModel.
+
+    Args:
+      data_wrapper: boolean, wrap requests and responses in a data wrapper
+    """
+    self._data_wrapper = data_wrapper
+
+  def serialize(self, body_value):
+    if (isinstance(body_value, dict) and 'data' not in body_value and
+        self._data_wrapper):
+      body_value = {'data': body_value}
+    return json.dumps(body_value)
+
+  def deserialize(self, content):
+    try:
+        content = content.decode('utf-8')
+    except AttributeError:
+        pass
+    body = json.loads(content)
+    if self._data_wrapper and isinstance(body, dict) and 'data' in body:
+      body = body['data']
+    return body
+
+  @property
+  def no_content_response(self):
+    return {}
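+
+
+# Editor's note: the helper below is an illustrative sketch, not part of the
+# upstream googleapiclient module.  It shows how JsonModel.request() fills in
+# headers, builds the query string via _build_query(), and serializes the body,
+# and how deserialize() strips the optional 'data' wrapper again.  The function
+# name and the sample values are hypothetical.
+def _example_json_model_round_trip():
+  model = JsonModel(data_wrapper=True)
+  headers, path_params, query, body = model.request(
+      headers={'user-agent': 'example'},
+      path_params={'userId': 'me'},
+      query_params={'maxResults': 10},
+      body_value={'title': 'hello'})
+  # query looks like '?maxResults=10&alt=json'; body is the JSON string
+  # '{"data": {"title": "hello"}}' because data_wrapper=True wraps the payload.
+  assert headers['content-type'] == 'application/json'
+  # deserialize() accepts bytes or text and unwraps the 'data' envelope.
+  assert model.deserialize(body) == {'title': 'hello'}
+  return headers, path_params, query, body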
+
+
+class RawModel(JsonModel):
+  """Model class for requests that don't return JSON.
+
+  Serializes and de-serializes between JSON and the Python
+  object representation of the HTTP request, and returns the raw bytes
+  of the response body.
+  """
+  accept = '*/*'
+  content_type = 'application/json'
+  alt_param = None
+
+  def deserialize(self, content):
+    return content
+
+  @property
+  def no_content_response(self):
+    return ''
+
+
+class MediaModel(JsonModel):
+  """Model class for requests that return Media.
+
+  Serializes and de-serializes between JSON and the Python
+  object representation of the HTTP request, and returns the raw bytes
+  of the response body.
+  """
+  accept = '*/*'
+  content_type = 'application/json'
+  alt_param = 'media'
+
+  def deserialize(self, content):
+    return content
+
+  @property
+  def no_content_response(self):
+    return ''
+
+
+class ProtocolBufferModel(BaseModel):
+  """Model class for protocol buffers.
+
+  Serializes and de-serializes the binary protocol buffer sent in the HTTP
+  request and response bodies.
+  """
+  accept = 'application/x-protobuf'
+  content_type = 'application/x-protobuf'
+  alt_param = 'proto'
+
+  def __init__(self, protocol_buffer):
+    """Constructs a ProtocolBufferModel.
+
+    The serialized protocol buffer returned in an HTTP response will be
+    de-serialized using the given protocol buffer class.
+
+    Args:
+      protocol_buffer: The protocol buffer class used to de-serialize a
+      response from the API.
+    """
+    self._protocol_buffer = protocol_buffer
+
+  def serialize(self, body_value):
+    return body_value.SerializeToString()
+
+  def deserialize(self, content):
+    return self._protocol_buffer.FromString(content)
+
+  @property
+  def no_content_response(self):
+    return self._protocol_buffer()
+
+
+def makepatch(original, modified):
+  """Create a patch object.
+
+  Some methods support PATCH, an efficient way to send updates to a resource.
+  This method allows the easy construction of patch bodies by looking at the
+  differences between a resource before and after it was modified.
+
+  Args:
+    original: object, the original deserialized resource
+    modified: object, the modified deserialized resource
+  Returns:
+    An object that contains only the changes from original to modified, in a
+    form suitable to pass to a PATCH method.
+
+  Example usage:
+    item = service.activities().get(postid=postid, userid=userid).execute()
+    original = copy.deepcopy(item)
+    item['object']['content'] = 'This is updated.'
+    service.activities.patch(postid=postid, userid=userid,
+      body=makepatch(original, item)).execute()
+  """
+  patch = {}
+  for key, original_value in six.iteritems(original):
+    modified_value = modified.get(key, None)
+    if modified_value is None:
+      # Use None to signal that the element is deleted
+      patch[key] = None
+    elif original_value != modified_value:
+      if type(original_value) == type({}):
+        # Recursively descend objects
+        patch[key] = makepatch(original_value, modified_value)
+      else:
+        # In the case of simple types or arrays we just replace
+        patch[key] = modified_value
+    else:
+      # Don't add anything to patch if there's no change
+      pass
+  for key in modified:
+    if key not in original:
+      patch[key] = modified[key]
+
+  return patch
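+
+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# It demonstrates makepatch() on plain dictionaries: unchanged keys are
+# dropped, removed keys become None, and nested dicts are diffed recursively.
+# The function name and the sample resources are hypothetical.
+def _example_makepatch():
+  original = {'title': 'old', 'labels': ['a'], 'owner': {'name': 'x', 'id': 1}}
+  modified = {'title': 'new', 'owner': {'name': 'x', 'id': 2}, 'starred': True}
+  patch = makepatch(original, modified)
+  # 'title' changed, 'labels' was deleted (None), only 'id' differs inside
+  # 'owner', and 'starred' is newly added; unchanged values are omitted.
+  assert patch == {'title': 'new', 'labels': None,
+                   'owner': {'id': 2}, 'starred': True}
+  return patch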
diff --git a/utils/frozen_chromite/third_party/googleapiclient/sample_tools.py b/utils/frozen_chromite/third_party/googleapiclient/sample_tools.py
new file mode 100644
index 0000000..2b4e7b4
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/sample_tools.py
@@ -0,0 +1,103 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for making samples.
+
+Consolidates a lot of code commonly repeated in sample applications.
+"""
+from __future__ import absolute_import
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__all__ = ['init']
+
+
+import argparse
+import httplib2
+import os
+
+from googleapiclient import discovery
+from oauth2client import client
+from oauth2client import file
+from oauth2client import tools
+
+
+def init(argv, name, version, doc, filename, scope=None, parents=[], discovery_filename=None):
+  """A common initialization routine for samples.
+
+  Many of the sample applications do the same initialization, which has now
+  been consolidated into this function. This function uses common idioms found
+  in almost all the samples, i.e. for an API with name 'apiname', the
+  credentials are stored in a file named apiname.dat, and the
+  client_secrets.json file is stored in the same directory as the application
+  main file.
+
+  Args:
+    argv: list of string, the command-line parameters of the application.
+    name: string, name of the API.
+    version: string, version of the API.
+    doc: string, description of the application. Usually set to __doc__.
+    filename: string, filename of the application. Usually set to __file__.
+    parents: list of argparse.ArgumentParser, additional command-line flags.
+    scope: string, The OAuth scope used.
+    discovery_filename: string, name of local discovery file (JSON). Use when discovery doc not available via URL.
+
+  Returns:
+    A tuple of (service, flags), where service is the service object and flags
+    is the parsed command-line flags.
+  """
+  if scope is None:
+    scope = 'https://www.googleapis.com/auth/' + name
+
+  # Parse command-line arguments.
+  parent_parsers = [tools.argparser]
+  parent_parsers.extend(parents)
+  parser = argparse.ArgumentParser(
+      description=doc,
+      formatter_class=argparse.RawDescriptionHelpFormatter,
+      parents=parent_parsers)
+  flags = parser.parse_args(argv[1:])
+
+  # Name of a file containing the OAuth 2.0 information for this
+  # application, including client_id and client_secret, which are found
+  # on the API Access tab on the Google APIs
+  # Console <http://code.google.com/apis/console>.
+  client_secrets = os.path.join(os.path.dirname(filename),
+                                'client_secrets.json')
+
+  # Set up a Flow object to be used if we need to authenticate.
+  flow = client.flow_from_clientsecrets(client_secrets,
+      scope=scope,
+      message=tools.message_if_missing(client_secrets))
+
+  # Prepare credentials, and authorize HTTP object with them.
+  # If the credentials don't exist or are invalid run through the native client
+  # flow. The Storage object will ensure that if successful the good
+  # credentials will get written back to a file.
+  storage = file.Storage(name + '.dat')
+  credentials = storage.get()
+  if credentials is None or credentials.invalid:
+    credentials = tools.run_flow(flow, storage, flags)
+  http = credentials.authorize(http = httplib2.Http())
+
+  if discovery_filename is None:
+    # Construct a service object via the discovery service.
+    service = discovery.build(name, version, http=http)
+  else:
+    # Construct a service object using a local discovery document file.
+    with open(discovery_filename) as discovery_file:
+      service = discovery.build_from_document(
+          discovery_file.read(),
+          base='https://www.googleapis.com/',
+          http=http)
+  return (service, flags)
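+
+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# It shows how a sample's main() is expected to call init(); the API name,
+# version and scope are hypothetical, and actually running it requires a
+# client_secrets.json next to the script plus network access for the OAuth
+# flow and the discovery request.
+def _example_main(argv):
+  service, flags = init(
+      argv, 'drive', 'v3', __doc__, __file__,
+      scope='https://www.googleapis.com/auth/drive.metadata.readonly')
+  # 'service' is the discovery-built client; 'flags' holds the parsed
+  # command-line arguments (including the standard oauth2client flags).
+  return service, flags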
diff --git a/utils/frozen_chromite/third_party/googleapiclient/schema.py b/utils/frozen_chromite/third_party/googleapiclient/schema.py
new file mode 100644
index 0000000..9feaf28
--- /dev/null
+++ b/utils/frozen_chromite/third_party/googleapiclient/schema.py
@@ -0,0 +1,318 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Schema processing for discovery based APIs
+
+Schemas holds an APIs discovery schemas. It can return those schema as
+deserialized JSON objects, or pretty print them as prototype objects that
+conform to the schema.
+
+For example, given the schema:
+
+ schema = \"\"\"{
+   "Foo": {
+    "type": "object",
+    "properties": {
+     "etag": {
+      "type": "string",
+      "description": "ETag of the collection."
+     },
+     "kind": {
+      "type": "string",
+      "description": "Type of the collection ('calendar#acl').",
+      "default": "calendar#acl"
+     },
+     "nextPageToken": {
+      "type": "string",
+      "description": "Token used to access the next
+         page of this result. Omitted if no further results are available."
+     }
+    }
+   }
+ }\"\"\"
+
+ s = Schemas(schema)
+ print s.prettyPrintByName('Foo')
+
+ Produces the following output:
+
+  {
+   "nextPageToken": "A String", # Token used to access the
+       # next page of this result. Omitted if no further results are available.
+   "kind": "A String", # Type of the collection ('calendar#acl').
+   "etag": "A String", # ETag of the collection.
+  },
+
+The constructor takes a discovery document in which to look up named schema.
+"""
+from __future__ import absolute_import
+import six
+
+# TODO(jcgregorio) support format, enum, minimum, maximum
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import copy
+
+# Oauth2client < 3 has the positional helper in 'util', >= 3 has it
+# in '_helpers'.
+try:
+  from oauth2client import util
+except ImportError:
+  from oauth2client import _helpers as util
+
+
+class Schemas(object):
+  """Schemas for an API."""
+
+  def __init__(self, discovery):
+    """Constructor.
+
+    Args:
+      discovery: object, Deserialized discovery document from which we pull
+        out the named schema.
+    """
+    self.schemas = discovery.get('schemas', {})
+
+    # Cache of pretty printed schemas.
+    self.pretty = {}
+
+  @util.positional(2)
+  def _prettyPrintByName(self, name, seen=None, dent=0):
+    """Get pretty printed object prototype from the schema name.
+
+    Args:
+      name: string, Name of schema in the discovery document.
+      seen: list of string, Names of schema already seen. Used to handle
+        recursive definitions.
+
+    Returns:
+      string, A string that contains a prototype object with
+        comments that conforms to the given schema.
+    """
+    if seen is None:
+      seen = []
+
+    if name in seen:
+      # Do not fall into an infinite loop over recursive definitions.
+      return '# Object with schema name: %s' % name
+    seen.append(name)
+
+    if name not in self.pretty:
+      self.pretty[name] = _SchemaToStruct(self.schemas[name],
+          seen, dent=dent).to_str(self._prettyPrintByName)
+
+    seen.pop()
+
+    return self.pretty[name]
+
+  def prettyPrintByName(self, name):
+    """Get pretty printed object prototype from the schema name.
+
+    Args:
+      name: string, Name of schema in the discovery document.
+
+    Returns:
+      string, A string that contains a prototype object with
+        comments that conforms to the given schema.
+    """
+    # Return with trailing comma and newline removed.
+    return self._prettyPrintByName(name, seen=[], dent=1)[:-2]
+
+  @util.positional(2)
+  def _prettyPrintSchema(self, schema, seen=None, dent=0):
+    """Get pretty printed object prototype of schema.
+
+    Args:
+      schema: object, Parsed JSON schema.
+      seen: list of string, Names of schema already seen. Used to handle
+        recursive definitions.
+
+    Returns:
+      string, A string that contains a prototype object with
+        comments that conforms to the given schema.
+    """
+    if seen is None:
+      seen = []
+
+    return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName)
+
+  def prettyPrintSchema(self, schema):
+    """Get pretty printed object prototype of schema.
+
+    Args:
+      schema: object, Parsed JSON schema.
+
+    Returns:
+      string, A string that contains a prototype object with
+        comments that conforms to the given schema.
+    """
+    # Return with trailing comma and newline removed.
+    return self._prettyPrintSchema(schema, dent=1)[:-2]
+
+  def get(self, name):
+    """Get deserialized JSON schema from the schema name.
+
+    Args:
+      name: string, Schema name.
+    """
+    return self.schemas[name]
+
+
+class _SchemaToStruct(object):
+  """Convert schema to a prototype object."""
+
+  @util.positional(3)
+  def __init__(self, schema, seen, dent=0):
+    """Constructor.
+
+    Args:
+      schema: object, Parsed JSON schema.
+      seen: list, List of names of schema already seen while parsing. Used to
+        handle recursive definitions.
+      dent: int, Initial indentation depth.
+    """
+    # The result of this parsing kept as list of strings.
+    self.value = []
+
+    # The final value of the parsing.
+    self.string = None
+
+    # The parsed JSON schema.
+    self.schema = schema
+
+    # Indentation level.
+    self.dent = dent
+
+    # Method that when called returns a prototype object for the schema with
+    # the given name.
+    self.from_cache = None
+
+    # List of names of schema already seen while parsing.
+    self.seen = seen
+
+  def emit(self, text):
+    """Add text as a line to the output.
+
+    Args:
+      text: string, Text to output.
+    """
+    self.value.extend(["  " * self.dent, text, '\n'])
+
+  def emitBegin(self, text):
+    """Add text to the output, but with no line terminator.
+
+    Args:
+      text: string, Text to output.
+      """
+    self.value.extend(["  " * self.dent, text])
+
+  def emitEnd(self, text, comment):
+    """Add text and comment to the output with line terminator.
+
+    Args:
+      text: string, Text to output.
+      comment: string, Python comment.
+    """
+    if comment:
+      divider = '\n' + '  ' * (self.dent + 2) + '# '
+      lines = comment.splitlines()
+      lines = [x.rstrip() for x in lines]
+      comment = divider.join(lines)
+      self.value.extend([text, ' # ', comment, '\n'])
+    else:
+      self.value.extend([text, '\n'])
+
+  def indent(self):
+    """Increase indentation level."""
+    self.dent += 1
+
+  def undent(self):
+    """Decrease indentation level."""
+    self.dent -= 1
+
+  def _to_str_impl(self, schema):
+    """Prototype object based on the schema, in Python code with comments.
+
+    Args:
+      schema: object, Parsed JSON schema file.
+
+    Returns:
+      Prototype object based on the schema, in Python code with comments.
+    """
+    stype = schema.get('type')
+    if stype == 'object':
+      self.emitEnd('{', schema.get('description', ''))
+      self.indent()
+      if 'properties' in schema:
+        for pname, pschema in six.iteritems(schema.get('properties', {})):
+          self.emitBegin('"%s": ' % pname)
+          self._to_str_impl(pschema)
+      elif 'additionalProperties' in schema:
+        self.emitBegin('"a_key": ')
+        self._to_str_impl(schema['additionalProperties'])
+      self.undent()
+      self.emit('},')
+    elif '$ref' in schema:
+      schemaName = schema['$ref']
+      description = schema.get('description', '')
+      s = self.from_cache(schemaName, seen=self.seen)
+      parts = s.splitlines()
+      self.emitEnd(parts[0], description)
+      for line in parts[1:]:
+        self.emit(line.rstrip())
+    elif stype == 'boolean':
+      value = schema.get('default', 'True or False')
+      self.emitEnd('%s,' % str(value), schema.get('description', ''))
+    elif stype == 'string':
+      value = schema.get('default', 'A String')
+      self.emitEnd('"%s",' % str(value), schema.get('description', ''))
+    elif stype == 'integer':
+      value = schema.get('default', '42')
+      self.emitEnd('%s,' % str(value), schema.get('description', ''))
+    elif stype == 'number':
+      value = schema.get('default', '3.14')
+      self.emitEnd('%s,' % str(value), schema.get('description', ''))
+    elif stype == 'null':
+      self.emitEnd('None,', schema.get('description', ''))
+    elif stype == 'any':
+      self.emitEnd('"",', schema.get('description', ''))
+    elif stype == 'array':
+      self.emitEnd('[', schema.get('description'))
+      self.indent()
+      self.emitBegin('')
+      self._to_str_impl(schema['items'])
+      self.undent()
+      self.emit('],')
+    else:
+      self.emit('Unknown type! %s' % stype)
+      self.emitEnd('', '')
+
+    self.string = ''.join(self.value)
+    return self.string
+
+  def to_str(self, from_cache):
+    """Prototype object based on the schema, in Python code with comments.
+
+    Args:
+      from_cache: callable(name, seen), Callable that retrieves an object
+         prototype for a schema with the given name. Seen is a list of schema
+         names already seen as we recursively descend the schema definition.
+
+    Returns:
+      Prototype object based on the schema, in Python code with comments.
+      The lines of the code will all be properly indented.
+    """
+    self.from_cache = from_cache
+    return self._to_str_impl(self.schema)
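+
+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# It builds a tiny discovery document by hand (the 'Item' schema below is
+# hypothetical) and pretty prints it, exercising Schemas and, indirectly,
+# _SchemaToStruct.
+def _example_pretty_print():
+  discovery = {
+      'schemas': {
+          'Item': {
+              'type': 'object',
+              'properties': {
+                  'etag': {'type': 'string',
+                           'description': 'ETag of the item.'},
+                  'count': {'type': 'integer'},
+              },
+          },
+      },
+  }
+  schemas = Schemas(discovery)
+  # Returns indented prototype text, roughly:
+  #   {
+  #     "etag": "A String", # ETag of the item.
+  #     "count": 42,
+  #   }
+  return schemas.prettyPrintByName('Item')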
diff --git a/utils/frozen_chromite/third_party/infra_libs/.coveragerc b/utils/frozen_chromite/third_party/infra_libs/.coveragerc
new file mode 100644
index 0000000..741eb75
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/.coveragerc
@@ -0,0 +1,27 @@
+# This file exists despite having .coveragerc in infra_libs because on Windows
+# bots, we only execute tests in certain modules of infra_libs (including this
+# one) and the latest version of coverage throws an exception when given a
+# non-existent config file.
+
+[run]
+include = ./packages/infra_libs/infra_libs/*
+
+[report]
+exclude_lines =
+    # Have to re-enable the standard pragma
+    pragma: no cover
+
+    # Don't complain about missing debug-only code:
+    def __repr__
+    if self\.debug
+
+    # Don't complain if tests don't hit defensive assertion code:
+    raise AssertionError
+    raise NotImplementedError
+
+    # Don't complain if non-runnable code isn't run:
+    if 0:
+    if __name__ == ['"]__main__['"]:
+
+[expect_tests]
+expected_coverage_min = 100
diff --git a/utils/frozen_chromite/third_party/infra_libs/__init__.py b/utils/frozen_chromite/third_party/infra_libs/__init__.py
new file mode 100644
index 0000000..ccfee2f
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/__init__.py
@@ -0,0 +1,8 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from . import ts_mon  # Must be imported first so httplib2_utils can import it.
+
+from infra_libs.httplib2_utils import RetriableHttp, InstrumentedHttp, HttpMock
+from infra_libs.utils import temporary_directory
diff --git a/utils/frozen_chromite/third_party/infra_libs/httplib2_utils.py b/utils/frozen_chromite/third_party/infra_libs/httplib2_utils.py
new file mode 100644
index 0000000..034e1ec
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/httplib2_utils.py
@@ -0,0 +1,278 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import collections
+import copy
+import json
+import logging
+import re
+import socket
+import time
+
+import httplib2
+import oauth2client.client
+import six
+from six.moves import http_client as httplib
+
+from googleapiclient import errors
+from infra_libs.ts_mon.common import http_metrics
+
+# TODO(nxia): crbug.com/790760 upgrade oauth2client to 4.1.2.
+oauth2client_util_imported = False
+try:
+  from oauth2client import util
+  oauth2client_util_imported = True
+except ImportError:
+  pass
+
+
+# default timeout for http requests, in seconds
+DEFAULT_TIMEOUT = 30
+
+
+class AuthError(Exception):
+  pass
+
+
+class DelegateServiceAccountCredentials(
+    oauth2client.client.AssertionCredentials):
+  """Authorizes an HTTP client with a service account for which we are an actor.
+
+  This class uses the IAM API to sign a JWT with the private key of another
+  service account for which we have the "Service Account Actor" role.
+  """
+
+  MAX_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
+  _SIGN_BLOB_URL = 'https://iam.googleapis.com/v1/%s:signBlob'
+
+  def __init__(self, http, service_account_email, scopes, project='-'):
+    """
+    Args:
+      http: An httplib2.Http object that is authorized by another
+        oauth2client.client.OAuth2Credentials with credentials that have the
+        service account actor role on the service_account_email.
+      service_account_email: The email address of the service account for which
+        to obtain an access token.
+      scopes: The desired scopes for the token.
+      project: The cloud project to which service_account_email belongs.  The
+        default of '-' makes the IAM API figure it out for us.
+    """
+    if not oauth2client_util_imported:
+      raise AssertionError('Failed to import oauth2client.util.')
+    super(DelegateServiceAccountCredentials, self).__init__(None)
+    self._service_account_email = service_account_email
+    self._scopes = util.scopes_to_string(scopes)
+    self._http = http
+    self._name = 'projects/%s/serviceAccounts/%s' % (
+        project, service_account_email)
+
+  def sign_blob(self, blob):
+    response, content = self._http.request(
+        self._SIGN_BLOB_URL % self._name,
+        method='POST',
+        body=json.dumps({'bytesToSign': base64.b64encode(blob)}),
+        headers={'Content-Type': 'application/json'})
+    if response.status != 200:
+      raise AuthError('Failed to sign blob as %s: %d %s' % (
+          self._service_account_email, response.status, response.reason))
+
+    data = json.loads(content)
+    return data['keyId'], data['signature']
+
+  def _generate_assertion(self):
+    # This is copied with small modifications from
+    # oauth2client.service_account._ServiceAccountCredentials.
+
+    header = {
+        'alg': 'RS256',
+        'typ': 'JWT',
+    }
+
+    now = int(time.time())
+    payload = {
+        'aud': self.token_uri,
+        'scope': self._scopes,
+        'iat': now,
+        'exp': now + self.MAX_TOKEN_LIFETIME_SECS,
+        'iss': self._service_account_email,
+    }
+
+    assertion_input = (
+        self._urlsafe_b64encode(header) + b'.' +
+        self._urlsafe_b64encode(payload))
+
+    # Sign the assertion.
+    _, rsa_bytes = self.sign_blob(assertion_input)
+    signature = rsa_bytes.rstrip(b'=')
+
+    return assertion_input + b'.' + signature
+
+  def _urlsafe_b64encode(self, data):
+    # Copied verbatim from oauth2client.service_account.
+    return base64.urlsafe_b64encode(
+        json.dumps(data, separators=(',', ':')).encode('UTF-8')).rstrip(b'=')
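+
+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# It shows how DelegateServiceAccountCredentials is meant to be wired up, per
+# the class docstring: 'authorized_http' must already carry credentials that
+# hold the Service Account Actor role on the delegate account.  The email and
+# scope below are hypothetical; no network call is made until the returned
+# Http object is actually used.
+def _example_delegate_credentials(authorized_http):
+  credentials = DelegateServiceAccountCredentials(
+      authorized_http,
+      'delegate@example-project.iam.gserviceaccount.com',
+      ['https://www.googleapis.com/auth/userinfo.email'])
+  # Requests made through this Http object are signed as the delegate account.
+  return credentials.authorize(httplib2.Http())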
+
+
+class RetriableHttp(object):
+  """A httplib2.Http object that retries on failure."""
+
+  def __init__(self, http, max_tries=5, backoff_time=1,
+               retrying_statuses_fn=None):
+    """
+    Args:
+      http: an httplib2.Http instance
+      max_tries: a number of maximum tries
+      backoff_time: a number of seconds to sleep between retries
+      retrying_statuses_fn: a function that returns True if a given status
+                            should be retried
+    """
+    self._http = http
+    self._max_tries = max_tries
+    self._backoff_time = backoff_time
+    self._retrying_statuses_fn = retrying_statuses_fn or \
+                                 set(range(500,599)).__contains__
+
+  def request(self, uri, method='GET', body=None, *args, **kwargs):
+    for i in range(1, self._max_tries + 1):
+      try:
+        response, content = self._http.request(uri, method, body, *args,
+                                               **kwargs)
+
+        if self._retrying_statuses_fn(response.status):
+          logging.info('RetriableHttp: attempt %d receiving status %d, %s',
+                       i, response.status,
+                       'final attempt' if i == self._max_tries else \
+                       'will retry')
+        else:
+          break
+      except (ValueError, errors.Error,
+              socket.timeout, socket.error, socket.herror, socket.gaierror,
+              httplib2.HttpLib2Error) as error:
+        logging.info('RetriableHttp: attempt %d received exception: %s, %s',
+                     i, error, 'final attempt' if i == self._max_tries else \
+                     'will retry')
+        if i == self._max_tries:
+          raise
+      time.sleep(self._backoff_time)
+
+    return response, content
+
+  def __getattr__(self, name):
+    return getattr(self._http, name)
+
+  def __setattr__(self, name, value):
+    if name in ('request', '_http', '_max_tries', '_backoff_time',
+                '_retrying_statuses_fn'):
+      self.__dict__[name] = value
+    else:
+      setattr(self._http, name, value)
+
+
+class InstrumentedHttp(httplib2.Http):
+  """A httplib2.Http object that reports ts_mon metrics about its requests."""
+
+  def __init__(self, name, time_fn=time.time, timeout=DEFAULT_TIMEOUT,
+               **kwargs):
+    """
+    Args:
+      name: An identifier for the HTTP requests made by this object.
+      time_fn: Function returning the current time in seconds. Use for testing
+        purposes only.
+    """
+
+    super(InstrumentedHttp, self).__init__(timeout=timeout, **kwargs)
+    self.fields = {'name': name, 'client': 'httplib2'}
+    self.time_fn = time_fn
+
+  def _update_metrics(self, status, start_time):
+    status_fields = {'status': status}
+    status_fields.update(self.fields)
+    http_metrics.response_status.increment(fields=status_fields)
+
+    duration_msec = (self.time_fn() - start_time) * 1000
+    http_metrics.durations.add(duration_msec, fields=self.fields)
+
+  def request(self, uri, method="GET", body=None, *args, **kwargs):
+    request_bytes = 0
+    if body is not None:
+      request_bytes = len(body)
+    http_metrics.request_bytes.add(request_bytes, fields=self.fields)
+
+    start_time = self.time_fn()
+    try:
+      response, content = super(InstrumentedHttp, self).request(
+          uri, method, body, *args, **kwargs)
+    except socket.timeout:
+      self._update_metrics(http_metrics.STATUS_TIMEOUT, start_time)
+      raise
+    except (socket.error, socket.herror, socket.gaierror):
+      self._update_metrics(http_metrics.STATUS_ERROR, start_time)
+      raise
+    except (httplib.HTTPException, httplib2.HttpLib2Error) as ex:
+      status = http_metrics.STATUS_EXCEPTION
+      if 'Deadline exceeded while waiting for HTTP response' in str(ex):
+        # Raised on Appengine (gae_override/httplib.py).
+        status = http_metrics.STATUS_TIMEOUT
+      self._update_metrics(status, start_time)
+      raise
+    http_metrics.response_bytes.add(len(content), fields=self.fields)
+
+    self._update_metrics(response.status, start_time)
+
+    return response, content
+
+
+class HttpMock(object):
+  """Mock of httplib2.Http"""
+  HttpCall = collections.namedtuple('HttpCall', ('uri', 'method', 'body',
+                                                 'headers'))
+
+  def __init__(self, uris):
+    """
+    Args:
+      uris(list): list of (uri, headers, body) tuples. `uri` is a regexp for
+        matching the requested uri, (headers, body) gives the values returned
+        by the mock. Uris are tested in the order given by `uris`.
+        `headers` is a dict mapping headers to value. The 'status' key is
+        mandatory. `body` is a string.
+        Ex: [('.*', {'status': 200}, 'nicely done.')]
+    """
+    self._uris = []
+    self.requests_made = []
+
+    for value in uris:
+      if not isinstance(value, (list, tuple)) or len(value) != 3:
+        raise ValueError("'uris' must be a sequence of (uri, headers, body)")
+      uri, headers, body = value
+      compiled_uri = re.compile(uri)
+      if not isinstance(headers, dict):
+        raise TypeError("'headers' must be a dict")
+      if not 'status' in headers:
+        raise ValueError("'headers' must have 'status' as a key")
+
+      new_headers = copy.copy(headers)
+      new_headers['status'] = int(new_headers['status'])
+
+      if not isinstance(body, six.string_types):
+        raise TypeError("'body' must be a string, got %s" % type(body))
+      self._uris.append((compiled_uri, new_headers, body))
+
+  # pylint: disable=unused-argument
+  def request(self, uri,
+              method='GET',
+              body=None,
+              headers=None,
+              redirections=1,
+              connection_type=None):
+    self.requests_made.append(self.HttpCall(uri, method, body, headers))
+    headers = None
+    body = None
+    for candidate in self._uris:
+      if candidate[0].match(uri):
+        _, headers, body = candidate
+        break
+    if not headers:
+      raise AssertionError("Unexpected request to %s" % uri)
+    return httplib2.Response(headers), body
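+
+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# It exercises HttpMock together with RetriableHttp; the mocked URI table and
+# the example URL are hypothetical.  Because the mock answers with status 200,
+# RetriableHttp returns after the first attempt.
+def _example_retriable_http_with_mock():
+  mock = HttpMock([
+      (r'.*/healthz$', {'status': 200}, 'ok'),
+      (r'.*', {'status': 500}, 'boom'),
+  ])
+  http = RetriableHttp(mock, max_tries=3, backoff_time=0)
+  response, content = http.request('https://example.com/healthz')
+  assert response.status == 200 and content == 'ok'
+  # Every call is recorded on the underlying mock for later inspection.
+  assert mock.requests_made[0].uri == 'https://example.com/healthz'
+  return response, content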
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/.coveragerc b/utils/frozen_chromite/third_party/infra_libs/ts_mon/.coveragerc
new file mode 100644
index 0000000..4bcf905
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/.coveragerc
@@ -0,0 +1,27 @@
+# This file exists despite having .coveragerc in infra_libs because on Windows
+# bots, we only execute tests in certain modules of infra_libs (including this
+# one) and the latest version of coverage throws an exception when given a
+# non-existent config file.
+
+[run]
+include = ./packages/infra_libs/infra_libs/ts_mon/*
+
+[report]
+exclude_lines =
+    # Have to re-enable the standard pragma
+    pragma: no cover
+
+    # Don't complain about missing debug-only code:
+    def __repr__
+    if self\.debug
+
+    # Don't complain if tests don't hit defensive assertion code:
+    raise AssertionError
+    raise NotImplementedError
+
+    # Don't complain if non-runnable code isn't run:
+    if 0:
+    if __name__ == ['"]__main__['"]:
+
+[expect_tests]
+expected_coverage_min = 100
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/__init__.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/__init__.py
new file mode 100644
index 0000000..4a60d3d
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/__init__.py
@@ -0,0 +1,46 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from infra_libs.ts_mon.config import add_argparse_options
+from infra_libs.ts_mon.config import process_argparse_options
+
+from infra_libs.ts_mon.common.distribution import Distribution
+from infra_libs.ts_mon.common.distribution import FixedWidthBucketer
+from infra_libs.ts_mon.common.distribution import GeometricBucketer
+
+from infra_libs.ts_mon.common.errors import MonitoringError
+from infra_libs.ts_mon.common.errors import MonitoringDecreasingValueError
+from infra_libs.ts_mon.common.errors import MonitoringDuplicateRegistrationError
+from infra_libs.ts_mon.common.errors import MonitoringIncrementUnsetValueError
+from infra_libs.ts_mon.common.errors import MonitoringInvalidFieldTypeError
+from infra_libs.ts_mon.common.errors import MonitoringInvalidValueTypeError
+from infra_libs.ts_mon.common.errors import MonitoringTooManyFieldsError
+from infra_libs.ts_mon.common.errors import MonitoringNoConfiguredMonitorError
+from infra_libs.ts_mon.common.errors import MonitoringNoConfiguredTargetError
+
+from infra_libs.ts_mon.common.helpers import ScopedIncrementCounter
+from infra_libs.ts_mon.common.helpers import ScopedMeasureTime
+
+from infra_libs.ts_mon.common.interface import close
+from infra_libs.ts_mon.common.interface import flush
+from infra_libs.ts_mon.common.interface import register_global_metrics
+from infra_libs.ts_mon.common.interface import register_global_metrics_callback
+from infra_libs.ts_mon.common.interface import reset_for_unittest
+
+from infra_libs.ts_mon.common.metrics import BooleanField
+from infra_libs.ts_mon.common.metrics import IntegerField
+from infra_libs.ts_mon.common.metrics import StringField
+
+from infra_libs.ts_mon.common.metrics import BooleanMetric
+from infra_libs.ts_mon.common.metrics import CounterMetric
+from infra_libs.ts_mon.common.metrics import CumulativeDistributionMetric
+from infra_libs.ts_mon.common.metrics import CumulativeMetric
+from infra_libs.ts_mon.common.metrics import FloatMetric
+from infra_libs.ts_mon.common.metrics import GaugeMetric
+from infra_libs.ts_mon.common.metrics import MetricsDataUnits
+from infra_libs.ts_mon.common.metrics import NonCumulativeDistributionMetric
+from infra_libs.ts_mon.common.metrics import StringMetric
+
+from infra_libs.ts_mon.common.targets import TaskTarget
+from infra_libs.ts_mon.common.targets import DeviceTarget
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/__init__.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/__init__.py
new file mode 100644
index 0000000..50b23df
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/distribution.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/distribution.py
new file mode 100644
index 0000000..99b0fb1
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/distribution.py
@@ -0,0 +1,139 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import bisect
+import collections
+
+
+class _Bucketer(object):
+  """Bucketing function for histograms recorded by the Distribution class."""
+
+  def __init__(self, width, growth_factor, num_finite_buckets, scale=1.0):
+    """The bucket sizes are controlled by width and growth_factor, and the total
+    number of buckets is set by num_finite_buckets:
+
+    Args:
+      width: fixed size of each bucket (ignores |scale|).
+      growth_factor: if non-zero, each bucket grows by this multiplicative
+          factor relative to the previous one (see lower bound formula below).
+      num_finite_buckets: the number of finite buckets.  There are two
+          additional buckets - an underflow and an overflow bucket - whose
+          lower and upper bounds extend to -Infinity and +Infinity respectively.
+      scale: overall scale factor to apply to buckets, if using geometric
+          buckets.
+
+    Specify a width for fixed-size buckets or specify a growth_factor for bucket
+    sizes that follow a geometric progression.  Specifying both is not valid.
+
+    For fixed-size buckets::
+
+      The i'th bucket covers the interval [(i-1) * width, i * width),  where i
+      ranges from 1 to num_finite_buckets, inclusive:
+
+      bucket number                   lower bound      upper bound
+      i == 0 (underflow)              -inf             0
+      1 <= i <= num_buckets           (i-1) * width    i * width
+      i == num_buckets+1 (overflow)   (i-1) * width    +inf
+
+    For geometric buckets::
+
+      The i'th bucket covers the interval [factor^(i-1), factor^i) * scale
+      where i ranges from 1 to num_finite_buckets inclusive.
+
+      bucket number                   lower bound            upper bound
+      i == 0 (underflow)              -inf                   scale
+      1 <= i <= num_buckets           factor^(i-1) * scale   factor^i * scale
+      i == num_buckets+1 (overflow)   factor^(i-1) * scale   +inf
+    """
+
+    if num_finite_buckets < 0:
+      raise ValueError('num_finite_buckets must be >= 0 (was %d)' %
+          num_finite_buckets)
+    if width != 0 and growth_factor != 0:
+      raise ValueError('a Bucketer must be created with either a width or a '
+                       'growth factor, not both')
+
+    self.width = width
+    self.growth_factor = growth_factor
+    self.num_finite_buckets = num_finite_buckets
+    self.total_buckets = num_finite_buckets + 2
+    self.underflow_bucket = 0
+    self.overflow_bucket = self.total_buckets - 1
+    self.scale = scale
+
+    if width != 0:
+      self._lower_bounds = [float('-Inf')] + self._linear_bounds()
+    else:
+      self._lower_bounds = [float('-Inf')] + self._exponential_bounds()
+
+    # Sanity check the bucket lower bounds we created.
+    assert len(self._lower_bounds) == self.total_buckets
+    assert all(x < y for x, y in zip(
+        self._lower_bounds, self._lower_bounds[1:])), (
+        'bucket boundaries must be monotonically increasing')
+
+  def __eq__(self, other):
+    return (type(self) is type(other) and
+            self.width == other.width and
+            self.growth_factor == other.growth_factor and
+            self.num_finite_buckets == other.num_finite_buckets and
+            self.scale == other.scale)
+
+  def _linear_bounds(self):
+    return [self.width * i for i in range(self.num_finite_buckets + 1)]
+
+  def _exponential_bounds(self):
+    return [
+        self.scale * self.growth_factor ** i
+        for i in range(self.num_finite_buckets + 1)]
+
+  def bucket_for_value(self, value):
+    """Returns the index of the bucket that this value belongs to."""
+
+    # bisect.bisect_left is wrong because the buckets are of [lower, upper) form
+    return bisect.bisect(self._lower_bounds, value) - 1
+
+  def bucket_boundaries(self, bucket):
+    """Returns a tuple that is the [lower, upper) bounds of this bucket.
+
+    The lower bound of the first bucket is -Infinity, and the upper bound of the
+    last bucket is +Infinity.
+    """
+
+    if bucket < 0 or bucket >= self.total_buckets:
+      raise IndexError('bucket %d out of range' % bucket)
+    if bucket == self.total_buckets - 1:
+      return (self._lower_bounds[bucket], float('Inf'))
+    return (self._lower_bounds[bucket], self._lower_bounds[bucket + 1])
+
+
+def FixedWidthBucketer(width, num_finite_buckets=100):
+  """Convenience function that returns a fixed width Bucketer."""
+  return _Bucketer(width=width, growth_factor=0.0,
+      num_finite_buckets=num_finite_buckets)
+
+
+def GeometricBucketer(growth_factor=10**0.2, num_finite_buckets=100,
+                      scale=1.0):
+  """Convenience function that returns a geometric progression Bucketer."""
+  return _Bucketer(width=0, growth_factor=growth_factor,
+      num_finite_buckets=num_finite_buckets, scale=scale)
+
+
+class Distribution(object):
+  """Holds a histogram distribution.
+
+  Buckets are chosen for values by the provided Bucketer.
+  """
+
+  def __init__(self, bucketer):
+    self.bucketer = bucketer
+    self.sum = 0
+    self.count = 0
+    self.buckets = collections.defaultdict(int)
+
+  def add(self, value):
+    self.buckets[self.bucketer.bucket_for_value(value)] += 1
+    self.sum += value
+    self.count += 1
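+
+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# It shows how a Distribution assigns values to buckets using a fixed-width
+# Bucketer; the sample values are arbitrary.
+def _example_distribution():
+  dist = Distribution(FixedWidthBucketer(width=10, num_finite_buckets=10))
+  for value in [3, 7, 12, 250]:
+    dist.add(value)
+  # 3 and 7 fall in bucket 1 ([0, 10)), 12 in bucket 2 ([10, 20)), and 250 in
+  # the overflow bucket, since the last finite bucket ends at 100.
+  assert dist.count == 4 and dist.sum == 272
+  assert dist.buckets[1] == 2 and dist.buckets[2] == 1
+  assert dist.buckets[dist.bucketer.overflow_bucket] == 1
+  return dist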
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/errors.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/errors.py
new file mode 100644
index 0000000..cc55933
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/errors.py
@@ -0,0 +1,137 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Classes representing errors that can be raised by the monitoring library."""
+
+
+class MonitoringError(Exception):
+  """Base class for exceptions raised by this module."""
+
+
+class MonitoringDecreasingValueError(MonitoringError):
+  """Raised when setting a metric value that should increase but doesn't."""
+
+  def __init__(self, metric, old_value, new_value):
+    self.metric = metric
+    self.old_value = old_value
+    self.new_value = new_value
+
+  def __str__(self):
+    return ('Monotonically increasing metric "%s" was given value "%s", which '
+            'is not greater than or equal to "%s".' % (
+                self.metric, self.new_value, self.old_value))
+
+
+class MonitoringDuplicateRegistrationError(MonitoringError):
+  """Raised when trying to register a metric with the same name as another."""
+
+  def __init__(self, metric):
+    self.metric = metric
+
+  def __str__(self):
+    return 'Different metrics with the same name "%s" were both registered.' % (
+        self.metric)
+
+
+class MonitoringIncrementUnsetValueError(MonitoringError):
+  """Raised when trying to increment a metric which hasn't been set."""
+
+  def __init__(self, metric):
+    self.metric = metric
+
+  def __str__(self):
+    return 'Metric "%s" was incremented without first setting a value.' % (
+        self.metric)
+
+
+class MonitoringInvalidValueTypeError(MonitoringError):
+  """Raised when sending a metric value is not a valid type."""
+
+  def __init__(self, metric, value):
+    self.metric = metric
+    self.value = value
+
+  def __str__(self):
+    return 'Metric "%s" was given invalid value "%s" (%s).' % (
+        self.metric, self.value, type(self.value))
+
+
+class MonitoringInvalidFieldTypeError(MonitoringError):
+  """Raised when sending a metric with a field value of an invalid type."""
+
+  def __init__(self, metric, field, value):
+    self.metric = metric
+    self.field = field
+    self.value = value
+
+  def __str__(self):
+    return 'Metric "%s" was given field "%s" with invalid value "%s" (%s).' % (
+        self.metric, self.field, self.value, type(self.value))
+
+
+class MonitoringTooManyFieldsError(MonitoringError):
+  """Raised when sending a metric with more than 7 fields."""
+
+  def __init__(self, metric, fields):
+    self.metric = metric
+    self.fields = fields
+
+  def __str__(self):
+    return 'Metric "%s" was given too many (%d > 7) fields: %s.' % (
+        self.metric, len(self.fields), self.fields)
+
+
+class MonitoringNoConfiguredMonitorError(MonitoringError):
+  """Raised when sending a metric without configuring the global Monitor."""
+
+  def __init__(self, metric):
+    self.metric = metric
+
+  def __str__(self):
+    if self.metric is not None:
+      return 'Metric "%s" was sent before initializing the global Monitor.' % (
+          self.metric)
+    else:
+      return 'Metrics were sent before initializing the global Monitor.'
+
+
+class MonitoringNoConfiguredTargetError(MonitoringError):
+  """Raised when sending a metric with no global nor local Target."""
+
+  def __init__(self, metric):
+    self.metric = metric
+
+  def __str__(self):
+    if self.metric is not None:
+      return 'Metric "%s" was sent with no Target configured.' % (self.metric)
+    else:
+      return 'Metrics were sent with no Target configured.'
+
+
+class MonitoringFailedToFlushAllMetricsError(MonitoringError):
+  """Raised when some error is encountered in flushing specific metrics."""
+
+  def __init__(self, error_count):
+    self.error_count = error_count
+
+  def __str__(self):
+    return ('Failed to flush %d metrics. See tracebacks above' %
+            (self.error_count))
+
+
+class MetricDefinitionError(MonitoringError):
+  """Raised when a metric was defined incorrectly."""
+
+
+class WrongFieldsError(MonitoringError):
+  """Raised when a metric is given different fields to its definition."""
+
+  def __init__(self, metric_name, got, expected):
+    self.metric_name = metric_name
+    self.got = got
+    self.expected = expected
+
+  def __str__(self):
+    return 'Metric "%s" is defined with %s fields but was given %s' % (
+        self.metric_name, self.expected, self.got)
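+
+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# It shows the messages these exceptions render; the metric name and field
+# names are hypothetical.
+def _example_error_messages():
+  decreasing = MonitoringDecreasingValueError('example/requests', 10, 7)
+  wrong_fields = WrongFieldsError('example/requests',
+                                  got=['code'], expected=['status'])
+  # str(decreasing) mentions the previous value 10 and the rejected value 7;
+  # str(wrong_fields) reports the expected versus the given field names.
+  return str(decreasing), str(wrong_fields)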
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/helpers.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/helpers.py
new file mode 100644
index 0000000..e54ccd9
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/helpers.py
@@ -0,0 +1,155 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper classes that make it easier to instrument code for monitoring."""
+
+
+from infra_libs.ts_mon.common import metrics
+
+import time
+
+
+class ScopedIncrementCounter(object):
+  """Increment a counter when the wrapped code exits.
+
+  The counter will be given a 'status' label set to 'success' or 'failure'
+  depending on whether the wrapped code raised an exception.
+
+  Example:
+
+    mycounter = Counter('foo/stuff_done')
+    with ScopedIncrementCounter(mycounter):
+      DoStuff()
+
+  To set a custom status label and status value:
+
+    mycounter = Counter('foo/http_requests')
+    with ScopedIncrementCounter(mycounter, 'response_code') as sc:
+      status = MakeHttpRequest()
+      sc.set_status(status)  # This custom status now won't be overwritten if
+                             # the code later raises an exception.
+  """
+
+  def __init__(self, counter, label='status', success_value='success',
+               failure_value='failure'):
+    self.counter = counter
+    self.label = label
+    self.success_value = success_value
+    self.failure_value = failure_value
+    self.status = None
+
+  def set_failure(self):
+    self.set_status(self.failure_value)
+
+  def set_status(self, status):
+    self.status = status
+
+  def __enter__(self):
+    self.status = None
+    return self
+
+  def __exit__(self, exc_type, exc_value, traceback):
+    if self.status is None:
+      if exc_type is None:
+        self.status = self.success_value
+      else:
+        self.status = self.failure_value
+    self.counter.increment({self.label: self.status})
+
+
+class ScopedMeasureTime(object):
+  """Report durations metric with status when the wrapped code exits.
+
+  The metric must be a CumulativeDistributionMetric with a field for the status.
+  The status field will be set to 'success' or 'failure' depending on whether
+  the wrapped code raised an exception. The status field values can be customized
+  with constructor kwargs or by calling `set_status`.
+
+  A new instance of this class should be constructed each time it is used.
+
+  Example:
+
+    mymetric = CumulativeDistributionMetric(
+      'xxx/durations', 'duration of xxx op'
+      [StringField('status')],
+      bucketer=ts_mon.GeometricBucketer(10**0.04),
+      units=ts_mon.MetricsDataUnits.SECONDS)
+    with ScopedMeasureTime(mymetric):
+      DoStuff()
+
+  To set a custom label and status value:
+
+    mymetric = CumulativeDistributionMetric(
+      'xxx/durations', 'duration of xxx op',
+      [IntegerField('response_code')],
+      bucketer=ts_mon.GeometricBucketer(10**0.04),
+      units=ts_mon.MetricsDataUnits.MILLISECONDS)
+    with ScopedMeasureTime(mymetric, field='response_code') as sd:
+      sd.set_status(404)  # This custom status now won't be overwritten
+                          # even if exception is raised later.
+
+  To annotate the duration with some other fields, use extra_fields_values:
+
+    mymetric = CumulativeDistributionMetric(
+      'xxx/durations', 'duration of xxx op',
+      [StringField('status'),
+       StringField('type')],
+      bucketer=ts_mon.GeometricBucketer(10**0.04),
+      units=ts_mon.MetricsDataUnits.SECONDS)
+    with ScopedMeasureTime(mymetric, extra_fields_values={'type': 'normal'}):
+      DoStuff()
+  """
+
+  _UNITS_PER_SECOND = {
+      metrics.MetricsDataUnits.SECONDS: 1e0,
+      metrics.MetricsDataUnits.MILLISECONDS: 1e3,
+      metrics.MetricsDataUnits.MICROSECONDS: 1e6,
+      metrics.MetricsDataUnits.NANOSECONDS: 1e9,
+  }
+
+  def __init__(self, metric, field='status', success_value='success',
+               failure_value='failure', extra_fields_values=(),
+               time_fn=time.time):
+    assert isinstance(metric, metrics.CumulativeDistributionMetric)
+    assert sum(1 for spec in metric.field_spec if spec.name == field) == 1, (
+        'typo in field name `%s`?' % field)
+    assert metric.units in self._UNITS_PER_SECOND, (
+        'metric\'s units (%s) is not one of %s' %
+        (metric.units, self._UNITS_PER_SECOND.keys()))
+
+    self._metric = metric
+    self._field_values = dict(extra_fields_values)
+    assert field not in self._field_values
+    self._field_values[field] = None
+    self._field = field
+    self._units_per_second = self._UNITS_PER_SECOND[metric.units]
+    self._success_value = success_value
+    self._failure_value = failure_value
+    self._start_timestamp = None
+    self._time_fn = time_fn
+
+  def set_status(self, status):
+    assert self._start_timestamp is not None, (
+        'set_status must be called only inside with statement')
+    self._field_values[self._field] = status
+
+  def set_failure(self):
+    return self.set_status(self._failure_value)
+
+  def __enter__(self):
+    assert self._start_timestamp is None, ('re-use of ScopedMeasureTime '
+                                           'instances detected')
+    self._start_timestamp = self._time_fn()
+    return self
+
+  def __exit__(self, exc_type, exc_value, traceback):
+    elapsed_seconds = self._time_fn() - self._start_timestamp
+    if self._field_values[self._field] is None:
+      if exc_type is None:
+        self._field_values[self._field] = self._success_value
+      else:
+        self._field_values[self._field] = self._failure_value
+
+    self._metric.add(elapsed_seconds * self._units_per_second,
+                     self._field_values)
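+
+
+# Editor's note: illustrative sketch only, not part of the upstream module.
+# Instead of a real ts_mon CounterMetric (which needs configured ts_mon
+# state), it uses a minimal stand-in exposing increment(fields) to show what
+# ScopedIncrementCounter reports on success and on failure.
+class _FakeCounter(object):
+  def __init__(self):
+    self.calls = []
+
+  def increment(self, fields):
+    self.calls.append(fields)
+
+
+def _example_scoped_increment_counter():
+  counter = _FakeCounter()
+  with ScopedIncrementCounter(counter):
+    pass  # No exception, so the counter gets {'status': 'success'}.
+  try:
+    with ScopedIncrementCounter(counter):
+      raise ValueError('boom')
+  except ValueError:
+    pass  # The exception still propagates; the counter saw 'failure'.
+  assert counter.calls == [{'status': 'success'}, {'status': 'failure'}]
+  return counter.calls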
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/http_metrics.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/http_metrics.py
new file mode 100644
index 0000000..9c806f5
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/http_metrics.py
@@ -0,0 +1,102 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from infra_libs.ts_mon.common import distribution
+from infra_libs.ts_mon.common import metrics
+
+
+# Extending HTTP status codes to client-side errors and timeouts.
+STATUS_OK = 200
+STATUS_ERROR = 901
+STATUS_TIMEOUT = 902
+STATUS_EXCEPTION = 909
+
+
+# 90% of durations are in the range 11-1873ms.  Growth factor 10^0.06 puts that
+# range into 37 buckets.  Max finite bucket value is 12 minutes.
+_duration_bucketer = distribution.GeometricBucketer(10**0.06)
+
+# 90% of sizes are in the range 0.17-217014 bytes.  Growth factor 10^0.1 puts
+# that range into 54 buckets.  Max finite bucket value is 6.3GB.
+_size_bucketer = distribution.GeometricBucketer(10**0.1)
+
+
+request_bytes = metrics.CumulativeDistributionMetric('http/request_bytes',
+    'Bytes sent per http request (body only).', [
+        metrics.StringField('name'),
+        metrics.StringField('client'),
+    ],
+    bucketer=_size_bucketer)
+response_bytes = metrics.CumulativeDistributionMetric('http/response_bytes',
+    'Bytes received per http request (content only).', [
+        metrics.StringField('name'),
+        metrics.StringField('client'),
+    ],
+    bucketer=_size_bucketer)
+durations = metrics.CumulativeDistributionMetric('http/durations',
+    'Time elapsed between sending a request and getting a'
+    ' response (including parsing) in milliseconds.', [
+        metrics.StringField('name'),
+        metrics.StringField('client'),
+    ],
+    bucketer=_duration_bucketer)
+response_status = metrics.CounterMetric('http/response_status',
+    'Number of responses received by HTTP status code.', [
+        metrics.IntegerField('status'),
+        metrics.StringField('name'),
+        metrics.StringField('client'),
+    ])
+
+
+server_request_bytes = metrics.CumulativeDistributionMetric(
+    'http/server_request_bytes',
+    'Bytes received per http request (body only).', [
+        metrics.IntegerField('status'),
+        metrics.StringField('name'),
+        metrics.BooleanField('is_robot'),
+    ],
+    bucketer=_size_bucketer)
+server_response_bytes = metrics.CumulativeDistributionMetric(
+    'http/server_response_bytes',
+    'Bytes sent per http request (content only).', [
+        metrics.IntegerField('status'),
+        metrics.StringField('name'),
+        metrics.BooleanField('is_robot'),
+    ],
+    bucketer=_size_bucketer)
+server_durations = metrics.CumulativeDistributionMetric('http/server_durations',
+    'Time elapsed between receiving a request and sending a'
+    ' response (including parsing) in milliseconds.', [
+        metrics.IntegerField('status'),
+        metrics.StringField('name'),
+        metrics.BooleanField('is_robot'),
+    ],
+    bucketer=_duration_bucketer)
+server_response_status = metrics.CounterMetric('http/server_response_status',
+    'Number of responses sent by HTTP status code.', [
+        metrics.IntegerField('status'),
+        metrics.StringField('name'),
+        metrics.BooleanField('is_robot'),
+    ])
+
+
+def update_http_server_metrics(endpoint_name, response_status_code, elapsed_ms,
+                               request_size=None, response_size=None,
+                               user_agent=None):
+  fields = {'status': response_status_code, 'name': endpoint_name,
+            'is_robot': False}
+  if user_agent is not None:
+    # We must not log user agents, but we can store whether or not the
+    # user agent string indicates that the requester was a Google bot.
+    fields['is_robot'] = (
+        'GoogleBot' in user_agent or
+        'GoogleSecurityScanner' in user_agent or
+        user_agent == 'B3M/prober')
+
+  server_durations.add(elapsed_ms, fields=fields)
+  server_response_status.increment(fields=fields)
+  if request_size is not None:
+    server_request_bytes.add(request_size, fields=fields)
+  if response_size is not None:
+    server_response_bytes.add(response_size, fields=fields)
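A minimal usage sketch for update_http_server_metrics() above, assuming ts_mon has already been configured with a monitor and target; the endpoint name, sizes and user agent below are illustrative placeholders:

    from infra_libs.ts_mon.common import http_metrics

    # Record one handled request: latency and payload sizes feed the
    # cumulative distributions, and the status code feeds the counter.
    http_metrics.update_http_server_metrics(
        endpoint_name='/api/build',      # hypothetical endpoint name
        response_status_code=200,
        elapsed_ms=42.0,
        request_size=512,
        response_size=2048,
        user_agent='GoogleBot/2.1')      # matched as is_robot=True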
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/interface.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/interface.py
new file mode 100644
index 0000000..6aeb21c
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/interface.py
@@ -0,0 +1,300 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Classes representing the monitoring interface for tasks or devices.
+
+Usage:
+  import argparse
+  from infra_libs import ts_mon
+
+  p = argparse.ArgumentParser()
+  ts_mon.add_argparse_options(p)
+  args = p.parse_args()  # Must contain info for Monitor (and optionally Target)
+  ts_mon.process_argparse_options(args)
+
+  # Will use the default Target set up via command line args:
+  m = ts_mon.BooleanMetric('/my/metric/name', fields={'foo': 1, 'bar': 'baz'})
+  m.set(True)
+
+  # Use a custom Target:
+  t = ts_mon.TaskTarget('service', 'job', 'region', 'host')  # or DeviceTarget
+  m2 = ts_mon.GaugeMetric('/my/metric/name2', fields={'asdf': 'qwer'}, target=t)
+  m2.set(5)
+
+Library usage:
+  from infra_libs.ts_mon import CounterMetric
+  # No need to set up Monitor or Target, assume calling code did that.
+  c = CounterMetric('/my/counter', fields={'source': 'mylibrary'})
+  c.set(0)
+  for x in range(100):
+    c.increment()
+"""
+
+import datetime
+import logging
+import random
+import threading
+import time
+
+from infra_libs.ts_mon.common import errors
+from infra_libs.ts_mon.common import metric_store
+from infra_libs.ts_mon.protos import metrics_pb2
+
+# The maximum number of MetricsData messages to include in each HTTP request.
+# MetricsCollections larger than this will be split into multiple requests.
+METRICS_DATA_LENGTH_LIMIT = 500
+
+
+class State(object):
+  """Package-level state is stored here so that it is easily accessible.
+
+  Configuration is kept in this one object at the global level so that all
+  libraries in use by the same tool or service can all take advantage of the
+  same configuration.
+  """
+
+  def __init__(self, store_ctor=None, target=None):
+    """Optional arguments are for unit tests."""
+    if store_ctor is None:  # pragma: no branch
+      store_ctor = metric_store.InProcessMetricStore
+    # The Monitor object that will be used to send all metrics.
+    self.global_monitor = None
+    # The Target object that will be paired with all metrics that don't supply
+    # their own.
+    self.target = target
+    # The flush mode being used to control when metrics are pushed.
+    self.flush_mode = None
+    # A predicate to determine if metrics should be sent.
+    self.flush_enabled_fn = lambda: True
+    # The background thread that flushes metrics every
+    # --ts-mon-flush-interval-secs seconds.  May be None if
+    # --ts-mon-flush != 'auto' or --ts-mon-flush-interval-secs == 0.
+    self.flush_thread = None
+    # All metrics created by this application.
+    self.metrics = {}
+    # The MetricStore object that holds the actual metric values.
+    self.store = store_ctor(self)
+    # Cached time of the last flush. Useful mostly in AppEngine apps.
+    self.last_flushed = datetime.datetime.utcfromtimestamp(0)
+    # Metric name prefix
+    self.metric_name_prefix = '/chrome/infra/'
+    # Metrics registered with register_global_metrics.  Keyed by metric name.
+    self.global_metrics = {}
+    # Callbacks registered with register_global_metrics_callback.  Keyed by the
+    # arbitrary string provided by the user.  Called before each flush.
+    self.global_metrics_callbacks = {}
+    # Whether to call invoke_global_callbacks() on every flush().  Set to False
+    # on Appengine because it does its own thing.
+    self.invoke_global_callbacks_on_flush = True
+
+  def reset_for_unittest(self):
+    self.metrics = {}
+    self.global_metrics = {}
+    self.global_metrics_callbacks = {}
+    self.invoke_global_callbacks_on_flush = True
+    self.last_flushed = datetime.datetime.utcfromtimestamp(0)
+    self.store.reset_for_unittest()
+
+state = State()
+
+
+def flush():
+  """Send all metrics that are registered in the application."""
+  if not state.flush_enabled_fn():
+    logging.debug('ts_mon: sending metrics is disabled.')
+    return
+
+  if not state.global_monitor:
+    raise errors.MonitoringNoConfiguredMonitorError(None)
+  if not state.target:
+    raise errors.MonitoringNoConfiguredTargetError(None)
+
+  if state.invoke_global_callbacks_on_flush:
+    invoke_global_callbacks()
+
+  rpcs = []
+  for proto in _generate_proto():
+    rpcs.append(state.global_monitor.send(proto))
+  for rpc in rpcs:
+    if rpc is not None:
+      state.global_monitor.wait(rpc)
+  state.last_flushed = datetime.datetime.utcnow()
+
+
+def _generate_proto():
+  """Generate MetricsPayload for global_monitor.send()."""
+  proto = metrics_pb2.MetricsPayload()
+
+  # Key: Target, value: MetricsCollection.
+  collections = {}
+
+  # Key: (Target, metric name) tuple, value: MetricsDataSet.
+  data_sets = {}
+
+  count = 0
+  for (target, metric, start_time, end_time, fields_values
+       ) in state.store.get_all():
+    for fields, value in fields_values.items():
+      if count >= METRICS_DATA_LENGTH_LIMIT:
+        yield proto
+        proto = metrics_pb2.MetricsPayload()
+        collections.clear()
+        data_sets.clear()
+        count = 0
+
+      if target not in collections:
+        collections[target] = proto.metrics_collection.add()
+        target.populate_target_pb(collections[target])
+      collection = collections[target]
+
+      key = (target, metric.name)
+      new_data_set = None
+      if key not in data_sets:
+        new_data_set = metrics_pb2.MetricsDataSet()
+        metric.populate_data_set(new_data_set)
+
+      data = metrics_pb2.MetricsData()
+      metric.populate_data(data, start_time, end_time, fields, value)
+
+      # All required data protos have been successfully populated. Now we can
+      # insert them into the serialized proto and bookkeeping data structures.
+      if new_data_set is not None:
+        collection.metrics_data_set.add().CopyFrom(new_data_set)
+        data_sets[key] = collection.metrics_data_set[-1]
+      data_sets[key].data.add().CopyFrom(data)
+      count += 1
+
+  if count > 0:
+    yield proto
+
+
+def register(metric):
+  """Adds the metric to the list of metrics sent by flush().
+
+  This is called automatically by Metric's constructor.
+  """
+  # If someone is registering the same metric object twice, that's okay, but
+  # registering two different metric objects with the same metric name is not.
+  for m in state.metrics.values():
+    if metric == m:
+      state.metrics[metric.name] = metric
+      return
+  if metric.name in state.metrics:
+    raise errors.MonitoringDuplicateRegistrationError(metric.name)
+
+  state.metrics[metric.name] = metric
+
+
+def unregister(metric):
+  """Removes the metric from the list of metrics sent by flush()."""
+  del state.metrics[metric.name]
+
+
+def close():
+  """Stops any background threads and waits for them to exit."""
+  if state.flush_thread is not None:
+    state.flush_thread.stop()
+
+
+def reset_for_unittest(disable=False):
+  state.reset_for_unittest()
+  state.flush_enabled_fn = lambda: not disable
+
+
+def register_global_metrics(metrics):
+  """Declare metrics as global.
+
+  Outside Appengine this has no effect.
+
+  On Appengine, registering a metric as "global" simply means it will be reset
+  every time the metric is sent. This allows any instance to send such a metric
+  to a shared stream, e.g. by overriding target fields like task_num (instance
+  ID), host_name (version) or job_name (module name).
+
+  There is no "unregister". Multiple calls add up. It only needs to be called
+  once, similar to gae_ts_mon.initialize().
+
+  Args:
+    metrics (iterable): a collection of Metric objects.
+  """
+  state.global_metrics.update({m.name: m for m in metrics})
+
+
+def register_global_metrics_callback(name, callback):
+  """Register a named function to compute global metrics values.
+
+  There can only be one callback for a given name. Setting another callback with
+  the same name will override the previous one. To disable a callback, set its
+  function to None.
+
+  Args:
+    name (string): name of the callback.
+    callback (function): this function will be called without arguments every
+      minute.  On Appengine it is called once for the whole application from the
+      gae_ts_mon cron job. It is intended to set the values of the global
+      metrics.
+  """
+  if not callback:
+    if name in state.global_metrics_callbacks:
+      del state.global_metrics_callbacks[name]
+  else:
+    state.global_metrics_callbacks[name] = callback
+
+
+def invoke_global_callbacks():
+  for name, callback in state.global_metrics_callbacks.items():
+    logging.debug('Invoking callback %s', name)
+    try:
+      callback()
+    except Exception:
+      logging.exception('Monitoring global callback "%s" failed', name)
+
+
+class _FlushThread(threading.Thread):
+  """Background thread that flushes metrics on an interval."""
+
+  def __init__(self, interval_secs, stop_event=None):
+    super(_FlushThread, self).__init__(name='ts_mon')
+
+    if stop_event is None:
+      stop_event = threading.Event()
+
+    self.daemon = True
+    self.interval_secs = interval_secs
+    self.stop_event = stop_event
+
+  def _flush_and_log_exceptions(self):
+    try:
+      flush()
+    except Exception:
+      logging.exception('Automatic monitoring flush failed.')
+
+  def run(self):
+    # Jitter the first interval so tasks started at the same time (say, by cron)
+    # on different machines don't all send metrics simultaneously.
+    next_timeout = random.uniform(self.interval_secs / 2.0, self.interval_secs)
+
+    while True:
+      if self.stop_event.wait(next_timeout):
+        return
+
+      # Try to flush every N seconds exactly so rate calculations are more
+      # consistent.
+      start = time.time()
+      self._flush_and_log_exceptions()
+      flush_duration = time.time() - start
+      next_timeout = self.interval_secs - flush_duration
+
+      if next_timeout < 0:
+        logging.warning(
+            'Last monitoring flush took %f seconds (longer than '
+            '--ts-mon-flush-interval-secs = %f seconds)',
+            flush_duration, self.interval_secs)
+        next_timeout = 0
+
+  def stop(self):
+    """Stops the background thread and performs a final flush."""
+
+    self.stop_event.set()
+    self.join()
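A minimal sketch of the register-and-flush flow described in the module docstring above, wiring the package-level state by hand instead of via process_argparse_options; DebugMonitor and TaskTarget are used here only as local stand-ins:

    from infra_libs.ts_mon.common import interface, metrics, monitors, targets

    # Normally done by config.process_argparse_options().
    interface.state.global_monitor = monitors.DebugMonitor()
    interface.state.target = targets.TaskTarget(
        'myservice', 'myjob', 'us-central1', 'localhost')

    requests = metrics.CounterMetric(
        'demo/requests', 'Requests handled.', [metrics.StringField('status')])
    requests.increment(fields={'status': 'ok'})

    # Serializes everything registered so far and hands it to the monitor.
    interface.flush()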
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/metric_store.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/metric_store.py
new file mode 100644
index 0000000..b08b649
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/metric_store.py
@@ -0,0 +1,232 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import copy
+import itertools
+import threading
+import time
+
+from infra_libs.ts_mon.common import errors
+
+
+def default_modify_fn(name):
+  def _modify_fn(value, delta):
+    if delta < 0:
+      raise errors.MonitoringDecreasingValueError(name, None, delta)
+    return value + delta
+  return _modify_fn
+
+
+class MetricStore(object):
+  """A place to store values for each metric.
+
+  Several methods take "a normalized field tuple".  This is a tuple of
+  (key, value) tuples sorted by key.  (The reason this is given as a tuple
+  instead of a dict is that tuples are hashable and can be used as dict keys,
+  while dicts cannot.)
+
+  The MetricStore is also responsible for keeping the start_time of each metric.
+  This is what goes into the start_timestamp_us field in the MetricsData proto
+  for cumulative metrics and distributions, and helps Monarch identify when a
+  counter was reset.  This is the MetricStore's job because an implementation
+  might share counter values across multiple instances of a task (like on
+  Appengine), so the start time must be associated with that value so that it
+  can be reset for all tasks at once when the value is reset.
+
+  External metric stores (like those backed by memcache) may be cleared (either
+  wholly or partially) at any time.  When this happens the MetricStore *must*
+  generate a new start_time for all the affected metrics.
+
+  Metrics can specify their own explicit start time if they are mirroring the
+  value of some external counter that started counting at a known time.
+
+  Otherwise the MetricStore's time_fn (defaults to time.time()) is called the
+  first time a metric is set or incremented, or after it is cleared externally.
+  """
+
+  def __init__(self, state, time_fn=None):
+    self._state = state
+    self._time_fn = time_fn or time.time
+
+  def get(self, name, fields, target_fields, default=None):
+    """Fetches the current value for the metric.
+
+    Args:
+      name (string): the metric's name.
+      fields (tuple): a normalized field tuple.
+      target_fields (dict or None): target fields to override.
+      default: the value to return if the metric has no value of this set of
+          field values.
+    """
+    raise NotImplementedError
+
+  def get_all(self):
+    """Returns an iterator over all the metrics present in the store.
+
+    The iterator yields 5-tuples:
+      (target, metric, start_time, end_time, field_values)
+    """
+    raise NotImplementedError
+
+  def set(self, name, fields, target_fields, value, enforce_ge=False):
+    """Sets the metric's value.
+
+    Args:
+      name: the metric's name.
+      fields: a normalized field tuple.
+      target_fields (dict or None): target fields to override.
+      value: the new value for the metric.
+      enforce_ge: if this is True, raise an exception if the new value is
+          less than the old value.
+
+    Raises:
+      MonitoringDecreasingValueError: if enforce_ge is True and the new value is
+          smaller than the old value.
+    """
+    raise NotImplementedError
+
+  def incr(self, name, fields, target_fields, delta, modify_fn=None):
+    """Increments the metric's value.
+
+    Args:
+      name: the metric's name.
+      fields: a normalized field tuple.
+      target_fields (dict or None): target fields to override.
+      delta: how much to increment the value by.
+      modify_fn: this function is called with the original value and the delta
+          as its arguments and is expected to return the new value.  The
+          function must be idempotent as it may be called multiple times.
+    """
+    raise NotImplementedError
+
+  def reset_for_unittest(self, name=None):
+    """Clears the values metrics.  Useful in unittests.
+
+    Args:
+      name: the name of an individual metric to reset, or if None resets all
+        metrics.
+    """
+    raise NotImplementedError
+
+  def _start_time(self, name):
+    if name in self._state.metrics:
+      ret = self._state.metrics[name].start_time
+      if ret is not None:
+        return ret
+
+    return self._time_fn()
+
+
+class _TargetFieldsValues(object):
+  """Holds all values for a single metric.
+
+  Values are keyed by metric fields and target fields (which override the
+  default target fields configured globally for the process).
+  """
+
+  def __init__(self, start_time):
+    self.start_time = start_time
+
+    # {normalized_target_fields: {normalized_metric_fields: value}}
+    self._values = collections.defaultdict(dict)
+
+  def _get_target_values(self, target_fields):
+    # Normalize the target fields by converting them into a hashable tuple.
+    if not target_fields:
+      target_fields = {}
+    key = tuple(sorted(target_fields.items()))
+
+    return self._values[key]
+
+  def get_value(self, fields, target_fields, default=None):
+    return self._get_target_values(target_fields).get(
+        fields, default)
+
+  def set_value(self, fields, target_fields, value):
+    self._get_target_values(target_fields)[fields] = value
+
+  def iter_targets(self, default_target):
+    for target_fields, fields_values in self._values.items():
+      if target_fields:
+        target = copy.copy(default_target)
+        target.update({k: v for k, v in target_fields})
+      else:
+        target = default_target
+      yield target, fields_values
+
+  def __deepcopy__(self, memo_dict):
+    ret = _TargetFieldsValues(self.start_time)
+    ret._values = copy.deepcopy(self._values, memo_dict)
+    return ret
+
+
+class InProcessMetricStore(MetricStore):
+  """A thread-safe metric store that keeps values in memory."""
+
+  def __init__(self, state, time_fn=None):
+    super(InProcessMetricStore, self).__init__(state, time_fn=time_fn)
+
+    self._values = {}
+    self._thread_lock = threading.Lock()
+
+  def _entry(self, name):
+    if name not in self._values:
+      self._reset(name)
+
+    return self._values[name]
+
+  def get(self, name, fields, target_fields, default=None):
+    return self._entry(name).get_value(fields, target_fields, default)
+
+  def iter_field_values(self, name):
+    return itertools.chain.from_iterable(
+        x.items() for _, x
+        in self._entry(name).iter_targets(self._state.target))
+
+  def get_all(self):
+    # Make a copy of the metric values in case another thread (or this
+    # generator's consumer) modifies them while we're iterating.
+    with self._thread_lock:
+      values = copy.deepcopy(self._values)
+    end_time = self._time_fn()
+
+    for name, metric_values in values.items():
+      if name not in self._state.metrics:
+        continue
+      start_time = metric_values.start_time
+      for target, fields_values in metric_values.iter_targets(
+          self._state.target):
+        yield (target, self._state.metrics[name], start_time, end_time,
+               fields_values)
+
+  def set(self, name, fields, target_fields, value, enforce_ge=False):
+    with self._thread_lock:
+      if enforce_ge:
+        old_value = self._entry(name).get_value(fields, target_fields, 0)
+        if value < old_value:
+          raise errors.MonitoringDecreasingValueError(name, old_value, value)
+
+      self._entry(name).set_value(fields, target_fields, value)
+
+  def incr(self, name, fields, target_fields, delta, modify_fn=None):
+    if delta < 0:
+      raise errors.MonitoringDecreasingValueError(name, None, delta)
+
+    if modify_fn is None:
+      modify_fn = default_modify_fn(name)
+
+    with self._thread_lock:
+      self._entry(name).set_value(fields, target_fields, modify_fn(
+          self.get(name, fields, target_fields, 0), delta))
+
+  def reset_for_unittest(self, name=None):
+    if name is not None:
+      self._reset(name)
+    else:
+      for name in self._values.keys():
+        self._reset(name)
+
+  def _reset(self, name):
+    self._values[name] = _TargetFieldsValues(self._start_time(name))
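A small sketch of the "normalized field tuple" convention described in the MetricStore docstring, exercised directly against InProcessMetricStore; in normal use the store is only driven indirectly through Metric objects:

    import time

    from infra_libs.ts_mon.common import interface, metric_store

    store = metric_store.InProcessMetricStore(interface.state, time_fn=time.time)

    # Fields are a tuple of (key, value) pairs sorted by key, so they are
    # hashable and can serve as dict keys inside the store.
    fields = (('method', 'GET'), ('status', 200))
    store.incr('demo/hits', fields, None, 1)
    print(store.get('demo/hits', fields, None, default=0))  # -> 1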
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/metrics.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/metrics.py
new file mode 100644
index 0000000..00d0c63
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/metrics.py
@@ -0,0 +1,561 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Classes representing individual metrics that can be sent."""
+
+import re
+
+import six
+
+from infra_libs.ts_mon.protos import metrics_pb2
+
+from infra_libs.ts_mon.common import distribution
+from infra_libs.ts_mon.common import errors
+from infra_libs.ts_mon.common import interface
+
+
+MICROSECONDS_PER_SECOND = 1000000
+
+
+class Field(object):
+  FIELD_NAME_PATTERN = re.compile(r'[A-Za-z_][A-Za-z0-9_]*')
+
+  allowed_python_types = None
+  type_enum = None
+  field_name = None
+
+  def __init__(self, name):
+    if not self.FIELD_NAME_PATTERN.match(name):
+      raise errors.MetricDefinitionError(
+          'Invalid metric field name "%s" - must match the regex "%s"' % (
+                name, self.FIELD_NAME_PATTERN.pattern))
+
+    self.name = name
+
+  def __eq__(self, other):
+    return (type(self) == type(other) and
+            self.__dict__ == other.__dict__)
+
+  def validate_value(self, metric_name, value):
+    if not isinstance(value, self.allowed_python_types):
+      raise errors.MonitoringInvalidFieldTypeError(
+          metric_name, self.name, value)
+
+  def populate_proto(self, proto, value):
+    setattr(proto, self.field_name, value)
+
+
+class StringField(Field):
+  allowed_python_types = six.string_types
+  type_enum = metrics_pb2.MetricsDataSet.MetricFieldDescriptor.STRING
+  field_name = 'string_value'
+
+
+class IntegerField(Field):
+  allowed_python_types = six.integer_types
+  type_enum = metrics_pb2.MetricsDataSet.MetricFieldDescriptor.INT64
+  field_name = 'int64_value'
+
+
+class BooleanField(Field):
+  allowed_python_types = bool
+  type_enum = metrics_pb2.MetricsDataSet.MetricFieldDescriptor.BOOL
+  field_name = 'bool_value'
+
+
+class Metric(object):
+  """Abstract base class for a metric.
+
+  A Metric is an attribute that may be monitored across many targets. Examples
+  include disk usage or the number of requests a server has received. A single
+  process may keep track of many metrics.
+
+  Note that Metric objects may be initialized at any time (for example, at the
+  top of a library), but cannot be sent until the underlying Monitor object
+  has been set up (usually by the top-level process parsing the command line).
+
+  A Metric can actually store multiple values that are identified by a set of
+  fields (which are themselves key-value pairs).  Fields can be passed to the
+  set() or increment() methods to modify a particular value, or passed to the
+  constructor in which case they will be used as the defaults for this Metric.
+
+  The unit of measurement for Metric data should be specified with
+  MetricsDataUnits when a Metric object is created:
+  e.g. MetricsDataUnits.SECONDS or MetricsDataUnits.BYTES.
+  See `MetricsDataUnits` class for a full list of units.
+
+  Do not directly instantiate an object of this class.
+  Use the concrete child classes instead:
+  * StringMetric for metrics with string values
+  * BooleanMetric for metrics with boolean values
+  * CounterMetric for metrics with monotonically increasing integer values
+  * GaugeMetric for metrics with arbitrarily varying integer values
+  * CumulativeMetric for metrics with monotonically increasing float values
+  * FloatMetric for metrics with arbitrarily varying float values
+
+  See http://go/inframon-doc for help designing and using your metrics.
+  """
+
+  def __init__(self, name, description, field_spec, units=None):
+    """Create an instance of a Metric.
+
+    Args:
+      name (str): the file-like name of this metric
+      description (string): help string for the metric. Should be enough to
+                            know what the metric is about.
+      field_spec (list): a list of Field subclasses to define the fields that
+                         are allowed on this metric.  Pass a list of either
+                         StringField, IntegerField or BooleanField here.
+      units (string): the unit used to measure data for given metric. Some
+                      common units are pre-defined in the MetricsDataUnits
+                      class.
+    """
+    field_spec = field_spec or []
+
+    self._name = name.lstrip('/')
+
+    if not isinstance(description, six.string_types):
+      raise errors.MetricDefinitionError('Metric description must be a string')
+    if not description:
+      raise errors.MetricDefinitionError('Metric must have a description')
+    if (not isinstance(field_spec, (list, tuple)) or
+        any(not isinstance(x, Field) for x in field_spec)):
+      raise errors.MetricDefinitionError(
+          'Metric constructor takes a list of Fields, or None')
+    if len(field_spec) > 7:
+      raise errors.MonitoringTooManyFieldsError(self._name, field_spec)
+
+    self._start_time = None
+    self._field_spec = field_spec
+    self._sorted_field_names = sorted(x.name for x in field_spec)
+    self._description = description
+    self._units = units
+
+    interface.register(self)
+
+  def __eq__(self, other):
+    return (type(self) == type(other)
+            and self.__dict__ == other.__dict__)
+
+  @property
+  def field_spec(self):
+    return list(self._field_spec)
+
+  @property
+  def name(self):
+    return self._name
+
+  @property
+  def start_time(self):
+    return self._start_time
+
+  @property
+  def units(self):
+    return self._units
+
+  def is_cumulative(self):
+    raise NotImplementedError()
+
+  def unregister(self):
+    interface.unregister(self)
+
+  def populate_data_set(self, data_set):
+    """Populate MetricsDataSet."""
+    data_set.metric_name = '%s%s' % (interface.state.metric_name_prefix,
+                                     self._name)
+    data_set.description = self._description or ''
+    if self._units is not None:
+      data_set.annotations.unit = self._units
+
+    if self.is_cumulative():
+      data_set.stream_kind = metrics_pb2.CUMULATIVE
+    else:
+      data_set.stream_kind = metrics_pb2.GAUGE
+
+    self._populate_value_type(data_set)
+    self._populate_field_descriptors(data_set)
+
+  def populate_data(self, data, start_time, end_time, fields, value):
+    """Populate a new metrics_pb2.MetricsData.
+
+    Args:
+      data (metrics_pb2.MetricsData): protocol buffer into
+        which to populate the current metric values.
+      start_time (int): timestamp in seconds since the UNIX epoch.
+    """
+    data.start_timestamp.seconds = int(start_time)
+    data.end_timestamp.seconds = int(end_time)
+
+    self._populate_fields(data, fields)
+    self._populate_value(data, value)
+
+  def _populate_field_descriptors(self, data_set):
+    """Populate `field_descriptor` in MetricsDataSet.
+
+    Args:
+      data_set (metrics_pb2.MetricsDataSet): a data set protobuf to populate
+    """
+    for spec in self._field_spec:
+      descriptor = data_set.field_descriptor.add()
+      descriptor.name = spec.name
+      descriptor.field_type = spec.type_enum
+
+  def _populate_fields(self, data, field_values):
+    """Fill in the fields attribute of a metric protocol buffer.
+
+    Args:
+      data (metrics_pb2.MetricsData): a metrics protobuf to populate
+      field_values (tuple): field values
+    """
+    for spec, value in zip(self._field_spec, field_values):
+      field = data.field.add()
+      field.name = spec.name
+      spec.populate_proto(field, value)
+
+  def _validate_fields(self, fields):
+    """Checks the correct number and types of field values were provided.
+
+    Args:
+      fields (dict): A dict of field values given by the user, or None.
+
+    Returns:
+      fields' values as a tuple, in the same order as the field_spec.
+
+    Raises:
+      WrongFieldsError: if you provide a different number of fields to those
+        the metric was defined with.
+      MonitoringInvalidFieldTypeError: if the field value was the wrong type for
+        the field spec.
+    """
+    fields = fields or {}
+
+    if not isinstance(fields, dict):
+      raise ValueError('fields should be a dict, got %r (%s)' % (
+          fields, type(fields)))
+
+    if sorted(fields) != self._sorted_field_names:
+      raise errors.WrongFieldsError(
+          self.name, fields.keys(), self._sorted_field_names)
+
+    for spec in self._field_spec:
+      spec.validate_value(self.name, fields[spec.name])
+
+    return tuple(fields[spec.name] for spec in self._field_spec)
+
+  def _populate_value(self, data, value):
+    """Fill in the the data values of a metric protocol buffer.
+
+    Args:
+      data (metrics_pb2.MetricsData): a metrics protobuf to populate
+      value (see concrete class): the value of the metric to be set
+    """
+    raise NotImplementedError()
+
+  def _populate_value_type(self, data_set):
+    """Fill in the the data values of a metric protocol buffer.
+
+    Args:
+      data_set (metrics_pb2.MetricsDataSet): a MetricsDataSet protobuf to
+          populate
+    """
+    raise NotImplementedError()
+
+  def set(self, value, fields=None, target_fields=None):
+    """Set a new value for this metric. Results in sending a new value.
+
+    The subclass should do appropriate type checking on value and then call
+    self._set_and_send_value.
+
+    Args:
+      value (see concrete class): the value of the metric to be set
+      fields (dict): metric field values
+      target_fields (dict): overwrite some of the default target fields
+    """
+    raise NotImplementedError()
+
+  def get(self, fields=None, target_fields=None):
+    """Returns the current value for this metric.
+
+    Subclasses should never use this to get a value, modify it and set it again.
+    Instead use _incr with a modify_fn.
+    """
+    return interface.state.store.get(
+        self.name, self._validate_fields(fields), target_fields)
+
+  def get_all(self):
+    return interface.state.store.iter_field_values(self.name)
+
+  def reset(self):
+    """Clears the values of this metric.  Useful in unit tests.
+
+    It might be easier to call ts_mon.reset_for_unittest() in your setUp()
+    method instead of resetting every individual metric.
+    """
+
+    interface.state.store.reset_for_unittest(self.name)
+
+  def _set(self, fields, target_fields, value, enforce_ge=False):
+    interface.state.store.set(
+        self.name, self._validate_fields(fields), target_fields,
+        value, enforce_ge=enforce_ge)
+
+  def _incr(self, fields, target_fields, delta, modify_fn=None):
+    interface.state.store.incr(
+        self.name, self._validate_fields(fields), target_fields,
+        delta, modify_fn=modify_fn)
+
+
+class StringMetric(Metric):
+  """A metric whose value type is a string."""
+
+  def _populate_value(self, data, value):
+    data.string_value = value
+
+  def _populate_value_type(self, data_set):
+    data_set.value_type = metrics_pb2.STRING
+
+  def set(self, value, fields=None, target_fields=None):
+    if not isinstance(value, six.string_types):
+      raise errors.MonitoringInvalidValueTypeError(self._name, value)
+    self._set(fields, target_fields, value)
+
+  def is_cumulative(self):
+    return False
+
+
+class BooleanMetric(Metric):
+  """A metric whose value type is a boolean."""
+
+  def _populate_value(self, data, value):
+    data.bool_value = value
+
+  def _populate_value_type(self, data_set):
+    data_set.value_type = metrics_pb2.BOOL
+
+  def set(self, value, fields=None, target_fields=None):
+    if not isinstance(value, bool):
+      raise errors.MonitoringInvalidValueTypeError(self._name, value)
+    self._set(fields, target_fields, value)
+
+  def is_cumulative(self):
+    return False
+
+
+class NumericMetric(Metric):  # pylint: disable=abstract-method
+  """Abstract base class for numeric (int or float) metrics."""
+
+  def increment(self, fields=None, target_fields=None):
+    self._incr(fields, target_fields, 1)
+
+  def increment_by(self, step, fields=None, target_fields=None):
+    self._incr(fields, target_fields, step)
+
+
+class CounterMetric(NumericMetric):
+  """A metric whose value type is a monotonically increasing integer."""
+
+  def __init__(self, name, description, field_spec, start_time=None,
+               units=None):
+    self._start_time = start_time
+    super(CounterMetric, self).__init__(
+        name, description, field_spec, units=units)
+
+  def _populate_value(self, data, value):
+    data.int64_value = value
+
+  def _populate_value_type(self, data_set):
+    data_set.value_type = metrics_pb2.INT64
+
+  def set(self, value, fields=None, target_fields=None):
+    if not isinstance(value, six.integer_types):
+      raise errors.MonitoringInvalidValueTypeError(self._name, value)
+    self._set(fields, target_fields, value, enforce_ge=True)
+
+  def increment_by(self, step, fields=None, target_fields=None):
+    if not isinstance(step, six.integer_types):
+      raise errors.MonitoringInvalidValueTypeError(self._name, step)
+    self._incr(fields, target_fields, step)
+
+  def is_cumulative(self):
+    return True
+
+
+class GaugeMetric(NumericMetric):
+  """A metric whose value type is an integer."""
+
+  def _populate_value(self, data, value):
+    data.int64_value = value
+
+  def _populate_value_type(self, data_set):
+    data_set.value_type = metrics_pb2.INT64
+
+  def set(self, value, fields=None, target_fields=None):
+    if not isinstance(value, six.integer_types):
+      raise errors.MonitoringInvalidValueTypeError(self._name, value)
+    self._set(fields, target_fields, value)
+
+  def is_cumulative(self):
+    return False
+
+
+class CumulativeMetric(NumericMetric):
+  """A metric whose value type is a monotonically increasing float."""
+
+  def __init__(self, name, description, field_spec, start_time=None,
+               units=None):
+    self._start_time = start_time
+    super(CumulativeMetric, self).__init__(
+        name, description, field_spec, units=units)
+
+  def _populate_value(self, data, value):
+    data.double_value = value
+
+  def _populate_value_type(self, data_set):
+    data_set.value_type = metrics_pb2.DOUBLE
+
+  def set(self, value, fields=None, target_fields=None):
+    if not isinstance(value, (float, int)):
+      raise errors.MonitoringInvalidValueTypeError(self._name, value)
+    self._set(fields, target_fields, float(value), enforce_ge=True)
+
+  def is_cumulative(self):
+    return True
+
+
+class FloatMetric(NumericMetric):
+  """A metric whose value type is a float."""
+
+  def _populate_value(self, metric, value):
+    metric.double_value = value
+
+  def _populate_value_type(self, data_set_pb):
+    data_set_pb.value_type = metrics_pb2.DOUBLE
+
+  def set(self, value, fields=None, target_fields=None):
+    if not isinstance(value, (float, int)):
+      raise errors.MonitoringInvalidValueTypeError(self._name, value)
+    self._set(fields, target_fields, float(value))
+
+  def is_cumulative(self):
+    return False
+
+
+class _DistributionMetricBase(Metric):
+  """A metric that holds a distribution of values.
+
+  By default buckets are chosen from a geometric progression, each bucket being
+  approximately 1.59 times bigger than the last.  In practice this is suitable
+  for many kinds of data, but you may want to provide a FixedWidthBucketer or
+  GeometricBucketer with different parameters."""
+
+  def __init__(self, name, description, field_spec, is_cumulative=True,
+               bucketer=None, start_time=None, units=None):
+    self._start_time = start_time
+
+    if bucketer is None:
+      bucketer = distribution.GeometricBucketer()
+
+    self._is_cumulative = is_cumulative
+    self.bucketer = bucketer
+    super(_DistributionMetricBase, self).__init__(
+        name, description, field_spec, units=units)
+
+  def _populate_value(self, metric, value):
+    pb = metric.distribution_value
+
+    # Copy the bucketer params.
+    if value.bucketer.width == 0:
+      pb.exponential_buckets.growth_factor = value.bucketer.growth_factor
+      pb.exponential_buckets.scale = value.bucketer.scale
+      pb.exponential_buckets.num_finite_buckets = (
+          value.bucketer.num_finite_buckets)
+    else:
+      pb.linear_buckets.width = value.bucketer.width
+      pb.linear_buckets.offset = 0.0
+      pb.linear_buckets.num_finite_buckets = value.bucketer.num_finite_buckets
+
+    # Copy the distribution bucket values.  Include the overflow buckets on
+    # either end.
+    pb.bucket_count.extend(
+        value.buckets.get(i, 0) for i in
+        range(0, value.bucketer.total_buckets))
+
+    pb.count = value.count
+    pb.mean = float(value.sum) / max(value.count, 1)
+
+  def _populate_value_type(self, data_set_pb):
+    data_set_pb.value_type = metrics_pb2.DISTRIBUTION
+
+  def add(self, value, fields=None, target_fields=None):
+    def modify_fn(dist, value):
+      if dist == 0:
+        dist = distribution.Distribution(self.bucketer)
+      dist.add(value)
+      return dist
+
+    self._incr(fields, target_fields, value, modify_fn=modify_fn)
+
+  def set(self, value, fields=None, target_fields=None):
+    """Replaces the distribution with the given fields with another one.
+
+    This only makes sense on non-cumulative DistributionMetrics.
+
+    Args:
+      value: A infra_libs.ts_mon.Distribution.
+    """
+
+    if self._is_cumulative:
+      raise TypeError(
+          'Cannot set() a cumulative DistributionMetric (use add() instead)')
+
+    if not isinstance(value, distribution.Distribution):
+      raise errors.MonitoringInvalidValueTypeError(self._name, value)
+
+    self._set(fields, target_fields, value)
+
+  def is_cumulative(self):
+    return self._is_cumulative
+
+
+class CumulativeDistributionMetric(_DistributionMetricBase):
+  """A DistributionMetric with is_cumulative set to True."""
+
+  def __init__(self, name, description, field_spec, bucketer=None, units=None):
+    super(CumulativeDistributionMetric, self).__init__(
+        name, description, field_spec,
+        is_cumulative=True,
+        bucketer=bucketer,
+        units=units)
+
+
+class NonCumulativeDistributionMetric(_DistributionMetricBase):
+  """A DistributionMetric with is_cumulative set to False."""
+
+  def __init__(self, name, description, field_spec, bucketer=None, units=None):
+    super(NonCumulativeDistributionMetric, self).__init__(
+        name, description, field_spec,
+        is_cumulative=False,
+        bucketer=bucketer,
+        units=units)
+
+
+class MetricsDataUnits(object):
+  """An container for units of measurement for Metrics data."""
+
+  UNKNOWN_UNITS = '{unknown}'
+  SECONDS = 's'
+  MILLISECONDS = 'ms'
+  MICROSECONDS = 'us'
+  NANOSECONDS = 'ns'
+  BITS = 'B'
+  BYTES = 'By'
+  KILOBYTES = 'kBy'
+  MEGABYTES = 'MBy'
+  GIGABYTES = 'GBy'
+  KIBIBYTES = 'kiBy'
+  MEBIBYTES = 'MiBy'
+  GIBIBYTES = 'GiBy'
+  AMPS = 'A'
+  MILLIAMPS = 'mA'
+  DEGREES_CELSIUS = 'Cel'
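A sketch of defining and updating metrics with the field specs and units defined above; the metric names and field values are made up for illustration, and a monitor/target still has to be configured before the values can be flushed:

    from infra_libs.ts_mon.common import metrics

    rpc_count = metrics.CounterMetric(
        'demo/rpc/count', 'RPCs issued, by outcome.',
        [metrics.StringField('outcome'), metrics.BooleanField('retried')])

    rpc_durations = metrics.CumulativeDistributionMetric(
        'demo/rpc/durations', 'RPC latency in milliseconds.',
        [metrics.StringField('outcome')],
        units=metrics.MetricsDataUnits.MILLISECONDS)

    rpc_count.increment(fields={'outcome': 'success', 'retried': False})
    rpc_durations.add(12.5, fields={'outcome': 'success'})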
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/monitors.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/monitors.py
new file mode 100644
index 0000000..38a483e
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/monitors.py
@@ -0,0 +1,162 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Classes representing the monitoring interface for tasks or devices."""
+
+import json
+import logging
+import socket
+
+import httplib2
+
+from googleapiclient import errors
+from infra_libs import httplib2_utils
+from infra_libs.ts_mon.common import pb_to_popo
+try: # pragma: no cover
+  from oauth2client import gce
+except ImportError: # pragma: no cover
+  from oauth2client.contrib import gce
+from oauth2client.client import GoogleCredentials
+from oauth2client.file import Storage
+
+# Special string that can be passed through as the credentials path to use the
+# default Appengine or GCE service account.
+APPENGINE_CREDENTIALS = ':appengine'
+GCE_CREDENTIALS = ':gce'
+
+
+class CredentialFactory(object):
+  """Base class for things that can create OAuth2Credentials."""
+
+  @classmethod
+  def from_string(cls, path):
+    """Creates an appropriate subclass from a file path or magic string."""
+
+    if path == APPENGINE_CREDENTIALS:
+      return AppengineCredentials()
+    if path == GCE_CREDENTIALS:
+      return GCECredentials()
+    return FileCredentials(path)
+
+  def create(self, scopes):
+    raise NotImplementedError
+
+
+class GCECredentials(CredentialFactory):
+  def create(self, scopes):
+    return gce.AppAssertionCredentials(scopes)
+
+
+class AppengineCredentials(CredentialFactory):
+  def create(self, scopes):  # pragma: no cover
+    # This import doesn't work outside appengine, so delay it until it's used.
+    from oauth2client import appengine
+    return appengine.AppAssertionCredentials(scopes)
+
+
+class FileCredentials(CredentialFactory):
+  def __init__(self, path):
+    self.path = path
+
+  def create(self, scopes):
+    with open(self.path, 'r') as fh:
+      data = json.load(fh)
+    if data.get('type', None):
+      credentials = GoogleCredentials.from_stream(self.path)
+      credentials = credentials.create_scoped(scopes)
+      return credentials
+    return Storage(self.path).get()
+
+
+class DelegateServiceAccountCredentials(CredentialFactory):
+  IAM_SCOPE = 'https://www.googleapis.com/auth/iam'
+
+  def __init__(self, service_account_email, base):
+    self.base = base
+    self.service_account_email = service_account_email
+
+  def create(self, scopes):
+    logging.info('Delegating to service account %s', self.service_account_email)
+    http = httplib2_utils.InstrumentedHttp('actor-credentials')
+    http = self.base.create([self.IAM_SCOPE]).authorize(http)
+    return httplib2_utils.DelegateServiceAccountCredentials(
+        http, self.service_account_email, scopes)
+
+
+class Monitor(object):
+  """Abstract base class encapsulating the ability to collect and send metrics.
+
+  This is a singleton class. There should only be one instance of a Monitor at
+  a time. It will be created and initialized by process_argparse_options. It
+  must exist in order for any metrics to be sent, although both Targets and
+  Metrics may be initialized before the underlying Monitor. If it does not exist
+  at the time that a Metric is sent, an exception will be raised.
+
+  send() can be either synchronous or asynchronous.  If synchronous, it needs to
+  make the HTTP request, wait for a response and return None.
+  If asynchronous, send() should start the request and immediately return some
+  object which is later passed to wait() once all requests have been started.
+  """
+
+  _SCOPES = []
+
+  def send(self, metric_pb):
+    raise NotImplementedError()
+
+  def wait(self, state):  # pragma: no cover
+    pass
+
+
+class HttpsMonitor(Monitor):
+
+  _SCOPES = ['https://www.googleapis.com/auth/prodxmon']
+
+  def __init__(self, endpoint, credential_factory, http=None, ca_certs=None):
+    self._endpoint = endpoint
+    credentials = credential_factory.create(self._SCOPES)
+    if http is None:
+      http = httplib2_utils.RetriableHttp(
+          httplib2_utils.InstrumentedHttp('acq-mon-api', ca_certs=ca_certs))
+    self._http = credentials.authorize(http)
+
+  def encode_to_json(self, metric_pb):
+    return json.dumps({'payload': pb_to_popo.convert(metric_pb)})
+
+  def send(self, metric_pb):
+    body = self.encode_to_json(metric_pb)
+
+    try:
+      resp, content = self._http.request(self._endpoint,
+          method='POST',
+          body=body,
+          headers={'Content-Type': 'application/json'})
+      if resp.status != 200:
+        logging.warning('HttpsMonitor.send received status %d: %s', resp.status,
+                        content)
+    except (ValueError, errors.Error,
+            socket.timeout, socket.error, socket.herror, socket.gaierror,
+            httplib2.HttpLib2Error):
+      logging.exception('HttpsMonitor.send failed')
+
+
+class DebugMonitor(Monitor):
+  """Class which writes metrics to logs or a local file for debugging."""
+  def __init__(self, filepath=None):
+    if filepath is None:
+      self._fh = None
+    else:
+      self._fh = open(filepath, 'a')
+
+  def send(self, metric_pb):
+    text = str(metric_pb)
+    logging.info('Flushing monitoring metrics:\n%s', text)
+    if self._fh is not None:
+      self._fh.write(text + '\n\n')
+      self._fh.flush()
+
+
+class NullMonitor(Monitor):
+  """Class that doesn't send metrics anywhere."""
+  def send(self, metric_pb):
+    pass
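A sketch of selecting a Monitor implementation by hand; in production process_argparse_options builds an HttpsMonitor from the machine config, while DebugMonitor and NullMonitor are convenient for local runs and tests:

    from infra_libs.ts_mon.common import interface, monitors

    # Log every flushed payload instead of posting it anywhere.
    interface.state.global_monitor = monitors.DebugMonitor()

    # Or append serialized payloads to a local file for inspection
    # (the path is a placeholder):
    #   interface.state.global_monitor = monitors.DebugMonitor('/tmp/ts_mon.log')

    # Or drop metrics entirely, which is handy in unit tests:
    #   interface.state.global_monitor = monitors.NullMonitor()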
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/pb_to_popo.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/pb_to_popo.py
new file mode 100644
index 0000000..abec76e
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/pb_to_popo.py
@@ -0,0 +1,53 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import sys
+
+from google.protobuf.descriptor import FieldDescriptor as fd
+import six
+
+
+def convert(pb):
+  """Convert protobuf to plain-old-python-object"""
+  obj = {}
+  for field, value in pb.ListFields():
+    if field.label == fd.LABEL_REPEATED:
+      obj[field.name] = list(_get_json_func(field.type)(v) for v in value)
+    else:
+      obj[field.name] = _get_json_func(field.type)(value)
+  return obj
+
+
+def _get_json_func(field_type):
+  if field_type in _FD_TO_JSON:
+    return _FD_TO_JSON[field_type]
+  else: # pragma: no cover
+    logging.warning("pb_to_popo doesn't support converting %s", field_type)
+    return six.text_type
+
+
+if sys.version_info.major < 3:
+  _64bit_type = long
+else:
+  _64bit_type = int
+
+_FD_TO_JSON = {
+  fd.TYPE_BOOL: bool,
+  fd.TYPE_DOUBLE: float,
+  fd.TYPE_ENUM: int,
+  fd.TYPE_FIXED32: float,
+  fd.TYPE_FIXED64: float,
+  fd.TYPE_FLOAT: float,
+  fd.TYPE_INT32: int,
+  fd.TYPE_INT64: _64bit_type,
+  fd.TYPE_SFIXED32: float,
+  fd.TYPE_SFIXED64: float,
+  fd.TYPE_SINT32: int,
+  fd.TYPE_SINT64: _64bit_type,
+  fd.TYPE_STRING: six.text_type,
+  fd.TYPE_UINT32: int,
+  fd.TYPE_UINT64: _64bit_type,
+  fd.TYPE_MESSAGE: convert
+}
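A sketch of convert() applied to one of the library's own payload protos; the field values are placeholders and the printed dict is shown only approximately:

    from infra_libs.ts_mon.common import pb_to_popo
    from infra_libs.ts_mon.protos import metrics_pb2

    payload = metrics_pb2.MetricsPayload()
    collection = payload.metrics_collection.add()
    collection.task.service_name = 'myservice'
    collection.task.task_num = 3

    # ListFields() only reports fields that were set, so unset fields are
    # omitted from the result.
    print(pb_to_popo.convert(payload))
    # roughly: {'metrics_collection': [{'task': {'service_name': 'myservice',
    #                                            'task_num': 3}}]}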
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/standard_metrics.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/standard_metrics.py
new file mode 100644
index 0000000..f237023
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/standard_metrics.py
@@ -0,0 +1,19 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Metrics common to all tasks and devices."""
+
+from infra_libs.ts_mon.common import metrics
+
+
+up = metrics.BooleanMetric(
+    'presence/up',
+    'Set to True when the program is running, missing otherwise.',
+    None)
+
+
+def init():
+  # TODO(dsansome): Add more metrics for git revision, cipd package version,
+  # uptime, etc.
+  up.set(True)
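init() is intended to be called once, after the monitor and target have been configured, so that every subsequent flush reports the process as alive:

    from infra_libs.ts_mon.common import standard_metrics

    # Sets presence/up to True for this process.
    standard_metrics.init()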
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/targets.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/targets.py
new file mode 100644
index 0000000..be78aa5
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/common/targets.py
@@ -0,0 +1,125 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Classes representing the monitoring interface for tasks or devices."""
+
+
+class Target(object):
+  """Abstract base class for a monitoring target.
+
+  A Target is a "thing" that should be monitored, for example, a device or a
+  process. The majority of the time, a single process will have only a single
+  Target.
+
+  Do not directly instantiate an object of this class.
+  Use the concrete child classes instead:
+  * TaskTarget to monitor a job or tasks running in (potentially) many places;
+  * DeviceTarget to monitor a host machine that may be running a task.
+  """
+
+  def __init__(self):
+    # Subclasses should list the updatable target fields here.
+    self._fields = tuple()
+
+  def populate_target_pb(self, collection_pb):
+    """Populate the 'target' into a MetricsCollection."""
+    raise NotImplementedError()
+
+  def to_dict(self):
+    """Return target field values as a dictionary."""
+    return {field: getattr(self, field) for field in self._fields}
+
+  def update(self, target_fields):
+    """Update values of some target fields given as a dict."""
+    for field, value in target_fields.items():
+      if field not in self._fields:
+        raise AttributeError('Bad target field: %s' % field)
+      # Make sure the attribute actually exists in the object.
+      getattr(self, field)
+      setattr(self, field, value)
+
+  def __eq__(self, other):
+    if type(self) != type(other):
+      return False
+
+    for field in self._fields:
+      if getattr(self, field) != getattr(other, field):
+        return False
+
+    return True
+
+  def __hash__(self):
+    return hash(tuple(sorted(self.to_dict())))
+
+class DeviceTarget(Target):
+  """Monitoring interface class for monitoring specific hosts or devices."""
+
+  def __init__(self, region, role, network, hostname):
+    """Create a Target object exporting info about a specific device.
+
+    Args:
+      region (str): physical region in which the device is located.
+      role (str): role of the device.
+      network (str): virtual network on which the device is located.
+      hostname (str): name by which the device self-identifies.
+    """
+    super(DeviceTarget, self).__init__()
+    self.region = region
+    self.role = role
+    self.network = network
+    self.hostname = hostname
+    self.realm = 'ACQ_CHROME'
+    self.alertable = True
+    self._fields = ('region', 'role', 'network', 'hostname')
+
+  def populate_target_pb(self, collection):
+    """Populate the 'network_device' target into metrics_pb2.MetricsCollection.
+
+    Args:
+      collection (metrics_pb2.MetricsCollection): the collection proto to be
+          populated.
+    """
+    collection.network_device.metro = self.region
+    collection.network_device.role = self.role
+    collection.network_device.hostgroup = self.network
+    collection.network_device.hostname = self.hostname
+    collection.network_device.realm = self.realm
+    collection.network_device.alertable = self.alertable
+
+
+class TaskTarget(Target):
+  """Monitoring interface class for monitoring active jobs or processes."""
+
+  def __init__(self, service_name, job_name, region, hostname, task_num=0):
+    """Create a Target object exporting info about a specific task.
+
+    Args:
+      service_name (str): service of which this task is a part.
+      job_name (str): specific name of this task.
+      region (str): general region in which this task is running.
+      hostname (str): specific machine on which this task is running.
+      task_num (int): replication id of this task.
+    """
+    super(TaskTarget, self).__init__()
+    self.service_name = service_name
+    self.job_name = job_name
+    self.region = region
+    self.hostname = hostname
+    self.task_num = task_num
+    self._fields = ('service_name', 'job_name', 'region',
+                    'hostname', 'task_num')
+
+  def populate_target_pb(self, collection):
+    """Populate the 'task' target into metrics_pb2.MetricsCollection.
+
+    Args:
+      collection (metrics_pb2.MetricsCollection): the collection proto to be
+          populated.
+    """
+    collection.task.service_name = self.service_name
+    collection.task.job_name = self.job_name
+    collection.task.data_center = self.region
+    collection.task.host_name = self.hostname
+    collection.task.task_num = self.task_num
+
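A sketch of constructing the two concrete targets; all field values are placeholders:

    from infra_libs.ts_mon.common import targets

    # One replica of a job within a service.
    task = targets.TaskTarget('myservice', 'myjob', 'us-central1', 'build1',
                              task_num=0)

    # A specific host on a named network.
    device = targets.DeviceTarget('us-central1', 'default', 'net1', 'build1')

    # Target fields can be inspected, or overridden per-value via the
    # target_fields argument on Metric.set()/increment().
    print(task.to_dict())
    device.update({'hostname': 'build2'})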
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/config.proto b/utils/frozen_chromite/third_party/infra_libs/ts_mon/config.proto
new file mode 100644
index 0000000..e7c1338
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/config.proto
@@ -0,0 +1,13 @@
+syntax = "proto3";
+
+// ts_mon's config file in /etc/chrome-infra/ts-mon.json is a JSON-encoded
+// ConfigFile message.
+// Note: this .proto file isn't currently used to encode/decode the config file,
+// it's just here as a reference.
+message ConfigFile {
+  // Url to post monitoring metrics to.  file:// URLs are supported as well.
+  string endpoint = 1;
+
+  // Path to a pkcs8 json credential file.
+  string credentials = 2;
+}
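A sketch of what a matching ts-mon.json could look like, built with plain Python; the endpoint and credential paths are placeholders (the proto comment above notes that file:// endpoints are accepted as well as https:// ones):

    import json

    example_config = {
        'endpoint': 'file:///tmp/ts_mon_metrics.log',      # placeholder
        'credentials': '/path/to/service_account.json',    # placeholder
    }
    print(json.dumps(example_config, indent=2))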
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/config.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/config.py
new file mode 100644
index 0000000..89fa8fe
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/config.py
@@ -0,0 +1,251 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+import socket
+import sys
+import re
+
+import requests
+
+from infra_libs.ts_mon.common import interface
+from infra_libs.ts_mon.common import monitors
+from infra_libs.ts_mon.common import standard_metrics
+from infra_libs.ts_mon.common import targets
+
+
+def load_machine_config(filename):
+  if not os.path.exists(filename):
+    logging.info('Configuration file does not exist, ignoring: %s', filename)
+    return {}
+
+  try:
+    with open(filename) as fh:
+      return json.load(fh)
+  except Exception:
+    logging.error('Configuration file couldn\'t be read: %s', filename)
+    raise
+
+
+def _default_region(fqdn):
+  # Check if we're running in a GCE instance.
+  try:
+    r = requests.get(
+        'http://metadata.google.internal/computeMetadata/v1/instance/zone',
+        headers={'Metadata-Flavor': 'Google'},
+        timeout=1.0)
+  except requests.exceptions.RequestException:
+    pass
+  else:
+    if r.status_code == requests.codes.ok:
+      # The zone is the last slash-separated component.
+      return r.text.split('/')[-1]
+
+  try:
+    return fqdn.split('.')[1]  # [chrome|golo]
+  except IndexError:
+    return ''
+
+
+def _default_network(host):
+  try:
+    # Regular expression that matches the vast majority of our host names.
+    # Matches everything of the form 'masterN', 'masterNa', and 'foo-xN'.
+    return re.match(r'^([\w-]*?-[acm]|master)(\d+)a?$', host).group(2)  # N
+  except AttributeError:
+    return ''
+
+
+def add_argparse_options(parser):
+  """Add monitoring related flags to a process' argument parser.
+
+  Args:
+    parser (argparse.ArgumentParser): the parser for the main process.
+  """
+  if sys.platform == 'win32':  # pragma: no cover
+    default_config_file = 'C:\\chrome-infra\\ts-mon.json'
+  else:  # pragma: no cover
+    default_config_file = '/etc/chrome-infra/ts-mon.json'
+
+  parser = parser.add_argument_group('Timeseries Monitoring Options')
+  parser.add_argument(
+      '--ts-mon-config-file',
+      default=default_config_file,
+      help='path to a JSON config file that contains suitable values for '
+           '"endpoint" and "credentials" for this machine. This config file is '
+           'intended to be shared by all processes on the machine, as the '
+           'values depend on the machine\'s position in the network, IP '
+           'whitelisting and deployment of credentials. (default: %(default)s)')
+  parser.add_argument(
+      '--ts-mon-endpoint',
+      help='url (file:// or https://) to post monitoring metrics to. If set, '
+           'overrides the value in --ts-mon-config-file')
+  parser.add_argument(
+      '--ts-mon-credentials',
+      help='path to a pkcs8 json credential file. If set, overrides the value '
+           'in --ts-mon-config-file')
+  parser.add_argument(
+      '--ts-mon-ca-certs',
+      help='path to file containing root CA certificates for SSL server '
+           'certificate validation. If not set, a CA cert file bundled with '
+           'httplib2 is used.')
+  parser.add_argument(
+      '--ts-mon-flush',
+      choices=('manual', 'auto'), default='auto',
+      help=('metric push behavior: manual (only send when flush() is called), '
+            'or auto (send automatically every --ts-mon-flush-interval-secs '
+            'seconds). (default: %(default)s)'))
+  parser.add_argument(
+      '--ts-mon-flush-interval-secs',
+      type=int,
+      default=60,
+      help=('automatically push metrics on this interval if '
+            '--ts-mon-flush=auto.'))
+  parser.add_argument(
+      '--ts-mon-autogen-hostname',
+      action="store_true",
+      help=('Indicate that the hostname is autogenerated. '
+            'This option must be set on autoscaled GCE VMs, Kubernetes pods, '
+            'or any other hosts with dynamically generated names.'))
+
+  parser.add_argument(
+      '--ts-mon-target-type',
+      choices=('device', 'task'),
+      default='device',
+      help='the type of target that is being monitored ("device" or "task").'
+           ' (default: %(default)s)')
+
+  fqdn = socket.getfqdn().lower()  # foo-[a|m]N.[chrome|golo].chromium.org
+  host = fqdn.split('.')[0]  # foo-[a|m]N
+  region = _default_region(fqdn)
+  network = _default_network(host)
+
+  parser.add_argument(
+      '--ts-mon-device-hostname',
+      default=host,
+      help='name of this device. (default: %(default)s)')
+  parser.add_argument(
+      '--ts-mon-device-region',
+      default=region,
+      help='name of the region this device lives in. (default: %(default)s)')
+  parser.add_argument(
+      '--ts-mon-device-role',
+      default='default',
+      help='Role of the device. (default: %(default)s)')
+  parser.add_argument(
+      '--ts-mon-device-network',
+      default=network,
+      help='name of the network this device is connected to. '
+           '(default: %(default)s)')
+
+  parser.add_argument(
+      '--ts-mon-task-service-name',
+      help='name of the service being monitored')
+  parser.add_argument(
+      '--ts-mon-task-job-name',
+      help='name of this job instance of the task')
+  parser.add_argument(
+      '--ts-mon-task-region',
+      default=region,
+      help='name of the region in which this task is running '
+           '(default: %(default)s)')
+  parser.add_argument(
+      '--ts-mon-task-hostname',
+      default=host,
+      help='name of the host on which this task is running '
+           '(default: %(default)s)')
+  parser.add_argument(
+      '--ts-mon-task-number', type=int, default=0,
+      help='number (e.g. for replication) of this instance of this task '
+           '(default: %(default)s)')
+
+  parser.add_argument(
+      '--ts-mon-metric-name-prefix',
+      default='/chrome/infra/',
+      help='metric name prefix for all metrics (default: %(default)s)')
+
+  parser.add_argument(
+      '--ts-mon-use-new-proto',
+      default=True, action='store_true',
+      help='deprecated and ignored')
+
+
+def process_argparse_options(args):
+  """Process command line arguments to initialize the global monitor.
+
+  Also initializes the default target.
+
+  Starts a background thread to automatically flush monitoring metrics if not
+  disabled by command line arguments.
+
+  Args:
+    args (argparse.Namespace): the result of parsing the command line arguments
+  """
+  # Parse the config file if it exists.
+  config = load_machine_config(args.ts_mon_config_file)
+  endpoint = config.get('endpoint', '')
+  credentials = config.get('credentials', '')
+  autogen_hostname = config.get('autogen_hostname', False)
+
+  # Command-line args override the values in the config file.
+  if args.ts_mon_endpoint is not None:
+    endpoint = args.ts_mon_endpoint
+  if args.ts_mon_credentials is not None:
+    credentials = args.ts_mon_credentials
+
+  if args.ts_mon_target_type == 'device':
+    hostname = args.ts_mon_device_hostname
+    if args.ts_mon_autogen_hostname or autogen_hostname:
+      hostname = 'autogen:' + hostname
+    interface.state.target = targets.DeviceTarget(
+        args.ts_mon_device_region,
+        args.ts_mon_device_role,
+        args.ts_mon_device_network,
+        hostname)
+  if args.ts_mon_target_type == 'task':
+    # Reimplement ArgumentParser.error, since we don't have access to the parser
+    if not args.ts_mon_task_service_name:
+      sys.stderr.write('Argument --ts-mon-task-service-name must be '
+                       'provided when the target type is "task".\n')
+      sys.exit(2)
+    if not args.ts_mon_task_job_name:
+      sys.stderr.write('Argument --ts-mon-task-job-name must be provided '
+                       'when the target type is "task".\n')
+      sys.exit(2)
+    hostname = args.ts_mon_task_hostname
+    if args.ts_mon_autogen_hostname or autogen_hostname:
+      hostname = 'autogen:' + hostname
+    interface.state.target = targets.TaskTarget(
+        args.ts_mon_task_service_name,
+        args.ts_mon_task_job_name,
+        args.ts_mon_task_region,
+        hostname,
+        args.ts_mon_task_number)
+
+  interface.state.metric_name_prefix = args.ts_mon_metric_name_prefix
+  interface.state.global_monitor = monitors.NullMonitor()
+
+  if endpoint.startswith('file://'):
+    interface.state.global_monitor = monitors.DebugMonitor(
+        endpoint[len('file://'):])
+  elif endpoint.startswith('https://'):
+    interface.state.global_monitor = monitors.HttpsMonitor(
+        endpoint, monitors.CredentialFactory.from_string(credentials),
+        ca_certs=args.ts_mon_ca_certs)
+  elif endpoint.lower() == 'none' or not endpoint:
+    logging.info('ts_mon monitoring has been explicitly disabled')
+  else:
+    logging.error('ts_mon monitoring is disabled because the endpoint provided'
+                  ' is invalid or not supported: %s', endpoint)
+
+  interface.state.flush_mode = args.ts_mon_flush
+
+  if args.ts_mon_flush == 'auto':
+    interface.state.flush_thread = interface._FlushThread(
+        args.ts_mon_flush_interval_secs)
+    interface.state.flush_thread.start()
+
+  standard_metrics.init()
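+
+# Illustrative usage sketch (not part of the library API; assumes infra_libs
+# from this frozen_chromite checkout is importable):
+#
+#   import argparse
+#   from infra_libs.ts_mon import config
+#
+#   parser = argparse.ArgumentParser()
+#   config.add_argparse_options(parser)
+#   args = parser.parse_args()
+#   config.process_argparse_options(args)
+#
+# After process_argparse_options() returns, interface.state.global_monitor
+# and the default target are configured, and metrics are flushed on a
+# background thread when --ts-mon-flush=auto (the default).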
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/README.md b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/README.md
new file mode 100644
index 0000000..c4ff163
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/README.md
@@ -0,0 +1,9 @@
+Updating the *.proto files: see go/updating-tsmon-protos
+
+To generate the `*_pb2.py` files from the `*.proto` files:
+
+    cd infra_libs/ts_mon/protos
+    protoc --python_out=. *.proto
+
+protoc version tested: libprotoc 3.0.0
+
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/__init__.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/__init__.py
new file mode 100644
index 0000000..1aaf0e1
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_network_device.proto b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_network_device.proto
new file mode 100644
index 0000000..73fc276
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_network_device.proto
@@ -0,0 +1,22 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+syntax = "proto2";
+
+package ts_mon.proto;
+
+message NetworkDevice {
+  enum TypeId { MESSAGE_TYPE_ID = 34049749; };
+  optional string proxy_environment = 5;
+  optional string acquisition_name = 10;
+  optional string pop = 30;
+  optional bool alertable = 101;
+  optional string realm = 102;
+  optional int64 asn = 103;
+  optional string metro = 104;
+  optional string role = 105;
+  optional string hostname = 106;
+  optional string vendor = 70;
+  optional string hostgroup = 108;
+  optional string proxy_zone = 100;
+}
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_network_device_pb2.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_network_device_pb2.py
new file mode 100644
index 0000000..b571a8a
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_network_device_pb2.py
@@ -0,0 +1,164 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: acquisition_network_device.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='acquisition_network_device.proto',
+  package='ts_mon.proto',
+  serialized_pb=_b('\n acquisition_network_device.proto\x12\x0cts_mon.proto\"\x88\x02\n\rNetworkDevice\x12\x19\n\x11proxy_environment\x18\x05 \x01(\t\x12\x18\n\x10\x61\x63quisition_name\x18\n \x01(\t\x12\x0b\n\x03pop\x18\x1e \x01(\t\x12\x11\n\talertable\x18\x65 \x01(\x08\x12\r\n\x05realm\x18\x66 \x01(\t\x12\x0b\n\x03\x61sn\x18g \x01(\x03\x12\r\n\x05metro\x18h \x01(\t\x12\x0c\n\x04role\x18i \x01(\t\x12\x10\n\x08hostname\x18j \x01(\t\x12\x0e\n\x06vendor\x18\x46 \x01(\t\x12\x11\n\thostgroup\x18l \x01(\t\x12\x12\n\nproxy_zone\x18\x64 \x01(\t\" \n\x06TypeId\x12\x16\n\x0fMESSAGE_TYPE_ID\x10\xd5\x9d\x9e\x10')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_NETWORKDEVICE_TYPEID = _descriptor.EnumDescriptor(
+  name='TypeId',
+  full_name='ts_mon.proto.NetworkDevice.TypeId',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='MESSAGE_TYPE_ID', index=0, number=34049749,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=283,
+  serialized_end=315,
+)
+_sym_db.RegisterEnumDescriptor(_NETWORKDEVICE_TYPEID)
+
+
+_NETWORKDEVICE = _descriptor.Descriptor(
+  name='NetworkDevice',
+  full_name='ts_mon.proto.NetworkDevice',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='proxy_environment', full_name='ts_mon.proto.NetworkDevice.proxy_environment', index=0,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='acquisition_name', full_name='ts_mon.proto.NetworkDevice.acquisition_name', index=1,
+      number=10, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='pop', full_name='ts_mon.proto.NetworkDevice.pop', index=2,
+      number=30, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='alertable', full_name='ts_mon.proto.NetworkDevice.alertable', index=3,
+      number=101, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='realm', full_name='ts_mon.proto.NetworkDevice.realm', index=4,
+      number=102, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='asn', full_name='ts_mon.proto.NetworkDevice.asn', index=5,
+      number=103, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='metro', full_name='ts_mon.proto.NetworkDevice.metro', index=6,
+      number=104, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='role', full_name='ts_mon.proto.NetworkDevice.role', index=7,
+      number=105, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='hostname', full_name='ts_mon.proto.NetworkDevice.hostname', index=8,
+      number=106, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='vendor', full_name='ts_mon.proto.NetworkDevice.vendor', index=9,
+      number=70, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='hostgroup', full_name='ts_mon.proto.NetworkDevice.hostgroup', index=10,
+      number=108, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='proxy_zone', full_name='ts_mon.proto.NetworkDevice.proxy_zone', index=11,
+      number=100, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _NETWORKDEVICE_TYPEID,
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=51,
+  serialized_end=315,
+)
+
+_NETWORKDEVICE_TYPEID.containing_type = _NETWORKDEVICE
+DESCRIPTOR.message_types_by_name['NetworkDevice'] = _NETWORKDEVICE
+
+NetworkDevice = _reflection.GeneratedProtocolMessageType('NetworkDevice', (_message.Message,), dict(
+  DESCRIPTOR = _NETWORKDEVICE,
+  __module__ = 'acquisition_network_device_pb2'
+  # @@protoc_insertion_point(class_scope:ts_mon.proto.NetworkDevice)
+  ))
+_sym_db.RegisterMessage(NetworkDevice)
+
+
+# @@protoc_insertion_point(module_scope)
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_task.proto b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_task.proto
new file mode 100644
index 0000000..2306500
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_task.proto
@@ -0,0 +1,18 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+syntax = "proto2";
+
+package ts_mon.proto;
+
+message Task {
+  enum TypeId { MESSAGE_TYPE_ID = 34049749; };
+  optional string proxy_environment = 5;
+  optional string acquisition_name = 10;
+  optional string service_name = 20;
+  optional string job_name = 30;
+  optional string data_center = 40;
+  optional string host_name = 50;
+  optional int32 task_num = 60;
+  optional string proxy_zone = 70;
+}
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_task_pb2.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_task_pb2.py
new file mode 100644
index 0000000..9097fe9
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/acquisition_task_pb2.py
@@ -0,0 +1,136 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: acquisition_task.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='acquisition_task.proto',
+  package='ts_mon.proto',
+  serialized_pb=_b('\n\x16\x61\x63quisition_task.proto\x12\x0cts_mon.proto\"\xd3\x01\n\x04Task\x12\x19\n\x11proxy_environment\x18\x05 \x01(\t\x12\x18\n\x10\x61\x63quisition_name\x18\n \x01(\t\x12\x14\n\x0cservice_name\x18\x14 \x01(\t\x12\x10\n\x08job_name\x18\x1e \x01(\t\x12\x13\n\x0b\x64\x61ta_center\x18( \x01(\t\x12\x11\n\thost_name\x18\x32 \x01(\t\x12\x10\n\x08task_num\x18< \x01(\x05\x12\x12\n\nproxy_zone\x18\x46 \x01(\t\" \n\x06TypeId\x12\x16\n\x0fMESSAGE_TYPE_ID\x10\xd5\x9d\x9e\x10')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_TASK_TYPEID = _descriptor.EnumDescriptor(
+  name='TypeId',
+  full_name='ts_mon.proto.Task.TypeId',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='MESSAGE_TYPE_ID', index=0, number=34049749,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=220,
+  serialized_end=252,
+)
+_sym_db.RegisterEnumDescriptor(_TASK_TYPEID)
+
+
+_TASK = _descriptor.Descriptor(
+  name='Task',
+  full_name='ts_mon.proto.Task',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='proxy_environment', full_name='ts_mon.proto.Task.proxy_environment', index=0,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='acquisition_name', full_name='ts_mon.proto.Task.acquisition_name', index=1,
+      number=10, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='service_name', full_name='ts_mon.proto.Task.service_name', index=2,
+      number=20, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='job_name', full_name='ts_mon.proto.Task.job_name', index=3,
+      number=30, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='data_center', full_name='ts_mon.proto.Task.data_center', index=4,
+      number=40, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='host_name', full_name='ts_mon.proto.Task.host_name', index=5,
+      number=50, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='task_num', full_name='ts_mon.proto.Task.task_num', index=6,
+      number=60, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='proxy_zone', full_name='ts_mon.proto.Task.proxy_zone', index=7,
+      number=70, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _TASK_TYPEID,
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=41,
+  serialized_end=252,
+)
+
+_TASK_TYPEID.containing_type = _TASK
+DESCRIPTOR.message_types_by_name['Task'] = _TASK
+
+Task = _reflection.GeneratedProtocolMessageType('Task', (_message.Message,), dict(
+  DESCRIPTOR = _TASK,
+  __module__ = 'acquisition_task_pb2'
+  # @@protoc_insertion_point(class_scope:ts_mon.proto.Task)
+  ))
+_sym_db.RegisterMessage(Task)
+
+
+# @@protoc_insertion_point(module_scope)
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/any.proto b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/any.proto
new file mode 100644
index 0000000..b66ab09
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/any.proto
@@ -0,0 +1,11 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+syntax = "proto2";
+
+package ts_mon.proto;
+
+message Any {
+  optional string type_url = 1 [ctype=STRING_PIECE];
+  optional bytes value = 2 [ctype=CORD];
+}
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/any_pb2.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/any_pb2.py
new file mode 100644
index 0000000..ca0eca7
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/any_pb2.py
@@ -0,0 +1,78 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: any.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='any.proto',
+  package='ts_mon.proto',
+  serialized_pb=_b('\n\tany.proto\x12\x0cts_mon.proto\".\n\x03\x41ny\x12\x14\n\x08type_url\x18\x01 \x01(\tB\x02\x08\x02\x12\x11\n\x05value\x18\x02 \x01(\x0c\x42\x02\x08\x01')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_ANY = _descriptor.Descriptor(
+  name='Any',
+  full_name='ts_mon.proto.Any',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='type_url', full_name='ts_mon.proto.Any.type_url', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='ts_mon.proto.Any.value', index=1,
+      number=2, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b(""),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=27,
+  serialized_end=73,
+)
+
+DESCRIPTOR.message_types_by_name['Any'] = _ANY
+
+Any = _reflection.GeneratedProtocolMessageType('Any', (_message.Message,), dict(
+  DESCRIPTOR = _ANY,
+  __module__ = 'any_pb2'
+  # @@protoc_insertion_point(class_scope:ts_mon.proto.Any)
+  ))
+_sym_db.RegisterMessage(Any)
+
+
+_ANY.fields_by_name['type_url'].has_options = True
+_ANY.fields_by_name['type_url']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
+_ANY.fields_by_name['value'].has_options = True
+_ANY.fields_by_name['value']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
+# @@protoc_insertion_point(module_scope)
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/metrics.proto b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/metrics.proto
new file mode 100644
index 0000000..8d23d15
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/metrics.proto
@@ -0,0 +1,128 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+syntax = "proto2";
+
+package ts_mon.proto;
+
+import "any.proto";
+import "timestamp.proto";
+import "acquisition_network_device.proto";
+import "acquisition_task.proto";
+
+message MetricsPayload {
+  repeated MetricsCollection metrics_collection = 1;
+}
+
+message MetricsCollection {
+  repeated MetricsDataSet metrics_data_set = 1;
+  oneof target_schema {
+    NetworkDevice network_device = 11;
+    Task task = 12;
+  }
+}
+
+message MetricsDataSet {
+  optional string metric_name = 1;
+  repeated MetricFieldDescriptor field_descriptor = 2;
+  optional StreamKind stream_kind = 3;
+  optional ValueType value_type = 4;
+  optional string description = 5;
+  optional Annotations annotations = 6;
+  repeated MetricsData data = 7;
+  message MetricFieldDescriptor {
+    optional string name = 1;
+
+    optional FieldType field_type = 2;
+    enum FieldType {
+      STRING = 0;
+      INT64 = 1;
+      BOOL = 2;
+    }
+  }
+}
+
+message MetricsData {
+  oneof value {
+    bool bool_value = 1;
+    string string_value = 2;
+    int64 int64_value = 3;
+    double double_value = 4;
+    Distribution distribution_value = 5;
+  }
+
+  repeated MetricField field = 6;
+  message MetricField {
+    optional string name = 1;
+
+    oneof value {
+      string string_value = 2;
+      int64 int64_value = 3;
+      bool bool_value = 4;
+    }
+  }
+
+  optional Timestamp start_timestamp = 7;
+  optional Timestamp end_timestamp = 8;
+
+  message Distribution {
+    optional int64 count = 1;
+    optional double mean = 2;
+    optional double sum_of_squared_deviation = 3;
+    optional double minimum = 4;
+    optional double maximum = 5;
+
+    oneof bucket_options {
+      LinearOptions linear_buckets = 6;
+      ExponentialOptions exponential_buckets = 7;
+      ExplicitOptions explicit_buckets = 8;
+    }
+
+    message LinearOptions {
+      optional int32 num_finite_buckets = 1;
+      optional double width = 2;
+      optional double offset = 3;
+    }
+
+    message ExponentialOptions {
+      optional int32 num_finite_buckets = 1;
+      optional double growth_factor = 2;
+      optional double scale = 3;
+    }
+
+    message ExplicitOptions {
+      repeated double bound = 1 [packed = true];
+    }
+
+    repeated int64 bucket_count = 9 [packed = true];
+
+    repeated Exemplar exemplar = 10;
+
+    message Exemplar {
+      optional double value = 1;
+      optional Timestamp timestamp = 2;
+      repeated Any attachment = 3;
+    }
+  }
+}
+
+message Annotations {
+  optional string unit = 1;
+  optional bool timestamp = 2;
+  optional string deprecation = 3;
+  repeated Any annotation = 4;
+}
+
+enum StreamKind {
+  GAUGE = 0;
+  CUMULATIVE = 1;
+  DELTA = 2;
+}
+
+enum ValueType {
+  BOOL = 0;
+  STRING = 1;
+  INT64 = 2;
+  DOUBLE = 3;
+  DISTRIBUTION = 4;
+}
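+
+// Illustrative example of a populated payload in protobuf text format; all
+// names and values below are placeholders:
+//
+//   metrics_collection {
+//     task { service_name: "example_service" job_name: "example_job" }
+//     metrics_data_set {
+//       metric_name: "/chrome/infra/example/count"
+//       stream_kind: CUMULATIVE
+//       value_type: INT64
+//       data { int64_value: 1 }
+//     }
+//   }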
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/metrics_pb2.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/metrics_pb2.py
new file mode 100644
index 0000000..39cfa46
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/metrics_pb2.py
@@ -0,0 +1,926 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: metrics.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from . import any_pb2
+from . import timestamp_pb2
+from . import acquisition_network_device_pb2
+from . import acquisition_task_pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='metrics.proto',
+  package='ts_mon.proto',
+  serialized_pb=_b('\n\rmetrics.proto\x12\x0cts_mon.proto\x1a\tany.proto\x1a\x0ftimestamp.proto\x1a acquisition_network_device.proto\x1a\x16\x61\x63quisition_task.proto\"M\n\x0eMetricsPayload\x12;\n\x12metrics_collection\x18\x01 \x03(\x0b\x32\x1f.ts_mon.proto.MetricsCollection\"\xb7\x01\n\x11MetricsCollection\x12\x36\n\x10metrics_data_set\x18\x01 \x03(\x0b\x32\x1c.ts_mon.proto.MetricsDataSet\x12\x35\n\x0enetwork_device\x18\x0b \x01(\x0b\x32\x1b.ts_mon.proto.NetworkDeviceH\x00\x12\"\n\x04task\x18\x0c \x01(\x0b\x32\x12.ts_mon.proto.TaskH\x00\x42\x0f\n\rtarget_schema\"\xe5\x03\n\x0eMetricsDataSet\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12L\n\x10\x66ield_descriptor\x18\x02 \x03(\x0b\x32\x32.ts_mon.proto.MetricsDataSet.MetricFieldDescriptor\x12-\n\x0bstream_kind\x18\x03 \x01(\x0e\x32\x18.ts_mon.proto.StreamKind\x12+\n\nvalue_type\x18\x04 \x01(\x0e\x32\x17.ts_mon.proto.ValueType\x12\x13\n\x0b\x64\x65scription\x18\x05 \x01(\t\x12.\n\x0b\x61nnotations\x18\x06 \x01(\x0b\x32\x19.ts_mon.proto.Annotations\x12\'\n\x04\x64\x61ta\x18\x07 \x03(\x0b\x32\x19.ts_mon.proto.MetricsData\x1a\xa5\x01\n\x15MetricFieldDescriptor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12P\n\nfield_type\x18\x02 \x01(\x0e\x32<.ts_mon.proto.MetricsDataSet.MetricFieldDescriptor.FieldType\",\n\tFieldType\x12\n\n\x06STRING\x10\x00\x12\t\n\x05INT64\x10\x01\x12\x08\n\x04\x42OOL\x10\x02\"\xd3\t\n\x0bMetricsData\x12\x14\n\nbool_value\x18\x01 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x02 \x01(\tH\x00\x12\x15\n\x0bint64_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12\x44\n\x12\x64istribution_value\x18\x05 \x01(\x0b\x32&.ts_mon.proto.MetricsData.DistributionH\x00\x12\x34\n\x05\x66ield\x18\x06 \x03(\x0b\x32%.ts_mon.proto.MetricsData.MetricField\x12\x30\n\x0fstart_timestamp\x18\x07 \x01(\x0b\x32\x17.ts_mon.proto.Timestamp\x12.\n\rend_timestamp\x18\x08 \x01(\x0b\x32\x17.ts_mon.proto.Timestamp\x1ai\n\x0bMetricField\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0cstring_value\x18\x02 \x01(\tH\x00\x12\x15\n\x0bint64_value\x18\x03 \x01(\x03H\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x42\x07\n\x05value\x1a\x94\x06\n\x0c\x44istribution\x12\r\n\x05\x63ount\x18\x01 \x01(\x03\x12\x0c\n\x04mean\x18\x02 \x01(\x01\x12 \n\x18sum_of_squared_deviation\x18\x03 \x01(\x01\x12\x0f\n\x07minimum\x18\x04 \x01(\x01\x12\x0f\n\x07maximum\x18\x05 \x01(\x01\x12N\n\x0elinear_buckets\x18\x06 \x01(\x0b\x32\x34.ts_mon.proto.MetricsData.Distribution.LinearOptionsH\x00\x12X\n\x13\x65xponential_buckets\x18\x07 \x01(\x0b\x32\x39.ts_mon.proto.MetricsData.Distribution.ExponentialOptionsH\x00\x12R\n\x10\x65xplicit_buckets\x18\x08 \x01(\x0b\x32\x36.ts_mon.proto.MetricsData.Distribution.ExplicitOptionsH\x00\x12\x18\n\x0c\x62ucket_count\x18\t \x03(\x03\x42\x02\x10\x01\x12\x41\n\x08\x65xemplar\x18\n \x03(\x0b\x32/.ts_mon.proto.MetricsData.Distribution.Exemplar\x1aJ\n\rLinearOptions\x12\x1a\n\x12num_finite_buckets\x18\x01 \x01(\x05\x12\r\n\x05width\x18\x02 \x01(\x01\x12\x0e\n\x06offset\x18\x03 \x01(\x01\x1aV\n\x12\x45xponentialOptions\x12\x1a\n\x12num_finite_buckets\x18\x01 \x01(\x05\x12\x15\n\rgrowth_factor\x18\x02 \x01(\x01\x12\r\n\x05scale\x18\x03 \x01(\x01\x1a$\n\x0f\x45xplicitOptions\x12\x11\n\x05\x62ound\x18\x01 \x03(\x01\x42\x02\x10\x01\x1al\n\x08\x45xemplar\x12\r\n\x05value\x18\x01 \x01(\x01\x12*\n\ttimestamp\x18\x02 \x01(\x0b\x32\x17.ts_mon.proto.Timestamp\x12%\n\nattachment\x18\x03 \x03(\x0b\x32\x11.ts_mon.proto.AnyB\x10\n\x0e\x62ucket_optionsB\x07\n\x05value\"j\n\x0b\x41nnotations\x12\x0c\n\x04unit\x18\x01 
\x01(\t\x12\x11\n\ttimestamp\x18\x02 \x01(\x08\x12\x13\n\x0b\x64\x65precation\x18\x03 \x01(\t\x12%\n\nannotation\x18\x04 \x03(\x0b\x32\x11.ts_mon.proto.Any*2\n\nStreamKind\x12\t\n\x05GAUGE\x10\x00\x12\x0e\n\nCUMULATIVE\x10\x01\x12\t\n\x05\x44\x45LTA\x10\x02*J\n\tValueType\x12\x08\n\x04\x42OOL\x10\x00\x12\n\n\x06STRING\x10\x01\x12\t\n\x05INT64\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x10\n\x0c\x44ISTRIBUTION\x10\x04')
+  ,
+  dependencies=[any_pb2.DESCRIPTOR,timestamp_pb2.DESCRIPTOR,acquisition_network_device_pb2.DESCRIPTOR,acquisition_task_pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+_STREAMKIND = _descriptor.EnumDescriptor(
+  name='StreamKind',
+  full_name='ts_mon.proto.StreamKind',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='GAUGE', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='CUMULATIVE', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DELTA', index=2, number=2,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=2216,
+  serialized_end=2266,
+)
+_sym_db.RegisterEnumDescriptor(_STREAMKIND)
+
+StreamKind = enum_type_wrapper.EnumTypeWrapper(_STREAMKIND)
+_VALUETYPE = _descriptor.EnumDescriptor(
+  name='ValueType',
+  full_name='ts_mon.proto.ValueType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='BOOL', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='STRING', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='INT64', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DOUBLE', index=3, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DISTRIBUTION', index=4, number=4,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=2268,
+  serialized_end=2342,
+)
+_sym_db.RegisterEnumDescriptor(_VALUETYPE)
+
+ValueType = enum_type_wrapper.EnumTypeWrapper(_VALUETYPE)
+GAUGE = 0
+CUMULATIVE = 1
+DELTA = 2
+BOOL = 0
+STRING = 1
+INT64 = 2
+DOUBLE = 3
+DISTRIBUTION = 4
+
+
+_METRICSDATASET_METRICFIELDDESCRIPTOR_FIELDTYPE = _descriptor.EnumDescriptor(
+  name='FieldType',
+  full_name='ts_mon.proto.MetricsDataSet.MetricFieldDescriptor.FieldType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='STRING', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='INT64', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BOOL', index=2, number=2,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=824,
+  serialized_end=868,
+)
+_sym_db.RegisterEnumDescriptor(_METRICSDATASET_METRICFIELDDESCRIPTOR_FIELDTYPE)
+
+
+_METRICSPAYLOAD = _descriptor.Descriptor(
+  name='MetricsPayload',
+  full_name='ts_mon.proto.MetricsPayload',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='metrics_collection', full_name='ts_mon.proto.MetricsPayload.metrics_collection', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=117,
+  serialized_end=194,
+)
+
+
+_METRICSCOLLECTION = _descriptor.Descriptor(
+  name='MetricsCollection',
+  full_name='ts_mon.proto.MetricsCollection',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='metrics_data_set', full_name='ts_mon.proto.MetricsCollection.metrics_data_set', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='network_device', full_name='ts_mon.proto.MetricsCollection.network_device', index=1,
+      number=11, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='task', full_name='ts_mon.proto.MetricsCollection.task', index=2,
+      number=12, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='target_schema', full_name='ts_mon.proto.MetricsCollection.target_schema',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=197,
+  serialized_end=380,
+)
+
+
+_METRICSDATASET_METRICFIELDDESCRIPTOR = _descriptor.Descriptor(
+  name='MetricFieldDescriptor',
+  full_name='ts_mon.proto.MetricsDataSet.MetricFieldDescriptor',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='ts_mon.proto.MetricsDataSet.MetricFieldDescriptor.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='field_type', full_name='ts_mon.proto.MetricsDataSet.MetricFieldDescriptor.field_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _METRICSDATASET_METRICFIELDDESCRIPTOR_FIELDTYPE,
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=703,
+  serialized_end=868,
+)
+
+_METRICSDATASET = _descriptor.Descriptor(
+  name='MetricsDataSet',
+  full_name='ts_mon.proto.MetricsDataSet',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='metric_name', full_name='ts_mon.proto.MetricsDataSet.metric_name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='field_descriptor', full_name='ts_mon.proto.MetricsDataSet.field_descriptor', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='stream_kind', full_name='ts_mon.proto.MetricsDataSet.stream_kind', index=2,
+      number=3, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='value_type', full_name='ts_mon.proto.MetricsDataSet.value_type', index=3,
+      number=4, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='description', full_name='ts_mon.proto.MetricsDataSet.description', index=4,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='annotations', full_name='ts_mon.proto.MetricsDataSet.annotations', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='data', full_name='ts_mon.proto.MetricsDataSet.data', index=6,
+      number=7, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_METRICSDATASET_METRICFIELDDESCRIPTOR, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=383,
+  serialized_end=868,
+)
+
+
+_METRICSDATA_METRICFIELD = _descriptor.Descriptor(
+  name='MetricField',
+  full_name='ts_mon.proto.MetricsData.MetricField',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='ts_mon.proto.MetricsData.MetricField.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='string_value', full_name='ts_mon.proto.MetricsData.MetricField.string_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='int64_value', full_name='ts_mon.proto.MetricsData.MetricField.int64_value', index=2,
+      number=3, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='bool_value', full_name='ts_mon.proto.MetricsData.MetricField.bool_value', index=3,
+      number=4, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='value', full_name='ts_mon.proto.MetricsData.MetricField.value',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=1201,
+  serialized_end=1306,
+)
+
+_METRICSDATA_DISTRIBUTION_LINEAROPTIONS = _descriptor.Descriptor(
+  name='LinearOptions',
+  full_name='ts_mon.proto.MetricsData.Distribution.LinearOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='num_finite_buckets', full_name='ts_mon.proto.MetricsData.Distribution.LinearOptions.num_finite_buckets', index=0,
+      number=1, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='width', full_name='ts_mon.proto.MetricsData.Distribution.LinearOptions.width', index=1,
+      number=2, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='offset', full_name='ts_mon.proto.MetricsData.Distribution.LinearOptions.offset', index=2,
+      number=3, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1769,
+  serialized_end=1843,
+)
+
+_METRICSDATA_DISTRIBUTION_EXPONENTIALOPTIONS = _descriptor.Descriptor(
+  name='ExponentialOptions',
+  full_name='ts_mon.proto.MetricsData.Distribution.ExponentialOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='num_finite_buckets', full_name='ts_mon.proto.MetricsData.Distribution.ExponentialOptions.num_finite_buckets', index=0,
+      number=1, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='growth_factor', full_name='ts_mon.proto.MetricsData.Distribution.ExponentialOptions.growth_factor', index=1,
+      number=2, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='scale', full_name='ts_mon.proto.MetricsData.Distribution.ExponentialOptions.scale', index=2,
+      number=3, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1845,
+  serialized_end=1931,
+)
+
+_METRICSDATA_DISTRIBUTION_EXPLICITOPTIONS = _descriptor.Descriptor(
+  name='ExplicitOptions',
+  full_name='ts_mon.proto.MetricsData.Distribution.ExplicitOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='bound', full_name='ts_mon.proto.MetricsData.Distribution.ExplicitOptions.bound', index=0,
+      number=1, type=1, cpp_type=5, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1933,
+  serialized_end=1969,
+)
+
+_METRICSDATA_DISTRIBUTION_EXEMPLAR = _descriptor.Descriptor(
+  name='Exemplar',
+  full_name='ts_mon.proto.MetricsData.Distribution.Exemplar',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='ts_mon.proto.MetricsData.Distribution.Exemplar.value', index=0,
+      number=1, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='timestamp', full_name='ts_mon.proto.MetricsData.Distribution.Exemplar.timestamp', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='attachment', full_name='ts_mon.proto.MetricsData.Distribution.Exemplar.attachment', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1971,
+  serialized_end=2079,
+)
+
+_METRICSDATA_DISTRIBUTION = _descriptor.Descriptor(
+  name='Distribution',
+  full_name='ts_mon.proto.MetricsData.Distribution',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='count', full_name='ts_mon.proto.MetricsData.Distribution.count', index=0,
+      number=1, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='mean', full_name='ts_mon.proto.MetricsData.Distribution.mean', index=1,
+      number=2, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sum_of_squared_deviation', full_name='ts_mon.proto.MetricsData.Distribution.sum_of_squared_deviation', index=2,
+      number=3, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='minimum', full_name='ts_mon.proto.MetricsData.Distribution.minimum', index=3,
+      number=4, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='maximum', full_name='ts_mon.proto.MetricsData.Distribution.maximum', index=4,
+      number=5, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='linear_buckets', full_name='ts_mon.proto.MetricsData.Distribution.linear_buckets', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='exponential_buckets', full_name='ts_mon.proto.MetricsData.Distribution.exponential_buckets', index=6,
+      number=7, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='explicit_buckets', full_name='ts_mon.proto.MetricsData.Distribution.explicit_buckets', index=7,
+      number=8, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='bucket_count', full_name='ts_mon.proto.MetricsData.Distribution.bucket_count', index=8,
+      number=9, type=3, cpp_type=2, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
+    _descriptor.FieldDescriptor(
+      name='exemplar', full_name='ts_mon.proto.MetricsData.Distribution.exemplar', index=9,
+      number=10, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_METRICSDATA_DISTRIBUTION_LINEAROPTIONS, _METRICSDATA_DISTRIBUTION_EXPONENTIALOPTIONS, _METRICSDATA_DISTRIBUTION_EXPLICITOPTIONS, _METRICSDATA_DISTRIBUTION_EXEMPLAR, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='bucket_options', full_name='ts_mon.proto.MetricsData.Distribution.bucket_options',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=1309,
+  serialized_end=2097,
+)
+
+_METRICSDATA = _descriptor.Descriptor(
+  name='MetricsData',
+  full_name='ts_mon.proto.MetricsData',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='bool_value', full_name='ts_mon.proto.MetricsData.bool_value', index=0,
+      number=1, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='string_value', full_name='ts_mon.proto.MetricsData.string_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='int64_value', full_name='ts_mon.proto.MetricsData.int64_value', index=2,
+      number=3, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='double_value', full_name='ts_mon.proto.MetricsData.double_value', index=3,
+      number=4, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='distribution_value', full_name='ts_mon.proto.MetricsData.distribution_value', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='field', full_name='ts_mon.proto.MetricsData.field', index=5,
+      number=6, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='start_timestamp', full_name='ts_mon.proto.MetricsData.start_timestamp', index=6,
+      number=7, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='end_timestamp', full_name='ts_mon.proto.MetricsData.end_timestamp', index=7,
+      number=8, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_METRICSDATA_METRICFIELD, _METRICSDATA_DISTRIBUTION, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='value', full_name='ts_mon.proto.MetricsData.value',
+      index=0, containing_type=None, fields=[]),
+  ],
+  serialized_start=871,
+  serialized_end=2106,
+)
+
+
+_ANNOTATIONS = _descriptor.Descriptor(
+  name='Annotations',
+  full_name='ts_mon.proto.Annotations',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='unit', full_name='ts_mon.proto.Annotations.unit', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='timestamp', full_name='ts_mon.proto.Annotations.timestamp', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecation', full_name='ts_mon.proto.Annotations.deprecation', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='annotation', full_name='ts_mon.proto.Annotations.annotation', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2108,
+  serialized_end=2214,
+)
+
+_METRICSPAYLOAD.fields_by_name['metrics_collection'].message_type = _METRICSCOLLECTION
+_METRICSCOLLECTION.fields_by_name['metrics_data_set'].message_type = _METRICSDATASET
+_METRICSCOLLECTION.fields_by_name['network_device'].message_type = acquisition_network_device_pb2._NETWORKDEVICE
+_METRICSCOLLECTION.fields_by_name['task'].message_type = acquisition_task_pb2._TASK
+_METRICSCOLLECTION.oneofs_by_name['target_schema'].fields.append(
+  _METRICSCOLLECTION.fields_by_name['network_device'])
+_METRICSCOLLECTION.fields_by_name['network_device'].containing_oneof = _METRICSCOLLECTION.oneofs_by_name['target_schema']
+_METRICSCOLLECTION.oneofs_by_name['target_schema'].fields.append(
+  _METRICSCOLLECTION.fields_by_name['task'])
+_METRICSCOLLECTION.fields_by_name['task'].containing_oneof = _METRICSCOLLECTION.oneofs_by_name['target_schema']
+_METRICSDATASET_METRICFIELDDESCRIPTOR.fields_by_name['field_type'].enum_type = _METRICSDATASET_METRICFIELDDESCRIPTOR_FIELDTYPE
+_METRICSDATASET_METRICFIELDDESCRIPTOR.containing_type = _METRICSDATASET
+_METRICSDATASET_METRICFIELDDESCRIPTOR_FIELDTYPE.containing_type = _METRICSDATASET_METRICFIELDDESCRIPTOR
+_METRICSDATASET.fields_by_name['field_descriptor'].message_type = _METRICSDATASET_METRICFIELDDESCRIPTOR
+_METRICSDATASET.fields_by_name['stream_kind'].enum_type = _STREAMKIND
+_METRICSDATASET.fields_by_name['value_type'].enum_type = _VALUETYPE
+_METRICSDATASET.fields_by_name['annotations'].message_type = _ANNOTATIONS
+_METRICSDATASET.fields_by_name['data'].message_type = _METRICSDATA
+_METRICSDATA_METRICFIELD.containing_type = _METRICSDATA
+_METRICSDATA_METRICFIELD.oneofs_by_name['value'].fields.append(
+  _METRICSDATA_METRICFIELD.fields_by_name['string_value'])
+_METRICSDATA_METRICFIELD.fields_by_name['string_value'].containing_oneof = _METRICSDATA_METRICFIELD.oneofs_by_name['value']
+_METRICSDATA_METRICFIELD.oneofs_by_name['value'].fields.append(
+  _METRICSDATA_METRICFIELD.fields_by_name['int64_value'])
+_METRICSDATA_METRICFIELD.fields_by_name['int64_value'].containing_oneof = _METRICSDATA_METRICFIELD.oneofs_by_name['value']
+_METRICSDATA_METRICFIELD.oneofs_by_name['value'].fields.append(
+  _METRICSDATA_METRICFIELD.fields_by_name['bool_value'])
+_METRICSDATA_METRICFIELD.fields_by_name['bool_value'].containing_oneof = _METRICSDATA_METRICFIELD.oneofs_by_name['value']
+_METRICSDATA_DISTRIBUTION_LINEAROPTIONS.containing_type = _METRICSDATA_DISTRIBUTION
+_METRICSDATA_DISTRIBUTION_EXPONENTIALOPTIONS.containing_type = _METRICSDATA_DISTRIBUTION
+_METRICSDATA_DISTRIBUTION_EXPLICITOPTIONS.containing_type = _METRICSDATA_DISTRIBUTION
+_METRICSDATA_DISTRIBUTION_EXEMPLAR.fields_by_name['timestamp'].message_type = timestamp_pb2._TIMESTAMP
+_METRICSDATA_DISTRIBUTION_EXEMPLAR.fields_by_name['attachment'].message_type = any_pb2._ANY
+_METRICSDATA_DISTRIBUTION_EXEMPLAR.containing_type = _METRICSDATA_DISTRIBUTION
+_METRICSDATA_DISTRIBUTION.fields_by_name['linear_buckets'].message_type = _METRICSDATA_DISTRIBUTION_LINEAROPTIONS
+_METRICSDATA_DISTRIBUTION.fields_by_name['exponential_buckets'].message_type = _METRICSDATA_DISTRIBUTION_EXPONENTIALOPTIONS
+_METRICSDATA_DISTRIBUTION.fields_by_name['explicit_buckets'].message_type = _METRICSDATA_DISTRIBUTION_EXPLICITOPTIONS
+_METRICSDATA_DISTRIBUTION.fields_by_name['exemplar'].message_type = _METRICSDATA_DISTRIBUTION_EXEMPLAR
+_METRICSDATA_DISTRIBUTION.containing_type = _METRICSDATA
+_METRICSDATA_DISTRIBUTION.oneofs_by_name['bucket_options'].fields.append(
+  _METRICSDATA_DISTRIBUTION.fields_by_name['linear_buckets'])
+_METRICSDATA_DISTRIBUTION.fields_by_name['linear_buckets'].containing_oneof = _METRICSDATA_DISTRIBUTION.oneofs_by_name['bucket_options']
+_METRICSDATA_DISTRIBUTION.oneofs_by_name['bucket_options'].fields.append(
+  _METRICSDATA_DISTRIBUTION.fields_by_name['exponential_buckets'])
+_METRICSDATA_DISTRIBUTION.fields_by_name['exponential_buckets'].containing_oneof = _METRICSDATA_DISTRIBUTION.oneofs_by_name['bucket_options']
+_METRICSDATA_DISTRIBUTION.oneofs_by_name['bucket_options'].fields.append(
+  _METRICSDATA_DISTRIBUTION.fields_by_name['explicit_buckets'])
+_METRICSDATA_DISTRIBUTION.fields_by_name['explicit_buckets'].containing_oneof = _METRICSDATA_DISTRIBUTION.oneofs_by_name['bucket_options']
+_METRICSDATA.fields_by_name['distribution_value'].message_type = _METRICSDATA_DISTRIBUTION
+_METRICSDATA.fields_by_name['field'].message_type = _METRICSDATA_METRICFIELD
+_METRICSDATA.fields_by_name['start_timestamp'].message_type = timestamp_pb2._TIMESTAMP
+_METRICSDATA.fields_by_name['end_timestamp'].message_type = timestamp_pb2._TIMESTAMP
+_METRICSDATA.oneofs_by_name['value'].fields.append(
+  _METRICSDATA.fields_by_name['bool_value'])
+_METRICSDATA.fields_by_name['bool_value'].containing_oneof = _METRICSDATA.oneofs_by_name['value']
+_METRICSDATA.oneofs_by_name['value'].fields.append(
+  _METRICSDATA.fields_by_name['string_value'])
+_METRICSDATA.fields_by_name['string_value'].containing_oneof = _METRICSDATA.oneofs_by_name['value']
+_METRICSDATA.oneofs_by_name['value'].fields.append(
+  _METRICSDATA.fields_by_name['int64_value'])
+_METRICSDATA.fields_by_name['int64_value'].containing_oneof = _METRICSDATA.oneofs_by_name['value']
+_METRICSDATA.oneofs_by_name['value'].fields.append(
+  _METRICSDATA.fields_by_name['double_value'])
+_METRICSDATA.fields_by_name['double_value'].containing_oneof = _METRICSDATA.oneofs_by_name['value']
+_METRICSDATA.oneofs_by_name['value'].fields.append(
+  _METRICSDATA.fields_by_name['distribution_value'])
+_METRICSDATA.fields_by_name['distribution_value'].containing_oneof = _METRICSDATA.oneofs_by_name['value']
+_ANNOTATIONS.fields_by_name['annotation'].message_type = any_pb2._ANY
+DESCRIPTOR.message_types_by_name['MetricsPayload'] = _METRICSPAYLOAD
+DESCRIPTOR.message_types_by_name['MetricsCollection'] = _METRICSCOLLECTION
+DESCRIPTOR.message_types_by_name['MetricsDataSet'] = _METRICSDATASET
+DESCRIPTOR.message_types_by_name['MetricsData'] = _METRICSDATA
+DESCRIPTOR.message_types_by_name['Annotations'] = _ANNOTATIONS
+DESCRIPTOR.enum_types_by_name['StreamKind'] = _STREAMKIND
+DESCRIPTOR.enum_types_by_name['ValueType'] = _VALUETYPE
+
+MetricsPayload = _reflection.GeneratedProtocolMessageType('MetricsPayload', (_message.Message,), dict(
+  DESCRIPTOR = _METRICSPAYLOAD,
+  __module__ = 'metrics_pb2'
+  # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsPayload)
+  ))
+_sym_db.RegisterMessage(MetricsPayload)
+
+MetricsCollection = _reflection.GeneratedProtocolMessageType('MetricsCollection', (_message.Message,), dict(
+  DESCRIPTOR = _METRICSCOLLECTION,
+  __module__ = 'metrics_pb2'
+  # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsCollection)
+  ))
+_sym_db.RegisterMessage(MetricsCollection)
+
+MetricsDataSet = _reflection.GeneratedProtocolMessageType('MetricsDataSet', (_message.Message,), dict(
+
+  MetricFieldDescriptor = _reflection.GeneratedProtocolMessageType('MetricFieldDescriptor', (_message.Message,), dict(
+    DESCRIPTOR = _METRICSDATASET_METRICFIELDDESCRIPTOR,
+    __module__ = 'metrics_pb2'
+    # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsDataSet.MetricFieldDescriptor)
+    ))
+  ,
+  DESCRIPTOR = _METRICSDATASET,
+  __module__ = 'metrics_pb2'
+  # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsDataSet)
+  ))
+_sym_db.RegisterMessage(MetricsDataSet)
+_sym_db.RegisterMessage(MetricsDataSet.MetricFieldDescriptor)
+
+MetricsData = _reflection.GeneratedProtocolMessageType('MetricsData', (_message.Message,), dict(
+
+  MetricField = _reflection.GeneratedProtocolMessageType('MetricField', (_message.Message,), dict(
+    DESCRIPTOR = _METRICSDATA_METRICFIELD,
+    __module__ = 'metrics_pb2'
+    # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsData.MetricField)
+    ))
+  ,
+
+  Distribution = _reflection.GeneratedProtocolMessageType('Distribution', (_message.Message,), dict(
+
+    LinearOptions = _reflection.GeneratedProtocolMessageType('LinearOptions', (_message.Message,), dict(
+      DESCRIPTOR = _METRICSDATA_DISTRIBUTION_LINEAROPTIONS,
+      __module__ = 'metrics_pb2'
+      # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsData.Distribution.LinearOptions)
+      ))
+    ,
+
+    ExponentialOptions = _reflection.GeneratedProtocolMessageType('ExponentialOptions', (_message.Message,), dict(
+      DESCRIPTOR = _METRICSDATA_DISTRIBUTION_EXPONENTIALOPTIONS,
+      __module__ = 'metrics_pb2'
+      # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsData.Distribution.ExponentialOptions)
+      ))
+    ,
+
+    ExplicitOptions = _reflection.GeneratedProtocolMessageType('ExplicitOptions', (_message.Message,), dict(
+      DESCRIPTOR = _METRICSDATA_DISTRIBUTION_EXPLICITOPTIONS,
+      __module__ = 'metrics_pb2'
+      # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsData.Distribution.ExplicitOptions)
+      ))
+    ,
+
+    Exemplar = _reflection.GeneratedProtocolMessageType('Exemplar', (_message.Message,), dict(
+      DESCRIPTOR = _METRICSDATA_DISTRIBUTION_EXEMPLAR,
+      __module__ = 'metrics_pb2'
+      # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsData.Distribution.Exemplar)
+      ))
+    ,
+    DESCRIPTOR = _METRICSDATA_DISTRIBUTION,
+    __module__ = 'metrics_pb2'
+    # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsData.Distribution)
+    ))
+  ,
+  DESCRIPTOR = _METRICSDATA,
+  __module__ = 'metrics_pb2'
+  # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsData)
+  ))
+_sym_db.RegisterMessage(MetricsData)
+_sym_db.RegisterMessage(MetricsData.MetricField)
+_sym_db.RegisterMessage(MetricsData.Distribution)
+_sym_db.RegisterMessage(MetricsData.Distribution.LinearOptions)
+_sym_db.RegisterMessage(MetricsData.Distribution.ExponentialOptions)
+_sym_db.RegisterMessage(MetricsData.Distribution.ExplicitOptions)
+_sym_db.RegisterMessage(MetricsData.Distribution.Exemplar)
+
+Annotations = _reflection.GeneratedProtocolMessageType('Annotations', (_message.Message,), dict(
+  DESCRIPTOR = _ANNOTATIONS,
+  __module__ = 'metrics_pb2'
+  # @@protoc_insertion_point(class_scope:ts_mon.proto.Annotations)
+  ))
+_sym_db.RegisterMessage(Annotations)
+
+
+_METRICSDATA_DISTRIBUTION_EXPLICITOPTIONS.fields_by_name['bound'].has_options = True
+_METRICSDATA_DISTRIBUTION_EXPLICITOPTIONS.fields_by_name['bound']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
+_METRICSDATA_DISTRIBUTION.fields_by_name['bucket_count'].has_options = True
+_METRICSDATA_DISTRIBUTION.fields_by_name['bucket_count']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
+# @@protoc_insertion_point(module_scope)
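The generated `metrics_pb2` module above exposes the ts_mon message classes through protobuf reflection. Below is a minimal sketch of building a `MetricsData` entry, assuming the `protos` directory is on `sys.path` (so the flat `metrics_pb2` module name used by the generated code resolves, along with its sibling `*_pb2` modules) and that a protobuf runtime old enough to load this pre-3.20 style of generated code is installed; only fields visible in the descriptors above are used.

```python
import metrics_pb2  # generated module added in the diff above (import path is an assumption)

data = metrics_pb2.MetricsData()
data.int64_value = 42                      # selects the MetricsData 'value' oneof
data.start_timestamp.seconds = 1700000000  # ts_mon.proto.Timestamp submessage
data.end_timestamp.seconds = 1700000060

f = data.field.add()                       # repeated MetricsData.MetricField
f.string_value = 'success'                 # selects the MetricField 'value' oneof

dist = metrics_pb2.MetricsData()
dist.distribution_value.bucket_count.extend([0, 3, 5, 1])  # packed repeated int64

print(data.WhichOneof('value'))            # -> 'int64_value'
wire = data.SerializeToString()            # serialized bytes
```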
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/timestamp.proto b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/timestamp.proto
new file mode 100644
index 0000000..83ab8ec
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/timestamp.proto
@@ -0,0 +1,11 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+syntax = "proto2";
+
+package ts_mon.proto;
+
+message Timestamp {
+  optional int64 seconds = 1;
+  optional int32 nanos = 2;
+}
diff --git a/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/timestamp_pb2.py b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/timestamp_pb2.py
new file mode 100644
index 0000000..10bb23d
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/ts_mon/protos/timestamp_pb2.py
@@ -0,0 +1,74 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: timestamp.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='timestamp.proto',
+  package='ts_mon.proto',
+  serialized_pb=_b('\n\x0ftimestamp.proto\x12\x0cts_mon.proto\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_TIMESTAMP = _descriptor.Descriptor(
+  name='Timestamp',
+  full_name='ts_mon.proto.Timestamp',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='seconds', full_name='ts_mon.proto.Timestamp.seconds', index=0,
+      number=1, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='nanos', full_name='ts_mon.proto.Timestamp.nanos', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=33,
+  serialized_end=76,
+)
+
+DESCRIPTOR.message_types_by_name['Timestamp'] = _TIMESTAMP
+
+Timestamp = _reflection.GeneratedProtocolMessageType('Timestamp', (_message.Message,), dict(
+  DESCRIPTOR = _TIMESTAMP,
+  __module__ = 'timestamp_pb2'
+  # @@protoc_insertion_point(class_scope:ts_mon.proto.Timestamp)
+  ))
+_sym_db.RegisterMessage(Timestamp)
+
+
+# @@protoc_insertion_point(module_scope)
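A quick round trip through the generated `Timestamp` message, again assuming `timestamp_pb2` is importable on its own with a compatible protobuf runtime:

```python
import timestamp_pb2  # generated module added in the diff above

ts = timestamp_pb2.Timestamp(seconds=1700000000, nanos=500000000)
wire = ts.SerializeToString()

parsed = timestamp_pb2.Timestamp()
parsed.ParseFromString(wire)
assert parsed.seconds == 1700000000 and parsed.nanos == 500000000
```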
diff --git a/utils/frozen_chromite/third_party/infra_libs/utils.py b/utils/frozen_chromite/third_party/infra_libs/utils.py
new file mode 100644
index 0000000..5e59872
--- /dev/null
+++ b/utils/frozen_chromite/third_party/infra_libs/utils.py
@@ -0,0 +1,49 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Miscellaneous utility functions."""
+
+import contextlib
+import shutil
+import sys
+import tempfile
+
+
+# We're trying to be compatible with Python3 tempfile.TemporaryDirectory
+# context manager here. And they used 'dir' as a keyword argument.
+# pylint: disable=redefined-builtin
+@contextlib.contextmanager
+def temporary_directory(suffix="", prefix="tmp", dir=None,
+                        keep_directory=False):
+  """Create and return a temporary directory.  This has the same
+  behavior as mkdtemp but can be used as a context manager.  For
+  example:
+
+    with temporary_directory() as tmpdir:
+      ...
+
+  Upon exiting the context, the directory and everything contained
+  in it are removed.
+
+  Args:
+    suffix, prefix, dir: same arguments as for tempfile.mkdtemp.
+    keep_directory (bool): if True, do not delete the temporary directory
+      when exiting. Useful for debugging.
+
+  Returns:
+    tempdir (str): full path to the temporary directory.
+  """
+  tempdir = None  # Handle mkdtemp raising an exception
+  try:
+    tempdir = tempfile.mkdtemp(suffix, prefix, dir)
+    yield tempdir
+
+  finally:
+    if tempdir and not keep_directory:  # pragma: no branch
+      try:
+        # TODO(pgervais,496347) Make this work reliably on Windows.
+        shutil.rmtree(tempdir, ignore_errors=True)
+      except OSError as ex:  # pragma: no cover
+        sys.stderr.write(
+          "ERROR: {!r} while cleaning up {!r}\n".format(ex, tempdir))
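Beyond the usage shown in the docstring, the `keep_directory` flag is handy when debugging a failing run. A short sketch follows; the import path and file names are illustrative assumptions, not part of the module:

```python
import os
from infra_libs.utils import temporary_directory  # import path is an assumption

# Normal use: the directory is deleted when the block exits.
with temporary_directory(prefix='results-') as tmpdir:
    with open(os.path.join(tmpdir, 'log.txt'), 'w') as f:
        f.write('scratch output\n')

# Debug use: the directory is left in place so it can be inspected afterwards.
with temporary_directory(keep_directory=True) as tmpdir:
    print('artifacts kept in', tmpdir)
```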
diff --git a/utils/frozen_chromite/third_party/oauth2client/LICENSE b/utils/frozen_chromite/third_party/oauth2client/LICENSE
new file mode 100644
index 0000000..b506d50
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/LICENSE
@@ -0,0 +1,22 @@
+ Copyright 2014 Google Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+Dependent Modules
+=================
+
+This code has the following dependencies
+above and beyond the Python standard library:
+
+uritemplates - Apache License 2.0
+httplib2 - MIT License
diff --git a/utils/frozen_chromite/third_party/oauth2client/README.md b/utils/frozen_chromite/third_party/oauth2client/README.md
new file mode 100644
index 0000000..005aff5
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/README.md
@@ -0,0 +1,29 @@
+[![Build Status](https://travis-ci.org/google/oauth2client.svg?branch=master)](https://travis-ci.org/google/oauth2client)
+[![Coverage Status](https://coveralls.io/repos/google/oauth2client/badge.svg?branch=master&service=github)](https://coveralls.io/github/google/oauth2client?branch=master)
+[![Documentation Status](https://readthedocs.org/projects/oauth2client/badge/?version=latest)](http://oauth2client.readthedocs.org/)
+
+This is a client library for accessing resources protected by OAuth 2.0.
+
+Installation
+============
+
+To install, simply say
+
+```bash
+$ pip install --upgrade oauth2client
+```
+
+Contributing
+============
+
+Please see the [CONTRIBUTING page][1] for more information. In particular, we
+love pull requests -- but please make sure to sign the contributor license
+agreement.
+
+Supported Python Versions
+=========================
+
+We support Python 2.6, 2.7, 3.3+. More information [in the docs][2].
+
+[1]: https://github.com/google/oauth2client/blob/master/CONTRIBUTING.md
+[2]: http://oauth2client.readthedocs.org/#supported-python-versions
diff --git a/utils/frozen_chromite/third_party/oauth2client/README.swarming b/utils/frozen_chromite/third_party/oauth2client/README.swarming
new file mode 100644
index 0000000..9193d48
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/README.swarming
@@ -0,0 +1,14 @@
+Name: oauth2client
+Short Name: oauth2client
+URL: https://github.com/google/oauth2client/archive/v1.5.2.tar.gz
+Version: 1.5.2
+Revision: 73d9d55447de97dfe541395817a0c8241701f7d6
+License: Apache License, Version 2.0
+
+Description:
+oauth2client is a client library for OAuth 2.0.
+
+Local Modifications:
+- Kept oauth2client/.
+- Removed: appengine.py devshell.py django_orm.py flask_util.py
+- Kept LICENSE and README.md.
diff --git a/utils/frozen_chromite/third_party/oauth2client/__init__.py b/utils/frozen_chromite/third_party/oauth2client/__init__.py
new file mode 100644
index 0000000..f7c36c1
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client library for using OAuth2, especially with Google APIs."""
+
+__version__ = '1.5.2'
+
+GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
+GOOGLE_DEVICE_URI = 'https://accounts.google.com/o/oauth2/device/code'
+GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
+GOOGLE_TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
+GOOGLE_TOKEN_INFO_URI = 'https://www.googleapis.com/oauth2/v2/tokeninfo'
diff --git a/utils/frozen_chromite/third_party/oauth2client/_helpers.py b/utils/frozen_chromite/third_party/oauth2client/_helpers.py
new file mode 100644
index 0000000..39bfeb6
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/_helpers.py
@@ -0,0 +1,103 @@
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Helper functions for commonly used utilities."""
+
+import base64
+import json
+import six
+
+
+def _parse_pem_key(raw_key_input):
+    """Identify and extract PEM keys.
+
+    Determines whether the given key is in PEM format, and extracts
+    the relevant part of the key if it is.
+
+    Args:
+        raw_key_input: The contents of a private key file (either PEM or
+                       PKCS12).
+
+    Returns:
+        string, The actual key if the contents are from a PEM file, or
+        else None.
+    """
+    offset = raw_key_input.find(b'-----BEGIN ')
+    if offset != -1:
+        return raw_key_input[offset:]
+
+
+def _json_encode(data):
+    return json.dumps(data, separators=(',', ':'))
+
+
+def _to_bytes(value, encoding='ascii'):
+    """Converts a string value to bytes, if necessary.
+
+    Unfortunately, ``six.b`` is insufficient for this task since in
+    Python2 it does not modify ``unicode`` objects.
+
+    Args:
+        value: The string/bytes value to be converted.
+        encoding: The encoding to use to convert unicode to bytes. Defaults
+                  to "ascii", which will not allow any characters from ordinals
+                  larger than 127. Other useful values are "latin-1", which
+                  will only allow byte ordinals (up to 255), and "utf-8",
+                  which will encode any unicode that needs to be.
+
+    Returns:
+        The original value converted to bytes (if unicode) or as passed in
+        if it started out as bytes.
+
+    Raises:
+        ValueError if the value could not be converted to bytes.
+    """
+    result = (value.encode(encoding)
+              if isinstance(value, six.text_type) else value)
+    if isinstance(result, six.binary_type):
+        return result
+    else:
+        raise ValueError('%r could not be converted to bytes' % (value,))
+
+
+def _from_bytes(value):
+    """Converts bytes to a string value, if necessary.
+
+    Args:
+        value: The string/bytes value to be converted.
+
+    Returns:
+        The original value converted to unicode (if bytes) or as passed in
+        if it started out as unicode.
+
+    Raises:
+        ValueError if the value could not be converted to unicode.
+    """
+    result = (value.decode('utf-8')
+              if isinstance(value, six.binary_type) else value)
+    if isinstance(result, six.text_type):
+        return result
+    else:
+        raise ValueError('%r could not be converted to unicode' % (value,))
+
+
+def _urlsafe_b64encode(raw_bytes):
+    raw_bytes = _to_bytes(raw_bytes, encoding='utf-8')
+    return base64.urlsafe_b64encode(raw_bytes).rstrip(b'=')
+
+
+def _urlsafe_b64decode(b64string):
+    # Guard against unicode strings, which base64 can't handle.
+    b64string = _to_bytes(b64string)
+    padded = b64string + b'=' * (4 - len(b64string) % 4)
+    return base64.urlsafe_b64decode(padded)
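A small sketch of the byte/unicode helpers above, mainly the unpadded base64url round trip used for JWT segments. It assumes the vendored `third_party` directory is on `sys.path` so `oauth2client` resolves to the copy added here:

```python
from oauth2client._helpers import (_from_bytes, _to_bytes,
                                   _urlsafe_b64decode, _urlsafe_b64encode)

header = u'{"alg":"RS256"}'
token = _urlsafe_b64encode(header)          # b'eyJhbGciOiJSUzI1NiJ9' (unpadded base64url)
assert _urlsafe_b64decode(token) == _to_bytes(header, encoding='utf-8')

assert _from_bytes(b'abc') == u'abc'        # bytes -> text
assert _to_bytes(u'abc') == b'abc'          # text -> bytes (ascii by default)
```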
diff --git a/utils/frozen_chromite/third_party/oauth2client/_openssl_crypt.py b/utils/frozen_chromite/third_party/oauth2client/_openssl_crypt.py
new file mode 100644
index 0000000..d024cf3
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/_openssl_crypt.py
@@ -0,0 +1,139 @@
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""OpenSSL Crypto-related routines for oauth2client."""
+
+import base64
+
+from OpenSSL import crypto
+
+from oauth2client._helpers import _parse_pem_key
+from oauth2client._helpers import _to_bytes
+
+
+class OpenSSLVerifier(object):
+    """Verifies the signature on a message."""
+
+    def __init__(self, pubkey):
+        """Constructor.
+
+        Args:
+            pubkey: OpenSSL.crypto.PKey, The public key to verify with.
+        """
+        self._pubkey = pubkey
+
+    def verify(self, message, signature):
+        """Verifies a message against a signature.
+
+        Args:
+            message: string or bytes, The message to verify. If string, will be
+                     encoded to bytes as utf-8.
+            signature: string or bytes, The signature on the message. If string,
+                       will be encoded to bytes as utf-8.
+
+        Returns:
+            True if message was signed by the private key associated with the
+            public key that this object was constructed with.
+        """
+        message = _to_bytes(message, encoding='utf-8')
+        signature = _to_bytes(signature, encoding='utf-8')
+        try:
+            crypto.verify(self._pubkey, signature, message, 'sha256')
+            return True
+        except crypto.Error:
+            return False
+
+    @staticmethod
+    def from_string(key_pem, is_x509_cert):
+        """Construct a Verifier instance from a string.
+
+        Args:
+            key_pem: string, public key in PEM format.
+            is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
+                          is expected to be an RSA key in PEM format.
+
+        Returns:
+            Verifier instance.
+
+        Raises:
+            OpenSSL.crypto.Error: if the key_pem can't be parsed.
+        """
+        if is_x509_cert:
+            pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, key_pem)
+        else:
+            pubkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem)
+        return OpenSSLVerifier(pubkey)
+
+
+class OpenSSLSigner(object):
+    """Signs messages with a private key."""
+
+    def __init__(self, pkey):
+        """Constructor.
+
+        Args:
+            pkey: OpenSSL.crypto.PKey (or equiv), The private key to sign with.
+        """
+        self._key = pkey
+
+    def sign(self, message):
+        """Signs a message.
+
+        Args:
+            message: bytes, Message to be signed.
+
+        Returns:
+            string, The signature of the message for the given key.
+        """
+        message = _to_bytes(message, encoding='utf-8')
+        return crypto.sign(self._key, message, 'sha256')
+
+    @staticmethod
+    def from_string(key, password=b'notasecret'):
+        """Construct a Signer instance from a string.
+
+        Args:
+            key: string, private key in PKCS12 or PEM format.
+            password: string, password for the private key file.
+
+        Returns:
+            Signer instance.
+
+        Raises:
+            OpenSSL.crypto.Error if the key can't be parsed.
+        """
+        parsed_pem_key = _parse_pem_key(key)
+        if parsed_pem_key:
+            pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key)
+        else:
+            password = _to_bytes(password, encoding='utf-8')
+            pkey = crypto.load_pkcs12(key, password).get_privatekey()
+        return OpenSSLSigner(pkey)
+
+
+def pkcs12_key_as_pem(private_key_text, private_key_password):
+    """Convert the contents of a PKCS12 key to PEM using OpenSSL.
+
+    Args:
+        private_key_text: String. Private key.
+        private_key_password: String. Password for PKCS12.
+
+    Returns:
+        String. PEM contents of ``private_key_text``.
+    """
+    decoded_body = base64.b64decode(private_key_text)
+    private_key_password = _to_bytes(private_key_password)
+
+    pkcs12 = crypto.load_pkcs12(decoded_body, private_key_password)
+    return crypto.dump_privatekey(crypto.FILETYPE_PEM,
+                                  pkcs12.get_privatekey())
diff --git a/utils/frozen_chromite/third_party/oauth2client/_pycrypto_crypt.py b/utils/frozen_chromite/third_party/oauth2client/_pycrypto_crypt.py
new file mode 100644
index 0000000..7b277aa
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/_pycrypto_crypt.py
@@ -0,0 +1,128 @@
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""pyCrypto Crypto-related routines for oauth2client."""
+
+from Crypto.PublicKey import RSA
+from Crypto.Hash import SHA256
+from Crypto.Signature import PKCS1_v1_5
+from Crypto.Util.asn1 import DerSequence
+
+from oauth2client._helpers import _parse_pem_key
+from oauth2client._helpers import _to_bytes
+from oauth2client._helpers import _urlsafe_b64decode
+
+
+class PyCryptoVerifier(object):
+    """Verifies the signature on a message."""
+
+    def __init__(self, pubkey):
+        """Constructor.
+
+        Args:
+            pubkey: OpenSSL.crypto.PKey (or equiv), The public key to verify
+            with.
+        """
+        self._pubkey = pubkey
+
+    def verify(self, message, signature):
+        """Verifies a message against a signature.
+
+        Args:
+            message: string or bytes, The message to verify. If string, will be
+                     encoded to bytes as utf-8.
+            signature: string or bytes, The signature on the message.
+
+        Returns:
+            True if message was signed by the private key associated with the
+            public key that this object was constructed with.
+        """
+        message = _to_bytes(message, encoding='utf-8')
+        return PKCS1_v1_5.new(self._pubkey).verify(
+            SHA256.new(message), signature)
+
+    @staticmethod
+    def from_string(key_pem, is_x509_cert):
+        """Construct a Verifier instance from a string.
+
+        Args:
+            key_pem: string, public key in PEM format.
+            is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
+                          is expected to be an RSA key in PEM format.
+
+        Returns:
+            Verifier instance.
+        """
+        if is_x509_cert:
+            key_pem = _to_bytes(key_pem)
+            pemLines = key_pem.replace(b' ', b'').split()
+            certDer = _urlsafe_b64decode(b''.join(pemLines[1:-1]))
+            certSeq = DerSequence()
+            certSeq.decode(certDer)
+            tbsSeq = DerSequence()
+            tbsSeq.decode(certSeq[0])
+            pubkey = RSA.importKey(tbsSeq[6])
+        else:
+            pubkey = RSA.importKey(key_pem)
+        return PyCryptoVerifier(pubkey)
+
+
+class PyCryptoSigner(object):
+    """Signs messages with a private key."""
+
+    def __init__(self, pkey):
+        """Constructor.
+
+        Args:
+            pkey: OpenSSL.crypto.PKey (or equiv), The private key to sign with.
+        """
+        self._key = pkey
+
+    def sign(self, message):
+        """Signs a message.
+
+        Args:
+            message: string, Message to be signed.
+
+        Returns:
+            string, The signature of the message for the given key.
+        """
+        message = _to_bytes(message, encoding='utf-8')
+        return PKCS1_v1_5.new(self._key).sign(SHA256.new(message))
+
+    @staticmethod
+    def from_string(key, password='notasecret'):
+        """Construct a Signer instance from a string.
+
+        Args:
+            key: string, private key in PEM format.
+            password: string, password for private key file. Unused for PEM
+                      files.
+
+        Returns:
+            Signer instance.
+
+        Raises:
+            NotImplementedError if the key isn't in PEM format.
+        """
+        parsed_pem_key = _parse_pem_key(key)
+        if parsed_pem_key:
+            pkey = RSA.importKey(parsed_pem_key)
+        else:
+            raise NotImplementedError(
+                'PKCS12 format is not supported by the PyCrypto library. '
+                'Try converting to a "PEM" '
+                '(openssl pkcs12 -in xxxxx.p12 -nodes -nocerts > '
+                'privatekey.pem) '
+                'or using PyOpenSSL if native code is an option.')
+        return PyCryptoSigner(pkey)
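A hedged sign/verify round trip with the PyCrypto backend above. This assumes a PyCrypto-compatible `Crypto` package is installed (pycryptodome keeps the legacy boolean-returning `PKCS1_v1_5.verify`), that the vendored `oauth2client` copy is importable, and uses a freshly generated key purely for illustration rather than a real service-account key:

```python
from Crypto.PublicKey import RSA

from oauth2client._pycrypto_crypt import PyCryptoSigner, PyCryptoVerifier

key_pem = RSA.generate(2048).exportKey()   # PEM private key, illustrative only

signer = PyCryptoSigner.from_string(key_pem)
signature = signer.sign('payload-to-protect')

# RSA.importKey accepts the private PEM as well, so the same string works here.
verifier = PyCryptoVerifier.from_string(key_pem, is_x509_cert=False)
assert verifier.verify('payload-to-protect', signature)
```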
diff --git a/utils/frozen_chromite/third_party/oauth2client/client.py b/utils/frozen_chromite/third_party/oauth2client/client.py
new file mode 100644
index 0000000..0bfe004
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/client.py
@@ -0,0 +1,2243 @@
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An OAuth 2.0 client.
+
+Tools for interacting with OAuth 2.0 protected resources.
+"""
+
+import base64
+import collections
+import copy
+import datetime
+import json
+import logging
+import os
+import socket
+import sys
+import tempfile
+import time
+import shutil
+import six
+from six.moves import urllib
+
+import httplib2
+from oauth2client import GOOGLE_AUTH_URI
+from oauth2client import GOOGLE_DEVICE_URI
+from oauth2client import GOOGLE_REVOKE_URI
+from oauth2client import GOOGLE_TOKEN_URI
+from oauth2client import GOOGLE_TOKEN_INFO_URI
+from oauth2client._helpers import _from_bytes
+from oauth2client._helpers import _to_bytes
+from oauth2client._helpers import _urlsafe_b64decode
+from oauth2client import clientsecrets
+from oauth2client import util
+
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+HAS_OPENSSL = False
+HAS_CRYPTO = False
+try:
+    from oauth2client import crypt
+    HAS_CRYPTO = True
+    if crypt.OpenSSLVerifier is not None:
+        HAS_OPENSSL = True
+except ImportError:
+    pass
+
+
+logger = logging.getLogger(__name__)
+
+# Expiry is stored in RFC3339 UTC format
+EXPIRY_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
+
+# Which certs to use to validate id_tokens received.
+ID_TOKEN_VERIFICATION_CERTS = 'https://www.googleapis.com/oauth2/v1/certs'
+# This symbol previously had a typo in the name; we keep the old name
+# around for now, but will remove it in the future.
+ID_TOKEN_VERIFICATON_CERTS = ID_TOKEN_VERIFICATION_CERTS
+
+# Constant to use for the out of band OAuth 2.0 flow.
+OOB_CALLBACK_URN = 'urn:ietf:wg:oauth:2.0:oob'
+
+# Google Data client libraries may need to set this to [401, 403].
+REFRESH_STATUS_CODES = [401]
+
+# The value representing user credentials.
+AUTHORIZED_USER = 'authorized_user'
+
+# The value representing service account credentials.
+SERVICE_ACCOUNT = 'service_account'
+
+# The environment variable pointing the file with local
+# Application Default Credentials.
+GOOGLE_APPLICATION_CREDENTIALS = 'GOOGLE_APPLICATION_CREDENTIALS'
+# The ~/.config subdirectory containing gcloud credentials. Intended
+# to be swapped out in tests.
+_CLOUDSDK_CONFIG_DIRECTORY = 'gcloud'
+# The environment variable name which can replace ~/.config if set.
+_CLOUDSDK_CONFIG_ENV_VAR = 'CLOUDSDK_CONFIG'
+
+# The error message we show users when we can't find the Application
+# Default Credentials.
+ADC_HELP_MSG = (
+    'The Application Default Credentials are not available. They are '
+    'available if running in Google Compute Engine. Otherwise, the '
+    'environment variable ' +
+    GOOGLE_APPLICATION_CREDENTIALS +
+    ' must be defined pointing to a file defining the credentials. See '
+    'https://developers.google.com/accounts/docs/'
+    'application-default-credentials for more information.')
+
+# The access token along with the seconds in which it expires.
+AccessTokenInfo = collections.namedtuple(
+    'AccessTokenInfo', ['access_token', 'expires_in'])
+
+DEFAULT_ENV_NAME = 'UNKNOWN'
+
+# If set to True, _get_environment avoids the GCE check (_detect_gce_environment).
+NO_GCE_CHECK = os.environ.setdefault('NO_GCE_CHECK', 'False')
+
+_SERVER_SOFTWARE = 'SERVER_SOFTWARE'
+_GCE_METADATA_HOST = '169.254.169.254'
+_METADATA_FLAVOR_HEADER = 'Metadata-Flavor'
+_DESIRED_METADATA_FLAVOR = 'Google'
+
+
+class SETTINGS(object):
+    """Settings namespace for globally defined values."""
+    env_name = None
+
+
+class Error(Exception):
+    """Base error for this module."""
+
+
+class FlowExchangeError(Error):
+    """Error trying to exchange an authorization grant for an access token."""
+
+
+class AccessTokenRefreshError(Error):
+    """Error trying to refresh an expired access token."""
+
+
+class HttpAccessTokenRefreshError(AccessTokenRefreshError):
+    """Error (with HTTP status) trying to refresh an expired access token."""
+    def __init__(self, *args, **kwargs):
+        super(HttpAccessTokenRefreshError, self).__init__(*args)
+        self.status = kwargs.get('status')
+
+
+class TokenRevokeError(Error):
+    """Error trying to revoke a token."""
+
+
+class UnknownClientSecretsFlowError(Error):
+    """The client secrets file called for an unknown type of OAuth 2.0 flow."""
+
+
+class AccessTokenCredentialsError(Error):
+    """Having only the access_token means no refresh is possible."""
+
+
+class VerifyJwtTokenError(Error):
+    """Could not retrieve certificates for validation."""
+
+
+class NonAsciiHeaderError(Error):
+    """Header names and values must be ASCII strings."""
+
+
+class ApplicationDefaultCredentialsError(Error):
+    """Error retrieving the Application Default Credentials."""
+
+
+class OAuth2DeviceCodeError(Error):
+    """Error trying to retrieve a device code."""
+
+
+class CryptoUnavailableError(Error, NotImplementedError):
+    """Raised when a crypto library is required, but none is available."""
+
+
+def _abstract():
+    raise NotImplementedError('You need to override this function')
+
+
+class MemoryCache(object):
+    """httplib2 Cache implementation which only caches locally."""
+
+    def __init__(self):
+        self.cache = {}
+
+    def get(self, key):
+        return self.cache.get(key)
+
+    def set(self, key, value):
+        self.cache[key] = value
+
+    def delete(self, key):
+        self.cache.pop(key, None)
+
+
+class Credentials(object):
+    """Base class for all Credentials objects.
+
+    Subclasses must define an authorize() method that applies the credentials
+    to an HTTP transport.
+
+    Subclasses must also specify a classmethod named 'from_json' that takes a
+    JSON string as input and returns an instantiated Credentials object.
+    """
+
+    NON_SERIALIZED_MEMBERS = ['store']
+
+    def authorize(self, http):
+        """Takes an httplib2.Http instance (or equivalent) and authorizes it.
+
+        Authorizes it for the set of credentials, usually by replacing
+        http.request() with a method that adds in the appropriate headers and
+        then delegates to the original Http.request() method.
+
+        Args:
+            http: httplib2.Http, an http object to be used to make the refresh
+                  request.
+        """
+        _abstract()
+
+    def refresh(self, http):
+        """Forces a refresh of the access_token.
+
+        Args:
+            http: httplib2.Http, an http object to be used to make the refresh
+                  request.
+        """
+        _abstract()
+
+    def revoke(self, http):
+        """Revokes a refresh_token and makes the credentials void.
+
+        Args:
+            http: httplib2.Http, an http object to be used to make the revoke
+                  request.
+        """
+        _abstract()
+
+    def apply(self, headers):
+        """Add the authorization to the headers.
+
+        Args:
+            headers: dict, the headers to add the Authorization header to.
+        """
+        _abstract()
+
+    def _to_json(self, strip):
+        """Utility function that creates JSON repr. of a Credentials object.
+
+        Args:
+            strip: array, An array of names of members to not include in the
+                   JSON.
+
+        Returns:
+            string, a JSON representation of this instance, suitable to pass to
+            from_json().
+        """
+        t = type(self)
+        d = copy.copy(self.__dict__)
+        for member in strip:
+            if member in d:
+                del d[member]
+        if (d.get('token_expiry') and
+                isinstance(d['token_expiry'], datetime.datetime)):
+            d['token_expiry'] = d['token_expiry'].strftime(EXPIRY_FORMAT)
+        # Add in information we will need later to reconstitute this instance.
+        d['_class'] = t.__name__
+        d['_module'] = t.__module__
+        for key, val in d.items():
+            if isinstance(val, bytes):
+                d[key] = val.decode('utf-8')
+            if isinstance(val, set):
+                d[key] = list(val)
+        return json.dumps(d)
+
+    def to_json(self):
+        """Creates a JSON representation of an instance of Credentials.
+
+        Returns:
+            string, a JSON representation of this instance, suitable to pass to
+            from_json().
+        """
+        return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
+
+    @classmethod
+    def new_from_json(cls, s):
+        """Utility class method to instantiate a Credentials subclass from JSON.
+
+        Expects the JSON string to have been produced by to_json().
+
+        Args:
+            s: string or bytes, JSON from to_json().
+
+        Returns:
+            An instance of the subclass of Credentials that was serialized with
+            to_json().
+        """
+        json_string_as_unicode = _from_bytes(s)
+        data = json.loads(json_string_as_unicode)
+        # Find and call the right classmethod from_json() to restore
+        # the object.
+        module_name = data['_module']
+        try:
+            module_obj = __import__(module_name)
+        except ImportError:
+            # In case there's an object from the old package structure,
+            # update it
+            module_name = module_name.replace('.googleapiclient', '')
+            module_obj = __import__(module_name)
+
+        module_obj = __import__(module_name,
+                                fromlist=module_name.split('.')[:-1])
+        kls = getattr(module_obj, data['_class'])
+        from_json = getattr(kls, 'from_json')
+        return from_json(json_string_as_unicode)
+
+    @classmethod
+    def from_json(cls, unused_data):
+        """Instantiate a Credentials object from a JSON description of it.
+
+        The JSON should have been produced by calling .to_json() on the object.
+
+        Args:
+            unused_data: dict, A deserialized JSON object.
+
+        Returns:
+            An instance of a Credentials subclass.
+        """
+        return Credentials()
+
+
+class Flow(object):
+    """Base class for all Flow objects."""
+    pass
+
+
+class Storage(object):
+    """Base class for all Storage objects.
+
+    Store and retrieve a single credential. This class supports locking
+    such that multiple processes and threads can operate on a single
+    store.
+    """
+
+    def acquire_lock(self):
+        """Acquires any lock necessary to access this Storage.
+
+        This lock is not reentrant.
+        """
+        pass
+
+    def release_lock(self):
+        """Release the Storage lock.
+
+        Trying to release a lock that isn't held will result in a
+        RuntimeError.
+        """
+        pass
+
+    def locked_get(self):
+        """Retrieve credential.
+
+        The Storage lock must be held when this is called.
+
+        Returns:
+            oauth2client.client.Credentials
+        """
+        _abstract()
+
+    def locked_put(self, credentials):
+        """Write a credential.
+
+        The Storage lock must be held when this is called.
+
+        Args:
+            credentials: Credentials, the credentials to store.
+        """
+        _abstract()
+
+    def locked_delete(self):
+        """Delete a credential.
+
+        The Storage lock must be held when this is called.
+        """
+        _abstract()
+
+    def get(self):
+        """Retrieve credential.
+
+        The Storage lock must *not* be held when this is called.
+
+        Returns:
+            oauth2client.client.Credentials
+        """
+        self.acquire_lock()
+        try:
+            return self.locked_get()
+        finally:
+            self.release_lock()
+
+    def put(self, credentials):
+        """Write a credential.
+
+        The Storage lock must *not* be held when this is called.
+
+        Args:
+            credentials: Credentials, the credentials to store.
+        """
+        self.acquire_lock()
+        try:
+            self.locked_put(credentials)
+        finally:
+            self.release_lock()
+
+    def delete(self):
+        """Delete credential.
+
+        Frees any resources associated with storing the credential.
+        The Storage lock must *not* be held when this is called.
+
+        Returns:
+            None
+        """
+        self.acquire_lock()
+        try:
+            return self.locked_delete()
+        finally:
+            self.release_lock()
+
+
+def clean_headers(headers):
+    """Forces header keys and values to be strings, i.e. not unicode.
+
+    The httplib module just concatenates header keys and values in a way that
+    may make the message header a unicode string which, if then concatenated
+    to a binary request body, may result in a unicode decode error.
+
+    Args:
+        headers: dict, A dictionary of headers.
+
+    Returns:
+        The same dictionary but with all the keys converted to strings.
+    """
+    clean = {}
+    try:
+        for k, v in six.iteritems(headers):
+            if not isinstance(k, six.binary_type):
+                k = str(k)
+            if not isinstance(v, six.binary_type):
+                v = str(v)
+            clean[_to_bytes(k)] = _to_bytes(v)
+    except UnicodeEncodeError:
+        raise NonAsciiHeaderError(k, ': ', v)
+    return clean
+
+
+def _update_query_params(uri, params):
+    """Updates a URI with new query parameters.
+
+    Args:
+        uri: string, A valid URI, with potential existing query parameters.
+        params: dict, A dictionary of query parameters.
+
+    Returns:
+        The same URI but with the new query parameters added.
+    """
+    parts = urllib.parse.urlparse(uri)
+    query_params = dict(urllib.parse.parse_qsl(parts.query))
+    query_params.update(params)
+    new_parts = parts._replace(query=urllib.parse.urlencode(query_params))
+    return urllib.parse.urlunparse(new_parts)
+
+
+class OAuth2Credentials(Credentials):
+    """Credentials object for OAuth 2.0.
+
+    Credentials can be applied to an httplib2.Http object using the authorize()
+    method, which then adds the OAuth 2.0 access token to each request.
+
+    OAuth2Credentials objects may be safely pickled and unpickled.
+    """
+
+    @util.positional(8)
+    def __init__(self, access_token, client_id, client_secret, refresh_token,
+                 token_expiry, token_uri, user_agent, revoke_uri=None,
+                 id_token=None, token_response=None, scopes=None,
+                 token_info_uri=None):
+        """Create an instance of OAuth2Credentials.
+
+        This constructor is not usually called by the user, instead
+        OAuth2Credentials objects are instantiated by the OAuth2WebServerFlow.
+
+        Args:
+            access_token: string, access token.
+            client_id: string, client identifier.
+            client_secret: string, client secret.
+            refresh_token: string, refresh token.
+            token_expiry: datetime, when the access_token expires.
+            token_uri: string, URI of token endpoint.
+            user_agent: string, The HTTP User-Agent to provide for this
+                        application.
+            revoke_uri: string, URI for revoke endpoint. Defaults to None; a
+                        token can't be revoked if this is None.
+            id_token: object, The identity of the resource owner.
+            token_response: dict, the decoded response to the token request.
+                            None if a token hasn't been requested yet. Stored
+                            because some providers (e.g. wordpress.com) include
+                            extra fields that clients may want.
+            scopes: list, authorized scopes for these credentials.
+            token_info_uri: string, the URI for the token info endpoint. Defaults
+                            to None; scopes can not be refreshed if this is None.
+
+        Notes:
+            store: callable, A callable that when passed a Credential
+                   will store the credential back to where it came from.
+                   This is needed to store the latest access_token if it
+                   has expired and been refreshed.
+        """
+        self.access_token = access_token
+        self.client_id = client_id
+        self.client_secret = client_secret
+        self.refresh_token = refresh_token
+        self.store = None
+        self.token_expiry = token_expiry
+        self.token_uri = token_uri
+        self.user_agent = user_agent
+        self.revoke_uri = revoke_uri
+        self.id_token = id_token
+        self.token_response = token_response
+        self.scopes = set(util.string_to_scopes(scopes or []))
+        self.token_info_uri = token_info_uri
+
+        # True if the credentials have been revoked or expired and can't be
+        # refreshed.
+        self.invalid = False
+
+    def authorize(self, http):
+        """Authorize an httplib2.Http instance with these credentials.
+
+        The modified http.request method will add authentication headers to
+        each request and will refresh access_tokens when a 401 is received on a
+        request. In addition the http.request method has a credentials
+        property, http.request.credentials, which is the Credentials object
+        that authorized it.
+
+        Args:
+            http: An instance of ``httplib2.Http`` or something that acts
+                  like it.
+
+        Returns:
+            A modified instance of http that was passed in.
+
+        Example::
+
+            h = httplib2.Http()
+            h = credentials.authorize(h)
+
+        You can't create a new OAuth subclass of httplib2.Authentication
+        because it never gets passed the absolute URI, which is needed for
+        signing. So instead we have to overload 'request' with a closure
+        that adds in the Authorization header and then calls the original
+        version of 'request()'.
+        """
+        request_orig = http.request
+
+        # The closure that will replace 'httplib2.Http.request'.
+        def new_request(uri, method='GET', body=None, headers=None,
+                        redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+                        connection_type=None):
+            if not self.access_token:
+                logger.info('Attempting refresh to obtain '
+                            'initial access_token')
+                self._refresh(request_orig)
+
+            # Clone and modify the request headers to add the appropriate
+            # Authorization header.
+            if headers is None:
+                headers = {}
+            else:
+                headers = dict(headers)
+            self.apply(headers)
+
+            if self.user_agent is not None:
+                if 'user-agent' in headers:
+                    headers['user-agent'] = (self.user_agent + ' ' +
+                                             headers['user-agent'])
+                else:
+                    headers['user-agent'] = self.user_agent
+
+            body_stream_position = None
+            if all(getattr(body, stream_prop, None) for stream_prop in
+                   ('read', 'seek', 'tell')):
+                body_stream_position = body.tell()
+
+            resp, content = request_orig(uri, method, body,
+                                         clean_headers(headers),
+                                         redirections, connection_type)
+
+            # A stored token may expire between the time it is retrieved and
+            # the time the request is made, so we may need to try twice.
+            max_refresh_attempts = 2
+            for refresh_attempt in range(max_refresh_attempts):
+                if resp.status not in REFRESH_STATUS_CODES:
+                    break
+                logger.info(
+                    'OAuth token TTL expired, auto-refreshing (attempt %s/%s)',
+                    refresh_attempt + 1,
+                    max_refresh_attempts)
+                self._refresh(request_orig)
+                self.apply(headers)
+                if body_stream_position is not None:
+                    body.seek(body_stream_position)
+
+                resp, content = request_orig(uri, method, body,
+                                             clean_headers(headers),
+                                             redirections, connection_type)
+
+            return (resp, content)
+
+        # Replace the request method with our own closure.
+        http.request = new_request
+
+        # Set credentials as a property of the request method.
+        setattr(http.request, 'credentials', self)
+
+        return http
+
+    def refresh(self, http):
+        """Forces a refresh of the access_token.
+
+        Args:
+            http: httplib2.Http, an http object to be used to make the refresh
+                  request.
+        """
+        self._refresh(http.request)
+
+    def revoke(self, http):
+        """Revokes a refresh_token and makes the credentials void.
+
+        Args:
+            http: httplib2.Http, an http object to be used to make the revoke
+                  request.
+        """
+        self._revoke(http.request)
+
+    def apply(self, headers):
+        """Add the authorization to the headers.
+
+        Args:
+            headers: dict, the headers to add the Authorization header to.
+        """
+        headers['Authorization'] = 'Bearer ' + self.access_token
+
+    def has_scopes(self, scopes):
+        """Verify that the credentials are authorized for the given scopes.
+
+        Returns True if the credentials authorized scopes contain all of the
+        scopes given.
+
+        Args:
+            scopes: list or string, the scopes to check.
+
+        Notes:
+            There are cases where the credentials are unaware of which scopes
+            are authorized. Notably, credentials obtained and stored before
+            this code was added will not have scopes, and
+            AccessTokenCredentials do not have scopes. In both cases, you can
+            use retrieve_scopes() to obtain the canonical set of scopes.
+        """
+        scopes = util.string_to_scopes(scopes)
+        return set(scopes).issubset(self.scopes)
+
+    def retrieve_scopes(self, http):
+        """Retrieves the canonical list of scopes for this access token.
+
+        Gets the scopes from the OAuth2 provider.
+
+        Args:
+            http: httplib2.Http, an http object to be used to make the token
+                  info request.
+
+        Returns:
+            A set of strings containing the canonical list of scopes.
+        """
+        self._retrieve_scopes(http.request)
+        return self.scopes
+
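+    # Example (illustrative sketch): checking and refreshing the authorized
+    # scopes, assuming ``credentials`` is an OAuth2Credentials instance and
+    # ``http`` an httplib2.Http instance; the scope string is a placeholder:
+    #
+    #     if not credentials.has_scopes('<scope uri>'):
+    #         scopes = credentials.retrieve_scopes(http)
+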
+    def to_json(self):
+        return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
+
+    @classmethod
+    def from_json(cls, s):
+        """Instantiate a Credentials object from a JSON description of it.
+
+        The JSON should have been produced by calling .to_json() on the object.
+
+        Args:
+            s: string or bytes, JSON produced by a call to to_json().
+
+        Returns:
+            An instance of a Credentials subclass.
+        """
+        s = _from_bytes(s)
+        data = json.loads(s)
+        if (data.get('token_expiry') and
+                not isinstance(data['token_expiry'], datetime.datetime)):
+            try:
+                data['token_expiry'] = datetime.datetime.strptime(
+                    data['token_expiry'], EXPIRY_FORMAT)
+            except ValueError:
+                data['token_expiry'] = None
+        retval = cls(
+            data['access_token'],
+            data['client_id'],
+            data['client_secret'],
+            data['refresh_token'],
+            data['token_expiry'],
+            data['token_uri'],
+            data['user_agent'],
+            revoke_uri=data.get('revoke_uri', None),
+            id_token=data.get('id_token', None),
+            token_response=data.get('token_response', None),
+            scopes=data.get('scopes', None),
+            token_info_uri=data.get('token_info_uri', None))
+        retval.invalid = data['invalid']
+        return retval
+
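+    # Example (illustrative sketch): a credential can be round-tripped through
+    # JSON for storage; ``credentials`` is assumed to be an existing
+    # OAuth2Credentials instance, e.g. one produced by a Flow:
+    #
+    #     serialized = credentials.to_json()
+    #     restored = OAuth2Credentials.from_json(serialized)
+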
+    @property
+    def access_token_expired(self):
+        """True if the credential is expired or invalid.
+
+        If the token_expiry isn't set, we assume the token doesn't expire.
+        """
+        if self.invalid:
+            return True
+
+        if not self.token_expiry:
+            return False
+
+        now = datetime.datetime.utcnow()
+        if now >= self.token_expiry:
+            logger.info('access_token is expired. Now: %s, token_expiry: %s',
+                        now, self.token_expiry)
+            return True
+        return False
+
+    def get_access_token(self, http=None):
+        """Return the access token and its expiration information.
+
+        If the token does not exist, get one.
+        If the token expired, refresh it.
+        """
+        if not self.access_token or self.access_token_expired:
+            if not http:
+                http = httplib2.Http()
+            self.refresh(http)
+        return AccessTokenInfo(access_token=self.access_token,
+                               expires_in=self._expires_in())
+
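+    # Example (illustrative sketch): fetching a usable bearer token; the
+    # ``http`` argument is optional, and the returned AccessTokenInfo exposes
+    # ``access_token`` and ``expires_in``:
+    #
+    #     token_info = credentials.get_access_token()
+    #     headers = {'Authorization': 'Bearer ' + token_info.access_token}
+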
+    def set_store(self, store):
+        """Set the Storage for the credential.
+
+        Args:
+            store: Storage, an implementation of Storage object.
+                   This is needed to store the latest access_token if it
+                   has expired and been refreshed. This implementation uses
+                   locking to check for updates before updating the
+                   access_token.
+        """
+        self.store = store
+
+    def _expires_in(self):
+        """Return the number of seconds until this token expires.
+
+        If token_expiry is in the past, this method will return 0, meaning the
+        token has already expired.
+
+        If token_expiry is None, this method will return None. Note that
+        returning 0 in such a case would not be fair: the token may still be
+        valid; we just don't know anything about it.
+        """
+        if self.token_expiry:
+            now = datetime.datetime.utcnow()
+            if self.token_expiry > now:
+                time_delta = self.token_expiry - now
+                # TODO(orestica): return time_delta.total_seconds()
+                # once dropping support for Python 2.6
+                return time_delta.days * 86400 + time_delta.seconds
+            else:
+                return 0
+
+    def _updateFromCredential(self, other):
+        """Update this Credential from another instance."""
+        self.__dict__.update(other.__getstate__())
+
+    def __getstate__(self):
+        """Trim the state down to something that can be pickled."""
+        d = copy.copy(self.__dict__)
+        del d['store']
+        return d
+
+    def __setstate__(self, state):
+        """Reconstitute the state of the object from being pickled."""
+        self.__dict__.update(state)
+        self.store = None
+
+    def _generate_refresh_request_body(self):
+        """Generate the body that will be used in the refresh request."""
+        body = urllib.parse.urlencode({
+            'grant_type': 'refresh_token',
+            'client_id': self.client_id,
+            'client_secret': self.client_secret,
+            'refresh_token': self.refresh_token,
+        })
+        return body
+
+    def _generate_refresh_request_headers(self):
+        """Generate the headers that will be used in the refresh request."""
+        headers = {
+            'content-type': 'application/x-www-form-urlencoded',
+        }
+
+        if self.user_agent is not None:
+            headers['user-agent'] = self.user_agent
+
+        return headers
+
+    def _refresh(self, http_request):
+        """Refreshes the access_token.
+
+        This method first checks the Storage object, if one is available, for
+        a newer access_token. If a refresh is still needed, it holds the
+        Storage lock until the refresh is completed.
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make the
+                          refresh request.
+
+        Raises:
+            HttpAccessTokenRefreshError: When the refresh fails.
+        """
+        if not self.store:
+            self._do_refresh_request(http_request)
+        else:
+            self.store.acquire_lock()
+            try:
+                new_cred = self.store.locked_get()
+
+                if (new_cred and not new_cred.invalid and
+                        new_cred.access_token != self.access_token and
+                        not new_cred.access_token_expired):
+                    logger.info('Updated access_token read from Storage')
+                    self._updateFromCredential(new_cred)
+                else:
+                    self._do_refresh_request(http_request)
+            finally:
+                self.store.release_lock()
+
+    def _do_refresh_request(self, http_request):
+        """Refresh the access_token using the refresh_token.
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make the
+                          refresh request.
+
+        Raises:
+            HttpAccessTokenRefreshError: When the refresh fails.
+        """
+        body = self._generate_refresh_request_body()
+        headers = self._generate_refresh_request_headers()
+
+        logger.info('Refreshing access_token')
+        resp, content = http_request(
+            self.token_uri, method='POST', body=body, headers=headers)
+        content = _from_bytes(content)
+        if resp.status == 200:
+            d = json.loads(content)
+            self.token_response = d
+            self.access_token = d['access_token']
+            self.refresh_token = d.get('refresh_token', self.refresh_token)
+            if 'expires_in' in d:
+                self.token_expiry = datetime.timedelta(
+                    seconds=int(d['expires_in'])) + datetime.datetime.utcnow()
+            else:
+                self.token_expiry = None
+            # On temporary refresh errors, the user does not actually have to
+            # re-authorize, so we unflag here.
+            self.invalid = False
+            if self.store:
+                self.store.locked_put(self)
+        else:
+            # An {'error':...} response body means the token is expired or
+            # revoked, so we flag the credentials as such.
+            logger.info('Failed to retrieve access token: %s', content)
+            error_msg = 'Invalid response %s.' % resp['status']
+            try:
+                d = json.loads(content)
+                if 'error' in d:
+                    error_msg = d['error']
+                    if 'error_description' in d:
+                        error_msg += ': ' + d['error_description']
+                    self.invalid = True
+                    if self.store:
+                        self.store.locked_put(self)
+            except (TypeError, ValueError):
+                pass
+            raise HttpAccessTokenRefreshError(error_msg, status=resp.status)
+
+    def _revoke(self, http_request):
+        """Revokes this credential and deletes the stored copy (if it exists).
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make the
+                          revoke request.
+        """
+        self._do_revoke(http_request, self.refresh_token or self.access_token)
+
+    def _do_revoke(self, http_request, token):
+        """Revokes this credential and deletes the stored copy (if it exists).
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make the
+                          revoke request.
+            token: A string used as the token to be revoked. Can be either an
+                   access_token or refresh_token.
+
+        Raises:
+            TokenRevokeError: If the revoke request does not return with a
+                              200 OK.
+        """
+        logger.info('Revoking token')
+        query_params = {'token': token}
+        token_revoke_uri = _update_query_params(self.revoke_uri, query_params)
+        resp, content = http_request(token_revoke_uri)
+        if resp.status == 200:
+            self.invalid = True
+        else:
+            error_msg = 'Invalid response %s.' % resp.status
+            try:
+                d = json.loads(_from_bytes(content))
+                if 'error' in d:
+                    error_msg = d['error']
+            except (TypeError, ValueError):
+                pass
+            raise TokenRevokeError(error_msg)
+
+        if self.store:
+            self.store.delete()
+
+    def _retrieve_scopes(self, http_request):
+        """Retrieves the list of authorized scopes from the OAuth2 provider.
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make the
+                          token info request.
+        """
+        self._do_retrieve_scopes(http_request, self.access_token)
+
+    def _do_retrieve_scopes(self, http_request, token):
+        """Retrieves the list of authorized scopes from the OAuth2 provider.
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make the
+                          token info request.
+            token: A string used as the token to identify the credentials to
+                   the provider.
+
+        Raises:
+            Error: When refresh fails, indicating that the access token is
+                   invalid.
+        """
+        logger.info('Refreshing scopes')
+        query_params = {'access_token': token, 'fields': 'scope'}
+        token_info_uri = _update_query_params(self.token_info_uri,
+                                              query_params)
+        resp, content = http_request(token_info_uri)
+        content = _from_bytes(content)
+        if resp.status == 200:
+            d = json.loads(content)
+            self.scopes = set(util.string_to_scopes(d.get('scope', '')))
+        else:
+            error_msg = 'Invalid response %s.' % (resp.status,)
+            try:
+                d = json.loads(content)
+                if 'error_description' in d:
+                    error_msg = d['error_description']
+            except (TypeError, ValueError):
+                pass
+            raise Error(error_msg)
+
+
+class AccessTokenCredentials(OAuth2Credentials):
+    """Credentials object for OAuth 2.0.
+
+    Credentials can be applied to an httplib2.Http object using the
+    authorize() method, which then signs each request from that object
+    with the OAuth 2.0 access token. This set of credentials is for the
+    use case where you have acquired an OAuth 2.0 access_token from
+    another place such as a JavaScript client or another web
+    application, and wish to use it from Python. Because only the
+    access_token is present it can not be refreshed and will in time
+    expire.
+
+    AccessTokenCredentials objects may be safely pickled and unpickled.
+
+    Usage::
+
+        credentials = AccessTokenCredentials('<an access token>',
+            'my-user-agent/1.0')
+        http = httplib2.Http()
+        http = credentials.authorize(http)
+
+    Raises:
+        AccessTokenCredentialsError: raised when the access_token expires or
+                                     is revoked.
+    """
+
+    def __init__(self, access_token, user_agent, revoke_uri=None):
+        """Create an instance of OAuth2Credentials
+
+        This is one of the few types if Credentials that you should contrust,
+        Credentials objects are usually instantiated by a Flow.
+
+        Args:
+            access_token: string, access token.
+            user_agent: string, The HTTP User-Agent to provide for this
+                        application.
+            revoke_uri: string, URI for revoke endpoint. Defaults to None; a
+                        token can't be revoked if this is None.
+        """
+        super(AccessTokenCredentials, self).__init__(
+            access_token,
+            None,
+            None,
+            None,
+            None,
+            None,
+            user_agent,
+            revoke_uri=revoke_uri)
+
+    @classmethod
+    def from_json(cls, s):
+        data = json.loads(_from_bytes(s))
+        retval = AccessTokenCredentials(
+            data['access_token'],
+            data['user_agent'])
+        return retval
+
+    def _refresh(self, http_request):
+        raise AccessTokenCredentialsError(
+            'The access_token is expired or invalid and can\'t be refreshed.')
+
+    def _revoke(self, http_request):
+        """Revokes the access_token and deletes the store if available.
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make the
+                          revoke request.
+        """
+        self._do_revoke(http_request, self.access_token)
+
+
+def _detect_gce_environment():
+    """Determine if the current environment is Compute Engine.
+
+    Returns:
+        Boolean indicating whether or not the current environment is Google
+        Compute Engine.
+    """
+    # NOTE: The explicit ``timeout`` is a workaround. The underlying
+    #       issue is that resolving an unknown host on some networks will take
+    #       20-30 seconds; making this timeout short fixes the issue, but
+    #       could lead to false negatives in the event that we are on GCE, but
+    #       the metadata resolution was particularly slow. The latter case is
+    #       "unlikely".
+    connection = six.moves.http_client.HTTPConnection(
+        _GCE_METADATA_HOST, timeout=1)
+
+    try:
+        headers = {_METADATA_FLAVOR_HEADER: _DESIRED_METADATA_FLAVOR}
+        connection.request('GET', '/', headers=headers)
+        response = connection.getresponse()
+        if response.status == 200:
+            return (response.getheader(_METADATA_FLAVOR_HEADER) ==
+                    _DESIRED_METADATA_FLAVOR)
+    except socket.error:  # socket.timeout or socket.error(64, 'Host is down')
+        logger.info('Timeout attempting to reach GCE metadata service.')
+        return False
+    finally:
+        connection.close()
+
+
+def _in_gae_environment():
+    """Detects if the code is running in the App Engine environment.
+
+    Returns:
+        True if running in the GAE environment, False otherwise.
+    """
+    if SETTINGS.env_name is not None:
+        return SETTINGS.env_name in ('GAE_PRODUCTION', 'GAE_LOCAL')
+
+    try:
+        import google.appengine  # noqa: unused import
+    except ImportError:
+        pass
+    else:
+        server_software = os.environ.get(_SERVER_SOFTWARE, '')
+        if server_software.startswith('Google App Engine/'):
+            SETTINGS.env_name = 'GAE_PRODUCTION'
+            return True
+        elif server_software.startswith('Development/'):
+            SETTINGS.env_name = 'GAE_LOCAL'
+            return True
+
+    return False
+
+
+def _in_gce_environment():
+    """Detect if the code is running in the Compute Engine environment.
+
+    Returns:
+        True if running in the GCE environment, False otherwise.
+    """
+    if SETTINGS.env_name is not None:
+        return SETTINGS.env_name == 'GCE_PRODUCTION'
+
+    if NO_GCE_CHECK != 'True' and _detect_gce_environment():
+        SETTINGS.env_name = 'GCE_PRODUCTION'
+        return True
+    return False
+
+
+class GoogleCredentials(OAuth2Credentials):
+    """Application Default Credentials for use in calling Google APIs.
+
+    The Application Default Credentials are constructed as a function of the
+    environment in which the code is running.
+    More details can be found on this page:
+    https://developers.google.com/accounts/docs/application-default-credentials
+
+    Here is an example of how to use the Application Default Credentials for a
+    service that requires authentication::
+
+        from googleapiclient.discovery import build
+        from oauth2client.client import GoogleCredentials
+
+        credentials = GoogleCredentials.get_application_default()
+        service = build('compute', 'v1', credentials=credentials)
+
+        PROJECT = 'bamboo-machine-422'
+        ZONE = 'us-central1-a'
+        request = service.instances().list(project=PROJECT, zone=ZONE)
+        response = request.execute()
+
+        print(response)
+    """
+
+    def __init__(self, access_token, client_id, client_secret, refresh_token,
+                 token_expiry, token_uri, user_agent,
+                 revoke_uri=GOOGLE_REVOKE_URI):
+        """Create an instance of GoogleCredentials.
+
+        This constructor is not usually called by the user, instead
+        GoogleCredentials objects are instantiated by
+        GoogleCredentials.from_stream() or
+        GoogleCredentials.get_application_default().
+
+        Args:
+            access_token: string, access token.
+            client_id: string, client identifier.
+            client_secret: string, client secret.
+            refresh_token: string, refresh token.
+            token_expiry: datetime, when the access_token expires.
+            token_uri: string, URI of token endpoint.
+            user_agent: string, The HTTP User-Agent to provide for this
+                        application.
+            revoke_uri: string, URI for revoke endpoint. Defaults to
+                        GOOGLE_REVOKE_URI; a token can't be revoked if this
+                        is None.
+        """
+        super(GoogleCredentials, self).__init__(
+            access_token, client_id, client_secret, refresh_token,
+            token_expiry, token_uri, user_agent, revoke_uri=revoke_uri)
+
+    def create_scoped_required(self):
+        """Whether this Credentials object is scopeless.
+
+        The create_scoped(scopes) method needs to be called in order to
+        create a Credentials object for API calls.
+        """
+        return False
+
+    def create_scoped(self, scopes):
+        """Create a Credentials object for the given scopes.
+
+        The Credentials type is preserved.
+        """
+        return self
+
+    @property
+    def serialization_data(self):
+        """Get the fields and values identifying the current credentials."""
+        return {
+            'type': 'authorized_user',
+            'client_id': self.client_id,
+            'client_secret': self.client_secret,
+            'refresh_token': self.refresh_token
+        }
+
+    @staticmethod
+    def _implicit_credentials_from_gae():
+        """Attempts to get implicit credentials in Google App Engine env.
+
+        If the current environment is not detected as App Engine, returns None,
+        indicating no Google App Engine credentials can be detected from the
+        current environment.
+
+        Returns:
+            None, if not in GAE, else an appengine.AppAssertionCredentials
+            object.
+        """
+        if not _in_gae_environment():
+            return None
+
+        return _get_application_default_credential_GAE()
+
+    @staticmethod
+    def _implicit_credentials_from_gce():
+        """Attempts to get implicit credentials in Google Compute Engine env.
+
+        If the current environment is not detected as Compute Engine, returns
+        None, indicating no Google Compute Engine credentials can be detected
+        from the current environment.
+
+        Returns:
+            None, if not in GCE, else a gce.AppAssertionCredentials object.
+        """
+        if not _in_gce_environment():
+            return None
+
+        return _get_application_default_credential_GCE()
+
+    @staticmethod
+    def _implicit_credentials_from_files():
+        """Attempts to get implicit credentials from local credential files.
+
+        First checks if the environment variable GOOGLE_APPLICATION_CREDENTIALS
+        is set with a filename and then falls back to a configuration file (the
+        "well known" file) associated with the 'gcloud' command line tool.
+
+        Returns:
+            Credentials object associated with the
+            GOOGLE_APPLICATION_CREDENTIALS file or the "well known" file if
+            either exists. If neither file is defined, returns None,
+            indicating that no credentials can be detected from a file in the
+            current environment.
+        """
+        credentials_filename = _get_environment_variable_file()
+        if not credentials_filename:
+            credentials_filename = _get_well_known_file()
+            if os.path.isfile(credentials_filename):
+                extra_help = (' (produced automatically when running'
+                              ' "gcloud auth login" command)')
+            else:
+                credentials_filename = None
+        else:
+            extra_help = (' (pointed to by ' + GOOGLE_APPLICATION_CREDENTIALS +
+                          ' environment variable)')
+
+        if not credentials_filename:
+            return
+
+        # If we can read the credentials from a file, we don't need to know
+        # what environment we are in.
+        SETTINGS.env_name = DEFAULT_ENV_NAME
+
+        try:
+            return _get_application_default_credential_from_file(
+                credentials_filename)
+        except (ApplicationDefaultCredentialsError, ValueError) as error:
+            _raise_exception_for_reading_json(credentials_filename,
+                                              extra_help, error)
+
+    @classmethod
+    def _get_implicit_credentials(cls):
+        """Gets credentials implicitly from the environment.
+
+        Checks environment in order of precedence:
+        - Google App Engine (production and testing)
+        - Environment variable GOOGLE_APPLICATION_CREDENTIALS pointing to
+          a file with stored credentials information.
+        - Stored "well known" file associated with `gcloud` command line tool.
+        - Google Compute Engine production environment.
+
+        Raises:
+            ApplicationDefaultCredentialsError: raised when the credentials
+                                                fail to be retrieved.
+        """
+        # Environ checks (in order).
+        environ_checkers = [
+            cls._implicit_credentials_from_gae,
+            cls._implicit_credentials_from_files,
+            cls._implicit_credentials_from_gce,
+        ]
+
+        for checker in environ_checkers:
+            credentials = checker()
+            if credentials is not None:
+                return credentials
+
+        # If no credentials, fail.
+        raise ApplicationDefaultCredentialsError(ADC_HELP_MSG)
+
+    @staticmethod
+    def get_application_default():
+        """Get the Application Default Credentials for the current environment.
+
+        Raises:
+            ApplicationDefaultCredentialsError: raised when the credentials
+                                                fail to be retrieved.
+        """
+        return GoogleCredentials._get_implicit_credentials()
+
+    @staticmethod
+    def from_stream(credential_filename):
+        """Create a Credentials object by reading information from a file.
+
+        It returns an object of type GoogleCredentials.
+
+        Args:
+            credential_filename: the path to the file from which the
+                                 credentials are to be read.
+
+        Raises:
+            ApplicationDefaultCredentialsError: raised when the credentials
+                                                fail to be retrieved.
+        """
+        if credential_filename and os.path.isfile(credential_filename):
+            try:
+                return _get_application_default_credential_from_file(
+                    credential_filename)
+            except (ApplicationDefaultCredentialsError, ValueError) as error:
+                extra_help = (' (provided as parameter to the '
+                              'from_stream() method)')
+                _raise_exception_for_reading_json(credential_filename,
+                                                  extra_help,
+                                                  error)
+        else:
+            raise ApplicationDefaultCredentialsError(
+                'The parameter passed to the from_stream() '
+                'method should point to a file.')
+
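+# Example (illustrative sketch): loading credentials from an explicit JSON key
+# file (the path is a placeholder); create_scoped() is only needed for
+# credential types that require scoping:
+#
+#     credentials = GoogleCredentials.from_stream('/path/to/credentials.json')
+#     if credentials.create_scoped_required():
+#         credentials = credentials.create_scoped(['<scope uri>'])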
+
+def _save_private_file(filename, json_contents):
+    """Saves a file with read-write permissions on for the owner.
+
+    Args:
+        filename: String. Absolute path to file.
+        json_contents: JSON serializable object to be saved.
+    """
+    temp_filename = tempfile.mktemp()
+    file_desc = os.open(temp_filename, os.O_WRONLY | os.O_CREAT, 0o600)
+    with os.fdopen(file_desc, 'w') as file_handle:
+        json.dump(json_contents, file_handle, sort_keys=True,
+                  indent=2, separators=(',', ': '))
+    shutil.move(temp_filename, filename)
+
+
+def save_to_well_known_file(credentials, well_known_file=None):
+    """Save the provided GoogleCredentials to the well known file.
+
+    Args:
+        credentials: the credentials to be saved to the well known file;
+                     it should be an instance of GoogleCredentials
+        well_known_file: the name of the file where the credentials are to be
+                         saved; this parameter is supposed to be used for
+                         testing only
+    """
+    # TODO(orestica): move this method to tools.py
+    # once the argparse import gets fixed (it is not present in Python 2.6)
+
+    if well_known_file is None:
+        well_known_file = _get_well_known_file()
+
+    config_dir = os.path.dirname(well_known_file)
+    if not os.path.isdir(config_dir):
+        raise OSError('Config directory does not exist: %s' % config_dir)
+
+    credentials_data = credentials.serialization_data
+    _save_private_file(well_known_file, credentials_data)
+
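+# Example (illustrative sketch): persisting an existing GoogleCredentials
+# instance so that a later GoogleCredentials.get_application_default() call
+# can find it; the Cloud SDK config directory must already exist:
+#
+#     save_to_well_known_file(credentials)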
+
+def _get_environment_variable_file():
+    application_default_credential_filename = (
+      os.environ.get(GOOGLE_APPLICATION_CREDENTIALS,
+                     None))
+
+    if application_default_credential_filename:
+        if os.path.isfile(application_default_credential_filename):
+            return application_default_credential_filename
+        else:
+            raise ApplicationDefaultCredentialsError(
+                'File ' + application_default_credential_filename +
+                ' (pointed to by ' +
+                GOOGLE_APPLICATION_CREDENTIALS +
+                ' environment variable) does not exist!')
+
+
+def _get_well_known_file():
+    """Get the well known file produced by command 'gcloud auth login'."""
+    # TODO(orestica): Revisit this method once gcloud provides a better way
+    # of pinpointing the exact location of the file.
+
+    WELL_KNOWN_CREDENTIALS_FILE = 'application_default_credentials.json'
+
+    default_config_dir = os.getenv(_CLOUDSDK_CONFIG_ENV_VAR)
+    if default_config_dir is None:
+        if os.name == 'nt':
+            try:
+                default_config_dir = os.path.join(os.environ['APPDATA'],
+                                                  _CLOUDSDK_CONFIG_DIRECTORY)
+            except KeyError:
+                # This should never happen unless someone is really
+                # messing with things.
+                drive = os.environ.get('SystemDrive', 'C:')
+                default_config_dir = os.path.join(drive, '\\',
+                                                  _CLOUDSDK_CONFIG_DIRECTORY)
+        else:
+            default_config_dir = os.path.join(os.path.expanduser('~'),
+                                              '.config',
+                                              _CLOUDSDK_CONFIG_DIRECTORY)
+
+    return os.path.join(default_config_dir, WELL_KNOWN_CREDENTIALS_FILE)
+
+
+def _get_application_default_credential_from_file(filename):
+    """Build the Application Default Credentials from file."""
+
+    from oauth2client import service_account
+
+    # read the credentials from the file
+    with open(filename) as file_obj:
+        client_credentials = json.load(file_obj)
+
+    credentials_type = client_credentials.get('type')
+    if credentials_type == AUTHORIZED_USER:
+        required_fields = set(['client_id', 'client_secret', 'refresh_token'])
+    elif credentials_type == SERVICE_ACCOUNT:
+        required_fields = set(['client_id', 'client_email', 'private_key_id',
+                               'private_key'])
+    else:
+        raise ApplicationDefaultCredentialsError(
+            "'type' field should be defined (and have one of the '" +
+            AUTHORIZED_USER + "' or '" + SERVICE_ACCOUNT + "' values)")
+
+    missing_fields = required_fields.difference(client_credentials.keys())
+
+    if missing_fields:
+        _raise_exception_for_missing_fields(missing_fields)
+
+    if client_credentials['type'] == AUTHORIZED_USER:
+        return GoogleCredentials(
+            access_token=None,
+            client_id=client_credentials['client_id'],
+            client_secret=client_credentials['client_secret'],
+            refresh_token=client_credentials['refresh_token'],
+            token_expiry=None,
+            token_uri=GOOGLE_TOKEN_URI,
+            user_agent='Python client library')
+    else:  # client_credentials['type'] == SERVICE_ACCOUNT
+        return service_account._ServiceAccountCredentials(
+            service_account_id=client_credentials['client_id'],
+            service_account_email=client_credentials['client_email'],
+            private_key_id=client_credentials['private_key_id'],
+            private_key_pkcs8_text=client_credentials['private_key'],
+            scopes=[])
+
+
+def _raise_exception_for_missing_fields(missing_fields):
+    raise ApplicationDefaultCredentialsError(
+        'The following field(s) must be defined: ' + ', '.join(missing_fields))
+
+
+def _raise_exception_for_reading_json(credential_file,
+                                      extra_help,
+                                      error):
+    raise ApplicationDefaultCredentialsError(
+      'An error was encountered while reading json file: ' +
+      credential_file + extra_help + ': ' + str(error))
+
+
+def _get_application_default_credential_GAE():
+    from oauth2client.appengine import AppAssertionCredentials
+
+    return AppAssertionCredentials([])
+
+
+def _get_application_default_credential_GCE():
+    from oauth2client.gce import AppAssertionCredentials
+
+    return AppAssertionCredentials([])
+
+
+class AssertionCredentials(GoogleCredentials):
+    """Abstract Credentials object used for OAuth 2.0 assertion grants.
+
+    This credential does not require a flow to instantiate because it
+    represents a two legged flow, and therefore has all of the required
+    information to generate and refresh its own access tokens. It must
+    be subclassed to generate the appropriate assertion string.
+
+    AssertionCredentials objects may be safely pickled and unpickled.
+    """
+
+    @util.positional(2)
+    def __init__(self, assertion_type, user_agent=None,
+                 token_uri=GOOGLE_TOKEN_URI,
+                 revoke_uri=GOOGLE_REVOKE_URI,
+                 **unused_kwargs):
+        """Constructor for AssertionFlowCredentials.
+
+        Args:
+            assertion_type: string, assertion type that will be declared to the
+                            auth server
+            user_agent: string, The HTTP User-Agent to provide for this
+                        application.
+            token_uri: string, URI for token endpoint. For convenience defaults
+                       to Google's endpoints but any OAuth 2.0 provider can be
+                       used.
+            revoke_uri: string, URI for revoke endpoint.
+        """
+        super(AssertionCredentials, self).__init__(
+            None,
+            None,
+            None,
+            None,
+            None,
+            token_uri,
+            user_agent,
+            revoke_uri=revoke_uri)
+        self.assertion_type = assertion_type
+
+    def _generate_refresh_request_body(self):
+        assertion = self._generate_assertion()
+
+        body = urllib.parse.urlencode({
+            'assertion': assertion,
+            'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
+        })
+
+        return body
+
+    def _generate_assertion(self):
+        """Generate assertion string to be used in the access token request."""
+        _abstract()
+
+    def _revoke(self, http_request):
+        """Revokes the access_token and deletes the store if available.
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make the
+                          revoke request.
+        """
+        self._do_revoke(http_request, self.access_token)
+
+
+def _RequireCryptoOrDie():
+    """Ensure we have a crypto library, or throw CryptoUnavailableError.
+
+    The oauth2client.crypt module requires either PyCrypto or PyOpenSSL
+    to be available in order to function, but these are optional
+    dependencies.
+    """
+    if not HAS_CRYPTO:
+        raise CryptoUnavailableError('No crypto library available')
+
+
+class SignedJwtAssertionCredentials(AssertionCredentials):
+    """Credentials object used for OAuth 2.0 Signed JWT assertion grants.
+
+    This credential does not require a flow to instantiate because it
+    represents a two legged flow, and therefore has all of the required
+    information to generate and refresh its own access tokens.
+
+    SignedJwtAssertionCredentials requires either PyOpenSSL, or PyCrypto
+    2.6 or later. For App Engine you may also consider using
+    AppAssertionCredentials.
+    """
+
+    MAX_TOKEN_LIFETIME_SECS = 3600  # 1 hour in seconds
+
+    @util.positional(4)
+    def __init__(self,
+                 service_account_name,
+                 private_key,
+                 scope,
+                 private_key_password='notasecret',
+                 user_agent=None,
+                 token_uri=GOOGLE_TOKEN_URI,
+                 revoke_uri=GOOGLE_REVOKE_URI,
+                 **kwargs):
+        """Constructor for SignedJwtAssertionCredentials.
+
+        Args:
+            service_account_name: string, id for account, usually an email
+                                  address.
+            private_key: string or bytes, private key in PKCS12 or PEM format.
+            scope: string or iterable of strings, scope(s) of the credentials
+                   being requested.
+            private_key_password: string, password for private_key, unused if
+                                  private_key is in PEM format.
+            user_agent: string, HTTP User-Agent to provide for this
+                        application.
+            token_uri: string, URI for token endpoint. For convenience defaults
+                       to Google's endpoints but any OAuth 2.0 provider can be
+                       used.
+            revoke_uri: string, URI for revoke endpoint.
+            kwargs: kwargs, Additional parameters to add to the JWT token, for
+                    example sub=joe@example.org.
+
+        Raises:
+            CryptoUnavailableError if no crypto library is available.
+        """
+        _RequireCryptoOrDie()
+        super(SignedJwtAssertionCredentials, self).__init__(
+            None,
+            user_agent=user_agent,
+            token_uri=token_uri,
+            revoke_uri=revoke_uri,
+        )
+
+        self.scope = util.scopes_to_string(scope)
+
+        # Keep base64 encoded so it can be stored in JSON.
+        self.private_key = base64.b64encode(_to_bytes(private_key))
+        self.private_key_password = private_key_password
+        self.service_account_name = service_account_name
+        self.kwargs = kwargs
+
+    @classmethod
+    def from_json(cls, s):
+        data = json.loads(_from_bytes(s))
+        retval = SignedJwtAssertionCredentials(
+            data['service_account_name'],
+            base64.b64decode(data['private_key']),
+            data['scope'],
+            private_key_password=data['private_key_password'],
+            user_agent=data['user_agent'],
+            token_uri=data['token_uri'],
+            **data['kwargs']
+        )
+        retval.invalid = data['invalid']
+        retval.access_token = data['access_token']
+        return retval
+
+    def _generate_assertion(self):
+        """Generate the assertion that will be used in the request."""
+        now = int(time.time())
+        payload = {
+            'aud': self.token_uri,
+            'scope': self.scope,
+            'iat': now,
+            'exp': now + SignedJwtAssertionCredentials.MAX_TOKEN_LIFETIME_SECS,
+            'iss': self.service_account_name
+        }
+        payload.update(self.kwargs)
+        logger.debug(str(payload))
+
+        private_key = base64.b64decode(self.private_key)
+        return crypt.make_signed_jwt(crypt.Signer.from_string(
+            private_key, self.private_key_password), payload)
+
+# Only used in verify_id_token(), which always calls the same URI
+# for the certs.
+_cached_http = httplib2.Http(MemoryCache())
+
+
+@util.positional(2)
+def verify_id_token(id_token, audience, http=None,
+                    cert_uri=ID_TOKEN_VERIFICATION_CERTS):
+    """Verifies a signed JWT id_token.
+
+    This function requires PyOpenSSL and because of that it does not work on
+    App Engine.
+
+    Args:
+        id_token: string, A Signed JWT.
+        audience: string, The audience 'aud' that the token should be for.
+        http: httplib2.Http, instance to use to make the HTTP request. Callers
+              should supply an instance that has caching enabled.
+        cert_uri: string, URI of the certificates in JSON format to
+                  verify the JWT against.
+
+    Returns:
+        The deserialized JSON in the JWT.
+
+    Raises:
+        oauth2client.crypt.AppIdentityError: if the JWT fails to verify.
+        CryptoUnavailableError: if no crypto library is available.
+    """
+    _RequireCryptoOrDie()
+    if http is None:
+        http = _cached_http
+
+    resp, content = http.request(cert_uri)
+    if resp.status == 200:
+        certs = json.loads(_from_bytes(content))
+        return crypt.verify_signed_jwt_with_certs(id_token, certs, audience)
+    else:
+        raise VerifyJwtTokenError('Status code: %d' % resp.status)
+
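+# Example (illustrative sketch): verifying an ID token received from a client;
+# both arguments are placeholders, and crypt.AppIdentityError is raised if the
+# token does not verify:
+#
+#     try:
+#         payload = verify_id_token('<a signed JWT>', '<expected audience>')
+#     except crypt.AppIdentityError:
+#         payload = None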
+
+def _extract_id_token(id_token):
+    """Extract the JSON payload from a JWT.
+
+    Does the extraction without checking the signature.
+
+    Args:
+        id_token: string or bytestring, OAuth 2.0 id_token.
+
+    Returns:
+        object, The deserialized JSON payload.
+    """
+    if type(id_token) == bytes:
+        segments = id_token.split(b'.')
+    else:
+        segments = id_token.split(u'.')
+
+    if len(segments) != 3:
+        raise VerifyJwtTokenError(
+            'Wrong number of segments in token: %s' % id_token)
+
+    return json.loads(_from_bytes(_urlsafe_b64decode(segments[1])))
+
+
+def _parse_exchange_token_response(content):
+    """Parses response of an exchange token request.
+
+    Most providers return JSON but some (e.g. Facebook) return a
+    url-encoded string.
+
+    Args:
+        content: The body of a response
+
+    Returns:
+        Content as a dictionary object. Note that the dict could be empty,
+        i.e. {}, which indicates a failure.
+    """
+    resp = {}
+    content = _from_bytes(content)
+    try:
+        resp = json.loads(content)
+    except Exception:
+        # different JSON libs raise different exceptions,
+        # so we just do a catch-all here
+        resp = dict(urllib.parse.parse_qsl(content))
+
+    # some providers respond with 'expires', others with 'expires_in'
+    if resp and 'expires' in resp:
+        resp['expires_in'] = resp.pop('expires')
+
+    return resp
+
+
+@util.positional(4)
+def credentials_from_code(client_id, client_secret, scope, code,
+                          redirect_uri='postmessage', http=None,
+                          user_agent=None, token_uri=GOOGLE_TOKEN_URI,
+                          auth_uri=GOOGLE_AUTH_URI,
+                          revoke_uri=GOOGLE_REVOKE_URI,
+                          device_uri=GOOGLE_DEVICE_URI,
+                          token_info_uri=GOOGLE_TOKEN_INFO_URI):
+    """Exchanges an authorization code for an OAuth2Credentials object.
+
+    Args:
+        client_id: string, client identifier.
+        client_secret: string, client secret.
+        scope: string or iterable of strings, scope(s) to request.
+        code: string, An authorization code, most likely passed down from
+              the client
+        redirect_uri: string, this is generally set to 'postmessage' to match
+                      the redirect_uri that the client specified
+        http: httplib2.Http, optional http instance to use to do the fetch
+        token_uri: string, URI for token endpoint. For convenience defaults
+                   to Google's endpoints but any OAuth 2.0 provider can be
+                   used.
+        auth_uri: string, URI for authorization endpoint. For convenience
+                  defaults to Google's endpoints but any OAuth 2.0 provider
+                  can be used.
+        revoke_uri: string, URI for revoke endpoint. For convenience
+                    defaults to Google's endpoints but any OAuth 2.0 provider
+                    can be used.
+        device_uri: string, URI for device authorization endpoint. For
+                    convenience defaults to Google's endpoints but any OAuth
+                    2.0 provider can be used.
+
+    Returns:
+        An OAuth2Credentials object.
+
+    Raises:
+        FlowExchangeError: if the authorization code cannot be exchanged for
+                           an access token.
+    """
+    flow = OAuth2WebServerFlow(client_id, client_secret, scope,
+                               redirect_uri=redirect_uri,
+                               user_agent=user_agent, auth_uri=auth_uri,
+                               token_uri=token_uri, revoke_uri=revoke_uri,
+                               device_uri=device_uri,
+                               token_info_uri=token_info_uri)
+
+    credentials = flow.step2_exchange(code, http=http)
+    return credentials
+
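+# Example (illustrative sketch): exchanging an authorization code received
+# from a JavaScript client for credentials; all four string values below are
+# placeholders:
+#
+#     credentials = credentials_from_code('<client id>', '<client secret>',
+#                                         '<scope uri>', '<auth code>')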
+
+@util.positional(3)
+def credentials_from_clientsecrets_and_code(filename, scope, code,
+                                            message=None,
+                                            redirect_uri='postmessage',
+                                            http=None,
+                                            cache=None,
+                                            device_uri=None):
+    """Returns OAuth2Credentials from a clientsecrets file and an auth code.
+
+    Will create the right kind of Flow based on the contents of the
+    clientsecrets file or will raise InvalidClientSecretsError for unknown
+    types of Flows.
+
+    Args:
+        filename: string, File name of clientsecrets.
+        scope: string or iterable of strings, scope(s) to request.
+        code: string, An authorization code, most likely passed down from
+              the client
+        message: string, A friendly string to display to the user if the
+                 clientsecrets file is missing or invalid. If message is
+                 provided then sys.exit will be called in the case of an error.
+                 If message is not provided then
+                 clientsecrets.InvalidClientSecretsError will be raised.
+        redirect_uri: string, this is generally set to 'postmessage' to match
+                      the redirect_uri that the client specified
+        http: httplib2.Http, optional http instance to use to do the fetch
+        cache: An optional cache service client that implements get() and set()
+               methods. See clientsecrets.loadfile() for details.
+        device_uri: string, OAuth 2.0 device authorization endpoint
+
+    Returns:
+        An OAuth2Credentials object.
+
+    Raises:
+        FlowExchangeError: if the authorization code cannot be exchanged for an
+                           access token
+        UnknownClientSecretsFlowError: if the file describes an unknown kind
+                                       of Flow.
+        clientsecrets.InvalidClientSecretsError: if the clientsecrets file is
+                                                 invalid.
+    """
+    flow = flow_from_clientsecrets(filename, scope, message=message,
+                                   cache=cache, redirect_uri=redirect_uri,
+                                   device_uri=device_uri)
+    credentials = flow.step2_exchange(code, http=http)
+    return credentials
+
+
+class DeviceFlowInfo(collections.namedtuple('DeviceFlowInfo', (
+        'device_code', 'user_code', 'interval', 'verification_url',
+        'user_code_expiry'))):
+    """Intermediate information the OAuth2 for devices flow."""
+
+    @classmethod
+    def FromResponse(cls, response):
+        """Create a DeviceFlowInfo from a server response.
+
+        The response should be a dict containing entries as described here:
+
+        http://tools.ietf.org/html/draft-ietf-oauth-v2-05#section-3.7.1
+        """
+        # device_code, user_code, and verification_url are required.
+        kwargs = {
+            'device_code': response['device_code'],
+            'user_code': response['user_code'],
+        }
+        # The response may list the verification address as either
+        # verification_url or verification_uri, so we check for both.
+        verification_url = response.get(
+            'verification_url', response.get('verification_uri'))
+        if verification_url is None:
+            raise OAuth2DeviceCodeError(
+                'No verification_url provided in server response')
+        kwargs['verification_url'] = verification_url
+        # expires_in and interval are optional.
+        kwargs.update({
+            'interval': response.get('interval'),
+            'user_code_expiry': None,
+        })
+        if 'expires_in' in response:
+            kwargs['user_code_expiry'] = (
+                datetime.datetime.now() +
+                datetime.timedelta(seconds=int(response['expires_in'])))
+        return cls(**kwargs)
+
+
+class OAuth2WebServerFlow(Flow):
+    """Does the Web Server Flow for OAuth 2.0.
+
+    OAuth2WebServerFlow objects may be safely pickled and unpickled.
+    """
+
+    @util.positional(4)
+    def __init__(self, client_id,
+                 client_secret=None,
+                 scope=None,
+                 redirect_uri=None,
+                 user_agent=None,
+                 auth_uri=GOOGLE_AUTH_URI,
+                 token_uri=GOOGLE_TOKEN_URI,
+                 revoke_uri=GOOGLE_REVOKE_URI,
+                 login_hint=None,
+                 device_uri=GOOGLE_DEVICE_URI,
+                 token_info_uri=GOOGLE_TOKEN_INFO_URI,
+                 authorization_header=None,
+                 **kwargs):
+        """Constructor for OAuth2WebServerFlow.
+
+        The kwargs argument is used to set extra query parameters on the
+        auth_uri. For example, the access_type and approval_prompt
+        query parameters can be set via kwargs.
+
+        Args:
+            client_id: string, client identifier.
+            client_secret: string client secret.
+            scope: string or iterable of strings, scope(s) of the credentials
+                   being requested.
+            redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob'
+                          for a non-web-based application, or a URI that
+                          handles the callback from the authorization server.
+            user_agent: string, HTTP User-Agent to provide for this
+                        application.
+            auth_uri: string, URI for authorization endpoint. For convenience
+                      defaults to Google's endpoints but any OAuth 2.0 provider
+                      can be used.
+            token_uri: string, URI for token endpoint. For convenience
+                       defaults to Google's endpoints but any OAuth 2.0
+                       provider can be used.
+            revoke_uri: string, URI for revoke endpoint. For convenience
+                        defaults to Google's endpoints but any OAuth 2.0
+                        provider can be used.
+            login_hint: string, Either an email address or domain. Passing this
+                        hint will either pre-fill the email box on the sign-in
+                        form or select the proper multi-login session, thereby
+                        simplifying the login flow.
+            device_uri: string, URI for device authorization endpoint. For
+                        convenience defaults to Google's endpoints but any
+                        OAuth 2.0 provider can be used.
+            authorization_header: string, For use with OAuth 2.0 providers that
+                                  require a client to authenticate using a
+                                  header value instead of passing client_secret
+                                  in the POST body.
+            **kwargs: dict, Additional parameters, optional or required, to
+                      pass through as query parameters in the OAuth calls.
+        """
+        # scope is a required argument, but to preserve backwards-compatibility
+        # we don't want to rearrange the positional arguments
+        if scope is None:
+            raise TypeError("The value of scope must not be None")
+        self.client_id = client_id
+        self.client_secret = client_secret
+        self.scope = util.scopes_to_string(scope)
+        self.redirect_uri = redirect_uri
+        self.login_hint = login_hint
+        self.user_agent = user_agent
+        self.auth_uri = auth_uri
+        self.token_uri = token_uri
+        self.revoke_uri = revoke_uri
+        self.device_uri = device_uri
+        self.token_info_uri = token_info_uri
+        self.authorization_header = authorization_header
+        self.params = {
+            'access_type': 'offline',
+            'response_type': 'code',
+        }
+        self.params.update(kwargs)
+
+    @util.positional(1)
+    def step1_get_authorize_url(self, redirect_uri=None, state=None):
+        """Returns a URI to redirect to the provider.
+
+        Args:
+            redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob'
+                          for a non-web-based application, or a URI that
+                          handles the callback from the authorization server.
+                          This parameter is deprecated; please pass the
+                          redirect_uri to the constructor instead.
+            state: string, Opaque state string which is passed through the
+                   OAuth2 flow and returned to the client as a query parameter
+                   in the callback.
+
+        Returns:
+            A URI as a string to redirect the user to begin the authorization
+            flow.
+        """
+        if redirect_uri is not None:
+            logger.warning((
+                'The redirect_uri parameter for '
+                'OAuth2WebServerFlow.step1_get_authorize_url is deprecated. '
+                'Please move to passing the redirect_uri in via the '
+                'constructor.'))
+            self.redirect_uri = redirect_uri
+
+        if self.redirect_uri is None:
+            raise ValueError('The value of redirect_uri must not be None.')
+
+        query_params = {
+            'client_id': self.client_id,
+            'redirect_uri': self.redirect_uri,
+            'scope': self.scope,
+        }
+        if state is not None:
+            query_params['state'] = state
+        if self.login_hint is not None:
+            query_params['login_hint'] = self.login_hint
+        query_params.update(self.params)
+        return _update_query_params(self.auth_uri, query_params)
+
+    @util.positional(1)
+    def step1_get_device_and_user_codes(self, http=None):
+        """Returns a user code and the verification URL where to enter it
+
+        Returns:
+            A user code as a string for the user to authorize the application
+            An URL as a string where the user has to enter the code
+        """
+        if self.device_uri is None:
+            raise ValueError('The value of device_uri must not be None.')
+
+        body = urllib.parse.urlencode({
+            'client_id': self.client_id,
+            'scope': self.scope,
+        })
+        headers = {
+            'content-type': 'application/x-www-form-urlencoded',
+        }
+
+        if self.user_agent is not None:
+            headers['user-agent'] = self.user_agent
+
+        if http is None:
+            http = httplib2.Http()
+
+        resp, content = http.request(self.device_uri, method='POST', body=body,
+                                     headers=headers)
+        content = _from_bytes(content)
+        if resp.status == 200:
+            try:
+                flow_info = json.loads(content)
+            except ValueError as e:
+                raise OAuth2DeviceCodeError(
+                    'Could not parse server response as JSON: "%s", '
+                    'error: "%s"' % (content, e))
+            return DeviceFlowInfo.FromResponse(flow_info)
+        else:
+            error_msg = 'Invalid response %s.' % resp.status
+            try:
+                d = json.loads(content)
+                if 'error' in d:
+                    error_msg += ' Error: %s' % d['error']
+            except ValueError:
+                # Couldn't decode a JSON response, stick with the
+                # default message.
+                pass
+            raise OAuth2DeviceCodeError(error_msg)
+
+    @util.positional(2)
+    def step2_exchange(self, code=None, http=None, device_flow_info=None):
+        """Exchanges a code for OAuth2Credentials.
+
+        Args:
+            code: string, a dict-like object, or None. For a non-device
+                  flow, this is either the response code as a string, or a
+                  dictionary of query parameters to the redirect_uri. For a
+                  device flow, this should be None.
+            http: httplib2.Http, optional http instance to use when fetching
+                  credentials.
+            device_flow_info: DeviceFlowInfo, return value from step1 in the
+                              case of a device flow.
+
+        Returns:
+            An OAuth2Credentials object that can be used to authorize requests.
+
+        Raises:
+            FlowExchangeError: if a problem occurred exchanging the code for a
+                               refresh_token.
+            ValueError: if code and device_flow_info are both provided or both
+                        missing.
+        """
+        if code is None and device_flow_info is None:
+            raise ValueError('No code or device_flow_info provided.')
+        if code is not None and device_flow_info is not None:
+            raise ValueError('Cannot provide both code and device_flow_info.')
+
+        if code is None:
+            code = device_flow_info.device_code
+        elif not isinstance(code, six.string_types):
+            if 'code' not in code:
+                raise FlowExchangeError(code.get(
+                    'error', 'No code was supplied in the query parameters.'))
+            code = code['code']
+
+        post_data = {
+            'client_id': self.client_id,
+            'code': code,
+            'scope': self.scope,
+        }
+        if self.client_secret is not None:
+            post_data['client_secret'] = self.client_secret
+        if device_flow_info is not None:
+            post_data['grant_type'] = 'http://oauth.net/grant_type/device/1.0'
+        else:
+            post_data['grant_type'] = 'authorization_code'
+            post_data['redirect_uri'] = self.redirect_uri
+        body = urllib.parse.urlencode(post_data)
+        headers = {
+            'content-type': 'application/x-www-form-urlencoded',
+        }
+        if self.authorization_header is not None:
+            headers['Authorization'] = self.authorization_header
+        if self.user_agent is not None:
+            headers['user-agent'] = self.user_agent
+
+        if http is None:
+            http = httplib2.Http()
+
+        resp, content = http.request(self.token_uri, method='POST', body=body,
+                                     headers=headers)
+        d = _parse_exchange_token_response(content)
+        if resp.status == 200 and 'access_token' in d:
+            access_token = d['access_token']
+            refresh_token = d.get('refresh_token', None)
+            if not refresh_token:
+                logger.info(
+                    'Received token response with no refresh_token. Consider '
+                    "reauthenticating with approval_prompt='force'.")
+            token_expiry = None
+            if 'expires_in' in d:
+                token_expiry = (
+                    datetime.datetime.utcnow() +
+                    datetime.timedelta(seconds=int(d['expires_in'])))
+
+            extracted_id_token = None
+            if 'id_token' in d:
+                extracted_id_token = _extract_id_token(d['id_token'])
+
+            logger.info('Successfully retrieved access token')
+            return OAuth2Credentials(
+                access_token, self.client_id, self.client_secret,
+                refresh_token, token_expiry, self.token_uri, self.user_agent,
+                revoke_uri=self.revoke_uri, id_token=extracted_id_token,
+                token_response=d, scopes=self.scope,
+                token_info_uri=self.token_info_uri)
+        else:
+            logger.info('Failed to retrieve access token: %s', content)
+            if 'error' in d:
+                # Providers may attach arbitrary error text to the response.
+                error_msg = (str(d['error']) +
+                             str(d.get('error_description', '')))
+            else:
+                error_msg = 'Invalid response: %s.' % str(resp.status)
+            raise FlowExchangeError(error_msg)
+
+
+@util.positional(2)
+def flow_from_clientsecrets(filename, scope, redirect_uri=None,
+                            message=None, cache=None, login_hint=None,
+                            device_uri=None):
+    """Create a Flow from a clientsecrets file.
+
+    Will create the right kind of Flow based on the contents of the
+    clientsecrets file or will raise InvalidClientSecretsError for unknown
+    types of Flows.
+
+    Args:
+        filename: string, File name of client secrets.
+        scope: string or iterable of strings, scope(s) to request.
+        redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
+                      a non-web-based application, or a URI that handles the
+                      callback from the authorization server.
+        message: string, A friendly string to display to the user if the
+                 clientsecrets file is missing or invalid. If message is
+                 provided then sys.exit will be called in the case of an error.
+                 If message is not provided then
+                 clientsecrets.InvalidClientSecretsError will be raised.
+        cache: An optional cache service client that implements get() and set()
+               methods. See clientsecrets.loadfile() for details.
+        login_hint: string, Either an email address or domain. Passing this
+                    hint will either pre-fill the email box on the sign-in form
+                    or select the proper multi-login session, thereby
+                    simplifying the login flow.
+        device_uri: string, URI for device authorization endpoint. For
+                    convenience defaults to Google's endpoints but any
+                    OAuth 2.0 provider can be used.
+
+    Returns:
+        A Flow object.
+
+    Raises:
+        UnknownClientSecretsFlowError: if the file describes an unknown kind of
+                                       Flow.
+        clientsecrets.InvalidClientSecretsError: if the clientsecrets file is
+                                                 invalid.
+    """
+    try:
+        client_type, client_info = clientsecrets.loadfile(filename,
+                                                          cache=cache)
+        if client_type in (clientsecrets.TYPE_WEB,
+                           clientsecrets.TYPE_INSTALLED):
+            constructor_kwargs = {
+                'redirect_uri': redirect_uri,
+                'auth_uri': client_info['auth_uri'],
+                'token_uri': client_info['token_uri'],
+                'login_hint': login_hint,
+            }
+            revoke_uri = client_info.get('revoke_uri')
+            if revoke_uri is not None:
+                constructor_kwargs['revoke_uri'] = revoke_uri
+            if device_uri is not None:
+                constructor_kwargs['device_uri'] = device_uri
+            return OAuth2WebServerFlow(
+                client_info['client_id'], client_info['client_secret'],
+                scope, **constructor_kwargs)
+
+    except clientsecrets.InvalidClientSecretsError:
+        if message:
+            sys.exit(message)
+        else:
+            raise
+    else:
+        raise UnknownClientSecretsFlowError(
+            'This OAuth 2.0 flow is unsupported: %r' % client_type)
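A minimal sketch of driving the three-legged flow defined above, assuming placeholder client credentials and that this vendored copy is importable as oauth2client (its own modules import it that way)::

    from oauth2client.client import OAuth2WebServerFlow

    # Placeholder values; real ones come from the OAuth provider's console.
    flow = OAuth2WebServerFlow(
        client_id='example-client-id',
        client_secret='example-client-secret',
        scope='https://www.googleapis.com/auth/userinfo.email',
        redirect_uri='urn:ietf:wg:oauth:2.0:oob')

    # Step 1: send the user to the provider's consent page.
    print('Open this URL in a browser:', flow.step1_get_authorize_url())

    # Step 2: exchange the code the user pastes back for credentials.
    code = input('Authorization code: ').strip()
    credentials = flow.step2_exchange(code)
    print('Token expires at:', credentials.token_expiry)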
diff --git a/utils/frozen_chromite/third_party/oauth2client/clientsecrets.py b/utils/frozen_chromite/third_party/oauth2client/clientsecrets.py
new file mode 100644
index 0000000..eba1fd9
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/clientsecrets.py
@@ -0,0 +1,173 @@
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for reading OAuth 2.0 client secret files.
+
+A client_secrets.json file contains all the information needed to interact with
+an OAuth 2.0 protected service.
+"""
+
+import json
+import six
+
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+# Properties that make a client_secrets.json file valid.
+TYPE_WEB = 'web'
+TYPE_INSTALLED = 'installed'
+
+VALID_CLIENT = {
+    TYPE_WEB: {
+        'required': [
+            'client_id',
+            'client_secret',
+            'redirect_uris',
+            'auth_uri',
+            'token_uri',
+        ],
+        'string': [
+            'client_id',
+            'client_secret',
+        ],
+    },
+    TYPE_INSTALLED: {
+        'required': [
+            'client_id',
+            'client_secret',
+            'redirect_uris',
+            'auth_uri',
+            'token_uri',
+        ],
+        'string': [
+            'client_id',
+            'client_secret',
+        ],
+    },
+}
+
+
+class Error(Exception):
+    """Base error for this module."""
+
+
+class InvalidClientSecretsError(Error):
+    """Format of ClientSecrets file is invalid."""
+
+
+def _validate_clientsecrets(clientsecrets_dict):
+    """Validate parsed client secrets from a file.
+
+    Args:
+        clientsecrets_dict: dict, a dictionary holding the client secrets.
+
+    Returns:
+        tuple, a string of the client type and the information parsed
+        from the file.
+    """
+    _INVALID_FILE_FORMAT_MSG = (
+        'Invalid file format. See '
+        'https://developers.google.com/api-client-library/'
+        'python/guide/aaa_client_secrets')
+
+    if clientsecrets_dict is None:
+        raise InvalidClientSecretsError(_INVALID_FILE_FORMAT_MSG)
+    try:
+        (client_type, client_info), = clientsecrets_dict.items()
+    except (ValueError, AttributeError):
+        raise InvalidClientSecretsError(
+            _INVALID_FILE_FORMAT_MSG + ' '
+            'Expected a JSON object with a single property for a "web" or '
+            '"installed" application')
+
+    if client_type not in VALID_CLIENT:
+        raise InvalidClientSecretsError(
+            'Unknown client type: %s.' % (client_type,))
+
+    for prop_name in VALID_CLIENT[client_type]['required']:
+        if prop_name not in client_info:
+            raise InvalidClientSecretsError(
+                'Missing property "%s" in a client type of "%s".' %
+                (prop_name, client_type))
+    for prop_name in VALID_CLIENT[client_type]['string']:
+        if client_info[prop_name].startswith('[['):
+            raise InvalidClientSecretsError(
+                'Property "%s" is not configured.' % prop_name)
+    return client_type, client_info
+
+
+def load(fp):
+    obj = json.load(fp)
+    return _validate_clientsecrets(obj)
+
+
+def loads(s):
+    obj = json.loads(s)
+    return _validate_clientsecrets(obj)
+
+
+def _loadfile(filename):
+    try:
+        with open(filename, 'r') as fp:
+            obj = json.load(fp)
+    except IOError:
+        raise InvalidClientSecretsError('File not found: "%s"' % filename)
+    return _validate_clientsecrets(obj)
+
+
+def loadfile(filename, cache=None):
+    """Loading of client_secrets JSON file, optionally backed by a cache.
+
+    Typical cache storage would be App Engine memcache service,
+    but you can pass in any other cache client that implements
+    these methods:
+
+    * ``get(key, namespace=ns)``
+    * ``set(key, value, namespace=ns)``
+
+    Usage::
+
+        # without caching
+        client_type, client_info = loadfile('secrets.json')
+        # using App Engine memcache service
+        from google.appengine.api import memcache
+        client_type, client_info = loadfile('secrets.json', cache=memcache)
+
+    Args:
+        filename: string, Path to a client_secrets.json file on a filesystem.
+        cache: An optional cache service client that implements get() and set()
+               methods. If not specified, the file is always loaded from the
+               filesystem.
+
+    Raises:
+        InvalidClientSecretsError: In case of a validation error or some
+                                   I/O failure. Can happen only on cache miss.
+
+    Returns:
+        (client_type, client_info) tuple, as _loadfile() normally would.
+        The JSON contents are validated only during the first load; cache hits
+        are not validated.
+    """
+    _SECRET_NAMESPACE = 'oauth2client:secrets#ns'
+
+    if not cache:
+        return _loadfile(filename)
+
+    obj = cache.get(filename, namespace=_SECRET_NAMESPACE)
+    if obj is None:
+        client_type, client_info = _loadfile(filename)
+        obj = {client_type: client_info}
+        cache.set(filename, obj, namespace=_SECRET_NAMESPACE)
+
+    return next(six.iteritems(obj))
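For reference, a minimal client secrets document that passes the validation above might look like the following sketch; all values are placeholders, and loads() is used instead of loadfile() so no file on disk is needed::

    import json

    from oauth2client import clientsecrets

    # A minimal "installed"-type document; the required keys mirror
    # VALID_CLIENT above, and all values are placeholders.
    secrets = {
        'installed': {
            'client_id': 'example-client-id',
            'client_secret': 'example-client-secret',
            'redirect_uris': ['urn:ietf:wg:oauth:2.0:oob'],
            'auth_uri': 'https://accounts.google.com/o/oauth2/auth',
            'token_uri': 'https://accounts.google.com/o/oauth2/token',
        },
    }

    # loads() parses and validates, returning (client_type, client_info).
    client_type, client_info = clientsecrets.loads(json.dumps(secrets))
    assert client_type == clientsecrets.TYPE_INSTALLED
    print(client_info['client_id'])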
diff --git a/utils/frozen_chromite/third_party/oauth2client/crypt.py b/utils/frozen_chromite/third_party/oauth2client/crypt.py
new file mode 100644
index 0000000..c450c5c
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/crypt.py
@@ -0,0 +1,243 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Crypto-related routines for oauth2client."""
+
+import json
+import logging
+import time
+
+from oauth2client._helpers import _from_bytes
+from oauth2client._helpers import _json_encode
+from oauth2client._helpers import _to_bytes
+from oauth2client._helpers import _urlsafe_b64decode
+from oauth2client._helpers import _urlsafe_b64encode
+
+
+CLOCK_SKEW_SECS = 300  # 5 minutes in seconds
+AUTH_TOKEN_LIFETIME_SECS = 300  # 5 minutes in seconds
+MAX_TOKEN_LIFETIME_SECS = 86400  # 1 day in seconds
+
+logger = logging.getLogger(__name__)
+
+
+class AppIdentityError(Exception):
+    """Error to indicate crypto failure."""
+
+
+def _bad_pkcs12_key_as_pem(*args, **kwargs):
+    raise NotImplementedError('pkcs12_key_as_pem requires OpenSSL.')
+
+
+try:
+    from oauth2client._openssl_crypt import OpenSSLVerifier
+    from oauth2client._openssl_crypt import OpenSSLSigner
+    from oauth2client._openssl_crypt import pkcs12_key_as_pem
+except ImportError:  # pragma: NO COVER
+    OpenSSLVerifier = None
+    OpenSSLSigner = None
+    pkcs12_key_as_pem = _bad_pkcs12_key_as_pem
+
+try:
+    from oauth2client._pycrypto_crypt import PyCryptoVerifier
+    from oauth2client._pycrypto_crypt import PyCryptoSigner
+except ImportError:  # pragma: NO COVER
+    PyCryptoVerifier = None
+    PyCryptoSigner = None
+
+
+if OpenSSLSigner:
+    Signer = OpenSSLSigner
+    Verifier = OpenSSLVerifier
+elif PyCryptoSigner:  # pragma: NO COVER
+    Signer = PyCryptoSigner
+    Verifier = PyCryptoVerifier
+else:  # pragma: NO COVER
+    raise ImportError('No encryption library found. Please install either '
+                      'PyOpenSSL, or PyCrypto 2.6 or later')
+
+
+def make_signed_jwt(signer, payload):
+    """Make a signed JWT.
+
+    See http://self-issued.info/docs/draft-jones-json-web-token.html.
+
+    Args:
+        signer: crypt.Signer, Cryptographic signer.
+        payload: dict, Dictionary of data to convert to JSON and then sign.
+
+    Returns:
+        string, The JWT for the payload.
+    """
+    header = {'typ': 'JWT', 'alg': 'RS256'}
+
+    segments = [
+      _urlsafe_b64encode(_json_encode(header)),
+      _urlsafe_b64encode(_json_encode(payload)),
+    ]
+    signing_input = b'.'.join(segments)
+
+    signature = signer.sign(signing_input)
+    segments.append(_urlsafe_b64encode(signature))
+
+    logger.debug(str(segments))
+
+    return b'.'.join(segments)
+
+
+def _verify_signature(message, signature, certs):
+    """Verifies signed content using a list of certificates.
+
+    Args:
+        message: string or bytes, The message to verify.
+        signature: string or bytes, The signature on the message.
+        certs: iterable, certificates in PEM format.
+
+    Raises:
+        AppIdentityError: If none of the certificates can verify the message
+                          against the signature.
+    """
+    for pem in certs:
+        verifier = Verifier.from_string(pem, is_x509_cert=True)
+        if verifier.verify(message, signature):
+            return
+
+    # If we have not returned, no certificate confirms the signature.
+    raise AppIdentityError('Invalid token signature')
+
+
+def _check_audience(payload_dict, audience):
+    """Checks audience field from a JWT payload.
+
+    Does nothing if the passed-in ``audience`` is None.
+
+    Args:
+        payload_dict: dict, A dictionary containing a JWT payload.
+        audience: string or NoneType, an audience to check for in
+                  the JWT payload.
+
+    Raises:
+        AppIdentityError: If there is no ``'aud'`` field in the payload
+                          dictionary but there is an ``audience`` to check.
+        AppIdentityError: If the ``'aud'`` field in the payload dictionary
+                          does not match the ``audience``.
+    """
+    if audience is None:
+        return
+
+    audience_in_payload = payload_dict.get('aud')
+    if audience_in_payload is None:
+        raise AppIdentityError('No aud field in token: %s' %
+                               (payload_dict,))
+    if audience_in_payload != audience:
+        raise AppIdentityError('Wrong recipient, %s != %s: %s' %
+                               (audience_in_payload, audience, payload_dict))
+
+
+def _verify_time_range(payload_dict):
+    """Verifies the issued at and expiration from a JWT payload.
+
+    Makes sure the current time (in UTC) falls between the issued at and
+    expiration for the JWT (with some skew allowed for via
+    ``CLOCK_SKEW_SECS``).
+
+    Args:
+        payload_dict: dict, A dictionary containing a JWT payload.
+
+    Raises:
+        AppIdentityError: If there is no ``'iat'`` field in the payload
+                          dictionary.
+        AppIdentityError: If there is no ``'exp'`` field in the payload
+                          dictionary.
+        AppIdentityError: If the JWT expiration is too far in the future (i.e.,
+                          if the expiration would imply a token lifetime
+                          longer than what is allowed).
+        AppIdentityError: If the token appears to have been issued in the
+                          future (up to clock skew).
+        AppIdentityError: If the token appears to have expired in the past
+                          (up to clock skew).
+    """
+    # Get the current time to use throughout.
+    now = int(time.time())
+
+    # Make sure issued at and expiration are in the payload.
+    issued_at = payload_dict.get('iat')
+    if issued_at is None:
+        raise AppIdentityError('No iat field in token: %s' % (payload_dict,))
+    expiration = payload_dict.get('exp')
+    if expiration is None:
+        raise AppIdentityError('No exp field in token: %s' % (payload_dict,))
+
+    # Make sure the expiration gives an acceptable token lifetime.
+    if expiration >= now + MAX_TOKEN_LIFETIME_SECS:
+        raise AppIdentityError('exp field too far in future: %s' %
+                               (payload_dict,))
+
+    # Make sure (up to clock skew) that the token wasn't issued in the future.
+    earliest = issued_at - CLOCK_SKEW_SECS
+    if now < earliest:
+        raise AppIdentityError('Token used too early, %d < %d: %s' %
+                               (now, earliest, payload_dict))
+    # Make sure (up to clock skew) that the token isn't already expired.
+    latest = expiration + CLOCK_SKEW_SECS
+    if now > latest:
+        raise AppIdentityError('Token used too late, %d > %d: %s' %
+                               (now, latest, payload_dict))
+
+
+def verify_signed_jwt_with_certs(jwt, certs, audience=None):
+    """Verify a JWT against public certs.
+
+    See http://self-issued.info/docs/draft-jones-json-web-token.html.
+
+    Args:
+        jwt: string, A JWT.
+        certs: dict, Dictionary whose values are public keys in PEM format.
+        audience: string, The audience, 'aud', that this JWT should contain. If
+                  None then the JWT's 'aud' parameter is not verified.
+
+    Returns:
+        dict, The deserialized JSON payload in the JWT.
+
+    Raises:
+        AppIdentityError: if any checks are failed.
+    """
+    jwt = _to_bytes(jwt)
+
+    if jwt.count(b'.') != 2:
+        raise AppIdentityError(
+            'Wrong number of segments in token: %s' % (jwt,))
+
+    header, payload, signature = jwt.split(b'.')
+    message_to_sign = header + b'.' + payload
+    signature = _urlsafe_b64decode(signature)
+
+    # Parse token.
+    payload_bytes = _urlsafe_b64decode(payload)
+    try:
+        payload_dict = json.loads(_from_bytes(payload_bytes))
+    except:
+        raise AppIdentityError('Can\'t parse token: %s' % (payload_bytes,))
+
+    # Verify that the signature matches the message.
+    _verify_signature(message_to_sign, signature, certs.values())
+
+    # Verify the issued at and created times in the payload.
+    _verify_time_range(payload_dict)
+
+    # Check audience.
+    _check_audience(payload_dict, audience)
+
+    return payload_dict
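A hedged sketch of how make_signed_jwt() above is typically used to build a service-account assertion; the key path and account email are hypothetical, and Signer.from_string() comes from whichever crypto backend was selected above::

    import time

    from oauth2client import crypt

    # Hypothetical key path; a real PEM private key would come from a
    # service-account key file.
    with open('/path/to/private_key.pem', 'rb') as key_file:
        signer = crypt.Signer.from_string(key_file.read())

    now = int(time.time())
    payload = {
        'iss': 'service-account@example.iam.gserviceaccount.com',  # placeholder
        'aud': 'https://accounts.google.com/o/oauth2/token',
        'iat': now,
        'exp': now + crypt.AUTH_TOKEN_LIFETIME_SECS,
    }

    # Produces the compact header.payload.signature form as bytes.
    jwt = crypt.make_signed_jwt(signer, payload)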
diff --git a/utils/frozen_chromite/third_party/oauth2client/file.py b/utils/frozen_chromite/third_party/oauth2client/file.py
new file mode 100644
index 0000000..d0dd174
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/file.py
@@ -0,0 +1,122 @@
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for OAuth.
+
+Utilities for making it easier to work with OAuth 2.0
+credentials.
+"""
+
+import os
+import threading
+
+from oauth2client.client import Credentials
+from oauth2client.client import Storage as BaseStorage
+
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+
+class CredentialsFileSymbolicLinkError(Exception):
+    """Credentials files must not be symbolic links."""
+
+
+class Storage(BaseStorage):
+    """Store and retrieve a single credential to and from a file."""
+
+    def __init__(self, filename):
+        self._filename = filename
+        self._lock = threading.Lock()
+
+    def _validate_file(self):
+        if os.path.islink(self._filename):
+            raise CredentialsFileSymbolicLinkError(
+                'File: %s is a symbolic link.' % self._filename)
+
+    def acquire_lock(self):
+        """Acquires any lock necessary to access this Storage.
+
+        This lock is not reentrant.
+        """
+        self._lock.acquire()
+
+    def release_lock(self):
+        """Release the Storage lock.
+
+        Trying to release a lock that isn't held will result in a
+        RuntimeError.
+        """
+        self._lock.release()
+
+    def locked_get(self):
+        """Retrieve Credential from file.
+
+        Returns:
+            oauth2client.client.Credentials
+
+        Raises:
+            CredentialsFileSymbolicLinkError: if the file is a symbolic link.
+        """
+        credentials = None
+        self._validate_file()
+        try:
+            f = open(self._filename, 'rb')
+            content = f.read()
+            f.close()
+        except IOError:
+            return credentials
+
+        try:
+            credentials = Credentials.new_from_json(content)
+            credentials.set_store(self)
+        except ValueError:
+            pass
+
+        return credentials
+
+    def _create_file_if_needed(self):
+        """Create an empty file if necessary.
+
+        This method will not initialize the file. Instead it implements a
+        simple version of "touch" to ensure the file has been created.
+        """
+        if not os.path.exists(self._filename):
+            old_umask = os.umask(0o177)
+            try:
+                open(self._filename, 'a+b').close()
+            finally:
+                os.umask(old_umask)
+
+    def locked_put(self, credentials):
+        """Write Credentials to file.
+
+        Args:
+            credentials: Credentials, the credentials to store.
+
+        Raises:
+            CredentialsFileSymbolicLinkError: if the file is a symbolic link.
+        """
+        self._create_file_if_needed()
+        self._validate_file()
+        f = open(self._filename, 'w')
+        f.write(credentials.to_json())
+        f.close()
+
+    def locked_delete(self):
+        """Delete Credentials file.
+
+        Args:
+            credentials: Credentials, the credentials to store.
+        """
+        os.unlink(self._filename)
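A small sketch of the intended get()/put() cycle for the file-backed Storage above; the path is hypothetical, and get()/put() are the locking wrappers inherited from oauth2client.client.Storage::

    from oauth2client.file import Storage

    # Hypothetical location for the cached credential file.
    storage = Storage('/tmp/example_oauth_credential.json')

    # get() returns None when nothing has been stored yet.
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        print('No valid credential cached; run an authorization flow first.')
    else:
        # put() re-serializes the credential back into the file under the lock.
        storage.put(credentials)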
diff --git a/utils/frozen_chromite/third_party/oauth2client/gce.py b/utils/frozen_chromite/third_party/oauth2client/gce.py
new file mode 100644
index 0000000..77b08f1
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/gce.py
@@ -0,0 +1,111 @@
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for Google Compute Engine
+
+Utilities for making it easier to use OAuth 2.0 on Google Compute Engine.
+"""
+
+import json
+import logging
+from six.moves import urllib
+
+from oauth2client._helpers import _from_bytes
+from oauth2client import util
+from oauth2client.client import HttpAccessTokenRefreshError
+from oauth2client.client import AssertionCredentials
+
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+logger = logging.getLogger(__name__)
+
+# URI Template for the endpoint that returns access_tokens.
+META = ('http://metadata.google.internal/0.1/meta-data/service-accounts/'
+        'default/acquire{?scope}')
+
+
+class AppAssertionCredentials(AssertionCredentials):
+    """Credentials object for Compute Engine Assertion Grants
+
+    This object will allow a Compute Engine instance to identify itself to
+    Google and other OAuth 2.0 servers that can verify assertions. It can be
+    used for the purpose of accessing data stored under an account assigned to
+    the Compute Engine instance itself.
+
+    This credential does not require a flow to instantiate because it
+    represents a two legged flow, and therefore has all of the required
+    information to generate and refresh its own access tokens.
+    """
+
+    @util.positional(2)
+    def __init__(self, scope, **kwargs):
+        """Constructor for AppAssertionCredentials
+
+        Args:
+            scope: string or iterable of strings, scope(s) of the credentials
+                   being requested.
+        """
+        self.scope = util.scopes_to_string(scope)
+        self.kwargs = kwargs
+
+        # Assertion type is no longer used, but still in the
+        # parent class signature.
+        super(AppAssertionCredentials, self).__init__(None)
+
+    @classmethod
+    def from_json(cls, json_data):
+        data = json.loads(_from_bytes(json_data))
+        return AppAssertionCredentials(data['scope'])
+
+    def _refresh(self, http_request):
+        """Refreshes the access_token.
+
+        Skip all the storage hoops and just refresh using the API.
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make
+                          the refresh request.
+
+        Raises:
+            HttpAccessTokenRefreshError: When the refresh fails.
+        """
+        query = '?scope=%s' % urllib.parse.quote(self.scope, '')
+        uri = META.replace('{?scope}', query)
+        response, content = http_request(uri)
+        content = _from_bytes(content)
+        if response.status == 200:
+            try:
+                d = json.loads(content)
+            except Exception as e:
+                raise HttpAccessTokenRefreshError(str(e),
+                                                  status=response.status)
+            self.access_token = d['accessToken']
+        else:
+            if response.status == 404:
+                content += (' This can occur if a VM was created'
+                            ' with no service account or scopes.')
+            raise HttpAccessTokenRefreshError(content, status=response.status)
+
+    @property
+    def serialization_data(self):
+        raise NotImplementedError(
+            'Cannot serialize credentials for GCE service accounts.')
+
+    def create_scoped_required(self):
+        return not self.scope
+
+    def create_scoped(self, scopes):
+        return AppAssertionCredentials(scopes, **self.kwargs)
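A sketch of using AppAssertionCredentials above from inside a Compute Engine VM; the scope, project, and request URL are placeholders, and authorize() is inherited from the OAuth2Credentials base class::

    import httplib2

    from oauth2client.gce import AppAssertionCredentials

    # Only meaningful on a Compute Engine VM that can reach the metadata
    # server; the scope below is an example.
    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/devstorage.read_only')

    http = credentials.authorize(httplib2.Http())
    resp, content = http.request(
        'https://www.googleapis.com/storage/v1/b?project=example-project')
    print(resp.status)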
diff --git a/utils/frozen_chromite/third_party/oauth2client/keyring_storage.py b/utils/frozen_chromite/third_party/oauth2client/keyring_storage.py
new file mode 100644
index 0000000..0a4c285
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/keyring_storage.py
@@ -0,0 +1,114 @@
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A keyring based Storage.
+
+A Storage for Credentials that uses the keyring module.
+"""
+
+import threading
+
+import keyring
+
+from oauth2client.client import Credentials
+from oauth2client.client import Storage as BaseStorage
+
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+
+class Storage(BaseStorage):
+    """Store and retrieve a single credential to and from the keyring.
+
+    To use this module you must have the keyring module installed. See
+    <http://pypi.python.org/pypi/keyring/>. This is an optional module and is
+    not installed with oauth2client by default because it does not work on all
+    the platforms that oauth2client supports, such as Google App Engine.
+
+    The keyring module <http://pypi.python.org/pypi/keyring/> is a
+    cross-platform library for accessing the keyring capabilities of the local
+    system. The user will be prompted for their keyring password when this
+    module is used, and the manner in which the user is prompted will vary per
+    platform.
+
+    Usage::
+
+        from oauth2client.keyring_storage import Storage
+
+        s = Storage('name_of_application', 'user1')
+        credentials = s.get()
+
+    """
+
+    def __init__(self, service_name, user_name):
+        """Constructor.
+
+        Args:
+            service_name: string, The name of the service under which the
+                          credentials are stored.
+            user_name: string, The name of the user to store credentials for.
+        """
+        self._service_name = service_name
+        self._user_name = user_name
+        self._lock = threading.Lock()
+
+    def acquire_lock(self):
+        """Acquires any lock necessary to access this Storage.
+
+        This lock is not reentrant.
+        """
+        self._lock.acquire()
+
+    def release_lock(self):
+        """Release the Storage lock.
+
+        Trying to release a lock that isn't held will result in a
+        RuntimeError.
+        """
+        self._lock.release()
+
+    def locked_get(self):
+        """Retrieve Credential from file.
+
+        Returns:
+            oauth2client.client.Credentials
+        """
+        credentials = None
+        content = keyring.get_password(self._service_name, self._user_name)
+
+        if content is not None:
+            try:
+                credentials = Credentials.new_from_json(content)
+                credentials.set_store(self)
+            except ValueError:
+                pass
+
+        return credentials
+
+    def locked_put(self, credentials):
+        """Write Credentials to file.
+
+        Args:
+            credentials: Credentials, the credentials to store.
+        """
+        keyring.set_password(self._service_name, self._user_name,
+                             credentials.to_json())
+
+    def locked_delete(self):
+        """Delete Credentials file.
+
+        Args:
+            credentials: Credentials, the credentials to store.
+        """
+        keyring.set_password(self._service_name, self._user_name, '')
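Beyond the Usage snippet in the docstring above, a sketch of the typical check-before-use pattern; the service and user names are illustrative only::

    from oauth2client.keyring_storage import Storage

    # 'example-app' and 'user1' are illustrative service and user names.
    storage = Storage('example-app', 'user1')

    credentials = storage.get()
    if credentials is None or credentials.invalid:
        print('No usable credential in the keyring; run an authorization '
              'flow and call storage.put(credentials) to cache the result.')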
diff --git a/utils/frozen_chromite/third_party/oauth2client/locked_file.py b/utils/frozen_chromite/third_party/oauth2client/locked_file.py
new file mode 100644
index 0000000..1028a7e
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/locked_file.py
@@ -0,0 +1,387 @@
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Locked file interface that should work on Unix and Windows pythons.
+
+This module first tries to use fcntl locking to ensure serialized access
+to a file, then falls back on a lock file if that is unavailable.
+
+Usage::
+
+    f = LockedFile('filename', 'r+b', 'rb')
+    f.open_and_lock()
+    if f.is_locked():
+      print('Acquired filename with r+b mode')
+      f.file_handle().write('locked data')
+    else:
+      print('Acquired filename with rb mode')
+    f.unlock_and_close()
+
+"""
+
+from __future__ import print_function
+
+import errno
+import logging
+import os
+import time
+
+from oauth2client import util
+
+
+__author__ = 'cache@google.com (David T McWherter)'
+
+logger = logging.getLogger(__name__)
+
+
+class CredentialsFileSymbolicLinkError(Exception):
+    """Credentials files must not be symbolic links."""
+
+
+class AlreadyLockedException(Exception):
+    """Trying to lock a file that has already been locked by the LockedFile."""
+    pass
+
+
+def validate_file(filename):
+    if os.path.islink(filename):
+        raise CredentialsFileSymbolicLinkError(
+            'File: %s is a symbolic link.' % filename)
+
+
+class _Opener(object):
+    """Base class for different locking primitives."""
+
+    def __init__(self, filename, mode, fallback_mode):
+        """Create an Opener.
+
+        Args:
+            filename: string, The pathname of the file.
+            mode: string, The preferred mode to access the file with.
+            fallback_mode: string, The mode to use if locking fails.
+        """
+        self._locked = False
+        self._filename = filename
+        self._mode = mode
+        self._fallback_mode = fallback_mode
+        self._fh = None
+        self._lock_fd = None
+
+    def is_locked(self):
+        """Was the file locked."""
+        return self._locked
+
+    def file_handle(self):
+        """The file handle to the file. Valid only after opened."""
+        return self._fh
+
+    def filename(self):
+        """The filename that is being locked."""
+        return self._filename
+
+    def open_and_lock(self, timeout, delay):
+        """Open the file and lock it.
+
+        Args:
+            timeout: float, How long to try to lock for.
+            delay: float, How long to wait between retries.
+        """
+        pass
+
+    def unlock_and_close(self):
+        """Unlock and close the file."""
+        pass
+
+
+class _PosixOpener(_Opener):
+    """Lock files using Posix advisory lock files."""
+
+    def open_and_lock(self, timeout, delay):
+        """Open the file and lock it.
+
+        Tries to create a .lock file next to the file we're trying to open.
+
+        Args:
+            timeout: float, How long to try to lock for.
+            delay: float, How long to wait between retries.
+
+        Raises:
+            AlreadyLockedException: if the lock is already acquired.
+            IOError: if the open fails.
+            CredentialsFileSymbolicLinkError: if the file is a symbolic link.
+        """
+        if self._locked:
+            raise AlreadyLockedException('File %s is already locked' %
+                                         self._filename)
+        self._locked = False
+
+        validate_file(self._filename)
+        try:
+            self._fh = open(self._filename, self._mode)
+        except IOError as e:
+            # If we can't access with _mode, try _fallback_mode and don't lock.
+            if e.errno == errno.EACCES:
+                self._fh = open(self._filename, self._fallback_mode)
+                return
+
+        lock_filename = self._posix_lockfile(self._filename)
+        start_time = time.time()
+        while True:
+            try:
+                self._lock_fd = os.open(lock_filename,
+                                        os.O_CREAT | os.O_EXCL | os.O_RDWR)
+                self._locked = True
+                break
+
+            except OSError as e:
+                if e.errno != errno.EEXIST:
+                    raise
+                if (time.time() - start_time) >= timeout:
+                    logger.warn('Could not acquire lock %s in %s seconds',
+                                lock_filename, timeout)
+                    # Close the file and open in fallback_mode.
+                    if self._fh:
+                        self._fh.close()
+                    self._fh = open(self._filename, self._fallback_mode)
+                    return
+                time.sleep(delay)
+
+    def unlock_and_close(self):
+        """Unlock a file by removing the .lock file, and close the handle."""
+        if self._locked:
+            lock_filename = self._posix_lockfile(self._filename)
+            os.close(self._lock_fd)
+            os.unlink(lock_filename)
+            self._locked = False
+            self._lock_fd = None
+        if self._fh:
+            self._fh.close()
+
+    def _posix_lockfile(self, filename):
+        """The name of the lock file to use for posix locking."""
+        return '%s.lock' % filename
+
+
+try:
+    import fcntl
+
+    class _FcntlOpener(_Opener):
+        """Open, lock, and unlock a file using fcntl.lockf."""
+
+        def open_and_lock(self, timeout, delay):
+            """Open the file and lock it.
+
+            Args:
+                timeout: float, How long to try to lock for.
+                delay: float, How long to wait between retries
+
+            Raises:
+                AlreadyLockedException: if the lock is already acquired.
+                IOError: if the open fails.
+                CredentialsFileSymbolicLinkError: if the file is a symbolic
+                                                  link.
+            """
+            if self._locked:
+                raise AlreadyLockedException('File %s is already locked' %
+                                             self._filename)
+            start_time = time.time()
+
+            validate_file(self._filename)
+            try:
+                self._fh = open(self._filename, self._mode)
+            except IOError as e:
+                # If we can't access with _mode, try _fallback_mode and
+                # don't lock.
+                if e.errno in (errno.EPERM, errno.EACCES):
+                    self._fh = open(self._filename, self._fallback_mode)
+                    return
+
+            # We opened in _mode, try to lock the file.
+            while True:
+                try:
+                    fcntl.lockf(self._fh.fileno(), fcntl.LOCK_EX)
+                    self._locked = True
+                    return
+                except IOError as e:
+                    # If not retrying, then just pass on the error.
+                    if timeout == 0:
+                        raise
+                    if e.errno != errno.EACCES:
+                        raise
+                    # We could not acquire the lock. Try again.
+                    if (time.time() - start_time) >= timeout:
+                        logger.warn('Could not lock %s in %s seconds',
+                                    self._filename, timeout)
+                        if self._fh:
+                            self._fh.close()
+                        self._fh = open(self._filename, self._fallback_mode)
+                        return
+                    time.sleep(delay)
+
+        def unlock_and_close(self):
+            """Close and unlock the file using the fcntl.lockf primitive."""
+            if self._locked:
+                fcntl.lockf(self._fh.fileno(), fcntl.LOCK_UN)
+            self._locked = False
+            if self._fh:
+                self._fh.close()
+except ImportError:
+    _FcntlOpener = None
+
+
+try:
+    import pywintypes
+    import win32con
+    import win32file
+
+    class _Win32Opener(_Opener):
+        """Open, lock, and unlock a file using windows primitives."""
+
+        # Error #33:
+        #  'The process cannot access the file because another process'
+        FILE_IN_USE_ERROR = 33
+
+        # Error #158:
+        #  'The segment is already unlocked.'
+        FILE_ALREADY_UNLOCKED_ERROR = 158
+
+        def open_and_lock(self, timeout, delay):
+            """Open the file and lock it.
+
+            Args:
+                timeout: float, How long to try to lock for.
+                delay: float, How long to wait between retries
+
+            Raises:
+                AlreadyLockedException: if the lock is already acquired.
+                IOError: if the open fails.
+                CredentialsFileSymbolicLinkError: if the file is a symbolic
+                                                  link.
+            """
+            if self._locked:
+                raise AlreadyLockedException('File %s is already locked' %
+                                             self._filename)
+            start_time = time.time()
+
+            validate_file(self._filename)
+            try:
+                self._fh = open(self._filename, self._mode)
+            except IOError as e:
+                # If we can't access with _mode, try _fallback_mode
+                # and don't lock.
+                if e.errno == errno.EACCES:
+                    self._fh = open(self._filename, self._fallback_mode)
+                    return
+
+            # We opened in _mode, try to lock the file.
+            while True:
+                try:
+                    hfile = win32file._get_osfhandle(self._fh.fileno())
+                    win32file.LockFileEx(
+                        hfile,
+                        (win32con.LOCKFILE_FAIL_IMMEDIATELY |
+                         win32con.LOCKFILE_EXCLUSIVE_LOCK), 0, -0x10000,
+                        pywintypes.OVERLAPPED())
+                    self._locked = True
+                    return
+                except pywintypes.error as e:
+                    if timeout == 0:
+                        raise
+
+                    # If the error is not that the file is already
+                    # in use, raise.
+                    if e[0] != _Win32Opener.FILE_IN_USE_ERROR:
+                        raise
+
+                    # We could not acquire the lock. Try again.
+                    if (time.time() - start_time) >= timeout:
+                        logger.warn('Could not lock %s in %s seconds' % (
+                            self._filename, timeout))
+                        if self._fh:
+                            self._fh.close()
+                        self._fh = open(self._filename, self._fallback_mode)
+                        return
+                    time.sleep(delay)
+
+        def unlock_and_close(self):
+            """Close and unlock the file using the win32 primitive."""
+            if self._locked:
+                try:
+                    hfile = win32file._get_osfhandle(self._fh.fileno())
+                    win32file.UnlockFileEx(hfile, 0, -0x10000,
+                                           pywintypes.OVERLAPPED())
+                except pywintypes.error as e:
+                    if e[0] != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR:
+                        raise
+            self._locked = False
+            if self._fh:
+                self._fh.close()
+except ImportError:
+    _Win32Opener = None
+
+
+class LockedFile(object):
+    """Represent a file that has exclusive access."""
+
+    @util.positional(4)
+    def __init__(self, filename, mode, fallback_mode, use_native_locking=True):
+        """Construct a LockedFile.
+
+        Args:
+            filename: string, The path of the file to open.
+            mode: string, The mode to try to open the file with.
+            fallback_mode: string, The mode to use if locking fails.
+            use_native_locking: bool, Whether or not fcntl/win32 locking is
+                                used.
+        """
+        opener = None
+        if not opener and use_native_locking:
+            if _Win32Opener:
+                opener = _Win32Opener(filename, mode, fallback_mode)
+            if _FcntlOpener:
+                opener = _FcntlOpener(filename, mode, fallback_mode)
+
+        if not opener:
+            opener = _PosixOpener(filename, mode, fallback_mode)
+
+        self._opener = opener
+
+    def filename(self):
+        """Return the filename we were constructed with."""
+        return self._opener._filename
+
+    def file_handle(self):
+        """Return the file_handle to the opened file."""
+        return self._opener.file_handle()
+
+    def is_locked(self):
+        """Return whether we successfully locked the file."""
+        return self._opener.is_locked()
+
+    def open_and_lock(self, timeout=0, delay=0.05):
+        """Open the file, trying to lock it.
+
+        Args:
+            timeout: float, The number of seconds to try to acquire the lock.
+            delay: float, The number of seconds to wait between retry attempts.
+
+        Raises:
+            AlreadyLockedException: if the lock is already acquired.
+            IOError: if the open fails.
+        """
+        self._opener.open_and_lock(timeout, delay)
+
+    def unlock_and_close(self):
+        """Unlock and close a file."""
+        self._opener.unlock_and_close()
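The module docstring above already shows the basic open/lock/unlock pattern; the sketch below additionally pre-creates the file, since the preferred 'r+' mode requires an existing file. The path is hypothetical::

    import os

    from oauth2client.locked_file import LockedFile

    path = '/tmp/example_store.json'  # hypothetical path
    if not os.path.exists(path):
        open(path, 'a').close()       # 'r+' requires an existing file

    locked = LockedFile(path, 'r+', 'r')
    locked.open_and_lock(timeout=5, delay=0.1)
    try:
        if locked.is_locked():
            # Exclusive access: safe to rewrite the file in place.
            locked.file_handle().write('{}')
        else:
            # The lock could not be taken; the file was reopened read-only.
            print(locked.file_handle().read())
    finally:
        locked.unlock_and_close()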
diff --git a/utils/frozen_chromite/third_party/oauth2client/multistore_file.py b/utils/frozen_chromite/third_party/oauth2client/multistore_file.py
new file mode 100644
index 0000000..5a12797
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/multistore_file.py
@@ -0,0 +1,484 @@
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Multi-credential file store with lock support.
+
+This module implements a JSON credential store where multiple
+credentials can be stored in one file. That file supports locking
+both in a single process and across processes.
+
+The credentials themselves are keyed off of:
+
+* client_id
+* user_agent
+* scope
+
+The format of the stored data is like so::
+
+    {
+      'file_version': 1,
+      'data': [
+          {
+              'key': {
+                  'clientId': '<client id>',
+                  'userAgent': '<user agent>',
+                  'scope': '<scope>'
+              },
+              'credential': {
+                  # JSON serialized Credentials.
+              }
+          }
+      ]
+    }
+
+"""
+
+import errno
+import json
+import logging
+import os
+import threading
+
+from oauth2client.client import Credentials
+from oauth2client.client import Storage as BaseStorage
+from oauth2client import util
+from oauth2client.locked_file import LockedFile
+
+
+__author__ = 'jbeda@google.com (Joe Beda)'
+
+logger = logging.getLogger(__name__)
+
+# A dict from 'filename'->_MultiStore instances
+_multistores = {}
+_multistores_lock = threading.Lock()
+
+
+class Error(Exception):
+    """Base error for this module."""
+
+
+class NewerCredentialStoreError(Error):
+    """The credential store is a newer version than supported."""
+
+
+@util.positional(4)
+def get_credential_storage(filename, client_id, user_agent, scope,
+                           warn_on_readonly=True):
+    """Get a Storage instance for a credential.
+
+    Args:
+        filename: The JSON file storing a set of credentials
+        client_id: The client_id for the credential
+        user_agent: The user agent for the credential
+        scope: string or iterable of strings, Scope(s) being requested
+        warn_on_readonly: if True, log a warning if the store is readonly
+
+    Returns:
+        An object derived from client.Storage for getting/setting the
+        credential.
+    """
+    # Recreate the legacy key with these specific parameters
+    key = {'clientId': client_id, 'userAgent': user_agent,
+           'scope': util.scopes_to_string(scope)}
+    return get_credential_storage_custom_key(
+      filename, key, warn_on_readonly=warn_on_readonly)
+
+
+@util.positional(2)
+def get_credential_storage_custom_string_key(filename, key_string,
+                                             warn_on_readonly=True):
+    """Get a Storage instance for a credential using a single string as a key.
+
+    Allows you to provide a string as a custom key that will be used for
+    credential storage and retrieval.
+
+    Args:
+        filename: The JSON file storing a set of credentials
+        key_string: A string to use as the key for storing this credential.
+        warn_on_readonly: if True, log a warning if the store is readonly
+
+    Returns:
+        An object derived from client.Storage for getting/setting the
+        credential.
+    """
+    # Create a key dictionary that can be used
+    key_dict = {'key': key_string}
+    return get_credential_storage_custom_key(
+      filename, key_dict, warn_on_readonly=warn_on_readonly)
+
+
+@util.positional(2)
+def get_credential_storage_custom_key(filename, key_dict,
+                                      warn_on_readonly=True):
+    """Get a Storage instance for a credential using a dictionary as a key.
+
+    Allows you to provide a dictionary as a custom key that will be used for
+    credential storage and retrieval.
+
+    Args:
+        filename: The JSON file storing a set of credentials
+        key_dict: A dictionary to use as the key for storing this credential.
+                  There is no ordering of the keys in the dictionary. Logically
+                  equivalent dictionaries will produce equivalent storage keys.
+        warn_on_readonly: if True, log a warning if the store is readonly
+
+    Returns:
+        An object derived from client.Storage for getting/setting the
+        credential.
+    """
+    multistore = _get_multistore(filename, warn_on_readonly=warn_on_readonly)
+    key = util.dict_to_tuple_key(key_dict)
+    return multistore._get_storage(key)
+
+
+@util.positional(1)
+def get_all_credential_keys(filename, warn_on_readonly=True):
+    """Gets all the registered credential keys in the given Multistore.
+
+    Args:
+        filename: The JSON file storing a set of credentials
+        warn_on_readonly: if True, log a warning if the store is readonly
+
+    Returns:
+        A list of the credential keys present in the file.  They are returned
+        as dictionaries that can be passed into
+        get_credential_storage_custom_key to get the actual credentials.
+    """
+    multistore = _get_multistore(filename, warn_on_readonly=warn_on_readonly)
+    multistore._lock()
+    try:
+        return multistore._get_all_credential_keys()
+    finally:
+        multistore._unlock()
+
+
+@util.positional(1)
+def _get_multistore(filename, warn_on_readonly=True):
+    """A helper method to initialize the multistore with proper locking.
+
+    Args:
+        filename: The JSON file storing a set of credentials
+        warn_on_readonly: if True, log a warning if the store is readonly
+
+    Returns:
+        A multistore object
+    """
+    filename = os.path.expanduser(filename)
+    _multistores_lock.acquire()
+    try:
+        multistore = _multistores.setdefault(
+            filename, _MultiStore(filename, warn_on_readonly=warn_on_readonly))
+    finally:
+        _multistores_lock.release()
+    return multistore
+
+
+class _MultiStore(object):
+    """A file backed store for multiple credentials."""
+
+    @util.positional(2)
+    def __init__(self, filename, warn_on_readonly=True):
+        """Initialize the class.
+
+        This will create the file if necessary.
+        """
+        self._file = LockedFile(filename, 'r+', 'r')
+        self._thread_lock = threading.Lock()
+        self._read_only = False
+        self._warn_on_readonly = warn_on_readonly
+
+        self._create_file_if_needed()
+
+        # Cache of deserialized store. This is only valid after the
+        # _MultiStore is locked or _refresh_data_cache is called. This is
+        # of the form of:
+        #
+        # ((key, value), (key, value)...) -> OAuth2Credential
+        #
+        # If this is None, then the store hasn't been read yet.
+        self._data = None
+
+    class _Storage(BaseStorage):
+        """A Storage object that can read/write a single credential."""
+
+        def __init__(self, multistore, key):
+            self._multistore = multistore
+            self._key = key
+
+        def acquire_lock(self):
+            """Acquires any lock necessary to access this Storage.
+
+            This lock is not reentrant.
+            """
+            self._multistore._lock()
+
+        def release_lock(self):
+            """Release the Storage lock.
+
+            Trying to release a lock that isn't held will result in a
+            RuntimeError.
+            """
+            self._multistore._unlock()
+
+        def locked_get(self):
+            """Retrieve credential.
+
+            The Storage lock must be held when this is called.
+
+            Returns:
+                oauth2client.client.Credentials
+            """
+            credential = self._multistore._get_credential(self._key)
+            if credential:
+                credential.set_store(self)
+            return credential
+
+        def locked_put(self, credentials):
+            """Write a credential.
+
+            The Storage lock must be held when this is called.
+
+            Args:
+                credentials: Credentials, the credentials to store.
+            """
+            self._multistore._update_credential(self._key, credentials)
+
+        def locked_delete(self):
+            """Delete a credential.
+
+            The Storage lock must be held when this is called.
+            """
+            self._multistore._delete_credential(self._key)
+
+    def _create_file_if_needed(self):
+        """Create an empty file if necessary.
+
+        This method will not initialize the file. Instead it implements a
+        simple version of "touch" to ensure the file has been created.
+        """
+        if not os.path.exists(self._file.filename()):
+            old_umask = os.umask(0o177)
+            try:
+                open(self._file.filename(), 'a+b').close()
+            finally:
+                os.umask(old_umask)
+
+    def _lock(self):
+        """Lock the entire multistore."""
+        self._thread_lock.acquire()
+        try:
+            self._file.open_and_lock()
+        except IOError as e:
+            if e.errno == errno.ENOSYS:
+                logger.warn('File system does not support locking the '
+                            'credentials file.')
+            elif e.errno == errno.ENOLCK:
+                logger.warn('File system is out of resources for writing the '
+                            'credentials file (is your disk full?).')
+            elif e.errno == errno.EDEADLK:
+                logger.warn('Lock contention on multistore file, opening '
+                            'in read-only mode.')
+            else:
+                raise
+        if not self._file.is_locked():
+            self._read_only = True
+            if self._warn_on_readonly:
+                logger.warn('The credentials file (%s) is not writable. '
+                            'Opening in read-only mode. Any refreshed '
+                            'credentials will only be '
+                            'valid for this run.', self._file.filename())
+        if os.path.getsize(self._file.filename()) == 0:
+            logger.debug('Initializing empty multistore file')
+            # The multistore is empty so write out an empty file.
+            self._data = {}
+            self._write()
+        elif not self._read_only or self._data is None:
+            # Only refresh the data if we are read/write or we haven't
+            # cached the data yet. If we are readonly, we assume it isn't
+            # changing out from under us and that we only have to read it
+            # once. This prevents us from whacking any new access keys that
+            # we have cached in memory but were unable to write out.
+            self._refresh_data_cache()
+
+    def _unlock(self):
+        """Release the lock on the multistore."""
+        self._file.unlock_and_close()
+        self._thread_lock.release()
+
+    def _locked_json_read(self):
+        """Get the raw content of the multistore file.
+
+        The multistore must be locked when this is called.
+
+        Returns:
+            The contents of the multistore decoded as JSON.
+        """
+        assert self._thread_lock.locked()
+        self._file.file_handle().seek(0)
+        return json.load(self._file.file_handle())
+
+    def _locked_json_write(self, data):
+        """Write a JSON serializable data structure to the multistore.
+
+        The multistore must be locked when this is called.
+
+        Args:
+            data: The data to be serialized and written.
+        """
+        assert self._thread_lock.locked()
+        if self._read_only:
+            return
+        self._file.file_handle().seek(0)
+        json.dump(data, self._file.file_handle(),
+                  sort_keys=True, indent=2, separators=(',', ': '))
+        self._file.file_handle().truncate()
+
+    def _refresh_data_cache(self):
+        """Refresh the contents of the multistore.
+
+        The multistore must be locked when this is called.
+
+        Raises:
+            NewerCredentialStoreError: Raised when a newer client has written
+            the store.
+        """
+        self._data = {}
+        try:
+            raw_data = self._locked_json_read()
+        except Exception:
+            logger.warn('Credential data store could not be loaded. '
+                        'Will ignore and overwrite.')
+            return
+
+        version = 0
+        try:
+            version = raw_data['file_version']
+        except Exception:
+            logger.warn('Missing version for credential data store. It may be '
+                        'corrupt or an old version. Overwriting.')
+        if version > 1:
+            raise NewerCredentialStoreError(
+                'Credential file has file_version of %d. '
+                'Only file_version of 1 is supported.' % version)
+
+        credentials = []
+        try:
+            credentials = raw_data['data']
+        except (TypeError, KeyError):
+            pass
+
+        for cred_entry in credentials:
+            try:
+                key, credential = self._decode_credential_from_json(cred_entry)
+                self._data[key] = credential
+            except:
+                # If something goes wrong loading a credential, just ignore it
+                logger.info('Error decoding credential, skipping',
+                            exc_info=True)
+
+    def _decode_credential_from_json(self, cred_entry):
+        """Load a credential from our JSON serialization.
+
+        Args:
+            cred_entry: A dict entry from the data member of our format
+
+        Returns:
+            (key, cred) where the key is the key tuple and the cred is the
+            OAuth2Credential object.
+        """
+        raw_key = cred_entry['key']
+        key = util.dict_to_tuple_key(raw_key)
+        credential = None
+        credential = Credentials.new_from_json(
+            json.dumps(cred_entry['credential']))
+        return (key, credential)
+
+    def _write(self):
+        """Write the cached data back out.
+
+        The multistore must be locked.
+        """
+        raw_data = {'file_version': 1}
+        raw_creds = []
+        raw_data['data'] = raw_creds
+        for (cred_key, cred) in self._data.items():
+            raw_key = dict(cred_key)
+            raw_cred = json.loads(cred.to_json())
+            raw_creds.append({'key': raw_key, 'credential': raw_cred})
+        self._locked_json_write(raw_data)
+
+    def _get_all_credential_keys(self):
+        """Gets all the registered credential keys in the multistore.
+
+        Returns:
+            A list of dictionaries corresponding to all the keys currently
+            registered
+        """
+        return [dict(key) for key in self._data.keys()]
+
+    def _get_credential(self, key):
+        """Get a credential from the multistore.
+
+        The multistore must be locked.
+
+        Args:
+            key: The key used to retrieve the credential
+
+        Returns:
+            The credential specified or None if not present
+        """
+        return self._data.get(key, None)
+
+    def _update_credential(self, key, cred):
+        """Update a credential and write the multistore.
+
+        This must be called when the multistore is locked.
+
+        Args:
+            key: The key used to retrieve the credential
+            cred: The OAuth2Credential to update/set
+        """
+        self._data[key] = cred
+        self._write()
+
+    def _delete_credential(self, key):
+        """Delete a credential and write the multistore.
+
+        This must be called when the multistore is locked.
+
+        Args:
+            key: The key used to retrieve the credential
+        """
+        try:
+            del self._data[key]
+        except KeyError:
+            pass
+        self._write()
+
+    def _get_storage(self, key):
+        """Get a Storage object to get/set a credential.
+
+        This Storage is a 'view' into the multistore.
+
+        Args:
+            key: The key used to retrieve the credential
+
+        Returns:
+            A Storage object that can be used to get/set this cred
+        """
+        return self._Storage(self, key)
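+
+
+# Illustrative usage sketch (not part of upstream oauth2client). A caller
+# typically asks for a Storage keyed by client id, user agent and scope and
+# then gets/puts credentials through it. The store path, client id, agent
+# and scope below are hypothetical placeholders.
+#
+#     from oauth2client import multistore_file
+#
+#     storage = multistore_file.get_credential_storage(
+#         '/tmp/credentials.json',                 # hypothetical store path
+#         client_id='example-client-id',
+#         user_agent='example-agent/1.0',
+#         scope='https://www.googleapis.com/auth/userinfo.email')
+#     credential = storage.get()                   # None if nothing stored yet
+#     if credential is not None and not credential.invalid:
+#         pass                                     # e.g. credential.authorize(http)
+#
+#     # Enumerate every key currently registered in the store:
+#     keys = multistore_file.get_all_credential_keys('/tmp/credentials.json')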
diff --git a/utils/frozen_chromite/third_party/oauth2client/service_account.py b/utils/frozen_chromite/third_party/oauth2client/service_account.py
new file mode 100644
index 0000000..8d3dc65
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/service_account.py
@@ -0,0 +1,133 @@
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A service account credentials class.
+
+This credentials class is implemented on top of rsa library.
+"""
+
+import base64
+import time
+
+from pyasn1.codec.ber import decoder
+from pyasn1_modules.rfc5208 import PrivateKeyInfo
+import rsa
+
+from oauth2client import GOOGLE_REVOKE_URI
+from oauth2client import GOOGLE_TOKEN_URI
+from oauth2client._helpers import _json_encode
+from oauth2client._helpers import _to_bytes
+from oauth2client._helpers import _urlsafe_b64encode
+from oauth2client import util
+from oauth2client.client import AssertionCredentials
+
+
+class _ServiceAccountCredentials(AssertionCredentials):
+    """Class representing a service account (signed JWT) credential."""
+
+    MAX_TOKEN_LIFETIME_SECS = 3600  # 1 hour in seconds
+
+    def __init__(self, service_account_id, service_account_email,
+                 private_key_id, private_key_pkcs8_text, scopes,
+                 user_agent=None, token_uri=GOOGLE_TOKEN_URI,
+                 revoke_uri=GOOGLE_REVOKE_URI, **kwargs):
+
+        super(_ServiceAccountCredentials, self).__init__(
+            None, user_agent=user_agent, token_uri=token_uri,
+            revoke_uri=revoke_uri)
+
+        self._service_account_id = service_account_id
+        self._service_account_email = service_account_email
+        self._private_key_id = private_key_id
+        self._private_key = _get_private_key(private_key_pkcs8_text)
+        self._private_key_pkcs8_text = private_key_pkcs8_text
+        self._scopes = util.scopes_to_string(scopes)
+        self._user_agent = user_agent
+        self._token_uri = token_uri
+        self._revoke_uri = revoke_uri
+        self._kwargs = kwargs
+
+    def _generate_assertion(self):
+        """Generate the assertion that will be used in the request."""
+
+        header = {
+            'alg': 'RS256',
+            'typ': 'JWT',
+            'kid': self._private_key_id
+        }
+
+        now = int(time.time())
+        payload = {
+            'aud': self._token_uri,
+            'scope': self._scopes,
+            'iat': now,
+            'exp': now + _ServiceAccountCredentials.MAX_TOKEN_LIFETIME_SECS,
+            'iss': self._service_account_email
+        }
+        payload.update(self._kwargs)
+
+        first_segment = _urlsafe_b64encode(_json_encode(header))
+        second_segment = _urlsafe_b64encode(_json_encode(payload))
+        assertion_input = first_segment + b'.' + second_segment
+
+        # Sign the assertion.
+        rsa_bytes = rsa.pkcs1.sign(assertion_input, self._private_key,
+                                   'SHA-256')
+        signature = base64.urlsafe_b64encode(rsa_bytes).rstrip(b'=')
+
+        return assertion_input + b'.' + signature
+
+    def sign_blob(self, blob):
+        # Ensure that it is bytes
+        blob = _to_bytes(blob, encoding='utf-8')
+        return (self._private_key_id,
+                rsa.pkcs1.sign(blob, self._private_key, 'SHA-256'))
+
+    @property
+    def service_account_email(self):
+        return self._service_account_email
+
+    @property
+    def serialization_data(self):
+        return {
+            'type': 'service_account',
+            'client_id': self._service_account_id,
+            'client_email': self._service_account_email,
+            'private_key_id': self._private_key_id,
+            'private_key': self._private_key_pkcs8_text
+        }
+
+    def create_scoped_required(self):
+        return not self._scopes
+
+    def create_scoped(self, scopes):
+        return _ServiceAccountCredentials(self._service_account_id,
+                                          self._service_account_email,
+                                          self._private_key_id,
+                                          self._private_key_pkcs8_text,
+                                          scopes,
+                                          user_agent=self._user_agent,
+                                          token_uri=self._token_uri,
+                                          revoke_uri=self._revoke_uri,
+                                          **self._kwargs)
+
+
+def _get_private_key(private_key_pkcs8_text):
+    """Get an RSA private key object from a pkcs8 representation."""
+    private_key_pkcs8_text = _to_bytes(private_key_pkcs8_text)
+    der = rsa.pem.load_pem(private_key_pkcs8_text, 'PRIVATE KEY')
+    asn1_private_key, _ = decoder.decode(der, asn1Spec=PrivateKeyInfo())
+    return rsa.PrivateKey.load_pkcs1(
+        asn1_private_key.getComponentByName('privateKey').asOctets(),
+        format='DER')
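+
+
+# Illustrative usage sketch (not part of upstream oauth2client). The class
+# above is normally built from the fields of a service-account JSON key; all
+# values below are hypothetical placeholders.
+#
+#     creds = _ServiceAccountCredentials(
+#         service_account_id='1234567890',
+#         service_account_email='robot@example.iam.gserviceaccount.com',
+#         private_key_id='abcdef0123456789',
+#         private_key_pkcs8_text=open('key.pem').read(),   # PKCS#8 PEM text
+#         scopes=['https://www.googleapis.com/auth/devstorage.read_only'])
+#     # creds.authorize(httplib2.Http()) would then attach a self-issued JWT
+#     # assertion (see _generate_assertion above) to outgoing requests.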
diff --git a/utils/frozen_chromite/third_party/oauth2client/tools.py b/utils/frozen_chromite/third_party/oauth2client/tools.py
new file mode 100644
index 0000000..629866b
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/tools.py
@@ -0,0 +1,244 @@
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Command-line tools for authenticating via OAuth 2.0
+
+Do the OAuth 2.0 Web Server dance for a command line application. Stores the
+generated credentials in a common file that is used by other example apps in
+the same directory.
+"""
+
+from __future__ import print_function
+
+import logging
+import socket
+import sys
+
+from six.moves import BaseHTTPServer
+from six.moves import urllib
+from six.moves import input
+
+from oauth2client import client
+from oauth2client import util
+
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__all__ = ['argparser', 'run_flow', 'message_if_missing']
+
+_CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0
+
+To make this sample run you will need to populate the client_secrets.json file
+found at:
+
+   %s
+
+with information from the APIs Console <https://code.google.com/apis/console>.
+
+"""
+
+
+def _CreateArgumentParser():
+    try:
+        import argparse
+    except ImportError:
+        return None
+    parser = argparse.ArgumentParser(add_help=False)
+    parser.add_argument('--auth_host_name', default='localhost',
+                        help='Hostname when running a local web server.')
+    parser.add_argument('--noauth_local_webserver', action='store_true',
+                        default=False, help='Do not run a local web server.')
+    parser.add_argument('--auth_host_port', default=[8080, 8090], type=int,
+                        nargs='*', help='Port web server should listen on.')
+    parser.add_argument(
+        '--logging_level', default='ERROR',
+        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
+        help='Set the logging level of detail.')
+    return parser
+
+# argparser is an ArgumentParser that contains command-line options expected
+# by tools.run_flow(). Pass it in as part of the 'parents' argument to your own
+# ArgumentParser.
+argparser = _CreateArgumentParser()
+
+
+class ClientRedirectServer(BaseHTTPServer.HTTPServer):
+    """A server to handle OAuth 2.0 redirects back to localhost.
+
+    Waits for a single request and parses the query parameters
+    into query_params and then stops serving.
+    """
+    query_params = {}
+
+
+class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+    """A handler for OAuth 2.0 redirects back to localhost.
+
+    Waits for a single request and parses the query parameters
+    into the servers query_params and then stops serving.
+    """
+
+    def do_GET(self):
+        """Handle a GET request.
+
+        Parses the query parameters and prints a message
+        if the flow has completed. Note that we can't detect
+        if an error occurred.
+        """
+        self.send_response(200)
+        self.send_header("Content-type", "text/html")
+        self.end_headers()
+        query = self.path.split('?', 1)[-1]
+        query = dict(urllib.parse.parse_qsl(query))
+        self.server.query_params = query
+        self.wfile.write(
+            b"<html><head><title>Authentication Status</title></head>")
+        self.wfile.write(
+            b"<body><p>The authentication flow has completed.</p>")
+        self.wfile.write(b"</body></html>")
+
+    def log_message(self, format, *args):
+        """Do not log messages to stdout while running as cmd. line program."""
+
+
+@util.positional(3)
+def run_flow(flow, storage, flags, http=None):
+    """Core code for a command-line application.
+
+    The ``run_flow()`` function is called from your application and runs
+    through all the steps to obtain credentials. It takes a ``Flow``
+    argument and attempts to open an authorization server page in the
+    user's default web browser. The server asks the user to grant your
+    application access to the user's data. If the user grants access,
+    the ``run_flow()`` function returns new credentials. The new credentials
+    are also stored in the ``storage`` argument, which updates the file
+    associated with the ``Storage`` object.
+
+    It presumes it is run from a command-line application and supports the
+    following flags:
+
+        ``--auth_host_name`` (string, default: ``localhost``)
+           Host name to use when running a local web server to handle
+           redirects during OAuth authorization.
+
+        ``--auth_host_port`` (integer, default: ``[8080, 8090]``)
+           Port to use when running a local web server to handle redirects
+           during OAuth authorization. Repeat this option to specify a list
+           of values.
+
+        ``--[no]auth_local_webserver`` (boolean, default: ``True``)
+           Run a local web server to handle redirects during OAuth
+           authorization.
+
+    The tools module defines an ``ArgumentParser`` that already contains the
+    flag definitions that ``run_flow()`` requires. You can pass that
+    ``ArgumentParser`` to your ``ArgumentParser`` constructor::
+
+        parser = argparse.ArgumentParser(
+            description=__doc__,
+            formatter_class=argparse.RawDescriptionHelpFormatter,
+            parents=[tools.argparser])
+        flags = parser.parse_args(argv)
+
+    Args:
+        flow: Flow, an OAuth 2.0 Flow to step through.
+        storage: Storage, a ``Storage`` to store the credential in.
+        flags: ``argparse.Namespace``, The command-line flags. This is the
+               object returned from calling ``parse_args()`` on
+               ``argparse.ArgumentParser`` as described above.
+        http: An instance of ``httplib2.Http.request`` or something that
+              acts like it.
+
+    Returns:
+        Credentials, the obtained credential.
+    """
+    logging.getLogger().setLevel(getattr(logging, flags.logging_level))
+    if not flags.noauth_local_webserver:
+        success = False
+        port_number = 0
+        for port in flags.auth_host_port:
+            port_number = port
+            try:
+                httpd = ClientRedirectServer((flags.auth_host_name, port),
+                                             ClientRedirectHandler)
+            except socket.error:
+                pass
+            else:
+                success = True
+                break
+        flags.noauth_local_webserver = not success
+        if not success:
+            print('Failed to start a local webserver listening '
+                  'on either port 8080')
+            print('or port 8090. Please check your firewall settings and locally')
+            print('running programs that may be blocking or using those ports.')
+            print()
+            print('Falling back to --noauth_local_webserver and continuing with')
+            print('authorization.')
+            print()
+
+    if not flags.noauth_local_webserver:
+        oauth_callback = 'http://%s:%s/' % (flags.auth_host_name, port_number)
+    else:
+        oauth_callback = client.OOB_CALLBACK_URN
+    flow.redirect_uri = oauth_callback
+    authorize_url = flow.step1_get_authorize_url()
+
+    if not flags.noauth_local_webserver:
+        import webbrowser
+        webbrowser.open(authorize_url, new=1, autoraise=True)
+        print('Your browser has been opened to visit:')
+        print()
+        print('    ' + authorize_url)
+        print()
+        print('If your browser is on a different machine then '
+              'exit and re-run this')
+        print('application with the command-line parameter ')
+        print()
+        print('  --noauth_local_webserver')
+        print()
+    else:
+        print('Go to the following link in your browser:')
+        print()
+        print('    ' + authorize_url)
+        print()
+
+    code = None
+    if not flags.noauth_local_webserver:
+        httpd.handle_request()
+        if 'error' in httpd.query_params:
+            sys.exit('Authentication request was rejected.')
+        if 'code' in httpd.query_params:
+            code = httpd.query_params['code']
+        else:
+            print('Failed to find "code" in the query parameters '
+                  'of the redirect.')
+            sys.exit('Try running with --noauth_local_webserver.')
+    else:
+        code = input('Enter verification code: ').strip()
+
+    try:
+        credential = flow.step2_exchange(code, http=http)
+    except client.FlowExchangeError as e:
+        sys.exit('Authentication has failed: %s' % e)
+
+    storage.put(credential)
+    credential.set_store(storage)
+    print('Authentication successful.')
+
+    return credential
+
+
+def message_if_missing(filename):
+    """Helpful message to display if the CLIENT_SECRETS file is missing."""
+    return _CLIENT_SECRETS_MESSAGE % filename
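+
+
+# Illustrative usage sketch (not part of upstream oauth2client): wiring
+# run_flow() into a small CLI, following the docstring above. The client id,
+# secret, scope and store path are hypothetical placeholders.
+#
+#     import argparse
+#     from oauth2client import client, multistore_file, tools
+#
+#     parser = argparse.ArgumentParser(parents=[tools.argparser])
+#     flags = parser.parse_args()
+#     flow = client.OAuth2WebServerFlow(
+#         client_id='example-id', client_secret='example-secret',
+#         scope='https://www.googleapis.com/auth/userinfo.email')
+#     storage = multistore_file.get_credential_storage(
+#         '/tmp/credentials.json', 'example-id', 'example-agent/1.0',
+#         scope='https://www.googleapis.com/auth/userinfo.email')
+#     credentials = tools.run_flow(flow, storage, flags)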
diff --git a/utils/frozen_chromite/third_party/oauth2client/util.py b/utils/frozen_chromite/third_party/oauth2client/util.py
new file mode 100644
index 0000000..1150e2b
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/util.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Common utility library."""
+
+import functools
+import inspect
+import logging
+
+import six
+from six.moves import urllib
+
+
+__author__ = [
+    'rafek@google.com (Rafe Kaplan)',
+    'guido@google.com (Guido van Rossum)',
+]
+
+__all__ = [
+    'positional',
+    'POSITIONAL_WARNING',
+    'POSITIONAL_EXCEPTION',
+    'POSITIONAL_IGNORE',
+]
+
+logger = logging.getLogger(__name__)
+
+POSITIONAL_WARNING = 'WARNING'
+POSITIONAL_EXCEPTION = 'EXCEPTION'
+POSITIONAL_IGNORE = 'IGNORE'
+POSITIONAL_SET = frozenset([POSITIONAL_WARNING, POSITIONAL_EXCEPTION,
+                            POSITIONAL_IGNORE])
+
+positional_parameters_enforcement = POSITIONAL_WARNING
+
+
+def positional(max_positional_args):
+    """A decorator to declare that only the first N arguments my be positional.
+
+    This decorator makes it easy to support Python 3 style keyword-only
+    parameters. For example, in Python 3 it is possible to write::
+
+        def fn(pos1, *, kwonly1=None, kwonly2=None):
+            ...
+
+    All named parameters after ``*`` must be a keyword::
+
+        fn(10, 'kw1', 'kw2')  # Raises exception.
+        fn(10, kwonly1='kw1')  # Ok.
+
+    Example
+    ^^^^^^^
+
+    To define a function like above, do::
+
+        @positional(1)
+        def fn(pos1, kwonly1=None, kwonly2=None):
+            ...
+
+    If no default value is provided to a keyword argument, it becomes a
+    required keyword argument::
+
+        @positional(0)
+        def fn(required_kw):
+            ...
+
+    This must be called with the keyword parameter::
+
+        fn()  # Raises exception.
+        fn(10)  # Raises exception.
+        fn(required_kw=10)  # Ok.
+
+    When defining instance or class methods always remember to account for
+    ``self`` and ``cls``::
+
+        class MyClass(object):
+
+            @positional(2)
+            def my_method(self, pos1, kwonly1=None):
+                ...
+
+            @classmethod
+            @positional(2)
+            def my_method(cls, pos1, kwonly1=None):
+                ...
+
+    The positional decorator behavior is controlled by
+    ``util.positional_parameters_enforcement``, which may be set to
+    ``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or
+    ``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do
+    nothing, respectively, if a declaration is violated.
+
+    Args:
+        max_positional_args: Maximum number of positional arguments. All
+                             parameters after this index must be keyword
+                             only.
+
+    Returns:
+        A decorator that prevents using arguments after max_positional_args
+        from being used as positional parameters.
+
+    Raises:
+        TypeError: if a keyword-only argument is provided as a positional
+                   parameter, but only if
+                   util.positional_parameters_enforcement is set to
+                   POSITIONAL_EXCEPTION.
+    """
+
+    def positional_decorator(wrapped):
+        @functools.wraps(wrapped)
+        def positional_wrapper(*args, **kwargs):
+            if len(args) > max_positional_args:
+                plural_s = ''
+                if max_positional_args != 1:
+                    plural_s = 's'
+                message = ('%s() takes at most %d positional '
+                           'argument%s (%d given)' % (
+                               wrapped.__name__, max_positional_args,
+                               plural_s, len(args)))
+                if positional_parameters_enforcement == POSITIONAL_EXCEPTION:
+                    raise TypeError(message)
+                elif positional_parameters_enforcement == POSITIONAL_WARNING:
+                    logger.warning(message)
+                else:  # IGNORE
+                    pass
+            return wrapped(*args, **kwargs)
+        return positional_wrapper
+
+    if isinstance(max_positional_args, six.integer_types):
+        return positional_decorator
+    else:
+        args, _, _, defaults = inspect.getargspec(max_positional_args)
+        return positional(len(args) - len(defaults))(max_positional_args)
+
+
+def scopes_to_string(scopes):
+    """Converts scope value to a string.
+
+    If scopes is a string then it is simply passed through. If scopes is an
+    iterable then a string is returned that is all the individual scopes
+    concatenated with spaces.
+
+    Args:
+        scopes: string or iterable of strings, the scopes.
+
+    Returns:
+        The scopes formatted as a single string.
+    """
+    if isinstance(scopes, six.string_types):
+        return scopes
+    else:
+        return ' '.join(scopes)
+
+
+def string_to_scopes(scopes):
+    """Converts stringifed scope value to a list.
+
+    If scopes is a list then it is simply passed through. If scopes is an
+    string then a list of each individual scope is returned.
+
+    Args:
+        scopes: a string or iterable of strings, the scopes.
+
+    Returns:
+        The scopes in a list.
+    """
+    if not scopes:
+        return []
+    if isinstance(scopes, six.string_types):
+        return scopes.split(' ')
+    else:
+        return scopes
+
+
+def dict_to_tuple_key(dictionary):
+    """Converts a dictionary to a tuple that can be used as an immutable key.
+
+    The resulting key is always sorted so that logically equivalent
+    dictionaries always produce an identical tuple for a key.
+
+    Args:
+        dictionary: the dictionary to use as the key.
+
+    Returns:
+        A tuple representing the dictionary in its naturally sorted ordering.
+    """
+    return tuple(sorted(dictionary.items()))
+
+
+def _add_query_parameter(url, name, value):
+    """Adds a query parameter to a url.
+
+    Replaces the current value if it already exists in the URL.
+
+    Args:
+        url: string, url to add the query parameter to.
+        name: string, query parameter name.
+        value: string, query parameter value.
+
+    Returns:
+        The updated URL. Does not update the url if value is None.
+    """
+    if value is None:
+        return url
+    else:
+        parsed = list(urllib.parse.urlparse(url))
+        q = dict(urllib.parse.parse_qsl(parsed[4]))
+        q[name] = value
+        parsed[4] = urllib.parse.urlencode(q)
+        return urllib.parse.urlunparse(parsed)
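+
+
+# Illustrative sketch (not part of upstream oauth2client): quick examples of
+# the helpers above, with hypothetical values.
+#
+#     scopes_to_string(['a', 'b'])         -> 'a b'
+#     string_to_scopes('a b')              -> ['a', 'b']
+#     dict_to_tuple_key({'b': 2, 'a': 1})  -> (('a', 1), ('b', 2))
+#
+#     @positional(1)
+#     def connect(host, port=None):        # port must be passed by keyword
+#         ...
+#
+#     connect('example.com', port=80)      # ok
+#     connect('example.com', 80)           # warns or raises, depending on
+#                                          # positional_parameters_enforcement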
diff --git a/utils/frozen_chromite/third_party/oauth2client/xsrfutil.py b/utils/frozen_chromite/third_party/oauth2client/xsrfutil.py
new file mode 100644
index 0000000..10bbe3f
--- /dev/null
+++ b/utils/frozen_chromite/third_party/oauth2client/xsrfutil.py
@@ -0,0 +1,107 @@
+#
+# Copyright 2014 the Melange authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper methods for creating & verifying XSRF tokens."""
+
+import base64
+import binascii
+import hmac
+import time
+
+from oauth2client._helpers import _to_bytes
+from oauth2client import util
+
+__authors__ = [
+    '"Doug Coker" <dcoker@google.com>',
+    '"Joe Gregorio" <jcgregorio@google.com>',
+]
+
+# Delimiter character
+DELIMITER = b':'
+
+# 1 hour in seconds
+DEFAULT_TIMEOUT_SECS = 60 * 60
+
+
+@util.positional(2)
+def generate_token(key, user_id, action_id='', when=None):
+    """Generates a URL-safe token for the given user, action, time tuple.
+
+    Args:
+        key: secret key to use.
+        user_id: the user ID of the authenticated user.
+        action_id: a string identifier of the action they requested
+                   authorization for.
+        when: the time in seconds since the epoch at which the user was
+              authorized for this action. If not set the current time is used.
+
+    Returns:
+        A string XSRF protection token.
+    """
+    digester = hmac.new(_to_bytes(key, encoding='utf-8'))
+    digester.update(_to_bytes(str(user_id), encoding='utf-8'))
+    digester.update(DELIMITER)
+    digester.update(_to_bytes(action_id, encoding='utf-8'))
+    digester.update(DELIMITER)
+    when = _to_bytes(str(when or int(time.time())), encoding='utf-8')
+    digester.update(when)
+    digest = digester.digest()
+
+    token = base64.urlsafe_b64encode(digest + DELIMITER + when)
+    return token
+
+
+@util.positional(3)
+def validate_token(key, token, user_id, action_id="", current_time=None):
+    """Validates that the given token authorizes the user for the action.
+
+    Tokens are invalid if the time of issue is too old or if the token
+    does not match what generate_token outputs (i.e. the token was forged).
+
+    Args:
+        key: secret key to use.
+        token: a string of the token generated by generate_token.
+        user_id: the user ID of the authenticated user.
+        action_id: a string identifier of the action they requested
+                   authorization for.
+        current_time: the time in seconds since the epoch to validate
+                      against. Defaults to the current time.
+
+    Returns:
+        A boolean - True if the user is authorized for the action, False
+        otherwise.
+    """
+    if not token:
+        return False
+    try:
+        decoded = base64.urlsafe_b64decode(token)
+        token_time = int(decoded.split(DELIMITER)[-1])
+    except (TypeError, ValueError, binascii.Error):
+        return False
+    if current_time is None:
+        current_time = time.time()
+    # If the token is too old it's not valid.
+    if current_time - token_time > DEFAULT_TIMEOUT_SECS:
+        return False
+
+    # The given token should match the generated one with the same time.
+    expected_token = generate_token(key, user_id, action_id=action_id,
+                                    when=token_time)
+    if len(token) != len(expected_token):
+        return False
+
+    # Perform constant time comparison to avoid timing attacks
+    different = 0
+    for x, y in zip(bytearray(token), bytearray(expected_token)):
+        different |= x ^ y
+    return not different
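+
+
+# Illustrative sketch (not part of upstream oauth2client): a token round trip
+# with hypothetical key/user values.
+#
+#     token = generate_token('secret-key', 42, action_id='delete')
+#     validate_token('secret-key', token, 42, action_id='delete')   # -> True
+#     # A different action id, or a token older than DEFAULT_TIMEOUT_SECS
+#     # (one hour), fails validation:
+#     validate_token('secret-key', token, 42, action_id='edit')     # -> False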
diff --git a/utils/frozen_chromite/third_party/python2/README.md b/utils/frozen_chromite/third_party/python2/README.md
new file mode 100644
index 0000000..f5dcac0
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python2/README.md
@@ -0,0 +1,5 @@
+This directory contains modules only for use in Python 2 (e.g.,
+httplib2 has separate versions for Python 2 vs. 3). Make sure to
+replicate the equivalent modules under `../python3/`.
+
+Remove this directory when Python 2 support is dropped.
diff --git a/utils/frozen_chromite/third_party/python2/httplib2/LICENSE b/utils/frozen_chromite/third_party/python2/httplib2/LICENSE
new file mode 100644
index 0000000..ae38286
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python2/httplib2/LICENSE
@@ -0,0 +1,23 @@
+Httplib2 Software License
+
+Copyright (c) 2006 by Joe Gregorio
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of the Software,
+and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/utils/frozen_chromite/third_party/python2/httplib2/README.chromium b/utils/frozen_chromite/third_party/python2/httplib2/README.chromium
new file mode 100644
index 0000000..534bd20
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python2/httplib2/README.chromium
@@ -0,0 +1,16 @@
+Name: httplib2
+Short Name: httplib2
+URL: https://github.com/httplib2/httplib2
+Version: 0.13.1
+Revision: cfba1201736e0060a9cb82eab73ad49988ab7416
+License: MIT License
+
+Description:
+A comprehensive HTTP client library in Python.
+
+Local Modifications:
+Individual versions for Python2/3 were separated and put under
+third_party/python{2,3}/httplib2. Test and packaging code stripped.
+
+Notes:
+Required by oauth2client library.
diff --git a/utils/frozen_chromite/third_party/python2/httplib2/__init__.py b/utils/frozen_chromite/third_party/python2/httplib2/__init__.py
new file mode 100644
index 0000000..2428e4b
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python2/httplib2/__init__.py
@@ -0,0 +1,2231 @@
+"""Small, fast HTTP client library for Python.
+
+Features persistent connections, cache, and Google App Engine Standard
+Environment support.
+"""
+
+from __future__ import print_function
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = [
+    "Thomas Broyer (t.broyer@ltgt.net)",
+    "James Antill",
+    "Xavier Verges Farrero",
+    "Jonathan Feinberg",
+    "Blair Zajac",
+    "Sam Ruby",
+    "Louis Nyffenegger",
+    "Alex Yu",
+]
+__license__ = "MIT"
+__version__ = '0.13.1'
+
+import base64
+import calendar
+import copy
+import email
+import email.FeedParser
+import email.Message
+import email.Utils
+import errno
+import gzip
+import httplib
+import os
+import random
+import re
+import StringIO
+import sys
+import time
+import urllib
+import urlparse
+import zlib
+
+try:
+    from hashlib import sha1 as _sha, md5 as _md5
+except ImportError:
+    # prior to Python 2.5, these were separate modules
+    import sha
+    import md5
+
+    _sha = sha.new
+    _md5 = md5.new
+import hmac
+from gettext import gettext as _
+import socket
+
+try:
+    from httplib2 import socks
+except ImportError:
+    try:
+        import socks
+    except (ImportError, AttributeError):
+        socks = None
+
+# Build the appropriate socket wrapper for ssl
+ssl = None
+ssl_SSLError = None
+ssl_CertificateError = None
+try:
+    import ssl  # python 2.6
+except ImportError:
+    pass
+if ssl is not None:
+    ssl_SSLError = getattr(ssl, "SSLError", None)
+    ssl_CertificateError = getattr(ssl, "CertificateError", None)
+
+
+def _ssl_wrap_socket(
+    sock, key_file, cert_file, disable_validation, ca_certs, ssl_version, hostname
+):
+    if disable_validation:
+        cert_reqs = ssl.CERT_NONE
+    else:
+        cert_reqs = ssl.CERT_REQUIRED
+    if ssl_version is None:
+        ssl_version = ssl.PROTOCOL_SSLv23
+
+    if hasattr(ssl, "SSLContext"):  # Python 2.7.9
+        context = ssl.SSLContext(ssl_version)
+        context.verify_mode = cert_reqs
+        context.check_hostname = cert_reqs != ssl.CERT_NONE
+        if cert_file:
+            context.load_cert_chain(cert_file, key_file)
+        if ca_certs:
+            context.load_verify_locations(ca_certs)
+        return context.wrap_socket(sock, server_hostname=hostname)
+    else:
+        return ssl.wrap_socket(
+            sock,
+            keyfile=key_file,
+            certfile=cert_file,
+            cert_reqs=cert_reqs,
+            ca_certs=ca_certs,
+            ssl_version=ssl_version,
+        )
+
+
+def _ssl_wrap_socket_unsupported(
+    sock, key_file, cert_file, disable_validation, ca_certs, ssl_version, hostname
+):
+    if not disable_validation:
+        raise CertificateValidationUnsupported(
+            "SSL certificate validation is not supported without "
+            "the ssl module installed. To avoid this error, install "
+            "the ssl module, or explicity disable validation."
+        )
+    ssl_sock = socket.ssl(sock, key_file, cert_file)
+    return httplib.FakeSocket(sock, ssl_sock)
+
+
+if ssl is None:
+    _ssl_wrap_socket = _ssl_wrap_socket_unsupported
+
+if sys.version_info >= (2, 3):
+    from iri2uri import iri2uri
+else:
+
+    def iri2uri(uri):
+        return uri
+
+
+def has_timeout(timeout):  # python 2.6
+    if hasattr(socket, "_GLOBAL_DEFAULT_TIMEOUT"):
+        return timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT
+    return timeout is not None
+
+
+__all__ = [
+    "Http",
+    "Response",
+    "ProxyInfo",
+    "HttpLib2Error",
+    "RedirectMissingLocation",
+    "RedirectLimit",
+    "FailedToDecompressContent",
+    "UnimplementedDigestAuthOptionError",
+    "UnimplementedHmacDigestAuthOptionError",
+    "debuglevel",
+    "ProxiesUnavailableError",
+]
+
+# The httplib debug level, set to a non-zero value to get debug output
+debuglevel = 0
+
+# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
+RETRIES = 2
+
+# Python 2.3 support
+if sys.version_info < (2, 4):
+
+    def sorted(seq):
+        seq.sort()
+        return seq
+
+
+# Python 2.3 support
+def HTTPResponse__getheaders(self):
+    """Return list of (header, value) tuples."""
+    if self.msg is None:
+        raise httplib.ResponseNotReady()
+    return self.msg.items()
+
+
+if not hasattr(httplib.HTTPResponse, "getheaders"):
+    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
+
+
+# All exceptions raised here derive from HttpLib2Error
+class HttpLib2Error(Exception):
+    pass
+
+
+# Some exceptions can be caught and optionally
+# be turned back into responses.
+class HttpLib2ErrorWithResponse(HttpLib2Error):
+    def __init__(self, desc, response, content):
+        self.response = response
+        self.content = content
+        HttpLib2Error.__init__(self, desc)
+
+
+class RedirectMissingLocation(HttpLib2ErrorWithResponse):
+    pass
+
+
+class RedirectLimit(HttpLib2ErrorWithResponse):
+    pass
+
+
+class FailedToDecompressContent(HttpLib2ErrorWithResponse):
+    pass
+
+
+class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse):
+    pass
+
+
+class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse):
+    pass
+
+
+class MalformedHeader(HttpLib2Error):
+    pass
+
+
+class RelativeURIError(HttpLib2Error):
+    pass
+
+
+class ServerNotFoundError(HttpLib2Error):
+    pass
+
+
+class ProxiesUnavailableError(HttpLib2Error):
+    pass
+
+
+class CertificateValidationUnsupported(HttpLib2Error):
+    pass
+
+
+class SSLHandshakeError(HttpLib2Error):
+    pass
+
+
+class NotSupportedOnThisPlatform(HttpLib2Error):
+    pass
+
+
+class CertificateHostnameMismatch(SSLHandshakeError):
+    def __init__(self, desc, host, cert):
+        HttpLib2Error.__init__(self, desc)
+        self.host = host
+        self.cert = cert
+
+
+class NotRunningAppEngineEnvironment(HttpLib2Error):
+    pass
+
+
+# Open Items:
+# -----------
+# Proxy support
+
+# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
+
+# Pluggable cache storage (supports storing the cache in
+#   flat files by default. We need a plug-in architecture
+#   that can support Berkeley DB and Squid)
+
+# == Known Issues ==
+# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
+# Does not handle Cache-Control: max-stale
+# Does not use Age: headers when calculating cache freshness.
+
+# The number of redirections to follow before giving up.
+# Note that only GET redirects are automatically followed.
+# Will also honor 301 requests by saving that info and never
+# requesting that URI again.
+DEFAULT_MAX_REDIRECTS = 5
+
+from httplib2 import certs
+CA_CERTS = certs.where()
+
+# Which headers are hop-by-hop headers by default
+HOP_BY_HOP = [
+    "connection",
+    "keep-alive",
+    "proxy-authenticate",
+    "proxy-authorization",
+    "te",
+    "trailers",
+    "transfer-encoding",
+    "upgrade",
+]
+
+
+def _get_end2end_headers(response):
+    hopbyhop = list(HOP_BY_HOP)
+    hopbyhop.extend([x.strip() for x in response.get("connection", "").split(",")])
+    return [header for header in response.keys() if header not in hopbyhop]
+
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+
+def parse_uri(uri):
+    """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    groups = URI.match(uri).groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+
+def urlnorm(uri):
+    (scheme, authority, path, query, fragment) = parse_uri(uri)
+    if not scheme or not authority:
+        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
+    authority = authority.lower()
+    scheme = scheme.lower()
+    if not path:
+        path = "/"
+    # Could do syntax based normalization of the URI before
+    # computing the digest. See Section 6.2.2 of Std 66.
+    request_uri = query and "?".join([path, query]) or path
+    scheme = scheme.lower()
+    defrag_uri = scheme + "://" + authority + request_uri
+    return scheme, authority, request_uri, defrag_uri
+
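+# Illustrative sketch (not part of upstream httplib2): what the two helpers
+# above return for a sample URI.
+#
+#     parse_uri('http://Example.COM/a/b?x=1#frag')
+#         -> ('http', 'Example.COM', '/a/b', 'x=1', 'frag')
+#     urlnorm('http://Example.COM/a/b?x=1#frag')
+#         -> ('http', 'example.com', '/a/b?x=1', 'http://example.com/a/b?x=1')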
+
+# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
+re_url_scheme = re.compile(r"^\w+://")
+re_unsafe = re.compile(r"[^\w\-_.()=!]+")
+
+
+def safename(filename):
+    """Return a filename suitable for the cache.
+    Strips dangerous and common characters to create a filename we
+    can use to store the cache in.
+    """
+    if isinstance(filename, str):
+        filename_bytes = filename
+        filename = filename.decode("utf-8")
+    else:
+        filename_bytes = filename.encode("utf-8")
+    filemd5 = _md5(filename_bytes).hexdigest()
+    filename = re_url_scheme.sub("", filename)
+    filename = re_unsafe.sub("", filename)
+
+    # limit length of filename (vital for Windows)
+    # https://github.com/httplib2/httplib2/pull/74
+    # C:\Users\    <username>    \AppData\Local\Temp\  <safe_filename>  ,   <md5>
+    #   9 chars + max 104 chars  +     20 chars      +       x       +  1  +  32  = max 259 chars
+    # Thus max safe filename x = 93 chars. Let it be 90 to make a round sum:
+    filename = filename[:90]
+
+    return ",".join((filename, filemd5))
+
+
+NORMALIZE_SPACE = re.compile(r"(?:\r\n)?[ \t]+")
+
+
+def _normalize_headers(headers):
+    return dict(
+        [
+            (key.lower(), NORMALIZE_SPACE.sub(value, " ").strip())
+            for (key, value) in headers.iteritems()
+        ]
+    )
+
+
+def _parse_cache_control(headers):
+    retval = {}
+    if "cache-control" in headers:
+        parts = headers["cache-control"].split(",")
+        parts_with_args = [
+            tuple([x.strip().lower() for x in part.split("=", 1)])
+            for part in parts
+            if -1 != part.find("=")
+        ]
+        parts_wo_args = [
+            (name.strip().lower(), 1) for name in parts if -1 == name.find("=")
+        ]
+        retval = dict(parts_with_args + parts_wo_args)
+    return retval
+
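+# Illustrative sketch (not part of upstream httplib2): directives with and
+# without values parse like this.
+#
+#     _parse_cache_control({'cache-control': 'max-age=3600, no-cache'})
+#         -> {'max-age': '3600', 'no-cache': 1}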
+
+# Whether to use a strict mode to parse WWW-Authenticate headers
+# Might lead to bad results in case of ill-formed header value,
+# so disabled by default, falling back to relaxed parsing.
+# Set to true to turn on, useful for testing servers.
+USE_WWW_AUTH_STRICT_PARSING = 0
+
+# In regex below:
+#    [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+             matches a "token" as defined by HTTP
+#    "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?"    matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
+# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
+#    \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
+WWW_AUTH_STRICT = re.compile(
+    r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$"
+)
+WWW_AUTH_RELAXED = re.compile(
+    r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$"
+)
+UNQUOTE_PAIRS = re.compile(r"\\(.)")
+
+
+def _parse_www_authenticate(headers, headername="www-authenticate"):
+    """Returns a dictionary of dictionaries, one dict
+    per auth_scheme."""
+    retval = {}
+    if headername in headers:
+        try:
+
+            authenticate = headers[headername].strip()
+            www_auth = (
+                USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
+            )
+            while authenticate:
+                # Break off the scheme at the beginning of the line
+                if headername == "authentication-info":
+                    (auth_scheme, the_rest) = ("digest", authenticate)
+                else:
+                    (auth_scheme, the_rest) = authenticate.split(" ", 1)
+                # Now loop over all the key value pairs that come after the scheme,
+                # being careful not to roll into the next scheme
+                match = www_auth.search(the_rest)
+                auth_params = {}
+                while match:
+                    if match and len(match.groups()) == 3:
+                        (key, value, the_rest) = match.groups()
+                        auth_params[key.lower()] = UNQUOTE_PAIRS.sub(
+                            r"\1", value
+                        )  # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
+                    match = www_auth.search(the_rest)
+                retval[auth_scheme.lower()] = auth_params
+                authenticate = the_rest.strip()
+
+        except ValueError:
+            raise MalformedHeader("WWW-Authenticate")
+    return retval
+
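+# Illustrative sketch (not part of upstream httplib2): each challenge scheme
+# becomes a lower-cased key mapping to its auth parameters.
+#
+#     _parse_www_authenticate({'www-authenticate': 'Basic realm="example"'})
+#         -> {'basic': {'realm': 'example'}}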
+
+# TODO: add current time as _entry_disposition argument to avoid sleep in tests
+def _entry_disposition(response_headers, request_headers):
+    """Determine freshness from the Date, Expires and Cache-Control headers.
+
+    We don't handle the following:
+
+    1. Cache-Control: max-stale
+    2. Age: headers are not used in the calculations.
+
+    Note that this algorithm is simpler than you might think
+    because we are operating as a private (non-shared) cache.
+    This lets us ignore 's-maxage'. We can also ignore
+    'proxy-invalidate' since we aren't a proxy.
+    As a design decision we will never return a stale document as
+    fresh, hence the non-implementation of 'max-stale'. This also
+    lets us safely ignore 'must-revalidate'
+    since we operate as if every server has sent 'must-revalidate'.
+    Since we are private we get to ignore both 'public' and
+    'private' parameters. We also ignore 'no-transform' since
+    we don't do any transformations.
+    The 'no-store' parameter is handled at a higher level.
+    So the only Cache-Control parameters we look at are:
+
+    no-cache
+    only-if-cached
+    max-age
+    min-fresh
+    """
+
+    retval = "STALE"
+    cc = _parse_cache_control(request_headers)
+    cc_response = _parse_cache_control(response_headers)
+
+    if (
+        "pragma" in request_headers
+        and request_headers["pragma"].lower().find("no-cache") != -1
+    ):
+        retval = "TRANSPARENT"
+        if "cache-control" not in request_headers:
+            request_headers["cache-control"] = "no-cache"
+    elif "no-cache" in cc:
+        retval = "TRANSPARENT"
+    elif "no-cache" in cc_response:
+        retval = "STALE"
+    elif "only-if-cached" in cc:
+        retval = "FRESH"
+    elif "date" in response_headers:
+        date = calendar.timegm(email.Utils.parsedate_tz(response_headers["date"]))
+        now = time.time()
+        current_age = max(0, now - date)
+        if "max-age" in cc_response:
+            try:
+                freshness_lifetime = int(cc_response["max-age"])
+            except ValueError:
+                freshness_lifetime = 0
+        elif "expires" in response_headers:
+            expires = email.Utils.parsedate_tz(response_headers["expires"])
+            if None == expires:
+                freshness_lifetime = 0
+            else:
+                freshness_lifetime = max(0, calendar.timegm(expires) - date)
+        else:
+            freshness_lifetime = 0
+        if "max-age" in cc:
+            try:
+                freshness_lifetime = int(cc["max-age"])
+            except ValueError:
+                freshness_lifetime = 0
+        if "min-fresh" in cc:
+            try:
+                min_fresh = int(cc["min-fresh"])
+            except ValueError:
+                min_fresh = 0
+            current_age += min_fresh
+        if freshness_lifetime > current_age:
+            retval = "FRESH"
+    return retval
+
+
+def _decompressContent(response, new_content):
+    content = new_content
+    try:
+        encoding = response.get("content-encoding", None)
+        if encoding in ["gzip", "deflate"]:
+            if encoding == "gzip":
+                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
+            if encoding == "deflate":
+                content = zlib.decompress(content, -zlib.MAX_WBITS)
+            response["content-length"] = str(len(content))
+            # Record the historical presence of the encoding in a way that won't interfere.
+            response["-content-encoding"] = response["content-encoding"]
+            del response["content-encoding"]
+    except (IOError, zlib.error):
+        content = ""
+        raise FailedToDecompressContent(
+            _("Content purported to be compressed with %s but failed to decompress.")
+            % response.get("content-encoding"),
+            response,
+            content,
+        )
+    return content
+
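+# Note for readers of this vendored copy: on success the body is transparently
+# inflated, "content-length" is rewritten to the decoded size and the original
+# coding is kept under "-content-encoding"; a corrupt body raises
+# FailedToDecompressContent instead of handing gzip/deflate bytes to the caller.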
+
+def _updateCache(request_headers, response_headers, content, cache, cachekey):
+    if cachekey:
+        cc = _parse_cache_control(request_headers)
+        cc_response = _parse_cache_control(response_headers)
+        if "no-store" in cc or "no-store" in cc_response:
+            cache.delete(cachekey)
+        else:
+            info = email.Message.Message()
+            for key, value in response_headers.iteritems():
+                if key not in ["status", "content-encoding", "transfer-encoding"]:
+                    info[key] = value
+
+            # Add annotations to the cache to indicate what headers
+            # are variant for this request.
+            vary = response_headers.get("vary", None)
+            if vary:
+                vary_headers = vary.lower().replace(" ", "").split(",")
+                for header in vary_headers:
+                    key = "-varied-%s" % header
+                    try:
+                        info[key] = request_headers[header]
+                    except KeyError:
+                        pass
+
+            status = response_headers.status
+            if status == 304:
+                status = 200
+
+            status_header = "status: %d\r\n" % status
+
+            header_str = info.as_string()
+
+            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
+            text = "".join([status_header, header_str, content])
+
+            cache.set(cachekey, text)
+
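+# Cache entry layout produced above (descriptive note, not an upstream
+# comment): each entry is the literal byte string
+#     "status: <code>\r\n" + <RFC 822 headers> + <entity body>
+# with 304 rewritten to 200 and "-varied-<header>" annotations recording the
+# request headers named by Vary, so Http.request() can re-check them later.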
+
+def _cnonce():
+    dig = _md5(
+        "%s:%s"
+        % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])
+    ).hexdigest()
+    return dig[:16]
+
+
+def _wsse_username_token(cnonce, iso_now, password):
+    return base64.b64encode(
+        _sha("%s%s%s" % (cnonce, iso_now, password)).digest()
+    ).strip()
+
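+# The WSSE password digest computed above is simply
+#     base64(sha1(cnonce + creation_timestamp + password))
+# and is consumed by WsseAuthentication further down (note added for this
+# vendored copy).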
+
+# For credentials we need two things. First,
+# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.).
+# Then we also need a list of URIs that have already demanded authentication
+# That list is tricky since sub-URIs can take the same auth, or the
+# auth scheme may change as you descend the tree.
+# So we also need each Auth instance to be able to tell us
+# how close to the 'top' it is.
+
+
+class Authentication(object):
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        self.path = path
+        self.host = host
+        self.credentials = credentials
+        self.http = http
+
+    def depth(self, request_uri):
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        return request_uri[len(self.path) :].count("/")
+
+    def inscope(self, host, request_uri):
+        # XXX Should we normalize the request_uri?
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        return (host == self.host) and path.startswith(self.path)
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header. Over-ride this in sub-classes."""
+        pass
+
+    def response(self, response, content):
+        """Gives us a chance to update with new nonces
+        or such returned from the last authorized response.
+        Over-ride this in sub-classes if necessary.
+
+        Return TRUE if the request is to be retried, for
+        example Digest may return stale=true.
+        """
+        return False
+
+
+class BasicAuthentication(Authentication):
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        Authentication.__init__(
+            self, credentials, host, request_uri, headers, response, content, http
+        )
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers["authorization"] = (
+            "Basic " + base64.b64encode("%s:%s" % self.credentials).strip()
+        )
+
+
+class DigestAuthentication(Authentication):
+    """Only do qop='auth' and MD5, since that
+    is all Apache currently implements"""
+
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        Authentication.__init__(
+            self, credentials, host, request_uri, headers, response, content, http
+        )
+        challenge = _parse_www_authenticate(response, "www-authenticate")
+        self.challenge = challenge["digest"]
+        qop = self.challenge.get("qop", "auth")
+        self.challenge["qop"] = (
+            ("auth" in [x.strip() for x in qop.split()]) and "auth" or None
+        )
+        if self.challenge["qop"] is None:
+            raise UnimplementedDigestAuthOptionError(
+                _("Unsupported value for qop: %s." % qop)
+            )
+        self.challenge["algorithm"] = self.challenge.get("algorithm", "MD5").upper()
+        if self.challenge["algorithm"] != "MD5":
+            raise UnimplementedDigestAuthOptionError(
+                _("Unsupported value for algorithm: %s." % self.challenge["algorithm"])
+            )
+        self.A1 = "".join(
+            [
+                self.credentials[0],
+                ":",
+                self.challenge["realm"],
+                ":",
+                self.credentials[1],
+            ]
+        )
+        self.challenge["nc"] = 1
+
+    def request(self, method, request_uri, headers, content, cnonce=None):
+        """Modify the request headers"""
+        H = lambda x: _md5(x).hexdigest()
+        KD = lambda s, d: H("%s:%s" % (s, d))
+        A2 = "".join([method, ":", request_uri])
+        self.challenge["cnonce"] = cnonce or _cnonce()
+        request_digest = '"%s"' % KD(
+            H(self.A1),
+            "%s:%s:%s:%s:%s"
+            % (
+                self.challenge["nonce"],
+                "%08x" % self.challenge["nc"],
+                self.challenge["cnonce"],
+                self.challenge["qop"],
+                H(A2),
+            ),
+        )
+        headers["authorization"] = (
+            'Digest username="%s", realm="%s", nonce="%s", '
+            'uri="%s", algorithm=%s, response=%s, qop=%s, '
+            'nc=%08x, cnonce="%s"'
+        ) % (
+            self.credentials[0],
+            self.challenge["realm"],
+            self.challenge["nonce"],
+            request_uri,
+            self.challenge["algorithm"],
+            request_digest,
+            self.challenge["qop"],
+            self.challenge["nc"],
+            self.challenge["cnonce"],
+        )
+        if self.challenge.get("opaque"):
+            headers["authorization"] += ', opaque="%s"' % self.challenge["opaque"]
+        self.challenge["nc"] += 1
+
+    def response(self, response, content):
+        if "authentication-info" not in response:
+            challenge = _parse_www_authenticate(response, "www-authenticate").get(
+                "digest", {}
+            )
+            if "true" == challenge.get("stale"):
+                self.challenge["nonce"] = challenge["nonce"]
+                self.challenge["nc"] = 1
+                return True
+        else:
+            updated_challenge = _parse_www_authenticate(
+                response, "authentication-info"
+            ).get("digest", {})
+
+            if "nextnonce" in updated_challenge:
+                self.challenge["nonce"] = updated_challenge["nextnonce"]
+                self.challenge["nc"] = 1
+        return False
+
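+# Example of the header emitted by DigestAuthentication.request() above, with
+# illustrative values only:
+#     Authorization: Digest username="bob", realm="example", nonce="...",
+#         uri="/protected", algorithm=MD5, response="<md5 hex>", qop=auth,
+#         nc=00000001, cnonce="..."
+# The nonce count ("nc") is incremented after every request so the server can
+# detect replays.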
+
+class HmacDigestAuthentication(Authentication):
+    """Adapted from Robert Sayre's code and DigestAuthentication above."""
+
+    __author__ = "Thomas Broyer (t.broyer@ltgt.net)"
+
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        Authentication.__init__(
+            self, credentials, host, request_uri, headers, response, content, http
+        )
+        challenge = _parse_www_authenticate(response, "www-authenticate")
+        self.challenge = challenge["hmacdigest"]
+        # TODO: self.challenge['domain']
+        self.challenge["reason"] = self.challenge.get("reason", "unauthorized")
+        if self.challenge["reason"] not in ["unauthorized", "integrity"]:
+            self.challenge["reason"] = "unauthorized"
+        self.challenge["salt"] = self.challenge.get("salt", "")
+        if not self.challenge.get("snonce"):
+            raise UnimplementedHmacDigestAuthOptionError(
+                _("The challenge doesn't contain a server nonce, or this one is empty.")
+            )
+        self.challenge["algorithm"] = self.challenge.get("algorithm", "HMAC-SHA-1")
+        if self.challenge["algorithm"] not in ["HMAC-SHA-1", "HMAC-MD5"]:
+            raise UnimplementedHmacDigestAuthOptionError(
+                _("Unsupported value for algorithm: %s." % self.challenge["algorithm"])
+            )
+        self.challenge["pw-algorithm"] = self.challenge.get("pw-algorithm", "SHA-1")
+        if self.challenge["pw-algorithm"] not in ["SHA-1", "MD5"]:
+            raise UnimplementedHmacDigestAuthOptionError(
+                _(
+                    "Unsupported value for pw-algorithm: %s."
+                    % self.challenge["pw-algorithm"]
+                )
+            )
+        if self.challenge["algorithm"] == "HMAC-MD5":
+            self.hashmod = _md5
+        else:
+            self.hashmod = _sha
+        if self.challenge["pw-algorithm"] == "MD5":
+            self.pwhashmod = _md5
+        else:
+            self.pwhashmod = _sha
+        self.key = "".join(
+            [
+                self.credentials[0],
+                ":",
+                self.pwhashmod.new(
+                    "".join([self.credentials[1], self.challenge["salt"]])
+                )
+                .hexdigest()
+                .lower(),
+                ":",
+                self.challenge["realm"],
+            ]
+        )
+        self.key = self.pwhashmod.new(self.key).hexdigest().lower()
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers"""
+        keys = _get_end2end_headers(headers)
+        keylist = "".join(["%s " % k for k in keys])
+        headers_val = "".join([headers[k] for k in keys])
+        created = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
+        cnonce = _cnonce()
+        request_digest = "%s:%s:%s:%s:%s" % (
+            method,
+            request_uri,
+            cnonce,
+            self.challenge["snonce"],
+            headers_val,
+        )
+        request_digest = (
+            hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
+        )
+        headers["authorization"] = (
+            'HMACDigest username="%s", realm="%s", snonce="%s",'
+            ' cnonce="%s", uri="%s", created="%s", '
+            'response="%s", headers="%s"'
+        ) % (
+            self.credentials[0],
+            self.challenge["realm"],
+            self.challenge["snonce"],
+            cnonce,
+            request_uri,
+            created,
+            request_digest,
+            keylist,
+        )
+
+    def response(self, response, content):
+        challenge = _parse_www_authenticate(response, "www-authenticate").get(
+            "hmacdigest", {}
+        )
+        if challenge.get("reason") in ["integrity", "stale"]:
+            return True
+        return False
+
+
+class WsseAuthentication(Authentication):
+    """This is thinly tested and should not be relied upon.
+    At this time there isn't any third party server to test against.
+    Blogger and TypePad implemented this algorithm at one point
+    but Blogger has since switched to Basic over HTTPS and
+    TypePad has implemented it wrong, by never issuing a 401
+    challenge but instead requiring your client to telepathically know that
+    their endpoint is expecting WSSE profile="UsernameToken"."""
+
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        Authentication.__init__(
+            self, credentials, host, request_uri, headers, response, content, http
+        )
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers["authorization"] = 'WSSE profile="UsernameToken"'
+        iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
+        cnonce = _cnonce()
+        password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
+        headers["X-WSSE"] = (
+            'UsernameToken Username="%s", PasswordDigest="%s", '
+            'Nonce="%s", Created="%s"'
+        ) % (self.credentials[0], password_digest, cnonce, iso_now)
+
+
+class GoogleLoginAuthentication(Authentication):
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        from urllib import urlencode
+
+        Authentication.__init__(
+            self, credentials, host, request_uri, headers, response, content, http
+        )
+        challenge = _parse_www_authenticate(response, "www-authenticate")
+        service = challenge["googlelogin"].get("service", "xapi")
+        # Blogger actually returns the service in the challenge
+        # For the rest we guess based on the URI
+        if service == "xapi" and request_uri.find("calendar") > 0:
+            service = "cl"
+        # No point in guessing Base or Spreadsheet
+        # elif request_uri.find("spreadsheets") > 0:
+        #    service = "wise"
+
+        auth = dict(
+            Email=credentials[0],
+            Passwd=credentials[1],
+            service=service,
+            source=headers["user-agent"],
+        )
+        resp, content = self.http.request(
+            "https://www.google.com/accounts/ClientLogin",
+            method="POST",
+            body=urlencode(auth),
+            headers={"Content-Type": "application/x-www-form-urlencoded"},
+        )
+        lines = content.split("\n")
+        d = dict([tuple(line.split("=", 1)) for line in lines if line])
+        if resp.status == 403:
+            self.Auth = ""
+        else:
+            self.Auth = d["Auth"]
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers["authorization"] = "GoogleLogin Auth=" + self.Auth
+
+
+AUTH_SCHEME_CLASSES = {
+    "basic": BasicAuthentication,
+    "wsse": WsseAuthentication,
+    "digest": DigestAuthentication,
+    "hmacdigest": HmacDigestAuthentication,
+    "googlelogin": GoogleLoginAuthentication,
+}
+
+AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
+
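+# Scheme selection sketch (descriptive note for this vendored copy): when a 401
+# response carries several challenges, Http._auth_from_challenge() walks
+# AUTH_SCHEME_ORDER from strongest to weakest and instantiates the first class
+# in AUTH_SCHEME_CLASSES whose scheme the server offered, so a server
+# advertising both Digest and Basic is answered with Digest.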
+
+class FileCache(object):
+    """Uses a local directory as a store for cached files.
+    Not really safe to use if multiple threads or processes are going to
+    be running on the same cache.
+    """
+
+    def __init__(
+        self, cache, safe=safename
+    ):  # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
+        self.cache = cache
+        self.safe = safe
+        if not os.path.exists(cache):
+            os.makedirs(self.cache)
+
+    def get(self, key):
+        retval = None
+        cacheFullPath = os.path.join(self.cache, self.safe(key))
+        try:
+            f = open(cacheFullPath, "rb")
+            retval = f.read()
+            f.close()
+        except IOError:
+            pass
+        return retval
+
+    def set(self, key, value):
+        cacheFullPath = os.path.join(self.cache, self.safe(key))
+        f = open(cacheFullPath, "wb")
+        f.write(value)
+        f.close()
+
+    def delete(self, key):
+        cacheFullPath = os.path.join(self.cache, self.safe(key))
+        if os.path.exists(cacheFullPath):
+            os.remove(cacheFullPath)
+
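+# Minimal FileCache usage sketch (assumes this vendored module is importable as
+# httplib2; illustrative only):
+#     h = httplib2.Http(cache=".httplib2-cache")   # wraps FileCache internally
+#     resp, content = h.request("http://example.org/")
+# Keys are passed through safename() to build filesystem-safe filenames, and
+# concurrent readers/writers are not protected against (see the docstring).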
+
+class Credentials(object):
+    def __init__(self):
+        self.credentials = []
+
+    def add(self, name, password, domain=""):
+        self.credentials.append((domain.lower(), name, password))
+
+    def clear(self):
+        self.credentials = []
+
+    def iter(self, domain):
+        for (cdomain, name, password) in self.credentials:
+            if cdomain == "" or domain == cdomain:
+                yield (name, password)
+
+
+class KeyCerts(Credentials):
+    """Identical to Credentials except that
+    name/password are mapped to key/cert."""
+
+    pass
+
+
+class AllHosts(object):
+    pass
+
+
+class ProxyInfo(object):
+    """Collect information required to use a proxy."""
+
+    bypass_hosts = ()
+
+    def __init__(
+        self,
+        proxy_type,
+        proxy_host,
+        proxy_port,
+        proxy_rdns=True,
+        proxy_user=None,
+        proxy_pass=None,
+        proxy_headers=None,
+    ):
+        """Args:
+
+          proxy_type: The type of proxy server.  This must be set to one of
+          socks.PROXY_TYPE_XXX constants.  For example:  p =
+          ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost',
+          proxy_port=8000)
+          proxy_host: The hostname or IP address of the proxy server.
+          proxy_port: The port that the proxy server is running on.
+          proxy_rdns: If True (default), DNS queries will not be performed
+          locally, and instead, handed to the proxy to resolve.  This is useful
+          if the network does not allow resolution of non-local names. In
+          httplib2 0.9 and earlier, this defaulted to False.
+          proxy_user: The username used to authenticate with the proxy server.
+          proxy_pass: The password used to authenticate with the proxy server.
+          proxy_headers: Additional or modified headers for the proxy connect
+          request.
+        """
+        self.proxy_type = proxy_type
+        self.proxy_host = proxy_host
+        self.proxy_port = proxy_port
+        self.proxy_rdns = proxy_rdns
+        self.proxy_user = proxy_user
+        self.proxy_pass = proxy_pass
+        self.proxy_headers = proxy_headers
+
+    def astuple(self):
+        return (
+            self.proxy_type,
+            self.proxy_host,
+            self.proxy_port,
+            self.proxy_rdns,
+            self.proxy_user,
+            self.proxy_pass,
+            self.proxy_headers,
+        )
+
+    def isgood(self):
+        return (self.proxy_host is not None) and (self.proxy_port is not None)
+
+    def applies_to(self, hostname):
+        return not self.bypass_host(hostname)
+
+    def bypass_host(self, hostname):
+        """Has this host been excluded from the proxy config"""
+        if self.bypass_hosts is AllHosts:
+            return True
+
+        hostname = "." + hostname.lstrip(".")
+        for skip_name in self.bypass_hosts:
+            # *.suffix
+            if skip_name.startswith(".") and hostname.endswith(skip_name):
+                return True
+            # exact match
+            if hostname == "." + skip_name:
+                return True
+        return False
+
+    def __repr__(self):
+        return (
+            "<ProxyInfo type={p.proxy_type} "
+            "host:port={p.proxy_host}:{p.proxy_port} rdns={p.proxy_rdns}"
+            + " user={p.proxy_user} headers={p.proxy_headers}>"
+        ).format(p=self)
+
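+# bypass_host() matching examples (illustrative, not upstream): with
+# bypass_hosts = [".example.com", "localhost"], the hostnames "www.example.com",
+# "example.com" (a leading dot is prepended before matching) and "localhost"
+# are all bypassed while "example.org" is not; bypass_hosts = AllHosts bypasses
+# every host.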
+
+def proxy_info_from_environment(method="http"):
+    """Read proxy info from the environment variables.
+    """
+    if method not in ["http", "https"]:
+        return
+
+    env_var = method + "_proxy"
+    url = os.environ.get(env_var, os.environ.get(env_var.upper()))
+    if not url:
+        return
+    return proxy_info_from_url(url, method, None)
+
+
+def proxy_info_from_url(url, method="http", noproxy=None):
+    """Construct a ProxyInfo from a URL (such as http_proxy env var)
+    """
+    url = urlparse.urlparse(url)
+    username = None
+    password = None
+    port = None
+    if "@" in url[1]:
+        ident, host_port = url[1].split("@", 1)
+        if ":" in ident:
+            username, password = ident.split(":", 1)
+        else:
+            password = ident
+    else:
+        host_port = url[1]
+    if ":" in host_port:
+        host, port = host_port.split(":", 1)
+    else:
+        host = host_port
+
+    if port:
+        port = int(port)
+    else:
+        port = dict(https=443, http=80)[method]
+
+    proxy_type = 3  # socks.PROXY_TYPE_HTTP
+    pi = ProxyInfo(
+        proxy_type=proxy_type,
+        proxy_host=host,
+        proxy_port=port,
+        proxy_user=username or None,
+        proxy_pass=password or None,
+        proxy_headers=None,
+    )
+
+    bypass_hosts = []
+    # If not given an explicit noproxy value, respect values in env vars.
+    if noproxy is None:
+        noproxy = os.environ.get("no_proxy", os.environ.get("NO_PROXY", ""))
+    # Special case: A single '*' character means all hosts should be bypassed.
+    if noproxy == "*":
+        bypass_hosts = AllHosts
+    elif noproxy.strip():
+        bypass_hosts = noproxy.split(",")
+        bypass_hosts = filter(bool, bypass_hosts)  # To exclude empty string.
+
+    pi.bypass_hosts = bypass_hosts
+    return pi
+
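+# Example for the two helpers above (illustrative values only):
+#     pi = proxy_info_from_url("http://user:secret@proxy.example.com:3128",
+#                              noproxy="localhost,.internal.example.com")
+# yields an HTTP-type ProxyInfo for proxy.example.com:3128 with those
+# credentials and bypass hosts, while proxy_info_from_environment("http")
+# builds the same thing from $http_proxy / $no_proxy when they are set.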
+
+class HTTPConnectionWithTimeout(httplib.HTTPConnection):
+    """HTTPConnection subclass that supports timeouts
+
+    All timeouts are in seconds. If None is passed for timeout then
+    Python's default timeout for sockets will be used. See for example
+    the docs of socket.setdefaulttimeout():
+    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+    """
+
+    def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
+        httplib.HTTPConnection.__init__(self, host, port, strict)
+        self.timeout = timeout
+        self.proxy_info = proxy_info
+
+    def connect(self):
+        """Connect to the host and port specified in __init__."""
+        # Mostly verbatim from httplib.py.
+        if self.proxy_info and socks is None:
+            raise ProxiesUnavailableError(
+                "Proxy support missing but proxy use was requested!"
+            )
+        msg = "getaddrinfo returns an empty list"
+        if self.proxy_info and self.proxy_info.isgood():
+            use_proxy = True
+            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = (
+                self.proxy_info.astuple()
+            )
+
+            host = proxy_host
+            port = proxy_port
+        else:
+            use_proxy = False
+
+            host = self.host
+            port = self.port
+
+        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
+            af, socktype, proto, canonname, sa = res
+            try:
+                if use_proxy:
+                    self.sock = socks.socksocket(af, socktype, proto)
+                    self.sock.setproxy(
+                        proxy_type,
+                        proxy_host,
+                        proxy_port,
+                        proxy_rdns,
+                        proxy_user,
+                        proxy_pass,
+                        proxy_headers,
+                    )
+                else:
+                    self.sock = socket.socket(af, socktype, proto)
+                    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+                # Different from httplib: support timeouts.
+                if has_timeout(self.timeout):
+                    self.sock.settimeout(self.timeout)
+                    # End of difference from httplib.
+                if self.debuglevel > 0:
+                    print("connect: (%s, %s) ************" % (self.host, self.port))
+                    if use_proxy:
+                        print(
+                            "proxy: %s ************"
+                            % str(
+                                (
+                                    proxy_host,
+                                    proxy_port,
+                                    proxy_rdns,
+                                    proxy_user,
+                                    proxy_pass,
+                                    proxy_headers,
+                                )
+                            )
+                        )
+                if use_proxy:
+                    self.sock.connect((self.host, self.port) + sa[2:])
+                else:
+                    self.sock.connect(sa)
+            except socket.error as msg:
+                if self.debuglevel > 0:
+                    print("connect fail: (%s, %s)" % (self.host, self.port))
+                    if use_proxy:
+                        print(
+                            "proxy: %s"
+                            % str(
+                                (
+                                    proxy_host,
+                                    proxy_port,
+                                    proxy_rdns,
+                                    proxy_user,
+                                    proxy_pass,
+                                    proxy_headers,
+                                )
+                            )
+                        )
+                if self.sock:
+                    self.sock.close()
+                self.sock = None
+                continue
+            break
+        if not self.sock:
+            raise socket.error(msg)
+
+
+class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
+    """This class allows communication via SSL.
+
+    All timeouts are in seconds. If None is passed for timeout then
+    Python's default timeout for sockets will be used. See for example
+    the docs of socket.setdefaulttimeout():
+    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+    """
+
+    def __init__(
+        self,
+        host,
+        port=None,
+        key_file=None,
+        cert_file=None,
+        strict=None,
+        timeout=None,
+        proxy_info=None,
+        ca_certs=None,
+        disable_ssl_certificate_validation=False,
+        ssl_version=None,
+    ):
+        httplib.HTTPSConnection.__init__(
+            self, host, port=port, key_file=key_file, cert_file=cert_file, strict=strict
+        )
+        self.timeout = timeout
+        self.proxy_info = proxy_info
+        if ca_certs is None:
+            ca_certs = CA_CERTS
+        self.ca_certs = ca_certs
+        self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
+        self.ssl_version = ssl_version
+
+    # The following two methods were adapted from https_wrapper.py, released
+    # with the Google Appengine SDK at
+    # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
+    # under the following license:
+    #
+    # Copyright 2007 Google Inc.
+    #
+    # Licensed under the Apache License, Version 2.0 (the "License");
+    # you may not use this file except in compliance with the License.
+    # You may obtain a copy of the License at
+    #
+    #     http://www.apache.org/licenses/LICENSE-2.0
+    #
+    # Unless required by applicable law or agreed to in writing, software
+    # distributed under the License is distributed on an "AS IS" BASIS,
+    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    # See the License for the specific language governing permissions and
+    # limitations under the License.
+    #
+
+    def _GetValidHostsForCert(self, cert):
+        """Returns a list of valid host globs for an SSL certificate.
+
+        Args:
+          cert: A dictionary representing an SSL certificate.
+        Returns:
+          list: A list of valid host globs.
+        """
+        if "subjectAltName" in cert:
+            return [x[1] for x in cert["subjectAltName"] if x[0].lower() == "dns"]
+        else:
+            return [x[0][1] for x in cert["subject"] if x[0][0].lower() == "commonname"]
+
+    def _ValidateCertificateHostname(self, cert, hostname):
+        """Validates that a given hostname is valid for an SSL certificate.
+
+        Args:
+          cert: A dictionary representing an SSL certificate.
+          hostname: The hostname to test.
+        Returns:
+          bool: Whether or not the hostname is valid for this certificate.
+        """
+        hosts = self._GetValidHostsForCert(cert)
+        for host in hosts:
+            host_re = host.replace(".", r"\.").replace("*", "[^.]*")
+            if re.search("^%s$" % (host_re,), hostname, re.I):
+                return True
+        return False
+
+    def connect(self):
+        "Connect to a host on a given (SSL) port."
+
+        msg = "getaddrinfo returns an empty list"
+        if self.proxy_info and self.proxy_info.isgood():
+            use_proxy = True
+            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = (
+                self.proxy_info.astuple()
+            )
+
+            host = proxy_host
+            port = proxy_port
+        else:
+            use_proxy = False
+
+            host = self.host
+            port = self.port
+
+        address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
+        for family, socktype, proto, canonname, sockaddr in address_info:
+            try:
+                if use_proxy:
+                    sock = socks.socksocket(family, socktype, proto)
+
+                    sock.setproxy(
+                        proxy_type,
+                        proxy_host,
+                        proxy_port,
+                        proxy_rdns,
+                        proxy_user,
+                        proxy_pass,
+                        proxy_headers,
+                    )
+                else:
+                    sock = socket.socket(family, socktype, proto)
+                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+
+                if has_timeout(self.timeout):
+                    sock.settimeout(self.timeout)
+
+                if use_proxy:
+                    sock.connect((self.host, self.port) + sockaddr[:2])
+                else:
+                    sock.connect(sockaddr)
+                self.sock = _ssl_wrap_socket(
+                    sock,
+                    self.key_file,
+                    self.cert_file,
+                    self.disable_ssl_certificate_validation,
+                    self.ca_certs,
+                    self.ssl_version,
+                    self.host,
+                )
+                if self.debuglevel > 0:
+                    print("connect: (%s, %s)" % (self.host, self.port))
+                    if use_proxy:
+                        print(
+                            "proxy: %s"
+                            % str(
+                                (
+                                    proxy_host,
+                                    proxy_port,
+                                    proxy_rdns,
+                                    proxy_user,
+                                    proxy_pass,
+                                    proxy_headers,
+                                )
+                            )
+                        )
+                if not self.disable_ssl_certificate_validation:
+                    cert = self.sock.getpeercert()
+                    hostname = self.host.split(":", 0)[0]
+                    if not self._ValidateCertificateHostname(cert, hostname):
+                        raise CertificateHostnameMismatch(
+                            "Server presented certificate that does not match "
+                            "host %s: %s" % (hostname, cert),
+                            hostname,
+                            cert,
+                        )
+            except (
+                ssl_SSLError,
+                ssl_CertificateError,
+                CertificateHostnameMismatch,
+            ) as e:
+                if sock:
+                    sock.close()
+                if self.sock:
+                    self.sock.close()
+                self.sock = None
+                # Unfortunately the ssl module doesn't seem to provide any way
+                # to get at more detailed error information, in particular
+                # whether the error is due to certificate validation or
+                # something else (such as SSL protocol mismatch).
+                if getattr(e, "errno", None) == ssl.SSL_ERROR_SSL:
+                    raise SSLHandshakeError(e)
+                else:
+                    raise
+            except (socket.timeout, socket.gaierror):
+                raise
+            except socket.error as msg:
+                if self.debuglevel > 0:
+                    print("connect fail: (%s, %s)" % (self.host, self.port))
+                    if use_proxy:
+                        print(
+                            "proxy: %s"
+                            % str(
+                                (
+                                    proxy_host,
+                                    proxy_port,
+                                    proxy_rdns,
+                                    proxy_user,
+                                    proxy_pass,
+                                    proxy_headers,
+                                )
+                            )
+                        )
+                if self.sock:
+                    self.sock.close()
+                self.sock = None
+                continue
+            break
+        if not self.sock:
+            raise socket.error(msg)
+
+
+SCHEME_TO_CONNECTION = {
+    "http": HTTPConnectionWithTimeout,
+    "https": HTTPSConnectionWithTimeout,
+}
+
+
+def _new_fixed_fetch(validate_certificate):
+
+    def fixed_fetch(
+        url,
+        payload=None,
+        method="GET",
+        headers={},
+        allow_truncated=False,
+        follow_redirects=True,
+        deadline=None,
+    ):
+        return fetch(
+            url,
+            payload=payload,
+            method=method,
+            headers=headers,
+            allow_truncated=allow_truncated,
+            follow_redirects=follow_redirects,
+            deadline=deadline,
+            validate_certificate=validate_certificate,
+        )
+
+    return fixed_fetch
+
+
+class AppEngineHttpConnection(httplib.HTTPConnection):
+    """Use httplib on App Engine, but compensate for its weirdness.
+
+    The parameters key_file, cert_file, proxy_info, ca_certs,
+    disable_ssl_certificate_validation, and ssl_version are all dropped on
+    the ground.
+    """
+
+    def __init__(
+        self,
+        host,
+        port=None,
+        key_file=None,
+        cert_file=None,
+        strict=None,
+        timeout=None,
+        proxy_info=None,
+        ca_certs=None,
+        disable_ssl_certificate_validation=False,
+        ssl_version=None,
+    ):
+        httplib.HTTPConnection.__init__(
+            self, host, port=port, strict=strict, timeout=timeout
+        )
+
+
+class AppEngineHttpsConnection(httplib.HTTPSConnection):
+    """Same as AppEngineHttpConnection, but for HTTPS URIs.
+
+    The parameters proxy_info, ca_certs, disable_ssl_certificate_validation,
+    and ssl_version are all dropped on the ground.
+    """
+
+    def __init__(
+        self,
+        host,
+        port=None,
+        key_file=None,
+        cert_file=None,
+        strict=None,
+        timeout=None,
+        proxy_info=None,
+        ca_certs=None,
+        disable_ssl_certificate_validation=False,
+        ssl_version=None,
+    ):
+        httplib.HTTPSConnection.__init__(
+            self,
+            host,
+            port=port,
+            key_file=key_file,
+            cert_file=cert_file,
+            strict=strict,
+            timeout=timeout,
+        )
+        self._fetch = _new_fixed_fetch(not disable_ssl_certificate_validation)
+
+
+# Use a different connection object for Google App Engine Standard Environment.
+def is_gae_instance():
+    server_software = os.environ.get('SERVER_SOFTWARE', '')
+    if (server_software.startswith('Google App Engine/') or
+        server_software.startswith('Development/') or
+        server_software.startswith('testutil/')):
+        return True
+    return False
+
+
+try:
+    if not is_gae_instance():
+        raise NotRunningAppEngineEnvironment()
+
+    from google.appengine.api import apiproxy_stub_map
+    if apiproxy_stub_map.apiproxy.GetStub("urlfetch") is None:
+        raise ImportError
+
+    from google.appengine.api.urlfetch import fetch
+
+    # Update the connection classes to use the Google App Engine-specific ones.
+    SCHEME_TO_CONNECTION = {
+        "http": AppEngineHttpConnection,
+        "https": AppEngineHttpsConnection,
+    }
+except (ImportError, NotRunningAppEngineEnvironment):
+    pass
+
+
+class Http(object):
+    """An HTTP client that handles:
+
+    - all methods
+    - caching
+    - ETags
+    - compression,
+    - HTTPS
+    - Basic
+    - Digest
+    - WSSE
+
+    and more.
+    """
+
+    def __init__(
+        self,
+        cache=None,
+        timeout=None,
+        proxy_info=proxy_info_from_environment,
+        ca_certs=None,
+        disable_ssl_certificate_validation=False,
+        ssl_version=None,
+    ):
+        """If 'cache' is a string then it is used as a directory name for
+        a disk cache. Otherwise it must be an object that supports the
+        same interface as FileCache.
+
+        All timeouts are in seconds. If None is passed for timeout
+        then Python's default timeout for sockets will be used. See
+        for example the docs of socket.setdefaulttimeout():
+        http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+
+        `proxy_info` may be:
+          - a callable that takes the http scheme ('http' or 'https') and
+            returns a ProxyInfo instance per request. By default, uses
+            proxy_info_from_environment.
+          - a ProxyInfo instance (static proxy config).
+          - None (proxy disabled).
+
+        ca_certs is the path of a file containing root CA certificates for SSL
+        server certificate validation.  By default, a CA cert file bundled with
+        httplib2 is used.
+
+        If disable_ssl_certificate_validation is true, SSL cert validation will
+        not be performed.
+
+        By default, ssl.PROTOCOL_SSLv23 will be used for the ssl version.
+        """
+        self.proxy_info = proxy_info
+        self.ca_certs = ca_certs
+        self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
+        self.ssl_version = ssl_version
+
+        # Map domain name to an httplib connection
+        self.connections = {}
+        # The location of the cache, for now a directory
+        # where cached responses are held.
+        if cache and isinstance(cache, basestring):
+            self.cache = FileCache(cache)
+        else:
+            self.cache = cache
+
+        # Name/password
+        self.credentials = Credentials()
+
+        # Key/cert
+        self.certificates = KeyCerts()
+
+        # authorization objects
+        self.authorizations = []
+
+        # If set to False then no redirects are followed, even safe ones.
+        self.follow_redirects = True
+
+        # Which HTTP methods do we apply optimistic concurrency to, i.e.
+        # which methods get an "if-match:" etag header added to them.
+        self.optimistic_concurrency_methods = ["PUT", "PATCH"]
+
+        # If 'follow_redirects' is True, and this is set to True then
+        # all redirects are followed, including unsafe ones.
+        self.follow_all_redirects = False
+
+        self.ignore_etag = False
+
+        self.force_exception_to_status_code = False
+
+        self.timeout = timeout
+
+        # Keep Authorization: headers on a redirect.
+        self.forward_authorization_headers = False
+
+    def __getstate__(self):
+        state_dict = copy.copy(self.__dict__)
+        # In case request is augmented by some foreign object such as
+        # credentials which handle auth
+        if "request" in state_dict:
+            del state_dict["request"]
+        if "connections" in state_dict:
+            del state_dict["connections"]
+        return state_dict
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+        self.connections = {}
+
+    def _auth_from_challenge(self, host, request_uri, headers, response, content):
+        """A generator that creates Authorization objects
+           that can be applied to requests.
+        """
+        challenges = _parse_www_authenticate(response, "www-authenticate")
+        for cred in self.credentials.iter(host):
+            for scheme in AUTH_SCHEME_ORDER:
+                if scheme in challenges:
+                    yield AUTH_SCHEME_CLASSES[scheme](
+                        cred, host, request_uri, headers, response, content, self
+                    )
+
+    def add_credentials(self, name, password, domain=""):
+        """Add a name and password that will be used
+        any time a request requires authentication."""
+        self.credentials.add(name, password, domain)
+
+    def add_certificate(self, key, cert, domain):
+        """Add a key and cert that will be used
+        any time a request requires authentication."""
+        self.certificates.add(key, cert, domain)
+
+    def clear_credentials(self):
+        """Remove all the names and passwords
+        that are used for authentication"""
+        self.credentials.clear()
+        self.authorizations = []
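+
+    # Usage sketch for the credential helpers above (illustrative, not upstream
+    # documentation):
+    #     h = Http()
+    #     h.add_credentials("bob", "s3cret", "example.com")
+    #     resp, content = h.request("https://example.com/protected")
+    # On a 401 the stored name/password is retried with the strongest scheme
+    # the server offers (see AUTH_SCHEME_ORDER) and the resulting
+    # Authentication object is remembered for later requests in scope.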
+
+    def _conn_request(self, conn, request_uri, method, body, headers):
+        i = 0
+        seen_bad_status_line = False
+        while i < RETRIES:
+            i += 1
+            try:
+                if hasattr(conn, "sock") and conn.sock is None:
+                    conn.connect()
+                conn.request(method, request_uri, body, headers)
+            except socket.timeout:
+                raise
+            except socket.gaierror:
+                conn.close()
+                raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
+            except ssl_SSLError:
+                conn.close()
+                raise
+            except socket.error as e:
+                err = 0
+                if hasattr(e, "args"):
+                    err = getattr(e, "args")[0]
+                else:
+                    err = e.errno
+                if err == errno.ECONNREFUSED:  # Connection refused
+                    raise
+                if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES:
+                    continue  # retry on potentially transient socket errors
+            except httplib.HTTPException:
+                # Just because the server closed the connection doesn't apparently mean
+                # that the server didn't send a response.
+                if hasattr(conn, "sock") and conn.sock is None:
+                    if i < RETRIES - 1:
+                        conn.close()
+                        conn.connect()
+                        continue
+                    else:
+                        conn.close()
+                        raise
+                if i < RETRIES - 1:
+                    conn.close()
+                    conn.connect()
+                    continue
+            try:
+                response = conn.getresponse()
+            except httplib.BadStatusLine:
+                # If we get a BadStatusLine on the first try then that means
+                # the connection just went stale, so retry regardless of the
+                # number of RETRIES set.
+                if not seen_bad_status_line and i == 1:
+                    i = 0
+                    seen_bad_status_line = True
+                    conn.close()
+                    conn.connect()
+                    continue
+                else:
+                    conn.close()
+                    raise
+            except (socket.error, httplib.HTTPException):
+                if i < RETRIES - 1:
+                    conn.close()
+                    conn.connect()
+                    continue
+                else:
+                    conn.close()
+                    raise
+            else:
+                content = ""
+                if method == "HEAD":
+                    conn.close()
+                else:
+                    content = response.read()
+                response = Response(response)
+                if method != "HEAD":
+                    content = _decompressContent(response, content)
+            break
+        return (response, content)
+
+    def _request(
+        self,
+        conn,
+        host,
+        absolute_uri,
+        request_uri,
+        method,
+        body,
+        headers,
+        redirections,
+        cachekey,
+    ):
+        """Do the actual request using the connection object
+        and also follow one level of redirects if necessary"""
+
+        auths = [
+            (auth.depth(request_uri), auth)
+            for auth in self.authorizations
+            if auth.inscope(host, request_uri)
+        ]
+        auth = auths and sorted(auths)[0][1] or None
+        if auth:
+            auth.request(method, request_uri, headers, body)
+
+        (response, content) = self._conn_request(
+            conn, request_uri, method, body, headers
+        )
+
+        if auth:
+            if auth.response(response, body):
+                auth.request(method, request_uri, headers, body)
+                (response, content) = self._conn_request(
+                    conn, request_uri, method, body, headers
+                )
+                response._stale_digest = 1
+
+        if response.status == 401:
+            for authorization in self._auth_from_challenge(
+                host, request_uri, headers, response, content
+            ):
+                authorization.request(method, request_uri, headers, body)
+                (response, content) = self._conn_request(
+                    conn, request_uri, method, body, headers
+                )
+                if response.status != 401:
+                    self.authorizations.append(authorization)
+                    authorization.response(response, body)
+                    break
+
+        if (
+            self.follow_all_redirects
+            or (method in ["GET", "HEAD"])
+            or response.status == 303
+        ):
+            if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
+                # Pick out the location header and basically start from the beginning
+                # remembering first to strip the ETag header and decrement our 'depth'
+                if redirections:
+                    if "location" not in response and response.status != 300:
+                        raise RedirectMissingLocation(
+                            _(
+                                "Redirected but the response is missing a Location: header."
+                            ),
+                            response,
+                            content,
+                        )
+                    # Fix-up relative redirects (which violate an RFC 2616 MUST)
+                    if "location" in response:
+                        location = response["location"]
+                        (scheme, authority, path, query, fragment) = parse_uri(location)
+                        if authority is None:
+                            response["location"] = urlparse.urljoin(
+                                absolute_uri, location
+                            )
+                    if response.status == 301 and method in ["GET", "HEAD"]:
+                        response["-x-permanent-redirect-url"] = response["location"]
+                        if "content-location" not in response:
+                            response["content-location"] = absolute_uri
+                        _updateCache(headers, response, content, self.cache, cachekey)
+                    if "if-none-match" in headers:
+                        del headers["if-none-match"]
+                    if "if-modified-since" in headers:
+                        del headers["if-modified-since"]
+                    if (
+                        "authorization" in headers
+                        and not self.forward_authorization_headers
+                    ):
+                        del headers["authorization"]
+                    if "location" in response:
+                        location = response["location"]
+                        old_response = copy.deepcopy(response)
+                        if "content-location" not in old_response:
+                            old_response["content-location"] = absolute_uri
+                        redirect_method = method
+                        if response.status in [302, 303]:
+                            redirect_method = "GET"
+                            body = None
+                        (response, content) = self.request(
+                            location,
+                            method=redirect_method,
+                            body=body,
+                            headers=headers,
+                            redirections=redirections - 1,
+                        )
+                        response.previous = old_response
+                else:
+                    raise RedirectLimit(
+                        "Redirected more times than rediection_limit allows.",
+                        response,
+                        content,
+                    )
+            elif response.status in [200, 203] and method in ["GET", "HEAD"]:
+                # Don't cache 206's since we aren't going to handle byte range requests
+                if "content-location" not in response:
+                    response["content-location"] = absolute_uri
+                _updateCache(headers, response, content, self.cache, cachekey)
+
+        return (response, content)
+
+    def _normalize_headers(self, headers):
+        return _normalize_headers(headers)
+
+    # Need to catch and rebrand some exceptions
+    # Then need to optionally turn all exceptions into status codes
+    # including all socket.* and httplib.* exceptions.
+
+    def request(
+        self,
+        uri,
+        method="GET",
+        body=None,
+        headers=None,
+        redirections=DEFAULT_MAX_REDIRECTS,
+        connection_type=None,
+    ):
+        """ Performs a single HTTP request.
+
+        The 'uri' is the URI of the HTTP resource and can begin with either
+        'http' or 'https'. The value of 'uri' must be an absolute URI.
+
+        The 'method' is the HTTP method to perform, such as GET, POST, DELETE,
+        etc. There is no restriction on the methods allowed.
+
+        The 'body' is the entity body to be sent with the request. It is a
+        string object.
+
+        Any extra headers that are to be sent with the request should be
+        provided in the 'headers' dictionary.
+
+        The maximum number of redirects to follow before raising an
+        exception is 'redirections'. The default is 5.
+
+        The return value is a tuple of (response, content), the first
+        being an instance of the 'Response' class, the second being
+        a string that contains the response entity body.
+        """
+        conn_key = ''
+
+        try:
+            if headers is None:
+                headers = {}
+            else:
+                headers = self._normalize_headers(headers)
+
+            if "user-agent" not in headers:
+                headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__
+
+            uri = iri2uri(uri)
+
+            (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
+
+            proxy_info = self._get_proxy_info(scheme, authority)
+
+            conn_key = scheme + ":" + authority
+            conn = self.connections.get(conn_key)
+            if conn is None:
+                if not connection_type:
+                    connection_type = SCHEME_TO_CONNECTION[scheme]
+                certs = list(self.certificates.iter(authority))
+                if scheme == "https":
+                    if certs:
+                        conn = self.connections[conn_key] = connection_type(
+                            authority,
+                            key_file=certs[0][0],
+                            cert_file=certs[0][1],
+                            timeout=self.timeout,
+                            proxy_info=proxy_info,
+                            ca_certs=self.ca_certs,
+                            disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,
+                            ssl_version=self.ssl_version,
+                        )
+                    else:
+                        conn = self.connections[conn_key] = connection_type(
+                            authority,
+                            timeout=self.timeout,
+                            proxy_info=proxy_info,
+                            ca_certs=self.ca_certs,
+                            disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,
+                            ssl_version=self.ssl_version,
+                        )
+                else:
+                    conn = self.connections[conn_key] = connection_type(
+                        authority, timeout=self.timeout, proxy_info=proxy_info
+                    )
+                conn.set_debuglevel(debuglevel)
+
+            if "range" not in headers and "accept-encoding" not in headers:
+                headers["accept-encoding"] = "gzip, deflate"
+
+            info = email.Message.Message()
+            cached_value = None
+            if self.cache:
+                cachekey = defrag_uri.encode("utf-8")
+                cached_value = self.cache.get(cachekey)
+                if cached_value:
+                    # info = email.message_from_string(cached_value)
+                    #
+                    # Need to replace the line above with the kludge below
+                    # to fix the non-existent bug not fixed in this
+                    # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
+                    try:
+                        info, content = cached_value.split("\r\n\r\n", 1)
+                        feedparser = email.FeedParser.FeedParser()
+                        feedparser.feed(info)
+                        info = feedparser.close()
+                        feedparser._parse = None
+                    except (IndexError, ValueError):
+                        self.cache.delete(cachekey)
+                        cachekey = None
+                        cached_value = None
+            else:
+                cachekey = None
+
+            if (
+                method in self.optimistic_concurrency_methods
+                and self.cache
+                and "etag" in info
+                and not self.ignore_etag
+                and "if-match" not in headers
+            ):
+                # http://www.w3.org/1999/04/Editing/
+                headers["if-match"] = info["etag"]
+
+            if method not in ["GET", "HEAD"] and self.cache and cachekey:
+                # RFC 2616 Section 13.10
+                self.cache.delete(cachekey)
+
+            # Check the vary header in the cache to see if this request
+            # matches what varies in the cache.
+            if method in ["GET", "HEAD"] and "vary" in info:
+                vary = info["vary"]
+                vary_headers = vary.lower().replace(" ", "").split(",")
+                for header in vary_headers:
+                    key = "-varied-%s" % header
+                    value = info[key]
+                    if headers.get(header, None) != value:
+                        cached_value = None
+                        break
+
+            if (
+                cached_value
+                and method in ["GET", "HEAD"]
+                and self.cache
+                and "range" not in headers
+            ):
+                if "-x-permanent-redirect-url" in info:
+                    # Should cached permanent redirects be counted in our redirection count? For now, yes.
+                    if redirections <= 0:
+                        raise RedirectLimit(
+                            "Redirected more times than rediection_limit allows.",
+                            {},
+                            "",
+                        )
+                    (response, new_content) = self.request(
+                        info["-x-permanent-redirect-url"],
+                        method="GET",
+                        headers=headers,
+                        redirections=redirections - 1,
+                    )
+                    response.previous = Response(info)
+                    response.previous.fromcache = True
+                else:
+                    # Determine our course of action:
+                    #   Is the cached entry fresh or stale?
+                    #   Has the client requested a non-cached response?
+                    #
+                    # There seem to be three possible answers:
+                    # 1. [FRESH] Return the cache entry w/o doing a GET
+                    # 2. [STALE] Do the GET (but add in cache validators if available)
+                    # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
+                    entry_disposition = _entry_disposition(info, headers)
+
+                    if entry_disposition == "FRESH":
+                        if not cached_value:
+                            info["status"] = "504"
+                            content = ""
+                        response = Response(info)
+                        if cached_value:
+                            response.fromcache = True
+                        return (response, content)
+
+                    if entry_disposition == "STALE":
+                        if (
+                            "etag" in info
+                            and not self.ignore_etag
+                            and not "if-none-match" in headers
+                        ):
+                            headers["if-none-match"] = info["etag"]
+                        if "last-modified" in info and not "last-modified" in headers:
+                            headers["if-modified-since"] = info["last-modified"]
+                    elif entry_disposition == "TRANSPARENT":
+                        pass
+
+                    (response, new_content) = self._request(
+                        conn,
+                        authority,
+                        uri,
+                        request_uri,
+                        method,
+                        body,
+                        headers,
+                        redirections,
+                        cachekey,
+                    )
+
+                if response.status == 304 and method == "GET":
+                    # Rewrite the cache entry with the new end-to-end headers:
+                    # every header present in the response overwrites its value
+                    # in info, unless it is hop-by-hop or listed in the
+                    # connection header.
+
+                    for key in _get_end2end_headers(response):
+                        info[key] = response[key]
+                    merged_response = Response(info)
+                    if hasattr(response, "_stale_digest"):
+                        merged_response._stale_digest = response._stale_digest
+                    _updateCache(
+                        headers, merged_response, content, self.cache, cachekey
+                    )
+                    response = merged_response
+                    response.status = 200
+                    response.fromcache = True
+
+                elif response.status == 200:
+                    content = new_content
+                else:
+                    self.cache.delete(cachekey)
+                    content = new_content
+            else:
+                cc = _parse_cache_control(headers)
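+                # RFC 2616 section 14.9.4: an only-if-cached request that cannot
+                # be answered from the cache gets a 504 (Gateway Timeout).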
+                if "only-if-cached" in cc:
+                    info["status"] = "504"
+                    response = Response(info)
+                    content = ""
+                else:
+                    (response, content) = self._request(
+                        conn,
+                        authority,
+                        uri,
+                        request_uri,
+                        method,
+                        body,
+                        headers,
+                        redirections,
+                        cachekey,
+                    )
+        except Exception as e:
+            is_timeout = isinstance(e, socket.timeout)
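+            # A timed-out connection may be left in an unusable state, so drop it
+            # from the pool and let the next request open a fresh one.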
+            if is_timeout:
+                conn = self.connections.pop(conn_key, None)
+                if conn:
+                    conn.close()
+
+            if self.force_exception_to_status_code:
+                if isinstance(e, HttpLib2ErrorWithResponse):
+                    response = e.response
+                    content = e.content
+                    response.status = 500
+                    response.reason = str(e)
+                elif is_timeout:
+                    content = "Request Timeout"
+                    response = Response(
+                        {
+                            "content-type": "text/plain",
+                            "status": "408",
+                            "content-length": len(content),
+                        }
+                    )
+                    response.reason = "Request Timeout"
+                else:
+                    content = str(e)
+                    response = Response(
+                        {
+                            "content-type": "text/plain",
+                            "status": "400",
+                            "content-length": len(content),
+                        }
+                    )
+                    response.reason = "Bad Request"
+            else:
+                raise
+
+        return (response, content)
+
+    def _get_proxy_info(self, scheme, authority):
+        """Return a ProxyInfo instance (or None) based on the scheme
+        and authority.
+        """
+        hostname, port = urllib.splitport(authority)
+        proxy_info = self.proxy_info
+        if callable(proxy_info):
+            proxy_info = proxy_info(scheme)
+
+        if hasattr(proxy_info, "applies_to") and not proxy_info.applies_to(hostname):
+            proxy_info = None
+        return proxy_info
+
+
+class Response(dict):
+    """An object more like email.Message than httplib.HTTPResponse."""
+
+    """Is this response from our local cache"""
+    fromcache = False
+    """HTTP protocol version used by server.
+
+    10 for HTTP/1.0, 11 for HTTP/1.1.
+    """
+    version = 11
+
+    "Status code returned by server. "
+    status = 200
+    """Reason phrase returned by server."""
+    reason = "Ok"
+
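+    """The previous Response, if any, in a chain of redirects."""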
+    previous = None
+
+    def __init__(self, info):
+        # info is either an email.Message or
+        # an httplib.HTTPResponse object.
+        if isinstance(info, httplib.HTTPResponse):
+            for key, value in info.getheaders():
+                self[key.lower()] = value
+            self.status = info.status
+            self["status"] = str(self.status)
+            self.reason = info.reason
+            self.version = info.version
+        elif isinstance(info, email.Message.Message):
+            for key, value in info.items():
+                self[key.lower()] = value
+            self.status = int(self["status"])
+        else:
+            for key, value in info.iteritems():
+                self[key.lower()] = value
+            self.status = int(self.get("status", self.status))
+            self.reason = self.get("reason", self.reason)
+
+    def __getattr__(self, name):
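+        # "dict" resolves to the Response itself, which already maps lowercased
+        # header names to values, so callers expecting a .dict attribute work.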
+        if name == "dict":
+            return self
+        else:
+            raise AttributeError(name)
diff --git a/utils/frozen_chromite/third_party/python2/httplib2/cacerts.txt b/utils/frozen_chromite/third_party/python2/httplib2/cacerts.txt
new file mode 100644
index 0000000..a2a9833
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python2/httplib2/cacerts.txt
@@ -0,0 +1,2196 @@
+# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Label: "GTE CyberTrust Global Root"
+# Serial: 421
+# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
+# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
+# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
+-----BEGIN CERTIFICATE-----
+MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
+VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
+bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
+b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
+UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
+cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
+b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
+iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
+r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
+04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
+GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
+3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
+lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Server CA"
+# Serial: 1
+# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
+# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
+# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
+-----BEGIN CERTIFICATE-----
+MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
+MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
+MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
+DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
+dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
+cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
+DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
+gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
+yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
+L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
+EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
+7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
+QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
+qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Premium Server CA"
+# Serial: 1
+# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
+# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
+# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
+-----BEGIN CERTIFICATE-----
+MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
+dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
+MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
+MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
+A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
+b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
+cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
+VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
+ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
+uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
+9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
+hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
+pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
+# Subject: O=Equifax OU=Equifax Secure Certificate Authority
+# Label: "Equifax Secure CA"
+# Serial: 903804111
+# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
+# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
+# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
+dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
+MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
+dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
+BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
+cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
+MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
+aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
+ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
+IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
+7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
+1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
+# Serial: 167285380242319648451154478808036881606
+# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
+# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
+# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
+-----BEGIN CERTIFICATE-----
+MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
+c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
+MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
+emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
+FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
+UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
+YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
+MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
+pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
+13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
+AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
+U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
+F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
+oJ2daZH9
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Label: "ValiCert Class 1 VA"
+# Serial: 1
+# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
+# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
+# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
+NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
+LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
+TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
+TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
+LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
+I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
+nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Label: "ValiCert Class 2 VA"
+# Serial: 1
+# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
+# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
+# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
+NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
+dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
+WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
+v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
+UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
+IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
+W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Label: "RSA Root Certificate 1"
+# Serial: 1
+# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
+# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
+# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
+NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
+cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
+2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
+JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
+Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
+n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
+PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
+# Serial: 314531972711909413743075096039378935511
+# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
+# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
+# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
+GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
+U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
+NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
+ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
+ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
+CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
+g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
+fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
+2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
+bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Secure Server CA"
+# Serial: 927650371
+# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
+# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
+# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
+-----BEGIN CERTIFICATE-----
+MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
+VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
+ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
+KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
+ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
+MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
+ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
+b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
+bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
+U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
+A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
+I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
+wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
+AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
+oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
+BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
+dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
+MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
+b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
+dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
+MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
+E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
+MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
+hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
+95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
+2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946059622
+# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc
+# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe
+# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f
+-----BEGIN CERTIFICATE-----
+MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy
+MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA
+vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G
+CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA
+WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo
+oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ
+h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18
+f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN
+B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy
+vUxFnmG6v4SBkgPR0ml8xQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure Global eBusiness CA"
+# Serial: 1
+# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
+# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
+# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
+-----BEGIN CERTIFICATE-----
+MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
+ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
+MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
+dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
+c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
+UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
+58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
+o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
+aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
+A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
+Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
+8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure eBusiness CA 1"
+# Serial: 4
+# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
+# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
+# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
+-----BEGIN CERTIFICATE-----
+MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
+ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
+MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
+LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
+KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
+RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
+WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
+Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
+eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
+zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
+/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
+-----END CERTIFICATE-----
+
+# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
+# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
+# Label: "Equifax Secure eBusiness CA 2"
+# Serial: 930140085
+# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca
+# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc
+# SHA256 Fingerprint: 2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj
+dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0
+NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD
+VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G
+vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/
+BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl
+IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw
+NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq
+y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy
+0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1
+E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Low-Value Services Root"
+# Serial: 1
+# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
+# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
+# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
+-----BEGIN CERTIFICATE-----
+MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
+MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
+QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
+VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
+A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
+CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
+tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
+dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
+PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
+BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
+MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
+ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
+IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
+7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
+43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
+eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
+pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
+WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Public Services Root"
+# Serial: 1
+# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
+# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
+# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
+MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
+ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
+BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
+6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
+GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
+dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
+1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
+62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
+BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
+MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
+cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
+b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
+IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
+iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
+GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
+4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
+XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Qualified Certificates Root"
+# Serial: 1
+# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
+# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
+# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
+-----BEGIN CERTIFICATE-----
+MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
+MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
+EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
+BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
+xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
+87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
+2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
+WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
+0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
+A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
+pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
+ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
+aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
+hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
+hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
+dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
+P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
+iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
+xqE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Global CA 2"
+# Serial: 1
+# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
+# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
+# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
+-----BEGIN CERTIFICATE-----
+MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
+IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
+R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
+PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
+Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
+TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
+5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
+S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
+2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
+FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
+EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
+EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
+/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
+A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
+abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
+I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
+4iIprn2DQKi6bA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc.
+# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc.
+# Label: "America Online Root Certification Authority 1"
+# Serial: 1
+# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e
+# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a
+# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3
+-----BEGIN CERTIFICATE-----
+MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
+bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2
+MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
+ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
+Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk
+hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym
+1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW
+OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb
+2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko
+O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU
+AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF
+Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb
+LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir
+oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C
+MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds
+sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7
+-----END CERTIFICATE-----
+
+# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc.
+# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc.
+# Label: "America Online Root Certification Authority 2"
+# Serial: 1
+# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf
+# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84
+# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd
+-----BEGIN CERTIFICATE-----
+MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
+bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2
+MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
+ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
+Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC
+206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci
+KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2
+JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9
+BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e
+Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B
+PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67
+Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq
+Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ
+o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3
++L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj
+YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj
+FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn
+xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2
+LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc
+obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8
+CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe
+IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA
+DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F
+AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX
+Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb
+AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl
+Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw
+RY8mkaKO/qk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
+# Subject: CN=Secure Certificate Services O=Comodo CA Limited
+# Label: "Comodo Secure Services root"
+# Serial: 1
+# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
+# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
+# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
+-----BEGIN CERTIFICATE-----
+MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
+ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
+fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
+BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
+cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
+HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
+CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
+3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
+6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
+HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
+EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
+Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
+Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
+DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
+5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
+Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
+gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
+aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
+izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
+# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
+# Label: "Comodo Trusted Services root"
+# Serial: 1
+# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
+# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
+# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
+aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
+MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
+BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
+VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
+fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
+TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
+fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
+1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
+kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
+A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
+ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
+dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
+Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
+HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
+pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
+jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
+xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
+dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN DATACorp SGC Root CA"
+# Serial: 91374294542884689855167577680241077609
+# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
+# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
+# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
+-----BEGIN CERTIFICATE-----
+MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
+kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
+IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
+EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
+VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
+dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
+E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
+D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
+4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
+lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
+bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
+o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
+MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
+LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
+BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
+AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
+Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
+j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
+KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
+2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
+mfnGV/TJVTl4uix5yaaIK/QI
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN USERFirst Hardware Root CA"
+# Serial: 91374294542884704022267039221184531197
+# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
+# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
+# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
+-----BEGIN CERTIFICATE-----
+MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
+lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
+SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
+A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
+MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
+d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
+cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
+0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
+M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
+MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
+oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
+DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
+oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
+VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
+dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
+bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
+BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
+//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
+CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
+CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
+3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
+KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
+# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
+# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
+-----BEGIN CERTIFICATE-----
+MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
+FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
+ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
+LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
+BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
+Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
+dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
+cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
+YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
+dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
+bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
+YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
+TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
+9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
+jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
+FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
+ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
+ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
+EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
+L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
+yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
+O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
+um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
+NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
+# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
+# Label: "TC TrustCenter Class 2 CA II"
+# Serial: 941389028203453866782103406992443
+# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23
+# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e
+# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4
+-----BEGIN CERTIFICATE-----
+MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
+BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
+Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1
+OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
+SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc
+VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf
+tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg
+uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J
+XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK
+8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99
+5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3
+kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
+dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6
+Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
+JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
+Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
+TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS
+GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt
+ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8
+au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV
+hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI
+dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
+# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
+# Label: "TC TrustCenter Class 3 CA II"
+# Serial: 1506523511417715638772220530020799
+# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e
+# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5
+# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e
+-----BEGIN CERTIFICATE-----
+MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
+BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
+Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1
+OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
+SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc
+VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW
+Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q
+Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2
+1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq
+ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1
+Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX
+XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
+dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6
+Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
+JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
+Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
+TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN
+irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8
+TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6
+g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB
+95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj
+S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Label: "TC TrustCenter Universal CA I"
+# Serial: 601024842042189035295619584734726
+# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c
+# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3
+# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
+BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1
+c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx
+MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg
+R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD
+VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR
+JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T
+fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu
+jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z
+wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ
+fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD
+VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G
+CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1
+7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn
+8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs
+ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT
+ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/
+2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Label: "TC TrustCenter Universal CA III"
+# Serial: 2010889993983507346460533407902964
+# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b
+# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87
+# SHA256 Fingerprint: 30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d
+-----BEGIN CERTIFICATE-----
+MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
+BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1
+c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy
+MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl
+ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm
+BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF
+5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv
+DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v
+zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT
+yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj
+dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh
+MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI
+4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz
+dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY
+aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G
+DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV
+CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH
+LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 45
+# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
+# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
+# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
+-----BEGIN CERTIFICATE-----
+MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
+F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
+ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
+ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
+aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
+YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
+c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
+d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
+CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
+dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
+wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
+Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
+0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
+pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
+CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
+P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
+1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
+KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
+JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
+8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
+fyWl8kgAwKQB2j8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Label: "StartCom Certification Authority G2"
+# Serial: 59
+# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
+# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
+# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
+-----BEGIN CERTIFICATE-----
+MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
+OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
+A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
+JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
+vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
+D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
+Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
+RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
+HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
+nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
+0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
+UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
+Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
+TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
+BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
+2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
+UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
+6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
+9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
+HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
+wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
+XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
+IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
+hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
+so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
+-----END CERTIFICATE-----
+
+# Issuer: O=Digital Signature Trust Co., CN=DST Root CA X3
+# Subject: O=Digital Signature Trust Co., CN=DST Root CA X3
+# Label: "IdenTrust DST Root CA X3"
+# Serial: 44AFB080D6A327BA893039862EF8406B
+# MD5 Fingerprint: 41:03:52:DC:0F:F7:50:1B:16:F0:02:8E:BA:6F:45:C5
+# SHA1 Fingerprint: DA:C9:02:4F:54:D8:F6:DF:94:93:5F:B1:73:26:38:CA:6A:D7:7C:13
+# SHA256 Fingerprint: 06:87:26:03:31:A7:24:03:D9:09:F1:05:E6:9B:CF:0D:32:E1:BD:24:93:FF:C6:D9:20:6D:11:BC:D6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US
+# Subject: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US
+# Serial: 33af1e6a711a9a0bb2864b11d09fae5
+# MD5 Fingerprint: E4:A6:8A:C8:54:AC:52:42:46:0A:FD:72:48:1B:2A:44
+# SHA1 Fingerprint: DF:3C:24:F9:BF:D6:66:76:1B:26:80:73:FE:06:D1:CC:8D:4F:82:A4
+# SHA256 Fingerprint: CB:3C:CB:B7:60:31:E5:E0:13:8F:8D:D3:9A:23:F9:DE:47:FF:C3:5E:43:C1:14:4C:EA:27:D4:6A:5A:B1:CB:5F
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
diff --git a/utils/frozen_chromite/third_party/python2/httplib2/certs.py b/utils/frozen_chromite/third_party/python2/httplib2/certs.py
new file mode 100644
index 0000000..59d1ffc
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python2/httplib2/certs.py
@@ -0,0 +1,42 @@
+"""Utilities for certificate management."""
+
+import os
+
+certifi_available = False
+certifi_where = None
+try:
+    from certifi import where as certifi_where
+    certifi_available = True
+except ImportError:
+    pass
+
+custom_ca_locater_available = False
+custom_ca_locater_where = None
+try:
+    from ca_certs_locater import get as custom_ca_locater_where
+    custom_ca_locater_available = True
+except ImportError:
+    pass
+
+
+BUILTIN_CA_CERTS = os.path.join(
+    os.path.dirname(os.path.abspath(__file__)), "cacerts.txt"
+)
+
+
+def where():
+    env = os.environ.get("HTTPLIB2_CA_CERTS")
+    if env is not None:
+        if os.path.isfile(env):
+            return env
+        else:
+            raise RuntimeError("Environment variable HTTPLIB2_CA_CERTS not a valid file")
+    if custom_ca_locater_available:
+        return custom_ca_locater_where()
+    if certifi_available:
+        return certifi_where()
+    return BUILTIN_CA_CERTS
+
+
+if __name__ == "__main__":
+    print(where())
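
A minimal usage sketch for the module above, assuming the vendored package is importable as `httplib2` (the exact import path depends on how the frozen tree ends up on `sys.path`):

    from httplib2 import certs

    # where() checks, in order: the HTTPLIB2_CA_CERTS environment variable
    # (which must point at an existing file), an optional ca_certs_locater
    # module, certifi if it is installed, and finally the bundled cacerts.txt
    # that sits next to certs.py.
    print(certs.where())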
diff --git a/utils/frozen_chromite/third_party/python2/httplib2/iri2uri.py b/utils/frozen_chromite/third_party/python2/httplib2/iri2uri.py
new file mode 100644
index 0000000..0a978a7
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python2/httplib2/iri2uri.py
@@ -0,0 +1,123 @@
+"""Converts an IRI to a URI."""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = []
+__version__ = "1.0.0"
+__license__ = "MIT"
+
+import urlparse
+
+# Convert an IRI to a URI following the rules in RFC 3987
+#
+# The characters we need to encode and escape are defined in the spec:
+#
+# iprivate =  %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
+# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
+#         / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
+#         / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
+#         / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
+#         / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
+#         / %xD0000-DFFFD / %xE1000-EFFFD
+
+escape_range = [
+    (0xA0, 0xD7FF),
+    (0xE000, 0xF8FF),
+    (0xF900, 0xFDCF),
+    (0xFDF0, 0xFFEF),
+    (0x10000, 0x1FFFD),
+    (0x20000, 0x2FFFD),
+    (0x30000, 0x3FFFD),
+    (0x40000, 0x4FFFD),
+    (0x50000, 0x5FFFD),
+    (0x60000, 0x6FFFD),
+    (0x70000, 0x7FFFD),
+    (0x80000, 0x8FFFD),
+    (0x90000, 0x9FFFD),
+    (0xA0000, 0xAFFFD),
+    (0xB0000, 0xBFFFD),
+    (0xC0000, 0xCFFFD),
+    (0xD0000, 0xDFFFD),
+    (0xE1000, 0xEFFFD),
+    (0xF0000, 0xFFFFD),
+    (0x100000, 0x10FFFD),
+]
+
+
+def encode(c):
+    retval = c
+    i = ord(c)
+    for low, high in escape_range:
+        if i < low:
+            break
+        if i >= low and i <= high:
+            retval = "".join(["%%%2X" % ord(o) for o in c.encode("utf-8")])
+            break
+    return retval
+
+
+def iri2uri(uri):
+    """Convert an IRI to a URI. Note that IRIs must be
+    passed as unicode strings. That is, do not utf-8 encode
+    the IRI before passing it into the function."""
+    if isinstance(uri, unicode):
+        (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri)
+        authority = authority.encode("idna")
+        # For each character in 'ucschar' or 'iprivate'
+        #  1. encode as utf-8
+        #  2. then %-encode each octet of that utf-8
+        uri = urlparse.urlunsplit((scheme, authority, path, query, fragment))
+        uri = "".join([encode(c) for c in uri])
+    return uri
+
+
+if __name__ == "__main__":
+    import unittest
+
+    class Test(unittest.TestCase):
+        def test_uris(self):
+            """Test that URIs are invariant under the transformation."""
+            invariant = [
+                u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
+                u"http://www.ietf.org/rfc/rfc2396.txt",
+                u"ldap://[2001:db8::7]/c=GB?objectClass?one",
+                u"mailto:John.Doe@example.com",
+                u"news:comp.infosystems.www.servers.unix",
+                u"tel:+1-816-555-1212",
+                u"telnet://192.0.2.16:80/",
+                u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
+            ]
+            for uri in invariant:
+                self.assertEqual(uri, iri2uri(uri))
+
+        def test_iri(self):
+            """Test that the right type of escaping is done for each part of the URI."""
+            self.assertEqual(
+                "http://xn--o3h.com/%E2%98%84",
+                iri2uri(u"http://\N{COMET}.com/\N{COMET}"),
+            )
+            self.assertEqual(
+                "http://bitworking.org/?fred=%E2%98%84",
+                iri2uri(u"http://bitworking.org/?fred=\N{COMET}"),
+            )
+            self.assertEqual(
+                "http://bitworking.org/#%E2%98%84",
+                iri2uri(u"http://bitworking.org/#\N{COMET}"),
+            )
+            self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
+            self.assertEqual(
+                "/fred?bar=%E2%98%9A#%E2%98%84",
+                iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"),
+            )
+            self.assertEqual(
+                "/fred?bar=%E2%98%9A#%E2%98%84",
+                iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")),
+            )
+            self.assertNotEqual(
+                "/fred?bar=%E2%98%9A#%E2%98%84",
+                iri2uri(
+                    u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode("utf-8")
+                ),
+            )
+
+    unittest.main()
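
The test cases above double as documentation; a condensed usage sketch, assuming the module is importable as `httplib2.iri2uri`:

    from httplib2.iri2uri import iri2uri

    # Unicode input: the authority is IDNA-encoded and any 'ucschar'/'iprivate'
    # characters are UTF-8 encoded and then %-escaped.
    print(iri2uri(u"http://\N{COMET}.com/\N{COMET}"))  # http://xn--o3h.com/%E2%98%84

    # Plain ASCII URIs are returned unchanged, and already-encoded byte strings
    # are passed through untouched.
    print(iri2uri(u"http://www.ietf.org/rfc/rfc2396.txt"))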
diff --git a/utils/frozen_chromite/third_party/python2/httplib2/socks.py b/utils/frozen_chromite/third_party/python2/httplib2/socks.py
new file mode 100644
index 0000000..5cef776
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python2/httplib2/socks.py
@@ -0,0 +1,510 @@
+"""SocksiPy - Python SOCKS module.
+
+Version 1.00
+
+Copyright 2006 Dan-Haim. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+3. Neither the name of Dan Haim nor the names of his contributors may be used
+   to endorse or promote products derived from this software without specific
+   prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
+OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+This module provides a standard socket-like interface for Python
+for tunneling connections through SOCKS proxies.
+
+Minor modifications made by Christopher Gilbert (http://motomastyle.com/) for
+use in PyLoris (http://pyloris.sourceforge.net/).
+
+Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
+mainly to merge bug fixes found in Sourceforge.
+"""
+
+import base64
+import socket
+import struct
+import sys
+
+if getattr(socket, "socket", None) is None:
+    raise ImportError("socket.socket missing, proxy support unusable")
+
+PROXY_TYPE_SOCKS4 = 1
+PROXY_TYPE_SOCKS5 = 2
+PROXY_TYPE_HTTP = 3
+PROXY_TYPE_HTTP_NO_TUNNEL = 4
+
+_defaultproxy = None
+_orgsocket = socket.socket
+
+
+class ProxyError(Exception):
+    pass
+
+
+class GeneralProxyError(ProxyError):
+    pass
+
+
+class Socks5AuthError(ProxyError):
+    pass
+
+
+class Socks5Error(ProxyError):
+    pass
+
+
+class Socks4Error(ProxyError):
+    pass
+
+
+class HTTPError(ProxyError):
+    pass
+
+
+_generalerrors = (
+    "success",
+    "invalid data",
+    "not connected",
+    "not available",
+    "bad proxy type",
+    "bad input",
+)
+
+_socks5errors = (
+    "succeeded",
+    "general SOCKS server failure",
+    "connection not allowed by ruleset",
+    "Network unreachable",
+    "Host unreachable",
+    "Connection refused",
+    "TTL expired",
+    "Command not supported",
+    "Address type not supported",
+    "Unknown error",
+)
+
+_socks5autherrors = (
+    "succeeded",
+    "authentication is required",
+    "all offered authentication methods were rejected",
+    "unknown username or invalid password",
+    "unknown error",
+)
+
+_socks4errors = (
+    "request granted",
+    "request rejected or failed",
+    "request rejected because SOCKS server cannot connect to identd on the client",
+    "request rejected because the client program and identd report different "
+    "user-ids",
+    "unknown error",
+)
+
+
+def setdefaultproxy(
+    proxytype=None, addr=None, port=None, rdns=True, username=None, password=None
+):
+    """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
+    Sets a default proxy which all further socksocket objects will use,
+    unless explicitly changed.
+    """
+    global _defaultproxy
+    _defaultproxy = (proxytype, addr, port, rdns, username, password)
+
+
+def wrapmodule(module):
+    """wrapmodule(module)
+
+    Attempts to replace a module's socket library with a SOCKS socket. Must set
+    a default proxy using setdefaultproxy(...) first.
+    This will only work on modules that import socket directly into the
+    namespace; most of the Python Standard Library falls into this category.
+    """
+    if _defaultproxy != None:
+        module.socket.socket = socksocket
+    else:
+        raise GeneralProxyError((4, "no proxy specified"))
+
+
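# Illustrative sketch of the module-level API above (assumes this file is
# importable as `socks`; the proxy address/port are placeholders, and urllib2
# is just one example of a module that imports socket directly into its
# namespace):
#
#     import socks
#     import urllib2
#     socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
#     socks.wrapmodule(urllib2)   # every socket urllib2 opens is now a socksocket
#     urllib2.urlopen("http://example.com/").read()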
+class socksocket(socket.socket):
+    """socksocket([family[, type[, proto]]]) -> socket object
+    Open a SOCKS enabled socket. The parameters are the same as
+    those of the standard socket init. In order for SOCKS to work,
+    you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
+    """
+
+    def __init__(
+        self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None
+    ):
+        _orgsocket.__init__(self, family, type, proto, _sock)
+        if _defaultproxy != None:
+            self.__proxy = _defaultproxy
+        else:
+            self.__proxy = (None, None, None, None, None, None)
+        self.__proxysockname = None
+        self.__proxypeername = None
+        self.__httptunnel = True
+
+    def __recvall(self, count):
+        """__recvall(count) -> data
+        Receive EXACTLY the number of bytes requested from the socket.
+        Blocks until the required number of bytes have been received.
+        """
+        data = self.recv(count)
+        while len(data) < count:
+            d = self.recv(count - len(data))
+            if not d:
+                raise GeneralProxyError((0, "connection closed unexpectedly"))
+            data = data + d
+        return data
+
+    def sendall(self, content, *args):
+        """ override socket.socket.sendall method to rewrite the header
+        for non-tunneling proxies if needed
+        """
+        if not self.__httptunnel:
+            content = self.__rewriteproxy(content)
+        return super(socksocket, self).sendall(content, *args)
+
+    def __rewriteproxy(self, header):
+        """ rewrite HTTP request headers to support non-tunneling proxies
+        (i.e. those which do not support the CONNECT method).
+        This only works for HTTP (not HTTPS) since HTTPS requires tunneling.
+        """
+        host, endpt = None, None
+        hdrs = header.split("\r\n")
+        for hdr in hdrs:
+            if hdr.lower().startswith("host:"):
+                host = hdr
+            elif hdr.lower().startswith("get") or hdr.lower().startswith("post"):
+                endpt = hdr
+        if host and endpt:
+            hdrs.remove(host)
+            hdrs.remove(endpt)
+            host = host.split(" ")[1]
+            endpt = endpt.split(" ")
+            if self.__proxy[4] != None and self.__proxy[5] != None:
+                hdrs.insert(0, self.__getauthheader())
+            hdrs.insert(0, "Host: %s" % host)
+            hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
+        return "\r\n".join(hdrs)
+
+    def __getauthheader(self):
+        auth = self.__proxy[4] + ":" + self.__proxy[5]
+        return "Proxy-Authorization: Basic " + base64.b64encode(auth)
+
+    def setproxy(
+        self,
+        proxytype=None,
+        addr=None,
+        port=None,
+        rdns=True,
+        username=None,
+        password=None,
+        headers=None,
+    ):
+        """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
+
+        Sets the proxy to be used.
+        proxytype -    The type of the proxy to be used. Supported types
+                are PROXY_TYPE_SOCKS4 (including socks4a), PROXY_TYPE_SOCKS5,
+                PROXY_TYPE_HTTP and PROXY_TYPE_HTTP_NO_TUNNEL.
+        addr -        The address of the server (IP or DNS).
+        port -        The port of the server. Defaults to 1080 for SOCKS
+                servers and 8080 for HTTP proxy servers.
+        rdns -        Should DNS queries be performed on the remote side
+                (rather than the local side). The default is True.
+                Note: This has no effect with SOCKS4 servers.
+        username -    Username to authenticate with to the server.
+                The default is no authentication.
+        password -    Password to authenticate with to the server.
+                Only relevant when username is also provided.
+        headers -     Additional or modified headers for the proxy connect
+                request.
+        """
+        self.__proxy = (proxytype, addr, port, rdns, username, password, headers)
+
+    def __negotiatesocks5(self, destaddr, destport):
+        """__negotiatesocks5(self,destaddr,destport)
+        Negotiates a connection through a SOCKS5 server.
+        """
+        # First we'll send the authentication packages we support.
+        if (self.__proxy[4] != None) and (self.__proxy[5] != None):
+            # The username/password details were supplied to the
+            # setproxy method so we support the USERNAME/PASSWORD
+            # authentication (in addition to the standard none).
+            self.sendall(struct.pack("BBBB", 0x05, 0x02, 0x00, 0x02))
+        else:
+            # No username/password were entered, therefore we
+            # only support connections with no authentication.
+            self.sendall(struct.pack("BBB", 0x05, 0x01, 0x00))
+        # We'll receive the server's response to determine which
+        # method was selected
+        chosenauth = self.__recvall(2)
+        if chosenauth[0:1] != chr(0x05).encode():
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        # Check the chosen authentication method
+        if chosenauth[1:2] == chr(0x00).encode():
+            # No authentication is required
+            pass
+        elif chosenauth[1:2] == chr(0x02).encode():
+            # Okay, we need to perform a basic username/password
+            # authentication.
+            self.sendall(
+                chr(0x01).encode()
+                + chr(len(self.__proxy[4]))
+                + self.__proxy[4]
+                + chr(len(self.__proxy[5]))
+                + self.__proxy[5]
+            )
+            authstat = self.__recvall(2)
+            if authstat[0:1] != chr(0x01).encode():
+                # Bad response
+                self.close()
+                raise GeneralProxyError((1, _generalerrors[1]))
+            if authstat[1:2] != chr(0x00).encode():
+                # Authentication failed
+                self.close()
+                raise Socks5AuthError((3, _socks5autherrors[3]))
+            # Authentication succeeded
+        else:
+            # Reaching here is always bad
+            self.close()
+            if chosenauth[1] == chr(0xFF).encode():
+                raise Socks5AuthError((2, _socks5autherrors[2]))
+            else:
+                raise GeneralProxyError((1, _generalerrors[1]))
+        # Now we can request the actual connection
+        req = struct.pack("BBB", 0x05, 0x01, 0x00)
+        # If the given destination address is an IP address, we'll
+        # use the IPv4 address request even if remote resolving was specified.
+        try:
+            ipaddr = socket.inet_aton(destaddr)
+            req = req + chr(0x01).encode() + ipaddr
+        except socket.error:
+            # It's not an IP address, so it's probably a DNS name.
+            if self.__proxy[3]:
+                # Resolve remotely
+                ipaddr = None
+                req = (
+                    req
+                    + chr(0x03).encode()
+                    + chr(len(destaddr)).encode()
+                    + destaddr.encode()
+                )
+            else:
+                # Resolve locally
+                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
+                req = req + chr(0x01).encode() + ipaddr
+        req = req + struct.pack(">H", destport)
+        self.sendall(req)
+        # Get the response
+        resp = self.__recvall(4)
+        if resp[0:1] != chr(0x05).encode():
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        elif resp[1:2] != chr(0x00).encode():
+            # Connection failed
+            self.close()
+            if ord(resp[1:2]) <= 8:
+                raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
+            else:
+                raise Socks5Error((9, _socks5errors[9]))
+        # Get the bound address/port
+        elif resp[3:4] == chr(0x01).encode():
+            boundaddr = self.__recvall(4)
+        elif resp[3:4] == chr(0x03).encode():
+            resp = resp + self.recv(1)
+            boundaddr = self.__recvall(ord(resp[4:5]))
+        else:
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        boundport = struct.unpack(">H", self.__recvall(2))[0]
+        self.__proxysockname = (boundaddr, boundport)
+        if ipaddr != None:
+            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
+        else:
+            self.__proxypeername = (destaddr, destport)
+
+    def getproxysockname(self):
+        """getsockname() -> address info
+        Returns the bound IP address and port number at the proxy.
+        """
+        return self.__proxysockname
+
+    def getproxypeername(self):
+        """getproxypeername() -> address info
+        Returns the IP and port number of the proxy.
+        """
+        return _orgsocket.getpeername(self)
+
+    def getpeername(self):
+        """getpeername() -> address info
+        Returns the IP address and port number of the destination
+        machine (note: getproxypeername returns the proxy)
+        """
+        return self.__proxypeername
+
+    def __negotiatesocks4(self, destaddr, destport):
+        """__negotiatesocks4(self,destaddr,destport)
+        Negotiates a connection through a SOCKS4 server.
+        """
+        # Check if the destination address provided is an IP address
+        rmtrslv = False
+        try:
+            ipaddr = socket.inet_aton(destaddr)
+        except socket.error:
+            # It's a DNS name. Check where it should be resolved.
+            if self.__proxy[3]:
+                ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
+                rmtrslv = True
+            else:
+                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
+        # Construct the request packet
+        req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
+        # The username parameter is considered userid for SOCKS4
+        if self.__proxy[4] != None:
+            req = req + self.__proxy[4]
+        req = req + chr(0x00).encode()
+        # DNS name if remote resolving is required
+        # NOTE: This is actually an extension to the SOCKS4 protocol
+        # called SOCKS4A and may not be supported in all cases.
+        if rmtrslv:
+            req = req + destaddr + chr(0x00).encode()
+        self.sendall(req)
+        # Get the response from the server
+        resp = self.__recvall(8)
+        if resp[0:1] != chr(0x00).encode():
+            # Bad data
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        if resp[1:2] != chr(0x5A).encode():
+            # Server returned an error
+            self.close()
+            if ord(resp[1:2]) in (91, 92, 93):
+                self.close()
+                raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
+            else:
+                raise Socks4Error((94, _socks4errors[4]))
+        # Get the bound address/port
+        self.__proxysockname = (
+            socket.inet_ntoa(resp[4:]),
+            struct.unpack(">H", resp[2:4])[0],
+        )
+        if rmtrslv != None:
+            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
+        else:
+            self.__proxypeername = (destaddr, destport)
+
+    def __negotiatehttp(self, destaddr, destport):
+        """__negotiatehttp(self,destaddr,destport)
+        Negotiates a connection through an HTTP server.
+        """
+        # If we need to resolve locally, we do this now
+        if not self.__proxy[3]:
+            addr = socket.gethostbyname(destaddr)
+        else:
+            addr = destaddr
+        headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
+        wrote_host_header = False
+        wrote_auth_header = False
+        if self.__proxy[6] != None:
+            for key, val in self.__proxy[6].iteritems():
+                headers += [key, ": ", val, "\r\n"]
+                wrote_host_header = key.lower() == "host"
+                wrote_auth_header = key.lower() == "proxy-authorization"
+        if not wrote_host_header:
+            headers += ["Host: ", destaddr, "\r\n"]
+        if not wrote_auth_header:
+            if self.__proxy[4] != None and self.__proxy[5] != None:
+                headers += [self.__getauthheader(), "\r\n"]
+        headers.append("\r\n")
+        self.sendall("".join(headers).encode())
+        # We read the response until we get the string "\r\n\r\n"
+        resp = self.recv(1)
+        while resp.find("\r\n\r\n".encode()) == -1:
+            resp = resp + self.recv(1)
+        # We just need the first line to check if the connection
+        # was successful
+        statusline = resp.splitlines()[0].split(" ".encode(), 2)
+        if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        try:
+            statuscode = int(statusline[1])
+        except ValueError:
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        if statuscode != 200:
+            self.close()
+            raise HTTPError((statuscode, statusline[2]))
+        self.__proxysockname = ("0.0.0.0", 0)
+        self.__proxypeername = (addr, destport)
+
+    def connect(self, destpair):
+        """connect(self, despair)
+        Connects to the specified destination through a proxy.
+        destpar - A tuple of the IP/DNS address and the port number.
+        (identical to socket's connect).
+        To select the proxy server use setproxy().
+        """
+        # Do a minimal input check first
+        if (
+            (not type(destpair) in (list, tuple))
+            or (len(destpair) < 2)
+            or (not isinstance(destpair[0], basestring))
+            or (type(destpair[1]) != int)
+        ):
+            raise GeneralProxyError((5, _generalerrors[5]))
+        if self.__proxy[0] == PROXY_TYPE_SOCKS5:
+            if self.__proxy[2] != None:
+                portnum = self.__proxy[2]
+            else:
+                portnum = 1080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            self.__negotiatesocks5(destpair[0], destpair[1])
+        elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
+            if self.__proxy[2] != None:
+                portnum = self.__proxy[2]
+            else:
+                portnum = 1080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            self.__negotiatesocks4(destpair[0], destpair[1])
+        elif self.__proxy[0] == PROXY_TYPE_HTTP:
+            if self.__proxy[2] != None:
+                portnum = self.__proxy[2]
+            else:
+                portnum = 8080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            self.__negotiatehttp(destpair[0], destpair[1])
+        elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
+            if self.__proxy[2] != None:
+                portnum = self.__proxy[2]
+            else:
+                portnum = 8080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            if destpair[1] == 443:
+                self.__negotiatehttp(destpair[0], destpair[1])
+            else:
+                self.__httptunnel = False
+        elif self.__proxy[0] == None:
+            _orgsocket.connect(self, (destpair[0], destpair[1]))
+        else:
+            raise GeneralProxyError((4, _generalerrors[4]))
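+
+# Usage sketch for the socket class above (host names and ports are
+# placeholders chosen purely for illustration): a socksocket is configured
+# with setproxy() and then used like an ordinary socket; connect() dispatches
+# to the SOCKS5, SOCKS4 or HTTP negotiation helper based on the proxy type.
+#
+#   s = socksocket()
+#   s.setproxy(PROXY_TYPE_HTTP, "proxy.example.com", 8080)
+#   s.connect(("www.example.org", 443))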
diff --git a/utils/frozen_chromite/third_party/python3/README.md b/utils/frozen_chromite/third_party/python3/README.md
new file mode 100644
index 0000000..ae1f8de
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python3/README.md
@@ -0,0 +1,6 @@
+This directory contains modules only for use in Python 3 (e.g.,
+httplib2 has separate versions for Python 2 vs. 3). Make sure to
+replicate the equivalent modules under `../python2/`.
+
+Move the contents of this directory to the parent directory when
+Python 2 support is dropped.
diff --git a/utils/frozen_chromite/third_party/python3/httplib2/LICENSE b/utils/frozen_chromite/third_party/python3/httplib2/LICENSE
new file mode 100644
index 0000000..ae38286
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python3/httplib2/LICENSE
@@ -0,0 +1,23 @@
+Httplib2 Software License
+
+Copyright (c) 2006 by Joe Gregorio
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of the Software,
+and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/utils/frozen_chromite/third_party/python3/httplib2/README.chromium b/utils/frozen_chromite/third_party/python3/httplib2/README.chromium
new file mode 100644
index 0000000..534bd20
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python3/httplib2/README.chromium
@@ -0,0 +1,16 @@
+Name: httplib2
+Short Name: httplib2
+URL: https://github.com/httplib2/httplib2
+Version: 0.13.1
+Revision: cfba1201736e0060a9cb82eab73ad49988ab7416
+License: MIT License
+
+Description:
+A comprehensive HTTP client library in Python.
+
+Local Modifications:
+Individual versions for Python2/3 were separated and put under
+third_party/python{2,3}/httplib2. Test and packaging code stripped.
+
+Notes:
+Required by oauth2client library.
diff --git a/utils/frozen_chromite/third_party/python3/httplib2/__init__.py b/utils/frozen_chromite/third_party/python3/httplib2/__init__.py
new file mode 100644
index 0000000..23992aa
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python3/httplib2/__init__.py
@@ -0,0 +1,2043 @@
+# -*- coding: utf-8 -*-
+"""Small, fast HTTP client library for Python."""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = [
+    "Thomas Broyer (t.broyer@ltgt.net)",
+    "James Antill",
+    "Xavier Verges Farrero",
+    "Jonathan Feinberg",
+    "Blair Zajac",
+    "Sam Ruby",
+    "Louis Nyffenegger",
+    "Mark Pilgrim",
+    "Alex Yu",
+]
+__license__ = "MIT"
+__version__ = '0.13.1'
+
+import base64
+import calendar
+import copy
+import email
+import email.feedparser
+from email import header
+import email.message
+import email.utils
+import errno
+from gettext import gettext as _
+import gzip
+from hashlib import md5 as _md5
+from hashlib import sha1 as _sha
+import hmac
+import http.client
+import io
+import os
+import random
+import re
+import socket
+import ssl
+import sys
+import time
+import urllib.parse
+import zlib
+
+try:
+    import socks
+except ImportError:
+    # TODO: remove this fallback and copypasted socksipy module upon py2/3 merge,
+    # idea is to have soft-dependency on any compatible module called socks
+    from . import socks
+from .iri2uri import iri2uri
+
+
+def has_timeout(timeout):
+    if hasattr(socket, "_GLOBAL_DEFAULT_TIMEOUT"):
+        return timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT
+    return timeout is not None
+
+
+__all__ = [
+    "debuglevel",
+    "FailedToDecompressContent",
+    "Http",
+    "HttpLib2Error",
+    "ProxyInfo",
+    "RedirectLimit",
+    "RedirectMissingLocation",
+    "Response",
+    "RETRIES",
+    "UnimplementedDigestAuthOptionError",
+    "UnimplementedHmacDigestAuthOptionError",
+]
+
+# The httplib debug level, set to a non-zero value to get debug output
+debuglevel = 0
+
+# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
+RETRIES = 2
+
+
+# All exceptions raised here derive from HttpLib2Error
+class HttpLib2Error(Exception):
+    pass
+
+
+# Some exceptions can be caught and optionally
+# be turned back into responses.
+class HttpLib2ErrorWithResponse(HttpLib2Error):
+    def __init__(self, desc, response, content):
+        self.response = response
+        self.content = content
+        HttpLib2Error.__init__(self, desc)
+
+
+class RedirectMissingLocation(HttpLib2ErrorWithResponse):
+    pass
+
+
+class RedirectLimit(HttpLib2ErrorWithResponse):
+    pass
+
+
+class FailedToDecompressContent(HttpLib2ErrorWithResponse):
+    pass
+
+
+class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse):
+    pass
+
+
+class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse):
+    pass
+
+
+class MalformedHeader(HttpLib2Error):
+    pass
+
+
+class RelativeURIError(HttpLib2Error):
+    pass
+
+
+class ServerNotFoundError(HttpLib2Error):
+    pass
+
+
+class ProxiesUnavailableError(HttpLib2Error):
+    pass
+
+
+# Open Items:
+# -----------
+
+# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
+
+# Pluggable cache storage (supports storing the cache in
+#   flat files by default. We need a plug-in architecture
+#   that can support Berkeley DB and Squid)
+
+# == Known Issues ==
+# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
+# Does not handle Cache-Control: max-stale
+# Does not use Age: headers when calculating cache freshness.
+
+# The number of redirections to follow before giving up.
+# Note that only GET redirects are automatically followed.
+# Will also honor 301 requests by saving that info and never
+# requesting that URI again.
+DEFAULT_MAX_REDIRECTS = 5
+
+# Which headers are hop-by-hop headers by default
+HOP_BY_HOP = [
+    "connection",
+    "keep-alive",
+    "proxy-authenticate",
+    "proxy-authorization",
+    "te",
+    "trailers",
+    "transfer-encoding",
+    "upgrade",
+]
+
+from httplib2 import certs
+CA_CERTS = certs.where()
+
+# PROTOCOL_TLS is python 3.5.3+. PROTOCOL_SSLv23 is deprecated.
+# Both PROTOCOL_TLS and PROTOCOL_SSLv23 are equivalent and mean:
+# > Selects the highest protocol version that both the client and server support.
+# > Despite the name, this option can select “TLS” protocols as well as “SSL”.
+# source: https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_TLS
+DEFAULT_TLS_VERSION = getattr(ssl, "PROTOCOL_TLS", None) or getattr(
+    ssl, "PROTOCOL_SSLv23"
+)
+
+def _build_ssl_context(
+    disable_ssl_certificate_validation, ca_certs, cert_file=None, key_file=None,
+    maximum_version=None, minimum_version=None,
+):
+    if not hasattr(ssl, "SSLContext"):
+        raise RuntimeError("httplib2 requires Python 3.2+ for ssl.SSLContext")
+
+    context = ssl.SSLContext(DEFAULT_TLS_VERSION)
+    context.verify_mode = (
+        ssl.CERT_NONE if disable_ssl_certificate_validation else ssl.CERT_REQUIRED
+    )
+
+    # SSLContext.maximum_version and SSLContext.minimum_version are python 3.7+.
+    # source: https://docs.python.org/3/library/ssl.html#ssl.SSLContext.maximum_version
+    if maximum_version is not None:
+        if hasattr(context, "maximum_version"):
+            context.maximum_version = getattr(ssl.TLSVersion, maximum_version)
+        else:
+            raise RuntimeError("setting tls_maximum_version requires Python 3.7 and OpenSSL 1.1 or newer")
+    if minimum_version is not None:
+        if hasattr(context, "minimum_version"):
+            context.minimum_version = getattr(ssl.TLSVersion, minimum_version)
+        else:
+            raise RuntimeError("setting tls_minimum_version requires Python 3.7 and OpenSSL 1.1 or newer")
+
+    # check_hostname requires python 3.4+
+    # we will perform the equivalent in HTTPSConnectionWithTimeout.connect() by calling ssl.match_hostname
+    # if check_hostname is not supported.
+    if hasattr(context, "check_hostname"):
+        context.check_hostname = not disable_ssl_certificate_validation
+
+    context.load_verify_locations(ca_certs)
+
+    if cert_file:
+        context.load_cert_chain(cert_file, key_file)
+
+    return context
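+
+# Illustrative call (assumes Python 3.7+ so that SSLContext.maximum_version is
+# available): _build_ssl_context(False, CA_CERTS, maximum_version="TLSv1_2")
+# returns a context that verifies peers against CA_CERTS and caps the
+# negotiated protocol at TLS 1.2.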
+
+
+def _get_end2end_headers(response):
+    hopbyhop = list(HOP_BY_HOP)
+    hopbyhop.extend([x.strip() for x in response.get("connection", "").split(",")])
+    return [header for header in list(response.keys()) if header not in hopbyhop]
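+
+# For example, given response headers {"connection": "x-foo", "x-foo": "1",
+# "content-type": "text/plain"}, the hop-by-hop set is extended with "x-foo",
+# so only ["content-type"] is returned (header names are illustrative).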
+
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+
+def parse_uri(uri):
+    """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    groups = URI.match(uri).groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
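+
+# e.g. parse_uri("https://host.example/p?q=1#f") returns
+# ("https", "host.example", "/p", "q=1", "f")  (URI chosen for illustration).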
+
+
+def urlnorm(uri):
+    (scheme, authority, path, query, fragment) = parse_uri(uri)
+    if not scheme or not authority:
+        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
+    authority = authority.lower()
+    scheme = scheme.lower()
+    if not path:
+        path = "/"
+    # Could do syntax based normalization of the URI before
+    # computing the digest. See Section 6.2.2 of Std 66.
+    request_uri = query and "?".join([path, query]) or path
+    scheme = scheme.lower()
+    defrag_uri = scheme + "://" + authority + request_uri
+    return scheme, authority, request_uri, defrag_uri
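+
+# For example, urlnorm("HTTP://Example.ORG/a/b?x=1#frag") returns
+# ("http", "example.org", "/a/b?x=1", "http://example.org/a/b?x=1"):
+# scheme and authority are lower-cased and the fragment is dropped from
+# defrag_uri (URI chosen for illustration).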
+
+
+# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
+re_url_scheme = re.compile(r"^\w+://")
+re_unsafe = re.compile(r"[^\w\-_.()=!]+", re.ASCII)
+
+
+def safename(filename):
+    """Return a filename suitable for the cache.
+    Strips dangerous and common characters to create a filename we
+    can use to store the cache in.
+    """
+    if isinstance(filename, bytes):
+        filename_bytes = filename
+        filename = filename.decode("utf-8")
+    else:
+        filename_bytes = filename.encode("utf-8")
+    filemd5 = _md5(filename_bytes).hexdigest()
+    filename = re_url_scheme.sub("", filename)
+    filename = re_unsafe.sub("", filename)
+
+    # limit length of filename (vital for Windows)
+    # https://github.com/httplib2/httplib2/pull/74
+    # C:\Users\    <username>    \AppData\Local\Temp\  <safe_filename>  ,   <md5>
+    #   9 chars + max 104 chars  +     20 chars      +       x       +  1  +  32  = max 259 chars
+    # Thus max safe filename x = 93 chars. Let it be 90 to make a round sum:
+    filename = filename[:90]
+
+    return ",".join((filename, filemd5))
+
+
+NORMALIZE_SPACE = re.compile(r"(?:\r\n)?[ \t]+")
+
+
+def _normalize_headers(headers):
+    return dict(
+        [
+            (
+                _convert_byte_str(key).lower(),
+                NORMALIZE_SPACE.sub(" ", _convert_byte_str(value)).strip(),
+            )
+            for (key, value) in headers.items()
+        ]
+    )
+
+
+def _convert_byte_str(s):
+    if not isinstance(s, str):
+        return str(s, "utf-8")
+    return s
+
+
+def _parse_cache_control(headers):
+    retval = {}
+    if "cache-control" in headers:
+        parts = headers["cache-control"].split(",")
+        parts_with_args = [
+            tuple([x.strip().lower() for x in part.split("=", 1)])
+            for part in parts
+            if -1 != part.find("=")
+        ]
+        parts_wo_args = [
+            (name.strip().lower(), 1) for name in parts if -1 == name.find("=")
+        ]
+        retval = dict(parts_with_args + parts_wo_args)
+    return retval
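+
+# For example, headers of {"cache-control": "no-cache, max-age=7200"} parse to
+# {"no-cache": 1, "max-age": "7200"}; directives without a value map to 1 and
+# values stay strings (header chosen for illustration).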
+
+
+# Whether to use a strict mode to parse WWW-Authenticate headers
+# Might lead to bad results in case of ill-formed header value,
+# so disabled by default, falling back to relaxed parsing.
+# Set to true to turn on; useful for testing servers.
+USE_WWW_AUTH_STRICT_PARSING = 0
+
+# In regex below:
+#    [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+             matches a "token" as defined by HTTP
+#    "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?"    matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
+# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
+#    \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
+WWW_AUTH_STRICT = re.compile(
+    r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$"
+)
+WWW_AUTH_RELAXED = re.compile(
+    r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$"
+)
+UNQUOTE_PAIRS = re.compile(r"\\(.)")
+
+
+def _parse_www_authenticate(headers, headername="www-authenticate"):
+    """Returns a dictionary of dictionaries, one dict
+    per auth_scheme."""
+    retval = {}
+    if headername in headers:
+        try:
+            authenticate = headers[headername].strip()
+            www_auth = (
+                USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
+            )
+            while authenticate:
+                # Break off the scheme at the beginning of the line
+                if headername == "authentication-info":
+                    (auth_scheme, the_rest) = ("digest", authenticate)
+                else:
+                    (auth_scheme, the_rest) = authenticate.split(" ", 1)
+                # Now loop over all the key value pairs that come after the scheme,
+                # being careful not to roll into the next scheme
+                match = www_auth.search(the_rest)
+                auth_params = {}
+                while match:
+                    if match and len(match.groups()) == 3:
+                        (key, value, the_rest) = match.groups()
+                        auth_params[key.lower()] = UNQUOTE_PAIRS.sub(
+                            r"\1", value
+                        )  # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
+                    match = www_auth.search(the_rest)
+                retval[auth_scheme.lower()] = auth_params
+                authenticate = the_rest.strip()
+        except ValueError:
+            raise MalformedHeader("WWW-Authenticate")
+    return retval
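+
+# For example, a header value of 'Digest realm="me@example.org", qop="auth"'
+# parses to {"digest": {"realm": "me@example.org", "qop": "auth"}}
+# (challenge chosen for illustration).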
+
+
+def _entry_disposition(response_headers, request_headers):
+    """Determine freshness from the Date, Expires and Cache-Control headers.
+
+    We don't handle the following:
+
+    1. Cache-Control: max-stale
+    2. Age: headers are not used in the calculations.
+
+    Note that this algorithm is simpler than you might think
+    because we are operating as a private (non-shared) cache.
+    This lets us ignore 's-maxage'. We can also ignore
+    'proxy-invalidate' since we aren't a proxy.
+    As a design decision we will never return a stale document as
+    fresh, and thus the non-implementation of 'max-stale'.
+    This also lets us safely ignore 'must-revalidate'
+    since we operate as if every server has sent 'must-revalidate'.
+    Since we are private we get to ignore both 'public' and
+    'private' parameters. We also ignore 'no-transform' since
+    we don't do any transformations.
+    The 'no-store' parameter is handled at a higher level.
+    So the only Cache-Control parameters we look at are:
+
+    no-cache
+    only-if-cached
+    max-age
+    min-fresh
+    """
+
+    retval = "STALE"
+    cc = _parse_cache_control(request_headers)
+    cc_response = _parse_cache_control(response_headers)
+
+    if (
+        "pragma" in request_headers
+        and request_headers["pragma"].lower().find("no-cache") != -1
+    ):
+        retval = "TRANSPARENT"
+        if "cache-control" not in request_headers:
+            request_headers["cache-control"] = "no-cache"
+    elif "no-cache" in cc:
+        retval = "TRANSPARENT"
+    elif "no-cache" in cc_response:
+        retval = "STALE"
+    elif "only-if-cached" in cc:
+        retval = "FRESH"
+    elif "date" in response_headers:
+        date = calendar.timegm(email.utils.parsedate_tz(response_headers["date"]))
+        now = time.time()
+        current_age = max(0, now - date)
+        if "max-age" in cc_response:
+            try:
+                freshness_lifetime = int(cc_response["max-age"])
+            except ValueError:
+                freshness_lifetime = 0
+        elif "expires" in response_headers:
+            expires = email.utils.parsedate_tz(response_headers["expires"])
+            if None == expires:
+                freshness_lifetime = 0
+            else:
+                freshness_lifetime = max(0, calendar.timegm(expires) - date)
+        else:
+            freshness_lifetime = 0
+        if "max-age" in cc:
+            try:
+                freshness_lifetime = int(cc["max-age"])
+            except ValueError:
+                freshness_lifetime = 0
+        if "min-fresh" in cc:
+            try:
+                min_fresh = int(cc["min-fresh"])
+            except ValueError:
+                min_fresh = 0
+            current_age += min_fresh
+        if freshness_lifetime > current_age:
+            retval = "FRESH"
+    return retval
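+
+# Worked example with illustrative values: for a response carrying a Date of
+# 60 seconds ago and "cache-control: max-age=3600", and a request with no
+# Pragma/Cache-Control headers, current_age is ~60 and freshness_lifetime is
+# 3600, so "FRESH" is returned; with "max-age=30" the result would be "STALE".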
+
+
+def _decompressContent(response, new_content):
+    content = new_content
+    try:
+        encoding = response.get("content-encoding", None)
+        if encoding in ["gzip", "deflate"]:
+            if encoding == "gzip":
+                content = gzip.GzipFile(fileobj=io.BytesIO(new_content)).read()
+            if encoding == "deflate":
+                content = zlib.decompress(content, -zlib.MAX_WBITS)
+            response["content-length"] = str(len(content))
+            # Record the historical presence of the encoding in a way that won't interfere.
+            response["-content-encoding"] = response["content-encoding"]
+            del response["content-encoding"]
+    except (IOError, zlib.error):
+        content = ""
+        raise FailedToDecompressContent(
+            _("Content purported to be compressed with %s but failed to decompress.")
+            % response.get("content-encoding"),
+            response,
+            content,
+        )
+    return content
+
+
+def _bind_write_headers(msg):
+    def _write_headers(self):
+        # Self refers to the Generator object.
+        for h, v in msg.items():
+            print("%s:" % h, end=" ", file=self._fp)
+            if isinstance(v, header.Header):
+                print(v.encode(maxlinelen=self._maxheaderlen), file=self._fp)
+            else:
+                # email.Header got lots of smarts, so use it.
+                headers = header.Header(
+                    v, maxlinelen=self._maxheaderlen, charset="utf-8", header_name=h
+                )
+                print(headers.encode(), file=self._fp)
+        # A blank line always separates headers from body.
+        print(file=self._fp)
+
+    return _write_headers
+
+
+def _updateCache(request_headers, response_headers, content, cache, cachekey):
+    if cachekey:
+        cc = _parse_cache_control(request_headers)
+        cc_response = _parse_cache_control(response_headers)
+        if "no-store" in cc or "no-store" in cc_response:
+            cache.delete(cachekey)
+        else:
+            info = email.message.Message()
+            for key, value in response_headers.items():
+                if key not in ["status", "content-encoding", "transfer-encoding"]:
+                    info[key] = value
+
+            # Add annotations to the cache to indicate what headers
+            # are variant for this request.
+            vary = response_headers.get("vary", None)
+            if vary:
+                vary_headers = vary.lower().replace(" ", "").split(",")
+                for header in vary_headers:
+                    key = "-varied-%s" % header
+                    try:
+                        info[key] = request_headers[header]
+                    except KeyError:
+                        pass
+
+            status = response_headers.status
+            if status == 304:
+                status = 200
+
+            status_header = "status: %d\r\n" % status
+
+            try:
+                header_str = info.as_string()
+            except UnicodeEncodeError:
+                setattr(info, "_write_headers", _bind_write_headers(info))
+                header_str = info.as_string()
+
+            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
+            text = b"".join(
+                [status_header.encode("utf-8"), header_str.encode("utf-8"), content]
+            )
+
+            cache.set(cachekey, text)
+
+
+def _cnonce():
+    dig = _md5(
+        (
+            "%s:%s"
+            % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])
+        ).encode("utf-8")
+    ).hexdigest()
+    return dig[:16]
+
+
+def _wsse_username_token(cnonce, iso_now, password):
+    return base64.b64encode(
+        _sha(("%s%s%s" % (cnonce, iso_now, password)).encode("utf-8")).digest()
+    ).strip()
+
+
+# For credentials we need two things, first
+# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.)
+# Then we also need a list of URIs that have already demanded authentication
+# That list is tricky since sub-URIs can take the same auth, or the
+# auth scheme may change as you descend the tree.
+# So we also need each Auth instance to be able to tell us
+# how close to the 'top' it is.
+
+
+class Authentication(object):
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        self.path = path
+        self.host = host
+        self.credentials = credentials
+        self.http = http
+
+    def depth(self, request_uri):
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        return request_uri[len(self.path) :].count("/")
+
+    def inscope(self, host, request_uri):
+        # XXX Should we normalize the request_uri?
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        return (host == self.host) and path.startswith(self.path)
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header. Override this in sub-classes."""
+        pass
+
+    def response(self, response, content):
+        """Gives us a chance to update with new nonces
+        or such returned from the last authorized response.
+        Override this in sub-classes if necessary.
+
+        Return TRUE if the request is to be retried, for
+        example Digest may return stale=true.
+        """
+        return False
+
+    def __eq__(self, auth):
+        return False
+
+    def __ne__(self, auth):
+        return True
+
+    def __lt__(self, auth):
+        return True
+
+    def __gt__(self, auth):
+        return False
+
+    def __le__(self, auth):
+        return True
+
+    def __ge__(self, auth):
+        return False
+
+    def __bool__(self):
+        return True
+
+
+class BasicAuthentication(Authentication):
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        Authentication.__init__(
+            self, credentials, host, request_uri, headers, response, content, http
+        )
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers["authorization"] = "Basic " + base64.b64encode(
+            ("%s:%s" % self.credentials).encode("utf-8")
+        ).strip().decode("utf-8")
+
+
+class DigestAuthentication(Authentication):
+    """Only do qop='auth' and MD5, since that
+    is all Apache currently implements"""
+
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        Authentication.__init__(
+            self, credentials, host, request_uri, headers, response, content, http
+        )
+        challenge = _parse_www_authenticate(response, "www-authenticate")
+        self.challenge = challenge["digest"]
+        qop = self.challenge.get("qop", "auth")
+        self.challenge["qop"] = (
+            ("auth" in [x.strip() for x in qop.split()]) and "auth" or None
+        )
+        if self.challenge["qop"] is None:
+            raise UnimplementedDigestAuthOptionError(
+                _("Unsupported value for qop: %s." % qop)
+            )
+        self.challenge["algorithm"] = self.challenge.get("algorithm", "MD5").upper()
+        if self.challenge["algorithm"] != "MD5":
+            raise UnimplementedDigestAuthOptionError(
+                _("Unsupported value for algorithm: %s." % self.challenge["algorithm"])
+            )
+        self.A1 = "".join(
+            [
+                self.credentials[0],
+                ":",
+                self.challenge["realm"],
+                ":",
+                self.credentials[1],
+            ]
+        )
+        self.challenge["nc"] = 1
+
+    def request(self, method, request_uri, headers, content, cnonce=None):
+        """Modify the request headers"""
+        H = lambda x: _md5(x.encode("utf-8")).hexdigest()
+        KD = lambda s, d: H("%s:%s" % (s, d))
+        A2 = "".join([method, ":", request_uri])
+        self.challenge["cnonce"] = cnonce or _cnonce()
+        request_digest = '"%s"' % KD(
+            H(self.A1),
+            "%s:%s:%s:%s:%s"
+            % (
+                self.challenge["nonce"],
+                "%08x" % self.challenge["nc"],
+                self.challenge["cnonce"],
+                self.challenge["qop"],
+                H(A2),
+            ),
+        )
+        headers["authorization"] = (
+            'Digest username="%s", realm="%s", nonce="%s", '
+            'uri="%s", algorithm=%s, response=%s, qop=%s, '
+            'nc=%08x, cnonce="%s"'
+        ) % (
+            self.credentials[0],
+            self.challenge["realm"],
+            self.challenge["nonce"],
+            request_uri,
+            self.challenge["algorithm"],
+            request_digest,
+            self.challenge["qop"],
+            self.challenge["nc"],
+            self.challenge["cnonce"],
+        )
+        if self.challenge.get("opaque"):
+            headers["authorization"] += ', opaque="%s"' % self.challenge["opaque"]
+        self.challenge["nc"] += 1
+
+    def response(self, response, content):
+        if "authentication-info" not in response:
+            challenge = _parse_www_authenticate(response, "www-authenticate").get(
+                "digest", {}
+            )
+            if "true" == challenge.get("stale"):
+                self.challenge["nonce"] = challenge["nonce"]
+                self.challenge["nc"] = 1
+                return True
+        else:
+            updated_challenge = _parse_www_authenticate(
+                response, "authentication-info"
+            ).get("digest", {})
+
+            if "nextnonce" in updated_challenge:
+                self.challenge["nonce"] = updated_challenge["nextnonce"]
+                self.challenge["nc"] = 1
+        return False
+
+
+class HmacDigestAuthentication(Authentication):
+    """Adapted from Robert Sayre's code and DigestAuthentication above."""
+
+    __author__ = "Thomas Broyer (t.broyer@ltgt.net)"
+
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        Authentication.__init__(
+            self, credentials, host, request_uri, headers, response, content, http
+        )
+        challenge = _parse_www_authenticate(response, "www-authenticate")
+        self.challenge = challenge["hmacdigest"]
+        # TODO: self.challenge['domain']
+        self.challenge["reason"] = self.challenge.get("reason", "unauthorized")
+        if self.challenge["reason"] not in ["unauthorized", "integrity"]:
+            self.challenge["reason"] = "unauthorized"
+        self.challenge["salt"] = self.challenge.get("salt", "")
+        if not self.challenge.get("snonce"):
+            raise UnimplementedHmacDigestAuthOptionError(
+                _("The challenge doesn't contain a server nonce, or this one is empty.")
+            )
+        self.challenge["algorithm"] = self.challenge.get("algorithm", "HMAC-SHA-1")
+        if self.challenge["algorithm"] not in ["HMAC-SHA-1", "HMAC-MD5"]:
+            raise UnimplementedHmacDigestAuthOptionError(
+                _("Unsupported value for algorithm: %s." % self.challenge["algorithm"])
+            )
+        self.challenge["pw-algorithm"] = self.challenge.get("pw-algorithm", "SHA-1")
+        if self.challenge["pw-algorithm"] not in ["SHA-1", "MD5"]:
+            raise UnimplementedHmacDigestAuthOptionError(
+                _(
+                    "Unsupported value for pw-algorithm: %s."
+                    % self.challenge["pw-algorithm"]
+                )
+            )
+        if self.challenge["algorithm"] == "HMAC-MD5":
+            self.hashmod = _md5
+        else:
+            self.hashmod = _sha
+        if self.challenge["pw-algorithm"] == "MD5":
+            self.pwhashmod = _md5
+        else:
+            self.pwhashmod = _sha
+        self.key = "".join(
+            [
+                self.credentials[0],
+                ":",
+                self.pwhashmod.new(
+                    "".join([self.credentials[1], self.challenge["salt"]])
+                )
+                .hexdigest()
+                .lower(),
+                ":",
+                self.challenge["realm"],
+            ]
+        )
+        self.key = self.pwhashmod.new(self.key).hexdigest().lower()
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers"""
+        keys = _get_end2end_headers(headers)
+        keylist = "".join(["%s " % k for k in keys])
+        headers_val = "".join([headers[k] for k in keys])
+        created = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
+        cnonce = _cnonce()
+        request_digest = "%s:%s:%s:%s:%s" % (
+            method,
+            request_uri,
+            cnonce,
+            self.challenge["snonce"],
+            headers_val,
+        )
+        request_digest = (
+            hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
+        )
+        headers["authorization"] = (
+            'HMACDigest username="%s", realm="%s", snonce="%s",'
+            ' cnonce="%s", uri="%s", created="%s", '
+            'response="%s", headers="%s"'
+        ) % (
+            self.credentials[0],
+            self.challenge["realm"],
+            self.challenge["snonce"],
+            cnonce,
+            request_uri,
+            created,
+            request_digest,
+            keylist,
+        )
+
+    def response(self, response, content):
+        challenge = _parse_www_authenticate(response, "www-authenticate").get(
+            "hmacdigest", {}
+        )
+        if challenge.get("reason") in ["integrity", "stale"]:
+            return True
+        return False
+
+
+class WsseAuthentication(Authentication):
+    """This is thinly tested and should not be relied upon.
+    At this time there isn't any third party server to test against.
+    Blogger and TypePad implemented this algorithm at one point
+    but Blogger has since switched to Basic over HTTPS and
+    TypePad has implemented it wrong, by never issuing a 401
+    challenge but instead requiring your client to telepathically know that
+    their endpoint is expecting WSSE profile="UsernameToken"."""
+
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        Authentication.__init__(
+            self, credentials, host, request_uri, headers, response, content, http
+        )
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers["authorization"] = 'WSSE profile="UsernameToken"'
+        iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
+        cnonce = _cnonce()
+        password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
+        headers["X-WSSE"] = (
+            'UsernameToken Username="%s", PasswordDigest="%s", '
+            'Nonce="%s", Created="%s"'
+        ) % (self.credentials[0], password_digest, cnonce, iso_now)
+
+
+class GoogleLoginAuthentication(Authentication):
+    def __init__(
+        self, credentials, host, request_uri, headers, response, content, http
+    ):
+        from urllib.parse import urlencode
+
+        Authentication.__init__(
+            self, credentials, host, request_uri, headers, response, content, http
+        )
+        challenge = _parse_www_authenticate(response, "www-authenticate")
+        service = challenge["googlelogin"].get("service", "xapi")
+        # Blogger actually returns the service in the challenge
+        # For the rest we guess based on the URI
+        if service == "xapi" and request_uri.find("calendar") > 0:
+            service = "cl"
+        # No point in guessing Base or Spreadsheet
+        # elif request_uri.find("spreadsheets") > 0:
+        #    service = "wise"
+
+        auth = dict(
+            Email=credentials[0],
+            Passwd=credentials[1],
+            service=service,
+            source=headers["user-agent"],
+        )
+        resp, content = self.http.request(
+            "https://www.google.com/accounts/ClientLogin",
+            method="POST",
+            body=urlencode(auth),
+            headers={"Content-Type": "application/x-www-form-urlencoded"},
+        )
+        lines = content.split("\n")
+        d = dict([tuple(line.split("=", 1)) for line in lines if line])
+        if resp.status == 403:
+            self.Auth = ""
+        else:
+            self.Auth = d["Auth"]
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers["authorization"] = "GoogleLogin Auth=" + self.Auth
+
+
+AUTH_SCHEME_CLASSES = {
+    "basic": BasicAuthentication,
+    "wsse": WsseAuthentication,
+    "digest": DigestAuthentication,
+    "hmacdigest": HmacDigestAuthentication,
+    "googlelogin": GoogleLoginAuthentication,
+}
+
+AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
+
+
+class FileCache(object):
+    """Uses a local directory as a store for cached files.
+    Not really safe to use if multiple threads or processes are going to
+    be running on the same cache.
+    """
+
+    def __init__(
+        self, cache, safe=safename
+    ):  # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
+        self.cache = cache
+        self.safe = safe
+        if not os.path.exists(cache):
+            os.makedirs(self.cache)
+
+    def get(self, key):
+        retval = None
+        cacheFullPath = os.path.join(self.cache, self.safe(key))
+        try:
+            f = open(cacheFullPath, "rb")
+            retval = f.read()
+            f.close()
+        except IOError:
+            pass
+        return retval
+
+    def set(self, key, value):
+        cacheFullPath = os.path.join(self.cache, self.safe(key))
+        f = open(cacheFullPath, "wb")
+        f.write(value)
+        f.close()
+
+    def delete(self, key):
+        cacheFullPath = os.path.join(self.cache, self.safe(key))
+        if os.path.exists(cacheFullPath):
+            os.remove(cacheFullPath)
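+
+# Usage sketch for FileCache (directory name is a placeholder):
+#
+#   cache = FileCache(".httplib2-cache")
+#   cache.set("http://example.org/", b"cached bytes")
+#   assert cache.get("http://example.org/") == b"cached bytes"
+#   cache.delete("http://example.org/")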
+
+
+class Credentials(object):
+    def __init__(self):
+        self.credentials = []
+
+    def add(self, name, password, domain=""):
+        self.credentials.append((domain.lower(), name, password))
+
+    def clear(self):
+        self.credentials = []
+
+    def iter(self, domain):
+        for (cdomain, name, password) in self.credentials:
+            if cdomain == "" or domain == cdomain:
+                yield (name, password)
+
+
+class KeyCerts(Credentials):
+    """Identical to Credentials except that
+    name/password are mapped to key/cert."""
+
+    pass
+
+
+class AllHosts(object):
+    pass
+
+
+class ProxyInfo(object):
+    """Collect information required to use a proxy."""
+
+    bypass_hosts = ()
+
+    def __init__(
+        self,
+        proxy_type,
+        proxy_host,
+        proxy_port,
+        proxy_rdns=True,
+        proxy_user=None,
+        proxy_pass=None,
+        proxy_headers=None,
+    ):
+        """Args:
+
+          proxy_type: The type of proxy server.  This must be set to one of
+          socks.PROXY_TYPE_XXX constants.  For example:  p =
+          ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost',
+          proxy_port=8000)
+          proxy_host: The hostname or IP address of the proxy server.
+          proxy_port: The port that the proxy server is running on.
+          proxy_rdns: If True (default), DNS queries will not be performed
+          locally, and instead, handed to the proxy to resolve.  This is useful
+          if the network does not allow resolution of non-local names. In
+          httplib2 0.9 and earlier, this defaulted to False.
+          proxy_user: The username used to authenticate with the proxy server.
+          proxy_pass: The password used to authenticate with the proxy server.
+          proxy_headers: Additional or modified headers for the proxy connect
+          request.
+        """
+        if isinstance(proxy_user, str):
+            proxy_user = proxy_user.encode()
+        if isinstance(proxy_pass, str):
+            proxy_pass = proxy_pass.encode()
+        self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass, self.proxy_headers = (
+            proxy_type,
+            proxy_host,
+            proxy_port,
+            proxy_rdns,
+            proxy_user,
+            proxy_pass,
+            proxy_headers,
+        )
+
+    def astuple(self):
+        return (
+            self.proxy_type,
+            self.proxy_host,
+            self.proxy_port,
+            self.proxy_rdns,
+            self.proxy_user,
+            self.proxy_pass,
+            self.proxy_headers,
+        )
+
+    def isgood(self):
+        return socks and (self.proxy_host != None) and (self.proxy_port != None)
+
+    def applies_to(self, hostname):
+        return not self.bypass_host(hostname)
+
+    def bypass_host(self, hostname):
+        """Has this host been excluded from the proxy config"""
+        if self.bypass_hosts is AllHosts:
+            return True
+
+        hostname = "." + hostname.lstrip(".")
+        for skip_name in self.bypass_hosts:
+            # *.suffix
+            if skip_name.startswith(".") and hostname.endswith(skip_name):
+                return True
+            # exact match
+            if hostname == "." + skip_name:
+                return True
+        return False
+
+    def __repr__(self):
+        return (
+            "<ProxyInfo type={p.proxy_type} "
+            "host:port={p.proxy_host}:{p.proxy_port} rdns={p.proxy_rdns}"
+            + " user={p.proxy_user} headers={p.proxy_headers}>"
+        ).format(p=self)
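+
+# Example of the bypass logic above, with hypothetical hosts: given
+# bypass_hosts = (".corp.example.com", "localhost"), bypass_host() returns True
+# for "db.corp.example.com" and "localhost" but False for "example.com", so
+# only the last of these would go through the proxy (applies_to() is True).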
+
+
+def proxy_info_from_environment(method="http"):
+    """Read proxy info from the environment variables.
+    """
+    if method not in ("http", "https"):
+        return
+
+    env_var = method + "_proxy"
+    url = os.environ.get(env_var, os.environ.get(env_var.upper()))
+    if not url:
+        return
+    return proxy_info_from_url(url, method, noproxy=None)
+
+
+def proxy_info_from_url(url, method="http", noproxy=None):
+    """Construct a ProxyInfo from a URL (such as http_proxy env var)
+    """
+    url = urllib.parse.urlparse(url)
+    username = None
+    password = None
+    port = None
+    if "@" in url[1]:
+        ident, host_port = url[1].split("@", 1)
+        if ":" in ident:
+            username, password = ident.split(":", 1)
+        else:
+            password = ident
+    else:
+        host_port = url[1]
+    if ":" in host_port:
+        host, port = host_port.split(":", 1)
+    else:
+        host = host_port
+
+    if port:
+        port = int(port)
+    else:
+        port = dict(https=443, http=80)[method]
+
+    proxy_type = 3  # socks.PROXY_TYPE_HTTP
+    pi = ProxyInfo(
+        proxy_type=proxy_type,
+        proxy_host=host,
+        proxy_port=port,
+        proxy_user=username or None,
+        proxy_pass=password or None,
+        proxy_headers=None,
+    )
+
+    bypass_hosts = []
+    # If not given an explicit noproxy value, respect values in env vars.
+    if noproxy is None:
+        noproxy = os.environ.get("no_proxy", os.environ.get("NO_PROXY", ""))
+    # Special case: A single '*' character means all hosts should be bypassed.
+    if noproxy == "*":
+        bypass_hosts = AllHosts
+    elif noproxy.strip():
+        bypass_hosts = noproxy.split(",")
+        bypass_hosts = tuple(filter(bool, bypass_hosts))  # To exclude empty string.
+
+    pi.bypass_hosts = bypass_hosts
+    return pi
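+
+# For example, proxy_info_from_url("http://user:secret@10.0.0.1:3128",
+# noproxy="localhost") yields an HTTP ProxyInfo for 10.0.0.1:3128 with
+# credentials b"user"/b"secret" and bypass_hosts == ("localhost",)
+# (URL and credentials are placeholders).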
+
+
+class HTTPConnectionWithTimeout(http.client.HTTPConnection):
+    """HTTPConnection subclass that supports timeouts
+
+    HTTPConnection subclass that supports timeouts
+
+    All timeouts are in seconds. If None is passed for timeout then
+    Python's default timeout for sockets will be used. See for example
+    the docs of socket.setdefaulttimeout():
+    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+    """
+
+    def __init__(self, host, port=None, timeout=None, proxy_info=None):
+        http.client.HTTPConnection.__init__(self, host, port=port, timeout=timeout)
+
+        self.proxy_info = proxy_info
+        if proxy_info and not isinstance(proxy_info, ProxyInfo):
+            self.proxy_info = proxy_info("http")
+
+    def connect(self):
+        """Connect to the host and port specified in __init__."""
+        if self.proxy_info and socks is None:
+            raise ProxiesUnavailableError(
+                "Proxy support missing but proxy use was requested!"
+            )
+        if self.proxy_info and self.proxy_info.isgood() and self.proxy_info.applies_to(self.host):
+            use_proxy = True
+            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = (
+                self.proxy_info.astuple()
+            )
+
+            host = proxy_host
+            port = proxy_port
+        else:
+            use_proxy = False
+
+            host = self.host
+            port = self.port
+            proxy_type = None
+
+        socket_err = None
+
+        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
+            af, socktype, proto, canonname, sa = res
+            try:
+                if use_proxy:
+                    self.sock = socks.socksocket(af, socktype, proto)
+                    self.sock.setproxy(
+                        proxy_type,
+                        proxy_host,
+                        proxy_port,
+                        proxy_rdns,
+                        proxy_user,
+                        proxy_pass,
+                    )
+                else:
+                    self.sock = socket.socket(af, socktype, proto)
+                    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+                if has_timeout(self.timeout):
+                    self.sock.settimeout(self.timeout)
+                if self.debuglevel > 0:
+                    print(
+                        "connect: ({0}, {1}) ************".format(self.host, self.port)
+                    )
+                    if use_proxy:
+                        print(
+                            "proxy: {0} ************".format(
+                                str(
+                                    (
+                                        proxy_host,
+                                        proxy_port,
+                                        proxy_rdns,
+                                        proxy_user,
+                                        proxy_pass,
+                                        proxy_headers,
+                                    )
+                                )
+                            )
+                        )
+
+                self.sock.connect((self.host, self.port) + sa[2:])
+            except socket.error as e:
+                socket_err = e
+                if self.debuglevel > 0:
+                    print("connect fail: ({0}, {1})".format(self.host, self.port))
+                    if use_proxy:
+                        print(
+                            "proxy: {0}".format(
+                                str(
+                                    (
+                                        proxy_host,
+                                        proxy_port,
+                                        proxy_rdns,
+                                        proxy_user,
+                                        proxy_pass,
+                                        proxy_headers,
+                                    )
+                                )
+                            )
+                        )
+                if self.sock:
+                    self.sock.close()
+                self.sock = None
+                continue
+            break
+        if not self.sock:
+            raise socket_err
+
+
+class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
+    """This class allows communication via SSL.
+
+    All timeouts are in seconds. If None is passed for timeout then
+    Python's default timeout for sockets will be used. See for example
+    the docs of socket.setdefaulttimeout():
+    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+    """
+
+    def __init__(
+        self,
+        host,
+        port=None,
+        key_file=None,
+        cert_file=None,
+        timeout=None,
+        proxy_info=None,
+        ca_certs=None,
+        disable_ssl_certificate_validation=False,
+        tls_maximum_version=None,
+        tls_minimum_version=None,
+    ):
+
+        self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
+        self.ca_certs = ca_certs if ca_certs else CA_CERTS
+
+        self.proxy_info = proxy_info
+        if proxy_info and not isinstance(proxy_info, ProxyInfo):
+            self.proxy_info = proxy_info("https")
+
+        context = _build_ssl_context(
+            self.disable_ssl_certificate_validation, self.ca_certs, cert_file, key_file,
+            maximum_version=tls_maximum_version, minimum_version=tls_minimum_version,
+        )
+        super(HTTPSConnectionWithTimeout, self).__init__(
+            host,
+            port=port,
+            key_file=key_file,
+            cert_file=cert_file,
+            timeout=timeout,
+            context=context,
+        )
+
+    def connect(self):
+        """Connect to a host on a given (SSL) port."""
+        if self.proxy_info and self.proxy_info.isgood():
+            use_proxy = True
+            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = (
+                self.proxy_info.astuple()
+            )
+
+            host = proxy_host
+            port = proxy_port
+        else:
+            use_proxy = False
+
+            host = self.host
+            port = self.port
+            proxy_type = None
+            proxy_headers = None
+
+        socket_err = None
+
+        address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
+        for family, socktype, proto, canonname, sockaddr in address_info:
+            try:
+                if use_proxy:
+                    sock = socks.socksocket(family, socktype, proto)
+
+                    sock.setproxy(
+                        proxy_type,
+                        proxy_host,
+                        proxy_port,
+                        proxy_rdns,
+                        proxy_user,
+                        proxy_pass,
+                    )
+                else:
+                    sock = socket.socket(family, socktype, proto)
+                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+                if has_timeout(self.timeout):
+                    sock.settimeout(self.timeout)
+                sock.connect((self.host, self.port))
+
+                self.sock = self._context.wrap_socket(sock, server_hostname=self.host)
+
+                # Python 3.3 compatibility: emulate the check_hostname behavior
+                if (
+                    not hasattr(self._context, "check_hostname")
+                    and not self.disable_ssl_certificate_validation
+                ):
+                    try:
+                        ssl.match_hostname(self.sock.getpeercert(), self.host)
+                    except Exception:
+                        self.sock.shutdown(socket.SHUT_RDWR)
+                        self.sock.close()
+                        raise
+
+                if self.debuglevel > 0:
+                    print("connect: ({0}, {1})".format(self.host, self.port))
+                    if use_proxy:
+                        print(
+                            "proxy: {0}".format(
+                                str(
+                                    (
+                                        proxy_host,
+                                        proxy_port,
+                                        proxy_rdns,
+                                        proxy_user,
+                                        proxy_pass,
+                                        proxy_headers,
+                                    )
+                                )
+                            )
+                        )
+            except (ssl.SSLError, ssl.CertificateError) as e:
+                if sock:
+                    sock.close()
+                if self.sock:
+                    self.sock.close()
+                self.sock = None
+                raise
+            except (socket.timeout, socket.gaierror):
+                raise
+            except socket.error as e:
+                socket_err = e
+                if self.debuglevel > 0:
+                    print("connect fail: ({0}, {1})".format((self.host, self.port)))
+                    if use_proxy:
+                        print(
+                            "proxy: {0}".format(
+                                str(
+                                    (
+                                        proxy_host,
+                                        proxy_port,
+                                        proxy_rdns,
+                                        proxy_user,
+                                        proxy_pass,
+                                        proxy_headers,
+                                    )
+                                )
+                            )
+                        )
+                if self.sock:
+                    self.sock.close()
+                self.sock = None
+                continue
+            break
+        if not self.sock:
+            raise socket_err
+
+
+SCHEME_TO_CONNECTION = {
+    "http": HTTPConnectionWithTimeout,
+    "https": HTTPSConnectionWithTimeout,
+}
+
+
+class Http(object):
+    """An HTTP client that handles:
+
+    - all methods
+    - caching
+    - ETags
+    - compression,
+    - HTTPS
+    - Basic
+    - Digest
+    - WSSE
+
+    and more.
+    """
+
+    def __init__(
+        self,
+        cache=None,
+        timeout=None,
+        proxy_info=proxy_info_from_environment,
+        ca_certs=None,
+        disable_ssl_certificate_validation=False,
+        tls_maximum_version=None,
+        tls_minimum_version=None,
+    ):
+        """If 'cache' is a string then it is used as a directory name for
+        a disk cache. Otherwise it must be an object that supports the
+        same interface as FileCache.
+
+        All timeouts are in seconds. If None is passed for timeout
+        then Python's default timeout for sockets will be used. See
+        for example the docs of socket.setdefaulttimeout():
+        http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+
+        `proxy_info` may be:
+          - a callable that takes the http scheme ('http' or 'https') and
+            returns a ProxyInfo instance per request. By default, uses
+            proxy_info_from_environment.
+          - a ProxyInfo instance (static proxy config).
+          - None (proxy disabled).
+
+        ca_certs is the path of a file containing root CA certificates for SSL
+        server certificate validation.  By default, a CA cert file bundled with
+        httplib2 is used.
+
+        If disable_ssl_certificate_validation is true, SSL cert validation will
+        not be performed.
+
+        tls_maximum_version / tls_minimum_version require Python 3.7+ /
+        OpenSSL 1.1.0g+. A value of "TLSv1_3" requires OpenSSL 1.1.1+.
+"""
+        self.proxy_info = proxy_info
+        self.ca_certs = ca_certs
+        self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
+        self.tls_maximum_version = tls_maximum_version
+        self.tls_minimum_version = tls_minimum_version
+        # Map domain name to an httplib connection
+        self.connections = {}
+        # The location of the cache, for now a directory
+        # where cached responses are held.
+        if cache and isinstance(cache, str):
+            self.cache = FileCache(cache)
+        else:
+            self.cache = cache
+
+        # Name/password
+        self.credentials = Credentials()
+
+        # Key/cert
+        self.certificates = KeyCerts()
+
+        # authorization objects
+        self.authorizations = []
+
+        # If set to False then no redirects are followed, even safe ones.
+        self.follow_redirects = True
+
+        # Which HTTP methods do we apply optimistic concurrency to, i.e.
+        # which methods get an "if-match:" etag header added to them.
+        self.optimistic_concurrency_methods = ["PUT", "PATCH"]
+
+        # If 'follow_redirects' is True, and this is set to True then
+        # all redirects are followed, including unsafe ones.
+        self.follow_all_redirects = False
+
+        self.ignore_etag = False
+
+        self.force_exception_to_status_code = False
+
+        self.timeout = timeout
+
+        # Keep Authorization: headers on a redirect.
+        self.forward_authorization_headers = False
+
+    def __getstate__(self):
+        state_dict = copy.copy(self.__dict__)
+        # In case request is augmented by some foreign object such as
+        # credentials which handle auth
+        if "request" in state_dict:
+            del state_dict["request"]
+        if "connections" in state_dict:
+            del state_dict["connections"]
+        return state_dict
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+        self.connections = {}
+
+    def _auth_from_challenge(self, host, request_uri, headers, response, content):
+        """A generator that creates Authorization objects
+           that can be applied to requests.
+        """
+        challenges = _parse_www_authenticate(response, "www-authenticate")
+        for cred in self.credentials.iter(host):
+            for scheme in AUTH_SCHEME_ORDER:
+                if scheme in challenges:
+                    yield AUTH_SCHEME_CLASSES[scheme](
+                        cred, host, request_uri, headers, response, content, self
+                    )
+
+    def add_credentials(self, name, password, domain=""):
+        """Add a name and password that will be used
+        any time a request requires authentication."""
+        self.credentials.add(name, password, domain)
+
+    def add_certificate(self, key, cert, domain):
+        """Add a key and cert that will be used
+        any time a request requires authentication."""
+        self.certificates.add(key, cert, domain)
+
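+    # A short sketch of registering credentials and a client certificate
+    # before issuing requests (names, paths, and domains are placeholders):
+    #
+    #     h = Http()
+    #     h.add_credentials("user", "secret", "example.org")
+    #     h.add_certificate("client.key", "client.crt", "example.org")
+    #     response, content = h.request("https://example.org/protected")
+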
+    def clear_credentials(self):
+        """Remove all the names and passwords
+        that are used for authentication"""
+        self.credentials.clear()
+        self.authorizations = []
+
+    def _conn_request(self, conn, request_uri, method, body, headers):
+        i = 0
+        seen_bad_status_line = False
+        while i < RETRIES:
+            i += 1
+            try:
+                if conn.sock is None:
+                    conn.connect()
+                conn.request(method, request_uri, body, headers)
+            except socket.timeout:
+                conn.close()
+                raise
+            except socket.gaierror:
+                conn.close()
+                raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
+            except socket.error as e:
+                errno_ = (
+                    e.args[0].errno if isinstance(e.args[0], socket.error) else e.errno
+                )
+                if errno_ in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES:
+                    continue  # retry on potentially transient errors
+                raise
+            except http.client.HTTPException:
+                if conn.sock is None:
+                    if i < RETRIES - 1:
+                        conn.close()
+                        conn.connect()
+                        continue
+                    else:
+                        conn.close()
+                        raise
+                if i < RETRIES - 1:
+                    conn.close()
+                    conn.connect()
+                    continue
+                # Just because the server closed the connection doesn't apparently mean
+                # that the server didn't send a response.
+                pass
+            try:
+                response = conn.getresponse()
+            except (http.client.BadStatusLine, http.client.ResponseNotReady):
+                # If we get a BadStatusLine on the first try then that means
+                # the connection just went stale, so retry regardless of the
+                # number of RETRIES set.
+                if not seen_bad_status_line and i == 1:
+                    i = 0
+                    seen_bad_status_line = True
+                    conn.close()
+                    conn.connect()
+                    continue
+                else:
+                    conn.close()
+                    raise
+            except socket.timeout:
+                raise
+            except (socket.error, http.client.HTTPException):
+                conn.close()
+                if i == 0:
+                    conn.close()
+                    conn.connect()
+                    continue
+                else:
+                    raise
+            else:
+                content = b""
+                if method == "HEAD":
+                    conn.close()
+                else:
+                    content = response.read()
+                response = Response(response)
+                if method != "HEAD":
+                    content = _decompressContent(response, content)
+
+            break
+        return (response, content)
+
+    def _request(
+        self,
+        conn,
+        host,
+        absolute_uri,
+        request_uri,
+        method,
+        body,
+        headers,
+        redirections,
+        cachekey,
+    ):
+        """Do the actual request using the connection object
+        and also follow one level of redirects if necessary"""
+
+        auths = [
+            (auth.depth(request_uri), auth)
+            for auth in self.authorizations
+            if auth.inscope(host, request_uri)
+        ]
+        auth = auths and sorted(auths)[0][1] or None
+        if auth:
+            auth.request(method, request_uri, headers, body)
+
+        (response, content) = self._conn_request(
+            conn, request_uri, method, body, headers
+        )
+
+        if auth:
+            if auth.response(response, body):
+                auth.request(method, request_uri, headers, body)
+                (response, content) = self._conn_request(
+                    conn, request_uri, method, body, headers
+                )
+                response._stale_digest = 1
+
+        if response.status == 401:
+            for authorization in self._auth_from_challenge(
+                host, request_uri, headers, response, content
+            ):
+                authorization.request(method, request_uri, headers, body)
+                (response, content) = self._conn_request(
+                    conn, request_uri, method, body, headers
+                )
+                if response.status != 401:
+                    self.authorizations.append(authorization)
+                    authorization.response(response, body)
+                    break
+
+        if (
+            self.follow_all_redirects
+            or (method in ["GET", "HEAD"])
+            or response.status == 303
+        ):
+            if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
+                # Pick out the location header and basically start from the
+                # beginning, remembering first to strip the ETag header and
+                # decrement our redirection 'depth'.
+                if redirections:
+                    if "location" not in response and response.status != 300:
+                        raise RedirectMissingLocation(
+                            _(
+                                "Redirected but the response is missing a Location: header."
+                            ),
+                            response,
+                            content,
+                        )
+                    # Fix-up relative redirects (which violate an RFC 2616 MUST)
+                    if "location" in response:
+                        location = response["location"]
+                        (scheme, authority, path, query, fragment) = parse_uri(location)
+                        if authority is None:
+                            response["location"] = urllib.parse.urljoin(
+                                absolute_uri, location
+                            )
+                    if response.status == 301 and method in ["GET", "HEAD"]:
+                        response["-x-permanent-redirect-url"] = response["location"]
+                        if "content-location" not in response:
+                            response["content-location"] = absolute_uri
+                        _updateCache(headers, response, content, self.cache, cachekey)
+                    if "if-none-match" in headers:
+                        del headers["if-none-match"]
+                    if "if-modified-since" in headers:
+                        del headers["if-modified-since"]
+                    if (
+                        "authorization" in headers
+                        and not self.forward_authorization_headers
+                    ):
+                        del headers["authorization"]
+                    if "location" in response:
+                        location = response["location"]
+                        old_response = copy.deepcopy(response)
+                        if "content-location" not in old_response:
+                            old_response["content-location"] = absolute_uri
+                        redirect_method = method
+                        if response.status in [302, 303]:
+                            redirect_method = "GET"
+                            body = None
+                        (response, content) = self.request(
+                            location,
+                            method=redirect_method,
+                            body=body,
+                            headers=headers,
+                            redirections=redirections - 1,
+                        )
+                        response.previous = old_response
+                else:
+                    raise RedirectLimit(
+                        "Redirected more times than redirection_limit allows.",
+                        response,
+                        content,
+                    )
+            elif response.status in [200, 203] and method in ["GET", "HEAD"]:
+                # Don't cache 206's since we aren't going to handle byte range requests
+                if "content-location" not in response:
+                    response["content-location"] = absolute_uri
+                _updateCache(headers, response, content, self.cache, cachekey)
+
+        return (response, content)
+
+    def _normalize_headers(self, headers):
+        return _normalize_headers(headers)
+
+    # Need to catch and rebrand some exceptions
+    # Then need to optionally turn all exceptions into status codes
+    # including all socket.* and httplib.* exceptions.
+
+    def request(
+        self,
+        uri,
+        method="GET",
+        body=None,
+        headers=None,
+        redirections=DEFAULT_MAX_REDIRECTS,
+        connection_type=None,
+    ):
+        """ Performs a single HTTP request.
+The 'uri' is the URI of the HTTP resource and can begin
+with either 'http' or 'https'. The value of 'uri' must be an absolute URI.
+
+The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.
+There is no restriction on the methods allowed.
+
+The 'body' is the entity body to be sent with the request. It is a string
+object.
+
+Any extra headers that are to be sent with the request should be provided in the
+'headers' dictionary.
+
+The maximum number of redirect to follow before raising an
+exception is 'redirections. The default is 5.
+
+The return value is a tuple of (response, content), the first
+being and instance of the 'Response' class, the second being
+a string that contains the response entity body.
+        """
+        conn_key = ''
+
+        try:
+            if headers is None:
+                headers = {}
+            else:
+                headers = self._normalize_headers(headers)
+
+            if "user-agent" not in headers:
+                headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__
+
+            uri = iri2uri(uri)
+
+            (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
+
+            conn_key = scheme + ":" + authority
+            conn = self.connections.get(conn_key)
+            if conn is None:
+                if not connection_type:
+                    connection_type = SCHEME_TO_CONNECTION[scheme]
+                certs = list(self.certificates.iter(authority))
+                if issubclass(connection_type, HTTPSConnectionWithTimeout):
+                    if certs:
+                        conn = self.connections[conn_key] = connection_type(
+                            authority,
+                            key_file=certs[0][0],
+                            cert_file=certs[0][1],
+                            timeout=self.timeout,
+                            proxy_info=self.proxy_info,
+                            ca_certs=self.ca_certs,
+                            disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,
+                            tls_maximum_version=self.tls_maximum_version,
+                            tls_minimum_version=self.tls_minimum_version,
+                        )
+                    else:
+                        conn = self.connections[conn_key] = connection_type(
+                            authority,
+                            timeout=self.timeout,
+                            proxy_info=self.proxy_info,
+                            ca_certs=self.ca_certs,
+                            disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,
+                            tls_maximum_version=self.tls_maximum_version,
+                            tls_minimum_version=self.tls_minimum_version,
+                        )
+                else:
+                    conn = self.connections[conn_key] = connection_type(
+                        authority, timeout=self.timeout, proxy_info=self.proxy_info
+                    )
+                conn.set_debuglevel(debuglevel)
+
+            if "range" not in headers and "accept-encoding" not in headers:
+                headers["accept-encoding"] = "gzip, deflate"
+
+            info = email.message.Message()
+            cached_value = None
+            if self.cache:
+                cachekey = defrag_uri
+                cached_value = self.cache.get(cachekey)
+                if cached_value:
+                    try:
+                        info, content = cached_value.split(b"\r\n\r\n", 1)
+                        info = email.message_from_bytes(info)
+                        for k, v in info.items():
+                            if v.startswith("=?") and v.endswith("?="):
+                                info.replace_header(
+                                    k, str(*email.header.decode_header(v)[0])
+                                )
+                    except (IndexError, ValueError):
+                        self.cache.delete(cachekey)
+                        cachekey = None
+                        cached_value = None
+            else:
+                cachekey = None
+
+            if (
+                method in self.optimistic_concurrency_methods
+                and self.cache
+                and "etag" in info
+                and not self.ignore_etag
+                and "if-match" not in headers
+            ):
+                # http://www.w3.org/1999/04/Editing/
+                headers["if-match"] = info["etag"]
+
+            if method not in ["GET", "HEAD"] and self.cache and cachekey:
+                # RFC 2616 Section 13.10
+                self.cache.delete(cachekey)
+
+            # Check the vary header in the cache to see if this request
+            # matches what varies in the cache.
+            if method in ["GET", "HEAD"] and "vary" in info:
+                vary = info["vary"]
+                vary_headers = vary.lower().replace(" ", "").split(",")
+                for header in vary_headers:
+                    key = "-varied-%s" % header
+                    value = info[key]
+                    if headers.get(header, None) != value:
+                        cached_value = None
+                        break
+
+            if (
+                cached_value
+                and method in ["GET", "HEAD"]
+                and self.cache
+                and "range" not in headers
+            ):
+                if "-x-permanent-redirect-url" in info:
+                    # Should cached permanent redirects be counted in our redirection count? For now, yes.
+                    if redirections <= 0:
+                        raise RedirectLimit(
+                            "Redirected more times than redirection_limit allows.",
+                            {},
+                            "",
+                        )
+                    (response, new_content) = self.request(
+                        info["-x-permanent-redirect-url"],
+                        method="GET",
+                        headers=headers,
+                        redirections=redirections - 1,
+                    )
+                    response.previous = Response(info)
+                    response.previous.fromcache = True
+                else:
+                    # Determine our course of action:
+                    #   Is the cached entry fresh or stale?
+                    #   Has the client requested a non-cached response?
+                    #
+                    # There seem to be three possible answers:
+                    # 1. [FRESH] Return the cache entry w/o doing a GET
+                    # 2. [STALE] Do the GET (but add in cache validators if available)
+                    # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
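+                    # For example, a request sent with a "cache-control:
+                    # no-cache" header is expected to take the TRANSPARENT
+                    # branch below (a sketch of the intent; the exact rules
+                    # live in _entry_disposition).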
+                    entry_disposition = _entry_disposition(info, headers)
+
+                    if entry_disposition == "FRESH":
+                        if not cached_value:
+                            info["status"] = "504"
+                            content = b""
+                        response = Response(info)
+                        if cached_value:
+                            response.fromcache = True
+                        return (response, content)
+
+                    if entry_disposition == "STALE":
+                        if (
+                            "etag" in info
+                            and not self.ignore_etag
+                            and not "if-none-match" in headers
+                        ):
+                            headers["if-none-match"] = info["etag"]
+                        if "last-modified" in info and not "last-modified" in headers:
+                            headers["if-modified-since"] = info["last-modified"]
+                    elif entry_disposition == "TRANSPARENT":
+                        pass
+
+                    (response, new_content) = self._request(
+                        conn,
+                        authority,
+                        uri,
+                        request_uri,
+                        method,
+                        body,
+                        headers,
+                        redirections,
+                        cachekey,
+                    )
+
+                if response.status == 304 and method == "GET":
+                    # Rewrite the cache entry with the new end-to-end headers:
+                    # take every header in the response and overwrite its value
+                    # in info, unless it is hop-by-hop or listed in the
+                    # connection header.
+
+                    for key in _get_end2end_headers(response):
+                        info[key] = response[key]
+                    merged_response = Response(info)
+                    if hasattr(response, "_stale_digest"):
+                        merged_response._stale_digest = response._stale_digest
+                    _updateCache(
+                        headers, merged_response, content, self.cache, cachekey
+                    )
+                    response = merged_response
+                    response.status = 200
+                    response.fromcache = True
+
+                elif response.status == 200:
+                    content = new_content
+                else:
+                    self.cache.delete(cachekey)
+                    content = new_content
+            else:
+                cc = _parse_cache_control(headers)
+                if "only-if-cached" in cc:
+                    info["status"] = "504"
+                    response = Response(info)
+                    content = b""
+                else:
+                    (response, content) = self._request(
+                        conn,
+                        authority,
+                        uri,
+                        request_uri,
+                        method,
+                        body,
+                        headers,
+                        redirections,
+                        cachekey,
+                    )
+        except Exception as e:
+            is_timeout = isinstance(e, socket.timeout)
+            if is_timeout:
+                conn = self.connections.pop(conn_key, None)
+                if conn:
+                    conn.close()
+
+            if self.force_exception_to_status_code:
+                if isinstance(e, HttpLib2ErrorWithResponse):
+                    response = e.response
+                    content = e.content
+                    response.status = 500
+                    response.reason = str(e)
+                elif isinstance(e, socket.timeout):
+                    content = b"Request Timeout"
+                    response = Response(
+                        {
+                            "content-type": "text/plain",
+                            "status": "408",
+                            "content-length": len(content),
+                        }
+                    )
+                    response.reason = "Request Timeout"
+                else:
+                    content = str(e).encode("utf-8")
+                    response = Response(
+                        {
+                            "content-type": "text/plain",
+                            "status": "400",
+                            "content-length": len(content),
+                        }
+                    )
+                    response.reason = "Bad Request"
+            else:
+                raise
+
+        return (response, content)
+
+
+class Response(dict):
+    """An object more like email.message than httplib.HTTPResponse."""
+
+    """Is this response from our local cache"""
+    fromcache = False
+    """HTTP protocol version used by server.
+
+    10 for HTTP/1.0, 11 for HTTP/1.1.
+    """
+    version = 11
+
+    "Status code returned by server. "
+    status = 200
+    """Reason phrase returned by server."""
+    reason = "Ok"
+
+    previous = None
+
+    def __init__(self, info):
+        # info is either an email.message or
+        # an httplib.HTTPResponse object.
+        if isinstance(info, http.client.HTTPResponse):
+            for key, value in info.getheaders():
+                key = key.lower()
+                prev = self.get(key)
+                if prev is not None:
+                    value = ", ".join((prev, value))
+                self[key] = value
+            self.status = info.status
+            self["status"] = str(self.status)
+            self.reason = info.reason
+            self.version = info.version
+        elif isinstance(info, email.message.Message):
+            for key, value in list(info.items()):
+                self[key.lower()] = value
+            self.status = int(self["status"])
+        else:
+            for key, value in info.items():
+                self[key.lower()] = value
+            self.status = int(self.get("status", self.status))
+
+    def __getattr__(self, name):
+        if name == "dict":
+            return self
+        else:
+            raise AttributeError(name)
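+
+
+# A brief sketch of how a Response is typically inspected once a request
+# returns (the header key shown is commonly present but not guaranteed):
+#
+#     resp, content = Http().request("http://example.org/")
+#     resp.status            # e.g. 200
+#     resp["content-type"]   # headers are exposed as lower-cased dict keys
+#     resp.fromcache         # True when the answer came from the local cache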
diff --git a/utils/frozen_chromite/third_party/python3/httplib2/cacerts.txt b/utils/frozen_chromite/third_party/python3/httplib2/cacerts.txt
new file mode 100644
index 0000000..8020c1b
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python3/httplib2/cacerts.txt
@@ -0,0 +1,2197 @@
+# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Label: "GTE CyberTrust Global Root"
+# Serial: 421
+# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
+# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
+# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
+-----BEGIN CERTIFICATE-----
+MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
+VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
+bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
+b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
+UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
+cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
+b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
+iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
+r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
+04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
+GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
+3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
+lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Server CA"
+# Serial: 1
+# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
+# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
+# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
+-----BEGIN CERTIFICATE-----
+MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
+MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
+MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
+DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
+dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
+cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
+DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
+gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
+yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
+L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
+EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
+7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
+QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
+qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Premium Server CA"
+# Serial: 1
+# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
+# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
+# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
+-----BEGIN CERTIFICATE-----
+MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
+dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
+MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
+MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
+A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
+b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
+cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
+VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
+ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
+uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
+9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
+hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
+pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
+# Subject: O=Equifax OU=Equifax Secure Certificate Authority
+# Label: "Equifax Secure CA"
+# Serial: 903804111
+# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
+# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
+# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
+dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
+MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
+dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
+BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
+cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
+MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
+aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
+ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
+IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
+7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
+1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
+# Serial: 167285380242319648451154478808036881606
+# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
+# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
+# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
+-----BEGIN CERTIFICATE-----
+MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
+c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
+MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
+emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
+FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
+UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
+YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
+MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
+pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
+13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
+AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
+U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
+F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
+oJ2daZH9
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Label: "ValiCert Class 1 VA"
+# Serial: 1
+# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
+# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
+# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
+NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
+LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
+TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
+TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
+LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
+I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
+nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Label: "ValiCert Class 2 VA"
+# Serial: 1
+# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
+# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
+# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
+NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
+dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
+WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
+v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
+UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
+IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
+W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Label: "RSA Root Certificate 1"
+# Serial: 1
+# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
+# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
+# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
+NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
+cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
+2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
+JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
+Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
+n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
+PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
+# Serial: 314531972711909413743075096039378935511
+# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
+# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
+# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
+GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
+U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
+NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
+ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
+ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
+CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
+g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
+fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
+2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
+bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Secure Server CA"
+# Serial: 927650371
+# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
+# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
+# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
+-----BEGIN CERTIFICATE-----
+MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
+VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
+ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
+KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
+ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
+MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
+ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
+b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
+bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
+U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
+A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
+I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
+wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
+AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
+oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
+BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
+dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
+MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
+b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
+dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
+MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
+E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
+MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
+hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
+95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
+2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946059622
+# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc
+# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe
+# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f
+-----BEGIN CERTIFICATE-----
+MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy
+MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA
+vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G
+CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA
+WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo
+oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ
+h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18
+f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN
+B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy
+vUxFnmG6v4SBkgPR0ml8xQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure Global eBusiness CA"
+# Serial: 1
+# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
+# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
+# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
+-----BEGIN CERTIFICATE-----
+MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
+ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
+MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
+dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
+c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
+UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
+58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
+o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
+aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
+A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
+Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
+8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure eBusiness CA 1"
+# Serial: 4
+# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
+# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
+# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
+-----BEGIN CERTIFICATE-----
+MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
+ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
+MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
+LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
+KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
+RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
+WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
+Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
+eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
+zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
+/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
+-----END CERTIFICATE-----
+
+# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
+# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
+# Label: "Equifax Secure eBusiness CA 2"
+# Serial: 930140085
+# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca
+# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc
+# SHA256 Fingerprint: 2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj
+dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0
+NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD
+VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G
+vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/
+BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl
+IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw
+NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq
+y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy
+0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1
+E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Low-Value Services Root"
+# Serial: 1
+# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
+# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
+# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
+-----BEGIN CERTIFICATE-----
+MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
+MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
+QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
+VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
+A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
+CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
+tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
+dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
+PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
+BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
+MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
+ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
+IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
+7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
+43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
+eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
+pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
+WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Public Services Root"
+# Serial: 1
+# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
+# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
+# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
+MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
+ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
+BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
+6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
+GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
+dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
+1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
+62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
+BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
+MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
+cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
+b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
+IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
+iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
+GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
+4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
+XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Qualified Certificates Root"
+# Serial: 1
+# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
+# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
+# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
+-----BEGIN CERTIFICATE-----
+MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
+MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
+EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
+BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
+xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
+87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
+2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
+WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
+0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
+A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
+pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
+ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
+aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
+hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
+hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
+dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
+P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
+iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
+xqE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Global CA 2"
+# Serial: 1
+# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
+# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
+# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
+-----BEGIN CERTIFICATE-----
+MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
+IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
+R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
+PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
+Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
+TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
+5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
+S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
+2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
+FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
+EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
+EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
+/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
+A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
+abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
+I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
+4iIprn2DQKi6bA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc.
+# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc.
+# Label: "America Online Root Certification Authority 1"
+# Serial: 1
+# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e
+# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a
+# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3
+-----BEGIN CERTIFICATE-----
+MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
+bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2
+MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
+ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
+Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk
+hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym
+1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW
+OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb
+2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko
+O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU
+AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF
+Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb
+LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir
+oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C
+MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds
+sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7
+-----END CERTIFICATE-----
+
+# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc.
+# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc.
+# Label: "America Online Root Certification Authority 2"
+# Serial: 1
+# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf
+# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84
+# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd
+-----BEGIN CERTIFICATE-----
+MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
+bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2
+MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
+ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
+Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC
+206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci
+KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2
+JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9
+BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e
+Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B
+PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67
+Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq
+Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ
+o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3
++L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj
+YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj
+FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn
+xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2
+LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc
+obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8
+CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe
+IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA
+DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F
+AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX
+Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb
+AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl
+Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw
+RY8mkaKO/qk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
+# Subject: CN=Secure Certificate Services O=Comodo CA Limited
+# Label: "Comodo Secure Services root"
+# Serial: 1
+# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
+# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
+# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
+-----BEGIN CERTIFICATE-----
+MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
+ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
+fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
+BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
+cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
+HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
+CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
+3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
+6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
+HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
+EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
+Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
+Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
+DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
+5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
+Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
+gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
+aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
+izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
+# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
+# Label: "Comodo Trusted Services root"
+# Serial: 1
+# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
+# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
+# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
+aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
+MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
+BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
+VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
+fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
+TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
+fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
+1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
+kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
+A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
+ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
+dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
+Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
+HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
+pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
+jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
+xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
+dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN DATACorp SGC Root CA"
+# Serial: 91374294542884689855167577680241077609
+# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
+# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
+# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
+-----BEGIN CERTIFICATE-----
+MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
+kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
+IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
+EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
+VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
+dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
+E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
+D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
+4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
+lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
+bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
+o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
+MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
+LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
+BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
+AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
+Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
+j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
+KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
+2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
+mfnGV/TJVTl4uix5yaaIK/QI
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN USERFirst Hardware Root CA"
+# Serial: 91374294542884704022267039221184531197
+# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
+# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
+# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
+-----BEGIN CERTIFICATE-----
+MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
+lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
+SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
+A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
+MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
+d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
+cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
+0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
+M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
+MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
+oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
+DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
+oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
+VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
+dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
+bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
+BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
+//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
+CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
+CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
+3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
+KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
+# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
+# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
+-----BEGIN CERTIFICATE-----
+MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
+FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
+ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
+LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
+BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
+Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
+dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
+cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
+YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
+dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
+bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
+YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
+TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
+9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
+jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
+FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
+ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
+ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
+EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
+L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
+yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
+O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
+um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
+NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
+# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
+# Label: "TC TrustCenter Class 2 CA II"
+# Serial: 941389028203453866782103406992443
+# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23
+# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e
+# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4
+-----BEGIN CERTIFICATE-----
+MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
+BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
+Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1
+OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
+SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc
+VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf
+tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg
+uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J
+XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK
+8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99
+5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3
+kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
+dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6
+Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
+JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
+Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
+TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS
+GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt
+ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8
+au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV
+hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI
+dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
+# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
+# Label: "TC TrustCenter Class 3 CA II"
+# Serial: 1506523511417715638772220530020799
+# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e
+# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5
+# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e
+-----BEGIN CERTIFICATE-----
+MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
+BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
+Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1
+OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
+SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc
+VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW
+Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q
+Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2
+1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq
+ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1
+Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX
+XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
+dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6
+Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
+JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
+Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
+TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN
+irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8
+TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6
+g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB
+95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj
+S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Label: "TC TrustCenter Universal CA I"
+# Serial: 601024842042189035295619584734726
+# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c
+# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3
+# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
+BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1
+c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx
+MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg
+R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD
+VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR
+JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T
+fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu
+jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z
+wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ
+fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD
+VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G
+CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1
+7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn
+8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs
+ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT
+ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/
+2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Label: "TC TrustCenter Universal CA III"
+# Serial: 2010889993983507346460533407902964
+# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b
+# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87
+# SHA256 Fingerprint: 30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d
+-----BEGIN CERTIFICATE-----
+MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
+BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1
+c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy
+MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl
+ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm
+BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF
+5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv
+DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v
+zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT
+yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj
+dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh
+MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI
+4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz
+dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY
+aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G
+DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV
+CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH
+LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 45
+# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
+# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
+# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
+-----BEGIN CERTIFICATE-----
+MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
+F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
+ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
+ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
+aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
+YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
+c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
+d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
+CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
+dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
+wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
+Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
+0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
+pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
+CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
+P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
+1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
+KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
+JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
+8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
+fyWl8kgAwKQB2j8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Label: "StartCom Certification Authority G2"
+# Serial: 59
+# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
+# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
+# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
+-----BEGIN CERTIFICATE-----
+MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
+OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
+A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
+JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
+vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
+D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
+Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
+RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
+HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
+nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
+0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
+UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
+Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
+TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
+BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
+2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
+UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
+6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
+9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
+HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
+wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
+XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
+IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
+hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
+so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
+-----END CERTIFICATE-----
+
+# Issuer: O=Digital Signature Trust Co., CN=DST Root CA X3
+# Subject: O=Digital Signature Trust Co., CN=DST Root CA X3
+# Label: "IdenTrust DST Root CA X3"
+# Serial: 44AFB080D6A327BA893039862EF8406B
+# MD5 Fingerprint: 41:03:52:DC:0F:F7:50:1B:16:F0:02:8E:BA:6F:45:C5
+# SHA1 Fingerprint: DA:C9:02:4F:54:D8:F6:DF:94:93:5F:B1:73:26:38:CA:6A:D7:7C:13
+# SHA256 Fingerprint: 06:87:26:03:31:A7:24:03:D9:09:F1:05:E6:9B:CF:0D:32:E1:BD:24:93:FF:C6:D9:20:6D:11:BC:D6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US
+# Subject: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US
+# Serial: 33af1e6a711a9a0bb2864b11d09fae5
+# MD5 Fingerprint: E4:A6:8A:C8:54:AC:52:42:46:0A:FD:72:48:1B:2A:44
+# SHA1 Fingerprint: DF:3C:24:F9:BF:D6:66:76:1B:26:80:73:FE:06:D1:CC:8D:4F:82:A4
+# SHA256 Fingerprint: CB:3C:CB:B7:60:31:E5:E0:13:8F:8D:D3:9A:23:F9:DE:47:FF:C3:5E:43:C1:14:4C:EA:27:D4:6A:5A:B1:CB:5F
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
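# The "# SHA256 Fingerprint" header above each PEM block can be recomputed
# from the certificate body itself, which is a quick way to sanity-check the
# bundle.  A minimal sketch using only the Python standard library; the local
# cacerts.txt path below is illustrative, not part of the tree above.
import hashlib
import re
import ssl


def pem_sha256_fingerprints(path):
    """Yield the colon-separated SHA-256 fingerprint of each PEM block."""
    with open(path, encoding="utf-8") as f:
        text = f.read()
    for pem in re.findall(
            r"-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----",
            text, re.DOTALL):
        der = ssl.PEM_cert_to_DER_cert(pem)  # strip PEM armor, base64-decode
        digest = hashlib.sha256(der).hexdigest()
        yield ":".join(digest[i:i + 2] for i in range(0, len(digest), 2))


if __name__ == "__main__":
    for fingerprint in pem_sha256_fingerprints("cacerts.txt"):
        print(fingerprint)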
diff --git a/utils/frozen_chromite/third_party/python3/httplib2/certs.py b/utils/frozen_chromite/third_party/python3/httplib2/certs.py
new file mode 100644
index 0000000..59d1ffc
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python3/httplib2/certs.py
@@ -0,0 +1,42 @@
+"""Utilities for certificate management."""
+
+import os
+
+certifi_available = False
+certifi_where = None
+try:
+    from certifi import where as certifi_where
+    certifi_available = True
+except ImportError:
+    pass
+
+custom_ca_locater_available = False
+custom_ca_locater_where = None
+try:
+    from ca_certs_locater import get as custom_ca_locater_where
+    custom_ca_locater_available = True
+except ImportError:
+    pass
+
+
+BUILTIN_CA_CERTS = os.path.join(
+    os.path.dirname(os.path.abspath(__file__)), "cacerts.txt"
+)
+
+
+def where():
+    env = os.environ.get("HTTPLIB2_CA_CERTS")
+    if env is not None:
+        if os.path.isfile(env):
+            return env
+        else:
+            raise RuntimeError("Environment variable HTTPLIB2_CA_CERTS is not a valid file")
+    if custom_ca_locater_available:
+        return custom_ca_locater_where()
+    if certifi_available:
+        return certifi_where()
+    return BUILTIN_CA_CERTS
+
+
+if __name__ == "__main__":
+    print(where())
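# where() above resolves the CA bundle in a fixed order: the HTTPLIB2_CA_CERTS
# environment variable, a site-provided ca_certs_locater module, certifi if it
# is installed, and finally the bundled cacerts.txt.  A hedged usage sketch of
# the override path; it assumes the vendored directory is importable as
# "httplib2" and that the referenced bundle exists on the host.
import os

from httplib2 import certs  # assumes the vendored dir is on sys.path

os.environ["HTTPLIB2_CA_CERTS"] = "/etc/ssl/certs/ca-certificates.crt"

# Prints the overridden path if that file exists; if the variable points at a
# missing file, where() raises RuntimeError rather than silently falling back.
print(certs.where())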
diff --git a/utils/frozen_chromite/third_party/python3/httplib2/iri2uri.py b/utils/frozen_chromite/third_party/python3/httplib2/iri2uri.py
new file mode 100644
index 0000000..86e361e
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python3/httplib2/iri2uri.py
@@ -0,0 +1,124 @@
+# -*- coding: utf-8 -*-
+"""Converts an IRI to a URI."""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = []
+__version__ = "1.0.0"
+__license__ = "MIT"
+
+import urllib.parse
+
+# Convert an IRI to a URI following the rules in RFC 3987
+#
+# The characters we need to encode and escape are defined in the spec:
+#
+# iprivate =  %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
+# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
+#         / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
+#         / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
+#         / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
+#         / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
+#         / %xD0000-DFFFD / %xE1000-EFFFD
+
+escape_range = [
+    (0xA0, 0xD7FF),
+    (0xE000, 0xF8FF),
+    (0xF900, 0xFDCF),
+    (0xFDF0, 0xFFEF),
+    (0x10000, 0x1FFFD),
+    (0x20000, 0x2FFFD),
+    (0x30000, 0x3FFFD),
+    (0x40000, 0x4FFFD),
+    (0x50000, 0x5FFFD),
+    (0x60000, 0x6FFFD),
+    (0x70000, 0x7FFFD),
+    (0x80000, 0x8FFFD),
+    (0x90000, 0x9FFFD),
+    (0xA0000, 0xAFFFD),
+    (0xB0000, 0xBFFFD),
+    (0xC0000, 0xCFFFD),
+    (0xD0000, 0xDFFFD),
+    (0xE1000, 0xEFFFD),
+    (0xF0000, 0xFFFFD),
+    (0x100000, 0x10FFFD),
+]
+
+
+def encode(c):
+    retval = c
+    i = ord(c)
+    for low, high in escape_range:
+        if i < low:
+            break
+        if i >= low and i <= high:
+            retval = "".join(["%%%2X" % o for o in c.encode("utf-8")])
+            break
+    return retval
+
+
+def iri2uri(uri):
+    """Convert an IRI to a URI. Note that IRIs must be
+    passed in as unicode strings. That is, do not utf-8 encode
+    the IRI before passing it into the function."""
+    if isinstance(uri, str):
+        (scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri)
+        authority = authority.encode("idna").decode("utf-8")
+        # For each character in 'ucschar' or 'iprivate'
+        #  1. encode as utf-8
+        #  2. then %-encode each octet of that utf-8
+        uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment))
+        uri = "".join([encode(c) for c in uri])
+    return uri
+
+
+if __name__ == "__main__":
+    import unittest
+
+    class Test(unittest.TestCase):
+        def test_uris(self):
+            """Test that URIs are invariant under the transformation."""
+            invariant = [
+                "ftp://ftp.is.co.za/rfc/rfc1808.txt",
+                "http://www.ietf.org/rfc/rfc2396.txt",
+                "ldap://[2001:db8::7]/c=GB?objectClass?one",
+                "mailto:John.Doe@example.com",
+                "news:comp.infosystems.www.servers.unix",
+                "tel:+1-816-555-1212",
+                "telnet://192.0.2.16:80/",
+                "urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
+            ]
+            for uri in invariant:
+                self.assertEqual(uri, iri2uri(uri))
+
+        def test_iri(self):
+            """Test that the right type of escaping is done for each part of the URI."""
+            self.assertEqual(
+                "http://xn--o3h.com/%E2%98%84",
+                iri2uri("http://\N{COMET}.com/\N{COMET}"),
+            )
+            self.assertEqual(
+                "http://bitworking.org/?fred=%E2%98%84",
+                iri2uri("http://bitworking.org/?fred=\N{COMET}"),
+            )
+            self.assertEqual(
+                "http://bitworking.org/#%E2%98%84",
+                iri2uri("http://bitworking.org/#\N{COMET}"),
+            )
+            self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}"))
+            self.assertEqual(
+                "/fred?bar=%E2%98%9A#%E2%98%84",
+                iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"),
+            )
+            self.assertEqual(
+                "/fred?bar=%E2%98%9A#%E2%98%84",
+                iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")),
+            )
+            self.assertNotEqual(
+                "/fred?bar=%E2%98%9A#%E2%98%84",
+                iri2uri(
+                    "/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode("utf-8")
+                ),
+            )
+
+    unittest.main()
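# A quick illustration of the conversion implemented above, mirroring the unit
# tests: non-ASCII path/query/fragment characters are UTF-8 encoded and then
# percent-escaped, while the authority is IDNA-encoded.  Assumes the vendored
# module is importable as httplib2.iri2uri.
from httplib2.iri2uri import iri2uri

print(iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
# -> http://xn--o3h.com/%E2%98%84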
diff --git a/utils/frozen_chromite/third_party/python3/httplib2/socks.py b/utils/frozen_chromite/third_party/python3/httplib2/socks.py
new file mode 100644
index 0000000..2926b4e
--- /dev/null
+++ b/utils/frozen_chromite/third_party/python3/httplib2/socks.py
@@ -0,0 +1,510 @@
+"""SocksiPy - Python SOCKS module.
+
+Version 1.00
+
+Copyright 2006 Dan-Haim. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+3. Neither the name of Dan Haim nor the names of his contributors may be used
+   to endorse or promote products derived from this software without specific
+   prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
+OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+This module provides a standard socket-like interface for Python
+for tunneling connections through SOCKS proxies.
+
+Minor modifications made by Christopher Gilbert (http://motomastyle.com/) for
+use in PyLoris (http://pyloris.sourceforge.net/).
+
+Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
+mainly to merge bug fixes found in Sourceforge.
+"""
+
+import base64
+import socket
+import struct
+import sys
+
+if getattr(socket, "socket", None) is None:
+    raise ImportError("socket.socket missing, proxy support unusable")
+
+PROXY_TYPE_SOCKS4 = 1
+PROXY_TYPE_SOCKS5 = 2
+PROXY_TYPE_HTTP = 3
+PROXY_TYPE_HTTP_NO_TUNNEL = 4
+
+_defaultproxy = None
+_orgsocket = socket.socket
+
+
+class ProxyError(Exception):
+    pass
+
+
+class GeneralProxyError(ProxyError):
+    pass
+
+
+class Socks5AuthError(ProxyError):
+    pass
+
+
+class Socks5Error(ProxyError):
+    pass
+
+
+class Socks4Error(ProxyError):
+    pass
+
+
+class HTTPError(ProxyError):
+    pass
+
+
+_generalerrors = (
+    "success",
+    "invalid data",
+    "not connected",
+    "not available",
+    "bad proxy type",
+    "bad input",
+)
+
+_socks5errors = (
+    "succeeded",
+    "general SOCKS server failure",
+    "connection not allowed by ruleset",
+    "Network unreachable",
+    "Host unreachable",
+    "Connection refused",
+    "TTL expired",
+    "Command not supported",
+    "Address type not supported",
+    "Unknown error",
+)
+
+_socks5autherrors = (
+    "succeeded",
+    "authentication is required",
+    "all offered authentication methods were rejected",
+    "unknown username or invalid password",
+    "unknown error",
+)
+
+_socks4errors = (
+    "request granted",
+    "request rejected or failed",
+    "request rejected because SOCKS server cannot connect to identd on the client",
+    "request rejected because the client program and identd report different "
+    "user-ids",
+    "unknown error",
+)
+
+
+def setdefaultproxy(
+    proxytype=None, addr=None, port=None, rdns=True, username=None, password=None
+):
+    """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
+    Sets a default proxy which all further socksocket objects will use,
+    unless explicitly changed.
+    """
+    global _defaultproxy
+    _defaultproxy = (proxytype, addr, port, rdns, username, password)
+
+
+def wrapmodule(module):
+    """wrapmodule(module)
+
+    Attempts to replace a module's socket library with a SOCKS socket. Must set
+    a default proxy using setdefaultproxy(...) first.
+    This will only work on modules that import socket directly into the
+    namespace; most of the Python Standard Library falls into this category.
+    """
+    if _defaultproxy != None:
+        module.socket.socket = socksocket
+    else:
+        raise GeneralProxyError((4, "no proxy specified"))
+
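+# Example (sketch): route a stdlib module's sockets through a SOCKS5 proxy.
+# The proxy address and port below are illustrative assumptions only.
+#
+#   import urllib.request
+#   setdefaultproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
+#   wrapmodule(urllib.request)
+#   urllib.request.urlopen("http://www.example.com/")
+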
+
+class socksocket(socket.socket):
+    """socksocket([family[, type[, proto]]]) -> socket object
+    Open a SOCKS enabled socket. The parameters are the same as
+    those of the standard socket init. In order for SOCKS to work,
+    you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
+    """
+
+    def __init__(
+        self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None
+    ):
+        _orgsocket.__init__(self, family, type, proto, _sock)
+        if _defaultproxy != None:
+            self.__proxy = _defaultproxy
+        else:
+            self.__proxy = (None, None, None, None, None, None)
+        self.__proxysockname = None
+        self.__proxypeername = None
+        self.__httptunnel = True
+
+    def __recvall(self, count):
+        """__recvall(count) -> data
+        Receive EXACTLY the number of bytes requested from the socket.
+        Blocks until the required number of bytes have been received.
+        """
+        data = self.recv(count)
+        while len(data) < count:
+            d = self.recv(count - len(data))
+            if not d:
+                raise GeneralProxyError((0, "connection closed unexpectedly"))
+            data = data + d
+        return data
+
+    def sendall(self, content, *args):
+        """ override socket.socket.sendall method to rewrite the header
+        for non-tunneling proxies if needed
+        """
+        if not self.__httptunnel:
+            content = self.__rewriteproxy(content)
+        return super(socksocket, self).sendall(content, *args)
+
+    def __rewriteproxy(self, header):
+        """ rewrite HTTP request headers to support non-tunneling proxies
+        (i.e. those which do not support the CONNECT method).
+        This only works for HTTP (not HTTPS) since HTTPS requires tunneling.
+        """
+        host, endpt = None, None
+        hdrs = header.split("\r\n")
+        for hdr in hdrs:
+            if hdr.lower().startswith("host:"):
+                host = hdr
+            elif hdr.lower().startswith("get") or hdr.lower().startswith("post"):
+                endpt = hdr
+        if host and endpt:
+            hdrs.remove(host)
+            hdrs.remove(endpt)
+            host = host.split(" ")[1]
+            endpt = endpt.split(" ")
+            if self.__proxy[4] != None and self.__proxy[5] != None:
+                hdrs.insert(0, self.__getauthheader())
+            hdrs.insert(0, "Host: %s" % host)
+            hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
+        return "\r\n".join(hdrs)
+
+    def __getauthheader(self):
+        auth = self.__proxy[4] + b":" + self.__proxy[5]
+        return "Proxy-Authorization: Basic " + base64.b64encode(auth).decode()
+
+    def setproxy(
+        self,
+        proxytype=None,
+        addr=None,
+        port=None,
+        rdns=True,
+        username=None,
+        password=None,
+        headers=None,
+    ):
+        """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
+
+        Sets the proxy to be used.
+        proxytype -    The type of the proxy to be used. Four types
+                are supported: PROXY_TYPE_SOCKS4 (including socks4a),
+                PROXY_TYPE_SOCKS5, PROXY_TYPE_HTTP and
+                PROXY_TYPE_HTTP_NO_TUNNEL.
+        addr -        The address of the server (IP or DNS).
+        port -        The port of the server. Defaults to 1080 for SOCKS
+                servers and 8080 for HTTP proxy servers.
+        rdns -        Should DNS queries be performed on the remote side
+                (rather than the local side). The default is True.
+                Note: This has no effect with SOCKS4 servers.
+        username -    Username to authenticate with to the server.
+                The default is no authentication.
+        password -    Password to authenticate with to the server.
+                Only relevant when username is also provided.
+        headers -     Additional or modified headers for the proxy connect
+                request.
+        """
+        self.__proxy = (proxytype, addr, port, rdns, username, password, headers)
+
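+    # Example (sketch): open a TCP connection through a hypothetical HTTP
+    # proxy; the proxy and destination hosts/ports are assumptions used only
+    # for illustration.
+    #
+    #   s = socksocket()
+    #   s.setproxy(PROXY_TYPE_HTTP, "proxy.example.com", 8080)
+    #   s.connect(("www.example.com", 80))
+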
+    def __negotiatesocks5(self, destaddr, destport):
+        """__negotiatesocks5(self,destaddr,destport)
+        Negotiates a connection through a SOCKS5 server.
+        """
+        # First we'll send the authentication packages we support.
+        if (self.__proxy[4] != None) and (self.__proxy[5] != None):
+            # The username/password details were supplied to the
+            # setproxy method so we support the USERNAME/PASSWORD
+            # authentication (in addition to the standard none).
+            self.sendall(struct.pack("BBBB", 0x05, 0x02, 0x00, 0x02))
+        else:
+            # No username/password were entered, therefore we
+            # only support connections with no authentication.
+            self.sendall(struct.pack("BBB", 0x05, 0x01, 0x00))
+        # We'll receive the server's response to determine which
+        # method was selected
+        chosenauth = self.__recvall(2)
+        if chosenauth[0:1] != chr(0x05).encode():
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        # Check the chosen authentication method
+        if chosenauth[1:2] == chr(0x00).encode():
+            # No authentication is required
+            pass
+        elif chosenauth[1:2] == chr(0x02).encode():
+            # Okay, we need to perform a basic username/password
+            # authentication.
+            packet = bytearray()
+            packet.append(0x01)
+            packet.append(len(self.__proxy[4]))
+            packet.extend(self.__proxy[4])
+            packet.append(len(self.__proxy[5]))
+            packet.extend(self.__proxy[5])
+            self.sendall(packet)
+            authstat = self.__recvall(2)
+            if authstat[0:1] != chr(0x01).encode():
+                # Bad response
+                self.close()
+                raise GeneralProxyError((1, _generalerrors[1]))
+            if authstat[1:2] != chr(0x00).encode():
+                # Authentication failed
+                self.close()
+                raise Socks5AuthError((3, _socks5autherrors[3]))
+            # Authentication succeeded
+        else:
+            # Reaching here is always bad
+            self.close()
+            if chosenauth[1:2] == chr(0xFF).encode():
+                raise Socks5AuthError((2, _socks5autherrors[2]))
+            else:
+                raise GeneralProxyError((1, _generalerrors[1]))
+        # Now we can request the actual connection
+        req = struct.pack("BBB", 0x05, 0x01, 0x00)
+        # If the given destination address is an IP address, we'll
+        # use the IPv4 address request even if remote resolving was specified.
+        try:
+            ipaddr = socket.inet_aton(destaddr)
+            req = req + chr(0x01).encode() + ipaddr
+        except socket.error:
+            # Well it's not an IP number,  so it's probably a DNS name.
+            if self.__proxy[3]:
+                # Resolve remotely
+                ipaddr = None
+                req = (
+                    req
+                    + chr(0x03).encode()
+                    + chr(len(destaddr)).encode()
+                    + destaddr.encode()
+                )
+            else:
+                # Resolve locally
+                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
+                req = req + chr(0x01).encode() + ipaddr
+        req = req + struct.pack(">H", destport)
+        self.sendall(req)
+        # Get the response
+        resp = self.__recvall(4)
+        if resp[0:1] != chr(0x05).encode():
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        elif resp[1:2] != chr(0x00).encode():
+            # Connection failed
+            self.close()
+            if ord(resp[1:2]) <= 8:
+                raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
+            else:
+                raise Socks5Error((9, _socks5errors[9]))
+        # Get the bound address/port
+        elif resp[3:4] == chr(0x01).encode():
+            boundaddr = self.__recvall(4)
+        elif resp[3:4] == chr(0x03).encode():
+            resp = resp + self.recv(1)
+            boundaddr = self.__recvall(ord(resp[4:5]))
+        else:
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        boundport = struct.unpack(">H", self.__recvall(2))[0]
+        self.__proxysockname = (boundaddr, boundport)
+        if ipaddr != None:
+            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
+        else:
+            self.__proxypeername = (destaddr, destport)
+
+    def getproxysockname(self):
+        """getsockname() -> address info
+        Returns the bound IP address and port number at the proxy.
+        """
+        return self.__proxysockname
+
+    def getproxypeername(self):
+        """getproxypeername() -> address info
+        Returns the IP and port number of the proxy.
+        """
+        return _orgsocket.getpeername(self)
+
+    def getpeername(self):
+        """getpeername() -> address info
+        Returns the IP address and port number of the destination
+        machine (note: getproxypeername returns the proxy)
+        """
+        return self.__proxypeername
+
+    def __negotiatesocks4(self, destaddr, destport):
+        """__negotiatesocks4(self,destaddr,destport)
+        Negotiates a connection through a SOCKS4 server.
+        """
+        # Check if the destination address provided is an IP address
+        rmtrslv = False
+        try:
+            ipaddr = socket.inet_aton(destaddr)
+        except socket.error:
+            # It's a DNS name. Check where it should be resolved.
+            if self.__proxy[3]:
+                ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
+                rmtrslv = True
+            else:
+                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
+        # Construct the request packet
+        req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
+        # The username parameter is considered userid for SOCKS4
+        if self.__proxy[4] != None:
+            req = req + self.__proxy[4]
+        req = req + chr(0x00).encode()
+        # DNS name if remote resolving is required
+        # NOTE: This is actually an extension to the SOCKS4 protocol
+        # called SOCKS4A and may not be supported in all cases.
+        if rmtrslv:
+            req = req + destaddr + chr(0x00).encode()
+        self.sendall(req)
+        # Get the response from the server
+        resp = self.__recvall(8)
+        if resp[0:1] != chr(0x00).encode():
+            # Bad data
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        if resp[1:2] != chr(0x5A).encode():
+            # Server returned an error
+            self.close()
+            if ord(resp[1:2]) in (91, 92, 93):
+                self.close()
+                raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
+            else:
+                raise Socks4Error((94, _socks4errors[4]))
+        # Get the bound address/port
+        self.__proxysockname = (
+            socket.inet_ntoa(resp[4:]),
+            struct.unpack(">H", resp[2:4])[0],
+        )
+        if not rmtrslv:
+            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
+        else:
+            self.__proxypeername = (destaddr, destport)
+
+    def __negotiatehttp(self, destaddr, destport):
+        """__negotiatehttp(self,destaddr,destport)
+        Negotiates a connection through an HTTP server.
+        """
+        # If we need to resolve locally, we do this now
+        if not self.__proxy[3]:
+            addr = socket.gethostbyname(destaddr)
+        else:
+            addr = destaddr
+        headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
+        wrote_host_header = False
+        wrote_auth_header = False
+        if self.__proxy[6] != None:
+            for key, val in self.__proxy[6].items():
+                headers += [key, ": ", val, "\r\n"]
+                wrote_host_header = key.lower() == "host"
+                wrote_auth_header = key.lower() == "proxy-authorization"
+        if not wrote_host_header:
+            headers += ["Host: ", destaddr, "\r\n"]
+        if not wrote_auth_header:
+            if self.__proxy[4] != None and self.__proxy[5] != None:
+                headers += [self.__getauthheader(), "\r\n"]
+        headers.append("\r\n")
+        self.sendall("".join(headers).encode())
+        # We read the response until we get the string "\r\n\r\n"
+        resp = self.recv(1)
+        while resp.find("\r\n\r\n".encode()) == -1:
+            resp = resp + self.recv(1)
+        # We just need the first line to check if the connection
+        # was successful
+        statusline = resp.splitlines()[0].split(" ".encode(), 2)
+        if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        try:
+            statuscode = int(statusline[1])
+        except ValueError:
+            self.close()
+            raise GeneralProxyError((1, _generalerrors[1]))
+        if statuscode != 200:
+            self.close()
+            raise HTTPError((statuscode, statusline[2]))
+        self.__proxysockname = ("0.0.0.0", 0)
+        self.__proxypeername = (addr, destport)
+
+    def connect(self, destpair):
+        """connect(self, despair)
+        Connects to the specified destination through a proxy.
+        destpair - A tuple of the IP/DNS address and the port number.
+        (identical to socket's connect).
+        To select the proxy server use setproxy().
+        """
+        # Do a minimal input check first
+        if (
+            (not type(destpair) in (list, tuple))
+            or (len(destpair) < 2)
+            or (not isinstance(destpair[0], (str, bytes)))
+            or (type(destpair[1]) != int)
+        ):
+            raise GeneralProxyError((5, _generalerrors[5]))
+        if self.__proxy[0] == PROXY_TYPE_SOCKS5:
+            if self.__proxy[2] != None:
+                portnum = self.__proxy[2]
+            else:
+                portnum = 1080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            self.__negotiatesocks5(destpair[0], destpair[1])
+        elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
+            if self.__proxy[2] != None:
+                portnum = self.__proxy[2]
+            else:
+                portnum = 1080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            self.__negotiatesocks4(destpair[0], destpair[1])
+        elif self.__proxy[0] == PROXY_TYPE_HTTP:
+            if self.__proxy[2] != None:
+                portnum = self.__proxy[2]
+            else:
+                portnum = 8080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            self.__negotiatehttp(destpair[0], destpair[1])
+        elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
+            if self.__proxy[2] != None:
+                portnum = self.__proxy[2]
+            else:
+                portnum = 8080
+            _orgsocket.connect(self, (self.__proxy[1], portnum))
+            if destpair[1] == 443:
+                self.__negotiatehttp(destpair[0], destpair[1])
+            else:
+                self.__httptunnel = False
+        elif self.__proxy[0] == None:
+            _orgsocket.connect(self, (destpair[0], destpair[1]))
+        else:
+            raise GeneralProxyError((4, _generalerrors[4]))
diff --git a/utils/frozen_chromite/third_party/uritemplate/__init__.py b/utils/frozen_chromite/third_party/uritemplate/__init__.py
new file mode 100644
index 0000000..0e7f415
--- /dev/null
+++ b/utils/frozen_chromite/third_party/uritemplate/__init__.py
@@ -0,0 +1,265 @@
+#!/usr/bin/env python
+
+"""
+URI Template (RFC6570) Processor
+"""
+
+__copyright__ = """\
+Copyright 2011-2013 Joe Gregorio
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+try:
+    from urllib.parse import quote
+except ImportError:
+    from urllib import quote
+
+
+__version__ = "0.6"
+
+RESERVED = ":/?#[]@!$&'()*+,;="
+OPERATOR = "+#./;?&|!@"
+MODIFIER = ":^"
+TEMPLATE = re.compile(r"{([^\}]+)}")
+
+
+def variables(template):
+    '''Returns the set of keywords in a uri template'''
+    vars = set()
+    for varlist in TEMPLATE.findall(template):
+        if varlist[0] in OPERATOR:
+            varlist = varlist[1:]
+        varspecs = varlist.split(',')
+        for var in varspecs:
+            # handle prefix values
+            var = var.split(':')[0]
+            # handle composite values
+            if var.endswith('*'):
+                var = var[:-1]
+            vars.add(var)
+    return vars
+
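+# Example (sketch): extracting the variable names from an illustrative
+# template (the template itself is an assumption, not from this module).
+#
+#   variables("http://example.com/{path}{?q,n}")
+#   # -> {'path', 'q', 'n'}
+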
+
+def _quote(value, safe, prefix=None):
+    if prefix is not None:
+        return quote(str(value)[:prefix], safe)
+    return quote(str(value), safe)
+
+
+def _tostring(varname, value, explode, prefix, operator, safe=""):
+    if isinstance(value, list):
+        return ",".join([_quote(x, safe) for x in value])
+    if isinstance(value, dict):
+        keys = sorted(value.keys())
+        if explode:
+            return ",".join([_quote(key, safe) + "=" + \
+                             _quote(value[key], safe) for key in keys])
+        else:
+            return ",".join([_quote(key, safe) + "," + \
+                             _quote(value[key], safe) for key in keys])
+    elif value is None:
+        return
+    else:
+        return _quote(value, safe, prefix)
+
+
+def _tostring_path(varname, value, explode, prefix, operator, safe=""):
+    joiner = operator
+    if isinstance(value, list):
+        if explode:
+            out = [_quote(x, safe) for x in value if value is not None]
+        else:
+            joiner = ","
+            out = [_quote(x, safe) for x in value if value is not None]
+        if out:
+            return joiner.join(out)
+        else:
+            return
+    elif isinstance(value, dict):
+        keys = sorted(value.keys())
+        if explode:
+            out = [_quote(key, safe) + "=" + \
+                   _quote(value[key], safe) for key in keys \
+                   if value[key] is not None]
+        else:
+            joiner = ","
+            out = [_quote(key, safe) + "," + \
+                   _quote(value[key], safe) \
+                   for key in keys if value[key] is not None]
+        if out:
+            return joiner.join(out)
+        else:
+            return
+    elif value is None:
+        return
+    else:
+        return _quote(value, safe, prefix)
+
+
+def _tostring_semi(varname, value, explode, prefix, operator, safe=""):
+    joiner = operator
+    if operator == "?":
+        joiner = "&"
+    if isinstance(value, list):
+        if explode:
+            out = [varname + "=" + _quote(x, safe) \
+                   for x in value if x is not None]
+            if out:
+                return joiner.join(out)
+            else:
+                return
+        else:
+            return varname + "=" + ",".join([_quote(x, safe) \
+                                             for x in value])
+    elif isinstance(value, dict):
+        keys = sorted(value.keys())
+        if explode:
+            return joiner.join([_quote(key, safe) + "=" + \
+                                _quote(value[key], safe) \
+                                for key in keys if key is not None])
+        else:
+            return varname + "=" + ",".join([_quote(key, safe) + "," + \
+                             _quote(value[key], safe) for key in keys \
+                             if key is not None])
+    else:
+        if value is None:
+            return
+        elif value:
+            return (varname + "=" + _quote(value, safe, prefix))
+        else:
+            return varname
+
+
+def _tostring_query(varname, value, explode, prefix, operator, safe=""):
+    joiner = operator
+    if operator in ["?", "&"]:
+        joiner = "&"
+    if isinstance(value, list):
+        if 0 == len(value):
+            return None
+        if explode:
+            return joiner.join([varname + "=" + _quote(x, safe) \
+                                for x in value])
+        else:
+            return (varname + "=" + ",".join([_quote(x, safe) \
+                                             for x in value]))
+    elif isinstance(value, dict):
+        if 0 == len(value):
+            return None
+        keys = sorted(value.keys())
+        if explode:
+            return joiner.join([_quote(key, safe) + "=" + \
+                                _quote(value[key], safe) \
+                                for key in keys])
+        else:
+            return varname + "=" + \
+                   ",".join([_quote(key, safe) + "," + \
+                             _quote(value[key], safe) for key in keys])
+    else:
+        if value is None:
+            return
+        elif value:
+            return (varname + "=" + _quote(value, safe, prefix))
+        else:
+            return (varname + "=")
+
+
+TOSTRING = {
+    "" : _tostring,
+    "+": _tostring,
+    "#": _tostring,
+    ";": _tostring_semi,
+    "?": _tostring_query,
+    "&": _tostring_query,
+    "/": _tostring_path,
+    ".": _tostring_path,
+    }
+
+
+def expand(template, variables):
+    """
+    Expand template as a URI Template using variables.
+    """
+    def _sub(match):
+        expression = match.group(1)
+        operator = ""
+        if expression[0] in OPERATOR:
+            operator = expression[0]
+            varlist = expression[1:]
+        else:
+            varlist = expression
+
+        safe = ""
+        if operator in ["+", "#"]:
+            safe = RESERVED
+        varspecs = varlist.split(",")
+        varnames = []
+        defaults = {}
+        for varspec in varspecs:
+            default = None
+            explode = False
+            prefix = None
+            if "=" in varspec:
+                varname, default = tuple(varspec.split("=", 1))
+            else:
+                varname = varspec
+            if varname[-1] == "*":
+                explode = True
+                varname = varname[:-1]
+            elif ":" in varname:
+                try:
+                    prefix = int(varname[varname.index(":")+1:])
+                except ValueError:
+                    raise ValueError("non-integer prefix '{0}'".format(
+                       varname[varname.index(":")+1:]))
+                varname = varname[:varname.index(":")]
+            if default:
+                defaults[varname] = default
+            varnames.append((varname, explode, prefix))
+
+        retval = []
+        joiner = operator
+        start = operator
+        if operator == "+":
+            start = ""
+            joiner = ","
+        if operator == "#":
+            joiner = ","
+        if operator == "?":
+            joiner = "&"
+        if operator == "&":
+            start = "&"
+        if operator == "":
+            joiner = ","
+        for varname, explode, prefix in varnames:
+            if varname in variables:
+                value = variables[varname]
+                if not value and value != "" and varname in defaults:
+                    value = defaults[varname]
+            elif varname in defaults:
+                value = defaults[varname]
+            else:
+                continue
+            expanded = TOSTRING[operator](
+              varname, value, explode, prefix, operator, safe=safe)
+            if expanded is not None:
+                retval.append(expanded)
+        if len(retval) > 0:
+            return start + joiner.join(retval)
+        else:
+            return ""
+
+    return TEMPLATE.sub(_sub, template)
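+
+
+# Example (sketch): a simple RFC 6570 expansion; the template and variable
+# values are illustrative assumptions.
+#
+#   expand("http://example.com/{path}{?q,n}", {"path": "search", "q": "cat"})
+#   # -> 'http://example.com/search?q=cat'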
diff --git a/client/common_lib/perf_expectations/__init__.py b/utils/frozen_chromite/utils/__init__.py
similarity index 100%
copy from client/common_lib/perf_expectations/__init__.py
copy to utils/frozen_chromite/utils/__init__.py
diff --git a/utils/frozen_chromite/utils/attrs_freezer.py b/utils/frozen_chromite/utils/attrs_freezer.py
new file mode 100644
index 0000000..05fb960
--- /dev/null
+++ b/utils/frozen_chromite/utils/attrs_freezer.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helpers for constructing objects with frozen attributes."""
+
+from __future__ import print_function
+
+import types
+
+import six
+
+
+class Error(Exception):
+  """Raised when frozen attribute value is modified."""
+
+
+class Class(type):
+  """Metaclass for any class to support freezing attribute values.
+
+  This metaclass can be used by any class to add the ability to
+  freeze attribute values with the Freeze method.
+
+  Use by adding this line before a class:
+    @six.add_metaclass(attrs_freezer.Class)
+  """
+  _FROZEN_ERR_MSG = 'Attribute values are frozen, cannot alter %s.'
+
+  def __new__(cls, clsname, bases, scope):
+    # Create Freeze method that freezes current attributes.
+    if 'Freeze' in scope:
+      raise TypeError('Class %s has its own Freeze method, cannot use with'
+                      ' the attrs_freezer.Class metaclass.' % clsname)
+
+    # Make sure cls will have _FROZEN_ERR_MSG set.
+    scope.setdefault('_FROZEN_ERR_MSG', cls._FROZEN_ERR_MSG)
+
+    # Create the class.
+    # pylint: disable=bad-super-call
+    newcls = super(Class, cls).__new__(cls, clsname, bases, scope)
+
+    # Replace cls.__setattr__ with the one that honors freezing.
+    orig_setattr = newcls.__setattr__
+
+    def SetAttr(obj, name, value):
+      """If the object is frozen then abort."""
+      # pylint: disable=protected-access
+      if getattr(obj, '_frozen', False):
+        raise Error(obj._FROZEN_ERR_MSG % name)
+      if isinstance(orig_setattr, types.MethodType):
+        orig_setattr(obj, name, value)
+      else:
+        super(newcls, obj).__setattr__(name, value)
+    newcls.__setattr__ = SetAttr
+
+    # Add new newcls.Freeze method.
+    def Freeze(obj):
+      # pylint: disable=protected-access
+      obj._frozen = True
+    newcls.Freeze = Freeze
+
+    return newcls
+
+
+@six.add_metaclass(Class)
+class Mixin(object):
+  """Alternate mechanism for freezing attributes in a class.
+
+  If an existing class is not a new-style class then it will be unable to
+  use the attrs_freezer.Class metaclass directly.  Simply use this class
+  as a mixin instead to accomplish the same thing.
+  """
diff --git a/utils/frozen_chromite/utils/key_value_store.py b/utils/frozen_chromite/utils/key_value_store.py
new file mode 100644
index 0000000..0c0e8bf
--- /dev/null
+++ b/utils/frozen_chromite/utils/key_value_store.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common python commands used by various build scripts."""
+
+from __future__ import print_function
+
+import contextlib
+import errno
+
+import six
+
+
+@contextlib.contextmanager
+def _Open(obj, mode='r'):
+  """Convenience ctx that accepts a file path or an already open file object."""
+  if isinstance(obj, six.string_types):
+    with open(obj, mode=mode) as f:
+      yield f
+  else:
+    yield obj
+
+
+def LoadData(data, multiline=False, source='<data>'):
+  """Turn key=value content into a dict
+
+  Note: If you're designing a new data store, please use json rather than
+  this format.  This func is designed to work with legacy/external files
+  where json isn't an option.
+
+  Only UTF-8 content is supported currently.
+
+  Args:
+    data: The data to parse.
+    multiline: Allow a value enclosed by quotes to span multiple lines.
+    source: Helpful string for users to diagnose source of errors.
+
+  Returns:
+    a dict of all the key=value pairs found in the file.
+  """
+  d = {}
+
+  key = None
+  in_quotes = None
+  for raw_line in data.splitlines(True):
+    line = raw_line.split('#')[0]
+    if not line.strip():
+      continue
+
+    # Continue processing a multiline value.
+    if multiline and in_quotes and key:
+      if line.rstrip()[-1] == in_quotes:
+        # Wrap up the multiline value if the line ends with a quote.
+        d[key] += line.rstrip()[:-1]
+        in_quotes = None
+      else:
+        d[key] += line
+      continue
+
+    chunks = line.split('=', 1)
+    if len(chunks) != 2:
+      raise ValueError('Malformed key=value file %r; line %r'
+                       % (source, raw_line))
+    key = chunks[0].strip()
+    val = chunks[1].strip()
+    if len(val) >= 2 and val[0] in '"\'' and val[0] == val[-1]:
+      # Strip matching quotes on the same line.
+      val = val[1:-1]
+    elif val and multiline and val[0] in '"\'':
+      # Unmatched quote here indicates a multiline value. Do not
+      # strip the '\n' at the end of the line.
+      in_quotes = val[0]
+      val = chunks[1].lstrip()[1:]
+    d[key] = val
+
+  return d
+
+
+def LoadFile(obj, ignore_missing=False, multiline=False):
+  """Turn a key=value file into a dict
+
+  Note: If you're designing a new data store, please use json rather than
+  this format.  This func is designed to work with legacy/external files
+  where json isn't an option.
+
+  Only UTF-8 content is supported currently.
+
+  Args:
+    obj: The file to read.  Can be a path or an open file object.
+    ignore_missing: If the file does not exist, return an empty dict.
+    multiline: Allow a value enclosed by quotes to span multiple lines.
+
+  Returns:
+    a dict of all the key=value pairs found in the file.
+  """
+  try:
+    with _Open(obj) as f:
+      return LoadData(f.read(), multiline=multiline, source=obj)
+  except EnvironmentError as e:
+    if not (ignore_missing and e.errno == errno.ENOENT):
+      raise
+
+  return {}
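+
+
+# Example (sketch): parsing key=value content; the data is illustrative.
+#
+#   LoadData('FOO=bar\nBAZ="quoted value"\n')
+#   # -> {'FOO': 'bar', 'BAZ': 'quoted value'}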
diff --git a/utils/frozen_chromite/utils/memoize.py b/utils/frozen_chromite/utils/memoize.py
new file mode 100644
index 0000000..79d61f8
--- /dev/null
+++ b/utils/frozen_chromite/utils/memoize.py
@@ -0,0 +1,107 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions for automatic caching of expensive function calls."""
+
+from __future__ import print_function
+
+import functools
+import sys
+
+import six
+
+
+def MemoizedSingleCall(functor):
+  """Decorator for simple functor targets, caching the results
+
+  The functor must accept no arguments beyond either a class or self (depending
+  on if this is used in a classmethod/instancemethod context).  Results of the
+  wrapped method will be written to the class/instance namespace in a specially
+  named cached value.  All future invocations will just reuse that value.
+
+  Note that this cache is per-process, so sibling and parent processes won't
+  notice updates to the cache.
+  """
+  # TODO(build): Should we rebase to snakeoil.klass.cached* functionality?
+  # pylint: disable=protected-access
+  @functools.wraps(functor)
+  def wrapper(obj):
+    key = wrapper._cache_key
+    val = getattr(obj, key, None)
+    if val is None:
+      val = functor(obj)
+      setattr(obj, key, val)
+    return val
+
+  # Use name mangling to store the cached value in a (hopefully) unique place.
+  wrapper._cache_key = '_%s_cached' % (functor.__name__.lstrip('_'),)
+  return wrapper
+
+
+def Memoize(f):
+  """Decorator for memoizing a function.
+
+  Caches all calls to the function using a ._memo_cache dict mapping (args,
+  kwargs) to the results of the first function call with those args and kwargs.
+
+  If any of args or kwargs are not hashable, trying to store them in a dict will
+  cause a TypeError.
+
+  Note that this cache is per-process, so sibling and parent processes won't
+  notice updates to the cache.
+  """
+  # pylint: disable=protected-access
+  f._memo_cache = {}
+
+  @functools.wraps(f)
+  def wrapper(*args, **kwargs):
+    # Make sure that the key is hashable... as long as the contents of args and
+    # kwargs are hashable.
+    # TODO(phobbs) we could add an option to use the id(...) of an object if
+    # it's not hashable.  Then "MemoizedSingleCall" would be obsolete.
+    key = (tuple(args), tuple(sorted(kwargs.items())))
+    if key in f._memo_cache:
+      return f._memo_cache[key]
+
+    result = f(*args, **kwargs)
+    f._memo_cache[key] = result
+    return result
+
+  return wrapper
+
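+# Example (sketch): memoizing a pure function; the function is illustrative.
+#
+#   @Memoize
+#   def _expensive(x, y=0):
+#     return x + y
+#
+#   _expensive(1, y=2)  # computed once
+#   _expensive(1, y=2)  # served from f._memo_cache
+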
+
+def SafeRun(functors, combine_exceptions=False):
+  """Executes a list of functors, continuing on exceptions.
+
+  Args:
+    functors: An iterable of functors to call.
+    combine_exceptions: If set, and multiple exceptions are encountered,
+      SafeRun will raise a RuntimeError containing a list of all the exceptions.
+      If only one exception is encountered, then the default behavior of
+      re-raising the original exception with unmodified stack trace will be
+      kept.
+
+  Raises:
+    The first exception encountered, with corresponding backtrace, unless
+    |combine_exceptions| is specified and there is more than one exception
+    encountered, in which case a RuntimeError containing a list of all the
+    exceptions that were encountered is raised.
+  """
+  errors = []
+
+  for f in functors:
+    try:
+      f()
+    except Exception as e:
+      # Append the exception object and the traceback.
+      errors.append((e, sys.exc_info()[2]))
+
+  if errors:
+    if len(errors) == 1 or not combine_exceptions:
+      # To preserve the traceback.
+      inst, tb = errors[0]
+      six.reraise(inst, None, tb)
+    else:
+      raise RuntimeError([e[0] for e in errors])
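+
+
+# Example (sketch): running several callables and collecting failures; the
+# lambdas are illustrative.
+#
+#   SafeRun([lambda: 1 / 0, lambda: {}['missing']], combine_exceptions=True)
+#   # raises RuntimeError([ZeroDivisionError(...), KeyError(...)])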
diff --git a/utils/frozen_chromite/utils/outcap.py b/utils/frozen_chromite/utils/outcap.py
new file mode 100644
index 0000000..0fdea3f
--- /dev/null
+++ b/utils/frozen_chromite/utils/outcap.py
@@ -0,0 +1,229 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tools for capturing program output at a low level.
+
+Mostly useful for capturing stdout/stderr as directly assigning to those
+variables won't work everywhere.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+import sys
+import tempfile
+
+
+class _FdCapturer(object):
+  """Helper class to capture output at the file descriptor level.
+
+  This is meant to be used with sys.stdout or sys.stderr. By capturing
+  file descriptors, this will also intercept subprocess output, which
+  reassigning sys.stdout or sys.stderr will not do.
+
+  Output will only be captured, it will no longer be printed while
+  the capturer is active.
+  """
+
+  def __init__(self, source, output=None):
+    """Construct the _FdCapturer object.
+
+    Does not start capturing until Start() is called.
+
+    Args:
+      source: A file object to capture. Typically sys.stdout or
+        sys.stderr, but will work with anything that implements flush()
+        and fileno().
+      output: A file name where the captured output is to be stored. If None,
+        then the output will be stored to a temporary file.
+    """
+    self._source = source
+    self._captured = ''
+    self._saved_fd = None
+    self._tempfile = None
+    self._capturefile = None
+    self._capturefile_reader = None
+    self._capturefile_name = output
+
+  def _SafeCreateTempfile(self, tempfile_obj):
+    """Ensure that the tempfile is created safely.
+
+    (1) Stash away a reference to the tempfile.
+    (2) Unlink the file from the filesystem.
+
+    (2) ensures that if we crash, the file gets deleted. (1) ensures that while
+    we are running, we hold a reference to the file so the system does not close
+    the file.
+
+    Args:
+      tempfile_obj: A tempfile object.
+    """
+    self._tempfile = tempfile_obj
+    os.unlink(tempfile_obj.name)
+
+  def Start(self):
+    """Begin capturing output."""
+    if self._capturefile_name is None:
+      tempfile_obj = tempfile.NamedTemporaryFile(delete=False)
+      self._capturefile = tempfile_obj.file
+      self._capturefile_name = tempfile_obj.name
+      self._capturefile_reader = open(self._capturefile_name)
+      self._SafeCreateTempfile(tempfile_obj)
+    else:
+      # Open file passed in for writing. Set buffering=1 for line level
+      # buffering.
+      self._capturefile = open(self._capturefile_name, 'w', buffering=1)
+      self._capturefile_reader = open(self._capturefile_name)
+    # Save the original fd so we can revert in Stop().
+    self._saved_fd = os.dup(self._source.fileno())
+    os.dup2(self._capturefile.fileno(), self._source.fileno())
+
+  def Stop(self):
+    """Stop capturing output."""
+    self.GetCaptured()
+    if self._saved_fd is not None:
+      os.dup2(self._saved_fd, self._source.fileno())
+      os.close(self._saved_fd)
+      self._saved_fd = None
+    # If capturefile and capturefile_reader exist, close them as they were
+    # opened in self.Start().
+    if self._capturefile_reader is not None:
+      self._capturefile_reader.close()
+      self._capturefile_reader = None
+    if self._capturefile is not None:
+      self._capturefile.close()
+      self._capturefile = None
+
+  def GetCaptured(self):
+    """Return all output captured up to this point.
+
+    Can be used while capturing or after Stop() has been called.
+    """
+    self._source.flush()
+    if self._capturefile_reader is not None:
+      self._captured += self._capturefile_reader.read()
+    return self._captured
+
+  def ClearCaptured(self):
+    """Erase all captured output."""
+    self.GetCaptured()
+    self._captured = ''
+
+
+class OutputCapturer(object):
+  """Class for capturing stdout/stderr output.
+
+  Class is designed as a 'ContextManager'.
+
+  Examples:
+    with outcap.OutputCapturer() as output:
+      # Capturing of stdout/stderr automatically starts now.
+      # Do stuff that sends output to stdout/stderr.
+      # Capturing automatically stops at end of 'with' block.
+
+    # stdout/stderr can be retrieved from the OutputCapturer object:
+    stdout = output.GetStdoutLines() # Or other access methods
+
+    # Some Assert methods are only valid if capturing was used in test.
+    self.AssertOutputContainsError() # Or other related methods
+
+    # OutputCapturer can also be used to capture output to specified files.
+    with outcap.OutputCapturer(stdout_path='/tmp/stdout.txt') as output:
+      # Do stuff.
+      # stdout will be captured to /tmp/stdout.txt.
+  """
+
+  OPER_MSG_SPLIT_RE = re.compile(r'^\033\[1;.*?\033\[0m$|^[^\n]*$',
+                                 re.DOTALL | re.MULTILINE)
+
+  __slots__ = ['_stdout_capturer', '_stderr_capturer', '_quiet_fail']
+
+  def __init__(self, stdout_path=None, stderr_path=None, quiet_fail=False):
+    """Initalize OutputCapturer with capture files.
+
+    If OutputCapturer is initialized with filenames to capture stdout and stderr
+    to, then those files are used. Otherwise, temporary files are created.
+
+    Args:
+      stdout_path: File to capture stdout to. If None, a temporary file is used.
+      stderr_path: File to capture stderr to. If None, a temporary file is used.
+      quiet_fail: If True fail quietly without printing the captured stdout and
+        stderr.
+    """
+    self._stdout_capturer = _FdCapturer(sys.stdout, output=stdout_path)
+    self._stderr_capturer = _FdCapturer(sys.stderr, output=stderr_path)
+    self._quiet_fail = quiet_fail
+
+  def __enter__(self):
+    # This method is called with entering 'with' block.
+    self.StartCapturing()
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    # This method is called when exiting 'with' block.
+    self.StopCapturing()
+
+    if exc_type and not self._quiet_fail:
+      print('Exception during output capturing: %r' % (exc_val,))
+      stdout = self.GetStdout()
+      if stdout:
+        print('Captured stdout was:\n%s' % stdout)
+      else:
+        print('No captured stdout')
+      stderr = self.GetStderr()
+      if stderr:
+        print('Captured stderr was:\n%s' % stderr)
+      else:
+        print('No captured stderr')
+
+  def StartCapturing(self):
+    """Begin capturing stdout and stderr."""
+    self._stdout_capturer.Start()
+    self._stderr_capturer.Start()
+
+  def StopCapturing(self):
+    """Stop capturing stdout and stderr."""
+    self._stdout_capturer.Stop()
+    self._stderr_capturer.Stop()
+
+  def ClearCaptured(self):
+    """Clear any captured stdout/stderr content."""
+    self._stdout_capturer.ClearCaptured()
+    self._stderr_capturer.ClearCaptured()
+
+  def GetStdout(self):
+    """Return captured stdout so far."""
+    return self._stdout_capturer.GetCaptured()
+
+  def GetStderr(self):
+    """Return captured stderr so far."""
+    return self._stderr_capturer.GetCaptured()
+
+  def _GetOutputLines(self, output, include_empties):
+    """Split |output| into lines, optionally |include_empties|.
+
+    Return array of lines.
+    """
+
+    lines = self.OPER_MSG_SPLIT_RE.findall(output)
+    if not include_empties:
+      lines = [ln for ln in lines if ln]
+
+    return lines
+
+  def GetStdoutLines(self, include_empties=True):
+    """Return captured stdout so far as array of lines.
+
+    If |include_empties| is false filter out all empty lines.
+    """
+    return self._GetOutputLines(self.GetStdout(), include_empties)
+
+  def GetStderrLines(self, include_empties=True):
+    """Return captured stderr so far as array of lines.
+
+    If |include_empties| is false filter out all empty lines.
+    """
+    return self._GetOutputLines(self.GetStderr(), include_empties)
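+
+
+# Example (sketch): capturing stdout at the file-descriptor level, including
+# subprocess output; the command is illustrative.
+#
+#   import subprocess
+#   with OutputCapturer() as cap:
+#     print('hello')
+#     subprocess.call(['echo', 'from a child process'])
+#   lines = cap.GetStdoutLines()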
diff --git a/utils/generate_metadata.py b/utils/generate_metadata.py
new file mode 100644
index 0000000..826fdb8
--- /dev/null
+++ b/utils/generate_metadata.py
@@ -0,0 +1,143 @@
+#!/usr/bin/python3
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generate metadata for build from Autotest ctrl files."""
+
+import argparse
+import os
+import six
+import sys
+
+# If running in Autotest dir, keep this.
+os.environ["PY_VERSION"] = '3'
+
+import common
+
+# NOTE: this MUST be run in Python 3; if we get configured back to PY2, exit.
+if six.PY2:
+    exit(1)
+
+from autotest_lib.server.cros.dynamic_suite import control_file_getter
+from autotest_lib.client.common_lib import control_data
+
+from chromiumos.test.api import test_case_metadata_pb2 as tc_metadata_pb
+from chromiumos.test.api import test_harness_pb2 as th_pb
+from chromiumos.test.api import test_case_pb2 as tc_pb
+
+HARNESS = th_pb.TestHarness.Tauto()
+
+
+def parse_local_arguments(args):
+    """Parse the CLI."""
+    parser = argparse.ArgumentParser(
+            description="Prep Autotest, Tast, & Services for DockerBuild.")
+    parser.add_argument('-autotest_path',
+                        dest='autotest_path',
+                        default='../../../../third_party/autotest/files/',
+                        help='path to autotest/files relative to this script.')
+    parser.add_argument('-output_file',
+                        dest='output_file',
+                        default=None,
+                        help='Where to write the serialized pb.')
+    return parser.parse_args(args)
+
+
+def read_file(filename):
+    """Read the given file."""
+    with open(filename, 'r') as f:
+        return f.read()
+
+
+def all_control_files(args):
+    """Return all control files as control file objs."""
+    subpaths = ['server/site_tests', 'client/site_tests']
+    start_cwd = os.getcwd()
+    try:
+        os.chdir(args.autotest_path)
+
+        # Might not be needed, but this resolves out the ../
+        autotest_path = os.getcwd()
+
+        directories = [os.path.join(autotest_path, p) for p in subpaths]
+        f = control_file_getter.FileSystemGetter(directories)
+    except Exception as e:
+        raise Exception("Failed to find control files at path %s: %s" %
+                        (args.autotest_path, e))
+
+    finally:
+        os.chdir(start_cwd)
+    return f._get_control_file_list()
+
+
+def serialize_test_case_info(data):
+    """Return a serialized TestCaseInfo obj."""
+    serialized_contacts = tc_metadata_pb.Contact(email=data.author)
+    return tc_metadata_pb.TestCaseInfo(owners=[serialized_contacts])
+
+
+def serialize_tags(data):
+    """Return a serialized tags obj (list)."""
+    serialized_tags = []
+    for value in data.dependencies:
+        serialized_tags.append(tc_pb.TestCase.Tag(value=value))
+    for value in data.attributes:
+        serialized_tags.append(tc_pb.TestCase.Tag(value=value))
+    if data.test_class:
+        serialized_tags.append(
+                tc_pb.TestCase.Tag(
+                        value="test_class:{}".format(data.test_class)))
+    return serialized_tags
+
+
+def serialize_test_case(data):
+    """Return a serialized api.TestCase obj."""
+    serialized_testcase_id = tc_pb.TestCase.Id(value="tauto." + data.name)
+    tags = serialize_tags(data)
+    return tc_pb.TestCase(id=serialized_testcase_id, name=data.name, tags=tags)
+
+
+def serialized_test_case_exec(data):
+    """Return a serialized TestCaseExec obj."""
+    serialized_test_harness = th_pb.TestHarness(tauto=HARNESS)
+    return tc_metadata_pb.TestCaseExec(test_harness=serialized_test_harness)
+
+
+def serialized_test_case_metadata(data):
+    """Return a TestCaseMetadata obj from a given control file."""
+    serialized_meta_data = tc_metadata_pb.TestCaseMetadata(
+            test_case_exec=serialized_test_case_exec(data),
+            test_case=serialize_test_case(data),
+            test_case_info=serialize_test_case_info(data))
+    return serialized_meta_data
+
+
+def serialized_test_case_metadata_list(data):
+    """Return a TestCaseMetadataList obj from a list of TestCaseMetadata pb."""
+    serialized_meta_data_list = tc_metadata_pb.TestCaseMetadataList(
+            values=data)
+    return serialized_meta_data_list
+
+
+def main():
+    """Generate the metadata, and if an output path is given, save it."""
+    args = parse_local_arguments(sys.argv[1:])
+    ctrlfiles = all_control_files(args)
+    serialized_metadata = []
+    for file_path in ctrlfiles:
+        text = read_file(file_path)
+        path = file_path
+
+        test = control_data.parse_control_string(text,
+                                                 raise_warnings=True,
+                                                 path=path)
+        serialized_metadata.append(serialized_test_case_metadata(test))
+
+    serialized = serialized_test_case_metadata_list(serialized_metadata)
+    if args.output_file:
+        with open(args.output_file, 'wb') as wf:
+            wf.write(serialized.SerializeToString())
+
+
+if __name__ == '__main__':
+    main()
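+
+# Example invocation (sketch; the output path is an assumption):
+#   ./generate_metadata.py \
+#       -autotest_path ../../../../third_party/autotest/files/ \
+#       -output_file /tmp/autotest_metadata.pb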
diff --git a/utils/generate_metadata_unittest.py b/utils/generate_metadata_unittest.py
new file mode 100644
index 0000000..58d5f6c
--- /dev/null
+++ b/utils/generate_metadata_unittest.py
@@ -0,0 +1,160 @@
+#!/usr/bin/python3
+
+# Copyright (c) 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import six
+import unittest
+import tempfile
+import shutil
+
+os.environ["PY_VERSION"] = '3'
+
+import common
+
+# These tests are strictly not supported in python2.
+if six.PY2:
+    exit(0)
+
+from autotest_lib.client.common_lib import control_data
+from autotest_lib.utils import generate_metadata
+
+CONTROL_DATA1 = """
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'an author with email@google.com'
+NAME = 'fake_test1'
+PURPOSE = 'A fake test.'
+ATTRIBUTES = 'suite:fake_suite1, suite:fake_suite2'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'audio'
+TEST_TYPE = 'client'
+DEPENDENCIES = 'fakedep1'
+
+DOC = '''
+a doc
+'''
+
+job.run_test('fake_test1')
+
+"""
+
+CONTROL_DATA2 = """
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'an author with email@google.com'
+NAME = 'fake_test2'
+PURPOSE = 'A fake test.'
+ATTRIBUTES = 'suite:fake_suite1, suite:fake_suite2'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'audio'
+TEST_TYPE = 'client'
+DEPENDENCIES = 'fakedep2'
+
+DOC = '''
+a doc
+'''
+
+job.run_test('fake_test2')
+
+"""
+
+
+class Namespace:
+    """Stub for mocking args."""
+
+    def __init__(self, **kwargs):
+        self.__dict__.update(kwargs)
+
+
+class MetadataTest(unittest.TestCase):
+    """Test generate_metadata."""
+
+    def setUp(self):
+        """Build up a tmp directory to host control files."""
+        self.tmp_dir = tempfile.mkdtemp()
+        os.makedirs(os.path.join(self.tmp_dir, 'server/site_tests'))
+        os.makedirs(os.path.join(self.tmp_dir, 'client/site_tests'))
+        self.path1 = os.path.join(self.tmp_dir, 'server/site_tests',
+                                  'control.1')
+        self.path2 = os.path.join(self.tmp_dir, 'client/site_tests',
+                                  'control.2')
+        self.test1 = control_data.parse_control_string(CONTROL_DATA1,
+                                                       raise_warnings=True,
+                                                       path=self.path1)
+        self.test2 = control_data.parse_control_string(CONTROL_DATA2,
+                                                       raise_warnings=True,
+                                                       path=self.path2)
+
+    def tearDown(self):
+        """Delete the tmp directory."""
+        shutil.rmtree(self.tmp_dir)
+
+    def test_args(self):
+        """Test CLI."""
+        parsed = generate_metadata.parse_local_arguments(
+                ['-autotest_path', '/tauto/path', '-output_file', 'testout'])
+        self.assertEqual(parsed.autotest_path, '/tauto/path')
+        self.assertEqual(parsed.output_file, 'testout')
+
+    def test_all_control_files(self):
+        """Test all_control_files finds all ctrl files in the expected dirs."""
+        with open(self.path1, 'w') as wf:
+            wf.write(CONTROL_DATA1)
+        with open(self.path2, 'w') as wf:
+            wf.write(CONTROL_DATA2)
+
+        files = generate_metadata.all_control_files(
+                Namespace(autotest_path=self.tmp_dir))
+
+        # Verify the files are found.
+        self.assertEqual(set(files), set([self.path1, self.path2]))
+
+    def test_serialization(self):
+        """Test a single control file gets properly serialized."""
+        meta_data = generate_metadata.serialized_test_case_metadata(self.test1)
+        self.assertEqual(meta_data.test_case.id.value, 'tauto.fake_test1')
+        self.assertEqual(meta_data.test_case.name, 'fake_test1')
+        # verify tags
+        expected_tags = set([
+                'fakedep1', 'test_class:audio', 'suite:fake_suite1',
+                'suite:fake_suite2'
+        ])
+        actual_tags = set([item.value for item in meta_data.test_case.tags])
+        self.assertEqual(expected_tags, actual_tags)
+        # verify harness. This is a bit of a hack but works and keeps import
+        # hacking down.
+        self.assertIn('tauto', str(meta_data.test_case_exec.test_harness))
+        # verify owners
+        expected_owners = set(
+                [item.email for item in meta_data.test_case_info.owners])
+        self.assertEqual(expected_owners,
+                         set(['an author with email@google.com']))
+
+    def test_serialized_test_case_metadata_list(self):
+        """Test all control file get properly serialized."""
+        serialized_list = generate_metadata.serialized_test_case_metadata_list(
+                [
+                        generate_metadata.serialized_test_case_metadata(
+                                self.test1),
+                        generate_metadata.serialized_test_case_metadata(
+                                self.test2)
+                ])
+        names = set([item.test_case.name for item in serialized_list.values])
+        self.assertEqual(set(['fake_test1', 'fake_test2']), names)
+
+
+if __name__ == '__main__':
+    if six.PY2:
+        print('cannot run in py2')
+        exit(0)
+    else:
+        unittest.main()
diff --git a/utils/gslib_unittest.py b/utils/gslib_unittest.py
index 8316e32..4acca30 100755
--- a/utils/gslib_unittest.py
+++ b/utils/gslib_unittest.py
@@ -1,5 +1,5 @@
-#!/usr/bin/python2.7
-#
+#!/usr/bin/python3
+
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/utils/install_docker_chroot.sh b/utils/install_docker_chroot.sh
new file mode 100755
index 0000000..47d0db8
--- /dev/null
+++ b/utils/install_docker_chroot.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script installs pip2, pip3, and the Docker library for Python 2 and 3.
+
+
+function install_docker_library {
+    mkdir /tmp/docker_library_bootstrap
+
+    # Install pip2
+    wget -O /tmp/docker_library_bootstrap/get_pip2.py https://bootstrap.pypa.io/pip/2.7/get-pip.py
+    python2 /tmp/docker_library_bootstrap/get_pip2.py
+
+    # Install pip3
+    wget -O /tmp/docker_library_bootstrap/get_pip3.py https://bootstrap.pypa.io/pip/3.6/get-pip.py
+    python3 /tmp/docker_library_bootstrap/get_pip3.py
+
+    # Install Docker Python SDK
+    pip2 install docker==4.4.4 --upgrade
+    pip3 install docker==4.4.4 --upgrade
+
+    # Cleaning up
+    rm -rf /tmp/docker_library_bootstrap
+}
+
+cat << EOF
+###############################################################################
+IMPORTANT: Please read the information below
+###############################################################################
+The script will install the following into your system:
+    - pip2
+    - pip3
+    - python2/3 Docker SDK
+Please run the script using sudo within a chroot or container, as it might
+permanently change your environment.
+
+DO NOT RUN THIS ON YOUR WORKSTATION.
+###############################################################################
+EOF
+
+while true; do
+    read -p "Do you wish to proceed? [y/N]: " yn
+    case "$yn" in
+        [Yy]* ) install_docker_library; break;;
+        [Nn]* ) exit;;
+        * ) echo "Please answer yes or no.";;
+    esac
+done
\ No newline at end of file
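For reference, a minimal post-install sanity check of the pin above can be run from either interpreter; this is only an illustrative sketch, assuming the Docker SDK exposes __version__ as usual:

    # Hedged check: run once under python2 and once under python3.
    import docker

    print('Docker SDK version:', docker.__version__)
    assert docker.__version__ == '4.4.4', 'unexpected Docker SDK version'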
diff --git a/utils/labellib.py b/utils/labellib.py
index 0adec5c..01292cc 100644
--- a/utils/labellib.py
+++ b/utils/labellib.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -15,8 +16,12 @@
 strings, which are common keyval label values.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import collections
 import re
+import six
 
 
 class Key(object):
@@ -86,7 +91,7 @@
         """Return labels as a list of strings."""
         str_labels = self._plain_labels[:]
         keyval_labels = (KeyvalLabel(key, value)
-                         for key, value in self.iteritems())
+                         for key, value in six.iteritems(self))
         str_labels.extend(format_keyval_label(label)
                           for label in keyval_labels)
         return str_labels
diff --git a/utils/labellib_unittest.py b/utils/labellib_unittest.py
index 8caedb4..f5be52c 100755
--- a/utils/labellib_unittest.py
+++ b/utils/labellib_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.7
+#!/usr/bin/python3
 #
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
diff --git a/utils/modelviz/common.py b/utils/modelviz/common.py
deleted file mode 100644
index 41607e1..0000000
--- a/utils/modelviz/common.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import os, sys
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-autotest_dir = os.path.abspath(os.path.join(dirname, "..", ".."))
-client_dir = os.path.join(autotest_dir, "client")
-sys.path.insert(0, client_dir)
-import setup_modules
-sys.path.pop(0)
-setup_modules.setup(base_path=autotest_dir, root_module_name="autotest_lib")
diff --git a/utils/modelviz/generate_schema_diagrams.py b/utils/modelviz/generate_schema_diagrams.py
deleted file mode 100755
index c770580..0000000
--- a/utils/modelviz/generate_schema_diagrams.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python2
-
-"""
-Generates schema diagrams for Django apps.  Just run the script with no
-arguments.  If you don't have them installed, you'll need "dot" from the
-Graphviz package and Django.
-"""
-
-import common
-import os
-
-ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
-PROJECTS = (
-        ('frontend', 'tko'),
-        ('frontend', 'afe'),
-    )
-
-
-def main():
-    for project, app in PROJECTS:
-        settings = 'autotest_lib.%s.settings' % project
-        os.environ['DJANGO_SETTINGS_MODULE'] = settings
-
-        # import after setting DJANGO_SETTINGS_MODULE
-        from autotest_lib.contrib import modelviz
-
-        # hack to force reload of settings and app list
-        import django.conf
-        from django.db.models import loading
-        reload(django.conf)
-        reload(loading)
-
-        print 'Analyzing', project
-        dot_contents = modelviz.generate_dot([app])
-
-        dot_path = project + '.dot'
-        dotfile = open(dot_path, 'w')
-        dotfile.write(dot_contents)
-        dotfile.close()
-        print 'Wrote', dot_path
-
-        png_path = project + '.png'
-        os.system('dot -Tpng -o %s %s' % (png_path, dot_path))
-        print 'Generated', png_path
-        print
-
-        del os.environ['DJANGO_SETTINGS_MODULE']
-
-
-if __name__ == '__main__':
-    main()
diff --git a/utils/packager.py b/utils/packager.py
index 0fb0787..4e792d1 100755
--- a/utils/packager.py
+++ b/utils/packager.py
@@ -1,9 +1,12 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 
 """
 Utility to upload or remove the packages from the packages repository.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import logging, optparse, os, shutil, sys, tempfile
 import common
 from autotest_lib.client.common_lib import utils as client_utils
@@ -25,7 +28,7 @@
     For profilers we need to exclude everything except the __init__.py
     file so that the profilers can be imported.
     '''
-    exclude_string = ('--exclude="deps/*" --exclude="tests/*" '
+    exclude_string = ('--exclude="deps/*" --exclude="tests/*" --exclude=.git '
                       '--exclude="site_tests/*" --exclude="**.pyc"')
 
     # Get the profilers directory
@@ -98,7 +101,7 @@
         # --action=tar_only send in clients in the command line. Please
         # confirm the behaviour is expected before this type is enabled for
         # "tar_only" actions.
-        print ('Tar action not supported for pkg_type= %s, name = %s' %
-                pkg_type, name)
+        print('Tar action not supported for pkg_type= %s, name = %s' %
+              (pkg_type, name))
         return None
     # For all packages, the work-dir should have 'client' appended to it.
@@ -122,7 +125,7 @@
     exclude_string = ' .'
     names = [p.strip() for p in pkg_names.split(',')]
     for name in names:
-        print "process_packages: Processing %s ... " % name
+        print("process_packages: Processing %s ... " % name)
         if pkg_type == 'client':
             pkg_dir = src_dir
             exclude_string = get_exclude_string(pkg_dir)
@@ -164,7 +167,7 @@
             try:
                 try:
                     packages.check_diskspace(temp_dir)
-                except error.RepoDiskFullError, e:
+                except error.RepoDiskFullError as e:
                     msg = ("Temporary directory for packages %s does not have "
                            "enough space available: %s" % (temp_dir, e))
                     raise error.RepoDiskFullError(msg)
@@ -191,7 +194,7 @@
                 shutil.rmtree(temp_dir)
         elif action == ACTION_REMOVE:
             pkgmgr.remove_pkg(pkg_name, remove_checksum=True)
-        print "Done."
+        print("Done.")
 
 
 def tar_packages(pkgmgr, pkg_type, pkg_names, src_dir, temp_dir):
@@ -200,7 +203,7 @@
     exclude_string = ' .'
     names = [p.strip() for p in pkg_names.split(',')]
     for name in names:
-        print "tar_packages: Processing %s ... " % name
+        print("tar_packages: Processing %s ... " % name)
         if pkg_type == 'client':
             pkg_dir = src_dir
             exclude_string = get_exclude_string(pkg_dir)
@@ -221,9 +224,9 @@
             (pkg_name, pkg_name)) + exclude_string)
         # Check if tarball already exists. If it does, don't duplicate
         # the effort.
-        tarball_path = os.path.join(pkg_dir, pkg_name);
+        tarball_path = os.path.join(pkg_dir, pkg_name)
         if os.path.exists(tarball_path):
-          print("tar_packages: Tarball %s already exists" % tarball_path);
+            print("tar_packages: Tarball %s already exists" % tarball_path)
         else:
             tarball_path = pkgmgr.tar_package(pkg_name, pkg_dir,
                                               temp_dir, exclude_string_tar)
@@ -239,8 +242,8 @@
     temp_dir = tempfile.mkdtemp()
     try:
         packages.check_diskspace(temp_dir)
-    except error.RepoDiskFullError, e:
-        print ("Temp destination for packages is full %s, aborting upload: %s"
+    except error.RepoDiskFullError as e:
+        print("Temp destination for packages is full %s, aborting upload: %s"
                % (temp_dir, e))
         os.rmdir(temp_dir)
         sys.exit(1)
@@ -304,7 +307,7 @@
     elif name in names_site_test:
         src_dir = os.path.join(client_dir, 'site_tests')
     else:
-        print "Test %s not found" % name
+        print("Test %s not found" % name)
         sys.exit(0)
     return src_dir
 
diff --git a/utils/parallel.py b/utils/parallel.py
index 00b2bf9..6a03abf 100644
--- a/utils/parallel.py
+++ b/utils/parallel.py
@@ -1,4 +1,10 @@
+# Lint as: python2, python3
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import os, sys
+import six
+from six.moves import range
 
 
 class ParallelError(Exception):
@@ -31,9 +37,9 @@
                 functions[fn] = set()
 
         dependents = {}
-        for fn, deps in functions.iteritems():
+        for fn, deps in six.iteritems(functions):
             dependents[fn] = []
-        for fn, deps in functions.iteritems():
+        for fn, deps in six.iteritems(functions):
             for dep in deps:
                 dependents[dep].append(fn)
 
@@ -55,7 +61,7 @@
 
 
     def run_until_completion(self):
-        for fn, deps in self.functions.iteritems():
+        for fn, deps in six.iteritems(self.functions):
             if len(deps) == 0:
                 self.ready_to_run.append(fn)
 
@@ -63,7 +69,7 @@
         while len(self.pid_map) > 0 or len(self.ready_to_run) > 0:
             max_allowed = self.max_procs - len(self.pid_map)
             max_able = len(self.ready_to_run)
-            for i in xrange(min(max_allowed, max_able)):
+            for i in range(min(max_allowed, max_able)):
                 self._run(self.ready_to_run.pop())
 
             # Handle one proc that's finished.
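As a reader aid, here is a minimal sketch of how ParallelExecute appears to be driven (mirroring the unittest_suite.py hunks later in this change); the worker functions are made up, while the dependency-dict shape, the max_simultaneous_procs keyword, and ParallelError.errors come from the code in this diff:

    import common  # autotest path bootstrap, as in the other utils scripts
    from autotest_lib.utils import parallel

    def build():
        pass

    def test():
        pass

    # Map each callable to the set of callables it must wait for.
    functions = {build: set(), test: set([build])}
    pe = parallel.ParallelExecute(functions, max_simultaneous_procs=4)
    try:
        pe.run_until_completion()
    except parallel.ParallelError as err:
        print(err.errors)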
diff --git a/utils/pylintrc b/utils/pylintrc
index 42aa8e0..ef11578 100644
--- a/utils/pylintrc
+++ b/utils/pylintrc
@@ -14,7 +14,7 @@
 # Profiled execution.
 #profile=no
 
-# Add <file or directory> to the black list. It should be a base name, not a
+# Add <file or directory> to the deny list. It should be a base name, not a
 # path. You may set this option multiple times.
 #ignore=CVS
 
diff --git a/utils/reindent.py b/utils/reindent.py
index 65749f0..ff1a9e8 100755
--- a/utils/reindent.py
+++ b/utils/reindent.py
@@ -39,12 +39,18 @@
 you'd prefer. You can always use the --nobackup option to prevent this.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 __version__ = "1"
 
 import tokenize
 import os, shutil
 import sys
 
+from six.moves import range
+
 verbose    = 0
 recurse    = 0
 dryrun     = 0
@@ -52,8 +58,8 @@
 
 def usage(msg=None):
     if msg is not None:
-        print >> sys.stderr, msg
-    print >> sys.stderr, __doc__
+        print(msg, file=sys.stderr)
+    print(__doc__, file=sys.stderr)
 
 def errprint(*args):
     sep = ""
@@ -68,7 +74,7 @@
     try:
         opts, args = getopt.getopt(sys.argv[1:], "drnvh",
                         ["dryrun", "recurse", "nobackup", "verbose", "help"])
-    except getopt.error, msg:
+    except getopt.error as msg:
         usage(msg)
         return
     for o, a in opts:
@@ -94,7 +100,7 @@
 def check(file):
     if os.path.isdir(file) and not os.path.islink(file):
         if verbose:
-            print "listing directory", file
+            print("listing directory", file)
         names = os.listdir(file)
         for name in names:
             fullname = os.path.join(file, name)
@@ -105,10 +111,10 @@
         return
 
     if verbose:
-        print "checking", file, "...",
+        print("checking", file, "...", end=' ')
     try:
         f = open(file)
-    except IOError, msg:
+    except IOError as msg:
         errprint("%s: I/O Error: %s" % (file, str(msg)))
         return
 
@@ -116,24 +122,24 @@
     f.close()
     if r.run():
         if verbose:
-            print "changed."
+            print("changed.")
             if dryrun:
-                print "But this is a dry run, so leaving it alone."
+                print("But this is a dry run, so leaving it alone.")
         if not dryrun:
             bak = file + ".bak"
             if makebackup:
                 shutil.copyfile(file, bak)
                 if verbose:
-                    print "backed up", file, "to", bak
+                    print("backed up", file, "to", bak)
             f = open(file, "w")
             r.write(f)
             f.close()
             if verbose:
-                print "wrote new", file
+                print("wrote new", file)
         return True
     else:
         if verbose:
-            print "unchanged."
+            print("unchanged.")
         return False
 
 def _rstrip(line, JUNK='\n \t'):
@@ -158,7 +164,7 @@
         # Raw file lines.
         self.raw = f.readlines()
 
-        # File lines, rstripped & tab-expanded.  Dummy at start is so
+        # File lines, rstripped & tab-expanded.  Stub at start is so
         # that we can use tokenize's 1-based line numbering easily.
         # Note that a line is all-blank iff it's "\n".
         self.lines = [_rstrip(line).expandtabs() + "\n"
@@ -203,7 +209,7 @@
                     want = have2want.get(have, -1)
                     if want < 0:
                         # Then it probably belongs to the next real stmt.
-                        for j in xrange(i+1, len(stats)-1):
+                        for j in range(i+1, len(stats)-1):
                             jline, jlevel = stats[j]
                             if jlevel >= 0:
                                 if have == getlspace(lines[jline]):
@@ -213,7 +219,7 @@
                                            # comment like this one,
                         # in which case we should shift it like its base
                         # line got shifted.
-                        for j in xrange(i-1, -1, -1):
+                        for j in range(i-1, -1, -1):
                             jline, jlevel = stats[j]
                             if jlevel >= 0:
                                 want = have + getlspace(after[jline-1]) - \
@@ -254,13 +260,14 @@
         return line
 
     # Line-eater for tokenize.
-    def tokeneater(self, type, token, (sline, scol), end, line,
+    def tokeneater(self, type, token, sline_scol, end, line,
                    INDENT=tokenize.INDENT,
                    DEDENT=tokenize.DEDENT,
                    NEWLINE=tokenize.NEWLINE,
                    COMMENT=tokenize.COMMENT,
                    NL=tokenize.NL):
 
+        (sline, scol) = sline_scol
         if type == NEWLINE:
             # A program statement, or ENDMARKER, will eventually follow,
             # after some (possibly empty) run of tokens of the form
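The tokeneater change above is the standard PEP 3113 rewrite: Python 3 removed tuple parameters, so the tuple is taken as a single argument and unpacked in the body. In general terms (illustrative function, not from this file):

    # Python 2 only -- tuple parameter in the signature:
    #     def handler(token, (line, col)): ...
    # Python 2 and 3 -- single parameter, unpacked inside:
    def handler(token, pos):
        (line, col) = pos
        return line, col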
diff --git a/utils/reverify_repair_failed.py b/utils/reverify_repair_failed.py
index 48679df..478f9ca 100755
--- a/utils/reverify_repair_failed.py
+++ b/utils/reverify_repair_failed.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 """
 Send all Repair Failed hosts that the user running this script has access to
@@ -8,6 +8,10 @@
 hosts have overcome whatever issue caused the failure and are useful again.
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import optparse, os, sys
 
 import common
@@ -29,11 +33,11 @@
     # The old RPC interface didn't return anything.
     # A more recent one returns a list of hostnames to make this message useful.
     if hostnames:
-        print 'The following Repair Failed hosts on', options.server,
-        print 'will be reverified:'
-        print ' '.join(hostnames)
+        print('The following Repair Failed hosts on', options.server, end=' ')
+        print('will be reverified:')
+        print(' '.join(hostnames))
     else:
-        print 'Repair Failed hosts on', options.server, 'will be reverified.'
+        print('Repair Failed hosts on', options.server, 'will be reverified.')
 
 
 if __name__ == '__main__':
diff --git a/utils/run_pylint.py b/utils/run_pylint.py
index ab6f0b3..f8b8f0a 100755
--- a/utils/run_pylint.py
+++ b/utils/run_pylint.py
@@ -69,6 +69,10 @@
 run_pylint.py filename.py
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import fnmatch
 import logging
 import os
@@ -81,11 +85,11 @@
 # Do a basic check to see if pylint is even installed.
 try:
     import pylint
-    from pylint.__pkginfo__ import version as pylint_version
+    from pylint import __version__ as pylint_version
 except ImportError:
     print ("Unable to import pylint, it may need to be installed."
            " Run 'sudo aptitude install pylint' if you haven't already.")
-    sys.exit(1)
+    raise
 
 pylint_version_parsed = tuple(map(int, pylint_version.split('.')))
 
@@ -95,6 +99,10 @@
 import astroid
 import pylint.lint
 from pylint.checkers import base, imports, variables
+import six
+from six.moves import filter
+from six.moves import map
+from six.moves import zip
 
 # need to put autotest root dir on sys.path so pylint will be happy
 autotest_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
@@ -297,14 +305,14 @@
 
 def check_file(file_path, base_opts):
     """
-    Invokes pylint on files after confirming that they're not black listed.
+    Invokes pylint on files after confirming that they're not blocklisted.
 
     @param base_opts: pylint base options.
     @param file_path: path to the file we need to run pylint on.
 
     @returns pylint return code
     """
-    if not isinstance(file_path, basestring):
+    if not isinstance(file_path, six.string_types):
         raise TypeError('expected a string as filepath, got %s'%
             type(file_path))
 
@@ -339,7 +347,8 @@
     """
     files = []
 
-    os.path.walk(dir_path, visit, files)
+    for dirpath, _, filenames in os.walk(dir_path):
+        visit(files, dirpath, filenames)
 
     return batch_check_files(files, base_opts)
 
diff --git a/utils/site_check_dut_usage.py b/utils/site_check_dut_usage.py
index 6c5c067..0f8606a 100755
--- a/utils/site_check_dut_usage.py
+++ b/utils/site_check_dut_usage.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -12,6 +12,10 @@
 utils/site_check_dut_usage.py 11/1/2011 11/5/2011 netbook_LABEL
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import datetime
 import optparse
 import sys
@@ -102,25 +106,25 @@
 
         # Print the list of hostnames if the user requested.
         if self._list_hostnames:
-            print '=================================================='
-            print 'Machines with label:'
+            print('==================================================')
+            print('Machines with label:')
             for machine in machines:
-                print machine
-            print '=================================================='
+                print(machine)
+            print('==================================================')
 
         # Print the usage summary.
-        print '=================================================='
-        print 'Total running time', total_run_time
-        print 'Total queued time', total_queued_time
-        print 'Total number of machines', num_machines
-        print 'Average time spent running tests per machine ', avg_run_time
-        print 'Average Job Time ', datetime.timedelta(seconds=int(
-                avg_job_run_time))
-        print 'Average Time Job Queued ', datetime.timedelta(seconds=int(
-                avg_job_queued_time))
-        print 'Total duration ', duration
-        print 'Usage ', usage
-        print '=================================================='
+        print('==================================================')
+        print('Total running time', total_run_time)
+        print('Total queued time', total_queued_time)
+        print('Total number of machines', num_machines)
+        print('Average time spent running tests per machine ', avg_run_time)
+        print('Average Job Time ', datetime.timedelta(seconds=int(
+                avg_job_run_time)))
+        print('Average Time Job Queued ', datetime.timedelta(seconds=int(
+                avg_job_queued_time)))
+        print('Total duration ', duration)
+        print('Usage ', usage)
+        print('==================================================')
 
 
     def run(self):
@@ -136,7 +140,7 @@
 
         durations = self.find_all_durations()
         if not durations:
-            print 'Query returned no results.'
+            print('Query returned no results.')
         else:
             self.calculate_usage(durations)
 
diff --git a/utils/start_gsc_devboard_host.py b/utils/start_gsc_devboard_host.py
new file mode 100644
index 0000000..f86bce6
--- /dev/null
+++ b/utils/start_gsc_devboard_host.py
@@ -0,0 +1,22 @@
+# Copyright 2022 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Starts a GSCDevboardHost manually for local testing."""
+
+import logging
+import os
+
+import common
+from autotest_lib.server.hosts import gsc_devboard_host
+
+# Start service per env vars DOCKER_HOST, DEBUGGER_SERIAL, DEVBOARDSVC_PORT
+logging.basicConfig(level=logging.INFO)
+e = os.environ
+h = gsc_devboard_host.GSCDevboardHost()
+h._initialize('',
+              service_debugger_serial=e.get('DEBUGGER_SERIAL'),
+              service_port=e.get('DEVBOARDSVC_PORT',
+                                 gsc_devboard_host.DEFAULT_SERVICE_PORT))
+h.start_service()
+logging.info("Service started, container endpoint at %s:%s", h.service_ip,
+             h.service_port)
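A hedged example of driving the helper above via a shell-out, run from the autotest root; the environment variable names come from the script itself, while the endpoint, serial, and port values are placeholders:

    import os
    import subprocess

    env = dict(os.environ)
    env.update({
            'DOCKER_HOST': 'tcp://127.0.0.1:2375',  # placeholder endpoint
            'DEBUGGER_SERIAL': '0123456789',        # placeholder serial
            'DEVBOARDSVC_PORT': '39999',            # placeholder port
    })
    subprocess.check_call(['python3', 'utils/start_gsc_devboard_host.py'],
                          env=env)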
diff --git a/utils/summarize_loadtest.py b/utils/summarize_loadtest.py
index d85e428..452a1fb 100755
--- a/utils/summarize_loadtest.py
+++ b/utils/summarize_loadtest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 
 # Copyright 2017 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -6,6 +6,10 @@
 
 """Load generator for devserver."""
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import argparse
 import itertools
 import json
@@ -141,14 +145,13 @@
     # - Ignore non-provisions.
     # - Filter via the specified FILTER_ARGS arguments.
     # - Filter via explicit filter request.
-    entries = filter(lambda x: x['name'] != 'Runner', all_entries)
+    entries = [x for x in all_entries if x['name'] != 'Runner']
     for arg in FILTER_ARGS:
         if options.__dict__.get(arg):
-            entries = filter(lambda x: x[arg] in
-                                       options.__dict__[arg].split(','),
-                             entries)
+            entries = [x for x in entries if x[arg] in
+                       options.__dict__[arg].split(',')]
     if options.filter:
-        entries = filter(lambda x: eval(options.filter, {'re': re}, x), entries)
+        entries = [x for x in entries if eval(options.filter, {'re': re}, x)]
 
     # Group the entries based on specified keys.
     groups = group_entries(options.group.split(',') if options.group else None,
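The filter()-to-comprehension rewrites above matter because Python 3's filter() returns a one-shot iterator rather than a list, so re-filtering or reusing the result would silently come up empty. A tiny illustration:

    entries = filter(lambda x: x != 'Runner', ['Runner', 'provision', 'update'])
    print(list(entries))  # ['provision', 'update']
    print(list(entries))  # [] in Python 3 -- the iterator is already exhausted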
diff --git a/utils/terminal.py b/utils/terminal.py
index 94298d4..dca8197 100644
--- a/utils/terminal.py
+++ b/utils/terminal.py
@@ -1,3 +1,4 @@
+# Lint as: python2, python3
 # -*- coding: utf-8 -*-
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -8,7 +9,10 @@
 This module handles terminal interaction including ANSI color codes.
 """
 
+from __future__ import absolute_import
+from __future__ import division
 from __future__ import print_function
+from six.moves import range
 
 
 class Color(object):
diff --git a/utils/tko_publish.py b/utils/tko_publish.py
index 9ed505d..b48e279 100755
--- a/utils/tko_publish.py
+++ b/utils/tko_publish.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/python3
 """
 This script will scan an autotest server results directory for job result
 directories that have completed and that have not yet been published on
@@ -8,6 +8,10 @@
 has been published yet).
 """
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import sys, os, re, optparse
 
 import common
@@ -53,7 +57,7 @@
     # mark the jobdir as published
     fd = open(os.path.join(jobdir, PUBLISH_FLAGFILE), 'w')
     fd.close()
-    print 'Published', jobdir
+    print('Published', jobdir)
 
 
 def main():
@@ -102,7 +106,7 @@
     options, args = parser.parse_args()
 
     if len(args) < 2:
-        print USAGE
+        print(USAGE)
         sys.exit(-1)
 
     options.resultsdir = args[0]
diff --git a/utils/unittest_suite.py b/utils/unittest_suite.py
index f0f3d77..9708e4d 100755
--- a/utils/unittest_suite.py
+++ b/utils/unittest_suite.py
@@ -1,5 +1,8 @@
-#!/usr/bin/python2 -u
+#!/usr/bin/python3 -u
 
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
 import os, sys, unittest, optparse
 import common
 from autotest_lib.utils import parallel
@@ -36,7 +39,6 @@
         'execution_engine_unittest.py',
         'service_proxy_lib_test.py',
         'site_parse_unittest.py',
-        'server_manager_unittest.py',
         ))
 
 REQUIRES_MYSQLDB = set((
@@ -103,8 +105,6 @@
     'lxc_functional_test.py',
     'service_unittest.py',
     'zygote_unittest.py',
-    # Require sponge utils installed in site-packages
-    'sponge_utils_functional_test.py',
     ))
 
 LONG_TESTS = (REQUIRES_MYSQLDB |
@@ -134,7 +134,7 @@
     if not options.debug:
         parallel.redirect_io()
 
-    print "Running %s" % '.'.join(mod_names)
+    print("Running %s" % '.'.join(mod_names))
     mod = common.setup_modules.import_module(mod_names[-1],
                                              '.'.join(mod_names[:-1]))
     test = unittest.defaultTestLoader.loadTestsFromModule(mod)
@@ -172,7 +172,7 @@
             # Skip all subdirectories below this one, it is not a module.
             del sub_dirs[:]
             if options.debug:
-                print 'Skipping', dir_path
+                print('Skipping', dir_path)
             continue  # Skip this directory.
 
         # Look for unittest files.
@@ -192,7 +192,7 @@
                 names = path_no_py[len(ROOT)+1:].split('/')
                 modules.append(['autotest_lib'] + names)
                 if options.debug:
-                    print 'testing', path_no_py
+                    print('testing', path_no_py)
     return modules
 
 
@@ -220,14 +220,14 @@
         modules = scan_for_modules(start, options)
 
     if options.debug:
-        print 'Number of test modules found:', len(modules)
+        print('Number of test modules found:', len(modules))
 
     chroot = is_inside_chroot()
     functions = {}
     for module_names in modules:
         if not chroot and module_names[-1] in REQUIRES_CHROOT:
             if options.debug:
-                print ('Test %s requires to run in chroot, skipped.' %
+                print('Test %s requires running inside the chroot; skipped.' %
                        module_names[-1])
             continue
         # Create a function that'll test a particular module.  module=module
@@ -244,7 +244,7 @@
             dargs['max_simultaneous_procs'] = 1
         pe = parallel.ParallelExecute(functions, **dargs)
         pe.run_until_completion()
-    except parallel.ParallelError, err:
+    except parallel.ParallelError as err:
         return err.errors
     return []
 
@@ -262,13 +262,13 @@
     absolute_start = os.path.join(ROOT, options.start)
     errors = find_and_run_tests(absolute_start, options)
     if errors:
-        print "%d tests resulted in an error/failure:" % len(errors)
+        print("%d tests resulted in an error/failure:" % len(errors))
         for error in errors:
-            print "\t%s" % error
-        print "Rerun", sys.argv[0], "--debug to see the failure details."
+            print("\t%s" % error)
+        print("Rerun", sys.argv[0], "--debug to see the failure details.")
         sys.exit(1)
     else:
-        print "All passed!"
+        print("All passed!")
         sys.exit(0)